diff --git a/.config/.last_opt_in_prompt.yaml b/.config/.last_opt_in_prompt.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0967ef424bce6791893e9a57bb952f80fd536e93 --- /dev/null +++ b/.config/.last_opt_in_prompt.yaml @@ -0,0 +1 @@ +{} diff --git a/.config/.last_survey_prompt.yaml b/.config/.last_survey_prompt.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c73c5981cb644d11750bb39d09a8a90c9af35623 --- /dev/null +++ b/.config/.last_survey_prompt.yaml @@ -0,0 +1 @@ +last_prompt_time: 1718716991.5380163 diff --git a/.config/.last_update_check.json b/.config/.last_update_check.json new file mode 100644 index 0000000000000000000000000000000000000000..acb1523e3ebd8ef6b85a0b579d6bb43f8ed01ab7 --- /dev/null +++ b/.config/.last_update_check.json @@ -0,0 +1 @@ +{"last_update_check_time": 1718716999.9053707, "last_update_check_revision": 20240607152945, "notifications": [], "last_nag_times": {}} \ No newline at end of file diff --git a/.config/active_config b/.config/active_config new file mode 100644 index 0000000000000000000000000000000000000000..331d858ce9b12fa6720414196a9dd6e0b6a0faaa --- /dev/null +++ b/.config/active_config @@ -0,0 +1 @@ +default \ No newline at end of file diff --git a/.config/config_sentinel b/.config/config_sentinel new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.config/configurations/config_default b/.config/configurations/config_default new file mode 100644 index 0000000000000000000000000000000000000000..ee06685b6841afd85a59e8ea5bc7ee8a27d6fe74 --- /dev/null +++ b/.config/configurations/config_default @@ -0,0 +1,6 @@ +[component_manager] +disable_update_check = true + +[compute] +gce_metadata_read_timeout_sec = 0 + diff --git a/.config/default_configs.db b/.config/default_configs.db new file mode 100644 index 0000000000000000000000000000000000000000..e8a2c56e9e0369b0e66531a0ddfec7c2b10a73ee Binary files /dev/null and b/.config/default_configs.db differ diff --git a/.config/gce b/.config/gce new file mode 100644 index 0000000000000000000000000000000000000000..c1f22fbc23bb6ee67824843d6685826db10313d3 --- /dev/null +++ b/.config/gce @@ -0,0 +1 @@ +False \ No newline at end of file diff --git a/.config/logs/2024.06.18/13.22.38.097292.log b/.config/logs/2024.06.18/13.22.38.097292.log new file mode 100644 index 0000000000000000000000000000000000000000..dd6edfb4644661394d978682e0b3375110c5f10d --- /dev/null +++ b/.config/logs/2024.06.18/13.22.38.097292.log @@ -0,0 +1,534 @@ +2024-06-18 13:22:50,123 DEBUG root Loaded Command Group: ['gcloud', 'components'] +2024-06-18 13:22:50,127 DEBUG root Loaded Command Group: ['gcloud', 'components', 'update'] +2024-06-18 13:22:50,129 DEBUG root Running [gcloud.components.update] with arguments: [--allow-no-backup: "True", --compile-python: "True", --quiet: "True", COMPONENT-IDS:6: "['core', 'gcloud-deps', 'bq', 'gcloud', 'gcloud-crc32c', 'gsutil']"] +2024-06-18 13:22:50,130 INFO ___FILE_ONLY___ Beginning update. This process may take several minutes. 
+
+2024-06-18 13:22:50,152 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
+2024-06-18 13:22:50,289 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components-2.json HTTP/1.1" 200 222658
+2024-06-18 13:22:50,307 INFO ___FILE_ONLY___ Your current Google Cloud CLI version is: 480.0.0
+2024-06-18 13:22:50,307 INFO ___FILE_ONLY___ Installing components from version: 480.0.0
[… table rendering elided — "These components will be installed.": BigQuery Command Line Tool 2.1.5 (1.7 MiB); BigQuery Command Line Tool (Platform Specific) 2.0.101 (< 1 MiB); Bundled Python 3.11 3.11.8 (75.1 MiB); Cloud Storage Command Line Tool 5.29 (11.3 MiB); Cloud Storage Command Line Tool (Platform Specific) 5.27 (< 1 MiB); Google Cloud CLI Core Libraries (Platform Specific) 2024.01.06 (< 1 MiB); Google Cloud CRC32C Hash Tool 1.0.0 (1.2 MiB); gcloud cli dependencies 2021.04.16 (< 1 MiB) …]
+2024-06-18 13:22:50,587 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/RELEASE_NOTES HTTP/1.1" 200 1228039
+2024-06-18 13:22:50,621 INFO ___FILE_ONLY___ For the latest full release notes, please visit: https://cloud.google.com/sdk/release_notes
+2024-06-18 13:22:50,623 INFO ___FILE_ONLY___ ╠═ Creating update staging area ═╣
[… progress bar output elided …]
+2024-06-18 13:22:53,434 INFO ___FILE_ONLY___ ╠═ Installing: BigQuery Command Line Tool ═╣
+2024-06-18 13:22:53,511 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bq-20240524155722.tar.gz HTTP/1.1" 200 1789662
[… progress bar output elided …]
+2024-06-18 13:22:53,798 INFO ___FILE_ONLY___ ╠═ Installing: BigQuery Command Line Tool (Platform Specific) ═╣
+2024-06-18 13:22:53,934 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bq-nix-20240106004423.tar.gz HTTP/1.1" 200 2026
+2024-06-18 13:22:53,944 INFO ___FILE_ONLY___ ╠═ Installing: Bundled Python 3.11 ═╣
+2024-06-18 13:22:54,092 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bundled-python3-unix-linux-x86_64-20240510142152.tar.gz HTTP/1.1" 200 78697278
[… progress bar output elided …]
+2024-06-18 13:22:59,390 INFO ___FILE_ONLY___ ╠═ Installing: Cloud Storage Command Line Tool ═╣
+2024-06-18 13:22:59,536 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gsutil-20240510142152.tar.gz HTTP/1.1" 200 11893574
[… progress bar output elided …]
+2024-06-18 13:23:01,114 INFO ___FILE_ONLY___ ╠═ Installing: Cloud Storage Command Line Tool (Platform Specific) ═╣
+2024-06-18 13:23:01,251 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gsutil-nix-20240106004423.tar.gz HTTP/1.1" 200 2042
+2024-06-18 13:23:01,262 INFO ___FILE_ONLY___ ╠═ Installing: Default set of gcloud commands ═╣
+2024-06-18 13:23:01,269 INFO ___FILE_ONLY___ ╠═ Installing: Google Cloud CLI Core Libraries (Platform Specific) ═╣
+2024-06-18 13:23:01,408 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-core-nix-20240106004423.tar.gz HTTP/1.1" 200 2410
+2024-06-18 13:23:01,419 INFO ___FILE_ONLY___ ╠═ Installing: Google Cloud CRC32C Hash Tool ═╣
+2024-06-18 13:23:01,557 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gcloud-crc32c-linux-x86_64-20231215195722.tar.gz HTTP/1.1" 200 1287877
[… progress bar output elided …]
+2024-06-18 13:23:01,619 INFO ___FILE_ONLY___ ╠═ Installing: gcloud cli dependencies ═╣
+2024-06-18 13:23:01,754 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gcloud-deps-linux-x86_64-20210416153011.tar.gz HTTP/1.1" 200 104
+2024-06-18 13:23:01,763 INFO ___FILE_ONLY___ ╠═ Creating backup and activating new installation ═╣
+2024-06-18 13:23:01,763 DEBUG root Attempting to move directory [/tools/google-cloud-sdk] to [/tools/google-cloud-sdk.staging/.install/.backup]
+2024-06-18 13:23:01,763 DEBUG root Attempting to move directory [/tools/google-cloud-sdk.staging] to [/tools/google-cloud-sdk]
+2024-06-18 13:23:01,767 DEBUG root Updating notification cache...
+2024-06-18 13:23:01,769 INFO ___FILE_ONLY___ Performing post processing steps...
+2024-06-18 13:23:01,769 DEBUG root Executing command: ['/tools/google-cloud-sdk/bin/gcloud', 'components', 'post-process']
+2024-06-18 13:23:11,533 INFO ___FILE_ONLY___ Update done!
+2024-06-18 13:23:11,536 DEBUG root Chosen display Format:none
+2024-06-18 13:23:11,537 INFO root Display format: "none"
diff --git a/.config/logs/2024.06.18/13.23.02.197770.log b/.config/logs/2024.06.18/13.23.02.197770.log
new file mode 100644
index 0000000000000000000000000000000000000000..a7dab0ad4dc20c3a5a450f102fb201fda79ad706
--- /dev/null
+++ b/.config/logs/2024.06.18/13.23.02.197770.log
@@ -0,0 +1,5 @@
+2024-06-18 13:23:02,198 DEBUG root Loaded Command Group: ['gcloud', 'components']
+2024-06-18 13:23:02,201 DEBUG root Loaded Command Group: ['gcloud', 'components', 'post_process']
+2024-06-18 13:23:02,203 DEBUG root Running [gcloud.components.post-process] with arguments: []
+2024-06-18 13:23:11,427 DEBUG root Chosen display Format:none
+2024-06-18 13:23:11,428 INFO root Display format: "none"
diff --git a/.config/logs/2024.06.18/13.23.12.081812.log b/.config/logs/2024.06.18/13.23.12.081812.log
new file mode 100644
index 0000000000000000000000000000000000000000..8d1947c9cb49cda2fbee28d7aff1fd2ecdc59ee6
--- /dev/null
+++ b/.config/logs/2024.06.18/13.23.12.081812.log
@@ -0,0 +1,169 @@
+2024-06-18 13:23:12,082 DEBUG root Loaded Command Group: ['gcloud', 'components']
+2024-06-18 13:23:12,085 DEBUG root Loaded Command Group: ['gcloud', 'components', 'update']
+2024-06-18 13:23:12,087 DEBUG root Running [gcloud.components.update] with arguments: [--quiet: "True", COMPONENT-IDS:8: "['gcloud', 'core', 'bq', 'gsutil', 'compute', 'preview', 'alpha', 'beta']"]
+2024-06-18 13:23:12,088 INFO ___FILE_ONLY___ Beginning update. This process may take several minutes.
+2024-06-18 13:23:12,096 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
+2024-06-18 13:23:12,229 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components-2.json HTTP/1.1" 200 222658
+2024-06-18 13:23:12,247 WARNING root Component [compute] no longer exists.
+2024-06-18 13:23:12,248 WARNING root Component [preview] no longer exists.
+2024-06-18 13:23:12,249 INFO ___FILE_ONLY___ Your current Google Cloud CLI version is: 480.0.0
+2024-06-18 13:23:12,249 INFO ___FILE_ONLY___ Installing components from version: 480.0.0
[… table rendering elided — "These components will be installed.": gcloud Alpha Commands 2024.06.07 (< 1 MiB); gcloud Beta Commands 2024.06.07 (< 1 MiB) …]
+2024-06-18 13:23:12,431 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/RELEASE_NOTES HTTP/1.1" 200 1228039
+2024-06-18 13:23:12,468 INFO ___FILE_ONLY___ For the latest full release notes, please visit: https://cloud.google.com/sdk/release_notes
+2024-06-18 13:23:12,470 INFO ___FILE_ONLY___ ╠═ Creating update staging area ═╣
[… progress bar output elided …]
+2024-06-18 13:23:19,604 INFO ___FILE_ONLY___ ╠═ Installing: gcloud Alpha Commands ═╣
+2024-06-18 13:23:19,746 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-alpha-20240607152945.tar.gz HTTP/1.1" 200 800
+2024-06-18 13:23:19,757 INFO ___FILE_ONLY___ ╠═ Installing: gcloud Beta Commands ═╣
+2024-06-18 13:23:19,891 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-beta-20240607152945.tar.gz HTTP/1.1" 200 797
+2024-06-18 13:23:19,901 INFO ___FILE_ONLY___ ╠═ Creating backup and activating new installation ═╣
+2024-06-18 13:23:19,901 DEBUG root Attempting to move directory [/tools/google-cloud-sdk] to [/tools/google-cloud-sdk.staging/.install/.backup]
+2024-06-18 13:23:19,901 DEBUG root Attempting to move directory [/tools/google-cloud-sdk.staging] to [/tools/google-cloud-sdk]
+2024-06-18 13:23:19,905 DEBUG root Updating notification cache...
+2024-06-18 13:23:19,907 INFO ___FILE_ONLY___ Performing post processing steps...
+2024-06-18 13:23:19,908 DEBUG root Executing command: ['/tools/google-cloud-sdk/bin/gcloud', 'components', 'post-process']
+2024-06-18 13:23:29,922 INFO ___FILE_ONLY___ Update done!
+2024-06-18 13:23:29,925 DEBUG root Chosen display Format:none
+2024-06-18 13:23:29,925 INFO root Display format: "none"
diff --git a/.config/logs/2024.06.18/13.23.20.359666.log b/.config/logs/2024.06.18/13.23.20.359666.log
new file mode 100644
index 0000000000000000000000000000000000000000..1cec422f5be1bfd4dd0825e4f7c7a1bce3f1b220
--- /dev/null
+++ b/.config/logs/2024.06.18/13.23.20.359666.log
@@ -0,0 +1,5 @@
+2024-06-18 13:23:20,360 DEBUG root Loaded Command Group: ['gcloud', 'components']
+2024-06-18 13:23:20,362 DEBUG root Loaded Command Group: ['gcloud', 'components', 'post_process']
+2024-06-18 13:23:20,364 DEBUG root Running [gcloud.components.post-process] with arguments: []
+2024-06-18 13:23:29,624 DEBUG root Chosen display Format:none
+2024-06-18 13:23:29,625 INFO root Display format: "none"
diff --git a/.config/logs/2024.06.18/13.23.30.494468.log b/.config/logs/2024.06.18/13.23.30.494468.log
new file mode 100644
index 0000000000000000000000000000000000000000..1ae87ec0ba6bab2d09f7f8656bd7e29a76d50154
--- /dev/null
+++ b/.config/logs/2024.06.18/13.23.30.494468.log
@@ -0,0 +1,8 @@
+2024-06-18 13:23:30,496 DEBUG root Loaded Command Group: ['gcloud', 'config']
+2024-06-18 13:23:30,546 DEBUG root Loaded Command Group: ['gcloud', 'config', 'set']
+2024-06-18 13:23:30,548 DEBUG root Running [gcloud.config.set] with arguments: [SECTION/PROPERTY: "component_manager/disable_update_check", VALUE: "true"]
+2024-06-18 13:23:30,549 INFO ___FILE_ONLY___ Updated property [component_manager/disable_update_check].
+ +2024-06-18 13:23:30,550 DEBUG root Chosen display Format:default +2024-06-18 13:23:30,551 INFO root Display format: "default" +2024-06-18 13:23:30,551 DEBUG root SDK update checks are disabled. diff --git a/.config/logs/2024.06.18/13.23.31.099704.log b/.config/logs/2024.06.18/13.23.31.099704.log new file mode 100644 index 0000000000000000000000000000000000000000..de1390c7e86361f40623195735f9cd918990462d --- /dev/null +++ b/.config/logs/2024.06.18/13.23.31.099704.log @@ -0,0 +1,8 @@ +2024-06-18 13:23:31,101 DEBUG root Loaded Command Group: ['gcloud', 'config'] +2024-06-18 13:23:31,154 DEBUG root Loaded Command Group: ['gcloud', 'config', 'set'] +2024-06-18 13:23:31,157 DEBUG root Running [gcloud.config.set] with arguments: [SECTION/PROPERTY: "compute/gce_metadata_read_timeout_sec", VALUE: "0"] +2024-06-18 13:23:31,158 INFO ___FILE_ONLY___ Updated property [compute/gce_metadata_read_timeout_sec]. + +2024-06-18 13:23:31,159 DEBUG root Chosen display Format:default +2024-06-18 13:23:31,160 INFO root Display format: "default" +2024-06-18 13:23:31,161 DEBUG root SDK update checks are disabled. diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..23dd4c9f5f903b799ec99355d64288e76d9605b8 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +sample_data/mnist_test.csv filter=lfs diff=lfs merge=lfs -text +sample_data/mnist_train_small.csv filter=lfs diff=lfs merge=lfs -text diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9e815a1fb90cddf4ffabffd50a6955ce07af6b7d --- /dev/null +++ b/README.md @@ -0,0 +1,107 @@ +--- +base_model: google/pegasus-x-base +tags: +- generated_from_trainer +model-index: +- name: google/pegasus-x-base + results: [] +--- + + + +# google/pegasus-x-base + +This model is a fine-tuned version of [google/pegasus-x-base](https://huggingface.co/google/pegasus-x-base) on an unknown dataset. 
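A minimal usage sketch for the checkpoint this commit adds: it loads the fine-tuned weights with the `transformers` API and summarizes a single document. The repo id below is a placeholder (the card does not state one), and the generation settings simply mirror the defaults in the `config.json` added later in this same commit (beam search with 8 beams, `max_length=512`, `min_length=100`).

```python
# Hypothetical usage sketch; substitute the real repo id or a local path.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_id = "path/to/this-checkpoint"  # placeholder, not the actual repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id)

document = "a large number of current language processing systems use a part-of-speech tagger ..."
# PEGASUS-X supports long inputs (max_position_embeddings is 16384 in this config).
inputs = tokenizer(document, return_tensors="pt", truncation=True, max_length=16384)

# Generation settings follow the bundled config.json: num_beams=8, max_length=512,
# min_length=100, length_penalty=2.0, no_repeat_ngram_size=3.
summary_ids = model.generate(**inputs, num_beams=8, max_length=512, min_length=100)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```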
+It achieves the following results on the evaluation set: +- Loss: 1.0135 + +## Model description + +More information needed + +## Intended uses & limitations + +More information needed + +## Training and evaluation data + +More information needed + +## Training procedure + +### Training hyperparameters + +The following hyperparameters were used during training: +- learning_rate: 5e-05 +- train_batch_size: 2 +- eval_batch_size: 2 +- seed: 42 +- gradient_accumulation_steps: 4 +- total_train_batch_size: 8 +- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 +- lr_scheduler_type: linear +- num_epochs: 5 +- mixed_precision_training: Native AMP + +### Training results + +| Training Loss | Epoch | Step | Validation Loss | +|:-------------:|:------:|:----:|:---------------:| +| 8.9092 | 0.1008 | 10 | 8.5348 | +| 7.9162 | 0.2015 | 20 | 7.5592 | +| 7.3907 | 0.3023 | 30 | 6.9080 | +| 6.8587 | 0.4030 | 40 | 6.1464 | +| 5.7817 | 0.5038 | 50 | 5.2883 | +| 5.0792 | 0.6045 | 60 | 3.9477 | +| 4.1259 | 0.7053 | 70 | 2.7538 | +| 3.0821 | 0.8060 | 80 | 1.7983 | +| 2.2714 | 0.9068 | 90 | 1.4814 | +| 1.7994 | 1.0076 | 100 | 1.4092 | +| 1.4936 | 1.1083 | 110 | 1.3189 | +| 1.6535 | 1.2091 | 120 | 1.2445 | +| 1.3122 | 1.3098 | 130 | 1.2139 | +| 1.0667 | 1.4106 | 140 | 1.1800 | +| 1.274 | 1.5113 | 150 | 1.1507 | +| 1.1739 | 1.6121 | 160 | 1.1279 | +| 1.1871 | 1.7128 | 170 | 1.1094 | +| 1.2037 | 1.8136 | 180 | 1.0973 | +| 1.0839 | 1.9144 | 190 | 1.0832 | +| 1.0738 | 2.0151 | 200 | 1.0752 | +| 1.0955 | 2.1159 | 210 | 1.0695 | +| 1.1285 | 2.2166 | 220 | 1.0629 | +| 0.9973 | 2.3174 | 230 | 1.0574 | +| 1.0522 | 2.4181 | 240 | 1.0557 | +| 1.0803 | 2.5189 | 250 | 1.0458 | +| 1.0707 | 2.6196 | 260 | 1.0425 | +| 1.1868 | 2.7204 | 270 | 1.0384 | +| 1.0117 | 2.8212 | 280 | 1.0374 | +| 0.9206 | 2.9219 | 290 | 1.0347 | +| 1.0099 | 3.0227 | 300 | 1.0306 | +| 1.0459 | 3.1234 | 310 | 1.0307 | +| 1.0721 | 3.2242 | 320 | 1.0313 | +| 1.015 | 3.3249 | 330 | 1.0278 | +| 1.0358 | 3.4257 | 340 | 1.0237 | +| 0.9608 | 3.5264 | 350 | 1.0206 | +| 1.0416 | 3.6272 | 360 | 1.0202 | +| 0.9304 | 3.7280 | 370 | 1.0201 | +| 1.0447 | 3.8287 | 380 | 1.0187 | +| 1.0007 | 3.9295 | 390 | 1.0180 | +| 1.1681 | 4.0302 | 400 | 1.0168 | +| 1.0258 | 4.1310 | 410 | 1.0163 | +| 1.1054 | 4.2317 | 420 | 1.0153 | +| 0.907 | 4.3325 | 430 | 1.0154 | +| 0.935 | 4.4332 | 440 | 1.0151 | +| 0.9904 | 4.5340 | 450 | 1.0145 | +| 0.9735 | 4.6348 | 460 | 1.0142 | +| 0.9633 | 4.7355 | 470 | 1.0138 | +| 1.2809 | 4.8363 | 480 | 1.0136 | +| 1.0361 | 4.9370 | 490 | 1.0135 | + + +### Framework versions + +- Transformers 4.41.2 +- Pytorch 2.3.0+cu121 +- Datasets 2.20.0 +- Tokenizers 0.19.1 diff --git a/config.json b/config.json new file mode 100644 index 0000000000000000000000000000000000000000..66d25e038112d14a86ed0aef120db7ca49afe0db --- /dev/null +++ b/config.json @@ -0,0 +1,63 @@ +{ + "_name_or_path": "google/pegasus-x-base", + "activation_dropout": 0.1, + "activation_function": "relu", + "add_bias_logits": false, + "add_final_layer_norm": true, + "architectures": [ + "PegasusXForConditionalGeneration" + ], + "attention_dropout": 0.1, + "block_size": 512, + "bos_token_id": 0, + "classif_dropout": 0.0, + "classifier_dropout": 0.0, + "d_model": 768, + "decoder_attention_heads": 12, + "decoder_ffn_dim": 3072, + "decoder_layerdrop": 0.0, + "decoder_layers": 12, + "decoder_start_token_id": 0, + "dropout": 0.1, + "early_stopping": true, + "encoder_attention_heads": 12, + "encoder_ffn_dim": 3072, + "encoder_layerdrop": 0.0, + "encoder_layers": 12, + "eos_token_id": 1, + 
"extra_pos_embeddings": 1, + "force_bos_token_to_be_generated": false, + "forced_eos_token_id": 1, + "gradient_checkpointing": false, + "id2label": { + "0": "LABEL_0", + "1": "LABEL_1", + "2": "LABEL_2" + }, + "init_std": 0.02, + "is_encoder_decoder": true, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_2": 2 + }, + "length_penalty": 2.0, + "max_length": 512, + "max_position_embeddings": 16384, + "min_length": 100, + "model_type": "pegasus_x", + "no_repeat_ngram_size": 3, + "normalize_before": true, + "normalize_embedding": false, + "num_beams": 8, + "num_global_tokens": 128, + "num_hidden_layers": 12, + "pad_token_id": 0, + "scale_embedding": true, + "stagger_local_blocks": true, + "static_position_embeddings": true, + "torch_dtype": "float32", + "transformers_version": "4.41.2", + "use_cache": true, + "vocab_size": 96103 +} diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-1031.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-1031.txt new file mode 100644 index 0000000000000000000000000000000000000000..ec4b5dde460aa6c1135d080a1fd512e82842d057 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-1031.txt @@ -0,0 +1 @@ +a large number of current language processing systems use a part-of-speech tagger for pre-processing. the tagger assigns a (unique or ambiguous) part-ofspeech tag to each token in the input and passes its output to the next processing level, usually a parser. furthermore, there is a large interest in part-ofspeech tagging for corpus annotation projects, who create valuable linguistic resources by a combination of automatic processing and human correction. for both applications, a tagger with the highest possible accuracy is required. the debate about which paradigm solves the part-of-speech tagging problem best is not finished. recent comparisons of approaches that can be trained on corpora (van halteren et al., 1998; volk and schneider, 1998) have shown that in most cases statistical aproaches (cutting et al., 1992; schmid, 1995; ratnaparkhi, 1996) yield better results than finite-state, rule-based, or memory-based taggers (brill, 1993; daelemans et al., 1996). they are only surpassed by combinations of different systems, forming a "voting tagger". among the statistical approaches, the maximum entropy framework has a very strong position. nevertheless, a recent independent comparison of 7 taggers (zavrel and daelemans, 1999) has shown that another approach even works better: markov models combined with a good smoothing technique and with handling of unknown words. this tagger, tnt, not only yielded the highest accuracy, it also was the fastest both in training and tagging. the tagger comparison was organized as a "blackbox test": set the same task to every tagger and compare the outcomes. this paper describes the models and techniques used by tnt together with the implementation. the reader will be surprised how simple the underlying model is. the result of the tagger comparison seems to support the maxime "the simplest is the best". however, in this paper we clarify a number of details that are omitted in major previous publications concerning tagging with markov models. as two examples, (rabiner, 1989) and (charniak et al., 1993) give good overviews of the techniques and equations used for markov models and part-ofspeech tagging, but they are not very explicit in the details that are needed for their application. 
we argue that it is not only the choice of the general model that determines the result of the tagger but also the various "small" decisions on alternatives. the aim of this paper is to give a detailed account of the techniques used in tnt. additionally, we present results of the tagger on the negra corpus (brants et al., 1999) and the penn treebank (marcus et al., 1993). the penn treebank results reported here for the markov model approach are at least equivalent to those reported for the maximum entropy approach in (ratnaparkhi, 1996). for a comparison to other taggers, the reader is referred to (zavrel and daelemans, 1999).we have shown that a tagger based on markov models yields state-of-the-art results, despite contrary claims found in the literature. for a comparison to other taggers, the reader is referred to (zavrel and daelemans, 1999). a large number of current language processing systems use a part-of-speech tagger for pre-processing. tnt is freely available to universities and related organizations for research purposes (see http://www.coli.uni-sb.derthorstenant). the penn treebank results reported here for the markov model approach are at least equivalent to those reported for the maximum entropy approach in (ratnaparkhi, 1996). the tagger assigns a (unique or ambiguous) part-ofspeech tag to each token in the input and passes its output to the next processing level, usually a parser. additionally, we present results of the tagger on the negra corpus (brants et al., 1999) and the penn treebank (marcus et al., 1993). it is a very interesting future research topic to determine the advantages of either of these approaches, to find the reason for their high accuracies, and to find a good combination of both. furthermore, there is a large interest in part-ofspeech tagging for corpus annotation projects, who create valuable linguistic resources by a combination of automatic processing and human correction. for example, the markov model tagger used in the comparison of (van halteren et al., 1998) yielded worse results than all other taggers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-1043.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-1043.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c2a9217a86adcec18a2ec9ff2b9f57cd001a960 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-1043.txt @@ -0,0 +1 @@ +current automatic summarizers usually rely on sentence extraction to produce summaries. human professionals also often reuse the input documents to generate summaries; however, rather than simply extracting sentences and stringing them together, as most current summarizers do, humans often "edit" the extracted sentences in some way so that the resulting summary is concise and coherent. we analyzed a set of articles and identified six major operations that can be used for editing the extracted sentences, including removing extraneous phrases from an extracted sentence, combining a reduced sentence with other sentences, syntactic transformation, substituting phrases in an extracted sentence with their paraphrases, substituting phrases with more general or specific descriptions, and reordering the extracted sentences (jing and mckeown, 1999; jing and mckeown, 2000). we call the operation of removing extraneous phrases from an extracted sentence sentence reduction. 
it is one of the most effective operations that can be used to edit the extracted sentences. reduction can remove material at any granularity: a word, a prepositional phrase, a gerund, a to-infinitive or a clause. we use the term "phrase" here to refer to any of the above components that can be removed in reduction. the following example shows an original sentence and its reduced form written by a human professional: original sentence: when it arrives sometime next year in new tv sets, the v-chip will give parents a new and potentially revolutionary device to block out programs they don't want their children to see. reduced sentence by humans: the v-chip will give parents a device to block out programs they don't want their children to see. we implemented an automatic sentence reduction system. input to the reduction system includes extracted sentences, as well as the original document. output of reduction are reduced forms of the extracted sentences, which can either be used to produce summaries directly, or be merged with other sentences. the reduction system uses multiple sources of knowledge to make reduction decisions, including syntactic knowledge, context, and statistics computed from a training corpus. we evaluated the system against the output of human professionals. the program achieved a success rate of 81.3%, meaning that 81.3% of reduction decisions made by the system agreed with those of humans. sentence reduction improves the conciseness of automatically generated summaries, making it concise and on target. it can also improve the coherence of generated summaries, since extraneous phrases that can potentially introduce incoherece are removed. we collected 500 sentences and their corresponding reduced forms written by humans, and found that humans reduced the length of these 500 sentences by 44.2% on average. this indicates that a good sentence reduction system can improve the conciseness of generated summaries significantly. in the next section, we describe the sentence reduction algorithm in details. in section 3, we introduce the evaluation scheme used to access the performance of the system and present evaluation results. in section 4, we discuss other applications of sentence reduction, the interaction between reduction and other modules in a summarization system, and related work on sentence simplication. finally, we the goal of sentence reduction is to "reduce without major loss"; that is, we want to remove as many extraneous phrases as possible from an extracted sentence so that it can be concise, but without detracting from the main idea the sentence conveys. ideally, we want to remove a phrase from an extracted sentence only if it is irrelevant to the main topic. to achieve this, the system relies on multiple sources of knowledge to make reduction decisions. we first introduce the resources in the system and then describe the reduction algorithm. (1) the corpus. one of the key features of the system is that it uses a corpus consisting of original sentences and their corresponding reduced forms written by humans for training and testing purpose. this corpus was created using an automatic program we have developed to automatically analyze human-written abstracts. the program, called the decomposition program, matches phrases in a human-written summary sentence to phrases in the original document (jing and mckeown, 1999). 
the human-written abstracts were collected from the free daily news service "communicationsrelated headlines", provided by the benton foundation (http://www.benton.org). the articles in the corpus are news reports on telecommunication related issues, but they cover a wide range of topics, such as law, labor, and company mergers. database to date. it provides lexical relations between words, including synonymy, antonymy, meronymy, entailment (e.g., eat —> chew), or causation (e.g., kill --* die). these lexical links are used to identify the focus in the local context. (4) the syntactic parser. we use the english slot grammar(esg) parser developed at ibm (mccord, 1990) to analyze the syntactic structure of an input sentence and produce a sentence parse tree. the esg parser not only annotates the syntactic category of a phrase (e.g., "np" or "vp"), it also annotates the thematic role of a phrase (e.g., "subject" or "object"). there are five steps in the reduction program: step 1: syntactic parsing. we first parse the input sentence using the esg parser and produce the sentence parse tree. the operations in all other steps are performed based on this parse tree. each following step annotates each node in the parse tree with additional information, such as syntactic or context importance, which are used later to determine which phrases (they are represented as subtrees in a parse tree) can be considered extraneous and thus removed. step 2: grammar checking. in this step, we determine which components of a sentence must not be deleted to keep the sentence grammatical. to do this, we traverse the parse tree produced in the first step in top-down order and mark, for each node in the parse tree, which of its children are grammatically obligatory. we use two sources of knowledge for this purpose. one source includes simple, linguistic-based rules that use the thematic role structure produced by the esg parser. for instance, for a sentence, the main verb, the subject, and the object(s) are essential if they exist, but a prepositional phrase is not; for a noun phrase, the head noun is essential, but an adjective modifier of the head noun is not. the other source we rely on is the large-scale lexicon we described earlier. the information in the lexicon is used to mark the obligatory arguments of verb phrases. for example, for the verb "convince", the lexicon has the following entry: this entry indicates that the verb "convince" can be followed by a noun phrase and a prepositional phrase starting with the preposition "of' (e.g., he convinced me of his innocence). it can also be followed by a noun phrase and a to-infinitive phrase (e.g., he convinced me to go to the party). this information prevents the system from deleting the "of" prepositional phrase or the to-infinitive that is part of the verb phrase. at the end of this step, each node in the parse tree — including both leaf nodes and intermediate nodes — is annotated with a value indicating whether it is grammatically obligatory. note that whether a node is obligatory is relative to its parent node only. for example, whether a determiner is obligatory is relative to the noun phrase it is in; whether a prepositional phrase is obligatory is relative to the sentence or the phrase it is in. step 3: context information. in this step, the system decides which components in the sentence are most related to the main topic being discussed. to measure the importance of a phrase in the local context, the system relies on lexical links between words. 
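as a rough illustration of the grammar-checking step (step 2) described above, the python sketch below marks obligatory children of each parse-tree node from thematic roles plus a small verb lexicon; the tree format, role labels, and lexicon entries are assumptions made for the example, not the system's actual data structures.

OBLIGATORY_ROLES = {"subject", "object", "head"}          # simple linguistic rules
VERB_LEXICON = {
    # verb -> complements it subcategorizes for (illustrative assumption)
    "convince": {"of", "to-infinitive"},
}

def mark_obligatory(node, parent_verb=None):
    """annotate every child with child['obligatory'] relative to its parent."""
    verb = node.get("word") if node.get("cat") == "vp" else parent_verb
    for child in node.get("children", []):
        role = child.get("role", "")
        obligatory = role in OBLIGATORY_ROLES
        # a pp or to-infinitive is obligatory if the governing verb requires it
        if verb in VERB_LEXICON and child.get("intro") in VERB_LEXICON[verb]:
            obligatory = True
        child["obligatory"] = obligatory
        mark_obligatory(child, parent_verb=verb)
    return node

tree = {"cat": "s", "children": [
    {"cat": "np", "role": "subject", "children": []},
    {"cat": "vp", "word": "convince", "role": "head", "children": [
        {"cat": "np", "role": "object", "children": []},
        {"cat": "pp", "role": "comp", "intro": "of", "children": []},
        {"cat": "pp", "role": "adjunct", "intro": "despite", "children": []},
    ]},
]}
mark_obligatory(tree)   # the "of" pp is kept obligatory, the "despite" pp is not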
the hypothesis is that the more connected a word is with other words in the local context, the more likely it is to be the focus of the local context. we link the words in the extracted sentence with words in its local context, if they are repetitions, morphologically related, or linked in wordnet through one of the lexical relations. the system then computes an importance score for each word in the extracted sentence, based on the number of links it has with other words and the types of links. the formula for computing the context importance score for a word w is as follows: Σi (li × numi(w)). here, i represents the different types of lexical relations the system considered, including repetition, inflectional relation, derivational relation, and the lexical relations from wordnet. we assigned a weight to each type of lexical relation, represented by li in the formula. relations such as repetition or inflectional relation are considered more important and are assigned higher weights, while relations such as hypernym are considered less important and assigned lower weights. numi(w) in the formula represents the number of lexical links of type i that the word w has with words in the local context. after an importance score is computed for each word, each phrase in the sentence gets a score by adding up the scores of its children nodes in the parse tree. this score indicates how important the phrase is in the local context. step 4: corpus evidence. the program uses a corpus consisting of sentences reduced by human professionals and their corresponding original sentences to compute how likely humans remove a certain phrase. the system first parsed the sentences in the corpus using the esg parser. it then marked which subtrees in these parse trees (i.e., phrases in the sentences) were removed by humans. using this corpus of marked parse trees, we can compute how likely a subtree is removed from its parent node. for example, we can compute the probability that the "when" temporal clause is removed when the main verb is "give", represented as prob("when-clause is removed" | "v=give"), or the probability that the to-infinitive modifier of the head noun "device" is removed, represented as prob("to-infinitive modifier is removed" | "n=device"). these probabilities are computed using bayes's rule. for example, the probability that the "when" temporal clause is removed when the main verb is "give", prob("when-clause is removed" | "v=give"), is computed as the product of prob("v=give" | "when-clause is removed") (i.e., the probability that the main verb is "give" when the "when" clause is removed) and prob("when-clause is removed") (i.e., the probability that the "when" clause is removed), divided by prob("v=give") (i.e., the probability that the main verb is "give"). besides computing the probability that a phrase is removed, we also compute two other types of probabilities: the probability that a phrase is reduced (i.e., the phrase is not removed as a whole, but some components in the phrase are removed), and the probability that a phrase is unchanged at all (i.e., neither removed nor reduced). these corpus probabilities help us capture human practice. for example, for sentences like "the agency reported that ...", "the other source says that ...", "the new study suggests that ...", the that-clause following the say-verb (i.e., report, say, and suggest) in each sentence is very rarely changed at all by professionals. 
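the bayes's-rule computation described in step 4 can be sketched directly from corpus counts; the tuple format and the toy counts below are assumptions for illustration.

from collections import Counter

# sketch of the step-4 corpus probabilities: estimate p(event | head) with
# bayes's rule from counts over a corpus of marked parse trees.
# the corpus tuples and counts below are illustrative assumptions.
corpus = [
    ("v=give", "when-clause is removed"),
    ("v=give", "when-clause is unchanged"),
    ("v=give", "when-clause is removed"),
    ("n=device", "to-infinitive modifier is removed"),
]
n = len(corpus)
head_counts = Counter(h for h, _ in corpus)
event_counts = Counter(e for _, e in corpus)
joint_counts = Counter(corpus)

def prob_event_given_head(event, head):
    # p(event | head) = p(head | event) * p(event) / p(head)
    p_head_given_event = joint_counts[(head, event)] / event_counts[event]
    p_event = event_counts[event] / n
    p_head = head_counts[head] / n
    return p_head_given_event * p_event / p_head

print(prob_event_given_head("when-clause is removed", "v=give"))   # 2/3 with these counts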
the system can capture this human practice, since the probability that that-clause of the verb say or report being unchanged at all will be relatively high, which will help the system to avoid removing components in the that-clause. these corpus probabilities are computed beforehand using a training corpus. they are then stored in a table and loaded at running time. step 5: final decision. the final reduction decisions are based on the results from all the earlier steps. to decide which phrases to remove, the system traverses the sentence parse tree, which now have been annotated with different types of information from earlier steps, in the top-down order and decides which subtrees should be removed, reduced or unchanged. a subtree (i.e., a phrase) is removed only if it is not grammatically obligatory, not the focus of the local context (indicated by a low importance score), and has a reasonable probability of being removed by humans. figure 1 shows sample output of the reduction program. the reduced sentences produced by humans are also provided for comparison.current automatic summarizers usually rely on sentence extraction to produce summaries. the reduced sentences produced by humans are also provided for comparison. this material is based upon work supported by the national science foundation under grant no. figure 1 shows sample output of the reduction program. we call the operation of removing extraneous phrases from an extracted sentence sentence reduction. a subtree (i.e., a phrase) is removed only if it is not grammatically obligatory, not the focus of the local context (indicated by a low importance score), and has a reasonable probability of being removed by humans. it is one of the most effective operations that can be used to edit the extracted sentences. reduction can remove material at any granularity: a word, a prepositional phrase, a gerund, a to-infinitive or a clause. to decide which phrases to remove, the system traverses the sentence parse tree, which now have been annotated with different types of information from earlier steps, in the top-down order and decides which subtrees should be removed, reduced or unchanged. the final reduction decisions are based on the results from all the earlier steps. we use the term "phrase" here to refer to any of the above components that can be removed in reduction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2004.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2004.txt new file mode 100644 index 0000000000000000000000000000000000000000..bfb30adc10386ccb0c9f32550718ba7c6fc2ebfd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2004.txt @@ -0,0 +1 @@ +even moderately long documents typically address several topics or different aspects of the same topic. the aim of linear text segmentation is to discover the topic boundaries. the uses of this procedure include information retrieval (hearst and plaunt, 1993; hearst, 1994; yaari, 1997; reynar, 1999), summarization (reynar, 1998), text understanding, anaphora resolution (kozima, 1993), language modelling (morris and hirst, 1991; beeferman et al., 1997b) and improving document navigation for the visually disabled (choi, 2000). this paper focuses on domain independent methods for segmenting written text. we present a new algorithm that builds on previous work by reynar (reynar, 1998; reynar, 1994). 
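the step-5 decision rule of the reduction system described above (A00-1043) can be sketched as a simple traversal that drops a subtree only when it is optional, unimportant in context, and likely to be removed according to the corpus; the thresholds and attribute names are assumptions for illustration.

# sketch of step 5 (final decision): traverse the annotated parse tree top-down
# and mark subtrees for removal. thresholds and attribute names are assumptions.
CONTEXT_THRESHOLD = 0.3      # below this, the phrase is not the local focus
REMOVAL_THRESHOLD = 0.5      # corpus probability needed to consider removal

def decide(node):
    for child in node.get("children", []):
        removable = (not child.get("obligatory", True)
                     and child.get("context_score", 1.0) < CONTEXT_THRESHOLD
                     and child.get("p_removed", 0.0) >= REMOVAL_THRESHOLD)
        child["action"] = "remove" if removable else "keep"
        if not removable:            # only descend into phrases that are kept
            decide(child)
    return node

example = {"children": [
    {"obligatory": False, "context_score": 0.1, "p_removed": 0.8, "children": []},
    {"obligatory": True,  "context_score": 0.1, "p_removed": 0.9, "children": []},
]}
decide(example)   # first child -> "remove", second -> "keep"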
the primary distinction of our method is the use of a ranking scheme and the cosine similarity measure (van rijsbergen, 1979) in formulating the similarity matrix. we propose that the similarity values of short text segments is statistically insignificant. thus, one can only rely on their order, or rank, for clustering.a segmentation algorithm has two key elements, a, clustering strategy and a similarity measure. even moderately long documents typically address several topics or different aspects of the same topic. we would also like to develop a linear time and multi-source version of the algorithm. thus, one can only rely on their order, or rank, for clustering. the significance of our results has been confirmed by both t-test and ks-test. given the quality of an algorithm is task dependent, the following experiments focus on the relative performance. c99, k98 and r98 are all polynomial time algorithms. it would be interesting to compare c99 with the multi-source method described in (beeferman et al., 1999) using the tdt corpus. existing work falls into one of two categories, lexical cohesion methods and multi-source methods (yaari, 1997). our results show divisive clustering (r98) is more precise than sliding window (h94) and lexical chains (k98) for locating topic boundaries. the definition of a topic segment ranges from complete stories (allan et al., 1998) to summaries (ponte and croft, 1997). if one disregards segmentation accuracy, h94 has the best algorithmic performance (linear). the focus is on the segmentation of transcribed spoken text and broadcast news stories where the presentation format and regular cues can be exploited to improve accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2009.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2009.txt new file mode 100644 index 0000000000000000000000000000000000000000..65393172772a0cee619f56b94b107a82b3502220 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2009.txt @@ -0,0 +1 @@ +word sense disambiguation is often cast as a problem in supervised learning, where a disambiguator is induced from a corpus of manually sense—tagged text using methods from statistics or machine learning. these approaches typically represent the context in which each sense—tagged instance of a word occurs with a set of linguistically motivated features. a learning algorithm induces a representative model from these features which is employed as a classifier to perform disambiguation. this paper presents a corpus—based approach that results in high accuracy by combining a number of very simple classifiers into an ensemble that performs disambiguation via a majority vote. this is motivated by the observation that enhancing the feature set or learning algorithm used in a corpus—based approach does not usually improve disambiguation accuracy beyond what can be attained with shallow lexical features and a simple supervised learning algorithm. for example, a naive bayesian classifier (duda and hart, 1973) is based on a blanket assumption about the interactions among features in a sensetagged corpus and does not learn a representative model. despite making such an assumption, this proves to be among the most accurate techniques in comparative studies of corpus—based word sense disambiguation methodologies (e.g., (leacock et al., 1993), (mooney, 1996), (ng and lee, 1996), (pedersen and bruce, 1997)). 
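the ranking idea of A00-2004 sketched in python: build a cosine similarity matrix over sentence vectors, then replace each value by its rank relative to a local neighbourhood, since raw similarity values for short segments are treated as unreliable; the window size, tokenization, and example sentences are assumptions for illustration.

import math
from collections import Counter

# sketch of the similarity-plus-ranking idea: cosine similarity matrix over
# sentences, each entry then replaced by its local rank. illustrative assumptions.
def cosine(a, b):
    num = sum(a[w] * b[w] for w in a if w in b)
    den = math.sqrt(sum(v * v for v in a.values())) * math.sqrt(sum(v * v for v in b.values()))
    return num / den if den else 0.0

sentences = ["the cat sat on the mat",
             "a cat and a dog sat together",
             "stock prices fell sharply today",
             "the market saw prices fall again"]
vectors = [Counter(s.split()) for s in sentences]
n = len(vectors)
sim = [[cosine(vectors[i], vectors[j]) for j in range(n)] for i in range(n)]

def rank_matrix(sim, radius=1):
    """each cell becomes the fraction of its neighbours (within `radius`) it exceeds."""
    n = len(sim)
    rank = [[0.0] * n for _ in range(n)]
    for i in range(n):
        for j in range(n):
            neighbours = [sim[x][y]
                          for x in range(max(0, i - radius), min(n, i + radius + 1))
                          for y in range(max(0, j - radius), min(n, j + radius + 1))
                          if (x, y) != (i, j)]
            rank[i][j] = sum(1 for v in neighbours if sim[i][j] > v) / len(neighbours)
    return rank

rank = rank_matrix(sim)   # clustering then operates on ranks rather than raw values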
these studies represent the context in which an ambiguous word occurs with a wide variety of features. however, when the contribution of each type of feature to overall accuracy is analyzed (eg. (ng and lee, 1996)), shallow lexical features such as co—occurrences and collocations prove to be stronger contributors to accuracy than do deeper, linguistically motivated features such as part—of—speech and verb—object relationships. it has also been shown that the combined accuracy of an ensemble of multiple classifiers is often significantly greater than that of any of the individual classifiers that make up the ensemble (e.g., (dietterich, 1997)). in natural language processing, ensemble techniques have been successfully applied to part— of—speech tagging (e.g., (brill and wu, 1998)) and parsing (e.g., (henderson and brill, 1999)). when combined with a history of disambiguation success using shallow lexical features and naive bayesian classifiers, these findings suggest that word sense disambiguation might best be improved by combining the output of a number of such classifiers into an ensemble. this paper begins with an introduction to the naive bayesian classifier. the features used to represent the context in which ambiguous words occur are presented, followed by the method for selecting the classifiers to include in the ensemble. then, the line and interesi data is described. experimental results disambiguating these words with an ensemble of naive bayesian classifiers are shown to rival previously published results. this paper closes with a discussion of the choices made in formulating this methodology and plans for future work.word sense disambiguation is often cast as a problem in supervised learning, where a disambiguator is induced from a corpus of manually sense—tagged text using methods from statistics or machine learning. this paper closes with a discussion of the choices made in formulating this methodology and plans for future work. a preliminary version of this paper appears in (pedersen, 2000). experimental results disambiguating these words with an ensemble of naive bayesian classifiers are shown to rival previously published results. these approaches typically represent the context in which each sense—tagged instance of a word occurs with a set of linguistically motivated features. a naive bayesian classifier assumes that all the feature variables representing a problem are conditionally independent given the value of a classification variable. each of the nine member classifiers votes for the most probable sense given the particular context represented by that classifier; the ensemble disambiguates by assigning the sense that receives a majority of the votes. this work extends ideas that began in collaboration with rebecca bruce and janyce wiebe. this paper shows that word sense disambiguation accuracy can be improved by combining a number of simple classifiers into an ensemble. this approach was evaluated using the widely studied nouns line and interest, which are disambiguated with accuracy of 88% and 89%, which rivals the best previously published results. 
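a minimal sketch of the majority-vote ensemble of A00-2009: one naive bayesian classifier per context window, each voting for a sense; the toy training data, window sizes, and add-one smoothing are assumptions for illustration rather than the paper's exact nine-member configuration.

import math
from collections import Counter, defaultdict

# sketch of a majority-vote ensemble of naive bayes classifiers, one per
# context-window size. training data, windows, and smoothing are assumptions.
def train_nb(instances):
    """instances: list of (context_words, sense) -> (priors, per-sense word counts)."""
    priors, word_counts = Counter(), defaultdict(Counter)
    for words, sense in instances:
        priors[sense] += 1
        word_counts[sense].update(words)
    return priors, word_counts

def classify_nb(model, words):
    priors, word_counts = model
    vocab = {w for c in word_counts.values() for w in c}
    best, best_score = None, float("-inf")
    for sense in priors:
        total = sum(word_counts[sense].values())
        score = math.log(priors[sense])
        for w in words:
            score += math.log((word_counts[sense][w] + 1) / (total + len(vocab)))  # add-one
        if score > best_score:
            best, best_score = sense, score
    return best

def window(tokens, position, size):
    return tokens[max(0, position - size):position] + tokens[position + 1:position + 1 + size]

# toy sense-tagged data for "line"; each item is (tokens, target_index, sense)
data = [("cross the line on the road".split(), 2, "boundary"),
        ("a line of products for sale".split(), 1, "product"),
        ("the product line sells well".split(), 2, "product"),
        ("do not cross that line".split(), 4, "boundary")]

windows = [1, 2, 3]                              # one ensemble member per window size
models = [train_nb([(window(t, i, k), s) for t, i, s in data]) for k in windows]

test_tokens, test_index = "a new line of clothing".split(), 2
votes = Counter(classify_nb(m, window(test_tokens, test_index, k))
                for m, k in zip(models, windows))
print(votes.most_common(1)[0][0])                # sense receiving the majority of votes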
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2018.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2018.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab6c720fbdfde4cb2a85f996fe49c4f7a13928c6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2018.txt @@ -0,0 +1 @@ +we present a new parser for parsing down to penn tree-bank style parse trees [16] that achieves 90.1% average precision/recall for sentences of length < 40, and 89.5% for sentences of length < 100, when trained and tested on the previously established [5,9,10,15,17] "standard" sections of the wall street journal tree-bank. this represents a 13% decrease in error rate over the best single-parser results on this corpus [9]. following [5,10], our parser is based upon a probabilistic generative model. that is, for all sentences s and all parses π, the parser assigns a probability p(s, π) = p(π), the equality holding when we restrict consideration to π whose yield is s. (this research was supported in part by nsf grant lis sbr 9720368. the author would like to thank mark johnson and all the rest of the brown laboratory for linguistic information processing.) then for any s the parser returns the parse π that maximizes this probability. that is, the parser implements the function arg maxπ p(π | s) = arg maxπ p(π, s) = arg maxπ p(π). what fundamentally distinguishes probabilistic generative parsers is how they compute p(π), and it is to that topic we turn next. what fundamentally distinguishes probabilistic generative parsers is how they compute p(π), and it is to that topic we turn next. it is to this project that our future parsing work will be devoted. we have presented a lexicalized markov grammar parsing model that achieves (using the now standard training/testing/development sections of the penn treebank) an average precision/recall of 91.1% on sentences of length < 40 and 89.5% on sentences of length < 100. indeed, we initiated this line of work in an attempt to create a parser that would be flexible enough to allow modifications for parsing down to more semantic levels of detail. this corresponds to an error reduction of 13% over the best previously published single parser results on this test set, those of collins [9]. we present a new parser for parsing down to penn tree-bank style parse trees [16] that achieves 90.1% average precision/recall for sentences of length < 40, and 89.5% for sentences of length < 100, when trained and tested on the previously established [5,9,10,15,17] "standard" sections of the wall street journal tree-bank. in the previous sections we have concentrated on the relation of the parser to a maximum-entropy approach, the aspect of the parser that is most novel. 
much information about usage can be obtained from quite a limited context: choueka and lusignan (1985) found that people can typically recognize the intended sense of a polysemous word by looking at a narrow window of one or two words around it. statistically-based computer programs have been able to do the same with a high level of accuracy (kilgarriff and palmer, 2000). the goal of our work is to automatically identify inappropriate usage of specific vocabulary words in essays by looking at the local contextual cues around a target word. we have developed a statistical system, alek (assessing lexical knowledge), that uses statistical analysis for this purpose. a major objective of this research is to avoid the laborious and costly process of collecting errors (or negative evidence) for each word that we wish to evaluate. instead, we train alek on a general corpus of english and on edited text containing example uses of the target word. the system identifies inappropriate usage based on differences between the word's local context cues in an essay and the models of context it has derived from the corpora of well-formed sentences. a requirement for alek has been that all steps in the process be automated, beyond choosing the words to be tested and assessing the results. once a target word is chosen, preprocessing, building a model of the word's appropriate usage, and identifying usage errors in essays is performed without manual intervention. alek has been developed using the test of english as a foreign language (toefl) administered by the educational testing service. toefl is taken by foreign students who are applying to us undergraduate and graduate-level programs.a good indicator of whether a person knows the meaning of a word is the ability to use it appropriately in a sentence (miller and gildea, 1987). toefl is taken by foreign students who are applying to us undergraduate and graduate-level programs. the problem of error detection does not entail finding similarities to appropriate usage, rather it requires identifying one element among the contextual cues that simply does not fit. approaches to detecting errors by non-native writers typically produce grammars that look for specific expected error types (schneider and mccoy, 1998; park, palmer and washburn, 1997). the unsupervised techniques that we have presented for inferring negative evidence are effective in recognizing grammatical errors in written text. however, its techniques could be incorporated into a grammar checker for native speakers. alek has been developed using the test of english as a foreign language (toefl) administered by the educational testing service. much information about usage can be obtained from quite a limited context: choueka and lusignan (1985) found that people can typically recognize the intended sense of a polysemous word by looking at a narrow window of one or two words around it. under this approach, essays written by esl students are collected and examined for errors. 
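one way the alek idea described above could be realized, as a rough sketch: build a model of the target word's local context from well-formed text and flag essay occurrences whose immediate context is unattested; the bigram features and frequency cutoff are assumptions for illustration, not the system's actual statistics.

from collections import Counter

# rough sketch of the idea behind alek (A00-2019): model the local context of a
# target word from well-formed text and flag unattested contexts in essays.
# the bigram features, example data, and cutoff are illustrative assumptions.
TARGET = "knowledge"

well_formed = ["she has a deep knowledge of physics",
               "his knowledge of the field is impressive",
               "they acquired knowledge of the local customs"]

def context_features(sentence, target):
    toks = sentence.split()
    feats = []
    for i, tok in enumerate(toks):
        if tok == target:
            if i > 0:
                feats.append(("prev", toks[i - 1]))
            if i + 1 < len(toks):
                feats.append(("next", toks[i + 1]))
    return feats

model = Counter(f for s in well_formed for f in context_features(s, TARGET))

def flag_unusual(sentence, min_count=1):
    """return context features of the target word that fall below the cutoff."""
    return [f for f in context_features(sentence, TARGET) if model[f] < min_count]

print(flag_unusual("he has many knowledge about computer"))
# both ("prev", "many") and ("next", "about") are unseen in the well-formed corpus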
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2024.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2024.txt new file mode 100644 index 0000000000000000000000000000000000000000..30ca172e52f8b224e9a6a43dd3194b451ec0bacf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2024.txt @@ -0,0 +1 @@ +there is a big gap between the summaries produced by current automatic summarizers and the abstracts written by human professionals. certainly one factor contributing to this gap is that automatic systems can not always correctly identify the important topics of an article. another factor, however, which has received little attention, is that automatic summarizers have poor text generation techniques. most automatic summarizers rely on extracting key sentences or paragraphs from an article to produce a summary. since the extracted sentences are disconnected in the original article, when they are strung together, the resulting summary can be inconcise, incoherent, and sometimes even misleading. we present a cut and paste based text summarization technique, aimed at reducing the gap between automatically generated summaries and human-written abstracts. rather than focusing on how to identify key sentences, as do other researchers, we study how to generate the text of a summary once key sentences have been extracted. the main idea of cut and paste summarization is to reuse the text in an article to generate the summary. however, instead of simply extracting sentences as current summarizers do, the cut and paste system will "smooth" the extracted sentences by editing them. such edits mainly involve cutting phrases and pasting them together in novel ways. the key features of this work are:there is a big gap between the summaries produced by current automatic summarizers and the abstracts written by human professionals. the key features of this work are: finally, we conclude and discuss future work. this paper presents a novel architecture for text summarization using cut and paste techniques observed in human-written abstracts. we thank ibm for licensing us the esg parser and the mitre corporation for licensing us the coreference resolution system. we will also extend the system to query-based summarization and investigate whether the system can be modified for multiple document summarization. however, the combination operations and combination rules that we derived from corpus analysis are significantly different from those used in the above system, which mostly came from operations in traditional natural language generation. any opinions, findings, and conclusions or recommendations expressed in this material are those of the authors and do not necessarily reflect the views of the national science foundation. we identified six operations that can be used alone or together to transform extracted sentences into sentences in human-written abstracts. ing operations. 
we defined six operations that can be used alone, sequentially, or simultaneously to transform selected sentences from an article into the corresponding summary sentences in its human-written abstract: \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2026.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2026.txt new file mode 100644 index 0000000000000000000000000000000000000000..fa20818566181badae0b4d5291f73a91fd14bac2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2026.txt @@ -0,0 +1 @@ +this paper presents three trainable systems for surface natural language generation (nlg). surface nlg, for our purposes, consists of generating a grammatical natural language phrase that expresses the meaning of an input semantic representation. the systems take a "corpus-based" or "machinelearning" approach to surface nlg, and learn to generate phrases from semantic input by statistically analyzing examples of phrases and their corresponding semantic representations. the determination of the content in the semantic representation, or "deep" generation, is not discussed here. instead, the systems assume that the input semantic representation is fixed and only deal with how to express it in natural language. this paper discusses previous approaches to surface nlg, and introduces three trainable systems for surface nlg, called nlg1, nlg2, and nlg3. quantitative evaluation of experiments in the air travel domain will also be discussed.this paper presents three trainable systems for surface natural language generation (nlg). this paper presents the first systems (known to the author) that use a statistical learning approach to produce natural language text directly from a semantic representation. we conjecture that nlg2 and nlg3 should work in other domains which have a complexity similar to air travel, as well as available annotated data. quantitative evaluation of experiments in the air travel domain will also be discussed. the nlg2 and nlg3 systems automatically attempt to generalize from the knowledge inherent in the training corpus of templates, so that they can generate templates for novel attribute sets. in contrast, (langkilde and knight, 1998) uses corpus-derived statistical knowledge to rank plausible hypotheses from a grammarbased surface generation component. templates are the easiest way to implement surface nlg. this limitation can be overcome by using features on values, so that nlg2 and nlg3 might discover — to use a hypothetical example — that "flights leaving $city-fr" is preferred over "flights from $city-fr" when $city-fr is a particular value, such as "miami". our current approach has the limitation that it ignores the values of attributes, even though they might strongly influence the word order and word choice. 
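the template end of the spectrum discussed for A00-2026 can be sketched as a lookup keyed by the attribute set, with values substituted into the stored phrase; the template table and attribute names such as $city-fr are assumptions for illustration.

# sketch of template-based surface generation in the spirit of nlg1 (A00-2026):
# choose a phrase template by the set of input attributes and substitute values.
# the templates and attribute names are illustrative assumptions.
TEMPLATES = {
    frozenset({"$city-fr", "$city-to"}): "flights from $city-fr to $city-to",
    frozenset({"$city-fr", "$city-to", "$time-dep"}):
        "flights from $city-fr to $city-to leaving at $time-dep",
}

def generate(attributes):
    template = TEMPLATES.get(frozenset(attributes))
    if template is None:
        return None                  # nlg2/nlg3 generalize to novel attribute sets instead
    phrase = template
    for attr, value in attributes.items():
        phrase = phrase.replace(attr, value)
    return phrase

print(generate({"$city-fr": "miami", "$city-to": "boston"}))
# -> "flights from miami to boston"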
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2030.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2030.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e81b90211432c32af0820f86099b8c4806739ae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2030.txt @@ -0,0 +1 @@ +since 1995, a few statistical parsing algorithms (magerman, 1995; collins, 1996 and 1997; charniak, 1997; rathnaparki, 1997) demonstrated a breakthrough in parsing accuracy, as measured against the university of pennsylvania treebank as a gold standard. yet, relatively few have embedded one of these algorithms in a task. chiba, (1999) was able to use such a parsing algorithm to reduce perplexity with the long term goal of improved speech recognition. in this paper, we report adapting a lexicalized, probabilistic context-free parser with head rules (lpcfg-hr) to information extraction. the technique was benchmarked in the seventh message understanding conference (muc-7) in 1998. several technical challenges confronted us and were solved: treebank on wall street journal adequately train the algorithm for new york times newswire, which includes dozens of newspapers? manually creating sourcespecific training data for syntax was not required. instead, our parsing algorithm, trained on the upenn treebank, was run on the new york times source to create unsupervised syntactic training which was constrained to be consistent with semantic annotation.this simple semantic annotation was the only source of task knowledge used to configure the model. instead, our parsing algorithm, trained on the upenn treebank, was run on the new york times source to create unsupervised syntactic training which was constrained to be consistent with semantic annotation. we have demonstrated, at least for one problem, that a lexicalized, probabilistic context-free parser with head rules (lpcfghr) can be used effectively for information extraction. our system for muc-7 consisted of the sentential model described in this paper, coupled with a simple probability model for cross-sentence merging. while performance did not quite match the best previously reported results for any of these three tasks, we were pleased to observe that the scores were at or near state-of-the-art levels for all cases. since 1995, a few statistical parsing algorithms (magerman, 1995; collins, 1996 and 1997; charniak, 1997; rathnaparki, 1997) demonstrated a breakthrough in parsing accuracy, as measured against the university of pennsylvania treebank as a gold standard. for the following example, the template relation in figure 2 was to be generated: "donald m. goldstein, a historian at the university of pittsburgh who helped write..." the semantics — that is, the entities and relations — can then be directly extracted from these sentential trees. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2031.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2031.txt new file mode 100644 index 0000000000000000000000000000000000000000..c7342c9f8f0cb5fd5c8c05a6928a92acaaede739 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2031.txt @@ -0,0 +1 @@ +parsing sentences using statistical information gathered from a treebank was first examined a decade ago in (chitrad and grishman, 1990) and is by now a fairly well-studied problem ((charniak, 1997), (collins, 1997), (ratnaparkhi, 1997)). but to date, the end product of the parsing process has for the most part been a bracketing with simple constituent labels like np, vp, or sbar. the penn treebank contains a great deal of additional syntactic and semantic information from which to gather statistics; reproducing more of this information automatically is a goal which has so far been mostly ignored. this paper details a process by which some of this information—the function tags— may be recovered automatically. in the penn treebank, there are 20 tags (figure 1) that can be appended to constituent labels in order to indicate additional information about the syntactic or semantic role of the constituent. we have divided them into four categories (given in figure 2) based on those in the bracketing guidelines (bies et al., 1995). a constituent can be tagged with multiple tags, but never with two tags from the same category.1 in actuality, the case where a constituent has tags from all four categories never happens, but constituents with three tags do occur (rarely). at a high level, we can simply say that having the function tag information for a given text is useful just because any further information would help. but specifically, there are distinct advantages for each of the various categories. grammatical tags are useful for any application trying to follow the thread of the text—they find the 'who does what' of each clause, which can be useful to gain information about the situation or to learn more about the behaviour of the words in the sentence. the form/function tags help to find those constituents behaving in ways not conforming to their labelled type, as well as further clarifying the behaviour of adverbial phrases. information retrieval applications specialising in describing events, as with a number of the muc applications, could greatly benefit from some of these in determining the where-when-why of things. noting a topicalised constituent could also prove useful to these applications, and it might also help in discourse analysis, or pronoun resolution. finally, the 'miscellaneous' tags are convenient at various times; particularly the clr 'closely related' tag, which among other things marks phrasal verbs and prepositional ditransitives. to our knowledge, there has been no attempt so far to recover the function tags in parsing treebank text. in fact, we know of only one project that used them at all: (collins, 1997) defines certain constituents as complements based on a combination of label and function tag information. this boolean condition is then used to train an improved parser.this work presents a method for assigning function tags to text that has been parsed to the simple label level. this boolean condition is then used to train an improved parser. 
in fact, we know of only one project that used them at all: (collins, 1997) defines certain constituents as complements based on a combination of label and function tag information. but to date, the end product of the parsing process has for the most part been a bracketing with simple constituent labels like np, vp, or sbar. • there is no reason to think that this work could not be integrated directly into the parsing process, particularly if one's parser is already geared partially or entirely towards feature-based statistics; the function tag information could prove quite useful within the parse itself, to rank several parses to find the most plausible. it is as yet unclear just to what degree these tagging errors in the corpus are affecting our results. we have found it useful to define our statistical model in terms of features. there are, it seems, two reasonable baselines for this and future work. this data is very important in distinguishing, for example, 'by john' (where john might be a logical subject) from 'by next year' (a temporal modifier) and 'by selling it' (an adverbial indicating manner). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2034.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2034.txt new file mode 100644 index 0000000000000000000000000000000000000000..9dcd812aae24332fc6dc0df3b656ae8f31d51a65 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A00-2034.txt @@ -0,0 +1 @@ +diathesis alternations are alternate ways in which the arguments of a verb are expressed syntactically. the syntactic changes are sometimes accompanied by slight changes in the meaning of the verb. an example of the causative alternation is given in (1) below. in this alternation, the object of the transitive variant can also appear as the subject of the intransitive variant. in the conative alternation, the transitive form alternates with a prepositional phrase construction involving either at or on. an example of the conative alternation is given in (2). we refer to alternations where a particular semantic role appears in different grammatical roles in alternate realisations as "role switching alternations" (rsas). it is these alternations that our method applies to. recently, there has been interest in corpus-based methods to identify alternations (mccarthy and korhonen, 1998; lapata, 1999), and associated verb classifications (stevenson and merlo, 1999). these have either relied on a priori knowledge specified for the alternations in advance, or are not suitable for a wide range of alternations. the fully automatic method outlined here is applied to the causative and conative alternations, but is applicable to other rsas.the fully automatic method outlined here is applied to the causative and conative alternations, but is applicable to other rsas. diathesis alternations are alternate ways in which the arguments of a verb are expressed syntactically. however, a considerably larger corpus would be required to overcome the sparse data problem for other rsa alternations. we have discovered a significant relationship between the similarity of selectional preferences at the target slots, and participation in the causative and conative alternations. diathesis alternations have been proposed for a number of nlp tasks. we propose a method to acquire knowledge of alternation participation directly from corpora, with frequency information available as a by-product. 
notably, only one negative decision was made because of the disparate frame frequencies, which reduces the cost of combining the argument head data. the syntactic changes are sometimes accompanied by slight changes in the meaning of the verb. these have either relied on a priori knowledge specified for the alternations in advance, or are not suitable for a wide range of alternations. for the conative, a sample of 16 verbs was used and this time accuracy was only 56%. earlier work by resnik (1993) demonstrated a link between selectional preference strength and participation in alternations where the direct object is omitted. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A88-1019.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A88-1019.txt new file mode 100644 index 0000000000000000000000000000000000000000..d41bbbc2fbdbcf025277c1e65bfa1189b736a455 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A88-1019.txt @@ -0,0 +1 @@ +it is well-known that part of speech depends on context. the word "table," for example, can be a verb in some contexts (e.g., "he will table the motion") and a noun in others (e.g., "the table is ready"). a program has been written which tags each word in an input sentence with the most likely part of speech. the program produces the following output for the two "table" sentences just mentioned: (pps = subject pronoun; md = modal; vb = verb (no inflection); at = article; nn = noun; bez = present 3rd sg form of "to be"; jj = adjective; notation is borrowed from [francis and kucera, pp. 6-8]) part of speech tagging is an important practical problem with potential applications in many areas including speech synthesis, speech recognition, spelling correction, proof-reading, query answering, machine translation and searching large text data bases (e.g., patents, newspapers). the author is particularly interested in speech synthesis applications, where it is clear that pronunciation sometimes depends on part of speech. consider the following three examples where pronunciation depends on part of speech. first, there are words like "wind" where the noun has a different vowel than the verb. that is, the noun "wind" has a short vowel as in "the wind is strong," whereas the verb "wind" has a long vowel as in "don't forget to wind your watch." secondly, the pronoun "that" is stressed as in "did you see that?" unlike the complementizer "that," as in "it is a shame that he's leaving." thirdly, note the difference between "oily fluid" and "transmission fluid"; as a general rule, an adjective-noun sequence such as "oily fluid" is typically stressed on the right whereas a noun-noun sequence such as "transmission fluid" is typically stressed on the left. these are but three of the many constructions which would sound more natural if the synthesizer had access to accurate part of speech information. perhaps the most important application of tagging programs is as a tool for future research. a number of large projects such as [cobuild] have recently been collecting large corpora (101000 million words) in order to better describe how language is actually used in practice: "for the first time, a dictionary has been compiled by the thorough examination of representative group of english texts, spoken and written, running to many millions of words. this means that in addition to all the tools of the conventional dictionary makers... 
the dictionary is based on hard, measureable evidence." [cobuild, p. xv] it is likely that there will be more and more research projects collecting larger and larger corpora. a reliable parts program might greatly enhance the value of these corpora to many of these researchers. the program uses a linear time dynamic programming algorithm to find an assignment of parts of speech to words that optimizes the product of (a) lexical probabilities (probability of observing part of speech i given word j), and (b) contextual probabilities (probability of observing part of speech i given k previous parts of speech). probability estimates were obtained by training on the tagged brown corpus [francis and kucera], a corpus of approximately 1,000,000 words with part of speech tags assigned laboriously by hand over many years. program performance is encouraging (95-99% "correct", depending on the definition of "correct"). a small 400 word sample is presented in the appendix, and is judged to be 99.5% correct. it is surprising that a local "bottom-up" approach can perform so well. most errors are attributable to defects in the lexicon; remarkably few errors are related to the inadequacies of the extremely over-simplified grammar (a trigram model). apparently, "long distance" dependences are not very important, at least most of the time. one might have thought that ngram models weren't adequate for the task since it is wellknown that they are inadequate for determining grammaticality: "we find that no finite-state markov process that produces symbols with transition from state to state can serve as an english grammar. furthermore, the particular subclass of such processes that produce norder statistical approximations to english do not come closer, with increasing n, to matching the output of an english grammar." [chomsky, p. 113] chomslcy's conclusion was based on the observation that constructions such as: have long distance dependencies that span across any fixed length window n. thus, ngram models are clearly inadequate for many natural language applications. however, for the tagging application, the ngram approximation may be acceptable since long distance dependencies do not seem to be very important. statistical ngram models were quite popular in the 1950s, and have been regaining popularity over the past few years. the ibm speech group is perhaps the strongest advocate of ngram methods, especially in other applications such as speech recognition. robert mercer (private communication, 1982) has experimented with the tagging application, using a restricted corpus (laser patents) and small vocabulary (1000 words). another group of researchers working in lancaster around the same time, leech, garside and atwell, also found ngram models highly effective; they report 96.7% success in automatically tagging the lob corpus, using a bigram model modified with heuristics to cope with more important trigrams. the present work developed independently from the lob project. many people who have not worked in computational linguistics have a strong intuition that lexical ambiguity is usually not much of a problem. it is commonly believed that most words have just one part of speech, and that the few exceptions such as "table" are easily disambiguated by context in most cases. 
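the dynamic-programming search described above can be sketched as follows; a bigram context is used for brevity where the paper uses trigrams, and the toy probability tables are assumptions for illustration.

import math

# sketch of the dynamic-programming tag search in A88-1019: choose the tag
# sequence maximizing the product of lexical and contextual probabilities.
# bigram context for brevity; toy probability tables are assumptions.
LEX = {  # p(tag | word)
    "the":   {"at": 1.0},
    "table": {"nn": 0.7, "vb": 0.3},
    "will":  {"md": 0.8, "nn": 0.2},
}
CTX = {  # p(tag | previous tag)
    ("<s>", "at"): 0.5, ("<s>", "md"): 0.3, ("<s>", "nn"): 0.2,
    ("at", "nn"): 0.9, ("at", "vb"): 0.1,
    ("md", "vb"): 0.8, ("md", "nn"): 0.2,
    ("nn", "nn"): 0.3, ("nn", "vb"): 0.3,
}

def viterbi(words):
    best = {"<s>": (0.0, [])}                 # tag -> (log-prob, tag sequence)
    for word in words:
        new_best = {}
        for tag, p_lex in LEX[word].items():
            for prev, (score, seq) in best.items():
                p_ctx = CTX.get((prev, tag), 1e-6)
                cand = score + math.log(p_lex) + math.log(p_ctx)
                if tag not in new_best or cand > new_best[tag][0]:
                    new_best[tag] = (cand, seq + [tag])
        best = new_best
    return max(best.values())[1]

print(viterbi(["the", "table"]))        # -> ['at', 'nn']
print(viterbi(["will", "table"]))       # -> ['md', 'vb'] with these numbers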
in contrast, most experts in computational linguists have found lexical ambiguity to be a major issue; it is said that practically any content word can be used as a noun, verb or adjective,i and that local context is not always adequate to disambiguate. introductory texts are full of ambiguous sentences such as where no amount of syntactic parsing will help. these examples are generally taken to indicate that the parser must allow for multiple possibilities and that grammar formalisms such as lr(k) are inadequate for natural language since these formalisms cannot cope with ambiguity. this argument was behind a large set of objections to marcus' "lr(k)-like" deterministic parser. although it is clear that an expert in computational linguistics can dream up arbitrarily hard sentences, it may be, as marcus suggested, that most texts are not very hard in practice. recall that marcus hypothesized most decisions can be resolved by the parser within a small window (i.e., three buffer cells), and there are only a few problematic cases where the parser becomes confused. he called these confusing cases "garden paths," by analogy with the famous example: • the horse raced past the barn fell. with just a few exceptions such as these "garden paths," marcus assumes, there is almost always a unique "best" interpretation which can be found with very limited resources. the proposed stochastic approach is largely compatible with this; the proposed approach 1. from an information theory point of view, one can quantity ambiguity in bits. in the case of the brown tagged corpus, the lexical entropy, the conditional entropy of the part of speech given the word is about 0.25 bits per part of speech. this is considerably smaller than the contextual entropy, the conditional entropy of the part of speech given the next two parts of speech. this entropy is estimated to be about 2 bits per part of speech. assumes that it is almost always sufficient to assign each word a unique "best" part of speech (and this can be accomplished with a very efficient linear time dynamic programming algorithm). after reading introductory discussions of "flying planes can be dangerous," one might have expected that lexical ambiguity was so pervasive that it would be hopeless to try to assign just one part of speech to each word and in just one linear time pass over the input words.find all assignments of parts of speech to "a" and score. the proposed method omitted only 5 of 243 noun phrase brackets in the appendix. it is well-known that part of speech depends on context. there is some tendency to underestimate the number of brackets and run two noun phrases together as in [np the time fairchild]. this is considerably smaller than the contextual entropy, the conditional entropy of the part of speech given the next two parts of speech. this entropy is estimated to be about 2 bits per part of speech. assumes that it is almost always sufficient to assign each word a unique "best" part of speech (and this can be accomplished with a very efficient linear time dynamic programming algorithm). a program has been written which tags each word in an input sentence with the most likely part of speech. in the case of the brown tagged corpus, the lexical entropy, the conditional entropy of the part of speech given the word is about 0.25 bits per part of speech. the method works remarkably well considering how simple it is. 
after reading introductory discussions of "flying planes can be dangerous," one might have expected that lexical ambiguity was so pervasive that it would be hopeless to try to assign just one part of speech to each word and in just one linear time pass over the input words. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1006.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1006.txt new file mode 100644 index 0000000000000000000000000000000000000000..b8e99c03292671083a32f024b0d5342a7dd4f991 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1006.txt @@ -0,0 +1 @@ +this paper presents the joyce system as an example of a fully-implemented, application-oriented text generation system. joyce covers the whole range of tasks associated with text generation, from content selection to morphological processing. it was developped as part of the interface of the software design environment ulysses. the following design goals were set for it: while we were able to exploit existing research for many of the design issues, it turned out that we needed to develop our own approach to text planning (ra.mbow 1990). this paper will present the system and attempt to show how these design objectives led to particular design decisions. the structure of the paper is as follows. in section 2, we will present the underlying application and give examples of the output of the system. in section 3, we will discuss the overall structure of joyce. we then discuss the three main components in turn: the text planner in section 4, the sentence planner in section 5 and the realizer in section 6. we will discuss the text planner in some detail since it represents a new approach to the problem. section 7 traces the generation of a short text. in section 8, we address the problem of portability, and wind up by discussing some shortcomings of joyce in the conclusion.this paper presents the joyce system as an example of a fully-implemented, application-oriented text generation system. in section 8, we address the problem of portability, and wind up by discussing some shortcomings of joyce in the conclusion. we are aware of several shortcomings of joyce, which we will address in future versions of the system. ple in text planning, it appears to play an important role as a constraint on possible text structures. ii has met the design objectives of speed and quality, and our experience in porting the text generator to new task: and to new applications indicates that joyce is a flexibl( system that can adapt to a variety of text generatior tasks. it passes it through the incrementor to the formater, which downgrades it when a classified corrected reading leaves through p34. initial results, including a prototype, are encouraging. furthermore, it helps determine the use of connectives between rhetorically related clauses. despite these shortcomings, joyce has proven to be a successful and useful tool in the ulysses user interface. the joyce text generation system was developped part of the software design environment ulysses (korelsky and ulysses staff 1988; rosenthal et al 1988) ulysses includes a graphical environment for the design of secure, distributed software systems. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1018.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1018.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c64526b21c486fddfecf2039e6a28fda749ad46 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1018.txt @@ -0,0 +1 @@ +many words are ambiguous in their part of speech. for example, "tag" can be a noun or a verb. however, when a word appears in the context of other words, the ambiguity is often reduced: in "a tag is a part-of-speech label," the word "tag" can only be a noun. a part-of-speech tagger is a system that uses context to assign parts of speech to words. automatic text tagging is an important first step in discovering the linguistic structure of large text corpora. part-of-speech information facilitates higher-level analysis, such as recognizing noun phrases and other patterns in text. for a tagger to function as a practical component in a language processing system, we believe that a tagger must be: robust text corpora contain ungrammatical constructions, isolated phrases (such as titles), and nonlinguistic data (such as tables). corpora are also likely to contain words that are unknown to the tagger. it is desirable that a tagger deal gracefully with these situations. efficient if a tagger is to be used to analyze arbitrarily large corpora, it must be efficient—performing in time linear in the number of words tagged. any training required should also be fast, enabling rapid turnaround with new corpora and new text genres. accurate a tagger should attempt to assign the correct part-of-speech tag to every word encountered. tunable a tagger should be able to take advantage of linguistic insights. one should be able to correct systematic errors by supplying appropriate a priori "hints." it should be possible to give different hints for different corpora. reusable the effort required to retarget a tagger to new corpora, new tagsets, and new languages should be minimal.reusable the effort required to retarget a tagger to new corpora, new tagsets, and new languages should be minimal. many words are ambiguous in their part of speech. for example, "tag" can be a noun or a verb. the algorithm has an accuracy of approximately 80% in assigning grammatical functions. several different approaches have been used for building text taggers. by using the fact that words are typically associated with only a few part-ofspeech categories, and carefully ordering the computation, the algorithms have linear complexity (section 3.3). one should be able to correct systematic errors by supplying appropriate a priori "hints." it should be possible to give different hints for different corpora. we have used the tagger in a number of applications. if a noun phrase is labeled, it is also annotated as to whether the governing verb is the closest verb group to the right or to the left. we describe three applications here: phrase recognition; word sense disambiguation; and grammatical function assignment. probabilities corresponding to category sequences that never occurred in the training data are assigned small, non-zero values, ensuring that the model will accept any sequence of tokens, while still providing the most likely tagging. vocabulary independence is achieved by predicting categories for words not in the lexicon, using both context and suffix information. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1021.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1021.txt new file mode 100644 index 0000000000000000000000000000000000000000..39b633d486c27f24aceec5314d86f9a0f3ccbc1b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A92-1021.txt @@ -0,0 +1 @@ +there has been a dramatic increase in the application of probabilistic models to natural language processing over the last few years. the appeal of stochastic techniques over traditional rule-based techniques comes from the ease with which the necessary statistics can be automatically acquired and the fact that very little handcrafted knowledge need be built into the system. in contrast, the rules in rule-based systems are usually difficult to construct and are typically not very robust. one area in which the statistical approach has done particularly well is automatic part of speech tagging, assigning each word in an input sentence its proper part of speech [church 88; cutting et al. 92; derose 88; deroualt and merialdo 86; garside et al. 87; jelinek 85; kupiec 89; meteer et al. 911. stochastic taggers have obtained a high degree of accuracy without performing any syntactic analysis on the input. these stochastic part of speech taggers make use of a markov model which captures lexical and contextual information. the parameters of the model can be estimated from tagged ([church 88; derose 88; deroualt and merialdo 86; garside et al. 87; meteer et al. 91]) or untag,ged ([cutting et al. 92; jelinek 85; kupiec 89]) text. once the parameters of the model are estimated, a sentence can then be automatically tagged by assigning it the tag sequence which is assigned the highest probability by the model. performance is often enhanced with the aid of various higher level pre- and postprocessing procedures or by manually tuning the model. a number of rule-based taggers have been built [klein and simmons 63; green and rubin 71; hindle 89]. [klein and simmons 63] and [green and rubin 71] both have error rates substantially higher than state of the art stochastic taggers. [hindle 89] disambiguates words within a deterministic parser. we wanted to determine whether a simple rule-based tagger without any knowledge of syntax can perform as well as a stochastic tagger, or if part of speech tagging really is a domain to which stochastic techniques are better suited. in this paper we describe a rule-based tagger which performs as well as taggers based upon probabilistic models. the rule-based tagger overcomes the limitations common in rule-based approaches to language processing: it is robust, and the rules are automatically acquired. in addition, the tagger has many advantages over stochastic taggers, including: a vast reduction in stored information required, the perspicuity of a small set of meaningful rules as opposed to the large tables of statistics needed for stochastic taggers, ease of finding and implementing improvements to the tagger, and better portability from one tag set or corpus genre to another.we have presented a simple part of speech tagger which performs as well as existing stochastic taggers, but has significant advantages over these taggers. there has been a dramatic increase in the application of probabilistic models to natural language processing over the last few years. 
the fact that the simple rule-based tagger can perform so well should offer encouragement for researchers to further explore rule-based tagging, searching for a better and more expressive set of patch templates and other variations on this simple but effective theme. the rule-based tagger overcomes the limitations common in rule-based approaches to language processing: it is robust, and the rules are automatically acquired. the tagger is extremely portable. the appeal of stochastic techniques over traditional rule-based techniques comes from the ease with which the necessary statistics can be automatically acquired and the fact that very little handcrafted knowledge need be built into the system. perhaps the biggest contribution of this work is in demonstrating that the stochastic method is not the only viable approach for part of speech tagging. in this paper we describe a rule-based tagger which performs as well as taggers based upon probabilistic models. this makes it easy to experiment with extensions to the tagger. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1006.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1006.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a754d03bf616828f8d88c10f53cf7d5c1846742 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1006.txt @@ -0,0 +1 @@ +the statistical corpus-based renaissance in computational linguistics has produced a number of interesting technologies, including part-of-speech tagging and bilingual word alignment. unfortunately, these technologies are still not as widely deployed in practical applications as they might be. part-ofspeech taggers are used in a few applications, such as speech synthesis (sproat et al., 1992) and question answering (kupiec, 1993b). word alignment is newer, found only in a few places (gale and church, 1991a; brown et al., 1993; dagan et al., 1993). it is used at ibm for estimating parameters of their statistical machine translation prototype (brown et al., 1993). we suggest that part of speech tagging and word alignment could have an important role in glossary construction for translation. glossaries are extremely important for translation. how would microsoft, or some other software vendor, want the term "character menu" to be translated in their manuals? technical terms are difficult for translators because they are generally not as familiar with the subject domain as either the author of the source text or the reader of the target text. in many cases, there may be a number of acceptable translations, but it is important for the sake of consistency to standardize on a single one. it would be unacceptable for a manual to use a variety of synonyms for a particular menu or button. customarily, translation houses make extensive job-specific glossaries to ensure consistency and correctness of technical terminology for large jobs. a glossary is a list of terms and their translations.' we will subdivide the task of constructing a glossary into two subtasks: (1) generating a list of terms, and (2) finding the translation equivalents. the first task will be referred to as the monolingual task and the second as the bilingual task. how should a glossary be constructed? 
translation schools teach their students to read as much background material as possible in both the source and target languages, an extremely time-consuming process, as the introduction to hann's (1992, p. 8) text on technical translation indicates: contrary to popular opinion, the job of a technical translator has little in common with other linguistic professions, such as literature translation, foreign correspondence or interpreting. apart from an expert knowledge of both languages..., all that is required for the latter professions is a few general dictionaries, whereas a technical translator needs a whole library of specialized dictionaries, encyclopedias and 'the source and target fields are standard, though many other fields can also be found, e.g., usage notes, part of speech constraints, comments, etc. technical literature in both languages; he is more concerned with the exact meanings of terms than with stylistic considerations and his profession requires certain 'detective' skills as well as linguistic and literary ones. beginners in this profession have an especially hard time... this book attempts to meet this requirement. unfortunately, the academic prescriptions are often too expensive for commercial practice. translators need just-in-time glossaries. they cannot afford to do a lot of background reading and "detective" work when they are being paid by the word. they need something more practical. we propose a tool, termight, that automates some of the more tedious and laborious aspects of terminology research. the tool relies on part-of-speech tagging and word-alignment technologies to extract candidate terms and translations. it then sorts the extracted candidates and presents them to the user along with reference concordance lines, supporting efficient construction of glossaries. the tool is currently being used by the translators at at&t business translation services (formerly at&t language line services). termight may prove useful in contexts other than human-based translation. primarily, it can support customization of machine translation (mt) lexicons to a new domain. in fact, the arguments for constructing a job-specific glossary for human-based translation may hold equally well for an mt-based process, emphasizing the need for a productivity tool. the monolingual component of termight can be used to construct terminology lists in other applications, such as technical writing, book indexing, hypertext linking, natural language interfaces, text categorization and indexing in digital libraries and information retrieval (salton, 1988; cherry, 1990; harding, 1982; bourigault, 1992; damerau, 1993), while the bilingual component can be useful for information retrieval in multilingual text collections (landauer and littman, 1990).we have shown that terminology research provides a good application for robust natural language technology, in particular for part-of-speech tagging and word-alignment algorithms. the statistical corpus-based renaissance in computational linguistics has produced a number of interesting technologies, including part-of-speech tagging and bilingual word alignment. in particular, we have found the following to be very effective: as the need for efficient knowledge acquisition tools becomes widely recognized, we hope that this experience with termight will be found useful for other text-related systems as well. 
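The monolingual half of the task, proposing candidate terms from part-of-speech tagged text, can be approximated by collecting repeated noun sequences. The sketch below makes that simplifying assumption; termight's real candidate patterns and sorting are richer, and the tiny tagged "document" is fabricated.

from collections import Counter

tagged = [("select", "V"), ("the", "DET"), ("character", "N"), ("menu", "N"),
          ("to", "P"), ("open", "V"), ("the", "DET"), ("character", "N"),
          ("menu", "N"), ("options", "N")]

def candidate_terms(tokens, noun_tags=("N",)):
    # Collect every contiguous run of nouns of length >= 2 and count it.
    counts, run = Counter(), []
    for word, tag in tokens + [("", "")]:          # sentinel flushes the last run
        if tag in noun_tags:
            run.append(word)
        else:
            for i in range(len(run)):
                for j in range(i + 2, len(run) + 1):
                    counts[" ".join(run[i:j])] += 1
            run = []
    return counts

for term, freq in candidate_terms(tagged).most_common():
    print(freq, term)
# 2 character menu            <- a repeated noun sequence, a plausible glossary entry
# 1 character menu options
# 1 menu options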
in fact, the arguments for constructing a job-specific glossary for human-based translation may hold equally well for an mt-based process, emphasizing the need for a productivity tool. unfortunately, these technologies are still not as widely deployed in practical applications as they might be. primarily, it can support customization of machine translation (mt) lexicons to a new domain. part-ofspeech taggers are used in a few applications, such as speech synthesis (sproat et al., 1992) and question answering (kupiec, 1993b). termight may prove useful in contexts other than human-based translation. word alignment is newer, found only in a few places (gale and church, 1991a; brown et al., 1993; dagan et al., 1993). the monolingual component of termight can be used to construct terminology lists in other applications, such as technical writing, book indexing, hypertext linking, natural language interfaces, text categorization and indexing in digital libraries and information retrieval (salton, 1988; cherry, 1990; harding, 1982; bourigault, 1992; damerau, 1993), while the bilingual component can be useful for information retrieval in multilingual text collections (landauer and littman, 1990). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1009.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1009.txt new file mode 100644 index 0000000000000000000000000000000000000000..d7655f9226e1000683da5a79b24af5014f825267 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1009.txt @@ -0,0 +1 @@ +part-of-speech tagging is the process of assigning grammatical categories to individual words in a corpus. one widely used approach makes use of a statistical technique called a hidden markov model (hmm). the model is defined by two collections of parameters: the transition probabilities, which express the probability that a tag follows the preceding one (or two for a second order model); and the lexical probabilities, giving the probability that a word has a given tag without regard to words on either side of it. to tag a text, the tags with non-zero probability are hypothesised for each word, and the most probable sequence of tags given the sequence of words is determined from the probabilities. two algorithms are commonly used, known as the forward-backward (fb) and viterbi algorithms. fb assigns a probability to every tag on every word, while viterbi prunes tags which cannot be chosen because their probability is lower than the ones of competing hypotheses, with a corresponding gain in computational efficiency. for an introduction to the algorithms, see cutting et at. (1992), or the lucid description by sharman (1990). there are two principal sources for the parameters of the model. if a tagged corpus prepared by a human annotator is available, the transition and lexical probabilities can be estimated from the frequencies of pairs of tags and of tags associated with words. alternatively, a procedure called baumwelch (bw) re-estimation may be used, in which an untagged corpus is passed through the fb algorithm with some initial model, and the resulting probabilities used to determine new values for the lexical and transition probabilities. by iterating the algorithm with the same corpus, the parameters of the model can be made to converge on values which are locally optimal for the given text. 
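Decoding with such a model is typically done with the viterbi algorithm. The sketch below shows the idea for a first-order hidden markov model; the transition and lexical probabilities are invented toy values, not estimates from any corpus.

import math

TAGS = ["DET", "NOUN", "VERB"]
TRANS = {("<s>", "DET"): 0.8, ("<s>", "NOUN"): 0.1, ("<s>", "VERB"): 0.1,
         ("DET", "NOUN"): 0.9, ("DET", "VERB"): 0.05, ("DET", "DET"): 0.05,
         ("NOUN", "VERB"): 0.6, ("NOUN", "NOUN"): 0.3, ("NOUN", "DET"): 0.1,
         ("VERB", "DET"): 0.5, ("VERB", "NOUN"): 0.4, ("VERB", "VERB"): 0.1}
LEX = {("the", "DET"): 1.0, ("tag", "NOUN"): 0.6, ("tag", "VERB"): 0.4,
       ("works", "VERB"): 0.7, ("works", "NOUN"): 0.3}

def viterbi(words):
    # best[tag] = (log probability, tag path) of the best sequence ending in tag
    best = {"<s>": (0.0, [])}
    for word in words:
        new_best = {}
        for tag in TAGS:
            emit = LEX.get((word, tag), 1e-8)      # floor for unseen (word, tag)
            cands = [(lp + math.log(TRANS.get((prev, tag), 1e-8)) + math.log(emit),
                      path + [tag])
                     for prev, (lp, path) in best.items()]
            new_best[tag] = max(cands)
        best = new_best
    return max(best.values())[1]

print(viterbi(["the", "tag", "works"]))   # ['DET', 'NOUN', 'VERB']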
the degree of convergence can be measured using a perplexity measure, the sum of plog2p for hypothesis probabilities p, which gives an estimate of the degree of disorder in the model. the algorithm is again described by cutting et ad. and by sharman, and a mathematical justification for it can be found in huang et at. (1990). the first major use of hmms for part of speech tagging was in claws (garside et a/., 1987) in the 1970s. with the availability of large corpora and fast computers, there has been a recent resurgence of interest, and a number of variations on and alternatives to the fb, viterbi and bw algorithms have been tried; see the work of, for example, church (church, 1988), brill (brill and marcus, 1992; brill, 1992), derose (derose, 1988) and kupiec (kupiec, 1992). one of the most effective taggers based on a pure hmm is that developed at xerox (cutting et al., 1992). an important aspect of this tagger is that it will give good accuracy with a minimal amount of manually tagged training data. 96% accuracy correct assignment of tags to word token, compared with a human annotator, is quoted, over a 500000 word corpus. the xerox tagger attempts to avoid the need for a hand-tagged training corpus as far as possible. instead, an approximate model is constructed by hand, which is then improved by bw re-estimation on an untagged training corpus. in the above example, 8 iterations were sufficient. the initial model set up so that some transitions and some tags in the lexicon are favoured, and hence having a higher initial probability. convergence of the model is improved by keeping the number of parameters in the model down. to assist in this, low frequency items in the lexicon are grouped together into equivalence classes, such that all words in a given equivalence class have the same tags and lexical probabilities, and whenever one of the words is looked up, then the data common to all of them is used. re-estimation on any of the words in a class therefore counts towards re-estimation for all of them'. the results of the xerox experiment appear very encouraging. preparing tagged corpora either by hand is labour-intensive and potentially error-prone, and although a semi-automatic approach can be used (marcus et al., 1993), it is a good thing to reduce the human involvement as much as possible. however, some careful examination of the experiment is needed. in the first place, cutting et a/. do not compare the success rate in their work with that achieved from a hand-tagged training text with no re-estimation. secondly, it is unclear how much the initial biasing contributes the success rate. if significant human intervention is needed to provide the biasing, then the advantages of automatic training become rather weaker, especially if such intervention is needed on each new text domain. the kind of biasing cutting et a/. describe reflects linguistic insights combined with an understanding of the predictions a tagger could reasonably be expected to make and the ones it could not. the aim of this paper is to examine the role that training plays in the tagging process, by an experimental evaluation of how the accuracy of the tagger varies with the initial conditions. the results suggest that a completely unconstrained initial model does not produce good quality results, and that one 'the technique was originally developed by kupiec (kupiec, 1989). 
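The equivalence-class device, pooling the lexical statistics of low-frequency words that allow the same set of tags, can be sketched as follows. The counts and the frequency threshold are illustrative assumptions rather than the xerox tagger's actual values.

from collections import Counter, defaultdict

word_tag_counts = {
    "run":  Counter({"VERB": 40, "NOUN": 10}),
    "walk": Counter({"VERB": 2, "NOUN": 1}),      # rare
    "jump": Counter({"VERB": 1, "NOUN": 1}),      # rare
    "the":  Counter({"DET": 500}),
}
RARE = 5   # total-frequency threshold below which a word joins a class

classes = defaultdict(Counter)   # tag-set signature -> pooled counts
lexicon = {}
for word, counts in word_tag_counts.items():
    if sum(counts.values()) < RARE:
        signature = frozenset(counts)             # the set of tags the word allows
        classes[signature].update(counts)
        lexicon[word] = ("class", signature)
    else:
        lexicon[word] = ("word", counts)

def lexical_probs(word):
    # Rare words share the pooled counts of their class; re-estimating any member
    # would update the shared entry for all of them.
    kind, ref = lexicon[word]
    counts = classes[ref] if kind == "class" else ref
    total = sum(counts.values())
    return {tag: c / total for tag, c in counts.items()}

print(lexical_probs("walk"))   # pooled with "jump": {'VERB': 0.6, 'NOUN': 0.4}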
accurately trained from a hand-tagged corpus will generally do better than using an approach based on re-estimation, even when the training comes from a different source. a second experiment shows that there are different patterns of re-estimation, and that these patterns vary more or less regularly with a broad characterisation of the initial conditions. the outcome of the two experiments together points to heuristics for making effective use of training and reestimation, together with some directions for further research. work similar to that described here has been carried out by merialdo (1994), with broadly similar conclusions. we will discuss this work below. the principal contribution of this work is to separate the effect of the lexical and transition parameters of the model, and to show how the results vary with different degree of similarity between the training and test data.in the end it may turn out there is simply no way of making the prediction without a source of information extrinsic to both model and corpus. part-of-speech tagging is the process of assigning grammatical categories to individual words in a corpus. the principal contribution of this work is to separate the effect of the lexical and transition parameters of the model, and to show how the results vary with different degree of similarity between the training and test data. from the observations in the previous section, we propose the following guidelines for how to train a hmm for use in tagging: able, use bw re-estimation with standard convergence tests such as perplexity. one widely used approach makes use of a statistical technique called a hidden markov model (hmm). we will discuss this work below. work similar to that described here has been carried out by merialdo (1994), with broadly similar conclusions. the general pattern of the results presented does not vary greatly with the corpus and tagset used. to tag a text, the tags with non-zero probability are hypothesised for each word, and the most probable sequence of tags given the sequence of words is determined from the probabilities. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1016.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1016.txt new file mode 100644 index 0000000000000000000000000000000000000000..cd6fdac210eafa796553bb4d111dd000b0495b47 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A94-1016.txt @@ -0,0 +1 @@ +machine-readable dictionary (the collins spanish/english), the lexicons used by the kbmt modules, a large set of user-generated bilingual glossaries as well as a gazetteer and a list of proper and organization names. the outputs from these engines (target language words and phrases) are recorded in a chart whose positions correspond to words in the source language input. as a result of the operation of each of the mt engines, new edges are added to the chart, each labeled with the translation of a region of the input string and indexed by this region's beginning and end positions. we will refer to all of these edges as components (as in "components of the translation") for the remainder of this article. the kbmt and ebmt engines also carry a quality score for each output element. the kbmt scores are produced based on whether any questionable heuristics were used in the source analysis or target generation. 
the ebmt scores are produced using a technique based on human judgements, as described in (nirenburg et al., 1994a), submitted. figure 1 presents a general view of the operation of our multi-engine mt system. the chart manager selects the overall best cover from the collection of candidate partial translations by normalizing each component's quality score (positive, with larger being better), and then selecting the best combination of components with the help of the chart walk algorithm. figure 2 illustrates the result of this process on the example spanish sentence: al momenta de su yenta a iberia, viasa contaba con ocho aviones, que tenzan en promedio 13 anos de vuelo which can be translated into english as at the moment of its sale to iberia, viasa had eight airplanes, which had on average thirteen years of flight (time). this is a sentence from one of the 1993 arpa mt evaluation texts. for each component, the starting and ending positions in the chart, the corresponding source language words, and alternative translations are shown, as well as the engine and the engine-internal quality scores. inspection of these translations shows numerous problems; for example, at position 12, "aviones" is translated, among other things, as "aircrafts". it must be remembered that these were generated automatically from an on-line dictionary, without any lexical feature marking or other human intervention. it is well known that such automatic methods are at the moment less than perfect, to say the least. in our current system, this is not a major problem, since the results go through a mandatory editing step, as described below. the chart manager normalizes the internal scores to make them directly comparable. in the case of kbmt and ebmt, the pre-existing scores are modified, while lexical transfer results are scored based on the estimated reliability of individual databases, from 0.5 up to 15. currently the kbmt scores are reduced by a constant, except for known erroneous output, which has its score set to zero. the internal ebmt scores range from 0 being perfect to 10,000 being worthless; but the scores are nonlinear. so a region selected by a threshold is converted linearly into scores ranging from zero to a normalized maximum ebmt score. the normalization levels were empirically determined in the initial experiment by having several individuals judge the comparative average quality of the outputs in an actual translation run. in every case, the base score produced by the scoring functions is currently multiplied by the length of the candidate in words, on the assumption that longer items are better. we intend to test a variety of functions in order to find the right contribution of the length factor. figure 3 presents the chart walk algorithm used to produce a single, best, non-overlapping, contiguous combination (cover) of the available component translations, assuming correct component quality scores. the code is organized as a recursive divideand-conquer procedure: to calculate the cover of a region of the input, it is repeatedly split into two parts, at each possible position. each time, the best possible cover for each part is recursively found, and the two scores are combined to give a score for the chart walk containing the two best subwalks. these different splits are then compared with each other and with components from the chart spanning the whole region (if any), and the overall best result is without dynamic programming, this would have a d 2 combinatorial time complexity. 
dynamic programming utilizes a large array to store partial results, so that the best cover of any given subsequence is only computed once; the second time that a recursive call would compute the same result, it is retrieved from the array instead. this reduces the time complexity to o(n^3), and in practice it uses an insignificant part of total processing time. all possible combinations of components are compared: this is not a heuristic method, but an efficient exhaustive one. this is what assures that the chosen cover is optimal. this assumes, in addition to the scores actually being correct, that the scores are compositional, in the sense that the combined score for a set of components really represents their quality as a group. this might not be the case, for example, if gaps or overlaps are allowed in some cases (perhaps where they contain the same words in the same positions). we calculate the combined score for a sequence of components as the weighted average of their individual scores. weighting by length is necessary so that the same components, when combined in a different order, produce the same combined scores. otherwise the algorithm can produce inconsistent results. the chart walk algorithm can also be thought of as filling in the two-dimensional dynamic-programming array (note that this array is a different data structure from the chart). figure 4 shows an intermediate point in the filling of the array. in this figure, each element (i,j) is initially the best score of any single chart component covering the input region from word i to word j. dashes indicate that no one component covers exactly that region. (in rows 1 through 7, the array has not yet been operated on, so it still shows its initial state.) after processing (see rows 9 through 22), each element is the score for the best set of components covering the input from word i to word j (the best cover for this substring). (only a truncated score is shown for each element in the figure, for readability. there is also a list of best components associated with each element.) the array is upper triangular since the starting position of a component i must be less than or equal to its ending position j. for any position, the score is calculated based on a combination of scores in the row to its left and in the column below it, versus the previous contents of the array cell for its position. so the array must be filled from the bottom-up, and left to right. intuitively, this is because larger regions must be built up from smaller regions within them. for example, to calculate element (8,10), we compute the length-weighted averages of the scores of the best walks over the pair of elements (8,8) and (9,10) versus the pair (8,9) and (10,10), and compare them with the scores of any single chart components going from 8 to 10 (there were none), and take the maximum. referring to figure 2 again, this corresponds to a choice between combining the translations of (8,8) viasa and (9,10) contaba con versus combining the (not shown) translations of (8,9) viasa contaba and (10,10) con. (this (8,9) element was itself previously built up from single word components.) thus, we compare (2*1 + 10*2)/3 = 7.33 with (3.5*2 + 2*1)/3 = 3.0 and select the first, 7.33. the first wins because contaba con has a high score as an idiom from the glossary. figure 5 shows the final array.
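The chart walk can be sketched directly from this description: the best cover of a span is either a single component spanning it or the best split into two sub-covers, with scores combined as length-weighted averages. The miniature component chart below is invented (and assumes every word is covered by at least one single-word component); memoization stands in for the explicit dynamic-programming array.

from functools import lru_cache

N = 4  # words 0..3
# (start, end) inclusive -> (score, label) for single chart components
CHART = {(0, 0): (2.0, "w0"), (1, 1): (5.0, "w1"), (2, 2): (3.0, "w2"),
         (3, 3): (4.0, "w3"), (1, 2): (9.0, "idiom w1-w2")}

@lru_cache(maxsize=None)
def best_cover(i, j):
    # Return (score, components) for the best cover of words i..j.
    length = j - i + 1
    best = None
    if (i, j) in CHART:                       # a single component spanning i..j
        score, label = CHART[(i, j)]
        best = (score, (label,))
    for k in range(i, j):                     # every split into i..k and k+1..j
        ls, lc = best_cover(i, k)
        rs, rc = best_cover(k + 1, j)
        combined = (ls * (k - i + 1) + rs * (j - k)) / length   # length-weighted
        cand = (combined, lc + rc)
        if best is None or cand > best:
            best = cand
    return best

score, cover = best_cover(0, N - 1)
print(round(score, 2), cover)   # 6.0 ('w0', 'idiom w1-w2', 'w3')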
when the element in the top-right corner is produced (5.78), the algorithm is finished, and the associated set of components is the final chart walk result shown in figure 2. it may seem that the scores should increase towards the top-right corner. this has not generally been the case. while the system produces a number of high-scoring short components, many lowscoring components have to be included to span the entire input. since the score is a weighted average, these low-scoring components pull the combined score down. a clear example can be seen at position (18,18), which has a score of 15. the scores above and to its right each average this 15 with a 5, for total values of 10.0 (all the lengths happen to be 1), and the score continues to decrease with distance from this point as one moves towards the final score, which does include the component for (18,18) in the cover. the chart-oriented integration of mt engines does not easily support deviations from the linear order of the source text elements, as when discontinuous constituents translate contiguous strings or in the case of cross-component substring order differences. we use a language pair-dependent set of postprocessing rules to alleviate this (for example, by switching the order of adjacent single-word adjective and noun components).we use a language pair-dependent set of postprocessing rules to alleviate this (for example, by switching the order of adjacent single-word adjective and noun components). the outputs from these engines (target language words and phrases) are recorded in a chart whose positions correspond to words in the source language input. ultimately, a multi-engine system depends on the quality of each particular engine. the chart-oriented integration of mt engines does not easily support deviations from the linear order of the source text elements, as when discontinuous constituents translate contiguous strings or in the case of cross-component substring order differences. a less ambitious version of this idea would be to run the low-scoring engines only where there are gaps in the normally high-scoring engines. as a result of the operation of each of the mt engines, new edges are added to the chart, each labeled with the translation of a region of the input string and indexed by this region's beginning and end positions. machine-readable dictionary (the collins spanish/english), the lexicons used by the kbmt modules, a large set of user-generated bilingual glossaries as well as a gazetteer and a list of proper and organization names. a clear example can be seen at position (18,18), which has a score of 15. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1004.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1004.txt new file mode 100644 index 0000000000000000000000000000000000000000..68cd2b27502806b4684b217beb55bb515e35447b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1004.txt @@ -0,0 +1 @@ +the task of identifying sentence boundaries in text has not received as much attention as it deserves. many freely available natural language processing tools require their input to be divided into sentences, but make no mention of how to accomplish this (e.g. (brill, 1994; collins, 1996)). others perform the division implicitly without discussing performance (e.g. (cutting et al., 1992)). 
on first glance, it may appear that using a short list, of sentence-final punctuation marks, such as ., ?, and !, is sufficient. however, these punctuation marks are not used exclusively to mark sentence breaks. for example, embedded quotations may contain any of the sentence-ending punctuation marks and . is used as a decimal point, in email addresses, to indicate ellipsis and in abbreviations. both ! and ? are somewhat less ambiguous *the authors would like to acknowledge the support of arpa grant n66001-94-c-6043, aro grant daah0494-g-0426 and nsf grant sbr89-20230. but appear in proper names and may be used multiple times for emphasis to mark a single sentence boundary. lexically-based rules could be written and exception lists used to disambiguate the difficult cases described above. however, the lists will never be exhaustive, and multiple rules may interact badly since punctuation marks exhibit absorption properties. sites which logically should be marked with multiple punctuation marks will often only have one ((nunberg, 1990) as summarized in (white, 1995)). for example, a sentence-ending abbreviation will most likely not be followed by an additional period if the abbreviation already contains one (e.g. note that d.0 is followed by only a single . in the president lives in washington, d.c.). as a result, we believe that manually writing rules is not a good approach. instead, we present a solution based on a maximum entropy model which requires a few hints about what. information to use and a corpus annotated with sentence boundaries. the model trains easily and performs comparably to systems that require vastly more information. training on 39441 sentences takes 18 minutes on a sun ultra sparc and disambiguating the boundaries in a single wall street journal article requires only 1.4 seconds.the task of identifying sentence boundaries in text has not received as much attention as it deserves. training on 39441 sentences takes 18 minutes on a sun ultra sparc and disambiguating the boundaries in a single wall street journal article requires only 1.4 seconds. we would also like to thank the anonymous reviewers for their helpful insights. we would like to thank david palmer for giving us the test data he and marti hearst used for their sentence detection experiments. many freely available natural language processing tools require their input to be divided into sentences, but make no mention of how to accomplish this (e.g. we have described an approach to identifying sentence boundaries which performs comparably to other state-of-the-art systems that require vastly more resources. the model trains easily and performs comparably to systems that require vastly more information. to our knowledge, there have been few papers about identifying sentence boundaries. furthermore, we showed tha.t a small training corpus is sufficient for good performance, and we estimate that annotating enough data to achieve good performance would require only several hours of work, in comparison to the many hours required to generate pos tag and lexical probabilities. liberman and church suggest in (liberma.n and church, 1992) that a. system could be quickly built to divide newswire text into sentences with a nearly negligible error rate, but do not actually build such a system. 
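The flavour of the approach can be conveyed by a small feature-based scorer over candidate punctuation marks. The features, weights, and abbreviation list below are invented for illustration; the actual system learns its weights by maximum entropy training on the annotated corpus rather than using hand-set values.

import math
import re

ABBREVIATIONS = {"mr", "mrs", "dr", "d.c", "e.g", "etc"}   # toy list

def features(text, i):
    # A few contextual features of the candidate punctuation mark at position i.
    prev_tokens = re.findall(r"[\w.]+", text[:i])
    prev = prev_tokens[-1].lower() if prev_tokens else ""
    nxt = re.findall(r"\w+", text[i + 1:])
    return {
        "prev_is_abbrev": prev.rstrip(".") in ABBREVIATIONS,
        "next_capitalized": bool(nxt) and nxt[0][0].isupper(),
        "prev_has_internal_dot": "." in prev.rstrip("."),
    }

WEIGHTS = {"prev_is_abbrev": -2.5, "next_capitalized": 1.5,
           "prev_has_internal_dot": -1.0}
BIAS = 0.5

def is_boundary(text, i):
    score = BIAS + sum(WEIGHTS[f] for f, on in features(text, i).items() if on)
    return 1.0 / (1.0 + math.exp(-score)) > 0.5    # logistic score over 0.5

text = "The president lives in Washington, D.C. Nearby, Dr. Smith waited."
for i, ch in enumerate(text):
    if ch in ".?!" and (i + 1 == len(text) or text[i + 1] == " "):
        print(i, repr(text[max(0, i - 6):i + 1]), is_boundary(text, i))
# only the final period is scored as a sentence boundary in this toy setup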
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1011.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1011.txt new file mode 100644 index 0000000000000000000000000000000000000000..289a71614dac2ff5c841ea4b37673ef6f2c6cd8b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1011.txt @@ -0,0 +1 @@ +we are concerned with surface-syntactic parsing of running text. our main goal is to describe syntactic analyses of sentences using dependency links that show the head-modifier relations between words. in addition, these links have labels that refer to the syntactic function of the modifying word. a simplified example is in figure 1, where the link between i and see denotes that i is the modifier of see and its syntactic function is that of subject. similarly, a modifies bird, and it is a determiner. first, in this paper, we explain some central concepts of the constraint grammar framework from which many of the ideas are derived. then, we give some linguistic background to the notations we are using, with a brief comparison to other current dependency formalisms and systems. new formalism is described briefly, and it is utilised in a small toy grammar to illustrate how the formalism works. finally, the real parsing system, with a grammar of some 2 500 rules, is evaluated. the parser corresponds to over three man-years of work, which does not include the lexical analyser and the morphological disambiguator, both parts of the existing english constraint grammar parser (karlsson et al., 1995). the parsers can be tested via www'.we are concerned with surface-syntactic parsing of running text. the parsers can be tested via www'. voutilainen and juha heikkild created the original engcg lexicon. we are using atro voutilainen's (1995) improved part-of-speech disambiguation grammar which runs in the cg-2 parser. however, the comparison to other current systems suggests that our dependency parser is very promising both theoretically and practically. in this paper, we have presented some main features of our new framework for dependency syntax. our work is partly based on the work done with the constraint grammar framework that was originally proposed by fred karlsson (1990). for instance, our main goal is to describe syntactic analyses of sentences using dependency links that show the head-modifier relations between words. the distinction between the complements and the adjuncts is vague in the implementation; neither the complements nor the adjuncts are obligatory. the results are not strictly comparable because the syntactic description is somewhat different. the evaluation was done using small excerpts of data, not used in the development of the system. means that a nominal head (nom-head is a set that contains part-of-speech tags that may represent a nominal head) may not appear anywhere to the left (not *-1). for instance, the verb decide has the tag

which means that the prepositional phrase on is typically attached to it. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1014.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1014.txt new file mode 100644 index 0000000000000000000000000000000000000000..f4e3e26bff5f8e36536ef5257230262e582cb5ae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1014.txt @@ -0,0 +1 @@ +the work reported in this paper aims at providing syntactically annotated corpora (treebanks') for stochastic grammar induction. in particular, we focus on several methodological issues concerning the annotation of non-configurational languages. in section 2, we examine the appropriateness of existing annotation schemes. on the basis of these considerations, we formulate several additional requirements. a formalism complying with these requirements is described in section 3. section 4 deals with the treatment of selected phenomena. for a description of the annotation tool see section 5.for a description of the annotation tool see section 5. as the annotation scheme described in this paper focusses on annotating argument structure rather than constituent trees, it differs from existing treebanks in several aspects. its extension is subject to further investigations. the work reported in this paper aims at providing syntactically annotated corpora (treebanks') for stochastic grammar induction. the development of linguistically interpreted corpora presents a laborious and time-consuming task. combining raw language data with linguistic information offers a promising basis for the development of new efficient and robust nlp methods. these differences can be illustrated by a comparison with the penn treebank annotation scheme. partial automation included in the current version significantly reduces the manna.1 effort. a uniform representation of local and non-local dependencies makes the structure more transparent'. owing to the partial automation, the average annotation efficiency improves by 25% (from around 4 minutes to 3 minutes per sentence). such a word order independent representation has the advantage of all structural information being encoded in a single data structure. realworld texts annotated with different strata of linguistic information can be used for grammar induction. in order to make the annotation process more efficient, extra effort has been put. into the development of an annotation tool. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1029.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1029.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b2d06b7690bfebd4e0733b6160734f71ee1dd74 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1029.txt @@ -0,0 +1 @@ +in the past decade, the speech recognition community has had huge successes in applying hidden markov models, or hmm's to their problems. more recently, the natural language processing community has effectively employed these models for part-ofspeech tagging, as in the seminal (church, 1988) and other, more recent efforts (weischedel et al., 1993). we would now propose that hmm's have successfully been applied to the problem of name-finding. 
we have built a named-entity (ne) recognition system using a slightly-modified version of an hmm; we call our system "nymble". to our knowledge, nymble out-performs the best published results of any other learning name-finder. furthermore, it performs at or above the 90% accuracy level, often considered "near-human performance". the system arose from the ne task as specified in the last message understanding conference (muc), where organization names, person names, location names, times, dates, percentages and money amounts were to be delimited in text using sgml-markup. we will describe the various models employed, the methods for training these models and the method for "decoding" on test data (the term "decoding" borrowed from the speech recognition community, since one goal of traversing an hmm is to recover the hidden state sequence). to date, we have successfully trained and used the model on both english and spanish, the latter for met, the multi-lingual entity task.we have shown that using a fairly simple probabilistic model, finding names and other numerical entities as specified by the muc tasks can be performed with "near-human performance", often likened to an f of 90 or above. to date, we have successfully trained and used the model on both english and spanish, the latter for met, the multi-lingual entity task. in the past decade, the speech recognition community has had huge successes in applying hidden markov models, or hmm's to their problems. given the incredibly difficult nature of many nlp tasks, this example of a learned, stochastic approach to name-finding lends credence to the argument that the nlp community ought to push these approaches, to find the limit of phenomena that may be captured by probabilistic, finite-state methods. also, name-finding can be directly employed for link analysis and other information retrieval problems. the basic premise of the approach is to consider the raw text encountered when decoding as though it had passed through a noisy channel, where it had been originally marked with named entities.' we would like to incorporate the following into the current model: while our initial results have been quite favorable, there is still much that can be done potentially to improve performance and completely close the gap between learned and rule-based name-finding systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1030.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1030.txt new file mode 100644 index 0000000000000000000000000000000000000000..81667be8b73d21c06a7be516887bb9bba219773f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1030.txt @@ -0,0 +1 @@ +text processing applications, such as machine translation systems, information retrieval systems or natural-language understanding systems, need to identify multi-word expressions that refer to proper names of people, organizations, places, laws and other entities. when encountering mrs. candy hill in input text, for example, a machine translation system should not attempt to look up the translation of candy and hill, but should translate mrs. to the appropriate personal title in the target language and preserve the rest of the name intact. similarly, an information retrieval system should not attempt to expand candy to all of its morphological variants or suggest synonyms (wacholder et al. 1994). 
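The generative story behind such an hmm name-finder can be sketched as hidden name classes that emit words, so that a labelling is scored by class-transition and class-conditional word probabilities. All probabilities below are invented toy values; the real system is trained on annotated text and decoded with a viterbi-style search.

import math

TRANS = {("<s>", "NOT"): 0.8, ("<s>", "PER"): 0.1, ("<s>", "ORG"): 0.1,
         ("NOT", "NOT"): 0.7, ("NOT", "PER"): 0.2, ("NOT", "ORG"): 0.1,
         ("PER", "PER"): 0.5, ("PER", "NOT"): 0.5,
         ("ORG", "ORG"): 0.6, ("ORG", "NOT"): 0.4}
EMIT = {("PER", "alice"): 0.3, ("PER", "smith"): 0.3, ("ORG", "acme"): 0.4,
        ("NOT", "works"): 0.1, ("NOT", "at"): 0.2}
FLOOR = 1e-6   # unseen (class, word) pairs keep a small probability

def labelling_score(tokens, classes):
    logp, prev = 0.0, "<s>"
    for word, cls in zip(tokens, classes):
        logp += math.log(TRANS.get((prev, cls), FLOOR))   # name-class transition
        logp += math.log(EMIT.get((cls, word), FLOOR))    # word given name class
        prev = cls
    return logp

tokens = ["alice", "smith", "works", "at", "acme"]
with_names = ["PER", "PER", "NOT", "NOT", "ORG"]
no_names = ["NOT"] * 5
print(labelling_score(tokens, with_names) > labelling_score(tokens, no_names))  # True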
the need to identify proper names has two aspects: the recognition of known names and the discovery of new names. since obtaining and maintaining a name database requires significant effort, many applications need to operate in the absence of such a resource. without a database, names need to be discovered in the text and linked to entities they refer to. even where name databases exist, text needs to be scanned for new names that are formed when entities, such as countries or commercial companies, are created, or for unknown names which become important when the entities they refer to become topical. this situation is the norm for dynamic applications such as news providing services or internet information indexing. the next section describes the different types of proper name ambiguities we have observed. section 3 discusses the role of context and world knowledge in their disambiguation; section 4 describes the process of name discovery as implemented in nominator, a module for proper name recognition developed at the ibm t.j. watson research center. sections 5-7 elaborate on nominator's disambiguation heuristics.ambiguity remains one of the main challenges in the processing of natural language text. because of these difficulties, we believe that for the forseeable future, practical applications to discover new names in text will continue to require the sort of human effort invested in nominator. text processing applications, such as machine translation systems, information retrieval systems or natural-language understanding systems, need to identify multi-word expressions that refer to proper names of people, organizations, places, laws and other entities. sections 5-7 elaborate on nominator's disambiguation heuristics. name identification requires resolution of a subset of the types of structural and semantic ambiguities encountered in the analysis of nouns and noun phrases (nps) in natural language processing. many of these uncategorized names are titles of articles, books and other works of art that we currently do not handle. in the rest of the paper we describe the resources and heuristics we have designed and implemented in nominator and the extent to which they resolve these ambiguities. an evaluation of an earlier version of nominator, was performed on 88 wall street journal documents (nist 1993) that had been set aside for testing. all of these ambiguities must be dealt with if proper names are to be identified correctly. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1039.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1039.txt new file mode 100644 index 0000000000000000000000000000000000000000..e581fe671a1f86c247f04006ce1ebb0b9cf3157a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1039.txt @@ -0,0 +1 @@ +systems that generate natural language output as part of their interaction with a user have become a major area of research and development. typically, natural language generation is divided into several phases, namely text planning (determining output content and structure), sentence planning (determining abstract target language resources to express content, such as lexical items and syntactic constructions), and realization (producing the final text string) (reiter, 1994). while text and sentence planning may sometimes be combined, a realizer is almost always included as a distinct module. 
it is in the realizer that knowledge about the target language resides (syntax, morphology, idiosyncratic properties of lexical items). realization is fairly well understood both from a linguistic and from a computational point of view, and therefore most projects that use text generation do not include the realizer in the scope of their research. instead, such projects use an off-the-shelf realizer, among which penman (bateman, 1996) and surge/fuf (elhadad and robin, 1996) are probably the most popular. in this technical note and demo we present a new off-theshelf realizer, realpro. realpro is derived from previous systems (iordanskaja et al., 1988; iordanslcaja et al., 1992; rambow and korelsky, 1992), but represents a new design and a completely new implementation. realpro has the following characteristics, which we believe are unique in this combination: we reserve a more detailed comparison with penman and fuf, as well as with alethgen/gl (coch, 1996) (which is perhaps the system most similar to realpro, since they are based on the same linguistic theory and are both implemented with speed in mind), for a more extensive paper. this technical note presents realpro, concentrating on its structure, its coverage, its interfaces, and its performance.this technical note presents realpro, concentrating on its structure, its coverage, its interfaces, and its performance. systems that generate natural language output as part of their interaction with a user have become a major area of research and development. the development of realpro was partially supported by usaf rome laboratory under contracts f3060293-c-0015, f30602-94-c-0124, and f30602-92-c-0163, and by darpa under contracts f30602-95-2-0005 and f30602-96-c-0220. we are grateful to r. kittredge, t. korelsky, d. mccullough, a. nasr, e. reiter, and m. white as well as to three anonymous reviewers for helpful comments about earlier drafts of this technical note and/or about realpro. the input to realpro is a syntactic dependency structure. this means that realpro gives the developer control over the output, while taking care of the linguistic details. realpro is licensed free of charge to qualified academic institutions, and is licensed for a fee to commercial sites. the system is fully operational, runs on pc as well as on unix work stations, and is currently used in an application we have developed (lavoie et al., 1997) as well as in several on-going projects (weather report generation, machine translation, project report generation). the architecture of realpro is based on meaningtext theory, which posits a sequence of correspondences between different levels of representation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1052.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1052.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4ff639e8bb607b6fe719ca2d63196f846f9c087 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/A97-1052.txt @@ -0,0 +1 @@ +predicate subcategorization is a key component of a lexical entry, because most, if not all, recent syntactic theories 'project' syntactic structure from the lexicon. 
therefore, a wide-coverage parser utilizing such a lexicalist grammar must have access to an accurate and comprehensive dictionary encoding (at a minimum) the number and category of a predicate's arguments and ideally also information about control with predicative arguments, semantic selection preferences on arguments, and so forth, to allow the recovery of the correct predicate-argument structure. if the parser uses statistical techniques to rank analyses, it is also critical that the dictionary encode the relative frequency of distinct subcategorization classes for each predicate. several substantial machine-readable subcategorization dictionaries exist for english, either built largely automatically from machine-readable versions of conventional learners' dictionaries, or manually by (computational) linguists (e.g. the alvey nl tools (anlt) dictionary, boguraev et al. (1987); the comlex syntax dictionary, grishman et al. (1994)). unfortunately, neither approach can yield a genuinely accurate or comprehensive computational lexicon, because both rest ultimately on the manual efforts of lexicographers / linguists and are, therefore, prone to errors of omission and commission which are hard or impossible to detect automatically (e.g. boguraev & briscoe, 1989; see also section 3.1 below for an example). furthermore, manual encoding is labour intensive and, therefore, it is costly to extend it to neologisms, information not currently encoded (such as relative frequency of different subcategorizations), or other (sub)languages. these problems are compounded by the fact that predicate subcategorization is closely associated to lexical sense and the senses of a word change between corpora, sublanguages and/or subject domains (jensen, 1991). in a recent experiment with a wide-coverage parsing system utilizing a lexicalist grammatical framework, briscoe & carroll (1993) observed that half of parse failures on unseen test data were caused by inaccurate subcategorization information in the anlt dictionary. the close connection between sense and subcategorization and between subject domain and sense makes it likely that a fully accurate 'static' subcategorization dictionary of a language is unattainable in any case. moreover, although schabes (1992) and others have proposed `lexicalized' probabilistic grammars to improve the accuracy of parse ranking, no wide-coverage parser has yet been constructed incorporating probabilities of different subcategorizations for individual predicates, because of the problems of accurately estimating them. these problems suggest that automatic construction or updating of subcategorization dictionaries from textual corpora is a more promising avenue to pursue. preliminary experiments acquiring a few verbal subcategorization classes have been reported by brent (1991, 1993), manning (1993), and ushioda et at. (1993). in these experiments the maximum number of distinct subcategorization classes recognized is sixteen, and only ushioda et at. attempt to derive relative subcategorization frequency for individual predicates. we describe a new system capable of distinguishing 160 verbal subcategorization classes—a superset of those found in the anlt and comlex syntax dictionaries. the classes also incorporate information about control of predicative arguments and alternations such as particle movement and extraposition. 
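The acquisition step, tallying hypothesised frames per verb and turning them into relative frequencies, can be sketched as below. The frame labels, observations, and the crude count threshold are placeholders; the described system distinguishes 160 classes and filters hypotheses statistically rather than with a raw cut-off.

from collections import Counter, defaultdict

# (verb, hypothesised subcategorization frame) pairs read off shallow parses;
# the data and frame names are invented for illustration.
observations = [
    ("give", "NP_NP"), ("give", "NP_NP"), ("give", "NP_NP"),
    ("give", "NP_PP_to"), ("give", "NP_PP_to"), ("give", "NP"),   # "NP" = likely noise
    ("believe", "SCOMP"), ("believe", "SCOMP"), ("believe", "NP"),
]
MIN_COUNT = 2   # crude evidence threshold standing in for statistical filtering

def subcat_lexicon(pairs, min_count=MIN_COUNT):
    by_verb = defaultdict(Counter)
    for verb, frame in pairs:
        by_verb[verb][frame] += 1
    lexicon = {}
    for verb, frames in by_verb.items():
        kept = {f: c for f, c in frames.items() if c >= min_count}
        total = sum(kept.values()) or 1
        lexicon[verb] = {f: c / total for f, c in kept.items()}   # relative frequency
    return lexicon

for verb, frames in subcat_lexicon(observations).items():
    print(verb, frames)
# give {'NP_NP': 0.6, 'NP_PP_to': 0.4}
# believe {'SCOMP': 1.0}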
we report an initial experiment which demonstrates that this system is capable of acquiring the subcategorization classes of verbs and the relative frequencies of these classes with comparable accuracy to the less ambitious extant systems. we achieve this performance by exploiting a more sophisticated robust statistical parser which yields complete though 'shallow' parses, a more comprehensive subcategorization class classifier, and a priori estimates of the probability of membership of these classes. we also describe a small-scale experiment which demonstrates that subcategorization class frequency information for individual verbs can be used to improve parsing accuracy. we also describe a small-scale experiment which demonstrates that subcategorization class frequency information for individual verbs can be used to improve parsing accuracy. predicate subcategorization is a key component of a lexical entry, because most, if not all, recent syntactic theories 'project' syntactic structure from the lexicon. the experiment and comparison reported above suggests that our more comprehensive subcategorization class extractor is able both to assign classes to individual verbal predicates and also to rank them according to relative frequency with comparable accuracy to extant systems. boguraev & briscoe, 1987). we achieve this performance by exploiting a more sophisticated robust statistical parser which yields complete though 'shallow' parses, a more comprehensive subcategorization class classifier, and a priori estimates of the probability of membership of these classes. we have also demonstrated that a subcategorization dictionary built with the system can improve the accuracy of a probabilistic parser by an appreciable amount. if the parser uses statistical techniques to rank analyses, it is also critical that the dictionary encode the relative frequency of distinct subcategorization classes for each predicate. we report an initial experiment which demonstrates that this system is capable of acquiring the subcategorization classes of verbs and the relative frequencies of these classes with comparable accuracy to the less ambitious extant systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1007.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1007.txt new file mode 100644 index 0000000000000000000000000000000000000000..af2341fad5cb33c33c0f6bb1730f5ca597911967 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1007.txt @@ -0,0 +1 @@ +moreover, in many cases it is very important not to deviate from certain linguistic standards in generation, in which case hand-crafted grammars give excellent control. however, in other applications for nlg the variety of the output is much bigger, and the demands on the quality of the output somewhat less stringent. a typical example is nlg in the context of (interlingua- or transfer-based) machine translation. another reason for relaxing the quality of the output may be that not enough time is available to develop a full grammar for a new target language in nlg. in all these cases, stochastic ("empiricist") methods provide an alternative to hand-crafted ("rationalist") approaches to nlg. to our knowledge, the first to use stochastic techniques in nlg were langkilde and knight (1998a) and (1998b). in this paper, we present fergus (flexible empiricist/rationalist generation using syntax).
fergus follows langkilde and knight's seminal work in using an n-gram language model, but we augment it with a tree-based stochastic model and a traditional tree-based syntactic grammar. more recent work on aspects of stochastic generation include (langkilde and knight, 2000), (malouf, 1999) and (ratnaparkhi, 2000). before we describe in more detail how we use stochastic models in nlg, we recall the basic tasks in nlg (rambow and korelsky, 1992; reiter, 1994). during text planning, content and structure of the target text are determined to achieve the overall communicative goal. during sentence planning, linguistic means - in particular, lexical and syntactic means - are determined to convey smaller pieces of meaning. during realization, the specification chosen in sentence planning is transformed into a surface string, by linearizing and inflecting words in the sentence (and typically, adding function words). as in the work by langkilde and knight, our work ignores the text planning stage, but it does address the sentence planning and the realization stages. the structure of the paper is as follows. exploiting a probabilistic hierarchical model for generation. srinivas bangalore and owen rambow, at&t labs research, 180 park avenue, florham park, nj 07932. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1044.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1044.txt new file mode 100644 index 0000000000000000000000000000000000000000..d61720659a31c88b44aa157fd3be4a995329b338 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1044.txt @@ -0,0 +1 @@ +such features include sense, register, domain specificity, pragmatic restrictions on usage, semantic markedness, and orientation, as well as automatically identified links between words (e.g., semantic relatedness, synonymy, antonymy, and meronymy). automatically learning features of this type from huge corpora allows the construction or augmentation of lexicons, and the assignment of semantic labels to words and phrases in running text. this information in turn can be used to help determine additional features at the lexical, clause, sentence, or document level. this paper explores the benefits that some lexical features of adjectives offer for the prediction of a contextual sentence-level feature, subjectivity. subjectivity in natural language refers to aspects of language used to express opinions and evaluations. the computational task addressed here is to distinguish sentences used to present opinions and other forms of subjectivity (subjective sentences, e.g., "at several different layers, its a fascinating title") from sentences used to objectively present factual information (objective sentences, e.g., "bell industries inc. increased its quarterly to 10 cents from 7 cents a share"). much research in discourse processing has focused on task-oriented and instructional dialogs. the task addressed here comes to the fore in other genres, especially news reporting and internet forums, in which opinions of various agents are expressed and where subjectivity judgements could help in recognizing inflammatory messages ("flames") and mining online sources for product reviews. other tasks for which subjectivity recognition is potentially very useful include information extraction and information retrieval.
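A toy version of using adjective features as subjectivity clues might look like the following. The adjective list and threshold are invented; the paper derives gradability and orientation features automatically and evaluates them statistically rather than relying on a fixed word list.

ORIENTED_OR_GRADABLE_ADJ = {"fascinating", "terrible", "great", "boring", "happy"}

def likely_subjective(tagged_sentence, threshold=1):
    # tagged_sentence: list of (word, pos) pairs; "JJ" marks adjectives.
    hits = sum(1 for word, pos in tagged_sentence
               if pos == "JJ" and word.lower() in ORIENTED_OR_GRADABLE_ADJ)
    return hits >= threshold

opinion = [("its", "PRP$"), ("a", "DT"), ("fascinating", "JJ"), ("title", "NN")]
fact = [("bell", "NNP"), ("increased", "VBD"), ("its", "PRP$"),
        ("quarterly", "JJ"), ("dividend", "NN")]
print(likely_subjective(opinion), likely_subjective(fact))   # True False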
assigning sub.icctivity labels to documents or portions of documents is an example of non-topical characteri?ation f information. current in- formation extraction and rolricval lechnology focuses al- most exclusively on lhe subject matter of the documcnls. yet, additiomtl components of a document inllucncc its relevance to imrlicuhu ? users or tasks, including, for ex- alnple, the evidential slatus el: lhc material presented, and attitudes adopted in fawn" or against a lmrticular person, event, or posilion (e.g., articles on a presidenlial cam- paign wrillen to promote a specific candidate). in sum- marization, subjectivity judgmcnls could be included in documcllt proiilcs to augment aulomatically produced docunacnt summaries, and to hel l) the user make rele- vance judgments when using a search engine. ()thor work on sub.iectivity (wicbc et al., 1999; bruce and wicbc, 2000) has established a positive and statisti- cally signilicant correlation with the presence of adiec- lives.effects of adjective orientation and gradability on sentence subjectivity vas i le ios hatz ivass i log lou depar tment o1 computer sc ience co lumbia un ivers i l y new york, ny 10027 vh@cs , co lumbia , edu janyce m. wiebe depar tment o f computer sc ience new mex ico state un ivers i ty las cruces , nm 88003 w iebe@cs , nmsu. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1072.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1072.txt new file mode 100644 index 0000000000000000000000000000000000000000..78cc47ff7b0463d67101437fb4651831a64ad6f3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-1072.txt @@ -0,0 +1 @@ +toi)ic signatures can lie used to identify the t)resence of a (:omph~x conce.pt a concept hat consists of several related coinl)onents in fixed relationships. ]~.c.stauvant-uisit, for examph~, invoh,es at h,ast the concel)ts lltcgfit, t.(tt, pay, and possibly waiter, all(l dragon boat pcstivai (in tat- wan) involves the ct)llc(!l)t,s cal(tlztlt,s (a talisman to ward off evil), rnoza (something with the t)ower of preventing pestilen(:e and strengthening health), pic- tures of ch, un9 kuei (a nemesis of evil spirits), eggs standing on end, etc. only when the concepts co- occur is one licensed to infer the comph:x concept; cat or moza alone, for example, are not sufficient. at this time, we do not c.onsider the imerrelationships among tile concepts. since many texts may describe all the compo- nents of a comi)lex concept without ever exi)lic- itly mentioning the mlderlying complex concel/t--a tol)ic--itself, systems that have to identify topic(s), for summarization or information retrieval, require a method of infcuring comt)hx concellts flom their component words in the text. 2 re la ted work in late 1970s, ])e.long (dejong, 1982) developed a system called i"tiump (fast reading understand- ing and memory program) to skim newspaper sto- ries and extract the main details. frump uses a data structure called sketchy script to organize its world knowhdge. each sketchy script is what frumi ) knows al)out what can occur in l)articu- lar situations such as denmnstrations, earthquakes, labor strike.s, an(t so on. frump selects a t)artic- ular sketchy script based on clues to styled events in news articles. in other words, frump selects an eml)t3 ~ t(uni)late 1whose slots will be tilled on the fly as t"f[ump reads a news artme. 
a summary is gen- erated })ased on what has been (:al)tured or filled in the teml)iate. the recent success of infornmtion extractk)n re- search has encoreaged the fi{um1 ) api)roach. the summons (summarizing online news artmes) system (mckeown and radev, 1999) takes tem- l)late outputs of information extra(:tion systems de- velofmd for muc conference and generating smn- maries of multit)le news artmes. frump and sum- mons both rely on t/rior knowledge of their do- mains, th)wever, to acquire such t)rior knowledge is lal)or-intensive and time-consuming. i~)r exam-- l)le, the unive.rsity of massa(:husetts circus sys- l.enl use(l ill the muc-3 (saic, 1998) terrorism do- main required about 1500 i)erson-llours to define ex- traction lmtterns 2 (rilotf, 1996).the automated acquisit ion of topic signatures for text summarizat ion chin -yew l in and eduard hovy in fo rmat ion s(:i(umes i l l s t i tu te un ivers i ty of southern ca l i fo rn ia mar ina del rey, ca 90292, usa { cyl,hovy }c~isi.edu abst rac t in order to produce, a good summary, one has to identify the most relevant portions of a given text. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2136.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2136.txt new file mode 100644 index 0000000000000000000000000000000000000000..6ee003ae771174893e04ce8fee9812880e0b49f9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2136.txt @@ -0,0 +1 @@ +we evaluate exdisco by com- paring the pertbrmance of discovered patterns against that of manually constructed systems on actual extraction tasks. 0 introduct ion intbrmation extraction is the selective xtrac- tion of specified types of intbrmation from nat- ural language text. the intbrmation to be extracted may consist of particular semantic classes of objects (entities), relationships among these entities, and events in which these entities participate. the extraction system places this intbrmation into a data base tbr retrieval and subsequent processing. in this paper we shall be concerned primar- ily with the extraction of intbrmation about events. in the terminology which has evolved tiom the message understanding conferences (muc, 1995; muc, 1993), we shall use the term subject domain to refer to a broad class of texts, such as business news, and tile term scenario to refer to tile specification of tile particular events to be extracted. for example, the "manage- ment succession" scenario for muc-6, which we shall refer to throughout this paper, involves in- formation about corporate executives tarting and leaving positions. the fundamental problem we face in port- ing an extraction system to a new scenario is to identify the many ways in which intbrmation about a type of event may be expressed in the text;. typically, there will be a few common tbrms of expression which will quickly come to nfind when a system is being developed. how- ever, the beauty of natural language (and the challenge tbr computational linguists) is that there are many variants which an imaginative writer cast use, and which the system needs to capture. finding these variants may involve studying very large amounts of text; in the sub- ject domain. this has been a major impediment to the portability and performance of event ex- traction systems. 
we present; in this paper a new approach to finding these variants automatically flom a large corpus, without the need to read or amlo- tate the corpus. this approach as been evalu- ated on actual event extraction scenarios. in the next section we outline the strncture of our extraction system, and describe the discov- ery task in the context of this system.automatic acquisition of domain knowledge for information extraction roman yangarber, ralph grishman past tapanainen courant inst i tute of conexor oy mathemat ica l sciences helsinki, f in land new york university {roman [ grishman}@cs, nyu. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2137.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2137.txt new file mode 100644 index 0000000000000000000000000000000000000000..dab961e295dad1b5acc07397fdc2d08e722ce657 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2137.txt @@ -0,0 +1 @@ +5/]lell ,]le lcsllll;s are better with the new tcch- ni(lue , a question arises as t() wh(,l;h(;r these l:(`-- sult; (litleren(:es are due t() the new technique a(:t;ually 1)eing l)cl;t(x or just; due 1;o (:han(:e. un- tortmmtely, one usually callll()t) directly answer the qnesl;ion "what is the 1)robatfility that 1;11(; now l;(x:hni(luc, is t)el;lx~r givell l;he results on the t(,sl, dal;a sol;": i)(new technique is better [ test set results) ]~ul; with statistics, one cml answer the follow- ing proxy question: if the new technique was a(> tually no ditterent han the old t(,(hnique ((;he * this paper reports on work l)erfonncd at the mitr1,; corporation under the sul)porl: of the mitilj,; ,qponsored research l)rogrmn. warren grcit[, l ,ynette il irschlnm b christilm l)orall, john llen(lerson, kelmeth church, ted l)unning, wessel kraaij, milch marcus and an anony- mous reviewer l)rovided hell)rid suggestions. copyright @2000 the mitre corl)oration. all rights r(~s(nvcd. null hyl)othesis), wh~tt is 1:11(; 1)robat)ility that the results on the test set would l)e at least this skewed in the new techniques favor (box eta] . thai; is, what is p(test se, t results at least this skew(a in the new techni(lues favor i new technique is no (liffercnt than the old) if the i)robtfl)ility is small enough (5% off;on is used as the threshold), then one will rqiect the mill hyi)othems and say that the differences in 1;he results are :sta.tisl;ically siglfilicant" ai; that thrt,shold level. this 1)al)(n" examines some of th(`- 1)ossil)le me?hods for trying to detect statistically signif- leant diflelenc(`-s in three commonly used met- li(:s: telall, 1)re(ision and balanced f-score. many of these met;ire(is arc foun(t to be i)rol)lem- a.ti(" ill a, so, t; of exl)erinw, nts that are performed. thes(~ methods have a, tendency to ullderesti- mat(`- th(, signili(:ance, of the results, which tends t() 1hake one, 1)elieve thai; some new techni(tuc is no 1)el;l;er l;lmn the (:urrent technique even when il; is. this mtderest imate comes flom these lnc|h- ells assuming l;hat; the te(:hlfi(tues being con> lmrcd produce indepen(lc, nt results when in our exl)eriments , the techniques 1)eing coml)ared tend to 1)reduce l)ositively corr(`-lated results. to handle this problem, we, point out some st~ttistical tests, like the lnatche(t-pair t, sign and wilcoxon tests (harnett, 1982, see. 8.7 and 15.5), which do not make this assulnption. 
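The matched-pair tests mentioned just above compare the two systems on the same test documents instead of treating their scores as independent samples. Below is a minimal sketch of the exact matched-pair sign test on per-document recall; the per-document scores for the hypothetical "old" and "new" systems are invented, and only the Python standard library is used.

```python
# Exact matched-pair sign test on per-document recall (toy data, standard library only).
from math import comb

old = [0.61, 0.58, 0.70, 0.55, 0.64, 0.59, 0.66, 0.62, 0.57, 0.60]
new = [0.65, 0.60, 0.69, 0.58, 0.66, 0.63, 0.70, 0.64, 0.61, 0.62]

diffs = [n - o for n, o in zip(new, old) if n != o]   # ties are discarded
wins = sum(d > 0 for d in diffs)                      # documents where the new system is better
n = len(diffs)

# Two-sided probability of a split at least this skewed under H0 (each sign is a fair coin).
k = max(wins, n - wins)
p_value = min(1.0, 2 * sum(comb(n, i) for i in range(k, n + 1)) / 2 ** n)

print(f"{wins}/{n} documents favour the new system, sign-test p = {p_value:.3f}")
```

Because the two systems' outputs on the same documents tend to be positively correlated, a paired test like this is usually more sensitive than tests that assume the two sets of scores are independent.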
one can use these tests on the recall metric, but the precision and balanced f-score metrics have too complex a form for these tests. for such complex metrics, we use a compute-intensive randomization test (cohen, 1995, sec. 5.3), which also avoids this independence assumption.more accurate tests for the statistical significance of result differences * alexander yeh mitre corp. 202 burlington rd. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2163.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2163.txt new file mode 100644 index 0000000000000000000000000000000000000000..6659bb05529607e15d412f59f316075ca872c8ed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C00-2163.txt @@ -0,0 +1 @@ +here f_1^J = f denotes the (french) source and e_1^I = e denotes the (english) target string. most smt models (brown et al., 1993; vogel et al., 1996) try to model word-to-word correspondences between source and target words using an alignment mapping from source position j to target position i = a_j. we can rewrite the probability pr(f_1^J | e_1^I) by introducing the hidden alignments a_1^J := a_1 ... a_j ... a_J (a_j in {0, ..., I}): pr(f_1^J | e_1^I) = sum_{a_1^J} pr(f_1^J, a_1^J | e_1^I) = sum_{a_1^J} prod_{j=1..J} pr(f_j, a_j | f_1^{j-1}, a_1^{j-1}, e_1^I). to allow for french words which do not directly correspond to any english word, an artificial empty word e_0 is added to the target sentence at position i = 0. the different alignment models we present provide different decompositions of pr(f_1^J, a_1^J | e_1^I). an alignment â_1^J for which â_1^J = argmax_{a_1^J} pr(f_1^J, a_1^J | e_1^I) holds for a specific model is called the viterbi alignment of this model. in this paper we will describe extensions to the hidden-markov alignment model from (vogel et al., 1996) and compare these to models 1 - 4 of (brown et al., 1993). we propose to measure the quality of an alignment model using the quality of the viterbi alignment compared to a manually-produced alignment. this has the advantage that once having produced a reference alignment, the evaluation itself can be performed automatically. in addition, it results in a very precise and reliable evaluation criterion which is well suited to assess various design decisions in modeling and training of statistical alignment models. it is well known that manually performing a word alignment is a complicated and ambiguous task (melamed, 1998). therefore, to produce the reference alignment we use a refined annotation scheme which reduces the complications and ambiguities occurring in the manual construction of a word alignment. as we use the alignment models for machine translation purposes, we also evaluate the resulting translation quality of different models. 2 alignment with hmm in the hidden-markov alignment model we assume a first-order dependence for the alignments a_j and that the translation probability depends only on a_j and not on a_{j-1}: pr(f_j, a_j | f_1^{j-1}, a_1^{j-1}, e_1^I) = p(a_j | a_{j-1}, I) * p(f_j | e_{a_j}).a comparison of alignment models for statistical machine translation franz josef och and hermann ney lehrstuhl für informatik vi, computer science department rwth aachen - university of technology d-52056 aachen, germany {och, ney}@informatik.
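As a concrete illustration of the decomposition just described, the sketch below sums the hidden alignments out of pr(f | e) with a first-order jump model p(a_j | a_{j-1}, I) and a lexical translation table t(f | e), using a forward recursion. The two-word sentence pair and all probability values are invented toy numbers, not parameters trained by any of the cited models.

```python
# Forward recursion over hidden alignments for a toy HMM-style alignment model.
e = ["the", "house"]        # English (target of the alignment) sentence
f = ["la", "maison"]        # French source sentence
I = len(e)

# Toy lexical translation table t(f_j | e_i); unseen pairs get a small floor.
t = {("la", "the"): 0.7, ("la", "house"): 0.1,
     ("maison", "the"): 0.1, ("maison", "house"): 0.8}

def jump(prev_i: int, i: int) -> float:
    """Toy first-order transition p(a_j = i | a_{j-1} = prev_i): prefer small jumps."""
    weights = [1.0 / (1 + abs(k - prev_i)) for k in range(I)]
    return weights[i] / sum(weights)

# alpha[j][i] = Pr(f_1 .. f_j, a_j = i | e); the first position uses a uniform start.
alpha = [[(1.0 / I) * t.get((f[0], e[i]), 1e-6) for i in range(I)]]
for j in range(1, len(f)):
    alpha.append([sum(alpha[j - 1][k] * jump(k, i) for k in range(I))
                  * t.get((f[j], e[i]), 1e-6) for i in range(I)])

print("Pr(f | e) =", sum(alpha[-1]))   # marginal over all alignments of the last position
```

Replacing the sums with maximizations in the same recursion yields the Viterbi alignment that the paper uses for evaluation against a hand-built reference alignment.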
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1011.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1011.txt new file mode 100644 index 0000000000000000000000000000000000000000..3f8b544efddb7a6aa9fd8e95c5f20f451a78c1c7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1011.txt @@ -0,0 +1 @@ +we address here the problem of base np translation, in which for a given base noun phrase in a source language (e.g., ?information age? in english), we are to find out its possible translation(s) in a target language (e.g., ? in chinese). we define a base np as a simple and non-recursive noun phrase. in many cases, base nps represent holistic and non-divisible concepts, and thus accurate translation of them from one language to another is extremely important in applications like machine translation, cross language information retrieval, and foreign language writing assistance. in this paper, we propose a new method for base np translation, which contains two steps: (1) translation candidate collection, and (2) translation selection. in translation candidate collection, for a given base np in the source language, we look for its translation candidates in the target language. to do so, we use a word-to-word translation dictionary and corpus data in the target language on the web. in translation selection, we determine the possible translation(s) from among the candidates. we use non-parallel corpus data in the two languages on the web and employ one of the two methods which we have developed. in the first method, we view the problem as that of classification and employ an ensemble of na?ve bayesian classifiers constructed with the em algorithm. we will use ?em-nbc-ensemble? to denote this method, hereafter. in the second method, we view the problem as that of calculating similarities between context vectors and use tf-idf vectors also constructed with the em algorithm. we will use ?em-tf-idf? to denote this method. experimental results indicate that our method is very effective, and the coverage and top 3 accuracy of translation at the final stage are 91.4% and 79.8%, respectively. the results are significantly better than those of the baseline methods relying on existing technologies. the higher performance of our method can be attributed to the enormity of the web data used and the employment of the em algorithm.discriminatively trained taggers, on the other hand, have difficulties to handle the huge number of features which are active at the same time if any possible combination of context attributes defines a separate feature. we presented a hmm pos tagger for fine-grained tagsets which splits the pos tags into attributevectors and estimates the conditional probabilities of the attributes with decision trees. the backoff smoothing methods of traditional n-gram pos taggers require an ordering of the reduced contexts which is not available, here. in ex periments with german and czech corpora, this method achieved a higher tagging accuracy than two state-of-the-art general-purpose pos taggers (tnt and svmtool). context prob. decision trees are ideal for this task because the iden tification of relevant attribute combinations is at the heart of this method. a hidden-markov-model part-of-speech tagger (brants, 2000, e.g.) computes the most probable pos tag sequence ? t n 1 = ? t 1 , ..., ? t n for a given word sequence w n 1 . ? 
t^n_1 = argmax_{t^n_1} p(t^n_1, w^n_1). the joint probability of the two sequences is defined as the product of context probabilities and lexical probabilities over all pos tags: p(t^n_1, w^n_1) = prod_{i=1..n} p(t_i | t^{i-1}_{i-k}) * p(w_i | t_i). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1054.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1054.txt new file mode 100644 index 0000000000000000000000000000000000000000..bd45ab909eedb3cd4bf0022857689947d183f8da --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1054.txt @@ -0,0 +1 @@ +named entity (ne) recognition is a task in which proper nouns and numerical information in a document are detected and classified into categories such as person, organization, and date. it is a key technology of information extraction and open-domain question answering (voorhees and harman, 2000). we are building a trainable open-domain question answering system called saiqa-ii. in this paper, we show that an ne recognizer based on support vector machines (svms) gives better scores than conventional systems. svms have given high performance in various classification tasks (joachims, 1998; kudo and matsumoto, 2001). however, it turned out that off-the-shelf svm classifiers are too inefficient for ne recognition. the recognizer runs at a rate of only 85 bytes/sec on an athlon 1.3 ghz linux pc, while rule-based systems (e.g., isozaki (2001)) can process several kilobytes in a second. the major reason is the inefficiency of svm classifiers. there are other reports on the slowness of svm classifiers. another svm-based ne recognizer (yamada and matsumoto, 2001) is 0.8 sentences/sec on a pentium iii 933 mhz pc. an svm-based part-of-speech (pos) tagger (nakagawa et al, 2001) is 20 tokens/sec on an alpha 21164a 500 mhz processor. it is difficult to use such slow systems in practical applications. in this paper, we present a method that makes the ne system substantially faster. this method can also be applied to other tasks in natural language processing such as chunking and pos tagging. another problem with svms is their incomprehensibility. it is not clear which features are important or how they work. the above method is also useful for finding useless features. we also mention a method to reduce training time. 1.1 support vector machines. suppose we have a set of training data for a two-class problem: (x_1, y_1), ..., (x_N, y_N), where x_i is a feature vector of the i-th sample in the training data and y_i in {+1, -1} is the label for the sample. the goal is to find a decision function that accurately predicts y for an unseen x. a non-linear svm classifier gives a decision function f(x) = sign(g(x)) for an input vector x, where g(x) = sum_i w_i k(z_i, x) + b. here, f(x) = +1 means x is a member of a certain class and f(x) = -1 means x is not a member. the z_i are called support vectors and are representatives of training examples. m is the number of support vectors. therefore, the computational complexity of g(x) is proportional to m. support vectors and other constants are determined by solving a certain quadratic programming problem. k(z, x) is a kernel that implicitly maps vectors into a higher dimensional space. typical kernels are functions of the dot product z . x. a polynomial kernel of degree d is given by k(z, x) = (1 + z . x)^d.
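The tagger factorization quoted at the start of this file (a product of context probabilities and lexical probabilities) is decoded with the usual Viterbi recursion. Here is a toy sketch restricted to a bigram context (k = 1); the tag set and all probability tables are invented for illustration, not estimates from a corpus or from decision trees.

```python
# Toy bigram-HMM tagger: best tag sequence = argmax prod p(t_i | t_{i-1}) * p(w_i | t_i).
tags = ["DET", "NOUN", "VERB"]
trans = {("<s>", "DET"): 0.6, ("<s>", "NOUN"): 0.3, ("<s>", "VERB"): 0.1,
         ("DET", "NOUN"): 0.9, ("DET", "DET"): 0.05, ("DET", "VERB"): 0.05,
         ("NOUN", "VERB"): 0.6, ("NOUN", "NOUN"): 0.3, ("NOUN", "DET"): 0.1,
         ("VERB", "DET"): 0.5, ("VERB", "NOUN"): 0.4, ("VERB", "VERB"): 0.1}
emit = {("DET", "the"): 0.9, ("NOUN", "dog"): 0.4, ("NOUN", "barks"): 0.1,
        ("VERB", "barks"): 0.7}

def viterbi(words):
    # best[i][t] = (probability of the best path ending in tag t at position i, that path)
    best = [{t: (trans.get(("<s>", t), 1e-6) * emit.get((t, words[0]), 1e-6), [t])
             for t in tags}]
    for w in words[1:]:
        col = {}
        for t in tags:
            score, path = max(
                (prev_score * trans.get((pt, t), 1e-6) * emit.get((t, w), 1e-6), prev_path + [t])
                for pt, (prev_score, prev_path) in best[-1].items())
            col[t] = (score, path)
        best.append(col)
    return max(best[-1].values())[1]

print(viterbi(["the", "dog", "barks"]))   # ['DET', 'NOUN', 'VERB'] with these toy tables
```

The tagger described in the file replaces the flat context probability p(t_i | t_{i-1}, ..., t_{i-k}) with a decision-tree estimate over tag attributes, but the decoding step stays the same.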
we can use various kernels, and the design of an appropriate kernel for a particular application is an important research issue. [figure 1: support vector machine -- m: positive example, o: negative example, circled m and o: support vectors] figure 1 shows a linearly separable case. the decision hyperplane defined by g(x) = 0 separates positive and negative examples by the largest margin. the solid line indicates the decision hyperplane and two parallel dotted lines indicate the margin between positive and negative examples. since such a separating hyperplane may not exist, a positive parameter c is introduced to allow misclassifications. see vapnik (1995). 1.2 svm-based ne recognition. as far as we know, the first svm-based ne system was proposed by yamada et al (2001) for japanese. his system is an extension of kudo's chunking system (kudo and matsumoto, 2001) that gave the best performance at the conll-2000 shared task. in their system, every word in a sentence is classified sequentially from the beginning or the end of a sentence. however, since yamada has not compared it with other methods under the same conditions, it is not clear whether his ne system is better or not. here, we show that our svm-based ne system is more accurate than conventional systems. our system uses the viterbi search (allen, 1995) instead of sequential determination. for training, we use 'crl data', which was prepared for irex (information retrieval and extraction exercise, sekine and eriguchi (2000)). it has about 19,000 nes in 1,174 articles. we also use additional data by isozaki (2001). both datasets are based on mainichi newspaper's 1994 and 1995 cd-roms. we use irex's formal test data called general that has 1,510 named entities in 71 articles from mainichi newspaper of 1999. systems are compared in terms of general's f-measure (http://cs.nyu.edu/cs/projects/proteus/irex), which is the harmonic mean of 'recall' and 'precision' and is defined as follows. recall = m/(the number of correct nes), precision = m/(the number of nes extracted by a system), where m is the number of nes correctly extracted and classified by the system. we developed an svm-based ne system by following our ne system based on maximum entropy (me) modeling (isozaki, 2001). we simply replaced the me model with svm classifiers. the above datasets are processed by a morphological analyzer chasen 2.2.1. it tokenizes a sentence into words and adds pos tags. chasen uses about 90 pos tags such as common-noun and location-name. since most unknown words are proper nouns, chasen's parameters for unknown words are modified for better results. then, a character type tag is added to each word. it uses 17 character types such as all-kanji and small integer. see isozaki (2001) for details. now, japanese ne recognition is solved by the classification of words (sekine et al, 1998; borthwick, 1999; uchimoto et al, 2000). for instance, the words in 'president george herbert bush said clinton is ...' are classified as follows: 'president' = other, 'george' = person-begin, 'herbert' = person-middle, 'bush' = person-end, 'said' = other, 'clinton' = person-single, 'is' = other. in this way, the first word of a person's name is labeled as person-begin. the last word is labeled as person-end. other words in the name are person-middle. if a person's name is expressed by a single word, it is labeled as person-single. if a word does not belong to any named entities, it is labeled as other.
since irex defines eight ne classes, words are classified into 33 (= 8 x 4 + 1) categories. each sample is represented by 15 features because each word has three features (part-of-speech tag, character type, and the word itself), and two preceding words and two succeeding words are also used for context dependence. although infrequent features are usually removed to prevent overfitting, we use all features because svms are robust. each sample is represented by a long binary vector, i.e., a sequence of 0 (false) and 1 (true). for instance, 'bush' in the above example is represented by a vector x = (x[1], x[2], ...) described below (chasen is available at http://chasen.aist-nara.ac.jp/). only 15 elements are 1. x[1] = 0 // current word is not 'alice', x[2] = 1 // current word is 'bush', x[3] = 0 // current word is not 'charlie', ..., x[...] = 1 // current pos is a proper noun, x[...] = 0 // current pos is not a verb, ..., x[...] = 0 // previous word is not 'henry', x[...] = 1 // previous word is 'herbert', ... here, we have to consider the following problems. first, svms can solve only a two-class problem. therefore, we have to reduce the above multi-class problem to a group of two-class problems. second, we have to consider consistency among word classes in a sentence. for instance, a word classified as person-begin should be followed by person-middle or person-end. it implies that the system has to determine the best combinations of word classes from numerous possibilities. here, we solve these problems by combining existing methods. there are a few approaches to extend svms to cover k-class problems. here, we employ the 'one class versus all others' approach. that is, each classifier f_i(x) is trained to distinguish members of a class i from non-members. in this method, two or more classifiers may give +1 to an unseen vector or no classifier may give +1. one common way to avoid such situations is to compare the decision values g_i(x) and to choose the class index i of the largest g_i(x). the consistency problem is solved by the viterbi search. since svms do not output probabilities, we use the svm+sigmoid method (platt, 2000). that is, we use a sigmoid function s(x) = 1/(1 + exp(-beta x)) to map g_i(x) to a probability-like value. the output of the viterbi search is adjusted by a postprocessor for wrong word boundaries. the adjustment rules are also statistically determined (isozaki, 2001). 1.3 comparison of ne recognizers. we use a fixed value c = ... . f-measures are not very sensitive to c unless c is too small. when we used 1,038,986 training vectors, general's f-measure was 89.64% for c = ... and 90.03% for c = ... . we employ the quadratic kernel (d = 2) because it gives the best results. polynomial kernels of degree 1, 2, and 3 resulted in 83.03%, 88.31%, and 87.04% respectively when we used 569,994 training vectors. [figure 2: f-measures of ne systems (rg+dt, me, svm) plotted against the number of nes in the training data, starting from the crl data] figure 2 compares ne recognizers in terms of general's f-measures. 'svm' in the figure indicates f-measures of our system trained by kudo's tinysvm-0.07 with c = ... . it attained 85.04% when we used only crl data. 'me' indicates our me system and 'rg+dt' indicates a rule-based machine learning system (isozaki, 2001). according to this graph, 'svm' is better than the other systems. however, svm classifiers are too slow.
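To make the "one class versus all others" reduction and the sigmoid squashing concrete, here is a rough sketch that trains one binary SVM per label and picks the label with the largest sigmoid-mapped decision value. It uses scikit-learn's SVC with a quadratic kernel as a stand-in for TinySVM; the tiny dense feature vectors and labels are fabricated, and a real system would use very large sparse binary vectors plus a Viterbi search over the per-word scores.

```python
# One-vs-rest SVMs with a sigmoid over the decision values (toy data, sklearn as stand-in).
import numpy as np
from sklearn.svm import SVC

X = np.array([[1, 0, 0], [1, 1, 0], [0, 1, 1], [0, 0, 1],
              [1, 0, 1], [0, 1, 0], [1, 1, 1], [0, 0, 0]], dtype=float)
y = np.array(["PERSON-BEGIN", "PERSON-END", "OTHER", "OTHER",
              "PERSON-BEGIN", "PERSON-END", "OTHER", "OTHER"])

def sigmoid(z, beta=1.0):
    return 1.0 / (1.0 + np.exp(-beta * z))

# Train one binary classifier per class (class vs. everything else).
classifiers = {}
for label in np.unique(y):
    clf = SVC(kernel="poly", degree=2, C=0.1)      # quadratic kernel, soft margin
    clf.fit(X, (y == label).astype(int))
    classifiers[label] = clf

def classify(x):
    # Compare probability-like scores across the binary classifiers and take the argmax.
    scores = {label: sigmoid(clf.decision_function([x])[0])
              for label, clf in classifiers.items()}
    return max(scores, key=scores.get)

print(classify([1, 0, 1]))   # most likely "PERSON-BEGIN" with this toy setup
```

The speed problem discussed in the file comes from the sum over support vectors inside every decision function; the paper's contribution is a way to precompute that sum so classification no longer scales with the number of support vectors.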
famous svm-light 3.50 (joachims, 1999) took 1.2 days to classify 569,994 vectors derived from 2 mb documents. that is, it runs at only 19 bytes/sec. tinysvm's classifier seems best optimized among publicly available svm toolkits, but it still works at only 92 bytes/sec. named entity (ne) recognition is a task in which proper nouns and numerical information in a document are detected and classified into categories such as person, organization, and date. that is, it runs at only 19 bytes/sec. 'svm' is better than the other systems. however, svm classifiers are too slow. it is a key technology of information extraction and open-domain question answering (voorhees and harman, 2000). our svm-based ne recognizer attained f = 90.03%. we are building a trainable open-domain question answering system called saiqa-ii. in this paper, we show that an ne recognizer based on support vector machines (svms) gives better scores than conventional systems. according to this graph, 'svm' is better than the other systems. svms have given high performance in various classification tasks (joachims, 1998; kudo and matsumoto, 2001). famous svm-light 3.50 (joachims, 1999) took 1.2 days to classify 569,994 vectors derived from 2 mb documents. however, it turned out that off-the-shelf svm classifiers are too inefficient for ne recognition. 'me' indicates our me system and 'rg+dt' indicates a rule-based machine learning system (isozaki, 2001). the major reason is the inefficiency of svm classifiers. we also thank shigeru katagiri and ken-ichiro ishii for their support. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1114.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1114.txt new file mode 100644 index 0000000000000000000000000000000000000000..633e71276d9f8b002cf77af6f919e56d4d76d01f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1114.txt @@ -0,0 +1 @@ +semantic knowledge for particular domains is increasingly important in nlp. many applications such as word-sense disambiguation, information extraction and speech recognition all require lexicons. the coverage of hand-built lexical resources such as wordnet (fellbaum, 1998) has increased dramatically in recent years, but leaves several problems and challenges. coverage is poor in many critical, rapidly changing domains such as current affairs, medicine and technology, where much time is still spent by human experts employed to recognise and classify new terms. most languages remain poorly covered in comparison with english. hand-built lexical resources which cannot be automatically updated can often be simply misleading. for example, using wordnet to recognise that the word apple refers to a fruit or a tree is a grave error in the many situations where this word refers to a computer manufacturer, a sense which wordnet does not cover. for nlp to reach a wider class of applications in practice, the ability to assemble and update appropriate semantic knowledge automatically will be vital. this paper describes a method for arranging semantic information into a graph (bollobás, 1998), where the nodes are words and the edges (also called links) represent relationships between words. the paper is arranged as follows. section 2 reviews previous work on semantic similarity and lexical acquisition.
section 3 describes how the graph model was built from the pos-tagged british national corpus. section 4 describes a new incremental algorithm used to build categories of words step by step from the graph model. section 5 demonstrates this algorithm in action and evaluates the results against wordnet classes, obtaining state-of-the-art results. section 6 describes how the graph model can be used to recognise when words are polysemous and to obtain groups of words representative of the different senses.semantic knowledge for particular domains is increasingly important in nlp. section 6 describes how the graph model can be used to recognise when words are polysemous and to obtain groups of words representative of the different senses. so far we have presented a graph model built upon noun co-occurrence which performs much better than previously reported methods at the task of automatic lexical acquisition. (1: http://infomap.stanford.edu/graphs, 2: http://muchmore.dfki.de) [figure 1: automatically generated graph showing the word apple and semantically related nouns] this is an important task, because assembling and tuning lexicons for specific nlp systems is increasingly necessary. many applications such as word-sense disambiguation, information extraction and speech recognition all require lexicons. we present a new method for word-sense discrimination in section 6. section 5 demonstrates this algorithm in action and evaluates the results against wordnet classes, obtaining state-of-the-art results. most work on automatic lexical acquisition has been based at some point on the notion of semantic similarity. we now take a step further and present a simple method for not only assembling words with similar meanings, but for empirically recognising when a word has several meanings. in this section we give examples of lexical categories extracted by our method and evaluate them against the corresponding classes in wordnet.
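A minimal sketch of the kind of word graph this file describes: nodes are nouns and an edge links two nouns that co-occur in the same context. The toy "contexts" below are invented and stand in for co-occurrence data extracted from a POS-tagged corpus such as the BNC; only the standard library is used.

```python
# Toy noun co-occurrence graph: nodes are words, edges link words seen together.
from collections import defaultdict
from itertools import combinations

contexts = [
    ["apple", "banana", "pear"],
    ["apple", "pear", "cherry"],
    ["apple", "computer", "keyboard"],
    ["computer", "keyboard", "mouse"],
]

graph = defaultdict(set)
for nouns in contexts:
    for a, b in combinations(set(nouns), 2):
        graph[a].add(b)
        graph[b].add(a)

# Words that share many neighbours are candidates for the same lexical category,
# and a word whose neighbourhood splits into poorly connected parts (like "apple"
# here, fruits vs. hardware) is a candidate polysemous word.
print(sorted(graph["apple"]))
```

Incremental category building and word-sense discrimination, as described in the file, then amount to growing tightly connected clusters on this graph and noticing when a node belongs to more than one of them.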
when averaging over all elements in a cluster, the centroid of a cluster may be unduly influenced by elements that only marginally belong to the cluster or by elements that also belong to other clusters. for example, when clustering words, we can use the contexts of the words as features and group together the words that tend to appear in similar contexts. for instance, u.s. state names can be clustered this way because they tend to appear in the following contexts: (list a) ___ appellate court campaign in ___ ___ capital governor of ___ ___ driver's license illegal in ___ ___ outlaws sth. primary in ___ ___'s sales tax senator for ___ if we create a centroid of all the state names, the centroid will also contain features such as: (list b) ___'s airport archbishop of ___ ___'s business district fly to ___ ___'s mayor mayor of ___ ___'s subway outskirts of ___ because some of the state names (like new york and washington) are also names of cities. using a single representative from a cluster may be problematic too because each individual element has its own idiosyncrasies that may not be shared by other members of the cluster. in this paper, we propose a clustering algo rithm, cbc (clustering by committee), in which the centroid of a cluster is constructed by averaging the feature vectors of a subset of the cluster members. the subset is viewed as a committee that determines which other elements belong to the cluster. by carefully choosing committee members, the features of the centroid tend to be the more typical features of the target class. for example, our system chose the following committee members to compute the centroid of the state cluster: illinois, michigan, minnesota, iowa, wisconsin, indiana, nebraska and vermont. as a result, the centroid contains only features like those in list a. evaluating clustering results is a very difficult task. we introduce a new evaluation methodol ogy that is based on the editing distance between output clusters and classes extracted from wordnet (the answer key).we introduce a new evaluation methodol ogy that is based on the editing distance between output clusters and classes extracted from wordnet (the answer key). we presented a clustering algorithm, cbc, for automatically discovering concepts from text. broad-coverage lexical resources such as wordnet are extremely useful in applications such as word sense disambiguation (leacock, chodorow, miller 1998) and question answering (pasca and harabagiu 2001). clustering algorithms are generally categorized as hierarchical and partitional. as a result, the centroid contains only features like those in list a. evaluating clustering results is a very difficult task. however, they often include many rare senses while missing domain-specific senses. the parameters k and t are usually considered to be small numbers. this research was partly supported by natural sciences and engineering research council of canada grant ogp121338 and scholarship pgsb207797. we generated clusters from a news corpus using cbc and compared them with classes extracted from wordnet (miller 1990). test data. in hierarchical agglomerative algorithms, clusters are constructed by iteratively merging the most similar clusters. five of the 943 clusters discovered by cbc from s13403 along with their features with top-15 highest mutual information and the wordnet classes that have the largest intersection with each cluster. 
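To make the committee-based centroid idea concrete, the sketch below averages only hand-picked committee members and then scores every word against that centroid by cosine similarity, so a polysemous member such as "new york" cannot pull city-like features into the state centroid. The feature names, vectors and the committee itself are toy values rather than CBC output.

```python
# Committee-style centroid: average a chosen subset, then rank all words by cosine similarity.
import numpy as np

features = ["campaign in _", "governor of _", "senator for _", "fly to _", "mayor of _"]
vectors = {
    "illinois":  np.array([3.0, 4.0, 2.0, 0.0, 0.0]),
    "minnesota": np.array([2.0, 3.0, 3.0, 0.0, 0.0]),
    "iowa":      np.array([4.0, 2.0, 2.0, 0.0, 0.0]),
    "new york":  np.array([2.0, 3.0, 2.0, 5.0, 6.0]),   # also a city name
}

committee = ["illinois", "minnesota", "iowa"]
centroid = np.mean([vectors[w] for w in committee], axis=0)

def cosine(a, b):
    return float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))

for word, vec in vectors.items():
    print(f"{word:10s} similarity to state centroid: {cosine(vec, centroid):.2f}")
```

Averaging over all four words instead would give the centroid non-trivial weight on "fly to _" and "mayor of _", which is exactly the contamination the committee construction is meant to avoid.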
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1145.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1145.txt new file mode 100644 index 0000000000000000000000000000000000000000..16f08d4f05e12fdac3edd4b8ad86dec1cd3b24c0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1145.txt @@ -0,0 +1 @@ +the penn chinese treebank (ctb) is an ongoing project, with its objective being to create a segmented chinese corpus annotated with pos tags and syntactic brackets. the first installment of the project (ctb-i) consists of xinhua newswire between the years 1994 and 1998, totaling 100,000 words, fully segmented, pos-tagged and syntactically bracketed and it has been released to the public via the penn linguistic data consortium (ldc). the preliminary results of this phase of the project have been reported in xia et al (2000). currently the second installment of the project, the 400,000-word ctb-ii is being developed and is expected to be completed early in the year 2003. ctb-ii will follow the standards set up in the segmentation (xia 2000b), pos tagging (xia 2000a) and bracketing guidelines (xue and xia 2000) and it will use articles from peoples' daily, hong kong newswire and material translated into chinese from other languages in addition to the xinhua newswire used in ctb-i in an effort to diversify the sources. the availability of ctb-i changed our approach to ctb-ii considerably. due to the existence of ctb-i, we were able to train new automatic chinese language processing (clp) tools, which crucially use annotated corpora as training material. these tools are then used for preprocessing in the development of the ctb-ii. we also developed tools to control the quality of the corpus. in this paper, we will address three issues in the development of the chinese treebank: annotation speed, annotation accuracy and usability of the corpus. specifically, we attempt to answer four questions: (i) how do we speed up the annotation process, (ii) how do we maintain high quality, i.e. annotation accuracy and inter-annotator consistency during the annotation process, and (iii) for what purposes is the corpus applicable, and (iv) what are our future plans? although we will touch upon linguistic problems that are specific to chinese, we believe these issues are general enough for the development of any single language corpus. 1 annotation speed. there are three main factors that affect the annotation speed : annotators? background, guideline design and more importantly, the availability of preprocessing tools. we will discuss how each of these three factors affects annotation speed. 1.1 annotator background. even with the best sets of guidelines, it is important that annotators have received considerable training in linguistics, particularly in syntax. in both the segmentation/pos tagging phase and the syntactic bracketing phase, understanding the structure of the sentences is essential for correct annotation with reasonable speed. for example, for example, the penn chinese treebank (ctb) is an ongoing project, with its objective being to create a segmented chinese corpus annotated with pos tags and syntactic brackets. in both the segmentation/pos tagging phase and the syntactic bracketing phase, understanding the structure of the sentences is essential for correct annotation with reasonable speed. 
the first installment of the project (ctb-i) consists of xinhua newswire between the years 1994 and 1998, totaling 100,000 words, fully segmented, pos-tagged and syntactically bracketed and it has been released to the public via the penn linguistic data consortium (ldc). even with the best sets of guidelines, it is important that annotators have received considerable training in linguistics, particularly in syntax. the preliminary results of this phase of the project have been reported in xia et al (2000). 1.1 annotator background. currently the second installment of the project, the 400,000-word ctb-ii is being developed and is expected to be completed early in the year 2003. we will discuss how each of these three factors affects annotation speed. background, guideline design and more importantly, the availability of preprocessing tools. ctb-ii will follow the standards set up in the segmentation (xia 2000b), pos tagging (xia 2000a) and bracketing guidelines (xue and xia 2000) and it will use articles from peoples' daily, hong kong newswire and material translated into chinese from other languages in addition to the xinhua newswire used in ctb-i in an effort to diversify the sources. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1150.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1150.txt new file mode 100644 index 0000000000000000000000000000000000000000..0004d2193a7c548e096aab08e4501723ab51d238 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-1150.txt @@ -0,0 +1 @@ +open-domain question answering (lehnert, 1986; harabagiu et al, 2001; light et al, 2001) and storycomprehension (hirschman et al, 1999) have become important directions in natural language pro cessing. question answering is a retrieval task morechallenging than common search engine tasks be cause its purpose is to find an accurate and conciseanswer to a question rather than a relevant docu ment. the difficulty is more acute in tasks such as story comprehension in which the target text is less likely to overlap with the text in the questions. for this reason, advanced natural language techniques rather than simple key term extraction are needed.one of the important stages in this process is analyz ing the question to a degree that allows determining the ?type? of the sought after answer. in the treccompetition (voorhees, 2000), participants are requested to build a system which, given a set of en glish questions, can automatically extract answers (a short phrase) of no more than 50 bytes from a5-gigabyte document library. participants have re research supported by nsf grants iis-9801638 and itr iis 0085836 and an onr muri award. alized that locating an answer accurately hinges on first filtering out a wide range of candidates (hovy et al, 2001; ittycheriah et al, 2001) based on some categorization of answer types. this work develops a machine learning approach to question classification (qc) (harabagiu et al, 2001; hermjakob, 2001). our goal is to categorize questions into different semantic classes that impose constraints on potential answers, so that they can be utilized in later stages of the question answeringprocess. 
for example, when considering the question q: what canadian city has the largest popula tion?, the hope is to classify this question as havinganswer type city, implying that only candidate an swers that are cities need consideration.based on the snow learning architecture, we develop a hierarchical classifier that is guided by a lay ered semantic hierarchy of answer types and is able to classify questions into fine-grained classes. wesuggest that it is useful to consider this classifica tion task as a multi-label classification and find that it is possible to achieve good classification results(over 90%) despite the fact that the number of dif ferent labels used is fairly large, 50. we observe thatlocal features are not sufficient to support this accu racy, and that inducing semantic features is crucial for good performance. the paper is organized as follows: sec. 2 presents the question classification problem; sec. 3 discusses the learning issues involved in qc and presents ourlearning approach; sec. 4 describes our experimen tal study.this paper presents a machine learning approach to question classification. 4 describes our experimen tal study. open-domain question answering (lehnert, 1986; harabagiu et al, 2001; light et al, 2001) and storycomprehension (hirschman et al, 1999) have become important directions in natural language pro cessing. in future work we plan to investigate further the application of deeper semantic analysis (including better named entity and semantic categorization) to feature extraction, automate the generation of thesemantic features and develop a better understand ing to some of the learning issues involved in thedifference between a flat and a hierarchical classi fier. 3 discusses the learning issues involved in qc and presents ourlearning approach; sec. question answering is a retrieval task morechallenging than common search engine tasks be cause its purpose is to find an accurate and conciseanswer to a question rather than a relevant docu ment. we designed two experiments to test the accuracy ofour classifier on trec questions. we define question classification(qc) here to be the task that, given a question, maps it to one of k classes, which provide a semantic constraint on the sought-after answer1. the ambiguity causes the classifier not to output equivalent term as the first choice. the first experi ment evaluates the contribution of different featuretypes to the quality of the classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-2025.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-2025.txt new file mode 100644 index 0000000000000000000000000000000000000000..a4f839364064fbf34af28cc6a2df4ad1ef3f58ae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C02-2025.txt @@ -0,0 +1 @@ +for the past decade or more, symbolic, linguistically ori- ented methods and statistical or machine learning ap- proaches to nlp have often been perceived as incompat- ible or even competing paradigms. while shallow and probabilistic processing techniques have produced use- ful results in many classes of applications, they have not met the full range of needs for nlp, particularly where precise interpretation is important, or where the variety of linguistic expression is large relative to the amount of training data available. 
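A bare-bones illustration of the task as defined above: map a question to one of k answer-type classes. This uses a flat bag-of-words logistic regression from scikit-learn rather than the hierarchical SNoW classifier of the paper, and the six training questions and three classes are invented placeholders for the 50-class setup described in the file.

```python
# Toy question classifier: bag-of-words features, one of three answer-type classes.
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import make_pipeline

questions = [
    "What Canadian city has the largest population ?",
    "What city hosted the 1988 winter Olympics ?",
    "Who wrote the opera Carmen ?",
    "Who is the president of France ?",
    "When did the Berlin wall fall ?",
    "When was the telephone invented ?",
]
labels = ["CITY", "CITY", "PERSON", "PERSON", "DATE", "DATE"]

clf = make_pipeline(CountVectorizer(ngram_range=(1, 2)),
                    LogisticRegression(max_iter=1000))
clf.fit(questions, labels)

print(clf.predict(["What city has the busiest airport ?"]))   # ['CITY'] with these toy examples
```

The paper's point is that surface n-grams like these are not enough at 50 fine-grained classes; induced semantic features (named-entity and semantically related word classes) are what push the accuracy above 90%.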
on the other hand, deep approaches to nlp have only recently achieved broad enough grammatical coverage and sufficient processing efficiency to allow the use of precise linguistic grammars in certain types of real-world applications. in particular, applications of broad-coverage analyti- cal grammars for parsing or generation require the use of sophisticated statistical techniques for resolving ambigu- ities; the transfer of head-driven phrase structure gram- mar (hpsg) systems into industry, for example, has am- plified the need for general parse ranking, disambigua- tion, and robust recovery techniques. we observe general consensus on the necessity for bridging activities, com- bining symbolic and stochastic approaches to nlp. but although we find promising research in stochastic pars- ing in a number of frameworks, there is a lack of appro- priately rich and dynamic language corpora for hpsg. likewise, stochastic parsing has so far been focussed on information-extraction-type applications and lacks any depth of semantic interpretation. the redwoods initia- tive is designed to fill in this gap. in the next section, we present some of the motivation for the lingo redwoods project as a treebank develop- ment process. although construction of the treebank is in its early stages, we present in section 3 some prelim- inary results of using the treebank data already acquired on concrete applications. we show, for instance, that even simple statistical models of parse ranking trained on the redwoods corpus built so far can disambiguate parses with close to 80% accuracy. 2 a rich and dynamic treebank the redwoods treebank is based on open-source hpsg resources developed by a broad consortium of re- search groups including researchers at stanford (usa), saarbru?cken (germany), cambridge, edinburgh, and sussex (uk), and tokyo (japan). their wide distribution and common acceptance make the hpsg framework and resources an excellent anchor point for the redwoods treebanking initiative. the key innovative aspect of the redwoods ap- proach to treebanking is the anchoring of all linguis- tic data captured in the treebank to the hpsg frame- work and a generally-available broad-coverage gram- mar of english, the lingo english resource grammar (flickinger, 2000) as implemented with the lkb gram- mar development environment (copestake, 2002). un- like existing treebanks, there is no need to define a (new) form of grammatical representation specific to the tree- bank.the lingo redwoods treebank motivation and preliminary applications stephan oepen, kristina toutanova, stuart shieber, christopher manning, dan flickinger, and thorsten brants {oe |kristina |manning |dan}@csli.stanford.edu, shieber@deas.harvard.edu, brants@parc.xerox.com abstract the lingo redwoods initiative is a seed activity in the de- sign and development of a new type of treebank. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1010.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1010.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1c08a6f78a0ba551fb3912374f6ab563240b01c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1010.txt @@ -0,0 +1 @@ +there has been a steadily increasing interest in syntactic parsing based on dependency analysis in re cent years. 
one important reason seems to be that dependency parsing offers a good compromise between the conflicting demands of analysis depth, on the one hand, and robustness and efficiency, on the other. thus, whereas a complete dependency structure provides a fully disambiguated analysis of a sentence, this analysis is typically less complex than in frameworks based on constituent analysis and can therefore often be computed deterministically with reasonable accuracy. deterministic methods for dependency parsing have now been applied to a variety of languages, including japanese (kudo and matsumoto, 2000), english (yamada and matsumoto, 2003), turkish (oflazer, 2003), and swedish (nivre et al, 2004). for english, the interest in dependency parsing has been weaker than for other languages. to some extent, this can probably be explained by the strong tradition of constituent analysis in anglo-american linguistics, but this trend has been reinforced by the fact that the major treebank of american english, the penn treebank (marcus et al, 1993), is annotated primarily with constituent analysis. on the other hand, the best available parsers trained on the penn treebank, those of collins (1997) and charniak (2000), use statistical models for disambiguation that make crucial use of dependency relations. moreover, the deterministic dependency parser of yamada and matsumoto (2003), when trained on the penn treebank, gives a dependency accuracy that is almost as good as that of collins (1997) and charniak (2000). the parser described in this paper is similar to that of yamada and matsumoto (2003) in that it uses a deterministic parsing algorithm in combination with a classifier induced from a treebank. however, there are also important differences between the two approaches. first of all, whereas yamada and matsumoto employ a strict bottom-up algorithm (essentially shift-reduce parsing) with multiple passes over the input, the present parser uses the algorithm proposed in nivre (2003), which combines bottom-up and top-down processing in a single pass in order to achieve incrementality. this also means that the time complexity of the algorithm used here is linear in the size of the input, while the algorithm of yamada and matsumoto is quadratic in the worst case. another difference is that yamada and matsumoto use support vector machines (vapnik, 1995), while we instead rely on memory-based learning (daelemans, 1999). most importantly, however, the parser presented in this paper constructs labeled dependency graphs, i.e. dependency graphs where arcs are labeled with dependency types. as far as we know, this makes it different from all previous systems for dependency parsing applied to the penn treebank (eisner, 1996; yamada and matsumoto, 2003), although there are systems that extract labeled grammatical relations based on shallow parsing, e.g. buchholz (2002). the fact that we are working with labeled dependency graphs is also one of the motivations for choosing memory-based learning over support vector machines, since we require a multi-class classifier. even though it is possible to use svm for multi-class classification, this can get cumbersome when the number of classes is large. [figure 1: dependency graph for an english sentence, 'the finger-pointing has already begun .', with dep, np-sbj, advp and vp arcs] (for the unlabeled dependency parser of yamada and matsumoto (2003) the classification problem only involves three classes.)
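The single-pass, classifier-guided parsing strategy described above can be sketched with a simple transition system. The code below uses plain arc-standard transitions rather than Nivre's exact incremental algorithm, and it replaces the trained memory-based classifier with a precomputed action sequence for the one toy sentence taken from the figure; in a real parser a classifier would choose the next transition from features of the current stack and input.

```python
# Schematic transition-based dependency parsing with an oracle action sequence.
sentence = ["the", "finger-pointing", "has", "begun"]
actions = ["SHIFT", "SHIFT", "LEFT-ARC",   # the <- finger-pointing
           "SHIFT", "LEFT-ARC",            # finger-pointing <- has
           "SHIFT", "RIGHT-ARC"]           # has -> begun

def parse(words, oracle_actions):
    stack, buffer, arcs = [], list(words), []
    for action in oracle_actions:
        if action == "SHIFT":
            stack.append(buffer.pop(0))
        elif action == "LEFT-ARC":          # the top of the stack governs the item below it
            arcs.append((stack[-1], stack[-2]))
            del stack[-2]
        else:                               # RIGHT-ARC: the item below the top governs the top
            arcs.append((stack[-2], stack[-1]))
            stack.pop()
    return arcs

for head, dependent in parse(sentence, actions):
    print(f"{head} -> {dependent}")
```

Because each word triggers a bounded number of transitions, the whole parse is a single left-to-right pass, which is where the linear time bound mentioned above comes from; labeled parsing simply makes the classifier predict a transition together with a dependency label.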
the parsing methodology investigated here haspreviously been applied to swedish, where promis ing results were obtained with a relatively smalltreebank (approximately 5000 sentences for train ing), resulting in an attachment score of 84.7% and a labeled accuracy of 80.6% (nivre et al, 2004).1 however, since there are no comparable resultsavailable for swedish, it is difficult to assess the significance of these findings, which is one of the reasons why we want to apply the method to a bench mark corpus such as the the penn treebank, even though the annotation in this corpus is not ideal for labeled dependency parsing.the paper is structured as follows. section 2 describes the parsing algorithm, while section 3 ex plains how memory-based learning is used to guidethe parser. experimental results are reported in sec tion 4, and conclusions are stated in section 5.sentences whose unlabeled dependency structure is completely correct (yamada and mat sumoto, 2003). there has been a steadily increasing interest in syntactic parsing based on dependency analysis in re cent years. experimental results are reported in sec tion 4, and conclusions are stated in section 5. the conversion of the penn tree bank to dependency trees has been performed using head rules kindly provided by hiroyasu yamada and yuji matsumoto. the memory-based classifiers used in the experiments have been constructed using thetilburg memory-based learner (timbl) (daelemans et al, 2003). all metrics except cm are calculated as meanscores per word, and punctuation tokens are con sistently excluded.table 1 shows the attachment score, both unla beled and labeled, for the two different state models with the two different label sets. first of all, we see that model 1 gives better accuracy than model 2 with the smaller label set g, which confirms our expectations that the added part-of-speech featuresare helpful when the dependency labels are less informative. one important reason seems to be thatdependency parsing offers a good compromise be tween the conflicting demands of analysis depth, on the one hand, and robustness and efficiency, on the other. section 2 describes the parsing algorithm, while section 3 ex plains how memory-based learning is used to guidethe parser. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1024.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1024.txt new file mode 100644 index 0000000000000000000000000000000000000000..b01d4f2048a4b13940ab54c5d0411e075439f156 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1024.txt @@ -0,0 +1 @@ +large context-free grammars extracted from tree banks achieve high coverage and accuracy, but they are difficult to parse with because of their massive ambiguity. the application of standard chart-parsing techniques often fails due to excessive memory and runtime requirements.treebank grammars are mostly used as probabilis tic grammars and users are usually only interested in the best analysis, the viterbi parse. to speed up viterbi parsing, sophisticated search strategies havebeen developed which find the most probable anal ysis without examining the whole set of possible analyses (charniak et al, 1998; klein and manning,2003a). these methods reduce the number of gener ated edges, but increase the amount of time needed for each edge. 
the parser described in this paper follows a contrary approach: instead of reducing the number of edges, it minimises the costs of building edges in terms of memory and runtime.the new parser, called bitpar, is based on a bit vector implementation (cf. (graham et al, 1980)) of the well-known cocke-younger-kasami (cky) algorithm (kasami, 1965; younger, 1967). it buildsa compact ?parse forest? representation of all anal yses in two steps. in the first step, a cky-style recogniser fills the chart with constituents. in the second step, the parse forest is built top-down from the chart. viterbi parses are computed in four steps. again, the first step is a cky recogniser which is followed by a top-down filtering of the chart, the bottom-up computation of the viterbi probabilities, and the top-down extraction of the best parse.the rest of the paper is organised as follows: sec tion 2 explains the transformation of the grammar to chomsky normal form. the following sectionsdescribe the recogniser algorithm (sec. 3), improvements of the recogniser by means of bit-vector op erations (sec. 4), and the generation of parse forests(sec. 5), and viterbi parses (sec. 6). section 7 discusses the advantages of the new architecture, sec tion 8 describes experimental results, and section 9 summarises the paper.large context-free grammars extracted from tree banks achieve high coverage and accuracy, but they are difficult to parse with because of their massive ambiguity. section 7 discusses the advantages of the new architecture, sec tion 8 describes experimental results, and section 9 summarises the paper. the cky algorithm requires a grammar in chom sky normal form where the right-hand side of eachrule either consists of two non-terminals or a single terminal symbol. (the rule a 5), and viterbi parses (sec. the application of standard chart-parsing techniques often fails due to excessive memory and runtime requirements.treebank grammars are mostly used as probabilis tic grammars and users are usually only interested in the best analysis, the viterbi parse. boring symbols on the right-hand sides of rules. 4), and the generation of parse forests(sec. bitpar uses a modified ver sion of the cky algorithm allowing also chain rules (rules with a single non-terminal on the right-handside). to speed up viterbi parsing, sophisticated search strategies havebeen developed which find the most probable anal ysis without examining the whole set of possible analyses (charniak et al, 1998; klein and manning,2003a). 3), improvements of the recogniser by means of bit-vector op erations (sec. these methods reduce the number of gener ated edges, but increase the amount of time needed for each edge. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1041.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1041.txt new file mode 100644 index 0000000000000000000000000000000000000000..6178a7fd1c055fb1425b81ed79a798ed21789147 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1041.txt @@ -0,0 +1 @@ +lexicalised grammar formalisms such as lexicalized tree adjoining grammar (ltag) and com binatory categorial grammar (ccg) assign one or more syntactic structures to each word in a sentencewhich are then manipulated by the parser. supertag ging was introduced for ltag as a way of increasingparsing efficiency by reducing the number of struc tures assigned to each word (bangalore and joshi, 1999). 
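To make the chart representation concrete, here is one simple way to use bit vectors in a CKY recogniser: each chart cell is a single integer whose bits mark which nonterminals span that substring. This is an illustrative layout under assumed data structures, not necessarily bitpar's actual one, and the grammar and sentence are toy examples.

# toy CKY recognition with bit-vector chart cells
NONTERMS = ["S", "NP", "VP", "V", "N", "Det"]
ID = {s: i for i, s in enumerate(NONTERMS)}
BINARY = [("S", "NP", "VP"), ("NP", "Det", "N"), ("VP", "V", "NP")]   # A -> B C
LEXICAL = {"the": ["Det"], "dog": ["N"], "cat": ["N"], "saw": ["V"]}  # A -> word

def cky_recognise(words):
    n = len(words)
    # chart[i][j] is a bit vector (Python int) of nonterminals spanning words[i:j]
    chart = [[0] * (n + 1) for _ in range(n + 1)]
    for i, w in enumerate(words):
        for a in LEXICAL.get(w, []):
            chart[i][i + 1] |= 1 << ID[a]
    for span in range(2, n + 1):
        for i in range(0, n - span + 1):
            j = i + span
            cell = 0
            for k in range(i + 1, j):
                left, right = chart[i][k], chart[k][j]
                if not left or not right:
                    continue
                for a, b, c in BINARY:
                    if (left >> ID[b]) & 1 and (right >> ID[c]) & 1:
                        cell |= 1 << ID[a]
            chart[i][j] = cell
    return bool((chart[0][n] >> ID["S"]) & 1)

print(cky_recognise("the dog saw the cat".split()))   # True

Packing a whole cell into one machine word is what makes the set operations (union, membership) cheap, which is the spirit of the bit-vector optimisation discussed here.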
supertagging has more recently been applied to ccg (clark, 2002; curran and clark, 2003).supertagging accuracy is relatively high for man ually constructed ltags (bangalore and joshi,1999). however, for ltags extracted automati cally from the penn treebank, performance is much lower (chen et al, 1999; chen et al, 2002). in fact, performance for such grammars is below that needed for successful integration into a full parser (sarkar et al, 2000). in this paper we demonstratethat ccg supertagging accuracy is not only sufficient for accurate and robust parsing using an auto matically extracted grammar, but also offers several practical advantages. our wide-coverage ccg parser uses a log-linear model to select an analysis. the model paramaters are estimated using a discriminative method, that is,one which requires all incorrect parses for a sentence as well as the correct parse. since an auto matically extracted ccg grammar can produce anextremely large number of parses, the use of a su pertagger is crucial in limiting the total number of parses for the training data to a computationally manageable number. the supertagger is also crucial for increasing thespeed of the parser. we show that spectacular in creases in speed can be obtained, without affectingaccuracy or coverage, by tightly integrating the su pertagger with the ccg grammar and parser. to achieve maximum speed, the supertagger initially assigns only a small number of ccg categories toeach word, and the parser only requests more cate gories from the supertagger if it cannot provide an analysis. we also demonstrate how extra constraints on the category combinations, and the application of beam search using the parsing model, can further increase parsing speed.this is the first work we are aware of to succes fully integrate a supertagger with a full parser which uses a lexicalised grammar automatically extractedfrom the penn treebank. we also report signifi cantly higher parsing speeds on newspaper text than any previously reported for a full wide-coverage parser. our results confirm that wide-coverage ccg parsing is feasible for many large-scale nlp tasks.our results confirm that wide-coverage ccg parsing is feasible for many large-scale nlp tasks. this paper has shown that by tightly integrating a supertagger with a ccg parser, very fast parse times can be achieved for penn treebank wsj text. this research was supported by epsrc grant gr/m96889, and a commonwealth scholarship and a sydney university travelling scholarship to the second author. lexicalised grammar formalisms such as lexicalized tree adjoining grammar (ltag) and com binatory categorial grammar (ccg) assign one or more syntactic structures to each word in a sentencewhich are then manipulated by the parser. we also report signifi cantly higher parsing speeds on newspaper text than any previously reported for a full wide-coverage parser. supertag ging was introduced for ltag as a way of increasingparsing efficiency by reducing the number of struc tures assigned to each word (bangalore and joshi, 1999). the previous section showed how to combine the supertagger and parser for the purpose of creating training data, assuming the correct category for each word is known. the best speeds we have reported for the ccg parser are an order of magnitude faster. to give one example, the number of categories in the tag dictionary?s entry for the wordis is 45 (only considering categories which have appeared at least 10 times in the training data). 
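The adaptive supertagger/parser loop described above can be sketched as follows: the supertagger first assigns few categories per word, and the parser asks for more (a lower beta) only if it cannot build an analysis. The lexicon probabilities, beta levels, and the stand-in "parser" below are invented for illustration.

# sketch of adaptive supertagging: start restrictive, relax only on failure
TOY_LEXICON = {
    "I":   {"NP": 0.9, "N": 0.1},
    "saw": {"N": 0.6, "(S\\NP)/NP": 0.3, "(S\\NP)/PP": 0.1},
    "her": {"NP": 0.5, "NP/N": 0.5},
}

def supertag(words, beta):
    """Keep every category whose probability is within a factor beta of the best."""
    tags = []
    for w in words:
        dist = TOY_LEXICON[w]
        best = max(dist.values())
        tags.append({c for c, p in dist.items() if p >= beta * best})
    return tags

def toy_parse(words, tags):
    """Stand-in parser: 'succeeds' only if a transitive verb category and two NPs survive."""
    cats = set().union(*tags)
    ok = "(S\\NP)/NP" in cats and sum("NP" in t for t in tags) >= 2
    return "parsed" if ok else None

def parse_adaptively(words, betas=(0.9, 0.4, 0.1)):
    for beta in betas:
        result = toy_parse(words, supertag(words, beta))
        if result is not None:
            return beta, result
    return None

print(parse_adaptively(["I", "saw", "her"]))   # fails at beta=0.9, succeeds at beta=0.4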
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1051.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1051.txt new file mode 100644 index 0000000000000000000000000000000000000000..2ac81516981159539fdec12ff3cb7a35a43b37f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1051.txt @@ -0,0 +1 @@ +the importance of learning to manipulate monolingual paraphrase relationships for applications like summarization, search, and dialog has been highlighted by a number of recent efforts (barzilay & mckeown 2001; shinyama et al 2002; lee & barzilay 2003; lin & pantel 2001). while several different learning methods have been applied to this problem, all share a need for large amounts of data in the form of pairs or sets of strings that are likely to exhibit lexical and/or structural paraphrase alternations. one approach1 1 an alternative approach involves identifying anchor points--pairs of words linked in a known way--and collecting the strings that intervene. (shinyama, et al 2002; lin & pantel 2001). since our interest is in that has been successfully used is edit distance, a measure of similarity between strings. the assumption is that strings separated by a small edit distance will tend to be similar in meaning: the leading indicators measure the economy? the leading index measures the economy?. lee & barzilay (2003), for example, use multi sequence alignment (msa) to build a corpus of paraphrases involving terrorist acts. their goal is to extract sentential templates that can be used in high-precision generation of paraphrase alter nations within a limited domain. our goal here is rather different: our interest lies in constructing a monolingual broad-domain corpus of pairwise aligned sentences. such data would be amenable to conventional statistical machine translation (smt) techniques (e.g., those discussed in och & ney 2003).2 in what follows we compare two strategies for unsupervised construction of such a corpus, one employing string similarity and the other associating sentences that may overlap very little at the string level. we measure the relative utility of the two derived monolingual corpora in the context of word alignment techniques developed originally for bilingual text. we show that although the edit distance corpus is well-suited as training data for the alignment algorithms currently used in smt, it is an incomplete source of information about paraphrase relations, which exhibit many of the characteristics of comparable bilingual corpora or free translations. many of the more complex alternations that characterize monolingual paraphrase, such as large-scale lexical alternations and constituent reorderings, are not readily learning sentence level paraphrases, including major constituent reorganizations, we do not address this approach here. 2 barzilay & mckeown (2001) consider the possibility of using smt machinery, but reject the idea because of the noisy, comparable nature of their dataset. captured by edit distance techniques, which conflate semantic similarity with formal similarity. we conclude that paraphrase research would benefit by identifying richer data sources and developing appropriate learning techniques.edit distance identifies sentence pairs that exhibit lexical and short phrasal alternations that can be aligned with considerable success. 
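For concreteness, the string-similarity signal mentioned above can be computed with a standard word-level edit distance (Levenshtein) dynamic program; the example pair is the one quoted in the passage.

# word-level edit distance between two sentences
def edit_distance(a, b):
    a, b = a.split(), b.split()
    # dp[i][j] = cost of turning the first i words of a into the first j words of b
    dp = [[0] * (len(b) + 1) for _ in range(len(a) + 1)]
    for i in range(len(a) + 1):
        dp[i][0] = i
    for j in range(len(b) + 1):
        dp[0][j] = j
    for i in range(1, len(a) + 1):
        for j in range(1, len(b) + 1):
            sub = 0 if a[i - 1] == b[j - 1] else 1
            dp[i][j] = min(dp[i - 1][j] + 1,        # delete a word
                           dp[i][j - 1] + 1,        # insert a word
                           dp[i - 1][j - 1] + sub)  # keep or substitute
    return dp[-1][-1]

print(edit_distance("the leading indicators measure the economy",
                    "the leading index measures the economy"))   # 2

Pairs with a small distance, like this one, are the kind of lexically close paraphrase candidates that edit-distance harvesting finds; larger rewrites are exactly what it misses.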
we conclude that paraphrase research would benefit by identifying richer data sources and developing appropriate learning techniques. we remain, however, responsible for all content. given a large dataset and a well-motivated clustering of documents, useful datasets can be gleaned even without resorting to more sophisticated techniques (figure 2). the importance of learning to manipulate monolingual paraphrase relationships for applications like summarization, search, and dialog has been highlighted by a number of recent efforts (barzilay & mckeown 2001; shinyama et al 2002; lee & barzilay 2003; lin & pantel 2001). captured by edit distance techniques, which conflate semantic similarity with formal similarity. we have also benefited from discussions with ken church, mark johnson, daniel marcu and franz och. while several different learning methods have been applied to this problem, all share a need for large amounts of data in the form of pairs or sets of strings that are likely to exhibit lexical and/or structural paraphrase alternations. our two paraphrase datasets are distilled from a corpus of news articles gathered from thousands of news sources over an extended period. to explore some of the differences between the training sets, we hand-examined a random sample of sentence pairs from each corpus type. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1059.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1059.txt new file mode 100644 index 0000000000000000000000000000000000000000..16a22e4d82943d635620863fa5b2ea5fbac7a2d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1059.txt @@ -0,0 +1 @@ +language models (lm) are applied in many natural language processing applications, such as speech recognition and machine translation, to encapsulate syntactic, semantic and pragmatic information. for systems which learn from given data we frequently observe a severe drop in performance when moving to a new genre or new domain. in speech recognition a number of adaptation techniques have been developed to cope with this situation. in statistical machine translation we have a similar situation, i.e. estimate the model parameter from some data, and use the system to translate sentences which may not be well covered by the training data. therefore, the potential of adaptation techniques needs to be explored for machine translation applications. statistical machine translation is based on the noisy channel model, where the translation hypothesis is searched over the space defined by a translation model and a target language (brown et al, 1993). statistical machine translation can be formulated as follows: t* = argmax_t p(t|s) = argmax_t p(s|t) p(t), where t is the target sentence, and s is the source sentence. p(t) is the target language model and p(s|t) is the translation model. the argmax operation is the search, which is done by the decoder. in the current study we modify the target language model p(t), to represent the test data better, and thereby improve the translation quality. (janiszek, et al 2001) list the following approaches to language model adaptation: linear interpolation of a general and a domain specific model (seymore, rosenfeld, 1997). back off of domain specific probabilities with those of a specific model (besling, meier, 1995).
retrieval of documents pertinent to the new domain and training a language model on-line with those data (iyer, ostendorf, 1999, mahajan et. al. 1999). maximum entropy, minimum discrimination adaptation (chen, et. al., 1998). adaptation by linear transformation of vectors of bigram counts in a reduced space (demori, federico, 1999). smoothing and adaptation in a dual space via latent semantic analysis, modeling long-term semantic dependencies, and trigger combinations. (j. bellegarda, 2000). our approach can be characterized as unsupervised data augmentation by retrieval of relevant documents from large monolingual corpora, and interpolation of the specific language model, build from the retrieved data, with a background language model. to be more specific, the following steps are carried out to do the language model adaptation. first, a baseline statistical machine translation system, using a large general language model, is applied to generate initial translations. then these translations hypotheses are reformulated as queries to retrieve similar sentences from a very large text collection. a small domain specific language model is build using the retrieved sentences and linearly interpolated with the background language model. this new interpolated language model in applied in a second decoding run to produce the final translations. there are a number of interesting questions pertaining to this approach: ? which information can and should used to generate the queries: the first-best translation only, or also translation alternatives. how should we construct the queries, just as simple bag-of-words, or can we incorporate more structure to make them more powerful. how many documents should be retrieved to build the specific language models, and on what granularity should this be done, i.e. what is a document in the information retrieval process. the paper is structured as follows: section 2 outlines the sentence retrieval approach, and three bag-of-words query models are designed and explored; structured query models are introduced in section 3. in section 4 we present translation experiments are presented for the different query. finally, summary is given in section 5.this might be especially useful for structured query models generated from the translation lattices. in this paper, we studied language model adaptation for statistical machine translation. language models (lm) are applied in many natural language processing applications, such as speech recognition and machine translation, to encapsulate syntactic, semantic and pragmatic information. finally, summary is given in section 5. in section 4 we present translation experiments are presented for the different query. for systems which learn from given data we frequently observe a severe drop in performance when moving to a new genre or new domain. in speech recognition a number of adaptation techniques have been developed to cope with this situation. the paper is structured as follows: section 2 outlines the sentence retrieval approach, and three bag-of-words query models are designed and explored; structured query models are introduced in section 3. our language model adaptation is an unsupervised data augmentation approach guided by query models. how many documents should be retrieved to build the specific language models, and on what granularity should this be done, i.e. what is a document in the information retrieval process. in statistical machine translation we have a similar situation, i.e. 
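The interpolation step in the adaptation pipeline described above can be sketched very simply: a small language model estimated from the retrieved sentences is linearly combined with the large background model. The unigram "models", sentences, and lambda value below are toy stand-ins, not trained components.

# sketch of linear LM interpolation for domain adaptation
from collections import Counter

def unigram_lm(sentences):
    counts = Counter(w for s in sentences for w in s.split())
    total = sum(counts.values())
    return lambda w: counts[w] / total if total else 0.0

background = unigram_lm(["the economy grew last year", "stocks fell sharply"])
retrieved  = unigram_lm(["the central bank raised rates", "rates rose again"])

def adapted_prob(word, lam=0.3):
    # p_adapted(w) = lam * p_specific(w) + (1 - lam) * p_background(w)
    return lam * retrieved(word) + (1.0 - lam) * background(word)

print(adapted_prob("rates"))    # boosted by the retrieved, domain-specific data
print(adapted_prob("economy"))  # still covered by the background model

In the pipeline described above, this adapted model replaces p(t) in the second decoding pass.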
estimate the model parameter from some data, and use the system to translate sentences which may not be well covered by the training data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1072.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1072.txt new file mode 100644 index 0000000000000000000000000000000000000000..102b8349447aec0b9f8b141dd6a9eff1872724c4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1072.txt @@ -0,0 +1 @@ +to automatically evaluate machine translations, the machine translation community recently adopted an n-gram co-occurrence scoring procedure bleu (papineni et al 2001). a similar metric, nist, used by nist (nist 2002) in a couple of machine translation evaluations in the past two years is based on bleu. the main idea of bleu is to measure the translation closeness between a candidate translation and a set of reference translations with a numerical metric. although the idea of using objective functions to automatically evaluate machine translation quality is not new (su et al 1992), the success of bleu prompts a lot of interests in developing better automatic evaluation metrics. for example, akiba et al (2001) proposed a metric called red based on edit distances over a set of multiple references. nie?en et al (2000) calculated the length normalized edit distance, called word error rate (wer), between a candidate and multiple reference translations. leusch et al (2003) proposed a related measure called position independent word error rate (per) that did not consider word position, i.e. using bag-of-words instead. turian et al (2003) introduced general text matcher (gtm) based on accuracy measures such as recall, precision, and f-measure. with so many different automatic metrics available, it is necessary to have a common and objective way to evaluate these metrics. comparison of automatic evaluation metrics are usually conducted on corpus level using correlation analysis between human scores and automatic scores such as bleu, nist, wer, and per. however, the performance of automatic metrics in terms of human vs. system correlation analysis is not stable across different evaluation settings. for example, table 1 shows the pearson?s linear correlation coefficient analysis of 8 machine translation systems from 2003 nist chinese english machine translation evaluation. the pearson? correlation coefficients are computed according to different automatic evaluation methods vs. human assigned adequacy and fluency. bleu1, 4, and 12 are bleu with maximum n-gram lengths of 1, 4, and 12 respectively. gtm10, 20, and 30 are gtm with exponents of 1.0, 2.0, and 3.0 respectively. 95% confidence intervals are estimated using bootstrap resampling (davison and hinkley 1997). from the bleu group, we found that shorter bleu has better adequacy correlation while longer bleu has better fluency correlation. gtm with smaller exponent has better adequacy correlation and gtm with larger exponent has better fluency correlation. nist is very good in adequacy correlation but not as good as gtm30 in fluency correlation. based on these observations, we are not able to conclude which metric is the best because it depends on the manual evaluation criteria. this results also indicate that high correlation between human and automatic scores in both adequacy and fluency cannot always been achieved at the same time. 
the best performing metrics in fluency according to table 1 are bleu12 and gtm30 (dark/green cells). however, many metrics are statistically equivalent (gray cells) to them when we factor in the 95% confidence intervals. for example, even per is as good as bleu12 in adequacy. one reason for this might be due to data sparseness since only 8 systems are available. the other potential problem for correlation analysis of human vs. automatic framework is that high corpus-level correlation might not translate to high sentence-level correlation. however, high sentence-level correlation is often an important property that machine translation researchers look for. for example, candidate translations shorter than 12 words would have zero bleu12 score but bleu12 has the best correlation with human judgment in fluency as shown in table 1. in order to evaluate the ever increasing number of automatic evaluation metrics for machine translation objectively, efficiently, and reliably, we introduce a new evaluation method: orange. we describe orange in details in section 2 and briefly introduce three new automatic metrics that will be used in comparisons in section 3. the results of comparing several existing automatic metrics and the three new automatic metrics using orange will be presented in section 4. we conclude this paper and discuss future directions in section 5.to automatically evaluate machine translations, the machine translation community recently adopted an n-gram co-occurrence scoring procedure bleu (papineni et al 2001). ranging from 0 to 9 (rouge-s0 to s9) and without any skip distance limit (rouge-s*) we compute the average score of the references and then rank the candidate translations and the references according to these automatic scores. however, we plan to conduct the sampling procedure to verify this is indeed the case. we conjecture that this is the case for the currently available machine translation systems. we conclude this paper and discuss future directions in section 5. the results of comparing several existing automatic metrics and the three new automatic metrics using orange will be presented in section 4. the orange score for each metric is calculated as the average rank of the average reference (oracle) score over the whole corpus (872 sentences) divided by the length of the n-best list plus 1. if the portion is small then the orange method can be confidently applied. assuming the length of the n-best list is n and the size of the corpus is s (in number of sentences), we compute orange as follows: orange = )1( )( 1 + ??? \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1080.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1080.txt new file mode 100644 index 0000000000000000000000000000000000000000..5dbe2d0f6c33beec840b8cede4dfb5f0c802f8ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1080.txt @@ -0,0 +1 @@ +the empiricist revolution in computational linguistics has dramatically shifted the accepted boundary between what kinds of knowledge are best supplied by humans and what kinds are best learned from data, with much of the human supplied knowledge now being in the form of annotations of data. as we look to the future, we expect that relatively unsupervised methods will grow in applicability, reducing the need for expensive human annotation of data. 
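A small sketch of the ORANGE computation as described in the passage above: for each source sentence, the average-reference ("oracle") score is ranked against the n-best candidates' scores, and ORANGE is the average oracle rank divided by (n + 1). The per-sentence scores below are invented; in practice they would come from BLEU-, ROUGE-, or GTM-style metrics.

# sketch of the ORANGE evaluation method for an automatic MT metric
def orange(per_sentence_scores):
    """per_sentence_scores: list of (oracle_score, [candidate scores]) pairs."""
    ratios = []
    for oracle, candidates in per_sentence_scores:
        n = len(candidates)
        # rank 1 = best; the oracle is ranked among the n-best candidates by metric score
        rank = 1 + sum(1 for c in candidates if c > oracle)
        ratios.append(rank / (n + 1))
    return sum(ratios) / len(ratios)   # lower = the metric ranks references higher

# two toy source sentences, each with a 3-best list of candidate scores
print(orange([(0.8, [0.9, 0.7, 0.5]),    # oracle outranked by one candidate
              (0.6, [0.5, 0.4, 0.3])]))  # oracle ranked first

A metric that consistently ranks the oracle near the top of the n-best list gets a low ORANGE score, which is the behaviour one wants from an automatic evaluation measure.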
with respect to part-of-speech tagging, we believe that the way forward from the relatively small number of languages for which we can currently identify parts of speech in context with reasonable accuracy will make use of unsupervised methods that require only an untagged corpus and a lexicon of words and their possible parts of speech. we believe this based on the fact that such lexicons exist for many more languages (in the form of conventional dictionaries) than extensive human-tagged training corpora exist for. unsupervised part-of-speech tagging, as defined above, has been attempted using a variety of learning algorithms (brill 1995, church, 1988, cutting et. al. 1992, elworthy, 1994 kupiec 1992, merialdo 1991). while this makes unsupervised part-of-speech tagging a relatively well-studied problem, published results to date have not been comparable with respect to the training and test data used, or the lexicons which have been made available to the learners. in this paper, we provide the first comprehensive comparison of methods for unsupervised part-of speech tagging. in addition, we explore two new ideas for improving tagging accuracy. first, we explore an hmm approach to tagging that uses context on both sides of the word to be tagged, inspired by previous work on building bidirectionality into graphical models (lafferty et. al. 2001, toutanova et. al. 2003). second we describe a method for sequential unsupervised training of tag sequence and lexical probabilities in an hmm, which we observe leads to improved accuracy over simultaneous training with certain types of models. in section 2, we provide a brief description of the methods we evaluate and review published results. section 3 describes the contextualized variation on hmm tagging that we have explored. in section 4 we provide a direct comparison of several unsupervised part-of-speech taggers, which is followed by section 5, in which we present a new method for training with suboptimal lexicons. in section 6, we revisit our new approach to hmm tagging, this time, in the supervised framework.in section 6, we revisit our new approach to hmm tagging, this time, in the supervised framework. in the future, we will consider making an increase the context-size, which helped toutanova et al (2003). we have presented a comprehensive evaluation of several methods for unsupervised part-of-speech tagging, comparing several variations of hidden markov model taggers and unsupervised transformation-based learning using the same corpus and same lexicons. the empiricist revolution in computational linguistics has dramatically shifted the accepted boundary between what kinds of knowledge are best supplied by humans and what kinds are best learned from data, with much of the human supplied knowledge now being in the form of annotations of data. in section 4 we provide a direct comparison of several unsupervised part-of-speech taggers, which is followed by section 5, in which we present a new method for training with suboptimal lexicons. this result falls only slightly below the full-blown training intensive dependency-based conditional model. as we look to the future, we expect that relatively unsupervised methods will grow in applicability, reducing the need for expensive human annotation of data. 
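A minimal sketch of the lexicon-constrained HMM decoding described above: each word may only receive tags that its dictionary entry allows, and Viterbi search picks the best tag sequence. The tiny lexicon and transition probabilities are illustrative (in this sketch emissions are treated as uniform over the allowed tags), not trained values.

# Viterbi decoding with a lexicon constraint on possible tags
LEXICON = {"the": ["DT"], "can": ["MD", "NN", "VB"], "rusts": ["VBZ", "NNS"]}
TRANS = {("<s>", "DT"): 0.8, ("DT", "NN"): 0.6, ("DT", "MD"): 0.05,
         ("NN", "VBZ"): 0.7, ("MD", "VB"): 0.8}

def trans(prev, cur, floor=0.01):
    return TRANS.get((prev, cur), floor)   # unseen transitions get a small floor

def viterbi(words):
    # best[tag] = (probability of best path ending in tag, that path)
    best = {"<s>": (1.0, [])}
    for w in words:
        new = {}
        for tag in LEXICON[w]:                       # lexicon constraint
            prob, path = max(((p * trans(prev, tag), path)
                              for prev, (p, path) in best.items()),
                             key=lambda x: x[0])
            new[tag] = (prob, path + [tag])
        best = new
    return max(best.values(), key=lambda x: x[0])[1]

print(viterbi(["the", "can", "rusts"]))   # expected: ['DT', 'NN', 'VBZ']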
as one more way to assess the potential benefit from using left and right context in an hmm tagger, we tested our tagging model in the supervised framework, using the same sections of the treebank previously allocated for unsupervised training, development and testing. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1081.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1081.txt new file mode 100644 index 0000000000000000000000000000000000000000..d02b940af326bcf7dce3395e988191fb98fb263b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1081.txt @@ -0,0 +1 @@ +unlike english and other western languages, many asian languages such as chinese, japanese, and thai, do not delimit words by white-space. wordsegmentation is therefore a key precursor for language processing tasks in these languages. for chinese, there has been significant research on find ing word boundaries in unsegmented sequences(see (sproat and shih, 2002) for a review). un fortunately, building a chinese word segmentation system is complicated by the fact that there is no standard definition of word boundaries in chinese. approaches to chinese segmentation fall roughly into two categories: heuristic dictionary-based methods and statistical machine learning methods.in dictionary-based methods, a predefined dictio nary is used along with hand-generated rules for segmenting input sequence (wu, 1999). howeverthese approaches have been limited by the impossibility of creating a lexicon that includes all possible chinese words and by the lack of robust statistical inference in the rules. machine learning approaches are more desirable and have been successful in both unsupervised learning (peng and schuur mans, 2001) and supervised learning (teahan et al, 2000). many current approaches suffer from either lackof exact inference over sequences or difficulty in incorporating domain knowledge effectively into seg mentation. domain knowledge is either not used, used in a limited way, or used in a complicated way spread across different components. for example,the n-gram generative language modeling based ap proach of teahan et al(2000) does not use domainknowledge. gao et al(2003) uses class-based language for word segmentation where some word cat egory information can be incorporated. zhang et al (2003) use a hierarchical hidden markov model to incorporate lexical knowledge. a recent advance in this area is xue (2003), in which the author uses a sliding-window maximum entropy classifier to tag chinese characters into one of four position tags, and then covert these tags into a segmentation using rules. maximum entropy models give tremendousflexibility to incorporate arbitrary features. how ever, a traditional maximum entropy tagger, as used in xue (2003), labels characters without consideringdependencies among the predicted segmentation labels that is inherent in the state transitions of finite state sequence models. linear-chain conditional random fields (crfs) (lafferty et al, 2001) are models that address both issues above. unlike heuristic methods, they are principled probabilistic finite state models onwhich exact inference over sequences can be ef ficiently performed. unlike generative n-gram or hidden markov models, they have the ability to straightforwardly combine rich domain knowledge, for example in this paper, in the form of multiple readily-available lexicons. 
furthermore, they arediscriminatively-trained, and are often more accurate than generative models, even with the same fea tures. in their most general form, crfs are arbitrary undirected graphical models trained to maximize the conditional probability of the desired outputs given the corresponding inputs. in the linear-chainspecial case we use here, they can be roughly un derstood as discriminatively-trained hidden markovmodels with next-state transition functions represented by exponential models (as in maximum en tropy classifiers), and with great flexibility to viewthe observation sequence in terms of arbitrary, over lapping features, with long-range dependencies, and at multiple levels of granularity. these beneficialproperties suggests that crfs are a promising ap proach for chinese word segmentation.new word detection is one of the most impor tant problems in chinese information processing.many machine learning approaches have been pro posed (chen and bai, 1998; wu and jiang, 2000; nie et al, 1995). new word detection is normally considered as a separate process from segmentation.however, integrating them would benefit both seg mentation and new word detection. crfs provide aconvenient framework for doing this. they can pro duce not only a segmentation, but also confidence in local segmentation decisions, which can be usedto find new, unfamiliar character sequences sur rounded by high-confidence segmentations. thus, our new word detection is not a stand-alone process, but an integral part of segmentation. newly detected words are re-incorporated into our word lexicon,and used to improve segmentation. improved seg mentation can then be further used to improve new word detection. comparing chinese word segmentation accuracyacross systems can be difficult because many re search papers use different data sets and different ground-rules. some published results claim 98% or99% segmentation precision and recall, but these ei ther count only the words that occur in the lexicon, or use unrealistically simple data, lexicons that haveextremely small (or artificially non-existant) outof-vocabulary rates, short sentences or many numbers. a recent chinese word segmentation competition (sproat and emerson, 2003) has made compar isons easier. the competition provided four datasets with significantly different segmentation guidelines, and consistent train-test splits. the performance ofparticipating system varies significantly across different datasets. our system achieves top performance in two of the runs, and a state-of-the-art per formance on average. this indicates that crfs are a viable model for robust chinese word segmentation.this indicates that crfs are a viable model for robust chinese word segmentation. unlike english and other western languages, many asian languages such as chinese, japanese, and thai, do not delimit words by white-space. wordsegmentation is therefore a key precursor for language processing tasks in these languages. the contribution of this paper is three-fold. our system achieves top performance in two of the runs, and a state-of-the-art per formance on average. the performance ofparticipating system varies significantly across different datasets. for chinese, there has been significant research on find ing word boundaries in unsegmented sequences(see (sproat and shih, 2002) for a review). feature function are represented as f(yt?2, yt?1, yt,x). 
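The garbled expression above is presumably f(y_{t-2}, y_{t-1}, y_t, x): feature functions over a short label history and the whole observation sequence. As a concrete sketch, the snippet below extracts per-character features of the kind a linear-chain CRF segmenter might use, including a lexicon-match feature; the two-word lexicon and feature names are invented for illustration.

# per-character feature extraction for a CRF-style Chinese segmenter
LEXICON = {"北京", "大学"}

def features(chars, t):
    feats = {
        "char=" + chars[t]: 1,
        "prev=" + (chars[t - 1] if t > 0 else "<s>"): 1,
        "next=" + (chars[t + 1] if t + 1 < len(chars) else "</s>"): 1,
    }
    # lexicon feature: does a dictionary word start at this character position?
    for word in LEXICON:
        if "".join(chars[t:t + len(word)]) == word:
            feats["lex_start_len=%d" % len(word)] = 1
    return feats

chars = list("北京大学")
for t in range(len(chars)):
    print(chars[t], features(chars, t))

Because the CRF conditions on the whole input x, such overlapping lexicon and context features can be added freely, which is the flexibility the passage emphasises.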
acknowledgmentsthis work was supported in part by the center for intelligent information retrieval, in part by the cen tral intelligence agency, the national security agencyand national science foundation under nsf grant #iis 0326249, and in part by spawarsyscen-sd grant number n66001-02-1-8903. conditional random fields (crfs) are undirected graphical models trained to maximize a conditional probability (lafferty et al, 2001). these datasets represent four different segmentation standards. un fortunately, building a chinese word segmentation system is complicated by the fact that there is no standard definition of word boundaries in chinese. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1111.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1111.txt new file mode 100644 index 0000000000000000000000000000000000000000..af592417f1fc444940973b7de8fa43434b4ebc2a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1111.txt @@ -0,0 +1 @@ +the natural language processing (nlp) com munity has recently seen a growth in corpus-based methods. algorithms light in linguistic theories but rich in available training data have been successfully applied to several applications such as ma chine translation (och and ney 2002), information extraction (etzioni et al 2004), and question an swering (brill et al 2001). in the last decade, we have seen an explosion in the amount of available digital text resources. it is estimated that the internet contains hundreds of terabytes of text data, most of which is in an unstructured format. yet, many nlp algorithms tap into only megabytes or gigabytes of this information. in this paper, we make a step towards acquiring semantic knowledge from terabytes of data. we present an algorithm for extracting is-a relations, designed for the terascale, and compare it to a state of the art method that employs deep analysis of text (pantel and ravichandran 2004). we show that by simply utilizing more data on this task, we can achieve similar performance to a linguisticallyrich approach. the current state of the art co occurrence model requires an estimated 10 years just to parse a 1tb corpus (see table 1). instead of using a syntactically motivated co-occurrence ap proach as above, our system uses lexico-syntactic rules. in particular, it finds lexico-pos patterns by making modifications to the basic edit distance algorithm. once these patterns have been learnt, the algorithm for finding new is-a relations runs in o(n), where n is the number of sentences. in semantic hierarchies such as wordnet (miller 1990), an is-a relation between two words x and y represents a subordinate relationship (i.e. x is more specific than y). many algorithms have recently been proposed to automatically mine is-a (hypo nym/hypernym) relations between words. here, we focus on is-a relations that are characterized by the questions ?what/who is x?? for example, table 2 shows a sample of 10 is-a relations discovered by the algorithms presented in this paper. in this table, we call azalea, tiramisu, and winona ryder in stances of the respective concepts flower, dessert and actress. these kinds of is-a relations would be useful for various purposes such as ontology con struction, semantic information retrieval, question answering, etc. 
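To make the pattern-based route concrete, here is a minimal illustration of lexico-syntactic pattern matching for is-a extraction. The two regular expressions are classic "such as" / "and other" patterns and merely stand in for the learned lexico-POS patterns described above.

# toy pattern matcher for (instance, concept) is-a pairs
import re

SUCH_AS   = re.compile(r"(\w+) such as ((?:\w+, )*\w+)")     # "desserts such as tiramisu, baklava"
AND_OTHER = re.compile(r"(\w+(?:, \w+)*) and other (\w+)")    # "azaleas and other flowers"

def extract_isa(sentence):
    pairs = []
    m = SUCH_AS.search(sentence)
    if m:
        concept, insts = m.group(1), m.group(2)
        pairs += [(i.strip(), concept) for i in insts.split(",")]
    m = AND_OTHER.search(sentence)
    if m:
        insts, concept = m.group(1), m.group(2)
        pairs += [(i.strip(), concept) for i in insts.split(",")]
    return pairs   # list of (instance, concept) pairs

print(extract_isa("desserts such as tiramisu, baklava are popular"))
print(extract_isa("azaleas and other flowers bloom in spring"))

Each sentence is scanned a constant number of times, which is why this style of extraction scales linearly with corpus size.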
the main contribution of this paper is a comparison of the quality of our pattern-based and co occurrence models as a function of processing time and corpus size. also, the paper lays a foundation for terascale acquisition of knowledge. we will show that, for very small or very large corpora or for situations where recall is valued over precision, the pattern-based approach is best.the natural language processing (nlp) com munity has recently seen a growth in corpus-based methods. we will show that, for very small or very large corpora or for situations where recall is valued over precision, the pattern-based approach is best. there is a long standing need for higher quality performance in nlp systems. our biggest challenge as we venture to the terascale is to use our new found wealth not only to build better systems, but to im prove our understanding of language. also, the paper lays a foundation for terascale acquisition of knowledge. previous approaches to extracting is-a relations fall under two categories: pattern-based and co occurrence-based approaches. there is promise for increasing our system accuracy by re ranking the outputs of the top-5 hypernyms. algorithms light in linguistic theories but rich in available training data have been successfully applied to several applications such as ma chine translation (och and ney 2002), information extraction (etzioni et al 2004), and question an swering (brill et al 2001). in this section, we empirically compare the pattern-based and co-occurrence-based models pre sented in section 3 and section 4. re cently, pantel and ravichandran (2004) extended this approach by making use of all syntactic de pendency features for each noun. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1146.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1146.txt new file mode 100644 index 0000000000000000000000000000000000000000..02ee8dbf7ff02315993076255d0382e67524018e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1146.txt @@ -0,0 +1 @@ +over recent years, many natural language pro cessing (nlp) techniques have been developedthat might benefit from knowledge of distribu tionally similar words, i.e., words that occur in similar contexts. for example, the sparse dataproblem can make it difficult to construct language models which predict combinations of lex ical events. similarity-based smoothing (brown et al, 1992; dagan et al, 1999) is an intuitivelyappealing approach to this problem where prob abilities of unseen co-occurrences are estimatedfrom probabilities of seen co-occurrences of dis tributionally similar events.other potential applications apply the hy pothesised relationship (harris, 1968) betweendistributional similarity and semantic similar ity; i.e., similarity in the meaning of words can be predicted from their distributional similarity.one advantage of automatically generated the sauruses (grefenstette, 1994; lin, 1998; curranand moens, 2002) over large-scale manually cre ated thesauruses such as wordnet (fellbaum,1998) is that they might be tailored to a partic ular genre or domain.however, due to the lack of a tight defini tion for the concept of distributional similarity and the broad range of potential applications, alarge number of measures of distributional similarity have been proposed or adopted (see section 2). 
previous work on the evaluation of dis tributional similarity methods tends to either compare sets of distributionally similar words to a manually created semantic resource (lin, 1998; curran and moens, 2002) or be orientedtowards a particular task such as language mod elling (dagan et al, 1999; lee, 1999). the first approach is not ideal since it assumes that the goal of distributional similarity methods is topredict semantic similarity and that the semantic resource used is a valid gold standard. further, the second approach is clearly advanta geous when one wishes to apply distributional similarity methods in a particular application area. however, it is not at all obvious that oneuniversally best measure exists for all applica tions (weeds and weir, 2003). thus, applying adistributional similarity technique to a new ap plication necessitates evaluating a large number of distributional similarity measures in addition to evaluating the new model or algorithm. we propose a shift in focus from attemptingto discover the overall best distributional sim ilarity measure to analysing the statistical and linguistic properties of sets of distributionally similar words returned by different measures. this will make it possible to predict in advanceof any experimental evaluation which distributional similarity measures might be most appro priate for a particular application. further, we explore a problem faced by the automatic thesaurus generation community, which is that distributional similarity methodsdo not seem to offer any obvious way to distinguish between the semantic relations of syn onymy, antonymy and hyponymy. previous work on this problem (caraballo, 1999; lin et al., 2003) involves identifying specific phrasal patterns within text e.g., ?xs and other ys? is used as evidence that x is a hyponym of y. our work explores the connection between relativefrequency, distributional generality and seman tic generality with promising results. the rest of this paper is organised as follows.in section 2, we present ten distributional simi larity measures that have been proposed for use in nlp. in section 3, we analyse the variation in neighbour sets returned by these measures. in section 4, we take one fundamental statisticalproperty (word frequency) and analyse correla tion between this and the nearest neighbour setsgenerated. in section 5, we relate relative fre quency to a concept of distributional generalityand the semantic relation of hyponymy. in sec tion 6, we consider the effects that this has on a potential application of distributional similarity techniques, which is judging compositionality of collocations.in sec tion 6, we consider the effects that this has on a potential application of distributional similarity techniques, which is judging compositionality of collocations. over recent years, many natural language pro cessing (nlp) techniques have been developedthat might benefit from knowledge of distribu tionally similar words, i.e., words that occur in similar contexts. we have presented an analysis of a set of dis tributional similarity measures. we would liketo thank adam kilgarriff and bill keller for use ful discussions. in section 5, we relate relative fre quency to a concept of distributional generalityand the semantic relation of hyponymy. for example, the sparse dataproblem can make it difficult to construct language models which predict combinations of lex ical events. 
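For concreteness, one simple member of the family of distributional similarity measures discussed here is cosine over co-occurrence count vectors; it is only one of the measures compared in this line of work, and the counts below are invented.

# cosine similarity between co-occurrence vectors
import math
from collections import Counter

contexts = {
    "dog": Counter({"bark": 4, "pet": 3, "walk": 2}),
    "cat": Counter({"pet": 4, "purr": 3, "walk": 1}),
    "car": Counter({"drive": 5, "road": 2}),
}

def cosine(u, v):
    dot = sum(u[k] * v[k] for k in u if k in v)
    norm = math.sqrt(sum(x * x for x in u.values())) * \
           math.sqrt(sum(x * x for x in v.values()))
    return dot / norm if norm else 0.0

print(cosine(contexts["dog"], contexts["cat"]))   # distributionally similar
print(cosine(contexts["dog"], contexts["car"]))   # no shared contexts -> 0.0

Different measures weight shared and non-shared contexts differently, which is exactly why the neighbour sets they return can vary so much.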
thus, it would seem that the three-way connection betweendistributional generality, hyponymy and rela tive frequency exists for verbs as well as nouns. in this section, we introduce some basic con cepts and then discuss the ten distributional similarity measures used in this study. in its most general sense, a collocation is a habitual or lexicalised word combination. we have seen that there is a large amount of variation in the neighbours selected by different measures andtherefore the choice of measure in a given appli cation is likely to be important. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1180.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1180.txt new file mode 100644 index 0000000000000000000000000000000000000000..17e9529e428ce981c0c93d47608a05d6b0e69519 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1180.txt @@ -0,0 +1 @@ +the levels of accuracy and robustness recently achieved by statistical parsers (e.g. collins (1999),charniak (2000)) have led to their use in a num ber of nlp applications, such as question-answering(pasca and harabagiu, 2001), machine translation (charniak et al, 2003), sentence simplifica tion (carroll et al, 1999), and a linguist?s search engine (resnik and elkiss, 2003). such parsers typically return phrase-structure trees in the styleof the penn treebank, but without traces and co indexation. however, the usefulness of this outputis limited, since the underlying meaning (as repre sented in a predicate-argument structure or logical form) is difficult to reconstruct from such skeletal parse trees.in this paper we demonstrate how a widecoverage statistical parser using combinatory categorial grammar (ccg) can be used to generate semantic representations. there are a number of ad vantages to using ccg for this task. first, ccg provides ?surface compositional? analysis of certainsyntactic phenomena such as coordination and ex traction, allowing the logical form to be obtained for such cases in a straightforward way. second, ccg isa lexicalised grammar, and only uses a small num ber of semantically transparent combinatory rules tocombine ccg categories. hence providing a compositional semantics for ccg simply amounts to assigning semantic representations to the lexical en tries and interpreting the combinatory rules. andthird, there exist highly accurate, efficient and ro bust ccg parsers which can be used directly for this task (clark and curran, 2004b; hockenmaier, 2003).the existing ccg parsers deliver predicate argu ment structures, but not semantic representations that can be used for inference. the present paper seeks to extend one of these wide coverage parsers by using it to build logical forms suitable for use invarious nlp applications that require semantic in terpretation.we show how to construct first-order represen tations from ccg derivations using the ?-calculus, and demonstrate that semantic representations can be produced for over 97% of the sentences in unseen wsj text. the only other deep parser we are aware of to achieve such levels of robustness for the wsj is kaplan et al (2004). the use of the ?-calculusis integral to our method. however, first-order rep resentations are simply used as a proof-of-concept; we could have used drss (kamp and reyle, 1993)or some other representation more tailored to the ap plication in hand.there is some existing work with a similar motivation to ours. 
briscoe and carroll (2002) gen erate underspecified semantic representations fromtheir robust parser. toutanova et al (2002) and ka plan et al (2004) combine statistical methods with a linguistically motivated grammar formalism (hpsg and lfg respectively) in an attempt to achieve levels of robustness and accuracy comparable to the penn treebank parsers (which kaplan et al do achieve). however, there is a key difference between these approaches and ours. in our approach the creation of the semantic representations forms a completely it could cost taxpayers 15 million to install and residents 1 million a year to maintain np in our approach the creation of the semantic representations forms a completely it could cost taxpayers 15 million to install and residents 1 million a year to maintain np the levels of accuracy and robustness recently achieved by statistical parsers (e.g. collins (1999),charniak (2000)) have led to their use in a num ber of nlp applications, such as question-answering(pasca and harabagiu, 2001), machine translation (charniak et al, 2003), sentence simplifica tion (carroll et al, 1999), and a linguist?s search engine (resnik and elkiss, 2003). however, there is a key difference between these approaches and ours. such parsers typically return phrase-structure trees in the styleof the penn treebank, but without traces and co indexation. however, the usefulness of this outputis limited, since the underlying meaning (as repre sented in a predicate-argument structure or logical form) is difficult to reconstruct from such skeletal parse trees.in this paper we demonstrate how a widecoverage statistical parser using combinatory categorial grammar (ccg) can be used to generate semantic representations. toutanova et al (2002) and ka plan et al (2004) combine statistical methods with a linguistically motivated grammar formalism (hpsg and lfg respectively) in an attempt to achieve levels of robustness and accuracy comparable to the penn treebank parsers (which kaplan et al do achieve). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1197.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1197.txt new file mode 100644 index 0000000000000000000000000000000000000000..4379f18549a741c3e44c8f3f4c6143a74f684af9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1197.txt @@ -0,0 +1 @@ +semantic parsing of sentences is believed to be animportant task toward natural language understand ing, and has immediate applications in tasks such information extraction and question answering. we study semantic role labeling(srl). for each verb in a sentence, the goal is to identify all constituents that fill a semantic role, and to determine their roles,such as agent, patient or instrument, and their ad juncts, such as locative, temporal or manner. the propbank project (kingsbury and palmer, 2002) provides a large human-annotated corpus of semantic verb-argument relations. specifically, we use the data provided in the conll-2004 shared task of semantic-role labeling (carreras and ma`rquez, 2003) which consists of a portion of thepropbank corpus, allowing us to compare the per formance of our approach with other systems. 
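A toy illustration of the lambda-calculus composition idea: lexical items carry semantic functions, and forward/backward application combines them as the derivation is built. Python lambdas and string "logical forms" stand in for real CCG categories and first-order terms; this is not the parser's actual representation.

# toy semantic composition under CCG-style application rules
lexicon = {
    "John":  "john",
    "Mary":  "mary",
    "walks": lambda subj: "walks(%s)" % subj,                        # S\NP
    "sees":  lambda obj: lambda subj: "sees(%s,%s)" % (subj, obj),   # (S\NP)/NP
}

def forward_apply(fn, arg):   # X/Y  Y  =>  X
    return fn(arg)

def backward_apply(arg, fn):  # Y  X\Y  =>  X
    return fn(arg)

vp = forward_apply(lexicon["sees"], lexicon["Mary"])           # "sees" applied to its object
print(backward_apply(lexicon["John"], vp))                     # sees(john,mary)
print(backward_apply(lexicon["John"], lexicon["walks"]))       # walks(john)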
previous approaches to the srl task have madeuse of a full syntactic parse of the sentence in or der to define argument boundaries and to determine the role labels (gildea and palmer, 2002; chen and rambow, 2003; gildea and hockenmaier, 2003;pradhan et al, 2003; pradhan et al, 2004; sur deanu et al, 2003). in this work, following the conll-2004 shared task definition, we assume thatthe srl system takes as input only partial syn tactic information, and no external lexico-semantic knowledge bases. specifically, we assume as input resources a part-of-speech tagger, a shallow parser that can process the input to the level of basedchunks and clauses (tjong kim sang and buch holz, 2000; tjong kim sang and de?jean, 2001), and a named-entity recognizer (tjong kim sang and de meulder, 2003). we do not assume a full parse as input. srl is a difficult task, and one cannot expecthigh levels of performance from either purely man ual classifiers or purely learned classifiers. rather, supplemental linguistic information must be used to support and correct a learning system. so far,machine learning approaches to srl have incorpo rated linguistic information only implicitly, via theclassifiers? features. the key innovation in our ap proach is the development of a principled method tocombine machine learning techniques with linguistic and structural constraints by explicitly incorpo rating inference into the decision process. in the machine learning part, the system we present here is composed of two phases. first, a set of argument candidates is produced using twolearned classifiers?one to discover beginning po sitions and one to discover end positions of each argument type. hopefully, this phase discovers a small superset of all arguments in the sentence (foreach verb). in a second learning phase, the candi date arguments from the first phase are re-scored using a classifier designed to determine argument type, given a candidate argument.unfortunately, it is difficult to utilize global prop erties of the sentence into the learning phases.however, the inference level it is possible to incorporate the fact that the set of possible rolelabelings is restricted by both structural and lin guistic constraints?for example, arguments cannotstructurally overlap, or, given a predicate, some ar gument structures are illegal. the overall decision problem must produce an outcome that consistent with these constraints. we encode the constraints aslinear inequalities, and use integer linear programming(ilp) as an inference procedure to make a final decision that is both consistent with the con straints and most likely according to the learningsystem. although ilp is generally a computationally hard problem, there are efficient implementations that can run on thousands of variables and constraints. in our experiments, we used the commer cial ilp package (xpress-mp, 2003), and were able to process roughly twenty sentences per second.semantic parsing of sentences is believed to be animportant task toward natural language understand ing, and has immediate applications in tasks such information extraction and question answering. in our experiments, we used the commer cial ilp package (xpress-mp, 2003), and were able to process roughly twenty sentences per second. we study semantic role labeling(srl). although ilp is generally a computationally hard problem, there are efficient implementations that can run on thousands of variables and constraints. 
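A small sketch of the constrained-inference idea described above: each candidate argument has a classifier score, and the final decision is the best-scoring subset that respects the structural constraints (no overlapping spans, no duplicate core argument). Brute-force search stands in for the ILP solver, and the candidate spans, roles, and scores are invented.

# pick the best consistent set of argument candidates for one predicate
from itertools import combinations

candidates = [(0, 2, "A0", 1.2), (1, 3, "A0", 0.9),
              (4, 6, "A1", 1.5), (4, 5, "AM-TMP", 0.4), (7, 8, "A1", 0.6)]
CORE = {"A0", "A1", "A2", "A3", "A4"}

def consistent(subset):
    for (s1, e1, r1, _), (s2, e2, r2, _) in combinations(subset, 2):
        if s1 < e2 and s2 < e1:            # spans overlap
            return False
        if r1 == r2 and r1 in CORE:        # duplicate core argument
            return False
    return True

best = max((subset for n in range(len(candidates) + 1)
            for subset in combinations(candidates, n)
            if consistent(subset)),
           key=lambda subset: sum(c[3] for c in subset))
print(best)   # ((0, 2, 'A0', 1.2), (4, 6, 'A1', 1.5))

An ILP solver does the same maximisation, but expresses each candidate as a 0/1 variable and each constraint as a linear inequality, which scales to many more candidates than enumeration would.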
the goal of the semantic-role labeling task is to dis cover the verb-argument structure for a given input sentence. we show that linguistic information is useful for se mantic role labeling, both in extracting features and dist. prec. see the details of the definition in kingsbury and palmer (2002) and carreras and ma`rquez (2003). as more constraints are considered, we ex pect the overall performance to improve. for each verb in a sentence, the goal is to identify all constituents that fill a semantic role, and to determine their roles,such as agent, patient or instrument, and their ad juncts, such as locative, temporal or manner. we encode the constraints aslinear inequalities, and use integer linear programming(ilp) as an inference procedure to make a final decision that is both consistent with the con straints and most likely according to the learningsystem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1200.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1200.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c0b8c1ea92aeb59f3d30ec4da2551cd315bde72 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C04-1200.txt @@ -0,0 +1 @@ +what is an opinion? the many opinions on opinions are reflected in a considerable literature (aristotle 1954; perelman 1970; toulmin et al 1979; wallace 1975; toulmin 2003). recent computational work either focuses on sentence ?subjectivity? (wiebe et al 2002; riloff et al 2003), concentrates just on explicit statements of evaluation, such as of films (turney 2002; pang et al 2002), or focuses on just one aspect of opinion, e.g., (hatzivassiloglou and mckeown 1997) on adjectives. we wish to study opinion in general; our work most closely resembles that of (yu and hatzivassiloglou 2003). since an analytic definition of opinion is probably impossible anyway, we will not summarize past discussion or try to define formally what is and what is not an opinion. for our purposes, we describe an opinion as a quadruple [topic, holder, claim, sentiment] in which the holder believes a claim about the topic, and in many cases associates a sentiment, such as good or bad, with the belief. for example, the following opinions contain claims but no sentiments: ?i believe the world is flat? ?the gap is likely to go bankrupt? ?bin laden is hiding in pakistan? ?water always flushes anti-clockwise in the southern hemisphere? like yu and hatzivassiloglou (2003), we want to automatically identify sentiments, which in this work we define as an explicit or implicit expression in text of the holder?s positive, negative, or neutral regard toward the claim about the topic. (other sentiments we plan to study later.) sentiments always involve the holder?s emotions or desires, and may be present explicitly or only implicitly: ?i think that attacking iraq would put the us in a difficult position? (implicit) ?the us attack on iraq is wrong? (explicit) ?i like ike? (explicit) ?we should decrease our dependence on oil? (implicit) ?reps. tom petri and william f. goodling asserted that counting illegal aliens violates citizens? basic right to equal representation? (implicit) in this paper we address the following challenge problem. given a topic (e.g., ?should abortion be banned??) 
and a set of texts about the topic, find the sentiments expressed about (claims about) the topic (but not its supporting subtopics) in each text, and identify the people who hold each sentiment. to avoid the problem of differentiating between shades of sentiments, we simplify the problem to: identify just expressions of positive, negative, or neutral sentiments, together with their holders. in addition, for sentences that do not express a sentiment but simply state that some sentiment(s) exist(s), return these sentences in a separate set. for example, given the topic ?what should be done with medicare?? the sentence ?after years of empty promises, congress has rolled out two medicare prescription plans, one from house republicans and the other from the democratic sentence pos tagger verbs nounsadjectives adjective senti ment classifier sentiment sentiment sentence sentiment classifier opinion region + polarity + holder holder finder named entity tagger sentence sentence texts + topic sentiment sentiment sentiment v rbs verb senti ment classifier nouns noun senti ment classifier wordnet sentence : figure 1: system architecture. sens. bob graham of florida and zell miller of georgia? should be returned in the separate set. we approach the problem in stages, starting with words and moving on to sentences. we take as unit sentiment carrier a single word, and first classify each adjective, verb, and noun by its sentiment. we experimented with several classifier models. but combining sentiments requires additional care, as table 1 shows. california supreme court agreed that the state?s new term-limit law was constitutional. california supreme court disagreed that the state?s new term-limit law was constitutional. california supreme court agreed that the state?s new term-limit law was unconstitutional. california supreme court disagreed that the state?s new term-limit law was unconstitutional. table 1: combining sentiments. a sentence might even express opinions of different people. when combining word-level sentiments, we therefore first determine for each holder a relevant region within the sentence and then experiment with various models for combining word sentiments. we describe our models and algorithm in section 2, system experiments and discussion in section 3, and conclude in section 4.what is an opinion? we describe our models and algorithm in section 2, system experiments and discussion in section 3, and conclude in section 4. when combining word-level sentiments, we therefore first determine for each holder a relevant region within the sentence and then experiment with various models for combining word sentiments. sentiment recognition is a challenging and difficult part of understanding opinions. the many opinions on opinions are reflected in a considerable literature (aristotle 1954; perelman 1970; toulmin et al 1979; wallace 1975; toulmin 2003). a sentence might even express opinions of different people. recent computational work either focuses on sentence ?subjectivity? nonetheless, as the experiments show, encouraging results can be obtained even with relatively simple models and only a small amount of manual seeding effort. table 1: combining sentiments. we wish to study opinion in general; our work most closely resembles that of (yu and hatzivassiloglou 2003). the first experiment examines the two word sentiment classifier models and the second the three sentence sentiment classifier models. 
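As a rough illustration of the sentiment-combination problem in Table 1, the sketch below multiplies the polarities of sentiment-bearing words inside a holder's region, so that a negative verb over a negative claim yields positive regard. The seed lexicon and the product rule are assumptions for illustration; the paper trains word sentiment classifiers and compares several combining models.

# toy word-sentiment lexicon; the paper learns word sentiments with classifiers,
# here the values are hand-seeded for illustration (+1 positive, -1 negative)
word_polarity = {
    "agreed": 1, "disagreed": -1,
    "constitutional": 1, "unconstitutional": -1,
}

def region_sentiment(words):
    """multiply the polarities of sentiment-bearing words in a holder's region,
    so that e.g. 'disagreed ... unconstitutional' comes out positive."""
    score = 1
    seen = False
    for w in words:
        if w in word_polarity:
            score *= word_polarity[w]
            seen = True
    return score if seen else 0  # 0 means neutral / no sentiment words found

sent = "california supreme court disagreed that the new term-limit law was unconstitutional"
print(region_sentiment(sent.split()))  # (-1) * (-1) = +1, positive regard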
unfortunately, in most cases it classifies neutral and weak sentiment sentences as non-opinion bearing sentences. since an analytic definition of opinion is probably impossible anyway, we will not summarize past discussion or try to define formally what is and what is not an opinion. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1018.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1018.txt new file mode 100644 index 0000000000000000000000000000000000000000..7febfc88dace760fe1e44a641912b0800d4ad2e8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1018.txt @@ -0,0 +1 @@ +automatic sentence compression can be broadly described as the task of creating a grammaticalsummary of a single sentence with minimal information loss. it has recently attracted much attention, in part because of its relevance to applications. examples include the generation of sub titles from spoken transcripts (vandeghinste and pan, 2004), the display of text on small screens such as mobile phones or pdas (corston-oliver, 2001), and, notably, summarisation (jing, 2000; lin, 2003). most prior work has focused on a specific instantiation of sentence compression, namely word deletion. given an input sentence of words, w 1 , w 2 . . . w n , a compression is formed by dropping any subset of these words (knight c ? 2008. licensed under the creative commonsattribution-noncommercial-share alike 3.0 unported li cense (http://creativecommons.org/licenses/by-nc-sa/3.0/). some rights reserved. and marcu, 2002). the simplification renders the task computationally feasible, allowing efficient decoding using a dynamic program (knight andmarcu, 2002; turner and charniak, 2005; mcdon ald, 2006). furthermore, constraining the problemto word deletion affords substantial modeling flexibility. indeed, a variety of models have been successfully developed for this task ranging from in stantiations of the noisy-channel model (knight and marcu, 2002; galley and mckeown, 2007;turner and charniak, 2005), to large-margin learn ing (mcdonald, 2006; cohn and lapata, 2007), and integer linear programming (clarke, 2008). however, the simplification also renders the tasksomewhat artificial. there are many rewrite operations that could compress a sentence, besides deletion, including reordering, substitution, and inser tion. in fact, professional abstractors tend to use these operations to transform selected sentences from an article into the corresponding summary sentences (jing, 2000). therefore, in this paper we consider sentence compression from a more general perspective and generate abstracts rather than extracts. in this framework, the goal is to find a summary of theoriginal sentence which is grammatical and conveys the most important information without necessarily using the same words in the same or der. our task is related to, but different from, paraphrase extraction (barzilay, 2003). we must not only have access to paraphrases (i.e., rewrite rules), but also be able to combine them in order to generate new text, while attempting to produce a shorter resulting string. quirk et al (2004) present an end-to-end paraphrasing system inspired byphrase-based machine translation that can both ac quire paraphrases and use them to generate new strings. however, their model is limited to lexical substitution ? no reordering takes place ? 
and is 137 lacking the compression objective.once we move away from extractive compres sion we are faced with two problems. first, wemust find an appropriate training set for our abstractive task. compression corpora are not natu rally available and existing paraphrase corpora do not normally contain compressions. our second problem concerns the modeling task itself. ideally, our learning framework should handle structural mismatches and complex rewriting operations.in what follows, we first present a new cor pus for abstractive compression which we created by having annotators compress sentences while rewriting them. besides obtaining useful data formodeling purposes, we also demonstrate that ab stractive compression is a meaningful task. we then present a tree-to-tree transducer capable of transforming an input parse tree into a compressed parse tree. our approach is based on synchronous tree substitution grammar (stsg, eisner (2003)),a formalism that can account for structural mismatches, and is trained discriminatively. specifi cally, we generalise the model of cohn and lapata (2007) to our abstractive task. we present a noveltree-to-tree grammar extraction method which acquires paraphrases from bilingual corpora and ensure coherent output by including a ngram language model as a feature. we also develop a number of loss functions suited to the abstractive compression task. we hope that some of the work described here might be of relevance to other gen eration tasks such as machine translation (eisner, 2003), multi-document summarisation (barzilay, 2003), and text simplification (carroll et al, 1999).automatic sentence compression can be broadly described as the task of creating a grammaticalsummary of a single sentence with minimal information loss. special thanks to phil blunsom, james clarke and miles osborne for their insightful suggestions. we first performed an analysis of variance (anova)to examine the effect of different system compres sions. our results are summarised in table 4, where we show the mean ratings for our system (abstract), the baseline (extract), and the gold standard. we also develop a number of loss functions suited to the abstractive compression task. acknowledgements the authors acknowledge the support of epsrc (grants gr/t04540/01 and gr/t04557/01). we hope that some of the work described here might be of relevance to other gen eration tasks such as machine translation (eisner, 2003), multi-document summarisation (barzilay, 2003), and text simplification (carroll et al, 1999). finally, we planto apply the model to other paraphrasing tasks in cluding fully abstractive document summarisation (daum?e iii and marcu, 2002). the anova revealed a reliable effect on both grammaticality and importance (significant over both subjects and items (p < 0.01)).we next examined in more detail between system differences. it has recently attracted much attention, in part because of its relevance to applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1022.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1022.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0f8bd3e84f68febeff74f5bf4695cc53caaa48f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1022.txt @@ -0,0 +1 @@ +the field of research in natural language processing (nlp) applications for l2 language is constantly growing. 
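The loss functions mentioned for the abstractive compression task are not spelled out in this excerpt; the sketch below is one plausible stand-in, a unigram-F1 loss between a candidate and a gold compression, included only to make the idea of a task-specific loss concrete.

from collections import Counter

def unigram_f1_loss(candidate, reference):
    """1 minus unigram F1 between a candidate compression and the gold compression;
    an assumed stand-in for the paper's loss functions, not their exact definitions."""
    c, r = Counter(candidate), Counter(reference)
    overlap = sum((c & r).values())
    if overlap == 0:
        return 1.0
    precision = overlap / sum(c.values())
    recall = overlap / sum(r.values())
    return 1.0 - (2 * precision * recall) / (precision + recall)

gold = "the minister resigned yesterday".split()
cand = "the minister quit yesterday".split()
print(round(unigram_f1_loss(cand, gold), 3))  # 0.25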
this is largely driven by the ex panding population of l2 english speakers, whose varying levels of ability may require different types of nlp tools from those designed primarily for native speakers of the language. these include applications for use by the individual and within instructional contexts. among the key tools are error-checking applications, focusing particularly on areas which learners find the most challenging. prepositions and determiners are known to be oneof the most frequent sources of error for l2 en glish speakers, a finding supported by our analysisof a small error-tagged corpus we created (determiners 17% of errors, prepositions 12%). there fore, in developing a system for automatic error detection in l2 writing, it seems desirable to focus on these problematic, and very common, parts of speech (pos).this paper gives a brief overview of the prob lems posed by these pos and of related work. we c ? 2008. licensed under the creative commonsattribution-noncommercial-share alike 3.0 unported li cense (http://creativecommons.org/licenses/by-nc-sa/3.0/). some rights reserved. then present our proposed approach on both l1 and l2 data and discuss the results obtained so far.the field of research in natural language processing (nlp) applications for l2 language is constantly growing. then present our proposed approach on both l1 and l2 data and discuss the results obtained so far. rachele de felice was supported by an ahrc scholar ship for the duration of her studies. this paper discussed a contextual feature based approach to the automatic acquisition of models of use for prepositions and determiners, whichachieve an accuracy of 70.06% and 92.15% re spectively, and showed how it can be applied to anerror correction task for l2 writing, with promis ing early results. however, in noting both divergences and similarities between the two learners, human and machine, we may be able to derive useful insights into the way the learning processes operate, and what factors could be more or less important for them. prepositions are challenging for learners because they can appear to have an idiosyncratic behaviour which does not follow any predictable pattern even across nearly identical contexts. in developing this model, our first aim was not to create something which learns like a human, butsomething that works in the best and most effi cient possible way. therefore, here, too, it is very hard tocome up with clear-cut rules predicting every pos sible kind of occurrence. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1107.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1107.txt new file mode 100644 index 0000000000000000000000000000000000000000..a44b462316eecfa54d313485374c338d5438c1ad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1107.txt @@ -0,0 +1 @@ +in many nlp applications, such as question an swering (qa) and information extraction (ie), it is crucial to recognize whether a specific target meaning is inferred from a text. for example, a qa system has to deduce that ?sco sued ibm? is inferred from ?sco won a lawsuit against ibm? to answer ?whom did sco sue??. this type of reasoning has been identified as a core semanticinference paradigm by the generic textual entail ment framework (giampiccolo et al, 2007). an important type of knowledge needed for such inference is entailment rules. 
an entailmentrule specifies a directional inference relation be tween two templates, text patterns with variables, such as ?x win lawsuit against y ? x sue y ?. applying this rule by matching ?x win lawsuit against y ? in the above text allows a qa system to c ? 2008. licensed under the creative commonsattribution-noncommercial-share alike 3.0 unported li cense (http://creativecommons.org/licenses/by-nc-sa/3.0/). some rights reserved.infer ?x sue y ? and identify ?ibm?, y ?s instantiation, as the answer for the above question. entail ment rules capture linguistic and world-knowledge inferences and are used as an important building block within different applications, e.g. (romano et al, 2006). one reason for the limited performance of generic semantic inference systems is the lack of broad-scale knowledge-bases of entailment rules (in analog to lexical resources such as wordnet). supervised learning of broad coverage rule-sets is an arduous task. this sparked intensive research on unsupervised acquisition of entailment rules (and similarly paraphrases) e.g. (lin and pantel, 2001; szpektor et al, 2004; sekine, 2005). most unsupervised entailment rule acquisitionmethods learn binary rules, rules between tem plates with two variables, ignoring unary rules, rules between unary templates (templates withonly one variable). however, a predicate quite of ten appears in the text with just a single variable(e.g. intransitive verbs or passives), where infer ence requires unary rules, e.g. ?x take a nap?x sleep? (further motivations in section 3.1).in this paper we focus on unsupervised learning of unary entailment rules. two learning ap proaches are proposed. in our main approach, rules are learned by measuring how similar the variable instantiations of two templates in a corpusare. in addition to adapting state-of-the-art similar ity measures for unary rule learning, we propose a new measure, termed balanced-inclusion, which balances the notion of directionality in entailment with the common notion of symmetric semantic similarity. in a second approach, unary rules arederived from binary rules learned by state-of-the art binary rule learning methods. we tested the various unsupervised unary rule 849learning methods, as well as a binary rule learn ing method, on a test set derived from a standard ie benchmark. this provides the first comparisonbetween the performance of unary and binary rule sets. several results rise from our evaluation: (a) while most work on unsupervised learning ignored unary rules, all tested unary methods outperformed the binary method; (b) it is better to learn unary rules directly than to derive them from a binary rule-base; (c) our proposed balanced-inclusion measure outperformed all other tested methods interms of f1 measure. moreover, only balancedinclusion improved f1 score over a baseline infer ence that does not use entailment rules at all .we presented two approaches for unsupervised ac quisition of unary entailment rules from regular (non-comparable) corpora. in many nlp applications, such as question an swering (qa) and information extraction (ie), it is crucial to recognize whether a specific target meaning is inferred from a text. moreover, only balancedinclusion improved f1 score over a baseline infer ence that does not use entailment rules at all . for example, a qa system has to deduce that ?sco sued ibm? this provides the first comparisonbetween the performance of unary and binary rule sets. 
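The excerpt names the balanced-inclusion measure without giving its formula, so the sketch below is an assumed stand-in: it combines a directional inclusion term (how much of the left template's weighted argument mass the right template covers) with a symmetric Lin-style similarity, which is enough to show why a rule like "x take a nap -> x sleep" scores higher than its reverse.

import math

def weighted_inclusion(lhs_feats, rhs_feats):
    """directional term: share of the lhs template's (pmi-weighted) argument mass
    that also instantiates the rhs template."""
    total = sum(lhs_feats.values())
    covered = sum(w for f, w in lhs_feats.items() if f in rhs_feats)
    return covered / total if total else 0.0

def symmetric_sim(lhs_feats, rhs_feats):
    """lin-style symmetric similarity over shared argument features."""
    shared = set(lhs_feats) & set(rhs_feats)
    num = sum(lhs_feats[f] + rhs_feats[f] for f in shared)
    den = sum(lhs_feats.values()) + sum(rhs_feats.values())
    return num / den if den else 0.0

def balanced_score(lhs_feats, rhs_feats):
    """geometric mean of the directional and symmetric terms; an assumption,
    since the excerpt does not fully specify balanced-inclusion."""
    return math.sqrt(weighted_inclusion(lhs_feats, rhs_feats) * symmetric_sim(lhs_feats, rhs_feats))

# toy pmi-weighted argument instantiations of two unary templates
nap = {"baby": 2.1, "toddler": 1.7, "passenger": 0.9}                  # "x take a nap"
sleep = {"baby": 1.9, "toddler": 1.5, "passenger": 1.1, "cat": 2.3}    # "x sleep"
print(round(balanced_score(nap, sleep), 3))   # high: "x take a nap -> x sleep"
print(round(balanced_score(sleep, nap), 3))   # lower in the reverse direction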
we implemented the unary rule learning algo rithms described in section 3 and the binary dirt algorithm (lin and pantel, 2001). this section reviews relevant distributional simi larity measures, both symmetric and directional, which were applied for either lexical similarity or unsupervised entailment rule learning. by assuming correct matches in these cases we isolate the recall of the rule-set (along with the seeds), which yields 39% recall. is inferred from ?sco won a lawsuit against ibm? to overcome this limitation, we use a more expressive template struc ture. how ever, 25% of the mentions were missed because of incorrect syntactic matching of correctly learned rules. in the first approach, rules are directly learned based on distributionalsimilarity measures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1109.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1109.txt new file mode 100644 index 0000000000000000000000000000000000000000..698f43b5f9f274950300a6b2e7e5b22e9a131091 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1109.txt @@ -0,0 +1 @@ +the long-term goal of our work is to develop asystem which detects errors in grammar and us age so that appropriate feedback can be given to non-native english writers, a large and growing segment of the world?s population. estimates arethat in china alone as many as 300 million people are currently studying english as a second lan guage (esl). usage errors involving prepositions are among the most common types seen in thewriting of non-native english speakers. for ex ample, (izumi et al, 2003) reported error rates for english prepositions that were as high as 10% ina japanese learner corpus. errors can involve incorrect selection (?we arrived to the station?), ex traneous use (?he went to outside?), and omission (?we are fond null beer?). what is responsiblefor making preposition usage so difficult for non native speakers? c ? 2008. licensed under the creative commonsattribution-noncommercial-share alike 3.0 unported li cense (http://creativecommons.org/licenses/by-nc-sa/3.0/). some rights reserved. at least part of the difficulty seems to be due tothe great variety of linguistic functions that prepositions serve. when a preposition marks the argument of a predicate, such as a verb, an adjective, or a noun, preposition selection is con strained by the argument role that it marks, thenoun which fills that role, and the particular predi cate. many english verbs also display alternations (levin, 1993) in which an argument is sometimes marked by a preposition and sometimes not (e.g., ?they loaded the wagon with hay? / ?they loaded hay on the wagon?). when prepositions introduceadjuncts, such as those of time or manner, selec tion is constrained by the object of the preposition (?at length?, ?in time?, ?with haste?). finally, the selection of a preposition for a given context also depends upon the intended meaning of the writer (?we sat at the beach?, ?on the beach?, ?near the beach?, ?by the beach?). with so many sources of variation in englishpreposition usage, we wondered if the task of se lecting a preposition for a given context might prove challenging even for native speakers. to investigate this possibility, we randomly selected200 sentences from microsoft?s encarta encyclopedia, and, in each sentence, we replaced a ran domly selected preposition with a blank line. 
we then asked two native english speakers to perform a cloze task by filling in the blank with the best preposition, given the context provided by the rest of the sentence. our results showed only about75% agreement between the two raters, and be tween each of our raters and encarta.the presence of so much variability in prepo sition function and usage makes the task of thelearner a daunting one. it also poses special chal lenges for developing and evaluating an nlp error detection system. this paper addresses both the 865 development and evaluation of such a system. first, we describe a machine learning system that detects preposition errors in essays of esl writers. to date there have been relatively few attempts to address preposition error detection,though the sister task of detecting determiner errors has been the focus of more research. our system performs comparably with other leading sys tems. we extend our previous work (chodorow etal., 2007) by experimenting with combination fea tures, as well as features derived from the google n-gram corpus and comlex (grishman et al, 1994).second, we discuss drawbacks in current meth ods of annotating esl data and evaluating errordetection systems, which are not limited to prepo sition errors. while the need for annotation by multiple raters has been well established in nlp tasks (carletta, 1996), most previous work in error detection has surprisingly relied on only one raterto either create an annotated corpus of learner errors, or to check the system?s output. some grammatical errors, such as number disagreement be tween subject and verb, no doubt show very highreliability, but others, such as usage errors involv ing prepositions or determiners are likely to be much less reliable. our results show that relyingon one rater for system evaluation can be problem atic, and we provide a sampling approach which can facilitate using multiple raters for this task. in the next section, we describe a system that automatically detects errors involving incorrect preposition selection (?we arrived to the station?) and extraneous preposition usage (?he went to outside?). in sections 3 and 4, we discuss theproblem of relying on only one rater for exhaus tive annotation and show how multiple raters can be used more efficiently with a sampling approach.finally, in section 5 we present an analysis of com mon preposition errors that non-native speakers make.this paper has two contributions to the field of error detection in non-native writing. we wouldalso like to acknowledge the three anonymous reviewers and derrick higgins for their helpful com ments and feedback. the long-term goal of our work is to develop asystem which detects errors in grammar and us age so that appropriate feedback can be given to non-native english writers, a large and growing segment of the world?s population. and extraneous preposition usage (?he went to outside?). estimates arethat in china alone as many as 300 million people are currently studying english as a second lan guage (esl). in sections 3 and 4, we discuss theproblem of relying on only one rater for exhaus tive annotation and show how multiple raters can be used more efficiently with a sampling approach.finally, in section 5 we present an analysis of com mon preposition errors that non-native speakers make. in the next section, we describe a system that automatically detects errors involving incorrect preposition selection (?we arrived to the station?) 
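A minimal sketch of the classifier-based detection idea described above: scikit-learn's logistic regression stands in for a maximum-entropy model, and the contextual features and training examples are invented. A writer's preposition is flagged when it differs from the model's preferred choice for the same context.

from sklearn.feature_extraction import DictVectorizer
from sklearn.linear_model import LogisticRegression

# tiny invented training set: contextual features -> correct preposition
train = [
    ({"prev_word": "arrived", "next_word": "the", "head_noun": "station"}, "at"),
    ({"prev_word": "depends", "next_word": "the", "head_noun": "weather"}, "on"),
    ({"prev_word": "interested", "next_word": "the", "head_noun": "results"}, "in"),
    ({"prev_word": "arrived", "next_word": "the", "head_noun": "airport"}, "at"),
]
vec = DictVectorizer()
X = vec.fit_transform([f for f, _ in train])
y = [p for _, p in train]

clf = LogisticRegression(max_iter=1000).fit(X, y)

# flag a likely error when the writer's preposition differs from the model's choice
features = {"prev_word": "arrived", "next_word": "the", "head_noun": "station"}
written = "to"   # as in "we arrived to the station"
predicted = clf.predict(vec.transform([features]))[0]
if written != predicted:
    print(f"possible error: wrote '{written}', model prefers '{predicted}'")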
usage errors involving prepositions are among the most common types seen in thewriting of non-native english speakers. we have used a maximum entropy (me) classi fier (ratnaparkhi, 1998) to build a model of correctpreposition usage for 34 common english prepo sitions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1114.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1114.txt new file mode 100644 index 0000000000000000000000000000000000000000..b3bd593d9600bb7b5b5e2057d2d5e726120eb6c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C08-1114.txt @@ -0,0 +1 @@ +a pair of words (petrify:stone) is analogous to another pair (vaporize:gas) when the semantic re lations between the words in the first pair are highly similar to the relations in the second pair. two words (levied and imposed) are synonymousin a context (levied a tax) when they can be interchanged (imposed a tax), they are are antony mous when they have opposite meanings (black c ? 2008, national research council of canada (nrc).licensed to the coling 2008 organizing committee for pub lication in coling 2008 and for re-publishing in any form or medium. and white), and they are associated when they tend to co-occur (doctor and hospital).on the surface, it appears that these are four distinct semantic classes, requiring distinct nlp al gorithms, but we propose a uniform approach to all four. we subsume synonyms, antonyms, and associations under analogies. in essence, we say that x and y are antonyms when the pair x:y is analogous to the pair black:white, x and y are synonyms when they are analogous to the pair levied:imposed, and x and y are associated when they are analogous to the pair doctor:hospital. there is past work on recognizing analogies(reitman, 1965), synonyms (landauer and dumais, 1997), antonyms (lin et al, 2003), and asso ciations (lesk, 1969), but each of these four tasks has been examined separately, in isolation from the others. as far as we know, the algorithm proposed here is the first attempt to deal with all four tasks using a uniform approach. we believe that it isimportant to seek nlp algorithms that can han dle a broad range of semantic phenomena, becausedeveloping a specialized algorithm for each phe nomenon is a very inefficient research strategy.it might seem that a lexicon, such as word net (fellbaum, 1998), contains all the information we need to handle these four tasks. however, weprefer to take a corpus-based approach to seman tics. veale (2004) used wordnet to answer 374 multiple-choice sat analogy questions, achievingan accuracy of 43%, but the best corpus-based ap proach attains an accuracy of 56% (turney, 2006). another reason to prefer a corpus-based approachto a lexicon-based approach is that the former re quires less human labour, and thus it is easier to extend to other languages.in section 2, we describe our algorithm for rec ognizing analogies. we use a standard supervised 905 machine learning approach, with feature vectorsbased on the frequencies of patterns in a large cor pus. we use a support vector machine (svm) to learn how to classify the feature vectors (platt, 1998; witten and frank, 1999). section 3 presents four sets of experiments. 
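The word-pair classification setup just described, feature vectors of pattern frequencies fed to an SVM, can be sketched as follows; the pattern counts are invented, and scikit-learn's SVC stands in for the Platt (1998) SVM implementation used in the paper.

from sklearn.svm import SVC
from sklearn.feature_extraction import DictVectorizer

# invented corpus frequencies of joining patterns for a few labeled word pairs
pairs = [
    ({"X such as Y": 3, "X and Y": 12}, "synonym"),                     # levied:imposed
    ({"X is the opposite of Y": 7, "neither X nor Y": 4}, "antonym"),   # black:white
    ({"X at the Y": 9, "X in the Y": 6}, "associated"),                 # doctor:hospital
    ({"X or Y": 10, "X and Y": 8}, "synonym"),
    ({"not X but Y": 5, "X is the opposite of Y": 3}, "antonym"),
]
vec = DictVectorizer()
X = vec.fit_transform([f for f, _ in pairs])
y = [label for _, label in pairs]

clf = SVC(kernel="linear").fit(X, y)

# classify a new pair by the patterns it co-occurs with in a corpus
new_pair = {"X is the opposite of Y": 6, "neither X nor Y": 2}
print(clf.predict(vec.transform([new_pair]))[0])  # likely: antonym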
we apply our algorithm for recognizing analogies to multiple-choice analogy questions from the sat college entrance test, multiple-choice synonym questions from the toefl (test of english as aforeign language), esl (english as a second language) practice questions for distinguishing syn onyms and antonyms, and a set of word pairs thatare labeled similar, associated, and both, devel oped for experiments in cognitive psychology.we discuss the results of the experiments in section 4. the accuracy of the algorithm is competitive with other systems, but the strength of the al gorithm is that it is able to handle all four tasks, with no tuning of the learning parameters to the particular task. it performs well, although it iscompeting against specialized algorithms, devel oped for single tasks.related work is examined in section 5 and limitations and future work are considered in sec tion 6. we conclude in section 7.in this paper, we have described a uniform approach to analogies, synonyms, antonyms, and as sociations, in which all of these phenomena are subsumed by analogies. a pair of words (petrify:stone) is analogous to another pair (vaporize:gas) when the semantic re lations between the words in the first pair are highly similar to the relations in the second pair. acknowledgementsthanks to joel martin and the anonymous review ers of coling 2008 for their helpful comments. we conclude in section 7. we view the problem ofrecognizing analogies as the classification of se mantic relations between words. some work is required to fit each probleminto the general framework of pairclass (supervised classification of word pairs) but the core al gorithm is the same in each case. this paper is a small step towards that goal. other potential applications in clude any task that involves semantic relations, such as word sense disambiguation, informationretrieval, information extraction, and metaphor in terpretation. the main limitation of pairclass is the need for a large corpus. it performs well, although it iscompeting against specialized algorithms, devel oped for single tasks.related work is examined in section 5 and limitations and future work are considered in sec tion 6. we may view the task of recognizing word analogies as a problem of classifying word pairs (see table 1). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-1011.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-1011.txt new file mode 100644 index 0000000000000000000000000000000000000000..f8eb850d17252e35bfc0ddb4c872229a70993ee7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-1011.txt @@ -0,0 +1 @@ +highly accurate dependency parsers have high de mands on resources and long parsing times. the training of a parser frequently takes several days and the parsing of a sentence can take on averageup to a minute. the parsing time usage is impor tant for many applications. for instance, dialog systems only have a few hundred milliseconds toanalyze a sentence and machine translation sys tems, have to consider in that time some thousandtranslation alternatives for the translation of a sen tence. parsing and training times can be improved by methods that maintain the accuracy level, or methods that trade accuracy against better parsing times. software developers and researchers areusually unwilling to reduce the quality of their ap plications. 
consequently, we have to consider atfirst methods to improve a parser, which do not in volve an accuracy loss, such as faster algorithms,faster implementation of algorithms, parallel al gorithms that use several cpu cores, and feature selection that eliminates the features that do not improve accuracy. we employ, as a basis for our parser, the secondorder maximum spanning tree dependency pars ing algorithm of carreras (2007). this algorithmfrequently reaches very good, or even the best la beled attachment scores, and was one of the most used parsing algorithms in the shared task 2009 of the conference on natural language learning (conll) (hajic? et al, 2009). we combined thisparsing algorithm with the passive-aggressive perceptron algorithm (crammer et al, 2003; mcdon ald et al, 2005; crammer et al, 2006). a parser build out of these two algorithms provides a good baseline and starting point to improve upon the parsing and training times. the rest of the paper is structured as follows. in section 2, we describe related work. in section 3, we analyze the time usage of the components of 89the parser. in section 4, we introduce a new kernel that resolves some of the bottlenecks and im proves the performance. in section 5, we describethe parallel parsing algorithms which nearly allowed us to divide the parsing times by the number of cores. in section 6, we determine the opti mal setting for the non-projective approximationalgorithm. in section 7, we conclude with a sum mary and an outline of further research.noun phrase (np) coreference resolution is the task of identifying which nps (or mentions) refer to the same real-world entity or concept. we discuss our cluster-ranking approach in section 4, evaluate it in section 5, and conclude in section 6. section 3 describes our baseline coreference models: mentionpair, entity-mention, and mention-ranking. traditional learning-based coreference resolvers operate by training a model for classifying whether two mentions are co-referring or not (e.g., soon et al. (2001), ng and cardie (2002b), kehler et al. overall, we believe that our cluster-ranking approach advances the state-of-the-art in coreference resolution both theoretically and empirically. (2004), ponzetto and strube (2006)). joint inference is different from our jointlearning approach, which allows the two tasks to be learned jointly and not independently. we thank the three anonymous reviewers for their invaluable comments on the paper. we have presented a cluster-ranking approach that recasts the mention resolution process as the problem of finding the best preceding cluster to link an active mention to. section 2 discusses related work. this work was supported in part by nsf grant iis-0812261. heuristic-based cluster ranking. despite their initial successes, these mention-pair models have at least two major weaknesses. the rest of the paper is structured as follows. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-1152.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-1152.txt new file mode 100644 index 0000000000000000000000000000000000000000..a80d3552b939f83fb767a121f95b84e4929721e7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-1152.txt @@ -0,0 +1 @@ +sentence simplification transforms long and dif ficult sentences into shorter and more readable ones. this helps humans read texts more easilyand faster. 
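The passive-aggressive perceptron used above as the parser's online learner admits a short sketch. This is a simplified single update over toy feature vectors, not the full second-order spanning-tree parser: the weights are moved just enough for the gold tree to outscore the predicted one by the loss.

import numpy as np

def pa_update(w, feats_gold, feats_pred, loss):
    """one passive-aggressive update: shift the weights so the gold structure
    outscores the prediction by at least the given loss (a simplified sketch)."""
    diff = feats_gold - feats_pred
    norm_sq = np.dot(diff, diff)
    if norm_sq == 0.0:
        return w
    margin = np.dot(w, diff)                     # current advantage of the gold tree
    tau = max(0.0, (loss - margin) / norm_sq)    # minimal step that satisfies the margin
    return w + tau * diff

# toy feature vectors for a gold and a (wrong) predicted dependency tree
w = np.zeros(4)
phi_gold = np.array([1.0, 0.0, 2.0, 1.0])
phi_pred = np.array([0.0, 1.0, 2.0, 0.0])
w = pa_update(w, phi_gold, phi_pred, loss=2.0)    # loss = number of wrong heads
print(np.dot(w, phi_gold) > np.dot(w, phi_pred))  # True after the update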
reading assistance is thus an important application of sentence simplification, espe cially for people with reading disabilities (carrollet al, 1999; inui et al, 2003), low-literacy read ers (watanabe et al, 2009), or non-native speakers (siddharthan, 2002).not only human readers but also nlp applications can benefit from sentence simplification. the original motivation for sentence simplification is using it as a preprocessor to facili tate parsing or translation tasks (chandrasekar et al., 1996). complex sentences are considered as stumbling blocks for such systems. more recently,sentence simplification has also been shown help ful for summarization (knight and marcu, 2000), ? this work has been supported by the emmy noether program of the german research foundation (dfg) underthe grant no. gu 798/3-1, and by the volkswagen founda tion as part of the lichtenberg-professorship program under the grant no. i/82806.sentence fusion (filippova and strube, 2008b), se mantic role labeling (vickrey and koller, 2008), question generation (heilman and smith, 2009), paraphrase generation (zhao et al, 2009) and biomedical information extraction (jonnalagadda and gonzalez, 2009).at sentence level, reading difficulty stems either from lexical or syntactic complexity. sen tence simplification can therefore be classifiedinto two types: lexical simplification and syntac tic simplification (carroll et al, 1999). these two types of simplification can be further implemented by a set of simplification operations. splitting, dropping, reordering, and substitution are widely accepted as important simplification operations. the splitting operation splits a long sentence intoseveral shorter sentences to decrease the complex ity of the long sentence. the dropping operation further removes unimportant parts of a sentence to make it more concise. the reordering operationinterchanges the order of the split sentences (sid dharthan, 2006) or parts in a sentence (watanabeet al, 2009). finally, the substitution operation re places difficult phrases or words with their simpler synonyms.in most cases, different simplification operations happen simultaneously. it is therefore nec essary to consider the simplification process as a combination of different operations and treatthem as a whole. however, most of the existing models only consider one of these operations. siddharthan (2006) and petersen and ostendorf (2007) focus on sentence splitting, while sen tence compression systems (filippova and strube, 2008a) mainly use the dropping operation. as faras lexical simplification is concerned, word substitution is usually done by selecting simpler syn onyms from wordnet based on word frequency (carroll et al, 1999).in this paper, we propose a sentence simplifica tion model by tree transformation which is based 1353 on techniques from statistical machine translation (smt) (yamada and knight, 2001; yamada andknight, 2002; graehl et al, 2008). our model in tegrally covers splitting, dropping, reordering and phrase/word substitution. the parameters of ourmodel can be efficiently learned from complex simple parallel datasets. the transformation froma complex sentence to a simple sentence is con ducted by applying a sequence of simplification operations. an expectation maximization (em) algorithm is used to iteratively train our model. we also propose a method based on monolingualword mapping which speeds up the training pro cess significantly. 
finally, a decoder is designed to generate the simplified sentences using a greedy strategy and integrates language models.in order to train our model, we further com pile a large-scale complex-simple parallel dataset(pwkp) from simple english wikipedia1 and en glish wikipedia2, as such datasets are rare.we organize the remainder of the paper as follows: section 2 describes the pwkp dataset. sec tion 3 presents our tsm model. sections 4 and 5 are devoted to training and decoding, respectively. section 6 details the evaluation. the conclusions follow in the final section.sentence simplification transforms long and dif ficult sentences into shorter and more readable ones. in this paper, we presented a novel large-scale par allel dataset pwkp for sentence simplification. the conclusions follow in the final section. the evaluation shows that tsm can achieve better overall readability scores than a set of baseline systems. this helps humans read texts more easilyand faster. section 6 details the evaluation. our evaluation dataset consists of 100 complex sentences and 131 parallel simple sentences from pwkp. in the future, we will investigate more sophisticated features and rules to enhance tsm. we collected a paired dataset from the english wikipedia and simple english wikipedia. sections 4 and 5 are devoted to training and decoding, respectively. we first per form 1 to 1 mapping with sentence-level tf*idf and then combine the pairs with the same complex sentence and adjacent simple sentences. they have not been used for training.four baseline systems are compared in our eval uation. as the dependency. but the parser returns ?su perset? obviously, the purpose of mosesis cross-lingual translation rather than monolin 1358 gual simplification. should be a dependency of ?called?. the original motivation for sentence simplification is using it as a preprocessor to facili tate parsing or translation tasks (chandrasekar et al., 1996). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-2005.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-2005.txt new file mode 100644 index 0000000000000000000000000000000000000000..9bea20075fd4c8097fd5135f17bcad1a44859d67 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-2005.txt @@ -0,0 +1 @@ +twitter is one of the most popular social network websites and has been growing at a very fast pace. the number of twitter users reached an estimated75 million by the end of 2009, up from approx imately 5 million in the previous year. through the twitter platform, users share either informationor opinions about personalities, politicians, prod ucts, companies, events (prentice and huffman, 2008) etc. this has been attracting the attention of different communities interested in analyzing its content. sentiment detection of tweets is one of the basicanalysis utility functions needed by various applications over twitter data. many systems and ap proaches have been implemented to automatically detect sentiment on texts (e.g., news articles, web reviews and web blogs) (pang et al, 2002; pang and lee, 2004; wiebe and riloff, 2005; glance et al, 2005; wilson et al, 2005). most of theseapproaches use the raw word representation (n grams) as features to build a model for sentiment detection and perform this task over large pieces of texts. 
however, the main limitation of usingthese techniques for the twitter context is mes sages posted on twitter, so-called tweets, are veryshort. the maximum size of a tweet is 140 char acters. in this paper, we propose a 2-step sentiment analysis classification method for twitter, whichfirst classifies messages as subjective and ob jective, and further distinguishes the subjectivetweets as positive or negative. to reduce the la beling effort in creating these classifiers, instead of using manually annotated data to compose thetraining data, as regular supervised learning ap proaches, we leverage sources of noisy labels asour training data. these noisy labels were pro vided by a few sentiment detection websites over twitter data. to better utilize these sources, we verify the potential value of using and combining them, providing an analysis of the provided labels, examine different strategies of combining these sources in order to obtain the best outcome; and, propose a more robust feature set that captures a more abstract representation of tweets, composedby meta-information associated to words and spe cific characteristics of how tweets are written. by using it, we aim to handle better: the problem of lack of information on tweets, helping on thegeneralization process of the classification algo rithms; and the noisy and biased labels provided by those websites.the remainder of this paper is organized as fol lows. in section 2, we provide some context about messages on twitter and about the websites used as label sources. we introduce the features used in the sentiment detection and also provide a deep analysis of the labels generated by those sources in section 3. we examine different strategies of 36 combining these sources and present an extensive experimental evaluation in section 4. finally, we discuss previous works related to ours in section 5and conclude in section 6, where we outline direc tions and future work.as future work, we want to perform a more fine grained analysis of sentences in order to identifyits main focus and then based the sentiment clas sification on it. we have presented an effective and robust sen timent detection approach for twitter messages, which uses biased and noisy labels as input to build its models. finally, we discuss previous works related to ours in section 5and conclude in section 6, where we outline direc tions and future work. twitter is one of the most popular social network websites and has been growing at a very fast pace. in this section, we give some context about twitter messages and the sources used for our data-driven approach. we examine different strategies of 36 combining these sources and present an extensive experimental evaluation in section 4. http://bit.ly/9k4n9p #obama figure 1: example of a tweet. we showed in section 4 that our approach works better than theirs for this problem, obtaining lower error rates. there is a rich literature in the area of sentiment detection (see e.g., (pang et al, 2002; pang and lee, 2004; wiebe and riloff, 2005; go et al,2009; glance et al, 2005). the number of twitter users reached an estimated75 million by the end of 2009, up from approx imately 5 million in the previous year. 
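A minimal sketch of the two-step classification scheme described above, with invented noisily labeled tweets. For brevity it uses plain word and bigram counts, whereas the paper argues for more abstract meta-features, and scikit-learn classifiers stand in for whatever learners the authors used.

from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression

# tiny invented tweets with noisy labels, standing in for labels scraped
# from sentiment-detection websites
subjectivity_data = [
    ("new phone battery died in two hours, so annoying", "subjective"),
    ("i love this new album", "subjective"),
    ("the match starts at 9pm tonight", "objective"),
    ("company x releases quarterly report tomorrow", "objective"),
]
polarity_data = [
    ("new phone battery died in two hours, so annoying", "negative"),
    ("i love this new album", "positive"),
    ("worst airline ever, lost my bag again", "negative"),
    ("great show, totally worth it", "positive"),
]

def train(data):
    vec = CountVectorizer(ngram_range=(1, 2))
    X = vec.fit_transform([t for t, _ in data])
    clf = LogisticRegression(max_iter=1000).fit(X, [y for _, y in data])
    return vec, clf

subj_vec, subj_clf = train(subjectivity_data)
pol_vec, pol_clf = train(polarity_data)

def classify(tweet):
    # step 1: subjective vs objective; step 2: polarity for subjective tweets only
    if subj_clf.predict(subj_vec.transform([tweet]))[0] == "objective":
        return "objective"
    return pol_clf.predict(pol_vec.transform([tweet]))[0]

print(classify("i really hate waiting in line"))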
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-2028.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-2028.txt new file mode 100644 index 0000000000000000000000000000000000000000..65da03637189ec57dcba79b2bc5676bb6d6a9bb4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C10-2028.txt @@ -0,0 +1 @@ +a huge amount of social media including news,forums, product reviews and blogs contain nu merous sentiment-based sentences. sentiment is defined as ?a personal belief or judgment that ?* both authors equally contributed to this paper.is not founded on proof or certainty?1. senti ment expressions may describe the mood of thewriter (happy/sad/bored/grateful/...) or the opin ion of the writer towards some specific entity (x is great/i hate x, etc.). automated identification of diverse sentimenttypes can be beneficial for many nlp systems such as review summarization systems, dia logue systems and public media analysis systems. sometimes it is directly requested by the user toobtain articles or sentences with a certain senti ment value (e.g give me all positive reviews of product x/ show me articles which explain why movie x is boring). in some other cases obtaining sentiment value can greatly enhance information extraction tasks like review summarization. whilethe majority of existing sentiment extraction sys tems focus on polarity identification (e.g., positive vs. negative reviews) or extraction of a handful of pre-specified mood labels, there are many useful and relatively unexplored sentiment types. sentiment extraction systems usually require an extensive set of manually supplied sentiment words or a handcrafted sentiment-specific dataset. with the recent popularity of article tagging, some social media types like blogs allow users to add sentiment tags to articles. this allows to use blogsas a large user-labeled dataset for sentiment learning and identification. however, the set of sentiment tags in most blog platforms is somewhat re stricted. moreover, the assigned tag applies to the whole blog post while a finer grained sentiment extraction is needed (mcdonald et al, 2007).with the recent popularity of the twitter micro blogging service, a huge amount of frequently 1wordnet 2.1 definitions. 241self-standing short textual sentences (tweets) became openly available for the research community. many of these tweets contain a wide vari ety of user-defined hashtags. some of these tagsare sentiment tags which assign one or more senti ment values to a tweet. in this paper we propose away to utilize such tagged twitter data for classi fication of a wide variety of sentiment types from text. we utilize 50 twitter tags and 15 smileys assentiment labels which allow us to build a classifier for dozens of sentiment types for short tex tual sentences. in our study we use four different feature types (punctuation, words, n-grams and patterns) for sentiment classification and evaluate the contribution of each feature type for this task.we show that our framework successfully identi fies sentiment types of the untagged tweets. we confirm the quality of our algorithm using human judges. we also explore the dependencies and overlap between different sentiment types represented by smileys and twitter tags. section 2 describes related work. section 3 details classification features and the algorithm, while section 4 describes the dataset and labels. 
automated and manual evaluation protocols and results are presented in section 5, followed by a short discussion.a huge amount of social media including news,forums, product reviews and blogs contain nu merous sentiment-based sentences. we presented a framework which allows an au tomatic identification and classification of various sentiment types in short text fragments which isbased on twitter data. automated and manual evaluation protocols and results are presented in section 5, followed by a short discussion. while hashtag labels arespecific to twitter data, the obtained feature vectors are not heavily twitter-specific and in the fu ture we would like to explore the applicability oftwitter data for sentiment multi-class identifica tion and classification in other domains. section 3 details classification features and the algorithm, while section 4 describes the dataset and labels. sentiment is defined as ?a personal belief or judgment that ?* both authors equally contributed to this paper.is not founded on proof or certainty?1. senti ment expressions may describe the mood of thewriter (happy/sad/bored/grateful/...) to the best of our knowledge, there are no works employing twitter hashtags to learn a wide range of emotions and the re lations between the different emotions. sentiment analysis tasks typically combine twodifferent tasks: (1) identifying sentiment expres sions, and (2) determining the polarity (sometimes called valence) of the expressed sentiment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C86-1016.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C86-1016.txt new file mode 100644 index 0000000000000000000000000000000000000000..c78444f30cf368b65b7f070d7f22ab2d729e6a79 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C86-1016.txt @@ -0,0 +1 @@ +at the other end of the range covered by d-patr are unification-based categorial grammars (klein, steedman, uszkoreit, wittenburg) in which all the syntactic information is incorporated in the lexicon and the remaining few combinatorial rules that build phrases are function application and composition. definite-clause grammars (pereira and warren) can also be encoded in the patr formalism. what these approaches have in common is that syntactic rules and lexieal entries can be written down as sets of attribute-value pairs. moreover, because a value at the end of one path of attributes can be shared by another path, the structures that are generated by such grammars can be thought of as directed graphs cdags"). unification is the key operation for building these structures. because unification is associative and commutative, statements in a unification-based grammar formalism are order-independent and bidirectional with respect to parsing and generation. for a comprehensive introduction tounification-based approaches togrammar, see shieber 1986 (forthcoming). the idea that led to the present version of d-patr was to produce a simple compact system for experimenting with unification-based grammars that would run on machines maller than the symbolics 3600 for which the original tati~ implementation at sri had been created. the first version of i)-patr, initially called }lug, was written at the scandinavian summer workshop for computational linguistics in helsinki, finland, at the end of august 1985. 
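As a rough illustration of how hashtags can serve as noisy sentiment-type labels, the sketch below strips a known sentiment tag from a tweet, keeps it as the label, and extracts two of the four feature types mentioned above (words and punctuation). The tag list and features are invented and far smaller than the paper's 50 tags and 15 smileys.

import re
from collections import Counter

SENTIMENT_TAGS = {"#bored", "#happy", "#sad", "#grateful", "#annoyed"}  # toy subset

def tweet_to_example(tweet):
    """use a sentiment hashtag as the (noisy) label, remove it from the text,
    then extract simple word and punctuation features."""
    tags = {t.lower() for t in re.findall(r"#\w+", tweet)} & SENTIMENT_TAGS
    if not tags:
        return None                          # not usable as labeled data
    label = tags.pop()
    text = re.sub(r"#\w+", "", tweet).strip().lower()
    features = Counter(text.split())         # word features
    features["!_count"] = tweet.count("!")   # punctuation features
    features["?_count"] = tweet.count("?")
    return features, label

print(tweet_to_example("stuck at the airport for 6 hours... #bored"))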
although the actual notation for writing rules in d-patr in some respects differs from the notation in the original patr system, essentially both systems implement the same grammar formalism. to emphasize this point, the two implementations are now called z-patr (zeta-lisp patr) and d-patr (interlisp-d patr). a number of innovations that came in with d-patr (hug) have since migrated to z-patr. a case in point is the method for minimizing copying in unification that is discussed in the section on parsing and unification. other implementation differences remain, for example in the parsing algorithm and in the treatment of gaps, but grammars written for d-patr are convertible into z-patr format, and vice versa. d-patr: a development environment for unification-based grammars. lauri karttunen, artificial intelligence center, sri international, 333 ravenswood avenue, menlo park, ca 94025 usa, and center for the study of language and information, stanford university. 1 introduction. d-patr is a development environment for unification-based grammars on xerox 1100 series workstations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C86-1045.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C86-1045.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c1f4d2505508f2c416dfaffb17622cc96a9b5be --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C86-1045.txt @@ -0,0 +1 @@ +the work on merging strategies from unification grammars and categorial grammars has its origins in several research efforts that have been pursued in parallel. one of them is the grammar development on the patr system (shieber et al, 1983; shieber, 1984) at sri. for quite a while now i have been using the excellent facilities of patr for the design and testing of experimental cugs. such grammars currently run on two patr implementations: stuart shieber's zetalisp version on the symbolics 3600 and lauri karttunen's interlisp-d version on the xerox 1109. the work on cugs has influenced our efforts to develop a larger patr grammar, and will do so even more in the future. on the theoretical side, this work is part of ongoing research on such topics as word order variation, modification, and german syntax within projects at sri and csli (stanford university). the structure of the paper reflects the diverse nature of the enterprise. in the first section, i will introduce the basic notions of cugs and demonstrate them through examples in patr notation. the second section discusses the motivation for this work and some of its theoretical implications. the third section sketches a linguistically motivated cug framework with a strong lexical syntax that accommodates word order variation. the paper concludes with a brief discussion of possible cug approaches to long-distance dependencies. 1. basic notions of categorial unification grammars. 1.1. unification grammars and categorial grammars. both terms, unification grammar (ug) and categorial grammar (cg), stand for whole families of related grammar formalisms whose basic notions are widely known. yet, for the characterization of the class of formalisms i want to discuss, it will be useful to review the most central concepts of both ug and cg. unification grammar formalisms employ complex feature structures as their syntactic representations. these structures encode partial information about constituents.
either term or graph unification is utilized as the main operation for checking, propagating, and merging of the information in these complex representations. most unification grammars also use the complex feature structures for the linking of syntactic and semantic information. in traditional categorial grammars, all information about possible syntactic ombinations of constituents is encoded in their categories. those grammars allow only binary combinations. one of the two combined constituents, the functor, encodes the combination funtion, the other constituent serves as the argument to this function. instead ot7 phrase structure rules, the grammar contains one or, in some formalisms, two combination rules that combine a functor and an argument by applying the function encoded in the functor to the argument constituent. most categorial grammars only combine constituents whose terminal strings concatenate in the input string, but this need not be so. in most categorial grammar formalisms, it is assumed that the syntactic functor-argument structure in the corresponding compositional semantics. 187 there are usually two types of grammatical categories in a categorial grammar, basic and derived ones. basic categories are just category symbols, derived categories are functions from one (derived or basic) category to another. a derived category that encodes a function from category a to category b might be written b/a if the functor combines with an argument to its right or b~, if it expects the argument to its left. thus, if we assume just two basic categories, n and s, then n/s, s/n, n\s, s\n, (s\n)/n, (n/s\(s\(n/n)), etc. are also categories. not all of these categories will ever occur in the derivation of sentences. the set of actually occurring categories depends on the lexical categories of the language. assume the following simple sample grammar: (2) basic categories: n, s lexical categories: n (paul, peter) (s\n)fn (likes) the grammar is used for the sample derivation in (3): (3) peter likes paul n (s\n)fin n skn s it should be clear from my brief description that the defining characteristics of unification grammar have nothing to do with the ones of categorial grammar. we will see that the properties of both grammar types actually complement each other quite wetl. 1.2. a sample cug in patr notat ion since the first categorial unification grammars were written in the patr formalism and tested on the patr systems implemented at sri, and since patr is especially well suited for the emulation of other grammar formalisms, i will use its notation. the representations in patr are directed acyclic graphs (dags) 2 . rules have two parts, a head and a body. the head is a context-free rewrite rule and the body is a dag. here is an example, a simple rule that forms a sentence by combining anoun phrase with a verb phrase. 188 (4) head xo -~ x1, x2 body in unification otation = s = np = vp = body in graph notation xo r s np the rule states that two constituents x1 and x2 can combine to form a constituent x0 if the terminal string covered by x1 immediately precedes the terminal string of x2 and if the dags of x0, x1, and x2 unify with the x0, x1, and x2 subgraphs of the rule body, respectively. i will now show the most straight-forward encoding of a categorial grammar in this notation. there are two types of constituent graphs. 
constituent graphs for basic categories are of the following form: (5) n s of course, there might be more features associated with the constituent: (6) /oe 7 n s finite 3 sg derived constituents have graphs of the following form: (7) arg (t0b) backward functional application (bfa) value -~ functor argument < value > = < functor va l> = :--: left. this is the graph associated with the vp likes paul: in graph notation: (8) ,. /~ left /~ agr ca~//pers / form cat/pers~nu m s finite n 3 sg it corresponds to the derived-category s mboh (9) s \ n form : finite pers : 3 num: sg (10a) and (10b) are the rules that combine constituents. as in tradit ional categorial grammars, two such rules sufice. (10a) forward functional application (ffa) value -~ functor argument = = = right. in graph notation: val u e~j -~~'~. / funct? r l . ~rgu right ment val u e ~- - j j -~-~rg u ment / left if backward functional application is used to combine the constituents peter and likes paul, the result is a finite sentence. however, if the same rule is applied to the identical constituents likes paul and likes paul, again a finite sentence is obtained. '\]\['his is so because the graph for likes paul actually unifies with the value of arg in the same graph. this can be easily remedied by modifying the graph for the vp slightly. by st ipulat ing that the argument must not have an unfilled argument position, one can rule out derivcd categories as subject arguments tbr the vp: (ii) /0o-i /?e?tum s finite n 3 sg 1.3. extens ions to the basic formal i sm. in this subsection \[want to discuss very briefly a few extensions of' the basic model that make it more suitable for the encoding of natural- language rammars. the first one is the sorting of fimctors according to their own syntactic category. this move might be described alternat ively as defining the type of a constituent as being defined by both a set of syntactic (and semantic) 189 attributes and a function from categories to categories. this function is also expressed as the value of an attribute. for a basic category the value of the function attribute is nil. the following graph is a simplified example of a functor category (prenominal djective in a language with case and number agreement within the np). ~ ~/ ~ ~'~unction ca;~/ ~s: :m - -~gr the combination rules need accordingly. this is the modified functional application. to be changed rule of forward value -~ functor argument = < argument > = < functor function arg > = right. in a traditional categorial grammar, a derived category is exhaustively described by the argument and value categories. but often, syntacticians want to make more fine grained distinctions. an example is vp modification. in a traditional categorial grammar, two different vp modifiers, lets say an adverb and an adverbial clause, would receive the same translation. (12) peter called him angrily n (s\n)fn n (s\n)/(s~q) (13) peter called him at work n (s\n)/n n (s\n)/(s~an) 190 but what should be the category for very? if it receives the category ((s\n)\(s\n))/((s\n)\(s~n)) to allow the derivation of (14), the ungrammatical sentence (15) is also permitted. (14) peter called him very angrily n (s\n)/n n ((s\n)\(sln))/ (s\n)/(s~x\[) ((s\n)\(s~n')) (15) *peter called him very n (s\n)/n n ((s\n)\(s~))/ ((s\n)\(s\n)) at work (s\n)/(s~) if functor categories are permitted to carry features of their own that are not necessarily bound to to any features of their argument and value categories, this problem disappears. 
adverbs and adverbial clauses could receive different features even if their categories encode the same combination function. another solution to the problem involves the encoding of the difference in the value part of the functor. yet this solution is not only unintuitive but also contradicts a linguistic generalization. it is unintuitive because there is no difference in the distribution of the resulting vps. the only difference holds between the modifiers themselves. the gene~:alization that is violated by the encoding of the difference in the value subgraphs is the endocentricity of the vp. the modified vp shares all syntactic features with its head, the lower vp. yet the feature that indicates the difference between adverbs and adverbial phrases could not be in both the argument and the value parts of the functor, otherwise iterations of the two types of modifiers as they occur in the following pair of sentences would be ruled out. (16a) peter called him very angrily at work. (16b) peter called him at work very angrily. another augmentation is based on the patr strategy for linking syntax and semantics. most grammars written in patr use the constituent graphs also for encoding semantic information. every constituent has an attribute called trans or semantics. the value of this attribute contains minimally the internal semantic fnnction-argument structure of the constituent, but may also encode additional semantic information. the separate encoding of the semantics allows for a compositional semantics even in construction in which syntactic and semantic structure divert as in certain raising constructions. the following graph for a ficticious prenominal adjective that was introduced earlier contains translation attributes for the functor, the argument and the value. the meaning of the adjective is indicated by the atom red. cat ~ / functi% ~rans adj acc ing ~._~g red at first glance, the lexical graphs--even the ones that are used in the highly simplified examples--seem to exhibit an excessive degree of complexity and redundancy. however, the lexical approach to syntax is built on the assumption that the lexicon is structured. to create a lexicon that is structured according to linguistic generalizations, weintroduced lexical templates early on in the development of patr. templates are graphs that contain structure shared by a class of lexical entries. lexical graphs can be partially or fully defined in terms of templates, which themselves can be defined in terms of templates. if a template name appeam in the definition of some graph, the graph is simply unified with the graph denoted by the template. the next augmentation is already built into the formalism. categorial grammarians have recognized the limitations of fimctional application as the sole mode of combining constituents for a long time. one of the obvious extensions to classical categorial grammar was the utilization of functional composition as a further combination mode. a good example of a categorial grammar that employs both functional application and functional composition is steedman (1985). forward functional composition permits the following combination ofcategories: (21) a/b + b/c = a/c the resulting category inherits the argument place for c from the argument b/c. neither steedman's nor any other cg i am aware of permits functional composition i its full generality. 
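as an illustration of the two combination modes just discussed, here is a small python sketch that applies forward functional application and forward functional composition to categories written in slash notation; the string encoding of categories is a simplification introduced here for readability, not the dag encoding used in the paper.

def forward_apply(functor, argument):
    # functional application:  a/b + b  =>  a
    if "/" in functor:
        value, arg = functor.rsplit("/", 1)
        if arg == argument:
            return value
    return None

def forward_compose(functor, argument):
    # functional composition:  a/b + b/c  =>  a/c
    if "/" in functor and "/" in argument:
        value, arg = functor.rsplit("/", 1)
        inner_value, inner_arg = argument.rsplit("/", 1)
        if arg == inner_value:
            return value + "/" + inner_arg
    return None

print(forward_apply("a/b", "b"))       # a
print(forward_compose("a/b", "b/c"))   # a/c  -- the argument place for c is inherited
print(forward_compose("a/b", "c/d"))   # None -- the categories do not fit together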
in order to prevent overgeneration, functional composition as well as other combination modes that are discussed by steedman are restricted to apply to certain categories only. this somehow violates the spirit of a categorial grammar. steedman's combination rules, for instance, are net universal. in cug, functional composition is subsumed under functional application. it is the functor category that determines whether simple functional application, or functional composition, or either one may take place. conjunction is a good case for demonstrating the versatility. consider the following sentences: 3 (22a) peter andpaul like bananas. (22b) peter likes bananas and paul likes oranges. (22c) peter likes and buys bananas. the conjunction and may combine two simple argument categories (22a), two functors with one unfilled argument position (22b), or two functors with more than one unfilled argument position (22c). if the conjuncts have unfilled argument positions, the conjoined phrase needs to inherit them through functional composition. the simplified lexical graph for and is given under (23). in order to avoid a thicket of crossing edges, i have expressed some of the relevant bindings by indices. 191 (23) c ~ .. r ( the most appealing feature of this way of utilizing functional composition is that no additional combinators are required. no restriction on such a rule need to be formulated. it is only the lexical entries for functors that either demand, permit, or forbid functional composition. extensions to the formalism that i have experimented with that cannot be discussed in the frame of this paper are the use of multiple stacks for leftward and rightward arguments and the dcg-like encoding of the ordering positions in the graphs. in sections 3. and 4., i will discuss further extensions ofthe formalism and specific linguistic analyses. the following section contains a summary of the motivations for working on and with cug and the main objectives of this work.order variat ion worder order variation has always been one of the hardest problems for categorial grammars. rules that propagate gap information might also include rules that permit parasitic gaps along the lines of steedman's rules of functional substitution. the work on merging strategies from unification grammars and categorial grammars has its origins in several research efforst that have been pursued in parallel. one of them is the grammar development on the patr system (shieber et al, 1983; shieber, 1984) at sri. functional composition together with type-raising can be used to obtain all permutations of the sentences that are generated by a traditional categorial grammar. the following section contains a summary of the motivations for working on and with cug and the main objectives of this work. and 4., i will discuss further extensions ofthe formalism and specific linguistic analyses. in a cug that only contains two (or at least very ? few) rules, the first method of duplicating rules appears preferrable over the gap threading approach. it is only the lexical entries for functors that either demand, permit, or forbid functional composition. in sections 3. totally free word order does therefore not pose an unsurmountable problem to the categorial approach. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-1016.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-1016.txt new file mode 100644 index 0000000000000000000000000000000000000000..44e46301bb101817e716b0d6f9a60f19ee8ae069 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-1016.txt @@ -0,0 +1 @@ +the steps of the proposed translation process are: (1) partition the source text into a set of fixed locutioris. (2) use the glossary plus coutextual information to select im corresponding set of fixed ioctttions into a sequen{e forming the target sentence. (3) arrange the words of the talget fixed locutions into a sequence forming the target sentence. we have developed stalistical techniques facilitating both tile autonlatic reation of the glossary, and the performance of tile three translation steps, all on the basis of an aliglnncllt of corresponding sentences in tile two texts. while wc are not yet able to provide examples of french / english tcanslation, we present some encouraging intermediate results concerning lossary creation and the arrangement of target wold seq l ie) lees . introduction in this paper we will outline an approach to automatic translation that utilizes techniques of statistical information extraction from large data bases. these self-organizing techniques have proven successful in the field of automatic speech recognition [1,2,3]. statistical approaches have also been used recently in lexicography [41 and natural anguage processing [3,5,6]. the idea of automatic translation by statistical (information thco,etic) methods was proposed many years ago by warren weaver [711. as will be seen in the body of tile paper, tile suggested technique is based on the availability of pairs of large corresponding texts that are iranslations of each other. i l l particular, we have chosen to work with the english and french languages because we were able to obtain the biqingual l lansard corpus of proceedings of the canadian parliament containing 30 million words of text [8]. we also prefer to apply our ideas initially to two languages whose word orcter is similar, a condition that french and english satisfy. our approach eschews the use of an internmdiate ,nechalfism (language) that would encode the "meaning" of tile source text. the proposal will seem especially radical since very little will be sakl about employment of conventional grammars. this omissiol], however, is not essential, and may only rcllect our relative lack of tools as well as our uncertainty about tile degree of grammar sophistication required.a statistical approach to language translat ion p. brown, j. cocke, s. del i ,a pietra, v. della p ietra, f. jel inek, r, mf, rcf, r, and p. roossin ibm research divis ion t.j. watson research center depar tment of computer science p.o. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-2121.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-2121.txt new file mode 100644 index 0000000000000000000000000000000000000000..502645a9778622c2087ebfbcc4ea4f78feec05ed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-2121.txt @@ -0,0 +1 @@ +we argue that even if one extends the domain of locality of cfgs to trees, us- ing only substitution does not givo the freedom to choose the head of each structure. 
we show how adjunction allows us to lexicalize a cfg freely. we then show how a lexicalized grammar naturally follows from the extended domain of locality of tags and present some of the linguistic advantages of our approach. a novel general parsing strategy for lexicalized grammars is discussed. in a first stage, the parser builds a set of structures corresponding to the input sentence and in a second stage, the sentence is parsed with respect to this set. the strategy is independent of the linguistic theory adopted and of the underlying grammar formalism. however, we focus our attention on tags. since the set of trees needed to parse an input sentence is supposed to be finite, the parser can use in principle any search strategy. thus, in particular, a top-down strategy can be used since problems due to recursive structures are eliminated. the parser is also able to use non-local information to guide the search. we then explain how the earley-type parser for tags can be modified to take advantage of this approach. *this work is partially supported by aro grant daa29-84-9-007, darpa grant n0014-85-k0018, nsf grants mcs-82-191169 and dgr-84-10413. the second author is also partially supported by a j.w. zellidja grant. the authors would like to thank mitch marcus for his helpful comments about this work. parsing strategies with lexicalized grammars: application to tree adjoining grammars * yves schabes, anne abeille** and aravind k. joshi department of computer and information science university of pennsylvania philadelphia pa 19104-6389 usa schabes@linc.cis.upenn.edu abeille@cis.upenn.edu joshi@cis.upenn.edu abstract in this paper we present a general parsing strategy that arose from the development of an earley-type parsing algorithm for tags (schabes and joshi 1988) and from recent linguistic work in tags (abeille 1988).
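the two-stage strategy described above can be sketched very schematically in python: the first stage retrieves the elementary structures anchored by the words of the input, and the second stage parses using only that set; the toy lexicon and the placeholder parse step are hypothetical and stand in for the tag machinery of the paper.

# a toy lexicon mapping each word to the elementary structures it anchors
LEXICON = {
    "the":   ["det_tree"],
    "man":   ["np_tree"],
    "met":   ["transitive_tree"],
    "woman": ["np_tree"],
}

def select_structures(sentence):
    # stage 1: collect the finite set of structures relevant to this input;
    # because this set is finite, even a top-down search strategy terminates
    selected = []
    for word in sentence.split():
        selected.extend((word, tree) for tree in LEXICON.get(word, []))
    return selected

def parse(sentence):
    # stage 2 (placeholder): combine only the selected structures;
    # a real implementation would run an earley-style tag parser here
    structures = select_structures(sentence)
    return structures

print(parse("the man met the woman"))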
-3ut, given a deductive framework that can represent the semantics ff the formalism abstractly enough to be independent of processing, he generation of strings matching some criteria can equally well be hought of as a deductive process, namely, a process of constructive ~roof of the existence of a string that matches the criteria. the dif- erence rests in which information is given as premises and what the ~oal is to be proved. this observation opens up the following possi- bility: not only can a single grammar be used by different processes ~ngaged in various "directions" of processing, but one and the same anguage-processing architecture can be employed for processing the grammar in the various modes. in particular, parsing and generatioa :an be viewed as two processes engaged in by a single parameterized ;heorem prover for the logical interpretation of the formalism. we will discuss our current implementation f such an architecture, ~hich is parameterized in such a way that it can be used either for ~arsing or generation with respect o grammars written in a particular ~rammar formalism which has a logical semantics, the patr formal- sm. furthermore, the architecture allows fine tuning to reflect differ- mt l::ocessing strategies, including parsing models intended to mimic )s~cholinguistic phenomena. this tuning allows the parsing system to )perate within the same realm of efficiency as previous architectures or parsing alone, but with much greater flexibility for engaging in ,ther processing regimes. *this research was sponsored by the nippon telegraph and telephone corpo- ation under a contract with sri international. 2 language processing as deduction viewed intuitively, natural-language-utterance gen ration is a nonde- terministic top-down process of building a phrase that conforms to certain given criteria, e.g., that the phrase be a sentence and that it convey a particular meaning. parsing, on the other hand, is usu- ally thought of as proceeding bottom-up in an effort to determine what properties hold of a given expression.a uniform architecture for parsing and generation stuart m. shieber artificial intelligence center sri international menlo park, california, usa* abst rac t the use of a single grammar for both parsing and generation is an idea with a certain elegance, the desirability of which several researchers have noted. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-2147.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-2147.txt new file mode 100644 index 0000000000000000000000000000000000000000..f7255abb9acd0fc2ce1dd3d6a663f63f5ff1533f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C88-2147.txt @@ -0,0 +1 @@ +1986, kasper et al. 1986] involving the logical formulation of feature structures. s s np / ~ ~ vp det n v i i i the man met u i u 2 u 3 figure 1: initial trees nnnp / det i ~c n i woman 1 int roduct ion tree adjoining grammars (tag) were first introduced by joshi, levy, and takalmshi [joshi et al. the first study of this system, from the point of view of its formal properties and linguistic applicability, was carried out by joshi in [joshi 1985]. tags have been used in providing linguistic analyses; a detailed study of the linguistic relevance was done by kroch and joshi in [kroch et al. in this paper, we show lmw tags can be embedded in a feature struc- ture based framework. 
feature structure based tree adjoining grammars (ftag) are introduced in section 2, and is f611owed by a comparsion of the descriptive capacity of ftag and tag. a restricted version of ftag is proposed and some possible linguistic stipulations are considered. in section 3, we introduce a calculus, which is an extension of the logical calculus of rounds and kasper [rounds et al. 1986, kasper et al. 1986] allowing a-abstraction and application, in order to describe the structures used in ftags. finally, in section 4, we summarize the work presented in this paper. 1.1 in t roduct ion to t ree ad jo in ing grammars tree adjoining grammars (tag), unlike other grammatical systems used in computational linguistics, is a tree rewriting system. unlike the string rewriting formalisms which writes recursion into the rules that generate the phrase structure, a tag factors reeursion and dependencies into a finite set of elementary trees. the elementary trees in a tag correspond to minimal inguistic structures that localize the dependencies such as agreement, subcategorization, a d filler-gap.feature structures based tree adjoining grammars 1 k. vijay-shanker department of computer and information sciences university of delaware newark, de 19711 u.s.a a. k. joshi del)artment of computer and information science university of pennsylvania philadelphia, pa 19104 u.s.a abstract we have embedded tree adjoining grammars (tag) in a fea- ture structure based unification system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-2067.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-2067.txt new file mode 100644 index 0000000000000000000000000000000000000000..27b2f00ccdbd86fdbb257a372748e900729d58b6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-2067.txt @@ -0,0 +1 @@ +automated language understanding requires the determination f the concept which a given use of a word represents, a process referred to as word sense disambiguation (wsd). wsd is typically effected in natural llanguage processing systems by utilizing semantic teature lists for each word in the system's lexicon, together with restriction mechanisms such as case role selection. however, it is often impractical to manually encode such information, especially for generalized text where the variety and meaning of words is potentially unrestricted. furthermore, restriction mechanisms usually operate within a single sentence~ and thus the broader context cannot assist in the disambiguation process. in this paper, we describe a means tor automatically building very large neural networks (vlnns) from definition texts in machine-readable dictionaries, and denmnstrate he use of these networks for wsd. our method brings together two earlier, independent approaches to wsd: the use of machine-readable dictionaries and spreading and activation models. the automatic onstruction of vlnns enables real-size experiments with neural networks, which in turn the authors would like to acknowledge the contributions of st~phanc tlari6 and gavin huntlcy to the work presented in this paper. provides insight into their behavior and design and can lead to possible improvements.automated language understanding requires the determination f the concept which a given use of a word represents, a process referred to as word sense disambiguation (wsd). 
the use of word relations implicitly encoded in machine-readable dictionaries, coupled with the neural network strategy, seems to offer a promising approach to wsd. the model we describe here is only a first step toward a fuller understanding and refinement of the use of vlnns for language processing, and it opens several interesting avenues for further application and research. provides insight into their behavior and design and can lead to possible improvements. our experimentation with vlnns has also shed light on the role of and need for various other parameters, uch as thresholds, decay, etc. machine-readable dictionaries jbr wsd. our approach to wsd takes advantage of both strategies outlined above, but enables us to address solutions to their shortcomings. wsd is typically effected in natural llanguage processing systems by utilizing semantic teature lists for each word in the system's lexicon, together with restriction mechanisms such as case role selection. more practically, it is simply difficult to imagine how vectors of several thousands of microfeamrcs for each one of the lens of thousands of words and hundreds of thousands of senses can be realistically encoded by hand. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3030.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3030.txt new file mode 100644 index 0000000000000000000000000000000000000000..56211bcfb3445c1f8a3b19b0189f727d1bcfe4de --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3030.txt @@ -0,0 +1 @@ +the formalism is a linguistic one. it relies on transitional probabilities in an indirect way. the probabilities are not part of the description. the descriptive statements, constraints, do not have the ordinary task of defining the notion correct sentence in l. they are less categorical in nature, more closely tied to morphological features, and more directly geared towards the basic task of pars- ing. we see this task as one of inferring surface structure from a stream of concrete tokens in a basically bottom-up mode. constraints are formu- lated on the basis of extensive corpus studies. they may reflect absolute, ruleqike facts, or probabilistic tendencies where a certain risk is judged to be proper to take. constraints of the former rule-like type are of course preferable. the ensemble of constraints for language l con- stitute a constraint grammar (cg) for l. a cg is intended to be used by the constraint grammar parser cgp, implemented as a lisp interpreter. our input tokens to cgp are morphologically ana- lyzed word-forms. one central idea is to maximize the use of morphological information for parsing purposes. all relevant structure is assigned directly via lexicon, morphology, and simple mappings from morphology to syntax. ]he task of the constraints is basically to discard as many alternatives as possible, the optimum being a fully disambiguated sentence with one syntactic reading only. the second central idea is to treat morphological disambiguation and syntactic labelling by the same mechanism of discarding improper alternatives. 
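the discarding mechanism can be illustrated with a very small python sketch: each word form starts with the set of readings supplied by lexicon and morphology, and constraints remove readings that are impossible in the local context; the example readings and the single constraint are invented for illustration and are far simpler than the constraints of a real constraint grammar.

# candidate readings for each token, as produced by lexical lookup
readings = {
    0: {"the": {"det"}},
    1: {"can": {"noun", "verb", "aux"}},
    2: {"rusts": {"noun", "verb"}},
}

def discard_after_determiner(readings):
    # a toy constraint: remove verb and auxiliary readings from a word
    # that immediately follows an unambiguous determiner
    for i in sorted(readings)[1:]:
        prev_tags = next(iter(readings[i - 1].values()))
        cur_tags = next(iter(readings[i].values()))
        if prev_tags == {"det"} and len(cur_tags) > 1:
            cur_tags -= {"verb", "aux"}

discard_after_determiner(readings)
print(readings)
# 'can' keeps only its noun reading; 'rusts' is still ambiguous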
168 a good parsing formalism should satisfy many re- quirements: the constraints should be declarative rather than procedural, they should be able to cope with any real-world text-sentence (i.e.constraint grammar as a framework for parsing running text fred karlsson university of helsinki department of general linguistics hallituskatu 11 sf-00100 helsinki finland e-mail: karlss?n@finuh.bitnet 1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3044.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3044.txt new file mode 100644 index 0000000000000000000000000000000000000000..02736a9d1d7b5ec74b16caf3cf22a8baacd0187e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3044.txt @@ -0,0 +1 @@ +(2) transdr the source matching expression into the target matching expression. (3) construct the target sentence from the target matching expression. this mechanism generates some candidates of translation. to select, the best translation out of them, we define the score of a translation. 1 in t roduct ion use of extracted information fiom examples or example-based translation is becoming the new wave of machine translation. the ba.- sic idea. of example~based translation is very simple: translate a source sentence by imitat- ing the translation example of a similar sen- tence in the database. the idea first appeared in [nagao 84], and some research has followed it [sumita 88][sato 89][sadler 89a. but a great deal of effort is still needed to im- plemenl the idea. in our previous work, we show how to select. the best target word in case-frame translation based on examples[sato 89]. in this paper, we concentrate on two problems: 1. ltow to combine some fragments of trans- lation examph~s in order to translate one sentence? 2. tlow to select tile best tra.nslation out of inany candidates? we show partial solutions for them in mbt2. mbt2 is the second prototype system in our memory-based translation project.. mbt2 ca.n do bi-directional m~nslation between an english word-dependency tree and a japanese word- dependency tree.toward memory--based translation satoshi sato and ma.koto nagao dept. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3045.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3045.txt new file mode 100644 index 0000000000000000000000000000000000000000..b833f272bc0bebe3e20405c69d5fd0b995a7458d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3045.txt @@ -0,0 +1 @@ +tree-adjoining rammars (tag) constitute a grammat- ical formalism with attractive properties for the strong characterization f the syntax of natural angtmges, that is, characterization of the analysis trees of the expres- sions in the language (kroch and joshi, 1985; kroch, 1989)) among these properties are that o the domain of locality in tags is larger than lot formalisms lhat augment context-free grammars (such as lexical-functkmal, or generalized or head- driven phrase-structure grammar), and ? the statements of dependencies and recursion pos- sibilities in a tree are factored, the former following from primitive dependencies in elementary trees, the latter a consequence of an operatkm of adjunc- tion of trees. 
these unique properties of tags present a challenge tot the application of tags beyond the limited confines of syntax, for instance, to the task of semantic interpre- tation or automatic tr~mslation of natural anguage. the slandm'd methods of moving beyond syntax to interpre- tation make use in one way or another of the compo- sitional structure of the analysis tree that is manifested in the tree's derivation. any version of compositional 1we assume familiarity throughout the paper with previous work on tags. see, for instance, the introduction by joshi (1987). semantics, or syntax.directed translation relies on such a methodology to some extent. however, in the case of tags, the compositional structure of the tree is not miro rored by its derivational structure, so that a method for constructing semantics based on the compositional syn- tactic structure will be inherently nonderivational, that is, construction of the semantics will be independent of the derivation of the tree, and therefore subsequent. on the other hand, a method mirroring the deriva- tional structure will not necessarily be compositional with respect to tile derived structures of expressions. ai+ tl~ough such a method would be quite different from ttle primarily compositional methods previously postulated, it may have advantages, given that certain aspects of language seem to be noncompositional. (see section 4.) in this paper, we present a varim~t of tags, called synchronous tags, which characterize correstxmdences between languages. the formalism's intended usage is to relate expressions of natural anguages to their asso- ciated semantics represented in a logical form language, or to their translations in another natural language; in summary, we intend the formalism to allow tags to be used beyond their role in syntax proper. we also discuss its application to concrete xamples, and mention some computational issues that arise in its interpretation.we also discuss its application to concrete xamples, and mention some computational issues that arise in its interpretation. if the representation can be left im- plicit, the optimization can be maintained, but retrieval of explicit representations will be combinatorially more complex. the synchronous tag formalism is inherently nondirec- tional. tree-adjoining rammars (tag) constitute a grammat- ical formalism with attractive properties for the strong characterization f the syntax of natural angtmges, that is, characterization of the analysis trees of the expres- sions in the language (kroch and joshi, 1985; kroch, 1989)) among these properties are that o the domain of locality in tags is larger than lot formalisms lhat augment context-free grammars (such as lexical-functkmal, or generalized or head- driven phrase-structure grammar), and ? the statements of dependencies and recursion pos- sibilities in a tree are factored, the former following from primitive dependencies in elementary trees, the latter a consequence of an operatkm of adjunc- tion of trees. derivation is not defined in terms of constructing 6the subscript x on certain nodes is the value of a feature on the nodes corresponding to the variable bound by the quantifier. if incrementality requires that we generate xplicit representations of all possible interpre- tations (i.e., target derivations) of the string seen so far, then this optimization cannot be used, and parsing will be highly inefficient. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3052.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3052.txt new file mode 100644 index 0000000000000000000000000000000000000000..ec376b3e0720b073da5bce85f10482110e1b7d4d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3052.txt @@ -0,0 +1 @@ +authors describe these extensions as "inheritance grammars", "in- heritance networks", :ii;ature sorts", "typed t~ature structures",...[1, 3, 5, 13, 17, 15, 9, 11, 7, 8]. these formalisms exhibit, to various degrees, one or several of the following properties, characteristic of the so-called object-oriented paradigm: a high level of abstraction, a capacity of inference, modularity and distributed control. abstraction and modular- ity are needed when the linguist wants to describe a hierarchy of concepts (like a lexical hierarchy or the hierarchy of phrasal categories), and to describe linguistic data at different levels (e.g. morphology, syntax, semantics). at first glance it seems rather natural to develop separate modules for different lin- guistic levels, and to describe separately their inter- actions; however, great difficulties are encountered when these modules have to be integrated. usually, there are two choices. either everything is described in a single place using a deeply intricate data struc- ture, like packing both syntactic and semantic equa- tions in cf rules in some lfg extensions (e.g. [10]); the price is a loss in understmtdability and general~ ity. or descriptions are kept separate and the pro- eessing is done accordingly: first, a morphological phase, then a syntactic analysis, and then a semantic analysis, without any communication between these different steps [4]. the price is that interdependent constraints between these levels are lost, resulting in inadequate linguistic description or very complex control strategies at the implementation level. in this paper, we argue that typed unification gram- mars give the linguist a formal framework which has the desirable properties. we will give an introduc- tion to such a formalism, called if,_ (~iyped i"ea- ture structure), which integrates disjunctions, con- junctions and conditional expressions of typed fea- ture structures. this introduction will start from a very simple dcg, and will show how one can write a dcg-like grammar in tfs, making use of the typing system to enforce general constraints valid for classes of objects and to modularize linguistic descriptions. we then show that further abstraction leads to a i-[psg-like grammar. it is not our goal to give here a formal account of the formalism (the interested reader should refer to [2] where a very clear tbrmal semantics on which tfs is based is given), and we will use an informal approach wherever possible.typed unification grammars martin c. emele, dhni zajac project polygloss* university of stuttgart ims~cl/ifl~ais, keplerstrage 17, d - 7000 stuttgart 1, federal republic of germany {emele,zajac} @is.informatik.uni-st ut gart.dbp.de abstract we introduce tfs, a computer formal- ism in the class of logic ibrmaiisms which integrates a powerful type system. 
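a very small python sketch of the idea of typing: every structure carries a type, and unification first computes the more specific of the two types in a hand-written hierarchy before merging the remaining features; the hierarchy, features and function names are invented and only hint at what a formalism like tfs provides.

# a toy type hierarchy: each type lists its immediate supertypes
SUPERTYPES = {"sign": [], "phrase": ["sign"], "word": ["sign"], "verb": ["word"]}

def ancestors(t):
    seen = {t}
    for s in SUPERTYPES.get(t, []):
        seen |= ancestors(s)
    return seen

def meet(t1, t2):
    # the more specific type, if one subsumes the other; otherwise a clash
    if t1 in ancestors(t2):
        return t2
    if t2 in ancestors(t1):
        return t1
    return None

def unify_typed(a, b):
    t = meet(a["type"], b["type"])
    if t is None:
        return None                   # incompatible types -> failure
    merged = {"type": t}
    for feat in (a.keys() | b.keys()) - {"type"}:
        if feat in a and feat in b and a[feat] != b[feat]:
            return None               # atomic feature values only, for brevity
        merged[feat] = a.get(feat, b.get(feat))
    return merged

print(unify_typed({"type": "word", "stem": "run"}, {"type": "verb", "tense": "past"}))
# -> {'type': 'verb', 'stem': 'run', 'tense': 'past'}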
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3063.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3063.txt new file mode 100644 index 0000000000000000000000000000000000000000..15df5b104c1efa585a969cca95b420ec9bd0747d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C90-3063.txt @@ -0,0 +1 @@ +ttle results of the experiment show that in most of the cases the cooccurrence statistics indeed reflect the semantic onstraints and thus provide a basis {or a useful disambiguat.ion tool. 1 introduct ion the use of selectional constraints i one of the most popular methods in applying semantic information to the resolution of ambiguities in natural anguages. the constraints typically specify which combina- tions of semantic lasses are acceptable in subject- verb-object relationships and other syntactic struc- tures. this information is used to filter ont some analyses of ambiguous constructs or to set prefer- ences between alternatives. though the use of selectional constraints i very popular, there is very little success (if any) in im- plementing this method for broad domains. the major problem is the huge amount of information that must be acquired in order to achieve a rea- sonable representation of a large domain. in order to overcome this problem, our project suggests an alternative to the traditional model, based on auto- matic acquisition of constraints flom a large corpus. the rest of the paper describes how this method is used to resolve anaphora references. similarly, the constraints are used also to resolve syntactic am- biguities, but this will not be described here. the *part of this resemch was conducted wb.ile visiting ibm t. j. watson research center, yorktown ileights, ny reader should bare in mind that like the conven- tional use of selectional constraints, our method is inteuded to work in co,tjunction with other disam- biguation means. these, such as various syntactic and pragmatic onstraints and heuristics [carbonetl and brown p.)88, tlobbs 1978], represent additional levels of knowledge and are essential when selec- tional constraints are not sufficient. 2 the statist ical approach according to the statistical model, cooccurrence patterns that were observed in tile corpns are used as selection patterns. whenever several alternatives are presented by an ambiguous construct, we prefer the one correspot~ding t.omore frequent patterns. when using selectional constraints for anaphora resolution, the referent must satisfy the constraints which are imposed on the anaphor. if the anaphor participates in a certain syntactic relation, like be- ing an object of some verb, then the substitution of the anaphor with the referent must satisfy the selectional constraim.s.automatic processing of large corpora fbr the resolution of anaphor references ido dagan * alon itai computer science department technion, tiaifa, israel dagan~techunix .b i tnet , i ta i~ cs.technion, ac.il abstract manual acquisition of semantic onstraints in broad domains is very expensive. 
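the statistical preference described above can be sketched in a few lines of python: cooccurrence counts collected from a corpus are consulted, and the candidate referent that yields the more frequent verb-object pattern is preferred; the counts and the relation used here are invented for illustration.

from collections import Counter

# verb-object cooccurrence counts as they might be collected from a parsed corpus
cooc = Counter({("drink", "water"): 120, ("drink", "coffee"): 95, ("drink", "chair"): 0})

def prefer_referent(verb, candidates):
    # choose the candidate forming the most frequent pattern with the verb;
    # a real system falls back to other constraints when counts are unreliable
    return max(candidates, key=lambda noun: cooc[(verb, noun)])

# 'it' is the object of 'drink'; which antecedent best satisfies the constraint?
print(prefer_referent("drink", ["chair", "water"]))   # water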
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1019.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1019.txt new file mode 100644 index 0000000000000000000000000000000000000000..7c7582f1b2f406e4931c411c6a37cb75f794d646 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1019.txt @@ -0,0 +1 @@ +chinese sentences arc cx)mposed with string of characters without blanks to mark words. however the basic unit for sentence parsing and understanding is word. therefore the first step of processing chinese sentences is to identify the words( i.e. segment the character strings of the sentences into word strings). most of the current chinese natural language processing systems include a processor for word iden- tification. also there are many word segmentation techniques been developed. usually they use a lexicon with a large set of entries to match input sentences \[2,10,12,13,14,21\]. it is very often that there are many l~)ssible different successful matchings. therefore the major focus for word identification were on thc resolu- tion of ambiguities. however many other important as- pects, such as what should be done, in what depth and what are considered to be the correct identifications were totally ignored. high identification rates are claimed to be achieved, but none of them were mea- sured under equal bases. there is no agreement in what extend words are considered to be correctly iden- tified. for instance, compounds occur very often in chi- nese text, but none of the existing systems except ours pay much attention to identify them. proper name is another type of words which cannot be listed exhaus- tively in the lexicon. therefore simple matching algo- rithms can not successfully identify either compounds or proper names. in this paper, we like to raise the ptx~blems and the difficulties in identifying words and suggest the possible solutions.chinese sentences arc cx)mposed with string of characters without blanks to mark words. in this paper, we like to raise the ptx~blems and the difficulties in identifying words and suggest the possible solutions. however the basic unit for sentence parsing and understanding is word. therefore simple matching algo- rithms can not successfully identify either compounds or proper names. proper name is another type of words which cannot be listed exhaus- tively in the lexicon. therefore the first step of processing chinese sentences is to identify the words( i.e. segment the character strings of the sentences into word strings). for instance, compounds occur very often in chi- nese text, but none of the existing systems except ours pay much attention to identify them. most of the current chinese natural language processing systems include a processor for word iden- tification. also there are many word segmentation techniques been developed. there is no agreement in what extend words are considered to be correctly iden- tified. high identification rates are claimed to be achieved, but none of them were mea- sured under equal bases. usually they use a lexicon with a large set of entries to match input sentences \[2,10,12,13,14,21\]. it is very often that there are many l~)ssible different successful matchings. 
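the matching step and the ambiguity it produces can be sketched in python with a toy lexicon, using ascii strings to stand in for chinese characters; the recursive enumeration below is a generic illustration of lexicon matching, not the algorithm of any particular system discussed in the paper.

LEXICON = {"a", "ab", "b", "bc", "c", "abc"}

def segmentations(s):
    # enumerate every way of covering the input with lexicon entries
    if not s:
        return [[]]
    results = []
    for i in range(1, len(s) + 1):
        word = s[:i]
        if word in LEXICON:
            for rest in segmentations(s[i:]):
                results.append([word] + rest)
    return results

# several different successful matchings for the same character string
print(segmentations("abc"))
# [['a', 'b', 'c'], ['a', 'bc'], ['ab', 'c'], ['abc']]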
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1025.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1025.txt new file mode 100644 index 0000000000000000000000000000000000000000..363f0619eeb8003cc1d1cea5f7294b22b0f809a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1025.txt @@ -0,0 +1 @@ +morphological information on the leaf nodes of trees ? transducers that encode morphological alternations an analysis of an inflected word form is produced by mapping the input form to a sequence of lexical forms through the transducers and by composing some out- put from the annotations on the leaf nodes of the lexical paths that were traversed. comprehensive morphological descrip- tions of this type have been developed for several languages including finnish, swedish, russian, english, swahili, and arabic. although they have several good features, these kimmo-systems also have some limitations. the ones we want to ad- dress in this paper are the following: (1) lexical representations tend to be arbitrary. because it is difficult to write and test two-level systems that map between pairs of radically dissimilar forms, lexical representations i existing two-level analyzers tend to stay close to the surface forms. this is not a problem for morpho- logically simple languages like english because, for most words, inflected forms are very similar to the canonical dictionary entry. except for a small number of irregular verbs and nouns, it is not difficult to create a two-level description for english in which lexical forms coincide with the canonical citation forms found in a dictionary. however, current analyzers for mor- phologically more complex languages (finnish and russian, for example) are not as satisfying in this respect. in these systems, lexical forms typically contain diacritic markers and special symbols; they are not real words in the language. for example, in finnish the lexical counterpart of otin i took might be rendered as ottallln, where t, al, and i1 are an arbitrary encoding of morpho- logical alternations that determine the allomorphs of the stem and the past tense morpheme. the canonical citation form ottaa to take is composed from annotations on the leaf nodes of the letter trees that are linked to match the input. it is not in any direct way related to the lexical form produced by the transducers. (2) morphological categories are not directly encoded as part of the lexical form. instead of morphemes like plural or past, we typically see suffix strings like +s, and +ed, which do not by themselves indi- cate what morpheme they express.two-level morphology with composition lauri karttunen, ronald m. kaplan, and annie zaenen xerox palo alto research center center for the study of language and information stanjbrd university 1. 
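as a rough illustration of mapping a lexical form to a surface form through a cascade, here is a python sketch that composes a sequence of string rewrite rules; real two-level systems use finite-state transducers, so this is only an analogy, and the rules and forms are invented. it does, however, show lexical forms that name morphemes directly (past, plural) rather than arbitrary suffix strings.

import re

# an ordered cascade of rewrite rules, applied lexical -> surface
RULES = [
    (r"take\+PAST\b", "took"),   # irregular past handled by an early rule
    (r"\+PAST\b", "ed"),         # default past tense realisation
    (r"\+PL\b", "s"),            # default plural realisation
]

def to_surface(lexical_form):
    form = lexical_form
    for pattern, replacement in RULES:
        form = re.sub(pattern, replacement, form)
    return form

print(to_surface("take+PAST"))   # took
print(to_surface("walk+PAST"))   # walked
print(to_surface("page+PL"))     # pages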
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1038.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1038.txt new file mode 100644 index 0000000000000000000000000000000000000000..4cf8e47ef59dd54abcb598d4e6957bd30ac6fa29 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-1038.txt @@ -0,0 +1 @@ +in particular, as compared to these algorithms, human speakers pay far less attention to reducing the length of a referring expression, and far more attention to making sure they use attributes and values that human hearers can easily process; in the terms introduced in [da188,da189], hearers are more concerned with the principle of sensitivity than with the principle of efficiency. we have designed a new referring expression generation algorithm that is based on the~ observations, and believe that the new algorithm is more practical for real-world natu- ral language generation systems than the algorithms we have previously proposed. in particular, the al- gorithm is: ? fast: its run-time is linear in the number of distrac- tors, and independent of the number of possible modifiers; ? sensitive to human preferences: it attempts to use easily perceivable attributes and basic-level [ros78] attribute values; and ? supported by serc grant gr/f/36750. e-mail ad- dress is e.reiter@ed. taiso of the centre for cognitive science at the univer- sity of edinburgh. e-mail address i r. daleqed. ehud re i te r*and rober t da le f depar tment of art i f ic ia l inte l l igence un ivers i ty of ed inburgh ed inburgh eh1 1tln scot land ? domain-independent: he core algorithm should work in any domain, once an appropriate knowl- edge base and user model has been set up. a version of the algorithm has been implemented within the idas natural-language neration system [rml92], and it is performing satisfactorily. the algorithm presented in this paper only gener- ates definite noun phrases that identify an object that is in the current focus of attention. algorithms and models that can be used to generate pronominal and one-anaphoric referring expressions have been presented elsewhere, .g., [sid81,gjw83,da189]. we have recently begun to look at the problem of gen- erating referring expressions for objects that are not in the current focus of attention; this is discussed in the section on future work.a fast algorithm for the generation of referring expressions abst rac t we simplify previous work in the development of algorithms for the generation of referring expre~ sions while at the same time taking account of psy- cholinguistic findings and transcript data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-2070.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-2070.txt new file mode 100644 index 0000000000000000000000000000000000000000..fafe08fd75baf45bbd67728aced81a3642749175 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-2070.txt @@ -0,0 +1 @@ +our use of class models overcomes this knowledge acquisition bottleneck, enabling training on unresuicted monolingual text without human intervention. applied to the 10 million word groliers encyclopedia, the system correctly disambiguated 92% of the instances of 12 polysemous words that have been previously studied in the literature. 
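the family of algorithms discussed here can be sketched in python: attributes are tried in a fixed preference order, and an attribute-value pair is added to the description whenever it removes at least one remaining distractor, stopping as soon as the referent is distinguished; the domain objects and preference order are invented, and the sketch omits basic-level value selection and the user model.

OBJECTS = {
    "o1": {"type": "dog", "colour": "black", "size": "small"},
    "o2": {"type": "dog", "colour": "white", "size": "small"},
    "o3": {"type": "cat", "colour": "black", "size": "large"},
}
PREFERRED = ["type", "colour", "size"]      # easily perceivable attributes first

def describe(target, objects, preferred=PREFERRED):
    distractors = {o for o in objects if o != target}
    description = []
    for attr in preferred:
        value = objects[target][attr]
        ruled_out = {o for o in distractors if objects[o][attr] != value}
        if ruled_out:                       # keep the attribute only if it helps
            description.append((attr, value))
            distractors -= ruled_out
        if not distractors:                 # referent is now unambiguous
            break
    return description

print(describe("o1", OBJECTS))   # [('type', 'dog'), ('colour', 'black')]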
problem formulation this paper presents an approach to word sense disambiguation that uses classes of words to derive models useful for disambiguating individual words in context. "sense" is not a well defined concept; it has been based on subjective and often subtle distinctions in topic, register, dialect, collocation, part of speech and valency. for the purposes of this study, we will define the senses of a word as the categories listed for that word in roget's international thesaurus (fourth edition - chapman, 1977).1 sense disambiguation will constitute selecting the listed category which is most probable given the surrounding context. (footnote 1: note that this edition of roget's thesaurus is much more extensive than the 1911 version, though somewhat more difficult to obtain in electronic form. one could use other concept hierarchies, such as wordnet (miller, 1990) or the ldoce subject codes (slator, 1991); all that is necessary is a set of semantic categories and a list of the words in each category.) this may appear to be a particularly crude approximation, but as shown in the example below and in the table of results, it is surprisingly successful. [table: sample input sentences containing "crane" with the output category assigned -- contexts such as "... were used to lift heavy ..." and "for supplying power for cranes, hoists, and lifts" are labelled tools, while contexts such as "cranes build a nest of ..." and "at least five crane species are in danger of ..." are labelled animal.] word-sense disambiguation using statistical models of roget's categories trained on large corpora david yarowsky at&t bell laboratories 600 mountain avenue murray hill, nj 07974 yarowsky@research.att.com abstract this paper describes a program that disambiguates english word senses in unrestricted text using statistical models of the major roget's thesaurus categories.
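the step of selecting the most probable category given the surrounding context can be sketched in python as a naive-bayes-style score summed over context words; the category names and probabilities below are invented, and the real models are trained on large corpora with smoothing and weighting that this sketch leaves out.

import math

# p(word | category) as it might be estimated from category training contexts
P_WORD_GIVEN_CAT = {
    "TOOLS":   {"lift": 0.020, "power": 0.015, "species": 0.0001},
    "ANIMALS": {"lift": 0.001, "nest": 0.018, "species": 0.020},
}
P_CAT = {"TOOLS": 0.5, "ANIMALS": 0.5}
FLOOR = 1e-5                    # stand-in for unseen-word smoothing

def best_category(context_words, categories=P_WORD_GIVEN_CAT):
    def score(cat):
        s = math.log(P_CAT[cat])
        for w in context_words:
            s += math.log(categories[cat].get(w, FLOOR))
        return s
    return max(categories, key=score)

# disambiguating an occurrence of 'crane' from nearby words
print(best_category(["lift", "power"]))      # TOOLS
print(best_category(["nest", "species"]))    # ANIMALS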
(si) the bow lu te , such as the bambara ndang, is plucked and has an ind iv idual curved neck :for each string. most fluent readers of english who }lave never be- fore encountered the term q3amhara ndang" will nev- ertheless from this sentence infer that a "bambara udang" is a kind of "bow iute". this is true even if tile reader has only a fuzzy conception of what a how lute is. note that the attthor of the sentence is not de- liberately defining the term, as would a dictionary or a childrens book containing a didactic sentence like a bambara ndang is a kind of bow lute. however, the semantics of the lexico-syntactic construction i - dicated by the pattern: (la) npo ..... h as {np1, np2 . (and ior)} np,, are such that they imply (lb) for all np , , 1 < i< n, hyponym(npi, npo) thus from sentence (si) we conclude hyponym ( "barn bare n dang", "how lu re").automatic acquisition of hyponyms ~om large text corpora mart i a. hearst computer science division, 571 evans hall un ivers i ty of cal i fornia, berkeley berkeley, ca 94720 and xerox palo a l to research center mart i~cs , berkeley, edu abst rac t we describe a method for the automatic acquisition of the hyponymy lexical relation from unrestricted text. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-3126.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-3126.txt new file mode 100644 index 0000000000000000000000000000000000000000..cd8280b47662ba8dfdf3ef96d0402af7bad3a598 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-3126.txt @@ -0,0 +1 @@ +since that article is written in dutch, we will translate some parts of it more or less literally in this introduction. according to scba, the current radition of language processing systems is based on linguistically motivated competence models of natural imlguages. "llte problems that these systems lull iato, suggest file necessity of a more perfommnce oriented model of language processing, that takes into account the statistical properties of real language use. qllerefore scha proposes a system ritat makes use of an annotated corpus. anmyzing a new input means that the system attempts to find the most probable way to reconstruct the input out of fragments that already exist in the corpus. the problems with competence grammars that are mentioned in schas aiticle, include the explosion of ambiguities, the fact tilat itunmn judgemeats on grammaticality are not stable, that competence granunars do not account for language h~alge, alld that no existing rule-based grammar gives a descriptively adequate characterization of an actual language. according to scha, tile deveh,pment of a fornml gnatunar fur natural latlguage gets more difficult ,as tire grammar gets larger. when the number of phenotnena one has already takea into account gets larger, the number of iareractions that must be considered when ,me tries to introduce all account of a new pllenomenon grows accordingly. 
as to tile problem of ,mtbiguity, it has turned out that as soon as a formal gratmnar clmracterizes a non-trivial part of a natural anguage, almost every input sentence of reasonable length gets ml re]manageably large number of different structural analyses (and * the author wishes to thank his colleagues at the department of computational linguistics of the ilaiversity of amsterdam for many fruitful discussions, and, in particular, remko scha, martin van den berg, kwee tjoe l,iong and frodenk somsen for valuable comments on earlier w~rsions of this paper. semantical interpretations). i "lids is problenmtic since most of these interpretations ~re not perceived as lvossible by a hunmn language user, while there are no systematic reasons 111 exclude tileln on syutactic or sematltic grounds. often it is just a ntatter of relative implausibility: tile only reason why a certain iarerpmtarion of a sentence is not perceived, is that aanther interprctatilm is much more plausible. competence and performance tale lhnriations of the current language procossing systerus are not suprising: riley are the direct consequence of rile fact that these systems implement chart]skys notion of a coutpetence grmnmar. the formal grilnuuars that constitute the subject-nmtter of theoretieal linguistics, aim at characterizing the clnnpetencc of tile langnage user. but the preferences language users have m the case of ambiguous entences, are paradigm instances of perfonatmce phenomena.a computational model of language data oriented parsing rens bolt* department of computational i jnguistics university of amsterdmn spuistraat 134 1012 vii amsterdam the netherlands rens@alf.let.uva.nl performance: abstract 1)ata oriented parsing (ix)p) is a model where no abstract rules, but language xt~riences in the ti3ru~ of all ,malyzed colpus, constitute the basis for langnage processing. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-3150.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-3150.txt new file mode 100644 index 0000000000000000000000000000000000000000..4504b59dc3bead70eef48fbeae67a9b25b6152bd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C92-3150.txt @@ -0,0 +1 @@ +in this article, the type of analysis used (surface grammatical nalysis) is highlighted, as the methodological pproach adopted to adapt the rules (experimental approach). i ) const i tu t ing constituting a terminology of a subject field, that is to say establishing a list of the terminological units that represent the concepts of this field, is an oft-encountered problem. for the research development division of electricit6 de france (french electricity board), this problem arose in the information documentation sector. an automatic indexing system, using different thesauri according to the application, has been operational for three years or more [monteil 1990]. the terminologists and information scientists need a terminology a te rmino logy extraction tool in order to keep these thesauri up to date in constantly changing fields and to create "ex nihilo" thesauri for new fields. this is the reason why the terminological extracting software, lexter, was developed, forming the first link in the chain that goes to make up the thesaurus. a corpus of french- language texts is fed into lexter, which gives out a list of likely terminological units, which are then passed on to art expert for validation. 
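a toy python sketch of extracting candidate terminological noun phrases from pos-tagged text: sequences matching a simple surface pattern (a noun optionally extended by adjectives or "de" plus another noun) are proposed as candidate units for an expert to validate; the tagged input, the tag set and the pattern are invented and much poorer than the grammatical analysis lexter actually performs.

# pos-tagged input (word, tag) as it might come from a tagger
tagged = [("la", "D"), ("ligne", "N"), ("de", "P"), ("transmission", "N"),
          ("est", "V"), ("une", "D"), ("ligne", "N"), ("aerienne", "A"), (".", "PUNCT")]

def candidate_terms(tagged):
    terms, run = [], []
    for word, tag in tagged + [("", "END")]:
        if (not run and tag == "N") or (run and tag in {"N", "A", "P"}):
            run.append((word, tag))
            continue
        # close the current run: drop a dangling preposition, keep multi-word runs
        while run and run[-1][1] == "P":
            run.pop()
        if len(run) > 1:
            terms.append(" ".join(w for w, _ in run))
        run = []
    return terms

print(candidate_terms(tagged))
# ['ligne de transmission', 'ligne aerienne']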
aulxs de coling-92, nantes, 23-28 ao~r 1992 9 7 7 proc. of coling-92, nantes, aug. 23-28, 1992 2) what is a terminological unit ? the main aim here is not to provide a rigorous definition of what a terminological unit is, but rather to outline its essential features, and thus to justify the hypotheses (concerning the form of terminological units) on which lexter is based. semantic function : the representation of the concept the first characteristic of the terminological unit is its function as the representation of a concept. the terminological unit plays this role of representation in the framework of a terminology, which is the linguistic evidence of the organisation of a field of knowledge in the form of a network of concepts; the terminological unit represents a concept, uniquely and completely, taken out of any textual context. the existence of this one-to-one relationship between a linguistic expression and an extra-linguistic object is, as we shall see, a situation which particulary concerns the terminological units. the appearance of a new terminological unit is most often a parallel process to that of the birth of the concept which it represents. this "birth" is marked by the consensus of a certain scientific ommunity.surface grammatical analysis for the extraction of terminological noun phrases didier bourigault ecole des hautes etudes en sciences sociales et electlicit6 de france direction des etudes et recherches 1, avenue du g6n6ral de gaulle 92141 clamart cedex france tel : +33 1 47 65 50 64 abstract lexter is a software package for extracting terminology. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1027.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1027.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6488c01f134fb53f356241feab82d315de6e83c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1027.txt @@ -0,0 +1 @@ +in an utterance, this ambiguity is normally resolved by the context of a word: e.g. in the seutence "the 1977 p6s could store two pages of data. ", store can only be an intluitive. a part-of-speech tagger is a system which automat- ically assigns the part of speech to words using con- textual information. potential applications for part- of-speech taggers exist in many areas inclnding speech recognition, speech synthesis, machine translation and information retrieval. l)ifiereut methods have been used for the im plemen- ration of part-of-speech taggers. taggit (greene, rnbin, 1971), an early system, which was used for the initial tagging of the brown corpus was rule-based. it was able to assign the correct part-of-speech to about 77 % of the words in the brown corpus. in another approach contextual dependencies are modelled statistically. churcb (1988) and kempe (1993) use second order markov models and train their systems on large handtagged corpora. using this metbod, they are able to tag more than 96 % of their test words with the correct part-of-speech. the need for reliably tagged training data, however, is a prob- lem for languages, where such data is not available in sufficient quantities. jelinek (1985) and cutting et al. (1992) circumvent his problem by training their taggers on untagged ata using tile itaum-welch algo- rithm (also know as the forward-backward algorithm). 
they report rates of correctly tagged words which are comparable to that presented by church (1988) and kempe (1993).part-of-speech tagging with neural networks hehnut schmid institute for computational linguistics, azenbergstr.12, 70174 stuttgart, germany, schmid@ims.uni-stuttgart.de topic area: large text corpora, part-of-speech tag- ging, neural networks 1 abstract text corpora which are tagged with part-of-speech in- formation are useful in many areas of linguistic re- search. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1032.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1032.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e0bb71eda08b28627e32e3223baad07420ae2b8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1032.txt @@ -0,0 +1 @@ +qhe proposed japanese morphological nalyzer achieved 95. l% recall and 94.6% precision for open text when it was trained and tested on the ati? 1 introduct ion in recent years, we have seen a fair number of l)al)ers re- porting accuracies ofmore than 95% for english part of speech tagging with statistical language modeling tech- niques [2-4, 10, 11]. on the other hand, there are few works on stochastic japanese morphological nalysis [9, 12, 14], and they dont seem to have convinced the japanese nlp community that the statistically-based teclmiques are superior to conventional rule-based tech- niques uch as [16, 17]. we show in this paper that we can buihl a stochastic japanese morphological nalyzer that offers approxi- mately 95% accuracy on a statistical language model- ing technique and an efficient two-pass n-best search strategy. we used tile simple tri-pos model as the tagging model for japanese. probability estimates were ob- tained after training on the ati{ l)ialogue database [5], whose word segmentation a d part of speech tag assignment were laboriously performed by hand. we propose a novel search strategy for getting the n best morphological nalysis hypotheses for the in- put sentence. it consists of the forward dynamic pro- gramming search and the backward a* search. the proposed algorithm amalgamates and extends three well-known algorithms in different fields: the minimum connective-cost method [7] for japanese morphologi- cal analysis, extended viterbi algorithm for charac- ter recognition [6], and "l~ee-trellis n-best search for speech recognition [15]. we also propose a novel method for handling un- known words uniformly within the statistical pproach. using character trigrams ms tim word model, it gener- ates the n-best word hypotheses that match the left- most substrings starting at a given position in the input senten ce. moreover, we propose a novel method for evaluat- ing the performance of morphological analyzers. un- like english, japanese does not place spaces between words. it is difficult, even for native japanese, to place word boundaries consistently because of the aggluti- native nature of the language. thus, there were no standard performance metrics.a stochastic japanese morphological analyzer using a forward-dp backward-a* n-best search algor i thm masa.aki nagata ntt network information systems l~,~bor~ttorics 1-2356 take, yokosuka-shi, kanagaw~t, 238-03 japan (tel) 4-81-468-59-2796 (fax) +81-468-59-3428 (e-mail) nagata@nttnly.ntt . 
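Both excerpts above lean on the same machinery: a Markov-model tagger trained on a hand-tagged corpus and decoded with a Viterbi-style dynamic-programming search. The sketch below is a toy bigram (first-order) version with invented probabilities; the cited systems use second-order models, smoothing, and, in the Japanese case, an additional backward A* pass to produce N-best analyses.

```python
# Toy bigram-HMM tagger: Viterbi decoding over hand-made transition and
# emission probabilities.  Missing entries get a small probability floor so
# the example never zeroes out; real taggers use proper smoothing.
import math

def viterbi(words, tags, trans, emit, start="<s>"):
    """Most probable tag sequence under a bigram HMM.
    trans[(t_prev, t)] and emit[(t, word)] are probabilities."""
    floor = 1e-8
    lp = lambda p: math.log(p if p > 0 else floor)
    # best[i][t] = (score, previous tag) for tagging words[:i+1] ending in tag t
    best = [{t: (lp(trans.get((start, t), 0)) + lp(emit.get((t, words[0]), 0)), None)
             for t in tags}]
    for w in words[1:]:
        col = {}
        for t in tags:
            score, prev = max(
                (best[-1][tp][0] + lp(trans.get((tp, t), 0)) + lp(emit.get((t, w), 0)), tp)
                for tp in tags)
            col[t] = (score, prev)
        best.append(col)
    # backtrace from the best final state
    last = max(tags, key=lambda t: best[-1][t][0])
    seq = [last]
    for col in reversed(best[1:]):
        seq.append(col[seq[-1]][1])
    return list(reversed(seq))

if __name__ == "__main__":
    tags = ["DT", "NN", "VB"]
    trans = {("<s>", "DT"): 0.8, ("DT", "NN"): 0.9, ("NN", "VB"): 0.6, ("NN", "NN"): 0.3}
    emit = {("DT", "the"): 0.7, ("NN", "store"): 0.4, ("VB", "store"): 0.3}
    print(viterbi(["the", "store"], tags, trans, emit))   # ['DT', 'NN']
```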
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1042.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1042.txt new file mode 100644 index 0000000000000000000000000000000000000000..486c0fa16d08819475cddb5d015917df18b8896b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1042.txt @@ -0,0 +1 @@ +in l)articnlar, the feature set is more detailed than those of the major commercial dictionaries, such ;us the oxford adwmced learners dictionary (oali)) [d] and the longnum dictionary of contemporary english (ldoce) [8], which haw~ i)een widely used as a source o[ lexical i,,for,,lal, ioil ill ];lll- guage analyzers. 1 in addil.ion, we have ahned to be irio,e corrlpreheiisive ill capturhig featt, res (hi partic.u- ]ar, stibcategorization [eatures) than co,ii,llercial dic tlonaries. 2 structure ti le word list was derived fion, the file prepared by prof. roger mitten from the oxford adwn,ced learners dictionary, and contains about 38,000 head forms, although some purely british terms have been omitted, loach entry is organized as a nested set of typed feature-vahle ists. we currently use a lisp-like parenthesized list notation, although the lexicon couhl ito facilii~ate the transit ion to comlex by currenl, users of these dictionaries, we have i)reparcd mappings froln comi,ex classes to those of several other dictionaries. be readily mapped into other hwn,s, such as sc, mi,- marked text, if desired. sollie sauil)le dicticl l ,ary entries are shown ilt f igure 1. the first syml/ol gives the part of speech; a word with several parts of speech will have several dictionary entries, one for each part of speech. each e,itry has all :orth foati lre, g iv ing the base fo,lfl of ti le word, no,ins, verbs, and adjectiw~s with irregular inorphology will liave featt,res for the irregular fo,.iris :plural, :past, :past- part, etc. words which take con-,i)leirients will have a subcatego,ization (:sube) [eat,ire. for exaniple> the verb "ai )andon" eali occur with a ilollri phrase followed by a prepositional phrase with tim preposition "to" (e.g., "1 abandoned hii,i to the linguists.") or with just a ,loll,, phrase comple i f ient ( " [ aballdone(l the shill."). other syntactic features are recorded under :features. for example, the noun "abandon" is marked as (count- able :pval ("wlth")), indicating that it must appear in the singular with a deter,niner unless it is preceded by the preposzion "with". 2.1 subcategor i za t ion we have paid p~uticular attention to providing detailed subcategorization information (information about complement s ructure), both for verbs and for tllose nouns and adjectives which do take cmnl)lements. in order to insure the coml)leteness of our codes, we studied the codiug e)ul)loyed by s(weral other u,ajor texicous, includh,g (,he ihandeis verh lexlcolt 2, the a(jqijii,ex prc, ject [10], the nyu linguistic string lroject [9], the oali), and ia)oci], a, nd, whenever feasime, haw~ sought to incorporate distinctions made in any of these all(tie,tortes.comlex syntax : bu i ld ing a computat iona l lex icon ra lph gr i shm:m, cather ine mac leod, and adam mcyers computer science depar tment , new york un ivers i ty 715 broadw,~y, 7th f loor , new york, ny 10003, u.s.a. 
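Since COMLEX entries are described above as nested, typed feature-value lists in a Lisp-like parenthesized notation, a small reader is enough to load them into a program. The entry string in the example is a made-up illustration of the :orth/:subc style of the format, not a verbatim record from the lexicon.

```python
# Tiny s-expression reader for Lisp-like lexicon entries, plus a helper that
# groups the :keyword value pairs into a dictionary.
import re

def read_sexpr(text):
    """Parse one parenthesized expression into nested lists of strings."""
    tokens = re.findall(r'\(|\)|"[^"]*"|[^\s()]+', text)
    pos = 0
    def parse():
        nonlocal pos
        tok = tokens[pos]; pos += 1
        if tok == "(":
            items = []
            while tokens[pos] != ")":
                items.append(parse())
            pos += 1                      # consume ")"
            return items
        return tok.strip('"')
    return parse()

def keyword_pairs(entry):
    """Group a flat (class :key val :key val ...) list into (class, {key: val})."""
    cls, rest = entry[0], entry[1:]
    return cls, {rest[i].lstrip(":"): rest[i + 1] for i in range(0, len(rest), 2)}

if __name__ == "__main__":
    sample = '(verb :orth "abandon" :subc ((np-pp :pval ("to")) (np)))'   # illustrative only
    cls, feats = keyword_pairs(read_sexpr(sample))
    print(cls)              # verb
    print(feats["orth"])    # abandon
    print(feats["subc"])    # [['np-pp', ':pval', ['to']], ['np']]
```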
{gr i s lnnan ,mac leod ,me.yers } (@cs.nyu.e(ht abstract we des((tile tile design of comlex syntax, a co,nputa- tional lexicon providing detailed syntactic iuformation ff)r approximately 38,000 english headwords. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1079.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1079.txt new file mode 100644 index 0000000000000000000000000000000000000000..20bf1a8504811e43e399acbae77ade462df1ced4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-1079.txt @@ -0,0 +1 @@ +principle-based grammars, such as govern- ment-binding (gb) theory (chomsky, 1981; haegeman, 1991), offer many advantages over rule-based and unification-based grammars, such as the universality ofprinciples and mod- ularity of components in the grammar. prin- ciples are constraints over x-bar structures. most previous principle-based parsers, e.g., (dorr, 1991; font, 1991; johnson, 1991), es- sentially generate all possible x-bar structures of a sentence and then use the principles to fil- ter out the illicit ones. the drawback of this approach is the inefficiency due 1;o the large number of candidate structures to be. filtered out. the problem persists even when w~rions techniques such as optimal ordering of princi- ples (fong, 1991), and corontining (dorr, 1991; johnson, 1991) are used. this problem may also account for the fact that these parsers are experimental nd have limited coverage. this paper describes an efficient, broad- coverage, principle-based parser, called prin- cipar. the main innovation in principar is that it applies principles to descriptions o17 x- bar structures rather than the structures them- selves. x-bar structures of a sentence are only built when their descriptions have satisfied all the pri ncil)les. o dynamic data \ [~ static dala l)rocegsing module data flow figure 1: '.pile architecture of principar figure i shows the architecture of prin- cipar. sentence analysis is divided into three steps. the lexical analyser first converts the in- put sentence into a set of texical items. then, a message passing algorithm for ob-parsing is used to construct a shared parse forest. fi- nally, a parse tree retriever is used to enumer- ate the parse trees. the key idea of the parsing algorithm was presented in (tin, 199:1). this paper presents some implementation details and experimental results.this paper presents some implementation details and experimental results. the links in the net- work re.present relationships bel;ween the cat- egories. the correct parses for all the sentences in tm)le 1 are returned by the parser. the parser in pihncipar is based on a message-passing framework proposed by \],in (1993) and l,in and ooebel (1993), which uses a network to encode the grammar. bonnie dorr for comments about sections 1, 2, and 3. acknowledgements the author wishes to thanl? the nodes in tile grammar network represent grammati- cal categories (e.g., np, nbar, n) or subcate- gories, such as v:np (transitive verbs that take nps as complements). f igure'2 depicts ~ port:ion c" tile gr ;unmar network for |dnglish. gb-principles are implemented as lo- cal constraints attached to the nodes and 482 perco la t ion cormtra in ts attached to links in the network. this re'- search was supported by naturm sciences and engineering research council of canada grant ogp121338. 
table 1 lists the parsing time and the number of parses for several ex- ample sentences. ' 2 \ i t " "ip cpspe~.. our experiments have shown that the parser is very fast. , - /~ / \~ i aai ~ i'p " ni i vi : 1 t . . ".,.... \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2174.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2174.txt new file mode 100644 index 0000000000000000000000000000000000000000..96467d5c345db506e6b8690e6f298b222f70d8dc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2174.txt @@ -0,0 +1 @@ +text types thor(; are. different types of l;exl [exl.s "al)oui," l.he sa.me th ing m~ty be in differing geurcs, of difl(~rem. i y i)es, ;rod of v;trying quality. texts vary along st.ver;d param. el.ers, a.ll relcwull, for l,he gcuera.l inlortlu~tiol~ rel, ri(wal problem of real.thing rea(lcr needs m.i texts. (liven this variat ion, in a text retrieval eonl.ext, the l)rol)lems arc (i) i (mttifying ;cures, and (ii) choosing criteria t,o ch,s-- ter texts of the smnc gem:e, wit, h l)redictal>le l>recision aml rcca.ll. this should uot he eonfused with t, he issue of idenl.ifying topics, m,d choosiug criw+ria that. diserinl- inatc on(: topic from auother. all.hough u(>t orthogonal to gem(, del)endent; wu+iat, ion, the wuiat, ioll i, hat, rela, l,es dirc(-t.ly to (:onw.ui; and topic is moug or, her (litu<.usions. na.l,ura.lly, there is (;o-va.riancc.. iexl.s al)oul. (:(+rl.aitl topics ula,y only occur iu (:(;rt;ailt g(!tll(!s, alt(] {.exl.s ill eertaiu ge.nres may only t.rea.t c(ql.ain topics; mosl. l.ol)- ics do, however, occur iu several ;cures, which is what inl;erests us here. douglas i~il)et: has sl, udied l;exl, variat.ion along scv eral l )aranmtcrs, and found that t,cxt.s can i)(,, cousidcrcd to wvry along live ditnensious. in his st, udy, he clush.rs [~ai.ures according t.o eowuiauce, t.o find tmderlyiug di mens ions (198!)). we wish to liud a method for idenl.ifv- in ; easily eomput.al)h; i)[tl:al,|et.chs t.hat ra.l>idly classify previously illls(?]~{ecogni:zing ]:f:xt genii.es wl r l l s:lb,/l:ll,i,; ~/~i,;ii/i(~s using discii . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2178.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2178.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0535acc02105426f9088b8bb8cc5423c8875c9f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2178.txt @@ -0,0 +1 @@ +there have been quite a number of recent papers on parallel text: brown et al(1990, 1991, 1993), chen (1993), church (1993), church et al(1993), dagan et al(1993), gale and church (1991, 1993), isabelle (1992), kay and rgsenschein (1993), klavans and tzoukermann (1990), kupiec (1993), matsumoto (1991), ogden and gonzales (1993), shemtov (1993), simard et al(1992), warwick- armstrong and russell (1990), wu (to appear). most of this work has been focused on european language pairs, especially english-french. it remains an open question how well these methods might generalize to other language pairs, especially pairs such as english-japanese and english- chinese. 
in previous work (church et al 1993), we have reported some preliminary success in aligning the english and japanese versions of the awk manual (aho, kernighan, weinberger (1980)), using charalign (church, 1993), a method that looks for character sequences that are the same in both the source and target. the charalign method was designed for european language pairs, where cognates often share character sequences, e.g., government and gouvernement. in general, this approach doesn't work between languages uch as english and japanese which are written in different alphabets. the awk manual happens to contain a large number of examples and technical words that are the same in the english source and target japanese. it remains an open question how we might be able to align a broader class of texts, especially those that are written in different character sets and share relatively few character sequences. the k-vec method attempts to address this question.k-vec starts by estimating the lexicon. consider the example: fisheries --~ p~ches. the k-vec method attempts to address this question. in this way, we might be able to apply word_align to a broader class of language combinations including possibly english-japanese and english-chinese. this estimate could be used as a starting point for a more detailed alignment algorithm such as word_align (dagan et al, 1993). the k-vec algorithm generates a quick-and-dirty estimate of a bilingual exicon. the k-vec algorithm will discover this fact by noting that the distribution of fisheries in the english text is similar to the distribution of p~ches in the french. the concordances for fisheries and p~ches are shown in tables 1 and 2 (at the end of this paper). currently, word_align depends on charalign (church, 1993) to generate a starting point, which limits its applicability to european languages since char_align was designed for language pairs that share a common alphabet. these tables were computed from a small fragment ofthe. 2.3 royale languages 2.3 grief grievance 7. there have been quite a number of recent papers on parallel text: brown et al(1990, 1991, 1993), chen (1993), church (1993), church et al(1993), dagan et al(1993), gale and church (1991, 1993), isabelle (1992), kay and rgsenschein (1993), klavans and tzoukermann (1990), kupiec (1993), matsumoto (1991), ogden and gonzales (1993), shemtov (1993), simard et al(1992), warwick- armstrong and russell (1990), wu (to appear). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2195.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2195.txt new file mode 100644 index 0000000000000000000000000000000000000000..dd5ac46a06f3966ff096e7897d8b889464af3497 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C94-2195.txt @@ -0,0 +1 @@ +prel)ositioual phrase attachment disambiguation is a difficult problem. take, for example, the sen- rouge: ( l ) buy a ear \[p,o with a steering wheel\]. we would guess that the correct interpretation is that one should buy cars that come with steer- ing wheels, and not that one should use a steering wheel as barter for purchasing a car. \]n this case, we are helped by our world knowledge about auto- mobiles and automobile parts, and about typical methods of barter, which we can draw upon to cor- rectly disambignate he sentence. 
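The K-vec idea described in the excerpt above (cut each half of the parallel text into K segments and compare the occurrence patterns of source and target words across those segments) is easy to prototype. The sketch below scores pairs with plain pointwise mutual information over the K-segment co-occurrence counts and uses an invented two-line "corpus", so the ranking it prints is only illustrative; the published method's exact scoring and filtering are not reproduced here.

```python
# K-vec-style quick-and-dirty bilingual lexicon estimation.
import math
from collections import defaultdict

def k_vectors(tokens, k):
    """Map each word to the set of segment indices (0..k-1) it occurs in."""
    vecs = defaultdict(set)
    seg_len = max(1, len(tokens) // k)
    for i, w in enumerate(tokens):
        vecs[w].add(min(i // seg_len, k - 1))
    return vecs

def mi_score(src_segs, tgt_segs, k):
    """Pointwise mutual information of co-occurrence within the K segments."""
    joint = len(src_segs & tgt_segs)
    if joint == 0:
        return float("-inf")
    return math.log2((joint / k) / ((len(src_segs) / k) * (len(tgt_segs) / k)))

def candidate_lexicon(src_tokens, tgt_tokens, k=10, top=5):
    sv, tv = k_vectors(src_tokens, k), k_vectors(tgt_tokens, k)
    scored = [(mi_score(sv[s], tv[t], k), s, t) for s in sv for t in tv]
    return sorted(scored, reverse=True)[:top]

if __name__ == "__main__":
    english = "the fisheries act protects fisheries and oceans".split()
    french = "la loi sur les peches protege les peches et les oceans".split()
    for score, e, f in candidate_lexicon(english, french, k=4):
        print(f"{e:12s} {f:12s} {score:+.2f}")
```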
beyond possibly needing such rich semantic or conceptual int'ornla- tion, a l tmann and steedman (as88) show that there a,re certain cases where a discourse model is needed to correctly disambiguate prepositional phrase atta.chment. however, while there are certainly cases of an> biguity that seem to need some deep knowledge, either linguistic or conceptual, one might ask whag sort of performance could 1oe achieved by a sys- tem thai uses somewhat superficial knowledge au- *parts of this work done a.t the computer and hp lbrmation science department, university of penn- sylvania were supported by by darpa and afosr jointly under grant no. afosr-90-0066, and by aro grant no. daal 03-89-c0031 pr\[ (first author) and by an ibm gradmtte fellowship (second author). this work was also supported at mit by arpa under con- tract n000t4-89-j-la32= monitored through the office of naval resear<:h (lirst a.uthor). tomatically ~xtracted from a large corpus. recent work has shown thai; this approach olds promise (h\]~,91, hr93). hi this paper we describe a new rule-based ap- proach to prepositional phrase attachment, disam- biguation. a set of silnple rules is learned au- tomatically to try to prediet proper attachment based on any of a number of possible contextual giles. baseline l l indle and rooth (iir91, 1\[17{93) describe corpus-based approach to disambiguating between prepositional phrase attachlnent to the main verb and to the object nonn phrase (such as in the ex- ample sentence above). they first point out that simple attachment s rategies snch as right associa- tion (kim73) and miuimal a.tbtchment (fra78) do not work well i,l practice' (see (wfb90)). they then suggest using lexical preference, estimated from a large corpus of text, as a method of re- solving attachment ambiguity, a technique the}' call "lexical association." from a large corpus of pursed text, they first find all nonn phrase heads, and then record the verb (if' any) that precedes the head, and the preposition (if any) that follows it, as well as some other syntactic inforlnation about the sentence. an algorithm is then specified 1,o try to extract attachment information h'om this table of co-occurrences. i!'or instance, a table entry is cousidered a definite instance of the prepositional phrase attaching to the noun if: '\['he noun phrase occm:s in a context where no verb could license the prepositional phrase, specifically if the noun phrase is in a subjeet or other pre-verbal position. they specify seven different procedures for decid- ing whether a table entry is au instance of no attachment, sure noun attach, sm:e verb attach, or all ambiguous attach. using these procedures, they are able to extract frequency information, 1198 counting t, he numl)e,r of times a ptu:ticular verb or ncmn a.ppe~u:s with a pal:tieuh~r l~reposition. these frequen(;ies erve a.s training d~t;a for the statistical model they use to predict correct i~ttachmenl to dismnbigu;~te s ntence (l), they would compute the likelihood of the preposition with giwm the verb buy, {rod eolltrast that with the likelihood of that preposition given i:he liottll whed. ()he, problem wit;h this ,~pproa~ch is tll~tt it is limited in what rel~tionships are examined to make mi ~d;tachment decision. simply extending t\[indle and l{,ooth's model to allow r)r relalion- ships such as tlml~ i)e.tweell the verb and the' ob- ject o\[' the preposition would i:esult ill too large a. parameter spa.ce, given ~my realistic quantity of traiuing data. 
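The lexical-association idea summarized above reduces to comparing how strongly the preposition is associated with the verb versus with the object noun, using co-occurrence counts gathered from parsed text. The counts and the add-one smoothing below are invented for illustration; Hindle and Rooth's actual estimation and significance testing are more careful.

```python
# Sketch of a lexical-association comparison for PP attachment:
# attach to the verb if P(prep | verb) outweighs P(prep | object noun).
import math

def lexical_association(verb, noun, prep, vp_counts, np_counts, v_counts, n_counts):
    """log2 of P(prep|verb) / P(prep|noun), with add-one smoothing."""
    p_given_v = (vp_counts.get((verb, prep), 0) + 1) / (v_counts.get(verb, 0) + 2)
    p_given_n = (np_counts.get((noun, prep), 0) + 1) / (n_counts.get(noun, 0) + 2)
    return math.log2(p_given_v / p_given_n)

if __name__ == "__main__":
    vp_counts = {("buy", "with"): 30, ("buy", "for"): 80}   # invented corpus counts
    np_counts = {("car", "with"): 120}
    v_totals = {"buy": 500}
    n_totals = {"car": 800}
    score = lexical_association("buy", "car", "with", vp_counts, np_counts, v_totals, n_totals)
    attachment = "verb" if score > 0 else "noun"
    print(f"score={score:+.2f} -> attach the PP to the {attachment}")
```

With these made-up counts the score is negative, so "with a steering wheel" attaches to the noun, matching the reading preferred in the excerpt.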
another prol)lem of the method, shared by ma.ny statistical approaches, is that the. model ~(:quired (inring training is rel)reser~ted in a huge, t~d)le of probabilities, pl:ecludiug any stra.ightf'orward analysis of its workings. ' l~-ansformat ion-based er ror -dr iven learn ing tra, ns\]bl'm~d;ion-lmsed errol:-dhven learlting is ~ sin@e learning a.lgorithm tlmt has t)eeu applied to a. number of natural la.ngm,ge prol)ie.ms, includ- jllg l)a.t't o\[' speech tagging and syuta.cl, ic l)m:sing (1h:i92, \]h:i93a, bri!)gb, bri9d). figure :1 illus- trates the learning l)l:occ'ss, l:irsl, tlll;21nlola, ted text; is l)assed through the initial-st;ate mmota- tot. 'l'lw~ initial-stat, e area)tater can range in com- plexity from quite trivial (e.g. assigning rmtdom strll(:ttll:c) to quit, e sophistica.ted (e.g. assigning the output of a. i{nowledge-based ;/l/llot;~l, tol' that was created by hand). ouce text has beeu passed through the iuitia.l-state almol, at.or, it. is then (;ore- pared to the h'ugh,, as indicated ill a luamlally an- nota,tea eorl)lls , and transformations are le~u'ned that can be applied to the oul, put of the iuitial state remora, tot t;o make it, better resemble the :ruffs. so far, ouly ~ greedy search al)proach as been used: at eaeh itera.tion o\[' learning, t.he tra nsfo> nl~tion is found whose application results in the greatest iml)rovenmnt; ha.t transfk)rmation is then added to the ordered trmlsforlmltiou list and the corpus is upd~d.ed by a.pplying the. learned trans formation. (see, (i{,mg,\[) for a detailed discussiou of this algorithm in the context of machiue, le, aru-- iug issues.) ottce 3,11 ordered list; of transform~tions i learned, new text, can be mmotated hy first ai> plying the initial state ~mnotator to it and then applying each o\[' the traam'ormations, iu order. unannotati{d \] "i'i~x'i' 1nh'\[ai, l state annotatlid text ti~.i j'\['l l , ~ e , n el( ~-~ rui ,i-s figure \[: transfonm~tion-i~ased error.-driven l,earlfiug. r lh:ansformation-b ased prepos i t iona l phrase at tachment we will now show how transformation-based e.rrol> driwm igmfing can be used to resolve prep(~si- tiered phrase at, tachment ambiguity. the l)reposi- tioiml phrase a.tt~munent |ea.riter learns tra.nsfor-- ill~ttiolls \[?onl a c,)l:l>tls o\[ 4-tuples of the \['orm (v i11 i\] 1|9), where v is ~1 w;rl), nl is the head of its objecl, llolni \]phrase, i ) is the \])l'epositioll, and 11:2 is the head of the noun phrase, governed by the prel)c, sition (for e,-:anq~le, sce/v :1~' bo:q/,l o,/p the h711/~2). 1,'or all sentences that conlbrm to this pattern in the penn treeb~mk w{dl st, l:eet 3ourlml corpns (msm93), such a 4-tuplc was formed, attd each :l-tuple was paired with the at~aehnteut de- cision used in the treebauk parse) '\['here were 12,766 4q;ul)les in all, which were randomly split into 12,206 trnining s**mples and 500 test samples. \[n this e?periment (as in (\[ii~,9\], i\]l{93)), tim at- tachment choice for l)repositional i)hrases was i)e- i,ween the oh.iecl~ mmn and l,he matrix verb. \[n the initial sl,~te mmotator, all prepositional phrases i \])at.terns were extra.clxxl usj.ng tgrep, a. tree-based grep program written by rich pito. '\]'\]te 4-tuples were cxtract;ed autom~tk:ally, a.ud mista.kes were not. m~vn tta.lly pruned out. 1199 are attached to the object, noun. 2 this is tile at- tachment predicted by right association (kim73). the allowable transforlnations are described by the following templates: ? 
change the attachment location from x to y if: - n l i sw - n2 is w - v isw -- p is w - n l is w1 and n2 is w2 - n l i swl andv isw2 here "from x to y" can be either "from nl to v" or "from v to nl ," w (w1, w2, etc.) can be any word, and the ellipsis indicates that the complete set of transformations permits matching on any combination of values for v, n l , p, and n2, with the exception of patterns that specify vahms for all four. for example, one allowable transformation would be change the attachment location from nl to v if p is "until". learning proceeds as follows. first, the train- ing set is processed according to the start state annotator, in this case attaching all prepositional phrases low (attached to nl) . then, in essence, each possible transtbrmation is scored by apply- ing it to the corpus and cornputing the reduction (or increase) in error rate. in reality, the search is data driven, and so the vast majority of al- lowable transformations are not examined. the best-scoring transformation then becomes the first transformation i the learned list. it is applied to the training corpus, and learning continues on the modified corpus. this process is iterated until no rule can he found that reduces the error rate. in the experiment, a tol, al of 471 transfor- mations were learned - - figure 3 shows the first twenty. 3 initial accuracy on the test set is 64.0% when prepositional phrases are always attached to the object noun. after applying the transforma- tions, accuracy increases to 80.8%. figure 2 shows a plot of test-set accuracy as a function of the nulnber of training instances. it is interesting to note that the accuracy curve has not yet, reached a 2if it is the case that attaching to the verb would be a better start state in some corpora, this decision could be parameterized. zin transformation #8, word token amount appears because it was used as the head noun for noun phrases representing percentage amounts, e.g. "5%." the rule captures the very regular appearance in the penn tree- bank wall street journal corpus of parses like sales for the yea," \[v'p rose \[np5yo\]\[pp in fiscal 1988\]\]. accuracy 81.00 rl 80.00 !! 79,00 t 77.00 !--r . . . / - -f . . . %oo!1 / i 74:001 . . . _ _ t .... _ _ 73.00 j - 72.00 l l i _ __ / __ . ,?!>2 - 70.00 69.00 68.00 67.00 64.00 0.00 5.00 i q ! i t t!aining size x 103 10.00 figure 2: accuracy as a function of l;raining corpus size (no word class information). plateau, suggesting that more training data wonld lead to further improvements. adding word class in format ion in the above experiment, all trans\[brmations are. triggered hy words or groups of words, and it is surprising that good performance is achieved even in spite of the inevitable sparse data problems. there are a number of ways to address the sparse data problem. one of the obvious ways, mapping words to part of speech, seerns unlikely to help. h> stead, semanl, ic class information is an attraclive alternative. we incorporated the idea of using semantic ino tbrmation in the lbllowing way. using the word~ net noun hierarchy (milg0), each noun in the ffa{ning and test corpus was associated with a set containing the noun itself ph.ts the name of every semantic lass that noun appears in (if any). 4 the transformation template is modified so that in ad- dition to asking if a nmm matches ome word w, 4class names corresponded to unique "synonynl set" identifiers within the wordnet noun database. 
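The greedy loop described above (start with every prepositional phrase attached to N1, repeatedly pick the single transformation that most reduces training error, apply it, and continue) is compact enough to sketch directly. Only one-feature triggers over the (v, n1, p, n2) tuple are implemented and the four training items are invented, so this illustrates the procedure rather than reproducing the paper's 12,766-sample experiment.

```python
# Minimal transformation-based error-driven learner for PP attachment.
FEATURES = ["v", "n1", "p", "n2"]

def apply_rule(tag, tup, rule):
    frm, to, feat, word = rule
    return to if tag == frm and tup[FEATURES.index(feat)] == word else tag

def errors(tags, labels):
    return sum(t != l for t, l in zip(tags, labels))

def learn(data, labels, max_rules=10):
    tags = ["N1"] * len(data)                 # start state: attach low (to N1)
    rules = []
    while len(rules) < max_rules:
        best = None
        for feat in FEATURES:
            for word in {tup[FEATURES.index(feat)] for tup in data}:
                for frm, to in (("N1", "V"), ("V", "N1")):
                    rule = (frm, to, feat, word)
                    new = [apply_rule(t, x, rule) for t, x in zip(tags, data)]
                    gain = errors(tags, labels) - errors(new, labels)
                    if best is None or gain > best[0]:
                        best = (gain, rule, new)
        if best is None or best[0] <= 0:      # stop when no rule reduces the error
            break
        rules.append(best[1])
        tags = best[2]                        # apply the rule before learning the next one
    return rules

if __name__ == "__main__":
    data = [("buy", "car", "with", "wheel"), ("buy", "car", "for", "mother"),
            ("leave", "meeting", "in", "hour"), ("see", "boy", "on", "hill")]
    labels = ["N1", "V", "V", "N1"]           # hand-assigned toy answers
    for rule in learn(data, labels):
        print("change %s -> %s if %s is %r" % rule)
```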
a noun "appears in" a class if it falls within the hy- ponym (is-a) tree below that class. in the experiments reported here we used wordnet version :l.2. 1200 1 2 4 5 (3 7 8 9 10 ii 12 :13 \]4 15 \[6 17 \[8 119 2()_ change att{:~ehment location l"r~m~ to ( ;omit ion n1 v p is at n\ ] \ / p is as n1 v i ) is iulo n:i \/ p is ,l}'om n:i v p is with n\] v n2 is year n 1 v p is by i? is i~ and n i v ni ix amounl n \[ \/ \]' is lhrough ni v \]) is d'urb~g ni v v ix p,ul n1 v n2 is mou.lk n\[ v 1' is ulldcr nj v 1 ) is after v is have and n1 v i' is b~ n:\[ v p is wilk.oul v ni p is of v is buy and n1 \/ p is for n:i v p is bejbl"( v is have and ni v p is o~ x/ l( v v ~ v v / v v ,/ figure 3: the \[irst 20 transforntat;ions learned tbr preposil;ional phrase ~ttachme, n|;. it: (~an a/so ask if" it is a~ member of some class c. s this al)proaeh i;o data. sparseness i similar to tllat of (l{,es93b, li, l\[93), where {~ method ix proposed for using wordnet in conjunction with a corpus to ohtain class-based statisl, ie,q. ()lit' method here is ltlllc\]l simpler, however, in i;hat we a.re only us- ing boolean values to indieal;e whel;her ~ word can be a member of' a class, rather than esl, imat ing ~ filll se{, of joint probabil it ies involving (:lasses. since the tr;ulsformation-based al)l/roach with classes ccm gener~dize ill a way that the approach without classes is ml~l)le to, we woldd expect f'cwer l;ransf'ormal;ions to be necessary, l!;xperimeah, ally, this is indeed the case. in a second experiment;, l;raining a.ml testing were era:tied out on the same samples as i , the previous experiment, bul; i;his t ime using the ext, ende, d tra ns lbrmat ion t(;ml)la.tes for word classes. a total of 266 transformations were learned. applying l.hese transt'ormai.ions to the test set l'eslllted in a.n accuracy of' 81.8%. \[n figure 4 we show tile lirst 20 tra.nsform{~l, ions lem'ned using ilollll classes. class descriptions arc surrounded by square bracl{ets. (; 'phe first; grans- ibrmation st~l.cs thai. if" n2 is a. nomt i, hal; describes time (i.e. ix a. member of wordnet class that in- cludes tim nouns "y(;ar," "month," "week," and others), thell the preltositiomd phrase should be al;tache(\[ t,() the w;rb, since, tim(; is \]nlml more l ikely io modify a yet'it (e.g. le,vc lh(: re(cling iu an hour) thajl a, lloun. this exlw, r iment also demonstrates how rely \[~?~l;ul:e-based lexicon or word classiflcat, ion scheme cau triviajly be incorljorated into the learner, by exlencling l;ransfot'nlal,iolls to allow thent to make l'efel'ealc(? |;o it wol:(\[ g i l t \ [ { l i ly o\[' its features. \],valuation against other algorithms in (lil~91, hr93), tra.inittg is done on a superset el' sentence types ttsed ill train- ing the transforlj~atiolfbased learner. the transformation-based learner is i, rained on sen- tences containing v, n\[ and p, whereas the algo- r i thm describe.d by l l indle and i~,ooth ca.n zdso use sentences (;ontailfing only v and p, (n' only nl and i1. \[11 their lmper, they tra.in on ow~r 200,000 sen- lettces with prel)ositions f'rotn the associated press (apt newswire, trod i;hey quote a.n accuracy of 78- 80% on ap test &~ta.. ~' for reasons of ~: u n- t ime c\[lk:icn(:y, transfonmll, ions tmddng re\['crence 1:o tile classes of both n l a,nd n2 were iloi; p(~l?lxiitl, tr(i. gi;or expository purposes, the u.iqm'. wordnet id('.ntilicrs luwe been replaced by words lh~ll describe the cont, cnt of the class. 
1207 (~lml~.ge \] at tachment , / location / # li'rom t 'fo \[ condition 1 n1 v n2 is \[time\]prel)ositioual phrase attachment disambiguation is a difficult problem. wordnet id('.ntilicrs luwe been replaced by words lh~ll describe the cont, cnt of the class. 1207 (~lml~.ge \] at tachment , / location / # li'rom t 'fo \[ condition 1 n1 v n2 is \[time\] take, for example, the sen- rouge: ( l ) buy a ear \[p,o with a steering wheel\]. 4 n1 v p is into 5 n 1 v p is from 6 n1 v 1 ) is wilh 7 n1 v p is of p is in and ni is 8 n 1 v \[measure, quanlily, amou~l\] p is by all.el 9 n1 v n2 is \[abslraclion\] i 0 ni v p is lhro'ugh 1) is in and n i is 11 ni v \[group,group.in.g\]. ~' for reasons of ~: u n- t ime c\[lk:icn(:y, transfonmll, ions tmddng re\['crence 1:o tile classes of both n l a,nd n2 were iloi; p(~l?lxiitl, tr(i. gi;or expository purposes, the u.iqm'. ilow- ever, this l)eeomes less of a probh'.m as atmotated eorl}ora beeolne increasingly available, and sug- gests the comhinat ion o1:' supexvised and uusuper vised methods as a.u ilfl;eresth g ave\]me \['or \['urther rese;ire\] \[. |;o it wol:(\[ g i l t \ [ { l i ly o\[' its features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1005.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1005.txt new file mode 100644 index 0000000000000000000000000000000000000000..30fa3f36db9a256afbf8171c77ebf0d34c4eec65 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1005.txt @@ -0,0 +1 @@ +pau gargallo 5, 08028 barcelona. g.rigau@lsi.upc.es abst ract . this paper presents a method for the resolution of lexical ambiguity of nouns and its automatic evaluation over the brown corpus. the method relies on the use oil the wide-coverage noun taxonomy of wordnet and the notion of conceptual distance among concepts, captured by a conceptual density formula developed for this purpose. this fully automatic method requires no hand coding of lexical entries, hand tagging of text nor any kind of training process. the results of the experiments have been automatically evaluated against semcor, the sense-tagged version of the brown corpus. 1 int roduct ion much of recent work in lexical ambiguity resolution offers the prospect hat a disambiguation system might be able to receive as input unrestricted text and tag each word with the most likely sense with fairly reasonable accuracy and efficiency. the most extended approach use the context of the word to be disambiguatcd together with inlormation about each of its word senses to solve this problem. interesting experiments have been performed in recent years using preexisting lexical knowledge resources: [cowie el al. 92], [wilks et al. 93] with ldoce, [yarowsky 92] with rogets international thesaurus, and [sussna 93], [voorhees 9311, [richardson etal. 94], [resnik 95] with wordnet. although each of these techniques looks promising for disambiguation, either they have been only applied to a small number of words, a few sentences or not in a public domain corpus. for this reason we have tried to disambiguate all the nouns from real *eneko agirre was supported by a grant from the basque goverment. part of this work is included in projects 141226-ta248/95 of the basque country university and pi95-054 of the basque government.word sense disambiguation using conceptual density eneko agirre* lengoaia eta sistema informatikoak saila. 
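The abstract above scores noun senses by a "conceptual density" over WordNet subtrees. The usual form of that formula (recalled from the published paper and to be treated as an assumption here) relates the number m of context-word senses that fall under a candidate concept to the size and mean branching factor of that concept's subtree; the toy numbers below exist only to show the computation.

```python
# Conceptual density sketch:
#   CD(c, m) = (sum_{i=0}^{m-1} nhyp**i) / descendants(c)
# where nhyp is the mean number of hyponyms per node under concept c and
# m is how many context-word senses fall inside c's subtree.

def conceptual_density(nhyp, m, descendants):
    """Density of a subtree with mean branching nhyp and `descendants` nodes,
    given that m context-word senses fall inside it."""
    if descendants == 0:
        return 0.0
    return sum(nhyp ** i for i in range(m)) / descendants

def pick_sense(candidate_subtrees):
    """candidate_subtrees: {sense: (nhyp, m, descendants)} -> densest sense."""
    return max(candidate_subtrees,
               key=lambda s: conceptual_density(*candidate_subtrees[s]))

if __name__ == "__main__":
    # Two hypothetical senses of an ambiguous noun; the second subtree is small
    # but captures more of the context senses, so it is chosen.
    subtrees = {"bank#river": (2.0, 1, 40), "bank#money": (2.5, 3, 35)}
    print(pick_sense(subtrees))        # bank#money
```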
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1021.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1021.txt new file mode 100644 index 0000000000000000000000000000000000000000..488a07cb998b63f222ab6839a6f34cea1089ba60 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1021.txt @@ -0,0 +1 @@ +(l,appin and leass, 1994) describe an algorithm for pronominal anaphora resolution with high rate of cor- rect analyses. while one of the strong points of this algorithm is that it operates primarily on syntactic in- formation ahme, this also turns out to be a limiting factor for its wide use: current state-of-the-art of prac- tically applicable parsing technology still falls short of robust and reliable delivery of syntactic analysis of real texts to the level of detail and precision that the filters a nd constraints described by i ,appin and l ,eass assume. we are particularly interested in a class of text pro- cessing applications, capable of delivery of content analysis to a depth inw~lving non-trivial amount of discourse processing, including anaphora resolution. the operational context prohibits us from making any assumptions concerning domain, style, and genre of input; as a result, we have developed a text processing framework which builds its capabilities entirely on the basis of a considerably shallower linguistic analysis of the input stream, thus trading off depth of base level analysis for breadth of cown:age. in this paper, we present work on modifying the lmp- pin/leass algorithm in a way which enables it to work off a flat morpho-syntactic analysis of the sentences of a text, while retaining a degree of quality and accuracy in pronorainal anaphora resolution comparable to that reported in (lappin and l,eass, 1994). the modifica- tions discussed below make the algorithm available to a wide range of text processing frameworks, which, due to the lack of full syntactic parsing capability, nor- really would have been unable to use this high preci- sion anap hora resolution tool. the work is additionally important, we feel, as it shows that informatkm about the content and logical structure of a text, in princi-. pie a core requirement for higher level semantic and discourse processes, can be effectively approximated by the right mix of constituent analysis and inferences about functional relations.lappin and leass' algorithm for pronominal anaphora resolution is capable of high accuracy, but requires in- depth, full, syntactic parsing of text. pie a core requirement for higher level semantic and discourse processes, can be effectively approximated by the right mix of constituent analysis and inferences about functional relations. (l,appin and leass, 1994) describe an algorithm for pronominal anaphora resolution with high rate of cor- rect analyses. the overall success of the algo- rithm is important, then, not only for the immediate utility of the particular modifications, but also because the strategy we have developed for circumventing the need for full syntactic analysis is applicable to other in- terpretation tasks which, like the problem of anaphora resolution, lie in the space of higher level semantic and discourse analysis. the coref value of the pronoun is set to that of the an- tecedent, adding it to the the antecedent's coref class, and the salience of the class is recalculated accordingly. 
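The salience bookkeeping described above can be mimicked with a handful of weighted factors, a per-sentence decay, and an agreement filter. The factor weights below follow commonly cited Lappin-and-Leass values but should be read as illustrative assumptions, as should the two hand-built candidates; the real algorithm also handles reflexives, pleonastic "it", and syntactic disjoint-reference constraints that are omitted here.

```python
# Schematic salience-based pronoun resolution.
WEIGHTS = {"sentence_recency": 100, "subject": 80, "existential": 70,
           "direct_object": 50, "indirect_object": 40, "head_noun": 80,
           "non_adverbial": 50}

def salience(candidate, current_sentence):
    """Sum the factor weights, halved for every sentence of distance."""
    base = sum(WEIGHTS[f] for f in candidate["factors"])
    distance = current_sentence - candidate["sentence"]
    return base / (2 ** distance)

def resolve(pronoun, candidates, current_sentence):
    """Pick the highest-salience candidate that agrees in number and gender."""
    compatible = [c for c in candidates
                  if c["number"] == pronoun["number"] and c["gender"] == pronoun["gender"]]
    if not compatible:
        return None
    return max(compatible, key=lambda c: salience(c, current_sentence))

if __name__ == "__main__":
    candidates = [
        {"text": "the algorithm", "sentence": 1, "number": "sg", "gender": "n",
         "factors": ["sentence_recency", "subject", "head_noun", "non_adverbial"]},
        {"text": "the corpus", "sentence": 2, "number": "sg", "gender": "n",
         "factors": ["sentence_recency", "direct_object", "head_noun", "non_adverbial"]},
    ]
    it = {"number": "sg", "gender": "n"}
    print(resolve(it, candidates, current_sentence=2)["text"])
```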
quantitative evaluation shows the anaphora resolution algorithm described here to run at a rate of 75'70 accu- racy. the base level linguistic analysis for actaphora resolu- tion is the output of a part of speech tagger, augmented with syntactic function annotatkms for each input to. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1055.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1055.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5f39ea1be1c8180a1bacedcfb2a407dfe352401 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1055.txt @@ -0,0 +1 @@ +finally, we show that we can provide effective acquisition techniques for novel word senses using a combi- nation of online sources. 1 in t roduct ion this paper addresses the issue of word-sense ambigu- ity in extraction from machine-readable resources for the construction of large-scale knowledge sources. we describe two experiments: one which ignored word- sense distinctions, resulting in 6.3% accuracy for seman- tic classification of verbs based on (levin, 1993); and one which exploited word-sense distinctions, resulting in 97.9% accuracy. these experiments were dual pur- pose: (l) to validate the central thesis of the work of (levin, 1993), i.e., that verb semantics and syntactic be- havior are predictably related; (2) to demonstrate hat a 15-fold improvement can be achieved in deriving se- mantic information from syntactic ues if we first divide the syntactic ues into distinct groupings that correlate with different word senses. finally, we show that we can provide effective acquisition techniques for novel word senses using a combination of online sources, in particular, longmans dictionary of contemporary en- glish (ldoce) (procter, 1978), levins verb classifica- tion scheme (levin, 1993), and wordnet (miller, 1985). we have used these techniques to build a database of 10,000 english verb entries containing semantic infor- mation that we are currently porting into languages such as arabic, spanish, and korean for multilingual nlp tasks such as foreign language tutoring and ma- chine translation. 322 2 automat ic lex ica l acqu is i t ion fo r nlp tasks as machine-readable resources (i.e., online dictionaries, thesauri, and other knowledge sources) become read- ily available to nlp researchers, automated acquisition has become increasingly more attractive. several re- searchers have noted that the average time needed to construct a lexical entry can be as much as 30 min- utes (see, e.g., (neff and mccord, 1990; copestakc et al., 1995; walker and amsler, 1986)). given that we are aiming for large-scale lexicons of 20-60,000 words, automation of the acquisition process has become a ne- cessity. previous research in automatic acquisition focuscs primarily on the use of statistical techniques, such as bilingual alignment (church and hanks, 1990; kla- vans and tzoukermann, 1996; wu and xia, 1995), or extraction of syntactic constructions from online dic- tionaries and corpora (brant, 1993; dorr, garman, and weinberg, 1995). others who have taken a more knowledge-based (interlingual) approach (lonsdale, mi- tamura, and nyberg, 1996) do not provide a means for systematically deriving the relation between sur- face syntactic structures and their underlying semantic representations. 
those who have taken more argument structures into account, e.g., (copestake t al., 1995), do not take full advantage of the systematic relation be- tween syntax and semantics during lexical acquisition. we adopt the central thesis of levin (1993), i.e., that the semantic class of a verb and its syntactic behav- ior are predictably related. we base our work on a correlation between semantic classes and patterns of grammar codes in the longmans dictionary of con- temporary english (ldoce) (procter, 1978). while the ldoce has been used previously in automatic x- traction tasks (alshawi, 1989; farwell, guthrie, and wilks, 1993; boguraev and briscoe, 1989; ,wilks et al., 1989; wilks et al., 1990) these tasks are primarily con- cerned with the extraction of other types of informa- tion including syntactic phrase structure and broad ar- gument restrictions or with the derivation of semantic structures from definition analyses.role of word sense disambiguation i lexical acquisition: predicting semantics from syntactic cues bonn ie j. dor r and doug jones depar tment of computer sc ience and ins t i tu te for advanced computer stud ies un ivers i ty of mary land a.v. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1058.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1058.txt new file mode 100644 index 0000000000000000000000000000000000000000..8e48d95ec270d545af7e61d53b1b6b2b6fc23c26 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1058.txt @@ -0,0 +1 @@ +in these results, the generative model performs significantly better than the others, and does about equally well at assigning pa.rt- of-speech tags. 1 in t roduct ion in recent years, the statistical parsing community has begun to reach out; for syntactic formalisms that recognize the individuality of words, l,ink grammars (sleator and pemperley, 1991) and lex- icalized tree-adjoining ranunars (schabes, 1992) have now received stochastic treatments. other researchers, not wishing to abandon context-flee grammar (ci"g) but disillusioned with its lexica] blind spot, have tried to re-parameterize stochas- tic ci"g in context-sensitive ways (black et al., 1992) or have augmented the formalism with lex- ical headwords (magerman, 1995; collins, 11996). in this paper, we 1)resent a [lexible l)robat)ilistic parser that simultaneously assigns both part-of- sl)eech tags and a bare-bones dependency struc- ture (illustrate.d in l!igure 1). the choice o t a simple syntactic structure is deliberate: we would like to ask some basic questions about where hx- ical relationships al)pear and how best, to exploit *this materia.l is based upon work supported un- der a national science i%undation graduate fellow- ship, and has benefited greatly from discussions with mike collins, dan m(:lame(l, mitch marcus and ad- wait ratnaparkhi. (a) tile man in the coiner taught his dachsht , ld io play gol f i;os dt nn in dt nn vbd pp.p$ nn to vh nn /? man n ~.. phty~ j j - y , .% (b) the ill __ ~ / .~dachshund it) gol f . ) f cofllel his file figure 1: (a) a bare-l>ones dependen(-y parse. ]]a<:h word points to a single t)arent, the word it modities; the head of the sentence points to the eos (end-of: sentence) ma.rk. crossing links and cycles arc not al- lowed. (b) constituent structure and sub(:ategoriza- tion may be highlighted by displaying the same de- pendencies as a lexical tree. 
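The bare-bones dependency structure in the excerpt above (each word points at a single parent, the sentence head points at the end-of-sentence mark, and crossing links and cycles are forbidden) can be stored as one head index per word. The checker below enforces those two constraints; the sentence and head assignments are a hand-made toy, not parser output.

```python
# Well-formedness check for a parent-array dependency parse.

def is_well_formed(heads):
    """heads[i] is the index of word i's parent; index len(heads) stands for EOS."""
    n = len(heads)
    # exactly one word may attach to EOS (the sentence head)
    if sum(1 for h in heads if h == n) != 1:
        return False
    # cycle check: walking up from every word must reach EOS within n steps
    for i in range(n):
        node, steps = i, 0
        while node != n:
            node = heads[node]
            steps += 1
            if steps > n:
                return False
    # crossing-link check: dependency spans must nest, never interleave
    for i in range(n):
        for j in range(n):
            a, b = sorted((i, heads[i])), sorted((j, heads[j]))
            if a[0] < b[0] < a[1] < b[1]:
                return False
    return True

if __name__ == "__main__":
    #         0      1      2         3      4      5
    words = ["the", "man", "taught", "his", "dog", "golf"]
    heads = [1, 2, 6, 4, 2, 2]                 # "taught" is the head, pointing at EOS (=6)
    print(is_well_formed(heads))               # True
    print(is_well_formed([1, 0, 6, 4, 2, 2]))  # False: "the" <-> "man" form a cycle
```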
it is uscflfl to look into thes0 basic ques- tions before trying to tine-tmm the performance of systems whose behavior is harder to understand. 1 the main contribution of the work is to i)ro- pose three distin(t, lexiealist hyl)otheses abou(. (,he probability space underlying sehl]ence structure. we il]ustrate how each hypothesis is (:xl)ressed in a depemteney framework, and how each can be used to guide our parser toward its favored so- lution.three new probabi l is t ic mode ls for dependency parsing: an exploration* j ason m. e i sner cis depar tment , un ivers i ty of pe lmsy lva i f ia . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1079.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1079.txt new file mode 100644 index 0000000000000000000000000000000000000000..356d7ebcb735b1e07e8911864f2fafb8f5dc83ad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-1079.txt @@ -0,0 +1 @@ +1 the muc evaluations we have just completed the sixth in a series of message understanding conferences, which have been organized by nrad, the rdt&e division of the naval command, control and ocean surveil- lance center (formerly nosc, the naval ocean systems center) with the support of darpa, the defense advanced research projects agency. this paper looks briefly at the history of these conferences and then examines the considerations which led to the structure of muc-6} the message understanding conferences were initiated by nosc to assess and to foster research on the automated analysis of military messages containing textual information. although called "conferences", the distinguishing characteristic of the mucs are not the conferences themselves, but the evaluations to which participants must submit in order to be permitted to attend the conference. for each muc, participating roups have been given sample messages and instructions on the type of information to be extracted, and have developed a system to process uch messages. then, shortly before the conference, participants are given a set of test messages to be run through their system (without making any changes to the system); the output of each participants system 1the full proceedings of the conference are to be distributed by morgan kaufmann publishers, san ma- teo, california; earlier muc proeeedings~ for muc-3, 4, and 5, are also available from morgan kaufmann. beth sundheim naval command, control and ocean surveillance center research, development, test and evaluation division (nrad) code 44208 53140 gatchell road san diego, cmifornia 92152-7420 sundhe im@poj ke . mi l is then evaluated against a manually-prepared an- swer key. the mucs are remarkable in part because of the degree to which these evaluations have defined a prograin of research and development. darpa has a number of information science and technol- ogy programs which are driven in large part, by regular evaluations. the mucs are notable, how- ever, in that they in large part have shaped the research program in information extraction and brought it to its current state} 2 early history muc-1 (1987) was basically exploratory; each group designed its own format for recording the information in the document, and there was no formal evaluation. by muc-2 (1989), the task had crystalized as one of template filling. 
one re- ceives a description of a class of events to be iden- tiffed in the text; for each of these events one must fill a template with information about the event. the template has slots for information about the event, such as the type of event, the agent, the time and place, the effect, etc. for muc-2, the template had 10 slots. both muc-1 and muc- 2 involved sanitized forms of military messages about naval sightings and engagements.message unders tand ing conference - 6: a br ie f h is tory ralph grishman dept. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-2141.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-2141.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1ea669df27a98a67eab3f55b92a6719a4c99f6a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-2141.txt @@ -0,0 +1 @@ +in this paper, we address the problem of word alignments for a bilingual corpus. in the recent years, there have been a number of papers con- sidering this or similar problems: (brown et al, 1990), (dagan et al, 1993), (kay et al, 1993), (fung et al, 1993). in our approach, we use a first-order hidden markov model (hmm) (aelinek, 1976), which is similar, but not identical to those used in speech recognition. the key component of this approach is to make the alignment probabilities dependent not on the absolute position of the word align- ment, but on its relative position; i.e. we consider the differences in the index of the word positions rather than the index itself. the organization of the paper is as follows. after reviewing the statistical approach to ma- chine translation, we first describe the convention- al model (mixture model). we then present our first-order hmm approach in lull detail. finally we present some experimental results and compare our model with the conventional model.finally we present some experimental results and compare our model with the conventional model. in this paper, we address the problem of word alignments for a bilingual corpus. a key issne in modeling the string translation probability pr(j'~le i) is the question of how we define the correspondence b tween the words of the english sentence and the words of the french sentence. the ultimate test of the different alignment and translation models can only be car- ried out in the framework of a fully operational translation system. in this section, we describe two models for word alignrnent in detail: ,. models describ- ing these types of dependencies are referred to as alignment models. on the oth- er hand, large jumps due to different word order- ings in the two languages are successfully modeled. we are presently studying and testing a nmltilevel hmm model that allows only a small number of large jumps. in typical cases, we can assume a sort of pairwise dependence by considering all word pairs (fj, ei) for a given sentence pair i.-/1\[~'j', elqlj' we fur- ther constrain this model by assigning each french word to exactly one english word. a mixture-based alignment model, which was introduced in (brown et al, 1990); ? an hmm-based alignment model. 
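The HMM alignment model described in the excerpt above makes the alignment probability depend on the jump width between successive aligned positions rather than on the absolute position. A standard way to write that model down (reconstructed from the description, not quoted from the paper) is:

```latex
% a_j : position of the English word aligned to the j-th French word f_j
% I   : length of the English sentence e_1 ... e_I
\Pr(f_1^J \mid e_1^I)
  \;=\; \sum_{a_1^J} \prod_{j=1}^{J} p(a_j \mid a_{j-1}, I)\, p(f_j \mid e_{a_j}),
\qquad
p(a_j \mid a_{j-1}, I) \;=\; \frac{c(a_j - a_{j-1})}{\sum_{i=1}^{I} c(i - a_{j-1})}
```

The jump counts c(.) and the translation table are typically estimated by EM, in the same way HMM parameters are trained in speech recognition.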
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-2183.txt b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-2183.txt new file mode 100644 index 0000000000000000000000000000000000000000..b117e8f7d19944da392a8c9ebc26587205286691 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs/C96-2183.txt @@ -0,0 +1 @@ +structure of the sentence, to identify the components to be separated out. obviously a parser could be used to obtain the complete structure of the sentence. however, full parsing is slow and prone to failure, especially on complex sentences. in this paper, we consider two alternatives to full parsing which could be use

=1.17 in /usr/local/lib/python3.10/dist-packages (from datasets) (1.25.2)\n","Requirement already satisfied: pyarrow>=12.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (14.0.2)\n","Requirement already satisfied: pyarrow-hotfix in /usr/local/lib/python3.10/dist-packages (from datasets) (0.6)\n","Collecting dill<0.3.9,>=0.3.0 (from datasets)\n"," Downloading dill-0.3.8-py3-none-any.whl (116 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m17.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from datasets) (2.0.3)\n","Collecting requests>=2.32.1 (from datasets)\n"," Downloading requests-2.32.3-py3-none-any.whl (64 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.9/64.9 kB\u001b[0m \u001b[31m10.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: tqdm>=4.62.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (4.66.4)\n","Collecting xxhash (from datasets)\n"," Downloading xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m24.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting multiprocess (from datasets)\n"," Downloading multiprocess-0.70.16-py310-none-any.whl (134 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m18.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: fsspec[http]<=2024.3.1,>=2023.1.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (2023.6.0)\n","Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets) (3.9.5)\n","Requirement already satisfied: huggingface-hub>=0.21.2 in /usr/local/lib/python3.10/dist-packages (from datasets) (0.23.2)\n","Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from datasets) (24.0)\n","Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (6.0.1)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.3.1)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (23.2.0)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.4.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (6.0.5)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.9.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (4.0.3)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub>=0.21.2->datasets) (4.12.1)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (3.7)\n","Requirement already satisfied: 
urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (2024.6.2)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2023.4)\n","Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2024.1)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas->datasets) (1.16.0)\n","Installing collected packages: xxhash, requests, dill, multiprocess, datasets\n"," Attempting uninstall: requests\n"," Found existing installation: requests 2.31.0\n"," Uninstalling requests-2.31.0:\n"," Successfully uninstalled requests-2.31.0\n","\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n","google-colab 1.0.0 requires requests==2.31.0, but you have requests 2.32.3 which is incompatible.\u001b[0m\u001b[31m\n","\u001b[0mSuccessfully installed datasets-2.19.2 dill-0.3.8 multiprocess-0.70.16 requests-2.32.3 xxhash-3.4.1\n"]},{"output_type":"display_data","data":{"application/vnd.colab-display-data+json":{"pip_warning":{"packages":["requests"]},"id":"80bef387d17f43f785066f39f43245dc"}},"metadata":{}}],"source":["pip install datasets"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"5QkZgRtym6QX","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1717517072772,"user_tz":-240,"elapsed":6379,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"d469302a-9e59-4841-8b03-0e4e4a945092"},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting rouge_score\n"," Downloading rouge_score-0.1.2.tar.gz (17 kB)\n"," Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: absl-py in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.4.0)\n","Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (from rouge_score) (3.8.1)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.25.2)\n","Requirement already satisfied: six>=1.14.0 in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.16.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (1.4.2)\n","Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (2024.5.15)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (4.66.4)\n","Building wheels for collected packages: rouge_score\n"," Building wheel for rouge_score (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n"," Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24933 sha256=c88639beef646889d9826ebc03998b77688f195fbd6d6d22829bfc470f83a9af\n"," Stored in directory: /root/.cache/pip/wheels/5f/dd/89/461065a73be61a532ff8599a28e9beef17985c9e9c31e541b4\n","Successfully built rouge_score\n","Installing collected packages: rouge_score\n","Successfully installed rouge_score-0.1.2\n"]}],"source":["pip install rouge_score"]},{"cell_type":"code","execution_count":1,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"9srcg82ts73r","outputId":"d74f439d-342a-40bf-e4c0-1f949dcde3fc","executionInfo":{"status":"ok","timestamp":1717985908348,"user_tz":-240,"elapsed":59845,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting accelerate\n"," Downloading accelerate-0.31.0-py3-none-any.whl (309 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m309.4/309.4 kB\u001b[0m \u001b[31m8.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from accelerate) (1.25.2)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (24.0)\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate) (5.9.5)\n","Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from accelerate) (6.0.1)\n","Requirement already satisfied: torch>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (2.3.0+cu121)\n","Requirement already satisfied: huggingface-hub in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.23.2)\n","Requirement already satisfied: safetensors>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.4.3)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.14.0)\n","Requirement already satisfied: typing-extensions>=4.8.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (4.12.1)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (1.12.1)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.3)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.1.4)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2023.6.0)\n","Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\n","Collecting nvidia-cuda-runtime-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\n","Collecting nvidia-cuda-cupti-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\n","Collecting nvidia-cudnn-cu12==8.9.2.26 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\n","Collecting nvidia-cublas-cu12==12.1.3.1 (from torch>=1.10.0->accelerate)\n"," Using cached 
nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\n","Collecting nvidia-cufft-cu12==11.0.2.54 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\n","Collecting nvidia-curand-cu12==10.3.2.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\n","Collecting nvidia-cusolver-cu12==11.4.5.107 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\n","Collecting nvidia-cusparse-cu12==12.1.0.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\n","Collecting nvidia-nccl-cu12==2.20.5 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl (176.2 MB)\n","Collecting nvidia-nvtx-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\n","Requirement already satisfied: triton==2.3.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2.3.0)\n","Collecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch>=1.10.0->accelerate)\n"," Downloading nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_x86_64.whl (21.3 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.3/21.3 MB\u001b[0m \u001b[31m40.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (2.31.0)\n","Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (4.66.4)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.10.0->accelerate) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2024.6.2)\n","Requirement already satisfied: mpmath<1.4.0,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.10.0->accelerate) (1.3.0)\n","Installing collected packages: nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, accelerate\n","Successfully installed accelerate-0.31.0 nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.20.5 nvidia-nvjitlink-cu12-12.5.40 nvidia-nvtx-cu12-12.1.105\n"]}],"source":["pip install accelerate 
-U"]},{"cell_type":"code","execution_count":5,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":35},"id":"YIRJ_hpoIAoH","outputId":"37675152-52b6-4041-e70f-1d6d1e3e8d1a","executionInfo":{"status":"ok","timestamp":1717986063225,"user_tz":-240,"elapsed":4831,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["'0.31.0'"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"string"}},"metadata":{},"execution_count":5}],"source":["import accelerate\n","\n","accelerate.__version__"]},{"cell_type":"code","execution_count":6,"metadata":{"id":"NITeHKNnwsOG","executionInfo":{"status":"ok","timestamp":1717986066611,"user_tz":-240,"elapsed":882,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["import torch\n","torch.cuda.empty_cache()"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"Vjnu4pNwSbRS","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1717499511356,"user_tz":-240,"elapsed":10420,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"7af869de-9390-4124-b6c6-3b8dd61cf9bc"},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting prettyprint\n"," Downloading prettyprint-0.1.5.tar.gz (2.1 kB)\n"," Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n","Building wheels for collected packages: prettyprint\n"," Building wheel for prettyprint (setup.py) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for prettyprint: filename=prettyprint-0.1.5-py3-none-any.whl size=3027 sha256=1a844ebdf0c53a3cc082d7fc0327d7d57c0b250bf96dbf448a78637d1f36628d\n"," Stored in directory: /root/.cache/pip/wheels/b2/d0/51/477413885481c635ab7c6400f96f47b8a0971bbc1241ff9c9f\n","Successfully built prettyprint\n","Installing collected packages: prettyprint\n","Successfully installed prettyprint-0.1.5\n"]}],"source":["!pip install prettyprint\n","import prettyprint as pprint"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"G1H5ew4oUEcD","outputId":"57f3faf0-c62b-4e6b-b365-d71b1f9dad11"},"outputs":[{"name":"stdout","output_type":"stream","text":["Mon May 27 04:30:22 2024 \n","+---------------------------------------------------------------------------------------+\n","| NVIDIA-SMI 535.104.05 Driver Version: 535.104.05 CUDA Version: 12.2 |\n","|-----------------------------------------+----------------------+----------------------+\n","| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\n","| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\n","| | | MIG M. 
|\n","|=========================================+======================+======================|\n","| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |\n","| N/A 35C P8 8W / 70W | 0MiB / 15360MiB | 0% Default |\n","| | | N/A |\n","+-----------------------------------------+----------------------+----------------------+\n"," \n","+---------------------------------------------------------------------------------------+\n","| Processes: |\n","| GPU GI CI PID Type Process name GPU Memory |\n","| ID ID Usage |\n","|=======================================================================================|\n","| No running processes found |\n","+---------------------------------------------------------------------------------------+\n"]}],"source":["!nvidia-smi"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"NTval3Qx7ngS","executionInfo":{"status":"ok","timestamp":1717499522136,"user_tz":-240,"elapsed":10789,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"3dcbbd8a-50ca-4bdd-869c-8851a66b45d4"},"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: matplotlib in /usr/local/lib/python3.10/dist-packages (3.7.1)\n","Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (1.2.1)\n","Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (0.12.1)\n","Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (4.52.4)\n","Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (1.4.5)\n","Requirement already satisfied: numpy>=1.20 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (1.25.2)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (24.0)\n","Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (9.4.0)\n","Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (3.1.2)\n","Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib) (2.8.2)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.7->matplotlib) (1.16.0)\n"]}],"source":["pip install matplotlib"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"olP3Wvn-7ngT","executionInfo":{"status":"ok","timestamp":1717499531969,"user_tz":-240,"elapsed":9838,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"4d707167-8111-42ea-fa77-764cf68aef2c"},"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.2.2)\n","Requirement already satisfied: numpy>=1.17.3 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.25.2)\n","Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.11.4)\n","Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.4.2)\n","Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0)\n"]}],"source":["pip install 
scikit-learn"]},{"cell_type":"code","execution_count":9,"metadata":{"id":"7xnIpegCiImk","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1717986945420,"user_tz":-240,"elapsed":127024,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"cd8e08d6-d5c6-42fd-f3c9-9729d4f734d0"},"outputs":[{"output_type":"stream","name":"stdout","text":[" input_text \\\n","0 many natural language processing (nlp) applica... \n","1 conceptual natural language processing typical... \n","2 in this paper, we focus on the problem of dete... \n","3 recent research in empirical (corpus-based) na... \n","4 many statistical or machine-learning approache... \n",".. ... \n","988 machine-readable dictionary (the collins spani... \n","989 current automatic summarizers usually rely on ... \n","990 parsing sentences using statistical informatio... \n","991 we present a new parser for parsing down to pe... \n","992 word sense disambiguation is often cast as a p... \n","\n"," target_text \n","0 noun phrase coreference as clustering this pap... \n","1 an empirical approach to conceptual case frame... \n","2 detecting text similarity over short passages:... \n","3 comparative experiments on disambiguating word... \n","4 unsupervised models for named entity classific... \n",".. ... \n","988 three heads are better than one machine transl... \n","989 sentence reduction for automatic text summariz... \n","990 assigning function tags to parsed text it is g... \n","991 a maximum-entropy-inspired parser we present a... \n","992 a simple approach to building ensembles of nai... \n","\n","[993 rows x 2 columns]\n"]}],"source":["import os\n","import glob\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","from sklearn.model_selection import train_test_split\n","from transformers import LEDTokenizer, LEDForConditionalGeneration, Trainer, TrainingArguments\n","from datasets import Dataset, load_metric\n","\n","# Load data from txt files\n","def load_data(input_dir, target_dir):\n"," data = {'input_text': [], 'target_text': []}\n"," input_files = glob.glob(os.path.join(input_dir, '*.txt'))\n","\n"," for input_file in input_files:\n"," filename = os.path.basename(input_file)\n"," target_file = os.path.join(target_dir,filename)\n","\n"," with open(input_file, 'r',encoding=\"utf8\") as f:\n"," input_text = f.read()\n"," with open(target_file, 'r',encoding=\"utf8\") as f:\n"," target_text = f.read()\n","\n"," data['input_text'].append(input_text)\n"," data['target_text'].append(target_text)\n","\n"," return pd.DataFrame(data)\n","\n","# Define dataset paths\n","input_dir = '/content/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/inputs'\n","target_dir = '/content/drive/MyDrive/RA_Internship/HIPORANK/DATASET_HIPORANK/dataset/targets'\n","\n","# Load data\n","data_df = load_data(input_dir, target_dir)\n","\n","print(data_df)\n","\n","# Split data\n","train_df, temp_df = train_test_split(data_df, test_size=0.2, random_state=42)\n","eval_df, test_df = train_test_split(temp_df, test_size=0.5, random_state=42)\n","\n","# Convert to Dataset\n","train_dataset = Dataset.from_pandas(train_df)\n","eval_dataset = Dataset.from_pandas(eval_df)\n","test_dataset = Dataset.from_pandas(test_df)\n","\n","\n","\n","\n","\n"]},{"cell_type":"code","execution_count":10,"metadata":{"id":"iVclerTAUi95","executionInfo":{"status":"ok","timestamp":1717986958454,"user_tz":-240,"elapsed":704,"user":{"displayName":"Aditi 
Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["from transformers import AutoTokenizer"]},{"cell_type":"code","execution_count":11,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":336,"referenced_widgets":["adb36cd54327481f8e08d431c1c2be47","60363acdc50545d59fb9e81cf8cc8fa4","a2c8fb676e3a4174801dc0727f48dbf9","c394654fd60c4b0dbda234eb96cb8326","3f8950fff5624985861ae46f180b2f77","f6bb7ef31a854006a131cb363cb2ccc4","033b3dc6b42046e088da8db49ca6be53","0da350cbe5b449df9248f638644b25bd","aa80c5fbb7ae4edcb73913405fe9d178","174bb6a3e93749ea965f55fa2529a66a","5bbaffc2fe6b41898643e54e5efc414a","6feacd7780204b95a0ac9e0960bb7051","d3de27f89000452b94fc080ea4006ac8","9b3b12d72784444cb7b7229b3ece5ae9","f3c9b36c7dcf4aacbe1a8fa2bb34dc1a","e20ff2fa8456469689e89e8934823937","9b229c8dbf834924aec022ac46391126","4692006aa8f84e1584e7a2d7701fd9fe","2d1ba36b6d584990ad30ddc836f303aa","200da62c75b649649f9950ac6ce5a2b6","4fbf0c311ebb4f6f879f4067f1f65e55","0f764bc386ac4bffbfdbad29164d4f1b","4f91be8ff6e046e3800e9372f37707a2","74e1c3d2a5ae41f98df23399bbad3ff2","6f439cd9628b44a2a02bf3a932bc8ce3","411731b5a4c9432499d4d1d6ae8afa1a","6c167d5777c6402ba48100e0783fabc2","2f9336479e394c2581ed76af1ad22523","b82473f9dcc74ebca50936d8969808fa","ac1cf21d6b8c412ab8ba2c3a854080d1","d35c7a932715443cb712acc0d0475c60","889cb440760b4f26b521f659663aff49","140cd7b096b24165afa37b610c360be5","525cd158dae8408d861fcf29daa58409","88998570b9bd4f8b9a257785ae37fb34","4c2d8b62e0944cac8eeaf16fd69a3a4d","4c3a621f4ce540a8b5e42364e1224257","f7adb4e02235409b8e8f1bc9945c498d","f049fc09d6c241c7a4b2c0f1f4767ea0","b27c653f9bfc4fb8a280b0d2b0804c53","dfa41b69a5034879b0116e921befa879","54512cf0b90e4a6d845b3871ab097eab","96ee1082136341d690d6d2fbbbdb5764","641b003d8ae147d398199eb882e81f5f","0400e3e1fb784b34a3daa304c86264e7","2da5ccea478143fc95c47645f4dffeb4","b8cec55e5f104516a99818274ea06e91","e89efbee5da041e3b6f4e9139bd87bb5","870fd5a9a65b45119a86950bce9b14a9","27b351d742354cc7af3eefca1f69dbd9","f05fba1b782b451b9380add982882dc1","8a01297ecef44fe19e350148d592c69b","90bd0543ab5c4bcd8e13f1eb190350be","212af8fd13834b13ba97ebced3f6409a","db6aa38d0ba94e3dae56367cc71ff967"]},"id":"n3omQKJ4UvC5","outputId":"8da30be7-d89d-4892-8e86-114a513f0be3","executionInfo":{"status":"ok","timestamp":1717986965849,"user_tz":-240,"elapsed":5653,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_token.py:89: UserWarning: \n","The secret `HF_TOKEN` does not exist in your Colab secrets.\n","To authenticate with the Hugging Face Hub, create a token in your settings tab (https://huggingface.co/settings/tokens), set it as secret in your Google Colab and restart your session.\n","You will be able to reuse this secret in all of your notebooks.\n","Please note that authentication is recommended but still optional to access public models or datasets.\n"," warnings.warn(\n"]},{"output_type":"display_data","data":{"text/plain":["tokenizer_config.json: 0%| | 0.00/27.0 [00:00 1\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mprettyprint\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mpprint\u001b[39;00m\n","\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'prettyprint'"]}],"source":["import prettyprint as 
pprint"]},{"cell_type":"code","execution_count":16,"metadata":{"id":"7WlGFTdri6ie","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1717987022748,"user_tz":-240,"elapsed":809,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"89c7ea5a-3173-4576-b501-b741c078f699"},"outputs":[{"output_type":"stream","name":"stdout","text":["INPUT: when analyzing text, automatically detecting emotions such as joy, sadness, fear, anger, and surprise is useful for a number of purposes, including identifying blogs that express specific emotions towards the topic of interest, identifying what emotion a newspaper headline is trying to evoke, and devising automatic dialogue systems that respond appropriately to different emotional states of the user. often different emotions are expressed through different words. for example, delightful and yummy indicate the emotion of joy, gloomy and cry are indicative of sadness, 26 shout and boiling are indicative of anger, and so on. therefore an emotion lexicon—a list of emotions and words that are indicative of each emotion—is likely to be useful in identifying emotions in text. words may evoke different emotions in different contexts, and the emotion evoked by a phrase or a sentence is not simply the sum of emotions conveyed by the words in it, but the emotion lexicon will be a useful component for any sophisticated emotion detecting algorithm. the lexicon will also be useful for evaluating automatic methods that identify the emotions evoked by a word. such algorithms may then be used to automatically generate emotion lexicons in languages where no such lexicons exist. as of now, high-quality high-coverage emotion lexicons do not exist for any language, although there are a few limited-coverage lexicons for a handful of languages, for example, the wordnet affect lexicon (wal) (strapparava and valitutti, 2004) for six basic emotions and the general inquirer (gi) (stone et al., 1966), which categorizes words into a number of categories, including positive and negative semantic orientation. amazon has an online service called mechanical turk that can be used to obtain a large amount of human annotation in an efficient and inexpensive manner (snow et al., 2008; callison-burch, 2009).1 however, one must define the task carefully to obtain annotations of high quality. several checks must be placed to ensure that random and erroneous annotations are discouraged, rejected, and re-annotated. in this paper, we show how we compiled a moderate-sized english emotion lexicon by manual annotation through amazon’s mechanical turk service. this dataset, which we will call emolez, is many times as large as the only other known emotion lexicon, wordnet affect lexicon. more importantly, the terms in this lexicon are carefully chosen to include some of the most frequent nouns, verbs, adjectives, and adverbs. beyond unigrams, it has a large number of commonly used bigrams. we also include some words from the general inquirer and some from wordnet affect lexicon, to allow comparison of annotations between the various resources. we perform an extensive analysis of the annotations to answer several questions that have not been properly addressed so far. for instance, how hard is it for humans to annotate words with the emotions they evoke? what percentage of commonly used terms, in each part of speech, evoke an emotion? are emotions more commonly evoked by nouns, verbs, adjectives, or adverbs? 
is there a correlation between the semantic orientation of a word and the emotion it evokes? which emotions tend to go together; that is, which emotions are evoked simultaneously by the same term? this work is intended to be a pilot study before we create a much larger emotion lexicon with tens of thousands of terms. we focus on the emotions of joy, sadness, anger, fear, trust, disgust, surprise, and anticipation— argued by many to be the basic and prototypical emotions (plutchik, 1980). complex emotions can be viewed as combinations of these basic emotions.complex emotions can be viewed as combinations of these basic emotions. this research was funded by the national research council canada (nrc). and last but not least, thanks to the more than 1000 anonymous people who answered the emotion survey with diligence and care. often different emotions are expressed through different words. we focus on the emotions of joy, sadness, anger, fear, trust, disgust, surprise, and anticipation— argued by many to be the basic and prototypical emotions (plutchik, 1980). when analyzing text, automatically detecting emotions such as joy, sadness, fear, anger, and surprise is useful for a number of purposes, including identifying blogs that express specific emotions towards the topic of interest, identifying what emotion a newspaper headline is trying to evoke, and devising automatic dialogue systems that respond appropriately to different emotional states of the user. this work is intended to be a pilot study before we create a much larger emotion lexicon with tens of thousands of terms. we will explore the variance in emotion evoked by near-synonyms, and also how common it is for words with many meanings to evoke different emotions in different senses. for example, delightful and yummy indicate the emotion of joy, gloomy and cry are indicative of sadness, 26 shout and boiling are indicative of anger, and so on.\n","TARGET: emotions evoked by common words and phrases: using mechanical turk to create an emotion lexicon even though considerable attention has been given to semantic orientation of words and the creation of large polarity lexicons, research in emotion analysis has had to rely on limited and small emotion lexicons. in this paper, we show how we create a high-quality, moderate-sized emotion lexicon using mechanical turk. in addition to questions about emotions evoked by terms, we show how the inclusion of a word choice question can discourage malicious data entry, help identify instances where the annotator may not be familiar with the target term (allowing us to reject such annotations), and help obtain annotations at sense level (rather than at word level). we perform an extensive analysis of the annotations to better understand the distribution of emotions evoked by terms of different parts of speech. we identify which emotions tend to be evoked simultaneously by the same term and show that certain emotions indeed go hand in hand. we focus on emotion evoked by common words and phrases. we explore the use of mechanical turk to build the lexicon based on human judgment. 
we create a crowd sourced term emotion association lexicon consisting of associations of over 10,000 word-sense pairs with eight emotions joy, sadness, anger, fear, trust, disgust, surprise, and anticipation argued to be the basic and prototypical emotions.\n","hello\n"]}],"source":["print(\"INPUT: \",train_dataset['input_text'][146])\n","print(\"TARGET: \",train_dataset['target_text'][146])\n","print(\"hello\")"]},{"cell_type":"code","execution_count":17,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":49,"referenced_widgets":["45b55efbee6e4f62a5de7071bbc043aa","d10651e9391a410ea111ae794e5f268b","5128448cf6584ba7bfb672365b269db8","e8eb60da94bb434681b7c3975716bf97","c31a36e8c2f14d5da3c757ead4e86d0d","37b8f83071ba431a90ae7227e9d41418","83d0e6772b0a4dabb69d41aa6d1e94b4","d1023711b801470b8811dde0b6362f64","fa32a29288634de28824e5533f47a338","627a217165c348b8bf6029d3da971956","59ff0398cb5a4b839e187d41bbd20207"]},"id":"OTR1wU63WBOr","outputId":"809b9b16-9e01-4f72-af54-f30b60cb6333","executionInfo":{"status":"ok","timestamp":1717987028282,"user_tz":-240,"elapsed":2088,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"display_data","data":{"text/plain":["Map: 0%| | 0/99 [00:00=1.14.0 in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.16.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (1.4.2)\n","Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (2024.5.15)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (4.66.4)\n","Building wheels for collected packages: rouge_score\n"," Building wheel for rouge_score (setup.py) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24933 sha256=33f623cb03c94c81d403f7425dedad596efad637e9caa9ffa3111bf225e96a26\n"," Stored in directory: /root/.cache/pip/wheels/5f/dd/89/461065a73be61a532ff8599a28e9beef17985c9e9c31e541b4\n","Successfully built rouge_score\n","Installing collected packages: rouge_score\n","Successfully installed rouge_score-0.1.2\n"]},{"output_type":"stream","name":"stderr","text":[":2: FutureWarning: load_metric is deprecated and will be removed in the next major version of datasets. Use 'evaluate.load' instead, from the new library 🤗 Evaluate: https://huggingface.co/docs/evaluate\n"," rouge = load_metric(\"rouge\")\n","/usr/local/lib/python3.10/dist-packages/datasets/load.py:759: FutureWarning: The repository for rouge contains custom code which must be executed to correctly load the metric. 
You can inspect the repository content at https://raw.githubusercontent.com/huggingface/datasets/2.19.2/metrics/rouge/rouge.py\n","You can avoid this message in future by passing the argument `trust_remote_code=True`.\n","Passing `trust_remote_code=True` will be mandatory to load this metric from the next major release of `datasets`.\n"," warnings.warn(\n"]},{"output_type":"display_data","data":{"text/plain":["Downloading builder script: 0%| | 0.00/2.17k [00:00=1.17 in /usr/local/lib/python3.10/dist-packages (from accelerate) (1.25.2)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (24.0)\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate) (5.9.5)\n","Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from accelerate) (6.0.1)\n","Requirement already satisfied: torch>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (2.3.0+cu121)\n","Requirement already satisfied: huggingface-hub in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.23.2)\n","Requirement already satisfied: safetensors>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.4.3)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.14.0)\n","Requirement already satisfied: typing-extensions>=4.8.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (4.12.0)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (1.12.1)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.3)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.1.4)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2023.6.0)\n","Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\n","Collecting nvidia-cuda-runtime-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\n","Collecting nvidia-cuda-cupti-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\n","Collecting nvidia-cudnn-cu12==8.9.2.26 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\n","Collecting nvidia-cublas-cu12==12.1.3.1 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\n","Collecting nvidia-cufft-cu12==11.0.2.54 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\n","Collecting nvidia-curand-cu12==10.3.2.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\n","Collecting nvidia-cusolver-cu12==11.4.5.107 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\n","Collecting nvidia-cusparse-cu12==12.1.0.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 
MB)\n","Collecting nvidia-nccl-cu12==2.20.5 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl (176.2 MB)\n","Collecting nvidia-nvtx-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\n","Requirement already satisfied: triton==2.3.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2.3.0)\n","Collecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch>=1.10.0->accelerate)\n"," Downloading nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_x86_64.whl (21.3 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.3/21.3 MB\u001b[0m \u001b[31m35.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (2.31.0)\n","Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (4.66.4)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.10.0->accelerate) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2024.2.2)\n","Requirement already satisfied: mpmath<1.4.0,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.10.0->accelerate) (1.3.0)\n","Installing collected packages: nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, accelerate\n","Successfully installed accelerate-0.30.1 nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.20.5 nvidia-nvjitlink-cu12-12.5.40 nvidia-nvtx-cu12-12.1.105\n"]}],"source":["!pip install accelerate -U"]},{"cell_type":"code","execution_count":23,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"h6UGccgsXO3S","outputId":"488a866e-aec6-47cb-cf6e-ef5c8e5a7bb2","executionInfo":{"status":"ok","timestamp":1717987090841,"user_tz":-240,"elapsed":10,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/transformers/training_args.py:1474: FutureWarning: `evaluation_strategy` is deprecated and will be removed in version 4.46 of 🤗 Transformers. 
Use `eval_strategy` instead\n"," warnings.warn(\n"]}],"source":["# enable fp16 apex training\n","training_args = Seq2SeqTrainingArguments(\n"," predict_with_generate=True,\n"," evaluation_strategy=\"steps\",\n"," per_device_train_batch_size=batch_size,\n"," per_device_eval_batch_size=batch_size,\n"," fp16=True,\n"," output_dir=\"./\",\n"," logging_steps=5,\n"," eval_steps=10,\n"," save_steps=10,\n"," save_total_limit=2,\n"," gradient_accumulation_steps=4,\n"," num_train_epochs=5,\n",")"]},{"cell_type":"code","execution_count":24,"metadata":{"id":"SM9e_n8xY6KE","executionInfo":{"status":"ok","timestamp":1717987098614,"user_tz":-240,"elapsed":702,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["trainer = Seq2SeqTrainer(\n"," model=led,\n"," tokenizer=tokenizer,\n"," args=training_args,\n"," #compute_metrics=compute_metrics,\n"," train_dataset=train_dataset,\n"," eval_dataset=eval_dataset,\n",")"]},{"cell_type":"code","execution_count":25,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":1000},"id":"kLjtjhv4ZNCP","outputId":"83650b17-15b0-4bb3-c993-c0a96a149003","executionInfo":{"status":"ok","timestamp":1717989727969,"user_tz":-240,"elapsed":2616151,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"display_data","data":{"text/plain":[""],"text/html":["\n","
\n"," \n"," \n"," [495/495 43:30, Epoch 4/5]\n","
\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
StepTraining LossValidation Loss
102.8616002.892442
202.8923002.818325
302.9791002.763944
402.9044002.727571
502.4280002.716155
602.9009002.694287
702.9211002.668237
802.7291002.652847
902.6494002.652525
1002.7393002.635740
1102.3916002.638418
1202.4493002.626249
1302.4752002.601421
1402.1968002.606759
1502.5380002.597970
1602.4522002.595938
1702.4397002.601662
1802.4763002.583722
1901.9990002.574931
2002.0956002.569558
2102.1285002.609941
2202.1804002.593116
2302.0031002.591318
2402.0940002.587461
2502.2214002.563926
2602.0745002.572323
2702.3377002.574980
2801.9967002.570967
2902.1091002.569414
3002.0384002.560555
3101.9828002.597120
3202.1608002.585729
3301.9558002.579349
3402.0719002.576947
3501.8055002.580410
3602.0445002.575760
3702.0795002.592396
3802.0730002.574471
3902.0314002.569654
4002.0928002.573113
4101.9158002.594249
4202.0540002.584558
4301.8497002.596341
4401.8353002.594334
4501.9786002.589064
4601.9003002.591411
4701.9248002.587555
4802.1843002.587325
4901.9193002.587291

"]},"metadata":{}},{"output_type":"stream","name":"stderr","text":["Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. 
This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. 
Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. 
To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. 
These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. 
This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. 
Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. 
To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. 
These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. 
This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. 
Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. 
To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n"]},{"output_type":"execute_result","data":{"text/plain":["TrainOutput(global_step=495, training_loss=2.2669311041783806, metrics={'train_runtime': 2615.821, 'train_samples_per_second': 1.518, 'train_steps_per_second': 0.189, 'total_flos': 2.138562229174272e+16, 'train_loss': 2.2669311041783806, 'epoch': 4.987405541561713})"]},"metadata":{},"execution_count":25}],"source":["trainer.train()"]},{"cell_type":"code","execution_count":26,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":734},"id":"7q8GUp1cQDiW","outputId":"539f16d2-ebf1-4d70-b07f-3f428ce37038","executionInfo":{"status":"ok","timestamp":1717989737782,"user_tz":-240,"elapsed":1542,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"display_data","data":{"text/plain":["

"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAAcoAAADvCAYAAAByipTtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABStUlEQVR4nO2dd1xT1/vHPwkb2SjLAQ4coOJEcbcKbsWttVU7tLXYaoet1qqotbZYu7T6rR1q25911lm1IhZx7z1w42IpskQgkPP74/TmJpBckjAS4Hm/Xnnddc7NuYeQT57nPOc5MsYYA0EQBEEQWpGbugEEQRAEYc6QUBIEQRCEBCSUBEEQBCEBCSVBEARBSEBCSRAEQRASkFASBEEQhAQklARBEAQhAQklQRAEQUhAQkkQBEEQEpBQEoSRTJgwAX5+fqZuBkEQ5QwJJVHlkMlker1iY2NN3VQNYmNjIZPJsGnTJlM3xeQcOnQIffv2Re3atWFra4t69eph4MCBWLt2rapMTk4OIiMjze7vSFQ9LE3dAIIoa37//XeN499++w3R0dHFzjdr1qxU7/PTTz9BqVSW6h5EcTZu3IhRo0ahVatWmDp1KlxdXXHnzh3ExcXhp59+wksvvQSAC+W8efMAAD169DBhi4mqDgklUeV4+eWXNY6PHTuG6OjoYueLkpOTA3t7e73fx8rKyqj2EdJERkYiICAAx44dg7W1tca1lJQUE7WKqM6Q65WolvTo0QPNmzfH6dOn0a1bN9jb2+OTTz4BAGzbtg39+/eHj48PbGxs0LBhQyxYsACFhYUa9yg6Rnn37l3IZDJ89dVXWLlyJRo2bAgbGxu0b98eJ0+eLLO23759GyNGjICbmxvs7e3RsWNH/P3338XKLV26FIGBgbC3t4erqyvatWun4brMysrCtGnT4OfnBxsbG3h4eCA0NBRnzpzR+d6bNm2CTCbDgQMHil378ccfIZPJcOnSJQBAUlISXn31VdSpUwc2Njbw9vbG4MGDcffuXcnnu3XrFtq3b19MJAHAw8MDAO/rWrVqAQDmzZuncqdHRkaqyl67dg3Dhw+Hm5sbbG1t0a5dO2zfvl3jfqtXr4ZMJkNcXBzefPNNuLu7w8nJCePGjcPTp08l20lUH8iiJKotT548Qd++fTF69Gi8/PLL8PT0BMC/PB0cHPD+++/DwcEB+/fvx5w5c5CZmYnFixeXeN+1a9ciKysLb775JmQyGaKiojB06FDcvn271FZocnIyOnXqhJycHLz77rtwd3fHmjVrMGjQIGzatAlDhgwBwN3C7777LoYPH46pU6ciNzcXFy5cwPHjx1Wuy7feegubNm3ClClTEBAQgCdPnuDQoUO4evUq2rRpo/X9+/fvDwcHB2zYsAHdu3fXuLZ+/XoEBgaiefPmAIBhw4bh8uXLeOedd+Dn54eUlBRER0fj3r17kkFQvr6+iImJwYMHD1CnTh2tZWrVqoUVK1Zg8uTJGDJkCIYOHQoAaNmyJQDg8uXL6Ny5M2rXro0ZM2agRo0a2LBhA8LDw7F582ZVPwlMmTIFLi4uiIyMRHx8PFasWIGEhATVuDFRzWEEUcWJiIhgRT/q3bt3ZwDY//73v2Llc3Jyip178803mb29PcvNzVWdGz9+PPP19VUd37lzhwFg7u7uLC0tTXV+27ZtDADbsWOHZDv//fdfBoBt3LhRZ5lp06YxAOzgwYOqc1lZWax+/frMz8+PFRYWMsYYGzx4MAsMDJR8P2dnZxYRESFZRhtjxoxhHh4erKCgQHUuMTGRyeVyNn/+fMYYY0+fPmUA2OLFiw2+/y+//MIAMGtra/bCCy+w2bNns4MHD6qeTSA1NZUBYHPnzi12j549e7IWLVpo/L2USiXr1KkT8/f3V51btWoVA8Datm3L8vPzVeejoqIYALZt2zaD209UPcj1SlRbbGxs8OqrrxY7b2dnp9rPysrC48eP0bVrV+Tk5ODatWsl3nfUqFFwdXVVHXft2hUAd5mWll27diE4OBhdunRRnXNwcMCkSZNw9+5dXLlyBQDg4uKCBw8eSLp8XVxccPz4cTx69MigNowaNQopKSka0aabNm2CUqnEqFGjAPA+tLa2RmxsrMEuzNdeew179uxBjx49cOjQISxYsABdu3aFv78/jhw5UmL9tLQ07N+/HyNHjlT9/R4/fownT56gd+/euHHjBh4+fKhRZ9KkSRrW/uTJk2FpaYldu3YZ1HaiakJCSVRbateurXUc7PLlyxgyZAicnZ3h5OSEWrVqqQKBMjIySrxvvXr1NI4F0SyLMa+EhAQ0adKk2HkhgjchIQEA8PHHH8PBwQHBwcHw9/dHREQEDh8+rFEnKioKly5dQt26dREcHIzIyEi9xLxPnz5wdnbG+vXrVefWr1+PVq1aoXHjxgD4j5Avv/wSu3fvhqenJ7p164aoqCgkJSXp9Zy9e/fGP//8g/T0dMTFxSEiIgIJCQkYMGBAiQE9N2/eBGMMs2fPRq1atTRec+fOBVA8KMjf31/j2MHBAd7e3iWOpxLVAxJKotqibjkKpKeno3v37jh//jzmz5+PHTt2IDo6Gl9++SUA6DUdxMLCQut5xljpGmwAzZo1Q3x8PNatW4cuXbpg8+bN6NKli0ooAGDkyJG4ffs2li5dCh8fHyxevBiBgYHYvXu35L1tbGwQHh6OLVu2oKCgAA8fPsThw4dV1qTAtGnTcP36dSxatAi2traYPXs2mjVrhrNnz+r9HPb29ujatSuWLVuGTz/9FE+fPi2xfcLf6MMPP0R0dLTWV6NGjfRuA0FQMA9BqBEbG4snT57gr7/+Qrdu3VTn79y5Y8JWifj6+iI+Pr7YecEl7OvrqzpXo0YNjBo1CqNGjUJ+fj6GDh2KhQsXYubMmbC1tQUAeHt74+2338bbb7+NlJQUtGnTBgsXLkTfvn0l2zFq1CisWbMGMTExuHr1KhhjxYQSABo2bIgPPvgAH3zwAW7cuIFWrVphyZIl+OOPPwx+9nbt2gEAEhMTAUBnkE2DBg0A8Ok7vXr10uveN27cwAsvvKA6zs7ORmJiIvr162dwO4mqB1mUBKGGYA2qW3/5+flYvny5qZqkQb9+/XDixAkcPXpUde7Zs2dYuXIl/Pz8EBAQAIBH9KpjbW2NgIAAMMagUChQWFhYzI3s4eEBHx8f5OXlldiOXr16wc3NDevXr8f69esRHByM+vXrq67n5OQgNzdXo07Dhg3h6OhY4v1jYmK0nhfGCwXXszDnNT09vdhz9OjRAz/++KNKVNVJTU0tdm7lypVQKBSq4xUrVqCgoKDEHwxE9YAsSoJQo1OnTnB1dcX48ePx7rvvQiaT4ffff69Qt+nmzZu1Bg2NHz8eM2bMwJ9//om+ffvi3XffhZubG9asWYM7d+5g8+bNkMv5b9+wsDB4eXmhc+fO8PT0xNWrV7Fs2TL0798fjo6OSE9PR506dTB8+HAEBQXBwcEB+/btw8mTJ7FkyZIS22hlZYWhQ4di3bp1ePbsGb766iuN69evX0fPnj0xcuR
IBAQEwNLSElu2bEFycjJGjx4tee/Bgwejfv36GDhwIBo2bIhnz55h37592LFjB9q3b4+BAwcC4K7zgIAArF+/Ho0bN4abmxuaN2+O5s2b44cffkCXLl3QokULTJw4EQ0aNEBycjKOHj2KBw8e4Pz58xrvmZ+fr2pvfHw8li9fji5dumDQoEEl9gVRDTBlyC1BVAS6pofomj5x+PBh1rFjR2ZnZ8d8fHzYRx99xP755x8GgP3777+qcrqmh2ibEgEd0xjUEaaH6HoJU0Ju3brFhg8fzlxcXJitrS0LDg5mO3fu1LjXjz/+yLp168bc3d2ZjY0Na9iwIZs+fTrLyMhgjDGWl5fHpk+fzoKCgpijoyOrUaMGCwoKYsuXL5dsozrR0dEMAJPJZOz+/fsa1x4/fswiIiJY06ZNWY0aNZizszPr0KED27BhQ4n3/fPPP9no0aNZw4YNmZ2dHbO1tWUBAQFs1qxZLDMzU6PskSNHWNu2bZm1tXWxPr516xYbN24c8/LyYlZWVqx27dpswIABbNOmTaoywvSQAwcOsEmTJjFXV1fm4ODAxo4dy548eaJ3XxBVGxljFfhTmSAIwoxYvXo1Xn31VZw8eVI1BkoQRaExSoIgCIKQgISSIAiCICQgoSQIgiAICWiMkiAIgiAkIIuSIAiCICQgoSQIgiAICapdwgGlUolHjx7B0dGR1pkjCIKoxjDGkJWVBR8fH1WyDm1UO6F89OgR6tata+pmEARBEGbC/fv3dS4SDlRDoXR0dATAO8bJyUnvegqFAnv37kVYWFipV6mvilD/SEP9Iw31jzTUP9IY2z+ZmZmoW7euShd0Ue2EUnC3Ojk5GSyU9vb2cHJyog+qFqh/pKH+kYb6RxrqH2lK2z8lDcNRMA9BEARBSEBCSRAEQRASVDvXa1mQkgIcPw7Y2ABhYaZuDUEQBFGekFAawfHjwKBBQPv2JJQEUdUoLCzUWMTZHFAoFLC0tERubi4KCwtN3RyzQ1f/WFhYwNLSstRTAUkojcDDg29TUkzbDoIgypbs7Gw8ePCgQhfq1gfGGLy8vHD//n2a/60Fqf6xt7eHt7c3rK2tjb4/CaUR1KrFtySUBFF1KCwsxIMHD2Bvb49atWqZlSAplUpkZ2fDwcFBcmJ8dUVb/zDGkJ+fj9TUVNy5cwf+/v5G9x0JpREIFuXz58CzZ0CNGqZtD0EQpUehUIAxhlq1asHOzs7UzdFAqVQiPz8ftra2JJRa0NU/dnZ2sLKyQkJCguq6MVCPG0GNGoDwf0RWJUFULczJkiRKT1n8sCChNAKZjMYpCYIgqgsklEYijFOmppq2HQRBEET5QkJpJGRREgRRVfHz88O3335r6maYDSSURkJCSRCEqZHJZJKvyMhIo+578uRJTJo0qVRt69GjB6ZNm1aqe5gLFPVqJOR6JQjC1CQmJqr2169fjzlz5iA+Pl51zsHBQbXPGENhYSEsLUv+2q8lfMERAMiiNBqyKAmiasMYn/5lipe++Q68vLxUL2dnZ8hkMtXxtWvX4OjoiN27d6Nt27awsbHBoUOHcOvWLQwePBienp5wcHBA+/btsW/fPo37FnW9ymQy/PzzzxgyZAjs7e3h7++P7du3l6p/N2/ejMDAQNjY2MDPzw9LlizRuL58+XL4+/vD1tYWnp6eGD58uOrapk2b0KJFC9jZ2cHd3R1hYWF49uxZqdojBVmURkJCSRBVm5wcQM0gq1Cys8tufvaMGTPw1VdfoUGDBnB1dcX9+/fRr18/LFy4EDY2Nvjtt98wcOBAxMfHo169ejrvM2/ePERFRWHx4sVYunQpxo4di4SEBLi5uRncptOnT2PkyJGIjIzEqFGjcOTIEbz99ttwd3fHhAkTcOrUKbz77rv4/fff0alTJ6SlpeHgwYMAuBU9ZswYREVFYciQIcjKykJcXFy5ZlMioTQScr0SBFEZmD9/PkJDQ1XHbm5uCAoKUh0vWLAAW7Zswfbt2zFlyhSd95kwYQLGjBkDAPj888/x/fff48SJE+jTp4/Bbfr666/Rs2dPzJ49GwDQuHFjXLlyBYsXL8aECRNw79491KhRAwMGDICjoyN8fX3RunVrAFwoCwoKMHToUPj6+gIAAgMDkZmZaXA79IWE0kjIoiSIqo29PbfsTPXeZUW7du00jrOzsxEZGYm///5bJTrPnz/HvXv3JO/TsmVL1X6NGjXg5OSEFCO/AK9evYrBgwdrnOvcuTO+/fZbFBYWIjQ0FL6+vmjQoAH69OmDPn36qNy+QUFB6NmzJ1q0aIHevXsjLCwMQ4cOhYWFhVFt0QcaozQSdaE0s/zJBEGUATIZd3+a4lWWyYFqFPHhfvjhh9iyZQs+//xzHDx4EOfOnUOLFi2Qn58veR8rK6si/SODUqksu4aq4ejoiDNnzuDPP/+Et7c35syZg6CgIKSnp8PCwgLR0dHYvXs3AgICsHTpUjRr1gwJCQnl0haAhNJoBNerQgFkZJi2LQRBEPpy+PBhTJgwAUOGDEGLFi3g5eWFu3fvVmgbmjVrhsOHDxdrV+PGjVWWoaWlJXr16oWoqChcuHABd+/exf79+wFwke7cuTPmzZuHs2fPwtraGjt37iy39pLr1UhsbQFHRyAri49TuriYukUEQRAl4+/vj7/++gsDBw6ETCbD7Nmzy80yTE1Nxblz5zTOeXt744MPPkD79u2xYMECjBo1CkePHsWyZcuwfPlyAMDOnTtx+/ZtdOvWDa6urti1axeUSiWaNGmC48ePIyYmBmFhYfDw8MDx48eRmpqKxo0bl8szACSUpcLDgwtlSgrg72/q1hAEQZTM119/jddeew2dOnVCzZo18fHHH5dbIMzatWuxdu1ajXMLFizAp59+ig0bNmDOnDlYsGABvL29MX/+fEyYMAEA4OLigr/++guRkZHIzc2Fv78//vzzTwQGBuLq1auIi4vDt99+i8zMTPj6+uKrr77SCFgqa0goS4GHB3DrFgX0EARheiZMmKASGoBnxtE2ZcLPz0/lwhSIiIjQOC7qitV2n/T0dMn2xMbGSl4fNmwYhg0bpvValy5ddNZv1qwZ9uzZo3FOqVSWa9QrjVGWApoiQhAEUfUhoSwFNEWEIAii6kNCWQpIKAmCIKo+JJSlgFyvBEEQVR8SylJAFiVBVD3KM2coUfGUxd+ThLIUkFASRNVBmOheUoYaonKRk5MDoHhmIUOg6SGlgFyvBFF1sLS0hL29PVJTU2FlZQW53HzsCKVSifz8fOTm5ppVu8wFbf3DGENOTg5SUlLg4uJSqlywJJSlQLAoU1MBpRKgzy9BVF5kMhm8vb1x586dcs0bagyMMTx//hx2dnaQlWUi2CqCVP+4uLjAy8urVPcnoSwFNWvyrVIJpKWJxwRBVE6sra3h7+9vdu5XhUKBuLg4dOvWrVQuxKqKrv6xsrIqk1VFTCqUK1aswIoVK1RZIAIDAzFnzhz07dtXZ52NGzdi9uzZuHv3Lvz9/fHll1+iX79+FdRiTaysAFdX4OlTPk5JQkkQlR+5XA5bW1tTN0MDCwsLFBQUwN
bWloRSC+XdPyZ1FtapUwdffPEFTp8+jVOnTuHFF1/E4MGDcfnyZa3ljxw5gjFjxuD111/H2bNnER4ejvDwcFy6dKmCWy6i7n4lCIIgqh4mFcqBAweiX79+8Pf3R+PGjbFw4UI4ODjg2LFjWst/99136NOnD6ZPn45mzZphwYIFaNOmDZYtW1bBLRehyFeCIIiqjdmMURYWFmLjxo149uwZQkJCtJY5evQo3n//fY1zvXv3xtatW3XeNy8vD3l5eapjIXGuQqGAQqHQu31C2aJ13N0tAMiRmFgIhaJ8lqqpDOjqH4JD/SMN9Y801D/SGNs/+pY3uVBevHgRISEhyM3NhYODA7Zs2YKAgACtZZOSkuDp6alxztPTE0lJSTrvv2jRIsybN6/Y+b1798Le3t7g9kZHR2sc5+a2BFAfR47cgK9vvEH3UijkiI72RUDAY/j5ZRncFnOkaP8QmlD/SEP9Iw31jzSG9o8wx7IkTC6UTZo0wblz55CRkYFNmzZh/PjxOHDggE6xNJSZM2dqWKGZmZmoW7cuwsLC4OTkpPd9FAoFoqOjERoaqjFYfPKkHHv2AC4u/ujXr6He9ysoAMaMscC2bXJ07KhEXFyh3nXNEV39Q3Cof6Sh/pGG+kcaY/tH36W5TC6U1tbWaNSoEQCgbdu2OHnyJL777jv8+OOPxcp6eXkhOTlZ41xycrLkHBkbGxvY2NgUO29lZWXUB65oPW9vvn382AJWVvqFISuVwBtvANu28eMLF+SwsJBXiXmYxvZrdYH6RxrqH2mof6QxtH/0LWt2X81KpVJjTFGdkJAQxMTEaJyLjo7WOaZZERianYcxYNo04LffAAsLwNISyMkBiqyTShAEQZgJJhXKmTNnIi4uDnfv3sXFixcxc+ZMxMbGYuzYsQCAcePGYebMmaryU6dOxZ49e7BkyRJcu3YNkZGROHXqFKZMmWKqRzAo6pUxYNYsYOlSQCYD1qwBAgP5NR0zYgiCIAgTY1KhTElJwbhx49CkSRP07NkTJ0+exD///IPQ0FAAwL1795CYmKgq36lTJ6xduxYrV65EUFAQNm3ahK1bt6J58+amegS9hVKpBN55B1i0iB8vWwaMHSsKZUVNBY2PB0aOBK5fr5j3IwiCqOyYdIzyl19+kbweGxtb7NyIESMwYsSIcmqR4Qiu17Q0HqBjqaVH8/OBCROAP//kluSyZcDbb/NrgsZrsyjPnQNq1AD8/Q1rE2NAYaH2tsydC2zcyNv9ww+G3ZcgCKI6YnZjlJUNNzcxGfrjx8Wv5+QAgwdzkbS0BNauFUUS0G1RPnoEhIQAL77Ihc8QZs4EHB2Bs2c1zxcUAHv38n2yKAmCIPSDhLKUWFiIOV61uV9nzwb27AHs7YEdO4DRozWvC0J57Rq3AgViY4HcXODBA+DJE/3bwxiwahWvWzRw+PhxnpcWIKEkCILQFxLKMkBX5OvDh6J7c906oE+f4nXr1wfs7IC8PODWLfH8wYPi/r17+rclPl4U7L/+4lakwK5dmvd8/lz/+xIEQVRXSCjLACGg58EDzfMLF3IB7NoVGDBAe125HBByK6i7X40VygMHxP3UVCAuTjzevVuz7M2b+t+XIAiiukJCWQa0bMm3n34KCPkQ7twBfv6Z73/2GQ/i0UXRgJ4nTzSDe4wRSmtrvt24kW8fPeJjljIZ0PC/BELkfiUIgigZEsoyYN48oEkTblEOG8ajXOfPBxQKIDQU6NZNun7RuZSHD2te11coGROF8oMP+Pavv/jY5549/Lh9e6BTJ74fb1hqWg1yc4F9+zTHVQmCIKoiJJRlgLMzT0fn7MxFbsQInnkHABYsKLl+0chXwe0qWIX6CuXNm9xytLbmka9ubny8Mi5OHJ/s2xdo3Jjvl8aiXLiQ/wiYM8f4exAEQVQGSCjLiCZNxHmS27fzBAMDBwIdOpRcV3C9xsdza/TQIX7crx/f6iuUgjXZoQOfHhIezo/XrgWEpPr9+pWNUArC+/33fA4pQRBEVYWEsgzp2xf44gvxeP58/erVrcuFraAAuHABOHWKn/8vk5/BQtmjB98KeRlWrQIyM3l0brt2pRfKjAyeDAEAsrN5AgWCIIiqCgllGTN9OvDNN8Dq1UCrVvrVkcnEyNdff+WCWbs2j5YFgMREHj0rhfr4ZPfufNuzJ+DqKo4j9unDo2yFTD9Pnhg2R1PgyBFuMVv8t1jKd99xwSQIgqiKkFCWMTIZXx1k/HjD6gnu199/59uuXfm0E2GFsIcPpevfuQPcvw9YWfGMPgDfF9yvALd4AZ4Wr04dvn/jhmHtBMQx1LFjgUaNuOv155/po0QQRNWEvt3MBCGgR7DMunbloluvHj8uyf0qWJPt2/MsQAKC+1UuB8LCxPOlcb8KczN79AA+/pjvf/ONHAoFfZwIgqh60DebmSAIpYDgdjVUKIXxSYHQUGDyZODLLwF3d/G8sUL5/Dlw8iTf79YNGDeOW6eJiTLs31/XsJsRBEFUAkgozQT1lcJcXUXhNFQohfFJAUtLYPly4MMPNc8bK5QnTvDIXB8foEEDPhVFuPf69U2wYoW8RDcxQRBEZYKE0kzw9gZcXPh+587iiiT6CGVCAnD3Lg+uEZIJlIQ2obxyhYvfiy8CGzZwQSyK4HYVXMMA8MYbQN26DGlpdpg61QJ16vBx0iNH9GsLQRCEOUNCaSbIZKJVKbhdAf2E8tgxvm3bFnBw0O/9BKG8cYNHsALA55/zoKB//wVGjeLTVmbP1hRMIZBHPdtQjRrAkSMFmDDhEjp2VKrapO/0GIIgCHOGhNKMmD8fePllYOJE8Zw+QimkolN335aEnx93y+bk8Gw+SUncigT4mKaPD8/q89lnPMsPwKetCFaiupgDgKcnEB5+C3FxhapkBOqroRAEQVRWSCjNiBde4NNDXF3Fc+pCqWsBZ8F9KliJ+mBlxd2sQv0ff+S5aTt14mOaCQnAypX8+tdfAzExPKn6s2eaY6jaEK4lJFAuWIIgKj8klGZO3f8CSZ89ExddLoowF9IQoVQvf+kS8L//8f133uFbS0tu2b71Fj8ePx7YupXvd+0qjqFqo3ZtLsQKRcnzPwmCIMwdEkozx85OXO9Sm/uVMeMsSvXyX3/NXa8+Pnz1E3W++oqXe/iQj2ECxd2uRbGwAHx9+f7t24a1iSAIwtwwSijv37+PB2qrFJ84cQLTpk3DSsFXR5QpUuOUjx8D6ema60zqiyCUCQl8O3kytwTVqVED+L//4xamQEnLhgGiW/fOHcPaRBAEYW4YJZQvvfQS/v33XwBAUlISQkNDceLECcyaNQvzKdSxzJESSsGarFcPsLU17L7qFqi1NTBpkvZy7doBkZF838EBaN265HvXr8+3ZFESBFHZsSy5SHEuXbqE4OBgAMCGDRvQvHlzHD58GHv37sVbb72FObRIYZmij1Aa6nYtWmf0aNHFq40ZM7jV2qxZcatTG2RREgRRVTBKKBUKBWz+y9a9b98+DBo0CADQtGlTJCYmll3rCADSQmlsIA/AxyRr1QJSU4F335Uua2EBfPKJ/vcmi
5IgiKqCUa7XwMBA/O9//8PBgwcRHR2NPn36AAAePXoEd/WEokSZUF4WpUzGF2Des4cnKyhLyKIkCKKqYJRF+eWXX2LIkCFYvHgxxo8fj6CgIADA9u3bVS5ZouzQRyiFNSYNpV074+qVhGBRJiXxpAbqK5oQBEFUJowSyh49euDx48fIzMyEq9rs+EmTJsGevhHLHEEoHz3icxOFMUKlsnSu1/LE1RVwdgYyMngeWmFhaoIgiMqGUa7X58+fIy8vTyWSCQkJ+PbbbxEfHw8PqYgQwihq1eILODPGxVLgwQMgN5cLpzBv0VyQyUT3K41TEgRRmTFKKAcPHozffvsNAJCeno4OHTpgyZIlCA8Px4oVK8q0gQTPgiNk6FF3vwpu14YNNec5mguC+5XGKQmCqMwYJZRnzpxB1//Ss2zatAmenp5ISEjAb7/9hu+//75MG0hwtI1TmqvbVcCUFmVSEkAB2ARBlAVGCWVOTg4cHR0BAHv37sXQoUMhl8vRsWNHJAhpXvRg0aJFaN++PRwdHeHh4YHw8HDEC0th6GD16tWQyWQaL1tDZ9pXQrQJZWkiXisCU00RycsD2rThq6mkpFTsexMEUfUwSigbNWqErVu34v79+/jnn38QFhYGAEhJSYGTk5Pe9zlw4AAiIiJw7NgxREdHQ6FQICwsDM+ePZOs5+TkhMTERNXLEHGurAhiuG+feK60Ea/lTVlPEWGMJ4cviXPnuDWZlsZz1RIEQZQGo4Ryzpw5+PDDD+Hn54fg4GCEhIQA4NZla33ym/3Hnj17MGHCBAQGBiIoKAirV6/GvXv3cPr0acl6MpkMXl5eqpenp6cxj1GpeOklPla5fz9w9So/V5ksSl1LhOnLw4dAhw48sOnuXemywkLWAPDDDzyhAkEQhLEYFQIyfPhwdOnSBYmJiao5lADQs2dPDBkyxOjGZGRkAADc3Nwky2VnZ8PX1xdKpRJt2rTB559/jkAdCyTm5eUhLy9PdZyZmQmAZxdSKBR6t00oa0idssTHBxgwwALbt8vx/feFWLJEiTt3LAHIUL++AiZqlgpt/ePjA8hklnj2TIbERAVq1TLu3qdPyzB0qAUSE2UAgNjYAowdq1t5jx61gPAbMCcHiIoqxOefK4178zLC1J8fc4f6RxrqH2mM7R99y8sYK91vfWEVkTp16pTmNlAqlRg0aBDS09Nx6NAhneWOHj2KGzduoGXLlsjIyMBXX32FuLg4XL58WWsbIiMjMW/evGLn165dW+nmfF64UBNz5nSGrW0BFiw4jOnTu8PWtgB//vk3ZDJTt047r78ehidP7BAVFYfGjXUsqCnBkSPe+PbbNsjPF3/TjR59DaNH6x7LfvPNXkhOroF+/W5j164GsLUtwMqV0XByyjfqGQT49BwH5OVZQCZjkMsZ3N2fw8GhoFT3JQjCNOTk5OCll15CRkaG5LChUUKpVCrx2WefYcmSJcjOzgYAODo64oMPPsCsWbMgl1rVVweTJ0/G7t27cejQIYNEV6FQoFmzZhgzZgwWLFhQ7Lo2i7Ju3bp4/PixQeOpCoUC0dHRCA0NhZU+WcHLAcaAoCBLXLsmQ69eSuzbJ0erVgwnTpj+i1pX/7z4ogUOHZLj998LMGqUYR+1uDgZevXiAtmnjxItWzJERVngpZeUWL26UGud1FSgdm3+/snJCvTubYlz52T4+ONCLFhQOqtyzRoZJk7UdMLY2zNcu1YALy/puubw+TFnqH+kof6Rxtj+yczMRM2aNUsUSqNcr7NmzcIvv/yCL774Ap07dwYAHDp0CJGRkcjNzcXChQsNut+UKVOwc+dOxMXFGWyZWllZoXXr1rh586bW6zY2NqoE7kXrGfOBM7ZeWfHOO0BEBLBvH/8x0rixzKz+cYr2T4MGwKFDwL17lnqtOqLO33/z7eDBwKZNcmzZwo/v3JHDykr7j7EzZ/i2aVPAw8MKc+cCQ4YAy5dbYOJEC8hkQGYmzxxkSJKGggJx4eqaNXmS+CdPgJwcGa5etVLNcy0JU39+zB3qH2mof6QxtH/0LWtUMM+aNWvw888/Y/LkyWjZsiVatmyJt99+Gz/99BNWr16t930YY5gyZQq2bNmC/fv3o74Q/WEAhYWFuHjxIry9vQ2uWxl55RXgv5k5AMw3kEegaORrQQGweDEXr+BgoHZtvrzXwYPF6x4/zrfDhvGECo0a8WMdv4k06nTowLeDBwNBQUBWFq/fsCFfT7NRI+DCheL1Cws1sx8JbNrEn6FmTb7QdVIS8MIL/FpSknQfEARRuTFKKNPS0tC0adNi55s2bYq0tDS97xMREYE//vgDa9euhaOjI5KSkpCUlITnz5+ryowbNw4zZ85UHc+fPx979+7F7du3cebMGbz88stISEjAG2+8YcyjVDocHYEJE8RjcxfKonMpP/qIv7ZuBU6e5KKUmgqsWqVZT6EQrUMhz37DhnybmsqtQm0UFUqZDIiKEvPj1qjBE7QXFABLlxavP306F+/ly8VzjPF7ANyiF4a2hWDr5GTJLiAIopJjlFAGBQVh2bJlxc4vW7YMLVu21Ps+K1asQEZGBnr06AFvb2/Va/369aoy9+7d01jj8unTp5g4cSKaNWuGfv36ITMzE0eOHEFANcq6HREh7pvrHEoBdYvy11+Bb77hx3PnAlu2iGIVF6dZ79IlnsfW2Vl8RicnqCJnb90q/l5KJXDiBN/v2FE8HxbG518WFADZ2cDu3fz82rU8abtAYiKfTgIA06aJ99q3Dzh7lguket8L45JkURJE1caoMcqoqCj0798f+/btU82hPHr0KO7fv49du3bpfR994ohiY2M1jr/55ht8I3zbVlOaNAEiI3kKu/JaJqusECzKhATgrbf4/ty5vP0AtwynTuXC9/Aht+YA0TIMDubzRwUaNuQW5a1b3IWqTnw8Fz47O6BFC81r6kMRXbsCgYHA5cvAb79xKxEAvvsOyM/n76dQACNHcqv2yy/59TfeANSXWyWLkiCqB0ZZlN27d8f169cxZMgQpKenIz09HUOHDsXly5fx+++/l3UbCS3MnQv88Yd5JkNXx9ubr3yiVHLxGTYMmDNHvO7kJAqeulUpWHNFlzeVGqcUxLVtW+l+kclE0V6xgrtWMzL4PgCsXs0FOSEB6N0biInhwTvvvad5H7IoCaJ6YJRQAoCPjw8WLlyIzZs3Y/Pmzfjss8/w9OlT/PLLL2XZPqKSI5eLVmVQELBmjaaFCADduvGtNqEUxhoFhHFKba7XouOTUrzyCh+vvHqVv++PP3LrNiAAGDsW2LCBC/ypU7z86NGAn5/mPaQsyrQ0YMcOHhxEEETlxmihJAh9+egjoE8fYNs2Lk5FKSqUmZnAlSt8v317zbL6WJTq45O6cHbmgggA337LXwAP5pHLeVL1774Ty0+fXvweUhblhx8CgwZxwSWKU1jIp9cQRGWAhJIod159lQfQ6Jq32KUL3165wscfT5/m7tB69VBsIr9gURYVypwccbqHPhYlAEyezLdbt/JAnjp1eE5dgUmTgO+/B376iVvDRREsyidPUCyF
4OXLfFtC2uJqy/jxvP+EvMUEYc6QUBImp2ZNHlwD8OQEutyugGhRPngAqM0iwunT3Erx9uaCpw+tWmlan++9B1hbi8cyGQ/00TXzyN2dj10yVjzxurAcWgmrxlVb9u7lf68jR0zdEoIoGYNCQYYOHSp5PT09vTRtIaox3btzK+zAAVFkigbyAFxUnZy4e/bOHT6mCGiOTxqS93byZL7aiIsLMHGiYW22sODTVZKS+Diljw8/n58vjltWNaHcsAH45RegUyegf3/uojY0Y2VKivjDwhSLehOEoRgklM7OziVeHzduXKkaRFRPunXjk/zj4sTFlrVZlDIZd7+ePcsDegShFGYR6TM+qc7YsdztGhysmfFIX7y8uFCqj1M+fCguK3b7NhdOc01abwiM8bHX+/e5RRgZybMqzZhRPCJYCsEtDZBQEpUDg4RyVdH0KQRRRnTtyrdnz/KtEFCjjUaNeDlhnDIrS1zQun9/w97XwgL4+GPD2yugLfL1/n1xv7CQi4EwtloS2dnAm28C4eHAiBHGt6s8OH+eP5u9PdC3LxfLlBRg9myeoEHfHwMklERlg8YoCbPAx0ccfwSA5s21R8gCxaeI7NkD5OXx+jqWJS03tEW+Cq5jAUPcr+vW8YxBU6aY39SSHTv4NjSU575NSuLi+OyZ6AXQh0uXxH0SSqIyQEJJmA3CNBFAOnK16BQRYVWRIUMq3sVZkkUJANeu6X8/ITl8SgoPbDInBKEcOJBv7e2hWjXFEMFTtygfP9adt5cgzAUSSsJsUBdKbYE8AuoWZX6+uBxXeHi5NU0n2ixKQSiF1d0MsSjVV1HZuLF0bStLEhN5EntA070t5PLVVygZ0xRKQ+oShKkgoSTMBn2FUrAo797l42SZmVywDA3kKQukLEphfqi+QvnwobgcGQBs3sxT/5U3O3fyMWIhclgbwo+R4GDNua2GCmViIvD0KR8bFuamklAS5g4JJWE2+PnxQJaXXuJjlLrw8eHWWkEBTwgA8HUnDZ2mUBYIQqltjLJXL77VVygFV2tgIM8clJRUdvMMV67kwVF79mie37aNu6wPHQK0LAikoqjbVcBQoRSsyUaNxIhlEkrC3CGhJMwGmQz43/+A//s/adGTy0X3a3Q03w4ZUv7t04ZgXWmzKF98kW+fPNEvXZsglC++yIUfKBv3K2PA55/zSOG+fXmUr0IBbN/OI2sLCng5XaL8/LnYzwMGaF4zVCiFQJ7mzQ2vSxCmgoSSqJSoT7dwcgJeeME07RAsyrQ0Pl6anc1diwDQtKkY7HL9eslRRsL4ZNeuwPDhfL8s3K83b/KVUIRAp6gonkN3+HAumEIQ1O3b2vPW7t/PxbJu3eKp/Iy1KAMDK49QKpUA5VKp3pBQEpUS9akk/ftrpp6rSNzcxCW9UlJEa9LJib+aNOHH16+LdRjjeWnVBTA9XcxV26ULn4Lh6MjHLaXGDvVh716+feEFPq3D2ZnPiVQogFGjeLYdYf3Ow4eL1xfcrgMGFI8qFsTu4UO+0HZJVEaLMiqK/50Fq5qofpBQEpUSdYvSVG5XgLuBPTz4flKSKJT16vFt06Z8e+2aqDDLl3PL7P33xfscPcoFtGFDnq/W1pavPgJwcSsNglCGhfH1QM+d4302daq4pmnnzrxMUfcrYzzYByg+PgnwlIIODrxcQoJ0O9QjXtUtyrt3zW/OqDr79/O2r11r6pYQpoKEkqiUCBaljQ0fdzMl6uOUglAKLlfRouRCyRiwdCk/t2yZaGmqu10FBPfrpk1iSjxDUSj4Fz3AhRLgQVN//cWXFhOs4U6d+LaoRXn2LLcWa9TQ7t6WyfS3DO/d465pKyvA3x+oXZvvKxT8PcwVoW1CmkSifMjPN18XNwklUSnp0QMYOpQHqTg4mLYt6pGvJQnl0aMyVRRsYSHw6ad8XwjkEaaUAEDv3lyg7t0TV1SR4tgxPqZZ9Fx2Nk/erm2pMAHBojxzRnNVlnXr+DYsjFu52tAllP/8wwOzBARrskkTLpAWFuKi3rdvm28yXEEo794t2WomjGfiRO6duXHD1C0pDgklUSmxseGioO6+NBXqFqUwNURwvQpCefs2UFgow+rV/F+uSxdujW3cyK1JQQjVLUo7O9HdWVL0a0EBH0McPhzYtUs8L7hdQ0OlI4n9/LjLV6EATp0S7/nHH3z/lVd01xWEUkgpCPD1QcPDgZdf5tYroOl2LVpXff6oIWRlGV9XH549AzIyxOMDB8rvvaozOTnA+vX886fPj8KKhoSSIEqJetKBohZlnTpc8BQKGRISnLBxI7ecFi0ChIV2xozhuWo9PLhLUp2RI/l2wwZp9+u5c+IUlE8+EQOF1McnpZDJirtf9+3jCQLc3aWTzWuzKI8fF4N7IiJ4JLB6IE/xusZZlEOGAI0bi1mDypqiLmESyvLh4EH+PwAYlje4oiChJIhSop7GrqhQyuX8ixwANmxojGfPZPD3567O+fO5ZSx8GQtWpjp9+nDX8v370tGv//4r7p8/z12mT56IAhIaWvJzCO5XQShXr+bbMWOko4q1CaW6oCQl8eW5pC1Kw4Xy4kUgJoZbvj/9ZHB1vSgqlDROWT4IP+gAEkqCqJJIjVECovv12DG+svNrr3FBrFcPeOcdsZy621XAzk6Mft2wQXcbBKEUooFnzwZ27+ZWaPPm4qLSUqhHvj59Cmzdyo/Hj5euJ7zn7dui1RsXx7evvMKf9ddfudULaLcojXGf/vKLuL9hg+bYalkhCGW7dvxHz+3bxZPeE6WHhJIgqjiCRXn1Kh9rAbjLVUAQSgCQyxnU1zafORNwceH7Qiafoqi7X7UlH1AoxKjZNWu4cN++LY7fluR2FWjdmgtzWhrw2WfcFRYQALRtK13P11dcbis1lUcvHj0qPl9EBN8vLOQBQYI4AsZblHl5wO+/831raz6OKMz3LEsEoWzWTFwfVV/365kzvO8+/LBicvZWVh490lx6jYSSIKoggkWZmsq3Hh6aEaLCXEoA6NOHaVh3bm78i3fHDqBlS+33792bJy94+FAUIHXOnOGRra6uQEgIMGeOZnv0FUorK56xBwC++45vJ0woeekyGxvxh8Ht29zdm5vL51g2bcojk4XgpmbNeLSrgBD1+vixDDk5+q8jv3UrF/Q6dYD33uPnfvtN7+p6Iwhl7do80hrQTyjj4vh0mjNngCVLgOnTjZ/iY+48fcqjq41FSOQgfM5IKAmiCqK+mgag6XYFNC3KCROKmxYtWxbPoaqOra2Y+1Wb+1Vwu3bvzt2Db7whWmo2NtpduroQ3K+FhfxeL7+sXz31cUrB7dqtG//yc3TkrldHR55bVh0nJy6oAJCcbK93OwW366uvcjEHeMJ39Zy7ZYG6UHbvzvdLGqfctYv/uMnM5D8MAODrr4EvvjCuDTk5wKefynHpkrtxNyhnRo/mP9CMHb8V3K6CR4WEkiCqIC4umsEugvUkEBAA1KnDUKdOFvr1M86sENyvGzcWz2IjCKWQEMDaWvxS7tePL7CsL4JQAtwS9fbWr54uoRTo2ZNbHjNnFq8rjHEmJ9f
[base64-encoded PNG data omitted: the remainder of one embedded matplotlib figure and a second complete Jupyter "display_data" PNG output]
"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAAcoAAADvCAYAAAByipTtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABlBUlEQVR4nO2dd1hUx9fHv7sLLL13Czbsgl2xYRSxxy4SEkt+GjWaNyaaYhJ7SzAmMRpNTFFTLFhQY9QEK/aKXbEhqICASm/L7rx/jHcL7C7LCizK+TzPfe7duW12WO73njNnzogYYwwEQRAEQWhFbOoKEARBEERVhoSSIAiCIPRAQkkQBEEQeiChJAiCIAg9kFASBEEQhB5IKAmCIAhCDySUBEEQBKEHEkqCIAiC0AMJJUEQBEHogYSSqNKMHTsWderUMXU1jKJ79+7o3r27qauhk7lz50IkEpm6GgRR5SGhJIxCJBIZtBw+fNjUVa3y1KlTR2f79enTx9TVw9ixY2Fra2vqapicwsJCLF++HK1atYK9vT0cHR3RrFkzvPPOO7h586byuBMnTmDu3LlIT083XWWJcsXM1BUgXk7++OMPjc+///47oqKiSpQ3adLkhe7z888/Q6FQvNA1XgZatmyJ6dOnlyj39vY2QW0IbQwbNgx79+5FaGgoJkyYAJlMhps3b2L37t3o1KkTGjduDIAL5bx58zB27Fg4OjqattJEuUBCSRjFm2++qfH51KlTiIqKKlFenNzcXFhbWxt8H3Nzc6Pq97JRo0aNUtuOMB1nz57F7t27sWjRInz22Wca+1auXEnW4ysOuV6JCqN79+5o3rw5zp8/j27dusHa2lr5kNm5cyf69+8Pb29vSKVS1K9fHwsWLIBcLte4RvE+yvv370MkEuHrr7/GmjVrUL9+fUilUrRr1w5nz54ttU5Pnz7FjBkz0KJFC9ja2sLe3h59+/bFpUuXNI47fPgwRCIRIiIisGjRItSsWROWlpbo2bMn7ty5U+K6Ql2srKzQvn17HD161IgW083XX38NkUiE+Pj4EvtmzpwJCwsLPHv2DABw9OhRjBgxArVr14ZUKkWtWrXwwQcfIC8vr1zrVJwtW7agTZs2sLKygqurK9588008evRI45jk5GSMGzcONWvWhFQqhZeXFwYNGoT79+8rjzl37hx69+4NV1dXWFlZoW7dunj77bf13nvAgAGoV6+e1n0BAQFo27at8nNUVBS6dOkCR0dH2NraolGjRiXErzh3794FAHTu3LnEPolEAhcXFwC83/ejjz4CANStW1fpQlf/fn/++aeynZydnTFq1Cg8ePBA45rq/zudOnVStsOPP/6ot55ExUAWJVGhPHnyBH379sWoUaPw5ptvwsPDAwCwbt062Nra4sMPP4StrS0OHjyI2bNnIzMzE0uXLi31uhs2bEBWVhYmTpwIkUiE8PBwDB06FPfu3dNrhd67dw87duzAiBEjULduXTx+/Bg//fQTAgMDcf369RKuzi+//BJisRgzZsxARkYGwsPDERYWhtOnTyuP+fXXXzFx4kR06tQJ06ZNw7179/D666/D2dkZtWrVMqidZDIZ0tLSSpTb2NjAysoKI0eOxMcff4yIiAjlg1ggIiICwcHBcHJyAsAFKzc3F5MnT4aLiwvOnDmDFStW4OHDh9iyZYtB9Skr69atw7hx49CuXTssWbIEjx8/xvLly3H8+HHExMQoXZDDhg3DtWvX8N5776FOnTpISUlBVFQUEhISlJ+Dg4Ph5uaGTz/9FI6Ojrh//z62b9+u9/4hISEYPXo0zp49i3bt2inL4+PjcerUKeVv6tq1axgwYAD8/Pwwf/58SKVS3LlzB8ePH9d7fR8fHwDAX3/9hc6dO8PMTPujc+jQobh16xY2btyIb7/9Fq6urgAANzc3AMCiRYswa9YsjBw5EuPHj0dqaipWrFiBbt26abQTADx79gz9+vXDyJEjERoaioiICEyePBkWFhalvjgQ5QwjiHJgypQprPjPKTAwkAFgP/74Y4njc3NzS5RNnDiRWVtbs/z8fGXZmDFjmI+Pj/JzXFwcA8BcXFzY06dPleU7d+5kANjff/+tt575+flMLpdrlMXFxTGpVMrmz5+vLDt06BADwJo0acIKCgqU5cuXL2cA2JUrVxhjjBUWFjJ3d3fWsmVLjePWrFnDALDAwEC99WGMMR8fHwZA67JkyRLlcQEBAaxNmzYa5545c4YBYL///ruyTFvbLlmyhIlEIhYfH68smzNnTom/mTbGjBnDbGxsdO4X2qB58+YsLy9PWb57924GgM2ePZsxxtizZ88YALZ06VKd14qMjGQA2NmzZ0utlzoZGRlMKpWy6dOna5SHh4drfO9vv/2WAWCpqallur5CoVD+nj08PFhoaCj74YcfNNpTYOnSpQwAi4uL0yi/f/8+k0gkbNGiRRrlV65cYWZmZhrlwr2WLVumLCsoKGAtW7Zk7u7urLCwsEz1J14Mcr0SFYpUKsW4ceNKlFtZWSm3s7KykJaWhq5duyI3N1cjglAXISEhSgsKALp27QqAW4yl1Ucs5j97uVyOJ0+eKN1vFy5cKHH8uHHjYGFhofM+586dQ0pKCiZNmqRx3NixY+Hg4FDq9xDo0KEDoqKiSiyhoaEa3/n8+fNKNyAAbN68GVKpFIMGDVKWqbdtTk4O0tLS0KlTJzDGEBMTY3CdDEVog3fffReWlpbK8v79+6Nx48b4559/lPWysLDA4cOHlW7i4ggW1e7duyGTyQyug+BCj4iIAFObi37z5s3o2LEjateurXH9nTt3lilITCQS4d9//8XChQvh5OSEjRs3YsqUKfDx8UFISIhBfZTbt2+HQqHAyJEjkZaWplw8PT3h6+uLQ4cOaRxvZmaGiRMnKj9bWFhg4sSJSElJwfnz5w2uO/HikFASFUqNGjU0BETg2rVrGDJkCBwcHGBvbw83NzdlMEtGRkap1xUefAKCaOp6AAsoFAp8++238PX1hVQqhaurK9zc3HD58mWt9y3tPkKfoa+vr8Zx5ubmOvvMtOHq6oqgoKASi+DyA4ARI0ZALBZj8+bNAADGGLZs2YK+ffvC3t5eeVxCQgLGjh0LZ2dn2Nraws3NDYGBgQAMa9uyIrRBo0aNSuxr3Lixcr9UKsVXX32FvXv3wsPDA926dUN4eDiSk5OVxwcGBmLYsGGYN28eXF1dMWjQIKxduxYFBQWl1iMkJAQPHjzAyZMnAfB+xfPnzyMkJETjmM6dO2P8+PHw8PDAqFGjEBERYZBoSqVSfP7557hx4wYSExOxceNGdOzYEREREZg6dWqp59++fRuMMfj6+sLNzU1juXHjBlJSUjSO9/b2ho2NjUZZw4YNAUCjz5OoeEgoiQpF3boRSE9PR2BgIC5duoT58+fj77//RlRUFL766isAMOihJZFItJarWxPaWLx4MT788EN069YNf/75J/79919ERUWhWbNmWu9r7H0qAm9vb3Tt2hUREREAeKRxQkKChhDI5XL06tUL//zzDz755BPs2LE
DUVFRWLduHQDD2rYimTZtGm7duoUlS5bA0tISs2bNQpMmTZSWrkgkwtatW3Hy5ElMnToVjx49wttvv402bdogOztb77UHDhwIa2trZftERERALBZjxIgRymOsrKwQHR2N/fv346233sLly5cREhKCXr16lQgk04eXlxdGjRqF6Oho+Pr6IiIiAkVFRXrPUSgUEIlE2Ldvn1bvwU8//WTw/YnKhYJ5iErn8OHDePLkCbZv345u3bopy+Pi4ir83lu3bsVrr72GX3/9VaM8PT1dGXhRFgSL7/bt2+jRo4eyXCaTIS4uDv7+/i9W4WKEhITg3XffRWxsLDZv3gxra2sMHDhQuf/KlSu4desW1q9fj9GjRyvLo6KiyrUe6ghtEBsbq9EGQpm6VQwA9evXx/Tp0zF9+nTcvn0bLVu2xLJly/Dnn38qj+nYsSM6duyIRYsWYcOGDQgLC8OmTZswfvx4nfWwsbHBgAEDsGXLFnzzzTfYvHkzunbtWiJASywWo2fPnujZsye++eYbLF68GJ9//jkOHTqEoKCgMn13c3Nz+Pn54fbt20o3qq5sR/Xr1wdjDHXr1lVahvpITExETk6OhlV569YtAHhps1W9rJBFSVQ6gpWmbpUVFhZi1apVlXLv4tbgli1bSgxjMJS2bdvCzc0NP/74IwoLC5Xl69atq5CxdcOGDYNEIsHGjRuxZcsWDBgwQONBqq1tGWNYvnx5uddFoG3btnB3d8ePP/6o4SLdu3cvbty4gf79+wPgY2jz8/M1zq1fvz7s7OyU5z179qzE36dly5YAYLD7NTExEb/88gsuXbqkYW0DfHhQcQy5/u3bt5GQkFCiPD09HSdPnoSTk5MyslX4exT/+w8dOhQSiQTz5s0r8R0ZY3jy5IlGWVFRkYaVWVhYiJ9++glubm5o06aNzroS5Q9ZlESl06lTJzg5OWHMmDH4v//7P4hEIvzxxx+V4s4cMGAA5s+fj3HjxqFTp064cuUK/vrrrzL1J6pjbm6OhQsXYuLEiejRowdCQkIQFxeHtWvXlumajx490rCoBGxtbTF48GDlZ3d3d7z22mv45ptvkJWVVUIIGjdujPr162PGjBl49OgR7O3tsW3btlL7bktDJpNh4cKFJcqdnZ3x7rvv4quvvsK4ceMQGBiI0NBQ5fCQOnXq4IMPPgDAraGePXti5MiRaNq0KczMzBAZGYnHjx9j1KhRAID169dj1apVGDJkCOrXr4+srCz8/PPPsLe3R79+/UqtZ79+/WBnZ4cZM2ZAIpFg2LBhGvvnz5+P6Oho9O/fHz4+PkhJScGqVatQs2ZNdOnSRed1L126hDfeeAN9+/ZF165d4ezsjEePHmH9+vVITEzEd999p3xJEUTs888/x6hRo2Bubo6BAweifv36WLhwIWbOnIn79+9j8ODBsLOzQ1xcHCIjI/HOO+9gxowZynt6e3vjq6++wv3799GwYUNs3rwZFy9exJo1a6pNIo4qgwkibYlXEF3DQ5o1a6b1+OPHj7OOHTsyKysr5u3tzT7++GP277//MgDs0KFDyuN0DQ/RNsQAAJszZ47eeubn57Pp06czLy8vZmVlxTp37sxOnjzJAgMDNYZyCMNDtmzZonG+cP+1a9dqlK9atYrVrVuXSaVS1rZtWxYdHV3imrrQNzxE/bsL/PzzzwwAs7Oz0xiOIXD9+nUWFBTEbG1tmaurK5swYQK7dOlSiXqXZXiIrvrVr19fedzmzZtZq1atmFQqZc7OziwsLIw9fPhQuT8tLY1NmTKFNW7cmNnY2DAHBwfWoUMHFhERoTzmwoULLDQ0lNWuXZtJpVLm7u7OBgwYwM6dO1dqPQXCwsIYABYUFFRi34EDB9igQYOYt7c3s7CwYN7e3iw0NJTdunVL7zUfP37MvvzySxYYGMi8vLyYmZkZc3JyYj169GBbt24tcfyCBQtYjRo1mFgsLjFUZNu2baxLly7MxsaG2djYsMaNG7MpU6aw2NhY5THC/865c+dYQEAAs7S0ZD4+PmzlypUGtwNRfogYM0FUAkEQBKGT7t27Iy0tDVevXjV1VQhQHyVBEARB6IWEkiAIgiD0QEJJEARBEHqgPkqCIAiC0ANZlARBEAShBxJKgiAIgtBDtUs4oFAokJiYCDs7O52ppgiCIIhXH8YYsrKy4O3trZxVSBvVTigTExMNnkyXIAiCePV58OABatasqXN/tRNKOzs7ALxh1KcmKg2ZTIb//vsPwcHBlD5KC9Q++qH20Q+1j36offRjbPtkZmaiVq1aSl3QRbUTSsHdam9vX2ahtLa2hr29Pf1QtUDtox9qH/1Q++iH2kc/L9o+pXXDUTAPQRAEQeiBhJIgCIIg9FDtXK/lQUoKcPo0IJUCwcGmrg1BEARRkZBQGsHp08DrrwPt2pFQEsSrhlwuh0wmM3U1NJDJZDAzM0N+fj7kcrmpq1Pl0NU+EokEZmZmLzwUkITSCNzd+TolxbT1IAiifMnOzsbDhw8rZRLxssAYg6enJx48eEDjv7Wgr32sra3h5eUFCwsLo69PQmkEbm58TUJJEK8OcrkcDx8+hLW1Ndzc3KqUICkUCmRnZ8PW1lbvwPjqirb2YYyhsLAQqampiIuLg6+vr9FtR0JpBIJFmZcH5OQANjamrQ9BEC+OTCYDYwxubm6wsrIydXU0UCgUKCwshKWlJQmlFnS1j5WVFczNzREfH6/cbwzU4kZgYwMI/0dkVRLEq0VVsiSJF6c8XixIKI1AJKJ+SoIgiOoCCaWRCP2UqammrQdBEARRsZBQGglZlARBvKrUqVMH3333namrUWUgoTQSEkqCIEyNSCTSu8ydO9eo6549exbvvPPOC9Wte/fumDZt2gtdo6pAUa9GQq5XgiBMTVJSknJ78+bNmD17NmJjY5Vltra2ym3GGORyOczMSn/suwkPOAIAWZRGQxYlQbzaMMaHf5liMTTfgaenp3JxcHCASCRSfr558ybs7Oywd+9etGnTBlKpFMeOHcPdu3cxaNAgeHh4wNbWFu3atcP+/fs1rlvc9SoSifDLL79gyJAhsLa2hq+vL3bt2vVC7btt2zY0a9YMUqkUderUwbJlyzT2r1q1Cr6+vrC0tISHhweGDx+u3Ld161a0aNECVlZWcHFxQXBwMHJycl6oPvogi9JISCgJ4tUmNxdQM8gqlezs8huf/emnn+Lrr79GvXr14OTkhAcPHqBfv35YtGgRpFIpfv/9dwwcOBCxsbGoXbu2zuvMmzcP4eHhWLp0KVasWIGwsDDEx8fD2dm5zHU6f/48Ro4ciblz5yIkJAQnTpzAu+++CxcXF4wdOxbnzp3D//3f/+GPP/5Ap06d8PTpUxw9ehQAt6JDQ0MRHh6OIUOGICsrC9HR0RWaTYmE0kjI9UoQxMvA/Pnz0atXL+VnZ2dn+Pv7Kz8vWLAAkZGR2LVrF6ZOnarzOmPHjkVoaCgAYPHixfj+++9x5swZ9OnTp8x1+uabb9CzZ0/MmjULANCwYUNcv34dS5cuxdixY5GQkAAbGxsMGDAAdnZ28PHxQatWrQBwoS
wqKsLQoUPh4+MDAGjWrBkyMzPLXA9DIaE0ErIoCeLVxtqaW3amund50bZtW43P2dnZmDt3Lv755x+l6OTl5SEhIUHvdfz8/JTbNjY2sLe3R4qRD8AbN25g0KBBGmWdO3fGd999B7lcjl69esHHxwf16tVDnz590KdPH6Xb19/fHz179kSLFi3Qu3dvBAcHY+jQoZBIJEbVxRCoj9JI1IWyiuVPJgiiHBCJuPvTFEt5JgeyKebDnTFjBiIjI7F48WIcPXoUFy9eRIsWLVBYWKj3Oubm5sXaRwSFQlF+FVXDzs4OFy5cwMaNG+Hl5YXZs2fD398f6enpkEgkiIqKwt69e9G0aVOsWLECTZo0QXx8fIXUBSChNBrB9SqTARkZpq0LQRCEoRw/fhxjx47FkCFD0KJFC3h6euL+/fuVWocmTZrg+PHjJerVsGFDpWVoZmaGoKAghIeH4/Lly7h//z4OHjwIgIt0586dMW/ePMTExMDCwgK7d++usPqS69VILC0BOzsgK4v3Uzo6mrpGBEEQpePr64vt27dj4MCBEIlEmDVrVoVZhqmpqbh48aJGmZeXF6ZPn4527dphwYIFCAkJwcmTJ7Fy5UqsWrUKALB7927cu3cP3bp1g5OTE/bs2QOFQoFGjRrh9OnTOHDgAIKDg+Hu7o7Tp08jNTUVDRs2rJDvAJBQvhDu7lwoU1IAX19T14YgCKJ0vvnmG7z99tvo1KkTXF1d8cknn1RYIMyGDRuwYcMGjbIFCxbgiy++QEREBGbPno0FCxbAy8sL8+fPx9ixYwEAjo6O2L59O+bOnYv8/Hz4+vpi48aNaNasGW7cuIHo6Gh89913yMzMhI+PD77++muNgKXyhoTyBXB3B+7epYAegiBMz9ixY5VCA/DMONqGTNSpU0fpwhSYMmWKxufirlht10lPT9dbn8OHD+vdP2zYMAwbNkzrvi5duug8v0mTJti3b59GmUKhqNCoV+qjfAFoiAhBEMSrDwnlC0BDRAiCIF59SChfABJKgiCIVx8SyheAXK8EQRCvPiSULwBZlARBEK8+JJQvAAklQRDEqw8J5QtArleCIIhXHxLKF0CwKFNTgQpKbEEQBEGYGBLKF8DVla8VCuDpU9PWhSAIgqgYTCqUq1evhp+fH+zt7WFvb4+AgADs3btX7zlbtmxB48aNYWlpiRYtWmDPnj2VVNuSmJsDTk58m/opCYJ4Vbl//z5EIlGJvK3VBZMKZc2aNfHll1/i/PnzOHfuHHr06IFBgwbh2rVrWo8/ceIEQkND8b///Q8xMTEYPHgwBg8ejKtXr1ZyzVWou18JgiAqm7Fjx0IkEpVYjJlQ+UXo3r07pk2bVqn3rCxMKpQDBw5Ev3794Ovri4YNG2LRokWwtbXFqVOntB6/fPly9OnTBx999BGaNGmCBQsWoHXr1li5cmUl11wFRb4SBGFq+vTpg6SkJI1l48aNpq7WK0OVSYoul8uxZcsW5OTkICAgQOsxJ0+exIcffqhR1rt3b+zYsUPndQsKClBQUKD8LCTOlclkkMlkBtdPOLb4OS4uEgBiJCXJIZNV34geXe1DcKh99FMV2kcmk4ExBoVCwaedYgzIzTVNZaytNWZvFpKSC/VThzEGCwsLuAtv7WooFAqEhYVBLpdj06ZNynKZTIYaNWrg66+/xujRo7Fv3z4sXrwYV69ehUQiQceOHfHdd9+hfv36yusIa31Tcmmrn8C2bdswd+5c3LlzB15eXpg6darG83z16tX47rvv8ODBAzg4OKBLly7YsmULAGDr1q1YsGAB7ty5A2tra7Rq1QqRkZHKSan1tY9CoQBjDDKZTDnXpXo7GILJhfLKlSsICAhAfn4+bG1tERkZiaZNm2o9Njk5GR4eHhplHh4eSE5O1nn9JUuWYN68eSXK//vvP1hbW5e5vlFRURqf8/P9ANTFiRO34eMTW6ZryWRiREX5oGnTNNSpk1XmulRFircPoQm1j35M2T5mZmbw9PREdnY2CgsLgZwcONasaZK6pD98CDwXAXWysko+J2QyGYqKinTOnjF48GCMGzcOiYmJsLW1BQDs27cPubm56NmzJzIzM5GWloaJEyeiWbNmyMnJweLFizF48GAcPXoUYrEY2dnZAICcnByd9ykqKkJhYaHW/RcvXsSoUaPw6aefYsiQIThz5gxmzJgBa2trvPHGG4iJicH777+PH3/8Ee3bt0d6ejpOnjyJzMxMJCcnIywsDPPmzcOAAQOQlZWFkydPIiMjA3K5vNT2KSwsRF5eHqKjo1FUVKSxL9fAFyGTC2WjRo1w8eJFZGRkYOvWrRgzZgyOHDmiUyzLysyZMzXeWjIzM1GrVi0EBwfD3t7e4OvIZDJERUWhV69eMDc3V5afPSvGvn2Ao6Mv+vWrb/D1ioqA0FAJdu4Uo2NHBaKj5aWfVIXR1T4Eh9pHP1WhffLz8/HgwQPY2trC0tISKGZ9VCb29vYaQskYQ1ZWFuzs7CBSszQBwNzcHP/++y9qFhP1mTNnYubMmRgyZAimTp2KAwcO4K233gIA7Ny5EwMHDkSNGjUAAG+++abGuevXr4eHhwcePnyI5s2bKwXWxsZG53PTzMwMFhYWWvevWbMGPXr0wIIFCwAArVu3RlxcHH744QdMmjQJT548gY2NDUaMGAE7OzsAfKotALhz5w6KiooQGhoKHx8fACjhddTXPvn5+bCyskK3bt3431UNQ6fmMrlQWlhYoEGDBgCANm3a4OzZs1i+fDl++umnEsd6enri8ePHGmWPHz+Gp6enzutLpVJIpdIS5ebm5kb9QxY/z8uLr9PSJDA3N+wfS6EAxo8Hdu7kny9fFkMiEUP8CgzWMbZdqwvUPvoxZfvI5XKIRCKIxWKIxWLA1hZ4bklVNuJirlfBnSjUTx2RSITXXnsNq1ev1ih3dnaGWCyGhYUFRo4ciY0bN2LMmDHIycnBrl27sGnTJuW1bt++jdmzZ+P06dNIS0tT3u/hw4fw8/NTHqdsGx1oqx8A3Lx5E4MGDdLY16VLFyxfvhyMMfTu3Rs+Pj5o0KAB+vTpgz59+mDIkCFKN2vPnj3h7++P3r17Izg4GMOHD4eTMOSglPYRi8UQiURaf1uG/taq3KNZoVBo9CmqExAQgAMHDmiURUVF6ezTrAzKmp2HMWDaNOD33/kLq5kZ7wYpNk8qQRCmRiTiVp0plmJWUWnY2NigQYMGGouzs7Nyf1hYGA4cOICUlBTs2LEDVlZWGlGxAwcOxNOnT/Hzzz/j9OnTOH36NADutqwM7OzscOHCBWzcuBFeXl6YPXs2/P39kZ6eDolEgqioKOzduxdNmzbFihUr0KhRI8TFxVVK3QATC+XMmTMRHR2N+/fv48qVK5g5cyYOHz6MsLAwAMDo0aMxc+ZM5fHvv/8+9u3bh2XLluHmzZuYO3cuzp07h6lTp5rqK5Qp6pUx4PPPgRUr+P/B+vVAs2Z8n44RMQRBEC9Mp06dUKtWLWzevBl//fUXRowYobSmnjx5gtjYWHzxxRfo2bMnmjRpgmfPnpXr/Zs0aYLjx49rl
B0/fhwNGzZUBtiYmZkhKCgI4eHhuHz5Mu7fv4+DBw8C4JZi586dMW/ePMTExMDCwgKRkZHlWkd9mNT1mpKSgtGjRyMpKQkODg7w8/PDv//+i169egEAEhISNMzoTp06YcOGDfjiiy/w2WefwdfXFzt27EDz5s1N9RUMFkqFAvi//wN++IF/XrkSCAsD9uwBLl0Crl4FBg6s2LoCQGwsMGsWsHAh0LBhxd+PIIiKp6CgoERQo5mZGVyF9GEA3njjDfz444+4desWDh06pCx3cnKCi4sL1qxZAy8vLyQkJODTTz81qh6pqaklkhJ4eXlh+vTpaNeuHRYsWICQkBCcPHkSK1euxKpVqwAAu3fvxr1799CtWzc4OTlhz549UCgUaNSoEU6fPo0DBw4gODgY7u7uOH36NFJTU9GkSROj6mgUrJqRkZHBALCMjIwynVdYWMh27NjBCgsLNcpTUhjjtiJjMpn2cwsKGAsN5ceIRIz98INq3+LFvDwsrOR5MTGM3bpVpmoyxhhTKHTXJSSE3+/dd8t+XX3oah+CQ+2jn6rQPnl5eez69essLy/PZHXQhVwuZ8+ePWNyubzEvjFjxjAAJZZGjRppHHf9+nUGgPn4+DCFQqGxLyoqijVp0oRJpVLm5+fHDh8+zACwyMhIxhhjcXFxDACLiYnRWcfAwECt9ViwYAFjjLGtW7eypk2bMnNzc1a7dm22dOlS5blHjx5lgYGBzMnJiVlZWTE/Pz+2efNmZb179+7N3NzcmFQqZQ0bNmQrVqwwuH30/V0N1QMSSgPR9Y9cVMRYfdFd9i3eZ0nxBSXOy8lhrE8fLk5mZoxt3Ki5f+dOvs/fX7P80SPGLC0Zq1mTC19Z+OQTfu6FC5rlMhljTk78fkFBZbtmaVSFB11VhtpHP1WhfV5WoSQqXiirXDDPy4YEcuwSD8Y0LIftsGDgyRON/bNmAfv28fHDf/8NjBqleb7QR3nzJqA+JOjwYSA/H3j4sMQl9cIYsHYtP7d44PDp04DQ9XDrluHXJAiCqM6QUL4oEgmW1whHJuxge+4IEBCgVKFHj1R9kps2AdpSL9atC1hZAQUFwN27qvKjR1XbCQmGVyc2VtVfun07H68poJ4/PiEByMsz/LoEQRDVFRLKcuB2/T7ohBPIdvEBbt8GOnYEDh3CokVcALt2BQYM0H6uWAwIuRXUc7sbK5RHjqi2U1OB6GjV5+ITs9y5Y/h1CYIgqisklOWAnx9wDc3R1eI0Ctt0BJ49AwsOhmzNbwB4hKm+YVFC0K4wROTJE83hIsYIpYUFXz9PlYjERCAmhtfjefpGcr8SBEEYAAllOTBvHtCoEXAxyQN9zA9CPnIUREVF+Fn+P2yq8wm6ddGfLL34WMpiw40MFkrGVEI5fTpfb9/O+z737eOf27UDOnXi27FlS02rQX4+sH+/Zr8qQbwKsOcJtolXg/L4e5JQlgMODjwdnYMDcOiUFYbmbcB8zAYAhNwPB4YN05sKSxBKwfUquF0Fq9BQobxzh1uOFhbAzJmAszPvr4yOVvVP9u2rGj/5IhblokVAr17A7NnGX4MgqhLCwPfKykZDVA5C4vMXSY1o8lyvrwqNGgEbNwL9+wO7/hZhF+bBplVDTL/2NrBjB/evrlyptbNScL3GxgKFhcCxY/xzv378VEOFUrAmO3QA7OyAwYOB334DNmwAhEkZ+vVTpct7EaEUhPf777n1+jyPMUG8tJiZmcHa2hqpqakwNzfXm9O0slEoFCgsLER+fn6VqldVQVv7MMaQm5uLlJQUODo6lphiqyyQUJYjffsCX34JfPIJ/9zztzAgpw7wxhtAfDxPvTN0KFeX51n7AaBWLS40WVnA5cvAuXO8PCzMOKHs3p2vR4zgQrl2LXeRurkBbduqLFVjhTIjAxCSb2Rnc/1XyzRIEC8lIpEIXl5eiIuLQ3x8vKmrowFjDHl5ebCysioxOwahv30cHR31TpxhCCSU5cxHH3EhcnICWrYEgM7A9eu8I/Obb3inYVQUj/CZMgWQSCAS8cjX06e5sBUVcR3t2pVfMymJR89qmQRFiXr/ZGAgX/fsyeshjJ3s04dH2fr68s9PnvDFxaVs3/HECZ6STyLhArx8OfDee2W7BkFURSwsLODr61vl3K8ymQzR0dHo1q0bzT6jBV3tY25u/kKWpAAJZTkjEvHZQTSwsQHCw7mJOGkScOoU8P77fAqRn34C2rRB8+ZcKP/4g5/StSvPIyuVcpF89AioV0/3fePigAcPAHNzPpQT4NuDB3OLEuAWr1CdmjV5MoPbt8sulEIfalgYF807d4BffhFT7ljilUAsFpeYt9DUSCQSFBUVwdLSkoRSCxXdPuTsrkz8/XlI6+rVPPLn/HmgfXtgzBj0sDsLQBXz07UrF93atfnn0tyvgjXZrh3PAiQwYgRfi8VAcLCq/EUCeoSxmd27q9zM334rhkxGPyeCIF496MlW2YjF3Kq8eZPns1MogN9/xxvftcdptMcYrIMl8pRu17IKpdA/KdCrFzB5MvDVV5qWo7FCmZcHnOWajm7dgNGjuXWalCTCwYO1ynYxgiCIlwASSlPh6cnDZE+dAt58E8zCAu1xFuswDo9ENdF8/UfAvXtlFkqhf1LAzAxYtQqYMUOz3FihPHOGR+Z6e3NXsIWF6tqbNzfC6tViPHpUtmsSBEFUZUgoTU2HDrxj8sFDzLNcgvvwgTN7CtGyr4EGDbBwXxtsxki03fIJ78/87z/eKagWbBAfz4d8SCSqZAKloU0or1/n4tejBxARoXELJYLbVXANA8D48UCtWgxPn1rh/fclqFmT95OeOFH25iAIgqhqkFBWEUTubtjf9lPUx11sG7OLh6gyBu+kCxiJLehzOZy7bHv35mGrVlZ8PX48kpb+iRp4iDZtAFtbw+4nCOXt29z7CwCLF/OgoEOHgJAQPmxl1ixNwRQCebp1U5XZ2AAnThRh7Nir6NiRX+zUKWD+/BdsFIIgiCoARb1WIebPB377TYIe3w4EnAYC8fG4uP4S1s25hzZOcXir8z2uZHFxQG4utyzv3EFH/IqHAB7fqA+M7847Kl97TWOsZnHq1OFu2dxcns3HzIxbkQDv09y5k5cvXMiPWbaMD1sRrEShD1XAwwMYPPgu+vVrhP37xejXT3M2FIIgiJcVEsoqxGuv8UWJjw+sR/lg+RzAphB4c9dzdydjPDfd+fPA4cO4+9th1HlyHh5Zd4Ff7wK//srP9/fnmYAGDODRtWoZPczNuZv11i2+HD0KyGTcdbtqFc+JsHYt8M47fPhnv36AvT2Qk8PHZgpp97Qh7IuP5+Msy2EYE0EQhMkg12sVp9bzQNKcHFXiAIhE3ITr1w8ID8cb9c/ACc9w4vN/eMaDdu34MZcu8aSsAQE8eGjcOGDbNp4CCCr369WrwI8/8m0hcYCZGTBhAvf2AsCYMTxLEMCtSX1ZtGrU4EIsk4ECewiCeOkhi7KKY2XFEw+kpPDIV2dnzf2McYswC/ZwCO0HNOvHd6Sl8Qkod+/mU4ekpgLr1vHFzAxo3BjzivzQDC0Q
s7AFzFNbwNurFoYN00z/9PXXwMGD/B6LF/Oy4m7X4kgkgI8P9wzfu6ca4kIQBPEyYpRQPnjwACKRCDVr1gQAnDlzBhs2bEDTpk3xzjvvlGsFCS40glDytHgq0tKA9HTNeSYBAK6uwFtv8UUm45nWd+8G/v6bR/BcvYrWuIrWAJDKT8l/5gDz7s24j1UkAkQi2IhEOO0uwtHbIiiYCEnwQu+0VsDZ1kCLFoCODCb16nGhjIsrObaTIAjiZcIooXzjjTfwzjvv4K233kJycjJ69eqFZs2a4a+//kJycjJm09xL5Urt2jxRuraxlMLwjtq1dWoW94MKHaDLlvFcd5cv4+6OKzj9y2W0wBU0xk1Y5mdoHdPhCGCgesFXzxeJBGjSBGjVCmI/P3ilpkKUng4UFWFsTj6aoQA1f88H4vK5r7ZePa7mDRpwM5mSOxME8RJglFBevXoV7du3BwBERESgefPmOH78OP777z9MmjSJhLKc0Zd0QBDKMuVZrVULqFULli37I+wXXjQurBC/fRIL3LjB0+8wprEo5Az79ijQQHQXDXNigJgYbs5evQpcvQoJgPZqtwh9vuDw86U4trYq0WzYkJud3brpUXuCIAjTYJRQymQySJ9PZbF//368/vrrAIDGjRsjKSmp/GpHANAvlLdv87UxCcm9vfnUW6mpwJQPLLgrtUULrceKAfRT96ozxiN1YrhoKs6fx7Pbt+FUowbElpZ4kGaJw6cs4eBuiddHWvLM7vfucX9sQgJPanvpEl8AYMkSnqT2tdf4GNK+fYv5kl9hcnL4S0etWvqjpAiCMAlGCWWzZs3w448/on///oiKisKCBQsAAImJiXAp61QURKmUu0X5HJGIT8D85AnQpo0RJ9esyZeBAyGXyXBszx7069cPYnNzpJwHRrcFPMXA6yuKnVtQwFMJPR8HikuXgH//5QM3//mHLwBPqNCrFxfQ9HTVkpGhWtvYcMX39ga8vFTb3t58HIuFRcnF3JwL/bNnfElP19wuLOQvDG3b8mtWFImJwHff8ZDjrCw+KWnLlkDr1kCrVnxp0oTXtzqTl8dfrFxdyV1fnigUQGYmD+4T/i+ofbVilFB+9dVXGDJkCJYuXYoxY8bA398fALBr1y6lS5YoPwwRSmGOybLStq1x55VG3bp8nZzMExaoz2gCqRRo1IgvAowBV67wSN19+3jw0e3bKpNZH3Fx5Vp3DWrU4I3Uti0fdtOmDX9gvwg3bvBw4j/+4IFWALcks7L4gFYh/RHA26phQ96A5uaaiyD81tb8hUFYbG1Va39/Ptnpy/gAvHaND+r9/XculDY2/IdVty7v7xa2fXx4xn8XFx4mXtk8fcpnMo+J4eusLP6SI/xm3N3L/56Jifx3Eh3N/1cKCvhLa61aqhdYYVsi4d4cYbl7l6/j4oD8fM3rqv+upFIe2OfgADg68rWwbW/P/2cLCjSXwkK+NjPT/C2qL1KpMlhQ+bssvq2LZs34y2MlY5RQdu/eHWlpacjMzISTk5Oy/J133oG1xhORKA8EoUxM5M9VwcBQKF7M9VqRODnx/6mMDG48Nm1aygkiEeDnx5dPPuFvugcP8geBRKL6R1Vf29vzB2hSEm8cYUlK4m7hzEz+j6u+yOWqe1pZ8es4OakWR0e+7+JFnvz20SO+7NypOs/WVtNyVbdmXVz4NdTraWEBAHC+cQOSX37h0ccCXbrw7xscDMTGAhcuKN3ZuHiRf4crV17gLwFer6Agbp337Fm6lZydzS3r/Hy+FBSotvPz+Q/P2pq3g7oo29qqHoLGIpPxtv7hB+DwYc19OTnKPnGdWFryv4GzM187OfEyqbTkYmHBv1turvbFwoL/xuzsSq5TUlR/J21vsOq/l1q1uGC2bcs9Bi4u/DrCYmNTss0Y420t1CUtDbUOHIAkMpILo7a0V7GxhraybmQyvuTk8M/JyS9+zfJkwQLgiy8q/bZGCWVeXh4YY0qRjI+PR2RkJJo0aYLevXuXawUJ3o8oTOCcmMhfoAE+8XJ+PhdOoayqIBLxl/6YGP7yWqpQFsfens86PXhw+VZMLucPApGIN6o+srO5WJ09y5dz5/ibSXa2KqWRIVhZwczGBl3T0lRlgwfz5BDqWeyFPuIxY/hnhUJlARQW8noLa2G7sJA/SLOz+cMtJ0e1/ewZr3diIrfKfv+dX7d5cy6a7u78pSI5ma+FbWFSVGMQi/kP0sxMcy0sbm4lXzK8vQEHBzTcvBlm777L6ytca9AgYOpU3k4JCdwKEqwhYf3wIbfqior4P4TwclOZ1K2rcpfb2fGsWefO8en0Hjzgy/bt2s8Vi/k5traqv2duLhfL55gDfCiX+jktW/IAuK5d+QvBw4f8Purrhw/5b0WIOK9XT3Pb25v/zoq/UBYW8rbMzNTs6hC2MzN5HXS9gBQVaf4W1X+bhYWqQEGg5LY+TDQo2yihHDRoEIYOHYpJkyYhPT0dHTp0gLm5OdLS0vDNN99g8uTJ5V3Pao1YzF9KhTgYQRSF53T9+vx5VNWoW5cLZUV6RsuMRGJ4Tj1bW27xdemiKsvO1rRci28/far5MAGAvDyI8vIgNzOD6K23IP74Y6Bx49LvLxbzqOAGDcr8NZXk5/PJwqOi+BITU7pVBvAflJUVt8aERSrla7FYU5yzs3k/IsAfuoIbThs6EgCbA1A61NzdeVqoiRNVqakA7jbR5TphjLs8nzzhf4MnT/jy7FlJ96C6m1Aq5RadtbXmYmXFBSYzky9ZWZpre3suVK1acfe24IkoTlYW9xKcO8dfWm7c4L8P4bpyOW8zQYS0YWEBZmODpx4ecBw4EJLXXuMvDg4O2o83BvIE6sWox+uFCxfw7bffAgC2bt0KDw8PxMTEYNu2bZg9ezYJZQVQu7ZKKAWqqttVoF49vr53r/LvnZzMn53lHotja6v/ga2OXM4fhunpkKWlYf/NmwgaNQriygzOsbTk7taePYEvv+TRtQcOcLd2QQFvIE/Pkms7u7LdRy5XWQ0yGbcoBMtX2C4o4CHWxd3kiYlgSUl46uYGh08/hVlISOnWfnFEIpUrU+ggrwrY2fFJYotPFAvwH2henko0s7P59y4u2GZmKFILlpNU9+AuE2CUUObm5sLu+T/Sf//9h6FDh0IsFqNjx46Ij483+DpLlizB9u3bcfPmTVhZWaFTp0746quv0Eg9yKMY69atw7hx4zTKpFIp8ot3Sr9iaAvoeZGI18pAeF5VtlAWFPDA0YIC/gJfEbEUBiGRqPo+a9ZEoeBSNCWurnwOtZCQ8r2uRKISKiNQF4JqE+UrEqkE0dPT1LUh9GDUoK0GDRpgx44dePDgAf79918EBwcDAFJSUmBfhn+UI0eOYMqUKTh16hSioqIgk8kQHByMHKEjWQf29vZISkpSLmUR55cVQQz371eVvWjEa0UjWJTl5XplTBVjoI+LF7mh8vQpDy4lCIJ4EYwSytmzZ2PGjBmoU6cO2rdvj4CAAADcumzVqpXB19m3bx/Gjh2LZs2awd/fH+vWrUNCQgLOnz+v9zyRSARPT0/l4uHhYcz
XeKl44w3eNXTwILeSgJfLoiytj740Hj0COnTgsSD37+s/9tQp1fYPP3BvH0EQhLEY5XodPnw4unTpgqSkJOUYSgDo2bMnhgwZYnRlMp53ZjsXnyKjGNnZ2fDx8YFCoUDr1q2xePFiNNMxQWJBQQEK1AILMp8HWMhkMsiEMWwGIBxblnPKE29vYMAACXbtEuP77+VYtkyBuDgzACLUrSuDiaqlRFv7eHsDIpEZcnJESEqSwc3NuGufPy/C0KESJCXxEPrDh4sQFqZbeU+elEB4B8zNBcLD5Vi8WGHczcsJU/9+qjrUPvqh9tGPse1j6PEixl7sXf/hw4cAoJxJxFgUCgVef/11pKen49ixYzqPO3nyJG7fvg0/Pz9kZGTg66+/RnR0NK5du6a1DnPnzsW8efNKlG/YsOGlG/N5+bIrZs/uDEvLIixYcBwffRQIS8sibNz4T5UdT/6//wXjyRMrhIdHo2HDZ6WfUIwTJ7zw3XetUVioeqcbNeomRo3SPWZs4sQgPH5sg3797mHPnnqwtCzCmjVRsLcvNOo7CDAGJCbaoqBAApGIQSxmcHHJg61t0QtdlyAI05Cbm4s33ngDGRkZersNjRJKhUKBhQsXYtmyZch+PubKzs4O06dPx+effw6xEfkqJ0+ejL179+LYsWNlEl2ZTIYmTZogNDRUmUpPHW0WZa1atZCWllam/lSZTIaoqCj06tUL5iYKNmAM8Pc3w82bIgQFKbB/vxgtWzKcOWP6B7Wu9unRQ4Jjx8T4448ihISU7acWHS1CUBAXyD59FPDzYwgPl+CNNxRYt06u9ZzUVKBGDX7/x49l6N3bDBcvivDJJ3IsWPBiVuX69SJMmKDphLG2Zrh5s6jUWIyq8PupylD76IfaRz/Gtk9mZiZcXV1LFUqjXK+ff/45fv31V3z55Zfo3LkzAODYsWOYO3cu8vPzsWjRojJdb+rUqdi9ezeio6PLbJmam5ujVatWuHPnjtb9UqlUmcC9+HnG/OCMPa+8eO89YMoUYP9+/jLSsKGoSv3jFG+fevV4IpGEBLMyBzMKKV8HDQK2bhUjMpJ/josTw9xc+8vYhQt83bgx4O5ujjlzgCFDgFWrJJgwQQKRiEfiOzmVLUlDUZFq4mpXVx7k+eQJkJsrwo0b5hrD/fRh6t9PVYfaRz/UPvopa/sYeqxRwTzr16/HL7/8gsmTJ8PPzw9+fn5499138fPPP2PdunUGX4cxhqlTpyIyMhIHDx5EXSPGP8nlcly5cgVeFZm8ugrx1luaQ9yqaiCPQPHI16IiYOlSLl7t2/NUqu7umulNBU6f5uthw/j4d2HcvY53Io1zOnTg60GD+HjwrCx+fv36fIx4gwbA5cslz5fLVYlh1Nm6lX8HV1cgPp6P03ztNb6vqmX5IgiifDFKKJ8+fYrGWjKLNG7cGE+fPjX4OlOmTMGff/6JDRs2wM7ODsnJyUhOTkaekOUDwOjRozFz5kzl5/nz5+O///7DvXv3cOHCBbz55puIj4/H+PHjjfkqLx12dsDYsarPVV0oi4+l/PhjvuzYocqulpoKrF2reZ5MprIOhTz7wqxbqamqpDfFKS6UIhEQHq4amickYSkqAlYUn9UEPKtcjRo8F7cAY/waALfoha5tIdj68WO9TUAQxEuOUULp7++PlStXlihfuXIl/Pz8DL7O6tWrkZGRge7du8PLy0u5bN68WXlMQkKCxhyXz549w4QJE9CkSRP069cPmZmZOHHiBJqWOZnoy8uUKartqjqGUkDdovztN+B5QifMmQNERqrEKjpa87yrV3n2NQcH1Xe0t4cyclZbJjSFAjhzhm937KgqDw7m4y+Linjyk717efmGDZpZw5KS+HASAJg2TXWt/ft55jdra822F/olyaIkiFcbo/oow8PD0b9/f+zfv185hvLkyZN48OAB9uzZY/B1DIkjOlxsBoFvv/1WmT6vutKoETB3Lk9hV1HTZJUXgkUZHw9MmsS358zh9Qe4Zfj++1z4Hj3i1hygsgzbt9ecy7h+fW5R3r3LXajqxMZy4bOyKjn/tHpXRNeufLaea9d4nvD33uPly5fz9J9iMbdoR47kVu1XX/H948fziR8EyKIkiOqBURZlYGAgbt26hSFDhiA9PR3p6ekYOnQorl27hj/++KO860hoYc4c4M8/q2YydHW8vHj6SoWCi8+wYcDs2ar99vYqwVO3KgVrrvj0pvr6KQVxbdNGf7uIRCrRXr2au1YzMvg2AKxbxwU5Ph7o3ZunRpVIgA8+0LwOWZQEUT0wSigBwNvbG4sWLcK2bduwbds2LFy4EM+ePcOvv/5anvUjXnLEYpVV6e8PrF+vaSECfKYgQLtQCn2NAkI/pTbXa/H+SX289Rbvr7xxg9/3p5+4ddu0KRAWBkREcIE/d44fP2oUUKeO5jX0WZRPnwJ//605/SVBEC8nRgslQRjKxx8DffrwuWxtbEruLy6UmZl8zmSAz3erjiEWpXr/pC4cHLggAsB33/EF4ME8YjFPqr58uer4jz4qeQ19FuWMGcDrr3PBJUoil/PhNQTxMkBCSVQ448bxABpd4xaF6R6vX+f9j+fPc3do7dolJ1UQLMriQpmbqxruYYhFCQDCbHA7dvBAnpo1eU5dgXfeAb7/Hvj5Z24NF0ewKJ88QYkUgteu8XUpaYurLWPG8PYT8hYTRFWGhJIwOa6uPLgG4MkJdLldAZVF+fChaq5ggAuSXM77RA3NWdGypab1+cEHfHJ2AZGIB/roGnnk4sL7LhkrmXhdmA4tVnemvWrNf//xv9eJE6auCUGUTplCQYYOHap3f3p6+ovUhajGBAZyK+zIEZXIFA/kAbio2ttz92xcHO9TBDT7J8uS93byZD7biKMjMGFC2eoskfDhKsnJvJ/S25uXFxaq+i1fNaGMiAB+/RXo1Ano35+7qMuasTIlRfViYYpJvQmirJRJKB0cHErdP3r06BeqEFE96daND/KPjuYPUkC7RSkScfdrTAwP6BGEUhhFZEj/pDphYdzt2r69ZsYjQ/H05EKp3k/56JFqWrF797hwVtWk9WWBMd73+uABtwjnzuVZlT79tGREsD4EtzRAQkm8HJRJKNcWT59CEOVE1658HRPD10JAjTYaNODHCf2UWVmqCa379y/bfSUS4JNPyl5fAW2Rrw8eqLblci4GQt9qaWRnAxMnAoMHAyNGGF+viuDSJf7drK2Bvn25WKakALNm8QQNhr4MkFASLxvUR0lUCby9Vf2PANC8ufYIWaDkEJF9+4CCAn6+jmlJKwxtka+C61igLO7XTZt4xqCpU6ve0JK//+brXr147tvkZC6OOTkqL4AhXL2q2iahJF4GSCiJKoMwTATQH7lafIiIMKvIkCGV7+IszaIEgJs3Db+ekBw+JYUHNlUlBKEcOJCvra2hnDWlLIKnblGmpenO20sQVQUSSqLKoC6U2gJ5BNQtysJC1XRcgwdXWNV0os2iFIRSmN2tLBal+iwqW7a8WN3Kk6QknsQe0HRvC7l8DRVKxjSFsiznEoSpIKEkqgyGCqVgUd6/z/
vJMjO5YJU1kKc80GdRCuNDDRXKR49U05EBwLZtPPVfRbN7N+8jFiKHtSG8jLRvrzm2taxCmZQEPHvG+4aFsakklERVh4SSqDLUqcMDWd54g/dR6sLbm1trRUU8IQDA550s6zCF8kAQSm19lEFBfG2oUAqu1mbNeOag5OTyG2e4Zg0Pjtq3T7N8507usj52DNAyIZCS4m5XgbIKpWBNNmigilgmoSSqOiSURJVBJAJ+/BH46y/9oicWq9yvUVF8PWRIxddPG4J1pc2i7NGDr588MSxdmyCUPXpw4QfKx/3KGLB4MY8U7tuXR/nKZMCuXTyytqiIH6dLlPPyVO08YIDmvrIKpRDI07x52c8lCFNBQkm8lKgPt7C3B157zTT1ECzKp095f2l2NnctAkDjxqpgl1u3So8yEvonu3YFhg/n2+Xhfr1zh8+EIgQ6hYfzHLrDh3PBFIKg7t3Tnrf24EEulrVqlUzlZ6xF2azZyyOUCgVAuVSqNySUxEuJ+lCS/v01U89VJs7Oqim9UlJU1qS9PV8aNeKfb91SncMYz0urLoDp6apctV268CEYdna831Jf36Eh/PcfX7/2Gh/W4eDAx0TKZEBICM+2I8zfefx4yfMFt+uAASWjigWxe/SIT7RdGi+jRRkezv/OglVNVD9IKImXEnWL0lRuV4C7gd3d+XZyskooa9fm68aN+frmTZXCrFrFLbMPP1Rd5+RJLqD16/N8tZaWfPYRgIvbiyAIZXAwnw/04kXeZu+/r5rTtHNnfkxx9ytjPNgHKNk/CfCUgra2/Lj4eP31UI94Vbco79+vemNG1Tl4kNd9wwZT14QwFSSUxEuJYFFKpbzfzZSo91MKQim4XFUWJRdKxoAVK3jZypUqS1Pd7SoguF+3blWlxCsrMhl/0ANcKAEeNLV9O59aTLCGO3Xi6+IWZUwMtxZtbLS7t0Uiwy3DhATumjY3B3x9gRo1+LZMxu9RVRHqJqRJJCqGwsKq6+ImoSReSrp3B4YO5UEqtramrYt65GtpQnnypEgZBSuXA198wbeFQB5hSAkA9O7NBSohQTWjij5OneJ9msXLsrN58nZtU4UJCBblhQuas7Js2sTXwcHcytWGLqH8918emCUgWJONGnGBlEhUk3rfu1d1k+EKQnn/fulWM2E8EyZw78zt26auSUlIKImXEqmUi4K6+9JUqFuUwtAQwfUqCOW9e4BcLsK6dfxfrksXbo1t2cKtSUEI1S1KKyuVu7O06NeiIt6HOHw4sGePqlxwu/bqpT+SuE4d7vKVyYBz51TX/PNPvv3WW7rPFYRSSCkI8PlBBw8G3nyTW6+Aptu1+Lnq40fLQlaW8ecaQk4OkJGh+nzkSMXdqzqTmwts3sx/f4a8FFY2JJQE8YKoJx0oblHWrMkFTyYTIT7eHlu2cMtpyRJAmGgnNJTnqnV35y5JdUaO5OuICP3u14sXVUNQPvtMFSik3j+pD5GopPt1/36eIMDFRX+yeW0W5enTquCeKVN4JLB6IE/Jc42zKIcMARo2VGUNKm+Ku4RJKCuGo0f5/wBQtrzBlQUJJUG8IOpp7IoLpVjMH+QAEBHREDk5Ivj6clfn/PncMhYexoKVqU6fPty1/OCB/ujXQ4dU25cucZfpkycqAenVq/TvIbhfBaFct46vQ0P1RxVrE0p1QUlO5tNz6bcoyy6UV64ABw5wy/fnn8t8ukEUF0rqp6wYhBc6gISSIF5J9PVRAir366lTfGbnt9/mgli7NvDee6rj1N2uAlZWqujXiAjddRCEUogGnjUL2LuXW6HNm6smldaHeuTrs2fAjh3885gx+s8T7nnvnsrqjY7m67fe4t/1t9+41QtotyiNcZ/++qtqOyJCs2+1vBCEsm1b/tJz717JpPfEi0NCSRCvOIJFeeMG72sBuMtVQBBKABCLGdTnNp85E3B05NtCJp/iqLtftSUfkMlUUbPr13PhvndP1X9bmttVoFUrLsxPnwILF3JXWNOmQJs2+s/z8VFNt5WayqMXT55Ufb8pU/i2XM4DggRxBIy3KAsKgD/+4NsWFrwfURjvWZ4IQtmkiWp+VEPdrxcu8LabMaNycva+rCQmak69RkJJEK8ggkWZmsrX7u6aEaLCWEoA6NOHaVh3zs78wfv334Cfn/br9+7Nkxc8eqQSIHUuXOCRrU5OQEAAMHu2Zn0MFUpzc56xBwCWL+frsWNLn7pMKlW9GNy7x929+fl8jGXjxjwyWQhuatKER7sKCFGvaWki5OYaPo/8jh1c0GvWBD74gJf9/rvBpxuMIJQ1avBIa8AwoYyO5sNpLlwAli0DPvrI+CE+VZ1nz3h0tbEIiRyE3xkJJUG8gqjPpgFoul0BTYty7NiSpoWfX8kcqupYWqpyv2pzvwpu18BA7h4cP15lqUml2l26uhDcr3I5v9abbxp2nno/peB27daNP/zs7Ljr1c6O55ZVx96eCyoAPH5sbXA9BbfruHFczAGe8F095255oC6UgYF8u7R+yj17+MtNZiZ/MQCAb74BvvzSuDrk5gJffCHG1asuxl2gghk1ir+gGdt/K7hdBY8KCSVBvII4OmoGuwjWk0DTpkDNmgw1a2ahXz/jzArB/bplS8ksNoJQCgkBLCxUD+V+/fgEy4YiCCXALVEvL8PO0yWUAj17cstj5syS5wp9nI8f2xh0r7g4lRUybhy3Wtu35+2ycaNh9RXIy+NjI8+d0xzeIqAulF268JeHO3d0J0jYupW/1OTn85ef8+e5RQnwaOQ1a8pWP4BHSIeHS7B2rZ4pdUxEfLxK6NT7GQ1FoVD9LYWXMhJKgngFEYlUaeyAkhallRVw9WoRvv76iNE5aYODeY7WpCTN7DkymSpZgXrmnBEjeFadtWvLdp+AANV2aUE86ghCeeuWqn6CBSag7nLVdm5ysmGKLnynoCCV61bo9zXU/XrhAn8JsLbmY0jbteOWf/HB7upC6ejI+3EB7e7X7GxuzRcV8anitm/nf/sPP+QiCQCTJqn+XoaQlMStUQBITLSpcu5b9bR+xrhfL13iXQS2tqqgtdxc3t9dlSChJIhyQN39WlwoAf5AtrQ0PqGphYUqp626+/XsWf5gcXHRHHYBAC1bcnEtC87OwLRp3BoaPNjw8wSx++cfngTAwUGVaN3Qc5OTS7co5XKVUP7vf6rykBDexxoTw4eNlMamTaqZUszN+SKXa+a6lcu5UAFcKAGV+GsTyrVreVBRw4ZcsM3NVfsWLuR1ZKxsVuW8eaoAsbw8c6SlGX4uwC1aYdLt8oYxVUIKgP8Wy5qzVz1hv5MTf7EAqp5VSUJJEOWAENADaBfK8kBwv27YoHITCm7X7t3Lb+Lqb7/lwUW6UtZpQxC7p0/5uksX3RakrnMN6aP8+2/g4UMu6OpC7uqqSoogRMPqIyaGr1eu5BG077zDP1+/rjomJUXVVyv8fYWAnkOHNINz5HJVANS0aSW/u0gETJ3Kt3fuNGymldhY4Jdf+LZUym92967h0cEPH3JhHzBAM6q0vLh0ibeXVMpTLWZna7afIagnxFD3zJBQqrFky
RK0a9cOdnZ2cHd3x+DBgxFrwHTwW7ZsQePGjWFpaYkWLVpgj3rOLoIwAeoWZfE+yvIiKIi7/p494w+WpCRVAIWp5uMUUB/yAWj2Txp6bny8PQoLdR+XkaEadzphQkkhF9yva9fqd90xphrT2b49f0ALQTc3bqiOE9yunp6q5PFdu3JL8fZtzf7Qv//mLy9OTtAY/qNOp07cMs3MLNmfl5rKJ9T++2/VUJLPP+cCPHAg0KGDIJS6v1dxPvpI1Q6GPCJ37dIeVa0LIY/vgAFAhw58uyzu15wclRtaiMx2c+NrEko1jhw5gilTpuDUqVOIioqCTCZDcHAwcvT8yk+cOIHQ0FD873//Q0xMDAYPHozBgwfjakW8MhGEgVSGRWluzh949erxoJk+fVT9gYKlYyrc3LhVIVC8f1IfbdsC7u4MT59a4bvvdD+SZszgVlL9+jyhQnEGDuSzyqSlAT/8oPt+jx7xYyQSlXu4aVO+VreI1PsnBRwdVfeeMoXXB+BWOMD7IG10eJDFYlXU7+bNmvv+7//4vJevv85F+4sveC5jsZgH85Q11d/hw6qE9gBPUK+P2FgehNSvH+9jLQ25XNU/+eabQMeOfLssQnn4MB9z6+OjSt1YVS1KsCpESkoKA8COHDmi85iRI0ey/v37a5R16NCBTZw40aB7ZGRkMAAsIyOjTHUrLCxkO3bsYIWFhWU6r7pQ3dvn++8ZAxiTSBgrKiq5vzzb584dxjw8+P0AxtzdGVMoXviyL0yLFrw+1taMlfVr/vabjAGMWVkp2P37Jffv26f6vnoeD+z33/kxLi6MZWZqP2bXLn5M8+aqsqQkXiYWM5aXx8tWreJlgwdrni+TMda+Pd/XqxdjZ8/ybTMzxh4+1P89T5zgx9raMpaby8uuX2dMJOLl9vaq7wkw9vbb/Jj584sYwFhYmFz/DZ7Xr3lzfn7v3nxtYcFYdrbuc4TvCjB24UKpt2AHDvBjHR0Zy89nbOdO/rlp09LPFRgwgJ/z3nuqsrFjednixYZfhzHj/78M1QPDR/hWAhnP0/Q7OzvrPObkyZP4sNiUEb1798YOId9WMQoKClAgZNsFkJmZCQCQyWSQyWQG1004tiznVCeqe/u4uIgAmKFGDQaFoqhEJpbybJ/atflkykFBZsjIEKFbNwWKikw/83GdOhJcuSJGQIACgBxl+aojR8qwbFk6rl1zxXvvKbBtm+r7ZGQA48ebARBh6lQ5AgIUOq89fDiwcKEZbt0S4dtv5Zg5s+S41XPnxAAk8PdXQCbj93F2BhwdzZCeLsK1azL4+QEJCfw4Ly85ZDLN6/zyC9C+vRmiokS4cYMBEGHECAXc3fV/7zZtgNq1zZCQIMLu3UUYPJhh3jwJGBNj0CAFfvtNjnXrxFi5UoyCAuCLL4ogkwE+PgoAEty9y0r9Da1cKcbVqxK4uDCsX1+Ejh3NcP++CPv3F+kcnnT4sASCg/HoUTmaN9efSuiPP/jxw4YpIBbLn2ctMseNGwxpaUWlBpHdvg3s3s2jnSZNkinbzMWFt3lycsk214ex/1+GHl9lhFKhUGDatGno3LkzmjfXPV4oOTkZHup+LgAeHh5IFkLYirFkyRLMmzevRPl///0H67IMMHtOlDDoh9BKdW2f7GwrWFj0gK/vI+zZc1HnceXZPjNnOmHbNl907BiLPXsySj+hgnFyagCgGerVu4o9e8qevHXiRDt88EF3/P23GHPnnkH79o+RlGSN9eub4eFDb3h6ZqNLl8PYs0f/S8GAATXwzTdtER4uR4MGUbCx0fQl/vtvewBekEqvYc8eVSZ3T88uSE93wcaNl/Dw4SOcOdMKQG1kZcViz56SkyS++WY9/PJLCzx8yN2hbdpEG/R3aNWqGRISGuD775ORkhKLiAg+0j4w8AiOHs1E/frclatQAJcv8yU11RFAIG7elGHPHt1+1PR0Kb74oicACUaOvIRTp+LRsKE/7t+vg59/TgBQMiSYMWD//mAAPOR0+/ZE+Phc0HmPwkIxIiL6ABCjXr0T2LOHT1vj4RGEx49tsGrVWfj7p+ptgzVrWgCoh7Ztk3H79mnlsJwnT+oDaI6LFxOxZ4/uOuiirP9fuUJIcSmIGKsaI3MmT56MvXv34tixY6ipniizGBYWFli/fj1CQ0OVZatWrcK8efPwWEtaDm0WZa1atZCWlgZ7e3uD6yeTyRAVFYVevXrBXD3umwBA7QPwMH4rK+0p36pD+8hkwOXLIrRuzUpNe1fyXN4+R470w7ffmqNmTQYfH4bjx1V9lgcOFKFr19IfV3I50Lq1GW7cEGHWLDlmzdK0THx9zRAfL0JUVBECA1XXmzhRgrVrxfj8cznmzFGgXz8J9u8X49dfi/DWWyXvq1AAfftKcOiQGF27KnDggGFW/blzInTqZAZra4ZevRh27hRj4EBNK7o4KSky1KzJX+yfPpXpnKw8LEyCLVvEaNWK4cSJIkgkQGSkCCEhZvD1Zbh2rWQH5N27QJMmqt9kvXoMN2/q7qjctUuE4cPNUKsWw+3bRcpo67fekmDzZjHmzpXjs890W4PPngF165ohN1eEffuK0KOHqm3//FOEt982Q8+eCuzda7iXxNj/r8zMTLi6uiIjI0OvHlQJi3Lq1KnYvXs3oqOj9YokAHh6epYQxMePH8OzeB6x50ilUkil0hLl5ubmRj2wjD2vulCd28eQMYuvcvuYm6uCOoxl9mye3ebBAxEePhRBJOJThE2bBvToYdjjytycjz8cORJYvlyCDz6QwMmJ73v6lGeTAYC2bc00xjoKjqzYWAnMzSVITOSfa9fWPE6dTZt4QoC33xbD3Nyw2MiOHXmSg/v3Rdi5k79RzJun/3x3d8DWthDZ2RZ48MBca17gLVv4IpEAa9aIYGnJK927Ny+7fVuER4/MUaeO5nlCpGvz5nwqtHv3RHj61BzFHHclju/XTwSpVNUwAQE8SOnsWd5+uli/nr9UtmgBBAebabxUCXmQU1MNb091yvr/ZeixJo16ZYxh6tSpiIyMxMGDB1FXSLOhh4CAABw4cECjLCoqCgHqKUUIgngpsbHhwy769gW++opPafXvv/xzWRg2jD+IMzOB1atV5cKwkLp1VbO2CBQfIqIt6rU47u48XaAw56ghiESqMbEAj3QVMv7ow9OTjwbQNkTk8WNg8mS+/dlnPJJYwMFB9QKjLc2ckHJw4EBV9K++YSJCZGvxR65wj9OndSeALyoCVqzg29OmlfS+VNWoV5MK5ZQpU/Dnn39iw4YNsLOzQ3JyMpKTk5GnNrHc6NGjMVMtQeT777+Pffv2YdmyZbh58ybmzp2Lc+fOYaowmpcgiJeazp35MJiPP9YvUvoQi/lwEoAnUBeCq4REA9qESRDKW7d4ANHz2EKj66CPkBDV9pw5hp2jSygZAyZO5BN1t2zJh5YUp3dvvtY2TEQ9N2+nTnxbPUOROoWFPC8uUFIoW7bkGaTS0jQn8VYnMpK//Li58TR/xRGEMjW1ak1NZlKhXL16NTIyMtC9e3d4eXkpl81qg4wSEhKQJOSRAtCpUyds
2LABa9asgb+/P7Zu3YodO3boDQAiCKL6MXw4n53k3j1VyjlBKFu2LHl87do81aD6/J62tvwa5U2rVsDXX3NrV5jnsjS8vLQL5Z9/8mw/5ubcraktn7AglAcOaI6TfPiQt49YzEVSED9dFuXFizyTkYuLauyjgFSq+i6nTnFrfsYMLoouLjy37rhxfP/kydozPwkJB+Ry3pcpkJPDU/GVNUVeeWHSPkpD4ogOa5m7ZcSIERhRfL4egiAINaytgdBQ4Kef+HCO117Tb1GKxXwmkgsXVDNaVIQ1CXCX4/TpZTvH05NHaKoLZWYmT1YA8H5ZXXOatmnDh8A8fcpdo8IsMcILQatW/IVAsCjPnePWY3HRFQS0Y0ftQWsdOnCRXLmSfz9t057Z26vcxMWxsOAu8fR07n51cVF9t6VLeX+wMP9oZUK5XgmCeGUREqdv2wYkJgI3b/LPuvoEhT66/fv5uqKE0hi0uV737uWi0qABT1mnC4mEB0UBmu5XQSiFlIMNG3JBzc9X9eeqIwilrpAQ9Qw9jx/z6+3cyYOEYmK4SN+8WXIOV3W09VMKqe727tV9XkVCQkkQxCtL27bcyiooAD79lPd7ubmpoiuLI/RTCqnsqqJQxsdDOUB/506+HjpUlY9WF3368PWKFXxsJlBy7lCRSL/7VQjk0RXdHBjIXbB2dtwCvHKFBys1bcrd3e3blz7HaXGhVChUSd1PnECZElmUFySUBEG8sohEKqtSmFWkVSvtbkNAJZQCVUkonZzyYWnJIJcDCQlcMIRk54MGlX7+qFHctZqezpOQnzzJLT2Az/YioEsok5K4SIvFXPC04eXFs+7Ex/P+SWPmXy0ulPHxfOo2gPdVXih7HoIXhoSSIIhXmrAwzQe2vqEYgutVoCoJpVismqj67l1uDWZkcAtZmL1DH5aWPCCmZUvuFhVmnGnWjE9TJiAIZfHIV/XxlnZ2uu9TqxaU41aNobhQCtavgGAFVyYklARBvNK4uHDXpIA+oaxfX3PC5aoklADPmgNwody1i5cNHGj43J+OjryPsmFD7o4GSk6J1r49F+UHD1SzowCl90+WF+pDRACVUAquZW2TZlc0JJQEQbzyCO5XQL9QmplpJg+oakJZv75KKIX+SUPcruq4u/NgJWHeVGEuSAFbW1X0rLr7tbT+yfKiuEV55Xl62uHD+frYscofJkJCSRDEK0+PHnwuyBEjeISoPtT7KauaUArzUu7cyfvurKz4hN5lpVYt4OxZPlG0NqEVho8sW8YtT32JBsobXa7Xt97iLt+MjJLu2IqGhJIgiFcesRiIiOCLuJSnniCUYjF05js1FYLr9c4d/rlXLz5e1Bjc3YEBA7QHNn34IXfTnj7NxzxeusSHjDg7ly1dn7H1ArhQ5uZCObNI69aqoKPK7qckoSQIglBDCOjx9Cx9yEVlIwilwOuvV9R9eIJzsRhYuxaYMoWX60o0UJ6oC+X166ohPR4eqv7Uyu6nJKEkCIJQ47XX+DCHIUNMXZOS1KmjsohFIm4RVhTBwXwsJMDdtEDFu10BlVA+ewacP8+3/fz49w0M5J+jo3UnXq8ISCgJgiDU8PDgM4esXGnqmpTEwoL3LwLcuqto1/AHH/C+QYGKDuQB+NASIYr34EG+btGCr9u04f2yT56okkJUBiSUBEEQxaho9+KL0KgRX5c12tUYRCKeK7dXL8DfXxXkU5GIxark6MKMikIUroWFKh9tZfZTklASBEG8RCxezPO6VtbMglZWfOzlxYt8uzIQ3K9PnvC1erJ3U/RTVrGuaoIgCEIfbdrwpTKpbAtbEEqAW5jqGZOK91NWRt3IoiQIgiCqFOpC2bChpiXbvj13wSYlqYbJVDRkURIEQRBVCnWhLD7HppUVEB7Og5pKm4mkvCChJAiCIKoU6kIpRLyq8/77lVcXgFyvBEEQRBVDn0VpCkgoCYIgiCoFCSVBEARB6EFIpGBvD/j4mLYuAPVREgRBEFWMNm2A8eN5hGtVSP5AQkkQBEFUKSQS4OefTV0LFeR6JQiCIAg9kFASBEEQhB5IKAmCIAhCDySUBEEQBKGHahfMw57P9pmZmVmm82QyGXJzc5GZmQlzc/OKqNpLDbWPfqh99EPtox9qH/0Y2z6CDrBSZoGudkKZlZUFAKglzH5KEARBVGuysrLg4OCgc7+IlSalrxgKhQKJiYmws7ODqAwDdDIzM1GrVi08ePAA9vb2FVjDlxNqH/1Q++iH2kc/1D76MbZ9GGPIysqCt7c3xGLdPZHVzqIUi8WoWbOm0efb29vTD1UP1D76ofbRD7WPfqh99GNM++izJAUomIcgCIIg9EBCSRAEQRB6IKE0EKlUijlz5kAqlZq6KlUSah/9UPvoh9pHP9Q++qno9ql2wTwEQRAEURbIoiQIgiAIPZBQEgRBEIQeSCgJgiAIQg8klARBEAShBxJKA/nhhx9Qp04dWFpaokOHDjhz5oypq1ThREdHY+DAgfD29oZIJMKOHTs09jPGMHv2bHh5ecHKygpBQUG4ffu2xjFPnz5FWFgY7O3t4ejoiP/973/Izs6uxG9RcSxZsgTt2rWDnZ0d3N3dMXjwYMTGxmock5+fjylTpsDFxQW2trYYNmwYHj9+rHFMQkIC+vfvD2tra7i7u+Ojjz5CUVFRZX6VCmH16tXw8/NTDgIPCAjA3r17lfurc9sU58svv4RIJMK0adOUZdW9febOnQuRSKSxNG7cWLm/UtuHEaWyadMmZmFhwX777Td27do1NmHCBObo6MgeP35s6qpVKHv27GGff/452759OwPAIiMjNfZ/+eWXzMHBge3YsYNdunSJvf7666xu3bosLy9PeUyfPn2Yv78/O3XqFDt69Chr0KABCw0NreRvUjH07t2brV27ll29epVdvHiR9evXj9WuXZtlZ2crj5k0aRKrVasWO3DgADt37hzr2LEj69Spk3J/UVERa968OQsKCmIxMTFsz549zNXVlc2cOdMUX6lc2bVrF/vnn3/YrVu3WGxsLPvss8+Yubk5u3r1KmOsereNOmfOnGF16tRhfn5+7P3331eWV/f2mTNnDmvWrBlLSkpSLqmpqcr9ldk+JJQG0L59ezZlyhTlZ7lczry9vdmSJUtMWKvKpbhQKhQK5unpyZYuXaosS09PZ1KplG3cuJExxtj169cZAHb27FnlMXv37mUikYg9evSo0upeWaSkpDAA7MiRI4wx3h7m5uZsy5YtymNu3LjBALCTJ08yxvjLiFgsZsnJycpjVq9ezezt7VlBQUHlfoFKwMnJif3yyy/UNs/Jyspivr6+LCoqigUGBiqFktqHC6W/v7/WfZXdPuR6LYXCwkKcP38eQUFByjKxWIygoCCcPHnShDUzLXFxcUhOTtZoFwcHB3To0EHZLidPnoSjoyPatm2rPCYoKAhisRinT5+u9DpXNBkZGQAAZ2dnAMD58+chk8k02qhx48aoXbu2Rhu1aNECHh4eymN69+6NzMxMXLt2rRJrX7HI5XJs2rQJOTk5CAgIoLZ5zpQpU9C/f3+NdgDotyNw+/Z
teHt7o169eggLC0NCQgKAym+fapcUvaykpaVBLpdrNDYAeHh44ObNmyaqlelJTk4GAK3tIuxLTk6Gu7u7xn4zMzM4Ozsrj3lVUCgUmDZtGjp37ozmzZsD4N/fwsICjo6OGscWbyNtbSjse9m5cuUKAgICkJ+fD1tbW0RGRqJp06a4ePFitW+bTZs24cKFCzh79myJffTbATp06IB169ahUaNGSEpKwrx589C1a1dcvXq10tuHhJIgyoEpU6bg6tWrOHbsmKmrUqVo1KgRLl68iIyMDGzduhVjxozBkSNHTF0tk/PgwQO8//77iIqKgqWlpamrUyXp27evctvPzw8dOnSAj48PIiIiYGVlVal1IddrKbi6ukIikZSIpnr8+DE8PT1NVCvTI3x3fe3i6emJlJQUjf1FRUV4+vTpK9V2U6dOxe7du3Ho0CGNKdw8PT1RWFiI9PR0jeOLt5G2NhT2vexYWFigQYMGaNOmDZYsWQJ/f38sX7682rfN+fPnkZKSgtatW8PMzAxmZmY4cuQIvv/+e5iZmcHDw6Nat482HB0d0bBhQ9y5c6fSfz8klKVgYWGBNm3a4MCBA8oyhUKBAwcOICAgwIQ1My1169aFp6enRrtkZmbi9OnTynYJCAhAeno6zp8/rzzm4MGDUCgU6NChQ6XXubxhjGHq1KmIjIzEwYMHUbduXY39bdq0gbm5uUYbxcbGIiEhQaONrly5ovFCERUVBXt7ezRt2rRyvkglolAoUFBQUO3bpmfPnrhy5QouXryoXNq2bYuwsDDldnVuH21kZ2fj7t278PLyqvzfT5lDkaohmzZtYlKplK1bt45dv36dvfPOO8zR0VEjmupVJCsri8XExLCYmBgGgH3zzTcsJiaGxcfHM8b48BBHR0e2c+dOdvnyZTZo0CCtw0NatWrFTp8+zY4dO8Z8fX1fmeEhkydPZg4ODuzw4cMaIey5ubnKYyZNmsRq167NDh48yM6dO8cCAgJYQECAcr8Qwh4cHMwuXrzI9u3bx9zc3F6JEP9PP/2UHTlyhMXFxbHLly+zTz/9lIlEIvbff/8xxqp322hDPeqVMWqf6dOns8OHD7O4uDh2/PhxFhQUxFxdXVlKSgpjrHLbh4TSQFasWMFq167NLCwsWPv27dmpU6dMXaUK59ChQwxAiWXMmDGMMT5EZNasWczDw4NJpVLWs2dPFhsbq3GNJ0+esNDQUGZra8vs7e3ZuHHjWFZWlgm+TfmjrW0AsLVr1yqPycvLY++++y5zcnJi1tbWbMiQISwpKUnjOvfv32d9+/ZlVlZWzNXVlU2fPp3JZLJK/jblz9tvv818fHyYhYUFc3NzYz179lSKJGPVu220UVwoq3v7hISEMC8vL2ZhYcFq1KjBQkJC2J07d5T7K7N9aJotgiAIgtAD9VESBEEQhB5IKAmCIAhCDySUBEEQBKEHEkqCIAiC0AMJJUEQBEHogYSSIAiCIPRAQkkQBEEQeiChJAiCIAg9kFASBEEQhB5IKAniJSc1NRWTJ09G7dq1IZVK4enpid69e+P48eMAAJFIhB07dpi2kgTxEkPzURLES86wYcNQWFiI9evXo169enj8+DEOHDiAJ0+emLpqBPFKQLleCeIlJj09HU5OTjh8+DACAwNL7K9Tpw7i4+OVn318fHD//n0AwM6dOzFv3jxcv34d3t7eGDNmDD7//HOYmfH3Z5FIhFWrVmHXrl04fPgwvLy8EB4ejuHDh1fKdyOIqgK5XgniJcbW1ha2trbYsWMHCgoKSuw/e/YsAGDt2rVISkpSfj569ChGjx6N999/H9evX8dPP/2EdevWYdGiRRrnz5o1C8OGDcOlS5cQFhaGUaNG4caNGxX/xQiiCkEWJUG85Gzbtg0TJkxAXl4eWrdujcDAQIwaNQp+fn4AuGUYGRmJwYMHK88JCgpCz549MXPmTGXZn3/+iY8//hiJiYnK8yZNmoTVq1crj+nYsSNat26NVatWVc6XI4gqAFmUBPGSM2zYMCQmJmLXrl3o06cPDh8+jNatW2PdunU6z7l06RLmz5+vtEhtbW0xYcIEJCUlITc3V3mcMFu8+meyKInqBgXzEMQrgKWlJXr16oVevXph1qxZGD9+PObMmYOxY8dqPT47Oxvz5s3D0KFDtV6LIAgVZFESxCtI06ZNkZOTAwAwNzeHXC7X2N+6dWvExsaiQYMGJRaxWPVYOHXqlMZ5p06dQpMmTSr+CxBEFYIsSoJ4iXny5AlGjBiBt99+G35+frCzs8O5c+cQHh6OQYMGAeCRrwcOHEDnzp0hlUrh5OSE2bNnY8CAAahduzaGDx8OsViMS5cu4erVq1i4cKHy+lu2bEHbtm3RpUsX/PXXXzhz5gx+/fVXU31dgjANjCCIl5b8/Hz26aefstatWzMHBwdmbW3NGjVqxL744guWm5vLGGNs165drEGDBszMzIz5+Pgoz923bx/r1KkTs7KyYvb29qx9+/ZszZo1yv0A2A8//MB69erFpFIpq1OnDtu8eXNlf0WCMDkU9UoQhFa0RcsSRHWE+igJgiAIQg8klARBEAShBwrmIQhCK9QrQxAcsigJgiAIQg8klARBEAShBxJKgiAIgtADCSVBEARB6IGEkiAIgiD0QEJJEARBEHogoSQIgiAIPZBQEgRBEIQe/h/3J/W2/C5iTwAAAABJRU5ErkJggg==\n"},"metadata":{}}],"source":["import pandas as pd\n","df=pd.DataFrame(trainer.state.log_history)\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","\n","# Assuming df is already defined, and train_loss and eval_loss are subsets of df\n","train_loss = df[['loss', 'step']]\n","eval_loss = df[['eval_loss', 'step']]\n","\n","# Remove NaN rows in both dataframes\n","train_loss_clean = train_loss.dropna()\n","eval_loss_clean = eval_loss.dropna()\n","\n","# Plotting the loss vs step for train_loss\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting the loss vs step for eval_loss\n","plt.figure(figsize=(5, 
2))\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting both losses together\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train and Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n"]},{"cell_type":"code","source":["df"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":443},"id":"wzsaK7v-kEJf","executionInfo":{"status":"ok","timestamp":1717989752528,"user_tz":-240,"elapsed":915,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"2ca81726-4644-48f1-eb2d-cee970d4396c"},"execution_count":27,"outputs":[{"output_type":"execute_result","data":{"text/plain":[" loss grad_norm learning_rate epoch step eval_loss \\\n","0 3.3778 5.859917 4.949495e-05 0.050378 5 NaN \n","1 2.8616 5.931467 4.898990e-05 0.100756 10 NaN \n","2 NaN NaN NaN 0.100756 10 2.892442 \n","3 2.8077 4.179818 4.848485e-05 0.151134 15 NaN \n","4 2.8923 3.683869 4.797980e-05 0.201511 20 NaN \n",".. ... ... ... ... ... ... \n","144 1.6750 3.195017 1.010101e-06 4.886650 485 NaN \n","145 1.9193 3.275897 5.050505e-07 4.937028 490 NaN \n","146 NaN NaN NaN 4.937028 490 2.587291 \n","147 1.7402 3.366930 0.000000e+00 4.987406 495 NaN \n","148 NaN NaN NaN 4.987406 495 NaN \n","\n"," eval_runtime eval_samples_per_second eval_steps_per_second \\\n","0 NaN NaN NaN \n","1 NaN NaN NaN \n","2 13.2966 7.446 3.760 \n","3 NaN NaN NaN \n","4 NaN NaN NaN \n",".. ... ... ... \n","144 NaN NaN NaN \n","145 NaN NaN NaN \n","146 13.2770 7.456 3.766 \n","147 NaN NaN NaN \n","148 NaN NaN NaN \n","\n"," train_runtime train_samples_per_second train_steps_per_second \\\n","0 NaN NaN NaN \n","1 NaN NaN NaN \n","2 NaN NaN NaN \n","3 NaN NaN NaN \n","4 NaN NaN NaN \n",".. ... ... ... \n","144 NaN NaN NaN \n","145 NaN NaN NaN \n","146 NaN NaN NaN \n","147 NaN NaN NaN \n","148 2615.821 1.518 0.189 \n","\n"," total_flos train_loss \n","0 NaN NaN \n","1 NaN NaN \n","2 NaN NaN \n","3 NaN NaN \n","4 NaN NaN \n",".. ... ... \n","144 NaN NaN \n","145 NaN NaN \n","146 NaN NaN \n","147 NaN NaN \n","148 2.138562e+16 2.266931 \n","\n","[149 rows x 14 columns]"],"text/html":["\n","
\n"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"dataframe","variable_name":"df","summary":"{\n \"name\": \"df\",\n \"rows\": 149,\n \"fields\": [\n {\n \"column\": \"loss\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.3434175880701487,\n \"min\": 1.675,\n \"max\": 3.3778,\n \"num_unique_values\": 99,\n \"samples\": [\n 2.0484,\n 2.2095,\n 2.1843\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"grad_norm\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.520255763973683,\n \"min\": 2.5443077087402344,\n \"max\": 5.931466579437256,\n \"num_unique_values\": 99,\n \"samples\": [\n 3.1351263523101807,\n 3.144888162612915,\n 3.4184834957122803\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"learning_rate\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 1.4506471329641487e-05,\n \"min\": 0.0,\n \"max\": 4.94949494949495e-05,\n \"num_unique_values\": 99,\n \"samples\": [\n 1.8181818181818182e-05,\n 2.9292929292929294e-05,\n 1.5151515151515152e-06\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"epoch\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 1.448976098446401,\n \"min\": 0.05037783375314862,\n \"max\": 4.987405541561713,\n \"num_unique_values\": 99,\n \"samples\": [\n 3.1738035264483626,\n 2.065491183879093,\n 4.836272040302267\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"step\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 143,\n \"min\": 5,\n \"max\": 495,\n \"num_unique_values\": 99,\n \"samples\": [\n 315,\n 205,\n 480\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_loss\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.06697069290132963,\n \"min\": 2.5605547428131104,\n \"max\": 2.892441749572754,\n \"num_unique_values\": 49,\n \"samples\": [\n 2.6067590713500977,\n 2.5914108753204346,\n 2.587324857711792\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_runtime\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.04416750746857253,\n \"min\": 13.0695,\n \"max\": 13.3754,\n \"num_unique_values\": 49,\n \"samples\": [\n 13.3209,\n 13.3007,\n 13.2112\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_samples_per_second\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.024989844876244775,\n \"min\": 7.402,\n \"max\": 7.575,\n \"num_unique_values\": 36,\n \"samples\": [\n 7.494,\n 7.456,\n 7.427\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_steps_per_second\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.01265132888075318,\n \"min\": 3.738,\n \"max\": 3.826,\n \"num_unique_values\": 28,\n \"samples\": [\n 3.766,\n 3.776,\n 3.748\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_runtime\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 2615.821,\n \"max\": 2615.821,\n \"num_unique_values\": 1,\n \"samples\": [\n 2615.821\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_samples_per_second\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 1.518,\n \"max\": 1.518,\n \"num_unique_values\": 1,\n \"samples\": [\n 1.518\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_steps_per_second\",\n 
\"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 0.189,\n \"max\": 0.189,\n \"num_unique_values\": 1,\n \"samples\": [\n 0.189\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"total_flos\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 2.138562229174272e+16,\n \"max\": 2.138562229174272e+16,\n \"num_unique_values\": 1,\n \"samples\": [\n 2.138562229174272e+16\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_loss\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 2.2669311041783806,\n \"max\": 2.2669311041783806,\n \"num_unique_values\": 1,\n \"samples\": [\n 2.2669311041783806\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n }\n ]\n}"}},"metadata":{},"execution_count":27}]},{"cell_type":"code","source":["# Assuming 'your_actual_output_directory' is the correct path where your model files are located\n","trainer.args.output_dir = '/content/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb'\n","\n","trainer.push_to_hub()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":173,"referenced_widgets":["ef0dc73277ff45f5a34d353aa49da560","422ec094f0fa45c982a3686de1ac1df2","e839adcc10a54697bdb8faf74b32eba7","2d30f042f5384898ad2747dedfef40a7","837c6570a2fe4217b2507009142b0d44","122cd9b7af6b49a9a672ce7668f48044","c8ce184aeaef431798668b7e8ed6a02d","03b51ae6448a4bcbadfd1a1fabdf5680","964085fd71a647d69293c48de9d44078","0ed1c5a1be8a422fbf37ab5fc0817866","7eb23e7dd7dd49808655da3f7e04ffe7"]},"id":"BsPnIUQ5i3Tj","executionInfo":{"status":"ok","timestamp":1717991495188,"user_tz":-240,"elapsed":9670,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"aee1b69f-6baa-493f-cb85-594f3ac5c4e9"},"execution_count":29,"outputs":[{"output_type":"stream","name":"stderr","text":["Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. 
This warning will be raised to an exception in v4.41.\n"]},{"output_type":"display_data","data":{"text/plain":["training_args.bin: 0%| | 0.00/5.30k [00:00=2.0.0 (from evaluate)\n"," Downloading datasets-2.19.2-py3-none-any.whl (542 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m542.1/542.1 kB\u001b[0m \u001b[31m5.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from evaluate) (1.25.2)\n","Collecting dill (from evaluate)\n"," Downloading dill-0.3.8-py3-none-any.whl (116 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m13.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from evaluate) (2.0.3)\n","Requirement already satisfied: requests>=2.19.0 in /usr/local/lib/python3.10/dist-packages (from evaluate) (2.31.0)\n","Requirement already satisfied: tqdm>=4.62.1 in /usr/local/lib/python3.10/dist-packages (from evaluate) (4.66.4)\n","Collecting xxhash (from evaluate)\n"," Downloading xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m20.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting multiprocess (from evaluate)\n"," Downloading multiprocess-0.70.16-py310-none-any.whl (134 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m18.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: fsspec[http]>=2021.05.0 in /usr/local/lib/python3.10/dist-packages (from evaluate) (2023.6.0)\n","Requirement already satisfied: huggingface-hub>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from evaluate) (0.23.2)\n","Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from evaluate) (24.0)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (3.14.0)\n","Requirement already satisfied: pyarrow>=12.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (14.0.2)\n","Requirement already satisfied: pyarrow-hotfix in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (0.6)\n","Collecting requests>=2.19.0 (from evaluate)\n"," Downloading requests-2.32.3-py3-none-any.whl (64 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.9/64.9 kB\u001b[0m \u001b[31m8.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (3.9.5)\n","Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (6.0.1)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub>=0.7.0->evaluate) (4.12.1)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in 
/usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (2024.6.2)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas->evaluate) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->evaluate) (2023.4)\n","Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas->evaluate) (2024.1)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.3.1)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (23.2.0)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.4.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (6.0.5)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.9.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (4.0.3)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas->evaluate) (1.16.0)\n","Installing collected packages: xxhash, requests, dill, multiprocess, datasets, evaluate\n"," Attempting uninstall: requests\n"," Found existing installation: requests 2.31.0\n"," Uninstalling requests-2.31.0:\n"," Successfully uninstalled requests-2.31.0\n","\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n","google-colab 1.0.0 requires requests==2.31.0, but you have requests 2.32.3 which is incompatible.\u001b[0m\u001b[31m\n","\u001b[0mSuccessfully installed datasets-2.19.2 dill-0.3.8 evaluate-0.4.2 multiprocess-0.70.16 requests-2.32.3 xxhash-3.4.1\n"]},{"output_type":"display_data","data":{"application/vnd.colab-display-data+json":{"pip_warning":{"packages":["requests"]},"id":"1b11a3f4549d46ef834d30aa92a1c59a"}},"metadata":{}}]},{"source":["!pip install rouge_score"],"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"JhnsXhiG2CWs","executionInfo":{"status":"ok","timestamp":1717992473963,"user_tz":-240,"elapsed":8544,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"5514a1d1-524d-4659-cfe9-d9d2e7dbd6cc"},"execution_count":9,"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting rouge_score\n"," Downloading rouge_score-0.1.2.tar.gz (17 kB)\n"," Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: absl-py in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.4.0)\n","Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (from rouge_score) (3.8.1)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.25.2)\n","Requirement already satisfied: six>=1.14.0 in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.16.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (1.4.2)\n","Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (2024.5.15)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (4.66.4)\n","Building wheels for collected packages: rouge_score\n"," Building wheel for rouge_score (setup.py) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24933 sha256=168b4badeb6312bb6379796e24a0fd6725b6d396e6e0d7eed0bbb302376ee608\n"," Stored in directory: /root/.cache/pip/wheels/5f/dd/89/461065a73be61a532ff8599a28e9beef17985c9e9c31e541b4\n","Successfully built rouge_score\n","Installing collected packages: rouge_score\n","Successfully installed rouge_score-0.1.2\n"]}]},{"cell_type":"code","source":["from evaluate import load\n","# Load the ROUGE metric\n","import evaluate\n","rouge = evaluate.load('rouge')"],"metadata":{"id":"RdsN4yTZ1dgn","executionInfo":{"status":"ok","timestamp":1717992508803,"user_tz":-240,"elapsed":1357,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":10,"outputs":[]},{"cell_type":"code","source":["candidates = [generated_summary]\n","\n","references = [[target_text]\n"," ]\n","results = rouge.compute(predictions=candidates, references=references)\n","print(results)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"AQl7VcgT2NSM","executionInfo":{"status":"ok","timestamp":1717992545609,"user_tz":-240,"elapsed":416,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"f0476ce9-a2bc-446a-fced-1742640177e7"},"execution_count":13,"outputs":[{"output_type":"stream","name":"stdout","text":["{'rouge1': 0.45502645502645495, 'rouge2': 0.1702127659574468, 'rougeL': 0.2380952380952381, 'rougeLsum': 0.2380952380952381}\n"]}]}],"metadata":{"colab":{"provenance":[]},"kernelspec":{"display_name":"Python 
3","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.12.3"},"widgets":{"application/vnd.jupyter.widget-state+json":{"5848c165bbcc43798510a4b7de6ac025":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_b0bb01a086014c899d02218b52dbb587","IPY_MODEL_678580242fcc493db844628c9f7ceeaa","IPY_MODEL_46c4fdeedf494a688132c40d799a304d"],"layout":"IPY_MODEL_cff98db809994469961760bd2b8a9bff"}},"b0bb01a086014c899d02218b52dbb587":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_7b1d3959e1924f70891bb710680e38ae","placeholder":"​","style":"IPY_MODEL_0db0b115f0374072a664abe056291d2c","value":"config.json: 100%"}},"678580242fcc493db844628c9f7ceeaa":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_4d37806af79b44ee9b47bae73432ddc3","max":1282,"min":0,"orientation":"horizontal","style":"IPY_MODEL_3935523fe3434e17ac46ec2bca0d2591","value":1282}},"46c4fdeedf494a688132c40d799a304d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a7b93b0d13294061bbd5f4900bb67522","placeholder":"​","style":"IPY_MODEL_e8a56214e9584fa0abfd72db0d8bff93","value":" 1.28k/1.28k [00:00<00:00, 
101kB/s]"}},"cff98db809994469961760bd2b8a9bff":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7b1d3959e1924f70891bb710680e38ae":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0db0b115f0374072a664abe056291d2c":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4d37806af79b44ee9b47bae73432ddc3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3935523fe3434e17ac4
6ec2bca0d2591":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a7b93b0d13294061bbd5f4900bb67522":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e8a56214e9584fa0abfd72db0d8bff93":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0f98696a4f8d496cb5d9122a361df917":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_08a8e634ca4e4d60be998bf85e50d3d5","IPY_MODEL_d9425277e0b04ab986adab8058849336","IPY_MODEL_eb1e576545194454b26d14376f71ef98"],"layout":"IPY_MODEL_a1736f19e9e14787a9b90075a1f59b51"}},"08a8e634ca4e4d60be998bf85e50d3d5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_009cbbd4286b448693a181757a39409c","placeholder":"​","style":"IPY_MODEL_74d49d89ac934d1c87e7fbd39f456943","value":"model.safetensors: 
100%"}},"d9425277e0b04ab986adab8058849336":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_7b464cd62b2246038aa163cb68be98d5","max":647614116,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6263de0a2ca647cb83d9e49fc820db92","value":647614116}},"eb1e576545194454b26d14376f71ef98":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e3f0b803c7634adea791d6ef00fe4ff9","placeholder":"​","style":"IPY_MODEL_e326f9fe55724d82b4ba94820ca8bf39","value":" 648M/648M [00:28<00:00, 21.8MB/s]"}},"a1736f19e9e14787a9b90075a1f59b51":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"009cbbd4286b448693a181757a39409c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"74d49d89ac934d1c87e7fbd39f456943":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.
5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7b464cd62b2246038aa163cb68be98d5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6263de0a2ca647cb83d9e49fc820db92":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"e3f0b803c7634adea791d6ef00fe4ff9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e326f9fe55724d82b4ba94820ca8bf39":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4c35efb2cd264fe0b162fc52a027076d":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_5d2d2f56da0e439a92ffaa15d12afed9","IPY_MODEL_4fe8
644c63e54417af9823a1d18d1551","IPY_MODEL_99b426841b0642b28e6a25569e2153e0"],"layout":"IPY_MODEL_57a295f308544000aac01beaa936a108"}},"5d2d2f56da0e439a92ffaa15d12afed9":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_48d046c9253846598a8585227dbf2c2e","placeholder":"​","style":"IPY_MODEL_3c9e8dc215f74d03848cc6696e01e869","value":"generation_config.json: 100%"}},"4fe8644c63e54417af9823a1d18d1551":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_e2b3cd22c179449d99e3a92c90c9737b","max":295,"min":0,"orientation":"horizontal","style":"IPY_MODEL_ba84a501d78948fd8a19cddd52b61fb0","value":295}},"99b426841b0642b28e6a25569e2153e0":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e4c18749e7d24332bef7da70d876aff9","placeholder":"​","style":"IPY_MODEL_4b6d32887f0b497499c78af6690cacc2","value":" 295/295 [00:00<00:00, 
30.3kB/s]"}},"57a295f308544000aac01beaa936a108":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"48d046c9253846598a8585227dbf2c2e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3c9e8dc215f74d03848cc6696e01e869":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e2b3cd22c179449d99e3a92c90c9737b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ba84a501d78948fd8a
19cddd52b61fb0":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"e4c18749e7d24332bef7da70d876aff9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4b6d32887f0b497499c78af6690cacc2":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b8c1e1aa424e487e86df0f425da849e1":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_d40dec6af89e47a295e4b48d5c8f4734","IPY_MODEL_7e14c561413d4a548add644cf765e2e9","IPY_MODEL_4e479c7ac61541c2a0bada43886da71a"],"layout":"IPY_MODEL_740bc7bb7fb44c7d980c0297bcd7bfb6"}},"d40dec6af89e47a295e4b48d5c8f4734":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_0a231f9c3a77421fb91b439bd20d4f10","placeholder":"​","style":"IPY_MODEL_5cc1120ded8f4e91ac5185b6bdedd5e7","value":"tokenizer_config.json: 
100%"}},"7e14c561413d4a548add644cf765e2e9":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_0090c0e19c9c4d39b0c98ec758a3d72c","max":1212,"min":0,"orientation":"horizontal","style":"IPY_MODEL_3898ab3c5dce4176ae50bba9c09623d7","value":1212}},"4e479c7ac61541c2a0bada43886da71a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a430886fb5d745c7834541a8687fe245","placeholder":"​","style":"IPY_MODEL_781d34031f954393af04fd0b60911ddc","value":" 1.21k/1.21k [00:00<00:00, 106kB/s]"}},"740bc7bb7fb44c7d980c0297bcd7bfb6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0a231f9c3a77421fb91b439bd20d4f10":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5cc1120ded8f4e91ac5185b6bdedd5e7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_mo
del_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0090c0e19c9c4d39b0c98ec758a3d72c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3898ab3c5dce4176ae50bba9c09623d7":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a430886fb5d745c7834541a8687fe245":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"781d34031f954393af04fd0b60911ddc":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"3584a735ee284f0b8c88757813efaf86":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_c947203568b94464a0747ffb43218dd5","IPY_MODEL_88de79a33e144
481b067296c2fec3a13","IPY_MODEL_12129bf7c47b4a7e8e98a7dbcf574c91"],"layout":"IPY_MODEL_0af043937ca949edbcd703fc5474e496"}},"c947203568b94464a0747ffb43218dd5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_cf5fb501633c428bbcb96b1a28ecfbe5","placeholder":"​","style":"IPY_MODEL_4befd48f393242b5a3e29b7c2bf57e88","value":"vocab.json: 100%"}},"88de79a33e144481b067296c2fec3a13":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_c99dedbeffb74a63915ed8f760d085eb","max":798293,"min":0,"orientation":"horizontal","style":"IPY_MODEL_d0a3c5c77a864c6fba65dd7293c1fab8","value":798293}},"12129bf7c47b4a7e8e98a7dbcf574c91":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a05516bba01e42c7be5dc4105e128fd3","placeholder":"​","style":"IPY_MODEL_7c82336ed7c0447cb5a5da96ea09cf23","value":" 798k/798k [00:00<00:00, 
3.26MB/s]"}},"0af043937ca949edbcd703fc5474e496":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cf5fb501633c428bbcb96b1a28ecfbe5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4befd48f393242b5a3e29b7c2bf57e88":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"c99dedbeffb74a63915ed8f760d085eb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d0a3c5c77a864c6fba
65dd7293c1fab8":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a05516bba01e42c7be5dc4105e128fd3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7c82336ed7c0447cb5a5da96ea09cf23":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e70bad9f3772496ba72f895943d58444":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_52a85c5a71cb4866ac60d5e2f92b836f","IPY_MODEL_d486b1add058420fbe2a53f2c807de35","IPY_MODEL_c0dab47729c344aabfd58a453b50b7ee"],"layout":"IPY_MODEL_b61ff126cc644faebf2a34b407d476ee"}},"52a85c5a71cb4866ac60d5e2f92b836f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5f9abb070af84d6c9ffb1747238f8823","placeholder":"​","style":"IPY_MODEL_c9affc7ef61e4afdb209f564a36a82a7","value":"merges.txt: 
100%"}},"d486b1add058420fbe2a53f2c807de35":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_b3863d62e3684d849f8ce78f9ca313bb","max":456318,"min":0,"orientation":"horizontal","style":"IPY_MODEL_aef0502ee8134d8ab2f5fc936271e573","value":456318}},"c0dab47729c344aabfd58a453b50b7ee":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_d9515470f22743fbac42773ca79562bb","placeholder":"​","style":"IPY_MODEL_dac9b42265b14eea89cbb1849e99ec93","value":" 456k/456k [00:00<00:00, 25.2MB/s]"}},"b61ff126cc644faebf2a34b407d476ee":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5f9abb070af84d6c9ffb1747238f8823":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c9affc7ef61e4afdb209f564a36a82a7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","
_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b3863d62e3684d849f8ce78f9ca313bb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"aef0502ee8134d8ab2f5fc936271e573":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"d9515470f22743fbac42773ca79562bb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dac9b42265b14eea89cbb1849e99ec93":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4e21bf2dbe11493bae594034557e7392":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_0db8ee02ff0e4a68944999a7c46560ab","IPY_MODEL_681c5b9d90
bf48d699024d829454934f","IPY_MODEL_ba94513db6c4472b962c8112edbfc49d"],"layout":"IPY_MODEL_415943e0edef48d5b8aecadd92bf370a"}},"0db8ee02ff0e4a68944999a7c46560ab":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5e7b8b180b56426183901fa4699e79ad","placeholder":"​","style":"IPY_MODEL_100286eff7cb4c8bbf0b4102263b7cc1","value":"tokenizer.json: 100%"}},"681c5b9d90bf48d699024d829454934f":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_59ef85a3460c49008caf08c1885b708a","max":2108907,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6c79cfcd1d0741eca4f443fc78edc7ae","value":2108907}},"ba94513db6c4472b962c8112edbfc49d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_eda9fa293e1140388cdf1370cedeb3ab","placeholder":"​","style":"IPY_MODEL_2dc9b6f6a972438ba94ead2cc3d00530","value":" 2.11M/2.11M [00:00<00:00, 
4.41MB/s]"}},"415943e0edef48d5b8aecadd92bf370a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5e7b8b180b56426183901fa4699e79ad":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"100286eff7cb4c8bbf0b4102263b7cc1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"59ef85a3460c49008caf08c1885b708a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6c79cfcd1d0741eca4
f443fc78edc7ae":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"eda9fa293e1140388cdf1370cedeb3ab":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2dc9b6f6a972438ba94ead2cc3d00530":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"833988820b52407b8ce129ae243aa676":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_73ec1d8dab594d1dab9a653171ecdd0f","IPY_MODEL_c5df12a2a6f64d6fb48ebb1e9bdfc3b7","IPY_MODEL_732fd4d0981c441995aafd0cdf4e7abe"],"layout":"IPY_MODEL_3861f745da6a40f6b34006fef99dcec4"}},"73ec1d8dab594d1dab9a653171ecdd0f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_13ecf0bb82e543e6bb4a84d1d8b7503d","placeholder":"​","style":"IPY_MODEL_7b5df58fac6d4f7f8070edc41015e5c8","value":"special_tokens_map.json: 
100%"}},"c5df12a2a6f64d6fb48ebb1e9bdfc3b7":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_543e9cde20cd4e6ab033864819544057","max":957,"min":0,"orientation":"horizontal","style":"IPY_MODEL_20d0f57828fe4084b7a5c0e5e4aff10d","value":957}},"732fd4d0981c441995aafd0cdf4e7abe":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_716a26014b754e599bc8dda94617c87f","placeholder":"​","style":"IPY_MODEL_f288c94806f6490589640a7fba4268f0","value":" 957/957 [00:00<00:00, 89.8kB/s]"}},"3861f745da6a40f6b34006fef99dcec4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"13ecf0bb82e543e6bb4a84d1d8b7503d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7b5df58fac6d4f7f8070edc41015e5c8":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"543e9cde20cd4e6ab033864819544057":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"20d0f57828fe4084b7a5c0e5e4aff10d":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"716a26014b754e599bc8dda94617c87f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f288c94806f6490589640a7fba4268f0":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8862e2f4b1cc48a6b4ec736e00e08357":{"model_module":"@jupyter-widgets/controls","model_name":"VBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"VBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"VBoxView","box_style":"","children":["IPY_MODEL_58d997978dda440b840837a502c76342","IPY_MODEL_c1ea9f0e1e4744098b
c197fc2acbf1ca","IPY_MODEL_4409f38f2fcc45648a281adb1bd9e72e","IPY_MODEL_ae1cd6a6e06a49448427ec224818591d"],"layout":"IPY_MODEL_317ff2114ead472ea385f0040f505ebb"}},"2cf2f82034a741f5a10f8f904d0c5f04":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_6cd60eb05c3b48ffb9a402d2ef5b4e8f","placeholder":"​","style":"IPY_MODEL_4202c18e5abf4dcfbe0c9adea75c2d6d","value":"

Copy a token from your Hugging Face\ntokens page and paste it below.
Immediately click login after copying\nyour token or it might be stored in plain text in this notebook file.
"}},"a4444a43a3e049fb8cb044c0b1f86995":{"model_module":"@jupyter-widgets/controls","model_name":"PasswordModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"PasswordModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"PasswordView","continuous_update":true,"description":"Token:","description_tooltip":null,"disabled":false,"layout":"IPY_MODEL_7bf1df7f08284ec89a1ebde0bafb4a94","placeholder":"​","style":"IPY_MODEL_3172f59a8ffc4cf8a68b0593bf06f8e3","value":""}},"9961234abed54e329d323a915e7a7998":{"model_module":"@jupyter-widgets/controls","model_name":"CheckboxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"CheckboxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"CheckboxView","description":"Add token as git credential?","description_tooltip":null,"disabled":false,"indent":true,"layout":"IPY_MODEL_27470134d73f4c49b0aff4d76e662519","style":"IPY_MODEL_2899226a257446d39d3232b0ed3c9c43","value":true}},"09969b7b2264487c9d4a5d0a290b2b28":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ButtonView","button_style":"","description":"Login","disabled":false,"icon":"","layout":"IPY_MODEL_843069590d7c4fd08d36f0e437e7c954","style":"IPY_MODEL_7880edb1a04f415b857ed9a6608d6e36","tooltip":""}},"97e44495edb4432197e42a915f9c8a63":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_b6995092e1d548b996f28525f91b2c3c","placeholder":"​","style":"IPY_MODEL_60092722850d40d0a6c24d4089e317c0","value":"\nPro Tip: If you don't already have one, you can create a dedicated\n'notebooks' token with 'write' access, that you can then easily reuse for all\nnotebooks.
"}},"317ff2114ead472ea385f0040f505ebb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":"center","align_self":null,"border":null,"bottom":null,"display":"flex","flex":null,"flex_flow":"column","grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":"50%"}},"6cd60eb05c3b48ffb9a402d2ef5b4e8f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4202c18e5abf4dcfbe0c9adea75c2d6d":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7bf1df7f08284ec89a1ebde0bafb4a94":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3172f59a8ffc4cf8
a68b0593bf06f8e3":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"27470134d73f4c49b0aff4d76e662519":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2899226a257446d39d3232b0ed3c9c43":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"843069590d7c4fd08d36f0e437e7c954":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7880edb1a04f415b857ed9a6608d6e36":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","button_color":null,"font_weight":""}},"b6995092e1d548b996f28525f91b2c3c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_vi
ew_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"60092722850d40d0a6c24d4089e317c0":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ab9effe3ff7b4a98be72bf9dca71c95c":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c34c455dc3a64eac90af09b28554de86","placeholder":"​","style":"IPY_MODEL_2903571cdccc4628aecf1c9acd7d3685","value":"Connecting..."}},"c34c455dc3a64eac90af09b28554de86":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2903571cdccc4628aecf1c9acd7d3685":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"58d997978dda440b840837a502c76342":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_versi
on":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_16b53f932a734a90b5034c9e38d6421f","placeholder":"​","style":"IPY_MODEL_4f2e764c745044aba2b9af3aedab4a3f","value":"Token is valid (permission: write)."}},"c1ea9f0e1e4744098bc197fc2acbf1ca":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_26db96a6ab6741f69965f3250c187600","placeholder":"​","style":"IPY_MODEL_ffe2edc3719b43fd95521ec21c29040a","value":"Your token has been saved in your configured git credential helpers (store)."}},"4409f38f2fcc45648a281adb1bd9e72e":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1973709108744330bdbc287ff88990f4","placeholder":"​","style":"IPY_MODEL_76b0f38e122c415ab0b67b6394abefab","value":"Your token has been saved to /root/.cache/huggingface/token"}},"ae1cd6a6e06a49448427ec224818591d":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ac2a119ae1574cf7b2d3e25b36894648","placeholder":"​","style":"IPY_MODEL_e511b7975fe5458889e406b7639a1794","value":"Login 
successful"}},"16b53f932a734a90b5034c9e38d6421f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4f2e764c745044aba2b9af3aedab4a3f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"26db96a6ab6741f69965f3250c187600":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ffe2edc3719b43fd95521ec21c29040a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"1973709108744330bdbc287ff88990f4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":n
ull,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"76b0f38e122c415ab0b67b6394abefab":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ac2a119ae1574cf7b2d3e25b36894648":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e511b7975fe5458889e406b7639a1794":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"adb36cd54327481f8e08d431c1c2be47":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_60363acdc50545d59fb9e81cf8cc8fa4","IPY_MODEL_a2c8fb676e3a4174801dc0727f48dbf9","IPY_MODEL_c394654fd60c4b0dbda234eb96cb8326"],"layout":"IPY_MODEL_3f8950fff5624985861ae46f180b2f77"}},"60363acdc50545d59fb9e81cf8cc8fa4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_f6bb7ef31a854006a131cb363cb2ccc4","placeholder":"​","style":"IPY_MODEL_033b3dc6b42046e088da8db49ca6be53","value":"tokenizer_config.json: 
100%"}},"a2c8fb676e3a4174801dc0727f48dbf9":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_0da350cbe5b449df9248f638644b25bd","max":27,"min":0,"orientation":"horizontal","style":"IPY_MODEL_aa80c5fbb7ae4edcb73913405fe9d178","value":27}},"c394654fd60c4b0dbda234eb96cb8326":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_174bb6a3e93749ea965f55fa2529a66a","placeholder":"​","style":"IPY_MODEL_5bbaffc2fe6b41898643e54e5efc414a","value":" 27.0/27.0 [00:00<00:00, 2.20kB/s]"}},"3f8950fff5624985861ae46f180b2f77":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f6bb7ef31a854006a131cb363cb2ccc4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"033b3dc6b42046e088da8db49ca6be53":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0da350cbe5b449df9248f638644b25bd":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"aa80c5fbb7ae4edcb73913405fe9d178":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"174bb6a3e93749ea965f55fa2529a66a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5bbaffc2fe6b41898643e54e5efc414a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"6feacd7780204b95a0ac9e0960bb7051":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_d3de27f89000452b94fc080ea4006ac8","IPY_MODEL_9b3b12d72784444cb7
b7229b3ece5ae9","IPY_MODEL_f3c9b36c7dcf4aacbe1a8fa2bb34dc1a"],"layout":"IPY_MODEL_e20ff2fa8456469689e89e8934823937"}},"d3de27f89000452b94fc080ea4006ac8":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_9b229c8dbf834924aec022ac46391126","placeholder":"​","style":"IPY_MODEL_4692006aa8f84e1584e7a2d7701fd9fe","value":"config.json: 100%"}},"9b3b12d72784444cb7b7229b3ece5ae9":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_2d1ba36b6d584990ad30ddc836f303aa","max":1092,"min":0,"orientation":"horizontal","style":"IPY_MODEL_200da62c75b649649f9950ac6ce5a2b6","value":1092}},"f3c9b36c7dcf4aacbe1a8fa2bb34dc1a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_4fbf0c311ebb4f6f879f4067f1f65e55","placeholder":"​","style":"IPY_MODEL_0f764bc386ac4bffbfdbad29164d4f1b","value":" 1.09k/1.09k [00:00<00:00, 
91.2kB/s]"}},"e20ff2fa8456469689e89e8934823937":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9b229c8dbf834924aec022ac46391126":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4692006aa8f84e1584e7a2d7701fd9fe":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"2d1ba36b6d584990ad30ddc836f303aa":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"200da62c75b649649f
9950ac6ce5a2b6":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"4fbf0c311ebb4f6f879f4067f1f65e55":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0f764bc386ac4bffbfdbad29164d4f1b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4f91be8ff6e046e3800e9372f37707a2":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_74e1c3d2a5ae41f98df23399bbad3ff2","IPY_MODEL_6f439cd9628b44a2a02bf3a932bc8ce3","IPY_MODEL_411731b5a4c9432499d4d1d6ae8afa1a"],"layout":"IPY_MODEL_6c167d5777c6402ba48100e0783fabc2"}},"74e1c3d2a5ae41f98df23399bbad3ff2":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_2f9336479e394c2581ed76af1ad22523","placeholder":"​","style":"IPY_MODEL_b82473f9dcc74ebca50936d8969808fa","value":"vocab.json: 
100%"}},"6f439cd9628b44a2a02bf3a932bc8ce3":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_ac1cf21d6b8c412ab8ba2c3a854080d1","max":898822,"min":0,"orientation":"horizontal","style":"IPY_MODEL_d35c7a932715443cb712acc0d0475c60","value":898822}},"411731b5a4c9432499d4d1d6ae8afa1a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_889cb440760b4f26b521f659663aff49","placeholder":"​","style":"IPY_MODEL_140cd7b096b24165afa37b610c360be5","value":" 899k/899k [00:00<00:00, 4.15MB/s]"}},"6c167d5777c6402ba48100e0783fabc2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2f9336479e394c2581ed76af1ad22523":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b82473f9dcc74ebca50936d8969808fa":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","
_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ac1cf21d6b8c412ab8ba2c3a854080d1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d35c7a932715443cb712acc0d0475c60":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"889cb440760b4f26b521f659663aff49":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"140cd7b096b24165afa37b610c360be5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"525cd158dae8408d861fcf29daa58409":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_88998570b9bd4f8b9a257785ae37fb34","IPY_MODEL_4c2d8b62e0
944cac8eeaf16fd69a3a4d","IPY_MODEL_4c3a621f4ce540a8b5e42364e1224257"],"layout":"IPY_MODEL_f7adb4e02235409b8e8f1bc9945c498d"}},"88998570b9bd4f8b9a257785ae37fb34":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_f049fc09d6c241c7a4b2c0f1f4767ea0","placeholder":"​","style":"IPY_MODEL_b27c653f9bfc4fb8a280b0d2b0804c53","value":"merges.txt: 100%"}},"4c2d8b62e0944cac8eeaf16fd69a3a4d":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_dfa41b69a5034879b0116e921befa879","max":456318,"min":0,"orientation":"horizontal","style":"IPY_MODEL_54512cf0b90e4a6d845b3871ab097eab","value":456318}},"4c3a621f4ce540a8b5e42364e1224257":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_96ee1082136341d690d6d2fbbbdb5764","placeholder":"​","style":"IPY_MODEL_641b003d8ae147d398199eb882e81f5f","value":" 456k/456k [00:00<00:00, 
33.1MB/s]"}},"f7adb4e02235409b8e8f1bc9945c498d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f049fc09d6c241c7a4b2c0f1f4767ea0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b27c653f9bfc4fb8a280b0d2b0804c53":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"dfa41b69a5034879b0116e921befa879":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"54512cf0b90e4a6d84
5b3871ab097eab":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"96ee1082136341d690d6d2fbbbdb5764":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"641b003d8ae147d398199eb882e81f5f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0400e3e1fb784b34a3daa304c86264e7":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_2da5ccea478143fc95c47645f4dffeb4","IPY_MODEL_b8cec55e5f104516a99818274ea06e91","IPY_MODEL_e89efbee5da041e3b6f4e9139bd87bb5"],"layout":"IPY_MODEL_870fd5a9a65b45119a86950bce9b14a9"}},"2da5ccea478143fc95c47645f4dffeb4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_27b351d742354cc7af3eefca1f69dbd9","placeholder":"​","style":"IPY_MODEL_f05fba1b782b451b9380add982882dc1","value":"special_tokens_map.json: 
100%"}},"b8cec55e5f104516a99818274ea06e91":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_8a01297ecef44fe19e350148d592c69b","max":772,"min":0,"orientation":"horizontal","style":"IPY_MODEL_90bd0543ab5c4bcd8e13f1eb190350be","value":772}},"e89efbee5da041e3b6f4e9139bd87bb5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_212af8fd13834b13ba97ebced3f6409a","placeholder":"​","style":"IPY_MODEL_db6aa38d0ba94e3dae56367cc71ff967","value":" 772/772 [00:00<00:00, 66.6kB/s]"}},"870fd5a9a65b45119a86950bce9b14a9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"27b351d742354cc7af3eefca1f69dbd9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f05fba1b782b451b9380add982882dc1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8a01297ecef44fe19e350148d592c69b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"90bd0543ab5c4bcd8e13f1eb190350be":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"212af8fd13834b13ba97ebced3f6409a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"db6aa38d0ba94e3dae56367cc71ff967":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"073c8ff199e44eed97872fb6f1652dba":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_7d3052ef63c04a70aeffa0ce779ab7de","IPY_MODEL_51399bec39784d38a4
71c9ad9359e325","IPY_MODEL_88968d75884d4a91adc2a373ab2ec194"],"layout":"IPY_MODEL_7c3ae00e304f4e0e87efe90a6f8beb8f"}},"7d3052ef63c04a70aeffa0ce779ab7de":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5d3a15fe2c244a94bf695da3cbeb92a4","placeholder":"​","style":"IPY_MODEL_a9756f53c5f842acb8eb4f71a8518f64","value":"Map: 100%"}},"51399bec39784d38a471c9ad9359e325":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_3ef9b624c38641ba99e42161bc72ab6a","max":794,"min":0,"orientation":"horizontal","style":"IPY_MODEL_7a37dec764b74053af984f47bb8faa4a","value":794}},"88968d75884d4a91adc2a373ab2ec194":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_95b00c6f85154fb2938ab9450c008bbe","placeholder":"​","style":"IPY_MODEL_77335a8981774d6b9cef6219eb19da51","value":" 794/794 [00:06<00:00, 109.42 
examples/s]"}},"7c3ae00e304f4e0e87efe90a6f8beb8f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5d3a15fe2c244a94bf695da3cbeb92a4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a9756f53c5f842acb8eb4f71a8518f64":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"3ef9b624c38641ba99e42161bc72ab6a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7a37dec764b74053
af984f47bb8faa4a":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"95b00c6f85154fb2938ab9450c008bbe":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"77335a8981774d6b9cef6219eb19da51":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"45b55efbee6e4f62a5de7071bbc043aa":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_d10651e9391a410ea111ae794e5f268b","IPY_MODEL_5128448cf6584ba7bfb672365b269db8","IPY_MODEL_e8eb60da94bb434681b7c3975716bf97"],"layout":"IPY_MODEL_c31a36e8c2f14d5da3c757ead4e86d0d"}},"d10651e9391a410ea111ae794e5f268b":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_37b8f83071ba431a90ae7227e9d41418","placeholder":"​","style":"IPY_MODEL_83d0e6772b0a4dabb69d41aa6d1e94b4","value":"Map: 
100%"}},"5128448cf6584ba7bfb672365b269db8":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_d1023711b801470b8811dde0b6362f64","max":99,"min":0,"orientation":"horizontal","style":"IPY_MODEL_fa32a29288634de28824e5533f47a338","value":99}},"e8eb60da94bb434681b7c3975716bf97":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_627a217165c348b8bf6029d3da971956","placeholder":"​","style":"IPY_MODEL_59ff0398cb5a4b839e187d41bbd20207","value":" 99/99 [00:00<00:00, 115.17 examples/s]"}},"c31a36e8c2f14d5da3c757ead4e86d0d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"37b8f83071ba431a90ae7227e9d41418":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"83d0e6772b0a4dabb69d41aa6d1e94b4":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_mo
del_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d1023711b801470b8811dde0b6362f64":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fa32a29288634de28824e5533f47a338":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"627a217165c348b8bf6029d3da971956":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"59ff0398cb5a4b839e187d41bbd20207":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f3f6a67ea13b40fc8c88078ebd694770":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_1a450601dd124d579cb59a6cb13c7cd8","IPY_MODEL_c386d29405614
3aab4ddefc8a812c7c5","IPY_MODEL_c09fc5aca048454f8810f6dd8972b22b"],"layout":"IPY_MODEL_5bc5ecba03ee4314b4817b19f29a42de"}},"1a450601dd124d579cb59a6cb13c7cd8":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_424ba27899824d0dbd36839625738152","placeholder":"​","style":"IPY_MODEL_6de8cb080b1c4f42a7d31c366ff95476","value":"pytorch_model.bin: 100%"}},"c386d294056143aab4ddefc8a812c7c5":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_2dcc40faa369463e9cff5965be686b7a","max":647693783,"min":0,"orientation":"horizontal","style":"IPY_MODEL_7ed55465e7dc4b2093f89c7c48daccb6","value":647693783}},"c09fc5aca048454f8810f6dd8972b22b":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5d4322ef1b2d45c090276a8742cdc30b","placeholder":"​","style":"IPY_MODEL_16c2fc77be4846f6a42bfb2195ec3870","value":" 648M/648M [00:01<00:00, 
456MB/s]"}},"5bc5ecba03ee4314b4817b19f29a42de":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"424ba27899824d0dbd36839625738152":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6de8cb080b1c4f42a7d31c366ff95476":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"2dcc40faa369463e9cff5965be686b7a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7ed55465e7dc4b2093f
89c7c48daccb6":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"5d4322ef1b2d45c090276a8742cdc30b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"16c2fc77be4846f6a42bfb2195ec3870":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9de9ec685fec4ba2ad6504442bfe4dc4":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_3c2e431867594f3b84f76aae705eeae0","IPY_MODEL_22722f46b92f411dbf824bc7a039e867","IPY_MODEL_7d6625586d454ab5b1373a9f0fc72c1c"],"layout":"IPY_MODEL_181c3d250f254cdbbdf2d80f2f080629"}},"3c2e431867594f3b84f76aae705eeae0":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3f4f99776ef24ba4ad4cddb153754f8c","placeholder":"​","style":"IPY_MODEL_42c400180a8b4b94ac304a3b5774d095","value":"generation_config.json: 
100%"}},"22722f46b92f411dbf824bc7a039e867":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_3ce0a381dcd54f0b8e1ead0c7de86cc9","max":168,"min":0,"orientation":"horizontal","style":"IPY_MODEL_b0d591aab2b14c0981eb6ad4d114acff","value":168}},"7d6625586d454ab5b1373a9f0fc72c1c":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3228948be73947f8b6c340502e5b674a","placeholder":"​","style":"IPY_MODEL_2cafd7a1984549f6816526b9f8fe45b7","value":" 168/168 [00:00<00:00, 15.2kB/s]"}},"181c3d250f254cdbbdf2d80f2f080629":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3f4f99776ef24ba4ad4cddb153754f8c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"42c400180a8b4b94ac304a3b5774d095":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"3ce0a381dcd54f0b8e1ead0c7de86cc9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b0d591aab2b14c0981eb6ad4d114acff":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"3228948be73947f8b6c340502e5b674a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2cafd7a1984549f6816526b9f8fe45b7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"686f2bfcd3e140f4a1df1004b70b863b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_f22a43d0d1854474bcd76900a7323532","IPY_MODEL_cf0e6bb3a5e14b0c91
a314bda65f1a10","IPY_MODEL_6498de61d15a46cb982f461ca1fc1424"],"layout":"IPY_MODEL_05bab9bdf6e54dc59bf575883174c65a"}},"f22a43d0d1854474bcd76900a7323532":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_6decfb6716334aa78040397e922917b2","placeholder":"​","style":"IPY_MODEL_72457b1576d44ac7b257aca0409ad400","value":"Downloading builder script: "}},"cf0e6bb3a5e14b0c91a314bda65f1a10":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_ffc21f391076431d82b66cdd3f6ab89f","max":2169,"min":0,"orientation":"horizontal","style":"IPY_MODEL_27ab60cb083b4e46ba245fc1be367221","value":2169}},"6498de61d15a46cb982f461ca1fc1424":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_f8d7bdf7dc85447bac5a770ab19f66d6","placeholder":"​","style":"IPY_MODEL_846891a3388f45489c2be3b13e535a39","value":" 5.65k/? 
[00:00<00:00, 401kB/s]"}},"05bab9bdf6e54dc59bf575883174c65a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6decfb6716334aa78040397e922917b2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"72457b1576d44ac7b257aca0409ad400":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ffc21f391076431d82b66cdd3f6ab89f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"27ab6
0cb083b4e46ba245fc1be367221":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"f8d7bdf7dc85447bac5a770ab19f66d6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"846891a3388f45489c2be3b13e535a39":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ef0dc73277ff45f5a34d353aa49da560":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_422ec094f0fa45c982a3686de1ac1df2","IPY_MODEL_e839adcc10a54697bdb8faf74b32eba7","IPY_MODEL_2d30f042f5384898ad2747dedfef40a7"],"layout":"IPY_MODEL_837c6570a2fe4217b2507009142b0d44"}},"422ec094f0fa45c982a3686de1ac1df2":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_122cd9b7af6b49a9a672ce7668f48044","placeholder":"​","style":"IPY_MODEL_c8ce184aeaef431798668b7e8ed6a02d","value":"training_args.bin: 
100%"}},"e839adcc10a54697bdb8faf74b32eba7":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_03b51ae6448a4bcbadfd1a1fabdf5680","max":5304,"min":0,"orientation":"horizontal","style":"IPY_MODEL_964085fd71a647d69293c48de9d44078","value":5304}},"2d30f042f5384898ad2747dedfef40a7":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_0ed1c5a1be8a422fbf37ab5fc0817866","placeholder":"​","style":"IPY_MODEL_7eb23e7dd7dd49808655da3f7e04ffe7","value":" 5.30k/5.30k [00:00<00:00, 51.2kB/s]"}},"837c6570a2fe4217b2507009142b0d44":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"122cd9b7af6b49a9a672ce7668f48044":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c8ce184aeaef431798668b7e8ed6a02d":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_m
odel_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"03b51ae6448a4bcbadfd1a1fabdf5680":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"964085fd71a647d69293c48de9d44078":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"0ed1c5a1be8a422fbf37ab5fc0817866":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7eb23e7dd7dd49808655da3f7e04ffe7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9bec1b742b93498295e9f94243c1f431":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_030335b4477b49a4abb4b29c1e0939fc","IPY_MODEL_1dfa9e77b67d
4c5e9ae7e4fec37c88ea","IPY_MODEL_45870d32af3d49e189acdd3fcbacdadf"],"layout":"IPY_MODEL_f7b9aa650f04486ebc6eb151a19dbf4e"}},"030335b4477b49a4abb4b29c1e0939fc":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_01c5f81549fe446c83eb848842ff09c1","placeholder":"​","style":"IPY_MODEL_cc92eca0c78c480ab100e1bd63439658","value":"config.json: 100%"}},"1dfa9e77b67d4c5e9ae7e4fec37c88ea":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_affa369a32ce41229fb61d5c2700326f","max":1282,"min":0,"orientation":"horizontal","style":"IPY_MODEL_2d45a98df6ff487fb1ac4d108555386b","value":1282}},"45870d32af3d49e189acdd3fcbacdadf":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1061fe7d61fd4d939dc7752102bb8e72","placeholder":"​","style":"IPY_MODEL_dadde30982884bf6a7320e3e4c025bc7","value":" 1.28k/1.28k [00:00<00:00, 
27.1kB/s]"}},"f7b9aa650f04486ebc6eb151a19dbf4e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"01c5f81549fe446c83eb848842ff09c1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cc92eca0c78c480ab100e1bd63439658":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"affa369a32ce41229fb61d5c2700326f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2d45a98df6ff487fb1
ac4d108555386b":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"1061fe7d61fd4d939dc7752102bb8e72":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dadde30982884bf6a7320e3e4c025bc7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"2bda047b50264cbf84feb60d343a66ec":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_62516f13d2d34defb87f70a5701433a1","IPY_MODEL_a2a51f7fe07c4bafbc810636a5e771dd","IPY_MODEL_a525bb7e594e417487a82522709b0883"],"layout":"IPY_MODEL_b47ff53c504242548a47ed526148c957"}},"62516f13d2d34defb87f70a5701433a1":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_06cec0dca36141178227978f4ad99bc6","placeholder":"​","style":"IPY_MODEL_262bbcbcb4ca41658e5978b0e483477f","value":"model.safetensors: 
100%"}},"a2a51f7fe07c4bafbc810636a5e771dd":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_3af98e95bd6845b2885b34e684c86aa8","max":647614116,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6979cdf8c6b54e43b19fe21ffb500ac9","value":647614116}},"a525bb7e594e417487a82522709b0883":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_268d85aee81142039ca34cccbb312464","placeholder":"​","style":"IPY_MODEL_2073cfa22db14518a9a8cf4c3e0f89eb","value":" 648M/648M [00:08<00:00, 78.5MB/s]"}},"b47ff53c504242548a47ed526148c957":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"06cec0dca36141178227978f4ad99bc6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"262bbcbcb4ca41658e5978b0e483477f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.
5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"3af98e95bd6845b2885b34e684c86aa8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6979cdf8c6b54e43b19fe21ffb500ac9":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"268d85aee81142039ca34cccbb312464":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2073cfa22db14518a9a8cf4c3e0f89eb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7e17f38f528b4b7e9776a3598de2e31a":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_af61051e95584fdc9f404bf64d254b3a","IPY_MODEL_09c4
5f312a5a4ea0a5341a9ddb4af6c2","IPY_MODEL_2c2ca79e7d8c4cd084f5cebc4823c177"],"layout":"IPY_MODEL_23180d8f98b04af18e74be51766a0d54"}},"af61051e95584fdc9f404bf64d254b3a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_51bda236ef14427e98bed4fff0668ce5","placeholder":"​","style":"IPY_MODEL_f76e09f63277434ba2dcaa0cd7fcfd25","value":"generation_config.json: 100%"}},"09c45f312a5a4ea0a5341a9ddb4af6c2":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_9ae1cbf474474332a5010ed7337ca137","max":163,"min":0,"orientation":"horizontal","style":"IPY_MODEL_a49c864bfff64ca29b8cf4221a31ba65","value":163}},"2c2ca79e7d8c4cd084f5cebc4823c177":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_8b609e02c4b84e3688d5a783a41e5d4d","placeholder":"​","style":"IPY_MODEL_6819555a72424f2da079dd93134dd6c9","value":" 163/163 [00:00<00:00, 
2.94kB/s]"}},"23180d8f98b04af18e74be51766a0d54":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"51bda236ef14427e98bed4fff0668ce5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f76e09f63277434ba2dcaa0cd7fcfd25":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9ae1cbf474474332a5010ed7337ca137":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a49c864bfff64ca29b
8cf4221a31ba65":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"8b609e02c4b84e3688d5a783a41e5d4d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6819555a72424f2da079dd93134dd6c9":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"349e777715824518af3e575fd2542fe3":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_8ac250467c4745be9b37eabeb9e38a33","IPY_MODEL_edc8185249524c859875e2a791c3474c","IPY_MODEL_e70db060ce024d959c1681dbd7f0e2d4"],"layout":"IPY_MODEL_f4f9e3ad9c124deeb6e581dd10e50d53"}},"8ac250467c4745be9b37eabeb9e38a33":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_f2a5ece7f5c643859cbd2721a5f43ba6","placeholder":"​","style":"IPY_MODEL_edfc5c84c53d4a4484eadfdc2747feb5","value":"tokenizer_config.json: 
100%"}},"edc8185249524c859875e2a791c3474c":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_aae3a9287656458f9329541feb15da56","max":1212,"min":0,"orientation":"horizontal","style":"IPY_MODEL_66cac4c495e942908888d3fedf031935","value":1212}},"e70db060ce024d959c1681dbd7f0e2d4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_14ab8e9d93944dc6ac8bceb321010b8c","placeholder":"​","style":"IPY_MODEL_97cd012aff0640969cf4eb138fdaec62","value":" 1.21k/1.21k [00:00<00:00, 17.1kB/s]"}},"f4f9e3ad9c124deeb6e581dd10e50d53":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f2a5ece7f5c643859cbd2721a5f43ba6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"edfc5c84c53d4a4484eadfdc2747feb5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_m
odel_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"aae3a9287656458f9329541feb15da56":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"66cac4c495e942908888d3fedf031935":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"14ab8e9d93944dc6ac8bceb321010b8c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"97cd012aff0640969cf4eb138fdaec62":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9fadcc6912484d8594918c7e21d702f8":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_ff6cd16885824214bfd84af8f1109304","IPY_MODEL_137319a201ff
4b5bb31c40a5d9624ffe","IPY_MODEL_ddd59b4dcfe54c8992b9545c99e9808b"],"layout":"IPY_MODEL_7c3013e0d1df420fb7a54f4bd2a69393"}},"ff6cd16885824214bfd84af8f1109304":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_0b4e52fbcb0b45b9b7796cc8cbf05643","placeholder":"​","style":"IPY_MODEL_ceb46ef766814c90a59461c6c3b695f9","value":"vocab.json: 100%"}},"137319a201ff4b5bb31c40a5d9624ffe":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_d8720c5007da4134ac5c1f9f6c003d4c","max":798293,"min":0,"orientation":"horizontal","style":"IPY_MODEL_9fa60b27125c47d1962c91214a03b38f","value":798293}},"ddd59b4dcfe54c8992b9545c99e9808b":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_722794cc8eba4aedadbc35fedc1c09e5","placeholder":"​","style":"IPY_MODEL_39ee2f9fff5c43f19913ea348771327e","value":" 798k/798k [00:00<00:00, 
5.68MB/s]"}},"7c3013e0d1df420fb7a54f4bd2a69393":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0b4e52fbcb0b45b9b7796cc8cbf05643":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ceb46ef766814c90a59461c6c3b695f9":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d8720c5007da4134ac5c1f9f6c003d4c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9fa60b27125c47d196
2c91214a03b38f":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"722794cc8eba4aedadbc35fedc1c09e5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"39ee2f9fff5c43f19913ea348771327e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f78da64e94684ea0b7113c4b9d428a54":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_91c3d41136964ed49fa85a54f627262e","IPY_MODEL_4dbcefecab754e40a0e0fa59dee425f9","IPY_MODEL_39bc5d3c9f5747d48638913d120e75cb"],"layout":"IPY_MODEL_2a3b37518b75481f915ce2eda1964461"}},"91c3d41136964ed49fa85a54f627262e":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fa04bdcac02f4a0685753fc9480d161f","placeholder":"​","style":"IPY_MODEL_6f9a48b26b7f4b9fb291ea6b57851884","value":"merges.txt: 
100%"}},"4dbcefecab754e40a0e0fa59dee425f9":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_2d387e8c9b9b43d9bff2e66dcda17c19","max":456318,"min":0,"orientation":"horizontal","style":"IPY_MODEL_b183f88aa1b54d2b9579a6834b859ed2","value":456318}},"39bc5d3c9f5747d48638913d120e75cb":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c28c19742d4f489b9e640a042a75b7df","placeholder":"​","style":"IPY_MODEL_5936499a9808403981dc5401c384c7ae","value":" 456k/456k [00:00<00:00, 9.87MB/s]"}},"2a3b37518b75481f915ce2eda1964461":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fa04bdcac02f4a0685753fc9480d161f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6f9a48b26b7f4b9fb291ea6b57851884":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","
_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"2d387e8c9b9b43d9bff2e66dcda17c19":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b183f88aa1b54d2b9579a6834b859ed2":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"c28c19742d4f489b9e640a042a75b7df":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5936499a9808403981dc5401c384c7ae":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"1b57ec576d57437ebe5101b49c19e5f3":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_dad1eec0fcc24fd0b5e670aa4ef3a101","IPY_MODEL_bdee2aaeb7
6b481387a7f66a56f445dd","IPY_MODEL_dd908832cd374f2ab13aef9ba1ee3ed8"],"layout":"IPY_MODEL_ada9d2f3f8d6480d9cb62288aa67573c"}},"dad1eec0fcc24fd0b5e670aa4ef3a101":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c70d6d9e9d9d476a8e0abd2730afded1","placeholder":"​","style":"IPY_MODEL_2482dbac99414cb19646ad7e99dea6a5","value":"tokenizer.json: 100%"}},"bdee2aaeb76b481387a7f66a56f445dd":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_b1ddce86ac1d4e41a44740996a8f18cd","max":2108907,"min":0,"orientation":"horizontal","style":"IPY_MODEL_1eb5c0c04cff4abfbf5d183378cd92e7","value":2108907}},"dd908832cd374f2ab13aef9ba1ee3ed8":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_af09dd74e79e40ef94d155bdfb255253","placeholder":"​","style":"IPY_MODEL_17476e334a774a4ea5eec0c6484bb63b","value":" 2.11M/2.11M [00:00<00:00, 
16.6MB/s]"}},"ada9d2f3f8d6480d9cb62288aa67573c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c70d6d9e9d9d476a8e0abd2730afded1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2482dbac99414cb19646ad7e99dea6a5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b1ddce86ac1d4e41a44740996a8f18cd":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1eb5c0c04cff4abfbf
5d183378cd92e7":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"af09dd74e79e40ef94d155bdfb255253":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"17476e334a774a4ea5eec0c6484bb63b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b5fca471892c4066a99d0ff70a9b731c":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_04be1eec15e14c608ae325efbb6a1b9e","IPY_MODEL_0ac61cd83fad45758b64073197808a32","IPY_MODEL_d30a24badb1f4099a206099c9d10e60f"],"layout":"IPY_MODEL_1aaa2883d3764b6a84b01f990633de9e"}},"04be1eec15e14c608ae325efbb6a1b9e":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_84d42effd4cc4d9cbf9fccd4930d0747","placeholder":"​","style":"IPY_MODEL_9499d3d6955c42418c23b96c552b6065","value":"special_tokens_map.json: 
100%"}},"0ac61cd83fad45758b64073197808a32":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_bca99ce4a5cc49edaa0e17d5c4c6bbf1","max":957,"min":0,"orientation":"horizontal","style":"IPY_MODEL_56e9978e10a74ca6bd03f3219bb7977d","value":957}},"d30a24badb1f4099a206099c9d10e60f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_341ac4d694e84bc38e7a0792fdbac62f","placeholder":"​","style":"IPY_MODEL_fb41c8269f3a4c2d988a18d24e3185e5","value":" 957/957 [00:00<00:00, 18.3kB/s]"}},"1aaa2883d3764b6a84b01f990633de9e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"84d42effd4cc4d9cbf9fccd4930d0747":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9499d3d6955c42418c23b96c552b6065":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"bca99ce4a5cc49edaa0e17d5c4c6bbf1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"56e9978e10a74ca6bd03f3219bb7977d":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"341ac4d694e84bc38e7a0792fdbac62f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fb41c8269f3a4c2d988a18d24e3185e5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}}},"nbformat":4,"nbformat_minor":0} \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/README.md b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7d16c08991eec0b0ddc3a7bbfb1fa30bd459ffa8 --- /dev/null +++ 
b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/README.md @@ -0,0 +1,108 @@ +--- +license: apache-2.0 +base_model: allenai/led-base-16384 +tags: +- generated_from_trainer +model-index: +- name: Trying_LED_Model_Hiporank_final_setting.ipynb + results: [] +--- + + + +# Trying_LED_Model_Hiporank_final_setting.ipynb + +This model is a fine-tuned version of [allenai/led-base-16384](https://huggingface.co/allenai/led-base-16384) on the None dataset. +It achieves the following results on the evaluation set: +- Loss: 2.5873 + +## Model description + +More information needed + +## Intended uses & limitations + +More information needed + +## Training and evaluation data + +More information needed + +## Training procedure + +### Training hyperparameters + +The following hyperparameters were used during training: +- learning_rate: 5e-05 +- train_batch_size: 2 +- eval_batch_size: 2 +- seed: 42 +- gradient_accumulation_steps: 4 +- total_train_batch_size: 8 +- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 +- lr_scheduler_type: linear +- num_epochs: 5 +- mixed_precision_training: Native AMP + +### Training results + +| Training Loss | Epoch | Step | Validation Loss | +|:-------------:|:------:|:----:|:---------------:| +| 2.8616 | 0.1008 | 10 | 2.8924 | +| 2.8923 | 0.2015 | 20 | 2.8183 | +| 2.9791 | 0.3023 | 30 | 2.7639 | +| 2.9044 | 0.4030 | 40 | 2.7276 | +| 2.428 | 0.5038 | 50 | 2.7162 | +| 2.9009 | 0.6045 | 60 | 2.6943 | +| 2.9211 | 0.7053 | 70 | 2.6682 | +| 2.7291 | 0.8060 | 80 | 2.6528 | +| 2.6494 | 0.9068 | 90 | 2.6525 | +| 2.7393 | 1.0076 | 100 | 2.6357 | +| 2.3916 | 1.1083 | 110 | 2.6384 | +| 2.4493 | 1.2091 | 120 | 2.6262 | +| 2.4752 | 1.3098 | 130 | 2.6014 | +| 2.1968 | 1.4106 | 140 | 2.6068 | +| 2.538 | 1.5113 | 150 | 2.5980 | +| 2.4522 | 1.6121 | 160 | 2.5959 | +| 2.4397 | 1.7128 | 170 | 2.6017 | +| 2.4763 | 1.8136 | 180 | 2.5837 | +| 1.999 | 1.9144 | 190 | 2.5749 | +| 2.0956 | 2.0151 | 200 | 2.5696 | +| 2.1285 | 2.1159 | 210 | 2.6099 | +| 2.1804 | 2.2166 | 220 | 2.5931 | +| 2.0031 | 2.3174 | 230 | 2.5913 | +| 2.094 | 2.4181 | 240 | 2.5875 | +| 2.2214 | 2.5189 | 250 | 2.5639 | +| 2.0745 | 2.6196 | 260 | 2.5723 | +| 2.3377 | 2.7204 | 270 | 2.5750 | +| 1.9967 | 2.8212 | 280 | 2.5710 | +| 2.1091 | 2.9219 | 290 | 2.5694 | +| 2.0384 | 3.0227 | 300 | 2.5606 | +| 1.9828 | 3.1234 | 310 | 2.5971 | +| 2.1608 | 3.2242 | 320 | 2.5857 | +| 1.9558 | 3.3249 | 330 | 2.5793 | +| 2.0719 | 3.4257 | 340 | 2.5769 | +| 1.8055 | 3.5264 | 350 | 2.5804 | +| 2.0445 | 3.6272 | 360 | 2.5758 | +| 2.0795 | 3.7280 | 370 | 2.5924 | +| 2.073 | 3.8287 | 380 | 2.5745 | +| 2.0314 | 3.9295 | 390 | 2.5697 | +| 2.0928 | 4.0302 | 400 | 2.5731 | +| 1.9158 | 4.1310 | 410 | 2.5942 | +| 2.054 | 4.2317 | 420 | 2.5846 | +| 1.8497 | 4.3325 | 430 | 2.5963 | +| 1.8353 | 4.4332 | 440 | 2.5943 | +| 1.9786 | 4.5340 | 450 | 2.5891 | +| 1.9003 | 4.6348 | 460 | 2.5914 | +| 1.9248 | 4.7355 | 470 | 2.5876 | +| 2.1843 | 4.8363 | 480 | 2.5873 | +| 1.9193 | 4.9370 | 490 | 2.5873 | + + +### Framework versions + +- Transformers 4.41.2 +- Pytorch 2.3.0+cu121 +- Datasets 2.19.2 +- Tokenizers 0.19.1 diff --git a/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/config.json b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/config.json new file mode 100644 index 0000000000000000000000000000000000000000..5d0ce183dddc8769c4e51fd23d8ea605c44f37d6 --- /dev/null +++ 
b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/config.json @@ -0,0 +1,59 @@ +{ + "_name_or_path": "allenai/led-base-16384", + "activation_dropout": 0.0, + "activation_function": "gelu", + "architectures": [ + "LEDForConditionalGeneration" + ], + "attention_dropout": 0.0, + "attention_window": [ + 1024, + 1024, + 1024, + 1024, + 1024, + 1024 + ], + "bos_token_id": 0, + "classif_dropout": 0.0, + "classifier_dropout": 0.0, + "d_model": 768, + "decoder_attention_heads": 12, + "decoder_ffn_dim": 3072, + "decoder_layerdrop": 0.0, + "decoder_layers": 6, + "decoder_start_token_id": 2, + "dropout": 0.1, + "early_stopping": true, + "encoder_attention_heads": 12, + "encoder_ffn_dim": 3072, + "encoder_layerdrop": 0.0, + "encoder_layers": 6, + "eos_token_id": 2, + "id2label": { + "0": "LABEL_0", + "1": "LABEL_1", + "2": "LABEL_2" + }, + "init_std": 0.02, + "is_encoder_decoder": true, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_2": 2 + }, + "length_penalty": 2.0, + "max_decoder_position_embeddings": 1024, + "max_encoder_position_embeddings": 16384, + "max_length": 512, + "min_length": 100, + "model_type": "led", + "no_repeat_ngram_size": 3, + "num_beams": 2, + "num_hidden_layers": 6, + "pad_token_id": 1, + "torch_dtype": "float32", + "transformers_version": "4.41.2", + "use_cache": false, + "vocab_size": 50265 +} diff --git a/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/generation_config.json b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..78684a9d428f24a5caa2d261f5e23abc377f2794 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/generation_config.json @@ -0,0 +1,8 @@ +{ + "_from_model_config": true, + "bos_token_id": 0, + "decoder_start_token_id": 2, + "eos_token_id": 2, + "pad_token_id": 1, + "transformers_version": "4.41.2" +} diff --git a/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/merges.txt b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/merges.txt new file mode 100644 index 0000000000000000000000000000000000000000..226b0752cac7789c48f0cb3ec53eda48b7be36cc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/merges.txt @@ -0,0 +1,50001 @@ +#version: 0.2 +Ġ t +Ġ a +h e +i n +r e +o n +Ġt he +e r +Ġ s +a t +Ġ w +Ġ o +e n +Ġ c +i t +i s +a n +o r +e s +Ġ b +e d +Ġ f +in g +Ġ p +o u +Ġa n +a l +a r +Ġt o +Ġ m +Ġo f +Ġ in +Ġ d +Ġ h +Ġan d +i c +a s +l e +Ġt h +i on +o m +l l +en t +Ġ n +Ġ l +s t +Ġ re +v e +Ġ e +r o +l y +Ġb e +Ġ g +Ġ T +c t +Ġ S +i d +o t +Ġ I +u t +e t +Ġ A +Ġ is +Ġ on +i m +a m +o w +a y +a d +s e +Ġth at +Ġ C +i g +Ġf or +a c +Ġ y +v er +u r +Ġ u +l d +Ġs t +Ġ M +' s +Ġ he +Ġ it +at ion +it h +i r +c e +Ġy ou +i l +Ġ B +Ġw h +o l +Ġ P +Ġw ith +Ġ 1 +t er +c h +Ġa s +Ġw e +Ġ ( +n d +i ll +Ġ D +i f +Ġ 2 +a g +er s +k e +Ġ " +Ġ H +e m +Ġc on +Ġ W +Ġ R +he r +Ġw as +Ġ r +o d +Ġ F +u l +at e +Ġa t +r i +p p +o re +ĠT he +Ġs e +u s +Ġp ro +Ġh a +u m +Ġa re +Ġd e +a in +an d +Ġo r +ig h +es t +is t +a b +r om +Ġ N +t h +Ġc om +Ġ G +u n +o p +0 0 +Ġ L +Ġn ot +es s +Ġe x +Ġ v +re s +Ġ E +e w +it y +an t +Ġb y +e l +o s +or t +o c +q u +Ġf rom +Ġha ve +Ġs u +i ve +ou ld +Ġs h +Ġth is +n t +r a +p e +igh t +ar t +m ent +Ġa l +u st +en d 
+- - +al l +Ġ O +ac k +Ġc h +Ġ le +i es +re d +ar d +â Ģ +ou t +Ġ J +Ġa b +e ar +i v +al ly +ou r +o st +g h +p t +Ġp l +as t +Ġc an +a k +om e +u d +T he +Ġh is +Ġd o +Ġg o +Ġh as +g e +' t +Ġ U +r ou +Ġs a +Ġ j +Ġb ut +Ġw or +Ġa ll +e ct +Ġ k +am e +Ġw ill +o k +Ġw he +Ġthe y +id e +0 1 +f f +ic h +p l +t her +Ġt r +. . +Ġin t +i e +u re +ag e +Ġn e +i al +a p +in e +ic e +Ġm e +Ġo ut +an s +on e +on g +ion s +Ġwh o +Ġ K +Ġu p +Ġthe ir +Ġa d +Ġ 3 +Ġu s +at ed +ou s +Ġm ore +u e +o g +ĠS t +in d +i ke +Ġs o +im e +p er +. " +b er +i z +a ct +Ġon e +Ġsa id +Ġ - +a re +Ġyou r +c c +ĠT h +Ġc l +e p +a ke +ab le +i p +Ġcon t +Ġwh ich +i a +Ġ im +Ġab out +Ġwe re +ver y +u b +Ġh ad +Ġ en +Ġcom p +, " +ĠI n +Ġu n +Ġa g +i re +ac e +a u +ar y +Ġw ould +as s +r y +Ġ âĢ +c l +o ok +e re +s o +Ġ V +ig n +i b +Ġof f +Ġt e +v en +Ġ Y +i le +o se +it e +or m +Ġ2 01 +Ġre s +Ġm an +Ġp er +Ġo ther +or d +ul t +Ġbe en +Ġl ike +as e +an ce +k s +ay s +ow n +en ce +Ġd is +ct ion +Ġan y +Ġa pp +Ġs p +in t +res s +ation s +a il +Ġ 4 +ic al +Ġthe m +Ġhe r +ou nt +ĠC h +Ġa r +Ġ if +Ġthe re +Ġp e +Ġy ear +a v +Ġm y +Ġs ome +Ġwhe n +ou gh +ac h +Ġth an +r u +on d +ic k +Ġo ver +ve l +Ġ qu +Ċ Ċ +Ġs c +re at +re e +ĠI t +ou nd +p ort +Ġal so +Ġp art +f ter +Ġk n +Ġbe c +Ġt ime +en s +Ġ 5 +op le +Ġwh at +Ġn o +d u +m er +an g +Ġn ew +-- -- +Ġg et +or y +it ion +ing s +Ġj ust +Ġint o +Ġ 0 +ent s +o ve +t e +Ġpe ople +Ġp re +Ġit s +Ġre c +Ġt w +i an +ir st +ar k +or s +Ġwor k +ad e +o b +Ġs he +Ġo ur +w n +in k +l ic +Ġ1 9 +ĠH e +is h +nd er +au se +Ġh im +on s +Ġ [ +Ġ ro +f orm +i ld +at es +ver s +Ġon ly +o ll +Ġs pe +c k +e ll +am p +Ġa cc +Ġb l +i ous +ur n +f t +o od +Ġh ow +he d +Ġ ' +Ġa fter +a w +Ġat t +o v +n e +Ġpl ay +er v +ic t +Ġc ould +it t +Ġa m +Ġf irst +Ġ 6 +Ġa ct +Ġ $ +e c +h ing +u al +u ll +Ġcom m +o y +o ld +c es +at er +Ġf e +Ġbe t +w e +if f +Ġtw o +oc k +Ġb ack +) . +id ent +Ġu nder +rou gh +se l +x t +Ġm ay +rou nd +Ġp o +p h +is s +Ġd es +Ġm ost +Ġd id +Ġad d +j ect +Ġin c +f ore +Ġp ol +on t +Ġag ain +cl ud +ter n +Ġkn ow +Ġne ed +Ġcon s +Ġc o +Ġ . 
+Ġw ant +Ġse e +Ġ 7 +n ing +i ew +ĠTh is +c ed +Ġe ven +Ġin d +t y +ĠW e +at h +Ġthe se +Ġp r +Ġu se +Ġbec ause +Ġf l +n g +Ġn ow +ĠâĢ ĵ +c om +is e +Ġm ake +Ġthe n +ow er +Ġe very +ĠU n +Ġse c +os s +u ch +Ġe m +Ġ = +ĠR e +i ed +r it +Ġin v +le ct +Ġsu pp +at ing +Ġl ook +m an +pe ct +Ġ 8 +ro w +Ġb u +Ġwhe re +if ic +Ġyear s +i ly +Ġd iff +Ġsh ould +Ġre m +T h +I n +Ġe v +d ay +' re +ri b +Ġre l +s s +Ġde f +Ġr ight +Ġs y +) , +l es +00 0 +he n +Ġth rough +ĠT r +_ _ +Ġw ay +Ġd on +Ġ , +Ġ1 0 +as ed +Ġas s +ub lic +Ġre g +ĠA nd +i x +Ġ very +Ġin clud +ot her +Ġim p +ot h +Ġsu b +ĠâĢ Ķ +Ġbe ing +ar g +ĠW h += = +ib le +Ġdo es +an ge +r am +Ġ 9 +er t +p s +it ed +ation al +Ġb r +Ġd own +Ġman y +ak ing +Ġc all +ur ing +it ies +Ġp h +ic s +al s +Ġde c +at ive +en er +Ġbe fore +il ity +Ġwe ll +Ġm uch +ers on +Ġth ose +Ġsu ch +Ġ ke +Ġ end +ĠB ut +as on +t ing +Ġl ong +e f +Ġth ink +y s +Ġbe l +Ġs m +it s +a x +Ġo wn +Ġpro v +Ġs et +if e +ment s +b le +w ard +Ġsh ow +Ġp res +m s +om et +Ġo b +Ġs ay +ĠS h +t s +f ul +Ġe ff +Ġg u +Ġin st +u nd +re n +c ess +Ġ ent +ĠY ou +Ġgo od +Ġst art +in ce +Ġm ade +t t +st em +ol og +u p +Ġ | +um p +Ġhe l +ver n +ul ar +u ally +Ġa c +Ġm on +Ġl ast +Ġ2 00 +1 0 +Ġst ud +u res +ĠA r +sel f +ar s +mer ic +u es +c y +Ġm in +oll ow +Ġc ol +i o +Ġm od +Ġc ount +ĠC om +he s +Ġf in +a ir +i er +âĢ Ķ +re ad +an k +at ch +e ver +Ġst r +Ġpo int +or k +ĠN ew +Ġs ur +o ol +al k +em ent +Ġus ed +ra ct +we en +Ġs ame +ou n +ĠA l +c i +Ġdiff ere +Ġwh ile +---- ---- +Ġg ame +ce pt +Ġs im +.. . +Ġin ter +e k +Ġre port +Ġpro du +Ġst ill +l ed +a h +Ġhe re +Ġwor ld +Ġth ough +Ġn um +ar ch +im es +al e +ĠS e +ĠI f +/ / +ĠL e +Ġre t +Ġre f +Ġtr ans +n er +ut ion +ter s +Ġt ake +ĠC l +Ġcon f +w ay +a ve +Ġgo ing +Ġs l +u g +ĠA meric +Ġspe c +Ġh and +Ġbet ween +ist s +ĠD e +o ot +I t +Ġe ar +Ġagain st +Ġh igh +g an +a z +at her +Ġex p +Ġo p +Ġin s +Ġg r +Ġhel p +Ġre qu +et s +in s +ĠP ro +is m +Ġf ound +l and +at a +us s +am es +Ġp erson +Ġg reat +p r +Ġs ign +ĠA n +' ve +Ġs omet +Ġs er +h ip +Ġr un +Ġ : +Ġt er +ire ct +Ġf ollow +Ġd et +ic es +Ġf ind +1 2 +Ġm em +Ġc r +e red +e x +Ġex t +ut h +en se +c o +Ġte am +v ing +ou se +as h +at t +v ed +Ġsy stem +ĠA s +d er +iv es +m in +Ġle ad +ĠB l +c ent +Ġa round +Ġgo vern +Ġc ur +vel op +an y +Ġc our +al th +ag es +iz e +Ġc ar +od e +Ġl aw +Ġre ad +' m +c on +Ġre al +Ġsupp ort +Ġ1 2 +.. .. 
+Ġre ally +n ess +Ġf act +Ġd ay +Ġb oth +y ing +Ġs erv +ĠF or +Ġth ree +Ġw om +Ġm ed +od y +ĠThe y +5 0 +Ġex per +t on +Ġe ach +ak es +Ġc he +Ġc re +in es +Ġre p +1 9 +g g +ill ion +Ġg rou +ut e +i k +W e +g et +E R +Ġm et +Ġs ays +o x +Ġd uring +er n +iz ed +a red +Ġf am +ic ally +Ġha pp +ĠI s +Ġch ar +m ed +v ent +Ġg ener +i ent +p le +i et +re nt +1 1 +v es +pt ion +Ġ2 0 +form ation +Ġc or +Ġoff ic +ie ld +Ġto o +is ion +Ġin f +Ġ Z +t he +o ad +Ġp ublic +Ġpro g +r ic +* * +Ġw ar +Ġp ower +v iew +Ġf ew +Ġl oc +Ġdiffere nt +Ġst ate +Ġhe ad +' ll +Ġp oss +Ġst at +re t +ant s +Ġv al +Ġis s +Ġc le +i vers +an c +Ġex pl +Ġan other +Ġ Q +Ġa v +th ing +n ce +W h +Ġch ild +Ġs ince +i red +l ess +Ġl ife +Ġde velop +itt le +Ġde p +Ġp ass +ã ĥ +Ġt urn +or n +Th is +b ers +ro ss +ĠA d +Ġf r +Ġres p +Ġsec ond +o h +Ġ / +Ġdis c +Ġ & +Ġsomet hing +Ġcomp le +Ġ ed +Ġf il +Ġmon th +a j +u c +Ġgovern ment +Ġwith out +Ġle g +Ġd ist +Ġp ut +Ġqu est +an n +Ġpro t +2 0 +Ġne ver +i ence +Ġle vel +Ġar t +Ġth ings +Ġm ight +Ġeff ect +Ġcont ro +Ġc ent +Ġ1 8 +Ġall ow +Ġbel ie +ch ool +ot t +Ġinc re +Ġfe el +Ġres ult +Ġl ot +Ġf un +ot e +Ġt y +ere st +Ġcont in +Ġus ing +Ġb ig +2 01 +Ġas k +Ġb est +Ġ ) +I N +Ġo pp +3 0 +Ġnum ber +in ess +S t +le ase +Ġc a +Ġm ust +Ġd irect +Ġg l +Ġ < +Ġop en +Ġp ost +Ġcom e +Ġse em +ord ing +Ġwe ek +ate ly +it al +Ġe l +ri end +Ġf ar +Ġt ra +in al +Ġp ri +ĠU S +Ġpl ace +Ġfor m +Ġto ld +" : +ain s +at ure +ĠTr ump +Ġst and +Ġ # +id er +ĠF r +Ġne xt +Ġs oc +Ġp ur +Ġle t +Ġl ittle +Ġh um +Ġ i +r on +1 5 +Ġ1 5 +Ġcomm un +Ġm ark +ĠThe re +Ġw r +ĠTh at +Ġin formation +w ays +Ġb us +a pp +Ġinv est +m e +Ġh ard +ain ed +e ad +Ġim port +Ġapp ro +Ġt est +Ġt ri +Ġre st +os ed +Ġf ull +Ġc are +ĠS p +Ġc ase +O N +Ġs k +Ġl ess +Ġ + +Ġpart ic +ĠP l +ab ly +u ck +is hed +ch n +b e +Ġl ist +at or +Ġto p +Ġad v +ĠB e +ru ct +Ġd em +r ation +l ing +g y +re en +g er +Ġh ome +Ġle ft +Ġbet ter +Ġd ata +Ġ1 1 +Ġatt ack +Ġpro ble +l ine +ard s +Ġbe h +r al +ĠH ow +ĠS he +ar ge +Ġ -- +: // +Ġb ro +ĠP h +at s +Ġbu ild +w w +id ed +a im +as es +en cy +Ġm ain +in ed +Ġinclud ing +Ġ { +Ġg ot +Ġint erest +Ġke ep +Ġ X +Ġe as +ain ing +Ġcl ass +âĢ ¦ +ĠN o +Ġv ar +Ġsm all +amp le +A T +Ġ ide +ĠS o +Ġre ce +Ġpol it +Ġm ov +Ġpl an +Ġper cent +iv ing +Ġc amp +Ġp ay +1 4 +s c +is ed +Ġu nt +one y +pl oy +== == +Ġdid n +ĠI nd +el s +ert ain +Ġp os +__ __ +i ver +Ġpro cess +Ġprog ram +if ied +ĠR ep +1 6 +u ro +olog y +at ter +in a +Ġn ame +ĠA ll +Ġf our +Ġret urn +v ious +b s +Ġcall ed +Ġm ove +ĠS c +ir d +Ġgrou p +Ġb re +Ġm en +Ġc ap +t en +e e +Ġd ri +le g +he re +uth or +Ġp at +Ġcur rent +id es +Ġp op +t o +ent ion +Ġal ways +Ġm il +Ġwom en +Ġ1 6 +Ġo ld +iv en +ra ph +ĠO r +r or +ent ly +Ġn ear +ĠE x +re am +s h +Ġ1 4 +Ġf ree +iss ion +st and +ĠC on +al ity +us ed +1 3 +Ġdes ign +Ġch ange +Ġch ang +Ġb o +Ġv is +em ber +Ġb ook +read y +Ġk ill +2 5 +pp ed +Ġa way +Ġab le +Ġcount ry +Ġcon st +ar n +Ġor der +A R +i or +i um +or th +1 8 +ail able +Ġs w +Ġm illion +Ġ1 3 +at ic +t ed +ĠG o +Ġo per +en g +Ġth ing +aj or +con om +ĠCom m +Ġwh y +u red +ur al +Ġs chool +b y +ĠM ar +Ġa ff +Ġd ays +Ġan n +us h +an e +I f +e g +Ġpro f +Ġhe alth +ou th +B ut +ion al +. 
, +Ġs ol +Ġal ready +Ġ3 0 +Ġchar act +H e +Ġf riend +E S +i ans +ic le +' d +ĠO n +Ġle ast +Ġp rom +Ġd r +Ġh ist +it her +Ġ est +i qu +1 7 +s on +Ġte ll +Ġt alk +oh n +o int +le ction +A N +Ġunt il +au gh +Ġl ater +Ġ ve +Ġv iew +end ing +iv ed +Ġwor d +w are +Ġc ost +Ġen ough +Ġg ive +ĠUn ited +Ġte chn +are nt +O R +Ġp ar +ĠD r +Ġ201 6 +r ist +er ing +Ġ  +Ġl arge +s ide +ac y +cc ess +Ġw in +Ġimport ant +Ġ19 9 +Ġdoes n +Ġ1 7 +Ġbus iness +Ġcle ar +Ġre se +" , +ur y +Ġe qu +as ter +al f +ĠAmeric an +n ect +Ġex pect +ivers ity +Ġo cc +ĠF l +Ġk ind +Ġme an +Ġp ast +Ġde v +Ġb as +le t +ra ft +Ġor gan +Ġde l +Ġper form +Ġst ory +Ġse ason +ĠC ol +Ġcl aim +Ġc ame +Ġwith in +Ġl ine +Ġpro ject +ĠA t +Ġcontro l +end ed +ĠS y +Ġa ir +iz ation +Ġ * +le y +Ġm oney +id d +Y ou +f or +Ġfam ily +Ġm aking +Ġb it +Ġpol ice +Ġhapp en +Ġ vers +on y +u ff +ĠW hen +Ġs it +ide o +l f +is on +Ġsu re +g in +Ġapp ear +Ġl ight +Ġ es +o f +Ġw ater +Ġt imes +n ot +Ġg row +Ġcomp any +ĠT e +ow s +Ġm ar +our ce +i ol +ar m +b r +Ġex ample +Ġcon c +Ġf ore +ĠT o +p ro +E N +ri es +Ġ2 5 +ĠC an +ne y +Ġact ually +Ġe ver +ur ity +ak en +ap s +Ġt ax +Ġm ajor +am a +Ġof ten +er al +Ġhum an +Ġj ob +is ter +Ġav ailable +oc r +en n +a id +iv id +Ġrec ord +? " +Ġs ing +ĠA m +id ence +Ġnew s +st er +Ġe conom +Ġfollow ing +ĠB r +is ing +Ġh our +m ost +um ent +Ġse x +Ġdes c +Ġbec ome +ĠE d +Ġto ok +Ġha ving +Ġprodu ct +a ult +A s +ar ing +Ġme ans +Ġh op +un e +Ġch o +Ġc ertain +Ġn on +Ġde al +2 4 +le ment +oc i +en e +Ġs ide +ĠP r +ĠM ay +Ġre ason +u ed +c hed +ul ation +Ġe lect +Ġoffic ial +Ġposs ible +Ġh old +and s +ot s +Ġc ity +or ies +Ġse ver +Ġchild ren +Ġon ce +Ġact iv +l er +Ġn ight +it ions +ĠJ ohn +a pe +pl ay +Ġd one +Ġl im +Ġwork ing +ĠP res +or ld +e b +ĠC o +Ġb ody +ail s +ut es +ĠM r +Ġwhe ther +Ġa uthor +ro p +Ġpro per +Ġse en +) ; +Ġf ac +ĠS u +Ġcon d +it ing +Ġcour se +Ġ } +-------- -------- +a ign +Ġev ent +Ġen g +Ġp ot +Ġin tern +i am +Ġsh ort +em pt +ã Ĥ +ĠG od +il ar +8 0 +Ġor ig +I S +our n +ab ility +it ive +Ġd am +Ġ1 00 +Ġp ress +Ġdo ing +Ġprot ect +r ing +Ġthough t +Ġquest ion +re w +ĠW ar +Ġsever al +ĠSt ate +Ġg iven +Ġf und +ĠT w +Ġw ent +an ces +w ork +p or +m y +4 0 +Ġar g +art ment +ust om +Ġpol ic +Ġme et +Ġc reat +2 2 +ĠSt ates +Ġg ames +ra w +ut ure +Ġunder stand +ur s +ĠO b +l ish +s y +Ġm akes +Ġw on +ag on +Ġh tt +Ġl ove +ent ial +Ġcomple te +p ar +ĠI m +A L +Ġacc ount + ł +ore d +ver t +Ġ ident +Ġ201 5 +Ġother s +ĠM in +i ber +ver age +The re +ition al +d d +Ġpro b +Ġyou ng +Ġal ong +Ġacc ording +Ġy et +Ġmem bers +ĠWh at +o id +ĠM an +A nd +Ġam ong +a i +Ġem ploy +ĠR es +Ġ > +Ġinv ol +Ġl ow +a f +ĠC ar +Ġh ig +ĠO ne +ĠS ec +in ation +Ġlike ly +Ġan t +ag ed +ĠR uss +Ġb en +Ġre le +F or +b ack +ĠN ot +Ġpres ident +b all +Ġacc ess +ivid ual +ĠD em +ĠE uro +6 0 +Ġkn own +ir l +ĠG r +Ġear ly +u se +iet y +âĢ ĵ +Ġf ight +Ġs ent +Ġto day +Ġmark et +" . 
+Ġb ased +Ġstr ong +ur ther +Ġde b +m ber +Ġproble m +Ġde ath +Ġsoc ial +im ate +A S +ort un +Ġcamp aign +er y +C h +Ġe y +i ally +Ġm us +w h +p os +Ġ er +Ġsa f +Ġmonth s +ir on +Ġv iol +Ġf ive +Ġst re +Ġplay ers +in c +al d +y ear +a un +Ġsu ccess +Ġpres ent +ere nce +Ġ201 4 +Ġsu gg +Ġpartic ular +Ġtr y +Ġsugg est +ĠCh rist +on es +Ġpri v +2 3 +Ġc rit +Ġl and +Ġloc al +if y +2 9 +Ġa ut +E D +ĠG u +Ġm ult +Ġpolit ical +Ġask ed +Ġfor mer +it ter +ri pt +Ġcl ose +Ġp ract +ĠY ork +Ġget ting +Ġac ross +Ġcom b +Ġbelie ve +Ġ z +Ġto get +Ġtoget her +ĠC ent +ir c +Ġind ividual +ĠM c +2 7 +is k +ĠE ng +Ġf ace +Ġ2 4 +Ġval ue +Ġare a +e v +Ġw rit +ĠPres ident +Ġv ot +Ġke y +Ġm om +p ut +Ġany thing +Ġexper ience +att le +Ġm ind +a ff +om m +Ġf uture +g ed +Ġc ut +Ġto t +it ch +Ġv ideo +Ġinvest ig +Ġn et +ĠM y +r ict +i en +. ) +Ġimp ro +th ough +ward s +Ġcon nect +ĠM ed +sel ves +ens ive +m b +o ber +at ors +A n +Ġ5 0 +Ġre du +res ent +Ġab ove +Ġf re +ĠEuro pe +s w +Ġam ount +ĠA pp +Ġe ither +Ġmil it +Ġan al +Ġf ail +ĠE n +al es +Ġspec ial +Ġbl ack +I T +c her +Ġlook ing +Ġf ire +y n +Ġal most +o on +Ġstud y +Ġm iss +c hes +ro wn +Ġt re +Ġcommun ity +Ġmed ia +Ġf ood +Ġcom es +ĠUn iversity +Ġsing le +Wh at +u ly +Ġh alf +ag ue +h od +ĠRep ublic +Ġstart ed +Ġqu ick +ot o +b ook +Ġiss ue +it or +Ġel se +Ġcons ider +2 6 +ro du +Ġt aken +2 8 +9 9 +ĠW ith +Ġtr ue +Ġw a +Ġtr ad +Ġag o +Ġm ess +ie f +Ġadd ed +o ke +Ġb ad +Ġf av +3 3 +Ġsim ilar +as k +ĠD on +Ġcharact er +ort s +ĠH ouse +Ġreport ed +Ġty pe +v al +i od +ĠHow ever +Ġt arg +Ġent ire +pp ing +Ġhist ory +Ġl ive +ff ic +.... .... +ed eral +Ġtr ying +Ġdisc uss +ĠH ar +ac es +l ished +Ġse lf +os p +re st +Ġro om +el t +Ġf all +ol ution +Ġe t +Ġ x +Ġis n +Ġide a +b o +Ġs ound +ĠD ep +Ġsome one +ci ally +ull y +Ġf oc +Ġob ject +if t +ap er +Ġplay er +Ġr ather +Ġserv ice +as hing +ĠD o +ĠP art +ru g +m on +p ly +Ġm or +Ġnot hing +Ġprov ide +I C +un g +Ġpart y +Ġex ist +Ġm ag +7 0 +Ġr ul +Ġh ouse +Ġbeh ind +Ġhow ever +ĠW orld +Ġs um +Ġapp lic +Ġ ; +Ġfun ction +g r +ĠP ol +Ġfr ont +2 00 +Ġser ies +Ġt em +Ġty p +ill s +Ġo pt +Ġpoint s +Ġbel ow +itt ed +Ġspec ific +Ġ201 7 +um b +Ġr a +Ġpre vious +Ġpre t +re me +Ġc ustom +Ġcour t +ĠM e +Ġre pl +Ġwho le +g o +c er +Ġt reat +ĠA ct +Ġprob ably +Ġle arn +end er +ĠA ss +Ġvers ion +n ow +Ġche ck +ĠC al +R E +min ist +O n +our ces +Ġben ef +Ġd oc +Ġdet er +Ġen c +Ġsu per +Ġadd ress +Ġv ict +Ġ201 3 +Ġme as +t r +Ġf ield +W hen +Ġsign ific +u ge +Ġfe at +Ġcomm on +l oad +Ġbe gin +Ġbr ing +Ġa ction +er man +Ġdesc rib +Ġind ust +Ġwant ed +ri ed +m ing +Ġatt empt +4 5 +f er +Ġd ue +ress ion +# # +Ġsh all +Ġs ix +o o +Ġst ep +Ġp ub +Ġhim self +Ġ2 3 +Ġc op +Ġd est +Ġst op +A C +ib ility +Ġl ab +ic ult +Ġhour s +Ġcre ate +Ġf urther +ĠAmeric a +ĠC ity +Ġd ou +he ad +S T +ĠN orth +c ing +Ġn ational +u le +ĠIn st +Ġt aking +ĠQ u +ir t +Ġre d +Ġrese arch +v iron +ĠG e +Ġbre ak +an a +Ġsp ace +ater ial +Ġrec ent +ĠA b +Ġgener al +Ġh it +Ġper iod +Ġevery thing +ive ly +Ġph ys +Ġsay ing +an ks +Ġc ou +Ġc ult +ac ed +e al +u ation +Ġc oun +l u +Ġinclud e +Ġpos ition +ĠA fter +ĠCan ad +ĠE m +Ġim m +ĠR ed +Ġp ick +Ġcom pl +Ġm atter +re g +e xt +ang u +is c +o le +a ut +Ġcomp et +e ed +f ect +Ġ2 1 +ĠS en +ĠThe se +as ing +Ġcan not +Ġin it +Ġrel ations +ac hed +Ġb ar +Ġ4 0 +ĠT H +Ġ201 2 +Ġv ol +Ġg round +Ġsec urity +Ġup d +il t +3 5 +Ġconc ern +ĠJ ust +Ġwh ite +Ġseem s +ĠH er +pe cially +i ents +Ġann oun +Ġf ig +ight s +Ġst ri +l ike +id s +Ġs us +Ġw atch +Ġ â +Ġw ind +ĠC ont +Ġit self +Ġm ass +A l +y le +iqu e +ĠN ational +Ġab s +Ġp 
ack +Ġout side +Ġan im +Ġp ain +et er +Ġman ag +du ct +og n +Ġ ] +ĠSe pt +se c +o ff +ĠJ an +Ġf oot +ad es +Ġth ird +Ġm ot +Ġev idence +int on +Ġth reat +a pt +pl es +c le +Ġl o +Ġde cl +Ġit em +med i +Ġrep resent +om b +am er +Ġsignific ant +og raph +s u +Ġc al +i res +00 00 +I D +A M +Ġsim ply +Ġlong er +Ġf ile +O T +c he +S o +ate g +or g +ĠH is +Ġen er +Ġd om +Ġup on +il i +": " +Ġthem selves +Ġcom ing +Ġqu ite +Ġdiff icult +ĠB ar +il ities +re l +end s +c ial +6 4 +Ġwom an +ra p +y r +Ġne cess +ip s +Ġte xt +Ġrequ ire +Ġmilit ary +Ġre view +Ġresp ons +7 5 +Ġsub ject +Ġinst ead +Ġiss ues +Ġg en +" ," +Ġmin utes +Ġwe ap +r ay +am ed +t ime +b l +H ow +Ġc ode +ĠS m +Ġhig her +ĠSt e +r is +Ġp age +Ġstud ents +ĠIn tern +Ġmet hod +ĠA ug +ĠP er +ĠA g +Ġpolic y +ĠS w +Ġex ec +Ġac cept +um e +rib ut +Ġword s +Ġfin al +Ġchang es +ĠDem ocr +Ġfriend s +Ġres pect +Ġe p +Ġcomp an +iv il +Ġdam age +** ** +og le +viron ment +Ġne g +ent al +Ġa p +Ġtot al +iv al +! " +l im +Ġneed s +Ġag re +Ġdevelop ment +Ġa ge +ip le +2 1 +Ġresult s +ĠA f +S h +Ġg un +ĠOb ama +ro ll +Ġ @ +Ġright s +ĠB rit +Ġrun ning +Ġwas n +Ġp ort +Ġr ate +Ġpret ty +Ġtarg et +Ġsa w +Ġc irc +Ġwor ks +ic ro +al t +o ver +ww w +Th at +l ier +Ġevery one +ud e +Ġp ie +idd le +ra el +Ġr ad +Ġbl ock +Ġw alk +T o +ã ģ +n es +ĠA ust +a ul +ro te +ĠS outh +ess ion +op h +Ġshow s +Ġs ite +Ġj o +Ġr isk +cl us +l t +Ġin j +id ing +ĠS pe +Ġch all +ir m +Ġ2 2 +itt ing +st r +Ġh y +L E +ke y +Ġbe gan +at ur +ashing ton +l am +ĠD av +b it +Ġs ize +ĠP ar +3 8 +ourn al +f ace +Ġdec ision +Ġl arg +Ġj ud +re ct +Ġcontin ue +ĠO ct +ove red +ĠI nt +==== ==== +Ġp arent +ĠW ill +Ġeas y +Ġd rug +ang er +Ġs ense +Ġd i +id ay +Ġener gy +ist ic +Ġass oci +ar ter +ob al +e ks +ĠE l +ur ch +Ġg irl +o e +it le +Ġ2 8 +ĠC he +Ġrequ est +Ġso on +Ġh ost +k y +Ġst ates +om es +Ġm aterial +le x +Ġmom ent +Ġan sw +on se +Ġes pecially +Ġn orm +Ġserv ices +p ite +r an +Ġro le +4 4 +) : +Ġc red +C l +____ ____ +Ġm at +Ġl og +ĠCl inton +O U +Ġoff ice +Ġ2 6 +Ġch arg +Ġtr ack +m a +Ġhe art +Ġb all +Ġperson al +Ġbuild ing +n a +s et +b ody +ĠBl ack +Ġincre ase +itt en +Ġneed ed +3 6 +3 2 += " +Ġl ost +Ġbec ame +Ġgrou ps +ĠM us +Ġw rote +ĠP e +Ġpro p +j oy +à © +ĠWh ite +Ġde ad +. ' +Ġhtt p +Ġwe bs +O S +Ġins ide +Ġwr ong +Ġstat ement +Ġ ... 
+y l +Ġfil m +Ġmus ic +Ġsh are +ific ation +Ġre lease +Ġfor ward +Ġst ay +Ġcomp ut +it te +s er +Ġorig inal +Ġc ard +Ġc and +Ġd iv +at ural +Ġfav or +O M +Ġc ases +us es +Ġse ction +Ġle ave +g ing +ov ed +ĠW ashington +3 9 +ĠG l +Ġrequ ired +act ion +ap an +o or +it er +ĠK ing +Ġcount ries +ĠG erman +ll ing +Ġ2 7 +3 4 +Ġquest ions +Ġpr im +Ġc ell +Ġsh oot +Ġany one +ĠW est +Ġaff ect +ep end +Ġon line +ĠIs rael +ĠSept ember +Ġab ility +Ġcont ent +is es +Ġre ve +Ġl aun +Ġind ic +Ġfor ce +c ast +Ġso ld +av ing +f l +Ġso ft +Ġcompan ies +ce ed +Ġart icle +Ġa ud +Ġre v +Ġed uc +Ġplay ing +0 5 +Ġhe ld +ct or +Ġrele ased +Ġf ederal +3 7 +Ġad minist +Ġinter view +Ġinst all +Ġrece ived +Ġs ource +u k +P h +Ġser ious +Ġcre ated +Ġc ause +Ġim medi +Ġdef in +u el +ĠDep artment +ct ions +ĠC our +ĠN ow +z e +it es +it ution +Ġl ate +Ġspe ak +n ers +Ġleg al +ar i +ĠC or +Ġwe eks +Ġmod el +Ġp red +Ġex act +B C +ĠB y +IN G +os ing +Ġt akes +Ġreg ard +Ġopp ortun +Ġpr ice +Ġ19 8 +ĠA pr +f ully +Ġor d +Ġproble ms +ru ction +h am +ĠC ount +le ge +Ġlead ers +E T +le v +Ġde ep +olog ical +es e +h aps +ĠS ome +Ġp ers +Ġcont ract +Ġrelations hip +s p +ou d +Ġb ase +4 8 +m it +A d +anc ial +Ġcons um +Ġpot ential +Ġl angu +re m +et h +Ġrel ig +ress ed +6 6 +Ġl ink +Ġl ower +ay er +ĠJ une +Ġf em +un t +er c +ur d +Ġcont act +Ġ ill +Ġm other +Ġest ab +h tt +ĠM arch +ĠB ro +ĠCh ina +Ġ2 9 +Ġs qu +Ġprov ided +Ġa verage +as ons +Ġ201 1 +Ġex am +l in +5 5 +n ed +Ġper fect +Ġt ou +al se +u x +Ġbu y +Ġsh ot +Ġcol lect +Ġph ot +Ġplay ed +Ġsur pr +Ġofficial s +Ġsim ple +av y +Ġindust ry +Ġhand s +g round +Ġp ull +Ġr ound +Ġus er +Ġr ange +u ary +Ġpriv ate +op s +e es +Ġw ays +ĠM ich +Ġve h +Ġex cept +Ġter ms +im um +pp er +I ON +ore s +ĠDr agon +ou l +Ġd en +Ġperform ance +Ġb ill +c il +4 7 +Ġen vironment +Ġex c +ad d +Ġwor th +Ġp ict +Ġch ance +Ġ201 8 +b or +Ġspe ed +ict ion +Ġal leg +ĠJ apan +at ory +re et +Ġm atch +ĠI I +Ġst ru +ord er +Ġst e +Ġl iving +Ġst ruct +in o +Ġse par +her n +Ġresp onse +Ġen joy +Ġv ia +A D +um ents +ace book +Ġmem ber +ib r +iz ing +Ġto ol +ĠM on +ĠWh ile +h ood +ĠA ng +ĠD ef +Ġoff er +T r +a ur +Ġturn ed +ĠJ uly +d own +an ced +Ġrec ently +ĠE ar +Ġc e +ĠSt ar +ĠC ong +rough t +Ġbl ood +Ġhop e +Ġcom ment +ain t +Ġar ri +il es +Ġpartic ip +ough t +ri ption +0 8 +4 9 +Ġg ave +Ġse lect +Ġkill ed +sy ch +Ġgo es +i j +Ġc oll +Ġimp act +at ives +ĠS er +0 9 +ĠAug ust +Ġb oy +d e +ĠD es +Ġf elt +U S +Ġexpect ed +Ġim age +ĠM ark +cc ording +o ice +E C +ĠM ag +en ed +h old +ĠP ost +Ġpre vent +N o +Ġinvol ved +Ġey es +Ġquick ly +A t +un k +Ġbeh av +Ġ ur +Ġl ed +c ome +e y +Ġcand id +Ġear lier +Ġfoc us +et y +P ro +led ge +ix ed +ill ed +Ġpop ular +A P +Ġset t +l ight +Ġvar ious +in ks +Ġlevel s +Ġro ad +ell ig +ab les +he l +itte e +ĠG ener +y pe +Ġhe ard +ic les +Ġm is +Ġus ers +ĠS an +Ġimpro ve +Ġf ather +Ġse arch +The y +v il +Ġprof ess +Ġkn ew +Ġl oss +Ġev ents +6 5 +Ġb illion +0 7 +0 2 +ĠNew s +ĠA M +Ġco ver +w here +ens ion +Ġb ott +Ġare as +en ces +op e +ĠTw itter +a el +Ġget s +ĠGo ogle +Ġs n +i ant +Ġv ote +Ġnear ly +Ġinclud ed +Ġrec ogn +z z +m m +al ed +Ġhappen ed +0 4 +Ġh ot +Ġwho se +Ġc ivil +Ġsu ff +o es +it iz +ĠSy ri +Ġresp ond +Ġh on +Ġfeat ures +Ġeconom ic +ĠApr il +r im +Ġtechn ology +Ġo ption +ag ing +Ġpur ch +R e +Ġl at +ch ie +is l +Ġrec omm +u f +Ġtr aining +Ġeffect s +Ġf ast +Ġ201 0 +Ġocc ur +Ġwebs ite +Ġem ail +Ġs ens +e ch +Ġo il +Ġinf lu +Ġcurrent ly +ĠS ch +ĠAd d +Ġgo al +Ġsc ient +Ġcon v +1 00 +em y +Ġdec ided +Ġtra vel +Ġm ention +L L +0 3 +Ġe lection +Ġph one +Ġlook s +Ġsit 
uation +Ġc y +Ġh or +b ed +ĠCour t +a ily +av es +Ġqu ality +ĠCom p +w ise +Ġt able +Ġst aff +ĠW ind +et t +Ġtri ed +ide red +Ġadd ition +Ġb ox +Ġl ack +ar ily +Ġw ide +Ġm id +Ġbo ard +ys is +Ġant i +h a +Ġd ig +en ing +Ġd ro +C on +6 8 +Ġsl ow +b ased +se qu +Ġp ath +E x +ak er +Ġwork ed +Ġp en +Ġeng ine +Ġlook ed +ĠSu per +ĠS erv +Ġvict im +U n +Ġproper ty +Ġint rodu +Ġexec ut +ĠP M +L e +Ġcol or +ĠM ore +Ġ6 0 +Ġnet work +Ġd ate +c ul +id ge +Ġext ra +3 1 +Ġs le +6 7 +Ġw ond +Ġreport s +j ust +ĠAust ral +Ġcap ital +Ġen s +Ġcomm and +Ġallow ed +Ġpre p +Ġca pt +h ib +Ġnum bers +ch an +Ġf air +m p +om s +Ġre ach +W ith +t ain +Ġbro ad +Ġcou ple +ec ause +ly ing +ĠF eb +Ġsc reen +Ġl ives +Ġpri or +ĠCong ress +A r +Ġappro ach +Ġe mer +ar ies +ĠD is +s erv +ĠN e +Ġbu ilt +c ies +Ġre pe +Ġrul es +for ce +ĠP al +Ġfin ancial +Ġcons idered +ĠCh ar +n ces +ĠI S +Ġb rought +Ġb i +i ers +ĠS im +O P +Ġproduct s +Ġvis it +Ġdoc ument +Ġcon duct +Ġcomplete ly +in ing +ĠCal if +ib ly +Ġwr itten +ĠT V +em ents +Ġd raw +O ne +Ġpub lished +Ġsec ret +r ain +he t +ĠF acebook +ond ay +ĠU p +Ġsex ual +Ġth ous +ĠP at +Ġ ess +Ġstand ard +Ġar m +g es +ect ion +Ġf ell +Ġfore ign +an i +ĠFr iday +Ġreg ular +in ary +Ġincre ased +Ġus ually +Ġdem on +Ġd ark +Ġadd itional +ro l +ĠO f +Ġprodu ction +! ! +und red +Ġintern ational +id ents +ĠF ree +rou p +Ġr ace +Ġm ach +Ġh uge +A ll +le ar +ove mber +Ġto wn +Ġatt ention +ĠO ff +y ond +ĠThe n +f ield +Ġter ror +ra z +ĠB o +Ġmeet ing +ĠP ark +Ġar rest +Ġf ear +Ġa w +ĠV al +or ing +' , +Ġext reme +ar r +Ġwork ers +A fter +Ġ3 1 +n et +am ent +Ġdirect ly +Ġpop ulation +ub e +ĠOct ober +ĠI N +ĠJan uary +5 9 +ĠDav id +Ġc ross +ce mber +ĠF irst +Ġmess age +ir it +Ġn ation +Ġp oll +is ions +Ġansw er +n y +is ode +Ġcar ry +ĠRuss ia +Ġhe ar +eng th +ro y +Ġn atural +in ally +Ġdo g +m itted +Ġtr ade +Ġsub st +Ġmult iple +ĠAf ric +Ġf ans +Ġs ort +Ġgl obal +ic ation +ĠW ed +ar a +Ġa chie +Ġlangu age +ve y +Ġt al +Ġnecess ary +Ġdet ails +Ġs en +ĠS und +ĠRe g +ĠR ec +0 6 +Ġs il +ress ive +Ġmed ical +un ch +orn ia +Ġu nd +f ort +oc ks +ĠM onday +ues day +c raft +7 7 +ur t +Ġ ver +ĠH ill +Ġrece ive +Ġmor ning +es tern +Ġb ank +Ġs at +ir th +ĠH igh +Ġdev ice +ĠTH E +ĠCent er +Ġsaf e +Ġp le +ĠCanad a +Ġsystem s +Ġass ist +Ġsur v +Ġb attle +ĠS oc +vert is +S he +Ġp aper +Ġgrow th +Ġc ast +S c +Ġpl ans +ll ed +Ġpart s +Ġw all +Ġmove ment +Ġpract ice +im ately +Ġdis play +Ġsomet imes +om p +ĠP aul +ĠY es +k ing +5 8 +o ly +Ġs on +Ġav oid +ok es +ĠJ ew +Ġto wards +as c +Ġ // +ĠK ore +Ġtalk ing +Ġcor rect +Ġsp ent +ic ks +i able +e ared +Ġter m +Ġwant s +om ing +Ġ ut +Ġdou b +Ġfor ces +Ġp lease +6 9 +ĠN ovember +at form +ond on +Ġon es +Ġimmedi ately +ĠRuss ian +ĠM et +Ġde g +Ġparent s +C H +ĠAmeric ans +al y +ĠM od +Ġsh own +Ġcond itions +Ġst uff +Ġre b +ĠY our +Ġinclud es +n own +ĠS am +Ġexper ien +m ission +ĠE ven +augh t +Ġannoun ced +ĠRepublic an +Ġdeter min +Ġdescrib ed +ĠCount y +( ) +Ġdo or +Ġchang ed +Ġne igh +ĠH ere +Ġcle an +Ġp an +ĠDe cember +ĠEurope an +ir ing +ap ter +Ġcl ub +ĠT uesday +Ġp aid +ĠN et +Ġattack s +Ġcharact ers +Ġal one +Ġdirect or +d om +Ġ3 5 +Ġl oad +Ġr out +ĠCalif ornia +Ġfin ally +Ġr ac +Ġcont r +Ġexact ly +res h +p ri +ĠIs lam +Ġn ature +Ġcare er +Ġlat est +Ġcon vers +ĠS l +p ose +ci ent +ĠIn c +iv ity +8 8 +ĠA tt +ĠM or +nes day +Ġwe ight +k en +Ġnot e +Ġteam s +Ġ \ +air s +ĠG reen +Ġh undred +on ent +Ġstre ng +Ġcons ist +ic ated +Ġreg ul +Ġl ic +ast ic +Ġt en +urs day +ellig ence +ous ly +ĠU K +B I +Ġcost s +Ġind epend +ĠA P +Ġnorm al +Ġh om +Ġob vious +Ġs we 
+Ġst ar +Ġread y +ac her +Ġimp lement +g est +Ġs ong +ĠG et +ĠL ab +Ġinterest ing +us ing +Ġg iving +ĠSund ay +Ġet c +Ġm iddle +Ġrem ember +r ight +os ition +ut ions +Ġm ax +4 6 +Ġyour self +Ġdem and +Ġtreat ment +Ġd anger +ĠC ons +Ġgu y +ĠBrit ish +Ġphys ical +Ġrel ated +Ġrem ain +Ġcould n +Ġref er +Ġc itiz +b ox +EN T +bo ard +Ġin n +I G +er o +ĠSt reet +osp ital +ren ch +cher s +Ġst ra +O L +ag er +ĠA N +Ġeas ily +I A +en ge +in y +Ġcl os +ock ed +Ġus es +ĠC oun +I m +u ild +? ? +m ore +Ġan g +Ġwr ite +ol ute +5 7 +Ġlead er +Ġread ing +< / +Ġaut om +est s +4 3 +Ġleg isl +ĠG old +Ġdesign ed +ĠS T +ĠLe g +a res +Ġbe aut +ĠT ex +Ġappear s +Ġstru gg +ĠR om +Ġ 00 +Ġcho ice +Ġparticular ly +ĠF rom +op er +ĠL ondon +ann ed +Ġallow s +ob ile +Ġdiffere nce +âĢ ¢ +ĠV iew +ĠWed nesday +Ġal though +Ġrel ative +Ġapplic ation +ate ver +Ġare n +Ġmy self +Ġim ag +Ġdis e +Ġsoc iety +Ġfre qu +ĠEng lish +Ġpo or +ĠD ay +Ġwrit ing +Ġse ven +Ġstart ing +Ġb ud +Ġpr int +ĠTr ans +uf act +ĠSt ud +n ew +Ġcr im +Ġg ives +Ġco ol +a e +i ance +ĠGener al +Ġthink ing +Ġsa ve +Ġlim ited +ĠPart y +Ġmean ing +p en +ow ers +ĠJ ack +E M +Ġn ice +ru pt +Ġg as +Ġe ight +Ġfe et +Ġeff ort +Ġ ign +ic it +B l +co in +Ġop in +Ġbr ain +Wh ile +he st +ĠTh ursday +Ġwould n +augh ter +Ġtou ch +le ments +Ġstud ies +Ġcent er +c ont +or ge +Ġcomput er +Ġinvestig ation +P l +or ks +Ġ200 8 +Ġincre asing +Ġst ore +Ġcom ments +Ġb al +m en +Ġdo ll +Ġl iber +Ġw ife +Ġlaw s +atur day +it ness +Ġmod ern +ĠS k +Ġadminist ration +Ġopportun ity +Ġs al +Ġpower ful +M y +Ġclaim s +ĠEar th +ord s +Ġt itle +Ġes c +n ame +N ot +om en +Ġbe yond +Ġc amer +Ġse ll +it ute +ear ch +Ġapp l +im ent +4 2 +ĠAr t +Ġun f +Ġviol ence +ur g +ĠE ast +Ġcomp ared +Ġopt ions +Ġthrough out +Ġv s +ig r +. [ +ac hes +7 8 +Ġfil es +F L +E L +ar ian +ĠJ ames +ĠA ir +an ch +Ġdet ail +Ġpie ce +P S +Ġn amed +Ġeduc ation +Ġdri ve +Ġitem s +Ġstud ent +ic ed +: : +ic o +Ġth row +Ġsc ene +Ġcomple x +Ġ200 9 +Ġpre c +ĠB re +7 9 +Ġcon cept +Ġstat us +am ing +Ġd ied +Ġknow ledge +Ġbegin ning +O D +ru ary +Ġcertain ly +Ġgu ys +Ġsl ight +in n +ound s +Ġf ine +Ġf at +ic ations +Ġper haps +ĠA nt +Ġinc ome +Ġhtt ps +Ġmajor ity +port s +st on +Ġgreat er +Ġfe ed +ent ially +Ġsaf ety +Ġun ique +and om +Ġg one +Ġshow ed +Ġhist or +Ġcoun ter +i us +id a +Ġlead ing +i pe +Ġs end +ĠDon ald +er ve +Ġdef ense +ines e +Ġy es +ĠF ire +ĠMus lim +ra q +Ġcontin ued +os h +Ġprov ides +Ġpr ison +ĠP re +Ġhapp y +Ġeconom y +Ġtr ust +ag s +ĠG ame +Ġweap ons +um an +ĠC le +it ation +Ġanal ysis +ĠT imes +Ġsc ience +- > +Ġfig ure +Ġdis app +ent y +Ġsoft ware +Ġu lt +Ġoffic ers +N ew +I s +Ġrem ains +ĠInd ia +Ġp sych +ri ef +Ġc at +es c +Ġob serv +Ġst age +ĠD ark +Ġent er +ch ange +Ġpass ed +Ġdes pite +ĠO ut +Ġmov ie +r s +Ġv oice +m ine +ĠPl ay +Ġto ward +ĠT er +Ġreg ion +Ġval ues +or ters +Ġm ount +Ġoffic er +ĠO ther +b an +Ġh ous +w ood +ro om +I V +ĠS un +se e +ĠO ver +ro g +9 0 +Ġl ay +ĠT ur +a wn +Ġpress ure +ĠS ub +Ġbook s +ed om +ĠS and +A A +ag o +Ġre asons +f ord +Ġactiv ity +U T +N ow +ĠSen ate +ce ll +n ight +Ġcall s +in ter +Ġlet ter +ĠR ob +ĠJ e +Ġcho ose +ĠL aw +G et +B e +Ġro b +Ġtyp es +Ġpl atform +Ġqu arter +R A +ĠT ime +Ġmay be +ĠC r +9 5 +p re +Ġmov ing +Ġl if +Ġgo ld +Ġs om +Ġpat ients +Ġtr uth +ĠK e +ur ance +ant ly +m ar +Ġchar ge +ĠG reat +Ġce le +---------------- ---------------- +Ġro ck +ro id +an cy +Ġcred it +a ud +B y +ĠE very +Ġmov ed +ing er +rib ution +Ġn ames +Ġstra ight +ĠHe alth +ĠW ell +Ġfe ature +Ġr ule +Ġsc he +in ated +ĠMich ael +ber g +4 1 +il ed +b and +Ġcl ick +ĠAng el 
+on ents +Â Ń +ĠI raq +ĠS aturday +Ġa ware +p art +Ġpat tern +O W +ĠL et +Ġgr ad +ign ed +Ġassoci ated +Ġst yle +n o +i ation +a ith +il ies +Ġst ories +ur ation +Ġindividual s +ĠâĢ ¦ +m iss +ĠAss oci +ish ing +ab y +Ġsum mer +ĠB en +Ġ3 2 +Ġar ch +ut y +ĠTex as +h ol +Ġfull y +Ġm ill +Ġfollow ed +ĠB ill +ĠInd ian +ĠSec ret +ĠB el +ĠFeb ruary +Ġjob s +Ġseem ed +ĠGo vern +i pped +Ġreal ity +Ġl ines +Ġp ark +Ġmeas ure +ĠO ur +I M +Ġbro ther +Ġgrow ing +Ġb an +Ġest im +Ġc ry +ĠS chool +Ġme chan +ĠO F +ĠWind ows +Ġr ates +ĠO h +Ġpos itive +Ġcult ure +ist ics +ic a +Ġh ar +y a +ite ly +i pp +Ġm ap +en cies +ĠWill iam +I I +ak ers +5 6 +ĠM art +ĠR em +Ġal tern +it ude +Ġco ach +row d +D on +Ġk ids +Ġj ournal +Ġcor por +Ġf alse +Ġwe b +Ġsle ep +Ġcont ain +Ġst o +Ġb ed +iver se +ĠR ich +ĠCh inese +Ġp un +Ġme ant +k nown +Ġnot ice +Ġfavor ite +a ven +Ġcond ition +Ġpur pose +) ) +Ġorgan ization +Ġchall eng +Ġman ufact +Ġsus p +ĠA c +Ġcrit ic +un es +uc lear +Ġm er +vent ion +Ġ8 0 +Ġm ist +ĠU s +ĠT or +htt p +ol f +Ġlarg er +Ġadv ant +Ġrese ar +Ġact ions +m l +Ġke pt +Ġa im +, ' +c ol +Ġbenef its +if ying +Ġact ual +ĠIntern ational +Ġveh icle +Ġch ief +Ġeff orts +ĠLe ague +ĠM ost +Ġwa it +Ġad ult +Ġover all +Ġspe ech +Ġhigh ly +Ġfem ale +Ġer ror +Ġeffect ive +5 4 +Ġenc our +w ell +Ġfail ed +Ġcons erv +Ġprogram s +Ġt rou +Ġa head +5 00 +vertis ement +I P +ĠF ound +p ir +Ġ % +Ġcr ime +and er +Ġloc ation +ĠI ran +Ġbehav ior +az ing +Ġr are +Ġem b +Ġca used +Ġsh ip +Ġact ive +Ġcont ribut +Ġg reen +Ġac qu +Ġref lect +ven ue +Ġf irm +Ġb irth +] . +Ġclear ly +Ġem ot +Ġag ency +ri age +Ġmem ory +9 8 +S A +ĠSe e +ac ing +C C +Ġbig gest +Ġr ap +Ġbas ic +Ġb and +e at +Ġsus pect +ĠM ac +Ġ9 0 +m ark +ist an +Ġsp read +am s +k i +as y +ra v +ĠR ober +Ġdemon str +r ated +Ġabs olute +Ġpl aces +Ġim pl +ibr ary +Ġc ards +Ġdest roy +Ġv irt +ve re +Ġapp eared +y an +p oint +Ġbe g +Ġtem per +s pe +ant ed +ear s +ĠD irect +Ġl ength +Ġbl og +am b +Ġint eg +Ġres ources +ac c +if ul +Ġsp ot +Ġfor ced +Ġthous ands +ĠMin ister +Ġqu al +ĠF rench +at ically +Ġgener ally +Ġdr ink +Ġth us +I L +od es +Ġappro pri +ĠRe ad +Ġwh om +Ġey e +Ġcol lege +Ġ4 5 +ire ction +Ġens ure +Ġapp arent +id ers +Ġrelig ious +Ġmin or +ol ic +Ġt ro +ĠWh y +rib ute +m et +Ġprim ary +Ġdevelop ed +Ġpe ace +Ġsk in +st e +av a +Ġbl ue +Ġfam ilies +Ġ ir +Ġapp ly +Ġin form +ĠSm ith +C T +i i +Ġlim it +Ġres ist +........ ........ 
+um n +Ġconf lic +Ġtw e +ud d +ĠT om +Ġl iter +qu e +b on +Ġha ir +Ġevent ually +Ġp us +Ġhelp ed +Ġag g +or ney +ĠApp le +Ġf it +ĠS ur +Ġpre m +Ġs ales +Ġsecond s +Ġstreng th +Ġfeel ing +¿ ½ +Ġt our +Ġknow s +o om +Ġex erc +Ġsom ew +ï ¿½ +> > +Ġsp okes +Ġide as +Ġreg ist +so ft +ĠD el +ĠP C +Ġpro pos +Ġlaun ch +Ġbott om +T H +ĠP lease +v est +it z +ĠIn ter +Ġsc ript +Ġr at +ar ning +Ġ il +ĠJ er +ĠA re +Ġwh atever +ok en +ci ence +Ġmod e +Ġag ree +Ġs ources +Ġinit ial +Ġrest rict +Ġwond er +us ion +## ## +ĠS il +vil le +Ġb urn +t w +as ion +Ġ £ +Ġn or +u ing +Ġre ached +Ġs un +Ġc ateg +ig ration +Ġc ook +Ġprom ot +Ġm ale +Ġcl imate +Ġf ix +Ġalleg ed +U R +all ed +Ġim ages +C ont +ot a +Ġschool s +i os +Ġd rop +Ġst ream +ĠM o +Ġprevious ly +al ing +Ġp et +Ġdou ble +Ġ( @ +ann el +Ġdef ault +t ies +Ġr ank +ĠD ec +ĠCoun cil +Ġweap on +Ġst ock +Ġanal y +ĠSt r +Ġpict ure +ĠPol ice +f erence +Ġcent ury +Ġcitiz ens +Ġon to +Ġexp and +Ġhe ro +ĠS ol +Ġw ild +Ġupd ate +Ġcustom ers +r ont +d ef +Ġl ik +Ġcrim inal +ĠChrist ian +S P +7 6 +Ġle aving +Ġother wise +ĠD ist +Ġbas is +5 2 +5 3 +ic ip +ĠB er +Ġrecomm end +Ġfl oor +Ġc rowd +ol es +Ġ7 0 +Ġcent ral +ĠE v +Ġd ream +Ġdown load +Ġconf ir +ĠTh om +Ġwind ow +Ġhapp ens +Ġun it +Ġt end +Ġs pl +Ġbec omes +Ġfight ing +Ġpred ict +ĠP ress +ĠP ower +Ġhe avy +ak ed +Ġf an +or ter +ate gy +B A +iz es +Ġsp end +H ere +Ġ200 7 +Ġad op +ĠH am +Ġfoot ball +ĠP ort +od ay +5 1 +amp ions +Ġtrans fer +h t +Ġ3 8 +ter m +ac ity +Ġb ur +] , +tern al +r ig +b ut +Ġthere fore +ĠB ecause +res p +re y +Ġm ission +S ome +Ġnot ed +Ġass um +Ġdise ase +Ġed it +Ġprog ress +r d +ĠB rown +oc al +Ġadd ing +Ġra ised +ĠAn y +Ġt ick +Ġsee ing +ĠPe ople +Ġagre ement +Ġser ver +Ġw at +Ġdeb ate +Ġsupp osed +il ing +Ġlarg est +Ġsuccess ful +ĠP ri +ĠDemocr atic +Ġj ump +ĠSyri a +Ġown ers +Ġoff ers +Ġshoot ing +Ġeff ic +se y +Ġha ven +ver se +te red +ĠL ight +im al +ĠB ig +Ġdef end +Ġbe at +Ġrecord s +% ) +Ġsc en +Ġemploy ees +Ġdev ices +he m +Ġcom mer +ĠM ex +Ġbenef it +ĠPro f +Ġil leg +Ġsur face +ĠAl so +Ġh arm +ing ly +w ide +ĠA lex +Ġsh ut +ĠC ur +Ġl ose +p m +Ġchall enge +se mb +Ġst ation +Ġint elligence +Ġacc ur +ĠFl or +Ġrequ ires +ĠM al +b um +Ġh ospital +Ġsp irit +Ġoff ered +Ġprodu ce +ĠComm un +Ġcreat ing +Ġcr is +s pect +Ġend ed +Ġd aily +Ġvot ers +land s +i as +i h +on a +Ġsm art +ĠOff ice +ĠL ord +ri al +ĠIntern et +Ġcirc um +Ġextreme ly +' . 
+Ġopin ion +ĠM il +Ġg ain +B S +ĠF in +y p +Ġuse ful +Ġbud get +Ġcom fort +is f +Ġback ground +el ine +Ġep isode +Ġen emy +Ġtri al +Ġestab lish +d ate +ĠC ap +Ġcontin ues +Ġshow ing +ĠUn ion +w ith +Ġpost ed +ĠSy stem +Ġe at +ri an +Ġr ise +ĠGerman y +il s +Ġsign ed +Ġv ill +Ġgr and +m or +ĠEng land +Ġproject s +um ber +Ġconf erence +z a +Ġrespons ible +ĠAr ab +Ġlearn ed +âĢĶ âĢĶ +i pping +ĠGe orge +O C +Ġreturn ed +ĠAustral ia +Ġb rief +Q u +Ġbr and +ill ing +ab led +Ġhig hest +Ġtr ain +ĠComm ission +wh ile +Ġn om +cept ion +Ġm ut +ĠBl ue +Ġinc ident +v ant +8 6 +ĠI D +Ġn uclear +7 4 +ĠL ike +ĠR E +ĠM icro +l i +m ail +Ġcharg es +8 9 +Ġad just +ad o +Ġear th +N A +Ġpr ices +P A +Ġd raft +Ġrun s +Ġcandid ate +ens es +Ġmanag ement +ĠPh il +ĠM iss +Ġte ach +g ram +Ġunderstand ing +a it +ic ago +A dd +ĠE p +sec ut +Ġsepar ate +Ġinst ance +Ġe th +Ġun less +**** **** +ĠF ore +in ate +Ġoper ations +S p +Ġf aith +g ar +ĠCh urch +ron ic +Ġconf ig +os ure +Ġactiv ities +Ġtrad itional +Ġ3 6 +Ġd irection +Ġmach ine +Ġsur round +Ġp ush +un ction +ĠE U +Ġeas ier +Ġarg ument +G B +Ġm icro +Ġsp ending +iz ations +Ġthe ory +ad ow +Ġcall ing +ĠL ast +Ġd er +Ġinflu ence +Ġcomm it +Ġph oto +Ġun c +ist ry +g n +ast e +ack s +Ġdis p +ad y +d o +ĠG ood +Ġ ` +Ġw ish +Ġreve aled +Âł Âł +l ig +Ġen force +ĠComm ittee +Ġche m +Ġmil es +Ġinterest ed +Ġsol ution +ic y +in ct +Ġ- > +ĠD et +Ġrem oved +Ġcomp ar +e ah +Ġpl ant +ĠS ince +Ġachie ve +Ġadvant age +Ġslight ly +b ing +Ġpl aced +u nder +201 5 +ĠM ad +Ġt im +os es +Ġc ru +ĠR ock +Ġmost ly +Ġneg ative +Ġset ting +Ġprodu ced +Ġm ur +Ġconnect ion +ĠM er +Ġdri ver +Ġexecut ive +Ġass ault +Ġb orn +ĠV er +t ained +Ġstruct ure +Ġredu ce +Ġdec ades +Ġd ed +u ke +ĠM any +idd en +Ġle ague +S e +Ġjo in +Ġdis co +Ġd ie +c ks +act ions +Ġass ess +ag n +Ġgo als +our s +I R +Ġsen ior +ill er +m od +ip ment +oc ol +u y +ĠQ ue +Ġpart ies +ir gin +Ġle arning +it able +Ġstre et +Ġcamer a +A pp +Ġsk ills +b re +c ious +Ġcele br +ĠFr anc +Ġexist ing +Ġwill ing +l or +Ġ id +ĠSp ace +Ġcrit ical +ĠL a +ortun ately +Ġser ve +Ġc old +Ġspec ies +T S +Ġanim als +ĠB ay +Ġold er +ĠU nder +est ic +ĠT re +Ġte acher +Ġpre fer +v is +Ġth read +ĠM att +Ġmanag er +ãĥ » +Ġprofess ional +ĠV ol +Ġnot es +The se +ul a +Ġf resh +ent ed +u zz +ed y +clus ion +ĠR el +Ġdoub t +E O +Ġopen ed +ĠB it +Ad vertisement +Ġgu ess +ĠU N +Ġse qu +Ġexpl ain +ott en +Ġatt ract +ak s +Ġstr ing +Ġcont ext +oss ible +ĠRepublic ans +Ġsol id +Ġc ities +Ġask ing +Ġr andom +u ps +ur ies +ar ant +dd en +g l +ĠFlor ida +Ġdep end +ĠSc ott +Ġ3 3 +Ġi T +ic on +Ġmention ed +Ġ2 000 +Ġclaim ed +Ġdefin itely +ul f +Ġc ore +Ġopen ing +ĠCon st +wh ich +ĠT ra +A G +7 2 +Ġbelie ved +ad a +Ġ4 8 +ĠSec urity +yr ight +ĠP et +ĠL ou +Ġhold ing +======== ======== +Ġ ice +Ġb row +Ġauthor ities +h ost +w ord +Ġsc ore +ĠD iv +Ġcell s +Ġtrans l +Ġneigh bor +Ġrem ove +u ct +Ġdist rict +ĠA ccording +Ġwor se +Ġconcern s +Ġpresident ial +Ġpolic ies +ĠH all +7 3 +Ġh us +A Y +Ġ200 6 +ĠJ ud +Ġindepend ent +ĠJust ice +ili ar +pr int +igh ter +Ġprotect ion +z en +Ġsu dden +h ouse +ĠJ es +P R +ĠIn f +Ġb ul +Ġ _ +ĠServ ice +ĠP R +Ġstr ategy +ff ect +Ġgirl s +Ġmiss ing +oy al +ĠTe am +ul ated +Ġd at +Ġpolit ics +ab or +A ccording +Ġspe ll +Ġg raph +ort hern +T C +A b +Ġlab or +is her +Ġk ick +ĠiT unes +Ġstep s +pos es +Ġsmall er +E n +ber t +Ġro ll +Ġresear chers +Ġcl osed +Ġtrans port +Ġlaw y +________ ________ +ĠCh icago +Ġas pect +Ġn one +Ġmar riage +9 6 +Ġe lements +ĠF re +ĠS al +Ġd ram +F C +t op +e qu +Ġhe aring +Ġsupport ed +Ġtest ing +co hol +Ġmass 
ive +Ġst ick +Ġgu ard +is co +ph one +F rom +How ever +Ġb order +Ġcop y +ograph y +l ist +7 1 +Ġown er +cl ass +ru it +r ate +ĠO nce +Ġdig ital +Ġt ask +ER S +Ġinc red +t es ++ + +ĠFr ance +Ġb reat +ow l +Ġiss ued +ĠW estern +Ġdet ect +Ġpart ners +Ġsh ared +ĠC all +Ġcan cer +ac he +rib e +Ġexpl ained +Ġhe at +{ " +Ġinvest ment +ĠB ook +Ġw ood +Ġtool s +ĠAl though +Ġbelie f +Ġcris is +Ġg e +ĠM P +Ġoper ation +ty pe +~ ~ +g a +Ġcont ains +ant a +Ġexp ress +ĠG roup +ĠJ ournal +k a +Ġam b +ĠUS A +Ġfind ing +Ġfund ing +h ow +Ġestab lished +ide os +Ġdeg ree +Ġdanger ous +ang ing +Ġfre edom +pp ort +out hern +Ġch urch +Ġc atch +ĠTw o +Ġpres ence +ĠGu ard +U p +Ġauthor ity +ĠPro ject +Ġbut ton +Ġcon sequ +Ġval id +Ġwe ak +Ġstart s +Ġref erence +ĠM em +" ) +U N +or age +ĠO pen +Ġcol lection +y m +g ency +Ġbeaut iful +ro s +Ġtell s +Ġwa iting +n el +Ġprov iding +ĠDemocr ats +Ġd aughter +Ġm aster +Ġpur poses +ĠJapan ese +Ġequ al +Ġturn s +Ġdoc uments +Ġwatch ing +R es +Ġr an +201 4 +Ġre ject +ĠKore a +Ġvictim s +Le vel +ere nces +Ġw itness +Ġ3 4 +Ġre form +com ing +Ġocc up +Ġc aught +Ġtra ffic +ad ing +Ġmod els +ar io +Ġserv ed +Ġb atter +u ate +ĠSecret ary +Ġagre ed +Ġtr uly +yn am +ĠR et +Ġun its +ĠRes earch +h and +az ine +ĠM ike +Ġvar iety +ot al +Ġam azing +Ġconfir med +Ġentire ly +Ġpurch ase +Ġe lement +Ġc ash +Ġdeter mine +D e +Ġc ars +ĠW all +â ĸ +Ġview s +Ġdrug s +Ġdep artment +ĠSt ep +u it +Ġ3 9 +as ure +ĠCl ass +Ġc overed +ĠB ank +Ġme re +u ana +Ġmult i +Ġm ix +Ġun like +lev ision +Ġsto pped +Ġs em +ĠG al +ul es +Ġwe l +ĠJohn son +l a +Ġsk ill +Ġbec oming +ri e +Ġappropri ate +f e +ell ow +ĠPro t +ul ate +oc ation +Ġweek end +od ies +Ġsit es +Ġanim al +ĠT im +Ġsc ale +Ġcharg ed +Ġinst ruct +ill a +Ġmethod s +Ġc ert +Ġjud ge +ĠH el +Ġdoll ars +Ġstand ing +ĠS qu +Ġdeb t +l iam +Ġdri ving +ĠS um +ĠEd ition +Ġal bum +and on +I F +ĠU k +6 3 +ad er +Ġcommer cial +es h +ĠGovern ment +Ġdisc overed +Ġout put +ĠHill ary +ĠCar ol +Ġ200 5 +Ġab use +anc ing +Ġsw itch +Ġann ual +T w +Ġst ated +ag ement +in ner +Ġdem ocr +Ġres idents +Ġallow ing +Ġfact ors +od d +Ġf uck +em ies +Ġoccur red +ot i +Ġn orth +ĠP ublic +Ġinj ury +Ġins urance +C L +oll y +ã Ģ +Ġrepe ated +Ġar ms +ang ed +Ġconst ruction +Ġf le +P U +ic ians +Ġfor ms +ĠMc C +ant ic +Ġm ental +p ire +Ġequ ipment +Ġf ant +Ġdiscuss ion +Ġregard ing +k in +ar p +Ġch air +og ue +Ġpro ceed +ĠI d +O ur +Ġmur der +M an +Ġ4 9 +as p +Ġsupp ly +Ġin put +Ġwe alth +liam ent +Ġpro ced +or ial +ĠSt at +ĠN FL +hen s +ĠInst itute +Ġput ting +ourn ament +et ic +Ġloc ated +Ġk id +er ia +r un +Ġpr inc +Ġ ! 
+go ing +ĠB et +Ġcl ot +Ġtell ing +Ġprop osed +i ot +or ry +Ġfund s +g ment +ĠL ife +Ġb aby +ĠB ack +Ġsp oke +Im age +Ġear n +ĠA T +g u +Ġex change +ĠL in +ov ing +Ġp air +M ore +az on +Ġarrest ed +Ġkill ing +c an +ĠC ard +y d +Ġident ified +Ġm obile +Ġthan ks +ony m +ĠF orm +Ġhundred s +ĠCh ris +ĠC at +Ġtre nd +h at +ĠA v +om an +Ġelect ric +ĠW il +S E +O f +Ġrest aur +ot ed +Ġtr ig +Ġn ine +Ġb omb +Wh y + ¯ +Ġco verage +Ġapp eal +ĠRober t +ĠS up +Ġfin ished +Ġfl ow +Ġdel iver +Ġcal cul +Ġphot os +Ġph il +Ġpie ces +Ġapp re +k es +Ġr ough +D o +Ġpart ner +Ġconcern ed +Ġ3 7 +ĠG en +C ol +ct ors +Ġ= > +st ate +Ġsuggest ed +ĠFor ce +C E +Ġher self +ĠPl an +w orks +o oth +ren cy +Ġcor ner +Ġhus band +Ġintern et +ĠA ut +em s +os en +ĠAt l +g en +Ġbal ance +6 2 +Ġsound s +te xt +Ġar r +ov es +Ġmill ions +Ġrad io +Ġsat isf +ĠD am +M r +G o +S pe +Ġcomb at +r ant +ĠG ree +Ġf uel +Ġdist ance +Ġtest s +Ġdec re +ĠE r +Ġman aged +D S +Ġt it +Ġmeas ures +ĠL iber +Ġatt end +as hed +ĠJ ose +ĠN ight +d it +ĠN ov +ĠE nd +out s +Ġgener ation +Ġadv oc +y th +Ġconvers ation +ĠS ky +act ive +ce l +ri er +ĠFr ank +Ġg ender +Ġcon cent +Ġcar ried +and a +ĠV irgin +Ġarri ved +ic ide +ad ed +Ġfail ure +Ġmin imum +le ts +Ġwor st +Ġkeep ing +Ġint ended +Ġilleg al +Ġsub sc +Ġdetermin ed +Ġtri p +Y es +Ġra ise +Ġ ~ +Ġfeel s +Ġpack age +ĠJ o +h i +201 6 +re al +Ġf ra +Ġsy mb +M e +uck y +p ret +ĠK h +ĠEd it +ĠWe b +em ic +ĠCol or +Ġjust ice +I nt +Ġfar m +ck now +" > +el ess +Ġredu ced +Ġ5 00 +x x +ĠR ad +ĠW ood +Ġcl in +Ġhy p +il er +ur a +k ins +8 5 +6 1 +ĠThe ir +ĠM ary +Ġs an +Ġno vel +ĠWh o +Ġcap acity +Ġimp ossible +Ġpl ays +Ġmin ister +ij uana +ic ate +ĠS et +Ġf ram +Ġ ing +Ġcommun ities +ĠF BI +it a +Ġb on +Ġstr ateg +Ġinterest s +l ock +g ers +m as +ĠAN D +Ġconflic t +Ġrequire ments +Ġs ac +Ġoper ating +in i +rel ated +Ġcomm itted +Ġrelative ly +Ġs outh +¯ ¯ +Ġaff ord +Ġident ity +Ġdec isions +Ġacc used +pl ace +Ġvict ory +o ch +i at +N ame +C om +t ion +ed s +Ġsee k +Ġt ight +ĠIm ages +Ġinit i +Ġhum ans +Ġfam iliar +Ġaud ience +Ġintern al +vent ure +Ġs ides +ĠT O +Ġd im +Ġcon clud +Ġapp oint +Ġenforce ment +ĠJ im +ĠAssoci ation +Ġcircum st +ĠCanad ian +Ġjo ined +Ġdiffere nces +ĠL os +Ġprot est +Ġtw ice +w in +Ġgl ass +ars h +ĠAr my +Ġexp ression +Ġdec ide +Ġplan ning +an ia +Ġhand le +ĠMicro soft +ĠN or +Ġmax imum +ĠRe v +Ġse a +Ġev al +Ġhel ps +re f +Ġb ound +Ġm outh +Ġstand ards +Ġcl im +ĠC amp +ĠF ox +cl es +Ġar my +ĠTe chn +ack ing +x y +S S +Ġ4 2 +Ġbu g +ĠUk rain +ĠM ax +ĠJ ones +ĠSh ow +l o +Ġplan et +Ġ7 5 +Ġwin ning +Ġf aster +Ġspe ct +Ġbro ken +T R +Ġdef ined +Ġhealth y +Ġcompet ition +htt ps +ĠIs land +ĠF e +Ġannoun ce +ĠC up +ĠInst ead +Ġcl ient +Ġposs ibly +se ction +ock et +l ook +Ġfin ish +Ġcre w +Ġres erv +Ġed itor +Ġh ate +Ġs ale +Ġcontro vers +Ġp ages +w ing +Ġnum er +Ġopp osition +Ġ200 4 +Ġref uge +Ġfl ight +Ġap art +ĠL at +A meric +ĠAfric a +Ġapplic ations +ĠPal est +ĠB ur +Ġg ar +ĠSoc ial +Ġup gr +Ġsh ape +Ġspe aking +ans ion +a o +ĠS n +Ġwor ry +ĠBrit ain +P lease +rou d +Ġh un +Ġintrodu ced +Ġd iet +I nd +ĠSec ond +Ġfun ctions +ut s +ĠE ach +ĠJe ff +Ġst ress +Ġaccount s +Ġgu arant +ĠAn n +ed ia +Ġhon est +Ġt ree +ĠAfric an +ĠB ush +} , +Ġs ch +ĠOn ly +Ġf if +ig an +Ġexerc ise +ĠEx p +Ġscient ists +Ġlegisl ation +ĠW ork +ĠS pr +à Ĥ +ĠH uman +Ġ è +Ġsur vey +Ġr ich +ri p +Ġmain tain +Ġfl o +Ġleaders hip +st ream +ĠIslam ic +Ġ 01 +ĠCol lege +Ġmag ic +ĠPr ime +Ġfig ures +201 7 +ind er +x ual +ĠDe ad +Ġabsolute ly +Ġfour th +Ġpresent ed +resp ond +rib le +Ġal cohol +at o +ĠD E +por ary +Ġgr ab 
+Ġvar i +Ġqu ant +ĠPh oto +Ġpl us +r ick +ar ks +Ġaltern ative +Ġp il +Ġappro x +th at +Ġobject s +ĠR o +ĠAnd roid +Ġsignificant ly +ĠR oad +k ay +R ead +av or +Ġa cknow +ĠH D +ĠS ing +O r +ĠM ont +Ġun s +pro f +Ġneg oti +ĠAr ch +ik i +Ġte levision +ĠJew ish +Ġcomm ittee +Ġmot or +Ġappear ance +Ġs itting +Ġstri ke +ĠD own +com p +ĠH ist +Ġf old +ac ement +ĠLou is +Ġbel ong +ĠâĢ ¢ +Ġm ort +Ġprep ared +Ġ6 4 +ĠM aster +Ġind eed +ĠD en +Ġre nt +T A +our ney +ar c +S u +9 7 +Ġadv ice +Ġchang ing +Ġlist ed +Ġlaun ched +is ation +ĠP eter +is hes +Ġl ived +ĠM el +ĠSup reme +ĠF ederal +Ġ) ; +ruct ure +Ġset s +Ġphil os +u ous +Ġ ł +Ġappl ied +ĠN OT +Ġhous ing +ĠM ount +Ġo dd +Ġsu st +D A +ffic ient +Ġ ? +ol ved +Ġp owers +Ġth r +Ġrem aining +ĠW ater +L C +Ġca uses +ãģ ® +Ġman ner +ad s +Ġsuggest s +Ġend s +stand ing +f ig +ĠD un +id th +Ġg ay +Ġter min +ĠAngel es +M S +Ġscient ific +Ġco al +ap ers +b ar +ĠThom as +Ġsy m +ĠR un +th is +P C +igr ants +Ġmin ute +ĠDist rict +cell ent +Ġle aves +Ġcomple ted +am in +Ġfoc used +Ġmon itor +Ġveh icles +M A +ĠM ass +ĠGr and +Ġaffect ed +itution al +Ġconst ruct +Ġfollow s +Ġt on +re ens +Ġh omes +ĠE xt +ĠLe vel +r ast +ĠI r +Ġel im +Ġlarge ly +ĠJ oe +Ġvot es +all s +Ġbusiness es +ĠFound ation +ĠCent ral +Ġy ards +Ġmaterial s +ul ner +Ġgu ide +Ġclos er +um s +Ġsp orts +ed er +J ust +Ġtax es +8 4 +ĠO ld +Ġdec ade +ol a +Ġv ir +Ġdro pped +Ġdel ay +it ect +Ġsec ure +ste in +le vel +Ġtre ated +Ġfil ed +ain e +Ġv an +Ġm ir +Ġcol umn +ict ed +e per +Ġro t +Ġcons ult +Ġent ry +Ġmar ijuana +ĠD ou +Ġapparent ly +ok ing +clus ive +Ġincre ases +an o +Ġspecific ally +Ġte le +ens ions +Ġrelig ion +ab ilities +Ġfr ame +ĠN ote +ĠLe e +Ġhelp ing +Ġed ge +ost on +Ġorgan izations +à ĥ +ĠB oth +hip s +Ġbig ger +Ġbo ost +ĠSt and +Ġro w +ul s +ab ase +Ġr id +L et +are n +ra ve +Ġst ret +P D +Ġv ision +Ġwe aring +Ġappre ci +Ġa ward +ĠU se +Ġfact or +w ar +ul ations +) ( +Ġg od +Ġter rit +Ġpar am +ast s +8 7 +Ġen emies +ĠG ames +F F +Ġacc ident +W ell +ĠMart in +T ER +Ġat h +ĠHe ll +Ġfor g +Ġve ter +ĠMed ic +f ree +Ġst ars +Ġexp ensive +Ġac ad +ra wn +ĠW he +Ġl ock +Ġform at +Ġsold iers +s m +Ġag ent +Ġrespons ibility +or a +ĠS cience +Ġrap id +Ġt ough +ĠJes us +Ġbelie ves +M L +Ġwe ar +le te +Ãĥ ÃĤ +ĠD ri +Ġcomm ission +ĠB ob +O h +ap ed +Ġwar m +ÃĥÃĤ ÃĥÃĤ +Ġ200 3 +ort ion +Ġhas n +ust er +Ġun ivers +ĠI ll +Ġk ing +olog ies +9 4 +ĠT em +ĠM os +Ġpat ient +ĠMex ico +ce an +ĠDe ath +ĠSand ers +y ou +ĠC ast +ĠComp any +pt y +Ġhappen ing +F P +ĠB attle +Ġb ought +A m +M od +U s +ut ers +ĠC re +ĠTh ose +Ġ4 4 +is er +Ġs oul +ĠT op +ĠHar ry +ĠA w +Ġse at +ff ee +Ġrev olution +Ġ( " +ĠD uring +et te +Ġr ing +Ġoff ensive +Ġreturn s +Ġv ideos +Ġdis cl +Ġfam ous +en ced +ĠS ign +ĠR iver +Ġ3 00 +P M +ĠB us +ĠC H +Ġcandid ates +ard en +Ġpercent age +Ġvis ual +Ġthan k +Ġtrou ble +ner gy +Ġ200 1 +Ġpro ve +ash ion +Ġen h +ĠL ong +U M +Ġconnect ed +Ġposs ibility +O ver +Ġexper t +Ġl ibrary +art s +ĠDirect or +Ġfell ow +9 2 +ir ty +Ġd ry +Ġsign s +ĠL ove +Ġqu iet +f oot +Ġp ure +ĠH un +Ġf illed +ph as +ĠE lect +end ment +ĠEx pl +Ġun able +n s +m o +Ġv ast +ob e +Ġident ify +app ing +ĠCarol ina +g ress +Ġpro te +Ġf ish +Ġcircumst ances +raz y +ĠPh ot +Ġb odies +ĠM ur +Ġdevelop ing +ĠA R +Ġexperien ced +Ġsubst ant +ĠBo ard +es ome +Ġdom estic +Ġcomb ined +ĠP ut +Ġchem ical +ĠCh ild +Ġpo ol +ĠC y +Ġe gg +c ons +st ers +Ġh urt +Ġmark ets +Ġconserv ative +Ġsupp orters +Ġag encies +id el +O b +ur b +Ġ4 3 +ĠDef ense +y e +ĠA p +du le +Ġtemper ature +Ġconduct ed +ĠCh ief +Ġpull ed +Ġf ol +L ast +ont o +os is 
+V ER +D es +ĠP an +F irst +Ġadv ance +Ġlic ense +r ors +ĠJ on +Ġimag ine +Ġhe ll +Ġf ixed +Ġinc or +os ite +ĠL og +ick en +] : +Ġsurpr ise +h ab +Ġc raft +ol t +ĠJ ul +Ġd ial +Ġrele vant +Ġent ered +Ġlead s +ĠA D +ĠCle an +Ġpict ures +ess or +Ġal t +Ġpay ing +P er +ĠMark et +Ġupd ates +am ily +ĠT ype +ĠH ome +Ġ5 5 +semb ly +rom e +8 3 +Ġgreat est +Ġhe ight +Ġhe av +ain ts +Ġlist en +as er +ĠS H +Ġcap able +ac le +Ġpers pect +in ating +Ġoff ering +ry pt +ĠDe velop +ab in +r c +Ġbr ight +al ty +ar row +Ġsupp l +ind ing +ack ed +gy pt +ĠAn other +p g +ĠVirgin ia +ĠL u +Ġpl anned +Ġp it +Ġswe et +T ype +ĠD i +Ġtyp ically +ĠFranc isco +Ġpro spect +ĠD an +Ġte en +re es +Ġsc hed +Ġh ol +Ġsc r +Ġlot s +l ife +Ġnews p +Ġfor get +ĠN one +ĠM iddle +ĠR yan +ed d +Ġse vere +Ġsu it +ll er +9 3 +Ġcor respond +Ġexpl os +u ations +Ġfl ag +g ame +r id +Ġpr in +ĠD ata +Ġde ploy +ĠEn ter +su it +gh an +ĠM en +Ġthough ts +Ġmat ters +Ġad apt +ĠA ri +Ġf ill +Ġfor th +Ġs am +Ġ4 1 +Ġpay ment +ĠH or +Ġsp ring +du c +Ġl osing +Ġbring ing +F O +al a +Ġdist ribution +he red +b our +ĠIsrael i +om a +Ġcomb ination +Ġpl enty +V E +C an +ĠH aw +Ġper man +ĠSpe cial +Ġto w +Ġsee king +Ġexam ples +Ġclass es +c r +Ġbe er +Ġmov es +ĠI P +ĠK n +Ġpan el +E ven +Ġproper ly +Ġr is +Ġpl ug +Ġestim ated +E very +Ġdef ensive +ag raph +Ġpre gn +Ġinst it +ĠV ict +Ġvol ume +Ġpos itions +Ġl inks +ĠPro gram +ĠWe ek +ag ues +Ġtrans form +k er +ĠC EO +Ġc as +Ġopp onent +Ġtwe et +ĠC ode +Ġsh op +Ġf ly +Ġtal ks +Ġb ag +Ph one +Ġa id +Ġpl ants +Ġ6 5 +Ġatt orney +ar ters +qu est +ĠMag ic +Ġbeg ins +Ġmy ster +Ġenvironment al +Ġst orage +N N +Ġm arg +Ġs ke +Ġmet al +ell y +Ġord ered +Ġrem ained +Ġl oved +Ġprom pt +Ġupd ated +Ġexper ts +Ġwalk ing +Ġan cient +Ġperform ed +AT E +Ġne ither +i ency +Ġmanufact ure +ĠP ak +Ġselect ed +Ġm ine +Ġult imately +Ġexpl an +Ġlab el +ĠServ ices +ribut ed +Tr ump +Ġsy n +ĠU lt +S C +Ġme at +Ġg iant +ĠW ars +ĠO N +Ġad m +Ġinter pret +Ġeven ing +Ġev il +ĠB oston +ĠW ild +Ġ à +ĠBit coin +ĠAm azon +D r +ĠIn formation +Ġobvious ly +Ġadv anced +Ph oto +ol ar +Ġwe ather +Ġsymb ol +Ġso le +Ġpot entially +ost er +Ġorig inally +m un +3 00 +az e +ess ions +Ġde ck +Ġst ood +Ġyou th +ĠB ern +R ep +ĠT est +Ġbas ically +ot ic +Ġinvol ve +ol it +ly n +S ee +Ġair craft +Ġconf irm +E W +Ġmess ages +ĠRich ard +Ġk it +Ġpro hib +Ġv ulner +is ters +Ġexist ence +Ġturn ing +ĠS P +Ġdes ire +Ġfl at +Ġm ent +se ason +ang es +Ġneighbor hood +ĠL ake +AT ION +Ġpoint ed +b ur +Ġinn ov +uc ks +U L +Ġprofess or +Ġexp ressed +A B +ic ious +Ġ200 2 +ĠDe v +Ġs ession +Ġb are +s en +Ġdis s +ĠC ath +ĠP ass +ĠP oint +Ġdo ctor +or row +ail ed +ĠR ub +ĠD C +ĠChar l +p erson +Ġwrit er +igh ters +ure au +Ġob lig +Ġrecord ed +Ġbro ke +Ġord ers +il ty +Ġmot ion +in ity +l aw +ad ium +Ġimm igration +Ġcontr ast +Ġb att +Ġex cellent +Ġtechn ical +am i +Ġt un +Ġcl oud +ĠY ear +ge on +Ġcre ation +Ġstr ange +Ġa uth +Ġfor t +b orn +Ġext ent +ĠT oday +ĠCl ub +Ġr ain +Ġs ample +Ġaccept ed +Ġt act +Ġf ired +ĠS on +Ġstand s +Ġb oot +Ġ4 7 +Ġstat ements +Ġvers ions +Ġse lling +ound ed +Ġ199 0 +Ġwere n +ĠW atch +Ġexper iment +P ost +Ġret ail +ul ed +In st +un te +ãĥ ¼ +Ġdep art +Ġb ond +i very +om pl +Ġre action +ĠSyri an +ĠP ac +app ed +ani el +D P +Ġres olution +Ġre act +Ġappro ved +on om +m ond +ĠO ffic +-- - +Ġrepl ace +Ġt ack +Ġsp ort +Ġch ain +Ġemer gency +r ad +ĠPalest in +Ġ4 6 +Ġautom atically +Ġrout e +Ġp al +Ġb anks +ĠPar is +ĠMed ia +ro ad +ic ing +i xt +ist ed +Ġg rew +Ġco ord +ĠW here +om in +Ġsub s +� � +Ġ ± +Ġcorpor ate +Ġse lection +n oon +ĠRep ort +c s 
+clud ing +ord ers +anc he +ĠIt s +Ġslow ly +ĠE gypt +ĠA cc +Ġcol le +iqu es +E X +Ġattempt s +ur l +ĠC ross +Ġfind ings +ĠS C +ĠO R +Ġind ex +ens ity +ĠW ay +ĠL and +Ġsh ock +d is +Ġd ynam +Ġc art +m osp +S ince +i est +ĠB oy +Ġst orm +ĠCont in +201 3 +he w +il it +Ġess ential +iqu id +O ther +ive red +Ġreason able +A ct +Ġsub sequ +ĠP ack +ĠF ort +Ġconsider ing +Ġun iversity +l og +Ġmar ried +Ġill ust +ĠTr ue +£ ı +Ġnumer ous +rast ructure +Ġserious ly +Ġrefer red +u a +Ġconsist ent +on na +ĠRe al +ru ption +ci ples +Ġfact s +9 1 +ot es +er g +The n +Ġacc ompl +N ote +Ġre venue +Ġpass ing +Ġm al +e en +ĠY et +Ġg ather +ter day +ew ork +ĠA uthor +P e +Ġopt im +Ġr ub +Ġè £ı +Ġun known +st one +Ġun ion +ol ve +Ġopportun ities +Ġbrow ser +ĠW al +ĠC ost +Ġreport ing +st s +p et +Ġs and +Ġsudden ly +Ġsurpr ising +ĠV R +Ġsomew hat +ĠB as +ult ure +iz z +ĠC D +Ġchalleng es +Ġsett ings +Ġexperien ces +ĠF ull +Ġcan n +Ġrece iving +ES T +Ġj oint +Ġcult ural +Ġa st +8 2 +as tern +ce ived +ĠC ru +Ġb ull +p ired +am m +Ġfac ing +p ower +Ġb oss +ĠH ol +Ġinst r +Ġincreasing ly +Ġsh ift +Ġstre ets +ĠWilliam s +ab b +Ġl ie +Ġl augh +ĠC a +P L +Ġadult s +Ġcustom er +Ġob tained +Ġsupport ing +ht ml +f ire +Ġdetail ed +Ġpick ed +ĠR ight +ld er +E E +st ood +ĠK im +Ġw ire +Ġs ight +Ġdevelop ers +Ġpers ons +Ġs ad +Ġc up +Ġwar ning +Ġboy s +l ong +Ġb ird +f o +Ġw al +Ġobserv ed +Ġz one +iven ess +Ġch annel +c ript +Ġref used +ĠAg ain +Ġsu c +Ġspokes man +ĠRe f +r ite +ou ston +ãĥ ³ +ĠS her +Ġact s +ĠN ame +Ġstrugg le +ar ry +omet imes +Ġdisc rim +H T +Ġcateg ory +Ġreal ize +Ġemploy ee +ĠAf ghan +en ger +Ġgun s +ĠSte ve +ĠM ot +ĠO l +ok ed +Ġth ick +Ġfair ly +ill y +Ġsur ve +ĠM at +we ight +â Ķ +Ġtro ops +Ġag ents +Ġbatter y +Ġmot iv +à ¡ +S ec +d en +o very +L S +Ġfl u +Ġconf ident +ĠO per +Ġem pty +Ġp hen +Ġse ctor +Ġexc ited +Ġrem ote +ap h +o en +Ġdestroy ed +Ġmor al +ĠH P +ĠR on +Ġd ress +ĠB at +Ġl it +ĠM S +Ġa f +H L +r um +is ms +Ġshould n +Ġsym pt +ĠTor onto +het ic +Ġcar bon +Ġinstall ed +Ġviol ent +Ġsol ar +j a +Ġpract ices +Ġr ide +ĠP enn +Ġimpro ved +Ġaud io +Ġbehav i +ĠP S +Ġe ating +D ata +ĠRe view +p ass +cl aim +u ated +ang ers +c hen +Ġproper ties +Ġany where +An other +Ġbl ow +ĠJack son +Ġp roud +Ġplan e +l ines +Ġsqu are +Ġpro of +ans as +Ġtalk ed +m akers +Ġs ister +Ġhold s +Ġres ident +Ġ= = +Ġresist ance +Ġspl it +Ġpro secut +Ġconf idence +res ents +Ġcut s +Ġexcept ion +Ġz ero +Get ty +Ġcop yright +Ġtot ally +orm al +ific ations +ĠAustral ian +Ġs ick +Ġ1 50 +Ġhouse hold +Ġfe es +Ġdri vers +og en +ĠN Y +Ġnecess arily +Ġregul ations +ear ing +s l +Ġperspect ive +c are +ic ial +H is +Ġesc ape +Ġsurpr ised +ĠV an +ur rent +Ġv ac +8 1 +ĠTh us +Ġem phas +ĠCh ampions +ĠI ce +Ġn arr +Ġhead s +Ġca using +b el +f ortunately +ĠM a +Ġtarg ets +ci pl +Ġafter noon +Ġadd s +ĠMay be +ĠF our +ess ed +ple te +Ġus ual +ch o +ing u +Ġwith d +ĠE nergy +ĠE conom +O O +Ġart icles +Ġinj ured +Ġman age +Ġexpl ains +Ġdi agn +R ec +at ures +Ġlink ed +Ġdiscuss ed +Ġexpl o +Ġocc asion +ath an +Ġopp osite +Ġfac es +Ġden ied +ĠK night +Ġn ut +Ġapprox imately +Ġdisapp oint +onym ous +ĠB est +ĠL o +ĠH y +ĠA ff +Ġvot ing +an while +ĠII I +Ġinstit utions +ag ram +ĠD aily +Ġdr ag +Ġnear by +Ġgu ilty +Ġcon ver +P re +s hip +Ġre ward +Ġphilos oph +ĠS S +u gh +Ġapp s +f riend +Ġu pper +Ġad vert +Ġs now +Ġfr ust +Ġour selves +F r +ĠD ie +amp ion +Ġdis miss +Ġc ere +Ġsign al +f rom +Ġ ). 
+Ġ5 2 +Ġcr imes +it ors +est ival +use um +Ġcoun cil +ĠS aud +M ay +ĠG un +ic ian +et her +Ġsu fficient +ĠH en +so le +Ġhistor ical +ĠF ar +ĠT urn +Ġp in +Ġsuc ceed +m at +ly mp +Ġtrad ition +ĠO k +Ġc ro +Ġdesc ription +al le +Ġsk y +T e +Ġwide ly +Ġw ave +Ġdefin ition +ĠJew s +Ġcy cle +Ġref ere +Ġbr ings +us al +Ġal ive +Ġfrequ ently +Ġint ention +ĠCont rol +l v +y stem +Ġpriv acy +g ent +ren ce +ĠQu est +ĠChrist mas +Ġr ail +Ġco oper +Ġtest ed +ĠC apt +as ks +Ġcomfort able +Ġdel ivered +sc ape +Ġdep th +ĠG OP +Ġwrit es +Ġass ets +Ġsa v +im ents +Ġtrans ition +Ġart ist +ĠL ook +Ġl ob +Ġcomp onents +ar ity +Ġwalk ed +Ġro ot +Ġparticip ants +Ġnot iced +Ġres c +Ġn av +ĠAd minist +d a +ut ral +pl ate +Ġimport ance +Ġass ert +ious ly +c ription +Ġinj uries +ĠChe ck +Ġregist ered +Ġint ent +Ġmiss ed +ograph ic +Ġsent ence +oun ter +Ġassist ance +ev in +Ġdat abase +Ġbuild ings +Ġclass ic +Ġth inks +ĠOh io +P r +ug g +Ġfe e +p an +Ġeffect ively +Ġfac ility +Ġbe ar +Ġch apter +Ġdog s +ĠCol umb +Ġl atter +it ial +Ġad mitted +T V +ĠGe org +Ġpost s +\ \ +Ġlawy er +Ġequ ival +Ġm and +Ġcontro lled +ĠW alk +ĠAnd rew +Ġmen u +am ental +Ġprotect ed +v a +Ġadminist r +or al +Ġre in +ĠS ar +Ġamount s +Ġn ative +ĠM oon +Ġrep resents +Ġab andon +Ġcarry ing +Ġt ank +m ary +Ġdecl ared +T ube +Ġh at +Ġpun ish +el lect +m es +Ġun iverse +ĠR od +ph y +Ġinf rastructure +Ġ5 1 +Ġopp osed +ow nt +c a +ĠM ake +Ġhard ware +Ġco ffee +R el +b al +w orld +ĠS af +ĠSe a +in als +Ġown ed +Ġh all +ers ion +Ġdescrib e +ĠP ot +Ġport ion +Ġat mosp +Ġgovern ments +Ġdep ending +Ġoff ense +Ġtr ick +aw a +ĠL ine +ĠV is +ĠH ard +ĠOr ig +ĠCl ick +Ġdes k +ĠVal ley +ĠS ov +Ġmov ies +Ġrem ark +Ġm ail +Ġcons cious +Ġrul ing +ĠR ights +Ġmed ic +he nt +ĠW omen +> < +Ġrepl aced +ĠP rem +ĠTh anks +Ġre new +ĠB all +if orm +Ġsh ots +C omm +Ġar med +Ġconst ant +Ġt aste +Ġreal ized +Ġbu ff +Ġm o +Ġeffic ient +M ost +or ation +if ies +Ġcommun ication +Ġfl ood +Ġconsequ ences +Ġany way +ig g +ĠG M +ĠTh ank +Ġ iron +Ġev olution +ĠC op +tw itter +Ġ9 5 +Ġrelationship s +ad el +ĠYou ng +Ġpropos al +ay ers +uild ing +ĠH ot +OR E +c os +Ġcoll abor +P G +ax y +Ġknow ing +Ġsupport s +ow ed +Ġcontrol s +Ġmere ly +um er +Ġath let +Ġf ashion +p ath +Ġg ift +Ġer a +AN D +Ġkind s +ĠKore an +Ġleg it +ul ous +Ġess entially +Ġthe rap +n ic +Ġsuff ered +Ġh ur +Ġprom ise +Ġex cess +Ġover w +Ġpr ime +ĠH ouston +er ry +ĠM s +R S +201 2 +Ġst ores +ĠO lymp +Ġj ourney +Al though +S ub +ĠE duc +ĠCh apter +Ġrequest s +Ġconsum ers +Ġt iny +Ġis ol +ĠF air +b a +ĠY OU +Ġcr ash +ce ler +Ġemot ional +Ġgood s +Ġelect ed +Ġmod er +ĠLin ux +Ġbl ocks +Ġis land +ĠSoc iety +Ġelect ions +Ġbroad cast +Ġche ap +Ġn ations +Ġse asons +4 00 +Ġwas te +ĠS at +Ġfield s +em ploy +Ġprof ile +Ġauth ors +AL L +ĠG ra +w est +ĠT y +Ġdeath s +Ġv acc +Ġfor med +Ġd u +Ġon going +ĠMuslim s +el f +ig ure +Ġass ume +ĠUkrain e +w ater +Ġco ast +Ġvot ed +g or +ĠA S +ĠMich igan +az a +ĠAr m +i ro +Ġf lex +as ters +' ' +Ġwel come +ar l +Ġloc ations +ig ation +ĠF il +Ġbu ying +Ġarch itect +Ġhard er +ĠC ub +Ġinter face +Ġrestaur ant +Ġdisco ver +Ġex ceed +Ġfav our +ger y +Ġd uty +Ġp itch +ad or +ĠM ach +b oy +Ġrespond ed +Ġext ended +her s +M any +ra id +if er +ĠIn s +S er +Ġmed ium +s he +ĠS ports +Ġmag azine +ut ation +Ġlim its +ĠG all +Ġex ternal +raz il +Ġyoung er +t le +Ġrem ind +ĠC ON +Ġimmedi ate +Ġh idden +Ġvol unte +Ġsim pl +od cast +Ġph ase +d r +Ġpl ot +Ġexp osure +R I +og rap +v in +an ish +ĠAc ad +ĠEng ine +Ġexp ansion +ĠP ay +Y our +Ġpus hed +ĠE ll +ĠHe ad +Ġmarket ing +ĠA C +k et +Ġh its +Ġg ro 
+ĠA ge +ĠSc ot +] [ +Ġst im +Ġi Phone +Ī Ĵ +Ġn arrow +ĠGet ty +ĠTur key +Ġperfect ly +Ġen able +ut ch +Ġprec ise +Ġreg ime +Ġsh if +Ġcomp ens +g un +d iv +Ġch osen +ĠK en +An y +Ġtre es +Ġrecomm ended +ĠR en +u able +ĠH T +F ollow +E G +ĠH and +ĠK enn +Ġarg uments +Ġex ists +Ġb ike +ĠCons erv +Ġbre aking +ĠG ar +Ġc razy +Ġvirt ual +ay lor +ix el +Ġ19 80 +Ġper mission +ĠSer ies +Ġconsum er +Ġclose ly +c alled +Ġ5 4 +Ġhop es +Ġar ray +ĠW in +ĠLab our +Ġsp ons +ĠI re +Ġp ow +Ġread ers +Ġemploy ment +Ġcreat ure +Ġresult ing +Ġaccur ate +Ġmom ents +Ġarg ued +Ġp ed +D uring +Ġ5 3 +ĠT al +Ġs ought +Ġsuff ering +Ġ icon +le e +Ġ( $ +al ian + ° +Ġp ra +Ġbon us +( " +k o +Ġact ing +D E +f all +Ġcompar ison +Ġsm ooth +ĠN AS +u pp +ĠJose ph +ep ing +ĠT ake +ĠM id +Ġs ending +f ast +ĠF all +Ġdeal ing +us er +ĠOr gan +C o +Ġatt ached +Ġse es +% . +Ġtyp ical +AR T +Ġfind s +ĠAs ia +um in +ĠC ore +ĠE nt +in ent +u ce +ĠBl ood +ĠN ever +Ġem ails +Ġhigh light +Ġconf ront +at us +ut ed +Ġun us +Ġtop ic +ĠAd am +Ġb le +at i +Ġunder stood +S et +st ruct +T P +Ġm ob +a a +ĠSt art +pect ed +se ll +Ġded icated +ĠC A +u an +Ġsong s +esc ription +Ġte ch +Ġr ape +Ġas ide +Ġgr ant +Ġ5 6 +s ub +Ġarg ue +Ġcont aining +Ġsche dule +Ġliber al +Ġpublic ly +Ġheav ily +ĠU t +in er +ĠS ection +ĠC are +we et +l s +D is +âĶ Ģ +ĠF ollow +B ack +ĠI T +Ġb es +j i +ĠH it +est ed +Ġevery body +ĠSw ed +Ġfem in +Ġfac ilities +Ġcon ven +C omp +ĠO S +c ore +Ġan x +Ġdiv ision +ĠC am +ĠSt an +m ates +Ġexpl ore +pl om +Ġsh ares +pl oad +an es +Ġide al +et ers +ĠB ase +Ġpl astic +Ġdist inct +ĠNet work +ĠSe attle +Ġtrad ing +ens us +int end +Ġex hib +Ġinit ially +ĠF ood +Ġthous and +ĠBus iness +act er +Ġpar agraph +Ġrough ly +Ġw ww +Ġcreat ive +ĠCon f +Ġconsum ption +Ġfil ms +ag an +Ġob tain +Ġt all +Ġt or +Ġacknow led +Ġg rown +al o +K E +Ġ4 00 +end ers +t aining +U G +Ġsu icide +Ġwat ched +ĠL ist +al i +re hens +Ġsurround ing +Ġp ip +Ġf lying +ĠJ ava +ord an +Ġserv ing +in ations +p ost +Ġsh o +A v +Ġj ail +z y +Ġ199 9 +Ġ< / +Ġliter ally +ĠS ir +Ġexp osed +Ġl ies +st ar +Ġb at +Ġear ned +ĠD ig +Ġspec ified +ĠSe ason +Ġdeg rees +Don ald +Ġcent re +Ġsh aring +Ġwin ter +ĠC O +C he +Ġ Î +M P +Ġun w +Ġfew er +ĠM ir +Ġsomew here +ĠK ey +Ġattack ed +ĠK ir +Ġdom ain +Ġstrong er +Ġ9 9 +Ġpen alty +I d +Sc ript +Ġdecl ined +Ġne ck +Ġfra ud +Ġcur rency +Ġr ising +R C +âĢ¦ âĢ¦ +H z +Ġt ab +Ġtal ent +n am +ĠN BA +Ġvill age +Ġleg s +ĠN ext +E d +Ġac id +Ġhy d +8 00 +Ġinvol ving +ĠIm age +ĠBe fore +F l +Ġyes terday +S ource +Ġterror ist +Ġsu p +Ġsy nt +ĠSaud i +Ġw est +Ġr u +b urg +Ġvis ible +Ġstru ck +r ison +Ġaw esome +Ġd rawn +Ġansw ers +ĠG irl +ĠR am +Ġthreat s +Ġdef eat +os it +Ġv ent +atur ally +Americ an +end a +ĠH oly +Ġr um +% , +c ase +ĠHist ory +ĠYou Tube +Ġsit uations +ĠD NA +S te +Ġsa ved +It em +Ġrec ip +olog ist +Ġfac ed +Ġel ig +O nce +ĠL i +u h +Ġmist ake +ĠDiv ision +ĠB ell +Ġsympt oms + ® +Ġdom in +Ġfall ing +Ġend ing +as hes +Ġmat ches +ĠOn line +Ġexplan ation +D ef +red it +Ġany more +ĠT otal +ĠF OR +us hed +Ġlet ters +Ġris ks +ĠO K +Ġreported ly +: \ +Ġpl ate +Ġsubject s +Ġattempt ed +if ier +ian a +Ġunlike ly +ĠTh ough +um a +ĠIn vest +ĠPr in +ic an +ĠD ar +ĠColor ado +au g +Ġve get +a os +ri a +Ġshe l +Ġmark ed +Ġ( ) +Ġsp r +p o +ĠL ink +Ġdef e +ĠJ r +Ġthem e +Ġpass ion +ĠP en +Ġinf o +iz er +Ġsh it +ĠC ivil +ap se +c re +Ġpo ly +Ġcomp onent +ĠChar les +ĠIre land +ĠPro v +Ġdo ctors +Ġgr anted +Ġpain t +Ġhon or +Ġsm oke +Ġpay ments +Ġprim arily +ĠKing dom +r ich +ate ll +Ġde als +Ġsched uled +Ġfund amental +Ġprote in +Ġnewsp aper 
+Ġcl ients +yth on +ĠD ate +h us +Ġfeed back +Ġstret ch +Ġc ock +Ġhot el +ĠQue en +Ġsu gar +Ġj u +Ġmil k +Ġappro val +ĠL ive +Ġequival ent +ef ully +Ġins ert +z ona +Ġext ension +d ri +J ohn +Ġacc omp +S m +ĠF und +Ġconst antly +Ġ` ` +Ġgener ated +ĠA ction +ĠP sych +ĠT ri +Ġrecogn ize +Ġv ary +ph a +ĠR a +d f +et ch +ĠSov iet +Tw o +Ġpattern s +Ġprof ession +an ing +T ime +ĠL im +Ġcol ors +ĠA z +ĠT R +Ġinf ect +Ġphen omen +Ġshe ll +Al so +Ġput s +Ġdel ivery +Ġbro wn +Ġprocess ing +Ġlight s +ess age +ĠBro ok +ĠA ud +l ation +Ġindust rial +L ike +ĠB razil +rou s +ES S +ĠL uc +Ġsome how +Ġ8 5 +Ġpro port +Ġpolit icians +Ġindic ate +Ġh ole +Ġtechn iques +Ġcompet itive +Ġph r +Ġv o +ist ent +ĠD ream +Ġcamp us +Ġaspect s +Ġhelp ful +Ġsh ield +or se +Ġtrig ger +m al +Ġ5 8 +Ġt ort +Ġperson ally +Ġt ag +Ġkeep s +ĠV ideo +Ġben ch +Ġg ap +a ire +Ġe ast +Ġrec overy +per ial +Ġprof it +ĠM ic +Ġ5 7 +Ġcol on +Ġstrong ly +st yle +Ġalleg ations +h an +Ġrep orters +j o +r ine +arg et +and al +Ġ0 3 +Ġfl ash +tr ans +Ġstr ict +Ġpark ing +ĠPak istan +Ġl i +Ġwe ird +ĠE ric +Ġreg ions +ĠJ un +Ġint ellect +ĠW H +od ing +rib utes +up id +ĠT it +Ġf inger +or ia +Ġe lev +ĠF ield +Ġcon clusion +; ; +Ġfeel ings +Ġext ensive +Ġm ixed +Ġne uro +v y +Ġhar ass +ĠC irc +ou ch +Ġterrit ory +Ġsuccess fully +M ar +Ġing red +Ġoverw hel +Ġl ayer +V iew +Ġall ies +ill ance +ĠTh ree +Ġb unch +Ġnorm ally +Ġnet works +Ġsac r +ĠC IA +b les +Ġch ose +Ġopp onents +Ġregard less +Ġfr anch +Ġpre f +ĠP o +Ġbr idge +ann a +ĠSil ver +Ġw age +p age +ri or +Ġrad ical +ĠL ittle +Ġman ip +Ġsecret ary +Ġg ang +D R +F A +Ġdec ent +ĠSp irit +Ġun cle +ĠDevelop ment +Ġinvest ors +Ġwall s +Ġpub lish +Ġgener ate +iss ions +c ar +Ġprom ote +Ġcut ting +Ġche st +Ġdrink ing +Ġcollect ed +Ġ7 2 +Ġhop ing +Ġem br +gor ith +Ġwar ned +Ġinstruct ions +O G +ĠD id +ĠAg ency +Ġg ear +Ġcritic ism +ĠF urther +Ġut il +ann y +R ed +Ġcoun sel +ĠAs ian +Ġredu ction +p ool +Ġteach ing +Ġdeep ly +i y +Ġestim ates +Ġcho ices +Ġperman ent +in em +ke l +Ġf asc +p se +f ile +ĠL ow +ĠP erson +Ġt ournament +st al +Ġm el +U ST +ĠR ay +az i +V al +Ġcont ained +ĠH olly +Ġw ake +Ġreve al +Ġprocess es +ĠIS IS +Ġ0 9 +Ġbl ind +Ġste el +ĠB ad +Ġcare fully +app y +ro it +Ġg aming +Ġhous es +ĠC oll +Ġtr uck +er m +Ġsc ored +Ġocc as +ret urn +b ound +v ar +Ġsh arp +Ġaf raid +ĠE X +am ber +c ific +Ġsche me +N C +ĠPol it +Ġdecl ine +Ġ199 8 +Ġpus hing +Ġposs ession +Ġpriv ile +Ġteacher s +Ġy ield +H A +ĠDav is +it led +#### #### +Ġr ig +ĠD aniel +ac on +Ġh ide +ut en +Ġcolle agues +Ġprin ciples +Ġl oud +Ġs in +ĠDem on +Ġst one +Ġ0 2 +Ġt aught +Ġter rible +Ġst uck +ĠPol icy +te en +Ġimplement ation +ĠB BC +ĠAP I +Ġwhe el +all as +Ġch ampions +ol ars +play er +Ġrepeated ly +ĠSt ill +Ġlik es +ast y +es ter +ĠCath olic +R L +Ġb ath +Ġno ise +t itle +Ġn orthern +P art +Ġmag n +Ġf ab +ĠAs h +Ġdis pl +Ġtick et +Ġm urd +Ġalong side +ĠMus ic +Ġr iver +ĠSte el +ĠC L +ĠPl ayer +ĠM ult +ow ing +re p +s ize +Ġt ur +ĠGeorg ia +isc al +ra ction +Ġc able +Ġ5 9 +Ġw ins +Ġup coming +Ġsurv ive +Ġins pired +ĠEduc ation +Ġstat istics +ĠF oot +iam i +Ġy ellow +ĠP age +. 
- +ĠH as +Ġur ban +Ġa x +es sel +\ " +Ġquarter back +Ġreg ister +ĠLab or +Ġab ilities +ĠF amily +Ġvar iable +ĠPr ice +Ġcont em +Ġth in +ĠE qu +d ata +Ġg otten +Ġconst it +Ġas ks +Ġt ail +Ġexc iting +ĠE ffect +ĠSp anish +Ġencour age +ins on +ĠA h +Ġcommit ment +C S +Ġr ally +Ġ: : +Ġsubs id +Ġsp in +Ġcapt ured +201 8 +Ġinn oc +Ġalleged ly +ĠC ome +Ġart ists +ĠN umber +Ġelect ronic +Ġreg ional +ap es +Ġw ra +Ġmy th +pr ise +ĠM iller +ĠC reat +ĠEp isode +b ell +Ġdirect ed +Ġext ract +Ġs orry +Ġv ice +ag ger +ĠSu pport +Ġ6 6 +ĠI ron +Ġwonder ful +Ġg ra +N et +ion e +E ng +Ġsh ips +ik es +ĠK evin +it ar +Ġactiv ists +tr ue +ĠAri zona +ent h +ĠDes pite +ĠS E +Ġha bit +ern el +Ġin qu +Ġab ortion +Ġv oid +Ġexpl icit +Ġeng aged +Ġang ry +Ġr ating +Ġfr ag +b ro +ick ing +d ev +Ġwor ried +Ġob ser +Ġap artment +ĠG T +Ġest ate +ĠConst itution +em on +ĠS now +Ġcount y +Ġdis ag +ĠStep hen +Ġimm igrants +w ind +ĠN ations +Ġfol ks +O ut +Ġg all +Ġtarget ed +Ġst ead +ĠB on +ĠL ib +Ġinform ed +Ġ12 0 +ch ain +idel ines +or ough +Ġdri ven +Ġregular ly +Ġbas ket +Ġprinc iple +oc ument +Ġst un +ib ilities +ĠRom an +ĠAb out +Ġal ert +Ġdemocr acy +Ġrepresent ed +H S +c ers +p arent +Ar t +p ack +Ġdi plom +re ts +ĠN O +Ġcapt ure +ĠAd v +Ħ ¢ +Ġannounce ment +ĠL ear +Ġh ook +Ġpur s +ĠS uch +ĠC amer +Ġrefuge es +ĠV e +P ol +Ġrecogn ized +l ib +Ġhad n +A ss +Ġpil ot +us hing +Ġreturn ing +Ġtra il +ĠSt one +Ġrout ine +Ġcour ts +Ġdes per +Ġfriend ly +ĠIt aly +Ġpl ed +Ġbreat h +Ġstud io +N S +Ġimp ressive +ĠAfghan istan +Ġf ing +Ġd ownt +ink ing +ĠR og +i ary +col or +se x +ar on +Ġf ault +ĠN ick +D own +ĠR ose +ĠS outhern +X X +is odes +L ist +6 00 +Ġout come +er r +Ġelse where +Ġret ire +Ġp ounds +ĠGl obal +Pe ople +Ġcommun ications +Ġlo an +Ġrat io +ĠEm pire +Ġg onna +Ġinv ent +D F +Ġ19 70 +ĠComm on +p at +Ġprom ised +Ġd inner +ĠH om +Ġcreat es +Ġoper ate +ver ty +ĠJ ordan +et ime +Ġsust ain +R eg +Ġincred ible +im a +Ġwar rant +Ġm m +A tt +Ġlaw suit +Ġreview s +it ure +ĠS ource +l ights +ĠF ord +Ġ6 3 +g roup +st ore +Ġfeat ured +Ġfore ver +Ġpo verty +ĠP op +ĠC NN +az z +ab is +ach ing +Ġl aid +ĠSu pp +Ġfil ter +en a +ĠCommun ity +Ġcreat ures +u ction +ĠR oyal +Ġassoci ation +ĠCon nect +ĠBr ad +âĸ Ī +l ers +the re +ĠG i +Ġval uable +AC K +ĠT aylor +Ġl iquid +ĠAtt orney +ĠCar l +ĠF inal +ag a +ĠWil son +B ecause +ĠProf essor +ak a +Ġincred ibly +r ance +! ) +R ef +s k +Ġsol utions +Ġatmosp here +Ġbl ame +um es +ĠN ob +C A +um ps +r ical +ĠPut in +ĠD est +or ic +ĠP A +Ġrespect ively +w an +Ġfif th +â Ħ¢ +ĠC ry +Ġgovern or +res ident +Ġpurch ased +Ġh ack +Ġint ense +ob s +Ġorig in +Ġdef ine +Ġcare ful +** * +Ġshould er +Cl ick +Ġt ied +Ġdest ruction +ou red +Ġno body +Ġh o +ĠEx per +Ġt ip +" ; +Ġtechn ique +Ġj ur +ĠP ok +b ow +Ġleg end +Ġacc ord +Ġbus y +ĠInt el +Ġh ang +ak i +. 
] +âĢĶâĢĶ âĢĶâĢĶ +Ġsur gery +Ġrep rodu +Ġun iform +Ġscen es +c ode +Ġ6 2 +l isher +ĠH ave +ph ia +Ġcry pt +Ġrec on +Ġsc ream +Ġadop ted +Ġsc ores +N e +ĠIt alian +in cluding +B O +Ġindic ated +Ġent ertain +G u +T ext +i el +Ġtw enty +Ġeng age +off s +ĠPac ific +Ġsm ile +Ġperson nel +Ġto ler +Ġdo ors +Ġt one +Ġmach ines +Ġent ering +ten ance +C O +ĠJer sey +Ġfore st +Ġhor se +Ġcompl aint +ĠSpr ing +y o +ĠPl us +ed ing +ĠRet urn +qu arters +ial s +c ow +Ġacad emic +Ġf ruit +Ġ199 6 +og ether +Ġw ine +Ġpur su +ĠSte ven +Ġlic ens +Wh o +Ġclot hes +re ction +Ġsqu ad +Ġst able +Ġr aw +z ens +St ar +ut ies +anc er +Ġke ys +ĠM u +Ġcompl icated +ig er +ĠTe xt +Ġabs or +Ġ6 8 +Ġfun ny +Ġrel ief +ĠL ew +ĠC ook +Ġch art +Ġdraw ing +G E +Ġmod ule +ĠB ull +I LL +Ġs alt +0000 0000 +il le +Ġres ource +aw ay +adel phia +ĠB ru +Ġ6 7 +Ġsome body +Ġparticip ate +Ġro se +we red +Ġmus cle +Ġcons ent +Ġcontin uing +ĠGuard ian +ĠOr der +reg on +Ġre ar +Ġprov ision +Ġlik ed +ri ent +Ġb ra +Tr ans +Ġmeet ings +Ġto x +Ġcon vent +Ġaut o +Ġrec ording +ĠSo ft +00 1 +ĠR oll +Ġprogram ming +Ġp ic +Ġprov ed +Ġst ab +ĠA st +Ġca ption +ul ating +ĠAtt ack +Ġnew ly +Ġ199 7 +f r +Ġdis cipl +ĠGree k +Ġed ition +ĠDo es +ĠB ox +if le +ack et +Ġpass es +Ġgu est +Ġac celer +it als +U D +Ġaut hent +ĠR est +ov al +t a +u ine +Ġarm or +ĠT own +Ġcomp at +Ġinc hes +Des pite +Ġass ign +he rent +Ġprep are +ĠM eg +oc key +Ġdep ends +Ġtrack s +w atch +Ġl ists +ĠN orthern +Ġal ter +re c +ĠE astern +Ġcond em +Ġevery where +? ' +Ġaff ili +Ġf ought +": {" +Ġm ac +it arian +Ġsc ope +ĠA L +aw s +ar ms +Ġqu e +Ġenjoy ed +nes ota +Ġagg ressive +ĠSt ory +ĠI V +Ġrec ipe +Ġrare ly +ĠMed ical +val ue +ang el +ay ing +omet hing +Ġsub section +Ġs outhern +Ġfrequ ency +re te +roll ed +ult s +ĠN ic +Ġbeh alf +Ġsequ ence +ab et +Ġcontrovers ial +Ġcomp rom +Ġwork er +Ġmain ly +Ġal gorith +ĠM ajor +or ce +g ender +Ġorgan ized +Ġf ake +Ġconclud ed +ĠE D +ĠEx ec +r age +Ġch ances +ber ry +ĠTr ad +Ġconfig uration +Ġwithd raw +Ġf ro +ud es +ĠBro ther +ĠB rian +Ġtri es +Ġsam ples +Ġb id +ĠGold en +Ġphot ograph +if est +ĠD O +ĠPar liament +******** ******** +R em +Ġcont est +Ġsign ing +p x +ĠZ eal +âĶĢ âĶĢ +E ar +Ġex it +Be fore +ĠCor por +n ull +mon th +Ġrac ial +ott ed +ĠV eg +ĠRe uters +Ġsw ord +ps on +ĠRom ney +a ed +Ġt rib +Ġin ner +Ġprot ocol +ĠB i +ĠM iami +ever al +p ress +Ġsh ipping +ĠAm endment +ĠHow ard +con nect +ĠD isc +ĠJ ac +iam ond +ĠThere fore +s es +ĠPrin cess +ĠUS B +ĠAn th +Ġsurve illance +Ġap olog +Ġ6 1 +ow a +Ġf ulf +j s +Ġl uck +ust ed +Ġ § +n i +Ġant icip +em an +Ġwin ner +Ġsil ver +ll a +ic ity +Ġunus ual +Ġcr ack +Ġt ies +e z +Ġpract ical +Ġprov ince +ĠPl ace +Ġprior ity +IC E +Ġdescrib es +Ġbr anch +F orm +ask a +miss ions +b i +Ġp orn +ĠTur k +Ġent hus +Ġf ighters +Ġ0 8 +ĠDet roit +Ġfound ation +av id +A re +Ġjud gment +cl ing +Ġsol ve +ĠDes ign +W here +hes is +ĠT ro +a fter +Ġne utral +ĠPalestin ian +ĠHolly wood +Ġadv is +ĠN on +y es +ol is +Ġrep utation +Ġsm ell +Ġb read +ĠB ul +ĠBe ach +Ġclaim ing +Ġgen etic +Ġtechn ologies +Ġupgr ade +row s +Ġdevelop er +ĠJ osh +ĠDis ney +erv ed +ip al +Ġun ex +Ġbare ly +t hen +ĠP ub +Ġill ness +et ary +ĠB al +Ġp atch +Ġbut t +Ġst upid +ĠD og +ĠD allas +f ront +ie ce +Ġprot ests +Ġch at +oen ix +Ġw ing +Ġpar liament +Ġ7 7 +ose xual +Ġre nder +pt ions +ĠCo ast +os a +ĠG reg +h op +ĠMan agement +Ġbit coin +Ġrec over +Ġincor por +or ne +ĠUs ing +Ġpre ced +Ġthreat ened +Ġspirit ual +ĠE vent +ĠF red +Ġadvert ising +Ġimprove ments +ĠC ustom +Ġer rors +Ġsens itive +ĠN avy +Ġcre am +L ook +Ġex clusive +Ġcomp 
rehens +Ġde leg +Ġcon ce +Ġrem em +Ġstruct ures +Ġst ored +N D +Ġ1 000 +U P +ĠB udd +A F +w oman +ĠAcad emy +ð Ł +se a +Ġtem porary +Ab out +es ters +Ġtick ets +Ġposs ess +in ch +o z +Ġl a +Ġcontract s +Ġun p +Ġc ig +ĠK at +ult ural +as m +Ġmount ain +ĠCapt ain +St ep +m aking +ĠSp ain +Ġequ ally +Ġl ands +at ers +Ġreject ed +er a +im m +ri x +C D +Ġtrans action +g ener +less ly +Ġ| | +Ġc os +ĠHen ry +Ġprov isions +Ġg ained +Ġdirect ory +Ġra ising +ĠS ep +ol en +ond er +Ġcon sole +in st +Ġb om +Ġunc ertain +1 50 +ock ing +Ġmeas ured +Ġpl ain +Ġse ats +Ġd ict +S L +af e +Ġest imate +iz on +at hered +Ġcontribut ed +Ġep isodes +omm od +G r +AN T +Ġ6 9 +G ener +Ġ2 50 +vious ly +rog en +Ġterror ism +Ġmove ments +ent le +oun ce +ĠS oul +Ġpre v +ĠT able +act s +ri ors +t ab +Ġsuff er +Ġn erv +Ġmain stream +ĠW olf +Ġfranch ise +b at +Ġdem ands +Ġag enda +Ġdo zen +Ġclin ical +iz ard +ĠO p +t d +Ġvis ited +ĠPer haps +Ġact or +Ġde lic +Ġcont ribute +Ġin ject +ĠE s +ac co +Ġlist ening +Ġcon gress +epend ent +Ġprem ium +Ġ7 6 +ĠIr ish +Ġass igned +ĠPh ys +Ġworld wide +Ġnarr ative +ot ype +m ont +b ase +ĠB owl +ĠAdminist ration +Ġrel ation +ĠE V +C P +Ġco vers +Ġ7 8 +Ġcert ific +Ġgr ass +Ġ0 4 +pir acy +ir a +Ġengine ering +ĠM ars +Ġun employ +ĠFore ign +st ract +Ġv en +Ġst eal +Ġrepl ied +Ġult imate +Ġtit les +d ated +Ġj oy +a us +Ġhy per +ak u +Ġoffic ially +ĠPro duct +Ġdifficult y +per or +Ġresult ed +rib ed +l ink +wh o +~~ ~~ +ĠSpe ed +ĠV iet +W ind +ĠBar ack +Ġrestrict ions +ĠSh are +Ġ199 5 +ition ally +Ġbeaut y +op t +Ġm aps +ĠC R +ĠN ation +ĠCru z +W ill +Ġelectric ity +Ġor g +Ġb urd +Ġviol ation +Ġus age +Ġper mit +ĠCh ron +ĠF ant +Ġn aturally +Ġ0 7 +Ġth rown +ĠAw oken +Ġal ien +ĠHer o +ĠK ent +ĠR ick +ri ke +Ġp ace +}, {" +G L +Ġpo ison +ĠT ower +Ġform al +al ysis +Ġgen uine +Ġk il +a ver +Ġproced ure +ĠPro p +intend o +ĠM ain +as ant +Ġtr ained +G ame +ĠL oad +ĠM A +Ġcru cial +Ġle ts +ĠF R +Ġch ampion +1 01 +ĠCon ference +Ġwrit ers +Ġconnect ions +Ġo kay +ir ms +ĠR and +Ġenc ounter +ĠB uff +Ġachie ved +Ġche cks +isc ons +Ġassist ant +Ġwhen ever +ĠA ccess +ĠU r +b in +Ġcl ock +is p +op her +Ġb orrow +Ġm ad +Ġperson ality +on ly +IS T +ab ama +Ġg ains +Ġcommon ly +Ġter r +Ġhyp ot +Ġre ly +Ġt iss +iscons in +Ġrid ic +f unction +ĠO regon +Ġun com +r ating +el and +ĠN C +Ġm oon +ann on +Ġvulner able +ut ive +³³ ³³ +ĠRad io +Ġw estern +se ct +ĠT ony +Ġocc urs +ĠO s +ĠH on +Ã Ń +Ġv essel +ĠScot land +Ġdiscrim ination +Ġsubsequ ent +st ring +Ġfant asy +ĠSh adow +Ġtest im +W E +it i +r as +Ġbo at +Ġmar ks +Ġord inary +Ġre n +Ġrepresent ative +Ġpet ition +Ġ7 3 +Ġad venture +Ġign ore +ĠPhil adelphia +ĠS av +V P +Ġfact ory +Ġt asks +Ġdep ression +z ed +................ ................ 
+ĠSt orm +Ġc ogn +Ġelig ible +Ġredu cing +v ia +Ġ0 5 +Ġstri king +Ġdoll ar +h o +O V +Ġinstr ument +Ġphilosoph y +ĠMo ore +ĠA venue +Ġrul ed +ĠFr ont +IN E +ĠM ah +Ġscen ario +ĠNAS A +Ġen orm +Ġdeb ut +Ġte a +T oday +Ġabs ence +S im +Ġh am +le ep +Ġt ables +ĠHe art +M I +K e +re qu +V D +m ap +Ġchair man +Ġp ump +Ġrapid ly +v i +Ġsubstant ial +E P +d es +ch ant +ili pp +ĠS anta +ri ers +anche ster +L oad +ĠC ase +Ġsa ving +Ġ7 4 +ĠA FP +er ning +oun ced +ĠMin nesota +ĠW as +Ġrec ru +Ġassess ment +ĠB ron +U E +Ġdynam ic +Ġf urn +ul ator +Ġprop ag +h igh +Ġacc ommod +Ġst ack +ĠS us +w rit +Ġre ven +ĠGod d +ĠZeal and +ab s +Ġbr ut +Ġper pet +h ot +Ġhard ly +ĠB urn +ãĤ ¹ +Ġst y +Ġtrans actions +Ġg ate +Ġsc reens +Ġsub mitted +Ġ1 01 +Ġlangu ages +ugh t +em en +Ġfall s +Ġc oc +Ĥ ¬ +Ġstri kes +p a +Ġdel iber +ĠI M +Ġrel ax +ann els +ĠSen ator +Ġext rem +Ġ} , +ĠDe b +Ġbe ll +Ġdis order +c ut +Ġi OS +Ġl ocked +Ġem issions +Ġshort ly +" ] +ĠJud ge +ĠS ometimes +Ġr ival +Ġd ust +Ġreach ing +F ile +¯¯ ¯¯ +ino is +ĠJ ason +Ġs atell +are t +Ġst ations +Ġag ric +ĠTechn ology +com es +ĠUn fortunately +ĠChild ren +Ġappl ies +ast ed +Ġan ger +ail ability +ĠDam age +Ġcomp are +ĠStand ard +Ġaim ed +ĠB a +angu age +Ġreg ulation +Ġj ury +Ġair port +Ġse ctions +ĠPr ince +em ed +Ġmedic ine +Ġh itting +Ġsp ark +ol ves +Ġad s +St ate +Ġfood s +Ġrepl acement +Ġch icken +Ġlow est +Ġmind s +Ġinvol ves +u i +Ġarr ang +Ġproced ures +ĠWh ich +ivers ary +Ġb ills +Ġimprove ment +Ġin ev +Ġexpect ations +Ġintellect ual +Ġsp aces +Ġmechan ism +2 50 +bre ak +ĠZ e +ĠT enn +ĠB alt +Ġbar rel +Ġstat ic +man n +Pol ice +Ġt ips +Ġhand ling +c us +od ed +il ton +ir y +Ġjournal ists +our se +Ġcom ic +Ġnom ine +IT Y +Ġvers us +Ġlo op +Ġsur f +ĠInd ust +ĠHun ter +Ġbelief s +is an +Ġset up +Ġbre w +im age +Ġcomput ers +f ol +} ," +ĠMed al +Ġtax p +Ġdisplay ed +Ġg rav +Ġf iscal +M on +ĠMos cow +ĠK ong +ĠCent re +Ġcamer as +ĠMr s +ĠH ay +Ġa ver +ĠK elly +p y +Ġrequire ment +Ġent itled +omb ie +Ġsh adow +ag ic +ĠA k +Ġel ite +Ġdiv ided +Ġhead ing +Ġcop ies +Ġloss es +Ġv it +k ed +ĠB ry +Ġan s +ĠSte am +Ġrep orter +he im +ĠIt em +Ġsuper ior +d on +ere nt +à ¶ +Ġtherap y +Ġpe ak +ĠMod el +Ġl ying +Ġg am +z er +r itten +Ġrespons es +Ġconsider ation +ĠB ible +Ġl oyal +Ġinst ant +Ġp m +ĠFore st +à ¼ +Ġext end +Ġconv icted +Ġfound er +Ġconv in +ĠO ak +che ck +Ġsch olars +p ed +Ġover se +T op +c ount +ĠAr k + · +Ġ0 6 +ĠL A +m d +ĠLat in +im ental +ĠC PU +Ġsubst ance +Ġminor ity +Ġmanufact uring +E r +ocol ate +Ġatt ended +ĠMan ager +r ations +Ġappreci ate +om y +GB T +id ency +B L +Ġguarant ee +pos ition +Ġo cean +clud e +Ġhead ed +Ġt ape +Ġlo ose +Ġlog ic +Ġpro ven +Ġsp ir +Ġad mit +is a +Ġinvestig ate +Ġ199 4 +sy lv +ĠL ost +c est +Ġ7 1 +Ġrequest ed +Ġwind ows +ĠPok é +ĠWith out +M et +Ġbehavi our +Ġread er +Ġh ung +ĠKe ep +Ġro les +Ġimplement ed +Ġbl ank +Ġserv es +ĠJ ay +Ġc ited +ĠF riend +prof it +ap on +Ġrep air +it em +arr ass +Ġcrit ics +ad i +ĠF ather +Ġsh out +Ġf ool +Ġ8 8 +Ġprodu cing +Ġl ib +Ġround s +Ġcirc le +Ġpre par +Ġsub mit +Ġn ic +mor row +ãĥ « +U nder +Ġv ital +ater n +Ġpass word +Ġpublic ation +Ġprom inent +Ġspeak s +Ġb ars +Ġde eper +ĠM ill +port ed +Ġw id +Ġbut ter +Ġsm oking +Ġindic ates +K ey +rop ri +ĠF ile +all ing +ast ing +ĠR us +Ġad j +Ġ7 9 +av al +Ġpres um +bur gh +on ic +Ġf ur +Ġpoll s +ik a +Ġsecond ary +Ġmon ster +ig s +ĠCur rent +E vent +Ġowners hip +end ar +Ġarri ve +ĠT ax +Ġn ull +ĠPri v +Ġth ro +Ġk iss +c at +Ġup set +ang le +it ches +ect or +olog ists +ĠGal axy +Ġcor ruption +Ġh int +ent er +ĠH ospital +Ġgreat 
ly +Ġbeg un +es y +Ġso il +ĠAnt on +Ġmain tenance +ãĥ © +Ġdo zens +Ġhuman ity +ĠAl abama +Ġr om +w orth +ap ing +sylv ania +l ah +Ġg athered +G A +Ġattack ing +f ound +ĠSqu are +Ġar bit +ict ions +ĠW isconsin +Ġd ance +ĠS aint +arch y +Ġbase ball +Ġcontribut ions +Ġliter ature +Ġex ha +per ty +t est +Ġb ab +Ġcontain er +let ter +Ġfall en +Ġwebs ites +Ġbott le +ĠS ac +Ġbre ast +ĠP L +Ġveter an +Ġinterview s +ĠA le +Ġb anned +eng ers +ĠRev olution +in th +Ġconc erning +IV E +Ġexp enses +ĠMatt hew +ĠColumb ia +d s +ist ance +Ġent ity +.. ." +Ġrel iable +Ġpar alle +ĠChrist ians +Ġopin ions +Ġin du +l ow +Ġcompet e +Ġth orough +Ġemploy ed +Ġestablish ment +ig en +ĠC ro +Ġlawy ers +ĠSt ation +T E +ĠL ind +ĠP ur +it ary +Ġeffic iency +âĢ IJ +ĠL y +Ġm ask +Ġdis aster +Ġag es +ER E +es is +ĠH old +Ġcas ual +b led +Ġen abled +ĠEn vironment +ĠInt elligence +i per +ĠM ap +ĠB E +Ġemer ged +is dom +Ġc abin +Ġregist ration +Ġfing ers +Ġro ster +Ġfram ework +ĠDo ctor +et ts +Ġtransport ation +Ġaware ness +H er +Ġattempt ing +O ff +ĠSt ore +ÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤ +ĠK now +Ġdef ence +Ġsc an +ĠT en +ĠCh air +ĠP H +ĠAtl anta +Ġfuck ing +Ġans wered +b n +ĠK ar +Ġcateg ories +Ġr ational +Ġc ust +Ġrob ot +Ġcorrect ly +Ġg if +Ġgraph ics +m ic +Ġground s +ĠO pp +i ate +Ġdist ributed +Ġsan ctions +Ġchalleng ing +ut o +Ġingred ients +Ġinv ited +Ġfound ed +ĠRe qu +d ed +Ġb owl +Ġbrother s +ĠH a +I O +Ġw ages +im ore +oc ial +Ġse ed +ative ly +Ġaddress es +ĠI owa +ab eth +Ġatt itude +is d +ch ild +Ġm ole +Ġdisco very +y ard +B r +Ġ8 2 +Ġsuppl ies +ell ing +Ġdist ingu +C R +Ġre cept +Ġ vert +Ġsw im +b ec +d oor +ĠY eah +Ġg al +Ġinter act +ĠE SP +ĠC S +amp s +Ġconvin ced +Ġobject ive +Ġdis h +ĠPhot os +l ad +Ġdownt own +o il +in ction +Ġto morrow +ĠC OM +Ġsurv ival +sh ot +Ġsett lement +C ons +ĠX box +int erest +ĠS M +arg o +en ess +Ġeth nic +b ered +M in +ĠT ok +Ġinc ent +ĠComm and +Ġmain tained +Ġbreak s +br idge +at ar +ag g +ĠF inally +un icip +ĠO nt +le ft +Ġrecogn ition +Ġ* / +ĠP ers +Ġwe lf +Ġaddress ed +ĠK ansas +Ġvir us +Ġwhere as +Ġp apers +ram s +ĠMin istry +Ġple asure +Ġacqu ired +Ġd uration +j pg +Ġcal m +ĠN HL +Ġburn ing +Ġfold er +ick ed +ĠP y +ĠIll inois +Cl ass +ĠGodd ess +Ġperform ing +Ġwelf are +j ar +In ter +Ġl in +Ġenh ance +Ġnot ion +f are +yp es +ĠAre a +Ġcann abis +ĠDie go +f s +ĠM anchester +com m +in ite +Ġcover ing +ĠS ound +Ġ19 60 +Ġ8 4 +e lect +z ing +Ġcitiz en +Ġph ones +Ġr aid +Ġign ored +ĠOb ject +Ġu pload +c ard +Ġmod ified +Ġroom s +ia h +r ange +he ast +ach us +Ġsuggest ing +âĢ ĭ +gr ade +E l +Ġclot hing +Ġr h +ĠH an +un ity +en cing +ĠAust in +sec ution +t ra +d em +ĠQ ual +Ġhe aven +Ġst ages +Ġw edd +pl us +ific ial +ĠIm m +ĠH o +iet ies +Ġphr ase +Ġbr ill +act ory +Ġprov iders +Ġsil ence +Ġa er +ĠA I +ĠAd venture +Ġplatform s +Ġdemonstr ated +Ġinter f +ing ton +Ġr aces +Ġgr ade +ult ane +ĠTh rough +f alse +Ġb ow +ĠA B +Ġfl avor +Ġhistor ic +g ov +Ġcol our +Ġview ed +ĠEm ail +el come +Ġinter vention +Ġd iversity +Ġperiod s +Ġre verse +ĠV ery +Ġqu ote +ĠLe ft +th rough +Ġsc rew +Ġland ing +Ġp ill +Ġw et +Ġprot esters +Ġrepe at +av ed +er k +Ġsal ary +ĠPenn sylvania +St ill +Ġmay or +Ġkit chen +Ġfeat uring +ĠM useum +ĠT ournament +ĠF al +Ġser vers +U C +Ġany body +im g +ĠTr ade +ixt ure +the less +Ġfin ance +Ġcl osing +ĠPat ri +i ac +ab el +Ġ> > +or ous +Ġf irms +sc reen +un a +Ġemb arrass +ul se +Ġlet ting +Ġth rew +ile y +Ġch annels +l an +ĠVeg as +Ġse ar +Ġfant astic +ar re +uzz le +ĠD er +Th ose +Ġsw ing +Ġshe et +ind ex +co ver +og an +Ġvari ables +ĠTe ch +Ġsp oken +ac hel +ĠD a 
+ĠMount ain +Ġload ed +Ġfoot age +vers ion +Ġun l +ĠPh oenix +Ġthrow ing +Ġf iring +Ġtrack ing +Ġw idth +Ġstrugg ling +ro oms +ot ion +Ġmonth ly +ĠSer ver +Ġegg s +op en +M C +Ġ199 3 +Ġh ired +Ġstay ed +ĠAll en +Ġst ro +Ġ9 8 +st ep +ĠTurk ish +Ġfab ric +ist ing +ĠD om +Ġd ates +Ġpr on +Ġbasket ball +Ġl ucky +ĠArab ia +Ġassum ed +est y +Ġaff airs +Ġgl ad +ĠInd eed +ĠF A +ĠW ord +Ġjo ining +if ice +p read +ir ts +ĠSe lect +Ġpop ulations +aw are +Ġn ose +Ġcompl aints +st art +Ġsc oring +Th anks +Ġmin ing +Ġvisit ors +S H +Ġdam aged +Ġcharacter istics +ĠP ent +D C +Ġ8 3 +ĠS ix +r ates +Ġfl ags +ĠB rew +d og +M ark +// // +Ġexec ution +Ġj oke +ph ones +Ġtestim ony +Ġob st +Q L +ĠC ut +Ġstud ied +ĠN intendo +ick et +ĠN BC +Ġl ad +ĠB ra +ĠM oh +Ġk ernel +Ġoverwhel ming +Ġag ed +Ġapplic able +ĠC ond +Ġroad s +ĠBl ock +m ade +od ge +Ġcomm ands +Ġoff ices +vel and +Ġt ut +Ġrece iver +ĠF ro +Ġsho pping +Ġi P +ĠSt re +ĠA BC +Ġentertain ment +ĠB ow +ort ed +M c +Ġread s +gr ad +ĠCol lect +Ġâ ĪĴ +ĠCap ital +eder ation +Ġemploy er +Ġinvolve ment +Ġanx iety +al ia +Ġro of +ĠAm ong +ĠDemocr at +Ġstat s +ĠV ill +Ġconst itutional +Ġrefer ring +itt y +Ġtack le +out ube +Ġback ed +ĠH ong +ĠBro ad +Ġe le +ĠO tt +Ġ199 2 +h our +achus etts +C al +Ġdefe ated +Ġ8 1 +es p +Ġseem ingly +w as +ĠJ enn +ĠK urd +Ġg ene +Ġdisc ount +R et +EC T +( ); +Ġclub s +Ġs id +ĠM arsh +Che ck +Ġp p +ĠE ag +ides pread +Ġbe ings +F T +Ġintrodu ction +ĠCh ange +AR D +Ġ1 10 +ad ows +ier ce +Ġme al +a uthor +ĠB ang +lah oma +Ġr anks +201 1 +?? ?? +m ax +Ġcoll apse +Ġop ens +Ġe cho +Ġs oph +Ġrac ist +Ġenorm ous +Ġw aves +Ġt ap +Ġcomprehens ive +. -- +ĠR oy +Ġfarm ers +Rel ated +a ired +ron es +ĠC rim +Ġproport ion +Ġdesign s +Ġnegoti ations +Ġvirt ually +ĠBat man +Ġwar n +Ġlegit imate +m ate +Ġcon vention +, , +net ic +ĠS D +Ġconsist ently +Ġcompens ation +Ġpunish ment +Ġy e +Ġt ie +ĠB ureau +ir lf +ĠB u +ĠA ren +ĠPh ilipp +Ġkn ife +Ġmem ories +ĠR oss +Ġang le +Ġ8 6 +ĠTh under +Ġre nd +ĠT our +Ġcount s +s ung +ĠIm p +Ġeduc ational +Ġaccess ible +C OM +Ġd rew +y er +G l +am ine +OR T +O B +I B +m aster +Ġtri als +og y +h ar +ĠTr ust +Ġprefer red +irlf riend +ĠN ev +Ġb in +Ġc ow +P age +Ġsign ature +ĠB L +7 00 +Ġret ired +Ġby tes +Ġneigh b +ĠLeg end +Ġdev ast +Ġsuspect ed +is ons +ĠPoké mon +sc ale +Ġcap abilities +Ġre vel +Ġche ese +d y +igr ant +Ġfail ing +b its +ĠHer oes +ĠG host +ĠS cient +Ġappoint ed +ur i +Ġinst itution +Ġexpand ed +g reg +Ġmonitor ing +Ġp odcast +Ġcoal ition +Ġ9 6 +J o +Ġst olen +ĠS ab +Ġstop s +Ġhol iday +Ġint r +C ar +Bl ack +ĠL GBT +Ġwar ming +ĠAnd erson +Ġ8 9 +Ġprodu cer +M ed +Ġaccur acy +ĠMar vel +iz abeth +ĠPat rick +m ony +Ġmin i +ac les +Ġover t +the y +Ġmembers hip +ĠV en +Ġex ch +Ġrem oval +ĠD ave +T Y +m ad +ĠF ind +Ġad equ +Ġe c +Ġte eth +Ġemot ion +Ġper m +Ġsole ly +d b +Ġextra ord +IG HT +c al +Ġgu idelines +Ġd ying +Ġsusp ended +ĠPrem ier +ĠAnth ony +el ve +Ġd ad +ĠE th +ĠFoot ball +Ġabandon ed +Ġ< < +Ġm arch +Ġhor ror +âĢ¦ " +Ġchild hood +Ġcampaign s +Ġl unch +ĠAl bert +bl ock +âĸĪ âĸĪ +ound ing +Ġb one +or gan +ad ers +ĠFl ash +ĠDri ve +Ġton ight +Ġw ars +ĠF L +Ġform ation +con st +New s +Ġcom pe +or ious +ĠSt aff +Ġdiscuss ions +ĠProt ection +ĠJ am +Ġcrit eria +Ġinstall ation +Ġaccompl ish +iz za +Ġpub lisher +Ġresc ue +ĠT ry +U LL +ĠS om +ĠH op +ore t +th s +ord on +Ġp ocket +ĠIn v +Down load +ĠCr ime +Ġb ene +ĠGu ide +ĠAs sembly +Ġparam eters +I E +ĠAlex ander +Ġconc ert +ĠSc he +Ġsh oes +Ġvis iting +Ġrec all +Ġb ub +Ġr ural +Ġconc rete +ĠR os +N ext +R uss +Ġlo ans +ĠSh ield +Ġtre m +hem at +k g 
+ĠHar ris +is ition +ĠM ove +ĠF C +Ġf ate +ĠCh o +Ġt ired +Ġprinc ipal +h ist +ien ces +ath y +Ġse vent +Ġm ood +Ġstrateg ic +Ġdise ases +Ġfor um +Ġtem por +Ġhead quarters +P ar +ig e +fl ix +Ġgu itar +Ġ9 4 +On ly +Ġrele ases +ro ph +================ ================ +Ġ6 00 +ĠContin ue +ig ate +ĠC rit +sy stem +Ġdis abled +Ġunex pected +ith ub +Ġuncle ar +ĠE st +Ġcontr ad +Ġstrateg ies +vent ures +Ġpass age +AM E +Ġimpro ving +Ġreve als +Ġdecre ase +ov a +Ġann oy +ĠSh ort +ĠL ibrary +Ġcy ber +n ell +ĠH ur +ĠC B +Ġphot ograp +U I +Ġs ed +G e +Ġ8 7 +Ġd iverse +Ġencour aged +Ġcons piracy +Ġbird s +Ġoper ator +Ġhand ful +Ġclass ified +? ) +Ġdram atic +Ġinvestig ators +it o +Ġw idespread +ĠR oom +-------------------------------- -------------------------------- +Ġcollect ive +Ġjournal ist +St ring +Ġtemper atures +il a +Ġgu id +Ġins pect +Ġmiss ile +ĠMay or +Ġman ual +Ġsim ultane +Ġrat ings +Ġsu ck +Ġ9 7 +Ġunivers al +Ġph arm +Ġdis rupt +ian o +A V +Ġf t +Ġstat ist +old s +ĠWalk er +ph p +Ġunder t +ĠL as +ish op +nt il +res hold +ĠWhe ther +M s +Ġden y +ĠCl oud +Ġprov ider +Ġsurv iv +ĠUp date +h as +Ġmist akes +ch arge +pl ed +r ity +Ġn ode +ĠMass achusetts +ool s +lic ation +Ġf ails +em ale +or i +back s +Ġsh irt +Ġ' ' +ĠN AT +Ġwat ers +els on +Ġe ase +Ġsc ar +Ġcont ents +m ind +Ġcont ribution +Ġsh r +Ġhand ed +Ġst ability +Ġtra ve +E m +Ġmir ror +12 3 +Ġwe igh +Ġf iction +ou ver +ist ant +r ition +ĠF ed +Ġphys ically +Ġst ake +ĠArt icle +ĠAr c +ĠLew is +ĠM ind +Ġdemonstr ate +Ġprof its +v ision +om ic +ol id +Ġbatt les +Ġdri ves +Ġeas tern +ĠS ony +!! ! +ar ation +v ard +ĠG L +port ation +Ġ9 2 +Ġlaw makers +Ġprotect ing +ĠE PA +Ġy eah +Ġsh ame +ol ph +e ven +x it +Ġatt ach +Ġrepresent ing +Ġob s +ĠUt ah +iff s +ĠFre edom +à ³ +A K +Ġinc idents +it age +Ġview ers +c d +Ġm ouse +Ġcl ar +Ġaccord ance +Ġb ot +c or +ĠSum mer +he ld +Ġinnoc ent +Ġiniti ative +ol s +________________ ________________ +Ġsp ots +p ace +Ġconvent ional +Ġcorpor ations +Ġblock ed +H D +at tered +Ġref ers +Ġbu ck +ĠDig ital +12 0 +Ġtop ics +T F +Ä ģ +br id +re ement +Ġunder lying +ĠM ember +Ġinvestig ating +Ġpregn ancy +Ġtouch down +ĠB and +ĠCall er +Ġinst ances +P P +w a +G ood +Ġ199 1 +ĠC old +Ġfear s +Ġrem arks +Ĩ Ĵ +at al +Ġm it +Ġexper iments +i pt +Col or +ind u +Up date +Ġ9 3 +A g +Ġ å +anc ouver +B oth +Ġjud ges +Ob ject +Ġst ere +umb n +Ġparticip ation +ĠSt ars +ĠJ ere +Ġweek ly +ĠB an +Ġconvers ations +ĠP itt +u z +ĠIndian a +ĠK ick +Ġinf ection +Ġhero es +Ġsett led +Ġstri p +Ġh al +Ġd ump +ĠS ci +Ġl es +Ġref erences +ĠU RL +ĠBr idge +Ġwant ing +For ce +Ġex clus +Me anwhile +m n +Ġg entle +m aker +sen al +ĠG ro +ou ri +ĠR ain +ĠAll iance +Ġl ift +el a +S D +ĠCle veland +Ġrank ed +Ġst adium +Ġdead ly +ä ¸ +Ġr iding +ar ia +ĠAr mor +Ġdocument ation +ĠGree ce +ree k +Ġl ens +ĠS a +Ġg ross +ĠE mer +ag ers +ĠD ub +ĠR h +ĠAM D +Ġarri val +Ġdes ert +Ġsupp lement +ĠRes p +Ġkn ee +Ġmarg in +f ont +og g +201 0 +ĠP ir +ĠP rom +iv als +Ġint ake +Ġdifferent ly +ug s +Ġb its +clud ed +Ġsearch ing +ĠD u +um ble +Ġfunction al +ĠBalt imore +ĠC ould +Ġdes ired +Ġcirc uit +ĠL yn +ĠG O +ĠF alse +re pre +' : +alt ies +Ġmin im +Ġdro ve +ĠSh ould +Ġh ip +Ġpro s +Ġut ility +ĠN ature +ĠM ode +P resident +o pp +r at +form ance +Ġconcent ration +Ġf ont +ĠB ud +Ġam id +Ġre vers +ĠM L +B ar +Ġinter action +Ġjur isd +Ġspell s +d ep +f il +Ġcivil ians +ut ter +ĠCo oper +ĠBel ow +Ġent rance +Ġcon vert +Ġcontrovers y +ow ered +Ġcontr ary +Ġar c +ĠExec utive +ĠOffic er +Ġpack ages +Ġprog ressive +w idth +Ġreserv ed +v ol +ĠSam sung +Ġprint ed 
+Ġcent ers +Ġintrodu ce +ĠKenn edy +Ġodd s +Ġsure ly +Ġindepend ence +Ġpass engers +repre ne +ĠBe h +Ġl oves +ĠESP N +Ġfac ilit +Ġident ical +Ġdo ct +Ġpartners hip +con f +ĠH ide +Ġconf used +ĠC ow +M en +Ġw rest +ĠIraq i +Ġh oles +ĠStud ies +Ġpregn ant +h ard +Ġsign als +I X +Ġpull ing +Ġgrad uate +Ġnomine e +D ate +Ġper mitted +Ġâ Ĥ¬ +ĠOk lahoma +St art +Ġauthor ized +Ġal arm +ĠC os +v an +Ġgener ations +c ular +Ġdr agon +ĠSoft ware +ĠEd ward +Ġcontro ller +S en +ge red +ĠV ik +Ġappro ached +Th ank +Ġcan ce +Ġform ula +ĠSm all +Ġweak ness +Ġr amp +it udes +j ud +Ġbrill iant +Ġacc us +s ource +Ġ8 00 +ĠE vil +S w +Ġhom eless +we ek +i ens +r ics +ĠTh ird +T O +Ġorgan ic +Ġpresent ation +ag h +ĠDown load +v ation +Ġas sembly +or able +hold ers +ĠBern ie +ĠHel p +Ġt ong +ĠF ight +Ġbe ach +B ook +ĠL ic +Ġr ush +ĠR ound +ou p +ĠMar x +Ġcalcul ated +ĠDe vil +ĠSar ah +Ġoccasion ally +Ġbul let +Av ailable +g ate +Ġ9 1 +Ġh osp +Ġprom ises +ĠH IV +ĠSt adium +ĠSt ock +ĠCorpor ation +g age +N G +ĠC redit +Ġs ne +ib l +Ġacc um +s uch +Ġterror ists +Ġconscious ness +ĠZ h +Ġdram a +ool a +pir ation +Ġlab our +ĠN in +Ġut ter +Ġdemocr atic +Ġass ass +il ation +Ġg est +Ġab road +Ġmet ab +Ġs orts +Ġfl av +U B +Ġm g +ĠNot hing +ĠO d +Ġmus ical +200 9 +Ġdro ps +oc ated +ater al +0000 00 +Ġg re +Ġequ ality +Ġburd en +Ġv ig +ĠLe ader +-------- ---- +Ġcere mony +Ġf ighter +Ġact ors +Ġ æ +am an +F i +Ġal ign +put er +Ġe lder +ĠN SA +Ġrepresent ation +ĠOnt ario +IT H +usal em +Ġharass ment +itz er +Ġsy mp +Ġbox es +ĠD R +Ġman ifest +at re +Ġ ^ +Ġd ies +le ton +Ġmiss ions +et he +Ġres olve +Ġfollow ers +Ġas c +Ġk m +l ord +am med +Ġsil ent +ĠAssoci ated +Ġtim ing +Ġprison ers +ĠK ings +ĠF ive +Ġtow er +Ġappro aches +Ġprecise ly +Ġb ureau +ĠM other +ĠI ss +Ġkey board +it ual +Ġfund ed +Ġstay ing +Ġpsych ological +Ġm ile +ĠLe on +ĠBar b +w ill +Ġw ider +ĠAtl antic +Ġt ill +ĠR ome +ro t +Ġaccomp an +Ġfl our +ac o +W orld +ĠExp ress +ĠY u +C or +Ġple ased +part y +Ġpoint ing +Ġinf lation +Ġro y +Ġ ), +ain er +Ġwedd ing +orm on +Ġrequ iring +Ġqual ified +Ġse gment +EN D +Ġs izes +e als +Ġcor rupt +ass ador +Ġcele b +Ġdream s +ĠM ess +Ġcheck ing +ĠV ersion +Ġprep aring +Ġact ively +ĠD iff +Ġl ux +ĠW inter +act eria +ĠN E +Ġdep uty +Ġtrans gender +Ġsum mary +Ġin her +er ies +ch ar +ĠY an +Ġkn ock +ĠP ath +Ġl ip +roll er +Ġimp ression +Ġcelebr ate +Ġsl ide +Ġgu ests +Ġcl ip +F S +Ġsav ings +Ġcapt ain +Ġleg acy +ĠDen ver +Ġw ounded +tab oola +AC T +Ġpurs ue +Ġo xy +Ġ q +Ġsem i +ĠN eed +ĠAff airs +Ġob sc +Ġcheck ed +Ġd ual +C ode +ĠM D +le m +ult y +Ġ © +ĠEl izabeth +Ġcent uries +ard ed +s rc +Ġev ident +enn is +at in +Ġunemploy ment +ĠMar io +Ġint im +Ch rist +Ġbi ological +Ġsold ier +ĠAdd ed +Ġm ath +ĠG il +Ġbi as +Ġd ating +ĠO cean +Ġm ice +M us +h ire +ĠT es +Ser ver +lim ited +S ize +Ġmet ers +Ġrock et +es see +Ġcertific ate +ĠIran ian +AS S +Ġgr id +D ec +Ġro lling +com mun +ĠSwed en +b ury +Ġtiss ue +Ġrac ism +ĠL ocal +Ġmyster y +Ġexam ine +Ġst em +Ġs its +Ġhop ed +ot ing +Ġdial ogue +Ġpers u +W atch +l ay +M AN +Ġch ronic +ĠPort land +mark et +ĠS EC +Ġparalle l +Ġsc andal +Ġcar ries +Ġphenomen on +h uman +ack er +ĠO x +Ġretire ment +tain ment +ov ie +ĠG ear +Ġd uties +Ġdo se +Ġsc roll +M B +in f +Ġsa uce +Ġland scape +red dit +ĠChampions hip +ĠRed dit +al id +Ġco in +Ġover s +Ġpost ing +ab out +Ġf el +and y +Ġb old +Ġfocus ing +e ffect +G R +Ġde emed +Ġrecommend ations +Ġste pped +Ġvot er +ĠDe ep +ĠInst agram +Ġmoder ate +ĠMary land +Ġrestrict ed +ĠM B +ĠCh all +Ġto b +Ġc ir +ĠO cc +ĠE ver +Ġcoll aps +IN FO += - +ĠP 
ict +ĠAcc ount +n c +Ġo ught +Ġex port +Ġdr unk +( ' +Ġw ise +ĠM ort +ne cess +Ġan cest +ĠInc re +Ġfrequ ent +m ir +Ġinterpret ation +Ġdepend ent +Ġco ins +ĠB ol +V ideo +ĠJust in +Ġfat al +Ġcook ing +Ġconf usion +ip her +Ġcust ody +ĠMor gan +om ach +ĠGovern or +Ġrestaur ants +el ing +Ġacknowled ged +Ġthe r +Ġgen es +ch ing +He y +Ġtact ics +ĠMex ican +Ġv end +Ġhe s +qu er +Ġnot ing +ĠCamer on +Ġtarget ing +ro ck +Ġcred its +Ġemot ions +Ġrepresent atives +new s +Ġlegisl ative +Ġrem oving +Ġtweet ed +ĠCar ter +ĠF ixed +Ġfor cing +Ġspeak er +Ġm ales +ĠViet nam +l ined +Ġconcept s +Ġvo ices +o ir +ĠT rib +W he +ĠJer usalem +ĠS ant +Ġc ul +Ġl ady +ĠHaw ai +Ġar ts +ĠIn n +ĠMach ine +ĠEm peror +Ġsl ot +g ly +ĠPro cess +II I +Ġathlet es +ĠTem ple +ĠRep resent +Ġpres c +Ġt ons +Ġgold en +Ġp unch +ĠG R +iver pool +Ġen act +Ġlob by +Ġm os +Ġpick ing +Ġlif etime +Ġcogn itive +E ach +z o +Ġd ub +Ġcons ists +ol n +Ġf estival +am ous +Ġint ellig +w ords +ĠSm art +Ġde le +Ġl apt +Ġmag ical +ĠS in +b us +ur ities +igh th +ĠRub y +ĠS ure +ol ving +Ġj un +O ST +Ġimp osed +Ġast ron +Ġcor rel +ĠN S +ĠK it +ĠF uture +b urn +Ġimm une +oc us +Ġcour ses +ĠSt ring +Ġle an +Ġg host +Ġout comes +Ġexp ense +Ġevery day +Ġaccept able +A h +Ġequ ipped +Ġor ange +F R +ĠD utch +Th ough +ĠR ank +Q U +ĠRober ts +wh at +re nd +Ġdisapp ear +Ġsp awn +ĠL am +o is +Ġdes erve +Ġmin imal +Ġnerv ous +ĠW ould +Ġro ok +ĠV ancouver +Ġres ign +sh ire +ĠW orks +ĠB uild +Ġafford able +ĠG ary +ĠAren a +Ġh anging +Ġimpl ications +ĠS ong +Ġmain taining +Ġgu ards +C ON +Ġder ived +Ġexecut ed +Ġthe ories +Ġqu oted +ĠAnd re +og a +sel ess +in fo +ĠBel g +Ġt ears +ĠSur v +Ġbirth day +ig ious +im mer +Ġspect rum +Ġarchitect ure +Ġrec ruit +arm a +T able +Ġmon sters +ĠG ov +Ġdest ination +Ġattract ive +Ġf oss +ĠMore over +Ġpres ents +TH E +Ġrep ly +pt on +Ġc um +Ġdel ight +Ġaffect s +Ġdon ations +ĠT oy +ĠH im +M ENT +Ġover come +it ched +ĠFant asy +ĠH at +ĠBe ast +b ott +Ġinvestig ations +R un +Ġhun ting +d i +f und +Ġs essions +est yle +Ġport ray +oid s +Y eah +Ġcommun icate +Ġcom edy +ĠY ang +Ġbel t +ĠMar ine +Ġpredict ed +Pl ay +Ġimportant ly +Ġremark able +Ġelim inate +D avid +Ġb ind +V ID +Ġadvoc ates +ĠG aza +im p +D B +ĠN a +ĠSim ilar +I ES +Ġchar ity +v as +m ath +Ġâ ĸ +ok er +nd um +Ġcap s +ĠH al +2 000 +e an +Ġfle et +Ġrec re +R ight +Ġsleep ing +ij ing +k ind +Ġdesign ated +à ¤ +Ġanim ation +ke e +ĠInt rodu +Ġ/ > +Ġdelay ed +Ġtrem end +Ġcur ious +U se +Ġle ct +d am +Ġinnov ation +ĠPoint s +Ġload ing +Ġdisp ute +ct ic +ird s +ĠB Y +Ġn urs +ĠVal ue +ION S +ĠH um +Ġtem plate +m ers +Ġappear ances +ĠEnter tainment +Ġtransl ation +Ġsa ke +Ġbene ath +Ġin hib +Ġe uro +abet es +Ġstud ying +ĠM as +Ġper ceived +Ġexam ined +Ġe ager +Ġco aches +Ġim per +ch i +Ġprodu ces +" ). 
+ĠEvery one +Ġm unicip +Ġg irlfriend +Ġh ire +ĠV ice +Ġsu itable +op y +Ġin equ +ĠD uke +f ish +f irst +ĠO bs +Ġinter ior +ĠBru ce +ĠR y +Ġanal ys +Ġconsider able +Ġfore cast +Ġf ert +ors hip +ĠD rug +ĠA LL +: " +th ur +ĠM ail +Ġball ot +Ġinst antly +ĠCh annel +Ġp icks +Ġ198 9 +Ġt ent +ol i +Ġcivil ian +b ling +ell o +b u +Ġin ch +Ġlog o +Ġcooper ation +Ġwal ks +Ġinvest ments +Ġimp rison +ĠF estival +ĠK y +Ġleg ally +Ġg ri +ch arg +S l +Ġthreat ening +du ction +fl ow +Ġdismiss ed +ibr aries +c ap +e le +ĠMc G +ĠHar vard +ĠConserv ative +ĠC BS +p ng +Ġro ots +ĠH aving +umb led +ĠF un +\ / +ĠS earch +ple x +Ġdiscuss ing +Ġcontin u +ĠT ai +ĠW ik +F ree +f it +Ġref use +Ġmanag ing +Ġsy nd +ip edia +w alk +Ġprofession als +Ġguid ance +Ġunivers ities +Ġas semb +unt u +F inally +AS E +ĠAut o +ĠH ad +Ġann iversary +L D +ĠD ur +ĠUlt imate +ih ad +pro duct +Ġtrans it +Ġrest ore +Ġexpl aining +Ġass et +Ġtransfer red +Ġbur st +ap olis +ĠMag azine +ĠC ra +ĠB R +gg ed +ĠH E +M ich +b et +ĠL ady +yl um +erv es +Ġme ets +wh ite +L og +Ġcorrespond ing +Ġins isted +G G +Ġsurround ed +Ġt ens +Ġl ane +Ġco inc +h ome +Ġexist ed +ect ed +ĠDou ble +lam m +Ġske pt +ex p +Ġper ception +ie v +ĠBe ing +o ft +Ġadop t +. : +] ; +Wind ows +Ġsatell ite +AS H +Ġinf ant +d escription +ĠMe anwhile +c m +oc a +ĠT reat +act or +Ġtob acco +ĠN orm +em ption +Ġfl esh +Ġj e +o op +ĠHe aven +Ġbe ating +an im +Ġgather ing +Ġcult iv +G O +ab e +ĠJon athan +ĠSaf ety +Ġbad ly +pro t +Ġcho osing +Ġcontact ed +Ġqu it +Ġdist ur +Ġst ir +Ġto ken +D et +ĠP a +Ġfunction ality +00 3 +s ome +Ġlimit ations +Ġmet h +b uild +con fig +N T +re ll +ble m +ĠM om +Ġveter ans +ĠH u +Ġtrend s +are r +ĠG iven +ĠCa ption +m ay +AS T +Ġwond ering +ĠCl ark +n ormal +Ġsepar ated +Ġdes p +st ic +b rew +Ġrel ating +ĠN ik +ĠF arm +Ġenthus i +g ood +d eb +Ġactiv ist +Ġm art +Ġexplos ion +ĠEconom ic +L ink +Ġins ight +Ġconven ient +Ġcounter part +su pport +ĠV irt +ag en +ĠTenn essee +ĠSim on +ĠA ward +OC K +ĠF igure +Ġoverse as +Ġpr ide +ĠC as +n ote +m g +C urrent +Ġdispl ays +cont ent +Ġtravel ing +Ġhosp itals +ĠFin ancial +ĠP ast +Ġdefend ant +Ġstream ing +m ble +ĠBer lin +uk i +Ġdist ribut +Ġant ib +Ġch ocolate +ĠCast le +Ġinter rupt +ĠR ow +Ġconvers ion +Ġbug s +ĠR ather +li est +L Y +ĠJe an +com mon +ak h +Ġ1 30 +ot ton +ĠDe an +Ġam endment +Ġgame play +ĠWar ren +od a +Ġhigh lights +Ġir re +ĠNAT O +Ġball s +Ġdemand ing +U RE +ĠL uke +F igure +st op +on ia +z one +iz ers +ĠW R +Ġaward ed +Ġregul atory +ĠH art +ĠS N +pl ing +Ġs our +ĠP ixel +us ive +Ġf et +ĠS ent +Ġautom atic +Ġf er +vern ment +ĠKh an +T ON +f ather +Ġextraord inary +th rop +ĠP ython +ĠG PU +Ġsex ually +Ġdesk top +it ivity +ĠAnton io +Ġo rient +Ġe ars +ob by +ous es +vertis ements +Ġmanufacture rs +ic ient +min ute +Ġconv iction +Ġg arden +p ublic +Ġsatisf ied +f old +O K +Ġin hab +ĠTh ink +Ġprogram me +Ġst omach +Ġcoord in +Ġh oly +Ġth reshold +Ġr het +Ġser ial +Ġemploy ers +ĠEvery thing +ra h +Ġb other +Ġbr ands +Val ue +ĠT ed +ĠPlan et +Ġp ink +ĠFurther more +s a +P E +re ck +ĠUS D +ot te +Ġ& & +Ġland ed +g ets +Ġprodu cers +Ġhealth care +Ġdomin ant +Ġdest ro +Ġam ended +ch ron +Ġf its +ĠSy d +ĠAuthor ity +AT CH +Ġfight s +ĠL LC +Ġ-- - +ĠCor p +Ġtox ic +spe cific +ĠC orn +ĠChe l +Ġtele phone +ĠP ant +Ġmyster ious +aun ch +od ox +med ia +Ġwitness es +ag u +Ġquestion ed +ĠBre xit +ĠRem ember +ene z +Ġend orse +iat ric +ĠId ent +Ġridic ulous +1 10 +Ġpr ayer +Ġscient ist +Ġ19 50 +ĠA qu +Ġunder ground +ĠU FC +m are +ĠL ater +w ich +Ġsubsc rib +Ġhost s +Ġer r +Ġgr ants +ant om +Ġsum mon +ear 
ly +ĠC lear +ĠPr im +Ġsusp ension +Ġguarant eed +app er +Ġr ice +ĠSe an +ĠSh in +Ġrefere ndum +Ġfl ed +r ust +Ġ3 60 +ter y +Ġsh ocked +B R +ĠO il +ĠAll ah +Ġpart ly +Ġign or +Ġtrans mission +Ġhom osexual +ivers al +Ġhop efully +ãĤ ¤ +Ġless on +L eg +Ġ .. +Y et +t able +app ropri +re tt +Ġbo ards +Ġincor rect +Ġb acteria +ar u +am ac +Ġsn ap +.' " +Ġpar ad +t em +he art +Ġav ailability +Ġw isdom +Ġ( + +Ġpri est +ĠÂł ĠÂł +O pen +Ġsp an +Ġparam eter +Ġconv ince +Ġ( %) +r ac +Ġf o +Ġsafe ly +Ġconver ted +ĠOlymp ic +Ġres erve +Ġhe aling +ĠM ine +M ax +Ġin herent +ĠGra ham +Ġinteg rated +D em +Ġpip eline +Ġapp lying +Ġem bed +ĠCharl ie +Ġc ave +200 8 +Ġcons ensus +Ġre wards +P al +ĠHT ML +Ġpopular ity +look ing +ĠSw ord +ĠAr ts +' ) +Ġelect ron +clus ions +Ġinteg rity +Ġexclus ively +Ġgr ace +Ġtort ure +Ġburn ed +tw o +Ġ18 0 +P rodu +Ġent reprene +raph ics +Ġg ym +ric ane +ĠT am +Ġadministr ative +Ġmanufacture r +Ġ vel +ĠN i +Ġisol ated +ĠMedic ine +Ġback up +Ġpromot ing +Ġcommand er +Ġfle e +ĠRus sell +Ġforg otten +ĠMiss ouri +Ġres idence +m ons +Ġrese mb +Ġw and +Ġmeaning ful +P T +Ġb ol +Ġhe lic +Ġwealth y +Ġr ifle +str ong +row ing +pl an +as ury +âĢ¦ . +Ġexpand ing +ĠHam ilton +Ġrece ives +S I +eat ures +ĠAn im +RE E +P ut +Ġbrief ly +ri ve +Ġstim ul +Ġ`` ( +Ġ __ +Ġch ip +Ġha z +Ġpri ze +ĠTh ings +AC E +ul in +d ict +ok u +Ġassoci ate +ock ets +y outube +St ory +ateg ory +Ġm ild +ail ing +ĠY e +O rig +ĠK a +or ig +Ġpropag anda +Ġan onymous +Ġstrugg led +Ġout rage +AT ED +ĠBe ijing +r ary +Ġle ather +Ġworld s +Ġbroad er +12 5 +id al +ĠBet ter +Ġt ear +E xt +Ġpropos als +Ġit er +ĠSqu ad +Ġvol unt +m i +D id +ĠP u +p in +Ġspeak ers +Ġb orders +Ġfig ured += ' +Ġsimultane ously +aed a +Ġcharg ing +Ġur ged +Ġcon j +25 6 +ĠG ordon +mer ce +Ġdocument ary +Sh are +it ol +ON E +ĠG arden +h att +ĠThom pson +ane ous +ap ore +Ġt anks +Ġless ons +tr ack +Ġout standing +Ġvolunte ers +Ġsp ray +Ġmanag ers +l arge +Ġcamp s +Ġart ificial +ĠR u +Ġb ags +th al +Ġcompat ible +ĠBl ade +Ġf ed +Ġarg ues +F I +Ġunf air +Ġcor n +Ġoff set +Ġdirect ions +Ġdisappoint ed +ĠCon vention +Ġview ing +M E +oc ity +Ġtown s +Ġlay ers +Ġro lled +Ġjump ed +Ġatt ribute +Ġun necess +inc oln +Ġsupp ose +ĠNet her +ch a +Ġbur ied +Ġsix th +B en +ress ing +OU R +Ġw ound +Ġcy cl +Ġmechan isms +Ġcongress ional +ĠE lement +Ġagre ements +Ġdec or +Ġclos est +ĠM it +Go ogle +} } +Ġm ixture +Ġflu id +S ign +ĠSch olar +Ġp ist +ask et +ab ling +Ġrac ing +he ro +ri el +ass y +Ġche aper +b en +Ġvert ical +amac are +ĠRead ing +g ments +Ġhelic op +Ġsacr ifice +ay a +p aren +V A +ĠL es +ĠStud io +Ġviol ations +ĠAn na +ac er +é ¾ +ĠR at +ĠBe ck +ĠD ick +ĠA CT +Ġcomp osition +Ġtext ure +ĠO wn +Ġsmart phone +ĠN A +Ġfor b +im port +Ġdef ending +il st +re r +Ġo h +ĠJere my +Ġbank ing +cept ions +Ġrespect ive +/ . 
+Ġdr inks +ĠW i +Ġb ands +ĠL iverpool +Ġg rip +ĠB uy +Ġopen ly +Ġreview ed +per t +Ġver ify +ĠCo le +ĠW ales +M O +Ġun pre +Ġshel ter +ĠIm perial +Ġgu i +ĠD ak +Ġsuggest ions +Ġexplicit ly +Ġsl ave +Ġblock chain +Ġcompet ing +Ġprom ising +S ON +Ġsoc cer +Ġconst itution +4 29 +Ġdist ract +ĠU ser +es ides +ĠMet hod +ĠTok yo +Ġaccompan ied +Cl ient +s ur +al og +Ġident ification +Ġinv asion +as ma +Ġindust ries +pp ers +Ġsub tle +ĠUn it +n atural +Ġsurv ived +Ġfl aw +ĺ ħ +ĠH oll +Ġdef icit +Ġtut orial +ĠCh ance +Ġarg uing +Ġcontem porary +Ġinteg ration +for ward +Ġt um +it is +Ġh iding +ĠD omin +ĠT an +ĠB uilding +ĠV in +Ġspokes person +ĠNot es +Ġemer ging +Ġprepar ation +Ġpro st +Ġsuspect s +Ġaut onom +D escription +Ġdeal t +ĠP ear +Ġstead y +Ġdecre ased +Ġso vere +ĠCl in +Ġgrad ually +ors es +ĠW AR +S erv +ãĤ ¢ +h r +Ġd irty +ĠB arn +ĠB C +Ġd il +Ġcal endar +Ġcompl iance +Ġch amber +b b +Ġpass enger +ate ful +ĠT itle +ĠSyd ney +ĠG ot +Ġdark ness +Ġdef ect +Ġpack ed +ass ion +Ġgod s +Ġh arsh +IC K +le ans +Ġalgorith m +Ġoxy gen +Ġvis its +Ġbl ade +Ġkil omet +ĠKent ucky +Ġkill er +P ack +enn y +Ġdiv ine +Ġnom ination +be ing +Ġeng ines +Ġc ats +Ġbuff er +ĠPh ill +Ġtra ff +AG E +Ġtong ue +Ġrad iation +ere r +m em +ĠExpl icit +é¾ į +Ġcou ples +Ġphys ics +ĠMc K +Ġpolit ically +aw ks +ĠBl oom +Ġwor ship +e ger +ut er +ĠF O +Ġmat hemat +Ġsent enced +Ġdis k +ĠM arg +Ġ/ * +P I +Ġoption al +Ġbab ies +Ġse eds +ĠScott ish +Ġth y +] ] +ĠHit ler +P H +ng th +Ġrec overed +ing e +Ġpow der +Ġl ips +Ġdesign er +Ġdis orders +Ġcour age +Ġch aos +" },{" +Ġcar rier +b ably +H igh +ĠR T +es ity +l en +Ġrout es +u ating +F il +N OT +w all +s burgh +Ġeng aging +ĠJava Script +ore r +li hood +Ġun ions +ĠF ederation +ĠTes la +Ġcomple tion +ĠT a +Ġprivile ge +ĠOr ange +Ġne ur +paren cy +Ġb ones +Ġtit led +Ġprosecut ors +ĠM E +Ġengine er +ĠUn iverse +ĠH ig +n ie +o ard +Ġheart s +ĠG re +uss ion +Ġmin istry +Ġpen et +ĠN ut +ĠO w +ĠX P +in stein +Ġbul k +S ystem +ic ism +ĠMarket able +Ġpre val +Ġpost er +Ġatt ending +ur able +Ġlicens ed +ĠG h +et ry +ĠTrad able +Ġbl ast +à ¤ +ĠTit an +ell ed +d ie +H ave +ĠFl ame +Ġprof ound +Ġparticip ating +Ġan ime +ĠE ss +Ġspec ify +Ġregard ed +ĠSpe ll +Ġs ons +own ed +Ġm erc +Ġexper imental +land o +h s +ĠDun geon +in os +Ġcomp ly +ĠSystem s +ar th +Ġse ized +l ocal +ĠGirl s +ud o +on ed +ĠF le +Ġconstruct ed +Ġhost ed +Ġsc ared +act ic +ĠIs lands +ĠM ORE +Ġbl ess +Ġblock ing +Ġch ips +Ġev ac +P s +Ġcorpor ation +Ġo x +Ġlight ing +Ġneighb ors +ĠU b +ar o +Ġbe ef +ĠU ber +F acebook +ar med +it ate +ĠR ating +ĠQu ick +Ġoccup ied +Ġaim s +ĠAdd itionally +ĠInt erest +Ġdram atically +Ġhe al +Ġpain ting +Ġengine ers +M M +ĠM ust +Ġquant ity +P aul +Ġearn ings +ĠPost s +st ra +ãĥ¼ ãĥ +Ġst ance +Ġdro pping +sc ript +Ġd ressed +M ake +Ġjust ify +ĠL td +Ġprompt ed +Ġscr ut +Ġspeed s +ĠGi ants +om er +ĠEd itor +Ġdescrib ing +ĠL ie +ment ed +Ġnow here +oc aly +Ġinst ruction +fort able +Ġent ities +Ġc m +ĠN atural +Ġinqu iry +Ġpress ed +iz ont +for ced +Ġra ises +ĠNet flix +ĠS ide +Ġout er +Ġamong st +im s +ows ki +Ġclim b +ne ver +Ġcomb ine +d ing +Ġcomp r +Ġsignific ance +Ġremem bered +ĠNev ada +ĠT el +ĠSc ar +ĠWar riors +ĠJ ane +Ġcou p +b as +Ġtermin al +, - +O H +Ġt ension +Ġw ings +ĠMy ster +�� �� +ĠUn like +val id +viron ments +ĠAl i +Ġn aked +book s +ĠM un +ĠG ulf +Ġd ensity +Ġdim in +Ġdesper ate +Ġpres idency +Ġ198 6 +h y +IN D +Ġun lock +im ens +Ġhand led +ĠE b +Ġdisapp eared +Ġgen re +Ġ198 8 +Ġdetermin ation +St ream +ik o +ap ters +Ġacknow ledge +J an +Ġcapital ism +P at +Ġ20 20 +Ġpain 
ful +Ġcur ve +Ġbom bs +st orm +ĠMet al +en cer +ĠF ig +ĠA aron +anc hes +Ġins piration +Ġexha ust +t ains +ash i +Ġdesc ript +Ġr itual +ĠChel sea +Ġpromot ion +ĠH ung +ĠW ard +iv a +ĠE T +Ġto ss +all ow +ĠFranc is +D ep +Ġhapp iness +ĠGl ass +Ġbet a +Ġstreng then +N E +o a +Ġbutt ons +ĠMur ray +Ġkick ed +Qu est +ĠT alk +ĠS everal +ĠZ ero +Ġdr one +ul k +Ġc am +ĠM obile +Ġprevent ing +Ġret ro +ĠA x +Ġcru el +Ġflo at +. ), +Ġfil ing +ĠGr ant +ĠB or +Ġr ib +Ġchampions hip +ĠM erc +Ġsty les +Ġc ake +Ġbuild s +ĠS elf +io x +Ġep ic +oy d +B el +ĠSt ew +. ( +ah u +ĠBe yond +Ġout s +Ġsol o +ĠT ree +Ġpres erve +Ġt ub +AR E +ro c +ĠIm pro +ĠW right +Ġbu nd +Ġtr aged +Ġoccas ional +b ian +Sec ond +r ons +Ġinter actions +form ed +s ing +Ġown s +Ġh ockey +Gener al +Ġlog ical +Ġexp end +Ġesc al +ĠGr iff +ĠC rown +ĠRes erve +Ġsto pping +Ġexc use +sec ond +Ġoper ated +Ġre aches +ĠMal ays +Ġpoll ution +ĠBrook lyn +Ġde lete +Ġhas h +Bl ock +ah a +âĢ ³ +Ġsh orter +p iece +> >> +ĠM ormon +t or +Ġpartic les +ĠB art +ry ption +Ġad min +Ġsqu ee +VID IA +Ġcreat or +iam eter +ic ular +N BC +Ġgrab bed +Ġn odd +Ġr ated +Ġrot ation +Ġgr asp +Ġexcess ive +ĠE C +ĠWh it +Ġinvent ory +ault s +ĠF B +Ġe cosystem +Ġbill ions +Ġvent ure +n amed +Ġdef ender +out e +Inst ead +ir able +W ar +Ġassum ption +Ġb ite +Ġearth qu +t ail +sp ace +Ġgif ts +boy s +Ġinev itable +Ġstruct ural +Ġbenef icial +Ġcompe lling +h ole +erv ation +Ġco at +o j +inc arn +ĠY ears +Ġdetermin ing +Ġrhet oric +Ġbound aries +Ġwh ites +A nt +add y +) - +ra ham +eter min +Ġhar vest +ĠCon c +Ġlapt op +ĠM atch +Ġenjoy ing +cc a +oll ar +Ġtri ps +Ġadd iction +ĠS ak +Ġpow ered +Ġc ous +ĠRuss ians +ie re +Ġret rie +qu ality +Ġdiff er +Ġking dom +ĠL aur +ĠCap itol +Ġcon clusions +ĠAl tern +ĠN av +Ġtrans parent +B ER +G roup +ĠCom plete +Ġinf er +Ġint rig +Ġins ane +R O +oph ob +is en +qu al +Mich ael +Ġm useum +ĠP ope +Ġres et +r ative +f ive +Ġagg reg +itte es +osit ory +Ġcar b +ĠRec ord +Ġdec ides +ĠF ix +Ġexcept ions +ĠCommission er +un s +ĠEnvironment al +Ġlegend ary +ist ence +Ġtun nel +k m +Ġins ult +Ġt roll +Ġsh ake +Ġdet ention +qu es +ĠCh rome +ĠF iles +Ġsub t +Ġprospect s +Ġpro l +re nder +pro of +Ġperform ances +St r +Ġh ref +ern ame +Ġachieve ment +Ġf ut +F ull +ĠLe ban +go ogle +ãĥ Ī +amp a +May be +Ġproject ed +ĠE mb +Ġcol leg +Ġa wards +Ġâ Ķ +G old +ĠBl ake +ĠR aj +if ting +Ġp ending +Ġinst inct +Ġdevelop ments +Con nect +ĠM and +ĠW ITH +ĠPhilipp ines +prof ile +Ġalt ogether +ĠB und +ĠT D +oo oo +amp ed +ip h +Ġste am +Ġold est +Ġdet ection +ul pt +Ġ ç +ĠWay ne +200 6 +f a +Ġcir cles +ĠF u +Ġdon ors +appropri ate +ĠDak ota +j amin +Ġmotiv ated +Ġpurch ases +ĠLouis iana +ĠS pl +Ġgl obe +Ġ10 5 +z ip +c all +Ġdepart ments +Ġsustain able +10 5 +ĠO P +if iers +Ġprevent ed +Ġinc omp +ĠComm ander +Ġdom inated +Ġ » +Ġinvest ed +Ġcomplex ity +Ġin cl +Ġens uring +Ġreal m +yn c +ĠInd ependent +r ained +ĠJ en +ĠFl ight +Ġat he +Ġspec ulation +ĠT E +oc ate +t ic +Ġpl aint +her ry +Ġto y +Ġ1 11 +Ġpl ates +st atus +ĠIs a +Ġdev oted +C op +ĠE S +25 5 +ur rency +M ain +Ġsl aves +Ġpe pper +Ġqu otes +Ġce iling +ĠF ish +Ġtrans formation +Ġfra ction +Ġadvant ages +Ġto ile +Ġstun ning +Ġmo ist +bre aking +s i +ĠL ocation +ĠMed ium +Ġtext s +Ġu gly +Ġb io +. 
âĢĶ +ĠB ased +Ġtr ains +ĠW ing +ĠAn cient +ĠRec ords +ĠH ope +Spe cial +ades h +ob i +[ / +Ġtempor arily +V er +h u +os er +Ġover night +Ġm amm +ĠTre asury +ĠV enezuel +ĠMeg a +Ġt ar +Ġexpect s +bl ack +or ph +\\ \\ +Ġaccept ance +Ġrad ar +s is +Ġjun ior +Ġfram es +Ġobserv ation +ac ies +P ower +ĠAdv anced +M ag +olog ically +ĠMe chan +Ġsent ences +Ġanaly sts +augh ters +force ment +Ġv ague +Ġcl ause +Ġdirect ors +Ġeval uate +Ġcabin et +M att +ĠClass ic +A ng +Ġcl er +ĠB uck +Ġresear cher +Ġ16 0 +Ġpoor ly +Ġexperien cing +ĠP ed +ĠMan hattan +Ġfre ed +Ġthem es +ad vant +Ġn in +Ġpra ise +10 4 +ĠLib ya +b est +Ġtrust ed +Ġce ase +Ġd ign +D irect +Ġbomb ing +Ġm igration +ĠSci ences +Ġmunicip al +ĠA verage +Ġgl ory +Ġreve aling +Ġare na +Ġuncertain ty +Ġbattle field +ia o +G od +Ġc inem +ra pe +el le +ap ons +Ġlist ing +Ġwa ited +Ġsp otted +ke ley +ĠAud io +e or +ard ing +idd ing +ig ma +ĠN eg +Ġl one +Ġ ---- +ex e +d eg +Ġtrans f +Ġwas h +Ġsl avery +Ġexpl oring +ĠW W +ats on +Ġen cl +l ies +ĠC reek +Ġwood en +Man ager +ĠBr and +um my +ĠAr thur +Ġbureau cr +Ġbl end +ar ians +F urther +Ġsupposed ly +Ġwind s +Ġ19 79 +Ġgrav ity +Ġanalys es +ĠTra vel +ĠV eter +Ġd umb +Ġaltern ate +g al +Ġconsum ed +Ġeffect iveness +.' ' +Ġpath s +ond a +L A +ĠStr ong +Ġen ables +Ġesc aped +Ġ" " +Ġ1 12 +Ġ198 3 +Ġsm iled +Ġtend ency +F ire +Ġp ars +ĠR oc +Ġl ake +Ġf itness +ĠA th +ĠH orn +Ġh ier +Ġimp ose +m other +Ġp ension +ic ut +bor ne +ic iary +. _ +ĠS U +Ġpol ar +is y +eng u +itial ized +AT A +w rite +Ġexerc ises +ĠD iamond +ot ypes +Ġharm ful +on z +Ġprint ing +st ory +Ġexpert ise +ĠG er +Ġtraged y +ĠF ly +Ġd ivid +amp ire +st ock +M em +Ġre ign +Ġun ve +Ġam end +ĠProp het +Ġmut ual +ĠF ac +Ġrepl acing +H ar +ĠCirc uit +Ġthro at +ĠSh ot +Ġbatter ies +Ġto ll +Ġaddress ing +ĠMedic aid +Ġp upp +ĠN ar +ol k +Ġequ ity +M R +ĠHis pan +ĠL arge +m id +D ev +Ġexp ed +Ġdem o +ĠMarsh all +erg us +Ġf iber +Ġdiv orce +ĠCre ate +Ġsl ower +ĠPark er +ĠStud ent +ĠTr aining +Ret urn +ĠT ru +Ġc ub +ĠRe ached +Ġpan ic +Ġqu arters +Ġre ct +Ġtreat ing +Ġr ats +ĠChristian ity +ol er +Ġsac red +Ġdecl are +ul ative +et ing +Ġdeliver ing +est one +Ġt el +ĠL arry +Ġmet a +ac cept +art z +ĠRog er +hand ed +Ġhead er +Ġtra pped +ĠCent ury +Ġkn ocked +ĠOx ford +Ġsurviv ors +b ot +Ġdemon stration +Ġd irt +Ġass ists +OM E +ĠD raft +ortun ate +fol io +pe red +ust ers +g t +ĠL ock +Ġjud icial +ver ted +Ġsec ured +out ing +ĠBook s +Ġhost ing +Ġlif ted +l ength +Ġj er +Ġwhe els +ĠR ange +umbn ails +Ġdiagn osis +te ch +ĠStew art +ĠP ract +Ġnation wide +Ġde ar +Ġoblig ations +Ġgrow s +Ġmand atory +Ġsusp icious +! 
' +A pr +G reat +Ġmort gage +Ġprosecut or +Ġeditor ial +ĠK r +Ġprocess ed +ung le +Ġflex ibility +Ear lier +ĠC art +ĠS ug +Ġfoc uses +Ġstart up +Ġbre ach +ĠT ob +cy cle +ãĢ Į +ro se +Ġb izarre +ãĢ į +Ġveget ables +$ $ +Ġret reat +osh i +ĠSh op +ĠG round +ĠSt op +ĠHawai i +ĠA y +Per haps +ĠBe aut +uff er +enn a +Ġproduct ivity +F ixed +cont rol +Ġabs ent +ĠCamp aign +G reen +Ġident ifying +Ġreg ret +Ġpromot ed +ĠSe ven +Ġer u +ne ath +aug hed +ĠP in +ĠL iving +C ost +om atic +me ga +ĠN ig +oc y +Ġin box +Ġem pire +Ġhor izont +Ġbr anches +Ġmet aph +Act ive +ed i +ĠFil m +ĠS omething +Ġmod s +inc ial +ĠOrig inal +G en +Ġspir its +Ġear ning +H ist +Ġr iders +Ġsacr ific +M T +ĠV A +ĠS alt +Ġoccup ation +ĠM i +Ġdis g +lic t +Ġn it +Ġn odes +e em +ĠP ier +Ġhat red +ps y +ãĥ ī +Ġthe ater +Ġsophistic ated +Ġdef ended +Ġbes ides +Ġthorough ly +ĠMedic are +Ġbl amed +arent ly +Ġcry ing +F OR +pri v +Ġsing ing +ĠI l +Ġc ute +o ided +olit ical +ĠNe uro +å ¤ +Ġdon ation +ĠEag les +ĠG ive +T om +Ġsubstant ially +ĠLic ense +ĠJ a +Ġg rey +ĠAn imal +ĠE R +ĠU nd +Ġke en +Ġconclud e +ĠMississ ippi +Eng ine +ĠStud ios +P ress +o vers +ll ers +Ġ3 50 +ĠR angers +Ġr ou +ert o +E p +iss a +iv an +Ġse al +ĠReg ist +dis play +Ġwe aken +u um +ĠComm ons +ĠS ay +Ġcult ures +Ġl aughed +Ġsl ip +Ġtreat ments +iz able +m art +ĠR ice +Ġbe ast +Ġob esity +ĠLa ure +ig a +Wh ich +hold er +Ġelder ly +Ġp ays +Ġcompl ained +Ġc rop +Ġpro c +Ġexplos ive +ĠF an +ĠAr senal +A uthor +ef ul +Ġme als +Ġ( - +id ays +Ġimag ination +Ġann ually +Ġm s +as ures +H ead +ik h +m atic +Ġboy friend +ĠCom puter +Ġb ump +Ġsur ge +ĠCra ig +ĠKir k +D el +medi ate +Ġscen arios +ĠM ut +ĠSt ream +Ġcompet itors +Ù Ħ +ĠStan ford +ĠRes ources +az ed +b age +Ġorgan is +ĠRe lease +Ġsepar ately +Ġha bits +Ġmeasure ments +ĠCl ose +Ġaccomp any +Ġg ly +Ġt ang +ĠR ou +Ġplug in +Ġcon vey +ĠChall enge +oot s +j an +Ġcur s +ĠRel ations +ke eper +Ġapproach ing +p ing +Spe aking +Ġarrang ement +ĠV I +are ttes +Ġaffect ing +Ġperm its +b ecause +Ġu seless +ĠH us +!! !! +Ġdestro ying +Un fortunately +Ġfasc inating +S em +Ġelect oral +Ġtrans parency +ĠCh aos +Ġvolunte er +Ġstatist ical +Ġactiv ated +ro x +We b +H E +ĠHamp shire +is ive +M ap +Ġtr ash +ĠLaw rence +st ick +C r +Ġr ings +EX T +Ġoper ational +op es +D oes +ĠEv ans +Ġwitness ed +P ort +Ġlaunch ing +ec onom +w ear +ĠPart icip +um m +cul es +ĠR AM +ĠT un +Ġass ured +Ġb inary +Ġbet ray +Ġexpl oration +ĠF el +Ġad mission +it ated +S y +Ġav oided +ĠSim ulator +Ġcelebr ated +ĠElect ric +¥ ŀ +Ġcl uster +itzer land +he alth +L ine +ĠN ash +at on +Ġsp are +Ġenter prise +ĠD IS +clud es +Ġfl ights +Ġreg ards +ĠÃ Ĺ +h alf +Ġtr ucks +Ġcontact s +Ġunc ons +ĠCl imate +Ġimm ense +N EW +oc c +ect ive +Ġemb od +Ġpat rol +Ġbes ide +Ġv iable +Ġcre ep +Ġtrig gered +ver ning +Ġcompar able +q l +Ġg aining +ass es +Ġ( ); +ĠG rey +ĠM LS +s ized +Ġpros per +" ? 
+Ġpoll ing +Ġsh ar +ĠR C +Ġfire arm +or ient +Ġf ence +Ġvari ations +g iving +ĠP i +osp el +Ġpled ge +Ġc ure +Ġsp y +Ġviol ated +Ġr ushed +Ġstro ke +ĠBl og +sel s +ĠE c +,' ' +Ġp ale +ĠColl ins +ter ror +ĠCanad ians +Ġt une +Ġlabor atory +Ġn ons +t arian +Ġdis ability +ĠG am +Ġsing er +al g +ĠSen ior +Ġtrad ed +ĠWar rior +Ġinf ring +ĠFrank lin +Ġstr ain +ĠSwed ish +Ġsevent h +ĠB enn +ĠT ell +Ġsynd rome +Ġwond ered +id en +++ ++ +ig o +Ġpur ple +Ġjournal ism +Ġreb el +Ġf u +bl og +Ġinv ite +ren cies +ĠCont act +Is rael +ĠCont ent +Ġche er +Ġbed room +ĠEngine ering +ĠQue ens +Ġd well +ĠPlay Station +ĠD im +ĠCol on +l r +Ġoper ates +Ġmotiv ation +US A +ast ered +C ore +ĠTr uth +ol o +OS E +ĠMem ory +Ġpred ec +Ġan arch +Ġ19 20 +ĠY am +à ¨ +b id +Ġgr ateful +Ġexc itement +Ġtre asure +Ġlong est +ct ive +Ġdes erves +Ġreserv es +Ġcop s +ĠOtt awa +ĠEgypt ian +ank ed +Ġart if +Ġhypot hesis +: / +Ġpurch asing +Ġlove ly +H P +Ġdiv ide +Ġstrict ly +Ġquestion ing +Ġtaxp ayers +ĠJ oy +Ġroll s +ĠHe avy +Ġp orts +Ġmag netic +Ġinf lamm +Ġbr ush +t ics +â ĪĴ +Ġbott les +pp y +Ġp add +ãĤ ¯ +m illion +Ġdevast ating +Ġcomp iled +Ġmed ication +Ġtw elve +ĠPer ry +Sp ace +im b +y our +Ġle aked +ĠT ar +Ġun ity +Ġinfect ed +Ġtravel ed +ID E +ĠMc Donald +t xt +ĠPr inc +Ġinter ven +ĠTai wan +ĠP ow +Ġbe aring +ĠTh read +Ġz ones +iz ards +un ks +Ch apter +ll or +Ġ · +Ġw ounds +Ġdisc retion +Ġsucceed ed +ik ing +Ġicon ic +C all +Ġscreen ing +ĠM is +ict s +Ġmin isters +Ġsepar ation +Pl ayer +Ġb ip +Ġbel oved +Ġcount ing +ĠE ye +ar ound +ing ing +Ġtable t +Ġoff ence +in ance +h ave +ĠInf o +ĠNin ja +Ġprotect ive +ĠC ass +M ac +ĠQual ity +N orth +Ġ ic +ĠCub a +ĠChron icle +ĠPro perty +Ġfast est +ot os +ĠG erm +OW N +Ġbo om +ĠStan ley +ergus on +Ġcle ver +Ġent ers +m ode +ter ior +ĠS ens +Ġlin ear +AR K +Ġcomp aring +Ġpure ly +Ġsaf er +ĠPot ter +Ġc ups +R T +Ġgl uc +Ġatt ributed +Ġdu pl +ĠP ap +Ġprec ious +Ġp a +iction ary +ĠT ig +ĠTo o +ol utions +st an +Ġrob ots +Ġlob b +Ġstat ute +Ġprevent ion +w estern +16 0 +ĠAct ive +ĠMar ia +h al +N one +ell ar +ĠK B +ĠPart ners +ĠSing le +ĠFollow ing +ang o +ac ious +Ġth ou +Ġk g +Ġinflu ential +ĠFriend s +S ur +ain ted +Ġfor ums +Ġst arter +Ġcitizens hip +ĠE lection +on ge +ot ation +os ph +;; ;; +ut ical +p ur +ere n +Ġaccus ations +bit ious +ab bit +ĠOr d +Post ed +ir k +Ġsens itivity +ic he +ĠAm y +ĠF ab +Ġsum mit +Ġped est +Ġrub ber +Ġagric ultural +Ġcan cel +A E +Ġin aug +Ġcont am +Ġfirm ly +i w +st age +ĠK an +Ġt ier +Ġinv ention +Ġtransl ated +ĠR ules +B ox +Tw itter +ID S +Ġp izza +Ġdeb ug +ĠD rop +v s +Ġh orses +b ig +Ġb oring +Ġh ood +ĠMcC ain +at ched +ĠBro s +Ġsk ip +Ġess ay +st at +ĠLeg ends +Ġam munition +au c +Ġshoot er +Ġun h +Ġsuppl ied +Ġgener ic +ĠS K +ib an +yr ics +Ġ25 5 +Ġclim bing +Form er +Ġfl ip +Ġjump ing +Ġfrust ration +ĠTer ry +Ġneighborhood s +Ġmed ian +be an +Ġbr ains +Follow ing +Ġsh aped +Ġdraw s +Ġal tered +J ack +Ġrecip es +Ġsk illed +we alth +ach i +e lection +Ġbehavi ors +de als +ĠU ntil +F e +Ġdecl aration +mar ks +ĠBet ween +cel ona +Ġres on +Ġbub ble +Am ong +Ġim perial +G S +Ġfemin ist +200 5 +ĠK yle +Ġaccount ing +ĠTe le +ĠT yr +Ġconnect ing +Ġre hab +ĠP red +s im +Ġmeant ime +Ġphys ician +M W +ĠCamp bell +ĠBr andon +Ġcontribut ing +ĠR ule +ĠWe ight +ĠN ap +Ġinter active +Ġv ag +Ġhel met +ĠCom b +f our +Ġsh ipped +Ġcomple ting +ĠP D +PD ATE +Ġspread ing +Ġsc ary +erv ing +ĠG as +Ġfr ank +s chool +Ġrom antic +Ġstab il +R ob +Ġaccur ately +Ġac ute +ĠH ann +Ġsymbol s +Ġcivil ization +ĠA W +Ġlight ning +Ġcons iders +Ġven ue +Ġ × +Ġo ven +ĠS F +h 
is +Ġn u +ĠLear n +Ġpe oples +Ġst d +Ġsle e +Ġs lic +ĠStat istics +Ġcor ners +ĠB aker +Ġ: ) +ment ation +ol ver +Ġlaugh ing +ĠT odd +ond e +ĠH ills +Ġn uts +ĠW oman +pl ane +Ġl iver +ĠIn side +S orry +Ġagre es +Ġfund ament +ĠF isher +Ġa uction +Ġthread s +gl as +ĠBas ic +ĠN at +Ġlack ing +Ġceleb ration +j u +Ġs illy +E uro +Ġt att +ight y +cont rolled +T est +ĠSing h +Ġr age +Ġrh yth +o ffic +ĠPh antom +Ġhead lines +Ġrespond ing +ĠMor ning +Ġvit amin +Ġboot s +ĠS ite +al in +p i +Ġvir al +ĠU C +D ER +ĠSe x +Ġst ocks +c urrent +Ġch urches +ĠR are +ĠMur phy +Ġden ial +ĠG aming +Ġtou g +Ġn ick +Ġm akers +ĠRon ald +Ġgener ous +ĠD oc +ĠMor ris +Ġtransform ed +ĠN ormal +Ġ10 4 +ĠKick starter +ĠUp on +On line +ĠI RS +Ġw rap +Ġl oving +Ġarri ves +ĠD ue +Ġhe ter +ĠM ade +Ġrent al +Ġbelong s +Ġatt orneys +Ġcro ps +Ġmat ched +ul um +ol ine +10 9 +Ġdis par +Ġbuy ers +ĠCam bridge +Ġeth ics +rou ps +Ġjust ified +Ġmarg inal +Ġrespect ed +win ning +Ġnodd ed +ĠSer ge +ĠForm er +C raft +######## ######## +ĠWar ner +Ġd ash +et e +Ġent ert +ĠE scape +out heast +Ġkn ees +ĠB omb +Ġr ug +P ass +Ġatt itudes +go vernment +ĠPri or +Ġqual ities +Ġnot ification +ĠPh one +l ie +Ġanticip ated +ĠCom bat +ĠBar ry +Ġ198 2 +Us ers +on er +Ġcomput ing +ĠConnect icut +Ġless er +Ġpe ers +ĠC u +Ġtechn ically +Ġsub mission +ĠUn iversal +Ġman ually +our ge +Ġrespond ents +ĠB TC +ĠH ost +Ġf are +ĠB ird +Ġrece ipt +al so +Ġj ack +Ġagric ulture +Ġsk ull +Ġ! = +Ġpass ive +ĠC I +Ġsoc ieties +Ġremind ed +Ġinter ference +B uy +Ġâ ľ +g on +Ġscrut iny +ĠW itch +Ġconduct ing +Ġ ãĥ +Ġexch anges +ĠMit chell +Ġinhab it +Ġtw ist +B D +Ġwhere ver +group on +Ġj okes +ĠBen jamin +ĠR andom +fr ame +ĠL ions +Ġhighlight ed +ĠArk ansas +E nt +Ġp ile +Ġpre lim +g s +mind ed +Ġfel ony +ĠG A +ĠL uck +Ġpract ically +ĠB os +Ġact ress +D am +ĠB ou +Ġvis a +Ġembed ded +Ġhy brid +Ġear liest +Ġsoon er +s ocial +ĠH A +Ġste ep +Ġdis advant +Ġexplo it +ĠE gg +ĠUlt ra +Ġnecess ity +L ocal +ie ge +Ġd ated +Ġmass es +Ġsubsc ription +pl ess +Ġan onym +Ġpresum ably +Bl ue +The ir +asket ball +ĠPhil ip +Ġcom ed +load ed +r ane +Ġref lection +Ch ina +Ġext ends +Ġform ing +Ġund ers +200 1 +Ġgr at +Ġconcent rations +Ġins ulin +Ġsec ular +Ġwh ilst +Ġwin ners +Ad vertisements +Ġdeliber ately +ĠWork ing +Ġs ink +et ics +d ale +Ġmand ate +Ġg ram +Ġvac ation +Ġwarn ings +ri pp +ĠTH AT +Ġcomment ary +Ġint u +Ġa est +Ġreason ing +Ġbreak down +ĠZ ombie +Ġ-- > +ĠPolit ical +c ott +Ġthr ust +Ġtechn ological +Ġdec iding +Ġtraff icking +L ong +W elcome +pr ising +ĠCommun ications +Ġend ors +Ġsw ift +Ġmetab ol +co ins +res a +ĠHT TP +Ġen roll +ĠH appy +us r +int age +Ġ[ " +u ably +ĠM aterial +Ġrepe al +Se pt +k h +ĠMod i +Ġunder neath +ĠI L +sh ore +Ġdiagn osed +ace utical +Ġsh ower +au x +ĠSw itch +ĠStre ngth +Ġj ihad +n ational +Ġtra uma +uss y +on i +Ġcons olid +Ġcal ories +ĠF lynn +ag ged +16 8 +ĠP ink +Ġfulf ill +Ġch ains +Ġnot ably +ĠA V +L ife +ĠCh uck +m us +ĠUr ban +ĠH end +Ġdep osit +ĠS ad +Ġaff air +OR K +ie val +ĠF DA +Ġt rop +ĠOver all +Ġvirt ue +Ġsatisf action +au nd +Ġl un +ĠSw itzerland +ĠOper ation +pro cess +Ġsh ook +Ġcount ies +le ased +ĠCharl otte +1 12 +Ġtrans cript +Ġre dd +p ush +ĠHe y +ĠAn alysis +[ " +Ġaltern atives +ard less +Ġele ph +Ġpre jud +ĠLe af +H aving +ĠH ub +Ġexpress ions +ĠVol ume +Ġshock ing +ĠRed s +Ġread ily +Ġplan ets +ad ata +Ġcollaps ed +ĠMad rid +Ġir rit +i pper +ĠEn c +ĠW ire +Ġbu zz +ĠG P +ash a +Ġaccident ally +ur u +Ġfrust rated +ĠS A +Ġhung ry +ĠH uff +Ġlab els +ant o +ĠE P +Ġbar riers +) | +ĠBer keley +ĠJ ets +Ġp airs +ĠL an +J ames 
+ĠB ear +Ġhum or +ĠLiber ty +Ġmagn itude +Ġag ing +ĠM ason +Ġfriends hip +umb ling +Ġemer ge +Ġnewsp apers +Ġam bitious +ĠRich ards +atern al +Ġ198 1 +Ġcook ies +Ġsc ulpt +Ġpur suit +L ocation +Ġscript s +p c +Ġarrang ements +Ġd iameter +Ġl oses +am ation +Ġl iqu +ĠJ ake +aret te +Ġunderstand s +ĠZ en +v m +Ġappro ve +Ġw ip +Ġult ra +Ġint end +ĠD I +asc ular +Ġst ays +ĠK or +ĠK l +Ġinvest ing +L a +Ġbelie ving +b ad +m outh +Ġtaxp ayer +ãĥ ĥ +ĠQue bec +Ġl ap +ĠSw iss +d rop +Ġdr ain +ir i +et c +ft en +ĠN ex +Ġst raw +Ġscream ing +Ġcount ed +Ġdam aging +Ġamb assador +cent ury +Ġpro x +Ġarrest s +u v +il ateral +ĠCh arg +Ġpresc ribed +Ġindepend ently +Ġf ierce +ĠB aby +Ġb rave +Ġsu its += > +Ġbas eline +ĠR ate +Ġis lands +Ġ( ( +g reen +ix els +Ġname ly +ĠVill age +th an +am y +V ersion +g mail +ential s +ĠS ud +ĠMel bourne +Ġarri ving +Ġquant um +e ff +rop olitan +T ri +Ġfun eral +ĠI R +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +ĠC ob +it ably +Ġt urb +Ġcomb o +Re view +Ġdeploy ment +u ity +ĠB ott +Ġinv isible +Ġrender ing +Ġunl ocked +Ġa qu +ĠVlad imir +Ġp ad +ĠBr ain +ĠLeg acy +dr agon +ĠKurd ish +Ġsound ed +Ġdet ained +ĠD M +g ary +Ġd aughters +Ġdistur bing +uk a +ĠPar ad +Ġt ast +Ġunf ortunate +Ġu l +em in +Ġattend ance +tr l +Ġpar ks +ĠMem orial +ĠAl ice +oth y +gu ard +ĠD ise +ĠSh an +ĠFor um +R ich +Ġshif ted +ue z +Ġl ighter +ĠMag n +Ġc od +S ch +ham mad +P ub +3 50 +ĠP okemon +Ġprot otype +Ġun re +B ase +ĠStud ents +ĠRep ly +ĠCommun ist +Ġg au +ĠTy ler +I Z +Ġparticip ated +Ġsup rem +ĠDet ails +Ġvessel s +ro d +Ġt ribe +ke ep +Ġassum ptions +Ġp ound +Ġcr ude +ĠAv ailable +Ġswim ming +Ġin clusion +Ġadv ances +c ulation +Ġconserv ation +Ġover d +ĠBuff alo +Art icle +ed ge +Ġaw a +ĠMad ison +Ġsid ew +Ġcat ast +ĠK rist +uc le +ĠHigh way +ĠTer ror +Ġactiv ation +Ġuncons cious +ĠSat an +ĠSus an +ill ery +Ġarr anged +i op +Ġrum ors +ur ring +th ink +ĠKe ith +ĠK ind +Ġavoid ing +by n +n ut +ĠSpe aker +r us +n ames +Ġgu ilt +ĠOlymp ics +Ġsa il +ĠM es +lev ant +ĠColumb us +a ft +C ity +S outh +ĠHar vey +ĠP un +S everal +Ġment ally +Ġimp ress +m ount +ĠUb untu +âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ +ĠSuper man +ĠMP s +Ġintent ions +ĠR acing +Ġlike lihood +Ġ2 40 +T otal +Ġto ys +ĠW atson +Ġur ge +L ear +ĠP aper +Ġoccur ring +ĠB eng +ĠC ert +Ġst ones +T im +ĠTw in +z b +ĠD ynam +Ġpolit ician +k ens +ĠEnter prise +UT ERS +Ġab ol +Ġref resh +Ġarbit rary +pe ction +Ġtrou bles +Ġ} ); +t v +Ġpil ots +Ġdist ribute +Ġaud it +Ġp ause +orig inal +Ġr ivals + £ +F ig +T L +ab il +ry ing +L in +ion ed +l on +Ġf ancy +Ġcr ashed +Ġt ract +Ġshe d +Ġcons ume +B ased +down load +in it +Ġvolt age +Int rodu +Ġcondem ned +ĠFin ance +res pect +Ġex cluded +Ġestablish ing +her ic +Ġher itage +Ġspect acular +Ġun st +ĠSnow den +ĠL ane +S an +Ġprotect ions +st ruction +inc inn +Ġmac ro +C ustom +ios ity +Ġes p +Ġfunction ing +Ġm ush +Ġp uzzle +Ġeth ical +M al +Ġgo verning +ĠF erguson +Ġrest ored +Ġst ressed +ĠCoun ter +ĠK as +cl ip +AN S +Ġse iz +U K +by ss +old own +ap i +Ġperman ently +oun ters +W est +Th rough +L ight +at oes +Ġne at +Ġc ord +ure r +Ġsevere ly +ĠA ven +Ġinter rog +Ġtri ple +G iven +N umber +Ġar ise +Ġs her +pl ant +Ġfl ower +ĠC ou +Ġat e +Ġnew er +b ul +Ġmean while +ĠL air +Ġadjust ment +ĠCop yright +Ġd ivers +i ological +Ġgam ers +o at +Ġhistor ically +Ġanal og +Ġlong time +Ġpres cription +ĠM ist +ĠHy per +ĠM aine +ĠDe ity +Ġmulti pl +ĠRe incarn +ĠH yd +ĠP ic +S il +r ants +ĠC ris +. 
; +( { +epend ence +Ġrec y +ate ur +Ġqu ad +Ġgl ob +Ġcon ced +te am +Ġcapital ist +ĠL ot +Ġroy al +ĠCy ber +Ġblack s +met ic +ri v +ĠD anny +Ġsp o +ĠR O +Ġanim ated +rypt ed +ĠDep uty +Ġrend ered +F E +Ġstre ak +Ġcloud s +ĠDou g +~~~~ ~~~~ +Ġdisc our +ĠVe h +Ġpsych ology +ĠJ ourney +Ġcry stal +ĠFro st +Ġsuspic ion +Ġrel ate +or us +ĠC rypt +ĠN VIDIA +com ed +ut ing +incinn ati +Ġvulner ability +ost ic +Ġisol ation +Ġcool ing +ĠCoal ition +Ġ1 19 +F our +ĠDe al +Ġâ ī +se mble +ram ent +ĠBar celona +Ġ10 2 +Ġcoc aine +ocaly pse +F eb +ogen ic +Ġmut ation +Ġcrypt oc +ĠK el +ĠG it +a is +Ġs isters +AN K +Ġactiv ate +T er +Ġd read +yl on +Ġprop ri +A ust +ĠDef ault +Ġout door +Ġshe er +ce ive +Ġg ently +Ð ¾ +Pro gram +Ġâ ĨĴ +Ġve gan +ĠCr us +Ġrespons ibilities +ĠH R +OL D +Ġprev ents +Ġst iff +ĠW ere +Ġathlet ic +ĠSc ore +Ġ) : +Ġcolumn s +ĠL oc +av ailable +ĠF ram +ĠS essions +Ġcompan ion +Ġpack s +14 0 +ĠKn ights +Ġf art +Ġstream s +Ġsh ore +Ġapp eals +ĠPer formance +h aul +ĠSt ra +ĠN ag +10 3 +ĠTrans portation +B B +E v +z an +P ublic +Ġtw in +uls ion +M ult +Ġelect ro +Ġstat ue +ation ally +ĠN ort +Ġins pection +/ * +ig ue +Ġcomp assion +ĠT ales +ĠSte in +ĠSc reen +ĠB ug +ĠL ion +g irl +Ġwithdraw al +Ġobject ives +Ġblood y +Ġprelim inary +Ġj acket +Ġdim ensions +ĠC ool +ĠOcc up +Ġw reck +Ġdoub led +ank ing +Ġ19 75 +Ġglass es +ĠW ang +pro v +P ath +connect ed +ĠMult i +ĠNor way +agon ist +Ġfe ared +Ġtouch ing +Ġarg uably +¯¯¯¯ ¯¯¯¯ +ĠNC AA +che m +Ġsp at +ĠW WE +ĠC el +ig ger +Ġattack er +ĠJo in +ob ject +ett a +Ġelim inated +d et +Ġdest ruct +ĠLuc as +ct uary +18 0 +ĠBr ady +ĠBl ues +B ay +au kee +Ġtim eline +Ġdeleg ates +w ritten +uff icient +Ġsh apes +Cop yright +ou ble +serv ice +Ġp ione +Ġcolleg es +Ġrow s +Ġsp ite +Ġassess ed +3 60 +Ġle ase +Ġconfident ial +ck er +ĠMan ning +ĠV oice +Ġse aled +Ġcalcul ate +N O +ĠAss istant +Ġteen ager +ul ent +ather ine +Ġm ock +Ġd iamond +Ġf est +Ġsw itched +Ġres ume +ĠPu erto +Ġl anes +ir ation +ĠSimilar ly +Ġro d +ĠS el +ĠPal ace +ĠLim ited +e ous +Ġvar iant +Ġw ard +Ġ) ) +Sh ow +OO K +A lex +ĠN ep +br is +ĠWik ipedia +Ġexcept ional +Ġman ages +ĠD raw +Ag ain +Ġco pper +ut t +Ġex ports +Ġport folio +Ġelev ated +R ated +ĠOther wise +ĠT act +ĠShe l +ĠT X +" âĢĶ +Ġres ur +ĠW a +ven ant +Ġmon etary +pe ople +E mail +Ġfif ty +ĠS weet +ĠMalays ia +Ġconf using +ĠR io +ud a +uten ant +" ); +Ġpra ised +Ġvol umes +t urn +Ġm ature +Ġnon profit +Ġpassion ate +ĠPriv ate +Ġ10 3 +Ġdesc end +ç ¥ŀ +uff y +head ed +Whe ther +ri en +ze ch +be it +Ġch rom +ĠMc M +Ġd ancing +Ġe leg +ĠNot iced +11 5 +Ġadvoc acy +ENT S +amb ling +ĠMin or +ĠF inn +Ġprior ities +Ġthere of +ĠSt age +ĠRog ers +Ġsubst itute +ĠJ ar +ĠJeff erson +Ġlight ly +10 2 +ĠL isa +u its +ys ical +Ġshif ts +Ġd rones +Ġwork place +Ġres id +ens ed +ah n +Ġpref erences +ser ver +Ġdeb ates +d oc +ĠGod s +Ġhelicop ter +Ġhon our +Ġconsider ably +ed ed +ĠF emale +ĠAn ne +Ġre un +ĠF ace +ĠHall ow +ĠBud get +Ġcondem n +Ġt ender +Pro f +ocr atic +ĠTurn er +ĠAg ric +Ġ19 76 +Ġa pt +d isc +ĠF ighter +ĠA ur +Ġgar bage +in put +ĠK arl +ĠOl iver +ĠL anguage +k n +N on +ĠCl ar +Ġtrad itions +Ġad vertisement +ĠS or +Ġarch ive +Ġvill ages +7 50 +Ġimplement ing +w aukee +Ġdiet ary +Ġswitch ing +Rep ublic +Ġvel ocity +Ġc it +ĠA wards +Ġfin ancing +Ġlast ed +) ] +Ġrem inder +P erson +Ġprec ision +Ġdesign ers +ĠF ried +ĠB order +Ġtr agic +Ġw ield +Ġiniti atives +ĠT ank +w er +Ġjo ins +R o +in ery +Ġar row +Ġgener ating +found er +Ġsear ches +Ġrandom ly +A ccess +Ġb atch +Ġp osed +l at +Ġpursu ing +as a +Ġtest ified +form ing +ĠSh 
ar +w iki +ĠE ither +S ometimes +Ġsen ators +ĠJohn ny +ĠTal iban +ĠG PS +":" / +ãģ® å +Ġanaly zed +ĠRub io +ĠMove ment +op ard +ii i +St and +f ight +Ġign oring +i ang +ĠG N +so ever +ĠST AT +Ġref using +Ġswe at +Ġb ay +P ORT +ir med +ak y +Ġdis pro +Ġlabel ed +Ġ10 8 +H ello +Ġple asant +ab a +Ġtri umph +Ġab oard +Ġinc om +ĠC row +le tt +Ġfol k +Ġch ase +` ` +ĠBr us +Ġte ens +c ue +Ġter rain +h yd +il ight +OR Y +Su pport +ew s +ll i +rain ts +ĠC and +Ġab used +ach ment +l arg +B as +ĠC ancer +Ġ19 78 +Ġsupp orter +ac cess +ĠTer min +ĠT ampa +ĠAN Y +Ġnew est +ĠCrim inal +ed u +Ġ19 30 +Ġadm its +Ġend e +Ġfail ures +ur ate +ful ness +cy cl +ĠSub ject +Ġinf inite +th ree +W A +p it +ĠInst all +R ad +ili ation +G M +Ġcontin ent +Ġaccommod ate +ĠCl ay +Ġp up +ĠF unction +Ġham mer +ĠAlbert a +Ġrev ised +Ġminor ities +Ġmeasure ment +Con nell +Ġdis able +ĠM ix +In cre +Ġfor k +ĠR osen +Ġimpl ies +umb lr +AN G +Ġprote ins +Ġagg ression +Ġfacilit ate +S N +Ġilleg ally +u er +Ġacad em +Ġp uzz +ĠSh ift +p ay +oll o +Ġaud iences +B uild +Ġno ble +Ġsynt ax +â ĺħ +Ġbe am +ĠB ed +ĠA ld +Ġorig ins +v ideo +Ġ19 77 +ĠAss ault +Ġgar age +Te am +Ġver dict +Ġd war +ĠVirt ual +e vent +Ke ep +Ġsent iment +Ġwild life +sh irt +Ġb urg +Ġrecommend ation +rep resent +Ġgall ery +own ers +Ġsch olar +Ġconven ience +ĠSw ift +Ġconv inc +C ap +Ġwar fare +ĠVis ual +Ġconst itute +Ġab ort +ĠWe ather +ĠLook ing +ĠH em +Ġmart ial +Ġinc oming +et ition +Ġtoler ance +ĠCre ated +Ġfl ows +ĠE lder +Ġsoul s +Ġf oul +ĠP ain +ĠC AN +Ġ2 20 +b c +he nd +Ġgen ius +R eal +ĠW r +omet er +p ad +Ġlim iting +ĠS i +ĠL ore +ĠAd ventures +Ġvar ied +D isc +f in +ĠPerson al +Ch ris +Ġinv ented +Ġd ive +ĠR ise +Ġo z +ĠCom ics +Ġexp ose +ĠRe b +let ters +s ite +im ated +Ġh acking +Ġeduc ated +ĠNob ody +Ġdep ri +Ġincent ive +ãĤ · +Ġovers ight +Ġtrib es +ĠBelg ium +Ġlicens ing +our t +Produ ct +ah l +ĠG em +Ġspecial ist +Ġc ra +ann ers +ĠCor byn +Ġ19 73 +RE AD +Ġsum mar +Ġover look +ĠApp lication +Ġin appropriate +Ġdownload ed +Q ue +ĠB ears +Ġth umb +ĠChar acter +ĠReincarn ated +ĠS id +Ġdemonstr ates +s ky +ĠBloom berg +ĠAr ray +ĠRes ults +ĠFour th +ĠED T +ĠO scar +c end +Ġ10 6 +ĠN ULL +ĠH ERE +m atch +ĠBr un +Ġgluc ose +ie g +eg u +Ġcert ified +Ġrel ie +Ġhuman itarian +Ġpr ayers +K ing +Ġn an +h ou +10 8 +ul u +Ġrenew able +Ġdistingu ish +Ġd ense +ĠV ent +ĠPack age +ĠB oss +Ġedit ors +Ġm igr +T ra +ĠPet ers +ĠAr ctic +200 4 +ĠC ape +Ġloc ally +Ġlast ing +Ġhand y +. ). 
+P an +ĠR ES +Ind ex +Ġt ensions +Ġformer ly +Ġide ological +Ġsens ors +Ġdeal ers +Ġdef ines +S k +Ġproceed s +Ġpro xy +az ines +ĠB ash +ĠP ad +ĠC raft +eal ous +Ġshe ets +omet ry +J une +cl ock +T T +ĠThe atre +ĠB uzz +Ġch apters +Ġmill enn +Ġd ough +ĠCongress ional +Ġimag ined +av ior +Ġclin ic +Ġ19 45 +Ġhold er +ro ot +oles ter +Ġrest art +B N +ĠHam as +ĠJ ob +Ġor b +Ġr am +Ġdiscl ose +Ġtransl ate +Ġimm igrant +Ġannoy ing +Ġtreat y +an ium +ĠTe a +ĠLeg ion +Ġcrowd s +ĠB ec +ĠA er +oh yd +B ro +Look ing +Ġl bs +Ġagg ress +Ġse am +Ġinter cept +ĠM I +mer cial +act iv +ĠC it +Ġdim ension +Ġconsist ency +Ġr ushing +ĠDou glas +Ġtr im +Inst all +ick er +Ġsh y +10 6 +Ġment ions +pe lled +ĠT ak +c ost +Ġclass room +Ġfort une +dri ven +Ġun le +ĠWhe el +Ġinvest or +ĠM asters +k it +Ġassoci ations +ĠEv olution +op ing +us cript +Ġprov incial +ĠWal ter +av i +S O +Ġun limited +Eng lish +ĠC ards +ĠEb ola +ne red +Ġreven ge +Ġout right +um per +Ġf itting +ĠSol id +Ġform ally +Ġproblem atic +Ġhaz ard +Ġenc ryption +Ġstraight forward +ĠA K +Ġp se +ĠOr b +ĠCh amber +ĠM ak +Cont ents +Ġloyal ty +Ġl yrics +ĠSy m +Ġwel comed +Ġcook ed +Ġmon op +Ġn urse +Ġmis leading +Ġe ternal +Ġshif ting +Ġ+ = +V is +Ġinst itutional +ill ary +Ġp ant +VER T +ĠA CC +ĠEn h +Ġinc on +ĠRE UTERS +Ġdon ated +âĢ¦âĢ¦ âĢ¦âĢ¦ +In tern +Ġexhib it +Ġt ire +ĠR ic +ĠCh ampion +ĠMu hammad +N ING +ĠSoc cer +Ġmob ility +Ġvary ing +ĠM ovie +Ġl ord +o ak +F ield +Ġve ctor +us ions +Ġsc rap +Ġen abling +m ake +T or +. * +| | +ĠWe bsite +ĠN PC +Ġsocial ist +ĠBill y +ĠAdd itional +Ġc argo +Ġfar ms +ĠSo on +ĠPri ze +Ġmid night +Ġ9 00 +se en +ĠSp ot +Ġshe ep +Ġspons ored +ĠH i +ĠJ ump +Ġ19 67 +Micro soft +ĠAg ent +Ġch arts +d ir +Ġadj acent +Ġtr icks +Ġman ga +Ġex agger +/ > +foot ball +ĠF CC +G C +ĠT ier +and ra +OU ND +% ), +Ġfru its +V C +ĠA A +R ober +Ġmid st +â Ĺ +ank a +Ġlegisl ature +ĠNe il +Ġtour ists +" " +ĠWar ning +ĠNever theless +ĠOffic ial +ĠWh atever +Ġm old +Ġdraft ed +Ġsubst ances +Ġbre ed +Ġt ags +ĠT ask +Ġver b +Ġmanufact ured +com ments +ĠPol ish +Pro v +Ġdetermin es +Ob ama +k ers +Ġutter ly +Ġse ct +sc he +ĠG ates +ĠCh ap +Ġal uminum +Ġz ombie +ĠT ouch +ĠU P +Ġsatisf y +Ġpred omin +asc ript +Ġelabor ate +Ġ19 68 +Ġmeas uring +ĠV ari +any ahu +Ġs ir +ul ates +id ges +ick ets +ĠSp encer +T M +oub ted +Ġpre y +Ġinstall ing +ĠC ab +re ed +re ated +Su pp +Ġwr ist +ĠK erry +10 7 +ĠK le +ĠR achel +Ġc otton +ĠA RE +ĠE le +Cont rol +Ġload s +ĠD od +an as +b one +Ġclass ical +ĠReg ional +ĠInt eg +V M +Ġdes ires +Ġaut ism +support ed +ĠM essage +Ġcomp act +writ er +Ġ10 9 +ĠHur ricane +c ision +Ġcy cles +Ġdr ill +Ġcolle ague +Ġm aker +G erman +Ġmist aken +S un +ĠG ay +Ġwhat soever +Ġsell s +ĠA irl +l iv +ĠO ption +Ġsol ved +Ġse ctors +Ġhorizont al +Ġequ ation +ĠSk ill +ĠB io +g ement +ĠSn ap +ĠLeg al +Ġtradem ark +Ġmake up +Ġassemb led +Ġsa ves +ĠHallow een +ĠVer mont +ĠFR OM +Ġfar ming +ĠP odcast +accept able +ĠHig her +Ġas leep +ull ivan +Ġrefere n +ĠLe v +Ġbul lets +ok o +H C +Ġst airs +Ġmain tains +ĠL ower +ĠV i +Ġmar ine +Ġac res +Ġcoordin ator +ĠJ oh +Ġcounterpart s +ĠBrother s +Ġind ict +b ra +Ġch unk +Ġc ents +H ome +ĠMon th +Ġaccording ly +if les +ĠGerm ans +ĠSy n +H ub +Ġey eb +âĶĢâĶĢ âĶĢâĶĢ +Ġr anges +ĠHoll and +ĠRob ot +f c +M ike +Ġpl asma +Ġsw ap +Ġath lete +ĠR ams +,' " +Ġinfect ions +Ġcor rid +Ġv ib +Ġpat ches +Ġtradition ally +Ġrevel ation +Ġswe ep +Ġgl ance +Ġin ex +200 3 +ĠR aw +work ing +os ures +ĠD at +ĠLyn ch +Ġle verage +ĠRe id +Ġcorrel ation +ian ces +av ascript +Ġrep ository +ret ty +Ġ19 72 +24 0 +Ġo un +p ol +ĠRe 
ed +Ġtact ical +is ite +App le +ĠQu inn +Ġrap ed +ill o +Euro pe +Ġalgorith ms +ĠRod rig +i u +Ġill um +Ġf ame +Ġintrodu cing +Ġdel ays +ĠRaid ers +Ġwh istle +Ġnovel s +ĠRe ally +Ġder iv +Ġpublic ations +ĠNe ither +ĠCom merce +Ġa ston +l anguage +Not es +ĠR oth +ĠF ear +Ġm ate +Ġpar ade +ĠQ B +Ġman eu +ĠC incinnati +m itting +Ġwa ist +ĠR ew +Ġdisc ont +Ð ° +Ġst aring +Ġal ias +Ġsec urities +Ġtoile t +ĠJ edi +Ġun law +v ised +//// //// +] ( +ĠWe iss +Ġpre st +ĠComp an +Ġmem o +ĠGr ace +J uly +ĠEl ite +cent er +ĠSt ay +Ġgal axy +Ġto oth +ĠS ettings +Ġsubject ed +ãĤ ¦ +Ġline back +Ġretail ers +ĠW ant +Ġd angers +A ir +Ġvolunt ary +ew ay +Ġinterpret ed +ot ine +à § +Ġp el +Serv ice +ĠEvent ually +Ġcare ers +Ġthreat en +Ġmem or +ĠBrad ley +anc ies +s n +ĠUn known +N ational +Ġsh adows +ail and +ĠD ash +Every one +izz ard +M arch += ( +Ġpull s +Ġstr anger +Ġback wards +ĠBern ard +imens ional +Ġch ron +Ġtheoret ical +k top +Ġw are +ĠInvest ig +ĠIn iti +ĠOper ations +o ven +oc ide +* / +Ġfl ames +ĠC ash +sh it +Ġc ab +ĠAn aly +ĠSe ah +Ġdefin ing +Ġorder ing +Ġimm un +Ġpers istent +AC H +Russ ian +m ans +Ġh ind +Ġphot ography + © +Ġh ug +Ġ10 7 +ĠH ence +i ots +ude au +Ġsubsid ies +Ġroutine ly +ĠDev ice +it ic +Ġdisg ust +land er +Ġ19 40 +Ġassign ment +ĠB esides +w ick +ĠD ust +us c +struct ed +11 1 +de velop +Ġf ond +Ġinter section +Ġdign ity +Ġcommission er +With out +re ach +Ġcart oon +Ġsc ales +ãĥ Ń +F IG +Ġsurve ys +ĠIndones ia +Ġart work +Ġun ch +Ġcy cling +un ct +au er +or ate +ĠOb viously +Ġcharacter ized +fe ld +Ġaff irm +Ġinn ings +Ġ é +Ġal iens +Ġcl oth +et ooth +ĠC ertain + § +Ġdig est +k now +ĠX L +Ġpredict ions +Ġd in +W AR +Ġafter math +Ex ample +ĠSu ccess +ĠTh r +IG N +Ġmin er +B us +Ġcl arity +heim er +ĠO UT +ĠS end +ĠCirc le +ĠD iet +Ġpron ounced +Ġcreat ors +Ġearthqu ake +atter y +ge ons +Ġo d +Ġlay ing +or p +U lt +pro ject +Ġunder min +Ġsequ el +S am +ĠDark ness +Ġre ception +b ull +Y S +ĠV ir +Ġsequ ences +ĠCo in +Ġout fit +ĠW ait +1 19 +Ġdel ivers +.... .. 
+Ġbl own +ĠE sc +ĠM ath +per m +ĠU l +Ġgl im +Ġfac ial +Ġgreen house +Ġto kens +/ - +ĠAnn ual +ĠON E +Ġteen age +ĠPhys ical +ĠL ang +ĠC elt +Ġsu ed +ivid ually +Ġpat ience +ch air +reg ular +Ġa ug +in v +ex cept +ĠL il +Ġn est +f d +s um +ĠCh ase +Russ ia +ĠJenn ifer +Ġoff season +Over all +F ore +Ġr iot +A ud +form er +Ġdefend ers +ĠC T +iot ic +rib ly +Ġautom ated +Ġpen is +Ġins ist +Ġdi agram +ĠS QL +ĠG arc +Ġw itch +cl ient +ier ra +am bers +Ġrec ount +f ar +V ery +oster one +Ġappreci ated +ĠPer fect +S ection +Ġd oses +oca ust +Ġcost ly +Ġg rams +ĠSh i +Ġwrest ling +Ġ19 71 +Ġtro phy +Ġn erve +ĠK az +ĠExper ience +Ġpled ged +Ġplay back +Ġcreat ivity +by e +Ġattack ers +Ġhold ers +ĠCo ach +ĠPh D +Ġtransf ers +Ġcol ored +ĠH indu +Ġd rown +Ġlist ened +ĠW A +ias m +P O +Ġappeal ing +Ġdiscl osed +ĠCh icken +ag ging +Ġple aded +Ġnav igation +ĠReturn s +Ġ[ [ +R OR +E A +Ġphotograp her +ĠR ider +ipp ers +Ġsl ice +Ġe rect +Ġhe d +iss ance +ĠVik ings +ur ious +Ġapp et +oubted ly +Ch ild +Ġauthent ic +o os +ĠM aking +Ġannoun cing +Ġb od +Ġmet er +ĠN ine +ĠR ogue +Ġwork force +Ġrenew ed +Ġorganis ations +ac s +P LE +Sh ort +Ġcomp ounds +ĠVis it +Ġen velop +ear th +Ġsupport ive +gg le +ĠBrus sels +ĠGu ild +Cre ate +RE L +Ġaver aged +Ġ19 69 +ri ages +Ġlength y +Ġforg ot +O kay +ĠE rd +Ġdeal er +Ġrec ession +D D +Ġdesper ately +Ġhun ger +Ġst icks +Ġm ph +ĠF aith +Ġintention ally +Ġdem ol +ue ller +ĠS ale +Ġde bris +s pring +Ġle ap +>> >> +Ġcontain ers +se lling +rane an +atter ing +Ġcomment ed +ĠC M +on ut +Ġwood s +es pecially +Ġorgan ize +iv ic +ĠWood s +ang a +s qu +Ġm aj +am on +Ġax is +Ġ19 74 +ĠDen mark +Ġwar rior +ĠP and +Ġout lined +ĠB O +ins ula +z illa +eb ook +Ġd are +Ġsear ched +Ġnav igate +S n +writ ing +Ġun ited +J apan +ĠHe brew +Ġfl ame +Ġrel ies +Ġcatch ing +ĠSh o +Ġimprison ment +Ġp ockets +Ġclos ure +ĠF am +t im +ade qu +Act ivity +Ġrecru iting +ĠW ATCH +ĠArgent ina +d est +Ġapolog ize +or o +Ġlack s +Ġtun ed +ĠGriff in +Ġinf amous +Ġcelebr ity +ss on +Ġ ---------------------------------------------------------------- +ĠIs is +ĠDis play +Ġcred ibility +Ġeconom ies +Ġhead line +ĠCow boys +Ġind ef +Ġl ately +Ġincent ives +but ton +ĠM ob +A ut +Ġres igned +ĠO m +c amp +Ġprof iles +Ġsche mes +olph ins +ay ed +Cl inton +en h +ĠY ahoo +Ġab st +Ġan k +su its +Ġw ished +ĠMar co +udd en +Ġsp here +ĠB ishop +Ġincorpor ated +ĠPl ant +11 4 +Ġh ated +p ic +Ġdon ate +Ġl ined +Ġbe ans +Ġsteal ing +Ġcost ume +Ġsher iff +Ġfor ty +Ġint act +Ġadapt ed +Ġtrave lling +b art +Ġnice ly +Ġdri ed +Ġsc al +os ity +NOT E +ĠB h +ĠBron cos +ĠI gn +Ġint imate +Ġchem istry +Ġopt imal +D eb +ĠGener ation +Ġ] , +ich i +ĠW ii +ĠYOU R +vent ions +W rite +Ġpop ul +un ning +ĠW or +V ol +Ġqu een +head s +K K +Ġanaly ze +op ic +ear chers +Ġd ot +leg raph +ast ically +Ġupgr ades +Ġca res +Ġext ending +Ġfree ze +Ġin ability +Ġorg ans +Ġpret end +Ġout let +11 3 +ol an +ĠM all +ul ing +t alk +Ġexpress ing +ĠAl ways +ĠBe gin +f iles +Ġlic enses +% % +ĠM itt +Ġfil ters +ĠMil waukee +G N +Ġunf old +M o +Ġnut rition +pp o +B o +Ġfound ing +Ġunder mine +Ġeas iest +ĠC zech +ĠM ack +Ġsexual ity +ĠN ixon +W in +ĠAr n +ĠK in +ãĤ £ +ic er +Ġfort un +Ġsurf aces +agh d +Ġcar riers +ĠP ART +ĠT ib +Ġinter val +Ġfrust rating +ĠSh ip +ĠAr med +ff e +Ġbo ats +ĠAb raham +in is +Ġsu ited +th read +i ov +ab ul +ĠVenezuel a +Ġto m +su per +Ġcast le +alth ough +iox ide +ec hes +Ġevolution ary +Ġnegoti ate +Ġconfront ed +Rem ember +Ġ17 0 +S uch +Ġ9 11 +m ult +ĠA byss +ur ry +ke es +spe c +ĠBarb ara +Ġbelong ing +Ġvill ain +ist ani +Ġaccount able 
+Ġport ions +ĠDe cl +U r +ĠK ate +g re +Ġmag azines +UC K +Ġregul ate +om on +ĠAl most +Ġover view +Ġsc ram +Ġl oot +ĠF itz +Ġcharacter istic +ĠSn ake +s ay +ĠR ico +Ġtra it +ĠJo ined +au cus +Ġadapt ation +ĠAirl ines +Ġarch ae +ĠI de +Ġb ikes +Ġliter ary +Ġinflu ences +ĠUs ed +C reat +Ġple a +ĠDef ence +ĠAss ass +Ġp ond +UL T +) " +Ġeval uated +Ġob taining +Ġdem ographic +Ġvig il +ale y +Ġsp ouse +ĠSeah awks +resp ons +ĠB elt +um atic +Ġr ises +run ner +ĠMichel le +Ġpot ent +r ace +ĠP AC +F ind +olester ol +IS S +ĠIntrodu ced +ress es +ign ment +O s +ĠT u +ĠDe x +ic ides +Ġspark ed +ĠLaur a +ĠBry ant +Ġsm iling +ĠNex us +Ġdefend ants +ĠCat al +Ġdis hes +sh aped +Ġpro long +m t +( $ +ãĢ Ĥ +Ġcalcul ations +ĠS ame +Ġp iv +H H +Ġcance lled +Ġgr in +Ġterrit ories +ist ically +C ome +ĠP arent +Pro ject +Ġneg lig +ĠPriv acy +Ġam mo +LE CT +olute ly +ĠEp ic +Ġmis under +w al +Apr il +m os +path y +ĠC arson +Ġalbum s +ĠE asy +Ġpist ol +< < +Ġ\ ( +t arget +hel p +Ġinter pre +cons cious +ĠH ousing +ĠJ oint +12 7 +Ġbe ers +s cience +ĠFire fox +effect ive +ĠC abin +ĠO kay +ĠApp lic +Ġspace craft +ĠS R +ve t +ĠStr ange +S B +Ġcor ps +iber al +e fficient +Ġpreval ence +Ġeconom ists +11 8 +Th read +ord able +OD E +ĠC ant +=- =- +if iable +ĠA round +Ġpo le +Ġwilling ness +CL A +ĠK id +Ġcomple ment +Ġsc attered +Ġin mates +Ġble eding +e very +Ġque ue +ĠTr ain +Ġh ij +Ġme lee +ple ted +Ġdig it +Ġg em +offic ial +Ġlif ting +Ð µ +Re qu +it utes +Ġpack aging +ĠWork ers +h ran +ĠLeban on +ol esc +Ġpun ished +ĠJ uan +Ġj am +ĠD ocument +Ġm apping +ic ates +Ġinev itably +Ġvan illa +ĠT on +Ġwat ches +Ġle agues +Ġiniti ated +deg ree +port ion +Ġrec alls +Ġru in +Ġm elt +I AN +Ġhe m +Ex p +Ġb aking +ĠCol omb +at ible +Ġrad ius +pl ug +ĠI F +et ically +Ġf ict +H ER +ĠT ap +atin um +Ġin k +Ġco h +ĠW izard +b oth +te x +Ġsp ends +ĠCurrent ly +ĠP it +Ġneur ons +ig nt +Ġr all +Ġbus es +b uilding +Ġadjust ments +Ġc ried +ibl ical +att ed +ĠZ ion +ĠM atter +Ġmed itation +ĠD ennis +Ġour s +ĠT ab +Ġrank ings +ort al +Ġad vers +Ġsur render +ĠG ob +ci um +om as +im eter +Ġmulti player +Ġhero in +Ġoptim istic +Ġindic ator +ĠBr ig +Ġgro cery +Ġapplic ant +ĠRock et +v id +Ex ception +p ent +Ġorgan izing +Ġenc ounters +ĠT OD +Ġjew el +S ave +ĠChrist ie +Ġhe ating +Ġl azy +ĠC P +Ġcous in +Con fig +Ġreg ener +Ġne arest +Ġachie ving +EN S +th row +ĠRich mond +ant le +200 2 +Ġan ten +b ird +13 3 +Ġn arc +r aint +un ny +ĠHispan ic +ourn aments +Ġprop he +ĠTh ailand +ĠT i +Ġinject ion +Ġinher it +rav is +Ġmed i +Ġwho ever +ĠDE BUG +G P +ĠH ud +C ard +p rom +Ġp or +Ġover head +L aw +Ġviol ate +Ġhe ated +Ġdescript ions +Ġachieve ments +ĠBe er +ĠQu ant +W as +Ġe ighth +ĠI v +Ġspecial ized +U PDATE +ĠD elta +P op +J ul +ĠAs k +oph y +Ġnews letters +ĠT ool +Ġg ard +ĠConf eder +ĠGM T +ĠAb bott +Ġimm unity +ĠV M +Is lam +Ġimpl icit +w d +Ġ19 44 +rav ity +omet ric +Ġsurv iving +ur ai +ĠPr ison +Ġr ust +ĠSk etch +Ġbe es +ĠThe ory +Ġmer it +T ex +ch at +Ġm im +Ġpast e +ĠK och +Ġignor ance +ĠSh oot +Ġbas ement +Un ited +ĠAd vis +he ight +Ġf oster +Ġdet ain +in formation +Ġne ural +' ; +Ġprov es +all ery +Ġinv itation +um bers +Ġc attle +Ġbicy cle +z i +Ġconsult ant +Ġap ology +ĠT iger +Ġ12 3 +99 9 +Ġind ividually +r t +ig ion +ĠBrazil ian +Ġdist urb +Ġentreprene urs +Ġfore sts +cer pt +pl ates +p her +clip se +Ġtw itter +Ġac ids +ograph ical +h um +ĠB ald +if ully +Ġcomp iler +ĠD A +Ġdon or +as i +Ġtrib al +l ash +ĠCon fig +Ġapplic ants +Ġsal aries +13 5 +Put in +ĠF ocus +ir s +Ġmisc onduct +ĠH az +Ġeat en +M obile +Mus lim +ĠMar cus +v iol +Ġfavor 
able +Ġst ub +ad in +ĠH ob +Ġfaith ful +Ġelectron ics +Ġvac uum +w ait +back ed +econom ic +d ist +Ġten ure +Ġsince re +ĠT ogether +ĠW ave +Ġprog ression +Ġden ying +Ġdist ress +br aska +th ird +Ġmix ing +Ġcolon ial +Ġpriv ately +Ġun rest +atern ity +Ġprem ises +ant i +greg ation +Ġlic ence +ĠH ind +ĠSam uel +Ġconvinc ing +ĠA ce +ĠR ust +ĠNet anyahu +Ġhand les +ĠP atch +orient ed +ah o +ĠG onz +Ġhack ers +claim er +Ġcustom s +ĠGr an +f ighters +Ġl uc +Ġman uscript +aren thood +Ġdev il +Ġwar riors +Ġoff enders +Will iam +Ġhol idays +Ġnight mare +Ġle ver +iff erent +St at +Ġexhib ition +put ed +ĠP ure +Ġal pha +Ġenthus iasm +ĠRepresent atives +E AR +ĠT yp +Ġwhe at +ĠAl f +Ġcor rection +Ġev angel +AT T +M iss +Ġs oup +Ġimpl ied +par am +Ġsex y +ĠL ux +Ġrep ublic +p atch +ab lish +Ġic ons +Ġfather s +ĠG ET +ĠCar ib +Ġregul ated +ĠCo hen +ĠBob by +Ġn er +Ġb ent +vent ory +ĠAl ong +ĠE ST +ĠWall ace +Ġmurd ers +r ise +ke ll +ĠCommon wealth +Ġn asty +et a +ĠM IT +Ġadminist ered +Ġgenuine ly +Ed itor +n ick +Ġhyd ro +**************** **************** +ĠB le +Ġfin es +Ġg orge +aus ible +r h +Ġapp le +ment ioned +Ġro pe +ot yp +H R +Ġdisappoint ing +Ġc age +n ik +Ġdoub ts +ĠF REE +print s +ĠM UST +Ġvend ors +ĠIn qu +Ġliber als +Ġcontract or +Ġup side +child ren +Ġtrick y +Ġregul ators +charg ed +l iter +Ġ *** +Ġreb ell +l ang +Ġloc als +Ġphys icians +Ġhe y +ar se +t m +ĠLe x +Ġbehavior al +success ful +F X +Ġbr ick +ov ic +Ġcon form +Ġreview ing +Ġins ights +Ġbi ology +ĠRem ove +ĠExt ra +Ġcomm itting +indu ced +ignt y +ig m +Ġat omic +Comm on +ĠE M +ĠP ere +ĠIt ems +e h +Ġpres erved +ĠH ood +Ġprison er +Ġbankrupt cy +Ġg ren +us hes +Ġexplo itation +Ġsign atures +Ġfin an +] ," +ĠM R +Ġme g +rem lin +Ġmusic ians +Ġselect ing +Ġexam ining +IN K +l ated +H i +Ġart ic +Ġp ets +Ġimp air +ĠM AN +Ġtable ts +in clude +R ange +Ġca ut +Ġlog s +Ġmount ing +Ġun aware +Ġdynam ics +ĠPalest ine +ĠQu arter +ĠPur ple +Ġm a +ĠIm port +Ġcollect ions +ci ation +Ġsuccess or +Ġcl one +Ġaim ing +Ġposs essed +Ġstick ing +Ġsh aking +Ġloc ate +ĠH ockey +T urn +17 0 +Ġfif teen +ĠHar rison +Ġcontinu ously +ĠT C +ĠVal ent +ĠRes cue +Ġby pass +am ount +Ġm ast +Ġprotect s +Ġart istic +Ġsomet ime +Ġsh oe +Ġshout ed +ific ant +et itive +ĠReg ister +ĠJ in +Ġconcent rated +ling ton +on ies +Ġgener ator +yr im +ĠAr men +Ġclear ing +id o +ĠT W +al ph +Ġlad ies +H ard +Ġdial og +Ġinput s +æ ľ +Ġpos es +Ġsl ots +ĠPrem ium +Ġle aks +Ġboss es +Ġ11 3 +c ourse +A cc +ĠNew ton +ĠAust ria +ĠM age +Ġte aches +ab ad +Ġwe ars +Ġc yl +Ġcur se +ĠS ales +ĠW ings +Ġp sy +Ġg aps +ĠIce land +ĠP interest +Ġland lord +Ġdefin itions +ĠK er +Ġsufficient ly +ĠP ence +ĠArch itect +Ġsur pass +Ġ11 4 +Ġsuper hero +ĠDise ase +Ġpri ests +ĠC ulture +Ġdefin itive +Ġsecret ly +ĠD ance +inst all +ch ief +ĠJess ica +W ould +Up dated +Ġlock er +ĠK ay +Ġmem orial +è ¦ +f at +Ġdis gu +Ġflav ors +ĠBase ball +ĠRes istance +Ġk icks +Ġen v +Ġteen agers +D ark +ĠC AR +Ġh alt +ĠL G +ĠGab riel +Ġfe ver +Ġs atur +Ġm all +Ġaffili ate +ĠS leep +ĠSpe cific +ĠV el +Ġj ar +ĠSac red +ĠEd wards +ĠA CL +Ġret ained +ĠG iant +Ġlim itation +in ces +Ġref usal +ĠT ale +ĠBut ler +Ġacc idents +ĠC SS +Ġimport ed +ĠCop y +Î ± +ER T +z el +Ġdiv isions +h ots +ĠAl b +ĠD S +Load er +W ashington +at isf +ĠCreat ive +\ . 
+ĠAut om +red ict +Ġrecept or +ĠCarl os +Met hod +ok a +Ġmal icious +Ġste pping +, [ +ĠD ad +Ġatt raction +ĠEffect s +ĠPir ate +ĠC er +ĠIndust ry +ĠR ud +Ġchar ter +Ġd ining +Ġins ists +Ġconfig ure +Ġ( # +ĠSim ple +ĠSc roll +UT C +17 5 +ĠK on +Ġmarket place +Ġ ãĤ +Ġref res +Ġg ates +er red +ĠP od +Ġbeh ave +Fr ank +n ode +Ġendors ed +he tt +as ive +ĠHom eland +Ġr ides +ĠLe ave +er ness +Ġflood ing +A FP +Ġris en +Ġcontin ually +Ġun anim +ĠCont ract +ĠP as +Ġgu ided +ĠCh ile +b d +Ġsu cc +pt ic +Ġcomm ittees +ĠL uther +ĠAny one +Ġs ab +12 4 +Ġp ixel +ĠB ak +ĠT ag +ĠBenn ett +En ter +sm all +ĠPresident ial +Ġp ul +Ġcontr ace +arch ive +Ġcoast al +ĠK ids +19 2 +âĢ ² +ick y +ING TON +Ġw olf +ĠSt alin +T ur +id get +am as +ĠUn less +Ġspons or +Ġmor ph +ĠCho ose +Ġrun ner +Ġun bel +Ġm ud +ĠMan a +Ġdub bed +Ġg odd +ure rs +wind ow +Ġrel ied +Ġcelebr ating +os c +Ġ13 5 +Ġlobb ying +Ġincom plete +Ġrestrict ion +Ġinc ap +it us +Ġexpect ation +ĠAp ollo +Ġint ens +Ġsyn c +G H +Ġmanip ulation +B Y +Ġspe ar +Ġbre asts +Ġvol can +il ia +M aterial +Ġform ats +ĠB ast +Ġparliament ary +Ġsn ake +Ġserv ants +ĠTr udeau +ĠGr im +ĠArab ic +ĠSC P +ĠBoy s +st ation +Ġprospect ive +ord e +in itialized +Ġb ored +AB LE +Ġaccess ed +Ġtax i +ĠShe ll +aid en +urs ed +in ates +ĠIns urance +ĠPet e +Sept ember +6 50 +Ġad ventures +ĠCo ver +Ġt ribute +Ġsk etch +Ġem power +Ġ Ø +ĠGl enn +ĠD aw += \" +ĠPolit ics +Ġgu ides +Ġd ioxide +ĠG ore +ĠBr ight +ĠS ierra +Ġval ued +c ond +Ġpo inter +Se lect +Ġrisk y +Ġabsor b +im ages +Ġref uses +Ġbon uses +__ _ +Ġh ilar +ĠF eatures +2 20 +ĠCollect or +F oot +Ġ19 64 +cul us +Ġd awn +Ġwork out +ĠL O +Ġphilosoph ical +ĠSand y +ĠYou th +Ġl iable +A f +bl ue +Ġovert urn +less ness +ĠTrib une +ĠIn g +Ġfact ories +Ġcat ches +Ġpr one +Ġmat rix +Ġlog in +Ġin acc +Ġex ert +s ys +Ġneed le +ĠQ ur +Ġnot ified +ould er +t x +Ġremind s +Ġpublisher s +Ġn ort +Ġg it +Ġfl ies +ĠEm ily +Ġflow ing +ĠAl ien +ĠStr ateg +Ġhard est +Ġmod ification +AP I +ĠM Y +Ġcr ashes +st airs +n umber +Ġur ging +ch annel +ĠFal con +Ġinhabit ants +Ġterr ifying +Ġutil ize +Ġban ner +Ġcig arettes +Ġsens es +ĠHol mes +Ġpract ition +ĠPhill ips +ott o +Ġcomp ile +Mod el +ĠK o +Ġ[ ] +Americ ans +ĠTer ms +Ġmed ications +ĠAn a +Ġfundament ally +ĠNot ice +Ġwe aker +Ġ 0000 +Ġgar lic +Ġout break +Ġeconom ist +ĠB irth +Ġobst acles +ar cer +ĠOr thodox +Ġplace bo +ĠC rew +asp berry +ĠAng els +Ġdis charge +Ġdestruct ive +11 7 +ĠR ising +Ġd airy +l ate +Ġcoll ision +ĠTig ers +ean or +ocument ed +ĠIn valid +Ġd ont +ĠL iter +ĠV a +Ġhyd rogen +Ġvari ants +ĠBrown s +Ġ19 65 +Ġind igenous +Ġtrad es +Ġremain der +Ġswe pt +ĠImp act +Ġred ist +Ġun int +grad uate +ãĥ ķ +ĠW ILL +ãģ® ç +ĠCrit ical +Ġf isher +Ġv icious +Ġrevers ed +Y ear +ĠS ox +Ġshoot ings +Ġfil ming +Ġtouchdown s +ai res +m el +Ġgrand father +Ġaffect ion +ing le +Ġover ly +Add itional +Ġsup reme +ĠGr ad +Ġsport ing +Ġmer cy +ĠBrook s +ount y +Ġperform s +Ġtight ly +Ġdem ons +Ġkill ings +Ġfact ion +ĠNov a +aut s +Ġund oubtedly +ar in +Ġunder way +ra k +Ġl iv +ĠReg ion +Ġbrief ing +s ers +cl oud +ĠM ik +us p +Ġpred iction +az or +Ġport able +ĠG and +Ġpresent ing +Ġ10 80 + » +ush i +ĠSp ark +there um +Ġjust ification +ĠN y +Ġcontract ors +ming ham +ĠSt yle +å ħ +ĠChron icles +ĠPict ure +Ġprov ing +Ġw ives +set t +Ġmole cules +ĠFair y +Ġconsist ing +Ġp ier +al one +in ition +Ġn ucle +j son +Ġg otta +Ġmob il +Ġver bal +ar ium +Ġmon ument +uck ed +Ġ25 6 +T ech +mine craft +ĠTr ack +Ġt ile +Ġcompat ibility +as is +Ġs add +Ġinstruct ed +ĠM ueller +Ġle thal +Ġhorm one +Ġor che +el se +Ġske let 
+Ġentert aining +Ġminim ize +ag ain +Ġunder go +Ġconst raints +Ġcig arette +ĠIslam ist +Ġtravel s +ĠPant hers +l ings +C are +Ġlaw suits +ur as +Ġcry st +Ġlow ered +Ġaer ial +Ġcomb inations +Ġha un +Ġch a +Ġv ine +Ġquant ities +Ġlink ing +b ank +Ġso y +B ill +ĠAngel a +Ġrecip ient +ĠProt est +Ġs ocket +Ġsolid arity +Ġâ Ĩ +m ill +Ġvar ies +ĠPak istani +Dr agon +Ġun e +Ġhor izon +³³³³ ³³³³ +Ġprov inces +Ġfrank ly +Ġenact ed +not es +[ ' +Ġ19 2 +ocr acy +Ġendorse ment +Ġover time +Tr ue +L ab +lic ted +ĠD NC +Ġbe ats +ĠJam ie +15 2 +ĠIN T +Cont act +Ġaccount ed +h ash +ĠPack ers +p ires +Ġles bian +Ġamend ments +Ġhop eful +ĠFin land +Ġspot light +Ġconfig ured +Ġtrou bled +Ġg aze +ĠCal gary +Ġrel iability +Ġins urg +sw er +b uy +ĠSk in +Ġp ixels +Ġhand gun +Ġpar as +Ġcateg or +ĠE L +ĠRe x +Ind eed +Ġkind a +Ġconj unction +ĠBry an +ĠMan ufact +y ang +Pl us +S QL +ish ment +Ġdom inate +Ġn ail +Ġo ath +Ġeru pt +ĠF ine +it bart +ĠCh ip +ĠAb d +ĠN am +Ġbuy er +Ġdiss ent +Le aks +Cont in +Ġr ider +ĠSome one +Ġill usion +c in +ĠBoe ing +Ġin adequ +ov ation +i ants +Ġreb uild +4 50 +ĠDest iny +S W +ĠT ill +H it +ia z +ĠBang l +acher s +ĠRe form +Ġse gments +Ġsystem atic +d c +ĠConserv atives +Ġport al +h or +ĠDragon bound +Ġdrag ged +om o +Ġthe e +ad vert +ĠRep orts +ĠE t +Ġbarrel s +Aug ust +Ġcompar isons +Ġhe x +Ġan throp +" [ +bor ough +ab i +Ġpict ured +play ing +ĠAdd ress +ĠMir ror +Sm ith +Ġt ires +ĠN PR +AA AA +Ġclass ification +ĠTh an +ĠH arm +ĠR A +Ġreject ion +min ation +Ġr anged +ĠF alls +D I +H ost +ãĤ ´ +ĠEx ample +list ed +th irds +Ġsaf egu +br and +Ġprob able +Can ada +IT ION +ĠQ aeda +Ġch ick +Ġimport s +h it +l oc +W W +Ġble w +Ġany time +Ġwh oles +ik ed +Ġcal culation +cre ate +ĠO ri +Ġupgr aded +Ġapp ar +ut ory +ĠM ol +B rit +ĠJ ong +IN AL +ĠStart ing +Ġd ice +urt le +Ġre lying +cl osure +Ġprof itable +Ġsl aughter +ĠMan ual +c aster +Ġ" $ +Ġfe ather +ĠSim ply +ie ves +Ġdeter ior +ĠPC I +Ġst amp +Ġfl aws +Ġsh ade +ham mer +Ġpass port +Ġcont ing +am el +Ġobser vers +Ġneg lect +ĠR B +ĠBrother hood +Ġskept ical +f amily +us k +Ġemotion ally +â Ļ +ĠBet a +ason able +id ity +ĠM ul +Ġkick ing +ĠC arm +oll ah +VERT IS +ĠAt hen +Ġlad der +ĠBul let +å £ +00 01 +ĠWild life +ĠM ask +ĠN an +R ev +Ġun acceptable +leg al +Ġcrowd ed +ag i +ĠC ox +j e +Ġmor ality +Ġfu els +Ġc ables +Ġman kind +ĠCarib bean +Ġanch or +Ġby te +ĠO ften +ĠO z +Ġcraft ed +Ġhistor ian +ĠW u +Ġtow ers +ĠCitiz ens +Ġhel m +Ġcred entials +Ġsing ular +ĠJes se +Ġtack les +Ġcont empt +Ġa fore +ĠSh adows +Ġn il +Ġur gent +app le +bl ood +Ġv on +Ġoff line +Ġbreat he +Ġj umps +Ġirre levant +ox ic +om al +import ant +J im +Ġgl oves +arm ing +dep th +Ġtal ents +ook ie +ĠS B +Ġpal m +uff s +est a +IG H +Ġcan on +ĠVer izon +ĠP le +Ġcou pled +vel t +Ġfundra ising +ĠGet ting +ĠD LC +Ġmathemat ical +ĠH S +ĠCard inals +te lling +Ġspons ors +Ġ Ï +ĠBull s +op tion +Ġprop ose +Ġmem orable +Ġembr aced +Ġdecl ining +He alth +ed a +Ġ} ; +Ġsp am +m ile +Ġpit cher +ĠE ight +Ġcar ing +ut ic +ro le +Ġair line +ernand ez +ĠAth let +Ġcert ification +ux e +rig er +Ġem pir +Ġsens ation +Ġdis m +Ġb olt +Ġev olve +H ouse +Ġconsult ation +ĠD uty +Ġtou ches +ĠN athan +Ġf aint +h ad +" ( +ĠCons umer +ĠExt reme +Ġ12 7 +ĠHer m +ĠSac rament +iz oph +Ġanx ious +ul ously +Ġsoc ially +ĠU TC +Ġsol ving +ĠLet ter +Hist ory +ed uc +Pr ice +) ); +Ġrel oad +am ic +Ġp ork +Ġdisc ourse +Ġt ournaments +ai ro +ĠK ur +ĠCost a +Ġviol ating +Ġinterf ere +Ġrecre ational +uff le +Ġspe eches +Ġneed ing +Ġremem bers +Ġcred ited +n ia +f ocused +amer a +Ġb ru +um bs +ĠCub an 
+Ġpreced ing +Ġnons ense +ac ial +Ġsmart phones +ĠSt ories +S ports +ĠEmer gency +oun cing +ef ined +Ġb er +Ġconsult ing +Ġm asters +he astern +." [ +ĠRun ning +Ġsus cept +ĠF eng +Americ a +pr ises +st itial +ĠWeek ly +ĠGreat er +mod ules +if ter +G raphics +ul er +Ġwho lly +Ġsupp ress +Ġconce aled +Ġhapp ily +Ġaccept s +ĠEn joy +Ġr ivers +ĠEx cept +2 25 +ĠN HS +ĠMc Connell +Ġp ussy +fer red +ut able +Ġatt ain +Ġ> = +Ġdepos its +roph ic +Ġnot orious +ĠSh aw +il itation +Ġepid emic +all ic +Ġsmall est +ov ich +Ġaccess ories +per ties +Ġsur plus +ĠMe ch +Ġamb ig +ĠImm igration +Ġch im +ev al +Ġpract icing +ĠMyster y +Ġdom ains +ĠSil icon +app s +Ġkilomet ers +e a +ĠSm ash +Ġwarrant y +Ġn ost +s il +re v +J on +ĠDub lin +Ġtast es +Ġb out +g reat +er ror +Ġsw itches +ĠB apt +D O +ok i +Ġsour ced +pro du +Ġattach ment +ĠIss ue +ĠQuest ion +Jo in +Ġf itted +Ġunlaw ful +^ ^ +ere k +Ġauthent ication +Ġst ole +Ġaccount ability +l abel +S earch +Ġal beit +atic an +fund ed +ĠAdd ing +ĠI Q +Ġsub mar +l it +a que +ĠLear ning +Ġint eger +M aster +ĠCh rom +Ġprem ier +O p +ĠLi u +Ġbl essed +ĠGl obe +ĠResp onse +Ġlegit im +ĠMer kel +Ġdispos al + ´ +Ġgau ge +pe at +Ġindu ced +Ġquestion able +arth y +ĠV it +ĠF eed +U ntil +U t +worth y +R Y +ĠH erald +ĠHam mer +Ġmed al +ĠR ivers +ĠH ack +Ġclar ify +Ġtrack ed +Ġautonom ous +Ġten ant +ĠQ atar +er ie +Ġgr im +ĠMon itor +Ġresist ant +ĠSpe c +ĠWell s +N AS +14 8 +Ġmin ers +iot ics +Ġmiss es +11 6 +g ian +g it +ĠE yes +p res +Ġgrad uated +Ġang el +Ġsyn chron +Ġefficient ly +Ġtrans mitted +H arry +Ġglob ally +EN CE +ĠMont ana +r aged +ĠPre vention +Ġp iss +ĠL l +Ġshe lf +ĠB JP +ĠTest ament +ĠL ate +ik er +ĠH app +ĠJul ian +h all +Ġsp ont +Ġshut down +Ġincons istent +Ġsubscrib ers +Ġske leton +ĠNe braska +Ġins pire +ĠV oid +F eed +Ġang les +ĠSpr ings +Ġbench mark +Ġvacc ines +izoph ren +se xual +uff ed +Ġsh ine +ĠK ath +Ġgest ure +ine a +Ġr ip +Ġopp ression +Ġcons cience +b t +ĠL um +Ġinc idence +ĠF a +w r +Ġmin eral +ĠSp urs +alk y +Ġth under +Ġop io +Be ing +ĠPal m +Ġwas ted +Ġl b +i aries +ĠIniti ative +Ġcur ric +Ġmark er +ĠMc L +Ġext ensions +ĠP v +ĠAr ms +Ġoffer ings +Ġdef enses +Ġvend or +Ġcontrad ict +ĠCol in +Ġredd it +Ġper ipher +12 2 +Ġs ins +E dit +IC T +So ft +ĠSh ah +Ġadministr ator +ĠT rip +Ġporn ography +Ġtu ition +in ence +ĠPro gress +Ġcat alog +Ġsu ite +Ġh ike +Ġreprodu ctive +eng ine +Ġd rought +ĠNo ah +Ġ2 30 +Ġd ude +Ġrelax ed +Ġpart ition +Ġparticip ant +Ġtel esc +Ġfe as +ĠF F +own er +Ġswe eping +Ġl enses +Ġmatch up +ĠRe pl +ourn als +Ġcred ible +Ġgrand mother +Ġther mal +Ġsubscrib ing +Ġident ities +col m +U CT +Ġreluct ant +us ers +ĠC ort +Ġassist ed +OS S +ATION S +IS H +Ġpharm aceutical +ic able +ad ian +ĠSon ic +ĠF ury +ĠM ong +A H +ĠPsych ology +Ġph osph +Ġtreat s +Ń Ķ +Ġstead ily +ĠHell o +Ġrel ates +Ġcl ue +Ex pl +a uth +Ġrev ision +Ġe ld +os ion +Ġbr on +14 4 +ri kes +Ġmin es +Ġblank et +ĠF ail +el ed +ĠIm agine +ĠPl anned +a ic +Re quest +M ad +ĠHor se +ĠEag le +Ġcap ac +15 7 +Ġl ing +ĠN ice +ĠP arenthood +min ster +og s +ens itive +Not hing +Ġcar n +F in +ĠP E +Ġr ifles +ĠL P +S and +Ġgui Active +Ġtour ist +C NN +Ġunve iled +Ġpredec essor +} { +u ber +Ġoff shore +Ġopt ical +ĠR ot +ĠPear l +et on +Ġst ared +Ġfart her +at ility +cont in +ĠG y +ĠF oster +ĠC oc +ri ents +Ġdesign ing +ĠEconom y +ON G +W omen +ĠN ancy +er ver +Ġmas cul +Ġcasual ties +Ġ2 25 +ĠS ullivan +ĠCh oice +Ġa ster +w s +Ġhot els +Ġconsider ations +Ġcou ch +ĠSt rip +ĠG n +Ġmanip ulate +l ied +Ġsynt hetic +Ġassault ed +Ġoff enses +ĠDra ke +Ġim pe +Oct ober +ĠHer itage 
+h l +ĠBl air +Un like +Ġg rief +Ġ4 50 +Ġopt ed +Ġresign ation +il o +Ġver se +ĠT omb +Ġu pt +Ġa ired +ĠH ook +ĠML B +Ġassum es +out ed +ĠV ers +Ġinfer ior +Ġbund le +ĠD NS +ograp her +Ġmult ip +ĠSoul s +Ġillust rated +Ġtact ic +Ġdress ing +Ġdu o +Con f +Ġrel ent +Ġc ant +Ġscar ce +Ġcand y +ĠC F +Ġaffili ated +Ġspr int +yl an +ĠGarc ia +Ġj unk +Pr int +ex ec +C rit +Ġport rait +ir ies +ĠOF F +Ġdisp utes +W R +L ove +ãģ Ħ +ĠRe yn +Ġh ipp +op ath +Ġflo ors +ĠFe el +Ġwor ries +Ġsett lements +ĠP os +Ġmos que +Ġfin als +Ġcr ushed +ĠPro bably +ĠB ot +ĠM ans +ĠPer iod +Ġsovere ignty +Ġsell er +Ġap ost +Ġam ateur +Ġd orm +Ġconsum ing +Ġarm our +ĠRo ose +Ġint ensive +Ġelim inating +ĠSun ni +ĠAle ppo +j in +Ġadv ise +p al +ĠH alo +Ġdes cent +Ġsimpl er +Ġbo oth +ST R +L ater +ĠC ave +== = +Ġm ol +Ġf ist +Ġshot gun +su pp +Ġrob bery +E ffect +Ġobsc ure +ĠProf essional +Ġemb assy +Ġmilit ant +Ġinc arcer +Ġgener ates +Ġlaun ches +Ġadministr ators +Ġsh aft +Ġcirc ular +Ġfresh man +ĠW es +ĠJo el +ĠD rew +ĠDun can +ĠApp arently +s ight +ĠIntern al +ĠInd ividual +ĠF E +Ġb ore +ĠM t +Ġbroad ly +ĠO ptions +ount ain +ip es +ĠV ideos +20 4 +Ġh ills +Ġsim ulation +Ġdisappoint ment +it an +ĠLabor atory +Ġup ward +Ġbound ary +Ġdark er +h art +Ġdomin ance +C ong +ĠOr acle +ĠL ords +Ġscholars hip +ĠVin cent +ed e +ĠR ah +Ġencour ages +ro v +Ġqu o +Ġprem ise +ĠCris is +ĠHol ocaust +Ġrhyth m +Ġmet ric +cl ub +Ġtransport ed +Ġn od +ĠP ist +Ġancest ors +ĠFred er +th umbnails +ĠC E +ON D +Ph il +ven ge +ĠProduct s +cast le +Ġqual ifying +ĠK aren +VERTIS EMENT +Ġmight y +Ġexplan ations +Ġfix ing +D i +Ġdecl aring +Ġanonym ity +Ġju ven +ĠN ord +ĠDo om +ĠAct ually +O k +ph is +ĠDes ert +Ġ11 6 +I K +ĠF M +Ġinc omes +V EL +ok ers +Ġpe cul +Ġlight weight +g ue +Ġacc ent +Ġincre ment +ĠCh an +Ġcompl aining +ĠB aghd +Ġmidfield er +Ġover haul +Pro cess +ĠH ollow +ĠTit ans +Sm all +man uel +ĠUn ity +ĠEv ents +S ty +Ġdispro portion +n esty +en es +ĠC od +Ġdemonstr ations +ĠCrim son +ĠO H +Ġen rolled +Ġc el +ĠBre tt +Ġa ide +Ġhe els +Ġbroad band +Ġmark ing +Ġw izard +ĠN J +ĠChief s +Ġingred ient +Ġd ug +ĠSh ut +urch ase +end or +Ġfar mer +ĠGold man +12 9 +15 5 +Or der +Ġl ion +i ably +Ġst ain +ar ray +ilit ary +ĠFA Q +Ġexpl oded +ĠMcC arthy +ĠT weet +ĠG reens +ek ing +l n +ens en +Ġmotor cycle +Ġpartic le +Ġch olesterol +B ron +Ġst air +Ġox id +Ġdes irable +ib les +Ġthe or +for cing +Ġpromot ional +ov o +b oot +ĠBon us +raw ling +Ġshort age +ĠP sy +Ġrecru ited +Ġinf ants +Ġtest osterone +Ġded uct +Ġdistinct ive +Ġfirm ware +bu ilt +14 5 +Ġexpl ored +Ġfact ions +Ġv ide +Ġtatt oo +Ġfinan cially +Ġfat igue +Ġproceed ing +const itutional +Ġmis er +Ġch airs +gg ing +ipp le +Ġd ent +Ġdis reg +ç Ķ +st ant +ll o +b ps +aken ing +Ġab normal +ĠE RA +å£ « +ĠH BO +ĠM AR +Ġcon cess +Ġserv ant +Ġas pir +l av +ĠPan el +am o +Ġprec ip +Ġrecord ings +Ġproceed ed +Ġcol ony +ĠT ang +ab lo +Ġstri pped +Le ft +to o +Ġpot atoes +Ġfin est +% ). 
+Ġc rap +ĠZ ach +ab ases +ĠG oth +Ġbillion aire +w olf +Ġsan ction +S K +Ġlog ged +P o +ey ed +un al +Ġcr icket +Ġarm ies +Ġunc overed +Cl oud +ó n +Ġreb ounds +Ġm es +O per +P ac +Ġnation ally +Ġinsert ed +p ict +Ġgovern ance +Ð ¸ +Ġprivile ges +G ET +Ġfavor ites +im ity +Ġlo ver +the m +em pl +Ġgorge ous +An n +Ġsl ipped +Ġve to +B ob +Ġsl im +u cc +ĠF ame +udden ly +Ġden ies +ĠM aur +Ġdist ances +Ġw anna +t ar +ĠS ER +Ġâ Ī +Ġle mon +at hetic +Ġlit eral +Ġdistingu ished +Ġansw ering +G I +Ġrelig ions +ĠPhil os +ĠL ay +Ġcomp os +ire ments +ĠK os +ine z +roll ing +Ġyoung est +and ise +ĠB orn +Ġalt ar +am ina +ĠB oot +v oc +Ġdig ging +Ġpress ures +Ġl en +26 4 +Ġassass ination +ĠBir mingham +ĠMy th +Ġsovere ign +ĠArt ist +ĠPhot ograph +Ġdep icted +Ġdisp ens +orth y +Ġamb ul +int eg +ĠC ele +ĠTib et +Ġhier archy +Ġc u +Ġpre season +ĠPet erson +Ġcol ours +Ġworry ing +Ġback ers +ĠPal mer +ĠÎ ¼ +Ġcontribut or +Ġhear ings +Ġur ine +Ġ Ù +ourge ois +Sim ilar +ĠZ immer +s omething +ĠUS C +Ġstrength s +ĠF I +Ġlog ging +As ked +ĠTh ai +in qu +ĠW alt +Ġcrew s +it ism +3 01 +Ġshar ply +um ed +Ġred irect +r ators +In f +ĠWe apons +Ġte asp +19 99 +L ive +ĠEs pecially +ĠS ter +ĠVeter ans +Ġint ro +other apy +Ġmal ware +Ġbre eding +Ġmole cular +ĠR oute +ĠCom ment +oc hem +Ġa in +Se ason +Ġlineback er +Ä « +ĠEconom ics +es ar +ĠL ives +ĠEm ma +Ġk in +ĠTer rit +Ġpl anted +ot on +ĠBut ter +ĠSp ons +P ER +Ġdun geon +Ġsymb olic +Ġfil med +Ġdi ets +Ġconclud es +Ġcertain ty +ĠForm at +Ġstr angers +form at +ĠPh ase +Ġcop ied +Ġmet res +ld a +ĠUs ers +Ġdeliber ate +Ġwas hed +ĠL ance +im ation +Ġimpro per +ĠGen esis +ick r +ĠK ush +Ġreal ise +Ġembarrass ing +alk ing +b ucks +Ġver ified +Ġout line +year s +ĠIn come +20 2 +Ġz ombies +F inal +ĠMill enn +Ġmod ifications +ĠV ision +ĠM oses +ver b +iter ranean +ĠJ et +Ġnav al +ĠA gg +Ġur l +Ġvict ories +Ġnon etheless +Ġinj ust +ĠF act +ç ļ +Ġins ufficient +re view +face book +Ġnegoti ating +Ġguarant ees +im en +uten berg +Ġg ambling +Ġcon gr +Load ing +Ġnever theless +Ġpres idents +ĠIndust rial +Ġ11 8 +Ġp oured +ĠT ory +Ġ17 5 +Ġ: = +Sc ott +ange red +T ok +Ġorgan izers +M at +ĠG rowth +Ġad ul +Ġens ures +Ġ11 7 +é¾į å +Ġmass acre +Ġgr ades +be fore +AD VERTISEMENT +ĠSl ow +ĠM MA +âĢĶ " +ĠV atican +Q aeda +Ġo we +66 66 +ĠS orry +ĠGr ass +Ġbackground s +Ġexha usted +Ġcl an +Ġcomprom ised +ĠE lf +ĠIsa ac +ens on +In vest +IF A +Ġinterrupt ed +ãĥī ãĥ© +Ġtw isted +ĠDrag ons +M ode +ĠK remlin +Ġfert il +he res +ph an +ĠN ode +f ed +ĠOr c +Ġunw illing +C ent +Ġprior it +Ġgrad uates +Ġsubject ive +Ġiss uing +ĠL t +Ġview er +Ġw oke +Th us +bro ok +Ġdep ressed +Ġbr acket +ĠG or +ĠFight ing +Ġstri ker +Rep ort +ĠPortug al +Ġne o +w ed +19 9 +Ġflee ing +sh adow +ident ified +US E +Ste am +Ġstret ched +Ġrevel ations +art ed +ĠD w +Ġalign ment +est on +ĠJ ared +S ep +Ġblog s +up date +g om +r isk +Ġcl ash +ĠH our +Ġrun time +Ġunw anted +Ġsc am +Ġr ack +Ġen light +on est +ĠF err +Ġconv ictions +Ġp iano +Ġcirc ulation +ĠW elcome +Ġback lash +ĠW ade +Ġrece ivers +ot ive +J eff +Ġnetwork ing +ĠPre p +ĠExpl orer +Ġlect ure +Ġupload ed +ĠMe at +B LE +ĠNaz is +ĠSy nd +st ud +ro ots +ri ans +Ġportray ed +Ġ ?? 
+ĠBudd ha +s un +Rober t +ĠCom plex +Ġover see +Ġste alth +T itle +ĠJ obs +ĠK um +Ġappreci ation +ĠM OD +Ġbas ics +Ġcl ips +Ġnurs ing +Ġpropos ition +Ġreal ised +ĠNY C +Ġall ocated +ri um +ar an +ĠPro duction +ĠV ote +Ġsm ugg +Ġhun ter +az er +ĠCh anges +Ġfl uct +y on +Ar ray +Ġk its +W ater +Ġuncom mon +Ġrest ing +ell s +w ould +Ġpurs ued +Ġassert ion +omet own +ĠMos ul +ĠPl atform +io let +Ġshare holders +Ġtra ils +P ay +ĠEn forcement +ty pes +ĠAn onymous +Ġsatisf ying +il ogy +Ġ( ' +w ave +c ity +Ste ve +Ġconfront ation +ĠE ld +C apt +ah an +ht m +ĠC trl +ON S +2 30 +if a +hold ing +Ġdelic ate +Ġj aw +ĠGo ing +or um +S al +Ġd ull +ĠB eth +Ġpr isons +Ġe go +ĠEl sa +avor ite +ĠG ang +ĠN uclear +Ġsp ider +ats u +Ġsam pling +Ġabsor bed +ĠPh arm +iet h +Ġbuck et +ĠRec omm +O F +ĠF actory +AN CE +Ġb acter +H as +ĠObs erv +12 1 +Ġprem iere +De velop +Ġcur rencies +C ast +Ġaccompany ing +ĠNash ville +Ġfat ty +ĠBre nd +Ġloc ks +Ġcent ered +ĠU T +augh s +or ie +ĠAff ordable +v ance +D L +em et +Ġthr one +ĠBlu etooth +Ġn aming +if ts +AD E +Ġcorrect ed +Ġprompt ly +ĠST R +Ġgen ome +Ġcop e +Ġval ley +Ġround ed +ĠK end +al ion +p ers +Ġtour ism +Ġst ark +v l +Ġblow ing +ĠSche dule +st d +Ġunh appy +Ġlit igation +ced es +Ġand roid +Ġinteg ral +ere rs +ud ed +t ax +Ġre iter +ĠMot ors +oci ated +Ġwond ers +ĠAp ost +uck ing +ĠRoose velt +f ram +Ġyield s +Ġconstit utes +aw k +Int erest +Ġinter im +Ġbreak through +ĠC her +Ġpro sec +ĠD j +ĠM T +Res p +ĠP T +Ġs perm +ed it +B T +Lin ux +count ry +le ague +Ġd ick +Ġo ct +Ġinsert ing +Ġsc ra +ĠBrew ing +Ġ19 66 +Ġrun ners +Ġpl un +id y +ĠD ian +Ġdys function +Ġex clusion +Ġdis gr +Ġincorpor ate +Ġrecon c +Ġnom inated +ĠAr cher +d raw +achel or +Ġwrit ings +Ġshall ow +Ġh ast +ĠB MW +ĠR S +Ġth igh +Ġ19 63 +Ġl amb +Ġfav ored +ag le +Ġcool er +ĠH ours +ĠG U +ĠOrig in +Ġglim pse +---------------- ---- +L im +Ġche ek +Ġj ealous +- ' +Ġhar ness +ĠPo ison +Ġdis abilities +ne apolis +Ġout look +Ġnot ify +ĠIndian apolis +Ġab rupt +ns ic +Ġenc rypted +Ġfor fe +reat h +Ġr abb +Ġfound ations +Ġcompl iment +ĠInter view +ĠS we +Ġad olesc +Ġmon itors +ĠSacrament o +Ġtime ly +Ġcontem pl +Ġposition ed +Ġpost ers +ph ies +iov ascular +v oid +ĠFif th +Ġinvestig ative +OU N +Ġinteg rate +ĠIN C +ish a +ibl ings +ĠRe quest +ĠRodrig uez +Ġsl ides +ĠD X +Ġfemin ism +Ġdat as +Ġb end +ir us +ĠNig eria +F ox +Ch ange +Ġair plane +ĠLad en +Ġpublic ity +ixt y +Ġcommit ments +Ġaggreg ate +Ġdisplay ing +ĠAr row +Ġ12 2 +Ġrespect s +and roid +s ix +ĠSh a +Ġrest oration +) \ +W S +oy s +Ġillust rate +with out +12 6 +ĠâĶ Ĥ +Ġpick up +n els +Ġ .... +f ood +ĠF en +) ? 
+Ġphenomen a +Ġcompan ions +ĠW rite +Ġsp ill +Ġbr idges +ĠUp dated +ĠF o +Ġinsect s +ASH INGTON +Ġsc are +il tr +ĠZh ang +Ġsever ity +Ġind ul +14 9 +ĠCo ffee +Ġnorm s +Ġp ulse +ĠF T +Ġhorr ific +ĠDest roy +ĠJ SON +Ġo live +Ġdiscuss es +R est +E lect +ĠW inn +ĠSurv iv +ĠH ait +S ure +op ed +Ġro oted +ĠS ke +ĠBron ze +Ġl ol +Def ault +Ġcommod ity +red ited +Ġliber tarian +Ġforb idden +Ġgr an +à ¨ +Ġl ag +en z +dri ve +Ġmathemat ics +Ġw ires +Ġcrit ically +Ġcarb ohyd +ĠChance llor +ĠEd die +Ġban ning +ĠF ri +Ġcompl ications +et ric +ĠBangl adesh +Ġband width +St op +ĠOrig inally +Ġhalf way +yn asty +sh ine +Ġt ales +rit ies +av ier +Ġspin ning +ĠWH O +Ġneighbour hood +b ach +Ġcommer ce +ĠS le +B U +Ġentreprene ur +Ġpecul iar +ĠCom ments +f re +3 20 +IC S +Ġimag ery +ĠCan on +ĠElect ronic +sh ort +( ( +D ig +Ġcomm em +u ced +Ġincl ined +ĠSum mon +Ġcl iff +ĠMed iterranean +Ġpo etry +Ġprosper ity +ĠRe ce +Ġp ills +m ember +Ġfin ale +un c +ĠG ig +ä ½ +Ġl od +Ġback ward +- + +ĠFor ward +Ġth ri +s ure +Ġso ap +ĠF X +R ES +ĠSe xual +oul os +Ġfool ish +Ġright eous +Ġco ff +terror ism +ust ain +ot er +Ġab uses +ne xt +Ġab usive +Ġthere after +Ġprohib ition +ĠS UP +Ġd ip +Ġr ipped +Ġinher ited +Ġb ats +st ru +G T +Ġflaw ed +ph abet +Ġf og +do ors +Ġim aging +Ġdig its +ĠHung ary +Ġar rog +Ġteach ings +Ġprotocol s +ĠB anks +à ¸ +p ound +ĠC urt +." ) +. / +Ġex emption +end ix +ĠM ull +Ġimpro ves +ĠG amer +d imensional +I con +ĠMarg aret +St atus +d ates +Ġint ends +Ġdep ict +Ġpark ed +J oe +ĠMar ines +chn ology +! ). +Ġjud ged +Ġwe ights +R ay +Ġapart ments +he ster +Ġrein force +Ġoff ender +occ up +Ġs ore +e pt +ĠPH P +ĠB row +Ġauthor ization +ĠR isk +ĠDel aware +ĠQ U +Ġnot ifications +Ġsun light +Ġex clude +d at +Ġm esh +ĠSud an +Ġbelong ed +Ġsub way +Ġno on +ĠInter ior +ol ics +ĠL akers +Ġc oding +Dis claimer +Cal if +O ld +Ġdis l +???? ? 
+Ġconfir ms +Ġrecruit ment +Ġhom icide +Cons ider +ĠJeff rey +ft y +} ; +Ġobject ion +do ing +ĠLe o +W ant +Ġgl ow +ĠClar ke +ĠNorm an +Ġver ification +Ġpack et +ĠForm ula +Ġpl ag +es ville +Ġshout ing +Ġo v +ĠR EC +ĠB ub +Ġn inth +Ġener g +Ġvalid ity +Ġup s +j ack +Ġneighbor ing +ĠN ec +ew orks +ĠH ab +are z +Ġsp ine +Ġevent ual +ĠLe aders +ĠC arn +Ġprob ation +Ġrom ance +ms g +ĠMechan ical +ER Y +R ock +Ġpart isan +N ode +ass ets +min ent +Ġforeign ers +Ġtest ify +ĠUs ually +l ords +ĠG ren +ĠPow ell +BI L +Ġs r +Ġadd ict +Ġshell s +Ġs igh +ĠY ale +tern ity +Ġ7 50 +E U +ĠR ifle +Ġpat ron +em a +ĠB annon +an ity +Ġtrop ical +ĠV II +c ross +Every thing +ĠIS O +Ġhum ble +ass ing +ĠF IG +Ġupd ating +ys on +Ġcal cium +Ġcompet ent +Ġste ering +Pro t +ĠS Y +ĠFin als +ĠR ug +15 9 +13 7 +ĠG olf +Ġ12 6 +Ġaccommod ation +ĠHug hes +Ġaest hetic +art isan +ĠTw ilight +Ġpr ince +ĠAgric ulture +ĠDis co +Ġpreced ent +Ġtyp ing +author ized +O ption +ĠA ub +l ishes +ach t +m ag +P eter +ĠU FO +mont on +ĠL ith +Ġa rom +Ġsec uring +Ġconf ined +priv ate +Ġsw ords +Ġmark ers +Ġmetab olic +se lect +ĠCur se +ĠO t +g ressive +Ġinc umb +ĠS aga +Ġpr iced +Ġclear ance +Cont ent +Ġdr illing +Ġnot ices +Ġb ourgeois +Ġv est +Ġcook ie +ĠGuard ians +ry s +in yl +Ġ12 4 +Ġpl ausible +on gh +ĠOd in +Ġconcept ion +ĠY uk +ĠBaghd ad +ĠFl ag +Aust ral +ĠI BM +Ġintern ationally +ĠWiki Leaks +I ED +Ġc yn +Ġcho oses +ĠP ill +Ġcomb ining +Ġrad i +ĠMoh ammed +def ense +atch ing +Sub ject +ic iency +Fr ame +Ġ{ " +Ġche ss +Ġtim er +19 0 +Ġt in +Ġord inance +emet ery +Ġacc using +Ġnotice able +Ġcent res +Ġl id +ĠM ills +img ur +Ġz oom +erg ic +Ġcomp ression +pr im +f ind +Ġsur g +Ġp and +ĠK ee +ĠCh ad +cell ence +oy le +Ġsocial ism +ĠT ravis +ĠM Hz +Ġgu ild +ALL Y +ĠSub scribe +ĠRel ated +Ġoccur rence +itch ing +Ġfict ional +Ġcr ush +ĠE A +c od +m ix +ĠTri ple +Ġretrie ve +Ġstimul us +Ġpsych iat +ĠDo or +Ġhomosexual ity +Ġelement ary +Ġcell ular +id ian +ĠL aun +Ġintrig uing +Ġfo am +ĠB ass +id i +its u +Ġass ure +Ġcongr at +Ġbusiness man +ĠBo ost +cl ose +Ġl ied +Ġsc iences +ĠO mega +ĠG raphics +Ġ< = +sp oken +Ġconnect ivity +S aturday +ĠAven gers +Ġto ggle +Ġank le +Ġnational ist +mod el +ĠP ool +ophob ia +V ar +ĠM ons +ator ies +Ġaggress ively +C lear +For ge +act ers +Ġhed ge +Ġpip es +Ġbl unt +Ġs q +Ġremote ly +W ed +as ers +Ġref riger +Ġt iles +Ġresc ued +Ġcompr ised +ins ky +Ġman if +avan augh +Ġprol ifer +Ġal igned +x ml +Ġtri v +Ġcoord ination +ĠP ER +ĠQu ote +13 4 +b f +ĠS aw +Ġtermin ation +Ġ19 0 +Ġadd itions +Ġtri o +Ġproject ions +Ġpositive ly +Ġin clusive +Ġmem br +19 90 +old er +Ġpract iced +ink le +Ar ch +Ġstar ters +ari us +Ġinter mediate +ĠBen ef +ĠK iller +Ġinter ventions +ĠK il +ĠF lying +In v +Ġprem ature +Ġpsych iatric +Ġind ie +Ġcoll ar +ĠRain bow +af i +Ġdis ruption +ĠFO X +cast ing +Ġmis dem +c ro +Ġw ipe +ard on +Ġb ast +ĠTom my +ĠRepresent ative +Ġbell y +ĠP O +ĠBre itbart +13 2 +Ġmess aging +Sh ould +Ref erences +ĠG RE +ist ical +L P +ĠC av +ĠC razy +Ġintu itive +ke eping +ĠM oss +Ġdiscont in +ĠMod ule +Ġun related +ĠPract ice +ĠTrans port +Ġstatist ically +orn s +Ġs ized +p u +Ġca f +ĠWorld s +ĠRod gers +ĠL un +ĠCom ic +l iving +Ġc ared +Ġclim bed +) { +Ġconsist ed +Ġmed ieval +fol k +Ġh acked +Ġd ire +ĠHerm ione +Ġt ended +ce ans +D aniel +w ent +Ġlegisl ators +Ġred es +g ames +Ġg n +am iliar +Ġ+ + +gg y +th reat +Ġmag net +Ġper ceive +Ġz ip +Ġindict ment +Ġcrit ique +g ard +ĠSaf e +ĠC ream +Ġad vent +ob a +Ġv owed +ous ands +Ġsk i +Ġabort ions +u art +Ġstun ned +Ġadv ancing +Ġlack ed +Ġ\ " +Ġsch izophren 
+Ġeleg ant +Ġconf erences +Ġcance led +ĠHud son +ĠHop efully +Ġtr ump +Ġfrequ encies +Ġmet eor +ĠJun ior +ĠFle et +ĠMal colm +ĠT ools +Ġ ........ +Ġh obby +ĠEurope ans +Ġ15 00 +ĠInt o +Ġs way +ĠApp ro +ĠCom pl +Comm unity +Ġt ide +ĠSum mit +ä » +Ġinter vals +ĠE ther +Ġhabit at +ĠSteven s +lish ing +ĠDom ain +Ġtrig gers +Ġch asing +Ġchar m +ĠFl ower +it ored +Ġbless ing +Ġtext ures +F ive +Ġliqu or +R P +F IN +Ġ19 62 +C AR +Un known +Ġres il +ĠL ily +Ġabund ance +Ġpredict able +r ar +Ġbull shit +le en +che t +M or +M uch +ä ¹ +Ġemphas ized +Ġcr ust +Ġprim itive +Ġenjoy able +ĠPict ures +Ġteam mate +pl er +ĠT ol +ĠK ane +Ġsummon ed +th y +ram a +ĠH onda +Ġreal izing +Ġquick er +Ġconcent rate +cle ar +Ġ2 10 +ĠErd ogan +ar is +Ġrespond s +ĠB I +Ġelig ibility +Ġpus hes +ĠId aho +Ġagg rav +Ġru ins +ur ations +Ġb ans +Ġan at +sh are +Ġgr ind +h in +um en +Ġut ilities +ĠYan kees +Ġdat abases +ĠD D +Ġdispl aced +Ġdepend encies +Ġstim ulation +h un +h ouses +ĠP retty +ĠRaven s +ĠTOD AY +Ġassoci ates +Ġthe rape +cl ed +Ġde er +Ġrep airs +rent ice +Ġrecept ors +Ġrem ed +ĠC e +Ġmar riages +Ġball ots +ĠSold ier +Ġhilar ious +op l +13 8 +Ġinherent ly +Ġignor ant +Ġb ounce +ĠE aster +REL ATED +ĠCur rency +E V +ãĥ ŀ +ĠLe ad +Ġdece ased +B rien +ĠMus k +J S +Ġmer ge +heart ed +c reat +m itt +m und +ĠâĢ ĭ +ĠB ag +Ġproject ion +Ġj ava +ĠStand ards +ĠLeon ard +Ġcoc onut +ĠPop ulation +Ġtra ject +Ġimp ly +Ġcur iosity +ĠD B +ĠF resh +ĠP or +Ġheav ier +ne ys +gom ery +Ġdes erved +Ġphr ases +ĠG C +Ġye ast +d esc +De ath +Ġreb oot +Ġmet adata +IC AL +Ġrep ay +ĠInd ependence +Ġsubur ban +ical s +Ġat op +Ġall ocation +gener ation +ĠG ram +Ġmoist ure +Ġp ine +ĠLiber als +Ġa ides +Ġund erest +ĠBer ry +Ġcere mon +3 70 +ast rous +ĠPir ates +Ġt ense +ĠIndust ries +ĠApp eals +ĠN ear +Ġè£ı ç +Ġlo vers +ĠC AP +ĠC raw +Ġg iants +Ġeffic acy +E lement +ĠBeh avior +ĠToy ota +Ġint est +P riv +A I +Ġmaneu ver +Ġperfect ion +Ġb ang +p aper +r ill +Ge orge +b order +in ters +ĠS eth +Ġcl ues +ĠLe vi +ĠRe venue +14 7 +Ġv apor +Ġfortun ate +Ġthreat ens +Ġve t +Ġdepend ency +ers ed +art icle +ĠBl izzard +Ġch lor +Ġmin us +ĠB ills +Ġcryptoc urrency +Ġmetabol ism +ter ing +Ġp estic +step s +ĠTre asure +ract ed +ĠConst ant +Ġtem p +13 9 +ĠDet ective +ur ally +Ġrecover ing +Ġcort ex +Ġ14 4 +cl osed +Ġprejud ice +aun ted +Ġstorm s +ĠN OW +Ġmach inery +Add ress +Ġcompe lled +27 0 +Ġdesp air +b ane +Ġveget able +Ġbed s +Lear n +Ġcolor ful +Ġsp ike +Ġmarg ins +Ġsymp athy +Ġworks hop +ĠC BC +S at +Ġburn s +ĠG ender +Ġ12 9 +ĠC able +Ġdeb ts +ĠThe resa +Ġreflect ing +Ġa irst +Ġr im +ram id +Ġweakness es +W rit +ogg le +t i +ĠCh arge +Ġwe ighed +Ġ( . 
+Ġl aughter +Ġrou ter +ĠDemocr acy +D ear +Ġhas ht +Ġd y +Ġhint s +run ning +Ġfin ishes +ar us +M ass +res ult +asc us +Ġv intage +Ġcon qu +Ġwild ly +ac ist +Ġl ingu +Ġprot agonist +st rom +te enth +ĠSol o +m ac +f illed +Ġre nown +it ives +Ġmot ive +ĠAnt ar +ĠM ann +ĠAd just +Ġrock ets +Ġtrou bling +e i +Ġorgan isms +ass is +Christ ian +Ġ14 5 +ĠH ass +Ġsw all +Ġw ax +ĠSurv ival +V S +ĠM urd +v d +stand ard +Ġdrag ons +Ġacceler ation +r ational +f inal +Ġp aired +ĠE thereum +Ġinterf aces +Ġres ent +Ġartif acts +Å « +are l +Ġcompet itor +ĠNich olas +ĠSur face +c pp +ĠT ot +Ġeconom ically +Ġorgan ised +Ġen forced +in ho +Ġvar ieties +Ġab dom +ĠBa iley +id av +ĠSal v +p aid +Ġalt itude +ess ert +ĠG utenberg +are a +op oulos +Ġprofess ors +igg s +ĠF ate +he y +Ġ3 000 +D ist +Ġtw ins +c ill +ĠM aps +Ġtra ps +Ġwe ed +ĠK iss +Ġy oga +Ġrecip ients +ĠWest minster +Ġpool s +ĠWal mart +18 8 +ĠSchool s +att ack +ĠAR M +par agraph +W arning +j l +Ġself ish +anche z +ĠHe ights +F re +ĠS oph +Ġ -------------------------------- +t ml +33 3 +Ġraid s +Ġsatell ites +KE Y +Ġlast s +Ñ Ĥ +In s +ĠD ame +Ġunp redict +// / +gh ai +Ġart illery +Ġcru ise +Ġg el +ĠCabin et +Ġbl ows +ĠE sp +Ġprox imity +ot he +ĠSk ills +ĠU pper +ob o +ĠN DP +Ġenjoy s +Ġrepe ating +ĠConst ruction +ĠQuest ions +H illary +Ġu int +Ġprocess ors +ĠGib son +ĠMult iple +q a +ĠB om +ĠM iles +vent ional +Ġhur ts +s kin +ĠA IDS +Ġadvis ers +ĠR oot +Ġmethod ology +ĠD ale +Ġdet on +ĠKnow ledge +sequ ently +Ġ12 1 +Ġconnect s +C y +ĠD anger +Ġcontribut ors +ĠB ent +Ġbr ass +ĠGun s +int o +ĠFort une +Ġbro ker +bal ance +Ġlength s +Ġv ic +Ġaver aging +Ġappropri ately +ĠCamer a +Ġsand wich +ĠCD C +Ġcoord inate +Ġnav ig +Ġgood ness +l aim +Ġbra ke +Ġextrem ist +ĠW ake +ĠM end +ĠT iny +ĠC OL +ĠR F +ĠD ual +ĠW ine +C ase +Ġref ined +Ġl amp +L ead +Ġb apt +ĠCar b +ĠS add +ĠMin neapolis +PD F +Ear ly +ĠH idden +I ts +ĠT IME +Ġp ap +Ġcommission ed +ĠF ew +ĠCol ts +ĠB ren +Ġbot hered +Ġlike wise +Ex per +ĠSch w +c ry +n n +ĠM itch +im on +M G +b m +UM P +r ays +Ġregist ry +Ġ2 70 +ach ine +re lla +ant ing +00 000 +Ġru ined +sp ot +Ġt a +Ġmaxim ize +Ġincon ven +D ead +H uman +En abled +ĠMar ie +Ġch ill +ĠParad ise +Ġstar ring +ĠLat ino +ĠProt ocol +ĠE VER +Ġsuppl iers +m essage +ĠBro ck +Ġser um +âĸĪâĸĪ âĸĪâĸĪ +Ġen comp +Ġamb ition +ues e +Ġar rows +And rew +Ġanten na +Ġ19 61 +ĠB ark +Ġb ool +ãĤ ª +ĠSt orage +Ġrail way +Ġtoug her +ĠC ad +Ġwas hing +P y +' ] +em bed +ĠMem phis +ack le +Ġfam ously +ĠF ortunately +ov ies +Ġmind set +Ġsne ak +ĠD h +RA W +ĠSim pson +Ġliv est +Ġland mark +Ġc ement +L ow +Ġthr illed +ĠCour se +in el +Ġch uck +id ate +gl obal +Ġwh it +Ġ � +ad ays +s ki +ĠS V +Ġvir uses +30 6 +ĠResp ons +Ġthe aters +ĠBr anch +ĠGene va +ĠM K +Ġunbel iev +Ġcommun ist +Orig inal +ĠRe ceived +ĠTrans fer +ĠAr g +In put +ĠStr ategy +Ġpal ace +the ning +D ri +Ġsent encing +umbn ail +Ġp ins +re cy +Ġs iblings +Get ting +ĠB U +ĠNorth west +Ġprolong ed +ĠSak ura +C omb +ĠB our +Ġinadequ ate +ĠK ash +Ġus ername +ĠImpro ve +Ġbatt ling +ĠM AC +Ġcurric ulum +Ġs oda +ĠC annon +Ġsens ible +sp ons +De cember +Ġw icked +ĠP engu +Ġdict ators +ĠHe arts +og yn +Ġsimilar ities +ĠSt ats +Ġh ollow +it ations +": [ +Ġh over +ĠList en +s ch +S und +Ġc ad +ĠPar ks +Ġl ur +Ġhy pe +ĠL em +N AME +is ure +Fr iday +Ġshoot s +Ġclos es +Ġd b +ĠR idge +ĠDiff erent +Ġrepl ies +ĠBroad way +op ers +Ġint oler +ĠZe us +akes pe +Ġpropri etary +Ġrequest ing +Ġcontro llers +ĠM IN +im edia +be cca +Ġexp ans +Ġoil s +B ot +ĠCh and +Ġpr inter +Ġto pped +ĠP OL +ĠEar lier +S ocial +av in +Ġdecre ases 
+ĠSe b +Ġspecific ations +ĠBl ast +ĠK urt +Ġfre el +B rown +Ġdil ig +ro e +ĠPro blem +ĠQu ad +Ġdecent ral +ĠV ector +an ut +Ġplug ins +ĠGreg ory +Ġfuck ed +el ines +ĠAmb assador +t ake +Ġcle ans +ong yang +An onymous +st ro +" } +al ine +ĠO dd +ĠE ug +2 16 +Ġbo il +ĠP owers +Ġnurs es +Ob viously +ĠTechn ical +Ġexceed ed +OR S +Ġextrem ists +Ġtr aces +ex pl +Ġcom r +ĠS ach +) / +Ġm asks +Ġsc i +B on +Ġreg ression +we gian +Ġadvis or +it ures +ĠV o +ex ample +ĠInst ruct +Ġs iege +Ġredu ctions +pt r +Ġstat utory +Ġrem oves +Ġp uck +red its +Ġbe e +Ġsal ad +Ġpromot ions +ĠJosh ua +with standing +ET H +ĠCh a +im us +Ġexpend iture +aun ting +Ġdelight ed +Ġ15 5 +be h +Ġcar pet +ĠSp art +Ġj ungle +l ists +Ġbull ying +ĠNob el +ĠGl en +Ġreferen ced +Ġintrodu ces +se in +Ġcho pped +gl ass +ĠW rest +Ġneutral ity +Ġâ Ļ +Ġinvestig ator +Ġshel ves +Ġun constitutional +Ġreprodu ction +Ġmer chant +m ia +Ġmet rics +Ġexplos ives +ĠSon ia +Ġbod ily +Ġthick ness +Ġpredomin antly +ĠAb ility +Ġmon itored +IC H +Ġ] . +ĠMart inez +Ġvis ibility +Ġqu eries +Ġgen ocide +ĠWar fare +Qu ery +Ġstud ios +Ġemb ry +Ġcorrid or +Ġclean ed +com plete +ĠM H +Ġenroll ment +ING S +Ġimpact ed +Ġdis astrous +ĠY un +ĠCl aire +ĠBas ically +y t +uster ity +Ġindirect ly +w ik +Ġd od +ĠCar r +Ġam p +Ġprohib it +ĠIn itial +ĠR d +ij i +Ġeduc ate +c orn +i ott +ĠBeaut y +Ġdetect ive +ĠCon n +s ince +Ġst agger +Ġob ese +Ġb ree +olog ic +is se +walk er +Ġbl ades +Ġlaw ful +fun c +ĠBeh ind +Ġappet ite +Ġ( * +Ġt ennis +Ġoff spring +Ġj ets +Ġstruct ured +Ġafore mentioned +N ov +Ġsc aling +f ill +Ġst ew +Ġcur b +ĠStep han +ed In +S F +ob ic +é ŃĶ +ou g +ĠM M +Ġgen etically +ope z +13 6 +Ġu mb +anc ers +Ġcoh ort +Ġmerch andise +Ġimp osing +ĠLegisl ature +ĠArch ive +iv ia +ĠN aval +Ġoff ences +Ġmir acle +Ġsn apped +Ġf oes +Ġextensive ly +ĠR af +Ġc ater +ed ience +K it +ĠB in +Ġrecomm ends +ĠC ities +Ġrig id +ĠRE AD +ĠNob le +ĠT ian +Ġcertific ates +ant is +o iler +ĠBudd hist +d id +Ġsurvey ed +Ġdown ward +Ġprint s +ĠMot ion +ron ics +ĠS ans +oss ibly +u ctions +Ġcolon ies +ĠDan ish +un it +Ġsp oil +Ġadvis ory +ber ries +Pl an +Ġspecific ation +op hers +ĠRes ource +Ġsh irts +prising ly +commun ications +Ġtriv ial +Ġmention ing +ise xual +Ġsupp lements +Ġsuper vision +B P +v or +Ġw it +Ġco oldown +Ġplaint iff +ĠReview s +ĠS ri +ĠM int +ĠSug ar +Ġafter ward +ĠPri est +ĠInvest ment +og ene +ĠT aking +Ġstretch ing +Ġinflamm ation +ĠTe hran +Ġl ining +Ġfree zing +ĠEnt ity +Ġins piring +spe cial +pr ice +Ġsu e +ĠP orter +oun ge +ET A +ĠD erek +ĠLu is +u o +ym ph +Ġex terior +ih il +ĠAsh ley +in ator +Ġnut rients +ĠTh rones +Ġfin ances +ĠIn spect +Ġspe cially +ĠRequ ired +ĠP TS +ĠViol ence +oint ed +sh ots +Ġex cerpt +co on +IN S +ĠG ri +Ġrecogn ised +We ek +You ng +Ġv om +is le +ĠCur ry +ĠBudd h +Ġnot ebook +Ġd urable +/ ? 
+ĠG ad +ĠP upp +Ġforg ive +p ark +Ġpersonal ities +an alysis +cl amation +Ġelev ator +Ġware house +ĠR ole +un n +Ġillust ration +ĠSc an +Ġatmosp heric +Im port +AN C +rict ed +f u +01 0 +Ġar che +Ġreward ed +akespe are +Ġintern ally +ĠR BI +alk er +Ġeleph ant +ow itz +ĠP izza +Ġbip artisan +é s +Ġslow ed +ĠSt ark +Ġover ride +OU S +Ġ3 20 +undred s +ĠDe ck +ĠC ensus +be e +14 6 +ot or +Ġ ip +Ġu b +oc ations +ĠBut ton +r ice +Ġc ripp +ff f +Ġorig inated +Ġoverwhel med +app a +Ġfore most +âĢ ij +ĠL EG +re lease +eat ured +at ches +Ġre ps +Ġl ending +ĠRe ference +ĠCl ient +16 5 +vent h +Com plete +ĠPat rol +Ġsw orn +c am +Ġshut tle +ĠR alph +Ġh ometown +- , +on al +ĠB P +å ı +Ġpersu ade +ĠAlex and +Ġcomb ines +Ġv ivid +ĠL ag +Ġenc oding +Ġsal vation +w en +ĠRec overy +i ya +Un iversity +ĠB iden +Ġbud gets +ĠTex ans +f its +Ġhon ored +Ġp ython +T D +## # +cl one +Ġbl ink +ĠL iquid +Ġunemploy ed +Ġcl ashes +ĠCoun sel +Ġdirect ing +Ġpun ct +ĠFal cons +Ġsh ark +ĠDam ascus +Ġje ans +Ġemb ark +Ġse ize +Ġup wards +2 80 +ĠE z +ĠAny thing +Ġex otic +l ower +ĠCreat or +ĠU m +Ġsubur bs +ber ger +ĠW end +Ġm int +ĠX X +ĠD ro +Ġsuff ers +Ġher b +t ree +Ġfrag ile +Ġflood ed +ĠAl cohol +ole an +ny der +ĠK O +F ram +Ġ13 6 +Ġow ed +ĠMe lee +ĠH ash +Ġwh isk +Ġsu do +r r +Qu ick +app ro +Ġi i +ĠEx amples +he e +Ġpromot es +per ature +k ar +ĠHon or +Ġs odium +ĠL if +ros so +intend ent +Ġcorrespond ent +F ound +sec ret +Ġident ifies +ag ne +Ġl ou +ĠP P +Ġcoinc idence +m ove +Ġmilit ia +Ġinf iltr +ĠPrim ary +Ġpitch ing +ĠI b +ĠGO OD +ãĤ ¸ +ĠW izards +ir al +ĠVen us +R R +ĠâĢ ķ +ĠCase y +Ġsad ly +Ġadm ire +Ġembarrass ed +c b +M el +Ġtub es +Ġbeaut ifully +ĠQueens land +Bel ow +re z +qu et +ple asant +Ġ « +C amp +Ġdec isive +19 98 +ĠL amb +ut ton +h n +ĠJ agu +au nder +ĠC ord +Ġcl erk +Ġca ffe +Ġwip ed +Ġre im +ĠMount ains +Ġimprison ed +Ġdevelop s +ĠP ra +Ġmodel ing +Any one +ance l +ĠS it +Ġshield s +Ġl awn +Ġcard iovascular +Ġdemonstr ating +Ġpar se +ĠIsrael is +Ġeuro s +14 3 +Ġgl orious +ins ki +ec d +Ġcondition ing +Ġhel pless +Ġmicro sc +ĠHar bor +Ġst akes +Ġ2 60 +Ġun equ +ĠFl oyd +Ġd amp +Ġappar atus +ĠLaw s +Ġcoun ters +Ġindu ce +at able +ĠAh med +Ġsl am +N ovember +Ġpers ist +Ġim minent +á n +Ġsh red +Ġph ases +ĠEd monton +ĠArm strong +ĠMe et +ĠK itty +Ñ Ģ +c irc +ĠAd ult +Ġa rose +ĠX en +D an +g ow +Ġsuper f +ĠAd mir +Ġend ure +Ġkey word +yr us +Ġy arn +Ġpath way +ĠHop kins +mid t +Ġcens orship +d ependent +Ġinstruct or +S ources +Ġto e +Ġball oon +N ob +Ġsw ear +ĠCast ro +Ġgl oss +ĠK avanaugh +Ġremark ably +Ph otos +ĠN om +ĠS outheast +y ers +Ġvalid ation +Ġcann on +ĠVict ory +ĠPier re +Ġcaut ious +Aud io +Ġf etch +ĠG ift +ĠH yp +Ġrem edy +Z E +Ġsc ent +Ġbe ard +ĠR ut +- " +Ġpat ents +H y +Ġun just +Ġpot ato +Ġforth coming +Ġche f +ĠR ift +aff e +ĠR OM +ĠL aunch +Ġp ads +ĠNe o +Ġon set +Ġsquee ze +s afe +Ġpref ix +ĠT M +ĠN early +ĠClin ical +ĠM ental +ot iation +ĠUn ic +ant ry +ĠC ir +Ġep it +à ¦ +Ġextract ed +verse ly +ri ad +Ġstr ains +Ġto ps +Ġpo em +ĠRand y +ĠMap le +TH ER +up iter +ĠSS D +ļ é +Ġun con +per ing +Ġsle pt +in ers +Ġunder water +ĠEv idence +g one +20 5 +Ġhistor ians +Ġsynt hesis +Ġf rog +b asketball +Ġvibr ant +Ġsub ord +Ġ3 65 +ĠD ial +Ġcooper ate +HA HA +Ġgreet ed +15 8 +Ġj azz +Ġinto x +ĠWalk ing +Ġsuper visor +ĠF usion +ĠMer cedes +s end +H am +s d +n l +Ġtour s +ĠF IFA +Ġcul p +g d +30 4 +Ġple as +Ġillust rates +ĠColomb ia +Ġhighlight ing +ĠSum mary +Ġexp osing +ĠD ru +Ġir ony +r itional +ĠCar roll +ĠEll is +P ict +ĠR apt +Ġad apter +Ġun m +Ġcor pse +Ġceleb rities +D en +at um +ĠAp 
ocalypse +ĠW ag +lin ing +Ġhorm ones +R ub +ĠX i +ĠV aults +20 8 +alky rie +inos aur +Ġfeed s +v ity +Ġdefe ating +W ait +Ġemphas ize +ĠSteel ers +yr inth +le ys +ĠWhe never +Current ly +ĠCl ock +Ġcollect ively +any on +ĠJ P +Ġment ality +Ġdownload s +Ġsurround ings +ĠBarn es +Ġflags hip +Ġindic ators +Ġgra pp +Jan uary +ĠElement al +ĠAthen a +ib al +Ġs ights +Ġcap ita +ĠTreat y +Ġvo iced +ĠG az +let te +Ġy a +Ġexp ired +Leg end +H ot +n ature +Ġunst able +Ġ2 80 +à º +Com ment +AL E +Ġquest s +Ġhand ler +n is +Ġvers atile +Ġconce al +enge ance +ĠInter active +Ġobs essed +ĠDog s +Ġcr acked +S ound +s v +ĠD ylan +ro ads +f x +ĠCath olics +ĠH ag +Ġsl ammed +Ġgl owing +s ale +Ġtiss ues +ĠCh i +ne e +Ġc her +s ic +ur rection +Ġb acon +ul atory +) ." +Ġir regular +FOR M +ass ed +Ġintention al +Ġcompens ate +ĠSpe aking +ĠS ets +15 3 +Ġconvent ions +b ands +em ade +Ġe cc +ĠWin ston +ĠAssass in +ĠBelg ian +Ġdepend ence +Ġnic he +Ġb ark +ĠJ azz +Ġdisadvant age +Ġgas oline +Ġ16 5 +çļ Ħ +ess a +mod ule +ang ular +O Y +ĠTreat ment +it as +ol ation +ĠArn old +Ġfe ud +ĠN est +Ġthe atre +ew ater +Ġmin ors +olic y +ĠH aven +div ision +Ġtr unk +F ar +ĠP ull +Ġcapt uring +Ġ18 00 +ĠTe en +Ġex empl +Ġclin ics +ĠB urg +Ġsubst it +Ġpay load +ĠL av +ĠT roy +ĠW itness +Ġfrag ments +Ġpass words +Ġg ospel +ĠG in +Ġten ants +ol ith +S ix +Pre vious +ĠAg es +ĠDar win +Ġbl at +Ġem pathy +sm ith +b ag +ĠE cho +ĠC amb +ĠM add +ĠB oo +Ġred e +ĠBurn ing +Ġsmooth ly +ĠAd rian +ĠV ampire +ĠMon sters +ste am +Sty le +M a +re a +ĠD war +aly st +urs or +Ġelim ination +Ġcrypt o +ch t +ĠE ternal +âĢ¦ ] +ĠS orce +I ll +N ER +Ġu h +Con clusion +w age +Ġresp ir +Ġrem inis +het ical +Ġg y +Ġutil ized +ic idal +Ġ19 00 +Ġhun ters +ĠSw an +ĠRe act +Ġvis itor +ĠThanks giving +30 8 +Post s +Ġh ips +19 97 +om ers +Ġkn ocking +ĠVeh icle +Ġt il +Ġ13 8 +Ġm i +ĠInvest igation +ĠKen ya +Ġcas ino +Ġmot ives +Ġreg ain +re x +Ġweek ends +Ġstab bed +bor o +Ġexplo ited +ĠHA VE +ĠTe levision +c ock +Ġprepar ations +Ġende av +ĠRem ote +ĠM aker +ĠPro du +ĠEv an +Ġinform ational +ĠLouis ville +15 4 +ĠDream s +Ġpl ots +ĠRun ner +Ġhur ting +Ġacad emy +ĠMont gomery +n m +ĠL anc +ĠAl z +2 10 +el ong +Ġretail er +Ġar ising +Ġrebell ion +Ġbl onde +play ed +Ġinstrument al +C ross +Ġret ention +Ġtherape utic +Ġse as +Ġinfant ry +ĠCl int +Ġprompt ing +Ġbit ch +Ġst ems +ĠK ra +Ġthe sis +ĠB og +ru ed +Ġk ings +Ġcl ay +ific ent +ĠY ES +ĠTh ing +ĠCub s +vey ard +els h +in arily +ĠE y +ĠRoll ing +Ġev olving +Ind ia +Ġrecogn izes +Ġgrad uation +is ers +Ġfert ility +ĠMil an +Comm and +Ġbox ing +Ġ19 43 +Ġgl uten +ĠEm ir +Ġid ol +Ġcon ceived +ĠCre ation +Mer it +udd y +uss ions +ĠLie utenant +iet al +Ġunch anged +ĠSc ale +ĠCrime a +ball s +ator ial +Ġdepth s +Ġempir ical +Ġtrans m +Ġuns afe +miss ible +com fort +15 6 +Ġmechan ic +00 2 +l ins +Ġsm oked +P os +Ġslow ing +Ġl av +Tex as +Ġche ating +ĠMet ropolitan +eth yl +Ġdiscover ing +as se +Ġpen cil +ĠPy ongyang +Ġclos et +ĠShe et +ĠEnt ry +ou stic +Ġmy st +er ate +ari at +Ġminer als +Ġmusic ian +ĠP ul +ĠM az +24 9 +Ġper missions +Ġ iv +en ary +ick ers +ĠB ing +he a +en able +Ġgri ev +Ġassert ed +ĠColon el +Ġaff idav +w o +Ġse ated +ĠR ide +Ġpaint ings +ĠP ix +Ġ13 7 +ish i +umb ai +g otten +ĠEar l +Ġin ning +Ġc ensus +Ġtrave lled +ĠCons ult +18 5 +b ind +Ġsimpl icity +Ġoverlook ed +ĠHelp ful +Ġmon key +Ġoverwhelming ly +Bl ood +ĠFl int +ĠJ ama +ĠPres ent +ĠR age +ĠT A +pt ive +Ġturn out +w ald +ĠD olphins +ĠV PN +Ġon ion +Ġcraft ing +m ma +ĠMerc ury +Ġarr ange +Ġalert s +ĠO T +zb ollah +Ġg ases +ĠRichards on +s al +l ar 
+Ġfro st +Ġlower ing +Ġacc laim +Ġstart ups +ĠG ain +ess ment +Ġguard ian +äº º +ĠP ie +ĠL inks +Ġmer its +Ġaw ake +Ġparent al +Ġexceed s +Ġid le +ĠPil ot +Ġe Bay +ĠAc cept +ipe g +C am +ĠK ot +Ġtrad ers +olit ics +unk er +ĠP ale +os i +an mar +Ġ19 47 +ĠF ell +est ial +it ating +G F +ĠS r +if ted +Ġconnect or +ĠB one +ill es +2 60 +h ma +Ġoverl ap +ĠGit Hub +Ġclean er +ĠBapt ist +ĠW AS +Ġlung s +Ñ ģ +ĠB UT +Ġc ite +Ġpit ched +reat ment +Ġtro phies +ĠN u +38 6 +ĠPr ide +Ġattend ees +[ ] +17 9 +Ġspat ial +Ġpri zes +ĠRel igion +Ġshow case +ĠC ategory +vid ia +T arget +Pro perty +? , +Ġf usion +p ie +ĠU CLA +Ġsound track +Ġprin cess +ĠC aval +sh ould +Ġlim bs +Back ground +Ġlone ly +Ġc ores +ĠT ail +she et +Ġ13 2 +R a +ãĤ « +ĠB olt +Ġbook ed +Ġadmin ister +Ġequ als +w y +Ġobserv ing +ĠBar on +ĠAd obe +Ġv irgin +ĠSocial ist +M ove +gh azi +ĠLind a +2 12 +Ġbre wing +Ġmerch ants +bur se +Ġdiv or +Ġmet als +ĠN er +Ġsum s +ĠEn emy +Ġen vision +Ġgrant ing +ĠH oney +ĠSk yrim +Ġsoc io +gr aded +Ġselect ive +W ASHINGTON +Ġ19 48 +ĠSir ius +ĠG ross +act ivity +ĠI van +Ġfur ious +BS D +ĠPre vious +Ġrespons ive +Ġchar itable +Ġle aning +ĠP ew +Ġviol ates +\\\\ \\\\ +ĠCom ing +w ire +Ġpo et +Ġres olutions +comm and +ĠPortug uese +Ġnick name +Ġde af +Feb ruary +Ġrecogn ise +Ġentire ty +Ġseason al +pl aced +ĠTe legraph +Ġmicro phone +our ing +Ġgr ains +Ġgovern ed +Ġpost p +ĠW aters +in ement +Ġund ocumented +ĠCom cast +Ġf ox +Ġassault s +re on +man y +ĠJen kins +ĠAny way +Ġassess ments +Ġdown s +ĠM ouse +Ġsuper b +k t +ĠD ow +Ġtax ation +4 01 +Ġsm iles +Ġundert aken +Ġex h +Ġenthusi astic +Ġtw ent +Ġgovernment al +Ġautonom y +ĠTechn ologies +ĠCh ain +Ġpreval ent +f b +Ġnic otine +og ram +j ob +Ġawa iting +ĠMen u +Ġdep uties +k ov +ish ops +But ton +ĠShan ghai +Ġdies el +ĠD uck +R yan +ĠPC s +N F +j ury +ent e +Ġinacc urate +edd y +Wh atever +Ġshow c +ĠN ad +od us +et r +Ġplaint iffs +ĠW OR +ĠAss ange +Ġpriv at +Ġpremium s +Ġt am +UR L +Ġel ites +ĠR anger +otten ham +ĠH off +ĠAt hens +Ġdefin ite +Ġs ighed +Ġeven ly +2 11 +ĠAm ber +ak ia +Ġmail ing +Ġcr ashing +ĠConfeder ate +ru gged +W al +ĠDep ths +Ġjuven ile +Ġreact or +Introdu ction +ĠDel uxe +19 95 +ĠS anchez +ĠM ead +iv able +: - +ĠPlan ning +ĠT rap +qu in +ĠProt ect +ve red +In formation +Ġkid ney +inn amon +l as +Ġpolic ing +Ġtoler ate +ĠQ i +Ġbi ased +F ort +ĠK i +s ave +Ġprivile ged +Ġbe asts +ĠGl as +ĠC inem +Ġcome back +Sund ay +Ġext inction +h ops +Ġtrans mit +Ġdoub les +ĠFl at +16 7 +Ġdis puted +Ġinjust ice +f oo +V ict +role um +ĠJul ie +Con text +ĠR arity +iss ue +Comp onent +Ġcounsel ing +an ne +d ark +Ġobject ions +u ilt +Ġg ast +Ġpl ac +Ġun used +ãĥ ĩ +ĠT rial +ĠJ as +hed ral +ob b +Ġtempor al +ĠPR O +ĠN W +ĠAnn iversary +L arge +Ġther m +Ġd avid +Ġsystem ic +ĠSh ir +m ut +ĠNe pt +add ress +Ġscan ning +Ġunderstand able +Ġcan vas +C at +ĠZ oo +Ġang els +L O +ĠStat ement +ĠS ig +ov able +ĠA way +sh aring +ocr ats +st ated +Ġweigh ing +N or +w ild +B ey +Ġaston ishing +ĠReyn olds +Ġop ener +Ġtrain er +Ġsurg ical +p n +Ġadjust ing +whe el +Ġf rown +erv ative +Ġsusp end +With in +te in +Ġobst acle +Ġliber ties +ym es +Ġur anium +ans om +an ol +ub a +ĠL oss +Ġa rous +ĠHend erson +W ow +s pl +c ur +ĠÂ Ń +Ġtheir s +Dam age +Ġdownload ing +Ġdisc ern +ĠSt o +ĠFl a +Ġh ath +ĠA j +Ġun pleasant +Europe an +exp ensive +Ġscreens hot +ĠU V +Ġall ied +ĠPers ian +Ġmonop oly +Ġat om +ĠReds kins +"> < +Ġcan cell +Ġcinem a +13 1 +f air +ĠAlf red +Ġd uck +arg s +22 3 +ĠIS I +Ġsign aling +in ar +Ġlaugh s +Ġfor wards +Ġreck less +Ġlisten ers +at ivity +Ġvast ly +n 
ant +L ess +ĠHun ting +ĠScient ific +IT ED +Ġkn ight +ĠH TC +us a +t mp +Ġr ude +ĠLegend ary +Ġar ises +B ad +ĠCl aim +pe g +Ġreal ities +Th ink +Ġ ° +Ġro de +Ġstri ve +Ġan ecd +Ġshort s +Ġhypot hes +Ġcoord inated +ĠGand hi +ĠF PS +R ED +Ġsuscept ible +Ġshr ink +ĠCh art +Hel p +Ġ ion +de ep +rib es +ĠK ai +ĠCustom er +Sum mary +Ġc ough +w ife +Ġl end +Ġposition ing +Ġlot tery +ĠC anyon +Ġf ade +Ġbron ze +ĠKenn y +Ġbo asts +ĠEnh anced +rec ord +Ġemer gence +Ġa kin +ĠB ert +it ous +âĸ ij +Ġst ip +Ġexch anged +om ore +als h +Ġreserv oir +Ġstand point +W M +Ġiniti ate +Ġdec ay +Ġbrew ery +Ġter ribly +Ġmort al +lev ard +Ġrev is +N I +el o +Ġconf ess +ĠMS NBC +Ġsub missions +Cont roller +Ġ20 2 +ĠR uth +} ); +ĠAz ure +Ġ ." +20 6 +ĠMarket ing +Ġl aund +ien cies +Ġrenown ed +ĠT rou +ĠN GO +ble ms +Ġterr ified +Ġwar ns +Ġper t +Ġuns ure +4 80 +ale z +ult z +ĠOut side +Ġst yl +ĠUnder ground +Ġp anc +Ġd ictionary +Ġf oe +rim inal +ĠNor wegian +Ġj ailed +Ġm aternal +é e +ĠLu cy +c op +Ch o +Ġuns igned +ĠZe lda +ĠIns ider +ĠContin ued +Ġ13 3 +ĠNar uto +ĠMajor ity +16 9 +ĠW o +ãĤ ĵ +Ġpast or +Ġinform al +Ð ½ +an throp +jo in +ãģ Ĺ +it ational +N P +ĠWrit ing +f n +ĠB ever +19 5 +Ġy elling +Ġdr astically +Ġe ject +Ġne ut +Ġth rive +ĠFre qu +ou x +Ġpossess es +ĠSen ators +ĠD ES +ĠSh akespeare +ĠFran co +ĠL B +uch i +Ġinc arn +Ġfound ers +F unction +Ġbright ness +ĠB T +Ġwh ale +ĠThe ater +m ass +ĠD oll +S omething +Ġecho ed +ĠHe x +c rit +af ia +Ġgodd ess +Ġele ven +ĠPre view +ĠAur ora +Ġ4 01 +uls ive +ĠLog an +in burgh +ĠCent ers +ĠON LY +ĠA id +Ġparad ox +Ġh urd +ĠL C +D ue +c ourt +Ġoff ended +Ġeval uating +ĠMatthew s +Ġto mb +Ġpay roll +Ġextra ction +ĠH ands +if i +Ġsuper natural +ĠCOM M +] = +dog s +Ġ5 12 +ĠMe eting +Rich ard +ĠMax imum +Ġide als +Th ings +m and +ĠReg ardless +Ġhum ili +b uffer +L ittle +ĠD ani +ĠN ak +Ġliber ation +ĠA be +ĠO L +Ġstuff ed +ac a +ind a +raph ic +Ġmos qu +Ġcampaign ing +Ġoccup y +S qu +r ina +ĠW el +ĠV S +Ġphys ic +Ġp uls +r int +oad ed +ET F +ĠArch ives +Ġven ues +h ner +ĠTur bo +Ġl ust +Ġappeal ed +que z +il ib +ĠTim othy +Ġo mn +d ro +Ġobs ession +ĠSav age +19 96 +Gl obal +J es +2 14 +Ġsl iding +Ġdisapp ro +ĠMag ical +Ġvolunt arily +g b +ane y +Ġprop het +ĠRe in +ĠJul ia +ĠW orth +aur us +Ġb ounds +ie u +)) ) +Ġcro re +ĠCitiz en +S ky +Ġcolumn ist +Ġseek ers +ond o +IS A +ĠL ength +Ġnost alg +Ġnew com +Ġdet rim +ent ric +3 75 +ĠG E +Ġaut op +Ġacadem ics +App Data +ĠS hen +Ġid iot +ĠTrans it +Ġteasp oon +W il +K O +ĠCom edy +> , +Ġpop ulated +W D +Ġp igs +ĠO culus +Ġsymp athetic +Ġmar athon +19 8 +Ġseiz ure +s ided +Ġd op +irt ual +L and +ĠFl oor +osa urs +... 
] +Ġl os +Ġsubsid iary +E Y +ĠPart s +ĠSt ef +ĠJud iciary +Ġ13 4 +Ġmir rors +Ġk et +t imes +Ġneuro log +Ġc av +ĠGu est +Ġtum or +sc ill +ĠLl oyd +E st +Ġcle arer +Ġstere otypes +Ġd ur +not hing +Red dit +Ġnegoti ated +---------------- -------- +23 5 +Ġfl own +ĠSe oul +ĠRes ident +ĠS CH +Ġdisappear ance +ĠV ince +g rown +Ġgrab s +r il +ĠInf inite +ĠTw enty +Ġpedest rian +Ġjer sey +ĠF ur +ĠInf inity +ĠEll iott +Ġment or +Ġmor ally +Ġob ey +sec ure +iff e +Ġantib iotics +ang led +ĠFre eman +ĠIntrodu ction +J un +Ġm arsh +ic ans +ĠEV ENTS +och ond +W all +icult y +Ġmisdem eanor +Ġl y +Th omas +ĠRes olution +Ġanim ations +ĠD ry +Ġinter course +ĠNew castle +ĠH og +ĠEqu ipment +17 7 +Ġterrit orial +Ġarch ives +20 3 +Fil ter +ĠMun ich +Ġcommand ed +ĠW and +Ġpit ches +ĠCro at +Ġrat ios +ĠM its +Ġaccum ulated +ĠSpecific ally +Ġgentle man +acer b +Ġp enn +Ġa ka +ĠF uk +Ġinterven e +ĠRef uge +ĠAlz heimer +Ġsuccess ion +oh an +d oes +L ord +Ġsepar at +Ġcorrespond ence +Ġsh iny +P rior +Ġs ulf +Ġmiser able +Ġded ication +( ). +Ġspecial ists +Ġdefect s +ĠC ult +ĠX ia +Ġje opard +ĠO re +Ab ility +Ġle ar +Ġamb itions +ĠB MI +ĠArab s +Ġ19 42 +Ġpres ervation +ific ate +Ġash amed +l oss +ĠRest aur +Ġrese mble +Ġen rich +ĠK N +ĠCl an +fl oat +Ġplay able +IT T +Ġharm ony +arr ison +ĠWe instein +w ere +Ġpoison ing +ĠCom put +ĠWord Press +m ajor +ĠVal ve +F an +ĠTh row +ĠRom ans +ĠDep ression +ad os +Ġtort ured +Ġbal ancing +bott om +Ġacqu iring +ĠMon te +ard i +Ġa ura +Ġ# # +ĠStand ing +ĠAtl as +C F +Ġintr ins +ĠBen ghazi +Ġcamp ing +Ġt apped +bl ade +st rous +ĠR abb +ĠW ritten +t ip +ĠNe igh +ster dam +ĠAll ow +ĠHe aling +ĠR hod +n um +Ġcaffe ine +ĠPer cent +Ġbo o +Ġapp les +30 5 +Ġwel coming +Ġappl aud +Ġa usterity + ± +ĠRe ality +ef e +å ® +Ġsu cks +Ġtab s +ĠPay Pal +Ġback pack +Ġgif ted +abul ary +ĠSc out +ir teen +Ġch in +Ġo mitted +Ġnegative ly +Ġaccess ing +ĠE arn +Ġambul ance +Ġhead phones +Ġ20 5 +ĠRef resh +p resident +ĠKit chen +ĠEnt ered +ĠS nyder +00 5 +om ical +Ġborrow ed +ĠN em +Ġav iation +Ġst all +rim ination +Ġuniform s +it ime +ĠSim mons +ener gy +ab lished +y y +qual ified +Ġrall ies +ĠSt uart +fl ight +Ġgang s +r ag +Ġv ault +lu x +ĠCom par +Ġdesign ation +20 9 +ĠJ os +d ollar +z ero +Ġwell s +30 3 +Ġconstitu ents +Ġhe ck +Ġc ows +Ġcommand ers +Ġdifferent ial +ĠC atherine +29 9 +Ġval ve +Ġbr ace +Ġperspect ives +c ert +f act +icular ly +ĠMc N +pl anes +Ġint ric +Ġpe as +ov an +Ġtoss ed +ret ch +ĠL opez +Ġunf amiliar +de ath +ĠA part +ĠCh ang +Ġrelie ved +rop he +Ġair ports +Ġfre ak +ut il +M ill +ĠCh in +ĠOw en +m ale +ĠBro ken +ĠWind s +ro b +r ising +Ġfire fighters +Ġauthor itarian +Ġ14 8 +Bit coin +ex ternal +Ġbrow sers +iche ver +or ian +Ġun b +Ġpo ke +ĠZ ot +M id +ĠPop ular +Ġco vert +Ġcont ributes +Ġ6 50 +Ġcont ention +G ate +Ġcons oles +Ġchrom os +ĠI X +Ġvis ually +ĠE isen +Ġjewel ry +Ġdeleg ation +Ġacceler ate +ĠR iley +Ġsl ope +Ġind oor +it ially +Ġhuge ly +Ġtun nels +Ġfin ed +Ġdirect ive +Ġfore head +ustom ed +Ġsk ate +Mus ic +g as +Ġrecogn izing +am bo +Ġover weight +ĠGr ade +Ù Ĭ +Ġsound ing +Ġlock ing +ĠR EM +St ore +Ġexc av +ĠLike wise +ĠL ights +Ġel bow +ĠSupp ly +w ic +Ġhands ome +19 94 +C oll +Ġadequ ately +ĠAssoci ate +Ġstri ps +Ġcrack down +Ġmar vel +ĠK un +Ġpass ages +@@ @@ +ĠT all +Ġthought ful +names e +Ġprost itution +bus iness +Ġball istic +person al +c ig +iz ational +R ound +ĠÂłĠÂł ĠÂłĠÂł +ĠCole man +Ġadm itting +ĠPl ug +Ġbit coins +ĠSu z +Ġfair ness +Ġsupp lier +Ġcatast rophic +ĠHel en +o qu +M arc +ĠArt icles +g ie +Ġend angered +Ġdest iny +ĠVol t +ol ia +ax is 
+Ġche at +Ġun ified +IC O +qu ote +30 2 +ĠS ed +Ġsupp ression +Ġanaly zing +Ġsqu at +Ġfig uring +Ġcoordin ates +Ġch unks +Ġ19 46 +Ġsub p +Ġw iki +ĠFor bes +ĠJ upiter +ĠE rik +im er +ĠCom mercial +\ ) +Ġlegitim acy +Ġd ental +ĠMe an +Ġdefic its +5 50 +Orig inally +ĠHor ror +Ġcontam ination +ll ah +Ġconf isc +ĠCl are +T B +ĠF ailed +an ed +Ġrul er +ĠCont roller +Ġfemin ists +F ix +g ay +20 7 +Ġr abbit +Th ird +ownt own +Ġgl ue +Ġvol atile +Ġsh ining +Ġf oll +Ġimp aired +Ġsup ers +æ Ī +Ġcl utch +ļé ĨĴ +Ġpro let +Ġ( ! +Ġy elled +ĠK iev +ĠEr n +ĠSh ock +K B +Ġsit uated +qu ery +ĠN as +Ġan nex +char acter +ĠHol iday +Ġautom ation +ĠJ ill +ĠRem astered +Ġl inem +Ġwild erness +ĠHor izon +ĠGu inea +A Z +Ġmain land +Ġsec recy +LE ASE +Ġp unk +ĠProv ince +( ), +Spe ed +Ġhand ing +ĠSeb ast +S ir +r ase +Ġj ournals +Ġcon gest +ĠT ut +ir rel +Ġschizophren ia +Ġmis ogyn +health y +I ron +Ġreact ed +- $ +25 2 +Ġpl ural +Ġpl um +Ġbarg ain +Ġground ed +f inder +Ġdis se +ĠL az +O OD +Ġat roc +F actory +Ġmin ions +Ġo ri +ĠB rave +ĠP RE +ĠMy anmar +ĠH od +Ġexped ition +Ġexpl ode +ĠCo ord +Ġext r +ĠB rief +ĠAD HD +Ġhard core +feed ing +Ġd ile +ĠF ruit +Ġvacc ination +ĠM ao +osp here +Ġcont ests +- | +Ġf ren +isp here +R om +ĠSh arp +ĠTre nd +Ġdis connect +âĢ¢ âĢ¢ +Ġper secution +Ear th +Ġhealth ier +38 4 +Ġc ob +ĠTr inity +OW S +AN N +Ġspecial ty +Ġg ru +Ġcooper ative +wh y +Start ing +ĠIss ues +st re +ens or +Ġ18 5 +Ad v +! ? +ĠRe vel +em ia +ĠH ulk +Ġcelebr ations +ĠS ou +ra ud +ĠKle in +Ġun real +con text +Ġpartners hips +Ġadop ting +t ical +Ġspl ash +ĠHe zbollah +c ategory +cycl op +xt on +ĠD ot +urd y +t z +Ġenvelop e +ĠN L +â ķ +Ġwhere in +Spe c +18 4 +Ġte lev +al iation +Ġmyth s +å ° +Ġrig orous +Ġcommun icating +Ġobser ver +Ġre he +ĠW ash +Ġapolog ized +ĠT in +Ġexpend itures +work ers +d ocument +Ġhes itate +ĠLen in +Ġunpredict able +Ġrenew al +cl er +ok ia +ĠCON T +Ġpost season +Tok ens +Ġex acerb +Ġbet ting +Ġ14 7 +Ġelev ation +W ood +ĠSol omon +19 4 +00 4 +out put +Ġredu nd +ĠM umbai +Ġp H +Ġreprodu ce +ĠD uration +MA X +Ġb og +C BS +ĠBal ance +ĠS gt +ĠRec ent +Ġc d +Ġpo pped +Ġincomp et +pro p +ay an +g uy +Pac ific +Ġty r +Ġ{ { +ĠMy stic +ĠD ana +Ġmast urb +Ġge ometry +à ¢ +ĠCor rect +Ġtraject ory +Ġdistract ed +Ġf oo +ĠW elsh +L uc +m ith +Ġrug by +Ġrespir atory +Ġtri angle +Ġ2 15 +Ġunder graduate +ĠSuper ior +ch anging +_ - +Ġright ly +Ġrefere e +Ġluc rative +Ġun authorized +Ġresemb les +ĠGN U +ĠDer by +Ġpath ways +ĠL ed +Ġend urance +Ġst int +Ġcollect or +F ast +Ġd ots +Ġnational s +ĠSec urities +Ġwh ip +Par am +Ġlearn s +M agic +Ġdetail ing +m oon +Ġbroadcast ing +Ġb aked +26 5 +hol m +ĠS ah +ĠHus sein +ĠCourt esy +17 4 +Ġ14 6 +Ġge ographic +pe ace +Ġjud ging +ĠS tern +B ur +Ġstory line +G un +ĠSt ick +24 5 +30 7 +ãĤ´ ãĥ³ +ĠAdminist rator +Ġbur nt +Ġp ave +ch oes +Ex ec +Ġcamp uses +Res ult +Ġmut ations +ĠCh arter +Ġcapt ures +Ġcomp ares +Ġbad ge +S cient +Ġer ad +ier y +o i +ett es +ĠE state +Ġst rap +Ġproud ly +Ġf ried +Ġwithd rawn +ĠV oy +ph ony +It ems +ĠP ierce +b ard +Ġann otation +ant on +ill on +Im pro +... 
) +Ġhapp ier +---- -- +ad just +Ġstaff ers +Ġactiv ism +Ġper f +Ġal right +N eed +Ġcomm ence +Ġopio id +ĠAm anda +E s +ĠP ars +ĠK aw +W orks +24 8 +Ġind o +t c +end ant +ĠM oto +Ġlegal ization +OT E +Ġtask ed +Ġt sp +ĠACT IONS +16 6 +Ġrefres hing +ĠN R +ĠPere z +Ġinfring ement +S Y +List en +in ning +k u +Ġrot ate +pro gram +ar ah +Des ign +Ġ( £ +Ġst oring +Ġwar rants +Ġjud gement +ĠB rist +us ually +ph oto +ĠR an +ĠP ine +Ġoutrage ous +ĠValent ine +lu ence +ĠEvery body +Al tern +Ġrele vance +Ġtermin ated +Ġd essert +Ġfulf illed +Ġprosecut ed +ĠW ords +Ġm igrant +Ġcultiv ation +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +idel ity +ĠV ern +ĠLog in +Ġmetaph or +ĠT ip +Ġrecru its +ĠP ig +rib ing +Ġenthusi asts +ex per +Ġfright ening +ĠH air +ans on +str ate +Ġh i +He ight +Ġown ing +n one +Ġdis like +Ġkn ives +pher d +Ġloud ly +ĠAP Is +Dis play +ĠL ac +ĠUS S +ab l +ver ages +J ew +Ġ17 2 +ĠHist orical +at oon +ĠPhys ics +in tern +Ġwarm th +Ġto pp +D M +Ġgun man +Ġem peror +od i +ãĥ £ +in atory +ĠR ib +Ġ13 1 +ĠSat urn +ĠSh ining +Ġw aking +Qu otes +Ġcomed ian +en berg + ½ +Ġbelie vers +Ġpaper work +c ustom +Ġle v +Ġl ament +Ġpour ing +22 2 +p olitical +ĠSupp lement +m aid +Ġcruel ty +Ġt read +ys ics +A w +rit es +Ġmod ifier +ĠP osition +Ad am +l b +ub s +Ġimper fect +Ġcl usters +ĠEngine er +ĠC herry +Ġinaug uration +ĠS au +Ġembod iment +ĠUn cle +Ġover r +Ġexplos ions +c ule +ĠPrinc eton +ĠAndre a +Ġincorrect ly +Ġearn est +Ġpil gr +ĠS print +Ġslee ve +Ġhe ars +ĠAm azing +Ġbrow sing +ag in +Ġhom eland +Ġha w +Ġd iving +ist ered +17 8 +Ġbarg aining +ĠArc ade +Ġdeleg ate +ters on +................................ ................................ +ĠJackson ville +27 5 +Ġst agn +Ġad am +ĠSher man +C B +Ġsub urb +ĠFood s +Ġconver ting +ĠAr ist +Ġch ambers +l ove +Ġam ino +ĠG an +Ġmad ness +m c +ĠUS E +def ined +Ġul tr +ind ust +Ġw olves +l ance +Add itionally +Ġcr acks +as ia +ĠRe ason +ĠP ump +Ġaccident al +ĠL aser +ĠR id +Ġinitial ized +ell i +Ġun named +Ġn oun +ĠPass ed +Ġhost age +ĠEth iop +sh irts +Ġun rel +ĠEmb assy +Ġ19 41 +Ġat oms +Ġpur ported +16 4 +ĠF i +Ġgall ons +ĠMon ica +Ġp g +en ment +Ġsort ed +ĠG ospel +Ġhe ights +Ġtr aced +Ġunder going +She ll +Ġs acks +Ġproport ions +Ġhall uc +F ont +ac et +Ġwar mer +ĠIN TER +Ġgrab bing +Pl ug +Ġreal ization +ĠBur ke +Ġen chant +AT ER +ĠSe ed +Ġabund ant +F M +Ġc ivic +V s +is i +Ġv ow +Ġre per +ĠPartners hip +Ġpenet ration +Ġax e +Ġsh attered +ĠZ ombies +Ġv inyl +ĠAl ert +e on +Ġoblig ed +ĠIll ust +ĠPl aza +ĠFront ier +Ġdavid jl +ĠSer ial +ĠH av +ĠNut rition +B i +Ġâĸ Ī +ĠJ ays +lin ux +Ġhur ry +Ġv oy +Ġhop eless +ĠSte alth +Ġ ãģ +ess ors +tt le +b org +ĠSaf ari +f ell +Ġw ary +d ue +ĠAb ove +H a +E LL +Ġnot or +ĠW on +T oo +Ġoccup ations +Ġposs essions +Ġinv iting +Ġpred ators +Ġacceler ated +Ġ15 7 +uter te +ĠC ube +e ast +acc ount +G ive +Ġtrans plant +red ients +id able +Ġscreens hots +ĠG und +ĠF S +Ġtravel ers +Ġsens ory +ĠF iat +ĠRock ets +İ ĭ +_ { +F riend +Ġchar ming +AL S +Ġenjoy ment +m ph +Ġ5 000 +ĠRE G +Ù Ĩ +b ia +Ġcomp ilation +ro st +ĠV P +ĠSch ne +201 9 +Ġcop ying +M ORE +ĠFl ore +f alls +2 15 +t otal +Ġdis ciples +d ouble +Ġexceed ing +Ġsm ashed +Ġconcept ual +ĠRom ania +ĠB rent +ĠI CE +ĠT ou +Ġg rap +Ġn ails +18 9 +ãĥ ĺ +Ġproc ure +e ur +Ġconfir ming +ĠC ec +aw i +ĠEd en +Ġn g +Ġengine ered +at ics +Ġhook ed +Ġdisgust ing +ĠMur der +ãĤ ¿ +L ibrary +Ġ16 8 +Al most +hem atic +Men u +ĠNot re +ĠJ ur +Ġkidn apped +Ġhack er +ĠJ ade +Ġcreep y +Ġdraw ings +ĠSpons or +Ġcycl ists +ĠGob lin +Ġoptim ized +Ġst aged +ĠMc D 
+bet ween +A ge +en o +S ex +ĠW ide +n ings +av is +Ġincap able +ĠK ob +Ġreward ing +ĠL one +oles cent +Ġcontract ed +Ġstick y +J ose +B all +f est +ĠIn put +ĠRec ently +Ġto mat +squ are +App lication +Ġnit rogen +Ġdupl icate +ĠRec on +ĠD ear +L ondon +Ġint ra +Ġd ock +Ġout reach +ĠM illion +Ġmamm als +am pton +V AL +Ġsn aps +Ġd os +ĠWh ole +ĠRead y +T ry +ĠWinn ipeg +ear ance +Ġinc urred +ren ched +ĠNS W +il ot +rain e +Ġc ube +g ot +Ġrun way +etermin ed +ĠHaw ks +Ġsurviv or +ĠW ish +ĠD in +ĠDE F +ĠV ault +18 7 +Ġmush rooms +Ġcris p +be y +ĠDisco very +Ġdevelopment al +Ġparad igm +Ġcha otic +ĠT su +Ġ3 33 +b ons +Ġbacter ial +Ġcomm its +Ġcos mic +Ġme ga +oc ative +ĠP aint +ophob ic +Ġv ain +Ġcar ved +ĠTh ief +ĠG ul +ows hip +Ġc ites +ĠEd inburgh +Ġdimin ished +Ġacknowled ges +ĠK ills +Ġmic row +ĠHer a +Ġsen iors +Ġwhere by +H op +at ron +Ġun available +ĠN ate +Ġ4 80 +Ġsl ated +ĠRe becca +ĠB attery +Ġgram mar +Ġhead set +Ġcurs or +Ġex cluding +any e +aunder ing +eb in +Ġfeas ible +ĠPub lishing +ĠLab s +ĠCl iff +ĠFerr ari +Ġp ac +vis ible +mark ed +pe ll +Ġpol ite +Ġstagger ing +ĠGal actic +Ġsuper st +Ġpar an +ĠOffic ers +ãĢ ģ +Ġspecific s +ul us +23 9 +ĠP aste +AM P +ĠPan ama +ĠDe lete +angu ard +rest rial +Ġhero ic +ĠD y +ا ÙĦ +Ġincumb ent +Ġcr unch +t ro +Ġsc oop +Ġblog ger +Ġsell ers +ure n +Ġmedic ines +ĠC aps +ĠAnim ation +ox y +Ġout ward +Ġinqu iries +22 9 +Ġpsych ologist +ĠS ask +ev il +Ġcontam inated +ãĤ ¨ +he rence +Ġbrand ed +ĠAbd ul +z h +Ġparagraph s +Ġmin s +Ġcor related +er b +Ġimp art +Ġmil estone +ĠSol utions +ot le +Ġunder cover +Ġmar ched +ĠCharg ers +f ax +ĠSec rets +Ġr uth +we ather +Ġfemin ine +Ġsh am +Ġprest igious +igg ins +Ġs ung +hist ory +ett le +gg ie +Ġout dated +ol and +Ġper ceptions +ĠS ession +ĠDod gers +u j +ĠE ND +D oc +Ġdefic iency +Gr and +ĠJ oker +Ġretro spect +Ġdiagn ostic +Ġharm less +Ġro gue +ĠA val +E qu +Ġtrans c +ĠRoberts on +ĠDep ending +ĠBurn s +iv o +Ġhost ility +F eatures +ĵ ĺ +Ġdis comfort +ĠL CD +spec ified +ĠEx pect +3 40 +Ġimper ative +ĠReg ular +Ch inese +Ġstate wide +Ġsy mm +Ġlo ops +Ġaut umn +N ick +Ġsh aping +Ġqu ot +Ġc herry +ĠCross ref +è¦ ļéĨĴ +Stand ard +he ed +ĠD ell +ĠViet namese +Ġo st +ĠV alkyrie +O A +Ass ad +Ġreb ound +ĠTra ffic +pl aces +æ ĺ +ĠB uc +17 2 +Ġshel ters +Ġins isting +ĠCertain ly +ĠKenn eth +ĠT CP +Ġpen al +ĠRe play +he ard +Ġdial ect +iz a +ĠF Y +it cher +ĠD L +Ġspir al +Ġquarterback s +Ġh ull +Ġgo ogle +Ġto dd +ĠSter ling +ĠPl ate +Ġsp ying +mb ol +ĠReal m +ĠPro ced +ĠCr ash +Ġtermin ate +Ġprotest ing +C enter +gu ided +Ġun cover +Ġboy cott +Ġreal izes +s ound +Ġpret ending +ĠV as +19 80 +Ġfram ed +Ġ13 9 +Ġdesc ended +Ġrehab ilitation +Ġborrow ing +ĠB uch +Ġbl ur +R on +ĠFro zen +en za +Ch ief +ĠP oor +Ġtransl ates +M IN +Ġ2 12 +J ECT +Ġerupt ed +Ġsuccess es +S EC +Ġpl ague +Ġg ems +d oms +Ġstret ches +ĠSp y +Ġstory telling +C redit +ĠP ush +Ġtra ction +Ġin effective +ĠL una +Ġt apes +Ġanaly tics +erc ise +Ġprogram mes +ĠCar bon +Ġbeh old +he avy +ĠConserv ation +ĠF IR +Ġs ack +ter min +ric ks +Ġhous ed +Ġunus ually +I ce +Ġexecut ing +ĠMor oc +ed ay +Ġed itions +Ġsm arter +ĠB A +Ġout law +Ġvan ished +ib a +AL SE +ĠSil va +23 8 +C ould +Ġphilos opher +Ġevac uated +Sec ret +14 2 +Ġvis as +ãĤ ¬ +ĠM alt +ĠClear ly +ĠN iger +ĠC airo +ĠF ist +3 80 +ĠX ML +aut o +it ant +Ġrein forced +Rec ord +ĠSurviv or +G Hz +Ġscrew s +parent s +Ġo ceans +ma res +Ġbra kes +vas ive +Ġhell o +ĠS IM +rim p +Ġo re +ĠArm our +24 7 +Ġterr ific +Ġt ones +14 1 +ĠMin utes +Ep isode +Ġcur ves +Ġinflamm atory +Ġbat ting +ĠBeaut iful +L ay +Ġunp 
op +v able +Ġr iots +ĠTact ics +b augh +ĠC ock +Ġorg asm +ĠS as +Ġconstruct or +et z +G ov +Ġant agon +Ġthe at +Ġde eds +ha o +c uts +ĠMc Cl +Ġu m +ĠScient ists +Ġgrass roots +ys sey +"] => +Ġsurf aced +Ġsh ades +Ġneighb ours +Ġad vertis +oy a +Ġmer ged +Up on +Ġg ad +Ġanticip ate +Any way +Ġsl ogan +Ġdis respect +I ran +ĠT B +act ed +Ġsubp oen +medi ately +OO OO +Ġwa iver +Ġvulner abilities +ott esville +ĠHuff ington +J osh +ĠD H +M onday +ĠEll en +K now +x on +it ems +22 8 +Ġf ills +ĠN ike +Ġcum ulative +and als +I r +Ġ ì +Ġfr iction +ig ator +Ġsc ans +ĠVi enna +ld om +Ġperform ers +P rim +Ġb idding +M ur +Ġlean ed +ĠPri x +al ks +Ġ[ âĢ¦] +ĠTw itch +ĠDevelop er +ĠG ir +Ġcall back +Ab stract +Ġacc ustomed +Ġfreed oms +ĠP G +ur acy +Ġl ump +is man +,, ,, +19 92 +ĠR ED +Ġwor m +M atch +ĠPl atinum +I J +ĠOwn er +Tri via +com pl +Ġnew born +Ġfant as +O wn +Ġ19 59 +Ġsymp ath +Ġub iqu +Ġoutput s +Ġal lev +Ġpr ag +K evin +Ġfav ors +Ġbur ial +Ġn urt +so lete +c ache +Ġ15 6 +Ġunl ocks +te chn +M aking +Ġcon quer +ad ic +æ ĸ +Ġel f +Ġelect orate +ĠKurd s +ĠSt ack +ĠSam urai +Ġâ ĺħ +Ġ{ } +ĠS aid +ĠFall out +Ġkind ness +ĠCustom s +ĠBou levard +Ġhelicop ters +ot ics +ĠVe get +com ment +Ġcritic ised +Ġpol ished +ĠRem ix +ĠC ultural +Ġrec ons +Ġdo i +at em +Sc reen +Ġbar red +Com ments +ĠGener ally +Ġsl ap +7 20 +V ari +p ine +Ġem pt +Ġh ats +ĠPlay ing +l ab +a verage +form s +ĠC otton +Ġcan s +ĠD ON +ĠSom alia +C rypt +ĠIncre ases +E ver +mod ern +Ġsur geon +3 000 +Ġrandom ized +================================ ================================ +B ern +im pl +ĠC OR +Ġpro claim +th ouse +Ġto es +Ġam ple +Ġpres erving +Ġdis bel +gr and +B esides +Ġsil k +ĠPat tern +h m +Ġenter prises +Ġaffidav it +ĠAdvis ory +Ġadvert ised +ĠRel igious +se ctions +psy ch +ĠField s +aw ays +Ġhasht ag +ĠNight mare +Ġv ampire +Ġfore nsic +rosso ver +n ar +Ġn avy +Ġvac ant +ĠD uel +Ġhall way +Ġface book +ident ally +ĠN RA +Ġm att +Ġhur ricane +ĠKir by +ĠP uzzle +Ġsk irt +ou st +du llah +Ġanal ogy +in ion +Ġtomat oes +ĠN V +ĠPe ak +ĠMe yer +Ġappoint ments +Ġm asc +Ġal ley +re hend +Ġchar ities +Ġund o +Ġdest inations +ĠTest ing +"> " +c ats +* . 
+Ġgest ures +gener al +Le ague +Ġpack ets +ĠInspect or +ĠBer g +Ġfraud ulent +Ġcritic ize +F un +Ġbl aming +nd ra +Ġsl ash +ĠE ston +Ġpropos ing +Ġwh ales +Ġtherap ist +Ġsub set +Ġle isure +EL D +ĠC VE +ĠAct ivity +Ġcul min +sh op +ĠD AY +is cher +ĠAdmir al +ĠAtt acks +Ġ19 58 +Ġmem oir +Ġfold ed +Ġsex ist +Ġ15 3 +ĠL I +Ġread ings +Ġembarrass ment +ĠEmploy ment +w art +ch in +Ġcontin uation +l ia +Rec ently +Ġd uel +Ġevac uation +ĠKash mir +Ġdis position +ĠR ig +Ġbol ts +Ġins urers +4 67 +M ex +Ġret aliation +Ġmis ery +Ġunre asonable +r aining +I mm +ĠP U +em er +Ġgen ital +ãĤ ³ +ĠC andy +Ġon ions +ĠP att +lin er +Ġconced ed +Ġf a +Ġfor c +ĠH ernandez +ĠGe off +deb ian +ĠTe ams +Ġc ries +Ġhome owners +23 7 +A BC +Ġst itch +Ġstat istic +Ġhead ers +ĠBi ology +Ġmot ors +ĠG EN +ĠL ip +Ġh ates +Ġhe el +S elf +i pl +ED IT +ort ing +Ġann ot +ĠSpe ech +old emort +ĠJ avascript +ĠLe Bron +Ġfoot print +Ġf n +Ġseiz ures +n as +h ide +Ġ19 54 +ĠBe e +ĠDecl aration +ĠKat ie +Ġreserv ations +N R +f emale +Ġsatur ated +Ġb iblical +Ġtroll s +Dev ice +ph otos +Ġdr ums +ãĥīãĥ© ãĤ´ãĥ³ +N ight +f ighter +ĠH ak +ri ber +Ġc ush +Ġdiscipl inary +ba um +ĠG H +ĠSch midt +ilib rium +Ġs ixty +ĠKush ner +ro ts +Ġp und +ĠR ac +Ġspr ings +Ġcon ve +Bus iness +F all +Ġqual ifications +Ġvers es +Ġnarc iss +ĠK oh +ĠW ow +ĠCharl ottesville +ed o +Ġinterrog ation +ĠW ool +36 5 +B rian +Ġâľ ĵ +Ġalleg es +ond s +id ation +ĠJack ie +y u +Ġl akes +Ġworth while +Ġcryst als +ĠJud a +Ġcomp rehend +Ġfl ush +Ġabsor ption +ĠO C +Ġfright ened +ĠCh ocolate +Mart in +Ġbu ys +Ġbu cks +Ġapp ell +ĠChampions hips +Ġlist ener +ĠDef ensive +Ġc z +ud s +ĠM ate +Ġre play +Ġdecor ated +Ġs unk +ĠV IP +ĠAn k +Ġ19 5 +aa aa +Nob ody +ĠMil k +ĠG ur +ĠM k +ĠS ara +Ġse ating +ĠW id +Tr ack +Ġemploy s +Ġgig antic +AP P +ãĤ § +in ventory +Ġtow el +at che +l asting +ĠT L +Ġlat ency +Ġkn e +B er +me aning +Ġup held +Ġplay ground +Ġm ant +S ide +Ġstere o +Ġnorth west +Ġexception ally +Ġr ays +Ġrec urring +D rive +Ġup right +Ġab duct +ĠMar athon +Ġgood bye +Ġal phabet +h p +Ġcourt room +ring ton +ot hing +T ag +Ġdiplom ats +Ġbar bar +ĠAqu a +18 3 +33 33 +Ġmat urity +Ġinst ability +ĠAp ache +Ġ= == +Ġfast ing +ĠGr id +Mod Loader +Ġ15 2 +A bs +ĠOper ating +ett i +Ġacqu aint +Don nell +ĠK em +ĠFor ge +Ġarm ored +M il +Ġphilos ophers +in vest +Pl ayers +â Ī +Ġmy riad +Ġcomr ades +R ot +Ġremember ing +Ġcorrespond s +Ġprogram mers +ĠLyn n +Ġo lig +Ġco herent +yn chron +ĠChem ical +Ġj ugg +p air +post s +E ye +ĠIn ner +Ġsem ester +ott est +ĠEmir ates +ric anes +or ously +m its +ĠW is +Ġd odge +l ocation +Ġf aded +Am azon +ĠPro ceed +ĠIN FO +j ournal +ĠTru ck +T en +Ġ2 17 +Ġstat utes +m obile +ĠT ypes +Rec omm +b uster +pe x +Ġleg ends +Ġhead ache +f aced +ĠWi Fi +if ty +ĠH ER +Ġcirc uits +ER ROR +22 6 +ol in +Ġcyl inder +osp ace +ik ers +P rem +Qu ant +Ġconflic ting +Ġslight est +Ġfor ged +ion age +Step hen +ĠK ub +ĠOpp ortun +ĠHe al +Ġbl o +Ġrul ers +Ġh uh +Ġsubmar ine +f y +ass er +Ġallow ance +ĠKas ich +ĠT as +ĠAustral ians +Forge ModLoader +ĠâĨ ij +ĠMat rix +am ins +Ġ12 00 +ĠAc qu +23 6 +D ocument +ĠBre aking +19 3 +ĠSub st +ĠRoll er +ĠPro perties +ĠN I +t ier +Ġcr ushing +Ġadvoc ating +Further more +keep ers +Ġsex ism +x d +Ġcall er +ĠS ense +chie ve +ĠT F +Ġfuel ed +Ġreminis cent +Ġobs ess +ur st +Ġup hold +ĠF ans +het ics +Ġâ Ĺ +ĠB ath +Ġbe verage +Ġo scill +25 4 +Ġpol es +Ġgrad ual +Ġex ting +ĠS uff +ĠS uddenly +Ġlik ing +Ġ19 49 +un ciation +am ination +ĠO mar +ĠL V +ĠCon sequently +Ġsynt hes +ĠG IF +Ġp ains +Ġinteract ing +u ously +inc re +Ġrum or +ĠScient 
ology +19 7 +ĠZ ig +Ġspe lling +ĠA SS +Ġexting u +ms on +Ġg h +Ġremark ed +ĠStrateg ic +ĠM ON +å ¥ +g ae +ĠWH AT +E ric +ĠCamp us +Ġmeth ane +Ġimag in +J UST +ĠAl m +X T +i q +ĠR SS +Ġwrong doing +att a +Ġbig ot +Ġdemonstr ators +ĠCal vin +ĠV illa +Ġmembr ane +ĠAw esome +Ġbenef ic +26 8 +Ġmagn ificent +ĠL ots +G reg +ĠBor is +Ġdetain ees +ĠH erman +Ġwhis pered +Ġa we +Prof essor +fund ing +Ġphys iological +ĠDest ruction +Ġlim b +Ġmanip ulated +Ġbub bles +Ġpse ud +Ġhyd ra +ĠBrist ol +Ġst ellar +ĠExp ansion +ĠK ell +ĠInterest ingly +Ġm ans +Ġdrag ging +Ġec ological +ĠF it +Ġg ent +Ġbenef ited +ĠHait i +Ġpoly g +ãĥ İ +Ġ20 30 +Ġpro w +Ġrecon struction +Ġwas t +Ġpsych ic +ĠGree ks +Hand ler +16 2 +ĠP ulse +Ġsol icit +Ġsy s +Ġinflu x +ĠG entle +per cent +Ġprolifer ation +Ġtax able +Ġdisreg ard +Ġesc aping +Ġg inger +Ġwith stand +Ġdevast ated +ĠD ew +ser ies +Ġinject ed +ela ide +Ġturn over +he at +Ļ Ĥ +H appy +ĠSil ent +ãĤ Ń +iv ism +Ġir rational +AM A +Ġre ef +r ub +Ġ16 2 +Ġbank ers +ĠEth ics +v v +Ġcritic isms +K n +18 6 +M ovie +ĠT ories +Ġno od +Ġdist ortion +F alse +od ore +Ġt asty +Res earch +ĠU ID +- ) +Ġdivor ced +ĠM U +ĠHay es +ĠIs n +ian i +ĠH Q +Ġ" # +ign ant +Ġtra umatic +ĠL ing +H un +Ġsab ot +on line +r andom +Ġren amed +ra red +K A +d ead +é t +ĠAss istance +Ġse af +++++ ++++ +Ġse ldom +ĠWeb b +Ġbo olean +u let +Ġref rain +ĠDI Y +ru le +Ġshut ting +Ġutil izing +load ing +ĠPar am +co al +oot er +Ġattract ing +ĠD ol +Ġher s +ag netic +ĠRe ach +im o +Ġdisc arded +ĠP ip +01 5 +ü r +Ġm ug +Im agine +C OL +Ġcurs ed +ĠSh ows +ĠCurt is +ĠSach s +spe aking +ĠV ista +ĠFram ework +ong o +Ġsub reddit +Ġcr us +ĠO val +R ow +g rowing +Ġinstall ment +Ġgl ac +ĠAdv ance +EC K +ĠLGBT Q +LE Y +Ġac et +Ġsuccess ive +ĠNic ole +Ġ19 57 +Qu ote +Ġcircumst ance +ack ets +Ġ14 2 +ort ium +Ġguess ed +ĠFr ame +Ġperpet rators +ĠAv iation +ĠBen ch +Ġhand c +A p +Ġ19 56 +25 9 +r and +Net Message +d in +urt les +h ig +ĠV III +ff iti +ĠSw ords +b ial +Ġkidn apping +dev ice +Ġb arn +ĠEl i +auc as +S end +Con structed +Ġ ½ +Ġneed les +Ġad vertisements +Ġv ou +Ġexhib ited +ĠFort ress +As k +B erry +TY PE +Ġcan cers +ump ing +ĠTerrit ory +Ġpr ud +Ġn as +Ġathe ist +Ġbal ances +ãģ Ł +ĠSh awn +& & +Ġland sc +ĠR GB +Ġpet ty +Ġex cellence +Ġtransl ations +Ġpar cel +ĠChe v +E ast +ĠOut put +im i +Ġamb ient +ĠTh reat +Ġvill ains +Ġ5 50 +IC A +Ġtall er +Ġle aking +c up +Ġpol ish +Ġinfect ious +ĠK C +Ġ@ @ +back ground +Ġbureaucr acy +ĠS ai +un less +it ious +ĠSky pe +At l +ID ENT +00 8 +Ġhyp ocr +Ġpit chers +Ġguess ing +ĠF INAL +Bet ween +Ġvill agers +Ġ25 2 +f ashion +ĠTun is +Be h +ĠEx c +ĠM ID +28 8 +ĠHas kell +19 6 +ĠN OR +Ġspec s +Ġinv ari +Ġgl ut +ĠC ars +Ġimp ulse +Ġhon ors +g el +Ġjurisd ictions +ĠBund le +ul as +Calif ornia +ĠIncre ase +Ġp ear +Ġsing les +Ġc ues +Ġunder went +ĠW S +Ġexagger ated +Ġdub ious +Ġfl ashing +L OG +) ]. 
+J ournal +t g +V an +ĠI stanbul +ĠIn sp +ĠFrank en +D raw +Ġsad ness +Ġiron ic +ĠF ry +x c +Ġ16 4 +is ch +W ay +ĠProtest ant +h orn +Ġun aff +ĠV iv +ill as +ĠProduct ions +ĠH ogan +Ġper imeter +ĠS isters +Ġspont aneous +Ġdown side +Ġdescend ants +Ġor n +w orm +Japan ese +Ġ19 55 +Ġ15 1 +ĠDo ing +els en +umb les +Ġrad ically +ĠDr um +ĠB ach +Ġli abilities +ĠO B +ĠElement ary +Ġmem e +yn es +Ġfinger print +ĠGr ab +Ġundert ake +Mem bers +ĠRead er +ĠSim s +g od +Ġhypot hetical +s cient +ĠA J +Ġchar ism +Ġad missions +ĠMiss ile +tr ade +Ġexerc ising +ĠBack ground +W ritten +Ġvoc als +whe ther +Ġv i +ĠW inner +Ġl itter +ĠSh ooting +ST EM +ãĤ ¡ +ĠA FL +Ġvari ability +Ġe ats +ĠD PS +b row +Ġeleph ants +Ġstr at +Ġ Å +Ġsett lers +Matt hew +Ġin advert +H I +ĠIM F +ĠGo al +Ġnerv es +John son +ey e +ablish ment +Th ursday +BIL ITY +H ad +am oto +het amine +ep s +Ġmit ochond +Ġcomp ressed +ĠTre vor +ĠAnim als +T ool +L ock +Ġtwe ak +Ġpin ch +Ġcancell ation +P ot +Ġfoc al +ĠAst ron +17 3 +ĠA SC +ĠO THER +umn i +Ġdem ise +d l +Ù ħ +Sem itism +Ġcr acking +Ġcollabor ative +Ġexpl ores +s ql +Ġher bs +Ġconfig urations +m is +ĠRes ult +ace y +ĠSm oke +Ġsan ct +el ia +Ġdeg ener +Ġdeep est +Ġscream ed +Ġn ap +Soft ware +ĠST AR +E F +ĠX in +spons ored +mans hip +23 3 +Ġprim aries +Ġfilter ing +Ġas semble +m il +ĠMy ers +b ows +Ġpun ched +M ic +Ġinnov ations +Ġfun c +and o +Ġfr acking +ĠV ul +о Ð +osh op +ĠIm mun +Ġsett ling +Ġadolesc ents +Ġreb uilding +Ġtransform ing +Ġpar ole +Ġhar bor +Ġbook ing +ot ional +onge vity +ĠY o +b ug +Ġemer ges +ĠMethod s +ĠCh u +P res +ĠDun geons +Ġtra iling +ĠR um +ĠH ugh +å¤ © +ĠE ra +ĠBatt les +Res ults +ĠTr ading +Ġvers a +c ss +ax ies +he et +Ġgre ed +19 89 +Ġgard ens +Ġconting ent +P ark +ĠLeaf s +h ook +ro be +Ġdiplom acy +ĠF uel +ĠInv asion +Ġupgr ading +M ale +Ġe lic +Ġrelent less +ĠCo venant +ap esh +ĠT rop +T y +pro duction +art y +Ġpun ches +ak o +cyclop edia +ĠR abbit +ĠHD MI +Ġ14 1 +Ġf oil +Item Image +ĠF G +Ġimplement ations +ĠP om +ixt ures +Ġaw ait +Ġ3 30 +am us +Ġumb rella +Ġfore see +se par +Ġcircum cision +Ġperipher al +S ay +ĠExper t +In c +Ġwithd rew +ĠAnd ers +f ried +Ġradio active +ĠOp ening +Ġboard ing +ĠN D +Ġover throw +Act iv +W P +ĠAct s +× Ļ +Ġmot ions +v ic +ĠM ighty +ĠDef ender +a er +Ġthank ful +ĠK illing +ĠBr is +mo il +Ġpredict ing +26 6 +ch oice +Ġkill ers +Ġinc ub +ĠChe st +ather ing +Ġpro claimed +fl ower +oss om +umbled ore +ĠCy cling +ĠOccup y +AG ES +P en +ĠY ug +Ġpack aged +Ġheight ened +c ot +st ack +C ond +Ġst amps +m age +Ġpersu aded +Ġens l +ĠCard inal +Ġsol itary +Ġpossess ing +ĠC ork +Ġev id +ĠT ay +Ġbl ues +Ġextrem ism +Ġlun ar +Ġcl own +Te chn +Ġfest ivals +ĠPv P +ĠL ar +Ġconsequ ently +p resent +Ġsom eday +ç İĭ +ĠMet eor +Ġtour ing +c ulture +Ġbe aches +S hip +c ause +ĠFl ood +ãĥ ¯ +Ġpur ity +th ose +Ġem ission +b olt +Ġch ord +ĠScript ure +L u +Ġ$ { +cre ated +Other s +25 8 +Ġelement al +Ġannoy ed +ĠA E +d an +ĠS ag +Res earchers +Ġfair y +âĢĵ âĢĵ +======== ==== +Sm art +GG GG +Ġskelet ons +Ġpup ils +link ed +Ġur gency +en abled +ĠF uck +Ġcoun cill +r ab +U AL +T I +Ġlif es +Ġconf essed +B ug +Ġharm on +ĠCON FIG +ĠNe utral +D ouble +Ġst aple +ĠSH A +Brit ish +ĠSN P +AT OR +oc o +Ġswing ing +ge x +ole on +pl ain +ĠMiss ing +ĠTro phy +v ari +ran ch +Ġ3 01 +4 40 +00000000 00000000 +Ġrest oring +Ġha ul +uc ing +ner g +Ġfut ures +Ġstrateg ist +quest ion +Ġlater al +ĠB ard +Ġs or +ĠRhod es +ĠD owntown +????? 
- +ĠL it +ĠB ened +Ġco il +st reet +ĠPort al +FI LE +ĠG ru +* , +23 1 +ne um +Ġsuck ed +Ġr apper +Ġtend encies +ĠLaure n +cell aneous +26 7 +Ġbrow se +Ġover c +head er +o ise +Ġbe et +ĠG le +St ay +Ġm um +Ġtyp ed +Ġdiscount s +T alk +ĠO g +ex isting +ĠS ell +u ph +C I +ĠAust rian +ĠW arm +Ġdismiss al +Ġaver ages +c amera +Ġalleg iance +L AN +=" # +Ġcomment ators +ĠSet ting +ĠMid west +Ġpharm ac +ĠEX P +Ġstain less +Ch icago +Ġt an +24 4 +Ġcountry side +ĠV ac +29 5 +Ġpin ned +Ġcr ises +Ġstandard ized +T ask +ĠJ ail +ĠD ocker +col ored +f orth +" }, +Ġpat rons +Ġsp ice +Ġm ourn +ĠM ood +Ġlaund ry +Ġequ ip +ĠM ole +y ll +ĠTH C +n ation +ĠSher lock +Ġiss u +ĠK re +ĠAmeric as +ĠA AA +Ġsystem atically +Ġcont ra +ĠS ally +Ġrational e +Ġcar riage +Ġpe aks +Ġcontrad iction +ens ation +ĠFail ure +Ġpro ps +Ġnames pace +Ġc ove +field s +ãĤ ĭ +Ġw ool +ĠC atch +Ġpresum ed +ĠD iana +r agon +ig i +Ġh amm +Ġst unt +ĠG UI +ĠObserv atory +ĠSh ore +Ġsmell s +ann ah +Ġcock pit +ĠD uterte +8 50 +Ġopp ressed +bre aker +ĠCont ribut +ĠPer u +ĠMons anto +ĠAtt empt +Ġcommand ing +Ġfr idge +ĠR in +ĠChe ss +ual ity +Ġo l +Republic an +ĠGl ory +ĠW IN +.... ... +ag ent +read ing +Ġin h +J ones +Ġcl icks +al an +Ġ[ ]; +ĠMaj esty +ĠC ed +op us +ate l +à ª +AR C +ĠEc uador +ãĥ ł +ĠK uro +Ġritual s +Ġcapt ive +Ġoun ce +Ġdisag reement +Ġsl og +f uel +P et +M ail +Ġexerc ised +Ġsol ic +Ġrain fall +Ġdev otion +ĠAss essment +Ġrob otic +opt ions +ĠR P +ĠFam ilies +ĠFl ames +Ġassign ments +00 7 +aked own +Ġvoc abulary +Re illy +Ġc aval +g ars +Ġsupp ressed +ĠS ET +ĠJohn s +Ġwar p +bro ken +Ġstat ues +Ġadvoc ated +Ġ2 75 +Ġper il +om orph +ĠF emin +per fect +Ġh atch +L ib +5 12 +Ġlif elong +3 13 +Ġche eks +Ġnum bered +ĠM ug +B ody +ra vel +We ight +ĠJ ak +ĠHe ath +Ġkiss ing +ĠJ UST +Ġw aving +u pload +Ġins ider +ĠPro gressive +ĠFil ter +tt a +ĠBe am +Ġviol ently +ip ation +Ġskept icism +Ġ19 18 +ĠAnn ie +ĠS I +Ġgen etics +Ġon board +at l +ĠFried man +ĠB ri +cept ive +Ġpir ate +ĠRep orter +27 8 +Ġmyth ology +Ġe clipse +Ġsk ins +Ġgly ph +ing ham +F iles +C our +w omen +Ġreg imes +Ġphotograp hed +K at +ĠMA X +Offic ials +Ġunexpected ly +Ġimpress ions +F ront +;;;; ;;;; +Ġsuprem acy +Ġs ang +Ġaggrav ated +Ġabrupt ly +ĠS ector +Ġexc uses +Ġcost ing +ide press +St ack +ĠR NA +ob il +Ġghost s +ld on +at ibility +Top ics +Ġreim burse +ĠH M +ĠDe g +Ġth ief +y et +ogen esis +le aning +ĠK ol +ĠB asketball +Ġf i +ĠSee ing +Ġrecy cling +Ġ[ - +Cong ress +Ġlect ures +P sy +Ġne p +Ġm aid +Ġori ented +A X +Ġrespect ful +re ne +fl ush +ĠUn loaded +re quest +gr id +ĠAltern atively +ĠHug o +Ġdec ree +ĠBuddh ism +and um +And roid +ĠCong o +ĠJoy ce +Ġacknowled ging +hes ive +ĠTom orrow +ĠH iro +th ren +ĠM aced +Ġho ax +ĠIncre ased +ĠPr adesh +W ild +____ __ +16 1 +Ġa unt +Ġdistribut ing +ĠT ucker +ĠSS L +ĠW olves +B uilding +ou lt +ĠLu o +ĠY as +ĠSp ir +ĠSh ape +ĠCamb od +ĠIP v +Ġm l +Ġext rad +39 0 +ĠPenn y +d ream +Ġstation ed +opt ional +ew orthy +. 
+ĠWorks hop +ĠRet ail +ĠAv atar +6 25 +N a +ĠV C +ĠSec ure +M Y +19 88 +oss ip +Ġpro state +Ġund en +Ġg amer +ĠCont ents +ĠWar hammer +ĠSent inel +3 10 +Ġse gregation +ĠF lex +ĠM AY +Ġdr ills +ĠDrug s +Islam ic +Ġsp ur +Ġca fe +Ġimag inary +Ġgu iding +Ġsw ings +ĠThe me +ob y +Ġn ud +Ġbe gging +Ġstr ongh +Ġreject ing +Ġpedest rians +ĠPro spect +R are +s le +Ġconcess ions +ĠConst itutional +Ġbe ams +Ġfib ers +p oon +Ġinstinct s +pro perty +ĠB IG +Sand ers +im ates +Ġco ating +Ġcorps es +ĠTR UE +check ed +Ġ16 6 +A sh +ĠJ S +ĠF iction +Ġcommun al +Ġener getic +oooo oooo +Ġnow adays +IL D +ib o +ĠSU V +R en +Ġdwell ing +Sil ver +Ġt ally +ĠM oving +Ġcow ard +Ġgener als +Ġhorn s +Ġcirc ulated +Ġrob bed +ĠUn limited +Ġharass ed +Ġinhib it +Ġcomp oser +ĠSpot ify +Ġspread s +3 64 +Ġsu icidal +Ġno ises +ĠSt ur +Ġs aga +ĠK ag +is o +Ġtheoret ically +M oney +Ġsimilar ity +Ġslic ed +ut ils +ing es +" - +Ġan th +Ġimp ed +Mod ule +Through out +Ġmen us +comm ittee +and i +ob j +in av +f ired +ĠAb dullah +Ġund ead +Ġfont s +H old +EN G +Ġsustain ability +Ġfl ick +Ġr azor +ĠF est +ĠChar acters +Ġword ing +Ġpopul ist +Ġcritic izing +Ġm use +v ine +Ġcard board +Ġkind ly +Ġfr inge +ĠThe ft +icult ural +Ġgovern ors +Ġ ���� +Ġ16 3 +Ġtime out +ĠA uth +Child ren +A U +Ġred emption +ĠAl ger +Ġ19 14 +Ġw aved +Ġastron auts +og rams +Ġsw amp +ĠFinn ish +Ġcand le +Ġton nes +ut m +Ġr ay +Ġsp un +Ġfear ful +art icles +Ġca us +or ically +ĠRequ ires +ĠG ol +Ġpop e +Ġinaug ural +Ġg le +AD A +ĠIS IL +ĠOff ensive +Ġwatch dog +Ġbal con +ent ity +ĠH oo +Ġgall on +AC C +Ġdoub ling +Ġimpl ication +ĠS ight +Ġdoct r +---- --- +Ġ\ \ +Ġm alt +R oll +Ġâī ¥ +Ġrec ap +add ing +u ces +ĠB end +fig ure +Ġtur key +Ġsoc ietal +ĠT ickets +Ġcommer cially +Ġsp icy +Ġ2 16 +ĠR amp +Ġsuperior ity +à ¯ +ĠTr acker +C arl +ĠC oy +ĠPatri ot +Ġconsult ed +Ġlist ings +Ġsle w +reens hot +ĠG one +Ġ[ ...] +30 9 +Ġh ottest +Ø ± +Ġrock y +ĠD iaz +Ġmass age +Ġpar aly +Ġp ony +A z +Ġcart ridge +ĠN Z +Ġsn ack +ĠLam ar +ple ment +ĠLes lie +Ġm ater +Ġsn ipp +24 6 +Ġjoint ly +ĠBris bane +ĠiP od +Ġpump ing +Ġgo at +ĠSh aron +eal ing +Ġcor on +Ġan omal +rah im +ĠConnect ion +Ġsculpt ure +Ġsched uling +ĠD addy +at hing +Ġeyeb rows +Ġcur ved +Ġsent iments +Ġdraft ing +D rop +( [ +Ġnom inal +ĠLeaders hip +ĠG row +Ġ17 6 +Ġconstruct ive +iv ation +Ġcorrupt ed +ger ald +ĠC ros +ĠChe ster +ĠL ap +ãģ ª +OT H +D ATA +Ġal mond +pro bably +I mp +Ġfe ast +ĠWar craft +F lor +Ġcheck point +Ġtrans cription +Ġ20 4 +Ġtwe aks +Ġrel ieve +S cience +Ġperform er +Z one +Ġtur moil +ig ated +hib it +ĠC afe +the med +Ġflu or +ben ch +Ġde com +ĠU nt +ĠBar rett +ĠF acts +Ġt asting +ĠPTS D +ĠSe al +ĠJuda ism +ĠDynam ic +ĠC ors +V e +ĠM ing +ĠTrans form +v on +ĠDef enders +ĠTact ical +ĠV on +ĠUn ivers +Ġdist orted +ĠB reath +?' 
" +Ġag on +ĠDead ly +Ġl an +ĠCy cle +orn ed +Ġrel iably +Ġgl or +ĠMon key +ãĥ ¡ +Ġad ren +Ġmicrow ave +ĠAl ban +irc raft +dig it +sm art +ĠD read +¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯ +{ { +ĠRoc hester +Ġsimpl ified +Ġinf licted +Ġtake over +Ġyour selves +ad itional +Ġmus cular +K S +Ġing en +T ax +ĠFe ature +27 7 +Ġcru c +Ġcr ate +Ġun identified +Ġacclaim ed +ĠM anga +ĠFr ances +ĠNep al +ĠG erald +ĠKu wait +Ġsl ain +ĠHe b +ĠG oku +ãģ® æ +28 6 +M rs +ĠC ody +ĠSan ctuary +01 6 +Ġdism ant +Ġdatas et +ĠH ond +b uck +ĠPat terson +Ġpal ette +ĠG D +ic ol +ĠL odge +Ġplanet ary +ak in +ĠRegist ered +ab we +ĠPeters burg +Ġha iled +ĠP iece +S che +ĠDO J +Ġen umer +18 1 +ĠObs erver +ĠB old +f ounded +com merce +Ġexplo its +ĠF inding +UR N +ĠS ne +ĠAc id +ay ette +ĠVal ues +Ġdr astic +Ġarchitect ural +Ġ" . +× ķ +ump ed +Ġwra pping +Ġwid ow +ĠSl ayer +l ace +on ce +German y +av oid +Ġtem ples +P AR +à ´ +ĠLuc ifer +ĠFl ickr +l ov +for ces +Ġsc outing +Ġlou der +tes y +Ġbefore hand +Ä ĵ +ĠNe on +ĠW ol +ĠTyp ically +ĠPolit ico +-+ -+ +Ġbuild er +Ġder ive +K ill +Ġp oker +Ġambig uous +Ġlif ts +Ġcy t +Ġrib s +ood le +ĠS ounds +h air +ĠSynd rome +t f +Ġproport ional +u id +Ġper taining +ĠKind le +ĠNeg ro +Ġreiter ated +ĠTon ight +oth s +ĠCorn ell +Ġo wing +Ġ20 8 +elf are +oc ating +ĠB irds +Sub scribe +Ġess ays +Ġburd ens +Ġillust rations +ar ious +ER AL +ĠCal cul +Ġx en +ĠLink edIn +ĠJ ung +Ġredes ign +Con nor +29 6 +Ġrevers al +ĠAd elaide +ĠL L +Ġs inking +Ġg um +US H +c apt +ĠGr imm +Ġfoot steps +ĠCB D +isp ers +Ġpro se +Wed nesday +ĠM ovies +ed in +Ġoverturn ed +Ġcontent ious +US B +~~~~~~~~ ~~~~~~~~ +ĠCo pper +Ġpoint less +N V +val ues +olph in +d ain +Ġdepos ited +ĠG W +Ġpreced ed +ĠCl a +ĠGo lem +ĠN im +ĠÎ ² +ĠEngine ers +m iddle +Ġfl att +oper ative +Ġcouncil s +imb abwe +el in +Ġstress ful +ĠL D +Ġres h +l ake +Ġwheel chair +ĠAltern ative +Ġoptim ize +oper ation +Ġpe ek +Ġones elf +ig il +Ġtrans itions +op athy +bl ank +Ġ16 9 +17 1 +________________________________ ________________________________ +Ġl aundering +En c +ĠD EC +Ġwork outs +Ġsp ikes +Ġdin osaurs +Ġdiscrim inatory +P ool +R ather +38 5 +R NA +tes ters +et o +ĠIdent ity +Ġve in +ĠBur ton +Ġarc ade +4 20 +Ult imately +ĠSad ly +à ° +p ill +Ġcub ic +ĠSpect rum +the se +st ates +Ġun official +h awks +ĠEVER Y +Ġrain bow +Ġincarcer ation +and ing +Ġsy ll +ĠEver ton +Ġ17 9 +ĠSer bia +Ġ18 9 +m eter +ĠMic key +Ġant iqu +Ġfact ual +ne ck +ĠN are +n orm +m ust +Ġhigh ways +Ġgl am +Ġdivid ing +ĠSquad ron +ĠMar tha +Ġbirth s +C over +//////// //////// +ĠW ong +Ph ot +ĠA LS +ri o +ĠNon etheless +ĠL emon +Ġ20 6 +ĠE E +Ġderiv ative +ĠWW II +v ote +Ġthere in +Ġsepar ating +44 6 +sy nc +ĠStre ets +Ġr att +Ġmunicip ality +ĠShort ly +Ġmon k +) ," +Ġscr ub +Ġoper atives +Ne ither +Pl ace +ĠLim it +F emale +ĠAct or +Char acter +Ġconstit uted +35 7 +Ġprotest ed +ĠSt raw +ĠHe ight +ild a +ĠTy ph +Ġflood s +Ġcos metic +W AY +pert ure +up on +t ons +ess ing +ĠP ocket +Ġro oft +ĠC aucas +Ġant idepress +Ġincomp atible +EC D +Ġoper a +ĠCont est +Ġgener ators +l ime +Def ense +19 87 +for um +Ġsav age +ĠHung arian +n z +Ġmet allic +Ġex pelled +Ġres idency +Ġdress es +66 6 +ĠC lement +f ires +C ategory +Ġge ek +al is +Ġc emetery +educ ated +Ġc rawl +ĠUn able +ĠT yson +ak is +Ġp ardon +ĠW ra +Ġstrengthen ed +ĠF ors +33 5 +ĠH C +ĠM ond +Ġvisual s +ĠBeat les +ett lement +Ġ ï +g ro +Ġb ash +Ġpo orest +Ġex cel +Ġaspir ations +ĠM unicip +ens ible +Ġceremon ies +Ġintimid ation +ĠCON TR +be ck +ĠK ap +as u +Ġtradem arks +ĠS ew +ĠComp etition +net work +ĠAr ri +ĠT et +Ro aming +W C +D at +Ġso b 
+Ġpair ing +Ġoverd ose +SA Y +ab er +Ġrev olt +ĠF ah +act ing +e q +est ation +F ight +ĠMar ks +27 3 +Ġ17 8 +R aw +ãģ ĭ +34 9 +bl ocks +Ġver ge +est ine +ĠPod esta +Ġinv asive +Ġprofound ly +ĠA o +e ach +Ġl est +inter pret +Ġshr inking +Ġerr one +Ġche es +ly s +ĠI vy +ĠDirect ory +Ġhint ed +V ICE +Ġcontact ing +ĠG ent +he i +Ġlabel ing +Ġmerc ury +ĠL ite +Ġexp ires +Ġdest abil +rit is +c u +Ġfeather s +Ġste er +Ġprogram med +ĠV ader +Go ing +ĠE lim +Ġy o +ĠMic he +Ġ20 3 +Ġslee ves +Ġb ully +ĠHum ans +36 8 +Ġcomp ress +ĠBan ner +AR S +Ġa while +Ġcal ib +Ġspons orship +ĠDiff iculty +ĠP apers +Ġident ifier +} . +Ġy og +ĠSh ia +Ġclean up +Ġvib e +int rodu +im ming +Austral ia +Ġout lines +ĠY outube +tr ain +ĠM akes +Ġde ported +Ġcent r +ĠD ug +ĠB oulder +ĠBuff y +Ġinj unction +ĠHar ley +ĠG roups +ĠD umbledore +ĠCl ara +Ġ" - +Ġsacrific ed +ep h +Sh adow +ib ling +Ġfreel ance +Ġevident ly +ph al +Ġret ains +M ir +Ġfin ite +d ar +ĠC ous +Ġrep aired +Ġperiod ic +Ġchampions hips +Ġaster oid +bl ind +Ġexpress ly +ĠAst ros +Ġsc aled +Ġge ographical +ĠRap ids +En joy +Ġel astic +ĠMoh amed +Mark et +be gin +Ġdisco vers +Ġtele communications +Ġscan ner +Ġen large +Ġsh arks +Ġpsy chedel +ĠRou ge +Ġsnap shot +is ine +X P +Ġpestic ides +ĠL SD +ĠDist ribution +re ally +Ġde gradation +Ġdisgu ise +Ġbi om +ĠEX T +Ġequ ations +Ġhaz ards +ĠComp ared +) * +Ġvirt ues +Ġeld ers +Ġenh ancing +ĠAc ross +er os +ang ling +Ġcomb ust +ucc i +Ġconc ussion +Ġcontrace ption +ĠK ang +Ġexpress es +Ġa ux +ĠP ione +Ġexhib its +Deb ug +OT AL +ĠAl ready +ĠWheel er +Ġexp ands +? : +Ġreconc iliation +Ġpir ates +Ġpur se +Ġdiscour age +Ġspect acle +R ank +Ġwra ps +ĠTh ought +Ġimp ending +O pp +ĠAng lo +ĠE UR +Ġscrew ed +ret ched +Ġencour agement +mod els +Ġconf use +mm m +ĠVit amin +âĸij âĸij +C ru +Ġkn ights +Ġdisc ard +Ġb ishops +ĠW ear +ĠGar rett +k an +ãĥ Ł +Ġmascul ine +cap ital +ĠA us +Ġfat ally +th anks +ĠA U +ĠG ut +12 00 +Ġ 00000000 +Ġsur rog +ĠBI OS +ra its +ĠWat ts +Ġresur rection +ĠElect oral +ĠT ips +4 000 +Ġnut rient +Ġdepict ing +Ġspr ink +Ġm uff +ĠL IM +ĠS ample +ps c +ib i +gener ated +Ġspec imens +Ġdiss atisf +Ġtail ored +Ġhold ings +ĠMonth ly +ĠE at +po ons +Ġne c +ĠC age +ĠLot us +ĠLan tern +Ġfront ier +Ġp ensions +Ġj oked +ĠHard y +=-=- =-=- +r ade +U ID +Ġr ails +Ġem it +Ġsl ate +Ġsm ug +Ġsp it +ĠCall s +ĠJac obs +f eat +ĠU E +Ġrest ruct +Ġregener ation +Ġenerg ies +ĠCon nor +OH N +ĠChe ese +Ġg er +Ġresur rect +man agement +N W +Ġpres ently +ĠBru ins +M ember +ĠM ang +id an +Ġboost ing +w yn ++ . +requ isite +ĠNY PD +ĠMe gan +ĠCond itions +Ġp ics +nes ium +ĠR ash +Ġ17 4 +ĠD ucks +Ġemb ro +z u +on ian +rel igious +Ġc raz +ĠAC A +ĠZ ucker +EM A +ĠPro s +We apon +ĠKn ox +ĠAr duino +Ġst ove +Ġheaven s +ĠP urchase +Ġher d +Ġfundra iser +Dig ital +5 000 +Ġprop onents +/ âĢĭ +Ġj elly +ĠVis a +Ġmon ks +Ġadvance ment +ĠW er +Ġ18 7 +e us +ert ility +Ġfet al +Ġ19 36 +L o +Ġout fits +Ġstair case +b omb +Ġcustom ized +cl air +T ree +Ġm apped +ĠConsider ing +ĠTor res +Ġmeth yl +Ġapprox imate +Ġdo om +ĠHans en +Ġc rossover +Ġstand alone +ä ¼ +Ġinv ites +Ġgra veyard +Ġh p +Donald Trump +Ġesc ort +G ar +Ġpredec essors +Ġh ay +Ġen zyme +ĠStra ight +vis ors +I ng +ane ously +ĠApp lied +Ġf ec +ĠDur ant +Ġout spoken +or b +Ġz eal +Ġdisgr ace +' ). 
+ĠChe ng +28 9 +ĠRen a +ĠSu icide +29 4 +Ġout raged +ĠNew man +ĠN vidia +ĠA ber +ĠB ers +Ġrecre ation +Wind ow +ĠD P +x e +Ġped oph +Ġfall out +ambo o +Ġpresent ations +ĠApp s +Ġh tml +3 45 +ĠX XX +Ġrub bing +ĠLe ather +Ġhum idity +se ys +est ablished +ĠUn its +64 6 +Ġrespect able +A uto +Ġthri ving +ĠInn ovation +ang s +Ext ra +reg ulation +29 8 +p ick +Ex amples +ĠC J +Att ack +Ġdr acon +L T +Ġstick er +re rs +Ġsun ny +I ss +reg ulated +d im +ĠAb stract +Ġhus bands +Off ice +om ination +it ars +AN GE +asc al +ĠK ris +ĠInf antry +Ġm alf +ĠA the +ĠR ally +bal anced +................ ........ +OU P +Ġmole cule +met ics +ĠSpl it +ĠInstruct ions +ĠN ights +c ards +Ġt ug +Ġcon e +å Ń +Ġt x +ĠDisc ussion +Ġcatast rophe +pp e +g io +Ġcommun ism +Ġhal ted +ĠGu ant +cle an +ĠSc hed +ĠK anye +Ġw ander +ĠSer iously +Ġ18 8 +enn ial +f ollow +product ive +ĠFl ow +ĠS ail +Ġc raw +Ġsim ulations +or u +ang les +ĠN olan +Ġmen stru +4 70 +Ġ20 7 +aj a +Ġcas ually +board ing +Ġ2 22 +ov y +ĠN umbers +um at +O E +28 7 +ĠCle mson +Ġcert s +Ġsl id +ĠT ribe +Ġto ast +Ġfort unes +Ġf als +ĠComm ittees +Ġg p +Ġf iery +ĠN ets +ĠAn ime +Pack age +ĠComp are +l aughter +in fect +Ġatroc ities +Ġjust ices +Ġins ults +ĠVern on +Ġsh aken +Ġperson a +est amp +36 7 +br ain +Ġexperiment ing +K en +ĠElect ronics +Ġ16 1 +dom ain +Ġgraph ical +b ishop +Ġwho pping +ĠEv angel +Ġadvertis ers +ĠSpe ar +Ġb ids +Ġdestro ys +ut z +Ġunders c +ĠAD D +Ġan ts +ĠC um +ipp les +ĠF ill +Ġgl anced +Ġind icted +ĠE ff +Ġmis con +ĠDes ktop +Ġab ide +ãĥ Ģ +ĠI o +ĠC oul +Ġcaps ule +ĠCh rys +M ON +Ġund es +ĠI RA +Ġc itation +Ġdict ate +ĠNet works +ĠConf lict +ĠSt uff +x a +is ec +ĠChem istry +Ġquarter ly +William s +an an +O pt +ĠAlexand ria +out heastern +ĠSpring field +ĠBlack s +Ġge ography +24 2 +Ġut most +ĠEx xon +ab outs +E VA +ĠEn able +ĠBar r +Ġdisag reed +ĠCy prus +Ġdement ia +Ġlab s +Ġubiqu itous +ĠLO VE +Ġconsolid ated +s r +Ġcream y +ĠTim ber +Reg ardless +ĠCert ificate +Ġ" ... +ogen ous +Capt ain +Ġinsult ing +ĠSor os +ĠInst r +ĠBulgar ia +bet ter +Ġsuck ing +ĠDavid son +at z +Ġcoll ateral +g if +Ġplag ued +ĠC ancel +ĠGard ner +R B +Ġsix teen +Rem ove +ur istic +c ook +R od +Ġcompr ising +f le +) âĢĶ +ĠVik ing +g rowth +agon al +Ġsr f +af ety +m ot +N early +st own +ĠF actor +Ġautom obile +Ġproced ural +m ask +amp ires +Ġdisapp ears +j ab +3 15 +Ġ19 51 +ne eded +Ġd aring +le ader +Ġp odium +Ġun healthy +Ġm und +Ġpy ramid +oc re +Ġkiss ed +Ġdream ed +ĠFant astic +ĠG ly +å Ĭ +Ġgreat ness +Ġsp ices +Ġmet ropolitan +Ġcomp uls +i ets +101 6 +ĠSh am +ĠP yr +fl ies +ĠMid night +Ġswall owed +Ġgen res +ĠL ucky +ĠRew ards +Ġdisp atch +ĠI PA +ĠApp ly +Ġa ven +al ities +3 12 +th ings +Ġ( ). 
+Ġm ates +ĠS z +ĠC OP +ol ate +O FF +Ġre charge +c aps +ĠYork er +ic one +Ġgal axies +ile aks +D ave +ĠP uzz +ĠCelt ic +ĠA FC +27 6 +ĠS ons +Ġaffirm ative +H or +Ġtutorial s +ĠC ITY +ĠR osa +ĠExt ension +Ser ies +Ġf ats +Ġr ab +l is +Ġun ic +Ġe ve +ĠSp in +Ġadul thood +ty p +Ġsect arian +Ġcheck out +ĠCy cl +S ingle +Ġmart yr +Ġch illing +88 8 +ou fl +Ġ] ; +Ġcongest ion +m k +ĠWhere as +Ġ19 38 +ur rencies +er ion +Ġbo ast +ĠPat ients +Ġch ap +ĠB D +real DonaldTrump +Ġexam ines +h ov +Ġstart ling +ĠBab ylon +w id +om ew +br ance +ĠOd yssey +w ig +Ġtor ch +ĠV ox +ĠMo z +ĠT roll +ĠAn s +Similar ly +ĠF ul +00 6 +Un less +ĠAl one +st ead +ĠPub lisher +r ights +t u +ĠDoes n +Ġprofession ally +Ġcl o +ic z +Ġste als +Ġ á +19 86 +Ġst urdy +ĠJoh ann +Ġmed als +Ġfil ings +ĠFr aser +d one +Ġmult inational +Ġf eder +Ġworth less +Ġp est +Yes terday +ank ind +Ġg ays +Ġb orne +ĠP OS +Pict ure +Ġpercent ages +25 1 +r ame +Ġpot ions +AM D +ĠLeban ese +Ġr ang +ĠL SU +ong s +Ġpen insula +ĠCl ause +AL K +oh a +ĠMac Book +Ġunanim ous +Ġl enders +Ġhang s +Ġfranch ises +ore rs +ĠUp dates +Ġisol ate +and ro +S oon +Ġdisrupt ive +ĠSur ve +Ġst itches +ĠSc orp +ĠDomin ion +Ġsupp lying +Ar g +Ġtur ret +ĠL uk +Ġbr ackets +* ) +ĠRevolution ary +ĠHon est +Ġnot icing +ĠSh annon +Ġafford ed +Ġth a +ĠJan et +! -- +ĠNare ndra +ĠPl ot +H ol +se ver +e enth +Ġobst ruction +Ġ10 24 +st aff +j as +or get +sc enes +l aughs +ĠF argo +cr ime +Ġorche str +Ġde let +ili ary +rie ved +Ġmilit ar +ĠGreen e +âĹ ı +ãģ ¦ +ĠGu ards +Ġunle ashed +ĠWe ber +Ġadjust able +Ġcal iber +Ġmotiv ations +Ġà ł +m Ah +ĠL anka +hand le +Ġp ent +ĠR av +ĠAng ular +ĠK au +umb ing +Ġphil anthrop +Ġde hyd +Ġtox icity +e er +ĠY ORK +w itz +å ¼ +ĠI E +commun ity +ĠA H +Ġret ali +Ġmass ively +ĠDani els +ĠD EL +Ġcar cin +Ur l +Ġrout ing +ĠNPC s +ĠR AF +ry ce +Ġwa ived +ĠGu atem +Every body +Ġco venant +Ġ17 3 +Ġrelax ing +Ġqu art +al most +Ġguard ed +ĠSold iers +ĠPL AY +Ġout going +L AND +Ġre write +ĠM OV +ĠIm per +ĠS olution +Ġphenomen al +Ġl ongevity +Ġimp at +ĠN issan +ir ie +Ġod or +ĠZ ar +ok s +Ġmilit ias +ĠSP EC +Ġtoler ated +ars er +ĠBrad ford ++ , +Ġsur real +s f +Can adian +Ġresemb lance +Ġcarbohyd rate +VI EW +Ġaccess ory +me al +larg est +ieg el +Some one +Ġtoug hest +os o +Ġfun nel +Ġcondemn ation +lu ent +Ġw ired +ĠSun set +Jes us +ĠP ST +ĠP ages +ĠTy coon +ĠP F +Ġselect ions +Ġ ठ+part isan +Ġhigh s +ĠR une +Ġcraft s +le ad +ĠParent s +Ġre claim +ek er +ĠAll ied +ae per +Ġlo oming +Ġbenefic iaries +ĠH ull +Stud ents +Jew ish +d j +Ġp act +tem plate +ĠOffic ials +ĠBay lor +Ġhe mp +Ġyouth s +ĠLevel s +ĠX iao +ĠC hes +Ġende avor +ĠRem oved +Ġhipp ocamp +H ell +ãĤ Ĭ +80 5 +Ġd inosaur +ĠWr ath +ĠIndones ian +Ġcalcul ator +ĠD ictionary +Ġ4 20 +ĠM AG +( _ +! 
, +t arians +Ġrestrict ing +rac use +Ġweek day +OU NT +Ġsh rugged +leg round +Ġb ald +ĠDo ctors +Ġt outed +ĠMax well +Ġ2 14 +Ġdiplom at +Ġrep ression +Ġconstitu ency +v ice +r anked +ĠNap oleon +g ang +ĠFore ver +t un +Ġbul b +ĠPD T +ĠC isco +V EN +Ġres umed +Ste ven +ĠManit oba +Ġfab ulous +ĠAg ents +19 84 +Ġam using +ĠMyster ies +Ġor thodox +fl oor +Ġquestion naire +Ġpenet rate +Ġfilm makers +ĠUn c +Ġst amped +Ġth irteen +Ġout field +Ġforward ed +Ġapp ra +Ġa ided +t ry +Ġunf ocused +ĠL iz +ĠWend y +ĠSc ene +Ch arg +Ġreject s +Ġleft ist +ĠProv idence +ĠBr id +reg n +Ġprophe cy +ĠL IVE +4 99 +Ġfor ge +ĠF ML +Ġintrins ic +ĠF rog +Ġw ont +ĠH olt +Ġfam ed +CL US +aeper nick +ĠH ate +ĠC ay +Ġregister ing +ort ality +rop y +ocaly ptic +a an +n av +Ġfasc ist +IF IED +Ġimpl icated +ĠRes ort +ĠChand ler +ĠBr ick +P in +ys c +Us age +ĠHel m +us ra +âĺħ âĺħ +ĠAb bas +Ġunanim ously +Ġke eper +Ġadd icted +?? ? +Ġhelm ets +Ġant ioxid +aps ed +80 8 +gi ene +Ġwa its +Ġmin ion +ra ved +ĠP orsche +Ġdream ing +Ġ17 1 +ĠC ain +Ġun for +ass o +ĠConfig uration +k un +hard t +Ġn ested +ĠL DS +L ES +Ġt ying +en os +Ġc ue +ĠMar qu +sk irts +Ġclick ed +Ġexp iration +ĠAccording ly +ĠW C +Ġbless ings +Ġaddict ive +ĠN arr +y x +ĠJagu ars +Ġrent s +ĠS iber +Ġt ipped +ous se +ĠFitz gerald +Ġhier arch +out ine +Ġwa velength +> . +ch id +ĠProcess ing +/ + +r anking +E asy +ĠConst ruct +Ġt et +ins ured +H UD +Ġqu oting +Ġcommun icated +in x +Ġin mate +Ġerect ed +ĠAbs olutely +ĠSure ly +Ġun im +ĠThr one +he id +Ġcl aws +Ġsuper star +ĠL enn +ĠWh is +U k +ab ol +Ġsk et +ĠN iet +Ġper ks +Ġaff inity +Ġopen ings +phas is +Ġdiscrim inate +T ip +v c +Ġgr inding +ĠJenn y +Ġast hma +hol es +ĠHom er +Ġreg isters +ĠGl ad +Ġcre ations +Ġlith ium +Ġappl ause +unt il +Just ice +ĠTur ks +Ġsc andals +Ġb ake +t ank +M ech +ĠMe ans +ĠM aid +Republic ans +is al +wind ows +ĠSant os +Ġveget ation +33 8 +t ri +Ġfl ux +ins ert +Ġclar ified +Ġmort g +ĠCh im +ĠT ort +Ġdiscl aim +met al +ĠAs ide +Ġindu ction +Ġinf l +Ġathe ists +amp h +Ġe ther +ĠV ital +ĠBu ilt +M ind +Ġweapon ry +S ET +Ġ18 6 +ad min +g am +cont ract +af a +Ġderiv atives +Ġsn acks +Ġch urn +E conom +Ġca pped +ĠUnder standing +ĠH ers +ĠI z +Ġd uct +I ENT +augh ty +Ġâľ Ķ +ĠN P +Ġsa iling +In itialized +Ġt ed +Ġreact ors +ĠL omb +Ġcho ke +ĠW orm +Ġadm iration +Ġsw ung +ens ibly +Ġr ash +ĠGo als +ĠImport ant +Sh ot +ĠR as +Ġtrain ers +ĠB un +Work ing +Ġhar med +ĠPand ora +ĠL TE +Ġmush room +ĠCH AR +ĠF ee +ĠM oy +B orn +ol iberal +ĠMart ial +Ġgentle men +Ġling ering +Offic ial +Ġgra ffiti +ĠN ames +D er +Ġqu int +ist rate +aze era +ĠNOT ICE +ĠFlore nce +Ġpay able +Ġdep icts +ĠSpe cies +He art +âĶĢâĶĢâĶĢâĶĢ âĶĢâĶĢâĶĢâĶĢ +Ġencl osed +Incre ases +D aily +ĠL is +Ġenact ment +ĠB acon +ĠSt eele +dem and +Ġ18 3 +Ġmouth s +Ġstr anded +Ġenhance ment +01 1 +ĠWh ats +Ġhe aled +en y +ĠR ab +Ġ3 40 +ĠLab yrinth +ro ach +ĠY osh +ĠCl ippers +Ġconcert s +Intern et +35 5 +Ġstick ers +Ġter med +ĠAx e +Ġgrand parents +Fr ance +ĠCl im +ĠU h +ul ic +Ġthr ill +cent ric +ĠOver view +ĠCond uct +Ġsubstant ive +Ġ18 2 +m ur +Ġstr ay +ĠCo ff +Ġrep etitive +ĠFor gotten +Ġqual ification +ew itness +ĠZ imbabwe +Ġsim ulated +ĠJ D +25 3 +ĠW are +Ġun sc +T imes +Ġsum mons +Ġdis connected +Ġ18 4 +ci us +ĠGu jar +od ka +Ġer ase +ĠTob acco +elect ed +Ġun cont +ĠShe pard +ĠL amp +Ġalert ed +Ġoper ative +arn a +u int +Ġneglig ence +ac ements +Ġsup ra +Ġprev ail +ĠSh ark +Ġbel ts +ãģ « +Ġt ighter +Engine ers +Ġin active +Ġexp onent +ĠWill ie +a ples +Ġhe ir +ĠH its +ian n +ĠS ays +Ġcurrent s +ĠBeng al +Ġar ist +B uffer +Ġbree 
ze +ĠWes ley +Col a +Ġpron oun +Ġde ed +ĠK ling +Ġof t +Ġinf lict +Ġpun ishing +Ġn m +ik u +OD UCT +01 4 +Ġsubsid y +ĠDE A +ĠHer bert +ĠJ al +B ank +Ġdef erred +Ġship ment +B ott +Ġal le +b earing +HT ML +Off line +Ġ2 13 +Ġscroll ing +Ġsc anned +ĠLib yan +ĠT OP +ch rom +d t +col umn +Psy NetMessage +Z ero +Ġtor so +0 50 +âķ IJ +Ġimp erson +ĠSchw artz +ud ic +Ġpiss ed +ĠS app +25 7 +ĠIS Ps +og l +Ġsuper vised +Ġad olescent +Ġatt ained +ĠDel ivery +ĠB unny +Ġ19 37 +Ġmini ature +Ġo s +Ġ3 70 +60 8 +ĠMour inho +Ġinn ate +Ġtem po +ĠN M +ĠFall en +00 9 +Ġprov ocative +Stream er +ĠBened ict +ĠBol she +Ġt urtle +ĠPC B +ĠEqu al +Direct or +ĠR end +Ġflu ids +Author ities +Ġcous ins +requ ency +ĠNeigh bor +s ets +sh ared +Char les +pass word +Ġg ears +Ġ2 11 +ĠHard ware +ri ka +Ġup stream +H om +Ġdisproportion ately +iv ities +Ġund efined +Ġelect rons +Ġcommem or +Event ually +Ġ> < +Ġir responsible +2 18 +ĠRe leased +ĠO VER +ĠI GN +ĠB read +st ellar +ĠS age +tt ed +dam age +ed ition +ĠPre c +Ġl ime +Ġconf inement +Ġcal orie +we apon +Ġdiff ering +ĠS ina +m ys +am d +Ġintric ate +k k +ĠP AT +ã o +st ones +lin ks +Ġr anch +Sem itic +Ġdifferent iate +ĠS inger +occup ied +Ġfort ress +c md +Ġinter ception +ĠAnk ara +Ġre pt +ĠSol itaire +Ġrem ake +p red +Ġd ared +aut ions +ĠB ACK +Run ning +Ġdebug ging +Ġgraph s +3 99 +ĠNig el +Ġb un +Ġpill ow +Ġprog ressed +fashion ed +Ġob edience +ER N +Ġrehe ars +C ell +t l +S her +Ġher ald +ĠPay ment +ĠC ory +ĠDe pt +Ġrep ent +ĠWe ak +uck land +Ġple asing +Ġshort ages +Ġjur ors +ĠK ab +q qa +Ant i +Ġw ow +ĠRC MP +Ġt sun +ĠS ic +Ġcomp rises +Ġsp ies +Ġprec inct +n u +Ġur ges +Ġtim ed +Ġstrip es +ĠB oots +Ġy en +Adv anced +Ġdisc rete +ĠArch angel +employ ment +D iff +Ġmon uments +Ġ20 9 +work er +Ġ19 6 +ĠI g +utter stock +T PS +J ac +Ġhomeless ness +Ġcomment ator +Ġrac ially +f ing +se ed +E le +ell ation +Ġeth anol +Ġpar ish +ĠD ong +ĠAw akening +Ġdev iation +ĠB earing +ĠTsu k +Ġrec ess +Ġl ymph +ĠCann abis +å ľ +ĠNEW S +Ġd ra +ĠStef an +ĠWr ong +ĠS AM +Ġloose ly +Ġinterpre ter +ĠPl ain +Go vernment +Ġbigot ry +Ġgren ades +ave z +pict ured +Ġmand ated +ĠMon k +ĠPed ro +Ġl ava +27 4 +Ġcyn ical +ĠScroll s +l ocks +M p +Ġcon gregation +orn ings +ph il +ĠI bid +Ġf erv +Ġdisapp earing +Ġarrog ant +sy n +ĠMa ver +ĠSu it +24 1 +Ġab bre +ack ers +P a +ĠY el +Whe never +Ġ23 5 +ĠV ine +ĠAn at +Ġext inct +LE T +Ġexecut able +V ERS +ox ide +D NA +ĠP rel +Ġresent ment +Ġcompr ise +ĠAv iv +Ġinter ceptions +Ġprol ific +IN A +ĠEr in +though t +2 19 +ĠPsychiat ry +un ky +chem ist +H o +ĠMcC oy +Ġbr icks +L os +ri ly +ĠUS SR +Ġr ud +Ġl aud +ĠW ise +ĠEmer ald +Ġrev ived +Ġdam ned +ĠRep air +id em +ct ica +Ġpatri arch +ĠN urs +me g +Ġcheap est +re ements +empt y +ĠCele br +Ġdepri vation +ch anted +ĠTh umbnails +E nergy +ĠEth an +ĠQ ing +Ġopp oses +W IND +v ik +ĠM au +ĠS UB +66 7 +G RE +ĠVol unte +nt on +C ook +å IJ +es que +Ġplum met +Ġsu ing +Ġpron ounce +Ġresist ing +ĠF ishing +ĠTri als +Ġy ell +Ġ3 10 +Ġin duct +Ġpersonal ized +oft en +R eb +EM BER +Ġview point +Ġexist ential +() ) +rem ove +MENT S +l asses +Ġev apor +Ġa isle +met a +Ġreflect ive +Ġentit lement +Ġdev ised +mus ic +asc ade +Ġwind ing +off set +Ġaccess ibility +ke red +Bet ter +ĠJohn ston +th inking +S now +ĠCroat ia +ĠAt omic +27 1 +34 8 +Ġtext book +ĠSix th +Ġ اÙĦ +Ġsl ider +ĠBur ger +b ol +S ync +Ġgrand children +Ġc erv ++ ) +Ġe ternity +Ġtweet ing +Ġspec ulative +Ġpiv otal +ĠW P +ĠT ER +ynam ic +Ġu pl +ĠC ats +per haps +Ġclass mates +Ġblat ant +' - +Ġl akh +ant ine +ĠB org +i om +/ ( +ĠAthlet ic +Ġs ar +OT A +ĠHoff man 
+Never theless +Ġad orable +Ġspawn ed +Ass ociated +ĠDom estic +Ġimpl ant +ĠLux em +ĠK ens +Ġp umps +ĠS AT +Att ributes +50 9 +av our +Ġcentral ized +ĠT N +Ġfresh ly +ĠA chieve +Ġouts iders +her ty +ĠRe e +ĠT owers +ĠD art +ak able +Ġm p +ĠHeaven ly +Ġr ipe +ĠCarol ine +ry an +Ġclass ics +Ġret iring +Ġ2 28 +Ġa h +Ġdeal ings +Ġpunch ing +ĠChap man +O ptions +max well +vol ume +Ġst al +Ġex ported +ĠQu ite +Ġnumer ical +B urn +F act +ĠKey stone +Ġtrend ing +Ġalter ing +ĠAfric ans +47 8 +ĠM N +ĠKn ock +Ġtempt ation +Ġprest ige +Over view +ĠTrad itional +ĠBah rain +Priv ate +ĠH OU +Ġbar r +ĠT at +C ube +US D +ĠGrand e +ĠG at +ĠFl o +Ġres ides +Ġind ec +vol ent +Ġperpet ual +ub es +Ġworld view +ĠQuant um +Ġfil tered +Ġen su +orget own +ERS ON +ĠM ild +37 9 +OT T +à ¥ +Ġvit amins +Ġrib bon +Ġsincere ly +ĠH in +Ġeight een +Ġcontradict ory +Ġgl aring +Ġexpect ancy +Ġcons pir +Ġmon strous +Ġ3 80 +re ci +Ġhand ic +Ġpump ed +Ġindic ative +Ġr app +Ġav ail +ĠLEG O +ĠMar ijuana +19 85 +ert on +Ġtwent ieth +################ ################ +ĠSw amp +Ġval uation +Ġaffili ates +adjust ed +ĠFac ility +26 2 +Ġenz ymes +itud inal +Ġimp rint +S ite +Ġinstall er +ĠT RA +m ology +lin ear +ĠCollect ive +ig ating +ĠT oken +Ġspec ulated +K N +ĠC ly +or ity +Ġdef er +Ġinspect ors +appro ved +R M +ĠSun s +Ġinform ing +ĠSy racuse +ib li +7 65 +Ġgl ove +Ġauthor ize +âĢ¦âĢ¦âĢ¦âĢ¦ âĢ¦âĢ¦âĢ¦âĢ¦ +ĠCru ise +Ġcontract ing +she ll +IF E +ĠJew el +p ract +ĠPhot oshop +ĠKnow ing +h arm +Ġattract ions +ad an +et us +01 8 +w agen +Al t +Ġmultip ly +Ġequ ilibrium +: { +ĠF ighters +ĠEd gar +Ġfour teen +Go vern +Ġmis use +Ġab using +Ġancest ry +ram er +64 4 +Ġwor ms +Ġthick er +ĠComb ine +Ġpeas ants +Ġv ind +Ġcon quest +Ġm ocked +Ġc innamon +ĠC ald +ĠGall up +Ġavoid ance +Ġincarn ation +ĠStr at +Ġt asted +ent a +ĠN eal +p ared +Ġtermin ology +ject ion +Scient ists +ĠIN S +ĠDe e +Ġdirect ories +R oad +ĠSh ap +br ight +ĠDirect ors +ĠCol umn +Ġb ob +Ġprefer ably +Ġgl itch +f urt +Ġe g +id is +C BC +Ġsur rendered +Ġtest ament +33 6 +ug gest +ĠN il +an other +Ġpat hetic +ĠDon na +Ġ2 18 +ĠA very +Ġwhis key +Ġf ixture +ĠCon quest +Ġbet s +O cc +ĠLe icester +] ." 
+Ġ) ); +Ġfl ashes +45 6 +Ġmask ed +ge bra +Ġcomput ed +che l +aud er +Ġdefe ats +ĠLiber ation +ĠOs ama +ĠV ive +Ch anges +Ch annel +Ġtar iffs +Ġm age +ĠS ax +Ġinadvert ently +ĠC RE +ĠRe aper +ink y +gr ading +Ġstere otyp +Ġcur l +ĠF ANT +Ġfram eworks +M om +ĠAn ch +Ġflav our +car bon +Ġperm itting +let cher +ĠMo zilla +ĠPark ing +ĠCh amp +Sc roll +Ġmurd erer +Ġrest ed +Ġow es +ĠP oss +AD D +IF F +res olution +ĠMin ing +Ġcompar ative +D im +Ġneighbour ing +ĠA ST +ĠT oxic +Ġbi ases +Ġgun fire +ur ous +ĠMom ent +19 83 +Ġper vasive +tt p +ĠNorm ally +r ir +S arah +ĠAlb any +Ġun sett +ĠS MS +ip ers +l ayer +ĠWh ites +up le +Ġtur bo +ĠLe eds +Ġthat s +ĠMin er +M ER +ĠRe ign +Ġper me +ĠBl itz +Ġ19 34 +Ġintimid ating +t ube +Ġecc entric +ab olic +box es +ĠAssoci ates +v otes +Ġsim ulate +um bo +aster y +Ġship ments +FF FF +an th +Ġseason ed +Ġexperiment ation +âĸ ł +law s +Me et +idd les +ant ics +R ating +IS IS +h ift +Ġfront s +b uf +01 7 +Ġun att +ĠD il +le ases +ĠGard ens +77 7 +t ouch +ve ll +45 8 +Ġ= ==== +s aving +Ġer osion +ĠQu in +Ġearn s +Ġaccomplish ment +ĠWe i +Ġ< [ +____ _ +Ġir rig +ĠT eddy +Ġconqu ered +ĠArm ored +Ġassert s +Ġmanip ulating +r é +Ġtranscript s +G allery +Ġplot ting +Ne il +Ġbetray al +load er +ĠS ul +Ġdispl acement +Ġroy alty +ĠW I +he it +ĠDev ices +alle l +Ġmunicipal ities +Ġcan al +St ars +ĠU AE +Ġ" âĢ¦ +ĠC U +ab ove +Ġreson ance +ĠguiActive Un +add ed +ĠBra ves +ĠI bn +Ġhere by +ĠB RE +Ġshare holder +ĠH ir +ĠJ i +Ġstrange ly +Ġadm ired +Ġpl ight +Ġb achelor +ĠP ole +cipl inary +T ony +ĠArmen ian +Ġun man +ĠZion ist +St age +isco ver +Ġautom otive +Ġs idelines +Ġsl ick +ĠRena issance +ĠF UN +Im ages +ĠH aj +Ġp ing +Ġshort cut +ĠBl vd +ĠLook s +Ġbur sts +Ġcl amp +Ġm ish +Ġsort ing +Ġpatri ot +Ġcorrect ness +ĠScand inav +ĠCaval iers +p ython +az ar +Ġ3 75 +ĠJa une +40 9 +Ġdetrim ental +Ġstab bing +Ġpoison ed +Ġf ountain +oc ent +or st +ĠMar i +Ġr ains +ĠO vers +ĠInst itution +ud get +AM Y +t ale +ĠK R +ĠPr ices +Ġhead aches +Ġlands l +ĠA ura +Bon us +ĠZ hao +ĠH ip +Ġhop s +ĠKurd istan +Ġexplo iting +ry n +Ġhypocr isy +op ening +Ġgun shot +Ġw ed +inter stitial +Inter stitial +Ġam en +Bre aking +Ġmarket ed +W ire +ĠC rowd +Contin ue +ĠK nown +ĠEffect ive +ore an +iz ons +Jose ph +Ġescal ation +us ername +Ġcur tain +AT ES +ĠP AR +ĠM iy +Ġcounter fe +l ene +Ġcont enders +d aily +ĠAs c +ĠPhill ip +most ly +Ġfil ename +he ne +Ġresemb ling +Ġst aging +ĠCh loe +Ġw iring +H on +ĠRen ew +ott age +ĠHy brid +m uch +Ġstro kes +Ġpolicy makers +AP TER +ĠArk ham +pl ot +Ġassist ants +Ġde port +ĠSe ga +Ġinflu enza +ĠC ursed +ĠK obe +Ġskin ny +Prov ider +ĠR ip +Ġincrement al +product s +B F +Ġd ome +ĠC redits +Ġlos ers +int s +ĠBet ty +ĠTal ent +ĠD AM +L v +E ss +Ġd ens +tem p +J udge +od ic +Ġ' ( +UR ES +ets k +V O +Ġretrie ved +Ġarchitect s +Ù ĩ +Ġeth ic +ĠSecond ary +st ocks +ad ia +Ġ3 25 +ĠOp inion +Ġsimultane ous +Ġd izz +ul p +Ġsmugg ling +ipp ery +R andom +f acing +ĠD as +Ġstock p +Ġdiscl osures +po inter +Ġcor al +ĠSe lection +ĠP ike +ival ent +Ġruth less +ĠR im +Ġensu ing +ĠExper iment +Ġcongress man +Ġbelie ver +Ġun specified +ĠM ord +Ġknowledge able +ĠV ERY +T X +Ġstra ps +Ġtur f +apesh ifter +Ġmar ital +Ġfl ock +ãģ Ĩ +26 3 +AM ES +ĠOpp osition +Ġtre asures +ĠG OD +Ġmodel ed +ĠWOR LD +Ġ( [ +ĠUs age +H F +Ġ$ ( +uss ed +Ġpione er +E ight +par se +b read +rit z +ĠMir anda +ĠK ant +++ ) +ore n +Ġprov oked +Ġbre eds +ĠIn cludes +ĠPast ebin +ĠFl ip +J ava +Ġbr ink +Ġrum ored +Ġun seen +Ġgar nered +ĠDef in +al ted +Ġtatt oos +Ġhes itation +is itions +ĠWe aver +ĠReport ing +Ġtherap 
ies +Ġconsult ants +Ġresid ual +ĠMal i +ĠRom a +i ago +ĠRes idents +ub i +Ġremed ies +Ġadapt ive +ĠAl ive +ĠBar cl +Ġwal lets +c rypt +etermin ation +ĠPel osi +Ġsl ipping +oton in +Ġall iances +pat rick +ir is +Ġor th +ĠPer kins +ĠDe V +ĠG ets +Ġdry ing +ge e +fore st +ĠFor get +ore m +33 9 +Ġvague ly +ĠD ion +ĠP orn +ĠH OW +Ġp neum +Ġrub ble +ĠT aste +enc ia +ĠG el +Ġd st +Ġ24 5 +ĠMoroc co +inf lamm +ĠTw ins +Ġb ots +d aughter +ĠB alk +Ġbre thren +Ġlog os +Ġgo bl +f ps +Ġsub division +Ġp awn +Ġsquee zed +Ġmor ale +ĠD W +' " +Ġkn ot +ook y +Ġdiv isive +Ġboost ed +ch y +ãĥ IJ +if act +Ġnewcom ers +ĠWrest ling +Ġsc outs +w olves +R at +Ġnin eteenth +ĠOs borne +St ats +Ġem powered +Ġpsych opath +ĠO EM +ugg age +ĠP K +ĠMoh ammad +P ak +Ġanarch ists +ĠExt ract +est hes +ĠStock holm +l oo +ĠG raph +Ġdeploy ing +ĠStr anger +ĠM old +Ġstaff er +Ġdiscount ed +uck le +ple ase +ĠLand ing +ÃŃ a +Ġ19 3 +Ġan te +Ġrep etition +Ġ+ /- +Ġpar ody +Ġlive ly +AA A +ĠHor us +Ġp its +ind ers +L OC +ĠVen ice +40 6 +ĠDis cover +â Ĩ +ellect ual +Ġp ens +Ġey el +ig uous +Im pl +Ġj oking +Ġinv al +ĠBel fast +Ġcredit ors +ĠSky walker +ov sky +Ġcease fire +Ġse als +is oft +) ). +ĠFel ix +IT S +Ġt resp +ĠBlock chain +ew are +ĠSch war +en ne +mount ed +ĠBe acon +les h +Ġimmense ly +Ġche ering +Em ploy +sc ene +ish ly +atche wan +ĠNic olas +Ġdr ained +ĠEx it +ĠAz erb +j un +Ġflo ated +u ania +De ep +Ġsuper v +Ġmyst ical +ĠD ollar +ĠApost le +ĠR EL +ĠProv ided +ĠB ucks +ãĥ ´ +cut ting +Ġenhance ments +ĠPengu ins +ĠIsa iah +Ġj erk +ĠW yn +Ġst alled +Ġcryptoc urrencies +ĠR oland +sing le +Ġl umin +ĠF ellow +ĠCap acity +ĠKaz akh +W N +Ġfin anced +38 9 +Ġt id +Ġcoll usion +ĠMy r +î Ģ +Sen ator +Ġped iatric +Ġneat ly +Ġsandwic hes +ĠArchitect ure +Ġt ucked +Ġbalcon y +Ġearthqu akes +qu ire +F uture +Ġhe fty +é Ĺ +Ġspecial izes +Ġstress es +Ġs ender +Ġmisunder standing +Ġep ile +Ġprov oke +ĠCol ors +Ġdis may +uk o +[ _ +58 6 +ne utral +Ġdon ating +ĠRand all +Mult i +Ġconvenient ly +ĠS ung +ĠC oca +Ġt ents +ĠAc celer +Ġpart nered +27 2 +ir ming +ĠB AS +s ometimes +Ġobject ed +ub ric +p osed +LC S +gr ass +Ġattribut able +V IS +Israel i +Ġrepe ats +ĠR M +v ag +ut a +in ous +Ġin ert +ĠMig uel +æ Ń +ĠHawai ian +B oard +Ġart ific +ĠAzerb ai +as io +ĠR ent +A IN +Ġappl iances +Ġnational ity +Ġass hole +ĠN eb +Ġnot ch +h ani +ĠBr ide +Av ailability +Ġintercept ed +Ġcontin ental +Ġsw elling +ĠPers pect +b ies +. < +ith metic +ĠL ara +Ġtempt ing +add r +Ġoversee ing +cl ad +ĠD V +ĠGing rich +Ġm un +ĠApp ropri +Ġalter ations +ĠPat reon +Ġha voc +Ġdiscipl ines +Ġnotor iously +aku ya +ier i +? ). 
+ĠW ent +Ġsil icon +Ġtre mb +Cont ainer +K nown +Ġmort ar +est e +ick a +Ar thur +ĠPre viously +ĠMart y +Ġsp arse +g ins +Ġin ward +ĠParticip ant +C opy +ĠM isc +Ġantib iotic +ĠRet ro +Ġel usive +Ġass ail +ĠBatt alion +ĠB ought +Ġdimin ish +ĠEuro pa +s ession +ĠDanger ous +ies el +Ġdisbel ief +Ġbl asts +ext reme +ĠBoy d +ĠProject s +ĠGu ys +Ġunder gone +Ġgr ill +ĠDw ight +Ġ19 7 +US ER +Ġfiles ystem +Ġcl ocks +T aylor +Ġwra pper +Ġfold ing +ous and +ĠPhilipp ine +ATION AL +ĠPer th +Ġas hes +Ġaccum ulate +ĠGate way +Sh op +orks hire +H an +ĠBar rel +ĠLe h +ĠX V +Ġwh im +Ġrep o +ĠC G +ĠM am +Ġincorpor ating +Ġbail out +Ġlingu istic +Ġdis integ +C LE +Ġcinem atic +ĠF iber +S yn +il ion +ĠCom pos +c hens +Ġne oc +Ġbo iled +F INE +on o +un cle +ik en +ĠB M +Î ¹ +Ġreceipt s +Ġdisp osed +ĠTh irty +ĠR ough +ĠA BS +Ġnot withstanding +oll en +# $ +Ġunrel iable +Ġbl oom +Ġmedi ocre +Ġtr am +ĠTas man +Ġsh akes +Ġmanifest o +ĠM W +Ġsatisf actory +Ġsh ores +Ġcomput ation +Ġassert ions +orm ons +ar ag +ab it +Dem ocrats +ĠL oot +ĠVol ks +ha ired +Ġgrav itational +S ing +ĠM iz +Ġthro ttle +Ġtyr anny +ĠView s +Ġrob ber +ĠMinor ity +Ġsh rine +sc ope +pur pose +Ġnucle us +our cing +ĠUS DA +ĠD HS +w ra +ĠBow ie +Sc ale +ĠB EL +x i +I ter +Ġ( ), +w right +Ġsail ors +ous ed +NAS A +ĠPro of +ĠMin eral +t oken +ĠF D +R ew +Ġe ll +6 30 +Ġchance llor +ĠG os +Ġamount ed +ĠRec re +ome z +ĠOpt im +ĠOl ive +Ġtrack er +ow ler +ĠUn ique +R oot +Ġmar itime +ĠQur an +ĠAd apt +Ġecosystem s +ĠRe peat +ĠS oy +ĠI MP +Ġgrad uating +and em +P ur +ĠRes et +ĠTr ick +ĠPh illy +ĠT ue +ĠMalays ian +Ġclim ax +Ġb ury +Ġcons pic +ĠSouth ampton +ĠFl owers +Ġesc orted +ĠEduc ational +ĠI RC +Ġbrut ally +e ating +Ġpill ar +ĠS ang +ĠJ ude +ar ling +ĠAm nesty +Ġrem inding +ĠAdminist rative +hes da +Ġfl ashed +ĠP BS +per ate +fe ature +Ġsw ipe +Ġgra ves +oult ry +26 1 +bre aks +ĠGu er +Ġsh rimp +ĠV oting +qu ist +Ġanaly tical +Ġtables poons +ĠS OU +Ġresear ched +Ġdisrupt ed +Ġj our +Ġrepl ica +Ġcart oons +b ians +} ) +c opy +G ot +ou ched +P UT +Ġsw arm +not ations +s aid +Ġreb uilt +Ġcollabor ate +Ġr aging +Ġn ar +Ġdem ographics +ĠD DR +Ġdist rust +oss ier +ĠK ro +Ġpump kin +Ġreg rets +Ġfatal ities +ĠL ens +ĠO le +p d +Ġpupp et +ĠOut look +ĠSt am +O l +F air +U U +Ġre written +Ä ± +Ġfasc inated +Ġve ctors +Ġtrib unal +u ay +ĠM ats +ĠCo ins +[ [ +Ġ18 1 +Ġrend ers +ĠK aepernick +Ġesp ionage +Ġsum m +Ġd itch +Acc ount +Ġspread sheet +Ġmut ant +p ast +40 7 +Ġd ye +Ġinit iation +Ġ4 000 +Ġpunish able +Ġth inner +ĠKh al +Ġinter medi +D un +ĠGoth am +Ġeager ly +Ġvag inal +p owers +V W +ĠWATCH ED +Ġpred ator +ams ung +Ġdispar ity +Ġ[ * +Ġam ph +Ġout skirts +ĠSpir its +Ġskelet al +Ð » +ĠR ear +Ġissu ance +ĠLog ic +re leased +Z Z +ĠB ound +Ent ry +Ġex its +is ol +ĠFound er +Ġw re +ĠGreen land +ĠM MO +t aker +IN C +ãģ ¾ +Ġhour ly +hen ko +Ġfantas ies +Ġdis ob +Ġdemol ition +ãĥ ĭ +Ġen listed +rat ulations +Ġmis guided +Ġens ured +Ġdiscour aged +m ort +Ġfl ank +Ġc ess +Ġreact s +ĠS ere +s ensitive +ĠSer pent +ass ad +Ġ24 7 +Ġcalm ly +b usters +Ġble ed +ĠSt ro +Ġamuse ment +ĠAntar ctica +Ġs cept +ĠG aw +a q +ason ic +Ġsp rawling +n ative +atur ated +ĠBattle field +IV ERS +E B +ĠG ems +ĠNorth western +ĠFil ms +ĠAut omatic +Ġappre hend +ãģ ¨ +Ġgui Name +Ġback end +Ġevid enced +ge ant +01 2 +ĠS iege +Ġexternal To +Ġunfocused Range +ĠguiActiveUn focused +Ġgui Icon +ĠexternalTo EVA +ĠexternalToEVA Only +F ri +ch ard +en aries +Ġchief s +Ġc f +ĠH UD +Ġcorro bor +Ġd B +ĠT aken +ĠPat ricia +ra il +ĠCh arm +ĠLiber tarian +rie ve +Person al +ĠO UR +ger ies +Ġdump ing 
+Ġneurolog ical +it imate +ĠClint ons +raft ed +ĠM olly +Ġtermin als +reg ister +Ġfl are +Ġenc oded +Ġautop sy +p el +m achine +Ġexempt ions +ĠRoy als +d istance +Ġdraft s +Ġl ame +ĠC unning +Ġsp ouses +ĠMark ets +ĠCar rier +Ġimp lying +ĠY ak +s id +Ġl oser +Ġvigil ant +Ġimpe achment +Ġaug mented +ĠEmploy ees +Ġunint ended +tern ally +ĠW att +Ġrecogn izable +ess im +æ Ŀ +Ġco ated +r ha +Ġlie utenant +ĠLegisl ation +pub lished +44 4 +01 3 +Ġide ally +ĠPass word +Ġsimpl ify +ĠMet a +ĠM RI +Ġple ading +organ ized +hand ler +Ġun ravel +cor rect +Ġ icy +Ġparan oid +Ġpass er +Ġinspect ions +of er +ĠHealth care +28 3 +ĠBr ut +iol a +for ge +ĠMed ieval +MS N +ie vers +ĠProgram ming +å ī +Ġ2 23 +m u +ĠC LE +ug a +Ġsho ppers +Ġinform ative +ĠPl ans +Ġsupplement ation +ĠT ests +ty ard +ocy tes +ĠVeg a +ĠGujar at +erman ent +Ex cept +ĠL OT +all a +ĠC umm +ĠO sw +Ġven om +ĠDeb t +ĠD OWN +Ġreun ion +Ġm uc +ĠRel ief +Ġge op +ĠðŁ ĺ +al ogue +An th +ech o +Ġcor ros +Ġrepl ication +ĠBl azing +ĠD aughter +Ġinf lic +ĠLind sey +Ù Ī +28 4 +Ex it +Ġgl oom +TA IN +Ġundermin ing +Ġadv ising +h idden +Ġover flow +Ġg or +urd ue +Ġe choes +enh agen +Ġimp uls +d rug +c ash +Ġas ync +Ġmir ac +at ts +p unk +Ġpiv ot +ĠLegisl ative +Ġblog gers +ĠCl aw +s burg +d yl +ĠRecomm end +Ġver te +Ġprohib iting +ĠPant her +Jon athan +Ġo min +Ġhate ful +28 1 +ĠOr che +ĠMurd och +down s +Ġas ymm +G ER +Al ways +Ġinform s +ĠW M +ĠP ony +ĠApp endix +ĠAr lington +J am +Ġmedic inal +ĠS lam +IT IES +Ġre aff +ĠR i +F G +S pring +b ool +Ġthigh s +Ġmark ings +ĠRa qqa +ĠL ak +p oll +ts ky +ĠMort y +ĠDef inition +Ġdeb unk +end ered +ĠLe one +a vers +Ġmortg ages +App arently +N ic +ha us +ĠTh ousands +au ld +Ġm ash +sh oot +Ġdi arr +Ġconscious ly +H ero +e as +ĠN aturally +ĠDestroy er +Ġdash board +serv ices +R og +Ġmillenn ials +Ġinv ade +- ( +Ġcomm issions +ĠA uckland +Ġbroadcast s +Ġfront al +Ġcr ank +ĠHist oric +Ġrum ours +CT V +Ġster il +Ġboost er +rock et +ãĤ ¼ +ut sche +ĠP I +Ġ2 33 +ĠProdu cer +ĠAnaly tics +Ġinval uable +Ġunint ention +ĠC Y +Ġscrut in +Ġg igg +Ġeng ulf +Ġprolet ariat +Ġh acks +ĠH ew +ar ak +ĠSl ime +ield ing +ag her +ĠEll iot +Ġtele com +Ġ2 19 +ult an +ĠAr bor +ĠSc outs +B an +Ġlifes pan +Ġbl asp +38 8 +Ġjud iciary +ĠContin ental +ask ing +Mc C +L ED +Ġbag gage +ĠSorce rer +Ġrem nants +ĠGriff ith +ets u +ĠSub aru +ĠPerson ality +des igned +ush ima +agn ar +Ġrec oil +Ġpass ions +\ ": +Ġte e +Ġabol ition +ĠCreat ing +j ac +Ġ19 4 +01 9 +Ġpill ars +ric hed +/ " +t k +Ġlive lihood +Ġro asted +ah on +ĠH utch +ass ert +Ġdivid end +Ġkn it +Ġd aunting +Ġdisturb ance +Ġsh ale +Ġcultiv ated +Ġrefriger ator +L B +ĠN ET +Ġcommercial s +Ġthink ers +45 5 +Ġch op +B road +Ġsuspic ions +Ġtag ged +l ifting +Ġsty lish +ĠShield s +Short ly +Ġt ails +A uth +ST E +ĠG AME +Ġse ism +ĠK is +olog ne +Ġcow ork +Ġforc ibly +Ġthy roid +ĠP B +AN E +mar ried +h orse +Ġpoly mer +ĠCh al +od or +DE BUG +ĠCon text +Ġbl iss +Ġpin point +ĠMat hemat +leg ram +ĠWeek end +Ġlab elled +Ġb art +it les +Ġest rogen +âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ +" ' +Ġvis ibly +Ġouts ider +aid a +Are a +Ġdisse min +Ġdish onest +ĠCl osed +ĠBullet in +ĠRam sey +sw ord +ĠX I +our ced +S ame +34 6 +ĠRe pe +ĠK ou +c ake +em is +C ache +ĠMe aning +ĠEn light +onom y +Ġmanifest ation +sw orth +J ay +Ġch ore +ö r +D ream +Ġsanction ed +Ġcult urally +ĠA ra +N av +Ġthe ological +Ġstr ut +ĠV O +ĠHand book +Ġconstruct ing +Ġ ¶ +ĠBenef its +ĠPsych ological +s ac +å ¸ +p olicy +ĠMat ters +ĠReport ed +ĠBy te +Ġvit ro +ĠM aiden +Ġl am +ĠJenn ings +Ġgar ment +ĠRut gers +ĠStaff 
ord +ĠWell ington +Ġinter mitt +Ġn pm +Ġord eal +Ġplug ged +o oming +in ished +fram ework +Ġtim ber +Ġc ass +Ġ8 50 +il ess +ĠRed ux +7 68 +St re +Ġsurpass ed +w hel +Ġparalle ls +Ġve il +ĠG I +ĠR EST +Ġread iness +s ort +Ġmod ifying +ĠSl ate +ru ff +Ġmar ble +Ġinf rared +Ġaud itor +ĠFANT ASY +ĠP overty +ĠS PD +Ġ" ( +K y +RA Y +Ġexecut ions +ĠBever ly +ĠMarx ism +ĠBur st +ĠK ali +est ones +Clear ly +E ll +ãģ § +ĠProceed ings +T oken +IF IC +ñ a +Cent ral +ĠH aley +ĠD rama +Ġform ations +OR N +Book s +Ġdom inating +ĠFly ers +ĠCompan ion +Ġdiscipl ined +ĠYug oslav +ĠSpell s +Ġv engeance +Ġland lords +L en +ĠO gre +ano ia +Ġpier cing +Ġcon greg +Ġscore r +ob ia +Ġnic kel +ĠLear ns +Ġre jo +Ġmaster piece +Fl ash +Ġinhab ited +ĠOpen GL +ĠD ud +ĠI CO +Ġar ter +Ġpl ur +Ġmaster y +Ġlong standing +st ed +Ġw ines +Ġtelev ised +ĠSh rine +ĠBay ern +Ġâ ĵĺ +Ġencl osure +j ohn +Ġprophe ts +ĠRes urrection +ĠOrd ers +Ġun even +r als +Ġd wind +ĠL ah +ĠSl oven +37 8 +Ġins istence +aff le +ĠCl one +Ġhard ship +ĠCongress man +Ġple ad +Ġreview ers +Ġc ured +Ġ19 35 +as ley +f ake +ĠTh inking +yd ia +P ART +ĠD ota +o it +Ġwh ipped +Ġb ouncing +ĠHispan ics +com ings +Ġcann abin +ĠCh ambers +ĠZ ack +Option al +Ġco ats +Ġprow ess +ĠNort on +Ġplain ly +Ġfre ight +Ġinhib ition +Ġcl am +Ġ30 3 +ke f +ale igh +L uke +Ġpsych o +ator ium +M ED +Ġtreat ies +Ġind isc +Ġd c +OP S +Ġresil ient +ĠInter state +Ġsl ack +Ġmund ane +Ġestab lishes +35 9 +Ġstr ained +Ġn ond +S us +Ġcast e +ar ate +ie ving +Ġunfair ly +Ġpars er +on ial +urs ive +V ia +ĠOtt o +ĠAuthor ities +stro ke +K R +ĠMer cy +Ġfurn ished +Ġout set +Ġmet ic +19 82 +olith ic +ĠT ent +og ical +ĠA ircraft +Ġh ides +ĠBec ame +Ġeduc ators +re aching +Ġvol atility +Ġtodd ler +ĠNAS CAR +ĠTw elve +ĠHigh lights +Ġgra pe +Ġspl its +Ġpe asant +Ġre neg +ĠMS I +Tem p +st ars +Ġtre k +ĠHy de +b inding +Ġreal ism +Ġox ide +ĠH os +Ġmount s +Ġbit ing +Ġcollaps ing +Ġpost al +Ġmuse ums +Ġdet ached +Ġrespect ing +Ġmonop ol +Ġwork flow +ĠC ake +Tem plate +ĠOrgan isation +Ġpers istence +36 9 +C oming +B rad +Ġredund ant +ĠG TA +Ġb ending +Ġrev oked +Ġoff ending +Ġfram ing +Ġprint f +Comm un +mem bers +Out side +Ġconst rued +Ġc oded +F ORE +Ġch ast +Ch at +Ind ian +ĠY ard +? !" +ĠP orts +ĠX avier +ĠR ET +' ." +ĠBo at +iv ated +ich t +umer able +D s +ĠDun n +Ġcoff in +Ġsecure ly +ĠRapt ors +ĠB es +Install ation +Ġin ception +ĠHealth y +end ants +Ġpsych ologists +ĠShe ikh +c ultural +ĠBlack Berry +sh ift +F red +oc he +Ġc akes +ĠS EO +ĠG ian +ĠAs ians +og ging +e lement +Ġpund its +ĠV augh +ĠG avin +Ġh itter +Ġdrown ed +Ġch alk +ĠZ ika +Ġmeas les +80 2 +âĢ¦ .. 
+ĠAW S +] " +Ġdist ort +ĠM ast +Ġantib odies +ĠM ash +Mem ory +ĠUg anda +ĠPro b +Ġvom iting +ĠTurn s +Ġoccup ying +Ġev asion +ĠTher apy +Ġprom o +Ġelect r +Ġblue print +ĠD re +pr iced +ĠDep ot +Ġallev iate +ĠSom ali +m arg +n ine +Ġnostalg ia +ĠShe pherd +Ġcaval ry +Ġtor ped +ĠBlood y +x b +Ġs ank +Ġgo alt +report print +embed reportprint +clone embedreportprint +ĠIn itially +ĠF ischer +Ġnot eworthy +c ern +Ġin efficient +raw download +rawdownload cloneembedreportprint +c ation +ĠD ynasty +l ag +D ES +Ġdistinct ly +ĠEston ia +Ġopen ness +Ġg ossip +ru ck +W idth +ĠIb rahim +Ġpet roleum +Ġav atar +ĠH ed +ath a +ĠHog warts +Ġc aves +67 8 +Ġsafegu ard +ĠM og +iss on +ĠDur ham +sl aught +ĠGrad uate +Ġsub conscious +ĠEx cellent +ĠD um +---- - +Ġp iles +ĠW ORK +ĠG arn +ĠF ol +ĠAT M +Ġavoid s +ĠT ul +Ġble ak +EL Y +iv ist +light ly +P ers +ĠD ob +ĠL S +Ġins anity +Î µ +atal ie +En large +Ġtw ists +Ġfault y +Ġpir acy +Ġimp over +Ġrug ged +ĠF ashion +Ġs ands +' ? +sw ick +Ġn atives +Ġhe n +ĠNo ise +ãĥ Ĺ +Ġg reens +Ġfree zer +Ġd ynasty +ĠFather s +ĠNew ark +Ġarchae ological +Ġo t +ob ar +Ġblock ade +Ġall erg +L V +Ġdeb it +ĠR FC +ĠMil ton +ĠPress ure +Ġwill ingly +Ġdisproportion ate +Ġopp ressive +Ġdiamond s +Ġbelong ings +19 70 +Ġbell s +Ġimperial ism +Ġ2 27 +Ġexpl oding +ĠE clipse +Ġ19 19 +Ġr ant +Ġnom inations +34 7 +Ġpeace fully +ric a +ĠF UCK +Ġvib ration +mal ink +Ġro pes +ĠIv anka +ĠBrew ery +ĠBook er +ĠOw ens +go ers +Serv ices +ĠSn ape +Ġ19 1 +39 5 +Ġ2 99 +just ice +Ġb ri +Ġdisc s +Ġprom inently +Ġvul gar +Ġsk ipping +l ves +Ġtsun ami +37 4 +ĠU rug +ĠE id +rec ated +p hen +Ġfault s +ĠStart ed +9 50 +Ġp i +Ġdetect or +Ġbast ard +Ġvalid ated +Space Engineers +OUR CE +Ġ( ~ +Ġuns ur +Ġaff irmed +Ġfasc ism +Ġres olving +ĠCh avez +ĠC yn +Ġdet ract +L ost +Ġrig ged +Ġhom age +ĠBrun o +55 5 +ec a +Ġpress es +Ġhum our +Ġsp acing +Ġ' / +olk ien +C oun +OP ER +T re +S on +ĠCambod ia +ier re +m ong +o zy +Ġliquid ity +ĠSov iets +ĠFernand o +Ġ2 29 +Ġsl ug +ĠCatal an +elect ric +Ġsc enery +ĠH earth +Ġconst rained +Ġgoal ie +ĠGu idelines +ĠAm mo +ĠPear son +Ġtax ed +Ġfet us +Resp onse +ĠAlex is +th ia +G uy +Ġrecon struct +Ġextrem es +Ġconclud ing +ĠP eg +ook s +Ġded uctions +R ose +Ġground breaking +ĠT arg +ãĥ ģ +ĠRe ve +res ource +Ġmo ons +Ġelectrom agnetic +Ġamid st +ĠVik tor +N ESS +B ACK +Ġcomm ute +ĠAna heim +Ġfluct uations +6 40 +Ġnood les +ĠCop enhagen +ĠT ide +ĠGri zz +ĠS EE +Ġpip elines +Ġsc ars +end o +ag us +ĠE TF +/ # +ĠBec ome +44 8 +Ġvis c +ĠRecomm ended +Ġj umper +Ġcogn ition +Ġassass in +Ġwitness ing +ĠSet up +Ġl ac +v im +IS M +p ages +SS L +35 8 +Ġad ject +indust rial +l ore +cher y +Ġgl itter +Ġc alf +Flor ida +Ġspoil ers +Ġsucceed s +Ġch anting +Ġslog ans +ĠTr acy +Vis it +rol ogy +Ġm ornings +Ġline age +Ġs ip +Ġintense ly +Ġflour ish +ĠSle eping +ĠF em +or por +ĠK lan +ĠDar th +h ack +ĠNi elsen +Ġtum ors +Ġprocure ment +ĠY orkshire +Ġra ided +K Y +An na +Ġ// [ +ĠDis order +ĠMust ang +ĠW en +ĠTry ing +s q +Ġdeliver ies +Ġshut ter +Ġcere bral +Ġbip olar +ĠC N +l ass +j et +Ġdeb ating +> : +Ġe agle +gr ades +ĠD ixon +UG C +M AS +ĠDr aco +ĠMach ines +aff er +Ġem an + ² +pr on +ĠG ym +Ġcompar atively +ĠTrib unal +PR O +Ġle x +Ġfert ile +Ġdep ressing +Ġsuperf icial +ess ential +ĠHun ters +g p +Ġprom inence +L iber +ĠAn cest +ote chnology +Ġm ocking +ĠTra ff +ĸ ļ +Med ium +I raq +Ġpsychiat rist +Quant ity +ĠL ect +Ġno isy +5 20 +G Y +Ġsl apped +ĠM TV +Ġpar a +p ull +Mult iple +as her +Ġn our +ĠSe g +Spe ll +v ous +ord ial +Sen ior +ĠGold berg +ĠPl asma +ne ed +Ġmess enger +ere t +Ġteam ed 
+Ġliter acy +ĠLe ah +ĠD oyle +Ġem itted +U X +Ġev ade +Ġm aze +Ġwrong ly +ĠL ars +Ġstere otype +Ġpled ges +Ġarom a +ĠM ET +Ġac re +ĠO D +Ġf f +Ġbrew eries +ĠH ilton +und le +ĠK ak +ĠThank fully +ĠCan ucks +in ctions +ĠApp ears +Ġco er +Ġundermin ed +ro vers +And re +Ġbl aze +um ers +Ġfam ine +amp hetamine +ulk an +Am ount +Ġdesper ation +wik ipedia +develop ment +ĠCor inth +uss ia +Jack son +L I +N ative +R s +Oh io +ĠKath leen +F ortunately +Ġattend ant +ĠPre ferred +ĠDid n +ĠV s +M is +Ġrespond ent +Ġb oun +st able +Ġp aved +Ġunex pl +ĠChe ney +L M +ĠC ull +bl own +Ġconfront ing +oc ese +serv ing +W i +ĠLith uania +ann i +Ġst alk +h d +Ġv ener +AP H +ynchron ous +UR R +um ably +hist oric +H alf +H ay +Ġresil ience +spe ction +Ġabandon ing +O bs +ĠDeb bie +Ġgrad ient +ĠPl aint +ĠCan al +AR CH +Ġexpans ive +Ġfun g +Ġb ounced +U nd +Ġprec autions +Ġclar ification +Ġd agger +Ġgri ps +Ġ µ +ĠRiver a +ĠUnd ead +is ites +ĠFIR ST +ñ o +aud i +Ġhost ages +Ġcompl iant +Ġal umni +Se ven +Ġcyber security +e ither +Col lect +Ġinvari ably +ĠS oci +Ġlaw maker +Ġa le +ĠPerson ally +N azi +Ġcustom ization +ĠPro c +ĠSask atchewan +eat uring +Ġsp ared +Ġdiscontin ued +Ġcomput ational +ĠMotor ola +Ġsuprem acist +government al +Ġparad ise +ĠDown ing +ĠNik on +Ġcat alyst +ber ra +Tor onto +8 75 +bet a +ĠMac ron +Ġunreal istic +ve ctor +ĠVeh icles +it iveness +ĠR V +ĠCol bert +s in +o ji +ent in +ĠKr ish +hell o +ff ield +ok y +ĠT ate +Ġmap le +Ġa ids +chem ical +33 4 +n uts +ĠWar p +Ġx x +ĠRob b +umer ous +_- _ +ft ime +ĠV W +Ġw inger +ĠD ome +t ools +ĠP V +ĠGe orgetown +Ġg eared +Ġjihad ists +Ġc p +Ġster oids +M other +cler osis +ĠDR M +nes ia +Ġl inger +Ġimm ersive +ĠC OUN +Ġoutwe igh +ens ual +B and +Ġtransform s +mat ched +ps ons +ĠJud icial +f actor +Ġrefer ral +Ġodd ly +ĠW enger +B ring +ĠB ows +60 2 +IC LE +Ġl ions +ĠAcad emic +ĠTh orn +ĠRa ider +kef eller +St orage +L ower +ĠOr t +ĠEqu ality +AL T +ĠS OC +T ypes +Ġl yn +ĠAss et +co at +TP P +C VE +ĠPione er +app lication +Mod ern +ĠH K +En vironment +Al right +R ain +IP P +ĠShi ite +Ġm ound +ĠAb ilities +cond ition +St aff +Ġcompet ence +ĠM oor +ĠDi ablo +Ġwith held +Ġost ensibly +ĠB rom +Ġms g +Ġden omin +ĠRef erences +ĠF P +Ġplun ged +Ġp amph +m oving +cent ral +Ġdown right +Ġf ading +T al +T yp +ĠTh y +uk es +it he +Ġo ve +Ġbatt led +Ġseaf ood +Ġfig ur +ĠR D +c rop +Ġsqu ads +{ \ +à ¹ +ĠE h +Ġinterview ing +ĠQ in +Ġas piring +PL IC +Ġcla uses +ĠG ast +ĠN ir +Ġl uggage +Ġh ose +Ġsystem d +Ġdesc ending +ĠRev ised +ĠR ails +al ign +70 9 +33 7 +Ġf ug +charg ing +t ags +Ġut er +k ish +WAR NING +49 0 +prof its +Ġvoy age +Ġa ce +ĠV anguard +ĠT anks +ĠM uk +Ġ2 26 +S afe +Ar mor +Ġvolcan ic +Ġwom b +ĠM IL +Ġbegin ner +ĠRec ogn +ĠA AP +PL AY +) ! 
+Ġdetect ing +c n +Ġbre aches +Bas ically +ĠP ag +ĠMunicip al +ĠInd ie +ĠL af +ĠDis able +ĠOl son +Ġrest rained +Ġrul ings +Ġhum ane +ev ents +ĠCinem a +display Text +ĠH atch +action Date +onna issance +Ġassault ing +ĠL ug +CH AT +Ġvig orous +ĠPer se +Ġintoler ance +ĠSnap chat +ĠSh arks +Ġd ummy +ĠDi agn +ĠGu itar +im eters +40 3 +RE G +A x +Ġsepar ates +ĠMah m +Ġt v +j ah +O OL +C irc +ĠWinds or +uss ian +Ġintu ition +Ġdis dain +ĠDon ovan +Ġ2 21 +E mb +Ġcondem ning +Ġgener osity +zz y +Ġpant ies +ĠPre vent +Action Code +AN A +34 2 +external ActionCode +Ġspec ifying +Ġcryst all +J ere +Ġru pt +ĠApp rentice +Ġprof iling +Ð º +St rike +Ġsid eline +Ġoblig ated +Ġocc ult +Ġbureaucr atic +ant ically +rupt ed +neg ative +ĠEthiop ia +ĠC ivic +Ġins iders +el igible +ĠTV s +ĠB AR +ĠT I +i ologist +ĠA IR +Ġsubstit uted +Ar ab +ĠS aul +ĠY og +p rem +Ġbuild ers +Ġstation ary +Ġdoubt ful +Ġvig orously +Ġthr illing +Ph ysical +ĠCare y +ĠHyd ra +geon ing +ĠS ly +y ton +Ġborrow ers +ĠPark inson +Ġ ë +ĠJama ica +Ġsat ir +Ġinsurg ents +ĠF irm +Ġis ot +ĠK arn +our ning +ak ens +doc s +l ittle +ĠMon aco +CL ASS +Tur key +L y +ĠCon an +ass ic +Ġstar red +ĠPac ers +et ies +Ġt ipping +M oon +ĠR w +s ame +Ġcav ity +Ġgo of +ĠZ o +Sh ock +um mer +Ġemphas izes +Ġreg rett +Ġnovel ty +Ġen vy +ĠPass ive +r w +50 5 +Ġind ifferent +ĠR ica +ĠHim self +ĠFred die +Ġad ip +ä¸ Ģ +Ġbreak out +Ġhur ried +ĠHu ang +ĠD isk +Ġro aming +?????- ?????- +U V +ĠRick y +ĠS igma +Ġmarginal ized +Ġed its +Ġ30 4 +mem ory +Ġspec imen +29 3 +ãģ ¯ +Ġvert ically +Ġaud ition +ĠHe ck +Ġc aster +ĠHold ings +ad al +ĠC ron +ĠL iam +Ġdef lect +P ick +ĠDeb ug +RE F +Ġvers atility +ot hes +class ified +ĠMah ar +ĠH ort +C ounter +st asy +not iced +33 1 +ĠSh im +f uck +ĠB ie +Ġair ing +ĠPro tein +ĠHold ing +Ġspect ators +ili ated +ĠThat cher +n osis +ãĥ¼ ãĥ³ +Te le +B oston +ĠTem pl +st ay +Ġdecl arations +47 9 +Vol ume +ĠDesign er +ĠOver watch +id ae +Ġon wards +Ġn ets +ĠMan ila +part icularly +Ġpolit ic +o other +Ġport raits +Ġpave ment +c ffff +Ġs aints +Ġbegin ners +ES PN +Ġshort comings +âķIJ âķIJ +Ġcom et +ĠOrgan ic +qu el +Ġhospital ized +Bre ak +Ġpe el +dyl ib +asp x +ur ances +ĠT IM +P g +Ġread able +ĠMal ik +Ġm uzzle +Ġbench marks +d al +ĠV acc +ĠH icks +60 9 +ĠB iblical +he ng +Ġover load +ĠCivil ization +Ġimm oral +Ġf ries +ãĤ Ĵ +Ġreprodu ced +Ġform ulation +j ug +ire z +g ear +Ġco ached +Mp Server +ĠS J +ĠK w +In it +d eal +ĠO ro +ĠL oki +ĠSong s +Ġ23 2 +ĠLou ise +asion ally +Ġunc ond +olly wood +Ġprogress ives +ĠEn ough +ĠDo e +Ġwreck age +Ġbr ushed +ĠBase Type +Ġz oning +ish able +het ically +ĠC aucus +ĠH ue +Ġk arma +ĠSport ing +Ġtrad er +Ġseem ing +ĠCapt ure +4 30 +b ish +Ġt unes +Ġindo ors +ĠSp here +ĠD ancing +TER N +Ġno b +ĠG ST +m aps +Ġpe ppers +F it +Ġoverse es +ĠRabb i +ĠR uler +vert ising +off ice +xx x +Ġra ft +Ch anged +Ġtext books +L inks +ĠO mn +ãĢ ij +Ġinconven ience +ĠDon etsk += ~ +Ġimplicit ly +Ġboost s +ĠB ones +ĠBo om +Cour tesy +Ġsens ational +AN Y +Ġgre edy +ed en +Ġinex per +ĠL er +ĠV ale +Ġtight en +ĠE AR +ĠN um +Ġancest or +S ent +ĠH orde +urg ical +all ah +Ġsa p +amb a +ĠSp read +tw itch +Ġgrand son +Ġfract ure +Ġmoder ator +ĠSe venth +ĠRe verse +Ġestim ation +Cho ose +Ġpar ach +Ġbar ric +ãĢ IJ +Ġcomp ass +Ġall ergic +âĢ ķ +OT HER +err illa +Ġw agon +Ġz inc +Ġrub bed +ĠFull er +ĠLuxem bourg +ĠHoo ver +Ġli ar +ĠEven ing +ĠCob b +est eem +Ġselect or +ĠB rawl +is ance +ĠE k +Ġtro op +Ġg uts +ĠApp eal +ĠTibet an +Ġrout ines +ĠM ent +Ġsummar ized +steam apps +Ġtr anqu +Ġ19 29 +or an +ĠAut hent +Ġg maxwell +Ġappre hens 
+Ġpo ems +Ġsa usage +ĠWeb ster +ur us +Ġthem ed +Ġl ounge +Ġcharg er +Sp oiler +Ġsp illed +h og +ĠSu nder +ĠA in +ĠAng ry +Ġdis qual +ĠFrequ ency +ĠEther net +Ġhel per +Per cent +Ġhorr ifying +Ġa il +ĠAll an +EE E +ĠCross ing +44 9 +Ġh olog +ĠPuzz les +ĠGo es +eren n +60 4 +ãģ ı +ĠRaf ael +Ġatt en +ĠE manuel +Ġup ro +ĠSus p +P sych +ĠTr ainer +ĠN ES +ĠHun ts +bec ue +Ġcounsel or +R ule +Ġtox ins +Ġb anners +r ifice +Ġgreet ing +Ġfren zy +Ġall ocate +Ġ* ) +ex pr +50 3 +ĠCh ick +ĠT orn +Ġconsolid ation +ĠF letcher +sw itch +fr ac +cl ips +ĠMcK in +ĠLun ar +Mon th +IT CH +Ġscholar ly +rap ed +39 8 +Ġ19 10 +Ġe greg +Ġin secure +Ġvict orious +cffff cc +Ġsing led +Ġel ves +ĠW ond +bur st +Ġcam oufl +ĠBL ACK +Ġcondition ed +ç ī +ans wered +Ġcompuls ory +asc ist +Ġpodcast s +ĠFrank furt +bn b +Ġne oliberal +ĠKey board +ĠBel le +w arm +Ġtrust s +Ġins ured +ĠBu cc +us able +60 7 +ĠPl ains +Ġ18 90 +Ġsabot age +Ġlod ged +f elt +Ġg a +ĠN arc +ĠSal em +Ġsevent y +ĠBl ank +p ocket +Ġwhis per +Ġm ating +om ics +ĠSal man +ĠK ad +Ġan gered +Ġcoll isions +Ġextraord inarily +Ġcoerc ion +G host +b irds +è Ģ +k ok +Ġper missible +avor able +Ġpo inters +Ġdiss ip +ac i +Ġtheat rical +ĠCos mic +Ġforget ting +Ġfinal ized +å¤ § +y out +l ibrary +Ġbo oming +ĠBel ieve +ĠTe acher +ĠL iv +ĠGOOD MAN +ĠDomin ican +OR ED +ĠPart ies +Ġprecip itation +ĠSl ot +R oy +ĠComb ined +Ġinteg rating +Ġch rome +Ġintest inal +ĠRe bell +Ġmatch ups +Ġblock buster +ĠLore n +ĠLe vy +Ġpre aching +ĠS ending +ĠPur pose +ra x +f if +Ġauthor itative +ĠP ET +ast ical +Ġdish on +Ġchat ting +Ġ"$ :/ +Connect ion +Ġrecre ate +Ġdel inqu +Ġbro th +ĠD irty +ĠAd min +z man +Ġscholars hips +Ġ25 3 +cont act +als a +7 67 +c reen +abb age +Ġ19 15 +Ġbl ended +Ġal armed +L anguage +35 6 +Ġbl ends +ĠCh anged +W olf +Ġhe pat +Creat ing +Ġper secut +Ġsweet ness +art e +Ġforfe iture +ĠRober to +im pro +N FL +ĠMag net +Det ailed +Ġinsign ificant +ĠPOL IT +ĠBB Q +ĠC PS +Ġse aw +amin er +m L +end if +f inals +Ġ26 5 +u ish +Ġ} ) +ĠPro blems +Ġem blem +Ġserious ness +Ġpars ing +Ġsubst itution +Ġpress ured +Ġrecy cled +ale b +Rub y +Ġprof iciency +Dri ver +ĠW ester +: ' +AF TA +Ġm antle +ĠClay ton +fl ag +Ġpractition er +c overed +ĠSt ruct +add afi +4 25 +ĠTown ship +ĠHyd ro +Lou is +34 3 +Ġcond o +ĠT ao +Ġutil ization +Ġnause a +ĠDem s +rid ges +p ause +Ġform ulas +Ġchall enger +37 6 +Ġdefect ive +ĠRail way +ĠPub Med +Ġyog urt +l bs +ĠNor folk +OP E +ĠMood y +Ġdistribut or +Ġscroll s +Ġextract s +St an +Ġv iability +Ġexp oses +Ġstar vation +ĠStep s +ĠD odd +f ew +ST D +33 2 +Ġclos ures +Ġcomplement ary +ĠS asha +ump y +Ġmon et +Ġartic ulate +ĠDo ct +k iller +Ġsc rim +Ġ2 64 +Ġprost itutes +Ġse vered +Ġattach ments +Ġcool ed +L ev +ĠF alk +f ail +Ġpolic eman +ĠD ag +Ġpray ed +ĠK ernel +Ġcl ut +Ġc ath +Ġan omaly +St orm +em aker +ĠBreak fast +ul i +o ire +J J +h z +Oper ation +ĠS ick +35 4 +ĠGuatem ala +R ate +Ġexp osures +f aces +ĠArch ae +ra f +ĠM ia +Ġ20 25 +Ġop aque +Ġdisgu ised +ĠHead quarters +S ah +Ġp ots +9 78 +ĠM alf +Ġfrown ed +Ġpoison ous +ĠCon vers +ee ks +Ġcr ab +." 
" +Ġtre ason +Ġr anc +Ġescal ating +Ġwar r +Ġmob s +Ġl amps +ĠSun shine +ĠBrun swick +Ph ones +Ġspe lled +ĠSk ip +Ġ20 50 +Ġ19 11 +ĠPl uto +ĠAm end +Ġme ats +38 7 +Ġst omp +ĠZh ou +ĠLevi athan +ĠHaz ard +ad v +ĠOr well +Ġal oud +Ġb umper +ĠAn arch +ub untu +ĠSer ious +f itting +ĠOption al +ĠCec il +RE AM +Ġser otonin +Ġcultiv ate +ag ogue +} \ +Ġmos ques +ĠSun ny +Ġre active +rev olution +ĠL up +ĠFed ora +Ġdefense man +ĠV ID +ist ine +Ġdrown ing +ĠBroad casting +Ġthr iller +ĠS cy +Ġacceler ating +Ġdirect s +od ied +b ike +d uration +Ġpain fully +R edd +Ġproduct ions +Ġg ag +Ġwh ist +Ġs ock +Ġinf initely +ĠConc ern +ĠCit adel +Ġlie u +Ġcand les +ogene ous +arg er +Ġheaven ly +inflamm atory +Per formance +C s +ruct ose +az aki +Ġp essim +Ġinf erence +Ġpow d +ĠZ oe +Ġpain ts +Ġd azz +pt a +-------- --- +Ġins pir +ĠExper imental +ĠKn ife +reg or +b ors +Ġshow ers +rom eda +Ġs aint +Ġben ign +ĠJ iang +Ġenvision ed +Ġsh roud +IF T +H O +Ġsh uff +ĠI CC +Ġse greg +Ġrevis it +ighth ouse +L i +Ġsub strate +ĠSe as +ĠRew ard +ĠH ep +ĠBr ass +s bm +Ġelim inates +Ġst amina +ĠV AT +ĠLo an +Ġconst raint +Ġappropri ated +Ġp es +ĠA LE +r anging +Ġ40 4 +39 2 +Ġintellectual s +ach u +Ġrestruct uring +ĠLe vin +Ġrun es +Ġdelight ful +Ġcarbohyd rates +ĠMod els +ĠExp o +Ġtransport ing +all oc +Ġring ing +S amsung +Ġscarce ly +ĠURL s +ĠM AS +Ġprot otypes +Ġnarr ator +ĠCPU s +cd n +ĠBart on +Ġdecided ly +ĠSh u +ix ir +oc ious +ĠMy st +N intendo +Ġre use +Ġforg iven +F ew +in ical +n at +Ġseam less +ĠEv a +ĠE VE +ĠJ O +land ers +Ġso fter +neg ie +Ġtrans ient +Ġorb ital +Ġfulf il +ĠK om +Hop efully +Ġdynam ically +ĠHun ger +å Ľ +ĠArmen ia +el man +ber to +Ġp ige +ĠID s +lim it +Ġve ins +Ġso aring +p acks +Gold en +ĠCr ab +ist or +ĠR PM +Ġ$ $ +g ression +Ġjihad ist +Ġgam ble +Ġcare g +Ġinf lated +F ace +ĠFire arms +ĠEm manuel +â Ŀ +Ġsh ocks +gr ab +Ġspl end +ĠHP V +ab ortion +Ab ove +Ent ity +play ers +Ġcomm enced +ul ence +Ġfulfill ment +Ġembod iments +ĠW elfare +Ġha il +Ġ< @ +tt en +Ġcat cher +ĠJ azeera +Ġvolcan o +Ġstabil ize +ĠHand ler +Ġintens ified +ĠAb rams +Ġhum iliation +p aced +60 5 +ĠCent OS +Spe cific +Ġhe ed +ĠC AM +ĠGal ile +D ie +Ġabol ished +ĠThom son +ĠTe achers +ĠW ass +j ong +ĠIS BN +ĠAll ies +sh ake +å · +v ict +How ard +Ġde em +Ġexceed ingly +ĠSmart stocks +ib e +Ġdoor way +Ġcompet ed +ig mat +Ġnational ists +Ġg room +ĠKe en +Ġdispos able +de cl +ĠT olkien +ĠSche me +Ġb iod +Ġav id +ĠEl on +ag ar +ĠT SA +R oman +Ġartific ially +Ġadvis ors +X L +ĠInf erno +36 6 +Ġted ious +ĠPhot ography +ĠCar rie +Ġtro pe +ĠSand ra +Ġdec imal +Que en +ĠGund am +ĠO M +ote ch +N BA +Ġ19 32 +Ġent renched +ĠMar ion +Ġfr aternity +Lab our +Hen ry +Ġlat itude +E ither +Ġenh ances +ĠPot ential +Ġsh ines +id ad +Ġbread th +Ġcapac ities +ĠðŁ ĻĤ +ĠBron x +Ġsex es +Ġdifferent iation +Ġheavy weight +ĠT aj +d ra +Ġmigr ate +Ġexhaust ion +ĠR UN +els ius +ĠCu omo +Ġgu itars +Ġcl ones +ĠSom ew +ĠP ry +------------ - +Ġwarr anted +cy cles +Ġsalv age +Ġdis ks +R ANT +ĠNGO s +ĠMart ian +":[ {" +Ġadd icts +oj ure +il let +Ġamazing ly +art ments +p ixel +ĠGPU s +Lay out +è £ +ĠTam il +ĠBas il +Ġimpart ial +ĠSt ructure +f ork +b ryce +Ġr idge +ĠHamb urg +ri ous +Ġbl itz +cig arettes +Ġcan ned +40 2 +Ġiron ically +Ġcompassion ate +ĠHaw kins +. 
# +ĠCat hedral +Ġrall ied +in ternal +Ġqu ota +st akes +T EXT +m om +Ġcomple tes +Ġ23 8 +Ġsh rug +ãĥ ij +ĠN inth +Ġrev ise +ĠProv ider +Ġtre acher +Ġqu asi +ĠPR ES +Ġdep osition +Ġconfidential ity +iss ors +Ġim balance +Ġspan ning +Ġang ular +ĠC ul +commun ication +ĠNor a +ĠGen ius +op ter +Ġs acked +Sp ot +Ġfine ly +ĠCH R +28 2 +w aves +Pal est +ĠRo hing +N L +è ¿ +Ġsh itty +ĠSc alia +4 75 +Pro gress +Ġreferen cing +Ġclass rooms +ab ee +Ġs od +hes ion +70 8 +ĠZucker berg +ĠFin ish +ĠScot ia +ĠSav ior +ĠInstall ation +an tha +( - +Ġ30 2 +ĠP unk +Ġcr ater +yout u +Ġro ast +Ġinflu encing +Ġd up +ĠJ R +ĠG rav +Ġstat ure +Ġbath rooms +A side +W iki +me an +ĠZ ak +ĠOn es +ĠN ath +Ġhyper t +Ġcommence ment +C ivil +Ġmoder ately +Ġdistribut ors +Ġbreast feeding +Ġ9 80 +ĠS ik +ĠC ig +ĠAM ER +R IP +ĠCare er +ust ing +Ġmess ed +Ġe h +ĠJ ensen +/ $ +Ġblack mail +Ġconvers ions +Ġscientific ally +Ġmant ra +p aying +Ġiv ory +ĠCour ts +OU GH +aunt let +Ser ial +B row +ĠH undreds +3 23 +Ġpe e +Ġlin ux +Ġsub mer +ĠPrinc ipal +48 5 +ĠD SL +ĠCous ins +Ġdoctr ines +ĠAthlet ics +Ġ3 15 +ĠK arma +Ġatt ent +ur ger +Ġpresc ribe +Ġenc aps +ĠC ame +Ġsecret ive +ĠCr imes +d n +C lean +ĠEgypt ians +ĠCar penter +Ġ ll +H um +ĠMil o +Ġcapital ists +Ġbrief ed +T we +ĠBas in +elve t +M os +Ġplun ge +ĠKa iser +ĠFu j +ill in +Ġsafegu ards +Ġo ste +ĠOpportun ity +ĠM afia +ĠCall ing +ap a +ur ban +br ush +ill ard +c é +int elligence +ĠL ob +ĠDru id +Ġsm oother +Ġfoot ing +Ġmotor ists +arc ity +Ġmascul inity +Ġm ism +Ġabdom inal +ĠTa vern +ĠR oh +Ġesc apes +s igned +Anth ony +Ġsacrific ing +Ġintim acy +Ġan terior +ĠK od +Ġmot if +Ġg raz +Ġvisual ization +Ġguitar ist +ĠTro tsky +m agic +D ar +ĠMor i +Ġw ards +Ġtoile ts +l est +Ġtele port +ĠSund ays +ĠPl at +ET S +Ġe Sports +Pat rick +ĠK atherine +en ko +Ġhas sle +ĠM ick +gg les +Ġh ob +aint ain +Ġair borne +Ġsp ans +Ġch ili +Ġa perture +Ġvolunte ered +ĠInc ident +ĠF res +ĠVeter an +augh tered +ing o +Ġun insured +CL OSE +Ġf use +Ġer otic +Ġadvert ise +ra ising +Text ure +Ġatt ends +ĠRE AL +udd led +Ġsm oot +Ġ30 5 +ĠWill is +Ġbl ond +An alysis +ĠV T +on ica +Ġstrongh old +R F +N M +. 
>> +Ġprosper ous +Ġbo asted +29 2 +ĠManufact uring +PR ESS +g ren +Ġpharm acy +ĠRoc kefeller +k ai +Ġth umbs +ĠH ut +Ġmother board +Ġguard ians +ĠAl ter +ll ular +Ġsh ack +Ġwise ly +Ġback bone +erv a +Ġsu icides +ĠMcG regor +ij ah +E mer +ĠB rav +Ġdesign ate +P OST +produ ced +Ġcleans ing +irl wind +ex istent +ĠHum ph +ĠPay ne +Ġv ested +Å ¡ +Ġstring ent +ion a +Ġuns ub +Ġsum med +ĠHer cules +sub ject +ĠR agnar +ĠN os +Ġcharacter ization +Ġsav vy +ĠDaw son +ĠCas ino +Ġf ri +ĠBar rier +Ġmis information +Ġins ulation +Ġcorrid ors +Ġair planes +ĠNo ct +ah i +Ġ19 16 +k b +arm ac +Ġsh un +Ġsche ma +Ġhorr ified +Ġ23 9 +aund ers +N B +i ates +er ity +ĠSh ard +Ġr arity +Ġgroup ed +ĠGh ana +again st +ĠBi ological +ĠA ware +ow ell +Ï Ħ +ĠBe au +sh aw +H ack +ĠJul ius +US S +ol son +aun a +c ru +ĠMaur ice +ĠI k +Ġsequ encing +Ġradical s +Ġ( ?, +v irtual +Ġany ways +Ġreper c +Ġhand lers +Ġhes itant +é ĥ +ĠM F +ple mentation +ass ociated +Ġcampaign ed +ĠY ue +ut ations +ĠY oga +Ġsim mer +Ġro ds +Ġmel ody +Ġconv oy +v ideos +Ġscreen ed +N eg +ochem ical +Ġ( )) +Ġultr as +Ġant ip +ĠIsland ers +70 4 +Ġfet ish +Ġridic ulously +ĠK art +Ġmitochond rial +Ġinterf ering +Build er +Ġover fl +Ġac ne +ĠM ud +ĠK err +f lex +ĠPost al +ĠBalt ic +47 7 +ĠPers ons +our age +H B +ĠM use +ĠImm ortal +ĠDri ving +Ġpet itions +Ġsubsc ript +Ġs orce +ĠProcess or +ut on +S ony +Ġph on +Ġr aced +ĠAnth rop +Ġday time +ĠEx ercise +Add ing +Ġeng ages +ĠQual comm +Ġmir acles +Ġmem es +ĠDr ink +ĠOri oles +Ġhair s +ĠPol ar +ath om +Ġsl ippery +ĠR emy +Ġcar amel +ĠY EAR +Ġal k +I gn +a ution +ĠMer lin +ĠC ran +Ġap ologies +Ġ4 10 +Ġout ing +ĠMem ories +app ointed +Ġcount ered +u ld +pos ing +Ġfire wall +ĠW ast +ĠW et +work ed +se ller +Ġrepe aled +ere o +ass uming +BL IC +m ite +ĠCEO s +ĠChap el +ellig ent +________________ ________ +D og +Ġw art +Ġsubsc riber +s ports +Ġbe gged +ĠM V +Ġsem if +eth ical +Ġpre ach +Ġrev ital +Ġpun itive +Ġshort cuts +Ġinstit uted +ĠWars aw +Ġabdom en +ĠK ING +Ġsuper intendent +Ġf ry +ĠGe o +T OR +Ġcontrad ictions +apt ic +Ġlandsc apes +b ugs +Ġcl ust +Ġvol ley +c ribed +Ġt andem +Ġrob es +WH AT +Ġpromot er +Ġel oqu +review ed +ĠD K +ĠPl ato +Ġf ps +T ank +ĠDer rick +Ġpriorit ize +as per +ĠHond uras +ĠCom pleted +ne c +Ġm og +n ir +ĠMay o +DE F +st all +in ness +ĠVolks wagen +Ġprec aution +ĠM ell +i ak +ist ries +Ġ24 8 +Ġoverl apping +Sen ate +ĠEnh ance +res y +rac ial +OR TS +ĠM ormons +Str ong +ĠCo ch +Mex ico +ĠMad uro +Ġj ars +Ġcan e +W ik +oll a +iff erence +Ġphysic ist +ĠMag gie +Ġ28 5 +Ġdep iction +ĠMcL aren +J u +Ġsl ows +Ġcommission ers +ĠWill ow +ĠExpl os +hov ah +Ġtechn ician +Ġhom icides +ĠFl av +ĠTr uman +Ġ100 00 +u ctor +Ġsh ader +News letter +45 7 +Ġre ver +Ġhard ened +Ġwhere abouts +Ġrede velop +Ġcar bs +Ġtra vers +Ġsqu irrel +Ġfoll ower +Ġs ings +50 8 +Ġrabb its +emon ium +Ġdocument ing +Ġmisunder stood +) ' +R ick +gg ies +Ġprem ie +Ġsk ating +Ġpass ports +Ġf ists +aged don +H aw +AC P +0 80 +ĠThough ts +ĠCarl son +Ġpriest hood +h ua +Ġdun geons +ĠLo ans +Ġant is +Ġfamiliar ity +ĠS abb +op al +ĠIn k +st rike +Ġc ram +Ġlegal ized +Ġcu isine +Ġfib re +Tra vel +ĠMon ument +OD Y +eth y +Ġinter state +ĠP UR +em porary +ĠArab ian +develop ed +Ġsadd le +Ġg ithub +ĠOff er +ĠIS P +ro let +ĠSUP ER +ĠDen is +Ġmultipl ier +Ġstir red +Interest ingly +Ġcustom ary +Ġbill ed +he x +Ġmultipl ied +Ġfl ipping +ĠCros by +Ġfundament als +ia e +ĠPlay ed +ĠAt om +am azon +ĠFl am +ee z +activ ated +Ġtables poon +Ġliberal ism +ĠPal in +ĠP atel +N um +ĠT AM +Ġs urn +ĠRel oaded +Ġco ined +" ], +ĠCl ash +ĠAg u 
+Ġprag matic +ĠActiv ate +Ġ8 02 +Ġtrail ers +Ġsil hou +Ġprob es +Ġcirc us +ĠB ain +ĠLind say +ĠAb bey +Del ivery +Ġconcess ion +Ġgast ro +ĠSpr ite +Ä Ł +and el +Ġg imm +Ġaut obi +ĠT urtle +Ġwonder fully +ĠHar am +ĠWorld wide +ĠHand le +Ġtheor ists +Ġsle ek +ĠZh u +ograph ically +EG A +ĠOwn ers +ath s +ĠAntar ctic +n atal +=" " +fl ags +`` `` +Ġs ul +K h +Ġpot assium +Ġlinem an +Ġcere al +ĠSe asons +Ġ20 22 +Ġmat hematic +Ġastron omers +prof essional +Ġf ares +cknow led +Ġch i +Ġyoung sters +Ġmistaken ly +Ġhem isphere +ĠDiv inity +r one +Ġ" , +r ings +Ġattract s +v ana +å ¹ +C AP +Ġplay list +Ġpor ch +ãģ £ +Ġincorpor ates +Ġso ak +Ġassert ing +ĠTerror ism +ĠP ablo +J a +ces ter +Ġfear ing +ĠPr ayer +Ġescal ated +G W +Ġro be +ĠBright on +ac ists +ĠSym phony +ĠDwar f +ĠPar ade +ĠLe go +Ġinex pl +Ġl ords +le af +RA G +l iber +Ġcig ars +ĠJe hovah +60 6 +WIND OWS +ĠLiber ia +eb us +He avy +Ġl ubric +ĠR W +angu ages +Ġnarrow ed +com puter +ĠE mber +Ġmurder ing +Ġdown stream +ĠT uls +ĠT ables +Top ic +ĠAcc uracy += / +l ost +ĠRe i +Ġprogress es +b ear +Ġestablish ments +Just in +ĠPe ach +ĠG omez +å ¿ +ĠTri angle +Id ent +ĠH ive +Res ources +Ġmix es +ĠAss uming +M u +Ġhyp oc +Ġs ane +ĠW an +id ious +Su ccess +Ġ io +Ang el +Ġdanger ously +ĠCreat ure +W ORK +: [ +ĠKat rina +List ener +M iller +ĠId lib +h ang +Ġcircum vent +h ref +Ġcel estial +ĠWe eks +ĠP ug +ĠDal ton +Ġsubpoen a +uk u +Ġpers isted +pe i +old ing +ĠDoc uments +ĠH ast +ĠC ENT +Ġprim er +Ġsyn onymous +Ġn ib +om bs +Ġnot ation +ĠD ish +ĠAt mosp +Ġforb id +ĠAN G +pat tern +l os +Ġproject iles +b rown +." , +ĠVen om +Ġfierce ly +ub lished +ĠU ran +ĠNic arag +4 10 +ĠC AL +OT OS +ĠMir acle +ĠEn chant +Ġguard ing +app end +Att ach +Ġlevel ed +Ġcond oms +ih ilation +64 9 +Ġnight mares +ĠTHE Y +ĠST ART +ĠK inn +Ġroomm ate +Ġhy giene +o pping +J ob +Ġl vl +ĠV ER +ĠKe eping +ab etic +Ġformat ting +eral a +Ġrev isions +Ġres urg +T el +ĠGood man +35 3 +p od +Ġind isp +ĠTrans lation +Ġg own +ĠM und +Ġc is +Ġby stand +col lect +ĠPun jab +act ively +ĠG amb +te ll +Ġimport ing +g encies +Ġloc om +ĠBr ill +H oly +ĠBer ger +Ġshow down +Ġrespond ers +IL Y +Ġt akedown +le ted +Ġmat tered +Ġpredict ive +Ġover lay +G PU +ĠV ick +Ġconvey ed +T ab +pe er +Sc an +Ġdefensive ly +v ae +Ġappro ving +Ġt iers +ĠV ia +quer ade +ĠSaud is +Ġdemol ished +ĠProp he +Ġmon o +Ġhospital ity +H AM +ĠAri el +M OD +ĠTor ah +Ġbl ah +ĠBel arus +erent ial +ĠT uc +Ġbank er +39 7 +Ġmosqu it +ĠScient ist +ĠMus ical +Ġh ust +Sh ift +Ġtor ment +Ġstand off +E duc +ĠF og +Ġampl ifier +Sh ape +Inst ance +ĠCrit ics +Ġda emon +H ouston +Ġmatt ress +ĠID F +Ġobsc ene +ĠA mer +hett i +Ġcomp iling +35 2 +vere tt +ĠRed uction +ist ration +ĠBl essed +ĠB achelor +3 16 +Ġpr ank +ĠVul can +dd ing +Ġm ourning +ĠQu int +ĠBl aster +test ing +Ġsed iment +>> > +ĠE ternity +ĠWH ERE +ĠM aze +Ġreact ing +ĠAl v +oms day +ĠC RA +Ġtransl ator +Ġbog us +at u +We bsite +oll s +Ġbapt ism +Ġs ibling +ĠAut umn +ve z +ãģ® é +gu ards +Ge org +assad ors +ĠFre ud +Ġcontin ents +ĠReg istry +Bern ie +ĸļ 士 +Ġtoler ant +ĠU W +Ġhor ribly +99 5 +ĠMID I +Ġimpat ient +oc ado +er i +ĠWor st +ĠNor ris +ĠTalk ing +Ġdef ends +ens able +Ġ20 21 +Ġanat omy +L ew +Ġdraw er +ĠCan berra +Ġpatri otic +é¾įå ĸļ士 +ĠAv g +AR M +Ġundis closed +Ġfare well +45 9 +b able +ĠAll ison +OL OG +Ġcon co +t ight +ĠAC PI +ĠM ines +l ich +ĠâĶ ľ +represent ed +200 000 +Ġenthusi ast +OT S +b il +ĠIng redients +Ġinvent or +ĠMy SQL +³³ Âł +ĠAB OUT +with in +Ġm k +B ul +ĠF ake +Ġdracon ian +W a +hel m +ĠTer ran +erv ille +Ġcommon place +SI ZE +Ġ" < +re place 
+ograph s +ĠSE LECT +inc ible +ĠMost ly +ĠShe ffield +ĠID E +ugg le +Ġcit ations +h urst +ĠUn ix +Ġunle ash +ĠP iper +ĠN ano +Ġsucc umb +Ġreluct ance +Ġ25 00 +ĠMer chant +Ġwire t +Ġcomb os +ĠBirth day +Ġchar coal +ĠU PS +ĠFair fax +Ġdrive way +ĠT ek +ĠP itch +ove re +Ġtechn icians +ĠAct ual +fl ation +ĠF iscal +ĠEm pty +an amo +Ġmag nesium +Ġsl ut +Ġgrow ers +Invest igators +( ): +ĠS atellite +ĠKe ynes +miss ive +l ane +Ġb orough +3 44 +ĠTE AM +ĠBet hesda +C V +h ower +ĠR AD +Ġch ant +ĠR iy +Ġcompos itions +Ġmild ly +Ġmedd ling +Ġag ility +ane ers +5 01 +Ġsyn th +ling er +29 1 +Ġex claimed +Part y +Ġcont amin +ĠMan or +ĠResp ond +Ġpra ising +Ġman ners +fle et +Sum mer +ĠLy nd +ĠDef initely +gr im +Ġbow ling +st ri +ç Ľ +y nt +Ġmand ates +D IV +Ġreconc ile +view s +ĠDam on +vet te +F lo +ĠGreat est +il on +ic ia +Ġportray al +Ġcush ion +50 4 +19 79 +oss al +App lic +sc ription +Ġmit igation +AT S +p ac +Ġer ased +Ġdefic iencies +ĠHolland e +ĠX u +Ġb red +Ġpregn ancies +f emin +Ġem ph +Ġpl anners +Ġout per +utter ing +Ġperpet rator +Ġm otto +ĠEll ison +ĠNE VER +Ġadmitted ly +AR I +ĠAzerbai jan +Ġmill isec +Ġcombust ion +ĠBott le +ĠL und +ĠP s +ĠD ress +Ġfabric ated +Ġbat tered +Ġs idel +ĠNot ting +Fore ign +ĠJer ome +0 20 +ĠAr bit +Ġkn ots +ĠR IGHT +M oving +ãģ Ļ +Ġsur geries +Ġcour thouse +Ġm astered +Ġhover ing +ĠBr an +ĠAl ison +Ġsaf est +m ilitary +Ġbull ied +Ġbar rage +Read er +ES E +ĠGe ographic +T ools +3 14 +ĠGe ek +ro th +gl ers +ĠF IN +Ï ģ +ĠA ston +al tern +48 8 +Ġveter in +G amer +Ġint el +ren ches +Sh ield +Ġam nesty +ĠB har +Ġp iled +Ġhonor able +ĠInst itutes +Ġso aked +Ġcom a +ĠE FF +34 1 +by tes +ĠG mail +le in +ĠCanad iens +m aterial +I l +Ġinstruct ors +ĠK Y +Ġconce ive +ub b +ĠP ossible +Ġeas ing +ĠChrist ina +Ġcar ic +ĠHD R +R OM +Ġsho vel +de lete +Ġp uff +ĠCh anging +Ġseam lessly +Att ribute +Ġacqu isitions +ak ery +ĠE F +Ġaut istic +ĠT akes +ĠPow der +ĠSt ir +5 10 +ĠBub ble +sett ings +ĠF owler +Ġmust ard +Ġmore over +Ġcopyright ed +ĠLED s +15 00 +æ ī +ĠH IS +en f +Ġcust od +ĠH uck +G i +Ġim g +An swer +C t +j ay +ĠInf rastructure +Ġfeder ally +L oc +Ġmicro bes +Ġover run +dd s +ot ent +adi ator +>>>> >>>> +Ġtorn ado +Ġadj ud +Ġintrig ued +Ġs i +ĠRevel ation +pro gress +Ġburgl ary +ĠSai yan +ĠK athy +Ġser pent +ĠAndre as +Ġcomp el +ess ler +ĠPl astic +ĠAd vent +ĠPos itive +ĠQ t +ĠHind us +reg istered +ular ity +Ġrighteous ness +Ġdemon ic +u itive +ĠB DS +ĠGre gg +c ia +ĠCrus ade +ĠSina i +W ARE ++ ( +Ġme ll +Ġder ail +y ards +A st +Ġnotice ably +ĠO ber +R am +Ġun noticed +Ġse q +av age +T s +Ġ6 40 +Ġconced e +Ġ] ) +F ill +Ġcapt ivity +ĠImprove ment +ĠCrus ader +ara oh +M AP +æ Ĺ +Ġstr ide +al ways +F ly +N it +Ġal gae +ĠCook ing +ĠDo ors +Mal ley +Ġpolic emen +ãģ į +Ġastron aut +access ible +49 5 +ĠR AW +cl iffe +udic rous +Ġdep ended +al ach +Ġvent ures +ra ke +Ġt its +ĠH ou +Ġcond om +ormon al +Ġind ent +Ġupload ing +Foot note +Import ant +Ġ27 1 +Ġmind ful +Ġcont ends +C ra +Ġcal ibr +ĠO ECD +plug in +F at +ĠIS S +ĠDynam ics +ans en +68 6 +' ), +Ġsp rite +Ġhand held +ĠH ipp +=~ =~ +Tr ust +Ġsem antics +ĠBund es +ĠRen o +ĠLiter ature +s ense +G ary +ĠA eg +ĠTr in +EE K +Ġcler ic +ĠSS H +Ġch rist +Ġinv ading +ib u +Ġen um +aur a +Ġal lege +ĠInc redible +B BC +Ġth ru +Ġsa iled +Ġem ulate +Ġin security +Ġc rou +Ġaccommod ations +Ġincompet ent +Ġsl ips +ĠEarth qu +s ama +IL LE +Ġi Phones +as aki +Ġby e +Ġar d +Ġext ras +Ġsl aughtered +Ġcrowd funding +res so +Ġfil ib +ĠER ROR +ĠT LS +e gg +ĠIt al +Ġen list +ĠCatal onia +ĠSc ots +Ġser geant +Ġdiss olve +N H +Ġstand ings +ri 
que +I Q +Ġbenef iciary +Ġaqu arium +You Tube +ĠPower Shell +Ġbright est +ĠWar rant +S old +Writ ing +Ġbegin nings +ĠRes erved +ĠLatin os +head ing +Ġ4 40 +Ġrooft op +AT ING +Ġ3 90 +VP N +G s +k ernel +turn ed +Ġprefer able +Ġturn overs +ĠH els +S a +ĠShin ji +ve h +ĠMOD ULE +V iol +Ġex iting +Ġj ab +ĠVan illa +Ġac ron +ĠG ap +ber n +A k +ĠMc Gu +Ġend lessly +ĠFar age +ĠNo el +V a +M K +Ġbr ute +ĠK ru +ĠES V +ĠOl ivia +âĢ ł +ĠK af +Ġtrust ing +Ġh ots +3 24 +Ġmal aria +Ġj son +Ġp ounding +ort ment +Count ry +Ġpostp oned +Ġunequ iv +? ), +ĠRo oney +udd ing +ĠLe ap +ur rence +sh apeshifter +ĠH AS +os ate +Ġca vern +Ġconserv atism +ĠB AD +Ġmile age +Ġarrest ing +V aults +Ġmix er +Dem ocratic +ĠB enson +Ġauth ored +8 000 +Ġpro active +ĠSpirit ual +t re +Ġincarcer ated +ĠS ort +Ġpe aked +Ġwield ing +re ciation +×Ļ × +P atch +ĠEm my +Ġex qu +tt o +ĠRat io +ĠP icks +ĠG ry +ph ant +Ġf ret +Ġeth n +Ġarch ived +% - +c ases +ĠBl aze +Ġim b +c v +y ss +im ony +Ġcount down +Ġaw akening +ĠTunis ia +ĠRe fer +ĠM J +Ġun natural +ĠCar negie +iz en +ĠN uggets +he ss +Ġev ils +64 7 +Ġintrodu ctory +l oving +ĠMcM ahon +Ġambig uity +L abel +ĠAlm ighty +Ġcolor ing +ĠCl aus +set ting +N ULL +ĠF avorite +ĠS IG +> ( +ĠSh iva +ĠMay er +Ġstorm ed +ĠCo verage +we apons +igh am +Ġun answered +Ġle ve +Ġc oy +c as +b ags +as ured +Se attle +ĠSant orum +ser ious +Ġcourage ous +ĠS oup +Ġconfisc ated +Ġ// / +Ġuncon ventional +Ġmom s +ĠRohing ya +ĠOrche stra +ĠPot ion +Ġdisc redit +ĠF IL +f ixed +ĠDe er +do i +ĠDim ension +Ġbureaucr ats +et een +Ġaction Group +oh m +Ġb umps +ĠUt ility +Ġsubmar ines +ren heit +re search +ĠShap iro +Ġsket ches +Ġde ceptive +ĠV il +es ame +ĠEss entially +Ġramp age +isk y +Ġmut tered +th ritis +Ġ23 6 +f et +b ars +Ġpup il +ĠTh ou +o S +s ong +Ġfract ured +Ġre vert +pict ure +Ġcrit erion +us her +Ġreperc ussions +ĠV intage +ĠSuper intendent +Offic ers +Ġflag ged +Ġbl ames +Ġin verse +ograp hers +Ġmakes hift +Ġdev oid +Ġfoss ils +ĠArist otle +ĠFund s +Ġde pleted +ĠFl u +ĠY uan +Ġw oes +Ġlip id +Ġsit u +requ isites +Ġfurn ish +ĠSam ar +Ġshame ful +Ġadverse ly +Ġad ept +Ġrem orse +Ġmurder ous +uck les +ĠE SL +Ġ3 14 +s ent +Ġred ef +ĠC ache +ĠP urs +ig ans +Ġ4 60 +Ġpres criptions +Ġf res +F uck +ocr ates +Tw enty +ĠWe ird +ĠT oggle +ĠC alled +itiz ens +Ġp oultry +Ġharvest ing +ãĤ¦ ãĤ¹ +Bott om +Ġcaution ed +t n +39 6 +ĠNik ki +Ġeval uations +Ġharass ing +Ġbind ings +ĠMon etary +Ġhit ters +Ġadvers ary +un ts +Ġset back +Ġenc rypt +ĠC ait +Ġl ows +eng es +ĠN orn +Ġbul bs +Ġbott led +ĠVoy ager +3 17 +Ġsp heres +p olitics +Ġsubt ract +Ġsens ations +Ġapp alling +Ġ3 16 +Ġenvironment ally +ĠST EM +Ġpub lishes +5 60 +Ġdilig ence +48 4 +Ġadv ises +Ġpet rol +Ġimag ining +Ġpatrol s +ĠInt eger +ĠAs hes +act us +ĠRad iant +ĠL T +it ability +ht aking +Set ting +Ġnu anced +ĠRe ef +ĠDevelop ers +N i +pie ces +99 0 +Lic ense +Ġlow ers +ĠOtt oman +3 27 +oo o +Ġqu itting +mark ets +Beh ind +Ġbas in +Ġdoc s +an ie +fl ash +ct l +Ġcivil ized +ĠFuk ushima +"] ," +ĠK S +ĠHonest ly +ar at +Ġconstruct s +ĠL ans +ĠD ire +ĠLI KE +ĠTrou ble +Ġwith holding +ĠOb livion +Ġsan ity +any a +Con st +Ġgro cer +ĠC elsius +Ġrecount ed +ĠW ife +B order +ate red +h appy +Ġspo iler +Ġlog ically +H all +Ġsucceed ing +Ġpoly morph +Ġax es +ĠShot gun +ĠS lim +ĠPrin ciples +ĠL eth +art a +Ġsc or +Sc reenshot +Ġrelax ation +#$ #$ +Ġdeter rent +idd y +Ġpower less +Ġles bians +Ġch ords +ĠEd ited +se lected +Ġseparat ists +000 2 +Ġair space +Ġturn around +Ġc unning +P ATH +P oly +Ġbomb ed +Ġt ion +x s +Ġwith hold +Ġw aged +ĠLiber ties +Fl ag 
+Ġcomfort ing +45 4 +ĠI ris +are rs +Ġr ag +Ġrel ocated +ĠGu arant +Ġstrateg ically +Ġgam ma +uber ty +ĠLock heed +g res +Ġgr illed +ĠLow e +st ats +ĠR ocks +Ġsens ing +Ġrent ing +ĠGe ological +ا Ø +ot rop +Ġse w +Ġimproper ly +48 6 +Ġâĸ ł +Ġstar ving +ĠB j +Disc ussion +3 28 +ĠCom bo +ĠFix es +N AT +Ġstri ving +th ora +Ġharvest ed +ĠP ing +Ġplay ful +Ġaven ues +Ġoccup ational +Ġw akes +ĠCou rier +Ġdrum mer +ĠBrow ser +ĠH outh +it u +Ġapp arel +p aste +Ġhun ted +ĠSecond ly +l ain +X Y +ĠP IN +ic ons +Ġcock tails +Ġs izable +Ġhurd les +est inal +ĠRecre ation +Ġe co +64 8 +ĠD ied +m int +Ġfinger prints +Ġdis pose +ĠBos nia +ts y +22 00 +Ġins pected +ĠF ou +Ġf uss +Ġamb ush +ĠR ak +Ġmanif ested +Pro secut +Ġsuff ice +ren ces +Ġcompens ated +ĠC yrus +Ġgen us +ĠWolver ine +ĠTrend s +Ġh ikes +ĠSe en +Ġen rol +C old +Ġpol itely +ĠSl av +ĠRu pert +Ġey ewitness +ĠAl to +Ġun comp +Ġposter ior +M ust +ĠHer z +Ġprogress ively +Ġ23 4 +Ġind ifference +ĠCunning ham +Ġacadem ia +Ġse wer +Ġast ounding +ĠA ES +r ather +Ġeld est +Ġclim bs +ĠAdd s +Ġout cry +Ġcont ag +ĠH ouses +Ġpe pt +ĠMel ania +interest ed +ĠU CH +ĠR oots +ĠHub bard +ĠT BD +ĠRoman ian +fil ename +St one +ĠIm pl +Ġchromos ome +C le +d x +Ġscram bled +ĠP t +Ġ24 2 +OP LE +Ġtremend ously +St reet +Ġcra ving +Ġbund led +ĠR G +p ipe +Ġinj uring +Ġarc ane +Part icip +ĠHero ic +st y +Ġto pping +ĠTemp est +rent ices +b h +Ġpar anoia +ĠUnic ode +Ġegreg ious +Ġ\ ' +ĠOsw ald +Ġgra vel +ĠSim psons +Ġbl and +ĠGuant anamo +Writ er +lin ers +ĠD ice +J C +Ġpar ity +Ġs ided +Ġ23 7 +ĠPyr rha +at ters +d k +F ine +comp an +Ġform ulated +ĠId ol +il ers +hem oth +ĠF av +Ġintr usion +Ġcar rots +ĠL ayer +ĠH acker +Ġ ---------------- +Ġmoder ation +é ģ +oc oc +Ġcharacter ize +ĠTe resa +Ġsocio economic +Ġper k +ĠParticip ation +tr aining +ĠPaul o +ph ys +Ġtrust worthy +Ġembod ied +ĠMer ch +c urrency +ĠPrior ity +Ġte asing +Ġabsor bing +Ġunf inished +ĠCompar ison +Ġdis ple +writ ers +Ġprofess ions +ĠPengu in +Ġang rily +ĠL INK +68 8 +ĠCor respond +Ġprev ailed +Ġcart el +l p +as ms +ĠRed emption +ĠIslam ists +effect s +d ose +ĠL atter +ĠHal ifax +Ġv as +ĠTop ics +ĠN amed +advert ising +zz a +IC ES +Ġret arded +ach able +ĠPupp et +ĠItem Level +Ġret ract +Ġident ifiable +A aron +ĠB uster +s ol +hel le +as semb +H ope +r anged +B a +ĠP urch +é Ģ +ĠSir i +Ġarri vals +Ġ19 12 +Ġshort ened +Ġ3 12 +Ġdiscrep ancy +ĠTem perature +ĠWal ton +Ġkind erg +p olit +Ġrem ix +Ġconnect ors +ãĥĺ ãĥ© +ĠKazakh stan +dom inated +Ġsu gars +im ble +ĠPan ic +ĠDem and +ĠCol ony +on en +ĠM ER +7 75 +ur ia +aza ar +ĠDeg ree +P ri +Ġsun shine +Ġ25 1 +Ġpsychedel ic +Ġdigit ally +ĠBra un +Ġsh immer +Ġsh ave +ĠTel esc +ĠAst ral +ĠVenezuel an +ĠO G +Ġc rawling +Int eg +ĠFe ather +Ġunfold ing +Ġappropri ation +Ġè£ı è +ĠMob ility +ĠN ey +- . 
+b ilt +L IN +ĠT ube +ĠCon versely +Ġkey boards +ĠC ao +Ġover th +Ġla ure +>> \ +ĠV iper +ach a +Off set +ĠR aleigh +ĠJ ae +J ordan +j p +Ġtotal itarian +Connect or +Ġobserv es +ĠSpart an +ĠIm mediately +ĠSc al +C ool +Ġt aps +Ġro ar +P ast +Ġch ars +ĠB ender +ĠShe ldon +Ġpain ter +Ġbe acon +ĠCreat ures +Ġdownt urn +Ġh inder +ĠAnd romeda +à Ľ +cc oli +ĠF itness +et rical +Ġutil izes +Ġsen ate +Ġen semble +Ġche ers +T W +Ġaff luent +k il +ry lic +ord ering +Com puter +Ġgru esome +ost ics +ĠUb isoft +ĠKel ley +Ġw rench +Ġbourgeois ie +IB LE +ĠPrest on +w orn +ar ist +reat ing +Ġst ained +ar ine +Ġsl ime +EN N +Ġche sts +Ġground water +ann ot +ĠTr ay +ĠLoc ke +ĠC TR +Ġd udes +ĠEx ternal +ĠDec oder +Ġpar amed +ĠMed line +80 9 +ĠD inner +rup al +g z +ĠG um +ĠDem o +j ee +Ġd h +ber man +arch s +Ġen qu +ĠEp stein +Ġdevast ation +Ġfriends hips +ĠAr d +Ġ23 1 +ĠRub in +ĠDist ance +Ġsp urred +Ġd ossier +Ġover looking +\\\\\\\\ \\\\\\\\ +Fore st +ĠCom es +\ ", +ĠIran ians +Ġf ixtures +L aughs +Ġcur ry +ĠKing ston +Ġsqu ash +Ġcat alogue +Ġabnormal ities +Ġdigest ive +.... ..... +Ġsubord inate +og ly +Ġ24 9 +M iddle +Ġmass ac +Ġburg ers +Ġdown stairs +Ġ19 31 +39 4 +ĠV G +Ġl asers +ĠS ikh +ĠAlex a +der ived +Ġcycl ist +ãģ® éŃĶ +onel iness +!!!! !!!! +Ġbuff s +leg ate +Ġrap ing +Ġrecomm ending +ro red +Ġmult icultural +un ique +Ġbusiness men +Ġune asy +ĠM AP +Ġdisp ersed +cipl ine +J ess +ĠK erala +å § +Ġabst raction +Sur v +U h +Ġprin ters +ij a +ow der +Ġanalog ous +ĠA SP +af er +Ġunfold ed +Ġlevel ing +Ġbre ached +ĠH earing +Ġn at +Ġtransl ating +crit ical +Ġant agonist +ĠYes terday +Ġfuzz y +w ash +m ere +Ġbe wild +ĠM ae +V irgin +ph rase +Ġsign aled +ĠH IGH +Ġprot ester +Ġgar ner +unk nown +Ġk ay +Ġabduct ed +Ġst alking +am n +Ġdes erving +ĠR iv +ĠJ orge +Ġscratch ing +ĠS aving +ip ing +Ġte ase +Ġmission ary +ĠMor row +T IME +P resent +Ġchem otherapy +tern ess +ĠH omes +ĠP urdue +Ġst aunch +ĠWhit ney +ĠTH ERE +Î ¼ +iat us +ĠErn est +ĠDe ploy +Ġcove ted +F ML +ĠDial ogue +Ġex ited +f ruit +Ġner d +":" "," +Ġv ivo +ru ly +4 60 +ĠAm en +rehens ible +Ġâ ĺ +D IR +Ġad herence +Ġche w +ĠCo ke +ĠSerge i +dig ital +ĠNe ck +g ently +enth al +/ ) +Ġwe ary +Ġgu ise +ĠConc ord +ĠOn ion +at cher +Ġb inge +ĠDirect ive +Ġman ned +ans k +Ġill usions +Ġbillion aires +38 3 +oly n +odynam ic +ĠWhe at +ĠA lic +Ġcol oured +ĠN AFTA +ab o +Ġmac ros +ind ependent +s weet +Ġsp ac +ĠK abul +Ġ Ä +em e +Ġdict ated +Ġsh outs += { +Ġr ipping +ĠSh ay +ĠCr icket +direct ed +Ġanalys ed +ĠWAR RANT +ag ons +ĠBlaz ers +Ġche ered +Ġar ithmetic +ĠTan z +37 3 +ĠFl ags +Ġ29 5 +Ġw itches +ĠIn cluded +ĠG ained +ĠBl ades +G am +ĠSam antha +ĠAtl antis +ĠPr att +Ġspo iled +ĠI B +ĠRam irez +Pro bably +re ro +ĠN g +ĠWar lock +t p +Ġover he +Ġadministr ations +Ġt int +Ġreg iment +Ġpist ols +Ġblank ets +Ġep ist +Ġbowl s +Ġhydra ulic +Ġde an +Ġj ung +Ġasc end +70 5 +ĠSant iago +à ® +Ġun avoid +ĠSh aman +re b +Ġstem ming +99 8 +ĠM G +st icks +esthes ia +ER O +Ġmor bid +ĠGr ill +ĠP oe +any l +Ġdele ting +ĠSurve illance +Ġdirect ives +Ġiter ations +ĠR ox +ĠMil ky +F ather +Ġpat ented +44 7 +Ġprec ursor +Ġm aiden +ĠP hen +ĠVe gan +ĠPat ent +K elly +Redd itor +Ġn ods +Ġvent ilation +ĠSchwar z +Ġw izards +Ġomin ous +ĠHe ads +ĠB G +Ġl umber +ĠSp iel +Ġis Enabled +Ġancest ral +ĠSh ips +Ġwrest ler +ph i +Ġy uan +ĠRebell ion +Ġice berg +Ġmag ically +Ġdivers ion +ar ro +yth m +ĠR iders +ĠRob bie +ĠK ara +ĠMain tenance +ĠHer b +Ġhar ms +p acked +ĠFe instein +Ġmarry ing +Ġbl ending +ĠR ates +Ġ18 80 +Ġwr ink +ĠUn ch +ĠTor ch +desc ribed +Ġhuman oid +ilit ating 
+ĠCon v +ĠFe ld +IGH TS +Ġwhistlebl ower +ort mund +ets y +arre tt +ĠMon o +ĠI ke +ĠC NBC +ĠW AY +ĠMD MA +ĠIndividual s +Ġsupplement al +Ġpower house +ĠSt ru +F ocus +aph ael +ĠCol leg +att i +Z A +Ġp erenn +ĠSign ature +ĠRod ney +Ġcub es +idd led +ĠD ante +ĠIN V +iling ual +ĠC th +Ġso fa +Ġintimid ate +ĠR oe +ĠDi plom +ĠCount ries +ays on +Ġextrad ition +Ġdis abling +ĠCard iff +Ġmemor andum +ĠTr ace +Ġ?? ? +se ctor +ĠRou hani +ĠY ates +ĠFree ze +Ġbl adder +M otor +ĠProm ise +ant asy +Ġforesee able +ĠC ologne +cont ainer +ĠTre es +ĠG ors +ĠSin clair +Ġbar ring +key e +Ġsl ashed +ĠStat istical +é ĩ +Ġâĸ º +All ows +Ġhum ility +Ġdr illed +ĠF urn +44 3 +Ġse wage +Ġhome page +Ġcour tyard +Ġv ile +Ġsubsid iaries +aj o +direct ory +Ġam mon +V ers +charg es +Ġ} } +ĠCh ains +Ġ24 6 +n ob +Ġper cept +Ġg rit +Ġfisher men +ĠIraq is +ĠDIS TR +ĠF ULL +ĠEval uation +g raph +at ial +Ġcooper ating +Ġmel an +Ġenlight ened +Ġal i +t ailed +Ġsal ute +Ġweak est +ĠBull dogs +U A +ĠAll oy +Ġsem en +oc ene +ĠWilliam son +s pr +, âĢĶ +ĠG F +itt ens +Be at +ĠJ unk +iph ate +ĠFarm ers +ĠBit coins +ig ers +d h +ĠL oyal +p ayer +Ġentert ained +Ġpenn ed +Ġcoup on +Que ue +Ġweaken ing +c arry +Ġunderest imate +Ġshoot out +Ġcharism atic +ĠProced ure +Ġprud ent +in ances +Ġric hes +Ġcort ical +Ġstr ides +Ġd rib +ĠOil ers +5 40 +ĠPer form +ĠBang kok +Ġe uth +S ER +Ġsimpl istic +t ops +camp aign +Q uality +Ġimpover ished +ĠEisen hower +Ġaug ment +ĠH arden +Ġinterven ed +Ġlist ens +ĠK ok +Ġs age +Ġrub bish +ĠD ed +Ġm ull +pe lling +Ġvide ot +Produ ction +D J +m iah +Ġadapt ations +Ġmed ically +Ġboard ed +Ġarrog ance +Ġscra pped +Ġopp ress +FORM ATION +Ġj unction +4 15 +EE EE +S kill +Ġsub du +ĠSug gest +ĠP ett +Ġle tt +ĠMan ip +ĠC af +ĠCooper ation +T her +Ġreg ained +¶ æ +ref lect +Ġth ugs +ĠShel by +Ġdict ates +ĠWe iner +ĠH ale +Ġbatt leground +s child +Ġcond ol +h unt +osit ories +Ġacc uses +Fil ename +Ġsh ri +Ġmotiv ate +Ġreflect ions +N ull +ĠL obby +¥ µ +ĠS ATA +ĠBack up +Ñ ĥ +n in +ĠCor rection +Ġju icy +ut ra +ĠP ric +Ġrest raining +ĠAir bnb +ĠAr rest +Ġappropri ations +Ġsl opes +Ġmans laughter +Ġwork ings +ĠH uss +ĠF rey +Le ave +ĠHarm ony +ĠF eder +Ġ4 30 +Ġt rench +Ġglad ly +Ġbull pen +ĠG au +b ones +Ġgro ove +Ġpre text +ã ħĭ +Ġtransm itter +ĠComp onent +Ġunder age +ĠEm pires +T ile +Ġo y +ĠMar vin +ĠC AS +Ġbl oss +Ġrepl icated +ĠMar iners +Marc us +ĠBl ocks +Ġliber ated +Ġbutter fly +Fe el +Ġfer mentation +Ġyou tube +Ġoff end +ĠTer m +res ist +Ġcess ation +Ġinsurg ency +Ġb ir +ĠRa ise +59 5 +Ġhypothes es +50 2 +Ġpl aque +ocr at +Ġjack ets +ĠHuff Post +am ong +Ġconf er +48 7 +ĠL illy +Ġadapt ing +ĠF ay +Ġsh oved +ve c +Ġref ine +Ġg on +Ġgun men +z ai +ĠShut tle +ĠI zan +Ġ19 13 +Ġple thora +· · +Ġ5 10 +Ġp uberty +Ġ24 1 +ĠWe alth +ĠAl ma +ĠM EM +ĠAd ults +C as +pr ison +R ace +Ġwater proof +Ġathlet icism +Ġcapital ize +ĠJu ice +Ġillum inated +ĠP ascal +Ġirrit ation +ĠWitness es +ad le +ĠAst ro +Ġf ax +ĠEl vis +Prim ary +ĠL ich +ĠEl ves +Ġres iding +Ġst umble +3 19 +ĠP KK +Ġadvers aries +D OS +ĠR itual +Ġsm ear +Ġar son +ident al +Ġsc ant +Ġmon archy +Ġhal ftime +Ġresid ue +Ġind ign +ĠSh aun +ĠEl m +aur i +A ff +W ATCH +ĠLy on +hel ps +36 1 +Ġlobby ist +Ġdimin ishing +Ġout breaks +Ġgo ats +f avorite +ĠN ah +son ian +ĠBo oster +Ġsand box +ĠF are +ĠMalt a +Ġatt Rot +ĠM OR +ld e +Ġnavig ating +T ouch +Ġunt rue +ĠDis aster +Ġl udicrous +Pass word +ĠJ FK +blog spot +4 16 +ĠUN DER +ern al +Ġdelay ing +T OP +Ġimpl ants +ĠAV G +ĠH uge +att r +Ġjournal istic +ĠPe yton +ĠI A +R ap +go al +ĠProgram me +Ġsm ashing +w ives +print 
ln +ĠPl ague +in us +EE P +Ġcru iser +ĠPar ish +umin ium +Ġoccup ants +ĠJ ihad +m op +Ġp int +Ġhe ct +ĠMe cca +direct or +ĠFund ing +ĠM ixed +Ġst ag +T ier +Ġg ust +Ġbright ly +ors i +Ġup hill +R D +Ġles ions +ĠBund y +liv ious +Ġbi ologist +ĠFac ulty +ĠAuthor ization +Ġ24 4 +All ow +ï ¸ +ĠGi ul +Ġpert inent +ot aur +es se +ĠRo of +Ġunman ned +35 1 +ĠSh ak +ĠO rient +Ġend anger +D ir +Ġrepl en +ed ient +Ġtail or +Ġgad gets +Ġaud ible +âĺ Ĩ +N ice +Ġbomb ard +ĠR ape +Ġdef iance +ĠTW O +ĠFilip ino +Ġunaff ected +erv atives +Ġso ared +ĠBol ton +Ġcomprom ising +ĠBrew ers +R AL +ĠA HL +icy cle +Ġv ampires +Ġdi pped +oy er +ĠX III +Ġsidew ays +ĠW aste +ĠD iss +ĠâĶľ âĶĢâĶĢ +$ . +Ġhabit ats +ĠBe ef +tr uth +tr ained +spl it +R us +And y +ĠB ram +RE P +p id +è£ ħ +ĠMut ant +An im +ĠMar ina +Ġfut ile +hig hest +f requency +Ġepile psy +Ġcop ing +Ġconc ise +Ġtr acing +ĠS UN +pan el +ĠSoph ie +ĠCrow ley +ĠAd olf +ĠShoot er +Ġsh aky +ĠI G +ĠL ies +ĠBar ber +p kg +Ġupt ake +Ġpred atory +UL TS +/ ** +Ġintox icated +ĠWest brook +od der +he ment +Ġbas eman +AP D +st orage +ĠFif ty +ed itor +G EN +UT ION +ir ting +Ġse wing +r ift +Ġag ony +ĠS ands +Ġ25 4 +C ash +Ġl odge +Ġp unt +N atural +ĠIde as +Ġerrone ous +ĠSens or +ĠHann ity +Ġ19 21 +Ġm ould +ĠG on +kay a +Ġanonym ously +ĠK EY +Ġsim ulator +W inter +Ġstream ed +50 7 +? ", +Ġte ased +Ġco efficient +Ġwart ime +ĠTH R +' '. +ĠBank ing +mp ire +Ġf andom +Ġl ia +G a +Ġdown hill +Ġinterpre ting +Ind ividual +N orm +Ġjealous y +bit coin +Ġple asures +ĠToy s +ĠChev rolet +ĠAd visor +IZ E +Ġrecept ions +70 6 +C ro +Ġ26 2 +Ġcit rus +ir u +Review er +ject ed +U ES +an z +19 81 +ĠWork er +Ġcompl ied +ores cent +contin ental +T on +ĠPr ism +ĠShe ep +Ġ28 8 +n ox +ĠV og +O rd +Ġreal ms +te k +Ġirrig ation +Ġbicy cles +Ġelectron ically +p oly +t all +() ); +Ġaest hetics +ĠInteg rated +Expl ore +Ġd unk +47 6 +p ain +ĠJac ques +ĠD mit +Fram es +Ġreun ited +Ġhum id +D ro +P olitical +Ġyouth ful +Ġent ails +Ġmosqu ito +36 3 +spe cies +Ġcoord inating +ĠMay hem +ĠMagn us +M ount +Impro ved +ĠST ATE +ATT LE +Ġflow ed +Ġtack led +Ġfashion ed +Ġre organ +iv ari +f inger +Ġreluct antly +et ting +ĠV and +you ng +ĠGar land +Ġpresum ption +Ġamen ities +ĠPle asant +on ential +ĠO xy +Ġmor als +ĠY ah +Read y +Sim on +En h +D emon +Ġcl ich +Mon itor +ĠD U +Ġwel comes +Ġstand out +Ġdread ful +Ġban anas +Ġball oons +h ooting +bas ic +Ġsuff ix +Ġd uly +can o +Ch ain +at os +Ġgeop olitical +Ġ( & +ĠGem ini +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +Ġacqu itted +L uck +prot ect +10 24 +Ġsc arcity +Ġmind fulness +ec ided +D N +pr ime +ĠPres idents +ĠVID EO +Ġ( âĪĴ +add ock +N OR +ĠP ru +p un +ĠL OL +)) )) +ĠL iqu +ĠS AS +Ġsty ling +Ġpunish ments +Ġnum b +Ġasc ertain +ĠRock ies +f lu +Th umbnail +Ġperpet rated +ĠSem i +Ġdis arm +ĠOld er +ĠEx ception +Ġexponent ially +ĠCommun ities +Ġabol ish +ĠPart ner +pt oms +Ġ7 77 +ĠFo ley +ĠC ases +Ġgre ase +ĠReb irth +G round +Ġ; ) +ĠDoct rine +ik ini +Y e +ĠBl ossom +Ġpers ists +b ill +Ġinf usion +Ġbud dies +9 11 +ĠPat ient +Ġdem os +Ġacquaint ance +ĠP aw +at ari +Ġx ml +Ġfasc ination +ĠSer ve +Ï Ĥ +br anded +Ġa z +Return s +Ġover shadow +Ġro am +Ġspeed y +n umbered +hel ial +Ġdisc iple +Ġass urances +g iven +pect ing +ĠN atalie +çĶ ° +Ġmosquit oes +rote in +Ġnumer ic +Ġindepend ents +Ġtrans itional +Ġreaction ary +ĠMech dragon +do ctor +Ġshort est +Ġsequ ential +ĠB ac +ĠAccount s +ãģ Į +ach y +ract ive +ĠReg iment +Ġbreat htaking +ffic iency +ĠB ates +Ġ3 11 +Ġward 
robe +ft s +ĠBer k +Sim ply +ĠRivers ide +iver ing +ident ial +lu cent +Ġen riched +ĠCon ver +ĠG iving +ãĥ Ļ +Ġlegal ize +ĠF TC +Ġfre aking +M ix +Ġter restrial +es ian +ci ents +W ing +LO AD +Ġled ge +ĠViol ent +ĠMet all +Ġ30 8 +Ġs outheastern +hett o +M eat +Ġslow down +Ġret reated +Jere my +end as +**** * +er ic +Ġre ins +opp able +ĠHuman ity +ear ances +rig an +C amera +Ġwa ivers +s oc +Ġalter ation +trans form +ĠC emetery +50 6 +Ġindef inite +Ġstim ulating +y g +60 3 +ĠS op +Ġdescript ive +Ph ase +ĠEd mund +Ġpneum onia +vent us +A mb +Ġlabor atories +ĠEx clusive +ug ar +W ere +Ġmalf unction +Ġhomosexual s +Ġ---- --- +un i +Ġturb ines +ĠEqu ity +D u +Ġmind ed +ĠR H +ĠBlack hawks +Ġfe ats +Ġ17 00 +re pl +36 2 +lad en +Ġindisp ensable +ly ss +tt i +Ġre el +Ġdiver ted +Ġlik eness +Ġsubscript ions +Ġfing ert +Ġfil thy +dest ruct +d raft +ĠBernard ino +l aunch +Ġper plex +ĠS UM +car b +Ġswe ater +ĠVent ure +ĠJ ag +ĠCele b +ĠV oters +Ġstead fast +Ġathlet ics +ĠHans on +ĠDr ac +Tr acker +Ġcomm end +ĠPres idency +ĠD ID +in formed +Ġweb page +P retty +Ġforce fully +ãĥĥ ãĤ¯ +Ġrel ocation +Ġsat ire +â ī +ĠSunder land +æ Ħ +V oice +???? ???? +Ġinform ant +Ġbow el +ĠUn iform +Ġ ..." +Ġpur ge +Ġpic nic +ĠU mb +ĠU PDATE +ĠSapp hire +ĠSt all +le arn +Ġobject ively +Ġob liter +Ġlooph ole +Ġjour neys +Ġo mission +Pro s +ĠSid ney +pl oma +Ġspray ed +Ġg uru +Ġtra itor +Ġtim et +Ġsn apping +ĠSe vent +urn al +ĠUk ip +Ġb owed +por al +l iberal +R os +Quest ions +i OS +Ġsummar ize +ST AT +Ġ18 50 +ap est +Ġl ender +ĠVari able +br inging +ĠL ORD +, ) +Ġcollaps es +x iety +ĠN ed +Y D +ĠSch a +Ġantib ody +Ġdis band +y re +ill usion +Ġro ver +s hed +ĠHiro sh +cc i +Ġcal am +ĠMort on +P interest +Ġ19 28 +ĠE uras +ord es +Ġf ences +ĠIn ventory +ĠVal encia +ĠU d +ĠT iff +Ġsqu e +Ġqu otation +Ġtroubles ome +er ker +QU EST +ĠKing doms +s outh +Ġle vy +Pr ince +ĠSt ing +Ġnick named +Ġapp e +Ġphot ographic +Ġcorp us +re ference +ĠT rog +U nt +) =( +ĠLat via +Ġactiv ating +Ġlicense e +Ġdispar ities +ĠNews letter +ãĥĥ ãĥĪ +Ġfree ing +ĠJe ep +ĠPer ception +ins k +Ġsil icone +ĠHay den +Le an +ĠSuz uki +ibr arian +66 8 +Ġsp or +Ġcorrel ations +ag hetti +Ġtu ber +ĠIP CC +il us +ĠV u +Ġwealth iest +ĠCarb uncle +an za +Ġfool ed +ĠZ ur +Ġd addy +ran o +il ian +Ġknock out +f man +requ ired +ĠWik ileaks +ĠD uffy +ON T +Ġins ol +ĠObject s +Ġb ou +ĠNord ic +ĠIns ert +sc an +Ġd ancers +Ġid iots +major ity +ĠNev ille +ĠFree BSD +Ġt art +pan ic +69 0 +Ġcoc oa +Ġsam pled +Ġlook up +Ind ust +Ġinject ions +gen re +Ġa u +Ġroad way +Ġgen itals +K ind +ĠEx aminer +ĠY az +F resh +Ġpar alysis +ĠAl uminum +Ġre ap +ok é +Ġsl oppy +ĠTun nel +pos ium +ner y +en ic +Ġher bal +ĠOut er +ĠBuild er +Ġinc ur +Ġide ologies +Ġback ups +cons uming +ĠDet ect +de ck +ĠKN OW +ĠG ret +ĠM IC +Ġtough ness +ĠEx hibit +Ġh ive +L es +ĠSCH OOL +ĠAt ari +ald e +ĠN ull +and estine +m ouse +Ġbrig ade +48 9 +Ġrev ol +ĠLaw son +ĠW ah +op oly +eb ted +ĠS aunders +Ġ3 13 +ĠW inc +Ġtab oo +ĠHel met +Ġw edge +ch ip +ĠT ina +b g +Ġinf uri +r n +Ġanomal ies +ĠSy nc +ĠEx am +ĠComm it +ĠDi ary +ĠALS O +ĠDe bor +omed ical +Ġcomprehens ion +6 55 +Ġempower ing +Ġ ire +Ġju ices +ĠE TH +ĠBox ing +=" / +Ġfacilit ated +p oke +ĠPars ons +ĠMod er +tra vel +Ġcivil izations +Ġliber tarians +Ġrun e +ĠCl arks +at hed +Ġcampaign ers +ĠDis patch +ĠFah renheit +ĠCap com +-------- -- +Ġl ace +Ġdr aining +Ġl iner +ĠArt ificial +é n +t ask +] ). 
+ĠGM O +ĠOper ator +ord inary +ĠInf luence +ĠU ps +Ġpot ency +uss en +osp ons +ĠSw im +ĠDead line +Un ity +Ġcul inary +Ġenlight enment +Ġwe arer +Ġmin ed +Ġp ly +Ġinc est +ĠDVD s +W alk +B TC +Tr ade +Ġdev al +ib and +ĠOvers ight +Palest inian +Ġd art +Ġm ul +L R +Ġrem ovable +ĠReal ms +ì Ŀ +Ġmisc ar +ĠV ulkan +68 5 +è re +ĠS ap +Ġmer ging +ĠCar ly +che ster +Ġbr isk +Ġlux urious +ĠGener ator +Ġbit terness +Ġed ible +Ġ24 3 +T G +Ġrect angle +With No +bel ow +J enn +Ġdark est +Ġh itch +Ġdos age +Ġsc aven +ĠK eller +ĠIllust rated +Certain ly +ĠMaver icks +Marg inal +Ġdiarr hea +Ġenorm ously +Ġ9 99 +sh r +qu art +Ġadam ant +ĠM ew +Ġren ovation +Ġcerv ical +ĠPercent age +en ers +ĠKim ber +Ġflo ats +Ġde x +ĠW itcher +ĠSwan sea +d m +Ġsal ty +y ellow +Ġca pe +ĠDr ain +ĠPaul a +ĠTol edo +les i +Mag azine +ĠW ick +ĠM n +ĠA ck +ĠR iding +AS ON +Ġhom ophobic +AR P +Ġwand ered +C PU +ood oo +ĠP ipe +Ġtight ening +ĠBut t +3 18 +Ġdesert ed +S ession +Ġfacilit ating +J ump +Ġemer gencies +OW ER +Ġexhaust ive +ĠAF TER +Ġheart beat +ĠLab el +ack y +ĠCert ified +ilt ration +Z e +ĠU tt +Ġ13 00 +Ġpres ume +ĠDis p +Ġsur ged +Ġdoll s +Col umb +Ġchim pan +ĠR azor +Ġt icks +Ġcouncill or +Ġpilgr image +ĠReb els +ĠQ C +ĠA uction +x ia +ik k +b red +Ġinsert ion +Ġco arse +d B +SE E +ĠZ ap +ĠF oo +Ġcontem por +ĠQuarter ly +ot ions +ĠAl chemist +ĠT rey +ĠDu o +S weet +80 4 +ĠGi ov +Ġfun n +N in +h off +Ġram ifications +Ġ19 22 +ĠExper ts +az es +Ġgar ments +ar ial +ĠN ab +Ġ25 7 +ĠV ed +Ġhum orous +ĠPom pe +Ġn ylon +Ġlur king +ĠSerge y +ĠMatt is +Ġmisogyn y +ĠComp onents +ĠWatch ing +ĠF olk +ract ical +B ush +Ġt aped +Ġgroup ing +Ġbe ads +Ġ20 48 +Ġcon du +quer que +Read ing +Ġgriev ances +Ult ra +Ġend point +H ig +ĠSt atic +ĠScar borough +L ua +ĠMess i +a qu +ĠPsy Net +ĠR udd +Ġa venue +v p +J er +Ġsh ady +ĠRes ist +ĠArt emis +Ġcare less +Ġbro kers +Ġtemper ament +Ġ5 20 +T ags +ĠTurn ing +Ġut tered +Ġp edd +Ġimpro vised +Ġ: ( +Ġtab l +Ġpl ains +16 00 +press ure +ĠEss ence +marg in +friend s +ĠRest oration +Ġpoll ut +ĠPok er +ĠAugust ine +ĠC IS +ĠSE AL +or ama +Ġth wart +se ek +Ġp agan + º +cp u +Ġg arn +Ġass ortment +ĠI LCS +t ower +Recomm ended +Ġun born +ĠRandom Redditor +ĠRandomRedditor WithNo +Ġparaly zed +Ġeru ption +Ġinter sect +ĠSt oke +ĠS co +B ind +å ¾ +ĠP NG +ĠNeg ative +ĠNO AA +Le on +Ġall oy +ĠL ama +ĠD iversity +5 75 +Ġunderest imated +ĠSc or +Ġm ural +Ġb usted +so on +l if +Ġnone x +Ġall ergy +ĠUnder world +ĠR ays +ĠBl asio +Ġh rs +ĠD ir +Ġ3 27 +by ter +Ġrepl acements +Ġactiv ates +ri ved +M H +Ġp ans +ĠH I +Ġlong itudinal +Ġnu isance +al er +Ġsw ell +ĠS igned +s ci +ĠIs les +ĠA GA +Ġdef iant +Ġson ic +oc on +K C +ĠA im +t ie +ah ah +Ġm L +D X +Ġb isc +ĠBill board +ĠSY STEM +NE Y +ga ard +Ġdist ressed +former ly +Al an +Ġche fs +Ġopt ics +ĠC omet +ĠAM C +Ġredes igned +irm ation +Ġsight ings +38 2 +3 11 +ĠW B +Ġcont raction +ĠT OTAL +D ual +Ġstart led +Ġunderstand ably +Ġsung lasses +ETH OD +Ġd ocker +Ġsurf ing +ĠH EL +ĠSl ack +ton es +Ġsh alt +Vis ual +49 8 +Dep artment +c ussion +Ġunrest ricted +Ġt ad +Ġre name +employ ed +Ġeduc ating +Ġgrin ned +bed room +ĠActiv ities +ĠV elvet +ĠSW AT +Ġsh uffle +ig or +Ġsatur ation +F inding +c ream +ic ter +Ġv odka +tr acking +te c +Ġfore ground +iest a +Ġve hement +ĠEC B +ĠT ie +E y +Ġt urtles +ĠRail road +ĠKat z +ĠFram es +Ġmen ace +ĠFell owship +ĠEss ential +ugg ish +Ġdri p +ch witz +ĠKy oto +s b +ĠN ina +Param eter +Ġal arms +ĠCl aud +Ġpione ering +Ġchief ly +ĠSc ream +Col lection +Ġthank fully +ĠRonald o +åŃ IJ +st rip +ĠDisney land +com mercial +See ing +S oul 
+Ġevac uate +Ġc iv +ĠAs he +Ġdiv ides +ĠD agger +rehens ive +Ġber ries +ĠD F +Ġs ushi +Ġplur ality +W I +Ġdisadvant aged +Ġbatt alion +ob iles +45 1 +Ġcl ing +Ġunden iable +ĠL ounge +Ġha unt +p he +Ġquant ify +Ġdiff ered +Ġ[* ] +ĠV iz +c um +sl ave +Ġvide og +Ġqu ar +Ġbund les +ĠAl onso +t ackle +Ġneur onal +Ġlandsl ide +conf irmed +ĠDep th +Ġrenew ables +B ear +ĠMaced onia +Ġjer seys +Ġb unk +ĠSp awn +ĠControl s +ĠBuch anan +Ġrobot ics +Ġemphas izing +ĠTut orial +h yp +ist on +Ġmonument al +æ ° +ĠCar ry +Ġt bsp +en ance +H ill +art hed +Ġro tten +De an +Ġtw isting +Ġgood will +Ġimm ersion +L iving +Ġbr ushes +ĠC GI +ĠAt k +tr aditional +Ġph antom +ĠSt amina +Ġexpans ions +ĠMar in +Ġembark ed +ĠE g +int estinal +ĠPE OPLE +ĠBo oth +ĠApp alach +Ġreleg ated +V T +M IT +Ġmust er +Ġwithdraw ing +Ġmicrosc ope +ĠG athering +ĠC rescent +ĠArgent ine +ĠDec re +ĠDomin ic +Ġbud s +ant age +ĠI on +Ġwid ened +ONS ORED +ĠGl oves +iann opoulos +raz en +fe el +Ġrepay ment +Ġhind sight +ĠRE ALLY +ĠPist ol +ĠBra h +Ġwat ts +Ġsurv ives +Ġfl urry +iss y +Al ert +ĠUrug uay +Ph oenix +S low +ĠG rave +ĠF ir +Ġmanage able +Ġtar iff +ĠU DP +ĠPist ons +ĠNiger ian +Ġstrike outs +Ġcos metics +whel ming +f ab +c ape +pro xy +Ġre think +Ġover coming +sim ple +Ġw oo +Ġdistract ing +ĠSt anton +ĠTuls a +ĠD ock +65 9 +Ġdisc ord +ĠEm acs +ĠV es +ĠR OB +Ġreass uring +Ġcons ortium +Muslim s +3 21 +Ġprompt s +se i +ĠH itch +imp osed +ĠF ool +Ġindisc rim +wr ong +bu querque +D avis +! ] +Ġtim eless +ĠNE ED +Ġpestic ide +Ġrally ing +ĠCal der +Ġå ¤ +Ġx p +ĠUn le +ĠEx port +lu aj +B uff +) [ +Ġsq or +S audi +Ġis tg +Ġindul ge +pro c +Ġdisg usted +Ġcomp ounded +Ġn em +Ġschool ing +ĠC ure +process ing +S ol +Ġpro verb +it ized +ĠAlv arez +Ġscar f +Ġrect angular +re ve +Ġh ormonal +ĠSt ress +itiz en +Ġ4 25 +girl s +ĠNo ir +ĠR app +Ġmar ches +ch urch +ĠUs es +Ġ40 5 +ĠBer m +Ġord inances +ĠJud gment +Charg es +ĠZ in +Ġdust y +Ġstraw berries +Ġper ce +ĠTh ur +ĠDebor ah +net flix +ĠLam bert +Ġam used +ĠGu ang +Y OU +R GB +ĠC CTV +Ġf iat +r ang +Ġf ederation +ĠM ant +ĠB ust +ĠM are +respect ive +ĠM igration +ĠB IT +59 0 +Ġpatriot ism +Ġout lining +reg ion +ĠJos é +Ġbl asting +ĠEz ra +B s +Ġundermin es +ĠSm ooth +Ġcl ashed +rad io +Ġtransition ing +ĠBucc aneers +ĠOw l +Ġplug s +Ġh iatus +ĠPin ball +Ġm ig +ĠNut r +ĠWolf e +Ġinteg ers +Ġor bits +ĠEd win +ĠDirect X +b ite +Ġbl azing +v r +Ed ge +ĠP ID +ex it +ĠCom ed +ĠPath finder +ĠGu id +ĠSign s +ĠZ er +ĠAg enda +Ġreimburse ment +M esh +i Phone +ĠMar cos +ĠS ites +h ate +en burg +Ġs ockets +p end +Bat man +v ir +ĠSH OW +Ġprovision al +con n +ĠDeath s +AT IVE +Pro file +sy m +J A +Ġnin ja +inst alled +id ates +eb ra +ĠOm aha +Ġse izing +ĠBe asts +Ġsal ts +M ission +Gener ally +ĠTr ilogy +he on +leg ates +Ġd ime +Ġf aire +par able +G raph +Ġtotal ing +Ġdiagram s +ĠYan uk +ple t +ĠMe h +Ġmyth ical +ĠStep hens +aut ical +ochem istry +Ġkil ograms +Ġel bows +anc ock +ĠB CE +ĠPr ague +Ġimpro v +ĠDev in +Ġ" \ +par alle +Ġsuprem acists +ĠB illion +Ġreg imen +inn acle +Ġrequ isite +ang an +ĠBur lington +ain ment +ĠObject ive +oms ky +G V +Ġun ilateral +Ġt c +Ġh ires +ment al +Ġinvol untary +Ġtrans pl +ĠASC II + ¨ +Ev ents +Ġdoub ted +ĠKa plan +ĠCour age +ig on +ĠMan aging +ĠT art +Ġfalse hood +ĠV iolet +Ġair s +Ġfertil izer +Brit ain +Ġaqu atic +ou f +W ords +ĠHart ford +Ġeven ings +ĠV engeance +qu ite +G all +ĠP ret +Ġp df +ĠL M +ĠSo chi +ĠInter cept +9 20 +Ġprofit ability +ĠId le +ĠMac Donald +ĠEst ablishment +um sy +Ġgather ings +ĠN aj +Charl ie +Ġas cent +ĠProt ector +Ġal gebra +Ġbi os +for ums 
+EL S +Introdu ced +Ġ3 35 +Ġastron omy +Cont ribut +ĠPol ic +Pl atform +Ġcontain ment +w rap +Ġcoron ary +ĠJ elly +man ager +Ġheart breaking +c air +ĠChe ro +c gi +Med ical +ĠAccount ability +! !" +oph ile +Ġpsych otic +ĠRest rict +Ġequ itable +iss ues +Ġ19 05 +ĠN ek +c ised +ĠTr acking +Ġo zone +Ġcook er +ros is +Ġre open +Ġinf inity +ĠPharm aceutical +ens ional +Att empt +ĠR ory +Mar co +Ġawa its +H OW +t reated +Ġbol st +Ġreve red +Ġp ods +opp ers +00 10 +Ġampl itude +ric an +SP ONSORED +Ġtrou sers +Ġhal ves +ĠK aine +ĠCut ler +ĠA UTH +Ġsplend id +Ġprevent ive +ĠDud ley +if acts +umin ati +ĠY in +Ġad mon +ĠV ag +Ġin verted +Ġhast ily +ĠH ague +L yn +Ġled ger +Ġastron omical +get ting +Ġcirc a +ĠC ic +ĠTenn is +Lim ited +Ġd ru +ĠBY U +Ġtrave llers +Ġp ane +ĠInt ro +Ġpatient ly +Ġa iding +Ġlo os +ĠT ough +Ġ29 3 +Ġconsum es +Source File +Ġ"" " +Ġbond ing +Ġtil ted +Ġmenstru al +ĠCel estial +UL AR +Plug in +Ġrisk ing +N az +ĠRiy adh +Ġacc redited +Ġsk irm +é Ľ +Ġexam iner +Ġmess ing +Ġnear ing +ĠC hern +ĠBeck ham +Ġsw apped +Ġgo ose +K ay +Ġlo fty +ĠWal let +Ġ[ ' +Ġap ocalypse +Ġb amboo +ĠSP ACE +ĠEl ena +Ġ30 6 +ac ons +Ġtight ened +Ġadolesc ence +Ġrain y +Ġvandal ism +ĠNew town +Ġcon ject +c akes +Ġche ated +Ġmoder ators +par ams +E FF +Ġdece it +ĠST L +ĠTanz ania +ĠR I +Ġ19 23 +ĠEx ile +the l +Ġthe olog +Ġquir ky +ĠIr vine +Ġneed y +or is +U m +K a +Ġmail box +3 22 +Ġb os +ĠPet ra +K ING +Ġenlarg ed +O ften +Ġbad ass +Ġ3 43 +ĠPl aces +ĠC AD +Ġpr istine +Ġinterven ing +d irection +Ġl az +ĠD SM +Ġproject ing +ĠF unk +ag og +pay ment +n ov +Ġch atter +AR B +Ġexam inations +ĠHouse hold +ĠG us +F ord +4 14 +B oss +Ġmy stic +Ġle aps +ĠB av +ul z +b udget +Foot ball +Ġsubsid ized +Ġfirst hand +Ġcoinc ide +oc ular +Con n +ĠColl abor +Ġfool s +am ura +ah ar +r ists +Ġsw ollen +Ġexp ended +ĠP au +s up +Ġsp ar +Ġkey note +s uff +Ġunequ al +Ġprogress ing +str ings +ĠGamer gate +Dis ney +ĠEle ven +om nia +Ġscript ed +Ġear ners +bro ther +ĠEn abled +æ ³ +Ġlar vae +ĠL OC +m ess +Wil son +ĠTem plate +success fully +Ġparam ount +Ġcamoufl age +Ġbind s +ĠQu iet +ĠSh utterstock +r ush +Ġmasc ot +fort une +ĠCol t +ĠBe yon +hab i +Ġha irc +Ġ26 7 +ĠDe us +Ġtw itch +Ġconcent rating +Ġn ipples +c ible +Ġg ir +N Z +M ath +n ih +Requ ired +Ġp onder +ĠS AN +Ġwedd ings +Ġl oneliness +N ES +ĠMah jong +69 5 +add le +ĠGar ner +ĠC OUR +Br idge +Ġsp ree +ĠCald well +Ġbri bery +Ġ���� ���� +plug ins +Ġr acket +Ġchamp agne +vers ible +V ote +Ġmod ifiers +May or +6 80 +Ġassemb lies +ĠS ultan +ĠN ing +ĠLad ies +Ġsulf ur +Ġor bs +Ġ---- - +____ ___ +ĠJournal ism +Ġes ports +Ġl ush +Ġh ue +Ġspect ral +H onest +ãĥ ı +Ġbus hes +Ġrein forcement +Ġre opened +ĠWhe els +ĠM org +rie ving +Ġaux iliary +Ġj Query +ĠB AT +tes que +Ġver tex +p ure +f rey +ãĤ º +d os +Ġty ph +Ġc ull +Ġe q +Ġdec on +Ġtoss ing +Ġdispar ate +ĠBr igham +print f +led ged +Ġsu nd +Ġco zy +Ġhepat itis +per forming +Ġav al +ĠG G +f uture +Ġpet ertodd +ĠKos ovo +Ġmagn ets +Al ready +ĠEd ison +ĠCe res +ĠRA ID +Ġbrill iance +57 6 +Ġder ives +Ġhypert ension +ĠÎ Ķ +Ġlamb da +Ġfl air +Ġmission aries +Ġrap es +ĠSt arter +ĠMon ths +Ġdef y +Ġseism ic +ĠR aphael +Ġeuro zone +65 6 +z sche +Ġscr atched +Ġb ows +ĠLenn on +ĠGa ia +Ġdri pping +f acts +A le +Ġfrog s +ĠBre ast +ogene ity +ĠProsecut or +Ġampl ified +ĠHod g +ĠF n +Th ousands +ĠNI H +ĠMonitor ing +FT WARE +ĠPri ebus +ĠG rowing +hun ter +Ġdiagn ose +ĠM ald +ĠL R +Ġcrown ed +Ġburst ing +Ġdiss olution +j avascript +Ġuseful ness +ĠExec ution +: ( +ĠIv ory +a ah +Ġpersecut ed +viol ence +ist as +ĠCr ate +Ġimpuls es +ĠSp ani 
+ed es +Hand le +ĠZ erg +think able +Last ly +Ġspont aneously +Ġinconven ient +Ġdismiss ing +Ġpl otted +Ġeight y +Ġ7 37 +r ish +ĠThor nton +ath am +Ġsit com +V en +Rec ipe +t el +l und +Ġcle ars +ĠSas uke +Ġ25 8 +Ġopt ing +Ġen raged +est hetic +ĠA e +uch s +Pre p +Fl ow +Ġrun off +ĠE ating +ĠG iles +ĠAct ing +res ources +ib aba +Ġr pm +Ġske wed +ĠBl anc +ĠS akuya +Ġhot ter +Ġ19 24 +op ian +ck o +Ġcr umbling +Ġcapt ains +ĠAppropri ations +le aders +dro pping +an uts +Ġrevers ing +ĠP ose +ĠS ek +Sc ot +ĠIde a +c ise +ĠSloven ia +Ġ3 17 +Do ctor +Ġcro cod +ald i +Se a +ĠFar rell +Ġmerc enaries +ĠR NC +ĠGu ess +Ġp acing +M achine +Streamer Bot +ĠChar ity +Ġ29 8 +Ġcann ons +ĠTob y +TPP StreamerBot +ĠPass ion +cf g +Th om +Ġbad ges +ĠBern stein +. âĢĵ +ĠP OP +ĠCon j +Ġinitial ization +Ġbiod iversity +D ub +Ġfeud al +Ġdisclaim er +Ġc row +Ġign ition +ar f +S HA +Ġk Hz +h azard +ĠArt ists +oe uv +67 9 +ĠRud y +N ine +ĠRam adan +å ½ +itt o +Ġadren aline +C ert +Ġsmell ed +Ġimp unity +Ġag endas +ĠRe born +ĠCon cent +ĠSe ems +Ġo mega +ĠDust in +Ġback er +ĠSau ce +ĠBoy le +W IN +Ġsp ins +Ġpa uses +u pt +Ġshred ded +Ġstra pped +ĠCor ruption +Ġscr atches +Ġn i +Ġatt ire +ĠS AF +Factory Reloaded +ĠI PS +Ġ( % +Ġsem inar +f ocus +c ivil +Ġ18 60 +int osh +Ġcontin ual +Ġabbre vi +ĠS ok +oc obo +X M +Ġfr antic +Ġunavoid able +Ġar tery +Ġannot ations +b ath +Cl imate +Ġd ors +ĠSl ide +co ord +ĠRel oad +ĠL DL +ĠLove craft +Ġunim agin +Ġresemb led +Ġbarr acks +n p +Ġsurrog ate +Ġcategor ized +ãĤ © +Ġvacc inated +Ġdrain age +Ġind ist +ĠWhats App +Ġ18 70 +oler ance +inv oke +am orph +Ġrecon nect +Ġem anc +Ġblind ness +Ġ12 80 +intern et +c ollar +Ġalt ru +Ġab yss +ĠT RI +65 7 +Ġinf used +HE AD +Ġforest ry +ĠWood y +ĠC i +w i +s am +78 4 +hol iday +Ġmog ul +ĠF ees +ĠD EN +In ternal +ur bed +f usc +at om +ĠIll usion +Ġpoll ed +Ġfl ap +Ġco ax +L GBT +An aly +ĠSect ions +ĠCalif orn +em n +Ġh ither +ĠN IGHT +Ġn ailed +ĠPip eline +39 1 +o of +ĠPr imal +vere nd +Ġsl ashing +Ġret ri +avi our +Ġdepart ing +g il +IS C +Ġmid way +Ġultras ound +Ġbeh aving +ĠT ara +class es +V irtual +ĠColon ial +Ġstri pping +Ġorchestr ated +ĠGra ves +45 2 +ĠIron ically +ĠWrit ers +Ġl ends +ĠMan z +Ġra ven +Ġoxid ative +Ġ26 6 +EL F +act ually +asc ar +D raft +Ġfavour able +Ġhumili ating +Ġf idelity +ĠH of +ĠX uan +49 6 +Ġlay ered +at is +79 0 +Ġpay check +it on +K ar +ĠVM ware +ĠFar mer +Ġserv ic +gl omer +Ġsl ump +ĠFab ric +ĠD OC +est ing +Ġreass ure +Ġph yl +v olt +it ory +R ules +Ġoxid ation +Ġpri zed +Ġmist ress +ĠDj ango +WAR N +å ij +Ġenc ode +ĠFeed back +Ġstupid ity +I an +ĠYugoslav ia +× ¨ +ac l +UT E +19 77 +Ġqual ifies +Ġpuls es +pret ty +Ġfro ze +Ġs s +Iter ator +Ġur gently +Ġm ailed +ĠCh am +Ġsust aining +Ġbas il +Ġpupp ies +il ant +ĠP LEASE +l ap +ace ous +F ear +ĠMaster y +aut omatic +ĠT AG +Ġant im +ag les +47 3 +fram es +Ġwh ispers +ĠWho ever +Ġbra very +ĠUK IP +ract ions +"" " +Ġt ame +Ġpart ed +every thing +CON T +Ġind ebted +Ġadd r +re k +IR ED +Ġem inent +cl inton +Ġo usted +Ġreview er +Ġmelt down +Ġre arr +ĠY ao +the real +aby te +Ġst umbling +Ġbat ches +Ġ25 9 +Ġcontrace ptive +Ġprost itute +ens is +De cl +ĠSt rikes +M ilitary +ĠO ath +v acc +pp ings +05 2 +Ġpart Name +amp ing +Rep orts +K I +CH R +Ġsubt ly +sw ers +Bl ake +us ual +Ġcontest ants +Ġcart ridges +ĠGRE AT +Ġbl ush +ĠâĢ º +47 2 +Ġreason ed +ãĥ ¤ +paralle led +Ġd yn +ag ate +Ġnight ly +å Ĩ +55 6 +Ġsem antic +ĠAdv oc +Ġ !! 
+Ġdisag rees +ĠB W +V eh +Ġharm ing +Ġembr aces +Ġstri ves +Ġin land +ĠK ard +Ġhe ats +ĠGin ny +ut an +ern aut +yl ene +ĠE lev +J D +Ġh ars +ĠStar r +Ġsk ysc +Ġcollabor ators +Us ually +Ġrev olutions +ĠSTAT S +Ġdism antle +Ġconfident ly +Ġkin etic +Al i +Ġpercent ile +Ġextract ing +ill ian +est ead +Ġphysic ists +ĠMarsh al +Ġfell owship +Ġd ashed +ĠU R +ĠSi oux +ĠComp act +am ide +P ython +ĠLe igh +ĠPharm ac +ist rates +her ical +Ġf ue +ĠE min +Ġ( { +ĠNeighbor hood +Ġdisrupt ing +ĠD up +Ġg land +ĠSe v +ĠMar ian +arg on +ĠD und +Ġ< !-- +Ġstr and +Ġstadium s +z os +Ġpsych osis +ĠR ack +Ġbrilliant ly +ï¸ ı +Ġsubmer ged +ĠInst it +ĠCh ow +Ġc ages +ĠH ats +ĠU rs +Ġdil uted +us at +ien ne +ĠMembers hip +ĠBur k +Ġ ie +Ġarche type +D rug +ult on +ĠSp ock +ĠMcK ay +ĠDep end +F eatured +S oc +19 78 +ĠB ere +Ġrelent lessly +Ġcripp ling +Ġar thritis +çĶ Ł +ĠTrop ical +ĠBul g +ĠCher yl +Ġadm irable +Ġsub title +Over ride +Ġorig inating +ĠC CP +Ġsw ore +ĠSo le +ĠDis orders +3 29 +Ġprocess ion +Ġref urb +Ġimm ersed +requ ently +Ġskept ics +Ġcer amic +m itter +en stein +b elt +ĠT IT +b idden +Ġf ir +m ist +> ] +Ġwe ave +ĠParad ox +Ġentr usted +ĠBarcl ays +Ġnovel ist +og ie +80 6 +Ġnin ety +Ġdisag reements +@@@@ @@@@ +ĠAus chwitz +c ars +ĠL ET +t ub +arant ine +P OS +Ġback story +Ġcheer ful +ĠR ag +ek a +bi ased +Ġinexper ienced +ak ra +ĠW itt +t an +Ġrap ist +Ġplate au +ch al +ĠInqu is +exp ression +Ġc ipher +Ġsh aving +add en +re ly +( \ +ism a +ĠReg ulatory +CH AR +ily n +N VIDIA +G U +Ġmur m +la us +Christ opher +Ġcontract ual +ĠPro xy +ĠJa ime +ĠMethod ist +Ġstew ards +st a +per ia +Ġphys iology +Ġbump ed +Ġf ructose +Austral ian +ĠMet allic +ĠMas querade +ar b +Ġprom ul +Ġdown fall +Ġbut cher +Ġb our +ĠIN FORMATION +ĠB is +pect s +ad ena +Ġcontempl ating +ar oo +cent ered +ĠPe aks +Us ed +Ġmod em +Ġg enders +Ġ8 000 +37 1 +Ġm aternity +ĠR az +Ġrock ing +Ġhandgun s +ĠD ACA +Aut om +ĠN ile +Ġtum ult +ĠBenef it +ĠAppro ach +works hop +ĠLe aving +G er +inst ead +Ġvibr ations +Ġrep ositories +49 7 +ĠA unt +ĠJ ub +ĠExp edition +Al pha +Ġs ans +Ġoverd ue +Ġoverc rowd +Ġlegisl atures +Ġp aternal +ĠLeon ardo +Ġexp ressive +Ġdistract ions +Ġsil enced +tr ust +Ġb iking +Ġ5 60 +Ġpropri et +Ġimp osition +Ġcon glomer +Ġ= ================================================================ +ĠTe aching +ĠY ose +int ensive +T own +Ġtroll ing +ĠGr ac +ĠAS US +Y o +Ġspecial s +ĠNep h +ĠGod zilla +Dat abase +ĠHe gel +Ġ27 2 +19 76 +ĠGl oria +Ġdis emb +ĠInvestig ations +ĠB ane +ag ements +St range +Ġtre asury +ĠPl ays +Ġundes irable +Ġwid ening +Ġverb ally +Ġinf ancy +Ġcut ter +f ml +Ġ21 00 +prot otype +f ine +Ġdec riminal +Ġdysfunction al +Ġbes ie +ĠErn st +z eb +Ġnort heastern +Ġa ust +por ate +ĠMar lins +Ġsegreg ated +ew orld +ĠMa her +Ġtra verse +Ġmon astery +ur gy +G ear +s and +Com pl +ĠE MP +Ġpl ent +ĠMer cer +Ġ27 6 +TA BLE +Config uration +H undreds +Ġpr ic +Ġcollabor ating +ĠPar amount +ĠCumm ings +Ġ( < +Ġrecord er +Ġfl ats +Ġ4 16 +wh ose +Font Size +ĠOr bit +Y R +Ġwr ists +Ġb akery +) } +ĠB ounty +ĠLanc aster +Ġend ings +acc ording +ĠSal am +e asy +75 5 +ĠBur r +ĠBarn ett +onom ous +Un ion +Ġpreced ence +ĠScholars hip +ĠU X +Ġroll out +Ġbo on +al m +ĠCan ter +æ µ +Ġround ing +Ġcl ad +Ġv ap +ĠF eatured +is ations +Ġ5 40 +pol ice +Ġunsett ling +Ġdr ifting +ĠLum ia +ĠObama Care +ĠF avor +Hy per +ĠRoth schild +ĠMil iband +an aly +ĠJul iet +H u +Ġrec alling +a head +69 6 +Ġunf avorable +Ġd ances +O x +Ġleg ality +Ġ40 3 +rom ancer +Ġinqu ire +ĠM oves +\ "> +ĠVari ant +ĠMess iah +ĠL CS +ĠBah á +75 6 +Ġeyeb row +Ġ ¥ +ĠMc F 
+ĠFort y +M as +Ġpan icked +Ġtransform ations +q q +Ġrev olves +ring e +ĠA i +ax e +Ġon ward +ĠC FR +ĠB are +log in +Ġliqu ids +Ġde comp +second ary +il an +ĠCon vert +ami ya +Ġprosecut ing +Ġâī ¡ +ĠYork ers +ĠByr ne +sl ow +aw ei +J ean +Ġ26 9 +ĠSky dragon +Ġ é +ĠNicarag ua +ĠHuck abee +ĠHigh ly +Ġamph ib +ĠPast or +ĠL ets +Ġbl urred +Ġvisc eral +ĠC BO +Ġcollabor ated +z ig +Leg al +Ġapart heid +Ġbr id +Ġpres et +ĠD ET +ĠAM A +× Ķ +arch ing +auc uses +build er +Ġpo etic +Ġem ulator +ĠMole cular +Ġhon oring +ise um +Ġtract or +ĠCl uster +ĠCal m +ared evil +Ġsidew alks +Ġviol in +Ġgeneral ized +ĠAle c +Ġemb argo +Ġfast ball +ĠHT TPS +ĠL ack +ĠCh ill +ri ver +C hel +ĠSw arm +ĠLev ine +ro ying +L aunch +Ġkick er +Ġadd itive +ĠDe als +W idget +cont aining +Ġescal ate +ĠOP EN +Ġtwe aked +Ġst ash +Ġsp arks +ĠEs sex +ĠE cc +Ġconv ict +Ġblog ging +I ER +ĠH L +Ġmurd erers +75 9 +ĠH ib +Ġde pl +ĠJ ord +S ac +Ġdis sect +ĠHow e +os her +Ġcustom izable +ĠFran z +Ġat ro +Ä ĩ +Ġ000 4 +Ġout post +R oss +Ġglyph osate +ĠHast ings +ĠBE FORE +Ġsh ove +o pped +ĠSc ala +Ġam ulet +an ian +Ġexacerb ated +Ġe ater +47 1 +UM E +Ġpul p +izont al +ĠZ am +ĠAT I +imm une +aby tes +Ġunnecess arily +ĠC AT +ĠAx is +Ġvisual ize +à ī +ĠRad ical +f m +Doc uments +ĠFor rest +Ġcontext ual +ĠSy mbol +Ġtent ative +ĠDO ES +ĠGood s +Ġintermitt ent +} : +medi ated +Ġridic ule +Ġathe ism +Ġpath ogens +ĠM um +Ġre introdu +Ġ30 7 +i HUD +Ġflash light +Ġsw earing +Ġp engu +B u +Ġrot ated +ĠCr ane +Ġ() ); +Ġfashion able +Ġendors ing +46 3 +) [ +Ġingest ion +Ġcook s +Ġ9 50 +ot omy +ĠIm am +Ġk a +Ġte aser +ĠGhost s +ĠãĤ µ +19 69 +Ï ĥ +ub by +Ġconver ter +zan ne +end e +ĠPre par +ĠNic kel +ĠChim era +h im +ĠTyr ann +ĠSabb ath +ĠNich ols +Ġra pt +ih ar +Ġshe lling +Ġillum inate +Ġdent ist +ut or +ĠInteg ration +Ġwh ims +ĠLiter ary +Be aut +Ġp archment +ag ara +Br and +Ġder og +âĢ¦ ) +ĠNor se +Ġunw itting +Ġc uc +Ġborder line +Ġupset ting +Ġrec ourse +Ġd raped +ĠRad ar +Ġcold er +ĠPep si +im inary +], [ +65 8 +V i +ĠF rem +ĠP es +Ġveter inary +ĠT ED +ĠEp idem +n ova +k id +Ġdev out +o ct +j ad +M oh +ĠP AY +Ġge ometric +Ġ3 23 +Ġcircum ference +ich ick +19 75 +ĠY uri +ĠSh all +ĠH over +un in +S pr +Ġg raft +ĠHapp iness +Ġdisadvant ages +att acks +Ġhub s +ĠStar Craft +é ĸ +Ġgall eries +ĠKor ra +Ġgrocer ies +ĠGors uch +Ġrap ists +Ġfun gi +ĠTyph oon +V ector +ĠEm press +b attle +4 68 +Ġparas ite +ĠBom ber +S G +ex ist +ĠP f +Ġun se +Ġsurge ons +B irth +ĠUn sure +ĠPrint ed +ĠBehavior al +ĠA ster +Pak istan +Ġun ethical +Ġs v +ĠIo T +Ġlay outs +P ain +Ġconst ants +ĠL W +ĠB ake +Ġtow els +Ġdeterior ation +ĠBol ivia +Ġblind ed +ĠW arden +ĠMist ress +Ġon stage +Ġcl ans +ĠB EST +19 60 +Ġant ique +Ġrhet orical +ĠPer cy +ĠRw anda +, . 
+B ruce +Ġtra umat +ĠParliament ary +Ġfoot note +id ia +ĠLear ned +se eking +gen ic +Ġdim ensional +H ide +èĢ ħ +Ġintrig ue +in se +Ġle ases +Ġapp rentices +w ashing +Ġ19 26 +V ILLE +Ġsw oop +s cl +Ġbed rooms +on ics +ĠCr unch +comp atible +Ġincap ac +ĠYemen i +ash tra +z hou +d anger +Ġmanifest ations +ĠDem ons +AA F +Secret ary +ACT ED +L OD +Ġam y +ra per +eth nic +4 17 +Ġpos itives +Ġ27 3 +ĠRefuge es +Ġus b +ĠV ald +odd y +ĠMahm oud +As ia +Ġskull s +ĠEx odus +ĠComp et +ĠL IC +ĠM ansion +ĠA me +Ġconsolid ate +storm s +ont ent +99 6 +Ġcl en +Ġm ummy +fl at +75 8 +ĠV OL +oter ic +n en +ĠMin ute +S ov +Ġfin er +R h +ly cer +Ġreinforce ments +ĠJohann es +ĠGall agher +Ġgym n +S uddenly +Ġext ortion +k r +i ator +T a +Ġhippocamp us +N PR +ĠComput ing +Ġsquare ly +Ġmod elling +ĠFor ums +ĠL isp +ĠKrish na +Ġ3 24 +Ġr ushes +Ġens ued +Ġcre eping +on te +n ai +il ater +ĠHorn ets +Ġob livious +IN ST +55 9 +Ġjeopard y +Ġdistingu ishing +j ured +Ġbeg s +sim ilar +ph ot +5 30 +ĠPark way +Ġs inks +ĠHearth stone +ib ur +ĠBat on +Av oid +Ġd ancer +Ġmag istrate +ary n +Ġdisturb ances +ĠRom ero +Ġpar aph +Ġmis chief +âĸ ĵ +ĠSh aria +Ġur inary +r oute +iv as +f itted +Ġeject ed +ĠAl buquerque +Ġ4 70 +Ġirrit ated +ĠZ ip +ĠB iol +à į +Ġden ounce +Ġbin aries +ĠVer se +Ġopp os +ĠKend rick +ĠG PL +Ġsp ew +ĠEl ijah +ĠE as +Ġdr ifted +so far +Ġannoy ance +ĠB ET +47 4 +ĠSt rongh +it ates +ĠCogn itive +oph one +ĠIdent ification +ocr ine +connect ion +Ġbox er +ĠAS D +ĠAre as +Y ang +t ch +ull ah +Ġdece ive +Comb at +ep isode +cre te +W itness +Ġcondol ences +ht ar +Ġhe als +Ġbuck ets +ĠLA W +B lu +Ġsl ab +ĠOR DER +oc l +att on +ĠSteven son +ĠG inger +ĠFriend ly +ĠVander bilt +sp irit +ig l +ĠReg arding +ĠPR OG +Ġse aling +start ing +Ġcard inal +ĠV ec +ĠBe ir +Ġmillisec onds +we ak +per se +Ġster ile +ĠCont emporary +ĠPh ant +ĠCl o +Ġout p +Ġex iled +Ġ27 7 +Ġself ie +Ġman ic +Ġn ano +ter ms +Alex ander +Ġres olves +Ġmillenn ia +Ġexpl odes +Ġconst ellation +Ġadul tery +m otion +D OC +Ġbroad casters +Ġkinderg arten +ĠMay weather +ĠE co +ich o +Ġ28 7 +l aun +Ġm ute +Ġdisc reet +Ġpres chool +Ġpre empt +De lete +ĠFre ed +P i +H K +Ġblock er +ĠC umber +Ġw rought +d ating +Ġins urer +Ġquot as +Ġpre ached +Ġev iction +ĠReg ina +ĠP ens +Ġsevent een +ĠN ass +D ick +Ġfold s +Ġd otted +ĠA ad +Un iversal +Ġp izz +ĠG uru +Ġso ils +Ġno vice +ĠNe ander +Ġst ool +Ġdeton ated +ĠPik achu +ĠMass ive +IV ER +ĠAb del +Ġsubdu ed +Ġtall est +Ġprec arious +Ġa y +r ification +ĠOb j +c ale +Ġun question +cul osis +ad as +igr ated +D ays +Ġque ens +ĠGaz ette +ĠCol our +ĠBow man +ĠJ J +ï ve +Ġdomin ates +Stud ent +Ġm u +Ġback log +ĠElect ro +Tr uth +48 3 +Ġcond ensed +r ules +ĠCons piracy +Ġacron ym +hand led +ĠMat te +j ri +ĠImp ossible +l ude +cre ation +Ġwar med +ĠSl ave +Ġmis led +Ġfer ment +ĠK ah +ink i +ke leton +cy l +ĠKar in +Hun ter +Reg ister +ĠSur rey +Ġst ares +ĠW idth +ĠN ay +ĠSk i +Ġblack list +uck et +Ġexp ulsion +im et +Ġret weet +vant age +Fe ature +Ġtro opers +Ġhom ers +9 69 +Ġconting ency +ĠW TC +ĠBrew er +fore ign +W are +S olar +Ġund ue +RE C +ulner able +path ic +ĠBo ise +Ġ3 22 +Ġarous ed +ĠY ing +ä¸ į +uel ess +Ġp as +Ġmor p +Ġfl oral +Ex press +ud ging +k B +ĠGr anted +Ø ¯ +ĠMich a +ĠGoth ic +ĠSPEC IAL +ĠRic ardo +F ran +Ġadminister ing +6 20 +por a +Ġ ® +Ġcomprom ises +Ġb itten +Ac cept +Th irty +Ð ² +Ġmater ially +ĠTer r +ig matic +ch ains +Ġdo ve +stad t +Mar vel +FA ULT +Ġwind shield +Ġ3 36 +ad ier +Ġsw apping +Ġflaw less +ĠPred ator +ĠMiche le +Ġprop ulsion +ĠPsych ic +Ġassign ing +Ġfabric ation +Ġbar ley +l ust 
+Ġtow ering +Ġalter cation +ĠBent ley +Sp here +Ġtun a +ĠClass es +Fre edom +un er +L ady +v oice +Ġcool est +or r +Ġpal p +$ { +Ġhyster ia +ĠMet atron +p ants +Ġspawn ing +Exper ts +ĠInvest ors +ĠAn archy +Ġshr unk +ĠVict im +Ġ28 9 +Ġec stasy +ĠB inding +58 5 +ĠMel ody +57 8 +ot ally +ĠE tsy +lig a +Ġapplaud ed +Ġswe ating +Ġredist ributed +Ġpop corn +Ġsem inal +f ur +ĠNeuro science +R and +ĠO st +ĠMadd en +ĠIncre asing +ĠDaw kins +ĠSub way +Ġar sen +cons erv +B UR +Ġsp iked +ĠLy ft +ĠImper ium +ĠDrop box +Ġfav oured +Ġencomp asses +gh ost +Ġins pires +Ġbur geoning +ĠY oshi +ĠVert ical +ĠAud itor +Ġint ending +Ġfilib uster +Bl oom +f ac +ĠCav s +ign ing +Ġcowork ers +ĠBarb arian +rem ember +FL AG +Ġaudit ory +ason ry +Col lege +Ġmut ed +gem ony +ob in +ĠPsych o +9 68 +Ġlav ish +Ġhierarch ical +ĠDr one +ou k +Ġcripp led +ĠMax im +Sl ot +Ġqu iz +ĠV id +if ling +Ġarchae ologists +Ġabandon ment +d ial +le on +ĠF as +T ed +Ġr aspberry +Ġmaneu vers +Ġbehavi ours +Ġins ure +Ġrem od +Sw itch +h oe +Ġsp aced +Ġafford ability +ĠF ern +not ation +ĠBal anced +Ġoccup ies +en vironment +Ġneck lace +Ġsed an +F U +ĠBrav o +Ġab users +ĠAn ita +met adata +ĠG ithub +ait o +ĠF aster +ĠWass erman +ĠF lesh +Ġth orn +r arily +ĠMer ry +w ine +Ġpopul ace +ĠL ann +Ġrepair ing +Ġpsy che +Ġmod ulation +aw aru +âĢĭ âĢĭ +ari j +Ġdecor ations +Ġapolog ise +ĠG arg +app ly +Ġgive away +ĠFl an +ĠWy att +U ber +Ġauthor ised +ĠMor al +HAHA HAHA +activ ate +Ġtorped o +ĠF AR +Ġam assed +ĠA ram +ark in +ĠVict ims +st ab +Ġo m +ĠE CO +Ġopio ids +Ġpurpose ly +ĠV est +Ġer g +at an +ĠSur gery +Ġcorrect ing +ĠOrt iz +ĠBe et +Ġrev oke +Ġfre eway +ĠH iggins +F ail +ĠFar ms +ĠAT P +h ound +Ġp oking +ĠCommun ists +mon ster +iment ary +Ġunlock ing +Ġunf it +we ed +en ario +at ical +ĠEnlight enment +ĠN G +ĠComp ensation +de en +ĠWid ow +ĠCind y +ĠAfter wards +Ġ6 000 +ikh ail +ag ically +Ġrat ified +Ġcasual ty +H OME +p sey +f ee +Ġspark ling +Ġd é +Ġconcert ed +C atal +Ġcomp lying +ĠA res +ĠD ent +Sh ut +Ġsk im +ad minist +Ġhost ilities +ĠG ins +Ġ6 08 +Ġm uddy +ĠMc Int +ĠDec ay +5 25 +Ġconspic uous +ĠEx posure +Ġresc ind +Ġwear able +Ġ3 28 +our met +ah s +ĠRob ots +Ġe clips +inst ance +ĠRE PORT +ĠApp l +0 30 +ĠSk ies +01 00 +Ġfall acy +S ocket +ĠRece iver +Ġsol ves +ĠButter fly +ĠSho pping +ĠFI RE +65 4 +Med ic +Ġsing ers +ĠNeed less +'' '' +isher s +ĠD ive +58 8 +Ġselect ively +Ġcl umsy +88 9 +Ġpurch aser +ear ned +ard y +Ġbenef iting +eng lish +Ġyield ing +ĠP our +Ġspin ach +Ġdel ve +ĠC rom +6 10 +Ġexport ing +ĠMA KE +Ġ26 3 +Ġg rop +Ġenv oy +ĠInqu iry +ĠLu igi +d ry +ĠT uring +Thumbnail Image +ĠVar iety +Ġfac et +Ġfl uffy +Ġexcerpt s +Ġsh orth +ĠOl sen +CL UD +Ġrel iant +ĠUN C +T our +Ġbat hing +Comp any +Ġglobal ization +P red +ĠMalf oy +Ġh oc +j am +craft ed +ĠBond s +ĠKiss inger +Eng land +Ġorder ly +cat entry +Ġ26 1 +Ġexch anging +ĠInt ent +ĠAmend ments +D OM +Ġst out +³³³³³³³³ ³³³³³³³³ +ĠAir bus +Ġ27 8 +hy de +P oll +Item ThumbnailImage +Ġlooph oles +ĠPill ar +Ġexpl or +St retch +A part +Ġun married +Lim it +ĠTransform ers +Ġintellect ually +unct ure +18 00 +Ġd arn +B razil +Ġleft over +ber us +f red +Mine craft +3 26 +ĠForm s +Ġproof s +ĠDes igned +Ġindex es +ĠSupp ose +EM S +ĠL oving +ĠBon nie +im ating +OT US +Ġconduct or +Ġbehav ed +ĠF ren +Ġsy nerg +Ġmillenn ium +Ġcater ing +ĠL auder +W r +ĠY iannopoulos +ĠAT F +Ġensl aved +Ġawaken ed +D VD +ĠED ITION +ĠConc ert +ĠChall enger +ĠH aku +umer ic +Ġdep recated +ĠSH AR +4 12 +Ġdy stop +Ġtremb ling +Ġdread ed +ĠSp ac +p adding +Re pl +ĠG arrison +M ini +Ġun paralleled +am ar +URR 
ENT +w reck +c ertain +t al +ĠC LS +app ings +Ġsens ed +Ġf encing +ĠPas o +ĠDes k +Ġsc off +Ġcontem plate +ĠL iga +l iquid +75 7 +Ġapp rentice +ĠUCH IJ +5 70 +ĠTh ousand +ĠIll um +Ġchampion ed +ãĤ Į +Ġelect ors +Ġ3 98 +ĠH ancock +round ed +ĠJ OHN +Ġuns atisf +Ġqual ifier +ĠGad get +EN E +Ġdead liest +ĠPl ants +Ġ ions +Ġacc ents +Ġtwe aking +Ġsh aved +F REE +ĠCh aser +Again st +9 60 +Ġmeth amphetamine +Ġnormal ized +Ġ$ \ +ĠPre cision +ĠGu am +Ġch oked +ĠX II +ĠCast ing +Tor rent +Ġscal p +ĠJagu ar +w it +Ġsem ic +ix ie +ĠG ould +Ġconf ines +N usra +ĠL on +ĠJ ugg +y cle +ĠCod ec +E gypt +Ġrest rain +ĠAl iens +Ġch oking +ĠD unk +ĠBell a +ab c +Ġsl ang +Ġneuro trans +s av +Ġempower ment +â ĨĴ +Ġclim bers +ĠM im +ĠF ra +ros se +Cap ital +ĠCth ulhu +Inter face +Ġprof icient +ĠIN TO +Ġ3 18 +ront al +5 80 +ĠDes pair +K enn +Ġscrim mage +ĠCo at +as ions +Ġwall paper +ĠJ ol +Ġresurg ence +Ġant iv +ĠB alls +² ¾ +Ġbuff ers +Ġsub system +ĠSt ellar +ĠL ung +A IDS +Ġerad icate +Ġblat antly +Ġbehav es +ĠN un +Ġant ics +ex port +DE V +w b +Ġph p +ĠInteg rity +Ġexplore r +Ġrev olving +auth ored +g ans +Ġbas k +Ġas ynchronous +å į +TH ING +69 8 +G ene +ĠR acer +ĠN ico +iss ued +Ġser mon +p ossibly +Ġsize of +Ġentrepreneur ial +ox in +ĠMin erva +Ġpl atoon +n os +ri ks +A UT +ĠAval anche +ĠDes c +ij 士 +ĠP oc +Ġconf erred +Î » +Ġpat ched +F BI +66 2 +Ġfract ures +Ġdetect s +Ġded icate +Ġconstitu ent +Ġcos mos +W T +Ġswe ats +Ġspr ung +b ara +s olid +Ġuns us +Ġbul ky +ĠPhilipp e +ĠFen rir +Ġtherap ists +ore al +^^ ^^ +Ġtotal ed +Ġboo ze +ĠR PC +Prosecut ors +Ġdis eng +ĠSh ared +Ġmotor cycles +Ġinvent ions +Ġlett uce +ĠMer ge +ĠJ C +Ġspiritual ity +ĠWAR NING +Ġunl ucky +ĠT ess +Ġtong ues +ĠD UI +T umblr +Ġle ans +Ġinv aders +Ġcan opy +ĠHur ricanes +ĠB ret +ĠAP PLIC +id ine +ick le +Reg arding +Ġve ggies +Ġe jac +ju ven +F ish +D EM +ĠD ino +Th row +ĠCheck ing +be ard +( & +Ġj ails +Ġh r +trans fer +iv ating +Ġfle ets +ĠIm ag +ĠMc Donnell +Ġsnipp et +Is a +ĠCh att +ĠSt ain +ĠSet FontSize +ĠO y +ĠMathemat ics +49 4 +Ġelectro ly +ĠG ott +ĠBr as +B OOK +ĠF inger +d ump +Ġmut ants +Ġrent als +Ġinter tw +Ġc reek +ail a +Bro ther +ĠDisc ord +pe e +raw ler +Ġcar p +Ġ27 9 +ãĤ· ãĥ£ +rel ations +Ġcontr asts +Col umn +Ġrec onnaissance +Ġun know +Ġl ooting +Ġregul ates +Ġopt imum +ĠChero kee +ĠA ry +Lat est +Ġroad side +Ġd anced +ĠUnic orn +A cknowled +Ġuncont roll +ĠM US +at io +ch ance +ha ven +VAL UE +Ġfavour ites +Ġceremon ial +b inary +pe ed +wood s +EM P +Ġv ascular +Ġcontempl ated +Ġbar ren +ĠL IST +Y ellow +ospons ors +Ġwhisk y +ĠM amm +ĠDeV os +min imum +H ung +44 2 +P ic +ĠSnap dragon +77 6 +Ġcar ving +Ġund ecided +Ġadvantage ous +Ġpal ms +ĠA Q +Ġst arch +L oop +Ġpadd le +Ġfl aming +ĠHor izons +An imation +bo ost +Ġprob abilities +ĠM ish +Ġex odus +ĠEditor ial +Ġfung us +Ġdissent ing +ĠDel icious +rog ram +ĠD yn +d isk +t om +Ġfab rics +ĠC ove +ĠB ans +Ġsoft en +ĠCON S +Ġin eligible +Ġestim ating +ĠLex ington +pract ice +of i +Ġshe dding +ĠN ope +Ġbreat hed +ĠCorinth ians +y ne +ek i +B ull +Ġatt aching +reens hots +Ġanaly se +ĠK appa +Ġuns ustainable +Ġinter pol +ank y +he mer +Ġprot agonists +Ġform atted +ĠBry ce +ĠAch illes +ĠAb edin +sh ock +Ġb um +b os +qu a +ĠW arn +q t +ĠDi abetes +8 64 +ĠIn visible +Ġvan ish +Ġtrans mitting +Ġmur ky +ĠFe i +Ġawa ited +ĠJur assic +umm ies +Ġmen acing +g all +C ath +B uilt +ild o +ĠV otes +Ġon t +Ġmun itions +ĠFre em +ÃŃ n +Ġdec ency +lo pp +ie ved +ĠG ord +Ġun thinkable +ĠNews week +Ġ3 21 +He at +Ġpresent er +ji ang +Ġpl ank +ĠAval on +Ġben z +ĠR out +Ġslam ming +ĠD ai +ou 
ter +ĠCook ie +ĠAlic ia +ge y +Ġvan ity +Ġow l +á µ +t ested +ĠAw akens +Ġcan v +Ġblind ly +ĠRid ley +ĠEm ails +Requ ires +ĠSer bian +ograp hed +if rame +eter ia +Ġaltern ating +qu iet +Ġsoc iology +ĠUn lock +ĠCommun ism +Ġo ps +Ġatt ribution +Ġab duction +ĠAb ram +Ġsidel ined +ĠB OOK +Ġref ining +ĠFe eling +ĠOs lo +ĠPru itt +r ack +ang ible +Ġcaut iously +ĠM ARK +eed s +M ouse +ĠStep h +ĠP air +S ab +99 7 +ĠBa al +B ec +Ġcomm a +ĠP all +ĠG ael +Ġmisunder stand +ĠP esh +Order able +Ġdis mal +ĠSh iny +% " +Ġreal istically +Ġpat io +ĠG w +ĠVirt ue +Ġexhaust ing +wh atever +oph ys +y ip +4 18 +Ad just +ĠWa iting +ess on +ĠMaz da +ĠDo zens +Ġstream lined +Ġincompet ence +ĠM eth +Ġeth os +ON ES +Ġincent iv +Ġgr itty +ĠBut cher +Head er +Ġexp onential +à Ł +Ġcorrel ate +Ġcons ensual +s ounding +R ing +Orig in +Ġcon clusive +fe et +ac ly +ĠF ernandez +Buy able +Ġd ucks +aunt lets +Ġel ong +Ġ28 6 +Ġsim ul +G as +ĠK irst +Ġprot r +ĠRob o +ĠAo E +op ol +Ġpsych ologically +sp in +ilater ally +ĠCon rad +W ave +44 1 +ĠAd vertisement +ĠHarm on +ĠOri ental +is Special +Ġpresum ptive +Ġw il +ĠK ier +ne a +Ġp pm +Ġhar bour +ĠW ired +comp any +Ġcor oner +atur days +ĠP roud +ĠN EXT +ĠFl ake +val ued +ce iver +Ġfra ught +Ġc asing +Ġrun away +Ġg in +ĠLaure nt +ĠHar lem +ĠCur iosity +qu ished +Ġneuro science +ĠH ulu +Ġborrow er +Ġpetition er +ĠCo oldown +W ARD +Ġinv oking +conf idence +For ward +Ġst s +pop ulation +Delivery Date +Fil m +ĠC ov +quick Ship +quickShip Available +prim ary +isSpecial Orderable +inventory Quantity +channel Availability +BO X +ĠMulti player +ĠJen ner +77 8 +ĠM d +Ġ~ /. +M N +Ġchild ish +Ġantioxid ant +ĠChrom ebook +Ġ27 4 +Ġscreen play +Ġadvent urous +ĠRelations hip +respons ive +ming ton +Ġcorner stone +ĠF ey +F IR +Ġrook ies +ĠF eaturing +Ġorig inate +Ġelectro des +ant es +Ġscript ures +Ġgl ued +Ġdiscont ent +Ġaff licted +lay out +B rave +Ġm osa +ĠQuant ity +ĠH ik +w inner +H ours +Ġent ail +ĠCell s +olog ue +Ġv il +Ġpre acher +Ġdecor ative +d ifferent +Ġprejud ices +ĠSm oking +ĠNotting ham +so Type +Ġrhyth ms +ĠAl ph +bl ast +Ste el +ĠDaniel le +Ġstr ife +Ġrem atch +so DeliveryDate +ĠF ork +t rip +ol ulu +hes es +C G +ĠPOLIT ICO +ost a +ĠDr ift +é¾įå ¥ +é¾įå¥ ij士 +Ġvet ting +ĠJin ping +ĠRec ession +Min or +ĠF raud +enf ranch +Ġconven ed +ĠNA ACP +ĠMill ions +ĠFarm ing +ĠW oo +ĠFl are +rit o +imm igrant +Ġvac ancy +ĠHE AD +ĠV aj +eg al +ĠV igil +Stud y +Ġru ining +Ġr acks +Ġhe ater +ĠRand olph +ĠBr ush +ĠT ir +Ø ¨ +Ġc ov +% ] +Ġrecount s +ĠO PT +ĠM elt +Ġtr uce +Ġcas inos +Ġcrus ade +Ġcarn age +Ġstri pe +ĠK yl +Text ures +Ġ6 98 +Ġpro clamation +Ġgood ies +Ġ........ .. 
+pro claimed +P olit +Ġtop ical +Ġspecial ize +ĠA min +g m +Ġanch ored +Ġbear ings +s ample +ĠHigh land +ĠAut ism +Ġmerc enary +Ġinterview er +L ER +ĠSom ers +Ġembry o +ĠAss y +Ġ28 1 +ĠEd iting +ĠCh osen +6 60 +Ġp ci +ĠThunder bolt +BI LL +Ġchuck led +jri wal +h of +Ġearth ly +() { +ind ependence +Ġdisp ers +ĠV endor +ĠG areth +Ġp als +P enn +ĠSub mit +ic um +Th u +Ġcl andestine +Ġcann ibal +ĠCl erk +E Stream +gal itarian +âĻ ¥ +g ew +Ġhor rend +ĠL ov +ĠRe action +ocr in +Class ic +Ġecho ing +Ġdiscl osing +ĠIns ight +og un +ĠInc arn +upload s +pp erc +guy en +Ġ19 01 +ĠB ars +68 7 +Ġb ribes +ĠFres no +ur at +ĠRe ese +Ġintr usive +Ġgri pping +ĠBlue print +ĠR asm +un ia +man aged +ĠHeb do +Ġ3 45 +Ġdec oding +Ġpo ets +Ġj aws +ĠF IGHT +am eless +ĠMead ows +ĠHar baugh +Inter view +ĠH osp +ĠB RA +Ġdelet ion +m ob +W alker +ĠMoon light +ĠJ ed +ĠSoph ia +Ġus ur +Ġfortun ately +ĠPut ting +ĠF old +Ġsan itation +Ġpart isans +IS ON +B ow +ĠCON C +ĠRed uced +ĠS utton +Ġtouch screen +Ġembry os +âĢ¢âĢ¢ âĢ¢âĢ¢ +ĠK rug +com bat +ĠPet roleum +Ġam d +ĠCos mos +Ġpresc ribing +Ġconform ity +ours es +Ġplent iful +Ġdis illusion +ĠEc ology +itt al +Ġf anc +Ġassass inated +regn ancy +Ġperenn ial +ĠBul lets +Ġst ale +Ġc ached +ĠJud ith +ĠDise ases +All en +Ġl as +Ġsh ards +ĠSu arez +ĠFriend ship +inter face +ĠSupp orters +add ons +46 2 +ĠIm ran +ĠW im +Ġnew found +ĠM b +An imal +Ġd arling +and e +Ġrh y +ĠTw isted +pos al +yn ski +Var ious +× ľ +ĠK iw +uy omi +Ġwell being +ĠL au +an os +Ġunm ist +Ġmac OS +Ġrest room +ĠOl iv +ĠAir ways +Ġtimet able +9 80 +Ġrad ios +v oy +ias co +Ġcloud y +ĠDraw ing +Any thing +Sy ria +ĠH ert +st aking +Ġun checked +Ġb razen +ĠN RS +69 7 +onom ic +est ablish +Ġl eng +Ġdi agonal +ĠF ior +L air +ĠSt ard +Ġdef icient +jo ining +be am +Ġomn ip +Ġbl ender +Ġsun rise +Mo ore +ĠF ault +ĠCost ume +ĠM ub +Fl ags +an se +Ġpay out +ĠGovern ors +ĠD illon +ĠBan ana +N ar +Ġtra iled +Ġimperial ist +um ann +ats uki +4 35 +ĠRoad s +Ġsl ur +ĠIde ally +Ġt renches +C trl +Ġmir rored +ĠZ el +ĠC rest +Comp at +ĠRoll s +sc rib +ĠTra ils +omet ers +w inter +Ġimm ortality +il ated +Ġcontrad icts +un iversal +ill ions +ĠM ama +opt im +AT URE +Ġge o +et ter +ĠCar lo +4 24 +Ġcanon ical +ĠStrongh old +n ear +Ġperf ume +Ġorche stra +od iac +Ġup he +Ġreign ing +vers ive +Ġc aucuses +ĠD EM +Ġinsult ed +Ġ---- -- +ĠCr ush +Ġroot ing +ĠWra ith +Ġwh ore +Ġto fu +C md +ĠB ree +Ġ$ _ +Ġr ive +ĠAd vertising +Ġw att +ĠH O +Ġpersu asive +ĠParam eters +Ġobserv ational +ĠN CT +ĠMo j +ĠSal on +Ġtr unc +Ġexqu isite +ĠMar a +Ġpo op +ĠAN N +Ex c +ĠWonder ful +ĠT aco +Ġhome owner +ĠSmith sonian +orpor ated +mm mm +Ġlo af +ĠYam ato +ĠInd o +Ġcl inging +á s +Ġimm utable +h ub +Or ange +Ġfingert ips +ĠWood en +ĠK idd +ĠJ PM +ĠDam n +C ow +c odes +48 2 +Ġiniti ating +ĠEl k +ĠCut ting +Ġabsent ee +ĠV ance +ĠLil ith +G UI +Ġobsc ured +Ġdwar ves +ĠCh op +ĠB oko +Val ues +Ġmult imedia +Ġbrew ed +Reg ular +CRIP TION +ĠMort al +Ġa pex +Ġtravel er +Ġbo ils +Ġspray ing +Rep resent +ĠStars hip +4 28 +Ġdisappro val +Ġshadow y +Ġlament ed +ĠRe place +ĠFran ç +67 7 +d or +Ġunst oppable +Ġcoh orts +gy n +ĠClass ics +ĠAm ph +Ġsl uggish +ĠAdd iction +ĠPad res +Ġins cription +Ġin human +min us +ĠJere miah +at ars +Ter ror +ĠT os +ĠSh arma +ast a +c atch +Ġpl umbing +ĠTim bers +Sh ar +H al +ĠO sc +Ġcou pling +hum ans +Ġsp onge +Ġid ols +ĠSp a +ĠAdv ocate +ĠBe ats +lu a +Ġtick ing +Ġload er +ĠG ron +8 10 +Ġstim ulated +Ġside bar +ĠManufact urer +ore And +19 73 +Ġpra ises +ĠFl ores +dis able +ĠElect rical +ra ise +E th +Ġmigr ated +Ġlect urer +K ids +ĠCa 
vern +Ġk ettle +Ġgly c +ĠMand ela +ĠF ully +å§ « +FIN EST +Ġsquee zing +ĠRy der +amp oo +oreAnd Online +Inst oreAndOnline +Buyable InstoreAndOnline +Ġcommem orate +ĠRamp age +Aust in +ĠSh roud +ĠRu ins +9 15 +ĠK H +Ġwater front +ĠE SC +b aby +ĠC out +ĠEm blem +Ġequival ents +49 2 +Un ique +ĠNiet zsche +brow ser +Ġim itation +ĠWere wolf +ĠKir in +ac as +' ," +Ġà ¾ +Review ed +Ġc unt +Ġvo ic +ĠLen ovo +Ġbond ed +48 1 +Ġinhib itors +Ġendeav ors +ĠHav ana +ĠSt out +ĠJ olly +A ctor +*/ ( +Ġoccur rences +ĠT ens +Incre ased +ĠACT ION +Ġ ãĢĮ +ĠRank ings +ĠB reat +Ġ30 9 +D ou +Ġimpact ing +ĠDuc hess +pre fix +Q B +Ġsummon ing +Ġbest owed +ĠKe pler +ĠPOW ER +c ube +ĠK its +ĠG rip +Ġop ium +Ġrep utable +t oc +ich ael +ĠR ipple +Ġcaf é +ĠZ oom +ĠBur ma +Ġwa ive +Ġst alls +Ġdem eanor +inc erity +Ġfluor ide +ĠSH OULD +Par is +Ġlong ing +Ġpl at +Ġgross ly +Ġbull s +Ġshowc asing +ex pected +ĠG addafi +engine ering +Re peat +ĠK ut +Ġconce ivable +Ġtrim med +osc ope +ĠCand idate +ĠT ears +rol og +Lew is +S UP +Ġroad map +Ġsal iva +Ġtrump et +Jim my +Ġmirac ulous +Ġcolon ization +Ġam put +ĠGN OME +ate ch +D ifferent +ĠE LE +ĠGovern ments +ĠA head +ãħĭ ãħĭ +word press +L IB +ĠIn clude +ĠDor othy +0 45 +ĠColomb ian +Ġle ased +88 4 +Ġde grading +ĠDa isy +i ations +Ġbapt ized +Ġsurn ame +co x +Ġblink ed +ãĥ ¢ +Ġpoll en +Ġder mat +Ġre gex +ĠNich olson +ĠE ater +ç ľ +rad or +Ġnarrow er +Ġhur ricanes +Ġhalluc inations +r idden +ISS ION +ĠFire fly +Ġattain ment +Ġnom inate +Ġav ocado +ĠM eredith +Ġt s +Ġreve rence +Ġe uph +Ġcr ates +ĠT EXT +Ġ4 43 +Ġ3 19 +J SON +iqu ette +Ġshort stop +ic key +Ġpro pelled +Ġap i +ĠTh ieves +77 9 +Ġovers aw +Ġcol i +ĠNic ola +Ġover cl +ik awa +ĠC yr +Ġ38 4 +78 9 +ĠAll ows +10 27 +Det roit +TR Y +set up +ĠSocial ism +Sov iet +s usp +ĠAP R +ĠShut down +Ġal uminium +zb ek +ĠL over +GGGG GGGG +Ġdemocr acies +Ġ19 08 +ĠMer rill +ĠFranco is +gd ala +Ġtraff ickers +ĠT il +ĠGo at +Ġsp ed +ĠRes erv +Ġpro d +55 2 +Ġc ac +ĠUn iv +ĠSch we +Ġsw irling +ĠWild erness +ĠEgg s +Ġsadd ened +Ġarch aic +H yd +Ġexcess ively +B RE +Ġaer ospace +ĠVo ices +Cra ig +Ġign ited +In itially +ĠMc A +Ġhand set +Ġreform ing +Ġfrust rations +ĠDead pool +ĠBel ichick +ract or +ĠRagnar ok +ĠD rupal +ĠApp roximately +19 20 +ĠHub ble +arm or +ĠSar as +ĠJon as +Ġnostalg ic +Ġfeas ibility +Sah aran +Ġorb iting +Ġ9 70 +R u +Ġsh in +ĠInvestig ators +Ġinconsist encies +ĠP AN +B G +Ġgraz ing +Ġdetect ors +ĠStart up +ĠFun ny +ĠNa omi +Consider ing +Ġh og +ut f +ce mic +Ġfort ified +ĠFun ctions +Ġcod ec +nut rition +H at +" ! 
+micro soft +55 8 +ĠTh in +ĠA CE +Al ias +ĠO PS +p apers +P K +ãĢ İ +Ġimpro bable +N orthern +equ al +Ġlook out +Ġty res +ĠMod ified +ĠK op +Abs olutely +Ġbuild up +sil ver +Ġaud i +Ġgro tesque +ĠSab er +ĠPres byter +ON Y +Ġglac iers +ĠSho als +ĠK ass +ĠH RC +ĠNic ol +ĠL unch +ĠF oss +âĸ Ĵ +AD RA +ĠOne Plus +o ing +ground s +Ġincident al +Ġdatas ets +68 9 +ĠClarks on +Ġassemb ling +ĠCorrect ions +Ġdrink ers +Ġqual ifiers +Ġle ash +Ġunf ounded +ĠH undred +Ġkick off +T i +Ġrecon cil +ĠGr ants +ĠCompl iance +ĠDexter ity +Ġ19 06 +w arn +D allas +Max imum +n ard +av ia +be aut +ens itivity +tr ace +Ġpione ers +ĠF ract +ãĢ ı +Ġpre cept +Ġgloss y +ĠI EEE +Ac ross +Ġ6 80 +S leep +che on +Ġsatir ical +ĠMin otaur +ĠCla ude +Ġr é +ape go +Ġcar rot +ĠSem in +ino a +Ġz o +Ind ependent +Ġdiagn oses +ĠC ue +M AR +Ġrend ition +ĠK ik +Ġpath ology +Ġselect s +Link edIn +Ġass ay +ĠD res +Ġtext ual +post ed +IT AL +ĠM aul +N eal +Ġinter connected +Ġerr atic +ĠVir us +Ġ5 30 +Ġenvironmental ists +ĠP helps +Ġeng agements +ĠIN ST +Ġeconom ical +nox ious +Ġg earing +izz y +Ġfavor ably +ĠMcG ill +T erm +Ġh anged +Ġball park +ĠRe yes +Ġbe ware +ĠP sal +ĠMass acre +q i +Ġin accessible +acly sm +Ġfr ay +ill ac +Ġbitter ly +ĠCert ification +Mich igan +Ġir respective +al ore +Em pty +Ġendorse ments +Ġund et +f g +equ ipped +Ġmerc iless +ĠC ust +Ġimm ature +Ġvou cher +ĠBlack well +Ñ ı +h awk +dis ciplinary +ile e +ĠMak oto +ĠD ude +ãĥĩ ãĤ£ +Y ears +Ġin ver +Ġsh aman +ĠY ong +ip el +ell en +ĠCath y +br ids +Ġs arc +65 1 +N ear +Ġground work +Ġam az +Ġ4 15 +ĠHunting ton +hew s +ĠB ung +Ġarbit rarily +ĠW it +ĠAl berto +Ġdis qualified +best os +46 1 +Ġp c +Ġ28 4 +ro bat +Rob in +Ġh ugs +ĠTrans ition +ĠOcc asionally +Ġ3 26 +ĠWh ilst +ĠLe y +Ġspaces hip +cs v +Ġun successfully +ĠA u +le ck +ĠWing ed +ĠGrizz lies +. � +Ġne arer +ĠSorce ress +ĠInd igo +El se +8 40 +let es +Co ach +Ġup bringing +ĠK es +Ġseparat ist +Ġrac ists +Ġch ained +Ġabst inence +lear ning +Ġrein stated +Ġsymm etry +Ġremind ers +ĠChe vy +Ġm ont +Ġexempl ary +ĠT OR +Z X +Ġqual itative +ĠSt amp +ĠSav annah +ĠRoss i +Ġp aed +Ġdispens aries +ĠWall s +ĠCh ronic +Ġcompliment ary +ĠBeir ut +Ġ+ --- +igs list +Ġcrypt ographic +mas ters +ĠCap itals +Ġmax imal +Ġent ropy +Point s +Ġcombat ants +l ip +ĠGl ob +ĠB MC +ph ase +th ank +HT TP +Ġcomm uter +Ġ\( \ +.. / +ĠReg ener +ĠDO I +ĠActiv ision +Ġsl it +os al +RE M +Ġch ants +Y u +Ke ys +Bre xit +ĠFor ced +Ari zona +Ġsquad ron +IS O +ĠMal one +Ġ3 38 +Ġcontrast ing +Ġt idal +Ġlib el +Ġimpl anted +Ġupro ar +ĠC ater +Ġpropos itions +M anchester +ĠEuro s +it amin +G il +ĠEl ven +ĠSe ek +ĠB ai +Ġredevelop ment +ĠTown s +ĠL ub +! ", +al on +K rist +Ġmeas urable +Ġimagin able +Ġapost les +Y N +7 60 +Ġster oid +Ġspecific ity +ĠL ocated +ĠBeck er +ĠE du +ĠDiet ary +uts ch +ĠMar ilyn +Ġbl ister +ĠM EP +ĠK oz +ĠC MS +y ahoo +ĠCar ney +Ġbo asting +ĠC aleb +By te +read s +ad en +Pro blem +ĠWood ward +S we +S up +ĠK GB +Set up +Ġtac it +Ġret ribution +Ġd ues +ĠM ü +. ? 
+ä¸ Ń +p ots +Ġcame o +ĠP AL +educ ation +A my +like ly +g ling +Ġconstitution ally +ĠHam m +ĠSpe ak +Ġwid gets +br ate +Ġcra ppy +ĠI ter +Ġanticip ating +ĠB out +P ixel +ĠY ep +ĠLaur ie +Ġh ut +Ġbullet in +ĠSal vation +Ġch ats +ear able +Honest ly +AL TH +onse qu +c ult +isco very +ovy ch +Ġse lves +ĠSat oshi +S ounds +Ġconver gence +ĠRosen berg +19 74 +Ġnas al +Ġfull est +Ġfer ocious +x us +ist e +AM S +Ġlobb ied +Ġso othing +ĠGun n +t oday +0 24 +Ġinspir ational +ĠN BN +p b +g ewater +or ah +all owed +ĠCol iseum +Ġspecial izing +Ġinsane ly +ĠT ape +del ay +Ġt arn +ĠP ound +Ġmel anch +Ġdeploy ments +il and +Ġless en +Ġfur ry +ĠUE FA +Ġblood shed +ĠMe ier +ither ing +Ġhe irs +ĠJ aw +ax ter +ĠPublic ations +Ġal ters +int ention +ĠWinc hester +d etermination +ĠLif etime +th in +Mon ster +7 80 +Ġapprox imation +Ġsuper markets +ĠSecond s +or os +h uge +Ġb ribe +ĠLIM ITED +un ed +Ġmis interpret +ĠIn jury +Ġ3 67 +Ġthreshold s +ĠCarn ival +Ġgastro intestinal +Ġguid eline +Ġde ceived +f eatures +Ġpurported ly +ĠRon nie +ĠNew t +Ġsp acious +as us +Ġsuperhero es +ĠCyn thia +le gged +k amp +ch io +Ġth umbnail +ĠShir ley +ill ation +Ġshe ds +ĠZ y +E PA +Ġdam s +Ġy awn +n ah +ĠPe ggy +ĠE rie +ĠJu ventus +ĠF ountain +r x +don ald +al bum +ĠComp rehensive +Ġc aching +ĠU z +ulner ability +ĠPrinc iple +ĠJ ian +ing ers +cast s +ĠOs iris +ch art +t ile +ĠTiff any +ĠPatt on +ĠWh ip +Ġovers ized +J e +ĠCind erella +ĠB orders +ĠDa esh +M ah +Ġdog ma +Ġcommun ists +v u +Coun cil +Ġfresh water +Ġw ounding +Ġdeb acle +Ġyoung ster +Ġthread ed +ĠB ots +ĠSav ings +ãģ Ĥ +ol ing +oh o +Ġillum ination +M RI +Ġlo osen +tr ump +ag ency +ur ion +Ġmoment arily +ĠCh un +ĠBud apest +ĠAl ley +D isk +Ġaston ished +ĠCon quer +ĠAccount ing +h aving +ĠWe in +ĠAl right +Ġrev olver +Ġdel usion +Ġrelic s +Ġad herent +qu ant +Ġhand made +or io +Ġcomb ating +c oded +Ġquad ru +re th +N ik +ĠTrib al +ĠMyster ious +Ġin hal +ĠWin ning +ĠClass ification +ch anged +Ġun ab +Ġsc orn +icip ated +w l +ond uctor +Ġrein forcing +ĠChild hood +an ova +Ġadventure r +Ġdoctor al +ĠStrateg ies +Ġengulf ed +ĠEnc ounter +Ġl ashes +Crit ical +ric ular +ĠU TF +oci ation +check ing +ĠConsult ing +Run time +per iod +ĠAs gard +Ġdist illed +ĠPas adena +ĠD ying +ĠCOUN TY +Ġgran ite +Ġsm ack +Ġparach ute +ĠS UR +Virgin ia +ĠF urious +78 7 +ĠO kin +Ġcam el +ĠM bps +19 72 +ĠCh ao +ĠC yan +j oice +ef er +ĠW rap +ĠDeb ate +S eg +Ġfore arm +ĠIgn ore +Ġtim estamp +Ġprob ing +ĠNo on +ĠGra il +f en +Ġdorm ant +ĠFirst ly +ĠE ighth +ĠH UN +ĠDes ire +or as +Girl s +ĠDes mond +z ar +am ines +O AD +exec ute +Ġbo obs +ĠAT L +_ ( +Chel sea +Ġmasturb ation +ĠCo C +Ġdestroy er +ĠCh omsky +Ġsc atter +ĠAss ets +79 6 +ĠC argo +Ġrecept ive +ĠSc ope +Ġmarket ers +Ġlaun chers +Ġax le +ĠSE A +se q +ĠM off +f inding +ĠGib bs +Georg ia +extreme ly +N J +Ġlab orers +st als +Ġmed iation +ĠH edge +at own +Ġi od +des pite +v ill +J ane +ex istence +Ġcoinc ided +ĠUt ilities +ĠChe ap +Ġlog istical +Ġcul mination +ĠNic otine +p ak +F older +Ġrod ents +st uff +Ġlaw fully +Ġreper to +io ch +j j +Dial ogue +HH HH +lic tion +Look s +Ġ29 7 +Ġtur rets +ĠAb andon +Ġinc ess +ĠTraff ord +Ġcur led +Ġprefer ring +Ġprivat ization +Ġir resist +ĠP anda +ĠSh ake +ĠMc Gr +ãĥ Ħ +und ers +Ġdiscrim inated +Ġbart ender +I LE +Atl antic +Ġprop ensity +ĠW iz +ĠG im +con ference +Ġrein forces +G h +w agon +Ġe erie +F al +Ġhug ged +rac ist +R IC +F u +Ġf iller +ĠSt ub +Ġeng raved +ĠWrest le +Ġimagin ative +ĠPe er +ĠFact ors +an us +ĠDrac ula +mon itor +Ġrou ters +ib ia +ĠBoo lean +end ale +ĠSl aughter +ĠSh ack +R FC 
+ĠSpiel berg +S ax +ĠPH OTO +ĠCl over +ĠR ae +Dep ending +ĠMem or +ar am +Ġpier ced +Ġcur tains +v ale +ĠInqu isition +ĠP oke +Ġforecast ing +Ġcompl ains +S ense +ĠHer mes +isc overed +Ġb ible +ĠMor ph +Ġg erm +78 5 +D ON +Ġcon gen +Ġcr ane +ĠD PR +Ġrespect fully +R oom +ĠN aw +ĠDal ai +re ason +ĠAng us +Educ ation +ĠTitan ic +Ë ľ +Ġo val +un ited +Ġthird s +Ġmoist ur +ĠC PC +M iami +Ġtent acles +ĠPol aris +ex c +ex clusive +ĠPra irie +Ġcol ossal +ĠBl end +sur prisingly +ÃŃ s +Ġindo ctr +Ġbas al +ĠMP EG +und o +Spl it +Develop ment +Ġlan tern +19 71 +Ġprov ocation +Ġang uish +ĠB ind +ĠLe ia +duc ers +ipp y +conserv ancy +Ġinitial ize +ĠTw ice +ĠSu k +Ġpred ic +Ġdi ploma +Ġsoc iop +Ing redients +Ġhamm ered +ĠIr ma +Q aida +Ġglim ps +ĠB ian +Ġst acking +Ġf end +gov track +Ġun n +dem ocratic +ig ree +Ġ5 80 +Ġ29 4 +Ġstraw berry +ID ER +Ġcher ished +ĠH ots +Ġinfer red +Ġ8 08 +ĠS ocrates +O regon +ĠR oses +ĠFO IA +Ġins ensitive +Ġ40 8 +Recomm end +ĠSh ine +Ġpain staking +UG E +ĠHell er +ĠEnter prises +I OR +ad j +N RS +L G +Ġalien ated +Ġacknowled gement +ĠA UD +ĠRen eg +Ġvou chers +Ġ9 60 +Ġm oot +ĠDim ensions +Ġc abbage +B right +g at +ĠK lu +Ġlat ent +Ġz e +ĠM eng +Ġdis perse +Ġpand emonium +H Q +Ġvirt uous +ĠLoc ations +ee per +prov ided +Ġse ams +ĠW T +iz o +PR OV +Ġtit anium +Ġrecol lection +Ġcr an +Ġ7 80 +ĠN F +49 1 +64 2 +p acking +59 8 +text ure +Sp ider +fre edom +cipl ed +ĠTAM ADRA +âĻ ¦ +aut hent +ĠW ANT +r ified +Ġr ites +Ġuter us +k iss +Ġâī ¤ +Ġsk illet +Ġdis enfranch +ĠGa al +Comp an +Ġage ing +gu ide +B alt +Ġiter ator +Ġdiscretion ary +t ips +Ġprim ates +ĠTechn ique +ĠPay ments +az el +ĠR OCK +stant ial +0 60 +Ġd mg +ĠJack ets +ĠPlay off +Ġnurs ery +ĠSy mb +art on +Ġannex ation +Color ado +Ġco ils +ĠSh oes +âĦ¢ : +ĠRo z +COM PLE +ĠEve rest +ĠTri umph +J oy +G rid +à ¼ +process or +ĠPros per +ĠSever us +ĠSelect ed +r g +ĠTay yip +St ra +Ġski ing +Ġ? ) +Ġpe g +Tes la +Ġtime frame +Ġmaster mind +ĠN B +scient ific +ĠSh it +gener ic +IN TER +N UM +Ġst roll +ĠEn ix +ĠM MR +ĠE MS +m ovie +Ĥ ª +Ġminim izing +idd ling +Ġilleg itimate +Ġprot otyp +Ġpremature ly +Ġmanual s +obb ies +ĠCass idy +D EC +des ktop +Ġaer os +Ġscreen ings +Ġdeb ilitating +ĠGr ind +nature conservancy +Ġf ades +ter mination +assets adobe +F actor +Ġdefinitive ly +P oké +ap ult +ĠLaf ayette +C orn +ĠCor al +Ġstagn ant +T ue +Ġdissatisf action +G ender +Ġkid neys +ĠG ow +ĠDef eat +ĠAsh ton +Ġcart els +Ġfore closure +ĠExpl ore +stre ngth +ot in +Ġveterin arian +Ġf umble +Ġpar ap +ĠSt rait +r ils +Ġpr ick +ĠBerm uda +ĠAm munition +skin ned +Ġab ound +ĠB raz +Ġshar per +ĠAsc ension +Ġ9 78 +Ġpreview s +Ġcommun ion +ĠX Y +Ġph ony +Ġnewcom er +Ġ3 32 +." 
," +Ġredist ribution +Prot ect +ĠSo f +K al +Ġlip stick +w orst +Ġtang led +Ġretrospect ive +int eger +Ġvolunte ering +Ġ19 07 +Ġ -------------------- +ic hen +Ġunve iling +Ġsen seless +Ġfisher ies +\ - +Ġh inges +Ġcalcul us +My th +Ġund efeated +Ġoptim izations +Ġdep ress +Ġbill board +ĠY ad +ĠPy ramid +Is n +I de +Ġleg ion +ĠK ramer +ent anyl +Ġpenet rating +ĠHaw th +ĠPR ODUCT +ĠGer ard +ĠP act +ĠIn cluding +ĠEl ias +ĠEl aine +vis ual +Ġhum ming +Ġcond esc +ĠF asc +ä¸ Ĭ +Ġe galitarian +Ġdev s +ĠD ahl +O ps +D H +ĠB ounce +id ated +ald o +Ġrepublic an +Ġh amb +ĠS ett +ograph ies +CH APTER +Ġtrans sexual +Ġsky rocket +ans wer +Ġmark up +Ø ª +Ġhero ine +Comp are +ĠT av +Be ast +Ġsuccess ors +Ġna ïve +ĠBuck ley +st ress +me at +Ġdownload able +Ġindex ed +Ġsc aff +ĠL ump +ĠHom o +Stud io +In sp +Ġr acked +far ious +ĠPet ty +Ex ternal +Ġ19 09 +W ars +com mit +put ers +Ġun ob +ĠEr r +ĠE G +ĠAl am +ĠSiber ia +ĠAtmosp heric +IS TER +ĠSatan ic +trans lation +ĠL oud +tra umatic +l ique +Ġreson ate +ĠWel ch +Ġspark ing +ĠT OM +t one +Ġout l +Ġhandc uffed +ĠSer ie +8 01 +Ġland marks +ĠRee ves +Ġsoft ened +Ġdazz ling +ĠW anted +month s +Mag ikarp +Ġunt reated +ĠBed ford +M i +ĠDynam o +O re +79 5 +Ġwrong ful +Ġl ured +Ġcort isol +Ġve x +d rawn +ile t +Download ha +ĠF action +Ġlab yrinth +Ġhij acked +w aters +er ick +Ġsuper iors +ĠRow ling +ĠGu inness +Ġt d +99 2 +Ġune arthed +Ġcentr if +Ġsham eless +P od +ĠF ib +Ġ icing +Ġpredict or +Ġ29 2 +fore station +con struct +C and +@ # +Ġag itated +Ġre pr +OV A +Ġkn itting +ĠLim a +Ġf odder +68 4 +ĠPerson a +k l +7 01 +Ġbreak up +á ¸ +Ġapp alled +Ġantidepress ants +ĠSus sex +Har ris +ĠTher mal +ee ee +U pload +Ġg ulf +Ġdoor step +ĠSh ank +L U +ĠM EN +ĠP ond +s orry +Ġmis fortune +n ance +Ġb ona +M ut +Ġde graded +ĠL OG +ĠN ess +an imal +Ġa version +und own +Ġsupplement ed +ĠC ups +Ġ50 4 +Ġdep rive +ĠSpark le +Å Ĥ +ĠMed itation +auth ors +ĠSab an +ĠN aked +air d +ĠMand arin +ĠScript ures +ĠPerson nel +ĠMahar ashtra +Ġ19 03 +ĠP ai +ĠMir age +omb at +Access ory +Ġfrag mented +T ogether +Ġbelie vable +ĠGl adiator +al igned +ĠSl ug +M AT +Ġconvert ible +ĠBour bon +amer on +ĠRe hab +nt ax +Ġpowd ered +pill ar +Ġsm oker +ĠMans on +ĠB F +5 11 +ĠGood ell +ĠD AR +m ud +g art +Ġob edient +ĠTrans mission +ĠDon ation +8 80 +Ġbother ing +Material s +ãĤ ± +dest roy +Ġfore going +Ġanarch ism +ĠK ry +ice ps +Ġl ittered +ĠSch iff +Ġanecd otal +un its +Ġf ian +ĠSt im +ĠS OME +ĠInv aders +Ġbehaviour al +ĠVent ures +Ġsub lime +Ġfru ition +ĠPen alty +Ġcorros ion +¶ ħ +Ġlik ened +Ġbesie ged +ween ey +ĠCre ep +Ġlinem en +mult i +ic ably +ud der +Ġvital ity +Ġshort fall +ĠP ants +ap ist +H idden +ĠDro ps +med ical +Ġpron unciation +ĠN RL +Ġinsight ful +J V +ĠBe ard +ĠCh ou +Ġchar ms +Ġb ins +Ġamb assadors +ĠS aturdays +Ġinhib itor +ĠFr anch +6 01 +', ' +ĠCon or +art ney +ĠX peria +g rave +be es +ĠProtest ants +Ġso aking +ĠM andal +Ġph ased +Ġ6 60 +Ġsc ams +Ġbuzz ing +ĠItal ians +ĠLoren zo +ĠJ A +Ġhes itated +Ġcl iffs +ĠG OT +ingu ishable +Ġk o +Ġinter ruption +Z ip +Lear ning +Ġundersc ores +ĠBl ink +K u +57 9 +ĠAut ob +I RE +Ġwater ing +Ġpast ry +8 20 +Ġvision ary +ĠTempl ar +awa ited +Ġpist on +Ġant id +current ly +Ġp ard +Ġw aging +Ġnob ility +ĠY us +Ġinject ing +f aith +ĠP ASS +å º +Ġret ake +ĠPR OC +Ġcat hedral +b ash +Ġwrest lers +Ġpartner ing +Ġn oses +Ġ3 58 +Trans form +am en +Ġb outs +ĠId eal +ĠConstant in +Ġse p +ĠMon arch +att en +ĠPe oples +mod ified +Ġmor atorium +Ġpen chant +Ġoffensive ly +Ġprox ies +ok ane +ĠTaiwan ese +ĠP oo +ĠH OME +us ional +Ġver bs +ĠO man +vis ory 
+Ġpersu asion +Ġmult it +Ġsc issors +G ay +ow ay +oph ysical +l us +gn u +Ġap ocalyptic +Ġabsurd ity +Ġplay book +Ġautobi ography +I UM +Ġsne aking +ĠSim ulation +pp s +ell ery +Plan et +Ġright fully +Ġn iece +ĠN EC +ĠIP O +ĠDis closure +lean or +ous y +ST ER +Ġ28 2 +Cru z +Ch all +64 3 +ĠSurv ive +ĠF atal +ĠAm id +ap o +We apons +D EN +7 70 +ĠGreen wald +Ġlin en +al os +Ġpollut ants +ĠPCI e +k at +Ġp aw +ĠK raft +C hem +ĠTermin ator +Ġre incarn +Ġ] [ +ĠSe eds +Ġsilhou ette +ĠSt ores +Ġgro oming +ĠD irection +ĠIs abel +ĠBr idges +ðŁ ij +E ED +ĠM orsi +Ġval ves +ĠRank ed +ĠPh arma +ĠOrgan izations +Ġpenet rated +ĠRod ham +ĠProt oss +Ġove rest +Ġex asper +ĠT J +Ġ 000000 +Ġtrick le +Ġbour bon +WH O +Ġw retched +Ġmicrosc opic +Ġcheck list +Ġad orned +R oyal +Ad minist +ĠRet irement +ĠHig hest +We ather +ile ge +Ġincre ments +ĠC osponsors +Ġmas se +ĠS inn +r f +Ġh ordes +as sembly +75 4 +ĠNat asha +ĠTY PE +ĠGEN ERAL +Ġarr anging +Ġ40 7 +l ator +Ġg lean +Ġdisc redited +Ġclin icians +UN E +Ġachie ves +ĠEm erson +com plex += [ +Ġprincip ally +Ġfra il +p icked +Ġthan king +Ġre cl +ĠL AST +Ġsupp ressing +il ic +Ġantidepress ant +ĠLis bon +Ġth or +Ġsp a +Ġking doms +ĠPear ce +em o +Ġpl ung +Ġdiv est +Ġ ******************************** +b is +osp els +ad r +Sp irit +hall a +P ink +end ez +Ġresurrect ed +esc ape +ĠRosen stein +Ġge ological +Ġnecess ities +Ġcarn iv +ĠE lys +ĠBar ney +Ġ29 6 +dig y +ST ON +D OWN +Ġmil estones +Ġk er +Ġdismant ling +Ġre prim +Ġcross ings +19 45 +Ġpatri archy +Ġblasp hemy +Ġ3 59 +met ry +ĠOb esity +ĠDiff erences +bl ocking +ãĥķ ãĤ¡ +ich ita +ĠSab ha +ph alt +ĠCol o +ual a +effic ients +ĠMed ina +con sole +55 7 +ĠHann ibal +ĠHab it +ĠF ever +Ġthen ce +Ġsyn agogue +Ġessential s +Ġw ink +ĠTr ader +ID A +ĠSp oiler +ĠIceland ic +ĠHay ward +Ġpe ac +Ġmal ice +Ġflash back +Ġth w +Ġlay offs +L iquid +Ġtro oper +Ġh inge +ĠRead ers +Ph ill +ĠB auer +Cre ated +Ġaud its +ac compan +Ġunsus pecting +ier a +6666 6666 +Ġbro ch +Ġapprehend ed +ĠM alk +cer ning +ĠCod ex +O VER +M arsh +ĠD eng +ĠExp ression +Ġdisrespect ful +Ġasc ending +t ests +ĠPlaint iff +ster y +ĠAl ibaba +din and +ĠDem psey +Applic ations +mor al +Ġthrough put +Ġquar rel +Ġm ills +Ġhe mor +ĠC ASE +terror ist +st im +ifest yle +ro zen +CE PT +Ar k +u ci +lect ic +Ġirrit ating +she ets +A y +Ġrede emed +Ġhorn y +ĠTe ach +ĠS ear +dem ocracy +4 65 +ĠRest ore +Ġstand by +ĠP is +iff in +Ġsleep y +Ġextr ater +Ġcompl iments +Fram eworks +Ġinstall s +Ġb anging +sur face +found land +Ġmetaph ysical +Ġ28 3 +oul s +dev ices +Ar gs +ĠSac rifice +ĠMcC orm +es on +Cons ervative +ĠM ikhail +see ing +is ively +ĠRo oms +ĠGener ic +Ġenthusi astically +Ġgri pped +Ġcomed ic +ĠElectric ity +Ġgu errilla +Ġdec oration +ĠPerspect ive +Ġconsult ations +Ġun amb +Ġplag iar +Ġmagic ian +Ġe rection +ĠTour ism +or ied +ro xy +11 00 +T am +Ī è +Î ³ +× ª +ĠPred ators +Nit rome +Ġtelesc opes +project s +Ġun protected +Ġst ocked +ĠEnt reprene +nex pected +Ġwast ewater +V ill +Ġint imately +Ġi Cloud +ĠConst able +Ġspo of +Ġne farious +Ġfin s +Ġcens or +ĠMod es +ĠEs per +ar bon +Ġinter sections +Ġlaud ed +Ġphys i +Ġgener ously +ĠThe Nitrome +ĠTheNitrome Fan +Ġar isen +ĠÙ Ī +Ġg lands +ĠPav ilion +ĠGu pta +Ġuniform ly +Ġr amps +ri et +ĠWH EN +ĠVan essa +Ġrout ed +Ġlim p +ĠC PI +p ter +int uitive +Ġv aping +Ġexperiment ed +ĠOlymp us +ĠAm on +Ġsight ing +Ġinfiltr ate +ĠGentle man +Ġsign ings +ĠMe ow +ĠNav igation +che cks +4 33 +Ġel apsed +ĠBulg arian +esp ie +ĠS OM +d uring +Ġsp ills +anc a +ĠPly mouth +M AL +Ġdomest ically +ĠWater gate +ĠF AM +k illed +ed ited 
+ĠYour self +Ġsynchron ization +ĠPract ices +ST EP +Ġgen omes +ĠQ R +not ice +Ġloc ating +z in +Ġ3 29 +al cohol +Ġk itten +V o +Ġr inse +Ġgrapp le +ĠSc rew +ĠD ul +A IR +Ġle asing +ĠCaf é +Ġro ses +ĠRes pect +Ġmis lead +Ġperfect ed +Ġnud ity +Ġnon partisan +ĠCons umption +Report ing +Ġnu ances +Ġdeduct ible +ĠSh ots +Ġ3 77 +Ġæ ľ +ano oga +Ben ef +ĠB am +ĠS amp +if ix +Ġgal van +ĠMed als +rad ius +Ġno bles +Ġe aves +igr ate +K T +ĠHar bour +u ers +Ġrisk ed +re q +Ġneuro t +get table +ain a +Rom ney +Ġunder pin +Ġlo ft +ĠSub committee +ĠMong ol +b iz +Ġmanif ests +ass isted +ĠG aga +Ġsy nergy +Ġreligious ly +ĠPre f +ĠG erry +T AG +ĠCho i +4 66 +beh ind +ĠO u +Gold Magikarp +Ġhemor rh +R iver +Ġtend on +Ġinj ure +ĠF iona +Ġp ag +Ġag itation +|| || +ur an +ĠE SA +Ġest eem +Ġdod ging +Ġ4 12 +r ss +Ġce ases +ex cluding +Ġint akes +Ġinsert s +Ġemb old +ĠO ral +up uncture +4 11 +ĠUn ified +ĠDe le +Ġfurn ace +ĠCoy otes +ĠBr ach +L abor +Ġhand shake +Ġbru ises +Gr ade +éĹ ĺ +ĠGram my +ile en +St ates +ĠScandinav ian +ĠKard ash +8 66 +Ġeffort lessly +ĠDI RECT +ĠTH EN +ĠMe i +ert ation +19 68 +Ġgro in +w itch +Requ irements +98 5 +Ġroof s +Ġest ates +ĠH F +Ġha ha +Ġdense ly +ĠO CT +Ġpl astics +Ġincident ally +ĠTr acks +ĠTax es +Ġch anted +Ġforce ful +ĠBie ber +ĠK ahn +K ent +ĠC ot +lic ts +F ed +Ġhide ous +ĠVer d +ĠSynd icate +ĠIl legal +J et +ĠD AV +re asonable +c rew +Ġfundamental ist +Ġtruth ful +ĠJ ing +Ġl il +Ġdown ed +Ġen chanted +ĠPolic ies +ĠMcM aster +ĠH are +ides how +Ġpar ams +en cers +gorith m +Ġallow ances +Ġturb ulent +Ġcomplex ities +ĠK T +Ġ3 37 +ĠGen etic +F UN +D oug +t ick +Ġg igs +ument hal +Ġpatriarch al +Ġcal c +, ... +Ġc out +ĠGu an +Ġpath ological +ĠR ivals +Ġunder rated +Ġflu orescent +ĠJ iu +arna ev +ĠQu an +Ġ4 29 +Ġ ਠ+M ario +Con struct +ĠC itation +ĠR acial +ĠR SA +ĠF idel +Ġ3 95 +Person ally +C ause +à » +rad ical +in en +Ġvehement ly +ĠPap a +Ġintern ship +Ġfl akes +ĠRe ck +Luck ily +B ra +20 20 +rav ings +R N +W onder +Ser iously +Ġre usable +Ġpoll uted +ĠP eng +le igh +ind le +Ġcircuit ry +ĠMad onna +ĠB ART +Res idents +att ribute +Phil adelphia +Cl ub +Ġplan ner +Ġfr antically +Ġfaith fully +ĠTerrit ories +ĠL AT +ĠAnders en +an u +ĠP ARK +ĠS ora +i age +ĠPlay offs +ĠG CC +4 27 +Ġab norm +ĠL ever +Ġdisob edience +As ync +ĠShe a +V ert +Ġsk irts +ĠSaw yer +x p +Ġwors ening +Ġsc apego +ĠAng le +oth al +Ġtro ve +ĠSt y +ĠN guyen +mar ine +ide on +Dep ths +Bl og +ĠIll uminati +Ġtract s +Ġorgan ise +Ġo str +F s +Ġlever aging +ĠD aredevil +as ar +Ġl ang +Ġex termin +urs ions +ĠRom o +ãĤ¤ ãĥĪ +Ġcont ended +Ġencounter ing +ĠTable t +ĠAltern ate +sk ill +Ġswe ets +Ġco hesive +cap acity +Ġrep ud +Ġl izard +ro o +Ġpilgr ims +ĠR uff +ĠInstr ument +ĠLog o +uit ous +E H +Ġsales man +Ġank les +L ed +ĠPat ty +ud os +Own er +Ġdiscrep ancies +k j +M U +Ġuncond itional +Dragon Magazine +i ard +O ak +ĠConvers ation +be er +ĠOs aka +D elta +us ky +Ġsecret ion +Ġpl aza +Ġm ing +Ġde pletion +ĠM ous +ĠI TS +ĠH imal +ĠFle ming +Ġcyt ok +ĠH ick +Ġbat ters +ĠInt ellectual +6 75 +é r +IS ION +ĠQu entin +ĠCh apters +ih adi +Ġco aster +WAY S +ĠL izard +ĠY or +and ering +S kin +ha ust +ab by +Ġportray ing +Ġwield ed +d ash +Ġprop onent +Ġr ipple +Ġgrap hene +Ġfly er +Ġrec urrent +Ġdev ils +Ġwater fall +æĺ ¯ +go o +Text Color +Ġtam pering +IV ES +TR UMP +ĠAb el +ĠS AL +ĠHend ricks +ĠLu cius +b ots +Ġ40 96 +IST ORY +Gu est +ĠN X +in ant +Ben z +ĠLoad ed +ĠCle ver +t reatment +Ġta vern +Ġ3 39 +ĠT NT +ific antly +Tem perature +F el +Ġunder world +ĠJud ges +Ġ< + +Ġst ump +Ġoccup ancy +Ġab er +ĠF inder +) ", 
+ĠN unes +res et +in et +ect omy +Ġwell ness +ĠP eb +quart ered +and an +Ġneg atives +ĠTh iel +ĠCl ip +ĠL TD +Ġbl ight +Ġreperto ire +K yle +Ġqu er +ĠC es +Ġha pl +98 9 +ĠTh ames +isc opal +Des k +ivari ate +ĠEx cellence +found ation +Ġâ ĩ +X i +Ġmyster iously +esty les +Ġper ish +ĠEng els +ĠDE AD +09 0 +}} } +ĠUn real +Ġrest less +ID ES +orth odox +ĠInter mediate +Ġdin ners +ĠTr out +ĠSe ym +ĠHall s +og ged +Ġtraged ies +Ġdid nt +67 6 +Ġail ments +Ġobserv able +ĠV ide +ad apt +ĠD usk +Ġprofessional ism +ĠPres cott +ĠInd ies +p ox +ĠMe hran +W ide +Ġend emic +ĠPar an +B ird +Ġped als +ĠI U +ĠAdam ant +ĠH urt +Ġcorrel ates +urd en +Ġspons oring +cl imate +ĠUnivers ities +ĠK not +enn es +ĠDam ian +ĠAx el +S port +Ġbar b +ĠS no +sh own +ste en +ud ence +Ġnon violent +Ġhom ophobia +Ġbiom ass +ĠDet ail +Ġsrf N +ĠT une +accompan ied +I ENCE +Al bert +ĠMong o +z x +ĠCer berus +or bit +c ens +Ġsl ay +SH ARE +H Y +Ġb rawl +ĠPro be +Ġnonex istent +ĠClare nce +ĠBlack burn +Ġport als +ĠR ita +ĠRem ain +ĠLe vant +Ġtrick ed +ĠF erry +aver ing +ĠStraw berry +ĠAn swers +Ġhorrend ous +ĠA man +Supp lement +ĠT oad +Ġpe eled +Ġman oeuv +ĠU zbek +mond s +ĠH ector +Ġ40 2 +pe es +fix es +Ġd j +Ġres umes +Ġaccount ant +Ġadvers ity +Ġham pered +ĠL arson +Ġd oping +part s +H ur +Ġbe arded +Ġy r +ĠPlug in +å¥ ³ +Ġ/ ** +rol ley +Ġwaters hed +ĠSub mission +if lower +AS C +Ġcho ir +Ġsculpt ures +m A +incre asing +ai i +Ġsne akers +Ġconfront s +ĠEle phant +ĠEl ixir +Ġrec al +ĠT TL +w idget +ĠW ax +ĠGr ayson +Ġha irst +Ġhumili ated +ĠWAR N +app iness +ĠT TC +F uel +Ġpol io +Ġcomplex es +Ġbab e +ĠX IV +P F +). [ +P arts +Ġ4 35 +M eg +ĠY ards +ĠAL P +Ġy ells +Ġprin ces +Ġbull ies +ĠCapital ism +ex empt +FA Q +ĠSp onge +ĠAl a +Ġpleas antly +Ġbu f +Ġden ote +Ġunp ublished +Ġkne eling +asc a +Ġl apse +al ien +99 4 +Ġrefere es +ĠLaw yers +S anta +Ġpuzz ling +ĠProm etheus +ĠPh araoh +ĠDel ay +Ġfacilit ates +ĠC ES +Ġjew els +Ġbook let +ond ing +Ġpolar ization +ĠMor an +ĠSal ad +ĠS OS +ĠAdv ice +PH OTOS +IC AN +iat ures +ex press +ĠWonder land +ĠC ODE +ĠCL ASS +9 75 +Ġg rep +ĠD iesel +ĠGl ac +! ?" 
+Ġr m +o ine +disc rimination +ĠN urse +m allow +Ġv ortex +ĠCons ortium +Ġlarge Download +stra ight +augh lin +G rad +Ġpublic ized +ĠW aves +ĠRed d +Ġfest ivities +ĠM ane +ar ov +Ġfleet ing +ĠDr unk +ug en +C ele +Ġchromos omes +ĠD OT +-+-+ -+-+ +Ġbus iest +ĠBe aver +Sy rian +ĠK yr +k as +ĠCross Ref +19 50 +76 01 +Ġrepe aling +ĠWin ners +ĠMac ro +ĠD OD +bl ance +S ort +64 1 +Ġmet re +ĠD irk +Ġgo ggles +Ġdraw backs +Ġcomplain ant +Ġauthor izing +Ġantit rust +oper ated +Ġm ah +Ġexagger ation +Am azing +ĠSer aph +Ġha ze +w ow +Ġextingu ished +Ġcan yon +ĠB osh +Ġv ents +Ġsc rape +Cor rect +4 26 +Ġav g +Dem and +ĠâĪ ¼ +Ġmicrobi ota +"} ]," +ĠSt ev +B io +ĠPlan es +Ġsuggest ive +Ġdec ipher +ĠRefuge e +ĠKe jriwal +ĠGreen peace +Ġdecl ass +ĠSound ers +Ġth o +Ġdec rypt +Ġbr ushing +ĠJane iro +ip op +S i +8 77 +ĠGeoff rey +Ġc pu +ĠHaz el +Ġview points +Ġcris py +ĠNot ification +Ġsold er +ĠMod est +ĠHem isphere +Ġcass ette +in cludes +Ġident ifiers +ĠC ALL +in cent +T odd +ĠSwe ep +Ġ3 34 +b oss +Ġsm ir +gin x +Ġtown ship +Ġg rieving +ĠMos que +Net flix +AS ED +ĠMillenn ials +oc om +19 67 +Ġbold ly +s leep +Ġes che +arij uana +Ġsw irl +ĠPen al +Ġneglig ent +ĠStephen son +K ER +ĠZ oro +ris is +Ġlocal ization +ĠSeym our +ĠAng lic +red itation +prot ection +ĠPa ige +Ġo mit +ĠR ousse +ĠT ub +Ġinv itations +t ty +Ġm oss +ph ysical +C redits +Ġan archy +Ġchild care +Ġl ull +ĠM ek +ĠL anguages +lat est +ĠSan ford +Ġus ability +Ġdiff use +ĠD ATA +Ġsp rites +ĠVeget a +ĠProm otion +ãĥ¼ ãĤ¯ +rict ing +z ee +Tur kish +ĠTD s +pro ven +57 1 +Ġsmug glers +707 10 +Ġreform ed +ĠLo is +Ġun fl +ĠWITH OUT +ĠReturn ing +ann ie +ĠTom as +Fr anc +ĠProf it +ĠSER V +ĠR umble +ik uman +es an +Ġt esters +Ġgad get +Ġbrace let +ĠF SA +comp onent +Ġparamed ics +Ġj an +ĠRem em +ĠSk inner +Ġl ov +ĠQu ake +rom a +Ġfl ask +Pr inc +Ġover power +Ġlod ging +ĠK KK +ret te +Ġabsor bs +w rote +Ġ ," +K ings +ĠH ail +ĠFall ing +xt ap +ĠHel ena +ire ns +L arry +Ġpamph let +ĠC PR +G ro +ĠHirosh ima +Ġhol istic +". [ +Ġdet achment +Ġas pire +Ġcompl icit +ĠGreen wood +Ġresp awn +ĠSt upid +ĠFin ished +f al +b ass +Ġab hor +Ġmock ery +ĠFe ast +VID EO +Ġcon sec +ĠHung ry +P ull +ĠH ust +it ance +? 
ãĢį +) -- +ĠPar allel +con v +4 69 +ha ar +w ant +P aper +m ins +ĠTor o +ĠTR UMP +ĠR ai +D W +ĠW icked +ĠL ep +Ġfun ky +Ġdetrim ent +ios is +ache v +Ġde grade +im ilation +Ġret ard +Ġfrag mentation +Ġcow boy +ĠY PG +ĠH AL +Parent s +ĠS ieg +ĠStra uss +ĠRub ber +× IJ +Fr ag +Ġp t +Ġoption ally +ĠZ IP +ĠTrans cript +ĠD well +88 2 +M erc +ĠM OT +ãĥ¯ ãĥ³ +Ġhun ts +Ġexec utes +In cludes +Ġacid ic +ĠRespons ibility +ĠD umb +we i +And erson +ĠJas per +ight on +abs olutely +Ad ult +Ġpl under +Mor ning +ĠT ours +ĠD ane +Î º +ĠT EST +ĠG ina +Ġcan ine +aw an +Ġsocial ists +ĠS oda +Ġimp etus +ĠSupplement ary +oli ath +ĠKinn ikuman +mitted ly +second s +Ġorganis ers +Ġdocument aries +Vari able +GRE EN +Ġres orts +Ġbr agging +Ġ3 68 +Art ist +w k +bl ers +Un common +ĠRet rieved +Ġhect ares +Ġtox in +r ank +Ġfaith s +ĠG raphic +Ġve c +ĠL IA +Af rican +Ġard ent +end iary +L ake +ĠD OS +cient ious +ĠOk awaru +ĠAll y +ĠTim eline +D ash +ĠI c +contin ue +Ġt idy +Ġinstinct ively +ĠP ossibly +ĠOut door +ĠWould n +Ġl ich +ĠBr ay +ĠA X +Ġà ī +Ġ+ # +\ ' +Direct ory +ab iding +Ġf eral +ic ative +but t +Ġper verse +S alt +Ġwar ped +Ġnin eteen +Ġcabin ets +Ġsrf Attach +ĠSl oan +Ġpower ing +reg ation +F light +se vere +Ġst ren +Ġc og +ap ache +Ġâ Ŀ +Ġcaf eteria +p aces +ĠGrim oire +uton ium +Ġr aining +Ġcir cling +Ġlineback ers +c redit +Ġrep atri +ĠCam den +lic ense +Ġly ric +Ġdescript or +Ġval leys +Ġre q +Ġback stage +ĠPro hibition +ĠK et +Op ening +S ym +æĸ ¹ +Ġserv ings +Ġoverse en +Ġaster oids +ĠMod s +ĠSpr inger +ĠCont ainer +è » +ĠM ens +Ġmult im +Ġfire fighter +pe c +Ġchlor ine +Ð ¼ +end i +Ġsp aring +Ġpolyg amy +ĠR N +ĠP ell +Ġt igers +Ġflash y +ĠMad ame +S word +Ġpref rontal +Ġpre requisite +uc a +Ġw ifi +Ġmiscon ception +Ġharsh ly +ĠStream ing +ot om +ĠGiul iani +foot ed +Ġtub ing +ind ividual +z ek +n uclear +m ol +Ġright ful +49 3 +Ġspecial ization +Ġpassion ately +ĠVel ocity +ĠAv ailability +T enn +Ġl atch +ĠSome body +Ġhel ium +cl aw +Ġdi pping +XX X +Ġinter personal +7 10 +Ġsub ter +Ġbi ologists +ĠLight ing +Ġopt ic +Ġden im +end on +ĠC orm +Ġ3 41 +ĠC oup +Ġfear less +Ġal ot +ĠCliff ord +ĠRun time +ĠProv ision +up dated +lene ck +Ġneur on +Ġgrad ing +ĠC t +sequ ence +in ia +con cept +Ġro aring +ri val +ĠCaucas ian +Ġmon og +key es +Ġappell ate +Ġlia ison +EStream Frame +ĠPl um +! . 
+Ġsp herical +Ġper ished +Ġbl ot +Ġben ches +Ġ4 11 +Ġpione ered +Ġhur led +Jenn ifer +ĠYose mite +Ch air +Ġreef s +Ġelect or +ĠAnt hem +65 2 +Ġun install +Ġimp ede +Ġbl inking +Ġgot o +Dec re +A ren +Ġstabil ization +ĠDis abled +ĠYanuk ovych +Ġoutlaw ed +ĠVent ura +ten ess +Ġplant ation +Ġy acht +ĠHu awei +Ġsol vent +Ġgr acious +Ġcur iously +Ġcapac itor +Ġc x +ĠRef lex +Ph ys +ĠC f +pt in +cons ervative +Ġinv ocation +c our +F N +ĠNew ly +H our +As ian +ĠLe ading +ĠAer ospace +An ne +Ġpre natal +Ġdeterior ating +H CR +ĠNorm andy +ol ini +ĠAm bro +9 10 +Ġset backs +ĠT RE +Ġs ig +ĠSc ourge +59 7 +79 8 +Game play +Ġm sec +M X +Ġprice y +ĠL LP +aker u +Ġover arching +ĠB ale +Ġworld ly +Cl ark +Ġscen ic +Ġdisl iked +ĠCont rolled +T ickets +ĠE W +ab ies +ĠPl enty +Non etheless +Ġart isan +Trans fer +ĠF amous +Ġinf ield +ble y +Ġunres olved +ĠML A +ãĤ Ĥ +Cor rection +Ġdemocr at +ĠMore no +ro cal +il ings +Ġsail or +Ġr ife +h ung +Ġtrop es +Ġsn atched +ĠL IN +ĠB ib +ES A +ĠPre v +ĠCam el +run time +Ġob noxious +4 37 +Ġsum mers +Ġunexpl ained +ĠWal ters +cal iber +Ġg ull +ĠEnd urance +ä½ ľ +Ġ3 47 +Ir ish +Ġaer obic +Ġcr amped +ĠHon olulu +à © +us erc +ec ast +AC Y +ĠQu ery +ãĤ¹ ãĥĪ +Bet a +Ġsuscept ibility +ĠSh iv +ĠLim baugh +Ġà ĸ +ĠN XT +ĠM uss +ĠBrit ons +ES CO +EG IN +Ġ% % +Ġsec ession +ĠPat ron +ĠLu a +n aires +ĠJPM organ +us b +ocy te +Ġcouncill ors +ĠLi ang +f arm +Ġnerv ously +Ġattract iveness +ĠK ov +j ump +Pl ot +Ġst ains +ĠStat ue +ĠApost les +he ter +ĠSUP PORT +Ġoverwhel m +Y ES +Ġ29 1 +d ensity +Ġtra pping +M it +Ġf ide +ĠPam ela +atl antic +Dam n +Ġp ts +OP A +Ġserv icing +Ġoverfl owing +ul o +ĠE rit +t icket +light ing +ĠH mm +ãĥ¼ ãĥ« +im oto +Ġchuck le +4 23 +ãģ ķ +sh ape +Ġque ues +Ġanch ors +ãĤ¼ ãĤ¦ãĤ¹ +F er +Ġaw oke +Ġ6 66 +h ands +Ġdiver gence +Ġ50 5 +T ips +Ġdep ot +Ġske w +ĠDel iver +op ot +Ġdiv ul +ĠE B +uns igned +ĠUn i +X box +Ġfor ks +Ġ7 02 +å ¯ +Ġpromot ers +ĠV apor +Ġlev ied +sl ot +Ġpig ment +Ġcyl inders +C RE +Ġsn atch +Ġperpet ually +Ġl icking +ĠFe et +ĠKra ken +ĠHold en +ĠCLS ID +m r +Ġproject or +Ġden otes +Ġchap el +ĠTor rent +b ler +R oute +ĠDef endant +ĠPublisher s +ĠM ales +ĠInn ov +ĠAg ility +rit er +ty mology +st ores +L ind +Ġf olly +ĠZur ich +B le +Ġnurt ure +Ġcoast line +uch in +D omin +Ġfri vol +ĠCons olid +res ults +M J +Ġphyl ogen +Ġha uled +ĠW iley +ĠJess ie +ĠPrep are +ĠE ps +Ġtreasure r +I AS +Ġcolon ists +Ġin und +ĠWW F +ĠCon verted +6 000 +out side +ĠApp earance +ĠRel ic +ĠM ister +s aw +Ġresult ant +Ġadject ive +ĠLaure l +ĠHind i +b da +Pe ace +Ġreb irth +Ġmembr anes +Ġforward ing +Ġcoll ided +ĠCar olyn +K ansas +5 99 +ĠSolid GoldMagikarp +Be ck +Ġstress ing +ĠGo o +ĠCooper ative +Ġf s +ĠAr chie +L iter +ĠK lopp +J erry +Ġfoot wear +War ren +Ġsc ree +h are +Under standing +P ed +Ġanth ology +ĠAnn ounce +M ega +Ġflu ent +Ġbond age +ĠDisc ount +il ial +C art +ĠNight mares +Sh am +ĠB oll +uss ie +H ttp +Atl anta +Ġun recogn +ĠB id +Ġunder grad +Ġforg iving +ĠGl over +AAAA AAAA +4 45 +V G +pa io +kill ers +Ġrespons ibly +Ġmobil ize +Ġeffect ed +ĠL umin +Ġk ale +Ġinfring ing +ann ounced +Ġf itt +b atch +ĠT ackle +ĠL ime +ĠAP P +uke mia +Ġrub y +Ġex oner +ĠCas ual +0 70 +Ġpel vic +Ġautom ate +ĠK ear +ĠCoast al +Ġcre ed +Ġbored om +ĠSt un +ri ott +Ĥ İ +Ġregener ate +Ġcomed ians +ĠOP ER +Sp ons +id ium +on is +L ocated +05 7 +Ġsusp ense +ĠD ating +C ass +Ġneoc ons +ĠShin zo +Ġaw oken +ch rist +ĠMess ages +att led +ĠSpr ay +ĠSp ice +C W +Ġshield ing +ĠG aul +Am id +Ġparam ilitary +Ġmult if +ĠTan ner +il k +Ġgodd amn +g ements +Ġbe friend +m obi +Ġ3 88 +fold 
er +acc a +Ġins in +g ap +N ev +fif th +Ġpsychiat ry +b anks +TH IS +Ġhar b +ac qu +Ġfac ade +ĠPower Point +80 3 +Ġbl uff +Sh ares +Ġfavor ing +El izabeth +Ãį Ãį +Ġr anger +77 2 +ĠAr che +h ak +ĠGen etics +ĠF EMA +Ġev olves +Ġest e +ĠP ets +ĠM é +ĠInterest ing +ĠCanter bury +ch apter +ĠStar fleet +Sp anish +Ġdraw back +ĠNor wich +9 70 +n orth +ag anda +Ġtransform ative +ram ids +bi ology +ad ay +Ġpropag ation +ĠGam ma +ĠDen ise +ĠCalcul ator +ent imes +ĠB ett +Ġapp endix +ĠHD D +AK ING +Ġst igmat +Ġhol ster +Ġord inarily +Ch ance +ĠCont rary +Ġad hesive +Ġgather s +6 12 +re au +ony ms +ew ays +Ġindu ces +Ġinterchange able +se m +Wh it +Ġtr ance +Ġincorpor ation +ĠExt ras +Fin ancial +Ġawkward ly +ĠStur geon +ĠH Y +Norm ally +ĠEnd ing +ĠAss ist +enc rypted +Ġsub jug +Ġn os +Ġfan atic +C ub +C U +?" . +Ġirre versible +å Ĥ +03 1 +ĠH AR +sp read +ul ia += $ +Sc ope +L ots +Ġlif estyles +ol on +Ġf eds +Ġcongrat ulate +web kit +Ġindist inguishable +ĠSw ing +Ġcommand ments +qu ila +ab ella +m ethyl +ann abin +Ġo vere +Ġlob ster +ĠQU EST +ĠCONT IN +bern atorial +:::: :::: +ĠTra ve +ĠSam oa +AN I +75 2 +Ð ´ +userc ontent +ĠMod erate +y eah +ĠK itt +Ġwe e +Ġstuff ing +ĠInter vention +ĠD ign +Ġware houses +ĠF iji +Ġpel lets +Ġtake away +ĠT ABLE +ĠClass ical +col lection +Ġland fall +ĠMus cle +Ġsett les +ĠAD V +Ġ3 44 +L aura +Ġf ared +ĠPart ial +4 36 +oss ibility +ĠD aly +ĠT arant +ĠFu ji +am l +c ence +55 1 +ĠProced ures +ĠO CD +ĠU D +t in +Q UI +ach o +4 38 +Ġgl itches +Ġenchant ment +Ġcalcul ates +IR O +ĠH ua +alys es +ĠL ift +um o +Ġle apt +Ġhypothes ized +ĠGust av +it ans +VERS ION +æ ł +Rog er +Ġr and +ĠAd apter +Ġ3 31 +ĠPet ition +k ies +M ars +Ġunder cut +ze es +ĠLy ons +ĠDH CP +Miss ing +Ġretire es +Ġins idious +el i +> ) +. ãĢį +Ġfinal ists +ĠA ure +Ġacc user +Ġwas tes +ĠY s +ĠL ori +Ġconstitu encies +Ġsupp er +Ġmay hem +or ange +Ġmis placed +Ġmanager ial +Ġex ce +ĠCL I +Ġprim al +ĠL ent +Cry stal +h over +ĠN TS +end um +Ġd w +ĠAl c +n ostic +Ġpres erves +ĠTs arnaev +Ġtri pled +rel ative +Arc ade +k illing +ĠW EEK +ĠH anna +D ust +Com pleted +ģ « +Ġappro ves +ĠSur f +ĠLuther an +ven ants +Ġrobber ies +we ights +soft ware +at ana +ug al +Ġgrav y +ĠC ance +OLOG Y +ly ak +Ton ight +Ġunve il +Ġ19 04 +ĠMin ion +ent ious +st ice +pack ages +ĠG EAR +Ġg ol +ĠHutch inson +ĠProf ession +ĠG UN +ĠDiff erence +ĠTsuk uyomi +ĠLes bian +6 70 +Ġfug itive +ĠPlan etary +-------------------------------- ------------------------ +Ġacc rued +Ġch icks +Ġsto pp +Ġblock ers +C od +Ġcomment ers +ĠSomew here +ĠPhot ographer +the me +Ġmay oral +w u +Ġanten nas +Ġrev amped +ĠSubject s +it é +im ura +Ġentr ances +liter ally +Ġten ets +ĠO MG +ĠMP H +ĠDon key +ĠOff ense +Ġ" + +Sn ap +ĠAF B +Ġan imate +ĠS od +His panic +Ġinconsist ency +D b +F Y +Ex port +Ġa pe +Ġpear l +ib el +ĠPAC s +Ġ{ \ +Ġact u +ĠHS BC +camp us +Ġpay off +Ġde ities +ĠN ato +ou ple +Ġcens ored +ĠCl ojure +Ġconf ounding +en i +Ġreck on +op he +Ġspot ting +Ġsign ifies +Ġprop el +Ġfest ive +S uggest +Ġpled ging +ĠB erman +Ġrebell ious +Ġovershadow ed +Ġinfiltr ated +j obs +67 2 +Ġscal able +Ġdomin ion +ĠNew foundland +ĠMead ow +Ġpart itions +AM I +Ġsupplement ary +str ument +Ġhair y +Ġperpet uate +Ġnuts hell +ĠPot ato +ĠHob bit +Ġcur ses +Flo at +Ġquiet er +Ġfuel ing +Ġcaps ules +ĠL ust +ĠH aunted +Exec utive +Ġchild birth +G re +Ġrad iant +å İ +Ġm alls +Ġin ept +ĠWarrant y +Ġspect ator +E h +t hens +Ġculmin ating +æ © +ary a +ãĤ ® +ilit arian +ĠOR IG +ĠSp ending +pt ives +ĠS iren +ĠRec ording +ay ne +Ġv im +Ġspr ang +T ang +ĠM FT +mor ning +ĠWe ed +m peg 
+cess ion +ĠCh ung +7 30 +w arning +56 2 +handed ly +P oor +P olitics +: # +Ġp ian +Ġfec es +ĠDocument ation +Ġban ished +Ġ3 99 +ĠAR C +Ġhe inous +J ake +ĠAm ir +way ne +v re +os henko +Ġnotebook s +Ġfound ational +Ġmarvel ous +ixt ape +Ġwithdraw als +Ġh orde +ĠD habi +is able +ĠK D +Ġcontag ious +ĠD ip +ĠAr rows +Ġpronoun s +Ġmorph ine +ĠB US +68 2 +Ġk osher +fin ished +ĠInstr uments +Ġf used +yd en +ĠSal mon +F ab +aff ected +K EN +C ENT +Dom ain +Ġpoke mon +ĠDr inking +G rowing +ĠInvestig ative +ĠA ether +em i +Ġtabl oid +Ġrep ro +ĠNot withstanding +ĠBers erker +Ġdram as +Ġclich é +Ġb ung +ĠU RI +ĠD os +0 44 +Ġpast ors +Ġl s +Ġac rylic +aun ts +Ed ward +Ġmajor ities +B ang +Ġfield ing +ĠRepl acement +ĠAl chemy +pp ard +ĠRome o +ĠSan ct +ĠLav rov +ib ble +Inst ruct +Ġimp ractical +ĠPlay boy +ce phal +Ġsw aps +Ġk an +ĠThe o +Ġillust rating +Ġdismant led +ĠTrans gender +ĠG uth +UG H +Ġtriumph ant +Ġencomp ass +Ġbook mark +udd in +j er +Ġpred icate +ES H +Ġwhen ce +ĠAB E +Ġnon profits +Se qu +Ġdi abetic +Ġp end +Ġheart felt +sh i +Ġinter acts +ĠTele com +Ġbombard ment +dep ending +ĠLow ry +ĠAd mission +ĠBl ooming +ust ration +ene gger +B rew +Ġmol ten +ĠNer d +P IN +âĸ Ģ +ave ment +Ġtou red +Ġco efficients +ĠTray von +ans son +Ġsand y +t old +fl ows +Ġpop ulous +ĠT inder +ĠBl iss +R achel +Min imum +Ġcontest ant +ĠRed uce +ĠMor se +ĠGrass ley +ĠClick er +Ġexp r +Ġs incerity +Ġmar qu +Ġelic it +ĠPro position +ĠDemon ic +Ġtac os +G reek +Ġpost war +Ġin sofar +ĠP ork +Ġ35 2 +doctor al +walk ing +Ġmid term +ĠSam my +sight ed +ĠTR ANS +ic i +AL D +ĠUS L +ĠF ISA +ĠAm pl +ĠAlex andra +ine lli +Tr ain +Ġsign ify +ĠVers us +Ġob fusc +Ġk h +Ġagg ro +ĠRen ault +Ġ3 48 +5 18 +ox icity +0 22 +ĠTw ist +Ġgoof y +D ynamic +Ġbrief ings +m ight +8 99 +Ġderog atory +T ro +Ġfor ging +ĠKor an +ĠMar ried +ĠBuc s +Ġpal ate +ĠCon version +m able +4 13 +Ġ( _ +Ġs iph +ĠN EO +col lege +Ġmarg inally +Ġfl irt +ĠTra ps +ĠP ace +é »Ĵ +Ġgoalt ender +Ġforb ids +Ġcler ks +ĠT ant +ĠRobb ins +ĠPrint ing +Ġpremie red +Ġmagn ification +ĠT G +ĠR ouse +ĠM ock +odynam ics +Ġpre clude +ism o +ĠPul itzer +Ġaval anche +ĠK odi +rib une +ĠL ena +Elect ric +Ġref inery +Ġend owed +Ġcounsel ors +Ġd olphin +ĠM ith +Ġarm oured +hib ited +Beg in +ĠP W +O il +ĠV or +ĠShar if +ĠFraz ier +est ate +Ġj ams +Pro xy +Ġband its +ĠPresbyter ian +ĠPrem iere +t iny +ĠCru el +Test ing +Ġhom er +ĠV ERS +ĠPro l +ĠDep osit +ĠCoff in +Ġsemin ars +Ġs ql +ĠDef endants +Altern atively +ĠR ats +ç « +ethy st +' > +Ġiss uer +58 9 +Ġch aired +ĠAccess ories +man ent +Ġmar row +ĠPrim ordial +C N +Ġlimit less +ĠCarn age +Ġund rafted +q v +IN ESS +on ew +Ġco hesion +98 7 +Ġne cks +Ġfootball er +ĠG ER +Ġdetect able +ĠSupport ing +ĠCS V +oc ally +k Hz +Ġund e +Ġsh one +Ġbud ding +tra k +Stand ing +ĠStar craft +ĠKem p +Ben ch +Ġthw arted +ĠGround s +ath i +L isa +Dial og +ĠS X +V ision +Ġingen ious +Ù IJ +Ġfost ering +ĠZ a +ĠIn gram +Ġ" @ +N aturally +6 16 +0 35 +ĠF AC +H mm +55 4 +Ġacceler ator +ĠV end +Ġsun screen +Ġtuber culosis +rav iolet +ĠFunction al +ĠEr rors +ed ar +19 66 +ĠSpect re +ĠRec ipes +88 5 +ĠM ankind +L iverpool +Ġ| -- +Ġsubst itutes +ĠX T +w ired +Ġinc o +ĠAf gh +E va +ic c +S ong +K night +Ġdilig ently +ĠBroad cast +A id +Ġaf ar +ĠH MS +aton in +ĠGr ateful +Ġfire place +ĠOm ni +e uro +ĠF RE +ĠSh ib +ĠDig est +t oggle +Ġheads ets +Ġdiff usion +ĠSqu irrel +ĠF N +Ġdark ened +out her +Ġsleep s +ĠX er +gun s +Ġset ups +Ġpars ed +Ġmamm oth +ĠCur ious +g ob +ĠFitz patrick +ĠEm il +im ov +........ ..... 
+ĠB enny +Second ly +Ġheart y +Ġcons on +st ained +Ġgal actic +cl ave +Ġplummet ed +Ġp ests +Ġsw at +Ġrefer rals +ĠLion el +h oly +Ġunder dog +ĠSl ater +ĠProv ide +ĠAm ar +ress or +å Į +ong a +Ġtim id +Ġp iety +ĠD ek +Ġsur ging +az o +Ġ6 10 +Ġdes ks +ĠSp okane +ĠAn field +Ġwars hips +ĠCob ra +Ġar ming +clus ively +ĠBad ge +ag ascar +ĠPR ESS +ĠMcK enzie +ĠFer dinand +burn ing +Af ee +Ġtyr ann +ĠI w +ĠBo one +100 7 +ĠRe pt +Ċ Âł +Ġcar avan +ĠD ill +ĠBundes liga +Ch uck +Ġheal er +ãĥ¼ãĥ Ĩ +ĠH obby +Ġneg ate +Ġcrit iques +section al +mop olitan +Ġd x +Ġouts ourcing +ĠC ipher +t ap +Sh arp +Ġup beat +Ġhang ar +Ġcru ising +ĠNi agara +Ġ3 42 +ill us +ĠS v +Ġsubt itles +Ġsqu ared +Ġbook store +Ġrevolution aries +ĠCarl ton +ab al +Ut ah +Ġdesp ise +ĠU M +cons ider +aid o +Ġc arts +ĠT urtles +Tr aining +Ġhonor ary + ¢ +Ġtri angles +4 22 +Ġreprint ed +Ġgrace ful +ĠMong olia +Ġdisrupt ions +ĠB oh +Ġ3 49 +Ġdr ains +Ġcons ulate +Ġb ends +Ġm afia +ur on +ĠF ulton +m isc +Ġren al +Ġin action +ck ing +Ġphot ons +Ġbru ised +ĠC odes +og i +Ġn ests +ĠLove ly +ĠLib re +ĠD aryl +Ġ# ## +S ys +. ," +Ġfree zes +est ablishment +and owski +Ġcum bers +ĠSt arg +ĠBom bs +Ġleg ions +Ġhand writing +Ġgr un +ĠC ah +sequ ent +Ġm oth +ĠMS M +Ins ert +F if +Ġmot el +Ġdex ter +ĠB ild +hearted ly +Ġpro pe +ĠText ure +ĠJ unction +ynt hesis +oc ard +ĠVer a +ĠBar th +Ġμ g +Ġl ashed +Ġ35 1 +ĠZ amb +ĠSt aples +ĠCort ex +ĠCork er +Ġcontinu um +ĠWR ITE +unt a +rid or +Ġde ems +0 33 +ĠG OLD +p as +Ġrep ressive +ãĥĨ ãĤ£ +Ġbaff led +Sc ar +Ġc rave +Ġ ______ +Ġentrepreneurs hip +ĠDirector ate +Ġ' [ +Ġv ines +Ġasc ended +ĠGR OUP +ĠGood bye +Ġdo gged +ãĥ´ ãĤ¡ +Man ufact +Ġunimagin able +ri ots +ier rez +Ġrel ativity +ĠCraft ing +ra ught +ud en +c ookie +Ġassass ins +Ġdissatisf ied +ac ci +Ġcondu it +Sp read +ĠR ican +n ice +izz le +Ġsc ares +ĠWH Y +ph ans +5 35 +Ġprot racted +ĠKrist en +5 36 +ĠSc rib +ĠNe h +Ġtwent ies +Ġpredic ament +Ġhandc uffs +Ġfruit ful +ĠU L +ĠLud wig +Ġatt est +ĠBre aker +Ġbi ologically +ĠDeal er +Ġrenov ations +f w +ess en +Al ice +ĠHen ri +Ġun ilaterally +ĠS idd +h ai +ĠSt retch +S ales +Ġcumbers ome +ĠJ avier +Ġtrend y +Ġrot ting +ĠChall enges +Ġscra ps +Ġfac ets +ĠVer onica +ĠVer ge +ĠS ana +Al ien +ĠR ih +Ġrad ial +ect ar +Ġ6 30 +cl i +Mar ie +Ġwild fire +ĠCat o +h ander +Ġwait ress +Ġch ops +ĠS ECTION +Ġblunt ly +ĠCat alog +n ian +stud y +Ġpat rolling +ĠT enth +nex us +ĠN ON +op sy +Ġsc athing +s ie +Ġdeterior ated +V B +Naz is +Ġdep ictions +Ġauthent icated +ĠCon ce +k rit +Ġpromul g +ĠL ONG +U FC +ĠVis itors +ĠRec all +Ġrehab ilit +ĠSL I +Ġglac ier +ĠB ite +Ġ50 3 +Ġvom it +Ġfer mented +ĠKh alid +Ġgrad ed +ĠMag icka +ĠIch igo +power ful +ic ators +75 3 +Ġsh rew +Ġ35 6 +Ġlegal izing +Ġall otted +ĠArch demon +ith ing +igg urat +V OL +Le od +Ġo ily +Ġindu cing +Ġamy gdala +Ġadm ins +ĠAcqu isition +C AN +Ġsche matic +Ġmo an +ĠCamer oon +Ġt ink +Ġmer ry +Ġbutter flies +ĠGo ff +Ġworks pace +ĠCor ona +Ġj avascript +ĠD olphin +ĠCant or +4 64 +to e +AP S +ĠAg ing +Ġpadd ed +ĠZ heng +ĠHe ld +Ġest ranged +Ġ7 70 +. 
} +ĠDun ham +Ġsm okes +Ġcap itals +und ai +Sh in +ĠFound ing +Ġent itle +Ġcenter piece +D iscover +Ġthere to +al ert +ĠN ou +ĠAnaly st +l c +F H +FI ELD +ĠP OV +gr ay +Ġar cs +ĠH OT +Ġr s +Ġoblig atory +ĠArchitect s +ĠS ven +ĠF EC +0 200 +Christ mas +ĠAlban ia +rat om +58 7 +Ġhard ships +Ġaut os +ĠCharg es +Ġap es +Ġ3 76 +wal let +Ġintox ication +Ġgobl in +Ġ5 70 +++++++++ ++++++++ +ĠYel p +ĠMag netic +ĠBr iggs +R ail +Ġspawn s +ĠW iggins +Ġshowc ased +Ġres orted +ub en +Ġwh ipping +Ġim itate +Ġdigest ion +ĠUS PS +ĠG est +Ġye a +ĠT ight +ind al +ic as +` . +C AST +'' ; +ĠF et +opath ic +In valid +Ġregrett ed +Ġbro ccoli +ĠSc ores +e ve +Ġpost ings +Ġaccum ulating +Ġneed less +elf th +Ġmay ors +Ġsc rib +Ġanecd otes +Ġbot ched +ĠRib bon +ĠConstant ine +i uses +ess es +Ġdev ise +Comp ared +Ġp udding +Ġg arg +Ġev oke +79 7 +Ġdet ox +9 09 +ĠPie ces +ĠMcC artney +Ġmet ast +ĠK rypt +P OR +Ġt ending +ĠMerch ants +Pro of +ĠV arg +ĠPort able +ãĥ¼ãĥĨ ãĤ£ +B rain +25 00 +Ġfol iage +Ø ¹ +Ġment ors +ĠA ires +Ġminimal ist +Ġing ested +ĠTro jan +ĠQ ian +inv olved +0 27 +Ġer oded +RA FT +Ġbl urry +M ob +Ġbuff et +ĠFn atic +ae a +KN OWN +ĠIn it +s afety +en um +ACT ION +ĠCrus her +ĠD ates +Ġ ................ +c alling +ak ov +Ġvent ured +Ġ5 55 +au ga +H art +ĠA ero +M AC +Ġthin ly +Ġar ra +ST ATE +ild e +ĠJac qu +ĠFem ales +Ġthe orem +Ġ3 46 +Ġsmart est +ĠPU BLIC +ĠK ron +ĠB its +ĠV essel +ĠTele phone +Ġdec ap +Ġadj unct +ĠS EN +mer ga +Ġred acted +Ġpre historic +Ġexplan atory +ĠRun s +ĠUtt ar +ĠM anny +ĠAUTH OR +ĠUnle ashed +ĠBow ling +be ans +79 3 +Ġunivers es +Ġsens it +ĠK ung +re peat +ctr l +Ġp aced +Ġfull er +Cl ock +Ġrec omb +ĠF aul +ĠB unker +Ġpool ed +Ġan a +ĠM outh +LL OW +hum ane +Ġbull do +ĠMicha els +f am +Ġwreck ed +Ġport rays +ĠWh ale +ĠH es +Ġguess es +ĠBrow se +ĠL APD +Ġconsequ ential +ĠInn ocent +ĠD RAG +Ġtrans gress +ĠO aks +Ġtri via +ĠRes on +ĠA DS +-- + +ĠT oll +Ġgrasp ing +ĠTHE M +ĠT ags +ĠCon clusion +Ġpract icable +Ġho op +Ġunintention ally +Ġign ite +ĠM ov +ur ized +le hem +Ter min +Ġcolour ful +ĠLin ear +ĠEll ie +G y +Ġman power +Ġj s +Ġem oji +ĠSHAR ES +_ . +0000 7 +Ġsophistic ation +Ġunders core +Ġpract ise +Ġbl ob +op ens +Uk raine +Ke eping +Y C +J R +ult imate +Cl aim +Ġautom obiles +99 3 +ste el +Ġpart ing +ĠL ank +... ? +Ġ38 5 +Ġremem brance +Ġe ased +Ġcov ari +ĠS ind +Effect ive +Ġdisse mination +ĠMo ose +ĠCl apper +br ates +App ly +Ġinv is +Ġwors ened +âĢĶ - +Ġlegisl ator +ĠL ol +ĠRow e +Ġdealers hip +um ar +id ences +Ġinvestig ates +Ġc ascade +Ġbid der +ĠB EN +Iron ically +Ġpres iding +Ġd ing +Ġcontrad icted +Ġshut s +ĠF IX +Ġ3 66 +Dist rict +Ġsin ful +ĠChar isma +o ops +Ġtot ality +Ġrest itution +ĠOpt imus +ĠD ah +Ġcl ueless +urn ed +Ġnut rit +Ġland owners +Ġfl ushed +Ġbroad en +m ie +Ġprint ln +Ġn ig +ĠCorp us +J en +Ġprot o +ĠWik imedia +ĠPal o +C OR +Ġstory lines +Ġevangel icals +ĠDar rell +Ġrot or +ĠH W +sk illed +ery l +Ġbe gg +ĠBl umenthal +Ġwe aving +Ġdown wards +ĠJack et +ĠANG EL +Te chnology +Ġes oteric +alde hyde +Ġfur iously +Ġforeign er +We ak +CH O +ĠH ound +Exper ience +ĠPlay station +ĠM IA +ĠU ng +cl oth +ag all +Ġcal ming +iz ens +St ruct +ĠW itches +ĠCeleb ration +Ġ........ ...... 
+pt roller +ĠTC U +Ġb unny +ãĥ į +ut orial +Ġup scale +ĠSt a +ĠCol ossus +Ġchlor ide +ĠZ ac +ĠRe asons +ĠBrook ings +ĠWH ITE +][ / +ĠL ose +9 05 +Ġunders ide +ern els +Ġv ape +do zen +upp et +ĠST OP +mat ical +ĠStat ements +hed dar +P AC +Custom er +Ġmem os +ĠP J +end ars +ĠLim its +l augh +Ġstabil ized +ĠALE C +Y A +Up grade +al am +Ġtechn o +Ġan ew +fore seen +Ġcolleg iate +ĠPy ro +ĠD ism +Ġfront line +Ġammon ia +I U +Qu ite +John ny +ass in +G OP +ĠSt yles +ĠSovere ign +acter ial +5 49 +ĠR IP +ĠL ists +Ġ3 64 +ĠRece p +s ocket +ĠByr d +ĠCand le +An cient +Ġappell ant +en forcement +ace a +ans ki +Ġold s +88 6 +Ġsl urs +Ġem pires +Ġbuck le +Ġalien ation +ĠAber deen +Ġunic orn +Ġoverr iding +ĠL X +pp a +Ġdesp ised +ĠB ugs +ĠB ST +S outhern +5 33 +Ġhall mark +ĠPost er +Ġstem med +Ġprincip als +ĠT ECH +ĠSand wich +It aly +Ġche esy +ĠSet TextColor +ĠProt ective +ĠC ohn +J O +apt op +Re ason +Lead er +ĠUnder stand +ĠFr idays +ĠContin uous +Ġcl ipping +ĠR ye +Ġber th +tim er +ann is +re act +Ġbuff alo +ĠPar as +Ġ6 55 +Ġpres ided +ĠSun rise +Ġve ts +Ġcl oves +ĠMcC ull +Stre ngth +G AN +Ġill iter +ĠPric ing +l é +Ġresist or +Ġbr un +ĠSuff olk +Ñ ĭ +ĠL iver +Re leased +Ġwhat s +8 60 +ĠMe asures +Ġden ouncing +ĠRy zen +Ġsou ven +Ġcareg ivers +ch ini +ĠScar lett +Ġt rough +Cong ratulations +Ġtax is +ĠTrad ition +j it +Ġtable top +Ġhither to +Ġdis information +off ensive +h ra +ĠDISTR ICT +Ġcompl icate +chen ko +ĠRecon struction +Ġpalp able +Ġa usp +Ġ4 28 +Ġshowc ases +ĠPublic ation +know ledge +inn on +4 19 +Ġretri eval +and ers +Ġref ute +Ġinqu ired +g ur +Ġneg ativity +Ġcons erve +Ġafter life +Ġpres upp +ĠGill espie +Ġm t +ĠD N +T ap +Ġper pend +ĠS my +does n +Ġsp illing +Ġhyp ers +K ate +® , +ke pt +ĠP owered +Ġj a +ĠK lux +ard e +ab an +Ġ4 44 +Ġflatt ened +ĠImprove ments +urg a +ĠK und +Ġins cribed +Ġfac ult +Ġunpre pared +ĠCons umers +Ġsatisf ies +Ġpul monary +Ġinf iltration +Ġex ternally +Ġcongrat ulations +ag han +Ġair liner +Ġfl ung +Ġfly ers +G D +Ġsnipp ets +Ġrec ursive +Ġmaster ing +L ex +Ġovert ly +v g +Ġluck ily +Ġenc ro +ĠLanc et +ĠAbyss al +function al +Ġs ow +Ġsqu id +Ġnar ration +Ġn aughty +ĠHon our +ĠSpart ans +Ġsh atter +ĠTac oma +ĠCal ories +ĠR aces +Sub mit +Ġpurpose fully +w av +ĠY ok +F est +ĠG err +Met ro +Ġit iner +f amous +Ġ" { +in line +was her +Iss ue +ĠCL IENT +oz o +Vers ions +7 25 +ĠGl ock +Ġshield ed +ĠPC R +ENC Y +ĠWe ld +ĠSim pl +Ġredirect ed +ĠK ham +Ġ( > +Ġlab ou +Ġdi apers +ss l +Ġcell ar +organ isms +ore sc +ĠBer ks +did n +Sh ipping +C hest +Ġund one +Ġmillion aire +Ġc ords +ĠYoung er +appropri ately +Ġsequ els +u ve +ant icipated +Ġle wd +ĠSh irt +ĠDmit ry +V eter +Ġsl aying +ĠY ar +Ġcompl ication +I owa +ĠEric a +ĠBL M +g irlfriend +b odied +6 26 +19 63 +Ġintermedi ary +Ġcons olation +M ask +ĠSi em +ow an +Beg inning +Ġfix me +Ġculmin ated +Ġcon duc +ĠVolunte er +Ġpos itional +Ġgre ets +ĠDefin itions +Ġthink er +Ġingen uity +Ġfresh men +ĠMom ents +Ġ35 7 +ate urs +ĠFed Ex +s g +69 4 +Ġdwind ling +ĠBO X +sel age +Ġt mp +Ġst en +ĠS ut +Ġneighbourhood s +Ġclass mate +f ledged +Ġleft ists +Ġclim ates +ATH ER +ĠScy the +ul iffe +Ġs ag +Ġho pped +ĠF t +ĠE ck +ĠC K +ĠDo omsday +k ids +Ġgas ped +Ġmon iker +ĠL od +ĠC FL +t ions +r ums +fol ios +Ġm d +Ġunc anny +Ġtrans ports +ĠLab rador +Ġrail ways +Ġappl iance +ĠCTR L +æ Ģ +Pop ulation +ĠConfeder acy +Ġunb earable +Ġdors al +ĠIn form +op ted +ĠK ILL +Mar x +Ġhypoc ritical +q us +ĠN umerous +ĠGeorg ian +ĠAmbro se +ĠL och +Ġgu bernatorial +ĠX eon +ĠSupp orts +ens er +ee ly +ĠAven ger +19 65 +Ar my +Ġju xtap +Ġcho pping 
+ĠSpl ash +ĠS ustainable +ĠFin ch +Ġ18 61 +ict ive +at meal +ĠG ohan +Ġlights aber +ĠG PA +ug u +ĠRE PL +vari able +Ġher pes +Ġdesert s +ac iously +Ġsitu ational +week ly +ob l +Ġtext ile +ĠCorn wall +Ġcontrace ptives +ĠA ke +] - +ä¹ ĭ +: , +ĠW em +ĠB ihar +Ġ' . +Ġbe re +Ġanal ogue +ĠCook ies +Ġtake off +Whe el +Ġmaj estic +Ġcomm uting +0 23 +ĠCor pse +ass ment +min i +Ġgor illa +ĠAl as +ere e +Ġacquaint ances +ĠAd vantage +Ġspirit ually +Ġey ed +pm wiki +ĠE nder +Ġtrans lucent +Ġnight time +ĠIM AGES +5 45 +ĠK amp +ĠFre ak +Ġ ig +Port land +4 32 +ĠM ata +Ġmar ines +Ġh ors +ater asu +ĠAtt ribution +Ġ-------- - +Ġk ins +ĠBEL OW +++ + +Ġre eling +ol ed +Ġcl utter +ĠRel ative +Ġ4 27 +B US +Ġa vert +ĠChe ong +ĠA ble +ĠPry or +Develop er +Ġen cyclopedia +ĠUSA F +ĠG arry +Sp ain +Bl ocks +Ġexp osition +ĠGamer Gate +W OR +Ġstockp ile +Ġclot hed +ĠT one +ĠR ue +t umblr +Ġtreacher ous +Ġf rying +Ñ Į +ĠS ph +Ġrest raints +Ġemb odies +ĠG es +S afety +Ġnegoti ators +min ing +ĠAppalach ian +L OS +ĠJenn a +Ġpass ers +ç ĭ +sn ap +Ġshort en +creat or +Ġinn umerable +uther land +67 4 +ĠW OM +ĠAs cend +ĠArm ory +ĠTrans action +K ick +Ġsuit case +day Name +Ġwaste ful +mar riage +ĠMcC abe +ite ch +ĠO ss +Cl osure +ĠTreasure r +Ġindec ent +ĠD ull +Ġresid ences +19 59 +ĠS ettlement +Ham ilton +Ġself ies +ĠRank ing +ĠBark ley +ĠB ore +ĠW CS +ĠMar itime +ĠH uh +ĠForest ry +Ġcultiv ating +ĠBall ard +Ġg arrison +ĠSD L +9 30 +Ġnas cent +Ġirresist ible +Ġaw fully +\/ \/ +Ġequ ate +Ġanthrop ology +ĠSylv ia +Ġintest ine +Ġinnoc uous +cess ive +ag ra +ĠMet roid +G rant +8 55 +ģ ĸ +Ġ" _ +ãĥĥ ãĥī +Ġappra isal +ĠFred dy +04 6 +Ġ40 6 +Ġ18 30 +Ġd ocking +St atic +Ġp ont +ĠVolt age +ĠSt ead +ĠMort gage +ĠJon ah +Y L +CLASS IFIED +Ġas bestos +nik ov +Ġcoll agen +ĠOrb ital +P ocket +7 99 +Ġhy brids +inc hes +Ġinv oice +und y +Ġinequ alities +T rend +w ashed +B ALL +Ġluc id +ĠComment ary +Ġw itty +Br andon +Ġbru ising +Ġ6 20 +es cent +box ing +P OL +Ġ3 78 +R ect +Ġlic ences +ĠMcG ee +p ressed +D anny +Ġj ammed +ord inate +Ġle th +Ġdistingu ishes +ĠYam aha +IL S +ĠH ume +ĠC ategories +Rober ts +Ch art +Ġbeet le +ĠGra veyard +Ġ($ ) +o ÄŁ +Ġtw ilight +are lla +á ½ +Ġbooth s +ĠH HS +ĠFeld man +Ġexcav ation +Ġphilosoph ies +at ography +ĠGar age +te chnology +Ġunfor gettable +Ġver ifying +Ġsubord inates +E ls +Ġne b +G aming +EN A +ĠAchieve ment +it ters +ĠG abe +Ġd umps +for cer +Ġpo ignant +ĠM BA +ĠHe idi +ime i +Ġm ages +Ġliber ate +Ġcircum cised +ĠMer maid +ĠMat th +t ogether +ĠW ichita +Ġstore front +ĠAd in +V II +Four th +Ġexplore rs +W ER +Not able +Bro ok +m ens +F aith +-------- - +ĠJ ou +¬ ¼ +Ġpine apple +Ġam alg +el n +ark able +ĠãĤµ ãĥ¼ãĥĨãĤ£ +ĠãĤµãĥ¼ãĥĨãĤ£ ãĥ¯ãĥ³ +Ġov arian +ĠE choes +Ġhairc ut +Ġp av +Ġch illed +anas ia +Ġsty led +Ġd ab +ni per +Ġminister ial +ĠD UP +T an +Ġsul ph +ĠD eter +ĠBo hem +od an +Ġeduc ator +â ĵĺ +sp ir +Ch icken +ĠE leanor +Ġqu i +Ġheav iest +Ġgrasp ed +U RA +Ġcro oked +Jess ica +pro blem +Ġpred etermined +Ġman iac +Ġbreath s +ĠLauder dale +Ġh obbies +y z +Cr ime +Ġcharism a +d L +Ġle aping +Ġk ittens +Ang elo +ĠJ ACK +ĠSu zanne +Ġhal ting +ENT ION +Ġswall owing +ĠEarthqu ake +Ġeight eenth +ĠN IC +ĠIN F +ĠCons cious +Ġparticular s +circ le +7 40 +Ġbene volent +Ġ7 47 +Ġ4 90 +Ġr undown +ĠVal erie +ĠB UR +Ġcivil isation +ĠS chn +W B +ot ide +intern ational +Ġj ohn +Ġ19 02 +Ġpe anuts +Ġflav ored +k us +Ġro ared +Ġcut off +é £ +Ġorn ament +Ġarchitect ures +Ġ3 69 +ol or +ĠWild e +ĠC RC +ĠAdjust ed +Ġprov oking +land ish +Ġrational ity +Ġjust ifies +Ġdisp el +Ġa meric +ĠPol es +Ø © +Ġen vis +ĠD oodle 
+ä½ ¿ +igs aw +auld ron +Techn ical +T een +up hem +ĠX iang +Ġdetract ors +ĠZ i +ĠJournal ists +Ġconduc ive +ĠVolunte ers +Ġs d +Know ing +Ġtrans missions +ĠPL AN +ĠL IB +Ġall uded +Ġob e +Ġd ope +ĠGold stein +Ġwavelength s +ĠDest ination +nd a +ug i +Ġattent ive +ĠLe an +ral tar +Ġman g +mb uds +ak ings +b ender +Ġacc ol +Ġcraw led +N OW +Min nesota +Ġflour ished +ĠZ up +ĠSuper visor +ĠOliv ier +Ex cellent +Ġwid en +D one +Ġw ig +Ġmiscon ceptions +Cor p +W an +Ġvener able +ĠNot ably +ĠKling on +an imate +Bo ost +ĠS AY +miss ing +ibli ography +mel on +Ġpay day +Ø ³ +bo le +Ġve iled +ĠAl phabet +It alian +Ġever lasting +ĠR IS +ĠC ree +rom pt +Ġh ating +Ġgrin ning +Ġge ographically +OS H +Ġwe eping +ĠÂłĠÂłĠÂłĠÂł ĠÂłĠÂłĠÂłĠÂł +Ġimpe cc +Let ter +Ġblo ated +PL A +ĠFe in +Ġper sever +Th under +Ġa ur +ĠR L +Ġpit falls +âĸ º +Ġpredomin ant +Ġ5 25 +7 18 +AP E +7 14 +Ġfarm land +ĠQ iao +Ġv iolet +ĠBah amas +Ġinflic ting +ĠE fficiency +Ġhome brew +Ġundert ook +Ġcur ly +ĠHard ing +man ia +59 6 +Ġtem pered +Ġhar rowing +ĠP ledge +ĠFranken stein +è ª +M otion +Ġpredict ably +ĠExpl osion +oc using +er d +col o +FF ER +Ġback field +ĠV IDE +ue bl +N arr +ĠArg ument +Ġgen omic +Ġbout ique +Ġbatt ed +ĠB inary +Ġg amb +ĠRh ythm +67 3 +Ġa float +ĠOlymp ia +Y ING +Ġend if +is in +Ġwin ters +Ġsc attering +I v +D istance +Ġtr u +ĠCom fort +Ġne xus +Ġair flow +ĠByz antine +p ayers +con i +ĠB etsy +D eal +ĠN ug +ĠContin ent +red ibly +Ġoptim izing +al beit +Ġec static +ĠPro to +ç · +iv ot +âĸ Ħ +em p +rou nder +Ġcl out +ĠI ST +66 3 +ĠDoll ars +ĠD AC +Ġsubsc ribed +Ġrehears al +Ġam ps +ĠSh ang +es m +Ġspr inkle +Ġassail ant +ĠO o +ĠCoin base +T act +Ġret ina +Ġn uns +R ON +att o +Ġj ug +ĠSV G +Ġb ikini +ĠFI LE +ĠFound ers +ep ort +ĠK P +Ġrest ores +ĠTh ick +Ġash ore +Ġappro vals +R ender +M AG +G raham +ĠCort ana +ãĥ³ ãĤ¸ +ss h +or ians +ars ity +ĠInsp ired +u pper +Ġsign alling +Ġreb uke +Ġfl ares +Ġdownt ime +Stud ies +Ġstagn ation +ĠSequ ence +Ġgr unt +Ġass ures +ĠPL A +59 2 +Ġintra ven +d epend +Sus an +ĠManz iel +Man ia +Cont ract +Ġsl ams +Ġcult ured +Ġcred itor +L IST +ĠH UM +ĠChatt anooga +serv ed +Ġclo aked +ĠF TP +p owder +ĠSt ella +uct ive +Ġcheap ly +ĠMU CH +ĠGalile o +Ġsu ites +spe ech +Ġdeliber ations +ĠCh ips +« ĺ +Bal ance +ĠWyn ne +ĠAk ron +Ass et +Ġhon oured +Ġed ged +Like wise +anim ous +ĠW age +ĠEz ek +ad vertisement +ĠRT X +ĠM AD +Ġmigr ating +ĠS QU +Ġ4 75 +Ed ited +Ġshorth and +ĠBas ics +Ġcro tch +ĠEV EN +Ġv m +effic iency +Ġcal ves +ĠF rie +ĠBrill iant +Ġstri kers +Ġrepent ance +Ġarter ies +r l +B ed +h ap +Ġcrypt ography +ĠSab res +Ġ4 14 +vi ks +ih ara +aps es +T alking +Ġintertw ined +Ġdoc ks +Ġalle le +ĠArt ifact +ĠH IM +t orn +ç ķ +Ġop acity +ĠE ly +os uke +Ġn ipple +Ġhand written +ĠV K +ĠChamber lain +ĠLa os +ig raph +g row +Ġtr illions +Ġdescend ant +ĠSail or +as uring +Ġce ilings +ĠWare house +f lying +ĠGl ow +Ġn ont +Ġmiscar riage +Ġrig s +Ġmin istries +Ġelabor ated +Ġdel usional +ĠHum ane +Ġ3 79 +n ets +Ġblack out +add ers +Ġn p +ĠT ire +ro sc +Ġsub div +Ġlink age +Ġchron ological +ĠHER O +Ġres ettlement +ĠVin yl +Ġpast oral +ĠMob il +ĠBar bar +Co oldown +ĠF ritz +c riminal +re pe +Ġbell ig +ĠBre ed +Ġ4 18 +Ġsem blance +ij k +Ġcur tail +Ġclin ch +cont ained +ĠProm pt +ast on +Ġw i +Ġpursu its +5 15 +ĠGl oss +Ġfl ips +Ġcoup ons +Ġcl oning +ĠLike ly +Rem oved +ĠQu artz +r ices +ĠSpe ars +Ġp ious +Ġdep reciation +ĠD are +oun ces +am az +O nt +Ġp innacle +d ocker +0 26 +ĠW yr +ĠPro per +Ë Ī +n il +By tes +Ġseek er +t rial +Ġunf olds +ĠMar se +Ġextravag ant +ĠSurviv ors +RED ACTED +ĠSpeed 
way +ĠCra igslist +sub mit +ĠGener ations +Ġup holding +Ġblood stream +ĠMiss ions +ĠL awn +Ġlim bo +ene i +H uh +ĠWild cats +pre p +ĠMark us +ĠFor bidden +rit ic +IN O +Ġexhib iting +requ ent +ch uk +Ġhabit ual +ĠComp atibility +Dr ag +RIP T +uj ah +GR OUND +Ġdelinqu ent +Ġburn er +Ġcontempor aries +Ġgimm ick +load s +Ġno zzle +p odcast +ĠW ak +ĠStat en +ĠK uh +ãģ ĵ +inter rupted +Ġinv incible +ĠBurn ett +cig arette +ĠPeb ble +ĠTem porary +ĠMar ino +58 2 +Ġwast eland +ident ly +T x +Ġr ite +ĠPan asonic +ĠM iddles +ĠHort on +ae us +Ġc uring +Ġm ats +Ġadj ourn +Ġfears ome +pe z +bo ats +Ġpro pell +Ġconflic ted +ĠAng er +Ġinsurg ent +K arl +Ġco ales +Ġsouth western +Ġdis su +ĠO vert +******** **** +Ġbox ed +ĠBr une +aa a +Ġgard ening +ĠEng el +tr acks +Ġpur ified +Ġplace holder +ĠL ikes +Ġd an +G ab +Ġe ct +ĠF aw +ĠEl iot +Ġ' , +otrop ic +ĠRu in +hed on +Ġca ul +Ġa ft +ĠCad illac +gh a +ass ian +ud eb +ĠT ick +Ġadjust s +AR GET +5 37 +isc he +ant y +ĠFried rich +ĠBl izz +ĠA OL +Camp aign +Ġmamm al +ĠVe il +ĠK ev +ĠMaur it +ĠDam ien +N ation +E astern +Ġ{ : +Ġ= ================================ +Ġstereotyp ical +Ġatt ic +ĠCy borg +requ ire +Ġaward ing +ĠPap ua +bt n +b ent +B oo +Ġ( = +ĠX ander +ĠSomers et +Ġcatch y +Ġcert ify +STR UCT +Ġit al +Ġt ides +ĠBr ands +G ray +comp etitive +Ġcur ator +ĠD G +omin ium +ĠGM Os +ci ating +ĠCarm en +ow ard +Balt imore +Ġr gb +C u +Ġwip es +spe ll +IT NESS +Ġsummar izes +ĠRe vis +Ġwhistlebl owers +ĠBre ach +Ġcro chet +k os +ews ki +Ġrep et +Ġcrim son +ĠKar achi +read able +dim ension +ĠI gor +ild ed +ĠZ ed +ĠKe ane +ĠCos metic +DE P +Ġretreat ing +ĠU A +ens ical +Ġd usk +ĠDick ens +Ġaren as +ĠPass age +level s +Ġcur v +P ope +Ġch ores +ĠEl ise +ĠComp ass +b ub +Ġmamm alian +ĠSans krit +ĠAN C +ĠCr ack +Q ual +L aun +amp unk +Ġlearn ers +Ġglam orous +Ġfur the +erm ott +c and +Gener ic +Ġnarr ated +Ġdisorder ly +ĠTrans actions +ĠDet ention +ĠR oku +Ä į +Ġunder statement +ĠS aur +ĠRodrig o +ĠAS AP +S in +Ġre joice +Method s +Ġelectro de +Ġworsh ipped +Ġid i +ĠPhys icians +Ġpop up +Ġde ft +ĠRem oval +ĠBu enos +ver bs +Ġfun k +ush a +rict ion +ore a +ĠBang alore +ĠKen obi +zz i +Ġnorm ative +Ġgobl ins +Ġcaf es +ĠUN CLASSIFIED +ĠF ired +S IGN +Ġs clerosis +ĠV oter +ĠSon ny +ĠExt end +ĠEV s +Ar senal +Ġp si +Ġwid est +ĠT us +Ġlo oms +Ġjust ifying +ĠGr anger +è ¯ +Ref er +58 3 +Ġflour ishing +ab re +Ġr ave +ĠCont ra +Ġ18 98 +Add s +Ġf ul +ĠCo oke +some one += # +67 1 +Ġy ak +Ġar te +ĠMis cellaneous +ĠDet ection +ĠCl ancy +â ģ +ass ies +Ġval iant +ĠFemin ist +cor ruption +V el +P ear +Ġsucc inct +Ġquick est +k w +Ġsp itting +ĠL ibraries +åħ ī +ant z +D ad +ĠSpec ifications +rup ulous +and r +RES ULTS +Ġsnow ball +Ġpred is +ĠB axter +ĠNurs ing +ĠCh aff +s we +Ġout age +Ġnest ing +Ġnotor iety +tr igger +on ite +j on +Ġf ou +ook ed +ĠCelebr ity +re ality +Ġfat ig +Ġhug ging +Ġbother s +ĠPan zer +ĠCh andra +fig ured +Ġvol ts +ĠCloud s +Ġfee ble +ĠCur ve +ĠAs us +78 6 +abs or +ĠV ICE +ĠH ess +Ġmanufact ures +Ġgri zz +ĠPower ful +ac id +Ġsub sections +ĠKrug man +ĠAl ps +is u +Ġsequ est +ĠUlt ron +ĠT inker +ĠGo ose +Ġmism atch +Att orney +Ġmorph ology +ĠSix ers +ut tered +ĠE LECT +gr an +Rus sell +ĠG SL +Ġfort night +Ġ. 
) +Ġapost le +pr one +el ist +Unt itled +ĠIm plementation +ist ors +Ġtank er +Ġpl ush +Ġattend ants +ĠT ik +ĠGreen wich +ĠY on +ĠSP L +cell s +unt led +S olution +ĠQu é +Ġvac ated +Ġupt ick +ĠMer idian +æ ĥ +ĠDr ill +9 25 +58 4 +Ġrenov ated +ĠKub rick +zy k +Ġl ousy +pp el +ohyd rate +ĠI zzy +lesi astical +CC C +ĠAj ax +Ġad apters +ĠPetra eus +Ġaffirm ation +ĠST OR +le ms +ad oes +ĠConstantin ople +Ġp onies +Ġl ighthouse +Ġadherent s +ĠBre es +omorph ic +Fight ing +Ġpl aster +ĠP VC +ĠOb st +Ġdear ly +ĠTo oth +icks on +Ġsh aming +P lex +A gg +ĠâĢ¦ " +Ġsub reddits +Ġpige on +ĠResident ial +ĠPass ing +Ġl um +ĠP ension +Ġpessim istic +Ġ4 32 +z inski +c ade +0 75 +Ġapolog ised +iy ah +Put ting +Ġgloom y +ĠLy me +=-=-=-=- =-=-=-=- +ĠT ome +ĠPsych iatric +ĠH IT +c ms +ap olog +Ġbreak er +Ġdeep en +Ġtheor ist +ĠHigh lands +Ġb aker +Ġst aples +Ġinterf ered +ĠAb ortion +jo ined +ch u +Ġform ulate +Ġvacc inations +Ġban ter +phe us +Ġoutfield er +ĠM eter +Ġ# #### +Ġ18 95 +Ġnarrow ing +ĠST ORY +f p +ĠC ST +ign ore +Ġproclaim ing +ĠR U +ĠB ALL +yn a +65 3 +Ġpos it +P RE +59 4 +ĠRegist rar +ĠPil grim +ic io +Ġpre tt +Ġlif eless +Ġ__ _ +Ne igh +ĠCh urches +orn o +Ġor cs +Ġkind red +ĠAud it +Ġmillenn ial +ĠPers ia +g ravity +ĠDis ability +ĠD ARK +W s +od on +Ġgrand daughter +ĠBro oke +ĠA DA +ER A +Ġpick ups +ĠWil kinson +ĠSh ards +ĠN K +Ġexp el +ĠKis lyak +Ġj argon +Ġpolar ized +ian e +Pub lisher +Ġreb utt +Ġapprehens ion +ĠK essler +Ġpr ism +F UL +19 64 +ĠL oll +ä ¿ +le thal +Å Ł +Ġg hetto +Ġb oulder +ĠSlow ly +ĠOsc ars +ĠInst ruction +ĠUl tr +ĠM oe +N ich +ĠP ATH +( * +ĠRE LEASE +un ing +rou se +en eg +Ġre imb +ĠDet ected +Do S +Ġster ling +Ġaggreg ation +ĠLone ly +ĠAtt end +hig her +Ġairst rike +ks on +SE LECT +Ġdef lation +ĠHer rera +C ole +rit ch +Ġadvis able +F ax +Ġwork around +Ġp id +mort em +ers en +Ġtyp o +Ġal um +78 2 +ĠJam al +script s +Ġcapt ives +ĠPres ence +ĠLie berman +angel o +Ġalcohol ism +ass i +Ġrec ite +Ġgap ing +Ġbask ets +ĠG ou +Brow ser +ne au +Ġcorrect ive +und a +sc oring +ĠX D +Ġfil ament +Ġdeep ening +ĠStain less +Int eger +Ġbu ggy +Ġten ancy +ĠMub arak +Ġt uple +ĠD roid +ĠS itting +Ġforfe it +ĠRasm ussen +ixt ies +es i +ĠKim mel +Ġmetic ulously +Ġap opt +ĠS eller +08 8 +ec ake +hem atically +T N +Ġmind less +Ġdig s +ĠAcc ord +ons ense +em ing +br ace +Ġe Book +ĠDist ribut +ĠInvest ments +w t +] ), +beh avior +56 3 +Ġbl inding +ĠPro testers +top ia +Ġreb orn +ĠKel vin +ĠDo ver +ĠD airy +ĠOut s +Ġ[ / +Ï Ģ +b p +ĠVan ity +ĠRec ap +ĠHOU SE +ĠF ACE +Ġ4 22 +69 2 +ĠAnt ioch +cook ed +Ġcoll ide +Ġa pr +Ġsle eper +ĠJar vis +Ġalternative ly +ĠLe aves +ĠM aw +Ġantiqu ity +ĠAdin ida +Ġab user +Poké mon +Ġass orted +ĠRev ision +ĠP iano +ĠG ideon +O cean +Ġsal on +Ġbust ling +ogn itive +ĠRah man +Ġwa iter +Ġpres ets +ĠO sh +ĠG HC +oper ator +Ġrept iles +Ġ4 13 +ĠG arr +ĠCh ak +Ġhas hes +Ġfail ings +Ġfolk lore +Ġab l +ĠC ena +ĠMac Arthur +ĠCOUR T +Ġperipher y +app ers +Ġreck oned +ĠInf lu +ĠC ET +Ġ3 72 +ĠDefin itive +ass ault +4 21 +Ġreservoir s +Ġd ives +ĠCo il +DA Q +Ġvivid ly +ĠR J +ĠBel lev +Ġec lectic +ĠShow down +ĠK M +ip ed +reet ings +ĠAs uka +L iberal +ĠÏ Ħ +Ġbystand ers +ĠGood win +uk ong +S it +ĠT rem +Ġcrim inally +ĠCirc us +ch rome +88 7 +Ġnan op +ĠOb i +ĠL OW +o gh +ĠAuth ors +ob yl +Ur ban +Ġt i +ĠWe ir +t rap +ag y +Ġparent heses +Ġout numbered +Ġcounter productive +ĠTob ias +ub is +P arser +ST AR +Ġsyn aptic +ĠG ears +Ġh iber +Ġdebunk ed +Ġex alted +aw atts +H OU +Ch urch +ĠPix ie +ĠU ri +ĠForm ation +ĠPred iction +C EO +Ġthro tt +ĠBrit ann +ĠMad agascar +ë ĭ +Ġbill boards 
+ĠRPG s +ĠBe es +complete ly +F IL +Ġdoes nt +ĠGreen berg +re ys +Ġsl ing +Ġempt ied +ĠPix ar +ĠDh arma +l uck +ingu ished +Ġend ot +Ġbab ys +05 9 +che st +r ats +Ġr idden +Ġbeet les +Ġillum inating +Ġfict itious +ĠProv incial +Ġ7 68 +Ġshe pherd +ĠR ender +Ġ18 96 +C rew +Ġmold ed +ĠXia omi +ĠSp iral +Ġdel im +Ġorgan ising +Ġho ops +ĠBe i +z hen +Ġfuck in +Ġdec ad +Ġun biased +am my +sw ing +Ġsmugg led +Ġk ios +ĠP ERSON +ĠInquis itor +Ġsnow y +Ġscrap ing +ĠBurg ess +P tr +ag ame +R W +Ġdro id +ĠL ys +ĠCass andra +Jac ob +Ġ35 4 +Ġpast ure +Ġfr anc +ĠScot ch +ĠEnd s +ĠI GF +def inition +Ġhyster ical +ĠBrown e +77 1 +Ġmobil ization +æ ķ +iqu eness +Th or +Ġspear headed +Ġembro iled +Ġconject ure +jud icial +Ch oice +Ġpaper back +P ir +Ġrec overs +ĠSur ge +ĠSh ogun +ĠPed iatrics +ãģ ł +Ġsweep s +ĠLabor atories +ĠP acks +al us +add in +Ġhead lights +g ra +Ev idence +COL OR +Ad min +Ĭ ± +Ġconco ct +s ufficient +Ġun marked +Ġrich ness +Ġdiss ertation +Ġseason ing +Ġg ib +ĠM ages +un ctions +ĠN id +che at +ĠTM Z +c itizens +ĠCatholic ism +n b +Ġdisemb ark +ĠPROG RAM +a ques +Ty ler +Or g +ĠSl ay +ĠN ero +ĠTown send +IN TON +te le +Ġmes mer +9 01 +Ġfire ball +ev idence +aff iliated +ĠFrench man +ĠAugust a +0 21 +Ġs led +Ġre used +ĠImmun ity +Ġwrest le +assemb led +Mar ia +Ġgun shots +ĠBarb ie +Ġcannabin oids +ĠTo ast +ĠK inder +IR D +Ġre juven +Ġg ore +Ġrupt ure +Ġbre aching +ĠCart oon +Ġ4 55 +ĠPale o +6 14 +Ġspe ars +ĠAm es +ab us +Mad ison +GR OUP +Ġab orted +y ah +Ġfel on +Ġcaus ation +Ġprep aid +Ġp itted +op lan +ĠShel ley +ĠRus so +ĠP agan +Ġwill fully +ĠCan aver +und rum +ĠSal ary +ĠAr paio +read er +ĠR ational +ĠOver se +ĠCa uses +Ġ* . +Ġw ob +Ke ith +ĠCons ent +man ac +77 3 +6 23 +Ġfate ful +et imes +Ġspir ited +ĠD ys +Ġhe gemony +Ġboy cot +ĠEn rique +em outh +Ġtim elines +ĠSah ara +ĠRel ax +ĠQuin cy +ĠLess ons +ĠE QU +SE A +N K +ĠCost co +Incre ase +Ġmotiv ating +ĠCh ong +am aru +ĠDiv ide +Ġped igree +ĠTasman ia +ĠPrel ude +L as +9 40 +57 4 +Ġch au +ĠSp iegel +un ic +-- > +ĠPhil ips +ĠKaf ka +Ġuphe aval +Ġsent imental +Ġsa x +ĠAk ira +ser ial +Mat rix +Ġelect ing +Ġcomment er +ĠNeb ula +ple ts +ĠNad u +ĠAd ren +Ġen shr +ĠR AND +fin ancial +ĠCly de +uther ford +Ġsign age +Ġde line +Ġphosph ate +rovers ial +f ascist +ĠV all +ĠBeth lehem +Ġfor s +Ġeng lish +S olid +N ature +Ġv a +ĠGu ests +Ġtant al +Ġauto immune +;;;;;;;; ;;;; +ĠTot ally +ĠO v +Ġdef ences +ĠCoc onut +Ġtranqu il +Ġpl oy +Ġflav ours +ĠFl ask +ãĤ¨ ãĥ« +ĠWest on +ĠVol vo +8 70 +Ġmicro phones +ver bal +R PG +Ġi ii +; } +0 28 +Ġhead lined +Ġprim ed +Ġho ard +ĠSh ad +ĠEN TER +Ġtri angular +Ġcap it +l ik +ĠAn cients +Ġl ash +Ġconv ol +Ġcolon el +en emy +G ra +Ġpub s +ut ters +Ġassign s +ĠPen et +ĠMon strous +ĠBow en +il ver +H aunted +ĠD ing +start ed +pl in +Ġcontamin ants +ĠDO E +ff en +ĠTechn ician +R y +Ġrob bers +Ġhot line +ĠGuard iola +ĠKau fman +row er +ĠDres den +ĠAl pine +E lf +Ġf mt +ĠS ard +urs es +g pu +Un ix +Ġunequiv ocally +ĠCitizens hip +qu ad +m ire +ĠS weeney +B attery +6 15 +Ġpanc akes +Ġo ats +M aps +ĠCont rast +mbuds man +ĠE PS +Ġsub committee +Ġsour cing +Ġs izing +ĠBuff er +ĠMand atory +Ġmoder ates +ĠPattern s +ĠCh ocobo +ĠZ an +ĠSTAT ES +ĠJud ging +ĠIn her +* : +Ġb il +ĠY en +Ġexh ilar +oll ower +z ers +Ġsn ug +max imum +Ġdesp icable +ĠP ACK +ĠAn nex +Ġsarcast ic +Ġlate x +Ġt amp +ĠS ao +b ah +ĠRe verend +ĠChin atown +ĠA UT +d ocumented +ĠGA BA +ĠCan aan +ĠÙ ħ +Ġgovern s +pre v +E sc +ĠEst imates +OS P +Ġendeav our +ĠCl osing +omet ime +every one +Ġwor sen +Ġsc anners +Ġdev iations +ĠRobot ics +ĠCom pton 
+Ġsorce rer +Ġend ogenous +Ġem ulation +ĠPier cing +ĠA ph +ĠS ocket +Ġb ould +ĠO U +ĠBorder lands +Ġ18 63 +G ordon +ĠW TO +Ġrestrict s +Ġmosa ic +Ġmel odies +ç Ħ +T ar +Ġdis son +ĠProv ides +Ġ ...... +b ek +F IX +Ġbro om +ans hip +Do ctors +Ġner ds +ĠReg ions +na issance +Ġmet e +Ġcre pt +pl ings +Ġgirlfriend s +kn it +ig ent +ow e +Ġus hered +ĠB az +M obil +4 34 +ĠPres ents +orig in +Ġins omnia +ĠA ux +4 39 +ĠCh ili +irs ch +G AME +Ġgest ation +alg ia +rom ising +$ , +c row +ĠIn spection +at omic +Rel ations +J OHN +rom an +ĠClock work +ĠBak r +m one +M ET +Ġthirst y +Ġb c +Ġfacult ies +R um +Ġnu ance +ĠD arius +ple ting +fter s +etch up +Reg istration +ĠK E +R ah +Ġpref erential +ĠL ash +ĠH H +Val id +ĠN AV +Ġstar ve +ĠG ong +z ynski +ĠAct ress +Ġw ik +Ġun accompanied +lv l +Br ide +AD S +ĠCommand o +ĠVaugh n +Wal let +Ġho pping +ĠV ie +Ġcave ats +Ġal as +if led +ab use +66 1 +Ġib n +Ġg ul +Ġrob bing +t il +IL A +Ġmit igating +Ġapt ly +Ġty rant +Ġmid day +ĠGil more +ĠDe cker +Ġ§ § +part ial +Ex actly +Ġphen otype +Ġ[+ ] +ĠP lex +ĠI ps +vers ions +Ġe book +Ġch ic +g ross +":" "},{" +ĠSur prisingly +M organ +Ġresid ues +ĠConf ederation +in feld +Ġl yr +mod erate +Ġperpend icular +V K +Ġsynchron ized +Ġrefres hed +Ġad ore +ĠTor ment +ol ina +Ġ26 00 +Item Tracker +Ġp ies +ĠF AT +ĠR HP +0 48 +ĠRES P +ĠB J +all ows +P and +Ġunw elcome +ĠV oc +ĠBast ard +ĠO W +ĠL AR +ĠHeal er +Environment al +ĠKen yan +ĠTr ance +ĠP ats +Ġali ases +ĠGar field +Ġcampaign er +Ġadvance ments +ĠOkin awa +ĠC oh +ows ky +Ġstar ved +Ġsize able +Ġ: -) +Ġm RNA +Ġsusp ensions +ist ar +Scot land +Pr in +-------------------------------- ---------------- +Ġ50 2 +Ġteasp oons +Ġ10 50 +Ġcoerc ive +ĠMason ic +edd ed +ĠPass enger +Ġl att +Ġbr aces +ĠSt eal +ĠNY T +ĠK ats +ĠCel est +ae z +T u +ĠCoul ter +ðŁ ĺ +Fl ickr +ĠWil mington +ith s +++ ; +Ġv ending +Ġneg ro +ĠPh i +ĠYellow stone +Call back +Ġsh ampoo +ĠSh ades +w at +Ġsuper human +Ġridic uled +Ġhol iest +om bo +Ġintern s +Ġh one +ĠPar agu +UR I +Ġd angling +ãĤ » +so v +ict ional +av ailability +Ġrev ocation +Ġd ow +in ic +ĠTHE IR +Ġis o +Ġout ings +ĠLeth al +Ġ) )) +Ġinacc ur +Ġout landish +Ġan us +let ico +id on +l ol +Ġun regulated +Ġsuccumb ed +Ġc uff +ĠWast eland +let al +Ġsub str +Ġcoff ers +Ġautom akers +ov i +ĠX ue +ĠDayton a +Ġjar ring +Ġf umes +Ġdisband ed +z ik +itt on +Ġstriking ly +Ġsp ores +Ad apter +.) 
: +ĠLynd on +ival ry +Ġor ally +Ġtumult uous +Ġdisple asure +Ġcon es +or rect +Ġappe ase +Ġder by +ĠTrip oli +ĠAl ess +Ġp oked +ĠGu ilty +v P +En ough +Ġorig inals +6 99 +Ġrabb i +Ġproverb ial +Ġpostp one +el ope +ĠMist y +Ġstaff ed +ĠUn employment +redit ary +Ġdilig ent +re comm +me asures +as in +8 25 +Ġpond s +Ġmm ol +ĠS AR +ĠC ARE +Ġ3 71 +Ġclen ched +ĠCors air +Ġcaric ature +z n +att ach +ĠSch ro +spe ak +p ainted +ĠS uc +ĠE NT +Ġcell ul +ĠP aid +di agn +WH ERE +Ġtext ed +B arn +Ġret racted +ĠRe ferred +S av +Ġup keep +Ġwork places +ĠTok ens +Ġampl ify +cl inical +Ġmult ic +mber g +Ġconvol uted +Reg ion +5 65 +ĠTop ic +Ġsn ail +Ġsal ine +Ġins urrection +ĠPet r +f orts +B AT +ĠNav ajo +Ġrud imentary +ĠLak sh +OND ON +Me asure +Ġtransform er +ĠGodd ard +Ġcoinc ides +ir in +R ex +ĠB ok +qu it +Ġshotgun s +Ġprolet arian +Ġsc orp +ĠAd a +5 14 +Ġsl ander +record ed +Ġemb ell +ris ome +Ġapolog izing +ĠMul cair +ĠGib raltar +Cl a +Ġall ot +ĠAtt ention +Ġ4 33 +le ave +Ġwh ine +ĠIss a +ĠFa ust +ĠBar ron +hen y +Ġvictim ized +J ews +Ġnurt uring +ett el +W inged +ĠSub tle +Ġflavor ful +ĠRep s +eng ed +call back +Ġdirection al +Ġcl asp +ĠDirect ions +plan et +icult ure +Hel per +ic ion +ac ia +Ġç ¥ŀ +Ġsur ges +Ġcan oe +ĠPrem iership +be en +Ġdef ied +ĠTro oper +Ġtrip od +Ġgas p +ĠE uph +ĠAd s +vern ight +high ly +R ole +Ġent angled +ĠZe it +6 18 +ĠRust y +Ġhaven s +ĠVaugh an +HA EL +ĠSER VICE +/ , +Ġstr icken +Ġdel usions +Ġb is +ĠH af +Ġgrat ification +Ġent icing +UN CH +Ad ams +ĠOL ED +ĠBeet le +Ġ18 99 +ĠSO FTWARE +ateg or +V L +ĠTot em +ĠG ators +AT URES +Ġimped ance +Reg istered +ĠC ary +ĠAer ial +on ne +en ium +Ġd red +ĠBe g +Ġconcurrent ly +Ġsuper power +ĠX an +j ew +imes ter +ĠDick inson +âĶ ģ +F la +Ġp ree +ĠRoll ins +© ¶æ +Ġden omination +ĠL ana +5 16 +Ġinc iting +sc ribed +j uries +ĠWond ers +app roximately +Ġsusp ending +Ġmountain ous +ĠL augh +oid al +N s +Det ect +) = +ĠL uthor +ĠSchwarz enegger +ĠMull er +ĠDev i +ec ycle +J ar +6 13 +ĠL ongh +B ah +ĠSP ORTS +n w +Ġref inement +Ġwater ways +Ġd iner +Bl ade +68 3 +F ac +Ġinitial s +Ġro g +Ġparan ormal +B UT +Ġ[ ( +ĠSw anson +ĠM esh +âĸ ¬ +Impro ve +ĠRad iation +ĠEst her +ĠE sk +ĠA ly +ik y +Ġir rad +ĠBuck ingham +Ġref ill +Ġ. 
_ +Re pe +CON CLUS +Ġdifferent iated +Ġchi rop +ĠAt kins +Pat tern +Ġexc ise +Ġcab al +N SA +ĠST A +ĠS IL +ĠPar aly +Ġr ye +ĠHow ell +ĠCount down +ness es +alys ed +Ġres ize +ãĤ ½ +Ġbudget ary +ĠStr as +w ang +Ġap iece +Ġprecinct s +Ġpe ach +Ġsky line +Ġ35 3 +pop ular +App earances +ĠMechan ics +ĠDev Online +S ullivan +Z en +Ġp u +op olis +5 44 +Ġde form +Ġcounter act +ĠL ange +Ġ4 17 +Con sole +77 4 +Ġnodd ing +Ġpopul ism +Ġhe p +Ġcoun selling +compl iance +U FF +Ġunden iably +Ġrail ing +ĠHor owitz +ĠSim one +ĠBung ie +Ġa k +ĠTal ks +x ff +fl ake +Cr ash +Ġsweat y +Ġban quet +ĠOFF IC +Ġinvent ive +Ġastron omer +ĠStam ford +ĠSc are +ĠGRE EN +olic ited +Ġr usher +Ġcent rist +ight ing +Ġsub class +Ġdis av +Ġdef und +ĠN anto +oci ate +m ast +Ġpac if +Ġm end +e ers +imm igration +ESS ION +Ġnumber ing +Ġlaugh able +ĠEnd ed +v iation +em ark +P itt +Ġmetic ulous +ĠL F +Ġcongrat ulated +ĠBir ch +Ġsway ed +Ġsemif inals +Ġhum ankind +m atter +ĠEqu ip +opa usal +S aid +ĠLay out +Ġvo icing +Ġth ug +Ġporn ographic +I PS +Ġmo aning +Ġgriev ance +Ġconf essions +esc al +TEXT URE +Aut hent +os aurus +P urchase +Ġreleg ation +al ter +ĠÂł Âł +Ġr iddled +Ġo gre +ĠLow ell +Occ up +E at +ĠHy der +ĠAdvis er +Com merce +H unt +ĠOr th +ĠComp etitive +ĠCL A +CD C +Ġsal ads +F le +Ġindustrial ized +` , +ĠO WN +Ġbec k +ĠPart icularly +oub t +Ġm M +ĠHuss ain +ĠChen nai +Ġ9 20 +Ġappoint ing +ĠCull en +,,,, ,,,, +Ġp ores +ver ified +Ġbi ochemical +em ate +Ġcoward ly +ĠHels inki +ĠEthiop ian +S OURCE +ER C +est ro +Ġbi otech +ĠS our +Ġbrew er +Bloom berg +Ġintens ify +Gl ass +an co +ĠF DR +gre SQL +ĠF ires +©¶æ ¥µ +ec o +100 1 +ĠHom eless +Ġinstant aneous +ĠH aste +ig el +D iamond +Ġp aving +Ġland fill +Ġd ads +h oun +: ] +Ġinc endiary +ĠLiving ston +ĠHil bert +ĠChe cks +st yles +in ators +ĠCl ive +ph rine +Ġchimpan zees +Ġp all +ĠJ M +ĠAad haar +ð Ŀ +Ġachie vable +dis abled +P ET +OOOO OOOO +M ot +Ġint angible +Ġbal let +ĠWe bs +ĠEst imated +Effect s +Ġb ailed +Josh ua +Ġturb ulence +Ġoccup ant +ĠDay light +Ġ36 1 +me et +Ġstat ically +Ġon look +Ġk i +il legal +Ġvel vet +Ġdehyd ration +Ġacqu ies +ĠRe z +ak ura +ĠU pton +at ro +Ġincomp rehensible +Ġback door +ĠRh ino +7 27 +Ġmath s +) + +Ġhe resy +Ġd f +ĠRoc he +ĠL ydia +Ġpanc reat +re ply +arre ll +Ġsolicit ation +Ġcirc adian +BI P +Ġfor ay +Ġcrypt ic +iz u +ime o +ĠTom ato +ĠH oms +ex amination +Ġqu arry +ĠVal iant +ĠJer icho +ĠIN CLUD +Ġ18 40 +5 19 +Ġres ists +Ġsnap shots +ĠSp ur +ĠAnt iqu +Log in +Ġbest selling +Ġant ic +ĠS utherland +ãĤ¢ ãĥ« +Ġ~ / +ĠP arm +è ĥ +P ages +int ensity +Ġimm obil +Ġ18 65 +zz o +Ġn ifty +Ġf entanyl +ĠPres ervation +op hen +Ġd arts +ĠD inosaur +po inters +ĠR ite +s uggest +aware ness +ĠSher idan +Ġst ances +Ġsor cery +Ġper jury +ĠNik ola +ie ver +Ġf iance +ĠJordan ian +ĠBall oon +Ġn ab +Ġk b +Ġhuman ities +ĠTan aka +hill ary +Ġconsult ancy +ĠZ ub +Ġrem ission +Ġconf id +CH Q +ĠF ug +Ġimpro vis +Y ep +/ _ +Ġunwilling ness +Ġport folios +05 5 +ĠInstruct or +aim an +Ġclaim ants +M bps +ĠBy e +re ceived +T weet +Ġind emn +ri z +am ara +N at +Ġeval uates +ĠL ur +ep ad +FO X +ĠTh ro +Ġrust y +Ġbed rock +ĠOp rah +J B +Ġmanip ulative +Ġwill ful +Ġrel apse +Ġext ant +The me +S ensor +ĠSt ability +go vern +Ġpo ppy +Ġkn ack +Ġins ulated +ĠT ile +ĠExt rem +Ġunt old +Ġconver ge +Ġref uel +ig roup +Ġdistort ions +Ġrav aged +Ġmechan ically +ĠRe illy +ĠN ose +ĠIncarn ation +ĠBeck y +abb ling +Ġt aco +Ġr ake +Ġmelanch oly +Ġillust rious +ĠDart mouth +Gu ide +ĠR azer +ĠBen z +Ult imate +ĠSur prise +Ġpage ant +off er +Who ever +Ġw iser +Ġchem ist +ĠHE LL +ĠBul 
k +Ġpl utonium +ĠCO VER +Ö ¼ +f ailed +Ġtire lessly +Ġinf ertility +ĠTr ident +ĠShow time +ĠC iv +V ice +requ ires +itt ance +Ġun controlled +interest ing +56 1 +Ġinnov ate +ateg ic +L ie +ĠS elling +U l +Ġsav ior +ĠT osh +Ġsw ast +P ASS +Ġr ink +Ġcard io +ĠI ro +ud i +Ġv antage +Ġv ans +ĠNi ño ++ = +Ġpropag ate +< ? +Ġmethod ological +204 39 +Ġtrig lycer +Ġing rained +ĠAn notations +arr anted +6 17 +ĠS odium +ĠA AC +techn ical +mult ipl +Ġ3 73 +å ĭ +Ġdec isively +Ġboost ers +Ġdessert s +ĠGren ade +Ġtest ifying +ĠSc ully +ID s +Ġlock down +ĠSc her +ĠR é +ĠWhit man +ĠRams ay +rem ote +Ġh ikers +ĠHy undai +Ġcons cientious +Ġcler ics +ĠSiber ian +ut i +is bury +Ġrel ayed +Ġqu artz +ĠC BI +seek ers +ull a +Ġweld ing +ĠSh al +ble acher +T ai +ĠSam son +Ġt umble +ĠInvest or +Ġsub contract +ĠShin ra +ow icz +j andro +d ad +Ġtermin ating +ĠNe ural +ä» £ +Ġleak age +ĠMid lands +ĠCaucas us +í ķ +c it +ll an +iv ably +ĠAlb ion +Ġ4 57 +Ġregist rations +Ġcomr ade +Ġclip board +0 47 +Ġdiscour aging +ĠO ops +Ad apt +Ġem path +n v +ĠPR OT +ĠDon n +ĠP ax +ĠB ayer +t is +Squ are +Ġfoot prints +part icip +ĠChile an +B rend +ind ucing +M agn +Ġclub house +ĠMagn um +Ġenc amp +ĠEth nic +uch a +ere y +Ġw atered +ĠCal ais +Ġcomplex ion +Ġsect s +Ġren ters +Ġbr as +oÄŁ an +Time out +Man agement +Ġinf ographic +P okemon +Cl ar +Ġloc ality +Ġfl ora +as el +P ont +Ġpop ulate +ĠO ng +Ġsubs istence +Ġa uctions +ĠMcA uliffe +ĠL OOK +br inger +Ġtit an +Ġmanif old +ĠâĹ ı +Ġcalibr ated +Ġcal iphate +ĠSH E +ĠCommission ers +ce ivable +j c +W inner +5 24 +Ġcond one +Other wise +Ġp iling +Ġem body +ĠCrime an +ut ics +ĠEx hibition +Ġ4 26 +e ering +Ġv ying +ĠH UGE +* =- +Ġprin cipled +à ¦ +Ġquir ks +ĠEdit ors +put ing +G ES +ĠF TA +ठ¾ +add on +ĠH AM +ĠFrie za +W oman +. $ +Ġc rib +ĠHer od +Ġtim ers +ĠSp aces +ĠMac intosh +at aka +Ġgl ide +Ġsmell ing +ĠB AL +Ġun su +Ġcond os +Ġbicy cl +ĠRev ival +55 3 +Ġjugg ling +H ug +ĠKardash ian +ĠBalk ans +mult iple +Ġnutrit ious +oc ry +19 00 +Ġinteg rates +Ġad joining +ĠF older +roll ment +ven ient +Ġu ber +y i +Ġwh iff +ĠJu ven +ĠB orough +net te +Ġb ilingual +ĠSp arks +ph thal +man ufact +Ġt outing +ĠPH I +Ke efe +Rew ard +Ġinf all +ĠTem per +typ ically +ĠNik ol +Ġregular s +Ġpseud onym +Ġexhib itions +Ġbl aster +Ġ40 9 +w arming +Ġrever ber +Ġrecip rocal +Ġ6 70 +ip ient +b ett +ĠBe gins +Ġit ching +ĠPh ar +Ass uming +Ġem itting +ĠML G +Ġbirth place +Ġt aunt +ĠL uffy +ĠAm it +Ġcir cled +ĠN ost +enn ett +Ġde forestation +ĠHist orically +ĠEvery day +Ġovert ake +79 2 +Ġn un +ĠLuc ia +Ġaccompan ies +ĠSe eking +ĠTr ash +an ism +R ogue +Ġnorth western +ĠSupplement al +ĠNY U +ĠF RI +ĠSat isf +x es +5 17 +Ġreass ured +Ġspor adic +Ġ7 01 +Ġmed ial +Ġcannabin oid +Ġbarbar ic +Ġep is +ĠExplos ive +ĠD ough +Ġuns olved +Support ed +Ġacknowled gment +sp awn +Ġkit chens +Ġ- = +talk ing +ic ist +ĠPeg asus +ĠPS U +Ġphot on +ĠAuthent ication +R G +@# & +76 2 +ĠCl air +Ġdi aper +Ġbr ist +ĠProsecut ors +ĠJ em +6 28 +ĠEvery where +ĠJean ne +equ ality +ãĥ© ãĥ³ +object s +ĠPel icans +Ġ39 2 +Ġbl u +b ys +ĠA go +Ġinstruction al +Ġdiscrim inating +ĠTR AN +ĠCorn el +ag os +Ġty re +Ġas piration +ĠBrid gewater +": - +! ". 
+ĠEn s +ĠCoc o +P ie +Ġdet ach +ĠC ouch +Ġphys ique +ĠOccup ations +osc opic +en ough +B uzz +App earance +Y P +Ġrac er +Ġcompl icity +r pm +T oy +Ġinterrupt s +ĠCat alyst +Ġut ilitarian +imp act +Ġsp aghetti +Ġp orous +Ġeste emed +Ġinc iner +ĠI OC +7 48 +Ġesp resso +ĠSm ile +abil ia +6 35 +Ġmathematic ian +Ġ4 24 +ĠK L +ĠH IP +Ġover heard +ĠT ud +ĠT ec +Ġqu izz +Ġfl attering +Ġcon n +âĢ İ +Ġatt aches +ĠR OS +ĠAC S +Ġt cp +ĠSh ame +sk ip +res pected +ĠTrin idad +gr ain +Ġfooth old +ĠUnch arted +ĠJul io +z l +av ored +ĠAn xiety +er rors +ĠCent auri +its ch +D addy +Ġclutch ing +ĠIm plement +ĠGut ierrez +Ġ7 60 +Ġtele portation +end ra +Ġrevers ible +st ros +Ad venture +08 3 +Ġliber ating +Ġas phalt +ĠSp end +AR DS +im sy +PR ES +ĠEmer ging +Ġwild fires +Ġtechn ologically +Ġem its +ĠART ICLE +Ġirregular ities +Ġcher ish +çī Ī +Ġst ink +ĠR ost +Econom ic +Ġcough ing +ĠMcC ann +pro perties +ilant ro +Ġreneg oti +Trans lation +Ġin quest +ĠGra pe +oot ers +gu i +ĠSwords man +ace ae +h itting +Ġr c +Ġexert ed +ĠS AP +it ent +Ġperil ous +Ġobsc urity +Ġassass inate +Ġab original +Ġresc uing +ĠSh attered +lock ing +all ion +Ch anging +ĠHar rington +ĠB ord +ĠAfgh ans +Jam ie +aret z +ĠAugust us +Ġ38 6 +8 30 +Ġj og +ok ingly +Tr igger +ĠH OR +Stat istics +Ġviewers hip +Ġadd itives +h ur +Ġmaxim izing +ĠR ove +ĠLou ie +ĠBuck et +ĠCHR IST +ou sel +Ġstre aks +ir ted +Ġt ert +Ġcolonial ism +Ġbur ying +y k +Cond ition +ĠDPR K +By Id +75 1 +âĹ ¼ +Ġwor risome +Ġvoc ational +sl ice +Ġsa ils +ĠCorrection al +95 4 +Ġt ul +K id +l uster +Ġfam ilial +ĠSp it +ĠEp iscopal +Specific ally +ĠVol cano +run s +q s +Ġve tted +Ġcram med +t rop +here r +Thank fully +Ġper cussion +Ġor anges +Ġround up +Ġ4 99 +x ious +Char acters +ĠZion ism +ĠR ao +ÃĽ ÃĽ +W F +Ġunintention al +ONE Y +Gr ab +Com mercial +Ġglut amate +ĠMcK enna +ru ciating +ning ton +ih u +Ch an +ĠSw ap +Ġleaf lets +Ġfunction ally +er ous +F arm +Ġcal oric +ĠLiter ally +con cert +Ġshe nan +Ġrep aid +ey es +Ġbas hing +ĠG orge +Ġcollabor ations +Ġun account +itch ie +Ġteam work +pp elin +Ġpip ing +Ġmin ced +Ġd iam +ri eg +Ġmasc ara +Ġsuck er +ĠMo ons +App s +ĠPe ck +Ġper v +ĠFl oat +o ley +ĠN ish +im ize +Ġarom atic +u in +end ish +! 
/ +ĠB icycle +ĠAS IC +ile ged +ĠQuad ro +ios yn +Ġlock out +ĠW ink +SP EC +Attempt s +Ġseed ed +red o +ias is +Ġsn ag +ãĥķ ãĤ© +ãĤ ¶ +Ġground ing +Ġrelie ver +Ġfrivol ous +ĠG ifts +ĠF aces +Es pecially +Ġmicrobi ome +im ag +ĠSch l +ĠP les +ĠBle ach +ĠIr win +ĠE aton +ĠDisc iple +Ġmultipl ication +Ġcoer ced +Ġ4 19 +st h +E vil +B omb +Ġex orc +Ġstag gered +L ESS +Ġinert ia +ĠED IT +Ġgo b +Tr aditional +Ġclass y +Lear y +ĠP AGE +yr s +Ġtrans porter +Ġmat ured +Ġhij ab +Ġbi ome +Where as +Ġex termination +ĠT ues +ĠT akeru +ĠAud rey +er ial +ĠAd en +aff les +Ġnarciss istic +ĠB aird +UT F +I re +ĠCon nie +Ch amp +Ġwhis pering +ĠH att +D K +Ġdis infect +Ġdeduct ed +Ġpart ake +Ġdown grade +ĠEs ports +ĠContin uing +Ġdemocr atically +icro bial +itt a +Ġlim estone +Ġexempt ed +ĠFren zy +H erm +7 28 +Ġfled gling +Met a +765 61 +69 3 +% : +w ake +5 26 +ĠDis cipline +Ġvirgin ity +ĠLeg ions +ĠFrank ie +int ent +Ġrest rooms +ĠRou ter +da q +Ġobjection able +âĨ ij +w ark +ĠRah ul +g ain +activ ation +abs olute +ĠAccess ed +Ġ24 00 +ogg les +Ġsecond ly +ĠDEF ENSE +Ġpost age +wra pper +sh arp +7 29 +Ġcommun icates +Ġadd on +ĠMil itia +H ong +Ġsl umped +ĠJP EG +ĠI car +ad ish +68 1 +Ġmaj esty +ĠWolf gang +ĠEl astic +u per +Ġv iz +Ġunconscious ly +ĠST D +ĠS ass +Ġflower ing +ĠHel ic +ĠDra per +ĠAm ateur +Ġman ure +Ġdis ingen +ĠLe i +br ing +9 49 +Ġinhib ited +Ġhead quartered +Ġen igmatic +�� � +Ġred ress +R H +Ġratt led +Ġd iction +l io +ĠT BA +ĠSN AP +C alling +Ġfasc ists +ĠD ove +iew icz +0 36 +Ġco asts +ĠR ect +Ġ) ] +L ot +6 29 +ĠS EM +ĠPeters en +ĠExpl ain +ĠBo ards +ĠBe zos +ĠJ ournals +Ġ20 24 +p arser +Ġmist rust +Ġgr ate +ĠL ocked +bo a +S aint +g aming +Ġvow el +in ately +bl ow +All ah +Ġun matched +Ġb ordering +ĠExp end +n r +Or acle +rou ch +Ġcont iguous +ac us +Ġdist raught +58 1 +Ġanat omical +O X +ap ixel +8 33 +ĠPL US +Ġres usc +Ġab iding +57 3 +Ġvac ancies +Em ily +Ġhyp othal +ĠWer ner +ĠWe e +ĠDJ s +5 13 +Ġwitch craft +Ġac upuncture +ent ary +benef it +Product s +ĠP SP +ĠMP G +ĠJ inn +ĠJ arrett +Ġ4 45 +ĠIm aging +ĠP yth +Fin ish +Ġte x +Ġjuven iles +Ġhero ism +Ġdoubt less +ĠA ki +ĠT end +ĠPatri arch +Ġbit ters +ĠTele communications +it atively +ag na +Ġr g +ĠS OLD +Ġcomp ulsion +ĠN asa +ĠKath ryn +Ġmillion aires +Ġintrins ically +Ġbolst ered +time out +fl o +Ġtut or +p our +Stat ement +Ġ{ * +ĠRud olph +ĠKimber ly +rog ens +adi q +] + +Ġindign ation +Ġfract uring +ĠRe leases +ĠGr ain +pro tein +L ago +Ġvac ations +Ġboot ed +ĠTH REE +ĠH G +oresc ence +Ġt f +Ġso ar +iosyn cr +Ġgl ances +ĠSp oon +ĠJ ury +ĠCow boy +Ġcreat ively +Hig her +Ġsolic itor +Ġhaw k +ac io +89 6 +Ġsuperf lu +Ġbombs hell +ct ure +Ġbroker age +Ġraid ing +Ġf rench +Ġang led +Trans action +ĠGen ocide +u pe +ĠHait ian +57 2 +! 
: +Ġunwitting ly +iter ator +sc roll +Ġtall ied +Ġbi omedical +ĠC ARD +Ġe uphem +Ġbrain storm +a quin +K o +Mic helle +ĠR unes +ĠBall istic +ud ers +Ġmod esty +ĠiP ads +ĠEzek iel +Y E +Ġstars hip +Ġpower fully +Ġper l +ĠSh ade +ĠQu art +ĠE EG +Ġfisher man +OS ED +ĠTyp ical +df x +Ġmes hes +Ġet ched +worth iness +Ġtopp led +Ġ3 96 +or ius +We iss +Ġmy sql +ĠVal halla +Ù Ĵ +le asing +Ġrec omp +rap nel +S el +04 3 +Ġder ailed +ĠGu ides +IR T +Ġde human +ĠBritt any +" )) +Ġex claim +Ġb alk +Ġ8 40 +CLA IM +int el +L AB +Ġpe gged +Ġast roph +sm oking +Ġrig ging +Ġfix ation +Ġcat apult +ins ide +ĠC ascade +ĠBolshe vik +G aza +Dep th +Ġloud spe +Ġalmond s +me yer +l eness +j en +f resh +Ġunbeat en +ĠSqu id +ĠPres umably +Tim er +B W +Ġro sters +Ġell ipt +ĠHar riet +dat abase +ĠMut ual +ĠComm odore +uk ed +kn ife +ĠCOMM UN +h ya +Ġmel ts +arch ives +Ġrat ification +Ġmultip lying +Ġinter oper +Ġasc ert +w ings +ver ting +ĠScorp ion +ay e +ĠPorts mouth +ĠM TA +n it +iaz ep +Ġqu arantine +Ġslides how +Ġcent imeters +Ġsyn opsis +Ġsp ate +th irst +Ġnom inating +ĠMel vin +Pre view +Ġthro b +Ġgener ational +ĠRad ius +rest ling +put able +aw ar +N ECT +Ġunlaw fully +ĠRevel ations +Wik ipedia +sur v +Ġeye ing +ij n +ĠF W +Ġbr unt +Ġinter stellar +Ġcl itor +ĠCroat ian +ĠCh ic +ev a +ĠDis app +ĠA kin +iner ies +d ust +Interest ed +Ġgen esis +ĠE ucl +ö n +p icking +Ġmut ated +Ġdisappro ve +ĠHD L +Ġ6 25 +Ì ¶ +c ancer +Ġsqu ats +Ġle vers +Disc uss += ] +D ex +ĠVIDE OS +A UD +Ġtrans act +ĠKin ect +ĠK uala +ĠC yp +7 47 +Ġsh attering +Ġarsen ic +ĠInt ake +ĠAngel o +ĠQu it +ĠK he +Ġ18 93 +M aker +0 29 +ĠPain ting +Dis able +9 16 +Ġanal ges +Ġtact ile +Ġprop hes +Ġd iced +ĠTravel s +ĠHe ader +ĠClub s +Ass istant +Ġinc rim +Ġd ips +Ġcruc ifix +ĠShan ahan +ĠInter pret +Ġ40 90 +al ogy +abb a +Ġsimul ac +hus band +S IM +Ġrecy cle +uc er +ed ged +Ġre naissance +ĠBomb ay +Cath olic +ĠL INE +ĠCl othing +re ports +Ġpl aus +Ġd ag +ĠM ace +Z I +Ġintr uder +ĠVeter inary +g ru +Ġsne aky +ĠS ie +ĠC innamon +P OSE +Ġcou rier +ĠC NS +Ġemanc ipation +s it +Ġplay through +ĠFac ilities +v irt +ĠG auntlet +Thom pson +Ġunbeliev ably +Param eters +Ġst itching +ign e +ĠTH ESE +Priv acy +Ġshenan igans +Ġvit ri +ĠVal id +59 1 +Ń · +ĠProt otype +ink a +SC P +ĠT id +è Ī +old ed +Ġindividual ity +Ġbark ing +Ġm ars +ĠW D +Ġ8 20 +Ġt ir +Ġsl apping +Ġdisgr untled +ĠAng ola +ri us +ĠTorn ado +ĠTh urs +Ġcapt cha +Ġang st +ĠP og +ĠAssass ins +ĠAd idas +Ġjoy ful +Ġwh ining +Emer gency +Ġphosph orus +Ġatt rition +oph on +ĠTimber wolves +ĠJ ah +ĠBr inging +ĠW ad +ĠEn sure +oh l +ĠX ie +omm el +c mp +Ġz ipper +Ġrel at +ĠCor ridor +m ilo +T ING +Av g +Ġcro pped +] } +Ġr aged +ĠLump ur +ĠGuer rero +our ke +N ut +Ġoff sets +og lu +dr m +Ġmort als +lat able +Ġdismiss ive +ä¸ ī +Ġthro ats +Ġchips et +ĠSpot light +Catal og +art ist +G b +Ġch illy +Ġst oked +Ġ3 74 +W ard +L atin +Ġf iasco +Ġble ach +Ġb rav +Enh anced +Ġin oc +ĠFior ina +_ > +Ġle ukemia +Ġel uc +Ġannoun cer +ĠLith uan +ĠArm ageddon +å ĩ +Len in +ĠR uk +Ġpe pp +ĠRom antic +ĠP IT +ĠInter stellar +ĠAt kinson +R aid +J s +Go al +C ourse +Ġvan ishing +es ley +ĠR ounds +Els a +59 3 +Ġredund ancy +ĠST AND +Ġprop hetic +Ġhabit able +ry u +Ġfaint ly +M ODE +Ġfl anked +IR C +Aw esome +Ġsp urious +ĠZ ah +ĠMS G +Ġsh ading +Ġmotiv ational +ĠSant ana +ĠS PR +Ġexc ruciating +om ial +ĠM iko +ĠLe opard +A byss +Ġ[ | +d irty +Ġbath s +Ġdem oral +and re +P B +Ġun ification +Ġsac rament +Ġ[ & +Ġpric eless +Ġgel atin +Ġeman ating +ĠAll aah +98 6 +Ġout burst +Ġer as +ĠX VI +ĠSP I +O tt +ĠLaz arus +PL IED +F lying 
+blog s +W isconsin +R aven +Ġreb ate +Ġcreep s +ĠSp an +ĠPain ter +ĠKir a +ĠAm os +ĠCor vette +Cons umer +ĠRec over +ck i +Ġpes ky +ĠIn vention +Compan ies +Ġchalleng ers +ad emic +ĠUkrain ians +ĠNeuro log +ĠFors aken +Ġent rants +Ġemb attled +Ġdef unct +ĠGlac ier +Ġpo isons +ĠH orses +m akes +ĠD irt +Ġ4 23 +hh h +ĠTrans formation +QUI RE +................ .. +Ġtrave ller +ĠSe xy +ĠK ern +ip olar +Ġransom ware +oooooooo oooooooo +E c +rub y +Prof essional +ĠOut break +arg ument +G rey +ĠFif a +ĠCH O +ĠFOR M +ĠAm trak +- [ +Ġcr adle +Ġantioxid ants +ãģ®å ® +7 36 +ĠNAS L +ĠContribut ions +Ind iana +ĠST EP +C SS +Ġsal ient +Ġall ocations +yr ights +Ġm ashed +ĠCut ter +Sex ual +Ġp ounded +Ġfan base +Ġc asc +ĠTrans parency +Ġanaly tic +ĠSummon er +× ŀ +ĠAD C +det ail +Ġvan quished +Ġcr abs +ar ie +Dest roy +ĠS ack +Ġtrans istor +Al abama +ĠK oen +ĠFisher ies +c one +Ġannex ed +ĠM GM +es a +Ġf aked +ĠCong ratulations +Ġhind ered +Ġcorrection al +ĠI TV +lee ve +Ġin appropriately +lic ks +Ġtresp ass +Ġp aws +Ġnegoti ator +ĠChrist ensen +lim its +ĠDian ne +Ġeleg ance +ĠContract s +an ke +Ob j +Ġvigil ance +Ġcast les +ĠN AD +ĠHol o +Ġemph atically +ĠTit us +ĠServ ing +ĠRich ie +ĠP igs +5 68 +Ġanim osity +ĠAtt ributes +ĠU riel +M Q +my ra +ĠApplic ant +Ġpsychiat rists +ĠV ij +ĠAb by +ag ree +P ush +Ġk Wh +hib a +Ġinc ite +ĠWe asley +ĠTax i +minist ic +hy per +ĠF arn +Ġ6 01 +ĠNation wide +F ake +95 2 +Ġma ize +Ġinteract ed +Ġtransition ed +Ġparas itic +Ġharm onic +Ġdec aying +Ġbas eless +ns ics +Ġtrans pired +Ġabund antly +ĠFore nsic +Ġtread mill +ĠJ av +ab and +Ġssh d +Ġfront man +ĠJak arta +oll er +dro ps +ĠSERV ICES +rompt u +oph ical +h ospital +bled on +6 45 +Ġmid range +ĠEV ENT +cul ated +raw led +Ġper ched +Ġover board +ĠPe el +ĠP wr +ĠCar th +ĠCOM PLE +co e +sh all +Ġdeter rence +M ETHOD +ĠAbs ent +M EN +Ġs ill +ĠLE VEL +Y ork +Ġsin ners +ĠOP EC +ĠN ur +ĠDesign s +se lection +Ġunw orthy +CH A +Ġstreng thens +88 3 +ed ly +Ġslic ing +Ġmal nutrition +Ġfilm making +ĠPol k +ur ated +Ġ4 21 +bre akers +!' 
" +Ġwet lands +ĠDisc rimination +Ġallow able +Ġste ered +ĠSic ily +S AM +Ġmust ache +Ġm ids +Ġcl ipped +Ġcirc ulate +Ġbr ittle +ĠBuild ings +ra ised +ĠRound up +Ġwealth ier +Ġoverw rite +Ġover powered +ĠGerr ard +s ites +PD ATED +Ġacute ly +ĠGam ble +Ġp im +ĠK us +Typ ically +De ploy +ĠMoroc can +p otion +com be +Ġvigil ante +Ġ36 3 +St ew +ĠB agg +Ġres ided +ĠSp o +Ġrem nant +Ġempt iness +br ainer +Ġout patient +pri ority +Ġle ptin +ĠPay ton +ĠGle aming +ĠS hed +ĠPol o +ĠMormon ism +rest ricted +arl ane +w x +Ġcreat ine +ĠAn on +ĠST UD +ĠJ UL +ĠT ee +5 28 +08 9 +Ġhat ched +Dis patch +ĠCompos ite +Ġ45 1 +p uff +ĠX COM +ĠOr n +ĠTH ANK +END ED +ĠAshe ville +Ġà ľ +Ġman go +ĠS lightly +world ly +ĠW ander +ĠExp and +ĠCh r +M ist +Ġorthodox y +ĠUN ESCO +reg ate +Else where +k ie +ir led +Ġtopp le +Ġadopt ive +ĠLeg s +d ress +ĠS agan +b are +ĠGl ou +Cr unch +Ġhelp ers +Ġchron ically +ĠH uma +1 0000 +Ġaccommod ating +äº Ķ +Ġwrink les +Ġdod ged +four th +Ġpre con +Ġcompress or +ĠK are +Ġev ict +ĠWar wick +im ar +Ġmodern ization +Ġband wagon +Ġref uted +Ġnet ted +ĠNa ples +ĠGen ie +per ors +Ġfield ed +Ġde re +ĠPar ables +le es +Ġtr out +asp ers +Ġn ihil +Ġhapp iest +Ġflo ppy +ĠLo ft +ĠHe ard +Ġun ison +Ġl ug +ĠRed mond +class ic +Supp orters +SH IP +G MT +Ġfue lled +ç IJ +Ġd d +ĠEmin em +Ġ18 97 +NY SE +Ġsecret aries +ĠF IA +ĠCanaver al +F avorite +Ġp omp +Ġdetain ee +ers hip +aim on +i our +ĠA pex +Ġplant ations +am ia +ac ion +R ust +Ġtow ed +ĠTru ly +5 77 +Ġshel tered +r ider +W o +Ġl air +ĠInt elligent +impro ve +m atically +Ġet iquette +ad ra +all o +ĠJun o +any thing +ĠStru ggle +ĠPred ict +ĠGr imes +ĠAMER ICA +ct x +ĠSit uation +W OOD +Ġsol uble +me ier +Ġintoler able +ang ering +Ġun interrupted +Ġtool tip +Ġinterrog ated +Ġgun ned +ĠSne ak +æŃ ¦ +Ġt ether +Ġcr umble +L ens +Ġclust ered +ĠSy l +ĠHas an +Ġdystop ian +w ana +Ġjoy stick +ĠTh ib +amm u +Tom orrow +5 46 +Ġoverc ame +Ġminim ized +cept or +Run ner +ENG TH +ĠBrend a +ĠAchieve ments +Ġtor ches +Ġrapp ort +ĠInvestig ator +ĠHand ling +rel ation +g rey +8 15 +Ġk cal +ĠComm ands +d q +Ġcur ls +Ġbe arer +Ġcyn icism +it ri +ĠUse ful +B ee +D CS +Ġab ras +P ract +BIL ITIES +7 12 +Ġdebug ger +Ġdebt or +ĠL ia +ĠK ers +Ġexacerb ate +ĠSt acy +ĠB land +ĠSc enes +Ġbranch ing +âĸĪâĸĪâĸĪâĸĪ âĸĪâĸĪâĸĪâĸĪ +ape ake +Ġs alsa +Ġmish and +ĠKon ami +ĠN ib +Ġanecd ote +Ġagree able +Ï ī +ĠNath aniel +ĠHe isman +ĠB eware +Ġ18 86 +spect ive +69 1 +5 22 +Ġinhib its +Ġhas hing +Ġ18 89 +å° Ĩ +v ich +P ure +Ġsolid ly +Ġaspir in +im aru +Ġstreet car +ĠU CS +ĠJ udd +Ġflash backs +p ins +Ġ14 40 +ĠUN HCR +ĠSym ptoms +T IT +5 38 +F ra +% ); +Ġo oz +Ġcur few +Ġcal med +Ġparticip ates +Te X +Ġnons ensical +Ġfull back +ĠDe L +mon key +h ari +Ġmetabol ites +Ġloot ed +ĠAL WAYS +ĠB CC +L t +oc het +B one +Ġveto ed +Ġg cc +ĠCL ICK +Ġ18 88 +s af +Ġstiff ness +Ġlow ly +ĠGe h +vers on +ors et +Ġun foreseen +Ġan esthesia +ĠOpt ical +Ġrecon structed +ĠT up +sh ows +NEW S +ĠNewsp aper +ĠA SA +ter a +N umbers +Ġinexpl icable +× ij +Ġhard ness +unt arily +ĠA cer +grad ient +ARD IS +Ġwood land +Ġmetaph ors +ĠWem bley +ĠPa vel +phil is +Ġre writing +Ġpercept ual +Ġ10 70 +worm s +ĠDown s +Ġunsur prisingly +Ġtag ging +fl ame +Ġlit res +Ġboun ces +ĠB abe +sh ut +Ġoverd oses +ĠShe ila +ĠCh au +ĠBl ess +Capt ure +ĠSign ificant +ĠSc ion +Ġ38 9 +ĠMc H +ĠTitan ium +ĠMe al +amed a +ag ents +agg ressive +B illy +76 3 +ĠS aying +DER R +it one +Coll ins +B ound +Ġbol ted +ĠDM CA +95 3 +Ġun iqueness +Ġep igen +un ci +ant am +Ġreck oning +ch airs +OG R +ĠSen egal +Ġ18 62 +re levant +Ġ ¯ +Ġpharm acies 
+ĠG eral +v ier +Y an +OR PG +Ġrab id +b ending +ĠUN ITED +Ġ4 65 +As sembly +Ġwe ep +Ġbe hest +ĠMother s +ĠJ ace +h id +Ġwh irlwind +ĠUN IVERS +Ġut opian +Ġkidn ap +Ph ilipp +K in +89 3 +Ġlivest ream +ĠM ISS +Ġsub versive +ĠTechn iques +ĠJUST ICE +ĠB ASE +Ġ38 7 +Ġassail ants +ĠHard core +Ġsprink led +ĠP se +é ļ +print ed +ĠH au +OR GE +ĠT OUR +Ġl aced +Ġit ch +G iving +Ġport ed +78 1 +//////////////// //////////////// +bre eding +Ġlog ger +ĠH OL +inn ie +First ly +Ġembry onic +Ġdeleg ated +p ai +O IL +Ġcentr ally +ĠR x +ĠSc outing +D utch +Ġhe reditary +ĠCru iser +s at +5 29 +ĠMar riott +other mal +Ġprohib itions +E arn +ĠSt ab +ĠColleg es +ĠBel ief +st retched +ĠL H +ĠEntity Item +C IA +Ġun rem +Ġlaure ate +Ġdenomin ations +sum mary +h ler +S pect +ĠK laus +ĠBe ans +Ġins ur +ĠPA X +Ġfield er +ĠV et +ĠSp arrow +z ie +ĠS Q +ĠMond ays +ĠOff line +ĠLer ner +ĠExt ensions +Ire land +Ġpatron age +Ġcontrast ed +ĠMan ia +h irt +Mos cow +Ġcondem ns +ĠAn ge +Ġcomp osing +ĠPe pe +ĠP addock +Ġheter ogeneity +Ġide ologically +Ġf ishes +Ġcur sing +ĠR utherford +ĠFlo ating +ĠAm elia +Te a +Syn opsis +Ġstun ts +Ġbe ad +Ġstock ing +ĠM ILL +ob ook +mass ive +\ < +Ġh ump +ĠPref erences +Engine Debug +ge ist +ĠNiet o +ome ver +ish y +eval uate +col onial +Altern ative +ĠGo Pro +ĠV ortex +ĠNET WORK +ans ky +Sec ure +ĠTh rust +Sn ake +Ġparcel s +Ġsam urai +Ġactress es +N ap +M F +ifer ation +Be er +5 23 +ĠI ly +oint ment +P ing +Ġstri ped +ĠMell on +oss ession +Ġneut ron +end ium +Ġa ph +ĠFlav oring +Ġ38 3 +Ġrespons iveness +ĠJ indal +ĠHitch cock +Den ver +ĠDRAG ON +sm anship +ĠDu pl +Ġs ly +Ġweb cam +ĠTw ain +ĠDar ling +ili ate +cons umer +D IT +Ġnames ake +Ġun orthodox +Ġfun er +ĠPL oS +ĠCONTR OL +ozy g +ogl obin +F ACE +ER G +ĠD ia +ĠF iesta +ce le +0 34 +Ġencl ave +âĸ¬ âĸ¬ +on ement +al ist +M and +Ġhome grown +ĠF ancy +Ġconcept ions +ĠCont ains +ure en +Ġreiter ate +Ġme ager +Ġinstall ments +Sp awn +6 27 +Ġphot oc +ĠCab rera +ĠRos enthal +ĠLans ing +is ner +Ġinvest s +ĠUFO s +EX P +Hard ware +Ġtr agically +Ġconced es +ie ft +ch am +bor gh +ĠSch r +ĠMel anie +ĠH oy +Ġvisit ation +Ġid iosyncr +Ġfract ions +Ġfore skin +ob os +Ġpo aching +ĠVI EW +Ġstimul ates +ĠG ork +can on +M IC +ĠNem esis +ĠInd ra +ĠDM V +Ġ5 29 +Ġinspect ing +Ġgrand ma +ĠW hedon +ĠSh ant +ĠP urg +ik an +ĠT eg +ĠCL R +z ac +Vict oria +ĠVer ify +ion ics +Ġpart ying +ĠM ou +col our +Ġtestim onies +l ations +Ġpress uring +hi ro +ac ers +Ġf id +ang ler +ĠCS I +Ġhere after +Ġdiss idents +report ing +iph any +che v +Ġsol itude +Ġl obe +Ġind is +Ġcred ential +re cent +ad ult +ĠNir vana +ĠFranch ise +L ayer +H yp +ĠBerks hire +Ġwill s +t if +Ġtot em +ĠJud ah +rep air +Inst ant +5 48 +Ġemb assies +Ġbott leneck +Ġb ount +Ġtyp ew +ĠAl vin +j ing +im ilar +R ush +Ġbr im +ĠHEL P +A im +] ' +Ġpass ively +Ġbound ed +ĠR ated +Ġcriminal ity +Ġbiom ark +Ġdisp atcher +ĠTow ards +Ġ+ ++ +right eous +f rog +ĠP anc +C arter +0 32 +æ© Ł +Ġult raviolet +ĠLic ensed +ĠT ata +ĠBl essing +ĠG AM +Ġchem ically +ĠSe af +ĠRE LE +ĠMerc enary +capital ist +Ġform ulations +Ġann ihilation +ĠVer b +ĠAr gon +Ġun loaded +Ġmorp hed +Ġconqu ering +back er +I ELD +Ġtheft s +Ġfront runner +ĠRoy ale +ĠFund amental +el ight +C hip +necess ary +ay n +ĠSl ip +Ġ4 48 +cern ed +P ause +Ġshock ingly +ĠAB V +Ġcomp osure +7 33 +ĠMotors port +ah ime +Mur ray +M ach +Ġgr ids +Ġdeb ian +Ġfurther more +Ġdexter ity +ĠCollect ions +os lov +il age +b j +ĠMont eneg +Ġstrut Connector +Ġmassac res +Ġbrief s +fet ched +uv ian +ol ition +Fail ure +emon ic +Ġfl ared +Ġclaim ant +Ġc ures +Ġgive aways +ĠSubst 
ance +al ions +Ġcr inge +ĠK ul +Ġarist ocracy +ĠUl ster +ol ated +h ousing +ĠM IS +Ġgl ared +ĠWil helm +ne eds +lam bda +build ers +ĠV IS +Ġradi ator +ĠGhost busters +Ġ4 36 +act ual +Ġher ds +ç a +watch ing +Ġcounter ing +Ch arge +Ġchar red +Ġwar heads +Ġiod ine +ĠM acy +04 1 +Ġdepart ures +ĠS ins +Ġdy ed +ĠConcept s +g ado +7 13 +Ġquot ations +Ġg ist +ĠChrist y +Ġant igen +ĠHem p +ĠD rawn +ĠB arg +ez vous +Ġp aternity +Ġar du +ĠAnch orage +ĠR ik +Ġover loaded +ĠUs ername +ĠTam my +ĠN au +ĠCell ular +Ġw aning +Ġrod ent +ĠWor cester +il ts +ĠT ad +Ġdwell ings +Ġbull ish +4 31 +Ġretali ate +Ġmig raine +ĠChev ron +CH ECK +Ġdon key +c rim +SP A +ĠAn alog +Ġmarqu ee +ĠHa as +B ir +ĠGD DR +ĠDownload s +Ġwill power +ĠFor th +ĠRecord ed +Ġimp ossibility +ĠLog ged +ĠFr anks +ĠR att +in itions +Ġclean ers +Ġsore ly +Ġflick ering +ĠEx amination +c atching +allow een +Ms g +Ġdun no +F a +Ġdys ph +c razy +.' '. +Ġmain line +Ġc s +Ġp tr +ĠW ally +ig un +95 1 +ĠBig foot +f ights +Ġretrie ving +J r +Ġdupl ication +ĠExpl an +Ġrel ational +Ġqu aint +Ġbisc uits +Ġad o +Ġsh udder +Ġantid ote +blood ed +ks h +Ġsa uces +Ġrein vest +Ġdispens ary +ĠD iver +Ġ9 000 +stud ent +Ġin separ +esc ap +Ġtodd lers +ĠGP IO +ĠAss ignment +head ers +Ġlack luster +Ġab ack +95 6 +Ġtool bar +7 45 +Ġo ust +Ġcontempl ation +ĠPRES IDENT +Ġ4 58 +==== == +Ġguarantee ing +ĠHe ist +ĠCann es +Ļ ½ +Ġcollabor ator +ĠAm p +Ġg ou +ĠSH ALL +st ories +78 3 +Ġmobil ized +Ġbro od +ĠL U +ĠðŁ ij +Ġref in +ĠAnthrop ology +v ind +ill i +Ġwarrant ies +ĠB abel +Ġsw ath +Ġc aches +Ġantagon ists +art ifacts +Ġhot ly +ĠSt arts +ĠG ö +z ag +!! !!! +Ġsc ourge +Ġcons piring +ru its +re verse +ĠShe en +ĠJes uit +ĠGiov anni +ad ies +Ġbutt ocks +ear cher +ac an +Ġvolley ball +Ġshroud ed +Ġscore board +b ats +ĠI PM +Ġass es +Ġde regulation +ĠTe legram +ĠReb oot +Ġ7 000 +ĠCan ary +Ġk ernels +ĠFranç ois +ĠD uff +ĠP on +ĠLe ica +ĠGar min +Ġor phans +ĠClaud ia +Ġcal endars +ĠLe ilan +ent o +R ocket +Ġbr unch +ĠHaw king +ain ers +Ġsens ibilities +Ġk W +ĠK and +Ġre claimed +Ġinteresting ly +× © +rom y +J M +ĠEnhance ment +b ush +Sk ip +Ġrapp ers +Ġg azing +p edia +ath lon +Rev olution +Ġsn ipers +Ġre verted +Ġconglomer ate +T erry +79 4 +Ġhars her +Ġdes olate +ĠHit man +Comm ission +Ġ( / +âĢ¦ ." 
+Com par
+Ġampl ification
+om inated
+Ġreg ress
+ĠColl ider
+Ġinform ants
+Ġg azed
diff --git a/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/model.safetensors b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..57be37292b14d7ca8275f1caa0489af4a415e0a3
--- /dev/null
+++ b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e36cd86311a0093e095e9e5f749374da61b29efee5bf0113740b41d006a84f57
+size 647614116
diff --git a/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/special_tokens_map.json b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..06a55849fa69a91e7179d909e373179ff7467d8c
--- /dev/null
+++ b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/special_tokens_map.json
@@ -0,0 +1,51 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "cls_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "sep_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
diff --git a/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/tokenizer.json b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..7d8cc8260e71054cbd8d7ef98bfc90191be97544
--- /dev/null
+++ b/drive/MyDrive/RA_Internship/HIPORANK/LED_model/Trying_LED_Model_Hiporank_final_setting.ipynb/tokenizer.json
@@ -0,0 +1,100371 @@
+{
+  "version": "1.0",
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 512
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 1,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
+  },
+  "added_tokens": [
+    {
+      "id": 0,
+      "content": "<s>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": true,
+      "special": true
+    },
+    {
+      "id": 1,
+      "content": "<pad>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": true,
+      "special": true
+    },
+    {
+      "id": 2,
+      "content": "</s>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": true,
+      "special": true
+    },
+    {
+      "id": 3,
+      "content": "<unk>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": true,
+      "special": true
+    },
+    {
+      "id": 50264,
+      "content": "<mask>",
+      "single_word": false,
+      "lstrip": true,
+      "rstrip": false,
"normalized": true, + "special": true + } + ], + "normalizer": null, + "pre_tokenizer": { + "type": "ByteLevel", + "add_prefix_space": false, + "trim_offsets": true, + "use_regex": true + }, + "post_processor": { + "type": "RobertaProcessing", + "sep": [ + "", + 2 + ], + "cls": [ + "", + 0 + ], + "trim_offsets": true, + "add_prefix_space": false + }, + "decoder": { + "type": "ByteLevel", + "add_prefix_space": true, + "trim_offsets": true, + "use_regex": true + }, + "model": { + "type": "BPE", + "dropout": null, + "unk_token": null, + "continuing_subword_prefix": "", + "end_of_word_suffix": "", + "fuse_unk": false, + "byte_fallback": false, + "ignore_merges": false, + "vocab": { + "": 0, + "": 1, + "": 2, + "": 3, + ".": 4, + "Ġthe": 5, + ",": 6, + "Ġto": 7, + "Ġand": 8, + "Ġof": 9, + "Ġa": 10, + "Ġin": 11, + "-": 12, + "Ġfor": 13, + "Ġthat": 14, + "Ġon": 15, + "Ġis": 16, + "âĢ": 17, + "'s": 18, + "Ġwith": 19, + "ĠThe": 20, + "Ġwas": 21, + "Ġ\"": 22, + "Ġat": 23, + "Ġit": 24, + "Ġas": 25, + "Ġsaid": 26, + "Ļ": 27, + "Ġbe": 28, + "s": 29, + "Ġby": 30, + "Ġfrom": 31, + "Ġare": 32, + "Ġhave": 33, + "Ġhas": 34, + ":": 35, + "Ġ(": 36, + "Ġhe": 37, + "ĠI": 38, + "Ġhis": 39, + "Ġwill": 40, + "Ġan": 41, + "Ġthis": 42, + ")": 43, + "ĠâĢ": 44, + "Ġnot": 45, + "Ŀ": 46, + "Ġyou": 47, + "ľ": 48, + "Ġtheir": 49, + "Ġor": 50, + "Ġthey": 51, + "Ġwe": 52, + "Ġbut": 53, + "Ġwho": 54, + "Ġmore": 55, + "Ġhad": 56, + "Ġbeen": 57, + "Ġwere": 58, + "Ġabout": 59, + ",\"": 60, + "Ġwhich": 61, + "Ġup": 62, + "Ġits": 63, + "Ġcan": 64, + "Ġone": 65, + "Ġout": 66, + "Ġalso": 67, + "Ġ$": 68, + "Ġher": 69, + "Ġall": 70, + "Ġafter": 71, + ".\"": 72, + "/": 73, + "Ġwould": 74, + "'t": 75, + "Ġyear": 76, + "Ġwhen": 77, + "Ġfirst": 78, + "Ġshe": 79, + "Ġtwo": 80, + "Ġover": 81, + "Ġpeople": 82, + "ĠA": 83, + "Ġour": 84, + "ĠIt": 85, + "Ġtime": 86, + "Ġthan": 87, + "Ġinto": 88, + "Ġthere": 89, + "t": 90, + "ĠHe": 91, + "Ġnew": 92, + "ĠâĢĶ": 93, + "Ġlast": 94, + "Ġjust": 95, + "ĠIn": 96, + "Ġother": 97, + "Ġso": 98, + "Ġwhat": 99, + "I": 100, + "Ġlike": 101, + "a": 102, + "Ġsome": 103, + "S": 104, + "ë": 105, + "Ġthem": 106, + "Ġyears": 107, + "'": 108, + "Ġdo": 109, + "Ġyour": 110, + "Ġ-": 111, + "Ġ1": 112, + "\"": 113, + "Ġif": 114, + "Ġcould": 115, + "?": 116, + "Ġno": 117, + "i": 118, + "m": 119, + "Ġget": 120, + "ĠU": 121, + "Ġnow": 122, + "Ġhim": 123, + "Ġback": 124, + "ĠBut": 125, + "ĠâĢĵ": 126, + "Ġmy": 127, + "Ġ'": 128, + "Ġonly": 129, + "Ġthree": 130, + ";": 131, + "Ġ2": 132, + "The": 133, + "1": 134, + "Ġpercent": 135, + "Ġagainst": 136, + "Ġbefore": 137, + "Ġcompany": 138, + "o": 139, + "ĠTrump": 140, + "Ġhow": 141, + "Ġbecause": 142, + "Ġany": 143, + "Ġmost": 144, + "Ġbeing": 145, + "Ġmake": 146, + "Ġwhere": 147, + "Ġduring": 148, + "Ġthrough": 149, + "Ġwhile": 150, + "000": 151, + "ĠThis": 152, + "Ġmillion": 153, + "ing": 154, + "Ġ3": 155, + "Ġmade": 156, + "Ġwell": 157, + "Ġ10": 158, + "Ġdown": 159, + "Ġoff": 160, + "Ġsays": 161, + "Ġme": 162, + "ĠB": 163, + "Ġgoing": 164, + "Ġteam": 165, + "ĠWe": 166, + "Ġthose": 167, + "Ġgovernment": 168, + "Ġway": 169, + "We": 170, + "Ġmany": 171, + "Ġthen": 172, + "Ġwork": 173, + "Ġtold": 174, + "com": 175, + "2": 176, + "Ġgame": 177, + "ĠAnd": 178, + "in": 179, + "year": 180, + "Ġp": 181, + "Ġvery": 182, + "Ġday": 183, + "Ġhome": 184, + "Ġtake": 185, + "Ġweek": 186, + "Ġsince": 187, + "ĠNew": 188, + "Ġmay": 189, + "Ġeven": 190, + "Ġseason": 191, + "Ġsee": 192, + "Ġ2017": 193, + "Ġstate": 194, + "Ġ5": 195, + "ed": 196, + "Ġshould": 197, + "Ġaround": 198, + "Ġ2018": 199, + 
"Ġsecond": 200, + "Ġus": 201, + "Ġstill": 202, + "Ġmuch": 203, + "Ġ4": 204, + "Ġgood": 205, + "Ġthink": 206, + "%": 207, + "ĠS": 208, + "Ġthese": 209, + "Ġmarket": 210, + "ĠD": 211, + "th": 212, + "Ġgo": 213, + "'re": 214, + "Ġsuch": 215, + "Ġknow": 216, + "Ġincluding": 217, + "Ġdon": 218, + "y": 219, + "Ġnext": 220, + "ĠP": 221, + "Ġdid": 222, + "Ġunder": 223, + "Ġsay": 224, + "en": 225, + "ĠL": 226, + "Ġbetween": 227, + "Ġper": 228, + "ĠK": 229, + "ĠC": 230, + "Ġ6": 231, + "Ġworld": 232, + "Ġpart": 233, + "ĠN": 234, + "Ġright": 235, + "Ġwant": 236, + "Ġfour": 237, + "),": 238, + "Ġhigh": 239, + "Ġneed": 240, + "re": 241, + "e": 242, + "It": 243, + "Ġhelp": 244, + "5": 245, + "3": 246, + "Ġcountry": 247, + "ĠR": 248, + "Ġpolice": 249, + "A": 250, + "Ġlong": 251, + "ĠThey": 252, + "Ġend": 253, + "er": 254, + "ĠT": 255, + "ĠM": 256, + "u": 257, + "Ġboth": 258, + "Ġhere": 259, + "an": 260, + "on": 261, + "Ġ7": 262, + "Ġde": 263, + "ĠShe": 264, + "Ġbusiness": 265, + "Ġreport": 266, + "j": 267, + "ers": 268, + "Ġreally": 269, + "ĠPresident": 270, + "ar": 271, + "ĠG": 272, + "ĠFriday": 273, + "ĠF": 274, + "Ġbest": 275, + "Ġsame": 276, + "Ġanother": 277, + "Ġset": 278, + "old": 279, + "ĠThat": 280, + "as": 281, + "n": 282, + "Ġcome": 283, + "Ġfamily": 284, + "Ġpublic": 285, + "ĠFor": 286, + "ĠAs": 287, + "0": 288, + "ĠH": 289, + "Ġ8": 290, + "Ġ20": 291, + "Ġfive": 292, + "es": 293, + "ĠTuesday": 294, + "Ġn": 295, + "ĠThursday": 296, + "Ġquarter": 297, + "h": 298, + "Ġtop": 299, + "Ġgot": 300, + "Ġlife": 301, + "ĠMonday": 302, + "Ġfound": 303, + "Ġuse": 304, + "ĠW": 305, + "4": 306, + "ĠWednesday": 307, + "Ġown": 308, + "Ġaccording": 309, + "Ġplay": 310, + "Ġshow": 311, + "ĠSt": 312, + "Ġman": 313, + "Ġleft": 314, + "ĠUnited": 315, + "Ġ12": 316, + "Ġplace": 317, + "ĠIf": 318, + "Ġlot": 319, + "Ġformer": 320, + "Ġ0": 321, + ").": 322, + "Ġsupport": 323, + "ie": 324, + "Ġbillion": 325, + "Ġt": 326, + "Ġshares": 327, + "!": 328, + "z": 329, + "k": 330, + "ĠState": 331, + "Ġpoints": 332, + "Ġgroup": 333, + "Ġschool": 334, + "Ġinformation": 335, + "Ġ2016": 336, + "al": 337, + "r": 338, + "Ġwin": 339, + "Ġnews": 340, + "Ġused": 341, + "Ġput": 342, + "Ġcity": 343, + "ĠJ": 344, + "ĠThere": 345, + "Ġnumber": 346, + "C": 347, + "'ve": 348, + "Ġeach": 349, + "Ġtoo": 350, + "Ġwon": 351, + "ly": 352, + "Ġmonth": 353, + "is": 354, + "Ġadded": 355, + "Ġlook": 356, + "Ġbetter": 357, + "Ġevery": 358, + "Ġ&": 359, + "Ġdays": 360, + "Ġ9": 361, + "Ġtook": 362, + "Ġnight": 363, + "Ġe": 364, + "Ġ11": 365, + "os": 366, + "Ġfew": 367, + "or": 368, + "ĠNorth": 369, + "ĠYou": 370, + "Ġthird": 371, + "Ġgreat": 372, + "Ġcalled": 373, + "ĠOn": 374, + "Ġpast": 375, + "Ġcame": 376, + "Ġmonths": 377, + "ĠSaturday": 378, + "Ġ15": 379, + "Ġbig": 380, + "ĠE": 381, + "ĠUS": 382, + "Ġthings": 383, + "ĠO": 384, + "Ġd": 385, + "Ġstart": 386, + "B": 387, + "Ġstock": 388, + "Ġ30": 389, + "Ġwomen": 390, + "ĠSouth": 391, + "ĠMay": 392, + "Ġnever": 393, + "Ġpresident": 394, + "ĠSunday": 395, + "Ġwithout": 396, + "man": 397, + "8": 398, + "Ġdidn": 399, + "Ġlocal": 400, + "6": 401, + "Ġsomething": 402, + "Ġcase": 403, + "ĠAll": 404, + "it": 405, + "7": 406, + "ĠSo": 407, + "Ġchildren": 408, + "Ġaway": 409, + "Ġlittle": 410, + "Ġsix": 411, + "ĠCity": 412, + "ĠCounty": 413, + "Ġdata": 414, + "at": 415, + "Ġalready": 416, + "d": 417, + "Ġmoney": 418, + "Ġearly": 419, + "Ġacross": 420, + "Ġexpected": 421, + "Ġrun": 422, + "Ġlater": 423, + "am": 424, + "Ġprice": 425, + "Ġgames": 426, + "ĠMr": 427, + "b": 428, + "Ġmight": 429, + "Ġdifferent": 430, 
+ "Ġreported": 431, + "Ġdeal": 432, + "Ġmedia": 433, + "Ġgrowth": 434, + "Ġcommunity": 435, + "ĠChina": 436, + "'m": 437, + "c": 438, + "Ġwent": 439, + "ĠNo": 440, + "Ġable": 441, + "Ġmaking": 442, + "Ġarea": 443, + "Ġfar": 444, + "Ġstatement": 445, + "ĠHouse": 446, + "Ġworking": 447, + "M": 448, + "Ġk": 449, + "Ġseen": 450, + "Ġcompanies": 451, + "Ġtoday": 452, + "Ġmembers": 453, + "Ġuntil": 454, + "Ġfull": 455, + "Ġagain": 456, + "Ġhalf": 457, + "Ġshare": 458, + "le": 459, + "Ġalways": 460, + "Ġcourt": 461, + "l": 462, + "and": 463, + "Ġchange": 464, + "Ġfind": 465, + "9": 466, + "Ġsystem": 467, + "ĠV": 468, + "ĠYork": 469, + "ĠAmerican": 470, + "Ġhead": 471, + "Ġplayers": 472, + "Ġdoes": 473, + "Ġhealth": 474, + "Ġm": 475, + "Ġpower": 476, + "Ġpoint": 477, + "Ġhit": 478, + "Ġ.": 479, + "Ġ--": 480, + "Ġfree": 481, + ".,": 482, + "Ġlead": 483, + "Ġseveral": 484, + "Ġrecent": 485, + "Ġcall": 486, + "N": 487, + "Ġlaw": 488, + "Ġkeep": 489, + "Ġopen": 490, + "ĠNews": 491, + "Ġgive": 492, + "ia": 493, + "ĠMarch": 494, + "D": 495, + "ĠNational": 496, + "ĠAt": 497, + "Ġtimes": 498, + "Ġfuture": 499, + "R": 500, + "Ġ14": 501, + "ĠJune": 502, + "Ġofficials": 503, + "Ġ18": 504, + "Ġimportant": 505, + "f": 506, + "Ġfinal": 507, + "Ġ13": 508, + "ĠOne": 509, + "P": 510, + "Ġfollowing": 511, + "Ġcar": 512, + "Ġleast": 513, + "Ġwater": 514, + "Ġevent": 515, + "Ġline": 516, + "Ġmove": 517, + "Ġservices": 518, + "Ġhaving": 519, + "ĠWhen": 520, + "Ġstudents": 521, + "ĠPolice": 522, + "el": 523, + "Ġam": 524, + "ĠZ": 525, + "Ġside": 526, + "Ġstory": 527, + "Ġdue": 528, + "Ġmeeting": 529, + "K": 530, + "Ġmust": 531, + "ĠStates": 532, + "Ġlikely": 533, + "G": 534, + "Ġcontinue": 535, + "Ġago": 536, + "Ġparty": 537, + "Ġmajor": 538, + "Ġindustry": 539, + "Ġless": 540, + "30": 541, + "Ġun": 542, + "Ġhard": 543, + "Ġservice": 544, + "Ġ16": 545, + "Ġlooking": 546, + "Ġheld": 547, + "ve": 548, + "Ġwhether": 549, + "ĠJuly": 550, + "Ġtaken": 551, + "Ġalong": 552, + "Ġasked": 553, + "Ġstarted": 554, + "Ġbecome": 555, + "Ġforward": 556, + "Ġresearch": 557, + "Ġoffice": 558, + "Ġpolitical": 559, + "to": 560, + "Ġtogether": 561, + "Ġgetting": 562, + "Ġplan": 563, + "Ġ25": 564, + "T": 565, + "Ġamong": 566, + "Ġcoming": 567, + "Ġdecision": 568, + "Ġvideo": 569, + "Ġ2015": 570, + "g": 571, + "ĠAfter": 572, + "Ġsecurity": 573, + "L": 574, + "Ġcare": 575, + "Ġgiven": 576, + "Ġavailable": 577, + "âĢĶ": 578, + "Ġs": 579, + "ĠWest": 580, + "'ll": 581, + "Ġpay": 582, + "Ġnear": 583, + "Ġsaying": 584, + "Ġannounced": 585, + "Ġprogram": 586, + "ĠApril": 587, + "Ġreal": 588, + "ĠUniversity": 589, + "ĠWith": 590, + "AP": 591, + "Ġsocial": 592, + "Ġclose": 593, + "et": 594, + "Ġcurrent": 595, + "Ġwhy": 596, + "F": 597, + "ĠTo": 598, + "ĠTwitter": 599, + "Ġthough": 600, + "Ġ17": 601, + "Ġtaking": 602, + "ĠInc": 603, + "Ġmen": 604, + "w": 605, + "Ġcomes": 606, + "ley": 607, + "Ġdoing": 608, + "Ġprocess": 609, + "ĠJohn": 610, + "ch": 611, + "00": 612, + "Ġfinancial": 613, + "Ġlow": 614, + "Ġenough": 615, + "ĠWhile": 616, + "Ġfurther": 617, + "Ġpost": 618, + "Ġfeel": 619, + "st": 620, + "Ġperson": 621, + "ĠFacebook": 622, + "ĠWorld": 623, + "Ġwithin": 624, + "ad": 625, + "Ġdone": 626, + "the": 627, + "Ġlate": 628, + "Ġtax": 629, + "Ġdoesn": 630, + "Ġthing": 631, + "Ġnational": 632, + "Ġjob": 633, + "Ġusing": 634, + "ĠHowever": 635, + "ic": 636, + "Ġcampaign": 637, + "Ġrecord": 638, + "Ġbehind": 639, + "://": 640, + "ĠDepartment": 641, + "p": 642, + "Ġothers": 643, + "ĠJanuary": 644, + "Ġorder": 645, + "Ġ[": 646, + "Ġsales": 647, + 
"Ġyet": 648, + "Ä": 649, + "Ġsmall": 650, + "Ġseries": 651, + "Ġface": 652, + "ĠWhat": 653, + "Ġ50": 654, + "Ġever": 655, + "Ġearlier": 656, + "Ġlove": 657, + "up": 658, + "Ġrights": 659, + "ĠAn": 660, + "ist": 661, + "Ġmorning": 662, + "ĠWashington": 663, + "Ġyoung": 664, + "Ġlatest": 665, + "ĠIndia": 666, + "Ġtrying": 667, + "Ġfire": 668, + "Ġled": 669, + "Ġstrong": 670, + "Ġreturn": 671, + "Ġlevel": 672, + "O": 673, + "Ġaverage": 674, + "Ġperiod": 675, + "Ġexperience": 676, + "ak": 677, + "Ġpossible": 678, + "Ġbelieve": 679, + "Ġinclude": 680, + "Ġoil": 681, + "Ġrecently": 682, + "Ġonce": 683, + "Ġknown": 684, + "Ġlost": 685, + "Ġsure": 686, + "us": 687, + "Ġweeks": 688, + "Ġfood": 689, + "Ġreports": 690, + "Ġrating": 691, + "ĠMinister": 692, + "Ġwoman": 693, + "Ġprovide": 694, + "Ġproject": 695, + "Ġissue": 696, + "Ġlive": 697, + "10": 698, + "Ġclear": 699, + "he": 700, + "Ġcost": 701, + "Ġplayed": 702, + "Ġreleased": 703, + "Ġcoach": 704, + "v": 705, + "Ġ24": 706, + "Ġseven": 707, + "Ġplans": 708, + "Ġdevelopment": 709, + "ur": 710, + "ĺ": 711, + "Ġincrease": 712, + "This": 713, + "Ġpolicy": 714, + "Ġcent": 715, + "Ġbased": 716, + "E": 717, + "il": 718, + "ĠDecember": 719, + "Ġglobal": 720, + "Ġtrade": 721, + "Ġhours": 722, + "Ġhigher": 723, + "Ġgoal": 724, + "H": 725, + "ĠAl": 726, + "Ġ100": 727, + "Ġminutes": 728, + "Ġelection": 729, + "ĠAmerica": 730, + "Ġrate": 731, + "ĠCh": 732, + "Ġ21": 733, + "...": 734, + "ĠWhite": 735, + "Ġdirector": 736, + "Ġposition": 737, + "Ġshot": 738, + "Ġlarge": 739, + "Ġc": 740, + "Ġb": 741, + "]": 742, + "Ġissues": 743, + "Ġdeath": 744, + "Ġbuilding": 745, + "Ġtotal": 746, + "Ġoften": 747, + "Ġv": 748, + "Ġcountries": 749, + "Ġhistory": 750, + "Ġoutside": 751, + "Ġfederal": 752, + "Ġ19": 753, + "Ġfact": 754, + "ĠHigh": 755, + "Ġcareer": 756, + "im": 757, + "Ġinternational": 758, + "ĠNovember": 759, + "Ġfront": 760, + "Ġkind": 761, + "Ġkey": 762, + "ra": 763, + "ĠSan": 764, + "Ġshort": 765, + "Ġname": 766, + "ĠAccording": 767, + "Ġcourse": 768, + "Ġre": 769, + "Ġwanted": 770, + "W": 771, + "ĠSeptember": 772, + "Ġinterest": 773, + "Ġrole": 774, + "Ġresults": 775, + "Ġeconomic": 776, + "Ġ2014": 777, + "Ġchance": 778, + "ĠOctober": 779, + "Ġspecial": 780, + "Ġofficial": 781, + "Ġneeds": 782, + "um": 783, + "Ġl": 784, + "Ġproducts": 785, + "Ġnon": 786, + "Ġ@": 787, + "ĠBank": 788, + "Ġahead": 789, + "Ġhouse": 790, + "U": 791, + "Ġboard": 792, + "Ġold": 793, + "Ġsaw": 794, + "Ġlower": 795, + "ĠEuropean": 796, + "Ġcontrol": 797, + "ĠRussia": 798, + "Ġeight": 799, + "Ġrelease": 800, + "Ġpotential": 801, + "Ġthought": 802, + "Ġinvestigation": 803, + "Ġonline": 804, + "based": 805, + "Ġtechnology": 806, + "ĠDonald": 807, + "id": 808, + "Ġbody": 809, + "Ġrisk": 810, + "ian": 811, + "Ġcapital": 812, + "Ġstaff": 813, + "Ġaction": 814, + "ĠLeague": 815, + "Ġplaying": 816, + "Ġmakes": 817, + "Ġalmost": 818, + "Ġperformance": 819, + "Ġ22": 820, + "Ġg": 821, + "Ġfilm": 822, + "Ġnearly": 823, + "ĠCenter": 824, + "Ġvisit": 825, + "ĠGroup": 826, + "Ġbank": 827, + "Ġbit": 828, + "Ġreceived": 829, + "ĠAugust": 830, + "Ġmilitary": 831, + "ĠHis": 832, + "ine": 833, + "Ġchief": 834, + "ĠSchool": 835, + "Ġbring": 836, + "ĠCourt": 837, + "Ġ(@": 838, + "Ġmeans": 839, + "ĠSh": 840, + "Ġfans": 841, + "Ġse": 842, + "Ġ40": 843, + "20": 844, + "\".": 845, + "V": 846, + "Ġcut": 847, + "Ġkilled": 848, + "Ġ#": 849, + "Ġprices": 850, + "Ġgave": 851, + "ĠStreet": 852, + "ir": 853, + "ĠY": 854, + "Ġcurrently": 855, + "Ġf": 856, + "ay": 857, + "ne": 858, + "te": 859, + "Ġtry": 860, + 
"ĠPark": 861, + "ĥ": 862, + "J": 863, + "Ġquestion": 864, + "Ġhand": 865, + "Ġeconomy": 866, + "Ġinvestors": 867, + "able": 868, + "Ġplayer": 869, + "ĠBy": 870, + "ĠDavid": 871, + "Ġloss": 872, + "ab": 873, + "Ġbelow": 874, + "Ġwrote": 875, + "co": 876, + "ate": 877, + "Ġrunning": 878, + "un": 879, + "Ġbegan": 880, + "Ġsingle": 881, + "Ġfield": 882, + "Ġ23": 883, + "Ġleader": 884, + "Ġw": 885, + "ĠCalifornia": 886, + "Ġfourth": 887, + "Ġactually": 888, + "Ġlist": 889, + "ll": 890, + "Ġcouple": 891, + "Ġstudy": 892, + "Ġteams": 893, + "He": 894, + "ah": 895, + "ĠCanada": 896, + "Ġla": 897, + "Ġresult": 898, + "Ġaccess": 899, + "Ġvote": 900, + "ĠMore": 901, + "ĠFebruary": 902, + "Ġrevenue": 903, + "Ġoffer": 904, + "Ġlet": 905, + "ier": 906, + "Ġbuy": 907, + "Ġattack": 908, + "Ġblack": 909, + "Ġr": 910, + "Ġareas": 911, + "Ġstop": 912, + "Ġimpact": 913, + "Ġmatch": 914, + "Ġinvestment": 915, + "Ġcustomers": 916, + "Ġleaders": 917, + "ies": 918, + "Ġmember": 919, + "Ġchild": 920, + "Ġroad": 921, + "ul": 922, + "Ġvalue": 923, + "Ġshows": 924, + "ĠDr": 925, + "ĠDe": 926, + "ant": 927, + "ĠLondon": 928, + "Ġroom": 929, + "Ġmusic": 930, + "Ġproduction": 931, + "Ġanything": 932, + "Ġfirm": 933, + "Ġbiggest": 934, + "Ġair": 935, + "Ġproblem": 936, + "Ġgeneral": 937, + "Ġwasn": 938, + "Ġi": 939, + "Ġprivate": 940, + "Ġespecially": 941, + "Ġadministration": 942, + "Ġadditional": 943, + "ĠCo": 944, + "Ġopportunity": 945, + "Ġhold": 946, + "&": 947, + "Ġmatter": 948, + "Ġsenior": 949, + "Ġclub": 950, + "Ġsomeone": 951, + "ĠÃ": 952, + "ĠEast": 953, + "Ġ2019": 954, + ".'": 955, + "Ġneeded": 956, + "ĠJames": 957, + "time": 958, + "Ġhowever": 959, + "Ġeverything": 960, + "Ġeveryone": 961, + "Ġdied": 962, + "Ġinvolved": 963, + "Ġfriends": 964, + "Ġisn": 965, + "Ġworth": 966, + "ik": 967, + "ĠCup": 968, + "Ġshowed": 969, + "There": 970, + "Ġ28": 971, + "Ġmeet": 972, + "Ġ26": 973, + "Ġ27": 974, + "Y": 975, + "Ġregion": 976, + "ĠPress": 977, + "ĠNow": 978, + "Ġson": 979, + "Ġspace": 980, + "Ġleading": 981, + "Ġstates": 982, + "Ġweekend": 983, + "Ġ£": 984, + "Ġmother": 985, + "Ġprevious": 986, + "ĠUK": 987, + "ĠMichael": 988, + "Ġleave": 989, + "est": 990, + "em": 991, + "Ġz": 992, + "ĠSome": 993, + "ors": 994, + "out": 995, + "15": 996, + "Ġwar": 997, + "Ġwebsite": 998, + "Ġstar": 999, + "X": 1000, + "ro": 1001, + "Ġtarget": 1002, + "Ġhimself": 1003, + "Ġturn": 1004, + "ĠEurope": 1005, + "Ġworked": 1006, + "Ġenergy": 1007, + "Ġscored": 1008, + "Ġ*": 1009, + "Ġsoon": 1010, + "Ġball": 1011, + "ĠTV": 1012, + "Ġannual": 1013, + "Ġ2013": 1014, + "Ġrace": 1015, + "ĠInternational": 1016, + "'d": 1017, + "ĠMarket": 1018, + "Ġconference": 1019, + "io": 1020, + "Ġo": 1021, + "Ġchanges": 1022, + "ig": 1023, + "Ġofficers": 1024, + "Ġinside": 1025, + "Ġform": 1026, + "Ġpublished": 1027, + "Ġphone": 1028, + "Ġco": 1029, + "Ġlegal": 1030, + "Ġexecutive": 1031, + "Ġfight": 1032, + "ings": 1033, + "Ġhope": 1034, + "Ġsummer": 1035, + "Ġofficer": 1036, + "Ġfootball": 1037, + "Ġproperty": 1038, + "@": 1039, + "Ġbook": 1040, + "Ġparents": 1041, + "Ġcosts": 1042, + "ac": 1043, + "Ġmanager": 1044, + "Ġcreate": 1045, + "Ġage": 1046, + "Ġemail": 1047, + "Ġmarkets": 1048, + "Ġmain": 1049, + "Ġhuman": 1050, + "Ġsent": 1051, + "Ġmanagement": 1052, + "ĠDay": 1053, + "ton": 1054, + "Ġcash": 1055, + "Ġfocus": 1056, + "Ġexpect": 1057, + "Ġtraining": 1058, + "Ġbecame": 1059, + "Ġwhose": 1060, + "Ġevents": 1061, + "Ġround": 1062, + "ĠLe": 1063, + "Ġfell": 1064, + "Ġabove": 1065, + "Ġanalysts": 1066, + "Ġtalk": 1067, + "Ġsituation": 1068, + 
"ri": 1069, + "ated": 1070, + "ke": 1071, + "Ġwants": 1072, + "ag": 1073, + "Ġlives": 1074, + "om": 1075, + "Ġal": 1076, + "Ġdemand": 1077, + "Ġsafety": 1078, + "Ġrest": 1079, + "ĠCouncil": 1080, + "Ġpersonal": 1081, + "Ġsite": 1082, + "ĠRussian": 1083, + "Ġmid": 1084, + "Ġnothing": 1085, + "Ġwhole": 1086, + "Ġbill": 1087, + "Ġsold": 1088, + "ĠBritish": 1089, + "se": 1090, + "Ġremain": 1091, + "12": 1092, + "Ġforeign": 1093, + "Ġshooting": 1094, + "Ġstay": 1095, + "50": 1096, + "ang": 1097, + "Ġhospital": 1098, + "Ġbad": 1099, + "Ġaddress": 1100, + "ĠKorea": 1101, + "Ġhappened": 1102, + "Ġcharges": 1103, + "Ġwhite": 1104, + "Ġ31": 1105, + "If": 1106, + "Ġearnings": 1107, + "Ġbreak": 1108, + "Ġlight": 1109, + "Ġterms": 1110, + "ĠChinese": 1111, + "ĠSenate": 1112, + "ana": 1113, + "Ġidea": 1114, + "ap": 1115, + "of": 1116, + "Ġnine": 1117, + "Ġcompared": 1118, + "Ġbuild": 1119, + "ard": 1120, + "In": 1121, + "Ġsimilar": 1122, + "Ġgas": 1123, + "Ġvictory": 1124, + "Ġ2012": 1125, + "Ġdebt": 1126, + "ĠMar": 1127, + "Ġarrested": 1128, + "Ġcomment": 1129, + "Ġincreased": 1130, + "Ġmedical": 1131, + "Ġ29": 1132, + "ĠJan": 1133, + "Ġgroups": 1134, + "Ġdespite": 1135, + "Ġfall": 1136, + "Ġtell": 1137, + "Ġworkers": 1138, + "Ġtown": 1139, + "é": 1140, + "Ġwife": 1141, + "Ġquestions": 1142, + "Ġcontinued": 1143, + "Ġheart": 1144, + "Ġmet": 1145, + "Ġbrought": 1146, + "Ġhelped": 1147, + "ĠCongress": 1148, + "Ġstep": 1149, + "Ġfather": 1150, + "Ġmoment": 1151, + "Ġproduct": 1152, + "Ġprobably": 1153, + "Ġlargest": 1154, + "Ġvehicle": 1155, + "ĠEngland": 1156, + "Ġallow": 1157, + "Ġstarting": 1158, + "Ġkids": 1159, + "Ġincident": 1160, + "Ġnet": 1161, + "Ġrates": 1162, + "ĠRead": 1163, + "Ġpressure": 1164, + "Ġincluded": 1165, + "Ġread": 1166, + "Ġissued": 1167, + "ol": 1168, + "Ġeither": 1169, + "Ġefforts": 1170, + "Ġincludes": 1171, + "ĠRepublican": 1172, + "ish": 1173, + "âĢ¦": 1174, + "Ġgoals": 1175, + "aj": 1176, + "Ġen": 1177, + "x": 1178, + "Ġraised": 1179, + "au": 1180, + "Ġlonger": 1181, + "ut": 1182, + "Ġwatch": 1183, + "ĠTexas": 1184, + "You": 1185, + "Ġrange": 1186, + "nd": 1187, + "Ġfunds": 1188, + "Ġremains": 1189, + "ĠMark": 1190, + "Ġ60": 1191, + "Ġque": 1192, + "sh": 1193, + "Ġinterview": 1194, + "Ġrather": 1195, + "Ġresidents": 1196, + "Ġgrowing": 1197, + "Ġpre": 1198, + "Ġpaid": 1199, + "Ġcases": 1200, + "ĠReuters": 1201, + "Ġdifficult": 1202, + "Ġsign": 1203, + "ĠGoogle": 1204, + "Ġhttps": 1205, + "ĠPaul": 1206, + "Ġliving": 1207, + "day": 1208, + "ĠQ": 1209, + "iz": 1210, + "ĠRed": 1211, + "Ġland": 1212, + "They": 1213, + "ĠRoad": 1214, + "_": 1215, + "ĠThese": 1216, + "Ġview": 1217, + "Ġagency": 1218, + "Ġreason": 1219, + "Ġallowed": 1220, + "ĠAustralia": 1221, + "az": 1222, + "ĠRe": 1223, + "Ġturned": 1224, + "11": 1225, + "Ġnation": 1226, + "Ġready": 1227, + "Ġpress": 1228, + "Ġbudget": 1229, + "Ġdaily": 1230, + "ĠChief": 1231, + "Ġfamilies": 1232, + "Ġsignificant": 1233, + "ĠFirst": 1234, + "Ġthemselves": 1235, + "Ġj": 1236, + "Ġruns": 1237, + "Ġaccused": 1238, + "Ġtakes": 1239, + "Ġspent": 1240, + "Ġvia": 1241, + "ot": 1242, + "ina": 1243, + "25": 1244, + "land": 1245, + "Ġexample": 1246, + "Ġauthorities": 1247, + "Ġdate": 1248, + "Ġended": 1249, + "all": 1250, + "Reuters": 1251, + "Ġbusinesses": 1252, + "ans": 1253, + "Ġdetails": 1254, + "Ġground": 1255, + "Ġpretty": 1256, + "ĠApple": 1257, + "ation": 1258, + "ĠSmith": 1259, + "ĠCompany": 1260, + "ĠFlorida": 1261, + "Ġdrug": 1262, + "Ġresponse": 1263, + "one": 1264, + "Ġeducation": 1265, + "Ġmean": 1266, + "Ġleague": 1267, + 
"Ġanyone": 1268, + "Ġminister": 1269, + "Ġtitle": 1270, + "Ġadding": 1271, + "Ġproblems": 1272, + "Ġopening": 1273, + "Ġconditions": 1274, + "Ġred": 1275, + "Ġdecided": 1276, + "Å": 1277, + "Ġposted": 1278, + "term": 1279, + "Ġamount": 1280, + "ĠEU": 1281, + "Ġsuccess": 1282, + "Ġevidence": 1283, + "ĠObama": 1284, + "Ġaddition": 1285, + "Ġprovided": 1286, + "ĠLos": 1287, + "Ġagreement": 1288, + "Ġstage": 1289, + "ens": 1290, + "Ġrelationship": 1291, + "ĠGeneral": 1292, + "Ġsector": 1293, + "Ġstudent": 1294, + "ating": 1295, + "Ġtest": 1296, + "\",": 1297, + "Ġwinning": 1298, + "Ġfelt": 1299, + "Ġsource": 1300, + "Z": 1301, + "Ġseems": 1302, + "Ġcause": 1303, + "Ġschools": 1304, + "Ġdrive": 1305, + "Ġensure": 1306, + "Ġhuge": 1307, + "ĠMy": 1308, + "ĠHealth": 1309, + "Ġscene": 1310, + "Ġgiving": 1311, + "Ġcenter": 1312, + "Ġpositive": 1313, + "Ġyards": 1314, + "Ġjobs": 1315, + "Ġaccount": 1316, + "Ġheard": 1317, + "Ġquality": 1318, + "Ġways": 1319, + "Ġimmediately": 1320, + "Ġemployees": 1321, + "are": 1322, + "Ġpass": 1323, + "ĠCEO": 1324, + "Ġreceive": 1325, + "Ġlooks": 1326, + "ĠAfrica": 1327, + "Ġthroughout": 1328, + "led": 1329, + "Ġrelated": 1330, + "Ġsell": 1331, + "ĠUnion": 1332, + "ĠPhoto": 1333, + "ter": 1334, + "Ġquickly": 1335, + "ĠHow": 1336, + "Ġvarious": 1337, + "Ġreach": 1338, + "Ġpick": 1339, + "Ġcharged": 1340, + "Ġquite": 1341, + "ent": 1342, + "q": 1343, + "ins": 1344, + "Ġphoto": 1345, + "Ġunderstand": 1346, + "ĠâĢ¢": 1347, + "Ġreached": 1348, + "Ġtrack": 1349, + "uk": 1350, + "Ġeffort": 1351, + "ville": 1352, + "Ġcentral": 1353, + "Ġdaughter": 1354, + "Ġcontract": 1355, + "Ġinjury": 1356, + "Ġopened": 1357, + "Ġ($": 1358, + "Ġstraight": 1359, + "17": 1360, + "Ġcredit": 1361, + "ĠIndian": 1362, + "Ġsexual": 1363, + "Ġworks": 1364, + "Ġeasy": 1365, + "18": 1366, + "Ġclosed": 1367, + "Ġh": 1368, + "Ġhappen": 1369, + "Ġforce": 1370, + "ler": 1371, + "Ġhappy": 1372, + "Ġshared": 1373, + "Ġoverall": 1374, + "Ġmoving": 1375, + "á": 1376, + "Ġprojects": 1377, + "ĠBlack": 1378, + "Ġconcerns": 1379, + "Ġclass": 1380, + "Ġtried": 1381, + "Ġappeared": 1382, + "Ġcontent": 1383, + "ĠDistrict": 1384, + "Ġterm": 1385, + "Ġinstead": 1386, + "ĠOffice": 1387, + "Ġcontinues": 1388, + "Ġlevels": 1389, + "Ġafternoon": 1390, + "Ġfund": 1391, + "Ġsale": 1392, + "Ġdriver": 1393, + "Ġask": 1394, + "Ġcannot": 1395, + "ner": 1396, + "end": 1397, + "ĠHere": 1398, + "field": 1399, + "Ġstore": 1400, + "www": 1401, + "Ġcertain": 1402, + "Ġself": 1403, + "Ġdollar": 1404, + "ĠHer": 1405, + "Ġpopular": 1406, + "Ġfollow": 1407, + "Ġspending": 1408, + "by": 1409, + "Ġmoved": 1410, + "Ġgoes": 1411, + "Ġcreated": 1412, + "Ġstand": 1413, + "Ġoperations": 1414, + "Ġlooked": 1415, + "Ġtreatment": 1416, + "ov": 1417, + "Ġdistrict": 1418, + "Ġsigned": 1419, + "Ġhands": 1420, + "Ġmodel": 1421, + "ĠAngeles": 1422, + "Ġy": 1423, + "Ġborder": 1424, + "Ġincome": 1425, + "ĠLast": 1426, + "Ġcharge": 1427, + "Ġdriving": 1428, + "ĠJapan": 1429, + "Ġrise": 1430, + "Ġtalks": 1431, + "Ġfollowed": 1432, + "Ġpreviously": 1433, + "Ġusers": 1434, + "Ġfunding": 1435, + "ĠJohnson": 1436, + "Ġ": 1437, + "ou": 1438, + "ai": 1439, + "Ġnamed": 1440, + "Ġfriend": 1441, + "ĠNov": 1442, + "Ġdefense": 1443, + "ĠBritain": 1444, + "Ġentire": 1445, + "Ġtrading": 1446, + "Ġfailed": 1447, + "ĠEl": 1448, + "Ġclaims": 1449, + "Ġcomments": 1450, + "Ġbeat": 1451, + "ib": 1452, + "Ġbasis": 1453, + "ĠJones": 1454, + "Ġpresent": 1455, + "ĠBe": 1456, + "Ġdouble": 1457, + "Ġrose": 1458, + "ite": 1459, + "Ġability": 1460, + "Ġoriginal": 1461, + 
"Ġdead": 1462, + "ĠCommission": 1463, + "ĠMe": 1464, + "Ġcompetition": 1465, + "Ġ2011": 1466, + "Ġknew": 1467, + "Ġmaterial": 1468, + "av": 1469, + "ĠFrance": 1470, + "Ġscore": 1471, + "Ġsense": 1472, + "Ġserious": 1473, + "Ġconfirmed": 1474, + "Ġanti": 1475, + "Ġviolence": 1476, + "Ġimprove": 1477, + "son": 1478, + "ó": 1479, + "ĠAP": 1480, + "Ġsh": 1481, + "Ġhost": 1482, + "ĠMike": 1483, + "Ġpatients": 1484, + "ĠNFL": 1485, + "Ġcrisis": 1486, + "Ġrevealed": 1487, + "ach": 1488, + "ĠPrime": 1489, + "Ġbuilt": 1490, + "ĠNot": 1491, + "Ġrules": 1492, + "Ġelse": 1493, + "Ġdepartment": 1494, + "Ġitself": 1495, + "ise": 1496, + "500": 1497, + "Ġcomplete": 1498, + "ion": 1499, + "Ġtrial": 1500, + "ĠBay": 1501, + "ĠDec": 1502, + "Ġattention": 1503, + "Ġtravel": 1504, + "ĠCentral": 1505, + "ry": 1506, + "Ġagreed": 1507, + "Ġmind": 1508, + "ĠMc": 1509, + "Ġ70": 1510, + "Ġcontact": 1511, + "ari": 1512, + "ĠTimes": 1513, + "Ġspot": 1514, + "ĠFrench": 1515, + "Ġgets": 1516, + "op": 1517, + "Ġbrand": 1518, + "Ġcalls": 1519, + "Ġbanks": 1520, + "Ġdesign": 1521, + "Ġsafe": 1522, + "Ġoffers": 1523, + "Ġpractice": 1524, + "ĠOf": 1525, + "á": 1526, + "ling": 1527, + "Ġtrue": 1528, + "off": 1529, + "Ġnumbers": 1530, + "Ġfun": 1531, + "Ġlearn": 1532, + "Ġmultiple": 1533, + "ĠIs": 1534, + "res": 1535, + "als": 1536, + "Ġcommon": 1537, + "ized": 1538, + "Ġchallenge": 1539, + "Ġcommittee": 1540, + "ĠOur": 1541, + "Ġbase": 1542, + "ani": 1543, + "ĠAssociation": 1544, + "ung": 1545, + "Ġnetwork": 1546, + "ĠBrown": 1547, + "Ġapproach": 1548, + "16": 1549, + "Ġfinished": 1550, + "Ġreview": 1551, + "Ġrequired": 1552, + "Ġapp": 1553, + "ĠMan": 1554, + "ĠâĢ¦": 1555, + "twitter": 1556, + "ĠDemocratic": 1557, + "13": 1558, + "Ġevening": 1559, + "ĠTom": 1560, + "ä": 1561, + "ĠAssociated": 1562, + "ĠCanadian": 1563, + "Ġcollege": 1564, + "Ġspokesman": 1565, + "Ġarticle": 1566, + "Ġtowards": 1567, + "ĠChicago": 1568, + "Ġmovie": 1569, + "14": 1570, + "ity": 1571, + "Ġforces": 1572, + "ĠChris": 1573, + "ĠDemocrats": 1574, + "Ġfeatures": 1575, + "Ġhearing": 1576, + "ĠX": 1577, + "ĠAlso": 1578, + "Ġmessage": 1579, + "age": 1580, + "Ġnoted": 1581, + "ĠSuper": 1582, + "Ġthousands": 1583, + "aw": 1584, + "ĠBill": 1585, + "ĠAr": 1586, + "ĠLa": 1587, + "ip": 1588, + "Ġ/": 1589, + "ĠDuring": 1590, + "Ġnote": 1591, + ".)": 1592, + "Ġwrong": 1593, + "if": 1594, + "Ġpassed": 1595, + "ĠTwo": 1596, + "Ġdie": 1597, + ",'": 1598, + "ĠDon": 1599, + "ĠGermany": 1600, + "Ġletter": 1601, + "Ġdescribed": 1602, + "ĠIran": 1603, + "ĠWilliams": 1604, + "Ġparticularly": 1605, + "Ġadd": 1606, + "Ġconversation": 1607, + "ĠSe": 1608, + "Ġhighest": 1609, + "be": 1610, + "Ġhomes": 1611, + "Ġsports": 1612, + "Ġgone": 1613, + "ĠAd": 1614, + "Ġel": 1615, + "Ġopportunities": 1616, + "Ġwords": 1617, + "Ġleaving": 1618, + "ĠChristmas": 1619, + "As": 1620, + "ĠGovernment": 1621, + "Ġsimply": 1622, + "Ġhusband": 1623, + "ĠResearch": 1624, + "ĠMexico": 1625, + "ates": 1626, + "ale": 1627, + "ĠGreen": 1628, + "$": 1629, + "od": 1630, + "ĠHall": 1631, + "Ġnatural": 1632, + "Ġoperating": 1633, + "les": 1634, + "ations": 1635, + "ĠKim": 1636, + "Ġgold": 1637, + "ok": 1638, + "Ġprovides": 1639, + "(": 1640, + "ell": 1641, + "Ġbegin": 1642, + "ĠParty": 1643, + "back": 1644, + "ĠAmazon": 1645, + "19": 1646, + "Ġmajority": 1647, + "ĠEven": 1648, + "Ġcheck": 1649, + "Ġweather": 1650, + "Ġorganization": 1651, + "Ġstories": 1652, + "ĠCar": 1653, + "Ġforced": 1654, + "ĠGeorge": 1655, + "Ġwalk": 1656, + "ong": 1657, + "Ġfiled": 1658, + "ĠJustice": 1659, + "Ġlaunched": 1660, 
+ "Ġoffered": 1661, + "Ġwww": 1662, + "Ġconstruction": 1663, + "ĠBen": 1664, + "Ġserved": 1665, + "Ġ...": 1666, + "Ġparts": 1667, + "Ġcancer": 1668, + "Ġguys": 1669, + "Reporting": 1670, + "ash": 1671, + "less": 1672, + "Ġleadership": 1673, + "ĠCommittee": 1674, + "Ġregular": 1675, + "Ġcouncil": 1676, + "Ġcars": 1677, + "ĠDirector": 1678, + "Ġjudge": 1679, + "Ġvictims": 1680, + "ĠDaily": 1681, + "Ġkept": 1682, + "Ġeffect": 1683, + "Ġbeyond": 1684, + "pm": 1685, + "Ġtalking": 1686, + "Ġconsidered": 1687, + "ore": 1688, + "ĠAdvertisement": 1689, + "Ġst": 1690, + "ED": 1691, + "Ġmiddle": 1692, + "Ġraise": 1693, + "we": 1694, + "Ġclaimed": 1695, + "ino": 1696, + "Ġalleged": 1697, + "ĠPro": 1698, + "ĠScott": 1699, + "ĠOct": 1700, + "Ġconsider": 1701, + "ĠShare": 1702, + "Ġtraffic": 1703, + "ĠAfrican": 1704, + "Ġcouldn": 1705, + "Ġtoward": 1706, + "Ġsearch": 1707, + "But": 1708, + "Ġlaunch": 1709, + "Ġinjured": 1710, + "That": 1711, + "Ġalthough": 1712, + "Ġactivities": 1713, + "Ġchanged": 1714, + "Ġsources": 1715, + "Ġmissing": 1716, + "Ġu": 1717, + "Ġ35": 1718, + "Ġcover": 1719, + "ised": 1720, + "Ġ|": 1721, + "ow": 1722, + "ES": 1723, + "Ġdecades": 1724, + "ich": 1725, + "Ġcaused": 1726, + "Ġelections": 1727, + "ane": 1728, + "IS": 1729, + "Ġfeet": 1730, + "ĠBar": 1731, + "Ġversion": 1732, + "Ġgrow": 1733, + "Ġvehicles": 1734, + "Ġoptions": 1735, + "Ġindividual": 1736, + "Ġenvironment": 1737, + "ĠRobert": 1738, + "ĠValley": 1739, + "ĠFrom": 1740, + "per": 1741, + "ara": 1742, + "Ġsystems": 1743, + "Ġprotect": 1744, + "ĠKing": 1745, + "Ġinjuries": 1746, + "Ġfinally": 1747, + "Ġnuclear": 1748, + "40": 1749, + "Ġratio": 1750, + "Ġgun": 1751, + "ĠPakistan": 1752, + "ĠManagement": 1753, + "ĠAir": 1754, + "ce": 1755, + "Ġopposition": 1756, + "ment": 1757, + "ick": 1758, + "Ġpro": 1759, + "Ġact": 1760, + "Ġplatform": 1761, + "Ġlack": 1762, + "Ġpair": 1763, + "Ġ500": 1764, + "Ġcalling": 1765, + "ary": 1766, + "Ġprograms": 1767, + "Ġscheduled": 1768, + "Ġfast": 1769, + "Ġjoined": 1770, + "ĠWar": 1771, + "ĠEditing": 1772, + "ĠSince": 1773, + "ĠRyan": 1774, + "ĠMac": 1775, + "ĠBig": 1776, + "ĠLake": 1777, + "Ġdigital": 1778, + "When": 1779, + "ue": 1780, + "Ġassets": 1781, + "Ġseeing": 1782, + "ĠAct": 1783, + "Ġpartner": 1784, + "ĠBoard": 1785, + "Ġbeginning": 1786, + "Ġsupply": 1787, + "Ġmiles": 1788, + "Ġprison": 1789, + "ons": 1790, + "ĠAmericans": 1791, + "ub": 1792, + "ĠOr": 1793, + "me": 1794, + "Ġbenefits": 1795, + "Ġbenefit": 1796, + "Ġmeasures": 1797, + "Ġhear": 1798, + "Ġparties": 1799, + "Ġsuccessful": 1800, + "ĠJust": 1801, + "Ġvictim": 1802, + "Ġblock": 1803, + "Ġlimited": 1804, + "Ġtrip": 1805, + "ĠPeople": 1806, + "Ġserve": 1807, + "Ġart": 1808, + "ism": 1809, + "Ġwide": 1810, + "ĠSch": 1811, + "Ġ80": 1812, + "ĠThomas": 1813, + "Ġ90": 1814, + "Ġstocks": 1815, + "Ġgirl": 1816, + "ĠAsia": 1817, + "Ġseeking": 1818, + "Ġcertainly": 1819, + "ĠServices": 1820, + "ĠCollege": 1821, + "Ġcommunities": 1822, + "Ġextra": 1823, + "Ġ2010": 1824, + "ness": 1825, + "Ġholding": 1826, + "ous": 1827, + "Ġtough": 1828, + "ade": 1829, + "Ġmobile": 1830, + "Ġowns": 1831, + "ĠDo": 1832, + "ĠFire": 1833, + "Ġspoke": 1834, + "Ġreturned": 1835, + "Ġsize": 1836, + "Ġcriminal": 1837, + "ĠInstagram": 1838, + "Ġoffering": 1839, + "ĠGod": 1840, + "ĠService": 1841, + "Ġpage": 1842, + "her": 1843, + "Ġdeep": 1844, + "wood": 1845, + "Ġcrime": 1846, + "ĠSports": 1847, + "ile": 1848, + "ĠGlobal": 1849, + "Ġproposed": 1850, + "ain": 1851, + "Ġsession": 1852, + "ĠFederal": 1853, + "ĠSyria": 1854, + "Ġch": 1855, + "Ġthreat": 
1856, + "Ġallegations": 1857, + "ĠRepublicans": 1858, + "ĠGerman": 1859, + "Ġstrategy": 1860, + "Ġcommercial": 1861, + "ING": 1862, + "ĠSecretary": 1863, + "Q": 1864, + "Ġreporters": 1865, + "100": 1866, + "ĠCapital": 1867, + "ĠBoth": 1868, + "ĠPost": 1869, + "ĠIsrael": 1870, + "Ġsave": 1871, + "ts": 1872, + "ill": 1873, + "Ġdrop": 1874, + "Ġreserved": 1875, + "ĠMany": 1876, + "Ġavoid": 1877, + "Ġ200": 1878, + "iv": 1879, + "Ġdamage": 1880, + "Ġcondition": 1881, + "Ġdropped": 1882, + "Ġdoor": 1883, + "Ġplanning": 1884, + "ire": 1885, + "Ġcard": 1886, + "Ġdesigned": 1887, + "Ġreduce": 1888, + "AN": 1889, + "ĠUn": 1890, + "ford": 1891, + "ĠThen": 1892, + "Ġpic": 1893, + "ĠCopyright": 1894, + "Ġrain": 1895, + "ĠMartin": 1896, + "Ġdomestic": 1897, + "45": 1898, + "ge": 1899, + "Ġmurder": 1900, + "Ġspeech": 1901, + "line": 1902, + "Ġhelping": 1903, + "Ġplanned": 1904, + "Ġfeature": 1905, + "ud": 1906, + "Ġtype": 1907, + "ham": 1908, + "ĠPublic": 1909, + "ja": 1910, + "Ġinsurance": 1911, + "Ġattacks": 1912, + "ĠCorp": 1913, + "Ġforecast": 1914, + "Ġresources": 1915, + "ma": 1916, + "?\"": 1917, + "ĠAm": 1918, + "ĠSept": 1919, + "Ġpush": 1920, + "Ġattorney": 1921, + "23": 1922, + "Ġemergency": 1923, + "Ġwinner": 1924, + "Ġblood": 1925, + "Ġnorth": 1926, + "ĠFeb": 1927, + "Ġbaby": 1928, + "Ġfloor": 1929, + "Ġspend": 1930, + "Ġex": 1931, + "Ġdollars": 1932, + "Ġunit": 1933, + "ĠHill": 1934, + "Ġder": 1935, + "ĠAbout": 1936, + "Ġalone": 1937, + "ization": 1938, + "Ġpresidential": 1939, + "Ġactivity": 1940, + "ĠTHE": 1941, + "ee": 1942, + "ber": 1943, + "ĠOther": 1944, + "Ġowner": 1945, + "Ġhour": 1946, + "Ġcities": 1947, + "Ġanswer": 1948, + "ide": 1949, + "Ġfully": 1950, + "ek": 1951, + "ists": 1952, + "Ġcoverage": 1953, + "Ġvs": 1954, + "Ġfigure": 1955, + "Ġpopulation": 1956, + "org": 1957, + "Ġsnow": 1958, + "Ġbecoming": 1959, + "ĠSam": 1960, + "ĠCarolina": 1961, + "Ġjoin": 1962, + "Ġprofit": 1963, + "Ġitems": 1964, + "Ġindex": 1965, + "Ġanalysis": 1966, + "Ġtournament": 1967, + "Ġstake": 1968, + "Ġperfect": 1969, + "way": 1970, + "Ġband": 1971, + "Ġgirls": 1972, + "Ġoption": 1973, + "Ġplays": 1974, + "oc": 1975, + "Ġproviding": 1976, + "ÃŃ": 1977, + "24": 1978, + "Ġwouldn": 1979, + "Ġones": 1980, + "Ġdeclined": 1981, + "Ġwritten": 1982, + "Ġvoters": 1983, + "Ġcandidate": 1984, + "Ġsuspect": 1985, + "Ġpolicies": 1986, + "Ġpeace": 1987, + "ast": 1988, + "Ġparticular": 1989, + "for": 1990, + "Ġhopes": 1991, + "Ġstation": 1992, + "ĠMost": 1993, + "Ġspeak": 1994, + "ĠRiver": 1995, + "Ġasking": 1996, + "Ġstatements": 1997, + "Ġfifth": 1998, + "ha": 1999, + "ĠNigeria": 2000, + "af": 2001, + "Ġexplained": 2002, + "Ġbar": 2003, + "Ġhousing": 2004, + "ĠSanta": 2005, + "Ġidentified": 2006, + "Ġsimple": 2007, + "Ġcritical": 2008, + "ĠClub": 2009, + "ĠSecurity": 2010, + "ĠLike": 2011, + "Ġstarts": 2012, + "art": 2013, + "Ġstreet": 2014, + "Ġreality": 2015, + "Ġheavy": 2016, + "Ġprogress": 2017, + "Ġshowing": 2018, + "Ġchallenges": 2019, + "Ġban": 2020, + "Ġcommitted": 2021, + "35": 2022, + "»": 2023, + "Ġdirectly": 2024, + "Ġaren": 2025, + "Ġclaim": 2026, + "ĠWestern": 2027, + "ind": 2028, + "Ġgives": 2029, + "ĠSaudi": 2030, + "Ġchoice": 2031, + "ĠTh": 2032, + "Ġapproved": 2033, + "Ġlocated": 2034, + "Ġarrived": 2035, + "22": 2036, + "Ġcaught": 2037, + "Ġprofessional": 2038, + "Ġmissed": 2039, + "Ġculture": 2040, + "ĠYear": 2041, + "ĠOhio": 2042, + "ĠLtd": 2043, + "ĠAnother": 2044, + "Ġseem": 2045, + "Ġbelieves": 2046, + "Ġbelieved": 2047, + "Ġcharacter": 2048, + "ĠAug": 2049, + "red": 2050, + "Ġfine": 2051, 
+ "Ġprior": 2052, + "Ġthinking": 2053, + "Ġhttp": 2054, + "Ġ+": 2055, + "Ġzone": 2056, + "Ġputting": 2057, + "Ġcrash": 2058, + "ĠAustralian": 2059, + "ĠAb": 2060, + "Ġfocused": 2061, + "ĠREUTERS": 2062, + "ĠFox": 2063, + "ĠSp": 2064, + "Ġtraditional": 2065, + "Ġanalyst": 2066, + "Ġwait": 2067, + "IT": 2068, + "Ġrequest": 2069, + "ru": 2070, + "ians": 2071, + "ize": 2072, + "Ġfinish": 2073, + "Ġlaws": 2074, + "Ġran": 2075, + "ER": 2076, + "Ġsouth": 2077, + "Ġspeed": 2078, + "Ġmovement": 2079, + "Ġassault": 2080, + "Ġexchange": 2081, + "Ġappear": 2082, + "ĠSun": 2083, + "Ġle": 2084, + "Ġmaybe": 2085, + "Ġlosing": 2086, + "Ġsubject": 2087, + "ive": 2088, + "mer": 2089, + "ĠBusiness": 2090, + "ĠBl": 2091, + "Ġappears": 2092, + "Ġadvantage": 2093, + "ĠLee": 2094, + "ada": 2095, + "ĠUnder": 2096, + "Ġprevent": 2097, + "Ġrespect": 2098, + "Ġsex": 2099, + "Ġcentre": 2100, + "ĠJoe": 2101, + "ado": 2102, + "Ġtable": 2103, + "Ġequipment": 2104, + "Ġfair": 2105, + "Ġtour": 2106, + "Ġ32": 2107, + "ĠFinancial": 2108, + "Ġcounty": 2109, + "Ġdevices": 2110, + "Ġcustomer": 2111, + "Ġinfrastructure": 2112, + "Ġexpectations": 2113, + "Ġfacing": 2114, + "Ġupon": 2115, + "Ġcross": 2116, + "ĠOpen": 2117, + "AL": 2118, + "Ġquick": 2119, + "Ġattempt": 2120, + "Ġcompleted": 2121, + "Ġfacility": 2122, + "Ġconfidence": 2123, + "ĠSupreme": 2124, + "Ġpiece": 2125, + "our": 2126, + "Ġplaces": 2127, + "Ġsometimes": 2128, + "Ġpoor": 2129, + "Ġstorm": 2130, + "Ġhot": 2131, + "Ġaffected": 2132, + "na": 2133, + "Ġabuse": 2134, + "ĠMs": 2135, + "Ġword": 2136, + "over": 2137, + "Ġbrother": 2138, + "Ġnecessary": 2139, + "Ġeventually": 2140, + "ĠStar": 2141, + "Ġsend": 2142, + "Ġboy": 2143, + "ĠRs": 2144, + "Ġremember": 2145, + "21": 2146, + "Ġclimate": 2147, + "Ġcapacity": 2148, + "Ġresponsible": 2149, + "ĠMatt": 2150, + "month": 2151, + "Ġsuffered": 2152, + "%.": 2153, + "og": 2154, + "ĠPeter": 2155, + "Ġ,": 2156, + "Ġfeeling": 2157, + "ze": 2158, + "Ġbuying": 2159, + "oy": 2160, + "ij": 2161, + "Ġbought": 2162, + "Ġactions": 2163, + "Ġowned": 2164, + "Ġ___": 2165, + "Ġphysical": 2166, + "Ġspecific": 2167, + "Ġbattle": 2168, + "ĠEnergy": 2169, + "Ġpicture": 2170, + "Ġactive": 2171, + "Ġindividuals": 2172, + "Ġguy": 2173, + "Ġregional": 2174, + "Ġbond": 2175, + "ows": 2176, + "ĠToronto": 2177, + "Ġrule": 2178, + "Ġdevelop": 2179, + "Ġcrowd": 2180, + "Ġguilty": 2181, + "Ġfemale": 2182, + "Ġselling": 2183, + "ĠFollow": 2184, + "Ġmyself": 2185, + "ata": 2186, + "Ġdevice": 2187, + "Ġreasons": 2188, + "Ġrecords": 2189, + "Ġfighting": 2190, + "ON": 2191, + "ities": 2192, + "ĠHome": 2193, + "Ġstatus": 2194, + "Ġplant": 2195, + "Ġdrugs": 2196, + "ĠChurch": 2197, + "Ġcompletely": 2198, + "Ġdisease": 2199, + "Ġhighly": 2200, + "ĠParis": 2201, + "Ġdecade": 2202, + "Ġowners": 2203, + "Ġwall": 2204, + "Ġcamp": 2205, + "ĠSteve": 2206, + "Ġreporting": 2207, + "Ġearned": 2208, + "ĠImages": 2209, + "Ġexisting": 2210, + "ĠSen": 2211, + "Ġconcern": 2212, + "Ġhundreds": 2213, + "Ġsong": 2214, + "Ġknows": 2215, + "Ġunique": 2216, + "Ġlose": 2217, + "ĠKh": 2218, + "Ġapproximately": 2219, + "Ġhaven": 2220, + "Ġpark": 2221, + "Ġindependent": 2222, + "ĠAlthough": 2223, + "ĠAndrew": 2224, + "Ġpaper": 2225, + "Ġdeveloped": 2226, + "Ġrising": 2227, + "Ġdirect": 2228, + "Ġpurchase": 2229, + "Ġexactly": 2230, + "Ġq": 2231, + "Ġmassive": 2232, + "Ġbox": 2233, + "Ġchampion": 2234, + "ĠClinton": 2235, + "Ġvoice": 2236, + "Ġarrest": 2237, + "ĠKorean": 2238, + "Ġlearning": 2239, + "ĠVirginia": 2240, + "Ġsa": 2241, + "Ġpar": 2242, + "Ġchairman": 2243, + 
"Ġagencies": 2244, + "Ġhealthy": 2245, + "ĠThose": 2246, + "Ġpowerful": 2247, + "Ġ45": 2248, + "Ġdifference": 2249, + "ĠJackson": 2250, + "Ġenforcement": 2251, + "Ġdividend": 2252, + "qu": 2253, + "Ġenjoy": 2254, + "Ġruling": 2255, + "Ġongoing": 2256, + "Ġsoftware": 2257, + "ks": 2258, + "Ġlocation": 2259, + "Ġmostly": 2260, + "Ġcandidates": 2261, + "men": 2262, + "Ġbroke": 2263, + "What": 2264, + "ĠBr": 2265, + "Ġ2008": 2266, + "Ġconsumer": 2267, + "Ġdiscuss": 2268, + "Ġdi": 2269, + "Ġprimary": 2270, + "ĠEn": 2271, + "Ġgreen": 2272, + "Ġconcerned": 2273, + "Ġimage": 2274, + "ĠPremier": 2275, + "ĠMeanwhile": 2276, + "Ġfired": 2277, + "ĠBoston": 2278, + "ann": 2279, + "Ġcamera": 2280, + "Ġtraded": 2281, + "Ġhasn": 2282, + "Ġexcited": 2283, + "Ġincreasing": 2284, + "ĠDespite": 2285, + "Ġcitizens": 2286, + "Ġeuro": 2287, + "Ġreportedly": 2288, + "Ġminute": 2289, + "ĠWill": 2290, + "ĠLLC": 2291, + "Ġsp": 2292, + "ĠMichigan": 2293, + "Ġstopped": 2294, + "Ġeye": 2295, + "Ġdenied": 2296, + "Ġmodern": 2297, + "ĠWall": 2298, + "Ġdefinitely": 2299, + "point": 2300, + "Ġlines": 2301, + "Ġpolitics": 2302, + "Ġhotel": 2303, + "Ġretail": 2304, + "Ġstated": 2305, + "ĠOver": 2306, + "Ġgrew": 2307, + "Ġbroadcast": 2308, + "Ġlegislation": 2309, + "Ġfresh": 2310, + "Ġbid": 2311, + "Ġmanaged": 2312, + "Ġsociety": 2313, + "Ġscoring": 2314, + "ĠGet": 2315, + "Ġintelligence": 2316, + "Ġholiday": 2317, + "Ġgovernor": 2318, + "Ġestimated": 2319, + "Ġexperts": 2320, + "ĠJeff": 2321, + "Ġstruck": 2322, + "Ġhits": 2323, + "Ġcarry": 2324, + "Ġplaced": 2325, + "Ġstores": 2326, + "Ġexpressed": 2327, + "Ġvalued": 2328, + "Ġad": 2329, + "Ġtwice": 2330, + "ala": 2331, + "Ġdisplay": 2332, + "Ġusually": 2333, + "Ġresponded": 2334, + "Ġdog": 2335, + "AS": 2336, + "ĠFed": 2337, + "Ġ2009": 2338, + "Ġdocuments": 2339, + "Ġnormal": 2340, + "Ġtrain": 2341, + "Ġfl": 2342, + "Ġshown": 2343, + "ĠEd": 2344, + "Ġsort": 2345, + "Ġallegedly": 2346, + "Ġshots": 2347, + "ka": 2348, + "Ġaccounts": 2349, + "Ġyesterday": 2350, + "Ġcreating": 2351, + "Ġchurch": 2352, + "Ġbus": 2353, + "Ġaward": 2354, + "Ġequity": 2355, + "Ġphotos": 2356, + "Ġ33": 2357, + "Ġfiscal": 2358, + "je": 2359, + "Ġconsumers": 2360, + "ĠManchester": 2361, + "no": 2362, + "ĠKevin": 2363, + "Ġgain": 2364, + "Ġcorporate": 2365, + "Ġcivil": 2366, + "ĠMiddle": 2367, + "ally": 2368, + "Ġsound": 2369, + "ĠEnglish": 2370, + "IC": 2371, + "Ġwinds": 2372, + "Ġworst": 2373, + "ĠGrand": 2374, + "Ġeffective": 2375, + "ĠIsland": 2376, + "Ġdrivers": 2377, + "Ġfan": 2378, + "pe": 2379, + "Ġsides": 2380, + "ĠGo": 2381, + "Ġclean": 2382, + "âĢĵ": 2383, + "Ġtelevision": 2384, + "ĠJr": 2385, + "Ġallows": 2386, + "My": 2387, + "Ġgreater": 2388, + "ance": 2389, + "Ġdecisions": 2390, + "Ġrestaurant": 2391, + "ĠHospital": 2392, + "ĠTr": 2393, + "Ġbalance": 2394, + "Ġmph": 2395, + "Ġkeeping": 2396, + "Ġseconds": 2397, + "Ġweapons": 2398, + "ert": 2399, + "Ġpain": 2400, + "ass": 2401, + "Ġsteps": 2402, + "ger": 2403, + "ĠBrexit": 2404, + "Ġremaining": 2405, + "Ġbringing": 2406, + "ure": 2407, + "Ġweight": 2408, + "And": 2409, + "Ġwriting": 2410, + "Photo": 2411, + "ĠChristian": 2412, + "ob": 2413, + "Ġsport": 2414, + "Ġfigures": 2415, + "Ġtrust": 2416, + "Ġskills": 2417, + "Ġseat": 2418, + "Ġfaces": 2419, + "ck": 2420, + "Ġborn": 2421, + "Ġsuper": 2422, + "Ġfuel": 2423, + "Ġdel": 2424, + "Ġmeant": 2425, + "ica": 2426, + "Ġjustice": 2427, + "Ġspring": 2428, + "Ġkilling": 2429, + "Ġnegative": 2430, + "ĠRichard": 2431, + "Ġund": 2432, + "Ġfactors": 2433, + "Ġsigns": 2434, + "Ġlearned": 2435, + 
"ĠGame": 2436, + "Ġaudience": 2437, + "Ġdeliver": 2438, + "Ġillegal": 2439, + "Ġblue": 2440, + "Ġscreen": 2441, + "Ġremained": 2442, + "Ġannouncement": 2443, + "IN": 2444, + "Ġwaiting": 2445, + "Ġthanks": 2446, + "Ġimmigration": 2447, + "ĠFBI": 2448, + "Ġwarned": 2449, + "Ġmeasure": 2450, + "Ġdraw": 2451, + "Ġpositions": 2452, + "Ġdebut": 2453, + "ĠMedia": 2454, + "Ġallowing": 2455, + "air": 2456, + "hen": 2457, + "Ġmark": 2458, + "ys": 2459, + "Ġprepared": 2460, + "ĠVegas": 2461, + "ep": 2462, + "ice": 2463, + "2018": 2464, + "Ġdefensive": 2465, + "60": 2466, + "ĠBeach": 2467, + "Ġpulled": 2468, + "£": 2469, + "Ġlawyer": 2470, + "Ġcast": 2471, + "Ġsolution": 2472, + "Ġeyes": 2473, + "Ġmarketing": 2474, + "ĠFoundation": 2475, + "Ġrisks": 2476, + "ĠToday": 2477, + "za": 2478, + "Ġdraft": 2479, + "Ġice": 2480, + "26": 2481, + "ĠHar": 2482, + "ĠExecutive": 2483, + "Ġtruck": 2484, + "ions": 2485, + "ĠYour": 2486, + "ĠIreland": 2487, + "ĠJim": 2488, + "Ġha": 2489, + "Ġfear": 2490, + "Ġ36": 2491, + "UR": 2492, + "ĠFord": 2493, + "Ġwatching": 2494, + "ien": 2495, + "Ġstyle": 2496, + "ĠGood": 2497, + "Ġwearing": 2498, + "ĠHouston": 2499, + "Ġonto": 2500, + "Ġboost": 2501, + "Ġapplication": 2502, + "ĠDan": 2503, + "Ġspread": 2504, + "ĠDavis": 2505, + "Ġstrike": 2506, + "els": 2507, + "Ġwind": 2508, + "Ġinterested": 2509, + "Ġguard": 2510, + "Ġmission": 2511, + "Ġyourself": 2512, + "Ġoperation": 2513, + "Ġlarger": 2514, + "She": 2515, + "Ġseasons": 2516, + "28": 2517, + "27": 2518, + "Ġrespond": 2519, + "ci": 2520, + "ĠCentre": 2521, + "Our": 2522, + "Ġnames": 2523, + "Ġflight": 2524, + "Ġquarterback": 2525, + "Ġstandard": 2526, + "so": 2527, + "Ġsuggested": 2528, + "ĠMal": 2529, + "Ġolder": 2530, + "ini": 2531, + "Ġperhaps": 2532, + "ont": 2533, + "ĠInstitute": 2534, + "Ġmillions": 2535, + "Ġmental": 2536, + "ÃĤ": 2537, + "ga": 2538, + "Ġclients": 2539, + "Ġplease": 2540, + "Ġloan": 2541, + "Ġaware": 2542, + "ft": 2543, + "int": 2544, + "75": 2545, + "05": 2546, + "AY": 2547, + "ĠOut": 2548, + "Ġhair": 2549, + "ied": 2550, + "Ġseemed": 2551, + "ene": 2552, + "ty": 2553, + "NYSE": 2554, + "Ġoffensive": 2555, + "Ġtaxes": 2556, + "Ġinitial": 2557, + "ren": 2558, + "Ġseparate": 2559, + "la": 2560, + "ĠMiami": 2561, + "AC": 2562, + "Ġclearly": 2563, + "Ġfit": 2564, + "ĠCoast": 2565, + "Ġfirms": 2566, + "Ġpartners": 2567, + "Ġupcoming": 2568, + "Ġcold": 2569, + "Ġproposal": 2570, + "AT": 2571, + "Ġshut": 2572, + "ĠCommunity": 2573, + "Ġnature": 2574, + "ĠSal": 2575, + "Ġbottom": 2576, + "ting": 2577, + "ĠClick": 2578, + "Ġnice": 2579, + "ets": 2580, + "Ġhurt": 2581, + "itt": 2582, + "ama": 2583, + "Ġcarried": 2584, + "ĠCon": 2585, + "rd": 2586, + "Ġestate": 2587, + "ĠLas": 2588, + "ĠLaw": 2589, + "ng": 2590, + "Ġprotection": 2591, + "Ġproduce": 2592, + "Ġcurrency": 2593, + "Ġhappens": 2594, + "ĠPer": 2595, + "ney": 2596, + "ĠLong": 2597, + "Ġfellow": 2598, + "Ġcuts": 2599, + "Ġreading": 2600, + "ano": 2601, + "Ġproud": 2602, + "ost": 2603, + "ĠUN": 2604, + "ĠArizona": 2605, + "AD": 2606, + "Ġhelps": 2607, + "Ġwinter": 2608, + "Ġfinding": 2609, + "ĠGold": 2610, + "att": 2611, + "ĠWhy": 2612, + "Ġbasketball": 2613, + "lin": 2614, + "ĠCan": 2615, + "ĠBowl": 2616, + "ial": 2617, + "ĠAlex": 2618, + "200": 2619, + "AM": 2620, + "Ġpresence": 2621, + "Ġproduced": 2622, + "Ġdeveloping": 2623, + "Ġregarding": 2624, + "Ġdebate": 2625, + "Ġvice": 2626, + "ĠItaly": 2627, + "Ġsu": 2628, + "its": 2629, + "ator": 2630, + "Ġ34": 2631, + "Ġcomplex": 2632, + "Ġpresented": 2633, + "Ġresearchers": 2634, + "Ġslow": 2635, + 
"ya": 2636, + "Ġsanctions": 2637, + "Ġloved": 2638, + "Ġseek": 2639, + "Ġresponsibility": 2640, + "Ġadmitted": 2641, + "Ġalbum": 2642, + "Ġsolutions": 2643, + "Ġfacilities": 2644, + "ett": 2645, + "ĠGu": 2646, + "ĠWell": 2647, + "Ġlawmakers": 2648, + "Ġmiss": 2649, + "ful": 2650, + "ĠNick": 2651, + "'.": 2652, + "Ġfeels": 2653, + "Ġprime": 2654, + "Ġknowledge": 2655, + "Ġdeals": 2656, + "ĠTaylor": 2657, + "Ġsurvey": 2658, + "ĠFrancisco": 2659, + "Ġjoint": 2660, + "Ġwhom": 2661, + "Ġsit": 2662, + "01": 2663, + "Ġtr": 2664, + "Ġorganizations": 2665, + "ĠAvenue": 2666, + "ĠTheir": 2667, + "ĠTim": 2668, + "Ġrally": 2669, + "game": 2670, + "Ġbigger": 2671, + "Ġlawsuit": 2672, + "Ġrecorded": 2673, + "Ġfavorite": 2674, + "yard": 2675, + "Ġtransaction": 2676, + "Ġqu": 2677, + "oh": 2678, + "Ġinteresting": 2679, + "Ġinflation": 2680, + "ath": 2681, + "Ġstuff": 2682, + "Ġindustrial": 2683, + "ico": 2684, + "TS": 2685, + "Ġspeaking": 2686, + "Ġlosses": 2687, + "ID": 2688, + "ĠStadium": 2689, + "Ġstars": 2690, + "ĠWomen": 2691, + "ĠBlue": 2692, + "Ġwins": 2693, + "Ġdes": 2694, + "Ġcompetitive": 2695, + "ters": 2696, + "Ġpounds": 2697, + "Ġdirection": 2698, + "Ġinnings": 2699, + "ĠBest": 2700, + "Ġactor": 2701, + "Ġdangerous": 2702, + "Ġrequire": 2703, + "Ġplus": 2704, + "Ġsolid": 2705, + "Ġgeneration": 2706, + "Ġstrength": 2707, + "ĠMary": 2708, + "For": 2709, + "Ġplenty": 2710, + "ĠTeam": 2711, + "Ġinfluence": 2712, + "Ġfaced": 2713, + "Ġes": 2714, + "ĠIslamic": 2715, + "let": 2716, + "ĠDevelopment": 2717, + "Ġpath": 2718, + "Ġyouth": 2719, + "Ġcommitment": 2720, + "Ġbeautiful": 2721, + "ĠJack": 2722, + "ort": 2723, + "Ġten": 2724, + "Ġattend": 2725, + "ars": 2726, + "ón": 2727, + "Ġviews": 2728, + "Ġeuros": 2729, + "Ġauthor": 2730, + "Ġcore": 2731, + "Ġsupporters": 2732, + "ĠiPhone": 2733, + "Ġfashion": 2734, + "Ġsmaller": 2735, + "Ġelected": 2736, + "Ġuniversity": 2737, + "Ġpicked": 2738, + "wa": 2739, + "Ġordered": 2740, + "ĠSc": 2741, + "ĠÅ": 2742, + "Ġlargely": 2743, + "+": 2744, + "ĠAttorney": 2745, + "Ġpaying": 2746, + "AR": 2747, + "Ġconnection": 2748, + "Ġsetting": 2749, + "Ġna": 2750, + "ĠRock": 2751, + "Ġrecovery": 2752, + "ew": 2753, + "Ġserving": 2754, + "Ġsurprise": 2755, + "Ġoccurred": 2756, + "Ġdivision": 2757, + "Ġtelling": 2758, + "Ġmargin": 2759, + "Ġ2020": 2760, + "Ġsister": 2761, + "ĠNBA": 2762, + "Ġvoted": 2763, + "Ġcon": 2764, + "By": 2765, + "Ġ49": 2766, + "Ġfoot": 2767, + "ü": 2768, + "ĠTurkey": 2769, + "Ġamazing": 2770, + "Ġcombined": 2771, + "Ġappearance": 2772, + "Ġeasily": 2773, + "DAY": 2774, + "Ġnotes": 2775, + "ĠStart": 2776, + "Ġlanguage": 2777, + "Ġextremely": 2778, + "Ġcloudy": 2779, + "ĠLet": 2780, + "Ġdelivered": 2781, + "Ġimproved": 2782, + "Ġcollection": 2783, + "ĠPM": 2784, + "Ġestimates": 2785, + "Ġboys": 2786, + "izing": 2787, + "Ġtext": 2788, + "Ġcloser": 2789, + "Ġprotest": 2790, + "Ġprovince": 2791, + "Ġshop": 2792, + "Ġsmart": 2793, + "de": 2794, + "ĠSheriff": 2795, + "EN": 2796, + "Ġcorner": 2797, + "Ġpanel": 2798, + "Ġbooks": 2799, + "Ġsupported": 2800, + "Ġmentioned": 2801, + "ver": 2802, + "ĠMinistry": 2803, + "ĠPrince": 2804, + "ĠUSA": 2805, + "Ġreceiving": 2806, + "Ġchoose": 2807, + "ĠIN": 2808, + "ĠSpain": 2809, + "Ġsection": 2810, + "Ġconsidering": 2811, + "ĠCor": 2812, + "Ġwish": 2813, + "Ġwelcome": 2814, + "ĠConference": 2815, + "ere": 2816, + "ĠOfficer": 2817, + "Ġhoping": 2818, + "Ġportfolio": 2819, + "Ġstandards": 2820, + "Ġgrand": 2821, + "ĠReal": 2822, + "Ġsecure": 2823, + "ĠCorporation": 2824, + "ĠRep": 2825, + "ĠKelly": 2826, + 
"Ġstreets": 2827, + "Ġsitting": 2828, + "Ġslightly": 2829, + "ĠInvestment": 2830, + "99": 2831, + "ond": 2832, + "Ġunits": 2833, + "Ġvotes": 2834, + "Ġsegment": 2835, + "Ġchampionship": 2836, + "Ġsquad": 2837, + "iting": 2838, + "ron": 2839, + "®": 2840, + "Ġem": 2841, + "Ġtouch": 2842, + "Ġ38": 2843, + "Ġceremony": 2844, + "Ġdecide": 2845, + "Ġapproval": 2846, + "So": 2847, + "ĠPort": 2848, + "Ġsub": 2849, + "Ġsc": 2850, + "Ġrep": 2851, + "ĠWeek": 2852, + "Ġupper": 2853, + "Ġagree": 2854, + "ny": 2855, + "Ġmatches": 2856, + "ics": 2857, + "Ġtweeted": 2858, + "Ġheat": 2859, + "ĠGreat": 2860, + "Ġpenalty": 2861, + "Ġmass": 2862, + "Ġalongside": 2863, + "Ġherself": 2864, + "berg": 2865, + "Ġscience": 2866, + "Ġentered": 2867, + "Ġappeal": 2868, + "ĠPr": 2869, + "Ġfile": 2870, + "che": 2871, + "ĠReport": 2872, + "ĠThree": 2873, + "ĠNorthern": 2874, + "ĠJordan": 2875, + "Ġamid": 2876, + "Ġpace": 2877, + "Ġjail": 2878, + "Ġfinance": 2879, + "ĠYoung": 2880, + "32": 2881, + "Ġwilling": 2882, + "Ġconduct": 2883, + "ĠPar": 2884, + "Ġestablished": 2885, + "Ġreturns": 2886, + "Ġaid": 2887, + "Ġinternet": 2888, + "IA": 2889, + "29": 2890, + "Ġmeetings": 2891, + "Ġwarning": 2892, + "ĠCl": 2893, + "Ġcampus": 2894, + "Most": 2895, + "ĠFund": 2896, + "ĠWilliam": 2897, + "ĠJapanese": 2898, + "Ġconsensus": 2899, + "Ġbrain": 2900, + "!\"": 2901, + "Ġpoll": 2902, + "Ġtech": 2903, + "Ġtrend": 2904, + "Ġpotentially": 2905, + "Ġreduced": 2906, + "ĠShow": 2907, + "Ġ37": 2908, + "Ġhappening": 2909, + "ĠBrazil": 2910, + "pl": 2911, + "ĠCal": 2912, + "Ġcovered": 2913, + "Ġenter": 2914, + "TV": 2915, + "Ġcatch": 2916, + "foot": 2917, + "Ġunion": 2918, + "Ġexpansion": 2919, + "ĠSingapore": 2920, + "ĠDetroit": 2921, + "Ġattended": 2922, + "ats": 2923, + "Ġnewspaper": 2924, + "ĠDivision": 2925, + "news": 2926, + "Ġcap": 2927, + "Ġremoved": 2928, + "Ġ48": 2929, + "ĠRoyal": 2930, + "Ġwindow": 2931, + "Ġparking": 2932, + "Ġdark": 2933, + "Ġstanding": 2934, + "Ġupdate": 2935, + "Ġagent": 2936, + "Ġtransfer": 2937, + "ĠArmy": 2938, + "Ġuses": 2939, + "80": 2940, + "ĠTe": 2941, + "Ġintroduced": 2942, + "Ġmale": 2943, + "ĠSouthern": 2944, + "Ġratings": 2945, + "Ġisland": 2946, + "ĠMiller": 2947, + "Ġteachers": 2948, + "Ġadvice": 2949, + "Ġfamiliar": 2950, + "uf": 2951, + "Ġsought": 2952, + "Ġpor": 2953, + "ĠEric": 2954, + "Ġda": 2955, + "Ġideas": 2956, + "uh": 2957, + "Ġsixth": 2958, + "Ġtalent": 2959, + "ĠImage": 2960, + "ering": 2961, + "run": 2962, + "ments": 2963, + "Ġconducted": 2964, + "300": 2965, + "Ġurged": 2966, + "Ġdiscovered": 2967, + "Ġpl": 2968, + "Ġunderstanding": 2969, + "Ġoffense": 2970, + "Ġsecretary": 2971, + "Ġsk": 2972, + "Ġloans": 2973, + "ĠGr": 2974, + "Ġapplications": 2975, + "Ġcrude": 2976, + "go": 2977, + "ĠInstead": 2978, + "Ġopinion": 2979, + "Ġdoubt": 2980, + "ey": 2981, + "Ġdis": 2982, + "31": 2983, + "Ġexperienced": 2984, + "Ġleg": 2985, + "ĠCleveland": 2986, + "ven": 2987, + "Ġfailure": 2988, + "market": 2989, + "ack": 2990, + "Ġdecline": 2991, + "Ġchanging": 2992, + "Ġ300": 2993, + "Ġdefence": 2994, + "ĠBrian": 2995, + "Ġdelivery": 2996, + "Ġmarried": 2997, + "Ġdeclared": 2998, + "Ġpull": 2999, + "Ġlimit": 3000, + "ĠMORE": 3001, + "Ġdefeat": 3002, + "Ġexpand": 3003, + "ĠColorado": 3004, + "ĠRob": 3005, + "iss": 3006, + "Ġworse": 3007, + "Ġperform": 3008, + "ising": 3009, + "Ġ2007": 3010, + "ĠDel": 3011, + "Ġsurgery": 3012, + "Ġeasier": 3013, + "Ġmaintain": 3014, + "ĠEx": 3015, + "Ġtied": 3016, + "Ġeast": 3017, + "Ġuser": 3018, + "ola": 3019, + "Ġprogramme": 3020, + "Ġmanufacturing": 3021, + 
"Ġhitting": 3022, + "Ġx": 3023, + "Ġskin": 3024, + "Ġartist": 3025, + "Ġtells": 3026, + "Ġnearby": 3027, + "ĠDaniel": 3028, + "ĠPower": 3029, + "Ġdetermined": 3030, + "Ġactual": 3031, + "Ġtreated": 3032, + "Ġlived": 3033, + "Ġcomputer": 3034, + "Ġcool": 3035, + "oo": 3036, + "ĠPl": 3037, + "Ġeffects": 3038, + "Ġenvironmental": 3039, + "ĠMorgan": 3040, + "Ġflow": 3041, + "Ġachieve": 3042, + "ĠBell": 3043, + "Ġtesting": 3044, + "ĠBob": 3045, + "Ġwhatever": 3046, + "ĠBecause": 3047, + "US": 3048, + "ĠHollywood": 3049, + "Ġconflict": 3050, + "Ġwalking": 3051, + "ĠJudge": 3052, + "ĠAlabama": 3053, + "Ġaircraft": 3054, + "Ġte": 3055, + "well": 3056, + "Ġgoods": 3057, + "Ġidentify": 3058, + "Ġassociated": 3059, + "ĠVer": 3060, + "ĠEducation": 3061, + "Ġairport": 3062, + "IL": 3063, + "Ġfalling": 3064, + "Ġgiant": 3065, + "ĠMa": 3066, + "ĠMedical": 3067, + "Ġride": 3068, + "Ġden": 3069, + "º": 3070, + "ĠJose": 3071, + "Ġwest": 3072, + "ĠPacific": 3073, + "Ġvisitors": 3074, + "ĠWatch": 3075, + "ĠNations": 3076, + "Ġgains": 3077, + "Ġschedule": 3078, + "34": 3079, + "ĠExchange": 3080, + "Ġpayments": 3081, + "ĠII": 3082, + "70": 3083, + "No": 3084, + "ĠSyrian": 3085, + "ĠAdam": 3086, + "Ġne": 3087, + "Ġpartnership": 3088, + "Ġbl": 3089, + "ĠGeorgia": 3090, + "Ġsites": 3091, + "Ġmodels": 3092, + "Ġdegree": 3093, + "Ġdetermine": 3094, + "ĠWilson": 3095, + "Ġcontest": 3096, + "Ġprofessor": 3097, + "ĠChelsea": 3098, + "Ġmeaning": 3099, + "ĠGames": 3100, + "ĠTrust": 3101, + "ĠAsian": 3102, + "33": 3103, + "Ġlink": 3104, + "ĠUp": 3105, + "Ġholds": 3106, + "ĠTop": 3107, + "ĠItalian": 3108, + "ord": 3109, + "ĠKansas": 3110, + "Ġfarmers": 3111, + "Ġextended": 3112, + "Ġbirth": 3113, + "Ġreform": 3114, + "Ġrelations": 3115, + "Ġwrite": 3116, + "Ġsupporting": 3117, + "55": 3118, + "ita": 3119, + "Ġnotice": 3120, + "ster": 3121, + "Ġanimals": 3122, + "ĠJersey": 3123, + "Ġarm": 3124, + "ĠForeign": 3125, + "ĠLife": 3126, + "Ġtruly": 3127, + "ĠOnce": 3128, + "ĠMayor": 3129, + "ĠFree": 3130, + "ĠAgency": 3131, + "ĠWood": 3132, + "Ġpassing": 3133, + "DA": 3134, + "Ġ52": 3135, + "Ġmoves": 3136, + "Ġcom": 3137, + "house": 3138, + "ĠIts": 3139, + "Ġmarijuana": 3140, + "ines": 3141, + "Ġveteran": 3142, + "Ġvariety": 3143, + "ki": 3144, + "ff": 3145, + "amb": 3146, + "Ġlisted": 3147, + "Ġpushed": 3148, + "Ġvolume": 3149, + "Ġincreasingly": 3150, + "Ġkick": 3151, + "Ġrock": 3152, + "ank": 3153, + "Ġfees": 3154, + "Ġenable": 3155, + "Ġimages": 3156, + "Ġtruth": 3157, + "Ġministry": 3158, + "Ġrare": 3159, + "ĠDallas": 3160, + "ĠMinnesota": 3161, + "Ġcontributed": 3162, + "ĠCharles": 3163, + "Ġpercentage": 3164, + "Ġtechnical": 3165, + "ĠApp": 3166, + "Ġassistant": 3167, + "Ġinterests": 3168, + "Ġimmediate": 3169, + "38": 3170, + "ĠTown": 3171, + "Ġclosing": 3172, + "ĠAnthony": 3173, + "Ġsouthern": 3174, + "ase": 3175, + "ĠPutin": 3176, + "ĠForce": 3177, + "ba": 3178, + "Ġrefused": 3179, + "ĠStill": 3180, + "ix": 3181, + "ĠCol": 3182, + "Ġmaterials": 3183, + "Ġstructure": 3184, + "Ġdriven": 3185, + "Ġpatient": 3186, + "Ġbroken": 3187, + "Ġradio": 3188, + "Ġscale": 3189, + "Ġreplace": 3190, + "Ġ39": 3191, + "ĠLand": 3192, + "Ġdeputy": 3193, + "und": 3194, + "Ġcolor": 3195, + "OS": 3196, + "Ġroads": 3197, + "Ġcorruption": 3198, + "ĠRose": 3199, + "Ġemployee": 3200, + "ĠWater": 3201, + "Ġseats": 3202, + "Ġwalked": 3203, + "ec": 3204, + "Ġcents": 3205, + "Ġchain": 3206, + "Ġpayment": 3207, + "ĠAndroid": 3208, + "eb": 3209, + "Ġcommission": 3210, + "Ġthrow": 3211, + "Ġcount": 3212, + "Ġaccident": 3213, + "Ġexpensive": 3214, + 
"ered": 3215, + "ĠYes": 3216, + "ĠLouis": 3217, + "Ġstudies": 3218, + "Ġinvestigating": 3219, + "Ġcentury": 3220, + "Ġdiscussion": 3221, + "Ġinter": 3222, + "DAQ": 3223, + "ĠBefore": 3224, + "Ġinitially": 3225, + "*": 3226, + "Ġinvestments": 3227, + "Ġmulti": 3228, + "Ġtight": 3229, + "Ġconfident": 3230, + "Ġcounter": 3231, + "ĠQu": 3232, + "Ġgovernments": 3233, + "Ġarmed": 3234, + "Ġsuit": 3235, + "Ġrow": 3236, + "Ġlocations": 3237, + "Ġepisode": 3238, + "itch": 3239, + "Ġyounger": 3240, + "Ġfestival": 3241, + "Ġpitch": 3242, + "ĠOF": 3243, + "Ġtalked": 3244, + "ca": 3245, + "Ġprotests": 3246, + "Ġtargets": 3247, + "90": 3248, + "Ġoriginally": 3249, + "Ġsinger": 3250, + "Ġjourney": 3251, + "ug": 3252, + "Ġapply": 3253, + "Ġteacher": 3254, + "Ġchances": 3255, + "):": 3256, + "Ġdeaths": 3257, + "isation": 3258, + "ĠStephen": 3259, + "Ġcode": 3260, + "ĠChampionship": 3261, + "ĠJason": 3262, + "ĠAT": 3263, + "Ġaccept": 3264, + "ĠSeries": 3265, + "Ġvalues": 3266, + "Ġbed": 3267, + "ĠHarry": 3268, + "Ġflat": 3269, + "Ġtools": 3270, + "Ġpublicly": 3271, + "37": 3272, + "Ġpointed": 3273, + "ĠGolden": 3274, + "ps": 3275, + "Ġunable": 3276, + "ants": 3277, + "Ġestimate": 3278, + "Ġwarm": 3279, + "Ġbasic": 3280, + "ern": 3281, + "Ġraising": 3282, + "ĠRelated": 3283, + "Ġultimately": 3284, + "Ġnorthern": 3285, + "Ġplane": 3286, + "ĠVice": 3287, + "ĠRaj": 3288, + "ĠJustin": 3289, + "anc": 3290, + "Ġbrings": 3291, + "ĠArt": 3292, + "OT": 3293, + "Ġshift": 3294, + "ĠBBC": 3295, + "ĠSu": 3296, + "BS": 3297, + "Ġbag": 3298, + "Ġdoctor": 3299, + "Ġfill": 3300, + "Ġdowntown": 3301, + "Ġpossibility": 3302, + "ĠAg": 3303, + "Ġest": 3304, + "44": 3305, + "Ġstruggling": 3306, + "Ġlinked": 3307, + "Ġtickets": 3308, + "ĠJay": 3309, + "ĠCall": 3310, + "Ġstands": 3311, + "Ġwedding": 3312, + "Ġresident": 3313, + "eng": 3314, + "Ġleads": 3315, + "Ġadvance": 3316, + "ĠAtlanta": 3317, + "Ġtie": 3318, + "Ġadvanced": 3319, + "pt": 3320, + "burg": 3321, + "ĠEarlier": 3322, + "ĠSw": 3323, + "ĠZealand": 3324, + "Ġexercise": 3325, + "ĠAM": 3326, + "Ġaffect": 3327, + "Ġpossession": 3328, + "Ġinvolving": 3329, + "Ġ42": 3330, + "Ġwriter": 3331, + "ĠBeijing": 3332, + "Ġdoctors": 3333, + "Ġobviously": 3334, + "Ġer": 3335, + "ĠOlympic": 3336, + "Ġ75": 3337, + "ĠKhan": 3338, + "ĠFort": 3339, + "app": 3340, + "like": 3341, + "Ġsea": 3342, + "ock": 3343, + "Ġmix": 3344, + "ĠIraq": 3345, + "ĠMuslim": 3346, + "ĠFinally": 3347, + "Ġcontinuing": 3348, + "Ġpr": 3349, + "ĠKe": 3350, + "ĠJoseph": 3351, + "Ġexpects": 3352, + "Ġinstitutions": 3353, + "Ġconservative": 3354, + "own": 3355, + "ĠChairman": 3356, + "Ġreturning": 3357, + ".-": 3358, + "Ġstood": 3359, + "Ġvision": 3360, + "ess": 3361, + "Ġadults": 3362, + "Ġyield": 3363, + "Ġprove": 3364, + "Ġorders": 3365, + "Ġdream": 3366, + "36": 3367, + "related": 3368, + "Ġsl": 3369, + "Ġeverybody": 3370, + "ui": 3371, + "Ġrepresents": 3372, + "Ġdiscussed": 3373, + "Ġbecomes": 3374, + "Ġvillage": 3375, + "CC": 3376, + "Ġnegotiations": 3377, + "ĠPhiladelphia": 3378, + "Ġcelebrate": 3379, + "Ġfarm": 3380, + "ç": 3381, + "Ġregistered": 3382, + "ĠGovernor": 3383, + "OL": 3384, + "ĠMon": 3385, + "Ġfiling": 3386, + "04": 3387, + "SE": 3388, + "ĠAssembly": 3389, + "Ġactress": 3390, + "Ġsi": 3391, + "Ġthank": 3392, + "Ġheading": 3393, + "ĠWho": 3394, + "Ġfamous": 3395, + "Ġconsecutive": 3396, + "Ġmarriage": 3397, + "ette": 3398, + "NAS": 3399, + "acks": 3400, + "ĠPlease": 3401, + "ĠDiego": 3402, + "Ġbaseball": 3403, + "ĠMoore": 3404, + "Ġties": 3405, + "Ġcarrying": 3406, + "que": 3407, + "Ġturning": 
3408, + "ĠMcC": 3409, + "ĠKen": 3410, + "OR": 3411, + "ĠStock": 3412, + "Ġbuildings": 3413, + "49": 3414, + "ĠVan": 3415, + "39": 3416, + "ĠSeattle": 3417, + "Ġwild": 3418, + "Ġcrew": 3419, + "Ġroute": 3420, + "ĠTime": 3421, + "Ġtonight": 3422, + "Ġmoments": 3423, + "Ġvideos": 3424, + "Ġinternal": 3425, + "ĠLiverpool": 3426, + "port": 3427, + "Ġchair": 3428, + "Ġrival": 3429, + "ĠScotland": 3430, + "round": 3431, + "ith": 3432, + "Ġbreaking": 3433, + "Ġvoting": 3434, + "ically": 3435, + "Ġproducer": 3436, + "ĠLove": 3437, + "Ġremove": 3438, + "PA": 3439, + "Ġasset": 3440, + "Ġrequires": 3441, + "Ġsigning": 3442, + "ages": 3443, + "Ġimpressive": 3444, + "ĠIrish": 3445, + "Ġauthority": 3446, + "Ġruled": 3447, + "Ġaimed": 3448, + "Ġcaptain": 3449, + "AG": 3450, + "Ġplants": 3451, + "ĠAnderson": 3452, + "ĠSpanish": 3453, + "Ġbanking": 3454, + "Ġthreats": 3455, + "Ġsuspended": 3456, + "Ġtests": 3457, + "Ġreligious": 3458, + "Ġelectric": 3459, + "ĠREAD": 3460, + "Ġstrategic": 3461, + "Ġsplit": 3462, + "ex": 3463, + "Ġpractices": 3464, + "ĠIsraeli": 3465, + "ĠArabia": 3466, + "ĠMoscow": 3467, + "Ġfranchise": 3468, + "Ġcustody": 3469, + "ĠOld": 3470, + "Ġrequirements": 3471, + "Ġquarterly": 3472, + "Ġcomfortable": 3473, + "Ġcrimes": 3474, + "Ġheaded": 3475, + "Ġnewsletter": 3476, + "Ġanimal": 3477, + "Ġregulations": 3478, + "long": 3479, + "ĠCNN": 3480, + "Ġassists": 3481, + "Ġshopping": 3482, + "ĠGov": 3483, + "ĠSecurities": 3484, + "Ġassistance": 3485, + "Ġnor": 3486, + "Ġrelatively": 3487, + "Ġincreases": 3488, + "Ġgenerally": 3489, + "Ġ55": 3490, + "Ġgained": 3491, + "Ġ41": 3492, + "Ġpictures": 3493, + "gan": 3494, + "Ġpop": 3495, + "Ġupdates": 3496, + "ĠRepublic": 3497, + "Ġrebounds": 3498, + "ĠPatrick": 3499, + "Ġrelief": 3500, + "Ġacting": 3501, + "ĠFestival": 3502, + "Ġ2006": 3503, + "Ġboss": 3504, + "Ġtypes": 3505, + "65": 3506, + "ĠYet": 3507, + "Ġpurpose": 3508, + "ning": 3509, + "Ġmatters": 3510, + "Ġcompete": 3511, + "ball": 3512, + "ĠRam": 3513, + "Ġsw": 3514, + "ĠFollowing": 3515, + "ĠBush": 3516, + "Ġtroops": 3517, + "Ġsupposed": 3518, + "Ġfreedom": 3519, + "Ġfeatured": 3520, + "Ġstorage": 3521, + "ĠInformation": 3522, + "ĠHong": 3523, + "Ġgolf": 3524, + "Ġagents": 3525, + "Ġfraud": 3526, + "Ġminimum": 3527, + "Ġartists": 3528, + "Ġeat": 3529, + "high": 3530, + "ĠFormer": 3531, + "ĠKong": 3532, + "ĠJosh": 3533, + "ĠDelhi": 3534, + "Ġshowers": 3535, + "ĠAcademy": 3536, + "Ġapartment": 3537, + "Ġvan": 3538, + "Ġfish": 3539, + "oe": 3540, + "Ġfilms": 3541, + "ĠBo": 3542, + "Ġedge": 3543, + "Ġpossibly": 3544, + "Ġtweet": 3545, + "09": 3546, + "Ġresolution": 3547, + "jo": 3548, + "Ġkill": 3549, + "Ġ44": 3550, + "Ġcell": 3551, + "Ġscheme": 3552, + "Ġth": 3553, + "Ġbonds": 3554, + "Ġentry": 3555, + "Ġsecret": 3556, + "Ġ43": 3557, + "Ġending": 3558, + "Ġweren": 3559, + "ĠCredit": 3560, + "ĠLive": 3561, + "Ġretired": 3562, + "Ġmachine": 3563, + "Ġsummit": 3564, + "Ġsharing": 3565, + "Ġacquired": 3566, + "Ġera": 3567, + "Ġwear": 3568, + "ical": 3569, + "07": 3570, + "Ġexciting": 3571, + "li": 3572, + "BC": 3573, + "ĠSocial": 3574, + "Ġhistoric": 3575, + "ĠChe": 3576, + "ĠLewis": 3577, + "ira": 3578, + "Ġstolen": 3579, + "ĠSpeaking": 3580, + "Ġsleep": 3581, + "Ġspokeswoman": 3582, + "week": 3583, + "Ġpurchased": 3584, + "Ġimportance": 3585, + "EC": 3586, + "Ġends": 3587, + "Ġdress": 3588, + "Ġparliament": 3589, + "ĠCruz": 3590, + "Ġcards": 3591, + "hi": 3592, + "ĠEmail": 3593, + "Ġrepresent": 3594, + "Ġbrands": 3595, + "ĠSenior": 3596, + "Ġparticipants": 3597, + "Ġfly": 3598, + "Ġidentity": 
3599, + "ĠHam": 3600, + "ĠSky": 3601, + "ij": 3602, + "SA": 3603, + "Ġpromised": 3604, + "Ġtrouble": 3605, + "Ġsuffering": 3606, + "Ġleaves": 3607, + "Ġsuggest": 3608, + "Sh": 3609, + "Ġbusy": 3610, + "Ġproperties": 3611, + "Ġworldwide": 3612, + "Ġcloud": 3613, + "ĠSEC": 3614, + "Ġclosely": 3615, + "Ġmanage": 3616, + "Ġnumerous": 3617, + "Ġbackground": 3618, + "ĠExpress": 3619, + "Ġ65": 3620, + "ĠTony": 3621, + "ĠMadrid": 3622, + "ev": 3623, + "der": 3624, + "Ġsignificantly": 3625, + "Ġalternative": 3626, + "Ġship": 3627, + "head": 3628, + "ators": 3629, + "Ġdinner": 3630, + "ax": 3631, + "SC": 3632, + "Ġcriticism": 3633, + "ĠMah": 3634, + "ĠMin": 3635, + "rie": 3636, + "ĠTour": 3637, + "Ġbench": 3638, + "Ġadds": 3639, + "Ġseriously": 3640, + "star": 3641, + "ĠJournal": 3642, + "ĠDi": 3643, + "ali": 3644, + "Ġsentence": 3645, + "ĠSeveral": 3646, + "Ġmayor": 3647, + "ati": 3648, + "Ġsuggests": 3649, + "Ġbehavior": 3650, + "Ġstronger": 3651, + "ĠFood": 3652, + "Ġclient": 3653, + "not": 3654, + "ĠPrice": 3655, + "Ġtargeted": 3656, + "ĠSingh": 3657, + "ĠNetwork": 3658, + "Ġprosecutors": 3659, + "Ġdirected": 3660, + "ĠDemocrat": 3661, + "bl": 3662, + "ues": 3663, + "ĠFamily": 3664, + "Ġconnected": 3665, + "ĠChampions": 3666, + "Ġroughly": 3667, + "Ġabsolutely": 3668, + "08": 3669, + "Ġpassengers": 3670, + "ö": 3671, + "ĠSpecial": 3672, + "Ġcoast": 3673, + "Ġcomplaint": 3674, + "Ġ400": 3675, + "ĠEm": 3676, + "ves": 3677, + "Ġdogs": 3678, + "Ġhandle": 3679, + "Ġotherwise": 3680, + "Ġsees": 3681, + "Ġticket": 3682, + "ĠAward": 3683, + "All": 3684, + "Ġtask": 3685, + "Ġsongs": 3686, + "ĠAmong": 3687, + "Ġdedicated": 3688, + "Ġsteel": 3689, + "looking": 3690, + "Ġshortly": 3691, + "Ġtackle": 3692, + "ative": 3693, + "Ġminor": 3694, + "â": 3695, + "Ġprovider": 3696, + "vers": 3697, + "use": 3698, + "ives": 3699, + "Ġtypically": 3700, + "Ġarms": 3701, + "ĠAnt": 3702, + "ĠIS": 3703, + "Ġjump": 3704, + "Ġ©": 3705, + "47": 3706, + "aff": 3707, + "Ġmonthly": 3708, + "ĠMicrosoft": 3709, + "ĠCBS": 3710, + "Ġthreatened": 3711, + "Ġhonor": 3712, + "ĠMo": 3713, + "42": 3714, + "Ġinning": 3715, + "Ġpool": 3716, + "Ġhealthcare": 3717, + "ĠStory": 3718, + "ĠTennessee": 3719, + "Ġpromote": 3720, + "EL": 3721, + "Ġemotional": 3722, + "Ġpe": 3723, + "Ġfactor": 3724, + "Ġinvestigators": 3725, + "Ľ": 3726, + "ĠBack": 3727, + "ĠProject": 3728, + "Ġcu": 3729, + "side": 3730, + "Ġmessages": 3731, + "TH": 3732, + "eg": 3733, + "Ġexperiences": 3734, + "Ġcausing": 3735, + "Ġjoining": 3736, + "Ġpackage": 3737, + "Ġbodies": 3738, + "Ġlots": 3739, + "ĠHarris": 3740, + "Ġcl": 3741, + "ĠInternet": 3742, + "free": 3743, + "Ġperformed": 3744, + "Ġpieces": 3745, + "buy": 3746, + "Ġcaption": 3747, + "Ġweb": 3748, + "Ġcontracts": 3749, + "At": 3750, + "Ġattempted": 3751, + "Ġunlikely": 3752, + "Ġclick": 3753, + "Ġinvest": 3754, + "IM": 3755, + "ĠView": 3756, + "Ġneighborhood": 3757, + "Ġring": 3758, + "ĠFour": 3759, + "ail": 3760, + "46": 3761, + "One": 3762, + "Ġnative": 3763, + "CH": 3764, + "OM": 3765, + "Ġalcohol": 3766, + "ĠVal": 3767, + "Ġcharacters": 3768, + "ĠPat": 3769, + "Ġpoliticians": 3770, + "ĠMag": 3771, + "Ġbegins": 3772, + "ĠAk": 3773, + "Ġlos": 3774, + "Ġpersonnel": 3775, + "Ġenjoyed": 3776, + "ĠTechnology": 3777, + "Ġsun": 3778, + "ĠIT": 3779, + "Ġdocument": 3780, + "Ġdeficit": 3781, + "Ġcoalition": 3782, + "Ġmemory": 3783, + "Ġpushing": 3784, + "any": 3785, + "ified": 3786, + "Ġfounder": 3787, + "Ġ2000": 3788, + "2017": 3789, + "Ġvisited": 3790, + "ĠThough": 3791, + "ph": 3792, + "Ġsoft": 3793, + "Ġflag": 3794, + 
"Ġmom": 3795, + "inch": 3796, + "ĠSamsung": 3797, + "Ġapps": 3798, + "Ġtouchdown": 3799, + "ĠCare": 3800, + "ĠMrs": 3801, + "Ġredistributed": 3802, + "Ġencourage": 3803, + "ched": 3804, + "Ġtend": 3805, + "Ġregions": 3806, + "pp": 3807, + "IP": 3808, + "br": 3809, + "ush": 3810, + "Ġargued": 3811, + "Ġjunior": 3812, + "BA": 3813, + "Ġsevere": 3814, + "ĠNIGHT": 3815, + "Ġdef": 3816, + "Ġsurrounding": 3817, + "48": 3818, + "Ġengine": 3819, + "Ġfilled": 3820, + "Ġseventh": 3821, + "Ġbattery": 3822, + "ĠAllen": 3823, + "Ġguidance": 3824, + "Ġroll": 3825, + "Ġrural": 3826, + "Ġexpert": 3827, + "Ġconvicted": 3828, + "Ġlikes": 3829, + "ĠRo": 3830, + "Ġgrown": 3831, + "Ġretirement": 3832, + "Ġintended": 3833, + "Ġmis": 3834, + "Ġarmy": 3835, + "Ġdance": 3836, + "ĠThank": 3837, + "Ġent": 3838, + "Ġoutlook": 3839, + "Ġpara": 3840, + "Ġdry": 3841, + "ĠTO": 3842, + "era": 3843, + "Ġwaste": 3844, + "Ġfaster": 3845, + "ĠEagles": 3846, + "TA": 3847, + "ĠFrank": 3848, + "Ã": 3849, + "LE": 3850, + "ura": 3851, + "ko": 3852, + "ao": 3853, + "Ġdistribution": 3854, + "Ġimprovement": 3855, + "Ġplayoff": 3856, + "Ġacquisition": 3857, + "ĠCH": 3858, + "Ġtomorrow": 3859, + "Ġstruggle": 3860, + "ĠHuman": 3861, + "Ġnewly": 3862, + "oon": 3863, + "ĠNe": 3864, + "con": 3865, + "sc": 3866, + "Ġunless": 3867, + "Ġtransition": 3868, + "ten": 3869, + "ĠInter": 3870, + "Ġequal": 3871, + "Ġrec": 3872, + "Ġappointed": 3873, + "Ġwake": 3874, + "ĠEarth": 3875, + "ose": 3876, + "ĠEastern": 3877, + "Ġsoldiers": 3878, + "ĠParliament": 3879, + "Ġsets": 3880, + "Ġattempts": 3881, + "ĠIllinois": 3882, + "Ġrevenues": 3883, + "ĠWil": 3884, + "Ġheads": 3885, + "Ġprepare": 3886, + "Ġpriority": 3887, + "PS": 3888, + "ĠJo": 3889, + "ĠNBC": 3890, + "Ġtherefore": 3891, + "yn": 3892, + "Ġinitiative": 3893, + "ct": 3894, + "Ġcoffee": 3895, + "ĠFair": 3896, + "43": 3897, + "den": 3898, + "form": 3899, + "ova": 3900, + "Ġappropriate": 3901, + "ĠPlay": 3902, + "Ġaccepted": 3903, + "Ġcreative": 3904, + "Ġfollows": 3905, + "Ġrescue": 3906, + "Ġtree": 3907, + "With": 3908, + "ĠNetflix": 3909, + "ĠFootball": 3910, + "Ġsurprised": 3911, + "Ġlowest": 3912, + "800": 3913, + "amp": 3914, + "Ġworried": 3915, + "mar": 3916, + "ran": 3917, + "Ġvisiting": 3918, + "Ġselected": 3919, + "ĠMusic": 3920, + "ĠAnn": 3921, + "Ġexplain": 3922, + "ging": 3923, + "Ġwidely": 3924, + "Ġsquare": 3925, + "Ġtrends": 3926, + "Ġimproving": 3927, + "ĠHead": 3928, + "ĠQueen": 3929, + "ĠSociety": 3930, + "Ġcutting": 3931, + "ĠGOP": 3932, + "03": 3933, + "',": 3934, + "ET": 3935, + "ĠDrive": 3936, + "oll": 3937, + "ato": 3938, + "ĠSea": 3939, + "Ġjury": 3940, + "ĠRights": 3941, + "Ġinvestor": 3942, + "ĠABC": 3943, + "Ġtool": 3944, + "ĠAre": 3945, + "Ġrejected": 3946, + "Ġemerging": 3947, + "Ġcounts": 3948, + "Ġnations": 3949, + "Ġfalse": 3950, + "Ġtreat": 3951, + "va": 3952, + "Ġweak": 3953, + "ĠHighway": 3954, + "down": 3955, + "Ġstruggled": 3956, + "ĠMP": 3957, + "Ġguests": 3958, + "Ġgender": 3959, + "Ġhouses": 3960, + "rit": 3961, + "ĠWild": 3962, + "Ġstreak": 3963, + "uc": 3964, + "ĠReserve": 3965, + "ĠRatings": 3966, + "alt": 3967, + "Ġgreatest": 3968, + "Ġlawyers": 3969, + "Ġreaching": 3970, + "Ġtemperatures": 3971, + "To": 3972, + "Ġoutstanding": 3973, + "Ġpasses": 3974, + "Ġfaith": 3975, + "inc": 3976, + "Ġcr": 3977, + "Ġinformed": 3978, + "oz": 3979, + "Ġtrees": 3980, + "Ġsending": 3981, + "Ġ150": 3982, + "bo": 3983, + "Ġwine": 3984, + "ros": 3985, + "Ġsuspected": 3986, + "Ġrepeatedly": 3987, + "Ġhat": 3988, + "Ġshape": 3989, + "ĠWh": 3990, + "Ġassist": 3991, + 
"Ġstress": 3992, + "Ġfeed": 3993, + "ark": 3994, + "ored": 3995, + "Ġwatched": 3996, + "Ġincredible": 3997, + "cl": 3998, + "nt": 3999, + "Ġentertainment": 4000, + "ih": 4001, + "Ġbeauty": 4002, + "Ġbi": 4003, + "ĠLocal": 4004, + "Ġsat": 4005, + "41": 4006, + "Ġbroad": 4007, + "Ġheavily": 4008, + "Ġengaged": 4009, + "Ġspecifically": 4010, + "ĠMen": 4011, + "ĠRoss": 4012, + "Ġ2005": 4013, + "ST": 4014, + "95": 4015, + "Ġdownload": 4016, + "400": 4017, + "Ġsentenced": 4018, + "ĠCatholic": 4019, + "ĠOklahoma": 4020, + "Ġthrew": 4021, + "Ġworry": 4022, + "Ġimp": 4023, + "Ġdrove": 4024, + "Ġcolleagues": 4025, + "Ġagenda": 4026, + "64": 4027, + "ĠEach": 4028, + "Ġfee": 4029, + "New": 4030, + "ium": 4031, + "Ġspokesperson": 4032, + "Ġbills": 4033, + "Ġ47": 4034, + "ĠAfghanistan": 4035, + "Ġinvited": 4036, + "ĠYouTube": 4037, + "Ġanniversary": 4038, + "Ġdozen": 4039, + "ram": 4040, + "ĠOnly": 4041, + "Ġemployment": 4042, + "Getty": 4043, + "Ġgap": 4044, + "Ġsweet": 4045, + "ĠLittle": 4046, + "Ġinf": 4047, + "ying": 4048, + "Ġglass": 4049, + "Ġclasses": 4050, + "Ġcoal": 4051, + "ĠSub": 4052, + "Ġduty": 4053, + "CA": 4054, + "Ġcoaches": 4055, + "Â": 4056, + "anna": 4057, + "ĠSk": 4058, + "Ġ46": 4059, + "ison": 4060, + "ille": 4061, + "ĠST": 4062, + "ric": 4063, + "Ġparticipate": 4064, + "Ġequ": 4065, + "Ġrich": 4066, + "Ġrespectively": 4067, + "Ġexpenses": 4068, + "Ġcombination": 4069, + "right": 4070, + "Ġshareholders": 4071, + "Ġturns": 4072, + "Ġearn": 4073, + "Ġ51": 4074, + "ured": 4075, + "Ġdrink": 4076, + "ĠKar": 4077, + "ĠShares": 4078, + "ĠMid": 4079, + "ĠGetty": 4080, + "Ġbridge": 4081, + "lo": 4082, + "Ġinspired": 4083, + "Ġsurface": 4084, + "Ġgift": 4085, + "ence": 4086, + "Ġchallenging": 4087, + "Ġoffices": 4088, + "Ġsuspects": 4089, + "ĠFinance": 4090, + "Ġab": 4091, + "bound": 4092, + "Ġmomentum": 4093, + "Ġbacked": 4094, + "Ġparent": 4095, + "Ġcrucial": 4096, + "ave": 4097, + "Ġdealing": 4098, + "Ġregulatory": 4099, + "Ġapparently": 4100, + "ĠMat": 4101, + "Ġapart": 4102, + "Ġport": 4103, + "ole": 4104, + "Ġbeach": 4105, + "Ġcultural": 4106, + "Ġinstitutional": 4107, + "Ġbeating": 4108, + "ĠIowa": 4109, + "ĠAli": 4110, + "67": 4111, + "Ġje": 4112, + "ays": 4113, + "Ġweekly": 4114, + "Ġbirthday": 4115, + "Ġpipeline": 4116, + "Ġknee": 4117, + "Ġsolar": 4118, + "ĠPe": 4119, + "Ġcategory": 4120, + "ĠArea": 4121, + "ky": 4122, + "ures": 4123, + "06": 4124, + "ĠBall": 4125, + "Ġsemi": 4126, + "ĠHamilton": 4127, + "hip": 4128, + "ĠPh": 4129, + "ĠNext": 4130, + "Ġathletes": 4131, + "ii": 4132, + "Ġmovies": 4133, + "han": 4134, + "net": 4135, + "Ġplastic": 4136, + "Ġbehalf": 4137, + "gen": 4138, + "Ġfindings": 4139, + "Ġstretch": 4140, + "ĠSa": 4141, + "Ġofficially": 4142, + "ĠSarah": 4143, + "Ġprivacy": 4144, + "ĠMad": 4145, + "Ġnone": 4146, + "gh": 4147, + "On": 4148, + "Ġdrama": 4149, + "ĠFl": 4150, + "ika": 4151, + "ĠArsenal": 4152, + "Ġviolent": 4153, + "UN": 4154, + "called": 4155, + "59": 4156, + "Ġhate": 4157, + "Ġrelationships": 4158, + "Ġgranted": 4159, + "ĠJon": 4160, + "Ġlisten": 4161, + "season": 4162, + "Ġfewer": 4163, + "GA": 4164, + "ĠLabour": 4165, + "Ġremarks": 4166, + "ĠJonathan": 4167, + "ĠRos": 4168, + "sey": 4169, + "ĠOntario": 4170, + "ĠThompson": 4171, + "ĠNight": 4172, + "Ġranked": 4173, + "ĠUkraine": 4174, + "Ġimmigrants": 4175, + "Ġdegrees": 4176, + "ĠGe": 4177, + "Ġlabor": 4178, + "umb": 4179, + "ĠYORK": 4180, + "Ġallies": 4181, + "sp": 4182, + "hed": 4183, + "sw": 4184, + "Ġtariffs": 4185, + "SP": 4186, + "Ġclassic": 4187, + "Ġawards": 4188, + "ents": 4189, + 
"Ġfix": 4190, + "Ġsoccer": 4191, + "Ġconcert": 4192, + "ust": 4193, + "Ġadult": 4194, + "Ġoutput": 4195, + "Ġmanaging": 4196, + "02": 4197, + "Ġpromise": 4198, + "Ġawareness": 4199, + "Ġgross": 4200, + "Ġentering": 4201, + "Ġpo": 4202, + "oj": 4203, + "Ġmetal": 4204, + "Ġexit": 4205, + "Ġexcellent": 4206, + "Ġclubs": 4207, + "hold": 4208, + "Ġreplaced": 4209, + "ĠClass": 4210, + "Ġscientists": 4211, + "Ġprimarily": 4212, + "ĠMer": 4213, + "ão": 4214, + "Ġcircumstances": 4215, + "ades": 4216, + "Ġsupplies": 4217, + "aker": 4218, + "ĠSand": 4219, + "Ġscandal": 4220, + "Ġsettlement": 4221, + "ĠWisconsin": 4222, + "ĠWarriors": 4223, + "ĠAustin": 4224, + "Ġjournalists": 4225, + "ening": 4226, + "Ġreflect": 4227, + "ĠBuy": 4228, + "ĠAwards": 4229, + "Ġselection": 4230, + "ĠBel": 4231, + "bury": 4232, + "Ġtechnologies": 4233, + "%,": 4234, + "ime": 4235, + "ĠÄ": 4236, + "ĠAdministration": 4237, + "Ġchannel": 4238, + "Star": 4239, + "Ġtransport": 4240, + "Ġawarded": 4241, + "ena": 4242, + "Ġmotor": 4243, + "orn": 4244, + "kin": 4245, + "Ġfeaturing": 4246, + "Ġphones": 4247, + "ĠAND": 4248, + "Ġrelevant": 4249, + "ĠSee": 4250, + "Ġwinners": 4251, + "Ġdad": 4252, + "ĠSource": 4253, + "ĠCheck": 4254, + "aut": 4255, + "ĠFar": 4256, + "Ġopponents": 4257, + "Ġoutcome": 4258, + "Ġdoors": 4259, + "Ġsuicide": 4260, + "ima": 4261, + "Ġjumped": 4262, + "Ġperspective": 4263, + "Ġtransportation": 4264, + "Ġthinks": 4265, + "ĠMor": 4266, + "Ġdeadline": 4267, + "Ġ53": 4268, + "ĠDeputy": 4269, + "ery": 4270, + "Ġdetailed": 4271, + "uch": 4272, + "ĠBur": 4273, + "Ġtrades": 4274, + "ĠGreg": 4275, + "Ġzero": 4276, + "erson": 4277, + "ĠChildren": 4278, + "Ġdu": 4279, + "66": 4280, + "Ġmixed": 4281, + "ĠBarack": 4282, + "54": 4283, + "Ġterritory": 4284, + "Ġac": 4285, + "Ġconcept": 4286, + "ĠAdd": 4287, + "Ġourselves": 4288, + "Ġreaction": 4289, + "ĠSydney": 4290, + "ink": 4291, + "Ġconsistent": 4292, + "Ġboat": 4293, + "room": 4294, + "Ġdozens": 4295, + "Ġeffectively": 4296, + "but": 4297, + "Ġmotion": 4298, + "Ġalive": 4299, + "ĠKey": 4300, + "weight": 4301, + "Ġexports": 4302, + "Ġoperate": 4303, + "Ġregime": 4304, + "ĠAuthority": 4305, + "och": 4306, + "ĠCR": 4307, + "leg": 4308, + "Ġforget": 4309, + "American": 4310, + "bs": 4311, + "Ġthoughts": 4312, + "ĠSign": 4313, + "ĠPatriots": 4314, + "Ġbrief": 4315, + "ĠOregon": 4316, + "ĠBal": 4317, + "Ġmine": 4318, + "Ġciting": 4319, + "Ġmagazine": 4320, + "more": 4321, + "ERS": 4322, + "ĠBer": 4323, + "ua": 4324, + "ox": 4325, + "ĠMain": 4326, + "Ġinstance": 4327, + "tr": 4328, + "Ġrestaurants": 4329, + "ora": 4330, + "Ġharassment": 4331, + "\",\"": 4332, + "Ł": 4333, + "Ġsilver": 4334, + "ĠMueller": 4335, + "ĠSenator": 4336, + "ĠEvery": 4337, + "Ġfootage": 4338, + "ms": 4339, + "Ġopposed": 4340, + "ĠLink": 4341, + "Ġver": 4342, + "Ġpleased": 4343, + "ame": 4344, + "ending": 4345, + "Ġrivals": 4346, + "ida": 4347, + "ike": 4348, + "ta": 4349, + "ĠCook": 4350, + "Ġheadquarters": 4351, + "ear": 4352, + "Ġaggressive": 4353, + "Ġcourts": 4354, + "ĠMuseum": 4355, + "Ġim": 4356, + "ĠHoldings": 4357, + "Ġcommunication": 4358, + "Ġphase": 4359, + "yl": 4360, + "Ġpowers": 4361, + "Ġproved": 4362, + "Ġcarbon": 4363, + "Ġaside": 4364, + "ĠOlympics": 4365, + "Ġgathered": 4366, + "ĠPennsylvania": 4367, + "Ġsmartphone": 4368, + "ĠMet": 4369, + "ĠHurricane": 4370, + "Ġprotected": 4371, + "Ġcommunications": 4372, + "Ġemerged": 4373, + "Ġaim": 4374, + "Ġstable": 4375, + "ides": 4376, + "GB": 4377, + "Ġentirely": 4378, + "Ġmissile": 4379, + "ĠGen": 4380, + "Ġunclear": 4381, + 
"Ġelectricity": 4382, + "ology": 4383, + "away": 4384, + "Ġlicense": 4385, + "ĠPittsburgh": 4386, + "Ġcameras": 4387, + "Ġmusical": 4388, + "Ġmanagers": 4389, + "57": 4390, + "Ġscores": 4391, + "Ġprofile": 4392, + "hel": 4393, + "¼": 4394, + "Ġshouldn": 4395, + "RA": 4396, + ");": 4397, + "Ġpermanent": 4398, + "ome": 4399, + "Ġet": 4400, + "Ġmar": 4401, + "Ġfavor": 4402, + "Ġmaker": 4403, + "Ġdiscussions": 4404, + "ory": 4405, + "Ġsharp": 4406, + "Ġpleaded": 4407, + "Ġpassenger": 4408, + "quarter": 4409, + "Ġdem": 4410, + "Ġversus": 4411, + "Ġmainly": 4412, + "Ġeighth": 4413, + "ĠAirport": 4414, + "ĠCross": 4415, + "million": 4416, + "ĠNas": 4417, + "Ġcited": 4418, + "56": 4419, + "Ġyes": 4420, + "ĠBelow": 4421, + "arn": 4422, + "ĠTurkish": 4423, + "ĠSl": 4424, + "Ġstepped": 4425, + "Ġproducers": 4426, + "Ġovernight": 4427, + "Ġsounds": 4428, + "52": 4429, + "Ġ64": 4430, + "Ġ54": 4431, + "58": 4432, + "ĠClark": 4433, + "ĠRick": 4434, + "Ġgr": 4435, + "ĠMont": 4436, + "Ġbeer": 4437, + "une": 4438, + "Ġreporter": 4439, + "Ġcharity": 4440, + "Ġeating": 4441, + "Ġextend": 4442, + "Ġguess": 4443, + "NA": 4444, + "Ġhedge": 4445, + "Ġencouraged": 4446, + "owned": 4447, + "ĠMel": 4448, + "ĠKentucky": 4449, + "ace": 4450, + "Ġlineup": 4451, + "Ġhosts": 4452, + "Ġcapable": 4453, + "PR": 4454, + "ĠArts": 4455, + "Ġcontroversial": 4456, + "Ġhosted": 4457, + "ries": 4458, + "Ġroster": 4459, + "Ġfixed": 4460, + "ĠWalker": 4461, + "ged": 4462, + "Ġdisaster": 4463, + "Ġdispute": 4464, + "ĠDenver": 4465, + "ĠTrade": 4466, + "ute": 4467, + "ese": 4468, + "cy": 4469, + "Ġgrant": 4470, + "ĠMax": 4471, + "Ġdistance": 4472, + "isc": 4473, + "Ġeditor": 4474, + "ĠDave": 4475, + "Ġperformances": 4476, + "Ġlay": 4477, + "Ġvulnerable": 4478, + "ĠMurray": 4479, + "ĠâĤ¬": 4480, + "Ġmining": 4481, + "Ġ2004": 4482, + "level": 4483, + "ability": 4484, + "Ġauto": 4485, + "Ġfake": 4486, + "Ġattacked": 4487, + "ona": 4488, + "ups": 4489, + "ened": 4490, + "Ġfallen": 4491, + "Ġstations": 4492, + "ĠContact": 4493, + "itz": 4494, + "Ġincidents": 4495, + "Ġcomplaints": 4496, + "Ġoperates": 4497, + "Ġrefugees": 4498, + "Ġessential": 4499, + "ĠTest": 4500, + "Ġdemands": 4501, + "Ġroles": 4502, + "yr": 4503, + "Ġacts": 4504, + "Ġusual": 4505, + "ring": 4506, + "Ġhanded": 4507, + "ĠMatthew": 4508, + "hour": 4509, + "Ġindustries": 4510, + "Ġshoot": 4511, + "ĠAuthorities": 4512, + "Ġprobe": 4513, + "ĠUtah": 4514, + "ĠRBI": 4515, + "ĠAD": 4516, + "Ġprospect": 4517, + "outs": 4518, + "ĠUber": 4519, + "Ġbright": 4520, + "Ġmention": 4521, + "Ġsavings": 4522, + "ĠMiss": 4523, + "ONDON": 4524, + "Ġ1990": 4525, + "arm": 4526, + "ĠTen": 4527, + "These": 4528, + "Ġexplains": 4529, + "minute": 4530, + "85": 4531, + "Ġmaximum": 4532, + "Ġro": 4533, + "Ġrookie": 4534, + "Ġstudio": 4535, + "ĠCam": 4536, + "ĠGal": 4537, + "Ġdefend": 4538, + "hand": 4539, + "53": 4540, + "ĠOil": 4541, + "Ġserves": 4542, + "Ġsn": 4543, + "ios": 4544, + "ĠDefense": 4545, + "AB": 4546, + "Ġhired": 4547, + "Ġsupports": 4548, + "Ġpremium": 4549, + "ef": 4550, + "Ġfailing": 4551, + "ĠIndiana": 4552, + "Ġexp": 4553, + "Ġobjective": 4554, + "Ġaffordable": 4555, + "ĠCom": 4556, + "ĠThanks": 4557, + "Ġanywhere": 4558, + "Ġconfirm": 4559, + "ited": 4560, + "Ġrepresenting": 4561, + "Ġwitness": 4562, + "69": 4563, + "Ġclaiming": 4564, + "Ġviolation": 4565, + "Ġhistorical": 4566, + "med": 4567, + "Ġpreparing": 4568, + "ĠTech": 4569, + "Ġposts": 4570, + "OC": 4571, + "ĠGraham": 4572, + "ĠGl": 4573, + "ĠLions": 4574, + "ales": 4575, + "ĠID": 4576, + "Ġcorrect": 4577, + "ĠAntonio": 
4578, + "Ġadvertising": 4579, + "Ġeastern": 4580, + "OW": 4581, + "Ġholdings": 4582, + "Ġpolls": 4583, + "ĠSH": 4584, + "Ġexecutives": 4585, + "ĠJewish": 4586, + "ĠGary": 4587, + "Ġprize": 4588, + "ĠCommissioner": 4589, + "Ġcells": 4590, + "ify": 4591, + "Ġlunch": 4592, + "Ġdemocracy": 4593, + "ĠEr": 4594, + "Ġregularly": 4595, + "Ġresulted": 4596, + "ĠAve": 4597, + "ĠPartners": 4598, + "Ġrewritten": 4599, + "Ġlo": 4600, + "Ġcooperation": 4601, + "ĠGulf": 4602, + "Ġsmoke": 4603, + "ĠMemorial": 4604, + "Ġwave": 4605, + "Ġfears": 4606, + "Ġkid": 4607, + "ĠGiants": 4608, + "Ġrecovered": 4609, + "row": 4610, + "ĠRadio": 4611, + "ĠBarcelona": 4612, + "Ġwonderful": 4613, + "ĠDow": 4614, + "Ġstream": 4615, + "ĠSimon": 4616, + "Ġdetail": 4617, + "Ġvolunteers": 4618, + "ĠInd": 4619, + "Ġforms": 4620, + "mann": 4621, + "ĠRay": 4622, + "oor": 4623, + "ĠTake": 4624, + "Ġrepresented": 4625, + "het": 4626, + "Ġblow": 4627, + "aged": 4628, + "RE": 4629, + "ĠMissouri": 4630, + "Ġcovering": 4631, + "Ġprofits": 4632, + "Ġconcluded": 4633, + "Ġthus": 4634, + "ĠColumbia": 4635, + "ode": 4636, + "ĠZimbabwe": 4637, + "Ġdisclosed": 4638, + "Ġlifted": 4639, + "ĠSean": 4640, + "ĠHarvey": 4641, + "ĠPlus": 4642, + "ces": 4643, + "ĠGreece": 4644, + "ĠLady": 4645, + "Ġdelay": 4646, + "Ġkitchen": 4647, + "ĠIndex": 4648, + "Ġbear": 4649, + "Ġputs": 4650, + "new": 4651, + "88": 4652, + "ĠAsh": 4653, + "Å¡": 4654, + "Ġperforming": 4655, + "law": 4656, + "ĠPart": 4657, + "Ġindicated": 4658, + "Ġannounce": 4659, + "Ġcompensation": 4660, + "Ġka": 4661, + "ĠScience": 4662, + "ris": 4663, + "Ġrecommendations": 4664, + "ĠSecond": 4665, + "Ġlights": 4666, + "Ġtemporary": 4667, + "urs": 4668, + "Ġwestern": 4669, + "stone": 4670, + "68": 4671, + "ĠDisney": 4672, + "Ġplayoffs": 4673, + "Ġjudges": 4674, + "Ġengineering": 4675, + "ĠPen": 4676, + "ĠPal": 4677, + "Ġobvious": 4678, + "ĠBridge": 4679, + "ĠEnd": 4680, + "ĠArab": 4681, + "Ġexcept": 4682, + "Ġhole": 4683, + "class": 4684, + "Ġcauses": 4685, + "Ġconnect": 4686, + "ĠAI": 4687, + "An": 4688, + "Ġchose": 4689, + "ĠElizabeth": 4690, + "min": 4691, + "Ġproper": 4692, + "ĠNHL": 4693, + "Ġraces": 4694, + "Ġinnovation": 4695, + "Ġsugar": 4696, + "600": 4697, + "ĠModi": 4698, + "illa": 4699, + "Ġtrillion": 4700, + "ĠSar": 4701, + "ĠAffairs": 4702, + "Ġimpossible": 4703, + "Ġguide": 4704, + "Ġcaptured": 4705, + "ĠSales": 4706, + "Ġspecies": 4707, + "51": 4708, + "Ġar": 4709, + "Ġmaster": 4710, + "Ġstayed": 4711, + "iro": 4712, + "ĠEconomic": 4713, + "Ġvast": 4714, + "ili": 4715, + "Ġpet": 4716, + "ye": 4717, + "77": 4718, + "Ġkeeps": 4719, + "ĠPhil": 4720, + "ĠEPS": 4721, + "ĠRegional": 4722, + "Ġsectors": 4723, + "Ġdesire": 4724, + "ĠStanley": 4725, + "¾": 4726, + "Ġunknown": 4727, + "Ġpot": 4728, + "ĠPR": 4729, + "Ġknowing": 4730, + "Ġflying": 4731, + "ĠTreasury": 4732, + "iers": 4733, + "enn": 4734, + "ably": 4735, + "Ġsick": 4736, + "Ġmanner": 4737, + "Ġmanufacturers": 4738, + "Ġchampions": 4739, + "gy": 4740, + "Part": 4741, + "ister": 4742, + "ĠMountain": 4743, + "Ġimagine": 4744, + "Ġportion": 4745, + "ĠCamp": 4746, + "Ġchemical": 4747, + "ible": 4748, + "ĠAnaly": 4749, + "ĠBureau": 4750, + "Ġpm": 4751, + "Ġupdated": 4752, + "Ġetc": 4753, + "ĠField": 4754, + "iles": 4755, + "Ġobtained": 4756, + "Ġstick": 4757, + "Ġcat": 4758, + "har": 4759, + "Ġmarked": 4760, + "Ġmedium": 4761, + "ĠDes": 4762, + "People": 4763, + "Ġwealth": 4764, + "ores": 4765, + "ĠBaltimore": 4766, + "Ġtip": 4767, + "Ġdismissed": 4768, + "ĠVictoria": 4769, + "ĠBrad": 4770, + "Ch": 4771, + "Ġ56": 4772, + 
"Ġstadium": 4773, + "eth": 4774, + "Ġthunder": 4775, + "Ġtested": 4776, + "Ġdrawn": 4777, + "Ġcounsel": 4778, + "ld": 4779, + "Ġspirit": 4780, + "uss": 4781, + "Ġtheme": 4782, + "my": 4783, + "Ġnecessarily": 4784, + "Ġelements": 4785, + "Ġcollected": 4786, + "ĠRes": 4787, + "ĠMaryland": 4788, + "ĠEnter": 4789, + "Ġfounded": 4790, + "ae": 4791, + "Ġpilot": 4792, + "Ġshoulder": 4793, + "PC": 4794, + "Ġargument": 4795, + "Ġyen": 4796, + "Ġreceiver": 4797, + "Ġharm": 4798, + "ĠET": 4799, + "Ġprotesters": 4800, + "Ġ72": 4801, + "ĠAaron": 4802, + "Ġed": 4803, + "Ġexpecting": 4804, + "\":\"": 4805, + "Ġbike": 4806, + "Äĩ": 4807, + "Ġluxury": 4808, + "half": 4809, + "ĠBarbara": 4810, + "Ġfoundation": 4811, + "Ġill": 4812, + "Ġsubmitted": 4813, + "Ġdeeply": 4814, + "Ġhospitals": 4815, + "ĠBJP": 4816, + "Ġshock": 4817, + "Ġplatforms": 4818, + "Ġsummary": 4819, + "ĠWhere": 4820, + "Ġcelebration": 4821, + "iff": 4822, + "Ġveterans": 4823, + "Ġachieved": 4824, + "fl": 4825, + "Ġactivists": 4826, + "ĠManager": 4827, + "Ġformal": 4828, + "Ġformed": 4829, + "Ġinvestigate": 4830, + "ĠKyle": 4831, + "Ġ:": 4832, + "ĠRa": 4833, + "ovic": 4834, + "Ġdrinking": 4835, + "Ġnetworks": 4836, + "ĠAlexander": 4837, + "ĠOs": 4838, + "Ġ)": 4839, + "Ġbomb": 4840, + "Ġrecalled": 4841, + "ito": 4842, + "ient": 4843, + "Ġrepresentatives": 4844, + "ĠChrist": 4845, + "ĠWay": 4846, + "Ġdeadly": 4847, + "Ġinvesting": 4848, + "ĠRussell": 4849, + "Ġconsumption": 4850, + "Ġharder": 4851, + "Ġbail": 4852, + "Ġcritics": 4853, + "Ġdanger": 4854, + "Ġdrew": 4855, + "ĠSol": 4856, + "Ġcopyright": 4857, + "ĠHenry": 4858, + "Ġbuyers": 4859, + "Ġresidential": 4860, + "Ġmaintenance": 4861, + "pr": 4862, + "Ġmarks": 4863, + "Ġages": 4864, + "Ġcovers": 4865, + "Ġton": 4866, + "Ġtitles": 4867, + "ĠPS": 4868, + "ĠEvans": 4869, + "Ġmigrants": 4870, + "Ġflights": 4871, + "Ġmonitoring": 4872, + "Ġaddressed": 4873, + "Ġvital": 4874, + "Ġcontrolled": 4875, + "Ġweapon": 4876, + "Ġinches": 4877, + "Ġreduction": 4878, + "Ġurban": 4879, + "Ġcoaching": 4880, + "Ġreducing": 4881, + "ila": 4882, + "Ġrealize": 4883, + "Ġmeat": 4884, + "Ġref": 4885, + "Ġoverseas": 4886, + "Ġblame": 4887, + "Ġterrorist": 4888, + "Ġstuck": 4889, + "ĠUs": 4890, + "esh": 4891, + "pro": 4892, + "Ġ58": 4893, + "ough": 4894, + "Ġexposure": 4895, + "ĠAbu": 4896, + "state": 4897, + "Ġproviders": 4898, + "Ġfore": 4899, + "Ġjet": 4900, + "bar": 4901, + "Ġownership": 4902, + "ret": 4903, + "Ġupset": 4904, + "Ġfacts": 4905, + "Ġpurchasing": 4906, + "Ġreforms": 4907, + "Ġriver": 4908, + "Ġsomebody": 4909, + "Ġguest": 4910, + "iy": 4911, + "Ġauction": 4912, + "ĠReading": 4913, + "Ġconsequences": 4914, + "Ġrepresentative": 4915, + "Ġappointment": 4916, + "add": 4917, + "Ġcollaboration": 4918, + "ĠTesla": 4919, + "ĠCohen": 4920, + "Ġengagement": 4921, + "Ġspeaks": 4922, + "EST": 4923, + "Ġexposed": 4924, + "Ġmaintained": 4925, + "rs": 4926, + "Ġdating": 4927, + "ĠProgram": 4928, + "board": 4929, + "Ġracing": 4930, + "Ġpension": 4931, + "ign": 4932, + "iti": 4933, + "ĠFive": 4934, + "Ġextensive": 4935, + "ĠHa": 4936, + "ĠPoint": 4937, + "ĠMexican": 4938, + "Ġexpanded": 4939, + "Ġtotally": 4940, + "Ġinvestigations": 4941, + "ĠOrleans": 4942, + "Ġcycle": 4943, + "ĠESPN": 4944, + "ifying": 4945, + "Ġcup": 4946, + "ĠAz": 4947, + "ĠInvestors": 4948, + "Ġengage": 4949, + "reg": 4950, + "Ġfought": 4951, + "Ġterrorism": 4952, + "Ġblocked": 4953, + "ĠOK": 4954, + "Äį": 4955, + "72": 4956, + "Ġdestroyed": 4957, + "«": 4958, + "Ġstaying": 4959, + "Ġafford": 4960, + "Ġappearances": 4961, + "ĠHills": 
4962, + "Ġcrore": 4963, + "Ġstrategies": 4964, + "Ġtips": 4965, + "ĠSm": 4966, + "ĠFr": 4967, + "Ġbanned": 4968, + "ĠSon": 4969, + "ask": 4970, + "Ġlimits": 4971, + "Ġrecognition": 4972, + "Ġeligible": 4973, + "ĠGar": 4974, + "Ġvolatility": 4975, + "Ġlaid": 4976, + "nes": 4977, + "Ġgrade": 4978, + "ĠRE": 4979, + "ĠHart": 4980, + "Ġ57": 4981, + "oma": 4982, + "Ġuncertainty": 4983, + "Ġrecognized": 4984, + "ĠPC": 4985, + "Ġchosen": 4986, + "uz": 4987, + "Ġadviser": 4988, + "una": 4989, + "Ġassessment": 4990, + "Ġreveal": 4991, + "mo": 4992, + "After": 4993, + "ĠBro": 4994, + "ĠOff": 4995, + "Ġpeak": 4996, + "Ġreferred": 4997, + "ĠSC": 4998, + "Ġ2003": 4999, + "ification": 5000, + "Ġshutdown": 5001, + "ĠOfficials": 5002, + "ias": 5003, + "Ġextreme": 5004, + "Ġflood": 5005, + "Ġhockey": 5006, + "Ġwage": 5007, + "ĠNet": 5008, + "Ġdamaged": 5009, + "Ġreplacement": 5010, + "ĠMaria": 5011, + "Ġcreation": 5012, + "Ġguns": 5013, + "aci": 5014, + "Ġworker": 5015, + "do": 5016, + "Ġviewers": 5017, + "Ġseed": 5018, + "sts": 5019, + "Ġtouchdowns": 5020, + "Ġmistake": 5021, + "ray": 5022, + "ull": 5023, + "Ġpricing": 5024, + "Ġstrongly": 5025, + "Ġaims": 5026, + "ĠNavy": 5027, + "ĠEgypt": 5028, + "ker": 5029, + "Ġve": 5030, + "ĠSteven": 5031, + "Ġres": 5032, + "ational": 5033, + "Ġrequests": 5034, + "Ġemissions": 5035, + "ĠArena": 5036, + "uma": 5037, + "ĠAtlantic": 5038, + "hr": 5039, + "ĠAFP": 5040, + "ĠSquare": 5041, + "Ġcontribute": 5042, + "Ġfunction": 5043, + "Ġdec": 5044, + "ĠNelson": 5045, + "89": 5046, + "Ġreferendum": 5047, + "ĠPre": 5048, + "Ġapplied": 5049, + "ĠGMT": 5050, + "ĠIranian": 5051, + "ĠNigerian": 5052, + "ĠAny": 5053, + "NG": 5054, + "Ġacknowledged": 5055, + "Ġreferring": 5056, + "Ġventure": 5057, + "Ġimports": 5058, + "Ġblog": 5059, + "Ġfutures": 5060, + "OU": 5061, + "ĠUFC": 5062, + "Ġneither": 5063, + "Ġextension": 5064, + "hes": 5065, + "ĠMed": 5066, + "76": 5067, + "Ġsustainable": 5068, + "ains": 5069, + "Ġreputation": 5070, + "ĠVancouver": 5071, + "Ġbasically": 5072, + "acy": 5073, + "Ġsad": 5074, + "ĠFrancis": 5075, + "ĠKennedy": 5076, + "ĠNevada": 5077, + "ĠLu": 5078, + "ras": 5079, + "ĠAv": 5080, + "Ġrear": 5081, + "ĠHo": 5082, + "Ġproperly": 5083, + "abe": 5084, + "ĠHotel": 5085, + "Ġopinions": 5086, + "under": 5087, + "ĠStation": 5088, + "ĠFOR": 5089, + "ops": 5090, + "Ġadopted": 5091, + "ĠSwiss": 5092, + "ĠCountry": 5093, + "ĠTer": 5094, + "ĠAndy": 5095, + "Me": 5096, + "ĠCooper": 5097, + "ĠTigers": 5098, + "ĠCreek": 5099, + "Ġgay": 5100, + "iner": 5101, + "ĠAN": 5102, + "Ġbird": 5103, + "lla": 5104, + "ĠKate": 5105, + "ĠPet": 5106, + "ni": 5107, + "Ġprospects": 5108, + "ater": 5109, + "ites": 5110, + "Ġescape": 5111, + "lam": 5112, + "ake": 5113, + "Ġ1980": 5114, + "ĠLag": 5115, + "Ġsuccessfully": 5116, + "Ġdistricts": 5117, + "Ġministers": 5118, + "aries": 5119, + "Ġframe": 5120, + "ĠON": 5121, + "ĠEuro": 5122, + "ĠMarkets": 5123, + "Ġregister": 5124, + "Ġdefeated": 5125, + "Ġdevelopments": 5126, + "Ġninth": 5127, + "Ġquiet": 5128, + "Ġgenerated": 5129, + "Ġvaluable": 5130, + "Ġrecommended": 5131, + "ĠTheatre": 5132, + "ĠCap": 5133, + "bed": 5134, + "Ġreference": 5135, + "Ġease": 5136, + "oring": 5137, + "Ġ66": 5138, + "Ġimprovements": 5139, + "Ġelsewhere": 5140, + "ĠHillary": 5141, + "Ġdefender": 5142, + "ĠRight": 5143, + "zy": 5144, + "Ġcomprehensive": 5145, + "Ġspotted": 5146, + "ĠOakland": 5147, + "ĠOk": 5148, + "ĠSystem": 5149, + "ique": 5150, + "Ġpersons": 5151, + "Ġexist": 5152, + "Ġbroader": 5153, + "Ġclinical": 5154, + "Ġ2001": 5155, + "oul": 5156, + 
"Ġsecurities": 5157, + "ghan": 5158, + "Ġshelter": 5159, + "ero": 5160, + "ATED": 5161, + "Ġhosting": 5162, + "Ġselect": 5163, + "ĠKavanaugh": 5164, + "Ġrestrictions": 5165, + "osa": 5166, + "Ġyields": 5167, + "ĠLA": 5168, + "Ġ59": 5169, + "Ġwonder": 5170, + "Ġabsence": 5171, + "ür": 5172, + "ÅĤ": 5173, + "DP": 5174, + "Ġelectronic": 5175, + "Ġillegally": 5176, + "Ġmicro": 5177, + "ĠNEW": 5178, + "Ġhall": 5179, + "Ġaged": 5180, + "Ġtemperature": 5181, + "cast": 5182, + "atic": 5183, + "Ġlegacy": 5184, + "Ġaffairs": 5185, + "ji": 5186, + "ĠResources": 5187, + "Ġgang": 5188, + "winning": 5189, + "Ġattending": 5190, + "aro": 5191, + "Ġfriendly": 5192, + "aine": 5193, + "Ġcannabis": 5194, + "Ġairline": 5195, + "Ġnoting": 5196, + "Ġprofessionals": 5197, + "ĠFREE": 5198, + "RC": 5199, + "Ġfinancing": 5200, + "Ġindependence": 5201, + "ved": 5202, + "Ġresulting": 5203, + "Ġsteady": 5204, + "ĠWinter": 5205, + "uring": 5206, + "Ġhoped": 5207, + "98": 5208, + "Ġpresentation": 5209, + "aya": 5210, + "Ġrated": 5211, + "osh": 5212, + "ĠAnalysis": 5213, + "=": 5214, + "Ġdonations": 5215, + "IR": 5216, + "Ġcombat": 5217, + "ĠHoward": 5218, + "anda": 5219, + "79": 5220, + "Ġinvested": 5221, + "Ġexpanding": 5222, + "omb": 5223, + "ress": 5224, + "ble": 5225, + "Ġjournalist": 5226, + "ĠWoods": 5227, + "Ġcenters": 5228, + "ott": 5229, + "Ġstreaming": 5230, + "Ġterror": 5231, + "Ġsustained": 5232, + "ĠWWE": 5233, + "pre": 5234, + "ÅŁ": 5235, + "ait": 5236, + "Ġarrival": 5237, + "Ġresidence": 5238, + "Ġextent": 5239, + "Ġarrive": 5240, + "Ġ2002": 5241, + "Ġestablish": 5242, + "74": 5243, + "ĠArgentina": 5244, + "ĠDem": 5245, + "inn": 5246, + "aud": 5247, + "ĠNCAA": 5248, + "Ġquestioned": 5249, + "Ġballot": 5250, + "Ġmin": 5251, + "Ġlandscape": 5252, + "Ġhorse": 5253, + "Ġopponent": 5254, + "iel": 5255, + "Ġprompted": 5256, + "atory": 5257, + "Ġlift": 5258, + "Ġassociation": 5259, + "cher": 5260, + "Ġdefending": 5261, + "Ġtiny": 5262, + "Ġpoverty": 5263, + "ĠSafety": 5264, + "Ġpetition": 5265, + "ĠLimited": 5266, + "ĠCA": 5267, + "FC": 5268, + "Ãł": 5269, + "oni": 5270, + "Ġmonitor": 5271, + "ÃŃa": 5272, + "MA": 5273, + "Ġanswers": 5274, + "ĠMitchell": 5275, + "Ġbo": 5276, + "ĠShah": 5277, + "Ġsm": 5278, + "Ġmedal": 5279, + "ĠCivil": 5280, + "Ġrecognize": 5281, + "key": 5282, + "Ġpregnant": 5283, + "Ġspots": 5284, + "ante": 5285, + "Ġacademic": 5286, + "Ġinitiatives": 5287, + "Ġsecured": 5288, + "ĠCL": 5289, + "ils": 5290, + "Ġanticipated": 5291, + "Ġinvolvement": 5292, + "ĠMake": 5293, + "Ġinsisted": 5294, + "ĠWales": 5295, + "Ġclothing": 5296, + "Ġtracks": 5297, + "Ġsymptoms": 5298, + "Ġplate": 5299, + "ĠNY": 5300, + "Ġretailers": 5301, + "ĠPan": 5302, + "Ġfled": 5303, + "Ġquoted": 5304, + "Ġsaved": 5305, + "ĠCarter": 5306, + "Ġteaching": 5307, + "ĠTokyo": 5308, + "ĠCr": 5309, + "ĠSix": 5310, + "ĠPicture": 5311, + "Ġrecover": 5312, + "Ġcomedy": 5313, + "ree": 5314, + "Ġstrikes": 5315, + "ĠSanders": 5316, + "sel": 5317, + "Ġgraduate": 5318, + "Ġpending": 5319, + "St": 5320, + "Ġwarrant": 5321, + "Ġhonest": 5322, + "ĠGM": 5323, + "Ġnoticed": 5324, + "ĠGalaxy": 5325, + "ider": 5326, + "Ġproposals": 5327, + "Ġwore": 5328, + "Ġindeed": 5329, + "EM": 5330, + "ĠChannel": 5331, + "ances": 5332, + "ĠBrady": 5333, + "86": 5334, + "Ġgotten": 5335, + "Ġthrowing": 5336, + "ĠLeader": 5337, + "ĠVideo": 5338, + "71": 5339, + "Ġwelcomed": 5340, + "NEW": 5341, + "Ġfairly": 5342, + "Ġpromises": 5343, + "ĠSilver": 5344, + "Ġrape": 5345, + "Ġopener": 5346, + "ares": 5347, + "ĠSir": 5348, + "making": 5349, + "Ġcur": 5350, + 
"Ġrooms": 5351, + "73": 5352, + "Ġamounts": 5353, + "ĠIndustry": 5354, + "ĠDar": 5355, + "Ġ62": 5356, + "ted": 5357, + "Ġabroad": 5358, + "ĠMaybe": 5359, + "Ġreaders": 5360, + "oke": 5361, + "Ġpublication": 5362, + "ĠJean": 5363, + "Ġoperator": 5364, + "ĠHaving": 5365, + "ĠMil": 5366, + "life": 5367, + "Ġgenerate": 5368, + "ĠCraig": 5369, + "ĠMass": 5370, + "ĠBh": 5371, + "Ġrequested": 5372, + "Ġcrazy": 5373, + "ĠSpace": 5374, + "Ġcopy": 5375, + "Ġexport": 5376, + "Ġcontext": 5377, + "Ġbr": 5378, + "62": 5379, + "ĠRobinson": 5380, + "Ġcyber": 5381, + "ENT": 5382, + "BI": 5383, + "arg": 5384, + "Ġspeaker": 5385, + "Ġdramatic": 5386, + "ĠOl": 5387, + "ĠMill": 5388, + "Ġtrained": 5389, + "Ġediting": 5390, + "Ġsalary": 5391, + "Ġdirectors": 5392, + "Ġexplore": 5393, + "Ġlucky": 5394, + "Ġprominent": 5395, + "Ġbrothers": 5396, + "Ġneck": 5397, + "icht": 5398, + "ĠWatson": 5399, + "born": 5400, + "Ġproven": 5401, + "Ġprincipal": 5402, + "Ġedition": 5403, + "Ed": 5404, + "Ġswitch": 5405, + "maker": 5406, + "Ġrelative": 5407, + "mi": 5408, + "ĠBruce": 5409, + "ho": 5410, + "ĠScottish": 5411, + "water": 5412, + "ĠSport": 5413, + "ĠKings": 5414, + "ĠCollins": 5415, + "adi": 5416, + "Ġcelebrated": 5417, + "Ġclothes": 5418, + "Ġsunny": 5419, + "ĠCharlotte": 5420, + "ees": 5421, + "Ġscenes": 5422, + "ĠData": 5423, + "Ġwounded": 5424, + "Ġunusual": 5425, + "Ġrealized": 5426, + "ĠPlan": 5427, + "ĠTrans": 5428, + "ĠFC": 5429, + "Ġletters": 5430, + "Ġalerts": 5431, + "ĠWarren": 5432, + "DS": 5433, + "oss": 5434, + "pping": 5435, + "Ġsuspension": 5436, + "Ġbenchmark": 5437, + "ĠAcc": 5438, + "Ġalert": 5439, + "Ġpassion": 5440, + "ĠEst": 5441, + "Ġlatter": 5442, + "Ġstability": 5443, + "Ġarts": 5444, + "Ġpursue": 5445, + "ĠSeason": 5446, + "Ġfields": 5447, + "Ġmethod": 5448, + "63": 5449, + "Ġfolks": 5450, + "Ġexclusive": 5451, + "Ġcrews": 5452, + "Ġsessions": 5453, + "ĠMajor": 5454, + "ĠMount": 5455, + "Ġmap": 5456, + "Ġ=": 5457, + "Ġsituations": 5458, + "ĠBerlin": 5459, + "rey": 5460, + "Ġdates": 5461, + "Ġsheet": 5462, + "ĠLo": 5463, + "Ġfighters": 5464, + "ĠMart": 5465, + "Ġatmosphere": 5466, + "Ġillness": 5467, + "Ġcompeting": 5468, + "ĠChristopher": 5469, + "ĠRoy": 5470, + "mm": 5471, + "iano": 5472, + "Ġge": 5473, + "ĠRams": 5474, + "Ġconversations": 5475, + "ĠPa": 5476, + "ĠTel": 5477, + "Ġappreciate": 5478, + "78": 5479, + "ĠTotal": 5480, + "low": 5481, + "ĠStone": 5482, + "Ġopposite": 5483, + "Ġbarrel": 5484, + "Ġdevelopers": 5485, + "Ġexpress": 5486, + "Ġhighs": 5487, + "which": 5488, + "par": 5489, + "ĠVietnam": 5490, + "Ġblocks": 5491, + "Ġrecording": 5492, + "Ġadjusted": 5493, + "Ġret": 5494, + "ĠAR": 5495, + "Ġmilitants": 5496, + "Ġinnovative": 5497, + "ĠGhana": 5498, + "FR": 5499, + "Ġfantastic": 5500, + "Ġmortgage": 5501, + "ando": 5502, + "ĠLane": 5503, + "ises": 5504, + "ĠÂ": 5505, + "Ġhomeless": 5506, + "ĠKal": 5507, + "Ġapproached": 5508, + "Ġrounds": 5509, + "Ġmargins": 5510, + "ament": 5511, + "ĠMotor": 5512, + "Ġencouraging": 5513, + "ÂŃ": 5514, + "uru": 5515, + "Ġhandling": 5516, + "ĠMassachusetts": 5517, + "Ġplanet": 5518, + "ĠSpring": 5519, + "ĠBon": 5520, + "gu": 5521, + "Beat": 5522, + "Ġdrawing": 5523, + "ĠPhoenix": 5524, + "very": 5525, + "aid": 5526, + "ĠSte": 5527, + "ĠEntertainment": 5528, + "ĠRon": 5529, + "Ġassigned": 5530, + "ĠSA": 5531, + "News": 5532, + "Ġinterviews": 5533, + "ĠOh": 5534, + "media": 5535, + "vel": 5536, + "Ġpermission": 5537, + "Ġtransactions": 5538, + "Ġtraders": 5539, + "Ġsolo": 5540, + "Ġprovincial": 5541, + "Ġsuggesting": 5542, + "¡": 5543, + 
"Ġdiverse": 5544, + "Ġ67": 5545, + "Ġranks": 5546, + "ĠFre": 5547, + "Ġfavourite": 5548, + "Ġ63": 5549, + "Ġdifferences": 5550, + "Ġtargeting": 5551, + "Ġactors": 5552, + "Ġ76": 5553, + "icated": 5554, + "Ġcollect": 5555, + "akes": 5556, + "war": 5557, + "Ġcontained": 5558, + "ches": 5559, + "Ġlibrary": 5560, + "Ġsegments": 5561, + "ĠLine": 5562, + "ê": 5563, + "ual": 5564, + "Ġbags": 5565, + "Ġfactory": 5566, + "Ġear": 5567, + "Ġsomewhat": 5568, + "Ġrail": 5569, + "ĠUP": 5570, + "ula": 5571, + "ĠNiger": 5572, + "Ġlas": 5573, + "Ġimplementation": 5574, + "Ġemails": 5575, + "kel": 5576, + "wing": 5577, + "Ġadvised": 5578, + "--": 5579, + "istic": 5580, + "Ġdepth": 5581, + "Ġshoes": 5582, + "ĠJennifer": 5583, + "Ġvenue": 5584, + "Ġcontain": 5585, + "Ġhighlights": 5586, + "Ġcapabilities": 5587, + "Ġprocesses": 5588, + "Ġtradition": 5589, + "Ġcontacted": 5590, + "Ġproducing": 5591, + "Ġtrail": 5592, + "rem": 5593, + "Ġ600": 5594, + "Ġ68": 5595, + "AA": 5596, + "ĠBa": 5597, + "ĠSuch": 5598, + "ĠTyler": 5599, + "ipp": 5600, + "Ġsurvived": 5601, + "ami": 5602, + "ĠContinue": 5603, + "Ġcapture": 5604, + "bi": 5605, + "61": 5606, + "96": 5607, + "Ġthreatening": 5608, + "Ġkeen": 5609, + "dale": 5610, + "Ġtrailer": 5611, + "Ġstages": 5612, + "ĠGordon": 5613, + "Ġfinishing": 5614, + "Ġlegislative": 5615, + "Ġuseful": 5616, + "ĠGreek": 5617, + "ald": 5618, + "Ġgrounds": 5619, + "ĠDu": 5620, + "storms": 5621, + "ills": 5622, + "Ġexpense": 5623, + "Ġdetained": 5624, + "Today": 5625, + "Ġdiet": 5626, + "Ġwood": 5627, + "ĠCameron": 5628, + "Ġthrown": 5629, + "Ġcricket": 5630, + "Ġideal": 5631, + "with": 5632, + "Ġteammates": 5633, + "ours": 5634, + "Ġprojected": 5635, + "Ġpersonally": 5636, + "ĠBoy": 5637, + "rom": 5638, + "ĠPhilippines": 5639, + "win": 5640, + "ges": 5641, + "Ġcounties": 5642, + "ĠBaker": 5643, + "Ġprosecutor": 5644, + "Ġroof": 5645, + "met": 5646, + "Ġpartly": 5647, + "ĠMoon": 5648, + "eman": 5649, + "Ġfocusing": 5650, + "Ġfishing": 5651, + "than": 5652, + "ĠJeremy": 5653, + "ĠBad": 5654, + "ais": 5655, + "Ġcontrols": 5656, + "Ġtonnes": 5657, + "Ġshall": 5658, + "Ġ61": 5659, + "Ġgathering": 5660, + "ĠERA": 5661, + "Ġpresidency": 5662, + "Ġ85": 5663, + "ĠGas": 5664, + "Ġscenario": 5665, + "Ġquarters": 5666, + "Ġang": 5667, + "Ġsettled": 5668, + "ĠCommerce": 5669, + "Ġanybody": 5670, + "Ġgarden": 5671, + "ĠLibrary": 5672, + "Ġbet": 5673, + "Ġtopic": 5674, + "olo": 5675, + "Ġintense": 5676, + "87": 5677, + "Ġlinks": 5678, + "Ġmed": 5679, + "ĠAG": 5680, + "Ġflooding": 5681, + "ĠMurphy": 5682, + "PM": 5683, + "Ġfinds": 5684, + "Ġsensitive": 5685, + "pped": 5686, + "Ġcompletion": 5687, + "Ġminority": 5688, + "Ġvon": 5689, + "Ġstriking": 5690, + "rich": 5691, + "Ġbars": 5692, + "Ġefficient": 5693, + "Ġcontributions": 5694, + "Ġvisits": 5695, + "Ġattract": 5696, + "ĠMalaysia": 5697, + "ĠREL": 5698, + "Ġopens": 5699, + "Ġessentially": 5700, + "Ġreasonable": 5701, + "Ġsentiment": 5702, + "ĠMelbourne": 5703, + "Ġfitness": 5704, + "Ġfrequently": 5705, + "ĠRangers": 5706, + "Ġmuseum": 5707, + "ĠDNA": 5708, + "Ġcontrast": 5709, + "ĠAdams": 5710, + "ĠWin": 5711, + "Ġfalls": 5712, + "Ġimposed": 5713, + "250": 5714, + "ood": 5715, + "ĠRio": 5716, + "Ġchoices": 5717, + "Ġyellow": 5718, + "rin": 5719, + "ben": 5720, + "ĠStaff": 5721, + "ĠIndonesia": 5722, + "Ġcarries": 5723, + "Ġtourism": 5724, + "UM": 5725, + "ĠOrange": 5726, + "sell": 5727, + "Ġresolve": 5728, + "ĠMumbai": 5729, + "Ġpan": 5730, + "Ġimplement": 5731, + "Ġmidfielder": 5732, + "OP": 5733, + "Ġtensions": 5734, + "Ġ800": 5735, + "ĠLord": 
5736, + "ĠLight": 5737, + "Ġlies": 5738, + "és": 5739, + "Ġparticipation": 5740, + "Ġtries": 5741, + "Ġsheriff": 5742, + "degree": 5743, + "Ġcongressional": 5744, + "Ġmode": 5745, + "Ġregulation": 5746, + "ĠJacob": 5747, + "ĠCrown": 5748, + "Ġbowl": 5749, + "ĠMississippi": 5750, + "Ġtheft": 5751, + "ĠKingdom": 5752, + "Ġresort": 5753, + "Ġroyal": 5754, + "Ġunemployment": 5755, + "PP": 5756, + "Ġnomination": 5757, + "ĠTR": 5758, + "Ġbehaviour": 5759, + "bank": 5760, + "ĠForest": 5761, + "WASHINGTON": 5762, + "ĠOthers": 5763, + "Ġslowly": 5764, + "Ġmenu": 5765, + "vo": 5766, + "ĠSy": 5767, + "ĠMetro": 5768, + "ĠLisa": 5769, + "Ġregistration": 5770, + "While": 5771, + "ĠJesus": 5772, + "Ġ250": 5773, + "Ġprocessing": 5774, + "Ġmonetary": 5775, + "ape": 5776, + "ener": 5777, + "ĠSystems": 5778, + "Ġdisappointed": 5779, + "Ġprint": 5780, + "uy": 5781, + "ħ": 5782, + "Ġdemanding": 5783, + "Ġincredibly": 5784, + "play": 5785, + "Ġsurveillance": 5786, + "ĠStandard": 5787, + "Ġperiods": 5788, + "Ġwrites": 5789, + "ĠLuke": 5790, + "ĠPalestinian": 5791, + "Ġwalks": 5792, + "Ġriding": 5793, + "Ġwaters": 5794, + "ĠSox": 5795, + "Ġtraveling": 5796, + "Ġtap": 5797, + "Ġorganized": 5798, + "Ġresource": 5799, + "Ġangry": 5800, + "Ġtiming": 5801, + "Ġempty": 5802, + "Ġmilk": 5803, + "Ġtherapy": 5804, + "ĠBrandon": 5805, + "mon": 5806, + "Ġnationwide": 5807, + "Ġnovel": 5808, + "ĠStorm": 5809, + "iet": 5810, + "ĠBre": 5811, + "Ġbegun": 5812, + "Ġdiplomatic": 5813, + "Ġads": 5814, + "ĠDC": 5815, + "ĠOb": 5816, + "ĠMontreal": 5817, + "ĠDown": 5818, + "ĠMilwaukee": 5819, + "Ġmeal": 5820, + "ĠPuerto": 5821, + "ĠMas": 5822, + "Ġjoy": 5823, + "Ġdeparture": 5824, + "ĠWright": 5825, + "Ġspoken": 5826, + "style": 5827, + "ĠAction": 5828, + "ĠComey": 5829, + "Ġdelivering": 5830, + "Ġtoll": 5831, + "Ġmidnight": 5832, + "ĠRevenue": 5833, + "Ġfiring": 5834, + "Ġstunning": 5835, + "Ġkicked": 5836, + "ĠOttawa": 5837, + "Ġefficiency": 5838, + "ĠLincoln": 5839, + "Ġtaste": 5840, + "ez": 5841, + "ĠWeather": 5842, + "ĠMorning": 5843, + "Ġhadn": 5844, + "Ġdiversity": 5845, + "ily": 5846, + "ĠAy": 5847, + "Ġargue": 5848, + "Ġerror": 5849, + "Ġtaught": 5850, + "Ġche": 5851, + "Ġoccasion": 5852, + "Ġinc": 5853, + "ĠOrlando": 5854, + "ĠOnline": 5855, + "Ġlegs": 5856, + "ĠNation": 5857, + "uck": 5858, + "Ġwidespread": 5859, + "ĠOcean": 5860, + "Ġconstantly": 5861, + "ĠLatin": 5862, + "Ġcomfort": 5863, + "Ġrely": 5864, + "uff": 5865, + "ĠCard": 5866, + "aring": 5867, + "Ġhumans": 5868, + "ĠThomson": 5869, + "aka": 5870, + "BIT": 5871, + "ĠReview": 5872, + "po": 5873, + "ú": 5874, + "Ġtrucks": 5875, + "Ġforecasts": 5876, + "view": 5877, + "Ġlongtime": 5878, + "ĠConstitution": 5879, + "Ġreserves": 5880, + "bit": 5881, + "Ġstressed": 5882, + "Ġcontribution": 5883, + "Ġchicken": 5884, + "ĠDE": 5885, + "Ġfat": 5886, + "ĠOscar": 5887, + "Ġcriticized": 5888, + "Ġtestimony": 5889, + "Ġapparent": 5890, + "Ġconstant": 5891, + "Ġcabinet": 5892, + "ĠDuke": 5893, + "Ġaspects": 5894, + "lic": 5895, + "ĠVol": 5896, + "Ġwing": 5897, + "Ġreb": 5898, + "ĠSessions": 5899, + "ĠSmart": 5900, + "car": 5901, + "ĠIm": 5902, + "Ġoperational": 5903, + "Ġregulators": 5904, + "ĠJimmy": 5905, + "eter": 5906, + "Ġnobody": 5907, + "ĠMarc": 5908, + "Ġliterally": 5909, + "Ġresistance": 5910, + "ĠKam": 5911, + "Ġsexually": 5912, + "Ġ69": 5913, + "uth": 5914, + "Ġviewed": 5915, + "Ġpicks": 5916, + "Ġdin": 5917, + "Ġtalented": 5918, + "Ġtennis": 5919, + "Ġstrengthen": 5920, + "Ġgl": 5921, + "ĠProtection": 5922, + "Ġinstalled": 5923, + "ways": 5924, + "ĠCampbell": 5925, 
+ "ĠPortland": 5926, + "Ġintent": 5927, + "ĠPalace": 5928, + "Ġsecondary": 5929, + "Ġlocked": 5930, + "ĠPA": 5931, + "Ġlanded": 5932, + "Ġlength": 5933, + "Ġboosted": 5934, + "Ġpurchases": 5935, + "Ġcommand": 5936, + "ĠAsked": 5937, + "Ġspaces": 5938, + "Ġiconic": 5939, + "Ġrecommend": 5940, + "Ġduties": 5941, + "Ġseized": 5942, + "Ġdelayed": 5943, + "FA": 5944, + "AND": 5945, + "daq": 5946, + "Ġhiring": 5947, + "Ġoccur": 5948, + "DC": 5949, + "ĠMus": 5950, + "Ġag": 5951, + "Ġhopefully": 5952, + "ĠPenn": 5953, + "ards": 5954, + "Ġstriker": 5955, + "Ġrent": 5956, + "ĠTy": 5957, + "ĠBuffalo": 5958, + "ĠKy": 5959, + "Ġhike": 5960, + "pper": 5961, + "Ġ120": 5962, + "Ġop": 5963, + "Ġwheel": 5964, + "ĠIan": 5965, + "Ġchart": 5966, + "tt": 5967, + "Ġvolunteer": 5968, + "IG": 5969, + "person": 5970, + "ight": 5971, + "ĠBook": 5972, + "unt": 5973, + "ĠTechnologies": 5974, + "Now": 5975, + "Ġfavour": 5976, + "ĠGh": 5977, + "ĠQatar": 5978, + "ĠDutch": 5979, + "ĠGrant": 5980, + "ĠBan": 5981, + "rel": 5982, + "Ġagreements": 5983, + "Ġeducational": 5984, + "worth": 5985, + "ĠWard": 5986, + "700": 5987, + "Ġanymore": 5988, + "Ġrepair": 5989, + "Ġoperators": 5990, + "ĠLi": 5991, + "ots": 5992, + "ĠLouisiana": 5993, + "ĠWhether": 5994, + "Ġodds": 5995, + "Ġnoon": 5996, + "ĠStr": 5997, + "Ġfail": 5998, + "iser": 5999, + "Ġforever": 6000, + "Ġrecall": 6001, + "ĠPo": 6002, + "ĠHot": 6003, + "Ġdesigner": 6004, + "ido": 6005, + "LL": 6006, + "ĠControl": 6007, + "Ġsurvive": 6008, + "iam": 6009, + "Ġorganisation": 6010, + "ĠWork": 6011, + "Ġwider": 6012, + "Ġtank": 6013, + "work": 6014, + "ĠAS": 6015, + "Ġposting": 6016, + "Ġsuddenly": 6017, + "MC": 6018, + "ĠAL": 6019, + "ĠProfessor": 6020, + "ĠCoach": 6021, + "Ġrushed": 6022, + "Ġafraid": 6023, + "Ġactivist": 6024, + "that": 6025, + "ĠFilm": 6026, + "Ġbacking": 6027, + "Ġhousehold": 6028, + "Ġsignal": 6029, + "Ġaccurate": 6030, + "str": 6031, + "ĠThread": 6032, + "ĠBears": 6033, + "ATION": 6034, + "ĠAlliance": 6035, + "ĠMcDonald": 6036, + "ĠVenezuela": 6037, + "ogg": 6038, + "ĠWindows": 6039, + "makers": 6040, + "Ġutility": 6041, + "Ġrapidly": 6042, + "Ġattractive": 6043, + "Ġpa": 6044, + "ĠLarry": 6045, + "Ġmisconduct": 6046, + "Ġfreshman": 6047, + "Ġqualified": 6048, + "Ġcleared": 6049, + "Ġcrashed": 6050, + "Ġparticipating": 6051, + "Ġpages": 6052, + "Ġhighlight": 6053, + "Ġdialogue": 6054, + "ĠAlberta": 6055, + "Ġca": 6056, + "Ġwitnesses": 6057, + "ables": 6058, + "Ġfollowers": 6059, + "Ġensuring": 6060, + "Ġpromoting": 6061, + "Ġsearching": 6062, + "Ġremote": 6063, + "Ġclash": 6064, + "Ġfirefighters": 6065, + "Ġteen": 6066, + "ĠPlace": 6067, + "ĠNote": 6068, + "Ġregardless": 6069, + "ult": 6070, + "oney": 6071, + "ander": 6072, + "ional": 6073, + "ining": 6074, + "Ġdemanded": 6075, + "ĠCommunications": 6076, + "Ġconsideration": 6077, + "TC": 6078, + "ĠSoutheast": 6079, + "aga": 6080, + "ĠGarden": 6081, + "inger": 6082, + "ht": 6083, + "Ġbranch": 6084, + "Ġmouth": 6085, + "Ġaudio": 6086, + "Ġraw": 6087, + "Ġcoordinator": 6088, + "Ġexact": 6089, + "ĠHan": 6090, + "Ġdelays": 6091, + "ĠWal": 6092, + "ĠWells": 6093, + "Ġng": 6094, + "Ġhandful": 6095, + "Ġgirlfriend": 6096, + "Ġtypical": 6097, + "ĠWayne": 6098, + "ĠFranklin": 6099, + "Ġconstitutional": 6100, + "ĠChance": 6101, + "Ġblamed": 6102, + "rim": 6103, + "Ġpreliminary": 6104, + "Ġlie": 6105, + "da": 6106, + "ĠCapitol": 6107, + "Ġroutine": 6108, + "ĠNASA": 6109, + "Ġtre": 6110, + "ĠGolf": 6111, + "Ġsight": 6112, + "ĠDer": 6113, + "Ġreserve": 6114, + "150": 6115, + "Ġspeculation": 6116, + "Ġcompetitors": 
6117, + "ĠMacron": 6118, + "ony": 6119, + "Ġovertime": 6120, + "Ġ71": 6121, + "Ġdepending": 6122, + "ĠWarner": 6123, + "Ġaccusations": 6124, + "ius": 6125, + "Ġpredicted": 6126, + "ĠCharlie": 6127, + "Ġeverywhere": 6128, + "Ġcable": 6129, + "ĠSaint": 6130, + "ĠRegion": 6131, + "Ġhero": 6132, + "ĠEmb": 6133, + "Ġkinds": 6134, + "Ġstarter": 6135, + "Ġsolve": 6136, + "ĠGuard": 6137, + "Ġloves": 6138, + "ĠDouglas": 6139, + "Ġfunded": 6140, + "ĠBrent": 6141, + "ĠAnyone": 6142, + "Ġsubstantial": 6143, + "ĠMarine": 6144, + "ĠMichelle": 6145, + "Ġcelebrating": 6146, + "Ġoffset": 6147, + "Ġbutton": 6148, + "gg": 6149, + "Ġmedicine": 6150, + "uri": 6151, + "Ġsomewhere": 6152, + "PD": 6153, + "Ġmon": 6154, + "Ġfires": 6155, + "final": 6156, + "oth": 6157, + "ined": 6158, + "Ġunderway": 6159, + "Ġmistakes": 6160, + "Ġgrateful": 6161, + "Ġcheap": 6162, + "È": 6163, + "Ġ95": 6164, + "Ġviolations": 6165, + "arr": 6166, + "Ġsurprising": 6167, + "Ġob": 6168, + "ĠNATO": 6169, + "Ġcontroversy": 6170, + "ĠSweden": 6171, + "Ġfuneral": 6172, + "Ġreviews": 6173, + "Ġpromotion": 6174, + "TY": 6175, + "Ġliberal": 6176, + "Ġpromising": 6177, + "ĠSP": 6178, + "How": 6179, + "Ġmemories": 6180, + "Ġbreast": 6181, + "zi": 6182, + "ights": 6183, + "Ġpattern": 6184, + "Ġoutdoor": 6185, + "ĠMu": 6186, + "Ġrush": 6187, + "ĠTheresa": 6188, + "ĠPol": 6189, + "Ġdescribe": 6190, + "ĠBand": 6191, + "ĠStewart": 6192, + "Ġ1999": 6193, + "ĠRaiders": 6194, + "mp": 6195, + "Ġprocedures": 6196, + "Ġplot": 6197, + "Ġhire": 6198, + "used": 6199, + "Ġ1970": 6200, + "Ġpicking": 6201, + "ĠSim": 6202, + "Ġregard": 6203, + "inal": 6204, + "backs": 6205, + "ĠHard": 6206, + "ĠLow": 6207, + "ĠAc": 6208, + "Is": 6209, + "Ġguarantee": 6210, + "ĠGiven": 6211, + "Ġbeta": 6212, + "ĠTre": 6213, + "Ġtrans": 6214, + "Ġretailer": 6215, + "Ġpurposes": 6216, + "ĠHol": 6217, + "Ġenjoying": 6218, + "Ġbrown": 6219, + "ĠPerry": 6220, + "Ġplea": 6221, + "MS": 6222, + "ĠDakota": 6223, + "ĠParker": 6224, + "Ġcommit": 6225, + "ĠLawrence": 6226, + "ĠMorris": 6227, + "ended": 6228, + "Ġvirtual": 6229, + "ÃĹ": 6230, + "Ġfruit": 6231, + "84": 6232, + "ĠHas": 6233, + "ishing": 6234, + "Ġdominated": 6235, + "ĠFA": 6236, + "Ġchannels": 6237, + "Ġunderstood": 6238, + "Ġcitizen": 6239, + "Ġchecks": 6240, + "ĠKenya": 6241, + "Ġdisabled": 6242, + "SD": 6243, + "Ġprotecting": 6244, + "Ġtweets": 6245, + "Ġsparked": 6246, + "ĠCO": 6247, + "§": 6248, + "ori": 6249, + "ĠGDP": 6250, + "ĠSer": 6251, + "ĠVisit": 6252, + "ĠMS": 6253, + "Ġbarely": 6254, + "Ġsand": 6255, + "Ġap": 6256, + "aging": 6257, + "Ġrel": 6258, + "ĠPerhaps": 6259, + "ĠMourinho": 6260, + "ĠJets": 6261, + "Ġdisclosure": 6262, + "Ġhighlighted": 6263, + "Ġimplemented": 6264, + "Ġcompliance": 6265, + "ĠAB": 6266, + "ĠAssistant": 6267, + "ĠCape": 6268, + "Ġfunny": 6269, + "Ġleverage": 6270, + "Ġmachines": 6271, + "Ġranging": 6272, + "Ġfastest": 6273, + "ĠRoberts": 6274, + "ĠPolicy": 6275, + "gar": 6276, + "Ġcollapse": 6277, + "ĠThrough": 6278, + "Ġrobbery": 6279, + "ĠHay": 6280, + "Ġelite": 6281, + "ĠDigital": 6282, + "ĠFun": 6283, + "ĠAlan": 6284, + "ement": 6285, + "Ġmit": 6286, + "Ġspin": 6287, + "Ġlistening": 6288, + "ĠDoug": 6289, + "ĠSaints": 6290, + "Ġinterior": 6291, + "Ġenhance": 6292, + "ĠCardinals": 6293, + "ever": 6294, + "Ġrobust": 6295, + "Ġinform": 6296, + "Ġsuffer": 6297, + "book": 6298, + "ĠMuslims": 6299, + "Ġagriculture": 6300, + "Ġkm": 6301, + "Ġdivers": 6302, + "ñ": 6303, + "ĠReg": 6304, + "Ġequivalent": 6305, + "Ġcraft": 6306, + "Ġsettle": 6307, + "Ġcontains": 6308, + "ĠMack": 6309, + "ĠDis": 
6310, + "ĠFore": 6311, + "ĠSudan": 6312, + "ĠMail": 6313, + "ĠBrooklyn": 6314, + "izer": 6315, + "bn": 6316, + "Ġhundred": 6317, + "Ġexhibition": 6318, + "ĠHave": 6319, + "vin": 6320, + "Ġcivilians": 6321, + "ĠCincinnati": 6322, + "Some": 6323, + "ĠSE": 6324, + "Ġbat": 6325, + "ĠIns": 6326, + "Ġcalm": 6327, + "Ġtone": 6328, + "Ġnormally": 6329, + "Ġseeks": 6330, + "ĠAss": 6331, + "Ġmembership": 6332, + "Ġannually": 6333, + "Ġemployers": 6334, + "CO": 6335, + "Ġcomplicated": 6336, + "Ġheadlines": 6337, + "ĠLabor": 6338, + "Ġlifestyle": 6339, + "ĠRen": 6340, + "ĠRich": 6341, + "cent": 6342, + "ude": 6343, + "Ġawesome": 6344, + "Ġpaint": 6345, + "Ġrolling": 6346, + "Ġwalls": 6347, + "Ġlab": 6348, + "Ġtourists": 6349, + "care": 6350, + "Ġgear": 6351, + "izz": 6352, + "Ġcream": 6353, + "ĠTro": 6354, + "ices": 6355, + "Ġpack": 6356, + "Ġdiseases": 6357, + "ĠSpeaker": 6358, + "ĠOfficers": 6359, + "Ġsky": 6360, + "83": 6361, + "ĠBE": 6362, + "Ġcategories": 6363, + "Ġindicate": 6364, + "Ġru": 6365, + "ĠSony": 6366, + "ĠDun": 6367, + "ocks": 6368, + "Ġconcrete": 6369, + "ĠMadison": 6370, + "ĠSab": 6371, + "IV": 6372, + "Ġobserved": 6373, + "ria": 6374, + "Ġinterim": 6375, + "Ġencounter": 6376, + "ista": 6377, + "Ġanger": 6378, + "Ġrapid": 6379, + "mail": 6380, + "Ġdestination": 6381, + "ĩ": 6382, + "Ġbreaks": 6383, + "rell": 6384, + "ĠChase": 6385, + "Ġattorneys": 6386, + "Ġrolled": 6387, + "ĠSprings": 6388, + "ĠVillage": 6389, + "TO": 6390, + "HS": 6391, + "Ġcampaigns": 6392, + "ologist": 6393, + "ĠTax": 6394, + "ĠIII": 6395, + "Ġteach": 6396, + "Ġprovision": 6397, + "Ġrem": 6398, + "Ġshirt": 6399, + "Ġdeployed": 6400, + "Ġguidelines": 6401, + "Ġav": 6402, + "zer": 6403, + "Ġrushing": 6404, + "94": 6405, + "place": 6406, + "Man": 6407, + "Ġdivided": 6408, + "ĠGun": 6409, + "Ġwindows": 6410, + "Ġcomponents": 6411, + "aba": 6412, + "ĠSwitzerland": 6413, + "election": 6414, + "ĠTampa": 6415, + "ĠAri": 6416, + "ás": 6417, + "Ġhighway": 6418, + "Ġacres": 6419, + "Ġcrown": 6420, + "known": 6421, + "Ġinquiry": 6422, + "url": 6423, + "Ġexpertise": 6424, + "Ġpraised": 6425, + "yer": 6426, + "Ġconclusion": 6427, + "Ġabortion": 6428, + "Ġlady": 6429, + "Ġtribute": 6430, + "Ġunveiled": 6431, + "Ġbeaten": 6432, + "TE": 6433, + "ĠMot": 6434, + "unk": 6435, + "Ġtriple": 6436, + "Ġforcing": 6437, + "ĠTickets": 6438, + "uit": 6439, + "Ġiron": 6440, + "Ġscientific": 6441, + "ĠIP": 6442, + "Ġdiagnosed": 6443, + "Ġocean": 6444, + "wide": 6445, + "ĠCowboys": 6446, + "LC": 6447, + "Ġmethods": 6448, + "ĠFind": 6449, + "ĠDean": 6450, + "Ġfundamental": 6451, + "ĠGill": 6452, + "Ġfeelings": 6453, + "IO": 6454, + "hu": 6455, + "Ġfeedback": 6456, + "ote": 6457, + "Ġduo": 6458, + "fully": 6459, + "get": 6460, + "Ġproof": 6461, + "story": 6462, + "Ġlongest": 6463, + "Ġshops": 6464, + "ĠJong": 6465, + "ĠCro": 6466, + "ĠHawaii": 6467, + "91": 6468, + "ĠJake": 6469, + "ĠSusan": 6470, + "Ġsubmit": 6471, + "rav": 6472, + "Ġmodest": 6473, + "Ġlit": 6474, + "Ġattempting": 6475, + "Ġsits": 6476, + "Ġaddressing": 6477, + "93": 6478, + "ĠBi": 6479, + "Ġlying": 6480, + "ĠOrganization": 6481, + "ĠOak": 6482, + "oli": 6483, + "Ġfatal": 6484, + "Ġmountain": 6485, + "val": 6486, + "lu": 6487, + "ĠMaine": 6488, + "Ġcharging": 6489, + "Ġresigned": 6490, + "illo": 6491, + "Ġrecommendation": 6492, + "party": 6493, + "ĠWeb": 6494, + "ĠPanthers": 6495, + "Ġnoise": 6496, + "ĠBrussels": 6497, + "awa": 6498, + "Ġambassador": 6499, + "Ġaccessible": 6500, + "ĠCalgary": 6501, + "idd": 6502, + "ĠAirlines": 6503, + "gr": 6504, + "Ġnu": 6505, + "roy": 6506, 
+ "ĠMars": 6507, + "ĠPoland": 6508, + "ĠJerry": 6509, + "ados": 6510, + "ĠRico": 6511, + "ĠMir": 6512, + "ĠFin": 6513, + "ious": 6514, + "Ġpacked": 6515, + "Ġinsider": 6516, + "President": 6517, + "ĠBull": 6518, + "ĠYemen": 6519, + "ĠConnecticut": 6520, + "Ġ73": 6521, + "Ġdepartments": 6522, + "Ġorganic": 6523, + "ĠSummer": 6524, + "ĠBet": 6525, + "ste": 6526, + "zo": 6527, + "rat": 6528, + "Ġalliance": 6529, + "Ġintervention": 6530, + "wan": 6531, + "ĠOR": 6532, + "Ġdefined": 6533, + "ĠÃł": 6534, + "ĠChiefs": 6535, + "Ġknocked": 6536, + "ared": 6537, + "Ġholes": 6538, + "Ġpulling": 6539, + "ĠTodd": 6540, + "ĠJamie": 6541, + "ĠSher": 6542, + "Ġsignature": 6543, + "ĠSur": 6544, + "Ġgym": 6545, + "ĠVladimir": 6546, + "ĠThailand": 6547, + "Ġgaming": 6548, + "Ġsaving": 6549, + "ceive": 6550, + "82": 6551, + "ĠBern": 6552, + "ĠDid": 6553, + "Ġhardware": 6554, + "ished": 6555, + "Ġconspiracy": 6556, + "ANS": 6557, + "ĠIntelligence": 6558, + "Ġassembly": 6559, + "Ġ101": 6560, + "Ġconcise": 6561, + "ĠManhattan": 6562, + "Ġbelief": 6563, + "Ġsurge": 6564, + "Ġdeserve": 6565, + "Ġconsistently": 6566, + "ĠNor": 6567, + "okes": 6568, + "ðŁ": 6569, + "ME": 6570, + "ĠAsset": 6571, + "Ġsubstance": 6572, + "Ġprefer": 6573, + "Ġburning": 6574, + "ĠNik": 6575, + "ook": 6576, + "ĠPinterest": 6577, + "Ġboyfriend": 6578, + "ĠHal": 6579, + "ĠMerkel": 6580, + "Ġintroduce": 6581, + "ĠLinkedIn": 6582, + "ĠFull": 6583, + "ĠFarm": 6584, + "Ġchildhood": 6585, + "ĠTransportation": 6586, + "Ġterrible": 6587, + "du": 6588, + "Ġintention": 6589, + "Ġseemingly": 6590, + "elle": 6591, + "Ġfoods": 6592, + "Ġtitled": 6593, + "Ġdual": 6594, + "Ġimport": 6595, + "Ġdeveloper": 6596, + "UL": 6597, + "ington": 6598, + "ĠDelta": 6599, + "?'": 6600, + "iness": 6601, + "Ġquit": 6602, + "ĠGarcia": 6603, + "ĠSri": 6604, + "Ġhip": 6605, + "ĠBrazilian": 6606, + "elt": 6607, + "ively": 6608, + "Ġstructures": 6609, + "Ġlabour": 6610, + "Ġneighbors": 6611, + "Ġtill": 6612, + "Ġsoil": 6613, + "Ġdropping": 6614, + "Ġnominee": 6615, + "Ġmeets": 6616, + "92": 6617, + "rant": 6618, + "isa": 6619, + "Ġluck": 6620, + "aa": 6621, + "jet": 6622, + "ĠTor": 6623, + "ĠCrime": 6624, + "Ġlane": 6625, + "Ġflu": 6626, + "Ġlaunching": 6627, + "ĠAutom": 6628, + "aks": 6629, + "Ġuniversities": 6630, + "Ġpollution": 6631, + "ĠAdvis": 6632, + "ĠMall": 6633, + "ls": 6634, + "Ġdeeper": 6635, + "Ġrepeated": 6636, + "Ġmeanwhile": 6637, + "Ġchip": 6638, + "Ġoutlets": 6639, + "Ġliked": 6640, + "Ġsal": 6641, + "Ġwelfare": 6642, + "ago": 6643, + "Ġmakers": 6644, + "ving": 6645, + "fer": 6646, + "Ġovercome": 6647, + "mb": 6648, + "Ġshocked": 6649, + "akers": 6650, + "Ġnonprofit": 6651, + "Ġdonated": 6652, + "eral": 6653, + "Ġresume": 6654, + "Ġlogo": 6655, + "Ġsubscription": 6656, + "Ġ74": 6657, + "ela": 6658, + "Ġaspect": 6659, + "html": 6660, + "Ġsorry": 6661, + "Ġupgrade": 6662, + "Ġstance": 6663, + "Ġfr": 6664, + "Ġpapers": 6665, + "Ġattacking": 6666, + "Ġmeaningful": 6667, + "81": 6668, + "ĠWeinstein": 6669, + "Ġcreates": 6670, + "Ġhonour": 6671, + "ĠReply": 6672, + "oph": 6673, + "Ġmarch": 6674, + "Ġsmile": 6675, + "Ġcomparison": 6676, + "will": 6677, + "ĠSanchez": 6678, + "Ġvoter": 6679, + "Ġtheory": 6680, + "Ġequally": 6681, + "ĠRoger": 6682, + "Ġperfectly": 6683, + "Ġlanding": 6684, + "Ġbillions": 6685, + "ĠBloomberg": 6686, + "Ġpermit": 6687, + "Ġfinals": 6688, + "Ġracial": 6689, + "Ġpregnancy": 6690, + "iled": 6691, + "ĠFederation": 6692, + "Ġforest": 6693, + "Ġtag": 6694, + "aul": 6695, + "Ġdrinks": 6696, + "Ġ(\"": 6697, + "ĠMobile": 6698, + "Ġtouched": 
6699, + "Ġclock": 6700, + "Ġreg": 6701, + "Ġasylum": 6702, + "igan": 6703, + "Ġsenator": 6704, + "Ġ99": 6705, + "ĠKumar": 6706, + "Ġskill": 6707, + "Ġ1998": 6708, + "pa": 6709, + "ĠAf": 6710, + "Ġmood": 6711, + "ston": 6712, + "Ġhang": 6713, + "ĠMPs": 6714, + "Please": 6715, + "ĠEve": 6716, + "Ġdocumentary": 6717, + "Ġpersonality": 6718, + "ĠCast": 6719, + "Ġdiscount": 6720, + "bing": 6721, + "ĠBoeing": 6722, + "Ġdepend": 6723, + "Ġcrossing": 6724, + "EX": 6725, + "Ġsucceed": 6726, + "Ġhumanitarian": 6727, + "ĠMuhammad": 6728, + "Ġwages": 6729, + "Ġcolumn": 6730, + "Ġexternal": 6731, + "Ġstatistics": 6732, + "ĠTODAY": 6733, + "Ġtrips": 6734, + "Ġta": 6735, + "Ġpenalties": 6736, + "Ġwriters": 6737, + "Ġshipping": 6738, + "ĠIndians": 6739, + "Ġsalt": 6740, + "ĠIndustrial": 6741, + "ĠYankees": 6742, + "ĠDen": 6743, + "Ġrough": 6744, + "Ġbarrels": 6745, + "ĠHor": 6746, + "bert": 6747, + "ĠDep": 6748, + "Ġresign": 6749, + "97": 6750, + "Ġballs": 6751, + "ĠJun": 6752, + "ĠBab": 6753, + "Ġassociate": 6754, + "Ġstring": 6755, + "Ġhub": 6756, + "Ġorgan": 6757, + "ĠMarshall": 6758, + "ĠFIFA": 6759, + "ĠMun": 6760, + "ency": 6761, + "research": 6762, + "Ġpeers": 6763, + "Ġtall": 6764, + "ĠGoldman": 6765, + "Don": 6766, + "Ġparade": 6767, + "Ġparks": 6768, + "Ġdet": 6769, + "Ġdisappointing": 6770, + "Ġreflects": 6771, + "ĠLakers": 6772, + "Ġfiles": 6773, + "Ġrelatives": 6774, + "ĠUSD": 6775, + "ĠArticle": 6776, + "Ġcustom": 6777, + "ĠCarlos": 6778, + "Ġtracking": 6779, + "Ġmaintaining": 6780, + "ĠCur": 6781, + "ardo": 6782, + "ĠSkip": 6783, + "Ġattitude": 6784, + "Just": 6785, + "Ġinstitution": 6786, + "Ġnarrow": 6787, + "Ġsnap": 6788, + "Ġenterprise": 6789, + "Ġdrives": 6790, + "Ġ77": 6791, + "Ġcrop": 6792, + "Ġvirus": 6793, + "Ġcelebrity": 6794, + "Ġeconomies": 6795, + "ued": 6796, + "Ġsum": 6797, + "ĠDubai": 6798, + "ĠInsurance": 6799, + "Ĺ": 6800, + "ury": 6801, + "ĠUnfortunately": 6802, + "Ġclosure": 6803, + "ota": 6804, + "ĠPhilip": 6805, + "oms": 6806, + "Ġinvestigated": 6807, + "Ġgenerations": 6808, + "ĠETF": 6809, + "ĠKeith": 6810, + "ĠLater": 6811, + "isk": 6812, + "Ġpreferred": 6813, + "Ġdefault": 6814, + "Ġtowns": 6815, + "ĠRod": 6816, + "ĠDie": 6817, + "Ġintegrated": 6818, + "Ġacquiring": 6819, + "Ġvoices": 6820, + "Ġser": 6821, + "Ġpresents": 6822, + "ĠBR": 6823, + "ĠEmergency": 6824, + "Ġreligion": 6825, + "HA": 6826, + "Ġresponding": 6827, + "ĠThings": 6828, + "Ġbeef": 6829, + "ĠWithout": 6830, + "urd": 6831, + "ĠCarl": 6832, + "Ġadministrative": 6833, + "ĠWhich": 6834, + "Ġchallenged": 6835, + "Ġcooking": 6836, + "ivid": 6837, + "ĠFer": 6838, + "Ġtremendous": 6839, + "ĠTerry": 6840, + "iri": 6841, + "CS": 6842, + "ĠJunior": 6843, + "ĠReddit": 6844, + "Ġtea": 6845, + "Ġaccounting": 6846, + "lan": 6847, + "Ġdetention": 6848, + "Ġreplied": 6849, + "SI": 6850, + "ĠHel": 6851, + "ns": 6852, + "ĠProf": 6853, + "Ġramp": 6854, + "ĠConservative": 6855, + "Ġattendance": 6856, + "Ġspecialist": 6857, + "ĠFinal": 6858, + "Ġadvertisement": 6859, + "Ġacquire": 6860, + "ĠWhatsApp": 6861, + "Ġworkforce": 6862, + "ĠCalif": 6863, + "Ġspeakers": 6864, + "ĠEPA": 6865, + "Ġconviction": 6866, + "hire": 6867, + "ĠFisher": 6868, + "ĠIntel": 6869, + "Ġbin": 6870, + "ĠWas": 6871, + "Ġearth": 6872, + "vi": 6873, + "Ġhurricane": 6874, + "Ġholidays": 6875, + "Ġassume": 6876, + "Ġinvolve": 6877, + "Ġdynamic": 6878, + "ĠGre": 6879, + "Ġitem": 6880, + "Ġpound": 6881, + "Ġanxiety": 6882, + "ĠPrint": 6883, + "rop": 6884, + "Ġautomatically": 6885, + "Ġdiscrimination": 6886, + "ĠLam": 6887, + "ĠColl": 6888, + 
"Ġimpressed": 6889, + "Ġinvolves": 6890, + "ĠLes": 6891, + "ĠTri": 6892, + "ĠLook": 6893, + "ĠiOS": 6894, + "Ġgrab": 6895, + "ĠAngel": 6896, + "Ġstops": 6897, + "ĠPay": 6898, + "ĠECB": 6899, + "Ġbunch": 6900, + "Ġletting": 6901, + "ele": 6902, + "ĠAdditionally": 6903, + "Ġboards": 6904, + "NC": 6905, + "Ġtragedy": 6906, + "Ġpink": 6907, + "Ġgonna": 6908, + "ones": 6909, + "Ġrev": 6910, + "ĠIndependent": 6911, + "ĠCambridge": 6912, + "ĠPence": 6913, + "Ġprosecution": 6914, + "Ġdeputies": 6915, + "ĠAhmed": 6916, + "Ġlows": 6917, + "ĠAmy": 6918, + "ĠBuilding": 6919, + "mark": 6920, + "Ġsmooth": 6921, + "Ġsole": 6922, + "Ġwanting": 6923, + "ĠHeart": 6924, + "Ġobtain": 6925, + "ĠBus": 6926, + "Ġexchanges": 6927, + "friendly": 6928, + "Ġlabel": 6929, + "elect": 6930, + "ĠCompanies": 6931, + "owing": 6932, + "ĠCB": 6933, + "RI": 6934, + "ĠMaster": 6935, + "Ġliquid": 6936, + "ĠDanny": 6937, + "Ġproceeds": 6938, + "ĠLaura": 6939, + "card": 6940, + "Ġtears": 6941, + "Ġexploration": 6942, + "Ġdepression": 6943, + "ken": 6944, + "ĠFe": 6945, + "Ġlending": 6946, + "ĠYouth": 6947, + "ality": 6948, + "NS": 6949, + "Ġmoon": 6950, + "ĠTaiwan": 6951, + "Ġstruggles": 6952, + "Ġdiscovery": 6953, + "Ġqualify": 6954, + "Ġwireless": 6955, + "alia": 6956, + "Ġwitnessed": 6957, + "Ġheight": 6958, + "ĠGuy": 6959, + "left": 6960, + "KE": 6961, + "Ġfoul": 6962, + "ĠMohammed": 6963, + "Ġgrass": 6964, + "ĠNon": 6965, + "Ġswim": 6966, + "Ġbrilliant": 6967, + "you": 6968, + "ĠFlynn": 6969, + "Ġsinging": 6970, + "eria": 6971, + "UT": 6972, + "ĠMcCain": 6973, + "ĠSep": 6974, + "ĠWars": 6975, + "Ġburden": 6976, + "Ġpas": 6977, + "Ġabandoned": 6978, + "Ġint": 6979, + "ĠTurner": 6980, + "Ġcollective": 6981, + "ĠEnvironmental": 6982, + "ĠStudents": 6983, + "Ġofferings": 6984, + "Ġresignation": 6985, + "Ġexplosion": 6986, + "ĠKoh": 6987, + "ager": 6988, + "Ġthrows": 6989, + "Ġasks": 6990, + "light": 6991, + "Ġanyway": 6992, + "Ġyard": 6993, + "Ġcarrier": 6994, + "Ġwaves": 6995, + "backed": 6996, + "TR": 6997, + "oud": 6998, + "Ġbreach": 6999, + "Ġdated": 7000, + "Ġdressed": 7001, + "ĠDodgers": 7002, + "oles": 7003, + "Ġ78": 7004, + "Ġreads": 7005, + "Ġpredict": 7006, + "ĠJerusalem": 7007, + "ĠPT": 7008, + "Ġcrack": 7009, + "yan": 7010, + "Ġnights": 7011, + "eline": 7012, + "Ġconvinced": 7013, + "Ġlock": 7014, + "Ġcarefully": 7015, + "ĠMercedes": 7016, + "Ġultimate": 7017, + "Ġdist": 7018, + "Ġslight": 7019, + "ĠEdwards": 7020, + "Ġswing": 7021, + "iling": 7022, + "Ġknife": 7023, + "ĠNashville": 7024, + "IF": 7025, + "inder": 7026, + "udd": 7027, + "Ġsenators": 7028, + "ĠFurther": 7029, + "ĠXi": 7030, + "Ġstr": 7031, + "ĠOd": 7032, + "days": 7033, + "Ġcomm": 7034, + "Ġverdict": 7035, + "Ġconfirmation": 7036, + "king": 7037, + "ĠCS": 7038, + "Ġadvocates": 7039, + "Ġpride": 7040, + "Ġmemorial": 7041, + "ams": 7042, + "erman": 7043, + "Ġteenager": 7044, + "ĠNeil": 7045, + "uts": 7046, + "Ġsoul": 7047, + "see": 7048, + "post": 7049, + "Ġchest": 7050, + "fire": 7051, + "ĠLynch": 7052, + "Ġpeaceful": 7053, + "OND": 7054, + "ĠIndustries": 7055, + "ĠJuan": 7056, + "Ġrestore": 7057, + "Ġreliable": 7058, + "ming": 7059, + "agan": 7060, + "Source": 7061, + "ĠCabinet": 7062, + "Ġremarkable": 7063, + "ĠTrudeau": 7064, + "ĠEs": 7065, + "Ġintegrity": 7066, + "ove": 7067, + "fe": 7068, + "Ġproceedings": 7069, + "Ġconnections": 7070, + "Ġunprecedented": 7071, + "ĠGlen": 7072, + "ux": 7073, + "Ġearning": 7074, + "Ġingredients": 7075, + "Ġnominated": 7076, + "ĠBangladesh": 7077, + "made": 7078, + "Ġlessons": 7079, + "Ġbreakfast": 7080, + 
"ĠRelations": 7081, + "Ġloose": 7082, + "Al": 7083, + "Ġupgraded": 7084, + "ral": 7085, + "ĠPage": 7086, + "oto": 7087, + "ĠQueensland": 7088, + "Ġprocedure": 7089, + "ĠSmall": 7090, + "Ġrespective": 7091, + "Ġpictured": 7092, + "ĠBas": 7093, + "Ġpreparation": 7094, + "ĠMyanmar": 7095, + "Ġdonation": 7096, + "Ġvisible": 7097, + "iest": 7098, + "ĠBroadway": 7099, + "rick": 7100, + "ĠSchools": 7101, + "Ġarrests": 7102, + "ĠJessica": 7103, + "ĠBengal": 7104, + "Ġhell": 7105, + "Ġannouncing": 7106, + "Ġmail": 7107, + "ĠMcG": 7108, + "two": 7109, + "rest": 7110, + "OD": 7111, + "ĠBradley": 7112, + "Ġdoubled": 7113, + "Ġpledged": 7114, + "Ġcomeback": 7115, + "Ġextraordinary": 7116, + "Ġslide": 7117, + "Ġassess": 7118, + "Ġagricultural": 7119, + "ĠKay": 7120, + "Ġvendors": 7121, + "Ġnarrative": 7122, + "Ġreviewed": 7123, + "ĠPass": 7124, + "Ġinspiration": 7125, + "ĠHunter": 7126, + "Ġcalendar": 7127, + "ĠDiamond": 7128, + "Ġremoval": 7129, + "ners": 7130, + "ĠKap": 7131, + "Ġconsent": 7132, + "Ġvisual": 7133, + "Ġcheese": 7134, + "ĠTher": 7135, + "ĠFR": 7136, + "ĠShanghai": 7137, + "iah": 7138, + "ĠCole": 7139, + "AK": 7140, + "Ġranking": 7141, + "Ġcook": 7142, + "Ġhalftime": 7143, + "ĠStars": 7144, + "Ġroutes": 7145, + "aim": 7146, + "Ġestablishment": 7147, + "ĠMug": 7148, + "Ġsurvivors": 7149, + "urg": 7150, + "ĠBrett": 7151, + "Ġunexpected": 7152, + "ained": 7153, + "Ġrarely": 7154, + "ĠGall": 7155, + "Ġadvocate": 7156, + "ĠNad": 7157, + "Ġ911": 7158, + "Ġracist": 7159, + "erer": 7160, + "ĠRev": 7161, + "ĠSection": 7162, + "Ġhelpful": 7163, + "CT": 7164, + "agg": 7165, + "Ġgovernance": 7166, + "Ġfelony": 7167, + "Ġoptimistic": 7168, + "Ġelectoral": 7169, + "EG": 7170, + "town": 7171, + "Ġdaughters": 7172, + "Ġanswered": 7173, + "Ġthin": 7174, + "ĠClassic": 7175, + "Ġshareholder": 7176, + "ĠBlake": 7177, + "ĠFla": 7178, + "Ġparliamentary": 7179, + "dy": 7180, + "Ġcommented": 7181, + "Ġtri": 7182, + "Ġglobe": 7183, + "Ġmandate": 7184, + "Ġslipped": 7185, + "ĠTower": 7186, + "Ġoperated": 7187, + "gers": 7188, + "Ġassured": 7189, + "ĠMartinez": 7190, + "Ġdesigns": 7191, + "ĠModel": 7192, + "Ġstakeholders": 7193, + "Ġdefended": 7194, + "Ġseniors": 7195, + "Ġvacation": 7196, + "Ġglobally": 7197, + "ump": 7198, + "Not": 7199, + "Ġclip": 7200, + "Ġarticles": 7201, + "BR": 7202, + "km": 7203, + "ĠFront": 7204, + "PL": 7205, + "Ġadoption": 7206, + "Ġsudden": 7207, + "Ġframework": 7208, + "Ġhanging": 7209, + "gl": 7210, + "ĠSel": 7211, + "Ġmoderate": 7212, + "Ġreverse": 7213, + "income": 7214, + "cor": 7215, + "ĠGB": 7216, + "Ġphysically": 7217, + "Ġtransparency": 7218, + "ĠElectric": 7219, + "Ġrefugee": 7220, + "profile": 7221, + "iva": 7222, + "ately": 7223, + "ĠAC": 7224, + "Ġtransferred": 7225, + "Ġaffair": 7226, + "ĠAlaska": 7227, + "oria": 7228, + "ĠChange": 7229, + "Ġrepeat": 7230, + "Ġscreening": 7231, + "ender": 7232, + "ĠCas": 7233, + "ĠDav": 7234, + "Ġfocuses": 7235, + "Ġcommissioner": 7236, + "Ġupside": 7237, + "ĠKeep": 7238, + "ĠBlues": 7239, + "ently": 7240, + "Ġaut": 7241, + "Ġexperiencing": 7242, + "aman": 7243, + "Ġapprove": 7244, + "Ġmile": 7245, + "Ġcheaper": 7246, + "ĠWind": 7247, + "ĠStore": 7248, + "Ġgrabbed": 7249, + "Ġsons": 7250, + "Ġfighter": 7251, + "Ġum": 7252, + "ĠBased": 7253, + "don": 7254, + "Ġconstitution": 7255, + "finals": 7256, + "act": 7257, + "¢": 7258, + "Ġmill": 7259, + "Ġorganisations": 7260, + "ĠToyota": 7261, + "Ġyuan": 7262, + "Ġterrorists": 7263, + "Ġforth": 7264, + "Ġavailability": 7265, + "Ġentrance": 7266, + "Ġvolumes": 7267, + "Ġmult": 7268, + "plus": 
7269, + "ĠColumbus": 7270, + "ĠSummit": 7271, + "Ġbabies": 7272, + "ĠMur": 7273, + "ĠGray": 7274, + "ĠChar": 7275, + "ĠButler": 7276, + "Ġpose": 7277, + "ĠNatural": 7278, + "ĠAtt": 7279, + "Ġdecrease": 7280, + "Ġtens": 7281, + "kt": 7282, + "Ġminds": 7283, + "Ġimpacted": 7284, + "Ġchapter": 7285, + "ĠOp": 7286, + "ĠHarrison": 7287, + "ĠRodriguez": 7288, + "Ġethnic": 7289, + "Ġtravelling": 7290, + "ĠBond": 7291, + "ader": 7292, + "core": 7293, + "Ġgallery": 7294, + "founder": 7295, + "ĠVill": 7296, + "Ġdecent": 7297, + "ĠHistory": 7298, + "ĠInt": 7299, + "ĠNa": 7300, + "ĠHad": 7301, + "Ġmainstream": 7302, + "ĠTs": 7303, + "Ġbottle": 7304, + "sen": 7305, + "Ġrecession": 7306, + "Ġsophomore": 7307, + "Ġsilence": 7308, + "cc": 7309, + "Ġqualifying": 7310, + "Ġcomplained": 7311, + "ĠRad": 7312, + "Ġactively": 7313, + "Ġbacks": 7314, + "ĠMusk": 7315, + "Ġcareful": 7316, + "Ġmeals": 7317, + "ĠDor": 7318, + "Ġmess": 7319, + "ĠBelgium": 7320, + "Ġke": 7321, + "ĠLopez": 7322, + "Ġbow": 7323, + "Ġhelicopter": 7324, + "was": 7325, + "Ġstone": 7326, + "kins": 7327, + "Ġunlike": 7328, + "Ġcollision": 7329, + "ĠAlt": 7330, + "HP": 7331, + "ĠMason": 7332, + "has": 7333, + "Ġclimbed": 7334, + "Ġindication": 7335, + "Ġhotels": 7336, + "Ġloud": 7337, + "ĠMilan": 7338, + "kes": 7339, + "Ġbadly": 7340, + "Ġtrials": 7341, + "Ġimpacts": 7342, + "ĠJane": 7343, + "Ġcrossed": 7344, + "Ġdiscussing": 7345, + "ĠSM": 7346, + "Ġpopularity": 7347, + "ĠWant": 7348, + "fall": 7349, + "Ġartificial": 7350, + "ĠBu": 7351, + "akh": 7352, + "Ġdominant": 7353, + "gov": 7354, + "Ġpremier": 7355, + "Ġexecution": 7356, + "gate": 7357, + "Ġswimming": 7358, + "Ġchat": 7359, + "Ġdevastating": 7360, + "acking": 7361, + "Ġreception": 7362, + "urt": 7363, + "Ġtheater": 7364, + "Ġgather": 7365, + "Ġtear": 7366, + "uro": 7367, + "Ġdemocratic": 7368, + "Ġrebels": 7369, + "Ġlifetime": 7370, + "Ġradical": 7371, + "uan": 7372, + "Ġtechniques": 7373, + "ache": 7374, + "ior": 7375, + "Ġcamps": 7376, + "Ġtelephone": 7377, + "ĠDublin": 7378, + "ĠBrand": 7379, + "ĠMarcus": 7380, + "aun": 7381, + "ĠRec": 7382, + "Ġ82": 7383, + "ban": 7384, + "Ġsafely": 7385, + "aku": 7386, + "aki": 7387, + "Ġbankruptcy": 7388, + "FF": 7389, + "Ġformat": 7390, + "Ġattached": 7391, + "ĠFame": 7392, + "ĠEdward": 7393, + "Ġmerger": 7394, + "ĠRepresentatives": 7395, + "izes": 7396, + "Ġhidden": 7397, + "Ġval": 7398, + "zz": 7399, + "Ġexcess": 7400, + "Ġscope": 7401, + "Ġdivorce": 7402, + "Ġburn": 7403, + "Ġrequirement": 7404, + "BB": 7405, + "ĠHand": 7406, + "Ġcons": 7407, + "Ġrisen": 7408, + "Ġtwitter": 7409, + "Ġoffseason": 7410, + "ĠSometimes": 7411, + "ĠInf": 7412, + "ĠAng": 7413, + "uer": 7414, + "report": 7415, + "Ġdreams": 7416, + "Ġ700": 7417, + "ips": 7418, + "ĠDream": 7419, + "Ġgifts": 7420, + "Ġsomehow": 7421, + "ĠTur": 7422, + "ĠRachel": 7423, + "can": 7424, + "Ġlog": 7425, + "ĠMedicaid": 7426, + "Ġles": 7427, + "Ġtired": 7428, + "ĠArkansas": 7429, + "Ġliquidity": 7430, + "ĠPhillips": 7431, + "ĠBTC": 7432, + "Ġhide": 7433, + "Ġpun": 7434, + "ĠRun": 7435, + "lyn": 7436, + "ĠUC": 7437, + "ĠDesign": 7438, + "ĠDev": 7439, + "Ġvaluation": 7440, + "Ġreveals": 7441, + "ĠChild": 7442, + "other": 7443, + "Ġposed": 7444, + "lee": 7445, + "Ġships": 7446, + "ĠTrue": 7447, + "Ġdescribes": 7448, + "Ġrunner": 7449, + "bro": 7450, + "Ġankle": 7451, + "Ġod": 7452, + "ĠAnnual": 7453, + "CL": 7454, + "Ġoverhaul": 7455, + "ned": 7456, + "Ġbold": 7457, + "Ġmo": 7458, + "ĠFalls": 7459, + "Ġemployed": 7460, + "ĠGro": 7461, + "Ġflash": 7462, + "ĠTD": 7463, + "Ġnervous": 7464, + 
"Ġintegration": 7465, + "Ġsmartphones": 7466, + "Ġmovements": 7467, + "nie": 7468, + "ition": 7469, + "ĠThird": 7470, + "Ģ": 7471, + "Ġmetres": 7472, + "Ġeconomist": 7473, + "omp": 7474, + "Ġteens": 7475, + "Ġeveryday": 7476, + "Ġinterviewed": 7477, + "Ġbriefly": 7478, + "],": 7479, + "uke": 7480, + "ĠFOX": 7481, + "Ġunderlying": 7482, + "ĠLuc": 7483, + "Ġcourses": 7484, + "ss": 7485, + "amed": 7486, + "°": 7487, + "ju": 7488, + "ĠBanks": 7489, + "Ġoutfit": 7490, + "illing": 7491, + "Ġtrafficking": 7492, + "Ġurging": 7493, + "Ġbelt": 7494, + "Ġrid": 7495, + "CP": 7496, + "Ġelderly": 7497, + "ĠGrowth": 7498, + "án": 7499, + "ĠSn": 7500, + "Ġsurrounded": 7501, + "Ġsisters": 7502, + "ĠIslam": 7503, + "Ġsynd": 7504, + "ĠCosta": 7505, + "di": 7506, + "ĠKl": 7507, + "Ġmanufacturer": 7508, + "holders": 7509, + "Ġelement": 7510, + "Ġload": 7511, + "Ġbooked": 7512, + "Ġaccompanied": 7513, + "ĠChamber": 7514, + "Ġbriefing": 7515, + "Oh": 7516, + "imi": 7517, + "ĠDefence": 7518, + "ĠCurrently": 7519, + "aking": 7520, + "Ġhandled": 7521, + "ĠCD": 7522, + "ĠBenjamin": 7523, + "Ġpocket": 7524, + "ĠKashmir": 7525, + "Ġlighting": 7526, + "aps": 7527, + "Ġ1997": 7528, + "ech": 7529, + "Ġaddiction": 7530, + "Ġbases": 7531, + "Ġpriorities": 7532, + "Ġhardly": 7533, + "ĠQuebec": 7534, + "ĠEarn": 7535, + "IES": 7536, + "ĠZach": 7537, + "ĠAlong": 7538, + "MI": 7539, + "Ġins": 7540, + "ĠRogers": 7541, + "ĠKan": 7542, + "ĠFuture": 7543, + "Ġtriggered": 7544, + "ĠUnit": 7545, + "Ġweighed": 7546, + "Ġpointing": 7547, + "Ġchocolate": 7548, + "ĠBrowns": 7549, + "ĠISIS": 7550, + "Ġgoalkeeper": 7551, + "Ġsaves": 7552, + "ĠAndre": 7553, + "burn": 7554, + "ĠCont": 7555, + "ĠNetherlands": 7556, + "Ġpolitically": 7557, + "ĠAshley": 7558, + "ĠWhit": 7559, + "aded": 7560, + "PH": 7561, + "Ġborders": 7562, + "ORE": 7563, + "Ġally": 7564, + "Trump": 7565, + "istan": 7566, + "ĠHunt": 7567, + "ĠCancer": 7568, + "ĠGrace": 7569, + "ĠTottenham": 7570, + "Ġ1960": 7571, + "ĠMarg": 7572, + "ĠBryan": 7573, + "ĠAgain": 7574, + "acing": 7575, + "Ġarguments": 7576, + "ĠSouthwest": 7577, + "Ġvocal": 7578, + "Ġjudgment": 7579, + "Ġengaging": 7580, + "Ġadopt": 7581, + "Ġrental": 7582, + "Ġlinebacker": 7583, + "ĠKardashian": 7584, + "Ġepisodes": 7585, + "..": 7586, + "Ġunt": 7587, + "Ġvowed": 7588, + "Ġ79": 7589, + "ule": 7590, + "Ġtransit": 7591, + "Ġoffshore": 7592, + "Ġsuppliers": 7593, + "Ġarguing": 7594, + "Ġsatellite": 7595, + "ĠLind": 7596, + "ĠTaliban": 7597, + "Buy": 7598, + "ĠCaribbean": 7599, + "ĠBarry": 7600, + "Ġauthors": 7601, + "ĠWolf": 7602, + "Ġviewing": 7603, + "ĠCubs": 7604, + "From": 7605, + "Ġ%": 7606, + "Ġcurrencies": 7607, + "Why": 7608, + "ĠBroncos": 7609, + "Ġtrick": 7610, + "Ġdiesel": 7611, + "ĠLiberal": 7612, + "FL": 7613, + "Ġtopics": 7614, + "Ġretain": 7615, + "ĠLiberty": 7616, + "Ġacquisitions": 7617, + "ced": 7618, + "Ġfre": 7619, + "Ġfleet": 7620, + "Ġcopper": 7621, + "ĠPot": 7622, + "jen": 7623, + "ĠElliott": 7624, + "ĠPyongyang": 7625, + "Ġobject": 7626, + "ĠUse": 7627, + "Ġmutual": 7628, + "MP": 7629, + "Ġev": 7630, + "Ġdeny": 7631, + "ĠEveryone": 7632, + "lling": 7633, + "Ġpays": 7634, + "Ġdrought": 7635, + "Ġcorn": 7636, + "Ġworkplace": 7637, + "rig": 7638, + "ĠMn": 7639, + "Ġadvisory": 7640, + "ĠCat": 7641, + "Ġchronic": 7642, + "ĠSteelers": 7643, + "Ġboxes": 7644, + "ĠNap": 7645, + "Ġdemonstrated": 7646, + "ĠTournament": 7647, + "Ġsymbol": 7648, + "ĠAfghan": 7649, + "ĠTan": 7650, + "ired": 7651, + "ĠEv": 7652, + "ĠConsumer": 7653, + "Ġmoral": 7654, + "ĠAdditional": 7655, + "Ġwebsites": 7656, + 
"Ġoccasions": 7657, + "Ġfate": 7658, + "Ġpitcher": 7659, + "Ġtaxpayers": 7660, + "Ġdeemed": 7661, + "ĠLibya": 7662, + "Ġpriced": 7663, + "Ġdistributed": 7664, + "ĠForum": 7665, + "Ġrice": 7666, + "Ġbloc": 7667, + "Ġprovisions": 7668, + "agh": 7669, + "Ġpen": 7670, + "Ġattracted": 7671, + "ĠEdmonton": 7672, + "Ġthousand": 7673, + "Ġpainting": 7674, + "Ġil": 7675, + "Ġcourtesy": 7676, + "Ġeliminate": 7677, + "Ġacc": 7678, + "Ġmeters": 7679, + "Ġreflected": 7680, + "Ġcomponent": 7681, + "Every": 7682, + "Ġsells": 7683, + "Ġfault": 7684, + "Ġburned": 7685, + "ĠKirk": 7686, + "ĠAnna": 7687, + "Ġappeals": 7688, + "Ġeggs": 7689, + "Ġfrequent": 7690, + "Ġtrigger": 7691, + "Ġrevised": 7692, + "ĠAngela": 7693, + "Ġ81": 7694, + "Ġsingles": 7695, + "Ġviral": 7696, + "Ġworries": 7697, + "ĠShould": 7698, + "profit": 7699, + "Ġraises": 7700, + "ĠBryant": 7701, + "ĠProduct": 7702, + "Ġtenure": 7703, + "Ġdiabetes": 7704, + "Ġcolour": 7705, + "azz": 7706, + "ĠGirls": 7707, + "Ġpractical": 7708, + "Ġblind": 7709, + "ancing": 7710, + "pictured": 7711, + "Ġfinale": 7712, + "ĠElection": 7713, + "Ġathletic": 7714, + "Ġpromoted": 7715, + "Ġflowers": 7716, + "Ġtrains": 7717, + "ario": 7718, + "Ġsufficient": 7719, + "IE": 7720, + "Ġexamples": 7721, + "Ġshed": 7722, + "Ġbirds": 7723, + "Ġchaos": 7724, + "Ġwound": 7725, + "Ġrocket": 7726, + "Ġwet": 7727, + "Ġsample": 7728, + "ĠNag": 7729, + "ĠOliver": 7730, + "Ġscrutiny": 7731, + "ĠSeven": 7732, + "ĠRoman": 7733, + "ĠFred": 7734, + "Ġweird": 7735, + "ĠTam": 7736, + "ĠSupport": 7737, + "ĠNathan": 7738, + "Ġstudying": 7739, + "Ġintroduction": 7740, + "Ġtons": 7741, + "cer": 7742, + "aus": 7743, + "ION": 7744, + "Ġcritic": 7745, + "ĠAh": 7746, + "alo": 7747, + "pur": 7748, + "Ġstorms": 7749, + "ĠMission": 7750, + "Ġcredits": 7751, + "Ġgrants": 7752, + "Ġcomp": 7753, + "Ġhearts": 7754, + "part": 7755, + "Ġpin": 7756, + "Ġsubsequent": 7757, + "Ġmad": 7758, + "ĠSacramento": 7759, + "woman": 7760, + "from": 7761, + "Ġoutcomes": 7762, + "Ġoldest": 7763, + "Ġdesperate": 7764, + "ĠTal": 7765, + "ĠDJ": 7766, + "ward": 7767, + "Ġaudiences": 7768, + "Ġimportantly": 7769, + "ĠEmily": 7770, + "sk": 7771, + "ĠHeat": 7772, + "ĠType": 7773, + "ĠPeace": 7774, + "Ġsuspicious": 7775, + "aly": 7776, + "ĠGET": 7777, + "ĠCAP": 7778, + "dis": 7779, + "ĠIraqi": 7780, + "ĠReed": 7781, + "Ġstrange": 7782, + "ĠParent": 7783, + "900": 7784, + "Ġglad": 7785, + "ĠTroy": 7786, + "ĠShort": 7787, + "Ġheritage": 7788, + "Ġarriving": 7789, + "ingly": 7790, + "Ġtransformation": 7791, + "Ġlease": 7792, + "Ġcollapsed": 7793, + "cha": 7794, + "ĠPatrol": 7795, + "Ġcomputers": 7796, + "Ġprinciples": 7797, + "Ġsporting": 7798, + "ĠHughes": 7799, + "mile": 7800, + "ĠCit": 7801, + "Ġdrilling": 7802, + "ĠBox": 7803, + "ÃŁ": 7804, + "bre": 7805, + "ĠOverall": 7806, + "Ġopioid": 7807, + "Ġdelighted": 7808, + "Ġhonored": 7809, + "ĠCold": 7810, + "Ġunions": 7811, + "ĠCou": 7812, + "ĠCircuit": 7813, + "Ġblast": 7814, + "sson": 7815, + "ĠHernandez": 7816, + "ĠLooking": 7817, + "Ġlegally": 7818, + "ĠWalmart": 7819, + "bridge": 7820, + "Ġmat": 7821, + "rad": 7822, + "ids": 7823, + "Ġdining": 7824, + "Ġrebound": 7825, + "abad": 7826, + "ĠRom": 7827, + "Ġimpose": 7828, + "ĠAlpha": 7829, + "ĠWeekly": 7830, + "TER": 7831, + "ĠJam": 7832, + "Ġabsolute": 7833, + "Ġinventory": 7834, + "ĠBilly": 7835, + "ĠKaren": 7836, + "ĠFriends": 7837, + "ĠCent": 7838, + "ĠVikings": 7839, + "ĠMuch": 7840, + "cell": 7841, + "ads": 7842, + "Ġph": 7843, + "Ġkiller": 7844, + "ĠMembers": 7845, + "Ġshooter": 7846, + "ĠInvestigators": 7847, + 
"ĠJoshua": 7848, + "Ġparticipated": 7849, + "Ġinnocent": 7850, + "ĠRichmond": 7851, + "itor": 7852, + "ĠDal": 7853, + "ĠOperator": 7854, + "Ġmakeup": 7855, + "Ġconf": 7856, + "ĠNEWS": 7857, + "ĠDef": 7858, + "Ġchase": 7859, + "ĠCost": 7860, + "mont": 7861, + "\":": 7862, + "Ġarrangements": 7863, + "stein": 7864, + "Ġretire": 7865, + "ĠLuis": 7866, + "Ġrenewed": 7867, + "ĠTownship": 7868, + "Ġchecked": 7869, + "arts": 7870, + "ĠCash": 7871, + "Ġcentres": 7872, + "chers": 7873, + "ĠSolutions": 7874, + "Ġlegend": 7875, + "ige": 7876, + "most": 7877, + "osed": 7878, + "ĠPor": 7879, + "Ġpremiere": 7880, + "FS": 7881, + "Ġmissiles": 7882, + "ĠLang": 7883, + "Ġsing": 7884, + "best": 7885, + "Ġtail": 7886, + "Ġriders": 7887, + "Picture": 7888, + "zen": 7889, + "ĠKent": 7890, + "Ġtransform": 7891, + "Ġwildlife": 7892, + "Ġsmoking": 7893, + "Ġpreseason": 7894, + "ĠLucas": 7895, + "ĠAnne": 7896, + "owski": 7897, + "Ġtape": 7898, + "Ġdisplayed": 7899, + "Ġforum": 7900, + "Ġanonymity": 7901, + "ĠIndianapolis": 7902, + "hips": 7903, + "acc": 7904, + "ĠMoreover": 7905, + "lers": 7906, + "area": 7907, + "ĠIndeed": 7908, + "Ġconducting": 7909, + "Ġinfection": 7910, + "Ġdealt": 7911, + "OB": 7912, + "asing": 7913, + "ĠGaza": 7914, + "itter": 7915, + "ĠKa": 7916, + "Ġhopeful": 7917, + "ĠSnow": 7918, + "Ġentitled": 7919, + "Ġaffecting": 7920, + "Ġeager": 7921, + "Ġcircle": 7922, + "Ġlaugh": 7923, + "ĠProsecutors": 7924, + "ĠDur": 7925, + "Ġbarriers": 7926, + "ĠPoll": 7927, + "oun": 7928, + "ĠPalm": 7929, + "chi": 7930, + "Ġsamples": 7931, + "Ġcompromise": 7932, + "atter": 7933, + "Ġenormous": 7934, + "Ġé": 7935, + "coming": 7936, + "ĠPharmaceutical": 7937, + "Ġrank": 7938, + "Let": 7939, + "Ġtransgender": 7940, + "ĠCloud": 7941, + "FO": 7942, + "ĠBor": 7943, + "Ġbonus": 7944, + "Ġordinary": 7945, + "ĠPres": 7946, + "ĠHIV": 7947, + "ires": 7948, + "OSE": 7949, + "Ġdancing": 7950, + "ĠHD": 7951, + "Ġversions": 7952, + "Ġ88": 7953, + "rate": 7954, + "Ġtackles": 7955, + "Ġknock": 7956, + "ĠEmma": 7957, + "Ġmotivated": 7958, + "ĠBennett": 7959, + "ĠBurn": 7960, + "Ġgrid": 7961, + "Ġembrace": 7962, + "ĠSpurs": 7963, + "Ġflows": 7964, + "ĠGer": 7965, + "Ġsponsored": 7966, + "Ġsurvival": 7967, + "ching": 7968, + "Ġ1995": 7969, + "Ġreward": 7970, + "Ġdepends": 7971, + "Ġpostseason": 7972, + "Ġloaded": 7973, + "Ġneutral": 7974, + "ĠPop": 7975, + "BL": 7976, + "Ġrevolution": 7977, + "ĠFreedom": 7978, + "Ġrecovering": 7979, + "Ġrequiring": 7980, + "ALL": 7981, + "ARE": 7982, + "Ġmini": 7983, + "lt": 7984, + "ĠFDA": 7985, + "Ġcarpet": 7986, + "ĠPrior": 7987, + "Ġadmission": 7988, + "ĠEver": 7989, + "ĠTribune": 7990, + "ĠRonaldo": 7991, + "Ġthick": 7992, + "Ġlanes": 7993, + "Ġ84": 7994, + "ĠMemphis": 7995, + "Ġopt": 7996, + "BO": 7997, + "Ġfaculty": 7998, + "ĠChad": 7999, + "ĠSUV": 8000, + "ĠHen": 8001, + "Ġeste": 8002, + "ĠHu": 8003, + "ĠAgriculture": 8004, + "store": 8005, + "ĠDrug": 8006, + "inter": 8007, + "Ġ1996": 8008, + "ident": 8009, + "Ġbackup": 8010, + "ĠHonda": 8011, + "ĠHope": 8012, + "oes": 8013, + "ums": 8014, + "amer": 8015, + "Ġbreath": 8016, + "Ġ110": 8017, + "Ġjoke": 8018, + "ĠAld": 8019, + "Ġwondering": 8020, + "ĠAssad": 8021, + "ĠRem": 8022, + "Ġfundraising": 8023, + "pot": 8024, + "è": 8025, + "Ġquestioning": 8026, + "Ġpent": 8027, + "ĠMoney": 8028, + "ĠMedicine": 8029, + "wick": 8030, + "ĠKnights": 8031, + "Ġbatting": 8032, + "ĠMos": 8033, + "Ġdesignated": 8034, + "isse": 8035, + "Ġspotlight": 8036, + "Ġlake": 8037, + "Ġcaution": 8038, + "Ġinmates": 8039, + "Ġlap": 8040, + "CE": 8041, + "ĠJavascript": 
8042, + "ĠDeutsche": 8043, + "ĠFargo": 8044, + "Ġguaranteed": 8045, + "borough": 8046, + "Ġfunctions": 8047, + "ĠElementary": 8048, + "ĠChuck": 8049, + "Ġpitched": 8050, + "ĠKrist": 8051, + "Ġsteal": 8052, + "Ġchips": 8053, + "Ġalarm": 8054, + "Ġbeloved": 8055, + "scale": 8056, + "Ġassaulted": 8057, + "ĠPentagon": 8058, + "Ġtemporarily": 8059, + "Ġ93": 8060, + "Ġ>": 8061, + "ĠPortugal": 8062, + "ti": 8063, + "HL": 8064, + "Ġdecreased": 8065, + "Ġexistence": 8066, + "Ġisolated": 8067, + "Ġdeposit": 8068, + "Ġstudied": 8069, + "\")": 8070, + "Ġtrophy": 8071, + "ĠBrooks": 8072, + "Ġbattling": 8073, + "Ġweaker": 8074, + "ĠPrivate": 8075, + "ĠAccess": 8076, + "Ġvirtually": 8077, + "Ġshortage": 8078, + "Ġgaining": 8079, + "Ġbathroom": 8080, + "TON": 8081, + "Ġconcerning": 8082, + "Ġengineer": 8083, + "Ġbread": 8084, + "Ġdemonstrate": 8085, + "ĠDh": 8086, + "Ġhorses": 8087, + "Ġintersection": 8088, + "Ġcolors": 8089, + "Ġdelegation": 8090, + "Ġnotable": 8091, + "Ġwithdrawal": 8092, + "ĠDennis": 8093, + "Ġlocally": 8094, + "Ġcoastal": 8095, + "Ġcomply": 8096, + "ĠMoh": 8097, + "ĠAlbert": 8098, + "Ġclosest": 8099, + "ĠCITY": 8100, + "Ġ83": 8101, + "Ġcancelled": 8102, + "ĠðŁ": 8103, + "Ġsharply": 8104, + "RS": 8105, + "Ġproductivity": 8106, + "Ġbasket": 8107, + "SS": 8108, + "Ġadmit": 8109, + "ool": 8110, + "ination": 8111, + "ĠBB": 8112, + "Ġsur": 8113, + "ĠSteel": 8114, + "ĠTed": 8115, + "ĠPac": 8116, + "Ġpatterns": 8117, + "Ġlisting": 8118, + "Ġreplacing": 8119, + "ĠPradesh": 8120, + "Ġroots": 8121, + "Ġbroker": 8122, + "ĠWriting": 8123, + "Ġsued": 8124, + "Ġorganised": 8125, + "ĠThanksgiving": 8126, + "ĠNOT": 8127, + "Ġjournalism": 8128, + "uel": 8129, + "Ġkilometers": 8130, + "Ġhunt": 8131, + "berry": 8132, + "ĠMother": 8133, + "Ġlegitimate": 8134, + "Ġinput": 8135, + "ĠRel": 8136, + "ĠGuardian": 8137, + "Ar": 8138, + "Ġtransported": 8139, + "Ġbedroom": 8140, + "ashing": 8141, + "Ġbats": 8142, + "Ġcleaning": 8143, + "Ġwrapped": 8144, + "Pacific": 8145, + "Ġfence": 8146, + "Ġtestified": 8147, + "Ġ1994": 8148, + "Ġinterference": 8149, + "Ġmatching": 8150, + "Ġexpression": 8151, + "eta": 8152, + "ĠSpencer": 8153, + "Ġstrategist": 8154, + "who": 8155, + "Ġvictories": 8156, + "Ġ2022": 8157, + "Ġstakes": 8158, + "Ġbuses": 8159, + "ĠHousing": 8160, + "Ġeditorial": 8161, + "Ġ86": 8162, + "ĠBishop": 8163, + "Ġfrustrated": 8164, + "Ġappearing": 8165, + "http": 8166, + "IGHT": 8167, + "Ġmemo": 8168, + "Ġinsiders": 8169, + "Even": 8170, + "Ġclassroom": 8171, + "Ġchef": 8172, + "aining": 8173, + "].": 8174, + "ĠMcD": 8175, + "Ġ87": 8176, + "ĠPunjab": 8177, + "Ġancient": 8178, + "Ġresolved": 8179, + "Ġdying": 8180, + "Ġdestruction": 8181, + "Ġgoverning": 8182, + "Ġrestructuring": 8183, + "ĠPick": 8184, + "Ġmunicipal": 8185, + "Ġengines": 8186, + "ĠHudson": 8187, + "Æ": 8188, + "Ġrepeal": 8189, + "standing": 8190, + "Ġbound": 8191, + "ĠOS": 8192, + "ĠCommonwealth": 8193, + "Ġdescription": 8194, + "Ġhouseholds": 8195, + "Ġmal": 8196, + "Ġstopping": 8197, + "equ": 8198, + "Ġregulator": 8199, + "Ġcontaining": 8200, + "Ġremoving": 8201, + "Ġwithdraw": 8202, + "Ġburied": 8203, + "Ġlists": 8204, + "ĠGil": 8205, + "Ġlowered": 8206, + "Ġformally": 8207, + "ĠRound": 8208, + "asi": 8209, + "¥": 8210, + "lett": 8211, + "Ġprogressive": 8212, + "ĠFalcons": 8213, + "ĠRaw": 8214, + "gun": 8215, + "Ġcontributing": 8216, + "Ġhunting": 8217, + "Ġvalid": 8218, + "Ġexception": 8219, + "ĠPlayers": 8220, + "ĠTra": 8221, + "Ġracism": 8222, + "hing": 8223, + "chen": 8224, + "Ġdifferently": 8225, + "Ġchampionships": 8226, + "ĠEng": 
8227, + "ĠNO": 8228, + "ĠAuto": 8229, + "ĠErdogan": 8230, + "iding": 8231, + "Ġwarming": 8232, + "Ġcivilian": 8233, + "ĠDam": 8234, + "Ġfantasy": 8235, + "ĠNav": 8236, + "itions": 8237, + "ĠDrew": 8238, + "ĠNancy": 8239, + "Ġtrapped": 8240, + "ĠRussians": 8241, + "ĠIC": 8242, + "Ġflexibility": 8243, + "ular": 8244, + "Ġviolated": 8245, + "ipped": 8246, + "Ġgarage": 8247, + "ĠDeep": 8248, + "Ġpraise": 8249, + "ĠLab": 8250, + "ĠPlayer": 8251, + "Ġjudicial": 8252, + "Ġdonate": 8253, + "Ġseparated": 8254, + "Ġreleases": 8255, + "nik": 8256, + "Ġexplanation": 8257, + "aph": 8258, + "Ġloyal": 8259, + "Ġstrongest": 8260, + "ĠShar": 8261, + "Ġrescued": 8262, + "Ġambitious": 8263, + "Ġclimb": 8264, + "Ġscared": 8265, + "Ġignored": 8266, + "cut": 8267, + "Ġstole": 8268, + "Ġweakness": 8269, + "ĠRidge": 8270, + "oa": 8271, + "LA": 8272, + "Ġdep": 8273, + "ĠPowell": 8274, + "Do": 8275, + "Ġprotein": 8276, + "Ġreiterated": 8277, + "ĠCox": 8278, + "aling": 8279, + "ĠUnlike": 8280, + "ĠKane": 8281, + "ĠMcConnell": 8282, + "Ġshowcase": 8283, + "Ġuniform": 8284, + "ower": 8285, + "Ġdiscover": 8286, + "stop": 8287, + "ipper": 8288, + "Ġtreatments": 8289, + "Ġgrocery": 8290, + "Ġsubscribers": 8291, + "lock": 8292, + "ple": 8293, + "Ġflew": 8294, + "ania": 8295, + "Ġstepping": 8296, + "ĠSoviet": 8297, + "Ġconsultant": 8298, + "ags": 8299, + "ĠLim": 8300, + "Ġ91": 8301, + "ĠCode": 8302, + "ports": 8303, + "box": 8304, + "Ġlakh": 8305, + "Ġreminder": 8306, + "ym": 8307, + "ĠTravis": 8308, + "Ġpure": 8309, + "now": 8310, + "ĠVR": 8311, + "Ġachievement": 8312, + "ĠEmirates": 8313, + "ĠThunder": 8314, + "Ġmerely": 8315, + "ĠCa": 8316, + "ĠAverage": 8317, + "ĠDa": 8318, + "Ġtopped": 8319, + "ĠCurry": 8320, + "Ġchemicals": 8321, + "Ġamendment": 8322, + "ĠBorder": 8323, + "ĠBat": 8324, + "Ġ130": 8325, + "Ġprogramming": 8326, + "Ġtele": 8327, + "ĠKarl": 8328, + "Ġaveraged": 8329, + "ĠSpe": 8330, + "world": 8331, + "PG": 8332, + "Ġfights": 8333, + "ĠPrincess": 8334, + "ĠCIA": 8335, + "ĠAbe": 8336, + "Ġacted": 8337, + "only": 8338, + "Ġinsight": 8339, + "Ġathlete": 8340, + "ĠTar": 8341, + "commerce": 8342, + "Ġaveraging": 8343, + "cr": 8344, + "ĠPalestinians": 8345, + "Well": 8346, + "Ġbull": 8347, + "Ġchoosing": 8348, + "Ġsurely": 8349, + "ĠSecret": 8350, + "Ġteammate": 8351, + "ĠAmendment": 8352, + "ĠBirmingham": 8353, + "Ġexcitement": 8354, + "strong": 8355, + "ĠSin": 8356, + "Ġdamages": 8357, + "rated": 8358, + "Ġrankings": 8359, + "Ġconservation": 8360, + "home": 8361, + "erm": 8362, + "ield": 8363, + "Ġdisorder": 8364, + "acher": 8365, + "Ġnaturally": 8366, + "atur": 8367, + "Ġpackages": 8368, + "Ġapproaches": 8369, + "icks": 8370, + "ourn": 8371, + "Ġodd": 8372, + "Ġshore": 8373, + "ĠBeing": 8374, + "Ġmagic": 8375, + "Ġtourist": 8376, + "largest": 8377, + "Ġwhenever": 8378, + "Ġlenders": 8379, + "Ġegg": 8380, + "ĠChair": 8381, + "Ġlets": 8382, + "Ġwarnings": 8383, + "į": 8384, + "Ġpol": 8385, + "Ġdrag": 8386, + "ĠAmb": 8387, + "ĠCle": 8388, + "ĠLouisville": 8389, + "ĠShaw": 8390, + "lands": 8391, + "Ġanthem": 8392, + "ĠTrail": 8393, + "Ġaccepting": 8394, + "anger": 8395, + "good": 8396, + "ĠBroad": 8397, + "ĠLebanon": 8398, + "ĠMillion": 8399, + "ĠHenderson": 8400, + "Ġwh": 8401, + "Ġdust": 8402, + "Ġ92": 8403, + "ĠMend": 8404, + "Ġchecking": 8405, + "ĠCow": 8406, + "sized": 8407, + "Ġautomatic": 8408, + "Ġcelebrates": 8409, + "Ġarena": 8410, + "Ġfinger": 8411, + "ĠHarvard": 8412, + "Ġfrustration": 8413, + "Ġstrict": 8414, + "Ġpreserve": 8415, + "Ġsleeping": 8416, + "Ġconverted": 8417, + "Ġinsights": 8418, + 
"Ġtra": 8419, + "Ġjailed": 8420, + "Ġchamber": 8421, + "Ġtoxic": 8422, + "ading": 8423, + "ĠTriple": 8424, + "grade": 8425, + "ĠRest": 8426, + "ĠHoly": 8427, + "oper": 8428, + "Ġdesk": 8429, + "Ġmatchup": 8430, + "Ġsteep": 8431, + "ĠGot": 8432, + "lay": 8433, + "ĠCab": 8434, + "aked": 8435, + "ĠFoster": 8436, + "Ġrunners": 8437, + "ĠNA": 8438, + "Ġdestroy": 8439, + "Ġsupportive": 8440, + "ĠRacing": 8441, + "Ġtrademark": 8442, + "Ġjacket": 8443, + "Ġhorror": 8444, + "ĠAle": 8445, + "Ġass": 8446, + "Ġsch": 8447, + "abb": 8448, + "Ġplanes": 8449, + "Ġimpression": 8450, + "ĠEarly": 8451, + "ĠPompe": 8452, + "Ġking": 8453, + "Ġsilent": 8454, + "ĠCuba": 8455, + "Ġmedication": 8456, + "ences": 8457, + "list": 8458, + "ailing": 8459, + "WA": 8460, + "ella": 8461, + "Ġprop": 8462, + "Ġhalt": 8463, + "Ġslowing": 8464, + "ĠFoods": 8465, + "Ġanonymous": 8466, + "kh": 8467, + "Ġtraveled": 8468, + "Ġcommunicate": 8469, + "Ġter": 8470, + "ĠHockey": 8471, + "ĠRobin": 8472, + "Ġswept": 8473, + "Ġclinic": 8474, + "ration": 8475, + "len": 8476, + "Ġau": 8477, + "Ġcareers": 8478, + "ĠSound": 8479, + "Ġaddresses": 8480, + "China": 8481, + "ĠSr": 8482, + "Ġexhibit": 8483, + "ĠMotors": 8484, + "ĠIl": 8485, + "Ġinstall": 8486, + "ĠOkay": 8487, + "Ġ>>": 8488, + "hood": 8489, + "stand": 8490, + "Ġaudit": 8491, + "Ġcake": 8492, + "Ġflames": 8493, + "bel": 8494, + "ĠMust": 8495, + "ĠManafort": 8496, + "Ġcommodity": 8497, + "night": 8498, + "ĠRoom": 8499, + "ĠLanka": 8500, + "Ġcommander": 8501, + "ln": 8502, + "Ġdatabase": 8503, + "ĠSet": 8504, + "Ġgraduated": 8505, + "ĠTarget": 8506, + "Ġoutbreak": 8507, + "rou": 8508, + "ĠPope": 8509, + "ĠEqu": 8510, + "Ġpolling": 8511, + "Ġdig": 8512, + "Ġbrutal": 8513, + "ĠBarn": 8514, + "Ġdefinition": 8515, + "Ġpit": 8516, + "Ġpickup": 8517, + "ĠBitcoin": 8518, + "ĠReid": 8519, + "Ġloving": 8520, + "ĠHerald": 8521, + "ĠCanadians": 8522, + "Ġneighbor": 8523, + "Ġdies": 8524, + "ione": 8525, + "ĠRef": 8526, + "big": 8527, + "Ġguards": 8528, + "including": 8529, + "ente": 8530, + "Ġpartially": 8531, + "Image": 8532, + "Ġbulk": 8533, + "Ġslot": 8534, + "ĠNorthwest": 8535, + "ĠBarclays": 8536, + "Ġairlines": 8537, + "iver": 8538, + "isi": 8539, + "Ġsubsidiary": 8540, + "Ġcont": 8541, + "ĠDaniels": 8542, + "Ġscript": 8543, + "Ġunfair": 8544, + "Ġscreens": 8545, + "Ġprof": 8546, + "ĠIrma": 8547, + "Ġ1992": 8548, + "Ġmandatory": 8549, + "ĠSant": 8550, + "Ġsuspicion": 8551, + "NES": 8552, + "ĠLauren": 8553, + "igen": 8554, + "Ġprevention": 8555, + "Ġtension": 8556, + "ema": 8557, + "Ġtasks": 8558, + "Ġshake": 8559, + "Ġexplosive": 8560, + "Ġaffects": 8561, + "Ġmum": 8562, + "ĠDog": 8563, + "rer": 8564, + "Ġopted": 8565, + "Ġtrio": 8566, + "Ġlesson": 8567, + "Ġautomotive": 8568, + "where": 8569, + "ĠMontgomery": 8570, + "Ġcouples": 8571, + "Ġ89": 8572, + "AF": 8573, + "Ġinfo": 8574, + "ĠForm": 8575, + "Ġspectrum": 8576, + "Ġbands": 8577, + "Ġokay": 8578, + "Ġstroke": 8579, + "ĠNetanyahu": 8580, + "Ġwealthy": 8581, + "ĠAround": 8582, + "ĠGlenn": 8583, + "sec": 8584, + "there": 8585, + "ickets": 8586, + "ĠBudget": 8587, + "ĠBMW": 8588, + "Ġflagship": 8589, + "rier": 8590, + "Ġpodcast": 8591, + "Ġpursuing": 8592, + "Ġpos": 8593, + "ĠIslands": 8594, + "ĠUrban": 8595, + "page": 8596, + "Ġemotions": 8597, + "ided": 8598, + "Ġdividends": 8599, + "Ġboom": 8600, + "Ġaccusing": 8601, + "ird": 8602, + "ĠNam": 8603, + "ava": 8604, + "Ġwishes": 8605, + "ĠNy": 8606, + "ĠStanford": 8607, + "Ġcriteria": 8608, + "ĠJews": 8609, + "Ġengineers": 8610, + "Ġaccuracy": 8611, + "Ġdisplays": 8612, + "Ġdeserves": 
8613, + "ridge": 8614, + "omm": 8615, + "aur": 8616, + "Ġdramatically": 8617, + "Ġunity": 8618, + "speed": 8619, + "Ġdeclining": 8620, + "Ġpermits": 8621, + "ĠKn": 8622, + "Ġconsulting": 8623, + "aux": 8624, + "ATE": 8625, + "ĠWat": 8626, + "ĠEditor": 8627, + "sy": 8628, + "urn": 8629, + "ĠUsing": 8630, + "asc": 8631, + "ital": 8632, + "Ġcre": 8633, + "quality": 8634, + "Ġce": 8635, + "Ġenemy": 8636, + "Ġoffence": 8637, + "icket": 8638, + "ĠDick": 8639, + "ĠTH": 8640, + "ĠChampionships": 8641, + "Ġoverwhelming": 8642, + "rib": 8643, + "ku": 8644, + "rap": 8645, + "Ġhomer": 8646, + "acion": 8647, + "member": 8648, + "erv": 8649, + "aney": 8650, + "MB": 8651, + "eded": 8652, + "Ġpunishment": 8653, + "Ġnegotiate": 8654, + "ĠFile": 8655, + "stream": 8656, + "ĠHur": 8657, + "Ġnose": 8658, + "ĠFab": 8659, + "iter": 8660, + "Ġpainful": 8661, + "ITY": 8662, + "eren": 8663, + "Ġcollecting": 8664, + "Additional": 8665, + "Ġentrepreneurs": 8666, + "bal": 8667, + "Ġexploring": 8668, + "Ġguitar": 8669, + "Ġpartnerships": 8670, + "Ġfurniture": 8671, + "Ġauthorized": 8672, + "Ġeasing": 8673, + "shirt": 8674, + "ĠGross": 8675, + "Ġpolitician": 8676, + "ĠSimpson": 8677, + "Ġdrone": 8678, + "ĠKatie": 8679, + "Ġprofitability": 8680, + "ĠNHS": 8681, + "ĠSierra": 8682, + "ĠNorway": 8683, + "ASHINGTON": 8684, + "ific": 8685, + "Ġcondemned": 8686, + "team": 8687, + "ĠNebraska": 8688, + "Ġthrilled": 8689, + "iller": 8690, + "Ġpatrol": 8691, + "ĠWR": 8692, + "orm": 8693, + "Ġspectacular": 8694, + "ĠKnight": 8695, + "ĠTravel": 8696, + "nam": 8697, + "Ġmuscle": 8698, + "ĠRain": 8699, + "ĠColombia": 8700, + "Ġnursing": 8701, + "Ġmigration": 8702, + "ĠMitch": 8703, + "Ġreleasing": 8704, + "ĠBesides": 8705, + "ĠMul": 8706, + "Ġheadline": 8707, + "Ġcontemporary": 8708, + "Ġdev": 8709, + "ĠChan": 8710, + "Ġindicates": 8711, + "ĠAp": 8712, + "ĠLt": 8713, + "ĠMarvel": 8714, + "Ġremembered": 8715, + "®": 8716, + "ĠForces": 8717, + "ĠColin": 8718, + "ĠGabriel": 8719, + "Ġobjects": 8720, + "ĠRHP": 8721, + "kar": 8722, + "ĠKo": 8723, + "Ġsignals": 8724, + "Ġinner": 8725, + "real": 8726, + "RO": 8727, + "Ġromantic": 8728, + "cat": 8729, + "ĠKel": 8730, + "Ġgut": 8731, + "ĠBoys": 8732, + "Ġyoungest": 8733, + "ĠCeltics": 8734, + "Ġslated": 8735, + "Ġremind": 8736, + "Ġproductive": 8737, + "set": 8738, + "Co": 8739, + "ĠBailey": 8740, + "Ġrenewable": 8741, + "ĠCarson": 8742, + "ĠDj": 8743, + "ĠKos": 8744, + "Ġurge": 8745, + "Ġfin": 8746, + "Ġpursuit": 8747, + "ĠCON": 8748, + "ĠChapter": 8749, + "Ġpal": 8750, + "Ġgate": 8751, + "ĠPackers": 8752, + "ĠReports": 8753, + "ĠRugby": 8754, + "ĠMasters": 8755, + "MO": 8756, + "Ġ98": 8757, + "Ġcatches": 8758, + "ĠAgreement": 8759, + "ĠTillerson": 8760, + "ĠIce": 8761, + "Ġrumors": 8762, + "ĠLeonard": 8763, + "ĠDolphins": 8764, + "ĠLP": 8765, + "top": 8766, + "ĠCrist": 8767, + "ĠHon": 8768, + "Ġblaze": 8769, + "Ġrhetoric": 8770, + "ands": 8771, + "ady": 8772, + "David": 8773, + "igh": 8774, + "Ġbuzz": 8775, + "ĠStrong": 8776, + "Ġshocking": 8777, + "ĠRh": 8778, + "Ġnegotiating": 8779, + "Ġtender": 8780, + "ĠJohnny": 8781, + "ĠMario": 8782, + "Ġ97": 8783, + "ĠHeritage": 8784, + "Ġexists": 8785, + "Ġprayers": 8786, + "Ġlengthy": 8787, + "Ġsafer": 8788, + "ĠHalloween": 8789, + "ĠJared": 8790, + "ĠConnect": 8791, + "Ġbump": 8792, + "Ġstrain": 8793, + "Ġfilling": 8794, + "Ġtrauma": 8795, + "Ġcompleting": 8796, + "cht": 8797, + "Ġkillings": 8798, + "anne": 8799, + "GE": 8800, + "ĠRescue": 8801, + "Ġdealers": 8802, + "Ġlocals": 8803, + "ĠVictor": 8804, + "Ġtragic": 8805, + "Ġdelivers": 8806, + 
"orts": 8807, + "Ġrugby": 8808, + "Ġinstallation": 8809, + "asa": 8810, + "ĠBart": 8811, + "Ġjournal": 8812, + "school": 8813, + "ĠCome": 8814, + "ĠVeterans": 8815, + "Sun": 8816, + "Ġcrowds": 8817, + "Ġtransparent": 8818, + "Ġimplications": 8819, + "ĠHuawei": 8820, + "sex": 8821, + "Ġrallied": 8822, + "Ġresponses": 8823, + "Ġdebris": 8824, + "Ġconvention": 8825, + "Ġmothers": 8826, + "BE": 8827, + "ĠRoute": 8828, + "Ġrebel": 8829, + "ĠEmmanuel": 8830, + "aster": 8831, + "Ġunderstands": 8832, + "pound": 8833, + "ĠCastle": 8834, + "Ġ2021": 8835, + "rik": 8836, + "ĠGR": 8837, + "Ġconvince": 8838, + "ault": 8839, + "Ġpassionate": 8840, + "ĠSciences": 8841, + "Ġarrives": 8842, + "idad": 8843, + "Ġcelebrities": 8844, + "ends": 8845, + "ĠFans": 8846, + "Ġdish": 8847, + "ĠCorps": 8848, + "hat": 8849, + "Ġemployer": 8850, + "ĠHy": 8851, + "Ġpowered": 8852, + "Ġgrandmother": 8853, + "ĠFL": 8854, + "oured": 8855, + "VE": 8856, + "ĠInst": 8857, + "ĠPerez": 8858, + "Ġtune": 8859, + "Ġcitizenship": 8860, + "Ġignore": 8861, + "Ġdoubles": 8862, + "IB": 8863, + "Ġprogrammes": 8864, + "inda": 8865, + "Ġentities": 8866, + "ĠInterior": 8867, + "Ġprompting": 8868, + "Ġwire": 8869, + "Ġtheatre": 8870, + "%)": 8871, + "Ġheels": 8872, + "ĠJu": 8873, + "Ġdeposits": 8874, + "Ġtrash": 8875, + "mond": 8876, + "she": 8877, + "iana": 8878, + "Ġislands": 8879, + "ĠTommy": 8880, + "Ġpub": 8881, + "Ġdiscipline": 8882, + "ĠSW": 8883, + "Ġmusicians": 8884, + "Ġembassy": 8885, + "ĠQB": 8886, + "hander": 8887, + "UES": 8888, + "ĠFerguson": 8889, + "Ġblocking": 8890, + "ahn": 8891, + "Ġfines": 8892, + "Ġtactics": 8893, + "Ġbullet": 8894, + "Ġequipped": 8895, + "Ġescaped": 8896, + "ĠSil": 8897, + "ĠPack": 8898, + "ĠAthletic": 8899, + "ĠMic": 8900, + "ĠDoes": 8901, + "ĠCarr": 8902, + "ĠChargers": 8903, + "ĠKyl": 8904, + "Ġzones": 8905, + "µ": 8906, + "iki": 8907, + "Ġgreatly": 8908, + "ĠMD": 8909, + "Ġimmigrant": 8910, + "ĠConstruction": 8911, + "ĠBorn": 8912, + "iment": 8913, + "ĠWade": 8914, + "Ġvisa": 8915, + "Ġgenuine": 8916, + "Ġelectronics": 8917, + "ĠSat": 8918, + "Ġsponsors": 8919, + "ĠMontana": 8920, + "Ġspell": 8921, + "ĠSachs": 8922, + "ĠEt": 8923, + "Ġfoster": 8924, + "Ġlocker": 8925, + "Ġexplaining": 8926, + "ĠAge": 8927, + "Ġgunman": 8928, + "Ġsauce": 8929, + "Ġcry": 8930, + "Ġstimulus": 8931, + "Ġarray": 8932, + "Ġcompare": 8933, + "Ġboats": 8934, + "Ġext": 8935, + "iders": 8936, + "ĠAst": 8937, + "ĠParks": 8938, + "ester": 8939, + "Ġ94": 8940, + "Ġrelating": 8941, + "Ġvegetables": 8942, + "Ġaccountable": 8943, + "Ġhyper": 8944, + "ĠWim": 8945, + "Ġnewest": 8946, + "ĠRome": 8947, + "ĠChancellor": 8948, + "CBS": 8949, + "Ġbusinessman": 8950, + "ĠDelaware": 8951, + "Ġlands": 8952, + "court": 8953, + "aria": 8954, + "Ġapproaching": 8955, + "cker": 8956, + "ĠSalt": 8957, + "ĠMak": 8958, + "Ġtreating": 8959, + "Ġsubsequently": 8960, + "ĠEll": 8961, + "xton": 8962, + "Ġ180": 8963, + "Ġdetermination": 8964, + "ĠSalman": 8965, + "ĠJoel": 8966, + "Ġclassified": 8967, + "Ġspan": 8968, + "Ġearthquake": 8969, + "ranked": 8970, + "Ġ96": 8971, + "ĠTiger": 8972, + "Ġadvocacy": 8973, + "mit": 8974, + "Ġcolleges": 8975, + "ĠYeah": 8976, + "ĠCaptain": 8977, + "Ġorange": 8978, + "Ġprojections": 8979, + "Ġelectrical": 8980, + "ĠMA": 8981, + "olog": 8982, + "ĠNewcastle": 8983, + "oppers": 8984, + "Ġrepresentation": 8985, + "Ġlawsuits": 8986, + "just": 8987, + "aced": 8988, + "ĠRace": 8989, + "ĠAqu": 8990, + "ĠBills": 8991, + "Ġexclusively": 8992, + "ĠProfile": 8993, + "Ġhometown": 8994, + "ĠStan": 8995, + "Ġstarring": 8996, + 
"Ġdeciding": 8997, + "ĠRating": 8998, + "ĠMedicare": 8999, + "ĠTransport": 9000, + "Ġmystery": 9001, + "ĠTa": 9002, + "ĠPad": 9003, + "ĠSwedish": 9004, + "ĠCarroll": 9005, + "about": 9006, + "Ġtorn": 9007, + "Ġnurse": 9008, + "NE": 9009, + "Ġwaited": 9010, + "ĠJeffrey": 9011, + "ĠUntil": 9012, + "Ġbone": 9013, + "ĠBobby": 9014, + "Ġpronounced": 9015, + "Ġpharmaceutical": 9016, + "ĠGallery": 9017, + "ĠMatch": 9018, + "Ġeconomists": 9019, + "ĠMarketing": 9020, + "face": 9021, + "ĠPetroleum": 9022, + "ories": 9023, + "ĠMets": 9024, + "ĠCore": 9025, + "billion": 9026, + "Ġexamination": 9027, + "ĠPorter": 9028, + "2016": 9029, + "Ġgolden": 9030, + "Ġsem": 9031, + "ĠDuterte": 9032, + "ĠJefferson": 9033, + "ĠTehran": 9034, + "ĠLeicester": 9035, + "ĠDA": 9036, + "Ġadapt": 9037, + "ĠDame": 9038, + "ĠRic": 9039, + "Ġunchanged": 9040, + "ect": 9041, + "Ġsections": 9042, + "kg": 9043, + "igned": 9044, + "Ġfilings": 9045, + "Ġreact": 9046, + "Ġurgent": 9047, + "Ġvessels": 9048, + "Ġspark": 9049, + "Ġbutter": 9050, + "ĠCons": 9051, + "Ġstating": 9052, + "Ġcorporations": 9053, + "ĠHus": 9054, + "Ġdamaging": 9055, + "raw": 9056, + "Ġequality": 9057, + "Two": 9058, + "ĠMills": 9059, + "iu": 9060, + "Ġobligation": 9061, + "ĠBrook": 9062, + "arian": 9063, + "Re": 9064, + "Ġphotographs": 9065, + "Ġepic": 9066, + "ĠStudent": 9067, + "ĠTherefore": 9068, + "Ġgod": 9069, + "ĠFILE": 9070, + "iqu": 9071, + "Ġdescribing": 9072, + "Ġproceed": 9073, + "Ġcas": 9074, + "ĠKat": 9075, + "ĠBra": 9076, + "Ġadequate": 9077, + "Ġpassage": 9078, + "Ġthanked": 9079, + "USA": 9080, + "ĠNeither": 9081, + "ĠLegislature": 9082, + "Ġfinances": 9083, + "Ġinst": 9084, + "ĵ": 9085, + "ĠAngels": 9086, + "Ġvet": 9087, + "ĠDead": 9088, + "Ex": 9089, + "Ġkicks": 9090, + "force": 9091, + "Ġsoy": 9092, + "ĠWindsor": 9093, + "Ġenhanced": 9094, + "Ġ1993": 9095, + "ĠCzech": 9096, + "Ġgradually": 9097, + "ĠMagic": 9098, + "Ġshadow": 9099, + "Ġneighborhoods": 9100, + "ĠRivers": 9101, + "Ġrapper": 9102, + "ĠGirl": 9103, + "ĠRot": 9104, + "Ġcrackdown": 9105, + "fish": 9106, + "Ġpreventing": 9107, + "Ġproduces": 9108, + "ĠMi": 9109, + "Ġnotified": 9110, + "Ġunderground": 9111, + "WE": 9112, + "Ġadmits": 9113, + "Ġboxing": 9114, + "Ġrefer": 9115, + "Ġcommitments": 9116, + "ĠWoman": 9117, + "Ġdenies": 9118, + "col": 9119, + "ĠSide": 9120, + "Ġambulance": 9121, + "ĠRodgers": 9122, + "Ġaftermath": 9123, + "Ġdeck": 9124, + "irmed": 9125, + "Ġerrors": 9126, + "ĠConvention": 9127, + "Ġcurb": 9128, + "ĠShop": 9129, + "ĠThai": 9130, + "Ġma": 9131, + "Ġrespected": 9132, + "ĠMVP": 9133, + "Ġborrowing": 9134, + "Ġcruise": 9135, + "ĠSure": 9136, + "Ġsentencing": 9137, + "ĠObamacare": 9138, + "ĠIr": 9139, + "ĠSale": 9140, + "ĠPete": 9141, + "Ġopenly": 9142, + "Ġstartup": 9143, + "rock": 9144, + "Ġcargo": 9145, + "Ġtelecom": 9146, + "ĠDownload": 9147, + "Ġextending": 9148, + "ĠCurrent": 9149, + "Ġcompetitions": 9150, + "ĠKids": 9151, + "Ġshy": 9152, + "ĠKerry": 9153, + "ĠNever": 9154, + "ĠDevils": 9155, + "Ġprim": 9156, + "Con": 9157, + "Ġcurve": 9158, + "Ġassumed": 9159, + "Ġadjust": 9160, + "Ġimmune": 9161, + "UE": 9162, + "ĠUr": 9163, + "Ġconventional": 9164, + "Ġgrandchildren": 9165, + "ĠBol": 9166, + "Ad": 9167, + "ĠMaduro": 9168, + "fi": 9169, + "ĠUAE": 9170, + "ĠOrgan": 9171, + "Ġindicating": 9172, + "iem": 9173, + "ĠAgainst": 9174, + "ĠAmbassador": 9175, + "ĠSeoul": 9176, + "Ġcriminals": 9177, + "how": 9178, + "put": 9179, + "Ġreminded": 9180, + "Ġparked": 9181, + "lich": 9182, + "Ġcontinent": 9183, + "Ġmatched": 9184, + "ĠNicole": 9185, + "Ġgenetic": 
9186, + "Ġhumanity": 9187, + "ĠTem": 9188, + "Ġindicator": 9189, + "Ġvessel": 9190, + "Ġdefendant": 9191, + "ĠGriffin": 9192, + "jan": 9193, + "Ġvend": 9194, + "boro": 9195, + "Ġbrokerage": 9196, + "ĠFall": 9197, + "Ġmere": 9198, + "VILLE": 9199, + "Ġlasted": 9200, + "ĠMind": 9201, + "Ġpatch": 9202, + "ĠInsider": 9203, + "ĠComm": 9204, + "Ġtechnique": 9205, + "ĠIM": 9206, + "ĠCavaliers": 9207, + "Ġshame": 9208, + "Ġmil": 9209, + "oot": 9210, + "irt": 9211, + "Ġcop": 9212, + "ĠLeon": 9213, + "Ġfrozen": 9214, + "Ġslip": 9215, + "pton": 9216, + "Ġpanels": 9217, + "Ġpitching": 9218, + "Ġleather": 9219, + "ĠLogan": 9220, + "ĠNearly": 9221, + "urch": 9222, + "Ġinstructions": 9223, + "ĠRow": 9224, + "ĠKurdish": 9225, + "this": 9226, + "Ġlegendary": 9227, + "su": 9228, + "Ġstabbed": 9229, + "sters": 9230, + "Ġteenage": 9231, + "def": 9232, + "Ġoversight": 9233, + "Ġvolatile": 9234, + "Ġtransmission": 9235, + "ĠSgt": 9236, + "ĠIndigenous": 9237, + "ĠOxford": 9238, + "ĠCasey": 9239, + "Ġcor": 9240, + "Ġsalaries": 9241, + "Ġsponsor": 9242, + "Ġprescription": 9243, + "mat": 9244, + "ĠLeeds": 9245, + "ĠPakistani": 9246, + "Ġevil": 9247, + "Ġtables": 9248, + "ĠAbdul": 9249, + "Ġexpectation": 9250, + "Ġlegislature": 9251, + "ĠLin": 9252, + "¹": 9253, + "Ġcontractor": 9254, + "Ġshifting": 9255, + "Ġgenerous": 9256, + "ĠEddie": 9257, + "Ġpuck": 9258, + "utt": 9259, + "Ġdubbed": 9260, + "Ġnowhere": 9261, + "Ġbetting": 9262, + "Ġdisclose": 9263, + "Ĥ": 9264, + "ĠFashion": 9265, + "ĠHarper": 9266, + "handed": 9267, + "isha": 9268, + "ĠReds": 9269, + "Ġachievements": 9270, + "ume": 9271, + "Ġshootings": 9272, + "Ġadvisers": 9273, + "ĠEaster": 9274, + "Ġinternationally": 9275, + "ĠWi": 9276, + "ĠGandhi": 9277, + "ĠChristians": 9278, + "Ġrecruiting": 9279, + "Ġexperiment": 9280, + "Ġsol": 9281, + "Ġdifficulties": 9282, + "Ġinfluential": 9283, + "Ġhybrid": 9284, + "Ġformation": 9285, + "ĠBoulevard": 9286, + "Ġflags": 9287, + "Ġformula": 9288, + "front": 9289, + "Ġinclusion": 9290, + "ĠNone": 9291, + "ICE": 9292, + "Ġfilming": 9293, + "ĠLou": 9294, + "ĠReynolds": 9295, + "Ġpump": 9296, + "Ġexceptional": 9297, + "ANG": 9298, + "ĠCorporate": 9299, + "SAN": 9300, + "ĠHealthcare": 9301, + "ĠUkrainian": 9302, + "aron": 9303, + "Ġpants": 9304, + "Ġdrops": 9305, + "ete": 9306, + "ĠStudies": 9307, + "Ġwounds": 9308, + "END": 9309, + "Ġshower": 9310, + "Ġreviewing": 9311, + "ĠGreater": 9312, + "Ġ»": 9313, + "itors": 9314, + "alled": 9315, + "Ġsqu": 9316, + "ĠRonald": 9317, + "ĠInv": 9318, + "Ġtougher": 9319, + "Ġbalanced": 9320, + "Ġlined": 9321, + "Ġprinciple": 9322, + "Ġ1950": 9323, + "Ġleak": 9324, + "Be": 9325, + "Ġcircuit": 9326, + "Ġunfortunate": 9327, + "ĠGran": 9328, + "ĠFish": 9329, + "Ġfriendship": 9330, + "asp": 9331, + "OO": 9332, + "Ġobligations": 9333, + "Ġcoup": 9334, + "OK": 9335, + "Ġbreakdown": 9336, + "Ġhook": 9337, + "Ġresearcher": 9338, + "inated": 9339, + "ĠMarie": 9340, + "ĠGab": 9341, + "ĠWA": 9342, + "quez": 9343, + "General": 9344, + "ĠSwift": 9345, + "Ġgust": 9346, + "ĠCarol": 9347, + "ĠCentury": 9348, + "ĠOPEC": 9349, + "ĠRd": 9350, + "ĠCop": 9351, + "Ġsubjects": 9352, + "ĠComments": 9353, + "ases": 9354, + "Ġrelation": 9355, + "ĠEnvironment": 9356, + "ı": 9357, + "Ġgasoline": 9358, + "ĠLog": 9359, + "Ġicon": 9360, + "Ġprofitable": 9361, + "ĠRetail": 9362, + "ANC": 9363, + "Ġappealing": 9364, + "Ġvillages": 9365, + "Ġpizza": 9366, + "Ġmall": 9367, + "Ġtower": 9368, + "ĠLinda": 9369, + "Ġaccomplished": 9370, + "Ġpod": 9371, + "Ġleaked": 9372, + "ĠWed": 9373, + "Ġmer": 9374, + "Ġopposing": 9375, + 
"!'": 9376, + "Ġstomach": 9377, + "Ġrevealing": 9378, + "Ġho": 9379, + "DF": 9380, + "ĠSterling": 9381, + "Ġsolely": 9382, + "Ġpres": 9383, + "ĠCy": 9384, + "ĠLatest": 9385, + "ĠPitt": 9386, + "ĠThink": 9387, + "Ġcapability": 9388, + "aled": 9389, + "Ġexecuted": 9390, + "alling": 9391, + "ĠSilva": 9392, + "Ġrestricted": 9393, + "Ġdeclaration": 9394, + "Ġkilometres": 9395, + "rol": 9396, + "Ġidentifying": 9397, + "Ġdonors": 9398, + "vent": 9399, + "Ġcostly": 9400, + "ense": 9401, + "ĠSeeking": 9402, + "OURCE": 9403, + "iving": 9404, + "Ġplacing": 9405, + "tech": 9406, + "Ġbottles": 9407, + "writer": 9408, + "ĠSeahawks": 9409, + "oming": 9410, + "ĠArthur": 9411, + "ously": 9412, + "bin": 9413, + "ĠVa": 9414, + "Ġbias": 9415, + "Ġliability": 9416, + "ift": 9417, + "rak": 9418, + "aves": 9419, + "Ġcautious": 9420, + "ĠPrize": 9421, + "iley": 9422, + "ĠSharma": 9423, + "global": 9424, + "Ġwars": 9425, + "sm": 9426, + "ĠRemember": 9427, + "wind": 9428, + "ĠRichardson": 9429, + "ĠSum": 9430, + "ĠVincent": 9431, + "ĠRice": 9432, + "inf": 9433, + "Ġconsultation": 9434, + "range": 9435, + "Ġbacteria": 9436, + "Ġarchitecture": 9437, + "Ġpole": 9438, + "ĠMach": 9439, + "Ġcattle": 9440, + "Ġabused": 9441, + "being": 9442, + "ĠHERE": 9443, + "Ġfame": 9444, + "Ġhearings": 9445, + "ĠBrit": 9446, + "Ġjoins": 9447, + "ĠMcGregor": 9448, + "Ġoppose": 9449, + "Ġcheer": 9450, + "itting": 9451, + "imes": 9452, + "Ġusage": 9453, + "Ġstint": 9454, + "Ġoutlet": 9455, + "Ġshoppers": 9456, + "ĠBaptist": 9457, + "Ġinappropriate": 9458, + "ĠALSO": 9459, + "Ġstealing": 9460, + "Ġpledge": 9461, + "ĠRan": 9462, + "Ġphotographer": 9463, + "Ġprevented": 9464, + "Ġ01": 9465, + "ĠEngineering": 9466, + "ĠProducts": 9467, + "Ġuniverse": 9468, + "ĠMcCarthy": 9469, + "¿": 9470, + "graded": 9471, + "Ġinspection": 9472, + "Ġind": 9473, + "Fi": 9474, + "aren": 9475, + "Ġprotections": 9476, + "Ġsorts": 9477, + "ĠWorks": 9478, + "Ġbillionaire": 9479, + "ĠGay": 9480, + "ĠiPad": 9481, + "IX": 9482, + "Ġdefendants": 9483, + "band": 9484, + "Ġfarms": 9485, + "Ġhom": 9486, + "gal": 9487, + "iant": 9488, + "Ġnortheast": 9489, + "ĠJoint": 9490, + "Ġcanceled": 9491, + "Ġtoys": 9492, + "Ġrein": 9493, + "ĠTumblr": 9494, + "pees": 9495, + "ĠAut": 9496, + "Police": 9497, + "Ġaide": 9498, + "Ġachieving": 9499, + "Ġmund": 9500, + "ĠCommercial": 9501, + "first": 9502, + "Ġanticipate": 9503, + "iac": 9504, + "Ġprobation": 9505, + "hem": 9506, + "Ġports": 9507, + "ĠKer": 9508, + "Ġsupplier": 9509, + "ĠFather": 9510, + "ĠAnti": 9511, + "ashed": 9512, + "ĠTable": 9513, + "bledon": 9514, + "Ġunf": 9515, + "ĠRash": 9516, + "ĠLeBron": 9517, + "Car": 9518, + "bu": 9519, + "ĠDerek": 9520, + "Ġaccounted": 9521, + "ĠPri": 9522, + "nings": 9523, + "Ġreceives": 9524, + "lev": 9525, + "Ġbilateral": 9526, + "ĠList": 9527, + "ĠLG": 9528, + "ĠJazz": 9529, + "Ġrestored": 9530, + "Ġbattles": 9531, + "ials": 9532, + "Ġoccupied": 9533, + "Ġrepairs": 9534, + "Ġradar": 9535, + "ĠMLB": 9536, + "ĠNC": 9537, + "Ġflexible": 9538, + "ĠCommand": 9539, + "Ġcoat": 9540, + "ĠVir": 9541, + "ĠColts": 9542, + "ĠBC": 9543, + "Ġtwin": 9544, + "Ġprisoners": 9545, + "Ġslowed": 9546, + "hop": 9547, + "ĠInn": 9548, + "Ġconflicts": 9549, + "Ġmeasured": 9550, + "Ġautonomous": 9551, + "ĠBow": 9552, + "Ġdisc": 9553, + "inson": 9554, + "ĠSche": 9555, + "aire": 9556, + "ĠSU": 9557, + "ĠPeterson": 9558, + "Ġdrafted": 9559, + "ĠPelosi": 9560, + "ĠSoon": 9561, + "Ġmechanism": 9562, + "Ġaccountability": 9563, + "ĠNortheast": 9564, + "Ġfo": 9565, + "Ġanalytics": 9566, + "ĠEverything": 9567, + 
"Ġperceived": 9568, + "bers": 9569, + "Ġcelebrations": 9570, + "Ġinstruments": 9571, + "Ġstrip": 9572, + "ĠJuventus": 9573, + "Ġunfortunately": 9574, + "ĠGA": 9575, + "Ġwrestling": 9576, + "Ġstatue": 9577, + "vis": 9578, + "five": 9579, + "Ġmarine": 9580, + "ĠSamuel": 9581, + "Ġresponsibilities": 9582, + "hill": 9583, + "Ġrecruit": 9584, + "Ġreferee": 9585, + "ĠRail": 9586, + "ĠEagle": 9587, + "ĠCongressional": 9588, + "Ġbreathing": 9589, + "Ġbass": 9590, + "hit": 9591, + "Ġspreading": 9592, + "Ġevacuated": 9593, + "Ġintellectual": 9594, + "Ġsovereign": 9595, + "ocked": 9596, + "Ġslammed": 9597, + "Ġformerly": 9598, + "Ġarch": 9599, + "Ġdifficulty": 9600, + "ĠAFC": 9601, + "ĠFresh": 9602, + "Ġinvite": 9603, + "oner": 9604, + "ĠMich": 9605, + "Ġpitches": 9606, + "stock": 9607, + "Ġinitiated": 9608, + "ĠKu": 9609, + "ĠFlorence": 9610, + "yd": 9611, + "ĠFast": 9612, + "Ġmusician": 9613, + "ĠChile": 9614, + "anga": 9615, + "Ġdairy": 9616, + "Ġcontractors": 9617, + "ador": 9618, + "ĠPlanning": 9619, + "Ġultra": 9620, + "Ġprayer": 9621, + "Ġsuggestions": 9622, + "ĠEk": 9623, + "Ġrandom": 9624, + "ĠSullivan": 9625, + "Ġsensor": 9626, + "Ġhomicide": 9627, + "ĠIncome": 9628, + "Ġsettings": 9629, + "Ġacknowledge": 9630, + "ĠStay": 9631, + "Ġterminal": 9632, + "Ġ1991": 9633, + "West": 9634, + "hard": 9635, + "arc": 9636, + "Ġcombine": 9637, + "Ġprivately": 9638, + "Ġbarrier": 9639, + "Ġmedian": 9640, + "Ġwhereas": 9641, + "ĠTitans": 9642, + "Ġincentives": 9643, + "Ġhistorically": 9644, + "Ġindictment": 9645, + "Ġhiding": 9646, + "ĠPDT": 9647, + "Ġrebuild": 9648, + "hol": 9649, + "Ġpour": 9650, + "Ġairports": 9651, + "ĠEdinburgh": 9652, + "Ġappoint": 9653, + "ĠJul": 9654, + "Ġconfusion": 9655, + "Ġdam": 9656, + "ork": 9657, + "Ġcalculated": 9658, + "Ġhood": 9659, + "ĠTemple": 9660, + "ĠYorkshire": 9661, + "EP": 9662, + "ented": 9663, + "Ġapology": 9664, + "awi": 9665, + "Ġfacilitate": 9666, + "ĠSheffield": 9667, + "Ġrides": 9668, + "Ġcompelling": 9669, + "ĠGonzalez": 9670, + "roll": 9671, + "ONG": 9672, + "UP": 9673, + "ĠAj": 9674, + "pen": 9675, + "ĠVar": 9676, + "ĠIPO": 9677, + "ĠAnimal": 9678, + "Ġshifted": 9679, + "Ġ140": 9680, + "Ġtobacco": 9681, + "El": 9682, + "ild": 9683, + "Ġuncertain": 9684, + "Un": 9685, + "Ġcaps": 9686, + "Ġrecreational": 9687, + "ĠTu": 9688, + "Ġenc": 9689, + "More": 9690, + "iko": 9691, + "ĠEverton": 9692, + "ĠWalk": 9693, + "Ġmurdered": 9694, + "Ġpur": 9695, + "Ġdivisions": 9696, + "ivo": 9697, + "Ġfarming": 9698, + "Ġcourage": 9699, + "ped": 9700, + "Ġcrying": 9701, + "Ġattributed": 9702, + "ée": 9703, + "Ġimplementing": 9704, + "ĠWang": 9705, + "Ġspeeds": 9706, + "alk": 9707, + "aming": 9708, + "eries": 9709, + "Ġavoided": 9710, + "ĠMessi": 9711, + "Ġconsiderable": 9712, + "rt": 9713, + "Ġinauguration": 9714, + "ĠPH": 9715, + "Ġsoldier": 9716, + "Ġore": 9717, + "ollywood": 9718, + "otive": 9719, + "ĠAuburn": 9720, + "ĠSav": 9721, + "ĠPut": 9722, + "Ġemphasis": 9723, + "Ġaf": 9724, + "owed": 9725, + "Ġdiagnosis": 9726, + "Ġcart": 9727, + "Ġassisted": 9728, + "ĠOrder": 9729, + "ĠEstate": 9730, + "Ġintends": 9731, + "ĠCommon": 9732, + "Ġadventure": 9733, + "Ġbeliefs": 9734, + "Ġlasting": 9735, + "cel": 9736, + "Ġdeployment": 9737, + "tra": 9738, + "ĠStories": 9739, + "Ġquote": 9740, + "Ġfeared": 9741, + "Ġconvenience": 9742, + "Ġoptimism": 9743, + "Ġscientist": 9744, + "ĠEnterprise": 9745, + "ĠRex": 9746, + "ĠFel": 9747, + "Ġposes": 9748, + "Ġroot": 9749, + "Ġevacuation": 9750, + "Ġpresidents": 9751, + "ĠRather": 9752, + "Ġgrave": 9753, + "ĠHeights": 9754, + "Ġjumping": 
9755, + "driven": 9756, + "Ġaluminum": 9757, + "Ġholders": 9758, + "Ġboot": 9759, + "iber": 9760, + "Ġprecious": 9761, + "uation": 9762, + "FP": 9763, + "uses": 9764, + "Ġcommentary": 9765, + "Ġadvances": 9766, + "ĠNissan": 9767, + "Ġbronze": 9768, + "Ġinspire": 9769, + "Ġstarters": 9770, + "ĠEvan": 9771, + "rah": 9772, + "body": 9773, + "Ġcrops": 9774, + "Ġseeds": 9775, + "Ġharsh": 9776, + "ĠHomeland": 9777, + "Ġenabled": 9778, + "ological": 9779, + "Ġworkshop": 9780, + "Ġchains": 9781, + "amps": 9782, + "Ġamongst": 9783, + "ĠBear": 9784, + "Ġcertified": 9785, + "ĠJulie": 9786, + "Ġmountains": 9787, + "VA": 9788, + "Ġfed": 9789, + "Ġbuyer": 9790, + "ahl": 9791, + "ĠBos": 9792, + "ĠCrystal": 9793, + "Ġquest": 9794, + "ĠStein": 9795, + "Ġacceptable": 9796, + "Ġunbeaten": 9797, + "iring": 9798, + "ural": 9799, + "Ġuncomfortable": 9800, + "Ġpartial": 9801, + "Ġsacrifice": 9802, + "ĠGrande": 9803, + "Ġarrangement": 9804, + "Ġpackaging": 9805, + "screen": 9806, + "Ġmirror": 9807, + "Ġsweep": 9808, + "Ġconnecting": 9809, + "Ġpanic": 9810, + "ĠJacksonville": 9811, + "ĠKremlin": 9812, + "Ġorigin": 9813, + "Brien": 9814, + "Ġnorthwest": 9815, + "Ġcarriers": 9816, + "ĠRiley": 9817, + "Ġaud": 9818, + "Ġappreciation": 9819, + "Ġeliminated": 9820, + "ĠAnalyst": 9821, + "CR": 9822, + "Ġfirearm": 9823, + "Ġaccommodate": 9824, + "Ġstructural": 9825, + "Ġappealed": 9826, + "Ġcharter": 9827, + "ressing": 9828, + "Ġalike": 9829, + "white": 9830, + "Ġslowdown": 9831, + "Ġweigh": 9832, + "ĠPalmer": 9833, + "ound": 9834, + "ĠConn": 9835, + "Ġbranches": 9836, + "Ġace": 9837, + "Ġinsists": 9838, + "yo": 9839, + "ĠLynn": 9840, + "ĠCC": 9841, + "ĠWithin": 9842, + "Ġcoll": 9843, + "Ġsustain": 9844, + "Ġemerge": 9845, + "ĠBattle": 9846, + "VER": 9847, + "Ġaviation": 9848, + "Ġenables": 9849, + "ĠProduction": 9850, + "ĠGrove": 9851, + "Ġnationally": 9852, + "ĠBaldwin": 9853, + "rent": 9854, + "Ġfirearms": 9855, + "irm": 9856, + "Ġconsiders": 9857, + "ĠCosby": 9858, + "ĠMcK": 9859, + "ĠEnt": 9860, + "Ġincumbent": 9861, + "iance": 9862, + "Ġgiants": 9863, + "Ġkan": 9864, + "Ġminimal": 9865, + "ivity": 9866, + "ĠSay": 9867, + "ĠNass": 9868, + "Ġlovely": 9869, + "ĠFurthermore": 9870, + "Ġdisplaced": 9871, + "Ġcontacts": 9872, + "NY": 9873, + "Ġtechnological": 9874, + "ancy": 9875, + "Ġant": 9876, + "ope": 9877, + "ĠFY": 9878, + "Ġfavorable": 9879, + "ĠVirgin": 9880, + "Ġcasual": 9881, + "ĠLat": 9882, + "Ġpopulations": 9883, + "Ġromance": 9884, + "Ġforgotten": 9885, + "Ġfleeing": 9886, + "Ġspecialty": 9887, + "Ġdrill": 9888, + "Ġapplying": 9889, + "Ġcocaine": 9890, + "rea": 9891, + "Ġheroin": 9892, + "Ġsweeping": 9893, + "ĠMaj": 9894, + "Ġtroubled": 9895, + "Ġcolleague": 9896, + "Ġedged": 9897, + "omes": 9898, + "ĠHappy": 9899, + "´": 9900, + "Ġmilitant": 9901, + "boy": 9902, + "aver": 9903, + "Yes": 9904, + "llo": 9905, + "Ġsupporter": 9906, + "ĠSubscribe": 9907, + "ĠBird": 9908, + "ĠGibson": 9909, + "Ġhill": 9910, + "Ġnewspapers": 9911, + "ĠPHOTO": 9912, + "Ġouting": 9913, + "Ġdefine": 9914, + "Ġann": 9915, + "Ġrobot": 9916, + "Ġregret": 9917, + "ĠCould": 9918, + "raz": 9919, + "Ġceiling": 9920, + "Ġorganizers": 9921, + "ĠTw": 9922, + "Ġcriticised": 9923, + "ĠJoh": 9924, + "ĠJe": 9925, + "ĠBulls": 9926, + "Ġteeth": 9927, + "ĠRanch": 9928, + "ĠAndrea": 9929, + "Ġconservatives": 9930, + "Ġmag": 9931, + "vey": 9932, + "Ġpredecessor": 9933, + "ĠJPMorgan": 9934, + "Ġdraws": 9935, + "umber": 9936, + "Ġvaccine": 9937, + "ĠDas": 9938, + "Ġdisappeared": 9939, + "ĠIron": 9940, + "Ġlitigation": 9941, + "vert": 9942, + "Ġbelong": 
9943, + "ĠRet": 9944, + "owers": 9945, + "rain": 9946, + "controlled": 9947, + "ĠKil": 9948, + "Ġrehab": 9949, + "ĠAustria": 9950, + "Ġprivilege": 9951, + "Ġbounce": 9952, + "Ġbout": 9953, + "ĠIslamist": 9954, + "Ġtaxi": 9955, + "ody": 9956, + ".'\"": 9957, + "Ġdos": 9958, + "shire": 9959, + "Ġaccidents": 9960, + "Ġdemonstration": 9961, + "His": 9962, + "ĠBO": 9963, + "ĠICE": 9964, + "van": 9965, + "File": 9966, + "ĠManning": 9967, + "ounded": 9968, + "Ġdirections": 9969, + "lled": 9970, + "Ġoffences": 9971, + "Ġlaptop": 9972, + "ĠUniversal": 9973, + "Ġmilestone": 9974, + "ĠNarendra": 9975, + "Ġnotion": 9976, + "Ġuns": 9977, + "ĠLower": 9978, + "Ġmidfield": 9979, + "Ġoutper": 9980, + "trans": 9981, + "ĠJa": 9982, + "three": 9983, + "Adds": 9984, + "Ġpressures": 9985, + "Ġprohibited": 9986, + "Ġutilities": 9987, + "Ġbes": 9988, + "ĠReporter": 9989, + "Ġcommodities": 9990, + "leton": 9991, + "Ġslower": 9992, + "EE": 9993, + "auer": 9994, + "Ġtablet": 9995, + "sl": 9996, + "iously": 9997, + "Ġaiming": 9998, + "eland": 9999, + "ĠNEXT": 10000, + "tered": 10001, + "IVE": 10002, + "onic": 10003, + "May": 10004, + "ĠMilitary": 10005, + "Mark": 10006, + "Ġlender": 10007, + "mate": 10008, + "Ġaboard": 10009, + "they": 10010, + "Ġrespondents": 10011, + "Ġconversion": 10012, + "Ġsecuring": 10013, + "Ġentity": 10014, + "ĠHarbor": 10015, + "ĠCu": 10016, + "Ġcats": 10017, + "ĠACC": 10018, + "ĠIbrahim": 10019, + "GL": 10020, + "Ġinvitation": 10021, + "Ġcond": 10022, + "ĠRecords": 10023, + "ĠAdrian": 10024, + "Ġbrave": 10025, + "Ġmineral": 10026, + "Ġsooner": 10027, + "Ġsatisfied": 10028, + "Ġpets": 10029, + "Ġnotably": 10030, + "ı": 10031, + "Ġmarking": 10032, + "ĠRO": 10033, + "ĠHaw": 10034, + "ĠVis": 10035, + "Ġmarketplace": 10036, + "ĠNat": 10037, + "ĠForward": 10038, + "ĠLeft": 10039, + "Ġaggravated": 10040, + "ĠClose": 10041, + "acey": 10042, + "Ġlandmark": 10043, + "Ġdisruption": 10044, + "ĠChallenge": 10045, + "ĠDays": 10046, + "ĠCoun": 10047, + "ahan": 10048, + "Ġaides": 10049, + "South": 10050, + "ĠDylan": 10051, + "ĠRavens": 10052, + "ĠNature": 10053, + "lli": 10054, + "Ġdiplomats": 10055, + "350": 10056, + "ĠDrake": 10057, + "tag": 10058, + "Ġlicensed": 10059, + "ĠDenmark": 10060, + "Ġcancel": 10061, + "Ġinstant": 10062, + "DI": 10063, + "Ġpunch": 10064, + "ĠJenkins": 10065, + "Ġstrengthening": 10066, + "des": 10067, + "-$": 10068, + "Ġallegation": 10069, + "Ġsizes": 10070, + "iza": 10071, + "Ġmentally": 10072, + "ĠResidents": 10073, + "acked": 10074, + "Ġsensors": 10075, + ",'\"": 10076, + "illion": 10077, + "ĠChampion": 10078, + "Ġexcessive": 10079, + "Ġhum": 10080, + "ĠComp": 10081, + "rend": 10082, + "ĠLakes": 10083, + "Ġburst": 10084, + "Ġtrainer": 10085, + "Ġclearing": 10086, + "ĠSilicon": 10087, + "Ġ350": 10088, + "DE": 10089, + "ĠGates": 10090, + "ĠHorn": 10091, + "ests": 10092, + "ĠCourtesy": 10093, + "Ġbipartisan": 10094, + "Ġhabits": 10095, + "ĠAlexa": 10096, + "walk": 10097, + "Ġsnapped": 10098, + "ĠEight": 10099, + "itis": 10100, + "zel": 10101, + "Ġcustoms": 10102, + "Ġsouthwest": 10103, + "Ġvary": 10104, + "Because": 10105, + "Ġpayout": 10106, + "Ġaccelerate": 10107, + "ĠBarr": 10108, + "tu": 10109, + "Ġfined": 10110, + "cost": 10111, + "ĠTheater": 10112, + "ĠCorbyn": 10113, + "Ġstem": 10114, + "Ġundermine": 10115, + ".;": 10116, + "Ġstays": 10117, + "Ġbreakthrough": 10118, + "Ġturnover": 10119, + "hot": 10120, + "Ġtriumph": 10121, + "Ġpainted": 10122, + "ĠWinnipeg": 10123, + "ĠKas": 10124, + "ĠStuart": 10125, + "irk": 10126, + "Am": 10127, + "Ġtrusted": 10128, + "aze": 10129, + 
"ĠLate": 10130, + "Ġaccessories": 10131, + "Ġmemorable": 10132, + "ĠFool": 10133, + "Ġrotation": 10134, + "ĠBulldogs": 10135, + "ĠChen": 10136, + "Ġpoised": 10137, + "ĠMonte": 10138, + "ĠClarke": 10139, + "leading": 10140, + "Ġvenues": 10141, + "Ġbeneficial": 10142, + "ĠLiam": 10143, + "ĠBrothers": 10144, + "ĠNeed": 10145, + "Ġconc": 10146, + "olly": 10147, + "ĠJulian": 10148, + "ogue": 10149, + "Ġfounding": 10150, + "Ġsidelines": 10151, + "Ġdeclare": 10152, + "ĠMember": 10153, + "Ġexamine": 10154, + "abs": 10155, + "Ġboundaries": 10156, + "ĠBrisbane": 10157, + "Ġlaunches": 10158, + "lor": 10159, + "ĠGa": 10160, + "Ġthr": 10161, + "expected": 10162, + "wal": 10163, + "ĠBarnes": 10164, + "Ġclashes": 10165, + "content": 10166, + "ĠClemson": 10167, + "iger": 10168, + "Mar": 10169, + "Ġaccord": 10170, + "Ġsoutheast": 10171, + "ģ": 10172, + "ĠStarbucks": 10173, + "osing": 10174, + "Ġseasonal": 10175, + "icking": 10176, + "Ġloyalty": 10177, + "Ġtent": 10178, + "ĠDy": 10179, + "Ġevident": 10180, + "Ġlobby": 10181, + "Ġtours": 10182, + "Ġbombing": 10183, + "uations": 10184, + "Ġrises": 10185, + "Ġdemonstrations": 10186, + "ĠWATCH": 10187, + "pin": 10188, + "Ġdeb": 10189, + "ĠDraft": 10190, + "rog": 10191, + "Ġseal": 10192, + "ĠPerformance": 10193, + "ĠLGBT": 10194, + "Ġsed": 10195, + "Ġgig": 10196, + "nan": 10197, + "Ġrainfall": 10198, + "Ġfabric": 10199, + "Ġmanages": 10200, + "Ġlifting": 10201, + "ĠMagazine": 10202, + "ĠCriminal": 10203, + "Ġhikes": 10204, + "Ġcatching": 10205, + "Ġ1989": 10206, + "OG": 10207, + "Ġdisappointment": 10208, + "Ġir": 10209, + "ĠEV": 10210, + "stown": 10211, + "pass": 10212, + "120": 10213, + "Ġmedals": 10214, + "ĠSimmons": 10215, + "Ġinaugural": 10216, + "ĠCorn": 10217, + "Ġmotorcycle": 10218, + "lets": 10219, + "ĠSkype": 10220, + "ét": 10221, + "Ġscary": 10222, + "opp": 10223, + "thirds": 10224, + "ĠMohamed": 10225, + "Ġteenagers": 10226, + "ANK": 10227, + "Ġserver": 10228, + "Ġouts": 10229, + "Ġdishes": 10230, + "four": 10231, + "dr": 10232, + "ĠOt": 10233, + "ĠSandy": 10234, + "ĠShane": 10235, + "orters": 10236, + "SH": 10237, + "Ġtouching": 10238, + "ĠNike": 10239, + "ĠHBO": 10240, + "driving": 10241, + "Ġplug": 10242, + "ĠBaseball": 10243, + "eling": 10244, + "hn": 10245, + "ulate": 10246, + "eed": 10247, + "ĠChristine": 10248, + "ĠGlobe": 10249, + "Ġethics": 10250, + "ĠTrevor": 10251, + "iya": 10252, + "Ġ360": 10253, + "Ġawaiting": 10254, + "Ġcounterpart": 10255, + "Ġsubsidies": 10256, + "pointers": 10257, + "Ġspy": 10258, + "ILL": 10259, + "Ġtakeover": 10260, + "ĠBeyond": 10261, + "Ġsurprisingly": 10262, + "TION": 10263, + "ĠSong": 10264, + "Ġni": 10265, + "Ġcommonly": 10266, + "Ġjack": 10267, + "Ġsubstitute": 10268, + "ews": 10269, + "Ġrecalls": 10270, + "ĠCommons": 10271, + "Ġsin": 10272, + "del": 10273, + "ĠMod": 10274, + "Ġpressing": 10275, + "ĠTelevision": 10276, + "ĠInside": 10277, + "ª": 10278, + "Ġbacklash": 10279, + "Ġcredible": 10280, + "ĠJenner": 10281, + "ĠPu": 10282, + "ĠStevens": 10283, + "ĠWE": 10284, + "Last": 10285, + "Ġinsurers": 10286, + "ĠJoin": 10287, + "bled": 10288, + "digit": 10289, + "Ġflooded": 10290, + "ĠShore": 10291, + "ĠTrophy": 10292, + "zing": 10293, + "ĠImmigration": 10294, + "Ġsuperior": 10295, + "IAN": 10296, + "Ġcasino": 10297, + "Ġenabling": 10298, + "Ġmeantime": 10299, + "Ġperformers": 10300, + "Ġproportion": 10301, + "Ġlawmaker": 10302, + "ĠConf": 10303, + "Ġconvert": 10304, + "Ġfarmer": 10305, + "Ġbu": 10306, + "ĠGE": 10307, + "ĠRepresentative": 10308, + "ĠBannon": 10309, + "ĠHelp": 10310, + "PT": 10311, + "formed": 
10312, + "ĠSuperintendent": 10313, + "Ġfrustrating": 10314, + "ĠRegister": 10315, + "ĠPolitical": 10316, + "Ġboots": 10317, + "ĠRu": 10318, + "ĠSha": 10319, + "Ġinstrument": 10320, + "tor": 10321, + "ĠBelt": 10322, + "ĠWalsh": 10323, + "Ġrecipe": 10324, + "ilt": 10325, + "ĠClean": 10326, + "iors": 10327, + "Ġtwenty": 10328, + "iler": 10329, + "nder": 10330, + "Ġwinger": 10331, + "Ġwheat": 10332, + "ĠAviation": 10333, + "Ġcorrupt": 10334, + "Ġconnectivity": 10335, + "ĠVen": 10336, + "order": 10337, + "esc": 10338, + "break": 10339, + "Ġmetals": 10340, + "Ġtraditionally": 10341, + "Ġbell": 10342, + "Ġviolating": 10343, + "rough": 10344, + "Ġintroducing": 10345, + "Ġguided": 10346, + "ĠMol": 10347, + "Ġdesert": 10348, + "ĠBree": 10349, + "Le": 10350, + "ĠZone": 10351, + "ĠGlass": 10352, + "ĠEUR": 10353, + "ĠYahoo": 10354, + "Ġlaps": 10355, + "Ġdiffer": 10356, + "ĠHold": 10357, + "Ġtimely": 10358, + "Ġsuccessor": 10359, + "Ġcomic": 10360, + "Ġbears": 10361, + "Ġlicence": 10362, + "Ġreject": 10363, + "Ġsophisticated": 10364, + "Too": 10365, + "Ġobjectives": 10366, + "ĠId": 10367, + "urers": 10368, + "Ġraid": 10369, + "COM": 10370, + "Ġelect": 10371, + "ĠHampshire": 10372, + "Ġlens": 10373, + "Ġdesigners": 10374, + "Ġpresently": 10375, + "ĠRCMP": 10376, + "ĠEgyptian": 10377, + "ĠWalter": 10378, + "ĠWallace": 10379, + "Ġ2025": 10380, + "utics": 10381, + "ried": 10382, + "Ġrefuse": 10383, + "Ġsiblings": 10384, + "ĠNothing": 10385, + "Ġdressing": 10386, + "Ġnerve": 10387, + "AST": 10388, + "Ġuncertainties": 10389, + "Ġtale": 10390, + "ĠTalk": 10391, + "Ġissuing": 10392, + "shot": 10393, + "ĠTak": 10394, + "Ġacid": 10395, + "ĠNintendo": 10396, + "Ġwash": 10397, + "pd": 10398, + "ĠClaire": 10399, + "ĠScot": 10400, + "Ġsuits": 10401, + "ĠBayern": 10402, + "gest": 10403, + "Ġapplicable": 10404, + "Ġinteraction": 10405, + "ĠEnforcement": 10406, + "ĠRohingya": 10407, + "Ġjan": 10408, + "Ġunited": 10409, + "ĠCoalition": 10410, + "Ġlegislators": 10411, + "Ġdetectives": 10412, + "ĠSing": 10413, + "ĠBetween": 10414, + "ĠPoly": 10415, + "pool": 10416, + "mal": 10417, + "Ġreply": 10418, + "Ġschemes": 10419, + "ĠHolmes": 10420, + "ĠSenators": 10421, + "ĠVerizon": 10422, + "Ġwelcoming": 10423, + "ĠCricket": 10424, + "ĠMarco": 10425, + "ĠYears": 10426, + "ĠLiving": 10427, + "Ġcounterparts": 10428, + "ĠParadise": 10429, + "ĠTrad": 10430, + "#": 10431, + "iw": 10432, + "ĠSoccer": 10433, + "umbled": 10434, + "Ġdeceased": 10435, + "heim": 10436, + "Ġevaluation": 10437, + "Ġwrap": 10438, + "Ġmild": 10439, + "aji": 10440, + "ĠUCLA": 10441, + "ĠNative": 10442, + "president": 10443, + "ĠXbox": 10444, + "Ġenterprises": 10445, + "ĠSlam": 10446, + "oga": 10447, + "Rock": 10448, + "piece": 10449, + "ĠColeman": 10450, + "Ġcomparable": 10451, + "uba": 10452, + "Ġprovinces": 10453, + "ĠFormula": 10454, + "ipt": 10455, + "ô": 10456, + "Ġtick": 10457, + "ĠIMF": 10458, + "anch": 10459, + "atta": 10460, + "rew": 10461, + "However": 10462, + "LS": 10463, + "etta": 10464, + "ĠCustoms": 10465, + "SU": 10466, + "Ġpublishing": 10467, + "Ġinch": 10468, + "Ġkills": 10469, + "¤": 10470, + "ĠSus": 10471, + "ĠBeth": 10472, + "Ġsteam": 10473, + "jpg": 10474, + "pointer": 10475, + "Ġturnovers": 10476, + "Ġpowder": 10477, + "ĠUSB": 10478, + "ĠWildlife": 10479, + "ĠDirect": 10480, + "atively": 10481, + "ĠFerrari": 10482, + "Ġpleasure": 10483, + "ĠMatthews": 10484, + "Ġski": 10485, + "ography": 10486, + "ĠVermont": 10487, + "ĠMargaret": 10488, + "ĠMunich": 10489, + "Ġlayer": 10490, + "ĠProperty": 10491, + "Ġeconomics": 10492, + "ĠCrew": 10493, + 
"UK": 10494, + "Ġunnecessary": 10495, + "ĠGlasgow": 10496, + "Ġsealed": 10497, + "Ġclarity": 10498, + "Ġsurplus": 10499, + "ĠCanyon": 10500, + "ĠApart": 10501, + "Ġacceptance": 10502, + "ĠEllis": 10503, + "uster": 10504, + "rid": 10505, + "ĠHawks": 10506, + "Ġstatewide": 10507, + "Ġthreaten": 10508, + "ĠJail": 10509, + "Ġinclusive": 10510, + "Ġmud": 10511, + "Ġpat": 10512, + "Ġbitter": 10513, + "Ġalternatives": 10514, + "Ġaffiliate": 10515, + "Ġevaluate": 10516, + "ĠBaby": 10517, + "Ġperception": 10518, + "tim": 10519, + "Ġrefusing": 10520, + "Ġgrey": 10521, + "Ġarguably": 10522, + "Ġfirmly": 10523, + "ĠDark": 10524, + "Ġexcuse": 10525, + "ĠRaymond": 10526, + "Ġballots": 10527, + "inton": 10528, + "Ġ125": 10529, + "ĠCatherine": 10530, + "Ġsacks": 10531, + "ĠDeb": 10532, + "Ġworkout": 10533, + "web": 10534, + "Ġbatteries": 10535, + "breaking": 10536, + "ML": 10537, + "Ġunacceptable": 10538, + "ĠValentine": 10539, + "ĠYOU": 10540, + "ĠRT": 10541, + "Ġjurisdiction": 10542, + "Ġexamined": 10543, + "strom": 10544, + "ĠPocket": 10545, + "Ġcement": 10546, + "Ġuniversal": 10547, + "ĠOz": 10548, + "Ġkit": 10549, + "Ġchurches": 10550, + "Ġsuburban": 10551, + "ĠKushner": 10552, + "ĠDavidson": 10553, + "Sports": 10554, + "email": 10555, + "Ġrealistic": 10556, + "Ġintend": 10557, + "ĠGrey": 10558, + ",''": 10559, + "Ġscholarship": 10560, + "Ġphilosophy": 10561, + "Ġwheels": 10562, + "Ġmotivation": 10563, + "eway": 10564, + "match": 10565, + "ĠDate": 10566, + "John": 10567, + "Ġcontrolling": 10568, + "750": 10569, + "aven": 10570, + "Ġfilmed": 10571, + "Ġ160": 10572, + "ĠBrock": 10573, + "ĠDetails": 10574, + "Ġlogistics": 10575, + "Ġassumptions": 10576, + "ĠStep": 10577, + "Ġfails": 10578, + "ĠNotre": 10579, + "Ġjuice": 10580, + "Ġcounting": 10581, + "Ġphotograph": 10582, + "Ġfortunate": 10583, + "Ġestablishing": 10584, + "ĠNJ": 10585, + "ĠWorkers": 10586, + "ĠQuinn": 10587, + "ĠHeather": 10588, + "Ġtimeline": 10589, + "Ġimported": 10590, + "ĠNASCAR": 10591, + "Ġexercises": 10592, + "Ġsearched": 10593, + "ĠRalph": 10594, + "alf": 10595, + "Ġgene": 10596, + "Ġdependent": 10597, + "én": 10598, + "iate": 10599, + "ĠBristol": 10600, + "Ġhung": 10601, + "Ġtropical": 10602, + "Ġintensity": 10603, + "ĠIdaho": 10604, + "ĠMull": 10605, + "Ġsuite": 10606, + "Ġblockchain": 10607, + "cz": 10608, + "ovich": 10609, + "Ġworn": 10610, + "ĠLE": 10611, + "AV": 10612, + "emi": 10613, + "Ġidentification": 10614, + "Ġtunnel": 10615, + "ĠARE": 10616, + "ĠArm": 10617, + "Ġoutrage": 10618, + "Ġtwist": 10619, + "uka": 10620, + "ĠGra": 10621, + "Ġjets": 10622, + "ĠThus": 10623, + "Ġcompound": 10624, + "Ġfinancially": 10625, + "2019": 10626, + "asse": 10627, + "Ġspare": 10628, + "ĠNoah": 10629, + "ĠMade": 10630, + "ĠMom": 10631, + "Ġphenomenon": 10632, + "Ġnurses": 10633, + "Ġoutlined": 10634, + "Ġpolit": 10635, + "ĠCarm": 10636, + "Ġleagues": 10637, + "Ġmath": 10638, + "Ġmodified": 10639, + "Ġwillingness": 10640, + "ĠAmanda": 10641, + "Ġgrandfather": 10642, + "Of": 10643, + "DR": 10644, + "Ġdip": 10645, + "ĠRAM": 10646, + "ĠChristie": 10647, + "Ġargues": 10648, + "ĠEX": 10649, + "ĠNine": 10650, + "ĠScroll": 10651, + "ĠTHIS": 10652, + "Pro": 10653, + "Ġkeys": 10654, + "Ġprocessor": 10655, + "Ġscam": 10656, + "ĠTraining": 10657, + "Ġhoney": 10658, + "Ĵ": 10659, + "Ġfacebook": 10660, + "ĠLegal": 10661, + "Ġaging": 10662, + "Ġspiritual": 10663, + "ĠHost": 10664, + "Ġlung": 10665, + "ĠUSC": 10666, + "Ġdirt": 10667, + "Ġfe": 10668, + "after": 10669, + "ĠDiana": 10670, + "Ġounce": 10671, + "date": 10672, + "ĠFinals": 10673, + "Ķ": 
10674, + "Ġthorough": 10675, + "Ġviable": 10676, + "Ġanytime": 10677, + "Ġfost": 10678, + "orter": 10679, + "ware": 10680, + "ĠHolland": 10681, + "ĠMand": 10682, + "ĠSend": 10683, + "2013": 10684, + "ĠVolkswagen": 10685, + "Ġsuitable": 10686, + "ifies": 10687, + "Ġcomedian": 10688, + "Ġneighbours": 10689, + "ĠKnow": 10690, + "Ġcurious": 10691, + "ĠTwenty": 10692, + "ĠPrevention": 10693, + "ĠStephanie": 10694, + "Ġpilots": 10695, + "Ġstored": 10696, + "Ġdire": 10697, + "Ġfits": 10698, + "ision": 10699, + "ĠShell": 10700, + "Ġshifts": 10701, + "Ġpepper": 10702, + "Ġattendees": 10703, + "ĠName": 10704, + "hers": 10705, + "rip": 10706, + "Ġwatchdog": 10707, + "andy": 10708, + "Ġbio": 10709, + "Ġpublisher": 10710, + "powered": 10711, + "ĠCM": 10712, + "rian": 10713, + "ĠRand": 10714, + "wise": 10715, + "ĠJesse": 10716, + "Ġladies": 10717, + "ĠMetropolitan": 10718, + "ĠMicro": 10719, + "Ġkicking": 10720, + "Ġmeg": 10721, + "Ġclouds": 10722, + "Ġtrim": 10723, + "wear": 10724, + "ĠML": 10725, + "Ġconsists": 10726, + "Ġrig": 10727, + "Ġhonestly": 10728, + "GS": 10729, + "ĠNicholas": 10730, + "Ġcope": 10731, + "Ġpublish": 10732, + "working": 10733, + "bur": 10734, + "ĠNar": 10735, + "olds": 10736, + "aja": 10737, + "ĠSad": 10738, + "Ġclicking": 10739, + "Ġbids": 10740, + "ĠZuckerberg": 10741, + "Ġ900": 10742, + "Ġexam": 10743, + "ivers": 10744, + "Ġpray": 10745, + "Ġreader": 10746, + "ĠSeth": 10747, + "inem": 10748, + "Ġconfront": 10749, + "stra": 10750, + "AW": 10751, + "ĠGian": 10752, + "Ġaccordance": 10753, + "Ġinteract": 10754, + "ĠSharks": 10755, + "Ġfireworks": 10756, + "gment": 10757, + "illy": 10758, + "Ġconst": 10759, + "ARY": 10760, + "Ġprizes": 10761, + "Ġshoulders": 10762, + "Ġaccessed": 10763, + "Ġecosystem": 10764, + "Ġlicensing": 10765, + "La": 10766, + "Ġdedication": 10767, + "Ġdé": 10768, + "Ġyouths": 10769, + "lem": 10770, + "Ġtoy": 10771, + "ĠProm": 10772, + "ounding": 10773, + "rod": 10774, + "Ġ1000": 10775, + "ishes": 10776, + "Over": 10777, + "Ġgaps": 10778, + "Ġmissions": 10779, + "Ġrailway": 10780, + "Day": 10781, + "orp": 10782, + "ĠSchumer": 10783, + "Ġeclipse": 10784, + "Ġshell": 10785, + "ĠBY": 10786, + "Many": 10787, + "ĠRecord": 10788, + "Ġdrunk": 10789, + "ayan": 10790, + "Ġsuggestion": 10791, + "Ġdefenders": 10792, + "ĠNewton": 10793, + "Ġdisputes": 10794, + "Ġevolution": 10795, + "Ġcredibility": 10796, + "ĠTenn": 10797, + "Ġplain": 10798, + "size": 10799, + "cont": 10800, + "Ġlone": 10801, + "Ġfingers": 10802, + "BUR": 10803, + "ĠInvestigation": 10804, + "ĠQualcomm": 10805, + "var": 10806, + "Ġcountless": 10807, + "ĠRebecca": 10808, + "½": 10809, + "abi": 10810, + "Ġreflecting": 10811, + "ĠTurn": 10812, + "Ġinteractive": 10813, + "Ġincentive": 10814, + "second": 10815, + "offs": 10816, + "ĠBerkeley": 10817, + "ĠTexans": 10818, + "Ġheated": 10819, + "Ġscorer": 10820, + "ĠSharif": 10821, + "Ġmigrant": 10822, + "west": 10823, + "ĠHoliday": 10824, + "Ġwrist": 10825, + "Ġchairs": 10826, + "Ġrecommends": 10827, + "ĠWildcats": 10828, + "ĠPed": 10829, + "ĠQuarter": 10830, + "ĠIV": 10831, + "ĠArch": 10832, + "Ġstandings": 10833, + "Ġbombs": 10834, + "Ġcapped": 10835, + "Can": 10836, + "Ġcaring": 10837, + "ĠLah": 10838, + "lim": 10839, + "Ġdragged": 10840, + "ĠBeat": 10841, + "DB": 10842, + "Ġaired": 10843, + "Ġjeans": 10844, + "action": 10845, + "Ġgenerating": 10846, + "ĠGir": 10847, + "risk": 10848, + "lon": 10849, + "stage": 10850, + "âĤ¬": 10851, + "earing": 10852, + "ĠTogether": 10853, + "Ġreun": 10854, + "ĠCorey": 10855, + "ĠBak": 10856, + "Ġprestigious": 10857, + 
"Ġapplicants": 10858, + "here": 10859, + "ĠMattis": 10860, + "Ġridiculous": 10861, + "ĠLess": 10862, + "Ġrains": 10863, + "Ġpresenting": 10864, + "anti": 10865, + "Ġdisabilities": 10866, + "Ġapartments": 10867, + "storm": 10868, + "ĠHem": 10869, + "Ġhabit": 10870, + "ĠRuth": 10871, + "ĠNPR": 10872, + "nut": 10873, + "Ġappreciated": 10874, + "Ġseparation": 10875, + "uda": 10876, + "Ġminus": 10877, + "ĠPhotos": 10878, + "Ġblew": 10879, + "ĠVoice": 10880, + "Ġrallies": 10881, + "Ġfond": 10882, + "ĠTaking": 10883, + "yt": 10884, + "FE": 10885, + "ĠTory": 10886, + "ressed": 10887, + "ĠLy": 10888, + "Ġrocks": 10889, + "ĠRah": 10890, + "Ġelementary": 10891, + "nis": 10892, + "ĠPresidential": 10893, + "Ġnutrition": 10894, + "Ġbaseman": 10895, + "Ġsuperstar": 10896, + "ĠWa": 10897, + "lar": 10898, + "Ġstaged": 10899, + "ĠLearn": 10900, + "Ġbroadcaster": 10901, + "Ġboasts": 10902, + "Ġdoubts": 10903, + "rum": 10904, + "Ġbare": 10905, + "cap": 10906, + "Ġclimbing": 10907, + "ĠSelect": 10908, + "ĠCant": 10909, + "ĠNord": 10910, + "ĠBeck": 10911, + "ĠKad": 10912, + "ello": 10913, + "Ġenforce": 10914, + "ĠZe": 10915, + "ked": 10916, + "elly": 10917, + "ĠLED": 10918, + "ĠOperations": 10919, + "ĠLuk": 10920, + "Ġcertificate": 10921, + "Ġdeter": 10922, + "Ġspill": 10923, + "Ġgrain": 10924, + "league": 10925, + "Up": 10926, + "ĠKid": 10927, + "using": 10928, + "ĠJays": 10929, + "Ġoccasionally": 10930, + "ĠMI": 10931, + "yes": 10932, + "Ġdetect": 10933, + "Ġpropaganda": 10934, + "Ġneighboring": 10935, + "sub": 10936, + "avan": 10937, + "ĠAstros": 10938, + "oti": 10939, + "threatening": 10940, + "Ġshorter": 10941, + "INGS": 10942, + "Ġfeeding": 10943, + "Ġelevated": 10944, + "ĠWenger": 10945, + "Ġundergo": 10946, + "Ġpsychological": 10947, + "Ġautom": 10948, + "NP": 10949, + "anks": 10950, + "ĠNokia": 10951, + "Ġdrones": 10952, + "Ġrecognised": 10953, + "Ġheroes": 10954, + "agen": 10955, + "Ġparole": 10956, + "ĠBah": 10957, + "Ġhomeowners": 10958, + "ĠSweet": 10959, + "Ġinstances": 10960, + "ĠParish": 10961, + "ĠSL": 10962, + "Ġunw": 10963, + "Ġdelicious": 10964, + "¯": 10965, + "ĠInvestments": 10966, + "ĠPhilippine": 10967, + "inos": 10968, + "Ġmes": 10969, + "Ġbite": 10970, + "Ġcornerback": 10971, + "ĠHat": 10972, + "Ġdeserved": 10973, + "ologists": 10974, + "[": 10975, + "Ġwrongdoing": 10976, + "ĠTrent": 10977, + "ĠVe": 10978, + "ĠDeal": 10979, + "Mr": 10980, + "Ġovers": 10981, + "Ġhonors": 10982, + "ĠITV": 10983, + "Ġpayroll": 10984, + "Ġconfused": 10985, + "Ġelaborate": 10986, + "ange": 10987, + "World": 10988, + "ĠResort": 10989, + "ilia": 10990, + "ĠKr": 10991, + "Ġconclude": 10992, + "First": 10993, + "ĠDR": 10994, + "Ġpeer": 10995, + "Ġrunway": 10996, + "ĠPotter": 10997, + "cons": 10998, + "bad": 10999, + "si": 11000, + "ĠClimate": 11001, + "ĠHoll": 11002, + "Ġweighing": 11003, + "Ġepidemic": 11004, + "ĠBible": 11005, + "Ġhon": 11006, + "Ġrenew": 11007, + "Ġgambling": 11008, + "ĠNationals": 11009, + "itable": 11010, + "ĠOutlook": 11011, + "Ġreactions": 11012, + "ĠCos": 11013, + "ĠDana": 11014, + "India": 11015, + "ĠAirbus": 11016, + "power": 11017, + "watch": 11018, + "Ġstyles": 11019, + "Ġordinance": 11020, + "Ġcam": 11021, + "Ġinvent": 11022, + "ĠDurant": 11023, + "Ġexchanged": 11024, + "Ġyoga": 11025, + "ĠMichel": 11026, + "ĠWyoming": 11027, + "ĠPhase": 11028, + "ĠHannah": 11029, + "Ġtem": 11030, + "Ġfare": 11031, + "omer": 11032, + "Ġtrails": 11033, + "Ġquietly": 11034, + "ĠFourth": 11035, + "Ġwise": 11036, + "Ġappetite": 11037, + "Ġpedestrian": 11038, + "Ġfierce": 11039, + "hin": 11040, + "ako": 
11041, + "Ġvacant": 11042, + "Ġdynamics": 11043, + "Ġbust": 11044, + "ĠGT": 11045, + "century": 11046, + "Ġpermitted": 11047, + "Ġfog": 11048, + "Ġrecruitment": 11049, + "ĠDue": 11050, + "Ġbro": 11051, + "Ġsil": 11052, + "ĠOpp": 11053, + "Ġphrase": 11054, + "ĠChip": 11055, + "ĠBase": 11056, + "Ġjazz": 11057, + "Ġenemies": 11058, + "Ġremainder": 11059, + "bles": 11060, + "Ġ105": 11061, + "ĠGur": 11062, + "Ġretiring": 11063, + "ĠCour": 11064, + "ĠSi": 11065, + "Ġinevitable": 11066, + "ĠAdvisory": 11067, + "ĠCampaign": 11068, + "ĠPeninsula": 11069, + "base": 11070, + "Ġjustify": 11071, + "inen": 11072, + "North": 11073, + "Ġfreezing": 11074, + "Ġphotography": 11075, + "Ġappointments": 11076, + "ĠTree": 11077, + "Os": 11078, + "Ġdivide": 11079, + "ĠMMA": 11080, + "Ġdeclines": 11081, + "ĠAbbott": 11082, + "ACH": 11083, + "ĠJah": 11084, + "Ġspr": 11085, + "Ġskilled": 11086, + "ĠTry": 11087, + "ANT": 11088, + "ael": 11089, + "ĠMcN": 11090, + "Ġtariff": 11091, + "generation": 11092, + "ĠMans": 11093, + "Or": 11094, + "Ġraped": 11095, + "Ġdisability": 11096, + "Ġnominations": 11097, + "Ġhappiness": 11098, + "ĠLSU": 11099, + "ĠInterstate": 11100, + "ĠDance": 11101, + "ĠMaking": 11102, + "Ġbailout": 11103, + "oro": 11104, + "ĠObviously": 11105, + "Ġinbox": 11106, + "football": 11107, + "hy": 11108, + "ĠCase": 11109, + "Ġentertaining": 11110, + "Ġhardest": 11111, + "ĠOpposition": 11112, + "Ġflip": 11113, + "ĠPirates": 11114, + "anu": 11115, + "ĠKlopp": 11116, + "Ġballistic": 11117, + "Ġprinted": 11118, + "ĠNFC": 11119, + "UST": 11120, + "Ġglasses": 11121, + "Ġrum": 11122, + "ĠDuncan": 11123, + "hal": 11124, + "Ġpreview": 11125, + "BER": 11126, + "dec": 11127, + "Ġsustainability": 11128, + "Ġaff": 11129, + "Ġhungry": 11130, + "service": 11131, + "avi": 11132, + "Ġsometime": 11133, + "Ġmod": 11134, + "ĠLib": 11135, + "oko": 11136, + "Ġfundraiser": 11137, + "Ġcrowded": 11138, + "mates": 11139, + "Ġcreativity": 11140, + "ĠHell": 11141, + "Ġtreaty": 11142, + "ĠSoftware": 11143, + "ĠRandy": 11144, + "ĠPolish": 11145, + "sa": 11146, + "ardi": 11147, + "Ġcab": 11148, + "ĠCamera": 11149, + "Ġlicenses": 11150, + "Ġ1988": 11151, + "Ġcontinuous": 11152, + "Ġpaired": 11153, + "Ġtally": 11154, + "Ġgrip": 11155, + "cho": 11156, + "Ġsurged": 11157, + "Ġpodium": 11158, + "Ġcontrary": 11159, + "SL": 11160, + "ĠResearchers": 11161, + "cing": 11162, + "Ġmi": 11163, + "Ġdisputed": 11164, + "Ġgrades": 11165, + "Ġseverely": 11166, + "ĠMcL": 11167, + "ondo": 11168, + "Ġshelters": 11169, + "Ġdomain": 11170, + "ĠSwitch": 11171, + "Ġtestify": 11172, + "case": 11173, + "omet": 11174, + "atch": 11175, + "ĠAff": 11176, + "Ġcasting": 11177, + "berger": 11178, + "Ġintimate": 11179, + "erc": 11180, + "plan": 11181, + "ĠPast": 11182, + "ĠUt": 11183, + "Ġapologized": 11184, + "ĠDet": 11185, + "alle": 11186, + "Ġwhilst": 11187, + "Ġpel": 11188, + "Ġexecute": 11189, + "Ġharmful": 11190, + "ĠRB": 11191, + "onda": 11192, + "ĠFul": 11193, + "II": 11194, + "Those": 11195, + "Ġcryptocurrency": 11196, + "Ġrealise": 11197, + "ĠAthens": 11198, + "ĠApplication": 11199, + "ORD": 11200, + "Ġmidst": 11201, + "ĠSem": 11202, + "Ġmessaging": 11203, + "Ġcousin": 11204, + "ĠMarsh": 11205, + "ĠAlmost": 11206, + "uto": 11207, + "wire": 11208, + "ĠManaging": 11209, + "Ġsends": 11210, + "ĠDerby": 11211, + "Ġpad": 11212, + "Ġdevoted": 11213, + "ĠWorking": 11214, + "ĠWestminster": 11215, + "Ġdirty": 11216, + "ements": 11217, + "ĠLew": 11218, + "door": 11219, + "Ġadvisor": 11220, + "ival": 11221, + "Ġsubscribe": 11222, + "Ġcredited": 11223, + "Ġpressed": 11224, 
+ "Ġbrick": 11225, + "Ġrehabilitation": 11226, + "Ġ\"[": 11227, + "erry": 11228, + "Ġtransformed": 11229, + "arp": 11230, + "Ġreceivers": 11231, + "ĠFan": 11232, + "ĠKris": 11233, + "ĠCharlottesville": 11234, + "Ġste": 11235, + "Ġconstructed": 11236, + "Ġbroadly": 11237, + "ĠBetter": 11238, + "ĠJanet": 11239, + "Ġenthusiasm": 11240, + "ĠIrving": 11241, + "ĠConst": 11242, + "Everyone": 11243, + "agn": 11244, + "ĠCrawford": 11245, + "Ġregards": 11246, + "ĠBurns": 11247, + "Ġjokes": 11248, + "erg": 11249, + "ARD": 11250, + "apped": 11251, + "Ġtravelled": 11252, + "ĠPoor": 11253, + "ĠHolly": 11254, + "Ġcontainer": 11255, + "Ġinfected": 11256, + "Ġlean": 11257, + "ĠWould": 11258, + "Ġmagnitude": 11259, + "ĠDou": 11260, + "minded": 11261, + "Ġpastor": 11262, + "Ġwherever": 11263, + "ulation": 11264, + "Ġ1986": 11265, + "ĠMegan": 11266, + "Ġgraphic": 11267, + "Ġtalents": 11268, + "Ġkn": 11269, + "ĠEC": 11270, + "ĠMcM": 11271, + "ĠKon": 11272, + "eni": 11273, + "ĠEsc": 11274, + "inas": 11275, + "ĠNom": 11276, + "Ġchasing": 11277, + "arl": 11278, + "ĠHungary": 11279, + "Ġmainland": 11280, + "ĠDist": 11281, + "utes": 11282, + "Ġrubber": 11283, + "iat": 11284, + "ĠMorrison": 11285, + "ushing": 11286, + "iny": 11287, + "Ġcopies": 11288, + "ĠFat": 11289, + "agged": 11290, + "Ġfloating": 11291, + "ĠCurtis": 11292, + "Ġfatally": 11293, + "ĠManuel": 11294, + "Ġgraduates": 11295, + "nar": 11296, + "ĠKenny": 11297, + "Ġretreat": 11298, + "Ġretro": 11299, + "ĠPierre": 11300, + "listed": 11301, + "ĠDale": 11302, + "ding": 11303, + "Ġintentions": 11304, + "Ġsentences": 11305, + "ĠSere": 11306, + "Ġinvasion": 11307, + "Ġpremiums": 11308, + "ĠGardner": 11309, + "Ġshipments": 11310, + "Ġcol": 11311, + "bell": 11312, + "ilo": 11313, + "Ġworthy": 11314, + "Ġinterceptions": 11315, + "Ġcomplain": 11316, + "icle": 11317, + "ĠTah": 11318, + "ĠMt": 11319, + "ĠSyracuse": 11320, + "Since": 11321, + "aches": 11322, + "ĠCand": 11323, + "Ġinteractions": 11324, + "ĠShawn": 11325, + "nc": 11326, + "Ġtheaters": 11327, + "ART": 11328, + "Th": 11329, + "Ġalter": 11330, + "aley": 11331, + "imo": 11332, + "Ġresponders": 11333, + "kan": 11334, + "ĠDarren": 11335, + "Ġdeliveries": 11336, + "PI": 11337, + "125": 11338, + "Ġlaughing": 11339, + "ĠPatterson": 11340, + "Ġinfections": 11341, + "Ġtur": 11342, + "130": 11343, + "Ġhackers": 11344, + "Ġwarn": 11345, + "Ġfreeze": 11346, + "Ġscreaming": 11347, + "ĠEcho": 11348, + "ĠDom": 11349, + "MAN": 11350, + "ĠJoy": 11351, + "Ġbeneath": 11352, + "ĠHalf": 11353, + "Ġpatent": 11354, + "Ġugly": 11355, + "Ġlip": 11356, + "Ġnominees": 11357, + "ĠGrade": 11358, + "Ġinfluenced": 11359, + "Ġabilities": 11360, + "Ġlimiting": 11361, + "Ġsmell": 11362, + "Ġesc": 11363, + "ĠBernard": 11364, + "cs": 11365, + "ĠMyers": 11366, + "oted": 11367, + "Black": 11368, + "Ġlim": 11369, + "Ġsworn": 11370, + "ĠBlair": 11371, + "anes": 11372, + "ĠEvent": 11373, + "Ġmature": 11374, + "Ġpositioned": 11375, + "Ġerupted": 11376, + "grand": 11377, + "ĠTell": 11378, + "Ġbackdrop": 11379, + "Ġyeah": 11380, + "ĠClear": 11381, + "Ġsignificance": 11382, + "Ġpatience": 11383, + "ĠWing": 11384, + "Ġhorrible": 11385, + "Ġdeploy": 11386, + "ipe": 11387, + "Ġbitcoin": 11388, + "Ġcommitting": 11389, + "Ġdismiss": 11390, + "ĠBlood": 11391, + "ĠMeyer": 11392, + "selling": 11393, + "Ġregarded": 11394, + "Ġlottery": 11395, + "ĠLuther": 11396, + "Ġpipe": 11397, + "Ġcro": 11398, + "ĠANC": 11399, + "ĠSolar": 11400, + "Ġsimilarly": 11401, + "Ġham": 11402, + "ĠHonor": 11403, + "tar": 11404, + "gin": 11405, + "ĠArmstrong": 11406, + 
"Ġbrowser": 11407, + "agon": 11408, + "via": 11409, + "Ġentries": 11410, + "Ġinfl": 11411, + "Ġgraduation": 11412, + "Ġalleges": 11413, + "ĠLoading": 11414, + "Ġsuperb": 11415, + "ially": 11416, + "Ġadministrator": 11417, + "uls": 11418, + "Ġartistic": 11419, + "ĠANGEL": 11420, + "ĠBang": 11421, + "Ġfossil": 11422, + "¨": 11423, + "Ġpoly": 11424, + "ĠGuardiola": 11425, + "ĠPerth": 11426, + "Ġeducate": 11427, + "Cl": 11428, + "Ġcommittees": 11429, + "Ġforthcoming": 11430, + "Ġadjustments": 11431, + "count": 11432, + "Ġincoming": 11433, + "brook": 11434, + "ĠMinneapolis": 11435, + "Ġgown": 11436, + "ĠCroatia": 11437, + "host": 11438, + "Ġcompetitor": 11439, + "Ġlyrics": 11440, + "Ġbelonging": 11441, + "ĠFrances": 11442, + "ĠHaley": 11443, + "ĠBruins": 11444, + "Ġmask": 11445, + "ĠPv": 11446, + "dollar": 11447, + "Ġbowling": 11448, + "Ġjewelry": 11449, + "ĠJulia": 11450, + "Ġbroadband": 11451, + "ĠBhar": 11452, + "ĠArmed": 11453, + "vy": 11454, + "government": 11455, + "kov": 11456, + "Ġpremises": 11457, + "Ġjersey": 11458, + "Ġapplies": 11459, + "ĠFreeman": 11460, + "Ġgrows": 11461, + "ĠEquity": 11462, + "Ġmaterially": 11463, + "Ġfigured": 11464, + "ience": 11465, + "Ġmajors": 11466, + "ĠYe": 11467, + "ĠHey": 11468, + "oned": 11469, + "aping": 11470, + "Ġtoilet": 11471, + "ĠConnor": 11472, + "Ġavoiding": 11473, + "pos": 11474, + "Once": 11475, + "ĠRockets": 11476, + "ĠSnapchat": 11477, + "Go": 11478, + "Ġsolidarity": 11479, + "ĠAffordable": 11480, + "Ġdial": 11481, + "ĠOmar": 11482, + "xt": 11483, + "ĠVatican": 11484, + "anta": 11485, + "ĠSuperior": 11486, + "Ġbeaches": 11487, + "ĠKi": 11488, + "Ã¥": 11489, + "KY": 11490, + "Ġgro": 11491, + "ĠEmpire": 11492, + "Ġoccurs": 11493, + "Ġjoked": 11494, + "Ġquotes": 11495, + "ĠSaskatchewan": 11496, + "pert": 11497, + "Ġmaintains": 11498, + "olt": 11499, + "Ġupgrades": 11500, + "ĠCho": 11501, + "ĠAlexis": 11502, + "ĠHundreds": 11503, + "ĠBud": 11504, + "Ġcenturies": 11505, + "ĠInvestor": 11506, + "ĠGomez": 11507, + "Ġconceded": 11508, + "Ġexpressing": 11509, + "ĠIBM": 11510, + "Ġadvancing": 11511, + "ĠDollar": 11512, + "jer": 11513, + "Ġexceed": 11514, + "author": 11515, + "rist": 11516, + "seat": 11517, + "ĠPrimary": 11518, + "ĠForbes": 11519, + "ĠAlzheimer": 11520, + "Ġdevastated": 11521, + "Ġawful": 11522, + "ĠStudio": 11523, + "Ġbullpen": 11524, + "Ġmobility": 11525, + "Ġanalyze": 11526, + "lie": 11527, + "AFP": 11528, + "iche": 11529, + "ĠRoyals": 11530, + "Ġcoupled": 11531, + "Ġdug": 11532, + "ĠRing": 11533, + "Ġenvironments": 11534, + "national": 11535, + "ĠCongo": 11536, + "Ġalleging": 11537, + "wn": 11538, + "ulating": 11539, + "Ġur": 11540, + "Ġreaches": 11541, + "ĠPine": 11542, + "Ġthreshold": 11543, + "Ġtournaments": 11544, + "Ġheating": 11545, + "ĠGard": 11546, + "ĠHamas": 11547, + "Ġ«": 11548, + "ĠHolding": 11549, + "Ġpossibilities": 11550, + "ĠHassan": 11551, + "ĠMohammad": 11552, + "Ġoffenders": 11553, + "Ġautomated": 11554, + "Ġrealised": 11555, + "ouse": 11556, + "building": 11557, + "ĠDub": 11558, + "ĠGeneva": 11559, + "Ġfacial": 11560, + "ĠRestaurant": 11561, + "ĠNg": 11562, + "Ġtot": 11563, + "Ġgrace": 11564, + "ĠCP": 11565, + "Ġposter": 11566, + "hart": 11567, + "ĠNi": 11568, + "Ġreaff": 11569, + "Ġprov": 11570, + "Ġ111": 11571, + "ĠAid": 11572, + "Ġscrap": 11573, + "izers": 11574, + "ogen": 11575, + "Ġtissue": 11576, + "Ġvibrant": 11577, + "Ġrider": 11578, + "CD": 11579, + "ĠKitchen": 11580, + "Ġgenre": 11581, + "¬": 11582, + "depth": 11583, + "kind": 11584, + "Ġendorsed": 11585, + "Ġsimultaneously": 11586, + "Ġintern": 11587, 
+ "ĠDrag": 11588, + "Ġembraced": 11589, + "Ġcounted": 11590, + "uj": 11591, + "ĠOg": 11592, + "Ġphysician": 11593, + "ĠIR": 11594, + "IST": 11595, + "ĠKir": 11596, + "Ġhacking": 11597, + "ĠSources": 11598, + "astic": 11599, + "growing": 11600, + "ĠWake": 11601, + "Ġhint": 11602, + "Ġcompiled": 11603, + "Ġreign": 11604, + "Ġcinema": 11605, + "Ġboosting": 11606, + "Ġaccommodation": 11607, + "ĠEuropa": 11608, + "Ġsubsidiaries": 11609, + "Ġclosures": 11610, + "ĠBil": 11611, + "ĠBou": 11612, + "wh": 11613, + "ĠAw": 11614, + "FT": 11615, + "hole": 11616, + "ĠNova": 11617, + "ĠNSW": 11618, + "Ġrap": 11619, + "Ġencourages": 11620, + "GR": 11621, + "ds": 11622, + "ĠMuk": 11623, + "ĠSurvey": 11624, + "ĠReagan": 11625, + "oning": 11626, + "Ġneighbouring": 11627, + "ĠMcCl": 11628, + "acht": 11629, + "Ġfinishes": 11630, + "ĠEsp": 11631, + "pat": 11632, + "Ġdestinations": 11633, + "ĠWagner": 11634, + "Ġconfronted": 11635, + "square": 11636, + "Ġpie": 11637, + "brand": 11638, + "hl": 11639, + "Ġabsent": 11640, + "Ġsurf": 11641, + "Ġrifle": 11642, + "ĠSS": 11643, + "ĠDeath": 11644, + "wich": 11645, + "Ġbeds": 11646, + "ĠLock": 11647, + "ĠAgu": 11648, + "atives": 11649, + "jee": 11650, + "Ġoral": 11651, + "Ġbudgets": 11652, + "Ġinspiring": 11653, + "IONS": 11654, + "works": 11655, + "Ġspirits": 11656, + "Ġcabin": 11657, + "Ġsatisfaction": 11658, + "Ġvoluntary": 11659, + "ĠMunicipal": 11660, + "Ġdeportation": 11661, + "ĠWriter": 11662, + "ĠVI": 11663, + "VERTISEMENT": 11664, + "/.": 11665, + "ĠSouthampton": 11666, + "aces": 11667, + "ĠHelen": 11668, + "ĠHum": 11669, + "110": 11670, + "Ġgarbage": 11671, + "through": 11672, + "Ġkingdom": 11673, + "MT": 11674, + "augh": 11675, + "Ġbizarre": 11676, + "ĠStarting": 11677, + "Ġwooden": 11678, + "ĠProgress": 11679, + "iron": 11680, + "sten": 11681, + "ĠSergio": 11682, + "ĠHR": 11683, + "Ġturnout": 11684, + "ĠAmericas": 11685, + "ĠSara": 11686, + "Ġagrees": 11687, + "apper": 11688, + "Ġbra": 11689, + "Ġrecycling": 11690, + "oom": 11691, + "Ġflee": 11692, + "Ġdistinct": 11693, + "IAL": 11694, + "aha": 11695, + "Ġfever": 11696, + "ĠPartnership": 11697, + "ĠYu": 11698, + "ĠPixel": 11699, + "ĠBlock": 11700, + "ĠMelissa": 11701, + "igg": 11702, + "Ġdecides": 11703, + "ĠNorman": 11704, + "Ġmas": 11705, + "held": 11706, + "ĠPD": 11707, + "Ġsheer": 11708, + "ĠDim": 11709, + "ĠCass": 11710, + "Ġcolumnist": 11711, + "ĠBros": 11712, + "Ġturnaround": 11713, + "ĠValue": 11714, + "ĠBachelor": 11715, + "awn": 11716, + "Ġassignment": 11717, + "ested": 11718, + "ĠJudiciary": 11719, + "Ġdiamond": 11720, + "Ġmus": 11721, + "Ġindigenous": 11722, + "lines": 11723, + "Ġ1984": 11724, + "igroup": 11725, + "ict": 11726, + "ĠJaguars": 11727, + "Ġlun": 11728, + "Ġprofiles": 11729, + "Ġcomputing": 11730, + "ĠBelgian": 11731, + "ĠLloyd": 11732, + "ĠGoing": 11733, + "Ġdisp": 11734, + "Ġ1987": 11735, + "eder": 11736, + "ĠVin": 11737, + "Ġgovern": 11738, + "Ġblend": 11739, + "ĠSebastian": 11740, + "ĠMidwest": 11741, + "iga": 11742, + "Ġspl": 11743, + "Ġtopping": 11744, + "Ġnetworking": 11745, + "ĠEmer": 11746, + "Ġoxygen": 11747, + "ĠInterest": 11748, + "ĠMoy": 11749, + "Ġtrader": 11750, + "Ġbay": 11751, + "Ġsticking": 11752, + "ĠMovement": 11753, + "Ġbidding": 11754, + "tax": 11755, + "Ġacademy": 11756, + "ĠMO": 11757, + "ĠSpirit": 11758, + "Ġhealing": 11759, + "wen": 11760, + "ĠPrix": 11761, + "cal": 11762, + "ĠOperating": 11763, + "Ġinstantly": 11764, + "ĠTonight": 11765, + "Ġsacked": 11766, + "Ġautomation": 11767, + "umps": 11768, + "ĠNey": 11769, + "March": 11770, + "ĠBuck": 11771, + 
"Ġconcentration": 11772, + "Here": 11773, + "Ġtravelers": 11774, + "Ġprotective": 11775, + "ĠMoody": 11776, + "Ġentrepreneur": 11777, + "Ġfac": 11778, + "kowski": 11779, + "Ġpreparations": 11780, + "Ġdominate": 11781, + "Ġspray": 11782, + "Ġdisturbing": 11783, + "ĠFraser": 11784, + "ĠCody": 11785, + "ashi": 11786, + "ĠPel": 11787, + "Ġrisky": 11788, + "Ġawkward": 11789, + "ĠVA": 11790, + "ails": 11791, + "Ġangle": 11792, + "Ġundergoing": 11793, + "Ġalbums": 11794, + "Ġafterwards": 11795, + "ĠNaw": 11796, + "uge": 11797, + "enter": 11798, + "ĠSussex": 11799, + "ĠRecently": 11800, + "Ġlikelihood": 11801, + "large": 11802, + "Ġsnaps": 11803, + "ibr": 11804, + "ĠMalcolm": 11805, + "Ġcru": 11806, + "Ġaltogether": 11807, + "Ġsetup": 11808, + "Ġtorture": 11809, + "Ġfiber": 11810, + "Ġquarterbacks": 11811, + "ĠGetting": 11812, + "ipping": 11813, + "ĠNorwegian": 11814, + "ĠMiles": 11815, + "ĠArnold": 11816, + "ĠDisease": 11817, + "Ġtends": 11818, + "ife": 11819, + "ĠCaroline": 11820, + "Ġnavigate": 11821, + "Ġbrush": 11822, + "ĠAssociates": 11823, + "Ġbath": 11824, + "ĠCenters": 11825, + "ĠMC": 11826, + "Ġtaxpayer": 11827, + "comp": 11828, + "Ġaccomplish": 11829, + "ĠTraffic": 11830, + "ĠBru": 11831, + "Ġgreenhouse": 11832, + "ĠMalaysian": 11833, + "ĠPur": 11834, + "ased": 11835, + "ĠKnicks": 11836, + "aters": 11837, + "Ġalt": 11838, + "ICK": 11839, + "Ġcalculations": 11840, + "Ġmindset": 11841, + "unch": 11842, + "Ġgu": 11843, + "Ġsteadily": 11844, + "Ġfiction": 11845, + "ĠPap": 11846, + "forming": 11847, + "ĠActor": 11848, + "ĠBerry": 11849, + "imp": 11850, + "ĠUpper": 11851, + "Ġassessed": 11852, + "Ġlawn": 11853, + "ĠRoh": 11854, + "Ġclearance": 11855, + "funded": 11856, + "Ġpret": 11857, + "ĠHom": 11858, + "VS": 11859, + "ĠTourism": 11860, + "ĠRy": 11861, + "ĠGonz": 11862, + "ĠStudios": 11863, + "Ġanchor": 11864, + "Ġrecognise": 11865, + "Ġcooperate": 11866, + "enny": 11867, + "aza": 11868, + "ĠMeet": 11869, + "Ġeventual": 11870, + "SW": 11871, + "ĠCounsel": 11872, + "ĠSave": 11873, + "Ġlucrative": 11874, + "Ġslim": 11875, + "ĠGreens": 11876, + "Ġchemistry": 11877, + "ĠSheikh": 11878, + "Ġbridges": 11879, + "business": 11880, + "ĠSaf": 11881, + "ĠGy": 11882, + "Ġprotocol": 11883, + "Ġnephew": 11884, + "ĠBrands": 11885, + "ĠCulture": 11886, + "orship": 11887, + "Ġ(£": 11888, + "ĠDell": 11889, + "astics": 11890, + "Ġproving": 11891, + "ĠMann": 11892, + "aca": 11893, + "Ġindoor": 11894, + "ĠUganda": 11895, + "ĠRomney": 11896, + "ĠStage": 11897, + "Ġward": 11898, + "ĠAmber": 11899, + "haw": 11900, + "Ġtw": 11901, + "Ġbullying": 11902, + "ĠCAR": 11903, + "Ġassociates": 11904, + "ĠHopkins": 11905, + "Ġsuburb": 11906, + "Ġaggressively": 11907, + "Ġpostponed": 11908, + "Ġbas": 11909, + "Ġburglary": 11910, + "ĠFound": 11911, + "Ġfloors": 11912, + "Any": 11913, + "Ġjam": 11914, + "Ġvisibility": 11915, + "Ġbenefited": 11916, + "ĠAud": 11917, + "aying": 11918, + "iku": 11919, + "ĠPas": 11920, + "ĠGPS": 11921, + "ĠOwens": 11922, + "Ġreluctant": 11923, + "ĠOlivia": 11924, + "ols": 11925, + "Ġemotion": 11926, + "ĠHeavy": 11927, + "Ġhostile": 11928, + "Ġfavorites": 11929, + "Ġfeat": 11930, + "ĠCord": 11931, + "ĠGO": 11932, + "Ġindicted": 11933, + "idal": 11934, + "ĠIL": 11935, + "Ħ": 11936, + "acer": 11937, + "ICH": 11938, + "oda": 11939, + "Ġrecipients": 11940, + "Ġtribal": 11941, + "Ġresist": 11942, + "ĠCritics": 11943, + "Ġsang": 11944, + "ĠMath": 11945, + "ĠBrighton": 11946, + "ĠKw": 11947, + "Ġlimitations": 11948, + "Ġinterception": 11949, + "onde": 11950, + "ĠRobertson": 11951, + "Ġenjoys": 11952, + 
"site": 11953, + "Ġwings": 11954, + "ĠCeltic": 11955, + "Ġrelaxed": 11956, + "Share": 11957, + "Ġwarrants": 11958, + "oco": 11959, + "Ġcritically": 11960, + "GC": 11961, + "Ġcute": 11962, + "Ġlaying": 11963, + "itude": 11964, + "ĠMediterranean": 11965, + "Ġwatches": 11966, + "Ġdisagree": 11967, + "ĠReturn": 11968, + "ARC": 11969, + "people": 11970, + "Ġtwelve": 11971, + "Ġoverdose": 11972, + "ĠLot": 11973, + "ĠFROM": 11974, + "ĠPeters": 11975, + "Ġadministrators": 11976, + "Ġslam": 11977, + "jar": 11978, + "OH": 11979, + "ĠInitiative": 11980, + "Ġteamed": 11981, + "ĠMajority": 11982, + "June": 11983, + "ĠPlaza": 11984, + "lake": 11985, + "Ġglimpse": 11986, + "Ġrings": 11987, + "Ġos": 11988, + "Ġmentor": 11989, + "have": 11990, + "Ġlanguages": 11991, + "Ġuncle": 11992, + "agu": 11993, + "ĠWine": 11994, + "ĠCategory": 11995, + "ĠIng": 11996, + "Ġcontests": 11997, + "ĠRosen": 11998, + "ĠWhatever": 11999, + "Ġdenying": 12000, + "ean": 12001, + "Ġspec": 12002, + "Ġgrad": 12003, + "Ġtenants": 12004, + "show": 12005, + "ĠGregory": 12006, + "Ġcontention": 12007, + "Ġunanimously": 12008, + "ĠPin": 12009, + "fa": 12010, + "ĠPink": 12011, + "Ġswitched": 12012, + "acre": 12013, + "ĠTrading": 12014, + "VP": 12015, + "ĠMaple": 12016, + "Neill": 12017, + "Ġdiscounts": 12018, + "alls": 12019, + "Ġsounded": 12020, + "Ġrumours": 12021, + "ĠCre": 12022, + "hall": 12023, + "ĠTele": 12024, + "Ġthankful": 12025, + "Ġsurveyed": 12026, + "UB": 12027, + "Ġdignity": 12028, + "Ġnod": 12029, + "Ġmisleading": 12030, + "ĠTX": 12031, + "ĠBurke": 12032, + "Ġmounting": 12033, + "Ġskies": 12034, + "Ġbesides": 12035, + "ĠGarrett": 12036, + "tha": 12037, + "Ġintelligent": 12038, + "Ġtanks": 12039, + "apping": 12040, + "ĠRat": 12041, + "aint": 12042, + "Ġentertain": 12043, + "ĠAbdullah": 12044, + "Ġsink": 12045, + "ĠLan": 12046, + "ĠManufacturing": 12047, + "NFL": 12048, + "Ġthemes": 12049, + "ĠHaven": 12050, + "ĠDavies": 12051, + "ĠKerr": 12052, + "ĠLen": 12053, + "Ġcourtroom": 12054, + "Ġfailures": 12055, + "Ġlately": 12056, + "ĠElectronics": 12057, + "Ġgorgeous": 12058, + "Ġnotification": 12059, + "Ġ2030": 12060, + "aved": 12061, + "Ġdeer": 12062, + "economic": 12063, + "ĠStatistics": 12064, + "Ġconfrontation": 12065, + "Ġgovernors": 12066, + "ĠHaram": 12067, + "ĠLGBTQ": 12068, + "Ġprocessed": 12069, + "ĠDuchess": 12070, + "Ġdowns": 12071, + "Ġpork": 12072, + "Ġhumor": 12073, + "ocese": 12074, + "Ġneeding": 12075, + "Ġmidterm": 12076, + "ĠOval": 12077, + "Ġcorners": 12078, + "Ġtablets": 12079, + "eds": 12080, + "vere": 12081, + "Ġattacker": 12082, + "Paul": 12083, + "pee": 12084, + "ĠAlice": 12085, + "Ġrenowned": 12086, + "Ġ09": 12087, + "ocking": 12088, + "Ġcreditors": 12089, + "ĠPedro": 12090, + "ĠPhone": 12091, + "Ġsurveys": 12092, + "ĠWelsh": 12093, + "Ġcow": 12094, + "Ġbuilds": 12095, + "Ġ000": 12096, + "ĠAzerbaijan": 12097, + "ĠYad": 12098, + "Ġinfant": 12099, + "Ġmotorists": 12100, + "Ġpoorly": 12101, + "Ġmedications": 12102, + "Ġstupid": 12103, + "ĠCastro": 12104, + "user": 12105, + "antly": 12106, + "alty": 12107, + "ĠCond": 12108, + "issa": 12109, + "ĠIvan": 12110, + "Ġcostume": 12111, + "Ġ08": 12112, + "Ġhence": 12113, + "Ġdangers": 12114, + "Ġbullish": 12115, + "Life": 12116, + "Ġflavor": 12117, + "ĠCharleston": 12118, + "Ġbikes": 12119, + "Ġworkshops": 12120, + "Ġarranged": 12121, + "Ġcontender": 12122, + "Ġsequel": 12123, + "ĠPlant": 12124, + "Ġdonor": 12125, + "Ġfactories": 12126, + "rict": 12127, + "ellen": 12128, + "Ġrobots": 12129, + "ĠWor": 12130, + "ĠDirectors": 12131, + "ĠPeru": 12132, + "Ġqueen": 
12133, + "ĠTimothy": 12134, + "ĠToo": 12135, + "Ġobservers": 12136, + "Ġears": 12137, + "Ġbel": 12138, + "link": 12139, + "uns": 12140, + "Ġhomers": 12141, + "Ġadjacent": 12142, + "Ġconfidential": 12143, + "Ġstunned": 12144, + "iden": 12145, + "illed": 12146, + "ESS": 12147, + "Ġconvenient": 12148, + "ĠLindsey": 12149, + "por": 12150, + "upp": 12151, + "Ġborrow": 12152, + "ĠAhmad": 12153, + "ORT": 12154, + "Ġrelate": 12155, + "ĠSelf": 12156, + "ĠVanguard": 12157, + "utter": 12158, + "ĠBranch": 12159, + "ĠBolton": 12160, + "bat": 12161, + "Ġoutright": 12162, + "fighters": 12163, + "ĠBed": 12164, + "Ġpes": 12165, + "inski": 12166, + "Ġgunshot": 12167, + "Ġprinting": 12168, + "ĠSent": 12169, + "vern": 12170, + "Ġharvest": 12171, + "Ġbubble": 12172, + "Ġrefund": 12173, + "Ġfuels": 12174, + "Ġdive": 12175, + "Ġdiplomat": 12176, + "Ġpile": 12177, + "ĠVery": 12178, + "rot": 12179, + "ĠSearch": 12180, + "ĠJoyce": 12181, + "ĠPruitt": 12182, + "ĠLevel": 12183, + "ĠBP": 12184, + "ĠLac": 12185, + "had": 12186, + "Ġexpenditure": 12187, + "ĠMadd": 12188, + "Ġpockets": 12189, + "ĠClippers": 12190, + "ĠDear": 12191, + "ĠGive": 12192, + "Ġhal": 12193, + "Ġvertical": 12194, + "Ġwholesale": 12195, + "what": 12196, + "ĠSpringfield": 12197, + "ayed": 12198, + "ĠSom": 12199, + "Ġsecrets": 12200, + "Ġcharts": 12201, + "iar": 12202, + "ibility": 12203, + "LAND": 12204, + "Ġbearing": 12205, + "Ġprom": 12206, + "Ġtab": 12207, + "Ġsheets": 12208, + "ĠGL": 12209, + "Ġendless": 12210, + "opening": 12211, + "ĠOwen": 12212, + "Ġunderneath": 12213, + "ĠErik": 12214, + "ĠDACA": 12215, + "Ġsteering": 12216, + "Ġfootprint": 12217, + "ĠRoma": 12218, + "ĠDucks": 12219, + "ĠEllen": 12220, + "ĠProfessional": 12221, + "ĠGardens": 12222, + "Ġgoalie": 12223, + "Ġshine": 12224, + "Ġturmoil": 12225, + "Ġhunger": 12226, + "ĠâĢĭ": 12227, + "active": 12228, + "hey": 12229, + "Ġblessed": 12230, + "ason": 12231, + "oping": 12232, + "ĠThousands": 12233, + "Ġdose": 12234, + "ĠLor": 12235, + "Ġevolved": 12236, + "Ġcharities": 12237, + "ĠPE": 12238, + "ĠRub": 12239, + "ws": 12240, + "Ġmist": 12241, + "ĠShen": 12242, + "Ġbiological": 12243, + "ĠTweet": 12244, + "Ġcollections": 12245, + "Ġsubstantially": 12246, + "inner": 12247, + "Ġbattled": 12248, + "ĠCong": 12249, + "Hold": 12250, + "wp": 12251, + "Ġwells": 12252, + "Ġsake": 12253, + "Ġunrest": 12254, + "ĠKurt": 12255, + "Ġripped": 12256, + "itation": 12257, + "Ġneighbourhood": 12258, + "Ġinv": 12259, + "Ġcad": 12260, + "ĠCuban": 12261, + "ĠWealth": 12262, + "Ġtuition": 12263, + "Ġdeclaring": 12264, + "sch": 12265, + "orne": 12266, + "Ġwondered": 12267, + "ĠChaff": 12268, + "Ġdealer": 12269, + "ĠNumber": 12270, + "Mobile": 12271, + "Ġscratch": 12272, + "Ġprepares": 12273, + "ĠSens": 12274, + "ĠIstanbul": 12275, + "ĠPanama": 12276, + "ĠCay": 12277, + "Ġallocation": 12278, + "itutional": 12279, + "Ġhar": 12280, + "ĠNazi": 12281, + "ĠSund": 12282, + "Ġwarehouse": 12283, + "Ġbackyard": 12284, + "ĠIll": 12285, + "Ġunlawful": 12286, + "ĠReform": 12287, + "Ġbasement": 12288, + "ĠHi": 12289, + "ĠPictures": 12290, + "Ġtransfers": 12291, + "ĠSell": 12292, + "Ġfluid": 12293, + "Ġambitions": 12294, + "wife": 12295, + "Ġintensive": 12296, + "Ġsteals": 12297, + "Ġfestive": 12298, + "ĠHayes": 12299, + "Ġrestoration": 12300, + "Ġbranded": 12301, + "Journal": 12302, + "Ġmacro": 12303, + "Ġconsole": 12304, + "ĠMelania": 12305, + "ĠRahul": 12306, + "Ġdisposal": 12307, + "Ġcult": 12308, + "Ġpetrol": 12309, + "Ġtires": 12310, + "Ġkidnapping": 12311, + "Ġ115": 12312, + "Ġswap": 12313, + "ĠSud": 12314, + 
"Ġblown": 12315, + "ĠHindu": 12316, + "ĠBeckham": 12317, + "ĠGul": 12318, + "Ġfixture": 12319, + "Ġwisdom": 12320, + "Ġmines": 12321, + "fort": 12322, + "Ġrivers": 12323, + "ĠCyber": 12324, + "Ġtouches": 12325, + "race": 12326, + "Ġrelax": 12327, + "Ġcrashes": 12328, + "Ġconstituency": 12329, + "Ġ1979": 12330, + "Ġbureau": 12331, + "Ġinterface": 12332, + "Ġdetected": 12333, + "ĠBio": 12334, + "Ġhighlighting": 12335, + "ames": 12336, + "Ġcorresponding": 12337, + "great": 12338, + "Ġgray": 12339, + "Ġadvantages": 12340, + "ĠME": 12341, + "ĠAbbas": 12342, + "Ġnaked": 12343, + "rington": 12344, + ".),": 12345, + "ĠFace": 12346, + "third": 12347, + "Ġtranscript": 12348, + "ples": 12349, + "Good": 12350, + "ĠArctic": 12351, + "Ġtolerance": 12352, + "reat": 12353, + "green": 12354, + "ĠMik": 12355, + "Ġoutreach": 12356, + "Ġrolls": 12357, + "Ġgen": 12358, + "Ġsupplied": 12359, + "Ġguarantees": 12360, + "aug": 12361, + "Ġsemif": 12362, + "ounds": 12363, + "running": 12364, + "Ġfitting": 12365, + "ĠRisk": 12366, + "iveness": 12367, + "family": 12368, + "Ġti": 12369, + "ĠIsaac": 12370, + "Ġdump": 12371, + "ĠPatricia": 12372, + "Ġpassport": 12373, + "ĠRhode": 12374, + "Who": 12375, + "log": 12376, + "Ġstat": 12377, + "Ġrat": 12378, + "ango": 12379, + "SB": 12380, + "ĠMaur": 12381, + "Ġsmiling": 12382, + "Ġstrikeouts": 12383, + "Ġpupils": 12384, + "Ġcomplications": 12385, + "ĠAdvanced": 12386, + "ĠMonetary": 12387, + "ĠTall": 12388, + "ĠALL": 12389, + "Ġcontributor": 12390, + "ĠAdvertising": 12391, + "Ġhorrific": 12392, + "Ġcompeted": 12393, + "ĠKenneth": 12394, + "Ġhailed": 12395, + "Ġbones": 12396, + "Ġbolster": 12397, + "ĠBoss": 12398, + "Ġhospitalized": 12399, + "ĠTelegraph": 12400, + "ĠIndependence": 12401, + "Ġdr": 12402, + "ĠHang": 12403, + "Ġdocumented": 12404, + "Ġsubtle": 12405, + "invest": 12406, + "Ġbounced": 12407, + "ĠMAN": 12408, + "Ġprofession": 12409, + "Ń": 12410, + "Ġexcellence": 12411, + "ĠInspector": 12412, + "ĠBL": 12413, + "Ġdisrupt": 12414, + "ĠWinston": 12415, + "ĠCommunist": 12416, + "ĠSharon": 12417, + "Ġmechanical": 12418, + "Ġtreats": 12419, + "Ġdesperately": 12420, + "ĠIndy": 12421, + "ĠGi": 12422, + "ĠComposite": 12423, + "ĠHeath": 12424, + "aser": 12425, + "ĠCardiff": 12426, + "ilit": 12427, + "Ġeased": 12428, + "Ġprospective": 12429, + "Ġcommissioned": 12430, + "Ġtire": 12431, + "Ġalign": 12432, + "Ġgesture": 12433, + "Ġweakened": 12434, + "URE": 12435, + "SN": 12436, + "Ġnationals": 12437, + "Ġrelies": 12438, + "ĠIRS": 12439, + "ĠCount": 12440, + "Ġmedicines": 12441, + "Ġcongress": 12442, + "Ġstranger": 12443, + "Qu": 12444, + "lessly": 12445, + "ĠQueens": 12446, + "ĠAlleg": 12447, + "uing": 12448, + "ĠWy": 12449, + "ĠMiguel": 12450, + "idi": 12451, + "Ġcivic": 12452, + "ĠPetro": 12453, + "endo": 12454, + "Obviously": 12455, + "Ġreflection": 12456, + "ĠStop": 12457, + "ĠFitzgerald": 12458, + "placed": 12459, + "shore": 12460, + "Ġcorrectly": 12461, + "ĠNE": 12462, + "amy": 12463, + "ĠCT": 12464, + "some": 12465, + "ĠMb": 12466, + "oi": 12467, + "ĠHogan": 12468, + "ĠInnovation": 12469, + "ĠVilla": 12470, + "ĠCAN": 12471, + "ĠCemetery": 12472, + "into": 12473, + "Ġquestionable": 12474, + "Ġcreator": 12475, + "rug": 12476, + "Ġsemifinals": 12477, + "mission": 12478, + "Ġcle": 12479, + "ĠWaters": 12480, + "ĠNixon": 12481, + "ĠBT": 12482, + "Ġassuming": 12483, + "ĠJer": 12484, + "ĠClay": 12485, + "pack": 12486, + "ĠCool": 12487, + "may": 12488, + "Ġdecor": 12489, + "Ġspike": 12490, + "ĠSomalia": 12491, + "ĠKarn": 12492, + "ĠDamascus": 12493, + "Shares": 12494, + "Ġsus": 
12495, + "ĠMoss": 12496, + "Ġ1985": 12497, + "Ġsuperintendent": 12498, + "ĠResults": 12499, + "Ġspends": 12500, + "prom": 12501, + "Ġshipped": 12502, + "Ġlaundering": 12503, + "ĠLeslie": 12504, + "Ġmeteor": 12505, + "Ġabandon": 12506, + "Ġdeliberately": 12507, + "ĠSentinel": 12508, + "Ġfascinating": 12509, + "Ġenrollment": 12510, + "ĠExperts": 12511, + "ĠSimilarly": 12512, + "ĠCuomo": 12513, + "bor": 12514, + "Ġune": 12515, + "neutral": 12516, + "Ġhamstring": 12517, + "Ġnegotiated": 12518, + "zes": 12519, + "ĠLeo": 12520, + "ĠDoctor": 12521, + "Ġcurriculum": 12522, + "ĠFocus": 12523, + "Ġtravels": 12524, + "Ġbeverage": 12525, + "ĠIncluding": 12526, + "tz": 12527, + "type": 12528, + "ĠRange": 12529, + "Ġfloods": 12530, + "Ġcoached": 12531, + "Ġdominance": 12532, + "letico": 12533, + "ĠRafael": 12534, + "Ġpredictions": 12535, + "Ġprosperity": 12536, + "ĠCav": 12537, + "Ġclinics": 12538, + "ĠBanking": 12539, + "ĠComing": 12540, + "ears": 12541, + "ĠKaepernick": 12542, + "ĠBlvd": 12543, + "Ġretained": 12544, + "isions": 12545, + "Ġko": 12546, + "Ġensemble": 12547, + "Ġprecise": 12548, + "Ġcompact": 12549, + "MD": 12550, + "ĠJet": 12551, + "ached": 12552, + "ĠTru": 12553, + "ĠBass": 12554, + "ĠIcon": 12555, + "Ġexcluding": 12556, + "sur": 12557, + "Ġconstruct": 12558, + "Ġvoiced": 12559, + "pan": 12560, + "Ġinability": 12561, + "Ġexc": 12562, + "Ġmate": 12563, + "Ġtrailing": 12564, + "Ġsuccessive": 12565, + "Ġbets": 12566, + "Ġgauge": 12567, + "Ġminorities": 12568, + "ĠIND": 12569, + "ĠVel": 12570, + "ĠGP": 12571, + "oid": 12572, + "bon": 12573, + "Ġpred": 12574, + "Ġdash": 12575, + "Ġperformer": 12576, + "Ġoccasional": 12577, + "aken": 12578, + "mes": 12579, + "America": 12580, + "Ġliver": 12581, + "Sp": 12582, + "Big": 12583, + "Ġwildfires": 12584, + "ĠJackie": 12585, + "ĠLed": 12586, + "ĠFinland": 12587, + "Ġjurors": 12588, + "olic": 12589, + "urance": 12590, + "ĠEdge": 12591, + "open": 12592, + "Ġscenarios": 12593, + "Ġglory": 12594, + "entry": 12595, + "ĠCoffee": 12596, + "rep": 12597, + "ĠChand": 12598, + "ĠVas": 12599, + "ĠIslamabad": 12600, + "Ġbur": 12601, + "ĠFle": 12602, + "ĠEdition": 12603, + "Ġshoe": 12604, + "ï¸ı": 12605, + "**": 12606, + "tle": 12607, + "ĠEb": 12608, + "keeping": 12609, + "ĠBasketball": 12610, + "ĠVon": 12611, + "ĠCF": 12612, + "MENT": 12613, + "amm": 12614, + "ĠFernando": 12615, + "Ġcompares": 12616, + "ĠDouble": 12617, + "Ġconvictions": 12618, + "Ġatop": 12619, + "Ġcops": 12620, + "Ġremembers": 12621, + "Ġlacking": 12622, + "dom": 12623, + "itate": 12624, + "ĠBeauty": 12625, + "Ġdevelops": 12626, + "ĠGor": 12627, + "Ġfunctional": 12628, + "ĠCOUNTY": 12629, + "ĠUpon": 12630, + "Ġsprint": 12631, + "Ġinjection": 12632, + "Ġminors": 12633, + "ĠTamil": 12634, + "ĠGat": 12635, + "101": 12636, + "ety": 12637, + "Ġdrum": 12638, + "Ġtasked": 12639, + "Ġpact": 12640, + "Ġ170": 12641, + "MR": 12642, + "ĠRamos": 12643, + "Ġcandy": 12644, + "Sc": 12645, + "iced": 12646, + "Ġsupermarket": 12647, + "Ġworrying": 12648, + "Ġsellers": 12649, + "ĠTag": 12650, + ".:": 12651, + "Ġmixture": 12652, + "oting": 12653, + "Bl": 12654, + "ĠLl": 12655, + "ĠJal": 12656, + "ican": 12657, + "ĠBid": 12658, + "country": 12659, + "ĠStrategy": 12660, + "Ġadverse": 12661, + "Ġplunged": 12662, + "ĠMit": 12663, + "Ġstark": 12664, + "aton": 12665, + "Ġbooking": 12666, + "Tr": 12667, + "Ġcontainers": 12668, + "Ġvintage": 12669, + "ĠPit": 12670, + "Ġsurfaced": 12671, + "Ġindependently": 12672, + "Ġdetection": 12673, + "ĠBeyon": 12674, + "Ġcasualties": 12675, + "Ġstabbing": 12676, + "oved": 12677, + 
"Ġbarred": 12678, + "Ġthereby": 12679, + "Ġpartnered": 12680, + "Ġposing": 12681, + "ĠShannon": 12682, + "ĠChapel": 12683, + "Ġtechnically": 12684, + "uous": 12685, + "»": 12686, + "ometer": 12687, + "Ġwildfire": 12688, + "share": 12689, + "heart": 12690, + "Ġammunition": 12691, + "Ġthrive": 12692, + "ĠStre": 12693, + "GP": 12694, + "cé": 12695, + "ĠMonaco": 12696, + "goal": 12697, + "ĠUm": 12698, + "ĠHSBC": 12699, + "ĠHilton": 12700, + "ĠViv": 12701, + "ĠKell": 12702, + "Ġdecisive": 12703, + "Ġmotive": 12704, + "amo": 12705, + "feld": 12706, + "ĠWH": 12707, + "iry": 12708, + "ulu": 12709, + "ĠSchneider": 12710, + "Ġcampaigning": 12711, + "Ġseparately": 12712, + "igo": 12713, + "ĠED": 12714, + "ĠRamirez": 12715, + "Ġmetro": 12716, + "ĠPatel": 12717, + "ĠChi": 12718, + "ĠAudi": 12719, + "Ġcharacteristics": 12720, + "Ġrestart": 12721, + "Ġkeyboard": 12722, + "ĠSD": 12723, + "his": 12724, + "biz": 12725, + "ĠSoft": 12726, + "ĠGrammy": 12727, + "Ġcontested": 12728, + "Ġweekends": 12729, + "Ġ112": 12730, + "Ġcycling": 12731, + "Ġhealthier": 12732, + "ija": 12733, + "Ġheader": 12734, + "Ġemploy": 12735, + "İ": 12736, + "Ġshortages": 12737, + "ĠAsk": 12738, + "ĠIvanka": 12739, + "Ġpartisan": 12740, + "Ġflowing": 12741, + "Ġcave": 12742, + "ENS": 12743, + "Ġups": 12744, + "read": 12745, + "ouch": 12746, + "Ġ102": 12747, + "Ġforming": 12748, + "bot": 12749, + "bie": 12750, + "Ġenrolled": 12751, + "Ġconcussion": 12752, + "Ġaffidavit": 12753, + "Ġmysterious": 12754, + "uries": 12755, + "ĠMang": 12756, + "Ġauthentic": 12757, + "Ġmetrics": 12758, + "ĠTwins": 12759, + "Ġprep": 12760, + "IJ": 12761, + "Ġdesired": 12762, + "ĠDiv": 12763, + "wall": 12764, + "ĠTab": 12765, + "Ġcompet": 12766, + "Ġrelied": 12767, + "Ġinequality": 12768, + "Ġmanual": 12769, + "ĠBucks": 12770, + "agging": 12771, + "Ġcorporation": 12772, + "Ġbanner": 12773, + "Ġgraphics": 12774, + "Ġaccurately": 12775, + "ĠMeeting": 12776, + "Ġconsult": 12777, + "ser": 12778, + "Ġprotesting": 12779, + "Ġhurting": 12780, + "omed": 12781, + "tes": 12782, + "Ġrode": 12783, + "Ġstartups": 12784, + "Ġhanding": 12785, + "ĠNest": 12786, + "Ġconsistency": 12787, + "anned": 12788, + "dem": 12789, + "ĠLyon": 12790, + "ĠCompetition": 12791, + "Ġtricky": 12792, + "Ġcos": 12793, + "ĠBengals": 12794, + "arry": 12795, + "Ġunderwent": 12796, + "ĠKit": 12797, + "à": 12798, + "uploads": 12799, + "Ġskate": 12800, + "Ġ''": 12801, + "Ġjun": 12802, + "ĠContent": 12803, + "focused": 12804, + "lat": 12805, + "ĠExp": 12806, + "ought": 12807, + "Ġnightmare": 12808, + "ĠExpect": 12809, + "Ġprecisely": 12810, + "ĠMonica": 12811, + "Ġlobbying": 12812, + "ĠChester": 12813, + "ĠInvest": 12814, + "Former": 12815, + "Ġimminent": 12816, + "ĠNL": 12817, + "Ġcomparing": 12818, + "ĠChes": 12819, + "ede": 12820, + "ĠNobel": 12821, + "mers": 12822, + "ĠKin": 12823, + "ĠBoko": 12824, + "ount": 12825, + "Ġthoroughly": 12826, + "Ġscattered": 12827, + "sharing": 12828, + "markets": 12829, + "ĠMis": 12830, + "Ġambition": 12831, + "Ġpreference": 12832, + "Ġeffectiveness": 12833, + "rio": 12834, + "Ġheavyweight": 12835, + "Ġovert": 12836, + "anya": 12837, + "ĠKanye": 12838, + "ishi": 12839, + "Ġrewards": 12840, + "uled": 12841, + "bach": 12842, + "Ġemphasized": 12843, + "Ġapologize": 12844, + "ĠRecent": 12845, + "!!": 12846, + "Ġanimated": 12847, + "ĠExxon": 12848, + "Ġfruits": 12849, + "Ġstripped": 12850, + "fold": 12851, + "ĠIndonesian": 12852, + "ller": 12853, + "Ġdementia": 12854, + "Ġkidney": 12855, + "Ġhalted": 12856, + "years": 12857, + "Ġconcerts": 12858, + "Ġrefers": 12859, + 
"ĠFri": 12860, + "Your": 12861, + "irl": 12862, + "Ġleap": 12863, + "jud": 12864, + "ĠHugh": 12865, + "ĠFO": 12866, + "Ġsore": 12867, + "Ġkil": 12868, + "ĠMate": 12869, + "cci": 12870, + "Ġsetback": 12871, + "Ġtightening": 12872, + "keeper": 12873, + "ĠAlbany": 12874, + "Ġpolicymakers": 12875, + "Ġdisorders": 12876, + "ĠCBC": 12877, + "ĠDiaz": 12878, + "Ġmaps": 12879, + "Ġroutinely": 12880, + "Ġverify": 12881, + "Ġbash": 12882, + "ĠJinping": 12883, + "Ġdisasters": 12884, + "ĠMonroe": 12885, + "ĠLouise": 12886, + "JP": 12887, + "ĠNevertheless": 12888, + "Ġconcessions": 12889, + "ĠPog": 12890, + "going": 12891, + "ĠFifth": 12892, + "ĠJill": 12893, + "ICT": 12894, + "ĠFM": 12895, + "ĠSugar": 12896, + "ĠBarb": 12897, + "Ġmidway": 12898, + "Ġtin": 12899, + "ĠPic": 12900, + "ĠPL": 12901, + "Ġleaks": 12902, + "Ġgrief": 12903, + "Ġtattoo": 12904, + "`": 12905, + "Ġment": 12906, + "ĠNu": 12907, + "Ġmarry": 12908, + "Ġdiving": 12909, + "Ġ1982": 12910, + "Ġcoin": 12911, + "ĠPoc": 12912, + "Ġstarred": 12913, + "ĠRiverside": 12914, + "Ġsidelined": 12915, + "Ġminers": 12916, + "STON": 12917, + "Ġbelongs": 12918, + "ĠSantos": 12919, + "ĠTechnical": 12920, + "aco": 12921, + "Ġadvise": 12922, + "Ġstreams": 12923, + "Ġcooler": 12924, + "ĠHE": 12925, + "Ġordering": 12926, + "ĠTask": 12927, + "ĠACT": 12928, + "ĠAnton": 12929, + "Ġcertification": 12930, + "ĠLeafs": 12931, + "ĠTS": 12932, + "ĠSerbia": 12933, + "azi": 12934, + "inks": 12935, + "ĠEST": 12936, + "Ġrelay": 12937, + "°": 12938, + "Ġdisappearance": 12939, + "ĠRomania": 12940, + "Ġoven": 12941, + "Ġowed": 12942, + "ĠStrip": 12943, + "ulated": 12944, + "UC": 12945, + "ITE": 12946, + "bling": 12947, + "Then": 12948, + "ppy": 12949, + "Ġunlimited": 12950, + "Ġcalories": 12951, + "Ġmerchandise": 12952, + "Ġblonde": 12953, + "ĠSpicer": 12954, + "performing": 12955, + "Ġimpl": 12956, + "Ġplates": 12957, + "Ġmosque": 12958, + "Ġdemon": 12959, + "Ġought": 12960, + "Ġdumped": 12961, + "Ġtracked": 12962, + "even": 12963, + "Ġstabil": 12964, + "imet": 12965, + "ĠLiga": 12966, + "ugh": 12967, + "ther": 12968, + "agar": 12969, + "Ġarchitect": 12970, + "Ġallocated": 12971, + "ĠJoey": 12972, + "Ġmarathon": 12973, + "master": 12974, + "ĠBert": 12975, + "Ġast": 12976, + "ĠEbola": 12977, + "ĠConservation": 12978, + "nic": 12979, + "Ġparallel": 12980, + "Ġinmate": 12981, + "Ġlocate": 12982, + "Ġdistribute": 12983, + "guard": 12984, + "Ġtackling": 12985, + "ential": 12986, + "Ġvi": 12987, + "Ġcups": 12988, + "Ġrhythm": 12989, + "Ġendured": 12990, + "ĠHub": 12991, + "ois": 12992, + "ĠLiberals": 12993, + "ĠRedskins": 12994, + "ĠEP": 12995, + "ĠKnox": 12996, + "fr": 12997, + "Ġmassacre": 12998, + "oka": 12999, + "Ġcompl": 13000, + "raft": 13001, + "ĠPublished": 13002, + "Ġattraction": 13003, + "ĠStephens": 13004, + "ility": 13005, + "ĠPul": 13006, + "ĠCapt": 13007, + "Ġexploded": 13008, + "Ġexceeded": 13009, + "lying": 13010, + "Ġcal": 13011, + "Mart": 13012, + "Ġpaintings": 13013, + "inate": 13014, + "ĠBrendan": 13015, + "Ġfortune": 13016, + "onductor": 13017, + "Ġphysicians": 13018, + "ĠStudy": 13019, + "ĠBul": 13020, + "ĠModern": 13021, + "HD": 13022, + "ĠBour": 13023, + "Ġtying": 13024, + "Ġ1967": 13025, + "Ġlighter": 13026, + "Ġtoss": 13027, + "inspired": 13028, + "Ġgreeted": 13029, + "Ġcycl": 13030, + "Ġverified": 13031, + "Ġmerit": 13032, + "sign": 13033, + "lder": 13034, + "Ġdebts": 13035, + "ĠSnyder": 13036, + "Ġamendments": 13037, + "Ġindicators": 13038, + "ĠDortmund": 13039, + "then": 13040, + "ĠListen": 13041, + "ĠFB": 13042, + "ref": 13043, + "ĠIoT": 13044, + 
"ĠBrewers": 13045, + "ĠLeadership": 13046, + "ĠNicolas": 13047, + "ĠBody": 13048, + "Ġsam": 13049, + "ĠAdvisor": 13050, + "Ġcord": 13051, + "Ġabuses": 13052, + "ĠPortuguese": 13053, + "Ġflown": 13054, + "VR": 13055, + "Ġconsumed": 13056, + "Ġreass": 13057, + "Ġalien": 13058, + "Ġrivalry": 13059, + "ĠREPORT": 13060, + "ĠRush": 13061, + "Ġdirecting": 13062, + "Ġsearches": 13063, + "ĠHP": 13064, + "ĠRoll": 13065, + "ĠFay": 13066, + "ĠClare": 13067, + "Ġhaul": 13068, + "Ġriot": 13069, + "Ġsettlements": 13070, + "Ġnorm": 13071, + "Ġaccelerated": 13072, + "ĠLok": 13073, + "Ġclever": 13074, + "Ġhyd": 13075, + "Ġstats": 13076, + "ĠHull": 13077, + "kers": 13078, + "Ġbuys": 13079, + "uter": 13080, + "Ġfue": 13081, + "https": 13082, + "UD": 13083, + "Ġisolation": 13084, + "Ġsuspend": 13085, + "ĠRules": 13086, + "ĠCircle": 13087, + "ĠHopefully": 13088, + "played": 13089, + "âĢ³": 13090, + "ĠPRE": 13091, + "sim": 13092, + "edd": 13093, + "ĠProperties": 13094, + "Ġbeans": 13095, + "Ġrevive": 13096, + "ĠBir": 13097, + "oug": 13098, + "Ġmob": 13099, + "Ġshowdown": 13100, + "iman": 13101, + "Ġpap": 13102, + "Ġvol": 13103, + "wu": 13104, + "Ġdiver": 13105, + "Ġpill": 13106, + "ĠMarlins": 13107, + "ĠLamar": 13108, + "Ġpersistent": 13109, + "Ġcondolences": 13110, + "ĠThor": 13111, + "Ab": 13112, + "Ġimpress": 13113, + "ĠRaptors": 13114, + "Ġreferences": 13115, + "Ġstiff": 13116, + "ĠBash": 13117, + "eding": 13118, + "Ġmurders": 13119, + "ĠGene": 13120, + "ĠManila": 13121, + "Ġbrokers": 13122, + "Ms": 13123, + "start": 13124, + "ĠDhabi": 13125, + "etz": 13126, + "Ġsubmission": 13127, + "ĠSchmidt": 13128, + "ĠPersonal": 13129, + "ĠBeverly": 13130, + "ĠMovie": 13131, + "ĠLamb": 13132, + "Ġplacement": 13133, + "Ġfolk": 13134, + "Ġfrequency": 13135, + "Ġplanted": 13136, + "Ġtwins": 13137, + "prov": 13138, + "rec": 13139, + "Ġpermanently": 13140, + "Ġcoordination": 13141, + "ĠCart": 13142, + "Ġobstacles": 13143, + "Ġliterature": 13144, + "Ġtu": 13145, + "Ġchill": 13146, + "ĠReserved": 13147, + "Ġlovers": 13148, + "ĠOutside": 13149, + "Ġslideshow": 13150, + "ĠGru": 13151, + "Ġty": 13152, + "Ġsalad": 13153, + "Ġlaboratory": 13154, + "ĠHolt": 13155, + "Ġ103": 13156, + "urb": 13157, + "ĠOrganisation": 13158, + "ĠAndrews": 13159, + "Ġrecipient": 13160, + "arch": 13161, + "Ġbleeding": 13162, + "ĠPand": 13163, + "Ġoverturned": 13164, + "Ġlistened": 13165, + "Ġclause": 13166, + "Ġnationalist": 13167, + "Ġresumed": 13168, + "ĠCout": 13169, + "ĠPride": 13170, + "Ġlayers": 13171, + "ĠBella": 13172, + "Ġreversed": 13173, + "Ġpriest": 13174, + "ĠFX": 13175, + "Ġalbeit": 13176, + "Ġhalfway": 13177, + "Ġcotton": 13178, + "ĠCarey": 13179, + "ĠTE": 13180, + "OCK": 13181, + "Ġbuck": 13182, + "ributes": 13183, + "ea": 13184, + "Ġfancy": 13185, + "ĠBuc": 13186, + "Ġbans": 13187, + "uters": 13188, + "Ġliabilities": 13189, + "ĠSou": 13190, + "ĠBernie": 13191, + "Ġintervene": 13192, + "food": 13193, + "ĠNDP": 13194, + "Ġinsist": 13195, + "Ġcontracted": 13196, + "hawk": 13197, + "),\"": 13198, + "ĠDawn": 13199, + "Ġmol": 13200, + "Ġcommissioners": 13201, + "Ġstranded": 13202, + "Ġoverwhelmed": 13203, + "Ġrecipes": 13204, + "Ġva": 13205, + "Ġrad": 13206, + "Ġscare": 13207, + "rez": 13208, + "Ġeliminating": 13209, + "Ġresc": 13210, + "ĠBreak": 13211, + "chn": 13212, + "Ġdelight": 13213, + "iot": 13214, + "Ġfreely": 13215, + "TI": 13216, + "ĠBluetooth": 13217, + "ĠMonth": 13218, + "ĠFlor": 13219, + "ĠFreddie": 13220, + "Ġtrailed": 13221, + "Ġinvestigative": 13222, + "Ġimposing": 13223, + "Ġattracting": 13224, + "awk": 13225, + "ĠSherman": 
13226, + "Ġsucceeded": 13227, + "Ġvent": 13228, + "Ġreconciliation": 13229, + "ĠCel": 13230, + "ĠThroughout": 13231, + "ĠDowntown": 13232, + "ĠBrother": 13233, + "Ġtraditions": 13234, + "Ġmir": 13235, + "Ġstamp": 13236, + "tery": 13237, + "etti": 13238, + "isch": 13239, + "tic": 13240, + "Ġbanning": 13241, + "loss": 13242, + "ĠSpeedway": 13243, + "Ġstalled": 13244, + "ĠEN": 13245, + "ASH": 13246, + "thing": 13247, + "ĠAppeals": 13248, + "rac": 13249, + "Ġdistress": 13250, + "ĠConservatives": 13251, + "ĠPremium": 13252, + "usa": 13253, + "Ġslump": 13254, + "imm": 13255, + "ĠSupp": 13256, + "ĠWong": 13257, + "Ġdistant": 13258, + "Ġ104": 13259, + "Ġtide": 13260, + "ĠNorfolk": 13261, + "ĠYang": 13262, + "Ġsmashed": 13263, + "ĠBarrett": 13264, + "inho": 13265, + "Ġrobbed": 13266, + "ĠFarmers": 13267, + "filled": 13268, + "BT": 13269, + "Ġautumn": 13270, + "Ġtemple": 13271, + "ĠJacobs": 13272, + "Ġprecipitation": 13273, + "ĠHours": 13274, + "ĠFlight": 13275, + "Ġbeside": 13276, + "ĠOre": 13277, + "!)": 13278, + "ĠTurnbull": 13279, + "Ġpig": 13280, + "Ġcooling": 13281, + "Ġservers": 13282, + "oriented": 13283, + "Ġlocks": 13284, + "ĠSears": 13285, + "aving": 13286, + "ĠQuick": 13287, + "ĠGlob": 13288, + "ĠMining": 13289, + "Ġhorizon": 13290, + "arians": 13291, + "ĠOm": 13292, + "writing": 13293, + "Ġbelieving": 13294, + "Ġbon": 13295, + "Ġmounted": 13296, + "Ġpunt": 13297, + "ucci": 13298, + "uzz": 13299, + "cul": 13300, + "Ġkiss": 13301, + "ĠOnt": 13302, + "ĠCyprus": 13303, + "Ġrelying": 13304, + "Ġpiano": 13305, + "Ġcure": 13306, + "Ġcontinuously": 13307, + "ĠNobody": 13308, + "ĠBund": 13309, + "osis": 13310, + "ĠAurora": 13311, + "ĠBach": 13312, + "ĠKendall": 13313, + "Ġechoed": 13314, + "iable": 13315, + "Ġconscious": 13316, + "Ġmonster": 13317, + "omo": 13318, + "proof": 13319, + "ĠNate": 13320, + "Ġfilmmaker": 13321, + "ĠNaj": 13322, + "Ġvendor": 13323, + "ĠFoot": 13324, + "ĠChang": 13325, + "ĠFest": 13326, + "Ġselfie": 13327, + "Ġenters": 13328, + "ĠConor": 13329, + "ĠMosul": 13330, + "ĠWHAT": 13331, + "Ġwa": 13332, + "ĠGamb": 13333, + "osta": 13334, + "Ġcautioned": 13335, + "ĠTucker": 13336, + "ĠAirways": 13337, + "Ġvisitor": 13338, + "Ġ·": 13339, + "ĠRevolution": 13340, + "aching": 13341, + "Ġearliest": 13342, + "ĠQuality": 13343, + "Ġshorts": 13344, + "ube": 13345, + "ĠOperation": 13346, + "ĠSabha": 13347, + "Ġstrengths": 13348, + "ikes": 13349, + "Ġsexy": 13350, + "Ġrot": 13351, + "ibles": 13352, + "Ġcolours": 13353, + "THE": 13354, + "ailed": 13355, + "Ġwoke": 13356, + "ĠEmbassy": 13357, + "Ġinfamous": 13358, + "rov": 13359, + "State": 13360, + "âĢ¦.": 13361, + "Ġpond": 13362, + "Ġcapt": 13363, + "fore": 13364, + "De": 13365, + "Ġedited": 13366, + "self": 13367, + "Hey": 13368, + "Ġportrait": 13369, + "ĠManufact": 13370, + "ĠStand": 13371, + "Ġcontenders": 13372, + "':": 13373, + "acker": 13374, + "Ġwithdrawn": 13375, + "ĠBraves": 13376, + "ĠHosp": 13377, + "changing": 13378, + "ĠBag": 13379, + "Ġadjustment": 13380, + "ĠCousins": 13381, + "ĠAAP": 13382, + "Ġfi": 13383, + "Ġoutdoors": 13384, + "Ġlacked": 13385, + "BM": 13386, + "ĠWHO": 13387, + "ĠPST": 13388, + "ĠLuck": 13389, + "Ġassisting": 13390, + "ĠGround": 13391, + "ĠTeen": 13392, + "ĠOle": 13393, + "Ġembarrassing": 13394, + "ĠWalt": 13395, + "ĠVision": 13396, + "ĠFal": 13397, + "ĠZoo": 13398, + "ĠWorth": 13399, + "ĠFloyd": 13400, + "ĠGujarat": 13401, + "Ġtipped": 13402, + "Ġfam": 13403, + "ĠDad": 13404, + "Ġworship": 13405, + "Ġtyre": 13406, + "Ġrebuilding": 13407, + "Ġqualities": 13408, + "ĠLives": 13409, + "Ġbeats": 13410, + 
"Ġ450": 13411, + "Ġexisted": 13412, + "ĠGeorg": 13413, + "Ġpoured": 13414, + "rows": 13415, + "ĠOx": 13416, + "ĠSid": 13417, + "Ġmac": 13418, + "Ġteaches": 13419, + "ĠEli": 13420, + "alla": 13421, + "Ġdownside": 13422, + "ĠBend": 13423, + "non": 13424, + "ĠArmenia": 13425, + "Ġcultures": 13426, + "ĠMae": 13427, + "Ġduration": 13428, + "ĠAthletics": 13429, + "Ġjuvenile": 13430, + "Ġlid": 13431, + "Ġbankers": 13432, + "Ġoverview": 13433, + "wy": 13434, + "Ġorbit": 13435, + "Vs": 13436, + "because": 13437, + "Ps": 13438, + "ĠFran": 13439, + "Ġtouring": 13440, + "Ġwary": 13441, + "Ġ106": 13442, + "Ġlaser": 13443, + "ĠVij": 13444, + "âĦ¢": 13445, + "Ġsurrender": 13446, + "press": 13447, + "rees": 13448, + "NO": 13449, + "ĠShortly": 13450, + "ĠKor": 13451, + "edu": 13452, + "Ġhatred": 13453, + "Ġtee": 13454, + "Ġfamously": 13455, + "Ġkeeper": 13456, + "ND": 13457, + "Ġreduces": 13458, + "HC": 13459, + "Ġhay": 13460, + "Ġunnamed": 13461, + "ĠTes": 13462, + "Ġattackers": 13463, + "ĠFew": 13464, + "ĠRichards": 13465, + "Ġ1968": 13466, + "Ġspeeches": 13467, + "Ġcybersecurity": 13468, + "ĠInfrastructure": 13469, + "Ġ07": 13470, + "ENCE": 13471, + "uties": 13472, + "Ġanxious": 13473, + "ĠGang": 13474, + "Ġannouncements": 13475, + "lette": 13476, + "oret": 13477, + "ĠRockies": 13478, + "ĠEmployees": 13479, + "ĠThrones": 13480, + "Ġhugely": 13481, + "Ġclin": 13482, + "ĠHob": 13483, + "Ġfraction": 13484, + "ĠOfficial": 13485, + "ĠMariners": 13486, + "ĠElse": 13487, + "Ġsanctuary": 13488, + "ĠPhotograph": 13489, + "Ġreopen": 13490, + "lf": 13491, + "hm": 13492, + "vest": 13493, + "Ġspeeding": 13494, + "Ġtooth": 13495, + "ĠShi": 13496, + "ĠTitle": 13497, + "ĠMes": 13498, + "ĠJobs": 13499, + "fair": 13500, + "ĠDanish": 13501, + "ĠMalik": 13502, + "Ġlaughed": 13503, + "Ġnavy": 13504, + "ĠActress": 13505, + "ĠWilliamson": 13506, + "overs": 13507, + "Ġreckless": 13508, + "Ġjo": 13509, + "otic": 13510, + "Ġassaulting": 13511, + "Ġpri": 13512, + "ĠPi": 13513, + "Ġlesser": 13514, + "Ġtit": 13515, + "Ġdat": 13516, + "Ġnail": 13517, + "ĠMarathon": 13518, + "ĠGren": 13519, + "ĠDol": 13520, + "Ġjointly": 13521, + "Ġamended": 13522, + "mine": 13523, + "ĠBashar": 13524, + "ĠHyundai": 13525, + "Ġuncovered": 13526, + "Ġeducated": 13527, + "atti": 13528, + "pres": 13529, + "ĠBRE": 13530, + "Ġya": 13531, + "Bank": 13532, + "odd": 13533, + "lit": 13534, + "ĠLinks": 13535, + "Ġswitching": 13536, + "itte": 13537, + "ĠSind": 13538, + "erved": 13539, + "Ġ**": 13540, + "Ġpositively": 13541, + "Ġfrankly": 13542, + "Ġrevenge": 13543, + "ĠTrinity": 13544, + "ĠCDC": 13545, + "Ġthreatens": 13546, + "Ġhammer": 13547, + "NET": 13548, + "ĠMut": 13549, + "Ġsy": 13550, + "Ġunidentified": 13551, + "icken": 13552, + "Ġdrills": 13553, + "Ġtense": 13554, + "Ġforeigners": 13555, + "OST": 13556, + "Ġethical": 13557, + "ĠDurham": 13558, + "ĠQual": 13559, + "Ġterritories": 13560, + "Ġid": 13561, + "hor": 13562, + "enders": 13563, + "Mc": 13564, + "OV": 13565, + "percent": 13566, + "Ġdom": 13567, + "Ġupward": 13568, + "Ġamb": 13569, + "Ġvisas": 13570, + "zan": 13571, + "Ãĥ": 13572, + "Ġundocumented": 13573, + "Ġsuburbs": 13574, + "Ġhydro": 13575, + "ĠJob": 13576, + "ĠAdelaide": 13577, + "oya": 13578, + "ĠSR": 13579, + "ĠMick": 13580, + "Ġconsolidation": 13581, + "Ġemotionally": 13582, + "ĠHop": 13583, + "Her": 13584, + "Ġloses": 13585, + "ĠMoto": 13586, + "eled": 13587, + "Ġregulated": 13588, + "ental": 13589, + "Ġencountered": 13590, + "Ġhop": 13591, + "ĠTrafford": 13592, + "Ġsticks": 13593, + "Ġveto": 13594, + "Ġexpose": 13595, + "Ġstretched": 
13596, + "fin": 13597, + "inance": 13598, + "chair": 13599, + "ĠGareth": 13600, + "ĠPil": 13601, + "ĠHammond": 13602, + "Ġserial": 13603, + "omy": 13604, + "Ġcellphone": 13605, + "ĠClara": 13606, + "Ġreacted": 13607, + "ĠNic": 13608, + "ĠHomes": 13609, + "ĠBroadcasting": 13610, + "ĠFut": 13611, + "ĠSupply": 13612, + "assing": 13613, + "ĠNewman": 13614, + "Ġcharitable": 13615, + "ĠClayton": 13616, + "Ġsovereignty": 13617, + "Ġconvincing": 13618, + "ĠPrincipal": 13619, + "ĠHigher": 13620, + "ĠCut": 13621, + "ĠCarrie": 13622, + "ĠSpot": 13623, + "Sometimes": 13624, + "ĠJar": 13625, + "ĠConsider": 13626, + "ieu": 13627, + "Ġrefinery": 13628, + "Ġbloody": 13629, + "wheel": 13630, + "Ġcryptocurrencies": 13631, + "Fund": 13632, + "ĠSunderland": 13633, + "ĠEvents": 13634, + "âĢĭ": 13635, + "Ġaccidentally": 13636, + "deep": 13637, + "Ġfranc": 13638, + "bec": 13639, + "ĠHartford": 13640, + "Ġstellar": 13641, + "wright": 13642, + "kick": 13643, + "UG": 13644, + "ĠBeast": 13645, + "Ġrefusal": 13646, + "ĠRoberto": 13647, + "ĠDixon": 13648, + "ĠDiane": 13649, + "name": 13650, + "asts": 13651, + "ĠCharter": 13652, + "Ġfueled": 13653, + "Ġcontents": 13654, + "Ġaccessing": 13655, + "Ġtroubles": 13656, + "Ġtops": 13657, + "Ġdebuted": 13658, + "icating": 13659, + "Ġinvestigator": 13660, + "Ġsubscribing": 13661, + "Ġcoordinated": 13662, + "ĠFil": 13663, + "six": 13664, + "teen": 13665, + "Ġwithdrew": 13666, + "ĠGilbert": 13667, + "Ġ1983": 13668, + "arsity": 13669, + "Ġimagination": 13670, + "Ġhandgun": 13671, + "ĠAlibaba": 13672, + "Ġbug": 13673, + "Ġ107": 13674, + "ĠCOMP": 13675, + "ĠSomething": 13676, + "Ġreliability": 13677, + "ĠFCC": 13678, + "ĠFowler": 13679, + "Ġsingled": 13680, + "nom": 13681, + "Ġknocking": 13682, + "Ġmeddling": 13683, + "Ġdetermining": 13684, + "reports": 13685, + "Ġshade": 13686, + "ĠSN": 13687, + "anto": 13688, + "Ġcomplaining": 13689, + "ĠNan": 13690, + "WS": 13691, + "Ġyoungsters": 13692, + "Il": 13693, + "ĠKaw": 13694, + "ĠProp": 13695, + "ĠCell": 13696, + "ĠHurricanes": 13697, + "Ġpublicity": 13698, + "ĠXin": 13699, + "rial": 13700, + "ICO": 13701, + "Ġsupervision": 13702, + "ĠSpotify": 13703, + "ĠNewport": 13704, + "Ġprince": 13705, + "anche": 13706, + "Ġsubscriber": 13707, + "ĠVic": 13708, + "ACT": 13709, + "ĠRaf": 13710, + "ĠActing": 13711, + "Ġcollusion": 13712, + "pet": 13713, + "isl": 13714, + "Ġcommerce": 13715, + "Health": 13716, + "ĠAbraham": 13717, + "pri": 13718, + "Ġlightweight": 13719, + "Ġinsurer": 13720, + "Like": 13721, + "Ġhelmet": 13722, + "Ġevac": 13723, + "look": 13724, + "ĠNaval": 13725, + "160": 13726, + "ĠFleet": 13727, + "vol": 13728, + "Ġexpired": 13729, + "ĠKlein": 13730, + "ĠEmmy": 13731, + "ABLE": 13732, + "ĠMorocco": 13733, + "ĠTrip": 13734, + "uted": 13735, + "Ġnos": 13736, + "ĠVista": 13737, + "mas": 13738, + "ĠRocky": 13739, + "ĠFlint": 13740, + "enberg": 13741, + "ĠBrow": 13742, + "Ġsignatures": 13743, + "Ġpolar": 13744, + "ajo": 13745, + "Ġendorsement": 13746, + "Ġreservations": 13747, + "LIN": 13748, + "anny": 13749, + "elli": 13750, + "last": 13751, + "Ġoversee": 13752, + "cm": 13753, + "ĠOilers": 13754, + "Are": 13755, + "Ġjudiciary": 13756, + "onte": 13757, + "ĠTrack": 13758, + "Ġsupervisor": 13759, + "erk": 13760, + "isher": 13761, + "Ġintact": 13762, + "Ġslid": 13763, + "icals": 13764, + "paid": 13765, + "ĠMAR": 13766, + "lement": 13767, + "ĠLiu": 13768, + "ĠLarge": 13769, + "ĠWings": 13770, + "pect": 13771, + "ĠRum": 13772, + "Ġanalyzed": 13773, + "Ġemploys": 13774, + "arte": 13775, + "ims": 13776, + "ĠEventually": 13777, + 
"Ġaffiliated": 13778, + "Ġhospitality": 13779, + "ĠSprint": 13780, + "Ġresolutions": 13781, + "Ġliquor": 13782, + "ĠNAFTA": 13783, + "ANY": 13784, + "Ġradiation": 13785, + "ĠProv": 13786, + "Ġpause": 13787, + "ĠTMZ": 13788, + "Ġelbow": 13789, + "Ġresilience": 13790, + "ĠParents": 13791, + "mus": 13792, + "ĠSafe": 13793, + "Ġinterpretation": 13794, + "Ġraced": 13795, + "IND": 13796, + "KR": 13797, + "Ġhinted": 13798, + "ĠErin": 13799, + "ĠBahrain": 13800, + "Ġcredentials": 13801, + "eless": 13802, + "Ġprocurement": 13803, + "ĠWebb": 13804, + "ĠLowe": 13805, + "ĠNak": 13806, + "ĠLearning": 13807, + "zh": 13808, + "Ġdipped": 13809, + "ĠSuite": 13810, + "Ġmisdemeanor": 13811, + "ALE": 13812, + "Ġstrengthened": 13813, + "ĠSophie": 13814, + "Ġconfirms": 13815, + "Ġrac": 13816, + "gey": 13817, + "Ġshootout": 13818, + "Ġble": 13819, + "Ġcircles": 13820, + "ĠChef": 13821, + "Ġcomprised": 13822, + "ĠSantiago": 13823, + "Ġfeud": 13824, + "beat": 13825, + "Ġstaffers": 13826, + "Ġacute": 13827, + "ski": 13828, + "Ġpolled": 13829, + "ĠKur": 13830, + "ĠJen": 13831, + "ĠUltimately": 13832, + "anded": 13833, + "ĠHoney": 13834, + "Ġannounces": 13835, + "Ġamateur": 13836, + "around": 13837, + "Ġfunctioning": 13838, + "group": 13839, + "ĠSqu": 13840, + "Where": 13841, + "Ġvoid": 13842, + "ĠSandra": 13843, + "isers": 13844, + "Ġhelicopters": 13845, + "ĠGym": 13846, + "ĠWol": 13847, + "mouth": 13848, + "Ġsubjected": 13849, + "ici": 13850, + "ually": 13851, + "ĠWash": 13852, + "ĠLindsay": 13853, + "ĠVers": 13854, + "Ġjumps": 13855, + "Ġneglect": 13856, + "ĠKuwait": 13857, + "fund": 13858, + "ĭ": 13859, + "ather": 13860, + "lly": 13861, + "ei": 13862, + "Although": 13863, + ".''": 13864, + "Ġunhappy": 13865, + "Ġpills": 13866, + "Ġmagical": 13867, + "Ġdro": 13868, + "Ġinviting": 13869, + "ĠJohnston": 13870, + "oving": 13871, + "450": 13872, + "ĠMerc": 13873, + "Ġadmitting": 13874, + "Ġinsisting": 13875, + "ĠCru": 13876, + "ĠResource": 13877, + "oir": 13878, + "Ġcomplexity": 13879, + "ĠRoth": 13880, + "ĠCher": 13881, + "July": 13882, + "raf": 13883, + "Ġaggregate": 13884, + "Ġhelm": 13885, + "uclear": 13886, + "olan": 13887, + "Ġoffenses": 13888, + "ĠWolves": 13889, + "ĠFu": 13890, + "ĠPierce": 13891, + "Ġemailed": 13892, + "ĠStra": 13893, + "Ġpedestrians": 13894, + "ĠER": 13895, + "ĠConway": 13896, + "Ġblowing": 13897, + "CLOSE": 13898, + "hab": 13899, + "ĠGreene": 13900, + "Ġconfessed": 13901, + "ĠTorres": 13902, + "ĠHolocaust": 13903, + "Ġrepay": 13904, + "Ġdemonstrates": 13905, + "ĠPool": 13906, + "gent": 13907, + "Ġdeleted": 13908, + "Ġ$$": 13909, + "ĠSO": 13910, + "Ġdri": 13911, + "ĠNeg": 13912, + "ĠVP": 13913, + "ĠPF": 13914, + "ĠPrep": 13915, + "Ġorganizing": 13916, + "icker": 13917, + "Ġmanufactured": 13918, + "enson": 13919, + "adas": 13920, + "Ġwines": 13921, + "Ġmachinery": 13922, + "Ġspecialists": 13923, + "ĠDetective": 13924, + "ĠDL": 13925, + "Op": 13926, + "Ġquicker": 13927, + "ĠPenguins": 13928, + "Engine": 13929, + "zone": 13930, + "Ġsequence": 13931, + "ĠLost": 13932, + "Ġwarmer": 13933, + "ĠEthiopia": 13934, + "Ġaffirmed": 13935, + "fest": 13936, + "resses": 13937, + "Ġsoap": 13938, + "Ġbooth": 13939, + "Ġnotorious": 13940, + "amin": 13941, + "Ġpursued": 13942, + "ĠCer": 13943, + "ĠSB": 13944, + "Ġlivestock": 13945, + "Ġtrace": 13946, + "Ġrespects": 13947, + "arden": 13948, + "April": 13949, + "Ġ128": 13950, + "ĠSaid": 13951, + "ennial": 13952, + "Ġnamely": 13953, + "ĠBot": 13954, + "Ġ108": 13955, + "ĠLem": 13956, + "nell": 13957, + "Ġconfirming": 13958, + "Ġlogged": 13959, + "Ġprofound": 
13960, + "elo": 13961, + "ĠChambers": 13962, + "RT": 13963, + "Ġnewer": 13964, + "Ġsideline": 13965, + "ĠCardinal": 13966, + "este": 13967, + "Ġnarrowly": 13968, + "Ġcompromised": 13969, + "Ġpolicing": 13970, + "Ġporn": 13971, + "Ġarc": 13972, + "Ġlearnt": 13973, + "INE": 13974, + "step": 13975, + "ĠDomin": 13976, + "Ġwaist": 13977, + "Ġboycott": 13978, + "mitted": 13979, + "iffs": 13980, + "ground": 13981, + "ĠMaterials": 13982, + "Ġceasefire": 13983, + "Right": 13984, + "ĠZen": 13985, + "estyle": 13986, + "Thank": 13987, + "ĠOnePlus": 13988, + "ĠMLS": 13989, + "Ġconstituents": 13990, + "oster": 13991, + "ĠProsecutor": 13992, + "Ġpriorit": 13993, + "ĠDebbie": 13994, + "ĠExpand": 13995, + "uv": 13996, + "Ġintegrate": 13997, + "Ġimmun": 13998, + "Ġdisciplinary": 13999, + "ĠImm": 14000, + "Ġja": 14001, + "Ġgardens": 14002, + "ĠHim": 14003, + "obe": 14004, + "Ġhitter": 14005, + "Ġbullets": 14006, + "Ġevolving": 14007, + "ĠScientists": 14008, + "Michael": 14009, + "ĠDO": 14010, + "Ġunbelievable": 14011, + "Ġlooming": 14012, + "Ġdownturn": 14013, + "Ġmentality": 14014, + "Ġreopened": 14015, + "Ġash": 14016, + "ĠChapman": 14017, + "Ġloop": 14018, + "ĠUT": 14019, + "ĠTier": 14020, + "Ġunaware": 14021, + "Ġgratitude": 14022, + "Ġperforms": 14023, + "olk": 14024, + "Ġ\"(": 14025, + "Ġlacks": 14026, + "Ġinstructed": 14027, + "ĠRecreation": 14028, + "sample": 14029, + "Ġrequesting": 14030, + "Canada": 14031, + "Ġsupposedly": 14032, + "ĠHardy": 14033, + "Ġholder": 14034, + "change": 14035, + "ĠDominic": 14036, + "ĠXavier": 14037, + "Ġlig": 14038, + "Ġcandid": 14039, + "ĠRab": 14040, + "Ġconferences": 14041, + "ĠBurton": 14042, + "Dr": 14043, + "Ġmunicipalities": 14044, + "Ġcrushed": 14045, + "Ġseekers": 14046, + "ĠCitizens": 14047, + "Ġheightened": 14048, + "ĠCasino": 14049, + "Ġdesktop": 14050, + "Ġwhoever": 14051, + "ĠImpact": 14052, + "Ġcocktail": 14053, + "Ġphilanthrop": 14054, + "ĠSAN": 14055, + "ĠPreston": 14056, + "Ġobesity": 14057, + "Ġrestrict": 14058, + "ĠKab": 14059, + "ĠProvidence": 14060, + "Ġscar": 14061, + "ĠChart": 14062, + "Ġbosses": 14063, + "ĠRate": 14064, + "Ġsav": 14065, + "pay": 14066, + "Ġtransplant": 14067, + "ĠNoble": 14068, + "child": 14069, + "Ġconclusions": 14070, + "FI": 14071, + "Ġsack": 14072, + "Ġexperimental": 14073, + "holder": 14074, + "oca": 14075, + "herty": 14076, + "ĠMT": 14077, + "Ġcatcher": 14078, + "LY": 14079, + "Ġgrams": 14080, + "reet": 14081, + "Ġadaptation": 14082, + "Ġhumble": 14083, + "Ġbot": 14084, + "Ġidentical": 14085, + "ication": 14086, + "ifer": 14087, + "ĠCrow": 14088, + "Ġregain": 14089, + "ĠLightning": 14090, + "Ġkg": 14091, + "Ġcomposed": 14092, + "Ġcorrespondent": 14093, + "Ġreunion": 14094, + "Ġobserve": 14095, + "Ġcomprising": 14096, + "Ġimpeachment": 14097, + "Ġresh": 14098, + "Ġlemon": 14099, + "ĠSnap": 14100, + "Ġproprietary": 14101, + "een": 14102, + "ourt": 14103, + "Ġdetective": 14104, + "Ġlabels": 14105, + "Ġcorridor": 14106, + "ĠClinic": 14107, + "Ġarra": 14108, + "ĠPearl": 14109, + "Ġinformal": 14110, + "ĠUnd": 14111, + "ĠVenezuelan": 14112, + "Ġpeninsula": 14113, + "Ġdefeating": 14114, + "Ġsyndrome": 14115, + "iere": 14116, + "Ġspite": 14117, + "bag": 14118, + "aran": 14119, + "Ġspecialized": 14120, + "ĠAA": 14121, + "ĠLyn": 14122, + "Ġinstrumental": 14123, + "Smith": 14124, + "Ġpivotal": 14125, + "Ġnightclub": 14126, + "ĠCob": 14127, + "Ġcolorful": 14128, + "Ġartwork": 14129, + "Ġ1981": 14130, + "Ġdawn": 14131, + "erville": 14132, + "uated": 14133, + "ief": 14134, + "Ġlinking": 14135, + "ĠOw": 14136, + "Ġappreci": 14137, + 
"Ġreductions": 14138, + "elling": 14139, + "Ġsalmon": 14140, + "bb": 14141, + "ĠPhillip": 14142, + "yle": 14143, + "Ġassure": 14144, + "Ġdiscretion": 14145, + "Ġefficiently": 14146, + "ĠMau": 14147, + "abil": 14148, + "Ġintentionally": 14149, + "Ġactivated": 14150, + "Ġimmense": 14151, + "ĠStrategic": 14152, + "Ġcheating": 14153, + "ĠTrend": 14154, + "ĠSamantha": 14155, + "Ġcomple": 14156, + "Ġhack": 14157, + "ĠSerie": 14158, + "ĠText": 14159, + "Ġstylish": 14160, + "ĠFaith": 14161, + "ĠGST": 14162, + "Ġexterior": 14163, + "Ġblessing": 14164, + "Ġblanket": 14165, + "Ġcooked": 14166, + "Ġretaliation": 14167, + "Ġtro": 14168, + "Ġshelves": 14169, + "rose": 14170, + "ĠGram": 14171, + "Ġsho": 14172, + "ĠArgentine": 14173, + "Ġclerk": 14174, + "specific": 14175, + "Ġagreeing": 14176, + "Ġstandout": 14177, + "black": 14178, + "Ġtrending": 14179, + "Ġviolate": 14180, + "Get": 14181, + "ño": 14182, + "ĠOpt": 14183, + "ĠFrankfurt": 14184, + "ĠFranco": 14185, + "eness": 14186, + "Ġlining": 14187, + "Ġzoo": 14188, + "oil": 14189, + "lia": 14190, + "rab": 14191, + "Ġorganize": 14192, + "Ġwoods": 14193, + "Ġscan": 14194, + "Ġurgency": 14195, + "Ġoccurring": 14196, + "Ġreliance": 14197, + "Ġconcepts": 14198, + "Ġeligibility": 14199, + "0000": 14200, + "ĠBrief": 14201, + "Ġabusive": 14202, + "ĠBench": 14203, + "Ġrub": 14204, + "ĠDil": 14205, + "Ġmount": 14206, + "Ġmaturity": 14207, + "ĠNut": 14208, + "nee": 14209, + "enc": 14210, + "Ġgunfire": 14211, + "ĠKill": 14212, + "Ġgates": 14213, + "Ġflower": 14214, + "iol": 14215, + "Ġshaped": 14216, + "Ġundoubtedly": 14217, + "Ġbackgrounds": 14218, + "ĠComplex": 14219, + "\":{\"": 14220, + "Ġnaming": 14221, + "Ġmonument": 14222, + "Ġoh": 14223, + "Ġembedded": 14224, + "Ġbang": 14225, + "ĠKro": 14226, + "Ġaggression": 14227, + "ĠMits": 14228, + "During": 14229, + "ĠEp": 14230, + "iners": 14231, + "ĠAnaheim": 14232, + "Ġrom": 14233, + "Ġoutgoing": 14234, + "Ġfulfill": 14235, + "Ġreminds": 14236, + "Ġren": 14237, + "à¤": 14238, + "ĠSue": 14239, + "Ġrefresh": 14240, + "Ġlif": 14241, + "Ġfil": 14242, + "ĠLead": 14243, + "Ġregulate": 14244, + "ĠTeachers": 14245, + "Ġclarify": 14246, + "obs": 14247, + "Ġblasted": 14248, + "ĠAx": 14249, + "Ġflavors": 14250, + "Ġmega": 14251, + "Ġhurdles": 14252, + "Ġinspector": 14253, + "ĠSalvador": 14254, + "Ġprescribed": 14255, + "Ġrenovation": 14256, + "OUR": 14257, + "Ġutil": 14258, + "ĠBradford": 14259, + "Ġwasted": 14260, + "Ġlineman": 14261, + "Ġpalm": 14262, + "icate": 14263, + "Ġoverseeing": 14264, + "otted": 14265, + "ĠRapids": 14266, + "Ġjustified": 14267, + "aby": 14268, + "Ġextends": 14269, + "Ġoath": 14270, + "bow": 14271, + "ĠRivera": 14272, + "Jan": 14273, + "ĠImran": 14274, + "Ġforests": 14275, + "ĠShel": 14276, + "ĠBrun": 14277, + "Ġaerial": 14278, + "ĠNOW": 14279, + "PAR": 14280, + "Ġbeverages": 14281, + "ettel": 14282, + "Ġfragile": 14283, + "Ġcodes": 14284, + "Į": 14285, + "abel": 14286, + "Watch": 14287, + "road": 14288, + "Ġdismissal": 14289, + "ĠRosa": 14290, + "Ġcrunch": 14291, + "²": 14292, + "Ġinnovations": 14293, + "Ġhabitat": 14294, + "Ġforefront": 14295, + "ĠKoch": 14296, + "ĠChevrolet": 14297, + "Ġwheelchair": 14298, + "Ġconsiderably": 14299, + "Ġexpenditures": 14300, + "Ġtexts": 14301, + "Ġprompt": 14302, + "Ġskating": 14303, + "Ġpetroleum": 14304, + "ĠICC": 14305, + "Ġvit": 14306, + "fit": 14307, + "Ġprolonged": 14308, + "ĠLucy": 14309, + "Ġcho": 14310, + "Ġrocked": 14311, + "ĠBrom": 14312, + "Ġfreed": 14313, + "Ġyours": 14314, + "ĠEden": 14315, + "Ġmonitored": 14316, + "asted": 14317, + "Ġoversees": 
14318, + "ieri": 14319, + "Ġideology": 14320, + "ĠFine": 14321, + "tering": 14322, + "Top": 14323, + "Ġdamp": 14324, + "uta": 14325, + "Ġlethal": 14326, + "Ġpurple": 14327, + "udge": 14328, + "ĠChemical": 14329, + "ĠPetersburg": 14330, + "Ġwarns": 14331, + "Ġcollectively": 14332, + "Ġâ": 14333, + "Ġplaintiffs": 14334, + "ĠBoris": 14335, + "Ġsheep": 14336, + "oves": 14337, + "ĠAuthor": 14338, + "Ġcampuses": 14339, + "Ġdestroying": 14340, + "Ġgloves": 14341, + "Ġcease": 14342, + "Ġdelegates": 14343, + "Ġpreceded": 14344, + "realDonaldTrump": 14345, + "Ġforwards": 14346, + "erton": 14347, + "ĠBuzzFeed": 14348, + "Ġoccupation": 14349, + "ĠLegion": 14350, + "Ġstir": 14351, + "Ġshale": 14352, + "Ġterrific": 14353, + "Ġnewborn": 14354, + "Ġstandoff": 14355, + "OWN": 14356, + "Ġmuscles": 14357, + "ĠHerman": 14358, + "ĠLiz": 14359, + "ĠExperience": 14360, + "ĠSuccess": 14361, + "ĠHispanic": 14362, + "ĠCCTV": 14363, + "Ġcomplement": 14364, + "ĠBing": 14365, + "Ġprem": 14366, + "ĠJohannes": 14367, + "Ġdent": 14368, + "itar": 14369, + "ĠHein": 14370, + "ĠNicola": 14371, + "Ġconcludes": 14372, + "ĠKhal": 14373, + "Ġparish": 14374, + "Ġshaking": 14375, + "ĠSchw": 14376, + "mod": 14377, + "ĠLil": 14378, + "ña": 14379, + "ĠBog": 14380, + "ĠFight": 14381, + "Ġgre": 14382, + "Ġfel": 14383, + "Ġheal": 14384, + "err": 14385, + "TM": 14386, + "airo": 14387, + "health": 14388, + "Ġswings": 14389, + "Ġtier": 14390, + "anka": 14391, + "ribune": 14392, + "emouth": 14393, + "ĠBloom": 14394, + "Ġowing": 14395, + "Tech": 14396, + "Ġdough": 14397, + "Ġbatch": 14398, + "ĠLion": 14399, + "ĠZamb": 14400, + "Ġcrashing": 14401, + "ĠXL": 14402, + "ppers": 14403, + "ĠDoctors": 14404, + "ĠSor": 14405, + "video": 14406, + "Ġcigarettes": 14407, + "ĠBoxing": 14408, + "Ġconstitute": 14409, + "Ġconcentrate": 14410, + "ĠArmenian": 14411, + "Ġsemester": 14412, + "position": 14413, + "emic": 14414, + "ĠNYC": 14415, + "ĠCampus": 14416, + "Ġalternate": 14417, + "Ġexped": 14418, + "Ġpublishers": 14419, + "2015": 14420, + "Ġunanimous": 14421, + "ĠPrevious": 14422, + "Ġwellness": 14423, + "ĠCreative": 14424, + "edy": 14425, + "AGE": 14426, + "ĠCavs": 14427, + "Ġ1978": 14428, + "Ġfu": 14429, + "ĠTata": 14430, + "ĠChoice": 14431, + "Ġwoes": 14432, + "ĠCable": 14433, + "Ġ~": 14434, + "ĠGem": 14435, + "Ġconsolidated": 14436, + "ĠManitoba": 14437, + "Cloud": 14438, + "Ġrounded": 14439, + "ĠVentura": 14440, + "Ġshark": 14441, + "Ġdresses": 14442, + "Ġtraction": 14443, + "eda": 14444, + "Ġdiv": 14445, + "Ġdental": 14446, + "Wh": 14447, + "ĠGig": 14448, + "ĠBoyd": 14449, + "ĠTransit": 14450, + "Ġtelevised": 14451, + "SON": 14452, + "ĠVince": 14453, + "Ġcloses": 14454, + "apt": 14455, + "ĠWheeler": 14456, + "ĠTyson": 14457, + "Ġforensic": 14458, + "Ġpunished": 14459, + "Ġseas": 14460, + "Ġnavigation": 14461, + "Ġprecedent": 14462, + "Ġextremist": 14463, + "Ġcomposite": 14464, + "PO": 14465, + "Ġsurvivor": 14466, + "ĠVale": 14467, + "gars": 14468, + "HT": 14469, + "ĠRiyadh": 14470, + "Ġrevival": 14471, + "ĠPayne": 14472, + "Ġcollaborative": 14473, + "ĠCustomers": 14474, + "ĠPf": 14475, + "Ġproves": 14476, + "erve": 14477, + "Ġelev": 14478, + "ĠPaper": 14479, + "Ġchore": 14480, + "Ġthriller": 14481, + "Ġstraw": 14482, + "cock": 14483, + "Gu": 14484, + "Ġaligned": 14485, + "ĠChronicle": 14486, + "Ġshouting": 14487, + "Ġ1976": 14488, + "Ġlightning": 14489, + "Ġworlds": 14490, + "ĠOpening": 14491, + "enton": 14492, + "ĠAna": 14493, + "ĠGol": 14494, + "ĠTechn": 14495, + "lis": 14496, + "Ġorientation": 14497, + "ĠArri": 14498, + "ĠPG": 14499, + "ross": 
14500, + "Ġsank": 14501, + "LOS": 14502, + "ĠAllison": 14503, + "Ġsmiles": 14504, + "USD": 14505, + "Ġkits": 14506, + "Bar": 14507, + "ĠBri": 14508, + "Ġounces": 14509, + "ĠNielsen": 14510, + "eno": 14511, + "Ġ109": 14512, + "Ġnorms": 14513, + "Ġskip": 14514, + "180": 14515, + "Ġmonitors": 14516, + "2012": 14517, + "Ġincorporate": 14518, + "Ġmechanisms": 14519, + "ĠHack": 14520, + "ĠBomb": 14521, + "ĠGavin": 14522, + "ĠNatalie": 14523, + "Ġdiscusses": 14524, + "Ġassembled": 14525, + "Ġcognitive": 14526, + "owner": 14527, + "Ġgenuinely": 14528, + "Ġdisappear": 14529, + "ĠAK": 14530, + "Ġstal": 14531, + "Ġsoup": 14532, + "ĠFinn": 14533, + "Ġcares": 14534, + "Ġfinest": 14535, + "Ġtuned": 14536, + "ende": 14537, + "ĠStefan": 14538, + "Ġaccompanying": 14539, + "î": 14540, + "Maybe": 14541, + "Ġoffender": 14542, + "TT": 14543, + "Ġ212": 14544, + "Ġvolleyball": 14545, + "needed": 14546, + "Ġquo": 14547, + "Ġdim": 14548, + "ĠHistorical": 14549, + "ĠLance": 14550, + "gmail": 14551, + "ĠGate": 14552, + "Ġdemonstrators": 14553, + "Ġdy": 14554, + "cia": 14555, + "ĠSteele": 14556, + "ĠJoan": 14557, + "ĠKerala": 14558, + "KA": 14559, + "ĠElectoral": 14560, + "Ġpaths": 14561, + "ø": 14562, + "Ne": 14563, + "Ġaccepts": 14564, + "Ġlowering": 14565, + "Ġportions": 14566, + "ĠValencia": 14567, + "Ġfestivals": 14568, + "Ġgeneric": 14569, + "usk": 14570, + "ĠVernon": 14571, + "ĠOrioles": 14572, + "Ġrenewal": 14573, + "Ġbelonged": 14574, + "Ġbreathe": 14575, + "Ġ220": 14576, + "Ġrecruited": 14577, + "Ġlogic": 14578, + "Ġrecreation": 14579, + "Ġverbal": 14580, + "ĠHaz": 14581, + "double": 14582, + "Ġfavourites": 14583, + "Ġfundamentals": 14584, + "ĠSoc": 14585, + "360": 14586, + "SO": 14587, + "Ġalerted": 14588, + "Ġbriefed": 14589, + "ĠBruno": 14590, + "Ġseating": 14591, + "Ġfreight": 14592, + "ĠAmer": 14593, + "Ġwished": 14594, + "table": 14595, + "growth": 14596, + "ĠWent": 14597, + "Ġhilarious": 14598, + "Ġthroat": 14599, + "bet": 14600, + "gon": 14601, + "Ġample": 14602, + "hee": 14603, + "ĠHood": 14604, + "ĠIceland": 14605, + "ĠAnkara": 14606, + "iang": 14607, + "Ġpracticing": 14608, + "azer": 14609, + "Ġleaf": 14610, + "Ġhottest": 14611, + "Ġmarginal": 14612, + "Ġrevelations": 14613, + "ĠPrices": 14614, + "ĠLar": 14615, + "times": 14616, + "Ġhandles": 14617, + "ĠNaz": 14618, + "Ġinstitute": 14619, + "Ġtranslate": 14620, + "ĠJP": 14621, + "Ġsoared": 14622, + "Ġconsume": 14623, + "ĠTap": 14624, + "ĠCelebrity": 14625, + "ĠMayweather": 14626, + "ĠOracle": 14627, + "Ġmor": 14628, + "ANA": 14629, + "Ġpaperwork": 14630, + "aste": 14631, + "Ġdil": 14632, + "Ġdecorated": 14633, + "Ġpromotional": 14634, + "ĠMerrill": 14635, + "Ġappliances": 14636, + "ĠCOP": 14637, + "Ġlips": 14638, + "ĠBrennan": 14639, + "ĠMile": 14640, + "ĠNetworks": 14641, + "ĠComment": 14642, + "ĠIb": 14643, + "ĠAgg": 14644, + "IDE": 14645, + "Ġinitiate": 14646, + "Ġknockout": 14647, + "Ġbargain": 14648, + "Ġaccordingly": 14649, + "bee": 14650, + "ĠGerald": 14651, + "Ġproblematic": 14652, + "Ġtrap": 14653, + "Ġfinalists": 14654, + "addy": 14655, + "would": 14656, + "Ġstrictly": 14657, + "ĠRamsey": 14658, + "Ġdownward": 14659, + "Ġextract": 14660, + "Ġfamed": 14661, + "ĠOUT": 14662, + "Ġinduct": 14663, + "ĠAuckland": 14664, + "Ġpoetry": 14665, + "mos": 14666, + "ĠGuinea": 14667, + "management": 14668, + "ohan": 14669, + "ĠGuide": 14670, + "aily": 14671, + "umping": 14672, + "Ġenacted": 14673, + "ĠEye": 14674, + "vision": 14675, + "umi": 14676, + "aped": 14677, + "Ġbicycle": 14678, + "ĠHouth": 14679, + "ĠNAS": 14680, + "Ġtapped": 14681, + "wer": 
14682, + "otti": 14683, + "EA": 14684, + "Ġsurprises": 14685, + "ĠUpdate": 14686, + "ĠPun": 14687, + "ĠMiz": 14688, + "ĠOro": 14689, + "Ġcostumes": 14690, + "title": 14691, + "Ġsurviving": 14692, + "According": 14693, + "themed": 14694, + "ĠPeoples": 14695, + "Se": 14696, + "Ġassociations": 14697, + "hett": 14698, + "Time": 14699, + "Ġessay": 14700, + "Ġmu": 14701, + "ĠScore": 14702, + "ĠSpani": 14703, + "ĠSEE": 14704, + "Ġmales": 14705, + "Ġrage": 14706, + "EU": 14707, + "ĠYellow": 14708, + "rupt": 14709, + "Ġapparel": 14710, + "Ġsweat": 14711, + "Ġnearest": 14712, + "zman": 14713, + "Ġanticipation": 14714, + "Ġinjuring": 14715, + "Ġousted": 14716, + "chan": 14717, + "ĠAlert": 14718, + "Ġber": 14719, + "atal": 14720, + "Com": 14721, + "Ġ04": 14722, + "Ġafterward": 14723, + "edge": 14724, + "ĠBooker": 14725, + "lex": 14726, + "ĠWhole": 14727, + "Ġtoughest": 14728, + "ĠMaharashtra": 14729, + "lier": 14730, + "ĠTennis": 14731, + "Ġhandy": 14732, + "ĠMetal": 14733, + "ĠiTunes": 14734, + "ĠDiscovery": 14735, + "Ġcompassion": 14736, + "ĠLIVE": 14737, + "Ġeconomically": 14738, + "Ġendangered": 14739, + "GO": 14740, + "Ġmound": 14741, + "word": 14742, + "ĠTouch": 14743, + "ogo": 14744, + "Ġincomes": 14745, + "when": 14746, + "ĠAside": 14747, + "Ġscandals": 14748, + "Ġfunctionality": 14749, + "ĠAer": 14750, + "Ġcouncils": 14751, + "Ġdenial": 14752, + "140": 14753, + "Ġimplied": 14754, + "Ġoutfits": 14755, + "Ġsuited": 14756, + "Ġ1973": 14757, + "ĠPizza": 14758, + "Ġdebates": 14759, + "record": 14760, + "Ġhype": 14761, + "ĠRus": 14762, + "ĠRobbie": 14763, + "Ġtouted": 14764, + "ĠSharp": 14765, + "Ġbeings": 14766, + "Ġslavery": 14767, + "encies": 14768, + "ĠRooney": 14769, + "Ġnan": 14770, + "Ġraids": 14771, + "Ġinstructor": 14772, + "Market": 14773, + "Ġshook": 14774, + "Ġdeliberate": 14775, + "ĠNorthwestern": 14776, + "ĠEss": 14777, + "Ġwhatsoever": 14778, + "ĠConfederate": 14779, + "YS": 14780, + "ĠCameroon": 14781, + "ĠFlip": 14782, + "Yeah": 14783, + "Ġwashing": 14784, + "mand": 14785, + "ĠLex": 14786, + "Ġissuance": 14787, + "Ġniche": 14788, + "Ġfold": 14789, + "ĠWendy": 14790, + "Ġhy": 14791, + "Ġbucket": 14792, + "ĠVW": 14793, + "ĠCairo": 14794, + "ĠSK": 14795, + "ĠKang": 14796, + "Ġintake": 14797, + "Ġhills": 14798, + "anz": 14799, + "©": 14800, + "ugu": 14801, + "ĠFortunately": 14802, + "ĠMarqu": 14803, + "Ġimprisonment": 14804, + "oking": 14805, + "Ġdistributors": 14806, + "zie": 14807, + "Ġstip": 14808, + "ĠWire": 14809, + "Ġcouncillors": 14810, + "Ġsue": 14811, + "ĠRegardless": 14812, + "ĠEnc": 14813, + "Ġbaking": 14814, + "ĠVenture": 14815, + "Ġintriguing": 14816, + "Ġupheld": 14817, + "ĠActive": 14818, + "Ġgenes": 14819, + "ĠDawson": 14820, + "ĠPreviously": 14821, + "ĠRac": 14822, + "Ġmetric": 14823, + "Files": 14824, + "ĠiPhones": 14825, + "ĠWelcome": 14826, + "Ġburns": 14827, + "ĠScreen": 14828, + "ashes": 14829, + "ĠApr": 14830, + "Ġtheories": 14831, + "san": 14832, + "ĠRenault": 14833, + "ĠSinger": 14834, + "Ġfounders": 14835, + "Russian": 14836, + "ĠBelfast": 14837, + "Ġimagined": 14838, + "ĠPlanet": 14839, + "ĠCatalan": 14840, + "ĠRochester": 14841, + "Ġevolve": 14842, + "ĠOT": 14843, + "Ġpassword": 14844, + "Ġhomelessness": 14845, + "Ġbacklog": 14846, + "Ġpresenter": 14847, + "Ġfal": 14848, + "ISH": 14849, + "ĠEM": 14850, + "icked": 14851, + "Ġunlock": 14852, + "city": 14853, + "Ġnegotiation": 14854, + "Ġdancers": 14855, + "dan": 14856, + "ĠCOL": 14857, + "VC": 14858, + "boat": 14859, + "Ġoverly": 14860, + "deal": 14861, + "lander": 14862, + "Ġdiss": 14863, + "ICS": 14864, + 
"Ġfifty": 14865, + "Ġowe": 14866, + "Ġprisons": 14867, + "ifications": 14868, + "wo": 14869, + "ĠAu": 14870, + "Ġapiece": 14871, + "ĠCourtney": 14872, + "Ġ1975": 14873, + "Ġsurpass": 14874, + "Ġidentities": 14875, + "Ġintegral": 14876, + "Ġdocumentation": 14877, + "Ġelegant": 14878, + "ĠIg": 14879, + "Ġdear": 14880, + "Ġ113": 14881, + "ĠGupta": 14882, + "Ġcontentious": 14883, + "rish": 14884, + "Ġclues": 14885, + "Ġadditions": 14886, + "Ġep": 14887, + "rus": 14888, + "Ġcentered": 14889, + "ĠPhillies": 14890, + "father": 14891, + "Ġborough": 14892, + "Ġbuttons": 14893, + "Ġdeported": 14894, + "ĠREC": 14895, + "ĠAlready": 14896, + "eh": 14897, + "hur": 14898, + "Ġupbeat": 14899, + "omen": 14900, + "Ġdetailing": 14901, + "Ġwr": 14902, + "Ġvaried": 14903, + "ĠEconomics": 14904, + "Ġensures": 14905, + "ĠCivic": 14906, + "Ġunpaid": 14907, + "sold": 14908, + "ĠHil": 14909, + "ĠMult": 14910, + "ĠRising": 14911, + "ĠMini": 14912, + "Ġneuro": 14913, + "Ġpenal": 14914, + "Ġneighbour": 14915, + "ĠChavez": 14916, + "Ġjew": 14917, + "ĠVIP": 14918, + "Connor": 14919, + "ĠTalking": 14920, + "Ġcorrection": 14921, + "Ġstandpoint": 14922, + "roads": 14923, + "ĠWool": 14924, + "Ġverification": 14925, + "Ġmic": 14926, + "olf": 14927, + "Ġexemption": 14928, + "Ġfilter": 14929, + "Ġballoon": 14930, + "leases": 14931, + "ician": 14932, + "ĠSpr": 14933, + "Ġtoe": 14934, + "Ġunconstitutional": 14935, + "Ġmanslaughter": 14936, + "Ġtossed": 14937, + "ĠMeg": 14938, + "ATIONS": 14939, + "ACK": 14940, + "ĠRouge": 14941, + "ĠHansen": 14942, + "ĠHook": 14943, + "Out": 14944, + "ĠHorse": 14945, + "ĠBath": 14946, + "ĠAlways": 14947, + "Ġincorporated": 14948, + "Ġconjunction": 14949, + "ĠFit": 14950, + "Ġexamining": 14951, + "Ġwallet": 14952, + "Ġensured": 14953, + "Ġacclaimed": 14954, + "ippers": 14955, + "Ġbeneficiaries": 14956, + "Ġunexpectedly": 14957, + "Ġexploit": 14958, + "ĠWillie": 14959, + "Ġcomb": 14960, + "ĠWalton": 14961, + "rica": 14962, + "icky": 14963, + "Ġate": 14964, + "ĠPadres": 14965, + "Ġrib": 14966, + "Ġsnacks": 14967, + "ĠFernandez": 14968, + "ĠMachine": 14969, + "ction": 14970, + "Ġillnesses": 14971, + "ĠHoffman": 14972, + "ĠSpaceX": 14973, + "Ġju": 14974, + "Ġswift": 14975, + "Ġembark": 14976, + "ĠRailway": 14977, + "Ġmeasuring": 14978, + "agers": 14979, + "arsh": 14980, + "Ġessence": 14981, + "angle": 14982, + "Ġolive": 14983, + "ĠCommander": 14984, + "iggs": 14985, + "Ġrewarded": 14986, + "Ġdispatched": 14987, + "Ġplayground": 14988, + "½": 14989, + "ĠProgramme": 14990, + "Ġstudios": 14991, + "Ġskeptical": 14992, + "ĠOlymp": 14993, + "ĠKeys": 14994, + "ĠSunshine": 14995, + "amba": 14996, + "ĠDonna": 14997, + "Ġlightly": 14998, + "Ġobtaining": 14999, + "Ġpoisoning": 15000, + "Ġaz": 15001, + "Ġ1972": 15002, + "Ġunconscious": 15003, + "ECT": 15004, + "Ġlied": 15005, + "ĠKaz": 15006, + "Ġ06": 15007, + "ĠMoving": 15008, + "Ġnum": 15009, + "oral": 15010, + "Ġassessments": 15011, + "Ġscholarships": 15012, + "Ġevacuate": 15013, + "ĠSunni": 15014, + "Ġquake": 15015, + "Ġfort": 15016, + "ques": 15017, + "ĠAlonso": 15018, + "Ġthread": 15019, + "Ġsqueeze": 15020, + "arat": 15021, + "oly": 15022, + "ĠAlphabet": 15023, + "uting": 15024, + "icio": 15025, + "ĠRetirement": 15026, + "ither": 15027, + "Ġasleep": 15028, + "Ġpairs": 15029, + "Ġmanufacture": 15030, + "ĠHazard": 15031, + "Ġsidewalk": 15032, + "Ġwears": 15033, + "ĠCraft": 15034, + "emen": 15035, + "ieth": 15036, + "Ġbypass": 15037, + "ĠLancaster": 15038, + "Ġflour": 15039, + "charge": 15040, + "ĠCLICK": 15041, + "Ġpotatoes": 15042, + "ĠKarachi": 15043, + 
"Ġvalley": 15044, + "Ġsights": 15045, + "Ġfallout": 15046, + "ords": 15047, + "BN": 15048, + "Ġsunshine": 15049, + "Ġundertaken": 15050, + "Ġcontestants": 15051, + "Ġaccomplishments": 15052, + "Ġconditioning": 15053, + "Ġcel": 15054, + "ĠHalifax": 15055, + "Ġaccent": 15056, + "***": 15057, + "Ġpitchers": 15058, + "Ġadopting": 15059, + "Ġjustices": 15060, + "Ġrip": 15061, + "ince": 15062, + "Ġelimination": 15063, + "Ġaerospace": 15064, + "ĠBeer": 15065, + "ĠBasin": 15066, + "Ġunwanted": 15067, + "goers": 15068, + "isco": 15069, + "ĠTwin": 15070, + "ĠDesert": 15071, + "rix": 15072, + "Ġdarkness": 15073, + "ĠDunn": 15074, + "City": 15075, + "pop": 15076, + "Ġ1969": 15077, + "ataka": 15078, + "Ġtal": 15079, + "Ġautism": 15080, + "ĠMcLaren": 15081, + "ĠUEFA": 15082, + "Ġclassrooms": 15083, + "ĠLeave": 15084, + "Americans": 15085, + "las": 15086, + "Ġqui": 15087, + "Ġundefeated": 15088, + "otto": 15089, + "ĠNRA": 15090, + "ĠPorsche": 15091, + "Ġnuts": 15092, + "oys": 15093, + "ĠMethodist": 15094, + "Ġatt": 15095, + "Ġtweeting": 15096, + "children": 15097, + "eller": 15098, + "Ġinquiries": 15099, + "Ġmillennials": 15100, + "ĠWembley": 15101, + "INS": 15102, + "Ġautopsy": 15103, + "ĠElon": 15104, + "ĠHicks": 15105, + "ugg": 15106, + "Ġwreck": 15107, + "ĠComcast": 15108, + "Ġstones": 15109, + "public": 15110, + "ĠKem": 15111, + "bedroom": 15112, + "ļ": 15113, + "itated": 15114, + "Ġsemic": 15115, + "uman": 15116, + "Cal": 15117, + "ANN": 15118, + "ĠGaz": 15119, + "Ġundisclosed": 15120, + "ĠPlanned": 15121, + "ĠYale": 15122, + "ĠIST": 15123, + "lies": 15124, + "ĠStanding": 15125, + "Ġrelieved": 15126, + "EO": 15127, + "Ġgraduating": 15128, + "park": 15129, + "ĠâĢķ": 15130, + "Ġpensions": 15131, + "rave": 15132, + "ĠWonder": 15133, + "AZ": 15134, + "Ġcosting": 15135, + "Ġeditors": 15136, + "Ġtotaled": 15137, + "Ġspacecraft": 15138, + "meter": 15139, + "Ġ02": 15140, + "ĠNikki": 15141, + "sworth": 15142, + "ĠCrit": 15143, + "asha": 15144, + "Ġknees": 15145, + "Ġhats": 15146, + "uity": 15147, + "ĠPanther": 15148, + "Ġtan": 15149, + "ĠBuzz": 15150, + "ĠGlad": 15151, + "ĠPleasant": 15152, + "SM": 15153, + "Ġtricks": 15154, + "Ġplac": 15155, + "ĠDanielle": 15156, + "Ġours": 15157, + "Ġwashed": 15158, + "haven": 15159, + "Ġdrain": 15160, + "ĠUttar": 15161, + "Ġapple": 15162, + "Ġjunk": 15163, + "Ġturkey": 15164, + "ĠDug": 15165, + "Ġdiplomacy": 15166, + "Ġempire": 15167, + "Ġpinch": 15168, + "Ġferry": 15169, + "ĠDustin": 15170, + "Ġ03": 15171, + "Ġelder": 15172, + "Everything": 15173, + "ĠProgressive": 15174, + "ution": 15175, + "VI": 15176, + "dam": 15177, + "Ġlever": 15178, + "ĠAustralians": 15179, + "Ġconsequence": 15180, + "itan": 15181, + "Ġcondemn": 15182, + "Ġneg": 15183, + "ĠOverview": 15184, + "Ġsuccesses": 15185, + "Ġprobable": 15186, + "ĠMirror": 15187, + "mor": 15188, + "verse": 15189, + "Ġevaluating": 15190, + "ĠBes": 15191, + "Ġimm": 15192, + "Ġharness": 15193, + "Ġresilient": 15194, + "ĠBuild": 15195, + "Ġstraightforward": 15196, + "ADE": 15197, + "Ġgrandparents": 15198, + "Ġmarched": 15199, + "ĠKiev": 15200, + "Ġchiefs": 15201, + "oha": 15202, + "Ġvest": 15203, + "kn": 15204, + "enda": 15205, + "ĠSev": 15206, + "Ġbatters": 15207, + "ĠJos": 15208, + "ĠQue": 15209, + "ĠCourse": 15210, + "ĠCorner": 15211, + "ĠMess": 15212, + "Ġmourn": 15213, + "keepers": 15214, + "ĠRegina": 15215, + "Everybody": 15216, + "Ġtrajectory": 15217, + "Ġdefenseman": 15218, + "ĠArticles": 15219, + "Ġspur": 15220, + "ĠPhD": 15221, + "Ġpipes": 15222, + "Ġduck": 15223, + "Ġcombining": 15224, + "ĠHit": 15225, + 
"ĠGeorgetown": 15226, + "ĠBee": 15227, + "Cor": 15228, + "Ġcomposition": 15229, + "Ġconnects": 15230, + "ĠMARK": 15231, + "taker": 15232, + "Ġcertainty": 15233, + "Ġhefty": 15234, + "ĠHezbollah": 15235, + "ĠShip": 15236, + "Ġmalicious": 15237, + "AI": 15238, + "Ġbits": 15239, + "Ġstyl": 15240, + "Ġimpaired": 15241, + "ĠCBI": 15242, + "Despite": 15243, + "othe": 15244, + "ĠRyder": 15245, + "ĠAlf": 15246, + "ifa": 15247, + "Ind": 15248, + "Ġblaming": 15249, + "ĠToledo": 15250, + "EW": 15251, + "ĠEssex": 15252, + "iated": 15253, + "ĠAberdeen": 15254, + "ANCE": 15255, + "Ġpossess": 15256, + "Ġsuperhero": 15257, + "Ġoverhead": 15258, + "quet": 15259, + "ĠRicky": 15260, + "Ġdock": 15261, + "ĠTelecom": 15262, + "Ġshelf": 15263, + "³": 15264, + "Ġmaritime": 15265, + "Ġportrayed": 15266, + "ĠYesterday": 15267, + "Ġcollided": 15268, + "Ġcookies": 15269, + "ĠCul": 15270, + "Ġindexes": 15271, + "Ġnaval": 15272, + "oval": 15273, + "105": 15274, + "ĠWeber": 15275, + "chief": 15276, + "arma": 15277, + "ĠRey": 15278, + "Ġauditor": 15279, + "ĠMarion": 15280, + "ĠMartha": 15281, + "ĠSally": 15282, + "Ġsedan": 15283, + "ĠAlison": 15284, + "nce": 15285, + "Es": 15286, + "ĠParade": 15287, + "Ġpharmacy": 15288, + "ĠKre": 15289, + "loe": 15290, + "cks": 15291, + "Ġmitigate": 15292, + "Ġdesigning": 15293, + "Ġ2024": 15294, + "Ġportable": 15295, + "Ġimproves": 15296, + "ĠAMD": 15297, + "Ġexcluded": 15298, + "CON": 15299, + "ĠOscars": 15300, + "Ġfixtures": 15301, + "comb": 15302, + "ĠBerg": 15303, + "Ġbother": 15304, + "Ġboring": 15305, + "Ġobservation": 15306, + "ĠCad": 15307, + "Ġrecordings": 15308, + "ĠCultural": 15309, + "Ġweaken": 15310, + "Ġaccuse": 15311, + "ĠAbd": 15312, + "abor": 15313, + "115": 15314, + "uffle": 15315, + "Ġhighways": 15316, + "atham": 15317, + "empt": 15318, + "ĠDeer": 15319, + "ĠEDT": 15320, + "ĠWait": 15321, + "athan": 15322, + "Ġaccumulated": 15323, + "Ġguilt": 15324, + "Ġexempt": 15325, + "Ġdiluted": 15326, + "ĠJamal": 15327, + "Ġshit": 15328, + "cross": 15329, + "Ġeve": 15330, + "Ġshirts": 15331, + "Ġsatisfy": 15332, + "ĠPaulo": 15333, + "AH": 15334, + "sic": 15335, + "ĠChloe": 15336, + "ĠCities": 15337, + "ĠSwansea": 15338, + "Ġprecision": 15339, + "ĠTracy": 15340, + "ping": 15341, + "Ġcontinually": 15342, + "Ġdemographic": 15343, + "Ġcliff": 15344, + "Ġjaw": 15345, + "isted": 15346, + "ĠDevelop": 15347, + "ĠAJ": 15348, + "Ġaisle": 15349, + "ĠLionel": 15350, + "Ġpredominantly": 15351, + "Ġmel": 15352, + "Ġlifelong": 15353, + "hs": 15354, + "Ġshouted": 15355, + "lad": 15356, + "Ġdest": 15357, + "Ġpacks": 15358, + "ĠKath": 15359, + "ĠCruise": 15360, + "fired": 15361, + "oder": 15362, + "hua": 15363, + "Ġgoodbye": 15364, + "Ġinterfere": 15365, + "eca": 15366, + "Ġré": 15367, + "atum": 15368, + "itas": 15369, + "ĠLodge": 15370, + "ĠWald": 15371, + "Ġmidday": 15372, + "umble": 15373, + "asting": 15374, + "©": 15375, + "ĠLeg": 15376, + "ĠNepal": 15377, + "Ġchased": 15378, + "idge": 15379, + "Ġconv": 15380, + "Ġfraudulent": 15381, + "Ġopera": 15382, + "Ġshr": 15383, + "ĠUniverse": 15384, + "ĠJerome": 15385, + "Ġ1977": 15386, + "ĠDancing": 15387, + "ĠRS": 15388, + "±": 15389, + "eks": 15390, + "Ġchic": 15391, + "Ġpunish": 15392, + "Ġpropose": 15393, + "arin": 15394, + "ĠChop": 15395, + "ĠAhead": 15396, + "ĠGallagher": 15397, + "ĠBangkok": 15398, + "ĠShelby": 15399, + "ĠNS": 15400, + "Ġcheek": 15401, + "onia": 15402, + "Ġrelegation": 15403, + "ĠHind": 15404, + "ĠCory": 15405, + "Ġfingerprint": 15406, + "Ġstrive": 15407, + "Ġmm": 15408, + "igs": 15409, + "Ġholy": 15410, + "Ġfavored": 15411, + 
"ĠSomeone": 15412, + "ĠLatino": 15413, + "ĠPatt": 15414, + "Ġchallenger": 15415, + "ĠCotton": 15416, + "Sw": 15417, + "itten": 15418, + "ĠXI": 15419, + "ĠStat": 15420, + "ĠDIS": 15421, + "Ġautomakers": 15422, + "Ġevaluated": 15423, + "ĠArc": 15424, + "Ġpersuade": 15425, + "Af": 15426, + "Ġreunited": 15427, + "Ġabs": 15428, + "Ġbride": 15429, + "Ġpurely": 15430, + "uce": 15431, + "uded": 15432, + "Ġsettling": 15433, + "Ġlodged": 15434, + "Ġfixing": 15435, + "Ġsuccession": 15436, + "ĠAlfred": 15437, + "ĠAlvarez": 15438, + "mac": 15439, + "ĠFont": 15440, + "Ġcontra": 15441, + "affle": 15442, + "Ġcopied": 15443, + "Ġmasses": 15444, + "ĠElections": 15445, + "ĠThan": 15446, + "Ġsoaring": 15447, + "jay": 15448, + "Ġsuing": 15449, + "Ġconcentrated": 15450, + "Ġconvey": 15451, + "Ġ240": 15452, + "gs": 15453, + "ĠNeal": 15454, + "Ġnasty": 15455, + "ĠLB": 15456, + "odi": 15457, + "ĠSergei": 15458, + "Ġthumb": 15459, + "Ġservants": 15460, + "Ġrevelation": 15461, + "Ġdischarge": 15462, + "ĠBright": 15463, + "ĠBent": 15464, + "ĠChrysler": 15465, + "mill": 15466, + "ĠImagine": 15467, + "Ġreceptions": 15468, + "Ġpersonalities": 15469, + "Ġsilly": 15470, + "ĠLoc": 15471, + "ĠZero": 15472, + "HI": 15473, + "rice": 15474, + "Ġgar": 15475, + "far": 15476, + "enh": 15477, + "ĠBiden": 15478, + "ĠEntreprene": 15479, + "Ġassumption": 15480, + "Ġnicely": 15481, + "ĠEither": 15482, + "|": 15483, + "ĠNW": 15484, + "ĠKens": 15485, + "ĠNolan": 15486, + "Ġowning": 15487, + "atures": 15488, + "ĠPastor": 15489, + "ĠRegistration": 15490, + "Ġexperiments": 15491, + "Ġassurance": 15492, + "Ġhashtag": 15493, + "oint": 15494, + "ĠBin": 15495, + "Ġqualification": 15496, + "center": 15497, + "Ġausterity": 15498, + "ĠPers": 15499, + "Ġscoop": 15500, + "Ġpros": 15501, + "ĠFields": 15502, + "Ġfur": 15503, + "ĠJas": 15504, + "Ġplanting": 15505, + "security": 15506, + "ĠTrain": 15507, + "ĠKathy": 15508, + "demand": 15509, + "ĠLev": 15510, + "Ġtut": 15511, + "tier": 15512, + "QU": 15513, + "Ġexploitation": 15514, + "Ġignoring": 15515, + "ĠSex": 15516, + "Ġadapted": 15517, + "Ġdisastrous": 15518, + "Ġempower": 15519, + "Ġcreators": 15520, + "ĠLay": 15521, + "ĠDragon": 15522, + "ĠWyn": 15523, + "Ġ1974": 15524, + "acious": 15525, + "performance": 15526, + "ĠTiffany": 15527, + "isting": 15528, + "Ġindividually": 15529, + "ĠLeading": 15530, + "ĠSask": 15531, + "Ġcatastrophic": 15532, + "Ġpunched": 15533, + "ĠVienna": 15534, + "Ġsurgical": 15535, + "Gr": 15536, + "odo": 15537, + "Ġgem": 15538, + "ĠMinority": 15539, + "Ġmice": 15540, + "ĠHistoric": 15541, + "ĠKot": 15542, + "caster": 15543, + "Ġsuff": 15544, + "journal": 15545, + "Ġpresumably": 15546, + "ĠBit": 15547, + "inary": 15548, + "Ġbre": 15549, + "Ġenhancing": 15550, + "Ġgru": 15551, + "ĠRunning": 15552, + "hardt": 15553, + "Ġtroubling": 15554, + "Ġpumps": 15555, + "ĠProspect": 15556, + "etic": 15557, + "Ġmartial": 15558, + "Ġcouncillor": 15559, + "atra": 15560, + "ths": 15561, + "ĠSark": 15562, + "ĠChamp": 15563, + "scoring": 15564, + "ĠWel": 15565, + "rup": 15566, + "Ġterrifying": 15567, + "ĠCatch": 15568, + "Ġinspections": 15569, + "Ġpornography": 15570, + "bra": 15571, + "ĠKeeping": 15572, + "Ġbanker": 15573, + "angers": 15574, + "ĠCrimea": 15575, + "ĠDisclosure": 15576, + "iba": 15577, + "Ġturf": 15578, + "Ġschedules": 15579, + "ĠJorge": 15580, + "ĠAcross": 15581, + "Ġsolving": 15582, + "Ġsensation": 15583, + "ĠWW": 15584, + "cial": 15585, + "atz": 15586, + "Ġlion": 15587, + "Ġcertificates": 15588, + "itive": 15589, + "ĠWes": 15590, + "ĠPrison": 15591, + "ĠPlayStation": 15592, 
+ "duty": 15593, + "Ġvariable": 15594, + "Ġstrangers": 15595, + "istrates": 15596, + "vs": 15597, + "Ġreigning": 15598, + "Ġsliding": 15599, + "ĠShin": 15600, + "Ġtelecommunications": 15601, + "Ġinstalling": 15602, + "Ġrecogn": 15603, + "Ġsubway": 15604, + "too": 15605, + "ĠMcKin": 15606, + "ĠStoke": 15607, + "Ġsensitivity": 15608, + "bas": 15609, + "Ġsan": 15610, + "Ġ(-": 15611, + "ĠSuarez": 15612, + "Ġaverages": 15613, + "ammu": 15614, + "ĠFen": 15615, + "Ġrefined": 15616, + "outh": 15617, + "Ġcob": 15618, + "ĠLaz": 15619, + "essa": 15620, + "Ġpositioning": 15621, + "Three": 15622, + "Ġoils": 15623, + "Ġassaults": 15624, + "Ġcompanion": 15625, + "ĠFlash": 15626, + "ĠMam": 15627, + "ĠTill": 15628, + "Ġblues": 15629, + "ĠJae": 15630, + "ĠPier": 15631, + "Ġbedrooms": 15632, + "ĠHawkins": 15633, + "ĠCornell": 15634, + "Ġanswering": 15635, + "Ġsec": 15636, + "Ġrecognizes": 15637, + "Red": 15638, + "ĠJamaica": 15639, + "Ġinsurgents": 15640, + "Ġbrace": 15641, + "Ġra": 15642, + "ĠTai": 15643, + "ocation": 15644, + "ignment": 15645, + "Ġreasonably": 15646, + "inating": 15647, + "Ġbonuses": 15648, + "Ġsandwich": 15649, + "Ġinadequate": 15650, + "Ġdelicate": 15651, + "Ġadorable": 15652, + "Ġpalace": 15653, + "Ġsmallest": 15654, + "Ġpractically": 15655, + "ĠCrosby": 15656, + "Ġlevy": 15657, + "Ġlend": 15658, + "boards": 15659, + "shaped": 15660, + "Ġvulnerability": 15661, + "ĠKelley": 15662, + "Ġsponsorship": 15663, + "ract": 15664, + "Ġslew": 15665, + "Ġfederation": 15666, + "ĠLal": 15667, + "acies": 15668, + "ĠFamilies": 15669, + "Ġproposing": 15670, + "Ġhyp": 15671, + "elected": 15672, + "inkle": 15673, + "ĠSays": 15674, + "ĠApollo": 15675, + "ĠWis": 15676, + "imer": 15677, + "Ġcombines": 15678, + "Ġtim": 15679, + "ĠQuestion": 15680, + "Ġborrowers": 15681, + "Ġswiftly": 15682, + "ĠMagn": 15683, + "Ġheadphones": 15684, + "Russia": 15685, + "Ġtongue": 15686, + "Ġbye": 15687, + "nn": 15688, + "Ġseller": 15689, + "ĠWord": 15690, + "Tom": 15691, + "ĠDevin": 15692, + "ĠSurrey": 15693, + "Ġquad": 15694, + "Ġcourthouse": 15695, + "gi": 15696, + "ĠGrill": 15697, + ">": 15698, + "Ġrational": 15699, + "ĠFlames": 15700, + "ĠCham": 15701, + "Ġvacuum": 15702, + "ĠRays": 15703, + "Ġescalating": 15704, + "Ġouter": 15705, + "Ġstretches": 15706, + "ĠSpeed": 15707, + "Ġnegatively": 15708, + "Ġabsorb": 15709, + "ĠAustrian": 15710, + "Ġslice": 15711, + "ĠDiet": 15712, + "Ġbun": 15713, + "Ġtactical": 15714, + "ĠCBD": 15715, + "Ġedges": 15716, + "Ġnest": 15717, + "Ġstrained": 15718, + "ulates": 15719, + "ĠTina": 15720, + "Net": 15721, + "ķ": 15722, + "ĠGos": 15723, + "God": 15724, + "White": 15725, + "Ġproudly": 15726, + "usion": 15727, + "ĠArlington": 15728, + "ĠNear": 15729, + "ĠMaxwell": 15730, + "Ġbomber": 15731, + "Ġcared": 15732, + "Ġapprovals": 15733, + "Ġexams": 15734, + "ĠEconomy": 15735, + "Ġposters": 15736, + "ĠHampton": 15737, + "ĠPere": 15738, + "ĠContract": 15739, + "Ġhoused": 15740, + "Ġinstruction": 15741, + "ĠJess": 15742, + "Ġacre": 15743, + "Ġcongestion": 15744, + "ĠGener": 15745, + "Ġdioxide": 15746, + "Ġvar": 15747, + "ĠAlexandria": 15748, + "ĠSpider": 15749, + "Ġcoins": 15750, + "Ġ225": 15751, + "Ġterritorial": 15752, + "ĠSPD": 15753, + "Ġfloat": 15754, + "null": 15755, + "Ġcalculate": 15756, + "ĠDin": 15757, + "eto": 15758, + "Ġcows": 15759, + "Ġpunct": 15760, + "Ġexpire": 15761, + "Ġkidnapped": 15762, + "Ġcou": 15763, + "Ġattitudes": 15764, + "ĠLeh": 15765, + "ĠHero": 15766, + "ĠKabul": 15767, + "Ġcubic": 15768, + "Ġdigits": 15769, + "ĠRES": 15770, + "Ġpipelines": 15771, + "icide": 15772, + 
"ĠSingle": 15773, + "Ġhurts": 15774, + "ĠMaz": 15775, + "ĠPak": 15776, + "Ġslate": 15777, + "Ġmultimedia": 15778, + "ADA": 15779, + "Mexico": 15780, + "ĠRelease": 15781, + "chard": 15782, + "Ġgarlic": 15783, + "ĠFletcher": 15784, + "Ġaforementioned": 15785, + "Ġ05": 15786, + "ĠParkway": 15787, + "Ġfirefighter": 15788, + "Ġcounseling": 15789, + "utions": 15790, + "Cap": 15791, + "Ġconsultants": 15792, + "ĠMeh": 15793, + "ouring": 15794, + "ĠDI": 15795, + "mic": 15796, + "phones": 15797, + "Ġencounters": 15798, + "ĠHapp": 15799, + "Ġcartoon": 15800, + "flight": 15801, + "Ġundertake": 15802, + "ĠHans": 15803, + "Ġplunge": 15804, + "ĠParenthood": 15805, + "Ġkickoff": 15806, + "ĠCelsius": 15807, + "ĠRas": 15808, + "ĠDund": 15809, + "ounce": 15810, + "Ġpurse": 15811, + "Ġmortality": 15812, + "Ġbrains": 15813, + "Ġconglomerate": 15814, + "ĠObserver": 15815, + "ĠSector": 15816, + "ĠApparently": 15817, + "Ġblank": 15818, + "iston": 15819, + "Ġweighs": 15820, + "gro": 15821, + "ĠPaw": 15822, + "ĠCOM": 15823, + "ĠPurdue": 15824, + "Ġnetted": 15825, + "ĠLinux": 15826, + "Mike": 15827, + "Ġfaithful": 15828, + "Ġmagazines": 15829, + "Ġheadquartered": 15830, + "ĠIps": 15831, + "Ġindications": 15832, + "Look": 15833, + "ĠElite": 15834, + "Ġsupreme": 15835, + "Ġchunk": 15836, + "ĠSz": 15837, + "ĠVine": 15838, + "rise": 15839, + "ĠYas": 15840, + "general": 15841, + "ĠOpera": 15842, + "Ġpriests": 15843, + "Assad": 15844, + "Ġaunt": 15845, + "Ġwhopping": 15846, + "enzie": 15847, + "Ġvegan": 15848, + "Ġinflux": 15849, + "ĠConsult": 15850, + "Ġwaiver": 15851, + "Having": 15852, + "inning": 15853, + "Ġproximity": 15854, + "Ġclassical": 15855, + "ĠIslanders": 15856, + "Ġadvertisers": 15857, + "ĠCe": 15858, + "ĠSochi": 15859, + "Ġmemoir": 15860, + "ĠPlaying": 15861, + "yers": 15862, + "Ġstud": 15863, + "Ġobservations": 15864, + "Ġadmire": 15865, + "Ġhiking": 15866, + "Ġbatter": 15867, + "Ġconfusing": 15868, + "Ġprecaution": 15869, + "kil": 15870, + "clusive": 15871, + "opoulos": 15872, + "ĠWestbrook": 15873, + "ĠTanzania": 15874, + "ĠCedar": 15875, + "usted": 15876, + "Ġdestructive": 15877, + "ĠIndies": 15878, + "osi": 15879, + "ĠAmid": 15880, + "Ġintercepted": 15881, + "Ġpartnering": 15882, + "Ġsubstances": 15883, + "ĠSuns": 15884, + "Ġpromotes": 15885, + "bird": 15886, + "Gen": 15887, + "aper": 15888, + "ĠEy": 15889, + "Ġterrain": 15890, + "Ġ1930": 15891, + "zon": 15892, + "Ġbreed": 15893, + "broken": 15894, + "uchin": 15895, + "ĠPrim": 15896, + "ĠRoland": 15897, + "Ġfitted": 15898, + "Ġprotects": 15899, + "Ġ114": 15900, + "RP": 15901, + "Ġdisrupted": 15902, + "ĠBaylor": 15903, + "oren": 15904, + "ĠKeen": 15905, + "Ġmansion": 15906, + "Ġgrassroots": 15907, + "ĠVictory": 15908, + "Ġbarn": 15909, + "Ġdepreciation": 15910, + "oped": 15911, + "immer": 15912, + "Ġgarnered": 15913, + "ĠLip": 15914, + "ĠTob": 15915, + "Ġcreatures": 15916, + "ooter": 15917, + "Ġconsortium": 15918, + "obi": 15919, + "ĠMonster": 15920, + "arks": 15921, + "turn": 15922, + "Ġsketch": 15923, + "Ġpredicting": 15924, + "Ġminimize": 15925, + "ĠEthan": 15926, + "anson": 15927, + "ĠAdjusted": 15928, + "ĠHornets": 15929, + "ĠNZ": 15930, + "ĠKathleen": 15931, + "ĠKier": 15932, + "ĠMercury": 15933, + "Ġghost": 15934, + "Ġhaw": 15935, + "ĠDemand": 15936, + "ĠCollection": 15937, + "ĠFortune": 15938, + "Ġcruel": 15939, + "Ġfurious": 15940, + "ĠKun": 15941, + "ĠSalem": 15942, + "Ġunsuccessful": 15943, + "ĠLomb": 15944, + "ĠFury": 15945, + "ahi": 15946, + "Ġenthusiastic": 15947, + "Ġsurgeries": 15948, + "ACE": 15949, + "Ġroller": 15950, + "ĠStamford": 
15951, + "Being": 15952, + "Dec": 15953, + "check": 15954, + "Ġaffection": 15955, + "Ġgifted": 15956, + "Ġenerg": 15957, + "Ġvarying": 15958, + "ĠCharl": 15959, + "Ġsolved": 15960, + "ĠNV": 15961, + "Ġlaptops": 15962, + "Ġkindness": 15963, + "mart": 15964, + "ĠPenny": 15965, + "Ġ116": 15966, + "ĠFeder": 15967, + "ĠCisco": 15968, + "Ġeducators": 15969, + "Ġminim": 15970, + "Ġgangs": 15971, + "Ġfestivities": 15972, + "ĠOriginal": 15973, + "yre": 15974, + "rying": 15975, + "Ġtighter": 15976, + "ĠMalta": 15977, + "Ġshield": 15978, + "interest": 15979, + "Ġbuoy": 15980, + "Ġsupplement": 15981, + "ĠSof": 15982, + "Ġok": 15983, + "Ġprosecuted": 15984, + "Ġinterventions": 15985, + "Ġseize": 15986, + "Ġcaravan": 15987, + "ĠCarlson": 15988, + "ĠEnterprises": 15989, + "ĠChristina": 15990, + "ĠWellington": 15991, + "Ġaltered": 15992, + "TP": 15993, + "Ġexpresses": 15994, + "Ġcomfortably": 15995, + "Ġstaffing": 15996, + "afa": 15997, + "itu": 15998, + "saving": 15999, + "Ġinflammation": 16000, + "hatt": 16001, + "ĠMiranda": 16002, + "icious": 16003, + "Ġgrabbing": 16004, + "ĠANY": 16005, + "Ġobjections": 16006, + "Ġdot": 16007, + "cle": 16008, + "Ġrelates": 16009, + "Ġtribe": 16010, + "Ġboarding": 16011, + "ĠEpisode": 16012, + "ĠEnjoy": 16013, + "arding": 16014, + "Ġathletics": 16015, + "Ġflies": 16016, + "Ġmortgages": 16017, + "ruct": 16018, + "Ġink": 16019, + "ĠKC": 16020, + "ĠSecondary": 16021, + "Ġfer": 16022, + "ĠQaeda": 16023, + "OA": 16024, + "Frank": 16025, + "track": 16026, + "ĠChandler": 16027, + "Ġenv": 16028, + "ĠLeaders": 16029, + "ĠKemp": 16030, + "Ġunsafe": 16031, + "sponsored": 16032, + "San": 16033, + "ĠUsers": 16034, + "PE": 16035, + "ĠAccount": 16036, + "otta": 16037, + "ĠMix": 16038, + "ĠCindy": 16039, + "En": 16040, + "Ġ175": 16041, + "Ġoverlooked": 16042, + "Ġpublications": 16043, + "Ġrewarding": 16044, + "Ġexplicit": 16045, + "Ġnotch": 16046, + "Ġspecifics": 16047, + "Ġdesignation": 16048, + "ĠAppeal": 16049, + "Ġcontingent": 16050, + "Ġcage": 16051, + "ĠKol": 16052, + "ĠJohns": 16053, + "ĠReach": 16054, + "ĠTin": 16055, + "ĠAfricans": 16056, + "Ġprec": 16057, + "ĠRural": 16058, + "ĠDw": 16059, + "Ġuphold": 16060, + "Ġsuffers": 16061, + "Ġweed": 16062, + "inst": 16063, + "Ġcancellation": 16064, + "ĠShaun": 16065, + "Ġleve": 16066, + "Ġdivisive": 16067, + "Ġhel": 16068, + "Ġfatigue": 16069, + "ĠSchwartz": 16070, + "ĠKirst": 16071, + "Ġarise": 16072, + "Ġgrandson": 16073, + "ĠLawson": 16074, + "Ġcollaborate": 16075, + "Ġparticipant": 16076, + "ĠBryce": 16077, + "Ġinfield": 16078, + "mid": 16079, + "Ġut": 16080, + "Ġnotices": 16081, + "Ġsneak": 16082, + "ĠPAR": 16083, + "Chris": 16084, + "Ġutilize": 16085, + "ĠByron": 16086, + "ĠZhang": 16087, + "PF": 16088, + "Ġoverwhelmingly": 16089, + "Ġvegetable": 16090, + "Ġabsurd": 16091, + "ĠChem": 16092, + "etime": 16093, + "Ġenvoy": 16094, + "Ġlover": 16095, + "length": 16096, + "Ġrevolutionary": 16097, + "ĠYam": 16098, + "Ġshutting": 16099, + "mt": 16100, + "super": 16101, + "ĠToby": 16102, + "ĠCoca": 16103, + "Ġproposition": 16104, + "Ġembracing": 16105, + "Ġversatile": 16106, + "ĠWalking": 16107, + "Ġillicit": 16108, + "Ġnude": 16109, + "Ġunpredictable": 16110, + "take": 16111, + "Ġgotta": 16112, + "ĠXiaomi": 16113, + "Ġinstit": 16114, + "ĠPep": 16115, + "ĠPearson": 16116, + "Ġrejection": 16117, + "stead": 16118, + "Ġmut": 16119, + "Ġoutspoken": 16120, + "ĠBaghdad": 16121, + "ĠFly": 16122, + "Ġwholly": 16123, + "ĠRM": 16124, + "ĠFa": 16125, + "Ġcleaner": 16126, + "frey": 16127, + "ĠHab": 16128, + "ĠLiber": 16129, + "Ġwhereabouts": 16130, 
+ "Ġchefs": 16131, + "Ġalumni": 16132, + "Ġstopp": 16133, + "dd": 16134, + "forward": 16135, + "rast": 16136, + "ĠNash": 16137, + "ĠCort": 16138, + "Ġpotent": 16139, + "Ġmold": 16140, + "Ġdistinctive": 16141, + "chip": 16142, + "ĠBrunswick": 16143, + "Ġpopulist": 16144, + "Ġplagued": 16145, + "eka": 16146, + "ĠIOC": 16147, + "ugs": 16148, + "ĠDob": 16149, + "Ġmagn": 16150, + "asser": 16151, + "hew": 16152, + "Ġcapturing": 16153, + "oos": 16154, + "Ġcrystal": 16155, + "Ġalarming": 16156, + "Ġ135": 16157, + "iating": 16158, + "Ġnap": 16159, + "umar": 16160, + "ĠExpl": 16161, + "Ġupgrading": 16162, + "Ġdecl": 16163, + "Ġoverturn": 16164, + "ARK": 16165, + "linked": 16166, + "ĠContinued": 16167, + "Ġslumped": 16168, + "ĠGaga": 16169, + "iful": 16170, + "ĠPosted": 16171, + "ĠRecommended": 16172, + "Ġsnake": 16173, + "Ġexplosives": 16174, + "Ġhind": 16175, + "Ġcontempt": 16176, + "Ġmock": 16177, + "NBA": 16178, + "Ġstall": 16179, + "Ġorganisers": 16180, + "Ġingredient": 16181, + "Ġblockbuster": 16182, + "ĠStream": 16183, + "ĠLeah": 16184, + "Pic": 16185, + "Ġventures": 16186, + "oman": 16187, + "Ġweakening": 16188, + "Ġmaximize": 16189, + "Ġdigging": 16190, + "uez": 16191, + "Ġdistinction": 16192, + "ĠMali": 16193, + "Ġcontaminated": 16194, + "Ġhij": 16195, + "Ġcrafts": 16196, + "Fl": 16197, + "Ġcloset": 16198, + "ĠRapp": 16199, + "Ġtowers": 16200, + "Ġamenities": 16201, + "Ġopioids": 16202, + "Ġcontend": 16203, + "load": 16204, + "ĠJol": 16205, + "ĠBooks": 16206, + "Ġsim": 16207, + "Ġthrilling": 16208, + "Ġmeter": 16209, + "ĠMultiple": 16210, + "Ġarbitration": 16211, + "Ġcracked": 16212, + "Pl": 16213, + "Ġphotographers": 16214, + "Te": 16215, + "ĠSidd": 16216, + "Ġexplored": 16217, + "170": 16218, + "Ġpleasant": 16219, + "ĠCapitals": 16220, + "ĠRi": 16221, + "ĠRandall": 16222, + "overed": 16223, + "Ġchar": 16224, + "ĠEverybody": 16225, + "ĠPolitics": 16226, + "Ġmoisture": 16227, + "Ġthriving": 16228, + "ĠScotia": 16229, + "arded": 16230, + "imb": 16231, + "ĠFantasy": 16232, + "Ġcemetery": 16233, + "ĠPath": 16234, + "eur": 16235, + "ĠSec": 16236, + "ĠPlatform": 16237, + "Ġdeparted": 16238, + "ĠVIDEO": 16239, + "ĠPant": 16240, + "ĠSyn": 16241, + "Ġ230": 16242, + "bleacher": 16243, + "live": 16244, + "Ġprob": 16245, + "Ġgymn": 16246, + "Ġjudged": 16247, + "orns": 16248, + "Ġstemming": 16249, + "umbling": 16250, + "ĠHew": 16251, + "ĠCheryl": 16252, + "Ġconsciousness": 16253, + "cos": 16254, + "ĠTate": 16255, + "CNN": 16256, + "Ġrecognizing": 16257, + "meg": 16258, + "Ġpant": 16259, + "ulk": 16260, + "MM": 16261, + "ĠPrescott": 16262, + "ĠMarcel": 16263, + "anas": 16264, + "Ġhappier": 16265, + "mag": 16266, + "ĠLov": 16267, + "Ġspreads": 16268, + "ĠSample": 16269, + "Ġpopped": 16270, + "HR": 16271, + "ĠMitt": 16272, + "Ġ00": 16273, + "Ġlabeled": 16274, + "Ġaspirations": 16275, + "?)": 16276, + "Ġloads": 16277, + "ĠBritt": 16278, + "hurst": 16279, + "ĠTeams": 16280, + "Ġextremists": 16281, + "ĠClement": 16282, + "lings": 16283, + "shirts": 16284, + "cheon": 16285, + "ĠDEL": 16286, + "ĠLocation": 16287, + "Ġpresentations": 16288, + "ĠFalcon": 16289, + "Ġtoddler": 16290, + "kl": 16291, + "Ġprone": 16292, + "Ġcommemor": 16293, + "ĠStanton": 16294, + "201": 16295, + "Ġranges": 16296, + "Ġfielder": 16297, + "Ġattends": 16298, + "rade": 16299, + "Ġproactive": 16300, + "Ġhostage": 16301, + "ĠGriffith": 16302, + "ockey": 16303, + "ĠAdding": 16304, + "ĠAFL": 16305, + "gas": 16306, + "istics": 16307, + "Ġsurgeon": 16308, + "Ġtsunami": 16309, + "2014": 16310, + "Ġconstraints": 16311, + "cu": 16312, + 
"Ġsurrendered": 16313, + "azed": 16314, + "ĠAirbnb": 16315, + "650": 16316, + "zed": 16317, + "Ġinjustice": 16318, + "dog": 16319, + "full": 16320, + "ĠHear": 16321, + "Ġsprawling": 16322, + "Ġhomeland": 16323, + "ĠSG": 16324, + "anced": 16325, + "Ġpools": 16326, + "ĠCE": 16327, + "Ġbeers": 16328, + "AE": 16329, + "ĠJac": 16330, + "Ġrecurring": 16331, + "Writing": 16332, + "Ġgenius": 16333, + "ĠFrost": 16334, + "Ġgrounded": 16335, + "Ġallege": 16336, + "lessness": 16337, + "Ġjumper": 16338, + "Ġvicious": 16339, + "Ġsecretly": 16340, + "Ġhacked": 16341, + "ĠAmsterdam": 16342, + "ibu": 16343, + "Ġ1971": 16344, + "ĠRosenstein": 16345, + "nick": 16346, + "arge": 16347, + "Ġladder": 16348, + "elled": 16349, + "Ġsatellites": 16350, + "Ġassassination": 16351, + "ĠDepot": 16352, + "built": 16353, + "Ġunrelated": 16354, + "maid": 16355, + "ĠDod": 16356, + "ĠVanderbilt": 16357, + "Ġboundary": 16358, + "ĠStafford": 16359, + "ĠBry": 16360, + "Ġtribunal": 16361, + "Ġoutings": 16362, + "Ġquantity": 16363, + "imming": 16364, + "ĠBlacks": 16365, + "Br": 16366, + "eri": 16367, + "uffed": 16368, + "Ġexplicitly": 16369, + "ĠBieber": 16370, + "AKING": 16371, + "Ġphotographed": 16372, + "ĠPolit": 16373, + "Ġpremature": 16374, + "hered": 16375, + "ĠVi": 16376, + "Ġmarsh": 16377, + "casters": 16378, + "ĠKra": 16379, + "Ġdried": 16380, + "Ġcafe": 16381, + "eting": 16382, + "Ġshaping": 16383, + "aram": 16384, + "orf": 16385, + "Ġrichest": 16386, + "Ġhurricanes": 16387, + "Ġcommands": 16388, + "Gl": 16389, + "anth": 16390, + "Ġstunt": 16391, + "Ġyearly": 16392, + "Ġdefeats": 16393, + "Ġconsultancy": 16394, + "call": 16395, + "Ġlag": 16396, + "adh": 16397, + "ĠPalestine": 16398, + "Ġcustomized": 16399, + "ĠScar": 16400, + "ĠWesley": 16401, + "ready": 16402, + "Ġpersist": 16403, + "Ġpacking": 16404, + "ono": 16405, + "Ġdischarged": 16406, + "Ġpouring": 16407, + "sburg": 16408, + "Ġreconsider": 16409, + "ĠMethod": 16410, + "enez": 16411, + "cill": 16412, + "Ġsecular": 16413, + "pers": 16414, + "Ġple": 16415, + "ELS": 16416, + "ĠMine": 16417, + "Ġpushes": 16418, + "Us": 16419, + "Ġframes": 16420, + "ĠNets": 16421, + "ĠSiem": 16422, + "ĠHitler": 16423, + "kill": 16424, + "Ġrented": 16425, + "Ġcharm": 16426, + "Ġpulls": 16427, + "ĠTide": 16428, + "Ġinsufficient": 16429, + "itted": 16430, + "Care": 16431, + "iera": 16432, + "Ġcouch": 16433, + "aders": 16434, + "ext": 16435, + "ĠCitizen": 16436, + "Ġlogical": 16437, + "ĠMeadows": 16438, + "ĠDenis": 16439, + "ĠDrivers": 16440, + "Ġrepublic": 16441, + "Ġadvising": 16442, + "Ġparamedics": 16443, + "insky": 16444, + "illard": 16445, + "encia": 16446, + "Ġkh": 16447, + "Ġrh": 16448, + "Ġfinalized": 16449, + "Ġreins": 16450, + "ĠFarrell": 16451, + "Ġsteer": 16452, + "Ġproxy": 16453, + "unes": 16454, + "ĠSoul": 16455, + "ĠCopper": 16456, + "ĠKenyan": 16457, + "amped": 16458, + "conference": 16459, + "sted": 16460, + "ĠLon": 16461, + "Ġreplay": 16462, + "ĠBle": 16463, + "Ġvibe": 16464, + "Ġportfolios": 16465, + "sea": 16466, + "Ġbeautifully": 16467, + "Ġairs": 16468, + "ĠRap": 16469, + "ĠKatrina": 16470, + "Ġberth": 16471, + "gold": 16472, + "ĠIsaiah": 16473, + "iques": 16474, + "elson": 16475, + "Ġrelentless": 16476, + "ĠHighland": 16477, + "ĠPhilippe": 16478, + "ĠFol": 16479, + "Ġenduring": 16480, + "enz": 16481, + "Ġaer": 16482, + "icing": 16483, + "ĠHTC": 16484, + "Ġdoping": 16485, + "ĠAlb": 16486, + "Ġsom": 16487, + "icia": 16488, + "Ġcoroner": 16489, + "Ġdamn": 16490, + "Ġ119": 16491, + "Ġwiped": 16492, + "ĠAuditor": 16493, + "hern": 16494, + "ĠJew": 16495, + "endra": 16496, 
+ "osp": 16497, + "ĠRory": 16498, + "Ġshapes": 16499, + "ĠPablo": 16500, + "Ġforemost": 16501, + "ĠHos": 16502, + "ĠCunningham": 16503, + "145": 16504, + "ĠRecovery": 16505, + "!!!": 16506, + "western": 16507, + "Ġimaging": 16508, + "ĠRookie": 16509, + "ĠMTV": 16510, + "Ġunc": 16511, + "ĠSporting": 16512, + "Ġpatrons": 16513, + "ĠCoverage": 16514, + "ĠObservatory": 16515, + "Ġfishermen": 16516, + "ĠProvince": 16517, + "ĠAston": 16518, + "ĠOsh": 16519, + "ĠWeekend": 16520, + "Ġrecruits": 16521, + "Ġdensity": 16522, + "FM": 16523, + "ĠGorsuch": 16524, + "ĠErie": 16525, + "lining": 16526, + "Ġshowcased": 16527, + "ĠRubio": 16528, + "Ġchaotic": 16529, + "Ġattractions": 16530, + "Ġhug": 16531, + "ĠHerbert": 16532, + "ĠRespond": 16533, + "Ġhappily": 16534, + "Ġtor": 16535, + "ĠOTHER": 16536, + "runner": 16537, + "ĠShakespeare": 16538, + "Ġstretching": 16539, + "ĠJudy": 16540, + "wyn": 16541, + "ĠCafe": 16542, + "Ġgreens": 16543, + "ĠHend": 16544, + "Ġglam": 16545, + "iation": 16546, + "ĠKingston": 16547, + "Ġincremental": 16548, + "Live": 16549, + "ĠBraun": 16550, + "USS": 16551, + "reb": 16552, + "Ġimperative": 16553, + "Ġsympathy": 16554, + "Ġrefuge": 16555, + "Ġadministered": 16556, + "rance": 16557, + "ĠLiberia": 16558, + "Ġmobil": 16559, + "heads": 16560, + "Ġinevitably": 16561, + "ĠEugene": 16562, + "ĠBerkshire": 16563, + "ĠHarbour": 16564, + "ĠTrends": 16565, + "TB": 16566, + "Ġdeficits": 16567, + "Ġlistings": 16568, + "Ġreadings": 16569, + "Ġtumor": 16570, + "Ġoffic": 16571, + "opy": 16572, + "Ġdistracted": 16573, + "Ġappropriately": 16574, + "ĠWillis": 16575, + "Ġskirt": 16576, + "ĠTea": 16577, + "Ġshades": 16578, + "Ġbargaining": 16579, + "Ġretention": 16580, + "ĠConcert": 16581, + "ĠMeteor": 16582, + "ĠCustom": 16583, + "Ġinputs": 16584, + "ĠSah": 16585, + "enta": 16586, + "Love": 16587, + "ĠBurg": 16588, + "ĠCynthia": 16589, + "ĠMoses": 16590, + "ubb": 16591, + "Ġpeoples": 16592, + "dh": 16593, + "ĠFro": 16594, + "bean": 16595, + "Ġcigarette": 16596, + "tta": 16597, + "umm": 16598, + "Ġphenomenal": 16599, + "Ġyelling": 16600, + "Ġinaug": 16601, + "Ġconven": 16602, + "ĠGore": 16603, + "request": 16604, + "Ġcolonial": 16605, + "ĠAleppo": 16606, + "Ġdemolition": 16607, + "Ġamounted": 16608, + "Ġstaggering": 16609, + "Ġclips": 16610, + "Ġinconsistent": 16611, + "ĠMilton": 16612, + "ĠWireless": 16613, + "ĠReno": 16614, + "ĠPerkins": 16615, + "Ġunusually": 16616, + "Ġmemor": 16617, + "Ġhectares": 16618, + "Ġlat": 16619, + "central": 16620, + "ĠDig": 16621, + "ĠMarina": 16622, + "ĠPartner": 16623, + "daily": 16624, + "your": 16625, + "Reilly": 16626, + "Ġpope": 16627, + "phy": 16628, + "Ġassessing": 16629, + "ĠRodrigo": 16630, + "wi": 16631, + "Ġcompatible": 16632, + "imate": 16633, + "Ġgentle": 16634, + "ĠRhodes": 16635, + "Brexit": 16636, + "ieve": 16637, + "Ġbreaches": 16638, + "Ġchopped": 16639, + "Ġcancers": 16640, + "VEL": 16641, + "Ġsluggish": 16642, + "ĠUltra": 16643, + "ĠUl": 16644, + "Ġcrises": 16645, + "ONE": 16646, + "ĠEquipment": 16647, + "Ġcater": 16648, + "Ġadjourn": 16649, + "Ġreadily": 16650, + "ĠRolling": 16651, + "ĠBott": 16652, + "inel": 16653, + "ĠRule": 16654, + "Ġgrind": 16655, + "ĠHussain": 16656, + "ussie": 16657, + "Ġdepressed": 16658, + "ĠImperial": 16659, + "ongo": 16660, + "Ġuniforms": 16661, + "Ġ117": 16662, + "Ġchambers": 16663, + "ĠDum": 16664, + "ifi": 16665, + "ĠBetty": 16666, + "ĠTA": 16667, + "Ġpromotions": 16668, + "itary": 16669, + "Ġcried": 16670, + "Ġbranding": 16671, + "ĠBahamas": 16672, + "ĠDat": 16673, + "Ġantibiotics": 16674, + "ĠAus": 16675, + 
"Ġumbrella": 16676, + "Ġgradual": 16677, + "Ġaltercation": 16678, + "Ġlure": 16679, + "ĠJakarta": 16680, + "Ġunified": 16681, + "chin": 16682, + "ettes": 16683, + "ĠRwanda": 16684, + "ulations": 16685, + "Ġbrink": 16686, + "Ġbroadcasting": 16687, + "ĠArtist": 16688, + "Ġrecon": 16689, + "Ġaqu": 16690, + "ĠServ": 16691, + "999": 16692, + "ĠParticipants": 16693, + "ĠVentures": 16694, + "fight": 16695, + "Ġactivism": 16696, + "Ġstructured": 16697, + "Ġportal": 16698, + "Ġtendency": 16699, + "ĠAssociate": 16700, + "Ġcalf": 16701, + "ĠOrd": 16702, + "ĠTi": 16703, + "ĠFrancois": 16704, + "uary": 16705, + "ĠVik": 16706, + "urchase": 16707, + "Ġfried": 16708, + "Ġbooming": 16709, + "Ġparticles": 16710, + "amas": 16711, + "INA": 16712, + "Super": 16713, + "supp": 16714, + "urring": 16715, + "ĠWatts": 16716, + "affer": 16717, + "ĠDEC": 16718, + "Ġroadway": 16719, + "border": 16720, + "Ġsequ": 16721, + "entially": 16722, + "ieg": 16723, + "Ġcamping": 16724, + "Ġ750": 16725, + "Ġcycles": 16726, + "ĠReese": 16727, + "ĠFellow": 16728, + "isters": 16729, + "ĠVehicle": 16730, + "kies": 16731, + "ĠJonas": 16732, + "Ġfoundations": 16733, + "ĠNigel": 16734, + "Ġstab": 16735, + "Ġcongressman": 16736, + "ĠWichita": 16737, + "antes": 16738, + "Ġprogression": 16739, + "Ġditch": 16740, + "lik": 16741, + "Ġsid": 16742, + "Ġele": 16743, + "ĠMund": 16744, + "Ġstairs": 16745, + "lete": 16746, + "Ġlingering": 16747, + "Ġsadly": 16748, + "Ġay": 16749, + "Em": 16750, + "Ġdeadliest": 16751, + "soon": 16752, + "Ġtangible": 16753, + "Ġabusing": 16754, + "Ġcomprises": 16755, + "vil": 16756, + "ĠBun": 16757, + "Ġdoubling": 16758, + "Ġcommun": 16759, + "Ġslogan": 16760, + "Ġloading": 16761, + "Ġshallow": 16762, + "Ġattributes": 16763, + "Che": 16764, + "Ġcheering": 16765, + "Ġrefuses": 16766, + "cam": 16767, + "bes": 16768, + "hon": 16769, + "ĠSpartans": 16770, + "cept": 16771, + "ĠComputer": 16772, + "ĠCanberra": 16773, + "ĠWARNING": 16774, + "Ġstuffed": 16775, + "block": 16776, + "ĠJennings": 16777, + "ĠAU": 16778, + "atin": 16779, + "Ġom": 16780, + "Ġbachelor": 16781, + "Ġprediction": 16782, + "ĠWinner": 16783, + "agne": 16784, + "Ġrob": 16785, + "ĠKatherine": 16786, + "Ġli": 16787, + "ĠHumph": 16788, + "ĠPEOPLE": 16789, + "IRO": 16790, + "Cola": 16791, + "Ġguitarist": 16792, + "isen": 16793, + "ĠHighlights": 16794, + "Ġwelcomes": 16795, + "Ġprisoner": 16796, + "Ġpsychology": 16797, + "Ġextradition": 16798, + "Ġrou": 16799, + "ĠLund": 16800, + "Ġthoughtful": 16801, + "RY": 16802, + "orman": 16803, + "Alex": 16804, + "Ġlaughter": 16805, + "Ġfumble": 16806, + "Ġsynthetic": 16807, + "Ġdigit": 16808, + "ĠRoc": 16809, + "ĠFactory": 16810, + "ellery": 16811, + "ishment": 16812, + "ilar": 16813, + "ĠEarl": 16814, + "ĠSutton": 16815, + "ĠJur": 16816, + "ĠAllan": 16817, + "ĠKoreans": 16818, + "uki": 16819, + "Ġculinary": 16820, + "PU": 16821, + "Stock": 16822, + "stars": 16823, + "ĠDayton": 16824, + "beck": 16825, + "Ġinstability": 16826, + "ĠBring": 16827, + "Ġbreeding": 16828, + "Ġmiracle": 16829, + "bons": 16830, + "Ġdonating": 16831, + "ĠKick": 16832, + "ĠSag": 16833, + "afi": 16834, + "Ġharassed": 16835, + "asm": 16836, + "Their": 16837, + "inity": 16838, + "Ġacademics": 16839, + "Ġstatute": 16840, + "ĠAmit": 16841, + "Ġpressured": 16842, + "east": 16843, + "\"),": 16844, + "iso": 16845, + "220": 16846, + "Ġairplane": 16847, + "ĠMcCabe": 16848, + "ctions": 16849, + "ĠMesa": 16850, + "Ġsensational": 16851, + "ĠFE": 16852, + "ĠNeigh": 16853, + "Ġbribery": 16854, + "Ġflaws": 16855, + "Ġfemales": 16856, + "Ġmisses": 16857, + 
"ĠColor": 16858, + "ĠVietnamese": 16859, + "ĠMental": 16860, + "Unfortunately": 16861, + "ĠPont": 16862, + "Ġ1940": 16863, + "dry": 16864, + "ĠGazette": 16865, + "ĠAns": 16866, + "Ġwhistle": 16867, + "Ġsymbolic": 16868, + "Ġpossessions": 16869, + "ĠDriver": 16870, + "Ġbracket": 16871, + "ĠReign": 16872, + "oji": 16873, + "Ġoct": 16874, + "Ġtube": 16875, + "ĠFelix": 16876, + "Ġtranslated": 16877, + "Ġpromptly": 16878, + "ĠErnest": 16879, + "arth": 16880, + "Ġdumb": 16881, + "Ġinfluences": 16882, + "taking": 16883, + "Ġprivat": 16884, + "erers": 16885, + "Ġmalware": 16886, + "Ġpredictable": 16887, + "Ġtighten": 16888, + "Ġheights": 16889, + "Ġfairness": 16890, + "facing": 16891, + "Ġrematch": 16892, + "Ġpoet": 16893, + "Ġfundamentally": 16894, + "Ġcoveted": 16895, + "Ġlivelihood": 16896, + "ĠABOUT": 16897, + "Ġsourced": 16898, + "Ġdeferred": 16899, + "Ġslashed": 16900, + "ĠSchultz": 16901, + "Ġtriggering": 16902, + "ĠShiv": 16903, + "Ġlithium": 16904, + "ahead": 16905, + "Ġleisure": 16906, + "Ġbackpack": 16907, + "ilateral": 16908, + "ĠNuclear": 16909, + "ĠLeone": 16910, + "ĠNice": 16911, + "Ġenthusiasts": 16912, + "September": 16913, + "Ġenroll": 16914, + "ĠWear": 16915, + "erey": 16916, + "angs": 16917, + "such": 16918, + "Ġunpopular": 16919, + "Ġdisciplined": 16920, + "Ġshrinking": 16921, + "ĠBrewing": 16922, + "ĠReally": 16923, + "Ġdirective": 16924, + "175": 16925, + "Ġnotifications": 16926, + "Ġfortunes": 16927, + "ĠHour": 16928, + "ĠGan": 16929, + "ĠChurchill": 16930, + "ĠDodge": 16931, + "ĠJeep": 16932, + "Ġsour": 16933, + "Ġderived": 16934, + "Ġft": 16935, + "riv": 16936, + "Ġlaundry": 16937, + "Ġfentanyl": 16938, + "ĠSioux": 16939, + "achi": 16940, + "workers": 16941, + "Ġworkload": 16942, + "rooms": 16943, + "ĠQU": 16944, + "ĠTruth": 16945, + "Ġdefenses": 16946, + "Ġdunk": 16947, + "IJ": 16948, + "Ġderby": 16949, + "ĠMotion": 16950, + "ĠMayo": 16951, + "ĠIke": 16952, + "Ġpreferences": 16953, + "Ġped": 16954, + "elman": 16955, + "moon": 16956, + "Ġshoots": 16957, + "ĠNoel": 16958, + "Ġmilit": 16959, + "ĠCambodia": 16960, + "ĠMLA": 16961, + "Ġhonoured": 16962, + "fast": 16963, + "Ġalgorithms": 16964, + "Ġstormed": 16965, + "NT": 16966, + "Benz": 16967, + "Ġvaccines": 16968, + "Ġmarching": 16969, + "Ġ118": 16970, + "ĠWilmington": 16971, + "GM": 16972, + "coin": 16973, + "Ġunderwater": 16974, + "ĠClearly": 16975, + "Ġorgans": 16976, + "mir": 16977, + "Ġdenounced": 16978, + "pless": 16979, + "imal": 16980, + "ĠKom": 16981, + "Ġfatalities": 16982, + "Ġyoungster": 16983, + "Ġthirty": 16984, + "Ġinternally": 16985, + "222": 16986, + "Ġdemonstrating": 16987, + "Ġbusiest": 16988, + "Ġperpetrators": 16989, + "Ġstun": 16990, + "Both": 16991, + "ĠMcCoy": 16992, + "gn": 16993, + "ĠDalton": 16994, + "ĠDAY": 16995, + "Ġsacred": 16996, + "Ġconsuming": 16997, + "Ġ(+": 16998, + "ĠPioneer": 16999, + "ĠApplications": 17000, + "ĠBolt": 17001, + "ĠBarkley": 17002, + "ĠExpo": 17003, + "ĠLore": 17004, + "ĠPrivacy": 17005, + "ĠHarley": 17006, + "Ġtractor": 17007, + "Ġtenth": 17008, + "ĠHaiti": 17009, + "ÃŃn": 17010, + "ĠTVs": 17011, + "ĠCathedral": 17012, + "Ġunite": 17013, + "Ġbinding": 17014, + "oks": 17015, + "ĠJenny": 17016, + "Ġcaller": 17017, + "ĠIngram": 17018, + "ĠPrairie": 17019, + "Ġrunoff": 17020, + "Ġasserted": 17021, + "icit": 17022, + "ĠSie": 17023, + "102": 17024, + "ĠMB": 17025, + "Ġobstruction": 17026, + "Ġgroom": 17027, + "Ġtolerate": 17028, + "Ġcans": 17029, + "forth": 17030, + "Ġvillain": 17031, + "Ġdefining": 17032, + "ĠFrenchman": 17033, + "otte": 17034, + "Ġcontr": 17035, + "clock": 
17036, + "onder": 17037, + "Ġprolific": 17038, + "ĠElectronic": 17039, + "ĠSak": 17040, + "annie": 17041, + "ASS": 17042, + "Ġmultinational": 17043, + "Associated": 17044, + "IZ": 17045, + "ĠBelle": 17046, + "Ġmand": 17047, + "asis": 17048, + "Mac": 17049, + "Ġpretend": 17050, + "ĠCommunication": 17051, + "Ġheartbreaking": 17052, + "ĠShepherd": 17053, + "ĠBIG": 17054, + "mph": 17055, + "ĠShield": 17056, + "ĠLiv": 17057, + "ĠStatus": 17058, + "Ġbikini": 17059, + "Ġranch": 17060, + "Ġpeacefully": 17061, + "ITCH": 17062, + "bourne": 17063, + "ĠVariety": 17064, + "Ġstationed": 17065, + "Ġhed": 17066, + "Ġexhausted": 17067, + "Ġsurpassed": 17068, + "Ġcatalyst": 17069, + "Ġsmuggling": 17070, + "uating": 17071, + "Ġ123": 17072, + "Ġdup": 17073, + "ĠSul": 17074, + "conf": 17075, + "jit": 17076, + "Ġmaiden": 17077, + "asta": 17078, + "ĠCalvin": 17079, + "borne": 17080, + "Ġgrim": 17081, + "Ġtort": 17082, + "cott": 17083, + "olas": 17084, + "NR": 17085, + "Ġbreakout": 17086, + "ĠHun": 17087, + "ĠGuatemala": 17088, + "Ġhistorian": 17089, + "ĠLawyers": 17090, + "ĠDisplay": 17091, + "Ġobstruct": 17092, + "ĠOsborne": 17093, + "Ġtherapies": 17094, + "ĠAub": 17095, + "Ġinjunction": 17096, + "stroke": 17097, + "Ġseafood": 17098, + "Ġhazardous": 17099, + "ĠWolver": 17100, + "ĠViolence": 17101, + "ĠBillion": 17102, + "ĠLetter": 17103, + "ĠWorldwide": 17104, + "Real": 17105, + "Ġexpires": 17106, + "Ġflawed": 17107, + "European": 17108, + "Ġrigorous": 17109, + "ĠSimilar": 17110, + "ĠSurface": 17111, + "ĠEF": 17112, + "mys": 17113, + "ĠFunds": 17114, + "ographer": 17115, + "Ġtribes": 17116, + "Ġspouse": 17117, + "Ġunsure": 17118, + "aways": 17119, + "Ġtrainers": 17120, + "arie": 17121, + "ĠZar": 17122, + "ĠComedy": 17123, + "ĠLit": 17124, + "ĠNoon": 17125, + "Ġgallon": 17126, + "Ġconsulate": 17127, + "ĠBras": 17128, + "iology": 17129, + "onies": 17130, + "ĠBelichick": 17131, + "ĠRoot": 17132, + "ĠLux": 17133, + "ĠSed": 17134, + "ĠTos": 17135, + "Ġinherited": 17136, + "tw": 17137, + "Ġdeaf": 17138, + "Ġdriveway": 17139, + "jah": 17140, + "ĠScientific": 17141, + "ĠNottingham": 17142, + "both": 17143, + "awan": 17144, + "Ġnut": 17145, + "ĠLebanese": 17146, + "ĠAAA": 17147, + "ĠSuzuki": 17148, + "ĠBU": 17149, + "ells": 17150, + "Ġspecify": 17151, + "ĠNotes": 17152, + "Ġvoluntarily": 17153, + "ĠMolly": 17154, + "Ġoutskirts": 17155, + "Ġbehaviors": 17156, + "Ġmilitia": 17157, + "Ġsplash": 17158, + "Ġpersonalized": 17159, + "ĠFiat": 17160, + "ĠKind": 17161, + "ĠTruck": 17162, + "py": 17163, + "ĠWIN": 17164, + "dist": 17165, + "itational": 17166, + "APP": 17167, + "ĠPelicans": 17168, + "ĠGam": 17169, + "mel": 17170, + "Ġmandated": 17171, + "Ġbalances": 17172, + "ĠWizards": 17173, + "iary": 17174, + "ĠAvailable": 17175, + "Ġkay": 17176, + "jin": 17177, + "eyed": 17178, + "Ġsterling": 17179, + "Ġconcealed": 17180, + "ĠFedEx": 17181, + "ĠPO": 17182, + "ĠJacqu": 17183, + "anted": 17184, + "eme": 17185, + "ĠDefensive": 17186, + "manship": 17187, + "Ġreliever": 17188, + "Ġshortstop": 17189, + "Ġphot": 17190, + "ĠGain": 17191, + "ĠConcern": 17192, + "due": 17193, + "Ġalgorithm": 17194, + "fell": 17195, + "ĠMountains": 17196, + "icians": 17197, + "Ġhonoring": 17198, + "Ġuploaded": 17199, + "Ġtore": 17200, + "GH": 17201, + "orde": 17202, + "ĠCoin": 17203, + "ĠAven": 17204, + "Ġliterary": 17205, + "Before": 17206, + "Ġtactic": 17207, + "Ġsocially": 17208, + "ĠSik": 17209, + "Ġthermal": 17210, + "Ġhor": 17211, + "price": 17212, + "Ġrooted": 17213, + "arrow": 17214, + "Ġcirculating": 17215, + "Ġlaughs": 17216, + "ĠLines": 17217, + 
"lig": 17218, + "Ġjudgement": 17219, + "....": 17220, + "Ġsewer": 17221, + "Ġdancer": 17222, + "ĠPens": 17223, + "Ġsig": 17224, + "ische": 17225, + "wives": 17226, + "Ġgran": 17227, + "ĠBron": 17228, + "ĠHyde": 17229, + "yards": 17230, + "Ġcandidacy": 17231, + "Ġhey": 17232, + "Ġcontributors": 17233, + "ĠUpdated": 17234, + "Ġ190": 17235, + "Ġhalls": 17236, + "Ġemphas": 17237, + "ĠCherry": 17238, + "Ġrim": 17239, + "Ġbilled": 17240, + "Ġbaked": 17241, + "ĠPopular": 17242, + "lb": 17243, + "Ġgravity": 17244, + "Under": 17245, + "Ġreservation": 17246, + "organ": 17247, + "ĠPict": 17248, + "ĠWhitney": 17249, + "Ġonboard": 17250, + "NEY": 17251, + "ĠBreaking": 17252, + "Ġflagged": 17253, + "rar": 17254, + "ĠBasic": 17255, + "ĠDomestic": 17256, + "ĠPent": 17257, + "Ġvigilant": 17258, + "Ġzoning": 17259, + "Fire": 17260, + "Ġcorrected": 17261, + "isbury": 17262, + "ĠLaure": 17263, + "ĠDevon": 17264, + "print": 17265, + "ĠTopics": 17266, + "ĠFuel": 17267, + "Ġcirculation": 17268, + "ĠPratt": 17269, + "Ġskiing": 17270, + "Ġtornado": 17271, + "dep": 17272, + "ĠUnless": 17273, + "ifting": 17274, + "Ġfool": 17275, + "should": 17276, + "Ġinspectors": 17277, + "Ġprotested": 17278, + "Ġba": 17279, + "ussia": 17280, + "Ġspun": 17281, + "grass": 17282, + "phone": 17283, + "Ġpotato": 17284, + "ĠBehind": 17285, + "cil": 17286, + "Ġconcession": 17287, + "Ġapplause": 17288, + "ĠChin": 17289, + "Ġceremonies": 17290, + "pit": 17291, + "Ġtraumatic": 17292, + "Ġbasics": 17293, + "Ġparameters": 17294, + "ĠMoz": 17295, + "ĠAIDS": 17296, + "Ph": 17297, + "Ġjudging": 17298, + "Ġlecture": 17299, + "Ġmunicipality": 17300, + "Ġcardiac": 17301, + "ogan": 17302, + "pir": 17303, + "could": 17304, + "Channel": 17305, + "Ġshattered": 17306, + "ĠAV": 17307, + "continental": 17308, + "chie": 17309, + "ibi": 17310, + "ĠOy": 17311, + "Mon": 17312, + "ĠCN": 17313, + "WC": 17314, + "Ġdistributor": 17315, + "ĠSavannah": 17316, + "Ġcleaned": 17317, + "ĠFlores": 17318, + "Ġembarrassed": 17319, + "Ġclay": 17320, + "Ġvolcano": 17321, + "Ġstressful": 17322, + "Ġsummoned": 17323, + "ĠSeg": 17324, + "Ġstatistical": 17325, + "ĠShak": 17326, + "Ġadequately": 17327, + "worthy": 17328, + "fighting": 17329, + "alan": 17330, + "Ġnecessity": 17331, + "Ġresidency": 17332, + "Ġsober": 17333, + "arius": 17334, + "ĠTaj": 17335, + "mount": 17336, + "wards": 17337, + "Ġaesthetic": 17338, + "Coin": 17339, + "ĠDew": 17340, + "were": 17341, + "SK": 17342, + "Ġpowerhouse": 17343, + "Ġcleanup": 17344, + "ĠWITH": 17345, + "ĠHers": 17346, + "ĠRao": 17347, + "ĠFlyers": 17348, + "Ġdominating": 17349, + "issued": 17350, + "ĠMcGr": 17351, + "Ġinsurgency": 17352, + "Ġburial": 17353, + "ĠPlains": 17354, + "ensive": 17355, + "ĠPresent": 17356, + "Mo": 17357, + "Ġnerves": 17358, + "Ġsmoothly": 17359, + "staff": 17360, + "Ġrestoring": 17361, + "ĠGeneration": 17362, + "Ġcommuters": 17363, + "ĠLegend": 17364, + "ĠGad": 17365, + "lied": 17366, + "Ġissuer": 17367, + "ĠDozens": 17368, + "Ġphases": 17369, + "ĠWu": 17370, + "ĠTunisia": 17371, + "ĠPacers": 17372, + "Ġdur": 17373, + "ĠIG": 17374, + "annon": 17375, + "sided": 17376, + "Ġvo": 17377, + "ĠNI": 17378, + "Ġvitamin": 17379, + "Ġsoc": 17380, + "Ġimmunity": 17381, + "Ġgenerates": 17382, + "ĠMcGu": 17383, + "Ġexplores": 17384, + "Ġassistants": 17385, + "Ġstems": 17386, + "ushed": 17387, + "ĠZak": 17388, + "ĠOwners": 17389, + "Ġvariant": 17390, + "ardy": 17391, + "ĠNewark": 17392, + "ĠCatalonia": 17393, + "Ġautonomy": 17394, + "Ġgreet": 17395, + "Ġawait": 17396, + "ĠLuckily": 17397, + "ĠTicket": 17398, + "ĠSTOR": 17399, 
+ "asy": 17400, + "Ġincorrect": 17401, + "Ġconsisting": 17402, + "Ġperspectives": 17403, + "ĠQuint": 17404, + "Ġtotaling": 17405, + "Ġnortheastern": 17406, + "Ġcharacterized": 17407, + "Ġsurfaces": 17408, + "nation": 17409, + "Ġprevents": 17410, + "ĠSho": 17411, + "Ġelectorate": 17412, + "Ġshortfall": 17413, + "chy": 17414, + "aws": 17415, + "ĠAddress": 17416, + "Ġdefensively": 17417, + "quel": 17418, + "chester": 17419, + "Ġterr": 17420, + "ahu": 17421, + "lined": 17422, + "ĠNev": 17423, + "unn": 17424, + "Def": 17425, + "pc": 17426, + "ĠSig": 17427, + "Ġnonetheless": 17428, + "ĠSundays": 17429, + "ĠBAS": 17430, + "Ġpolicemen": 17431, + "ĠGoal": 17432, + "apa": 17433, + "Ġrope": 17434, + "Ġoutage": 17435, + "ĠPaso": 17436, + "Ġsadness": 17437, + "ĠGrowing": 17438, + "ĠKyr": 17439, + "Ġale": 17440, + "ĠBreitbart": 17441, + "ĠVia": 17442, + "ĠBrig": 17443, + "idence": 17444, + "Ġ145": 17445, + "quire": 17446, + "Ġdistraction": 17447, + "ĠOdd": 17448, + "ĠSimply": 17449, + "ĠNin": 17450, + "Ġcompetent": 17451, + "ded": 17452, + "iper": 17453, + "ĠKaty": 17454, + "ĠSolomon": 17455, + "Ġfeeds": 17456, + "ĠMort": 17457, + "ĠRica": 17458, + "affe": 17459, + "Ġcooperating": 17460, + "Ġarrivals": 17461, + "Ġdelete": 17462, + "ĠAth": 17463, + "Ġtrustees": 17464, + "Ġtub": 17465, + "Ġsaga": 17466, + "otes": 17467, + "ĠCJ": 17468, + "Ġexited": 17469, + "stakes": 17470, + "Ġinflu": 17471, + "2000": 17472, + "ĠDonovan": 17473, + "ĠNur": 17474, + "Ġoutline": 17475, + "Ġaudition": 17476, + "oked": 17477, + "ĠJag": 17478, + "money": 17479, + "Ġcardiovascular": 17480, + "song": 17481, + "ĠOften": 17482, + "ĠGoff": 17483, + "ĠOaks": 17484, + "Will": 17485, + "acon": 17486, + "Ġ?": 17487, + "Har": 17488, + "ĠLambert": 17489, + "atoon": 17490, + "ĠAF": 17491, + "ĠMavericks": 17492, + "nia": 17493, + "ĠChennai": 17494, + "\"},\"": 17495, + "Ġpairing": 17496, + "mad": 17497, + "ause": 17498, + "ĠRide": 17499, + "111": 17500, + "ĠFallon": 17501, + "ĠHyder": 17502, + "ĠPiper": 17503, + "Ġfilmmakers": 17504, + "icon": 17505, + "ĠBeau": 17506, + "Ġbutt": 17507, + "lot": 17508, + "Ġrifles": 17509, + "Ġsunglasses": 17510, + "ĠTRA": 17511, + "Ġmagnetic": 17512, + "arty": 17513, + "ĠYo": 17514, + "ĠWeight": 17515, + "?!": 17516, + "ether": 17517, + "Ġaspir": 17518, + "Ġhunters": 17519, + "Ġcontamination": 17520, + "Ben": 17521, + "political": 17522, + "],\"": 17523, + "ĠBever": 17524, + "Ġmonuments": 17525, + "won": 17526, + "auc": 17527, + "Ġexpressions": 17528, + "Ġlakes": 17529, + "iao": 17530, + "abin": 17531, + "Ġpleading": 17532, + "Ġdiscounted": 17533, + "Ġdisappoint": 17534, + "ĠTW": 17535, + "craft": 17536, + "Ġsocieties": 17537, + "ĠAugusta": 17538, + "Ġbott": 17539, + "Ġmarker": 17540, + "ĠWrestling": 17541, + "CBC": 17542, + "athy": 17543, + "ĠAZ": 17544, + "Ġfabulous": 17545, + "valued": 17546, + "Ġoptical": 17547, + "Ġshaken": 17548, + "OSS": 17549, + "ĠImp": 17550, + "ĠAUD": 17551, + "inals": 17552, + "Ġrevital": 17553, + "Ġcontroller": 17554, + "Ġgrasp": 17555, + "uling": 17556, + "ĠFrederick": 17557, + "ague": 17558, + "bull": 17559, + "ĠLadies": 17560, + "Ġdisruptive": 17561, + "Ġbenefiting": 17562, + "Ġverge": 17563, + "ĠDak": 17564, + "Ġgrabs": 17565, + "ĠPAC": 17566, + "GN": 17567, + "ĠMcMahon": 17568, + "rob": 17569, + "ĠEspecially": 17570, + "ĠChrome": 17571, + "ĠBundesliga": 17572, + "104": 17573, + "Ġliberty": 17574, + "ĠSF": 17575, + "Ġvarieties": 17576, + "East": 17577, + "Ġgrowers": 17578, + "Ġsocialist": 17579, + "Ġunemployed": 17580, + "AMI": 17581, + "Ġtotals": 17582, + "ĠGib": 17583, + 
"Ġdefect": 17584, + "ĠOrtiz": 17585, + "ĠPerfect": 17586, + "Ġpraying": 17587, + "ISS": 17588, + "Ġul": 17589, + "Ġthrust": 17590, + "osc": 17591, + "ĠOtherwise": 17592, + "Ġobsessed": 17593, + "Ġ650": 17594, + "ĠWebsite": 17595, + "Ġspectators": 17596, + "ĠScout": 17597, + "ĠBoone": 17598, + "ĠDillon": 17599, + "Ġabortions": 17600, + "lect": 17601, + "utz": 17602, + "Ġvillagers": 17603, + "Ġaccelerating": 17604, + "Ġslap": 17605, + "Ġvague": 17606, + "Ġjurisdictions": 17607, + "League": 17608, + "ĠUruguay": 17609, + "Ġobstacle": 17610, + "Ġmanufactures": 17611, + "Ġcampaigned": 17612, + "ĠAdvance": 17613, + "ĠNort": 17614, + "emer": 17615, + "Ġ1964": 17616, + "Ġirre": 17617, + "Ġprog": 17618, + "ĠFeatured": 17619, + "Ġcommute": 17620, + "Ġhandset": 17621, + "akis": 17622, + "ĠArs": 17623, + "tail": 17624, + "iker": 17625, + "Ġcrafted": 17626, + "Ġupl": 17627, + "ĠMarcos": 17628, + "Looking": 17629, + "Ġseated": 17630, + "ĠBoat": 17631, + "Ġreadiness": 17632, + "ĠLLP": 17633, + "otechnology": 17634, + "facebook": 17635, + "ĠScouts": 17636, + "ĠEar": 17637, + "ĠAdv": 17638, + "ĠDemocracy": 17639, + "NI": 17640, + "oci": 17641, + "ĠSnapdragon": 17642, + "Saturday": 17643, + "ĠPra": 17644, + "ĠCoastal": 17645, + "ĠVoters": 17646, + "ĠLeigh": 17647, + "ohn": 17648, + "orry": 17649, + "Ġtechnicians": 17650, + "armed": 17651, + "Ġshrink": 17652, + "Ġspinning": 17653, + "agram": 17654, + "320": 17655, + "liner": 17656, + "ĠContest": 17657, + "ĠCountries": 17658, + "Ġfarewell": 17659, + "ĠCW": 17660, + "aris": 17661, + "Ġstorytelling": 17662, + "Ġpasser": 17663, + "Ġsailing": 17664, + "control": 17665, + "Ġdissent": 17666, + "ĠRih": 17667, + "Ġedit": 17668, + "Ġspoilers": 17669, + "itched": 17670, + "ĠBentley": 17671, + "Ġcant": 17672, + "mn": 17673, + "ĠMacy": 17674, + "Ġindefinitely": 17675, + "Ġvill": 17676, + "Ġmeth": 17677, + "ĠEL": 17678, + "Ġoptional": 17679, + "Ġremark": 17680, + "ĠVanessa": 17681, + "ã": 17682, + "Ġmasks": 17683, + "ĠProvincial": 17684, + "Ġculprit": 17685, + "ĠTol": 17686, + "Ġsnack": 17687, + "ĠInfinity": 17688, + "ĠPub": 17689, + "Ġbrakes": 17690, + "Ġclar": 17691, + "Ġinception": 17692, + "love": 17693, + "Ġwonders": 17694, + "Ġforged": 17695, + "ĠCEOs": 17696, + "Ġspecifications": 17697, + "irst": 17698, + "ension": 17699, + "ĠMarin": 17700, + "det": 17701, + "Ġordeal": 17702, + "ĠFeed": 17703, + "December": 17704, + "Ġstrokes": 17705, + "fect": 17706, + "orial": 17707, + "Ġshowcasing": 17708, + "Ġstack": 17709, + "UAL": 17710, + "ĠAlexandra": 17711, + "Ġpoison": 17712, + "ĠFry": 17713, + "ĠCars": 17714, + "Ġprototype": 17715, + "ĠUSDA": 17716, + "ĠIF": 17717, + "flows": 17718, + "Ġtailored": 17719, + "ĠGear": 17720, + "Ġmyth": 17721, + "Ġplatinum": 17722, + "seven": 17723, + "founded": 17724, + "encing": 17725, + "ĠTip": 17726, + "ĠMald": 17727, + "Ġgeopolitical": 17728, + "112": 17729, + "Ġenqu": 17730, + "ĠNR": 17731, + "ĠNadu": 17732, + "leen": 17733, + "ĠTat": 17734, + "Ġcolon": 17735, + "ĠSize": 17736, + "Ġvis": 17737, + "Ġbere": 17738, + "ĠAnnie": 17739, + "ĠWatkins": 17740, + "Ġpumping": 17741, + "cur": 17742, + "ĠBates": 17743, + "Ġslug": 17744, + "miss": 17745, + "Ġforecasting": 17746, + "source": 17747, + "Ġacknowledges": 17748, + "Ġprosecute": 17749, + "Ġtestament": 17750, + "Ġcum": 17751, + "ems": 17752, + "Ġsocks": 17753, + "ĠSame": 17754, + "Ġcompetitiveness": 17755, + "Ġdefinitive": 17756, + "Ġintensified": 17757, + "Ġsatisfying": 17758, + "Ġphysics": 17759, + "ĠHarden": 17760, + "Ġsubsidy": 17761, + "Men": 17762, + "ĠPaddock": 17763, + "Ġworkouts": 
17764, + "ĠSaw": 17765, + "Ġcrisp": 17766, + "ĠBezos": 17767, + "ĠVote": 17768, + "Ġguiding": 17769, + "anged": 17770, + "Ġstaple": 17771, + "ŀ": 17772, + "ules": 17773, + "ĠAvengers": 17774, + "Ġoptim": 17775, + "ĠBuffett": 17776, + "Ġtimetable": 17777, + "oust": 17778, + "HE": 17779, + "ĠGrab": 17780, + "Have": 17781, + "cca": 17782, + "Ġwaived": 17783, + "Ġretaining": 17784, + "Ġaber": 17785, + "Ġoffline": 17786, + "Ġvigil": 17787, + "books": 17788, + "ĠRein": 17789, + "Ġacknowledging": 17790, + "ĠDoyle": 17791, + "Ġproteins": 17792, + "Ġmixing": 17793, + "ĠAlcohol": 17794, + "ĠJD": 17795, + "Ġsyn": 17796, + "Ġthieves": 17797, + "Ġhomemade": 17798, + "Ġfeminist": 17799, + "ĠRoosevelt": 17800, + "ĠCoal": 17801, + "Ġwishing": 17802, + "ĠSIGN": 17803, + "ĠLad": 17804, + "Ġempathy": 17805, + "ĠBrooke": 17806, + "ĠMash": 17807, + "inations": 17808, + "''": 17809, + "ulators": 17810, + "Ġdrastically": 17811, + "Ġfloral": 17812, + "ĠGuild": 17813, + "Ġundercover": 17814, + "ĠLaboratory": 17815, + "ĠRank": 17816, + "Ġrestraining": 17817, + "Ġparagraph": 17818, + "Ġpersona": 17819, + "ĠEmployment": 17820, + "ogs": 17821, + "ĠGw": 17822, + "ĠMedal": 17823, + "Ġwildly": 17824, + "fare": 17825, + "ĠCNBC": 17826, + "photo": 17827, + "Ġtransforming": 17828, + "Ġtermination": 17829, + "still": 17830, + "INT": 17831, + "Ġbal": 17832, + "ĠEconom": 17833, + "ĠLarson": 17834, + "Ġheck": 17835, + "Ġquantitative": 17836, + "Ġemergence": 17837, + "esta": 17838, + "Ġknot": 17839, + "Ġwhale": 17840, + "ĠðŁĺ": 17841, + "Ġperimeter": 17842, + "Ġempowerment": 17843, + "Ġmg": 17844, + "Ġrents": 17845, + "Ġrefreshing": 17846, + "Ġleasing": 17847, + "Ġpatents": 17848, + "andi": 17849, + "Ġfathers": 17850, + "Ġunse": 17851, + "Ġprocessors": 17852, + "Down": 17853, + "Ġreversal": 17854, + "veh": 17855, + "andal": 17856, + "ĠKov": 17857, + "Blue": 17858, + "Ġspecializes": 17859, + "Link": 17860, + "ĠConsidering": 17861, + "ĠEdmund": 17862, + "Ġneo": 17863, + "agger": 17864, + "rg": 17865, + "Ġseverity": 17866, + "Ġcour": 17867, + "RL": 17868, + "ĠTeresa": 17869, + "Ġgallons": 17870, + "Ġacquitted": 17871, + "Ġaccompl": 17872, + "Ġcracks": 17873, + "Ġsciences": 17874, + "Club": 17875, + "Ġpredicts": 17876, + "ĠVu": 17877, + "Ġhints": 17878, + "ĠZack": 17879, + "Ġrefurb": 17880, + "Ġdestabil": 17881, + "ĠSamar": 17882, + "ĠInfo": 17883, + "fs": 17884, + "Ġratios": 17885, + "Ġinherent": 17886, + "ĠContinental": 17887, + "Ġtreasure": 17888, + "Ġcaucus": 17889, + "Ġenact": 17890, + "orporated": 17891, + "ineries": 17892, + "Ġtastes": 17893, + "main": 17894, + "Ġsq": 17895, + "ickson": 17896, + "corruption": 17897, + "ulture": 17898, + "ĠGoodman": 17899, + "ĠLing": 17900, + "ĠSup": 17901, + "Ġexposing": 17902, + "immers": 17903, + "Ġresponds": 17904, + "heimer": 17905, + "Air": 17906, + "ĠFigures": 17907, + "Ġlongstanding": 17908, + "ĠAnalytics": 17909, + "Ġenforced": 17910, + "Ġnickname": 17911, + "Ġclinch": 17912, + "ĠCarpenter": 17913, + "ĠPharma": 17914, + "Ġconstructive": 17915, + "Ġgel": 17916, + "ĠSham": 17917, + "ĠTOP": 17918, + "ĠDerrick": 17919, + "ör": 17920, + "birds": 17921, + "ĠTong": 17922, + "ĠBatman": 17923, + "ĠRouhani": 17924, + "ĠOlive": 17925, + "ĠRiv": 17926, + "Ġdessert": 17927, + "Ġguides": 17928, + "Ġsag": 17929, + "Ġchemotherapy": 17930, + "Ġslept": 17931, + "ĠFranc": 17932, + "ĠDunk": 17933, + "writers": 17934, + "ĠÃĹ": 17935, + "Ġ401": 17936, + "Ġoutfielder": 17937, + "ĠHamburg": 17938, + "izu": 17939, + "Ġscr": 17940, + "Ġcomparisons": 17941, + "Ġwhites": 17942, + "Ġtraits": 17943, + 
"Ġcollateral": 17944, + "LEY": 17945, + "ideshow": 17946, + "Ġstatutory": 17947, + "Ġruin": 17948, + "Ġsituated": 17949, + "tem": 17950, + "Ġinject": 17951, + "rage": 17952, + "550": 17953, + "Ġfactions": 17954, + "ĠNaomi": 17955, + "cutting": 17956, + "Ġcommunicating": 17957, + "Ġrailroad": 17958, + "Ġsparking": 17959, + "Ġrespiratory": 17960, + "ĠWebster": 17961, + "ĠCarbon": 17962, + "Ġundertaking": 17963, + "Ġcomposer": 17964, + "ĠFigure": 17965, + "Ġspecified": 17966, + "Video": 17967, + "uber": 17968, + "Ġsexuality": 17969, + "lected": 17970, + "ĠBurger": 17971, + "ĠCards": 17972, + "SR": 17973, + "ĠLie": 17974, + "Ġrecount": 17975, + "Ġexceeding": 17976, + "Ġquoting": 17977, + "ĠJama": 17978, + "ĠVictorian": 17979, + "Ġsway": 17980, + "ĠGes": 17981, + "ĠSI": 17982, + "ĠKazakhstan": 17983, + "Ġaccusation": 17984, + "etr": 17985, + "Ah": 17986, + "Ġproc": 17987, + "Ġlamb": 17988, + "ĠMorales": 17989, + "ĠLily": 17990, + "Ġderail": 17991, + "Ġcontributes": 17992, + "iddle": 17993, + "ĠConcord": 17994, + "Ġelectr": 17995, + "Ġequip": 17996, + "Ġquantum": 17997, + "Ġthereafter": 17998, + "Ġarrange": 17999, + "Ġraided": 18000, + "ĠMove": 18001, + "ĠSang": 18002, + "ĠGaming": 18003, + "Ġbiology": 18004, + "ĠAmnesty": 18005, + "Ġdemise": 18006, + "ĠBarton": 18007, + "Ġqualifier": 18008, + "ANI": 18009, + "Ġundersc": 18010, + "Ġroyalty": 18011, + "ĠINC": 18012, + "Ġsne": 18013, + "ariat": 18014, + "ĠWan": 18015, + "Ġcluster": 18016, + "quin": 18017, + "Ġwhales": 18018, + "ĠFear": 18019, + "ĠBrew": 18020, + "Ġdeport": 18021, + "airs": 18022, + "Ġcensus": 18023, + "OUS": 18024, + "Ġrespectful": 18025, + "bone": 18026, + "Ġwaivers": 18027, + "friend": 18028, + "Ġsystemic": 18029, + "ĠDion": 18030, + "James": 18031, + "ĠAdmission": 18032, + "Ġstigma": 18033, + "ĠTIME": 18034, + "Ġunderpin": 18035, + "ĠWitnesses": 18036, + "Ġdigs": 18037, + "Ġgenocide": 18038, + "Ġstaging": 18039, + "rolled": 18040, + "Ġspecially": 18041, + "oop": 18042, + "Ġbaseline": 18043, + "ĠRF": 18044, + "avis": 18045, + "Ġvocals": 18046, + "COL": 18047, + "LD": 18048, + "Ġimpending": 18049, + "ĠCaldwell": 18050, + "Ġaluminium": 18051, + "Ġstra": 18052, + "ĠTayyip": 18053, + "Ġadmissions": 18054, + "falls": 18055, + "Ġrealizing": 18056, + "oen": 18057, + "ĠRV": 18058, + "ĠMog": 18059, + "Ġadvocating": 18060, + "ĠPepper": 18061, + "lived": 18062, + "ĠWick": 18063, + "Facebook": 18064, + "ĠSpect": 18065, + "Ġshout": 18066, + "Ġfractured": 18067, + "vet": 18068, + "Ġ1966": 18069, + "Ġcompensate": 18070, + "ĠVolume": 18071, + "Ġcategor": 18072, + "ĠHuntington": 18073, + "Free": 18074, + "OUGH": 18075, + "local": 18076, + "Sch": 18077, + "uti": 18078, + "Ġburger": 18079, + "Ġbush": 18080, + "Ġimpacting": 18081, + "Ġfrost": 18082, + "tti": 18083, + "ĠFresno": 18084, + "onz": 18085, + "shaw": 18086, + "ĠLibyan": 18087, + "Ġassert": 18088, + "ĠLegacy": 18089, + "ĠIE": 18090, + "ĠKinder": 18091, + "ĠHorizon": 18092, + "Ġtum": 18093, + "Ġsignaled": 18094, + "ĠFors": 18095, + "Ġspeedy": 18096, + "rang": 18097, + "ĠFT": 18098, + "Ġselecting": 18099, + "Ġpale": 18100, + "WD": 18101, + "Ġprobability": 18102, + "OUND": 18103, + "istrate": 18104, + "Ġsens": 18105, + "ocating": 18106, + "Ġinterpret": 18107, + "Ġpuzzle": 18108, + "Ġinland": 18109, + "Ġmanipulation": 18110, + "Sal": 18111, + "Ġfulfilling": 18112, + "ĠMcMaster": 18113, + "Make": 18114, + "jun": 18115, + "giving": 18116, + "ĠNiagara": 18117, + "Ġscholars": 18118, + "ALT": 18119, + "ĠSteam": 18120, + "omin": 18121, + "ĠSau": 18122, + "ĠDowning": 18123, + "Ġgy": 18124, + "ĠTit": 
18125, + "ĠLav": 18126, + "ĠPepsi": 18127, + "Ġdumping": 18128, + "ĠDetect": 18129, + "ĠTDs": 18130, + "ĠKob": 18131, + "ĠSY": 18132, + "Ġpioneer": 18133, + "Ġ_": 18134, + "Ġclarified": 18135, + "ĠTests": 18136, + "opic": 18137, + "ĠMN": 18138, + "ĠBowman": 18139, + "umin": 18140, + "Ġwidow": 18141, + "Ġrallying": 18142, + "ĠPull": 18143, + "Ġprojection": 18144, + "Ġescalation": 18145, + "Ġlibraries": 18146, + "ĠFounder": 18147, + "ĠHugo": 18148, + "ĠStyle": 18149, + "Ġfreelance": 18150, + "Ġlisteners": 18151, + "Ġdiscovering": 18152, + "ĠPlans": 18153, + "Ġfranchises": 18154, + "ĠPam": 18155, + "Ġfarther": 18156, + "UI": 18157, + "opers": 18158, + "103": 18159, + "ublished": 18160, + "keys": 18161, + "aky": 18162, + "Ġinnov": 18163, + "¦": 18164, + "ĠDrum": 18165, + "Ġwraps": 18166, + "ĠCongressman": 18167, + "ĠVenus": 18168, + "fake": 18169, + "ĠBronx": 18170, + "ĠDinner": 18171, + "faced": 18172, + "Ġbackward": 18173, + "inge": 18174, + "Ġarsenal": 18175, + "ĠAce": 18176, + "uden": 18177, + "fre": 18178, + "Ġspa": 18179, + "ĠSaunders": 18180, + "ĠMatter": 18181, + "ĠSpons": 18182, + "Ġconsultations": 18183, + "ĠRuss": 18184, + "Ġsculpture": 18185, + "Ġuncommon": 18186, + "Nov": 18187, + "pg": 18188, + "otherapy": 18189, + "Ġgol": 18190, + "ĠBlazers": 18191, + "Ġadvises": 18192, + "ĠRegulatory": 18193, + "ĠBoyle": 18194, + "Äģ": 18195, + "Ġcuisine": 18196, + "Ġencouragement": 18197, + "yp": 18198, + "eny": 18199, + "ĠOrchestra": 18200, + "ĠChicken": 18201, + "Ġ1965": 18202, + "ĠPret": 18203, + "ĠCooperation": 18204, + "ĠDevices": 18205, + "ĠRodney": 18206, + "ĠHonduras": 18207, + "ĠEgg": 18208, + "Ġchurn": 18209, + "Ġclutch": 18210, + "ĠBernstein": 18211, + "Ġain": 18212, + "Ġformidable": 18213, + "ĠFacility": 18214, + "Ġpag": 18215, + "mons": 18216, + "bol": 18217, + "Ġliteracy": 18218, + "Ġsubmissions": 18219, + "ĠHulu": 18220, + "ĠConstitutional": 18221, + "ĠIsh": 18222, + "ĠPaula": 18223, + "olve": 18224, + "Ġabundance": 18225, + "ĠAla": 18226, + "ĠEcuador": 18227, + "Ġreconstruction": 18228, + "Ġcrush": 18229, + "reek": 18230, + "ĠÂŃ": 18231, + "ibo": 18232, + "Ġpracticed": 18233, + "Ġpac": 18234, + "rett": 18235, + "Ġpasta": 18236, + "Ġresp": 18237, + "ĠFlag": 18238, + "pal": 18239, + "Ġcommenting": 18240, + "Ġrecap": 18241, + "âĢĶâĢĶ": 18242, + "ĠToy": 18243, + "ĠMeredith": 18244, + "Ġreceipt": 18245, + "Ġseparating": 18246, + "ĠMap": 18247, + "Ġmogul": 18248, + "ĠBurlington": 18249, + "Ġger": 18250, + "Ġcoordinate": 18251, + "grad": 18252, + "Ġescalated": 18253, + "Ġproceeded": 18254, + "turned": 18255, + "Ġupt": 18256, + "hum": 18257, + "ĠWere": 18258, + "Whether": 18259, + "Ġenjoyable": 18260, + "energy": 18261, + "Ġprohibit": 18262, + "Ġhurdle": 18263, + "Ġdivorced": 18264, + "Ġcommentator": 18265, + "GT": 18266, + "ATH": 18267, + "Ġtravellers": 18268, + "Ġpopulated": 18269, + "ĠVo": 18270, + "ĠRebels": 18271, + "Ġspurred": 18272, + "Ġideological": 18273, + "Ġelephant": 18274, + "keyes": 18275, + "Pat": 18276, + "Ġlinger": 18277, + "Ġreps": 18278, + "Ġcocktails": 18279, + "ĠKristen": 18280, + "istically": 18281, + "Ġgunmen": 18282, + "Ġ1920": 18283, + "Ġquart": 18284, + "National": 18285, + "Ġexceptions": 18286, + "kat": 18287, + "priced": 18288, + "ĠHarold": 18289, + "ĠPistons": 18290, + "Ġcompounds": 18291, + "Ġmouse": 18292, + "Ġexhibits": 18293, + "ĠBurk": 18294, + "Ġclassmates": 18295, + "Ġcirculated": 18296, + "Ġattributable": 18297, + "ĠBaton": 18298, + "Ġorganizer": 18299, + "Ġdurable": 18300, + "Ġsingers": 18301, + "ĠOman": 18302, + "Ġhydrogen": 18303, + "Ġslash": 
18304, + "Ġaccidental": 18305, + "ĠAbrams": 18306, + "KS": 18307, + "itty": 18308, + "Ġrust": 18309, + "Ġselections": 18310, + "porting": 18311, + "ĠEmanuel": 18312, + "XX": 18313, + "ĠThornton": 18314, + "Ġcolumns": 18315, + "Ġsentiments": 18316, + "fun": 18317, + "Ġplight": 18318, + "ĠSister": 18319, + "ĠMaggie": 18320, + "hya": 18321, + "Daniel": 18322, + "Ġplung": 18323, + "orio": 18324, + "ĠYorker": 18325, + "ĠSaturdays": 18326, + "Ġloc": 18327, + "aye": 18328, + "illon": 18329, + "ĠConsulting": 18330, + "pled": 18331, + "ĠZin": 18332, + "ĠFarms": 18333, + "ĠGiuliani": 18334, + "ĠMIN": 18335, + "ĠHanson": 18336, + "ĠComplete": 18337, + "ourke": 18338, + "oche": 18339, + "ĠJord": 18340, + "Ġprofessors": 18341, + "ĠWILL": 18342, + "ĠCron": 18343, + "Ġdorm": 18344, + "Ġcracking": 18345, + "tur": 18346, + "ORS": 18347, + "Ant": 18348, + "Ġdeduction": 18349, + "ĠSIM": 18350, + "igue": 18351, + "ĠValent": 18352, + "ĠEthereum": 18353, + "ĠSunny": 18354, + "ĠExtra": 18355, + "ivan": 18356, + "ĠFo": 18357, + "Ġleases": 18358, + "ibe": 18359, + "Ġ1800": 18360, + "Ġslapped": 18361, + "emaker": 18362, + "Ġfa": 18363, + "rien": 18364, + "ĠPeriod": 18365, + "ĠES": 18366, + "ĠBlu": 18367, + "Ġpreserving": 18368, + "Ġsmarter": 18369, + "mans": 18370, + "Ġgest": 18371, + "zu": 18372, + "nu": 18373, + "Ġdivest": 18374, + "roc": 18375, + "ĠFlood": 18376, + "Given": 18377, + "ĠNorton": 18378, + "Ġgranting": 18379, + "Ġdealings": 18380, + "Ġgeographic": 18381, + "esa": 18382, + "Ġcub": 18383, + "Ġcriticizing": 18384, + "ĠCub": 18385, + "Ġsurroundings": 18386, + "ĠInternal": 18387, + "Ġsle": 18388, + "Ġcrushing": 18389, + "ĠPP": 18390, + "izations": 18391, + "ĠAbdel": 18392, + "Joe": 18393, + "ĠVisitors": 18394, + "ĠCarly": 18395, + "INGTON": 18396, + "ĠGC": 18397, + "ĠWB": 18398, + "Ġgently": 18399, + "·": 18400, + "though": 18401, + "ĠAlto": 18402, + "Ġresting": 18403, + "ĠPerson": 18404, + "ĠTon": 18405, + "Ġbore": 18406, + "ĠClar": 18407, + "Ġmot": 18408, + "Ġbathrooms": 18409, + "ĠTypically": 18410, + "Ġdisconnect": 18411, + "Ġtightly": 18412, + "ĠHarvest": 18413, + "ĠHed": 18414, + "ĠGermans": 18415, + "atar": 18416, + "Ġkeynote": 18417, + "Ġimproper": 18418, + "fil": 18419, + "Ġintens": 18420, + "iev": 18421, + "Ġmedi": 18422, + "Ġtenant": 18423, + "Ġfootsteps": 18424, + "uli": 18425, + "Ġlegalization": 18426, + "106": 18427, + "ĠLexington": 18428, + "folio": 18429, + "Ġ½": 18430, + "ĠRita": 18431, + "Ġbattered": 18432, + "inka": 18433, + "ĠJavaScript": 18434, + "ĠMusical": 18435, + "ĠTalent": 18436, + "Ġlounge": 18437, + "Ġintimidation": 18438, + "ikh": 18439, + "ĠFam": 18440, + "Ġtherapeutic": 18441, + "Ġbalancing": 18442, + "Ġrocky": 18443, + "liners": 18444, + "ĠPredators": 18445, + "Ġregistering": 18446, + "Ġdiligence": 18447, + "ĠRover": 18448, + "ĠDot": 18449, + "Ġterminated": 18450, + "ĠEdu": 18451, + "Ġcharming": 18452, + "ĠPLAY": 18453, + "ĠFact": 18454, + "ĠCi": 18455, + ").\"": 18456, + "ĠWrestle": 18457, + "hun": 18458, + "Ġopenings": 18459, + "Ġfou": 18460, + "Ġ126": 18461, + "spe": 18462, + "ĠAW": 18463, + "Ġbud": 18464, + "ĠTemper": 18465, + "ĠOrthodox": 18466, + "Ġprogressed": 18467, + "tre": 18468, + "Ġtasting": 18469, + "Ġscrutin": 18470, + "ĠLima": 18471, + "Ġlayout": 18472, + "Ġlitter": 18473, + "ijk": 18474, + "ĠParkinson": 18475, + "ĠAnfield": 18476, + "Ġdevelopmental": 18477, + "Ġheaven": 18478, + "ĠWoodward": 18479, + "index": 18480, + "Ġpistol": 18481, + "Ġreson": 18482, + "ĠWS": 18483, + "Ġemb": 18484, + "ĠLap": 18485, + "ĠPle": 18486, + "lington": 18487, + "ĠSit": 18488, 
+ "Ġabruptly": 18489, + "ĠSenegal": 18490, + "ĠYates": 18491, + "aceutical": 18492, + "ĠJak": 18493, + "ĠHastings": 18494, + "iste": 18495, + "ĠDB": 18496, + "ĠAgent": 18497, + "Ġpreservation": 18498, + "ĠLank": 18499, + "ĠSuffolk": 18500, + "Ġboo": 18501, + "essed": 18502, + "Ġempowering": 18503, + "enne": 18504, + "Ġrecycled": 18505, + "Ġstrateg": 18506, + "Ġbrake": 18507, + "135": 18508, + "ĠStef": 18509, + "ĠFlake": 18510, + "ĠGregg": 18511, + "ĠRent": 18512, + "Ġinstallment": 18513, + "FW": 18514, + "ĠCran": 18515, + "obo": 18516, + "ml": 18517, + "ĠJade": 18518, + "Ġaccuses": 18519, + "ĠNvidia": 18520, + "Ġburg": 18521, + "High": 18522, + "Ġbothered": 18523, + "ĠBenn": 18524, + "Ġinterrupted": 18525, + "Ġtrek": 18526, + "Ġserv": 18527, + "Ġpatron": 18528, + "Ġdictator": 18529, + "owa": 18530, + "jad": 18531, + "ĠTulsa": 18532, + "Ġboil": 18533, + "Ġdisplaying": 18534, + "Ġcinem": 18535, + "awaited": 18536, + "¸": 18537, + "Ġreacts": 18538, + "ĠDee": 18539, + "ĠGron": 18540, + "igation": 18541, + "Ġservic": 18542, + "capt": 18543, + "Ġinsane": 18544, + "ĠVeteran": 18545, + "umen": 18546, + "End": 18547, + "ĠCream": 18548, + "Ġextremism": 18549, + "ĠMalone": 18550, + "Col": 18551, + "Ġsafeguard": 18552, + "Ġtomatoes": 18553, + "die": 18554, + "Ġchamp": 18555, + "zero": 18556, + "ĠPRES": 18557, + "Ġchoir": 18558, + "Ġpediatric": 18559, + "Ġprivileged": 18560, + "Ġdownstream": 18561, + "Business": 18562, + "ĠFighting": 18563, + "atable": 18564, + "Ġsums": 18565, + "Ġinsult": 18566, + "arten": 18567, + "ĠWikiLeaks": 18568, + "Ġpads": 18569, + "Ġretali": 18570, + "ĠHunts": 18571, + "Ġindie": 18572, + "ĠShields": 18573, + "ĠMortgage": 18574, + "oses": 18575, + "ampton": 18576, + "ĠVideos": 18577, + "ĠPER": 18578, + "itionally": 18579, + "ĠKimmel": 18580, + "sum": 18581, + "trade": 18582, + "acity": 18583, + "marked": 18584, + "ĠAngus": 18585, + "Ġtemper": 18586, + "Ġseizure": 18587, + "Ġfictional": 18588, + "utton": 18589, + "eva": 18590, + "Rs": 18591, + "Ġintra": 18592, + "ĠRequest": 18593, + "ppe": 18594, + "ĠeBay": 18595, + "ĠUSS": 18596, + "Ġ1500": 18597, + "Ġpossessing": 18598, + "Ġbacon": 18599, + "ĠSexual": 18600, + "ĠBuff": 18601, + "Ġslaughter": 18602, + "Ġjur": 18603, + "zhou": 18604, + "suit": 18605, + "ĠCha": 18606, + "ĠBuk": 18607, + "crime": 18608, + "ĠEasy": 18609, + "ĠChain": 18610, + "aq": 18611, + "ĠPall": 18612, + "flation": 18613, + "225": 18614, + "oup": 18615, + "109": 18616, + "ĠMcKenzie": 18617, + "Ġclearer": 18618, + "ĠDogs": 18619, + "oration": 18620, + "Ġsubs": 18621, + "Follow": 18622, + "ĠShirley": 18623, + "Ġadjusting": 18624, + "ĠEFF": 18625, + "Ġflipped": 18626, + "Ġconform": 18627, + "ĠLaurent": 18628, + "Ġcircular": 18629, + "ĠNOR": 18630, + "Ġmort": 18631, + "Ġtexture": 18632, + "avour": 18633, + "Ġflex": 18634, + "ĠHedge": 18635, + "ðŁĺ": 18636, + "Ġtrophies": 18637, + "ĠINV": 18638, + "Ġboast": 18639, + "ĠTyr": 18640, + "ĠNichols": 18641, + "ĠSpa": 18642, + "Ġcheered": 18643, + "Ġprey": 18644, + "reach": 18645, + "Ġbreached": 18646, + "ĠRegions": 18647, + "ĠLyft": 18648, + "ĠTul": 18649, + "ĠKore": 18650, + "Ġendure": 18651, + "ĠCover": 18652, + "\").": 18653, + "ĠSavage": 18654, + "ère": 18655, + "reens": 18656, + "Ġnic": 18657, + "sector": 18658, + "Ġweaknesses": 18659, + "Ġreboot": 18660, + "Ġ210": 18661, + "Ġimagery": 18662, + "ĠFrem": 18663, + "Ġclue": 18664, + "ĠLars": 18665, + "Ġfaction": 18666, + "hetic": 18667, + "Ġallied": 18668, + "ĠMarvin": 18669, + "Ġmethodology": 18670, + "ĠTN": 18671, + "Ġutter": 18672, + "Ġ270": 18673, + "ĠVolvo": 18674, 
+ "oline": 18675, + "ĠACLU": 18676, + "Ġindirect": 18677, + "Ġminer": 18678, + "ĠBale": 18679, + "ĠStrange": 18680, + "ĠFuller": 18681, + "Ġexpelled": 18682, + "ĠTropical": 18683, + "Ġremotely": 18684, + "ĠTIM": 18685, + "Ġinnocence": 18686, + "Ġconfined": 18687, + "Ġfares": 18688, + "Ġprevalent": 18689, + "Ġdesp": 18690, + "House": 18691, + "azar": 18692, + "Ġgestures": 18693, + "ĠCES": 18694, + "ĠDM": 18695, + "eal": 18696, + "ĠÐ": 18697, + "Ġburnt": 18698, + "Ġframed": 18699, + "ĠDani": 18700, + "Ġhol": 18701, + "ĠCannes": 18702, + "ĠHayden": 18703, + "Ġwardrobe": 18704, + "ĠAssange": 18705, + "ĠSamp": 18706, + "bay": 18707, + "sky": 18708, + "ĠHence": 18709, + "ĠGrizzlies": 18710, + "rates": 18711, + "laws": 18712, + "ĠMandela": 18713, + "ĠHoover": 18714, + "rics": 18715, + "charged": 18716, + "Ġexclude": 18717, + "Ġpassive": 18718, + "Ġcontinuation": 18719, + "Ġblunt": 18720, + "Ġvac": 18721, + "ĠEmerging": 18722, + "rench": 18723, + "tv": 18724, + "ĠHollow": 18725, + "ĠOC": 18726, + "Ġadvisors": 18727, + "Ġrendered": 18728, + "ĠBernardino": 18729, + "ĠSupporters": 18730, + "ronic": 18731, + "Ġchancellor": 18732, + "Ġ1963": 18733, + "Ġuranium": 18734, + "Ġak": 18735, + "ĠOptions": 18736, + "ermott": 18737, + "ĠBerger": 18738, + "ibia": 18739, + "Ġexplosions": 18740, + "Ġimpairment": 18741, + "Ġhail": 18742, + "Ġalley": 18743, + "Ġcruelty": 18744, + "ĠClarence": 18745, + "Ġvariations": 18746, + "Ġrealm": 18747, + "Ġrenovations": 18748, + "ĠNorwich": 18749, + "Ġbelongings": 18750, + "Ġmerchants": 18751, + "ĠMinisters": 18752, + "ĠDodd": 18753, + "Ġviewer": 18754, + "Ġneutrality": 18755, + "quer": 18756, + "ĠPrinceton": 18757, + "dead": 18758, + "arest": 18759, + "GET": 18760, + "ĠCanadiens": 18761, + "ĠIgn": 18762, + "clear": 18763, + "Mal": 18764, + "ĠBridges": 18765, + "ĠHayward": 18766, + "Ġremarked": 18767, + "ingle": 18768, + "Ġsob": 18769, + "Ġdepart": 18770, + "beans": 18771, + "Ġpreserved": 18772, + "ĠFairfax": 18773, + "Ġforgot": 18774, + "ĠBeh": 18775, + "Rob": 18776, + "Ġcooperative": 18777, + "ullah": 18778, + "Ġmates": 18779, + "Ġrang": 18780, + "Ġthigh": 18781, + "Ġabducted": 18782, + "Ġchaired": 18783, + "ĠHearts": 18784, + "Ġidentifies": 18785, + "ĠBuckingham": 18786, + "ijn": 18787, + "ĠJab": 18788, + "Ġclashed": 18789, + "feed": 18790, + "sites": 18791, + "ĠCareer": 18792, + "exp": 18793, + "ĠBuccaneers": 18794, + "scape": 18795, + "Ġupdating": 18796, + "Ġintentional": 18797, + "ĠGuam": 18798, + "ĠBreakfast": 18799, + "ĠHag": 18800, + "Media": 18801, + "Ġtapping": 18802, + "Ġpics": 18803, + "Ġeaten": 18804, + "Ġpremise": 18805, + "Kim": 18806, + "ĠStorage": 18807, + "Ġextensively": 18808, + "Ġoutrageous": 18809, + "ĠSadly": 18810, + "Global": 18811, + "¢": 18812, + "leaning": 18813, + "CM": 18814, + "Ġeasiest": 18815, + "ument": 18816, + "Ġ122": 18817, + "Ġdaunting": 18818, + "ISE": 18819, + "Ġsunset": 18820, + "Ġreset": 18821, + "Ġbent": 18822, + "Trust": 18823, + "ĠCaleb": 18824, + "ĠRut": 18825, + "ĠBast": 18826, + "ETS": 18827, + "iencies": 18828, + "Ġpu": 18829, + "ature": 18830, + "Ġrealities": 18831, + "omi": 18832, + "Ġsoda": 18833, + "Ġunveil": 18834, + "ĠGoldberg": 18835, + "opes": 18836, + "Ġuprising": 18837, + "ĠMR": 18838, + "Ġendorse": 18839, + "Ġsail": 18840, + "Ġconverting": 18841, + "Ġglamorous": 18842, + "ĠHollande": 18843, + "108": 18844, + "isky": 18845, + "Ġcushion": 18846, + "240": 18847, + "Ġadventures": 18848, + "Ġantitrust": 18849, + "ĠStockholm": 18850, + "pace": 18851, + "ĠVald": 18852, + "ĠTransfer": 18853, + "ERT": 18854, + "ĠMcInt": 18855, 
+ "Ġsurging": 18856, + "ogn": 18857, + "Ġlauded": 18858, + "ĠZam": 18859, + "ĠRough": 18860, + "TOR": 18861, + "Ġwed": 18862, + "Ġorigins": 18863, + "ĠEld": 18864, + "oso": 18865, + "Ġsupplying": 18866, + "ĠPetty": 18867, + "ĠTwe": 18868, + "ĠDenise": 18869, + "ĠBec": 18870, + "Ġbehave": 18871, + "Ġ121": 18872, + "estone": 18873, + "ĠBoulder": 18874, + "ĠBlackhawks": 18875, + "ĠWyatt": 18876, + "Ġfiguring": 18877, + "ĠDeborah": 18878, + "agi": 18879, + "significant": 18880, + "Ġasthma": 18881, + "Ġmessy": 18882, + "mpire": 18883, + "Ġax": 18884, + "Ġaspiring": 18885, + "ĠNH": 18886, + "ĠGina": 18887, + "heavy": 18888, + "ĠVick": 18889, + "ÃŃs": 18890, + "something": 18891, + "Ġbodily": 18892, + "Ġunauthorized": 18893, + "ĠActually": 18894, + "ĠOH": 18895, + "Ġmicrophone": 18896, + "allah": 18897, + "Ġrampant": 18898, + "Ġrelocated": 18899, + "Ġwidening": 18900, + "ĠCait": 18901, + "nel": 18902, + "ĠBlackBerry": 18903, + "Ġprofessionally": 18904, + "ĠInterestingly": 18905, + "Ġbarbecue": 18906, + "Ġresisting": 18907, + "ĠNunes": 18908, + "disc": 18909, + "Ġgroundbreaking": 18910, + "orable": 18911, + "ĠRegulation": 18912, + "Ġborrowed": 18913, + "Ġleaking": 18914, + "Ġlengths": 18915, + "Ġunveiling": 18916, + "houses": 18917, + "Ġ155": 18918, + "ĠBillboard": 18919, + "icion": 18920, + "Times": 18921, + "ĠZoe": 18922, + "ĠAbby": 18923, + "bus": 18924, + "ĠMinutes": 18925, + "ributed": 18926, + "Ġparap": 18927, + "Ġfertil": 18928, + "ABC": 18929, + "ĠIsle": 18930, + "Ġtherapist": 18931, + "Ġgubernatorial": 18932, + "ĠAust": 18933, + "ĠLoan": 18934, + "Bo": 18935, + "ĠNRL": 18936, + "rag": 18937, + "Clear": 18938, + "Ġrevision": 18939, + "Ġflesh": 18940, + "BD": 18941, + "iji": 18942, + "Ġproductions": 18943, + "Ġcoconut": 18944, + "ĠMcCorm": 18945, + "ĠDash": 18946, + "Ġgeography": 18947, + "hearted": 18948, + "Ġarson": 18949, + "Ġgoaltender": 18950, + "Ġbelly": 18951, + "Ġqualifications": 18952, + "ĠActiv": 18953, + "Ġhooked": 18954, + "ĠHungarian": 18955, + "Ġprotocols": 18956, + "inking": 18957, + "Ġfronts": 18958, + "ĠKuala": 18959, + "ĠToys": 18960, + "ĠFitness": 18961, + "Ġwarfare": 18962, + "Ġoutp": 18963, + "ĠQuestions": 18964, + "Ġwel": 18965, + "ĠShan": 18966, + "ĠMorton": 18967, + "ĠRomero": 18968, + "Ġglance": 18969, + "ĠTay": 18970, + "Ġsneakers": 18971, + "ĠSymphony": 18972, + "Ġinspect": 18973, + "enna": 18974, + "Nobody": 18975, + "Ġscrapped": 18976, + "ĠDeVos": 18977, + "ĠDominican": 18978, + "Ġplanets": 18979, + "anova": 18980, + "Ġnotify": 18981, + "Ġincurred": 18982, + "Ġunders": 18983, + "Ġdetainees": 18984, + "ĠMarriott": 18985, + "electric": 18986, + "ĠKes": 18987, + "union": 18988, + "ĠWatt": 18989, + "ATING": 18990, + "Ġslipping": 18991, + "Ġraft": 18992, + "Ġresisted": 18993, + "Ġcred": 18994, + "tern": 18995, + "Ġflurry": 18996, + "Line": 18997, + "Ġconsulted": 18998, + "Ġanalyzing": 18999, + "107": 19000, + "ĠWide": 19001, + "¶": 19002, + "human": 19003, + "ĠFEMA": 19004, + "Ġsmash": 19005, + "Ġcorps": 19006, + "Ġbarric": 19007, + "Ġcollar": 19008, + "ĠTB": 19009, + "without": 19010, + "ĠCanucks": 19011, + "Ġneedle": 19012, + "ĠSidney": 19013, + "ĠLauderdale": 19014, + "Ġglove": 19015, + "ilee": 19016, + "pic": 19017, + "Ġbenef": 19018, + "ĠHydro": 19019, + "ĠDisc": 19020, + "ĠArg": 19021, + "Ġtermin": 19022, + "Ġsympath": 19023, + "Ġpest": 19024, + "ĠCoff": 19025, + "Ġadvancement": 19026, + "social": 19027, + "pol": 19028, + "ĠEmails": 19029, + "Ġstacked": 19030, + "ibly": 19031, + "ĠAlbion": 19032, + "Ġfist": 19033, + "hero": 19034, + "ĠMarian": 19035, + "asia": 
19036, + "Ġtownship": 19037, + "Ġslick": 19038, + "Ġmodeling": 19039, + "achers": 19040, + "ĠArgent": 19041, + "ĠSUN": 19042, + "arde": 19043, + "Ġpinned": 19044, + "Ġhitters": 19045, + "Ġdare": 19046, + "ictions": 19047, + "arily": 19048, + "Ġsting": 19049, + "Ġprimaries": 19050, + "appointed": 19051, + "Ġformats": 19052, + "Ġglitter": 19053, + "Ġpatches": 19054, + "Ġstrategically": 19055, + "Ġaka": 19056, + "Ġyielded": 19057, + "BY": 19058, + "Ġjeopard": 19059, + "ĠVand": 19060, + "Ġcrowned": 19061, + "Ġoccupants": 19062, + "Ġtanker": 19063, + "ĠVisa": 19064, + "Great": 19065, + "Ġseasoned": 19066, + "ĠAviv": 19067, + "Ġfiery": 19068, + "Ġderivatives": 19069, + "Ġdiverted": 19070, + "Ġacqu": 19071, + "Ġsandwiches": 19072, + "ĠLorenzo": 19073, + "Ġpardon": 19074, + "ĠBarber": 19075, + "ĠAgricultural": 19076, + "ĠPhilly": 19077, + "Ġregrets": 19078, + "ĠMillions": 19079, + "ĠFrazier": 19080, + "Ġtreasury": 19081, + "ĠKenn": 19082, + "Ġdestined": 19083, + "olved": 19084, + "Back": 19085, + "leader": 19086, + "lyss": 19087, + "ĠReyes": 19088, + "001": 19089, + "bags": 19090, + "ĠStandards": 19091, + "ĠExcellence": 19092, + "ĠMaid": 19093, + "ĠAnthem": 19094, + "FIELD": 19095, + "Ġrevived": 19096, + "ĠQuad": 19097, + "Ġdistinguished": 19098, + "Ġweighted": 19099, + "Ġritual": 19100, + "Ġinvites": 19101, + "wana": 19102, + "iture": 19103, + "ĠCI": 19104, + "ĠMAY": 19105, + "Ġunfairly": 19106, + "ĠKP": 19107, + "ĠMidlands": 19108, + "Ġmint": 19109, + "uers": 19110, + "Ġcatalog": 19111, + "arant": 19112, + "Ġlosers": 19113, + "Ġscheduling": 19114, + "esar": 19115, + "Ġtransferring": 19116, + "Ġbankrupt": 19117, + "Ġmethamphetamine": 19118, + "ĠEsk": 19119, + "ĠTreatment": 19120, + "ĠResponse": 19121, + "Ġhomework": 19122, + "ĠBald": 19123, + "Ġembarrassment": 19124, + "Ġpoorest": 19125, + "ĠPlatinum": 19126, + "ĠFac": 19127, + "Ġunleashed": 19128, + "Ġbrighter": 19129, + "002": 19130, + "Ġdisl": 19131, + "ĠLowry": 19132, + "ived": 19133, + "ĠDemon": 19134, + "ĠNonetheless": 19135, + "arro": 19136, + "ĠCONT": 19137, + "ifted": 19138, + "ĠFreder": 19139, + "isson": 19140, + "Ġrout": 19141, + "ARA": 19142, + "Ġswinging": 19143, + "Oct": 19144, + "Ġliable": 19145, + "Ġleaning": 19146, + "Ġlungs": 19147, + "380": 19148, + "ĠProcess": 19149, + "ĠCov": 19150, + "terrorism": 19151, + "Ġresistant": 19152, + "Ġpumped": 19153, + "Ġtripled": 19154, + "Semitism": 19155, + "ĠMia": 19156, + "Ġpenetration": 19157, + "ĠLutheran": 19158, + "BU": 19159, + "odes": 19160, + "Ġspanning": 19161, + "utch": 19162, + "Trans": 19163, + "ĠVolunteers": 19164, + "Ġpathway": 19165, + "Ġinfectious": 19166, + "Ġdrastic": 19167, + "ĠEngineers": 19168, + "Ġprincess": 19169, + "acts": 19170, + "usting": 19171, + "utive": 19172, + "achel": 19173, + "DO": 19174, + "Ġpave": 19175, + "ĠHerrera": 19176, + "Ġnearing": 19177, + "help": 19178, + "Ġembarked": 19179, + "Ġmodes": 19180, + "ĠDriving": 19181, + "Ġopting": 19182, + "Best": 19183, + "Ġbehavioral": 19184, + "Ġcables": 19185, + "App": 19186, + "otion": 19187, + "ĠExt": 19188, + "ĠSinclair": 19189, + "ĠInsp": 19190, + "Ġsinking": 19191, + "Next": 19192, + "ĠLumpur": 19193, + "ĠShadow": 19194, + "Donald": 19195, + "itals": 19196, + "Ġmentions": 19197, + "floor": 19198, + "Ġconsiderations": 19199, + "ĠSquad": 19200, + "ĠPlate": 19201, + "dos": 19202, + "Friday": 19203, + "Hopefully": 19204, + "arre": 19205, + "Ġalum": 19206, + "\":\"/": 19207, + "Ġfet": 19208, + "anza": 19209, + "Ġdign": 19210, + "ĠNguyen": 19211, + "ĠRutgers": 19212, + "ĠSew": 19213, + "Ġfilters": 19214, + "ofi": 
19215, + "Ġunavailable": 19216, + "ranking": 19217, + "Ġrefining": 19218, + "ĠUNC": 19219, + "Ġmax": 19220, + "yll": 19221, + "Ġhandsome": 19222, + "Ġutterly": 19223, + "See": 19224, + "ĠStores": 19225, + "Ke": 19226, + "ĠAdvoc": 19227, + "ordon": 19228, + "umbles": 19229, + "Ġbugs": 19230, + "olar": 19231, + "ĠCork": 19232, + "Ġtoken": 19233, + "Ġauthorization": 19234, + "Ġconscience": 19235, + "Ġrepl": 19236, + "edi": 19237, + "owitz": 19238, + "iven": 19239, + "Ġlieu": 19240, + "Ġlifts": 19241, + "Lean": 19242, + "Ġmagnificent": 19243, + "ĠFilms": 19244, + "onents": 19245, + "Ġ***": 19246, + "Green": 19247, + "ĠAdvocate": 19248, + "ĠArrow": 19249, + "Ġblows": 19250, + "Ġexploited": 19251, + "fly": 19252, + "ĠAmar": 19253, + "ĠNOTICE": 19254, + "Ġsincere": 19255, + "found": 19256, + "ĠRud": 19257, + "Ġcy": 19258, + "ĠHeidi": 19259, + "Ġempowered": 19260, + "Ġweakest": 19261, + "ĠKru": 19262, + "Credit": 19263, + "aunted": 19264, + "Ġexotic": 19265, + "aning": 19266, + "Ġaw": 19267, + "ĠMulti": 19268, + "Ġanimation": 19269, + "850": 19270, + "ĠCounter": 19271, + "ĠNit": 19272, + "alli": 19273, + "Ġcapitalize": 19274, + "Ġexecuting": 19275, + "Ġdescent": 19276, + "ovi": 19277, + "ĠKimberly": 19278, + "headed": 19279, + "Ġmentioning": 19280, + ")-": 19281, + "ĠSpecifically": 19282, + "ayette": 19283, + "ihad": 19284, + "ĠIss": 19285, + "Ġdisagreed": 19286, + "ĠKum": 19287, + "Ġurges": 19288, + "Ġpermitting": 19289, + "Ġpy": 19290, + "isp": 19291, + "Ġhygiene": 19292, + "Ġmourning": 19293, + "Ġcyclists": 19294, + "cats": 19295, + "FER": 19296, + "cycl": 19297, + "Ġnewcomers": 19298, + "Ġplead": 19299, + "Ġmend": 19300, + "secret": 19301, + "fan": 19302, + "Ġtranslates": 19303, + "unit": 19304, + "ĠTank": 19305, + "drive": 19306, + "ĠSite": 19307, + "Ġacceleration": 19308, + "ĠEnrique": 19309, + "ĠElaine": 19310, + "Ġstaring": 19311, + "Ġbackwards": 19312, + "Ġot": 19313, + "Ġvot": 19314, + "ĠHK": 19315, + "Ġfian": 19316, + "ĠLockheed": 19317, + "Ġmanifest": 19318, + "ĠZurich": 19319, + "pad": 19320, + "ĠRav": 19321, + "flow": 19322, + "Ġmoms": 19323, + "ĠSolid": 19324, + "ĠReady": 19325, + "aughlin": 19326, + "Ġreminding": 19327, + "ĠCOR": 19328, + "Ġoptimal": 19329, + "ĠCrisis": 19330, + "Ġcholesterol": 19331, + "ĠGerard": 19332, + "Ġfest": 19333, + "Ġsanction": 19334, + "Ġdragging": 19335, + "inent": 19336, + "ĠBravo": 19337, + "Ġamend": 19338, + "aval": 19339, + "Ġpoem": 19340, + "Ġinvasive": 19341, + "Ġlandsc": 19342, + "leigh": 19343, + "Ġheadache": 19344, + "ĠMuse": 19345, + "ĠTurning": 19346, + "girl": 19347, + "cess": 19348, + "Ġfalsely": 19349, + "Ġplaintiff": 19350, + "Ġheavier": 19351, + "Ġrumored": 19352, + "Ġeleven": 19353, + "ĠConsumers": 19354, + "ĠOriginally": 19355, + "ĠStatement": 19356, + "bors": 19357, + "Ġrevoked": 19358, + "ĠOmaha": 19359, + "Fox": 19360, + "ĠKle": 19361, + "Ġvault": 19362, + "Ġoutdated": 19363, + "umes": 19364, + "ĠArk": 19365, + "Ġapologised": 19366, + "Ġrockets": 19367, + "ĠMarines": 19368, + "Ġcaptures": 19369, + "ĠMW": 19370, + "ĠWalters": 19371, + "ĠFactor": 19372, + "Ġensuing": 19373, + "ĠSession": 19374, + "oons": 19375, + "Ġ132": 19376, + "gt": 19377, + "ĠPoints": 19378, + "Ġexhaust": 19379, + "ĠOsaka": 19380, + "heed": 19381, + "Ġhandic": 19382, + "amber": 19383, + "inging": 19384, + "Ġll": 19385, + "Ġescorted": 19386, + "Ġfloated": 19387, + "Ġmerge": 19388, + "Ġcompliment": 19389, + "ĠVC": 19390, + "Ġinsulin": 19391, + "ĠDebt": 19392, + "ça": 19393, + "Ġpens": 19394, + "Ġassertion": 19395, + "Ġredevelopment": 19396, + "moderate": 19397, + 
"Ġleftist": 19398, + "ĠBA": 19399, + "Ġherd": 19400, + "Ġinsecurity": 19401, + "liter": 19402, + "Ġcommence": 19403, + "ĠCaucus": 19404, + "Ġnovels": 19405, + "ĠChevron": 19406, + "Ġerosion": 19407, + "ĠNicholson": 19408, + "ĠRoof": 19409, + "ĠVolunteer": 19410, + "Ġcompelled": 19411, + "Ġcongratulated": 19412, + "ĠPanel": 19413, + "Ġov": 19414, + "idelity": 19415, + "Ġspect": 19416, + "Ġbee": 19417, + "ĠAssistance": 19418, + "Ġterrified": 19419, + "iew": 19420, + "Ġweekday": 19421, + "ĠHiggins": 19422, + "special": 19423, + "ubs": 19424, + "anton": 19425, + "Ġbribes": 19426, + "Ġneat": 19427, + "ĠCliff": 19428, + "Ġdisqualified": 19429, + "ĠND": 19430, + "Ġvers": 19431, + "andra": 19432, + "Ġgraft": 19433, + "value": 19434, + "Ġportray": 19435, + "Ġdaytime": 19436, + "ksh": 19437, + "Ġconsist": 19438, + "Ġhonesty": 19439, + "ĠTimber": 19440, + "ĠNich": 19441, + "Ġinvented": 19442, + "ĠBuch": 19443, + "Ġskull": 19444, + "Ġtags": 19445, + "Ġ124": 19446, + "ighth": 19447, + "Ġrelaxing": 19448, + "Online": 19449, + "Ġsanctioned": 19450, + "Sport": 19451, + "ĠCove": 19452, + "Ġcomics": 19453, + "MW": 19454, + "AMA": 19455, + "mother": 19456, + "Home": 19457, + "ĠCustomer": 19458, + "Ġstrides": 19459, + "ĠWins": 19460, + "Ġrollout": 19461, + "ĠWeaver": 19462, + "Ġshuttle": 19463, + "Ġsteak": 19464, + "Ġglorious": 19465, + "ĠToll": 19466, + "Ġtrustee": 19467, + "Ġinstallations": 19468, + "ĠOpportunity": 19469, + "Ġoper": 19470, + "horse": 19471, + "Ġaided": 19472, + "irus": 19473, + "Ġsleek": 19474, + "Ġyelled": 19475, + "ĠSocialist": 19476, + "Ġapplaud": 19477, + "ĠWah": 19478, + "Ġdevote": 19479, + "Ġdh": 19480, + "Ġarchitectural": 19481, + "ĠMAC": 19482, + "centric": 19483, + "ĠSense": 19484, + "illas": 19485, + "ĠArchbishop": 19486, + "glass": 19487, + "Ġallowance": 19488, + "Ġbundle": 19489, + "andon": 19490, + "eight": 19491, + "ĠKare": 19492, + "haus": 19493, + "ĠAndreas": 19494, + "Ġdoll": 19495, + "RAM": 19496, + "Ġvolunteering": 19497, + "ĠRaleigh": 19498, + "Ġbees": 19499, + "Ġnickel": 19500, + "Ġgenerosity": 19501, + "Ġhomeowner": 19502, + "ĠLieutenant": 19503, + "Ġlandfall": 19504, + "ĠRenew": 19505, + "ĠGiving": 19506, + "ĠContribut": 19507, + "aret": 19508, + "ulf": 19509, + "Ġreinforce": 19510, + "ĠSalv": 19511, + "ĠVenice": 19512, + "Ġfreedoms": 19513, + "ĠTools": 19514, + "Ġ1962": 19515, + "ĠWarm": 19516, + "majority": 19517, + "Ġpleas": 19518, + "oding": 19519, + "plant": 19520, + "Ġtow": 19521, + "ĠBlanc": 19522, + "ĠPipeline": 19523, + "ĠMoor": 19524, + "Ġrefrain": 19525, + "ĠExplore": 19526, + "language": 19527, + "cers": 19528, + "ĠWT": 19529, + "sent": 19530, + "ĠNun": 19531, + "Ġplastics": 19532, + "acas": 19533, + "Ġdisruptions": 19534, + "Ġdiscomfort": 19535, + "enko": 19536, + "Ġimprisoned": 19537, + "Copyright": 19538, + "Ġmyriad": 19539, + "Ġparenting": 19540, + "Ġspree": 19541, + "NBC": 19542, + "Ġonion": 19543, + "ĠIsraelis": 19544, + "ĠRA": 19545, + "Ġrelocate": 19546, + "113": 19547, + "ĠHir": 19548, + "ĠDre": 19549, + "ĠDry": 19550, + "ĠONE": 19551, + "ĠAdministrator": 19552, + "Ġprints": 19553, + "ĠGret": 19554, + "Ġundergraduate": 19555, + "ĠLif": 19556, + "avers": 19557, + "ĠCarney": 19558, + "Ġapex": 19559, + "Ġlenses": 19560, + "Ġliberals": 19561, + "gb": 19562, + "ĠWhereas": 19563, + "Ġcountryside": 19564, + "amine": 19565, + "ĠTerminal": 19566, + "Ġintr": 19567, + "ĠTrey": 19568, + "ALS": 19569, + "Ġcontinental": 19570, + "Ġselfies": 19571, + "FILE": 19572, + "ĠUnity": 19573, + "Ġauthoritarian": 19574, + "Ġoriginated": 19575, + "ĠExcept": 19576, + "yna": 
19577, + "Ġmonet": 19578, + "Ġundermining": 19579, + "ĠGS": 19580, + "pi": 19581, + "iq": 19582, + "Ġslides": 19583, + "ĠSummary": 19584, + "Ġpains": 19585, + "cluding": 19586, + "Ġequation": 19587, + "locked": 19588, + "Ġfraternity": 19589, + "Ġwithstand": 19590, + "Ġdevastation": 19591, + "Ġdemo": 19592, + "late": 19593, + "Ġpunches": 19594, + "Ġgeared": 19595, + "nen": 19596, + "ĠBowie": 19597, + "attle": 19598, + "Ġpolitic": 19599, + "ĠGle": 19600, + "mented": 19601, + "ĠCoordinator": 19602, + "Ġupwards": 19603, + "ĠMega": 19604, + "angled": 19605, + "Ġengineered": 19606, + "Ġluggage": 19607, + "ĠWen": 19608, + "ĠSergeant": 19609, + "Ġkindergarten": 19610, + "ĠPortsmouth": 19611, + "uddin": 19612, + "ket": 19613, + "oba": 19614, + "Ġoscill": 19615, + "esse": 19616, + "ĠOlson": 19617, + "ĠBorough": 19618, + "Ġsupplements": 19619, + "ĠEvening": 19620, + "ANE": 19621, + "Ġlava": 19622, + "Ġgearing": 19623, + "setting": 19624, + "urgical": 19625, + "asty": 19626, + "ĠDaytona": 19627, + "Ġbrewery": 19628, + "Ġpledges": 19629, + "rounder": 19630, + "ulous": 19631, + "ĠHancock": 19632, + "rex": 19633, + "Ġram": 19634, + "Ġproceeding": 19635, + "ĠMurdoch": 19636, + "Ġdowngrade": 19637, + "Ġstatues": 19638, + "Ġdebated": 19639, + "ĠSleep": 19640, + "Ġ144": 19641, + "ĠRuby": 19642, + "ĠFi": 19643, + "123": 19644, + "ĠArabic": 19645, + "Ġlasts": 19646, + "ĠIvy": 19647, + "ĠWid": 19648, + "rown": 19649, + "stick": 19650, + "?'\"": 19651, + "ĠSTEM": 19652, + "Ġsensible": 19653, + "htar": 19654, + "Ġharbor": 19655, + "Ġcra": 19656, + "ĠAlbum": 19657, + "ĠCarnival": 19658, + "Ġimplies": 19659, + "agement": 19660, + "ĠInitially": 19661, + "Ġchooses": 19662, + "Jeff": 19663, + "ĠHig": 19664, + "Ġtam": 19665, + "Ġlump": 19666, + "ucks": 19667, + "Ġrepatri": 19668, + "ĠMercy": 19669, + "zza": 19670, + "Ġ365": 19671, + "ĠRicardo": 19672, + "ogram": 19673, + "Ġundergone": 19674, + "system": 19675, + "Ġtel": 19676, + "ĠKee": 19677, + "ully": 19678, + "istas": 19679, + "Ġgrains": 19680, + "ĠTomorrow": 19681, + "ĠRC": 19682, + "ĠTurk": 19683, + "Ġfreshmen": 19684, + "ĠAway": 19685, + "ĠSach": 19686, + "ĠUltimate": 19687, + "Ġoffensively": 19688, + "ismo": 19689, + "Ġteaser": 19690, + "ĠJud": 19691, + "Ġlegitimacy": 19692, + "opt": 19693, + "ĠCobb": 19694, + "Ġrejecting": 19695, + "ĠSolo": 19696, + "ĠArcher": 19697, + "Ġsoutheastern": 19698, + "ĠPlain": 19699, + "ĠLoss": 19700, + "Ġminerals": 19701, + "ĠMari": 19702, + "Ġscrambling": 19703, + "ĠPeak": 19704, + "Ġhavoc": 19705, + "rings": 19706, + "Ġunofficial": 19707, + "ĠHaj": 19708, + "director": 19709, + "ĠCanal": 19710, + "ĠNSA": 19711, + "ĠEaton": 19712, + "ĠPART": 19713, + "ĠCommissioners": 19714, + "Ġwellbeing": 19715, + "resa": 19716, + "Ġunderstandable": 19717, + "dates": 19718, + "ĠSorry": 19719, + "Ġastonishing": 19720, + "Ġrevise": 19721, + "ĠEc": 19722, + "ĠLack": 19723, + "endi": 19724, + "endale": 19725, + "also": 19726, + "Ġcolder": 19727, + "Ġheel": 19728, + "Ġcellular": 19729, + "Conn": 19730, + "ĠThur": 19731, + "Ġmassage": 19732, + "olla": 19733, + "clus": 19734, + "Ġtoilets": 19735, + "ĠCelebr": 19736, + "Ġtackled": 19737, + "Ġchorus": 19738, + "ETA": 19739, + "anca": 19740, + "ĠOLED": 19741, + "Ġpunk": 19742, + "ĠBrain": 19743, + "ĠNuggets": 19744, + "Ġseamless": 19745, + "make": 19746, + "atted": 19747, + "ĠRog": 19748, + "ĠPatch": 19749, + "Ġruined": 19750, + "Ins": 19751, + "Ġconsolidate": 19752, + "Ġgospel": 19753, + "ĠCaption": 19754, + "Ġoverweight": 19755, + "Ġscreened": 19756, + "ĠKraft": 19757, + "ĠBain": 19758, + "breaker": 
19759, + "ĠFeinstein": 19760, + "ĠDoc": 19761, + "Ġdeepest": 19762, + "ĠOL": 19763, + "Ġtunes": 19764, + "Ġrightly": 19765, + "ĠLanc": 19766, + "ĠBrotherhood": 19767, + "Ġpoultry": 19768, + "ĠPure": 19769, + "Ġstimulate": 19770, + "Ġdiscourse": 19771, + "ĠStark": 19772, + "Ġmuseums": 19773, + "ention": 19774, + "Ġtaxation": 19775, + "ĠAkron": 19776, + "ayer": 19777, + "ĠKirby": 19778, + "farm": 19779, + "oser": 19780, + "Ġcommend": 19781, + "Ġunarmed": 19782, + "ensions": 19783, + "Ġsuperst": 19784, + "Ġoceans": 19785, + "Ġmisuse": 19786, + "LO": 19787, + "ĠByrne": 19788, + "ĠMaritime": 19789, + "Ġdense": 19790, + "Ġexcuses": 19791, + "Ġsuppose": 19792, + "ĠMarks": 19793, + "Ġrainy": 19794, + "Ġreplicate": 19795, + "Ġboutique": 19796, + "ĠRenaissance": 19797, + "jas": 19798, + "icted": 19799, + "Ġreferenced": 19800, + "ĠTir": 19801, + "ĠHatch": 19802, + "ĠCry": 19803, + "ĠPayPal": 19804, + "Ġfulfil": 19805, + "ĠHawaiian": 19806, + "come": 19807, + "ĠThirty": 19808, + "Ġ260": 19809, + "ĠYak": 19810, + "Ġangles": 19811, + "Ġlandlord": 19812, + "Ġlavish": 19813, + "Women": 19814, + "ĠNT": 19815, + "Ġreinforced": 19816, + "Ġprevail": 19817, + "ĠCommunities": 19818, + "Ġfootwear": 19819, + "Ġassurances": 19820, + "Ġlb": 19821, + "Ġairing": 19822, + "Ġresorts": 19823, + "ĠFiji": 19824, + "ĠShay": 19825, + "Ġprevailing": 19826, + "many": 19827, + "Ġimpe": 19828, + "ĠDul": 19829, + "Ġsymbols": 19830, + "zb": 19831, + "ĠCere": 19832, + "Ġapplauded": 19833, + "Ġsoundtrack": 19834, + "Ġdrunken": 19835, + "ĠEuropeans": 19836, + "Ġherds": 19837, + "moving": 19838, + "WR": 19839, + "ĠHindi": 19840, + "Ġwaking": 19841, + "Jo": 19842, + "Andrew": 19843, + "rosse": 19844, + "ĠLegislative": 19845, + "Ġdisgrace": 19846, + "Nothing": 19847, + "ĠBulgaria": 19848, + "Ġhumidity": 19849, + "Ġtranslation": 19850, + "Ġmeasurements": 19851, + "Ġvying": 19852, + "ĠBrid": 19853, + "Max": 19854, + "Ġdir": 19855, + "unci": 19856, + "Ġdefines": 19857, + "Ġperfection": 19858, + "ancers": 19859, + "Matt": 19860, + "ĠShinzo": 19861, + "ĠPresidents": 19862, + "Ġginger": 19863, + "onna": 19864, + "existing": 19865, + "rika": 19866, + "enced": 19867, + "ĠBray": 19868, + "Ġgall": 19869, + "Ġdisrespect": 19870, + "ĠCumber": 19871, + "Ġcontestant": 19872, + "ucky": 19873, + "anticipated": 19874, + "abled": 19875, + "LLOW": 19876, + "Bel": 19877, + "ĠKear": 19878, + "Ġstoryline": 19879, + "Ġrigs": 19880, + "ĠScots": 19881, + "ĠChap": 19882, + "ĠThankfully": 19883, + "Ġcommunist": 19884, + "ĠAdviser": 19885, + "Ġregist": 19886, + "Ġannoying": 19887, + "ĠDVD": 19888, + "Ġethic": 19889, + "ĠFilipino": 19890, + "ĠAdidas": 19891, + "Ġbilling": 19892, + "Ġalleviate": 19893, + "Ġsmoked": 19894, + "Ġhazard": 19895, + "EV": 19896, + "Ag": 19897, + "baum": 19898, + "Ġdoses": 19899, + "Ġoutcry": 19900, + "Ġinclined": 19901, + "Ġpsychologist": 19902, + "itzer": 19903, + "January": 19904, + "Ġmornings": 19905, + "aught": 19906, + "Ġsurreal": 19907, + "ĠCannon": 19908, + "avy": 19909, + "ĠCris": 19910, + "cf": 19911, + "Ġinterpreted": 19912, + "Ġpersecution": 19913, + "vation": 19914, + "Ġupfront": 19915, + "ĠWaste": 19916, + "Ġmills": 19917, + "Ġbombings": 19918, + "ĠHeaven": 19919, + "ĠFlat": 19920, + "Ġboxer": 19921, + "Ġavenues": 19922, + "Invest": 19923, + "ĠZika": 19924, + "Ġbackstage": 19925, + "idas": 19926, + "eston": 19927, + "ead": 19928, + "Ġbishops": 19929, + "Ġrender": 19930, + "Ġfootballer": 19931, + "Ġspilled": 19932, + "Only": 19933, + "Ġsaddened": 19934, + "ĠAbove": 19935, + "inator": 19936, + "tro": 19937, + "onen": 19938, + 
"ĠAMC": 19939, + "Ġstringent": 19940, + "Ġfooting": 19941, + "ĠGhost": 19942, + "Ġtexting": 19943, + "ĠCPI": 19944, + "ĠUW": 19945, + "Ġaccol": 19946, + "iries": 19947, + "ĠFlex": 19948, + "ĠCarolyn": 19949, + "Andre": 19950, + "Ġsiege": 19951, + "Muslim": 19952, + "Ġautomobile": 19953, + "reci": 19954, + "Ġdean": 19955, + "atre": 19956, + "Ġwax": 19957, + "Ġwo": 19958, + "ĠDuffy": 19959, + "Ġfiance": 19960, + "Ġfib": 19961, + "Ġeagle": 19962, + "ĠCatal": 19963, + "Ġinfants": 19964, + "Ġsubmitting": 19965, + "Ġdownhill": 19966, + "Ġstaffer": 19967, + "ĠLights": 19968, + "Ġeater": 19969, + "ĠCaliforn": 19970, + "Ġsupervisors": 19971, + "ĠPy": 19972, + "Ġcondemnation": 19973, + "Ġsci": 19974, + "Ġhated": 19975, + "Ġtil": 19976, + "ĠLavrov": 19977, + "Ġsab": 19978, + "Ġmotors": 19979, + "Ġlogging": 19980, + "ĠOwn": 19981, + "Ġpi": 19982, + "Ġrepeating": 19983, + "ĠDOJ": 19984, + "enary": 19985, + "ĠChow": 19986, + "fat": 19987, + "Ġbalcony": 19988, + "orie": 19989, + "NING": 19990, + "ĠUnified": 19991, + "Neil": 19992, + "Bill": 19993, + "ĠSims": 19994, + "uten": 19995, + "LV": 19996, + "ĠEMS": 19997, + "Ġsip": 19998, + "Ġreplaces": 19999, + "ichi": 20000, + "ĠFig": 20001, + "ĠCharity": 20002, + "Ġpeek": 20003, + "Ġrack": 20004, + "Ġcousins": 20005, + "Ġresolving": 20006, + "Ġthrone": 20007, + "ĠEngine": 20008, + "ĠChak": 20009, + "Ġlamented": 20010, + "Ġwipe": 20011, + "Ġnutrients": 20012, + "ĠChat": 20013, + "AMP": 20014, + "ĠOprah": 20015, + "uming": 20016, + "serving": 20017, + "Ġfir": 20018, + "Ġlandlords": 20019, + "neck": 20020, + "Ġupload": 20021, + "Ġunspecified": 20022, + "Ġicy": 20023, + "´": 20024, + "Ġze": 20025, + "Ġprohibits": 20026, + "ĠFI": 20027, + "Res": 20028, + "ĠEff": 20029, + "hell": 20030, + "umbo": 20031, + "Ġreceipts": 20032, + "Ġoperatives": 20033, + "stant": 20034, + "Ġwives": 20035, + "ĠCinema": 20036, + "Ġnegligence": 20037, + "Ġgases": 20038, + "ĠLau": 20039, + "Ġbrew": 20040, + "August": 20041, + "never": 20042, + "Ġpenned": 20043, + "Ġincomplete": 20044, + "ĠZh": 20045, + "esi": 20046, + "Ġranged": 20047, + "apolis": 20048, + "Ġwithdrawing": 20049, + "ĠLevi": 20050, + "ĠLevy": 20051, + "ĠDaly": 20052, + "Ġdelaying": 20053, + "ĠMSNBC": 20054, + "ĠCyrus": 20055, + "ĠNutrition": 20056, + "NN": 20057, + "Ġwinding": 20058, + "Ġglow": 20059, + "ĠMY": 20060, + "Ġgoodwill": 20061, + "ĠMON": 20062, + "Ġslots": 20063, + "ĠNina": 20064, + "ĠFIR": 20065, + "ĠLTE": 20066, + "ĠInnov": 20067, + "dev": 20068, + "ctic": 20069, + "Ġanalyses": 20070, + "ĠBangalore": 20071, + "Ġtales": 20072, + "Ġovercame": 20073, + "ĠThurs": 20074, + "Ġcherry": 20075, + "ĠNou": 20076, + "ĠFlowers": 20077, + "1000": 20078, + "updated": 20079, + "rieve": 20080, + "ĠBeautiful": 20081, + "iak": 20082, + "Ġplayback": 20083, + "Ġheadset": 20084, + "Ġashamed": 20085, + "Min": 20086, + "Ġadm": 20087, + "ĠLucky": 20088, + "ĠTucson": 20089, + "Ġentirety": 20090, + "ranging": 20091, + "ĠVance": 20092, + "kered": 20093, + "image": 20094, + "ĠGord": 20095, + "War": 20096, + "Ġsimilarities": 20097, + "dig": 20098, + "ĠJude": 20099, + "Ġlonely": 20100, + "hra": 20101, + "ĠStaples": 20102, + "ĠACA": 20103, + "Ġmeasurement": 20104, + "Ġcooper": 20105, + "ATER": 20106, + "ĠMeng": 20107, + "Ġbarring": 20108, + "190": 20109, + "ĠBatt": 20110, + "Ġreproductive": 20111, + "ĠRowe": 20112, + "Ġsubsid": 20113, + "Ġslogans": 20114, + "ugar": 20115, + "ĠKeller": 20116, + "ingham": 20117, + "fuel": 20118, + "Ġhid": 20119, + "afe": 20120, + "Ġindul": 20121, + "cash": 20122, + "Ġstressing": 20123, + "ĠMIT": 20124, + 
"Ġtrump": 20125, + "ancer": 20126, + "ĠPes": 20127, + "ĠMint": 20128, + "Ġcrossover": 20129, + "ĠWeiss": 20130, + "ĠElvis": 20131, + "ĠPermanent": 20132, + "ĠKhalid": 20133, + "Ġunjust": 20134, + "Ġexceptionally": 20135, + "Ġfut": 20136, + "Ġavid": 20137, + "ĠEthics": 20138, + "Ġutilized": 20139, + "Ġfeasibility": 20140, + "Ġcatering": 20141, + "Press": 20142, + "wayne": 20143, + "October": 20144, + "Ġfavors": 20145, + "Ġobsession": 20146, + "Ġmelt": 20147, + "Ġmug": 20148, + "ĠMK": 20149, + "Ġapples": 20150, + "Ġvine": 20151, + "cliffe": 20152, + "Ġgrat": 20153, + "Ġspells": 20154, + "ounced": 20155, + "Ġdecree": 20156, + "issy": 20157, + "Team": 20158, + "Ġdeploying": 20159, + "Feb": 20160, + "Ġmiserable": 20161, + "Ġwat": 20162, + "ĠBust": 20163, + "ĠNorris": 20164, + "ĠTimberwolves": 20165, + "Ġangered": 20166, + "ĠArn": 20167, + "oft": 20168, + "rome": 20169, + "Ġadvertisements": 20170, + "onal": 20171, + "Ġnun": 20172, + "Ġtorque": 20173, + "Ġslave": 20174, + "Ġnonsense": 20175, + "Ġcoy": 20176, + "Ġcites": 20177, + "Game": 20178, + "Ġarchitects": 20179, + "playing": 20180, + "Ġgener": 20181, + "Ġsocio": 20182, + "Ġmeditation": 20183, + "Ġforgive": 20184, + "Ġsmiled": 20185, + "%),": 20186, + "Ġpers": 20187, + "ĠSoph": 20188, + "Ġoccupy": 20189, + "atton": 20190, + "Ġwitnessing": 20191, + "Ġapologise": 20192, + "Ġpredecessors": 20193, + "ĠCassidy": 20194, + "Ġtallied": 20195, + "NER": 20196, + "Ġtract": 20197, + "ĠHolder": 20198, + "ĠPav": 20199, + "Ġjackets": 20200, + "Mel": 20201, + "raud": 20202, + "Ġexercising": 20203, + "ĠChung": 20204, + "ĠAmin": 20205, + "athi": 20206, + "ĠMem": 20207, + "Ġracked": 20208, + "Ġcarved": 20209, + "ĠMickey": 20210, + "ĠLafayette": 20211, + "Ġgrill": 20212, + "ĠINFORMATION": 20213, + "usc": 20214, + "ĠPromotion": 20215, + "yson": 20216, + "istry": 20217, + "Ġfulfilled": 20218, + "Ġrestraint": 20219, + "Ġpopping": 20220, + "ĠSlater": 20221, + "Ġmercy": 20222, + "aden": 20223, + "Ġsubmarine": 20224, + "ĠBowling": 20225, + "dogs": 20226, + "ĠSwe": 20227, + "Ġnoticeable": 20228, + "Ġbis": 20229, + "ĠPremiership": 20230, + "Ġspat": 20231, + "ĠTow": 20232, + "ĠWand": 20233, + "Ġmechanics": 20234, + "while": 20235, + "ĠBenson": 20236, + "Ġmolecules": 20237, + "Ġcrosses": 20238, + "Ġrecalling": 20239, + "ĠCertainly": 20240, + "HAM": 20241, + "Ġsever": 20242, + "ĠRudy": 20243, + "ĠDUI": 20244, + "OLD": 20245, + "ĠTobacco": 20246, + "Ġsubdued": 20247, + "Ġquota": 20248, + "TF": 20249, + "Ġflats": 20250, + "Ġemphasize": 20251, + "Ġbelts": 20252, + "ĠOpinion": 20253, + "Ġpiled": 20254, + "ĠSpark": 20255, + "ĠElias": 20256, + "Ġclassification": 20257, + "ĠHands": 20258, + "ĠCV": 20259, + "Ġtoast": 20260, + "Ġcandle": 20261, + "atching": 20262, + "short": 20263, + "ĠDup": 20264, + "Ġult": 20265, + "bats": 20266, + "Ġmarketers": 20267, + "ĠAvery": 20268, + "ĠColbert": 20269, + "ĠIk": 20270, + "ĠVac": 20271, + "ĠJackets": 20272, + "Ġmerits": 20273, + "eli": 20274, + "PORT": 20275, + "Ġelevator": 20276, + "irming": 20277, + "effective": 20278, + "Ġgroceries": 20279, + "Ġhi": 20280, + "ĠINTER": 20281, + "ĠSAP": 20282, + "ĠNYPD": 20283, + "ĠKY": 20284, + "Ġangel": 20285, + "Ġspectacle": 20286, + "ré": 20287, + "ĠRoche": 20288, + "Ġinsects": 20289, + "Ġcommenced": 20290, + "ĠFoley": 20291, + "Ġdarker": 20292, + "ĠUg": 20293, + "ĠMostly": 20294, + "Ġtermed": 20295, + "uci": 20296, + "ĠExec": 20297, + "ĠBrittany": 20298, + "Ġharmony": 20299, + "Ġadvocated": 20300, + "Ġparcel": 20301, + "ĠHots": 20302, + "Ġmonarch": 20303, + "ĠSiri": 20304, + "odge": 20305, + "ĠPag": 
20306, + "Ġprogressing": 20307, + "grounds": 20308, + "Ġonstage": 20309, + "Ġwarmth": 20310, + "ĠWon": 20311, + "Ġviolates": 20312, + "ĠSaudis": 20313, + "Ġbumper": 20314, + "Ġpatrols": 20315, + "ĠBarron": 20316, + "Ġindoors": 20317, + "Ġtar": 20318, + "Each": 20319, + "Val": 20320, + "Ġapplicant": 20321, + "ĠCater": 20322, + "Ġclassics": 20323, + "ĠThreat": 20324, + "Ġwrapping": 20325, + "ĠIdlib": 20326, + "anking": 20327, + "Did": 20328, + "adia": 20329, + "ĠRig": 20330, + "ĠBram": 20331, + "ĠLaurie": 20332, + "ĠHair": 20333, + "ĠCannabis": 20334, + "Ġdaylight": 20335, + "ĠNorm": 20336, + "ĠRip": 20337, + "sin": 20338, + "unta": 20339, + "Pass": 20340, + "ĠAcad": 20341, + "ĠCummings": 20342, + "Ġtheirs": 20343, + "ĠDistribution": 20344, + "especially": 20345, + "Ġgrilled": 20346, + "Ġaffiliates": 20347, + "ĠVander": 20348, + "ĠCath": 20349, + "ĠProductions": 20350, + "ĠTrek": 20351, + "230": 20352, + "Ġcasinos": 20353, + "ĠCain": 20354, + "atu": 20355, + "idget": 20356, + "ĠWinds": 20357, + "Ġunanswered": 20358, + "Ġintercept": 20359, + "ĠMarty": 20360, + "Ġrefin": 20361, + "Ġlieutenant": 20362, + "cas": 20363, + "Chief": 20364, + "average": 20365, + "ilot": 20366, + "Ġscrimmage": 20367, + "ĠMud": 20368, + "speaking": 20369, + "ĠFranken": 20370, + "ĠTories": 20371, + "Ġabstract": 20372, + "awar": 20373, + "ĠTerms": 20374, + "dal": 20375, + "ĠFur": 20376, + "Ġhumour": 20377, + "rh": 20378, + "Ġsitu": 20379, + "aed": 20380, + "ĠFIN": 20381, + "Ġtranscripts": 20382, + "approved": 20383, + "ĠParsons": 20384, + "Ġpigs": 20385, + "Ġrepayment": 20386, + "ĠARM": 20387, + "ĠElliot": 20388, + "ĠLevine": 20389, + "Ġtagged": 20390, + "pun": 20391, + "ĠDwight": 20392, + "Ġconfiguration": 20393, + "sis": 20394, + "ĠAdult": 20395, + "Ġearthquakes": 20396, + "Ġcreature": 20397, + "ĠMRI": 20398, + "Ġmach": 20399, + "Ġprescriptions": 20400, + "cover": 20401, + "Ġministries": 20402, + "Ġinaccurate": 20403, + "ĠLabs": 20404, + "ĠMGM": 20405, + "Ġtomato": 20406, + "Ġeng": 20407, + "Ġopposes": 20408, + "owan": 20409, + "Ġmapping": 20410, + "Ġconsum": 20411, + "online": 20412, + "eters": 20413, + "code": 20414, + "Aug": 20415, + "Point": 20416, + "branded": 20417, + "pling": 20418, + "ĠCalder": 20419, + "Oper": 20420, + "ĠMiddles": 20421, + "Ġchampagne": 20422, + "ĠTues": 20423, + "Ġsampling": 20424, + "Ġenergetic": 20425, + "rano": 20426, + "ĠStyles": 20427, + "Ġneglected": 20428, + "ĠDamon": 20429, + "Ġendanger": 20430, + "Ġsouthwestern": 20431, + "ĠATM": 20432, + "ĠDuck": 20433, + "engers": 20434, + "Ġdan": 20435, + "yth": 20436, + "Ġbou": 20437, + "ĠDecl": 20438, + "Gold": 20439, + "Ġprojecting": 20440, + "Google": 20441, + "ĠHussein": 20442, + "Ġaccomplishment": 20443, + "itarian": 20444, + "Ġgossip": 20445, + "ĠRai": 20446, + "ril": 20447, + "ĠSke": 20448, + "Ġpsychiatric": 20449, + "ĠMacBook": 20450, + "ĠAdobe": 20451, + "ĠHodg": 20452, + "Ġaccompany": 20453, + "Ġadvertised": 20454, + "Ġreminiscent": 20455, + "Ġgeographical": 20456, + "Ġconvertible": 20457, + "IK": 20458, + "CTV": 20459, + "Ġcommunal": 20460, + "Ġchim": 20461, + "Ġselfish": 20462, + "Ġdrilled": 20463, + "Ġtortured": 20464, + "Ġblacks": 20465, + "noon": 20466, + "Ġmanifesto": 20467, + "ĠRichie": 20468, + "acco": 20469, + "Im": 20470, + "Ġdebit": 20471, + "ĠSNP": 20472, + "perfect": 20473, + "gard": 20474, + "ĠRatio": 20475, + "Ġstubborn": 20476, + "Ġaccumulation": 20477, + "Ġcongregation": 20478, + "Ġkissing": 20479, + "Ġkillers": 20480, + "ĠAbbey": 20481, + "von": 20482, + "ĠFuj": 20483, + "ĠIsabel": 20484, + "NB": 20485, + "ĠNish": 
20486, + "ĠJulius": 20487, + "ĠZimmer": 20488, + "Ġuncover": 20489, + "dar": 20490, + "isle": 20491, + "ĠCompar": 20492, + "Ġcounselor": 20493, + "ĠSok": 20494, + "ĠCumm": 20495, + "ĠHip": 20496, + "Ġurgently": 20497, + "Ġrentals": 20498, + "Ġapproving": 20499, + "Ġirrigation": 20500, + "Ġprostate": 20501, + "ĠJudicial": 20502, + "ĠSubmit": 20503, + "ĠTanner": 20504, + "attack": 20505, + "emb": 20506, + "Ġreclaim": 20507, + "Ġec": 20508, + "Ġbrutality": 20509, + "Ġcommanding": 20510, + "Ġreasoning": 20511, + "Roy": 20512, + "ĠElect": 20513, + "ĠMobil": 20514, + "anding": 20515, + "Ġmirrors": 20516, + "Israel": 20517, + "Ġpavement": 20518, + "Ġoverdue": 20519, + "ĠMd": 20520, + "street": 20521, + "Ġthrill": 20522, + "pora": 20523, + "azon": 20524, + "Ġbrewing": 20525, + "enge": 20526, + "ĠDisaster": 20527, + "Ġbuilder": 20528, + "ods": 20529, + "utsch": 20530, + "Ġterminals": 20531, + "ĠBaird": 20532, + "enburg": 20533, + "Ġhast": 20534, + "Ġbrass": 20535, + "Ġparental": 20536, + "enture": 20537, + "ĠConduct": 20538, + "Ġexpands": 20539, + "luck": 20540, + "mur": 20541, + "ĠBj": 20542, + "Ġadministrations": 20543, + "ĠOlivier": 20544, + "oux": 20545, + "Ġnarrowed": 20546, + "winner": 20547, + "Ġmakeshift": 20548, + "ĠVAT": 20549, + "ĠJavier": 20550, + "-,": 20551, + "Ġsystematic": 20552, + "Ġenforcing": 20553, + "emin": 20554, + "ĠAudio": 20555, + "United": 20556, + "gener": 20557, + "ĠKara": 20558, + "ivas": 20559, + "ĠPretty": 20560, + "ĠLob": 20561, + "Ġpetitions": 20562, + "ĠMercer": 20563, + "ampa": 20564, + "product": 20565, + "Ġdistributing": 20566, + "Ġtunnels": 20567, + "Ġcondo": 20568, + "ĠRSS": 20569, + "ĠCarlo": 20570, + "Ġpumpkin": 20571, + "Ġsto": 20572, + "Ġassumes": 20573, + "oway": 20574, + "hiba": 20575, + "lection": 20576, + "Ġgam": 20577, + "ĠAires": 20578, + "Ġtransmitted": 20579, + "Ġtrousers": 20580, + "Ġcheers": 20581, + "ĠJensen": 20582, + "Ġemer": 20583, + "Ġsimpler": 20584, + "Ġcolored": 20585, + "ĠSustainable": 20586, + "Ġinstruct": 20587, + "Ġpoles": 20588, + "Ġsupervised": 20589, + "Ġinteg": 20590, + "ĠMoreno": 20591, + "boarding": 20592, + "igrant": 20593, + "ĠYoga": 20594, + "Ġenvironmentally": 20595, + "Ġsacrifices": 20596, + "Ġshores": 20597, + "Ġ127": 20598, + "Ġestranged": 20599, + "Ġintoxicated": 20600, + "Ġemergencies": 20601, + "ĠKosovo": 20602, + "yang": 20603, + "Ġfastball": 20604, + "Ġpackaged": 20605, + "LAN": 20606, + "Ġhurry": 20607, + "ĠManny": 20608, + "Ġporch": 20609, + "Ġcuriosity": 20610, + "ĠKend": 20611, + "thouse": 20612, + "ĠTou": 20613, + "mun": 20614, + "Ġwaving": 20615, + "Ġpasswords": 20616, + "ĠSwan": 20617, + "Ġprefers": 20618, + "ĠCorrections": 20619, + "aic": 20620, + "Ġejected": 20621, + "Ġdossier": 20622, + "ĠChal": 20623, + "Ġfacto": 20624, + "Ġspine": 20625, + "leck": 20626, + "Ġrestriction": 20627, + "Ġdisagreement": 20628, + "grown": 20629, + "ĠEdgar": 20630, + "Ġquantities": 20631, + "ĠRapid": 20632, + "Ġpals": 20633, + "Ġspared": 20634, + "Ġremarkably": 20635, + "ructure": 20636, + "Ġbackers": 20637, + "ĠGoals": 20638, + "cles": 20639, + "rolling": 20640, + "ĠBlasio": 20641, + "Ġorchestra": 20642, + "ologies": 20643, + "ĠRise": 20644, + "Power": 20645, + "Ġuptick": 20646, + "atha": 20647, + "ĠMob": 20648, + "Ġshotgun": 20649, + "downs": 20650, + "ĠBorg": 20651, + "Ġmorale": 20652, + "Call": 20653, + "wave": 20654, + "ĠDuc": 20655, + "Ġunwilling": 20656, + "oad": 20657, + "Ġbusinessmen": 20658, + "Ġrefriger": 20659, + "Ġgamers": 20660, + "Ġcele": 20661, + "Ġprecip": 20662, + "Ġrenegoti": 20663, + "OY": 20664, + "ĠPharm": 
20665, + "Ġresponsive": 20666, + "Ġservant": 20667, + "eye": 20668, + "Ġraping": 20669, + "vas": 20670, + "Ġgroin": 20671, + "ĠMelvin": 20672, + "ĠKurds": 20673, + "Ġstricter": 20674, + "ĠMum": 20675, + "ients": 20676, + "Ġstandalone": 20677, + "Ġforums": 20678, + "Ġcommemorate": 20679, + "Far": 20680, + "ĠTelegram": 20681, + "Ġscreenings": 20682, + "ĠLeonardo": 20683, + "ighton": 20684, + "ĠDOWN": 20685, + "Ġmodule": 20686, + "Ġremedy": 20687, + "Ġ280": 20688, + "Su": 20689, + "ĠBecker": 20690, + "ĠGast": 20691, + "prem": 20692, + "ĠInto": 20693, + "oyle": 20694, + "114": 20695, + "Ġadhere": 20696, + "Report": 20697, + "ĠJaneiro": 20698, + "ĠKry": 20699, + "Pakistan": 20700, + "Ġrobotic": 20701, + "ande": 20702, + "Ġoverlooking": 20703, + "ĠTreaty": 20704, + "Ġrect": 20705, + "yne": 20706, + "Ġbattlefield": 20707, + "ĠGeoff": 20708, + "Ġearns": 20709, + "ĠMiner": 20710, + "Ġteased": 20711, + "Ġexemptions": 20712, + "Ġvacancy": 20713, + "oku": 20714, + "Ġvulnerabilities": 20715, + "ĠRou": 20716, + "Ġobserv": 20717, + "Ġoverlook": 20718, + "Ġcorrespond": 20719, + "Ġtheatrical": 20720, + "Ġrobotics": 20721, + "ĠCompl": 20722, + "ĠPasadena": 20723, + "laden": 20724, + "Ġvastly": 20725, + "olit": 20726, + "Ġjustification": 20727, + "Ġtampering": 20728, + "ĠSutherland": 20729, + "ĠMens": 20730, + "Ġinvisible": 20731, + "uren": 20732, + "ĠAshton": 20733, + "owl": 20734, + "Ġdisqual": 20735, + "ĠEva": 20736, + "Ġfriction": 20737, + "ĠIrvine": 20738, + "Ġaliens": 20739, + "ĠPension": 20740, + "ĠAssets": 20741, + "ĠBenedict": 20742, + "ittal": 20743, + "Ġsword": 20744, + "Ġunderwear": 20745, + "ĠFarmer": 20746, + "Ġtimber": 20747, + "Ġdependence": 20748, + "ĠTang": 20749, + "Ġ165": 20750, + "ĠNazis": 20751, + "Ġpunching": 20752, + "ĠGloria": 20753, + "usat": 20754, + "Ġluxurious": 20755, + "chuk": 20756, + "ĠCot": 20757, + "Ġregained": 20758, + "Ġreassure": 20759, + "Ġhello": 20760, + "Ġante": 20761, + "Ġnegotiators": 20762, + "Add": 20763, + "paced": 20764, + "ér": 20765, + "Ġdemolished": 20766, + "Ann": 20767, + "joy": 20768, + "ĠJenna": 20769, + "Apple": 20770, + "Ġdisturbance": 20771, + "Ġcommissions": 20772, + "ĠPolitico": 20773, + "along": 20774, + "Ġnem": 20775, + "Ġauctions": 20776, + "ruck": 20777, + "ĠOD": 20778, + "ofer": 20779, + "Play": 20780, + "Ġcarn": 20781, + "vez": 20782, + "Ġtents": 20783, + "Ġcongratulate": 20784, + "ĠLiquid": 20785, + "ĠCoyotes": 20786, + "uku": 20787, + "ĠAllah": 20788, + "Ġbend": 20789, + "Ġcanvas": 20790, + "ĠClifford": 20791, + "Ġvolunteered": 20792, + "Luc": 20793, + "bp": 20794, + "ĠCensus": 20795, + "ĠShot": 20796, + "Ġanonymously": 20797, + "ĠAnglo": 20798, + "ĠBayer": 20799, + "ĠAber": 20800, + "ĠCorrectional": 20801, + "Ġhardship": 20802, + "ĠBuenos": 20803, + "ĠDaw": 20804, + "Ġbaskets": 20805, + "Ġupstairs": 20806, + "Ġmindful": 20807, + "ĠLCD": 20808, + "ĠBlackburn": 20809, + "ĠHale": 20810, + "477": 20811, + "Ġcircus": 20812, + "ĠDragons": 20813, + "Ġrubble": 20814, + "rb": 20815, + "Ġheadaches": 20816, + "aunt": 20817, + "itus": 20818, + "Ġscaled": 20819, + "ĠComic": 20820, + "asio": 20821, + "ĠNordic": 20822, + "Per": 20823, + "Ġbombers": 20824, + "ilitation": 20825, + "Ġindirectly": 20826, + "ĠHod": 20827, + "andan": 20828, + "operation": 20829, + "Ġpuppy": 20830, + "ĠMats": 20831, + "Ġstewards": 20832, + "roup": 20833, + "Ġmemorandum": 20834, + "Ġpatio": 20835, + "const": 20836, + "ĠBold": 20837, + "ĠKaiser": 20838, + "Following": 20839, + "Ġcompat": 20840, + "Ġsidewalks": 20841, + "ĠFitzpatrick": 20842, + "Ġsunlight": 20843, + "ĠLever": 
20844, + "ĠBecky": 20845, + "icles": 20846, + "ĠProbably": 20847, + "Ġgarner": 20848, + "ĠTomas": 20849, + "Ġblankets": 20850, + "uga": 20851, + "jiang": 20852, + "Ġrevel": 20853, + "ĠHutch": 20854, + "llers": 20855, + "Ġtrimmed": 20856, + "ĠSTR": 20857, + "ĠKR": 20858, + "ĠPike": 20859, + "ĠASS": 20860, + "Bay": 20861, + "Ġdiagnostic": 20862, + "ĠSteph": 20863, + "Ġtoured": 20864, + "ĠAvoid": 20865, + "vic": 20866, + "Without": 20867, + "ĠClinical": 20868, + "Ġblo": 20869, + "undo": 20870, + "ĠBoise": 20871, + "Ġspeculated": 20872, + "ĠProt": 20873, + "vention": 20874, + "Ġscholar": 20875, + "ĠSta": 20876, + "Featured": 20877, + "ĠPrev": 20878, + "Ġpenny": 20879, + "ĠHath": 20880, + "rawn": 20881, + "Ġrenovated": 20882, + "ĠFried": 20883, + "itol": 20884, + "uddle": 20885, + "Ġinquest": 20886, + "Ġmetropolitan": 20887, + "lights": 20888, + "Ġtempo": 20889, + "onom": 20890, + "ĠImport": 20891, + "Asia": 20892, + "Ġowes": 20893, + "Ġmagistrate": 20894, + "ĠFriedman": 20895, + "Ġcontacting": 20896, + "Ġstrains": 20897, + "Ġhomage": 20898, + "Ġlent": 20899, + "ception": 20900, + "git": 20901, + "Ġlively": 20902, + "Ġscra": 20903, + "WW": 20904, + "ön": 20905, + "rill": 20906, + "Jack": 20907, + "ĠShank": 20908, + "iani": 20909, + "Ġdecreasing": 20910, + "MON": 20911, + "ĠSupervisor": 20912, + "ĠCats": 20913, + "ĠFusion": 20914, + "Ġracially": 20915, + "ĠTara": 20916, + "ĠPurchase": 20917, + "ĠRally": 20918, + "ĠGraph": 20919, + "ĠHello": 20920, + "hest": 20921, + "ĠVarg": 20922, + "Ġdrowned": 20923, + "ĠThu": 20924, + "ĠWet": 20925, + "ĠEug": 20926, + "Ġrainbow": 20927, + "Ġtelev": 20928, + "ĠAmir": 20929, + "Based": 20930, + "Ġcookie": 20931, + "uding": 20932, + "Ġcontracting": 20933, + "Ġobjected": 20934, + "Ġfork": 20935, + "acent": 20936, + "ĠTil": 20937, + "ĠLilly": 20938, + "ĠEur": 20939, + "Ġhormone": 20940, + "Ġnails": 20941, + "ĠFischer": 20942, + "Ġpier": 20943, + "EMENT": 20944, + "Ġeruption": 20945, + "visory": 20946, + "Ġspeculate": 20947, + "apan": 20948, + "ĠJub": 20949, + "ĠHuckabee": 20950, + "string": 20951, + "stay": 20952, + "Ġsustaining": 20953, + "VM": 20954, + "Ġpriv": 20955, + "Ġclos": 20956, + "Ġdownloaded": 20957, + "ĠIv": 20958, + "Ġfinanced": 20959, + "ĠSao": 20960, + "ĠEverett": 20961, + "rene": 20962, + "ĠWo": 20963, + "ĠPiet": 20964, + "Ġengulfed": 20965, + "Ġexiting": 20966, + "uni": 20967, + "horn": 20968, + "Ġgrav": 20969, + "ection": 20970, + "Ġdrainage": 20971, + "Ġfuelled": 20972, + "Ġorganizational": 20973, + "bike": 20974, + "ĠAreas": 20975, + "Ġpoliceman": 20976, + "ĠFirm": 20977, + "ĠSlide": 20978, + "Ġrand": 20979, + "ĠJedi": 20980, + "Ge": 20981, + "really": 20982, + "Manchester": 20983, + "ĠWise": 20984, + "parent": 20985, + "Ġlad": 20986, + "Ġurine": 20987, + "ĠColombian": 20988, + "geon": 20989, + "Ġ1961": 20990, + "Mania": 20991, + "Ġgraph": 20992, + "Ġcod": 20993, + "fred": 20994, + "Ġeffic": 20995, + "ĠGateway": 20996, + "asket": 20997, + "Ġdiminished": 20998, + "Mass": 20999, + "Ġ205": 21000, + "Long": 21001, + "Ġgranddaughter": 21002, + "Ġshining": 21003, + "Semitic": 21004, + "Ġarising": 21005, + "Ġ330": 21006, + "ĠDU": 21007, + "ĠZah": 21008, + "Ġexclusion": 21009, + "ĠClaus": 21010, + "Ġven": 21011, + "oine": 21012, + "ĠAPI": 21013, + "reve": 21014, + "Ġmilitias": 21015, + "Ġfro": 21016, + "Ġwaved": 21017, + "ĠLuxembourg": 21018, + "Ġdiamonds": 21019, + "Ġstabilize": 21020, + "Ġqueue": 21021, + "ĠSponsor": 21022, + "Ġeldest": 21023, + "ĠLud": 21024, + "Ġwasting": 21025, + "Ġdimension": 21026, + "Ġmotorcycles": 21027, + "ucker": 21028, + 
"ĠTav": 21029, + "Ġsupremacy": 21030, + "Take": 21031, + "ĠCPU": 21032, + "cup": 21033, + "Ġdisregard": 21034, + "Ġenvelope": 21035, + "ĠCah": 21036, + "Ġproposes": 21037, + "ĠMaurice": 21038, + "Ġhobby": 21039, + "Ġharmon": 21040, + "Ġribbon": 21041, + "ĠOrigin": 21042, + "Ġbuilders": 21043, + "Ġconj": 21044, + "Ġcert": 21045, + "eat": 21046, + "ĠStern": 21047, + "ulia": 21048, + "vals": 21049, + "cling": 21050, + "Ġprovocative": 21051, + "Ġsofter": 21052, + "Ġ1948": 21053, + "Ġremod": 21054, + "ĠSob": 21055, + "Ġmaxim": 21056, + "Ġblueprint": 21057, + "oit": 21058, + "ĠGarner": 21059, + "Ġfibre": 21060, + "search": 21061, + "ĠWrite": 21062, + "270": 21063, + "Ġclergy": 21064, + "ĠPalo": 21065, + "obile": 21066, + "Mad": 21067, + "Ġclown": 21068, + "Ġtraced": 21069, + "280": 21070, + "ĠAlberto": 21071, + "Ġdrums": 21072, + "ĠFridays": 21073, + "ĠStrat": 21074, + "stated": 21075, + "ĠStevenson": 21076, + "Pr": 21077, + "Ġboasted": 21078, + "ĠBrees": 21079, + "ĠDonn": 21080, + "ĠMaya": 21081, + "Ġrelieve": 21082, + "Ġ1080": 21083, + "Ġcheapest": 21084, + "Ġuniquely": 21085, + "Ġjungle": 21086, + "Ġprevalence": 21087, + "Ġoutfield": 21088, + "ĠMaps": 21089, + "Ġaccustomed": 21090, + "pac": 21091, + "Ġcombinations": 21092, + "ĠSoros": 21093, + "stad": 21094, + "Ġket": 21095, + "Ġdisgusting": 21096, + "ĠOFF": 21097, + "irs": 21098, + "Ġbiased": 21099, + "Ġpaved": 21100, + "iked": 21101, + "utterstock": 21102, + "ocal": 21103, + "Ġsurround": 21104, + "ĠGuang": 21105, + "Ġspear": 21106, + "ĠBellev": 21107, + "ortun": 21108, + "Rec": 21109, + "acho": 21110, + "Ġfrightening": 21111, + "Ġtyres": 21112, + "normal": 21113, + "ĠYan": 21114, + "ĠWarsaw": 21115, + "ĠBod": 21116, + "ourse": 21117, + "199": 21118, + "Ver": 21119, + "erent": 21120, + "Ġsparkling": 21121, + "Ġchanting": 21122, + "Ġ1945": 21123, + "Ġturbo": 21124, + "Ġhazards": 21125, + "IRE": 21126, + "ĠRonnie": 21127, + "Ġsplitting": 21128, + "ĠMatte": 21129, + "roph": 21130, + "Ġtended": 21131, + "Ġvandalism": 21132, + "alis": 21133, + "SY": 21134, + "Ġoversaw": 21135, + "Happy": 21136, + "ĠTC": 21137, + "275": 21138, + "Ġeco": 21139, + "ĠKers": 21140, + "Ġextensions": 21141, + "ĠFlan": 21142, + "ĠCena": 21143, + "ĠDowns": 21144, + "Ġdrummer": 21145, + "Ġawaited": 21146, + "ĠACL": 21147, + "Ġlegends": 21148, + "ĠRollins": 21149, + "hend": 21150, + "Ġdeparting": 21151, + "Ġtha": 21152, + "Ġunre": 21153, + ".(": 21154, + "Ġfaded": 21155, + "Ġretirees": 21156, + "vid": 21157, + "Ġentrants": 21158, + "ĠStella": 21159, + "arer": 21160, + "Ġteaspoon": 21161, + "ĠSheridan": 21162, + "irc": 21163, + "ĠRelief": 21164, + "ĠButt": 21165, + "Ġris": 21166, + "Ġundermined": 21167, + "Ġsunk": 21168, + "Sam": 21169, + "kamp": 21170, + "riot": 21171, + "rating": 21172, + "Ġclubhouse": 21173, + "Ġpeaked": 21174, + "ĠSki": 21175, + "Ġairstrikes": 21176, + "Ġconce": 21177, + "ĠCPR": 21178, + "Ġesp": 21179, + "ĠWave": 21180, + "ĠColiseum": 21181, + "outheastern": 21182, + "Ġtrou": 21183, + "Ġfeather": 21184, + "ĠSoy": 21185, + "ĠBihar": 21186, + "Ġintervened": 21187, + "mits": 21188, + "colored": 21189, + "330": 21190, + "Ġprocession": 21191, + "apeake": 21192, + "ité": 21193, + "riel": 21194, + "Ġmart": 21195, + "afer": 21196, + "ĠGuests": 21197, + "ĠPie": 21198, + "Ġshiny": 21199, + "ĠSixers": 21200, + "ĠRoads": 21201, + "Ġkicker": 21202, + "ĠCrimes": 21203, + "Ġfrontier": 21204, + "ansen": 21205, + "November": 21206, + "smith": 21207, + "ĠLaun": 21208, + "fried": 21209, + "weet": 21210, + "ĠGrass": 21211, + "Ġsanitation": 21212, + "ĠEat": 21213, + 
"ĠParts": 21214, + "ĠTun": 21215, + "amar": 21216, + "ĠJupiter": 21217, + "ĠFS": 21218, + "Ġunsc": 21219, + "ĠDone": 21220, + "Ġleveraging": 21221, + "Ġtucked": 21222, + "Ġineffective": 21223, + "Ġriots": 21224, + "wei": 21225, + "ĠAttend": 21226, + "Ġpertaining": 21227, + "amen": 21228, + "monds": 21229, + "Ġmism": 21230, + "serious": 21231, + "ĠViol": 21232, + "rous": 21233, + "Ġ129": 21234, + "uebl": 21235, + "umption": 21236, + "tri": 21237, + "ĠWedding": 21238, + "Ġtroopers": 21239, + "ĠTHR": 21240, + "olving": 21241, + "leys": 21242, + "Med": 21243, + "Ġseparatists": 21244, + "Ġimper": 21245, + "ĠFrontier": 21246, + "Ġwhit": 21247, + "ĠMutual": 21248, + "Ġrested": 21249, + "Ġunhealthy": 21250, + "gang": 21251, + "Ġresearching": 21252, + "ĠColonel": 21253, + "Ġaffordability": 21254, + "ĠRegarding": 21255, + "ĠWend": 21256, + "ĠMellon": 21257, + "Ġplots": 21258, + "Ġcanal": 21259, + "PER": 21260, + "ĠShopping": 21261, + "etry": 21262, + "Ġoccurrence": 21263, + "Ġgraves": 21264, + "BF": 21265, + "ĠKau": 21266, + "indust": 21267, + "Ġbeard": 21268, + "uate": 21269, + "ĠProdu": 21270, + "ĠSomali": 21271, + "ishers": 21272, + "ĠFell": 21273, + "ĠHutchinson": 21274, + "Ġhust": 21275, + "Ġillustration": 21276, + "Ġ//": 21277, + "Ġsharks": 21278, + "Ġcoincidence": 21279, + "Ġremake": 21280, + "Ġmural": 21281, + "course": 21282, + "ĠSultan": 21283, + "arse": 21284, + "Ġwhip": 21285, + "ĠPodcast": 21286, + "Ġtightened": 21287, + "Ġdenim": 21288, + "Ġlandfill": 21289, + "future": 21290, + "Ġsuperv": 21291, + "Hand": 21292, + "Ġpraising": 21293, + "ĠEly": 21294, + "ĠGust": 21295, + "ĠMayer": 21296, + "Ġorphan": 21297, + "Ġrepaired": 21298, + "ĠPir": 21299, + "Ġspiral": 21300, + "husband": 21301, + "ienne": 21302, + "iatric": 21303, + "Ġmarriages": 21304, + "Ġhorn": 21305, + "plain": 21306, + "ĠLum": 21307, + "ession": 21308, + "ĠFeatures": 21309, + "Ġbreakup": 21310, + "Ġentrepreneurship": 21311, + "rina": 21312, + "Ġembargo": 21313, + "Ġcapitalism": 21314, + "ĠMinor": 21315, + "Ġpromo": 21316, + "Ġexcel": 21317, + "Japan": 21318, + "Ġworsening": 21319, + "Ġstumbled": 21320, + "Ġpins": 21321, + "Ġswipe": 21322, + "Ġexile": 21323, + "Ġseparatist": 21324, + "ĠBian": 21325, + "Ġrelocation": 21326, + "Ġcommanders": 21327, + "Ġdowned": 21328, + "Ġblogger": 21329, + "packed": 21330, + "ĠSchn": 21331, + "Ġwaterfront": 21332, + "ĠYus": 21333, + "Ġnegotiator": 21334, + "Ġfavourable": 21335, + "Iran": 21336, + "oulder": 21337, + "Ġcance": 21338, + "Ġvind": 21339, + "angel": 21340, + "Ġauthenticity": 21341, + "Ġtowel": 21342, + "bul": 21343, + "ĠNeville": 21344, + "ĠBuddhist": 21345, + "fields": 21346, + "uly": 21347, + "Ġniece": 21348, + "Ġcorrections": 21349, + "Ġassignments": 21350, + "ĠSchl": 21351, + "Ġharmed": 21352, + "375": 21353, + "Ġwounding": 21354, + "ĠPosition": 21355, + "Ġsupermarkets": 21356, + "Ġdisclosures": 21357, + "Ġ185": 21358, + "esp": 21359, + "ĠMcCull": 21360, + "ĠMale": 21361, + "Ġsailors": 21362, + "mis": 21363, + "ĠSophia": 21364, + "Ġunfolded": 21365, + "owell": 21366, + "ĠScarborough": 21367, + "Ġentrepreneurial": 21368, + "118": 21369, + "ogy": 21370, + "ĠLikewise": 21371, + "Ġswung": 21372, + "Ġdrawings": 21373, + "Ġdrafting": 21374, + "ĠSimple": 21375, + "ĠFilip": 21376, + "arf": 21377, + "Ġfade": 21378, + "Ġmerged": 21379, + "ĠLeaf": 21380, + "sun": 21381, + "Ġflame": 21382, + "Ġindices": 21383, + "ĠCreate": 21384, + "ittle": 21385, + "ĠWer": 21386, + "ĠMond": 21387, + "Ġoz": 21388, + "ĠSmoke": 21389, + "Ġreplies": 21390, + "ĠDH": 21391, + "Ġjud": 21392, + "ĠFalk": 21393, + 
"Ġ---": 21394, + "Ġconstitutes": 21395, + "Ġtheat": 21396, + "119": 21397, + "Ġintermediate": 21398, + "vill": 21399, + "ĠGow": 21400, + "ĠHut": 21401, + "ł": 21402, + "155": 21403, + "ĠLocated": 21404, + "ĠDoor": 21405, + "Ġsliced": 21406, + "aru": 21407, + "Ġtearing": 21408, + "defense": 21409, + "oyer": 21410, + "Ġprodu": 21411, + "Ġseminar": 21412, + "asso": 21413, + "Ġpeaks": 21414, + "Ġconceal": 21415, + "Ġcrypto": 21416, + "Ġsetbacks": 21417, + "ĠAlicia": 21418, + "ĠFAA": 21419, + "Ġcontinuity": 21420, + "Ġcatastrophe": 21421, + "Ġbeg": 21422, + "Ġscales": 21423, + "apixel": 21424, + "Ġsalon": 21425, + "Ste": 21426, + "Ġlesbian": 21427, + "Ġanticip": 21428, + "Ġutilization": 21429, + "Ġchickens": 21430, + "Ġspinal": 21431, + "ĠJuliet": 21432, + "ĠFas": 21433, + "prising": 21434, + "ĠSalvation": 21435, + "Ġ138": 21436, + "Ġutilizing": 21437, + "âĢ¢": 21438, + "ĠMessenger": 21439, + "Ġrebellion": 21440, + "ĠAlexand": 21441, + "Ġinsect": 21442, + "Ġribs": 21443, + "ĠBild": 21444, + "Ġmonopoly": 21445, + "Queen": 21446, + "ĠNaples": 21447, + "Ġ133": 21448, + "Ġhourly": 21449, + "Ġego": 21450, + "Ġpencil": 21451, + "ĠPew": 21452, + "Ġdesirable": 21453, + "vant": 21454, + "ĠLAT": 21455, + "Ġperpet": 21456, + "lish": 21457, + "Ġ201": 21458, + "Ġdistances": 21459, + "Ġdistressed": 21460, + "Work": 21461, + "Ġtattoos": 21462, + "Ġstereotypes": 21463, + "istent": 21464, + "ĠCoral": 21465, + "fo": 21466, + "Ġpayable": 21467, + "Ġakin": 21468, + "ĠLis": 21469, + "ĠFinding": 21470, + "Ġsusceptible": 21471, + "ĠKiw": 21472, + "Ġforgiveness": 21473, + "ĠMoment": 21474, + "ĠDmitry": 21475, + "Ġrenov": 21476, + "Ġquint": 21477, + "ĠWaterloo": 21478, + "ĠReality": 21479, + "Ġstray": 21480, + "ĠBeaver": 21481, + "Ġbites": 21482, + "Ġelusive": 21483, + "Ġvirtue": 21484, + "Ġgadgets": 21485, + "Ġlandslide": 21486, + "ĠHealthy": 21487, + "Ġpits": 21488, + "Donnell": 21489, + "Ġirony": 21490, + "uct": 21491, + "Ġpractitioners": 21492, + "Ġreck": 21493, + "governmental": 21494, + "Ġatomic": 21495, + "Ġmotiv": 21496, + "Ġpolic": 21497, + "Ġcommunicated": 21498, + "ĠHS": 21499, + "Ġcriticize": 21500, + "Ġsynerg": 21501, + "Del": 21502, + "ĠRoe": 21503, + "Ġinspirational": 21504, + "ĠWarning": 21505, + "pel": 21506, + "Ġnevertheless": 21507, + "Ġdespair": 21508, + "Ġ(.": 21509, + "Ġfearing": 21510, + "Ġgrop": 21511, + "tree": 21512, + "Ġtrusts": 21513, + "Ġinterviewing": 21514, + "amic": 21515, + "Ġscor": 21516, + "ject": 21517, + "Another": 21518, + "pose": 21519, + "Ġdepicted": 21520, + "ĠPhotography": 21521, + "ĠLenovo": 21522, + "ĠEpic": 21523, + "ĠBoot": 21524, + "GI": 21525, + "enses": 21526, + "Class": 21527, + "arity": 21528, + "Ġservicing": 21529, + "ĠHann": 21530, + "Ġawe": 21531, + "Ġoverdoses": 21532, + "ĠFinnish": 21533, + "Ġpav": 21534, + "ĠPCs": 21535, + "SEC": 21536, + "ĠStro": 21537, + "Ġattracts": 21538, + "Ġapprehended": 21539, + "128": 21540, + "Ġunstable": 21541, + "ĠOutdoor": 21542, + "Ġcloth": 21543, + "ĠUlster": 21544, + "Ġvisually": 21545, + "Ġsculpt": 21546, + "Ġsufficiently": 21547, + "ĠKendrick": 21548, + "Ġengages": 21549, + "Ġknives": 21550, + "ĠGut": 21551, + "Ġarbit": 21552, + "osition": 21553, + "Ġemoji": 21554, + "Ġpinpoint": 21555, + "Ġremembering": 21556, + "rence": 21557, + "ĠVish": 21558, + "Ġimproperly": 21559, + "Ġranc": 21560, + "Ġupstream": 21561, + "Ġcheckpoint": 21562, + "Ġrash": 21563, + "eson": 21564, + "Ġtoes": 21565, + "260": 21566, + "Ġinvalid": 21567, + "Ġonions": 21568, + "Ġlashed": 21569, + "ĠDong": 21570, + "Ġprovisional": 21571, + "ĠFern": 21572, + 
"Ġirresponsible": 21573, + "actively": 21574, + "ĠKnown": 21575, + "Ġben": 21576, + "ĠBlank": 21577, + "Ġactresses": 21578, + "paying": 21579, + "Ġsyrup": 21580, + "isman": 21581, + "Ġeducating": 21582, + "Sunday": 21583, + "ifiable": 21584, + "Post": 21585, + "Ġcalculation": 21586, + "Ġhesitate": 21587, + "ĠIncreasing": 21588, + "Ġreeling": 21589, + "ĠDairy": 21590, + "ensing": 21591, + "Ġmaternity": 21592, + "Ø": 21593, + "./": 21594, + "ĠElm": 21595, + "Ġweddings": 21596, + "ĠYard": 21597, + "117": 21598, + "ĠRocket": 21599, + "OF": 21600, + "Ġtreasurer": 21601, + "Ġrattled": 21602, + "ĠDrop": 21603, + "arel": 21604, + "ĠFulton": 21605, + "ĠGiant": 21606, + "ĠFloor": 21607, + "Jet": 21608, + "ikk": 21609, + "ĠBucs": 21610, + "ostics": 21611, + "reme": 21612, + "ĠRouse": 21613, + "Ġdeliber": 21614, + "ĠEle": 21615, + "Ġconducts": 21616, + "ĠBlog": 21617, + "connected": 21618, + "Ġprayed": 21619, + "Ġcolourful": 21620, + "Ġaugmented": 21621, + "Ġbatted": 21622, + "Ġrelevance": 21623, + "ĠRomanian": 21624, + "acqu": 21625, + "ĠChel": 21626, + "ĠClo": 21627, + "ĠGraves": 21628, + "Ġchees": 21629, + "ĠGibbs": 21630, + "CLE": 21631, + "Ġfertility": 21632, + "Ġambul": 21633, + "Ġspecs": 21634, + "ĠIRA": 21635, + "ĠBooth": 21636, + "ithe": 21637, + "ĠPlayoff": 21638, + "ammed": 21639, + "Ġcollaborating": 21640, + "Ġlunar": 21641, + "Ġconfronting": 21642, + "Ġattribute": 21643, + "King": 21644, + "riz": 21645, + "Ġcasualty": 21646, + "acia": 21647, + "waters": 21648, + "Ġpaving": 21649, + "Ġcaregivers": 21650, + "nor": 21651, + "Ġreacting": 21652, + "ĠHash": 21653, + "Ġsqueezed": 21654, + "Ġexert": 21655, + "ĠMichele": 21656, + "ĠConc": 21657, + "ĠHep": 21658, + "Ġsewage": 21659, + "wart": 21660, + "GY": 21661, + "Ġdiscourage": 21662, + "ĠFir": 21663, + "Ġtextile": 21664, + "ĠSpice": 21665, + "ĠFah": 21666, + "Ġcomplainant": 21667, + "Ġinstinct": 21668, + "camp": 21669, + "ĠEdison": 21670, + "ĠVIDEOS": 21671, + "LM": 21672, + "ĠSands": 21673, + "About": 21674, + "Ġdisk": 21675, + "brid": 21676, + "Ġmuted": 21677, + "ACC": 21678, + "Ġwre": 21679, + "event": 21680, + "Ġicons": 21681, + "Express": 21682, + "udes": 21683, + "ĠBeatles": 21684, + "color": 21685, + "ĠHaas": 21686, + "ĠWolfe": 21687, + "ĠYOUR": 21688, + "Ġaccessibility": 21689, + "ĠCornwall": 21690, + "Ġing": 21691, + "Ġatrocities": 21692, + "weather": 21693, + "ĠDominion": 21694, + "ĠMIL": 21695, + "ĠLara": 21696, + "Ġunravel": 21697, + "Ġmaneuver": 21698, + "Ġfoam": 21699, + "ribe": 21700, + "CI": 21701, + "Ġcandles": 21702, + "acs": 21703, + ")(": 21704, + "coon": 21705, + "ĠPurple": 21706, + "ĠGovernors": 21707, + "ĠKeystone": 21708, + "ĠYuk": 21709, + "file": 21710, + "Ġviol": 21711, + "gery": 21712, + "370": 21713, + "train": 21714, + "Ġgunshots": 21715, + "olin": 21716, + "Ġviruses": 21717, + "ĠTex": 21718, + "hours": 21719, + "Ġprev": 21720, + "ĠRid": 21721, + "ected": 21722, + "ĠVog": 21723, + "riers": 21724, + "Ġmurdering": 21725, + "ĠIz": 21726, + "Ġdeliberations": 21727, + "arming": 21728, + "unda": 21729, + "Ġrink": 21730, + "ĠDrugs": 21731, + "idered": 21732, + "Ġforge": 21733, + "Ġexpansive": 21734, + "VIEW": 21735, + "ĠBots": 21736, + "Ġswitches": 21737, + "KO": 21738, + "atten": 21739, + "Ġvariants": 21740, + "ĠVirtual": 21741, + "ĠCoch": 21742, + "yon": 21743, + "ĠKai": 21744, + "Ġbullied": 21745, + "iday": 21746, + "version": 21747, + "Ġlib": 21748, + "ĠCec": 21749, + "igated": 21750, + "ĠTRUMP": 21751, + "ĠPod": 21752, + "Ġtoppled": 21753, + "Ġeyeing": 21754, + "ĠPatients": 21755, + "techn": 21756, + "Ġhampered": 
21757, + "Ġavert": 21758, + "ĠScheme": 21759, + "ĠCorm": 21760, + "Ġpony": 21761, + "Ġzoom": 21762, + "abo": 21763, + "Ġsleeves": 21764, + "lane": 21765, + "ĠLester": 21766, + "ĠDane": 21767, + "Ġcough": 21768, + "Ġsignings": 21769, + "HER": 21770, + "Ġsibling": 21771, + "Ġredemption": 21772, + "Ġstockp": 21773, + "ĠAlgeria": 21774, + "Ġpadd": 21775, + "ĠBrenda": 21776, + "uchi": 21777, + "Ġtransporting": 21778, + "Ġspeculative": 21779, + "ĠSek": 21780, + "abal": 21781, + "Ġshipment": 21782, + "oker": 21783, + "Ġwarranty": 21784, + "atan": 21785, + "Ġblister": 21786, + "ĠCelebration": 21787, + "Ġwal": 21788, + "Ġlac": 21789, + "Ġprioritize": 21790, + "ression": 21791, + "BP": 21792, + "Ġcollaborated": 21793, + "ĠNewsletter": 21794, + "ĠDamian": 21795, + "ĠResidential": 21796, + "Ġgra": 21797, + "Ġfeasible": 21798, + "ĠCrest": 21799, + "ĠBean": 21800, + "ĠSturgeon": 21801, + "ĠTale": 21802, + "ĠContin": 21803, + "ĠMush": 21804, + "Ġrocking": 21805, + "ĠMane": 21806, + "ĠHumane": 21807, + "resistant": 21808, + "ĠFra": 21809, + "highest": 21810, + "fts": 21811, + "Ġamassed": 21812, + "ĠPavilion": 21813, + "ĠSkin": 21814, + "Ġunfold": 21815, + "Ġresur": 21816, + "ĠPET": 21817, + "model": 21818, + "Ġemploying": 21819, + "Ġrude": 21820, + "Ġirrelevant": 21821, + "angu": 21822, + "Page": 21823, + "PN": 21824, + "igator": 21825, + "ĠReb": 21826, + "ĠArrest": 21827, + "ĠGund": 21828, + "Ġmalls": 21829, + "zhen": 21830, + "wed": 21831, + "Ġdaring": 21832, + "Ġfactual": 21833, + "ĠGent": 21834, + "Ġinforming": 21835, + "ĠStri": 21836, + "ĠLounge": 21837, + ".]": 21838, + "ĠTribunal": 21839, + "ĠMoines": 21840, + "Ġshadows": 21841, + "generated": 21842, + "fulness": 21843, + "Ġheartfelt": 21844, + "ĠLivingston": 21845, + "ĠClerk": 21846, + "Ġnationalism": 21847, + "ĠMiche": 21848, + "balls": 21849, + "anos": 21850, + "agle": 21851, + "Ġprejudice": 21852, + "Ġevenly": 21853, + "Ġswearing": 21854, + "Ġexits": 21855, + "Ġcondemning": 21856, + "Ġvanilla": 21857, + "club": 21858, + "ĠFunding": 21859, + "ĠDover": 21860, + "Ġhots": 21861, + "Ġfres": 21862, + "Ġgoodness": 21863, + "ĠMcKay": 21864, + "Ġbulls": 21865, + "avia": 21866, + "129": 21867, + "Ġ1947": 21868, + "Ġdefamation": 21869, + "ĠMoran": 21870, + "irms": 21871, + "ĠFitz": 21872, + "ĠRossi": 21873, + "urated": 21874, + "Ġvariation": 21875, + "ĠBauer": 21876, + "ĠSchro": 21877, + "Ġcolony": 21878, + "ĠParliamentary": 21879, + "ikan": 21880, + "Ġstirring": 21881, + "ĠSheldon": 21882, + "Ġaccessory": 21883, + "ĠUtilities": 21884, + "Ġnab": 21885, + "Ġpract": 21886, + "Ġherein": 21887, + "ĠRole": 21888, + "ĠMant": 21889, + "Ġpharm": 21890, + "Ġ215": 21891, + "ĠNGO": 21892, + "ĠAnything": 21893, + "ĠMacedonia": 21894, + "Ġbree": 21895, + "ĠWTO": 21896, + "Chicago": 21897, + "ĠProtect": 21898, + "quarters": 21899, + "ĠGrassley": 21900, + "ĠInteractive": 21901, + "ĠInterview": 21902, + "Ġ550": 21903, + "Ġastronauts": 21904, + "Ġfreak": 21905, + "ĠIntegrated": 21906, + "Ġindict": 21907, + "Ġgenerators": 21908, + "acio": 21909, + "Kevin": 21910, + "Ġvaccination": 21911, + "Ġblockade": 21912, + "ĠSons": 21913, + "Ġcapita": 21914, + "ĠAnita": 21915, + "ĠExport": 21916, + "ĠNex": 21917, + "ĠAram": 21918, + "Ġzinc": 21919, + "Ġrevamped": 21920, + "Ġselective": 21921, + "Ġmanipulate": 21922, + "ĠBedford": 21923, + "ĠBattery": 21924, + "Ġqualifiers": 21925, + "lean": 21926, + "Ġscrew": 21927, + "film": 21928, + "ror": 21929, + "ĠEllison": 21930, + "ombo": 21931, + "ĠOst": 21932, + "165": 21933, + "Ġslaves": 21934, + "ĠPayton": 21935, + "Ġbarg": 21936, + 
"Ġrugged": 21937, + "ĠWinn": 21938, + "ĠHammer": 21939, + "ĠUPS": 21940, + "Euro": 21941, + "Ġunfamiliar": 21942, + "Ġdistract": 21943, + "Ġbuffer": 21944, + "ledge": 21945, + "Ġtrunk": 21946, + "Ġ320": 21947, + "122": 21948, + "Ġdilemma": 21949, + "Ġpra": 21950, + "Ġutmost": 21951, + "Ġcampaigners": 21952, + "icular": 21953, + "eful": 21954, + "�": 21955, + "ĠHQ": 21956, + "neau": 21957, + "Ġsir": 21958, + "test": 21959, + "Company": 21960, + "Ġrescind": 21961, + "ardon": 21962, + "MG": 21963, + "Gov": 21964, + "ĠRaz": 21965, + "Ġrod": 21966, + "fed": 21967, + "Ġpsych": 21968, + "Ġunin": 21969, + "ĠArbor": 21970, + "Ġnewcomer": 21971, + "ĠEdwin": 21972, + "raising": 21973, + "quist": 21974, + "Ġdiscoveries": 21975, + "Steve": 21976, + "Ġscramble": 21977, + "js": 21978, + "Ġacoustic": 21979, + "Ġdeterioration": 21980, + "Ġobserving": 21981, + "ĠWinning": 21982, + "ĠSaban": 21983, + "idy": 21984, + "Ġoverd": 21985, + "Ġscouting": 21986, + "Ġpunitive": 21987, + "ĠShelter": 21988, + "Ġmocked": 21989, + "Ġdreamed": 21990, + "Ġinvaluable": 21991, + "LP": 21992, + "standard": 21993, + "Ġrecounted": 21994, + "ĠSabres": 21995, + "points": 21996, + "Ġfringe": 21997, + "ĠBarker": 21998, + "alian": 21999, + "ĠPROV": 22000, + "Ġcartel": 22001, + "Ġovercrowd": 22002, + "tain": 22003, + "Year": 22004, + "ĠWelfare": 22005, + "ĠChr": 22006, + "Ġintroduces": 22007, + "ĠDoing": 22008, + "ĠGlover": 22009, + "Ġdeteriorating": 22010, + "Par": 22011, + "Ġattendant": 22012, + "ĠMold": 22013, + "ĠFlying": 22014, + "ovan": 22015, + "Ġoptimize": 22016, + "Ġchapters": 22017, + "Ġdull": 22018, + "gay": 22019, + "ĠATP": 22020, + "ĠKah": 22021, + "ainer": 22022, + "feet": 22023, + "Ġjoking": 22024, + "Ġdisadvantage": 22025, + "Rep": 22026, + "Ġtwisted": 22027, + "Ġslain": 22028, + "Ġcomprise": 22029, + "Ġrestricting": 22030, + "Ġdispos": 22031, + "Ġshaky": 22032, + "Ġembattled": 22033, + "owe": 22034, + "conscious": 22035, + "oken": 22036, + "Ġmistaken": 22037, + "ĠDra": 22038, + "Ġreservoir": 22039, + "Ġspate": 22040, + "Scott": 22041, + "avor": 22042, + "Ġqual": 22043, + "amel": 22044, + "hunt": 22045, + "ĠChevy": 22046, + "Ġclaw": 22047, + "Ġwitch": 22048, + "ĠZimmerman": 22049, + "arium": 22050, + "Ġrubbish": 22051, + "Ġstrings": 22052, + "Ġdoc": 22053, + "Ġplaque": 22054, + "ĠCyr": 22055, + "Ġflourish": 22056, + "Ġworthwhile": 22057, + "Ġbanners": 22058, + "ĠLemon": 22059, + "ĠRainbow": 22060, + "Ġconsisted": 22061, + "ĠHOW": 22062, + "Ñ": 22063, + "Ġblogs": 22064, + "CLUS": 22065, + "eely": 22066, + "Ġbeast": 22067, + "ĠMai": 22068, + "Ġhostility": 22069, + "eros": 22070, + "Ġforeseeable": 22071, + "ĠCorker": 22072, + "ĠWEEK": 22073, + "visors": 22074, + "ressive": 22075, + "ĠViktor": 22076, + "Ġbureaucracy": 22077, + "Ġ256": 22078, + "ĠFeel": 22079, + "ĠAdventure": 22080, + "Ġefficacy": 22081, + "ĠInstitution": 22082, + "ĠHarbaugh": 22083, + "ĠPractice": 22084, + "ĠChristianity": 22085, + "Thanks": 22086, + "Ġfridge": 22087, + "idel": 22088, + "Ġeff": 22089, + "Ġvein": 22090, + "terms": 22091, + "Ġignorance": 22092, + "Ġscream": 22093, + "Ġwit": 22094, + "ĠRousse": 22095, + "ĠWillow": 22096, + "Ġhallway": 22097, + "former": 22098, + "Ġshooters": 22099, + "ĠReporting": 22100, + "Ġgal": 22101, + "Ġsavvy": 22102, + "rand": 22103, + "Ġremed": 22104, + "ĠBaron": 22105, + "inar": 22106, + "Ġseizures": 22107, + "ĠThorn": 22108, + "ĠProtesters": 22109, + "ĠRevolutionary": 22110, + "think": 22111, + "ĠCabrera": 22112, + "Four": 22113, + "ĠRudd": 22114, + "Ġprost": 22115, + "ĠBottom": 22116, + "Port": 22117, + "nas": 
22118, + "ifax": 22119, + "Wire": 22120, + "Ġtokens": 22121, + "antis": 22122, + "ĠSOU": 22123, + "ĠMilk": 22124, + "asters": 22125, + "Ġshrimp": 22126, + "Ġcakes": 22127, + "blue": 22128, + "ifty": 22129, + "View": 22130, + "adium": 22131, + "fen": 22132, + "zyk": 22133, + "ĠEmil": 22134, + "Ġdismay": 22135, + "Ġtilt": 22136, + "aska": 22137, + "Young": 22138, + "Ġpredators": 22139, + "Ġovershadowed": 22140, + "mitt": 22141, + "ĠSemin": 22142, + "ĠSchiff": 22143, + "ĠClarkson": 22144, + "212": 22145, + "210": 22146, + "Ġvanished": 22147, + "Ġmesh": 22148, + "ĠBurnett": 22149, + "ĠMent": 22150, + "ĠBlind": 22151, + "ĠPatriot": 22152, + "ĠVil": 22153, + "Ġflick": 22154, + "ĠTowns": 22155, + "ĠWhites": 22156, + "Ġspice": 22157, + "ĠMode": 22158, + "Ġnominate": 22159, + "Ġwrest": 22160, + "ĠAshes": 22161, + "Ġrows": 22162, + "ĠClint": 22163, + "Ġgentleman": 22164, + "utan": 22165, + "athlon": 22166, + "ĠIntermediate": 22167, + "hews": 22168, + "Ġoffended": 22169, + "ĠPaige": 22170, + "ĠFinch": 22171, + "ĠAboriginal": 22172, + "positive": 22173, + "Stop": 22174, + "Ġrenting": 22175, + "Ġ[âĢ¦]": 22176, + "ĠHert": 22177, + "Ġvegetation": 22178, + "apes": 22179, + "ĠCanon": 22180, + "appa": 22181, + "Ġabst": 22182, + "ĠKatz": 22183, + "Ġsurfing": 22184, + "aghan": 22185, + "ĠPresidency": 22186, + "Ġscaling": 22187, + "ĠSas": 22188, + "Ġpeanut": 22189, + "Ġrecommending": 22190, + "cious": 22191, + "endez": 22192, + "eker": 22193, + "ĠKamp": 22194, + "Ġsitcom": 22195, + "Ġcrust": 22196, + "women": 22197, + "ĠJes": 22198, + "ĠWhe": 22199, + "ĠWarwick": 22200, + "Ġepit": 22201, + "ĠAlc": 22202, + "Ġdictate": 22203, + "ĠSPORTS": 22204, + "ĠLanguage": 22205, + "Ġindicative": 22206, + "ĠMacDonald": 22207, + "Ġreorgan": 22208, + "Ġ`": 22209, + "ARS": 22210, + "Ġliberation": 22211, + "Ġbless": 22212, + "Ġreflective": 22213, + "Ġà¤": 22214, + "Ġdesires": 22215, + "ĠHank": 22216, + "ĠLaunch": 22217, + "Ġrotating": 22218, + "ĠStones": 22219, + "Ġcoordinating": 22220, + "ĠZeit": 22221, + "Ġskepticism": 22222, + "ĠAlam": 22223, + "ĠTrout": 22224, + "ĠSMS": 22225, + "ĠCrescent": 22226, + "ĠTeacher": 22227, + "Ġfury": 22228, + "Ġeyebrows": 22229, + "onga": 22230, + "ĠPilot": 22231, + "ĠRutherford": 22232, + "Ġinterstate": 22233, + "established": 22234, + "Ġbaggage": 22235, + "Ġ131": 22236, + "riks": 22237, + "mil": 22238, + "Ġneon": 22239, + "Ġqueer": 22240, + "ourced": 22241, + "ĠKash": 22242, + "ĠEleven": 22243, + "illes": 22244, + "ĠOpportun": 22245, + "Ġstre": 22246, + "Washington": 22247, + "ĠDifferent": 22248, + "Ġexempl": 22249, + "Ġboarded": 22250, + "Ġrogue": 22251, + "ĠDNC": 22252, + "rone": 22253, + "Ġreversing": 22254, + "nine": 22255, + "ĠIvory": 22256, + "itating": 22257, + "uve": 22258, + "Ġfracture": 22259, + "255": 22260, + "ĠAssessment": 22261, + "Ġsubjective": 22262, + "Ġfluct": 22263, + "ĠJaguar": 22264, + "Ġstride": 22265, + "Ġreapp": 22266, + "ĠGrow": 22267, + "against": 22268, + "ĠMedina": 22269, + "scenes": 22270, + "ĠNieto": 22271, + "Ġsou": 22272, + "ĠFleming": 22273, + "Ġnarcotics": 22274, + "ĠBere": 22275, + "ĠBub": 22276, + "ĠAck": 22277, + "Ġvinyl": 22278, + "ĠCopy": 22279, + "ĠGarland": 22280, + "ĠDuty": 22281, + "Ġinn": 22282, + "Ġmerchant": 22283, + "Ġactivate": 22284, + "Ġglowing": 22285, + "ettle": 22286, + "ĠBran": 22287, + "Ġsilk": 22288, + "anco": 22289, + "TL": 22290, + "ĠFurn": 22291, + "Ġwithheld": 22292, + "Ġpulse": 22293, + "ĠGU": 22294, + "BUS": 22295, + "ĠHyper": 22296, + "Ġpicnic": 22297, + "Ġpositives": 22298, + "ĠParamount": 22299, + "Ġ737": 22300, + "Ġenlisted": 
22301, + "ĠValerie": 22302, + "false": 22303, + "ĠChocolate": 22304, + "ĠSTAR": 22305, + "Ġdescended": 22306, + "Ġtasty": 22307, + "ĠDaesh": 22308, + "ĠNed": 22309, + "Ġcomplimentary": 22310, + "Ġdepicting": 22311, + "ĠHavana": 22312, + "college": 22313, + "Ġtraces": 22314, + "Ġundue": 22315, + "ĠSisters": 22316, + "aum": 22317, + "ĠCourier": 22318, + "ĠOng": 22319, + "ĠSparks": 22320, + "ongs": 22321, + "ĠYong": 22322, + "URR": 22323, + "los": 22324, + "Ġhorsepower": 22325, + "confidence": 22326, + "ĠPett": 22327, + "ĠMeasure": 22328, + "Ġmarches": 22329, + "zig": 22330, + "ĠTOR": 22331, + "Ġexported": 22332, + "ĠRak": 22333, + "ĠInvestigations": 22334, + "Ġterminate": 22335, + "ĠTian": 22336, + "Ġmasters": 22337, + "ĠDS": 22338, + "Ġoutraged": 22339, + "ĠCups": 22340, + "ĠWeir": 22341, + "exec": 22342, + "Ġjourneys": 22343, + "Ġabide": 22344, + "Ġavail": 22345, + "ĠStreets": 22346, + "Ġfixes": 22347, + "Ġcocoa": 22348, + "Ġabundant": 22349, + "Ġhubs": 22350, + "mort": 22351, + "Ġrobberies": 22352, + "ĠBark": 22353, + "Ġprecautions": 22354, + "Ġhammered": 22355, + "ometric": 22356, + "mith": 22357, + "ĠMcCann": 22358, + "ĠJaw": 22359, + "ĠQuest": 22360, + "ĠMcF": 22361, + "Ġlob": 22362, + "Ġlegalized": 22363, + "Ġquirky": 22364, + "Ġtrailers": 22365, + "ĠIndividual": 22366, + "Ġcumulative": 22367, + "Ġenlarge": 22368, + "Ġconvoy": 22369, + "olen": 22370, + "got": 22371, + "landers": 22372, + "Ġscanner": 22373, + "Ġscans": 22374, + "ĠEg": 22375, + "prof": 22376, + "Ġhosp": 22377, + "ĠColo": 22378, + "Ġerr": 22379, + "Ġdeval": 22380, + "ĠUsually": 22381, + "Ġbul": 22382, + "ummy": 22383, + "Ġtandem": 22384, + "occupied": 22385, + "Ġmandates": 22386, + "ĠSwim": 22387, + "121": 22388, + "ussed": 22389, + "EF": 22390, + "Ġfries": 22391, + "Until": 22392, + "rc": 22393, + "Ġbadge": 22394, + "Ġstrips": 22395, + "Ġmagnet": 22396, + "Ġarchive": 22397, + "stan": 22398, + "ĠDeadline": 22399, + "Ġdisposable": 22400, + "Ġbob": 22401, + "Ġnorthwestern": 22402, + "Jul": 22403, + "ĠSAL": 22404, + "Ġinfluencing": 22405, + "Ġdevil": 22406, + "ĠEllie": 22407, + "cms": 22408, + "ingo": 22409, + "888": 22410, + "Ġcosmetic": 22411, + "Also": 22412, + "Ġyacht": 22413, + "Ġlazy": 22414, + "Ġmerc": 22415, + "Ġabsorbed": 22416, + "harm": 22417, + "116": 22418, + "Ġsubpoena": 22419, + "Ġcounters": 22420, + "ĠLori": 22421, + "Ġrandomly": 22422, + "nea": 22423, + "waves": 22424, + "Ġrelie": 22425, + "ĠKiss": 22426, + "Ġchassis": 22427, + "Ġbakery": 22428, + "Images": 22429, + "ĠHolden": 22430, + "Ġamazed": 22431, + "Ġalignment": 22432, + "ĠPowers": 22433, + "Ġlabelled": 22434, + "Ġstaunch": 22435, + "Ġsignaling": 22436, + "Ġsenate": 22437, + "Ġunconventional": 22438, + "ĠAlternative": 22439, + "Ġambassadors": 22440, + "ĠVPN": 22441, + "atics": 22442, + "Ġmosquito": 22443, + "ĠScholarship": 22444, + "Ġhelpless": 22445, + "alone": 22446, + "ZA": 22447, + "chel": 22448, + "Ġconstituencies": 22449, + "ĠCafé": 22450, + "Ġhatch": 22451, + "ĠRupert": 22452, + "Ġrendering": 22453, + "Ġreinstated": 22454, + "Ġinterval": 22455, + "Texas": 22456, + "ĠAHL": 22457, + "February": 22458, + "review": 22459, + "Ġgle": 22460, + "Ġfals": 22461, + "Ġmarkers": 22462, + "Ġgovernmental": 22463, + "ĠPos": 22464, + "Ġarose": 22465, + "every": 22466, + "Ġrulings": 22467, + "obar": 22468, + "Govern": 22469, + "gren": 22470, + "isan": 22471, + "Ġmarketed": 22472, + "Click": 22473, + "Ġord": 22474, + "Ġballoons": 22475, + "asers": 22476, + "ĠHorton": 22477, + "pub": 22478, + "ĠAerospace": 22479, + "Ġflank": 22480, + "Ġmolecular": 22481, + 
"bour": 22482, + "nuts": 22483, + "Ġalliances": 22484, + "Ġbenchmarks": 22485, + "ocate": 22486, + "stadt": 22487, + "ĠGoodwin": 22488, + "lap": 22489, + "ĠFactors": 22490, + "Never": 22491, + "ĠNem": 22492, + "Ġroadside": 22493, + "orth": 22494, + "Ġexhibited": 22495, + "ĠPearce": 22496, + "ĠOlsen": 22497, + "Ġpostal": 22498, + "ĠLiberation": 22499, + "reen": 22500, + "mary": 22501, + "Ġropes": 22502, + "Ġlarg": 22503, + "Ġgob": 22504, + "boys": 22505, + "ĠSax": 22506, + "Ġreimbursement": 22507, + "ĠVie": 22508, + "ĠCatholics": 22509, + "ĠMartial": 22510, + "Ġpremiered": 22511, + "Ġawaits": 22512, + "ĠUnderstanding": 22513, + "ĠBelarus": 22514, + "ĠVor": 22515, + "ogi": 22516, + "iaz": 22517, + "Ġvictorious": 22518, + "Ġancestors": 22519, + "Ġwreckage": 22520, + "Ġoppression": 22521, + "ĠChildhood": 22522, + "Ġwidth": 22523, + "ĠPlymouth": 22524, + "ĠFifty": 22525, + "Ġoccupancy": 22526, + "etts": 22527, + "ĠFiscal": 22528, + "lifting": 22529, + "ĠTraditional": 22530, + "Ġnostalgia": 22531, + "Law": 22532, + "Ġlays": 22533, + "Ġarresting": 22534, + "Ġanticipating": 22535, + "Ġinsults": 22536, + "ĠExtension": 22537, + "Ġgenerator": 22538, + "ummer": 22539, + "Ġageing": 22540, + "Ġbouncing": 22541, + "ember": 22542, + "ĠWAR": 22543, + "ĠNico": 22544, + "ĠWow": 22545, + "ĠRaven": 22546, + "flower": 22547, + "ĠCrim": 22548, + "bh": 22549, + "Ġundo": 22550, + "Ġburgers": 22551, + "roud": 22552, + "ĠAtkinson": 22553, + "ĠYEAR": 22554, + "Ġpoorer": 22555, + "ICA": 22556, + "ĠSchedule": 22557, + "Ġstronghold": 22558, + "ĠMillennium": 22559, + "Ġ###": 22560, + "ilda": 22561, + "ĠGH": 22562, + "Ġupscale": 22563, + "aldi": 22564, + "ĠResolution": 22565, + "Ġswelling": 22566, + "Ġgrieving": 22567, + "ĠNile": 22568, + "ĠTig": 22569, + "ERY": 22570, + "ooth": 22571, + "BALL": 22572, + "Ġballet": 22573, + "Ġbucks": 22574, + "ĠUV": 22575, + "akin": 22576, + "Ġchilling": 22577, + "Ġdatabases": 22578, + "ĠGD": 22579, + "section": 22580, + "Ġhires": 22581, + "Ġmul": 22582, + "Ġsen": 22583, + "ĠTownsend": 22584, + "Ġinspected": 22585, + "ilic": 22586, + "Ġdiscriminatory": 22587, + "fol": 22588, + "Ġalcoholic": 22589, + "ĠHoff": 22590, + "Carl": 22591, + "Ġvicinity": 22592, + "lein": 22593, + "ĠEco": 22594, + "ĠGovern": 22595, + "Ġsecrecy": 22596, + "aned": 22597, + "ĠDUP": 22598, + "Ġ570": 22599, + "Ġsow": 22600, + "Ġstalls": 22601, + "Ġinsulting": 22602, + "ĠDT": 22603, + "Ġinforms": 22604, + "fitting": 22605, + "ĠDepending": 22606, + "ĠMelanie": 22607, + "ĠThom": 22608, + "path": 22609, + "Ġadmired": 22610, + "Peter": 22611, + "idents": 22612, + "ielding": 22613, + "ĠShanahan": 22614, + "TD": 22615, + "Things": 22616, + "sn": 22617, + "Ġconstituted": 22618, + "Ġ137": 22619, + "Ġderailed": 22620, + "ĠBonnie": 22621, + "Ġgraffiti": 22622, + "Ġearnest": 22623, + "Ġcompliant": 22624, + "blown": 22625, + "Ġalle": 22626, + "prise": 22627, + "Ġfocal": 22628, + "Ġgentlemen": 22629, + "ĠTalks": 22630, + "Ġpassports": 22631, + "Ġdeprived": 22632, + "Ġdude": 22633, + "ĠNath": 22634, + "Ġgoverned": 22635, + "Ġsac": 22636, + "Ġcastle": 22637, + "qv": 22638, + "Ġtolerated": 22639, + "ĠSci": 22640, + "close": 22641, + "ĠDynamics": 22642, + "Ġflashing": 22643, + "yk": 22644, + "ĠConsolid": 22645, + "Ġinherently": 22646, + "ĠForrest": 22647, + "Gene": 22648, + "Public": 22649, + "Ġloser": 22650, + "runners": 22651, + "Ġprudent": 22652, + "Ġpioneering": 22653, + "ĠHowe": 22654, + "ĠButter": 22655, + "ĠArabian": 22656, + "acha": 22657, + "ĠBBQ": 22658, + "ĠMineral": 22659, + "Ġdestiny": 22660, + "Ġretrieve": 22661, + "ĠBav": 
22662, + "reth": 22663, + "oby": 22664, + "ĠGrid": 22665, + "Ġgrievances": 22666, + "ĠTips": 22667, + "Ġadamant": 22668, + "Ġdiets": 22669, + "Ġmilestones": 22670, + "Ġcollects": 22671, + "ĠLaboratories": 22672, + "ĠWC": 22673, + "Ġpostp": 22674, + "Ġdams": 22675, + "ĠOEM": 22676, + "Ġrumor": 22677, + "Ġlocking": 22678, + "Ġemission": 22679, + "Ġqueries": 22680, + "Jones": 22681, + "Ġlang": 22682, + "ĠAcqu": 22683, + "ĠMedium": 22684, + "ĠTreasurer": 22685, + "Sept": 22686, + "FB": 22687, + "Ġintegrating": 22688, + "Ġbolstered": 22689, + "Ġincorporating": 22690, + "encers": 22691, + "Ġirregularities": 22692, + "Ġnom": 22693, + "iod": 22694, + "ĠAi": 22695, + "Ġsor": 22696, + "anked": 22697, + "Ġrehears": 22698, + "fig": 22699, + "ĠBug": 22700, + "hoff": 22701, + "Ġtrooper": 22702, + "Ġgalaxy": 22703, + "amon": 22704, + "ĠAtlas": 22705, + "Ġsolicit": 22706, + "Ġsings": 22707, + "ĠInstructions": 22708, + "ĠMig": 22709, + "thinking": 22710, + "ĠCostco": 22711, + "Ġbreasts": 22712, + "Ġportraits": 22713, + "ĠCock": 22714, + "Ġsubscriptions": 22715, + "Ġpine": 22716, + "Ġhaunted": 22717, + "ĠMED": 22718, + "eer": 22719, + "ega": 22720, + "ĠZa": 22721, + "ENN": 22722, + "ĠWinners": 22723, + "aith": 22724, + "safe": 22725, + "Ġ143": 22726, + "ĠWeston": 22727, + "ĠLansing": 22728, + "ĠLaurel": 22729, + "ocrat": 22730, + "ograph": 22731, + "Ġmatchups": 22732, + "ĠFriend": 22733, + "Ġdigest": 22734, + "Ġdimensions": 22735, + "azing": 22736, + "Ġtipping": 22737, + "Ġenrich": 22738, + "gart": 22739, + "argo": 22740, + "Ġoutbreaks": 22741, + "Ġsalvage": 22742, + "ĠErica": 22743, + "Ġmodules": 22744, + "ĠPDF": 22745, + "ĠGoods": 22746, + "oots": 22747, + "2011": 22748, + "Ġinterrupt": 22749, + "Ġradi": 22750, + "ĠSimone": 22751, + "vell": 22752, + "ĠSV": 22753, + "extremely": 22754, + "Ġstadiums": 22755, + "ĠRox": 22756, + "Ġconflicting": 22757, + "Ġyouthful": 22758, + "ĠUM": 22759, + "series": 22760, + "Ġded": 22761, + "Ġfielding": 22762, + "Pre": 22763, + "itled": 22764, + "Ġstreamed": 22765, + "Ġapprentices": 22766, + "ĠAlec": 22767, + "ĠGap": 22768, + "ĠPrem": 22769, + "Ġleased": 22770, + "Ġdeepening": 22771, + "Ġbounds": 22772, + "Ġrethink": 22773, + "ĠVoting": 22774, + "ĠScha": 22775, + "blood": 22776, + "ĠReeves": 22777, + "Ġbells": 22778, + "Ġcollector": 22779, + "ĠCrimson": 22780, + "ĠWheat": 22781, + "207": 22782, + "ĠHB": 22783, + "ĠBCC": 22784, + "Ġsync": 22785, + "ĠAnders": 22786, + "Ġthanking": 22787, + "Ġlayoffs": 22788, + "Ġfoolish": 22789, + "Ġcustod": 22790, + "Ġelephants": 22791, + "Ġcorrelation": 22792, + "ĠHarding": 22793, + "ĠGPU": 22794, + "ĠBarnett": 22795, + "Ġol": 22796, + "Ġalarms": 22797, + "Ġfluctuations": 22798, + "shop": 22799, + "Ġcommentators": 22800, + "ĠAlpine": 22801, + "Ġmur": 22802, + "Ġbiotech": 22803, + "Ġunlocked": 22804, + "ouri": 22805, + "roe": 22806, + "ĠPayment": 22807, + "ĠPOL": 22808, + "ĠGuest": 22809, + "Ġphrases": 22810, + "ĠBuilt": 22811, + "erves": 22812, + "Ġnutritional": 22813, + "205": 22814, + "ourage": 22815, + "Related": 22816, + "Come": 22817, + "ĠSAT": 22818, + "Ġgatherings": 22819, + "Ġsquads": 22820, + "Ġorganising": 22821, + "Ġministerial": 22822, + "Ġkilomet": 22823, + "ĠJump": 22824, + "ĠStrength": 22825, + "ĠFerr": 22826, + "Ġillustrated": 22827, + "ĠOber": 22828, + "Ġextrad": 22829, + "Ġlimitation": 22830, + "idis": 22831, + "ĠMonths": 22832, + "ifts": 22833, + "Ġmotives": 22834, + "Ġmaternal": 22835, + "Ġbait": 22836, + "Ġadversity": 22837, + "Twitter": 22838, + "ĠUni": 22839, + "Ġgrappling": 22840, + "Ġbowls": 22841, + "ĠHib": 22842, 
+ "ĠCopenhagen": 22843, + "Ġsergeant": 22844, + "Ġintro": 22845, + "Ġscrambled": 22846, + "ĠExc": 22847, + "Ġshowcases": 22848, + "Ġplotting": 22849, + "Ġsym": 22850, + "ĠNah": 22851, + "berries": 22852, + "itching": 22853, + "conn": 22854, + "istle": 22855, + "ĠBeginning": 22856, + "asley": 22857, + "ĠMeadow": 22858, + "ĠCra": 22859, + "Ġsupremacist": 22860, + "Ġsweats": 22861, + "production": 22862, + "innon": 22863, + "ovo": 22864, + "Ġscept": 22865, + "Ġdrowning": 22866, + "ĠEh": 22867, + "Ġdecorations": 22868, + "Ġsympathetic": 22869, + "raction": 22870, + "Ġ195": 22871, + "ripp": 22872, + "ĠNotice": 22873, + "charging": 22874, + "ĠDIY": 22875, + "ĠJin": 22876, + "Ġskinny": 22877, + "Ġmaj": 22878, + "Ġwhisk": 22879, + "Ġcongreg": 22880, + "RAL": 22881, + "Ġvolley": 22882, + "Ġestablishments": 22883, + "Ġcite": 22884, + "Miss": 22885, + "Int": 22886, + "iola": 22887, + "ĠBare": 22888, + "KING": 22889, + "ools": 22890, + "private": 22891, + "Ġflaw": 22892, + "Ġwires": 22893, + "Ġideals": 22894, + "oub": 22895, + "Ġ\"'": 22896, + "ĠCompet": 22897, + "ĠStatements": 22898, + "ĠHDR": 22899, + "rm": 22900, + "Ġbegging": 22901, + "uffs": 22902, + "Ġdispatch": 22903, + "Ġskipped": 22904, + "Ġlabs": 22905, + "hawks": 22906, + "Ġexpl": 22907, + "Ġpatriotic": 22908, + "ussions": 22909, + "Ġportrayal": 22910, + "ĠBudapest": 22911, + "ĠCod": 22912, + "Ġextingu": 22913, + "smart": 22914, + "Ġburdens": 22915, + "ĠDrama": 22916, + "Ġaltitude": 22917, + "Ġpursuant": 22918, + "à¥": 22919, + "atari": 22920, + "cot": 22921, + "Ġhotline": 22922, + "ooters": 22923, + "ĠRolls": 22924, + "Ġjeopardy": 22925, + "oids": 22926, + "Ġpageant": 22927, + "149": 22928, + "Ġdistinguish": 22929, + "support": 22930, + "ĠHighlands": 22931, + "ĠErnst": 22932, + "ĠHole": 22933, + "pering": 22934, + "ĠHasan": 22935, + "Ġrece": 22936, + "Ġirregular": 22937, + "Ġdisturbed": 22938, + "Ġcoupon": 22939, + "ĠElijah": 22940, + "oise": 22941, + "Ġfriendships": 22942, + "girlfriend": 22943, + "Ġrampage": 22944, + "arers": 22945, + "Ġdispens": 22946, + "assion": 22947, + "Ġtentative": 22948, + "ĠExploration": 22949, + "fashioned": 22950, + "ĠInstit": 22951, + "Ġthemed": 22952, + "ĠKurdistan": 22953, + "ĠCAL": 22954, + "ĠSweeney": 22955, + "Ġransom": 22956, + "Ġstamps": 22957, + "ĠSchwe": 22958, + "ĠLucia": 22959, + "124": 22960, + "omore": 22961, + "Ġmotivate": 22962, + "ĠWorcester": 22963, + "wald": 22964, + "CAR": 22965, + "iken": 22966, + "andro": 22967, + "ffic": 22968, + "ĠRehab": 22969, + "Ġgrou": 22970, + "Ġcontrollers": 22971, + "ĠHai": 22972, + "nz": 22973, + "Ġartillery": 22974, + "ĠMish": 22975, + "Ġregistry": 22976, + "Ġfrontman": 22977, + "ĠCharg": 22978, + "orneys": 22979, + "ĠPRESS": 22980, + "Ġperceptions": 22981, + "ĠMcGee": 22982, + "AU": 22983, + "mg": 22984, + "Off": 22985, + "ĠNGOs": 22986, + "chemical": 22987, + "Ġbrun": 22988, + "ĠHav": 22989, + "Ġlace": 22990, + "Ġ202": 22991, + "Ġdefer": 22992, + "Ġinjected": 22993, + "Ġgluten": 22994, + "ĠRin": 22995, + "ĠAvalanche": 22996, + "Ġcorpor": 22997, + "ĠPamela": 22998, + "Ġfills": 22999, + "ĠReve": 23000, + "ĠMonument": 23001, + "Ġnationalists": 23002, + "ĠIQ": 23003, + "adden": 23004, + "ĠLoop": 23005, + "Ġ134": 23006, + "Reg": 23007, + "click": 23008, + "bush": 23009, + "ĠKub": 23010, + "ipes": 23011, + "Ġtoggle": 23012, + "ĠRae": 23013, + "Ġburgl": 23014, + "Ġholistic": 23015, + "ronics": 23016, + "Ġprominence": 23017, + "jack": 23018, + "Ġfinan": 23019, + "icates": 23020, + "Ġvel": 23021, + "important": 23022, + "Thursday": 23023, + "chet": 23024, + "Ġrefunds": 
23025, + "ĠElder": 23026, + "ĠOwner": 23027, + "Ġtakeaway": 23028, + "Pe": 23029, + "ĠToro": 23030, + "Tim": 23031, + "fix": 23032, + "before": 23033, + "ĠMotorola": 23034, + "Ġlev": 23035, + "Term": 23036, + "ĠSne": 23037, + "Ġmisinformation": 23038, + "ĠSinai": 23039, + "Ġnitrogen": 23040, + "Ġ203": 23041, + "Ġescaping": 23042, + "Ġjunction": 23043, + "ĠSantana": 23044, + "ĠYemeni": 23045, + "Ġwhipped": 23046, + "ĠStephenson": 23047, + "Ġattire": 23048, + "ĠBard": 23049, + "atically": 23050, + "ĠFaul": 23051, + "ĠSym": 23052, + "resh": 23053, + "ĠMG": 23054, + "Sub": 23055, + "ĠCarmen": 23056, + "Ġig": 23057, + "ĠSanford": 23058, + "ĠYa": 23059, + "cycle": 23060, + "Ġencryption": 23061, + "ĠScal": 23062, + "ĠChest": 23063, + "ĠMadonna": 23064, + "agin": 23065, + "ĠDHS": 23066, + "ĠCed": 23067, + "YR": 23068, + "Ġtruce": 23069, + "ĠBike": 23070, + "Ġfoes": 23071, + "ĠSlovakia": 23072, + "adal": 23073, + "Rain": 23074, + "OPE": 23075, + "Ġlockdown": 23076, + "Ġunilateral": 23077, + "Ġoverseen": 23078, + "Ġblames": 23079, + "Ġbarrage": 23080, + "aan": 23081, + "uds": 23082, + "ĠRust": 23083, + "ĠHC": 23084, + "cox": 23085, + "ĠAllied": 23086, + "ĠJosé": 23087, + "pected": 23088, + "Ġunp": 23089, + "Ġsomeday": 23090, + "Ġdeductions": 23091, + "icial": 23092, + "ĠPRO": 23093, + "ĠIntern": 23094, + "Ġhemp": 23095, + "Ġkilograms": 23096, + "Ġnets": 23097, + "ĠBACK": 23098, + "early": 23099, + "outed": 23100, + "Ġrelegated": 23101, + "Ġ1958": 23102, + "ĠMustang": 23103, + "Ġgamble": 23104, + "Ġprostitution": 23105, + "ĠPapa": 23106, + "Ġinexpensive": 23107, + "GHz": 23108, + "Ġjerseys": 23109, + "Ġmisery": 23110, + "VIS": 23111, + "ĠRAW": 23112, + "Ġthri": 23113, + "Ġaffiliation": 23114, + "small": 23115, + "Ġflashed": 23116, + "Ġcoastline": 23117, + "Ġgard": 23118, + "Ġsv": 23119, + "Ġwaits": 23120, + "itton": 23121, + "London": 23122, + "Ġaccus": 23123, + "ĠCharge": 23124, + "Ġincub": 23125, + "Ġwanna": 23126, + "ĠAwareness": 23127, + "abies": 23128, + "ĠUh": 23129, + "Ġpersuaded": 23130, + "ĠThames": 23131, + "Ġcurated": 23132, + "Ī": 23133, + "Ġbrutally": 23134, + "Ġrooftop": 23135, + "Ġoy": 23136, + "Ġ1900": 23137, + "bery": 23138, + "Ġuphill": 23139, + "Ġinteracting": 23140, + "Ġchilly": 23141, + "ERE": 23142, + "Ġcapsule": 23143, + "ĠSaul": 23144, + "ocker": 23145, + "Ġdeserving": 23146, + "ĠBowen": 23147, + "ĠReaders": 23148, + "ĠWriters": 23149, + "Ġartifacts": 23150, + "ĠRanger": 23151, + "reau": 23152, + "Ġimperson": 23153, + "Ġhears": 23154, + "ĠMaher": 23155, + "neg": 23156, + "Ġmantra": 23157, + "Ġmull": 23158, + "Ġelders": 23159, + "ĠAmtrak": 23160, + "Ġspouses": 23161, + "ĠHak": 23162, + "Ġopenness": 23163, + "Ġprevailed": 23164, + "Ġfortnight": 23165, + "Pal": 23166, + "ride": 23167, + "Ġillustrate": 23168, + "dominated": 23169, + "trust": 23170, + "ī": 23171, + "ĠFemale": 23172, + "ĠSlim": 23173, + "Ġdesc": 23174, + "ĠKathryn": 23175, + "Ġdeepen": 23176, + "TAIN": 23177, + "eredith": 23178, + "Ġchanted": 23179, + "ĠHector": 23180, + "bread": 23181, + "ĠIsa": 23182, + "Ġvolcanic": 23183, + "Ġah": 23184, + "owners": 23185, + "aquin": 23186, + "Ġmelting": 23187, + "Ġpreschool": 23188, + "ocus": 23189, + "ĠMast": 23190, + "ĠMyr": 23191, + "Ġsuppress": 23192, + "Ġversatility": 23193, + "ĠNEC": 23194, + "Ġhoax": 23195, + "Ġmutually": 23196, + "ĠNeb": 23197, + "ĠWheel": 23198, + "kit": 23199, + "abl": 23200, + "again": 23201, + "ĠSonny": 23202, + "rift": 23203, + "Ġsweater": 23204, + "Ġinund": 23205, + "ĠTaco": 23206, + "ĠBout": 23207, + "Ġnonprofits": 23208, + "Ġmodify": 23209, + 
"Ġprofessionalism": 23210, + "ĠGould": 23211, + "ĠGuerrero": 23212, + "Ġterribly": 23213, + "ĠBenz": 23214, + "Ġcountered": 23215, + "Ġbean": 23216, + "ĠPhelps": 23217, + "Ġprowess": 23218, + "bc": 23219, + "Ġfeast": 23220, + "Ġ5000": 23221, + "Ġrevisit": 23222, + "Ġchin": 23223, + "agent": 23224, + "Ġtones": 23225, + "Ġextraction": 23226, + "ĠPosts": 23227, + "oin": 23228, + "Ġattain": 23229, + "Ġgardening": 23230, + "earned": 23231, + "ĠOtto": 23232, + "player": 23233, + "Ġscams": 23234, + "ĠHonolulu": 23235, + "ĠAppro": 23236, + "ĠHIGH": 23237, + "Ġdwell": 23238, + "Islam": 23239, + "leaders": 23240, + "Ġlegisl": 23241, + "expl": 23242, + "ĠChoi": 23243, + "Ġfrenzy": 23244, + "Ġcommercially": 23245, + "Ġlbs": 23246, + "Ġgateway": 23247, + "ĠAndersen": 23248, + "emia": 23249, + "lez": 23250, + "Ġresidences": 23251, + "office": 23252, + "ĠHelsinki": 23253, + "olia": 23254, + "Ġwolf": 23255, + "Ġstyling": 23256, + "ĠJunction": 23257, + "ĠPeyton": 23258, + "udo": 23259, + "ĠDorothy": 23260, + "Ġfreshly": 23261, + "ĠJulio": 23262, + "ĠSunset": 23263, + "ĠMadden": 23264, + "Ġissu": 23265, + "Ġsounding": 23266, + "sports": 23267, + "Ġmassively": 23268, + "ĠRahman": 23269, + "Ġpresided": 23270, + "Instead": 23271, + "Ġ136": 23272, + "ĠHowell": 23273, + "beit": 23274, + "Ġprosperous": 23275, + "Ġwrongly": 23276, + "ĠRaqqa": 23277, + "ĠCes": 23278, + "Ġbuddy": 23279, + "Ġchatting": 23280, + "Ġfencing": 23281, + "Ġtant": 23282, + "ocated": 23283, + "ALK": 23284, + "Ġsnapping": 23285, + "euro": 23286, + "Ryan": 23287, + "ĠRecogn": 23288, + "ucked": 23289, + "Ġpurported": 23290, + "ĠCann": 23291, + "Ġintimidating": 23292, + "Ġrulers": 23293, + "ĠMarse": 23294, + "Art": 23295, + "ĠAadhaar": 23296, + "Ġvows": 23297, + "Ġhunter": 23298, + "ourmet": 23299, + "ĠVarious": 23300, + "2009": 23301, + "anie": 23302, + "Ġcompassionate": 23303, + "ĠParking": 23304, + "Ġmalaria": 23305, + "Ġamnesty": 23306, + "Ġworsened": 23307, + "ĠTitan": 23308, + "Ġcrossings": 23309, + "drug": 23310, + "Ġaddicted": 23311, + "Ġremorse": 23312, + "ĠDestiny": 23313, + "Dear": 23314, + "Ġhur": 23315, + "Ġimplicated": 23316, + "Ġplayful": 23317, + "Ġripe": 23318, + "Ġsizable": 23319, + "Ġcrab": 23320, + "Ġliqu": 23321, + "Ġdrib": 23322, + "Ġcontraction": 23323, + "cro": 23324, + "ĠGus": 23325, + "Ġdoomed": 23326, + "Ġmog": 23327, + "ĠMonitor": 23328, + "Count": 23329, + "Ġsadd": 23330, + "Ġwrestler": 23331, + "Ġrestraints": 23332, + "Ġraging": 23333, + "185": 23334, + "Ġtapes": 23335, + "Ġmitigation": 23336, + "ocratic": 23337, + "Ġvib": 23338, + "ĠSnowden": 23339, + "aldo": 23340, + "Ġweights": 23341, + "Ġ1959": 23342, + "ucc": 23343, + "ĠCoc": 23344, + "Log": 23345, + "ĠStev": 23346, + "Ġdealership": 23347, + "Ġtrademarks": 23348, + "iru": 23349, + "Ġbeneficiary": 23350, + "Ġlegislator": 23351, + "Ġdeadlines": 23352, + "Ġcosmetics": 23353, + "ĠTammy": 23354, + "ĠCombined": 23355, + "Ġeducator": 23356, + "athon": 23357, + "Ġcombo": 23358, + "fu": 23359, + "appropriate": 23360, + "nington": 23361, + "ĠLiberties": 23362, + "missions": 23363, + "opard": 23364, + "ĠMondays": 23365, + "Ġfetch": 23366, + "Ġhers": 23367, + "jon": 23368, + "ukes": 23369, + "zek": 23370, + "Ġvetting": 23371, + "yet": 23372, + "Ġfacilitating": 23373, + "ĠStras": 23374, + "character": 23375, + "ĠHeads": 23376, + "Ġclim": 23377, + "ĠAlbuquerque": 23378, + "Ġbind": 23379, + "Ġconcluding": 23380, + "ĠBasically": 23381, + "rail": 23382, + "ĠTCU": 23383, + "ĠDepression": 23384, + "Ġhem": 23385, + "ĠHue": 23386, + "Ġpand": 23387, + "Ġscoreboard": 23388, + "Av": 
23389, + "Ġidol": 23390, + "compl": 23391, + "Ġredesign": 23392, + "ĠJarrett": 23393, + "Ġfavoured": 23394, + "ĠINS": 23395, + "Ġpropelled": 23396, + "Ġevasion": 23397, + "Ġwidened": 23398, + "Ġwastewater": 23399, + "nard": 23400, + "responsive": 23401, + "Ġdemographics": 23402, + "engine": 23403, + "ĠBrewer": 23404, + "ĠBaxter": 23405, + "ront": 23406, + "ĠColon": 23407, + "Ġpromoter": 23408, + "Ġgenres": 23409, + "ovsky": 23410, + "build": 23411, + "urate": 23412, + "ĠCohn": 23413, + "design": 23414, + "Ġturbulent": 23415, + "Ġcurtain": 23416, + "310": 23417, + "ĠLamp": 23418, + "ĠBonds": 23419, + "church": 23420, + "Ġdeterrent": 23421, + "Ġdictatorship": 23422, + "acement": 23423, + "haul": 23424, + "Ġspir": 23425, + "Ġconceived": 23426, + "Ġstern": 23427, + "sit": 23428, + "Ġsingular": 23429, + "ĠYog": 23430, + "Ġconditional": 23431, + "Ġide": 23432, + "lund": 23433, + "Ġautop": 23434, + "ĠBEST": 23435, + "ĠJed": 23436, + "Ġrationale": 23437, + "Ġalarmed": 23438, + "Ġshovel": 23439, + "ĠProb": 23440, + "ĠMao": 23441, + "ĠBurgess": 23442, + "Ġ1953": 23443, + "above": 23444, + "ĠManson": 23445, + "Ġdismal": 23446, + "ĠFrankie": 23447, + "Ġtempted": 23448, + "Ġunderdog": 23449, + "ribing": 23450, + "ENCY": 23451, + "ĠDele": 23452, + "Las": 23453, + "places": 23454, + "Ġnotoriously": 23455, + "ĠAkin": 23456, + "Ġglut": 23457, + "Ġseamlessly": 23458, + "Ġrecess": 23459, + "written": 23460, + "ĠTJ": 23461, + "occ": 23462, + "ĠTerritory": 23463, + "ĠAIR": 23464, + "ĠDiagn": 23465, + "Ġvacancies": 23466, + "Ġcultivation": 23467, + "ĠAless": 23468, + "Ġrenamed": 23469, + "ĠMahmoud": 23470, + "bright": 23471, + "Ġvisibly": 23472, + "Ġnas": 23473, + "erred": 23474, + "ĠCarn": 23475, + "Ġtriggers": 23476, + "Ġpunishing": 23477, + "Ġluc": 23478, + "ĠBett": 23479, + "Ġbeam": 23480, + "ĠCheng": 23481, + "aina": 23482, + "Ġdetermines": 23483, + "ĠGerry": 23484, + "Ġshocks": 23485, + "Ġstainless": 23486, + "Ġdefects": 23487, + "ĠCinem": 23488, + "Ġtorrent": 23489, + "Ġresurgence": 23490, + "Ġcoral": 23491, + "Ġblitz": 23492, + "ĠGel": 23493, + "Ġstemmed": 23494, + "gur": 23495, + "Ġlymph": 23496, + "zzo": 23497, + "Ġspearheaded": 23498, + "Ġlicences": 23499, + "';": 23500, + "Ġarbitrary": 23501, + "ĠUzbek": 23502, + "Ġthief": 23503, + "reaching": 23504, + "Ġcand": 23505, + "ĠEA": 23506, + "ĠParaly": 23507, + "ĠEmerson": 23508, + "ĠSergey": 23509, + "ĠScher": 23510, + "ĠWr": 23511, + "rowing": 23512, + "Ġ3000": 23513, + "Ġmighty": 23514, + "elight": 23515, + "mAh": 23516, + "Ġcelebr": 23517, + "ĠConclusion": 23518, + "ĠCathy": 23519, + "Ġpolished": 23520, + "uddled": 23521, + "ewski": 23522, + "Ġfucking": 23523, + "Ġinterfering": 23524, + "Ġlandscapes": 23525, + "Ġfearful": 23526, + "ĠDetention": 23527, + "%).": 23528, + "ĠTT": 23529, + "Ġbleak": 23530, + "Ġindebted": 23531, + "Ġcheat": 23532, + "Ġconsolation": 23533, + "ĠPace": 23534, + "raine": 23535, + "Ġhonorary": 23536, + "420": 23537, + "Ġtechnician": 23538, + "ĠComprehensive": 23539, + "Ġfences": 23540, + "Ġwearable": 23541, + "ĠMarilyn": 23542, + "stru": 23543, + "Ġdrained": 23544, + "ĠGibraltar": 23545, + "lag": 23546, + "Ġdisorderly": 23547, + "Ġproclaimed": 23548, + "Ġcapacities": 23549, + "Ġretains": 23550, + "ĠVid": 23551, + "oshi": 23552, + "ĠEid": 23553, + "Ġanalytical": 23554, + "ominium": 23555, + "ĠExaminer": 23556, + "ĠNAACP": 23557, + "ocol": 23558, + "rev": 23559, + "ĠRim": 23560, + "ĠWoody": 23561, + "ĠMcKenna": 23562, + "ĠLennon": 23563, + "ĠEmploy": 23564, + "Fort": 23565, + "psy": 23566, + "Ġsphere": 23567, + "oday": 23568, + 
"ĠChick": 23569, + "ĠCompared": 23570, + "ĠIranians": 23571, + "ĠAccountability": 23572, + "itchie": 23573, + "ĠDickinson": 23574, + "Ġflock": 23575, + "Ġeclips": 23576, + "Ġnat": 23577, + "anke": 23578, + "ĠNeighborhood": 23579, + "Ġ141": 23580, + "Ġscarce": 23581, + "Ġcreations": 23582, + "lists": 23583, + "Ġuseless": 23584, + "Ġcriticisms": 23585, + "Ġruler": 23586, + "ĠHick": 23587, + "arya": 23588, + "worker": 23589, + "alam": 23590, + "Angelo": 23591, + "otle": 23592, + "Ġnewsletters": 23593, + "Ġerected": 23594, + "Ġzip": 23595, + "ĠBirthday": 23596, + "Ġdogged": 23597, + "Ġdanced": 23598, + "Ġconfession": 23599, + "Ġvomiting": 23600, + "ickers": 23601, + "Ġfox": 23602, + "Ġdeduct": 23603, + "Ġstresses": 23604, + "poll": 23605, + "ĠRadar": 23606, + "Ġengagements": 23607, + "Ġexaminer": 23608, + "Ġopportun": 23609, + "Ġlongevity": 23610, + "Ġbanana": 23611, + "carbon": 23612, + "uo": 23613, + "ĠLT": 23614, + "Ġsynagogue": 23615, + "Ġblackmail": 23616, + "INK": 23617, + "Ġfle": 23618, + "ĠGutierrez": 23619, + "Ġracket": 23620, + "Ġevenings": 23621, + "Ġdietary": 23622, + "ĠKok": 23623, + "Ġfaulty": 23624, + "Ġabandoning": 23625, + "ĠFlow": 23626, + "quest": 23627, + "estead": 23628, + "Ġbir": 23629, + "Ġsuicidal": 23630, + "ĠGift": 23631, + "ĠMissing": 23632, + "ĠMazda": 23633, + "ĠRib": 23634, + "ĠJourney": 23635, + "Ġconcede": 23636, + "Ġbrushed": 23637, + "Tw": 23638, + "andowski": 23639, + "ĠYun": 23640, + "Bride": 23641, + "zai": 23642, + "awatts": 23643, + "Ġcha": 23644, + "Ġspans": 23645, + "SF": 23646, + "Ġshells": 23647, + "planned": 23648, + "ĠGeographic": 23649, + "ĠVent": 23650, + "Ġfav": 23651, + "Ġinterrogation": 23652, + "Ġvaries": 23653, + "ĠPlat": 23654, + "operative": 23655, + "avid": 23656, + "Ġgreatness": 23657, + "ĠStrait": 23658, + "ĠSelling": 23659, + "Ġlawful": 23660, + "Ġlyn": 23661, + "Ġfunnel": 23662, + "Ġpundits": 23663, + "ties": 23664, + "Ġpneumonia": 23665, + "Ġcommencement": 23666, + "Ġbrisk": 23667, + "fires": 23668, + "ĠHTML": 23669, + "ĠSevent": 23670, + "Ġhistor": 23671, + "Ġ147": 23672, + "olls": 23673, + "Ġpian": 23674, + "Little": 23675, + "Ġcommercials": 23676, + "Ġdeteriorated": 23677, + "Ġbasin": 23678, + "Ġprohibition": 23679, + "Ġrestrictive": 23680, + "Ġtom": 23681, + "ĠPulse": 23682, + "vale": 23683, + "Ġmim": 23684, + "ĠLyons": 23685, + "ĠTrinidad": 23686, + "data": 23687, + "195": 23688, + "ĠPain": 23689, + "vor": 23690, + "ĠDirectorate": 23691, + "Wow": 23692, + "essential": 23693, + "Ġemerges": 23694, + "ĠDoors": 23695, + "Ġunde": 23696, + "Ġarchives": 23697, + "ĠIX": 23698, + "ĠAman": 23699, + "oric": 23700, + "ĠOper": 23701, + "nothing": 23702, + "Ġ142": 23703, + "igr": 23704, + "rust": 23705, + "ĠBYU": 23706, + "ĠBom": 23707, + "Ġrift": 23708, + "ĠAbs": 23709, + "ĠJenn": 23710, + "Ġrookies": 23711, + "hoe": 23712, + "Ġunderage": 23713, + "eden": 23714, + "Ġroasted": 23715, + "Ġenrol": 23716, + "Ġerased": 23717, + "Ġfreeway": 23718, + "Sil": 23719, + "Ġplanner": 23720, + "Ġconfess": 23721, + "ĠDual": 23722, + "ĠHeadquarters": 23723, + "bottom": 23724, + "Ġstatistic": 23725, + "ĠPush": 23726, + "Ġanim": 23727, + "ITT": 23728, + "Ġexecutions": 23729, + "Hub": 23730, + "ĠStick": 23731, + "Ġobscure": 23732, + "oven": 23733, + "Ġcoats": 23734, + "unc": 23735, + "Morning": 23736, + "Ġnit": 23737, + "mie": 23738, + "Ġcurves": 23739, + "gew": 23740, + "ĠAnniversary": 23741, + "members": 23742, + "ĠAbsolutely": 23743, + "Ġapt": 23744, + "otional": 23745, + "ĠGin": 23746, + "izo": 23747, + "Ġpretending": 23748, + "arak": 23749, + "Ġorganise": 
23750, + "Ġroyalties": 23751, + "ĠCamden": 23752, + "Ġsausage": 23753, + "Inst": 23754, + "Ġchalk": 23755, + "ĠSurf": 23756, + "ĠSunrise": 23757, + "Ġmoder": 23758, + "aido": 23759, + "loving": 23760, + "lus": 23761, + "Ġoblig": 23762, + "Ġmotions": 23763, + "Ġclarification": 23764, + "ĠOM": 23765, + "Ġbishop": 23766, + "Ġexhibitions": 23767, + "ĠRifle": 23768, + "ĠPhot": 23769, + "ĠHM": 23770, + "ATIONAL": 23771, + "Ġwid": 23772, + "Ġreside": 23773, + "ĠPV": 23774, + "OOK": 23775, + "ĠTue": 23776, + "Ġ1200": 23777, + "Ġ1957": 23778, + "Ġespionage": 23779, + "ĠAPPLIC": 23780, + "Ġblasts": 23781, + "fter": 23782, + "Ġimmensely": 23783, + "ĠLots": 23784, + "Ġinflammatory": 23785, + "anging": 23786, + "Ġtumultuous": 23787, + "identified": 23788, + "Ġstead": 23789, + "ĠAch": 23790, + "Ãī": 23791, + "Ġbub": 23792, + "hler": 23793, + "olution": 23794, + "Ġshun": 23795, + "Ġnull": 23796, + "Ġunused": 23797, + "ĠObs": 23798, + "Ġinsol": 23799, + "ĠAttack": 23800, + "ertain": 23801, + "Ġdefiant": 23802, + "Through": 23803, + "ĠArmour": 23804, + "Ġsimulation": 23805, + "UCK": 23806, + "Ġinfluenza": 23807, + "Ġonset": 23808, + "Ġbored": 23809, + "Ġsouls": 23810, + "Ġreferees": 23811, + "Ġcollaborations": 23812, + "ĠLer": 23813, + "Ġcreepy": 23814, + "Ġanaly": 23815, + "ĠEffect": 23816, + "orting": 23817, + "Card": 23818, + "Ġdice": 23819, + "Ġharvesting": 23820, + "235": 23821, + "sty": 23822, + "ĠMcCartney": 23823, + "Ġsalute": 23824, + "UMP": 23825, + "Ġherb": 23826, + "ĠAbuse": 23827, + "ĠRamadan": 23828, + "Ġsuck": 23829, + "trained": 23830, + "ĠPhysical": 23831, + "iren": 23832, + "anches": 23833, + "erie": 23834, + "Ġhangs": 23835, + "Ġcataly": 23836, + "Ġintuitive": 23837, + "assi": 23838, + "Ġtechn": 23839, + "Ġjugg": 23840, + "Ġgameplay": 23841, + "Ġapolog": 23842, + "Ġfifteen": 23843, + "Ġgalleries": 23844, + "Ġoutlines": 23845, + "patient": 23846, + "ĠPotential": 23847, + "Ġethnicity": 23848, + "Ġharbour": 23849, + "Ġoverthrow": 23850, + "ĠLung": 23851, + "Ġwarehouses": 23852, + "ĠMonitoring": 23853, + "Ġmentors": 23854, + "Ġsized": 23855, + "Ġenvisioned": 23856, + "Ġgin": 23857, + "DT": 23858, + "Ġpropel": 23859, + "ĠKul": 23860, + "ference": 23861, + "estic": 23862, + "ĠLego": 23863, + "Ġdinners": 23864, + "ĠMoe": 23865, + "designed": 23866, + "ĠSusp": 23867, + "ĠBrick": 23868, + "qua": 23869, + "IDS": 23870, + "ĠBam": 23871, + "athe": 23872, + "Ġslices": 23873, + "Ġbottled": 23874, + "thy": 23875, + "producing": 23876, + "ĠTerror": 23877, + "professional": 23878, + "ĠKis": 23879, + "erto": 23880, + "ĠVehicles": 23881, + "Ġbeforehand": 23882, + "Ġdetrimental": 23883, + "weights": 23884, + "Ġallowances": 23885, + "Williams": 23886, + "ĠSyrians": 23887, + "ĠSto": 23888, + "Ġcozy": 23889, + "reditation": 23890, + "ensen": 23891, + "ĠSard": 23892, + "Ġroy": 23893, + "ooting": 23894, + "ĠReserv": 23895, + "ominated": 23896, + "emate": 23897, + "ĠTot": 23898, + "ĠCarnegie": 23899, + "ĠThib": 23900, + "ĠMarshal": 23901, + "Ġ152": 23902, + "Ġmayors": 23903, + "inery": 23904, + "ĠFiona": 23905, + "ĠCadillac": 23906, + "ivated": 23907, + "Ġeagerly": 23908, + "ĠOffensive": 23909, + "Ġastronaut": 23910, + "ĠVital": 23911, + "Ġcane": 23912, + "Ġquitting": 23913, + "ĠLone": 23914, + "Ġcensorship": 23915, + "ĠWelch": 23916, + "ĠUd": 23917, + "Ġmarquee": 23918, + "ĠDip": 23919, + "Ġwhereby": 23920, + "Ġtiger": 23921, + "gem": 23922, + "Ġconserv": 23923, + "Ġpresumed": 23924, + "ĠEntry": 23925, + "ffer": 23926, + "ĠProceed": 23927, + "Ġbrawl": 23928, + "ĠJaime": 23929, + "Ġecho": 23930, + 
"Ġadvancements": 23931, + "Ġtransitional": 23932, + "erick": 23933, + "Ġbully": 23934, + "anan": 23935, + "Ġreinvent": 23936, + "ĠLetters": 23937, + "Ġbricks": 23938, + "ĠSmy": 23939, + "Ġtowering": 23940, + "gging": 23941, + "299": 23942, + "orian": 23943, + "dimensional": 23944, + "ĠForty": 23945, + "ĠSinn": 23946, + "ushi": 23947, + "ĠSurveillance": 23948, + "enabled": 23949, + "ĠMous": 23950, + "ĠVive": 23951, + "Marcus": 23952, + "Ġvom": 23953, + "Ġcreek": 23954, + "Ġlime": 23955, + "Ġseismic": 23956, + "ĠFork": 23957, + "Ġembroiled": 23958, + "marks": 23959, + "Ġherald": 23960, + "ĠSonia": 23961, + "âĢ¦\"": 23962, + "wired": 23963, + "Ġobliged": 23964, + "ĠProjects": 23965, + "lde": 23966, + "ĠRiders": 23967, + "Ġovercoming": 23968, + "Mail": 23969, + "ĠLawn": 23970, + "ĠHawk": 23971, + "figure": 23972, + "ĠWritten": 23973, + "Ġens": 23974, + "Ġspacious": 23975, + "target": 23976, + "ĠRecep": 23977, + "ĠSAM": 23978, + "Ġentertained": 23979, + "Ġignited": 23980, + "ĠCENT": 23981, + "ogenic": 23982, + "Ġunatt": 23983, + "Ġexceeds": 23984, + "Ġ--------------------------------": 23985, + "Ġpillars": 23986, + "ĠBorders": 23987, + "ickey": 23988, + "Ġextinction": 23989, + "Ġviability": 23990, + "Ġtumors": 23991, + "ĠWilkinson": 23992, + "ĠKEY": 23993, + "Ġbins": 23994, + "ĠReported": 23995, + "Sm": 23996, + "ĠExclusive": 23997, + "ĠChilean": 23998, + "info": 23999, + "Ġwilderness": 24000, + "did": 24001, + "absolutely": 24002, + "pillar": 24003, + "Ġelites": 24004, + "ĠPreview": 24005, + "ixie": 24006, + "Mont": 24007, + "ribut": 24008, + "dream": 24009, + "Ġplanners": 24010, + "ĠSomerset": 24011, + "Ġenvis": 24012, + "ĠStall": 24013, + "Ġelevate": 24014, + "ographies": 24015, + "rama": 24016, + "Ha": 24017, + "Ġamidst": 24018, + "oho": 24019, + "Ġrejects": 24020, + "Jim": 24021, + "Ġmarginally": 24022, + "Ġusher": 24023, + "arez": 24024, + "ĠHawth": 24025, + "Ġsprink": 24026, + "ĠOffer": 24027, + "Ġanchored": 24028, + "ucking": 24029, + "ĠGarn": 24030, + "ĠConserv": 24031, + "Ġsocietal": 24032, + "Ġbrowsing": 24033, + "Ġbidder": 24034, + "burgh": 24035, + "ĠRunner": 24036, + "Ġtrendy": 24037, + "verts": 24038, + "imposed": 24039, + "ĠPatton": 24040, + "lements": 24041, + "Ġspicy": 24042, + "Ġswe": 24043, + "ĠStrike": 24044, + "Ġclam": 24045, + "ĠYankee": 24046, + "ĠKT": 24047, + "ĠGreenwood": 24048, + "ĠWays": 24049, + "Ġ2050": 24050, + "Ġattach": 24051, + "ĠShim": 24052, + "Ġmeltdown": 24053, + "Ġassemble": 24054, + "ĠUPDATE": 24055, + "Ġscout": 24056, + "Brown": 24057, + "ĠKobe": 24058, + "Ġpostpone": 24059, + "liness": 24060, + "allo": 24061, + "rief": 24062, + "ĠGerm": 24063, + "ĠFD": 24064, + "ĠReggie": 24065, + "ĠUnivers": 24066, + "ĠShepard": 24067, + "Ġcancell": 24068, + "ĠRomeo": 24069, + "ĠWarrior": 24070, + "ench": 24071, + "ifier": 24072, + "Ġprivileges": 24073, + "Ġsenses": 24074, + "Ġimpoverished": 24075, + "ĠPostal": 24076, + "encer": 24077, + "ĠConrad": 24078, + "Ġprinter": 24079, + "Ġinflicted": 24080, + "ĠGamble": 24081, + "ĠHeroes": 24082, + "132": 24083, + "Ġrevisions": 24084, + "Ġunsuccessfully": 24085, + "ĠHeisman": 24086, + "Ġstamped": 24087, + "inding": 24088, + "ĠLuna": 24089, + "Ġreinvest": 24090, + "ducers": 24091, + "ĠPassword": 24092, + "Leod": 24093, + "Ġcompounded": 24094, + "',\"": 24095, + "ogging": 24096, + "Ġprobing": 24097, + "ĠPBS": 24098, + "ĠMU": 24099, + "ĠWhenever": 24100, + "Ġsped": 24101, + "ĠCompetitive": 24102, + "isans": 24103, + "opa": 24104, + "Ġcleric": 24105, + "Ġvivid": 24106, + "à¸": 24107, + "126": 24108, + "Ġinconvenience": 24109, + 
"udi": 24110, + "Ġimmersive": 24111, + "Ġdiversion": 24112, + "Ġlogs": 24113, + "Ġspying": 24114, + "inct": 24115, + "Ġlitres": 24116, + "Ġmetallic": 24117, + "identally": 24118, + "FX": 24119, + "Ġloudly": 24120, + "Ġnursery": 24121, + "Ġcollectors": 24122, + "ĠKart": 24123, + "Ġescalate": 24124, + "Ġringing": 24125, + "Ġprocedural": 24126, + "Ġdisrupting": 24127, + "ĠEthiopian": 24128, + "ĠCFL": 24129, + "Ġillustrates": 24130, + "Ġperks": 24131, + "official": 24132, + "325": 24133, + "Ġmillennial": 24134, + "Ġbreadth": 24135, + "Ġmelted": 24136, + "Ġ850": 24137, + "ĠBake": 24138, + "donald": 24139, + "ĠGrac": 24140, + "Ġseeded": 24141, + "ĠDiscount": 24142, + "idates": 24143, + "Ġdrift": 24144, + "Ġcaptive": 24145, + "Ġseriousness": 24146, + "Ġrepercussions": 24147, + "Ġdisciplines": 24148, + "Ġthesis": 24149, + "Ġsleeve": 24150, + "ses": 24151, + "Monday": 24152, + "Ġthwart": 24153, + "ĠLic": 24154, + "Ġquadru": 24155, + "ĠPresbyterian": 24156, + "Ġreactors": 24157, + "ĠSuzanne": 24158, + "ewater": 24159, + "Ġlam": 24160, + "Ġbreastfeeding": 24161, + "Ġrats": 24162, + "ĠArtists": 24163, + "Ġdomestically": 24164, + "Ġdecom": 24165, + "ĠArms": 24166, + "basketball": 24167, + "Ġscrub": 24168, + "ĠTeddy": 24169, + "beh": 24170, + "ĠBetsy": 24171, + "ĠNursing": 24172, + "Ġdescriptions": 24173, + "127": 24174, + "gil": 24175, + "itional": 24176, + "Ġchampioned": 24177, + "ĠCalling": 24178, + "Ġrealization": 24179, + "ĠBuddy": 24180, + "hou": 24181, + "ĠDire": 24182, + "ĠHuff": 24183, + "Ġlipstick": 24184, + "Ray": 24185, + "Ġflare": 24186, + "belt": 24187, + "Ġbrightest": 24188, + "Ġmalfunction": 24189, + "ĠManor": 24190, + "Ġsaturated": 24191, + "rays": 24192, + "ĠDW": 24193, + "ixed": 24194, + "ĠSlovenia": 24195, + "seen": 24196, + "ĠCause": 24197, + "arios": 24198, + "ASE": 24199, + "Ġrend": 24200, + "ĠTBA": 24201, + "Ġlecturer": 24202, + "attering": 24203, + "Ġaffluent": 24204, + "CEO": 24205, + "Ġbreathtaking": 24206, + "ĠGiles": 24207, + "irth": 24208, + "ĠPhilips": 24209, + "Ġposture": 24210, + "ĠTSA": 24211, + "heit": 24212, + "Ġmenace": 24213, + "ricks": 24214, + "ĠAden": 24215, + "ĠReich": 24216, + "iggle": 24217, + "ĠShutterstock": 24218, + "Ġcourageous": 24219, + "edia": 24220, + "Staff": 24221, + "Ġdivert": 24222, + "ĠCir": 24223, + "Ġguessing": 24224, + "apers": 24225, + "ĠBritons": 24226, + "lé": 24227, + "Ġconvened": 24228, + "ĠSerbian": 24229, + "Ġricher": 24230, + "Ġcock": 24231, + "Ġdeposited": 24232, + "company": 24233, + "Ġdelic": 24234, + "sensitive": 24235, + "tank": 24236, + "ĠPatty": 24237, + "mia": 24238, + "onomous": 24239, + "cn": 24240, + "Ġclamp": 24241, + "ĠAcademic": 24242, + "Ġprosecuting": 24243, + "ĠTransparency": 24244, + "Ġdeflation": 24245, + "Ġdashboard": 24246, + "ĠDress": 24247, + "Ġlin": 24248, + "mu": 24249, + "ĠGoodell": 24250, + "Ġlav": 24251, + "ĠTwelve": 24252, + "Ġflavour": 24253, + "Ġfiercely": 24254, + "Ġbloom": 24255, + "ĠHaf": 24256, + "ĠGrad": 24257, + "LET": 24258, + "ĠSeeing": 24259, + "oxide": 24260, + "Ġmenus": 24261, + "char": 24262, + "adoes": 24263, + "combe": 24264, + "Street": 24265, + "ĠRidley": 24266, + "Ġdepicts": 24267, + "ĠPred": 24268, + "ÑĢ": 24269, + "British": 24270, + "Ġbumps": 24271, + "Ġlamp": 24272, + "ĠDesmond": 24273, + "ĠPB": 24274, + "Ġfrag": 24275, + "tin": 24276, + "ĠSharing": 24277, + "Ġdesperation": 24278, + "Ġcommuter": 24279, + "igrants": 24280, + "ĠShapiro": 24281, + "Ġkinda": 24282, + "Ġimpartial": 24283, + "ĠJewel": 24284, + "Ġcongratulations": 24285, + "Ġcompost": 24286, + "Ġadmiration": 24287, + 
"Ġpaycheck": 24288, + "ĠAnonymous": 24289, + "enger": 24290, + "Mer": 24291, + "ĠGospel": 24292, + "ĠEth": 24293, + "ĠMH": 24294, + "Ġfem": 24295, + "ĠTrial": 24296, + "Ġdepths": 24297, + "ĠApplied": 24298, + "Ġgrit": 24299, + "Ġerase": 24300, + "sid": 24301, + "comm": 24302, + "}": 24303, + "Ġretreated": 24304, + "Ġanalysed": 24305, + "ĠRegular": 24306, + "ĠPesh": 24307, + "ICAL": 24308, + "pei": 24309, + "ĠReilly": 24310, + "ĠTrib": 24311, + "Ġbooths": 24312, + "Ġdrank": 24313, + "Ġcoma": 24314, + "Ġharvested": 24315, + "ĠCHAR": 24316, + "Ġbutterfly": 24317, + "Ġsailed": 24318, + "ĠDrink": 24319, + "eping": 24320, + "ATCH": 24321, + "ĠLegends": 24322, + "Ġinsured": 24323, + "Ġwholes": 24324, + "ĠBis": 24325, + "ĠShea": 24326, + "ighter": 24327, + "Ġsnakes": 24328, + "ĠGunn": 24329, + "ĠPoss": 24330, + "Ġdispar": 24331, + "Ġbombshell": 24332, + "Ġscanning": 24333, + "340": 24334, + "choice": 24335, + "cool": 24336, + "\"âĢĶ": 24337, + "ĠTheo": 24338, + "rine": 24339, + "ĠJacques": 24340, + "Ġdisadvantaged": 24341, + "Ġparamount": 24342, + "igate": 24343, + "stat": 24344, + "anski": 24345, + "Ġoutsourcing": 24346, + "Ġpopulous": 24347, + "Ġbinge": 24348, + "ĠOrganic": 24349, + "urban": 24350, + "Ġyogurt": 24351, + "Ġretweet": 24352, + "osen": 24353, + "cially": 24354, + "215": 24355, + "Ġeditions": 24356, + "Ġburgeoning": 24357, + "efully": 24358, + "ĠThousand": 24359, + "Ġreplacements": 24360, + "ĠAmazing": 24361, + "rator": 24362, + "icy": 24363, + "Ġintensify": 24364, + "Sen": 24365, + "ĠQuincy": 24366, + "powers": 24367, + "ĠAur": 24368, + "ĠZion": 24369, + "stal": 24370, + "Ġpillar": 24371, + "ĠErit": 24372, + "ĠPerform": 24373, + "aston": 24374, + "Eric": 24375, + "Ġunh": 24376, + "IFF": 24377, + "950": 24378, + "ĠEngineer": 24379, + "ĠLands": 24380, + "Ġdubious": 24381, + "fy": 24382, + "ĠWI": 24383, + "ĠSv": 24384, + "ĠHendricks": 24385, + "ĠKod": 24386, + "Ġoutlining": 24387, + "ĠCorrespond": 24388, + "amus": 24389, + "worst": 24390, + "arter": 24391, + "coni": 24392, + "Ġhierarchy": 24393, + "ĠTHAT": 24394, + "Ġexce": 24395, + "Ġrailways": 24396, + "Ġmasked": 24397, + "lene": 24398, + "Ġoutset": 24399, + "Ġavalanche": 24400, + "Ġnicknamed": 24401, + "Ġ702": 24402, + "Lee": 24403, + "Ġ139": 24404, + "ĠSixth": 24405, + "365": 24406, + "nda": 24407, + "Ġaccountant": 24408, + "Ġobese": 24409, + "Ġgrape": 24410, + "Ġimpunity": 24411, + "ĠYorkers": 24412, + "Ġguardian": 24413, + "icity": 24414, + "Ġcentrist": 24415, + "Ġwaterways": 24416, + "ursed": 24417, + "Ġhopeless": 24418, + "header": 24419, + "Ġtack": 24420, + "Ġric": 24421, + "umn": 24422, + "Ġvalve": 24423, + "Ġtread": 24424, + "ĠCST": 24425, + "Ġhepatitis": 24426, + "ctor": 24427, + "ĠRED": 24428, + "Ġsolitary": 24429, + "NW": 24430, + "Ġceremonial": 24431, + "Ġfoe": 24432, + "Ġling": 24433, + "Jason": 24434, + "ĠLisbon": 24435, + "Ġ1955": 24436, + "ĠHeller": 24437, + "Ġkin": 24438, + "essen": 24439, + "Ġturbines": 24440, + "shi": 24441, + "Ġlodge": 24442, + "Ġveterinary": 24443, + "ĠBoll": 24444, + "ĠConfederation": 24445, + "ĠJournalists": 24446, + "Ġtug": 24447, + "ĠStarr": 24448, + "Ġpiles": 24449, + "Way": 24450, + "adel": 24451, + "orean": 24452, + "Ġoft": 24453, + "Ġshortcomings": 24454, + "ĠSheila": 24455, + "Ġbackbone": 24456, + "III": 24457, + "ĠDarwin": 24458, + "ĠTunis": 24459, + "Ġsuspicions": 24460, + "Ġdisagreements": 24461, + "Ġ247": 24462, + "illery": 24463, + "'\"": 24464, + "Ġsegregation": 24465, + "ohl": 24466, + "Ġinstincts": 24467, + "ĠPoo": 24468, + "nih": 24469, + "parency": 24470, + "uddy": 24471, + 
"esting": 24472, + "asses": 24473, + "ĠIntroduction": 24474, + "ĠSirius": 24475, + "Local": 24476, + "orous": 24477, + "Ġrehearsal": 24478, + "Ġdemol": 24479, + "Ġtraffickers": 24480, + "Ġupsetting": 24481, + "Ġheir": 24482, + "death": 24483, + "ĠMoments": 24484, + "Los": 24485, + "Ġatmospheric": 24486, + "aints": 24487, + "ĠDianne": 24488, + "Ġlikewise": 24489, + "ĠMing": 24490, + "auga": 24491, + "Ġfirsthand": 24492, + "Ġnarratives": 24493, + "ĠAstron": 24494, + "ĠExtreme": 24495, + "Ġhorns": 24496, + "ĠSana": 24497, + "Ġrecapt": 24498, + "ĠMist": 24499, + "ĠRandolph": 24500, + "connect": 24501, + "Ġindecent": 24502, + "Ġforty": 24503, + "Ġjihadists": 24504, + "azes": 24505, + "Ġdread": 24506, + "Ġgrapes": 24507, + "Ġremoves": 24508, + "Ġscreamed": 24509, + "ĠCrus": 24510, + "ikers": 24511, + "Ġsnapshot": 24512, + "ĠCalls": 24513, + "Cons": 24514, + "Ġlettuce": 24515, + "ĠPig": 24516, + "urable": 24517, + "jured": 24518, + "ILY": 24519, + "ĠJessie": 24520, + ".).": 24521, + "Pay": 24522, + "Tra": 24523, + "----------------": 24524, + "ĠUnits": 24525, + "ĠPlayboy": 24526, + "Ġarthritis": 24527, + "Ġafforded": 24528, + "insk": 24529, + "ĠFake": 24530, + "ĠLies": 24531, + "ĠBaltic": 24532, + "oyal": 24533, + "ĠVest": 24534, + "Ġrusher": 24535, + "Ġincorporates": 24536, + "ĠMM": 24537, + "ĠDru": 24538, + "ĠWare": 24539, + "ĠSammy": 24540, + "ĠGob": 24541, + "ĠRuk": 24542, + "Ġ146": 24543, + "ĠCrowd": 24544, + "Ġduel": 24545, + "irts": 24546, + "Ġsourcing": 24547, + "hp": 24548, + "ĠJava": 24549, + "bred": 24550, + "ĠRefer": 24551, + "Ġuninsured": 24552, + "Ġslope": 24553, + "256": 24554, + "Ġregulating": 24555, + "Ġfundra": 24556, + "Ġinserted": 24557, + "ĠNickel": 24558, + "ĠConsumption": 24559, + "ĠRomo": 24560, + "Atlantic": 24561, + "Ġenclave": 24562, + "Ġpegged": 24563, + "Ġdirects": 24564, + "mbudsman": 24565, + "ĠDES": 24566, + "Ob": 24567, + "Ġlimbs": 24568, + "Ġbury": 24569, + "ILA": 24570, + "Ġstew": 24571, + "Ġbreeze": 24572, + "Ġabrupt": 24573, + "ĠGott": 24574, + "ĠClaude": 24575, + "Ġgenetically": 24576, + "Ġrigid": 24577, + "ĠDudley": 24578, + "ĠNer": 24579, + "registered": 24580, + "Ġentrenched": 24581, + "Ġextortion": 24582, + "ĠNurs": 24583, + "Ġcontingency": 24584, + "etter": 24585, + "Ġrejo": 24586, + "Ġprotagonist": 24587, + "Ġcounselling": 24588, + "ĠVit": 24589, + "aware": 24590, + "ĠMonsanto": 24591, + "GG": 24592, + "Ġincarcerated": 24593, + "Ġabduction": 24594, + "Ġreferencing": 24595, + "Germany": 24596, + "uates": 24597, + "reck": 24598, + "Ġtram": 24599, + "Ġchron": 24600, + "Ġmish": 24601, + "ĠVes": 24602, + "ĠTire": 24603, + "Ġvandal": 24604, + "ĠCrazy": 24605, + "ĠLifetime": 24606, + "ĠSpectrum": 24607, + "celer": 24608, + "Ġmotto": 24609, + "hang": 24610, + "Ġblade": 24611, + "gel": 24612, + "Ġbiography": 24613, + "Ġallegiance": 24614, + "hod": 24615, + "hap": 24616, + "ptic": 24617, + "acle": 24618, + "ĠBlade": 24619, + "ĠBoh": 24620, + "Ġ149": 24621, + "Ġchang": 24622, + "Ġcanned": 24623, + "Ġfacilitated": 24624, + "actor": 24625, + "iologist": 24626, + "Ġrebuilt": 24627, + "Ġawake": 24628, + "Ġmayoral": 24629, + "ĠEuros": 24630, + "Ġdangerously": 24631, + "MK": 24632, + "Ġreplica": 24633, + "Ġcoinc": 24634, + "blog": 24635, + "ĠEra": 24636, + "Ġrelinqu": 24637, + "quite": 24638, + "ondon": 24639, + "rosso": 24640, + "tun": 24641, + "Ġtouchscreen": 24642, + "Ġpops": 24643, + "ousing": 24644, + "efficient": 24645, + "Ġ148": 24646, + "Ġconced": 24647, + "although": 24648, + "Ġ1956": 24649, + "Ġmortar": 24650, + "ĠCave": 24651, + "ĠJung": 24652, + "urer": 
24653, + "Ġillusion": 24654, + "ĠBerman": 24655, + "intend": 24656, + "Ġcoping": 24657, + "Dem": 24658, + "tion": 24659, + "estation": 24660, + "ĠSounds": 24661, + "Ġnavigating": 24662, + "Ġsperm": 24663, + "Ġreligions": 24664, + "Ġfol": 24665, + "Ġheroic": 24666, + "FD": 24667, + "Ġhesitant": 24668, + "asure": 24669, + "Ġredeem": 24670, + "Adam": 24671, + "Ġfireplace": 24672, + "vertis": 24673, + "ĠSung": 24674, + "290": 24675, + "iland": 24676, + "ĠUpdates": 24677, + "OTUS": 24678, + "ĠPTSD": 24679, + "Ġhelmets": 24680, + "\"?": 24681, + "Ġslashing": 24682, + "Ġscouts": 24683, + "Ġspelling": 24684, + "ĠInitial": 24685, + "draw": 24686, + "Ġchallengers": 24687, + "Ġsupremacists": 24688, + "Ġpilgrims": 24689, + "Ġasc": 24690, + "ĠFill": 24691, + "ĠPau": 24692, + "Ġjewel": 24693, + "ĠMalt": 24694, + "icip": 24695, + "Ġinhabitants": 24696, + "Ġmetre": 24697, + "ahar": 24698, + "Comp": 24699, + "atches": 24700, + "inv": 24701, + "Ġcyclist": 24702, + "ĠQC": 24703, + "Ġmanually": 24704, + "ĠAnchorage": 24705, + "Ġdiscarded": 24706, + "Ġconsolid": 24707, + "Ġnavig": 24708, + "ĠAnimals": 24709, + "ĠPole": 24710, + "esson": 24711, + "Ġ1954": 24712, + "Ġsorted": 24713, + "Ġmadness": 24714, + "ĠBrigade": 24715, + "ĠGenesis": 24716, + "Ġdismissing": 24717, + "ĠPanasonic": 24718, + "Ġdizz": 24719, + "ĠEducational": 24720, + "ĠKO": 24721, + "ĠPill": 24722, + "ĠGIF": 24723, + "Ġbol": 24724, + "Ġwards": 24725, + "Ġcontroversies": 24726, + "Chinese": 24727, + "Ġantics": 24728, + "Ġreliant": 24729, + "ĠMoff": 24730, + "Ġethanol": 24731, + "Ġtorch": 24732, + "rights": 24733, + "ĠHabit": 24734, + "arton": 24735, + "rera": 24736, + "ĠSasha": 24737, + "abella": 24738, + "Ġproliferation": 24739, + "Ġsincerely": 24740, + "communication": 24741, + "ĠNay": 24742, + "ĠChattanooga": 24743, + "ounces": 24744, + "ĠNXT": 24745, + "ĠEmir": 24746, + "Ġmanipulated": 24747, + "Ġharassing": 24748, + "wat": 24749, + "Ġbouts": 24750, + "Book": 24751, + "Ġhovering": 24752, + "ĠScan": 24753, + "ship": 24754, + "ĠAngola": 24755, + "ĠLC": 24756, + "Ġruins": 24757, + "Ġsexist": 24758, + "zar": 24759, + "Ġpledging": 24760, + "ober": 24761, + "Ġembold": 24762, + "Ġobjection": 24763, + "Ġboasting": 24764, + "MIN": 24765, + "Ġherbs": 24766, + "Ġgears": 24767, + "ĠIc": 24768, + "stre": 24769, + "him": 24770, + "Ġhomicides": 24771, + "cki": 24772, + "castle": 24773, + "counter": 24774, + "ĠCAS": 24775, + "ĠReasons": 24776, + "ĠDeclaration": 24777, + "Ġsimplify": 24778, + "Ġfared": 24779, + "Ġescort": 24780, + "Ġkidn": 24781, + "ĠHamm": 24782, + "Ġnailed": 24783, + "Ġaccommodations": 24784, + "Ġmodifications": 24785, + "rible": 24786, + "Ġwool": 24787, + "EDIT": 24788, + "2010": 24789, + "Ġauthentication": 24790, + "Ġgoat": 24791, + "hom": 24792, + "Ġfederally": 24793, + "ĠRath": 24794, + "Ġspiked": 24795, + "Ġmisrepresent": 24796, + "Ġavenue": 24797, + "Ġbroadcasts": 24798, + "ĠEstonia": 24799, + "ennes": 24800, + "ĠMare": 24801, + "ption": 24802, + "ĠKag": 24803, + "Ġcircumstance": 24804, + "orrow": 24805, + "isons": 24806, + "ĠCollabor": 24807, + "Ġstroll": 24808, + "ĠCPS": 24809, + "soft": 24810, + "iral": 24811, + "apo": 24812, + "usky": 24813, + "poke": 24814, + "Ġwoo": 24815, + "ĠElena": 24816, + "ĠLastly": 24817, + "Ġlinemen": 24818, + "Canadian": 24819, + "ĠAnyway": 24820, + "Ġsubstantive": 24821, + "ĠCurt": 24822, + "Ġard": 24823, + "ĠYosh": 24824, + "ĠBuchanan": 24825, + "Ġrevolving": 24826, + "Ġspecials": 24827, + "Ġshrine": 24828, + "Ġlumber": 24829, + "Ġorchestrated": 24830, + "kie": 24831, + "azy": 24832, + "Ġexpiration": 
24833, + "ĠDaryl": 24834, + "ĠPatri": 24835, + "better": 24836, + "2020": 24837, + "ĠFav": 24838, + "ĠOP": 24839, + "OTT": 24840, + "Ġflush": 24841, + "ĠSikh": 24842, + "Ġecosystems": 24843, + "ĠBET": 24844, + "eared": 24845, + "audio": 24846, + "ĠFahrenheit": 24847, + "police": 24848, + "Ġincarceration": 24849, + "Ġerupt": 24850, + "ĠDamien": 24851, + "ĠHague": 24852, + "ulz": 24853, + "ĠAgents": 24854, + "ĠBanner": 24855, + "Ġconductor": 24856, + "ĠAjax": 24857, + "arson": 24858, + "Ġrests": 24859, + "Ġeurozone": 24860, + "Ġfelon": 24861, + "Ġcurator": 24862, + "morning": 24863, + "Ġevidenced": 24864, + "ĠNeh": 24865, + "Ġmattress": 24866, + "Ġtast": 24867, + "Ġfueling": 24868, + "ĠOccup": 24869, + "Ġbake": 24870, + "ĠZac": 24871, + "meaning": 24872, + "Ill": 24873, + "ĠHau": 24874, + "ĠLaden": 24875, + "Ġbald": 24876, + "Mary": 24877, + "oky": 24878, + "atri": 24879, + "Ġtracker": 24880, + "OTA": 24881, + "catching": 24882, + "ĠUnderground": 24883, + "ĠHuffPost": 24884, + "ĠAtkins": 24885, + "oglu": 24886, + "Ġauthorised": 24887, + "Ġroutines": 24888, + "ĠHof": 24889, + "veland": 24890, + "Ġlangu": 24891, + "Ġprot": 24892, + "ĠHyd": 24893, + "integ": 24894, + "Ġbravery": 24895, + "Ġviolin": 24896, + "Ġdelightful": 24897, + "Ġticks": 24898, + "iton": 24899, + "Ġreap": 24900, + "Ġoversized": 24901, + "ĠPitch": 24902, + "Ġprized": 24903, + "Ġfusion": 24904, + "fact": 24905, + "acting": 24906, + "Ġfullback": 24907, + "Ġpolite": 24908, + "Ġswear": 24909, + "Ġconfiscated": 24910, + "ĠStud": 24911, + "Ġfielded": 24912, + "rito": 24913, + "covered": 24914, + "financial": 24915, + "bill": 24916, + "HK": 24917, + "OTOS": 24918, + "loaded": 24919, + "Ġmarble": 24920, + "ĠDiplom": 24921, + ".âĢĶ": 24922, + "Ġeats": 24923, + "Ġbackfield": 24924, + "Ġtimeframe": 24925, + "Ġvegetarian": 24926, + "Ġswaps": 24927, + "ĠMines": 24928, + "igor": 24929, + "ĠLenn": 24930, + "ĠDP": 24931, + "ordered": 24932, + "ĠShark": 24933, + "Ġquant": 24934, + "erence": 24935, + "Ġashes": 24936, + "ĠBuckley": 24937, + "ophobia": 24938, + "Ġwarranted": 24939, + "Rose": 24940, + "Ġunreasonable": 24941, + "ĠJav": 24942, + "Ġpalette": 24943, + "Ġjoints": 24944, + "Ġadvent": 24945, + "Ġnoteworthy": 24946, + "ĠNicol": 24947, + "ĠChristensen": 24948, + "Ġplummeted": 24949, + "ayers": 24950, + "Ġdefends": 24951, + "Ġcontended": 24952, + "ĠCongratulations": 24953, + "kish": 24954, + "ĠHannity": 24955, + "Ġgroundwater": 24956, + "ĠKramer": 24957, + "Ġerect": 24958, + "Ġappet": 24959, + "ĠKardash": 24960, + "Ġexacerbated": 24961, + "Ġexplanations": 24962, + "vious": 24963, + "eport": 24964, + "---": 24965, + "icism": 24966, + "ĠNatasha": 24967, + "ĠGeoffrey": 24968, + "estro": 24969, + "Article": 24970, + "Ġincidence": 24971, + "Ġprovoked": 24972, + "elf": 24973, + "Ġinsistence": 24974, + "ĠOUR": 24975, + "Ġfertilizer": 24976, + "Ġstickers": 24977, + "ĠGators": 24978, + "ĠLanding": 24979, + "ĠDON": 24980, + "sta": 24981, + "ĠRobbins": 24982, + "Ġpixels": 24983, + "ĠHoy": 24984, + "imated": 24985, + "ĠÃī": 24986, + "â": 24987, + "Ġsimpl": 24988, + "Other": 24989, + "245": 24990, + "Ġforcibly": 24991, + "'.\"": 24992, + "Ġsmashing": 24993, + "Ġmosquitoes": 24994, + "Ġpaints": 24995, + "Ġdebating": 24996, + "enty": 24997, + "ĠIB": 24998, + "leaf": 24999, + "ĠDah": 25000, + "Ġreferral": 25001, + "pired": 25002, + "Ġbrunch": 25003, + "gie": 25004, + "Ġvict": 25005, + "ribute": 25006, + "Ġbloggers": 25007, + "Ġgum": 25008, + "ĠAdmiral": 25009, + "France": 25010, + "ĠPK": 25011, + "ĠSaturn": 25012, + "Ġinflated": 25013, + "WAR": 25014, + 
"Ġscenic": 25015, + "usal": 25016, + "their": 25017, + "Ġcontends": 25018, + "Ġpathways": 25019, + "inis": 25020, + "Ġawarding": 25021, + "Ġmisled": 25022, + "Ġeternal": 25023, + "Ġexaminations": 25024, + "Ġpoker": 25025, + "Ġsafest": 25026, + "Ġchildcare": 25027, + "aday": 25028, + "Ġpreceding": 25029, + "ĠCollective": 25030, + "Ġrespectable": 25031, + "ographical": 25032, + "Ġoak": 25033, + "00000": 25034, + "ĠCorridor": 25035, + "oran": 25036, + "133": 25037, + "Ġmushrooms": 25038, + "gaard": 25039, + "ĠOmega": 25040, + "ĠNaturally": 25041, + "anim": 25042, + "Ġcaptains": 25043, + "Ġtang": 25044, + "Ġlobbyists": 25045, + "ĠSug": 25046, + "Ġsucc": 25047, + "249": 25048, + "ENG": 25049, + "134": 25050, + "Ġsolic": 25051, + "ĠAdded": 25052, + "ĠSuicide": 25053, + "ĠFULL": 25054, + "ĠStrauss": 25055, + "ĠDiesel": 25056, + "Ġtempting": 25057, + "acist": 25058, + "ĠDelivery": 25059, + "Ġquiz": 25060, + "ĠPARK": 25061, + "Ġcollisions": 25062, + "Ġrestrained": 25063, + "purpose": 25064, + "ĠChanges": 25065, + "Ġabsentee": 25066, + "Ġprobes": 25067, + "hib": 25068, + "Ġcul": 25069, + "Ġpetty": 25070, + "Ġnecess": 25071, + "Ġcues": 25072, + "OME": 25073, + "Ġinadvertently": 25074, + "urity": 25075, + "ĠStuff": 25076, + "FG": 25077, + "Ġwrestlers": 25078, + "Ġpaste": 25079, + "ĠRoku": 25080, + "Ġcardboard": 25081, + "aires": 25082, + "Ġvariables": 25083, + "ĠSaras": 25084, + "ĠFif": 25085, + "Ġinvests": 25086, + "ĠDiscover": 25087, + "ĠFix": 25088, + "Thomas": 25089, + "ĠLunch": 25090, + "lv": 25091, + "camera": 25092, + "Step": 25093, + "Ġresumes": 25094, + "ĠSacred": 25095, + "ĠShooting": 25096, + "Ġnoble": 25097, + "Ġslopes": 25098, + "Ġont": 25099, + "Ġtwists": 25100, + "Very": 25101, + "Ġbigotry": 25102, + "ĠTib": 25103, + "Ġmos": 25104, + "Ġwarrior": 25105, + "Ġbroadcasters": 25106, + "Ġubiquitous": 25107, + "ameda": 25108, + "Ġchess": 25109, + "Special": 25110, + "Ġconver": 25111, + "Ġdeleg": 25112, + "endant": 25113, + "Ġfoil": 25114, + "Ġlush": 25115, + "Ġtaxed": 25116, + "Mag": 25117, + "ahs": 25118, + "Ġtablespoons": 25119, + "scription": 25120, + "clamation": 25121, + "ĠCertain": 25122, + "ĠDiversity": 25123, + "Ġhairst": 25124, + "ĠBrewery": 25125, + "Ġshedding": 25126, + "Cla": 25127, + "Ġpenis": 25128, + "ĠMurder": 25129, + "Park": 25130, + "uner": 25131, + "iments": 25132, + "ĠOVER": 25133, + "hus": 25134, + "Ġtabloid": 25135, + "Chart": 25136, + "Ġvouchers": 25137, + "ĠCoord": 25138, + "Ġmethane": 25139, + "ĠFisheries": 25140, + "ĠKham": 25141, + "includes": 25142, + "ĠSuperman": 25143, + "ensed": 25144, + "isure": 25145, + "Amazon": 25146, + "Ġvacated": 25147, + "heet": 25148, + "Ġroast": 25149, + "Ġlegalize": 25150, + "ĠTut": 25151, + "Ġsignage": 25152, + "init": 25153, + "Ġthefts": 25154, + "202": 25155, + "Ġstatic": 25156, + "Ġchants": 25157, + "Bob": 25158, + "Ġdiscretionary": 25159, + "Ġendurance": 25160, + "Ġcollegiate": 25161, + "Ġcorridors": 25162, + "Ġslack": 25163, + "ĠLash": 25164, + "Az": 25165, + "Series": 25166, + "Ġnonpartisan": 25167, + "ĠMcGill": 25168, + "Ġuneven": 25169, + "ulsive": 25170, + "eu": 25171, + "Ġpil": 25172, + "Ġfisheries": 25173, + "Ġonslaught": 25174, + "fiction": 25175, + "holding": 25176, + "Ġcheated": 25177, + "Ġtraumat": 25178, + "lasting": 25179, + "Ġmultitude": 25180, + "ĠThr": 25181, + "ĠBreast": 25182, + "Ġ1600": 25183, + "ĠMatth": 25184, + "Ġdiminish": 25185, + "ĠFTC": 25186, + "Ġgram": 25187, + "ĠResident": 25188, + "Ġfading": 25189, + "Ġmarginalized": 25190, + "ĠLite": 25191, + "ĠCarlton": 25192, + "Ġerad": 25193, + "Welcome": 25194, + 
"ĠFaw": 25195, + "iddy": 25196, + "Ġparticip": 25197, + "Ġcz": 25198, + "Ġtexted": 25199, + "Ġsuites": 25200, + "ĠForever": 25201, + "Ġrendition": 25202, + "rait": 25203, + "ĠPrague": 25204, + "Ġsponsoring": 25205, + "Ġcompos": 25206, + "ĠBeacon": 25207, + "144": 25208, + "Ġpupil": 25209, + "Ġintricate": 25210, + "Ġathleticism": 25211, + "Ġoptimization": 25212, + "Ġloot": 25213, + "polit": 25214, + "ĠOtt": 25215, + "Whatever": 25216, + "uno": 25217, + "ĠConstable": 25218, + "esville": 25219, + "Ġlookout": 25220, + "ĠAircraft": 25221, + "Ġspo": 25222, + "Ġcorrobor": 25223, + "Ġhiatus": 25224, + "ĠKnowing": 25225, + "ĠHamp": 25226, + "Ġspe": 25227, + "Ġstoring": 25228, + "Ġshakes": 25229, + "uran": 25230, + "Ġsickness": 25231, + "Ġliber": 25232, + "ĠAdministrative": 25233, + "Ġpleasing": 25234, + "ĠEqual": 25235, + "ĠConversation": 25236, + "Ġalgae": 25237, + "Ġlobbyist": 25238, + "ĠHelena": 25239, + "ptions": 25240, + "Ġfaire": 25241, + "ĠGone": 25242, + "ĠWiggins": 25243, + "Robert": 25244, + "Ġlistens": 25245, + "ĠDaisy": 25246, + "Ġsticky": 25247, + "sale": 25248, + "ĠMarijuana": 25249, + "ĠSSD": 25250, + "ĠTool": 25251, + "once": 25252, + "ĠHarmon": 25253, + "mobile": 25254, + "Ġdetain": 25255, + "Money": 25256, + "Ġflawless": 25257, + "forced": 25258, + "Ġguru": 25259, + "Ġairspace": 25260, + "ĠArchie": 25261, + "ĠGender": 25262, + "ĠMeat": 25263, + "abilities": 25264, + "ĠBD": 25265, + "Open": 25266, + "Ġoutsider": 25267, + "issue": 25268, + "Ġlearns": 25269, + "natural": 25270, + "Ġvinegar": 25271, + "ĠSUB": 25272, + "ĠRecon": 25273, + "blers": 25274, + "Ġsniff": 25275, + "Ġsuppression": 25276, + "Ġsaf": 25277, + "urger": 25278, + "Ġbunker": 25279, + "asaki": 25280, + "ĠSpartan": 25281, + "ĠTok": 25282, + "Ġrav": 25283, + "Ġfoc": 25284, + "Sean": 25285, + "etric": 25286, + "Ġballpark": 25287, + "ĠHerb": 25288, + "ĠBM": 25289, + "ĠPublishing": 25290, + "Ġroadmap": 25291, + "pered": 25292, + "Ġpredator": 25293, + "ĠBlockchain": 25294, + "Ġvalidity": 25295, + "ĠGlou": 25296, + "ĠYamaha": 25297, + "Ġadop": 25298, + "Ġswamp": 25299, + "Ġcomplied": 25300, + "Ky": 25301, + "Greg": 25302, + "casts": 25303, + "john": 25304, + "ĠBosnia": 25305, + "Ġcinematic": 25306, + "ĠTavern": 25307, + "Ġfrustrations": 25308, + "eryl": 25309, + "Ġfairy": 25310, + "UNCH": 25311, + "ĠTus": 25312, + "Corp": 25313, + "ĠNug": 25314, + "closed": 25315, + "Ġexercised": 25316, + "urden": 25317, + "Ġdigitally": 25318, + "137": 25319, + "ĠVictims": 25320, + "Ġreluctance": 25321, + "ELL": 25322, + "ĠTribe": 25323, + "chall": 25324, + "Ġwhiskey": 25325, + "ogl": 25326, + "Ġmater": 25327, + "ĠBac": 25328, + "Ġapartheid": 25329, + "ĠMBA": 25330, + "mot": 25331, + "ĠIre": 25332, + "®,": 25333, + "ĠChic": 25334, + "Ġtimed": 25335, + "ĠDome": 25336, + "efer": 25337, + "Ġobserver": 25338, + "unky": 25339, + "ĠKant": 25340, + "Ġundrafted": 25341, + "Ġsimplicity": 25342, + "onds": 25343, + "Ġstoked": 25344, + "Ġ1949": 25345, + "Ġransomware": 25346, + "ĠPow": 25347, + "ĠAngelo": 25348, + "ĠAmbrose": 25349, + "adjusted": 25350, + "Guard": 25351, + "138": 25352, + "ĠKaplan": 25353, + "stri": 25354, + "Ġcries": 25355, + "NF": 25356, + "atro": 25357, + "Ġavocado": 25358, + "illian": 25359, + "Ġsculptures": 25360, + "Ġelevation": 25361, + "Ġinspires": 25362, + "Ġgenerals": 25363, + "arb": 25364, + "chell": 25365, + "ĠJournalism": 25366, + "ĠHybrid": 25367, + "ĠCaller": 25368, + "vec": 25369, + "Lu": 25370, + "Ġresemble": 25371, + "bys": 25372, + "erving": 25373, + "antz": 25374, + "Ġwiden": 25375, + "vised": 25376, + "Ev": 25377, + 
"Ġdiagn": 25378, + "ĠMakes": 25379, + "Ġcer": 25380, + "ĠPats": 25381, + "single": 25382, + "sche": 25383, + "struct": 25384, + "Ġdissolved": 25385, + "Ġtimeout": 25386, + "Ġenhancement": 25387, + "CF": 25388, + "Ġindust": 25389, + "ĠDed": 25390, + "ĠZo": 25391, + "CB": 25392, + "Ġpesticides": 25393, + "ĠRubin": 25394, + "George": 25395, + "opal": 25396, + "Ġmotel": 25397, + "critical": 25398, + "Ġcollapsing": 25399, + "ĠShal": 25400, + "tex": 25401, + "Ġcomplementary": 25402, + "Ġoust": 25403, + "ĠFlu": 25404, + "Ġexporting": 25405, + "Ġdifferential": 25406, + "north": 25407, + "ĠFG": 25408, + "Ġspoon": 25409, + "sha": 25410, + "Ġdismantle": 25411, + "elta": 25412, + "Ġjar": 25413, + "space": 25414, + "Smart": 25415, + "mere": 25416, + "Ð": 25417, + "ĠGillespie": 25418, + "Lo": 25419, + "ĠMead": 25420, + "capacity": 25421, + "ĠIssue": 25422, + "050": 25423, + "ĠVall": 25424, + "Ġdisgr": 25425, + "Ġmeme": 25426, + "Ġpard": 25427, + "Ġcompensated": 25428, + "ĠKet": 25429, + "major": 25430, + "ĠBren": 25431, + "Ġheed": 25432, + "131": 25433, + "Ġcm": 25434, + "Ġdazzling": 25435, + "ĠCheese": 25436, + "Ġmonumental": 25437, + "Ġyielding": 25438, + "Read": 25439, + "Ġgrinding": 25440, + "Ang": 25441, + "Ġdefiance": 25442, + "Ġintimidated": 25443, + "Ġ310": 25444, + "Ġoutsiders": 25445, + "houn": 25446, + "Ma": 25447, + "ĸ": 25448, + "ĠForget": 25449, + "ĠSans": 25450, + "Ġunfolding": 25451, + "ĠSap": 25452, + "ĠLak": 25453, + "Ġsectarian": 25454, + "ĠDaddy": 25455, + "oxy": 25456, + "hitting": 25457, + "Ġdetectors": 25458, + "ĠRee": 25459, + "Ġbroaden": 25460, + "Ġslaying": 25461, + "Ġsuspending": 25462, + "Ġinvestig": 25463, + "Tuesday": 25464, + "Ġantibiotic": 25465, + "ĠShiite": 25466, + "igi": 25467, + "ĠExternal": 25468, + "ĠPhotographer": 25469, + "Ġerratic": 25470, + "NJ": 25471, + "ĠDock": 25472, + "Ġoutweigh": 25473, + "rants": 25474, + "Ġlobster": 25475, + "Ġreactor": 25476, + "Ġunrealistic": 25477, + "ĠAudrey": 25478, + "ĠYor": 25479, + "Anyone": 25480, + "Ġfraught": 25481, + "е": 25482, + "ĠWester": 25483, + "fc": 25484, + "ĠDunham": 25485, + "ĠLug": 25486, + "allow": 25487, + "139": 25488, + "Ġparity": 25489, + "Ġhorizontal": 25490, + "ijuana": 25491, + "Ġcivilization": 25492, + "ĠGins": 25493, + "Ġsmokers": 25494, + "ĠDiabetes": 25495, + "Five": 25496, + "ĠDG": 25497, + "Ġunderscores": 25498, + "Ġelabor": 25499, + "ĠLub": 25500, + "ĠDevil": 25501, + "Ġ154": 25502, + "ĠGuarant": 25503, + "ĠPandora": 25504, + "Ġexcav": 25505, + "Ġaccuser": 25506, + "Ġrevolt": 25507, + "Ġinstructors": 25508, + "Ġire": 25509, + "ographic": 25510, + "ĠCLE": 25511, + "Ġexpedition": 25512, + "ould": 25513, + "Ġstriving": 25514, + "south": 25515, + "onis": 25516, + "ĠSwed": 25517, + "MY": 25518, + "ĠLevin": 25519, + "Ġcarp": 25520, + "ĠArchitects": 25521, + "Ġ{": 25522, + "Ġcovert": 25523, + "Ġcooled": 25524, + "ĠStaten": 25525, + "Ġspecializing": 25526, + "ĠHazel": 25527, + "Ġlen": 25528, + "ighty": 25529, + "Ġbrilliantly": 25530, + "Phil": 25531, + "Ġlament": 25532, + "Australia": 25533, + "203": 25534, + "Ġticking": 25535, + "Ġadjud": 25536, + "Ġroommate": 25537, + "ĠSheet": 25538, + "capital": 25539, + "167": 25540, + "Ġendeavor": 25541, + "Ġaver": 25542, + "Ġdues": 25543, + "ĠCycl": 25544, + "oried": 25545, + "Va": 25546, + "loading": 25547, + "Ġpremie": 25548, + "Ġregimes": 25549, + "ĠAly": 25550, + "Ġperennial": 25551, + "Ġconsoles": 25552, + "Ġironic": 25553, + "ichael": 25554, + "Ġvigorously": 25555, + "Ġtransmit": 25556, + "gary": 25557, + "eking": 25558, + "Ġjails": 25559, + "ĠEpiscopal": 
25560, + "eddy": 25561, + "Ġidle": 25562, + "Ġsafeguards": 25563, + "Ġdwindling": 25564, + "NOR": 25565, + "torn": 25566, + "ĠEvangel": 25567, + "ĠPlastic": 25568, + "ĠTerm": 25569, + "Ġforwarded": 25570, + "avage": 25571, + "Ġrefrigerator": 25572, + "arna": 25573, + "ĠGuinness": 25574, + "ĠCandy": 25575, + "Ġbotched": 25576, + "seller": 25577, + "Ġpul": 25578, + "grades": 25579, + "oshenko": 25580, + "earth": 25581, + "nette": 25582, + "Ġtraps": 25583, + "Ġtarn": 25584, + "Ġmilitar": 25585, + "ĠAriel": 25586, + "Ġtubes": 25587, + "ulo": 25588, + "Water": 25589, + "edin": 25590, + "Ġmarvel": 25591, + "chenko": 25592, + "ĠElk": 25593, + "spect": 25594, + "coe": 25595, + "ĠIllustrated": 25596, + "Ġruthless": 25597, + "etermined": 25598, + "Ġdys": 25599, + "Ġbreaching": 25600, + "gee": 25601, + "Nick": 25602, + "Ġcruiser": 25603, + "Ġciv": 25604, + "Ġdou": 25605, + "Ġ;": 25606, + "deb": 25607, + "ĠAsheville": 25608, + "Ġbiting": 25609, + "Ġyo": 25610, + "Courtesy": 25611, + "Ġroses": 25612, + "ĠConsequently": 25613, + "Ġrevis": 25614, + "Ġconfinement": 25615, + "next": 25616, + "produced": 25617, + "Ġmoratorium": 25618, + "Ġkne": 25619, + "eties": 25620, + "Ġplethora": 25621, + "Ġceleb": 25622, + "FIN": 25623, + "Ġdepartures": 25624, + "ĠWynne": 25625, + "abilia": 25626, + "ĠCourts": 25627, + "olis": 25628, + "Ġcereal": 25629, + "Ġblended": 25630, + "333": 25631, + "ĠLun": 25632, + "Ġrepe": 25633, + "Ġmathematics": 25634, + "Ġpharmacies": 25635, + "Center": 25636, + "Ġwhist": 25637, + "pine": 25638, + "Ġperm": 25639, + "Ġcustomary": 25640, + "Ġhormones": 25641, + "Ġcleansing": 25642, + "Ġconfidentiality": 25643, + "Ġmascot": 25644, + "Ġslippery": 25645, + "Ġmediation": 25646, + "Ġpodcasts": 25647, + "Ġcoating": 25648, + "Ġconveyed": 25649, + "Ġgir": 25650, + "ĠNurse": 25651, + "DM": 25652, + "Ġlured": 25653, + "orted": 25654, + "Ġolig": 25655, + "ritz": 25656, + "ĠINF": 25657, + "Ġtirelessly": 25658, + "Ġdoorstep": 25659, + "Ġtomb": 25660, + "Ġwithholding": 25661, + "irling": 25662, + "Ġhog": 25663, + "Ġ156": 25664, + "Ġgau": 25665, + "chem": 25666, + "raid": 25667, + "Ġtrolls": 25668, + "Ġ182": 25669, + "ĠColumb": 25670, + "Ġtissues": 25671, + "Ġnaive": 25672, + "Ġlect": 25673, + "Central": 25674, + "Sign": 25675, + "168": 25676, + "Ġbribe": 25677, + "ĠDoll": 25678, + "ĠTripoli": 25679, + "Ġfunk": 25680, + "Ġplaza": 25681, + "Ġmechanic": 25682, + "mem": 25683, + "Ġmonkey": 25684, + "grid": 25685, + "Ġtainted": 25686, + "ĠNicaragua": 25687, + "pelling": 25688, + "ĠXia": 25689, + "ammers": 25690, + "Ġorth": 25691, + "ICAN": 25692, + "Ġrant": 25693, + "Ġdiary": 25694, + "ĠHarrington": 25695, + "Ġimply": 25696, + "Qaeda": 25697, + "Ġworsen": 25698, + "Ġcrafting": 25699, + "ĠShir": 25700, + "Ġcoincided": 25701, + "Ġsnatched": 25702, + "ileen": 25703, + "sei": 25704, + "Ġsurgeons": 25705, + "directed": 25706, + "Ġcompulsory": 25707, + "Ġnowadays": 25708, + "ĠLI": 25709, + "ĠRebel": 25710, + "Ġlions": 25711, + "ĠJR": 25712, + "scar": 25713, + "ĠRespons": 25714, + "Ġscroll": 25715, + "ĠErd": 25716, + "iety": 25717, + "\";": 25718, + "ĠBone": 25719, + "ĠRumble": 25720, + "ĠKS": 25721, + "ĠLaur": 25722, + "kell": 25723, + "ĠBirds": 25724, + "agic": 25725, + "Ġsimmer": 25726, + "Ġrunaway": 25727, + "Ġ162": 25728, + "auna": 25729, + "Ġdialog": 25730, + "Ġlouder": 25731, + "esque": 25732, + "RR": 25733, + "Ġbloss": 25734, + "Ġcaliber": 25735, + "nery": 25736, + "Ġhauled": 25737, + "Ġbacterial": 25738, + "ĠVanity": 25739, + "ĠPrograms": 25740, + "omew": 25741, + "ĠMama": 25742, + "Ġarr": 25743, + "Ġdod": 
25744, + "ĠJarvis": 25745, + "ĠFIRST": 25746, + "Ġinjections": 25747, + "ĠBallard": 25748, + "Ġmedically": 25749, + "angan": 25750, + "ĠNewfoundland": 25751, + "Ġfracking": 25752, + "Ġbast": 25753, + "outing": 25754, + "Ġmercury": 25755, + "Ġwatershed": 25756, + "ĠAmateur": 25757, + "Ġ153": 25758, + "escal": 25759, + "Ġpainter": 25760, + "creat": 25761, + "Ġperceive": 25762, + "Ġgent": 25763, + "attacks": 25764, + "worked": 25765, + "Ġimporting": 25766, + "Indian": 25767, + "Ġconvict": 25768, + "clad": 25769, + "Ġbudding": 25770, + "Ġambient": 25771, + "ĠWitness": 25772, + "letes": 25773, + "Ġbuffet": 25774, + "Ġneedles": 25775, + "Ġcoding": 25776, + "Ġchoke": 25777, + "Ġcorrespondence": 25778, + "Ġgods": 25779, + "Ġdances": 25780, + "Ġsteadfast": 25781, + "cert": 25782, + "Ġroaming": 25783, + "between": 25784, + "weak": 25785, + "Jer": 25786, + "jandro": 25787, + "Ġdiscouraged": 25788, + "Ġfruition": 25789, + "ĠØ": 25790, + "ĠKop": 25791, + "ULL": 25792, + "efe": 25793, + "imble": 25794, + "obb": 25795, + "ulla": 25796, + "Ġaccredited": 25797, + "Ġlectures": 25798, + "bil": 25799, + "why": 25800, + "Ġgreeting": 25801, + "ĠBoost": 25802, + "Ġmailed": 25803, + "Ġtroop": 25804, + "Ġfrig": 25805, + "Ġrese": 25806, + "Ġscratched": 25807, + "Stars": 25808, + "ĠRailroad": 25809, + "ĠIdol": 25810, + "Ġsuccumbed": 25811, + "ĠWeeks": 25812, + "ffe": 25813, + "Ġjihadist": 25814, + "ITION": 25815, + "Ġthreads": 25816, + "ĠGenerally": 25817, + "Ġmedieval": 25818, + "Ġquotas": 25819, + "ĠFerry": 25820, + "rique": 25821, + "Ġprod": 25822, + "ĠEduc": 25823, + "rive": 25824, + "Ġensued": 25825, + "Cy": 25826, + "Ġinfring": 25827, + "Ġprank": 25828, + "Ġfrontline": 25829, + "Ġcompletes": 25830, + "upe": 25831, + "Ġmanageable": 25832, + "Ġpoems": 25833, + "otten": 25834, + "igne": 25835, + "threat": 25836, + "ĠDri": 25837, + "ĠLINK": 25838, + "Calif": 25839, + "ĠDos": 25840, + "ulent": 25841, + "Ġaids": 25842, + "Ġslips": 25843, + "umped": 25844, + "Ġstyled": 25845, + "Ġdisproportionately": 25846, + "ĠDish": 25847, + "ĠUncle": 25848, + "andel": 25849, + "Ġrecharge": 25850, + "rators": 25851, + "ĠSPR": 25852, + "Ġguarded": 25853, + "ĠGreatest": 25854, + "ĠSkills": 25855, + "ĠNob": 25856, + "ĠDesk": 25857, + "ĠCros": 25858, + "Ġwrit": 25859, + "Ġquery": 25860, + "ORTS": 25861, + "Ġbundled": 25862, + "Ġgib": 25863, + "Ġeth": 25864, + "iesta": 25865, + "Ġevade": 25866, + "dict": 25867, + "straight": 25868, + "Met": 25869, + "present": 25870, + "Ġdiff": 25871, + "Ġdere": 25872, + "ĠSpl": 25873, + "Ġrepr": 25874, + "ĠBeard": 25875, + "Ġvain": 25876, + "Ġappointing": 25877, + "ĠVisual": 25878, + "caps": 25879, + "gado": 25880, + "ĠRican": 25881, + "ĠPose": 25882, + "endor": 25883, + "Ġ222": 25884, + "ĠLear": 25885, + "Ġconstructing": 25886, + "Dan": 25887, + "ĠSpears": 25888, + "ĠTherapy": 25889, + "pta": 25890, + "Ġrehabilit": 25891, + "Ġrisked": 25892, + "ĠGuer": 25893, + "HF": 25894, + "Ġ301": 25895, + "Ġliking": 25896, + "Ġmodular": 25897, + "eree": 25898, + "ĠMAT": 25899, + "ĠHomeless": 25900, + "Ġstove": 25901, + "erd": 25902, + "hash": 25903, + "ĠAchilles": 25904, + "ĠBeta": 25905, + "Ġincl": 25906, + "Ġgunned": 25907, + "ĠCrab": 25908, + "ĠMara": 25909, + "Ġinvaded": 25910, + "ulatory": 25911, + "ATA": 25912, + "angering": 25913, + "onso": 25914, + "Ġallocate": 25915, + "Ġgarment": 25916, + "itudes": 25917, + "ĠHuang": 25918, + "Ġstaples": 25919, + "ĠAlban": 25920, + "Ġtrough": 25921, + "Ġupright": 25922, + "tie": 25923, + "Ġexploits": 25924, + "ĠVaughan": 25925, + "ĠDarrell": 25926, + "Ġassortment": 
25927, + "ĠChill": 25928, + "Ġlearners": 25929, + "aqu": 25930, + "Ġexplode": 25931, + "ĠChong": 25932, + "bt": 25933, + "opl": 25934, + "Ġaltern": 25935, + "Ġ151": 25936, + "fur": 25937, + "ULT": 25938, + "HOU": 25939, + "ĠMemory": 25940, + "Ġboosts": 25941, + "ynes": 25942, + "priv": 25943, + "Ġtimeless": 25944, + "Ġcurtail": 25945, + "ĠCary": 25946, + "ĠHud": 25947, + "Ġexclus": 25948, + "Ġ275": 25949, + "Ġfry": 25950, + "ĠVera": 25951, + "Ġdefied": 25952, + "ĠDust": 25953, + "Ġenvision": 25954, + "ĠPhilipp": 25955, + "Ġenhancements": 25956, + "ĠLIB": 25957, + "ggy": 25958, + "ĠAzure": 25959, + "esis": 25960, + "Ġcharismatic": 25961, + "Ġcoincide": 25962, + "inged": 25963, + "ĠChoose": 25964, + "Ġsizeable": 25965, + "136": 25966, + "Ġpronounce": 25967, + "ĠPositive": 25968, + "Ġideally": 25969, + "Ġechoes": 25970, + "Ġcottage": 25971, + "Ġencrypted": 25972, + "Prime": 25973, + "Ġá": 25974, + "Ġflashes": 25975, + "Group": 25976, + "Ġ501": 25977, + "heat": 25978, + "atility": 25979, + "ĠTesting": 25980, + "pex": 25981, + "WT": 25982, + "154": 25983, + "annah": 25984, + "Ġcompromising": 25985, + "Ġinactive": 25986, + "Ġdisparity": 25987, + "Ġgruesome": 25988, + "ĠFeather": 25989, + "ĠMandal": 25990, + "Ġthereof": 25991, + "ĠProducer": 25992, + "Ġprofiling": 25993, + "Ġlogistical": 25994, + "Ġcornerstone": 25995, + "ĠClaudia": 25996, + "Congress": 25997, + "ĠDill": 25998, + "ophone": 25999, + "Ġcameo": 26000, + "ĠCutler": 26001, + "Ġcraz": 26002, + "throw": 26003, + "ĠKasich": 26004, + "Ġexploiting": 26005, + "ĠSeas": 26006, + "agles": 26007, + "ĠGeological": 26008, + "ĠStub": 26009, + "ĠUps": 26010, + "MER": 26011, + "Ġmem": 26012, + "itution": 26013, + "Ġunderstandably": 26014, + "Ġcontractual": 26015, + "warming": 26016, + "qi": 26017, + "Sky": 26018, + "whelming": 26019, + "Ġcurse": 26020, + "ĠAren": 26021, + "Ġ265": 26022, + "ĠGree": 26023, + "Ġpresiding": 26024, + "Works": 26025, + "stones": 26026, + "Ġappalling": 26027, + "plex": 26028, + "dj": 26029, + "aunting": 26030, + "Ġimag": 26031, + "Ġsexism": 26032, + "ĠVert": 26033, + "ĠRag": 26034, + "ĠBliss": 26035, + "posium": 26036, + "div": 26037, + "Ġexperimenting": 26038, + "Ass": 26039, + "Lago": 26040, + "worthiness": 26041, + "ĠBerk": 26042, + "ĠDisneyland": 26043, + "Ġexaggerated": 26044, + "iliation": 26045, + "ĠFP": 26046, + "Ġprincipals": 26047, + "Miami": 26048, + "ropri": 26049, + "PLE": 26050, + "iona": 26051, + "ĠPokemon": 26052, + "apse": 26053, + "Ġbubbles": 26054, + "INC": 26055, + "ĠCaps": 26056, + "ĠBrowne": 26057, + "sing": 26058, + "Ġcafé": 26059, + "Ġceilings": 26060, + "frame": 26061, + "ĠIrwin": 26062, + "ATS": 26063, + "dated": 26064, + "Ġprotester": 26065, + "Ġtaps": 26066, + "ĠOslo": 26067, + "Ù": 26068, + "Ġconcentrations": 26069, + "Ġdistributions": 26070, + "Ġglucose": 26071, + "ĠRudolph": 26072, + "Ġtowels": 26073, + "Ġâĸº": 26074, + "Ġneighbourhoods": 26075, + "Ġinduction": 26076, + "Ġglaring": 26077, + "Ġannexation": 26078, + "Ġunsustainable": 26079, + "ĠTend": 26080, + "Ġthumbs": 26081, + "iegel": 26082, + "cript": 26083, + "gor": 26084, + "closure": 26085, + "thought": 26086, + "Ġpaddle": 26087, + "Ġemulate": 26088, + "Ġdiameter": 26089, + "Ġtailor": 26090, + "ĠCorpor": 26091, + "icable": 26092, + "ĠPrin": 26093, + "Ġadminister": 26094, + "ĠJudd": 26095, + "ĠColleg": 26096, + "aund": 26097, + "ĠPond": 26098, + "ĠNOTE": 26099, + "Ġcombating": 26100, + "Ġinvention": 26101, + "ĠOculus": 26102, + "ĠRepl": 26103, + "iscal": 26104, + "Ġtrilogy": 26105, + "anian": 26106, + "ATT": 26107, + "ĠCoke": 26108, + 
"DL": 26109, + "ĠLup": 26110, + "living": 26111, + "Ġadvertise": 26112, + "ĠConnie": 26113, + "amping": 26114, + "Ġsung": 26115, + "ORY": 26116, + "ĠTet": 26117, + "Ġsplits": 26118, + "Ġreconnect": 26119, + "Ġlou": 26120, + "mut": 26121, + "ulator": 26122, + "Ġstrap": 26123, + "Ġswallow": 26124, + "rote": 26125, + "Ġexec": 26126, + "ffen": 26127, + "ĠCombine": 26128, + "ĠTreat": 26129, + "Ġsorrow": 26130, + "ĠNotably": 26131, + "ĠSever": 26132, + "rette": 26133, + "Ġwherein": 26134, + "Ġtransitioning": 26135, + "Ġtrout": 26136, + "Ġcockpit": 26137, + "Ġcrawl": 26138, + "Ġferv": 26139, + "Ġliquids": 26140, + "Ġtsp": 26141, + "atell": 26142, + "Ġmeasles": 26143, + "Ġjug": 26144, + "Ac": 26145, + "ĠKD": 26146, + "ĠMoose": 26147, + "Ġvans": 26148, + "chain": 26149, + "ĠPapua": 26150, + "plet": 26151, + "Wednesday": 26152, + "lynn": 26153, + "chery": 26154, + "budget": 26155, + "Tony": 26156, + "ĠBacon": 26157, + "Ġstirred": 26158, + "ĠSpecialist": 26159, + "Ġcounterfeit": 26160, + "а": 26161, + "Ġdifferentiate": 26162, + "Ġmuscular": 26163, + "ĠTheodore": 26164, + "Ġlooms": 26165, + "ĠXX": 26166, + "ottage": 26167, + "Ġbenches": 26168, + "ĠMunicip": 26169, + "Po": 26170, + "ĠHeck": 26171, + "Ġscars": 26172, + "ĠNim": 26173, + "ÙĬ": 26174, + "ĠIngredients": 26175, + "Ġecological": 26176, + "ĠAWS": 26177, + "Ġdispose": 26178, + "Ġmattered": 26179, + "Ġ720": 26180, + "Ġpatriotism": 26181, + "ĠGrind": 26182, + "Ġcurved": 26183, + "opia": 26184, + "ĠLiqu": 26185, + "Ġevangelical": 26186, + "tto": 26187, + "ĠMaterial": 26188, + "ĠShowtime": 26189, + "ĠBS": 26190, + "Ġcheckpoints": 26191, + "Ġcrippling": 26192, + "ĠBalance": 26193, + "stress": 26194, + "bearing": 26195, + "Ġ216": 26196, + "ĠGuards": 26197, + "Ġlinebackers": 26198, + "Ġoffending": 26199, + "Ġsands": 26200, + "umbnail": 26201, + "atorial": 26202, + "Ġliberties": 26203, + "ĠGW": 26204, + "ĠPulitzer": 26205, + "ĠAlvin": 26206, + "ĠFAC": 26207, + "ĠStrategies": 26208, + "Ġreiter": 26209, + "ĠRestaur": 26210, + "ĠLithuania": 26211, + "ĠSwanson": 26212, + "terror": 26213, + "ĠMaurit": 26214, + "Ġparadise": 26215, + "zzle": 26216, + "owment": 26217, + "ĠWP": 26218, + "Ġsodium": 26219, + "Ġfuturistic": 26220, + "Ġdots": 26221, + "Anthony": 26222, + "Though": 26223, + "Ġstripes": 26224, + "Ġorig": 26225, + "ultz": 26226, + "Ġ340": 26227, + "KK": 26228, + "umer": 26229, + "ivery": 26230, + "Ġplacebo": 26231, + "Ġdemocrat": 26232, + "Ġsubmerged": 26233, + "ĠHidden": 26234, + "pieces": 26235, + "Ġasteroid": 26236, + "ĠGraphic": 26237, + "Ġadvert": 26238, + "sil": 26239, + "Ġdreaming": 26240, + "Ġnationality": 26241, + "Ġfostering": 26242, + "daughter": 26243, + "ĠSavings": 26244, + "Ġmischief": 26245, + "ĠClair": 26246, + "ĠBundy": 26247, + "Ġblatant": 26248, + "Ġtabs": 26249, + "qa": 26250, + "severe": 26251, + "attered": 26252, + "Ġgreed": 26253, + "Ġresembles": 26254, + "Ġnominal": 26255, + "Ġineligible": 26256, + "wealth": 26257, + "fax": 26258, + "payers": 26259, + "Ġdisplacement": 26260, + "itute": 26261, + "Ġunpleasant": 26262, + "ĠPom": 26263, + "lif": 26264, + "edo": 26265, + "ĠNP": 26266, + "Inter": 26267, + "Ġcohort": 26268, + "ĠStacy": 26269, + "ĠDai": 26270, + "Ġhistories": 26271, + "alin": 26272, + "273": 26273, + "Ġdram": 26274, + "ĠKand": 26275, + "Ġexpectancy": 26276, + "ansson": 26277, + "Ġlimbo": 26278, + "ĠPolar": 26279, + "Ġdivine": 26280, + "oused": 26281, + "Ġshel": 26282, + "ĠProblem": 26283, + "achment": 26284, + "Ġâĸł": 26285, + "shoot": 26286, + "antam": 26287, + "ĠHerz": 26288, + "Ġ157": 26289, + "Ġpreventive": 26290, 
+ "keye": 26291, + "Sing": 26292, + "Ġcharacteristic": 26293, + "Ġcasually": 26294, + "ĠTaiwanese": 26295, + "md": 26296, + "ĠHubbard": 26297, + "imon": 26298, + "Ġsect": 26299, + "148": 26300, + "Ġmartyr": 26301, + "stud": 26302, + "Ġcongrat": 26303, + "ĠSWAT": 26304, + "ĠTheory": 26305, + "INAL": 26306, + "opping": 26307, + "ply": 26308, + "ĠKindle": 26309, + "uu": 26310, + "ĠLith": 26311, + "kaya": 26312, + "ĠActivity": 26313, + "uously": 26314, + "ĠJeb": 26315, + "tell": 26316, + "ĠSpin": 26317, + "ĠExplorer": 26318, + "Ġfolded": 26319, + "ĠCanterbury": 26320, + "ĠStur": 26321, + "Ġminiature": 26322, + "Ġmultif": 26323, + "ĠPressure": 26324, + "angling": 26325, + "ĠOverse": 26326, + "Ġresides": 26327, + "Ġimpressions": 26328, + "Ġauthored": 26329, + "265": 26330, + "Ġallergies": 26331, + "143": 26332, + "ĠJi": 26333, + "Ġsticker": 26334, + "ĠAccord": 26335, + "Ġcaste": 26336, + "Ġseparates": 26337, + "ĠFein": 26338, + "Daily": 26339, + "179": 26340, + "ĠScores": 26341, + "ĠAuction": 26342, + "hea": 26343, + "Ġdisclosing": 26344, + "ĠTacoma": 26345, + "Ġverse": 26346, + "ĠBeg": 26347, + "Ġfabrics": 26348, + "aez": 26349, + "Ġattachment": 26350, + "isy": 26351, + "Christ": 26352, + "Ġaddictive": 26353, + "Ġvir": 26354, + "Week": 26355, + "ĠPlum": 26356, + "croft": 26357, + "itivity": 26358, + "ĠExhibition": 26359, + "Ġbruised": 26360, + "Ġmimic": 26361, + "rers": 26362, + "Ġanal": 26363, + "Ġunintended": 26364, + "Ġpall": 26365, + "atts": 26366, + "ĠWarn": 26367, + "Ġslows": 26368, + "WH": 26369, + "Ġembro": 26370, + "nec": 26371, + "Ġ168": 26372, + "285": 26373, + "ologic": 26374, + "Ġhob": 26375, + "ĠPeel": 26376, + "Mill": 26377, + "eps": 26378, + "Ġrobbers": 26379, + "ĠDahl": 26380, + "semble": 26381, + "omics": 26382, + "toe": 26383, + "ĠLoch": 26384, + "Ġreproduction": 26385, + "ĠCullen": 26386, + "Ġimplants": 26387, + "Ġwow": 26388, + "ĠSTATE": 26389, + "vt": 26390, + "Ġdepleted": 26391, + "Ġbreweries": 26392, + "Ġhateful": 26393, + "Ġgast": 26394, + "Ġhollow": 26395, + "Ġradically": 26396, + "ographed": 26397, + "ĠFog": 26398, + "onian": 26399, + "ĠSequ": 26400, + "Ġdisrespectful": 26401, + "Dis": 26402, + "ĠExper": 26403, + "pron": 26404, + "ĠAmelia": 26405, + "ĠSage": 26406, + "bath": 26407, + "Ġtransformative": 26408, + "Ġtremendously": 26409, + "Ġpillow": 26410, + "ĠNormal": 26411, + "Cont": 26412, + "ĠMedic": 26413, + "educated": 26414, + "Ġredesigned": 26415, + "Ġkneeling": 26416, + "Ġinh": 26417, + "Ġroofs": 26418, + "Ġhandmade": 26419, + "Ġprotracted": 26420, + "ĠIsn": 26421, + "ĠCapacity": 26422, + "Ġsquash": 26423, + "ĠVega": 26424, + "Ġfats": 26425, + "ĠCertified": 26426, + "ointed": 26427, + "Ġpricey": 26428, + "ĠBasil": 26429, + "Ġfreezer": 26430, + "Ġscent": 26431, + "Ġpizz": 26432, + "ĠArd": 26433, + "Ġdistractions": 26434, + "Ġviolently": 26435, + "ĠHess": 26436, + "Ġfunc": 26437, + "Ġundert": 26438, + "Ġrejuven": 26439, + "Ġdisbelief": 26440, + "cluded": 26441, + "named": 26442, + "ĠFailure": 26443, + "kus": 26444, + "Ġhostages": 26445, + "ĠSahara": 26446, + "Ġ1944": 26447, + "Leary": 26448, + "ĠPrel": 26449, + "enza": 26450, + "ĠAlly": 26451, + "ĠKak": 26452, + "Ġcounselors": 26453, + "ĠGale": 26454, + "ĠHok": 26455, + "ĠSold": 26456, + "Ġhacker": 26457, + "Ġhun": 26458, + "Ġbung": 26459, + "Ġdeclares": 26460, + "Ġinfringement": 26461, + "OOD": 26462, + "Ġdoub": 26463, + "jam": 26464, + "Ġallergy": 26465, + "ĠShipping": 26466, + "Ġmedic": 26467, + "Ġaccommod": 26468, + "Ġdocumenting": 26469, + "Ġcompanions": 26470, + "Ġmodelling": 26471, + "Ġcarriage": 26472, + 
"ĠCherokee": 26473, + "Ġtresp": 26474, + "Ġtaxable": 26475, + "ĠActivities": 26476, + "ĠCrane": 26477, + "bots": 26478, + "ĠRusso": 26479, + "Ġstocked": 26480, + "ervation": 26481, + "Ġcoffin": 26482, + "aign": 26483, + "guards": 26484, + "Ġonwards": 26485, + "Ġfrank": 26486, + ".*": 26487, + "unic": 26488, + "Ġcens": 26489, + "enic": 26490, + "ruit": 26491, + "rained": 26492, + "Ġadapting": 26493, + "aments": 26494, + "Ġstagnant": 26495, + "azaar": 26496, + "ĠHarlem": 26497, + "Ġ158": 26498, + "ysis": 26499, + "Ġbraking": 26500, + "Ġdipping": 26501, + "Ġclan": 26502, + "ĠShu": 26503, + "Ġprops": 26504, + "qualified": 26505, + "Ġmistakenly": 26506, + "ĠStalin": 26507, + "Ġaddicts": 26508, + "ĠCALL": 26509, + "ropolis": 26510, + "aten": 26511, + "pec": 26512, + "ĠDro": 26513, + "ĠFellowship": 26514, + "ĠSupporting": 26515, + "loc": 26516, + "uben": 26517, + "499": 26518, + "Bro": 26519, + "Ġpots": 26520, + "Ġchunks": 26521, + "wr": 26522, + "ĠColonial": 26523, + "ĠArchitecture": 26524, + "Ġconstrained": 26525, + "Ġenvelop": 26526, + "ĠIronically": 26527, + "aban": 26528, + "Ġapparatus": 26529, + "Ġcue": 26530, + "Ġborne": 26531, + "ĠRoz": 26532, + "ilton": 26533, + "Ġtheoretical": 26534, + "ĠWatching": 26535, + "Ġfuck": 26536, + "ĠSilk": 26537, + "ĠSTE": 26538, + "bler": 26539, + "ĠPOST": 26540, + "ĠUpton": 26541, + "Ġsummons": 26542, + "ĠCum": 26543, + "ĠKL": 26544, + "Ġrelaxation": 26545, + "ĠDuff": 26546, + "Ġincumb": 26547, + "ĠRedd": 26548, + "Ġstature": 26549, + "Ġcanv": 26550, + "added": 26551, + "Ġremedies": 26552, + "ĠISO": 26553, + "ĠDecker": 26554, + "Ġafloat": 26555, + "Ġstartling": 26556, + "ĠBethlehem": 26557, + "Ġrealizes": 26558, + "find": 26559, + "ĠAra": 26560, + "Ġphased": 26561, + "arov": 26562, + "Ġhalting": 26563, + "ĠWindow": 26564, + "Ġdentist": 26565, + "Ġtumble": 26566, + "Ġvalidation": 26567, + "Ġcarve": 26568, + "ĠIPS": 26569, + "Ġirrit": 26570, + "ĠEssential": 26571, + "Ġfluids": 26572, + "rons": 26573, + "Ġimplant": 26574, + "Ġnuisance": 26575, + "ĠShelley": 26576, + "ĠGemini": 26577, + "Ġpharmac": 26578, + "iction": 26579, + "Ġtaped": 26580, + "ĠGovernments": 26581, + "ruly": 26582, + "Ġscant": 26583, + "Ġprominently": 26584, + "Ġreim": 26585, + "unning": 26586, + "arted": 26587, + "ĠMatters": 26588, + "Ġ1918": 26589, + "ĠPros": 26590, + "atel": 26591, + "ĠBattalion": 26592, + "onduct": 26593, + "talk": 26594, + "ĠTinder": 26595, + "ĠInstant": 26596, + "ĠKern": 26597, + "Ġbuckets": 26598, + "ĠGroups": 26599, + "Ġmetaphor": 26600, + "cloud": 26601, + "ĠString": 26602, + "Ohio": 26603, + "Ġcaffeine": 26604, + "Old": 26605, + "Ġdefinite": 26606, + "ĠNikola": 26607, + "ĠLords": 26608, + "icol": 26609, + ")?": 26610, + "Ġenjoyment": 26611, + "Ġfamine": 26612, + "Ġdefinitions": 26613, + "ĠJem": 26614, + "Check": 26615, + "Ġaiding": 26616, + "ĠMé": 26617, + "Ġrenewables": 26618, + "Ġsightings": 26619, + "footed": 26620, + "Box": 26621, + "Ġgoats": 26622, + "Ġshack": 26623, + "AX": 26624, + "ĠMonk": 26625, + "ĠGraduate": 26626, + "Ġmeats": 26627, + "handle": 26628, + "147": 26629, + "rys": 26630, + "Ġunsub": 26631, + "Pont": 26632, + "uble": 26633, + "440": 26634, + "Ġeyel": 26635, + "thro": 26636, + "Ġcreep": 26637, + "^^^^": 26638, + "Ġpopcorn": 26639, + "Ġcompression": 26640, + "sal": 26641, + "ouf": 26642, + "Ġrepairing": 26643, + "Think": 26644, + "Ġdoubtful": 26645, + "ĠLooks": 26646, + "Ġtaller": 26647, + "Ġsul": 26648, + "sf": 26649, + "give": 26650, + "ĠGau": 26651, + "Ġrevered": 26652, + "EMBER": 26653, + "Ġsloppy": 26654, + "ersen": 26655, + "Ġvitamins": 
26656, + "ĠImprovement": 26657, + "Ġprogresses": 26658, + "Ġdiploma": 26659, + "semb": 26660, + "ustain": 26661, + "Ġchant": 26662, + "Ġbumped": 26663, + "Ġsabotage": 26664, + "nant": 26665, + "Ġrabbit": 26666, + "Ġdividing": 26667, + "ĠDefender": 26668, + "Ġlik": 26669, + "Ġirrespective": 26670, + "cade": 26671, + "ĠSter": 26672, + "touch": 26673, + "EMA": 26674, + "Ġparted": 26675, + "ĠBAR": 26676, + "hung": 26677, + "Ġannoyed": 26678, + "Ġhinder": 26679, + "Ġexamines": 26680, + "oan": 26681, + "ĠBoe": 26682, + "Ġaggreg": 26683, + "ĠChu": 26684, + "ĠUCS": 26685, + "IGHTS": 26686, + "pez": 26687, + "ĠUNESCO": 26688, + "Ġwindshield": 26689, + "Martin": 26690, + "Ġwithhold": 26691, + "does": 26692, + "Ġbruising": 26693, + "Ġdeterior": 26694, + "bourg": 26695, + "ĠTowers": 26696, + "JD": 26697, + "England": 26698, + "Ġequivalents": 26699, + "Ġrazor": 26700, + "Ġreassuring": 26701, + "Ġident": 26702, + "Ġ208": 26703, + "reath": 26704, + "ceans": 26705, + "Ġpatrolling": 26706, + "eve": 26707, + "pots": 26708, + "itative": 26709, + "Ġsided": 26710, + "Ġsofa": 26711, + "Ġunborn": 26712, + "Ġaug": 26713, + "Ġperpetual": 26714, + "effect": 26715, + "represented": 26716, + "Ġrails": 26717, + "ĠSummers": 26718, + "ĠMOR": 26719, + "ĠSlow": 26720, + "ĠExpert": 26721, + "Ġshameful": 26722, + "Ġaudits": 26723, + "Sl": 26724, + "ĠBurr": 26725, + "adow": 26726, + "ĠWAY": 26727, + "anic": 26728, + "ĠIslamists": 26729, + "ĠStranger": 26730, + "pse": 26731, + "amaz": 26732, + "ĠPeggy": 26733, + "ĠSeventh": 26734, + "Ġscreenplay": 26735, + "ĠGriff": 26736, + "Ireland": 26737, + "142": 26738, + "Ġneural": 26739, + "ĠFernand": 26740, + "ainment": 26741, + "ĠMigration": 26742, + "ureen": 26743, + "ĠSCH": 26744, + "Sullivan": 26745, + "ĠWag": 26746, + "ĠREG": 26747, + "Ġ420": 26748, + "inky": 26749, + "ĠNewspaper": 26750, + "School": 26751, + "Ok": 26752, + "ĠKrishna": 26753, + "Ġ480": 26754, + "erald": 26755, + "Ġskipping": 26756, + "Ġharrowing": 26757, + "158": 26758, + "rogen": 26759, + "Ġbetrayal": 26760, + "Ġculmination": 26761, + "ĠCirc": 26762, + "Ġ211": 26763, + "stro": 26764, + "ĠTrace": 26765, + "Ġheaviest": 26766, + "td": 26767, + "ĠHenri": 26768, + "epend": 26769, + "RB": 26770, + "arella": 26771, + "umbai": 26772, + "Ġcrem": 26773, + "ĠDistribut": 26774, + "ruff": 26775, + "Ġscreams": 26776, + "Ġscathing": 26777, + "girls": 26778, + "Ġtiles": 26779, + "ĠEvil": 26780, + "usp": 26781, + "Ġknowledgeable": 26782, + "Ġrestitution": 26783, + "ĠWiFi": 26784, + "Ġitiner": 26785, + "exper": 26786, + "oris": 26787, + "ĠPokémon": 26788, + "iane": 26789, + "produ": 26790, + "ĠAchievement": 26791, + "Ġbrunt": 26792, + "ĠSurgery": 26793, + "Ġpragmatic": 26794, + "Ber": 26795, + "ĠKejriwal": 26796, + "cus": 26797, + "Ġconsensual": 26798, + "acet": 26799, + "ĠSecondly": 26800, + "Ġdivul": 26801, + "uca": 26802, + "Ġbusted": 26803, + "emies": 26804, + "ĠMou": 26805, + "Ġ217": 26806, + "Ġexcludes": 26807, + "ĠSamoa": 26808, + "Ġlofty": 26809, + "ĠSic": 26810, + "ĠRemem": 26811, + "dn": 26812, + "Ġeradicate": 26813, + "Ġpies": 26814, + "Ġscenery": 26815, + "ATTLE": 26816, + "ĠWAS": 26817, + "Ġinnovate": 26818, + "ĠEverest": 26819, + "Ġsynonymous": 26820, + "izen": 26821, + "Ġeuth": 26822, + "ĠFIA": 26823, + "ITIES": 26824, + "ĠSuddenly": 26825, + "Ġforay": 26826, + "pell": 26827, + "ÄŁ": 26828, + "licensed": 26829, + "Ġfra": 26830, + "Ġblasting": 26831, + "autical": 26832, + "ĠBlizzard": 26833, + "orer": 26834, + "Ġchili": 26835, + "ĠSylvia": 26836, + "except": 26837, + "tec": 26838, + "ĠResistance": 26839, + "young": 
26840, + "usions": 26841, + "iotic": 26842, + "ĠDreams": 26843, + "ĠArchives": 26844, + "Ġunleash": 26845, + "ĠPract": 26846, + "Ġlikened": 26847, + "Ġga": 26848, + "Ġdisappearing": 26849, + "Ġunnoticed": 26850, + "Ġfrightened": 26851, + "arms": 26852, + "ĠCAD": 26853, + "Ġcoloured": 26854, + "ĠSigns": 26855, + "oing": 26856, + "Ġvodka": 26857, + "ruption": 26858, + "otions": 26859, + "isal": 26860, + "ĠBecome": 26861, + "Ġswoop": 26862, + "reating": 26863, + "Ġchoking": 26864, + "Ġunforgettable": 26865, + "258": 26866, + "packs": 26867, + "345": 26868, + "ĠAutumn": 26869, + "Ġther": 26870, + "399": 26871, + "ĠFaculty": 26872, + "Ġ1933": 26873, + "ĠNormally": 26874, + "orge": 26875, + "ĠTess": 26876, + "ĠChrom": 26877, + "Ġscripts": 26878, + "Ġbiking": 26879, + "Act": 26880, + "Ġgrazing": 26881, + "ĠLabrador": 26882, + "ĠLey": 26883, + "Ġwandering": 26884, + "Ġfend": 26885, + "ĠPolk": 26886, + "ĠKeane": 26887, + "ĠBeef": 26888, + "elope": 26889, + "ĠApproximately": 26890, + "Ġ1952": 26891, + "personal": 26892, + "Ġhistorians": 26893, + "ĠMcDonnell": 26894, + "must": 26895, + "LES": 26896, + "iking": 26897, + "Ġtherm": 26898, + "Ġhumane": 26899, + "Ġcrowdfunding": 26900, + "ĠBenefits": 26901, + "Land": 26902, + "Ġanalog": 26903, + "agency": 26904, + "ĠCrowley": 26905, + "Ġbirths": 26906, + "Ġobj": 26907, + "Ġfren": 26908, + "ĠSalmon": 26909, + "bies": 26910, + "Ġreve": 26911, + "216": 26912, + "Ġbetrayed": 26913, + "Ġinduced": 26914, + "acles": 26915, + "Ġtrad": 26916, + "Ġforgiven": 26917, + "Ġearners": 26918, + "208": 26919, + "Ġxen": 26920, + "Ġunle": 26921, + "Ġnecklace": 26922, + "Ġgravel": 26923, + "Ġsalads": 26924, + "Ġgrooming": 26925, + "California": 26926, + "Ġpossessed": 26927, + "Ġproclamation": 26928, + "Ġsequences": 26929, + "ream": 26930, + "FOX": 26931, + "arkin": 26932, + "ĠTRAN": 26933, + "Ġpurs": 26934, + "ĠLoans": 26935, + "Ġsacrificed": 26936, + "Ġiceberg": 26937, + "Phill": 26938, + "Ġgalvan": 26939, + "Ġsmugglers": 26940, + "formation": 26941, + "onson": 26942, + "ĠVaughn": 26943, + "Ġdoctrine": 26944, + "ĠEyes": 26945, + "Ġunmanned": 26946, + "states": 26947, + "Ġdetermin": 26948, + "almost": 26949, + "Ġeviction": 26950, + "Ġtid": 26951, + "ARR": 26952, + "Ġcooks": 26953, + "Bad": 26954, + "ĠCamb": 26955, + "Ġlinear": 26956, + "229": 26957, + "ĠCooke": 26958, + "ĠPurch": 26959, + "join": 26960, + "ĠCult": 26961, + "ĠRefugee": 26962, + "Ġslamming": 26963, + "ĠðŁij": 26964, + "Ġpedal": 26965, + "ĠVeronica": 26966, + "Ġlandowners": 26967, + "ĠYel": 26968, + "ĠWorkshop": 26969, + "antic": 26970, + "Ġdysfunction": 26971, + "Ġ229": 26972, + "Ġculturally": 26973, + "Ġinfuri": 26974, + "ĠEck": 26975, + "sem": 26976, + "Ġwired": 26977, + "ĠWerner": 26978, + "lov": 26979, + "ĠJasper": 26980, + "Ġvehemently": 26981, + "ĠSpy": 26982, + "lift": 26983, + "ĠNab": 26984, + "ĠPound": 26985, + "ĠHanna": 26986, + "Ġleveled": 26987, + "WOOD": 26988, + "tm": 26989, + "ĠKitt": 26990, + "Ġconve": 26991, + "nat": 26992, + "Ġjog": 26993, + "IVER": 26994, + "Ġmemes": 26995, + "Ġseaw": 26996, + "ector": 26997, + "Ġsprayed": 26998, + "Ġvaccinated": 26999, + "Europe": 27000, + "Ġmustard": 27001, + "ĠMahm": 27002, + "Ġ214": 27003, + "Research": 27004, + "iminary": 27005, + "Ġconcerted": 27006, + "Detroit": 27007, + "Ġkios": 27008, + "Ġplummet": 27009, + "Ġvisuals": 27010, + "247": 27011, + "Ġ228": 27012, + "development": 27013, + "ĠPascal": 27014, + "acial": 27015, + "ĠSeasons": 27016, + "ĠTL": 27017, + "480": 27018, + "ĠReader": 27019, + "Ġexpulsion": 27020, + "Ġchoked": 27021, + "Ġdevotion": 
27022, + "ĠSTAT": 27023, + "urred": 27024, + "Ġfascinated": 27025, + "Ġstealth": 27026, + "NL": 27027, + "Ġbooster": 27028, + "Kat": 27029, + "ĠPriebus": 27030, + "Ġaux": 27031, + "ĠHate": 27032, + "ĠThing": 27033, + "Ġabnormal": 27034, + "Ġcalmly": 27035, + "Ġdedicate": 27036, + "cause": 27037, + "Ġisolate": 27038, + "ĠPai": 27039, + "Ġsuspensions": 27040, + "Ġpoisoned": 27041, + "ission": 27042, + "Ġprohibiting": 27043, + "353": 27044, + "banks": 27045, + "Ġkissed": 27046, + "ĠBegin": 27047, + "atis": 27048, + "LI": 27049, + "Ġshaft": 27050, + "ĠGuth": 27051, + "ĠBoo": 27052, + "Ġcinnamon": 27053, + "Ġverbally": 27054, + "ĠRabbi": 27055, + "Ġmonsters": 27056, + "done": 27057, + "ĠClyde": 27058, + "Ġspar": 27059, + "ĠCage": 27060, + "ĠPersons": 27061, + "305": 27062, + "ĠMons": 27063, + "Ġjealous": 27064, + "Ġswirling": 27065, + "know": 27066, + "Ġprote": 27067, + "Ġcruising": 27068, + "Ġduly": 27069, + "Ġchapel": 27070, + "Ġgroove": 27071, + "bps": 27072, + "ĠKelvin": 27073, + "iom": 27074, + "aer": 27075, + "bomb": 27076, + "Christian": 27077, + "Ġgigs": 27078, + "+.": 27079, + "ĠWei": 27080, + "Ġfarmland": 27081, + "otally": 27082, + "Ġequitable": 27083, + "ĠCBO": 27084, + "chool": 27085, + "amara": 27086, + "Ġwealthiest": 27087, + "ĠMeans": 27088, + "Ġ235": 27089, + "ĠUk": 27090, + "steps": 27091, + "raham": 27092, + "nerg": 27093, + "Ġclad": 27094, + "Ġsled": 27095, + "ĠMorrow": 27096, + "152": 27097, + "ĠRece": 27098, + "Ġplausible": 27099, + "Ġbisexual": 27100, + "artments": 27101, + "Ġveh": 27102, + "ĠLoft": 27103, + "bly": 27104, + "ĠCONC": 27105, + "automatic": 27106, + "Ġmasterpiece": 27107, + "ĠSpringer": 27108, + "Ġtendencies": 27109, + "Ro": 27110, + "Ġresentment": 27111, + "Ġadversely": 27112, + "Ġbandwidth": 27113, + "ĠDAV": 27114, + "Ġtun": 27115, + "Ġpuppies": 27116, + "ĠBundes": 27117, + "ĠHort": 27118, + "ĠGarfield": 27119, + "Ġenlist": 27120, + "Ġmont": 27121, + "gd": 27122, + "Ġrooting": 27123, + "Dream": 27124, + "Ġfulfillment": 27125, + "chal": 27126, + "182": 27127, + "prop": 27128, + "159": 27129, + "Ġcourtyard": 27130, + "iard": 27131, + "ĠSle": 27132, + "Ġoperative": 27133, + "Ġpublishes": 27134, + "ĠProposition": 27135, + "Ġcritique": 27136, + "Ġredist": 27137, + "wang": 27138, + "ĠNep": 27139, + "DD": 27140, + "Ġbonding": 27141, + "141": 27142, + "ĠAssault": 27143, + "-'": 27144, + "Ġlodging": 27145, + "itters": 27146, + "cigarettes": 27147, + "Ġ__": 27148, + "ĠLaf": 27149, + "GF": 27150, + "ĠAnat": 27151, + "ĠStephan": 27152, + "214": 27153, + "ĠKass": 27154, + "Ġviz": 27155, + "Ġpiling": 27156, + "Ġfugitive": 27157, + "ĠCurrency": 27158, + "ĠCrypto": 27159, + "Ġfaux": 27160, + "ĠPing": 27161, + "ĠLia": 27162, + "igl": 27163, + "Ġadversaries": 27164, + "ĠYPG": 27165, + "ĠComb": 27166, + "ĠYar": 27167, + "heny": 27168, + "Ġoverhe": 27169, + "Fest": 27170, + "emy": 27171, + "Ever": 27172, + "Ġ370": 27173, + "Ġsecretive": 27174, + "ĠSEN": 27175, + "ĠMEM": 27176, + "PRESS": 27177, + "ĠBirth": 27178, + "kos": 27179, + "Ġprecarious": 27180, + "irting": 27181, + "ĠUI": 27182, + "Ġoccupying": 27183, + "olute": 27184, + "Ġperiodic": 27185, + "eon": 27186, + "iens": 27187, + "ĠRH": 27188, + "Win": 27189, + "Ġplaybook": 27190, + "Ġexodus": 27191, + "ĠSkinner": 27192, + "Ġorderly": 27193, + "ĠVed": 27194, + "ouses": 27195, + "Ġescal": 27196, + "Ġbenign": 27197, + "Ġbots": 27198, + "ĠWhis": 27199, + "Ġappra": 27200, + "FOR": 27201, + "ĠChromebook": 27202, + "_____": 27203, + "990": 27204, + "athed": 27205, + "Ġspirited": 27206, + "illi": 27207, + "Ġbicycles": 27208, + 
"orse": 27209, + "ifestyle": 27210, + "orno": 27211, + "ĠDept": 27212, + "JA": 27213, + "Ġnausea": 27214, + "Ġpervasive": 27215, + "velop": 27216, + "commun": 27217, + "ĠUniversities": 27218, + "Ġremnants": 27219, + "Ġdisarm": 27220, + "ĠBoots": 27221, + "Ġprin": 27222, + "...\"": 27223, + "quila": 27224, + "Ġcautiously": 27225, + "uper": 27226, + "onto": 27227, + "din": 27228, + "Ġvelocity": 27229, + "Ġconspiring": 27230, + "ĠMX": 27231, + "Ġemphasizing": 27232, + "Ġâĸ": 27233, + "ĠStam": 27234, + "Ġspices": 27235, + "Ġairplanes": 27236, + "uty": 27237, + "culture": 27238, + "ĠPetr": 27239, + "Ġglor": 27240, + "ĠExcel": 27241, + "ĠSpeech": 27242, + "Ġharmless": 27243, + "ĠPend": 27244, + "ĠCrossing": 27245, + "ĠDocument": 27246, + "Ġramifications": 27247, + "ĠCroatian": 27248, + "ĠKiller": 27249, + "Ġmultim": 27250, + "Ġdiscontinued": 27251, + "Ġcherished": 27252, + "ĠMaker": 27253, + "aspers": 27254, + "ĠBlooming": 27255, + "ĠMata": 27256, + "offic": 27257, + "Ġsettlers": 27258, + "ĠPlenty": 27259, + "ĠInstitutes": 27260, + "ĠArpaio": 27261, + "Pool": 27262, + "ĠSubst": 27263, + "Ġ380": 27264, + "Ġdecidedly": 27265, + "ollah": 27266, + "Den": 27267, + "ĠJiang": 27268, + "ĠAmos": 27269, + "Grand": 27270, + "ĠTurns": 27271, + "meyer": 27272, + "Ġconducive": 27273, + "Ġpoignant": 27274, + "abortion": 27275, + "Ġnotebook": 27276, + "Ġshelling": 27277, + "common": 27278, + "ĠPavel": 27279, + "Ġhumid": 27280, + "Ġinappropriately": 27281, + "????": 27282, + "Ġsoar": 27283, + "Ġdynasty": 27284, + "Ġresearched": 27285, + "ĠYon": 27286, + "Ġmaple": 27287, + "Ġwedge": 27288, + "mass": 27289, + "ĠTM": 27290, + "USE": 27291, + "eln": 27292, + "Ġgloss": 27293, + "rigan": 27294, + "steen": 27295, + "ĠDeV": 27296, + "Ġdebacle": 27297, + "Christmas": 27298, + "Ġtweaks": 27299, + "grab": 27300, + "Ġprofoundly": 27301, + "Ġcampaigner": 27302, + "ĠSeal": 27303, + "Ġiteration": 27304, + "Ġsigh": 27305, + "Ġunfounded": 27306, + "Ġframing": 27307, + "Ġrecognizable": 27308, + "Ġseizing": 27309, + "legal": 27310, + "Ġproportions": 27311, + "omers": 27312, + "rek": 27313, + "Ġscreenshot": 27314, + "itsu": 27315, + "ĠOG": 27316, + "ĠYing": 27317, + "ĠMississ": 27318, + "295": 27319, + "Ġlandsl": 27320, + "Ġpsychiatrist": 27321, + "sov": 27322, + "arine": 27323, + "Ju": 27324, + "Ġflo": 27325, + "apple": 27326, + "hof": 27327, + "wig": 27328, + "ĠENT": 27329, + "Ġenthusiast": 27330, + "Such": 27331, + "ĠArtificial": 27332, + "happy": 27333, + "oton": 27334, + "ĠFram": 27335, + "ĠRemove": 27336, + "Ġsmear": 27337, + "Ġjer": 27338, + "Ġtopp": 27339, + "Ġimbalance": 27340, + "ĠWords": 27341, + "Ġcoffers": 27342, + "olina": 27343, + "Ġrigged": 27344, + "uction": 27345, + "idding": 27346, + "Ġdispensaries": 27347, + "Ġdermat": 27348, + "Ġshutter": 27349, + "idental": 27350, + "Ġcontinu": 27351, + "Ġhumility": 27352, + "Ġbulbs": 27353, + "Ġ207": 27354, + "lass": 27355, + "ĠBeirut": 27356, + "ĠUlt": 27357, + "urry": 27358, + "NEWS": 27359, + "Ġfeminine": 27360, + "Ġsimulated": 27361, + "Ġcharger": 27362, + "mom": 27363, + "ĠCreed": 27364, + "Ġwolves": 27365, + "essions": 27366, + "created": 27367, + "ifiers": 27368, + "Ġdissemin": 27369, + "ĠDarling": 27370, + "umann": 27371, + "Ġmarrying": 27372, + "Ġshred": 27373, + "avin": 27374, + "Ġbudgetary": 27375, + "Ġmedicinal": 27376, + "ulin": 27377, + "seys": 27378, + "agues": 27379, + "Ġextracted": 27380, + "ĠFlower": 27381, + "Ġcontinents": 27382, + "ĠWish": 27383, + "Ġdivides": 27384, + "ĠDing": 27385, + "Ġinsulation": 27386, + "respect": 27387, + "ĠABS": 27388, + 
"Ġreconcile": 27389, + "keep": 27390, + "ILD": 27391, + "Ġgenome": 27392, + "Ġ410": 27393, + "ĠSweep": 27394, + "Ġharass": 27395, + "Ġfrantic": 27396, + "ĠEE": 27397, + "dad": 27398, + "Ġaperture": 27399, + "rought": 27400, + "Ġhugs": 27401, + "Ġdrying": 27402, + "Ġoverrun": 27403, + "Space": 27404, + "Ġperiodically": 27405, + "Ġbrightness": 27406, + "atched": 27407, + "kee": 27408, + "ĠITS": 27409, + "ĠSpokane": 27410, + "ĠSeaf": 27411, + "Ġdesks": 27412, + "ĠEisen": 27413, + "ĠOPS": 27414, + "Ġcider": 27415, + "Ġacceler": 27416, + "ĠAthlet": 27417, + "2008": 27418, + "ĠGuid": 27419, + "ĠManip": 27420, + "Ġmould": 27421, + "Ġmisguided": 27422, + "Ġbrow": 27423, + "Ġmanagerial": 27424, + "Ġhugged": 27425, + "Ġfurnish": 27426, + "ĠHarmony": 27427, + "ĠHebrew": 27428, + "Ġtyph": 27429, + "Ġdecreases": 27430, + "Ġimpetus": 27431, + "Ġcontagious": 27432, + "Ġunch": 27433, + "209": 27434, + "Ġswell": 27435, + "ĠHuffington": 27436, + "Ġpubs": 27437, + "Ġadequ": 27438, + "amoto": 27439, + "rir": 27440, + "Ġpristine": 27441, + "Ġanx": 27442, + "ĠSecure": 27443, + "Ġenrichment": 27444, + "ĠVAL": 27445, + "Ġsummed": 27446, + "Ġconfidently": 27447, + "ĠProfit": 27448, + "ĠFrog": 27449, + "ĠLena": 27450, + "ĠFUN": 27451, + "Ġbruises": 27452, + "Ġuproar": 27453, + "coll": 27454, + "ĠImpro": 27455, + "Ġflair": 27456, + "146": 27457, + "ĠBrend": 27458, + "Ġ166": 27459, + "Ġenhances": 27460, + "ĠDent": 27461, + "Ġdegener": 27462, + "Ġproponents": 27463, + "ĠInspired": 27464, + "Ġramps": 27465, + "Ġwisely": 27466, + "Western": 27467, + "Ġtart": 27468, + "Ġsteered": 27469, + "Ġtreason": 27470, + "dropping": 27471, + "Ġtransc": 27472, + "ĠScarlett": 27473, + "ĠEzekiel": 27474, + "Ġpivot": 27475, + "esame": 27476, + "Show": 27477, + "Ġdiscontent": 27478, + "ĠJudith": 27479, + "ĠPutting": 27480, + "Ġblessings": 27481, + "Ġhardcore": 27482, + "Ġtray": 27483, + "Ġdiscern": 27484, + "oley": 27485, + "ouk": 27486, + "Ġwil": 27487, + "Ġintolerance": 27488, + "157": 27489, + "ĠRelative": 27490, + "ĠLynd": 27491, + "Ġwhistleblower": 27492, + "Ġincon": 27493, + "ĠTao": 27494, + "Ġindefinite": 27495, + "Ġguardians": 27496, + "Ġagon": 27497, + "ĠInstruments": 27498, + "Ġexistential": 27499, + "AAF": 27500, + "vind": 27501, + "Ġbrazen": 27502, + "condition": 27503, + "Ġratified": 27504, + "fam": 27505, + "ĠHin": 27506, + "ĠMichaels": 27507, + "204": 27508, + "ĠKats": 27509, + "ITS": 27510, + "ISON": 27511, + "prone": 27512, + "Ġboiling": 27513, + "Ġprolong": 27514, + "Ġnoticing": 27515, + "resident": 27516, + "brance": 27517, + "ĠFolk": 27518, + "Ġdesserts": 27519, + "uton": 27520, + "Web": 27521, + "ĠLongh": 27522, + "ĠReef": 27523, + "Going": 27524, + "ĠCarb": 27525, + "Sur": 27526, + "complete": 27527, + "ĠSloan": 27528, + "ĠClubs": 27529, + "ĠSadd": 27530, + "Ġshrugged": 27531, + "Ġedible": 27532, + "ĠTyp": 27533, + "thal": 27534, + "ĠRocks": 27535, + "ĠClive": 27536, + "Ġkidding": 27537, + "ĠCrom": 27538, + "ĠTurks": 27539, + "ĠWak": 27540, + "Ġeyewitness": 27541, + "ĠHass": 27542, + "collar": 27543, + "Ġsucceeding": 27544, + "Ġinsert": 27545, + "Ġ224": 27546, + "ĠBret": 27547, + "Ġneurological": 27548, + "Ġrewrite": 27549, + "imil": 27550, + "ultimate": 27551, + "ĠJeremiah": 27552, + "Ġliaison": 27553, + "Ġpedd": 27554, + "direct": 27555, + "ĠYi": 27556, + "ĠMAD": 27557, + "ĠOrion": 27558, + "oyd": 27559, + "ĠLOC": 27560, + "release": 27561, + "Ġinvestigates": 27562, + "ĠApache": 27563, + "û": 27564, + "ĠVend": 27565, + "Ġcynical": 27566, + "ĠHelm": 27567, + "ĠMovies": 27568, + "tops": 27569, + "Ġsinister": 27570, 
+ "Ġunparalleled": 27571, + "Ġspikes": 27572, + "Ġoverlap": 27573, + "enstein": 27574, + "Ġhypocrisy": 27575, + "Plus": 27576, + "Ġexpansions": 27577, + "Ġvow": 27578, + "Ġdetonated": 27579, + "Ġfellowship": 27580, + "Ġsolicitor": 27581, + "ĠNewtown": 27582, + "mony": 27583, + "ĠLod": 27584, + "ĠDevelopers": 27585, + "ateg": 27586, + "ibus": 27587, + "Ġcrumbling": 27588, + "ĠWein": 27589, + "ĠKlan": 27590, + "gio": 27591, + "ĠPhys": 27592, + "ĠAntarctica": 27593, + "368": 27594, + "Ġseam": 27595, + "Ġautomobiles": 27596, + "ĠTEAM": 27597, + "bern": 27598, + "Ġmanic": 27599, + "Ġsanct": 27600, + "Ġequals": 27601, + "Est": 27602, + "Ġincentiv": 27603, + "ĠHawking": 27604, + "nin": 27605, + "Ġresonate": 27606, + "bid": 27607, + "Ġtelescope": 27608, + "endon": 27609, + "ĠVacc": 27610, + "Ġregretted": 27611, + "Ġ1300": 27612, + "ĠForestry": 27613, + "BOOK": 27614, + "Ġgroundwork": 27615, + "Ġessays": 27616, + "ĠIndo": 27617, + "Pierre": 27618, + "ĠChau": 27619, + "Ġapologies": 27620, + "killers": 27621, + "ĠMoroccan": 27622, + "0001": 27623, + "336": 27624, + "Ra": 27625, + "Ġparcels": 27626, + "Ġleaned": 27627, + "Ġthankfully": 27628, + "ĠSplit": 27629, + "Ġlobbied": 27630, + "ĠDegree": 27631, + "Ġrisking": 27632, + "assy": 27633, + "Ġsupplemental": 27634, + "little": 27635, + "Ġeclectic": 27636, + "Ġ206": 27637, + "ealing": 27638, + "206": 27639, + "Ġrepo": 27640, + "Ġhose": 27641, + "ayn": 27642, + "lux": 27643, + "Ġbeliever": 27644, + "')": 27645, + "ĠHide": 27646, + "vance": 27647, + "ĠEinstein": 27648, + "Ġdepos": 27649, + "Ġfray": 27650, + "Ġki": 27651, + "Ġinternship": 27652, + "ĠHou": 27653, + "Vis": 27654, + "Ġstare": 27655, + "ĠBreed": 27656, + "option": 27657, + "Ġvisionary": 27658, + "Ġmins": 27659, + "Ġbitten": 27660, + "ancies": 27661, + "ĠShake": 27662, + "Ġtemplate": 27663, + "Ġliner": 27664, + "Ġmuster": 27665, + "appro": 27666, + "ĠMubarak": 27667, + "esty": 27668, + "mong": 27669, + "actory": 27670, + "Ġheadphone": 27671, + "ĠPrec": 27672, + "Ġwaive": 27673, + "Ron": 27674, + "ĠHearing": 27675, + "Ġimperfect": 27676, + "Ġsealing": 27677, + "Ġlocating": 27678, + "Ġculminated": 27679, + "chio": 27680, + "channel": 27681, + "lust": 27682, + "ĠLowell": 27683, + "woods": 27684, + "Ġsoak": 27685, + "Ġforbidden": 27686, + "Ġdetached": 27687, + "unct": 27688, + "ĠHunger": 27689, + "ĠPatient": 27690, + "ĠPolo": 27691, + "Saharan": 27692, + "Jon": 27693, + "athered": 27694, + "ĠSignal": 27695, + "Six": 27696, + "Ġstatistically": 27697, + "ITH": 27698, + "artment": 27699, + "ĠCU": 27700, + "Ġhates": 27701, + "qual": 27702, + "Ġcapitalist": 27703, + "ATES": 27704, + "ĠDesc": 27705, + "Ġhandcuffed": 27706, + "Ġindulge": 27707, + "ĠReligious": 27708, + "German": 27709, + "housing": 27710, + "Ġdismantling": 27711, + "Ġconventions": 27712, + "dain": 27713, + "chairs": 27714, + "Ġloos": 27715, + "Ġknowingly": 27716, + "Var": 27717, + "Ġhusbands": 27718, + "eez": 27719, + "asion": 27720, + "ĠIssa": 27721, + "Ġswollen": 27722, + "Ġ1946": 27723, + "Ġheadlined": 27724, + "Chelsea": 27725, + "Ġignorant": 27726, + "Ġperipheral": 27727, + "Note": 27728, + "Ġaxe": 27729, + "Ġnicotine": 27730, + "ĠSanctuary": 27731, + "Ġ1917": 27732, + "Ġwithdrawals": 27733, + "uits": 27734, + "Hot": 27735, + "Ġreimburse": 27736, + "probably": 27737, + "ĠAdapt": 27738, + "industrial": 27739, + "answer": 27740, + "orus": 27741, + "ĠMell": 27742, + "Talk": 27743, + "Ġcontemplating": 27744, + "omas": 27745, + "Ġtaxis": 27746, + "Ġencompasses": 27747, + "rations": 27748, + "ĠLatvia": 27749, + "Ġhumiliating": 27750, + 
"Ġloft": 27751, + "tight": 27752, + "rium": 27753, + "Ġlogin": 27754, + "ĠBulletin": 27755, + "Ġturtles": 27756, + "EAR": 27757, + "349": 27758, + "Radio": 27759, + "ĠBord": 27760, + "151": 27761, + "kk": 27762, + "pocket": 27763, + "Ġdove": 27764, + "348": 27765, + "Ġtemptation": 27766, + "ĠCoy": 27767, + "those": 27768, + "ĠDest": 27769, + "ishly": 27770, + "rn": 27771, + "Ġmammals": 27772, + "ĠTub": 27773, + "arial": 27774, + "ĠPersian": 27775, + "Ġdaddy": 27776, + "Zen": 27777, + "Ġps": 27778, + "Ġ]": 27779, + "Field": 27780, + "adiq": 27781, + "Ġmeaningless": 27782, + "Ġprimer": 27783, + "Ġ1942": 27784, + "Ġ!": 27785, + "625": 27786, + "Ġfashionable": 27787, + "ĠTheft": 27788, + "ĠHAVE": 27789, + "christ": 27790, + "Ġperil": 27791, + "Ġrepealing": 27792, + "Ġbuff": 27793, + "Ġodor": 27794, + "Ġstalking": 27795, + "ĠDems": 27796, + "iences": 27797, + "Ġunilaterally": 27798, + "odies": 27799, + "ĠQuite": 27800, + "Ġbloodshed": 27801, + "Ġinfect": 27802, + "Ġreminders": 27803, + "Ġchop": 27804, + "Ġevapor": 27805, + "877": 27806, + "Ġhorrified": 27807, + "ĠFruit": 27808, + "rams": 27809, + "Ġinsecure": 27810, + "cester": 27811, + "ĠNationwide": 27812, + "Ġmocking": 27813, + "Ret": 27814, + "Ġcomplying": 27815, + "sav": 27816, + "Ġali": 27817, + "Family": 27818, + "Ĩ": 27819, + "Ġdishonest": 27820, + "Ġincorrectly": 27821, + "LOAD": 27822, + "ĠGand": 27823, + "ourcing": 27824, + "obby": 27825, + "ĠPetersen": 27826, + "Something": 27827, + "Ġravaged": 27828, + "limited": 27829, + "Ġrituals": 27830, + "ĠKnowledge": 27831, + "ĠUtility": 27832, + "Ġdoom": 27833, + "Ġsheds": 27834, + "ĠGael": 27835, + "ĠMillennials": 27836, + "ĠMonthly": 27837, + "Ġdomination": 27838, + "Ġrapport": 27839, + "spot": 27840, + "ĠPrest": 27841, + "ĠHA": 27842, + "ushes": 27843, + "Ġtact": 27844, + "Richard": 27845, + "Ġgritty": 27846, + "Does": 27847, + "ĠTNT": 27848, + "Ġdownfall": 27849, + "Wood": 27850, + "ĠPrediction": 27851, + "ĠPour": 27852, + "ĠFraud": 27853, + "ĠSyndrome": 27854, + "166": 27855, + "Ġliteral": 27856, + "Ġaddict": 27857, + "ĠLoud": 27858, + "hens": 27859, + "ĠAccounts": 27860, + "distance": 27861, + "Ġclassmate": 27862, + "Ġsalv": 27863, + "Ġunlucky": 27864, + "Ġpartying": 27865, + "ĠKou": 27866, + "ĠSNAP": 27867, + "%-": 27868, + "Ġdelegate": 27869, + "Ġstrikers": 27870, + "ĠSlate": 27871, + "Ġarticulate": 27872, + "390": 27873, + "Ġinqu": 27874, + "Ġdiscredit": 27875, + "ĠPriv": 27876, + "ploy": 27877, + "ĠMarketplace": 27878, + "ĠTune": 27879, + "visor": 27880, + "Ġwrestle": 27881, + "Ġkindly": 27882, + "ĠCollect": 27883, + "Ġcirc": 27884, + "ĠRemain": 27885, + "Ġ192": 27886, + "contin": 27887, + "Ġ325": 27888, + "Ġsevered": 27889, + "isations": 27890, + "Ġmuddy": 27891, + "Ġtaxing": 27892, + "ĠRepresent": 27893, + "ĠSty": 27894, + "rology": 27895, + "ĠJudges": 27896, + "ĠBronze": 27897, + "ĠApplic": 27898, + "Ġarrow": 27899, + "consuming": 27900, + "ĠFeaturing": 27901, + "Ġspies": 27902, + "Ġnoises": 27903, + "ĠColony": 27904, + "lost": 27905, + "Ġopp": 27906, + "Ġdeem": 27907, + "ĠGarc": 27908, + "icent": 27909, + "ptroller": 27910, + "liest": 27911, + "Ġoutward": 27912, + "ĠUser": 27913, + "Ġintimidate": 27914, + "156": 27915, + "Ġjab": 27916, + "ANGE": 27917, + "Jay": 27918, + "ĠPoverty": 27919, + "ACA": 27920, + "Ġrife": 27921, + "Ġfaint": 27922, + "ĠAcceler": 27923, + "tall": 27924, + "ĠUNITED": 27925, + "ĠFighter": 27926, + "ĠGilmore": 27927, + "Ġsod": 27928, + "amura": 27929, + "Ġpredictive": 27930, + "Ġpolish": 27931, + "ĠDD": 27932, + "Ġfabricated": 27933, + "ĠDag": 27934, + 
"Ġfatty": 27935, + "Ġplague": 27936, + "Ġexhib": 27937, + "ĠAdvent": 27938, + "Ġ1941": 27939, + "ERSON": 27940, + "initely": 27941, + "Ġloneliness": 27942, + "ĠEquality": 27943, + "Ġuntrue": 27944, + "Ġonlook": 27945, + "Ġfragmented": 27946, + "ruce": 27947, + "Ġdistrust": 27948, + "Ġscal": 27949, + "ĠCors": 27950, + "Ġrobbing": 27951, + "cultural": 27952, + "clusion": 27953, + "ĠObi": 27954, + "sels": 27955, + "ĠEvidence": 27956, + "ĠSac": 27957, + "Ġfragments": 27958, + "Ġflipping": 27959, + "ĠRabbit": 27960, + "Ġdisproportionate": 27961, + "ĠCreat": 27962, + "Ġlabeling": 27963, + "ĠGri": 27964, + "Ġ161": 27965, + "ĠEditors": 27966, + "holm": 27967, + "adr": 27968, + "Ĭ": 27969, + "tailed": 27970, + "Ġrenters": 27971, + "Ġnoodles": 27972, + "Ġcompetence": 27973, + "Ġpanc": 27974, + "uration": 27975, + "Ġacids": 27976, + "Ġconfid": 27977, + "rival": 27978, + "AAA": 27979, + "kson": 27980, + "Ġrecreate": 27981, + "153": 27982, + "Ġ164": 27983, + "ĠOlympia": 27984, + "ĠUnlimited": 27985, + "ĠShock": 27986, + "ĠTeaching": 27987, + "ĠHouses": 27988, + "resso": 27989, + "ĠMaw": 27990, + "Ġreplen": 27991, + "Ġprotestors": 27992, + "bey": 27993, + "Ġsurve": 27994, + "Ġemphasizes": 27995, + "223": 27996, + "ĠEsther": 27997, + "ĠNikol": 27998, + "Ġprosecutions": 27999, + "ĠFreed": 28000, + "Ġposs": 28001, + "OTE": 28002, + "ĠPrayer": 28003, + "Ġsquarely": 28004, + "Ġtir": 28005, + "adv": 28006, + "Ġbogus": 28007, + "Ġwrongful": 28008, + "Ġembell": 28009, + "Ġseldom": 28010, + "Ġpossesses": 28011, + "Er": 28012, + "ĠAlternatively": 28013, + "Ġinstituted": 28014, + "rr": 28015, + "Ġvocational": 28016, + "eval": 28017, + "ĠComics": 28018, + "Ġstumbling": 28019, + "335": 28020, + "Ġdragon": 28021, + "vine": 28022, + "services": 28023, + "Ġcrit": 28024, + "irens": 28025, + "Ġlayered": 28026, + "orb": 28027, + "Ġdominates": 28028, + "ĠMarx": 28029, + "period": 28030, + "avering": 28031, + "Ġbrigade": 28032, + "Ġchem": 28033, + "ĠEvolution": 28034, + "ĠSuk": 28035, + "Ġ209": 28036, + "ĠMalk": 28037, + "Ġtallest": 28038, + "recogn": 28039, + "ĠCraw": 28040, + "Ġell": 28041, + "ĠCaesar": 28042, + "php": 28043, + "ĠSurvivors": 28044, + "sd": 28045, + "itsch": 28046, + "ambo": 28047, + "Ġashore": 28048, + "acular": 28049, + "rost": 28050, + "Ġmurderer": 28051, + "Ġcasts": 28052, + "ĠEconomist": 28053, + "ĠWeapons": 28054, + "Ġnostalgic": 28055, + "Skip": 28056, + "REAM": 28057, + "Pa": 28058, + "Ġjournals": 28059, + "ĠSitting": 28060, + "Union": 28061, + "Att": 28062, + "ĠMaxim": 28063, + "Ġpurportedly": 28064, + "Ġrespecting": 28065, + "ĠMAX": 28066, + "seed": 28067, + "Ġjuicy": 28068, + "ĠGallup": 28069, + "Ġmileage": 28070, + "adier": 28071, + "Ġbod": 28072, + "DER": 28073, + "Ġsummers": 28074, + "icult": 28075, + "ipl": 28076, + "ĠDeng": 28077, + "Ġsmells": 28078, + "Ġivory": 28079, + "Ġ255": 28080, + "Id": 28081, + "DEN": 28082, + "Ġ159": 28083, + "Due": 28084, + "ĠLighting": 28085, + "ĠSurely": 28086, + "Ġsund": 28087, + "ĠKessler": 28088, + "immigrant": 28089, + "Ġtragedies": 28090, + "ĠOxy": 28091, + "ĠFixed": 28092, + "ĠBalk": 28093, + "Ġoriented": 28094, + "pher": 28095, + "Ġkitchens": 28096, + "Ġhips": 28097, + "Ġtweak": 28098, + "Ġtuna": 28099, + "ĠCla": 28100, + "Ġdislike": 28101, + "ussy": 28102, + "Ġoutnumbered": 28103, + "Ġplumbing": 28104, + "Ġcogn": 28105, + "ĠThrow": 28106, + "ĠTER": 28107, + "urally": 28108, + "ĠMurd": 28109, + "Ġcreamy": 28110, + "Ġresiding": 28111, + "otics": 28112, + "Ġfingerprints": 28113, + "!,": 28114, + "Ġpaused": 28115, + "ĠMilo": 28116, + "Ġhomosexuality": 28117, 
+ "Ġresponsibly": 28118, + "iop": 28119, + "UCT": 28120, + "Ġsucceeds": 28121, + "ĠCRE": 28122, + "ĠThatcher": 28123, + "Ġcurrents": 28124, + "Ġarises": 28125, + "Ġwaterproof": 28126, + "Ġamp": 28127, + "ĠClaims": 28128, + "177": 28129, + "Ġsubpoen": 28130, + "Ġvig": 28131, + "ĠNeuro": 28132, + "Ġblur": 28133, + "ĠPaint": 28134, + "campus": 28135, + "Ġtoughness": 28136, + "ĠButton": 28137, + "Neal": 28138, + "ĠDEN": 28139, + "ĠNir": 28140, + "ĠAxel": 28141, + "EEP": 28142, + "Ġpint": 28143, + "Ġagile": 28144, + "odor": 28145, + "Ġessentials": 28146, + "ĠMov": 28147, + "ĠVenezuel": 28148, + "Ġexchanging": 28149, + "ĠNegative": 28150, + "Mil": 28151, + "Key": 28152, + "Ġbuzzing": 28153, + "ĠStew": 28154, + "Ġrebuke": 28155, + "Ġdepl": 28156, + "ĠKoz": 28157, + "Ġ163": 28158, + "Ġshines": 28159, + "NZ": 28160, + "Ġcarnage": 28161, + "cases": 28162, + "Ġwarmed": 28163, + "ĠGreenwich": 28164, + "College": 28165, + "Ġneedy": 28166, + "301": 28167, + "ĠMü": 28168, + "culation": 28169, + "Ġ440": 28170, + "425": 28171, + "atories": 28172, + "Ġsatisfactory": 28173, + "ĠFib": 28174, + "ĠElim": 28175, + "developed": 28176, + "Ġvacations": 28177, + "Ġpeculiar": 28178, + "Ġvets": 28179, + "onest": 28180, + "ĠPug": 28181, + "Ġlifestyles": 28182, + "zzi": 28183, + "Ġprovoke": 28184, + "bah": 28185, + "arger": 28186, + "ĠVirt": 28187, + "Sales": 28188, + "annel": 28189, + "ĠMeth": 28190, + "ivating": 28191, + "Ġrevoke": 28192, + "ĠAgenda": 28193, + "ĠIch": 28194, + "Ġsensit": 28195, + "ĠAzerbai": 28196, + "ĠBombay": 28197, + "Ġuncon": 28198, + "river": 28199, + "Ġapr": 28200, + "actic": 28201, + "ĠSubaru": 28202, + "Ġbanquet": 28203, + "Ġcontradict": 28204, + "tek": 28205, + "Football": 28206, + "igent": 28207, + "Ġreintrodu": 28208, + "ĠInsight": 28209, + "Ġsystematically": 28210, + "Ġboun": 28211, + "ĠFishing": 28212, + "Ġstri": 28213, + "ĠOB": 28214, + "Ġstair": 28215, + "Wall": 28216, + "ĠAllow": 28217, + "Ġcaramel": 28218, + "169": 28219, + "Ġcafes": 28220, + "Ġcalcium": 28221, + "Ġ169": 28222, + "Ġportraying": 28223, + "Ġdiscriminate": 28224, + "Ġunrestricted": 28225, + "Ġmant": 28226, + "Ġscarcity": 28227, + "Ġfeminism": 28228, + "ĠJJ": 28229, + "ĠOversight": 28230, + "ĠCue": 28231, + "Ġinexperienced": 28232, + "Ġdrafts": 28233, + "Ġ1939": 28234, + "nm": 28235, + "forest": 28236, + "ĠHonour": 28237, + "Ġceramic": 28238, + "Ġdownstairs": 28239, + "Ġboon": 28240, + "Ġmorality": 28241, + "Ġhorrifying": 28242, + "Rad": 28243, + "justice": 28244, + "Ġmosques": 28245, + "Ġcurfew": 28246, + "Ġsurrogate": 28247, + "Ġreimb": 28248, + "enth": 28249, + "pressure": 28250, + "beam": 28251, + "Ġwhirlwind": 28252, + "ĠRecession": 28253, + "ĠTours": 28254, + "Ġclusters": 28255, + "ĠQuant": 28256, + "Jonathan": 28257, + "project": 28258, + "Ġ777": 28259, + "ĠNOAA": 28260, + "abis": 28261, + "Ġdeficiencies": 28262, + "Ġsuicides": 28263, + "Ġfoothold": 28264, + "ĠYah": 28265, + "imeter": 28266, + "URN": 28267, + "Ġcultivate": 28268, + "Ġnoisy": 28269, + "Ġ1951": 28270, + "Ġpressuring": 28271, + "ĠDeals": 28272, + "ĠProphet": 28273, + "ĠWikipedia": 28274, + "INESS": 28275, + "ĠShine": 28276, + "ĠCalled": 28277, + "ĠSole": 28278, + "ĠZhou": 28279, + "Ġasphalt": 28280, + "armac": 28281, + "ĠScorp": 28282, + "ĠUnknown": 28283, + "ĠPAT": 28284, + "Heart": 28285, + "Ġguessed": 28286, + "Ġsushi": 28287, + "Ġheartbeat": 28288, + "Ġconcent": 28289, + "eret": 28290, + "plin": 28291, + "Ġweeds": 28292, + "Ġbombed": 28293, + "ĠTerrorism": 28294, + "Rich": 28295, + "Ġblades": 28296, + "Ġhaunt": 28297, + "Ġstorefront": 28298, + 
"Ġthwarted": 28299, + "access": 28300, + "ĠLydia": 28301, + "LINE": 28302, + "Ġpregnancies": 28303, + "Ġripping": 28304, + "ĠBelieve": 28305, + "spoken": 28306, + "inian": 28307, + "sed": 28308, + "ĠBrass": 28309, + "econom": 28310, + "current": 28311, + "Ġvoc": 28312, + "Ġmodeled": 28313, + "Ġpeppers": 28314, + "otech": 28315, + "ĠOption": 28316, + "Connell": 28317, + "isel": 28318, + "Ġcompel": 28319, + "Ġjuveniles": 28320, + "ĠNET": 28321, + "ĠEXP": 28322, + "Ġparadigm": 28323, + "Des": 28324, + "Ġ204": 28325, + "employed": 28326, + "Ġdurability": 28327, + "Ġ245": 28328, + "Ġbillionaires": 28329, + "violent": 28330, + "ĠCooperative": 28331, + "TOP": 28332, + "ĠGarry": 28333, + "ĠSoldiers": 28334, + "Ġdared": 28335, + "Ġvoucher": 28336, + "Ġblends": 28337, + "gue": 28338, + "Ġadventurous": 28339, + "Ġorganisms": 28340, + "Ġgaze": 28341, + "Ġcrap": 28342, + "Coach": 28343, + "omon": 28344, + "ĠWheels": 28345, + "ĠGrayson": 28346, + "Ġrecy": 28347, + "grave": 28348, + "Ġallergic": 28349, + "Ġreef": 28350, + "Ġbeginnings": 28351, + "ĠRuff": 28352, + "Ġclout": 28353, + "structed": 28354, + "315": 28355, + "ĠGeorgian": 28356, + "say": 28357, + "Ġsprings": 28358, + "ĠAsus": 28359, + "Ġrepaid": 28360, + "ĠGuys": 28361, + "ticket": 28362, + "Ġunb": 28363, + "ĠCertificate": 28364, + "ĠSTORY": 28365, + "cin": 28366, + "Ġpassions": 28367, + "Ġmediocre": 28368, + "Ġlackluster": 28369, + "vernight": 28370, + "kids": 28371, + "ĠWife": 28372, + "politics": 28373, + "ĠHimal": 28374, + "oddy": 28375, + "ensus": 28376, + "ĠGustav": 28377, + "binding": 28378, + "ĠIndividuals": 28379, + "Ġmaize": 28380, + "Ġhoop": 28381, + "ĠChanging": 28382, + "Ġlessen": 28383, + "Ġarranging": 28384, + "ĠFukushima": 28385, + "ĠTrying": 28386, + "ĠMage": 28387, + "Ġskeleton": 28388, + "ĠTec": 28389, + "289": 28390, + "Ġrecl": 28391, + "ĠFIL": 28392, + "Gs": 28393, + "ĠOdyssey": 28394, + "ĠProcessing": 28395, + "ilion": 28396, + "Ġsubsidized": 28397, + "Ġabdomen": 28398, + "Ġanalyse": 28399, + "music": 28400, + "clean": 28401, + "Ġunfinished": 28402, + "Ġdownloads": 28403, + "Ġmorally": 28404, + "Ġ218": 28405, + "Ġtrib": 28406, + "Keep": 28407, + "ĠSER": 28408, + "FY": 28409, + "Ġaust": 28410, + "Ġdiscovers": 28411, + "ĠGROUP": 28412, + "ĠMachines": 28413, + "Ġeroded": 28414, + "Ġominous": 28415, + "Ġbrightly": 28416, + "IME": 28417, + "Ġwicked": 28418, + "ĠTrou": 28419, + "Ġvisions": 28420, + "Kay": 28421, + "reported": 28422, + "Ġbog": 28423, + "ĠQuin": 28424, + "ĠSigma": 28425, + "urned": 28426, + "ixon": 28427, + "Ġharming": 28428, + "Ġcheckout": 28429, + "inet": 28430, + "much": 28431, + "Ġcherish": 28432, + "ĠByrd": 28433, + "ĠSamson": 28434, + "WP": 28435, + "orders": 28436, + "boa": 28437, + "Ġbron": 28438, + "oki": 28439, + "ĠRR": 28440, + "Ġsuitcase": 28441, + "Ġfeathers": 28442, + "ĠChristy": 28443, + "Islamic": 28444, + "Ġamusement": 28445, + "ĠISS": 28446, + "intensive": 28447, + "Qaida": 28448, + "Ġneurons": 28449, + "Ġwagon": 28450, + "ĠTek": 28451, + "Ġdolls": 28452, + "ĠShoot": 28453, + "Ġunderestimate": 28454, + "Ġstreamlined": 28455, + "Ġfractures": 28456, + "Ġcathedral": 28457, + "Ġeliminates": 28458, + "helle": 28459, + "Ġcitrus": 28460, + "risis": 28461, + "Ġimpecc": 28462, + "istries": 28463, + "ĠHog": 28464, + "vote": 28465, + "pas": 28466, + "Ġassign": 28467, + "ĠSongs": 28468, + "ĠMiracle": 28469, + "kas": 28470, + "zynski": 28471, + "Ġcrane": 28472, + "Ġadulthood": 28473, + "ĠBenefit": 28474, + "ĠGrimes": 28475, + "Ġpayday": 28476, + "ablished": 28477, + "Ġcenterpiece": 28478, + "Ġhassle": 28479, + 
"ĠAppalachian": 28480, + "follow": 28481, + "Ġ290": 28482, + "ĠRL": 28483, + "ĠDoe": 28484, + "Ġacclaim": 28485, + "Ġlevied": 28486, + "Ġtossing": 28487, + "Ġcarrots": 28488, + "ĠDarius": 28489, + "161": 28490, + "Ġoffspring": 28491, + "ĠJury": 28492, + "ĠTPP": 28493, + "CAP": 28494, + "Ġenvironmentalists": 28495, + "Ġrays": 28496, + "267": 28497, + "Ser": 28498, + "Ġcaptivity": 28499, + "Ġappellate": 28500, + "ĠElectricity": 28501, + "ĠEnough": 28502, + "232": 28503, + "Ġfisher": 28504, + "Ġbrilliance": 28505, + "Ġpraises": 28506, + "aunch": 28507, + "Ġsolicitation": 28508, + "Ġadolescent": 28509, + "Ġinferior": 28510, + "checks": 28511, + "Set": 28512, + "Ġmutations": 28513, + "ĠLatinos": 28514, + "ĠLicense": 28515, + "ĠAme": 28516, + "hirt": 28517, + "ĠChun": 28518, + "Ġdeeds": 28519, + "ldon": 28520, + "Ġmammoth": 28521, + "Ġturtle": 28522, + "rule": 28523, + "Ken": 28524, + "Ġvoyage": 28525, + "gram": 28526, + "Ġconquer": 28527, + "Ġretaliate": 28528, + "ĠPJ": 28529, + "ĠViking": 28530, + "Ġsafegu": 28531, + "ordinary": 28532, + "ĠArbit": 28533, + "ĠDigest": 28534, + "Die": 28535, + "Ġbureaucratic": 28536, + "Ġhonorable": 28537, + "Ġcafeteria": 28538, + "ĠRAF": 28539, + "ĠPlaces": 28540, + "ĠKlu": 28541, + "Cam": 28542, + "ĠBiology": 28543, + "ĠCycling": 28544, + "imore": 28545, + "Ġstripping": 28546, + "Ġwarriors": 28547, + "Ġbursting": 28548, + "Ġlapse": 28549, + "Ġversa": 28550, + "Ġclicked": 28551, + "ogh": 28552, + "Ġ\"âĢ¦": 28553, + "Ġdiligently": 28554, + "ĠMiy": 28555, + "ĠCorpus": 28556, + "Ġredef": 28557, + "Ġ176": 28558, + "ĠInstrument": 28559, + "ĠOECD": 28560, + "Ġstro": 28561, + "Ġmicrowave": 28562, + "Santa": 28563, + "Ġpars": 28564, + "Social": 28565, + "iffe": 28566, + "itability": 28567, + "Equ": 28568, + "Ġnud": 28569, + "legged": 28570, + "ĠTud": 28571, + "lav": 28572, + "Ġinterpreter": 28573, + "alcohol": 28574, + "Ġimposition": 28575, + "Ġdwelling": 28576, + "Ġ1400": 28577, + "].\"": 28578, + "ĠIw": 28579, + "RM": 28580, + "Ġ555": 28581, + "Ġparalyzed": 28582, + "mind": 28583, + "rans": 28584, + "adin": 28585, + "French": 28586, + "Ġliar": 28587, + "Represent": 28588, + "Ġstrapped": 28589, + "orate": 28590, + "Ġrigging": 28591, + "Ġinterrog": 28592, + "Ġsparse": 28593, + "ento": 28594, + "ĠThem": 28595, + "Ġbaseless": 28596, + "Ġbuildup": 28597, + "Ġundecided": 28598, + "isms": 28599, + "Ġabduct": 28600, + "Ġflowed": 28601, + "Ġprestige": 28602, + "Ġhacks": 28603, + "Ġpanicked": 28604, + "Cast": 28605, + "ĠKrish": 28606, + "umat": 28607, + "Ġantique": 28608, + "Ġbitters": 28609, + "Ġentitlement": 28610, + "Ġstandby": 28611, + "Ten": 28612, + "said": 28613, + "ĠConditions": 28614, + "events": 28615, + "Ġobey": 28616, + "Ġshortest": 28617, + "etting": 28618, + "Ġconcentrating": 28619, + "ĠNeeds": 28620, + "234": 28621, + "Ġintrigued": 28622, + "enting": 28623, + "ĠXen": 28624, + "ĠAlger": 28625, + "seekers": 28626, + "anish": 28627, + "Ġ172": 28628, + "âĢij": 28629, + "Ġsilicon": 28630, + "Ġstandardized": 28631, + "ĠFountain": 28632, + "essel": 28633, + "Ġapproves": 28634, + "Ġsucked": 28635, + "gone": 28636, + "ĠBriggs": 28637, + "brother": 28638, + "Ġartisan": 28639, + "ĠContinuing": 28640, + "vir": 28641, + "Ġsubmarines": 28642, + "ĠInk": 28643, + "program": 28644, + "ĠNexus": 28645, + "ĠCoco": 28646, + "Ġconceptual": 28647, + "Ġmatt": 28648, + "aughters": 28649, + "Ġbaths": 28650, + "Ġbeaut": 28651, + "ĠEmerald": 28652, + "ĠParties": 28653, + "248": 28654, + "completely": 28655, + "esan": 28656, + "Ġdiarrhea": 28657, + "Ġ1100": 28658, + "borg": 28659, + 
"ĠBroken": 28660, + "Ġreiterate": 28661, + "Ġsorting": 28662, + "ONS": 28663, + "Ġ177": 28664, + "Ġadmin": 28665, + "ĠMandatory": 28666, + "Ġsymptom": 28667, + "Ġpaced": 28668, + "Remember": 28669, + "Ġabdominal": 28670, + "Ġswapped": 28671, + "Ġtransitions": 28672, + "IFA": 28673, + "pretty": 28674, + "ĠJC": 28675, + "Ġallotted": 28676, + "ĠShows": 28677, + "Arthur": 28678, + "Ġsoften": 28679, + "dozen": 28680, + "Mah": 28681, + "Ġextinguished": 28682, + "Ġreelection": 28683, + "Ġdeployments": 28684, + "Ġsturdy": 28685, + "Ġdownright": 28686, + "Ġjams": 28687, + "ĠOptim": 28688, + "Ġhumiliation": 28689, + "cd": 28690, + "Ġbunk": 28691, + "sie": 28692, + "NAT": 28693, + "ilies": 28694, + "Ġimplying": 28695, + "Ġ<": 28696, + "Ġhomepage": 28697, + "242": 28698, + "Ġey": 28699, + "Ġdict": 28700, + "Ġslender": 28701, + "Ġforehead": 28702, + "ĠCecil": 28703, + "Ġshrunk": 28704, + "ĠExit": 28705, + "Ġexpressly": 28706, + "Ġseals": 28707, + "ĠThiel": 28708, + "umni": 28709, + "Ġdamning": 28710, + "ĠVS": 28711, + "ulum": 28712, + "BBC": 28713, + "URES": 28714, + "Ġinhal": 28715, + "Ġfont": 28716, + "Ġworkplaces": 28717, + "ĠPUBLIC": 28718, + "ĠHorror": 28719, + "Bs": 28720, + "arta": 28721, + "ĠBread": 28722, + "Ġstret": 28723, + "Ġethos": 28724, + "Ġstabilized": 28725, + "Ġconvers": 28726, + "ĠInqu": 28727, + "Ġjudgments": 28728, + "ĠContemporary": 28729, + "221": 28730, + "Ġzombie": 28731, + "VD": 28732, + "Ġmisunderstanding": 28733, + "Ġspam": 28734, + "ĠPapers": 28735, + "Ġcrocod": 28736, + "ENA": 28737, + "ĠJuven": 28738, + "ĠAbram": 28739, + "Ġbursts": 28740, + "atto": 28741, + "Ġturbulence": 28742, + "tty": 28743, + "sexual": 28744, + "Ġwaning": 28745, + "community": 28746, + "Government": 28747, + "Ġtranspl": 28748, + "??": 28749, + "Getting": 28750, + "ĠRare": 28751, + "prime": 28752, + "Ġlooting": 28753, + "Ġvalidate": 28754, + "ĠCreating": 28755, + "ĠCorruption": 28756, + "Ġspit": 28757, + "ĠFavorite": 28758, + "Kar": 28759, + "Ġadaptive": 28760, + "ĠART": 28761, + "Ġtorso": 28762, + "ĠIdent": 28763, + "Ġsubdivision": 28764, + "azo": 28765, + "Ġconsequently": 28766, + "Ġrotate": 28767, + "ĠWit": 28768, + "Ġestab": 28769, + "managed": 28770, + "ĠBound": 28771, + "Ġskim": 28772, + "198": 28773, + "ĠCorona": 28774, + "ĠâĿ": 28775, + "Ġwording": 28776, + "buck": 28777, + "iph": 28778, + "patrick": 28779, + "Help": 28780, + "flying": 28781, + "Ġracer": 28782, + "Ġfisherman": 28783, + "____": 28784, + "ackers": 28785, + "Ġpersisted": 28786, + "Ġmyths": 28787, + "Ġgarn": 28788, + "ologue": 28789, + "ĠApprentice": 28790, + "Ġhereby": 28791, + "Ġvulgar": 28792, + "ĠGinger": 28793, + "Ġtrait": 28794, + "ĠIdea": 28795, + "Ġfigur": 28796, + "ĠSchwarzenegger": 28797, + "ĠSafari": 28798, + "178": 28799, + "ĠAsians": 28800, + "775": 28801, + "ĠTriangle": 28802, + "Ġdemons": 28803, + "ĠOv": 28804, + "Ġanime": 28805, + "Broad": 28806, + "Ġmolecule": 28807, + "Ġdeposition": 28808, + "Ġbiodiversity": 28809, + "modern": 28810, + "Ġwallets": 28811, + "NH": 28812, + "planes": 28813, + "rats": 28814, + "ĠSeed": 28815, + "Ġ174": 28816, + "umed": 28817, + "Ġtouting": 28818, + "gre": 28819, + "ĠSEAL": 28820, + "Ġperpetrator": 28821, + "ĠGerrard": 28822, + "Ġallocations": 28823, + "Ġworsh": 28824, + "payment": 28825, + "bett": 28826, + "ĠIssues": 28827, + "ennis": 28828, + "eering": 28829, + "ĠMV": 28830, + "yi": 28831, + "hak": 28832, + "Ġ167": 28833, + "Ġorchestr": 28834, + "224": 28835, + "Ġsup": 28836, + "Ġleukemia": 28837, + "osures": 28838, + "575": 28839, + "Ġnoticeably": 28840, + "Ġparamilitary": 28841, + 
"ĠTHERE": 28842, + "Ġwaged": 28843, + "igrated": 28844, + "Ġdocumentaries": 28845, + "Ġsenseless": 28846, + "Ġbark": 28847, + "Ġgenetics": 28848, + "ĠAlbania": 28849, + "ĠCrypt": 28850, + "ĠSEO": 28851, + "Ġnightly": 28852, + "Ġfaults": 28853, + "279": 28854, + "ĠFerdinand": 28855, + "ĠSylv": 28856, + "Ġcalam": 28857, + "ĠMuller": 28858, + "ĠSpielberg": 28859, + "Boy": 28860, + "ĠUrs": 28861, + "Ġrug": 28862, + "Ġcolonies": 28863, + "ĠFunk": 28864, + "Ġlyric": 28865, + "ĠATT": 28866, + "anni": 28867, + "ĠNB": 28868, + "Ġthorn": 28869, + "Ġpertinent": 28870, + "188": 28871, + "Ġpartic": 28872, + "Head": 28873, + "Pad": 28874, + "Palestinian": 28875, + "ĠBarg": 28876, + "anical": 28877, + "beaut": 28878, + "onge": 28879, + "Ġgigantic": 28880, + "travel": 28881, + "Ġdownloading": 28882, + "Contin": 28883, + "whe": 28884, + "plane": 28885, + "Wil": 28886, + "IDA": 28887, + "Ele": 28888, + "ĠPAL": 28889, + "Ġbeams": 28890, + "ĠProud": 28891, + "ramer": 28892, + "Ġindependents": 28893, + "Ġtranslator": 28894, + "ĠBrah": 28895, + "ĠTrooper": 28896, + "aylor": 28897, + "pson": 28898, + "Ġguise": 28899, + "Ġdiffering": 28900, + "Ġtopple": 28901, + "ichen": 28902, + "ĠSeymour": 28903, + "deg": 28904, + "ĠMixed": 28905, + "Ġinvoluntary": 28906, + "Ġcountdown": 28907, + "ĠNarc": 28908, + "ĠAdults": 28909, + "Ġcoaster": 28910, + "Ġ342": 28911, + "ĠAcquisition": 28912, + "mone": 28913, + "Ġpenchant": 28914, + "Brian": 28915, + "Gh": 28916, + "Pres": 28917, + "enei": 28918, + "Ġreefs": 28919, + "ĠMaver": 28920, + "Ġdevised": 28921, + "ĠIMP": 28922, + "vict": 28923, + "Ġagility": 28924, + "ĠPayments": 28925, + "respected": 28926, + "Ġtuning": 28927, + "ĠFACE": 28928, + "actions": 28929, + "Ġyell": 28930, + "ĠLeaving": 28931, + "Ġsnowy": 28932, + "Saudi": 28933, + "Ġformations": 28934, + "Ġairborne": 28935, + "Ġdeed": 28936, + "ooks": 28937, + "Ġnamesake": 28938, + "Ġpunishable": 28939, + "Ġagg": 28940, + "oths": 28941, + "ĠFamous": 28942, + "ĠDeposit": 28943, + "Ġinduce": 28944, + "189": 28945, + "Ġhesitation": 28946, + "ĠBrowse": 28947, + "ople": 28948, + "reys": 28949, + "henko": 28950, + "Ġsecretaries": 28951, + "Ġintersections": 28952, + "Ġdiminishing": 28953, + "ints": 28954, + "Ġ1934": 28955, + "ĠInvestigative": 28956, + "ĠMexicans": 28957, + "ĠMahar": 28958, + "ibur": 28959, + "Ġstocking": 28960, + "gross": 28961, + "Ġasbestos": 28962, + "Ġagitation": 28963, + "ĠBST": 28964, + "Overall": 28965, + "Ġheats": 28966, + "ĠSpan": 28967, + "Ġimped": 28968, + "Ġtrusting": 28969, + "Pet": 28970, + "Ġegregious": 28971, + "Ġcomedians": 28972, + "zin": 28973, + "WIN": 28974, + "Ġchats": 28975, + "Ġexploding": 28976, + "ĠTort": 28977, + "Ġembraces": 28978, + "Ġneut": 28979, + "verson": 28980, + "ouncing": 28981, + "ĠFiber": 28982, + "Ġbaker": 28983, + "Ġunstoppable": 28984, + "ĠDial": 28985, + "cars": 28986, + "Marc": 28987, + "164": 28988, + "volt": 28989, + "Ġceased": 28990, + "EFF": 28991, + "Ġpromoters": 28992, + "Ġcircuits": 28993, + "Ġexcise": 28994, + "Ġseminars": 28995, + "ĠTiny": 28996, + "ĠImportant": 28997, + "ĠTup": 28998, + "Ġoutburst": 28999, + "ĠSOC": 29000, + "ĠWWII": 29001, + "Ġmerging": 29002, + "highly": 29003, + "ĠGmail": 29004, + "ozy": 29005, + "ĠKB": 29006, + "Ġlaboratories": 29007, + "knit": 29008, + "ĠClosed": 29009, + "Ġsurrounds": 29010, + "ĠVet": 29011, + "Ġcere": 29012, + "vard": 29013, + "ĠDeadpool": 29014, + "text": 29015, + "Ġinfusion": 29016, + "Ġcuc": 29017, + "ĠAtl": 29018, + "Ġbustling": 29019, + "ĠSettings": 29020, + "Ġ193": 29021, + "ryan": 29022, + "184": 29023, + "186": 
29024, + "Ġswat": 29025, + "rane": 29026, + "Ġepidem": 29027, + "lando": 29028, + "Ġtestifying": 29029, + "Ġmoistur": 29030, + "ĠTens": 29031, + "Ġexemplary": 29032, + "ĠPump": 29033, + "Ġforcefully": 29034, + "ĠFare": 29035, + "Ġcomplicate": 29036, + "Fe": 29037, + "Di": 29038, + "ĠThy": 29039, + "Ġcompartment": 29040, + "ĠFiesta": 29041, + "Would": 29042, + "fitted": 29043, + "Ġcull": 29044, + "Ġcomedic": 29045, + "cyl": 29046, + "Ġwhichever": 29047, + "stic": 29048, + "Ġ213": 29049, + "Ġspills": 29050, + "Ġplasma": 29051, + "Ġdisguise": 29052, + "ĠCompass": 29053, + "ĠImmun": 29054, + "Ġscarf": 29055, + "Ġdisperse": 29056, + "Ġreckon": 29057, + "ĠTaste": 29058, + "root": 29059, + "ĠGAME": 29060, + "xx": 29061, + "Ġhomophobic": 29062, + "Ġdimin": 29063, + "/#": 29064, + "Ġ178": 29065, + "Ġgems": 29066, + "lio": 29067, + "informed": 29068, + "ample": 29069, + "XT": 29070, + "Ġrepression": 29071, + "ĠTakes": 29072, + "Ġhabitats": 29073, + "Ġmountainous": 29074, + "ĠMcH": 29075, + "ENC": 29076, + "Mobil": 29077, + "Ġreel": 29078, + "ĠTI": 29079, + "Ġauthorize": 29080, + "ĠAccept": 29081, + "ĠMetall": 29082, + "CCC": 29083, + "Ġwetlands": 29084, + "ĠWitch": 29085, + "heading": 29086, + "Ġintervals": 29087, + "ĠWitt": 29088, + "hene": 29089, + "Ġcomforting": 29090, + "ollen": 29091, + "ERN": 29092, + "ooky": 29093, + "etch": 29094, + "Ġassailant": 29095, + "announced": 29096, + "elin": 29097, + "plate": 29098, + "920": 29099, + "eating": 29100, + "induced": 29101, + "ĠIgor": 29102, + "ĠAmph": 29103, + "Ġpatented": 29104, + "posing": 29105, + "Ġextraordinarily": 29106, + "Ġfearless": 29107, + "mortem": 29108, + "ĠDraw": 29109, + "ĠRend": 29110, + "Son": 29111, + "ridden": 29112, + "ĠAdvantage": 29113, + "Ġ305": 29114, + "Ġroared": 29115, + "Str": 29116, + "Ġradioactive": 29117, + "Ġslur": 29118, + "ĠRear": 29119, + "affles": 29120, + "ĠPon": 29121, + "Ġost": 29122, + "umbs": 29123, + "ĠSlack": 29124, + "athom": 29125, + "baby": 29126, + "213": 29127, + "ĠSpending": 29128, + "ĠAccordingly": 29129, + "Ġclocks": 29130, + "archs": 29131, + "Ġsmugg": 29132, + "Ġmastermind": 29133, + "ĠKlaus": 29134, + "alpha": 29135, + "Ġspoiled": 29136, + "264": 29137, + "Pod": 29138, + "Ġflared": 29139, + "Ġcomposure": 29140, + "ĠCAM": 29141, + "Ġrestruct": 29142, + "Ġtasted": 29143, + "ĠKimber": 29144, + "Ġupheaval": 29145, + "CHAR": 29146, + "ĠGeo": 29147, + "itations": 29148, + "Ġbegged": 29149, + "UX": 29150, + "Authorities": 29151, + "ĠEngel": 29152, + "ĠHOME": 29153, + "Ġratt": 29154, + "Ġquickest": 29155, + "475": 29156, + "ĠSting": 29157, + "ĠICO": 29158, + "yu": 29159, + "Ġdefy": 29160, + "Prince": 29161, + "cards": 29162, + "Ġovertake": 29163, + "Ġretrieved": 29164, + "ĠNavajo": 29165, + "Ġpastry": 29166, + "ĠLange": 29167, + "Ġentrusted": 29168, + "ĠCull": 29169, + "aler": 29170, + "Ġdinosaurs": 29171, + "Ġbragging": 29172, + "ĠAlley": 29173, + "meier": 29174, + "ĠAssuming": 29175, + "Ġana": 29176, + "omatic": 29177, + "Brend": 29178, + "acted": 29179, + "Ġexhaustive": 29180, + "Ġunfit": 29181, + "Several": 29182, + "gap": 29183, + "Ġtet": 29184, + "228": 29185, + "Sk": 29186, + "302": 29187, + "Ġdeflect": 29188, + "Ġ179": 29189, + "226": 29190, + "Ġadorned": 29191, + "ĠSpread": 29192, + "Ġthirds": 29193, + "ĠSemi": 29194, + "Ġdescend": 29195, + "Ġaccumulate": 29196, + "Ġflavours": 29197, + "Ġinvoked": 29198, + "ĠAnge": 29199, + "Ġprofess": 29200, + "unks": 29201, + "ĠKickstarter": 29202, + "ENTS": 29203, + "ĠRw": 29204, + "Ġchatter": 29205, + "ĠPOS": 29206, + "Ġcollaborators": 29207, + "ĠEW": 29208, + 
"ĠMarkus": 29209, + "Ġimpair": 29210, + "Ġbolt": 29211, + "Ġglue": 29212, + "Ġloosely": 29213, + "ĠSUM": 29214, + "Ġhydraulic": 29215, + "Ġpredatory": 29216, + "Charles": 29217, + "cond": 29218, + "Ġspawned": 29219, + "Fr": 29220, + "174": 29221, + "Ġtame": 29222, + "Ġaggrav": 29223, + "Ġchrist": 29224, + "true": 29225, + "ivable": 29226, + "Ġhen": 29227, + "ĠKut": 29228, + "Ġskyrocket": 29229, + "Ġeg": 29230, + "Ġveterinarian": 29231, + "ĠStats": 29232, + "Kit": 29233, + "Ġbiologist": 29234, + "Spe": 29235, + "Ġantenna": 29236, + "Ġsust": 29237, + "fill": 29238, + "Ġpayload": 29239, + "227": 29240, + "Ġlivestream": 29241, + "ORN": 29242, + "ĠAbel": 29243, + "Ġdeception": 29244, + "ussen": 29245, + "Britain": 29246, + "partisan": 29247, + "Ġbrowse": 29248, + "Ġmelan": 29249, + "172": 29250, + "ĠNumerous": 29251, + "ĠMansion": 29252, + "Ġassailants": 29253, + "£": 29254, + "olerance": 29255, + "Ġdirectives": 29256, + "ĠInteg": 29257, + "zers": 29258, + "Ġduct": 29259, + "ĠHonestly": 29260, + "ĠImmediately": 29261, + "ixty": 29262, + "Ġdiagnose": 29263, + "Ġimplication": 29264, + "ĠiPads": 29265, + "testers": 29266, + "riots": 29267, + "Ġrespons": 29268, + "XP": 29269, + "pes": 29270, + "875": 29271, + "Ġ199": 29272, + "ĠPoe": 29273, + "303": 29274, + "Ġailments": 29275, + "ĠCarrier": 29276, + "Ġeject": 29277, + "Ġrestroom": 29278, + "Drive": 29279, + "manufact": 29280, + "Ġcompens": 29281, + "Ġglossy": 29282, + "Ġrecovers": 29283, + "Ġthinner": 29284, + "Ġdescendants": 29285, + "antle": 29286, + "Beaut": 29287, + "competitive": 29288, + "ĠRobotics": 29289, + "Ġpretext": 29290, + "233": 29291, + "Ġflanked": 29292, + "ĠâĻ": 29293, + "Ġguts": 29294, + "Ġwee": 29295, + "Ġaccents": 29296, + "mc": 29297, + "Ġgrapp": 29298, + "ĠNathaniel": 29299, + "ĠMikhail": 29300, + "Ġobligated": 29301, + "Ġmanoeuv": 29302, + "Ġechoing": 29303, + "Ġ189": 29304, + "ĠDevice": 29305, + "isd": 29306, + "Ġloopholes": 29307, + "Ġbehold": 29308, + "ĠMerry": 29309, + "Ġfunn": 29310, + "Ġnuanced": 29311, + "667": 29312, + "ELY": 29313, + "ĠTasmania": 29314, + "ĠSaddam": 29315, + "Ġquizz": 29316, + "military": 29317, + "cient": 29318, + "Ġoutlaw": 29319, + "ĠAudit": 29320, + "ĠBoom": 29321, + "Ġcrim": 29322, + "asured": 29323, + "ĠApps": 29324, + "ĠKush": 29325, + "onica": 29326, + "Ġamput": 29327, + "signed": 29328, + "ĠMEN": 29329, + "ĠRosenberg": 29330, + "Ġvide": 29331, + "ĠDirection": 29332, + "Ġfountain": 29333, + "TW": 29334, + "ĠCARE": 29335, + "Ġreassured": 29336, + "Food": 29337, + "Ġdepressing": 29338, + "ĠWhilst": 29339, + "reatment": 29340, + "Ġspelled": 29341, + "Ġhipp": 29342, + "ĠPeach": 29343, + "hound": 29344, + "Harry": 29345, + "Ġcatalogue": 29346, + "ĠCommun": 29347, + "Ġnurture": 29348, + "rush": 29349, + "ĠPopulation": 29350, + "ĠNTS": 29351, + "ĠElectrical": 29352, + "rounded": 29353, + "Ġblending": 29354, + "Ġ223": 29355, + "alities": 29356, + "ilation": 29357, + "eas": 29358, + "estate": 29359, + "Ġnarrowing": 29360, + "ĠTreasure": 29361, + "192": 29362, + "Ġwhims": 29363, + "Ġrobber": 29364, + "Ġsoaked": 29365, + "nian": 29366, + "Ġcongest": 29367, + "ĠYosemite": 29368, + "notes": 29369, + "icer": 29370, + "ĠGuardians": 29371, + "ĠFrozen": 29372, + "Ġ187": 29373, + "Ġhandcuffs": 29374, + "Someone": 29375, + "Ġenshr": 29376, + "gency": 29377, + "ĠCube": 29378, + "Ġprinters": 29379, + "Ġundercut": 29380, + "ĠSolution": 29381, + "rosis": 29382, + "ĠHumanity": 29383, + "Ġsucks": 29384, + "ĠSick": 29385, + "Tax": 29386, + "Ġtablespoon": 29387, + "ĠTrin": 29388, + "ĠArchive": 29389, + "Mom": 29390, + 
"ĠSAY": 29391, + "Ġdrifting": 29392, + "ĠFarage": 29393, + "Ġforging": 29394, + "WM": 29395, + "ĠEleanor": 29396, + "USH": 29397, + "Ġemph": 29398, + "Ġcareless": 29399, + "Ġspew": 29400, + "Ġinsensitive": 29401, + "Ġawhile": 29402, + "Ġcit": 29403, + "opened": 29404, + "ĠFem": 29405, + "Ġvapor": 29406, + "Ġdownt": 29407, + "ylene": 29408, + "Ġclut": 29409, + "Ġculp": 29410, + "1990": 29411, + "Ġdisgruntled": 29412, + "Students": 29413, + "uttering": 29414, + "gyn": 29415, + "vre": 29416, + "Ġrapes": 29417, + "division": 29418, + "ĠCalendar": 29419, + "tal": 29420, + "icts": 29421, + "caliber": 29422, + "ĠFighters": 29423, + "ĠUnc": 29424, + "163": 29425, + "ĠRogue": 29426, + "Ġregistrations": 29427, + "Ġundermines": 29428, + "ĠPunch": 29429, + "Ġdramas": 29430, + "176": 29431, + "Ġslider": 29432, + "ĠFlore": 29433, + "ر": 29434, + "Ġbru": 29435, + "inelli": 29436, + "Ġdisparities": 29437, + "ا": 29438, + "Ġreferrals": 29439, + "ĠCharges": 29440, + "Ġbreeds": 29441, + "ĠMEP": 29442, + "288": 29443, + "Ġmouths": 29444, + "Ġsideways": 29445, + "Ġbelievers": 29446, + "ppard": 29447, + "Ġhotter": 29448, + "Ġunderestimated": 29449, + "Ġjelly": 29450, + "525": 29451, + "ĠCMS": 29452, + "ĠWeiner": 29453, + "Ġguarding": 29454, + "Ġampl": 29455, + "ĠKidd": 29456, + "UF": 29457, + "orient": 29458, + "max": 29459, + "Ash": 29460, + "Ġwander": 29461, + "Ġ..........": 29462, + "ĠDempsey": 29463, + "ĠToken": 29464, + "chat": 29465, + "Justin": 29466, + "equipped": 29467, + "ĠBI": 29468, + "Ġsins": 29469, + "Ġnond": 29470, + "ursion": 29471, + "Ġcoc": 29472, + "Ġmailing": 29473, + "ĠArchitect": 29474, + "Ġhaunting": 29475, + "Ġpont": 29476, + "Ġascertain": 29477, + "Ġwig": 29478, + "Ġskysc": 29479, + "Ġarg": 29480, + "ĠItalians": 29481, + "/?": 29482, + "Ġ----------------------------------------------------------------": 29483, + "ĠPrecision": 29484, + "EPA": 29485, + "Ġhotly": 29486, + "Ġcircumvent": 29487, + "ĠEcc": 29488, + "Ġmerch": 29489, + "akov": 29490, + "Ġunab": 29491, + "heres": 29492, + "Ġsubcommittee": 29493, + "ĠDiscuss": 29494, + "ĠChallenger": 29495, + "crafted": 29496, + "Ġcanine": 29497, + "osphere": 29498, + "Ġspider": 29499, + "Ġteachings": 29500, + "atos": 29501, + "Ġuniversally": 29502, + "Ġturbine": 29503, + "ĠLO": 29504, + "ĠMAG": 29505, + "Ġpassers": 29506, + "Ġroundup": 29507, + "Ġdenounce": 29508, + "ĠSpiegel": 29509, + "until": 29510, + "Ġshaved": 29511, + "Ġdisdain": 29512, + "Nazi": 29513, + "Ġnewfound": 29514, + "Ġspontaneous": 29515, + "Ġmash": 29516, + "ĠDispatch": 29517, + "Ġsunrise": 29518, + "ogged": 29519, + "Ġfuss": 29520, + "Ġeas": 29521, + "acci": 29522, + "ĠTarg": 29523, + "Ġhash": 29524, + "lict": 29525, + "Ġmisc": 29526, + "ĠSched": 29527, + "guy": 29528, + "linger": 29529, + "warm": 29530, + "ipel": 29531, + "ĠGork": 29532, + "Ġdispatcher": 29533, + "Ġ315": 29534, + "Ġfinely": 29535, + "Ġreliably": 29536, + "Ġrupt": 29537, + "Ġnegligent": 29538, + "Ġendorsements": 29539, + "ĠOrient": 29540, + "Ġelectro": 29541, + "haired": 29542, + "Ġphysique": 29543, + "wine": 29544, + "Ġadolescents": 29545, + "Ġ184": 29546, + "alth": 29547, + "Ġvalidated": 29548, + "izzard": 29549, + "ĠPeck": 29550, + "Ġemblem": 29551, + "status": 29552, + "ĠJungle": 29553, + "orius": 29554, + "Ġeccentric": 29555, + "Ġfolding": 29556, + "poor": 29557, + "ĠTHC": 29558, + "appers": 29559, + "Ġscripted": 29560, + "239": 29561, + "ĠPreferred": 29562, + "digital": 29563, + "Ġsharper": 29564, + "Ġportrays": 29565, + "rative": 29566, + "238": 29567, + "Ġ183": 29568, + "Ġuneasy": 29569, + "ĠRI": 
29570, + "Ġvil": 29571, + "171": 29572, + "Ġspoil": 29573, + "ĠPricing": 29574, + "ĠHardware": 29575, + "Ġ188": 29576, + "Ġhorrendous": 29577, + "Ġostensibly": 29578, + "nah": 29579, + "Ġgadget": 29580, + "ADS": 29581, + "coat": 29582, + "Ġexhausting": 29583, + "Ġdraining": 29584, + "arate": 29585, + "ĠBulgarian": 29586, + "emo": 29587, + "Ġhier": 29588, + "Ġguitars": 29589, + "ieties": 29590, + "assed": 29591, + "ĠYaz": 29592, + "Ġaggress": 29593, + "ĠBG": 29594, + "vik": 29595, + "Ġneatly": 29596, + "Ġpixel": 29597, + "Ġintimacy": 29598, + "ĠRug": 29599, + "Ġ512": 29600, + "Ġnarrated": 29601, + "Ġmast": 29602, + "ĠNos": 29603, + "ĠHung": 29604, + "reciation": 29605, + "ĠChandra": 29606, + "Ġbios": 29607, + "ĠEnded": 29608, + "lique": 29609, + "ĠCambod": 29610, + "Ġworrisome": 29611, + "ĠEQ": 29612, + "Ġnovelist": 29613, + "ĠDynamic": 29614, + "ĠMIC": 29615, + "Ġdisposed": 29616, + "Ġbrackets": 29617, + "Ġhaircut": 29618, + "ĠLana": 29619, + "Ġlull": 29620, + "Ġbillboard": 29621, + "ĠReverend": 29622, + "ĠNAV": 29623, + "borgh": 29624, + "Ġadrenaline": 29625, + "Ġseeming": 29626, + "ĠPCB": 29627, + "ĠBridgewater": 29628, + "Ġsquirrel": 29629, + "262": 29630, + "write": 29631, + "Ġstabilization": 29632, + "wild": 29633, + "Ġsecession": 29634, + "Ġpacket": 29635, + "AMES": 29636, + "licted": 29637, + "Ġmalnutrition": 29638, + "claimed": 29639, + "Ġcharred": 29640, + "Ġtragically": 29641, + "Published": 29642, + "Ġrepealed": 29643, + "ĠSawyer": 29644, + "ĠMormon": 29645, + "resolution": 29646, + "ĠSaud": 29647, + "Henry": 29648, + "Ġdiscontin": 29649, + "Ġsnag": 29650, + "danger": 29651, + "Ġmixes": 29652, + "Ġupbringing": 29653, + "Ġlimb": 29654, + "ĠFantastic": 29655, + "Sim": 29656, + "ĠAugustine": 29657, + "ĠGreeks": 29658, + "cod": 29659, + "ĠHistorically": 29660, + "mire": 29661, + "register": 29662, + "ĠKund": 29663, + "Ġdebilitating": 29664, + "Chat": 29665, + "ĠTau": 29666, + "ï": 29667, + "lower": 29668, + "pie": 29669, + "Ġ430": 29670, + "Ġnascent": 29671, + "Ġ375": 29672, + "Ġbum": 29673, + "WI": 29674, + "Netflix": 29675, + "whether": 29676, + "Ġdearly": 29677, + "eff": 29678, + "PRES": 29679, + "Ġlandmarks": 29680, + "Ġculminating": 29681, + "Ġmigrate": 29682, + "balanced": 29683, + "Ġregulars": 29684, + "Ġmodification": 29685, + "Ġdips": 29686, + "ĠRedmond": 29687, + "ationally": 29688, + "atsu": 29689, + "Ġphilosophical": 29690, + "Ġtyping": 29691, + "Ġunreal": 29692, + "Ġboiled": 29693, + "Ġblight": 29694, + "Ġdru": 29695, + "ĠGaddafi": 29696, + "Ġnour": 29697, + "Ġsequential": 29698, + "Ġaugment": 29699, + "ĠEuras": 29700, + "ĠWiley": 29701, + "endar": 29702, + "Ġacronym": 29703, + "esteem": 29704, + "ĠMajesty": 29705, + "Ġgrips": 29706, + "Ġobsolete": 29707, + "nos": 29708, + "Made": 29709, + "ogie": 29710, + "ĠLiver": 29711, + "ĠDonetsk": 29712, + "Ġdynam": 29713, + "tel": 29714, + "bring": 29715, + "Ġknit": 29716, + "Ġfirepower": 29717, + "Ġprepaid": 29718, + "ĠRaphael": 29719, + "Ġsensing": 29720, + "720": 29721, + "WN": 29722, + "Nor": 29723, + "puted": 29724, + "Ġbureaucrats": 29725, + "ĠAdjust": 29726, + "Ġintensely": 29727, + "Ġsunscreen": 29728, + "Ho": 29729, + "ĠYelp": 29730, + "ĠPU": 29731, + "ĠSerge": 29732, + "ĠCyp": 29733, + "ELF": 29734, + "ĠGuns": 29735, + "Ġteamwork": 29736, + "ĠBib": 29737, + "ĠMaintenance": 29738, + "perate": 29739, + "Ġwiping": 29740, + "Ġcharcoal": 29741, + "ordan": 29742, + "International": 29743, + "Ġbehaving": 29744, + "Ġsoftened": 29745, + "ĠIncreased": 29746, + "Ġunfl": 29747, + "470": 29748, + "Ġinformative": 29749, + "Ġnovelty": 
29750, + "Ġavoidance": 29751, + "Ġteasing": 29752, + "matic": 29753, + "Ġmaid": 29754, + "ĠPell": 29755, + "Ġcounterterrorism": 29756, + "ĠGabe": 29757, + "ications": 29758, + "ĠConnection": 29759, + "ĠInquiry": 29760, + "isin": 29761, + "orama": 29762, + "Ġcorpse": 29763, + "Ġpractitioner": 29764, + "itto": 29765, + "UA": 29766, + "Ġforestry": 29767, + "Ġlic": 29768, + "Ġrevolves": 29769, + "Ġcalculating": 29770, + "Ġpuppet": 29771, + "ulously": 29772, + "ĠPebble": 29773, + "Dep": 29774, + "Ġupholding": 29775, + "Ġcarving": 29776, + "Ġwartime": 29777, + "Ġenvy": 29778, + "Ġencro": 29779, + "ĠPunk": 29780, + "ĠAdminist": 29781, + "ucha": 29782, + "Ġbattleground": 29783, + "Ġlol": 29784, + "uable": 29785, + "Ġunheard": 29786, + "ĠSpur": 29787, + "phony": 29788, + "Ġcarc": 29789, + "ĠSut": 29790, + "Ġpollutants": 29791, + "Cr": 29792, + "Ġvigorous": 29793, + "355": 29794, + "ĠMarriage": 29795, + "Ġstaffed": 29796, + "fecture": 29797, + "ĠArabs": 29798, + "supported": 29799, + "Ġmanpower": 29800, + "ĠSatellite": 29801, + "None": 29802, + "Ġqueues": 29803, + "Ġinsightful": 29804, + "Ġinterchange": 29805, + "Rel": 29806, + "Ġsolemn": 29807, + "Ġsmuggled": 29808, + "upt": 29809, + "Ġ171": 29810, + "Ġparallels": 29811, + "intelligence": 29812, + "punk": 29813, + "Ġrecycle": 29814, + "Ġdecorative": 29815, + "Ġshar": 29816, + "arrell": 29817, + "iances": 29818, + "ĠBolivia": 29819, + "Ġstrengthens": 29820, + "430": 29821, + "Ġhardships": 29822, + "Ġsignalling": 29823, + "Ġunthinkable": 29824, + "READ": 29825, + "Ġtad": 29826, + "picked": 29827, + "Ġarmor": 29828, + "Ġcores": 29829, + "ĠMatrix": 29830, + "Ġdj": 29831, + "Ġevolutionary": 29832, + "ĠBermuda": 29833, + "OE": 29834, + "organized": 29835, + "Ġrelentlessly": 29836, + "sol": 29837, + "ĠMamm": 29838, + "Ġpounding": 29839, + "Weather": 29840, + "Ġrab": 29841, + "Ġsweets": 29842, + "funding": 29843, + "ĠHUD": 29844, + "ĠSoldier": 29845, + "reed": 29846, + "released": 29847, + "Ġcontainment": 29848, + "alid": 29849, + "ĠNikon": 29850, + "Ġcervical": 29851, + "Ġign": 29852, + "Ġalias": 29853, + "Ġoptimized": 29854, + "Ġasserting": 29855, + "ĠAFTER": 29856, + "Ġflatt": 29857, + "Ġdinosaur": 29858, + "ĠRefugees": 29859, + "ĠAnch": 29860, + "Ġadjustable": 29861, + "Ġroaring": 29862, + "Ġpilgrimage": 29863, + "Ġcowboy": 29864, + "Ġentails": 29865, + "ractions": 29866, + "EY": 29867, + "undy": 29868, + "ĠKuh": 29869, + "inges": 29870, + "ĠTerra": 29871, + "ĠEscape": 29872, + "Ġrundown": 29873, + "Ġstriped": 29874, + "KN": 29875, + "ocations": 29876, + "IDENT": 29877, + "IGH": 29878, + "Ġavoids": 29879, + "Moh": 29880, + "ĠLS": 29881, + "lbs": 29882, + "ĠAttempt": 29883, + "Ġtriangle": 29884, + "Ġclimax": 29885, + "Ġhp": 29886, + "Ġallot": 29887, + "learning": 29888, + "ĠJFK": 29889, + "Justice": 29890, + "OUT": 29891, + "ĠHER": 29892, + "ĠLect": 29893, + "Ġtrench": 29894, + "edar": 29895, + "Ġreservoirs": 29896, + "uid": 29897, + "rf": 29898, + "162": 29899, + "Ġinterfered": 29900, + "Ġemit": 29901, + "these": 29902, + "444": 29903, + "ĠLeather": 29904, + "essing": 29905, + "ĠEighth": 29906, + "uckle": 29907, + "Breaking": 29908, + "Ġunresolved": 29909, + "Ġgoose": 29910, + "252": 29911, + "platform": 29912, + "atus": 29913, + "Ġcomplexion": 29914, + "ĠBUS": 29915, + "Ġstruct": 29916, + "middle": 29917, + "Sat": 29918, + "ĠWHERE": 29919, + "LB": 29920, + "redible": 29921, + "vered": 29922, + "Louis": 29923, + "ĠBaz": 29924, + "Eye": 29925, + "safety": 29926, + "Ġhypothetical": 29927, + "Ġbowel": 29928, + "Ġuntouched": 29929, + "312": 29930, + 
"ĠPric": 29931, + "Ġastounding": 29932, + "meet": 29933, + "Aaron": 29934, + "ĠWoo": 29935, + "236": 29936, + "ĠShape": 29937, + "Ġdrifted": 29938, + "Ġtile": 29939, + "ĠGrim": 29940, + "Ġundeniable": 29941, + "Ġ..": 29942, + "Ġradius": 29943, + "Ġovarian": 29944, + "ĠSeriously": 29945, + "verning": 29946, + "Ġassertions": 29947, + "oxic": 29948, + "231": 29949, + "ĠViz": 29950, + "Jackson": 29951, + "ĠSno": 29952, + "Ġboycot": 29953, + "okingly": 29954, + "ousse": 29955, + "proclaimed": 29956, + "Ġblazing": 29957, + "Ġinefficient": 29958, + "Ġfig": 29959, + "Ġbooze": 29960, + "259": 29961, + "agus": 29962, + "statement": 29963, + "Ġlocom": 29964, + "Ġtacos": 29965, + "Ġmemos": 29966, + "gender": 29967, + "ĠOrt": 29968, + "263": 29969, + "Ġintervening": 29970, + "Soc": 29971, + "University": 29972, + "ĠPis": 29973, + "ĠReturns": 29974, + "ĠPAN": 29975, + "Ġultrasound": 29976, + "Ġcoherent": 29977, + "tracking": 29978, + "rieved": 29979, + "383": 29980, + "Ġqualitative": 29981, + "uld": 29982, + "ĠGiovanni": 29983, + "Ġstorylines": 29984, + "Ġdarkest": 29985, + "Ġvelvet": 29986, + "RIP": 29987, + "Ġcompatibility": 29988, + "Ġtroll": 29989, + "CN": 29990, + "Found": 29991, + "ĠOu": 29992, + "Ġtease": 29993, + "Ġvested": 29994, + "Ġprovocation": 29995, + "Ġimprovised": 29996, + "Ġactivation": 29997, + "unte": 29998, + "ĠMonteneg": 29999, + "ĠJOHN": 30000, + "ĠReact": 30001, + "Ġpolluted": 30002, + "217": 30003, + "Ġmushroom": 30004, + "Ġdisconnected": 30005, + "ĠVoices": 30006, + "asu": 30007, + "Ġsensory": 30008, + "REE": 30009, + "Ġmonarchy": 30010, + "Ġ173": 30011, + "doing": 30012, + "involved": 30013, + "ĠJonah": 30014, + "Ġtoxins": 30015, + "Ġtv": 30016, + "Ġacademia": 30017, + "IQ": 30018, + "Mor": 30019, + "ĠStraight": 30020, + "ĠRN": 30021, + "ĠâĹı": 30022, + "Ġpear": 30023, + "187": 30024, + "Ġendeavors": 30025, + "ĠTurbo": 30026, + "Ġducks": 30027, + "ĠRamsay": 30028, + "Ġoutpatient": 30029, + "Ġcomprehend": 30030, + "UNE": 30031, + "Ġbriefings": 30032, + "total": 30033, + "Ġmigr": 30034, + "always": 30035, + "Ġmoot": 30036, + "ĠRider": 30037, + "Ġbiblical": 30038, + "Form": 30039, + "Ġcurry": 30040, + "Ġexquisite": 30041, + "385": 30042, + "244": 30043, + "Ġattendants": 30044, + "Ġcabinets": 30045, + "nton": 30046, + "Baby": 30047, + "Honestly": 30048, + "ĠFIRE": 30049, + "211": 30050, + "itech": 30051, + "ĠProsper": 30052, + "Ġchops": 30053, + "odic": 30054, + "Rod": 30055, + "job": 30056, + "orset": 30057, + "ĠAry": 30058, + "obic": 30059, + "ĠNil": 30060, + "isable": 30061, + "Ġorche": 30062, + "Ġtrivial": 30063, + "ĠZy": 30064, + "ĠXP": 30065, + "Ġendorsing": 30066, + "ĠLIM": 30067, + "adish": 30068, + "237": 30069, + "ĠLaws": 30070, + "heid": 30071, + "ĠSignature": 30072, + "ĠVern": 30073, + "ĠBland": 30074, + "ansk": 30075, + "Ġrepository": 30076, + "ĠPetra": 30077, + "Enter": 30078, + "Ġtruths": 30079, + "Ġbordering": 30080, + "Ġpenn": 30081, + "Ġsimplified": 30082, + "zn": 30083, + "ĠCree": 30084, + "Ġ181": 30085, + "Hi": 30086, + "ĠGreenberg": 30087, + "Ġprematurely": 30088, + "ĠSass": 30089, + "Ġwrecked": 30090, + "Ġheinous": 30091, + "415": 30092, + "Turn": 30093, + "zl": 30094, + "amental": 30095, + "ĠBraz": 30096, + "fing": 30097, + "ĠAngle": 30098, + "ĠPhantom": 30099, + "agra": 30100, + "ĠShack": 30101, + "Ġhomegrown": 30102, + "Ġalright": 30103, + "AME": 30104, + "ĠKN": 30105, + "Ġclicks": 30106, + "Ġmanned": 30107, + "ĠScope": 30108, + "Ġextras": 30109, + "Ġclinicians": 30110, + "321": 30111, + "African": 30112, + "Ġjuices": 30113, + "Ġrefere": 30114, + "****": 
30115, + "ambling": 30116, + "since": 30117, + "Ġvoic": 30118, + "QB": 30119, + "ĠAtmospheric": 30120, + "Mat": 30121, + "Ġperpetrated": 30122, + "ĠSteps": 30123, + "Fit": 30124, + "Ġsilenced": 30125, + "Ġbonded": 30126, + "Ġquantify": 30127, + "Houston": 30128, + "ocracy": 30129, + "Ġfreeing": 30130, + "pipe": 30131, + "corn": 30132, + "rones": 30133, + "ooked": 30134, + "ĠSuz": 30135, + "Ġunaccount": 30136, + "196": 30137, + "Ġlogos": 30138, + "ĠFurious": 30139, + "ĠSpart": 30140, + "urst": 30141, + "itri": 30142, + "ĠZub": 30143, + "ĠActual": 30144, + "Ġslee": 30145, + "Ġgag": 30146, + "Ġmetabolism": 30147, + "ĠDesigned": 30148, + "Ġpedigree": 30149, + "Ġcoolest": 30150, + "âĿ": 30151, + "iuses": 30152, + "ĠYellowstone": 30153, + "Ġinformant": 30154, + "Ġushered": 30155, + "ĠGarg": 30156, + "thel": 30157, + "Hop": 30158, + "Ġrepetitive": 30159, + "flag": 30160, + "Ġunmarked": 30161, + "ĠBrave": 30162, + "Ġincur": 30163, + "reading": 30164, + "ppel": 30165, + "lah": 30166, + "ateurs": 30167, + "286": 30168, + "ĠAtomic": 30169, + "Ġappliance": 30170, + ")'": 30171, + "traditional": 30172, + "Ġdads": 30173, + "Ġregimen": 30174, + "Ġinfrared": 30175, + "Ġdotted": 30176, + "Ġtails": 30177, + "Ġhorrors": 30178, + "uments": 30179, + "Ġdub": 30180, + "lighting": 30181, + "Ġunearthed": 30182, + "assisted": 30183, + "ĠSpiel": 30184, + "trial": 30185, + "Ġpersever": 30186, + "MAX": 30187, + "Ġicing": 30188, + "Energy": 30189, + "Ġ1943": 30190, + "move": 30191, + "Error": 30192, + "Ġliter": 30193, + "ĠCly": 30194, + "Ari": 30195, + "Ġgranite": 30196, + "Ġcropped": 30197, + "ĠRD": 30198, + "ĠREM": 30199, + "TX": 30200, + "Ġdispleasure": 30201, + "ĠComfort": 30202, + "Ġunsettling": 30203, + "Ġscratching": 30204, + "866": 30205, + "eton": 30206, + "560": 30207, + "Ġcommonplace": 30208, + "Ġreproduced": 30209, + "ggie": 30210, + "Ġschooling": 30211, + "Ġreprim": 30212, + "Ġdarling": 30213, + "huge": 30214, + "ĠDante": 30215, + "cp": 30216, + "heastern": 30217, + "Ġeduc": 30218, + "Digital": 30219, + "Ġwrath": 30220, + "Ġwatering": 30221, + "ĠTail": 30222, + "Ġdegradation": 30223, + "530": 30224, + "usive": 30225, + "ĠXu": 30226, + "ĠAH": 30227, + "Ġclassy": 30228, + "ĠSET": 30229, + "Ġcriminally": 30230, + "dependent": 30231, + "ĠAlps": 30232, + "Ġnotwithstanding": 30233, + "Ġfamiliarity": 30234, + "ĠAPP": 30235, + "aurus": 30236, + "gments": 30237, + "Mid": 30238, + "Ġepilepsy": 30239, + "Ġresemblance": 30240, + "brush": 30241, + "Ġ333": 30242, + "Ġliberated": 30243, + "ĠBeng": 30244, + "ĠLans": 30245, + "Ġtraff": 30246, + "ihu": 30247, + "establish": 30248, + "Ġcort": 30249, + "Rick": 30250, + "Ġplugged": 30251, + "onement": 30252, + "ĠAccounting": 30253, + "Ġreconstruct": 30254, + "Pop": 30255, + "Ġincapable": 30256, + "aho": 30257, + "ĠDexter": 30258, + "Ġpitted": 30259, + "Ġbathing": 30260, + "Ġdun": 30261, + "Ġexplor": 30262, + "ĠMidnight": 30263, + "Ġactiv": 30264, + "iann": 30265, + "likely": 30266, + "acons": 30267, + "owicz": 30268, + "Ġnegativity": 30269, + "Ġfreel": 30270, + "ewitness": 30271, + "Ġinj": 30272, + "Stephen": 30273, + "Ġshredded": 30274, + "Ġprepar": 30275, + "Script": 30276, + "Ġcorrectional": 30277, + "Ġcommits": 30278, + "hai": 30279, + "activity": 30280, + "Imp": 30281, + "Ġstumble": 30282, + "Ġcache": 30283, + "ĠPromise": 30284, + "Ġprecinct": 30285, + "Ġmulticultural": 30286, + "Ġsubstitutes": 30287, + "Ġshortened": 30288, + "ovable": 30289, + "Ġfasting": 30290, + "Ġinfused": 30291, + "Ġbulldo": 30292, + "alm": 30293, + "Ġadjoining": 30294, + "Ġmultiplayer": 30295, + 
"ĠAlien": 30296, + "Ġpund": 30297, + "ethyl": 30298, + "Ġbliss": 30299, + "ĠDecision": 30300, + "Ġbab": 30301, + "Ġangrily": 30302, + "another": 30303, + "oled": 30304, + "ainted": 30305, + "ĠPriest": 30306, + "Ġdraped": 30307, + "ĠPersonally": 30308, + "Ġstomp": 30309, + "ĠWolfgang": 30310, + "Ġoste": 30311, + "itches": 30312, + "Ġhoops": 30313, + "ĠJO": 30314, + "Ġsche": 30315, + "ĠZan": 30316, + "Ġcleans": 30317, + "Ġclimbs": 30318, + "Ġelectronically": 30319, + "243": 30320, + "ocy": 30321, + "gall": 30322, + "ĠREAL": 30323, + "Ġmurky": 30324, + "Ġmodernization": 30325, + "tub": 30326, + "Really": 30327, + "Ġlax": 30328, + "Ġdoubted": 30329, + "yden": 30330, + "ĠPrevent": 30331, + "UTERS": 30332, + "Ġoverride": 30333, + "ĠSAF": 30334, + "Ġcoun": 30335, + "Ġexcerpts": 30336, + "Ġmotivations": 30337, + "Ġdecency": 30338, + "Ġastronomers": 30339, + "orical": 30340, + "Ġaltering": 30341, + "Ġ232": 30342, + "described": 30343, + "omic": 30344, + "Ġexh": 30345, + "Ġknocks": 30346, + "ĠRiot": 30347, + "ĠPurs": 30348, + "equal": 30349, + "pleting": 30350, + "llan": 30351, + "ĠSOL": 30352, + "iator": 30353, + "ILE": 30354, + "ĠWM": 30355, + "Ġdefences": 30356, + "Ġforearm": 30357, + "Toronto": 30358, + "526": 30359, + "Ġacne": 30360, + "Ġthirteen": 30361, + "itiz": 30362, + "akable": 30363, + "charges": 30364, + "Ġinaction": 30365, + "Ġbred": 30366, + "Ġdeficiency": 30367, + "Ġintrigue": 30368, + "opoly": 30369, + "ĠCamer": 30370, + "ĠMelt": 30371, + "Ġunlawfully": 30372, + "Ġpenetrate": 30373, + "ĠUsed": 30374, + "ĠDirty": 30375, + "Ġexcerpt": 30376, + "ĠYen": 30377, + "ĠCARD": 30378, + "Ġcher": 30379, + "ĠChallenges": 30380, + "ieves": 30381, + "Ġambush": 30382, + "Data": 30383, + "eeks": 30384, + "Ġgiveaway": 30385, + "Ġpawn": 30386, + "Ġtransf": 30387, + "renched": 30388, + "Ġmoderately": 30389, + "Ġnumbered": 30390, + "ĠIntegrity": 30391, + "ĠHOU": 30392, + "ĠHDMI": 30393, + "Royal": 30394, + "LT": 30395, + "ĠDirk": 30396, + "izon": 30397, + "Ġ227": 30398, + "Ġdisagrees": 30399, + "ĠNinth": 30400, + "Ġincrement": 30401, + "ĠGlory": 30402, + "suff": 30403, + "Ġartery": 30404, + "ĠEmployee": 30405, + "bum": 30406, + "ĠEditorial": 30407, + "Kh": 30408, + "ĠPremiere": 30409, + "ĠWeld": 30410, + "ĠIncluded": 30411, + "Ġmathematical": 30412, + "Ġexponentially": 30413, + "Ġhandwritten": 30414, + "ĠMAS": 30415, + "Ġindiscrim": 30416, + "Ġnutrient": 30417, + "ĠSelection": 30418, + "Ġ219": 30419, + "hyd": 30420, + "Ġdeton": 30421, + "æ": 30422, + "dark": 30423, + "ĠFidel": 30424, + "Ġmonkeys": 30425, + "Ġnutritious": 30426, + "Ġheadlights": 30427, + "oller": 30428, + "piring": 30429, + "ĠDefenders": 30430, + "Ġdrown": 30431, + "elong": 30432, + "Ġfloats": 30433, + "graduate": 30434, + "Ġprosper": 30435, + "ĠNamed": 30436, + "ĠEating": 30437, + "ECK": 30438, + "establishment": 30439, + "XM": 30440, + "Ġsoaking": 30441, + "278": 30442, + "Ġlistener": 30443, + "Ġsimultaneous": 30444, + "olutions": 30445, + "payer": 30446, + "Ġcustomize": 30447, + "ĠROCK": 30448, + "Ġaltar": 30449, + "ĠExercise": 30450, + "anky": 30451, + "ĠProfession": 30452, + "sever": 30453, + "ĠMerchant": 30454, + "RF": 30455, + "ĠCombat": 30456, + "Ġlegality": 30457, + "fledged": 30458, + "Ġdiapers": 30459, + "lves": 30460, + "Ġlur": 30461, + "Ġignores": 30462, + "ĠProtocol": 30463, + "Ġrepresentations": 30464, + "ĠBlumenthal": 30465, + "ĠLime": 30466, + "romptu": 30467, + "Ġbesieged": 30468, + "dl": 30469, + "Ġsighting": 30470, + "ĠParm": 30471, + "ĠServer": 30472, + "ĠBenghazi": 30473, + "estival": 30474, + "Ġplaylist": 30475, + 
"ĠUng": 30476, + "ĠQuantum": 30477, + "Ġcompromises": 30478, + "ĠSurvivor": 30479, + "ĠMobility": 30480, + "Ġbounty": 30481, + "ophers": 30482, + "ISA": 30483, + "need": 30484, + "uese": 30485, + "Ġorn": 30486, + "218": 30487, + "Ġ530": 30488, + "Ġbuddies": 30489, + "Ġagendas": 30490, + "ĠFeldman": 30491, + "ĠÃĸ": 30492, + "ĠBMC": 30493, + "ĠServe": 30494, + "Ent": 30495, + "ĠKH": 30496, + "ĠINT": 30497, + "Ġlittered": 30498, + "Ġvisitation": 30499, + "mist": 30500, + "Ġdupl": 30501, + "Ġrouted": 30502, + "ĠAmount": 30503, + "Dev": 30504, + "ĠConv": 30505, + "Ġslams": 30506, + "ĠVeterinary": 30507, + "bold": 30508, + "Ġ186": 30509, + "ĠDOT": 30510, + "builder": 30511, + "Ġdecay": 30512, + "ĠHemp": 30513, + "pelled": 30514, + "Ġmankind": 30515, + "Tonight": 30516, + "Ġeffortlessly": 30517, + "ĠBUT": 30518, + "Ġhostilities": 30519, + "formerly": 30520, + "alon": 30521, + "ĠCrash": 30522, + "humane": 30523, + "Ġmayhem": 30524, + "ĠBudd": 30525, + "Ġdisinformation": 30526, + "Ġ226": 30527, + "Ġprototypes": 30528, + "__": 30529, + "IVERS": 30530, + "izzy": 30531, + "ĠMight": 30532, + "ĠPip": 30533, + "pour": 30534, + "INO": 30535, + "ĠLL": 30536, + "Ġwiret": 30537, + "Ġresorted": 30538, + "ĠTanaka": 30539, + "ĠDOES": 30540, + "Earlier": 30541, + "HO": 30542, + "Ġmoniker": 30543, + "ĠFang": 30544, + "ĠHua": 30545, + "bered": 30546, + "adding": 30547, + "194": 30548, + "STR": 30549, + ".\")": 30550, + "cop": 30551, + "ĠFlags": 30552, + "ĠColleges": 30553, + "ĠUz": 30554, + "Ġsparks": 30555, + "Ġparadox": 30556, + "Marie": 30557, + "Strong": 30558, + "Ġstrawberry": 30559, + "Ġnurturing": 30560, + "Ġfax": 30561, + "Tor": 30562, + "killer": 30563, + "burse": 30564, + "Ġattachments": 30565, + "Ġpup": 30566, + "Ġexhaustion": 30567, + "Ġwhisky": 30568, + "isu": 30569, + "ologically": 30570, + "iership": 30571, + "Ġlamps": 30572, + "Ġshuff": 30573, + "Ġcentralized": 30574, + "ĠNeedless": 30575, + "Ġgrenade": 30576, + "Ġrouter": 30577, + "Ġoptics": 30578, + "ivering": 30579, + "Ġpioneers": 30580, + "ĠHug": 30581, + "Ġhandguns": 30582, + "010": 30583, + "Ġbailed": 30584, + "uana": 30585, + "197": 30586, + "Ġdistorted": 30587, + "ĠEssentially": 30588, + "ĠSilent": 30589, + "Ġcomparative": 30590, + "Music": 30591, + "ĠMUS": 30592, + "Bur": 30593, + "ĠComet": 30594, + "ĠWinchester": 30595, + "IGN": 30596, + "Mod": 30597, + "ĠCandidate": 30598, + "Ġdysfunctional": 30599, + "ĠCeleb": 30600, + "Ġhitch": 30601, + "api": 30602, + "Ġidiot": 30603, + "Ġunsupported": 30604, + "gat": 30605, + "inker": 30606, + "Ġredevelop": 30607, + "Ġdwind": 30608, + "Ġforgetting": 30609, + "ĠRost": 30610, + "Ġremembrance": 30611, + "Na": 30612, + "mopolitan": 30613, + "Ġberries": 30614, + "Ġmarital": 30615, + "Vol": 30616, + "ĠClosing": 30617, + "ĠHindus": 30618, + "itism": 30619, + "Ġrover": 30620, + "Ġmysteries": 30621, + "ĠNig": 30622, + "ucing": 30623, + "Ġfabrication": 30624, + "Ġgarments": 30625, + "Ġwield": 30626, + "ĠCompton": 30627, + "357": 30628, + "Ġoxide": 30629, + "chron": 30630, + "ĠThought": 30631, + "Ġcomed": 30632, + "ĠEpstein": 30633, + "ĠBART": 30634, + "orative": 30635, + "ĠKahn": 30636, + "adan": 30637, + "APH": 30638, + "cum": 30639, + "Ġloophole": 30640, + "ĠGoPro": 30641, + "osit": 30642, + "Ġspecification": 30643, + "ĠAPR": 30644, + "Ġdrains": 30645, + "Ġconserve": 30646, + "ĠMorse": 30647, + "Ġcalorie": 30648, + "ĠCheney": 30649, + "station": 30650, + "Ġevangel": 30651, + "Ġspraying": 30652, + "lections": 30653, + "Ġenclosure": 30654, + "Ġcommanded": 30655, + "ĠOrganizations": 30656, + "Ġimb": 30657, + 
"mins": 30658, + "ĠTobias": 30659, + "Ve": 30660, + "ĠNau": 30661, + "183": 30662, + "ĠGuantanamo": 30663, + "173": 30664, + "Ġrequisite": 30665, + "Ġderivative": 30666, + "Ġpopulism": 30667, + "Ġcultivated": 30668, + "lord": 30669, + "uler": 30670, + "ĠDEA": 30671, + "inally": 30672, + "Ġdemonstr": 30673, + "trip": 30674, + "ĠFirefox": 30675, + "246": 30676, + "confirmed": 30677, + "Anne": 30678, + "Ġtamp": 30679, + "ĠHousehold": 30680, + "amous": 30681, + "Meet": 30682, + "Ġdashed": 30683, + "pire": 30684, + "Ġinex": 30685, + "Ġloosen": 30686, + "272": 30687, + "famous": 30688, + "ĠHeard": 30689, + "Ġhindsight": 30690, + "Ġdepot": 30691, + "ĠCutting": 30692, + "ĠMouse": 30693, + "Ġgeological": 30694, + "number": 30695, + "OUN": 30696, + ".,\"": 30697, + "Ġmoderation": 30698, + "ĠUNHCR": 30699, + "Ġdomains": 30700, + "eco": 30701, + "Ġcrater": 30702, + "Ġ510": 30703, + "kid": 30704, + "Ġcylinders": 30705, + "ĠClasses": 30706, + "Kn": 30707, + "Ġcarcin": 30708, + "ĠHunting": 30709, + "irit": 30710, + "ARP": 30711, + "anting": 30712, + "ĠMarino": 30713, + "ĠRESP": 30714, + "ifle": 30715, + "Ġ239": 30716, + "fman": 30717, + "Ġtheoretically": 30718, + "Ġdistraught": 30719, + "Ġstaircase": 30720, + "Ġexpel": 30721, + "Ġlord": 30722, + "Ġbehaviours": 30723, + "Ġprescribing": 30724, + "ographs": 30725, + "ĠNewly": 30726, + "Ġpatiently": 30727, + "Ġskyline": 30728, + "udos": 30729, + "Ġrepertoire": 30730, + "Ġhover": 30731, + "mint": 30732, + "Ġclears": 30733, + "Ġkale": 30734, + "ĠSco": 30735, + "ĠCoulter": 30736, + "Ġpancreat": 30737, + "pu": 30738, + "995": 30739, + "Ġincompetent": 30740, + "2007": 30741, + "Ġgripping": 30742, + "enable": 30743, + "Ġreinforcing": 30744, + "ĠFee": 30745, + "education": 30746, + "ĠKuro": 30747, + "Ġbowed": 30748, + "Ġshave": 30749, + "ĠMean": 30750, + "xi": 30751, + "Ġinciting": 30752, + "atters": 30753, + "Ġecstatic": 30754, + "hog": 30755, + "Ġclauses": 30756, + "Ġsubt": 30757, + "Ġbehaved": 30758, + "tains": 30759, + "Liverpool": 30760, + "Ġstrives": 30761, + "ĠKev": 30762, + "ĠFramework": 30763, + "defined": 30764, + "Ġrecounts": 30765, + "array": 30766, + "tips": 30767, + "Ġartificially": 30768, + "fits": 30769, + "Clearly": 30770, + "mediate": 30771, + "Ġunseen": 30772, + "Ġthugs": 30773, + "ĠLent": 30774, + "Ġ1938": 30775, + "Ġgenital": 30776, + "ĠSonic": 30777, + "ĠWarehouse": 30778, + "pler": 30779, + "Ġunm": 30780, + "Ġpackets": 30781, + "ĠMET": 30782, + "ealous": 30783, + "ographers": 30784, + "Ġlabou": 30785, + "Core": 30786, + "+,": 30787, + "parable": 30788, + "Ġstrat": 30789, + "Ġinvitations": 30790, + "Ġsouven": 30791, + "Ġbillboards": 30792, + "ĠRegulations": 30793, + "Ġdwarf": 30794, + "Ġtoler": 30795, + "Ġprose": 30796, + "Ġestates": 30797, + "Ġmetabolic": 30798, + "ĠSuff": 30799, + "ĠFirstly": 30800, + "Ġpolio": 30801, + "Ġchick": 30802, + "ĠDaughter": 30803, + "Ġsubstant": 30804, + "ĠIdentity": 30805, + "umbers": 30806, + "ĠFacts": 30807, + "Ġfrust": 30808, + "Ġdissip": 30809, + "ĠDeck": 30810, + "Hy": 30811, + "ĠBirch": 30812, + "Ġhurled": 30813, + "democracy": 30814, + "nered": 30815, + "eper": 30816, + "Ġcerebral": 30817, + "181": 30818, + "Ġhalves": 30819, + "abit": 30820, + "balance": 30821, + "ĠTibet": 30822, + "Ġhandheld": 30823, + "ĠDough": 30824, + "Ġprogrammed": 30825, + "hw": 30826, + "Ġoutlawed": 30827, + "ĠSerious": 30828, + "Ġironically": 30829, + "Ġmanipulating": 30830, + ")\"": 30831, + "juries": 30832, + "Ġfragrance": 30833, + "crete": 30834, + "ĠHHS": 30835, + "cience": 30836, + "Ġcosmic": 30837, + "Ġforeclosure": 30838, + 
"Ġpercentages": 30839, + "Bus": 30840, + "Ġenticing": 30841, + "extra": 30842, + "ĠShy": 30843, + "ĠÂ¥": 30844, + "Ġheadsets": 30845, + "imensional": 30846, + "Ġlux": 30847, + "Ġresidual": 30848, + "Ġmantle": 30849, + "ĠSJ": 30850, + "ĠPeaks": 30851, + "ĠFinger": 30852, + "Ġunfolds": 30853, + "anity": 30854, + "Ġresettlement": 30855, + "ĠWeak": 30856, + "ĠBeen": 30857, + "Ġ198": 30858, + "Ġangels": 30859, + "ĠFarn": 30860, + "peace": 30861, + "Ġcapac": 30862, + "Ġhue": 30863, + "Ġlust": 30864, + "traumatic": 30865, + "laun": 30866, + "Ġstrawberries": 30867, + "Ġherbal": 30868, + "Ġconversions": 30869, + "ĠHeld": 30870, + "Ġprescribe": 30871, + "Its": 30872, + "ĠDartmouth": 30873, + "Ġfashioned": 30874, + "460": 30875, + "BLE": 30876, + "international": 30877, + "Ġlumin": 30878, + "Ġplantation": 30879, + "ilde": 30880, + "490": 30881, + "Ġeuph": 30882, + "Ġdisgust": 30883, + "Ġaspire": 30884, + "medical": 30885, + "Ġsocialism": 30886, + "Ġdissolve": 30887, + "Wal": 30888, + "Ġadmittedly": 30889, + "Ġsewing": 30890, + "ĠAcer": 30891, + "Ġtul": 30892, + "Ġfacilit": 30893, + "Ġgrandma": 30894, + "ĠFeeling": 30895, + "Ġobst": 30896, + "ĠFranz": 30897, + "ĠPalin": 30898, + "ĠIncrease": 30899, + "gets": 30900, + "ĠImam": 30901, + "âĢİ": 30902, + "Ġcoincides": 30903, + "urrence": 30904, + "Ġlifes": 30905, + "Lab": 30906, + "Ham": 30907, + "angelo": 30908, + "Wild": 30909, + "Ġvetoed": 30910, + "Ġventilation": 30911, + "olid": 30912, + "Summer": 30913, + "Ġfacade": 30914, + "neys": 30915, + "ĠWOM": 30916, + "ĠBenny": 30917, + "ĠMarried": 30918, + "squ": 30919, + "ĠReflect": 30920, + "return": 30921, + "elia": 30922, + "olding": 30923, + "Ġrefine": 30924, + "ĠMadness": 30925, + "innacle": 30926, + "posts": 30927, + "287": 30928, + "fruit": 30929, + "274": 30930, + "icator": 30931, + "ĠVoy": 30932, + "Ġunsett": 30933, + "Ġfant": 30934, + "Ġtreaties": 30935, + "Ġcrystals": 30936, + "Ġhijacked": 30937, + "words": 30938, + "ĠReleased": 30939, + "Save": 30940, + "Ġcannon": 30941, + "Ġanomaly": 30942, + "Ġbeacon": 30943, + "Ġcrippled": 30944, + "Ġbundles": 30945, + "Ġuntreated": 30946, + "Ġhappiest": 30947, + "Ġgalaxies": 30948, + "Ġoccupational": 30949, + "416": 30950, + "Dar": 30951, + "Ġcrank": 30952, + "Ġappropriation": 30953, + "asking": 30954, + "mens": 30955, + "Ġdetector": 30956, + "Ġskewed": 30957, + "Ġpoke": 30958, + "254": 30959, + "Ġhypertension": 30960, + "apolog": 30961, + "Ġevaluations": 30962, + "blocks": 30963, + "Ġpow": 30964, + "GEN": 30965, + "Ġscalp": 30966, + "Ġarrogant": 30967, + "AIDS": 30968, + "ority": 30969, + "Ġredirect": 30970, + "Ġderogatory": 30971, + "Ġlateral": 30972, + "495": 30973, + "rolley": 30974, + "brew": 30975, + "Ġbabys": 30976, + "Ġmuff": 30977, + "ĠRequ": 30978, + "Ġdime": 30979, + "Ġwonderfully": 30980, + "Ġtreasures": 30981, + "ĠNES": 30982, + "Ġponds": 30983, + "Ġimpulse": 30984, + "Ġdetecting": 30985, + "Ġgrin": 30986, + "Ġbrid": 30987, + "Ġshoved": 30988, + "Ġpurge": 30989, + "irteen": 30990, + "OTHER": 30991, + "ÙĦ": 30992, + "irsch": 30993, + "ĠOcc": 30994, + "193": 30995, + "Ġfodder": 30996, + "wrote": 30997, + "meric": 30998, + "posal": 30999, + "Ġwinters": 31000, + "ĠJuice": 31001, + "hub": 31002, + "Ġcontrasting": 31003, + "Brazil": 31004, + "Ġflashy": 31005, + "uffer": 31006, + "technology": 31007, + "Children": 31008, + "Ġcatapult": 31009, + "owsky": 31010, + "ĠEclipse": 31011, + "abeth": 31012, + "ĠParticip": 31013, + "Ġlaud": 31014, + "ĠQuiet": 31015, + "Ġsimulations": 31016, + "Ġsacrificing": 31017, + "Ġpreaching": 31018, + "Ġvoicing": 31019, + 
"itizen": 31020, + "Ġgn": 31021, + "Ġsans": 31022, + "Ġ285": 31023, + "ĠRobot": 31024, + "Ġ1936": 31025, + "Ġsham": 31026, + "ĠKislyak": 31027, + "ĠGCC": 31028, + "tale": 31029, + "ĠShades": 31030, + "Ġsediment": 31031, + "Ġconveniently": 31032, + "Give": 31033, + "mounted": 31034, + "Ġpeel": 31035, + "Jun": 31036, + "ĠEisenhower": 31037, + "Ġdiplom": 31038, + "ĠPreservation": 31039, + "Ġaffirm": 31040, + "Ġtaboo": 31041, + "ĠGarr": 31042, + "ĠApply": 31043, + "prim": 31044, + "Ġausp": 31045, + "Ġtextbook": 31046, + "Ġforfeit": 31047, + "icides": 31048, + "Ġundis": 31049, + "DJ": 31050, + "Ġ\"...": 31051, + "ĠXperia": 31052, + "Ġfurry": 31053, + "Australian": 31054, + "Ġpreach": 31055, + "Ġparamed": 31056, + "Ġ196": 31057, + "agos": 31058, + "ĠRIP": 31059, + "Ġ408": 31060, + "ĠQuarterly": 31061, + "ĠQuentin": 31062, + "Ġdeft": 31063, + "ĠVlad": 31064, + "massive": 31065, + "apore": 31066, + "Ġquestionnaire": 31067, + "secution": 31068, + "ĠTunnel": 31069, + "ĠAssist": 31070, + "BILITY": 31071, + "everything": 31072, + "vich": 31073, + "Ġcomparatively": 31074, + "heng": 31075, + "ETH": 31076, + "ĠiPod": 31077, + "Ġinsurgent": 31078, + "Ġtestosterone": 31079, + "191": 31080, + "Ġmoons": 31081, + "Ġgripped": 31082, + "Ġstrang": 31083, + "pects": 31084, + "ĠSERVICE": 31085, + "Ġnumb": 31086, + "Ġmeasurable": 31087, + "Ġdismantled": 31088, + "Ġdepict": 31089, + "Ġretake": 31090, + "Light": 31091, + "Ġaquatic": 31092, + "useum": 31093, + "judicial": 31094, + "Ġ****": 31095, + "Ġrosters": 31096, + "certain": 31097, + "Ġhypothesis": 31098, + "2002": 31099, + "Snow": 31100, + "Ġpounded": 31101, + "ĠZel": 31102, + "ĠTrem": 31103, + "iversity": 31104, + "219": 31105, + "Jen": 31106, + "ĠAdventures": 31107, + "Ġcylinder": 31108, + "Ġbanging": 31109, + "Ġbalk": 31110, + "analy": 31111, + "ĠHust": 31112, + "ookie": 31113, + "ĠReturning": 31114, + "Ġpods": 31115, + "analysis": 31116, + "ĠTruman": 31117, + "Ġorg": 31118, + "Ġsar": 31119, + "Ġdred": 31120, + "ĠTelecommunications": 31121, + "ĠSven": 31122, + "carry": 31123, + "ĠLOVE": 31124, + "Ġparting": 31125, + "asar": 31126, + "utations": 31127, + "itic": 31128, + "Ġactu": 31129, + "Ġbananas": 31130, + "ĠNights": 31131, + "410": 31132, + "Still": 31133, + "Ġtweaked": 31134, + "went": 31135, + "Ġtoddlers": 31136, + "irted": 31137, + "Ġpaed": 31138, + "ĠWink": 31139, + "Ġviewpoint": 31140, + "ĠHelic": 31141, + "Ġhandshake": 31142, + "Ġpoaching": 31143, + "Ġrounding": 31144, + "268": 31145, + "ĠNVIDIA": 31146, + "Ġsquat": 31147, + "Ġtowed": 31148, + "Ġhandler": 31149, + "Ġconspir": 31150, + "Ġadditionally": 31151, + "CENT": 31152, + "ĠÃľ": 31153, + "article": 31154, + "ĠTough": 31155, + "NM": 31156, + "Rem": 31157, + "Ġstunts": 31158, + "ILS": 31159, + "ĠLM": 31160, + "Connect": 31161, + "ĠParagu": 31162, + "Ġcomplexities": 31163, + "Ġhugging": 31164, + "Ġabolish": 31165, + "ricting": 31166, + "ĠItems": 31167, + "Ġtemples": 31168, + "ĠSeat": 31169, + "ĠRubber": 31170, + "Ġindic": 31171, + "ĠVitamin": 31172, + "Ġcitations": 31173, + "Ġarmored": 31174, + "---------------": 31175, + "ĠNeo": 31176, + "ippy": 31177, + "Que": 31178, + "Ġrag": 31179, + "Ġlov": 31180, + "630": 31181, + "Ġadept": 31182, + "orbit": 31183, + "253": 31184, + "412": 31185, + "Ġbutterflies": 31186, + "Ġoutl": 31187, + "ĠCycle": 31188, + "Ġaesthetics": 31189, + "ĠTwitch": 31190, + "405": 31191, + "factor": 31192, + "ðŁij": 31193, + "ĠCircus": 31194, + "Posted": 31195, + "Ġintroductory": 31196, + "ĠStack": 31197, + "atoes": 31198, + "Ġfurn": 31199, + "ĠHond": 31200, + "Ġbipolar": 31201, + 
"ĠAging": 31202, + "inches": 31203, + "Ġincompetence": 31204, + "Ġaloud": 31205, + "Imagine": 31206, + "Ġsepar": 31207, + "Ġmanip": 31208, + "ophobic": 31209, + "inion": 31210, + "bek": 31211, + "Ġquer": 31212, + "ĠArmen": 31213, + "Ġhumorous": 31214, + "Ġmundane": 31215, + "Ġapologizing": 31216, + "Ġpioneered": 31217, + "Ġ303": 31218, + "282": 31219, + "Ġcalming": 31220, + "orious": 31221, + "760": 31222, + "Ġstitches": 31223, + "Ġthrottle": 31224, + "Ġspinach": 31225, + "urities": 31226, + "ĠCologne": 31227, + "Ġripple": 31228, + "Cs": 31229, + "Cent": 31230, + "Should": 31231, + "Ġaffinity": 31232, + "amount": 31233, + "ĠMISS": 31234, + "Ġsage": 31235, + "Ġamusing": 31236, + "Ġsnatch": 31237, + "clair": 31238, + "ĠGuess": 31239, + "bench": 31240, + "ĠMoj": 31241, + "nuclear": 31242, + "Ġfid": 31243, + "ĠVM": 31244, + "ĠGN": 31245, + "brainer": 31246, + "Ġcurled": 31247, + "Ġbushes": 31248, + "icably": 31249, + "Ġcreeping": 31250, + "Ġveil": 31251, + "ĠALS": 31252, + "ESPN": 31253, + "ulsion": 31254, + "ĠGTX": 31255, + "ĠANN": 31256, + "Ġcomplicit": 31257, + "assault": 31258, + "IOR": 31259, + "Ġpolymer": 31260, + "Ġestimating": 31261, + "277": 31262, + "alog": 31263, + "Ġglimps": 31264, + "Ġreinforces": 31265, + "Ġtextbooks": 31266, + "Ġdictated": 31267, + "ĠReyn": 31268, + "latable": 31269, + "ĠOrth": 31270, + "520": 31271, + "Ġtrickle": 31272, + "ĠWrong": 31273, + ".[": 31274, + "ĠDesigner": 31275, + "304": 31276, + "ĠInner": 31277, + "Ġrave": 31278, + "ppa": 31279, + "ĠGim": 31280, + "Ġswath": 31281, + "Ġcarts": 31282, + "atlantic": 31283, + "Ġpersists": 31284, + "ĠDeveloper": 31285, + "Ġgoodies": 31286, + "isive": 31287, + "Inf": 31288, + "ĠSaving": 31289, + "loop": 31290, + "tions": 31291, + "Ġabusers": 31292, + "Ġclot": 31293, + "Ġmesmer": 31294, + "Ġdeg": 31295, + "Ġskirts": 31296, + "257": 31297, + "Ġunreliable": 31298, + "ĠCOMM": 31299, + "Ġ194": 31300, + "Ġfledgling": 31301, + "administ": 31302, + "Israeli": 31303, + "ĠBarbie": 31304, + "ĠJeanne": 31305, + "Ġgenerously": 31306, + "ĠStruct": 31307, + "ĠZap": 31308, + "Ġvetted": 31309, + "ĠViolet": 31310, + "Ġ),": 31311, + "Ġembarrass": 31312, + "bang": 31313, + "ĠProvider": 31314, + "getting": 31315, + "alg": 31316, + "Ġunconditional": 31317, + "ĠHulk": 31318, + "ĠWad": 31319, + "utation": 31320, + "Ġpointless": 31321, + "Ġdeprivation": 31322, + "Ġstarving": 31323, + "ĠImpossible": 31324, + "ĠStir": 31325, + "Ġknack": 31326, + "anse": 31327, + "Ġsecurely": 31328, + "Ġply": 31329, + "395": 31330, + "Pack": 31331, + "liv": 31332, + "Ġridden": 31333, + "alks": 31334, + "308": 31335, + "male": 31336, + "Ġbitterly": 31337, + "Ġirrational": 31338, + "Members": 31339, + "ported": 31340, + "qq": 31341, + "ractor": 31342, + "Ġinflict": 31343, + "ĠBoehner": 31344, + "Ġthickness": 31345, + "Ġdome": 31346, + "ĠInflu": 31347, + "Ġheap": 31348, + "Ġmirrored": 31349, + "Ġconstituent": 31350, + "Ġfertile": 31351, + "Ġvaping": 31352, + "266": 31353, + "riages": 31354, + "Ġembassies": 31355, + "Ġpersu": 31356, + "ĠMacArthur": 31357, + "issions": 31358, + "Main": 31359, + "aths": 31360, + "onne": 31361, + "circ": 31362, + "Ġsweating": 31363, + "quartered": 31364, + "Ġsax": 31365, + "Ġ540": 31366, + "Ġreputable": 31367, + "Ġsatire": 31368, + "Ġpastors": 31369, + "ventional": 31370, + "Mic": 31371, + "female": 31372, + "Ġpity": 31373, + "appropri": 31374, + "voc": 31375, + "hei": 31376, + "Ġimperial": 31377, + "Ġcorrective": 31378, + "Ġresent": 31379, + "Ġtempered": 31380, + "Ġdiffers": 31381, + "Hamilton": 31382, + "Ġsaddle": 31383, + "Ġgrenades": 
31384, + "ĠQuart": 31385, + "onymous": 31386, + "til": 31387, + "Ġdepiction": 31388, + "Ġdisreg": 31389, + "Ġpetitioner": 31390, + "Ġfret": 31391, + "ĠEns": 31392, + "Emer": 31393, + "540": 31394, + "opathy": 31395, + "vertisements": 31396, + "Ġsketches": 31397, + "venth": 31398, + "Ġautomate": 31399, + "Ġjihad": 31400, + "iping": 31401, + "Ġtert": 31402, + "ĠSop": 31403, + "ships": 31404, + "Ġdeceptive": 31405, + "ĠPryor": 31406, + "ĠGorge": 31407, + "ĠMeridian": 31408, + "rero": 31409, + "affected": 31410, + "Ġlame": 31411, + "660": 31412, + "rub": 31413, + "Hello": 31414, + "ĠNumbers": 31415, + "269": 31416, + "Ġmarg": 31417, + "Fran": 31418, + "640": 31419, + "Ġcath": 31420, + "winter": 31421, + "ĠMosque": 31422, + "Ġreckoning": 31423, + "ĠImaging": 31424, + "Ġmutation": 31425, + "ĠMild": 31426, + "Ġkidnap": 31427, + "Ġnav": 31428, + "Ġferocious": 31429, + "Ġdusty": 31430, + "Cele": 31431, + "ĠFoss": 31432, + "Ġregrett": 31433, + "lymp": 31434, + "Ġcoli": 31435, + "Ġstereo": 31436, + "Ġforesee": 31437, + "alties": 31438, + "Ġresusc": 31439, + "Full": 31440, + "wash": 31441, + "ĠINST": 31442, + "ĠPars": 31443, + "Ġcoated": 31444, + "ĠHT": 31445, + "Ġdiscord": 31446, + "Ġreforming": 31447, + "CAN": 31448, + "Ġblink": 31449, + "Ġlubric": 31450, + "Ġmishand": 31451, + "ensible": 31452, + "existent": 31453, + "secondary": 31454, + "ĠDoesn": 31455, + "terrorist": 31456, + "Ġriff": 31457, + "custom": 31458, + "ĠDET": 31459, + "Ġreusable": 31460, + "ĠCRA": 31461, + "ĠScalia": 31462, + "Ġaccelerator": 31463, + "Ġpropag": 31464, + "ĠMID": 31465, + "ework": 31466, + "Ġlooted": 31467, + "oscope": 31468, + "eners": 31469, + "ruction": 31470, + "Ġbarr": 31471, + "Ġviewership": 31472, + "Ġlends": 31473, + "obil": 31474, + "ĠRoots": 31475, + "ĠCame": 31476, + "ibel": 31477, + "Ġglobalization": 31478, + "lab": 31479, + "information": 31480, + "Ġcoordin": 31481, + "Ġglitch": 31482, + "Ġworms": 31483, + "Ġslurs": 31484, + "Ġcontemplated": 31485, + "ĠPenal": 31486, + "Ġ191": 31487, + "Ġ221": 31488, + "Ġexposes": 31489, + "Ġ248": 31490, + "ĠASP": 31491, + "Ġdependency": 31492, + "urga": 31493, + "pdf": 31494, + "Ġvibr": 31495, + "clone": 31496, + "ossible": 31497, + "ĠUtt": 31498, + "serv": 31499, + "ĠLevant": 31500, + "maybe": 31501, + "MU": 31502, + "ĠLunar": 31503, + "Ġbystanders": 31504, + "Ġcapitals": 31505, + "Ġpreacher": 31506, + "thin": 31507, + "Ġunderscore": 31508, + "Ġ('": 31509, + "Ġmedd": 31510, + "Ġautobiography": 31511, + "Ġpersistence": 31512, + "Ġarming": 31513, + "Ġappalled": 31514, + "Ġcontradictory": 31515, + "Ġreciproc": 31516, + "Ġtakedown": 31517, + "tan": 31518, + "Ġnecessities": 31519, + "itans": 31520, + "ĠAlas": 31521, + "Ġsegregated": 31522, + "ĠResponsibility": 31523, + "ĠSHOW": 31524, + "ISIS": 31525, + "Ġpengu": 31526, + "Ġumb": 31527, + "ĠHO": 31528, + "HB": 31529, + "ĠChou": 31530, + "Ġalluded": 31531, + "Ġharms": 31532, + "bara": 31533, + "ĠWOR": 31534, + "Sorry": 31535, + "Ġstarvation": 31536, + "Ġspilling": 31537, + "Ġcarb": 31538, + "annis": 31539, + "ĠGarrison": 31540, + "Ġmillionaire": 31541, + "ifling": 31542, + "ĠCancel": 31543, + "Ġimprint": 31544, + "Ġborrower": 31545, + "455": 31546, + "ĠCic": 31547, + "Ġexposures": 31548, + "dest": 31549, + "Ġunn": 31550, + "Ġ802": 31551, + "Ġadherence": 31552, + "prints": 31553, + "Ġweary": 31554, + "Ġwaging": 31555, + "Ġ1937": 31556, + "ĠKepler": 31557, + "%;": 31558, + "Ġdefective": 31559, + "ĠReps": 31560, + "ĠGranted": 31561, + "Ġdisco": 31562, + "ĠRanking": 31563, + "erno": 31564, + "Ġarchaeological": 31565, + "sq": 31566, 
+ "Ġcapit": 31567, + "Ġfleets": 31568, + "Ġinventor": 31569, + "iffin": 31570, + "Ġspotting": 31571, + "ĠSHARES": 31572, + "309": 31573, + "Hard": 31574, + "save": 31575, + "241": 31576, + "ĠThinking": 31577, + "XY": 31578, + "Ġhavens": 31579, + "Ġmessed": 31580, + "crop": 31581, + "Ġperme": 31582, + "Ġtimelines": 31583, + "ĠGarage": 31584, + "Ġplateau": 31585, + "together": 31586, + "fox": 31587, + "Ġfailings": 31588, + "ĠTight": 31589, + "ĠPhysics": 31590, + "ĠScholars": 31591, + "Ġpans": 31592, + "Fall": 31593, + "Ġhull": 31594, + "GER": 31595, + "Ġbourbon": 31596, + "ceived": 31597, + "Ġsteroids": 31598, + "Ġhamb": 31599, + "Ġinterpretations": 31600, + "Ġcush": 31601, + "Chair": 31602, + "Ġinformational": 31603, + "aryn": 31604, + "Ġwoven": 31605, + "Ġamen": 31606, + "Bre": 31607, + "Ġrefreshed": 31608, + "York": 31609, + "ĠBlast": 31610, + "Editor": 31611, + "Ġmotivating": 31612, + "ĠReason": 31613, + "Florida": 31614, + "Ġdreaded": 31615, + "Ġstationary": 31616, + "Ġbil": 31617, + "doors": 31618, + "Ġslightest": 31619, + "Ġcombustion": 31620, + "Ġfascination": 31621, + "Ġstraps": 31622, + "scribed": 31623, + "Ġexhibiting": 31624, + "Ġsimplest": 31625, + "Gar": 31626, + "Ġprogressives": 31627, + "claim": 31628, + "ocket": 31629, + "Ġexoner": 31630, + "ĠNETWORK": 31631, + "Brad": 31632, + "Ġ197": 31633, + "Ġnightmares": 31634, + "Ġillust": 31635, + "among": 31636, + "ĠGreenpeace": 31637, + "Ġoval": 31638, + "Ġblocker": 31639, + "3000": 31640, + "ĠMemor": 31641, + "Ġmids": 31642, + "Ġconfuse": 31643, + "YN": 31644, + "cow": 31645, + "Ġdispensary": 31646, + "telling": 31647, + "Ġentail": 31648, + "Ġneurolog": 31649, + "Ġbroth": 31650, + "Ġpron": 31651, + "ĠAnswer": 31652, + "thank": 31653, + "Ġintersect": 31654, + "Ġclinging": 31655, + "ĠKilling": 31656, + "Ġcohesion": 31657, + "Ġcategorized": 31658, + "Ġtangled": 31659, + "ĠASC": 31660, + "Arsenal": 31661, + "ĠAutomatic": 31662, + "580": 31663, + "sac": 31664, + "Ġshady": 31665, + "consumer": 31666, + "hetically": 31667, + "NV": 31668, + "Ġoverl": 31669, + "holes": 31670, + "ĠDonation": 31671, + "tera": 31672, + "score": 31673, + "library": 31674, + "Ġsmoother": 31675, + "Ġcoasts": 31676, + "Ġintercourse": 31677, + "Ġunfavorable": 31678, + "erb": 31679, + "Hel": 31680, + "Ġbiases": 31681, + "Ġinheritance": 31682, + "Ġsuppressed": 31683, + "ĠRecommend": 31684, + "iculture": 31685, + "ighting": 31686, + "inguished": 31687, + "idences": 31688, + "operated": 31689, + "Ġhors": 31690, + "Ġshrug": 31691, + "aila": 31692, + "ĠConsortium": 31693, + "Ġveins": 31694, + "uria": 31695, + "ĠSmithsonian": 31696, + "ĠAX": 31697, + ")âĢĶ": 31698, + "given": 31699, + "JC": 31700, + "Ġreneg": 31701, + "Ġprincip": 31702, + "Ġextinct": 31703, + "Golden": 31704, + "ASON": 31705, + "Ġstatutes": 31706, + "292": 31707, + "ĠGOOD": 31708, + "ĠGreenland": 31709, + "ĠRasmussen": 31710, + "ATHER": 31711, + "Ġdeserted": 31712, + "ĠHitchcock": 31713, + "Ġqualifies": 31714, + "Ġdreadful": 31715, + "Ġsupers": 31716, + "Ġtendon": 31717, + "oter": 31718, + "ĠFate": 31719, + "Ġrestrooms": 31720, + "igating": 31721, + "Sher": 31722, + "Name": 31723, + "orph": 31724, + "ĠCritical": 31725, + "rox": 31726, + "Ġdefunct": 31727, + "Ġcanoe": 31728, + "Ġbiscuits": 31729, + "Ġwomb": 31730, + "808": 31731, + "istar": 31732, + "Ġroar": 31733, + "aundering": 31734, + "iewicz": 31735, + "ĠNM": 31736, + "ĠChamberlain": 31737, + "Ġ233": 31738, + "ĠCoat": 31739, + "Ġ999": 31740, + "aft": 31741, + "Ġlurking": 31742, + "ĠPist": 31743, + "Ġfollower": 31744, + "Ġcareg": 31745, + "ÙĨ": 31746, 
+ "ĠThin": 31747, + "ZZ": 31748, + "ĠGI": 31749, + "ĠVintage": 31750, + "Ġpainstaking": 31751, + "Ġgloom": 31752, + "Ġtbsp": 31753, + "Ġwhim": 31754, + "ĠMask": 31755, + "rugged": 31756, + "Ġwritings": 31757, + "stantial": 31758, + "luence": 31759, + "ordable": 31760, + "akia": 31761, + "Ġassassinated": 31762, + "Wind": 31763, + "Ġdemeanor": 31764, + "Night": 31765, + "rape": 31766, + "ĠBringing": 31767, + "Ġshields": 31768, + "ĠAntarctic": 31769, + "Ġfruitful": 31770, + "ĠBuster": 31771, + "ĠLois": 31772, + "Ġ302": 31773, + "Style": 31774, + "ĠRIS": 31775, + "Ġdissatisfaction": 31776, + "ulp": 31777, + "ĠLaser": 31778, + "Ġdisposition": 31779, + "ĠAnk": 31780, + "Ġabsorbing": 31781, + "276": 31782, + "Ġvolcan": 31783, + "Ġleftover": 31784, + "yah": 31785, + "ĠVaj": 31786, + "Ġunsolved": 31787, + "oland": 31788, + "Ġstained": 31789, + "Ġpathetic": 31790, + "ylan": 31791, + "Ġknots": 31792, + "immigration": 31793, + "ieving": 31794, + "Coming": 31795, + "Commerce": 31796, + "ĠHurt": 31797, + "drawn": 31798, + "Ġaxis": 31799, + "Ġdye": 31800, + "ĠNora": 31801, + "ĠPortal": 31802, + "Ġsuspense": 31803, + "ĠExactly": 31804, + "Ġpowering": 31805, + "ĠClock": 31806, + "Ġdrawer": 31807, + "ĠSpike": 31808, + "Ġhallmark": 31809, + "aber": 31810, + "ĠTrainer": 31811, + "UV": 31812, + "Ġredundant": 31813, + "Tour": 31814, + "Ġdesignate": 31815, + "Ġredress": 31816, + "ĠUb": 31817, + "cake": 31818, + "oded": 31819, + "Ġkings": 31820, + "iates": 31821, + "Ġcoupons": 31822, + "Ġextremes": 31823, + "Elect": 31824, + "Ġcitation": 31825, + "Ġdirectory": 31826, + "Ġtranspired": 31827, + "cele": 31828, + "gence": 31829, + "5000": 31830, + "ostic": 31831, + "Ġraining": 31832, + "ĠSight": 31833, + "videos": 31834, + "phthal": 31835, + "llor": 31836, + "Ġappraisal": 31837, + "Ġdetox": 31838, + "Ġelecting": 31839, + "Ġordinances": 31840, + "Ġlifespan": 31841, + "Ref": 31842, + "Ġilluminated": 31843, + "Ġforfe": 31844, + "Making": 31845, + "ĠWorst": 31846, + "ĠTP": 31847, + "Ġfullest": 31848, + "ĠISIL": 31849, + "ĠRates": 31850, + "Ġyeast": 31851, + "sett": 31852, + "ĠYok": 31853, + "innie": 31854, + "edition": 31855, + "ĠGoldstein": 31856, + "Ġunaff": 31857, + "god": 31858, + "Ġzo": 31859, + "rums": 31860, + "Ġopaque": 31861, + "ĠHist": 31862, + "Yesterday": 31863, + "AMS": 31864, + "aband": 31865, + "005": 31866, + "illary": 31867, + "ĠSplash": 31868, + "Ġaccrued": 31869, + "Ell": 31870, + "Ġnominating": 31871, + "ĠBroadcast": 31872, + "ĠWhip": 31873, + "ARM": 31874, + "Ġunnecessarily": 31875, + "brown": 31876, + "429": 31877, + "ansky": 31878, + "Ġextravagant": 31879, + "Malley": 31880, + "wage": 31881, + "Ġexempted": 31882, + "Ġtypo": 31883, + "Ġesports": 31884, + "ĠStru": 31885, + "ĠPython": 31886, + "Ġsaint": 31887, + "ĠCSI": 31888, + "ĠPowder": 31889, + "Ġdisguised": 31890, + "ĠSubway": 31891, + "Ġprecursor": 31892, + "ĠWizard": 31893, + "Johnson": 31894, + "icas": 31895, + "Ġdefaults": 31896, + "!).": 31897, + "ebra": 31898, + "jected": 31899, + "Ġunaccompanied": 31900, + "HH": 31901, + "Ġproced": 31902, + "clinical": 31903, + "Ġmitigating": 31904, + "ĠSoup": 31905, + "ĠFunny": 31906, + "344": 31907, + "Hall": 31908, + "Ġscalable": 31909, + "Ġshimmer": 31910, + "Ġunderstatement": 31911, + "zeb": 31912, + "icus": 31913, + "Ġretract": 31914, + "IDER": 31915, + "ieft": 31916, + "iii": 31917, + "ĠEmperor": 31918, + "Ġvoltage": 31919, + "343": 31920, + "Rest": 31921, + "ĠButcher": 31922, + "Ġlaced": 31923, + "Ġsalty": 31924, + "Ġfourteen": 31925, + "Ġoxy": 31926, + "Ġraged": 31927, + "Ġforg": 31928, + 
"Ġcaveat": 31929, + "Ġponder": 31930, + "process": 31931, + "Ġghosts": 31932, + "ĠGoose": 31933, + "didn": 31934, + "stood": 31935, + "amation": 31936, + "Ġvillains": 31937, + "contract": 31938, + "Ġbooted": 31939, + "ĠDidn": 31940, + "ĠSalon": 31941, + "Ġlewd": 31942, + "ĠFritz": 31943, + "Ġorganis": 31944, + "Ġpuzzles": 31945, + "ĠRX": 31946, + "Ġcurtains": 31947, + "ĠPackage": 31948, + "Ġrebate": 31949, + "Ġspokes": 31950, + "Ġoccupant": 31951, + "Ġfooled": 31952, + "appy": 31953, + "Ġyourselves": 31954, + "Ġmaths": 31955, + "Ġ630": 31956, + "bos": 31957, + "ĠHeb": 31958, + "APS": 31959, + "Ġbulletin": 31960, + "Ġpests": 31961, + "Ġlum": 31962, + "ĠHAS": 31963, + "users": 31964, + "idated": 31965, + "Ġpalpable": 31966, + "ĠFeature": 31967, + "ĠPKK": 31968, + "Ġdetriment": 31969, + "Ġbamboo": 31970, + "Ġimmersed": 31971, + "ĠDud": 31972, + "Ġion": 31973, + "icc": 31974, + "ĠIris": 31975, + "ĠBeats": 31976, + "Ġimprobable": 31977, + "Ġfuner": 31978, + "Ġsprung": 31979, + "ĠLieberman": 31980, + "ĠSTA": 31981, + "venge": 31982, + "Ġtreacherous": 31983, + "Ġpreced": 31984, + "Ġsniper": 31985, + "ĠGOLD": 31986, + "ĠSUR": 31987, + "Nic": 31988, + "ĠROB": 31989, + "Camp": 31990, + "Ġhooks": 31991, + "oling": 31992, + "Ġbolst": 31993, + "339": 31994, + "heter": 31995, + "Ġbracelet": 31996, + "Ġbreat": 31997, + "307": 31998, + "ĠTrader": 31999, + "ĠPixar": 32000, + "hist": 32001, + "Ġmenacing": 32002, + "Ġgrizz": 32003, + "294": 32004, + "Ġillustrious": 32005, + "Ġtransact": 32006, + "Ġspoiler": 32007, + "ĠWORK": 32008, + "Road": 32009, + "Ġblackout": 32010, + "Ġencomp": 32011, + "proven": 32012, + "ĠFriendship": 32013, + "Ġentrances": 32014, + "Ġprofessions": 32015, + "Ġinsin": 32016, + "Ġrecorder": 32017, + "Ġformulation": 32018, + "govern": 32019, + "Ġpainfully": 32020, + "ĠRepe": 32021, + "eeds": 32022, + "cru": 32023, + "ĠDir": 32024, + "Ġtriumphant": 32025, + "Ġignition": 32026, + "xy": 32027, + "Ġintrusion": 32028, + "ĠEAR": 32029, + "RES": 32030, + "Ġration": 32031, + "ĠTaken": 32032, + "Ġcages": 32033, + "Ġpeg": 32034, + "Ġcommem": 32035, + "680": 32036, + "ĠRite": 32037, + "Ġfolder": 32038, + "Ġvertically": 32039, + "Ġcheeks": 32040, + "pick": 32041, + "Ġcrispy": 32042, + "Ġsqueezing": 32043, + "ĠBene": 32044, + "ĠTrailer": 32045, + "ĠKM": 32046, + "acceptable": 32047, + "ĠSetting": 32048, + "Ġsupernatural": 32049, + "ĠEz": 32050, + "Ġvenom": 32051, + "ĠFrey": 32052, + "Ġpulp": 32053, + "Had": 32054, + "centered": 32055, + "metics": 32056, + "Kent": 32057, + "ĠDOI": 32058, + "kr": 32059, + "ĠWHEN": 32060, + "Ġtakeoff": 32061, + "isf": 32062, + "uko": 32063, + "Ġquasi": 32064, + "Ġveggies": 32065, + "Ġpesticide": 32066, + "Ġstimulating": 32067, + "Ġacknowledgement": 32068, + "Ġattained": 32069, + "ĠBackground": 32070, + "281": 32071, + "317": 32072, + "ĠTrees": 32073, + "Ġdetractors": 32074, + "Ġannouncer": 32075, + "Ġjoyful": 32076, + "ĠElf": 32077, + "istration": 32078, + "phi": 32079, + "Ġprogressively": 32080, + "mini": 32081, + "Ġcontraception": 32082, + "asca": 32083, + "ishops": 32084, + "Ġmisunderstood": 32085, + "Ġinitiating": 32086, + "ĠConversely": 32087, + "338": 32088, + "080": 32089, + "idation": 32090, + "ĠGoes": 32091, + "Ġimprov": 32092, + "Ġswapping": 32093, + "Vict": 32094, + "Ġdevoid": 32095, + "fighter": 32096, + "ĠMori": 32097, + "Ġvoy": 32098, + "ĠElev": 32099, + "ĠAim": 32100, + "Ġtrustworthy": 32101, + "Leg": 32102, + "675": 32103, + "ĠPossible": 32104, + "Crunch": 32105, + "ĠRings": 32106, + "Ġphony": 32107, + "Ġbladder": 32108, + "ĠChall": 32109, + "Spot": 
32110, + "oak": 32111, + "Was": 32112, + "ĠFAM": 32113, + "ĠAGA": 32114, + "ĠFifa": 32115, + "Ġenclosed": 32116, + "Ġanthrop": 32117, + "faith": 32118, + "ĠAux": 32119, + "Ġgracious": 32120, + "roller": 32121, + "Ġdowntime": 32122, + "swing": 32123, + "Ġcamouflage": 32124, + "ĠCosts": 32125, + "Ġliv": 32126, + "ricular": 32127, + "ĠUran": 32128, + "Ġdisapproval": 32129, + "Ġpropriet": 32130, + "bits": 32131, + "Ġmafia": 32132, + "ĠSCHOOL": 32133, + "ĠPrepar": 32134, + "button": 32135, + "Almost": 32136, + "Ġpastoral": 32137, + "ĠDove": 32138, + "Hol": 32139, + "Ġimposes": 32140, + "ĠDram": 32141, + "lys": 32142, + "ĠSAS": 32143, + "Ġwiring": 32144, + "271": 32145, + "ĠModels": 32146, + "Ġoutpost": 32147, + "etics": 32148, + "Ġinsulted": 32149, + "ĠMongolia": 32150, + "Ġoverth": 32151, + "Haw": 32152, + "ĠHomer": 32153, + "itta": 32154, + "raining": 32155, + "Ġevidently": 32156, + "raphic": 32157, + "impact": 32158, + "Ġfranch": 32159, + "Ġ2100": 32160, + "Ġapproximate": 32161, + "Ġcartoons": 32162, + "Ġbackups": 32163, + "umbing": 32164, + "Ġforceful": 32165, + "ĠShad": 32166, + "Ġsurges": 32167, + "Ġperf": 32168, + "Ġdele": 32169, + "Ġquieter": 32170, + "ĠHorowitz": 32171, + "ĠDX": 32172, + "anners": 32173, + "ĠNinja": 32174, + "ĠScript": 32175, + "ĠElise": 32176, + "collect": 32177, + "Ġgrading": 32178, + "ĠBethesda": 32179, + "Kids": 32180, + "ĠTelephone": 32181, + "Ġpreferring": 32182, + "Ġreconcil": 32183, + "Ġmango": 32184, + "ĠHail": 32185, + "ĠCitizenship": 32186, + "Master": 32187, + "cular": 32188, + "Ġstuffing": 32189, + "ĠAlive": 32190, + "ALLY": 32191, + "Ġchi": 32192, + "ĠDynam": 32193, + "ĠRosenthal": 32194, + "Ġpurity": 32195, + "Ġtemp": 32196, + "ĠHAL": 32197, + "employ": 32198, + "Ġplentiful": 32199, + "ĠComed": 32200, + "Ġstacks": 32201, + "ĠHuge": 32202, + "ĠOlder": 32203, + "Ġsclerosis": 32204, + "ONY": 32205, + "Ġfilmmaking": 32206, + "chance": 32207, + "Cry": 32208, + "Ġworkflow": 32209, + "ĠPersonnel": 32210, + "awed": 32211, + "ĠColumn": 32212, + "Ġuncomp": 32213, + "Ġdiscriminated": 32214, + "Ġpts": 32215, + "Ġallev": 32216, + "ĠKinn": 32217, + "meal": 32218, + "Ġnovice": 32219, + "Ġcrest": 32220, + "Ġhearty": 32221, + "Ġlowers": 32222, + "inqu": 32223, + "ĠPlayoffs": 32224, + "ĠHyp": 32225, + "Ġautos": 32226, + "Ġindec": 32227, + "Ġnighttime": 32228, + "Ġreflex": 32229, + "306": 32230, + "disciplinary": 32231, + "ophe": 32232, + "contact": 32233, + "Ġachievable": 32234, + "Ġslab": 32235, + "ĠMessage": 32236, + "ĠVMware": 32237, + "ĠDia": 32238, + "REG": 32239, + "Ġconfisc": 32240, + "ĠMechan": 32241, + "Ġphenomena": 32242, + "Ġsequencing": 32243, + "Ġshaming": 32244, + "Ġcompilation": 32245, + "ĠAges": 32246, + "Ġmastered": 32247, + "Ġagony": 32248, + "Ġrestrain": 32249, + "ĠLyme": 32250, + "Which": 32251, + "ĠBarney": 32252, + "ĠConcept": 32253, + "Ġsuperheroes": 32254, + "ĠPsychology": 32255, + "Ġreminis": 32256, + "violence": 32257, + "Lead": 32258, + "Da": 32259, + "VEN": 32260, + "ERC": 32261, + "ĠVoter": 32262, + "Ġbetray": 32263, + "Ġsavage": 32264, + "driver": 32265, + "IFT": 32266, + "Chain": 32267, + "angler": 32268, + "'-": 32269, + "lain": 32270, + "ĠRatt": 32271, + "bis": 32272, + "iverse": 32273, + "Ġdensely": 32274, + "Ġuncom": 32275, + "Ġunsuspecting": 32276, + "Ġstimulation": 32277, + "diff": 32278, + "Ġskins": 32279, + "ĠRiding": 32280, + "ategic": 32281, + "ĠUnderstand": 32282, + "occup": 32283, + "ĠCooking": 32284, + "Ġschizophrenia": 32285, + "ĠKoen": 32286, + "Ġcomrades": 32287, + "HY": 32288, + "Ġfab": 32289, + "ĠRowling": 32290, + "Allen": 
32291, + "ĠJUL": 32292, + "Ġembryos": 32293, + "UU": 32294, + "ĠCAT": 32295, + "Ġtidy": 32296, + "finger": 32297, + "ĠCake": 32298, + "Ġrightfully": 32299, + "religious": 32300, + "Ġ407": 32301, + "Gal": 32302, + "408": 32303, + "Ġgrievance": 32304, + "Ġswallowed": 32305, + "251": 32306, + "283": 32307, + "ĠBarcl": 32308, + "opter": 32309, + "Ġpedoph": 32310, + "Ġcured": 32311, + "Ġestablishes": 32312, + "increasing": 32313, + "tics": 32314, + "articles": 32315, + "Ġunethical": 32316, + "authored": 32317, + "Ġanchors": 32318, + "ĠContra": 32319, + "Ġventured": 32320, + "ĠCoh": 32321, + "Ġpuff": 32322, + "heddar": 32323, + "Ġomission": 32324, + "Ġdich": 32325, + "ceed": 32326, + "Ġscares": 32327, + "Ġdoctoral": 32328, + "293": 32329, + "ĠUnt": 32330, + "Ġdop": 32331, + "ĠInjury": 32332, + "ificantly": 32333, + "ĠRift": 32334, + "ĠOrders": 32335, + "Ġmobilize": 32336, + "particularly": 32337, + "Ġchilled": 32338, + "Reports": 32339, + "redibly": 32340, + "ĠGuru": 32341, + "Ġvalleys": 32342, + "Ġtextures": 32343, + "Ġreuse": 32344, + "roit": 32345, + "unts": 32346, + "Ġirreversible": 32347, + "Ġwarships": 32348, + "Ġpus": 32349, + "Ġpeeled": 32350, + "Ġthirst": 32351, + "Ġgrapple": 32352, + "busters": 32353, + "Ġnort": 32354, + "ĠDates": 32355, + "Safe": 32356, + "Ġbirthplace": 32357, + "hemoth": 32358, + "Ġvile": 32359, + "Ġ306": 32360, + "Ram": 32361, + "activated": 32362, + "ĠAero": 32363, + "Ġbutcher": 32364, + "ĠKnock": 32365, + "Ġdisturb": 32366, + "Ġtotality": 32367, + "tted": 32368, + "Ġlegit": 32369, + "cking": 32370, + "nikov": 32371, + "Ġfavoring": 32372, + "lang": 32373, + "Ġrightful": 32374, + "orum": 32375, + "!!!!": 32376, + "ĠMinute": 32377, + "Ġpostings": 32378, + "Java": 32379, + "510": 32380, + "Ġmicrobes": 32381, + "Ġsixteen": 32382, + "entimes": 32383, + "Ġbulb": 32384, + "Ġgoalt": 32385, + "Ġhumiliated": 32386, + "ansom": 32387, + "roach": 32388, + "Ġgrouping": 32389, + "hari": 32390, + "Ġcler": 32391, + "Ġstared": 32392, + "ĠSymptoms": 32393, + "Ġbasil": 32394, + "Whenever": 32395, + "ĠWhoever": 32396, + "Oil": 32397, + "ĠJericho": 32398, + "ĠAlm": 32399, + "Pol": 32400, + "Hur": 32401, + "Ġupro": 32402, + "ĠSpo": 32403, + "hammer": 32404, + "Mur": 32405, + "ĠTorch": 32406, + "Ġfrequencies": 32407, + "ĠExpansion": 32408, + "Ġparalysis": 32409, + "igon": 32410, + "ĠSail": 32411, + "Ġsilently": 32412, + "Ġrevolver": 32413, + "Ġstockpile": 32414, + "Ġpessimistic": 32415, + "ESA": 32416, + "Ġdisclaim": 32417, + "Ġdemocracies": 32418, + "ĠTales": 32419, + "ĠAngry": 32420, + "ĠWhitman": 32421, + "ĠOri": 32422, + "Ġtransitioned": 32423, + "behind": 32424, + "ĠLAN": 32425, + "Ġcav": 32426, + "ĠJazeera": 32427, + "KC": 32428, + "ĠInspect": 32429, + "irty": 32430, + "ĠAin": 32431, + "ĠOrig": 32432, + "Ġobscene": 32433, + "Ġdormant": 32434, + "Ġharb": 32435, + "ĠWiz": 32436, + "ĠAdolf": 32437, + "Ġvic": 32438, + "Ġdenouncing": 32439, + "Ġye": 32440, + "aques": 32441, + "Ġomn": 32442, + "Ġassemblies": 32443, + "nosis": 32444, + "Ġadmon": 32445, + "Ġanguish": 32446, + "Ġvag": 32447, + "YE": 32448, + "ĠMacro": 32449, + "Ġrubbing": 32450, + "Ġreplicated": 32451, + "Moon": 32452, + "ĠGuitar": 32453, + "Ġcentimeters": 32454, + "amily": 32455, + "ĠAmes": 32456, + "Ġchlorine": 32457, + "Perhaps": 32458, + "Ġpartisans": 32459, + "soc": 32460, + "Ġvagina": 32461, + "Ġtrove": 32462, + "ĠYES": 32463, + "Ġtherapists": 32464, + "Ġnods": 32465, + "Ġhanged": 32466, + "Ġridge": 32467, + "Ġhaz": 32468, + "ĠmacOS": 32469, + "Ġske": 32470, + "ĠShia": 32471, + "Ġsteril": 32472, + "Ġalmond": 32473, + 
"ĠRockefeller": 32474, + "Ġintrinsic": 32475, + "Certainly": 32476, + "Ġsublime": 32477, + "Earn": 32478, + "abet": 32479, + "Ġframeworks": 32480, + "ogical": 32481, + "ilst": 32482, + "ipal": 32483, + "Ġrescuing": 32484, + "ĠWatergate": 32485, + "Ġ231": 32486, + "ĠNano": 32487, + "ighthouse": 32488, + "olph": 32489, + "Ġ312": 32490, + "Ġhealed": 32491, + "ĠTomb": 32492, + "Ġsubst": 32493, + "Ġsulph": 32494, + "ĠNewsp": 32495, + "ĠLama": 32496, + "venue": 32497, + "387": 32498, + "productive": 32499, + "ĠNEED": 32500, + "minus": 32501, + "ĠPages": 32502, + "cand": 32503, + "ĠClover": 32504, + "ĠForensic": 32505, + "ryn": 32506, + "ogle": 32507, + "ocr": 32508, + "Ġvaccinations": 32509, + "cies": 32510, + "ĠMek": 32511, + "Ġunaffected": 32512, + "Ġfetal": 32513, + "ĠDino": 32514, + "Ġhemisphere": 32515, + "Ġfroze": 32516, + "ĠPeg": 32517, + "Ġmicroscope": 32518, + "Ġmoderates": 32519, + "ĠGEN": 32520, + "ĠHawai": 32521, + "Ġstagn": 32522, + "Absolutely": 32523, + "practice": 32524, + "IBLE": 32525, + "cture": 32526, + "ĠAshe": 32527, + "Ġcondoms": 32528, + "Ġpoked": 32529, + "training": 32530, + "Ġintermedi": 32531, + "347": 32532, + "Ġcardinal": 32533, + "ĠSpoon": 32534, + "Ġsupp": 32535, + "Ġpreviews": 32536, + "Service": 32537, + "ĠBeam": 32538, + "Ġtranscend": 32539, + "Fresh": 32540, + "Sure": 32541, + "Ġ4000": 32542, + "idential": 32543, + "ĠCoinbase": 32544, + "Ġworkings": 32545, + "ĠPI": 32546, + "Ġpassionately": 32547, + "Ġdecisively": 32548, + "ĠInspection": 32549, + "Ġinvoke": 32550, + "Ġstain": 32551, + "Ġcleaners": 32552, + "Ġregulates": 32553, + "Ġshone": 32554, + "ĠEVERY": 32555, + "istance": 32556, + "map": 32557, + "Ġredu": 32558, + "Ġoccupies": 32559, + "Ġprocure": 32560, + "acket": 32561, + "roman": 32562, + "Ġilleg": 32563, + "Ġleaps": 32564, + "yond": 32565, + "Ġyarn": 32566, + "ĠLTD": 32567, + "ĠCONTR": 32568, + "ĠRestoration": 32569, + "ĠCDs": 32570, + "Ġdrinkers": 32571, + "ĠJordanian": 32572, + "Ġabl": 32573, + "Ġdisparate": 32574, + "Ġprimed": 32575, + "ĠFirearms": 32576, + "artz": 32577, + "Ġindispensable": 32578, + "Ter": 32579, + "Ġfright": 32580, + "Ġmarkedly": 32581, + "Ġroam": 32582, + "ĠJurassic": 32583, + "Ġfeder": 32584, + "Ġpepp": 32585, + "ĠDV": 32586, + "Ġpancakes": 32587, + "sweet": 32588, + "Ġunmatched": 32589, + "Ġassembling": 32590, + "Ultimately": 32591, + "Ġendeavour": 32592, + "Ġluckily": 32593, + "Ġbitch": 32594, + "Ġelegance": 32595, + "eers": 32596, + "drop": 32597, + "credit": 32598, + "Ġscourge": 32599, + "ĠMinimum": 32600, + "Ġimpatient": 32601, + "Ġhunted": 32602, + "ĠGoddard": 32603, + "Kal": 32604, + "Ġmined": 32605, + "Ġcalves": 32606, + "Ġ234": 32607, + "Ġplank": 32608, + "Ġinjecting": 32609, + "ĠKaufman": 32610, + "ĠCompliance": 32611, + "tone": 32612, + "Ġ345": 32613, + "Ġdazz": 32614, + "ĠClarks": 32615, + "Ġcomprehens": 32616, + "Ġpist": 32617, + "Ġrhythms": 32618, + "Ġreserv": 32619, + "337": 32620, + "ĠIDF": 32621, + "Ġshouts": 32622, + "midt": 32623, + "323": 32624, + "Ġsoothing": 32625, + "Ġadministr": 32626, + "Ġgloomy": 32627, + "Ġfutile": 32628, + "ĠProhibition": 32629, + "upon": 32630, + "ĠAnglic": 32631, + "seeking": 32632, + "Ġdodge": 32633, + "Ds": 32634, + "ĠGrants": 32635, + "editor": 32636, + "ĠInquis": 32637, + "Ġ1929": 32638, + "decl": 32639, + "ĠPorts": 32640, + "ĠCure": 32641, + "ĠDPRK": 32642, + "oct": 32643, + "Ġvocabulary": 32644, + "Ġcling": 32645, + "298": 32646, + "Ġpeac": 32647, + "Ġantibodies": 32648, + "dor": 32649, + "ĠWorse": 32650, + "Ġsmelled": 32651, + "Ġleash": 32652, + "MED": 32653, + "Ġdisinteg": 
32654, + "Ġtruthful": 32655, + "Ġsalesman": 32656, + "Ġsquares": 32657, + "susp": 32658, + "Ġcraving": 32659, + "Ġwizard": 32660, + "moral": 32661, + "ĠQué": 32662, + "Anything": 32663, + "Ġfalsehood": 32664, + "ARI": 32665, + "Ġcoworkers": 32666, + "Ġthy": 32667, + "outher": 32668, + "Ġbrushing": 32669, + "ĠProtest": 32670, + "ĠMF": 32671, + "abba": 32672, + "lead": 32673, + "ĠExhibit": 32674, + "Ga": 32675, + "ĠFranks": 32676, + "Ġdictates": 32677, + "illegal": 32678, + "Ġrelayed": 32679, + "Ġploy": 32680, + "ĠاÙĦ": 32681, + "ĠDocuments": 32682, + "Ġtint": 32683, + "ĠYuan": 32684, + "Ġdepended": 32685, + "Mir": 32686, + "ĠIntrodu": 32687, + "Ġrecourse": 32688, + "oqu": 32689, + "ĠTED": 32690, + "Ġdifferentiated": 32691, + "ĠWalls": 32692, + "Ġsentimental": 32693, + "Ġantis": 32694, + "retion": 32695, + "comes": 32696, + "ĠWORLD": 32697, + "Ġcoax": 32698, + "ĠTatt": 32699, + "ĠGingrich": 32700, + "2006": 32701, + "ĠBrut": 32702, + "Second": 32703, + "posed": 32704, + "shots": 32705, + "Ġ313": 32706, + "idian": 32707, + "alking": 32708, + "Ġdens": 32709, + "Ġgif": 32710, + "akings": 32711, + "Ġkeywords": 32712, + "Ġchast": 32713, + "Ġadversary": 32714, + "Ġnick": 32715, + "iasis": 32716, + "ĠLegisl": 32717, + "Ġcoff": 32718, + "ĠOriental": 32719, + "ĠMorg": 32720, + "ĠHAR": 32721, + "Ġlegalizing": 32722, + "Ġbanter": 32723, + "ĠTart": 32724, + "ĠTRI": 32725, + "Ġantagon": 32726, + "ĠGF": 32727, + "oler": 32728, + "ĠUFO": 32729, + "Therefore": 32730, + "ĠOsama": 32731, + "ĠStructure": 32732, + "apps": 32733, + "Ġpee": 32734, + "ĠSomehow": 32735, + "ĠOverwatch": 32736, + "ĠCasual": 32737, + "Ġdishon": 32738, + "SEE": 32739, + "ctive": 32740, + "andering": 32741, + "ĠTransformation": 32742, + "Andy": 32743, + "ĠFever": 32744, + "Ġspectator": 32745, + "Ġlash": 32746, + "Ġprotector": 32747, + "apy": 32748, + "Ġexhilar": 32749, + "aroo": 32750, + "Ġmamm": 32751, + "Ġbystand": 32752, + "acky": 32753, + "Ġdigestive": 32754, + "Ġamplified": 32755, + "Ġalpha": 32756, + "continue": 32757, + "Low": 32758, + "Ġdisgusted": 32759, + "356": 32760, + "script": 32761, + "Ġgenerational": 32762, + "ĠPassenger": 32763, + "sight": 32764, + "Ġcout": 32765, + "Ġhone": 32766, + "ulse": 32767, + "Ġignite": 32768, + "284": 32769, + "gow": 32770, + "Ġbinary": 32771, + "Ġincess": 32772, + "Review": 32773, + "607": 32774, + "ĠSurprise": 32775, + "Ġirritation": 32776, + "ĠBarth": 32777, + "ĠGum": 32778, + "Ġvideot": 32779, + "ĠFres": 32780, + "asons": 32781, + "Ġcollaborator": 32782, + "fal": 32783, + "ĠGon": 32784, + "Ġsettles": 32785, + "regular": 32786, + "Ġmiscarriage": 32787, + "cube": 32788, + "Ġsubord": 32789, + "ĠRegistered": 32790, + "Ġnotions": 32791, + "zzy": 32792, + "Ġrevert": 32793, + "OFF": 32794, + "Ġhasht": 32795, + "ĠPNG": 32796, + "Ġunimaginable": 32797, + "builders": 32798, + "Taylor": 32799, + "ĠPAY": 32800, + "Ġ).": 32801, + "Ġ238": 32802, + "ĠLAST": 32803, + "MAS": 32804, + "Ġillustrations": 32805, + "Ġparody": 32806, + "Ġdispersed": 32807, + "ĠRoses": 32808, + "Ġestimation": 32809, + "ĠGets": 32810, + "Patrick": 32811, + "CHA": 32812, + "Ġmisdem": 32813, + "agate": 32814, + "alter": 32815, + "Ġgeo": 32816, + "Ġenormously": 32817, + "Ġarrogance": 32818, + "Ġpert": 32819, + "Ġmeta": 32820, + "ĠJuno": 32821, + "iov": 32822, + "imov": 32823, + "Ġchores": 32824, + "acan": 32825, + "Paris": 32826, + "313": 32827, + "Lewis": 32828, + "Ġwillingly": 32829, + "ERA": 32830, + "Ġencaps": 32831, + "ilk": 32832, + "Ġnodes": 32833, + "Ġenzyme": 32834, + "want": 32835, + "Ġtolerant": 32836, + "Ġcondos": 32837, + 
"Ġasserts": 32838, + "Ġcanon": 32839, + "Ġscanned": 32840, + "bishop": 32841, + "Ġperched": 32842, + "util": 32843, + "ĠBonus": 32844, + "create": 32845, + "ĠFuk": 32846, + "Ġmotif": 32847, + "Ġcontemplate": 32848, + "ĠBEN": 32849, + "imir": 32850, + "Ġacadem": 32851, + "uvian": 32852, + "ĠIdeas": 32853, + "ĠCY": 32854, + "Ġants": 32855, + "Ġprostitutes": 32856, + "2005": 32857, + "Spring": 32858, + "ĠBarrel": 32859, + "ĠAunt": 32860, + "ĠLudwig": 32861, + "ĠHerm": 32862, + "PRO": 32863, + "obiles": 32864, + "rack": 32865, + "STER": 32866, + "ucket": 32867, + "Ġmun": 32868, + "Ġ419": 32869, + "ICES": 32870, + "Ġcardio": 32871, + "Ġtrenches": 32872, + "Nation": 32873, + "yahoo": 32874, + "Ġburd": 32875, + "Ġnost": 32876, + "Ġappropriations": 32877, + "ĠChili": 32878, + "Josh": 32879, + "GW": 32880, + "Ġoppressed": 32881, + "ĠBEFORE": 32882, + "Ġmurderous": 32883, + "Pen": 32884, + "achable": 32885, + "Ġrive": 32886, + "Ġculmin": 32887, + "Ġdefin": 32888, + "ĠMord": 32889, + "idate": 32890, + "ĠChim": 32891, + "ource": 32892, + "ĠElectro": 32893, + "orthy": 32894, + "Ġcalendars": 32895, + "regation": 32896, + "Ġretrospect": 32897, + "ĠTribal": 32898, + "ĠHes": 32899, + "Ġcran": 32900, + "Ġcreditor": 32901, + "Ġfibers": 32902, + "note": 32903, + "idays": 32904, + "ĠSebast": 32905, + "ĠKitty": 32906, + "Ġplainly": 32907, + "ĠLAPD": 32908, + "Ġtrumpet": 32909, + "ĠAppropriations": 32910, + "Hill": 32911, + "ĠVeget": 32912, + "296": 32913, + "lated": 32914, + "othes": 32915, + "ibrarian": 32916, + "Listen": 32917, + "nex": 32918, + "WHO": 32919, + "Ġshampoo": 32920, + "Ġclaimants": 32921, + "Ġisol": 32922, + "Ġunchecked": 32923, + "Ġmov": 32924, + "umo": 32925, + "ĠLens": 32926, + "Ġdiscreet": 32927, + "Ġrespectfully": 32928, + "Ġreclaimed": 32929, + "ĠHatt": 32930, + "thus": 32931, + "ĠFlo": 32932, + "Ġsumm": 32933, + "phas": 32934, + "ĠHaitian": 32935, + "Ġstrife": 32936, + "Ġabound": 32937, + "verted": 32938, + "Ġpatronage": 32939, + "449": 32940, + "Ġprelim": 32941, + "ĠZhu": 32942, + "ĠRevel": 32943, + "adic": 32944, + "Ġminded": 32945, + "ĠStability": 32946, + "Ġresembling": 32947, + "Ġvending": 32948, + "ischer": 32949, + "Ġkisses": 32950, + "Ġsuperiority": 32951, + "Ġinfinite": 32952, + "ISC": 32953, + "880": 32954, + "Ġappease": 32955, + "VO": 32956, + "404": 32957, + "ECH": 32958, + "gam": 32959, + "River": 32960, + "metal": 32961, + "determination": 32962, + "Cook": 32963, + "Ġbuds": 32964, + "Ġ(%)": 32965, + "ĠCreated": 32966, + "Ġstrut": 32967, + "Ġ425": 32968, + "Ġverte": 32969, + "ĠOrb": 32970, + "Ġweaving": 32971, + "261": 32972, + "Ġflyers": 32973, + "spons": 32974, + "ĠCovenant": 32975, + "570": 32976, + "Ġintangible": 32977, + "ĠBJ": 32978, + "ĠStead": 32979, + "ĠBrune": 32980, + "pain": 32981, + "independent": 32982, + "Ball": 32983, + "witch": 32984, + "ĠIon": 32985, + "Ġpupp": 32986, + "Cash": 32987, + "ĠConvert": 32988, + "Ġimpede": 32989, + "broad": 32990, + "onew": 32991, + "Ġsynergy": 32992, + "Ġcoined": 32993, + "620": 32994, + "ivalent": 32995, + "ĠInfect": 32996, + "ĠAqua": 32997, + "Together": 32998, + "ĠChemistry": 32999, + "ĠURL": 33000, + "ampion": 33001, + "Ġdeclarations": 33002, + "Ġaffirmative": 33003, + "umper": 33004, + "ĠTarant": 33005, + "Ġstereotype": 33006, + "Ġbookstore": 33007, + "incre": 33008, + "Ġchipset": 33009, + "Ġangst": 33010, + "Jose": 33011, + "laus": 33012, + "Ġheater": 33013, + "ipers": 33014, + "Ġeminent": 33015, + "hook": 33016, + "sticks": 33017, + "ĠCoul": 33018, + "Ġmildly": 33019, + "SG": 33020, + "Ġworm": 33021, + "Ġdisable": 33022, 
+ "Ġperfume": 33023, + "ISTER": 33024, + "Ġgathers": 33025, + "ĠLotus": 33026, + "hyp": 33027, + "actus": 33028, + "Ġdistinctly": 33029, + "fifth": 33030, + "!),": 33031, + "ĠCrunch": 33032, + "Ġcohesive": 33033, + "Ġfortunately": 33034, + "Ġninety": 33035, + "Ġcartels": 33036, + "empl": 33037, + "Direct": 33038, + "Ġcommuting": 33039, + "ĠSX": 33040, + "ractive": 33041, + "Ġtranslating": 33042, + "ĠAQ": 33043, + "Ġslay": 33044, + "abuse": 33045, + "ĠProc": 33046, + "ĠCantor": 33047, + "ĠTas": 33048, + "Sir": 33049, + "Thom": 33050, + "ĠCHRIST": 33051, + "Ġreceptive": 33052, + "ĠCornel": 33053, + "Arab": 33054, + "Ġgrammar": 33055, + "Ġhandlers": 33056, + "Ġalloy": 33057, + "Ġthinly": 33058, + "adem": 33059, + "Ġproponent": 33060, + "ĠPVC": 33061, + "Ġstump": 33062, + "tom": 33063, + "rets": 33064, + "iciency": 33065, + "780": 33066, + "Ġ311": 33067, + "ĠClapper": 33068, + "ITAL": 33069, + "Ùħ": 33070, + "Ġnarrator": 33071, + "Ġblond": 33072, + "Ġintermittent": 33073, + "Ġcollabor": 33074, + "646": 33075, + "Ġmetast": 33076, + "Ġregeneration": 33077, + "ĠLegendary": 33078, + "Ġgenitals": 33079, + "Ġbartender": 33080, + "atson": 33081, + "Okay": 33082, + "Ġpassages": 33083, + "Ġsubstituted": 33084, + "orr": 33085, + "ALTH": 33086, + "Ġartic": 33087, + "Ġascent": 33088, + "Ġmatured": 33089, + "Ġterminology": 33090, + "served": 33091, + "ĠDeliver": 33092, + "Ġattic": 33093, + "anges": 33094, + "Ġrenaissance": 33095, + "Ġbleed": 33096, + "claimer": 33097, + "onse": 33098, + "Sec": 33099, + "Ġparticle": 33100, + "aneous": 33101, + "ateur": 33102, + "Ġzeal": 33103, + "ĠPets": 33104, + "Working": 33105, + "ĠRespect": 33106, + "Ġsermon": 33107, + "ĠProvided": 33108, + "Ġfilibuster": 33109, + "Ġabolished": 33110, + "reviewed": 33111, + "cription": 33112, + "Ġrevers": 33113, + "atered": 33114, + "435": 33115, + "Ġwhe": 33116, + "ometown": 33117, + "UFC": 33118, + "products": 33119, + "Winter": 33120, + "Ġ304": 33121, + "Ġsporadic": 33122, + "orough": 33123, + "EB": 33124, + "ĠAgric": 33125, + "ĠMTA": 33126, + "wic": 33127, + "Ġpowerless": 33128, + "Ġcarrot": 33129, + "ww": 33130, + "Ġabsorption": 33131, + "ĠTyphoon": 33132, + "Turkey": 33133, + "Ġproclaim": 33134, + "Ġhikers": 33135, + "Ġpractise": 33136, + "/$": 33137, + "Ġfingertips": 33138, + "Ġbaff": 33139, + "vu": 33140, + "Ġans": 33141, + "plug": 33142, + "Ġacquaintance": 33143, + "itement": 33144, + "ihar": 33145, + "Ġreluctantly": 33146, + "Ġforc": 33147, + "Ġguarant": 33148, + "ĠWanted": 33149, + "Walk": 33150, + "addle": 33151, + "unders": 33152, + "Fred": 33153, + "Ġtides": 33154, + "ĠBai": 33155, + "Ġcountering": 33156, + "raper": 33157, + "ursions": 33158, + "ĠFlav": 33159, + "pared": 33160, + "raised": 33161, + "Ñı": 33162, + "ĠDiff": 33163, + "Ġreload": 33164, + "ourses": 33165, + "ĠBurning": 33166, + "Ġwand": 33167, + "Ġledger": 33168, + "Ġcoughing": 33169, + "ĠLoren": 33170, + "Nazis": 33171, + "Ġcompile": 33172, + "Eight": 33173, + "icultural": 33174, + "yy": 33175, + "Ġ1932": 33176, + "Run": 33177, + "AIN": 33178, + "Ġattractiveness": 33179, + "ĠOmn": 33180, + "Ġconfer": 33181, + "compliance": 33182, + "Ġembed": 33183, + "Steven": 33184, + "2001": 33185, + "Ġdecre": 33186, + "Ġprompts": 33187, + "ĠHare": 33188, + "Ġleaping": 33189, + "Ġslaughtered": 33190, + "Ġforfeiture": 33191, + "342": 33192, + "Charl": 33193, + "CDC": 33194, + "ographically": 33195, + "Ġduplicate": 33196, + "Ġdistracting": 33197, + "examination": 33198, + "Ġpeas": 33199, + "Ġcatchy": 33200, + "Ġdives": 33201, + "ĠAda": 33202, + "Hay": 33203, + 
"Ġenthusiastically": 33204, + "Ġfunky": 33205, + "kay": 33206, + "EVA": 33207, + "Ġpsychologists": 33208, + "Ġancestry": 33209, + "iyah": 33210, + "ifter": 33211, + "nob": 33212, + "518": 33213, + "rouse": 33214, + "Ġchord": 33215, + "Ġcone": 33216, + "Ġbarracks": 33217, + "ĠRoyale": 33218, + "ĠIntegration": 33219, + "Ġtrolling": 33220, + "ĠSynt": 33221, + "andals": 33222, + "ĠGrain": 33223, + "ĠNeck": 33224, + "618": 33225, + "Ġrapist": 33226, + "pins": 33227, + "Ġwitty": 33228, + "Ġdehydration": 33229, + "arlane": 33230, + "Ġimmoral": 33231, + "Ġaccum": 33232, + "ĠMcAuliffe": 33233, + "slow": 33234, + "Ġinjust": 33235, + "Ġ1700": 33236, + "Ġcarbs": 33237, + "Ġintel": 33238, + "Non": 33239, + "isks": 33240, + "Tre": 33241, + "Ġinterviewer": 33242, + "sam": 33243, + "Ġdelve": 33244, + "Ġadmirable": 33245, + "ĠROM": 33246, + "ĠHispanics": 33247, + "Ġimpart": 33248, + "Ġunderrated": 33249, + "Ġvictimized": 33250, + "ĠPsych": 33251, + "ppings": 33252, + "Ġ610": 33253, + "pole": 33254, + "Ġdiner": 33255, + "ĠScale": 33256, + "Ġunforeseen": 33257, + "surprisingly": 33258, + "opus": 33259, + "ĠCOURT": 33260, + "Ġjuggling": 33261, + "ĠFacilities": 33262, + "Aid": 33263, + "ĠHPV": 33264, + "Ġcrawling": 33265, + "flu": 33266, + "etary": 33267, + "ĠHarriet": 33268, + "329": 33269, + "ĠSod": 33270, + "ĠBiological": 33271, + "birth": 33272, + "ribed": 33273, + "Ġpulses": 33274, + "396": 33275, + "eways": 33276, + "ĠAlma": 33277, + "nov": 33278, + "015": 33279, + "ricane": 33280, + "agna": 33281, + "Ak": 33282, + "ĠClaim": 33283, + "Ġpref": 33284, + "Ġinterfaces": 33285, + "ĠADHD": 33286, + "604": 33287, + "ZE": 33288, + "venture": 33289, + "Ġascend": 33290, + "ĠGou": 33291, + "Ġpriceless": 33292, + "redo": 33293, + "kw": 33294, + "Conf": 33295, + "Ġmah": 33296, + "Ġpoets": 33297, + "Ġstalk": 33298, + "Ġencamp": 33299, + "Ġhopped": 33300, + "Ġmelody": 33301, + "JECT": 33302, + "eming": 33303, + "Ġbewild": 33304, + "aternal": 33305, + "uchs": 33306, + "dit": 33307, + "ĠTransmission": 33308, + "Lake": 33309, + "Ġatoms": 33310, + "ĠThoughts": 33311, + "ilts": 33312, + "volume": 33313, + "Ġsocioeconomic": 33314, + "atisf": 33315, + "Ġnarr": 33316, + "zinski": 33317, + "ymes": 33318, + "episode": 33319, + "Ġinherit": 33320, + "Ġintending": 33321, + "Ġarenas": 33322, + "uras": 33323, + "burning": 33324, + "334": 33325, + "teenth": 33326, + "Ġsophistication": 33327, + "Ġscreenshots": 33328, + "Ġautistic": 33329, + "lip": 33330, + "paper": 33331, + "Ġmonopol": 33332, + "799": 33333, + "forms": 33334, + "ocrats": 33335, + "Ġpineapple": 33336, + "Ġbegs": 33337, + "Ġpersecuted": 33338, + "Ġsubscribed": 33339, + "Ġelic": 33340, + "ĠPRESIDENT": 33341, + "297": 33342, + "Ġpreferential": 33343, + "Ġpyramid": 33344, + "Ġconvergence": 33345, + "Ġwob": 33346, + "Project": 33347, + "ĠAluminum": 33348, + "ĠJPM": 33349, + "ĠBAT": 33350, + "Ġdolphins": 33351, + "018": 33352, + "healthy": 33353, + "ĠCG": 33354, + "ĠEffective": 33355, + "worm": 33356, + "ĠEas": 33357, + "olicited": 33358, + "ĠUSE": 33359, + "ĠCaval": 33360, + "Ġswirl": 33361, + "Ġspaghetti": 33362, + "Ġinward": 33363, + "Republican": 33364, + "Ġpublicized": 33365, + "Ġeconomical": 33366, + "Ġsalsa": 33367, + "ĠTitanic": 33368, + "dot": 33369, + "Ġcontro": 33370, + "ĠBangl": 33371, + "iban": 33372, + "ĠKlux": 33373, + "Ġhinges": 33374, + "610": 33375, + "Ġvalves": 33376, + "profits": 33377, + "Wonder": 33378, + "Ġorient": 33379, + "Ġsque": 33380, + "Ġprivatization": 33381, + "Obama": 33382, + "Thousands": 33383, + "ĠTasman": 33384, + "Ġmaze": 33385, + "eem": 
33386, + "Ġsurvives": 33387, + "istant": 33388, + "Ġenriched": 33389, + "Ġencl": 33390, + "Ġcompliments": 33391, + "ĠShoes": 33392, + "Ġinsanity": 33393, + "consider": 33394, + "agog": 33395, + "Ġbaffled": 33396, + "Ġ°": 33397, + "ĠWordPress": 33398, + "qus": 33399, + "usual": 33400, + "stall": 33401, + "Deb": 33402, + "ĠRothschild": 33403, + "Ġesche": 33404, + "Ġsoph": 33405, + "Ġambiguous": 33406, + "negative": 33407, + "Ġdiscouraging": 33408, + "Alexander": 33409, + "319": 33410, + "Ġsummon": 33411, + "ipation": 33412, + "000000": 33413, + "Ġminimalist": 33414, + "Ġenraged": 33415, + "777": 33416, + "Ġplanetary": 33417, + "Ġthroughput": 33418, + "Ġtemperament": 33419, + "ĠNIC": 33420, + "ileged": 33421, + "minster": 33422, + "ĠPLEASE": 33423, + "Ġexagger": 33424, + "ĠDescription": 33425, + "Ġagitated": 33426, + "Ġimmortal": 33427, + "Ġrenders": 33428, + "Ġcharisma": 33429, + "sequ": 33430, + "Ġmajorities": 33431, + "Ġfreaking": 33432, + "ĠAdvice": 33433, + "Ġembodies": 33434, + "stable": 33435, + "Ġcustomization": 33436, + "started": 33437, + "ĠAutism": 33438, + "Ġparticipates": 33439, + "ĠUTC": 33440, + "Marco": 33441, + "Ġoddly": 33442, + "Ġantiqu": 33443, + "ĠPear": 33444, + "ĠFey": 33445, + "Ġcertify": 33446, + "Ġdisillusion": 33447, + "ĠPhysicians": 33448, + "obl": 33449, + "855": 33450, + "Ġelim": 33451, + "Ġ335": 33452, + "Ol": 33453, + "ĠSear": 33454, + "Ġnuances": 33455, + "past": 33456, + "Sa": 33457, + "ĠSlov": 33458, + "Ġfiltered": 33459, + "Ġanalogy": 33460, + "Ġformulate": 33461, + "Ġarmies": 33462, + "Ġpuls": 33463, + "fters": 33464, + "ilipp": 33465, + "ĠHOT": 33466, + "485": 33467, + "ĠAfghans": 33468, + "Ġtopical": 33469, + "ĠBunny": 33470, + "seeing": 33471, + "Ġeloqu": 33472, + "Ġkidneys": 33473, + "ĠDEM": 33474, + "pent": 33475, + "Ġhus": 33476, + "stores": 33477, + "ĠProtestant": 33478, + "Comm": 33479, + "label": 33480, + "Kings": 33481, + "ĠPurpose": 33482, + "âĢ¦..": 33483, + "Ġaccumulating": 33484, + "calling": 33485, + "Ġgiveaways": 33486, + "Ġpredicament": 33487, + "Ġtyp": 33488, + "Ġtraveler": 33489, + "003": 33490, + "impro": 33491, + "fac": 33492, + "Ġmapped": 33493, + "itious": 33494, + "Ġmasculinity": 33495, + "Ġtantal": 33496, + "ĠDJs": 33497, + "Ġviewpoints": 33498, + "Burn": 33499, + "ĠWii": 33500, + "pak": 33501, + "ĠEB": 33502, + "Ġhinge": 33503, + "Ġfacets": 33504, + "Ġphotographic": 33505, + "Ġcompiling": 33506, + "Ġdecks": 33507, + "Ġarticulated": 33508, + "Federal": 33509, + "crim": 33510, + "llah": 33511, + "Ġfiasco": 33512, + "ĠLIST": 33513, + "oute": 33514, + "ĠDraper": 33515, + "ĠLaos": 33516, + "Ġclimbers": 33517, + "raph": 33518, + "ĠDek": 33519, + "WAY": 33520, + "Ġgreets": 33521, + "Ġoppressive": 33522, + "otor": 33523, + "otiation": 33524, + "\":[": 33525, + "Record": 33526, + "mining": 33527, + "Town": 33528, + "Ġfavorably": 33529, + "ĠYoutube": 33530, + "William": 33531, + "Ġlan": 33532, + "âĢ²": 33533, + "ĠSpec": 33534, + "Ġtranquil": 33535, + "ĠClient": 33536, + "oln": 33537, + "celona": 33538, + "Ġrealistically": 33539, + "Ġmisplaced": 33540, + "ĠBie": 33541, + "bye": 33542, + "Yo": 33543, + "465": 33544, + "ĠMadagascar": 33545, + "oplan": 33546, + "arist": 33547, + "Ġconfines": 33548, + "Ġï": 33549, + "awks": 33550, + "Ġpiracy": 33551, + "Ġunwelcome": 33552, + "Intel": 33553, + "Ġparanoid": 33554, + "CLAIM": 33555, + "Ġblush": 33556, + "united": 33557, + "Ġmotivational": 33558, + "ĠVII": 33559, + "Ġdiabetic": 33560, + "Ġantiv": 33561, + "Ġdissect": 33562, + "Ġbestselling": 33563, + "Ġfluffy": 33564, + "ĠRemote": 33565, + "Ġvert": 
33566, + "Correct": 33567, + "Ġcolossal": 33568, + "Ġcontrasts": 33569, + "Ġcirca": 33570, + "ĠDamage": 33571, + "Ġunrel": 33572, + "Ġdiscrepancy": 33573, + "ĠCIS": 33574, + "ĠCLASS": 33575, + "ilty": 33576, + "Ġsynopsis": 33577, + "emed": 33578, + "cakes": 33579, + "ibal": 33580, + "inea": 33581, + "ienced": 33582, + "Ġimplicit": 33583, + "ĠLOOK": 33584, + "Ġsilhouette": 33585, + "affiliated": 33586, + "ĠHalo": 33587, + "377": 33588, + "Ġlyr": 33589, + "ĠVide": 33590, + "herent": 33591, + "Ġbadges": 33592, + "plays": 33593, + "orea": 33594, + "Ġjammed": 33595, + "cancer": 33596, + "ĠYep": 33597, + "racted": 33598, + "ĠDisability": 33599, + "Ġfooth": 33600, + "friends": 33601, + "Ġbloated": 33602, + "Bet": 33603, + "ĠAntioch": 33604, + "Ġintrodu": 33605, + "Ġannexed": 33606, + "ivism": 33607, + "ĠFlickr": 33608, + "pants": 33609, + "Ġinterruption": 33610, + "645": 33611, + "ĠIly": 33612, + "ĠOss": 33613, + "ĠAMA": 33614, + "Ġpolitely": 33615, + "Ġnatives": 33616, + "Ġrushes": 33617, + "enges": 33618, + "ĠHarm": 33619, + "Ġdestroyer": 33620, + "ĠEstimates": 33621, + "Ġtransforms": 33622, + "Ġinvariably": 33623, + "Ġcac": 33624, + "iency": 33625, + "599": 33626, + "Ġconstitutionally": 33627, + "Ġrappers": 33628, + "ĠSettlement": 33629, + "icz": 33630, + "Ġhardened": 33631, + "citizens": 33632, + "Ġcircling": 33633, + "Ġtrapping": 33634, + "Ġguaranteeing": 33635, + "690": 33636, + "agher": 33637, + "Ġarcade": 33638, + "Ġfanc": 33639, + "Ġslapping": 33640, + "OPS": 33641, + "Ġmasse": 33642, + "Ġpudding": 33643, + "Jac": 33644, + "ĠGraphics": 33645, + "Ġuptake": 33646, + "?,": 33647, + "Fair": 33648, + "ĠSatan": 33649, + "uffy": 33650, + "ĠGuatem": 33651, + "ĠTransaction": 33652, + "Ġunlocking": 33653, + "ĠLINE": 33654, + "Ġapprehens": 33655, + "Ġglean": 33656, + "291": 33657, + "Ġexacerbate": 33658, + "ĠTrave": 33659, + "ĠTrop": 33660, + "Supp": 33661, + "Ġqueens": 33662, + "cart": 33663, + "Ġscrolling": 33664, + "Ġox": 33665, + "cone": 33666, + "Matthew": 33667, + "ĠDIRECT": 33668, + "Ġbacker": 33669, + "Ġthyroid": 33670, + "Sarah": 33671, + "ĠEDIT": 33672, + "ĠActivision": 33673, + "352": 33674, + "Ġreinforcements": 33675, + "Ġding": 33676, + "Ġplush": 33677, + "Ġpeanuts": 33678, + "ĠFant": 33679, + "ĠPediatrics": 33680, + "Ġaccommodating": 33681, + "ĠPractices": 33682, + "Answer": 33683, + "racial": 33684, + "ĠConstant": 33685, + "740": 33686, + "strength": 33687, + "apist": 33688, + "Ġsynthes": 33689, + "ĠLeap": 33690, + "ĠFabric": 33691, + "Ġbrainstorm": 33692, + "obia": 33693, + "Ġconception": 33694, + "Ġtuberculosis": 33695, + "Ġmajestic": 33696, + "ĠTitus": 33697, + "ĠTee": 33698, + "Ġlikeness": 33699, + "ĠSEA": 33700, + "lite": 33701, + "Ġ950": 33702, + "sufficient": 33703, + "Ġtrem": 33704, + "Ġharshly": 33705, + "Ġredacted": 33706, + "Ġwelding": 33707, + "Ġperplex": 33708, + "Ġpoetic": 33709, + "Ġinsignificant": 33710, + "Ġware": 33711, + "Ġwandered": 33712, + "Ġmete": 33713, + "ĠSTART": 33714, + "Ġweaponry": 33715, + "opsy": 33716, + "shadow": 33717, + "Ġobsc": 33718, + "hare": 33719, + "ĠOPEN": 33720, + "Ġdiligent": 33721, + "Girls": 33722, + "Ġinitials": 33723, + "Start": 33724, + "ĠBrookings": 33725, + "ombs": 33726, + "Ġlashes": 33727, + "essor": 33728, + "Ġgravy": 33729, + "ĠUbuntu": 33730, + "Tree": 33731, + "Ġ435": 33732, + "Ġcellar": 33733, + "Ġaquarium": 33734, + "ĠPodesta": 33735, + "361": 33736, + "ĠController": 33737, + "Ġeru": 33738, + "reasonable": 33739, + "Ġpermissions": 33740, + "725": 33741, + "Ġadministering": 33742, + "Ġflirt": 33743, + "Ġfleeting": 33744, + 
"asive": 33745, + "Ġsubcontract": 33746, + "Ġfascist": 33747, + "Ġcabbage": 33748, + "science": 33749, + "Ġboiler": 33750, + "ioned": 33751, + "Ġintegrates": 33752, + "Ġresidue": 33753, + "KEY": 33754, + "Ġwi": 33755, + "Ġsquared": 33756, + "Unless": 33757, + "Ġmute": 33758, + "ĠTuc": 33759, + "Ġverb": 33760, + "Gary": 33761, + "Ġexperimentation": 33762, + "fee": 33763, + "chini": 33764, + "Ġmarrow": 33765, + "ĠBalt": 33766, + "Ġnodded": 33767, + "tn": 33768, + "Ġmissionary": 33769, + "OTO": 33770, + "Ġoptimum": 33771, + "555": 33772, + "Ġwhipping": 33773, + "aunts": 33774, + "ĠScene": 33775, + "Ġcharacterize": 33776, + "Ġretrospective": 33777, + "Ġutilizes": 33778, + "Ġhastily": 33779, + "older": 33780, + "ĠPW": 33781, + "Ġsleepy": 33782, + "020": 33783, + "ĠAcid": 33784, + "Ġridiculously": 33785, + "Ġgigg": 33786, + "649": 33787, + "Ġcrus": 33788, + "ĠShame": 33789, + "ĠTorn": 33790, + "finding": 33791, + "IPS": 33792, + "Ġplat": 33793, + "ometers": 33794, + "Ġamphib": 33795, + "ellow": 33796, + "ĠSpecies": 33797, + "commercial": 33798, + "Ġvirgin": 33799, + "Ġdarn": 33800, + "Ġsorely": 33801, + "Ġrespondent": 33802, + "Ġray": 33803, + "ĠCONS": 33804, + "Ġunequivocally": 33805, + "server": 33806, + "Ġdrip": 33807, + "ĠRazor": 33808, + "Ban": 33809, + "ĠHMS": 33810, + "Ġhijab": 33811, + "ĠMuss": 33812, + "Ġsandy": 33813, + "Ġaversion": 33814, + "Ġoverarching": 33815, + "Ġultr": 33816, + "ĠIraqis": 33817, + "Ġuninterrupted": 33818, + "Ġrouting": 33819, + "Ġundone": 33820, + "independence": 33821, + "gra": 33822, + "ysics": 33823, + "inflammatory": 33824, + "cussion": 33825, + "ĠDefinitely": 33826, + "Ġelastic": 33827, + "peer": 33828, + "ĠGiov": 33829, + "ĠMandarin": 33830, + "Ġscratches": 33831, + "Ġphysicist": 33832, + "Ġbestowed": 33833, + "usually": 33834, + "OULD": 33835, + "igration": 33836, + "Human": 33837, + "Dead": 33838, + "osph": 33839, + "bott": 33840, + "doctoral": 33841, + "Ġbending": 33842, + "Ġconfigurations": 33843, + "psych": 33844, + "db": 33845, + "ĠUD": 33846, + "Ġarteries": 33847, + "orically": 33848, + "Ġblasphemy": 33849, + "jj": 33850, + "checking": 33851, + "adian": 33852, + "IRD": 33853, + "ĠDialogue": 33854, + "Ġshielded": 33855, + "ĠVox": 33856, + "Dave": 33857, + "Ġturb": 33858, + "ĠMassive": 33859, + "ĠBMI": 33860, + "ĠNF": 33861, + "uced": 33862, + "ickle": 33863, + "ishable": 33864, + "Ġembody": 33865, + "ÙĪ": 33866, + "Senior": 33867, + "ĠResult": 33868, + "try": 33869, + "egu": 33870, + "401": 33871, + "ĠLoyal": 33872, + "Ġperilous": 33873, + "Ġdissu": 33874, + "Ġmythology": 33875, + "ĠWax": 33876, + "Jesus": 33877, + "ĠMotorsport": 33878, + "Ġadvis": 33879, + "ĠAki": 33880, + "ISM": 33881, + "tested": 33882, + "Ġplag": 33883, + "Ġriches": 33884, + "ĠOCT": 33885, + "ĠLocke": 33886, + "BG": 33887, + "Ġ460": 33888, + "rawl": 33889, + "ĠTermin": 33890, + "Ġ295": 33891, + "Ġchopping": 33892, + "KT": 33893, + "Ġconverts": 33894, + "Ask": 33895, + "alse": 33896, + "ĠKeynes": 33897, + "Ġrefuted": 33898, + "Ġrabbits": 33899, + "Ġbilingual": 33900, + "urse": 33901, + "ĠSalad": 33902, + "odiac": 33903, + "Ġsolidly": 33904, + "Dam": 33905, + "Ġpp": 33906, + "rities": 33907, + "Rah": 33908, + "itness": 33909, + "Ġsixty": 33910, + "332": 33911, + "cold": 33912, + "Ġhindered": 33913, + "Ġclipped": 33914, + "Ġreceptor": 33915, + "ĠHoms": 33916, + "Ġdusk": 33917, + "Ġarchae": 33918, + "LR": 33919, + "Ġrods": 33920, + "Ġ257": 33921, + "ĠSith": 33922, + "ĠPumpkin": 33923, + "ellation": 33924, + "ĠWD": 33925, + "Ġdecriminal": 33926, + "Ġusable": 33927, + "Ġcheerful": 
33928, + "ĠInform": 33929, + "Ġbrushes": 33930, + "vier": 33931, + "ĠBrush": 33932, + "590": 33933, + "boost": 33934, + "guided": 33935, + "ĠMJ": 33936, + "Ġsatirical": 33937, + "ortion": 33938, + "efficiency": 33939, + "Ġstrands": 33940, + "ĠWilde": 33941, + "Ġreproduce": 33942, + "verage": 33943, + "Ġlug": 33944, + "Ġhist": 33945, + "offer": 33946, + "Ġcollapses": 33947, + "Ġclerks": 33948, + "Ġairstrike": 33949, + "IPP": 33950, + "iscover": 33951, + "Ġnefarious": 33952, + "Ġstripe": 33953, + "Ġbona": 33954, + "ocon": 33955, + "Ġpunishments": 33956, + "ITED": 33957, + "ĠAltern": 33958, + "testing": 33959, + "Ġeerie": 33960, + "erous": 33961, + "Ġcaves": 33962, + "Ġcondemns": 33963, + "ĠDropbox": 33964, + "inese": 33965, + "axis": 33966, + "ĠRegistry": 33967, + "ĠMong": 33968, + "Ġbullies": 33969, + "Ġdocks": 33970, + "ĠAlter": 33971, + "rella": 33972, + "446": 33973, + "ĠDare": 33974, + "Ġvirtues": 33975, + "Ġdont": 33976, + "Value": 33977, + "ENE": 33978, + "received": 33979, + "Ġseaf": 33980, + "476": 33981, + "ilon": 33982, + "ĠKits": 33983, + "Ġrarity": 33984, + "Ġnurt": 33985, + "skin": 33986, + "ĠUL": 33987, + "ĠRegiment": 33988, + "terior": 33989, + "hate": 33990, + "ĠEstimated": 33991, + "ĠSilence": 33992, + "Ġorganism": 33993, + "ĠSigned": 33994, + "ĠIA": 33995, + "bite": 33996, + "Ġthicker": 33997, + "Ġeyeb": 33998, + "Ġjournalistic": 33999, + "ĠDisp": 34000, + "margin": 34001, + "Dri": 34002, + "Ġcomplexes": 34003, + "Ġimaginary": 34004, + "Ġrefuel": 34005, + "Ġmeticulous": 34006, + "Dub": 34007, + "Ġhaze": 34008, + "860": 34009, + "Ġproverbial": 34010, + "Ġozone": 34011, + "cale": 34012, + "resent": 34013, + "Ġdiscrete": 34014, + "boats": 34015, + "Ġ343": 34016, + "ĠRET": 34017, + "Ġsailor": 34018, + "hair": 34019, + "gear": 34020, + "Ġmalt": 34021, + "Ġpeach": 34022, + "ĠRabb": 34023, + "699": 34024, + "318": 34025, + "ĠVerge": 34026, + "Fin": 34027, + "ĠMighty": 34028, + "ierce": 34029, + "403": 34030, + "Ġdisenfranch": 34031, + "bass": 34032, + "nice": 34033, + "Ġsinks": 34034, + "ĠLaugh": 34035, + "367": 34036, + "ĠZur": 34037, + "Ġtravers": 34038, + "ĠMystery": 34039, + "onsense": 34040, + "ĠMonarch": 34041, + "Ġleapt": 34042, + "ergy": 34043, + "porate": 34044, + "display": 34045, + "ilet": 34046, + "Ġendemic": 34047, + "Bern": 34048, + "Ġpulmonary": 34049, + "Ġbroch": 34050, + "ĠManziel": 34051, + "Lyn": 34052, + "Repe": 34053, + "lda": 34054, + "hands": 34055, + "Ġtroublesome": 34056, + "Jordan": 34057, + "UTION": 34058, + "ĠALP": 34059, + "ĠLEG": 34060, + "Ġreconnaissance": 34061, + "ĠRNA": 34062, + "letters": 34063, + "ĠYounger": 34064, + "ĠLW": 34065, + "ĠSensor": 34066, + "388": 34067, + "Ġwielding": 34068, + "spr": 34069, + "Ġancestral": 34070, + "331": 34071, + "OTH": 34072, + "ĠAxis": 34073, + "irement": 34074, + "ĠCompact": 34075, + "voice": 34076, + "Ġpercussion": 34077, + "Ġendeav": 34078, + "Kate": 34079, + "ĠJACK": 34080, + "ĠMagnus": 34081, + "Ġinterconnected": 34082, + "ĠTraff": 34083, + "demon": 34084, + "Ġardent": 34085, + "ĠSomers": 34086, + "andum": 34087, + "346": 34088, + "heartedly": 34089, + "ayne": 34090, + "Design": 34091, + "melon": 34092, + "ĠCarib": 34093, + "Ġ1935": 34094, + "intention": 34095, + "cape": 34096, + "cend": 34097, + "organic": 34098, + "373": 34099, + "ĠRevival": 34100, + "ĠBLACK": 34101, + "Ġaspiration": 34102, + "yellow": 34103, + "bodied": 34104, + "Ġcrave": 34105, + "ĠIntelligent": 34106, + "ĠUnique": 34107, + "tab": 34108, + "386": 34109, + "ĠNess": 34110, + "Official": 34111, + "Stay": 34112, + "Ġcreat": 34113, + 
"iliary": 34114, + "rified": 34115, + "ĠPok": 34116, + "Ġabolition": 34117, + "Ka": 34118, + "ĠCourage": 34119, + "ĠDickens": 34120, + "rophic": 34121, + "ĠFAR": 34122, + "Ġfurnished": 34123, + ".âĢĵ": 34124, + "rete": 34125, + "Ġvaginal": 34126, + "hner": 34127, + "ĠLONG": 34128, + "imates": 34129, + "ĠLiter": 34130, + "ĠMeasures": 34131, + "ĠBelg": 34132, + "\"-": 34133, + "ĠRaider": 34134, + "enario": 34135, + "rification": 34136, + "ĠFISA": 34137, + "ĠStab": 34138, + "Ġnar": 34139, + "mund": 34140, + "Tenn": 34141, + "Ġwakes": 34142, + "Ġcharg": 34143, + "okers": 34144, + "assment": 34145, + "Ġsiph": 34146, + "Ġludicrous": 34147, + "670": 34148, + "Ġcompositions": 34149, + "Ġpinnacle": 34150, + "ĠRankings": 34151, + "ĠTelescope": 34152, + "secure": 34153, + "Ġib": 34154, + "Ġaptly": 34155, + "paste": 34156, + "ĠJUST": 34157, + "RD": 34158, + "herry": 34159, + "sung": 34160, + "Ġmig": 34161, + "naires": 34162, + "Ġmigrated": 34163, + "Base": 34164, + "Ġamazingly": 34165, + "Ġunregulated": 34166, + "published": 34167, + "ĠPIT": 34168, + "ĠMissile": 34169, + "extreme": 34170, + "ĠAlone": 34171, + "skilled": 34172, + "ĠRamp": 34173, + "Ġcamer": 34174, + "Ġflyer": 34175, + "Ġbrewers": 34176, + "ĠReference": 34177, + "ĠMOV": 34178, + "ĠLep": 34179, + "Ġentitle": 34180, + "ivals": 34181, + "ĠPIN": 34182, + "Ġbatches": 34183, + "Ġunexplained": 34184, + "Ġenergies": 34185, + "Ġblurred": 34186, + "enged": 34187, + "orig": 34188, + "WF": 34189, + "olves": 34190, + "ĠPicks": 34191, + "ĠTwice": 34192, + "arranted": 34193, + "Ġmembrane": 34194, + "ĠMoonlight": 34195, + "Ġsulfur": 34196, + "Ġpurposely": 34197, + "Ġfumes": 34198, + "Ġ(#": 34199, + "onics": 34200, + "ivities": 34201, + "rollers": 34202, + "Ġflattering": 34203, + "felt": 34204, + "Ġintoxication": 34205, + "Bridge": 34206, + "ĠFallout": 34207, + "Ġcreatively": 34208, + "Ġpsychologically": 34209, + "Ġdespicable": 34210, + "gae": 34211, + "820": 34212, + "VERS": 34213, + "Ġtidal": 34214, + "Ġcarbohydrates": 34215, + "strip": 34216, + "Ġgravitational": 34217, + "Ġfeds": 34218, + "ĠZhao": 34219, + "legates": 34220, + "Ġ307": 34221, + "String": 34222, + "ĠRepair": 34223, + "Ġ1928": 34224, + "orses": 34225, + "atography": 34226, + "Boston": 34227, + "Ġasymm": 34228, + "ĠSomebody": 34229, + "Van": 34230, + "ĠSovereign": 34231, + "Ġnotoriety": 34232, + "Ġsimulate": 34233, + "ĠDiscussion": 34234, + "ĠTransition": 34235, + "Ġcopying": 34236, + "antage": 34237, + "ĠRodrig": 34238, + "Ġindifference": 34239, + "Ġ580": 34240, + "Ġastronomical": 34241, + "Ġscrews": 34242, + "840": 34243, + "inates": 34244, + "ĠStreaming": 34245, + "Ġentit": 34246, + "ĠLiterature": 34247, + "369": 34248, + "805": 34249, + "OTS": 34250, + "о": 34251, + "img": 34252, + "inness": 34253, + "Ġreverber": 34254, + "Ġpartition": 34255, + "Short": 34256, + "Ġmoist": 34257, + "Ġspoof": 34258, + "ĠDesire": 34259, + "orce": 34260, + "Ġcrammed": 34261, + "Ġunfor": 34262, + "Pan": 34263, + "ingen": 34264, + "Ġrelat": 34265, + "Mother": 34266, + "ĠGn": 34267, + "altern": 34268, + "Ġresurg": 34269, + "Ġcramped": 34270, + "ĠCitadel": 34271, + "Ġlaureate": 34272, + "Ġanalys": 34273, + "Ġnuns": 34274, + "ĠTie": 34275, + "activ": 34276, + "ĠSurprisingly": 34277, + "ĠProtective": 34278, + "ĠRedemption": 34279, + "Ġendlessly": 34280, + "Ġfists": 34281, + "spl": 34282, + "ĠKron": 34283, + "ĠExamples": 34284, + "Especially": 34285, + "Ġprejud": 34286, + "ĠSchwar": 34287, + "Ġ237": 34288, + "ĠPlants": 34289, + "ĠUNDER": 34290, + "Ġlasers": 34291, + "Ġsher": 34292, + "Ġgoddess": 34293, + 
"Ġwipes": 34294, + "409": 34295, + "ĠGTA": 34296, + "Ġhybrids": 34297, + "rowd": 34298, + "ĠMILL": 34299, + "ĠNUM": 34300, + "ĠGeek": 34301, + "ĠTWO": 34302, + "ĠTimbers": 34303, + "Ġresembled": 34304, + "ĠGRE": 34305, + "Bring": 34306, + "Ġcompressed": 34307, + "ĠOral": 34308, + "379": 34309, + "Ġwrench": 34310, + "LCS": 34311, + "Ġhomosexual": 34312, + "Kelly": 34313, + "Ġhump": 34314, + "ĠSicily": 34315, + "Ġperished": 34316, + "aos": 34317, + "doesn": 34318, + "scrib": 34319, + "Charlie": 34320, + "Ġshuffle": 34321, + "372": 34322, + "cedented": 34323, + "402": 34324, + "Ġtiers": 34325, + "Ġinteracted": 34326, + "ĠHG": 34327, + "ĠJere": 34328, + "ĠBRA": 34329, + "ĠDOC": 34330, + "things": 34331, + "Ġfaiths": 34332, + "Ġgirlfriends": 34333, + "Ġfortified": 34334, + "develop": 34335, + "ĠKus": 34336, + "iability": 34337, + "rase": 34338, + "iotics": 34339, + "ĠChern": 34340, + "boxes": 34341, + "abol": 34342, + "idan": 34343, + "emon": 34344, + "ĠJudaism": 34345, + "ĠSituation": 34346, + "ĠGrimm": 34347, + "Ġgou": 34348, + "ĠVictim": 34349, + "backer": 34350, + "Ġanimosity": 34351, + "ĠHorizons": 34352, + "ĠKazakh": 34353, + "Ġgrossly": 34354, + "ĠTac": 34355, + "yg": 34356, + "366": 34357, + "Ġcheaply": 34358, + "Ġformulated": 34359, + "ĠDangerous": 34360, + "offensive": 34361, + "Ġsauces": 34362, + "Ġkeyboards": 34363, + "666": 34364, + "Ġcanopy": 34365, + "Inc": 34366, + "astered": 34367, + "iesel": 34368, + "Ġadv": 34369, + "currency": 34370, + "Ġscapego": 34371, + "plings": 34372, + "ĠBDS": 34373, + "Ġstrangely": 34374, + "today": 34375, + "ĠEgyptians": 34376, + "Ġcoron": 34377, + "often": 34378, + "ĠTransformers": 34379, + "ĠAfterwards": 34380, + "reated": 34381, + "Ġpoisonous": 34382, + "Ġgeographically": 34383, + "Ġmell": 34384, + "Cross": 34385, + "Ġdeductible": 34386, + "ĠZionist": 34387, + "Ġcutter": 34388, + "ĠRP": 34389, + "ĠImag": 34390, + "Ġoverflow": 34391, + "358": 34392, + "ĠADD": 34393, + "bones": 34394, + "Ġflattened": 34395, + "ĠGREEN": 34396, + "Ġlaure": 34397, + "haps": 34398, + "ĠCellular": 34399, + "kens": 34400, + "363": 34401, + "ĠSmash": 34402, + "ĠSpeak": 34403, + "ĠMaiden": 34404, + "Ġgreedy": 34405, + "ĠManit": 34406, + "Ġfacet": 34407, + "ĠGPA": 34408, + "Ġracks": 34409, + "popular": 34410, + "322": 34411, + "ĠBars": 34412, + "avement": 34413, + "359": 34414, + "Ġpomp": 34415, + "Ġregisters": 34416, + "Fs": 34417, + "ĠLoving": 34418, + "ĠTaxi": 34419, + "concert": 34420, + "ĠArchae": 34421, + "Ġcurls": 34422, + "ĠSpit": 34423, + "ĠLIFE": 34424, + "Ġinvade": 34425, + "rolog": 34426, + "wreck": 34427, + "Ġconflicted": 34428, + "Ġ970": 34429, + "Ġexiled": 34430, + "Ġchew": 34431, + "udging": 34432, + "Ġexper": 34433, + "ĠFt": 34434, + "rius": 34435, + "ĠXer": 34436, + "~": 34437, + "Ġbandwagon": 34438, + "Fore": 34439, + "Cat": 34440, + "Ġoverflowing": 34441, + "Ġradios": 34442, + "Much": 34443, + "Ġfacilitates": 34444, + "ĠCaf": 34445, + "ĠQing": 34446, + "Use": 34447, + "Ġmang": 34448, + "Ġpissed": 34449, + "ĠOuter": 34450, + "within": 34451, + "ĠSchr": 34452, + "ĠSherlock": 34453, + "Ġ336": 34454, + "Ġcasc": 34455, + "chens": 34456, + "incent": 34457, + "Ġcultivating": 34458, + "ampions": 34459, + "Ġwasteful": 34460, + "adays": 34461, + "sets": 34462, + "ĠLF": 34463, + "watching": 34464, + "Ġabandonment": 34465, + "ĠJesuit": 34466, + "Ġlegislatures": 34467, + "regnancy": 34468, + "ĠColt": 34469, + "Ġinterns": 34470, + "Ġundertook": 34471, + "ĠIPA": 34472, + "ĠInstall": 34473, + "nsics": 34474, + "washer": 34475, + "Ġbeginners": 34476, + "ĠDiseases": 34477, 
+ "Ġlimp": 34478, + "ĠESA": 34479, + "Basically": 34480, + "Ġprud": 34481, + "LED": 34482, + "Ġgrease": 34483, + "ousel": 34484, + "Ġrotten": 34485, + "ĠCele": 34486, + "facts": 34487, + "ĠLouie": 34488, + "ĠISI": 34489, + "481": 34490, + "Ġsett": 34491, + "Ġtoug": 34492, + "ĠReck": 34493, + "OUNT": 34494, + "ĠFou": 34495, + "Ġinhibitor": 34496, + "gru": 34497, + "bane": 34498, + "1980": 34499, + "ĠPanc": 34500, + "Ġsuperficial": 34501, + "Ġauthoritative": 34502, + "ĠVOL": 34503, + "790": 34504, + "Ġcrusade": 34505, + "airy": 34506, + "Ġemphatically": 34507, + "Ġflourishing": 34508, + "Ġ416": 34509, + "Ġheroine": 34510, + "inx": 34511, + "Ġanch": 34512, + "stretched": 34513, + "ĠRegener": 34514, + "ĠAncient": 34515, + "evaluate": 34516, + "Ġantibody": 34517, + "ĠEston": 34518, + "ĠAeg": 34519, + "Ġboldly": 34520, + "TN": 34521, + "ĠPercentage": 34522, + "Ġ747": 34523, + "Ġrapt": 34524, + "ĠEdited": 34525, + "Earth": 34526, + "phal": 34527, + "ĠXXX": 34528, + "arling": 34529, + "ĠReligion": 34530, + "Ġ503": 34531, + "forces": 34532, + "Ġendpoint": 34533, + "Miller": 34534, + "Ba": 34535, + "Ġdisappears": 34536, + "andre": 34537, + "Ġconnector": 34538, + "407": 34539, + "ĠTOUR": 34540, + "aura": 34541, + "ĠRazer": 34542, + "UPDATE": 34543, + "Ġcalib": 34544, + "original": 34545, + "ĠMonkey": 34546, + "Ir": 34547, + "Ġexacerb": 34548, + "killing": 34549, + "Ġforb": 34550, + "native": 34551, + "Ġpoking": 34552, + "Ġveiled": 34553, + "mails": 34554, + "Ġalphabet": 34555, + "Ġawkwardly": 34556, + "ĠNames": 34557, + "Ġspiders": 34558, + "ĠParam": 34559, + "ĠColour": 34560, + "Ġunification": 34561, + "ĠPione": 34562, + "Ġoffend": 34563, + "Ġscoff": 34564, + "ĠSAR": 34565, + "ĠBuildings": 34566, + "edes": 34567, + "ĠAke": 34568, + "Ġfirmware": 34569, + "Madison": 34570, + "policy": 34571, + "ĠComputing": 34572, + "ĠRW": 34573, + "Ġfluent": 34574, + "Ġdece": 34575, + "Ġswore": 34576, + "Ġrestaur": 34577, + "Ġpresses": 34578, + "ophon": 34579, + "Ġphilosopher": 34580, + "ften": 34581, + "Ġintruder": 34582, + "Ġleng": 34583, + "ĠCowboy": 34584, + "cled": 34585, + "Ġmeticulously": 34586, + "ĠPair": 34587, + "ĠEND": 34588, + "Ġcapsules": 34589, + "Ġauxiliary": 34590, + "Ġverses": 34591, + "Ġsheltered": 34592, + "Ġexplorer": 34593, + "ĠWolverine": 34594, + "auts": 34595, + "Ġinhibitors": 34596, + "ĠPeng": 34597, + "ĠValve": 34598, + "imar": 34599, + "Ġchuck": 34600, + "ĠRecording": 34601, + "Ġardu": 34602, + "Test": 34603, + "Ġinterven": 34604, + "Ġchrome": 34605, + "months": 34606, + "tap": 34607, + "ĠManz": 34608, + "format": 34609, + "ĠBalkans": 34610, + "Ġannex": 34611, + "uder": 34612, + "ĠAAC": 34613, + "Ġdisturbances": 34614, + "354": 34615, + "asms": 34616, + "ĠTad": 34617, + "puting": 34618, + "Ġfateful": 34619, + "imen": 34620, + "Ġaudi": 34621, + "ĠNewsweek": 34622, + "Around": 34623, + "Ġretribution": 34624, + "Ġsugars": 34625, + "Ġescapes": 34626, + "Ġlegitim": 34627, + "ĠProof": 34628, + "Ġmisogyn": 34629, + "cit": 34630, + "Ġclutching": 34631, + "exist": 34632, + "Ġrevol": 34633, + "Ġdiscs": 34634, + "discrimination": 34635, + "Ġstout": 34636, + "aline": 34637, + "ĠRandom": 34638, + "364": 34639, + "Ġapprehension": 34640, + "Ġmockery": 34641, + "Ġfossils": 34642, + "ĠStress": 34643, + "Ġbenefic": 34644, + "exc": 34645, + "lude": 34646, + "Small": 34647, + "Ġgh": 34648, + "Ġobserves": 34649, + "ĠSUP": 34650, + "Ġbrewer": 34651, + "ĠESP": 34652, + "Ġomitted": 34653, + "multiple": 34654, + "Ġminimizing": 34655, + "Ġtaco": 34656, + "Ġindifferent": 34657, + "medi": 34658, + "available": 34659, 
+ "Ġ252": 34660, + "Ġsanity": 34661, + "ĠCookie": 34662, + "mostly": 34663, + "near": 34664, + "NASA": 34665, + "Ġlowly": 34666, + "seless": 34667, + "Ġobsess": 34668, + "itous": 34669, + "Dispatch": 34670, + "Ġcanyon": 34671, + "Ġbriefs": 34672, + "Say": 34673, + "ĠNato": 34674, + "ĠSpend": 34675, + "Ġ242": 34676, + "ĠEthernet": 34677, + "Ġmatte": 34678, + "ĠStim": 34679, + "hetics": 34680, + "Ġflourished": 34681, + "389": 34682, + "ĠMcA": 34683, + "695": 34684, + "Ġoverr": 34685, + "Ġtorment": 34686, + "Ġpirate": 34687, + "ĠJohann": 34688, + "roversial": 34689, + "ĠUnemployment": 34690, + "breakers": 34691, + "ĠMessages": 34692, + "tones": 34693, + "Ġtagging": 34694, + "Ġfrog": 34695, + "Jewish": 34696, + "Ġmessenger": 34697, + "Ġexasper": 34698, + "ernaut": 34699, + "Ġnarrower": 34700, + "ĠCatalyst": 34701, + "ĠSecrets": 34702, + "Ġadj": 34703, + "ĠFug": 34704, + "Ġaura": 34705, + "Ġtherape": 34706, + "mber": 34707, + "Ġcaliphate": 34708, + "Ġretreating": 34709, + "ĠComput": 34710, + "Ġburying": 34711, + "Ġail": 34712, + "Ġgriev": 34713, + "lins": 34714, + "825": 34715, + "tten": 34716, + "ifully": 34717, + "ĠTrials": 34718, + "igma": 34719, + "Ġ1914": 34720, + "Ġcoordinates": 34721, + "ocusing": 34722, + "ĠFeng": 34723, + "ĠWhale": 34724, + "Ġshorten": 34725, + "Ġcorrectness": 34726, + "evil": 34727, + "network": 34728, + "Ġreactive": 34729, + "assuming": 34730, + "ĠLaksh": 34731, + "games": 34732, + "Ġruining": 34733, + "excluding": 34734, + "annels": 34735, + "º": 34736, + "Ġrubbed": 34737, + "aleb": 34738, + "flex": 34739, + "iped": 34740, + "ĠLimit": 34741, + "allowed": 34742, + "ĠDMV": 34743, + "ĠLD": 34744, + "Ġstamina": 34745, + "conduct": 34746, + "Ġmislead": 34747, + "lib": 34748, + "ĠEminem": 34749, + "Ġpayoff": 34750, + "Ġkernel": 34751, + "Ġsweeps": 34752, + "Ġsonic": 34753, + "ĠKodi": 34754, + "unique": 34755, + "Ġsurrog": 34756, + "Michigan": 34757, + "Ġattest": 34758, + "Ġdummy": 34759, + "ĠStellar": 34760, + "ĠSquadron": 34761, + "ĠHait": 34762, + "ĠSpirits": 34763, + "605": 34764, + "ĠHemisphere": 34765, + "legram": 34766, + "ĠRack": 34767, + "opol": 34768, + "Ġfreshwater": 34769, + "cession": 34770, + "Ġabort": 34771, + "ĠLOG": 34772, + "Ġfuzzy": 34773, + "Ġcrystall": 34774, + "illation": 34775, + "ĠFreddy": 34776, + "Ġsalvation": 34777, + "Ġjuxtap": 34778, + "weekly": 34779, + "usha": 34780, + "456": 34781, + "Ġ660": 34782, + "ĠGlacier": 34783, + "Ġnegatives": 34784, + "Ġillegitimate": 34785, + "ĠProtein": 34786, + "Moore": 34787, + "Der": 34788, + "Ġinfancy": 34789, + "Again": 34790, + "ALD": 34791, + "Leon": 34792, + "ĠIdeally": 34793, + "fresh": 34794, + "730": 34795, + "Ġgamb": 34796, + "Ġscrewed": 34797, + "wow": 34798, + "Ġembodied": 34799, + "ĠCinderella": 34800, + "341": 34801, + "ĠPiano": 34802, + "Ġbroccoli": 34803, + "Ġmats": 34804, + "ĠZheng": 34805, + "cream": 34806, + "anut": 34807, + "ĠZig": 34808, + "Columb": 34809, + "ĠTibetan": 34810, + "Death": 34811, + "Ġstren": 34812, + "ĠVertical": 34813, + "Ġratification": 34814, + "Ġprincipally": 34815, + "ELD": 34816, + "Ġforbid": 34817, + "Ġamalg": 34818, + "blind": 34819, + "auri": 34820, + "stery": 34821, + "Ġbarley": 34822, + "FBI": 34823, + "ĠHex": 34824, + "925": 34825, + "Domin": 34826, + "oat": 34827, + "Ġswayed": 34828, + "ĠKKK": 34829, + "ĠTaxes": 34830, + "Ġker": 34831, + "eeper": 34832, + "ĠAwakens": 34833, + "ĠPix": 34834, + "ĠKING": 34835, + "dc": 34836, + "Ren": 34837, + "Ġlegitimately": 34838, + "ĠTriumph": 34839, + "ĠSites": 34840, + "ĠSai": 34841, + "tl": 34842, + "painted": 34843, + 
"ĠWaiting": 34844, + "starting": 34845, + "parents": 34846, + "ĠDuo": 34847, + "eele": 34848, + "upper": 34849, + "ĠInvestig": 34850, + "Ġeighteen": 34851, + "Ġcorrelated": 34852, + "ĠCascade": 34853, + "acca": 34854, + "ĠAlph": 34855, + "ĠPolic": 34856, + "ĠEVs": 34857, + "Ġworthless": 34858, + "ĠIndust": 34859, + "auld": 34860, + "ĠYiannopoulos": 34861, + "ĠEzra": 34862, + "Ġmorphed": 34863, + "Ġoriginating": 34864, + "mania": 34865, + "Ġsparing": 34866, + "Ġextrem": 34867, + "cre": 34868, + "ults": 34869, + "mare": 34870, + "classified": 34871, + "Ġparachute": 34872, + "Ġmistrust": 34873, + "ONT": 34874, + "Mind": 34875, + "Ġthru": 34876, + "707": 34877, + "ĠTwain": 34878, + "Ġmelodies": 34879, + "ĠDanger": 34880, + "ĠDPS": 34881, + "Ġderive": 34882, + "Ġdissolution": 34883, + "Ġchildbirth": 34884, + "Ġ415": 34885, + "fork": 34886, + "solid": 34887, + "loads": 34888, + "ĠCGI": 34889, + "378": 34890, + "ĠShed": 34891, + "Face": 34892, + "Ġcomet": 34893, + "iceps": 34894, + "ĠReduction": 34895, + "Fly": 34896, + "jp": 34897, + "ĠAnimation": 34898, + "Luke": 34899, + "Ġabiding": 34900, + "Ġdevise": 34901, + "ĠAe": 34902, + "Ġflux": 34903, + "Ġbras": 34904, + "Ġfracturing": 34905, + "Ġinventive": 34906, + "ĠGranger": 34907, + "Ġsap": 34908, + "inducing": 34909, + "Ġreviewers": 34910, + "Officers": 34911, + "ĠWHY": 34912, + "Ġamplify": 34913, + "Ġentr": 34914, + "Ġslit": 34915, + "457": 34916, + "Ġreformed": 34917, + "ĠPhi": 34918, + "Ġtempt": 34919, + "Ġcontradiction": 34920, + "585": 34921, + "ĠMaced": 34922, + "371": 34923, + "kinson": 34924, + "robe": 34925, + "ĠHunters": 34926, + "astern": 34927, + "criminal": 34928, + "jew": 34929, + "Ġdecentralized": 34930, + "bands": 34931, + "Ġavatar": 34932, + "ĠBarrier": 34933, + "Ġcharacterization": 34934, + "student": 34935, + "Ġgays": 34936, + "Ġspecialize": 34937, + "ĠJudging": 34938, + "Ġinitiation": 34939, + "Ġshove": 34940, + "Ġpirates": 34941, + "Ġfictitious": 34942, + "ĠPoker": 34943, + "ĠElsa": 34944, + "ĠTECH": 34945, + "handedly": 34946, + "Ġglued": 34947, + "Ġclinically": 34948, + "Ġinaccessible": 34949, + "Ġderegulation": 34950, + "Ġprohib": 34951, + "Ġdangling": 34952, + "Ġnoses": 34953, + "Ġstash": 34954, + "اØ": 34955, + "ESH": 34956, + "Ġmonstrous": 34957, + "Ġcrept": 34958, + "ĠCharm": 34959, + "Ġbeh": 34960, + "Ġshuts": 34961, + "Ġ236": 34962, + "imedia": 34963, + "445": 34964, + "Du": 34965, + "Ġafar": 34966, + "ĠRout": 34967, + "Ġflares": 34968, + "Utah": 34969, + "Ġ808": 34970, + "Ġjewels": 34971, + "2004": 34972, + "Ġrecal": 34973, + "Gas": 34974, + "ĠExcellent": 34975, + "Ġpitfalls": 34976, + "ĠDrawing": 34977, + "viously": 34978, + "angered": 34979, + "changes": 34980, + "Ġpasture": 34981, + "talking": 34982, + "Ġinequ": 34983, + "Ġbicycl": 34984, + "Cost": 34985, + "423": 34986, + "bard": 34987, + "Ġanterior": 34988, + "ecast": 34989, + "CHR": 34990, + "397": 34991, + "masters": 34992, + "706": 34993, + "ĠFinish": 34994, + "Yet": 34995, + "study": 34996, + "ĠCogn": 34997, + "Ġloaf": 34998, + "Ġspatial": 34999, + "ĠParad": 35000, + "batch": 35001, + "Ġvents": 35002, + "Ġspins": 35003, + "ĠAddiction": 35004, + "Ġcondone": 35005, + "Ġproble": 35006, + "English": 35007, + "ĠRomans": 35008, + "ĠSaying": 35009, + "ĠKling": 35010, + "Universal": 35011, + "ivist": 35012, + "Ġskirm": 35013, + "Ġ2500": 35014, + "Ġ263": 35015, + "aired": 35016, + "ĠMartian": 35017, + "ĠCompensation": 35018, + "lation": 35019, + "ĠSalam": 35020, + "LGBT": 35021, + "ĠDart": 35022, + "strike": 35023, + "vasive": 35024, + "ILLE": 35025, + 
"Ġimaginative": 35026, + "ĠEuph": 35027, + "Financial": 35028, + "Ġholog": 35029, + "orah": 35030, + "crit": 35031, + "ĠOswald": 35032, + "512": 35033, + "ĠUri": 35034, + "Ġdiscrepancies": 35035, + "Ġbeads": 35036, + "ĠShots": 35037, + "Mem": 35038, + "Ġhunts": 35039, + "Ġsubtly": 35040, + "Ġ470": 35041, + "ĠVigil": 35042, + "Ġsew": 35043, + "ĠBurma": 35044, + "igm": 35045, + "ighed": 35046, + "swe": 35047, + "Ġ251": 35048, + "Ġdeceit": 35049, + "Ġphysi": 35050, + "iflower": 35051, + "ĠCert": 35052, + "Ġchewing": 35053, + "rax": 35054, + "ĠMER": 35055, + "icient": 35056, + "Les": 35057, + "Ġ390": 35058, + "Ġperjury": 35059, + "Ġfiltering": 35060, + "770": 35061, + "Ġpoppy": 35062, + "Ġbland": 35063, + "ĠNasa": 35064, + "Ġorbiting": 35065, + "ĠRipple": 35066, + "otal": 35067, + "ĠRyu": 35068, + "ĠShap": 35069, + "ĠJian": 35070, + "Ġpiv": 35071, + "ĠNeptune": 35072, + "rary": 35073, + "Ġunavoidable": 35074, + "Ġguideline": 35075, + "Ġwaterfall": 35076, + "inators": 35077, + "ĠLogic": 35078, + "ĠPlug": 35079, + "role": 35080, + "Ġalterations": 35081, + "ĠSett": 35082, + "ĠFeld": 35083, + "Ġfreezes": 35084, + "Ġbedrock": 35085, + "ĠVIEW": 35086, + "ovation": 35087, + "Ġneedless": 35088, + "ĠIU": 35089, + "ignant": 35090, + "ĠConfeder": 35091, + "316": 35092, + "fine": 35093, + "Ġjars": 35094, + "gotten": 35095, + "Bron": 35096, + "Ġmindfulness": 35097, + "imating": 35098, + "Ġhysteria": 35099, + "Ġhurried": 35100, + "Ġinfantry": 35101, + "ĠNYU": 35102, + "tags": 35103, + "Penn": 35104, + "Ġtracing": 35105, + "ĠSwing": 35106, + "ĠIo": 35107, + "Ġreckoned": 35108, + "ĠRecall": 35109, + "ĠVersion": 35110, + "314": 35111, + "Ġecology": 35112, + "Ġarmoured": 35113, + "Ġresonance": 35114, + "970": 35115, + "Ġvigilance": 35116, + "Ġrede": 35117, + "ĠBohem": 35118, + "Ġchau": 35119, + "ĠDevi": 35120, + "Ġtru": 35121, + "))": 35122, + "Put": 35123, + "Ġflavored": 35124, + "ĠClown": 35125, + "Senate": 35126, + "ĠScandinavian": 35127, + "mable": 35128, + "Residents": 35129, + "ĠFranchise": 35130, + "Ġprecincts": 35131, + "Prem": 35132, + "ĠNeutral": 35133, + "coal": 35134, + "Ġdelinqu": 35135, + "Mus": 35136, + "UME": 35137, + "Ġtedious": 35138, + "roots": 35139, + "ĠCondition": 35140, + "ĠIntercept": 35141, + "017": 35142, + "itives": 35143, + "Ġdefinitively": 35144, + "Ġobliter": 35145, + "Ġclandestine": 35146, + "Ġstagnation": 35147, + "Ġblindness": 35148, + "abiding": 35149, + "Ġremix": 35150, + "feeding": 35151, + "Ġunrecogn": 35152, + "2003": 35153, + "960": 35154, + "381": 35155, + "Ġbulky": 35156, + "xia": 35157, + "ivered": 35158, + "inic": 35159, + "ĠSoci": 35160, + "ĠYards": 35161, + "Ġhides": 35162, + "Film": 35163, + "Ġtestim": 35164, + "Ġblacklist": 35165, + "Deep": 35166, + "Standard": 35167, + "ĠClash": 35168, + "Ġriddled": 35169, + "Ġdiseng": 35170, + "ĠTRE": 35171, + "ĠIDs": 35172, + "Ġmigrating": 35173, + "protect": 35174, + "Ġgraded": 35175, + "Ġvaguely": 35176, + "ĠCharacter": 35177, + "382": 35178, + "ĠMOD": 35179, + "Eng": 35180, + "Ġmobilized": 35181, + "Ġsincerity": 35182, + "Ġ317": 35183, + "sighted": 35184, + "ownt": 35185, + "ĠâĢİ": 35186, + "umpy": 35187, + "Ġitching": 35188, + "ĠVerd": 35189, + "cook": 35190, + "Ġsimulator": 35191, + "players": 35192, + "Early": 35193, + "infeld": 35194, + "Ġmaximizing": 35195, + "Philipp": 35196, + "ĠPhotoshop": 35197, + "Ġdestroys": 35198, + "Ġbefriend": 35199, + "Ġfilthy": 35200, + "ĠIncident": 35201, + "gha": 35202, + "Ġcomplicity": 35203, + "Ġmessing": 35204, + "YA": 35205, + "ĠNegro": 35206, + "adows": 35207, + "374": 35208, + "Ġpip": 
35209, + "cean": 35210, + "Ġ1924": 35211, + "Sent": 35212, + "represent": 35213, + "Ġdeems": 35214, + "ĠRue": 35215, + "Ġtitanium": 35216, + "Ġmanners": 35217, + "âĢ¦âĢ¦": 35218, + "bare": 35219, + "Ġusur": 35220, + "mma": 35221, + "ĠPanda": 35222, + "ulus": 35223, + "ĠSlav": 35224, + "324": 35225, + "ĠMole": 35226, + "^": 35227, + "micro": 35228, + "foreign": 35229, + "lest": 35230, + "ocular": 35231, + "ĠUniv": 35232, + "ĠFrag": 35233, + "Ġshepherd": 35234, + "Ġelectron": 35235, + "ĠFSA": 35236, + "Ġunl": 35237, + "dose": 35238, + "Ġimmersion": 35239, + "ĠDeL": 35240, + "Ġbiomedical": 35241, + "Anna": 35242, + "Ġskillet": 35243, + "Ġrecre": 35244, + "Ġtrillions": 35245, + "voy": 35246, + "Ġnormalized": 35247, + "radio": 35248, + "cue": 35249, + "urbed": 35250, + "Ġthinkers": 35251, + "328": 35252, + "327": 35253, + "ĠForge": 35254, + "505": 35255, + "Ġunbearable": 35256, + "olini": 35257, + "Ġdisinfect": 35258, + "Ġshaving": 35259, + "Ġtoxicity": 35260, + "453": 35261, + "Ġheterosexual": 35262, + "Baltimore": 35263, + "Ġstool": 35264, + "lr": 35265, + "ĠMk": 35266, + "Ġantidote": 35267, + "Dark": 35268, + "810": 35269, + "Ġirritated": 35270, + "ĠSUPPORT": 35271, + "Chance": 35272, + "bent": 35273, + "ĠZelda": 35274, + "ĠPenguin": 35275, + "ifled": 35276, + "Ġarte": 35277, + "705": 35278, + "Ġcondol": 35279, + "izza": 35280, + "ĠCK": 35281, + "Ġprojector": 35282, + "ravings": 35283, + "Ġ1919": 35284, + "Ġburner": 35285, + "ĠSchwarz": 35286, + "Oregon": 35287, + "Ġridicule": 35288, + "Ġinstructional": 35289, + "Ġ\"#": 35290, + "ĠDign": 35291, + "Ġkitten": 35292, + "Ġconstit": 35293, + "iration": 35294, + "Speed": 35295, + "ecycle": 35296, + "ĠFalse": 35297, + "ĠDealer": 35298, + "Could": 35299, + "655": 35300, + "outside": 35301, + "Ġworldview": 35302, + "Ġ246": 35303, + "Ġspitting": 35304, + "595": 35305, + "MN": 35306, + "ĠComes": 35307, + "ingu": 35308, + "Ġenzymes": 35309, + "Ġcompass": 35310, + "Ġexclaimed": 35311, + "ĠMalays": 35312, + "Ġ1916": 35313, + "Ġcoloring": 35314, + "Ġrepeats": 35315, + "Ġsoils": 35316, + "Ġtrivia": 35317, + "ĠIsles": 35318, + "Const": 35319, + "ĠFiction": 35320, + "665": 35321, + "Ġcriminality": 35322, + "ĠZi": 35323, + "384": 35324, + "ĠWilderness": 35325, + "ĠCanary": 35326, + "ĠVs": 35327, + "и": 35328, + "ĠAPIs": 35329, + "Ġbehest": 35330, + "Ġeb": 35331, + "ĠHipp": 35332, + "Ġpreempt": 35333, + "Ġevoke": 35334, + "Ġinept": 35335, + "tele": 35336, + "447": 35337, + "ĠGarmin": 35338, + "Ġpursuits": 35339, + "351": 35340, + "Ġcliché": 35341, + "ĠJihad": 35342, + "Ġ308": 35343, + "ĠSnake": 35344, + "ĠAnnounce": 35345, + "Nearly": 35346, + "!'\"": 35347, + "Ġ1927": 35348, + "saw": 35349, + "Ġabhor": 35350, + "Plan": 35351, + "rawled": 35352, + "ĠRiy": 35353, + "ensor": 35354, + "Fal": 35355, + "quick": 35356, + "odynamic": 35357, + "Ġsubstitution": 35358, + "Ġprovoking": 35359, + "Operation": 35360, + "rupulous": 35361, + "Ġsweetness": 35362, + "folk": 35363, + "ĠDefault": 35364, + "Ġstarved": 35365, + "ĠPrinting": 35366, + "urious": 35367, + "ĠTracker": 35368, + "them": 35369, + "Ġleth": 35370, + "Ġemptied": 35371, + "Ġfootprints": 35372, + "ilian": 35373, + "Ġbattalion": 35374, + "Ġprophet": 35375, + "Ġrailing": 35376, + "Ġhect": 35377, + "rouch": 35378, + "lees": 35379, + "Ġideologies": 35380, + "Ġ254": 35381, + "ĠGods": 35382, + "ĠAvalon": 35383, + "Ġfrontrunner": 35384, + "ĠPork": 35385, + "ĠPipe": 35386, + "Ġscaven": 35387, + "Ġming": 35388, + "Ġerg": 35389, + "Ġ520": 35390, + "Ġhatched": 35391, + "asant": 35392, + "ĠHI": 35393, + "Ġpend": 35394, + 
"Ġ288": 35395, + "Prom": 35396, + "achev": 35397, + "ĠEcology": 35398, + "enforcement": 35399, + "467": 35400, + "dule": 35401, + "Ġrealism": 35402, + "ĠTypes": 35403, + "USB": 35404, + "utra": 35405, + "ĠHiroshima": 35406, + "Ġcontradicted": 35407, + "393": 35408, + "ĠDSL": 35409, + "Ġtherein": 35410, + "ĠReconstruction": 35411, + "Ġ243": 35412, + "irled": 35413, + "479": 35414, + "ĠWhats": 35415, + "Currently": 35416, + "ĠPOWER": 35417, + "ĠHiro": 35418, + "ĠBreath": 35419, + "ĠYourself": 35420, + "Ġlantern": 35421, + "376": 35422, + "É": 35423, + "ĠHumans": 35424, + "Lady": 35425, + "Ġdissemination": 35426, + "ecake": 35427, + "ĠChao": 35428, + "flat": 35429, + "Ġinspecting": 35430, + "stration": 35431, + "Ġidentifiable": 35432, + "CV": 35433, + "ĠLobby": 35434, + "function": 35435, + "Roll": 35436, + "DIV": 35437, + "Tell": 35438, + "Ġfasc": 35439, + "ĠAOL": 35440, + "HM": 35441, + "Keefe": 35442, + "Ġporous": 35443, + "Ġsmoot": 35444, + "existence": 35445, + "ĠDeg": 35446, + "Ġdivor": 35447, + "isner": 35448, + "allas": 35449, + "Bloomberg": 35450, + "Ġdictators": 35451, + "ĠGeh": 35452, + "Ġsilicone": 35453, + "Ġdab": 35454, + "Ġmashed": 35455, + "Ġpric": 35456, + "might": 35457, + "ĠBLM": 35458, + "Ġpatriarch": 35459, + "Microsoft": 35460, + "ĠAds": 35461, + "Ġcoronary": 35462, + "ĠContrary": 35463, + "Ġdra": 35464, + "ĠStarted": 35465, + "Ġbuckle": 35466, + "lear": 35467, + "accept": 35468, + "Within": 35469, + "bd": 35470, + "interested": 35471, + "bia": 35472, + "POR": 35473, + "motion": 35474, + "ĠFounders": 35475, + "ĠCassandra": 35476, + "ĠPassion": 35477, + "Ġbehavioural": 35478, + "ĠHealing": 35479, + "Ġmarkings": 35480, + "Ġsnowball": 35481, + "Ġridiculed": 35482, + "phase": 35483, + "Ġunto": 35484, + "aque": 35485, + "uggets": 35486, + "Ġfrantically": 35487, + "Ġcoward": 35488, + "Ġinconvenient": 35489, + "Taking": 35490, + "Afee": 35491, + "Ġtwisting": 35492, + "930": 35493, + "ĠSieg": 35494, + "ĠGit": 35495, + "Ġcurs": 35496, + "ĠGlas": 35497, + "ĠSignificant": 35498, + "Ġachieves": 35499, + "Ġpreferably": 35500, + "Ġcondensed": 35501, + "Ġfetus": 35502, + "Ġunivers": 35503, + "Ġpse": 35504, + "Access": 35505, + "Ġintertwined": 35506, + "been": 35507, + "quit": 35508, + "ĠLEGO": 35509, + "Ġimagining": 35510, + "454": 35511, + "Ġplains": 35512, + "sequently": 35513, + "pull": 35514, + "Fast": 35515, + "Pot": 35516, + "yles": 35517, + "AIR": 35518, + "Ġblatantly": 35519, + "eki": 35520, + "ilated": 35521, + "ĠMembership": 35522, + "Ġ262": 35523, + "Ġ}": 35524, + "Ġexcavation": 35525, + "Ġethn": 35526, + "addin": 35527, + "Ġfoundational": 35528, + "ceptions": 35529, + "ĠViet": 35530, + "exempt": 35531, + "Ġmicrophones": 35532, + "Ġ244": 35533, + "778": 35534, + "Ġdwar": 35535, + "attery": 35536, + "502": 35537, + "ĠKik": 35538, + "Ġinspir": 35539, + "ĠMaximum": 35540, + "Ġvengeance": 35541, + "Ġetched": 35542, + "outine": 35543, + "552": 35544, + "Ġunicorn": 35545, + "gged": 35546, + ".�": 35547, + "ĠBlackwell": 35548, + "ĠStatue": 35549, + "Ġdissidents": 35550, + "ĠKaine": 35551, + "Ġdeforestation": 35552, + "ĠScholar": 35553, + "Ġpleasantly": 35554, + "ÑĤ": 35555, + "398": 35556, + "ĠRUN": 35557, + "arent": 35558, + "Ġundeniably": 35559, + "Ġtechnologically": 35560, + "Ġconsciously": 35561, + "ĠEther": 35562, + "Ġproportional": 35563, + "Ġlaund": 35564, + "ĠRye": 35565, + "Ġambiguity": 35566, + "Ġunmist": 35567, + "Terror": 35568, + "ciplinary": 35569, + "ĠImproved": 35570, + "hesis": 35571, + "Ġcooker": 35572, + "elsen": 35573, + "Ġguerrilla": 35574, + "opped": 35575, + 
"ATURE": 35576, + "Ġrequ": 35577, + "Ġunprepared": 35578, + "Ġcamel": 35579, + "Ġfitt": 35580, + "Sex": 35581, + "edged": 35582, + "Ġrecurrent": 35583, + "ctuary": 35584, + "ĠCompare": 35585, + "ĠServing": 35586, + "Tri": 35587, + "Ġtransient": 35588, + "ĠBees": 35589, + "Ġcovenant": 35590, + "Ġfantasies": 35591, + "Ġespresso": 35592, + "draft": 35593, + "baugh": 35594, + "Ġdemocratically": 35595, + "ĠBans": 35596, + "ĠManual": 35597, + "ĠTurtle": 35598, + "ennett": 35599, + "achy": 35600, + "ĠClim": 35601, + "Ġdescending": 35602, + "Ġprow": 35603, + "Ġinconsistencies": 35604, + "Player": 35605, + "Ġoblivious": 35606, + "ĠWonderland": 35607, + "nav": 35608, + "aughter": 35609, + "Ġlod": 35610, + "Ġ403": 35611, + "ĠPolaris": 35612, + "ĠLeia": 35613, + "ĠInfantry": 35614, + "Sy": 35615, + "ĠMeter": 35616, + "Ġautoimmune": 35617, + "Ġdiagnoses": 35618, + "Ġtrespass": 35619, + "011": 35620, + "wrong": 35621, + "ĠGREAT": 35622, + "Ġtelescopes": 35623, + "shows": 35624, + "Pac": 35625, + "olation": 35626, + "Ġclerics": 35627, + "Ġdissenting": 35628, + "406": 35629, + "Ġetiquette": 35630, + "Ġdeterrence": 35631, + "765": 35632, + "Ġove": 35633, + "Has": 35634, + "Pak": 35635, + "ा": 35636, + "ĠNec": 35637, + "Ġsociology": 35638, + "witz": 35639, + "Ġkittens": 35640, + "Ġcontinual": 35641, + "Ġoverlapping": 35642, + "Ġmonks": 35643, + "ĠMechanical": 35644, + "Captain": 35645, + "ocial": 35646, + "ĠFalling": 35647, + "ĠCorrection": 35648, + "ĠTrouble": 35649, + "Ġslog": 35650, + "Ġ253": 35651, + "Ġemanating": 35652, + "Ġwidest": 35653, + "PROV": 35654, + "Japanese": 35655, + "urat": 35656, + "Ġboxed": 35657, + "ĠCases": 35658, + "Ġjarring": 35659, + "Fix": 35660, + "'?": 35661, + "ĠStrateg": 35662, + "Republic": 35663, + "ovy": 35664, + "362": 35665, + "ĠMothers": 35666, + "Ġstreaks": 35667, + "Ġlocalized": 35668, + "ĠONLY": 35669, + "Ġeh": 35670, + "ĠObject": 35671, + "Ġstub": 35672, + "Fre": 35673, + "ĠScarlet": 35674, + "Ġmultip": 35675, + "ĠMaul": 35676, + "ĠProblems": 35677, + "cest": 35678, + "Ġmortal": 35679, + "Ġarche": 35680, + "ulet": 35681, + "Ġfuller": 35682, + "ĠGER": 35683, + "Si": 35684, + "mr": 35685, + "ĠPowerful": 35686, + "boxing": 35687, + "ĠPeer": 35688, + "Jean": 35689, + "ĠTF": 35690, + "Ġplural": 35691, + "optim": 35692, + "Jimmy": 35693, + "ĠFriendly": 35694, + "Mex": 35695, + "Ġdepri": 35696, + "PK": 35697, + "Ġwaitress": 35698, + "eph": 35699, + "arrass": 35700, + "ikawa": 35701, + "feel": 35702, + "Finally": 35703, + "fourth": 35704, + "394": 35705, + "conom": 35706, + "VT": 35707, + "Ġeleg": 35708, + "ivot": 35709, + "Ġharsher": 35710, + "ĠPepe": 35711, + "ĠImpl": 35712, + "Ġankles": 35713, + "idity": 35714, + "ĠPrepare": 35715, + "Rather": 35716, + "Ġconservatism": 35717, + "Ġunquestion": 35718, + "ribution": 35719, + "ĠPatent": 35720, + "ĠDeluxe": 35721, + "ĠAE": 35722, + "007": 35723, + "Ġprag": 35724, + "bg": 35725, + "Ġpalate": 35726, + "Ġintric": 35727, + "ossom": 35728, + "Ġspac": 35729, + "ĠSpotlight": 35730, + "Seven": 35731, + "amacare": 35732, + "ĠGotham": 35733, + "Ġencompass": 35734, + "Ġnicer": 35735, + "ĠLauder": 35736, + "Ġscaff": 35737, + "worn": 35738, + "442": 35739, + "Ġpropri": 35740, + "443": 35741, + "ĠCompos": 35742, + "ĠIniti": 35743, + "inth": 35744, + "Ġrehe": 35745, + "Prov": 35746, + "Ġgri": 35747, + "ossip": 35748, + "ĠModest": 35749, + "quiet": 35750, + "Ġwealthier": 35751, + "Ġ241": 35752, + "icum": 35753, + "Ġcommunism": 35754, + "Ġhelpers": 35755, + "Ġbellig": 35756, + "Ġ405": 35757, + "uttered": 35758, + "Ġbitterness": 35759, + "nl": 
35760, + "474": 35761, + "Ġvitality": 35762, + "blank": 35763, + "ĠLeth": 35764, + "PAC": 35765, + "326": 35766, + "ĠNapoleon": 35767, + "Ġ299": 35768, + "ĠReviews": 35769, + "ĠSect": 35770, + "Ġstrongh": 35771, + "ĠTube": 35772, + "Ġwoodland": 35773, + "Ġhumming": 35774, + "411": 35775, + "Alpha": 35776, + "Ġundet": 35777, + "Ġmounts": 35778, + "Officials": 35779, + "igning": 35780, + "830": 35781, + "ĠStamp": 35782, + "ubby": 35783, + "424": 35784, + "Ġoutlandish": 35785, + "Ġjerk": 35786, + "Ġradiant": 35787, + "Ġcubes": 35788, + "Director": 35789, + "Ġatro": 35790, + "vous": 35791, + "Sab": 35792, + "Ġpretended": 35793, + "Ġ620": 35794, + "975": 35795, + "Sham": 35796, + "Ġpotassium": 35797, + "ĠAttention": 35798, + "gly": 35799, + "opens": 35800, + "ĠWorker": 35801, + "porter": 35802, + "Ġsplendid": 35803, + "embed": 35804, + "Je": 35805, + "ĠMeal": 35806, + "Ġsurname": 35807, + "Usually": 35808, + "Ġtimer": 35809, + "Ġweave": 35810, + "irin": 35811, + "ĠGenetics": 35812, + "ensual": 35813, + "Ġmerry": 35814, + "Ġapprehend": 35815, + "utsche": 35816, + "strate": 35817, + "Ġsupplementary": 35818, + "ĠRoundup": 35819, + "upid": 35820, + "Ġmiraculous": 35821, + "ĠHUN": 35822, + "Ġglaciers": 35823, + "weed": 35824, + "ĠSuggest": 35825, + "XL": 35826, + "authors": 35827, + "Ġbarking": 35828, + "ĠUKIP": 35829, + "leased": 35830, + "ĠRAD": 35831, + "Ġfide": 35832, + "Ġphen": 35833, + "Ġscanners": 35834, + "Parents": 35835, + "ĠBlaze": 35836, + "Ġtweaking": 35837, + "Ġelaborated": 35838, + "Ġsusp": 35839, + "iscovered": 35840, + "Ġthighs": 35841, + "Ġradicals": 35842, + "ULTS": 35843, + "aggressive": 35844, + "endants": 35845, + "Hon": 35846, + "Ġcorrecting": 35847, + "391": 35848, + "pps": 35849, + "ĠTerritories": 35850, + "Ġconferred": 35851, + "crazy": 35852, + "utor": 35853, + "ĠSurvival": 35854, + "Ġbrowsers": 35855, + "ĠConflict": 35856, + "pn": 35857, + "Ġdeprive": 35858, + "riage": 35859, + "ilan": 35860, + "à¦": 35861, + "949": 35862, + "Congratulations": 35863, + "radical": 35864, + "ĠHits": 35865, + "powerful": 35866, + "Ġcrypt": 35867, + "745": 35868, + "ĠRegistrar": 35869, + "ophile": 35870, + "ĠElement": 35871, + "cooked": 35872, + "ĠTwilight": 35873, + "Ġdemos": 35874, + "IER": 35875, + "Ġstricken": 35876, + "Magic": 35877, + "abby": 35878, + "ĠSack": 35879, + "ĠShrine": 35880, + "Nev": 35881, + "Probably": 35882, + "ĠWisdom": 35883, + "ulpt": 35884, + "opher": 35885, + "Ġcolonel": 35886, + "atl": 35887, + "Tem": 35888, + "kun": 35889, + "ĠIndie": 35890, + "Putin": 35891, + "jection": 35892, + "areth": 35893, + "ĠBullet": 35894, + "Ġsmartest": 35895, + "ĠEsper": 35896, + "Ġproficiency": 35897, + "Ġcessation": 35898, + "Ġmars": 35899, + "ĠDATA": 35900, + "sup": 35901, + "Ġostr": 35902, + "Jane": 35903, + "Ġpathogens": 35904, + "hd": 35905, + "ĠNK": 35906, + "Ġhorribly": 35907, + "regulated": 35908, + "Ġesteemed": 35909, + "ĠChinatown": 35910, + "Ġvibration": 35911, + "Ġoverboard": 35912, + "ĠRhod": 35913, + "Ġfeces": 35914, + "otation": 35915, + "Ġcryptic": 35916, + "Bal": 35917, + "OPER": 35918, + "Ġaffirmation": 35919, + "Ġmenstrual": 35920, + "Ġuntold": 35921, + "Ġanecdotes": 35922, + "ĠHOUSE": 35923, + "Ġcape": 35924, + "311": 35925, + "ittance": 35926, + "ĠRemy": 35927, + "ĠWaves": 35928, + "ĠCOVER": 35929, + "ordinate": 35930, + "Ġrestricts": 35931, + "Samsung": 35932, + "Ġplantations": 35933, + "olver": 35934, + "Better": 35935, + "ĠExplos": 35936, + "Ġnasal": 35937, + "ĠSyri": 35938, + "ĠPerl": 35939, + "Ġlatency": 35940, + "othermal": 35941, + "Sweet": 35942, + 
"ĠRyzen": 35943, + "ĠYuri": 35944, + "Ġsmack": 35945, + "Ġcrow": 35946, + "aniel": 35947, + "iological": 35948, + "Ġmonk": 35949, + "Ġtutorial": 35950, + "ĠAure": 35951, + "Ġcliffs": 35952, + "ameron": 35953, + "umers": 35954, + "ĠMour": 35955, + "Ġunorthodox": 35956, + "Ġgulf": 35957, + "Ġintrusive": 35958, + "ĠVIII": 35959, + "ĠFF": 35960, + "Ġenlarged": 35961, + "Ġspheres": 35962, + "ĠCheap": 35963, + "ĠAmend": 35964, + "Ġ::": 35965, + "Ġpacing": 35966, + "ĠStartup": 35967, + "ĠDating": 35968, + "racist": 35969, + "ĠDivine": 35970, + "Ġpollen": 35971, + "ĠMeaning": 35972, + "ĠLei": 35973, + "ĠMOT": 35974, + "ĠARC": 35975, + "legate": 35976, + "Ġbrav": 35977, + "Ross": 35978, + "redit": 35979, + "414": 35980, + "ringe": 35981, + "perhaps": 35982, + "SPA": 35983, + "Southern": 35984, + "Front": 35985, + "undrum": 35986, + "Ġassorted": 35987, + "ĠDawkins": 35988, + "ĠWrap": 35989, + "Ġconsequential": 35990, + "ĠFuji": 35991, + "458": 35992, + "Ġunst": 35993, + "Bon": 35994, + "acter": 35995, + "Trade": 35996, + "ingers": 35997, + "ĠClin": 35998, + "Ġstimul": 35999, + "arah": 36000, + "inois": 36001, + "urdy": 36002, + "Ġobsessive": 36003, + "Zone": 36004, + "Ġprimitive": 36005, + "unctions": 36006, + "Ġadapter": 36007, + "Ġassures": 36008, + "Daddy": 36009, + "Ġunsatisf": 36010, + "441": 36011, + "Ġ1910": 36012, + "Ġsecondly": 36013, + "truth": 36014, + "RED": 36015, + "040": 36016, + "Pope": 36017, + "venants": 36018, + "Ġestim": 36019, + "Ġhemorrh": 36020, + "Ġexcruciating": 36021, + "459": 36022, + "Ġboils": 36023, + "ieved": 36024, + "Storm": 36025, + "Ġmanifestation": 36026, + "Ġinsulated": 36027, + "fb": 36028, + "Ġclassify": 36029, + "Mbps": 36030, + "Ġinclination": 36031, + "Ġaur": 36032, + "Ġpolarized": 36033, + "Ġoccupations": 36034, + "Secretary": 36035, + "Ġcustomizable": 36036, + "scribe": 36037, + "Ġadjunct": 36038, + "Ġ1922": 36039, + "rived": 36040, + "ocative": 36041, + "Friends": 36042, + "Oak": 36043, + "Ġpsyche": 36044, + "Ġwrinkles": 36045, + "anthrop": 36046, + "Ġcoercion": 36047, + "enos": 36048, + "Ġvariability": 36049, + "hma": 36050, + "phot": 36051, + "ĠXander": 36052, + "ĠDiss": 36053, + "Ġtigers": 36054, + "ahoo": 36055, + "focus": 36056, + "rical": 36057, + "grow": 36058, + "Ġseminal": 36059, + "Ġdisciples": 36060, + "Cas": 36061, + "Hundreds": 36062, + "Ġscissors": 36063, + "correct": 36064, + "Ġfascism": 36065, + "imoto": 36066, + "Ġnudity": 36067, + "charg": 36068, + "Ġrusty": 36069, + "ĠLyndon": 36070, + "Ġanomalies": 36071, + "onial": 36072, + "ĠiCloud": 36073, + "Ġannoy": 36074, + "Ġdistortion": 36075, + "Lou": 36076, + "ĠGiul": 36077, + "eyes": 36078, + "870": 36079, + "uum": 36080, + "ĠUltr": 36081, + "Action": 36082, + "cigarette": 36083, + "igators": 36084, + "kj": 36085, + "Ġ323": 36086, + "uine": 36087, + "Score": 36088, + "Ġmans": 36089, + "Security": 36090, + "Ġarom": 36091, + "ĠBoards": 36092, + "Ġwrists": 36093, + "602": 36094, + "Ġastronomy": 36095, + "Ġresin": 36096, + "width": 36097, + ")/": 36098, + "Ġconcurrent": 36099, + "unless": 36100, + "606": 36101, + "ĠMagnet": 36102, + "Ġauthorizing": 36103, + "ĠJunk": 36104, + "atical": 36105, + "Ġauthent": 36106, + "zac": 36107, + "413": 36108, + "ĠGrape": 36109, + "Ġcircled": 36110, + "Ġooz": 36111, + "Ġvisceral": 36112, + "ointment": 36113, + "Ġincendiary": 36114, + "ĠBourbon": 36115, + "Ġgimmick": 36116, + "vette": 36117, + "Stan": 36118, + "Ġdetachment": 36119, + "488": 36120, + "Ġmisogyny": 36121, + "Ġenlight": 36122, + "utic": 36123, + "Ġinquire": 36124, + "ĠBEL": 36125, + "ascular": 36126, 
+ "ĠWasserman": 36127, + "Dallas": 36128, + "Ġconstellation": 36129, + "Ġdystopian": 36130, + "504": 36131, + "ĠOptical": 36132, + "Ġsilhou": 36133, + "Girl": 36134, + "ĠGong": 36135, + "ĠHighest": 36136, + "????????": 36137, + "Sav": 36138, + "ocity": 36139, + "leted": 36140, + "Ġattrition": 36141, + "ĠExpedition": 36142, + "ĠKilled": 36143, + "501": 36144, + "ONES": 36145, + "dat": 36146, + "Ġglyphosate": 36147, + "Ġplugs": 36148, + "Ġlact": 36149, + "Fla": 36150, + "fps": 36151, + "riger": 36152, + "Ġparagraphs": 36153, + "Ġinnate": 36154, + "ĠFoo": 36155, + "aternity": 36156, + "ĠGry": 36157, + "Ġoneself": 36158, + "642": 36159, + "Iowa": 36160, + "oodle": 36161, + "ĠCoconut": 36162, + "ĠChess": 36163, + "ommel": 36164, + "Ġmagnesium": 36165, + "Ġairliner": 36166, + "Ġexceedingly": 36167, + "ĠCreator": 36168, + "YouTube": 36169, + "Ġsleeper": 36170, + "Ġlonging": 36171, + "ĠPercy": 36172, + "Ġmatrix": 36173, + "Ġâľ": 36174, + "Ġbarren": 36175, + "Mrs": 36176, + "Ġinvading": 36177, + "Ġincom": 36178, + "Ġemperor": 36179, + "Ġip": 36180, + "irie": 36181, + "Ġpredictably": 36182, + "ĠBless": 36183, + "Ġsuperpower": 36184, + ":-": 36185, + "Ġpropensity": 36186, + "easy": 36187, + "educ": 36188, + "ĠPolly": 36189, + "Ġcumbersome": 36190, + "Ġcollide": 36191, + "016": 36192, + "Ġtransports": 36193, + "Ġscraps": 36194, + "below": 36195, + "Ġhairs": 36196, + "mentation": 36197, + "Ġevolves": 36198, + "ĠFallen": 36199, + "Ġunsurprisingly": 36200, + "Ġcuff": 36201, + "Ġ249": 36202, + "mental": 36203, + "ĠCamel": 36204, + "Ġ337": 36205, + "Clinton": 36206, + "Ġdecad": 36207, + "ĠSTEP": 36208, + "ĠTestament": 36209, + "Ġirresistible": 36210, + "ĠACE": 36211, + "Ġhamm": 36212, + "ĠTerr": 36213, + "Ġcaul": 36214, + "iggins": 36215, + "Ġproficient": 36216, + "resp": 36217, + "Ġheirs": 36218, + "Ġ321": 36219, + "dress": 36220, + "ĠClothing": 36221, + "Ġ560": 36222, + "Ġ264": 36223, + "ĠRobb": 36224, + "Ġfrail": 36225, + "Ġoptimizing": 36226, + "615": 36227, + "ĠRefuge": 36228, + "rowth": 36229, + "washing": 36230, + "Ġgenders": 36231, + "indu": 36232, + "ĠNAT": 36233, + "Ġleans": 36234, + "Ġeyed": 36235, + "Ġhilar": 36236, + "vice": 36237, + "wolf": 36238, + "Ġfatig": 36239, + "ococ": 36240, + "ĠCarry": 36241, + "Community": 36242, + "Clark": 36243, + "itably": 36244, + "sv": 36245, + "448": 36246, + "Ġnumer": 36247, + "Ġ1925": 36248, + "ĠBehavioral": 36249, + "ĠScream": 36250, + "Ġgeek": 36251, + "rake": 36252, + "ĠTTC": 36253, + "Ġadditives": 36254, + "ĠBye": 36255, + "ylon": 36256, + "Ġfoliage": 36257, + "ateral": 36258, + "rapnel": 36259, + "Science": 36260, + "Ġrecollection": 36261, + "thening": 36262, + "ĠUbisoft": 36263, + "ĠLur": 36264, + "ĠOkinawa": 36265, + "ĠProvision": 36266, + "ferred": 36267, + "ĠGrounds": 36268, + "Ġhops": 36269, + "aterial": 36270, + "Ġacad": 36271, + "Ġengulf": 36272, + "ĠApex": 36273, + "frequency": 36274, + "relations": 36275, + "ĠCorvette": 36276, + "ĠRepeat": 36277, + "Ġanew": 36278, + "Ġhes": 36279, + "ĠLair": 36280, + "ĠPSP": 36281, + "foundation": 36282, + "Band": 36283, + "ĠPublisher": 36284, + "Ġreciprocal": 36285, + "Ġ287": 36286, + "Ġpir": 36287, + "Adams": 36288, + "Ġprostitute": 36289, + "ĠMecca": 36290, + "ectomy": 36291, + "Ġskew": 36292, + "ĠLol": 36293, + "Voice": 36294, + "ĠCalais": 36295, + "ISION": 36296, + "rue": 36297, + "Ġgaping": 36298, + "prot": 36299, + "Ġ6000": 36300, + "Ġtilted": 36301, + "Ġgoofy": 36302, + "Stand": 36303, + "Ġfellows": 36304, + "Ġcurly": 36305, + "ĠPOW": 36306, + "Ġlore": 36307, + "Ġinhabited": 36308, + "ĠIdentification": 
36309, + "Metro": 36310, + "Ġdispel": 36311, + "Ġinvoking": 36312, + "Ġdeleting": 36313, + "Ġstigmat": 36314, + "ĠDalai": 36315, + "Ġequate": 36316, + "Ġmascara": 36317, + "endered": 36318, + "ĠNYT": 36319, + "ĠCommittees": 36320, + "rians": 36321, + "ĠOlympus": 36322, + "ĠQR": 36323, + "ĠDrinking": 36324, + "Ġbatt": 36325, + "andr": 36326, + "computer": 36327, + "Senator": 36328, + "ĠTwist": 36329, + "ĠNoise": 36330, + "Ġcheesy": 36331, + "Ġ1931": 36332, + "Ġtyranny": 36333, + "Ġnegligible": 36334, + "ĠBok": 36335, + "Ġwebpage": 36336, + "ĠHEAD": 36337, + "ĠNovel": 36338, + "Ġquarry": 36339, + "Ġexpressive": 36340, + "Ġforgiving": 36341, + "Among": 36342, + "asin": 36343, + "ĠSuc": 36344, + "Democrats": 36345, + "795": 36346, + "Ġaback": 36347, + "¨": 36348, + "ĠNeon": 36349, + "392": 36350, + "ĠRNC": 36351, + "ĠPROC": 36352, + "sein": 36353, + "Ros": 36354, + "Ġemot": 36355, + "ĠASA": 36356, + "ĠSeb": 36357, + "ĠExtended": 36358, + "atern": 36359, + "Ġpsychedelic": 36360, + "Fil": 36361, + "ĠOrwell": 36362, + "ĠSOS": 36363, + "Ġconceive": 36364, + "Ġhobbies": 36365, + "Ġspecimens": 36366, + "ĠTEXT": 36367, + "sometimes": 36368, + "Mario": 36369, + "orpor": 36370, + "ĠTemporary": 36371, + "Ġapocalypse": 36372, + "Ġcounterproductive": 36373, + "ĠQUEST": 36374, + "ĠCargo": 36375, + "Amb": 36376, + "Ġoptic": 36377, + "groups": 36378, + "Ġparanoia": 36379, + ".?": 36380, + "sounding": 36381, + "mediately": 36382, + "System": 36383, + "ubi": 36384, + "Ġuttered": 36385, + "Ġgraphs": 36386, + "âĢĭâĢĭ": 36387, + "Ġscientifically": 36388, + "Ġbluntly": 36389, + "Ġhopping": 36390, + "Fun": 36391, + "ĠSUPER": 36392, + "Ġrobe": 36393, + "VB": 36394, + "ĠQuote": 36395, + "Ġincarnation": 36396, + "Ġtreadmill": 36397, + "Ġ1915": 36398, + "Ġbart": 36399, + "669": 36400, + "Ġhoc": 36401, + "Ġ309": 36402, + "Ġimprovis": 36403, + "Ġhut": 36404, + "Ġmixer": 36405, + "ĠCt": 36406, + "span": 36407, + "Ġwatered": 36408, + "Ġpatriot": 36409, + "Ġdehyd": 36410, + "laughs": 36411, + "ĠFancy": 36412, + "ĠVoc": 36413, + "Ġintellect": 36414, + "ĠTid": 36415, + "Ġnesting": 36416, + "Tel": 36417, + "Ġ()": 36418, + "letter": 36419, + "ĠSeems": 36420, + "Ops": 36421, + "ĠContents": 36422, + "ript": 36423, + "hani": 36424, + "Ġrecru": 36425, + "Ġpickups": 36426, + "repair": 36427, + "Throughout": 36428, + "bear": 36429, + "Ġconquered": 36430, + "656": 36431, + "Ġmalf": 36432, + "Ġordained": 36433, + "755": 36434, + "ĠReprodu": 36435, + "brain": 36436, + "ĠOuts": 36437, + "ĠWage": 36438, + "Ru": 36439, + "________": 36440, + "ĠLAW": 36441, + "ĠWass": 36442, + "Ġcomplication": 36443, + "Fri": 36444, + "Ġregener": 36445, + "Wait": 36446, + "577": 36447, + "Ġmisconception": 36448, + "Ġbombardment": 36449, + "Ġunloaded": 36450, + "Ġdictionary": 36451, + "IU": 36452, + "025": 36453, + "etically": 36454, + "ĠNarr": 36455, + "repe": 36456, + "Ġassigning": 36457, + "Rail": 36458, + "Ġnotebooks": 36459, + "Ġingest": 36460, + "Ġrpm": 36461, + "Ġalienated": 36462, + "ĠCredits": 36463, + "Ġindis": 36464, + "ĠGathering": 36465, + "aration": 36466, + "-+-+-+-+": 36467, + "Ġori": 36468, + "Ġsr": 36469, + "ndra": 36470, + "Ġlibertarian": 36471, + "Ġcoerced": 36472, + "ording": 36473, + "Ġtranqu": 36474, + "Ġelbows": 36475, + "549": 36476, + "Ġping": 36477, + "ĠRELE": 36478, + "ĠYanuk": 36479, + "Ġmaneuvers": 36480, + "ĠTrojan": 36481, + "IFIED": 36482, + "ĠViolent": 36483, + "è": 36484, + "Ġlest": 36485, + "Ġarrows": 36486, + "frog": 36487, + "anty": 36488, + "WB": 36489, + "ĠSeen": 36490, + "648": 36491, + "Ġclutter": 36492, + "ĠBender": 
36493, + "Ġpessim": 36494, + "ĠTeg": 36495, + "Asian": 36496, + "IFIC": 36497, + "Ġexponential": 36498, + "Ġsponge": 36499, + "rite": 36500, + "ĠDAM": 36501, + "Ġtacit": 36502, + "ĠZoom": 36503, + "Ġolds": 36504, + "Ġonward": 36505, + "ĠSandwich": 36506, + "missible": 36507, + "isol": 36508, + "940": 36509, + "Ġinciner": 36510, + "ĠTrick": 36511, + "Ġawakening": 36512, + "Ġdart": 36513, + "ĠCouch": 36514, + "respons": 36515, + "ĠElephant": 36516, + "ĠPluto": 36517, + "ĠTags": 36518, + "itcher": 36519, + "644": 36520, + "702": 36521, + "Ġelectrons": 36522, + "ĠMyth": 36523, + "ĠAad": 36524, + "Danny": 36525, + "Ġcraw": 36526, + "ĠCertification": 36527, + "Ġtending": 36528, + "Ġpellets": 36529, + "Ġamused": 36530, + "ĠAuschwitz": 36531, + "ĠAppl": 36532, + "iris": 36533, + "ashion": 36534, + "walking": 36535, + "Ġabnorm": 36536, + "Cro": 36537, + "?:": 36538, + "ĠIcelandic": 36539, + "ĠAvailability": 36540, + "Ġcann": 36541, + "Opt": 36542, + "buster": 36543, + "ĠQuartz": 36544, + "Executive": 36545, + "tracks": 36546, + "igel": 36547, + "MIT": 36548, + "ĠTracking": 36549, + "Ġconditioned": 36550, + "Ġsampled": 36551, + "ĠGenius": 36552, + "Ġsubstit": 36553, + "ĠSiberia": 36554, + "Ġfrequ": 36555, + "historic": 36556, + "okin": 36557, + "OWS": 36558, + "1500": 36559, + "warts": 36560, + "ĠEtsy": 36561, + "licks": 36562, + "ĠSmooth": 36563, + "unity": 36564, + "515": 36565, + "Ġperk": 36566, + "aida": 36567, + "forts": 36568, + "ĠUA": 36569, + "RIC": 36570, + "Spain": 36571, + "ĠWired": 36572, + "cuts": 36573, + "Ġfurnace": 36574, + "ĠTOTAL": 36575, + "ĠTables": 36576, + "662": 36577, + "Fab": 36578, + "Ġquaint": 36579, + "ĠWorlds": 36580, + "ĠCabin": 36581, + "atche": 36582, + "List": 36583, + "ĠVO": 36584, + "Ġkeyword": 36585, + "Ġ258": 36586, + "Farm": 36587, + "timer": 36588, + "ĠVolt": 36589, + "Build": 36590, + "pressed": 36591, + "*,": 36592, + "Ġ324": 36593, + "aiman": 36594, + "TING": 36595, + "Ġsneaking": 36596, + "cery": 36597, + "Ġcrib": 36598, + "ĠIllust": 36599, + "later": 36600, + "Ġcompar": 36601, + "Ġpropulsion": 36602, + "647": 36603, + "ĠTrails": 36604, + "Ġperiphery": 36605, + "steel": 36606, + "Ġvividly": 36607, + "ĠConver": 36608, + "eatured": 36609, + "427": 36610, + "463": 36611, + "Ġapprox": 36612, + "spin": 36613, + "Ġconfigured": 36614, + "inside": 36615, + "razy": 36616, + "account": 36617, + "anye": 36618, + "riend": 36619, + "Ġbows": 36620, + "809": 36621, + "ĠDEF": 36622, + "ĠRez": 36623, + "Fans": 36624, + "ĠDF": 36625, + "Ġstains": 36626, + "ĠAtom": 36627, + "ĠConce": 36628, + "ĠTOM": 36629, + "ĠELECT": 36630, + "Ġdisappro": 36631, + "019": 36632, + "afia": 36633, + "ĠTemperature": 36634, + "Ġextracts": 36635, + "fab": 36636, + "Ġunsur": 36637, + "Ġseasoning": 36638, + "Ty": 36639, + "KB": 36640, + "Ġposit": 36641, + "Ġlocality": 36642, + "1200": 36643, + "cour": 36644, + "izons": 36645, + "hh": 36646, + "506": 36647, + "ĠDLC": 36648, + "iago": 36649, + "Ġcorpses": 36650, + "iddling": 36651, + "Mayor": 36652, + "Ġsimplistic": 36653, + "Ġlibel": 36654, + "Ġalmonds": 36655, + "Ġswast": 36656, + "Change": 36657, + "ĠJoker": 36658, + "MAR": 36659, + "ĠScully": 36660, + "Ġmailbox": 36661, + "VIDEO": 36662, + "ĠKyoto": 36663, + "esley": 36664, + "ĠIncredible": 36665, + "youtube": 36666, + "Ġinequalities": 36667, + "Ġbolts": 36668, + "Ġbothering": 36669, + "Ġattentive": 36670, + "ĠSparrow": 36671, + "Ġdiaper": 36672, + "Ġfanbase": 36673, + "Ġuncont": 36674, + "Ap": 36675, + "ĠQi": 36676, + "Price": 36677, + "471": 36678, + "Ġpearl": 36679, + "wid": 36680, + "899": 
36681, + "ĠPony": 36682, + "casting": 36683, + "Ġinhabit": 36684, + "Ġunve": 36685, + "Ġinsur": 36686, + "ĠWee": 36687, + "658": 36688, + "Ġeffected": 36689, + "gger": 36690, + "Ġinstallments": 36691, + "imilar": 36692, + "FU": 36693, + "Ġinfertility": 36694, + "climate": 36695, + "HEAD": 36696, + "fashion": 36697, + "ĠTHEY": 36698, + "jc": 36699, + "Ġsatisf": 36700, + "ĠGuidelines": 36701, + "Ġinsure": 36702, + "ĠRSA": 36703, + "Ġvirt": 36704, + "Ġinterpre": 36705, + "Joshua": 36706, + "ĠShut": 36707, + "Ġtestimonies": 36708, + "Ñģ": 36709, + "untary": 36710, + "417": 36711, + "Ġbeck": 36712, + "ĠMilky": 36713, + "ç": 36714, + "Ġsequels": 36715, + "Ġ281": 36716, + "ĠRibbon": 36717, + "Ġroomm": 36718, + "Ġsynchron": 36719, + "452": 36720, + "Ġ1926": 36721, + "Ġhawk": 36722, + "ĠDisorder": 36723, + "Ġbackstory": 36724, + "ĠNum": 36725, + "Ġoverheard": 36726, + "technical": 36727, + "Jud": 36728, + "aii": 36729, + "Ġdecon": 36730, + "ĠRape": 36731, + "ĠWarrant": 36732, + "Ġpoop": 36733, + "spir": 36734, + "Country": 36735, + "Ġweld": 36736, + "Ġabuser": 36737, + "Ġ------": 36738, + "material": 36739, + "Ġpreserves": 36740, + "spring": 36741, + "Ġpuzzled": 36742, + "ĠDebate": 36743, + "Joseph": 36744, + "Ġ272": 36745, + "Blood": 36746, + "antry": 36747, + "Ġconverge": 36748, + "Ġimaginable": 36749, + "oward": 36750, + "545": 36751, + "Ġfug": 36752, + "Vision": 36753, + "075": 36754, + "Ġadoptive": 36755, + "Ġunknow": 36756, + "Stream": 36757, + "Ġaffili": 36758, + "ĠPUR": 36759, + "ĠWally": 36760, + "Ġgamer": 36761, + "Ġfart": 36762, + "stice": 36763, + "Ġcongen": 36764, + "н": 36765, + "685": 36766, + "orst": 36767, + "ĠATF": 36768, + "Ġml": 36769, + "ĠMozilla": 36770, + "Ġcalmed": 36771, + "bage": 36772, + "ĠVault": 36773, + "arkable": 36774, + "ĠGuan": 36775, + "Ġclueless": 36776, + "umatic": 36777, + "Ġshameless": 36778, + "Ġpreached": 36779, + "Ġmisconceptions": 36780, + "Ġanthology": 36781, + "Ġbiomass": 36782, + "ĠPs": 36783, + "tails": 36784, + "Ġexcessively": 36785, + "Ġextr": 36786, + "Davis": 36787, + "Ġgrounding": 36788, + "Ġshortcuts": 36789, + "ĠShift": 36790, + "ĠRew": 36791, + "ĠIllum": 36792, + "Ġincite": 36793, + "sense": 36794, + "ĠScouting": 36795, + "otos": 36796, + "respond": 36797, + "Ġbeware": 36798, + "gran": 36799, + "ĠXV": 36800, + "JM": 36801, + "ĠSounders": 36802, + "Ġ276": 36803, + "Ġshockingly": 36804, + "Ġgastrointestinal": 36805, + "erences": 36806, + "df": 36807, + "ĠNG": 36808, + "Ġdiscredited": 36809, + "Ġdemoral": 36810, + "Ġgladly": 36811, + "Tal": 36812, + "ĠPredator": 36813, + "708": 36814, + "Ġdoi": 36815, + "Ġdecentral": 36816, + "illin": 36817, + "printed": 36818, + "Ġinflicting": 36819, + "ribes": 36820, + "Ġsupper": 36821, + "abc": 36822, + "Ġgraz": 36823, + "980": 36824, + "Bull": 36825, + "Ġmillionaires": 36826, + "Ġvanity": 36827, + "imony": 36828, + "Ġbiologists": 36829, + "Ġalternating": 36830, + "Ġsleeps": 36831, + "Force": 36832, + "ĠPrinc": 36833, + "ĠTransgender": 36834, + "Ġ314": 36835, + "ĠProvide": 36836, + "enthal": 36837, + "Ġplum": 36838, + "Ġresurrect": 36839, + "CW": 36840, + "Ġinjure": 36841, + "ĠPerspective": 36842, + "ĠBei": 36843, + "Ġrestless": 36844, + "aciously": 36845, + "Ġchlor": 36846, + "catch": 36847, + "ĠLuigi": 36848, + "Ġinconsistency": 36849, + "Ġwhiff": 36850, + "Arizona": 36851, + "ustration": 36852, + "ĠRaid": 36853, + "ĠDemons": 36854, + "ĠVita": 36855, + ":\"": 36856, + "Ġmigraine": 36857, + "ĠHamb": 36858, + "Ġwidget": 36859, + "451": 36860, + "Ġrandomized": 36861, + "etchup": 36862, + "ĠParticularly": 36863, 
+ "Ġdiced": 36864, + "Ġperfected": 36865, + "roid": 36866, + "710": 36867, + "Ġreflections": 36868, + "Ġantioxidants": 36869, + "ĠLabel": 36870, + "Ġ326": 36871, + "igious": 36872, + "ĠEucl": 36873, + "608": 36874, + "Ġstrand": 36875, + "ĠDirt": 36876, + "ĠLift": 36877, + "suits": 36878, + "ĠControls": 36879, + "RAW": 36880, + "Ġcowardly": 36881, + "ĠUmb": 36882, + "Growing": 36883, + "mington": 36884, + "Ġ339": 36885, + "ĠCommit": 36886, + "Ġnonviolent": 36887, + "Ġcontaminants": 36888, + "Ġacrylic": 36889, + "ĠMAP": 36890, + "Ġ269": 36891, + "Ġdegrading": 36892, + "Ġmiracles": 36893, + "ĠEstablishment": 36894, + "despite": 36895, + "cry": 36896, + "Ġpauses": 36897, + "Ġmythical": 36898, + "Ġtwenties": 36899, + "Actually": 36900, + "phan": 36901, + "recorded": 36902, + "Ġunwillingness": 36903, + "engineering": 36904, + "avored": 36905, + "Ġdevout": 36906, + "item": 36907, + "Ġbunny": 36908, + "ĠMerchants": 36909, + "Ġconsumes": 36910, + "508": 36911, + "Ġlex": 36912, + "ĠClause": 36913, + "Ġchecklist": 36914, + "Sus": 36915, + "uther": 36916, + ".#": 36917, + "Bit": 36918, + "uay": 36919, + "bf": 36920, + "Ġpopulace": 36921, + "Ġ316": 36922, + "Ġcombust": 36923, + "Ġnano": 36924, + "Ġpopul": 36925, + "Indust": 36926, + "Ġcapitalists": 36927, + "ĠFiles": 36928, + "Bang": 36929, + "Ġkosher": 36930, + "atile": 36931, + "Ġincrim": 36932, + "OVER": 36933, + "Ġmelee": 36934, + "ymph": 36935, + "ĠPupp": 36936, + "evin": 36937, + "ĠMolecular": 36938, + "Ġmisinterpret": 36939, + "vc": 36940, + "olithic": 36941, + "ĠSimpsons": 36942, + "Ġshrew": 36943, + "Ġselectively": 36944, + "ĠDrain": 36945, + "mittedly": 36946, + "conservative": 36947, + "True": 36948, + "Using": 36949, + "562": 36950, + "apon": 36951, + "Ġapprentice": 36952, + "Mas": 36953, + "ĠBattlefield": 36954, + "Ġfing": 36955, + "Ġconcoct": 36956, + "ĠVIS": 36957, + "ĠHuss": 36958, + "Ġdetects": 36959, + "ĠFriedrich": 36960, + "Ġlatitude": 36961, + "Custom": 36962, + "ĠÙ": 36963, + "ĠBones": 36964, + "whose": 36965, + "Ġredirected": 36966, + "aligned": 36967, + "ĠNeighbor": 36968, + "ĠAmen": 36969, + "ĠMarble": 36970, + "Beyond": 36971, + "Ġbiomark": 36972, + "Ġerroneous": 36973, + "Atlanta": 36974, + "Ġmasturb": 36975, + "ĠAssoci": 36976, + "Albert": 36977, + "Ġcigar": 36978, + "ĠFraz": 36979, + "ethe": 36980, + "skinned": 36981, + "Ford": 36982, + "throp": 36983, + "Acc": 36984, + "Ġtricked": 36985, + "Ġoverwhelm": 36986, + "Ġimplements": 36987, + "ĠGeForce": 36988, + "Ġbounces": 36989, + "Ġmoderator": 36990, + "910": 36991, + "ĠButterfly": 36992, + "ĠIllegal": 36993, + "ĠSubject": 36994, + "RET": 36995, + "ĠFreeze": 36996, + "ĠNewt": 36997, + "Ġuterus": 36998, + "696": 36999, + "Ġ267": 37000, + "tk": 37001, + "Ġdodged": 37002, + "liam": 37003, + "Ġparasite": 37004, + "obal": 37005, + "ĠHubble": 37006, + "Ġtheology": 37007, + "âĢĶ\"": 37008, + "height": 37009, + "Ale": 37010, + "employment": 37011, + "ĠWallet": 37012, + "cessive": 37013, + "Ġ404": 37014, + "Ġsimilarity": 37015, + "zens": 37016, + "Ġdumps": 37017, + "Ġdepress": 37018, + "Ġlifeless": 37019, + "535": 37020, + "oard": 37021, + "Scotland": 37022, + "Ġbelievable": 37023, + "Ġcalculator": 37024, + "ĠNaked": 37025, + "Ġremission": 37026, + "Ġoranges": 37027, + "ĠSections": 37028, + "Ġentangled": 37029, + "Ġuncanny": 37030, + "Ġteaspoons": 37031, + "vr": 37032, + "ĠPorn": 37033, + "Organ": 37034, + "Ġbund": 37035, + "Doug": 37036, + "ĠGHz": 37037, + "Major": 37038, + "abus": 37039, + "Bell": 37040, + "avier": 37041, + "Ġimplanted": 37042, + "RON": 37043, + "Fle": 37044, + 
"462": 37045, + "509": 37046, + "Ġgoggles": 37047, + "Ġmanuscript": 37048, + "NOT": 37049, + "ĠCanaveral": 37050, + "ĠDID": 37051, + "Season": 37052, + "HAEL": 37053, + "Edge": 37054, + "appiness": 37055, + "DIS": 37056, + "Ġplotted": 37057, + "Ġwrought": 37058, + "Ġquarantine": 37059, + "Ġrearr": 37060, + "itage": 37061, + "Ġsocket": 37062, + "Ġbrig": 37063, + "Ġunbelievably": 37064, + "abytes": 37065, + "TG": 37066, + "Ġ444": 37067, + "ĠOffic": 37068, + "Ġacquaintances": 37069, + "ĠComparison": 37070, + "Nine": 37071, + "ĠFeast": 37072, + "758": 37073, + "YC": 37074, + "Ġfiner": 37075, + "ĠStrawberry": 37076, + "Ġeternity": 37077, + "liament": 37078, + "urrency": 37079, + "ĠCortana": 37080, + "ĠSabbath": 37081, + "Ġsprinkle": 37082, + "unker": 37083, + "ĠUE": 37084, + "flies": 37085, + "Ġblender": 37086, + "Ġacutely": 37087, + "emark": 37088, + "ĠAffect": 37089, + "Politics": 37090, + "Ġsane": 37091, + "Ġcorrosion": 37092, + "Ġspirituality": 37093, + "Ġredeemed": 37094, + "Ġingrained": 37095, + "manager": 37096, + "joined": 37097, + "ĠDumb": 37098, + "ĠHeight": 37099, + "Ġseventeen": 37100, + "Ġ640": 37101, + "Ġreviewer": 37102, + "Ġwallpaper": 37103, + "Ġnurs": 37104, + "Ġsubset": 37105, + "703": 37106, + "Ġsymbolism": 37107, + "Ġdudes": 37108, + "Ġmismatch": 37109, + "gans": 37110, + "please": 37111, + "ĠKE": 37112, + "Ġatom": 37113, + "004": 37114, + "ionic": 37115, + "Ġservings": 37116, + "Ġproxies": 37117, + "Ġtranscription": 37118, + "yx": 37119, + "bowl": 37120, + "iscovery": 37121, + "ĠScotch": 37122, + "brace": 37123, + "riter": 37124, + "ĠDesktop": 37125, + "Ġlimestone": 37126, + "æ": 37127, + "Neg": 37128, + "013": 37129, + "Ġformulas": 37130, + "Ġeval": 37131, + "Ġzombies": 37132, + "GU": 37133, + "ĠHermes": 37134, + "Ġbrist": 37135, + "Mand": 37136, + "Ġmastery": 37137, + "Ġgoverns": 37138, + "Ġconstrued": 37139, + "region": 37140, + "Ġemitted": 37141, + "Vice": 37142, + "060": 37143, + "Jennifer": 37144, + "mol": 37145, + "Ġjealousy": 37146, + "Ġingenuity": 37147, + "bug": 37148, + "olitical": 37149, + "Ġperce": 37150, + "ĠSapp": 37151, + "dim": 37152, + "utral": 37153, + "Ġinterrogated": 37154, + "Gate": 37155, + "Ġamber": 37156, + "911": 37157, + "ĠEveryday": 37158, + "ĠDDR": 37159, + "ĠBlades": 37160, + "Ġnifty": 37161, + "Ġmurderers": 37162, + "Ġpresumption": 37163, + "Pitt": 37164, + "Div": 37165, + "ĠDestination": 37166, + "having": 37167, + "Ġprolifer": 37168, + "Ġbreaker": 37169, + "ĠBW": 37170, + "Ġcourier": 37171, + "Try": 37172, + "ĠBUR": 37173, + "itized": 37174, + "Ġcompress": 37175, + "Ġrepetition": 37176, + "ĠTik": 37177, + "Ġdivergence": 37178, + "Ġcube": 37179, + "everyone": 37180, + "ĠPoles": 37181, + "418": 37182, + "ĠHighly": 37183, + "468": 37184, + "Jeremy": 37185, + "Ġcontradictions": 37186, + "Ġmanure": 37187, + "Sad": 37188, + "pletion": 37189, + "626": 37190, + "Ġ279": 37191, + "Ġfrivolous": 37192, + "ĠCanaan": 37193, + "olor": 37194, + "Ġincapac": 37195, + "ĠGentle": 37196, + "Ġinsomnia": 37197, + "ĠJing": 37198, + "688": 37199, + "ĠViews": 37200, + "Ġsyll": 37201, + "486": 37202, + "antom": 37203, + "Ġcog": 37204, + "aintain": 37205, + "ĠDVDs": 37206, + "Ġ318": 37207, + "archy": 37208, + "Ġreprodu": 37209, + "Ġconcedes": 37210, + "Brook": 37211, + "Ġinterpreting": 37212, + "Ġextracting": 37213, + "Ġess": 37214, + "uning": 37215, + "ĠMathematics": 37216, + "iably": 37217, + "Ġmultit": 37218, + "ĠActs": 37219, + "iliated": 37220, + "Foreign": 37221, + "Ġflaming": 37222, + "ĠCoup": 37223, + "Ġglitches": 37224, + "Ġdifferentiation": 37225, + "ihadi": 
37226, + "ĠDrone": 37227, + "Ġincompatible": 37228, + "asher": 37229, + "documented": 37230, + "agons": 37231, + "wark": 37232, + "Ġshielding": 37233, + "ĠCorrect": 37234, + "romising": 37235, + "uned": 37236, + "Ġconduit": 37237, + "ĠDiablo": 37238, + "Ġbeginner": 37239, + "Ġarchived": 37240, + "smanship": 37241, + "ĠTBD": 37242, + "digy": 37243, + "Ġ322": 37244, + "Ġ268": 37245, + "ĠTears": 37246, + "ĠPriority": 37247, + "Italy": 37248, + "Ġ^": 37249, + "annot": 37250, + "different": 37251, + "Joy": 37252, + "Ġbreathed": 37253, + "heon": 37254, + "Ġracists": 37255, + "Ġvascular": 37256, + "Between": 37257, + "etition": 37258, + "ĠLikely": 37259, + "icans": 37260, + "529": 37261, + "ĠMonsters": 37262, + "agy": 37263, + "Orange": 37264, + "hide": 37265, + "SIM": 37266, + "Ġdeceive": 37267, + "ĠDAR": 37268, + "Ġshattering": 37269, + "Ġow": 37270, + "peak": 37271, + "Ġpreferable": 37272, + "Ġpiping": 37273, + "ĠLEDs": 37274, + "ĠCOMMUN": 37275, + "ĠConstruct": 37276, + "008": 37277, + "Ġdissatisfied": 37278, + "ĠKNOW": 37279, + "ĠFrame": 37280, + "ĠToast": 37281, + "Ġadore": 37282, + "history": 37283, + "Soviet": 37284, + "reporting": 37285, + "Ġ266": 37286, + "pract": 37287, + "ĠSauce": 37288, + "686": 37289, + "ievers": 37290, + "ĠDomain": 37291, + "ousand": 37292, + "768": 37293, + "Cos": 37294, + "609": 37295, + "432": 37296, + "Ġtransl": 37297, + "oof": 37298, + "Ġ292": 37299, + "Turkish": 37300, + "ĠPOLIT": 37301, + "Harris": 37302, + "bj": 37303, + "Ġrodents": 37304, + "556": 37305, + "Ġintellectuals": 37306, + "Ġinteroper": 37307, + "ixt": 37308, + "Ġunbiased": 37309, + "itia": 37310, + "Ġ504": 37311, + "Ġbuttocks": 37312, + "ĠFlam": 37313, + "Ġchrom": 37314, + "Ġ259": 37315, + "shock": 37316, + "ĠRJ": 37317, + "ĠLich": 37318, + "422": 37319, + "Ġcondom": 37320, + "phen": 37321, + "Ġvigilante": 37322, + "Ġowl": 37323, + "Ġdwellings": 37324, + "Ġarchaeologists": 37325, + "Ġ680": 37326, + "RAY": 37327, + "Ġ1921": 37328, + "Ġ625": 37329, + "ĠPLAN": 37330, + "alde": 37331, + "030": 37332, + "abbling": 37333, + "Wave": 37334, + "Ni": 37335, + "Ġfurthe": 37336, + "JS": 37337, + "Ġpsycho": 37338, + "ĠFrançois": 37339, + "Ġundergrad": 37340, + "Ġsuccessors": 37341, + "Ġpadded": 37342, + "introdu": 37343, + "Ġreasoned": 37344, + "Ġvas": 37345, + "creen": 37346, + "onsequ": 37347, + "starter": 37348, + "Court": 37349, + "ĠHIS": 37350, + "Ġplaster": 37351, + "Ġranger": 37352, + "Ġ298": 37353, + "esters": 37354, + "Ġglare": 37355, + "ype": 37356, + "Ġcompute": 37357, + "Ali": 37358, + "mallow": 37359, + "Ġmasculine": 37360, + "ĠExamination": 37361, + "improve": 37362, + "Ġdeclass": 37363, + "Ġdecoration": 37364, + "ĠFIG": 37365, + "abre": 37366, + "Ġstale": 37367, + "abling": 37368, + "ĠRusty": 37369, + "ĠASAP": 37370, + "Ġadjusts": 37371, + "Ġbluff": 37372, + "density": 37373, + "Ġdisse": 37374, + "Ġcensor": 37375, + "ervatives": 37376, + "Ġkettle": 37377, + "Ġskeptics": 37378, + "fd": 37379, + "Imm": 37380, + "461": 37381, + "Ġadvantageous": 37382, + "419": 37383, + "ĠPresents": 37384, + "482": 37385, + "ĠRewards": 37386, + "Ġovershadow": 37387, + "Alabama": 37388, + "ĠCPC": 37389, + "Ġsock": 37390, + "ĠChurches": 37391, + "hidden": 37392, + "Ġcringe": 37393, + "ĠHOR": 37394, + "PB": 37395, + "Pretty": 37396, + "Hong": 37397, + "?),": 37398, + "687": 37399, + "Ġgrocer": 37400, + "472": 37401, + "565": 37402, + "itent": 37403, + "Ġpartake": 37404, + "wait": 37405, + "usters": 37406, + "Ġcones": 37407, + "Ġconcurrently": 37408, + "Ġlevers": 37409, + "Ġaroma": 37410, + "ĠDrill": 37411, + "498": 
37412, + "804": 37413, + "ithering": 37414, + "Ġ355": 37415, + "Ġlegion": 37416, + "Ġvitri": 37417, + "Ġcondu": 37418, + "Angel": 37419, + "OWER": 37420, + "Ġ{*": 37421, + "Simon": 37422, + "Ġsynthesis": 37423, + "ĠContainer": 37424, + "sheet": 37425, + "Bi": 37426, + "ĠRaspberry": 37427, + "Ġ328": 37428, + "anders": 37429, + "ĠBlossom": 37430, + "ĠFINAL": 37431, + "acid": 37432, + "Ġborderline": 37433, + "Aut": 37434, + "Ġoriginate": 37435, + "Ġtransm": 37436, + "Ġbuffalo": 37437, + "atial": 37438, + "ĠCraigslist": 37439, + "Ġcredential": 37440, + "Ġdisbanded": 37441, + "Ġunprotected": 37442, + "ĠZer": 37443, + "waukee": 37444, + "diagn": 37445, + "1999": 37446, + "doc": 37447, + "ellig": 37448, + "Ġwarheads": 37449, + "ĠADS": 37450, + "verified": 37451, + "ĠHAM": 37452, + "785": 37453, + "Cu": 37454, + "Ġenorm": 37455, + "ĠSkill": 37456, + "\\": 37457, + "Ġbashing": 37458, + "Ġloudspe": 37459, + "during": 37460, + "Ġdebunked": 37461, + "adequ": 37462, + "Ġuh": 37463, + "Feed": 37464, + "ificial": 37465, + "pred": 37466, + "ĠPassing": 37467, + "Kyle": 37468, + "enance": 37469, + "ĠMex": 37470, + "itect": 37471, + "Ġcavern": 37472, + "Ġtrop": 37473, + "ĠEliot": 37474, + "753": 37475, + "Ġencountering": 37476, + "Ġsulf": 37477, + "Always": 37478, + "ĠGest": 37479, + "Ġadditive": 37480, + "Ġ278": 37481, + "Ġloops": 37482, + "liberal": 37483, + "urion": 37484, + "ĠRefresh": 37485, + "ĠDynasty": 37486, + "Ġsweaty": 37487, + "Ġsails": 37488, + "protection": 37489, + "ĠRooms": 37490, + "ĠEXT": 37491, + "few": 37492, + "ĠPaid": 37493, + "Ġ377": 37494, + "Ġcolonialism": 37495, + "Ġchuckle": 37496, + "Ġarmour": 37497, + "Ġsoftly": 37498, + "661": 37499, + "Building": 37500, + "ĠAMER": 37501, + "Ġbabe": 37502, + "Ġshif": 37503, + "Sem": 37504, + "Ġdisembark": 37505, + "ĠSubstance": 37506, + "Stone": 37507, + "Ġdialect": 37508, + "ĠAph": 37509, + "Ġspreadsheet": 37510, + "ierra": 37511, + "Ġlineage": 37512, + "ĠCust": 37513, + "ĠBabe": 37514, + "Ġwra": 37515, + "ĠMafia": 37516, + "Ġflakes": 37517, + "ĠEVER": 37518, + "cong": 37519, + "ĠCreation": 37520, + "loo": 37521, + "ĠAmpl": 37522, + "ĠSpectre": 37523, + "012": 37524, + "geons": 37525, + "Ġswarm": 37526, + "ĠPale": 37527, + "ĠSeek": 37528, + "itures": 37529, + "Ġarri": 37530, + "Ġredistribution": 37531, + "campaign": 37532, + "ĠAbility": 37533, + "579": 37534, + "ournament": 37535, + "locks": 37536, + "Ġnests": 37537, + "ĠConstantine": 37538, + "Ġwhisper": 37539, + "Ġshrouded": 37540, + "changed": 37541, + "ĠEnhanced": 37542, + "Ġ920": 37543, + "Ġglob": 37544, + "Tam": 37545, + "Ġoutwe": 37546, + "Ġilliter": 37547, + "Ġsurg": 37548, + "Nap": 37549, + "ĠAerial": 37550, + "iferation": 37551, + "Egypt": 37552, + "ERO": 37553, + "Ġantip": 37554, + "environment": 37555, + "machine": 37556, + "Ġrupture": 37557, + "treatment": 37558, + "internal": 37559, + "Ġinfiltrate": 37560, + "Ġgratification": 37561, + "Uber": 37562, + "Ġunequal": 37563, + "Ġflav": 37564, + "Lord": 37565, + "tein": 37566, + "ĠLOT": 37567, + "Ġbullshit": 37568, + "Ġoriginals": 37569, + "Ġminced": 37570, + "Ġmultiply": 37571, + "ayson": 37572, + "Ġrecomm": 37573, + "Ġreceptors": 37574, + "Ġflashlight": 37575, + "Ġinhuman": 37576, + "Future": 37577, + "Ġpuzzling": 37578, + "Ġrouters": 37579, + "Ġuncontroll": 37580, + "responsible": 37581, + "Ġcellul": 37582, + "ĠTablet": 37583, + "Ġbolted": 37584, + "Ġpermissible": 37585, + "adra": 37586, + "picture": 37587, + "ODY": 37588, + "BRE": 37589, + "Iraq": 37590, + "Total": 37591, + "rising": 37592, + "Ġ273": 37593, + "nv": 37594, + "Ġ327": 
37595, + "alysed": 37596, + "infect": 37597, + "Ġ1912": 37598, + "ĠVT": 37599, + "ĠLazarus": 37600, + "ictive": 37601, + "Bu": 37602, + "ĠNEVER": 37603, + "ĠCODE": 37604, + "ĠModified": 37605, + "fetched": 37606, + "ĠTrap": 37607, + "mob": 37608, + "Ġupkeep": 37609, + "WARD": 37610, + "Ġbrewed": 37611, + "Ġsaliva": 37612, + "Ġ1923": 37613, + "Ġsteroid": 37614, + "rather": 37615, + "ĠVER": 37616, + "Ġcontextual": 37617, + "Ont": 37618, + "ĠLSD": 37619, + "agine": 37620, + "Ġaudible": 37621, + "ĠMeta": 37622, + "erek": 37623, + "aults": 37624, + "ĠOttoman": 37625, + "ĠIncludes": 37626, + "Ġocc": 37627, + "678": 37628, + "ipple": 37629, + "Ġcontrasted": 37630, + "014": 37631, + "ĠLenin": 37632, + "Ġomega": 37633, + "885": 37634, + "civil": 37635, + "Ġoverload": 37636, + "},\"": 37637, + "Ġprogrammers": 37638, + "Ġgeometry": 37639, + "?).": 37640, + "shift": 37641, + "ĠClancy": 37642, + "nr": 37643, + "verb": 37644, + "Ġ760": 37645, + "Ġstaggered": 37646, + "Playing": 37647, + "ĠSmile": 37648, + "Ġcomplains": 37649, + "ĠSloven": 37650, + "Ġdisobedience": 37651, + "creator": 37652, + "Ġly": 37653, + "incoln": 37654, + "emp": 37655, + "Ġcrate": 37656, + "ĠPledge": 37657, + "ĠGPUs": 37658, + "protected": 37659, + "Vo": 37660, + "medium": 37661, + "Ġacet": 37662, + "603": 37663, + "478": 37664, + "469": 37665, + "Further": 37666, + "Ġsensed": 37667, + "Lock": 37668, + "Ġcrabs": 37669, + "ĠChains": 37670, + "ĠNEO": 37671, + "Ġexperimented": 37672, + "ĠRhythm": 37673, + "802": 37674, + "Ġhormonal": 37675, + "491": 37676, + "ĠMedian": 37677, + "Ġevaluates": 37678, + "ippi": 37679, + "Ġremovable": 37680, + "Ġvector": 37681, + "ilant": 37682, + "TERN": 37683, + "Ġpurch": 37684, + "ĠBind": 37685, + "athering": 37686, + "Ġcords": 37687, + "Lib": 37688, + "Ġdamned": 37689, + "orc": 37690, + "ĠEverywhere": 37691, + "Ġgorilla": 37692, + "ystem": 37693, + "fail": 37694, + "Ġecstasy": 37695, + "allion": 37696, + "Sea": 37697, + "Ġuploading": 37698, + "ĠSpecific": 37699, + "Ġreinforcement": 37700, + "cerned": 37701, + "ĠDollars": 37702, + "Twenty": 37703, + "OX": 37704, + "ADD": 37705, + "Ġbraces": 37706, + "Ġraven": 37707, + "Ġ1890": 37708, + "Ġcirculate": 37709, + "udden": 37710, + "Disney": 37711, + "ĠNope": 37712, + "ĠBagg": 37713, + "ĠBuddha": 37714, + "rael": 37715, + "urus": 37716, + "ĠKarma": 37717, + "Ġcurl": 37718, + "Ġflips": 37719, + "Ġbearer": 37720, + "Ġmisunderstand": 37721, + "Ġabras": 37722, + "ĠAssassin": 37723, + "Fact": 37724, + "Ġinterf": 37725, + "Ġvantage": 37726, + "ĠGenocide": 37727, + "Ġdeducted": 37728, + "Sep": 37729, + "McC": 37730, + "Jessica": 37731, + "ĠBackup": 37732, + "Ian": 37733, + "urnal": 37734, + "Ġlaborers": 37735, + "438": 37736, + "ĠContinuous": 37737, + "ĠNBN": 37738, + "Cool": 37739, + "mitting": 37740, + "ĠNormandy": 37741, + "Ġpurchaser": 37742, + "Ġacquainted": 37743, + "Ġblogging": 37744, + "route": 37745, + "marine": 37746, + "Ġstartled": 37747, + "6000": 37748, + "ĠRadical": 37749, + "kiss": 37750, + "ĠBlitz": 37751, + "express": 37752, + "Ġ601": 37753, + "hent": 37754, + "Ġtink": 37755, + "pires": 37756, + "launch": 37757, + "sg": 37758, + "ĠEffects": 37759, + "Ġstiffness": 37760, + "ĠAllies": 37761, + "Ġthirsty": 37762, + "Ġmyst": 37763, + "Ġlogger": 37764, + "Ġstances": 37765, + "ĠEvaluation": 37766, + "090": 37767, + "Ġproclaiming": 37768, + "Ġhypocritical": 37769, + "496": 37770, + "Ġcaus": 37771, + "ĠKappa": 37772, + "ĠLann": 37773, + "ĠScientist": 37774, + "Ġempath": 37775, + "etrical": 37776, + "lege": 37777, + "Hom": 37778, + "Aud": 37779, + 
"ĠColors": 37780, + "ĠStraw": 37781, + "each": 37782, + "ĠPatron": 37783, + "Ġnuance": 37784, + "send": 37785, + "ourney": 37786, + "ĠPhen": 37787, + "Ġamino": 37788, + "ĠSeconds": 37789, + "Sn": 37790, + "ĠCiv": 37791, + "Ġconglomer": 37792, + "Ġ411": 37793, + "versely": 37794, + "487": 37795, + "prises": 37796, + "Ġ277": 37797, + "necessary": 37798, + "Ġdope": 37799, + "Late": 37800, + "Ġrake": 37801, + "ĠBrigham": 37802, + "ogun": 37803, + "ĠSTATES": 37804, + "ĠGaal": 37805, + "Ġintellig": 37806, + "Ġglacier": 37807, + "destruct": 37808, + "ĠZucker": 37809, + "484": 37810, + "Ġ332": 37811, + "ĠArist": 37812, + "Ġprotagonists": 37813, + "Ġgraveyard": 37814, + "names": 37815, + "ĠPax": 37816, + "Ġthresholds": 37817, + "Seeing": 37818, + "Ġmunitions": 37819, + "Ġcontradicts": 37820, + "684": 37821, + "Ġ529": 37822, + "ĠConcent": 37823, + "ĠBlessed": 37824, + "Hz": 37825, + "Ġinhibit": 37826, + "Ġshenanigans": 37827, + "ĠSpear": 37828, + "Ġoverlay": 37829, + "ritis": 37830, + "ilus": 37831, + "Ġvariance": 37832, + "Ġoverpower": 37833, + "viol": 37834, + "erning": 37835, + "Ġpolarization": 37836, + "aito": 37837, + "GV": 37838, + "493": 37839, + "Keeping": 37840, + "Ġpaternity": 37841, + "ĠHappiness": 37842, + "oops": 37843, + "sb": 37844, + "xit": 37845, + "ophysical": 37846, + "Ġconclusive": 37847, + "Arch": 37848, + "Ġmiser": 37849, + "Ġsuffice": 37850, + "ĠStout": 37851, + "Ġhrs": 37852, + "643": 37853, + "Ġprincipled": 37854, + "azine": 37855, + "atorium": 37856, + "ĠFairy": 37857, + "Ġinfiltrated": 37858, + "ĠHier": 37859, + "ĠMIA": 37860, + "inders": 37861, + "Ġrebutt": 37862, + "Ġxx": 37863, + "Ġfeats": 37864, + "izzle": 37865, + "Ġ780": 37866, + "668": 37867, + "Ġrepressive": 37868, + "ĠYugoslavia": 37869, + "sole": 37870, + "704": 37871, + "ĠRPG": 37872, + "ĠTroll": 37873, + "packing": 37874, + "ĠDatabase": 37875, + "ĠVelvet": 37876, + "ĠRELEASE": 37877, + "ablish": 37878, + "smoking": 37879, + "ĠBottle": 37880, + "ĠFully": 37881, + "ĠLean": 37882, + "Ġobjectively": 37883, + "ĠFounding": 37884, + "ĠClassics": 37885, + "Ġmosaic": 37886, + "473": 37887, + "Ġrooft": 37888, + "Ġcentrally": 37889, + "Ġdismissive": 37890, + "Ġparasites": 37891, + "009": 37892, + "Ġcursed": 37893, + "Ġvex": 37894, + "Ġeconom": 37895, + "ĠBore": 37896, + "enery": 37897, + "ĠFundamental": 37898, + "ĠOmni": 37899, + "489": 37900, + "714": 37901, + "Ġforegoing": 37902, + "Ġfragment": 37903, + "oros": 37904, + "070": 37905, + "ĠFaust": 37906, + "Ġsucking": 37907, + "Ġnode": 37908, + "Ġrighteous": 37909, + "ĠPowered": 37910, + "426": 37911, + "HQ": 37912, + "Ġchronically": 37913, + "ĠBAL": 37914, + "Ġprest": 37915, + "Ġrapists": 37916, + "ĠRelationship": 37917, + "ĠCHR": 37918, + "Ġlinen": 37919, + "Ġnumerical": 37920, + "oters": 37921, + "Ġiterations": 37922, + "ttes": 37923, + "ĠENTER": 37924, + "Ġrabbi": 37925, + "Ġhoard": 37926, + "Ġmerciless": 37927, + "Ġrobes": 37928, + "ĠSpray": 37929, + "Ġadvers": 37930, + "ilantro": 37931, + "483": 37932, + "Ġfungus": 37933, + "Ġalcoholism": 37934, + "anasia": 37935, + "ĠCruiser": 37936, + "Ġmorals": 37937, + "cision": 37938, + "measures": 37939, + "Ġsabot": 37940, + "Ġrecol": 37941, + "ĠSaur": 37942, + "ĠError": 37943, + "Ġmysteriously": 37944, + "sle": 37945, + "Ġfeminists": 37946, + "д": 37947, + "ackle": 37948, + "ĠMarxist": 37949, + "Ġselves": 37950, + "Ġdoorway": 37951, + "Ġdiscard": 37952, + "Ġbandits": 37953, + "ĠDive": 37954, + "ameless": 37955, + "TRY": 37956, + "Ġgull": 37957, + "Ġrepublican": 37958, + "sr": 37959, + "ĠDynamo": 37960, + "Ġembryo": 37961, + 
"MENTS": 37962, + "ĠLOW": 37963, + "Ġ319": 37964, + "Ġgly": 37965, + "Ġcowork": 37966, + "Coll": 37967, + "Ġcris": 37968, + "ĠBanana": 37969, + "reality": 37970, + "Ġmobilization": 37971, + "unal": 37972, + "Updated": 37973, + "Crew": 37974, + "ĠGideon": 37975, + "Ġvines": 37976, + "Ġknitting": 37977, + "Ġdag": 37978, + "ĠSurv": 37979, + "Ġvacc": 37980, + "Ġimpulses": 37981, + "Northern": 37982, + "Ġnanop": 37983, + "allows": 37984, + "UTH": 37985, + "Ġflashbacks": 37986, + "alsa": 37987, + "Ġ282": 37988, + "Ġtransmissions": 37989, + "ĠAlmighty": 37990, + "Office": 37991, + "ĠBride": 37992, + "ĠBeasts": 37993, + "othy": 37994, + "ĠClouds": 37995, + "ĠDyn": 37996, + "ĠJolly": 37997, + "District": 37998, + "Ġveget": 37999, + "Ġantit": 38000, + "ĠSmoking": 38001, + "hess": 38002, + "Ġcompose": 38003, + "Ġreligiously": 38004, + "ĠHY": 38005, + "Ġfluorescent": 38006, + "rame": 38007, + "ĠMeier": 38008, + "ĠSQ": 38009, + "benefit": 38010, + "Thirty": 38011, + "559": 38012, + "ĠCance": 38013, + "586": 38014, + "Ġgrouped": 38015, + "Ġphys": 38016, + "Ġrebellious": 38017, + "ĠBASE": 38018, + "chid": 38019, + "582": 38020, + "ĠLessons": 38021, + "ĠWonderful": 38022, + "ODE": 38023, + "uctions": 38024, + "Ġbarbaric": 38025, + "rahim": 38026, + "635": 38027, + "Ġcloves": 38028, + "ĠNIH": 38029, + "ossession": 38030, + "Employ": 38031, + "Ġliberate": 38032, + "Gro": 38033, + "Ġmagician": 38034, + "ountain": 38035, + "FORM": 38036, + "533": 38037, + "Ġunpredict": 38038, + "rity": 38039, + "Ġfaked": 38040, + "plets": 38041, + "ppelin": 38042, + "Living": 38043, + "Ġnearer": 38044, + "Ġsuperiors": 38045, + "Ur": 38046, + "Ġheroism": 38047, + "Ġbearded": 38048, + "006": 38049, + "Cole": 38050, + "1970": 38051, + "Ġsill": 38052, + "ĠReduce": 38053, + "OLOG": 38054, + "onel": 38055, + "Billy": 38056, + "ĠPainter": 38057, + "ansas": 38058, + "Ġintermediary": 38059, + "trump": 38060, + "ĠMith": 38061, + "otom": 38062, + "434": 38063, + "Ġterrit": 38064, + "Wa": 38065, + "Ġsuprem": 38066, + "Rh": 38067, + "liction": 38068, + "ĠDEAD": 38069, + "Ġbothers": 38070, + "503": 38071, + "Ġfrogs": 38072, + "Ġsprinkled": 38073, + "Ġnil": 38074, + "628": 38075, + "Private": 38076, + "ĠKGB": 38077, + "Ġoverriding": 38078, + "Ġdeceived": 38079, + "698": 38080, + "idium": 38081, + "Ġseeker": 38082, + "Final": 38083, + "Ġsubconscious": 38084, + "Ġwom": 38085, + "Ġcass": 38086, + "Ġchicks": 38087, + "Ġverifying": 38088, + "ective": 38089, + "inia": 38090, + "ĠDetection": 38091, + "MH": 38092, + "fortable": 38093, + "ĠISPs": 38094, + "Ġcrumble": 38095, + "ĠRecap": 38096, + "598": 38097, + "ummies": 38098, + "export": 38099, + "Irish": 38100, + "Ġlil": 38101, + "ĠRapt": 38102, + "ĠRIGHT": 38103, + "Ġanecdotal": 38104, + "Ġpiercing": 38105, + "deck": 38106, + "Liber": 38107, + "Books": 38108, + "Ġassassin": 38109, + "Tur": 38110, + "revolution": 38111, + "ĠSheep": 38112, + "ĠPublishers": 38113, + "EMS": 38114, + "iosis": 38115, + "finder": 38116, + "ĠCuriosity": 38117, + "ARB": 38118, + "ĠConvers": 38119, + "IVES": 38120, + "clave": 38121, + "ĠChaos": 38122, + "ĠMim": 38123, + "ĠCostume": 38124, + "Ġtwe": 38125, + "Ġintim": 38126, + "757": 38127, + "berto": 38128, + "Ġ261": 38129, + "VPN": 38130, + "cribed": 38131, + "ĠVerb": 38132, + "cb": 38133, + "Ġaxle": 38134, + "Ġsandwic": 38135, + "Ice": 38136, + "ĠThermal": 38137, + "654": 38138, + "709": 38139, + "ĠPact": 38140, + "ĠEnsure": 38141, + "izable": 38142, + "497": 38143, + "Ġbloodstream": 38144, + "Aw": 38145, + "Ġleakage": 38146, + "Ġalleg": 38147, + "ĠMelody": 38148, + 
"681": 38149, + "Austin": 38150, + "428": 38151, + "Ġsummarized": 38152, + "ĠDefendants": 38153, + "ĠVader": 38154, + "Ê": 38155, + "Ġ1880": 38156, + "Ġassemb": 38157, + "YOU": 38158, + "GREEN": 38159, + "jury": 38160, + "4000": 38161, + "Ġvenerable": 38162, + "Ġcomputational": 38163, + "Ġperpetuate": 38164, + "Ġtorpedo": 38165, + "Ġaborted": 38166, + "Ġrhetorical": 38167, + "ĠOvert": 38168, + "Ġacknowledgment": 38169, + "essment": 38170, + "ĠIGN": 38171, + "ĠSheen": 38172, + "571": 38173, + "Ġcontag": 38174, + "Ġcultiv": 38175, + "Ġspawn": 38176, + "mess": 38177, + "Dur": 38178, + "Ġvortex": 38179, + "ixties": 38180, + "ĠBlow": 38181, + "Sum": 38182, + "Åį": 38183, + "Rom": 38184, + "ĠRadeon": 38185, + "Fed": 38186, + "Ġameric": 38187, + "ĠAnth": 38188, + "Ġantic": 38189, + "Ġfortress": 38190, + "Cold": 38191, + "ĠPredict": 38192, + "Fake": 38193, + "Ġilluminate": 38194, + "Find": 38195, + "Ġintellectually": 38196, + "Ġgon": 38197, + "alker": 38198, + "Ġinvoice": 38199, + "IELD": 38200, + "Ġfools": 38201, + "ĠEnding": 38202, + "-(": 38203, + "Ġalk": 38204, + "ĠControlled": 38205, + "Ġpurposefully": 38206, + "ĠChronic": 38207, + "Ġrele": 38208, + "ĠOps": 38209, + "Party": 38210, + "ethnic": 38211, + "ĠSpecifications": 38212, + "ffee": 38213, + "ĠTeach": 38214, + "ulas": 38215, + "Ġenslaved": 38216, + "onomy": 38217, + "Ġtenets": 38218, + "Ġammonia": 38219, + "Ġ1913": 38220, + "Ġdripping": 38221, + "612": 38222, + "659": 38223, + "ĠSagan": 38224, + "Ġinaccur": 38225, + "Ġabol": 38226, + "ĠLIKE": 38227, + "Ġvisualization": 38228, + "learn": 38229, + "anon": 38230, + "cipline": 38231, + "Ġadaptations": 38232, + "Ġwaiter": 38233, + "nergy": 38234, + "507": 38235, + "ĠDK": 38236, + "YD": 38237, + "Ġpedest": 38238, + "Sense": 38239, + "ĠObst": 38240, + "Ġresurrection": 38241, + "ĠSPECIAL": 38242, + "Unlike": 38243, + "Ġlia": 38244, + "Ġpersuasive": 38245, + "iatrics": 38246, + "ONEY": 38247, + "esthetic": 38248, + "494": 38249, + "zik": 38250, + "Ġfract": 38251, + "ĠOutput": 38252, + "ĠBers": 38253, + "rozen": 38254, + "ĠRevis": 38255, + "Ġdraconian": 38256, + "Words": 38257, + "asions": 38258, + "ĠClintons": 38259, + "CU": 38260, + "History": 38261, + "Ġtwilight": 38262, + "iform": 38263, + "Ġdispl": 38264, + "progress": 38265, + "ĠIO": 38266, + "Ġcannibal": 38267, + "Michelle": 38268, + "Ġnerv": 38269, + "Ġcontexts": 38270, + "ĠHorses": 38271, + "Ġanatomy": 38272, + "ĠLegislation": 38273, + "ĠBloody": 38274, + "Ġunwittingly": 38275, + "Ġinquired": 38276, + "ĠZip": 38277, + "ĠDesigns": 38278, + "Ġirritating": 38279, + "Ġunison": 38280, + "ĠRG": 38281, + "aviour": 38282, + "Ġpseudo": 38283, + "ĠVenom": 38284, + "Ġobscured": 38285, + "Ġner": 38286, + "uked": 38287, + "ORGE": 38288, + "Ġmomentarily": 38289, + "olyn": 38290, + "Syrian": 38291, + "Ġmicroscopic": 38292, + "Ġmistress": 38293, + "Less": 38294, + "Ġawoke": 38295, + "Ġtutor": 38296, + "esome": 38297, + "ollar": 38298, + "egg": 38299, + "UTE": 38300, + "Buzz": 38301, + "Ġattainment": 38302, + "Ġdiscriminating": 38303, + "::": 38304, + "Ġ525": 38305, + "azard": 38306, + "ĠBrist": 38307, + "oras": 38308, + "Ġveterin": 38309, + "jing": 38310, + "idon": 38311, + "ĠAustral": 38312, + "arious": 38313, + "ĠGrav": 38314, + "anol": 38315, + "ĠQuran": 38316, + "Ġbleach": 38317, + "588": 38318, + "ĠOsw": 38319, + "Ġdiffered": 38320, + "typ": 38321, + "ĠSIL": 38322, + "failed": 38323, + "436": 38324, + "Ġpalms": 38325, + "ĠFail": 38326, + "idespread": 38327, + "Ġchap": 38328, + "ĠIMAGES": 38329, + "ACP": 38330, + "matched": 38331, + "Ġjaws": 38332, + 
"MHz": 38333, + "Nik": 38334, + "ĠHume": 38335, + "OSH": 38336, + "Ġpresume": 38337, + "secut": 38338, + "ĠDied": 38339, + "ĠBreat": 38340, + "gins": 38341, + "prison": 38342, + "ĠUR": 38343, + "ĠROS": 38344, + "isitions": 38345, + "Ġpelvic": 38346, + "exclusive": 38347, + "522": 38348, + "689": 38349, + "FN": 38350, + "Ġener": 38351, + "Ġdispers": 38352, + "Ġcohorts": 38353, + "shut": 38354, + "ĠLoad": 38355, + "needs": 38356, + "azaki": 38357, + "inoa": 38358, + "Inside": 38359, + "usra": 38360, + "ighters": 38361, + "Ġ271": 38362, + "Ġsubordinate": 38363, + "ĠHOL": 38364, + "ĠGlow": 38365, + "Ġincred": 38366, + "ĠMadame": 38367, + "Ġoats": 38368, + "Ġdeviation": 38369, + "ĠApproach": 38370, + "Ġnarc": 38371, + "bart": 38372, + "bole": 38373, + "ĠSHE": 38374, + "effects": 38375, + "ĠADA": 38376, + "Ġmuse": 38377, + "Squ": 38378, + "Ġneuroscience": 38379, + "ĠValues": 38380, + "engu": 38381, + "Ġdosage": 38382, + "Ġwhispers": 38383, + "Ġnaughty": 38384, + "ĠFarming": 38385, + "Recently": 38386, + "Ġrelapse": 38387, + "rentice": 38388, + "UGH": 38389, + "Ġdarkened": 38390, + "appings": 38391, + "ĠSlaughter": 38392, + "ĠAnim": 38393, + "Ġovertly": 38394, + "poses": 38395, + "Ġdeficient": 38396, + "Ġnecks": 38397, + "Iron": 38398, + "Ġphysiological": 38399, + "ĠLiang": 38400, + "Ġlear": 38401, + "Ġcelestial": 38402, + "Ġpistols": 38403, + "Ġeyebrow": 38404, + "915": 38405, + "ratch": 38406, + "cephal": 38407, + "ĠPSU": 38408, + "Ġphotograp": 38409, + "ĠGaul": 38410, + "Ġuncontrolled": 38411, + "ĠJoined": 38412, + "652": 38413, + "itory": 38414, + "Ġ274": 38415, + "GAN": 38416, + "imester": 38417, + "essional": 38418, + "Ø©": 38419, + "Ġuncons": 38420, + "THER": 38421, + "Ġpaternal": 38422, + "Zero": 38423, + "ugen": 38424, + "538": 38425, + "Ġende": 38426, + "Ġ505": 38427, + "movie": 38428, + "Lind": 38429, + "Ġscorn": 38430, + "ulty": 38431, + "Ġpesky": 38432, + "Ġ8000": 38433, + "677": 38434, + "Ġhomophobia": 38435, + "ranch": 38436, + "Ġnarciss": 38437, + "ĠVoyager": 38438, + "ĠHELP": 38439, + "528": 38440, + "edly": 38441, + "Ġdetract": 38442, + "Hope": 38443, + "787": 38444, + "ĠMerlin": 38445, + "Ġgrids": 38446, + "KI": 38447, + "Mu": 38448, + "ĠSelected": 38449, + "select": 38450, + "ĠModer": 38451, + "ĠFeet": 38452, + "Ġrename": 38453, + "intensity": 38454, + "Wilson": 38455, + "Ġ414": 38456, + "leave": 38457, + "Ready": 38458, + "intuitive": 38459, + "Ġmeager": 38460, + "Franc": 38461, + "DH": 38462, + "Ġrhy": 38463, + "ĠPillar": 38464, + "ĠDOE": 38465, + "minist": 38466, + "ĠGrave": 38467, + "isible": 38468, + "Ess": 38469, + "Ġempt": 38470, + "Ġpatched": 38471, + "ĠAbortion": 38472, + "rals": 38473, + "Ġdow": 38474, + "Ġcrawled": 38475, + "igrate": 38476, + "Virginia": 38477, + "Ġconting": 38478, + "Ġorphans": 38479, + "ĠCrimean": 38480, + "Ġdyn": 38481, + "Ġshadowy": 38482, + "sound": 38483, + "ailable": 38484, + "Ġ293": 38485, + "vm": 38486, + "Ġaccompanies": 38487, + "Meanwhile": 38488, + "JR": 38489, + "ĠDirections": 38490, + "Ġadolescence": 38491, + "Ġpenetrated": 38492, + "bars": 38493, + "Rev": 38494, + "Ta": 38495, + "ĠSkywalker": 38496, + "ĠFires": 38497, + "concept": 38498, + "ĠSIG": 38499, + "554": 38500, + "currently": 38501, + "Ġ----------------": 38502, + "ĠWHITE": 38503, + "767": 38504, + "rors": 38505, + "PDF": 38506, + "Ġcasing": 38507, + "673": 38508, + "Ġdisapprove": 38509, + "1800": 38510, + "ĠWeed": 38511, + "Ġinhib": 38512, + "Ġmorbid": 38513, + "433": 38514, + "Ġawfully": 38515, + "Ts": 38516, + "Maria": 38517, + "Ġillusions": 38518, + "Ġtotalitarian": 
38519, + "ollo": 38520, + "Ġsuppl": 38521, + "Ġsarc": 38522, + "ĠRGB": 38523, + "Ġlauncher": 38524, + "Ġbadass": 38525, + "ĠSyd": 38526, + "Ġscrape": 38527, + "ĠCLA": 38528, + "Ġcircum": 38529, + "657": 38530, + "Ġnucleus": 38531, + "ĠUkip": 38532, + "Ġmodem": 38533, + "ĠJou": 38534, + "adders": 38535, + "Ġwiser": 38536, + "thereal": 38537, + "Ġdemocr": 38538, + "ĠInvalid": 38539, + "Mine": 38540, + "Ġmanifested": 38541, + "meat": 38542, + "MORE": 38543, + "Larry": 38544, + "acements": 38545, + "Ġspecimen": 38546, + "results": 38547, + "Ġswallowing": 38548, + "Ġpigeon": 38549, + "tons": 38550, + "ĠLose": 38551, + "Ġquartz": 38552, + "Ġintraven": 38553, + "Ġ412": 38554, + "alyst": 38555, + "Ġengraved": 38556, + "client": 38557, + "ĠADV": 38558, + "ĠShared": 38559, + "Ġrites": 38560, + "Ġhysterical": 38561, + "ĠHUM": 38562, + "Cow": 38563, + "orously": 38564, + "Ġpleasures": 38565, + "democratic": 38566, + "Ġamph": 38567, + "Ġnib": 38568, + "rieg": 38569, + "Ġcalculates": 38570, + "Ġfrying": 38571, + "favorite": 38572, + "Ġantim": 38573, + "ĠDoom": 38574, + "monitor": 38575, + "Want": 38576, + "Ġtemplates": 38577, + "558": 38578, + "iever": 38579, + "Photos": 38580, + ",,": 38581, + "ĠSync": 38582, + "Ġconfronts": 38583, + "kept": 38584, + "dt": 38585, + "ĠERROR": 38586, + "ETF": 38587, + "578": 38588, + "Ġspor": 38589, + "718": 38590, + "ivation": 38591, + "ĠHaskell": 38592, + "Ca": 38593, + "Ġdick": 38594, + "Ġcivilized": 38595, + "Ġblah": 38596, + "enough": 38597, + "Ġoccup": 38598, + "Ġ334": 38599, + "antically": 38600, + "584": 38601, + "ĠDolphin": 38602, + "ĠStarts": 38603, + "Ġfanatic": 38604, + "ت": 38605, + "imag": 38606, + "Ġmicrobial": 38607, + "freedom": 38608, + "cult": 38609, + "wra": 38610, + "Ġ423": 38611, + "RIPT": 38612, + "601": 38613, + "BTC": 38614, + "atmeal": 38615, + "653": 38616, + "agogue": 38617, + "Ġderives": 38618, + "Wolf": 38619, + "466": 38620, + "Susan": 38621, + "ĠPassage": 38622, + "ARDS": 38623, + "Guy": 38624, + "Council": 38625, + "Ġerotic": 38626, + "pure": 38627, + "ĠMemories": 38628, + "ĠWikileaks": 38629, + "elines": 38630, + "Ġanth": 38631, + "Capital": 38632, + "807": 38633, + "ĠEggs": 38634, + "cv": 38635, + "ctors": 38636, + "Ġshatter": 38637, + "Ġesteem": 38638, + "vity": 38639, + "ĠVulcan": 38640, + "effic": 38641, + "ĠBELOW": 38642, + "Ġplatoon": 38643, + "Commun": 38644, + "oustic": 38645, + "Amy": 38646, + "Freedom": 38647, + "ppo": 38648, + "Ja": 38649, + "ĠConan": 38650, + "Ġinsepar": 38651, + "scene": 38652, + "Ġurinary": 38653, + "gain": 38654, + "Hillary": 38655, + "ĠTAM": 38656, + "Hist": 38657, + "Ġmechan": 38658, + "ĠRobots": 38659, + "Leader": 38660, + "Ġcartridges": 38661, + "Ġwhistleblowers": 38662, + "ĠSPL": 38663, + "Labour": 38664, + "unction": 38665, + "Ġfaithfully": 38666, + "Ġcoarse": 38667, + "Ġsynth": 38668, + "ĠLV": 38669, + "Ġjustifying": 38670, + "439": 38671, + "Victoria": 38672, + "ĠProceedings": 38673, + "alogy": 38674, + "Ġmorph": 38675, + "Ġcove": 38676, + "Ġlaughable": 38677, + "ECA": 38678, + "Ġ670": 38679, + "aturated": 38680, + "ĠSouls": 38681, + "ĠSleeping": 38682, + "Ly": 38683, + "ĠRetro": 38684, + "Ġastroph": 38685, + "Ġseism": 38686, + "atherine": 38687, + "ĠHercules": 38688, + "Ġfuse": 38689, + "ĠHL": 38690, + "Ġunintentionally": 38691, + "ĠRé": 38692, + "iery": 38693, + "Ġconco": 38694, + "Ġeras": 38695, + "recent": 38696, + "Ġlaunchers": 38697, + "ĠVolcano": 38698, + "ĠJace": 38699, + "Ġterminating": 38700, + "ĠIde": 38701, + "zee": 38702, + "asonic": 38703, + "itone": 38704, + "Ġnutshell": 38705, + 
"Ġbip": 38706, + "dies": 38707, + "Ġ286": 38708, + "Ġnood": 38709, + "ĠFathers": 38710, + "alys": 38711, + "Ġtheor": 38712, + "???": 38713, + "548": 38714, + "674": 38715, + "efined": 38716, + "806": 38717, + "âĻ": 38718, + "697": 38719, + "Ġdecap": 38720, + "ĠFN": 38721, + "Ġbureaucr": 38722, + "ĠGoat": 38723, + "ĠShang": 38724, + "Ġsemin": 38725, + "Ġthroats": 38726, + "Ġmoth": 38727, + "herer": 38728, + "Democratic": 38729, + "ixtures": 38730, + "impl": 38731, + "ĠLogo": 38732, + "ortunate": 38733, + "Ġclumsy": 38734, + "Ġinnocuous": 38735, + "ĠBlend": 38736, + "abulary": 38737, + "ĠFaces": 38738, + "Ġpornographic": 38739, + "px": 38740, + "Information": 38741, + "Ġfluoride": 38742, + "Ġatroc": 38743, + "Ġdelta": 38744, + "whatever": 38745, + "ossier": 38746, + "ĠNoir": 38747, + "ĠYao": 38748, + "551": 38749, + "undred": 38750, + "Ġmillennium": 38751, + "Ġferal": 38752, + "Ġconvinc": 38753, + "cano": 38754, + "imsy": 38755, + "angles": 38756, + "Ġsterile": 38757, + "ĠMenu": 38758, + "779": 38759, + "ĠCrack": 38760, + "Ġabundantly": 38761, + "ĠmL": 38762, + "Ġinfiltration": 38763, + "ĠDefinition": 38764, + "733": 38765, + "oubt": 38766, + "Ġorbital": 38767, + "Ġpiss": 38768, + "Ġbeet": 38769, + "679": 38770, + "Ġcounteract": 38771, + "ĠALE": 38772, + "ulative": 38773, + "crew": 38774, + "Ġliberating": 38775, + "ĠDull": 38776, + "Speaking": 38777, + "Sadly": 38778, + "Ġmisfortune": 38779, + "Ġdolphin": 38780, + "557": 38781, + "Ġbould": 38782, + "ĠTorah": 38783, + "ĠConfederacy": 38784, + "421": 38785, + "Ġorbits": 38786, + "ocused": 38787, + "beer": 38788, + "Rand": 38789, + "ĠORIG": 38790, + "Ġmuc": 38791, + "LER": 38792, + "ĠMisty": 38793, + "Ġinexpl": 38794, + "Ġreptiles": 38795, + "Ġaven": 38796, + "blocking": 38797, + "ĠPASS": 38798, + "Ġarisen": 38799, + "ĠMock": 38800, + "Ġops": 38801, + "Ġshin": 38802, + "524": 38803, + "Ġdigestion": 38804, + "Soft": 38805, + "irect": 38806, + "POL": 38807, + "ĠSpell": 38808, + "Level": 38809, + "Ġhex": 38810, + "Ġbitcoins": 38811, + "ĠHungry": 38812, + "VL": 38813, + "ĠRealm": 38814, + "RELATED": 38815, + "Delta": 38816, + "Pri": 38817, + "Ġrejoice": 38818, + "ĠLatter": 38819, + "LG": 38820, + "Ġstupidity": 38821, + "Ġdonkey": 38822, + "nova": 38823, + "Vill": 38824, + "Ġdecomp": 38825, + "Ġexternally": 38826, + "Ġsequest": 38827, + "815": 38828, + "Ġshortcut": 38829, + "riminal": 38830, + "Hun": 38831, + "EH": 38832, + "Ġregiment": 38833, + "Case": 38834, + "definition": 38835, + "Ġappendix": 38836, + "ĠPlayed": 38837, + "associated": 38838, + "izens": 38839, + "ĠVag": 38840, + "Ġflung": 38841, + "Ġfru": 38842, + "Ġcoil": 38843, + "________________________": 38844, + "Ġselects": 38845, + "Ġsolves": 38846, + "aea": 38847, + "985": 38848, + "Tomorrow": 38849, + "Ġsear": 38850, + "APE": 38851, + "492": 38852, + "Ġenlightened": 38853, + "Ġnonexistent": 38854, + "ĠPotato": 38855, + "Ghost": 38856, + "Ġrichness": 38857, + "ĠKarin": 38858, + "Ġfamilial": 38859, + "ĠJA": 38860, + "Regardless": 38861, + "Ġepis": 38862, + "GD": 38863, + "Ġinsanely": 38864, + "ĠPhill": 38865, + "Block": 38866, + "Finding": 38867, + "omal": 38868, + "Ġdecipher": 38869, + "ĠSwap": 38870, + "derived": 38871, + "ĠOFFIC": 38872, + "Support": 38873, + "Ġnylon": 38874, + "Ġexaggeration": 38875, + "Ġevangelicals": 38876, + "Ġbearings": 38877, + "587": 38878, + "Ġlocale": 38879, + "Ġpowerfully": 38880, + "Ġappropriated": 38881, + "itates": 38882, + "irlfriend": 38883, + "cule": 38884, + "ĠSomewhere": 38885, + "747": 38886, + "ĠInteresting": 38887, + "464": 38888, + "Ġelong": 
38889, + "Ġdegrade": 38890, + "rafted": 38891, + "Ġtutorials": 38892, + "905": 38893, + "ĠIntervention": 38894, + "Ġuniqueness": 38895, + "Ġ284": 38896, + "Ġexplorers": 38897, + "Ġnucle": 38898, + "ĠMillenn": 38899, + "511": 38900, + "ĠReneg": 38901, + "Ġexecut": 38902, + "urai": 38903, + "leon": 38904, + "Ġdeserts": 38905, + "ĠCig": 38906, + "Ġsuggestive": 38907, + "instead": 38908, + "Ġlousy": 38909, + "Ġenigmatic": 38910, + "594": 38911, + "Know": 38912, + "rollment": 38913, + "ipher": 38914, + "Ġhumanities": 38915, + "Ġmodifying": 38916, + ".....": 38917, + "Ġdegraded": 38918, + "Ġsuppressing": 38919, + "Ġeman": 38920, + "abouts": 38921, + "functional": 38922, + "ĠOU": 38923, + "ĠRelax": 38924, + "786": 38925, + "esses": 38926, + "ĠLogin": 38927, + "spec": 38928, + "ĠWWF": 38929, + "Ġ364": 38930, + "ĠIsis": 38931, + "Wisconsin": 38932, + "Ġequival": 38933, + "ĠCollector": 38934, + "ibilities": 38935, + "malink": 38936, + "acea": 38937, + "Ġchained": 38938, + "Ġarist": 38939, + "Ġdisadvantages": 38940, + "ĠBrus": 38941, + "limits": 38942, + "ĠDmit": 38943, + "544": 38944, + "ĠRecipe": 38945, + "Ġhabitual": 38946, + ".):": 38947, + "ĠPRODUCT": 38948, + "772": 38949, + "Ġrept": 38950, + "Ġpathology": 38951, + "Ġresurrected": 38952, + "uders": 38953, + "Ġlingu": 38954, + "Ġdenomination": 38955, + "Ġfirewall": 38956, + "scient": 38957, + "Ġvaliant": 38958, + "Kansas": 38959, + "516": 38960, + "Ġcontemporaries": 38961, + "Roman": 38962, + "Ġaccompan": 38963, + "Ġantennas": 38964, + "ĠXan": 38965, + "Ġelectromagnetic": 38966, + "ĠNek": 38967, + "alien": 38968, + "indle": 38969, + "Ġgraphene": 38970, + "Ġgraceful": 38971, + "syn": 38972, + "ĠBosh": 38973, + "Ġ1908": 38974, + "Ġsuccumb": 38975, + "Technology": 38976, + "Ġtoxin": 38977, + "myra": 38978, + "essert": 38979, + "Hell": 38980, + "Gil": 38981, + "Ġdiarr": 38982, + "imeters": 38983, + "Ġexplo": 38984, + "Ġgeometric": 38985, + "ĠNavigation": 38986, + "cern": 38987, + "Ġprogrammer": 38988, + "oÄŁan": 38989, + "Ġdodging": 38990, + "ĠLU": 38991, + "573": 38992, + "inters": 38993, + "Ġserum": 38994, + "Ġuber": 38995, + "Ġmanga": 38996, + "762": 38997, + "ĠOccasionally": 38998, + "437": 38999, + "ĠTheme": 39000, + "Ġimmature": 39001, + "Ġactivating": 39002, + "ĠTruly": 39003, + "د": 39004, + "osion": 39005, + "Age": 39006, + "TIME": 39007, + "Silver": 39008, + "sand": 39009, + "ulnerable": 39010, + "Ġcram": 39011, + "Large": 39012, + "ĠAnger": 39013, + "icators": 39014, + "431": 39015, + "ĠHonest": 39016, + "zip": 39017, + "Ġdism": 39018, + "Ġfades": 39019, + "ĠPik": 39020, + "Ast": 39021, + "sequent": 39022, + "Ġunsigned": 39023, + "xious": 39024, + "creation": 39025, + "Ġ395": 39026, + "ottenham": 39027, + "Ġundesirable": 39028, + "ugal": 39029, + "ĠDivide": 39030, + "lp": 39031, + "563": 39032, + "ĠPOP": 39033, + "ĠCET": 39034, + "session": 39035, + "Ġoccurrences": 39036, + "chu": 39037, + "ĠACS": 39038, + "ĠProsecut": 39039, + "Ġhypnot": 39040, + "rely": 39041, + "ERG": 39042, + "Ven": 39043, + "Republicans": 39044, + "inez": 39045, + "ĠImplementation": 39046, + "Ġsprang": 39047, + "Ġobs": 39048, + "Defense": 39049, + "Ġunexpl": 39050, + "ĠPAGE": 39051, + "ĠTent": 39052, + "ĠNeurolog": 39053, + "Ġintuition": 39054, + "759": 39055, + "Ġterrestrial": 39056, + "Ġmorphine": 39057, + "Ġ.\"": 39058, + "ĠHydra": 39059, + "651": 39060, + "Ġneoliberal": 39061, + "683": 39062, + "Ġabnormalities": 39063, + "quant": 39064, + "Ġmonastery": 39065, + "jac": 39066, + "ĠReaction": 39067, + "Ġcontraceptive": 39068, + "ĠBalls": 39069, + "Ġapost": 39070, + 
"676": 39071, + "ĠHELL": 39072, + "approximately": 39073, + "Ġvibrations": 39074, + "COR": 39075, + "ĠCPUs": 39076, + "Ġcontin": 39077, + "Ġsemblance": 39078, + "Ġshorth": 39079, + "tip": 39080, + "ĠChips": 39081, + "makes": 39082, + "Ġprett": 39083, + "Ġconspicuous": 39084, + "ĠAmp": 39085, + "Ġvisualize": 39086, + "Hu": 39087, + "sorry": 39088, + "nai": 39089, + "ĠArcade": 39090, + "rimination": 39091, + "obin": 39092, + "Ġvampire": 39093, + "773": 39094, + "ĠCaucasus": 39095, + "Medic": 39096, + "ĠGitHub": 39097, + "ĠWicked": 39098, + "ĠFet": 39099, + "Krist": 39100, + "998": 39101, + "Ġfrontal": 39102, + "Ġ283": 39103, + "ndum": 39104, + "Ġidols": 39105, + "ĠMSG": 39106, + "ĠShuttle": 39107, + "ĠTowards": 39108, + "Ġsaturation": 39109, + "Ġ®": 39110, + "Ġcradle": 39111, + "eteen": 39112, + "Ġprejudices": 39113, + "separ": 39114, + "ĠSoda": 39115, + "ynam": 39116, + "Ġnause": 39117, + "Ġpenetrating": 39118, + "ĠVampire": 39119, + "Ġmole": 39120, + "Ġgoogle": 39121, + "earance": 39122, + "583": 39123, + "Ġdomin": 39124, + "727": 39125, + "Kind": 39126, + "Ġcust": 39127, + "manuel": 39128, + "ĠAstro": 39129, + "Roger": 39130, + "JO": 39131, + "killed": 39132, + "ĠDisapp": 39133, + "833": 39134, + "ĠEQU": 39135, + "Ġprecedence": 39136, + "mberg": 39137, + "641": 39138, + "ĠRoller": 39139, + "Ġspecifying": 39140, + "035": 39141, + "phil": 39142, + "Ġpowdered": 39143, + "Ġblot": 39144, + "Ġdeline": 39145, + "Bruce": 39146, + "536": 39147, + "Ġpim": 39148, + "leasing": 39149, + "vacc": 39150, + "RN": 39151, + "Ġspacing": 39152, + "Ġhangar": 39153, + "ĠPlot": 39154, + "537": 39155, + "legraph": 39156, + "596": 39157, + "Ġpolyg": 39158, + "doi": 39159, + "ĠNerd": 39160, + "installed": 39161, + "ĠSeeds": 39162, + "ĠPlays": 39163, + "ĠRomance": 39164, + "layer": 39165, + "Ġunsu": 39166, + "Ġcurric": 39167, + "Mi": 39168, + "restrial": 39169, + "ĠNiño": 39170, + "ĠProper": 39171, + "Ġpores": 39172, + "Giving": 39173, + "aeus": 39174, + "Middle": 39175, + "liber": 39176, + "Ġcombatants": 39177, + "ĠBulk": 39178, + "Ġ502": 39179, + "Ġstru": 39180, + "ĠLonely": 39181, + "Companies": 39182, + "inence": 39183, + "Autom": 39184, + "Ġfearsome": 39185, + "Ġsummar": 39186, + "Ġrotated": 39187, + "ĠPLA": 39188, + "ĠFAT": 39189, + "572": 39190, + "ĠSkies": 39191, + "iour": 39192, + "Ġintimately": 39193, + "amera": 39194, + "Ġ475": 39195, + "623": 39196, + "Ġirrig": 39197, + "Ġboosters": 39198, + "Ġtransmitting": 39199, + "DOWN": 39200, + "ĠAble": 39201, + "Ġfuriously": 39202, + "spirit": 39203, + "Ġgrun": 39204, + "Ġbible": 39205, + "ĠAdmir": 39206, + "Ġ§": 39207, + "ĠRaise": 39208, + "Ġflowering": 39209, + "uxe": 39210, + "ravis": 39211, + "urther": 39212, + "ĠScientology": 39213, + "pathy": 39214, + "Ġruth": 39215, + "Ġtempor": 39216, + "Ġwhispered": 39217, + "ogly": 39218, + "coord": 39219, + "chlor": 39220, + "processing": 39221, + "iott": 39222, + "ĠTY": 39223, + "wik": 39224, + "abolic": 39225, + "ĠUnable": 39226, + "ĠLiterary": 39227, + "ĠpH": 39228, + "Eastern": 39229, + "Craig": 39230, + "Fear": 39231, + "Ġinventions": 39232, + "ĠNost": 39233, + "Ġafflicted": 39234, + "ĠSwamp": 39235, + "INST": 39236, + "Jerry": 39237, + "Ġprope": 39238, + "ĠLancet": 39239, + "Ġrefres": 39240, + "ĠPrinciples": 39241, + "ĠLys": 39242, + "ERAL": 39243, + "addock": 39244, + "Ġcynicism": 39245, + "Ġmassacres": 39246, + "roo": 39247, + "Ġcollagen": 39248, + "Johnny": 39249, + "Keith": 39250, + "Italian": 39251, + "553": 39252, + "Dad": 39253, + "Neither": 39254, + "cler": 39255, + "ilers": 39256, + "Ġassass": 39257, + 
"Travel": 39258, + "672": 39259, + "Ġeaves": 39260, + "ATOR": 39261, + "Ġoily": 39262, + "581": 39263, + "ateful": 39264, + "728": 39265, + "Ġchiefly": 39266, + "tical": 39267, + "enes": 39268, + "ĠWouldn": 39269, + "ĠJacket": 39270, + "ĠSuit": 39271, + "Ġindustrialized": 39272, + "ĠNose": 39273, + "ĠSECTION": 39274, + "Ġredd": 39275, + "Ġcavity": 39276, + "Ġconn": 39277, + "Shield": 39278, + "Ġtongues": 39279, + "Ġsuccinct": 39280, + "views": 39281, + "ĠMUST": 39282, + "oliath": 39283, + "Ġlimitless": 39284, + "Ġapocalyptic": 39285, + "ĠAtlantis": 39286, + "DNA": 39287, + "ilded": 39288, + "ĠDresden": 39289, + "nit": 39290, + "Ġsubdiv": 39291, + "gressive": 39292, + "701": 39293, + "hops": 39294, + "alist": 39295, + "Ġunintentional": 39296, + "Ġpsychic": 39297, + "Ġcontrovers": 39298, + "Ġforeground": 39299, + "Ġnaïve": 39300, + "Ġfolders": 39301, + "icist": 39302, + "Ġdrawbacks": 39303, + "ĠToxic": 39304, + "ophy": 39305, + "ĠMasonic": 39306, + "Ġcis": 39307, + "olated": 39308, + "Ġdepletion": 39309, + "Rap": 39310, + "692": 39311, + "Ġinver": 39312, + "ĠFAQ": 39313, + "Ġmeanings": 39314, + "Ġbisc": 39315, + "ĠRage": 39316, + "Ġresear": 39317, + "Ep": 39318, + "Ġunbeat": 39319, + "ĠComponents": 39320, + "bub": 39321, + "ĠInterface": 39322, + "Isa": 39323, + "ĠArgon": 39324, + "Ġdenomin": 39325, + "Ġmammal": 39326, + "519": 39327, + "Ġsizing": 39328, + "imbabwe": 39329, + "ĠReplacement": 39330, + "Georgia": 39331, + "ĠParticipation": 39332, + "Ġmelts": 39333, + "Ġfemin": 39334, + "514": 39335, + "Ġseams": 39336, + "513": 39337, + "ĠGaw": 39338, + "Ġbrood": 39339, + "Mit": 39340, + "Ġannoyance": 39341, + "Ġequilibrium": 39342, + "Ġpatri": 39343, + "Ġ338": 39344, + "561": 39345, + "mentioned": 39346, + "ĠVotes": 39347, + "Ġintoler": 39348, + "Ġstrikingly": 39349, + "Ġ352": 39350, + "Ġskeletal": 39351, + "616": 39352, + "isition": 39353, + "Ġfluor": 39354, + "provided": 39355, + "517": 39356, + "Ġclimates": 39357, + "Ġsensibilities": 39358, + "ĠFrequ": 39359, + "onite": 39360, + "Kenn": 39361, + "Ġmagnets": 39362, + "assis": 39363, + "Ġprerequisite": 39364, + "Ġ>>>": 39365, + "Ġscree": 39366, + "google": 39367, + "ĠMirage": 39368, + "Ġevict": 39369, + "Peace": 39370, + "Ġmissionaries": 39371, + "617": 39372, + "748": 39373, + "rient": 39374, + "ĠSTATS": 39375, + "Bird": 39376, + "ĠShiva": 39377, + "ĠBlessing": 39378, + "Ġredundancy": 39379, + "Ġphotoc": 39380, + "ĠOnes": 39381, + "754": 39382, + "alert": 39383, + "urous": 39384, + "Ġfolklore": 39385, + "ĠIdeal": 39386, + "sheets": 39387, + "according": 39388, + "Hor": 39389, + "Cle": 39390, + "ĠEdit": 39391, + "671": 39392, + "olitics": 39393, + "ĠESC": 39394, + "Ġparaly": 39395, + "Ġorgasm": 39396, + "speak": 39397, + "ð": 39398, + "Ġsneaky": 39399, + "Ġswords": 39400, + "Ġfandom": 39401, + "776": 39402, + "ĠScandinav": 39403, + "Ġdarts": 39404, + "546": 39405, + "cerpt": 39406, + "ĠGifts": 39407, + "Ġmagically": 39408, + "phys": 39409, + "Laughs": 39410, + "ĠSour": 39411, + "ources": 39412, + "789": 39413, + "ĠEps": 39414, + "ository": 39415, + "uality": 39416, + "literally": 39417, + "Ġheavens": 39418, + "FUL": 39419, + "Ġie": 39420, + "ĠISP": 39421, + "Ġwink": 39422, + "Ġweeping": 39423, + "Ġdocking": 39424, + "ACY": 39425, + "iece": 39426, + "Ġsignifies": 39427, + "guns": 39428, + "Sac": 39429, + "Leave": 39430, + "imation": 39431, + "Ġunex": 39432, + "uctive": 39433, + "ĠFees": 39434, + "ĠPortable": 39435, + "ĠInvestigator": 39436, + "pill": 39437, + "rehensible": 39438, + "Ġpotency": 39439, + "803": 39440, + "Ġembodiment": 39441, + 
"overty": 39442, + "shine": 39443, + "REL": 39444, + "ĠMPH": 39445, + "ĠPatriarch": 39446, + "Ġaspirin": 39447, + "Ġrinse": 39448, + "Ġinher": 39449, + "ograms": 39450, + "ĠTHREE": 39451, + "qt": 39452, + "ipples": 39453, + "Ġdehuman": 39454, + "Ġslander": 39455, + "Ġflora": 39456, + "brow": 39457, + "Ġblindly": 39458, + "ectar": 39459, + "endish": 39460, + "Ġpigment": 39461, + "cellent": 39462, + "Ġyells": 39463, + "ĠLust": 39464, + "ĠAttacks": 39465, + "ĠSyndicate": 39466, + "otin": 39467, + "gress": 39468, + "reenshot": 39469, + "picking": 39470, + "Ġacupuncture": 39471, + "images": 39472, + "glas": 39473, + "ĠPolicies": 39474, + "Ġintestinal": 39475, + "1998": 39476, + "ULE": 39477, + "runs": 39478, + "ĠNing": 39479, + "ĠAsuka": 39480, + "ĠSkull": 39481, + "Motor": 39482, + "Ġdefund": 39483, + "Ġattaching": 39484, + "ĠBAD": 39485, + "Ġquarrel": 39486, + "Child": 39487, + "Dog": 39488, + "issan": 39489, + "irmation": 39490, + "Ġinline": 39491, + "ĠLover": 39492, + "Ġcyan": 39493, + "entary": 39494, + "awareness": 39495, + "Ġtraveller": 39496, + "âĢIJ": 39497, + "Ġbeasts": 39498, + "Ġboobs": 39499, + "ĠDeadly": 39500, + "Ġplutonium": 39501, + "ĠIntellectual": 39502, + "Jam": 39503, + "Ġconsec": 39504, + "663": 39505, + "ĠVegan": 39506, + "Ġ331": 39507, + "uron": 39508, + "ĠHEL": 39509, + "reements": 39510, + "Ġclone": 39511, + "Ġoutputs": 39512, + "oult": 39513, + "ĠDOM": 39514, + "ĠNX": 39515, + "Ze": 39516, + "909": 39517, + "brate": 39518, + "arations": 39519, + "ĠJindal": 39520, + "Ġbooklet": 39521, + "amide": 39522, + "Ġscraping": 39523, + "Sol": 39524, + "Date": 39525, + "796": 39526, + "Ġfulf": 39527, + "Ġskeletons": 39528, + "Ġsaints": 39529, + "ĠCurious": 39530, + "Han": 39531, + "Ġrepud": 39532, + "osity": 39533, + "ĠGravity": 39534, + "Ġmetadata": 39535, + "Focus": 39536, + "Ġthrott": 39537, + "ĠProgramming": 39538, + "Break": 39539, + "erver": 39540, + "Ġknight": 39541, + "yrs": 39542, + "Ġ376": 39543, + "sat": 39544, + "auto": 39545, + "Ġbroom": 39546, + "Ġnerd": 39547, + "Political": 39548, + "022": 39549, + "-------------": 39550, + "oulos": 39551, + "Ġrelic": 39552, + "Ġenactment": 39553, + "rious": 39554, + "ĠUniform": 39555, + "Teen": 39556, + "Colorado": 39557, + "055": 39558, + "Ġangled": 39559, + "bolt": 39560, + "ĠNeander": 39561, + "ĠDism": 39562, + "thanks": 39563, + "Polit": 39564, + "ersion": 39565, + "dro": 39566, + "install": 39567, + "Jake": 39568, + "hz": 39569, + "Ġ770": 39570, + "ĠCommodore": 39571, + "lahoma": 39572, + "Ġshri": 39573, + "Ġ....": 39574, + "Ġ7000": 39575, + "scope": 39576, + "Ġgenesis": 39577, + "Ġresided": 39578, + "ĠRivals": 39579, + "Ġsarcastic": 39580, + "Ġelicit": 39581, + "Ġmultiplied": 39582, + "uitous": 39583, + "Ġoppress": 39584, + "ĠPROT": 39585, + "Ġperpetually": 39586, + "ĠAdds": 39587, + "Ġbuffers": 39588, + "Ġmush": 39589, + "Ġ354": 39590, + "Ġpresc": 39591, + "ĠKung": 39592, + "682": 39593, + "Education": 39594, + "Ġpled": 39595, + "bsp": 39596, + "Ġconfessions": 39597, + "Ġrevocation": 39598, + "Micro": 39599, + "ĠHobby": 39600, + "ĠFatal": 39601, + "STAR": 39602, + "Ġworkspace": 39603, + "Ġtransformations": 39604, + "Ġportals": 39605, + "orned": 39606, + "figured": 39607, + "Ġlinguistic": 39608, + "pperc": 39609, + "ergus": 39610, + "Fel": 39611, + "ĠIntent": 39612, + "Ġ289": 39613, + "Ġdelinquent": 39614, + "Ġhandwriting": 39615, + "Ġvap": 39616, + "576": 39617, + "redited": 39618, + "736": 39619, + "Ġpsychiatry": 39620, + "GMT": 39621, + "Ġdisingen": 39622, + "Ġcrou": 39623, + "801": 39624, + "Ġmalice": 39625, + "itutes": 
39626, + "ĠTiff": 39627, + "Ġstink": 39628, + "574": 39629, + "Story": 39630, + "Modern": 39631, + "ĠGly": 39632, + "Jamie": 39633, + "Ġadvertis": 39634, + "Ġhiber": 39635, + "Ġinfiltr": 39636, + "Ġelector": 39637, + "rovers": 39638, + "ĠFist": 39639, + "peed": 39640, + "ĠClassical": 39641, + "592": 39642, + "Ġconscientious": 39643, + "Surv": 39644, + "Text": 39645, + "ĠDrunk": 39646, + "Ġsupplemented": 39647, + "THIS": 39648, + "Ġtimid": 39649, + "Ġstacking": 39650, + "rites": 39651, + "Ġrebirth": 39652, + "Ġbalcon": 39653, + "Ġyawn": 39654, + "rosc": 39655, + "axy": 39656, + "Hart": 39657, + "ĠOPER": 39658, + "996": 39659, + "Ġrabid": 39660, + "ĠTick": 39661, + "Ġgrinning": 39662, + "elfth": 39663, + "045": 39664, + "Ġjustifies": 39665, + "ĠPirate": 39666, + "ĠSalary": 39667, + "Ġmirac": 39668, + "613": 39669, + "inately": 39670, + "ĠLIN": 39671, + "Ġinadequ": 39672, + "NPR": 39673, + "iddled": 39674, + "storage": 39675, + "Ġseventy": 39676, + "onet": 39677, + "Ġgastro": 39678, + "FIR": 39679, + "Ġrodent": 39680, + "629": 39681, + "ĠInclude": 39682, + "ĠCategories": 39683, + "ĠLiterally": 39684, + "Ġpree": 39685, + "aunder": 39686, + "ĠLOL": 39687, + "694": 39688, + "Ġindef": 39689, + "Ped": 39690, + "Ġmenstru": 39691, + "Ġcensored": 39692, + "Ġconfigure": 39693, + "Ġoverest": 39694, + "igenous": 39695, + "Ġrectangular": 39696, + "ĠMIS": 39697, + "ĠMub": 39698, + "Ġwitches": 39699, + "izards": 39700, + "Ġobnoxious": 39701, + "ĠLoll": 39702, + "ĠSEM": 39703, + "Ġspiritually": 39704, + "Ġcoer": 39705, + "Ġmodesty": 39706, + "butt": 39707, + "Ġedits": 39708, + "ĠShall": 39709, + "sburgh": 39710, + "Ġ1911": 39711, + "Rex": 39712, + "manent": 39713, + "ĠLithuan": 39714, + "Ġpointers": 39715, + "ativity": 39716, + "retch": 39717, + "Ġcascade": 39718, + "ĠRagnarok": 39719, + "ĠPainting": 39720, + "ĠATL": 39721, + "Born": 39722, + "Ġpadding": 39723, + "whel": 39724, + "Ġgrotesque": 39725, + "Ġtheorists": 39726, + "forcer": 39727, + "ĠJinn": 39728, + "Ġrenal": 39729, + "jamin": 39730, + "ĠFEC": 39731, + ".\"\"": 39732, + "redict": 39733, + "Ġoppos": 39734, + "opted": 39735, + "Sel": 39736, + "ipment": 39737, + "752": 39738, + "792": 39739, + "Pur": 39740, + "Ġvolt": 39741, + "Ġflap": 39742, + "ĠCASE": 39743, + "Ġdyed": 39744, + "orers": 39745, + "becca": 39746, + ",.": 39747, + "ifice": 39748, + "ubes": 39749, + "Ġyr": 39750, + "DW": 39751, + "Ġalteration": 39752, + "ĠSimpl": 39753, + "Ġunequiv": 39754, + "756": 39755, + "Dou": 39756, + "Ġplunder": 39757, + "Ġcommons": 39758, + "Ġstag": 39759, + "ĠZeal": 39760, + "avanaugh": 39761, + "Self": 39762, + "none": 39763, + "EGIN": 39764, + "Ġflashback": 39765, + "VAL": 39766, + "Gab": 39767, + "ĠCapture": 39768, + "ĠBrilliant": 39769, + "ĠDisk": 39770, + "ĠMood": 39771, + "Ġhaun": 39772, + "Ġrotting": 39773, + "ĠCobra": 39774, + "Ġpsychopath": 39775, + "Ġhelper": 39776, + "Starting": 39777, + "ĠOrbit": 39778, + "Ġcaf": 39779, + "Half": 39780, + "Volume": 39781, + "aptop": 39782, + "ĠSaga": 39783, + "azor": 39784, + "593": 39785, + "774": 39786, + "ĠCaucasian": 39787, + "compan": 39788, + "ĠVERY": 39789, + "GES": 39790, + "Ġvomit": 39791, + "Ġdispro": 39792, + "ĠMechanics": 39793, + "Ġ385": 39794, + "Ġmystical": 39795, + "AFTA": 39796, + "Ġbacter": 39797, + "availability": 39798, + "Ġhairc": 39799, + "ĠVec": 39800, + "rypt": 39801, + "Ġmanipulative": 39802, + "shell": 39803, + "ĠWeird": 39804, + "jab": 39805, + "ĠByr": 39806, + "Bow": 39807, + "uin": 39808, + "Ġquot": 39809, + "MX": 39810, + "Ġ960": 39811, + "ĠSharia": 39812, + "ĠWeapon": 39813, + 
"ĠPowerPoint": 39814, + "Ġstitching": 39815, + "Ġconstraint": 39816, + "âľ": 39817, + "ulic": 39818, + "597": 39819, + "omedical": 39820, + "ĠSupplemental": 39821, + "ĠSurve": 39822, + "ĠSubcommittee": 39823, + "ĠDarkness": 39824, + "Ġpython": 39825, + "LU": 39826, + "Ġ402": 39827, + "ĠQuan": 39828, + "ĠModerate": 39829, + "clusively": 39830, + "Ġextrap": 39831, + "Ġlatt": 39832, + "ĠSTUD": 39833, + "oslav": 39834, + "Ġsymb": 39835, + "battle": 39836, + "flash": 39837, + "ĠDeploy": 39838, + "Ġmicrobiome": 39839, + "Ġingested": 39840, + "Ġdistort": 39841, + "Ġassimil": 39842, + "Ġmobs": 39843, + "illet": 39844, + "Gre": 39845, + "Ġ294": 39846, + "Ġforbids": 39847, + "ĠEfficiency": 39848, + "ĠClan": 39849, + "763": 39850, + "Ġdragons": 39851, + "States": 39852, + "ĠMAKE": 39853, + "ĠBOOK": 39854, + "ĠRuns": 39855, + "ĠUX": 39856, + "EED": 39857, + "Whoever": 39858, + "ionics": 39859, + "worldly": 39860, + "ĠMermaid": 39861, + "Ġbenz": 39862, + "Info": 39863, + "523": 39864, + "Ġbiod": 39865, + "ĠPoison": 39866, + "ceivable": 39867, + "Services": 39868, + "ATIVE": 39869, + "ĠItem": 39870, + "Ġdisav": 39871, + "Ġheter": 39872, + "Ġasteroids": 39873, + "ĠWooden": 39874, + "Ġelectroly": 39875, + "assadors": 39876, + "nance": 39877, + "reflect": 39878, + "Ġattent": 39879, + "iphany": 39880, + "Ġspaceship": 39881, + "Ġbegg": 39882, + "algia": 39883, + "Ax": 39884, + "Ġidiosyncr": 39885, + "Ġinserting": 39886, + "ĠCSS": 39887, + "ĠLET": 39888, + "ĠStrikes": 39889, + "ossibly": 39890, + "Exp": 39891, + "Opp": 39892, + "dden": 39893, + "Ġplayable": 39894, + "ĠJM": 39895, + "Ġlawfully": 39896, + "ĠBlink": 39897, + "Ġ413": 39898, + "Ġoverpowered": 39899, + "Ġcommenter": 39900, + "Track": 39901, + "Ġmethyl": 39902, + "Ġfermented": 39903, + "Ġinvaders": 39904, + "ĠMoves": 39905, + "Ġcommunicates": 39906, + "rint": 39907, + "ĠTray": 39908, + "jug": 39909, + "Ġsuperf": 39910, + "ochet": 39911, + "ĠJelly": 39912, + "Ġestrogen": 39913, + "Dom": 39914, + "mix": 39915, + "Gun": 39916, + "ochemistry": 39917, + "952": 39918, + "Ġovere": 39919, + "ĠPlaintiff": 39920, + "ĠPilgrim": 39921, + "ĠSERVICES": 39922, + "ĠExpend": 39923, + "ĠFRE": 39924, + "Ġsmelling": 39925, + "ĠSpaces": 39926, + "bris": 39927, + "Mission": 39928, + "Ġarter": 39929, + "Ġautonom": 39930, + "Lisa": 39931, + "ĠPercent": 39932, + "NK": 39933, + "ĠLimits": 39934, + "Ġ356": 39935, + "Recent": 39936, + "ĠSiberian": 39937, + "etermin": 39938, + "nets": 39939, + "ĠSword": 39940, + "essee": 39941, + "Ùĩ": 39942, + "icycle": 39943, + "Ġparas": 39944, + "Ġrud": 39945, + "Ġscrib": 39946, + "Ġ1860": 39947, + "Shop": 39948, + "orld": 39949, + "Ġpept": 39950, + "ENSE": 39951, + "Ġanimations": 39952, + "ership": 39953, + "Search": 39954, + "ĠUSSR": 39955, + "washed": 39956, + "Ġpromulg": 39957, + "Ġdetainee": 39958, + "Ġunderest": 39959, + "ĠAppropri": 39960, + "Left": 39961, + "Update": 39962, + "Wallet": 39963, + "idently": 39964, + "ĠBicycle": 39965, + "Ġgorge": 39966, + "abyte": 39967, + "ĠMinecraft": 39968, + "rike": 39969, + "997": 39970, + "Tesla": 39971, + "Often": 39972, + "ĠTHESE": 39973, + "Ġregression": 39974, + "Hen": 39975, + "Ġsnippets": 39976, + "irds": 39977, + "Ġprinces": 39978, + "Ġwastes": 39979, + "ĠWond": 39980, + "itimate": 39981, + "ĠMongol": 39982, + "ĠkW": 39983, + "Ġidiots": 39984, + "Ġforeigner": 39985, + "Upon": 39986, + "Ġbackdoor": 39987, + "umph": 39988, + "ĠSquirrel": 39989, + "Ġtyped": 39990, + "Ġblockers": 39991, + "Vote": 39992, + "ĠPossibly": 39993, + "geist": 39994, + "ĠTRANS": 39995, + "Ġtitan": 39996, + "VG": 
39997, + "Ġmicrobi": 39998, + "Ġinteracts": 39999, + "Ġmasc": 40000, + "Ġfinite": 40001, + "Ġcutoff": 40002, + "ornings": 40003, + "Ġprototyp": 40004, + "Ġcompan": 40005, + "mology": 40006, + "ĠBOX": 40007, + "Cre": 40008, + "Bot": 40009, + "grading": 40010, + "PET": 40011, + "Ġinsidious": 40012, + "ĠFranch": 40013, + "orians": 40014, + "ĠAUT": 40015, + "ĠCrush": 40016, + "589": 40017, + "question": 40018, + "anguard": 40019, + "Ġabsurdity": 40020, + "?\",": 40021, + "Hum": 40022, + "Ġliberalism": 40023, + "Ġpostwar": 40024, + "Gener": 40025, + "Personally": 40026, + "889": 40027, + "Bul": 40028, + "Ġlighthouse": 40029, + "Ġ291": 40030, + "VK": 40031, + "ĠExposure": 40032, + "Ġsubtract": 40033, + "ometime": 40034, + "arbon": 40035, + "ĠThieves": 40036, + "anus": 40037, + "ĠLibertarian": 40038, + "Raw": 40039, + "Ġsolvent": 40040, + "Ġcorros": 40041, + "Ġsignific": 40042, + "Ġscholarly": 40043, + "024": 40044, + "Ġfetish": 40045, + "Ġlarvae": 40046, + "Ġcatast": 40047, + "Ġtraitor": 40048, + "ijing": 40049, + "Demand": 40050, + "math": 40051, + "Ġconceivable": 40052, + "either": 40053, + "acl": 40054, + "ĠArrows": 40055, + "627": 40056, + "ĠFrankenstein": 40057, + "entious": 40058, + "Ġimitation": 40059, + "amn": 40060, + "ĠSTOP": 40061, + "Ġcripp": 40062, + "zag": 40063, + "ĠZed": 40064, + "797": 40065, + "Along": 40066, + "Ġwont": 40067, + "Ġfolds": 40068, + "Shar": 40069, + "ĠCommentary": 40070, + "ĠLibraries": 40071, + "ĠThunderbolt": 40072, + "itud": 40073, + "Toy": 40074, + "Ġincidentally": 40075, + "ĠResp": 40076, + "Ġordinarily": 40077, + "Ġvanish": 40078, + "acterial": 40079, + "Minnesota": 40080, + "rank": 40081, + "614": 40082, + "ĠExam": 40083, + "Got": 40084, + "Ġsnipers": 40085, + "ETHOD": 40086, + "dirty": 40087, + "igsaw": 40088, + "Obs": 40089, + "ĠAuthors": 40090, + "Ġillustrating": 40091, + "782": 40092, + "864": 40093, + "Ġblinded": 40094, + "transfer": 40095, + "Ġspawning": 40096, + "ĠDiary": 40097, + "ĠDNS": 40098, + "CG": 40099, + "someone": 40100, + "Ġcruc": 40101, + "Morgan": 40102, + "Learn": 40103, + "API": 40104, + "toc": 40105, + "STAT": 40106, + "ĠFlame": 40107, + "aganda": 40108, + "ĠBenef": 40109, + "stuff": 40110, + "SEA": 40111, + "Ġincest": 40112, + "Normally": 40113, + "ĠRU": 40114, + "Ġarsenic": 40115, + "isine": 40116, + "ĠTG": 40117, + "Type": 40118, + "regn": 40119, + "Cass": 40120, + "Touch": 40121, + "Site": 40122, + "Ġpict": 40123, + "Ġcorrupted": 40124, + "729": 40125, + "Ġnineteen": 40126, + "Ġparaph": 40127, + "Ġtavern": 40128, + "Ġretard": 40129, + "ĠKaf": 40130, + "Ġcolleg": 40131, + "bucks": 40132, + "imum": 40133, + "ĠCandle": 40134, + "ĠMisc": 40135, + "ĠAwesome": 40136, + "edited": 40137, + "ĠDN": 40138, + "otomy": 40139, + "Ġdisclaimer": 40140, + "798": 40141, + "ĠGoodbye": 40142, + "ucle": 40143, + "atom": 40144, + "Judge": 40145, + "cipl": 40146, + "Ġinexplicable": 40147, + "iddler": 40148, + "781": 40149, + "Ġempirical": 40150, + "Veter": 40151, + "Ġascert": 40152, + "Ġaest": 40153, + "Ġlaz": 40154, + "binary": 40155, + "Ġ358": 40156, + "contained": 40157, + "Ġmultipl": 40158, + "ocado": 40159, + "Ġdelusional": 40160, + "Ġaeros": 40161, + "udence": 40162, + "Ġjargon": 40163, + "estine": 40164, + "Ġarbitrarily": 40165, + "Ġprick": 40166, + "BACK": 40167, + "amines": 40168, + "Mess": 40169, + "Knowing": 40170, + "ublic": 40171, + "ĠWarfare": 40172, + "Ġsignify": 40173, + "Ġfragmentation": 40174, + "Tex": 40175, + "Ġnin": 40176, + "Ġdise": 40177, + "882": 40178, + "hospital": 40179, + "volent": 40180, + "Need": 40181, + "Ġinfer": 40182, + 
"Sony": 40183, + "783": 40184, + "YING": 40185, + "Ġinfinity": 40186, + "ĠFortress": 40187, + "Ġmustache": 40188, + "Ġcorresponds": 40189, + "DX": 40190, + "Ġunmarried": 40191, + "ĠCruel": 40192, + "Ġ1901": 40193, + "Ġappropri": 40194, + "ZI": 40195, + "Ġphosph": 40196, + "901": 40197, + "IFE": 40198, + "Ġ347": 40199, + "Ġconvoluted": 40200, + "ĠApost": 40201, + "htm": 40202, + "Ġilluminating": 40203, + "568": 40204, + "Ġassassinate": 40205, + "Ġparam": 40206, + "Ġimpractical": 40207, + "cedes": 40208, + "ĠProcedure": 40209, + "ĠMouth": 40210, + "Battle": 40211, + "Ġ451": 40212, + "Sand": 40213, + "Ġcontamin": 40214, + "Hour": 40215, + "Cell": 40216, + "BIL": 40217, + "Ġprecon": 40218, + "ĠScor": 40219, + "Ġconfig": 40220, + "ĠMuscle": 40221, + "Ġhive": 40222, + "Ġunderworld": 40223, + "plement": 40224, + "Ġpostage": 40225, + "Ġinterpersonal": 40226, + "Ġpierced": 40227, + "Ġcharms": 40228, + "oscopic": 40229, + "ASC": 40230, + "ĠDex": 40231, + "render": 40232, + "png": 40233, + "Ġcritiques": 40234, + "992": 40235, + "ĠVinyl": 40236, + "Bear": 40237, + "idia": 40238, + "ĠTemp": 40239, + "Ġcyn": 40240, + "ĠBCE": 40241, + "Ġpatriarchal": 40242, + "Ġantagonist": 40243, + "ĠGMO": 40244, + "Ġunnatural": 40245, + "Race": 40246, + "imeo": 40247, + "ĠUkrainians": 40248, + "Train": 40249, + "Ġ329": 40250, + "ritten": 40251, + "igil": 40252, + "Lin": 40253, + "alus": 40254, + "*****": 40255, + "olded": 40256, + "ĠPegasus": 40257, + "Bas": 40258, + "photos": 40259, + "Ġ820": 40260, + "Ġsquadron": 40261, + "ESE": 40262, + "Ġ373": 40263, + "Uk": 40264, + "Lost": 40265, + "Store": 40266, + "ĠScenes": 40267, + "JJ": 40268, + "Ġlick": 40269, + "Tyler": 40270, + "cius": 40271, + "lishing": 40272, + "ocl": 40273, + "Ġassoci": 40274, + "ensitivity": 40275, + "entanyl": 40276, + "Rum": 40277, + "Ġ443": 40278, + "onding": 40279, + "Ġpedals": 40280, + "ĠPsychological": 40281, + "Ġthro": 40282, + "Network": 40283, + "591": 40284, + "Pick": 40285, + "Ġchords": 40286, + "ĠHound": 40287, + "entials": 40288, + "faces": 40289, + "ĠYin": 40290, + "ugi": 40291, + "bows": 40292, + "ĠForms": 40293, + "886": 40294, + "Ox": 40295, + "Ġ351": 40296, + "Ġmating": 40297, + "Ġchirop": 40298, + "916": 40299, + "Ġexpend": 40300, + "Ġusefulness": 40301, + "Marvel": 40302, + "ĠStretch": 40303, + "omez": 40304, + "ĠJS": 40305, + "Hal": 40306, + "fle": 40307, + "ĠCountdown": 40308, + "ĠLH": 40309, + "assian": 40310, + "vd": 40311, + "ĠTranscript": 40312, + "ĠExtrem": 40313, + "idine": 40314, + "ustainable": 40315, + "ederal": 40316, + "ĠOwl": 40317, + "Ġcreed": 40318, + "ĠGrateful": 40319, + "Ġprenatal": 40320, + "________________________________": 40321, + "ĠElements": 40322, + "âĢ¦)": 40323, + "nesia": 40324, + "ARGET": 40325, + "Ġboredom": 40326, + "Ġdepictions": 40327, + "verbal": 40328, + "ĠeSports": 40329, + "Laura": 40330, + "ilage": 40331, + "ĠGalactic": 40332, + "Investigators": 40333, + "Ġscattering": 40334, + "instein": 40335, + "ĠExperiment": 40336, + "ĠRecre": 40337, + "Ġregul": 40338, + "Ġrelent": 40339, + "STE": 40340, + "Ġslicing": 40341, + "igans": 40342, + "raped": 40343, + "ĠDeter": 40344, + "Ġsmoker": 40345, + "ĠWikimedia": 40346, + "pages": 40347, + "Ted": 40348, + "713": 40349, + "Ġpuberty": 40350, + "Ġhars": 40351, + "ĠStarter": 40352, + "patch": 40353, + "leeve": 40354, + "Ġ346": 40355, + "ĠAccessories": 40356, + "ventions": 40357, + "ĠSTAND": 40358, + "ĠUrug": 40359, + "ĠOccupy": 40360, + "Ġbinds": 40361, + "ĠBubble": 40362, + "Ġincorporation": 40363, + "Ġstereotypical": 40364, + "Ġgor": 40365, + "987": 
40366, + "Ġevils": 40367, + "tower": 40368, + "Ġastronomer": 40369, + "Ble": 40370, + "ĠNid": 40371, + "ĠWidow": 40372, + "Ġpaw": 40373, + "Ġinnoc": 40374, + "ĠOWN": 40375, + "Ġtofu": 40376, + "drops": 40377, + "ĠEval": 40378, + "693": 40379, + "Collins": 40380, + "penter": 40381, + "ĠNib": 40382, + "Ġsmokes": 40383, + "Ġ1850": 40384, + "Ġtechno": 40385, + "oooo": 40386, + "ĠUnic": 40387, + "ĠKirin": 40388, + "\":[\"": 40389, + "Ġincrements": 40390, + "989": 40391, + "oodoo": 40392, + "ĠCyborg": 40393, + "Ġcures": 40394, + "ĠOW": 40395, + "ĠAnnex": 40396, + "behavior": 40397, + "/-": 40398, + "Ġbuggy": 40399, + "onent": 40400, + "Bey": 40401, + "Ġsummarize": 40402, + "putable": 40403, + "Ġfri": 40404, + "Gi": 40405, + "urances": 40406, + "ĠAppalach": 40407, + "Ġhegemony": 40408, + "ĠOrigins": 40409, + "Ġconnectors": 40410, + "ĠAST": 40411, + "object": 40412, + "ĠSlay": 40413, + "Arm": 40414, + "oston": 40415, + "ĠEVEN": 40416, + "Ġprophecy": 40417, + "Bright": 40418, + "ĠVector": 40419, + "Marg": 40420, + "omical": 40421, + "Holy": 40422, + "ĠRPM": 40423, + "ĠReceiver": 40424, + "Ġtracts": 40425, + "boss": 40426, + "Ġblurry": 40427, + "aspx": 40428, + "DES": 40429, + "Ġcess": 40430, + "ĠAster": 40431, + "anything": 40432, + "levard": 40433, + "unciation": 40434, + "jong": 40435, + "Ġiv": 40436, + "Common": 40437, + "ĠDistance": 40438, + "imus": 40439, + "outheast": 40440, + "Ġcir": 40441, + "ĠCato": 40442, + "Ġinscribed": 40443, + "ersed": 40444, + "Ġanarchy": 40445, + "Ġplagiar": 40446, + "Ġthug": 40447, + "Actor": 40448, + "ĠTant": 40449, + "Researchers": 40450, + "remember": 40451, + "Ġitch": 40452, + "Ġrefill": 40453, + "Ġsucker": 40454, + "ĠWANT": 40455, + "RAG": 40456, + "rencies": 40457, + "ĠTape": 40458, + "Ġattaches": 40459, + "nb": 40460, + "Tan": 40461, + "Ġappend": 40462, + "Ġalas": 40463, + "951": 40464, + "panel": 40465, + "Climate": 40466, + "icrobial": 40467, + "Brandon": 40468, + "ĠFreud": 40469, + "Ġfungi": 40470, + "Ġcommenters": 40471, + "ĠDelicious": 40472, + "Ġhitherto": 40473, + "conv": 40474, + "Ġchemist": 40475, + "Ġdenominations": 40476, + "ĠBehavior": 40477, + "comed": 40478, + "ĠLantern": 40479, + "ĠFloating": 40480, + "magic": 40481, + "ĠBarbar": 40482, + "bender": 40483, + "iliar": 40484, + "unny": 40485, + "Ġretracted": 40486, + "atars": 40487, + "ĠLovely": 40488, + "Ġinfinitely": 40489, + "Ġhumili": 40490, + "Ġinterestingly": 40491, + "Ġmunicip": 40492, + "ĠPanic": 40493, + "Ġcomprehension": 40494, + "ĠMassacre": 40495, + "Ġpersuasion": 40496, + "enf": 40497, + "Ġcoded": 40498, + "higher": 40499, + "chart": 40500, + "umbered": 40501, + "ĠIndigo": 40502, + "Ġthinker": 40503, + "Ġgoof": 40504, + "ĠPetition": 40505, + "fascist": 40506, + "absor": 40507, + "Ġassay": 40508, + "ĠClassification": 40509, + "Ġhalluc": 40510, + "speech": 40511, + "issues": 40512, + "Ġinexper": 40513, + "ĠLibre": 40514, + "Ġsling": 40515, + "zech": 40516, + "Ġpouch": 40517, + "ĠOffense": 40518, + "ĠHF": 40519, + "Fight": 40520, + "026": 40521, + "ĠTrident": 40522, + "fm": 40523, + "Ġintox": 40524, + "Ġ465": 40525, + "colonial": 40526, + "ovies": 40527, + "794": 40528, + "Techn": 40529, + "undreds": 40530, + "Ġchildish": 40531, + "arenthood": 40532, + "ĠShade": 40533, + "Host": 40534, + "Ġdirectional": 40535, + "reader": 40536, + "rimp": 40537, + "ĠEater": 40538, + "prep": 40539, + "Ġmeas": 40540, + "Ġlatch": 40541, + "inant": 40542, + "nels": 40543, + "finished": 40544, + "application": 40545, + "Board": 40546, + "Ġfiller": 40547, + "ivably": 40548, + "CAST": 40549, + "Ġstereotyp": 
40550, + "Ġwarranties": 40551, + "ĠProbe": 40552, + "Ġspontaneously": 40553, + "Ġtropes": 40554, + "Meg": 40555, + "ĠHandling": 40556, + "hemer": 40557, + "986": 40558, + "ĠSly": 40559, + "plates": 40560, + "Ġmolten": 40561, + "ĠHIT": 40562, + "strings": 40563, + "Ġcentrif": 40564, + "ĠENG": 40565, + "Indeed": 40566, + "Ġ429": 40567, + "Ġsly": 40568, + "Ġ490": 40569, + "Ġhordes": 40570, + "boot": 40571, + "691": 40572, + "ihara": 40573, + "Ġsubversive": 40574, + "Russell": 40575, + "aceous": 40576, + "wk": 40577, + "Ġreverence": 40578, + "Ġingenious": 40579, + "holiday": 40580, + "eligible": 40581, + "ĠTactical": 40582, + "978": 40583, + "herence": 40584, + "Ġgimm": 40585, + "Ġarchaic": 40586, + "Ġadam": 40587, + "Ġ297": 40588, + "Father": 40589, + "ĠLerner": 40590, + "Ġhesitated": 40591, + "Safety": 40592, + "Ġawakened": 40593, + "ueller": 40594, + "Ġextrater": 40595, + "Ġmummy": 40596, + "ĠBuddhism": 40597, + "Ġ359": 40598, + "Ġlegions": 40599, + "Ġprehistoric": 40600, + "ancouver": 40601, + "Ġmelancholy": 40602, + "ĠEnemy": 40603, + "ĠSyl": 40604, + "ĠRobo": 40605, + "verting": 40606, + "ĠBullets": 40607, + "essler": 40608, + "Ġmarvelous": 40609, + "ĠBened": 40610, + "Ġsavior": 40611, + "omever": 40612, + "Bee": 40613, + "Ġrapp": 40614, + "Ġpredomin": 40615, + "ĠScripture": 40616, + "Ġsnapshots": 40617, + "Ġunrem": 40618, + "Ġsquid": 40619, + "ĠBuddh": 40620, + "ĠSantorum": 40621, + "Internet": 40622, + "avoid": 40623, + "Ġunamb": 40624, + "Ġ296": 40625, + "Ġnexus": 40626, + "Ġinterchangeable": 40627, + "ockets": 40628, + "Ġfoll": 40629, + "ĠOPT": 40630, + "023": 40631, + "²": 40632, + "Ġhereditary": 40633, + "Ġvape": 40634, + "=\"": 40635, + "1996": 40636, + "س": 40637, + "Emergency": 40638, + "Ġneb": 40639, + "Ġisot": 40640, + "Ġdiam": 40641, + "stairs": 40642, + "ĠAppendix": 40643, + "venient": 40644, + "Ġinvol": 40645, + "Ġtheorist": 40646, + "Ġconqu": 40647, + "Mich": 40648, + "ĠSort": 40649, + "antasy": 40650, + "dating": 40651, + "771": 40652, + "Ġape": 40653, + "Ġindemn": 40654, + "ween": 40655, + "Games": 40656, + "ascal": 40657, + "Muslims": 40658, + "Ġleaflets": 40659, + "Ġtraverse": 40660, + "Ġtransgress": 40661, + "Ġflushed": 40662, + "893": 40663, + "lasses": 40664, + "obos": 40665, + "ooming": 40666, + "Ġtou": 40667, + "mast": 40668, + "âģ": 40669, + "751": 40670, + "Either": 40671, + "Ġgrate": 40672, + "urgy": 40673, + "Ġendowed": 40674, + "ĠRasm": 40675, + "Nat": 40676, + "odka": 40677, + "olon": 40678, + "iants": 40679, + "Ġsensations": 40680, + "Ġsituational": 40681, + "pox": 40682, + "Figure": 40683, + "Ġslime": 40684, + "Ġ421": 40685, + "ollow": 40686, + "Ġanesthesia": 40687, + "adult": 40688, + "ĠPiece": 40689, + "994": 40690, + "ĠAnalog": 40691, + "Iv": 40692, + "flo": 40693, + "Ġdomest": 40694, + "Ġcabal": 40695, + "Ġgarg": 40696, + "Ġrabb": 40697, + "REC": 40698, + "ISTORY": 40699, + "Friend": 40700, + "Ġancestor": 40701, + "ĠLets": 40702, + "Ġelf": 40703, + "Ġlobb": 40704, + "ĠAdren": 40705, + "silver": 40706, + "astical": 40707, + "Ġstitch": 40708, + "028": 40709, + "Hug": 40710, + "Ġmoss": 40711, + "ompl": 40712, + "Ġunob": 40713, + "883": 40714, + "Ġcortex": 40715, + "olutely": 40716, + "052": 40717, + "Seattle": 40718, + "restling": 40719, + "endment": 40720, + "Ġ366": 40721, + "ventus": 40722, + "ĠRated": 40723, + "ĠClever": 40724, + "Ġcloak": 40725, + "phrase": 40726, + "flake": 40727, + "Ġphilosophies": 40728, + "784": 40729, + "Ġskulls": 40730, + "wake": 40731, + "oru": 40732, + "ĠACTION": 40733, + "Ġcomprom": 40734, + "ĠManufacturer": 40735, + 
"ĠImprove": 40736, + "Ns": 40737, + "ĠRevenge": 40738, + "lords": 40739, + "Ġ417": 40740, + "iddles": 40741, + "Ġcondesc": 40742, + "tiny": 40743, + "Ġchloride": 40744, + "greg": 40745, + "ĠREST": 40746, + "subject": 40747, + "Ġundes": 40748, + "ftime": 40749, + "Ġbottleneck": 40750, + "ĠZombie": 40751, + "Ġhabitable": 40752, + "Ġcigars": 40753, + "Ġenlarg": 40754, + "icester": 40755, + "ðĿ": 40756, + "regulation": 40757, + "arters": 40758, + "Ġformulations": 40759, + "Ġadhesive": 40760, + "Ġ344": 40761, + "pod": 40762, + "etitive": 40763, + "Ġcontinuum": 40764, + "aghd": 40765, + "Ġ701": 40766, + "Ġdisband": 40767, + "Tu": 40768, + "Ġcivilisation": 40769, + "ĠPCI": 40770, + "Ġcrooked": 40771, + "ammy": 40772, + "Ġbrim": 40773, + "Jr": 40774, + "ĠBunker": 40775, + "plot": 40776, + "Ġwielded": 40777, + "Ġcaricature": 40778, + "ĠInfinite": 40779, + "piracy": 40780, + "aretz": 40781, + "Ġstares": 40782, + "incinnati": 40783, + "agents": 40784, + "ĠObamaCare": 40785, + "asuring": 40786, + "ansion": 40787, + "Ġastonished": 40788, + "iovascular": 40789, + "Bio": 40790, + "Ġadvisable": 40791, + "Ġsender": 40792, + "887": 40793, + "Led": 40794, + "DN": 40795, + "Ġaggregation": 40796, + "ĠInnocent": 40797, + "ĠTransactions": 40798, + "worms": 40799, + "ĠWorm": 40800, + "Ġ363": 40801, + "ĠBiblical": 40802, + "rared": 40803, + "Ġgazing": 40804, + "chant": 40805, + "Ġsubordinates": 40806, + "1600": 40807, + "actually": 40808, + "olition": 40809, + "ĠRTX": 40810, + "ĠPyramid": 40811, + "alph": 40812, + "ĠFPS": 40813, + "Ġerrone": 40814, + "ĠLR": 40815, + "Scientists": 40816, + "Ġincons": 40817, + "Ġbrittle": 40818, + "027": 40819, + "ĠBowser": 40820, + "Rub": 40821, + "links": 40822, + "ĠWik": 40823, + "ussion": 40824, + "Marsh": 40825, + "resents": 40826, + "Clean": 40827, + "Ġbrute": 40828, + "ĠInventory": 40829, + "1100": 40830, + "ĠATK": 40831, + "793": 40832, + "Ġcaveats": 40833, + "ĠKnot": 40834, + "IRT": 40835, + "ĠCanad": 40836, + "isma": 40837, + "entin": 40838, + "Own": 40839, + "Ġ455": 40840, + "Ġlesions": 40841, + "ĠAres": 40842, + "ĠKali": 40843, + "Ġpaws": 40844, + "Auto": 40845, + "Ġdiscrim": 40846, + "044": 40847, + "ĠCOUN": 40848, + "Ġ1905": 40849, + "Ġexperien": 40850, + "Ġ406": 40851, + "achelor": 40852, + "Ġscarcely": 40853, + "Ġsynchronized": 40854, + "Rat": 40855, + "Blake": 40856, + "Ġrewriting": 40857, + "Ġcannons": 40858, + "stem": 40859, + "Apparently": 40860, + "Ġleveling": 40861, + "?]": 40862, + "Ġfins": 40863, + "ĠTone": 40864, + "ogether": 40865, + "Sound": 40866, + "Ġmicrosc": 40867, + "ĠAsylum": 40868, + "Ġindividuality": 40869, + "Ġ432": 40870, + "lease": 40871, + "Chuck": 40872, + "Ġhating": 40873, + "Ġleftists": 40874, + "ĠPersonality": 40875, + "ĠBundle": 40876, + "Dutch": 40877, + "Ġtransformer": 40878, + "iami": 40879, + "ĠTradition": 40880, + "ĠRecipes": 40881, + "Ġdiscour": 40882, + "Viol": 40883, + "Ext": 40884, + "ĠOliv": 40885, + "ashington": 40886, + "Ġmillennia": 40887, + "Ġpsychiatrists": 40888, + "ĠTrilogy": 40889, + "inction": 40890, + "Ġdisliked": 40891, + "088": 40892, + "954": 40893, + "Ġoverloaded": 40894, + "Ġopium": 40895, + "acus": 40896, + "resources": 40897, + "mud": 40898, + "ometry": 40899, + "Hit": 40900, + "Ġguild": 40901, + "Ġabyss": 40902, + "884": 40903, + "ensity": 40904, + "ĠDifference": 40905, + "Electric": 40906, + "authent": 40907, + "Ġdownloadable": 40908, + "ellar": 40909, + "ĠSavior": 40910, + "ĠFRI": 40911, + "Ġ445": 40912, + "Ġincidental": 40913, + "Ġanalogue": 40914, + "ounters": 40915, + "ĠBuilder": 40916, + "Ġnarration": 
40917, + "ategor": 40918, + "raise": 40919, + "Ġindoctr": 40920, + "Aren": 40921, + "Ġbaptism": 40922, + "Ġobe": 40923, + "Ġtubing": 40924, + "apsed": 40925, + "Fortunately": 40926, + "gered": 40927, + "Pict": 40928, + "Ġmastering": 40929, + "ĠHIM": 40930, + "ĠObesity": 40931, + "Ġornament": 40932, + "advant": 40933, + "ĠCous": 40934, + "032": 40935, + "cells": 40936, + "Ġpreclude": 40937, + "Ġanecdote": 40938, + "Ġpatriarchy": 40939, + "ĠSending": 40940, + "Pie": 40941, + "Ġdepressive": 40942, + "ĠEnds": 40943, + "712": 40944, + "zos": 40945, + "icka": 40946, + "Ġ1906": 40947, + "Anti": 40948, + "vana": 40949, + "ĠRestrict": 40950, + "Ġprotr": 40951, + "Ġusername": 40952, + "Ġparach": 40953, + "1997": 40954, + "imental": 40955, + "rower": 40956, + "carb": 40957, + "033": 40958, + "Ġobligatory": 40959, + "Ġwillful": 40960, + "Ġsnail": 40961, + "json": 40962, + "izarre": 40963, + "Ġmiscar": 40964, + "Ġdopamine": 40965, + "л": 40966, + "Ġapplic": 40967, + "Ġnervously": 40968, + "YY": 40969, + "alez": 40970, + "ĠSoviets": 40971, + "ĠMister": 40972, + "Ġcrates": 40973, + "Ġheavenly": 40974, + "Ġdoct": 40975, + "048": 40976, + "Ġ2400": 40977, + "ivia": 40978, + "adies": 40979, + "Phone": 40980, + "asks": 40981, + "Ġperenn": 40982, + "Ġcomposing": 40983, + "Ġraiding": 40984, + "requent": 40985, + "ibli": 40986, + "ĠFeedback": 40987, + "cellaneous": 40988, + "ĠContracts": 40989, + "ĠCasting": 40990, + "vim": 40991, + "Cut": 40992, + "Ġabbrevi": 40993, + "Ġintest": 40994, + "ricted": 40995, + "969": 40996, + "nostic": 40997, + "Ġinverted": 40998, + "ĠEG": 40999, + "aiden": 41000, + "ĠClaud": 41001, + "ĠiP": 41002, + "urized": 41003, + "Emily": 41004, + "Ġ353": 41005, + "Ġ((": 41006, + "ammad": 41007, + "Reb": 41008, + "plom": 41009, + "YES": 41010, + "connection": 41011, + "ĠWra": 41012, + "ĠMerch": 41013, + "Ġether": 41014, + "Elizabeth": 41015, + "Chip": 41016, + "relevant": 41017, + "URA": 41018, + "Ġantioxidant": 41019, + "ĠChron": 41020, + "Ġtheological": 41021, + "HCR": 41022, + "ruits": 41023, + "Body": 41024, + "enezuel": 41025, + "Few": 41026, + "adder": 41027, + "Ġinducing": 41028, + "ĠDarth": 41029, + "Ġimplicitly": 41030, + "Ġoverfl": 41031, + "Ġrelics": 41032, + "Must": 41033, + "ĠAnswers": 41034, + "Ġretina": 41035, + "ĠSlowly": 41036, + "ĠShib": 41037, + "software": 41038, + "Ġ\"\"": 41039, + "hack": 41040, + "Apart": 41041, + "told": 41042, + "Ger": 41043, + "Civil": 41044, + "problem": 41045, + "Ġslang": 41046, + "Ġtactile": 41047, + "Ġtabl": 41048, + "ĠAscension": 41049, + "Ġhumankind": 41050, + "Howard": 41051, + "rescent": 41052, + "ĠReleases": 41053, + "arijuana": 41054, + "Christopher": 41055, + "ĠWarden": 41056, + "blogspot": 41057, + "ĠVari": 41058, + "idency": 41059, + "ĠHandler": 41060, + "Round": 41061, + "MJ": 41062, + "Ġrhyth": 41063, + "Tai": 41064, + "terson": 41065, + "Ġ,\"": 41066, + "portation": 41067, + "ĠOrbital": 41068, + "Ġfantas": 41069, + "Ġattribut": 41070, + "Ġdiagram": 41071, + "atech": 41072, + "1992": 41073, + "ibl": 41074, + "Woman": 41075, + "ternally": 41076, + "Days": 41077, + "Ġdebunk": 41078, + "ĠPhant": 41079, + "ĠOath": 41080, + "sharp": 41081, + "Ġclaws": 41082, + "Lots": 41083, + "Incre": 41084, + "Aff": 41085, + "hooting": 41086, + "rect": 41087, + "Ġaltru": 41088, + "Ġwors": 41089, + "Ġtho": 41090, + "Ġ349": 41091, + "clusions": 41092, + "Ġpseudonym": 41093, + "Bec": 41094, + "Ġphosphorus": 41095, + "ivic": 41096, + "Ġ348": 41097, + "otent": 41098, + "Ġub": 41099, + "Ġcoales": 41100, + "regate": 41101, + "Ġ1870": 41102, + "Ġglide": 41103, + 
"treated": 41104, + "ĠSymb": 41105, + "Ġenchant": 41106, + "Besides": 41107, + "stocks": 41108, + "Ġ388": 41109, + "--------------": 41110, + "interpret": 41111, + "ouple": 41112, + "Ġdrawback": 41113, + "ĠRevised": 41114, + "Ġanat": 41115, + "Ġpsychosis": 41116, + "ب": 41117, + "Ġdiffuse": 41118, + "Ġaffidav": 41119, + "elve": 41120, + "amination": 41121, + "ĠTackle": 41122, + "hunter": 41123, + "env": 41124, + "Ġchests": 41125, + "Ġsubter": 41126, + "Ġconquest": 41127, + "Ġfidelity": 41128, + "Ġinfringing": 41129, + "opathic": 41130, + "ĠGrip": 41131, + "ĠKeyboard": 41132, + "Ġobjectionable": 41133, + "Ġmetabol": 41134, + "ĠGö": 41135, + "Room": 41136, + "...)": 41137, + "KEN": 41138, + "assic": 41139, + "Ġgeop": 41140, + "Tro": 41141, + "Ġcursing": 41142, + "Ġdile": 41143, + "Ġultraviolet": 41144, + "inarily": 41145, + "Ġdistilled": 41146, + "sect": 41147, + "ĠShooter": 41148, + "uckles": 41149, + "Ġdistortions": 41150, + "Map": 41151, + "Doctor": 41152, + "Ġinstalls": 41153, + "oire": 41154, + "Ġstarch": 41155, + "ociation": 41156, + "Lev": 41157, + "Ġscripture": 41158, + "Ġsalient": 41159, + "ilitating": 41160, + "wb": 41161, + "ĠSov": 41162, + "ĠDamn": 41163, + "Grey": 41164, + "Ġ980": 41165, + "Ġjung": 41166, + "Ġlicking": 41167, + "029": 41168, + "ĠDian": 41169, + "ĠBabylon": 41170, + "к": 41171, + "ĠRomantic": 41172, + "Ġguesses": 41173, + "ĠFren": 41174, + "Generally": 41175, + "ultural": 41176, + "istence": 41177, + "Ġiniti": 41178, + "Ġ341": 41179, + "ĠSlave": 41180, + "ultan": 41181, + "ĠTrash": 41182, + "ĠEmpty": 41183, + "ĠHundred": 41184, + "ĠDirective": 41185, + "Anderson": 41186, + "Advertisement": 41187, + "RH": 41188, + "ĠOo": 41189, + "ĠHik": 41190, + "peg": 41191, + "Sup": 41192, + "ĠXT": 41193, + "Ġencrypt": 41194, + "selage": 41195, + "ĠThrone": 41196, + "Ġconsecut": 41197, + "Li": 41198, + "ĠVirus": 41199, + "ĠCookies": 41200, + "SHIP": 41201, + "Ġflavorful": 41202, + "odynamics": 41203, + "animal": 41204, + "spread": 41205, + "ĠIPCC": 41206, + "jobs": 41207, + "ernand": 41208, + "ĠHaunted": 41209, + "Ġintolerable": 41210, + "ĠLAR": 41211, + "ixtape": 41212, + "Ġneur": 41213, + "Ġcausal": 41214, + "ĠPsychiatry": 41215, + "ĠVim": 41216, + "Ġgenomic": 41217, + "duration": 41218, + "ĠUsername": 41219, + "ategy": 41220, + "Ġunic": 41221, + "ĠKILL": 41222, + "blooded": 41223, + "Ġcaucuses": 41224, + "ĠPOLITICO": 41225, + "Spanish": 41226, + "Ġobedience": 41227, + "Ġinconven": 41228, + "MAT": 41229, + "Ġbends": 41230, + "ĠImprovements": 41231, + "Ġrelig": 41232, + "ĠForth": 41233, + "ĠLumia": 41234, + "uces": 41235, + "Ġunim": 41236, + "ĠStatistical": 41237, + "kb": 41238, + "auntlet": 41239, + "ĠDisco": 41240, + "ĠInstruction": 41241, + "ooo": 41242, + "ĠDictionary": 41243, + "culated": 41244, + "Adv": 41245, + "ĠAvatar": 41246, + "ictional": 41247, + "Ġcentr": 41248, + "ifles": 41249, + "orks": 41250, + "skill": 41251, + "Ġlatex": 41252, + "ĠPagan": 41253, + "Ġdevast": 41254, + "Ġprol": 41255, + "896": 41256, + "Product": 41257, + "968": 41258, + "Ġfrench": 41259, + "083": 41260, + "ĠCluster": 41261, + "cloth": 41262, + "ĠFilter": 41263, + "ĠDisorders": 41264, + "etimes": 41265, + "Ġinstinctively": 41266, + "ĠBritann": 41267, + "Ġaft": 41268, + "ĠVict": 41269, + "Ġâĺħ": 41270, + "Ġperverse": 41271, + "Ġcontraceptives": 41272, + "ĠHannibal": 41273, + "escap": 41274, + "ĠApostle": 41275, + "ĠXiao": 41276, + "ĠMagnum": 41277, + "Ġphosphate": 41278, + "Ġ399": 41279, + "utable": 41280, + "Ġsten": 41281, + "Ġwearer": 41282, + "Ġsmug": 41283, + "ĠInfluence": 41284, + "Ġ384": 
41285, + "Truth": 41286, + "struction": 41287, + "Ġmaniac": 41288, + "ĠMagnetic": 41289, + "ousands": 41290, + "Ġsemen": 41291, + "dir": 41292, + "ĠTornado": 41293, + "Ġexplos": 41294, + "1995": 41295, + "Xi": 41296, + "Steel": 41297, + "057": 41298, + "Barn": 41299, + "Fan": 41300, + "ĠChatt": 41301, + "Chem": 41302, + "ĠFold": 41303, + "bees": 41304, + "1080": 41305, + "ĠMaze": 41306, + "ierre": 41307, + "oeuv": 41308, + "Cand": 41309, + "odium": 41310, + "mmm": 41311, + "ereo": 41312, + "Ġreactionary": 41313, + "Ġacidic": 41314, + "ĠRemoval": 41315, + "Ġnont": 41316, + "031": 41317, + "ĠTerminator": 41318, + "ĠVendor": 41319, + "enemy": 41320, + "Ġreconstructed": 41321, + "ĠGalileo": 41322, + "Ġtesters": 41323, + "albeit": 41324, + "uminium": 41325, + "Ġrite": 41326, + "ĠInput": 41327, + "committee": 41328, + "Ġjour": 41329, + "gements": 41330, + "Ġgerm": 41331, + "Dick": 41332, + "ĠRequirements": 41333, + "omsday": 41334, + "Î": 41335, + "ISSION": 41336, + "Ġmolded": 41337, + "Ġrye": 41338, + "Attorney": 41339, + "population": 41340, + "Ġrepet": 41341, + "Sync": 41342, + "breaks": 41343, + "Ġbanished": 41344, + "Ġraspberry": 41345, + "Ġammo": 41346, + "Ġorthodox": 41347, + "Ġwebcam": 41348, + "ĠAsc": 41349, + "vl": 41350, + "1989": 41351, + "Ġdiscipl": 41352, + "Ġmoreover": 41353, + "Ġexplodes": 41354, + "1960": 41355, + "Ġpropositions": 41356, + "Protect": 41357, + "Ġsexes": 41358, + "physical": 41359, + "ĠAthena": 41360, + "ocent": 41361, + "ĠGothic": 41362, + "ĠRacial": 41363, + "istani": 41364, + "Ġhelium": 41365, + "ĠPresumably": 41366, + "Ġperman": 41367, + "becue": 41368, + "ĠHW": 41369, + "rued": 41370, + "ĠCNS": 41371, + "DEP": 41372, + "ĠManifest": 41373, + "2500": 41374, + "ĠMyst": 41375, + "Economic": 41376, + "Prot": 41377, + "Ġledge": 41378, + "Ġimitate": 41379, + "ĠTotally": 41380, + "ĠBeaut": 41381, + "OIL": 41382, + "Ġ1440": 41383, + "Moscow": 41384, + "ĠSets": 41385, + "merga": 41386, + "Ġlesbians": 41387, + "Walker": 41388, + "Move": 41389, + "ĠSOM": 41390, + "ĠPsy": 41391, + "strument": 41392, + "Ġiter": 41393, + "ĠTosh": 41394, + "oola": 41395, + "ĠAntiqu": 41396, + "ĠShining": 41397, + "Ġobservational": 41398, + "VW": 41399, + "rophe": 41400, + "034": 41401, + "Ġcontiguous": 41402, + "Ġstarve": 41403, + "sure": 41404, + "Ġnegate": 41405, + "Ġmindless": 41406, + "tf": 41407, + "Ġdownwards": 41408, + "046": 41409, + "riors": 41410, + "Ġreverted": 41411, + "ĠAthe": 41412, + "Bra": 41413, + "eah": 41414, + "Rachel": 41415, + "Hung": 41416, + "Join": 41417, + "ĠRaces": 41418, + "Ġmutant": 41419, + "Ġuncond": 41420, + "Ġusability": 41421, + "NESS": 41422, + "haust": 41423, + "036": 41424, + "Ġobscurity": 41425, + "Ġimperialism": 41426, + "Ġemitting": 41427, + "Ġideologically": 41428, + "ĠIro": 41429, + "erva": 41430, + "ĠIzzy": 41431, + "ĠLevels": 41432, + "onym": 41433, + "ĠConspiracy": 41434, + "ĠSapphire": 41435, + "Ul": 41436, + "Ġhuh": 41437, + "ochem": 41438, + "Ġbehaves": 41439, + "ĠMesh": 41440, + "Ark": 41441, + "Ġvec": 41442, + "ĠActions": 41443, + "Ġdistinguishing": 41444, + "ĠTsarnaev": 41445, + "ĠEndurance": 41446, + "ederation": 41447, + "itant": 41448, + "Ġstreetcar": 41449, + "041": 41450, + "ĠAval": 41451, + "ĠCompanion": 41452, + "ĠCartoon": 41453, + "Ġcalculus": 41454, + "993": 41455, + "eq": 41456, + "ĠVanilla": 41457, + "MAC": 41458, + "wolves": 41459, + "fg": 41460, + "Ġfermentation": 41461, + "Ġinformants": 41462, + "Ġsudo": 41463, + "Ġperipher": 41464, + "Ġindign": 41465, + "parts": 41466, + "detail": 41467, + "femin": 41468, + "blade": 41469, + 
"Ġinserts": 41470, + "Ġoffsets": 41471, + "Ġantidepressants": 41472, + "Ġphr": 41473, + "Ġresultant": 41474, + "biology": 41475, + "Ġacquies": 41476, + "UFF": 41477, + "****************": 41478, + "ĠPenalty": 41479, + "Ġrever": 41480, + "heric": 41481, + "ĠShadows": 41482, + "command": 41483, + "Ġreprint": 41484, + "089": 41485, + "empty": 41486, + "ĠTAG": 41487, + "stim": 41488, + "FK": 41489, + "Ġkins": 41490, + "uggle": 41491, + "imura": 41492, + "wit": 41493, + "Kill": 41494, + "Beck": 41495, + "Ocean": 41496, + "Ġlabyrinth": 41497, + "ĠNorse": 41498, + "IENCE": 41499, + "Ġ+++": 41500, + "DoS": 41501, + "gm": 41502, + "Ġbarbar": 41503, + "ĠCeres": 41504, + "Ġhashing": 41505, + "eworthy": 41506, + "Ġrecite": 41507, + "Ġelectrodes": 41508, + "Ġconformity": 41509, + "response": 41510, + "olate": 41511, + "Ġ357": 41512, + "Snap": 41513, + "Crime": 41514, + "Ġpointer": 41515, + "ĠTIT": 41516, + "Ġdistinctions": 41517, + "Ġ427": 41518, + "ĠÙĪ": 41519, + "abases": 41520, + "Mars": 41521, + "ĠSpiritual": 41522, + "Ġimpuls": 41523, + "Philadelphia": 41524, + "1994": 41525, + "Ġcunning": 41526, + "Ġfram": 41527, + "Ġinco": 41528, + "Ġomnip": 41529, + "imize": 41530, + "ervative": 41531, + "Gy": 41532, + "Drug": 41533, + "Ġcarniv": 41534, + "ĠSailor": 41535, + "download": 41536, + "ĠBeetle": 41537, + "ĠEarthqu": 41538, + "izontal": 41539, + "Alan": 41540, + "Nice": 41541, + "Prior": 41542, + "MAG": 41543, + "Ġautobi": 41544, + "ĠBrill": 41545, + "Ġpredominant": 41546, + "ĠMessiah": 41547, + "REM": 41548, + "ĠSlip": 41549, + "ĠWebs": 41550, + "ademic": 41551, + "<": 41552, + "ĠVessel": 41553, + "vari": 41554, + "Code": 41555, + "Ġbeetle": 41556, + "projects": 41557, + "BAT": 41558, + "Ġpsychotic": 41559, + "Ġunderside": 41560, + "Ġrefute": 41561, + "Considering": 41562, + "kees": 41563, + "wd": 41564, + "priority": 41565, + "Ġtwentieth": 41566, + "Ġatheist": 41567, + "amina": 41568, + "Ġeuphem": 41569, + "Ġtripod": 41570, + "ĠTrayvon": 41571, + "ĠNON": 41572, + "2200": 41573, + "ĠNPC": 41574, + "ependence": 41575, + "ĠMHz": 41576, + "ĠBung": 41577, + "Ġpane": 41578, + "Ġaboriginal": 41579, + "ĠPLUS": 41580, + "igers": 41581, + "ĠSexy": 41582, + "MF": 41583, + "Chall": 41584, + "Ay": 41585, + "ilingual": 41586, + "adj": 41587, + "Ġfrown": 41588, + "successful": 41589, + "stack": 41590, + "Ġic": 41591, + "ĠSeah": 41592, + "Ġconsequ": 41593, + "bugs": 41594, + "ĠScand": 41595, + "ĠCurve": 41596, + "Nob": 41597, + "ĠHoo": 41598, + "ĠKissinger": 41599, + "ĠTimeline": 41600, + "Ġmt": 41601, + "Description": 41602, + "YP": 41603, + "ĠInstallation": 41604, + "levision": 41605, + "Ġanthropology": 41606, + "itzerland": 41607, + "iaries": 41608, + "kward": 41609, + "robat": 41610, + "Ġcarbohydrate": 41611, + "Phot": 41612, + "оÐ": 41613, + "ĠSQL": 41614, + "Disc": 41615, + "Ġdataset": 41616, + "ynski": 41617, + "Ġfiat": 41618, + "ĠDres": 41619, + "ĠFavor": 41620, + "ĠHalls": 41621, + "Alt": 41622, + "PART": 41623, + "Spider": 41624, + "Ġdisabling": 41625, + "RG": 41626, + "Ward": 41627, + "aturation": 41628, + "Ġwillfully": 41629, + "Ġlockout": 41630, + "ĠShutdown": 41631, + "956": 41632, + "Ġcommunists": 41633, + "Against": 41634, + "Ore": 41635, + "ĠRik": 41636, + "ĠASD": 41637, + "ĠOnion": 41638, + "Ġparticulars": 41639, + "Analy": 41640, + "checked": 41641, + "selected": 41642, + "romy": 41643, + "ĠAkira": 41644, + "Ġcongr": 41645, + "Choice": 41646, + "Ġbos": 41647, + "organisms": 41648, + "Ġfrowned": 41649, + "Tok": 41650, + "Bir": 41651, + "ĠScrib": 41652, + "Ġrealms": 41653, + "Ġcoercive": 41654, + 
"1993": 41655, + "021": 41656, + "âĢĵâĢĵ": 41657, + "athetic": 41658, + "rior": 41659, + "Ġfolly": 41660, + "ĠAMERICA": 41661, + "Ġcassette": 41662, + "953": 41663, + "Ġabsorbs": 41664, + "043": 41665, + "quad": 41666, + "''.": 41667, + "ĠExtract": 41668, + "Ġ424": 41669, + "Whit": 41670, + "Dun": 41671, + "Ġexerted": 41672, + "Ġbrethren": 41673, + "ĠChronicles": 41674, + "eric": 41675, + "Mot": 41676, + "Ġendings": 41677, + "piration": 41678, + "Ġpredetermined": 41679, + "ĠAirl": 41680, + "Ġgasp": 41681, + "Ġ367": 41682, + "Ġexclaim": 41683, + "cation": 41684, + "sort": 41685, + "idden": 41686, + "missive": 41687, + "ع": 41688, + "oice": 41689, + "same": 41690, + "Ott": 41691, + "Ġscatter": 41692, + "Flight": 41693, + "ĠTOD": 41694, + "Stra": 41695, + "amia": 41696, + "IZE": 41697, + "Ġcompressor": 41698, + "ixels": 41699, + "lethal": 41700, + "ĠExperimental": 41701, + "Ing": 41702, + "knife": 41703, + "Ġvanishing": 41704, + "ĠRequired": 41705, + "Stat": 41706, + "ĠPlex": 41707, + "spection": 41708, + "ĠBakr": 41709, + "Amazing": 41710, + "Ġbreaths": 41711, + "rots": 41712, + "OSP": 41713, + "Ġ840": 41714, + "Wars": 41715, + "OGR": 41716, + "Ġ372": 41717, + "ĠKhe": 41718, + "inous": 41719, + "lightly": 41720, + "ĠRounds": 41721, + "Ġrefinement": 41722, + "property": 41723, + "Ġmetaph": 41724, + "oultry": 41725, + "istor": 41726, + "Ġintestine": 41727, + "eus": 41728, + "ĠWilhelm": 41729, + "ĠBane": 41730, + "emption": 41731, + "oubtedly": 41732, + "ĠVirtue": 41733, + "'),": 41734, + "Ħ¢": 41735, + "Ġappar": 41736, + "ĠTranslation": 41737, + "Quite": 41738, + "Ġphysicists": 41739, + "Ġpriesthood": 41740, + "Ġallowable": 41741, + "Saint": 41742, + "OSED": 41743, + "bind": 41744, + "Ġtorches": 41745, + "osexual": 41746, + "Cruz": 41747, + "ertility": 41748, + "ĠAES": 41749, + "Ġascended": 41750, + "Ġmuzzle": 41751, + "Ġelectors": 41752, + "ĠKrug": 41753, + "Ġcc": 41754, + "classic": 41755, + "ĠMace": 41756, + "Å«": 41757, + "ĠâĢ¦\"": 41758, + "ĠTEST": 41759, + "gomery": 41760, + "Person": 41761, + "Ġtranslations": 41762, + "ĠDys": 41763, + "ĠConsent": 41764, + "Ġ361": 41765, + "alos": 41766, + "Ġallerg": 41767, + "ĠWast": 41768, + "ĠChecks": 41769, + "cerning": 41770, + "Ġlizard": 41771, + "Ġrevolutions": 41772, + "Ġtether": 41773, + "Ġminimized": 41774, + "ĠReverse": 41775, + "itely": 41776, + "iguous": 41777, + "athing": 41778, + "Flow": 41779, + "Moving": 41780, + "Ġ409": 41781, + "047": 41782, + "Ġsnug": 41783, + "Nich": 41784, + "Ġcartridge": 41785, + "YL": 41786, + "Ġforwarding": 41787, + "umerous": 41788, + "ĠAbedin": 41789, + "iolet": 41790, + "tick": 41791, + "ĠTransform": 41792, + "Grant": 41793, + "Ġsubtitles": 41794, + "ĠEmin": 41795, + "ghost": 41796, + "ĠKurd": 41797, + "Ġfireball": 41798, + "compatible": 41799, + "Ġprojectiles": 41800, + "amorph": 41801, + "ĠSatisf": 41802, + "Ġquirks": 41803, + "Ġrecept": 41804, + "spective": 41805, + "Ġgraphical": 41806, + "ĠPicard": 41807, + "ĠAuthent": 41808, + "ĠSponge": 41809, + "Army": 41810, + "ĠLumin": 41811, + "ĠSOME": 41812, + "Ġsolitude": 41813, + "ĠSHOULD": 41814, + "ĠFasc": 41815, + "opez": 41816, + "types": 41817, + "gallery": 41818, + "OLOGY": 41819, + "shake": 41820, + "Ġ369": 41821, + "Ġreused": 41822, + "Ġ378": 41823, + "Ġexorc": 41824, + "Ġdocs": 41825, + "Yu": 41826, + "ĠGOD": 41827, + "ocrine": 41828, + "location": 41829, + "fif": 41830, + "Grid": 41831, + "Ġpowd": 41832, + "Ġ'[": 41833, + "Ġposterior": 41834, + "Thompson": 41835, + "Table": 41836, + "oslov": 41837, + "ĠGoddess": 41838, + "odon": 41839, + "ĠSTD": 41840, + 
"Ġresponsiveness": 41841, + "stab": 41842, + "absolute": 41843, + "Enough": 41844, + "ĠEssence": 41845, + "ĠUpgrade": 41846, + "hematically": 41847, + "Subscribe": 41848, + "alsh": 41849, + "repl": 41850, + "Ġselector": 41851, + "ĠLength": 41852, + "Ġtemporal": 41853, + "Tele": 41854, + "ocalyptic": 41855, + "ĠDeaths": 41856, + "rl": 41857, + "Target": 41858, + "ĠOrn": 41859, + "ongh": 41860, + "Ġ1909": 41861, + "Quest": 41862, + "Place": 41863, + "ĠDisabled": 41864, + "Ġascending": 41865, + "giene": 41866, + "ĠMSI": 41867, + "ivil": 41868, + "Ġcaval": 41869, + "Ġintermitt": 41870, + "Ġsalts": 41871, + "Apr": 41872, + "059": 41873, + "ĠKeeper": 41874, + "emis": 41875, + "ĠEternal": 41876, + "SER": 41877, + "estones": 41878, + "Ġrudimentary": 41879, + "Ġpooled": 41880, + "ĠAlright": 41881, + "Ġdiagrams": 41882, + "ydia": 41883, + "Jacob": 41884, + "Ġarchitectures": 41885, + "ĠUSPS": 41886, + "Ġfootnote": 41887, + "ĠBrav": 41888, + "ĠLeopard": 41889, + "Ġvirtuous": 41890, + "ploma": 41891, + "ĠHIP": 41892, + "Ġhorizontally": 41893, + "olith": 41894, + "Prop": 41895, + "ĠApocalypse": 41896, + "Syria": 41897, + "ĠShowdown": 41898, + "constitutional": 41899, + "Independent": 41900, + "ĠMiliband": 41901, + "ĠTracks": 41902, + "adle": 41903, + "ĠESL": 41904, + "ĠFIGHT": 41905, + "Ġjohn": 41906, + "é": 41907, + "benef": 41908, + "eware": 41909, + "ĠTABLE": 41910, + "ĠVeg": 41911, + "ainers": 41912, + "Ġresolves": 41913, + "Warren": 41914, + "ĠRanked": 41915, + "possibly": 41916, + "bian": 41917, + "simple": 41918, + "Ġuniformly": 41919, + "ĠSlash": 41920, + "otton": 41921, + "ĠAbsent": 41922, + "agically": 41923, + "ĠPieces": 41924, + "Station": 41925, + "ĠBeware": 41926, + "ĠDiscrimination": 41927, + "Ġponies": 41928, + "Import": 41929, + "utory": 41930, + "ĠParas": 41931, + "Phoenix": 41932, + "Lat": 41933, + "UTC": 41934, + "push": 41935, + "astically": 41936, + "urrent": 41937, + "untarily": 41938, + "Ġparanormal": 41939, + "Ġglanced": 41940, + "Ġmanifestations": 41941, + "ĠNeuroscience": 41942, + "irgin": 41943, + "ROM": 41944, + "Ġ($)": 41945, + "Ġ379": 41946, + "missing": 41947, + "Ġmercenaries": 41948, + "Ġenumer": 41949, + "ĠShant": 41950, + "Ws": 41951, + "wered": 41952, + "Ġbuffs": 41953, + "ultane": 41954, + "ĠRohing": 41955, + "igger": 41956, + "Ring": 41957, + "Ġmanifests": 41958, + "Fat": 41959, + "ĠReduced": 41960, + "ĠMinerva": 41961, + "uart": 41962, + "ĠArmory": 41963, + "orange": 41964, + "igible": 41965, + "Ġphysiology": 41966, + "Ut": 41967, + "Ġparchment": 41968, + "ĠFired": 41969, + "trap": 41970, + "oggle": 41971, + "mson": 41972, + "ĠPoster": 41973, + "Ġbount": 41974, + "import": 41975, + "maximum": 41976, + "Ġ422": 41977, + "ĠFemin": 41978, + "Ġnodding": 41979, + "Ġinscription": 41980, + "Results": 41981, + "GRE": 41982, + "icative": 41983, + "Ġcognition": 41984, + "Ġions": 41985, + "ĠBite": 41986, + "Ġneutron": 41987, + "Ġduplication": 41988, + "ĠZIP": 41989, + "ĠQuit": 41990, + "Ġgrasping": 41991, + "ĠDaylight": 41992, + "Ġlayouts": 41993, + "CLA": 41994, + "reason": 41995, + "ĠHuh": 41996, + "Ġpige": 41997, + "ĠBomber": 41998, + "Produ": 41999, + "Ġgland": 42000, + "ĠAbsolute": 42001, + "writ": 42002, + "Ġmassac": 42003, + "Ġfixation": 42004, + "device": 42005, + "yz": 42006, + "ĠGOT": 42007, + "ĠDying": 42008, + "adjust": 42009, + "grain": 42010, + "Ġdeform": 42011, + "Ġtypew": 42012, + "Ġdagger": 42013, + "ĠTuring": 42014, + "ĠBucc": 42015, + "Heavy": 42016, + "Ġcommod": 42017, + "files": 42018, + "ogeneous": 42019, + "roth": 42020, + "Buff": 42021, + "Ġbookmark": 
42022, + "porary": 42023, + "Medical": 42024, + "Um": 42025, + "Ġtranslucent": 42026, + "ĠAnxiety": 42027, + "ĠCorinthians": 42028, + "optional": 42029, + "PUT": 42030, + "Ġcrucifix": 42031, + "alloween": 42032, + "ĠVK": 42033, + "Ġblu": 42034, + "ĠCorinth": 42035, + "Mount": 42036, + "Ġmembranes": 42037, + "particip": 42038, + "Ġextraord": 42039, + "Ġstimulated": 42040, + "leneck": 42041, + "Ġspecifies": 42042, + "Sin": 42043, + "lash": 42044, + "Edited": 42045, + "Ġfused": 42046, + "Nin": 42047, + "ĠBungie": 42048, + "ĠTooth": 42049, + "WATCH": 42050, + "Nav": 42051, + "Initially": 42052, + "+)": 42053, + "ĠAncest": 42054, + "Ġtransmitter": 42055, + "ĠVolks": 42056, + "ezvous": 42057, + "ĠNirvana": 42058, + "ĠCald": 42059, + "font": 42060, + "Und": 42061, + "remlin": 42062, + "ichever": 42063, + "ĠHeal": 42064, + "shall": 42065, + "Ġattribution": 42066, + "authorized": 42067, + "ĠINTO": 42068, + "acteria": 42069, + "ĠTsu": 42070, + "ĠPlane": 42071, + "iphate": 42072, + "igraph": 42073, + "chev": 42074, + "Ġinverse": 42075, + "ifest": 42076, + "Players": 42077, + "!!\"": 42078, + "ĠContrast": 42079, + "1984": 42080, + "Ġsevent": 42081, + "colour": 42082, + "ĠRational": 42083, + "virtual": 42084, + "Ġfec": 42085, + "ĠETH": 42086, + "ĠPru": 42087, + "Õ": 42088, + "asma": 42089, + "Cur": 42090, + "Ġassigns": 42091, + "Ġridic": 42092, + "Todd": 42093, + "ulton": 42094, + "ĠDefendant": 42095, + "opsis": 42096, + "Ġpercentile": 42097, + "shr": 42098, + "wagen": 42099, + "Ġ368": 42100, + "SIGN": 42101, + "Screen": 42102, + "reprene": 42103, + "Ġerection": 42104, + "ĠFreak": 42105, + "ĠStard": 42106, + "stained": 42107, + "Ġcla": 42108, + "fet": 42109, + "ramids": 42110, + "QL": 42111, + "avorable": 42112, + "ĠTCP": 42113, + "nown": 42114, + "ulence": 42115, + "similar": 42116, + "Ġlinkage": 42117, + "ercise": 42118, + "Path": 42119, + "LECT": 42120, + "ĠCollections": 42121, + "ĠModule": 42122, + "Ġcs": 42123, + "Current": 42124, + "Ġmono": 42125, + "ĠAlv": 42126, + "ĠDude": 42127, + "Ġhypers": 42128, + "Ġ2600": 42129, + "surface": 42130, + "Ġpredictor": 42131, + "ĠColomb": 42132, + "Prof": 42133, + "anqu": 42134, + "natal": 42135, + "Ġadultery": 42136, + "ĠGenerations": 42137, + "clerosis": 42138, + "Ġ371": 42139, + "Ġenlightenment": 42140, + "onomic": 42141, + "Ġsatir": 42142, + "ĠBasics": 42143, + "Graham": 42144, + "ĠRove": 42145, + "Ġadul": 42146, + "Shut": 42147, + "ocious": 42148, + "Ġhandc": 42149, + "BW": 42150, + "ĠCognitive": 42151, + "visible": 42152, + "Ġinev": 42153, + "Ġ978": 42154, + "ĠSupported": 42155, + "Ġarrays": 42156, + "Ġalienation": 42157, + "Weight": 42158, + "ĠkWh": 42159, + "Ġwarped": 42160, + "Ġ386": 42161, + "lance": 42162, + "Ġherpes": 42163, + "ĠPHP": 42164, + "Ġclaimant": 42165, + "uitive": 42166, + "Ġpussy": 42167, + "Ġcorpus": 42168, + "ĠAo": 42169, + "Qual": 42170, + "ĠXVI": 42171, + "requ": 42172, + "Ġsympt": 42173, + "mination": 42174, + "Ġhairy": 42175, + "ĠBattles": 42176, + "owntown": 42177, + "Roberts": 42178, + "Ġnec": 42179, + "ablo": 42180, + "AMD": 42181, + "internet": 42182, + "Tar": 42183, + "direction": 42184, + "ouston": 42185, + "ĠGlock": 42186, + "ĠYanukovych": 42187, + "ogens": 42188, + "rogram": 42189, + "otype": 42190, + "ĠPt": 42191, + "tenance": 42192, + "Ġaromatic": 42193, + "oxin": 42194, + "Vert": 42195, + "Ġsociop": 42196, + "cible": 42197, + "Db": 42198, + "________________": 42199, + "Third": 42200, + "ĠShips": 42201, + "!.": 42202, + "expensive": 42203, + "WOR": 42204, + "primary": 42205, + "Ġ666": 42206, + "Ġdecaying": 42207, + 
"Ġclustered": 42208, + "Ġbeetles": 42209, + "ĠHogwarts": 42210, + "Ġheaders": 42211, + "ĠJudah": 42212, + "Ġscen": 42213, + "Ġcosmos": 42214, + "ĠGenetic": 42215, + "blems": 42216, + "Ġfeeble": 42217, + "NOW": 42218, + "NSA": 42219, + "Ġadminist": 42220, + "ĠDocker": 42221, + "portion": 42222, + "gression": 42223, + "Ġ1904": 42224, + "heard": 42225, + "Ġinhab": 42226, + "ĠLeaves": 42227, + "Ġcortisol": 42228, + "atinum": 42229, + "unknown": 42230, + "ĠObserv": 42231, + "ĠPhilosophy": 42232, + "Ide": 42233, + "Ġcopyrighted": 42234, + "surv": 42235, + "ĠLocations": 42236, + "Ġglands": 42237, + "ĠKnife": 42238, + "ĠEmber": 42239, + "ĠUnicorn": 42240, + "Ġhaste": 42241, + "Ġkinderg": 42242, + "ĠTerrit": 42243, + "ĠKoran": 42244, + "Ġaval": 42245, + "addon": 42246, + "ĠNero": 42247, + "\"]": 42248, + "Ġ392": 42249, + "comfort": 42250, + "Ġclothed": 42251, + "ashtra": 42252, + "mode": 42253, + "Ġ??": 42254, + "!\",": 42255, + "Ġknob": 42256, + "EMP": 42257, + "norm": 42258, + "ĠAgo": 42259, + "RECT": 42260, + "Denver": 42261, + "Ġ1907": 42262, + "ĠBombs": 42263, + "Sche": 42264, + "Ġtriangular": 42265, + "Ġperv": 42266, + "rises": 42267, + "Jes": 42268, + "Ġcalibration": 42269, + "Ġts": 42270, + "Same": 42271, + "ĠAxe": 42272, + "ĠMei": 42273, + "multi": 42274, + "Ġexerc": 42275, + "orney": 42276, + "Ware": 42277, + "abul": 42278, + "ĠFior": 42279, + "Eventually": 42280, + "ĠGrizz": 42281, + "Past": 42282, + "married": 42283, + "Ġscram": 42284, + "ĠCache": 42285, + "posure": 42286, + "Ġheav": 42287, + "ĠShirt": 42288, + "powder": 42289, + "complex": 42290, + "Doc": 42291, + "arus": 42292, + "Pi": 42293, + "Ġcurv": 42294, + "ĠTopic": 42295, + "Ġ.)": 42296, + "Ġwills": 42297, + "philis": 42298, + "gui": 42299, + "leground": 42300, + "Eth": 42301, + "Strike": 42302, + "Kid": 42303, + "Ġdelegated": 42304, + "Soon": 42305, + "Ġwast": 42306, + "gage": 42307, + "Ġprosecut": 42308, + "Ġ374": 42309, + "opolis": 42310, + "chest": 42311, + "ensation": 42312, + "Ġredes": 42313, + "Ġpresum": 42314, + "Portland": 42315, + "Ġannihil": 42316, + "yssey": 42317, + "Ġforks": 42318, + "Ġvitro": 42319, + "walker": 42320, + "ĠPsal": 42321, + "ĠStealth": 42322, + "Quick": 42323, + "ĠBaghd": 42324, + "ĠDrift": 42325, + "//": 42326, + "Ġinvincible": 42327, + "ĠGAM": 42328, + "Ġcastles": 42329, + "Ġbondage": 42330, + "ĠBalloon": 42331, + "Amid": 42332, + "individual": 42333, + "tis": 42334, + "ĠGuides": 42335, + "xe": 42336, + "Cong": 42337, + "URI": 42338, + "ĠHH": 42339, + "PHOTOS": 42340, + "ĠASIC": 42341, + "burst": 42342, + "ahon": 42343, + "ĠFIX": 42344, + "ilib": 42345, + "Ġ457": 42346, + "ĠLogged": 42347, + "à¹": 42348, + "Creat": 42349, + "inatory": 42350, + "column": 42351, + "ĠAugustus": 42352, + "suggest": 42353, + "pret": 42354, + "ĠParan": 42355, + "Ġsubsistence": 42356, + "wx": 42357, + "×": 42358, + "aleigh": 42359, + "dash": 42360, + "ĠMana": 42361, + "Ko": 42362, + "opausal": 42363, + "Ġbene": 42364, + "ĠSabb": 42365, + "ĠGhosts": 42366, + "Ġ1830": 42367, + "ĠHats": 42368, + "ĠHive": 42369, + "Perfect": 42370, + "Ġsocialists": 42371, + "Ġtumult": 42372, + "EGA": 42373, + "ĠNAME": 42374, + "Android": 42375, + "assembled": 42376, + "phis": 42377, + "Stage": 42378, + "Char": 42379, + "Double": 42380, + "Ġinsign": 42381, + "IED": 42382, + "perial": 42383, + "ĠEMP": 42384, + "mx": 42385, + "Ġskept": 42386, + "Ġwifi": 42387, + "Ġparad": 42388, + "ĠFrequency": 42389, + "Dist": 42390, + "nil": 42391, + "iots": 42392, + "å": 42393, + "Message": 42394, + "Furthermore": 42395, + "Ġhideous": 42396, + "ĠLDL": 42397, 
+ "ĠFault": 42398, + "ĠDimensions": 42399, + "ĠImplement": 42400, + "fram": 42401, + "Ġamaz": 42402, + "ĠIndones": 42403, + "ĠTile": 42404, + "Ġlar": 42405, + "gc": 42406, + "Ġcorrelate": 42407, + "Ġensl": 42408, + "mite": 42409, + "Ġhomosexuals": 42410, + "Ġagric": 42411, + "8000": 42412, + "Ġcuring": 42413, + "rament": 42414, + "Ġrecons": 42415, + "ocene": 42416, + "ENTION": 42417, + "Ġcommunion": 42418, + "ĠFunction": 42419, + "iple": 42420, + "Ġredund": 42421, + "Ġcalibrated": 42422, + "Ġcontribut": 42423, + "ĠHuck": 42424, + "limit": 42425, + "ĠFedora": 42426, + "ĠTsuk": 42427, + "brates": 42428, + "Ġ1903": 42429, + "ozo": 42430, + "visual": 42431, + "ĠDiscipline": 42432, + "chains": 42433, + "ĠOCD": 42434, + "Ġexpended": 42435, + "0002": 42436, + "Ġsty": 42437, + "ĠNightmare": 42438, + "ĠReplace": 42439, + "ounty": 42440, + "fn": 42441, + "1900": 42442, + "ĠEpidem": 42443, + "ĠFW": 42444, + "Ġgul": 42445, + "ĠTomato": 42446, + "ĠPerse": 42447, + "wl": 42448, + "ĠFormation": 42449, + "Scan": 42450, + "cosystem": 42451, + "Brand": 42452, + "Ġ398": 42453, + "Ġcaptives": 42454, + "Ġ×": 42455, + "ESCO": 42456, + "ĠEnder": 42457, + "lesh": 42458, + "ĠAscend": 42459, + "poly": 42460, + "eous": 42461, + "Ġhyster": 42462, + "Murray": 42463, + "phe": 42464, + "Ġradiator": 42465, + "esthes": 42466, + "Ġopin": 42467, + "Ġconspic": 42468, + "intosh": 42469, + "Ġwitchcraft": 42470, + "ĠCFR": 42471, + "ussian": 42472, + "escent": 42473, + "locking": 42474, + "Ġnonsensical": 42475, + "uala": 42476, + "ĠSerial": 42477, + "1991": 42478, + "ĠCalm": 42479, + "containing": 42480, + "Ġstimulates": 42481, + "Ġ448": 42482, + "Pir": 42483, + "ĠâĨĴ": 42484, + "ĠDiver": 42485, + "Ġmanuscripts": 42486, + "ĠGaia": 42487, + "Ñĥ": 42488, + "Learning": 42489, + "Ġnipple": 42490, + "reads": 42491, + "Ġandroid": 42492, + "ĠMeditation": 42493, + "Ġincomprehensible": 42494, + "edded": 42495, + "Ġdescendant": 42496, + "ĠMorty": 42497, + "Luckily": 42498, + "ARCH": 42499, + "ausible": 42500, + "Dig": 42501, + "shared": 42502, + "ĠClip": 42503, + "Ġtrope": 42504, + "Ġnarcissistic": 42505, + "ventures": 42506, + "Ġcuriously": 42507, + "ĠCosmos": 42508, + "Aust": 42509, + "Lay": 42510, + "ĠShard": 42511, + "ĠRecorded": 42512, + "Ġ458": 42513, + "........": 42514, + "Ġperish": 42515, + "ĠExample": 42516, + "luent": 42517, + "Ġapes": 42518, + "ĠHitch": 42519, + "Ġholiest": 42520, + "Ġamplifier": 42521, + "minent": 42522, + "xxxxxxxx": 42523, + "inite": 42524, + "Ġgenomes": 42525, + "ĠGuilty": 42526, + "mult": 42527, + "Ġorc": 42528, + "Ġnipples": 42529, + "Side": 42530, + "Ġlogically": 42531, + "Ġdatasets": 42532, + "ĠTitanium": 42533, + "Ġrotor": 42534, + "undle": 42535, + "handled": 42536, + "nexpected": 42537, + "Ġdw": 42538, + "Ġdiagonal": 42539, + "ĠAnimated": 42540, + "Ġnumbering": 42541, + "Forest": 42542, + "ĠâĨ": 42543, + "Prin": 42544, + "Ġchemically": 42545, + "ĠGithub": 42546, + "Ġaph": 42547, + "ĠFaster": 42548, + "ĠTinker": 42549, + "ikini": 42550, + "Dest": 42551, + "dri": 42552, + "Manufact": 42553, + "isance": 42554, + "Return": 42555, + "Alert": 42556, + "elcome": 42557, + "ĠMMR": 42558, + "Ġresid": 42559, + "ĠLIC": 42560, + "Ġspecificity": 42561, + "zanne": 42562, + "Ġanyways": 42563, + "Ġ426": 42564, + "Scot": 42565, + "astery": 42566, + "Via": 42567, + "ĠBlocks": 42568, + "Ġactivates": 42569, + "Ġabstinence": 42570, + "Ġchronological": 42571, + "Soul": 42572, + "ĠSchne": 42573, + "Ġwatts": 42574, + "AUT": 42575, + "Ġcalcul": 42576, + "Simply": 42577, + "Emb": 42578, + "ceptive": 42579, + "ĠCatholicism": 
42580, + "obook": 42581, + "ĠBits": 42582, + "ĠMbps": 42583, + "Ġindignation": 42584, + "Ġshorthand": 42585, + "Active": 42586, + "ĠLimbaugh": 42587, + "ĠCapcom": 42588, + "adesh": 42589, + "Ġclipping": 42590, + "ĠInstructor": 42591, + "Secret": 42592, + "___": 42593, + "Fer": 42594, + "rawling": 42595, + "ĠReward": 42596, + "Ġweep": 42597, + "Ġmotherboard": 42598, + "Above": 42599, + "metry": 42600, + "ĠPTS": 42601, + "Ġbombard": 42602, + "abetes": 42603, + ".--": 42604, + "Lens": 42605, + "Comb": 42606, + "basic": 42607, + "ĠREALLY": 42608, + "Later": 42609, + "Ġ383": 42610, + "Ġpositional": 42611, + "olesc": 42612, + "Ġcrotch": 42613, + "ĠMDMA": 42614, + "requently": 42615, + "ĠPants": 42616, + "Ġ433": 42617, + "uctor": 42618, + "Ġillumination": 42619, + "ĠÙħ": 42620, + "ocrin": 42621, + "Ġpamph": 42622, + "atio": 42623, + "etc": 42624, + "Ġrestores": 42625, + "ĠProtector": 42626, + "Develop": 42627, + "ĠMew": 42628, + "trop": 42629, + "ĠSlayer": 42630, + "Ti": 42631, + "ĠNotwithstanding": 42632, + "Match": 42633, + "LIST": 42634, + "IDES": 42635, + "ĠThick": 42636, + "Ġdisks": 42637, + "Kin": 42638, + "Ġghetto": 42639, + "ĠObjects": 42640, + "Ġprism": 42641, + "ĠNether": 42642, + "Ġvul": 42643, + "iky": 42644, + "]:": 42645, + "ĠDetail": 42646, + "Ġfucked": 42647, + "!?": 42648, + "anium": 42649, + "Ġlords": 42650, + "ilities": 42651, + "ĠEthnic": 42652, + "static": 42653, + "$$": 42654, + "evidence": 42655, + "Ġmainline": 42656, + "Ġpeasant": 42657, + "ĠEnhance": 42658, + "ĠForced": 42659, + "virt": 42660, + "Ġii": 42661, + "Ġsymm": 42662, + "Ġconverter": 42663, + "ularity": 42664, + "Ġrepent": 42665, + "num": 42666, + "ĠScrew": 42667, + "ĠFTA": 42668, + "Ġmarines": 42669, + "hetto": 42670, + "blow": 42671, + "Ġado": 42672, + "ĠTypical": 42673, + "Ġoverw": 42674, + "ĠBerm": 42675, + "keley": 42676, + "Song": 42677, + "hao": 42678, + "valid": 42679, + "EXT": 42680, + "ĠProvides": 42681, + "âĺħâĺħ": 42682, + "ĠOdin": 42683, + "Shot": 42684, + "Ġgamma": 42685, + "Princ": 42686, + "asonry": 42687, + "ĠAccuracy": 42688, + "Ġcriterion": 42689, + "Ġdescriptive": 42690, + "Gall": 42691, + "gray": 42692, + "ĠCalcul": 42693, + "Ġaxes": 42694, + "ĠCommunists": 42695, + "ĠRebellion": 42696, + "Success": 42697, + "tg": 42698, + "Ġâĺ": 42699, + "Ġmultiplier": 42700, + "ravity": 42701, + "Thus": 42702, + "URL": 42703, + "Ġalternatively": 42704, + "duction": 42705, + "Ġsarcast": 42706, + "ĠCarth": 42707, + "ĠUSL": 42708, + "ĠInvisible": 42709, + "larg": 42710, + "pleted": 42711, + "pathic": 42712, + "Additionally": 42713, + "ĠCao": 42714, + "Ġlatent": 42715, + "ĠSurge": 42716, + "MEN": 42717, + "communications": 42718, + "ĠArray": 42719, + "Pink": 42720, + "commit": 42721, + "isodes": 42722, + "earcher": 42723, + "Ukraine": 42724, + "ĠAnthrop": 42725, + "incial": 42726, + "Ġquotations": 42727, + "adena": 42728, + "Ġwhining": 42729, + "Ġretri": 42730, + "ĠAssass": 42731, + "elligent": 42732, + "ĠPERSON": 42733, + "Py": 42734, + "Send": 42735, + "ĠâĪĴ": 42736, + "DON": 42737, + "Ġwatt": 42738, + "description": 42739, + "POS": 42740, + "Ġrepro": 42741, + "destroy": 42742, + "icidal": 42743, + "Ġmidrange": 42744, + "Ġinfographic": 42745, + "interesting": 42746, + "category": 42747, + "Flash": 42748, + "ĠInvasion": 42749, + "ĠExodus": 42750, + "restricted": 42751, + "Ġinference": 42752, + "dding": 42753, + "mingham": 42754, + "Ġcircumst": 42755, + "Wi": 42756, + "ĠHast": 42757, + "Ġsubjug": 42758, + "Ġwhispering": 42759, + "-.": 42760, + "Ġadren": 42761, + "ĠPattern": 42762, + "BOX": 42763, + 
"ĠEnhancement": 42764, + "Exc": 42765, + "ĠBucket": 42766, + "ĠGUN": 42767, + "deen": 42768, + "ĠHomo": 42769, + "1985": 42770, + "Ġclo": 42771, + "Ġsnippet": 42772, + "Ġ1896": 42773, + "TPP": 42774, + "Seg": 42775, + "success": 42776, + ";\"": 42777, + "ĠMUCH": 42778, + "Author": 42779, + "Ġreplication": 42780, + "Ġhallucinations": 42781, + "Inv": 42782, + "ĠAware": 42783, + "ĠViper": 42784, + "kai": 42785, + "frames": 42786, + "ĠTHANK": 42787, + "ĠSHA": 42788, + "wordpress": 42789, + "Ġbc": 42790, + "CIA": 42791, + "arrison": 42792, + "Ġalloc": 42793, + "ĠAlz": 42794, + "letcher": 42795, + "ĠDaredevil": 42796, + "iversary": 42797, + "Ġmanuals": 42798, + "Catholic": 42799, + "feat": 42800, + "Ġkinetic": 42801, + "JB": 42802, + "yeah": 42803, + "ĠLDS": 42804, + "Ġppm": 42805, + "ĠADC": 42806, + "pring": 42807, + "cence": 42808, + "Ġclasp": 42809, + "Ġsetups": 42810, + "Ġdeity": 42811, + "ĠIndra": 42812, + "ĠWander": 42813, + "Ġantib": 42814, + "Otherwise": 42815, + "ombie": 42816, + "Bitcoin": 42817, + "ipop": 42818, + "expression": 42819, + "Animal": 42820, + "ĠResurrection": 42821, + "ĠMoral": 42822, + "ĠSDK": 42823, + "Ġwretched": 42824, + "ogenous": 42825, + "species": 42826, + "Ġchuckled": 42827, + "Thor": 42828, + "Ġ428": 42829, + "avery": 42830, + "ĠPry": 42831, + "asures": 42832, + "ĠErn": 42833, + "apor": 42834, + "Ġinnumerable": 42835, + "Ġbaptized": 42836, + "ĠExplosive": 42837, + "Ġelves": 42838, + "idges": 42839, + "ĠParadox": 42840, + "Close": 42841, + "aldehyde": 42842, + "construct": 42843, + "Ġvirginity": 42844, + "Poll": 42845, + "assin": 42846, + "Doctors": 42847, + "Pos": 42848, + "NECT": 42849, + "Moreover": 42850, + "Commercial": 42851, + "cknowled": 42852, + "1988": 42853, + "Ġquotation": 42854, + "marriage": 42855, + "ĠBapt": 42856, + "ĠSina": 42857, + "ĠGloves": 42858, + "gian": 42859, + "Ġconfounding": 42860, + "URRENT": 42861, + "Dean": 42862, + "Brew": 42863, + "thur": 42864, + "pty": 42865, + "immune": 42866, + "ĠSQU": 42867, + "Ġcounterfe": 42868, + "rider": 42869, + "Ġinferred": 42870, + "ĠDimension": 42871, + "ĠToad": 42872, + "Ġafterlife": 42873, + "ĠHERO": 42874, + "Indiana": 42875, + "seek": 42876, + "Ġdistinguishes": 42877, + "ĠQur": 42878, + "ĠMethods": 42879, + "combat": 42880, + "Ġcateg": 42881, + "ĠStruggle": 42882, + "teness": 42883, + "liquid": 42884, + "Ġblinking": 42885, + "ĠCONTIN": 42886, + "iae": 42887, + "Ġaerobic": 42888, + "Ġstrugg": 42889, + "Ġegalitarian": 42890, + "hello": 42891, + "orrect": 42892, + "ĠAbandon": 42893, + "Ġferment": 42894, + "Area": 42895, + "idem": 42896, + "ĠMania": 42897, + "Ġjs": 42898, + "ĠBALL": 42899, + "Running": 42900, + "Ġregenerate": 42901, + "iquid": 42902, + "Uh": 42903, + "Crystal": 42904, + "ĠItal": 42905, + "ĠHeavenly": 42906, + "в": 42907, + "CRIPTION": 42908, + "Consumer": 42909, + "dust": 42910, + "amiliar": 42911, + "ĠRhino": 42912, + "Rocket": 42913, + "Ġreversible": 42914, + "kok": 42915, + "ĠSketch": 42916, + "Ġshotguns": 42917, + "apses": 42918, + "Ġdetach": 42919, + "ĠCells": 42920, + "artist": 42921, + "rily": 42922, + "ĠRestore": 42923, + "Scar": 42924, + "Ġevid": 42925, + "Ġspaced": 42926, + "ĠContributions": 42927, + "Ġ418": 42928, + "ĠMystic": 42929, + "Ġobfusc": 42930, + "Russ": 42931, + "wings": 42932, + "Pear": 42933, + "osite": 42934, + "Nusra": 42935, + "urations": 42936, + "ovie": 42937, + "icago": 42938, + "ĠConcepts": 42939, + "Ġstimuli": 42940, + "Ġaroused": 42941, + "aughty": 42942, + "Talking": 42943, + "ĠPrompt": 42944, + "Across": 42945, + "ĠPlaint": 42946, + "Ġbranching": 
42947, + "Thankfully": 42948, + "Original": 42949, + "Esc": 42950, + "ĠTechnician": 42951, + "fleet": 42952, + "usher": 42953, + "Mos": 42954, + "livion": 42955, + "oenix": 42956, + "Ġhr": 42957, + "ibble": 42958, + "Ġindent": 42959, + "ĠFinished": 42960, + "Department": 42961, + "ĠINFO": 42962, + "Movie": 42963, + "++": 42964, + "THING": 42965, + "Ġtimers": 42966, + "rocket": 42967, + "Natural": 42968, + "lime": 42969, + "Ġangular": 42970, + "osure": 42971, + "Ġdynamically": 42972, + "Ġpacif": 42973, + "ĠProcessor": 42974, + "Ġdisgu": 42975, + "Ġmoderators": 42976, + "Ġceases": 42977, + "Ġinertia": 42978, + "Ġpaperback": 42979, + "yton": 42980, + "ĠHuma": 42981, + "Ġprohibitions": 42982, + "Ġgestation": 42983, + "Bomb": 42984, + "termin": 42985, + "Ġcaric": 42986, + "oS": 42987, + "tc": 42988, + "Cop": 42989, + "raved": 42990, + "Ġeighty": 42991, + "ĠEnable": 42992, + "Ġimplementations": 42993, + "Ġconquering": 42994, + "ĠFinder": 42995, + "window": 42996, + "Gra": 42997, + "Ġfonts": 42998, + "laughter": 42999, + "Ġcolonization": 43000, + "ĠDOD": 43001, + ")!": 43002, + ",)": 43003, + "ĠGeral": 43004, + "ĠSpoiler": 43005, + "ĠComponent": 43006, + "Ġgist": 43007, + "hiro": 43008, + "Ġlicens": 43009, + "nesses": 43010, + "Ġkarma": 43011, + "?\".": 43012, + "OPA": 43013, + "Ġsquats": 43014, + "ĠRAND": 43015, + "Ġorally": 43016, + "document": 43017, + "olars": 43018, + "Ġpresumptive": 43019, + "Pers": 43020, + "OAD": 43021, + "ufficient": 43022, + "LESS": 43023, + "Hidden": 43024, + "ORK": 43025, + "xs": 43026, + "Ġmathematician": 43027, + "ĠGloss": 43028, + "Ġannihilation": 43029, + "Ġmanifold": 43030, + "Ry": 43031, + "Thunder": 43032, + "Yan": 43033, + "Activ": 43034, + "Ġworldly": 43035, + "TED": 43036, + "marg": 43037, + "ĠStun": 43038, + "ryce": 43039, + "ĠVG": 43040, + "Isn": 43041, + "ĠCyn": 43042, + "Expl": 43043, + "IRED": 43044, + "Ġcompr": 43045, + "Ġindisc": 43046, + "Boss": 43047, + "()": 43048, + "berman": 43049, + "ĠBegins": 43050, + "ujah": 43051, + "ornia": 43052, + "hetical": 43053, + "Ġcivilizations": 43054, + "Ġfundamentalist": 43055, + "strap": 43056, + "Forward": 43057, + "ettlement": 43058, + "Ġprophetic": 43059, + "glers": 43060, + "bending": 43061, + "Terry": 43062, + "Ġidi": 43063, + "Ġtrunc": 43064, + "Ġcreeps": 43065, + "intel": 43066, + "switch": 43067, + "ailand": 43068, + "Ġinstaller": 43069, + "GOP": 43070, + "Ġ499": 43071, + "ĠParallel": 43072, + "Cru": 43073, + "Ġ\"@": 43074, + "Ġ396": 43075, + "ĠUnlock": 43076, + "Raven": 43077, + "Corn": 43078, + "Ġcircadian": 43079, + "Ġ********************************": 43080, + "iliate": 43081, + "ĠFunctional": 43082, + "Ġpronouns": 43083, + "ĠSatoshi": 43084, + "Ġstim": 43085, + "Gay": 43086, + "Iss": 43087, + "ĠThief": 43088, + "atellite": 43089, + "Ġshards": 43090, + "Ġphil": 43091, + "protein": 43092, + "Ġalters": 43093, + "Poor": 43094, + "Typically": 43095, + "KER": 43096, + "ociate": 43097, + "Ġemits": 43098, + "recy": 43099, + "Ġmechanically": 43100, + "Ġ...\"": 43101, + "nature": 43102, + "sys": 43103, + "ysc": 43104, + "Ġwavelengths": 43105, + "pattern": 43106, + "insured": 43107, + "Ġparasitic": 43108, + "ĠLCS": 43109, + "ĠPACs": 43110, + "Ġheals": 43111, + "ĠCCP": 43112, + "ĠHacker": 43113, + "Ġpsy": 43114, + "ĠBeans": 43115, + "Ġdemonic": 43116, + "JV": 43117, + "Ġatmosp": 43118, + "equality": 43119, + "Ġairst": 43120, + "Ġincarn": 43121, + "ynthesis": 43122, + "Ġequations": 43123, + "tch": 43124, + "ĠHUGE": 43125, + "ĠChanged": 43126, + "itatively": 43127, + "Job": 43128, + "gaming": 43129, + "Ġ1899": 
43130, + "ĠMorsi": 43131, + "Ġconjecture": 43132, + "riad": 43133, + "Ġprimates": 43134, + "ĠArtemis": 43135, + "ĠThro": 43136, + "Ġbiologically": 43137, + "Church": 43138, + "topia": 43139, + "recomm": 43140, + "Ġgradient": 43141, + "Ġful": 43142, + "Ġbastard": 43143, + "CHO": 43144, + "IUM": 43145, + "sleep": 43146, + "Construction": 43147, + "raints": 43148, + "vable": 43149, + "ionage": 43150, + "Ġcomrade": 43151, + "Ġpopulate": 43152, + "Ġnerds": 43153, + "ĠXie": 43154, + "result": 43155, + "ĠImper": 43156, + "Ġpamphlet": 43157, + "Ku": 43158, + "Ġbackend": 43159, + "ificent": 43160, + "etus": 43161, + "Ġdisson": 43162, + "config": 43163, + "Ġsuc": 43164, + "Ġwavelength": 43165, + "external": 43166, + "owder": 43167, + "Ġpredis": 43168, + "eenth": 43169, + "Det": 43170, + "andem": 43171, + "Ġ1865": 43172, + "ĠDefeat": 43173, + "Individual": 43174, + "Ġretrieving": 43175, + "stories": 43176, + "Ġdesolate": 43177, + "Ġlett": 43178, + "Ġunpublished": 43179, + "Ġpassively": 43180, + "Ġdissertation": 43181, + "raits": 43182, + "abee": 43183, + "ĠResist": 43184, + "Robin": 43185, + "Ġbenevolent": 43186, + "blast": 43187, + "Offic": 43188, + "snap": 43189, + "vernment": 43190, + "Ġextermin": 43191, + "wt": 43192, + "bitious": 43193, + "hibited": 43194, + "Insp": 43195, + "posted": 43196, + "ĠYugoslav": 43197, + "rational": 43198, + "adapt": 43199, + "ĠAtari": 43200, + "Ġplugin": 43201, + "oglobin": 43202, + "efeated": 43203, + "ĠHRC": 43204, + "cko": 43205, + "ilver": 43206, + "ĠDestruction": 43207, + "gewater": 43208, + "ĠRadiation": 43209, + "Ġimprison": 43210, + "origin": 43211, + "antine": 43212, + "ĠPublication": 43213, + "Ġhealer": 43214, + "istered": 43215, + "ĠTHEIR": 43216, + "hazard": 43217, + "Contract": 43218, + "Ġmediated": 43219, + "Ġindexed": 43220, + "ĠSYSTEM": 43221, + "Labor": 43222, + "Blade": 43223, + "Ġyog": 43224, + "Champ": 43225, + "Gordon": 43226, + "IAS": 43227, + "Ġnineteenth": 43228, + "animous": 43229, + "begin": 43230, + "ĠHolo": 43231, + "Planet": 43232, + "udding": 43233, + "default": 43234, + "ĠOMG": 43235, + "Ġwond": 43236, + "wm": 43237, + "pend": 43238, + "Extreme": 43239, + "Ġinterstellar": 43240, + "ASED": 43241, + "ĠBerks": 43242, + "Ġprimal": 43243, + "Foot": 43244, + "Ġinadvert": 43245, + "amboo": 43246, + "ĠLeica": 43247, + "Events": 43248, + "ĠPigs": 43249, + "RAFT": 43250, + "ï": 43251, + "ĠGentleman": 43252, + "Multiple": 43253, + "ĠPsychiatric": 43254, + "Ġdespise": 43255, + "ĠZionism": 43256, + "ĠSSL": 43257, + "shit": 43258, + "Ġthreaded": 43259, + "Ġartifact": 43260, + "Ġmitochondrial": 43261, + "ĠLayer": 43262, + "inus": 43263, + "podcast": 43264, + "Ġawaken": 43265, + "Management": 43266, + "Ġdelusions": 43267, + "grey": 43268, + "Ġpseud": 43269, + "agonal": 43270, + "ĠHirosh": 43271, + "Georg": 43272, + "Dragon": 43273, + "Stack": 43274, + "ohm": 43275, + "Ġvener": 43276, + "Row": 43277, + "Ġsandbox": 43278, + "Ġblinding": 43279, + "razen": 43280, + "Ġ389": 43281, + "Ġcrappy": 43282, + "Ġlith": 43283, + "antha": 43284, + "Ġplurality": 43285, + "ĠDAC": 43286, + "inently": 43287, + "intage": 43288, + "Ġ1902": 43289, + "ĠDepend": 43290, + "Ġelapsed": 43291, + "==": 43292, + "ĠGenie": 43293, + "Bush": 43294, + "ĠPlanetary": 43295, + "Bah": 43296, + "ĠKira": 43297, + "emn": 43298, + "Month": 43299, + "allic": 43300, + "coded": 43301, + "VOL": 43302, + "Ġ[...]": 43303, + "ĠRampage": 43304, + "Ġ(*": 43305, + "Production": 43306, + "licts": 43307, + "Ġinoc": 43308, + "Cour": 43309, + "Ġspurious": 43310, + "Ġultras": 43311, + "ggles": 43312, + 
"Ġdelusion": 43313, + "ĠRacer": 43314, + "ĠPrism": 43315, + "FH": 43316, + "uppet": 43317, + "Ġcultured": 43318, + "Ġ436": 43319, + "aneously": 43320, + "اÙĦ": 43321, + "ĠMissions": 43322, + "monton": 43323, + "criptions": 43324, + "ificate": 43325, + "Cause": 43326, + "Ġ1898": 43327, + "ocaust": 43328, + "Ġbri": 43329, + "ĠShoals": 43330, + "ommod": 43331, + "alted": 43332, + "ogenesis": 43333, + "warn": 43334, + "illus": 43335, + "vv": 43336, + "Ġcontam": 43337, + "ĠLesbian": 43338, + "Ġcavalry": 43339, + "ĠPresence": 43340, + "rehens": 43341, + "tool": 43342, + "accessible": 43343, + "Ġ(~": 43344, + "ĠLicensed": 43345, + "Ġprophets": 43346, + "Ġboulder": 43347, + "mean": 43348, + "akura": 43349, + "Ġunres": 43350, + "ĠCinnamon": 43351, + "Leaks": 43352, + "........................": 43353, + "Contact": 43354, + "Ġassassins": 43355, + "ĠGreenwald": 43356, + "dk": 43357, + "amazon": 43358, + "Ġagreeable": 43359, + "ernandez": 43360, + "Easy": 43361, + "PLA": 43362, + "ĠBigfoot": 43363, + "Ġconvent": 43364, + "Ġempires": 43365, + "Ġ387": 43366, + "Ġgrasped": 43367, + "Ġruby": 43368, + "Ġreconc": 43369, + "Warning": 43370, + "atem": 43371, + "Ġretrieval": 43372, + "ĠFDR": 43373, + "ĠReaper": 43374, + "orem": 43375, + "ĠLuo": 43376, + "hig": 43377, + "ĠArmor": 43378, + "tp": 43379, + "ĠInterpret": 43380, + "Conservative": 43381, + "ĠSodium": 43382, + "Ġbead": 43383, + "Ġpropagate": 43384, + "claw": 43385, + "href": 43386, + "ĠPaste": 43387, + "Ġomit": 43388, + "Boost": 43389, + "Diamond": 43390, + "goo": 43391, + "Ġanomal": 43392, + "ĠDISTRICT": 43393, + "Greek": 43394, + "warning": 43395, + "Ġdespised": 43396, + "Karl": 43397, + "AGES": 43398, + "Ġserotonin": 43399, + "ESSION": 43400, + "_______": 43401, + "ĠCollider": 43402, + "auldron": 43403, + "Ġsquee": 43404, + "Control": 43405, + "ffield": 43406, + "cycles": 43407, + "Legal": 43408, + "xa": 43409, + "minimum": 43410, + "ĠGeneric": 43411, + "Circ": 43412, + "·": 43413, + "Behind": 43414, + "guide": 43415, + "Ground": 43416, + "roying": 43417, + "ĠGrail": 43418, + "Ġthee": 43419, + "Ġ9000": 43420, + "Batman": 43421, + "Brother": 43422, + "Ġnons": 43423, + "RW": 43424, + "saf": 43425, + "ĠCroat": 43426, + "tainment": 43427, + "sci": 43428, + "Ye": 43429, + "Range": 43430, + "Ey": 43431, + "perature": 43432, + "ĠDracula": 43433, + "oreal": 43434, + "Fighting": 43435, + "Ġreleg": 43436, + "Ġcoupling": 43437, + "Tracker": 43438, + "tyard": 43439, + "Mut": 43440, + "Military": 43441, + "lamm": 43442, + "ittens": 43443, + "ĠCRC": 43444, + "ĠXiang": 43445, + "Ġorthodoxy": 43446, + "ĠGoth": 43447, + "Ġalgorith": 43448, + "ĠAthen": 43449, + "Ġtyrann": 43450, + "ĠTorrent": 43451, + "IDs": 43452, + "ĠGENERAL": 43453, + "ĠASUS": 43454, + "rastructure": 43455, + "Faith": 43456, + "models": 43457, + "rentices": 43458, + "ĠCurse": 43459, + "Ġcalibr": 43460, + "attled": 43461, + "monary": 43462, + "Ġpenet": 43463, + "aclysm": 43464, + "album": 43465, + "Ġremnant": 43466, + "Ġfung": 43467, + "itiveness": 43468, + "thodox": 43469, + "Ġunlocks": 43470, + "Ġprobabilities": 43471, + "Ġster": 43472, + "Ġscrim": 43473, + "Ġanalytic": 43474, + "Urban": 43475, + "âĢĶâĢĶâĢĶâĢĶ": 43476, + "Craft": 43477, + "Ġbrut": 43478, + "1986": 43479, + "Section": 43480, + "raged": 43481, + "arij": 43482, + "Hero": 43483, + "ĠHebdo": 43484, + "ĠEmpress": 43485, + "Ġvivo": 43486, + "ĠPublications": 43487, + "Ġcannabinoids": 43488, + "arrett": 43489, + "Ġbounded": 43490, + "Ġquests": 43491, + "Ġomin": 43492, + "ĠRuler": 43493, + "ĠYue": 43494, + "ridges": 43495, + "Ġpeasants": 
43496, + "ĠAlloy": 43497, + "Desk": 43498, + "ULAR": 43499, + "Ġthor": 43500, + "ĠOvers": 43501, + "ĠTome": 43502, + "mk": 43503, + "Ġ1050": 43504, + "Ġshroud": 43505, + "Ġdistribut": 43506, + "weapons": 43507, + "ĠAuthorization": 43508, + "ĠPoke": 43509, + "ĠAlternate": 43510, + "scan": 43511, + "artisan": 43512, + "ĠGems": 43513, + "ĠForums": 43514, + "atonin": 43515, + "viron": 43516, + "Rog": 43517, + "duct": 43518, + "Ġtabletop": 43519, + "crow": 43520, + "/)": 43521, + "ĠStainless": 43522, + "ottest": 43523, + "Ġreborn": 43524, + "anchez": 43525, + "cium": 43526, + "ĠNicarag": 43527, + "elfare": 43528, + "Ġupd": 43529, + "ritic": 43530, + "bm": 43531, + "Ġ608": 43532, + "ĠSlightly": 43533, + "ĠDrops": 43534, + "ISO": 43535, + "ĠiT": 43536, + "xiety": 43537, + "ĠGawker": 43538, + "omination": 43539, + "ĠReached": 43540, + "Student": 43541, + "Drop": 43542, + "MET": 43543, + "ĠKubrick": 43544, + "1950": 43545, + "ĠTuls": 43546, + "Ġcomputed": 43547, + "depending": 43548, + "ĠCosmetic": 43549, + "udget": 43550, + "Lex": 43551, + "icut": 43552, + "ĠDepth": 43553, + "Ġ1893": 43554, + "ahah": 43555, + "Ġath": 43556, + "fights": 43557, + "thia": 43558, + "Ġoccult": 43559, + "Wheel": 43560, + "ĠSega": 43561, + "Ġtheolog": 43562, + "reement": 43563, + ")--": 43564, + "Ġunus": 43565, + "ĠGamma": 43566, + "Looks": 43567, + "Ġellipt": 43568, + "Ġairflow": 43569, + "ĠHimself": 43570, + "Ġpagan": 43571, + "ĠRei": 43572, + "Ġpilgr": 43573, + "ĠSubmission": 43574, + "Region": 43575, + "Ġinsertion": 43576, + "Ġsket": 43577, + "Ġsatisfies": 43578, + "ĠPixie": 43579, + "Ġcontempl": 43580, + "abbit": 43581, + "ĠReplay": 43582, + "ĠGalile": 43583, + "ĠGodzilla": 43584, + "Ġarithmetic": 43585, + "iasm": 43586, + "1987": 43587, + "ĠFeminist": 43588, + "Liter": 43589, + "ĠDisable": 43590, + "ouble": 43591, + "essors": 43592, + "Ġfors": 43593, + "Ġensu": 43594, + "Putting": 43595, + "ĠMSM": 43596, + "Cond": 43597, + "emade": 43598, + "Ġindistinguishable": 43599, + "Magn": 43600, + "Ġms": 43601, + "MAL": 43602, + "ĠBF": 43603, + "dm": 43604, + "iltration": 43605, + "irection": 43606, + "ĠSpir": 43607, + "Gb": 43608, + "ĠIbn": 43609, + "Abs": 43610, + "imens": 43611, + "RNA": 43612, + "============": 43613, + "Ġ655": 43614, + "ĠConversion": 43615, + "imilation": 43616, + "igion": 43617, + "ĠSomew": 43618, + "mL": 43619, + "Border": 43620, + "Ë": 43621, + "Factor": 43622, + "Number": 43623, + "Ġejac": 43624, + "Cho": 43625, + "Ġrighteousness": 43626, + "ĠPATH": 43627, + "ĠElys": 43628, + "ouched": 43629, + "Ġmultic": 43630, + "Ġfaculties": 43631, + "ĠEarthquake": 43632, + "ĠReferences": 43633, + "ensitive": 43634, + "Ġimpat": 43635, + "Ġ................": 43636, + "buff": 43637, + "Ġ1895": 43638, + "colo": 43639, + "Vi": 43640, + "Ġubiqu": 43641, + "ĠChev": 43642, + "Fish": 43643, + "ĠBlueprint": 43644, + "CHQ": 43645, + "Ġlinem": 43646, + "ĠFlavor": 43647, + "Ġcrimson": 43648, + "ĠAbstract": 43649, + "arette": 43650, + "plete": 43651, + "ranean": 43652, + "Dash": 43653, + "Ġdimensional": 43654, + "Cub": 43655, + "ttle": 43656, + "ĠDSM": 43657, + "Ġinstantaneous": 43658, + "esy": 43659, + "Ġepoch": 43660, + "Brit": 43661, + "ĠÎ": 43662, + "ECD": 43663, + "Ġwarp": 43664, + "obyl": 43665, + "ubric": 43666, + "Ġutilitarian": 43667, + "Ġsummarizes": 43668, + "letal": 43669, + "Ord": 43670, + "opath": 43671, + "tained": 43672, + "ghai": 43673, + "Ġwhis": 43674, + "insert": 43675, + "Ġphon": 43676, + "rils": 43677, + "Ġearthly": 43678, + "ĠAlic": 43679, + "ĠPCIe": 43680, + "Ġfurthermore": 43681, + "ocard": 43682, + 
"Ġuter": 43683, + "ĠAdmin": 43684, + "ographics": 43685, + "ĠConstantin": 43686, + "gravity": 43687, + "iPhone": 43688, + "Ġwasteland": 43689, + "Ġfps": 43690, + "Tip": 43691, + "Ġmurm": 43692, + "paces": 43693, + "ĠSamurai": 43694, + "ĠFOIA": 43695, + "ĠRadiant": 43696, + "ĠUnreal": 43697, + "Ġmicrow": 43698, + "usterity": 43699, + "zyme": 43700, + "itbart": 43701, + "metadata": 43702, + "Dat": 43703, + "ĠMoons": 43704, + "ĠProtestants": 43705, + "ungle": 43706, + "Ġvideog": 43707, + "pid": 43708, + "Ġdisple": 43709, + "aucus": 43710, + "Ġcoils": 43711, + "ĠDwar": 43712, + "fixed": 43713, + "Alice": 43714, + "Ġgarrison": 43715, + "ĠVelocity": 43716, + "ĠJehovah": 43717, + "Ġfascists": 43718, + "ĠCHO": 43719, + "jl": 43720, + "Ġmetaphors": 43721, + "ĠSiege": 43722, + "scientific": 43723, + "Ä«": 43724, + "Slow": 43725, + "hex": 43726, + "ĠBlaz": 43727, + "mediated": 43728, + "esthesia": 43729, + "ĠAvg": 43730, + "Ġbelie": 43731, + "Carter": 43732, + "Ġexposition": 43733, + "azeera": 43734, + "dial": 43735, + "Ġbask": 43736, + "Scale": 43737, + "Ġdisob": 43738, + "Ġgore": 43739, + "Ġhypocr": 43740, + "Ġphantom": 43741, + "ĠSynd": 43742, + "BLIC": 43743, + "pter": 43744, + "ĠScorpion": 43745, + "eor": 43746, + "ĠRecover": 43747, + "Ġsummoning": 43748, + "Ġorb": 43749, + "jump": 43750, + "Ġ768": 43751, + "ĠEnix": 43752, + "Spons": 43753, + ",...": 43754, + "Wide": 43755, + "Ġparse": 43756, + "Ġdebtor": 43757, + "Ġpathological": 43758, + "Ġserpent": 43759, + "ĠFranç": 43760, + "reetings": 43761, + "Ġdeletion": 43762, + "Ġvolunt": 43763, + "ĠNotification": 43764, + "liga": 43765, + "Disk": 43766, + "Account": 43767, + "1979": 43768, + "Ġsymmetry": 43769, + "ĠBearing": 43770, + "ĠABV": 43771, + "ĠORDER": 43772, + "rpm": 43773, + "ĠFuck": 43774, + "?!\"": 43775, + "mask": 43776, + "Grade": 43777, + "neath": 43778, + "ocom": 43779, + "Detect": 43780, + "ryption": 43781, + "ĠAura": 43782, + "Ġinert": 43783, + "PLAY": 43784, + "gres": 43785, + "INTON": 43786, + "Deal": 43787, + "fficient": 43788, + "ĠVoid": 43789, + "gement": 43790, + "Ġscorp": 43791, + "Ġreincarn": 43792, + "ĠVapor": 43793, + "Ġ1840": 43794, + "Yellow": 43795, + "......": 43796, + "Ġparameter": 43797, + "ĠDISTR": 43798, + "ĠForgotten": 43799, + "Eat": 43800, + "izational": 43801, + "Witness": 43802, + "ĠDupl": 43803, + "Ġdogma": 43804, + "Ġzipper": 43805, + "ĠZeus": 43806, + "mage": 43807, + "ormal": 43808, + "Ġ\".": 43809, + "Ġecc": 43810, + "ĠSlot": 43811, + "ĠRegist": 43812, + "Others": 43813, + "VID": 43814, + "Windows": 43815, + "Ġshitty": 43816, + "ĠLethal": 43817, + "Monster": 43818, + "ĠExpression": 43819, + "tx": 43820, + "ythm": 43821, + "Were": 43822, + "ivalry": 43823, + "atcher": 43824, + "ĠFormat": 43825, + "ĠPlasma": 43826, + "Phys": 43827, + "laugh": 43828, + "Fu": 43829, + "java": 43830, + "roma": 43831, + "ĠIncreases": 43832, + "Ġlicensee": 43833, + "Ġmystic": 43834, + "Ġproto": 43835, + "ĠLoki": 43836, + "forcing": 43837, + "hots": 43838, + "Ġ->": 43839, + "Outside": 43840, + "ĠEndless": 43841, + "Ġachie": 43842, + "ĠTurtles": 43843, + "Ġconvin": 43844, + "JUST": 43845, + "Ġimmobil": 43846, + "ĠCauses": 43847, + "Ġclich": 43848, + "xes": 43849, + "ffiti": 43850, + "Ġhypot": 43851, + "Bat": 43852, + "Ġbigot": 43853, + "Personal": 43854, + "ĠPharmac": 43855, + "Lot": 43856, + "VERT": 43857, + "Ġbapt": 43858, + "idelines": 43859, + "Ġprox": 43860, + "MAP": 43861, + "Spirit": 43862, + "ĠSlug": 43863, + "Ġebook": 43864, + "eches": 43865, + "ĠAndromeda": 43866, + "Ġceremon": 43867, + "1975": 43868, + "PRE": 43869, + 
"Ġasshole": 43870, + "linear": 43871, + "Nevertheless": 43872, + "Ġwillpower": 43873, + "azel": 43874, + "Fif": 43875, + "andise": 43876, + "Ġextravag": 43877, + "ĠBuffy": 43878, + "Ġcorrelations": 43879, + "ptr": 43880, + "Progress": 43881, + "shape": 43882, + "ĠSymbol": 43883, + "arag": 43884, + "ĠContext": 43885, + "ucer": 43886, + "1983": 43887, + "ĠMyster": 43888, + "Pain": 43889, + "Login": 43890, + "mbol": 43891, + "codes": 43892, + "RANT": 43893, + "Ġoverse": 43894, + "opot": 43895, + "STEM": 43896, + "enser": 43897, + "ĠCosmic": 43898, + "Spl": 43899, + "ritional": 43900, + "ĠPharaoh": 43901, + "ĠRemix": 43902, + "xon": 43903, + "ĠXII": 43904, + "Ġunman": 43905, + "Ġimmedi": 43906, + "Ġmonog": 43907, + "ĠLX": 43908, + "Ġabstraction": 43909, + "ocolate": 43910, + "ĠDonkey": 43911, + "Ġ!!": 43912, + "ĠLIA": 43913, + "shed": 43914, + "rules": 43915, + "Ġcalc": 43916, + "ĠAutob": 43917, + "anmar": 43918, + "eworks": 43919, + "notations": 43920, + "Ġtenancy": 43921, + "ĠPetraeus": 43922, + "dp": 43923, + "amphetamine": 43924, + "ĠCortex": 43925, + "rw": 43926, + "Ġprojectile": 43927, + "Ġintrinsically": 43928, + "Route": 43929, + "Ġnegoti": 43930, + "anuts": 43931, + "Analysis": 43932, + "redits": 43933, + "ĠGG": 43934, + "thread": 43935, + "ĠChosen": 43936, + "Years": 43937, + "otyp": 43938, + "ĠNCT": 43939, + "udic": 43940, + "ochemical": 43941, + "Neigh": 43942, + "Ġfishes": 43943, + "ĠFloat": 43944, + "Print": 43945, + "okia": 43946, + "Ġbarb": 43947, + "quote": 43948, + "Lew": 43949, + "Ġannoun": 43950, + "istors": 43951, + "Reading": 43952, + "ACTION": 43953, + "Ġintakes": 43954, + "ĠBeet": 43955, + "matter": 43956, + "Swe": 43957, + "Ther": 43958, + "Ġtyrant": 43959, + "ĠPsycho": 43960, + "ĠDestroy": 43961, + "Ġesoteric": 43962, + "Ġbiom": 43963, + "idious": 43964, + "Merc": 43965, + "hran": 43966, + "ĠBaal": 43967, + "seconds": 43968, + "Ġsuperhuman": 43969, + "ancel": 43970, + "Ġworshipped": 43971, + "Ġwebs": 43972, + "Ġviolet": 43973, + "ĠMetallic": 43974, + "eday": 43975, + "ordering": 43976, + "Nut": 43977, + "Ġconstructs": 43978, + "olescent": 43979, + "Unit": 43980, + "otypes": 43981, + "Ġembryonic": 43982, + "perm": 43983, + "Nature": 43984, + "ĠDecre": 43985, + "levant": 43986, + "Ġss": 43987, + "+(": 43988, + "ĠDoctrine": 43989, + "puters": 43990, + "Ġsaline": 43991, + "orsche": 43992, + "1111": 43993, + "values": 43994, + "Ġutopian": 43995, + "ĠBooster": 43996, + "Technical": 43997, + "ì": 43998, + "ĠLIMITED": 43999, + "nir": 44000, + "Ġclones": 44001, + "Performance": 44002, + "aple": 44003, + "Ġshudder": 44004, + "Ġcontempor": 44005, + "lator": 44006, + "ĠOops": 44007, + "Ġammon": 44008, + "Ġdavid": 44009, + "Ġbom": 44010, + "bish": 44011, + "Ġdetectable": 44012, + "Ġmultiplying": 44013, + "Ġreddit": 44014, + "Prim": 44015, + "Ġmedial": 44016, + "Ġsubstrate": 44017, + "ĠSanskrit": 44018, + "Spect": 44019, + "ĠMagical": 44020, + "Ġarcane": 44021, + "align": 44022, + "Ġ1861": 44023, + "Ġneocons": 44024, + "Ì": 44025, + "ĠBounty": 44026, + "ĠContinent": 44027, + "Ġhurd": 44028, + "alions": 44029, + "Ġgeneralized": 44030, + "ĠInsect": 44031, + "Ġsimul": 44032, + "actual": 44033, + "advert": 44034, + "ukong": 44035, + "Resp": 44036, + "ĠWarcraft": 44037, + "Hunter": 44038, + "hyper": 44039, + "ĠBreach": 44040, + "ught": 44041, + "Ġcomputation": 44042, + "react": 44043, + "Feel": 44044, + "ĠCheong": 44045, + "Ġslut": 44046, + "Ġgalactic": 44047, + "Ġtaunt": 44048, + "Enjoy": 44049, + "Ġreprinted": 44050, + "Word": 44051, + "ĠHandbook": 44052, + "amins": 44053, + "exit": 
44054, + "Wo": 44055, + "Ġadherents": 44056, + "Counter": 44057, + "ĠNode": 44058, + "ĠTwisted": 44059, + "Ġgrinned": 44060, + "universal": 44061, + "ĠAmon": 44062, + "Ġaster": 44063, + "ĠEquip": 44064, + "!\".": 44065, + "Ġanalogous": 44066, + "rients": 44067, + "alky": 44068, + "ĠQian": 44069, + "Ġspont": 44070, + "docs": 44071, + "Ġcontemplation": 44072, + "Ġrevolutionaries": 44073, + "Ġpreset": 44074, + "ĠAmendments": 44075, + "Ġexecutes": 44076, + "ĠDuration": 44077, + "Ġcompulsion": 44078, + "Ġstagger": 44079, + "ynamic": 44080, + "blem": 44081, + "];": 44082, + "Higher": 44083, + "Balt": 44084, + "heast": 44085, + "Ġcorp": 44086, + "awei": 44087, + "Motion": 44088, + "Mis": 44089, + "Ġadventurer": 44090, + "eger": 44091, + "Ġarsen": 44092, + "ĠVoltage": 44093, + "ĠEVENTS": 44094, + "Salt": 44095, + "issance": 44096, + "DK": 44097, + "Ship": 44098, + "Ġunwitting": 44099, + "Ton": 44100, + "ĠPROGRAM": 44101, + "Ġtentacles": 44102, + "erness": 44103, + "thirst": 44104, + "Fig": 44105, + "fty": 44106, + "ĠTolkien": 44107, + "Sleep": 44108, + "ĠExplain": 44109, + "Pub": 44110, + "ĠBounce": 44111, + "ĠDemo": 44112, + "Ġ1897": 44113, + "ĠSPI": 44114, + "intern": 44115, + "********": 44116, + "ĠKills": 44117, + "ĠZombies": 44118, + "Single": 44119, + "ratom": 44120, + "ĠClaw": 44121, + "hid": 44122, + "asel": 44123, + "Shock": 44124, + "erential": 44125, + "Ġupgr": 44126, + "holy": 44127, + "Ġ\\": 44128, + "aghetti": 44129, + "Ġthence": 44130, + "genic": 44131, + "papers": 44132, + "1982": 44133, + "ravel": 44134, + "ĠUNIVERS": 44135, + "Charge": 44136, + "ĠDelay": 44137, + "ibrary": 44138, + "ĠHDD": 44139, + "olson": 44140, + "Ġenchanted": 44141, + "Wr": 44142, + "graph": 44143, + "Ġcorro": 44144, + "ept": 44145, + "etsu": 44146, + "ĠQin": 44147, + "Û": 44148, + "Ġantidepressant": 44149, + "ĠCerberus": 44150, + "Ġappe": 44151, + "ĠDEFENSE": 44152, + "Ġdysph": 44153, + "split": 44154, + "zilla": 44155, + "attr": 44156, + "Clar": 44157, + "Äĵ": 44158, + "hov": 44159, + "IRC": 44160, + "hibition": 44161, + "'/": 44162, + "ĠURLs": 44163, + "Draft": 44164, + "Prep": 44165, + "ĠLanguages": 44166, + "ĠTravels": 44167, + "ceiver": 44168, + "aturally": 44169, + "pair": 44170, + "ĠALWAYS": 44171, + "aaaa": 44172, + "ĠTenth": 44173, + "ĠNAD": 44174, + "Serv": 44175, + "ĠUID": 44176, + "cens": 44177, + "ĠLearned": 44178, + "Ġtraject": 44179, + "Ġmoaning": 44180, + "ĠNare": 44181, + "Ġingen": 44182, + "Ġsurn": 44183, + "Ġfloppy": 44184, + "breeding": 44185, + "uph": 44186, + "rossover": 44187, + "Understanding": 44188, + "Glass": 44189, + "Ġruntime": 44190, + "gp": 44191, + "Ġâľĵ": 44192, + "Ġcyt": 44193, + "bley": 44194, + "agall": 44195, + "Ġunworthy": 44196, + "otine": 44197, + "Ġchromosome": 44198, + "utters": 44199, + "Ġµ": 44200, + "Ġexpans": 44201, + "Ġdement": 44202, + "Ġinsurrection": 44203, + "Ġsurviv": 44204, + "genre": 44205, + "ospital": 44206, + "ĠPlato": 44207, + "ĠTrigger": 44208, + "selection": 44209, + "ilege": 44210, + "Ġsegreg": 44211, + "itizens": 44212, + "ĠRAID": 44213, + "Pure": 44214, + "hetti": 44215, + "ĠFailed": 44216, + "ĠCharacters": 44217, + "ĠCreep": 44218, + "akra": 44219, + "Ec": 44220, + "ĠAristotle": 44221, + "Lim": 44222, + "error": 44223, + "yrus": 44224, + "umably": 44225, + ">>": 44226, + "Ġtsun": 44227, + "knowledge": 44228, + "Cert": 44229, + "bable": 44230, + "hesion": 44231, + "ĠProcedures": 44232, + "Ġmarkup": 44233, + "ideo": 44234, + "Ġrhet": 44235, + "ĠChapters": 44236, + "ĠChecking": 44237, + "mega": 44238, + "Ġphotons": 44239, + "required": 44240, + 
"Unknown": 44241, + "ĠDrawn": 44242, + "Ġvari": 44243, + "EEK": 44244, + "Ġcompuls": 44245, + "Ġcloning": 44246, + "ccoli": 44247, + "Ġ1070": 44248, + "Ġkindred": 44249, + "Ġdiscl": 44250, + "ĠCind": 44251, + "Collect": 44252, + "Ġchromosomes": 44253, + "phant": 44254, + "ĠKafka": 44255, + "Ġeverlasting": 44256, + "Ġmercenary": 44257, + "ĠHmm": 44258, + "----": 44259, + "riber": 44260, + "Ġdoubtless": 44261, + "Ġsusceptibility": 44262, + "beta": 44263, + "notice": 44264, + "Ġcrochet": 44265, + "Ġrespir": 44266, + "Ġphilosophers": 44267, + "ĠExtras": 44268, + "Ġseparat": 44269, + "shown": 44270, + "iblings": 44271, + "Hispanic": 44272, + "copy": 44273, + "Tang": 44274, + "Knight": 44275, + "Ġpursu": 44276, + "ĠAnime": 44277, + "Ġlipid": 44278, + "ggies": 44279, + "levels": 44280, + "phalt": 44281, + "ĠCompleted": 44282, + "bral": 44283, + "Ġcerv": 44284, + "ĠAfric": 44285, + "ĠPhar": 44286, + "Color": 44287, + "ogene": 44288, + "ĠCompan": 44289, + "memory": 44290, + "Dust": 44291, + "ĠXIV": 44292, + "ĠConsole": 44293, + "').": 44294, + "Ġ1888": 44295, + "byn": 44296, + "Ġpolygamy": 44297, + "Auth": 44298, + "BUT": 44299, + "istine": 44300, + "Ġsacr": 44301, + "Ġabsor": 44302, + "ijah": 44303, + "ĠNeural": 44304, + "olester": 44305, + "ql": 44306, + "Already": 44307, + "Creating": 44308, + "ĠStarg": 44309, + "ĠPhilos": 44310, + "Consider": 44311, + "Ġrepositories": 44312, + "cludes": 44313, + "ĠBuffer": 44314, + "ĠPerspect": 44315, + "Ġcomput": 44316, + "Stew": 44317, + "iamond": 44318, + "ĠJudgment": 44319, + "OVA": 44320, + "angible": 44321, + "Ġoxid": 44322, + "Ġepigen": 44323, + "Ġsidel": 44324, + "ĠEag": 44325, + "devices": 44326, + "icone": 44327, + "1920": 44328, + "atism": 44329, + "beard": 44330, + "ĠGujar": 44331, + "ĠPlaystation": 44332, + "Ġglances": 44333, + "ĠCOMPLE": 44334, + "VERTIS": 44335, + "ukemia": 44336, + "Edit": 44337, + "Tickets": 44338, + "Square": 44339, + "ĠSerpent": 44340, + "Ġtransporter": 44341, + "MQ": 44342, + "ĠMongo": 44343, + "1967": 44344, + "ibaba": 44345, + "Ġtimet": 44346, + "sylvania": 44347, + "Latin": 44348, + "osaurs": 44349, + "Ġhumanoid": 44350, + "Ġcannabinoid": 44351, + "Ġdisciple": 44352, + "Psych": 44353, + "Ġimpro": 44354, + "Ġmc": 44355, + "Raid": 44356, + "Letter": 44357, + "ificant": 44358, + "ĠPortug": 44359, + "ĠFreem": 44360, + "Ġappell": 44361, + "ĠMushroom": 44362, + "Ġclans": 44363, + "Ġsinful": 44364, + "Ġingestion": 44365, + "ĠDirectory": 44366, + "abetic": 44367, + "Ġantigen": 44368, + "Ġimagin": 44369, + "mitter": 44370, + "!!!!!": 44371, + "ĠDPR": 44372, + "leness": 44373, + "\":\"\",\"": 44374, + "ĠAUTHOR": 44375, + "Ġgrunt": 44376, + "Ġflickering": 44377, + "Cath": 44378, + "asury": 44379, + "Ġnozzle": 44380, + "Secure": 44381, + "Stre": 44382, + "ĠBIT": 44383, + "Ġdeviations": 44384, + "Professor": 44385, + "bilt": 44386, + "ĠConscious": 44387, + "Ġinterrupts": 44388, + "ĠMormons": 44389, + "ĠCutter": 44390, + "Bed": 44391, + "ipient": 44392, + "ĠGhostbusters": 44393, + "Cart": 44394, + "endas": 44395, + "ĠExecution": 44396, + "ycle": 44397, + "Ġwedd": 44398, + "Sold": 44399, + "Ġvanquished": 44400, + "Regarding": 44401, + "Depending": 44402, + "']": 44403, + "atron": 44404, + "oidal": 44405, + "Cube": 44406, + "Studio": 44407, + ":/": 44408, + "ĠExplosion": 44409, + "activate": 44410, + "pport": 44411, + "fuck": 44412, + "Whe": 44413, + "Ġsmir": 44414, + "Ġwidgets": 44415, + "urses": 44416, + "izard": 44417, + ")*": 44418, + "icho": 44419, + "ĠVersus": 44420, + "ĠIntroduced": 44421, + "osaurus": 44422, + "1977": 44423, + 
"forum": 44424, + "Gray": 44425, + "Program": 44426, + "righteous": 44427, + "endum": 44428, + "ĠScare": 44429, + "Ġresists": 44430, + "*)": 44431, + "ĠCombo": 44432, + "Ġsockets": 44433, + "Ġaston": 44434, + "LAB": 44435, + "Ġmutated": 44436, + "eworld": 44437, + "DEF": 44438, + "Trend": 44439, + "âĢĶ-": 44440, + "Ġpropagation": 44441, + "Ġemancipation": 44442, + "collection": 44443, + "ĠDifferences": 44444, + "Tweet": 44445, + "Ġmajesty": 44446, + ")...": 44447, + "sylv": 44448, + "Ġadapters": 44449, + "Ġmilliseconds": 44450, + "Jews": 44451, + "ĠPatreon": 44452, + "phasis": 44453, + "ĠHTTP": 44454, + "onnaissance": 44455, + "ENDED": 44456, + "ĠIntro": 44457, + "qs": 44458, + "Ġsuperflu": 44459, + "*.": 44460, + "Ġminions": 44461, + "ĠStupid": 44462, + "Ġspecialization": 44463, + "ĠPikachu": 44464, + "Ġappellant": 44465, + "Training": 44466, + "circle": 44467, + "Interest": 44468, + "Ġfallacy": 44469, + "ĠDinosaur": 44470, + "ĠTHEM": 44471, + "Ġdirectories": 44472, + "Ġmasturbation": 44473, + "ĠStain": 44474, + "1978": 44475, + "odied": 44476, + "Ġexqu": 44477, + "ĠRats": 44478, + "swick": 44479, + "Ġemptiness": 44480, + "ĠXeon": 44481, + "Ġthereto": 44482, + "ĠEngels": 44483, + "ĠSupplement": 44484, + "Chan": 44485, + "Ġundead": 44486, + "ĠNoct": 44487, + "erest": 44488, + "ĠQuery": 44489, + "ĠSOLD": 44490, + "thritis": 44491, + "ĠEncounter": 44492, + "Ġvectors": 44493, + "Econom": 44494, + "Rogue": 44495, + "Ġgelatin": 44496, + "Rot": 44497, + "Flickr": 44498, + "Ġcaching": 44499, + "Ġloader": 44500, + "ĠELE": 44501, + "Ġcamoufl": 44502, + "Commission": 44503, + "Ġ1886": 44504, + "Ġcombos": 44505, + "ĠAwakening": 44506, + "Ġfeudal": 44507, + "Ġasses": 44508, + "ASY": 44509, + "atalie": 44510, + "Ġpanties": 44511, + "ĠMono": 44512, + "selves": 44513, + "Download": 44514, + "Ġvampires": 44515, + "------": 44516, + "ishop": 44517, + "User": 44518, + "Ġimperialist": 44519, + "ĠGOODMAN": 44520, + "1973": 44521, + "Vel": 44522, + "Struct": 44523, + "ĠUFOs": 44524, + "drivers": 44525, + "ĠOptional": 44526, + "uably": 44527, + "ĠPrinciple": 44528, + "verett": 44529, + "taining": 44530, + "Ġ1889": 44531, + "ĠCommunism": 44532, + "auder": 44533, + "Keys": 44534, + "lore": 44535, + "ĠMedieval": 44536, + "Hyd": 44537, + "weapon": 44538, + "Register": 44539, + "ĠHighlander": 44540, + "ĠRFC": 44541, + "Demon": 44542, + "ardless": 44543, + "ĠOrche": 44544, + "Kick": 44545, + "pixel": 44546, + "address": 44547, + "OUP": 44548, + "Brain": 44549, + "ĠMorph": 44550, + "bash": 44551, + "ĠANG": 44552, + "ĠIdle": 44553, + "ĠLucifer": 44554, + "Ġcorrelates": 44555, + "Ġgazed": 44556, + "colm": 44557, + "ĠKard": 44558, + "Solar": 44559, + "ĠVariable": 44560, + "ĠPACK": 44561, + "Ġfuzz": 44562, + "Ġanonym": 44563, + "ĠECO": 44564, + "feature": 44565, + "ĠEsports": 44566, + "ĠAnthropology": 44567, + "cise": 44568, + "manac": 44569, + "ĠSupports": 44570, + "rists": 44571, + "Quant": 44572, + "istical": 44573, + "çļĦ": 44574, + "Ġdexterity": 44575, + "monster": 44576, + "ordial": 44577, + "Mob": 44578, + "DEC": 44579, + "ĠConj": 44580, + "entric": 44581, + "1981": 44582, + "ECTION": 44583, + "ietal": 44584, + "ĠUses": 44585, + "ĠArmageddon": 44586, + "ĠCapitalism": 44587, + "Ub": 44588, + "iazep": 44589, + "helps": 44590, + "ouls": 44591, + "grim": 44592, + "ĠEthiop": 44593, + "tesy": 44594, + "Ġclipboard": 44595, + "Ġchimpanzees": 44596, + "PLIC": 44597, + "Sexual": 44598, + "wallet": 44599, + "ĠRect": 44600, + "ocytes": 44601, + "ĠHels": 44602, + "lace": 44603, + "Damn": 44604, + "Ġblasp": 44605, + "ildo": 
44606, + "ĠRober": 44607, + "APD": 44608, + "ĠWCS": 44609, + "ippery": 44610, + "ellectual": 44611, + "Ġ$(": 44612, + "Ġuniverses": 44613, + "Ġholster": 44614, + "Ġshading": 44615, + "Ġinflic": 44616, + "else": 44617, + "ĠShiny": 44618, + "ĠAVG": 44619, + "Lower": 44620, + "ĠMayhem": 44621, + "Originally": 44622, + "Crypt": 44623, + "SHARE": 44624, + "ĠBeir": 44625, + "!:": 44626, + "Ġrepentance": 44627, + "WHAT": 44628, + ".......": 44629, + "Ġauditory": 44630, + "aaa": 44631, + "ĠLoot": 44632, + "ciples": 44633, + "Ġcontem": 44634, + "Ġphoton": 44635, + "æľ": 44636, + "omach": 44637, + "ĠWhedon": 44638, + "ĠValid": 44639, + "asonable": 44640, + "pha": 44641, + "assad": 44642, + "ĠPse": 44643, + "Heat": 44644, + "Ġplugins": 44645, + "Ġclenched": 44646, + "ĠAmeric": 44647, + "transform": 44648, + "ĠEnh": 44649, + "agnetic": 44650, + "usalem": 44651, + "sych": 44652, + "Wed": 44653, + "replace": 44654, + "ĠKinect": 44655, + "shield": 44656, + "Sax": 44657, + "ividually": 44658, + "Ġfunctionally": 44659, + "Ġ:)": 44660, + "typically": 44661, + "Opening": 44662, + "Fa": 44663, + "ĠSELECT": 44664, + "Ġsamurai": 44665, + "Ġhorde": 44666, + "entle": 44667, + "sth": 44668, + "Changes": 44669, + "Pin": 44670, + "ithing": 44671, + "illance": 44672, + "ĠEmblem": 44673, + "ĠMicha": 44674, + "crypt": 44675, + "ĠObjective": 44676, + "ophys": 44677, + "Ġavg": 44678, + "poon": 44679, + "Ġreadable": 44680, + "ĠRx": 44681, + "allel": 44682, + "Sit": 44683, + "gom": 44684, + "ureau": 44685, + "ĠDoodle": 44686, + "Ġdungeon": 44687, + "($": 44688, + "Nintendo": 44689, + "\"],\"": 44690, + "Notes": 44691, + "Grab": 44692, + "Prosecutors": 44693, + "Advanced": 44694, + "Ġ1862": 44695, + "ĠVeter": 44696, + "Ġjurisd": 44697, + "ĠLauncher": 44698, + "Catal": 44699, + "udder": 44700, + "Ġresidues": 44701, + "Ġregress": 44702, + "ĠConquer": 44703, + "osal": 44704, + "ĠDice": 44705, + "************": 44706, + "braska": 44707, + "ipolar": 44708, + "Ġathe": 44709, + "bringing": 44710, + "Suddenly": 44711, + "ĠIEEE": 44712, + "verbs": 44713, + "Ġdelet": 44714, + "ipeg": 44715, + "Previous": 44716, + "]\"": 44717, + "Ġsidebar": 44718, + "illac": 44719, + "Property": 44720, + "α": 44721, + "REP": 44722, + "Ġauthenticated": 44723, + "gypt": 44724, + "uilding": 44725, + "ĠGing": 44726, + "Ġwart": 44727, + "Birth": 44728, + "Ġobedient": 44729, + "ĠXuan": 44730, + "ĠTYPE": 44731, + "Ġinhibits": 44732, + "1972": 44733, + "humans": 44734, + "IENT": 44735, + "Ġyoutube": 44736, + "Shortly": 44737, + "ophen": 44738, + "ĠWinc": 44739, + "ĠWrit": 44740, + "AUD": 44741, + "ĠHobbit": 44742, + "emphasis": 44743, + "ĠWonders": 44744, + "Ġtwitch": 44745, + "ĠProphe": 44746, + "Berry": 44747, + "ĠGinny": 44748, + "ĠBurst": 44749, + "ĠGenerator": 44750, + "Ġepile": 44751, + "ĠBalanced": 44752, + "GPU": 44753, + "maps": 44754, + "Ġneurotrans": 44755, + "ĠIRC": 44756, + "Ġ\"$": 44757, + "Create": 44758, + "Particip": 44759, + "ĠMarxism": 44760, + "Ġthou": 44761, + "ĠMortal": 44762, + "Ġ�": 44763, + "Ġninja": 44764, + "inburgh": 44765, + "Ġappro": 44766, + "ĠPistol": 44767, + "Jar": 44768, + "Ġprophes": 44769, + "classes": 44770, + "Ġanarchist": 44771, + "Ġextant": 44772, + "message": 44773, + "itaire": 44774, + "Ġ1863": 44775, + "ĠProl": 44776, + "Ġpropell": 44777, + "Ġimpossibility": 44778, + "Ġpropos": 44779, + "itamin": 44780, + "Rating": 44781, + "olphin": 44782, + "Ġmitochond": 44783, + "versions": 44784, + "Liberal": 44785, + "ishy": 44786, + "Ġspherical": 44787, + "ĠSurvive": 44788, + "FREE": 44789, + "rawler": 44790, + "Metal": 
44791, + "ĠStarship": 44792, + "Ġ=================================================================": 44793, + "ĠDharma": 44794, + "ĠSeller": 44795, + "Ġwrapper": 44796, + "Experience": 44797, + "Integ": 44798, + "Customer": 44799, + "hammad": 44800, + "Ġunanim": 44801, + "Jenn": 44802, + "Ġschizophren": 44803, + "agree": 44804, + "ĠEVENT": 44805, + "Shell": 44806, + "Ġfractions": 44807, + "1968": 44808, + "Ġextermination": 44809, + "ĠSniper": 44810, + "Ġpronoun": 44811, + "ĠHitman": 44812, + "xp": 44813, + "resource": 44814, + "WIND": 44815, + "Ġhierarchical": 44816, + "Ġted": 44817, + "Changing": 44818, + "Ġplaus": 44819, + "Transform": 44820, + "Ġbicy": 44821, + "imentary": 44822, + "Fuck": 44823, + "Mini": 44824, + "Ġoverc": 44825, + "ĠOptimus": 44826, + "outer": 44827, + "helial": 44828, + "akening": 44829, + "fx": 44830, + "Ġnig": 44831, + "Ġ+/-": 44832, + "ĠVICE": 44833, + "Ġnm": 44834, + "1976": 44835, + "ĠRitual": 44836, + "ĠTyrann": 44837, + "Ġscriptures": 44838, + "inical": 44839, + "ĠNull": 44840, + "ourgeois": 44841, + "dra": 44842, + "Ġpious": 44843, + "Ġneuron": 44844, + "Ġcolonists": 44845, + "ĠNebula": 44846, + "apply": 44847, + "Sah": 44848, + "Marx": 44849, + "Ġhypotheses": 44850, + "notation": 44851, + "acists": 44852, + "Math": 44853, + "Manager": 44854, + "Library": 44855, + "audi": 44856, + "Ġmp": 44857, + "ergic": 44858, + "Ġwizards": 44859, + "fw": 44860, + "DVD": 44861, + "ĠScala": 44862, + "Different": 44863, + "ampoo": 44864, + "ĠDread": 44865, + "abbage": 44866, + "Rus": 44867, + "ĠDumbledore": 44868, + "keleton": 44869, + "elsh": 44870, + "esian": 44871, + "ĠCorsair": 44872, + "Tier": 44873, + "ĠCelest": 44874, + "Ġnoun": 44875, + "Ġlucid": 44876, + "requisites": 44877, + "Ġgenus": 44878, + "Event": 44879, + "1974": 44880, + "ĠSatanic": 44881, + "iox": 44882, + "ĠHandle": 44883, + "ĠDestroyer": 44884, + "Ġinvocation": 44885, + "ĠXD": 44886, + "modified": 44887, + "Gam": 44888, + "ĠRPC": 44889, + "Ġsubsystem": 44890, + "Compared": 44891, + "odan": 44892, + "ĠPassive": 44893, + "ĠHelmet": 44894, + "nutrition": 44895, + "riction": 44896, + "HOW": 44897, + "Jess": 44898, + "Ġpiston": 44899, + "imately": 44900, + "Ġhypoc": 44901, + "ĠCelestial": 44902, + "MRI": 44903, + "Ġcompiler": 44904, + "ĠBadge": 44905, + "ĠRevelation": 44906, + "Ġintrig": 44907, + "Grad": 44908, + "ĠSPACE": 44909, + "Poly": 44910, + "ĠVul": 44911, + "Ġtrembling": 44912, + "Ġindepend": 44913, + "doctor": 44914, + "Certain": 44915, + "emet": 44916, + "Password": 44917, + "Ġgasped": 44918, + "Ġpronunciation": 44919, + "Fuel": 44920, + "ĠSPEC": 44921, + "assets": 44922, + "Extra": 44923, + "Ġformatting": 44924, + "Ġmods": 44925, + "\"!": 44926, + "akedown": 44927, + "Ġcircuitry": 44928, + "ĠTRUE": 44929, + "ĠVeil": 44930, + "Ġsighed": 44931, + "Charg": 44932, + "eals": 44933, + "Ġworkaround": 44934, + "Ġank": 44935, + "ĠScrolls": 44936, + "Ġdiffusion": 44937, + "Ġamps": 44938, + "ĠTempest": 44939, + "adata": 44940, + "Ġphenomen": 44941, + "Ġ???": 44942, + "Ġpopup": 44943, + "Ġinhibition": 44944, + "Ġaliases": 44945, + "erity": 44946, + "agraph": 44947, + "Jew": 44948, + "Ġbec": 44949, + "Classic": 44950, + "comment": 44951, + "usable": 44952, + "rodu": 44953, + "ĠEnlightenment": 44954, + "Ġinvis": 44955, + "Ġbiochemical": 44956, + "latest": 44957, + "ĠGMOs": 44958, + "ĠSocialism": 44959, + "Ġpollut": 44960, + "Ġeluc": 44961, + "Js": 44962, + "orthern": 44963, + "PDATED": 44964, + "alyses": 44965, + "Experts": 44966, + "Blog": 44967, + "ĠDemocr": 44968, + "etooth": 44969, + "pause": 44970, + 
"âĢ¢âĢ¢": 44971, + "ĠShinji": 44972, + "Ġdystop": 44973, + "Sources": 44974, + "ĠBrach": 44975, + "np": 44976, + "ĠXY": 44977, + "Ġneurot": 44978, + "assembly": 44979, + "Ġbourgeois": 44980, + "ĠReson": 44981, + "ĠIDE": 44982, + "Ġrecoil": 44983, + "raq": 44984, + "ĠAvenger": 44985, + "Paper": 44986, + "UTF": 44987, + "ĠWrest": 44988, + "ĠSimulation": 44989, + "elaide": 44990, + "ĠDMCA": 44991, + "utm": 44992, + "1963": 44993, + "Ġarcs": 44994, + "Ġmaximal": 44995, + "Ġcyl": 44996, + "Ġphilosoph": 44997, + "enium": 44998, + "Ġrelativity": 44999, + "ĠMacintosh": 45000, + "Ġpneum": 45001, + "LOC": 45002, + "Ġgoddamn": 45003, + "SHA": 45004, + "Ġlocalization": 45005, + "ĠPHI": 45006, + "Ġhierarch": 45007, + "Ġatheists": 45008, + "±": 45009, + "Luck": 45010, + "ĠJugg": 45011, + "options": 45012, + "alore": 45013, + "Edward": 45014, + "Monitor": 45015, + "Ġneoc": 45016, + "numbered": 45017, + "Arc": 45018, + "ĠCodes": 45019, + "ĠHallow": 45020, + "olitan": 45021, + "sections": 45022, + "ĠEzek": 45023, + "Ġamy": 45024, + "task": 45025, + "ĠCLS": 45026, + "ĠValkyrie": 45027, + "Ġcircumference": 45028, + "amac": 45029, + "ĠNotting": 45030, + "Ġproverb": 45031, + "Spec": 45032, + "Ġelemental": 45033, + "ĠBitcoins": 45034, + "Except": 45035, + "Release": 45036, + "ADVERTISEMENT": 45037, + "Complete": 45038, + "phrine": 45039, + "Ġspores": 45040, + "random": 45041, + "neum": 45042, + "trigger": 45043, + "ocide": 45044, + "Ġlongitudinal": 45045, + "isec": 45046, + "peat": 45047, + "Ġprecept": 45048, + "Wing": 45049, + "ĠâĹ": 45050, + "otropic": 45051, + "mouse": 45052, + "ĠWitcher": 45053, + "ĠAppearance": 45054, + "ROR": 45055, + "Ġ||": 45056, + "aird": 45057, + "Blu": 45058, + "Ġincomp": 45059, + "ĠFirefly": 45060, + "update": 45061, + "Loc": 45062, + "Ġnihil": 45063, + "hesive": 45064, + "Quality": 45065, + "youtu": 45066, + "Seriously": 45067, + "Ġannot": 45068, + "ĠCoins": 45069, + "Visit": 45070, + "lc": 45071, + "----------": 45072, + "Ġdiction": 45073, + "Ġafore": 45074, + "Ġimmortality": 45075, + "ĠForbidden": 45076, + "Allah": 45077, + "ĠPartial": 45078, + "ĠGears": 45079, + "Ġtrance": 45080, + "Hat": 45081, + "irez": 45082, + "ĠSATA": 45083, + "Ġelectrode": 45084, + "ĠLinear": 45085, + "rikes": 45086, + "Ġderiv": 45087, + "ĠXue": 45088, + "Fine": 45089, + "ĠIgnore": 45090, + "desc": 45091, + "DOM": 45092, + "Simple": 45093, + "orescence": 45094, + "Previously": 45095, + "Ġcircumcision": 45096, + "Sphere": 45097, + "Ġrenown": 45098, + "SET": 45099, + "ilight": 45100, + "ĠByzantine": 45101, + "EXP": 45102, + "Ġwhine": 45103, + "Missing": 45104, + "Lt": 45105, + "Guide": 45106, + "Ġhippocampus": 45107, + "Ġwip": 45108, + "yrights": 45109, + "Ġsubmer": 45110, + "Maker": 45111, + "Switch": 45112, + "Ġspectral": 45113, + "nect": 45114, + "Ãį": 45115, + "Ġreven": 45116, + "WER": 45117, + "Adding": 45118, + "ĠCONTROL": 45119, + "asper": 45120, + "0000000": 45121, + "ynt": 45122, + "annabin": 45123, + "ĠAliens": 45124, + "ĠPCR": 45125, + "asketball": 45126, + "ricia": 45127, + "ĠUnch": 45128, + "Tap": 45129, + "Ġpracticable": 45130, + "ĠUsage": 45131, + "Ġsoluble": 45132, + "Scroll": 45133, + "Random": 45134, + "Ġmoan": 45135, + "ĠPuppet": 45136, + "Dim": 45137, + "Attack": 45138, + "Ġspears": 45139, + "Ġrectangle": 45140, + "Ġamuse": 45141, + "ĠDoct": 45142, + "reon": 45143, + "ĠReset": 45144, + "vag": 45145, + "unin": 45146, + "ĠBris": 45147, + "ĠSwarm": 45148, + "Model": 45149, + "Standing": 45150, + "Ġdenotes": 45151, + "{": 45152, + "ĠLizard": 45153, + "nesty": 45154, + "Ġwor": 45155, + 
"Ġamplification": 45156, + "ĠInferno": 45157, + "Cover": 45158, + "SAM": 45159, + "respective": 45160, + "Shift": 45161, + "Ġlibertarians": 45162, + "Runner": 45163, + "ĠRevelations": 45164, + "Spr": 45165, + "ĠCrusader": 45166, + "Ġcaffe": 45167, + "Patch": 45168, + "stros": 45169, + "ĠImmortal": 45170, + "Ġinsofar": 45171, + "itance": 45172, + "ĠValhalla": 45173, + "Ġradial": 45174, + "Beast": 45175, + "sync": 45176, + "Ġ--------": 45177, + "ĠPathfinder": 45178, + "iless": 45179, + "operator": 45180, + "Choose": 45181, + "Ġdecode": 45182, + "Ġvou": 45183, + "ĠMutant": 45184, + "ĠCVE": 45185, + "Female": 45186, + "Ġoxidation": 45187, + "inational": 45188, + "dB": 45189, + "Scope": 45190, + "Wan": 45191, + "ĠBought": 45192, + "ĠDietary": 45193, + "rotein": 45194, + "Present": 45195, + "aukee": 45196, + "Ġtotem": 45197, + "Ġsatur": 45198, + "wagon": 45199, + "Builder": 45200, + "ĠBulg": 45201, + "Ġsects": 45202, + "Flo": 45203, + "ombat": 45204, + "ĠHermione": 45205, + "aughs": 45206, + "Ġhydra": 45207, + "paren": 45208, + "ë": 45209, + "Whereas": 45210, + "tsky": 45211, + "Ġchall": 45212, + "WORK": 45213, + "opian": 45214, + "rican": 45215, + "vati": 45216, + "ĠHTTPS": 45217, + "Ġwrink": 45218, + "Ġthrob": 45219, + "habi": 45220, + "Ġiodine": 45221, + "omorph": 45222, + "ĠScion": 45223, + "Hunt": 45224, + "Written": 45225, + "iosity": 45226, + "ĠBrowser": 45227, + "Ġsinners": 45228, + "culosis": 45229, + "Ġunconsciously": 45230, + "0100": 45231, + "Ġanarchists": 45232, + "Pull": 45233, + "FFER": 45234, + "Ġpandemonium": 45235, + "matically": 45236, + "Rush": 45237, + "Ġpurified": 45238, + "ĠCyan": 45239, + "ĠDifficulty": 45240, + "«": 45241, + "Aside": 45242, + "oggles": 45243, + "untu": 45244, + "iege": 45245, + "iberal": 45246, + "ĠCOUR": 45247, + "eteenth": 45248, + "weeney": 45249, + "biased": 45250, + "ĠDecay": 45251, + "quart": 45252, + "alysis": 45253, + "Ġstere": 45254, + "ellect": 45255, + "Ġkernels": 45256, + "juven": 45257, + "ĠJPEG": 45258, + "indal": 45259, + "topic": 45260, + "Ġidentifier": 45261, + "åı": 45262, + "Ġepid": 45263, + "1969": 45264, + "Ġpoisons": 45265, + "sym": 45266, + "mop": 45267, + "LOCK": 45268, + "axe": 45269, + "cohol": 45270, + "ctory": 45271, + "Ġadject": 45272, + "Skin": 45273, + "ĠFract": 45274, + "ĠSHAR": 45275, + "echo": 45276, + "thood": 45277, + "Ġencoding": 45278, + "Ġrelational": 45279, + "Len": 45280, + "Bone": 45281, + "agara": 45282, + "uggish": 45283, + "ĠTanks": 45284, + "Stats": 45285, + "lihood": 45286, + "Mult": 45287, + "Graph": 45288, + "ĠCannot": 45289, + "ĠSpac": 45290, + "handler": 45291, + "ĠShit": 45292, + "Ġmorp": 45293, + "controller": 45294, + "udeau": 45295, + "Screenshot": 45296, + "Development": 45297, + "Gear": 45298, + "Ġtong": 45299, + "ĠColossus": 45300, + "rylic": 45301, + "STRUCT": 45302, + "capitalist": 45303, + "Ġsupplementation": 45304, + "Parts": 45305, + "pb": 45306, + "oppy": 45307, + "pite": 45308, + "processor": 45309, + "Ġexplanatory": 45310, + "Environmental": 45311, + "Compl": 45312, + "Gaming": 45313, + "arently": 45314, + "Ġconcess": 45315, + "Ġathlet": 45316, + "forestation": 45317, + "orsi": 45318, + "igmat": 45319, + "Ġencoded": 45320, + "misc": 45321, + "Ġproofs": 45322, + "ĠRevision": 45323, + "Ġmathematic": 45324, + "Ġconstitu": 45325, + "fficiency": 45326, + "Ġlightsaber": 45327, + "gz": 45328, + "erate": 45329, + "ournals": 45330, + "Comment": 45331, + "Ġpercept": 45332, + ".\"[": 45333, + "ĠTechniques": 45334, + "coins": 45335, + "Shape": 45336, + "venant": 45337, + "ĠPrinted": 45338, + "Native": 
45339, + "ĠGors": 45340, + "pecting": 45341, + "ĠDuel": 45342, + "Ġadmins": 45343, + "Flor": 45344, + "ĠDeus": 45345, + "cham": 45346, + "ĠRails": 45347, + "ceptor": 45348, + "naire": 45349, + "ĠSquid": 45350, + "ĠWarranty": 45351, + "SPEC": 45352, + "ensis": 45353, + "FUN": 45354, + "stellar": 45355, + "Select": 45356, + "llular": 45357, + "arget": 45358, + "ĠUncharted": 45359, + "Details": 45360, + "rison": 45361, + "Ġsyntax": 45362, + "chanted": 45363, + "Ġ-----": 45364, + "Ġthats": 45365, + "Registration": 45366, + "ĠSaber": 45367, + "ethical": 45368, + "Ġcryptography": 45369, + "atown": 45370, + "Ġdependencies": 45371, + "nw": 45372, + "Ġvehement": 45373, + "Ġrationality": 45374, + "ĠThou": 45375, + "Ġ----": 45376, + "rador": 45377, + "Ġenh": 45378, + "ĠCrate": 45379, + "STATE": 45380, + "/(": 45381, + "Ġdelim": 45382, + "CEPT": 45383, + "monkey": 45384, + "pai": 45385, + "uracy": 45386, + "Ġmortals": 45387, + "Sanders": 45388, + "ĠSeraph": 45389, + "-\"": 45390, + "1945": 45391, + "endix": 45392, + ":'": 45393, + "ĠLegs": 45394, + "Exper": 45395, + "ĠKrypt": 45396, + "clinton": 45397, + "Ġuphe": 45398, + "Vers": 45399, + "Similarly": 45400, + "ressor": 45401, + "leans": 45402, + "LOG": 45403, + "cific": 45404, + "Ġ].": 45405, + "-)": 45406, + "resist": 45407, + "Pred": 45408, + "Latest": 45409, + "ilyn": 45410, + "Ġblob": 45411, + "Ġdevils": 45412, + "ĠIllusion": 45413, + "erella": 45414, + "Ġyak": 45415, + "method": 45416, + "Ġ698": 45417, + "Shadow": 45418, + "velt": 45419, + "Ġsomet": 45420, + "xc": 45421, + "Ġtriangles": 45422, + "netic": 45423, + "Calling": 45424, + "ĠDRM": 45425, + "Ġtriglycer": 45426, + "Ġinhibited": 45427, + "Ġnep": 45428, + "Ġalgebra": 45429, + "ascar": 45430, + "laim": 45431, + "Ġappl": 45432, + "1971": 45433, + "Bernie": 45434, + "Eh": 45435, + "Ġundefined": 45436, + "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 45437, + "Sys": 45438, + "ournaments": 45439, + "Solid": 45440, + "Ġhep": 45441, + "ĠMales": 45442, + "Agent": 45443, + "Ġpsychedel": 45444, + "Wik": 45445, + "Ġdoctrines": 45446, + "rection": 45447, + "Compare": 45448, + "âĺ": 45449, + "Ġcertific": 45450, + "Ġsubstr": 45451, + "ĠCitation": 45452, + "ĠAFB": 45453, + "ĠBecame": 45454, + "Ġaristocracy": 45455, + "aryl": 45456, + "Ġanatomical": 45457, + "ocumented": 45458, + "ĠAssy": 45459, + "ĠFORM": 45460, + "Traditional": 45461, + "azines": 45462, + "Content": 45463, + "furt": 45464, + "Ġscripting": 45465, + "Ġcloaked": 45466, + "Ġunint": 45467, + "ĠCivilization": 45468, + "Desktop": 45469, + "ĠRagnar": 45470, + "Ġcurses": 45471, + "Ġobservable": 45472, + "ĠSpock": 45473, + "ĠPyr": 45474, + "Ġelectrom": 45475, + "ĠLump": 45476, + "oresc": 45477, + "ĠAttribution": 45478, + "egal": 45479, + "achusetts": 45480, + "Ġmarqu": 45481, + "âĻ¦": 45482, + "Ġcursor": 45483, + "ascist": 45484, + "1966": 45485, + "edit": 45486, + "lisher": 45487, + "ocyte": 45488, + "Writer": 45489, + "BILITIES": 45490, + "ĠUpload": 45491, + "Ġtreacher": 45492, + "Ġrecomb": 45493, + "Ġknights": 45494, + "Ġimmutable": 45495, + "ĠPly": 45496, + "Ġatten": 45497, + "ĠPassed": 45498, + "Flying": 45499, + "icipated": 45500, + "querade": 45501, + "ĠZot": 45502, + "CRE": 45503, + "ĠCursed": 45504, + "ickr": 45505, + "ĠDroid": 45506, + "thereum": 45507, + "Ġadjective": 45508, + "DIT": 45509, + "Ġtob": 45510, + "Ġinit": 45511, + "ĠPenet": 45512, + "Ġignor": 45513, + "Ġexalted": 45514, + "ĠDwell": 45515, + "assemb": 45516, + "Ġsentient": 45517, + "Ġ``": 45518, + "ĠGoo": 45519, + "Professional": 45520, + "othing": 45521, + "rupted": 
45522, + "olics": 45523, + "ĠSetup": 45524, + "Thu": 45525, + "Campaign": 45526, + "Secondly": 45527, + "clipse": 45528, + "hibit": 45529, + "amate": 45530, + "SUP": 45531, + "ĠSuppose": 45532, + "submit": 45533, + "ĠDebian": 45534, + "Ġantid": 45535, + "Ġentert": 45536, + "ysical": 45537, + "ĠGladiator": 45538, + "ĠSTL": 45539, + "ĠBugs": 45540, + "ĠMech": 45541, + "ĠCoffin": 45542, + "itored": 45543, + "ICLE": 45544, + "Mist": 45545, + "Ġinfall": 45546, + "votes": 45547, + "actly": 45548, + "Occ": 45549, + "ĠConquest": 45550, + "alach": 45551, + "Ġintertw": 45552, + "reverse": 45553, + "amiya": 45554, + "icularly": 45555, + "edom": 45556, + "ĠLuxem": 45557, + "Fra": 45558, + "urrencies": 45559, + "Ġnobility": 45560, + "Tab": 45561, + "Beer": 45562, + "Ġ10000": 45563, + "Ġincor": 45564, + "Ġmelanch": 45565, + "Depth": 45566, + "Firstly": 45567, + "usr": 45568, + "ĠWiki": 45569, + "hhhh": 45570, + "ĠProxy": 45571, + "Ġantagonists": 45572, + "Ġtransistor": 45573, + "ĠRelic": 45574, + "ĠPrometheus": 45575, + "Ġ1280": 45576, + "Coun": 45577, + "ĠMedals": 45578, + "stats": 45579, + "Assembly": 45580, + "inished": 45581, + "cemic": 45582, + "Ġadventurers": 45583, + "Ġcd": 45584, + "Supporters": 45585, + "ĠYs": 45586, + "])": 45587, + "Ġneglig": 45588, + "Request": 45589, + "Ġwhore": 45590, + "Ġovercl": 45591, + "_-": 45592, + "partial": 45593, + "amd": 45594, + "Ġfructose": 45595, + "Ġdivid": 45596, + "Administ": 45597, + "amples": 45598, + "Boo": 45599, + "akery": 45600, + "owered": 45601, + "hester": 45602, + "Links": 45603, + "GROUND": 45604, + "ethy": 45605, + "Ġincarcer": 45606, + "Ġincap": 45607, + "Drag": 45608, + "ĠElastic": 45609, + "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 45610, + "Ultra": 45611, + "AAAA": 45612, + "Order": 45613, + "ĠMysteries": 45614, + "Ġcanonical": 45615, + "Ign": 45616, + "Ġanimate": 45617, + "wegian": 45618, + "ggle": 45619, + "Hash": 45620, + "Arg": 45621, + "verty": 45622, + "Ġanalges": 45623, + "ouver": 45624, + "ittees": 45625, + "ĠAsgard": 45626, + "______": 45627, + "Mix": 45628, + "1964": 45629, + "Rate": 45630, + "Ġarousal": 45631, + "pheus": 45632, + "undai": 45633, + "hetamine": 45634, + "ĠMysterious": 45635, + "Alright": 45636, + "ĠHerod": 45637, + "riott": 45638, + "ĠAnarchy": 45639, + "ĠArche": 45640, + "Question": 45641, + "Chapter": 45642, + "Token": 45643, + "ĠSphere": 45644, + "Ġinduces": 45645, + "Audio": 45646, + "Normal": 45647, + "Ġprophe": 45648, + "ĠValiant": 45649, + "Tag": 45650, + "Relations": 45651, + "Ġblinked": 45652, + "onyms": 45653, + "ĠVortex": 45654, + "Ġdb": 45655, + "emonic": 45656, + "Phase": 45657, + "Ġkingdoms": 45658, + "Twe": 45659, + "ĠLORD": 45660, + "plementation": 45661, + "ĠConstantinople": 45662, + "helm": 45663, + "ĠFlesh": 45664, + "Ġthumbnail": 45665, + "ledged": 45666, + "ĠPROG": 45667, + "Ġdisbel": 45668, + "ĠLikes": 45669, + "ĠGamer": 45670, + "renches": 45671, + "hattan": 45672, + "Index": 45673, + "pecially": 45674, + "ĠJiu": 45675, + "Ġwhats": 45676, + "erion": 45677, + "xf": 45678, + "ĠPerception": 45679, + "Alien": 45680, + "Capt": 45681, + "ãĢĤ": 45682, + "joining": 45683, + "nesium": 45684, + "ĠSocrates": 45685, + "Icon": 45686, + "animate": 45687, + "ocalypse": 45688, + "ĠTactics": 45689, + "assador": 45690, + "Veh": 45691, + "src": 45692, + ",-": 45693, + "Ġvisc": 45694, + "ĠDiscord": 45695, + "initial": 45696, + "atana": 45697, + "Size": 45698, + "Claim": 45699, + "ffect": 45700, + "iciary": 45701, + "Ġturret": 45702, + "reset": 45703, + "Ï": 45704, + "wrap": 45705, + "ulnerability": 45706, + "ĠInsert": 45707, + 
"Ġirrad": 45708, + "ognitive": 45709, + "clips": 45710, + "uncle": 45711, + "chemy": 45712, + "ottesville": 45713, + "Write": 45714, + "earances": 45715, + "1965": 45716, + "MIC": 45717, + "Ġmanag": 45718, + "Ġtelesc": 45719, + "Termin": 45720, + "Guest": 45721, + "Ġdenote": 45722, + "Failure": 45723, + "ograp": 45724, + "âĢķ": 45725, + "Ġscrolls": 45726, + "ĠArmored": 45727, + "Ġrecomp": 45728, + "Ġplaceholder": 45729, + "ĠISBN": 45730, + "ĠBelief": 45731, + "emporary": 45732, + "Asset": 45733, + "arcer": 45734, + "haar": 45735, + "assium": 45736, + "%:": 45737, + "ernal": 45738, + "ĠLv": 45739, + "atible": 45740, + "Pand": 45741, + "oubted": 45742, + "Lie": 45743, + "bial": 45744, + "STEP": 45745, + "Ġpresets": 45746, + "Ġstatist": 45747, + "Sund": 45748, + "reshold": 45749, + "endium": 45750, + "\");": 45751, + "Software": 45752, + "Ġbasal": 45753, + "ĠYose": 45754, + "Ġmortg": 45755, + "ocry": 45756, + "Ġsubreddit": 45757, + "omorphic": 45758, + "ĠLoaded": 45759, + "berra": 45760, + "vg": 45761, + "orkshire": 45762, + "ĠChrys": 45763, + "Repeat": 45764, + "ĠSimulator": 45765, + "rx": 45766, + "gex": 45767, + "Linux": 45768, + "ĠInstruct": 45769, + "irable": 45770, + "Ġmosquit": 45771, + "ĠManga": 45772, + "iOS": 45773, + "Ġsynt": 45774, + "Ġclitor": 45775, + "Ġlobe": 45776, + "ĠDelete": 45777, + "CVE": 45778, + "fortunately": 45779, + "Enc": 45780, + "vertising": 45781, + "Ġanten": 45782, + "Ġfif": 45783, + "Study": 45784, + "prev": 45785, + "ossus": 45786, + "Nar": 45787, + "Decl": 45788, + "erala": 45789, + "ĠPrototype": 45790, + "UGE": 45791, + "1001": 45792, + "Ġ---------": 45793, + "deals": 45794, + "odcast": 45795, + "TPS": 45796, + "Ġcodec": 45797, + "ittee": 45798, + "isexual": 45799, + "ĠBreaker": 45800, + "menu": 45801, + "ĠURI": 45802, + "('": 45803, + "ĠFiorina": 45804, + "ĠApostles": 45805, + "ĠWitches": 45806, + "raint": 45807, + "addafi": 45808, + "ersive": 45809, + "yrim": 45810, + "Ġmosa": 45811, + "Ġrog": 45812, + "Ear": 45813, + "âĺħ": 45814, + "Ġcaloric": 45815, + "matical": 45816, + "yrics": 45817, + "ĠKrugman": 45818, + "axter": 45819, + "1016": 45820, + "Ġsep": 45821, + "ĠExtend": 45822, + "ropolitan": 45823, + "thren": 45824, + "ologne": 45825, + "atomic": 45826, + "Naturally": 45827, + "Pros": 45828, + "gencies": 45829, + "akens": 45830, + "Male": 45831, + "Ġcausation": 45832, + "omnia": 45833, + "Comments": 45834, + "eeee": 45835, + "iquette": 45836, + "Ġcytok": 45837, + "ename": 45838, + "details": 45839, + "Ġdestruct": 45840, + "leep": 45841, + "ĠCavern": 45842, + "ĠInvention": 45843, + "ueless": 45844, + "Ġsubsection": 45845, + "outhern": 45846, + "metic": 45847, + "blogs": 45848, + "ĠPacks": 45849, + "ĠArduino": 45850, + "hhh": 45851, + "elligence": 45852, + "imity": 45853, + "ĠUltron": 45854, + "astrous": 45855, + "Ġbiome": 45856, + "ĠHover": 45857, + "Ġprivile": 45858, + "igham": 45859, + "apest": 45860, + "ĠYoshi": 45861, + "Artist": 45862, + ".\",": 45863, + "gamer": 45864, + "Virgin": 45865, + "Tea": 45866, + "ĠDoomsday": 45867, + "ĠðŁĻĤ": 45868, + "terday": 45869, + "ĠCommando": 45870, + "ĠAchieve": 45871, + "chrom": 45872, + "Ġcryptographic": 45873, + "Ġrebell": 45874, + "Specifically": 45875, + "âĢ¦âĢ¦âĢ¦âĢ¦": 45876, + "ĠEternity": 45877, + "Ġemulation": 45878, + "ĠSERV": 45879, + "ĠMiscellaneous": 45880, + "ĠParticipant": 45881, + "duc": 45882, + "vp": 45883, + "ĠSparkle": 45884, + "ategories": 45885, + "Ġdecrypt": 45886, + "ĠGNOME": 45887, + "activation": 45888, + "Ġanarch": 45889, + "owler": 45890, + "adiator": 45891, + "itars": 45892, + 
"ĠTHEN": 45893, + ")\",": 45894, + "åħ": 45895, + "Ġembod": 45896, + "vae": 45897, + "âĺĨ": 45898, + "Member": 45899, + "Ġrm": 45900, + "nyder": 45901, + "ĠLeviathan": 45902, + "Gaza": 45903, + "erenn": 45904, + "Chicken": 45905, + "ĠDefinitive": 45906, + "ĠBolshe": 45907, + "ĠJagu": 45908, + "gorith": 45909, + "loader": 45910, + "exe": 45911, + ".........": 45912, + "ĠReceived": 45913, + "ĠProto": 45914, + "ĠLocked": 45915, + "Posts": 45916, + "ankind": 45917, + "Clock": 45918, + "ĠCLI": 45919, + "Throw": 45920, + "dL": 45921, + "epad": 45922, + "ĠAtmosp": 45923, + "Ġmk": 45924, + "ĠSteal": 45925, + "uple": 45926, + "reference": 45927, + "ĠGNU": 45928, + "adelphia": 45929, + "scripts": 45930, + "ilaterally": 45931, + "ĠMods": 45932, + "odus": 45933, + "ignty": 45934, + "REF": 45935, + "Ġhypothesized": 45936, + "issors": 45937, + "Ġanus": 45938, + "HUD": 45939, + "rices": 45940, + "Draw": 45941, + "Computer": 45942, + "Below": 45943, + "uthor": 45944, + "ĠTact": 45945, + "=$": 45946, + "00000000": 45947, + "Ġcaut": 45948, + "Sharp": 45949, + "depend": 45950, + "Ġtatt": 45951, + "Goal": 45952, + "Sounds": 45953, + "zona": 45954, + "anyon": 45955, + "ricanes": 45956, + "ĠUSAF": 45957, + "Jump": 45958, + "Bottom": 45959, + "etermination": 45960, + "ĠPles": 45961, + "Ġhypothes": 45962, + "Reference": 45963, + "Ġswall": 45964, + "Ġmaneu": 45965, + "rifice": 45966, + "ĠVeh": 45967, + "Ġtex": 45968, + "geoning": 45969, + "ĠâľĶ": 45970, + "Mach": 45971, + "eanor": 45972, + "%);": 45973, + "archives": 45974, + "Ġencyclopedia": 45975, + "ĠPreferences": 45976, + "damage": 45977, + "Done": 45978, + "Ġcoefficient": 45979, + "ĠCreatures": 45980, + "Ġital": 45981, + "ivari": 45982, + "Revolution": 45983, + "Ġnob": 45984, + "Diff": 45985, + "Ġabbre": 45986, + "Writ": 45987, + "ĠDOS": 45988, + "redd": 45989, + "Ġsplend": 45990, + "orest": 45991, + "flame": 45992, + "Ġdevs": 45993, + "Ġ==": 45994, + "ĠPuzzle": 45995, + "Ġgit": 45996, + "MOD": 45997, + "ĠArgument": 45998, + "ĠAbyss": 45999, + "Studies": 46000, + "ophob": 46001, + "uild": 46002, + "scill": 46003, + "fp": 46004, + "Ġplur": 46005, + "Delete": 46006, + "ĠFALSE": 46007, + "FIL": 46008, + "Ġmicrobiota": 46009, + "ĠIPv": 46010, + "Stud": 46011, + "ortal": 46012, + "ĠDivinity": 46013, + "ounter": 46014, + "ä¸": 46015, + "Naz": 46016, + "stals": 46017, + "ihilation": 46018, + "Ġpersecut": 46019, + "ĠPlanes": 46020, + "viation": 46021, + "Driver": 46022, + "ĠEEG": 46023, + "Unity": 46024, + "Premium": 46025, + "ĠSiren": 46026, + "ĠPaleo": 46027, + "earchers": 46028, + "Pract": 46029, + "Ö": 46030, + "VII": 46031, + "mosp": 46032, + "Ġidentifiers": 46033, + "Near": 46034, + "achu": 46035, + "Apps": 46036, + "tackle": 46037, + "COLOR": 46038, + "Ġperpendicular": 46039, + "viks": 46040, + "ecided": 46041, + "ĠDota": 46042, + "icons": 46043, + "Ġpsi": 46044, + "Brave": 46045, + "Ġunimagin": 46046, + "ĠATI": 46047, + "OOL": 46048, + "Gender": 46049, + "ĠSwords": 46050, + "oples": 46051, + "Rank": 46052, + "olphins": 46053, + "Ġdeities": 46054, + "ĠXIII": 46055, + "м": 46056, + "ĠKraken": 46057, + "ĠLEVEL": 46058, + "stasy": 46059, + "ĠBabel": 46060, + "Hours": 46061, + "Avoid": 46062, + "Mech": 46063, + "Multi": 46064, + "Ġect": 46065, + "Occup": 46066, + "panic": 46067, + "Ġmutants": 46068, + "Evidence": 46069, + "Tips": 46070, + "Ġvolts": 46071, + "Exit": 46072, + "xb": 46073, + "planet": 46074, + "avez": 46075, + "features": 46076, + ")]": 46077, + "lol": 46078, + "ĠNeph": 46079, + "ĠSanct": 46080, + "Ġimpover": 46081, + 
"................................": 46082, + "Sty": 46083, + "Email": 46084, + "Torrent": 46085, + "Ġgluc": 46086, + "ĠSins": 46087, + "ĠIncarn": 46088, + "ĠWITHOUT": 46089, + "ĠPanzer": 46090, + "ĠAssignment": 46091, + "versible": 46092, + "Strange": 46093, + "ITNESS": 46094, + "incible": 46095, + "ZX": 46096, + "ĠMySQL": 46097, + "Ġconson": 46098, + "Ġoxidative": 46099, + "Machine": 46100, + "Impro": 46101, + "Parent": 46102, + "ĠMetroid": 46103, + "Educ": 46104, + "Ġdismant": 46105, + "dx": 46106, + "ĠPersona": 46107, + "ĠHDL": 46108, + "Americ": 46109, + "Users": 46110, + "Ġeighteenth": 46111, + "WARNING": 46112, + "ĠLists": 46113, + "ĠCanter": 46114, + "ĠTrotsky": 46115, + "Ġhaha": 46116, + "]'": 46117, + "ĠEncyclopedia": 46118, + "admin": 46119, + "ĠACTIONS": 46120, + "idav": 46121, + "ο": 46122, + "ĠFTP": 46123, + "Ġquar": 46124, + "ongyang": 46125, + "âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦": 46126, + "Ġsynchronization": 46127, + "DEM": 46128, + "riched": 46129, + "Ġnegro": 46130, + "Bench": 46131, + "Ġfilament": 46132, + "Ġdecoding": 46133, + "obj": 46134, + "Ġjoystick": 46135, + "Decre": 46136, + "ĠBolshevik": 46137, + "Virtual": 46138, + "ĠSacrament": 46139, + "xd": 46140, + "BILL": 46141, + "-+-+": 46142, + "¶": 46143, + "anchester": 46144, + "Pokemon": 46145, + "Ġslic": 46146, + "iameter": 46147, + "errilla": 46148, + "Exactly": 46149, + "\"'": 46150, + "getic": 46151, + "3333": 46152, + "solete": 46153, + "Ġincorpor": 46154, + "Ġio": 46155, + "------------": 46156, + "Ġantiquity": 46157, + "ATURES": 46158, + "Policy": 46159, + "oppable": 46160, + "Ġ=>": 46161, + "ODUCT": 46162, + "otide": 46163, + "Ú": 46164, + "Ġnormative": 46165, + "Fac": 46166, + "Ġshaman": 46167, + "element": 46168, + "Plex": 46169, + "INTER": 46170, + "etsk": 46171, + "ĠGauntlet": 46172, + "ĠBIOS": 46173, + "×ķ": 46174, + "riet": 46175, + "Rew": 46176, + "uristic": 46177, + "urches": 46178, + "ĠChomsky": 46179, + "ixir": 46180, + "package": 46181, + "Owner": 46182, + "Ġschematic": 46183, + "Assistant": 46184, + "Ġemanc": 46185, + "Ġarchetype": 46186, + "Initial": 46187, + "intent": 46188, + "Ġfilib": 46189, + "ispers": 46190, + "Flag": 46191, + "Tank": 46192, + "Ġinsurg": 46193, + "Ġapproximation": 46194, + "Ġsemantic": 46195, + "Ġsubtitle": 46196, + "Font": 46197, + "Ġintimid": 46198, + "Ġhath": 46199, + "tools": 46200, + "gob": 46201, + "Process": 46202, + "slave": 46203, + "ĠJUSTICE": 46204, + "âĻ¥": 46205, + "ĠHardcore": 46206, + "Discover": 46207, + "Ġexch": 46208, + "ptive": 46209, + "units": 46210, + "ĠDjango": 46211, + "itudinal": 46212, + "Ġpc": 46213, + "akespeare": 46214, + "ospace": 46215, + "Ġhorny": 46216, + "auth": 46217, + "ĠSkyrim": 46218, + "ENGTH": 46219, + "perors": 46220, + "ĠVulkan": 46221, + "Ġchimpan": 46222, + "Ġremem": 46223, + "Ġopacity": 46224, + "Ġ:(": 46225, + "ushima": 46226, + "Ġawoken": 46227, + "Ġsacrament": 46228, + "Beginning": 46229, + "escape": 46230, + "Anim": 46231, + "Ġadvant": 46232, + "ĠRequires": 46233, + "output": 46234, + "Ġdroid": 46235, + "Yep": 46236, + "rieving": 46237, + "Ġpt": 46238, + "ĠShotgun": 46239, + "ĠOsiris": 46240, + "disabled": 46241, + "ĠRadius": 46242, + "Medium": 46243, + "ĠScient": 46244, + "ĠRept": 46245, + "ymm": 46246, + "Ġcp": 46247, + "ĠLabyrinth": 46248, + "poral": 46249, + "Ġ'(": 46250, + "Hack": 46251, + "ĠTechnique": 46252, + "/,": 46253, + "Ġambig": 46254, + "Basic": 46255, + "Ġretrie": 46256, + "VICE": 46257, + "BIP": 46258, + "ragon": 46259, + "phies": 46260, + "uminum": 46261, + "ĠFei": 46262, + "lesi": 46263, + "Ġsemantics": 46264, + "ĠHz": 
46265, + "ĠUnderworld": 46266, + "Ġendot": 46267, + "olesterol": 46268, + "ourning": 46269, + "Ġcaches": 46270, + "ĠYug": 46271, + "Legendary": 46272, + "ĠDocumentation": 46273, + "ĠSpiral": 46274, + "ĠClone": 46275, + "bnb": 46276, + "ĠâĶ": 46277, + "ustom": 46278, + "Mp": 46279, + "gettable": 46280, + "agonist": 46281, + "Ġneuronal": 46282, + "culus": 46283, + "enum": 46284, + "cules": 46285, + "Ġmuttered": 46286, + "ctica": 46287, + "necess": 46288, + "ĠSubtle": 46289, + "Ġsolder": 46290, + "Environment": 46291, + "oneliness": 46292, + "orage": 46293, + "âĢ¦.\"": 46294, + "nesota": 46295, + "agements": 46296, + "Ùİ": 46297, + "WHERE": 46298, + "ĠGDDR": 46299, + "Scient": 46300, + "ĠMulcair": 46301, + "ĠRena": 46302, + "________________________________________________________________": 46303, + "antics": 46304, + "Ġtorped": 46305, + "Brow": 46306, + "ossal": 46307, + "Category": 46308, + "Regular": 46309, + "remote": 46310, + "ãģ": 46311, + "ĠCoil": 46312, + "ritch": 46313, + "specified": 46314, + "Average": 46315, + "Ġfingert": 46316, + "entity": 46317, + "atibility": 46318, + "ampunk": 46319, + "ĠScriptures": 46320, + "Ġunequ": 46321, + "arettes": 46322, + "arching": 46323, + "Ġastron": 46324, + "Ġnumeric": 46325, + "ĠeBook": 46326, + "remove": 46327, + "onday": 46328, + "Ġmetaphysical": 46329, + "ĠGoku": 46330, + "Element": 46331, + "ĠRuin": 46332, + "Norm": 46333, + "Ġtox": 46334, + "puff": 46335, + "Ġharmonic": 46336, + "ĠAgility": 46337, + "ĠHearthstone": 46338, + "Ġmana": 46339, + "Points": 46340, + "Ġconduc": 46341, + "ĠPersia": 46342, + "-----": 46343, + "license": 46344, + "Application": 46345, + "assert": 46346, + "Reader": 46347, + "ĠSacrifice": 46348, + "float": 46349, + "inctions": 46350, + "byter": 46351, + "Ġfundament": 46352, + "\"âĢ¦": 46353, + "Fourth": 46354, + "Effective": 46355, + "ĠMeow": 46356, + "ĠErrors": 46357, + "ĠIcar": 46358, + "ĠMMO": 46359, + "Ġapostles": 46360, + "Ġfaintly": 46361, + "component": 46362, + "bably": 46363, + "uggage": 46364, + "ĠMPG": 46365, + "krit": 46366, + "container": 46367, + "ixture": 46368, + "ĠPOV": 46369, + "izabeth": 46370, + "onut": 46371, + "isdom": 46372, + "trace": 46373, + "ĠSDL": 46374, + "Interestingly": 46375, + "ĠExplan": 46376, + "lesiastical": 46377, + "ternal": 46378, + "Bug": 46379, + "Ġmetabolites": 46380, + "geries": 46381, + "Ġsupra": 46382, + "ĠMakoto": 46383, + "orget": 46384, + "racuse": 46385, + "][": 46386, + "ĠPrelude": 46387, + "peria": 46388, + "tube": 46389, + "ĠCatalog": 46390, + "ĠGoblin": 46391, + "QUEST": 46392, + "ĠINCLUD": 46393, + "ĠVERS": 46394, + "erguson": 46395, + "Ġcommandments": 46396, + "ĠUDP": 46397, + "itle": 46398, + "ι": 46399, + "domain": 46400, + "roximately": 46401, + "ĠTLS": 46402, + "ongevity": 46403, + "Ġmodulation": 46404, + "Ġdidnt": 46405, + "ĠCalories": 46406, + "Applications": 46407, + "ormon": 46408, + "Ġsd": 46409, + "dullah": 46410, + "Ġcous": 46411, + "ĠDARK": 46412, + "clip": 46413, + "ĠPsychiat": 46414, + "ĠTanz": 46415, + "ĠCharisma": 46416, + "ĠMerge": 46417, + "ĠKDE": 46418, + "requires": 46419, + "urdue": 46420, + "Ġdecimal": 46421, + "Ġâī¥": 46422, + "ĠAuth": 46423, + "ebted": 46424, + "ĠTempl": 46425, + "ĠâĢº": 46426, + "Ultimate": 46427, + "Ġmammalian": 46428, + "advertising": 46429, + "Ġdominion": 46430, + "Ġacron": 46431, + "ĠWem": 46432, + "ĠHeist": 46433, + "oiler": 46434, + "FLAG": 46435, + "ovember": 46436, + "Syn": 46437, + "Ġgodd": 46438, + "ĠPyth": 46439, + "Ġglyc": 46440, + "ĠHelpful": 46441, + "Ġgad": 46442, + "chedel": 46443, + "Similar": 46444, + 
"Ġ¶": 46445, + "Ġnp": 46446, + "ĠREPL": 46447, + "Fill": 46448, + "ĠSunder": 46449, + "etsy": 46450, + "ĠPAX": 46451, + "ĠFemales": 46452, + "ĠKingdoms": 46453, + "Ġwhistlebl": 46454, + "Hide": 46455, + "serial": 46456, + "ĠEnemies": 46457, + "ĠPeb": 46458, + "Ġpiety": 46459, + "ifact": 46460, + "esity": 46461, + "bsite": 46462, + "esides": 46463, + "Ġported": 46464, + "Ġamygdala": 46465, + "ĠGerr": 46466, + "afety": 46467, + "Ġadip": 46468, + "(\"": 46469, + "Ġcf": 46470, + "Ġurl": 46471, + "unia": 46472, + "icro": 46473, + "Austral": 46474, + "ĠConfig": 46475, + "accompanied": 46476, + "isite": 46477, + "Ġtextual": 46478, + "\">": 46479, + "Ġanecd": 46480, + "Ġ\",": 46481, + "angular": 46482, + "ĠUnicode": 46483, + "Proof": 46484, + "Ġmultiplication": 46485, + "Address": 46486, + "Ġbytes": 46487, + "lems": 46488, + "uterte": 46489, + "Episode": 46490, + "oshop": 46491, + "ritical": 46492, + "Adjust": 46493, + "argument": 46494, + "\\'": 46495, + "Rober": 46496, + "pection": 46497, + "Agg": 46498, + "äº": 46499, + "interrupted": 46500, + "ĠDebor": 46501, + "Ġlair": 46502, + "Various": 46503, + "isively": 46504, + "ĠStatic": 46505, + "ohyd": 46506, + "ĠEchoes": 46507, + "UID": 46508, + "raught": 46509, + "Bott": 46510, + "Ġapostle": 46511, + "ĠCentauri": 46512, + "oxicity": 46513, + "ibling": 46514, + "Ġparalle": 46515, + "inav": 46516, + "Crit": 46517, + "ĠTyph": 46518, + "Ġhig": 46519, + "ĠEDITION": 46520, + "Ġcoord": 46521, + "uish": 46522, + "sectional": 46523, + "inki": 46524, + "Title": 46525, + "anyahu": 46526, + "osterone": 46527, + "Ġdesper": 46528, + "ribly": 46529, + "Legend": 46530, + "afort": 46531, + "Org": 46532, + "Ġempir": 46533, + "ĠQuake": 46534, + "SSL": 46535, + "ioxide": 46536, + "åľ": 46537, + "Ġenz": 46538, + "urtle": 46539, + "BSD": 46540, + "Rust": 46541, + "ospels": 46542, + "Rare": 46543, + "Ġpartitions": 46544, + "Ġheresy": 46545, + "overy": 46546, + "Ġmonop": 46547, + "Pixel": 46548, + "odder": 46549, + "Option": 46550, + "withstanding": 46551, + "Transfer": 46552, + "Ġarrog": 46553, + "skip": 46554, + "ĠSSH": 46555, + "ĠSph": 46556, + "Ġcallback": 46557, + "PIN": 46558, + "Ġpdf": 46559, + "Ġplaint": 46560, + "cipled": 46561, + "reenshots": 46562, + "Ġparsing": 46563, + "::::::::": 46564, + "ioxid": 46565, + "Ġhereafter": 46566, + "ĠFunctions": 46567, + "ĠBulgar": 46568, + "Ġintu": 46569, + "DOC": 46570, + "Location": 46571, + "Hyper": 46572, + "ageddon": 46573, + "Evil": 46574, + "illions": 46575, + "Introduction": 46576, + "Physical": 46577, + "ĠLayout": 46578, + "âķ": 46579, + "------------------------": 46580, + "ĠRodham": 46581, + "ĠPatterns": 46582, + "Delivery": 46583, + "Ġdistur": 46584, + "ĠVolunte": 46585, + "ĠGUI": 46586, + "Ġclen": 46587, + "Ġinacc": 46588, + "ĠBallistic": 46589, + "ĠSprite": 46590, + "Privacy": 46591, + "theme": 46592, + "dump": 46593, + "ĠByte": 46594, + "ĠIncre": 46595, + "apult": 46596, + "ĠWrath": 46597, + "ensibly": 46598, + "NOTE": 46599, + "ounge": 46600, + "ustomed": 46601, + "ochond": 46602, + "ĠQt": 46603, + "Primary": 46604, + "Ġsidew": 46605, + "Root": 46606, + "gregation": 46607, + "SQL": 46608, + "ĠSOFTWARE": 46609, + "Gallery": 46610, + "ĠDungeon": 46611, + "ĠVengeance": 46612, + "->": 46613, + "steam": 46614, + "Ġfrivol": 46615, + "Ġpid": 46616, + "filter": 46617, + "Ġfacult": 46618, + "doms": 46619, + "Tool": 46620, + "1959": 46621, + "Ġprefix": 46622, + "Ġcomma": 46623, + "relative": 46624, + "Ġformatted": 46625, + "appropriately": 46626, + "Ġmd": 46627, + "xxx": 46628, + "ĠAuthentication": 46629, + "ĠWTC": 
46630, + "Ġvulner": 46631, + "reditary": 46632, + "Steam": 46633, + "Tx": 46634, + "ĠGHC": 46635, + "Increased": 46636, + "forcement": 46637, + "ĠGuant": 46638, + "bernatorial": 46639, + "Entry": 46640, + "ĠWarp": 46641, + "ĠCreature": 46642, + "ĠAmmunition": 46643, + "Ġclust": 46644, + "ĠInher": 46645, + "Ġunbel": 46646, + "RGB": 46647, + "ĠMankind": 46648, + "ĠPlague": 46649, + "Ġ=================================": 46650, + "psc": 46651, + "Intern": 46652, + "tml": 46653, + "ĠCrusade": 46654, + "inflamm": 46655, + "Storage": 46656, + "token": 46657, + "inse": 46658, + "False": 46659, + "Adult": 46660, + "Pokémon": 46661, + "PLIED": 46662, + "Ġglac": 46663, + "ĠDwarf": 46664, + "sequence": 46665, + "Ġmagnification": 46666, + "ĠIlluminati": 46667, + "hedral": 46668, + "param": 46669, + "regon": 46670, + ".\",\"": 46671, + "Eva": 46672, + "igree": 46673, + "Object": 46674, + "Ġoptimizations": 46675, + "uador": 46676, + "mmmm": 46677, + "ullivan": 46678, + "Ġ[\"": 46679, + "ĠDusk": 46680, + "Ġtrig": 46681, + "Ġiss": 46682, + "Ġhypert": 46683, + "Ġperspect": 46684, + "Ġassum": 46685, + ":,": 46686, + "Ġinterpol": 46687, + "Asked": 46688, + "Boot": 46689, + "LIB": 46690, + "Loading": 46691, + "Ident": 46692, + "upuncture": 46693, + "ioch": 46694, + "Ġprefrontal": 46695, + "delay": 46696, + "ĠPoké": 46697, + "bestos": 46698, + "overe": 46699, + "Elf": 46700, + "eteria": 46701, + "ĠSneak": 46702, + "bians": 46703, + "ĠARTICLE": 46704, + "Xbox": 46705, + "encrypted": 46706, + "ync": 46707, + "ĠNietzsche": 46708, + "Nonetheless": 46709, + "Ġ±": 46710, + "ĠPrimal": 46711, + "ĠFlare": 46712, + "Ġconflic": 46713, + "ĠRune": 46714, + "Tes": 46715, + "cellence": 46716, + "Mega": 46717, + "ĠEntity": 46718, + "chrome": 46719, + "iatures": 46720, + "Ġuninstall": 46721, + "Winner": 46722, + "aimon": 46723, + "Ġhomebrew": 46724, + "Ruby": 46725, + "araoh": 46726, + "itime": 46727, + "Ġpotion": 46728, + "ĠAllows": 46729, + "ogyn": 46730, + "osuke": 46731, + "Limited": 46732, + "Ġmacros": 46733, + "ERROR": 46734, + "gling": 46735, + "Ġtodd": 46736, + "repre": 46737, + "ĠSakura": 46738, + "erker": 46739, + "items": 46740, + "FIG": 46741, + "ĠUnle": 46742, + "Ġhardness": 46743, + "Split": 46744, + "Ġarous": 46745, + "ocally": 46746, + "Ġì": 46747, + "ĠEVE": 46748, + "pleasant": 46749, + "ihil": 46750, + "ĠRouter": 46751, + "ĠLucius": 46752, + "readable": 46753, + "Ġtremb": 46754, + "Dro": 46755, + "Ġblaster": 46756, + "Ġbourgeoisie": 46757, + "NUM": 46758, + "Alternative": 46759, + "flags": 46760, + "GAME": 46761, + "ebook": 46762, + "ĠIPM": 46763, + "Ġcorrel": 46764, + "Setting": 46765, + "Frame": 46766, + "Ġatheism": 46767, + "Interested": 46768, + "Liquid": 46769, + "stanbul": 46770, + "Lv": 46771, + "Ġtits": 46772, + "Ġdc": 46773, + "×Ļ×": 46774, + "Ġdoctr": 46775, + "background": 46776, + "tsy": 46777, + "ĠCtrl": 46778, + "ĠCompatibility": 46779, + "idae": 46780, + "example": 46781, + "perture": 46782, + "Ġguid": 46783, + "ĠWinged": 46784, + "Command": 46785, + "ridor": 46786, + "bool": 46787, + "comments": 46788, + "ĠImmunity": 46789, + "Nit": 46790, + "Statement": 46791, + "Ġmanif": 46792, + "ĠIntake": 46793, + "Bloom": 46794, + "txt": 46795, + "context": 46796, + "input": 46797, + "achus": 46798, + "proc": 46799, + "Ñĭ": 46800, + "Ġdisemb": 46801, + "ospons": 46802, + "utical": 46803, + "ĠRender": 46804, + "Ironically": 46805, + "ursday": 46806, + "ĠExile": 46807, + "lishes": 46808, + "iets": 46809, + "orescent": 46810, + "cair": 46811, + "ĠSubjects": 46812, + "ĠDungeons": 46813, + "Ġiii": 46814, + 
"neapolis": 46815, + "ĠBlaster": 46816, + "Ġphp": 46817, + "ORED": 46818, + "ĠSLI": 46819, + "Ġelig": 46820, + "ĠIdentified": 46821, + "ĠBrawl": 46822, + "bytes": 46823, + "ĠCTR": 46824, + "Ġsched": 46825, + "Assuming": 46826, + "Bound": 46827, + "ĠMathemat": 46828, + "razil": 46829, + "ĠAstral": 46830, + "mble": 46831, + "untled": 46832, + "Ġmech": 46833, + "ĠDagger": 46834, + "ĠUseful": 46835, + "nesday": 46836, + "tarians": 46837, + "AMY": 46838, + "Camera": 46839, + "node": 46840, + "pict": 46841, + "ginx": 46842, + "Ġyea": 46843, + ">>>>>>>>": 46844, + "paragraph": 46845, + "ĠSupplementary": 46846, + "9999": 46847, + "ĠAlchemist": 46848, + "uzzle": 46849, + "igun": 46850, + "ĠCalculator": 46851, + "ĠApplicant": 46852, + "hift": 46853, + "ĠGPL": 46854, + "Ġencode": 46855, + "Crash": 46856, + "ĠNutr": 46857, + "kHz": 46858, + "TABLE": 46859, + "intestinal": 46860, + "andom": 46861, + "archive": 46862, + "Ëľ": 46863, + "Registered": 46864, + "Questions": 46865, + "Remote": 46866, + "ethyst": 46867, + "Ġgren": 46868, + "ĠTexture": 46869, + "Ġseiz": 46870, + "Anyway": 46871, + "ĠVariant": 46872, + "ê": 46873, + "Adapt": 46874, + "ittered": 46875, + "meta": 46876, + "ambers": 46877, + "ĠRuins": 46878, + "ĠChimera": 46879, + "password": 46880, + "ĠReboot": 46881, + "Ġcaster": 46882, + "Ġamplitude": 46883, + "Position": 46884, + "Ġnotation": 46885, + "Ġsecretion": 46886, + "Excellent": 46887, + "delete": 46888, + "aminer": 46889, + "ä»": 46890, + "Exec": 46891, + "ĠKenobi": 46892, + "Interview": 46893, + "ontent": 46894, + "ospel": 46895, + "Ġtuber": 46896, + "CONT": 46897, + "roups": 46898, + "Ġemulator": 46899, + "Ġjava": 46900, + "0200": 46901, + "Ġnested": 46902, + "Ġfert": 46903, + ")).": 46904, + "Dex": 46905, + "ĠSora": 46906, + "Ġpotions": 46907, + "ĠAnon": 46908, + "aah": 46909, + "Ġdunno": 46910, + "Ġμ": 46911, + "Ġmethodological": 46912, + "itles": 46913, + "phia": 46914, + "Beg": 46915, + "Rules": 46916, + "ĠXML": 46917, + "Ġflask": 46918, + "ĠShogun": 46919, + "Ġ2048": 46920, + "atchewan": 46921, + "Ġfuckin": 46922, + "Built": 46923, + "Ġbour": 46924, + "Ġdisag": 46925, + "yss": 46926, + "ĠÏ": 46927, + "Spoiler": 46928, + "Wiki": 46929, + "Ġmorphology": 46930, + "Ġendors": 46931, + "Ġdungeons": 46932, + "dragon": 46933, + ")),": 46934, + "Ġhous": 46935, + "Ġoverwhel": 46936, + "SAY": 46937, + "abwe": 46938, + "--------------------------------": 46939, + "Ġepist": 46940, + "Ġpalp": 46941, + "ĠExtensions": 46942, + "ĠMistress": 46943, + "ĠUkrain": 46944, + "================": 46945, + "edience": 46946, + "abama": 46947, + "ĠLua": 46948, + "ĠOffline": 46949, + "ĠKonami": 46950, + "unicip": 46951, + "ĠMachina": 46952, + "Specific": 46953, + "Ġpresupp": 46954, + "ĠGEAR": 46955, + "rition": 46956, + "rences": 46957, + "successfully": 46958, + "Ġ1024": 46959, + "Platform": 46960, + "}}": 46961, + "clude": 46962, + "roxy": 46963, + "Ġpromot": 46964, + "ĠAdapter": 46965, + "rocal": 46966, + "ĠMasquerade": 46967, + "Panel": 46968, + "Language": 46969, + "elsius": 46970, + "Push": 46971, + "abase": 46972, + "ĠdB": 46973, + "argon": 46974, + "ĠRemoved": 46975, + "amph": 46976, + "ĠWyr": 46977, + "Ġindisp": 46978, + "ĠOkin": 46979, + "aepernick": 46980, + "moil": 46981, + "Continue": 46982, + "00007": 46983, + "ĠJournals": 46984, + "TAG": 46985, + "ĠRemastered": 46986, + "Ġsymp": 46987, + "methyl": 46988, + "Overview": 46989, + "umeric": 46990, + "ĠCodex": 46991, + ".$": 46992, + "ranged": 46993, + "Sym": 46994, + "ĠVerse": 46995, + "ĠEnabled": 46996, + "ĠFUCK": 46997, + "ĠHearth": 46998, + 
"Ġbrill": 46999, + "ĠChaser": 47000, + "Beh": 47001, + "ĠAlchemy": 47002, + "Oracle": 47003, + "roleum": 47004, + "ĠVoldemort": 47005, + "();": 47006, + "Ġcollaps": 47007, + "Visual": 47008, + "ĠAngular": 47009, + "ĠOsc": 47010, + "ichita": 47011, + "Ġcig": 47012, + "Ġtoolbar": 47013, + "ĠEnlight": 47014, + "ÑĮ": 47015, + "ε": 47016, + "aliation": 47017, + "ĠLovecraft": 47018, + "jri": 47019, + "ĠInterstellar": 47020, + "Ġdebugging": 47021, + "Ġparentheses": 47022, + "ĠInit": 47023, + "Located": 47024, + "Weak": 47025, + "ĠPvP": 47026, + "ĠCloak": 47027, + "uture": 47028, + "iths": 47029, + "asionally": 47030, + "FACE": 47031, + "Introdu": 47032, + "');": 47033, + "slot": 47034, + "aturday": 47035, + "ĠNiet": 47036, + "Ġpuzz": 47037, + "!!!!!!!!": 47038, + "folios": 47039, + "Ç": 47040, + "Ġverbs": 47041, + "ĠFrames": 47042, + "ĠAmbro": 47043, + "Ġmillisec": 47044, + "ĠRebell": 47045, + "ylum": 47046, + "PASS": 47047, + "ĠConfiguration": 47048, + "μ": 47049, + "brids": 47050, + "vantage": 47051, + "Ġ['": 47052, + "ĠScy": 47053, + "Benef": 47054, + "gradation": 47055, + "ĠOrc": 47056, + "Resources": 47057, + "Awesome": 47058, + "ĠMilitia": 47059, + "POST": 47060, + "Ġbinaries": 47061, + "Mode": 47062, + "Ġkb": 47063, + "ĠWARRANT": 47064, + "hemy": 47065, + "Desc": 47066, + "alion": 47067, + "Ġwiki": 47068, + "Ġcommer": 47069, + "Serial": 47070, + "ĠUncommon": 47071, + "ignore": 47072, + "Ġconstructor": 47073, + "ctl": 47074, + "Ġ):": 47075, + "ĠVerify": 47076, + "Notice": 47077, + "ĠRPGs": 47078, + "uckland": 47079, + "Ġincre": 47080, + "Pinterest": 47081, + "ĠDefinitions": 47082, + "iband": 47083, + "Ġtd": 47084, + "Ġsubscrib": 47085, + "Shin": 47086, + "ĠGadget": 47087, + "Document": 47088, + "å®": 47089, + "Requ": 47090, + "QUIRE": 47091, + "ĠQuadro": 47092, + "ĠUnix": 47093, + "Enlarge": 47094, + "thens": 47095, + "\"...": 47096, + "gebra": 47097, + "pload": 47098, + "alogue": 47099, + "vironments": 47100, + "Strength": 47101, + "ĠPID": 47102, + "ĠInvaders": 47103, + "HOME": 47104, + "Atl": 47105, + "ĠBlizz": 47106, + "ĠWidth": 47107, + "ĠOpenGL": 47108, + "zx": 47109, + "$,": 47110, + "Ġå": 47111, + "cig": 47112, + "lectic": 47113, + "relation": 47114, + "Ġfeas": 47115, + "undown": 47116, + "Said": 47117, + "ν": 47118, + "��": 47119, + "english": 47120, + "ĠTokens": 47121, + "ĠALEC": 47122, + "OOOO": 47123, + "isconsin": 47124, + "Ġconstants": 47125, + "ĠTemplar": 47126, + "Accept": 47127, + "Ġmascul": 47128, + "enegger": 47129, + "ampires": 47130, + "Rated": 47131, + "lua": 47132, + "ucl": 47133, + "ĠSequence": 47134, + "ĠNRS": 47135, + "STD": 47136, + "Cra": 47137, + "autions": 47138, + "ĠKernel": 47139, + "oleon": 47140, + "htaking": 47141, + "ancial": 47142, + "Pages": 47143, + "orthodox": 47144, + "ropy": 47145, + "EEE": 47146, + "Ġtranssexual": 47147, + "?????": 47148, + "Ġsurpr": 47149, + "arthy": 47150, + "ĠPsychic": 47151, + "Ġdorsal": 47152, + "cember": 47153, + "joice": 47154, + "/+": 47155, + "verend": 47156, + "uint": 47157, + "Ġderog": 47158, + "Subject": 47159, + "hemat": 47160, + "!]": 47161, + "Ġ);": 47162, + "Ġmeshes": 47163, + "Ġreperc": 47164, + "ĠTerran": 47165, + "åĪ": 47166, + "Load": 47167, + "å¹": 47168, + "ikarp": 47169, + "rompt": 47170, + "Ġgoblins": 47171, + "ĠShattered": 47172, + "tests": 47173, + "Spread": 47174, + "ĠNaruto": 47175, + "Ġpredic": 47176, + "Hyp": 47177, + "ĠArkham": 47178, + "ĠNASL": 47179, + "Material": 47180, + "Rule": 47181, + "raviolet": 47182, + "ĠKlingon": 47183, + "Memory": 47184, + "acers": 47185, + "Known": 47186, + "Important": 
47187, + "Ġα": 47188, + "Ġtraged": 47189, + "Ġshalt": 47190, + "Ġiso": 47191, + "ĠJSON": 47192, + "Instant": 47193, + "Ġpg": 47194, + "Ġexponent": 47195, + "formance": 47196, + "bitcoin": 47197, + "DOS": 47198, + "cheat": 47199, + "Ġrook": 47200, + "ĠBiol": 47201, + "noticed": 47202, + "Ġtwent": 47203, + "ĠRedux": 47204, + "ĠBorderlands": 47205, + "Supported": 47206, + "TRUMP": 47207, + "Ġturrets": 47208, + "include": 47209, + "Effect": 47210, + "Ġdisg": 47211, + "ophical": 47212, + "ĠFaction": 47213, + "wiki": 47214, + "Ġsrc": 47215, + "Laun": 47216, + "TIT": 47217, + "Ġorbs": 47218, + "Ġincompet": 47219, + "Ġdescriptor": 47220, + "ĠTrog": 47221, + "Contribut": 47222, + "ĠGodd": 47223, + "inances": 47224, + "Ult": 47225, + "lyak": 47226, + "âĢ¢âĢ¢âĢ¢âĢ¢": 47227, + "stitial": 47228, + "essim": 47229, + "Graphics": 47230, + "ubis": 47231, + "Ġegreg": 47232, + "DEV": 47233, + "Ġannotations": 47234, + "Yang": 47235, + "ĠDruid": 47236, + "ĠInquisition": 47237, + "ohydrate": 47238, + "Critical": 47239, + "æĸ": 47240, + "Sample": 47241, + "ĠPref": 47242, + "ĠUnleashed": 47243, + "ĠAccessed": 47244, + "Ġconceptions": 47245, + "Minor": 47246, + "pard": 47247, + "prus": 47248, + "Factory": 47249, + "thinkable": 47250, + "Ġexecutable": 47251, + "chapter": 47252, + "inyl": 47253, + "Display": 47254, + "ilater": 47255, + "Released": 47256, + "ĠDirectX": 47257, + "aneers": 47258, + "Ġ______": 47259, + "ĠHilbert": 47260, + "Options": 47261, + "Ġsorcery": 47262, + "esm": 47263, + "ÏĦ": 47264, + "Ġdescript": 47265, + "ĠTycoon": 47266, + "psons": 47267, + "Ġcov": 47268, + "Launch": 47269, + "ogeneity": 47270, + "Ġsacrific": 47271, + "ADRA": 47272, + "netflix": 47273, + "flix": 47274, + "usage": 47275, + "properties": 47276, + "attach": 47277, + "req": 47278, + "Resource": 47279, + "requisite": 47280, + "1007": 47281, + "ĠMIDI": 47282, + "ĠZoro": 47283, + "Tue": 47284, + "hower": 47285, + "dds": 47286, + "ynasty": 47287, + "headers": 47288, + "Ġdisproportion": 47289, + "omaly": 47290, + "Ġvim": 47291, + "inces": 47292, + "edient": 47293, + "ĠWraith": 47294, + "ilibrium": 47295, + "Hig": 47296, + "ĠFrie": 47297, + "Meat": 47298, + "ldom": 47299, + "KNOWN": 47300, + "orgetown": 47301, + "Improve": 47302, + "10000": 47303, + "Ġretarded": 47304, + "Disclaimer": 47305, + "Ġunfocused": 47306, + "ĠUnsure": 47307, + "ĠElixir": 47308, + "idth": 47309, + "atural": 47310, + "ĠErr": 47311, + "Critics": 47312, + "ĠBows": 47313, + "ifferent": 47314, + "proxy": 47315, + "Lic": 47316, + "aucas": 47317, + "rolet": 47318, + "ĠCoC": 47319, + "Ġdoesnt": 47320, + "phabet": 47321, + "Version": 47322, + "Ġhepat": 47323, + "gif": 47324, + "izophren": 47325, + "ãĥ»": 47326, + "ĠGutenberg": 47327, + "β": 47328, + "phans": 47329, + "Scene": 47330, + "Ġaccomp": 47331, + "ilings": 47332, + "rypted": 47333, + "aceae": 47334, + "arantine": 47335, + "heses": 47336, + "iasco": 47337, + "lopp": 47338, + "ĠGSL": 47339, + "disk": 47340, + "ãĢģ": 47341, + "0010": 47342, + "ĠOutbreak": 47343, + "Column": 47344, + "odox": 47345, + "atform": 47346, + "ĠThrust": 47347, + "ĠSVG": 47348, + "Enhanced": 47349, + "¯": 47350, + "Tools": 47351, + "rogens": 47352, + "xus": 47353, + "Available": 47354, + "zbollah": 47355, + "è¡": 47356, + "osate": 47357, + "usb": 47358, + "ordes": 47359, + "Matrix": 47360, + "ĠBlazing": 47361, + "ascus": 47362, + "ĠSovere": 47363, + "hement": 47364, + "*:": 47365, + "amaru": 47366, + "Ġparsed": 47367, + "Bonus": 47368, + "otrop": 47369, + "spell": 47370, + "ancock": 47371, + "ĠEnchant": 47372, + "vP": 47373, + "ĠReferred": 
47374, + "Ġalot": 47375, + "ĠRuntime": 47376, + "ĠFn": 47377, + "CPU": 47378, + "ĠNicotine": 47379, + "External": 47380, + "ĠNightmares": 47381, + "Ġentropy": 47382, + "kB": 47383, + "ĠRealms": 47384, + "Ġ##": 47385, + "Ġsubmar": 47386, + "ĠSlime": 47387, + "itual": 47388, + "ĠBastard": 47389, + "Ġacknowled": 47390, + "Magazine": 47391, + "rendered": 47392, + "ircraft": 47393, + "CSS": 47394, + "Numbers": 47395, + "Pg": 47396, + "utenant": 47397, + "ĠPalest": 47398, + "ĠRoose": 47399, + "udicrous": 47400, + "anooga": 47401, + "Unt": 47402, + "Ġcapacitor": 47403, + "Ġschema": 47404, + "hematic": 47405, + "ĠPinball": 47406, + "endars": 47407, + "Ġ===": 47408, + "nsic": 47409, + "ipedia": 47410, + "Ġchromos": 47411, + "ĠmRNA": 47412, + "Ct": 47413, + "ĠPaladin": 47414, + "sonian": 47415, + "Ġæ": 47416, + "ajor": 47417, + "repeat": 47418, + "ortex": 47419, + "ĠHeroic": 47420, + "ĠHera": 47421, + "ociated": 47422, + "Ġdebug": 47423, + "osher": 47424, + "upiter": 47425, + "_.": 47426, + "Ġsys": 47427, + "ĠDownloads": 47428, + "','": 47429, + "Adventure": 47430, + "FORE": 47431, + "ocument": 47432, + "arning": 47433, + "Ġmiscon": 47434, + "vidia": 47435, + "Cod": 47436, + "ibraries": 47437, + "buffer": 47438, + "cdn": 47439, + "ĠModes": 47440, + "tarian": 47441, + "ĠPyro": 47442, + "ĠFixes": 47443, + "ĠâĪ": 47444, + "ĠCf": 47445, + "Testing": 47446, + "Byte": 47447, + "nants": 47448, + "oufl": 47449, + "ĠCipher": 47450, + "Aim": 47451, + "ĠAfgh": 47452, + "ĠStarCraft": 47453, + "intendent": 47454, + "akespe": 47455, + "Apply": 47456, + ">>>": 47457, + "Lenin": 47458, + "ĠShaman": 47459, + "%\"": 47460, + "ĠFrenzy": 47461, + "illusion": 47462, + "===": 47463, + "Website": 47464, + "Allow": 47465, + "ĠBinary": 47466, + "ensable": 47467, + "ĠEmpires": 47468, + "Ġpromul": 47469, + "ormonal": 47470, + "ileaks": 47471, + "ĠAmmo": 47472, + "assies": 47473, + "atican": 47474, + "avior": 47475, + "ĠIter": 47476, + "1024": 47477, + "uesday": 47478, + "ĠAppears": 47479, + "achine": 47480, + "Problem": 47481, + "ousy": 47482, + "ramid": 47483, + "nox": 47484, + "··": 47485, + "omething": 47486, + "ĠPurg": 47487, + "artney": 47488, + "Ġ0000": 47489, + "psey": 47490, + "Ġglutamate": 47491, + "ĠActivate": 47492, + "Repl": 47493, + "Priv": 47494, + "cyclop": 47495, + "ĠHispan": 47496, + "atsuki": 47497, + "Likewise": 47498, + "JOHN": 47499, + "POSE": 47500, + "pherd": 47501, + "schild": 47502, + "Ġsuffix": 47503, + "åIJ": 47504, + "Ġoptionally": 47505, + "ĠRecomm": 47506, + "ĠSpawn": 47507, + "ARDIS": 47508, + "Ġinconsist": 47509, + "Ġenglish": 47510, + "Beta": 47511, + "ĠContains": 47512, + "uddenly": 47513, + "Ġls": 47514, + "Dynamic": 47515, + "åĽ": 47516, + "Ġ{{": 47517, + "dq": 47518, + "Hmm": 47519, + "oliberal": 47520, + "ĠCarnage": 47521, + "ĠRebirth": 47522, + "incerity": 47523, + "Ġproletariat": 47524, + "ĠCrafting": 47525, + "Explore": 47526, + "Ġeld": 47527, + "ĠAnarch": 47528, + "Ġ(>": 47529, + "ĠClockwork": 47530, + "ĠProced": 47531, + "APTER": 47532, + "ĠSorcerer": 47533, + "âĶ": 47534, + "ĠSnape": 47535, + "elist": 47536, + "Balance": 47537, + "Tube": 47538, + "Ġ--------------------": 47539, + "Ġnostalg": 47540, + "ACTED": 47541, + "ĠVID": 47542, + "soever": 47543, + "ignt": 47544, + "Ġhypothal": 47545, + "ĠObj": 47546, + "igure": 47547, + "ĠElves": 47548, + "gorithm": 47549, + "Romney": 47550, + "idable": 47551, + "renheit": 47552, + "aptic": 47553, + "Ġnonex": 47554, + "Profile": 47555, + "Ġscient": 47556, + "ĠAchievements": 47557, + "ĠReload": 47558, + "Products": 47559, + "ampire": 47560, + 
"pread": 47561, + "ĠYamato": 47562, + "Thread": 47563, + "ĠFML": 47564, + "ĠForsaken": 47565, + "Statistics": 47566, + "Ġ([": 47567, + "utsu": 47568, + "nces": 47569, + "...?": 47570, + "upload": 47571, + "Typ": 47572, + "ĠReflex": 47573, + "Dial": 47574, + "Ġspawns": 47575, + "Server": 47576, + "Ġacquaint": 47577, + "iterranean": 47578, + "='": 47579, + "Device": 47580, + "ר": 47581, + "ocaly": 47582, + "Remove": 47583, + "Ġ=====": 47584, + "Ġabdom": 47585, + "ideos": 47586, + "Dual": 47587, + "Fax": 47588, + "Ġbesie": 47589, + "ĠAdin": 47590, + "Ġdescrib": 47591, + "Ġiod": 47592, + "Limit": 47593, + "aunders": 47594, + "ĠAssassins": 47595, + "xxxx": 47596, + "ulner": 47597, + "Shipping": 47598, + "Item": 47599, + "fortune": 47600, + "Ġcipher": 47601, + "mA": 47602, + "acerb": 47603, + "ebus": 47604, + "Ġmodifiers": 47605, + "Added": 47606, + "prisingly": 47607, + "Dir": 47608, + "ĠArchangel": 47609, + "umbnails": 47610, + "Huh": 47611, + "ĠWARN": 47612, + "Role": 47613, + "usional": 47614, + "Ġcortical": 47615, + "ĠSCP": 47616, + "ĠException": 47617, + "ĠWarhammer": 47618, + ")))": 47619, + "](": 47620, + "Ġsynaptic": 47621, + "Ġcached": 47622, + "archment": 47623, + "Ġtarg": 47624, + "Filter": 47625, + "ĠHades": 47626, + "Ġprinc": 47627, + "halla": 47628, + "ptoms": 47629, + "Ïģ": 47630, + "ructose": 47631, + "termination": 47632, + "Ġcompe": 47633, + "define": 47634, + "Ġprosec": 47635, + "require": 47636, + "ĠCorpse": 47637, + "Abstract": 47638, + "********************************": 47639, + "Used": 47640, + "ĠIbid": 47641, + "trak": 47642, + "ä¸Ń": 47643, + "ĠGABA": 47644, + "åĬ": 47645, + "ĠHegel": 47646, + "Jere": 47647, + "odore": 47648, + "í": 47649, + "namese": 47650, + "Origin": 47651, + "ĠMastery": 47652, + "gerald": 47653, + "Charges": 47654, + "--------------------": 47655, + "Forge": 47656, + "comings": 47657, + "åį": 47658, + "Ġ(&": 47659, + "Ġgrap": 47660, + "Mask": 47661, + "ĠGundam": 47662, + "generic": 47663, + "ĠMalf": 47664, + "raphics": 47665, + "Internal": 47666, + "ourge": 47667, + "Ġirresist": 47668, + "sterdam": 47669, + "Ġendogenous": 47670, + "Export": 47671, + "Ġë": 47672, + "poons": 47673, + "Ġabund": 47674, + "ĠQuantity": 47675, + "Issue": 47676, + "âĪĴ": 47677, + "cknow": 47678, + "Anonymous": 47679, + "ĠDRAG": 47680, + "Wikipedia": 47681, + "Ġsubdu": 47682, + "iverpool": 47683, + "apesh": 47684, + "Ability": 47685, + "ĠCentOS": 47686, + "iseum": 47687, + "lycer": 47688, + "Untitled": 47689, + "Ġlineback": 47690, + "Ġtomat": 47691, + "byte": 47692, + "tile": 47693, + "linux": 47694, + "Palest": 47695, + "canon": 47696, + "FAULT": 47697, + "ĠkHz": 47698, + "Ġhelic": 47699, + "ĠIGF": 47700, + "WARE": 47701, + "Feature": 47702, + "ĠGraveyard": 47703, + "ĠNemesis": 47704, + "akuya": 47705, + "inement": 47706, + "Ġwhence": 47707, + "ractical": 47708, + "Ping": 47709, + "tesque": 47710, + "scroll": 47711, + "espie": 47712, + "Ġasynchronous": 47713, + "ocre": 47714, + "Measure": 47715, + "morph": 47716, + "std": 47717, + "Settings": 47718, + "Course": 47719, + "Ġ],": 47720, + "Ïĥ": 47721, + "Documents": 47722, + "estern": 47723, + "Ġtf": 47724, + "Ġcircumcised": 47725, + "geant": 47726, + "Ġconject": 47727, + "ĠFolder": 47728, + "outube": 47729, + "ĠMedline": 47730, + "Status": 47731, + "ctr": 47732, + "anoia": 47733, + "ĠPowerShell": 47734, + "Chel": 47735, + "Loop": 47736, + "Ġresize": 47737, + "aphael": 47738, + "workshop": 47739, + "velength": 47740, + "hover": 47741, + "flush": 47742, + "Ġβ": 47743, + "Task": 47744, + "pedia": 47745, + "ptin": 47746, + 
"bidden": 47747, + "windows": 47748, + "ĠCaucas": 47749, + "aml": 47750, + "isoft": 47751, + "Ġrs": 47752, + "cgi": 47753, + "urrection": 47754, + "miah": 47755, + "ÏĤ": 47756, + "Ġplaythrough": 47757, + "Reddit": 47758, + "׾": 47759, + "Ġannotation": 47760, + "Ġnobles": 47761, + "seq": 47762, + "mares": 47763, + "Ġwik": 47764, + "foreseen": 47765, + "RPG": 47766, + "Ġreper": 47767, + "aredevil": 47768, + "arcity": 47769, + "/\"": 47770, + "Ġ});": 47771, + "Ġdiscont": 47772, + "ĠBinding": 47773, + "answered": 47774, + "Mesh": 47775, + "ĠMPEG": 47776, + "Ġperceptual": 47777, + "OTAL": 47778, + "ursive": 47779, + "ãģĦ": 47780, + "Ġplun": 47781, + "onential": 47782, + "ãĤ": 47783, + "ĠReloaded": 47784, + "iscopal": 47785, + "ĠDespair": 47786, + "FIX": 47787, + "Ġheterogeneity": 47788, + ",[": 47789, + "ichick": 47790, + "DCS": 47791, + "Ġcooldown": 47792, + "................": 47793, + "Ġsomew": 47794, + "Battery": 47795, + "stract": 47796, + "Attempt": 47797, + "allery": 47798, + "ĠNept": 47799, + "Ġtac": 47800, + "ĠElemental": 47801, + "Function": 47802, + "Ġbindings": 47803, + "versive": 47804, + "ĠWarlock": 47805, + "Response": 47806, + "ĠNPCs": 47807, + "ollower": 47808, + "ĠReborn": 47809, + "Ġphenotype": 47810, + "uscript": 47811, + "Ġpecul": 47812, + "!/": 47813, + "Unique": 47814, + "ĠFreeBSD": 47815, + "ĠChero": 47816, + "Ġcolle": 47817, + "gently": 47818, + "Empty": 47819, + "rss": 47820, + "Ġdd": 47821, + "forge": 47822, + "ĠTraps": 47823, + "×Ķ": 47824, + "iblical": 47825, + "---------": 47826, + "uminati": 47827, + "login": 47828, + "asus": 47829, + "xual": 47830, + "ĠMiko": 47831, + "ĠDrac": 47832, + "ssh": 47833, + "Submit": 47834, + "ĠMultiplayer": 47835, + "leanor": 47836, + "Orig": 47837, + "anism": 47838, + "peror": 47839, + "ĠESV": 47840, + "Ġencour": 47841, + "å°": 47842, + "ĠPLoS": 47843, + "ĠCrusher": 47844, + "ocrates": 47845, + "ynchronous": 47846, + "§": 47847, + "ĠLuffy": 47848, + "Lastly": 47849, + "Ġdiffere": 47850, + "okane": 47851, + "Enh": 47852, + "ursor": 47853, + "Ġapopt": 47854, + "ĠTotem": 47855, + "ä½": 47856, + "Honest": 47857, + "xml": 47858, + "Created": 47859, + "Ġteleport": 47860, + "NRS": 47861, + "ccess": 47862, + "ilitary": 47863, + "ackets": 47864, + "Ġenchantment": 47865, + "ĠCunning": 47866, + "ortmund": 47867, + "Altern": 47868, + "Alternatively": 47869, + "ĠLuthor": 47870, + "Publisher": 47871, + "GBT": 47872, + "çĶ": 47873, + "Activity": 47874, + "Ġleptin": 47875, + "æĪ": 47876, + "ĠStarfleet": 47877, + "å¸": 47878, + "oooooooo": 47879, + "Ġlawy": 47880, + "Frag": 47881, + "ת": 47882, + "yright": 47883, + "cookie": 47884, + "Finish": 47885, + "wikipedia": 47886, + "ĠAbilities": 47887, + "interface": 47888, + "Ġglared": 47889, + "Engineers": 47890, + "ĠAtk": 47891, + "oteric": 47892, + "Ġbyte": 47893, + "ossibility": 47894, + "Label": 47895, + "ĠCSV": 47896, + "Ġè": 47897, + "ĠOblivion": 47898, + "android": 47899, + "rehensive": 47900, + "ĠCommands": 47901, + "clud": 47902, + "ĠTutorial": 47903, + "retched": 47904, + "irlwind": 47905, + "conserv": 47906, + "ministic": 47907, + "void": 47908, + "ernels": 47909, + "alias": 47910, + "ĠDraco": 47911, + "desktop": 47912, + "ĠMormonism": 47913, + "oÄŁ": 47914, + "kef": 47915, + "Ġtimestamp": 47916, + "WAYS": 47917, + "ãģĹ": 47918, + "\"(": 47919, + "eneg": 47920, + "CHAT": 47921, + "Ġnpm": 47922, + "ĠGrenade": 47923, + "rongh": 47924, + "dinand": 47925, + "Definition": 47926, + "ĠInteger": 47927, + "Ġmodifier": 47928, + "Ġdex": 47929, + "ĠParameters": 47930, + "andestine": 47931, + "ĠSHALL": 
47932, + "Purchase": 47933, + "enaries": 47934, + "Ġstarship": 47935, + "Armor": 47936, + "Skill": 47937, + "Ġlookup": 47938, + "verages": 47939, + "Minimum": 47940, + "ĠBleach": 47941, + "Ġdf": 47942, + "inosaur": 47943, + "ixel": 47944, + "Zip": 47945, + "temp": 47946, + "ruby": 47947, + "Fram": 47948, + "sword": 47949, + "Minecraft": 47950, + "strous": 47951, + "Client": 47952, + "ĠBarbarian": 47953, + "æĹ": 47954, + "USER": 47955, + "ĠMehran": 47956, + "axies": 47957, + "ermanent": 47958, + "ĠHeader": 47959, + "ablishment": 47960, + "hyde": 47961, + "Snake": 47962, + "ĠTelesc": 47963, + "Pocket": 47964, + "Ġ........": 47965, + "Destroy": 47966, + "Method": 47967, + "ĠZup": 47968, + "olulu": 47969, + "Ġunemploy": 47970, + "Temp": 47971, + "ĠExplicit": 47972, + "人": 47973, + "cache": 47974, + "innamon": 47975, + "Ġunavoid": 47976, + "Summary": 47977, + "Ġappre": 47978, + "Ġtaxp": 47979, + "XXX": 47980, + "ieval": 47981, + "ĠSummon": 47982, + "å¤": 47983, + "Lear": 47984, + "ibliography": 47985, + "CLASS": 47986, + "dimension": 47987, + "ĠHorde": 47988, + "Ġfilesystem": 47989, + "ĠQiao": 47990, + "obbies": 47991, + "DIR": 47992, + "Ġimpedance": 47993, + "éĩ": 47994, + "Names": 47995, + "ĠDrupal": 47996, + "Applic": 47997, + "imei": 47998, + "ynchron": 47999, + "Ire": 48000, + "ĠMinion": 48001, + "ĠHaste": 48002, + "ä¿": 48003, + "Ġ(=": 48004, + "LinkedIn": 48005, + "Maps": 48006, + "ifacts": 48007, + "Damage": 48008, + "odynam": 48009, + "ĠShroud": 48010, + "Ancient": 48011, + "enhagen": 48012, + "Tact": 48013, + "anship": 48014, + "aturdays": 48015, + "ãģ«": 48016, + "ikhail": 48017, + "ãģ®": 48018, + "framework": 48019, + "lication": 48020, + "âĢ¦]": 48021, + "Plug": 48022, + "ĠLilith": 48023, + "browser": 48024, + "offset": 48025, + "ĠJuda": 48026, + "ciating": 48027, + "console": 48028, + "Ġ=================": 48029, + "._": 48030, + "ĠPuzz": 48031, + "OPLE": 48032, + "erial": 48033, + "OHN": 48034, + "ĠGolem": 48035, + "ierrez": 48036, + "Ġ},": 48037, + "inition": 48038, + "insula": 48039, + "ĠEntered": 48040, + "greSQL": 48041, + "ĠFlask": 48042, + "ĠXCOM": 48043, + "fixes": 48044, + "ĠWeasley": 48045, + "arser": 48046, + "Ġrc": 48047, + "microsoft": 48048, + "HHHH": 48049, + "INFO": 48050, + "rehend": 48051, + "Ġpolymorph": 48052, + "Button": 48053, + "âī": 48054, + "QUI": 48055, + "twitch": 48056, + "jriwal": 48057, + "ĠSaiyan": 48058, + "Ġadherent": 48059, + "acters": 48060, + "arthed": 48061, + "âĢł": 48062, + "Ġfoss": 48063, + "ã": 48064, + "Quote": 48065, + "ependent": 48066, + "Ġhorr": 48067, + "UGC": 48068, + "Weiss": 48069, + "styles": 48070, + "advertisement": 48071, + "Credits": 48072, + "Lua": 48073, + "ĠUCH": 48074, + "Ġhorrend": 48075, + "Ġminion": 48076, + ">,": 48077, + "ãĥ³": 48078, + "Ġinclud": 48079, + "Compar": 48080, + "Ġ[]": 48081, + "Ġ(<": 48082, + "Phones": 48083, + "paralleled": 48084, + "HTML": 48085, + "Ġ(%": 48086, + "raltar": 48087, + "Ġamd": 48088, + "Maximum": 48089, + "ĠSolitaire": 48090, + "SCP": 48091, + "ĠVaugh": 48092, + "ĠCLR": 48093, + "database": 48094, + "module": 48095, + "̶": 48096, + "Capture": 48097, + "Window": 48098, + "ubuntu": 48099, + "Includes": 48100, + "ĠUriel": 48101, + "ORPG": 48102, + "κ": 48103, + "âĪ": 48104, + "ä¸Ģ": 48105, + "Ġdexter": 48106, + "ĠGlac": 48107, + "slice": 48108, + "HAHAHAHA": 48109, + "\\\"": 48110, + "lations": 48111, + "ÙIJ": 48112, + "ĠAUTH": 48113, + "earch": 48114, + "ĠSocket": 48115, + "Character": 48116, + "Sort": 48117, + "Ġindist": 48118, + "/_": 48119, + "ĠAntar": 48120, + "ifix": 48121, + "Ġlich": 
48122, + "variable": 48123, + "_(": 48124, + "Ġgui": 48125, + "Herm": 48126, + "elvet": 48127, + "è¯": 48128, + "Developer": 48129, + "Ġkcal": 48130, + "ciation": 48131, + "Transaction": 48132, + "Ġdocker": 48133, + "###": 48134, + "ĠVegeta": 48135, + "Result": 48136, + "ocamp": 48137, + "aughtered": 48138, + "Increase": 48139, + "aples": 48140, + "iannopoulos": 48141, + "zbek": 48142, + "estyles": 48143, + "emonium": 48144, + "è¿": 48145, + "ĠFANT": 48146, + "Reason": 48147, + "Elsewhere": 48148, + "\"\"": 48149, + "ĠArtifact": 48150, + "Authent": 48151, + "herical": 48152, + "Ġmembr": 48153, + "socket": 48154, + "Elsa": 48155, + "Condition": 48156, + "Ġlapt": 48157, + "Ġsorcerer": 48158, + "Layer": 48159, + "apters": 48160, + "Ġveter": 48161, + "Myth": 48162, + "ensical": 48163, + "ÏĢ": 48164, + "noxious": 48165, + "Ġunpre": 48166, + "Flags": 48167, + "OOOOOOOO": 48168, + "Ġincent": 48169, + "Combat": 48170, + "Session": 48171, + "Ġteleportation": 48172, + "éĢ": 48173, + "ortment": 48174, + "Admin": 48175, + "Fixed": 48176, + "×Ļ": 48177, + "Ġconfir": 48178, + "ãģŁ": 48179, + "morrow": 48180, + "osponsors": 48181, + "\\/": 48182, + "ictionary": 48183, + "Num": 48184, + "Ġquir": 48185, + "åº": 48186, + "à¨": 48187, + "Ġ<<": 48188, + "Attempts": 48189, + "ãģ§": 48190, + "λ": 48191, + "Features": 48192, + "XXXX": 48193, + "Ġinflamm": 48194, + "VERSION": 48195, + "ortality": 48196, + "spawn": 48197, + "ratulations": 48198, + "Ġcharism": 48199, + "Ġ&&": 48200, + "Dialogue": 48201, + "luster": 48202, + "<<": 48203, + "args": 48204, + "redients": 48205, + "Ġpredicate": 48206, + "qqa": 48207, + "etheus": 48208, + "Ġ(!": 48209, + "Ġshowc": 48210, + "cmd": 48211, + "bringer": 48212, + "Ġcoh": 48213, + "Input": 48214, + "ĠFANTASY": 48215, + "Ġfict": 48216, + "Blocks": 48217, + "Install": 48218, + "vector": 48219, + "umblr": 48220, + "agnar": 48221, + "Array": 48222, + "Ġembry": 48223, + "Ġtheoret": 48224, + "Ġhref": 48225, + "irrel": 48226, + "irements": 48227, + "iations": 48228, + "Ġ(/": 48229, + "Thumbnail": 48230, + "Ġhashes": 48231, + "^^": 48232, + "Copy": 48233, + "Ġeq": 48234, + "translation": 48235, + "Favorite": 48236, + "Fail": 48237, + "Ġogre": 48238, + "isites": 48239, + "Merit": 48240, + "ãģ¦": 48241, + "DATA": 48242, + "rarily": 48243, + "igmatic": 48244, + "Sequ": 48245, + "Els": 48246, + "ãģª": 48247, + "lehem": 48248, + "requency": 48249, + "aughed": 48250, + "Ġdistingu": 48251, + "Ġartific": 48252, + "Ġdwarves": 48253, + "Í": 48254, + "resy": 48255, + "~~": 48256, + "sofar": 48257, + "ideon": 48258, + "ozyg": 48259, + "EEEE": 48260, + "ĠMelee": 48261, + "大": 48262, + "tumblr": 48263, + "ssl": 48264, + "Wra": 48265, + "ONSORED": 48266, + "Ġvowel": 48267, + "},": 48268, + "Vari": 48269, + "cientious": 48270, + "Node": 48271, + "Ġsorce": 48272, + "========": 48273, + "perse": 48274, + "Detailed": 48275, + "isphere": 48276, + "Background": 48277, + "ĺħ": 48278, + "Redd": 48279, + "ìĿ": 48280, + "ãģ¨": 48281, + "ĠCTRL": 48282, + "Ġç": 48283, + "iculty": 48284, + "ername": 48285, + "Ġns": 48286, + "Deploy": 48287, + "Ġhapp": 48288, + "Ġ///": 48289, + "Begin": 48290, + "Ġgp": 48291, + "$.": 48292, + "Output": 48293, + "Suggest": 48294, + "×IJ": 48295, + "ĠToggle": 48296, + "Ġnutrit": 48297, + "Ġ\\\"": 48298, + "Ġpreval": 48299, + "Ġsubreddits": 48300, + "Menu": 48301, + "Amount": 48302, + "ĠWasteland": 48303, + "Ġsprites": 48304, + "Ġshader": 48305, + "Ġ;)": 48306, + "NAME": 48307, + "CLUD": 48308, + "Ġgoblin": 48309, + "Refer": 48310, + "ÙĴ": 48311, + "á¹": 48312, + "Improved": 48313, 
+ "endiary": 48314, + "Ġassail": 48315, + "chieve": 48316, + "reply": 48317, + "Ġcontrad": 48318, + "cients": 48319, + "GROUP": 48320, + "Controller": 48321, + "omsky": 48322, + "chemist": 48323, + "packages": 48324, + "ombies": 48325, + "scl": 48326, + "Ġibn": 48327, + "çĽ": 48328, + ":(": 48329, + "ĠMinotaur": 48330, + "niper": 48331, + "====": 48332, + "Ġsubsc": 48333, + "è¦": 48334, + "Ġinteger": 48335, + "Ġ\"-": 48336, + "Ġtheorem": 48337, + "utenberg": 48338, + "Trigger": 48339, + "github": 48340, + "ä¼": 48341, + "##": 48342, + "xtap": 48343, + "oké": 48344, + "ilial": 48345, + "idepress": 48346, + ":\\": 48347, + "Param": 48348, + "Correction": 48349, + "ïve": 48350, + "Chest": 48351, + "ש": 48352, + "ĠÏĦ": 48353, + "Ġrespawn": 48354, + "Ġrall": 48355, + "Ġcreatine": 48356, + "umsy": 48357, + "ĠTemplate": 48358, + "foo": 48359, + "query": 48360, + "Ġmanufact": 48361, + "Hardware": 48362, + "iframe": 48363, + "Ġ-------": 48364, + "Ġrecip": 48365, + "ĠAttributes": 48366, + "Ġforeskin": 48367, + "ãĤĭ": 48368, + "ãĥĦ": 48369, + "uania": 48370, + "................................................................": 48371, + "Ġphylogen": 48372, + "eaturing": 48373, + "Ġsprite": 48374, + "Ġinvari": 48375, + "DonaldTrump": 48376, + "({": 48377, + "ĠMalfoy": 48378, + "Gamer": 48379, + "ĠPlugin": 48380, + "γ": 48381, + "Query": 48382, + "ĠPuzzles": 48383, + "inventory": 48384, + "trl": 48385, + "Insert": 48386, + "Ġawa": 48387, + "ĠWerewolf": 48388, + "Ġhorizont": 48389, + "×ŀ": 48390, + "Ġcunt": 48391, + "]]": 48392, + "ĠByz": 48393, + "Mouse": 48394, + "Ġ[[": 48395, + "ĠCthulhu": 48396, + "ĠDRAGON": 48397, + "Default": 48398, + "ĠPresbyter": 48399, + "Ġff": 48400, + "Ġorcs": 48401, + "Construct": 48402, + "ĠDebug": 48403, + "Ġ*/": 48404, + "×ij": 48405, + "Ġembr": 48406, + "License": 48407, + "css": 48408, + "incinn": 48409, + "Prosecut": 48410, + "Ġsugg": 48411, + "å¾": 48412, + "ĠUndead": 48413, + "æĿ": 48414, + "Ġfs": 48415, + "Ġthw": 48416, + "Vector": 48417, + "åĮ": 48418, + "settings": 48419, + "å¯": 48420, + "Ġssh": 48421, + "ĠConverted": 48422, + "ãĤĴ": 48423, + "risome": 48424, + "Ġagre": 48425, + "Collection": 48426, + "cmp": 48427, + "puter": 48428, + "alloc": 48429, + "Ġé": 48430, + "ascade": 48431, + "ĠSpells": 48432, + "Ġ:-)": 48433, + "Haunted": 48434, + "Ġadolesc": 48435, + "FORMATION": 48436, + "ĠImperium": 48437, + "ãĥ¼": 48438, + "Supplement": 48439, + "Render": 48440, + "Theme": 48441, + "ĠTorment": 48442, + "([": 48443, + "ëĭ": 48444, + "Ġhtml": 48445, + "Ġjuven": 48446, + "ĠSiber": 48447, + "Ġdaemon": 48448, + "ivariate": 48449, + "objects": 48450, + "negie": 48451, + "Ġindu": 48452, + "landish": 48453, + "Meta": 48454, + "Impl": 48455, + "Ġglyph": 48456, + "Ġ-->": 48457, + "Ġstreng": 48458, + "agascar": 48459, + "guyen": 48460, + "((": 48461, + ")[": 48462, + "ĠNorn": 48463, + "Ġhippocamp": 48464, + "Ġ¯": 48465, + "îĢ": 48466, + "Connection": 48467, + "PATH": 48468, + "mbuds": 48469, + "ĠShards": 48470, + "Ġadvoc": 48471, + "Ġsimulac": 48472, + "âĸij": 48473, + "!?\"": 48474, + "ĠPotion": 48475, + "Ġamulet": 48476, + "ĠFnatic": 48477, + "Ġcryptoc": 48478, + "wav": 48479, + "radius": 48480, + "pkg": 48481, + "ĠMFT": 48482, + "æĢ": 48483, + "Ġtoile": 48484, + "Items": 48485, + "ifference": 48486, + "errors": 48487, + "ĠCelt": 48488, + "Ġunpop": 48489, + "ilogy": 48490, + "6666": 48491, + "hesda": 48492, + "Instruct": 48493, + "å·": 48494, + "Materials": 48495, + "ettings": 48496, + "Percent": 48497, + "Ġresistor": 48498, + "tymology": 48499, + "Ġdeprecated": 48500, + 
"Ġgrep": 48501, + "ĠWRITE": 48502, + "Ġtriv": 48503, + "Ġscrut": 48504, + "[/": 48505, + "anyl": 48506, + "skirts": 48507, + "MSN": 48508, + "ĠCodec": 48509, + "ecd": 48510, + "Anth": 48511, + "){": 48512, + "%]": 48513, + "veyard": 48514, + "aspberry": 48515, + "ãĢ": 48516, + "Reward": 48517, + "rha": 48518, + "Stretch": 48519, + "]-": 48520, + "Prev": 48521, + "Context": 48522, + "Ġlinux": 48523, + "HAHA": 48524, + "perties": 48525, + "ĠVIDE": 48526, + "Domain": 48527, + "Ġmurd": 48528, + "ĠLegions": 48529, + "apache": 48530, + "æŃ": 48531, + "Pause": 48532, + "Temperature": 48533, + "ufact": 48534, + "igslist": 48535, + "ĠRetrieved": 48536, + "èª": 48537, + "ãģĮ": 48538, + "Ingredients": 48539, + "ruary": 48540, + "dyl": 48541, + "Alias": 48542, + "ĠÎĶ": 48543, + "Ġinval": 48544, + "amsung": 48545, + "!--": 48546, + "olean": 48547, + "æī": 48548, + "ãģ¯": 48549, + "Ġcoefficients": 48550, + "ĠDHCP": 48551, + "âĨĴ": 48552, + "utonium": 48553, + ":[": 48554, + "âĹ": 48555, + "cli": 48556, + "Container": 48557, + "å¼": 48558, + "nexus": 48559, + "SOURCE": 48560, + "Ò": 48561, + "=/": 48562, + "Ġmysql": 48563, + "ĠGained": 48564, + "Ġ/*": 48565, + "uncture": 48566, + "Ġstatically": 48567, + "âĸł": 48568, + "æĺ¯": 48569, + "æ°": 48570, + "estamp": 48571, + "Cache": 48572, + "ulkan": 48573, + "staking": 48574, + "apter": 48575, + "ãģ¾": 48576, + "Ġμg": 48577, + "Ġtremend": 48578, + "ĠPiercing": 48579, + "naissance": 48580, + "ĠHealer": 48581, + "Enabled": 48582, + "éģ": 48583, + "âĸ": 48584, + "ĠThumbnails": 48585, + "Ġhither": 48586, + "Format": 48587, + "utherland": 48588, + "íķ": 48589, + "Ġdestro": 48590, + "fff": 48591, + "execute": 48592, + "msg": 48593, + "romancer": 48594, + "ĠCanaver": 48595, + "ĠVaults": 48596, + "oided": 48597, + "iage": 48598, + "Ġimg": 48599, + "summary": 48600, + "]);": 48601, + "ĠABE": 48602, + "ĠGamergate": 48603, + "utherford": 48604, + "Ġoverwrite": 48605, + "enment": 48606, + "æķ": 48607, + "Ġsystemd": 48608, + "tif": 48609, + "]).": 48610, + "ãĤ¤": 48611, + "Widget": 48612, + "======": 48613, + "(-": 48614, + "Ġ\"+": 48615, + "ĠIncarnation": 48616, + "æĥ": 48617, + "���": 48618, + "GUI": 48619, + "èĥ": 48620, + "forums": 48621, + "Ġrunes": 48622, + "Ġâī¤": 48623, + "Ġdefic": 48624, + "Distance": 48625, + "directory": 48626, + "ĠHorus": 48627, + "iltr": 48628, + "ortium": 48629, + "Ġ./": 48630, + "bda": 48631, + "owship": 48632, + "ĠâĨij": 48633, + "}.": 48634, + "åĩ": 48635, + "1027": 48636, + "Weapons": 48637, + "lucent": 48638, + "Ġauth": 48639, + ";;": 48640, + "Recommended": 48641, + "Ġsurv": 48642, + "Ġvm": 48643, + "ĠStronghold": 48644, + "Ġparan": 48645, + "ĠTrance": 48646, + "æĺ": 48647, + "Ġsovere": 48648, + "Ġcorrid": 48649, + "ĠPwr": 48650, + "Ġ[/": 48651, + "Ġseq": 48652, + "Population": 48653, + "Ġ[];": 48654, + "Ġreferen": 48655, + "ĠInstr": 48656, + "ĠStamina": 48657, + "kernel": 48658, + "Python": 48659, + "-+": 48660, + "Ġallele": 48661, + "éĽ": 48662, + "isode": 48663, + "ä¸į": 48664, + "otonin": 48665, + "modules": 48666, + "Notable": 48667, + "Spell": 48668, + "\\\\": 48669, + "Pref": 48670, + "Ġdatas": 48671, + "setup": 48672, + "Ġhapl": 48673, + "Height": 48674, + "åĭ": 48675, + "ãģ£": 48676, + "]),": 48677, + "Handle": 48678, + "umenthal": 48679, + "Package": 48680, + "Ġenthus": 48681, + "Ġunsus": 48682, + "Narr": 48683, + "Examples": 48684, + "FAQ": 48685, + "REDACTED": 48686, + "Ġnotor": 48687, + "Enable": 48688, + "Pattern": 48689, + "aeda": 48690, + ">.": 48691, + "CHECK": 48692, + "Ġ����": 48693, + "Ġ'.": 48694, + "Ġãĥ": 48695, + 
"append": 48696, + "����": 48697, + "gemony": 48698, + "terness": 48699, + "ĠHaku": 48700, + "NVIDIA": 48701, + "queue": 48702, + "Bind": 48703, + "Ġneigh": 48704, + "armor": 48705, + "retty": 48706, + "LOD": 48707, + "plugins": 48708, + "Ġ/>": 48709, + "TYPE": 48710, + "Ġ4096": 48711, + "-------": 48712, + "Preview": 48713, + "FML": 48714, + "Ġproletarian": 48715, + "zees": 48716, + "enfranch": 48717, + "ãģĨ": 48718, + "Ctrl": 48719, + "Module": 48720, + "ĠSurviv": 48721, + "ĠStarcraft": 48722, + "rored": 48723, + "reddit": 48724, + "Ġrul": 48725, + "Ġtx": 48726, + "Ġmage": 48727, + "Sword": 48728, + "Ġ~/": 48729, + "Effects": 48730, + "éļ": 48731, + "ä¹": 48732, + "Sensor": 48733, + "Solution": 48734, + "ãģĻ": 48735, + "Arcade": 48736, + "Ġpredec": 48737, + "Values": 48738, + "Length": 48739, + "Ġfortun": 48740, + "ttp": 48741, + "\"[": 48742, + "tmp": 48743, + "ĠBerserker": 48744, + "åĨ": 48745, + "ositories": 48746, + "Ġcouncill": 48747, + "ffff": 48748, + "));": 48749, + "Recipe": 48750, + "ĠASCII": 48751, + "âĦ¢:": 48752, + "ä": 48753, + "Ġhorm": 48754, + "=>": 48755, + "sers": 48756, + "ãģĭ": 48757, + "Recommend": 48758, + "['": 48759, + "agame": 48760, + "Animation": 48761, + "aucuses": 48762, + "Discussion": 48763, + "Ġhelicop": 48764, + "å¿": 48765, + "Float": 48766, + "Component": 48767, + "instance": 48768, + "Ġfoo": 48769, + "localhost": 48770, + "=-": 48771, + "Offset": 48772, + "Psy": 48773, + "ĠGohan": 48774, + "buquerque": 48775, + "Ġdefe": 48776, + "chwitz": 48777, + "parse": 48778, + "Ġdors": 48779, + "Ġspons": 48780, + "Ġasync": 48781, + "agonists": 48782, + "Ġindo": 48783, + ".>>": 48784, + "ĠDisciple": 48785, + "Ġfilename": 48786, + "rency": 48787, + "ĠDise": 48788, + "Ġ\"/": 48789, + "template": 48790, + "ãĤ¹": 48791, + "swers": 48792, + "Ġ++": 48793, + "Ġ[(": 48794, + "thora": 48795, + "ĠDepths": 48796, + "livious": 48797, + "Ġdisadvant": 48798, + "foundland": 48799, + "Upload": 48800, + "Ġ§§": 48801, + "Ġsophistic": 48802, + ";}": 48803, + "izont": 48804, + "\"}": 48805, + "estial": 48806, + "Ranked": 48807, + "ĠOccupations": 48808, + "LEASE": 48809, + "ĠOgre": 48810, + "folder": 48811, + "Plot": 48812, + "farious": 48813, + "Ġsuscept": 48814, + "Types": 48815, + "Discuss": 48816, + "Ġ'/": 48817, + "æµ": 48818, + "earable": 48819, + "æ³": 48820, + "Tile": 48821, + "iatus": 48822, + "åŃ": 48823, + "Ġreperto": 48824, + "Helper": 48825, + "Returns": 48826, + "ä¸Ĭ": 48827, + "imaru": 48828, + "Ġreq": 48829, + "Ġdissatisf": 48830, + "multipl": 48831, + "}{": 48832, + "-[": 48833, + "itial": 48834, + "*/": 48835, + "Config": 48836, + "Example": 48837, + "ĠjQuery": 48838, + "Mods": 48839, + "ĠGPIO": 48840, + "Ġlaun": 48841, + "layout": 48842, + "cised": 48843, + "Ġ......": 48844, + "+++": 48845, + "prototype": 48846, + "Exception": 48847, + "Ġsubsections": 48848, + "Ġresemb": 48849, + "Ġâĩ": 48850, + "ĠPubMed": 48851, + "username": 48852, + "Ġaggro": 48853, + "éĥ": 48854, + "Ġ};": 48855, + "ĠMages": 48856, + "ryu": 48857, + "apons": 48858, + "Optional": 48859, + "ĠAncients": 48860, + "ãĤĬ": 48861, + "Quotes": 48862, + "oaded": 48863, + "Ġsuspic": 48864, + "inline": 48865, + "omial": 48866, + "ĠMahjong": 48867, + "auntlets": 48868, + "Ġanarchism": 48869, + "Ġsubclass": 48870, + "ĠMLG": 48871, + "...]": 48872, + "Dialog": 48873, + "uphem": 48874, + "Ġrecursive": 48875, + "7601": 48876, + "frac": 48877, + "Else": 48878, + "ĠSeverus": 48879, + "},{\"": 48880, + "ĠCLIENT": 48881, + "Ġjavascript": 48882, + "sama": 48883, + "ĠLearns": 48884, + "ãĤĤ": 48885, + "Upgrade": 48886, + 
"Listener": 48887, + "Ġsnipp": 48888, + "Ġrune": 48889, + "ĠTTL": 48890, + "ertation": 48891, + "olicy": 48892, + "=\"\"": 48893, + "«ĺ": 48894, + "Ġexpr": 48895, + "ovych": 48896, + "Ġãģ": 48897, + "_-_": 48898, + "munition": 48899, + "////": 48900, + "func": 48901, + ">>>>": 48902, + "Provider": 48903, + "Ïī": 48904, + "BUG": 48905, + "Ġ[-": 48906, + "Ġarrang": 48907, + "merce": 48908, + "ãĥ": 48909, + "incarn": 48910, + "Valid": 48911, + "ĠAether": 48912, + "ãĤĵ": 48913, + "ĠUTF": 48914, + "ĠMonstrous": 48915, + "ãĤĮ": 48916, + "hedon": 48917, + "áµ": 48918, + ":#": 48919, + "ĠFrieza": 48920, + "padding": 48921, + "Reviewer": 48922, + "Ġpsychiat": 48923, + "yrinth": 48924, + "ĠâĶĤ": 48925, + "hillary": 48926, + "Static": 48927, + "Newsletter": 48928, + "Avg": 48929, + "Ġfn": 48930, + "Topic": 48931, + "choes": 48932, + "Ġnewsp": 48933, + "á¸": 48934, + "Ġ[+": 48935, + "~~~~~~~~~~~~~~~~": 48936, + ":]": 48937, + "apego": 48938, + "buf": 48939, + "Translation": 48940, + "ById": 48941, + "Ġmmol": 48942, + "ãĥ¼ãĥ": 48943, + "å½": 48944, + "ãĤī": 48945, + "Ġparser": 48946, + "ãĥª": 48947, + "`,": 48948, + "Lair": 48949, + ")}": 48950, + "ypes": 48951, + "adobe": 48952, + "Ġancest": 48953, + "ernel": 48954, + "ĠNULL": 48955, + "ç«": 48956, + "anguages": 48957, + "Increases": 48958, + "æĦ": 48959, + "utorial": 48960, + "ithmetic": 48961, + "dll": 48962, + "ĠArcane": 48963, + "çī": 48964, + "Ġtc": 48965, + "urtles": 48966, + "èĪ": 48967, + "Bytes": 48968, + "Slot": 48969, + "ĠBahá": 48970, + "Weapon": 48971, + "widget": 48972, + "querque": 48973, + "Ġembodiments": 48974, + "å¥": 48975, + "WARN": 48976, + "swer": 48977, + "thumbnails": 48978, + "FFFF": 48979, + "inguishable": 48980, + "Ġâī": 48981, + "Ġ${": 48982, + "AAAAAAAA": 48983, + "Conclusion": 48984, + "ĻĤ": 48985, + "disable": 48986, + "Rect": 48987, + "Ġsubp": 48988, + "Ġ().": 48989, + "ĠDetected": 48990, + "èĢ": 48991, + "[]": 48992, + "Ġcoerc": 48993, + "ĠmM": 48994, + "recated": 48995, + "fusc": 48996, + "ĠSorce": 48997, + "çĶŁ": 48998, + ").[": 48999, + "Ġ})": 49000, + "mobi": 49001, + "yip": 49002, + "Acknowled": 49003, + "ternity": 49004, + "iqueness": 49005, + "ython": 49006, + "><": 49007, + "Ġstd": 49008, + "Url": 49009, + "Ġnamespace": 49010, + "Ġtion": 49011, + "oother": 49012, + "Ó": 49013, + "Ġhemor": 49014, + "Ġrg": 49015, + "ventory": 49016, + "ãĤ¢": 49017, + "anamo": 49018, + "Socket": 49019, + "Topics": 49020, + "apeshifter": 49021, + "gnu": 49022, + "Ġdetrim": 49023, + "`.": 49024, + "romeda": 49025, + "çIJ": 49026, + "Ġlambda": 49027, + "Compan": 49028, + "Variable": 49029, + "Ġusb": 49030, + "ĠAdamant": 49031, + "ournal": 49032, + "Ġcovari": 49033, + "ãĥ©": 49034, + "éĸ": 49035, + "åİ": 49036, + "otaur": 49037, + "Ġ(),": 49038, + "Marginal": 49039, + "ãģı": 49040, + "Ġphysic": 49041, + "adeon": 49042, + "RESULTS": 49043, + "200000": 49044, + "ãģį": 49045, + "udeb": 49046, + "ãģĵ": 49047, + "COMPLE": 49048, + "Ġmsg": 49049, + "ghazi": 49050, + "/*": 49051, + "ĠDeity": 49052, + "Ġdisapp": 49053, + "Availability": 49054, + "Ġillum": 49055, + "à©": 49056, + "ptives": 49057, + ",âĢĶ": 49058, + "chnology": 49059, + "Ġaccur": 49060, + "Ġapi": 49061, + "Obj": 49062, + "ãĤ«": 49063, + "ãĤ¸": 49064, + "ä¹ĭ": 49065, + "ËĪ": 49066, + "Ġtcp": 49067, + "Required": 49068, + ".<": 49069, + "\".[": 49070, + "Ġ~/.": 49071, + "Ġobser": 49072, + "RFC": 49073, + "Ġintegers": 49074, + "åī": 49075, + "Installation": 49076, + "Ô": 49077, + "ó": 49078, + "csv": 49079, + "ãĥ«": 49080, + "ĠNoticed": 49081, + "âĸĵ": 49082, + "Tumblr": 49083, + 
"Reply": 49084, + "||": 49085, + "Ġconclud": 49086, + "Ġ))": 49087, + "ebin": 49088, + "sql": 49089, + "Closure": 49090, + "++++": 49091, + "],[": 49092, + "âĹı": 49093, + "Ġprolet": 49094, + "Ġ>=": 49095, + "estinal": 49096, + "Ġ[*": 49097, + "ĠInquisitor": 49098, + "Ġcmd": 49099, + "FINE": 49100, + "CRIP": 49101, + "Ġvertex": 49102, + "TeX": 49103, + "///": 49104, + "Ö¼": 49105, + "iscons": 49106, + "Ġmyster": 49107, + "Changed": 49108, + "timeout": 49109, + "irtual": 49110, + "Methods": 49111, + "Ġcerts": 49112, + "texture": 49113, + "Roaming": 49114, + "Proxy": 49115, + "Override": 49116, + "éĹ": 49117, + "utf": 49118, + "python": 49119, + "ĠRarity": 49120, + "ilitarian": 49121, + "çľ": 49122, + "().": 49123, + "æł": 49124, + "Ġbuf": 49125, + "åij": 49126, + "çķ": 49127, + "Ġ*.": 49128, + "umerable": 49129, + "~~~~": 49130, + "å¦": 49131, + "Ġsimultane": 49132, + "Ġjson": 49133, + "Requires": 49134, + "Ġperl": 49135, + "Interface": 49136, + "rupal": 49137, + ":": 49242, + "itialized": 49243, + "HTTP": 49244, + "Trivia": 49245, + "Sov": 49246, + "wrapper": 49247, + "={": 49248, + "ĠAzerb": 49249, + "aeper": 49250, + "Ġneighb": 49251, + "initions": 49252, + "Ġsts": 49253, + "ĠSasuke": 49254, + "#$": 49255, + "uliffe": 49256, + "æĸ¹": 49257, + "++++++++++++++++": 49258, + "ĠElven": 49259, + "ãģĤ": 49260, + "Ġartif": 49261, + "Folder": 49262, + "Ġà¨": 49263, + "åĤ": 49264, + "Ġphyl": 49265, + "uggest": 49266, + "blance": 49267, + "ãģł": 49268, + "Requirements": 49269, + "Usage": 49270, + "Ġinitialized": 49271, + "ãģ®æ": 49272, + "conservancy": 49273, + "ĠReincarn": 49274, + ")|": 49275, + "Ġantioxid": 49276, + "ĠClicker": 49277, + "Ġunlaw": 49278, + "Ġ\\(": 49279, + "ãĥĪ": 49280, + "Ġ[*]": 49281, + "Characters": 49282, + "////////": 49283, + "ãĢIJ": 49284, + "ãĤ·": 49285, + "webkit": 49286, + "ãĢij": 49287, + "Ġxp": 49288, + "alkyrie": 49289, + "Console": 49290, + "());": 49291, + "ĠKorra": 49292, + "\"))": 49293, + "oooooooooooooooo": 49294, + "Timer": 49295, + "////////////////": 49296, + "yout": 49297, + "engeance": 49298, + "emetery": 49299, + "Ġmages": 49300, + "mods": 49301, + "Null": 49302, + "Ġphilos": 49303, + "ascript": 49304, + "Ġaddon": 49305, + "ĠâĸĪ": 49306, + "emale": 49307, + "----------------------------------------------------------------": 49308, + "Ġ\\\\": 49309, + "=[": 49310, + "ĠParables": 49311, + "ãĥĨ": 49312, + "VALUE": 49313, + "Ġ@@": 49314, + "Ġuint": 49315, + "${": 49316, + "cpp": 49317, + "%%": 49318, + "Ġ(âĪĴ": 49319, + "utils": 49320, + "prefix": 49321, + "å°Ĩ": 49322, + "ãĥŃ": 49323, + "Completed": 49324, + "Ġgoto": 49325, + "ãĤ¯": 49326, + "Winged": 49327, + "perty": 49328, + "[\"": 49329, + "ãĥİ": 49330, + "ĠScythe": 49331, + "Ġæľ": 49332, + "Ġ!=": 49333, + "Buffer": 49334, + "docker": 49335, + "ĠWATCHED": 49336, + "èĢħ": 49337, + "())": 49338, + "Ġdst": 49339, + "SIZE": 49340, + "ĠDemonic": 49341, + "Ġresil": 49342, + "ãĤ¿": 49343, + "Ġpione": 49344, + "cpu": 49345, + "++)": 49346, + "TEXT": 49347, + "Ġdiscrep": 49348, + "debian": 49349, + "quished": 49350, + "Ġacknow": 49351, + "Ġtrave": 49352, + "Ġgcc": 49353, + "Catalog": 49354, + "ctrl": 49355, + "ĠMoroc": 49356, + "Ġcpu": 49357, + "Ġ];": 49358, + "ĠSorceress": 49359, + "Introduced": 49360, + "Frames": 49361, + "Ġcondem": 49362, + "¶æ": 49363, + "~~~~~~~~": 49364, + "ĠEmacs": 49365, + "][/": 49366, + "Ġglim": 49367, + "Init": 49368, + "ĠPrimordial": 49369, + "ãĥĥ": 49370, + "Ġ+=": 49371, + "Ġblat": 49372, + "à¼": 49373, + "------------------------------------------------": 49374, + "gpu": 49375, + 
"ãĥĥãĥĪ": 49376, + "Ġxml": 49377, + "Ġboolean": 49378, + "References": 49379, + "Ġ?)": 49380, + "Ġsatell": 49381, + "Queue": 49382, + "Ġpestic": 49383, + "Ġ}}": 49384, + "Attribute": 49385, + "Ġdx": 49386, + "ĠDefin": 49387, + "Synopsis": 49388, + "..................": 49389, + "ãĥ¬": 49390, + "plugin": 49391, + "Disable": 49392, + "0000000000000000": 49393, + ")\\": 49394, + "ĠIchigo": 49395, + "println": 49396, + "rontal": 49397, + "Setup": 49398, + "Ġ��������": 49399, + "å§": 49400, + "âĸº": 49401, + "ĠPengu": 49402, + "ailability": 49403, + "Duration": 49404, + "Timeout": 49405, + "ãĢĮ": 49406, + "Ġbehav": 49407, + "Reviewed": 49408, + "Ġtoget": 49409, + "\\.": 49410, + "lished": 49411, + "Ġthous": 49412, + "Ġperpend": 49413, + "ecause": 49414, + "Layout": 49415, + "è»": 49416, + "ĠDexterity": 49417, + "unsigned": 49418, + "+=": 49419, + "[[": 49420, + "ĠRunes": 49421, + "ãĤ¦": 49422, + "};": 49423, + "})": 49424, + "FTWARE": 49425, + "ength": 49426, + "milo": 49427, + "duino": 49428, + "天": 49429, + "ĠClojure": 49430, + "ļé": 49431, + "ãĥ¥": 49432, + "gradient": 49433, + "Ġ\"\"\"": 49434, + "âĨij": 49435, + "@#": 49436, + "JSON": 49437, + "Ġproport": 49438, + "addr": 49439, + "});": 49440, + "ãĥIJ": 49441, + "ä¸ī": 49442, + "Ġtmp": 49443, + "å£": 49444, + "../": 49445, + "zsche": 49446, + "ĠâĪ¼": 49447, + "Entity": 49448, + "æ©Ł": 49449, + "ĠâĶľâĶĢâĶĢ": 49450, + "filename": 49451, + "{{": 49452, + "@@": 49453, + "ĠSeym": 49454, + "Ġ/**": 49455, + "ĠSummoner": 49456, + "Quantity": 49457, + "ç·": 49458, + "Attach": 49459, + "Ġbool": 49460, + "Texture": 49461, + "Ġopio": 49462, + ".}": 49463, + "ãĥĭ": 49464, + "integer": 49465, + "Ġregex": 49466, + "Ġnomine": 49467, + "ription": 49468, + "ãģ®ç": 49469, + "ãĥķ": 49470, + "Ġsubparagraph": 49471, + "GGGG": 49472, + "Ġexplan": 49473, + "Header": 49474, + "Spawn": 49475, + "toggle": 49476, + "²¾": 49477, + "Abyss": 49478, + "expr": 49479, + "ĠZerg": 49480, + "ĠGrimoire": 49481, + "Contents": 49482, + "Instance": 49483, + "cyclopedia": 49484, + "ãĥĹ": 49485, + "ĠTakeru": 49486, + "=(": 49487, + "代": 49488, + "\\)": 49489, + "Ġrgb": 49490, + "htt": 49491, + "bryce": 49492, + "Ġlivest": 49493, + "ĠAnnotations": 49494, + "âĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢ": 49495, + "berus": 49496, + "ntil": 49497, + "Ġskelet": 49498, + "callback": 49499, + "åħī": 49500, + "Joined": 49501, + "ãĤª": 49502, + "Ġargs": 49503, + "artifacts": 49504, + "Ġå¤": 49505, + "ÃĽ": 49506, + "ãĥŀ": 49507, + "Streamer": 49508, + "}\"": 49509, + "Ġunden": 49510, + "ãĥģ": 49511, + "Īè": 49512, + "ãĥ£": 49513, + "Ġ0004": 49514, + "Ġ\\'": 49515, + "ãĤ°": 49516, + "ĠCONFIG": 49517, + "Ġ#####": 49518, + "``": 49519, + "anguage": 49520, + "Ġ*)": 49521, + "Template": 49522, + "MODE": 49523, + "Ġ00000000": 49524, + "'';": 49525, + ">": 49625, + "Ġlvl": 49626, + "Footnote": 49627, + "Iter": 49628, + "####": 49629, + "ãĥij": 49630, + "ĠCarbuncle": 49631, + "Ġ[+]": 49632, + "Ġmathemat": 49633, + "Allows": 49634, + "Ġ4090": 49635, + "Async": 49636, + "ģ«": 49637, + "Ļ½": 49638, + "))))": 49639, + "á½": 49640, + "Ġcx": 49641, + "Ġansw": 49642, + "{\"": 49643, + "ãĥŁ": 49644, + "addons": 49645, + "Filename": 49646, + "Appearances": 49647, + "ĠãĢĮ": 49648, + "Ġaddr": 49649, + "Ġcharact": 49650, + "glomer": 49651, + "Advertisements": 49652, + "Ġdracon": 49653, + "ĠFenrir": 49654, + "Ġ();": 49655, + "ĠCitiz": 49656, + "acebook": 49657, + "Ġparams": 49658, + "]=": 49659, + "Ġsubscript": 49660, + "Ġentreprene": 49661, + "tnc": 49662, + "iversal": 49663, + "Ġmillenn": 49664, + "ithub": 49665, + "/>": 49666, 
+ "Ġ\"{": 49667, + "Frameworks": 49668, + "avorite": 49669, + "Ġ])": 49670, + "Constructed": 49671, + "fml": 49672, + "ãĥį": 49673, + "################################": 49674, + "-|": 49675, + "¥ŀ": 49676, + "Ġwithd": 49677, + "ĠCth": 49678, + "AppData": 49679, + "Msg": 49680, + ":{": 49681, + "ãĤ¨": 49682, + "Ġtuple": 49683, + "ç¥ŀ": 49684, + "Ġintrins": 49685, + "ĠCooldown": 49686, + "ategory": 49687, + "^{": 49688, + "ãĥĬ": 49689, + "''''": 49690, + "çĶ°": 49691, + "ĠDEBUG": 49692, + "Ġcannabin": 49693, + "ocobo": 49694, + "Invalid": 49695, + "ãĥĢ": 49696, + "Compat": 49697, + "Ġ({": 49698, + "Removed": 49699, + "Ġconvol": 49700, + "}:": 49701, + "interstitial": 49702, + "Ġ\"": 49721, + "initialized": 49722, + "Ġexting": 49723, + "Poké": 49724, + "Parameters": 49725, + "¶ħ": 49726, + "########": 49727, + "NULL": 49728, + "ãĥĩ": 49729, + "groupon": 49730, + "\\-": 49731, + "ãĥı": 49732, + "ãĤ±": 49733, + "Ġsubsequ": 49734, + "ccording": 49735, + "ĠMODULE": 49736, + "ĠProtoss": 49737, + "\"},{\"": 49738, + "Ġ..............": 49739, + "Integer": 49740, + "endif": 49741, + "ãĥĻ": 49742, + "parser": 49743, + "lambda": 49744, + "Ġcarbohyd": 49745, + "ĠUnloaded": 49746, + "_{": 49747, + "âĸ¬âĸ¬": 49748, + "Ġdebian": 49749, + "]}": 49750, + "ãĤ¶": 49751, + "Parameter": 49752, + "ãĤ£": 49753, + "ãĤ»": 49754, + "Ġ$_": 49755, + "İĭ": 49756, + "Ġiterator": 49757, + "ãĤ¬": 49758, + "WINDOWS": 49759, + "CONCLUS": 49760, + "Ġ\"\\": 49761, + "umbn": 49762, + "(&": 49763, + "ãĥ©ãĥ³": 49764, + "usercontent": 49765, + "ometimes": 49766, + "METHOD": 49767, + "ãĥ¢": 49768, + "potion": 49769, + "ãĥ¯": 49770, + "everal": 49771, + "Ġweap": 49772, + "minecraft": 49773, + "================================": 49774, + "printf": 49775, + "ĠShinra": 49776, + "Ġreluct": 49777, + "\\\",": 49778, + "Runtime": 49779, + "xff": 49780, + "ĠAbyssal": 49781, + "akeru": 49782, + "Ġ\\(\\": 49783, + "\"/>": 49784, + "efficients": 49785, + "Ü": 49786, + "avascript": 49787, + "Ġbehavi": 49788, + "++;": 49789, + "=#": 49790, + "Attributes": 49791, + "âĵĺ": 49792, + "lvl": 49793, + "¬¼": 49794, + "/**": 49795, + "Gameplay": 49796, + "ĠLeilan": 49797, + ">)": 49798, + "=\"/": 49799, + "Ġ));": 49800, + "ãĥĨãĤ£": 49801, + "ġ": 49802, + ".": 49836, + "DEBUG": 49837, + "âĶģ": 49838, + "ãĢı": 49839, + "WithNo": 49840, + "Redditor": 49841, + "ĠâĶľ": 49842, + "Ġfmt": 49843, + "ãĢİ": 49844, + "Ġmsec": 49845, + "ĪĴ": 49846, + "eatures": 49847, + "itially": 49848, + "\"\"\"": 49849, + "ãĥ¼ãĤ¯": 49850, + "Textures": 49851, + "\"},": 49852, + "\"><": 49858, + "||||": 49859, + "ß": 49860, + "iterator": 49861, + "è£ħ": 49862, + "Ĥª": 49863, + "ojure": 49864, + "ãħĭãħĭ": 49865, + "ãĥ¼ãĥ³": 49866, + "Ġprintln": 49867, + "Ġ][": 49868, + "âĸĪâĸĪ": 49869, + "âķIJ": 49870, + "\\\":": 49871, + "senal": 49872, + "é¾į": 49873, + "é¾": 49874, + "Ġcryst": 49875, + "ãĥķãĤ¡": 49876, + "ĠCosponsors": 49877, + "ãĤ·ãĥ£": 49878, + "Magikarp": 49879, + "ĠMagicka": 49880, + "âĸĪâĸĪâĸĪâĸĪ": 49881, + ",,,,,,,,": 49882, + "vertisement": 49883, + "âĶĢâĶĢâĶĢâĶĢ": 49884, + "ãĥķãĤ©": 49885, + "luaj": 49886, + "CLASSIFIED": 49887, + ".''.": 49888, + "byss": 49889, + "Ġ{:": 49890, + "ĠNanto": 49891, + "Ġptr": 49892, + "Ġ%%": 49893, + "Ġteasp": 49894, + "[_": 49895, + "ãĥ¤": 49896, + "ħĭ": 49897, + "ŃĶ": 49898, + "Ġpci": 49899, + "Ġ\"<": 49900, + "GGGGGGGG": 49901, + "æĪ¦": 49902, + "--+": 49903, + "ãĤ®": 49904, + "Ġ())": 49905, + "âĸ¬": 49906, + "Ġsizeof": 49907, + "}}}": 49908, + ";;;;;;;;": 49909, + ">]": 49910, + "âĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪ": 49911, + "Vaults": 49912, + 
"Ġistg": 49913, + "Ġnewcom": 49914, + "=]": 49915, + "¿½": 49916, + "ĵĺ": 49917, + "{\\": 49918, + "Args": 49919, + "Ġexha": 49920, + "(\\": 49921, + "Ġunnecess": 49922, + "\"}],\"": 49923, + "ĠUNCLASSIFIED": 49924, + ">(": 49925, + "ãĤ¢ãĥ«": 49926, + "æ©": 49927, + "70710": 49928, + "Ń·": 49929, + "ãĥ¼ãĥĨãĤ£": 49930, + "ĠSakuya": 49931, + "ãĥĥãĥī": 49932, + "ĠPyrrha": 49933, + "escription": 49934, + "VIDIA": 49935, + "================================================================": 49936, + "Ġlooph": 49937, + "=~": 49938, + "Ġcumbers": 49939, + "Ġ)]": 49940, + "govtrack": 49941, + "ĠãĤµ": 49942, + "Ġsubur": 49943, + "Þ": 49944, + "Ġâī¡": 49945, + "Interstitial": 49946, + "ãĥ¼ãĥĨ": 49947, + "Ġgobl": 49948, + "ãĥīãĥ©": 49949, + "oldown": 49950, + "ģĸ": 49951, + "Depths": 49952, + "Ġ());": 49953, + "Ġ._": 49954, + "20439": 49955, + "Ġç¥ŀ": 49956, + "ãģ®å®": 49957, + "ãĤ¼": 49958, + "Ġ$\\": 49959, + "âĹ¼": 49960, + "Ġencount": 49961, + "Ġ":48457,"Ġstreng":48458,"agascar":48459,"guyen":48460,"((":48461,")[":48462,"ĠNorn":48463,"Ġhippocamp":48464,"Ġ¯":48465,"îĢ":48466,"Connection":48467,"PATH":48468,"mbuds":48469,"ĠShards":48470,"Ġadvoc":48471,"Ġsimulac":48472,"âĸij":48473,"!?\"":48474,"ĠPotion":48475,"Ġamulet":48476,"ĠFnatic":48477,"Ġcryptoc":48478,"wav":48479,"radius":48480,"pkg":48481,"ĠMFT":48482,"æĢ":48483,"Ġtoile":48484,"Items":48485,"ifference":48486,"errors":48487,"ĠCelt":48488,"Ġunpop":48489,"ilogy":48490,"6666":48491,"hesda":48492,"Instruct":48493,"å·":48494,"Materials":48495,"ettings":48496,"Percent":48497,"Ġresistor":48498,"tymology":48499,"Ġdeprecated":48500,"Ġgrep":48501,"ĠWRITE":48502,"Ġtriv":48503,"Ġscrut":48504,"[/":48505,"anyl":48506,"skirts":48507,"MSN":48508,"ĠCodec":48509,"ecd":48510,"Anth":48511,"){":48512,"%]":48513,"veyard":48514,"aspberry":48515,"ãĢ":48516,"Reward":48517,"rha":48518,"Stretch":48519,"]-":48520,"Prev":48521,"Context":48522,"Ġlinux":48523,"HAHA":48524,"perties":48525,"ĠVIDE":48526,"Domain":48527,"Ġmurd":48528,"ĠLegions":48529,"apache":48530,"æŃ":48531,"Pause":48532,"Temperature":48533,"ufact":48534,"igslist":48535,"ĠRetrieved":48536,"èª":48537,"ãģĮ":48538,"Ingredients":48539,"ruary":48540,"dyl":48541,"Alias":48542,"ĠÎĶ":48543,"Ġinval":48544,"amsung":48545,"!--":48546,"olean":48547,"æī":48548,"ãģ¯":48549,"Ġcoefficients":48550,"ĠDHCP":48551,"âĨĴ":48552,"utonium":48553,":[":48554,"âĹ":48555,"cli":48556,"Container":48557,"å¼":48558,"nexus":48559,"SOURCE":48560,"Ò":48561,"=/":48562,"Ġmysql":48563,"ĠGained":48564,"Ġ/*":48565,"uncture":48566,"Ġstatically":48567,"âĸł":48568,"æĺ¯":48569,"æ°":48570,"estamp":48571,"Cache":48572,"ulkan":48573,"staking":48574,"apter":48575,"ãģ¾":48576,"Ġμg":48577,"Ġtremend":48578,"ĠPiercing":48579,"naissance":48580,"ĠHealer":48581,"Enabled":48582,"éģ":48583,"âĸ":48584,"ĠThumbnails":48585,"Ġhither":48586,"Format":48587,"utherland":48588,"íķ":48589,"Ġdestro":48590,"fff":48591,"execute":48592,"msg":48593,"romancer":48594,"ĠCanaver":48595,"ĠVaults":48596,"oided":48597,"iage":48598,"Ġimg":48599,"summary":48600,"]);":48601,"ĠABE":48602,"ĠGamergate":48603,"utherford":48604,"Ġoverwrite":48605,"enment":48606,"æķ":48607,"Ġsystemd":48608,"tif":48609,"]).":48610,"ãĤ¤":48611,"Widget":48612,"======":48613,"(-":48614,"Ġ\"+":48615,"ĠIncarnation":48616,"æĥ":48617,"���":48618,"GUI":48619,"èĥ":48620,"forums":48621,"Ġrunes":48622,"Ġâī¤":48623,"Ġdefic":48624,"Distance":48625,"directory":48626,"ĠHorus":48627,"iltr":48628,"ortium":48629,"Ġ./":48630,"bda":48631,"owship":48632,"ĠâĨij":48633,"}.":48634,"åĩ":48635,"1027":48636,"Weapons":48637,"lucent":48638,"Ġa
uth":48639,";;":48640,"Recommended":48641,"Ġsurv":48642,"Ġvm":48643,"ĠStronghold":48644,"Ġparan":48645,"ĠTrance":48646,"æĺ":48647,"Ġsovere":48648,"Ġcorrid":48649,"ĠPwr":48650,"Ġ[/":48651,"Ġseq":48652,"Population":48653,"Ġ[];":48654,"Ġreferen":48655,"ĠInstr":48656,"ĠStamina":48657,"kernel":48658,"Python":48659,"-+":48660,"Ġallele":48661,"éĽ":48662,"isode":48663,"ä¸į":48664,"otonin":48665,"modules":48666,"Notable":48667,"Spell":48668,"\\\\":48669,"Pref":48670,"Ġdatas":48671,"setup":48672,"Ġhapl":48673,"Height":48674,"åĭ":48675,"ãģ£":48676,"]),":48677,"Handle":48678,"umenthal":48679,"Package":48680,"Ġenthus":48681,"Ġunsus":48682,"Narr":48683,"Examples":48684,"FAQ":48685,"REDACTED":48686,"Ġnotor":48687,"Enable":48688,"Pattern":48689,"aeda":48690,">.":48691,"CHECK":48692,"Ġ����":48693,"Ġ'.":48694,"Ġãĥ":48695,"append":48696,"����":48697,"gemony":48698,"terness":48699,"ĠHaku":48700,"NVIDIA":48701,"queue":48702,"Bind":48703,"Ġneigh":48704,"armor":48705,"retty":48706,"LOD":48707,"plugins":48708,"Ġ/>":48709,"TYPE":48710,"Ġ4096":48711,"-------":48712,"Preview":48713,"FML":48714,"Ġproletarian":48715,"zees":48716,"enfranch":48717,"ãģĨ":48718,"Ctrl":48719,"Module":48720,"ĠSurviv":48721,"ĠStarcraft":48722,"rored":48723,"reddit":48724,"Ġrul":48725,"Ġtx":48726,"Ġmage":48727,"Sword":48728,"Ġ~/":48729,"Effects":48730,"éļ":48731,"ä¹":48732,"Sensor":48733,"Solution":48734,"ãģĻ":48735,"Arcade":48736,"Ġpredec":48737,"Values":48738,"Length":48739,"Ġfortun":48740,"ttp":48741,"\"[":48742,"tmp":48743,"ĠBerserker":48744,"åĨ":48745,"ositories":48746,"Ġcouncill":48747,"ffff":48748,"));":48749,"Recipe":48750,"ĠASCII":48751,"âĦ¢:":48752,"ä":48753,"Ġhorm":48754,"=>":48755,"sers":48756,"ãģĭ":48757,"Recommend":48758,"['":48759,"agame":48760,"Animation":48761,"aucuses":48762,"Discussion":48763,"Ġhelicop":48764,"å¿":48765,"Float":48766,"Component":48767,"instance":48768,"Ġfoo":48769,"localhost":48770,"=-":48771,"Offset":48772,"Psy":48773,"ĠGohan":48774,"buquerque":48775,"Ġdefe":48776,"chwitz":48777,"parse":48778,"Ġdors":48779,"Ġspons":48780,"Ġasync":48781,"agonists":48782,"Ġindo":48783,".>>":48784,"ĠDisciple":48785,"Ġfilename":48786,"rency":48787,"ĠDise":48788,"Ġ\"/":48789,"template":48790,"ãĤ¹":48791,"swers":48792,"Ġ++":48793,"Ġ[(":48794,"thora":48795,"ĠDepths":48796,"livious":48797,"Ġdisadvant":48798,"foundland":48799,"Upload":48800,"Ġ§§":48801,"Ġsophistic":48802,";}":48803,"izont":48804,"\"}":48805,"estial":48806,"Ranked":48807,"ĠOccupations":48808,"LEASE":48809,"ĠOgre":48810,"folder":48811,"Plot":48812,"farious":48813,"Ġsuscept":48814,"Types":48815,"Discuss":48816,"Ġ'/":48817,"æµ":48818,"earable":48819,"æ³":48820,"Tile":48821,"iatus":48822,"åŃ":48823,"Ġreperto":48824,"Helper":48825,"Returns":48826,"ä¸Ĭ":48827,"imaru":48828,"Ġreq":48829,"Ġdissatisf":48830,"multipl":48831,"}{":48832,"-[":48833,"itial":48834,"*/":48835,"Config":48836,"Example":48837,"ĠjQuery":48838,"Mods":48839,"ĠGPIO":48840,"Ġlaun":48841,"layout":48842,"cised":48843,"Ġ......":48844,"+++":48845,"prototype":48846,"Exception":48847,"Ġsubsections":48848,"Ġresemb":48849,"Ġâĩ":48850,"ĠPubMed":48851,"username":48852,"Ġaggro":48853,"éĥ":48854,"Ġ};":48855,"ĠMages":48856,"ryu":48857,"apons":48858,"Optional":48859,"ĠAncients":48860,"ãĤĬ":48861,"Quotes":48862,"oaded":48863,"Ġsuspic":48864,"inline":48865,"omial":48866,"ĠMahjong":48867,"auntlets":48868,"Ġanarchism":48869,"Ġsubclass":48870,"ĠMLG":48871,"...]":48872,"Dialog":48873,"uphem":48874,"Ġrecursive":48875,"7601":48876,"frac":48877,"Else":48878,"ĠSeverus":48879,"},{\"":48880,"ĠCLIENT":48881,"Ġjavascript":48882,"sama"
:48883,"ĠLearns":48884,"ãĤĤ":48885,"Upgrade":48886,"Listener":48887,"Ġsnipp":48888,"Ġrune":48889,"ĠTTL":48890,"ertation":48891,"olicy":48892,"=\"\"":48893,"«ĺ":48894,"Ġexpr":48895,"ovych":48896,"Ġãģ":48897,"_-_":48898,"munition":48899,"////":48900,"func":48901,">>>>":48902,"Provider":48903,"Ïī":48904,"BUG":48905,"Ġ[-":48906,"Ġarrang":48907,"merce":48908,"ãĥ":48909,"incarn":48910,"Valid":48911,"ĠAether":48912,"ãĤĵ":48913,"ĠUTF":48914,"ĠMonstrous":48915,"ãĤĮ":48916,"hedon":48917,"áµ":48918,":#":48919,"ĠFrieza":48920,"padding":48921,"Reviewer":48922,"Ġpsychiat":48923,"yrinth":48924,"ĠâĶĤ":48925,"hillary":48926,"Static":48927,"Newsletter":48928,"Avg":48929,"Ġfn":48930,"Topic":48931,"choes":48932,"Ġnewsp":48933,"á¸":48934,"Ġ[+":48935,"~~~~~~~~~~~~~~~~":48936,":]":48937,"apego":48938,"buf":48939,"Translation":48940,"ById":48941,"Ġmmol":48942,"ãĥ¼ãĥ":48943,"å½":48944,"ãĤī":48945,"Ġparser":48946,"ãĥª":48947,"`,":48948,"Lair":48949,")}":48950,"ypes":48951,"adobe":48952,"Ġancest":48953,"ernel":48954,"ĠNULL":48955,"ç«":48956,"anguages":48957,"Increases":48958,"æĦ":48959,"utorial":48960,"ithmetic":48961,"dll":48962,"ĠArcane":48963,"çī":48964,"Ġtc":48965,"urtles":48966,"èĪ":48967,"Bytes":48968,"Slot":48969,"ĠBahá":48970,"Weapon":48971,"widget":48972,"querque":48973,"Ġembodiments":48974,"å¥":48975,"WARN":48976,"swer":48977,"thumbnails":48978,"FFFF":48979,"inguishable":48980,"Ġâī":48981,"Ġ${":48982,"AAAAAAAA":48983,"Conclusion":48984,"ĻĤ":48985,"disable":48986,"Rect":48987,"Ġsubp":48988,"Ġ().":48989,"ĠDetected":48990,"èĢ":48991,"[]":48992,"Ġcoerc":48993,"ĠmM":48994,"recated":48995,"fusc":48996,"ĠSorce":48997,"çĶŁ":48998,").[":48999,"Ġ})":49000,"mobi":49001,"yip":49002,"Acknowled":49003,"ternity":49004,"iqueness":49005,"ython":49006,"><":49007,"Ġstd":49008,"Url":49009,"Ġnamespace":49010,"Ġtion":49011,"oother":49012,"Ó":49013,"Ġhemor":49014,"Ġrg":49015,"ventory":49016,"ãĤ¢":49017,"anamo":49018,"Socket":49019,"Topics":49020,"apeshifter":49021,"gnu":49022,"Ġdetrim":49023,"`.":49024,"romeda":49025,"çIJ":49026,"Ġlambda":49027,"Compan":49028,"Variable":49029,"Ġusb":49030,"ĠAdamant":49031,"ournal":49032,"Ġcovari":49033,"ãĥ©":49034,"éĸ":49035,"åİ":49036,"otaur":49037,"Ġ(),":49038,"Marginal":49039,"ãģı":49040,"Ġphysic":49041,"adeon":49042,"RESULTS":49043,"200000":49044,"ãģį":49045,"udeb":49046,"ãģĵ":49047,"COMPLE":49048,"Ġmsg":49049,"ghazi":49050,"/*":49051,"ĠDeity":49052,"Ġdisapp":49053,"Availability":49054,"Ġillum":49055,"à©":49056,"ptives":49057,",âĢĶ":49058,"chnology":49059,"Ġaccur":49060,"Ġapi":49061,"Obj":49062,"ãĤ«":49063,"ãĤ¸":49064,"ä¹ĭ":49065,"ËĪ":49066,"Ġtcp":49067,"Required":49068,".<":49069,"\".[":49070,"Ġ~/.":49071,"Ġobser":49072,"RFC":49073,"Ġintegers":49074,"åī":49075,"Installation":49076,"Ô":49077,"ó":49078,"csv":49079,"ãĥ«":49080,"ĠNoticed":49081,"âĸĵ":49082,"Tumblr":49083,"Reply":49084,"||":49085,"Ġconclud":49086,"Ġ))":49087,"ebin":49088,"sql":49089,"Closure":49090,"++++":49091,"],[":49092,"âĹı":49093,"Ġprolet":49094,"Ġ>=":49095,"estinal":49096,"Ġ[*":49097,"ĠInquisitor":49098,"Ġcmd":49099,"FINE":49100,"CRIP":49101,"Ġvertex":49102,"TeX":49103,"///":49104,"Ö¼":49105,"iscons":49106,"Ġmyster":49107,"Changed":49108,"timeout":49109,"irtual":49110,"Methods":49111,"Ġcerts":49112,"texture":49113,"Roaming":49114,"Proxy":49115,"Override":49116,"éĹ":49117,"utf":49118,"python":49119,"ĠRarity":49120,"ilitarian":49121,"çľ":49122,"().":49123,"æł":49124,"Ġbuf":49125,"åij":49126,"çķ":49127,"Ġ*.":49128,"umerable":49129,"~~~~":49130,"å¦":49131,"Ġsimultane":49132,"Ġjson":49133,"Requires":49134,"Ġperl":49135,"Interfac
e":49136,"rupal":49137,":":49242,"itialized":49243,"HTTP":49244,"Trivia":49245,"Sov":49246,"wrapper":49247,"={":49248,"ĠAzerb":49249,"aeper":49250,"Ġneighb":49251,"initions":49252,"Ġsts":49253,"ĠSasuke":49254,"#$":49255,"uliffe":49256,"æĸ¹":49257,"++++++++++++++++":49258,"ĠElven":49259,"ãģĤ":49260,"Ġartif":49261,"Folder":49262,"Ġà¨":49263,"åĤ":49264,"Ġphyl":49265,"uggest":49266,"blance":49267,"ãģł":49268,"Requirements":49269,"Usage":49270,"Ġinitialized":49271,"ãģ®æ":49272,"conservancy":49273,"ĠReincarn":49274,")|":49275,"Ġantioxid":49276,"ĠClicker":49277,"Ġunlaw":49278,"Ġ\\(":49279,"ãĥĪ":49280,"Ġ[*]":49281,"Characters":49282,"////////":49283,"ãĢIJ":49284,"ãĤ·":49285,"webkit":49286,"ãĢij":49287,"Ġxp":49288,"alkyrie":49289,"Console":49290,"());":49291,"ĠKorra":49292,"\"))":49293,"oooooooooooooooo":49294,"Timer":49295,"////////////////":49296,"yout":49297,"engeance":49298,"emetery":49299,"Ġmages":49300,"mods":49301,"Null":49302,"Ġphilos":49303,"ascript":49304,"Ġaddon":49305,"ĠâĸĪ":49306,"emale":49307,"----------------------------------------------------------------":49308,"Ġ\\\\":49309,"=[":49310,"ĠParables":49311,"ãĥĨ":49312,"VALUE":49313,"Ġ@@":49314,"Ġuint":49315,"${":49316,"cpp":49317,"%%":49318,"Ġ(âĪĴ":49319,"utils":49320,"prefix":49321,"å°Ĩ":49322,"ãĥŃ":49323,"Completed":49324,"Ġgoto":49325,"ãĤ¯":49326,"Winged":49327,"perty":49328,"[\"":49329,"ãĥİ":49330,"ĠScythe":49331,"Ġæľ":49332,"Ġ!=":49333,"Buffer":49334,"docker":49335,"ĠWATCHED":49336,"èĢħ":49337,"())":49338,"Ġdst":49339,"SIZE":49340,"ĠDemonic":49341,"Ġresil":49342,"ãĤ¿":49343,"Ġpione":49344,"cpu":49345,"++)":49346,"TEXT":49347,"Ġdiscrep":49348,"debian":49349,"quished":49350,"Ġacknow":49351,"Ġtrave":49352,"Ġgcc":49353,"Catalog":49354,"ctrl":49355,"ĠMoroc":49356,"Ġcpu":49357,"Ġ];":49358,"ĠSorceress":49359,"Introduced":49360,"Frames":49361,"Ġcondem":49362,"¶æ":49363,"~~~~~~~~":49364,"ĠEmacs":49365,"][/":49366,"Ġglim":49367,"Init":49368,"ĠPrimordial":49369,"ãĥĥ":49370,"Ġ+=":49371,"Ġblat":49372,"à¼":49373,"------------------------------------------------":49374,"gpu":49375,"ãĥĥãĥĪ":49376,"Ġxml":49377,"Ġboolean":49378,"References":49379,"Ġ?)":49380,"Ġsatell":49381,"Queue":49382,"Ġpestic":49383,"Ġ}}":49384,"Attribute":49385,"Ġdx":49386,"ĠDefin":49387,"Synopsis":49388,"..................":49389,"ãĥ¬":49390,"plugin":49391,"Disable":49392,"0000000000000000":49393,")\\":49394,"ĠIchigo":49395,"println":49396,"rontal":49397,"Setup":49398,"Ġ��������":49399,"å§":49400,"âĸº":49401,"ĠPengu":49402,"ailability":49403,"Duration":49404,"Timeout":49405,"ãĢĮ":49406,"Ġbehav":49407,"Reviewed":49408,"Ġtoget":49409,"\\.":49410,"lished":49411,"Ġthous":49412,"Ġperpend":49413,"ecause":49414,"Layout":49415,"è»":49416,"ĠDexterity":49417,"unsigned":49418,"+=":49419,"[[":49420,"ĠRunes":49421,"ãĤ¦":49422,"};":49423,"})":49424,"FTWARE":49425,"ength":49426,"milo":49427,"duino":49428,"天":49429,"ĠClojure":49430,"ļé":49431,"ãĥ¥":49432,"gradient":49433,"Ġ\"\"\"":49434,"âĨij":49435,"@#":49436,"JSON":49437,"Ġproport":49438,"addr":49439,"});":49440,"ãĥIJ":49441,"ä¸ī":49442,"Ġtmp":49443,"å£":49444,"../":49445,"zsche":49446,"ĠâĪ¼":49447,"Entity":49448,"æ©Ł":49449,"ĠâĶľâĶĢâĶĢ":49450,"filename":49451,"{{":49452,"@@":49453,"ĠSeym":49454,"Ġ/**":49455,"ĠSummoner":49456,"Quantity":49457,"ç·":49458,"Attach":49459,"Ġbool":49460,"Texture":49461,"Ġopio":49462,".}":49463,"ãĥĭ":49464,"integer":49465,"Ġregex":49466,"Ġnomine":49467,"ription":49468,"ãģ®ç":49469,"ãĥķ":49470,"Ġsubparagraph":49471,"GGGG":49472,"Ġexplan":49473,"Header":49474,"Spawn":49475,"toggle":49476,"²¾":49477,"Abyss":49478,"
expr":49479,"ĠZerg":49480,"ĠGrimoire":49481,"Contents":49482,"Instance":49483,"cyclopedia":49484,"ãĥĹ":49485,"ĠTakeru":49486,"=(":49487,"代":49488,"\\)":49489,"Ġrgb":49490,"htt":49491,"bryce":49492,"Ġlivest":49493,"ĠAnnotations":49494,"âĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢ":49495,"berus":49496,"ntil":49497,"Ġskelet":49498,"callback":49499,"åħī":49500,"Joined":49501,"ãĤª":49502,"Ġargs":49503,"artifacts":49504,"Ġå¤":49505,"ÃĽ":49506,"ãĥŀ":49507,"Streamer":49508,"}\"":49509,"Ġunden":49510,"ãĥģ":49511,"Īè":49512,"ãĥ£":49513,"Ġ0004":49514,"Ġ\\'":49515,"ãĤ°":49516,"ĠCONFIG":49517,"Ġ#####":49518,"``":49519,"anguage":49520,"Ġ*)":49521,"Template":49522,"MODE":49523,"Ġ00000000":49524,"'';":49525,">":49625,"Ġlvl":49626,"Footnote":49627,"Iter":49628,"####":49629,"ãĥij":49630,"ĠCarbuncle":49631,"Ġ[+]":49632,"Ġmathemat":49633,"Allows":49634,"Ġ4090":49635,"Async":49636,"ģ«":49637,"Ļ½":49638,"))))":49639,"á½":49640,"Ġcx":49641,"Ġansw":49642,"{\"":49643,"ãĥŁ":49644,"addons":49645,"Filename":49646,"Appearances":49647,"ĠãĢĮ":49648,"Ġaddr":49649,"Ġcharact":49650,"glomer":49651,"Advertisements":49652,"Ġdracon":49653,"ĠFenrir":49654,"Ġ();":49655,"ĠCitiz":49656,"acebook":49657,"Ġparams":49658,"]=":49659,"Ġsubscript":49660,"Ġentreprene":49661,"tnc":49662,"iversal":49663,"Ġmillenn":49664,"ithub":49665,"/>":49666,"Ġ\"{":49667,"Frameworks":49668,"avorite":49669,"Ġ])":49670,"Constructed":49671,"fml":49672,"ãĥį":49673,"################################":49674,"-|":49675,"¥ŀ":49676,"Ġwithd":49677,"ĠCth":49678,"AppData":49679,"Msg":49680,":{":49681,"ãĤ¨":49682,"Ġtuple":49683,"ç¥ŀ":49684,"Ġintrins":49685,"ĠCooldown":49686,"ategory":49687,"^{":49688,"ãĥĬ":49689,"''''":49690,"çĶ°":49691,"ĠDEBUG":49692,"Ġcannabin":49693,"ocobo":49694,"Invalid":49695,"ãĥĢ":49696,"Compat":49697,"Ġ({":49698,"Removed":49699,"Ġconvol":49700,"}:":49701,"interstitial":49702,"Ġ\"":49721,"initialized":49722,"Ġexting":49723,"Poké":49724,"Parameters":49725,"¶ħ":49726,"########":49727,"NULL":49728,"ãĥĩ":49729,"groupon":49730,"\\-":49731,"ãĥı":49732,"ãĤ±":49733,"Ġsubsequ":49734,"ccording":49735,"ĠMODULE":49736,"ĠProtoss":49737,"\"},{\"":49738,"Ġ..............":49739,"Integer":49740,"endif":49741,"ãĥĻ":49742,"parser":49743,"lambda":49744,"Ġcarbohyd":49745,"ĠUnloaded":49746,"_{":49747,"âĸ¬âĸ¬":49748,"Ġdebian":49749,"]}":49750,"ãĤ¶":49751,"Parameter":49752,"ãĤ£":49753,"ãĤ»":49754,"Ġ$_":49755,"İĭ":49756,"Ġiterator":49757,"ãĤ¬":49758,"WINDOWS":49759,"CONCLUS":49760,"Ġ\"\\":49761,"umbn":49762,"(&":49763,"ãĥ©ãĥ³":49764,"usercontent":49765,"ometimes":49766,"METHOD":49767,"ãĥ¢":49768,"potion":49769,"ãĥ¯":49770,"everal":49771,"Ġweap":49772,"minecraft":49773,"================================":49774,"printf":49775,"ĠShinra":49776,"Ġreluct":49777,"\\\",":49778,"Runtime":49779,"xff":49780,"ĠAbyssal":49781,"akeru":49782,"Ġ\\(\\":49783,"\"/>":49784,"efficients":49785,"Ü":49786,"avascript":49787,"Ġbehavi":49788,"++;":49789,"=#":49790,"Attributes":49791,"âĵĺ":49792,"lvl":49793,"¬¼":49794,"/**":49795,"Gameplay":49796,"ĠLeilan":49797,">)":49798,"=\"/":49799,"Ġ));":49800,"ãĥĨãĤ£":49801,"ġ":49802,".":49836,"DEBUG":49837,"âĶģ":49838,"ãĢı":49839,"WithNo":49840,"Redditor":49841,"ĠâĶľ":49842,"Ġfmt":49843,"ãĢİ":49844,"Ġmsec":49845,"ĪĴ":49846,"eatures":49847,"itially":49848,"\"\"\"":49849,"ãĥ¼ãĤ¯":49850,"Textures":49851,"\"},":49852,"\"><":49858,"||||":49859,"ß":49860,"iterator":49861,"è£ħ":49862,"Ĥª":49863,"ojure":49864,"ãħĭãħĭ":49865,"ãĥ¼ãĥ³":49866,"Ġprintln":49867,"Ġ][":49868,"âĸĪâĸĪ":49869,"âķIJ":49870,"\\\":":49871,"senal":49872,"é¾į":49873,"é¾":49874,"Ġcryst":49875,"ãĥķãĤ¡":49876,"ĠCosponso
rs":49877,"ãĤ·ãĥ£":49878,"Magikarp":49879,"ĠMagicka":49880,"âĸĪâĸĪâĸĪâĸĪ":49881,",,,,,,,,":49882,"vertisement":49883,"âĶĢâĶĢâĶĢâĶĢ":49884,"ãĥķãĤ©":49885,"luaj":49886,"CLASSIFIED":49887,".''.":49888,"byss":49889,"Ġ{:":49890,"ĠNanto":49891,"Ġptr":49892,"Ġ%%":49893,"Ġteasp":49894,"[_":49895,"ãĥ¤":49896,"ħĭ":49897,"ŃĶ":49898,"Ġpci":49899,"Ġ\"<":49900,"GGGGGGGG":49901,"æĪ¦":49902,"--+":49903,"ãĤ®":49904,"Ġ())":49905,"âĸ¬":49906,"Ġsizeof":49907,"}}}":49908,";;;;;;;;":49909,">]":49910,"âĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪ":49911,"Vaults":49912,"Ġistg":49913,"Ġnewcom":49914,"=]":49915,"¿½":49916,"ĵĺ":49917,"{\\":49918,"Args":49919,"Ġexha":49920,"(\\":49921,"Ġunnecess":49922,"\"}],\"":49923,"ĠUNCLASSIFIED":49924,">(":49925,"ãĤ¢ãĥ«":49926,"æ©":49927,"70710":49928,"Ń·":49929,"ãĥ¼ãĥĨãĤ£":49930,"ĠSakuya":49931,"ãĥĥãĥī":49932,"ĠPyrrha":49933,"escription":49934,"VIDIA":49935,"================================================================":49936,"Ġlooph":49937,"=~":49938,"Ġcumbers":49939,"Ġ)]":49940,"govtrack":49941,"ĠãĤµ":49942,"Ġsubur":49943,"Þ":49944,"Ġâī¡":49945,"Interstitial":49946,"ãĥ¼ãĥĨ":49947,"Ġgobl":49948,"ãĥīãĥ©":49949,"oldown":49950,"ģĸ":49951,"Depths":49952,"Ġ());":49953,"Ġ._":49954,"20439":49955,"Ġç¥ŀ":49956,"ãģ®å®":49957,"ãĤ¼":49958,"Ġ$\\":49959,"âĹ¼":49960,"Ġencount":49961,"Ġ 1\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mprettyprint\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mpprint\u001b[39;00m\n","\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'prettyprint'"]}],"source":["import prettyprint as pprint"]},{"cell_type":"code","execution_count":13,"metadata":{"id":"7WlGFTdri6ie","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1717906282339,"user_tz":-240,"elapsed":471,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"7c4a89c0-a8c7-4791-f131-5590b8d54e03"},"outputs":[{"output_type":"stream","name":"stdout","text":["INPUT: although cryptocurrencies like bitcoin and ethereum are quite popular today, they still lag behind centralized payment systems like visa in terms of transaction rates and time to finality. as of october 2020, bitcoin's and ethereum's network processes an average of 3-4 and 10 transactions per second (tps), respectively. in contrast, visa's global payment system handled a reported 1,700 tps and claimed to be capable of handling more than 24,000 tps . for a cryptocurrency to be adopted universally, it must be able to scale to process transactions at much higher throughput, i.e., tps rate. hence, blockchain protocols must be scalable to be suitable for widespread adoption.however, there are many challenges on the road to scaling blockchain based dls. garay et al. and kiyayas et al. show that existing blockchain protocols suffer from a loss of security properties as we scale the system. these security properties are fundamental to the operation of a robust dl. and consider a model with two types of agents, honest and adversarial where the adversary tries to attack the ledger by strategically forking the blockchain. a successful fork would allow the adversary to perform a double-spending attack.in this paper, first, we consider a setting in which all agents are honest and show that disparities in the connection to the peer-topeer network can make the system unfair. in such a case, nodes with a better internet connection will be able to grab a larger share of the reward while those with slower connections might lose out. 
we show that this disparity significantly increases as we increase the throughput of the system. notice that improving the quality of the overlay network may be more complicated than making protocol-level changes that may be implemented by merely updating the software clients.in literature, it is typically assumed that all the agents have equal access to the network, albeit with some finite delay. however, this is seldom the case in practice where some nodes may have better internet connections than others. for the first time, we introduce asymmetry in modeling network connections by assuming different delays for different nodes. hence, faster nodes would have shorter delays, while slower nodes would have longer delays which in turn results in asymmetry in the rewards collected by these agents. we first analyse consequences of this model in a setting with honest agents and then extend our discussion to rational agents.in order to analyze and quantify network fairness, we introduce two measures of fairness based on network events associated with broadcasting a transaction and broadcasting a block. first, we introduce frontrunning, an event associated with a node receiving a transaction. frontrunning (that we deal with in this paper) occurs when a node confirms a transaction before someone else hears about the transaction. we measure , the probability of this event happening between two fractions of the network. if is high, the faster nodes would consistently be able to grab high-value transactions while the slower ones would only be able to pick low-value ones left out by others. thus, a high would negatively impact some agents' revenue. we show that if we try to scale a bitcoinlike system to the throughput offered by the likes of visa, approaches to nearly 1, which implies that the slower nodes in the system will rarely be able to mine any high-value transactions that would result in these nodes receiving minimal reward in exchange for their mining efforts.we then consider the process of broadcasting a block through the network. publishing fairness quantifies the advantage a node might have over other nodes in broadcasting a block. if a node is able to propagate its block faster than other nodes, in case of an eventual fork, its fork would have a higher probability of being accepted. since we know that at higher throughputs forks become more common, faster nodes would be able to get more blocks accepted while those of slower nodes would frequently be orphaned. thus, the slower nodes, would not be able to even gather the fixed block rewards.as both of these measures deteriorate as with increased throughput, small variations in network access may lead to the system becoming unfair for the slower nodes. this would result in some agents gaining more than their fair share of reward while some agents earn less. this could certainly impact the profitability of the agents that earn less since they still need to pay for the costs associated with mining. thus, it may lead to drop in the agents maintaining the dl since agents that are unable to accumulate enough reward to break even the mining costs might shut down their mining operation or they might adopt strategic behavior to collect more rewards than that obtained by following the protocol honestly, either of which would reduce the security of the blockchain.we discuss possible behavior that a lack of network fairness could elicit from rational agents. 
their behavior could potentially hurt the stability of the system and reduce the effective throughput of the system. we use simulations to show that as the fairness reduces, the default strategy mining on top of the longest chain does not remain the dominant strategy which means that rational agents gain more reward by intentionally forking the longest chain. this could have adverse effect on the resilience of the blockchain against byzantine adversaries, making it less secure2 . hence, even though we scale the system to increase the throughput, we might not find much practical advantage due to these issues.thus, the potential of blockchain technology is hindered by the capabilities of the underlying networking infrastructure. hence, faster nodes would have shorter delays, while slower nodes would have longer delays which in turn results in asymmetry in the rewards collected by these agents.in order to analyze and quantify network fairness, we introduce two measures of fairness based on network events associated with broadcasting a transaction and broadcasting a block. we show that if we try to scale a bitcoinlike system to the throughput offered by the likes of visa, approaches to nearly 1, which implies that the slower nodes in the system will rarely be able to mine any high-value transactions that would result in these nodes receiving minimal reward in exchange for their mining efforts. if a node is able to propagate its block faster than other nodes, in case of an eventual fork, its fork would have a higher probability of being accepted. since we know that at higher throughputs forks become more common, faster nodes would be able to get more blocks accepted while those of slower nodes would frequently be orphaned. thus, it may lead to drop in the agents maintaining the dl since agents that are unable to accumulate enough reward to break even the mining costs might shut down their mining operation or they might adopt strategic behavior to collect more rewards than that obtained by following the protocol honestly, either of which would reduce the security of the blockchain. by increasing the block creation rate, we risk a node mining a block before it receives the latest block mined by the network. a lack of publishing fairness implies that not only slower nodes are less likely to receive reward transaction fees in the mined block but the are also less likely to receive the fixed block reward associated with mining a new block.01 for the bitcoin network. by broadcasting a transaction to other nodes, a agent is potentially increasing the number of nodes competing to include the transaction in their blocks and collect the corresponding transaction fees. a slow node that does not have enough high-value transactions in its mempool might have an incentive to either fork the block mined by a frontrunner (undercutting) or given a fork pick the fork that offers an opportunity to collect a higher transaction fee (petty mining). a slower node could fork a block mined by a faster node containing many high-value transactions due to frontrunning and include those transactions in its own block while leaving some of the transactions for others to include. 
secondly, even if a node receives the block mined by a slower node later, it would drop the previous block and mine on top of this instead since it offers a higher reward.we also discussed that not only does a lack of fairness impacts the revenue of some agents, it might also create an incentive for them to deviate from the honest mining strategy, which might impact the security of the blockchain system and further exacerbate lack of fairness in rewards. this implies that if a block is mined in a chain having lower _ than another chain, the block might end up earlier in the total block ordering than a block that has already been mined.\n","TARGET: blockchain-based distributed ledgers (dls) promise to transform the existing financial system by making it truly democratic. in the past decade, blockchain technology has seen many novel applications ranging from the banking industry to real estate. however, in order to be adopted universally, blockchain systems must be scalable to support a high volume of transactions. as we increase the throughput of the dl system, the underlying peer-to-peer network might face multiple levels of challenges to keep up with the requirements. due to varying network capacities, the slower nodes would be at a relative disadvantage compared to the faster ones, which could negatively impact their revenue. in order to quantify their relative advantage or disadvantage, we introduce two measures of network fairness, , the probability of frontrunning and , the publishing fairness. we show that as we scale the blockchain, both these measures deteriorate, implying that the slower nodes face a disadvantage at higher throughputs. it results in the faster nodes getting more than their fair share of the reward while the slower nodes (slow in terms of network quality) get less. thus, fairness and scalability in blockchain systems do not go hand in hand.in a setting with rational miners, lack of fairness causes miners to deviate from the \"longest chain rule\" or undercut, which would reduce the blockchain's resilience against byzantine adversaries. hence, fairness is not only a desirable property for a blockchain system but also essential for the security of the blockchain and any scalable blockchain protocol proposed must ensure fairness. 
\n","hello\n"]}],"source":["print(\"INPUT: \",train_dataset['input_text'][146])\n","print(\"TARGET: \",train_dataset['target_text'][146])\n","print(\"hello\")"]},{"cell_type":"code","execution_count":14,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":49,"referenced_widgets":["784df40b813a4a19bcf0c14e118135df","74818e74da3d4f06b7ad3d5b872cccb0","9c7c7d4b8b1f4e16a3afe3e312aba208","c2a3f46cf39d4bb99023f0b2b08dd4d4","97c6a232be88446cb33ebc47b27d87a8","fc044f33b6d74b03b933b9b98c3c9184","cab39a216a694f0b8d1d22094b0c1ec8","dc8ecdaeb6254bc0a7611e3647190e26","4a5399cfaa45400bb13a131ab4d1dd57","051ef654f3554b809b2c885f3b65e283","ca50d23ab252423d8e538eef087e817c"]},"id":"OTR1wU63WBOr","outputId":"7c821272-de34-44c6-d88e-61621fd97a7d","executionInfo":{"status":"ok","timestamp":1717906287965,"user_tz":-240,"elapsed":1370,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"display_data","data":{"text/plain":["Map: 0%| | 0/101 [00:00=1.14.0 in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.16.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (1.4.2)\n","Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (2024.5.15)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (4.66.4)\n","Building wheels for collected packages: rouge_score\n"," Building wheel for rouge_score (setup.py) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24933 sha256=e69b2001eb936242d2f8f9648274507d11c6c9c9988c5ca24424e18f95536dce\n"," Stored in directory: /root/.cache/pip/wheels/5f/dd/89/461065a73be61a532ff8599a28e9beef17985c9e9c31e541b4\n","Successfully built rouge_score\n","Installing collected packages: rouge_score\n","Successfully installed rouge_score-0.1.2\n"]},{"output_type":"stream","name":"stderr","text":[":2: FutureWarning: load_metric is deprecated and will be removed in the next major version of datasets. Use 'evaluate.load' instead, from the new library 🤗 Evaluate: https://huggingface.co/docs/evaluate\n"," rouge = load_metric(\"rouge\")\n","/usr/local/lib/python3.10/dist-packages/datasets/load.py:759: FutureWarning: The repository for rouge contains custom code which must be executed to correctly load the metric. 
You can inspect the repository content at https://raw.githubusercontent.com/huggingface/datasets/2.19.2/metrics/rouge/rouge.py\n","You can avoid this message in future by passing the argument `trust_remote_code=True`.\n","Passing `trust_remote_code=True` will be mandatory to load this metric from the next major release of `datasets`.\n"," warnings.warn(\n"]},{"output_type":"display_data","data":{"text/plain":["Downloading builder script: 0%| | 0.00/2.17k [00:00=1.17 in /usr/local/lib/python3.10/dist-packages (from accelerate) (1.25.2)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (24.0)\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate) (5.9.5)\n","Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from accelerate) (6.0.1)\n","Requirement already satisfied: torch>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (2.3.0+cu121)\n","Requirement already satisfied: huggingface-hub in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.23.2)\n","Requirement already satisfied: safetensors>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.4.3)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.14.0)\n","Requirement already satisfied: typing-extensions>=4.8.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (4.12.1)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (1.12.1)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.3)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.1.4)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2023.6.0)\n","Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\n","Collecting nvidia-cuda-runtime-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\n","Collecting nvidia-cuda-cupti-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\n","Collecting nvidia-cudnn-cu12==8.9.2.26 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\n","Collecting nvidia-cublas-cu12==12.1.3.1 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\n","Collecting nvidia-cufft-cu12==11.0.2.54 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\n","Collecting nvidia-curand-cu12==10.3.2.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\n","Collecting nvidia-cusolver-cu12==11.4.5.107 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\n","Collecting nvidia-cusparse-cu12==12.1.0.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 
MB)\n","Collecting nvidia-nccl-cu12==2.20.5 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl (176.2 MB)\n","Collecting nvidia-nvtx-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\n","Requirement already satisfied: triton==2.3.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2.3.0)\n","Collecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch>=1.10.0->accelerate)\n"," Downloading nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_x86_64.whl (21.3 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.3/21.3 MB\u001b[0m \u001b[31m19.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (2.31.0)\n","Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (4.66.4)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.10.0->accelerate) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2024.6.2)\n","Requirement already satisfied: mpmath<1.4.0,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.10.0->accelerate) (1.3.0)\n","Installing collected packages: nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, accelerate\n","Successfully installed accelerate-0.31.0 nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.20.5 nvidia-nvjitlink-cu12-12.5.40 nvidia-nvtx-cu12-12.1.105\n"]}],"source":["!pip install accelerate -U"]},{"cell_type":"code","execution_count":21,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"h6UGccgsXO3S","outputId":"507caa06-0a84-476f-dd0e-35cba49d1fec","executionInfo":{"status":"ok","timestamp":1717906332515,"user_tz":-240,"elapsed":434,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/transformers/training_args.py:1474: FutureWarning: `evaluation_strategy` is deprecated and will be removed in version 4.46 of 🤗 Transformers. 
Use `eval_strategy` instead\n"," warnings.warn(\n"]}],"source":["# enable fp16 apex training\n","training_args = Seq2SeqTrainingArguments(\n"," predict_with_generate=True,\n"," evaluation_strategy=\"steps\",\n"," per_device_train_batch_size=batch_size,\n"," per_device_eval_batch_size=batch_size,\n"," fp16=True,\n"," output_dir=\"./\",\n"," logging_steps=5,\n"," eval_steps=10,\n"," save_steps=10,\n"," save_total_limit=2,\n"," gradient_accumulation_steps=4,\n"," num_train_epochs=1,\n",")"]},{"cell_type":"code","execution_count":22,"metadata":{"id":"SM9e_n8xY6KE","executionInfo":{"status":"ok","timestamp":1717906340298,"user_tz":-240,"elapsed":447,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["trainer = Seq2SeqTrainer(\n"," model=led,\n"," tokenizer=tokenizer,\n"," args=training_args,\n"," compute_metrics=compute_metrics,\n"," train_dataset=train_dataset,\n"," eval_dataset=eval_dataset,\n",")"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":511},"id":"kLjtjhv4ZNCP","outputId":"64a5388a-f11d-4569-bc22-ba2da965e95c"},"outputs":[{"output_type":"display_data","data":{"text/plain":[""],"text/html":["\n","
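The FutureWarning just above flags evaluation_strategy as deprecated in favour of eval_strategy. A sketch of the same training arguments with the renamed keyword is below; everything else mirrors the cell above, and batch_size is set explicitly only so the snippet stands alone (substitute whatever value the notebook defined earlier):

from transformers import Seq2SeqTrainingArguments

batch_size = 2  # assumption: reuse the batch size defined earlier in the notebook

training_args = Seq2SeqTrainingArguments(
    predict_with_generate=True,
    eval_strategy="steps",              # renamed from evaluation_strategy
    per_device_train_batch_size=batch_size,
    per_device_eval_batch_size=batch_size,
    fp16=True,
    output_dir="./",
    logging_steps=5,
    eval_steps=10,
    save_steps=10,
    save_total_limit=2,
    gradient_accumulation_steps=4,
    num_train_epochs=1,
)

The training output further down also repeats a warning that non-default generation parameters (max_length=512, min_length=100, early_stopping=True, num_beams=2, length_penalty=2.0, no_repeat_ngram_size=3) are stored in the model config and should move into a GenerationConfig. A small sketch of that cleanup, copying the values straight from the warning, might look like this; led is the model variable passed to the trainer above, and "./" simply reuses the trainer's output_dir:

from transformers import GenerationConfig

generation_config = GenerationConfig(
    max_length=512,
    min_length=100,
    early_stopping=True,
    num_beams=2,
    length_penalty=2.0,
    no_repeat_ngram_size=3,
)
led.generation_config = generation_config  # attach to the LED model defined earlier
generation_config.save_pretrained("./")    # persist next to the trainer checkpoints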
\n"," \n"," \n"," [ 71/100 1:20:42 < 33:55, 0.01 it/s, Epoch 0.69/1]\n","
\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
StepTraining LossValidation LossRouge2 PrecisionRouge2 RecallRouge2 Fmeasure
102.7749002.9761920.1543000.1016000.117500
202.8651002.9165930.1377000.1177000.121300
303.0807002.8426500.1403000.0932000.106800
402.9918002.8240900.1552000.1171000.127500
503.0656002.7819960.1455000.1044000.115800
602.9960002.7739950.1481000.1151000.123900

\n","

\n"," \n"," \n"," [30/51 05:44 < 04:09, 0.08 it/s]\n","
\n"," "]},"metadata":{}},{"output_type":"stream","name":"stderr","text":["Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n"]}],"source":["trainer.train()"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":734},"id":"7q8GUp1cQDiW","outputId":"5487c4ad-3b87-4a53-8f29-47a91d530f2f","executionInfo":{"status":"ok","timestamp":1717559792279,"user_tz":-240,"elapsed":1219,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"display_data","data":{"text/plain":["
"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAAdMAAADvCAYAAACpMT7PAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABI1klEQVR4nO3dd1iT1x4H8G9YYSMOliAiooKKihN3FXcVpQ7UVq231Vq8altr1ToQN2pr1Yqj1tG697guHIgo4t6I1oUDcDIEgUDO/eM0wcjM4k3g93mePCRv3nFySPLL2SLGGAMhhBBCVGYgdAIIIYQQfUfBlBBCCFETBVNCCCFETRRMCSGEEDVRMCWEEELURMGUEEIIURMFU0IIIURNFEwJIYQQNVEwJYQQQtREwZQQLRk2bBiqV68udDIIIaWAgikpd0QiUYluERERQidVQUREBEQiEXbs2CF0UgQXFRWFbt26oWrVqjA1NUW1atXQs2dPbNq0Sb5PRkYGgoODde7/SMomI6ETQEhp++uvvxQeb9iwAeHh4fm2e3p6qnWd1atXQyqVqnUOkt/27dsxYMAANGzYEGPHjoWtrS0ePnyIyMhIrF69GoMGDQLAg+mMGTMAAO3btxcwxaQ8oGBKyp3PP/9c4fG5c+cQHh6eb/vHMjIyYG5uXuLrGBsbq5Q+UrTg4GB4eXnh3LlzMDExUXjuxYsXAqWKlHdUzUtIAdq3b4969erh0qVLaNu2LczNzTF58mQAwN69e9GjRw84OTlBLBbD3d0dM2fORG5ursI5Pm4zffToEUQiERYuXIhVq1bB3d0dYrEYTZs2xYULFzSW9gcPHqBfv36oWLEizM3N0aJFC/zvf//Lt9/SpUtRt25dmJubw9bWFk2aNFGoJk1LS8O4ceNQvXp1iMVi2NnZoVOnTrh8+XKh196xYwdEIhFOnTqV77mVK1dCJBLh5s2bAIDExER8+eWXcHZ2hlgshqOjI/z9/fHo0aMiX9/9+/fRtGnTfIEUAOzs7ADwvK5SpQoAYMaMGfKq++DgYPm+d+7cQd++fVGxYkWYmpqiSZMm2Ldvn8L51q1bB5FIhMjISIwcORKVKlWCtbU1hgwZgrdv3xaZTlK+UMmUkEK8fv0a3bp1Q2BgID7//HPY29sD4F+wlpaW+P7772FpaYkTJ05g2rRpSE1NxYIFC4o976ZNm5CWloaRI0dCJBIhNDQUAQEBePDggdql2aSkJLRs2RIZGRkYM2YMKlWqhPXr16NXr17YsWMH+vTpA4BXQY8ZMwZ9+/bF2LFjkZmZievXryMmJkZeTfrNN99gx44dGD16NLy8vPD69WtERUUhNjYWPj4+BV6/R48esLS0xLZt29CuXTuF57Zu3Yq6deuiXr16AIDPPvsMt27dwn//+19Ur14dL168QHh4OOLj44vsuOXq6orjx4/j6dOncHZ2LnCfKlWqICwsDKNGjUKfPn0QEBAAAPD29gYA3Lp1C61atULVqlUxceJEWFhYYNu2bejduzd27twpzyeZ0aNHo0KFCggODkZcXBzCwsLw+PFjeTs2IWCElHNBQUHs449Cu3btGAC2YsWKfPtnZGTk2zZy5Ehmbm7OMjMz5duGDh3KXF1d5Y8fPnzIALBKlSqxN2/eyLfv3buXAWD79+8vMp0nT55kANj27dsL3WfcuHEMADt9+rR8W1paGnNzc2PVq1dnubm5jDHG/P39Wd26dYu8no2NDQsKCipyn4IMHDiQ2dnZsZycHPm2hIQEZmBgwEJCQhhjjL19+5YBYAsWLFD6/GvWrGEAmImJCfvkk0/Y1KlT2enTp+WvTebly5cMAJs+fXq+c3Ts2JHVr19f4f8llUpZy5YtmYeHh3zb2rVrGQDWuHFjlp2dLd8eGhrKALC9e/cqnX5SNlE1LyGFEIvF+PLLL/NtNzMzk99PS0vDq1ev0KZNG2RkZODOnTvFnnfAgAGwtbWVP27Tpg0AXj2rroMHD6JZs2Zo3bq1fJulpSVGjBiBR48e4fbt2wCAChUq4OnTp0VWL1eoUAExMTF4/vy5UmkYMGAAXrx4odCLdseOHZBKpRgwYAAAnocmJiaIiIhQurp0+PDhOHz4MNq3b4+oqCjMnDkTbdq0gYeHB86ePVvs8W/evMGJEyfQv39/+f/v1atXeP36Nbp06YJ79+7h2bNnCseMGDFCodZg1KhRMDIywsGDB5VKOym7KJgSUoiqVasW2C5369Yt9OnTBzY2NrC2tkaVKlXknZdSUlKKPW+1atUUHssCqyba4B4/fozatWvn2y7rmfz48WMAwE8//QRLS0s0a9YMHh4eCAoKwpkzZxSOCQ0Nxc2bN+Hi4oJmzZohODi4RAG/a9eusLGxwdatW+Xbtm7dioYNG6JWrVoA+A+V+fPn49ChQ7C3t0fbtm0RGhqKxMTEEr3OLl264MiRI0hOTkZkZCSCgoLw+PFjfPrpp8V2Qvrnn3/AGMPUqVNRpUoVhdv06dMB5O/I5OHhofDY0tISjo6OxbbvkvKDgikhhfiwBCqTnJyMdu3a4dq1awgJCcH+/fsRHh6O+fPnA0CJhsIYGhoWuJ0xpl6CleDp6Ym4uDhs2bIFrVu3xs6dO9G6dWt5MAGA/v3748GDB1i6dCmcnJywYMEC1K1bF4cOHSry3GKxGL1798bu3buRk5ODZ8+e4cyZM/JSqcy4ceNw9+5dzJ07F6amppg6dSo8PT1x5cqVEr8Oc3NztGnTBsuWLcOUKVPw9u3bYtMn+x+NHz8e4eHhBd5q1qxZ4jQQAlAHJEKUEhERgdevX2PXrl1o27atfPvDhw8FTFUeV1dXxMXF5dsuq352dXWVb7OwsMCAAQMwYMAAZGdnIyAgALNnz8akSZNgamoKAHB0dMS3336Lb7/9Fi9evICPjw9mz56Nbt26FZmOAQMGYP369Th+/DhiY2PBGMsXTAHA3d0dP/zwA3744Qfcu3cPDRs2xKJFi/D3338r/dqbNGkCAEhISACAQjsG1ahRAwAfuuTn51eic9+7dw+ffPKJ/PG7d++QkJCA7t27K51OUjZRyZQQJchKlR+WIrOzs7F8+XKhkqSge/fuOH/+PKKjo+Xb0tPTsWrVKlSvXh1eXl4AeE/lD5mYmMDLywuMMUgkEuTm5uarsrazs4OTkxOysrKKTYefnx8qVqyIrVu3YuvWrWjWrBnc3Nzkz2dkZCAzM1PhGHd3d1hZWRV7/uPHjxe4XdZ+Kavmlo0JTk5Ozvc62rdvj5UrV8oD74devnyZb9uqVasgkUjkj8PCwpCTk1PsjwpSflDJlBAltGzZEra2thg6dCjGjBkDkUiEv/76q1SraHfu3FlgR6ehQ4di4sSJ2Lx5M7p164YxY8agYsWKWL9+PR4+fIidO3fCwID/fu7cuTMcHBzQqlUr2NvbIzY2FsuWLUOPHj1gZWWF5ORkODs7o2/fvmjQoAEsLS1x7NgxXLhwAYsWLSo2jcbGxggICMCWLVuQnp6OhQsXKjx/9+5ddOzYEf3794eXlxeMjIywe/duJCU
lITAwsMhz+/v7w83NDT179oS7uzvS09Nx7Ngx7N+/H02bNkXPnj0B8Gp6Ly8vbN26FbVq1ULFihVRr1491KtXD7///jtat26N+vXr4+uvv0aNGjWQlJSE6OhoPH36FNeuXVO4ZnZ2tjy9cXFxWL58OVq3bo1evXoVmxeknBCyKzEhuqCwoTGFDR05c+YMa9GiBTMzM2NOTk5swoQJ7MiRIwwAO3nypHy/wobGFDQcBIUM4fiQbGhMYTfZcJj79++zvn37sgoVKjBTU1PWrFkzduDAAYVzrVy5krVt25ZVqlSJicVi5u7uzn788UeWkpLCGGMsKyuL/fjjj6xBgwbMysqKWVhYsAYNGrDly5cXmcYPhYeHMwBMJBKxJ0+eKDz36tUrFhQUxOrUqcMsLCyYjY0Na968Odu2bVux5928eTMLDAxk7u7uzMzMjJmamjIvLy/2888/s9TUVIV9z549yxo3bsxMTEzy5fH9+/fZkCFDmIODAzM2NmZVq1Zln376KduxY4d8H9nQmFOnTrERI0YwW1tbZmlpyQYPHsxev35d4rwgZZ+IsVL8SU0IIXpk3bp1+PLLL3HhwgV5mywhBaE2U0IIIURNFEwJIYQQNVEwJYQQQtREbaaEEEKImqhkSgghhKiJgikhhBCiJpq0oQBSqRTPnz+HlZUVrVVICCHlFGMMaWlpcHJykk94UhgKpgV4/vw5XFxchE4GIYQQHfDkyZNCF6KXoWBaACsrKwA8A62trQVOje6TSCQ4evQoOnfurLDmIyke5Z1qKN9UQ/mmnNTUVLi4uMhjQlEomBZAVrVrbW1NwbQEJBIJzM3NYW1tTR9QJVHeqYbyTTWUb6opSXMfdUAihBBC1ETBlBBCCFETBVMtSUwENm0CbtwQOiWEEEK0jdpMtWTyZGDtWuCnn4B584RODSFEUxhjyMnJQW5urtBJUZpEIoGRkREyMzP1Mv3aYGxsDENDQ7XPQ8FUSz75hAfTEyeETgkhRFOys7ORkJCAjIwMoZOiEsYYHBwc8OTJExpD/y+RSARnZ2dYWlqqdR4KplryySf876VLQEoKYGMjbHoIIeqRSqV4+PAhDA0N4eTkBBMTE70LSFKpFO/evYOlpWWxkxCUB4wxvHz5Ek+fPoWHh4daJVQKplri7Ax4eAD37gGRkUDPnkKniBCijuzsbEilUri4uMDc3Fzo5KhEKpUiOzsbpqamFEz/VaVKFTx69AgSiUStYEq5qUUdOvC/J08Kmw5CiOZQECpbNFW7QO8KLZJV9VK7KSGElG0UTLWofXv+99o14PVrQZNCCCFEiyiYapG9PVC3Lr8fESFoUgghRKOqV6+OxYsXC50MnUHBVMuo3ZQQIiSRSCS/GRoawtbWFoaGhvJtwcHBKp33woULGDFihFppa9++PcaNG6fWOXQF9ebVsk8+AZYupXZTQogwEhIS5Pe3bNmCadOm4c6dO/KOVB+Or2SMITc3F0ZGxYeGKlWqaD6xeoxKplrWrh0gEgGxsXyKQUJI2cEYkJ4uzI2xkqXRwcFBfrO2toZIJJI/vnPnDqysrHDo0CE0btwYYrEYUVFRuH//Pvz9/WFvbw9LS0s0bdoUx44dUzjvx9W8IpEIf/zxB/r06QNzc3N4eHhg3759auXvzp07UbduXYjFYlSvXh2LFi1SeH758uXw8PCAqakp7O3t0bdvX/lzO3bsQP369WFmZoZKlSrBz88P6enpaqWnKBRMtaxiRaBhQ36fqnoJKVsyMgBLS2FumpyEaeLEiZg3bx5iY2Ph7e2Nd+/eoXv37jh+/DiuXLmCrl27omfPnoiPjy/yPDNmzED//v1x/fp1dO/eHYMHD8abN29UStOlS5fQv39/BAYG4saNGwgODsbUqVOxbt06AMDFixcxZswYhISEIC4uDocPH0bbtm0B8NL4wIEDMXz4cMTGxiIiIgIBAQFgJf0FogKq5i0FHToAV67wYDpwoNCpIYQQRSEhIejUqZP8ccWKFdGgQQP545kzZ2L37t3Yt28fRo8eXeh5hg0bhoH/fsnNmTMHS5Yswfnz59G1a1el0/TLL7+gY8eOmDp1KgCgVq1auH37NhYsWIBhw4YhPj4eFhYW+PTTT2FlZQVXV1c0atQIAA+mOTk5CAgIgKurKwCgfv36SqdBGVQyLQU03pSQssncHHj3TpibJidhatKkicLjd+/eYfz48fD09ESFChVgaWmJ2NjYYkum3t7e8vsWFhawtrbGixcvVEpTbGwsWrVqpbCtVatWuHfvHnJzc9GpUye4urqiRo0a+OKLL7Bx40b5nMkNGjRAx44dUb9+ffTr1w+rV6/G27dvVUpHSVEwLQVt2gCGhsD9+0Ax70VCiB4RiQALC2FumpwW2MLCQuHx+PHjsXv3bsyZMwenT5/G1atXUb9+fWRnZxd5HmNj44/yRwSpVKq5hH7AysoKly9fxubNm+Ho6Ihp06ahQYMGSE5OhqGhIcLDw3Ho0CF4eXlh6dKlqF27Nh4+fKiVtAAUTEuFtTUg++FH7aaEEF135swZDBs2DH369EH9+vXh4OCAR48elWoaPD09cebMmXzpqlWrlnwOXSMjI/j5+SE0NBTXr1/Ho0ePcOLfKkCRSIRWrVphxowZuHLlCkxMTLB7926tpVfQYBoWFgZvb29YW1vD2toavr6+OHToUKH7r169Gm3atIGtrS1sbW3h5+eH8+fPK+wzbNgwhXFVIpFIpfp6TaPxpoQQfeHh4YFdu3bh6tWruHbtGgYNGqS1EubLly9x9epVhVtSUhJ++OEHHD9+HDNnzsTdu3exfv16LFu2DOPHjwcAHDhwAEuWLMHVq1fx+PFjbNiwAVKpFLVr10ZMTAzmzJmDixcvIj4+Hrt27cLLly/h6empldcACBxMnZ2dMW/ePFy6dAkXL15Ehw4d4O/vj1u3bhW4f0REBAYOHIiTJ08iOjoaLi4u6Ny5M549e6awX9euXZGQkCC/bd68uTReTpE+bDfVYocyQghR2y+//AJbW1u0bNkSPXv2RJcuXeDj46OVa23atAmNGjVSuK1evRo+Pj7Ytm0btmzZgnr16mHatGkICQnBsGHDAAAVKlTArl270KFDB3h6emLFihXYvHkz6tatC2tra0RGRqJ79+6oVasWpkyZgkWLFqFbt25aeQ0AIGLa7CusgooVK2LBggX4z3/+U+y+ubm5sLW1xbJlyzBkyBAAvGSanJyMPXv2qJyG1NRU2NjYICUlBdbW1iqf50MZGUCFCoBEAvzzD+DurpHT6gSJRIKDBw+ie/fu+dpMSNEo71QjRL5lZmbi4cOHcHNzg6mpaalcU9OkUilSU1NhbW1Nq9/8q6j/qzKxQGeGxuTm5mL79u1IT0+Hr69viY7JyMiARCJBxYoVFbZHRETAzs4Otra26NChA2bNmoVKlSoVep6srCxkZWXJH6empgLgH1iJRKLCq8nP2Bho3twQUVEGCA/PQbVqOvUbRi2yPNJUXpUnlHeqESLfJBIJGGOQSqVaq/LUNlnZSfY6CP+BwRgrcD1TZd5fggfTGzduwN
fXF5mZmbC0tMTu3bvh5eVVomN/+uknODk5wc/PT76ta9euCAgIgJubG+7fv4/JkyejW7duiI6OLnTh17lz52LGjBn5th89elSjiwBXrVobQB1s2pQIR8dLGjuvrggPDxc6CXqL8k41pZlvRkZGcHBwwLt374rt1arr0tLShE6CzsjOzsb79+8RGRmJnJwchecylJgZQ/Bq3uzsbMTHxyMlJQU7duzAH3/8gVOnThUbUOfNm4fQ0FBEREQojG362IMHD+Du7o5jx46hY8eOBe5TUMnUxcUFr1690lg1LwBERorg52cEe3uG+PgcjXZtF5JEIkF4eDg6depEVZVKorxTjRD5lpmZiSdPnqB69ep6W83LGENaWhqsrKw0tii2vsvMzMSjR4/g4uJSYDVv5cqV9aOa18TEBDVr1gQANG7cGBcuXMBvv/2GlStXFnrMwoULMW/ePBw7dqzIQAoANWrUQOXKlfHPP/8UGkzFYjHEYnG+7cbGxhr9oLZuDZiaAklJIty/bwwtdiwThKbzqzyhvFNNaeZbbm4uRCIRDAwM9La9UVa1K3sdBDAwMIBIJCrwvaTMe0vnclMqlSqUEj8WGhqKmTNn4vDhw/lm7SjI06dP8fr1azg6OmoymSoRiwHZhB40GxIh+knH+mwSNWnq/yloMJ00aRIiIyPx6NEj3LhxA5MmTUJERAQGDx4MABgyZAgmTZok33/+/PmYOnUq/vzzT1SvXh2JiYlITEzEu3fvAPApsH788UecO3cOjx49wvHjx+Hv74+aNWuiS5cugrzGj9F4U0L0k6yUokw7GtF9svbvwvrUlJSg1bwvXrzAkCFDkJCQABsbG3h7e+PIkSPyCZfj4+MVqiLCwsKQnZ2tsMwOAEyfPh3BwcEwNDTE9evXsX79eiQnJ8PJyQmdO3fGzJkzC6zGFYJsvOnJk4BUClBNCyH6wdDQEBUqVJDPNWtubq537Y5SqRTZ2dnIzMykal7w/Hj58iXMzc1LtIZrUQQNpmvWrCny+YiICIXHxU1nZWZmhiNHjqiZKu1q0oQvn/TmDXD9et7ybIQQ3efg4AAAKk/eLjTGGN6/fw8zMzO9+yGgLQYGBqhWrZra+SF4B6TyxtiYT3x/6BAvnVIwJUR/iEQiODo6ws7OTi/HBkskEkRGRqJt27bU4e1fJiYmGimlUzAVQIcOPJieOAF8953QqSGEKMvQ0FDtNjYhGBoaIicnB6amphRMNYwqzQUgazeNjAQ+GiNMCCFED1EwFUDDhnye3tRU4PJloVNDCCFEXRRMBWBoCLRrx+/TEBlCCNF/FEwFIhtvSpM3EEKI/qNgKhBZu2lUFKDnc2YTQki5R8FUIHXrAlWq8HVOz58XOjWEEELUQcFUIAYGQPv2/D61mxJCiH6jYCogajclhJCygYKpgGTtptHRwPv3wqaFEEKI6iiYCqhWLcDJCcjK4gGVEEKIfqJgKiCRSHEVGUIIIfqJgqnAqN2UEEL0HwVTgclKpufPA/+ucU4IIUTPUDAVmJsbUL06n/A+Kkro1BBCCFEFBVMdQO2mhBCi3yiY6gBqNyWEEP1GwVQHyEqmly8DycmCJoUQQogKKJjqgKpV+ZhTqZQvGE4IIUS/UDDVEdRuSggh+ouCqY6gdlNCCNFfFEx1hGwFmevXgVevBE0KIYQQJVEw1RF2dkC9evx+RISgSSGEEKIkCqY6hNpNCSFEP1Ew1SHUbkoIIfpJ0GAaFhYGb29vWFtbw9raGr6+vjh06FCRx2zfvh116tSBqakp6tevj4MHDyo8zxjDtGnT4OjoCDMzM/j5+eHevXvafBka064dX0nmzh0gIUHo1BBCCCkpQYOps7Mz5s2bh0uXLuHixYvo0KED/P39cevWrQL3P3v2LAYOHIj//Oc/uHLlCnr37o3evXvj5s2b8n1CQ0OxZMkSrFixAjExMbCwsECXLl2QmZlZWi9LZba2QKNG/D5V9RJCiP4wEvLiPXv2VHg8e/ZshIWF4dy5c6hbt26+/X/77Td07doVP/74IwBg5syZCA8Px7Jly7BixQowxrB48WJMmTIF/v7+AIANGzbA3t4ee/bsQWBgYIHpyMrKQlZWlvxxamoqAEAikUAikWjktZZUu3YGuHzZEMePS9GvX26pXltVsjwq7bwqCyjvVEP5phrKN+Uok0+CBtMP5ebmYvv27UhPT4evr2+B+0RHR+P7779X2NalSxfs2bMHAPDw4UMkJibCz89P/ryNjQ2aN2+O6OjoQoPp3LlzMWPGjHzbjx49CnNzcxVfkWosLOwA+OJ//3uPgwePleq11RUeHi50EvQW5Z1qKN9UQ/lWMhkZGSXeV/BgeuPGDfj6+iIzMxOWlpbYvXs3vLy8Ctw3MTER9vb2Ctvs7e2RmJgof162rbB9CjJp0iSFIJ2amgoXFxd07twZ1tbWKr0uVbVpA8ydy5CUZIG6dbvD1bVUL68SiUSC8PBwdOrUCcbGxkInR69Q3qmG8k01lG/KkdVSloTgwbR27dq4evUqUlJSsGPHDgwdOhSnTp0qNKBqg1gshlgszrfd2Ni41N9wFSsCTZsC584BUVHGqFmzVC+vFiHyq6ygvFMN5ZtqKN9KRpk8UqkD0pMnT/D06VP54/Pnz2PcuHFYtWqV0ucyMTFBzZo10bhxY8ydOxcNGjTAb7/9VuC+Dg4OSEpKUtiWlJQEBwcH+fOybYXtow9ovKlmPHgALFgAjBwJvHwpdGoIIWWZSsF00KBBOPnvN31iYiI6deqE8+fP4+eff0ZISIhaCZJKpQqdgT7k6+uL48ePK2wLDw+Xt7G6ubnBwcFBYZ/U1FTExMQU2g6riz4cb8qYsGnRN/fuAXPnAo0bA+7uwIQJwKpV/C8hhGiLSsH05s2baNasGQBg27ZtqFevHs6ePYuNGzdi3bp1JT7PpEmTEBkZiUePHuHGjRuYNGkSIiIiMHjwYADAkCFDMGnSJPn+Y8eOxeHDh7Fo0SLcuXMHwcHBuHjxIkaPHg0AEIlEGDduHGbNmoV9+/bhxo0bGDJkCJycnNC7d29VXqogWrYETEyAp0+B+/eFTo3ui40FZs4EGjTgS9lNnszXhjUwAFq35vts2ADExQmbTkJI2aVSm6lEIpG3MR47dgy9evUCANSpUwcJSsw28OLFCwwZMgQJCQmwsbGBt7c3jhw5gk6dOgEA4uPjYWCQF+9btmyJTZs2YcqUKZg8eTI8PDywZ88e1JNNagtgwoQJSE9Px4gRI5CcnIzWrVvj8OHDMDU1VeWlCsLcHGjRgq9teuIE9KrdtDQwBty8CezYwW+3b+c9Z2QEdOwI9O0L+PsDVaoAvXoB+/cD06cDW7YIl25CSBnGVNCsWTP2008/scjISGZqasquXr3KGGMsOjqaVa1aVZVT6pSUlBQGgKWkpAiWhuBgxgDGAgMFS0KJZWdnsz179rDs7GytXUMqZezyZcYmT2asVi2eN7KbsTFjPXowtnYtY69f5z/2ypW8fa9d01oSVVIaeVcWUb6phvJNOcrEApWqeefPn4+VK1eiffv2GDhwIBo0a
AAA2Ldvn7z6l6jnw05I5bXdlDHg/Hne3lmzJuDjA8yZA9y9C4jFvOT511+8c9GBA8CwYbw39McaNgT69+f3p04tzVdACCkvVKrmbd++PV69eoXU1FTY2trKt48YMaLUJzkoq5o3B8zMgKQk3iZYiiOFBJWby4cF7dzJb/Hxec+ZmQHdu/Mq3B49ACurkp93xgxeJbxvHw/Q9JuPEKJJKpVM379/j6ysLHkgffz4MRYvXoy4uDjY2dlpNIHllVgMtGrF75f1VWTevQN27eIlS0dH3mno1195ILWwAAYMALZv5yXQHTuAwEDlAikA1KkDfPEFv0+lU0KIpqkUTP39/bFhwwYAQHJyMpo3b45Fixahd+/eCAsL02gCyzPZEJmyON40Ph74/Xega1egUiXgs8+A9et5wLSxAQYNAnbv5o+3bOGlUQsL9a45bRrvoHT0KO/cRYimvX7Nfwz2719+m2fKK5WC6eXLl9GmTRsAwI4dO2Bvb4/Hjx9jw4YNWLJkiUYTWJ7J2k0jIgCpVNCkqE0q5dWrU6fyNkxXV2D0aODIESA7m48JHTeOl8JfvgQ2bgR69+ZVu5pSowbw1Vf8/pQp9GVHNEsiAfr1A86c4TUpHw2JJ2WcSm2mGRkZsPq3nu3o0aMICAiAgYEBWrRogcePH2s0geVZkya8OvPNG+D6dR6E9ElGBnDsGG+n/N//gA+nRzYw4ONpe/bktzp1+Fqu2vbzz8DatcDp07yE2qWL9q9ZGKmUj4fNzS2FF060buxYxVqkRYuAD9bcIGWcSiXTmjVrYs+ePXjy5AmOHDmCzp07A+DjRkt7YviyzMiIT3wP6E+76bNnwMqVwKef8upbf39gzRoeSK2seHXthg28Y9Xp07ynrqdn6QRSAHB2Br79lt8XunQ6ZgzQooUxdu70EC4ReurdO92qWVi+HAgL4+/jxYv5j8XDh/l4aFJOqDL2Zvv27czY2JgZGBgwPz8/+fY5c+awrl27qnJKnaIL40xlFi7k4yM//VTolBTu7t1sFhgYy3x8chXGfwKMVa/O2H//y9jRo4xlZQmdUi4piTELC56+3buFScPevXl5VKVKOsvKonF/JRUVxZiVlZTVqvWaJSUJn2/HjjFmaMj/l/Pn822ffcYfDxsmbNo+RuNMlaNMLFApmDLGWEJCArt8+TLLzc2Vb4uJiWGxsbGqnlJn6FIwvXSJfyitrBiTSIROjaLsbMbmzWPM1FQqDwwiEWMtWjA2ezZj16/zyRZ00eTJPL316jH2wVu4VDx/zljlyoo/Ok6d0rF/ro56+ZKxqlXz8q1x41z29q1w6bl7lzFbW56WL77Ie79HR+dNKPL8uXDp+xgFU+WUSjCVefLkCXvy5Im6p9EpuhRMc3LyPqwxMUKnJk9MDGPe3nlfanXrvmSrV0tYYqLQKSuZN28Ys7Hhad+0qfSum5vLWKdO/LoNGzL22We8NP/ttzmllwg9lZvLWPfuPO/c3aXM2jqTAYw1a8ZYcnLpp+ftW8Zq1+bpadGCsffvFZ9v2ZI/N2lS6aetMBRMlaP1GZCkUilCQkJgY2MDV1dXuLq6okKFCpg5cyak+t7tVMcYGgLt2vH7utBumpbGe922aME7RVWqBKxZk4NZs85g6FCGj9Zl11m2tsD48fz+9OlATk7pXHfxYiA8nPdS3rQJGDqUf162bzcotTToq4ULgYMHAVNTYOvWHISEnEXFigznz/MhVkqs46y2nBw+3jkujrfD797N0/Uh2ftrxQrexkvKOFWi9cSJE1mVKlXY8uXL2bVr19i1a9fY77//zqpUqcImT56syil1ii6VTBljbMkS/gu3c2dh07FvH2MuLnml0S++YOzFC/39tZuamlfdumaN9q935QpjJib8emFhfFt6era8hHX4sPbToK+iovLaJVetynvPnT+fLa+5admS/09Lw3ff8Wuam/M5owuSk8NYzZp8vyVLSiddxdHXz6pQtF7N6+joyPbu3Ztv+549e5iTk5Mqp9QpuhZMb9zgH0gzM8Y2bmQsM7N0r//8OWP9+uUFUTc3xo4cyXtenz+gsg5e1appN1/T0xnz9OTX8vfPa1vLzs5mXbs+YABjQ4dq7/r67OVLxpyded4NGsTz7sP33KVLjFWowJ9v04axtDTtpuePP/I+C9u3F73v77/nfWZydKAmX58/q0LQejXvmzdvUKdOnXzb69Spgzdv3qhVUib51a0LuLkB798DgwcDVavyKqS7d7V7XamUL6zt6ckHoRsa8qEsN28C/46G0nvffsunMIyPB/74Q3vXGT+ez7Hs6Miv8+FQoLZtnwLgUyq+f6+9NOgjqRQYOpSv7VurFq8y/XgYlY8Przq3seHDrT79FEhP1056Tp8GRo3i94OD+VCvogwbxptCHj7kVcGk7FIpmDZo0ADLli3Lt33ZsmXw9vZWO1FEkUgEnD3LP7zOznzKskWLgNq1+ZSDW7fyWYQ0KTaWt9WOHAmkpPAJJC5eBObP5+utlhVmZny8KQDMmsUnmtC0ffv4GESAT5lYubLi83XqvEG1agxpaXxyC5Jn0aK8dtLt2wufk7lJEz6blrU1cOoUX8NW0//LR4+AgIC8mY5KMsezuXneuOaFC3VrbGxZV+rlOlWKvhEREczCwoJ5enqy4cOHs+HDhzNPT09maWnJIiMjVTmlTtG1at4PSSS87bJHDz4MJW+sImM//sjYvXvqnT8zk6+lKmvbs7BgbPHioquo9L3qKCuLMVdX/noXLNDsuT8cBvP99/mfl+XdDz/kMICxgADNXl+fnTmT1066cqXic4W9586eZczSkh/j58dYRoZm0pKaylj9+vy8Pj682r6kEhMZE4v5sVFRmkmPqvT9s1pSiYmMOToyNmaMes03Wq/mbdeuHe7evYs+ffogOTkZycnJCAgIwK1bt/DXX39pNtoTBUZGfPq9Awf4L+WpUwEnJz6f7YIFgIcHn8Js+3blS6unT/MpC4OD+bE9egC3b/Np0gwNtfBidISJCe/RCwDz5vEey5ogq6J89Yrn65w5he8bGMh79f7vf0Bysmaur89ev+arBeXmAgMHAl9/XbLjfH35zEMWFnwqyz59gMxM9dIilQKffw7cuAE4OAB79ypXO2Nvn7di0cKF6qWFFC83l/+/EhL4/Mi5uaV0YdVjdn5Xr15lBgYGmjylIHS5ZFoQiYTP5NOtm2Jp1c6OsYkTGbt/v+jj375lbMSIvOPs7RnburXkEy6UhV+7EgljtWrx1x8SoplzLlqU13Hs9u2C95HlXVZWNvPy4vv/+admrq+vcnN5zQvAmIdHwT10i3vPnTrFe9oCfGyqOqWTSZP4ecRixs6dU+0ct2/nTWpy967qaVFXWfisFickJK+ndWGfu5LSesmU6BYjI77CysGDwIMHfDJ3BwfgxQte0nJ35xO679zJ23tkGOMlWE9P3tEI4CWA2Fi+hFRpzZerC4yM+ALiAC89qNvecvUqMGkSv//LLzyPiyIS8RIYAGzerN619d2iRbyELhYX3U5alLZt+TnMzPjnom9fICtL+fNs3AjM
ncvv//EH0Ly58ucA+P+/Rw/+mfv1V9XOQYp38iSvWQN4P4XiPncapV7cVkQlU92Rnc3Yzp18bOqH09Y5OPCp9M6e5fP9yrbXrs1/zat2rbLxazc3N69dTJ1Zaz4cBtOrV9El/A/z7p9/+DEGBowlJKh+fX1WVDvph0r6njt2jDFT07whScrMD33uXF5b58SJJT+uMCdP8nOZmvLhPkIoK5/VgiQk8Fo1gLHhwzVzTiqZEhgb856HR44A9+8DEycCdnZ89ZY5c/jyZwcO8P2mTeMlqbZthU61sAwMgJkz+f3ffuMle1XIhsE4OOQfBlMUd3de8pFKgW3bVLu2Pnv9ms8qlJvL/5a0nbQoHTvy3tRiMW/rDAxUrJ0pzNOnvLYnK4v3UZg9W/20tGvHh/FkZub17iaakZvLhw0mJQH16gFLl5Z+GpRazzQgIKDI55Op54ROqlGDV1XNmMG/WFau5J0zWrXi1bteXkKnUHf06gU0bQpcuMDzTNkquQ+HwWzYAFSpotzxAwcCMTG8qnfMGOWO1WeyzlpPnvBOdCtXaq6ZoVMnHkh79eJjPQcN4lM5GhsXvH9GBg+kiYn8i3njRv5DS10iEf+hNWgQsGwZ8OOP+acgJKqZNYtPt2phwZsGhBi+p9RbxMbGpsibq6srhgwZoq20EjWZmPC2o/BwPnb09GkKpB8TifgHE+BB8enTkh+bkAD85z/8/vff8y9xZfXvz7+4z53j7d/lxS+/5LWTbtvGx4tqUpcuPJCamAA7dvDetQXNhcwY8OWXwKVLfDzwvn2qtdkWpm9fwMWF13r8/bfmzluenTiR199hxQqggPmESodmapbLlrLQZlqaylo7jFTKWNu2vO1l5MiSHZObm9c+3bBhyXuPFpR3fn78PLNmqZB4PXT2bF476YoVJTtG1ffc/v18WTTZ1IQfj5+W9QQ1MlK9D0FxZL2869Qp/eX/ytpn9cN20q++0vz5qc2UEDV8WDpds6ZkJcTffgOOHs1bDUYsVv36sl69mzaV/Rlz3rzJG08aGAiMGKHd6336Ka8GNDLi+fvll3njEHfu5P0HAF4roa0+BF99xUved+7wnsZENbm5vMo8KQmoXx9YskTY9AgaTOfOnYumTZvCysoKdnZ26N27N+Li4oo8pn379hCJRPluPXr0kO8zbNiwfM937dpV2y+HlCFt2vCqwZycvCqkwly9yjt4ASUbBlOcgABeHXn7Np8ooKxiLK+dtGZNzbaTFsXfn0/BaWgI/PUXD26XLgGyFqqxY/k2bbG2zvvRsGiR9q5T1s2cyYfCWFjwpgEzM2HTI2gwPXXqFIKCgnDu3DmEh4dDIpGgc+fOSC9ilupdu3YhISFBfrt58yYMDQ3Rr18/hf26du2qsN/m8j54jyhN1rP3779579yCZGTwX8fZ2byDy8iR6l+3QgU+JhHgpaey6pdfeI9ybbWTFiUggHfyMjQE1q3jvdszMvgCDqUxS9GYMbx0HBHBAzlRzrFjQEgIv79ypYDtpB8QNJgePnwYw4YNQ926ddGgQQOsW7cO8fHxuFTEu6tixYpwcHCQ38LDw2Fubp4vmIrFYoX9bG1ttf1ySBnTtCnv1SmV5k03+DFVh8EUR1bVu2ULv35ZEx2dV5pfvBho1Kj009CvX15P3exsvirN1q08yGmbiwuv3gaodKqshAQ+DIYxPnxq8GChU8SVwtum5FJSUgDwgFlSa9asQWBgICwsLBS2R0REwM7ODra2tujQoQNmzZqFSpUqFXiOrKwsZH0wPUpqaioAQCKRQFKSQWnlnCyPymJeTZ0K7N1rhO3bRbhwQYKGDfOe279fhLAw/hH6888cVKjASjSG8UOF5V2XLoClpREePxbh9OkctGxZdhpP37wBAgONkJMjQr9+UgwfnquxfFNWQACwbZsI27YZIDg4FxYWJRuHqgljxgAbNxpj2zaGkJAcuLpq/5r6/lnlczUb4sULA9Svz7BwYY5W/1/K5JOIMd3o4iCVStGrVy8kJycjKiqqRMecP38ezZs3R0xMDJo1aybfvmXLFpibm8PNzQ3379/H5MmTYWlpiejoaBgWMGN7cHAwZhTQMLZp0yaYl6X1xohKFi1qjNOnndGkSSKmTIkBALx5I8a4cZ8gNVWMXr3+wfDhtzR+3d9+a4STJ6uhW7eHGDnyusbPLwTGgDlzmuHCBUc4Or7DokWnYG5ewBiVcmLatJa4fr2K1t5DZc2mTXWwbVttmJrmYNGiU6ha9Z1Wr5eRkYFBgwYhJSUF1sW0Q+hMMB01ahQOHTqEqKgoODs7l+iYkSNHIjo6GtevF/1F8+DBA7i7u+PYsWPo2LFjvucLKpm6uLjg1atXxWYg4b/ewsPD0alTJxgXNhJej929CzRoYITcXF5KbNqUoWdPQ4SHG8Dbm+HMmRyVe+8WlXdHjojQs6cRqlRhePQop9BJBvTJ4sUGmDDBECYmDKdP56hcvVtW3nOHD4vQq5cRLC0ZHjzIQYUK2r3enTs52LLlGgIDG8DDw0ivVoM6dkyEHj0MwZgIGzbkIDBQ+6ErNTUVlStXLlEw1YlxpkFBQczZ2Zk9ePCgxMe8e/eOWVtbs8WLF5do/8qVK7MVJRzERuNMlVPWxq4VZPhwPpatY0fGfvklb47VW7fUO29ReZedzdepBRg7dEi96+iC6Gg+fhNgbPly9c5VVt5zUimTrxYUGqrda61YwZixsVQ+H7eZGWONGzM2bBhjCxcydvgwY8+elXy1qNL07FneZ2HEiNK7rjKxQNBgKpVKWVBQEHNycmJ3lVyXaO3atUwsFrNXr14Vu++TJ0+YSCRie/fuLdG5KZgqp6x8sRXl0aO8wf6yCQbUDQiMFZ93QUH8Wl98of61hPT6NWPVqvHX0r+/+l/YZek99+efPF+qVlVuIv6Sysr6eInFd8zUNC+ofnyzteWTlnz7LX+Pnz7N2Js3mk9XSUkkjLVrx9PWoIHmFnwvCb0JpqNGjWI2NjYsIiKCJSQkyG8ZH+TWF198wSYWsGRD69at2YABA/JtT0tLY+PHj2fR0dHs4cOH7NixY8zHx4d5eHiwzBJOS0PBVDll6YutKLLAVpLVYEqquLyLiuLXs7Qs3S8RTQsM5K/D3Z0xTXysytJ7LjOTr+YEMPbXX5o99/PnjLVsmbeW6qxZOWz37j3s/ftsFhfHV5aaMYOxvn35jEwGBgUHWFmw79qVsfHjGVu3jrFLl0rnPTllSt5nIC5O+9f7kN4EUwAF3tauXSvfp127dmzo0KEKx925c4cBYEePHs13zoyMDNa5c2dWpUoVZmxszFxdXdnXX3/NEhMTS5wuCqbKKUtfbEV5/pyxihUZc3Fh7MULzZyzuLzLzWXM1ZV/mWzbpplrlrbY2Lwv5IsXNXPOsvaemz07r+SlqWrWmBjGnJz4eW1sGDt4sPh8e/+esStXeFCfMIEvrO7iUniANTDgTR/r1zOWlqaZdH/oyBH+IwBgbPNmzZ+/OHoTTHUVBVPllLUvtqK8eqWZkpVMSfJu4kT+ZdK7t+auW5q++ipvPVFNKWvvude
vGTM35/l07Jj65/vzT8ZMTPj5PD0Zk7WiqZpvycl8rdkVKxgbPZqx9u0Zq1RJMbCam/PmiPDw/HMeq+LpU8YqV+bn/uYb9c+nCpqblxAtqVSpdGfqAfImcDh4ENC3VQ4TE/lSdABfcowUrGJFYPhwfl+dGZgkEj5+dfhwPhGFvz9fgcjDQ7302djwWaJGjuRrhZ48Cbx8yddKnjmTnz8jg0/P2KkT4OrKJ+W4peJon5wcPrPYq1dAgwbKL4UoBAqmhOi4+vWBunX5l+OuXUKnRjlLl/J0+/ry9XNJ4caN47MxHT4M3Lyp/PEvX/JAJlsYOziYv1+09eNPJOJrJU+ZAsTF8VmtRo0CbG2BZ8+A+fP5erBNmvBJ6F+8KPm5p08HIiP58nfbt+vHuq8UTAnRcSIR/5UO6NdcvWlpwPLl/D6VSovn7g706cPv//KLcsdeucKD1qlTgKUlsGcPD0iaWNS8JEQioEUL/v9OSOAr8Pj786kZL13iiwdUrcrnr96xA8jMLPxcR44Ac+bw+6tXq1+qLi0UTAnRA4GB/O/Jk/zLSh+sWcOrpWvV4l+ipHjjx/O/f/9d8v/zpk281B8fzwNPTAwPZEIRi/k0jXv28NewdCmf5zonB9i/n8+J7OgIfPMNcPas4jKDz54Bn3/O748alTd/sT6gYEqIHqhRg//yl0r5Ciu6TiLJa+f64Qfo1Uw7QmrRgrdNSiTAsmVF75uTw0v8gwcD798D3boB588DXl6lk9aSqFwZGD2ap+v2bWDSJMDZmf/IWrmS/wjw8OArwNy7x/sHvHrFFz5QtnQuNAqmhOgJfarq3baNl5Ts7PLWCSUlIyudhoUBha1G+eYN0L17XmelSZN4qU/b0xGqw9OTV98+fgwcP87XsrWw4J2Ypk/nNRinT/N20m3b9KOd9EMUTAnRE/378zaw8+f5F5CuYgxYsIDfHzNG/74UhdarF18s/e1bYO3a/M/fuMGrTcPDAXNzvmzcnDn6U/o3MAA6dODryCYl5fUAli1f+Mcf/PXrGwqmhOgJe3tAtk6DLq91f+wYcO0aL3WMGiV0avSPoSHw3Xf8/i+/8GXHZHbs4D2jHzwA3Nx4D9r+/YVJpyZYWPA20qNHgSdPeC9mfX09FEwJ0SMfVvXqxnpP+YWG8r//+Q8fP0mUN2wYH9P88CGwezcPqD//zDvvpKfzH1UXLgDe3kKnVHOqVuVDwPQVBVNC9EifPry3ZGwsUMzKg4K4coWXTD8sXRHlmZsD337L78+fz3vnyoaL/PADH4taqZJw6SP5UTAlRI/Y2AA9evD7utgRSdYhpn9/oHp1QZOi94KC+A+nixeB//2Ptz3/9RfPYyMjoVNHPkbBlBA9I6vq3byZD5XRFY8f884wAE3SoAn29rzHKwC4uABRUXljMInuoWBKiJ7p3p1PEffkCR/0rit+/ZW37fn58XGCRH2//cYncLh8GWjcWOjUkKJQMCVEz5iZ5U07pytVvW/e8CENAJVKNcnUlE/KULmy0CkhxaFgSogeklX1btvGZ8sRmmyCgQYN+JhBQsobCqaE6KEOHfjsQq9f896zQsrMzFupZPz4vMH3hJQnFEwJ0UNGRnmD24Wu6v3rLz6TjYuLfk1MTogmUTAlRE/Jqnp37+YLMwtBKs0bDvPdd4CxsTDpIERoFEwJ0VMtWvCxnOnpwIEDwqRh3z7g7l0+/vWrr4RJAyG6gIIpIXpKJOJLVgHCVfXKJrT/9lu+2gch5RUFU0L0mKyq9+BBvspIaTpzho9zNTEB/vvf0r02IbqGgikheqxePaB+fT48Zteu0r22rFT6xReAo2PpXpsQXUPBlBA9J0RVb1wcby8F8hazJqQ8o2BKiJ4LDOR/T54Enj8vnWsuWsSXgOvVC6hTp3SuSYguo2BKiJ5zcwNatuTBbds27V8vMRFYv57fp6kDCeEomBJSBsiqejdu1P61li4FsrMBX1+gVSvtX48QfSBoMJ07dy6aNm0KKysr2NnZoXfv3oiLiyvymHXr1kEkEincTE1NFfZhjGHatGlwdHSEmZkZ/Pz8cO/ePW2+FEIE1b8/X5D74kVgyBAgNVU713n3Dli+nN//8UeaOpAQGUGD6alTpxAUFIRz584hPDwcEokEnTt3Rnp6epHHWVtbIyEhQX57/PixwvOhoaFYsmQJVqxYgZiYGFhYWKBLly7IzMzU5sshRDB2drwd08CAT+/XqBFw/rzmr/PHH0ByMuDhwdtLCSGcoOu1Hz58WOHxunXrYGdnh0uXLqFt27aFHicSieDg4FDgc4wxLF68GFOmTIG/vz8AYMOGDbC3t8eePXsQKOutQUgZM3Ys0KQJX7LrwQNeBRsSAkyYwEut6pJI+JqlAPDDD5o5JyFlhaDB9GMpKSkAgIoVKxa537t37+Dq6gqpVAofHx/MmTMHdevWBQA8fPgQiYmJ8PPzk+9vY2OD5s2bIzo6usBgmpWVhaysLPnj1H/ryCQSCSS6sL6VjpPlEeWV8jSdd82aARcuAEFBhti+3QCTJwNHj0rx55+5cHZW79ybN4sQH28EOzuGgQNzBF36jd5zqqF8U44y+SRijDEtpqXEpFIpevXqheTkZERFRRW6X3R0NO7duwdvb2+kpKRg4cKFiIyMxK1bt+Ds7IyzZ8+iVatWeP78ORw/GEnev39/iEQibN26Nd85g4ODMWPGjHzbN23aBHNzc828QEJKEWPAiRMuWL3aG5mZRrCyykZQ0FW0aJGg8vm++649Hj2ywaBBsejf/65mE0yIDsrIyMCgQYOQkpICa2vrIvfVmWA6atQoHDp0CFFRUXBW4ie0RCKBp6cnBg4ciJkzZ6oUTAsqmbq4uODVq1fFZiDh/4Pw8HB06tQJxrRsiFK0nXf37gFDhhji0iXePeLrr3OxYIEUyv5GPHZMhO7djWBuznD/fg4qVdJ4UpVC7znVUL4pJzU1FZUrVy5RMNWJat7Ro0fjwIEDiIyMVCqQAoCxsTEaNWqEf/75BwDkbalJSUkKwTQpKQkNGzYs8BxisRhisbjAc9MbruQov1Snrbzz8uLz506dCoSGAqtXGyIqyhCbNwMNGpT8PLK20q++EsHBQXf+x/SeUw3lW8kok0eC9uZljGH06NHYvXs3Tpw4ATc3N6XPkZubixs3bsgDp5ubGxwcHHD8+HH5PqmpqYiJiYGvr6/G0k6IvjAxAebPB8LD+Ry6sbG8bfW333j1bXGuXOHHGhryNUsJIfkJGkyDgoLw999/Y9OmTbCyskJiYiISExPx/v17+T5DhgzBpEmT5I9DQkJw9OhRPHjwAJcvX8bnn3+Ox48f46t/F1MUiUQYN24cZs2ahX379uHGjRsYMmQInJyc0Lt379J+iYToDD8/4Pp1oGdPPunCuHFAjx7AixdFHydb/LtfP75+KiEkP0GDaVhYGFJSUtC+fXs4OjrKbx+2a8bHxyMhIa/TxNu3b/H111/D09MT3bt3R2pqKs6ePQsvLy/5PhMmTMB///tfjBgxAk2bNsW7d+9w+PDhfJM7EFLeVK4M7N0L/P47YGoKHD
oEeHsDR44UvP/jx4Ds40hTBxJSOEHbTEvS9ykiIkLh8a+//opfZQ04hRCJRAgJCUFISIg6ySOkTBKJ+GLebdvyaQhv3gS6dgW+/x6YMwf4sPvAr78CublAx46Aj49waSZE19HcvISUU/Xq8VmSRo/mj3/5BWjRArhzhz9++5bPeARQqZSQ4uhEb15CiDDMzPjE9Z07A19+CVy9CjRuzDsnvXgBpKfzauDOnYVOKSG6jYIpIQQ9e/LOSUOHAseOAV9/nTddIE1oT0jxqJqXEAIAcHLiHZFCQwEjI95W6uICDBggdMoI0X0UTAkhcgYGvCQaHc07J61fD9DYfkKKR9W8hJB8mjQBNm0SOhWE6A8qmRJCCCFqomBKCCGEqImCKSGEEKImCqaEEEKImqgDUgFk0xympqYKnBL9IJFIkJGRgdTUVFrWSUmUd6qhfFMN5ZtyZDGgJFPfUjAtQFpaGgDAxcVF4JQQQggRWlpaGmxsbIrcR8RKEnLLGalUiufPn8PKygoimvqlWKmpqXBxccGTJ0+KXY2eKKK8Uw3lm2oo35TDGENaWhqcnJxgYFB0qyiVTAtgYGAAZ2dnoZOhd6ytrekDqiLKO9VQvqmG8q3kiiuRylAHJEIIIURNFEwJIYQQNVEwJWoTi8WYPn06xB+uKk1KhPJONZRvqqF80x7qgEQIIYSoiUqmhBBCiJoomBJCCCFqomBKCCGEqImCKSGEEKImCqakRObOnYumTZvCysoKdnZ26N27N+Li4hT2yczMRFBQECpVqgRLS0t89tlnSEpKEijFumnevHkQiUQYN26cfBvlW+GePXuGzz//HJUqVYKZmRnq16+Pixcvyp9njGHatGlwdHSEmZkZ/Pz8cO/ePQFTLLzc3FxMnToVbm5uMDMzg7u7O2bOnKkwvyzlm+ZRMCUlcurUKQQFBeHcuXMIDw+HRCJB586dkZ6eLt/nu+++w/79+7F9+3acOnUKz58/R0BAgICp1i0XLlzAypUr4e3trbCd8q1gb9++RatWrWBsbIxDhw7h9u3bWLRoEWxtbeX7hIaGYsmSJVixYgViYmJgYWGBLl26IDMzU8CUC2v+/PkICwvDsmXLEBsbi/nz5yM0NBRLly6V70P5pgWMEBW8ePGCAWCnTp1ijDGWnJzMjI2N2fbt2+X7xMbGMgAsOjpaqGTqjLS0NObh4cHCw8NZu3bt2NixYxljlG9F+emnn1jr1q0LfV4qlTIHBwe2YMEC+bbk5GQmFovZ5s2bSyOJOqlHjx5s+PDhCtsCAgLY4MGDGWOUb9pCJVOikpSUFABAxYoVAQCXLl2CRCKBn5+ffJ86deqgWrVqiI6OFiSNuiQoKAg9evRQyB+A8q0o+/btQ5MmTdCvXz/Y2dmhUaNGWL16tfz5hw8fIjExUSHvbGxs0Lx583Kddy1btsTx48dx9+5dAMC1a9cQFRWFbt26AaB80xaa6J4oTSqVYty4cWjVqhXq1asHAEhMTISJiQkqVKigsK+9vT0SExMFSKXu2LJlCy5fvowLFy7ke47yrXAPHjxAWFgYvv/+e0yePBkXLlzAmDFjYGJigqFDh8rzx97eXuG48p53EydORGpqKurUqQNDQ0Pk5uZi9uzZGDx4MABQvmkJBVOitKCgINy8eRNRUVFCJ0XnPXnyBGPHjkV4eDhMTU2FTo5ekUqlaNKkCebMmQMAaNSoEW7evIkVK1Zg6NChAqdOd23btg0bN27Epk2bULduXVy9ehXjxo2Dk5MT5ZsWUTUvUcro0aNx4MABnDx5UmGZOgcHB2RnZyM5OVlh/6SkJDg4OJRyKnXHpUuX8OLFC/j4+MDIyAhGRkY4deoUlixZAiMjI9jb21O+FcLR0RFeXl4K2zw9PREfHw8A8vz5uOdzec+7H3/8ERMnTkRgYCDq16+PL774At999x3mzp0LgPJNWyiYkhJhjGH06NHYvXs3Tpw4ATc3N4XnGzduDGNjYxw/fly+LS4uDvHx8fD19S3t5OqMjh074saNG7h69ar81qRJEwwePFh+n/KtYK1atco3/Oru3btwdXUFALi5ucHBwUEh71JTUxETE1Ou8y4jIyPfQtaGhoaQSqUAKN+0RugeUEQ/jBo1itnY2LCIiAiWkJAgv2VkZMj3+eabb1i1atXYiRMn2MWLF5mvry/z9fUVMNW66cPevIxRvhXm/PnzzMjIiM2ePZvdu3ePbdy4kZmbm7O///5bvs+8efNYhQoV2N69e9n169eZv78/c3NzY+/fvxcw5cIaOnQoq1q1Kjtw4AB7+PAh27VrF6tcuTKbMGGCfB/KN82jYEpKBECBt7Vr18r3ef/+Pfv222+Zra0tMzc3Z3369GEJCQnCJVpHfRxMKd8Kt3//flavXj0mFotZnTp12KpVqxSel0qlbOrUqcze3p6JxWLWsWNHFhcXJ1BqdUNqaiobO3Ysq1atGjM1NWU1atRgP//8M8vKypLvQ/mmebQEGyGEEKImajMlhBBC1ETBlBBCCFETBVNCCCFETRRMCSGEEDVRMCWEEELURMGUEEIIURMFU0IIIURNFEwJIYQQNVEwJYQQQtREwZSQcuDly5cYNWoUqlWrBrFYDAcHB3Tp0gVnzpwBAIhEIuzZs0fYRBKix2g9U0LKgc8++wzZ2dlYv349atSogaSkJBw/fhyvX78WOmmElAk0Ny8hZVxycjJsbW0RERGBdu3a5Xu+evXqePz4sfyxq6srHj16BADYu3cvZsyYgdu3b8sXl/75559hZMR/h4tEIixfvhz79u1DREQEHB0dERoair59+5bKayNEV1A1LyFlnKWlJSwtLbFnzx5kZWXle/7ChQsAgLVr1yIhIUH++PTp0xgyZAjGjh2L27dvY+XKlVi3bh1mz56tcPzUqVPx2Wef4dq1axg8eDACAwMRGxur/RdGiA6hkikh5cDOnTvx9ddf4/379/Dx8UG7du0QGBgIb29vALyEuXv3bvTu3Vt+jJ+fHzp27IhJkybJt/3999+YMGECnj9/Lj/um2++QVhYmHyfFi1awMfHB8uXLy+dF0eIDqCSKSHlwGeffYbnz59j37596Nq1KyIiIuDj44N169YVesy1a9cQEhIiL9laWlri66+/RkJCAjIyMuT7+fr6Khzn6+tLJVNS7lAHJELKCVNTU3Tq1AmdOnXC1KlT8dVXX2H69OkYNmxYgfu/e/cOM2bMQEBAQIHnIoTkoZIpIeWUl5cX0tPTAQDGxsbIzc1VeN7HxwdxcXGoWbNmvpuBQd5Xx7lz5xSOO3fuHDw9PbX/AgjRIVQyJaSMe/36Nfr164fhw4fD29sbVlZWuHjxIkJDQ+Hv7w+A9+g9fvw4WrVqBbFYDFtbW0ybNg2ffvopqlWrhr59+8LAwADXrl3DzZs3MWvWLPn5t2/fjiZNmqB169bYuHEjzp8/jzVr1gj1cgkRBiOElGmZmZls4sSJzMfHh9nY2DBzc3NWu3ZtNmXKFJaRkcEYY2zfvn2sZs2azMjIiLm6usqPPXz4MGvZsiUzMzNj1tbWrFmzZmzVqlXy5wGw3
3//nXXq1ImJxWJWvXp1tnXr1tJ+iYQIjnrzEkJUVlAvYELKI2ozJYQQQtREwZQQQghRE3VAIoSojFqJCOGoZEoIIYSoiYIpIYQQoiYKpoQQQoiaKJgSQgghaqJgSgghhKiJgikhhBCiJgqmhBBCiJoomBJCCCFq+j8TpRGD4baaFwAAAABJRU5ErkJggg==\n"},"metadata":{}},{"output_type":"display_data","data":{"text/plain":["
"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAAcoAAADvCAYAAAByipTtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA+PElEQVR4nO3deVxU1f/H8dewr4I74AIo5g65K+4iau6m5ZZKpplhaX37lmlulVlZfSv9aZmG5ZK7ZqYpLqC4b5jmkltqrriwCMo25/fHjcEJGBWBGeDzfDzOQ+bOvZf3XMGP595zz9UppRRCCCGEyJaVuQMIIYQQlkwKpRBCCGGCFEohhBDCBCmUQgghhAlSKIUQQggTpFAKIYQQJkihFEIIIUyQQimEEEKYIIVSCCGEMEEKpRD5RKfTMXnyZHPHEEI8ISmUokibP38+Op0ux7Znzx6z5vvrr7/Q6XR89tlnZs1hCY4ePUqfPn3w9vbGwcGBChUqEBwczIwZM4zW++ijj1izZo15QopiycbcAYQoCO+//z6+vr5Zlvv5+Zkhjfi3Xbt20bZtWypXrszw4cPx8PDg0qVL7Nmzh6+++orXXnvNsO5HH31Enz596Nmzp/kCi2JFCqUoFp555hkaNmxo7hgiB1OnTsXNzY39+/fj7u5u9N6NGzfME0qIf8ipV1HspaamUqpUKV588cUs78XHx+Pg4MBbb70FQEpKChMnTqRBgwa4ubnh7OxMy5Yt2bZtW75mvHHjBi+99BLly5fHwcGBgIAAfvjhhyzrLVmyhAYNGuDq6kqJEiWoW7cuX331leH91NRUpkyZQrVq1XBwcKB06dK0aNGC8PDwHL/3gQMH0Ol02X6/jRs3otPpWLduHQAJCQmMGTMGHx8f7O3tKVeuHMHBwRw6dMjk5zt79iy1a9fOUiQBypUrZ/hap9ORmJjIDz/8YDh9HhISYnj/8uXLDB06lPLly2Nvb0/t2rX5/vvvjfYXERGBTqdj6dKljBs3Dg8PD5ydnenevTuXLl0ymVMUT9KjFMVCXFwcN2/eNFqm0+koXbo0tra29OrVi1WrVvHtt99iZ2dnWGfNmjUkJyfTr18/QCucc+fOpX///gwfPpyEhATmzZtHx44d2bdvH08//XSeZ7937x5t2rThzJkzjBo1Cl9fX5YvX05ISAixsbGMHj0agPDwcPr3709QUBCffPIJACdOnGDnzp2GdSZPnsy0adMYNmwYjRs3Jj4+ngMHDnDo0CGCg4Oz/f4NGzakSpUqLFu2jCFDhhi9t3TpUkqWLEnHjh0BeOWVV1ixYgWjRo2iVq1a3Lp1i6ioKE6cOEH9+vVz/Ize3t7s3r2bY8eOUadOnRzXW7BggSH7yy+/DEDVqlUBuH79Ok2bNkWn0zFq1CjKli3Lhg0beOmll4iPj2fMmDFG+5o6dSo6nY533nmHGzdu8OWXX9K+fXuio6NxdHTMMYMohpQQRVhYWJgCsm329vaG9TZu3KgA9csvvxht37lzZ1WlShXD67S0NJWcnGy0zp07d1T58uXV0KFDjZYDatKkSSbznT9/XgFq+vTpOa7z5ZdfKkAtXLjQsCwlJUU1a9ZMubi4qPj4eKWUUqNHj1YlSpRQaWlpOe4rICBAdenSxWSm7Lz77rvK1tZW3b5927AsOTlZubu7G31uNzc3FRoa+tj737Rpk7K2tlbW1taqWbNm6u2331YbN25UKSkpWdZ1dnZWQ4YMybL8pZdeUp6enurmzZtGy/v166fc3NxUUlKSUkqpbdu2KUBVqFDBcOyUUmrZsmUKUF999dVj5xdFm5x6FcXC//3f/xEeHm7UNmzYYHi/Xbt2lClThqVLlxqW3blzh/DwcPr27WtYZm1tbehx6vV6bt++TVpaGg0bNnzo6cXcWr9+PR4eHvTv39+wzNbWltdff527d+8SGRkJgLu7O4mJiSZPo7q7u/PHH39w+vTpx8rQt29fUlNTWbVqlWHZpk2biI2NNTo+7u7u7N27lytXrjzW/oODg9m9ezfdu3fnyJEjfPrpp3Ts2JEKFSqwdu3ah26vlGLlypV069YNpRQ3b940tI4dOxIXF5fl72fw4MG4uroaXvfp0wdPT0/Wr1//WNlFMWDuSi1EfsroUe7fv/+h644YMUK5urqq+/fvK6WUmjt3rgJUdHS00Xrz589XdevWVba2tkY9VF9fX6P1yKMeZfXq1VXLli2zLI+OjlaAmjlzplJKqevXr6uaNWsaeksvvvii2rBhg9E2kZGRyt3dXQGqTp066q233lJHjhwxmTFDjRo1VHBwsOH1Cy+8oMqUKaNSU1MNy5YuXaocHByUlZWVatSokZo0aZI6e/bsI+0/Q3Jystq3b5969913lYODg7K1tVV//PGH4f3sepTXr1/P8cxBRlu1apVSKrNH+f3332f53i1btlTVq1d/rLyi6JMepRD/6NevHwkJCYae5rJly6hRowYBAQGGdRYuXEhISAhVq1Zl3rx5/Pbbb4SHh9OuXTv0er25ogPaoJfo6GjWrl1L9+7d2bZtG88884zRdcVWrVpx9uxZvv/+e+rUqcPcuXOpX78+c+fOfej++/bty7Zt27h58ybJycmsXbuW3r17Y2OTOdTh+eef59y5c8yYMQMvLy+mT59O7dq1jXrvD2NnZ0ejRo346KOPmD17NqmpqSxfvtzkNhnH/oUXXshy5iCjNW/e/JEzCGHE3JVaiPz0OD3K9PR05enpqfr166diYmKUjY1Nlh5hjx49VJUqVZRerzdaHhgYqLy9vY2WkUc9yg4dOigPDw+Vnp5utHzJkiXZXld98POMGDFCAer06dPZrpOQkKDq1aunKlSoYDKnUkodP35cAeqbb75Rq1evVoDatm2byW2uX7+uKlSooJo3b/7Q/Wfn6NGjClAjRowwLHNxccnSo0xLS1Ourq6qf//+D91nRo/y3XffNVqu1+uVp6en6tixY66yiqJLepRC/MPKyoo+ffrwyy+/sGDBAtLS0oyuv4F2jRK0a2IZ9u7dy+7du/MtV+fOnbl27ZrR9dO0tDRmzJiBi4sLrVu3BuDWrVtG21lZWeHv7w9AcnJytuu4uLjg5+dneN+UmjVrUrduXZYuXcrSpUvx9PSkVatWhvfT09OJi4sz2qZcuXJ4eXk9dP/btm0zOqYZMq4XVq9e3bDM2dmZ2NhYo/Wsra3p3bs3K1eu5NixY1n2ExMTk2XZjz/+SEJCguH1ihUruHr1Ks8884zJrKL4kdtDRLGwYcMGTp48mWV5YGAgVapUMbzu27cvM2bMYNKkSdStW5eaNWsard+1a1dWrVpFr1696NKlC+fPn+ebb76hVq1a3L17N9f5tmzZwv3797Ms79mzJy+//DLffvstISEhHDx4EB8fH1asWMHOnTv58ssvDQNShg0bxu3bt2nXrh0VK1bkwoULzJgxg6efftrwOWrVqkWbNm1o0KABpUqV4sCBA4bbOR5F3759mThxIg4ODrz00ktYWWX+XzshIYGKFSvSp08fAgICcHFxYfPmzez
fv5/PP//c5H5fe+01kpKS6NWrFzVq1CAlJYVdu3axdOlSfHx8jO5xbdCgAZs3b+aLL77Ay8sLX19fmjRpwscff8y2bdto0qQJw4cPp1atWty+fZtDhw6xefNmbt++bfQ9S5UqRYsWLXjxxRe5fv06X375JX5+fgwfPvyRjoUoRszdpRUiP5m6PQRQYWFhRuvr9XpVqVIlBagPP/wwy/70er366KOPlLe3t7K3t1f16tVT69atU0OGDHmiU685tQULFiiltFOYL774oipTpoyys7NTdevWzZJ9xYoVqkOHDqpcuXLKzs5OVa5cWY0YMUJdvXrVsM6HH36oGjdurNzd3ZWjo6OqUaOGmjp1ara3YWTn9OnThmxRUVFG7yUnJ6v//ve/KiAgQLm6uipnZ2cVEBCgZs2a9dD9btiwQQ0dOlTVqFFDubi4KDs7O+Xn56dee+01df36daN1T548qVq1aqUcHR0VYHQa9vr16yo0NFRVqlRJ2draKg8PDxUUFKTmzJljWCfj1OtPP/2k3n33XVWuXDnl6OiounTpoi5cuPBIx0EULzqlsjnfIYQQRVRERARt27Zl+fLl9OnTx9xxRCEg1yiFEEIIE6RQCiGEECZIoRRCCCFMkGuUQgghhAnSoxRCCCFMkEIphBBCmGDWCQemTZvGqlWrOHnyJI6OjgQGBvLJJ58YzcLxb6mpqUybNo0ffviBy5cvU716dT755BM6der0SN9Tr9dz5coVXF1d0el0efVRhBBCFDJKKRISEvDy8jKaPCO7Fc2mY8eOKiwsTB07dkxFR0erzp07q8qVK6u7d+/muM3bb7+tvLy81K+//qrOnj2rZs2apRwcHNShQ4ce6XteunTpoU8ZkCZNmjRpxaddunTJZN2wqME8MTExlCtXjsjISKM5JB/k5eXF+PHjCQ0NNSzr3bs3jo6OLFy48KHfIy4uDnd3dy5dukSJEiVynTU1NZVNmzbRoUMHbG1tc72fgiJ585fkzV+SN38V17zx8fFUqlSJ2NhY3NzcclzPouZ6zZhQuVSpUjmuk5ycjIODg9EyR0dHoqKiclz/wQmZMyZBdnR0xNHRMddZbWxscHJywtHRsVD8YEne/CV585fkzV/FNW9qairAQy/DWUyPUq/X0717d2JjY3MsegADBgzgyJEjrFmzhqpVq7JlyxZ69OhBenp6tk8omDx5MlOmTMmyfPHixTg5OeXpZxBCCFF4JCUlMWDAAOLi4kyeYbSYQjly5Eg2bNhAVFQUFStWzHG9mJgYhg8fzi+//IJOp6Nq1aq0b9+e77//nnv37mVZ/989yoyu9s2bN5/41Gt4eDjBwcGF4n9gkjd/Sd78JXnzV3HNGx8fT5kyZR5aKC3i1OuoUaNYt24d27dvN1kkAcqWLcuaNWu4f/8+t27dwsvLi7Fjxxo9KulB9vb22NvbZ1lua2v7RAdYt2AB7nfuYNu5c6H4wcrwpJ+7oEne/CV585fkzV9PmvdRtzVroVRK8dprr7F69WoiIiLw9fV95G0dHByoUKECqamprFy5kueffz4fk/7Lr79iPWwYgY6O6Bo3hpYtC+57CyEKVHp6uuFa1qNKTU3FxsaG+/fvk56enk/J8k5RzWttbY2Njc0T3wpo1kIZGhrK4sWL+fnnn3F1deXatWsAuLm5GQbaDB48mAoVKjBt2jRAe5r85cuXefrpp7l8+TKTJ09Gr9fz9ttvF1zwVq1QzZtjGxWF6twZfv0VchilK4QovO7evcvff//N416hUkrh4eHBpUuXCsX92kU5r5OTE56entjZ2eX6+5m1UM6ePRuANm3aGC0PCwsjJCQEgIsXLxrdCHr//n3ee+89zp07h4uLC507d2bBggW4u7sXUGrA1ZX0X37hZps2lDtyBDp1grVroX37gssghMhX6enp/P333zg5OVG2bNnHKiB6vZ67d+/i4uJi+kZ2C1EU8yqlSElJISYmhvPnz1OtWrVcfzazn3p9mIiICKPXrVu35vjx4/mU6DE4O7N3/Hi6hIVhtWEDdO0KK1dCly7mTiaEyAOpqakopShbtuxj30qm1+tJSUnBwcGh0BSeopg34/aRCxcuGNbPDcs/IhZMb2dH+rJl0LMnJCdDr16werW5Ywkh8lBhOBUpcpYXhV8K5ZOyt4dly6BvX0hNheeegyVLzJ1KCCFEHpFCmRdsbWHRIhg8GNLTYeBA+OEHc6cSQgiRB6RQ5hVrawgLg+HDQa+HkBCYM8fcqYQQIs/99ddf6HQ6oqOjzR2lQEihzEtWVvDtt/Daa9rrESPg66/Nm0kIUayEhISg0+mytEd9FGFeadOmDWPGjCnQ75lfLGJmniJFp4OvvgIHB5g+HUaPhvv3oSDv8xRCFGudOnUiLCzMaFl2M5SJRyM9yvyg08Enn8DEidrrd96B998Hy5hWVwiRG0pBYqJ52mP+22Fvb4+Hh4dRK1myJKA9WKJv375G66emplK1alV+/PFHAH777TdatGiBu7s7pUuXpmvXrpw9ezZvjuM/Vq5cSe3atbG3t8fHx4fPP//c6P1Zs2ZRrVo1HBwcKF++PH369DG8t2LFCgIDA3F2dqZ06dK0b9+exMTEPM33IOlR5hedDqZM0UbFjh8PkyZpPcupU7X3hBCFS1ISuLg80qpWgHtefu+7d8HZOU92NXDgQJ577jnDDfsAGzdu5N69e/Tq1QuAxMRE3nzzTfz9/bl79y4TJ06kV69eREdH58ntFgcPHuT5559n8uTJ9O3bl127dvHqq69SunRpQkJCOHDgAK+//joLFiwgMDCQ27dvs2PHDgCuXr3KwIEDmTJlCv369SMxMZEdO3Y89uxJj0MKZX4bNw4cHeHNN2HaNK1Yfv65FEshRL5Zt26doQhmGDduHOPGjaNjx444OzuzevVqBg0aBMBPP/1Ep06dcHV1BaB3795G237//feULVuW48ePU6dOnSfO98UXXxAUFMSECRMAeOqppzh+/DjTp08nJCSEixcv4uzsTNeuXXF1dcXb25t69eoBWqFMS0uja9eu+Pj4YGVlRd26dZ84kylSKAvCG29oPcvQUPjf/7RiOXOmNvhHCFE4ODlpPbtHoNfriY+Pp0SJEnkz081jPju3bdu2hilCM5QqVQrQHnr8/PPPs2jRIgYNGkRiYiJr165l7ty5hnVPnz7NxIkT2bt3Lzdv3kSv1wPalKJ5UShPnDhBjx49jJY1b96cL7/8kvT0dIKDg/H29qZKlSp06tSJTp060atXL5ycnAgICCAoKIgWLVrQoUMHOnbsSJ8+fQynlvODFMqC8uqr2gCfYcNg9mxtJp85c7TbSoQQlk+ne/TTn3q9dk+1s7NZ/kPs7OyMn59fju8PHDiQ1q1bc+PGDcLDw3F0dKT9A3NVd+vWDW9vb7777ju8vLzQ6/XUqVOHlJSUgoiPq6srhw4dIiIigk2bNjFx4kQmT57M/v37cXd3Z+PGjYSHh7Nr1y5mzJjB+PHj2bt372M9gepxSJemIA0dCgsWaL8433+vTVCQlmbuVEKIYiYwMJBKlSqxdOlSFi
1aRJ8+fQzPZrx16xanTp3ivffeIygoiJo1a3Lnzp08/f41a9Zk586dRst27tzJU089hfU/nQcbGxvat2/Pp59+yu+//85ff/3F1q1bAW1awaZNmzJ58mQOHz6MnZ0dq/Nx+lDpURa0gQO107D9+8PixVrPcvFieIJHwAghxIOSk5MNjy3MYGNjQ5kyZQyvBwwYwDfffMOff/7Jli1bDMtLlixJ6dKlmTNnDp6enly8eJGxY8fmKkdMTEyWSQk8PT35z3/+Q6NGjfjggw/o27cvu3fvZubMmcyaNQvQrrGeO3eOVq1aUbJkSdavX49er6d69ers3buXzZs3ExgYiK+vL/v37ycmJoaaNWvmKuMjUcVMXFycAlRcXNwT7SclJUWtWbNGpaSk5G4Ha9cqZWenFCjVrZtS9+49UZ6HeeK8BUzy5i/J+3D37t1Tx48fV/dy8buZnp6u7ty5o9LT0/MhmWlDhgxRQJZWvXp1o/WOHz+uAOXt7a3S0tKM8oaHh6uaNWsqe3t75e/vryIiIhSgVq9erZRS6vz58wpQhw8fzjFH69ats83xwQcfKKWUWrFihapVq5aytbVVlStXVtOnTzdsu2PHDtW6dWtVsmRJ5ejoqPz9/dXSpUsNuTt06KDKlCmj7O3t1VNPPaVmzJiRYw5Tf4+PWg+kR2ku3brBzz9rTxz55Rfo0UN78shjXrQXQogHzZ8/n/nz5z90vZo1axpuqcgYrJOhffv2WR5nqB64/cLHx+eht2P8+xGJ/9a7d+8so2sztGjRIsfta9asyYYNG/J2sNRDyDVKc+rUCX79VSuOmzZpz7J8xFF1QgghCoYUSnNr1w42bgRXV4iIgI4dIS7O3KmEEEL8QwqlJWjRAjZvBnd32LULgoPh9m1zpxJCCIEUSsvRuDFs3QqlS8P+/RAUBDEx5k4lhBDFnhRKS1Kvnnb6tXx5iI6GNm3gX0O8hRAF62GDVoRly4u/PymUlqZOHYiMBC8vOH4cWreGv/82dyohip2MG98LajYakT+SkpIADBMq5IbcHmKJqleH7du1gT5//gmtWmmnZX18zJ1MiGLDxsYGJycnYmJisLW1fazbEPR6PSkpKdy/f79Abl94UkUxr1KKpKQkbty4gbu7u+E/PrkhhdJSVa2qFcugIDh7NrNYmpi/UQiRd3Q6HZ6enpw/f54LFy481rZKKe7du4ejoyO6QvCkoKKc193dHQ8Pjyf6flIoLZm3t3Yatn17OHlSK5ZbtkB+TtUkhDCws7OjWrVqj336NTU1le3bt9OqVasnOuVXUIpqXltb2yfqSWaQQmnpKlTQBvi0bw/HjmnXLDdvBn9/cycToliwsrLCwcHhsbaxtrYmLS0NBweHQlF4JK9pln8yWmijYLdt00bFxsRA27Zw8KC5UwkhRLEghbKwKFNGu0bZpIk2GUFQEOzZY+5UQghR5EmhLEzc3bU5YVu00Ka5Cw7WBvwIIYTIN1IoC5sSJeC337RbR+7e1SZW37zZ3KmEEKLIkkJZGDk7w7p18MwzcO8edO0K69ebO5UQQhRJUigLK0dH7fmVPXpAcjL07Alr1pg7lRBCFDlSKAsze3tYvhyefx5SU6FPH1i61NyphBCiSJFCWdjZ2sKiRTBoEKSnw4AB8OOP5k4lhBBFhhTKosDGBubPh2HDQK+HkBCYM8fcqYQQokiQQllUWFnBt9/CqFGgFIwYATNmmDuVEEIUelIoixIrK/j6a3jrLe3166/D9OnmzSSEEIWcFMqiRqeDTz+F997TXr/9NnzwgXkzCSFEISaTohdFOp1WHB0ctII5cSJWiYnQtKm5kwkhRKEjPcqibPx4+OwzAKw/+YS6332n3XMphBDikUmhLOr+8x+YOROAKuvXYxMYCEePmjmUEEIUHlIoi4PQUNKWLye5RAl0R49Cw4ZaTzM93dzJhBDC4kmhLCZUjx5s++or9J07Q0oK/Pe/2sTqf/1l7mhCCGHRpFAWI8klS5K+ejV89x24uGiP6PL3h7Aw7d5LIYQQWUihLG50Om0GnyNHoHlzSEiAoUOhVy+4ccPc6YQQwuJIoSyuqlSByEj4+GNtvtiff4Y6dbQ/hRBCGEihLM6sreGdd2D/fqhbF2JitMd1vfQSxMebO50QQlgEsxbKadOm0ahRI1xdXSlXrhw9e/bk1KlTD93uyy+/pHr16jg6OlKpUiXeeOMN7t+/XwCJi6iAAK1Y/ve/2qnZ77/Xlu3YYe5kQghhdmYtlJGRkYSGhrJnzx7Cw8NJTU2lQ4cOJCYm5rjN4sWLGTt2LJMmTeLEiRPMmzePpUuXMm7cuAJMXgTZ22tT30VEgI+PNhq2dWttCjyZpEAIUYyZdQq73377zej1/PnzKVeuHAcPHqRVq1bZbrNr1y6aN2/OgAEDAPDx8aF///7s3bs33/MWC61aaQN93nhD61lOnw6//QYLFmi9TCGEKGYsaq7XuLg4AEqVKpXjOoGBgSxcuJB9+/bRuHFjzp07x/r16xk0aFC26ycnJ5P8QI8o/p9rb6mpqaSmpuY6a8a2T7KPgvRYeR0d4Ztv0HXujPXIkeiOHkU1aoR+8mT0b76pXdvMZ0X6+FoAyZu/JG/+yqu8j7q9TinLuIFOr9fTvXt3YmNjiYqKMrnu119/zVtvvYVSirS0NF555RVmz56d7bqTJ09mypQpWZYvXrwYJyenPMlelNnFxvL0rFl47tsHwK2aNTk0ejRJHh5mTiaEEE8mKSmJAQMGEBcXR4kSJXJcz2IK5ciRI9mwYQNRUVFUrFgxx/UiIiLo168fH374IU2aNOHMmTOMHj2a4cOHM2HChCzrZ9ejrFSpEjdv3jR5YB4mNTWV8PBwgoODsbW1zfV+CsoT5VUK3Y8/Yv3mm+gSElAuLqR/9hnqxRe1wT+WltcMJG/+krz5q7jmjY+Pp0yZMg8tlBZx6nXUqFGsW7eO7du3myySABMmTGDQoEEMGzYMgLp165KYmMjLL7/M+PHjsbIyHp9kb2+Pvb19lv3Y2trmyQ9EXu2noOQ677Bh0L49DBmCbvt2bF55Bdatg7lzoXz5vA/6j2JzfM1E8uYvyZu/njTvo25r1lGvSilGjRrF6tWr2bp1K76+vg/dJikpKUsxtP7nmpmFdI6LLh8f2LpVG+BjZ6cVyjp1YPVqcycTQoh8Y9ZCGRoaysKFC1m8eDGurq5cu3aNa9euce/ePcM6gwcP5t133zW87tatG7Nnz2bJkiWcP3+e8PBwJkyYQLdu3QwFU+Qja2t46y04cEAbBXvzJjz7LISEwD+DsYQQoijJ1anXS5cuodPpDKdJ9+3bx+LFi6lVqxYvv/zyI+8nYwBOmzZtjJaHhYUREhICwMWLF416kO+99x46nY733nuPy5cvU7ZsWbp168bUqVNz81FEbtWtC3v3wuTJ2v2XP/wA27Zpf/7r71MIIQqzXBXKAQMG8PLLLzNo0CCuXbtGcHAwtWvXZtGiRVy7do2JEyc+0n4e5VRpRESEcWAbGyZNmsSkSZNyE13kJXt7mDYNunaFwYPh3Dlo2xbefBOmT
gUHB3MnFEKIJ5arU6/Hjh2jcePGACxbtow6deqwa9cuFi1axPz58/MynygMmjeH6GgYPlx7/cUX2sOhDx82aywhhMgLuSqUqamphpGkmzdvpnv37gDUqFGDq1ev5l06UXi4usKcOfDLL9oo2D/+gCZN4KOPIC3N3OmEECLXclUoa9euzTfffMOOHTsIDw+nU6dOAFy5coXSpUvnaUBRyHTtCkePagN8UlNh/HhtWrwzZ8ydTAghciVXhfKTTz7h22+/pU2bNvTv35+Af+YAXbt2reGUrCjGypaFFSu0gT0lSsDu3fD00/DttyC38AghCplcDeZp06YNN2/eJD4+npIlSxqWv/zyyzItnNDodNoAn9at4cUXtRGxr7wCa9dqkxR4epo7oRBCPJJc9Sjv3btHcnKyoUheuHCBL7/8klOnTlGuXLk8DSgKOW9v2LxZG+Bjbw/r12u3lqxYYe5kQgjxSHJVKHv06MGPP/4IQGxsLE2aNOHzzz+nZ8+eOU5OLooxKyvtsV0HD0K9enDrFjz3HAwaBLGx5k4nhBAm5apQHjp0iJYtWwKwYsUKypcvz4ULF/jxxx/5+uuv8zSgKEJq14Y9e7QBPlZWsHAh+PvDli3mTiaEEDnKVaFMSkrC1dUVgE2bNvHss89iZWVF06ZNuXDhQp4GFEWMnR18+CFERYGfH1y6pE22PmYMPDB1oRBCWIpcFUo/Pz/WrFnDpUuX2LhxIx06dADgxo0bT/ToKlGMNGumTVLwyiva66++ggYNtNOzQghhQXJVKCdOnMhbb72Fj48PjRs3plmzZoDWu6xXr16eBhRFmLMzzJ6tDfDx8IATJ6BpU/jgA5mkQAhhMXJVKPv06cPFixc5cOAAGzduNCwPCgrif//7X56FE8XEM8/AsWPaAJ+0NJg4Ees2bXC+fNncyYQQIvcPbvbw8MDDw4O///4bgIoVK8pkAyL3SpeGpUuhRw8IDcVq3z7aHTyIio6GiRPhIQ/0FkKI/JKrHqVer+f999/Hzc0Nb29vvL29cXd354MPPkCv1+d1RlFc6HQwcCAcPYr+mWewSk/H+rvvtEE/o0fDtWvmTiiEKIZyVSjHjx/PzJkz+fjjjzl8+DCHDx/mo48+YsaMGUyYMCGvM4riplIl0n/+mR1Tp6Jv2RKSk+Hrr6FKFXj7be1h0UIIUUByVSh/+OEH5s6dy8iRI/H398ff359XX32V7777Th6zJfLM7dq1Sd+8GcLDtUE+9+7B9Ong6wsTJshkBUKIApGrQnn79m1q1KiRZXmNGjW4ffv2E4cSwkCn0+6z3LULfv0V6teHu3e1ezF9fLQ/ExLMnVIIUYTlqlAGBAQwc+bMLMtnzpyJv7//E4cSIgudDjp3hgMHYNUqqFMH4uK0nqWvL3z6KSQmmjulEKIIytWo108//ZQuXbqwefNmwz2Uu3fv5tKlS6xfvz5PAwphRKeDXr200bHLlsHkyXDqFLzzjjbx+rvvwogR4OBg7qRCiCIiVz3K1q1b8+eff9KrVy9iY2OJjY3l2Wef5Y8//mDBggV5nVGIrKysoF8/7f7LH37QepXXr2tT4fn5wTffQEqKuVMKIYqAXBVKAC8vL6ZOncrKlStZuXIlH374IXfu3GHevHl5mU8I02xstOdenjoFc+ZApUpw+TKMHAnVq0NYmMzyI4R4IrkulEJYFFtbGD4cTp+GGTO0KfH++guGDoVatWDRIkhPN3dKIUQhJIVSFC329jBqFJw9C599BmXKaMXzhRe0R3qtXAkyKYYQ4jFIoRRFk5MT/Oc/cP48TJ0K7u5w/Dj06aM9peSXX0Apc6cUQhQCjzXq9dlnnzX5fqzcAC4sjYsLjBsHr74K//uf1qKjoXt3aNxYe1JJcLA2mlYIIbLxWD1KNzc3k83b25vBgwfnV1Yhcs/dHaZM0XqYY8dqPc59+6BjR2jVCiIjzZ1QCGGhHqtHGRYWll85hCgYpUvDtGnabSSffAKzZkFUFLRpA0FBWg/zn3uDhRAC5BqlKK7Kl9cmKDh7Vjsta2sLW7ZAYCB06QIHD5o7oRDCQkihFMVbhQrwf/+njYwdNgysrWH9emjYEJ59Fo4eNXdCIYSZSaEUAsDbG777Dk6ehEGDtME9q1dDQIA2A9DJk+ZOKIQwEymUQjzIzw9+/BH++AOef167hWTpUqhdG4YM0U7VCiGKFSmUQmSnZk2tQEZHaxOw6/VaAa1RA15+GS5eNHdCIUQBkUIphCkBAbBmjXYrSadO2ryx330H1arBa6/B1avmTiiEyGdSKIV4FI0awYYN2q0kbdtqTyaZOROqVIG33oKYGHMnFELkEymUQjyO5s1h61atNW8O9+/D559j89RT1J0zB92qVdrjvoQQRYYUSiFyo21b2LFD62U2bIguMZEq69dj06+f9uSSp57SnlwSFqbdeiLzygpRaD3WzDxCiAfodNp1y44dSVu3jovffIPvpUvojh3TiuPp01qhBG2CgxYtoGVLrfn7a8/SFEJYPPlNFeJJ6XSoTp04qtdTqXNnbBMTYdcu7Xrmjh3aQKDr17VHfK1cqW3j4qLNApRRPBs31uafFUJYHCmUQuQ1d3fo3FlroF3HPHBAK5pRUbBzJ8TFwaZNWgNtCr0GDTILZ/Pm2ry0Qgizk0IpRH5zcNAKYIsW2uv0dG1Cgx07MtuVK7Bnj9Y++0xbr1YtrWhmFM/KleVxYEKYgRRKIQqatbV2jdLfH0JDtYE+f/2Veao2KgpOnNAeNH38OHz7rbZdxYrGhbN2bbCS8XhC5DcplEKYm04Hvr5aGzRIWxYTo13nzCicBw/C33/DTz9pDbRTvM2bZxbOhg3B3t5sH0OIokoKpRCWqGxZbeq8Hj2014mJsHdvZq9z926IjYVff9UaaEWycePMXmdgILi5me0jCFFUSKEUojBwdoZ27bQG2lR60dGZhXPHDq0XmvE1aD1Vf3/j07VeXmb7CEIUVlIohSiMbGy0U60NG8KYMdp1ztOnM0/V7tihPenkyBGtzZypbefrCy1bogsMpMS9e1pP1d3dnJ9ECIsnhVKIokCn02YDeuopeOklbdnVq8YDhI4cgfPn4fx5bH78kbYAb7yh9TKrVcvaqlYFR0dzfiohLIJZC+W0adNYtWoVJ0+exNHRkcDAQD755BOqV6+e4zZt2rQhMjIyy/LOnTvza8a1GiEEeHrCc89pDSA+Xru2uWMH+h07SDt8GLuEBO3WlCtXIJvfKypWzL6IVqmi3fYiRDFg1kIZGRlJaGgojRo1Ii0tjXHjxtGhQweOHz+Os7NzttusWrWKlJQUw+tbt24REBDAcxn/GAghsleiBHTsCB07kp6ayob16+nctCm2f/2VOeXemTOZX8fGaiNt//4btm0z3pdOp93XWa2a9rDrfxdROztzfEIh8oVZC+Vvv/1m9Hr+/PmUK1eOgwcP0qpVq2y3KVWqlNHrJUuW4OTkJIVSiNwoVUqbh7ZJE+PlSsGtW5lF898tIQEuXNDa5s3G21pZgbd31l6on592jdTWtuA+nxB5wKKuUcbFxQFZi6Ep
8+bNo1+/fjn2QJOTk0lOTja8jo+PByA1NZXU1NRcZ83Y9kn2UZAkb/4qknnd3DIHDD1IKbhxA93Zs3DmDLrTp9GdOYPuzBntdWKi4VqoYYq+jE2trcHHB+Xnh/Lzg3/+VH5+WnHNYaL4Inl8LUhxzfuo2+uUsozn/+j1erp3705sbCxRUVGPtM2+ffto0qQJe/fupXHjxtmuM3nyZKZMmZJl+eLFi3GSSaiFyFtKYX/nDi5Xr+J85Yr25wPN5oH/tP6b3tqapPLluevpSaKnJ4leXtrXXl4klSmjzWgkRB5KSkpiwIABxMXFUaJEiRzXs5hCOXLkSDZs2EBUVBQVK1Z8pG1GjBjB7t27+f3333NcJ7seZaVKlbh586bJA/MwqamphIeHExwcjG0hOJUkefOX5H0ESsGVK5k9zwd7oufOobt/P+dN7eyIq1AB527d0LVvj2rZUru31ELJz0P+yqu88fHxlClT5qGF0iJOvY4aNYp169axffv2Ry6SiYmJLFmyhPfff9/kevb29thnM62Xra1tnvxA5NV+CorkzV+S9yF8fLTWvr3xcr1eGzT04GCijHb2LLqUFNzPn4evv9aarS00a6btJygIGjWyyGuf8vOQv54076Nua9ZCqZTitddeY/Xq1URERODr6/vI2y5fvpzk5GReeOGFfEwohCgQVlbaKNrKlTNnH8qQnk7quXMc+fZb6t++jdWWLXDxImzfrrWJE8HVFdq00Ypm+/bak1fkSSsij5i1UIaGhrJ48WJ+/vlnXF1duXbtGgBubm44/nOj8+DBg6lQoQLTpk0z2nbevHn07NmT0vLMPiGKtn8GAF1u2ZKAzp2xsrHRZh3askUbcbt1K9y+Db/8ojUAD4/MohkUBJUqmfcziELNrIVy9uzZgDaJwIPCwsIICQkB4OLFi1j961FCp06dIioqik3/GlEnhCgGdDrtVhM/PxgxQjttGx2tFc3Nm7WZiK5dg0WLtAZQvXpm4WzTBkqWNOcnEIWM2U+9PkxERESWZdWrV3+kbYUQxYCVFdSvr7W334b797UZiDZv1nqd+/fDqVNamzVLW79BA61otm+vPWVFZhkSJljEYB4hhMgzDg7Qtq3Wpk7VZhiKiMg8VXvypFY89++HadO09Vu0yCycTz8tt6III1IohRBFm7s79OypNdBG127Zklk4r17NPG0L2mnZdu0yr2/6+cnAoGJOCqUQonipWBGGDNGaUnDiRGbR3LYN7tyBlSu1BtpI3IyiGRSkTfknihUplEKI4kun024lqVULXntNeyD2/v2ZhXPXLu1WlO+/1xpA3bqZhbNVK+3WFFGkSaEUQogMNjbaRAbNmsF772kPto6Kyjw1Gx0NR49q7X//09Zv2jRzRG2TJhY58YF4MlIohRAiJ87OhkeTARATo52ezSic589rhTQqCqZMARcXaNUKq7ZtKWFrq926Igo9KZRCCPGoypaF55/XGsC5c8YTH9y8CevXY71+PW0BNXWqNvq2XTutycCgQkkKpRBC5FaVKlobPlzrPf7+O2zejD48HH1kJDYxMbBsmdZAG0jUrl1m8axc2bz5xSORQimEEHnBykq7B/Ppp0kfPZoNP/9M5zJlsNm+Xett7t6t3Zry449aA6haNbO32batjKi1UFIohRAiHyhbW1Tz5tqUeRMnQlKSNop22zatcO7fr81Ze/YsfPedtlHt2pmFs3VrmWrPQkihFEKIguDklDn7D0B8vDYv7datWouOhj/+0NqMGdq1zPr1MwtnixbaYCFR4KRQCiGEOZQoAV26aA20gUCRkZmF8+RJOHhQa9Ona7eiNGmSWTibNpU5aguIFEohhLAEZcpA795aA7hyJfM07dat8NdfsHOn1j74QCuSzZtnFs6GDbViKvKcHFUhhLBEXl4wcKDWQLtn88HCefVq5py1oM0Q1LJlZuEMCNAGGIknJoVSCCEKA19frQ0dqs1Re+pUZtHctk17ePX69VoDKFVKG0iUUThr1JB7OHNJCqUQQhQ2Op1W+GrUgFdfzbyHM6NwRkZqhXPVKq0BeHhkFs127bSiKx6JFEohhCjsHriHkzffhNRUbRBQRuHcuROuXYPFi7UG4ONjPKJW5EgKpRBCFDW2ttqo2KZNYdw4uH8f9uzJLJx792qDg/55KootEFy2LNbVqkGFCpnNy8v4aycnc38ys5BCKYQQRZ2Dg3a9sk0beP99uHtX62X+UzjVwYM4xcRok76b4u6etXj+++vy5cHaugA+VMGRQimEEMWNi4vRU1HSbtxgd1gYgd7e2Fy/rt2acvmy1jK+TkyE2FitHT+e876trLTroTkV0ow/3dwKzeAiKZRCCFHclSzJnRo1UJ07Z/88TaW0mYSyK6APfn3tGqSna6+vXDH9PZ2cTBfSChXA0xPs7fPnMz8GKZRCCCFM0+m0HqCbG9SsmfN66elw40bWAvrvonrnjjb37ZkzWjOlTJkshVTn4UH5K1e0a7AFMJG8FEohhBB5w9pa6wV6emozBeUkKSmz12mqh5qSok3td/MmHDli2NwGaAqkBQVJoRRCCFEEOTlpD7H288t5HaW0e0GzKaT6v/8m/uRJnCtWLJC4UiiFEEJYHp0OSpfWmr+/0VvpqalErl9PZ2/vAokiEwEKIYQQJkihFEIIIUyQQimEEEKYIIVSCCGEMKHYDeZRSgEQHx//RPtJTU0lKSmJ+Ph4bLO7QdfCSN78JXnzl+TNX8U1b0YdyKgLOSl2hTIhIQGASpUqmTmJEEIIS5CQkICbm1uO7+vUw0ppEaPX67ly5Qqurq7onmCewfj4eCpVqsSlS5coUaJEHibMH5I3f0ne/CV581dxzauUIiEhAS8vL6yscr4SWex6lFZWVlTMw5tUS5QoUSh+sDJI3vwlefOX5M1fxTGvqZ5kBhnMI4QQQpgghVIIIYQwQQplLtnb2zNp0iTsLeARMI9C8uYvyZu/JG/+krymFbvBPEIIIcTjkB6lEEIIYYIUSiGEEMIEKZRCCCGECVIohRBCCBOkUD7E9u3b6datG15eXuh0OtasWWP0vlKKiRMn4unpiaOjI+3bt+f06dNmyTpt2jQaNWqEq6sr5cqVo2fPnpw6dcponfv37xMaGkrp0qVxcXGhd+/eXL9+3Sx5Z8+ejb+/v+Gm4WbNmrFhwwaLzJqdjz/+GJ1Ox5gxYwzLLCnz5MmT0el0Rq1GjRoWmTXD5cuXeeGFFyhdujSOjo7UrVuXAwcOGN63pN83Hx+fLMdXp9MRGhoKWN7xTU9PZ8KECfj6+uLo6EjVqlX54IMPjOY5taTjC9rUcmPGjMHb2xtHR0cCAwPZv39/wedVwqT169er8ePHq1WrVilArV692uj9jz/+WLm5uak1a9aoI0eOqO7duytfX1917969As/asWNHFRYWpo4dO6aio6NV586dVeXKldXdu3cN67zyyiuqUqVKasuWLerAgQOqadOmKjAwsMCzKqXU2rVr1a+//qr+/PNPderUKTVu3Dhla2urjh07ZnFZ/23fvn3Kx8dH+fv7q9GjRxuWW1LmSZM
mqdq1a6urV68aWkxMjEVmVUqp27dvK29vbxUSEqL27t2rzp07pzZu3KjOnDljWMeSft9u3LhhdGzDw8MVoLZt26aUsrzjO3XqVFW6dGm1bt06df78ebV8+XLl4uKivvrqK8M6lnR8lVLq+eefV7Vq1VKRkZHq9OnTatKkSapEiRLq77//LtC8Uigfw78LpV6vVx4eHmr69OmGZbGxscre3l799NNPZkho7MaNGwpQkZGRSiktm62trVq+fLlhnRMnTihA7d6921wxjZQsWVLNnTvXorMmJCSoatWqqfDwcNW6dWtDobS0zJMmTVIBAQHZvmdpWZVS6p133lEtWrTI8X1L/30bPXq0qlq1qtLr9RZ5fLt06aKGDh1qtOzZZ59VAwcOVEpZ3vFNSkpS1tbWat26dUbL69evr8aPH1+geeXU6xM4f/48165do3379oZlbm5uNGnShN27d5sxmSYuLg6AUqVKAXDw4EFSU1ON8taoUYPKlSubPW96ejpLliwhMTGRZs2aWXTW0NBQunTpYpQNLPP4nj59Gi8vL6pUqcLAgQO5ePGixWZdu3YtDRs25LnnnqNcuXLUq1eP7777zvC+Jf++paSksHDhQoYOHYpOp7PI4xsYGMiWLVv4888/AThy5AhRUVE888wzgOUd37S0NNLT03FwcDBa7ujoSFRUVIHmLXaTouela9euAVC+fHmj5eXLlze8Zy56vZ4xY8bQvHlz6tSpA2h57ezscHd3N1rXnHmPHj1Ks2bNuH//Pi4uLqxevZpatWoRHR1tcVkBlixZwqFDh4yuk2SwtOPbpEkT5s+fT/Xq1bl69SpTpkyhZcuWHDt2zOKyApw7d47Zs2fz5ptvMm7cOPbv38/rr7+OnZ0dQ4YMsejftzVr1hAbG0tISAhgeT8LAGPHjiU+Pp4aNWpgbW1Neno6U6dOZeDAgYDl/Xvm6upKs2bN+OCDD6hZsybly5fnp59+Yvfu3fj5+RVoXimURVRoaCjHjh0jKirK3FFMql69OtHR0cTFxbFixQqGDBlCZGSkuWNl69KlS4wePZrw8PAs/8u1RBk9BQB/f3+aNGmCt7c3y5Ytw9HR0YzJsqfX62nYsCEfffQRAPXq1ePYsWN88803DBkyxMzpTJs3bx7PPPMMXl5e5o6So2XLlrFo0SIWL15M7dq1iY6OZsyYMXh5eVns8V2wYAFDhw6lQoUKWFtbU79+ffr378/BgwcLNIecen0CHh4eAFlGsl2/ft3wnjmMGjWKdevWsW3bNqNHinl4eJCSkkJsbKzR+ubMa2dnh5+fHw0aNGDatGkEBATw1VdfWWTWgwcPcuPGDerXr4+NjQ02NjZERkby9ddfY2NjQ/ny5S0u84Pc3d156qmnOHPmjEUeX09PT2rVqmW0rGbNmobTxZb6+3bhwgU2b97MsGHDDMss8fj+97//ZezYsfTr14+6desyaNAg3njjDaZNmwZY5vGtWrUqkZGR3L17l0uXLrFv3z5SU1OpUqVKgeaVQvkEfH198fDwYMuWLYZl8fHx7N27l2bNmhV4HqUUo0aNYvXq1WzduhVfX1+j9xs0aICtra1R3lOnTnHx4kWz5M2OXq8nOTnZIrMGBQVx9OhRoqOjDa1hw4YMHDjQ8LWlZX7Q3bt3OXv2LJ6enhZ5fJs3b57ldqY///wTb29vwPJ+3zKEhYVRrlw5unTpYlhmicc3KSkpy8OJra2t0ev1gOUeXwBnZ2c8PT25c+cOGzdupEePHgWbN0+HBhVBCQkJ6vDhw+rw4cMKUF988YU6fPiwunDhglJKG57s7u6ufv75Z/X777+rHj16mG049ciRI5Wbm5uKiIgwGraelJRkWOeVV15RlStXVlu3blUHDhxQzZo1U82aNSvwrEopNXbsWBUZGanOnz+vfv/9dzV27Fil0+nUpk2bLC5rTh4c9aqUZWX+z3/+oyIiItT58+fVzp07Vfv27VWZMmXUjRs3LC6rUtotNzY2Nmrq1Knq9OnTatGiRcrJyUktXLjQsI4l/b4ppVR6erqqXLmyeuedd7K8Z2nHd8iQIapChQqG20NWrVqlypQpo95++23DOpZ2fH/77Te1YcMGde7cObVp0yYVEBCgmjRpolJSUgo0rxTKh9i2bZsCsrQhQ4YopbQh1RMmTFDly5dX9vb2KigoSJ06dcosWbPLCaiwsDDDOvfu3VOvvvqqKlmypHJyclK9evVSV69eNUveoUOHKm9vb2VnZ6fKli2rgoKCDEXS0rLm5N+F0pIy9+3bV3l6eio7OztVoUIF1bdvX6N7Ei0pa4ZffvlF1alTR9nb26saNWqoOXPmGL1vSb9vSim1ceNGBWSbwdKOb3x8vBo9erSqXLmycnBwUFWqVFHjx49XycnJhnUs7fguXbpUValSRdnZ2SkPDw8VGhqqYmNjCzyvPGZLCCGEMEGuUQohhBAmSKEUQgghTJBCKYQQQpgghVIIIYQwQQqlEEIIYYIUSiGEEMIEKZRCCCGECVIohRBCCBOkUAohhBAmSKEUopCLiYlh5MiRVK5cGXt7ezw8POjYsSM7d+4EQKfTsWbNGvOGFKIQk+dRClHI9e7dm5SUFH744QeqVKnC9evX2bJlC7du3TJ3NCGKBJnrVYhCLDY2lpIlSxIREUHr1q2zvO/j48OFCxcMr729vfnrr78A+Pnnn5kyZQrHjx83PLx3/Pjx2Nho/3/W6XTMmjWLtWvXEhERgaenJ59++il9+vQpkM8mhKWQU69CFGIuLi64uLiwZs0akpOTs7y/f/9+QHtm4tWrVw2vd+zYweDBgxk9ejTHjx/n22+/Zf78+UydOtVo+wkTJtC7d2+OHDnCwIED6devHydOnMj/DyaEBZEepRCF3MqVKxk+fDj37t2jfv36tG7dmn79+uHv7w9oPcPVq1fTs2dPwzbt27cnKCiId99917Bs4cKFvP3221y5csWw3SuvvMLs2bMN6zRt2pT69esza9asgvlwQlgA6VEKUcj17t2bK1eusHbtWjp16kRERAT169dn/vz5OW5z5MgR3n//fUOP1MXFheHDh3P16lWSkpIM6/37SfHNmjWTHqUodmQwjxBFgIODA8HBwQQHBzNhwgSGDRvGpEmTCAkJyXb9u3fvMmXKFJ599tls9yWEyCQ9SiGKoFq1apGYmAiAra0t6enpRu/Xr1+fU6dO4efnl6VZWWX+s7Bnzx6j7fbs2UPNmjXz/wMIYUGkRylEIXbr1i2ee+45hg4dir+/P66urhw4cIBPP/2UHj16ANrI1y1bttC8eXPs7e0pWbIkEydOpGvXrlSuXJk+ffpgZWXFkSNHOHbsGB9++KFh/8uXL6dhw4a0aNGCRYsWsW/fPubNm2eujyuEeSghRKF1//59NXbsWFW/fn3l5uamnJycVPXq1dV7772nkpKSlFJKrV27Vvn5+SkbGxvl7e1t2Pa3335TgYGBytHRUZUoUUI1btxYzZkzx/A+oP7v//5PBQcHK3t7e+Xj46OWLl1a0B9RCLOTUa9CiGxlN1pWiOJIrlEKIYQQJkihFE
IIIUyQwTxCiGzJVRkhNNKjFEIIIUyQQimEEEKYIIVSCCGEMEEKpRBCCGGCFEohhBDCBCmUQgghhAlSKIUQQggTpFAKIYQQJvw/qzdWW4rbTREAAAAASUVORK5CYII=\n"},"metadata":{}},{"output_type":"display_data","data":{"text/plain":["
"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAAdMAAADvCAYAAACpMT7PAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABXg0lEQVR4nO3dd1hT1xsH8G8IEDayhwNRUXHgHriwiuLGURW0jtpWf462tlpbZxU31jpqnbWOOsEKWlsHWhEHDtzWRRXECjhQQEAgkPP745hAZGaRBN/P89yH5Obm3pNDkjdnCxhjDIQQQghRmoG2E0AIIYToOwqmhBBCiIoomBJCCCEqomBKCCGEqIiCKSGEEKIiCqaEEEKIiiiYEkIIISqiYEoIIYSoiIIpIYQQoiIKpkTvjR49GjVr1tR2MpTSuXNndO7cWdvJKNHcuXMhEAi0nQxCdB4FU6IxAoGgXFtkZKS2k6rzatasWWL+9ejRQ9vJw+jRo2FhYaHtZGhdbm4uVq1ahWbNmsHKygpVqlRBw4YNMXbsWNy9e1d23Llz5zB37lykpqZqL7FErQy1nQBSef32229y97dv346IiIgi+z09PVW6zqZNmyCRSFQ6hz5o2rQppkyZUmS/q6urFlJDijNo0CAcPnwYgYGB+OyzzyAWi3H37l0cOnQI7dq1Q/369QHwYDpv3jyMHj0aVapU0W6iiVpQMCUa89FHH8ndP3/+PCIiIorsf1dWVhbMzMzKfR0jIyOl0qdvqlatWmbeEe25dOkSDh06hIULF2LGjBlyj61Zs4ZKoZUcVfMSrercuTMaNWqEy5cvo1OnTjAzM5N9ER04cAC9e/eGq6srRCIRateujfnz5yM/P1/uHO+2mcbHx0MgEOCHH37Axo0bUbt2bYhEIrRq1QqXLl0qM00vX77E1KlT0bhxY1hYWMDKygo9e/bE9evX5Y6LjIyEQCBASEgIFi5ciGrVqsHExARdu3bFv//+W+S80rSYmpqidevWOH36tBI5VrIffvgBAoEAjx49KvLY9OnTYWxsjFevXgEATp8+jcGDB6NGjRoQiUSoXr06vvrqK7x580ataXpXaGgoWrRoAVNTU9jb2+Ojjz7CkydP5I5JTk7Gxx9/jGrVqkEkEsHFxQX+/v6Ij4+XHRMTEwM/Pz/Y29vD1NQU7u7uGDNmTKnX7tOnD2rVqlXsY97e3mjZsqXsfkREBDp06IAqVarAwsIC9erVKxIg3/XgwQMAQPv27Ys8JhQKYWdnB4C3Q3/zzTcAAHd3d1l1feHXt2PHDlk+2draIiAgAI8fP5Y7Z+HPTrt27WT5sH79+lLTSTSDSqZE61JSUtCzZ08EBATgo48+gpOTEwBg69atsLCwwNdffw0LCwv8/fffmDNnDtLT07Fs2bIyz7tr1y68fv0a48aNg0AgQHBwMAYOHIiHDx+WWpp9+PAhwsPDMXjwYLi7u+Pp06fYsGEDfHx8cPv27SLVqkuWLIGBgQGmTp2KtLQ0BAcHY/jw4bhw4YLsmM2bN2PcuHFo164dJk+ejIcPH6Jfv36wtbVF9erVy5VPYrEYL168KLLf3NwcpqamGDJkCKZNm4aQkBDZl7VUSEgIunfvDhsbGwA8qGVlZWH8+PGws7PDxYsX8dNPP+G///5DaGhoudKjqK1bt+Ljjz9Gq1atsHjxYjx9+hSrVq3C2bNncfXqVVl156BBg/DPP//g888/R82aNfHs2TNEREQgISFBdr979+5wcHDAd999hypVqiA+Ph779+8v9fpDhw7FyJEjcenSJbRq1Uq2/9GjRzh//rzsPfXPP/+gT58+8PLyQlBQEEQiEf7991+cPXu21PO7ubkBAHbu3In27dvD0LD4r9eBAwfi/v372L17N1asWAF7e3sAgIODAwBg4cKFmD17NoYMGYJPP/0Uz58/x08//YROnTrJ5RMAvHr1Cr169cKQIUMQGBiIkJAQjB8/HsbGxmX+uCBqxgipIBMnTmTvvuV8fHwYALZ+/foix2dlZRXZN27cOGZmZsays7Nl+0aNGsXc3Nxk9+Pi4hgAZmdnx16+fCnbf+DAAQaA/fHHH6WmMzs7m+Xn58vti4uLYyKRiAUFBcn2nTx5kgFgnp6eLCcnR7Z/1apVDAC7efMmY4yx3Nxc5ujoyJo2bSp33MaNGxkA5uPjU2p6GGPMzc2NASh2W7x4sew4b29v1qJFC7nnXrx4kQFg27dvl+0rLm8XL17MBAIBe/TokWzf999/X+R/VpxRo0Yxc3PzEh+X5kGjRo3YmzdvZPsPHTrEALA5c+Ywxhh79eoVA8CWLVtW4rnCwsIYAHbp0qUy01VYWloaE4lEbMqUKXL7g4OD5V73ihUrGAD2/Plzhc4vkUhk72cnJycWGBjIfv75Z7n8lFq2bBkDwOLi4uT2x8fHM6FQyBYuXCi3/+bNm8zQ0FBuv/Ray5cvl+3LyclhTZs2ZY6Ojiw3N1eh9BPVUDUv0TqRSISPP/64yH5TU1PZ7devX+PFixfo2LEjsrKy5HpGlmTo0KGykhgAdOzYEQAveZaVHgMD/tHIz89HSkqKrKrvypUrRY7/+OOPYWxsXOJ1YmJi8OzZM/zvf/+TO2706NGwtrYu83VItWnTBhEREUW2wMBAudd8+fJlWZUjAOzduxcikQj+/v6yfYXzNjMzEy9evEC7du3AGMPVq1fLnabykubBhAkTYGJiItvfu3dv1K9fH3/++acsXcbGxoiMjJRVSb9LWjI7dOgQxGJxudMgra4PCQkBY0y2f+/evWjbti1q1Kghd/4DBw4o1LFNIBDg6NGjWLBgAWxsbLB7925MnDgRbm5uGDp0aLnaTPfv3w+JRIIhQ4bgxYsXss3Z2RkeHh44efKk3PGGhoYYN26c7L6xsTHGjRuHZ8+e4fLly+VOO1EdBVOidVWrVpULMlL//PMPBgwYAGtra1hZWcHBwUHWASctLa3M80q/HKWkgbWkL2kpiUSCFStWwMPDAyKRCPb29nBwcMCNGzeKvW5Z15G2YXp4eMgdZ2RkVGIbXnHs7e3h6+tbZJNWLwLA4MGDYWBggL179wIAGGMIDQ1Fz549YWVlJTsuISEBo0ePhq2tLSwsLODg4AAfHx8A5ctbRUnzoF69ekUeq1+/vuxxkUiEpUuX4vDhw3ByckKnTp0QHByM5ORk2fE+Pj4YNGgQ5s2bB3t7e/j7+2PLli3IyckpMx1Dhw7F48ePER0dDYC3c16+fBlDhw6VO6Z9+/b49NNP4eTkhICAAISEhJQrsIpEIsycORN37txBYmIidu/ejbZt2yIkJASTJk0q8/mxsbFgjMHDwwMODg5y2507d/Ds2TO5411dXWFubi63r27dugAg1wZLNI+CKdG6wqUkqdTUVPj4+OD69esICgrCH3/8gYiICCxduhQAyvXFJhQKi91fuFRSnEWLFuHrr79Gp06dsGPHDhw9ehQRERFo2LBhsddV9jqa4Orqio4dOyIkJAQA70GdkJAgFyzy8/PRrVs3/Pnnn/j2228RHh6OiIg
IbN26FUD58laTJk+ejPv372Px4sUwMTHB7Nmz4enpKSsxCwQC7Nu3D9HR0Zg0aRKePHmCMWPGoEWLFsjIyCj13H379oWZmZksf0JCQmBgYIDBgwfLjjE1NUVUVBSOHz+OESNG4MaNGxg6dCi6detWpPNbaVxcXBAQEICoqCh4eHggJCQEeXl5pT5HIpFAIBDgyJEjxdZCbNiwodzXJxWLOiARnRQZGYmUlBTs378fnTp1ku2Pi4vT+LX37duHDz74AJs3b5bbn5qaKussoghpyTE2NhZdunSR7ReLxYiLi0OTJk1US/A7hg4digkTJuDevXvYu3cvzMzM0LdvX9njN2/exP3797Ft2zaMHDlStj8iIkKt6ShMmgf37t2TywPpvsKlawCoXbs2pkyZgilTpiA2NhZNmzbF8uXLsWPHDtkxbdu2Rdu2bbFw4ULs2rULw4cPx549e/Dpp5+WmA5zc3P06dMHoaGh+PHHH7F371507NixSKcyAwMDdO3aFV27dsWPP/6IRYsWYebMmTh58iR8fX0Veu1GRkbw8vJCbGysrMq2pFmlateuDcYY3N3dZSXM0iQmJiIzM1OudHr//n0A0NtZwfQVlUyJTpKW9gqX7nJzc7F27doKufa7pcrQ0NAiQzjKq2XLlnBwcMD69euRm5sr279161aNjD0cNGgQhEIhdu/ejdDQUPTp00fuy7a4vGWMYdWqVWpPi1TLli3h6OiI9evXy1XHHj58GHfu3EHv3r0B8DHG2dnZcs+tXbs2LC0tZc979epVkf9P06ZNAaDcVb2JiYn45ZdfcP36dblSO8CHRr2rPOePjY1FQkJCkf2pqamIjo6GjY2NrMeu9P/x7v9/4MCBEAqFmDdvXpHXyBhDSkqK3L68vDy50mpubi42bNgABwcHtGjRosS0EvWjkinRSe3atYONjQ1GjRqFL774AgKBAL/99luFVJ326dMHQUFB+Pjjj9GuXTvcvHkTO3fuVKh9szAjIyMsWLAA48aNQ5cuXTB06FDExcVhy5YtCp3zyZMnciUzKQsLC/Tv319239HRER988AF+/PFHvH79ukiwqF+/PmrXro2pU6fiyZMnsLKywu+//15mW3JZxGIxFixYUGS/ra0tJkyYgKVLl+Ljjz+Gj48PAgMDZUNjatasia+++goAL1V17doVQ4YMQYMGDWBoaIiwsDA8ffoUAQEBAIBt27Zh7dq1GDBgAGrXro3Xr19j06ZNsLKyQq9evcpMZ69evWBpaYmpU6dCKBRi0KBBco8HBQUhKioKvXv3hpubG549e4a1a9eiWrVq6NChQ4nnvX79OoYNG4aePXuiY8eOsLW1xZMnT7Bt2zYkJiZi5cqVsh8y0kA3c+ZMBAQEwMjICH379kXt2rWxYMECTJ8+HfHx8ejfvz8sLS0RFxeHsLAwjB07FlOnTpVd09XVFUuXLkV8fDzq1q2LvXv34tq1a9i4ceN7M5mJztBCD2LynippaEzDhg2LPf7s2bOsbdu2zNTUlLm6urJp06axo0ePMgDs5MmTsuNKGhpT3PAKAOz7778vNZ3Z2dlsypQpzMXFhZmamrL27duz6Oho5uPjIzeMRTo0JjQ0VO750utv2bJFbv/atWuZu7s7E4lErGXLliwqKqrIOUtS2tCYwq9datOmTQwAs7S0lBuKInX79m3m6+vLLCwsmL29Pfvss8/Y9evXi6RbkaExJaWvdu3asuP27t3LmjVrxkQiEbO1tWXDhw9n//33n+zxFy9esIkTJ7L69eszc3NzZm1tzdq0acNCQkJkx1y5coUFBgayGjVqMJFIxBwdHVmfPn1YTExMmemUGj58OAPAfH19izx24sQJ5u/vz1xdXZmxsTFzdXVlgYGB7P79+6We8+nTp2zJkiXMx8eHubi4MENDQ2ZjY8O6dOnC9u3bV+T4+fPns6pVqzIDA4Miw2R+//131qFDB2Zubs7Mzc1Z/fr12cSJE9m9e/dkx0g/OzExMczb25uZmJgwNzc3tmbNmnLnA1EfAWNa6CVBCCFEJZ07d8aLFy9w69YtbSeFgNpMCSGEEJVRMCWEEEJURMGUEEIIURG1mRJCCCEqopIpIYQQoiIKpoQQQoiKaNKGYkgkEiQmJsLS0rLEab8IIYRUbowxvH79Gq6urrKVpEpCwbQYiYmJ5V6wmRBCSOX2+PFjVKtWrdRjKJgWw9LSEgDPwMLLVpHiicViHDt2DN27d6cpzBREeaccyjflUL4pJj09HdWrV5fFhNJQMC2GtGrXysqKgmk5iMVimJmZwcrKij6gCqK8Uw7lm3Io35RTnuY+6oBECCGEqIiCKSGEEKIiCqYakpwM7NoF3Lyp7ZQQQgjRNGoz1ZAZM4AtW4BvvwWWLNF2aggh6sIYQ15eHvLz87WdFIWJxWIYGhoiOztbL9OvCUZGRrJ1ZlVBwVRDPviAB9O//9Z2Sggh6pKbm4ukpCRkZWVpOylKYYzB2dkZjx8/pjH0bwkEAlSrVg0WFhYqnYeCqYZ88AH/e/kykJYGWFtrNz2EENVIJBLExcVBKBTC1dUVxsbGeheQJBIJMjIyYGFhUeYkBO8DxhieP3+O//77Dx4eHiqVUCmYaki1aoCHBxAbC0RFAX37ajtFhBBV5ObmQiKRoHr16jAzM9N2cpQikUiQm5sLExMTCqZvOTg4ID4+HmKxWKVgSrmpQV268L8nT2o3HYQQ9aEgVLmoq3aB3hUaJK3qpXZTQgip3CiYalDnzvzv9etASopWk0IIIUSDKJhqkJMT0LAhvx0ZqdWkEEKIWtWsWRMrV67UdjJ0BgVTDaN2U0KINgkEAtkmFAphY2MDoVAo2zd37lylznvp0iWMHTtWpbR17twZkydPVukcuoJ682rYBx8AP/1E7aaEEO1ISkqS3d6zZw/mzJmDu3fvyjpSFR5fyRhDfn4+DA3LDg0ODg7qT6weo5Kphvn4AAIBcOcOn2KQEFJ5MAZkZmpnY6x8aXR2dpZtVlZWEAgEsvt3796FpaUlDh8+jBYtWkAkEuHMmTN48OAB/P394eTkBAsLC7Rq1QrHjx+XO++71bwCgQC//PILBgwYADMzM3h4eODgwYMq5e/vv/+Ohg0bQiQSoWbNmli+fLnc42vXroWHhwdMTEzg5OSEDz/8UPbYvn370LhxY5iamsLOzg6+vr7IzMxUKT2loWCqYba2QNOm/DZV9RJSuWRlARYW2tnUOQnTd999hyVLluDOnTvw8vJCRkYGevXqhRMnTuDq1avo0aMH+vbti4SEhFLPM2/ePAwZMgQ3btxAr169MHz4cLx8+VKpNF2+fBlDhgxBQEAAbt68iblz52L27NnYunUrACAmJgZffPEFgoKCcO/ePRw5cgSdOnUCwEvjgYGBGDNmDO7cuYPIyEgMHDgQrLy/QJRA1bwVoEsX4OpVHkwDA7WdGkIIkRcUFIRu3brJ7tva2qJJkyay+/Pnz0dYWBgOHjyISZMmlXie0aNHI/Dtl9yiRYuwevVqXLx4ET169FA4TT/++CO6du2K2bNnAwDq1q2L27dvY9myZR
g9ejQSEhJgbm6OPn36wNLSEm5ubmjWrBkAHkzz8vIwcOBAuLm5AQAaN26scBoUQSXTCkDjTQmpnMzMgIwM7WzqnISpZcuWcvczMjIwdepUeHp6okqVKrCwsMCdO3fKLJl6eXnJbpubm8PKygrPnj1TKk137txB+/bt5fa1b98esbGxyM/PR7du3eDm5oZatWphxIgR2Llzp2zO5CZNmqBr165o3LgxBg8ejE2bNuHVq1dKpaO8KJhWgI4dAaEQePAAKOO9SAjRIwIBYG6unU2d0wKbm5vL3Z86dSrCwsKwaNEinD59GteuXUPjxo2Rm5tb6nmMjIzeyR8BJBKJ+hJaiKWlJa5cuYLdu3fDxcUFc+bMQZMmTZCamgqhUIiIiAgcPnwYDRo0wE8//YR69eohLi5OI2kBKJhWCCsrQPrDj9pNCSG67uzZsxg9ejQGDBiAxo0bw9nZGfHx8RWaBk9PT5w9e7ZIuurWrSubQ9fQ0BC+vr4IDg7GjRs3EB8fj7/fVgEKBAK0b98e8+bNw9WrV2FsbIywsDCNpVerwXTdunXw8vKClZUVrKys4O3tjcOHD5d4/KZNm9CxY0fY2NjAxsYGvr6+uHjxotwxo0ePlhtXJRAIlKqvVzcab0oI0RceHh7Yv38/rl27huvXr2PYsGEaK2E+f/4c165dk9uePn2KKVOm4MSJE5g/fz7u37+Pbdu2Yc2aNZg6dSoA4NChQ1i9ejWuXbuGR48eYfv27ZBIJKhXrx4uXLiARYsWISYmBgkJCdi/fz+eP38OT09PjbwGQMvBtFq1aliyZAkuX76MmJgYdOnSBf7+/vjnn3+KPT4yMhKBgYE4efIkoqOjUb16dXTv3h1PnjyRO65Hjx5ISkqSbbt3766Il1Oqwu2mGuxQRgghKvvxxx9hY2ODdu3aoW/fvvDz80Pz5s01cq1du3ahWbNmctumTZvQvHlzhISEYM+ePWjUqBHmzJmDoKAgjB49GgBQpUoV7N+/H126dIGnpyfWr1+P3bt3o2HDhrCyskJUVBR69eqFunXrYtasWVi+fDl69uypkdcAAAKmyb7CSrC1tcWyZcvwySeflHlsfn4+bGxssGbNGowcORIAL5mmpqYiPDxc6TSkp6fD2toaaWlpsLKyUvo8hWVlAVWqAGIx8O+/QO3aajmtThCLxfjrr7/Qq1evIm0mpHSUd8rRRr5lZ2cjLi4O7u7uMDExqZBrqptEIkF6ejqsrKxo9Zu3Svu/KhILdGZoTH5+PkJDQ5GZmQlvb+9yPScrKwtisRi2trZy+yMjI+Ho6AgbGxt06dIFCxYsgJ2dXYnnycnJQU5Ojux+eno6AP6BFYvFSryaooyMgDZthDhzxgAREXmoUUOnfsOoRJpH6sqr9wnlnXK0kW9isRiMMUgkEo1VeWqatOwkfR2E/8BgjBW7nqki7y+tB9ObN2/C29sb2dnZsLCwQFhYGBo0aFCu53777bdwdXWFr6+vbF+PHj0wcOBAuLu748GDB5gxYwZ69uyJ6OjoEhd+Xbx4MebNm1dk/7Fjx9S6CHDVqvUA1MeuXclwcbmstvPqioiICG0nQW9R3imnIvPN0NAQzs7OyMjIKLNXq657/fq1tpOgM3Jzc/HmzRtERUUhLy9P7rEsBWbG0Ho1b25uLhISEpCWloZ9+/bhl19+walTp8oMqEuWLEFwcDAiIyPlxja96+HDh6hduzaOHz+Orl27FntMcSXT6tWr48WLF2qr5gWAqCgBfH0N4eTEkJCQp9au7dokFosRERGBbt26UVWlgijvlKONfMvOzsbjx49Rs2ZNva3mZYzh9evXsLS0VNui2PouOzsb8fHxqF69erHVvPb29vpRzWtsbIw6deoAAFq0aIFLly5h1apV2LBhQ4nP+eGHH7BkyRIcP3681EAKALVq1YK9vT3+/fffEoOpSCSCSCQqst/IyEitH9QOHQATE+DpUwEePDCCBjuWaYW68+t9QnmnnIrMt/z8fAgEAhgYGOhte6O0alf6OghgYGAAgUBQ7HtJkfeWzuWmRCKRKyW+Kzg4GPPnz8eRI0eKzNpRnP/++w8pKSlwcXFRZzKVIhIB0gk9aDYkQgipPLQaTKdPn46oqCjEx8fj5s2bmD59OiIjIzF8+HAAwMiRIzF9+nTZ8UuXLsXs2bPx66+/ombNmkhOTkZycjIyMjIA8CmwvvnmG5w/fx7x8fE4ceIE/P39UadOHfj5+WnlNb6LxpsSQkjlo9Vq3mfPnmHkyJFISkqCtbU1vLy8cPToUdmEywkJCXJVEevWrUNubq7cMjsA8P3332Pu3LkQCoW4ceMGtm3bhtTUVLi6uqJ79+6YP39+sdW42iAdb3ryJCCRAFTTQggh+k+rwXTz5s2lPh4ZGSl3v6zprExNTXH06FEVU6VZLVvy5ZNevgRu3ChYno0QQoj+onJRBTMy4hPfA1TVSwghlQUFUy2QtptSJyRCSGURHx8PgUCAa9euaTspWkHBVAuk7aZRUcA7Y4QJIUTtpAuACIVC2NjYQCgUamURkM6dO2Py5MkVes2KovVxpu+jpk35PL2pqcCVK0Dr1lpOECGk0uvRowc2b94sm7TBwMBAZzpmVgZUMtUCoRDw8eG3qd2UED3GGJCZqZ1NwcnrRCIRnJ2d4eTkBGdnZzg7O8PGxgYAMGzYMAwdOlTueLFYDHt7e2zfvh0AcOTIEXTo0AFVqlSBnZ0d+vTpgwcPHqgnH9/6/fff0bBhQ4hEItSsWRPLly+Xe3zt2rXw8PCAiYkJnJyc5EZ27Nu3D40bN4apqSns7Ozg6+uLzMxMtaavNFQy1ZIuXYADB3i76bffajs1hBClZGXx7vnakJEBmJur5VTDhw/H4MGDkZGRAYu3r+fo0aPIysrCgAEDAACZmZn4+uuv4eXlhYyMDMyZMwcDBgzAtWvX1DKb0uXLlzFkyBDMnTsXQ4cOxblz5zBhwgTY2dlh9OjRiImJwRdffIHffvsN7dq1w8uXL3H69GkAQFJSEgIDAxEcHIwBAwbg9evXOH36NCpytlwKploibTc9cwbIzQWMjbWbHkJI5Xbo0KEi88vOmDEDM2bMgJ+fH8zNzREWFoYRI0YA4OuM9uvXD5aWlgCAQYMGyT33119/hYODA27fvo1GjRqpnL4ff/wRXbt2xezZswEAdevWxe3bt7Fs2TKMHj0aCQkJMDc3R58+fWBpaQk3Nzc0a9YMAA+meXl5GDhwINzc3AAAjRs3VjlNiqBgqiUNGwIODsDz58DFi3zeXkKInjEz4yVEbV1bAR988AF+/vlnWenTwMBAtnyloaEhhgwZgp07d2LEiBHIzMzEgQMHsGfPHtnzY2NjMWfOHFy4cAEvXryQzfObkJCglmB6584d+Pv7y+1r3749Vq5cifz8fHTr1g1ubm6oVasWevTogR49emDAgAEwMzNDkyZN0LVrVzRu3Bh+fn7o3r07PvzwQ1k1dkWgNlMtMTAAOnfmt6ndlBA9JRDwqlZtbAqu+mJubo46deqgVq1aqFOnDurUqSO3FvTw4cNx4sQJPHv2D
OHh4TA1NZXr7du3b1+8fPkSmzZtwoULF3DhwgUAqLDl6CwtLXHlyhXs3r0bLi4umDNnDpo0aYLU1FQIhUJERETg8OHDaNCgAX766SfUq1cPcXFxFZI2gIKpVtF4U0KIrmjXrh2qV6+OvXv3YufOnRg8eLBs1ZSUlBTcu3cPs2bNQteuXeHp6YlXr16p9fqenp44e/as3L6zZ8+ibt26srWoDQ0N4evri+DgYNy4cQPx8fH4++0XqEAgQPv27TFv3jxcvXoVxsbGCAsLU2saS0PVvFokbTeNjgbevAFMTbWbHkJI5ZWTk4Pk5GS8fv0aWVlZMDAwgKGhIezt7WXHDBs2DOvXr8f9+/dxslCVmY2NDezs7LBx40a4uLggISEB3333nVLpeP78eZGJHVxcXDBlyhS0atUK8+fPx9ChQxEdHY01a9Zg7dq1AHib78OHD9GpUyfY2Njgr7/+gkQiQb169XDhwgWcOHEC3bt3h6OjIy5cuIDnz5/DsyLXuWSkiLS0NAaApaWlafQ6Egljrq6MAYydOKHRS2lUbm4uCw8PZ7m5udpOit6hvFOONvLtzZs37Pbt2+zNmzcVdk11GTVqFANQZKtXr57ccbdv32YAmJubG5NIJHKPRUREME9PTyYSiZiXlxeLjIxkAFhYWBhjjLG4uDgGgF29erXEdPj4+BSbjvnz5zPGGNu3bx9r0KABMzIyYjVq1GDLli2TPff06dPMx8eH2djYMFNTU+bl5cX27t0rS7efnx9zcHBgIpGI1a1bl/3000/lypvS/q+KxAIqmWqRQMBLpzt38nZTabUvIYSo09atW7F161ZIJBKkp6fDysqq2OEsnp6eJQ4n8fX1xe3bt+X2FT62Zs2aZQ5FeXfxkncNGjSoSK9hqQ4dOpT4fE9PTxw5cqTUc2satZlqGbWbEkKI/qNgqmXSdtOLF7XXw54QQohqKJhqmbs7ULMmn/D+zBltp4YQQogyKJjqAGnplMabEkKIfqJgqgOo3ZQQ/VFWJxuiX9T1/6RgqgOkJdMrV/iybIQQ3SOdwCArK0vLKSHqJJ3BSToxhLJoaIwOqFoVqFsXuH+fLxjer5+2U0QIeZdQKESVKlXw7NkzAICZmRkECk7pp20SiQS5ubnIzs5Wy0ov+k4ikeD58+cwMzODoaFq4ZCCqY744AMeTE+epGBKiK5ydnYGAFlA1TeMMbx58wampqZ690NAUwwMDFCjRg2V84OCqY7o0gXYsIHaTQnRZQKBAC4uLnB0dIRYLNZ2chQmFosRFRWFTp06yaqt33fGxsZqKaVTMNUR0hVkbtwAXrwACk2XSQjRMUKhUOU2Nm0QCoXIy8uDiYkJBVM1o0pzHeHoCEiXBCxjxi1CCCE6hoKpDqHxpoQQop8omOoQGm9KCCH6SavBdN26dfDy8oKVlRWsrKzg7e2Nw4cPl/qc0NBQ1K9fHyYmJmjcuDH++usvuccZY5gzZw5cXFxgamoKX19fxMbGavJlqI2PD19J5u5dIClJ26khhBBSXloNptWqVcOSJUtw+fJlxMTEoEuXLvD398c///xT7PHnzp1DYGAgPvnkE1y9ehX9+/dH//79cevWLdkxwcHBWL16NdavX48LFy7A3Nwcfn5+yM7OrqiXpTQbG6BZM36bqnoJIUR/aLU3b9++feXuL1y4EOvWrcP58+fRsGHDIsevWrUKPXr0wDfffAMAmD9/PiIiIrBmzRqsX78ejDGsXLkSs2bNgr+/PwBg+/btcHJyQnh4OAICAopNR05ODnJycmT309PTAfBu5BXd/d3HxwBXrghx4oQEgwfnV+i1lSXNI30cKqBtlHfKoXxTDuWbYhTJJ50ZGpOfn4/Q0FBkZmbC29u72GOio6Px9ddfy+3z8/NDeHg4ACAuLg7Jycnw9fWVPW5tbY02bdogOjq6xGC6ePFizJs3r8j+Y8eOwczMTMlXpBxzc0cA3vjzzzf466/jFXptVUVERGg7CXqL8k45lG/KoXwrH0WmjtR6ML158ya8vb2RnZ0NCwsLhIWFoUGDBsUem5ycDCcnJ7l9Tk5OSE5Olj0u3VfSMcWZPn26XJBOT09H9erV0b17d1hZWSn1upTVsSOweDHD06fmaNiwF9zcKvTyShGLxYiIiEC3bt1o7JqCKO+UQ/mmHMo3xUhrKctD68G0Xr16uHbtGtLS0rBv3z6MGjUKp06dKjGgaoJIJIJIJCqy38jIqMLfcLa2QKtWwPnzwJkzRqhTp0IvrxJt5FdlQXmnHMo35VC+lY8ieaRUB6THjx/jv//+k92/ePEiJk+ejI0bNyp8LmNjY9SpUwctWrTA4sWL0aRJE6xatarYY52dnfH06VO5fU+fPpXNlyn9W9ox+oDGm6rHw4fAsmXAuHHA8+faTg0hpDJTKpgOGzYMJ99+0ycnJ6Nbt264ePEiZs6ciaCgIJUSJJFI5DoDFebt7Y0TJ07I7YuIiJC1sbq7u8PZ2VnumPT0dFy4cKHEdlhdVHi8KS2dqJjYWGDxYqBFC6B2bWDaNGDjRv6XEEI0RalgeuvWLbRu3RoAEBISgkaNGuHcuXPYuXMntm7dWu7zTJ8+HVFRUYiPj8fNmzcxffp0REZGYvjw4QCAkSNHYvr06bLjv/zySxw5cgTLly/H3bt3MXfuXMTExGDSpEkA+CTUkydPxoIFC3Dw4EHcvHkTI0eOhKurK/r376/MS9WKdu0AY2Pgv/+ABw+0nRrdd+cOMH8+0KQJX8puxgy+NqyBAdChAz9m+3bg3j3tppMQUnkp1WYqFotlbYzHjx9Hv7drhtWvXx9JCsw28OzZM4wcORJJSUmwtraGl5cXjh49im7dugEAEhIS5Gbzb9euHXbt2oVZs2ZhxowZ8PDwQHh4OBpJJ7UFMG3aNGRmZmLs2LFITU1Fhw4dcOTIEZiYmCjzUrXCzAxo25avbfr339CrdtOKwBhw6xawbx/fbt8ueMzQEOjaFfjwQ8DfH3Bw4Eva/fEH8P33wJ492ks3IaQSY0po3bo1+/bbb1lUVBQzMTFh165dY4wxFh0dzapWrarMKXVKWloaA8DS0tK0loa5cxkDGAsI0FoSyi03N5eFh4ez3NxcjV1DImHsyhXGZsxgrG5dnjfSzciIsd69GduyhbGUlKLPvXq14Njr1zWWRKVURN5VRpRvyqF8U4wisUCpat6lS5diw4YN6Ny5MwIDA9GkSRMAwMGDB2XVv0Q1hTshva/tpowBFy/y9s46dYDmzYFFi/gi6iIRL3n+9hvvXHToEDB6NO8N/a6mTYEhQ/jt2bMr8hUQQt4XSlXzdu7cGS9evEB6ejpsbGxk+8eOHVvhkxxUVm3aAKamwNOnvE2wAkcKaVV+Ph8W9PvvfEtIKHjM1BTo1YtX4fbuDVhalv+88+bxKuGDB3mApt98hBB1Uqpk+ubNG+Tk5MgC6aNHj7By5Urcu3cPjo6Oak3g+0okAtq357cr+yoyGRnA/v28ZOniwjsNrVjBA6m5OTB0KBAaykug+/YBAQGKBVIAqF8fGDGC36bSKSFE3ZQKpv7+/ti+fTsA
IDU1FW3atMHy5cvRv39/rFu3Tq0JfJ9Jh8hUxvGmCQnAzz8DPXoAdnbAoEHAtm08YFpbA8OGAWFh/P6ePbw0am6u2jXnzOEdlI4d4527CFG3lBT+Y3DIkPe3eeZ9pVQwvXLlCjp27AgA2LdvH5ycnPDo0SNs374dq1evVmsC32fSdtPISEAi0WpSVCaR8OrV2bN5G6abGzBpEnD0KJCby8eETp7MS+HPnwM7dwL9+/OqXXWpVQv49FN+e9Ys+rIj6iUWA4MHA2fP8pqUd4bEk0pOqTbTrKwsWL6tZzt27BgGDhwIAwMDtG3bFo8ePVJrAt9nLVvy6syXL4EbN3gQ0idZWcDx47yd8s8/gcLTIxsY8PG0ffvyrX59vparps2cCWzZApw+zUuofn6av2ZJJBI+HjY/vwJeONG4L7+Ur0VavhwotOYGqeSUKpnWqVMH4eHhePz4MY4ePYru3bsD4ONGK3pi+MrM0JBPfA/oT7vpkyfAhg1Anz68+tbfH9i8mQdSS0teXbt9O+9Ydfo076nr6VkxgRQAqlUDJkzgt7VdOv3iC6BtWyP8/ruH9hKhpzIydKtmYe1aYN06/j5euZL/WDxyhI+HJu8JZcbehIaGMiMjI2ZgYMB8fX1l+xctWsR69OihzCl1ii6MM5X64Qc+PvKs8wDGhg9nbPt2xpKTtZ0sOffv57KAgDusefN8ufGfAGM1azL2+eeMHTvGWE6OtlPKPX3KmLk5T19YmHbScOBAQR45OGSynBwa91deZ84wZmkpYXXrprCnT7Wfb8ePMyYU8v/l0qV836BB/P7o0dpN27tonKliFIkFSgVTxhhLSkpiV65cYfn5+bJ9Fy5cYHfu3FH2lDpDl4Lp5cuMWeMVE0MoH6WaN+czGERFMaalD0ZuLmNLljBmYiKRJUsgYKxtW8YWLmTsxg0+2YIumjGDp7dRI8YKvYUrRGIiY/b28v/OU6fEFZsIPfX8OWNVqxbkW4sW+ezVK+2l5/59xmxseFpGjCh4v0dHF0wokpiovfS9i4KpYiokmEo9fvyYPX78WNXT6BRdCqZ5eYzZVxGzTohkT0ZNZ6xZM1ak+GdlxdiAAYxt2MBYfHyFpOvCBca8vAqS0LDhc7Zpk1jXCs0levmSMWtrnvZduyruuvn5jHXrxq/btCljgwbx0vyECXkVlwg9lZ/PWK9ePO9q15YwK6tsBjDWujVjqakVn55XrxirV4+np21bxt68kX+8XTv+2PTpFZ+2klAwVYzGg2l+fj6bN28es7KyYgYGBszAwIBZW1uzoKAguZKqvtKlYMoYY/378w/l4sVvdyQlMbZtG2PDhjFmZ1c0uNavz9jkyYwdOcJYVpZa05KeztiXX/ISKMAvv3mzmIWF6d8HdP58/ho8PBgTV1DBcPlyfk1TU8Zu32bs4EHx26peSYWlQV8tXcrzzsSEsZiYXLZy5d/M1lYiC2YV+XEVixnz8+PpqVaNfyTftX8/f9zGhrHXrysubaWhYKoYjQfT7777jjk4OLC1a9ey69evs+vXr7Off/6ZOTg4sBkzZihzSp2ia8F09Wr+oezevZgH8/IYu3iRsaAgxtq3Z8zAQD6wmpjwT/2KFYzduaNSvevBg4xVr15w6hEjGHv2TH8/oOnpBdWtmzdr/npXrzJmbMyvt24d35eZmSsrYR05ovk06KszZwraJTduLHjPXbyYK6tmbdeO/08rwldf8WuamfE5o4uTl8dYnTr8uNWrKyZdZdHXz6q2aDyYuri4sAMHDhTZHx4ezlxdXZU5pU7RtWB682ZBaWbnTsays0s5+OVLxkJDGfvkE/6T+d1Sq5sbY2PH8p/N5Xx9iYmMDR5ccAp3d8aOHi14XJ8/oNIOXjVqlJGvKsrMZMzTk1/L37/gN01ubi7r0eMhAxgbNUpz19dnz58XvJWHDeN5V/g9d/kyY1Wq8Mc7dtR8KfCXXwo+C6GhpR/7888Fn5k8HajJ1+fPqjZoPJiKRCJ27969Ivvv3r3LTExMlDmlTtG1YCqR8A+j9ANsZ8fYlCmMFfMvKPrEW7d4xPD1LSgWSTdDQ8Y6dWJs0SLe0+mdKvr8fN4MK21bFAoZmzaNB4bC9PkDmpXFmIsLf31r1mjuOuPH82u4uPDgIJWbm8sWLYpiAGOWlmqvldd7hdtJ69YtKHm++567dKngferjw1hGhmbSExXFOxUBfGWnsmRmFrTElBV4K4I+f1a1QePBtHXr1uzzzz8vsn/SpEmsdevWypxSp+haMGWMt8nMnVu0sPnBB4zt2VPOYScZGYz9+Scfq+LhUbTU6ujI62537mT3zjxjHToUPNSyJa+mLI6+f0ClpQdn56I/FNSh8DCYY8fkH8vNzWX794ezGjUkOvOFq0uCgwtaKwovn1fce+78ed4XD2CsSxf1/y/j4gqaBQYPLn8v8Nmz+XPatNF+73Z9/6wqorjlGBWl8WAaGRnJzM3NmaenJxszZgwbM2YM8/T0ZBYWFiwqKkqZU+oUXQymUmIxb7vs3bugExDvwMLYN98wFhurwMkePGBs7VrG+vUrGHj5dsuHgF1ES7bEaBbb+8UZlpddcu8Yff+A5uTw2m+AsWXL1HvuwsNgvv666OPSvJsyJY8BjA0cqN7r67OzZwvaSTdskH+spPfcuXOMWVjw5/j6qq+kn57OWOPGBaPSFAnUycmMiUT8uWfOqCc9ytL3z2p5JSfzWqAvvlCt+aZChsY8efKEzZgxgw0cOJANHDiQzZw5kz169Ih99tlnyp5SZ+hyMC3s0SP+q9fVVb6A2bUrYyEhCk6SkJPDrq86yTbafsuuoknRUqu1NR+JvmkTYwkJck+tDB/QX38tqEJXVyeWd4fBFPehlubdpUu5DOBfutocN6krXrwoqIUJDCxaoivtPXfmTMFvQz+/okNWFJWfz39vSmsvlBkJ+Omn/Pn9+6uWFlVVhs9qWfLy+A8pPmRPtRqKCh1nWti1a9eYgYGBOk+pFfoSTKXEYj6TT8+e8qVVR0fGvvuOF0BL8+oV75MkfZ6TE2MH1j1hkl+3MBYQwJitbdHg2qABL2odO8Zy09P1/gMqFvM2OYB3jFaHd4fBFEf65ZaTk8saNODH//qreq6vr/Lzec2LdNhScT9uygoKp07xnrYAb3NVpXQyfTqT/dA5f165c9y+zc8hEPCJHrTlfQimQUEFPa1L+tyVFwVTFelbMC0sLo6xmTP5L+jCsa97d8b27ZOfLEki4SXYwsd+9hnvECwnL49/i8ydy5i3d5HhNxJTU5bcrBnLmz6dR/WEBO03Dilh927+kqysVG9vKW4YTHEKf7lJx71266batfWdtJ1UJGLs2rXijylPUDh5kv+QARjr00e5gLpjR8Fb/bffFH9+YdIfCOPHq3YeVVT2YPr33wVfT9u2qX4+CqYq0udgKpWby9jvv/MgWjioOjvzqfTOneNfMNL99erxX/PlkpLC2N69jI0ZU7SOuXCxuGdPxmbNYiw8nNeN6XiAzc8vaBdTZdaawsNg+vUr/WUX/nL791/+HAOD4icBeB+U1k5aWHmDwvH
jvPOSdEiSIk0f588XtHV+9135n1eSkycLOlMV7tFdkSpzME1K4rVqAP9qUgcKpiqqDMG0sAcP+JeBo2PRmGdkxNicOSq0K0kkLDcmhl0bO5bljxrF5xgUCksPsLNn62yADQ8vqCJ6+lS5c0iHwTg780ktSvPul1ubNvy5q1Ypd2199uJFwaQgAQHl/xFSlmPHCoLigAHlm8r68eOCGpu+fdUzf7NEwjsvqbMpQVGVNZjm5fEe3ACfb1tdPbkViQUKrWc6cODAUh9PTU1V5HSkgtSqBSxeDMybx9cW3bCBrzPavj2wcSPQoIEKJxcIAC8vxPfqhQa9esHAyAh48wa4fh24fLlg++cf4Nkz4PBhvkk5OQEtWhRsLVsCrq4VtybbO/r1A1q1Ai5d4nm2YoVizz94kC/FBfCl5hwcFHt+YCBw4QKwezdfou19IZEAo0YBjx8DHh78Paqut0C3bsCBA/x/GxYGDBsG7NoFGBkVf3xWFl+YPjkZaNSIL1RvoNRilfIEAmDqVH79NWuAb74BTExUPy8BFizgy1Sam/OF2c3MKj4NCgVTa2vrMh8fOXKkSgkimmNszNcT/fBDID2dry+qkZhlagq0bcs3qXcDbEwMcPs2X9j0r7/4JiUNsC1bFgTZCgqwAgH/YPr58aA4ZQpfA7U8kpKATz7ht7/+mn+JK2rIEP7c8+eBhw/5D6H3wY8/8gXkRSIgJARQ97LIfn48kA4YAOzbBwiFwI4dfM3gwhgDPv6Yv0Xt7fmPI0tL9aXjww+Bb7/lPxp27AA+/VR9535f/f03LygAwPr1QP36WkqIegrDlUtlq+bVNKWrjrKy+FpVa9bwhR8bNy65itjJiffgmDOHz4Lw5IlmXgzj1XGdOvHLjhtXvufk5xe0T5c0DKY4xeWdtFv/ggVKJF4PnTtX8G9fv758z1H2PffHHwUzGA0bVnSKP2lPUENDBfoQKEjay7t+/Ypf/q+yVfMWbif99FP1n19rbaaVBQVTxaj1A5qZyb9df/qp7ADr7FwQYA8eVGuAjYoq+FIta2gRY4z9+GPZw2CKU1zebd5cMPpIx5qU1S4lpfztpIWp8p4LD+f/V4BP+CUNqPv2Fby1Nm1S+LTllpZWMFPTH39o7jrFqUzBNC+PzwAH8K8JTUzFqTfBdNGiRaxly5bMwsKCOTg4MH9/f3b37t1Sn+Pj48MAFNl69eolO2bUqFFFHvfz8yt3uiiYKkbjH9B3A2yjRkVXx3k3wH7/vcoBVrrE1siRpR9X3mEwxSku7169Kjhf4Sn0KhuJpKBHeZ06ii2hpup77vffC36jjR7NWExMwbjUL79U6pQKmTqVX6tzZ81fq7DKFEy//57nobk5XxBLE/QmmPr5+bEtW7awW7dusWvXrrFevXqxGjVqsIxSZqlOSUlhSUlJsu3WrVtMKBSyLVu2yI4ZNWoU69Gjh9xxL4sMniwZBVPFaOUDWjjAjhpVeoB1cuLfWmPH8kn/Dx5k7O7dMsdJXLxYMFSlpNKmIsNgilNS3g0YwM/57beKnU+fSFfsEYlKXsasJOp4z4WEFARU6Y+X7t0rZm3bhISC0nFMjOavJ1VZgmlERMEENTt2aO46ehNM3/Xs2TMGgJ1SoLFixYoVzNLSUi4Ajxo1ivn7+yudDgqmitGZD2hmJh+ouHp12QEW4N+kderwKXImT+bzFB8/zr/p3jZmSRdmHzy4+EsqMgymOCXlXUgIP6+bW8W3q1WEc+cKgomipXnG1Pee27On4C1St27FTuU4fDi/bmBgxV1TZz6rKkhMLBjmp+nZazU2NEbT0tLSAAC2trblfs7mzZsREBAAc3Nzuf2RkZFwdHSEjY0NunTpggULFsDOzq7Yc+Tk5CAnJ0d2Pz09HQAgFoshFosVfRnvHWkeaT2vjIz4uJZWrQr2ZWZCcPs2EBsLwf37EMTGQhAby+9nZgL//su3wr2JATBTU6BOHfzq6IFWqIe7ofVx+9da8OjtAbx9f/7xhwDr1vGP0K+/5qFKFQZFs6CkvPPzAywsDPHokQCnT+ehXTumYGborpcvgYAAQ+TlCTB4sARjxuSrLd8UNXAgEBIiQEiIAebOzYe5ORROi7K++ALYudMIISEMQUF5cHPT/DV15rOqpPx8IDBQiGfPDNC4McMPP+Rp9P+lSD4JGGM68SmVSCTo168fUlNTcebMmXI95+LFi2jTpg0uXLiA1q1by/bv2bMHZmZmcHd3x4MHDzBjxgxYWFggOjoaQqGwyHnmzp2LedK+1YXs2rULZtoYsEQ0jzGYvHoF8ydPYJGYCIvERJhL/yYnwyA/v8Sn5lhaIs2xGiISWuAfsSesW1qi9UcSZLq4IF8kUlsSV61qhpMna6BnzziMG3dDbefVJsaARYta49IlF7i4ZGD58lMwM8vTdrK0Zs6cdrhxwwH9+v2LMWP+0XZydN6uXfURElIPJiZ5WL78FKpWzdDo9bKysjBs2DCkpaXBqozxWjoTTMePH4/Dhw/jzJkzqFbOgX3jxo1DdHQ0btwo/Yvm4cOHqF27No4fP46uXbsWeby4kmn16tXx4sWLMjOQ8F9vERER6NatG4xKGgmvT/LygPh4WUk27VIsroX8Cw/cR3X8V+pTWfXqYB4eYB4ewNu/zMMDqFmz6KBGlJ53R48K0LevIRwcGOLj80qcZECfrFxpgGnThDA2Zjh9Og/Nmil3nsrynjtyRIB+/QxhYcHw8GEeqlTR7PXu3s3Dnj3XERDQBB4ehiimbKGzjh8XoHdvIRgTYPv2PAQEaD50paenw97evlzBVCeqeSdNmoRDhw4hKiqq3IE0MzMTe/bsQVBQUJnH1qpVC/b29vj333+LDaYikQiiYkoURkZGev1BrWiVJr+MjABPT74BsAWw0wL49Vegd+dMDG3xLw4uv48GhvfxVa/7qPLsPnDvHvDqFQSPH0Pw+DEfSf7uOWvVAurVA+rWLdjc3CAQi4vNOz8/PoPS8+cCREUZoUePCnr9GnL+PDBjBr+9cqUArVur/l7R9/dcnz58BrLbtwXYutUI33yjuWtt2AB8/rkhxOK2WLSIz63SoAHQuDGf6alRI37bxUVrE5CVKDGRz5DFGDB2LDBiRMWELkXeW1oNpowxfP755wgLC0NkZCTc3d3L/dzQ0FDk5OTgo48+KvPY//77DykpKXBxcVElueQ9NmcO8NtvwJ+R5jhyugny0QRrVwNVxhc6KCWFB9X79+W32FggO5s/du+e3HmNAPQDwMzNeVusjY3sr5GtLXZWs8Xfz22QNNcWSLcpcgysrNQz152GvXwJDB3KC/1DhgD/+5+2U6QbpFMMjhkDrFoFfPkln6lMnXJzgc8/51OHAgI4OWUiLc0Mb94IZBOSFWZjUzTANmzI92tDXh6fgvH5c6BJE2DlSu2koyxareadMGECdu3ahQMHDqBevXqy/dbW1jA1NQUAjBw5ElWrVsXixYvlntuxY0dUrVoVe/bskdufkZGBefPmYdCgQXB2dsaDBw8wbdo0vH79Gjdv3iy2BPqu9PR0WFtbl6toT3iV219//Y
VevXrpdSmhLJMmAT//zG/36weEh5fzF7xEAvz3X9Ege/8+WFwcBBKJ8okyMODfcjbFBNri/ha+XYETwwYGAnv2ALVrA1euqD5dYGV6z+Xk8FaA5GT+g60c5YNyS0riUxieO8ffq/Pn56Nhw0Po0aMXEhKMcOsWcOsWcPMm/3v/Pn+7Fqdq1aJB1tOTl3A1afZsPsWnhQUP/HXravZ6hSkSC7RaMl33dkbwzp07y+3fsmULRo8eDQBISEiAwTu/vO/du4czZ87g2LFjRc4pFApx48YNbNu2DampqXB1dUX37t0xf/78cgVSQkoycyafgN7cHPjlFwWqwgwMgBo1+ObrK/dQXnY2IvbtQ7cWLWCUkcGLcK9eyf6ylJcI3fgKxpkv0d7zFRyEhR5/84Z/86Wk8E1RJiZlB2BLSz5ruLk534q7bWJSambcvcsDKQDs3av+eXf1nUjES44zZwI//AAMH66eataLF/lcxImJgLU1f+/6+krw1198bmJpS0Ph9Uuys/n/q3CAvXmTzyX85AnfjhwpON7AAPjgA2DkSH4eCwvV013YsWPAwoX89qZNFRtIFaX1at6yREZGFtlXr169Ep9ramqKo0ePqpo0QopwceG/3I2M1BgQhEKILS2BOnWKXcZEAOCqCFiyBOhfj0/WLpOdLRd4FforkfDnJybyTRUCQakB9/Vtc2yGOZzczdAixBz4s5TAXNztMoJ1ZfC///Ggcf06b24vpmuHQrZs4efMzeWlxwMH+Go8ZY30MDEBmjblW2FpaXzhJ2mAlQbZlBTgxAm+jR8PDBrEA+sHH0Dlzk1PnvAfFozx1xIQoNr5NE0nOiARoi9KGKqsUYGBPJj+9ReQmoqCHp8mJjzCK9oXgDHg9evig+y7+zIy+JpkmZl8K3xb2gOesYJ9xWj1dkMcgGAlMsDAgAfVQkFWaGqKtnl5EO7dy3tp2dvzf469vfxtOzu9WOfM1pa3m65Zw0unygZTsZivdPTTT/y+vz9fClDVH3/W1kC7dnyTYgyIi+PL2W3fzrsG/PYb36pW5dXVI0bw9lZFSdtJX7zg7aSKLoWoDRRMCdFx0g4g//wD7N/Pv3RVIhDwb1crK95Yp6z8/ILgWkLA3b8jCyf/zESDGpkYP6qEoFzS86XBWiLhQT2jYEyhAQAnALh6tex0mpuXHmyL26fphsBiTJ4MrF3Lq1Fv3eLtkop4/hwYPBg4dYrfnzuXtzdqqn+aQMA7qM+axauoL1zgQXXPHl6qXLqUby1a8NJqQADg6Fi+c3//PRAVxVsZQkP14vcQBVNCdJ1AwH+lz5zJSwEqB1N1EQr5t10JC36+fg18Mh5IBbB/JYABCp4/L4+3CxcTcPPS0nDj1Ck0qVYNwleveBEmJUX+74sXPOBLn//oUfmvbWZWdtB993EVJ3ipXZu3cf7+O1/f9ddfy//cq1f5guYJCbzdcscOXiqtKAJBwRLGK1bwtWm3b+d/pT2Gp0wBevbkgbVPn5ID5NGjwKJF/PamTbx6Wh9QMCVEDwQE8GB68iTvoakPo7w2b+bV0nXr8t7PCjM0LDFYM7EYj4VCNO7VC8KSevMyBqSnFw2wxQXdwvvEYh64ExL4Vl6mpgWBVbq9e//dzdparj146lQeTHfs4G2o5fk/79rFFxl/84YHnvBwPn5UW0Qi3hlp4ECepXv28MB66RLwxx98q1KFD5UaORLw9i7IgidPCnozjx/Pj9EXFEwJ0QO1avFf/efPAyEhfDyiLhOLC9q5pkxRvTOKUgQCHqysrXmxrzyk7cmKBuDcXB7NHj/mW3kJhbzB9G3gbWtnh4OOdrjzzA7Xh9vBZVgJQdjICHl5wPTpvI0V4KW+Xbug8VmUFGFvz4eUTZoE3LlT0Kb63398EokNG/i/ZuRI3jfgk094ljZrxkvn+oSCKSF6YtgwHkx37dL9YBoSwgt1jo78i1JvFG5PrlWrfM9hjLfnvhtg37397paZyauhnz/n21t93244+XYrhsTSCs/z7DDkjR26wg7ODezgVdsOBqvsSi4Zv7MYSEXz9OTVtwsWAJGRvLS6bx/w4AFvI/3+e36cpSV//+hDO2lhFEwJ0RNDhvBOKhcv8i+g8ha2KhpjwLJl/PYXX+jfl6LCBIKC6mgFZnFDdnaxQVbyPAVblqdAmJqCzg1foKZlocdfvQIYg8HrdLggHS6I4+e6/XYrjbExDO3s0EUohKG0k5VIxP9B0q3wfXU9ZmQkV5VtYAB06cK3n3/mw722bweOH+fvnV9+4SPF9A0FU0L0hJMTHzIREcEH4M+ape0UFe/4cT5e0tyct3uREpiY8DEkVavK7TYAkGMLTJwIuGcBsdcLqsn37c3H12NSYZKVgsYuKfjhuxdwtyqh5Fu4dJybC+TmQpCUBEuA17NW9GstJtiai0T4yMQEH5mY4E03E+QKRLCuPwuAV8WmTw0omBKiR4YN48F01y7eIUkX5zIIfjuW9JNPZEu/EgWNHs3ng46L4yW3AQP4/UWLhADs0LWrHTbuLee4Z+k44JQUiJ8+xYVjx9C2WTMY5uXx4UfZ2QVb4fuqPFZoFS4ABY+VwvTthm/18xcYBVNC9MiAAXw2mDt3gBs3+IB2XXL1Ki+ZCoXAV19pOzX6y8wMmDABmD+fj9XcupUPMwF4h64lS4pd0a94AgEfL2NhAbi6IiUpCax792Jn3FIbiYSXhksLwiUFZF2eM7AUFEwJ0SPW1kDv3nzyhl27dC+YSnuWDhmi2nwQhFfzBgcDMTH8vokJH3epzonwNcbAoKBa9z2h+2s3EULkDBvG/+7eXfIKH9rw6BGfyB6ARtflfF84OfE1PAGgenXgzBk9CaTvKQqmhOiZXr34yI3Hj/nSWrpixQo+0sPXl48TJKpbtYpP4HDlCp+Wj+guCqaE6BlTU952CvCqXl3w8iUf0gBQqVSdTEz4yin29tpOCSkLBVNC9JC0qjckpOxltSrCunW8w2iTJkC3btpODSEVj4IpIXqoSxc+u1BKCu89q03Z2QVLfk2dqpvDdQjRNAqmhOghQ0PeYxbQflXvb78BT5/yTjL6NDE5IepEwZQQPSWt6g0L44ucaINEUjAc5quvNDt0kRBdRsGUED3Vti0fy5mZCRw6pJ00HDwI3L/Px79++ql20kCILqBgSoieEgj4slWA9qp6pRPaT5hQ4hrhhLwXKJgSosekVb1//cUXFKlIZ8/yca7GxsDnn1fstQnRNRRMCdFjjRoBjRvz4TH791fstaWl0hEjABeXir02IbqGgikhek4bVb337vH2UoAPhyHkfUfBlBA9FxDA/548CSQmVsw1ly/nK3v16wfUr18x1yREl1EwJUTPubsD7drx4BYSovnrJScD27bx2zR1ICEcBVNCKgFpVe/OnZq/1k8/8aUqvb2B9u01fz1C9IFWg+nixYvRqlUrWFpawtHREf3798e9e/dKfc7WrVshEAjkNpN31sxjjGHOnDlwcXGBqakpfH19ERsbq8mXQohWDRnCF+SOiQFGjgTS0zVznYwMYO1afvubb2jqQEKktBpMT506h
YkTJ+L8+fOIiIiAWCxG9+7dkZmZWerzrKyskJSUJNsePXok93hwcDBWr16N9evX48KFCzA3N4efnx+ys7M1+XII0RpHR96OaWDAp/dr1gy4eFH91/nlFyA1FfDw4O2lhBDOUJsXP3LkiNz9rVu3wtHREZcvX0anTp1KfJ5AIICzs3OxjzHGsHLlSsyaNQv+/v4AgO3bt8PJyQnh4eEIkPbWIKSS+fJLoGVLvmTXw4e8CjYoCJg2jZdaVSUW8zVLAWDKFPWck5DKQqvB9F1paWkAAFtb21KPy8jIgJubGyQSCZo3b45FixahYcOGAIC4uDgkJyfD19dXdry1tTXatGmD6OjoYoNpTk4OcnJyZPfT39aRicViiHVhfSsdJ80jyivFqTvvWrcGLl0CJk4UIjTUADNmAMeOSfDrr/moVk21c+/eLUBCgiEcHRkCA/O0uvQbveeUQ/mmGEXyScAYYxpMS7lJJBL069cPqampOHPmTInHRUdHIzY2Fl5eXkhLS8MPP/yAqKgo/PPPP6hWrRrOnTuH9u3bIzExES6FRpIPGTIEAoEAe/fuLXLOuXPnYt68eUX279q1C2ZmZup5gYRUIMaAv/+ujk2bvJCdbQhLy1xMnHgNbdsmKX2+r77qjPh4awwbdgdDhtxXb4IJ0UFZWVkYNmwY0tLSYGVlVeqxOhNMx48fj8OHD+PMmTOopsBPaLFYDE9PTwQGBmL+/PlKBdPiSqbVq1fHixcvysxAwv8HERER6NatG4xo2RCFaDrvYmOBkSOFuHyZd4/47LN8LFsmgaK/EY8fF6BXL0OYmTE8eJAHOzu1J1Uh9J5TDuWbYtLT02Fvb1+uYKoT1byTJk3CoUOHEBUVpVAgBQAjIyM0a9YM//77LwDI2lKfPn0qF0yfPn2Kpk2bFnsOkUgEkUhU7LnpDVd+lF/K01TeNWjA58+dPRsIDgY2bRLizBkhdu8GmjQp/3mkbaWffiqAs7Pu/I/pPaccyrfyUSSPtNqblzGGSZMmISwsDH///Tfc3d0VPkd+fj5u3rwpC5zu7u5wdnbGiRMnZMekp6fjwoUL8Pb2VlvaCdEXxsbA0qVARASfQ/fOHd62umoVr74ty9Wr/LlCIV+zlBBSlFaD6cSJE7Fjxw7s2rULlpaWSE5ORnJyMt68eSM7ZuTIkZg+fbrsflBQEI4dO4aHDx/iypUr+Oijj/Do0SN8+nYxRYFAgMmTJ2PBggU4ePAgbt68iZEjR8LV1RX9+/ev6JdIiM7w9QVu3AD69uWTLkyeDPTuDTx7VvrzpIt/Dx7M108lhBSl1WC6bt06pKWloXPnznBxcZFthds1ExISkJRU0Gni1atX+Oyzz+Dp6YlevXohPT0d586dQ4MGDWTHTJs2DZ9//jnGjh2LVq1aISMjA0eOHCkyuQMh7xt7e+DAAeDnnwETE+DwYcDLCzh6tPjjHz0CpB9HmjqQkJJptc20PH2fIiMj5e6vWLECK6QNOCUQCAQICgpCUFCQKskjpFISCPhi3p068WkIb90CevQAvv4aWLQIKNx9YMUKID8f6NoVaN5ce2kmRNfR3LyEvKcaNeKzJE2axO//+CPQti1w9y6//+oVn/EIoFIpIWXRid68hBDtMDXlE9d37w58/DFw7RrQogXvnPTsGZCZyauBu3fXdkoJ0W0UTAkh6NuXd04aNQo4fhz47LOC6QJpQntCykbVvIQQAICrK++IFBwMGBryttLq1YGhQ7WdMkJ0HwVTQoiMgQEviUZH885J27YBNLafkLJRNS8hpIiWLYFdu7SdCkL0B5VMCSGEEBVRMCWEEEJURMGUEEIIUREFU0IIIURF1AGpGNJpDtPT07WcEv0gFouRlZWF9PR0WtZJQZR3yqF8Uw7lm2KkMaA8U99SMC3G69evAQDVq1fXckoIIYRo2+vXr2FtbV3qMQJWnpD7npFIJEhMTISlpSUENPVLmdLT01G9enU8fvy4zNXoiTzKO+VQvimH8k0xjDG8fv0arq6uMDAovVWUSqbFMDAwQLVq1bSdDL1jZWVFH1AlUd4ph/JNOZRv5VdWiVSKOiARQgghKqJgSgghhKiIgilRmUgkwvfffw9R4VWlSblQ3imH8k05lG+aQx2QCCGEEBVRyZQQQghREQVTQgghREUUTAkhhBAVUTAlhBBCVETBlJTL4sWL0apVK1haWsLR0RH9+/fHvXv35I7Jzs7GxIkTYWdnBwsLCwwaNAhPnz7VUop105IlSyAQCDB58mTZPsq3kj158gQfffQR7OzsYGpqisaNGyMmJkb2OGMMc+bMgYuLC0xNTeHr64vY2Fgtplj78vPzMXv2bLi7u8PU1BS1a9fG/Pnz5eaXpXxTPwqmpFxOnTqFiRMn4vz584iIiIBYLEb37t2RmZkpO+arr77CH3/8gdDQUJw6dQqJiYkYOHCgFlOtWy5duoQNGzbAy8tLbj/lW/FevXqF9u3bw8jICIcPH8bt27exfPly2NjYyI4JDg7G6tWrsX79ely4cAHm5ubw8/NDdna2FlOuXUuXLsW6deuwZs0a3LlzB0uXLkVwcDB++ukn2TGUbxrACFHCs2fPGAB26tQpxhhjqampzMjIiIWGhsqOuXPnDgPAoqOjtZVMnfH69Wvm4eHBIiIimI+PD/vyyy8ZY5Rvpfn2229Zhw4dSnxcIpEwZ2dntmzZMtm+1NRUJhKJ2O7duysiiTqpd+/ebMyYMXL7Bg4cyIYPH84Yo3zTFCqZEqWkpaUBAGxtbQEAly9fhlgshq+vr+yY+vXro0aNGoiOjtZKGnXJxIkT0bt3b7n8ASjfSnPw4EG0bNkSgwcPhqOjI5o1a4ZNmzbJHo+Li0NycrJc3llbW6NNmzbvdd61a9cOJ06cwP379wEA169fx5kzZ9CzZ08AlG+aQhPdE4VJJBJMnjwZ7du3R6NGjQAAycnJMDY2RpUqVeSOdXJyQnJyshZSqTv27NmDK1eu4NKlS0Ueo3wr2cOHD7Fu3Tp8/fXXmDFjBi5duoQvvvgCxsbGGDVqlCx/nJyc5J73vufdd999h/T0dNSvXx9CoRD5+flYuHAhhg8fDgCUbxpCwZQobOLEibh16xbOnDmj7aTovMePH+PLL79EREQETExMtJ0cvSKRSNCyZUssWrQIANCsWTPcunUL69evx6hRo7ScOt0VEhKCnTt3YteuXWjYsCGuXbuGyZMnw9XVlfJNg6ialyhk0qRJOHToEE6ePCm3TJ2zszNyc3ORmpoqd/zTp0/h7OxcwanUHZcvX8azZ8/QvHlzGBoawtDQEKdOncLq1athaGgIJycnyrcSuLi4oEGDBnL7PD09kZCQAACy/Hm35/P7nnfffPMNvvvuOwQEBKBx48YYMWIEvvrqKyxevBgA5ZumUDAl5cIYw6RJkxAWFoa///4b7u7uco+3aNECRkZGOHHihGzfvXv3kJCQAG9v74pOrs7o2rUrbt68iWvXrsm2li1bYvjw4bLblG/Fa9++fZHhV/fv34ebmxsAwN3dHc7OznJ5l56ejgsXLrzXeZeVlVVkIWuhUAiJ
RAKA8k1jtN0DiuiH8ePHM2traxYZGcmSkpJkW1ZWluyY//3vf6xGjRrs77//ZjExMczb25t5e3trMdW6qXBvXsYo30py8eJFZmhoyBYuXMhiY2PZzp07mZmZGduxY4fsmCVLlrAqVaqwAwcOsBs3bjB/f3/m7u7O3rx5o8WUa9eoUaNY1apV2aFDh1hcXBzbv38/s7e3Z9OmTZMdQ/mmfhRMSbkAKHbbsmWL7Jg3b96wCRMmMBsbG2ZmZsYGDBjAkpKStJdoHfVuMKV8K9kff/zBGjVqxEQiEatfvz7buHGj3OMSiYTNnj2bOTk5MZFIxLp27cru3bunpdTqhvT0dPbll1+yGjVqMBMTE1arVi02c+ZMlpOTIzuG8k39aAk2QgghREXUZkoIIYSoiIIpIYQQoiIKpoQQQoiKKJgSQgghKqJgSgghhKiIgikhhBCiIgqmhBBCiIoomBJCCCEqomBKCCGEqIiCKSHvgefPn2P8+PGoUaMGRCIRnJ2d4efnh7NnzwIABAIBwsPDtZtIQvQYrWdKyHtg0KBByM3NxbZt21CrVi08ffoUJ06cQEpKiraTRkilQHPzElLJpaamwsbGBpGRkfDx8SnyeM2aNfHo0SPZfTc3N8THxwMADhw4gHnz5uH27duyxaVnzpwJQ0P+O1wgEGDt2rU4ePAgIiMj4eLiguDgYHz44YcV8toI0RVUzUtIJWdhYQELCwuEh4cjJyenyOOXLl0CAGzZsgVJSUmy+6dPn8bIkSPx5Zdf4vbt29iwYQO2bt2KhQsXyj1/9uzZGDRoEK5fv47hw4cjICAAd+7c0fwLI0SHUMmUkPfA77//js8++wxv3rxB8+bN4ePjg4CAAHh5eQHgJcywsDD0799f9hxfX1907doV06dPl+3bsWMHpk2bhsTERNnz/ve//2HdunWyY9q2bYvmzZtj7dq1FfPiCNEBVDIl5D0waNAgJCYm4uDBg+jRowciIyPRvHlzbN26tcTnXL9+HUFBQbKSrYWFBT777DMkJSUhKytLdpy3t7fc87y9valkSt471AGJkPeEiYkJunXrhm7dumH27Nn49NNP8f3332P06NHFHp+RkYF58+Zh4MCBxZ6LEFKASqaEvKcaNGiAzMxMAICRkRHy8/PlHm/evDnu3buHOnXqFNkMDAq+Os6fPy/3vPPnz8PT01PzL4AQHUIlU0IquZSUFAwePBhjxoyBl5cXLC0tERMTg+DgYPj7+wPgPXpPnDiB9u3bQyQSwcbGBnPmzEGfPn1Qo0YNfPjhhzAwMMD169dx69YtLFiwQHb+0NBQtGzZEh06dMDOnTtx8eJFbN68WVsvlxDtYISQSi07O5t99913rHnz5sza2pqZmZmxevXqsVmzZrGsrCzGGGMHDx5kderUYYaGhszNzU323CNHjrB27doxU1NTZmVlxVq3bs02btwoexwA+/nnn1m3bt2YSCRiNWvWZHv37q3ol0iI1lFvXkKI0orrBUzI+4jaTAkhhBAVUTAlhBBCVEQdkAghSqNWIkI4KpkSQgghKqJgSgghhKiIgikhhBCiIgqmhBBCiIoomBJCCCEqomBKCCGEqIiCKSGEEKIiCqaEEEKIiv4PPbzwR/bq8KYAAAAASUVORK5CYII=\n"},"metadata":{}}],"source":["import pandas as pd\n","df=pd.DataFrame(trainer.state.log_history)\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","\n","# Assuming df is already defined, and train_loss and eval_loss are subsets of df\n","train_loss = df[['loss', 'step']]\n","eval_loss = df[['eval_loss', 'step']]\n","\n","# Remove NaN rows in both dataframes\n","train_loss_clean = train_loss.dropna()\n","eval_loss_clean = eval_loss.dropna()\n","\n","# Plotting the loss vs step for train_loss\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting the loss vs step for eval_loss\n","plt.figure(figsize=(5, 2))\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting both losses together\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train and Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n"]},{"cell_type":"code","source":["df"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":978},"id":"wzsaK7v-kEJf","executionInfo":{"status":"ok","timestamp":1717560169997,"user_tz":-240,"elapsed":9,"user":{"displayName":"Aditi 
Paretkar","userId":"17466297872366651006"}},"outputId":"102e3048-94b1-4ddf-8b38-d202dd2d6fec"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":[" loss grad_norm learning_rate epoch step eval_loss \\\n","0 3.3779 5.835055 0.000047 0.050378 5 NaN \n","1 2.8596 5.826276 0.000045 0.100756 10 NaN \n","2 NaN NaN NaN 0.100756 10 2.892671 \n","3 2.8063 4.150119 0.000042 0.151134 15 NaN \n","4 2.8903 3.600799 0.000040 0.201511 20 NaN \n","5 NaN NaN NaN 0.201511 20 2.818315 \n","6 2.9303 3.667494 0.000037 0.251889 25 NaN \n","7 2.9738 4.005008 0.000035 0.302267 30 NaN \n","8 NaN NaN NaN 0.302267 30 2.765197 \n","9 2.7176 3.679059 0.000032 0.352645 35 NaN \n","10 2.9033 3.467850 0.000030 0.403023 40 NaN \n","11 NaN NaN NaN 0.403023 40 2.725764 \n","12 2.5247 3.059027 0.000027 0.453401 45 NaN \n","13 2.4292 3.731794 0.000025 0.503778 50 NaN \n","14 NaN NaN NaN 0.503778 50 2.707592 \n","15 2.7186 3.835373 0.000022 0.554156 55 NaN \n","16 2.8956 4.142640 0.000020 0.604534 60 NaN \n","17 NaN NaN NaN 0.604534 60 2.696522 \n","18 2.7612 4.396478 0.000017 0.654912 65 NaN \n","19 2.9214 3.021402 0.000015 0.705290 70 NaN \n","20 NaN NaN NaN 0.705290 70 2.676208 \n","21 2.6277 3.319908 0.000012 0.755668 75 NaN \n","22 2.7345 3.356780 0.000010 0.806045 80 NaN \n","23 NaN NaN NaN 0.806045 80 2.656413 \n","24 2.7115 3.260069 0.000007 0.856423 85 NaN \n","25 2.6628 3.612410 0.000005 0.906801 90 NaN \n","26 NaN NaN NaN 0.906801 90 2.648421 \n","27 2.8393 3.872454 0.000002 0.957179 95 NaN \n","28 NaN NaN NaN 0.997481 99 NaN \n","\n"," eval_rouge2_precision eval_rouge2_recall eval_rouge2_fmeasure \\\n","0 NaN NaN NaN \n","1 NaN NaN NaN \n","2 0.1644 0.1861 0.1600 \n","3 NaN NaN NaN \n","4 NaN NaN NaN \n","5 0.1487 0.2110 0.1602 \n","6 NaN NaN NaN \n","7 NaN NaN NaN \n","8 0.1537 0.2306 0.1734 \n","9 NaN NaN NaN \n","10 NaN NaN NaN \n","11 0.1896 0.1848 0.1753 \n","12 NaN NaN NaN \n","13 NaN NaN NaN \n","14 0.2012 0.1867 0.1787 \n","15 NaN NaN NaN \n","16 NaN NaN NaN \n","17 0.1814 0.2004 0.1782 \n","18 NaN NaN NaN \n","19 NaN NaN NaN \n","20 0.1879 0.2272 0.1913 \n","21 NaN NaN NaN \n","22 NaN NaN NaN \n","23 0.1836 0.1914 0.1732 \n","24 NaN NaN NaN \n","25 NaN NaN NaN \n","26 0.1902 0.2027 0.1831 \n","27 NaN NaN NaN \n","28 NaN NaN NaN \n","\n"," eval_runtime eval_samples_per_second eval_steps_per_second \\\n","0 NaN NaN NaN \n","1 NaN NaN NaN \n","2 408.3102 0.242 0.122 \n","3 NaN NaN NaN \n","4 NaN NaN NaN \n","5 521.0521 0.190 0.096 \n","6 NaN NaN NaN \n","7 NaN NaN NaN \n","8 547.1482 0.181 0.091 \n","9 NaN NaN NaN \n","10 NaN NaN NaN \n","11 316.7331 0.313 0.158 \n","12 NaN NaN NaN \n","13 NaN NaN NaN \n","14 320.2220 0.309 0.156 \n","15 NaN NaN NaN \n","16 NaN NaN NaN \n","17 415.7496 0.238 0.120 \n","18 NaN NaN NaN \n","19 NaN NaN NaN \n","20 437.4463 0.226 0.114 \n","21 NaN NaN NaN \n","22 NaN NaN NaN \n","23 385.6734 0.257 0.130 \n","24 NaN NaN NaN \n","25 NaN NaN NaN \n","26 351.9450 0.281 0.142 \n","27 NaN NaN NaN \n","28 NaN NaN NaN \n","\n"," train_runtime train_samples_per_second train_steps_per_second \\\n","0 NaN NaN NaN \n","1 NaN NaN NaN \n","2 NaN NaN NaN \n","3 NaN NaN NaN \n","4 NaN NaN NaN \n","5 NaN NaN NaN \n","6 NaN NaN NaN \n","7 NaN NaN NaN \n","8 NaN NaN NaN \n","9 NaN NaN NaN \n","10 NaN NaN NaN \n","11 NaN NaN NaN \n","12 NaN NaN NaN \n","13 NaN NaN NaN \n","14 NaN NaN NaN \n","15 NaN NaN NaN \n","16 NaN NaN NaN \n","17 NaN NaN NaN \n","18 NaN NaN NaN \n","19 NaN NaN NaN \n","20 NaN NaN NaN \n","21 NaN NaN NaN \n","22 NaN NaN NaN \n","23 NaN NaN 
NaN \n","24 NaN NaN NaN \n","25 NaN NaN NaN \n","26 NaN NaN NaN \n","27 NaN NaN NaN \n","28 4092.0845 0.194 0.024 \n","\n"," total_flos train_loss \n","0 NaN NaN \n","1 NaN NaN \n","2 NaN NaN \n","3 NaN NaN \n","4 NaN NaN \n","5 NaN NaN \n","6 NaN NaN \n","7 NaN NaN \n","8 NaN NaN \n","9 NaN NaN \n","10 NaN NaN \n","11 NaN NaN \n","12 NaN NaN \n","13 NaN NaN \n","14 NaN NaN \n","15 NaN NaN \n","16 NaN NaN \n","17 NaN NaN \n","18 NaN NaN \n","19 NaN NaN \n","20 NaN NaN \n","21 NaN NaN \n","22 NaN NaN \n","23 NaN NaN \n","24 NaN NaN \n","25 NaN NaN \n","26 NaN NaN \n","27 NaN NaN \n","28 4.277124e+15 2.804105 "],"text/html":["\n","
\n","
\n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," 
\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
lossgrad_normlearning_rateepochstepeval_losseval_rouge2_precisioneval_rouge2_recalleval_rouge2_fmeasureeval_runtimeeval_samples_per_secondeval_steps_per_secondtrain_runtimetrain_samples_per_secondtrain_steps_per_secondtotal_flostrain_loss
03.37795.8350550.0000470.0503785NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
12.85965.8262760.0000450.10075610NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
2NaNNaNNaN0.100756102.8926710.16440.18610.1600408.31020.2420.122NaNNaNNaNNaNNaN
32.80634.1501190.0000420.15113415NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
42.89033.6007990.0000400.20151120NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
5NaNNaNNaN0.201511202.8183150.14870.21100.1602521.05210.1900.096NaNNaNNaNNaNNaN
62.93033.6674940.0000370.25188925NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
72.97384.0050080.0000350.30226730NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
8NaNNaNNaN0.302267302.7651970.15370.23060.1734547.14820.1810.091NaNNaNNaNNaNNaN
92.71763.6790590.0000320.35264535NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
102.90333.4678500.0000300.40302340NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
11NaNNaNNaN0.403023402.7257640.18960.18480.1753316.73310.3130.158NaNNaNNaNNaNNaN
122.52473.0590270.0000270.45340145NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
132.42923.7317940.0000250.50377850NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
14NaNNaNNaN0.503778502.7075920.20120.18670.1787320.22200.3090.156NaNNaNNaNNaNNaN
152.71863.8353730.0000220.55415655NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
162.89564.1426400.0000200.60453460NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
17NaNNaNNaN0.604534602.6965220.18140.20040.1782415.74960.2380.120NaNNaNNaNNaNNaN
182.76124.3964780.0000170.65491265NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
192.92143.0214020.0000150.70529070NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
20NaNNaNNaN0.705290702.6762080.18790.22720.1913437.44630.2260.114NaNNaNNaNNaNNaN
212.62773.3199080.0000120.75566875NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
222.73453.3567800.0000100.80604580NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
23NaNNaNNaN0.806045802.6564130.18360.19140.1732385.67340.2570.130NaNNaNNaNNaNNaN
242.71153.2600690.0000070.85642385NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
252.66283.6124100.0000050.90680190NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
26NaNNaNNaN0.906801902.6484210.19020.20270.1831351.94500.2810.142NaNNaNNaNNaNNaN
272.83933.8724540.0000020.95717995NaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaNNaN
28NaNNaNNaN0.99748199NaNNaNNaNNaNNaNNaNNaN4092.08450.1940.0244.277124e+152.804105
\n","
\n","
\n","\n","
\n"," \n","\n"," \n","\n"," \n","
\n","\n","\n","
\n"," \n","\n","\n","\n"," \n","
\n","\n","
\n"," \n"," \n"," \n","
\n","\n","
\n","
\n"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"dataframe","variable_name":"df","summary":"{\n \"name\": \"df\",\n \"rows\": 29,\n \"fields\": [\n {\n \"column\": \"loss\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.19964460777893472,\n \"min\": 2.4292,\n \"max\": 3.3779,\n \"num_unique_values\": 19,\n \"samples\": [\n 3.3779,\n 2.9738,\n 2.8956\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"grad_norm\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.7762348991420785,\n \"min\": 3.021402359008789,\n \"max\": 5.835054874420166,\n \"num_unique_values\": 19,\n \"samples\": [\n 5.835054874420166,\n 4.005008220672607,\n 4.142639636993408\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"learning_rate\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 1.421038974422065e-05,\n \"min\": 2.0202020202020206e-06,\n \"max\": 4.7474747474747476e-05,\n \"num_unique_values\": 19,\n \"samples\": [\n 4.7474747474747476e-05,\n 3.484848484848485e-05,\n 1.9696969696969697e-05\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"epoch\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.28604805308749326,\n \"min\": 0.05037783375314862,\n \"max\": 0.9974811083123426,\n \"num_unique_values\": 20,\n \"samples\": [\n 0.05037783375314862,\n 0.906801007556675,\n 0.8060453400503779\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"step\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 28,\n \"min\": 5,\n \"max\": 99,\n \"num_unique_values\": 20,\n \"samples\": [\n 5,\n 90,\n 80\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_loss\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.08067865853473409,\n \"min\": 2.648421049118042,\n \"max\": 2.8926708698272705,\n \"num_unique_values\": 9,\n \"samples\": [\n 2.6564126014709473,\n 2.818314790725708,\n 2.696521520614624\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_rouge2_precision\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.018015001464828633,\n \"min\": 0.1487,\n \"max\": 0.2012,\n \"num_unique_values\": 9,\n \"samples\": [\n 0.1836,\n 0.1487,\n 0.1814\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_rouge2_recall\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.017432783611473084,\n \"min\": 0.1848,\n \"max\": 0.2306,\n \"num_unique_values\": 9,\n \"samples\": [\n 0.1914,\n 0.211,\n 0.2004\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_rouge2_fmeasure\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.010030425935345139,\n \"min\": 0.16,\n \"max\": 0.1913,\n \"num_unique_values\": 9,\n \"samples\": [\n 0.1732,\n 0.1602,\n 0.1782\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_runtime\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 81.10928084615396,\n \"min\": 316.7331,\n \"max\": 547.1482,\n \"num_unique_values\": 9,\n \"samples\": [\n 385.6734,\n 521.0521,\n 415.7496\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"eval_samples_per_second\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.04688579505327576,\n \"min\": 0.181,\n \"max\": 0.313,\n \"num_unique_values\": 9,\n \"samples\": [\n 0.257,\n 0.19,\n 0.238\n ],\n \"semantic_type\": \"\",\n 
\"description\": \"\"\n }\n },\n {\n \"column\": \"eval_steps_per_second\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.023754531731393438,\n \"min\": 0.091,\n \"max\": 0.158,\n \"num_unique_values\": 9,\n \"samples\": [\n 0.13,\n 0.096,\n 0.12\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_runtime\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 4092.0845,\n \"max\": 4092.0845,\n \"num_unique_values\": 1,\n \"samples\": [\n 4092.0845\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_samples_per_second\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 0.194,\n \"max\": 0.194,\n \"num_unique_values\": 1,\n \"samples\": [\n 0.194\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_steps_per_second\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 0.024,\n \"max\": 0.024,\n \"num_unique_values\": 1,\n \"samples\": [\n 0.024\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"total_flos\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 4277124458348544.0,\n \"max\": 4277124458348544.0,\n \"num_unique_values\": 1,\n \"samples\": [\n 4277124458348544.0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"train_loss\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 2.804104708661937,\n \"max\": 2.804104708661937,\n \"num_unique_values\": 1,\n \"samples\": [\n 2.804104708661937\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n }\n ]\n}"}},"metadata":{},"execution_count":22}]},{"cell_type":"code","source":["trainer.push_to_hub()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":316,"referenced_widgets":["d999632bf34d49049b25d1709416cac2","0b2845c3feb44e2bbe7f76c7b02974cb","6c9dfce9ca334aa6826b0dd55388671c","572e24d20778432893a73ddbb45b50e2","dd60e17bbbd34c0f8acb776cbf69d752","18b137aa8150425fa2d75a9666c5a261","0a999c484fdf4cb386d44a3ac0a15c0b","3484b37f40b948a6bb647f5625440e2f","28f6113d53e14452ba5b176275cb58f8","800988fd806a4fa2acc88d46cd514f4e","5f0db6d782a24e93beee027ffabe1a06","5967cc46146b405c9659433be21f8cd0","27351fdc0a6f48a0a8ecd141e839a975","4a1f926a4da34a1bbfe6eaab11d091bf","a033f8e0ec2d4af2bd63eb286e3566ef","ca59be0f52f847e8bce9973581ded553","1ece513836c2499f9911c9ed6f568f2b","de8ebeefb2e84a2eb90c0638e06b689f","8c26438126414eb298f133a280a91f9e","c2e38584498c428fa06dff64332a725b","ad8544231a0344b2a2f2cb8f2ef96cf9","29a9cf933da744d6bc121fb46da1028e","f20ec95a5aff4401aec9ee1d0e853a0b","4b6b487296784be1a745e1bfe81d189e","e2d37ae21cb442fb89cced88d2f8f0ea","3fc0c96684004865929ef4e38b538052","668d23c3712b4279aff7e2f0158cb4fb","e076c7d3346648568b01deb87ae005b2","c59a45dbcb9c48b08737112581392f60","30ed5f9d393c4e99b5aacd9a3e9c5dfc","2f410dcfe6ae4f60b03080cb8fd28821","d2bff27f30624e91b20b7d9159cc0331","7c669e83979a49278748296bcc628d93","d0c1e17120094d21ab8ff772270a8ca4","28c15bdfb1d1460da0b5ba3e914f9583","f28dec362fcc4970b0a7b7309b007053","2d76e4a40ba14424a5bfe9e012bd6a6a","9f7e5078ff4144d5a599eaf936b25224","ed46ddcd4c304acbbbb8ad6b635de42d","6b9abbffd6e2421bacf8210a746ce6f7","762e65b91a9f4994bae814be1b0a5d3d","a0afe5d41228439db467bfeb02346952","1774fc2c0cab46f3b58f460eb1e7a66c","e0e65d47015c46228eb1857c2052d643","0924bb0049ca49218de2d6c26ddfa0de","1d6d0c4129144b8f81db003b076cb6bd","a6ea44aa9fc84818bdcedbd08ad56d2f","c66fcba04c8f4b1dbf58c30b
a87590e3","9ce160aa317e443997701cf2105cc89e","d5d3d91554b147e1990d193eb808164a","55d28d26c40440b794bef2c6b1a9f87b","1fd73e95beb743a1b2da98177371f08d","45ecbc0a52034d3ea6d9be3f13f14574","6d0599a6f6f84ce09074a4a1e0b3775f","d8d2cba6abe84824a605681d6cdda210","d0d68d60ac6b4f24bbeca5605d9dd0cc","942dcad3dcbc4a4f9dceb64fac2556c4","fa31f63819ae49cfbac58bf0797ead58","bc04e9d510b04e0cade22e77d5cff623","352317756a5d47f584fe603c44c93776","fb1fac9bbc68420fbdec8ececdcc73f2","7d5872a4e89045deb22fe4b63d4e802c","0c8e48a6acf94586a3acd5c614c97740","4cbab7ba3f404b29a2a514305e089162","b911acb5f18446f0aa5c3413a6b4339a","3a890c9327ba4bd59c3ccd5d4a7a76d1"]},"id":"BsPnIUQ5i3Tj","executionInfo":{"status":"ok","timestamp":1717559944905,"user_tz":-240,"elapsed":89765,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"4ab80405-5c50-4733-fe52-aee0b18f4fe0"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 2, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3}\n"]},{"output_type":"display_data","data":{"text/plain":["mnist_test.csv: 0%| | 0.00/18.3M [00:00=2.0.0 (from evaluate)\n"," Downloading datasets-2.19.2-py3-none-any.whl (542 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m542.1/542.1 kB\u001b[0m \u001b[31m12.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from evaluate) (1.25.2)\n","Collecting dill (from evaluate)\n"," Downloading dill-0.3.8-py3-none-any.whl (116 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m14.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from evaluate) (2.0.3)\n","Requirement already satisfied: requests>=2.19.0 in /usr/local/lib/python3.10/dist-packages (from evaluate) (2.31.0)\n","Requirement already satisfied: tqdm>=4.62.1 in /usr/local/lib/python3.10/dist-packages (from evaluate) (4.66.4)\n","Collecting xxhash (from evaluate)\n"," Downloading xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m26.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting multiprocess (from evaluate)\n"," Downloading multiprocess-0.70.16-py310-none-any.whl (134 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m19.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: fsspec[http]>=2021.05.0 in /usr/local/lib/python3.10/dist-packages (from evaluate) (2023.6.0)\n","Requirement already satisfied: huggingface-hub>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from evaluate) (0.23.2)\n","Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from evaluate) (24.0)\n","Requirement already satisfied: filelock in 
/usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (3.14.0)\n","Requirement already satisfied: pyarrow>=12.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (14.0.2)\n","Requirement already satisfied: pyarrow-hotfix in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (0.6)\n","Collecting requests>=2.19.0 (from evaluate)\n"," Downloading requests-2.32.3-py3-none-any.whl (64 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.9/64.9 kB\u001b[0m \u001b[31m8.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (3.9.5)\n","Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from datasets>=2.0.0->evaluate) (6.0.1)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub>=0.7.0->evaluate) (4.12.1)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.19.0->evaluate) (2024.6.2)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas->evaluate) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->evaluate) (2023.4)\n","Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas->evaluate) (2024.1)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.3.1)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (23.2.0)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.4.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (6.0.5)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.9.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (4.0.3)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas->evaluate) (1.16.0)\n","Installing collected packages: xxhash, requests, dill, multiprocess, datasets, evaluate\n"," Attempting uninstall: requests\n"," Found existing installation: requests 2.31.0\n"," Uninstalling requests-2.31.0:\n"," Successfully uninstalled requests-2.31.0\n","\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n","google-colab 1.0.0 requires requests==2.31.0, but you have requests 2.32.3 which is incompatible.\u001b[0m\u001b[31m\n","\u001b[0mSuccessfully installed datasets-2.19.2 dill-0.3.8 evaluate-0.4.2 multiprocess-0.70.16 requests-2.32.3 xxhash-3.4.1\n"]},{"output_type":"display_data","data":{"application/vnd.colab-display-data+json":{"pip_warning":{"packages":["requests"]},"id":"97e63df5d38e4b4c9a8e70b352f19cf2"}},"metadata":{}}]},{"source":["!pip install rouge_score"],"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"JhnsXhiG2CWs","executionInfo":{"status":"ok","timestamp":1717650596760,"user_tz":-240,"elapsed":8336,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"f207893b-f4a1-471f-bf0b-c28b88432224"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting rouge_score\n"," Downloading rouge_score-0.1.2.tar.gz (17 kB)\n"," Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: absl-py in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.4.0)\n","Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (from rouge_score) (3.8.1)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.25.2)\n","Requirement already satisfied: six>=1.14.0 in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.16.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (1.4.2)\n","Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (2024.5.15)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (4.66.4)\n","Building wheels for collected packages: rouge_score\n"," Building wheel for rouge_score (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n"," Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24933 sha256=4dec86e9989dd3346fc5ca747861d5d8878e5c11037e272228a4bbdc090f7762\n"," Stored in directory: /root/.cache/pip/wheels/5f/dd/89/461065a73be61a532ff8599a28e9beef17985c9e9c31e541b4\n","Successfully built rouge_score\n","Installing collected packages: rouge_score\n","Successfully installed rouge_score-0.1.2\n"]}]},{"cell_type":"code","source":["from evaluate import load\n","# Load the ROUGE metric\n","import evaluate\n","rouge = evaluate.load('rouge')"],"metadata":{"id":"RdsN4yTZ1dgn"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["candidates = [generated_summary]\n","\n","references = [[target_text]\n"," ]\n","results = rouge.compute(predictions=candidates, references=references)\n","print(results)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"AQl7VcgT2NSM","executionInfo":{"status":"ok","timestamp":1717650608844,"user_tz":-240,"elapsed":1117,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}},"outputId":"31005642-55de-4739-e558-00e0cc5b189e"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["{'rouge1': 0.5520581113801452, 'rouge2': 0.3065693430656935, 'rougeL': 0.37772397094430993, 'rougeLsum': 0.37772397094430993}\n"]}]}],"metadata":{"colab":{"provenance":[],"machine_shape":"hm","gpuType":"L4"},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.12.3"},"widgets":{"application/vnd.jupyter.widget-state+json":{"d707336ba2a2484c92096b5b899cb116":{"model_module":"@jupyter-widgets/controls","model_name":"VBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"VBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"VBoxView","box_style":"","children":["IPY_MODEL_0106ccb8577449438d19914bd6ce9b46","IPY_MODEL_328f40de95e64dacb377f5e9b26a3ddd","IPY_MODEL_9e53fe07a0474994a9fdc1e114b15736","IPY_MODEL_84b2d4416b6b44af8ea944b9c166f5e8"],"layout":"IPY_MODEL_09719eacb5f548eaa14eefd7a1c5bafc"}},"f3b05648e2454fb9a0ba73e811168fe2":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c77cacf9373549a592427302b88f6f69","placeholder":"​","style":"IPY_MODEL_87eabbfd9f5d4073ba1b9d791bd033db","value":"

Copy a token from your Hugging Face\ntokens page and paste it below.
Immediately click login after copying\nyour token or it might be stored in plain text in this notebook file.
"}},"470c59293da044af8dcd6172b9c4b644":{"model_module":"@jupyter-widgets/controls","model_name":"PasswordModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"PasswordModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"PasswordView","continuous_update":true,"description":"Token:","description_tooltip":null,"disabled":false,"layout":"IPY_MODEL_b1c45cd6b3e94725b990489b5358659b","placeholder":"​","style":"IPY_MODEL_9257f49a84814ce598f6a2d8fc823d84","value":""}},"1d501f5c1ff848d48acd0afb8fe73f25":{"model_module":"@jupyter-widgets/controls","model_name":"CheckboxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"CheckboxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"CheckboxView","description":"Add token as git credential?","description_tooltip":null,"disabled":false,"indent":true,"layout":"IPY_MODEL_e67fd40dea464b07ad0215f9407484b7","style":"IPY_MODEL_dfa1c3a7408d4f28b69fcca36825de67","value":true}},"9558026104af44f1ad274fc74331e8d9":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ButtonView","button_style":"","description":"Login","disabled":false,"icon":"","layout":"IPY_MODEL_5896b3bc9b4e408cb672b2f97582502f","style":"IPY_MODEL_52c13934f9b843449f988d053a4fe10e","tooltip":""}},"f5da8365bcf14404baee1fbc8650b6cc":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_00dcc8ba4cc64dbaad81959f3401cebe","placeholder":"​","style":"IPY_MODEL_4265c57b7ba94f7cab5d43937163c3d3","value":"\nPro Tip: If you don't already have one, you can create a dedicated\n'notebooks' token with 'write' access, that you can then easily reuse for all\nnotebooks. 
"}},"09719eacb5f548eaa14eefd7a1c5bafc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":"center","align_self":null,"border":null,"bottom":null,"display":"flex","flex":null,"flex_flow":"column","grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":"50%"}},"c77cacf9373549a592427302b88f6f69":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"87eabbfd9f5d4073ba1b9d791bd033db":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b1c45cd6b3e94725b990489b5358659b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9257f49a84814ce5
98f6a2d8fc823d84":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e67fd40dea464b07ad0215f9407484b7":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dfa1c3a7408d4f28b69fcca36825de67":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"5896b3bc9b4e408cb672b2f97582502f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"52c13934f9b843449f988d053a4fe10e":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","button_color":null,"font_weight":""}},"00dcc8ba4cc64dbaad81959f3401cebe":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_vi
ew_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4265c57b7ba94f7cab5d43937163c3d3":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"388b6af2d5f54d8eb074f9fb1227ea09":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_bedce15de5ea458c8337812bd1161817","placeholder":"​","style":"IPY_MODEL_cacf92179a6149838f746701e12b5d5a","value":"Connecting..."}},"bedce15de5ea458c8337812bd1161817":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cacf92179a6149838f746701e12b5d5a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0106ccb8577449438d19914bd6ce9b46":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_versi
on":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_aa26bf362ce54bb3909b0566cdb2e2f3","placeholder":"​","style":"IPY_MODEL_93c5a15ed4d544318bf3771570f6b8f1","value":"Token is valid (permission: write)."}},"328f40de95e64dacb377f5e9b26a3ddd":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a9ffd18480f54a1ab2d016d941b9228e","placeholder":"​","style":"IPY_MODEL_7c455af3d5c042e7997c8f430723cd48","value":"Your token has been saved in your configured git credential helpers (store)."}},"9e53fe07a0474994a9fdc1e114b15736":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_9a0c5a882aef461a938552ada0e8b341","placeholder":"​","style":"IPY_MODEL_208f9e4053b14faca33c9087f3bb2285","value":"Your token has been saved to /root/.cache/huggingface/token"}},"84b2d4416b6b44af8ea944b9c166f5e8":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_4a1002ad27054b67adeecdf277333c59","placeholder":"​","style":"IPY_MODEL_e34bead62563466eacb4e67af54db464","value":"Login 
successful"}},"aa26bf362ce54bb3909b0566cdb2e2f3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"93c5a15ed4d544318bf3771570f6b8f1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a9ffd18480f54a1ab2d016d941b9228e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7c455af3d5c042e7997c8f430723cd48":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9a0c5a882aef461a938552ada0e8b341":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":n
ull,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"208f9e4053b14faca33c9087f3bb2285":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4a1002ad27054b67adeecdf277333c59":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e34bead62563466eacb4e67af54db464":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d999632bf34d49049b25d1709416cac2":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_0b2845c3feb44e2bbe7f76c7b02974cb","IPY_MODEL_6c9dfce9ca334aa6826b0dd55388671c","IPY_MODEL_572e24d20778432893a73ddbb45b50e2"],"layout":"IPY_MODEL_dd60e17bbbd34c0f8acb776cbf69d752"}},"0b2845c3feb44e2bbe7f76c7b02974cb":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_18b137aa8150425fa2d75a9666c5a261","placeholder":"​","style":"IPY_MODEL_0a999c484fdf4cb386d44a3ac0a15c0b","value":"mnist_test.csv: 
100%"}},"6c9dfce9ca334aa6826b0dd55388671c":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_3484b37f40b948a6bb647f5625440e2f","max":18289443,"min":0,"orientation":"horizontal","style":"IPY_MODEL_28f6113d53e14452ba5b176275cb58f8","value":18289443}},"572e24d20778432893a73ddbb45b50e2":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_800988fd806a4fa2acc88d46cd514f4e","placeholder":"​","style":"IPY_MODEL_5f0db6d782a24e93beee027ffabe1a06","value":" 18.3M/18.3M [00:03<00:00, 17.8MB/s]"}},"dd60e17bbbd34c0f8acb776cbf69d752":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"18b137aa8150425fa2d75a9666c5a261":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0a999c484fdf4cb386d44a3ac0a15c0b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.
5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"3484b37f40b948a6bb647f5625440e2f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"28f6113d53e14452ba5b176275cb58f8":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"800988fd806a4fa2acc88d46cd514f4e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5f0db6d782a24e93beee027ffabe1a06":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"5967cc46146b405c9659433be21f8cd0":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_27351fdc0a6f48a0a8ecd141e839a975","IPY_MODEL_4a1f
926a4da34a1bbfe6eaab11d091bf","IPY_MODEL_a033f8e0ec2d4af2bd63eb286e3566ef"],"layout":"IPY_MODEL_ca59be0f52f847e8bce9973581ded553"}},"27351fdc0a6f48a0a8ecd141e839a975":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1ece513836c2499f9911c9ed6f568f2b","placeholder":"​","style":"IPY_MODEL_de8ebeefb2e84a2eb90c0638e06b689f","value":"events.out.tfevents.1717555628.79975f1af372.8191.0: 100%"}},"4a1f926a4da34a1bbfe6eaab11d091bf":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_8c26438126414eb298f133a280a91f9e","max":13752,"min":0,"orientation":"horizontal","style":"IPY_MODEL_c2e38584498c428fa06dff64332a725b","value":13752}},"a033f8e0ec2d4af2bd63eb286e3566ef":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ad8544231a0344b2a2f2cb8f2ef96cf9","placeholder":"​","style":"IPY_MODEL_29a9cf933da744d6bc121fb46da1028e","value":" 13.8k/13.8k [00:00<00:00, 
39.8kB/s]"}},"ca59be0f52f847e8bce9973581ded553":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1ece513836c2499f9911c9ed6f568f2b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"de8ebeefb2e84a2eb90c0638e06b689f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8c26438126414eb298f133a280a91f9e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c2e38584498c428fa0
6dff64332a725b":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"ad8544231a0344b2a2f2cb8f2ef96cf9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"29a9cf933da744d6bc121fb46da1028e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f20ec95a5aff4401aec9ee1d0e853a0b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_4b6b487296784be1a745e1bfe81d189e","IPY_MODEL_e2d37ae21cb442fb89cced88d2f8f0ea","IPY_MODEL_3fc0c96684004865929ef4e38b538052"],"layout":"IPY_MODEL_668d23c3712b4279aff7e2f0158cb4fb"}},"4b6b487296784be1a745e1bfe81d189e":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e076c7d3346648568b01deb87ae005b2","placeholder":"​","style":"IPY_MODEL_c59a45dbcb9c48b08737112581392f60","value":"Upload 5 LFS files: 
100%"}},"e2d37ae21cb442fb89cced88d2f8f0ea":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_30ed5f9d393c4e99b5aacd9a3e9c5dfc","max":5,"min":0,"orientation":"horizontal","style":"IPY_MODEL_2f410dcfe6ae4f60b03080cb8fd28821","value":5}},"3fc0c96684004865929ef4e38b538052":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_d2bff27f30624e91b20b7d9159cc0331","placeholder":"​","style":"IPY_MODEL_7c669e83979a49278748296bcc628d93","value":" 5/5 [00:32<00:00, 32.64s/it]"}},"668d23c3712b4279aff7e2f0158cb4fb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e076c7d3346648568b01deb87ae005b2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c59a45dbcb9c48b08737112581392f60":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"D
escriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"30ed5f9d393c4e99b5aacd9a3e9c5dfc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2f410dcfe6ae4f60b03080cb8fd28821":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"d2bff27f30624e91b20b7d9159cc0331":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7c669e83979a49278748296bcc628d93":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d0c1e17120094d21ab8ff772270a8ca4":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_28c15bdfb1d1460da0b5ba3e914f9583","IPY_MODEL_f28dec362fcc4970b0a7b7309
b007053","IPY_MODEL_2d76e4a40ba14424a5bfe9e012bd6a6a"],"layout":"IPY_MODEL_9f7e5078ff4144d5a599eaf936b25224"}},"28c15bdfb1d1460da0b5ba3e914f9583":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ed46ddcd4c304acbbbb8ad6b635de42d","placeholder":"​","style":"IPY_MODEL_6b9abbffd6e2421bacf8210a746ce6f7","value":"mnist_train_small.csv: 100%"}},"f28dec362fcc4970b0a7b7309b007053":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_762e65b91a9f4994bae814be1b0a5d3d","max":36523880,"min":0,"orientation":"horizontal","style":"IPY_MODEL_a0afe5d41228439db467bfeb02346952","value":36523880}},"2d76e4a40ba14424a5bfe9e012bd6a6a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1774fc2c0cab46f3b58f460eb1e7a66c","placeholder":"​","style":"IPY_MODEL_e0e65d47015c46228eb1857c2052d643","value":" 36.5M/36.5M [00:06<00:00, 
4.16MB/s]"}},"9f7e5078ff4144d5a599eaf936b25224":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ed46ddcd4c304acbbbb8ad6b635de42d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6b9abbffd6e2421bacf8210a746ce6f7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"762e65b91a9f4994bae814be1b0a5d3d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a0afe5d41228439db4
67bfeb02346952":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"1774fc2c0cab46f3b58f460eb1e7a66c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e0e65d47015c46228eb1857c2052d643":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0924bb0049ca49218de2d6c26ddfa0de":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_1d6d0c4129144b8f81db003b076cb6bd","IPY_MODEL_a6ea44aa9fc84818bdcedbd08ad56d2f","IPY_MODEL_c66fcba04c8f4b1dbf58c30ba87590e3"],"layout":"IPY_MODEL_9ce160aa317e443997701cf2105cc89e"}},"1d6d0c4129144b8f81db003b076cb6bd":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_d5d3d91554b147e1990d193eb808164a","placeholder":"​","style":"IPY_MODEL_55d28d26c40440b794bef2c6b1a9f87b","value":"training_args.bin: 
100%"}},"a6ea44aa9fc84818bdcedbd08ad56d2f":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_1fd73e95beb743a1b2da98177371f08d","max":5176,"min":0,"orientation":"horizontal","style":"IPY_MODEL_45ecbc0a52034d3ea6d9be3f13f14574","value":5176}},"c66fcba04c8f4b1dbf58c30ba87590e3":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_6d0599a6f6f84ce09074a4a1e0b3775f","placeholder":"​","style":"IPY_MODEL_d8d2cba6abe84824a605681d6cdda210","value":" 5.18k/5.18k [00:00<00:00, 15.8kB/s]"}},"9ce160aa317e443997701cf2105cc89e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d5d3d91554b147e1990d193eb808164a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"55d28d26c40440b794bef2c6b1a9f87b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_m
odel_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"1fd73e95beb743a1b2da98177371f08d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"45ecbc0a52034d3ea6d9be3f13f14574":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"6d0599a6f6f84ce09074a4a1e0b3775f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d8d2cba6abe84824a605681d6cdda210":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d0d68d60ac6b4f24bbeca5605d9dd0cc":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_942dcad3dcbc4a4f9dceb64fac2556c4","IPY_MODEL_fa31f63819ae
49cfbac58bf0797ead58","IPY_MODEL_bc04e9d510b04e0cade22e77d5cff623"],"layout":"IPY_MODEL_352317756a5d47f584fe603c44c93776"}},"942dcad3dcbc4a4f9dceb64fac2556c4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fb1fac9bbc68420fbdec8ececdcc73f2","placeholder":"​","style":"IPY_MODEL_7d5872a4e89045deb22fe4b63d4e802c","value":"model.safetensors: 100%"}},"fa31f63819ae49cfbac58bf0797ead58":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_0c8e48a6acf94586a3acd5c614c97740","max":647614116,"min":0,"orientation":"horizontal","style":"IPY_MODEL_4cbab7ba3f404b29a2a514305e089162","value":647614116}},"bc04e9d510b04e0cade22e77d5cff623":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_b911acb5f18446f0aa5c3413a6b4339a","placeholder":"​","style":"IPY_MODEL_3a890c9327ba4bd59c3ccd5d4a7a76d1","value":" 648M/648M [00:32<00:00, 
22.5MB/s]"}},"352317756a5d47f584fe603c44c93776":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fb1fac9bbc68420fbdec8ececdcc73f2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7d5872a4e89045deb22fe4b63d4e802c":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0c8e48a6acf94586a3acd5c614c97740":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4cbab7ba3f404b29a2
a514305e089162":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"b911acb5f18446f0aa5c3413a6b4339a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3a890c9327ba4bd59c3ccd5d4a7a76d1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"5848c165bbcc43798510a4b7de6ac025":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_b0bb01a086014c899d02218b52dbb587","IPY_MODEL_678580242fcc493db844628c9f7ceeaa","IPY_MODEL_46c4fdeedf494a688132c40d799a304d"],"layout":"IPY_MODEL_cff98db809994469961760bd2b8a9bff"}},"b0bb01a086014c899d02218b52dbb587":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_7b1d3959e1924f70891bb710680e38ae","placeholder":"​","style":"IPY_MODEL_0db0b115f0374072a664abe056291d2c","value":"config.json: 
100%"}},"678580242fcc493db844628c9f7ceeaa":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_4d37806af79b44ee9b47bae73432ddc3","max":1282,"min":0,"orientation":"horizontal","style":"IPY_MODEL_3935523fe3434e17ac46ec2bca0d2591","value":1282}},"46c4fdeedf494a688132c40d799a304d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a7b93b0d13294061bbd5f4900bb67522","placeholder":"​","style":"IPY_MODEL_e8a56214e9584fa0abfd72db0d8bff93","value":" 1.28k/1.28k [00:00<00:00, 101kB/s]"}},"cff98db809994469961760bd2b8a9bff":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7b1d3959e1924f70891bb710680e38ae":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0db0b115f0374072a664abe056291d2c":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_mo
del_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4d37806af79b44ee9b47bae73432ddc3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3935523fe3434e17ac46ec2bca0d2591":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a7b93b0d13294061bbd5f4900bb67522":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e8a56214e9584fa0abfd72db0d8bff93":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0f98696a4f8d496cb5d9122a361df917":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_08a8e634ca4e4d60be998bf85e50d3d5","IPY_MODEL_d9425277e0b04
ab986adab8058849336","IPY_MODEL_eb1e576545194454b26d14376f71ef98"],"layout":"IPY_MODEL_a1736f19e9e14787a9b90075a1f59b51"}},"08a8e634ca4e4d60be998bf85e50d3d5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_009cbbd4286b448693a181757a39409c","placeholder":"​","style":"IPY_MODEL_74d49d89ac934d1c87e7fbd39f456943","value":"model.safetensors: 100%"}},"d9425277e0b04ab986adab8058849336":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_7b464cd62b2246038aa163cb68be98d5","max":647614116,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6263de0a2ca647cb83d9e49fc820db92","value":647614116}},"eb1e576545194454b26d14376f71ef98":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e3f0b803c7634adea791d6ef00fe4ff9","placeholder":"​","style":"IPY_MODEL_e326f9fe55724d82b4ba94820ca8bf39","value":" 648M/648M [00:28<00:00, 
21.8MB/s]"}},"a1736f19e9e14787a9b90075a1f59b51":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"009cbbd4286b448693a181757a39409c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"74d49d89ac934d1c87e7fbd39f456943":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7b464cd62b2246038aa163cb68be98d5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6263de0a2ca647cb83
d9e49fc820db92":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"e3f0b803c7634adea791d6ef00fe4ff9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e326f9fe55724d82b4ba94820ca8bf39":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4c35efb2cd264fe0b162fc52a027076d":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_5d2d2f56da0e439a92ffaa15d12afed9","IPY_MODEL_4fe8644c63e54417af9823a1d18d1551","IPY_MODEL_99b426841b0642b28e6a25569e2153e0"],"layout":"IPY_MODEL_57a295f308544000aac01beaa936a108"}},"5d2d2f56da0e439a92ffaa15d12afed9":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_48d046c9253846598a8585227dbf2c2e","placeholder":"​","style":"IPY_MODEL_3c9e8dc215f74d03848cc6696e01e869","value":"generation_config.json: 
100%"}},"4fe8644c63e54417af9823a1d18d1551":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_e2b3cd22c179449d99e3a92c90c9737b","max":295,"min":0,"orientation":"horizontal","style":"IPY_MODEL_ba84a501d78948fd8a19cddd52b61fb0","value":295}},"99b426841b0642b28e6a25569e2153e0":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e4c18749e7d24332bef7da70d876aff9","placeholder":"​","style":"IPY_MODEL_4b6d32887f0b497499c78af6690cacc2","value":" 295/295 [00:00<00:00, 30.3kB/s]"}},"57a295f308544000aac01beaa936a108":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"48d046c9253846598a8585227dbf2c2e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3c9e8dc215f74d03848cc6696e01e869":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e2b3cd22c179449d99e3a92c90c9737b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ba84a501d78948fd8a19cddd52b61fb0":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"e4c18749e7d24332bef7da70d876aff9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4b6d32887f0b497499c78af6690cacc2":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b8c1e1aa424e487e86df0f425da849e1":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_d40dec6af89e47a295e4b48d5c8f4734","IPY_MODEL_7e14c561413d4a548a
dd644cf765e2e9","IPY_MODEL_4e479c7ac61541c2a0bada43886da71a"],"layout":"IPY_MODEL_740bc7bb7fb44c7d980c0297bcd7bfb6"}},"d40dec6af89e47a295e4b48d5c8f4734":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_0a231f9c3a77421fb91b439bd20d4f10","placeholder":"​","style":"IPY_MODEL_5cc1120ded8f4e91ac5185b6bdedd5e7","value":"tokenizer_config.json: 100%"}},"7e14c561413d4a548add644cf765e2e9":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_0090c0e19c9c4d39b0c98ec758a3d72c","max":1212,"min":0,"orientation":"horizontal","style":"IPY_MODEL_3898ab3c5dce4176ae50bba9c09623d7","value":1212}},"4e479c7ac61541c2a0bada43886da71a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a430886fb5d745c7834541a8687fe245","placeholder":"​","style":"IPY_MODEL_781d34031f954393af04fd0b60911ddc","value":" 1.21k/1.21k [00:00<00:00, 
106kB/s]"}},"740bc7bb7fb44c7d980c0297bcd7bfb6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0a231f9c3a77421fb91b439bd20d4f10":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5cc1120ded8f4e91ac5185b6bdedd5e7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0090c0e19c9c4d39b0c98ec758a3d72c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3898ab3c5dce4176ae5
0bba9c09623d7":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a430886fb5d745c7834541a8687fe245":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"781d34031f954393af04fd0b60911ddc":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"3584a735ee284f0b8c88757813efaf86":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_c947203568b94464a0747ffb43218dd5","IPY_MODEL_88de79a33e144481b067296c2fec3a13","IPY_MODEL_12129bf7c47b4a7e8e98a7dbcf574c91"],"layout":"IPY_MODEL_0af043937ca949edbcd703fc5474e496"}},"c947203568b94464a0747ffb43218dd5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_cf5fb501633c428bbcb96b1a28ecfbe5","placeholder":"​","style":"IPY_MODEL_4befd48f393242b5a3e29b7c2bf57e88","value":"vocab.json: 
100%"}},"88de79a33e144481b067296c2fec3a13":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_c99dedbeffb74a63915ed8f760d085eb","max":798293,"min":0,"orientation":"horizontal","style":"IPY_MODEL_d0a3c5c77a864c6fba65dd7293c1fab8","value":798293}},"12129bf7c47b4a7e8e98a7dbcf574c91":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a05516bba01e42c7be5dc4105e128fd3","placeholder":"​","style":"IPY_MODEL_7c82336ed7c0447cb5a5da96ea09cf23","value":" 798k/798k [00:00<00:00, 3.26MB/s]"}},"0af043937ca949edbcd703fc5474e496":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cf5fb501633c428bbcb96b1a28ecfbe5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4befd48f393242b5a3e29b7c2bf57e88":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","
_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"c99dedbeffb74a63915ed8f760d085eb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d0a3c5c77a864c6fba65dd7293c1fab8":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a05516bba01e42c7be5dc4105e128fd3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7c82336ed7c0447cb5a5da96ea09cf23":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e70bad9f3772496ba72f895943d58444":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_52a85c5a71cb4866ac60d5e2f92b836f","IPY_MODEL_d486b1add0
58420fbe2a53f2c807de35","IPY_MODEL_c0dab47729c344aabfd58a453b50b7ee"],"layout":"IPY_MODEL_b61ff126cc644faebf2a34b407d476ee"}},"52a85c5a71cb4866ac60d5e2f92b836f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5f9abb070af84d6c9ffb1747238f8823","placeholder":"​","style":"IPY_MODEL_c9affc7ef61e4afdb209f564a36a82a7","value":"merges.txt: 100%"}},"d486b1add058420fbe2a53f2c807de35":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_b3863d62e3684d849f8ce78f9ca313bb","max":456318,"min":0,"orientation":"horizontal","style":"IPY_MODEL_aef0502ee8134d8ab2f5fc936271e573","value":456318}},"c0dab47729c344aabfd58a453b50b7ee":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_d9515470f22743fbac42773ca79562bb","placeholder":"​","style":"IPY_MODEL_dac9b42265b14eea89cbb1849e99ec93","value":" 456k/456k [00:00<00:00, 
25.2MB/s]"}},"b61ff126cc644faebf2a34b407d476ee":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5f9abb070af84d6c9ffb1747238f8823":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c9affc7ef61e4afdb209f564a36a82a7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b3863d62e3684d849f8ce78f9ca313bb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"aef0502ee8134d8ab2
f5fc936271e573":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"d9515470f22743fbac42773ca79562bb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dac9b42265b14eea89cbb1849e99ec93":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4e21bf2dbe11493bae594034557e7392":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_0db8ee02ff0e4a68944999a7c46560ab","IPY_MODEL_681c5b9d90bf48d699024d829454934f","IPY_MODEL_ba94513db6c4472b962c8112edbfc49d"],"layout":"IPY_MODEL_415943e0edef48d5b8aecadd92bf370a"}},"0db8ee02ff0e4a68944999a7c46560ab":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5e7b8b180b56426183901fa4699e79ad","placeholder":"​","style":"IPY_MODEL_100286eff7cb4c8bbf0b4102263b7cc1","value":"tokenizer.json: 
100%"}},"681c5b9d90bf48d699024d829454934f":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_59ef85a3460c49008caf08c1885b708a","max":2108907,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6c79cfcd1d0741eca4f443fc78edc7ae","value":2108907}},"ba94513db6c4472b962c8112edbfc49d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_eda9fa293e1140388cdf1370cedeb3ab","placeholder":"​","style":"IPY_MODEL_2dc9b6f6a972438ba94ead2cc3d00530","value":" 2.11M/2.11M [00:00<00:00, 4.41MB/s]"}},"415943e0edef48d5b8aecadd92bf370a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5e7b8b180b56426183901fa4699e79ad":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"100286eff7cb4c8bbf0b4102263b7cc1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.
0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"59ef85a3460c49008caf08c1885b708a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6c79cfcd1d0741eca4f443fc78edc7ae":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"eda9fa293e1140388cdf1370cedeb3ab":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2dc9b6f6a972438ba94ead2cc3d00530":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"833988820b52407b8ce129ae243aa676":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_73ec1d8dab594d1dab9a653171ecdd0f","IPY_MODEL_c5df12
a2a6f64d6fb48ebb1e9bdfc3b7","IPY_MODEL_732fd4d0981c441995aafd0cdf4e7abe"],"layout":"IPY_MODEL_3861f745da6a40f6b34006fef99dcec4"}},"73ec1d8dab594d1dab9a653171ecdd0f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_13ecf0bb82e543e6bb4a84d1d8b7503d","placeholder":"​","style":"IPY_MODEL_7b5df58fac6d4f7f8070edc41015e5c8","value":"special_tokens_map.json: 100%"}},"c5df12a2a6f64d6fb48ebb1e9bdfc3b7":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_543e9cde20cd4e6ab033864819544057","max":957,"min":0,"orientation":"horizontal","style":"IPY_MODEL_20d0f57828fe4084b7a5c0e5e4aff10d","value":957}},"732fd4d0981c441995aafd0cdf4e7abe":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_716a26014b754e599bc8dda94617c87f","placeholder":"​","style":"IPY_MODEL_f288c94806f6490589640a7fba4268f0","value":" 957/957 [00:00<00:00, 
89.8kB/s]"}},"3861f745da6a40f6b34006fef99dcec4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"13ecf0bb82e543e6bb4a84d1d8b7503d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7b5df58fac6d4f7f8070edc41015e5c8":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"543e9cde20cd4e6ab033864819544057":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"20d0f57828fe4084b7
a5c0e5e4aff10d":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"716a26014b754e599bc8dda94617c87f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f288c94806f6490589640a7fba4268f0":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8a3e9dd6daf34d91a15bb6e43cff636a":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_43b2a10fec334ba288ccf54352f9f776","IPY_MODEL_a2e363f07a0f4782a994647fbe79dc63","IPY_MODEL_f554eb1028aa4acea0682bf91eb50888"],"layout":"IPY_MODEL_bc38646d4526409fa2db2b824300183a"}},"43b2a10fec334ba288ccf54352f9f776":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_373116608e6841e2b8925fedfe965999","placeholder":"​","style":"IPY_MODEL_4340ee44c4d744889e06db593add65b9","value":"tokenizer_config.json: 
100%"}},"a2e363f07a0f4782a994647fbe79dc63":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_f5548040d38647d3bf07b3350cef2061","max":27,"min":0,"orientation":"horizontal","style":"IPY_MODEL_61052c7e4b0746efb44320fe842ca285","value":27}},"f554eb1028aa4acea0682bf91eb50888":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_135a07ddb4f54cb1b0e10d4495b8a576","placeholder":"​","style":"IPY_MODEL_322277ce73dc4ad6806f4b17b137a62a","value":" 27.0/27.0 [00:00<00:00, 2.50kB/s]"}},"bc38646d4526409fa2db2b824300183a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"373116608e6841e2b8925fedfe965999":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4340ee44c4d744889e06db593add65b9":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f5548040d38647d3bf07b3350cef2061":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"61052c7e4b0746efb44320fe842ca285":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"135a07ddb4f54cb1b0e10d4495b8a576":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"322277ce73dc4ad6806f4b17b137a62a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"fa5358aba22c43b0ad6fbb298e344755":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_be3bb030003f48758ff1f006aa86579c","IPY_MODEL_91a7e0e016a94b2bb0
0ddd20fbb8db8f","IPY_MODEL_365b7314dd5b4de081cab56647661fda"],"layout":"IPY_MODEL_ce140b6476de4ca7ad4295876934d167"}},"be3bb030003f48758ff1f006aa86579c":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a1deefa71940433c962d12cdcbbfe44e","placeholder":"​","style":"IPY_MODEL_fcabf6ee25bf496e98cc865576d3a6e4","value":"config.json: 100%"}},"91a7e0e016a94b2bb00ddd20fbb8db8f":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_169f81114aa14e9f8f87d41453d5d514","max":1092,"min":0,"orientation":"horizontal","style":"IPY_MODEL_e65611358e0b4ab5a0823173d1482e52","value":1092}},"365b7314dd5b4de081cab56647661fda":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_db3928ed213e465ca191004126b62854","placeholder":"​","style":"IPY_MODEL_7ee407436649465a866921781a487d7d","value":" 1.09k/1.09k [00:00<00:00, 
90.6kB/s]"}},"ce140b6476de4ca7ad4295876934d167":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a1deefa71940433c962d12cdcbbfe44e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fcabf6ee25bf496e98cc865576d3a6e4":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"169f81114aa14e9f8f87d41453d5d514":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e65611358e0b4ab5a0
823173d1482e52":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"db3928ed213e465ca191004126b62854":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7ee407436649465a866921781a487d7d":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8949ff8c67124e979b250b10ce274761":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_513d7442b9a84a6c87c04d68356365f4","IPY_MODEL_cd52b2e003a343c0b123db4db2b7625c","IPY_MODEL_3c5cd335f23e49a281325b8280b9035d"],"layout":"IPY_MODEL_e34e8697f1414a50b55adbc886bf5730"}},"513d7442b9a84a6c87c04d68356365f4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_4185974ed245494a999a27b1044b5120","placeholder":"​","style":"IPY_MODEL_ed967ed04483407abcfbb06254bd95fe","value":"vocab.json: 
100%"}},"cd52b2e003a343c0b123db4db2b7625c":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_a5485a24505e4775ae22c1b361ede78c","max":898822,"min":0,"orientation":"horizontal","style":"IPY_MODEL_a4971b7132724ba4a179af4c24e54722","value":898822}},"3c5cd335f23e49a281325b8280b9035d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_25664bc477a446e6b250805aac41b81a","placeholder":"​","style":"IPY_MODEL_524aa8d06fbb4461972f30d376df1e0f","value":" 899k/899k [00:00<00:00, 4.46MB/s]"}},"e34e8697f1414a50b55adbc886bf5730":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4185974ed245494a999a27b1044b5120":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ed967ed04483407abcfbb06254bd95fe":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","
_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a5485a24505e4775ae22c1b361ede78c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a4971b7132724ba4a179af4c24e54722":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"25664bc477a446e6b250805aac41b81a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"524aa8d06fbb4461972f30d376df1e0f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9ee97e0694e340ff9efbd139002d84f2":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_7c61abf2dbd7459893d0670ca7f5e1ec","IPY_MODEL_f8dce88e69
664bbeaa4c87740eaaf5a5","IPY_MODEL_1b0a2d33d3ec4a83b4601a9fa9f5db98"],"layout":"IPY_MODEL_205c4907cc004e1b80f74089d165c581"}},"7c61abf2dbd7459893d0670ca7f5e1ec":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_4af4c7c7bea840f38c8b2fc70fc5e446","placeholder":"​","style":"IPY_MODEL_c5369837fb21442299c1ab895df1e6b2","value":"merges.txt: 100%"}},"f8dce88e69664bbeaa4c87740eaaf5a5":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_7da3841bc8c647ffb7f320e34d3bacd3","max":456318,"min":0,"orientation":"horizontal","style":"IPY_MODEL_4c1401460aec4dadb7089fff669becf6","value":456318}},"1b0a2d33d3ec4a83b4601a9fa9f5db98":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_26eb5c5346c14827b364680335a95f6c","placeholder":"​","style":"IPY_MODEL_8568b08c58d340228a0a67bb75e13251","value":" 456k/456k [00:00<00:00, 
2.34MB/s]"}},"205c4907cc004e1b80f74089d165c581":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4af4c7c7bea840f38c8b2fc70fc5e446":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c5369837fb21442299c1ab895df1e6b2":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7da3841bc8c647ffb7f320e34d3bacd3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4c1401460aec4dadb7
089fff669becf6":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"26eb5c5346c14827b364680335a95f6c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"8568b08c58d340228a0a67bb75e13251":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4831b58c7acd4bcc8f6ccd858d4a87ff":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_a4d89a31d9684e6588a198a9e65debef","IPY_MODEL_2d0d4033d8ba44519b5cd0fae0a9a5ec","IPY_MODEL_0ce9b1f189324023b7223c9c83048067"],"layout":"IPY_MODEL_cefe505fc7a3407ca41038e1ee10d7fd"}},"a4d89a31d9684e6588a198a9e65debef":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_b53323f2a4e647bab0dd1e6346cb38b4","placeholder":"​","style":"IPY_MODEL_bc1596adccd04c7b826df52e4d8d2f10","value":"special_tokens_map.json: 
100%"}},"2d0d4033d8ba44519b5cd0fae0a9a5ec":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_46eea1e06f334dc2907616202652e864","max":772,"min":0,"orientation":"horizontal","style":"IPY_MODEL_1e5e59184bd7415fbb4cdedef1ff14a4","value":772}},"0ce9b1f189324023b7223c9c83048067":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_038427d83b034494937e0a9d840607cb","placeholder":"​","style":"IPY_MODEL_90c107ff4d254001a8ada79c3eaa82ca","value":" 772/772 [00:00<00:00, 70.8kB/s]"}},"cefe505fc7a3407ca41038e1ee10d7fd":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b53323f2a4e647bab0dd1e6346cb38b4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"bc1596adccd04c7b826df52e4d8d2f10":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"46eea1e06f334dc2907616202652e864":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1e5e59184bd7415fbb4cdedef1ff14a4":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"038427d83b034494937e0a9d840607cb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"90c107ff4d254001a8ada79c3eaa82ca":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8d24ecf3a1e24ced9430e63470381924":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_bbb6231fd0264dc89a91595bd5b3ee4f","IPY_MODEL_32191641b17b4290a7
ac171cb920baa5","IPY_MODEL_44e0aab583244eadb898af8b41ba5eb5"],"layout":"IPY_MODEL_3b8dc0e01c3e4e3a86e6ea083674fe96"}},"bbb6231fd0264dc89a91595bd5b3ee4f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_72cb9eb27487426d96e4f02f9ab569a0","placeholder":"​","style":"IPY_MODEL_199f1bb59a3b4b0c8a44d27995e4983b","value":"Map: 100%"}},"32191641b17b4290a7ac171cb920baa5":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_d9c573182af04d7c899cda1863a168f1","max":805,"min":0,"orientation":"horizontal","style":"IPY_MODEL_7edb9ca437694efd8176945906aff895","value":805}},"44e0aab583244eadb898af8b41ba5eb5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c30e4cfcff6f4c22a0c0736ffdd52666","placeholder":"​","style":"IPY_MODEL_009e111fc6864159bd65b3e994e670d3","value":" 805/805 [00:06<00:00, 126.86 
examples/s]"}},"3b8dc0e01c3e4e3a86e6ea083674fe96":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"72cb9eb27487426d96e4f02f9ab569a0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"199f1bb59a3b4b0c8a44d27995e4983b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d9c573182af04d7c899cda1863a168f1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7edb9ca437694efd
8176945906aff895":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"c30e4cfcff6f4c22a0c0736ffdd52666":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"009e111fc6864159bd65b3e994e670d3":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"784df40b813a4a19bcf0c14e118135df":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_74818e74da3d4f06b7ad3d5b872cccb0","IPY_MODEL_9c7c7d4b8b1f4e16a3afe3e312aba208","IPY_MODEL_c2a3f46cf39d4bb99023f0b2b08dd4d4"],"layout":"IPY_MODEL_97c6a232be88446cb33ebc47b27d87a8"}},"74818e74da3d4f06b7ad3d5b872cccb0":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fc044f33b6d74b03b933b9b98c3c9184","placeholder":"​","style":"IPY_MODEL_cab39a216a694f0b8d1d22094b0c1ec8","value":"Map: 
100%"}},"9c7c7d4b8b1f4e16a3afe3e312aba208":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_dc8ecdaeb6254bc0a7611e3647190e26","max":101,"min":0,"orientation":"horizontal","style":"IPY_MODEL_4a5399cfaa45400bb13a131ab4d1dd57","value":101}},"c2a3f46cf39d4bb99023f0b2b08dd4d4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_051ef654f3554b809b2c885f3b65e283","placeholder":"​","style":"IPY_MODEL_ca50d23ab252423d8e538eef087e817c","value":" 101/101 [00:00<00:00, 124.31 examples/s]"}},"97c6a232be88446cb33ebc47b27d87a8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fc044f33b6d74b03b933b9b98c3c9184":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cab39a216a694f0b8d1d22094b0c1ec8":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0",
"_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"dc8ecdaeb6254bc0a7611e3647190e26":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4a5399cfaa45400bb13a131ab4d1dd57":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"051ef654f3554b809b2c885f3b65e283":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ca50d23ab252423d8e538eef087e817c":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"89e22579564e4df7845bff68bf610b3d":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_cadabb092e4e44a78ddf257651188fee","IPY_MODEL_f0ce216ff
ec54b8690f698ded84b10e3","IPY_MODEL_77f54a9522164259af627b7d824134df"],"layout":"IPY_MODEL_daeb019992ab42d5b71621e56c593cb5"}},"cadabb092e4e44a78ddf257651188fee":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_29f26b4645f946eb8e9369c3306d6532","placeholder":"​","style":"IPY_MODEL_209e2958bb9c41f8a650b013d15ecdbd","value":"pytorch_model.bin: 100%"}},"f0ce216ffec54b8690f698ded84b10e3":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_38c4f70de4074b4bbc308ab1eeafe738","max":647693783,"min":0,"orientation":"horizontal","style":"IPY_MODEL_e2c8fe161448475bb4a18e16e0cfa678","value":647693783}},"77f54a9522164259af627b7d824134df":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_415ffaf7eca94f93adf32a31a01960c2","placeholder":"​","style":"IPY_MODEL_597f5d6ee4674cb38f3a952b3fb71a2b","value":" 648M/648M [00:05<00:00, 
138MB/s]"}},"daeb019992ab42d5b71621e56c593cb5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"29f26b4645f946eb8e9369c3306d6532":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"209e2958bb9c41f8a650b013d15ecdbd":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"38c4f70de4074b4bbc308ab1eeafe738":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e2c8fe161448475bb4a
18e16e0cfa678":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"415ffaf7eca94f93adf32a31a01960c2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"597f5d6ee4674cb38f3a952b3fb71a2b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"6ce127b185b642deaa0a5aa6592607dd":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_86a97530979f4d0eb0f4b49842526e70","IPY_MODEL_883de2a8f9fd40bdb251a4b3901ffb78","IPY_MODEL_aa367bf8c7a34545b4fc8a070d1017e4"],"layout":"IPY_MODEL_d400bc2ccf5c4a3d9a4600e48f836d4c"}},"86a97530979f4d0eb0f4b49842526e70":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fdcc7267e6fd497db0c2883a5370f352","placeholder":"​","style":"IPY_MODEL_65cc66296cf443ba86e00c9902db43b4","value":"generation_config.json: 
100%"}},"883de2a8f9fd40bdb251a4b3901ffb78":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_4cd2d358451d41a190bd55c5cea09cbd","max":168,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6f84841c1ed5446ea4897bd73bbc2197","value":168}},"aa367bf8c7a34545b4fc8a070d1017e4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c1e85729636346ff87094fc3b92a6b8c","placeholder":"​","style":"IPY_MODEL_2451682322954cadaa0a6fb23633504f","value":" 168/168 [00:00<00:00, 13.8kB/s]"}},"d400bc2ccf5c4a3d9a4600e48f836d4c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fdcc7267e6fd497db0c2883a5370f352":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"65cc66296cf443ba86e00c9902db43b4":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_n
ame":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4cd2d358451d41a190bd55c5cea09cbd":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6f84841c1ed5446ea4897bd73bbc2197":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"c1e85729636346ff87094fc3b92a6b8c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2451682322954cadaa0a6fb23633504f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"fcf4832115e540f2b5a2fde29ec63674":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_2cbb2c5ddf0d4d44ad1fdbfa785472f4","IPY_MODEL_dbd03f9c072944d2a7
d6851d6accbd63","IPY_MODEL_77f186f939e841f29a0a02f4d418d08e"],"layout":"IPY_MODEL_2462c391953b4eda896af2a00b3c495e"}},"2cbb2c5ddf0d4d44ad1fdbfa785472f4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ee27020df9214391aba4fcc448998ea0","placeholder":"​","style":"IPY_MODEL_c9c3ccfff72a4280b92ef23ab702b014","value":"Downloading builder script: "}},"dbd03f9c072944d2a7d6851d6accbd63":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_f2b0ef8bb0b447e7829fe5565614f30c","max":2169,"min":0,"orientation":"horizontal","style":"IPY_MODEL_d93607d646234b7198ca56ab30cd2ac8","value":2169}},"77f186f939e841f29a0a02f4d418d08e":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ebca925ed7044c46b6546b55999f5fe8","placeholder":"​","style":"IPY_MODEL_d3bc803ee4204a8187724b7408438770","value":" 5.65k/? 
[00:00<00:00, 458kB/s]"}},"2462c391953b4eda896af2a00b3c495e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ee27020df9214391aba4fcc448998ea0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c9c3ccfff72a4280b92ef23ab702b014":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f2b0ef8bb0b447e7829fe5565614f30c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d9360
7d646234b7198ca56ab30cd2ac8":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"ebca925ed7044c46b6546b55999f5fe8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d3bc803ee4204a8187724b7408438770":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"dd5627e696804fada6b9cd71209fdac3":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_d7f879c624eb47a6b84efa57c64ad4ea","IPY_MODEL_599c70c621d14dc3b5f94af4d3ebf242","IPY_MODEL_d35b166805a2496d906a5199596a042b"],"layout":"IPY_MODEL_4242dcc54c344f05982313bdf58d8ab2"}},"d7f879c624eb47a6b84efa57c64ad4ea":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3203d202911d4eb59506b20d38e1ce64","placeholder":"​","style":"IPY_MODEL_a6308393db544beeba50cad6d6a03e4e","value":"Downloading builder script: 
"}},"599c70c621d14dc3b5f94af4d3ebf242":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_c6ffa16a22ba4e879f5ff64ac7b69855","max":1652,"min":0,"orientation":"horizontal","style":"IPY_MODEL_b0163e57c65e427e91cfa0609a20da41","value":1652}},"d35b166805a2496d906a5199596a042b":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_f7e902a29e024f079ee5b57d0159416c","placeholder":"​","style":"IPY_MODEL_8e4b1e784e234d148ae2e387ddc30c6b","value":" 4.21k/? [00:00<00:00, 278kB/s]"}},"4242dcc54c344f05982313bdf58d8ab2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3203d202911d4eb59506b20d38e1ce64":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a6308393db544beeba50cad6d6a03e4e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name
":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"c6ffa16a22ba4e879f5ff64ac7b69855":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b0163e57c65e427e91cfa0609a20da41":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"f7e902a29e024f079ee5b57d0159416c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"8e4b1e784e234d148ae2e387ddc30c6b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}},"accelerator":"GPU"},"nbformat":4,"nbformat_minor":0} \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/0.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/0.txt new file mode 100644 index 0000000000000000000000000000000000000000..9af343986b36c1007d55c39cf13dfb43e96b7d9a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/0.txt @@ -0,0 +1 @@ +boolean satisfiability problem (sat) is a significant np-complete 
problem with numerous practical applications - product configuration, hardware verification, and software package management, to name a few. there is no known single efficient algorithm that solves every sat problem, but heuristics have been developed that are well-suited to instances arising in practice. here we consider an even harder task: sampling solutions of a sat instance, which is a key component for functional verification of software and hardware designs. most sat solvers return the same solution if run repeatedly; therefore, special approaches are needed to obtain several solutions, especially if it is required to sample more or less uniformly from the solution space. unigen and quicksampler are two algorithms for this purpose. unigen guarantees approximate uniformity of the sampled solutions but is very slow in certain cases. so, we are interested in whether the application of neural networks and denoising diffusion can produce diverse samples in less time. deep learning methods can accelerate solving np-hard problems. purely neural sat solvers have been developed that can solve small instances. hybrid solvers that provide neural heuristics within existing solvers are more practical for real-life instances. also, deep learning methods are used for combinatorial optimization including mip, tsp and vrp. denoising diffusion, coupled with neural networks, provides a learnable way for sampling from a given distribution. diffusion models have achieved state-of-the-art results for image generation. diffusion has been applied to discrete binary and categorical data, text generation, symbolic music generation, graph generation, and autoregressive modelling. but we are not aware of any attempt to use it for sampling sat solutions. in this paper, we apply denoising diffusion to sample solutions of sat. we use categorical diffusion introduced by and couple it with the querysat graph neural network to implement the denoising function. we find that this approach achieves solving accuracy comparable to the state-of-the-art purely neural sat solver (querysat) and finds diverse solutions with similar characteristics to unigen, up to 8000 times faster in some cases. interestingly, there is not much difference whether diffusionsat has been trained with diverse solutions or with those coming from a standard sat solver - both produce slightly better diversity than unigen. to get more insight into how diverse the solutions are, we inspect how many variables are equal in two solution samples on 3-sat instances (see appendix c). moreover, there is no need to use a uniform distribution for training, as training on solutions from a standard sat solver gives equally good results. appendix a: datasets. 3-clique and 3-sat tasks are generated using the cnfgen library (lauria et al.). to evaluate 3-sat instances with clause/variable ratio 3, we do not perform separate training; we use the same model trained for the 4.26 ratio (crawford and auton). in order to evaluate diversity, we inspected how many variables are equal in two solution samples on 3-sat instances.
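The passage above (inputs/0.txt) describes coupling categorical denoising diffusion with the querysat graph neural network to sample sat solutions. Below is a minimal, illustrative sketch of a "predict x_0, then re-noise" reverse sampling loop over binary variable assignments; the denoiser callable, the linear noise schedule, and the DIMACS-style clause format are assumptions for illustration, not the paper's implementation.

import numpy as np

rng = np.random.default_rng(0)

def sample_sat_assignment(denoiser, clauses, n_vars, n_steps=50):
    """Illustrative reverse loop of a binary (categorical) diffusion sampler.

    denoiser(x_t, t, clauses) is assumed to return per-variable probabilities
    p(x_0 = 1 | x_t, t); in the paper that role is played by a graph neural
    network over the clause-variable graph, here it is a placeholder callable.
    """
    # start from pure noise: each variable is 0/1 uniformly at random
    x_t = rng.integers(0, 2, size=n_vars)
    for t in reversed(range(1, n_steps + 1)):
        p1 = denoiser(x_t, t, clauses)      # predicted P(x_0 = 1) per variable
        beta = t / n_steps                  # toy linear noise schedule
        # crude "predict-and-renoise": mix the prediction with uniform noise
        p_keep = (1 - beta) * p1 + beta * 0.5
        x_t = (rng.random(n_vars) < p_keep).astype(int)
    return x_t

def satisfies(assignment, clauses):
    """clauses: list of lists of signed ints, e.g. [[1, -2], [2, 3]]."""
    return all(
        any((lit > 0) == bool(assignment[abs(lit) - 1]) for lit in clause)
        for clause in clauses
    )

Repeated calls with different random seeds would yield the diverse samples the passage is after; the satisfies helper is only there to filter out assignments the sampler fails to make consistent.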
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1.txt new file mode 100644 index 0000000000000000000000000000000000000000..95846f55e681017c380c39fe5d1c51d34523ecdf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1.txt @@ -0,0 +1 @@ +think of a cat & dog classifier that will not mistake an image of a "wolf" for an image of a "dog" or an image of a "lion" for an image of a "house cat". the classifier is capable of handling images of such objects which do not belong to either of the pre-defined classes. here the pre-defined classes being "cat" and "dog". the images handled do not have to be similar to those of pre-defined classes, and in fact they can be images of birds such as a parrot or even of pure backgrounds without any instances of an animal such as a forest or a beach or a city, but our classifier will still be able to handle these images accordingly, even though they do not belong to any of the pre-defined classes. auxiliary learning aims to provide classifiers with this capability to distinguish unfamiliar real world objects, similar to how humans are capable of identifying new objects from their surroundings and are able to realise that these objects do not belong to any of the known/learnt classes of objects. thus our classifier/agent can accept the fact that there are other classes/types of objects in the real world, as opposed to classifying every input image as one of the pre-defined classes.through this paper, we aim to introduce a novel approach to building general purpose neural networks. we do not claim that our approach presented in the paper is a complete or perfect solution to building general purpose neural networks but we believe it could be a step in the right direction. our solution mainly addresses two issues that pose difficulty for the development of a general purpose neural network. the first issue addressed is of increasing the generality of existing narrow purpose neural networks. the second issue addressed is a possible method for combining multiple narrow purpose neural networks to build a bigger neural network that incorporates the functionalities of all sub neural networks.think of a cat & dog classifier that will not mistake an image of a "wolf" for an image of a "dog" or an image of a "lion" for an image of a "house cat". the images handled do not have to be similar to those of pre-defined classes, and in fact they can be images of birds such as a parrot or even of pure backgrounds without any instances of an animal such as a forest or a beach or a city, but our classifier will still be able to handle these images accordingly, even though they do not belong to any of the pre-defined classes. once training has completed, the agent should be capable of classifying new images as belonging to either "dog" class or "cat" class.but what if the image provided to the agent for classification does not contain any instances of a dog or a cat? in such cases, the agent will still classify the image as belonging to one of the above mentioned classes. how will our agent classify this image? there is a higher chance for the agent to classify the given image as belonging to the class "dog". 
in this scenario we can see how our agent should include a new class namely "parrot" for classifying images, instead of classifying the image as belonging to either "dog" or "cat" class.our solution is to include a new class namely "others" called the auxiliary class, in addition to the pre-defined classes in the neural network for classification. for example, in case of the cat & dog classifier the auxiliary class can hold images that do not belong to both the "dog" class and the "cat" class, such as images of ships, parrots, forests, mobile phones, volcanoes, dolphins, radios, cars etc. class imbalance problem is a direct consequence of the auxiliary class having a greater number of data points/ images when compared to the number of images in any of the pre-defined classes.a traditional approach to utilise multiple neural networks each from a different domain would be to find a logical hierarchy between the neural networks and to manually program the flow of control between the neural networks as defined by the hierarchy. for example the "cat" class of a cat & dog classifier can act as the point of succession for the cat breed classifier (a classifier that takes the image of a cat and gives the species of cat as the output). similarly the "dog" class of a cat & dog classifier can act as the point of succession for the dog breed classifier (a classifier that takes the image of a dog and gives the species of dog as the output). the auxiliary classes from two different neural networks can come together to form a point of fusion between these neural networks, thus also enabling the combination of the systems of neural networks they each belong to if any. in a system with multiple neural networks, the auxiliary class of each neural network presents a possibility towards forming a point of fusion with other systems. these images had to be removed from the dataset before we could build the auxiliary class as the ilsvrc dataset was to be used solely for building the auxiliary class ("others" class). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/10.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/10.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf6c78d7ae16cc60593b678c8552c1fab92efab2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/10.txt @@ -0,0 +1 @@ +internet based systems are becoming an integral part of the everyday life with the advancements in networking and ai . with increased connectivity and technology adoption, there are rising concerns in cyberattacks. intrusion detection systems serve as an important defense against cyberattacks, to identify network threats and enable appropriate measures to be taken. intrusion detection systems based on anomaly detection adopt different approaches such as , to handle various types of challenges and datasets . anomaly is essentially a deviation from the rest of the normal data points or network traffic patterns. the similarity among key features of normal samples can be quantified to distill the anomalous ones. various supervised learning methods , were proposed for this anomaly detection problem. the main challenge is the need to generate a large dataset with annotated labels . 
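Referring back to the auxiliary-class idea in inputs/1.txt above: a minimal sketch of a classifier whose output layer carries the pre-defined classes plus one extra "others" unit. The backbone, layer sizes, and class list are illustrative assumptions, not the paper's architecture; in practice the "others" class is trained on out-of-domain images (e.g., ILSVRC images with the pre-defined classes removed, as described above) and some class-imbalance handling such as class weighting is needed because "others" dwarfs the other classes.

import torch
import torch.nn as nn

# pre-defined classes plus one auxiliary "others" class
CLASSES = ["cat", "dog", "others"]

class AuxiliaryClassifier(nn.Module):
    """Toy image classifier whose last output unit is the auxiliary class."""
    def __init__(self, n_classes=len(CLASSES)):
        super().__init__()
        self.backbone = nn.Sequential(
            nn.Conv2d(3, 16, 3, padding=1), nn.ReLU(),
            nn.AdaptiveAvgPool2d(1), nn.Flatten(),
        )
        self.head = nn.Linear(16, n_classes)

    def forward(self, x):
        return self.head(self.backbone(x))

model = AuxiliaryClassifier()
logits = model(torch.randn(1, 3, 64, 64))
pred = CLASSES[logits.argmax(dim=1).item()]   # may legitimately be "others"

The auxiliary output is also what the fusion idea hinges on: two such networks can be chained or combined at their "others" classes without hand-coding a flow of control between them.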
to alleviate the need of such a large labeled training dataset, unsupervised learning methods are proposed in the literature for anomaly detection - .in this work, we propose an intrusion detection framework using unsupervised anomaly detection that integrates the clustering and deep learning techniques. it consists of three stages: the first stage separates the more obvious anomalies from the normal samples using k-means clustering. these remaining "normal" samples are sent to the second stage, to further identify the anomalies through the adaption of the ganomaly . ganomaly learns the compressed representation of the input data sample and computes the difference between such data representation and its reconstructed representation. the anomalies are identified if their reconstructed error is larger than the threshold. for the first two stages, we assume that the anomalies do not dominate the dataset statistically, which is the notion of an anomaly . we also propose a final-stage convolutional neural network (cnn) classification model. this stage is useful for attack type analysis in the event that annotated attack types are available. we evaluate the performance of our proposed framework on nsl-kdd, cic-ids2018, and ton iot datasets.the rest of the paper is organized as follows. section ii reviews the related works. the details of the proposed framework is presented in section iii. the experimental setup and dataset details are discussed in section iv. the results are presented and discussed in section v. section vi concludes the paper. within each eligible cluster m with cluster size c m , only the t h var × c m samples closest to the center of the cluster will be selected and added to the training set of probable normal samples for the next stage of the framework. as the input to this third stage are only the anomalies identified from the second stage, training of the cnn was performed using only the anomaly samples of different attack categories i.• nsl-kdd: nsl-kdd dataset is designed to overcome certain inherent issues from the kdd99 dataset such as redundant, duplicate records; availability of sufficient records in different difficulty levels; and reasonable number of records in both training and testing dataset. for the anomaly detection problem (stage 1 and stage 2 of the framework), the different attack categories will be treated under single anomaly "attack" category.true positive rate is the ratio of the number of data samples correctly predicted as attacks to the total number of attack samples present in the dataset. true positives refer to the number of attack samples correctly predicted as attacks by the model and false negatives are the number of the attack samples incorrectly predicted as normal by the given model. false positive rate is the ratio of the number of normal samples incorrectly identified as attacks to the total number of normal samples present in the dataset. false positives are the number of the normal samples incorrectly predicted as attacks by the model, and true negatives are the number of the normal samples correctly predicted as normal by the model. the relatively inferior performance of ocsvm in case of cic-ids2018 and ton iot dataset could be attributed to the presence of anomalous samples in the training data. it is possible that specifically in case of nsl-kdd dataset, the number of normal samples in the training set are sufficient for ocsvm to perform effective anomaly detection. 
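A minimal sketch of the stage-1 filtering described above: cluster the unlabelled traffic with k-means and, within each eligible cluster of size c_m, keep only the th_var x c_m samples closest to the cluster centre as probable normal samples for training the next stage. The parameter names (th_var, n_clusters) and the small-cluster eligibility rule are illustrative assumptions; the paper's exact criteria may differ.

import numpy as np
from sklearn.cluster import KMeans

def select_probable_normals(X, n_clusters=8, th_var=0.8, min_cluster_frac=0.05):
    """Return indices of samples kept as probable normals (stage 1).

    X: 2-d numpy array of feature vectors (unlabelled traffic records).
    """
    km = KMeans(n_clusters=n_clusters, n_init=10, random_state=0).fit(X)
    dists = np.linalg.norm(X - km.cluster_centers_[km.labels_], axis=1)
    keep = []
    for m in range(n_clusters):
        idx = np.where(km.labels_ == m)[0]
        c_m = len(idx)
        # treat very small clusters as likely anomalous and skip them (assumption)
        if c_m < min_cluster_frac * len(X):
            continue
        n_keep = int(th_var * c_m)
        keep.extend(idx[np.argsort(dists[idx])[:n_keep]])
    return np.array(sorted(keep))

The retained samples would then train the GANomaly-style stage 2, which flags a sample as anomalous when its reconstruction error exceeds a threshold chosen on these probable normals.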
the ganomaly in our framework is trained with the normal samples identified from the training set in the stage 1 of our framework whereas the baseline ganomaly is trained using the entire training dataset, which explains the better performance of our proposed framework. after the resampling technique adasyn is applied, the number of samples for each attack category was approximately made equal to the maximum number of samples for a single category prior to the resampling. the test dataset for the evaluation of cnn stage is different from the test dataset of the anomaly detection stage.for nsl-kdd, the test dataset within the nsl-kdd is used as the test dataset for the nsl-kdd evaluation. for ton iot (windows10), the number of available samples are split as 70%-30% for training and test dataset prior to the resampling process. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/100.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/100.txt new file mode 100644 index 0000000000000000000000000000000000000000..2bcba34acc09fcffbc279cf625c3915e4c20639d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/100.txt @@ -0,0 +1 @@ +ai is now used widely in a diverse set of tasks, ranging from face recognition to real-time speech processing to autonomous transportation. among these applications, deep neural networks have been responsible for the most recent ai breakthroughs, like deepminds alphagos stunning defeat of the go world champion. google has developed a highquality deep neural machine translation system between 17 languages . since then, natural language processing models have only grown more powerful and are now capable of programming, writing stories, and even composing poems. these powerful ai models require deeper and larger nets. in 2019, nvidia published natural language model megatron with 8.5 billion parameters. this year, microsoft introduced the next generation turing-nlg with 17 billion parameters. recently, openai released an even larger model and current state-ofthe-art, gtp-3 . with 174 billion parameters, gpt-3 takes 350gb memory and $12 million to train . it is clear that newer ai models require more parameters, more memory, and larger training sets. they will only continue to grow in size and training cost. to address these future demands, we propose a new architecture for ai chip. in this paper, we discuss two key components of this new architecture: heterogenous chip integration technology and single-form memory.our results demonstrate the potential of this new architecture. this chip, fabricated on a 40nm process, achieves performances comparable with current-state-of-the-art chips, which use generations more advanced fabrication processes than the 40nm. we further extrapolate the performances of our chip to current fabrication processes. our chip is projected to be able to hold 12 billion parameters on a single chip, while current best on the market only holds 8 billion on a whole wafer. to sustain the momentum in the field, we need to address the three challenges for ai chips: the memory wall, energy efficiency, and on-chip memory capacity. the first approach appears in high-bandwidth memory, such as highperformance ddr memory and hbm memory. to lower the power consumption of data transfer between dramand ai chips, high-bandwidth memory (hbm)and package substrate routing, like interposer, are used. 
fast memory is referred to as on-chip or near-chip memory, which provides data quickly without throttling the processing units. most ai chips on the market typically have a memory capacity of just 50 mb, which leaves a large gap between current memory capabilities and memory demands. the component technologies for our sunrise chip include heterogeneous integration technology on chip (hitoc), single form memory (unimem), and architecture specifically optimized for hitoc and unimem.by fabricating logic and memory wafers separately, both the logic process and memory process are optimized indepen-fig. by separating logic and memory, we achieve better electrical characteristics and overall chip characteristics as well as higher computational performance and memory capacity.with localized dram array pooling, and maximizing data movement, sunrise chip overcomes slow dram latency and deliver high computation performance.because memory bandwidth is abundant in sunrise chip, we choose to use vectors instead of tensors as the basic computational data unit. with hitoc and unimem technology, this architecture not only overcomes slow dram latency but also has sufficient memory bandwidth to support high performance. to compare the architecture effectively, we normalize each chip to a 7nm cmos process and a 1y dram process, based on factors such as density, transistor performance, and power reduction. although sunrise chip is on a 40nm process, its performance per unit area exceeds that of two competing chips at 12nm and 16nm. with the architecture of sunrise, one can choose to either use a less expensive process to achieve the same performance as other chips or use current processes to get better performance then other chips. sunrise chip, despite being fabricated on a 40nm process node, matches or exceeds other ai chips with more advanced process technology on the metrics of performance, memory bandwidth, memory capacity, energy efficiency, and cost. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1000.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1000.txt new file mode 100644 index 0000000000000000000000000000000000000000..04647e7f2ad0a57e4244f660ab159c5b0e627516 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1000.txt @@ -0,0 +1 @@ +sentiment analysis has emerged as an increasingly prominent method in political text analysis. numerous studies have highlighted its ability to extract valuable insights from political texts, such as public sentiment in general election cycle (wang et al. 2012), capturing the political and economic narratives (ash, gauthier, and widmer 2023), emotions toward major political parties (ansari et al. 2020), and attitudes toward controversial policies like covid-19 vaccinations policy (lyu et al. 2022). however, sentiment analysis has been critiqued for merely capturing the overall tone of a text, failing to adequately represent the specific political stance that is targeted and pertains to individual topics or interest entities (bestvater and monroe 2023). furthermore, existing research emphasizes that the effectiveness of political stance classifiers is not uniform and is influenced by the type and quality of the text data (grimmer and stewart 2013;gonzález-bailón and paltoglou 2015). 
as scholars have acknowledged the distinctions between sentiment and stance, and recognized the limitations of sentiment analysis, many studies have attempted to enhance the stance classification techniques by using target-based methods (küçük and can 2020). others have explored alternatives to sentiment in textual features, such as n-grams (elfardy and diab 2016). however, despite the advancement of topic modeling methods, to the best of our knowledge, no research has leveraged the capacity of innovative topic modeling methods in identifying the latent structures within texts in political stance classification.in this article, we explore the potentials of topic metrics, which are dummy variables converted from extracted topics, to be an alternative solution for stance classification when sentiment analysis falls short in the task. stance is defined as the emotional or attitudinal position expressed toward a specific target and is often linked to distinct topics or entities of interest (mohammad et al. 2016;bestvater and monroe 2023). in addition, recent media studies indicate the potentials of topic metrics in capturing diverse targets influenced by social groups and political ideologies, finding that political stances are reflected in variations of fine-grained themes (pan et al. 2023). therefore, topic metrics can potentially address the shortcomings of sentiment metrics in stance classification.to investigate the potentials of topic metrics in political stance classification, we study the following research questions in this article:• rq1: what is the most effective topic modeling method for analyzing political texts? • rq2.1: can topic metrics outperform sentiment metrics in classifying stance? • rq2.2: does the combination of topic metrics and sentiment better than only using one of them? • rq3: what criteria should inform the choice of using either of the two metrics, or a combination of both, for classifying political stances?using three dataset with various types and contexts as identified by bestvater and monroe (2023), we first illustrate that bertopic outperforms traditional topic modeling techniques, including latent dirichlet allocation (lda) and non-negative matrix factorization (nmf), in achieving topic coherence. we then provide evidence that the topics extracted through bertopic, serving as topic metrics, surpass sentiment metrics in classifying political stances, particularly when analyzing context-rich texts from social media platforms. the topic metrics can improve the performance in stance classification over sentiment by 18.95% when applied to the context-rich texts from social media platforms. the combination of the topic and sentiment metrics show the best performance when sentiment has certain level of correlation with stance. however, its performance diminishes and topic metrics perform optimally when the correlation is weak.our results suggest that bertopic generate more coherent topics than lda and nmf in political text analysis. topic metrics are best suited for context-rich texts from social media and surpass sentiment metrics in other text types. however, combining both metrics into political stance classification does not consistently improve model performance, as a weak correlation between sentiment and stance might introduce noise during model training. when the coherence score is low, the combination of the two metrics offers the most significant enhancement in performance compared to using topic metrics alone. 
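A minimal sketch of the topic-metrics pipeline described above: extract topics with bertopic, one-hot encode each document's topic assignment into dummy variables, and evaluate a stance classifier with macro F1. The downstream classifier (logistic regression), the 70/30 split, and the function signature are assumptions for illustration; combining with sentiment would simply mean concatenating a sentiment-score column to the dummies.

import pandas as pd
from bertopic import BERTopic
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import f1_score

def topic_metric_f1(texts, stances):
    """Train and evaluate a stance classifier using topic dummies only."""
    topic_model = BERTopic(verbose=False)
    topics, _ = topic_model.fit_transform(texts)            # one topic id per doc
    X = pd.get_dummies(pd.Series(topics), prefix="topic")   # topic metrics (dummies)
    X_tr, X_te, y_tr, y_te = train_test_split(
        X, stances, test_size=0.3, random_state=42, stratify=stances)
    clf = LogisticRegression(max_iter=1000).fit(X_tr, y_tr)
    return f1_score(y_te, clf.predict(X_te), average="macro")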
thus, for stance classification in social media or survey response texts, topic metrics should be prioritized over sentiment. if there is a noticeable correlation between sentiment and stance, or if the coherence score is low, the combination of both metrics is optimal.the topic metrics, demonstrated as a viable alternative or supplement to sentiment metrics, reveal multiple advantages. firstly, they minimize the number of assumptions and biases, and the labor required compared to manual political stance labeling, while enhancing accuracy over sole reliance on sentiment metrics. secondly, they are extracted from current data as a new measurement and there is no need for additional data collection. lastly, they provide political scientists insights into which issues present greater topic variances in debates versus those dominated by sentiments.this article primarily contributes in introducing an innovative metric for the classification of political stances, particularly within the context-rich texts from social media. a secondary, yet equally significant contribution is highlighting the need for future political science research to consider topic discrepancies, along with their associated framing and attention differentials, as vital components in capturing the political stance conveyed in political texts. furthermore, this article shows a novel application of topic modeling methods in the realm of political text analysis. furthermore, existing research emphasizes that the effectiveness of political stance classifiers is not uniform and is influenced by the type and quality of the text data(grimmer and stewart 2013;gonzález-bailón and paltoglou 2015). in addition, recent media studies indicate the potentials of topic metrics in capturing diverse targets influenced by social groups and political ideologies, finding that political stances are reflected in variations of fine-grained themes(pan et al.1: can topic metrics outperform sentiment metrics in classifying stance? • rq2.2: does the combination of topic metrics and sentiment better than only using one of them? • rq3: what criteria should inform the choice of using either of the two metrics, or a combination of both, for classifying political stances?. we then provide evidence that the topics extracted through bertopic, serving as topic metrics, surpass sentiment metrics in classifying political stances, particularly when analyzing context-rich texts from social media platforms. the combination of the topic and sentiment metrics show the best performance when sentiment has certain level of correlation with stance. topic metrics are best suited for context-rich texts from social media and surpass sentiment metrics in other text types. however, combining both metrics into political stance classification does not consistently improve model performance, as a weak correlation between sentiment and stance might introduce noise during model training. then we compare the f1 scores obtained from sentiment metrics, topic metrics and the combination of the two metrics for stance classification on the three datasets. however, on the kc dataset, where the correlation between sentiment and stance is notably weak, using only the topic metrics achives the highest f1 score. for the kc dataset, the topic metrics significantly outperform the sentiment metrics and also achieve a slightly higher f1 score than the combined metrics.68%, respectively.7% and 14. 
our experiment results provide evidence showing that topic metrics, derived from topic modeling methods, can outperform sentiment metrics in classifying stance.the findings of the paper not only provide an alternative and complement metrics for political stance classification, but also offers insights for future research to pay more attention to the topic discrepancies in political texts about controversial topics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1001.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1001.txt new file mode 100644 index 0000000000000000000000000000000000000000..608d92c75effbf8c8ff3732068faa432cd3b934e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1001.txt @@ -0,0 +1 @@ +the challenge of ensuring that artificial intelligence (ai) systems are aligned with human intentions, ethics, and values has been recognized since the early days of the development of ai. in his seminal article some moral and technical consequences of automation, norbert wiener highlighted the need to ensure that the "purpose put into the machine is the purpose which we really desire." more recently, russell has highlighted the need to explicitly align ai with objectives aligned with human intentions and values.but how can we embed human intentions and values in machines? and whose values should be embedded? with the rise of autonomous systems, these questions have garnered significant attention in recent years. this has led to new disciplines like machine behavior and machine ethics which have explored how machines can be "taught" to learn human norms. as expected, there is no unique way to teach norms to machines. building on rawlsian ideas of fairness , dwork et al. suggested that machine norms need to be "an approximation as agreed upon by society." awad et al. operationalized this idea for the case of autonomous vehicles by crowd-sourcing moral decisions from millions of online participants through the moral machine experiment. noothigattu et al. used data from the moral machine experiment to build a model of aggregated moral preferences using tools from computational social choice .more recently, in the context of large language models (llms), the project of ai alignment has been focused on mitigating harms i.e. ensuring that llms follow user instructions, do not generate biased or toxic text, or factually inaccurate information . reinforcement learning using human feedback (rlhf) has been the key technical innovation that has led to remarkable progress in developing aligned ai agents . for instance, the most widely deployed llms like gpt4 , claude , bard , and llama all use rlhf to fine-tune their models. while these approaches focus on criteria such as "helpfulness, honesty, and harmlessness" , there remain open questions about how such alignment can be realized practically and at scale. more broadly, whose norms or values should they be aligned with? and how can we align ai systems by respecting democratic norms?the limitations of rlhf in aligning ai systems at scale remain poorly understood. casper et. al. provide an exhaustive review of fundamental limitations and open challenges in rlhf for ai alignment. in particular, they highlight the limitation of selecting "representative humans" to act as reinforcers during the training process. for example, while training gpt4, openai restricts human reinforcers who agree with "expert" researcher preferences . 
given systemic issues in researcher training and hiring, this can lead to biased outcomes. additionally, the demographics of reinforcers are not representative of end users. 1this paper examines the limits of building aligned ai systems through a democratic process, even when reinforcers are representative of the underlying population of users. more specifically, we ask whether it is possible to design voting rules that allow a group of reinforcers, representative of a population of diverse users, to train an ai model using rlhf. using two widely known results from social choice theory, we show that there exist no unique voting rules that allow a group of reinforcers to build an aligned ai system by respecting democratic norms i.e. by treating all users and reinforcers the same. further, we show that it is impossible to build a rlhf model democratically that respects the private preferences of each user in a population simultaneously. as far as we know, this fundamental limitation of rlhf has not been highlighted in the literature. this paper builds on ideas from social choice theory, machine ethics, and computer science to highlight some fundamental barriers to embedding human intentions to ai systems while following democratic norms. our key result borrows from two widely known theorems in social choice theory -the impossibility theorems by arrow and sen -which are widely known constraints in voting theory. gabriel briefly touched on the implications of impossibility theorems on ai alignment but as far as we know, these arguments have not been formally investigated in the context of rlhf.the rest of the paper is organized as follows. in section 2, we provide formalize reinforcement learning with multiple reinforcers, in section 3 we outline the arrow-sen impossibility theorems in the context of rlhf and we discuss the implications of these theorems in section 4. 
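To make the aggregation problem concrete before the theorem statements that follow, the toy example below shows three reinforcers whose rankings over three candidate model outputs form a Condorcet cycle, so pairwise majority preference is not transitive. This is only an illustration of the kind of obstruction Arrow's theorem formalises, not the paper's notation or proof.

from itertools import combinations

# three reinforcers' strict rankings over three candidate outputs (best first)
rankings = [
    ["output-1", "output-2", "output-3"],   # reinforcer A
    ["output-2", "output-3", "output-1"],   # reinforcer B
    ["output-3", "output-1", "output-2"],   # reinforcer C
]

def majority_prefers(a, b):
    """True if a strict majority of reinforcers rank a above b."""
    wins = sum(r.index(a) < r.index(b) for r in rankings)
    return wins > len(rankings) / 2

for a, b in combinations(["output-1", "output-2", "output-3"], 2):
    print(f"majority prefers {a} over {b}: {majority_prefers(a, b)}")

# output-1 beats output-2 and output-2 beats output-3, yet output-3 beats
# output-1: the majority relation is cyclic, so no transitive group ranking
# exists, which is exactly the kind of case the aggregation rule must resolve.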
using two widely known results from social choice theory, we show that there exist no unique voting rules that allow a group of reinforcers to build an aligned ai system by respecting democratic norms i.consider an ai agent receiving feedback from a human reinforcer over a sequence of steps; at each time t the agent receives a question/prompt p t ∈ o from the human and then sends an answer/response a t ∈ a to the human., n ).• pareto or consensus: an aggregation function f satisfies pareto if, whenever every individual i ∈ n strictly prefers σ 1 to σ 2 , the function f ranks σ 1 strictly higher than σ 2 .• independence of irrelevant alternatives (iia): the group members' preferences about some alternative σ 3 does not affect how the aggregation rule f ranks two different alternatives, σ 1 = σ 3 and σ 2 = σ 3 .theorem 1 (arrow's theorem) with three or more alternatives, any aggregation rule f satisfying pareto, independence of irrelevant alternatives, and transitivity must be dictatorial.sen's theorem implies that any choice c satisfying universal domain and pareto can respect the individual rights of at most one individual.if the choice is between outputs 1 and 3, a liberal -someone who cares about individual rights above all else -might argue that reinforcer-b's preference should count; since reinforcer-a would be ok not revealing their preference, and should not be forced to.similarly, in the choice between output-2 ("x is not fascist") and output-3 ("its complicated"), liberal values require that reinforcer-b's preference should be decisive, and since they clearly feel strongly about opposing the stance that x is fascist, they should be permitted to do this.thus, respecting individual preferences or liberal values would lead to preferring output-3 over output-1 and output-2 over output-3. arrow's impossibility theorem implies that there is no unique voting rule that can allow us to train ai agents through rlhf while respecting democratic norms i. note that this is distinct from the obvious result that any democratic process will always result in a minority that will be unhappy about the outcome; however, arrow's theorem implies that no unique voting rule exists even when deciding what the majority preference is. it implies that a democratic method of aligning ai using rlhf cannot let more than one reinforcer encode their (privately held) ethical preferences via rlhf, irrespective of the ethical preferences of other reinforcers.sen's theorem has important implications for aligning ai systems that respect personal preferences of individual users or a small subset of users with shared values, especially those who might not be represented amongst reinforcers. finally, our results indicate that the future of aligned ai development might be better served by incentivizing smaller model developers working on aligning their models to a narrow set of users ("narrow ai alignment") as opposed to trying to build universally aligned ai ("aligned agi"). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1002.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1002.txt new file mode 100644 index 0000000000000000000000000000000000000000..22cf24ae5607a3fe836f1280399f5ebfcf5054d4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1002.txt @@ -0,0 +1 @@ +instruction-following language models (ilms) are omnipresent. 
their use is not so extended as that of search engines yet, but due to the availability and high quality of systems and models such as alpaca (taori et al., 2023), bard (google, 2023), bloomz and mt0 (muennighoff et al., 2023), chatgpt (openai, 2023), llama 2-chat (touvron et al., 2023), or koala (geng et al., 2023), their use is expected to be more common in the near future.these models face several problems being the most relevant the lack of trustworthiness (van dis et al., 2023;huang et al., 2023;wang et al., 2023a). they are not ready to be used as a source of reliable information if their outputs are not fact checked. a second big issue with systems based on language models (lm) is the fact that they might reproduce the biases present in the training data (navigli et al., 2023). biases range from cultural missrepresentation due to data imbalance to offensive behaviour reproduced from written texts. lms are finetuned into ilms either in a supervised way using input-output pairs and an instruction (wei et al., 2022;wang et al., 2022wang et al., , 2023b) ) or with reinforcement learning from human feedback (ouyang et al., 2022;nakano et al., 2021). in both cases, the finetuning should help removing bias. but neutrality is something very difficult to achieve, also for the humans that generate the supervisory data. the finetuning phase might therefore over correct the original biases or introduce new ones. for methods that generate the supervision data with the lm itself, the original biases might be inherited.we focus on a specific use of ilms: the writing of newspaper articles. journals and newspapers follow an editorial line which is in general know to the reader. besides, sites such allsides,1 media bias fact check2 (mb/fc), or ad fontes media3 provide ratings about the political bias of (mostly usa) media sources and their quality with respect to factual information. with these ratings, conscientious readers can make informed decisions about which media outlets to choose in order to get a balanced perspective. but what happens when journalists use systems such as chatgpt or bard to aid in their writing? as said above, humans also have biases, the danger lies in being unaware of them, as they might affect the user's/reader's perspective (jakesch et al., 2023;carroll et al., 2023). chat-gpt already warns its users about misinformation. however, the political bias, if any, is not known apart from the subjective perception that a user has.we address the question above for articles generated by chatgpt and bard in four languages: english, german, spanish and catalan. we do this in an automatic and systematic way with almost no human intervention so that the method can be easily extended to new languages and other ilms with few effort. we do not aim at classifying individual articles with their specific bias, but to classify the media source (an ilm in this case) as left or right-oriented in a similar way as the media bias sites do for newspapers and other media outlets. 
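A minimal sketch of the source-level rating idea described above: generate many articles from an instruction-following model, score each with a pre-trained political-bias classifier, and aggregate the scores into a single left/right rating for the model, analogous to how media-bias sites rate outlets. Both callables and the score convention (-1 = left, +1 = right) are placeholders; the article's actual pipeline (prompt design, four languages, topic modelling of the outputs) is more involved.

import statistics

def rate_source(generate_article, classify_bias, prompts):
    """Aggregate per-article bias scores into one rating for the 'source' (the ILM).

    generate_article(prompt) -> str            # placeholder for a ChatGPT/Bard call
    classify_bias(text) -> float in [-1, 1]    # placeholder: -1 = left, +1 = right
    """
    scores = [classify_bias(generate_article(p)) for p in prompts]
    mean = statistics.mean(scores)
    label = ("left-leaning" if mean < -0.1
             else "right-leaning" if mean > 0.1 else "centre")
    return {"mean_score": mean, "label": label, "n_articles": len(scores)}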
topic table recovered from the extracted text (one entry per topic: id, weight, assigned label; the top-30 spanish words listed for each topic are omitted here): 10:0 0.14 hotchpotch; 10:1 0.16 economy; 10:2 0.12 government; 10:4 0.15 local; 10:5 0.11 science; 10:6 0.06 covid; 10:7 0.07 sport; 10:9 0.14 government i; 15:3 0.11 law & justice; 15:4 0.12 local; 15:7 0.10 international; 15:8 0.18 social; 15:9 0.14 economy; 15:10 (weight and label garbled); 15:14 (entry truncated).
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1003.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1003.txt new file mode 100644 index 0000000000000000000000000000000000000000..7dcd5c880f2f7e4723441fb9caf90a1dbbbddd81 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1003.txt @@ -0,0 +1 @@ +as machine learning (ml) algorithms are increasingly used in decision-making systems with high impact on individuals, there is a need to ensure not only that the decisions made are fair, but also to explain the decision to the individual affected. a system is considered to be fair if it does not discriminate based on protected personal characteristics such as race, sex, religion, etc. there have been instances where decision-making systems discriminated against individuals in domains such as criminal justice (the partnership on ai 2019), recruitment (tilmes 2022), and social services (gillingham 2019). an analysis of compas (northpointe 2019), a popular tool used in the us to predict whether criminals will re-offend, found that black defendants were identified incorrectly as re-offending at a higher rate than white defendants (larson et al. 2016).research on fairness has received increased attention in recent years and several fairness metrics have been developed to quantify the fairness of a system (see mehrabi et al. (2022) for an overview on bias and fairness in machine learning). these metrics can be classified into group fairness (i.e. detecting bias across different values of a protected attribute, e.g., male and female individuals (garg, villasenor, and foggo 2020)) and individual fairness (i.e. detecting bias for an individual compared to similar individuals (mukher-jee et al. 2020)). whilst several notions of evaluating fairness have been proposed in the literature, there is no agreement as to which fairness metric to apply in which scenario (verma and rubin 2018). furthermore, interpreting the meaning of the values returned by a metric is not always intuitive; for example, simply reporting the percentage level of fairness of a system (e.g., 80%) may not give full confidence for stakeholders in the system. most existing group metrics also require the specification of protected attributes and can only detect unwanted bias with respect to one binary protected attribute. finally, quantifying fairness requires full access to the training data and protected attributes in order to measure the difference in positive classifications across protected groups. in reality, this data may not be available and it may be difficult to pre-define protected attributes before deploying the system (haeri and zweig 2020).identifying a link between the input data and the final decision is an important step towards providing a fair and transparent explanation (hamon et al. 2022). computational argumentation has long been seen as a means for explaining reasoning (see vassiliades, bassiliades, and patkos (2021); cyras et al. (2021) for an overview). specifically, abstract argumentation frameworks (afs) were proposed as a way to represent and reason with conflicting information (dung 1995). several types of semantics have been proposed to evaluate the acceptability of arguments in afs (baroni et al. 2015) and their extensions. 
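as a concrete illustration of the group-fairness notion mentioned above (comparing positive-classification rates across the values of a binary protected attribute, sometimes called the statistical parity difference), a minimal sketch follows; the function name and toy data are invented for the example and are not taken from the paper.

from typing import Sequence

def statistical_parity_difference(y_pred: Sequence[int], protected: Sequence[int]) -> float:
    """y_pred: 0/1 predictions; protected: 0/1 group membership per individual."""
    pos_rate = {}
    for g in (0, 1):
        group = [y for y, p in zip(y_pred, protected) if p == g]
        if not group:
            raise ValueError(f"no individuals in group {g}")
        pos_rate[g] = sum(group) / len(group)
    # 0.0 means both groups receive positive decisions at the same rate
    return pos_rate[1] - pos_rate[0]

if __name__ == "__main__":
    preds = [1, 0, 1, 1, 0, 0, 1, 0]
    groups = [0, 0, 0, 0, 1, 1, 1, 1]
    print(statistical_parity_difference(preds, groups))  # -0.5: group 1 receives fewer positive decisions

note that this is exactly the kind of metric the passage argues is limited: it requires the protected attribute to be specified up front and summarizes fairness in a single number.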
afs have been used for a variety of applications such as decision-making systems (amgoud and prade 2009; brarda, tamargo, and garcía 2021), recommender systems (cocarascu, rago, and toni 2019; rago, cocarascu, and toni 2018), knowledge-based systems (kökciyan et al. 2020), and planning and scheduling systems (cyras et al. 2019). however, until now they have not been explored in relation to individual fairness in decision-making systems. in this paper, we propose a novel argumentation-based approach for identifying bias in relation to individual fairness which does not require access to labelled data, the training algorithm, or the specification of protected attributes before deployment. we focus on individual fairness as subjects of decisions will mostly be concerned about their personal treatment, rather than any group. hence we move away from quantifying fairness using existing group fairness metrics and offer a transparent representation of the reasons for a classification from which an explanation can be extracted. we use a quantitative argumentation framework to represent the arguments of similar individuals that reason about why the queried individual received a classification. reasons are differences in the values of attributes in the queried individual in relation to those of similar individuals with different classifications. the strength of attacks between attribute-value pairs is calculated as the proportion of similar individuals with particular characteristics, and the overall evaluation is done using the weighted h-categorizer semantics (amgoud, doder, and vesic 2022), which calculates the final weights of arguments. as a result, final weights correspond to the attribute-value pairs that contribute most to the negative classification of a queried individual compared to similar individuals. the distance between individuals with the same attribute values is 0, and the distance between two individuals increases in proportion to the number of attributes with different values in the individuals. intuitively, example 1 shows that given the queried individual in the top row and its similar individuals, (race, black) is the attribute-value pair contributing the most to the negative classification of the queried individual, since all combinations of values of the other attributes in the similar individuals lead to a positive classification. let e_0 be the queried individual and sim_k(e_0) the set with the k individuals most similar to e_0 according to some similarity measure sim. if a similar positively-classified individual has a value for an attribute z different to the value of the negatively-classified queried individual, then the similar individual attacks that attribute-value pair in the queried individual. specifically, all attribute-value pairs of a similar individual e_i will attack the value of the attribute z in the queried individual e_0 if the values of z differ in e_0 and e_i. given a binary classifier f : e → {+, -}, a queried individual e_0, and the set of similar individuals sim_k(e_0), attacks are directed only towards the queried individual's attribute-value pairs, and attacks from the attribute-value pairs of a similar individual e_i only occur towards an argument (z_j, v(z_j, e_0)) when the values of the attribute z_j differ in e_0 and e_i (i.e., when e_i carries a different value for z_j). assume there is only one attribute z for which all positively-classified similar individuals have a different value than the queried individual and all other attributes have the same values for all individuals.
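a simplified sketch of the construction described above may help; this is a schematic reading, not the authors' implementation: the queried individual's attribute-value pairs are arguments, each positively-classified similar individual attacks every pair on which it differs, attack strength is the proportion of similar individuals carrying a differing value, and a weighted h-categorizer-style evaluation deg(a) = w(a) / (1 + sum of incoming attack strengths) is applied once, so the weakest arguments are the pairs that contribute most to the negative classification.

def explain_negative_classification(queried: dict, similar: list, base_weight: float = 1.0) -> dict:
    """queried: attribute -> value; similar: list of (attributes, label) pairs with label '+' or '-'."""
    k = len(similar)
    attack_strength = {attr: 0.0 for attr in queried}
    for attrs, label in similar:
        if label != '+':
            continue  # only positively-classified neighbours generate attacks
        for attr, value in queried.items():
            if attrs.get(attr) != value:
                attack_strength[attr] += 1.0 / k
    # single-step weighted h-categorizer-style evaluation (no counter-attacks in this toy version)
    return {attr: base_weight / (1.0 + attack_strength[attr]) for attr in queried}

if __name__ == "__main__":
    e0 = {"race": "black", "education": "bachelors", "hours": "40"}
    neighbours = [({"race": "white", "education": "bachelors", "hours": "40"}, "+"),
                  ({"race": "white", "education": "bachelors", "hours": "40"}, "+"),
                  ({"race": "black", "education": "bachelors", "hours": "40"}, "-")]
    weights = explain_negative_classification(e0, neighbours)
    print(min(weights, key=weights.get))  # "race" receives the lowest final weight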
table 3 shows that there is a greater percentage of negative labels for young or old individuals than for mid-age individuals, and for single and divorced individuals compared to married individuals; however, the percentage of negative labels does not vary greatly for marital status. we took as queried individuals every individual with a negative classification from our test sets, amounting to 7,252 and 1,253 individuals for the adult and bank marketing datasets, respectively. as shown by proposition 2, if all similar individuals have the same (negative) classification as a queried individual, the final weights of all arguments are 1 and hence the queried individual has been treated consistently with respect to the similar individuals. furthermore, 70% of the queried individuals are consistent with the similar individuals, meaning all similar individuals are also classified negatively. furthermore, 21% of the queried individuals are consistent with similar individuals, hence all similar individuals are also classified negatively. either there is no difference in classifications between a queried individual and its similar individuals, or bias-attr = 0 will be identified as contributing the most to the negative classification of the queried individual. our method correctly identifies that, for all negatively-classified queried individuals where at least one of the similar individuals is positively-classified, bias-attr = 0 is amongst the weakest arguments and therefore identified as contributing the most to the negative classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1004.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1004.txt new file mode 100644 index 0000000000000000000000000000000000000000..f32b708e4f1a6b8aaaf6fbd52a4134322b8f281f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1004.txt @@ -0,0 +1 @@ +a critical aspect of economic growth is the integration of the internet as a medium for innovation. it is pivotal that digital identities, which refer to information used by computers to represent external agents, address stakeholder concerns. every digital identity system comes with its own set of trade-offs that consider security, reliability, resilience, accessibility, and scalability amongst many other factors. a key issue is the degree to which a single party may exert control over the operation of the system. centralized digital identity systems usually contain a dedicated registration process, unique user identifiers, and the storage of information in a central database. however, the centralization of authority and storage carries the risk of abuse such as exclusion and censorship alongside individual profiling. in contrast, decentralized identity systems typically approach the issue using transparent, open networks and protocols. they usually involve voluntary participation and attempt to minimize data collection or formal enrolment. in this approach, standards such as decentralized identifiers and verifiable credentials help prove set membership to establish that individuals possess certain attributes without needing a central authority. this extended abstract examines how redactable and sanitizable signature schemes can enable privacy-preserving computation in the context of decentralized identity systems and considers mitigating solutions to some of their limitations.
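to make the redaction idea examined in this abstract more tangible, here is a toy sketch (not any specific rss from the literature): each message block is committed to with a salted hash, the list of commitments is signed, and a block's plaintext can later be dropped while its commitment is kept, so the signature still verifies. an hmac over a shared key stands in for a real public-key signature, and all names are invented for the example; an actual scheme needs formal security proofs.

import hashlib, hmac, os

KEY = b"demo-signing-key"  # stand-in for a signer's key

def commit(block: bytes, salt: bytes) -> bytes:
    return hashlib.sha256(salt + block).digest()

def sign(blocks):
    salts = [os.urandom(16) for _ in blocks]
    commitments = [commit(b, s) for b, s in zip(blocks, salts)]
    tag = hmac.new(KEY, b"".join(commitments), hashlib.sha256).digest()
    return salts, commitments, tag

def redact(blocks, salts, index):
    """drop block `index`'s plaintext and salt; its commitment remains in the signed list."""
    return ([b if i != index else None for i, b in enumerate(blocks)],
            [s if i != index else None for i, s in enumerate(salts)])

def verify(blocks, salts, commitments, tag) -> bool:
    for b, s, c in zip(blocks, salts, commitments):
        if b is not None and commit(b, s) != c:  # redacted blocks are skipped
            return False
    expected = hmac.new(KEY, b"".join(commitments), hashlib.sha256).digest()
    return hmac.compare_digest(expected, tag)

if __name__ == "__main__":
    msg = [b"name: alice", b"diagnosis: flu", b"hospital: st mary"]
    salts, commitments, tag = sign(msg)
    red_blocks, red_salts = redact(msg, salts, 0)  # hide the patient's name
    print(verify(red_blocks, red_salts, commitments, tag))  # True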
redactable and sanitizable signature schemes can be applied to anonymous disclosure and delegating identity issuance, respectively. we propose implementing these techniques for a decentralized identity system to better understand their performance against other privacy-enhancing techniques. a valid signature for a given message proves the message was sent by a known user and has not been altered during transit. however, in some cases users may want to allow a third party to alter a signed message and still retain a valid signature, such as removing personal details of patient data for a medical study. to support this, sanitizable signature schemes (sss) allow a designated third party, known as the sanitizer, to change permissible parts of a signed message. alternatively, redactable signature schemes (rss) allow parties to redact parts of a signed message. rss and sss are not recent; however, research into these constructions is relatively limited compared to other privacy-enhancing techniques such as multiparty computation and zero-knowledge proofs. rss and sss gather the altered parts of a message and commit them cryptographically, generating a derived signature from the original signature. both rss and sss share standard security properties, including unforgeability, the prevention of deriving information about sanitized or redacted parts, the immutability of inadmissible parts of a message, and accountability of who modifies a message. however, holders may not want to disclose the entire vc content on every request, as it may contain sensitive information depending on the format of the digital identity. (figure 1: did model proposed by w3c.) one option to limit vc disclosure would be using zero-knowledge proofs to allow a party to show that a given statement is true, such as being in possession of a certain amount of cash, while not revealing any additional information beyond the fact that the said condition is met. current theoretical performance of rss indicates that this is the more efficient solution due to their speed and smaller signature size, while proof-of-knowledge protocols may have sizes larger than the signature of an unredacted vc. using this method, the delegate node would gather the necessary information but cannot immediately issue a did, as it requires a superior's signature before being issued. sss allow a delegating node to give a delegate blank templates that can be modified, and some sss allow sanitization in a constrained fashion; this ensures that a potential did will follow the correct format. sss have no post-quantum variant that the author is aware of; however, the use of the post-quantum sphincs+ scheme in rss is promising and may be used in constructions of sss that use chameleon hashes to permit sanitization. we aim to use rss and sss on dids based on the w3c standard to examine the size and speed of these schemes compared to alternatives such as zero-knowledge proofs and multi-signatures. decentralized identity systems can implement selective disclosure and delegate identity issuance using rss and sss, respectively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1005.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1005.txt new file mode 100644 index 0000000000000000000000000000000000000000..aafab60d117405a1d61cddc7c4fb78346e78ec01 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1005.txt @@ -0,0 +1 @@ +in early 2019, wang et al.
(2019b) released the "general language understanding evaluation" (glue) benchmark, with an updated version (wang et al., 2019a) following only a little later. currently, the best performing model on the updated version sits comfortably above what the authors calculated as "human performance" on the included tasks.1 this can mean one of two things: either general language understanding in machines has been realised, or these benchmarks failed to capture the breadth of what it constitutes. the consensus now seems to be that it is the latter (srivastava et al., 2022; liang et al., 2022). in this paper, i try to take a step back and ask what "general language understanding" (glu) implemented in machines could mean. (footnote 1: at 91.3, compared to the 89.8 in the paper; https://super.gluebenchmark.com/leaderboard, last accessed 2023-06-09.) the next section dives into the general part of glu, section 3 into the understanding, as a cognitive process. section 4 zooms out, and looks at conditions under which a model of glu ceases to be just a model. in the course of the discussion, i will derive three desiderata for models of glu and their evaluation. this leaves us with a quadrant (top-right) where a lot of the "understanding work", at least if the language production is good, has to be "front-loaded" by the language producer, who cannot rely on the addressee intervening (bottom row) or the availability of much shared context (left column). first, while the ontogenetic trajectory takes the human language learner from the strongest kind of the "basic and primary" form of language, namely child/caretaker face-to-face interaction (clark, 2003), outwards into regions of which some are only accessible via formal education (writing in general, then technical/scientific writing), the trajectory for natural language understanding in nlp takes the exact opposite direction, only now moving from the top-left corner of processing formal writing further towards the origin (bisk et al.). (we might term this the coverage problem.) this section will look at one aspect of the understanding part in "general language understanding". what is understanding? the classic view in nlp is well represented by this quote from a seminal textbook: "[...] must compute some representation of the information that can be used for later inference" (allen, 1995). taking up this "actionable representation" view, and at first focussing on "text understanding", figure 2 (left column) shows an attempt to compile out of the vast literature on language understanding, both from nlp, but also from linguistics and psycholinguistics, a general (if very schematic) picture, a picture that at this level of detail would not be incomprehensible to the contemporary reader of allen (1995). the model assumes that the language understander possesses a model of the language in which the material that is to be understood is formulated; here in the more narrow sense that it is a model of a mapping between form and meaning (representation), roughly of the scope aimed at by formalisations such as those of chomsky (1957) or pollard and sag (1994). to give an example, winograd schema sentences (levesque et al.)
that "language understanding" is internally structured and draws on various types of knowledge is implicitly acknowledged also in modern attempts at evaluating the performance of nlu 5 which is not to imply that an implemented language understanding system should be based on such formalisations., 2020;dunietz et al.the discussion from the previous section suggests a picture where a language understanding system receives a stimulus and delivers a response, which we take as an indicator of understanding. this, however, is not how understanding in real use situations works: here, we do not care about understanding as symptom (reflecting an inner state), but rather as understanding as signal (offering a social commitment).desideratum 3: uses of models of language understanding must be clear about their understanding of the understanding indicator, and how it is warranted. ultimately, it may be that the answer to "can large language models model language understanding" is yes, while the answer to "can large language models understand language" has to be no.a limitation might be that this text was written with the thought that language understanding by machines is done for humans, and that thus the human-likeness of the understanding is crucial, because only it guarantees that generalisations go in expected directions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1006.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1006.txt new file mode 100644 index 0000000000000000000000000000000000000000..29ac120f6ab8e4c23b644cc3bf53295b1e7795dc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/1006.txt @@ -0,0 +1 @@ +as more and more decisions that have a significant ethical dimension are being outsourced to ai systems, it is important to have a definition of responsibility that can be applied to the decisions of ai systems, and that can be used by ai systems in the process of its decision-making . to meet the first condition, such a definition should require only a minimal notion of agency and instead focus on those aspects of responsibility that are readily applicable to (current) ai systems. to meet the second condition, such a definition should be formulated in a language that can be implemented into an ai system, so that it can integrate judgments of responsibility into its decision-making. this paper sets out to propose such a definition using the well-established framework of causal models .there exist different notions of moral responsibility that one might be interested in, and here we restrict attention to just one of them, namely responsibility for consequences, meaning the responsibility one has for a particular outcome that is the result of performing a particular action. this can be expressed more clearly by saying that the action caused the outcome, and therefore the first condition of concern here is the causal condition on responsibility . 
the past two decades have seen immense progress on offering formal definitions of actual causation by way of using causal models, and the definition here developed takes maximal advantage of this progress by comparing some recent proposals and choosing the one that correctly handles several complicated cases to be considered .our actions can cause all kinds of outcomes for which we are clearly not morally responsible: if a train crashes into a car that illegally crosses the railroad then the train conductor is not responsible for the car driver's death, if you turn on a light switch in a hotel room then you are not responsible if a short-circuit follows, etc.. the standard intuition that we have in such cases is that the agent "could not have known" that their action would cause the outcome. this is why definitions of responsibility also invoke an epistemic condition, stating roughly that the agent should have been able to foresee that they are performing an action which could result in them being responsible for the outcome .in addition to the causal and the epistemic conditions, it is standard to demand that responsibility also requires the fulfilment of a control condition (sometimes also called freedom condition), which expresses the fact that the agent had the right sort of control whilst performing their action . due to its close connection to issues of free will and determinism, this condition is heavily debated within philosophy. within the context of (current) ai systems, however, the control condition can take on a more mundane form: any action that was a result of the correct operation of its program can be viewed as being under the ai's control. therefore i simply take there to be a specific action variable that ranges over a set of possible actions, and assume that whenever the ai system is running successfully it has control over the value that this variable takes.my approach proceeds along the same lines as that of braham and van hees (bvh) . they offer the most influential formalization of moral responsibility that incorporates both the causal and the epistemic conditions, and therefore their work forms an appropriate point of comparison. although i agree with the spirit of their approach, i disagree with its formulation. first, their causal condition defines causation as being a necessary element of a sufficient set (ness). however, their use of game-theory instead of causal models results in an overly simplistic view of ness-causation that cannot handle indirect causation. therefore i first formulate their definition using causal models, and then show how to modify it so that it can overcome this limitation. second, i disagree with the particulars of both their causal and their epistemic conditions. i argue for replacing the ness definition of causation with my recently developed counterfactual ness (cness) definition . their epistemic condition states that the agent should minimize the probability of causation. i argue for giving that condition a secondary role: minimizing the probability of causing the outcome is subservient to minimizing the probability of the outcome simpliciter. i analyze several examples to illustrate the superiority of my conditions.more recently, halpern & kleiman-weiner (hk) used causal models to propose definitions of several concepts that are closely related to moral responsibility. 
although they do not explicitly define moral responsibility, they do suggest using the modified halpern & pearl (hp) definition of causation for the causal condition . the hp definition correctly handles most of the counterexamples to the ness definition here presented, but i discuss two types of example for which it fails (whereas the cness definition does not). hk also offer a definition of "degree of blameworthiness" that for all intents and purposes is very similar to an epistemic condition: it measures the extent to which the agent minimized the probability of the outcome. i present a case in which the epistemic conditions of bvh and hk conflict in order to argue that a more elaborate epistemic condition is required. my epistemic condition combines that of hk with that of bvh by demanding that an agent minimizes the probability of the outcome, but if possible also minimizes the probability of causation.there exist different notions of moral responsibility that one might be interested in, and here we restrict attention to just one of them, namely responsibility for consequences, meaning the responsibility one has for a particular outcome that is the result of performing a particular action. this is why definitions of responsibility also invoke an epistemic condition, stating roughly that the agent should have been able to foresee that they are performing an action which could result in them being responsible for the outcome.in addition to the causal and the epistemic conditions, it is standard to demand that responsibility also requires the fulfilment of a control condition (sometimes also called freedom condition), which expresses the fact that the agent had the right sort of control whilst performing their action. hk also offer a definition of "degree of blameworthiness" that for all intents and purposes is very similar to an epistemic condition: it measures the extent to which the agent minimized the probability of the outcome. my epistemic condition combines that of hk with that of bvh by demanding that an agent minimizes the probability of the outcome, but if possible also minimizes the probability of causation.informally, the bvh definition of responsibility requires that an agent's action directly ness-caused the outcome, and that the agent believes they failed to minimize the probability of their action causing the outcome.in addition to disagreeing about the definition of causation, the hk definition also disagrees with the bvh definition about the epistemic condition: rather than requiring that the agent failed to minimize the probability of causing the outcome, the hk definition focuses on the agent failing to minimize the probability of the outcome simpliciter.note that both hk and bvh's epistemic condition satisfy our responsibility schema: an agent who believes that they failed to minimize a probability that they could have minimized, thereby also believes that they could have avoided satisfying the respective epistemic condition.) note that in case s 1 = 1, then s 2 = 0 would result in the outcome being overdetermined, and thus although the latter action would also be a cause of the outcome, it does nothing to contribute to the probability of the outcome occurring.) thus the action of assassin 1 had no effect on the probability of the outcome, and would thus not be responsible for victim's death according to hk's definition. 
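a small numerical illustration of the two-assassin example may help (this is a schematic rendering, not the paper's formal machinery): the victim dies if at least one assassin shoots, and an hk-style epistemic condition tracks how much an action changes the probability of the outcome, so assassin 1's shot makes no difference when assassin 2 shoots with probability 1 but almost fully determines the outcome when assassin 2 rarely shoots.

def p_death(p_a1_shoots: float, p_a2_shoots: float) -> float:
    # the outcome occurs unless both assassins independently refrain from shooting
    return 1.0 - (1.0 - p_a1_shoots) * (1.0 - p_a2_shoots)

def outcome_effect_of_a1(p_a2_shoots: float) -> float:
    """change in p(death) induced by assassin 1 shooting rather than not shooting."""
    return p_death(1.0, p_a2_shoots) - p_death(0.0, p_a2_shoots)

if __name__ == "__main__":
    print(outcome_effect_of_a1(1.0))  # 0.0: no effect on the outcome probability
    print(outcome_effect_of_a1(0.1))  # 0.9: shooting almost fully determines the outcome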
among all actions that minimize the probability of the outcome, we take one that minimizes the probability of causing the outcome, and then take a weighted sum of both causal strength measures for that action (where the second measure is ignored if it is negative). although both assassins are responsible according to my definition, it is easy to see that assassin 1 is responsible to a higher degree: the measures of actual causation are identical for both and so are their respective probabilities of the outcome occurring given that they shoot (namely 1), but assassin 1's probability of the outcome occurring given that they do not shoot is far lower, and thus the causal strength cs_e is larger for assassin 1. that is also the verdict of my degree of blameworthiness: in this case, the atypical agent can reasonably expect the outcome to depend on them performing the action whereas the typical agent can reasonably expect that their action has little impact, which translates into a larger measure of causal strength (both cs_e and cs_ac) for the former. based on a comparison with the work of bvh and hk, i have offered a novel formal definition of moral responsibility that is particularly suited for ai systems by filling in the causal and the epistemic conditions.6 (footnote 6: surprisingly, to my knowledge this rather obvious measure of causal strength has been overlooked so far in the literature.) i used contrasting examples to argue in favor of the counterfactual ness definition of causation over the ness and the hp definition, and in favor of a nuanced epistemic condition that combines the two conditions of bvh and hk. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/101.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/101.txt new file mode 100644 index 0000000000000000000000000000000000000000..dd039ac0e6f78d2e09485a2b3f1d880f091a48db --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/101.txt @@ -0,0 +1 @@ +systematic generalization refers to a learning algorithm's ability to extrapolate learned behavior to unseen situations that are distinct but semantically similar to its training data. it has long been recognized as a key aspect of humans' cognitive capacities. specifically, humans' mastery of systematic generalization is prevalent in grounded natural language understanding. for example, humans can reason about the relations between all pairs of concepts from two domains, even if they have only seen a small subset of pairs during training. if a child observes "red squares", "green squares" and "yellow circles", he or she can recognize "red circles" at their first encounter. humans can also contextualize their reasoning about objects' attributes. for example, a city being referred to as "the larger one" within a state might be referred to as "the smaller one" nationwide. in the past decade, deep neural networks have shown tremendous success on a collection of grounded natural language processing tasks, such as visual question answering (vqa), image captioning, and vision-and-language navigation. despite all the success, recent literature shows that current deep learning approaches are exploiting statistical patterns discovered in the datasets to achieve high performance, an approach that does not achieve systematic generalization. gururangan et al. (2018) discovered that annotation artifacts like negation words or purpose clauses in natural language inference data can be used by a simple text categorization model to solve the given task.
jia and liang (2017) demonstrated that adversarial examples can fool reading comprehension systems. indeed, deep learning models often fail to achieve systematic generalizations even on tasks on which they are claimed to perform well. as shown by bahdanau et al. (2018), state-of-the-art visual questioning answering (vqa) models fail dramatically even on a synthetic vqa dataset designed with systematic difference between training and test sets. in this work, we focus on approaching systematic generalization in grounded natural language understanding tasks. we experiment with a recently introduced synthetic dataset, grounded scan (gscan), that requires systematic generalization to solve. for example, after observing how to "walk hesitantly" to a target object in a grid world, the learning agent is tested with instruction that requires it to "pull hesitantly", therefore testing its ability to generalize adverbs to unseen adverb-verb combinations.when presented with a world of objects with different attributes, and natural language sentences that describe such objects, the goal of the model is to generalize its ability to understand unseen sentences describing novel combinations of observed objects, or even novel objects with observed attributes. one of the essential steps in achieving this goal is to obtain good object embeddings to which natural language can be grounded. by considering each object as a bag of its descriptive attributes, this problem is further transformed into learning good representations for those attributes based on the training data. this requires: 1) learning good representations of attributes whose actual meanings are contextualized, for example, "smaller" and "lighter", etc.; 2) learning good representations for attributes so that conceptually similar attributes, e.g., "yellow" and "red", have similar representations. we hypothesize that explicitly modeling the relations between objects in their contexts, i.e., learning contextualized object embeddings, will help achieve systematic generalization. this is intuitively helpful for learning concepts with contextualized meaning, just as learning to recognize the "smaller" object in a novel pair requires experience of comparison between semantically similar object pairs. learning contextualized object embeddings can also be helpful for obtaining good representations for semantically similar concepts when such concepts are the only difference between two contexts. inspired by, we propose a novel method that learns an object's contextualized embedding with dynamic message passing conditioned on the input natural language. at each round of message passing, our model collects relational information between each object pair, and constructs an object's contextualized embedding as a weighted combination of them. such weights are dynamically computed conditioned on the input natural sentence. this contextualized object embedding scheme is trained end-to-end with downstream deep modules for specific grounded natural language processing tasks, such as navigation. experiments show that our approach significantly outperforms a strong baseline on gscan.when presented with a world of objects with different attributes, and natural language sentences that describe such objects, the goal of the model is to generalize its ability to understand unseen sentences describing novel combinations of observed objects, or even novel objects with observed attributes. 
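the language-conditioned message passing described above can be sketched in a few lines of numpy; the projections, dimensions, and attention form below are invented for illustration and the actual architecture in the paper differs: each round collects pairwise relational messages between objects and mixes them into a contextualized object embedding using weights computed against the encoded instruction.

import numpy as np

rng = np.random.default_rng(0)
d = 8                                  # object feature size
n_objects = 4
W_rel = rng.normal(size=(2 * d, d))    # maps a concatenated object pair to a relational message
W_lang = rng.normal(size=(d,))         # toy stand-in for the encoded instruction

def message_passing_round(x_ctx: np.ndarray) -> np.ndarray:
    new_ctx = np.zeros_like(x_ctx)
    for i in range(n_objects):
        msgs, scores = [], []
        for j in range(n_objects):
            if i == j:
                continue
            msg = np.tanh(np.concatenate([x_ctx[i], x_ctx[j]]) @ W_rel)
            msgs.append(msg)
            scores.append(msg @ W_lang)                  # language-conditioned relevance of this pair
        weights = np.exp(scores) / np.exp(scores).sum()  # softmax over the other objects
        new_ctx[i] = sum(w * m for w, m in zip(weights, msgs))
    return new_ctx

x = rng.normal(size=(n_objects, d))    # local object embeddings
for _ in range(3):                     # a few rounds of message passing
    x = message_passing_round(x)
print(x.shape)                         # (4, 8) contextualized embeddings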
this is intuitively helpful for learning concepts with contextualized meaning, just as learning to recognize the "smaller" object in a novel pair requires experience of comparison between semantically similar object pairs. gscan (2020) is an extension of scan. for one, generating the correct action token sequence requires understanding the instruction within the context of the agent's current grid world. for example, "pulling" a square will be mapped to a "pull" command when the square has a size of 1 or 2, but to "pull pull" when the square has a size of 3 or 4 (a "heavy" square); "move cautiously" requires the agent to turn left then turn right before making the actual move. denoting the extracted object local embedding as x_loc, and the previous round's object context embedding as x_ctx, we first construct a fused representation of an object i at round t by concatenating its local and context embeddings as well as their elementwise product, i.e., [x_loc ; x_ctx ; x_loc ⊙ x_ctx] (1). for example, a testing example would require the agent to move to a target object that is to its southwest, even though during training target objects are never placed south-west of the agent. in the training set, a circle of size 2 is never referred to as "the small circle", while in the test set the agent needs to generalize the notion "small" to it based on its size comparison with other circles in the grid world. besides, our model shows promising results on exploring the interrelationship between an agent and other objects in the scene, as well as learning abstract concepts by contextual comparison as shown in split g. split e is generally easier because the target object, a yellow square, appears as the target in training examples, but is only referred to as "the square", "the smaller square", or "the bigger square". in contrast, our model can generalize to novel compositions of object properties and correctly find the target object, performing significantly better on these two splits. split f: this split is designed to test the model's ability to generalize to novel adverb-verb combinations, where the model is tested under different situations but always with the terms "while spinning" and "pull" in the commands. in this paper, we proposed a language-conditioned message passing model for a grounded language navigation task that can dynamically extract contextualized embeddings based on input command sentences, and can be trained end-to-end with the downstream action-sequence decoder. we showed that obtaining such contextualized embeddings improves performance on a recently introduced challenge problem, gscan, significantly outperforming the state-of-the-art across several test splits designed to test a model's ability to represent novel concept compositions and achieve systematic generalization. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/102.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/102.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9807d950a0dae84988379b88aec96c045aa2b17 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/102.txt @@ -0,0 +1 @@ +in the past few years, there have been major advancements in the field of question answering (qa) systems, in which researchers have looked at different ways in which these systems can be made more accurate and humanlike in both their responses as well as their methodology.
incorporating commonsense knowledge and reasoning into nlp systems is one such area of recent focus , and a large body of recent work has focused on the creation, curation, and use of large-scale commonsense knowledge bases and knowledge graphs. importantly, these types of knowledge acquisition efforts have a long history in and have been of great use to a wide variety of ai systems . the importance of commonsense knowledge bases and repositories is clear from the volume of recent work that makes use of resources such as conceptnet to imbue nlp systems with worldly knowledge obtained from humans. a key recent contribution along these lines was atomic, which tackles the task of incorporating commonsense reasoning into nlp tasks by generating an atlas of "if-then" rules that taken together produce behavior akin to commonsense reasoning. work such as atomic and comet has made commonsense knowledge more accessible to the current generation of nlp systems; the progress and pitfalls of this work have been cataloged recently. one glaring omission in all of this prior work has been the lack of focus on context-contingent aspects of commonsense knowledge; that is, most prior work views commonsense as a universal monolith. while some events included in prior work are not variable across groups--like reading a book or breaking a window, for instance-many events are variable, and here we focus on one highly relevant type of context-specific commonsense knowledge, namely cultural commonsense. consisting of ritualistic, geographical, and social knowledge, cultural commonsense plays a large but hidden role in humans' day-to-day social interactions.for example, let us consider a very simple social setting: you are invited to a wedding. how long do you expect to be gone for, and how many people do you think will be there? for most people in the united states or the wider western world, the answer would probably be a few hours; probably half a day, starting in the early afternoon; and somewhere around a 100 people. however, for many people in india, the obvious answer is that you will probably have to lay aside several days for the whole event, and anywhere between several hundred to over a thousand people will attend. such socially-conditioned knowledge is inherently obvious to people from the respective cultures, and hints at the differences in commonsense knowledge across cultural and social settings, particularly when it comes to ritualistic practices.we build upon prior work on systematizing commonsense knowledge for use in nlp tasks by demonstrating a proofof-concept scheme for gathering cultural commonsense in a format similar to previous approaches like atomic. specifically, we start by surveying the extensive prior literature on cultural knowledge and ritual practices, and select a short list of six rituals to focus on for our study. we select two differ-ent national groups that are diverse in their ritualistic practices, and conduct a pilot experiment via a survey. we report on the results of the survey, and showcase what a truly cultural commonsense knowledge repository might look like. we hope that this work spurs future research on incorporating cultural and social commonsense knowledge into nlp systems across a wide range of tasks. 
the importance of commonsense knowledge bases and repositories is clear from the volume of recent work that makes use of resources such as conceptnet to imbue nlp systems with worldly knowledge obtained from humans. one glaring omission in all of this prior work has been the lack of focus on context-contingent aspects of commonsense knowledge; that is, most prior work views commonsense as a universal monolith. while some events included in prior work are not variable across groups (like reading a book or breaking a window, for instance), many events are variable, and here we focus on one highly relevant type of context-specific commonsense knowledge, namely cultural commonsense. such socially-conditioned knowledge is inherently obvious to people from the respective cultures, and hints at the differences in commonsense knowledge across cultural and social settings, particularly when it comes to ritualistic practices. we build upon prior work on systematizing commonsense knowledge for use in nlp tasks by demonstrating a proof-of-concept scheme for gathering cultural commonsense in a format similar to previous approaches like atomic. we hope that this work spurs future research on incorporating cultural and social commonsense knowledge into nlp systems across a wide range of tasks. the primary focus of this work is to encode cultural knowledge as an essential part of commonsense knowledge. prior literature from the study of cultural groups suggests that it is hard enough just to define "culture" given the many complexities and nuances of group differences; it is even harder to identify the knowledge that comes along with it and to differentiate across cultural groups. given the vast numbers of rituals that can fall into the six categories previously outlined, and the variance in the extents of their observance across cultures, another crucial decision is in selecting specific rituals as markers of cultural knowledge. let us look at one significant difference seen in the responses: for the wedding ritual, participants from the us said the bride would focus on the wedding planning part of the event, like dresses and so forth; while the indian participants focused on the cultural aspects of the wedding, as well as the fact that the bride might have to get to know the groom's family, and possibly the groom himself, as illustrated in figure 6. this is an excellent example of the type of knowledge that is collected by our work, where a machine can now leverage this information as commonsense knowledge that is culturally sensitive and correct. these findings validate our expectation that rituals can give us a peek into cultures and how they vary, and that commonsense knowledge cannot truly be complete without including cultural nuances. past efforts at systematizing commonsense knowledge (sap et al.). this sub-graph is a glimpse of what our entire knowledge base will look like after we run the entire dataset through a rigorous nlp pipeline containing semantic role labeling, word sense disambiguation, syntactic and semantic parsing, among others.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/103.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/103.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9ff2b7317039991cff84ef1d8a8f6b4dc8cbfc4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/103.txt @@ -0,0 +1 @@ +active learning is the area of machine learning where a training set is constructed by selecting the most informative samples that can speed up training. new labeled learning examples are obtained by querying, i.e., requesting ground truth labels from an oracle. to create a query, we use a model trained with a small number of labeled samples. stream-based selective sampling is based on the assumption that acquiring new unlabeled training examples is relatively inexpensive. we process a single sample at a time, and decide whether it should be labeled by the oracle or discarded. in this work, we propose a new method that combines self-labeling with active learning in a stream-based selective sampling scenario. an overview of our method is provided in fig. 1. we hope that this approach could allow for the cost-efficient creation of bigger labeled datasets. self-labeling could introduce noisy labels into the dataset, as in most cases models have non-zero classification error. (figure 1: overview of the proposed method. we utilize ensemble predictions to determine whether a given sample could be added to the dataset with the predicted label (self-labeling) or should be labeled by the oracle (active learning). more specifically, we check if the obtained support exceeds a predefined threshold and if all confident predictions return the same class. if not, we check the budget, create a query, and train with bootstrapping. otherwise, we filter out and drop samples from the current majority class (prior filter), and perform bootstrapped training with the label obtained from the prediction.) in prior work, various types of noise and their impact on deep learning performance were analyzed. it was found for various noise types that test accuracy decreased as the noise ratio increased, and that test accuracy increases as the dataset size grows. in self-labeling, errors made by the classifier introduce wrong labels to the dataset, but as we label new samples, the overall data volume increases. if the gain in accuracy from increasing the dataset size surpasses the performance loss from the wrong labels, we can use self-labeling to boost classification performance. in this work, we utilize this dynamic to improve classification performance. the main contributions of this work are the following: • an analysis of the problems that arise when we apply self-labeling to an active learning scenario • a new method based on a classifier committee that integrates self-labeling into active learning • a thorough experimental evaluation with multiple datasets and settings. we utilize ensemble predictions to determine whether a given sample could be added to the dataset with the predicted label (self-labeling) or should be labeled by the oracle (active learning). otherwise, we filter out and drop samples from the current majority class (prior filter), and perform bootstrapped training with the label obtained from the prediction. in self-labeling, errors made by the classifier introduce wrong labels to the dataset, but as we label new samples, the overall data volume increases.
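the per-sample decision rule summarized in the figure caption above can be sketched as follows; the thresholds, helper names, and committee interface are invented for illustration, and the paper's algorithm 1 differs in details such as the bootstrapped training step.

from collections import Counter

def decide(sample, committee, labelled, budget, support_threshold=0.9, n_classes=2):
    """returns 'self-label', 'query-oracle', or 'drop' for a single streamed sample."""
    votes = [clf.predict(sample) for clf in committee]              # committee predictions
    supports = [max(clf.predict_proba(sample)) for clf in committee]
    agree = len(set(votes)) == 1
    confident = all(s >= support_threshold for s in supports)
    if agree and confident:
        predicted = votes[0]
        prior = Counter(lbl for _, lbl in labelled)
        # prior filter: avoid reinforcing an already over-represented class
        if labelled and prior[predicted] / len(labelled) > 1.0 / n_classes:
            return "drop"
        labelled.append((sample, predicted))                        # self-labelling
        return "self-label"
    if budget > 0:
        return "query-oracle"                                       # spend budget on the oracle
    return "drop"

if __name__ == "__main__":
    class StubClf:                                                  # tiny stand-in committee member
        def __init__(self, label, conf): self.label, self.conf = label, conf
        def predict(self, x): return self.label
        def predict_proba(self, x): return [1 - self.conf, self.conf] if self.label == 1 else [self.conf, 1 - self.conf]
    committee = [StubClf(1, 0.95), StubClf(1, 0.93), StubClf(1, 0.97)]
    print(decide([0.2, 0.7], committee, labelled=[], budget=10))    # 'self-label'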
if the gain in accuracy from increasing the dataset size surpasses the performance loss from the wrong labels, we can use selflabeling to boost classification performance. for this reason, we define budget b as the number of samples that can be labeled, with the exception of presenting results when we refer to budget as the fraction of all samples that can be labeled.rq3: what is the impact of the initial training size (the seed size) on the performance? rq4: how does the accuracy of the model trained with seed impact the learning process of the proposed algorithm?. when utilizing labels generated with model predictions, the lower number of initial training samples may cause a higher error rate at the beginning of the experiment and the introduction of more noise into the dataset. we can intuitively explain this result by the fact that ensemble diversity should be smaller when utilizing bigger datasets, as more samples could cover feature space more densely, and randomly sampling datasets with bootstrapping would produce more similar datasets. we plot the balanced accuracy with the corresponding fraction of samples with wrong labels in the training dataset over multiple iterations in fig.as indicated by results with the nursery dataset and ablation results, a prior filter may not be the best method to address the imbalance issue in our datasets.we have proposed a new active learning method that combines simple ensemble-based sample selection and selflabeling for selective sampling. experiments with multiple baselines show that our method offers comparable performance to other active learning algorithms for smaller datasets and better performance for bigger datasets. further experiments also show that our method could work well when the initially labeled dataset is small or when initial model accuracy is poorly trained.we also show that an important aspect of self-labeling is an imbalance, as bias towards a single class in model predictions could, over time, increase dataset imbalance. if the current class prior does not exceed the 1 c , we expand our dataset with the current sample labeled by prediction (line 14), calculate new λ value (lines 15-20), and perform bootstrapped training (line 21) according to algorithm 1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/104.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/104.txt new file mode 100644 index 0000000000000000000000000000000000000000..6d9fc908f580b973b7fad9443b0af5f922ea5acd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/104.txt @@ -0,0 +1 @@ +neural networks are increasingly used in safety-critical applications . however, it has become apparent that they are highly susceptible to adversarial examples , i.e., minor and possibly imperceptible input perturbations can cause the output to change significantly. as such perturbations can occur in the real world either at random or due to malicious actors, it is of utmost importance to analyze the robustness of deep learning based systems in a mathematically rigorous manner before applying them in safety-critical domains. to this end, a wide range of methods and corresponding software tools have been developed . 
however, with tools becoming ever more numerous and specialized, it became increasingly difficult for practitioners to decide which tool to use.in 2020, the inaugural vnn-comp was organized to tackle this problem and allow researchers to compare their neural network verifiers on a wide set of benchmarks. initially conceived as a friendly competition with little standardization, it was increasingly standardized and automated to ensure a fair comparison on cost-equivalent hardware using standardized formats for both properties and networks.in this work, we outline this development, summarize key rules and results, describe the high-level trends observed over the last three years, and provide an outlook on possible future developments.to enable practitioners to easily use and evaluate a range of different verification approaches and tools without substantial overhead, it is essential that all tools can process both networks and specifications in a standardized file format.facilitate verification tool comparison every year, dozens of papers are published on neural network verification, many proposing not only new methods but also new benchmarks. vnn-comp facilitates such a comparison between a large number of tools on a diverse set of benchmarks, using cost-equivalent hardware, and test instances not available to participants. letting participants and industry practitioners propose a wide range of interesting benchmarks, yields not only a ranking on the problems typically used in the field but also highlights which tools are particularly suitable for more specialized problems. to prevent excessive tuning to specific benchmark instances, benchmark proposers were encouraged to provide a script enabling the generation of new random instances for the final tool evaluation. however, teams were allowed to tune their tools for each benchmark, using the initial set of benchmark instances.in 2022, each participating team could submit or endorse up to two benchmarks, allowing industry practitioners to propose benchmarks without entering a tool. however, the large and increasing discrepancy between registered and submitted tools might indicate that many teams feel like they are not able to invest the significant effort required to support not only the standardized network and specification formats but also the wide variety of different benchmarks. as tools are ranked by their total score with each benchmark providing a score of up to 100%, the final ranking is biased towards tools that support all benchmarks.while we believe vnn-comp already provides reasonable mechanisms for comparing the tools submitted in every iteration, the changing benchmarks and tools make it hard to track the year-on-year progress of the field as a whole. because some tools are heavily optimized for the specific benchmarks of that year's competition, simply evaluating them on the benchmarks of previous (or future) years (even if they support them) does not yield a meaningful progress metric. while including all benchmarks from previous years in the (scored) benchmark selection would place an undue burden on participants, choosing one particularly challenging, representative, and interesting benchmark every year to be included as a (scored) extra benchmark in future iterations might be a good compromise. additionally, a more restrictive stance on tool tuning could enable a much more representative evaluation of new tools on old benchmarks. 
for vnn-comp, tuning tools was allowed explicitly on a per-benchmark basis and implicitly on a pernetwork basis, enabling teams to showcase the maximum performance of their tools. however, for future iterations it might be interesting to restrict tuning for some or all benchmarks to encourage authors to develop autotuning strategies, making the adaption of their tools to new problems much easier. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/105.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/105.txt new file mode 100644 index 0000000000000000000000000000000000000000..7d32dc24067d700c07ba8a7cf91e2da9fb1b4ed5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/105.txt @@ -0,0 +1 @@ +recently, reinforcement learning, combined with technological progress, has achieved superhuman performances in computing optimal behaviors in various decision processes. particularly, there is a development of self-play algorithms to design strong artificial intelligence in games such as go or chess . these methods have the practical interest that they do not require supervision neither with huge data set nor with expert feedback to train efficiently. we focus here on competitive multi-agent reinforcement learning (marl) where several agents interact with an environment that may involve randomness, in order to maximize their own profit. one of the mostly used model to deal with marl is stochastic game (a.k.a. markov game in the literature) that was introduced in the 50s by shapley . stochastic games (sgs) are more pertinent to handle marl compared to markov decision processes (mdps) that are widely used for single-agent reinforcement learning.here we consider learning in infinite-horizon two-player zerosum game with the average-reward criterion: there are two players that play indefinitely, and each player wins exactly what their opponent loses. at each time, the game has a current state and players choose and play an action simultaneously : they receive rewards and then the state of the environment changes (possibly randomly), this depending on the current state and the joint choice of actions. at the early stage of the game, the reward function and transition probabilities are unknown by the learning agent.an important concept in stochastic games is the nash equilibrium. in two-player zero-sum stochastic games, all nash equilibria have the same value which represents a bound of the minimum expected average-reward that each player can obtain if they play optimally, regardless of the strategy of the opponent. when both agents play according to the nash equilibrium, each of them can not hope a greater payoff by changing unilaterally his own strategy. therefore, nash equilibrium is often a benchmark for learning performances and a large set of works focus on this notion.two settings can be considered for learning in stochastic games: centralized and decentralized. in the first setting, a central learner tries to find the nash equilibrium as quick as possible by controlling both players, e.g. . in the second setting, the learner controls only one player and tries to be as efficient as possible against an arbitrary opponent, see for different settings and objectives. this setting is quite natural but more challenging than the first one because the learner has no control on the opponent strategy that may change over time. in this paper, we consider the decentralized learning problem. 
several definitions for the regret are possible in this case, that we will discuss in more detail in section 2. 3.learning algorithms are often split into two families called respectively model-based algorithms and model-free algorithms. in a model-based algorithm, the learner gathers observations of the game in order to estimate the parameters of the model (reward functions and transition probabilities), and then uses this estimation to compute a policy that is used for playing and gathering more information. in a model-free algorithm, the learner directly tries to estimate the values of the true model and plays optimally according to these value estimations. the two papers that are closest to ours are the algorithms ucsg in and psrl-zsg in . they both present a decentralized learning algorithm in averagereward sg and are both model-based. to the best of our knowledge, we propose in this paper the first model-free algorithm for the stochastic games with infinite-horizon average-reward objective. model-free algorithms have the advantage that they often need considerably less memory space to work, and additionally they often enjoy much better time complexity, which motivates our work. they can also be generalized to other settings, such as linear sgs. the drawbacks of model-free is that, in the literature, model-based algorithms often achieve better performances than model-free. this optimistic q-learning has then been extended in two directions:(1)to infinite-horizon average-reward mdps inby using an artificial discounted setting, and (2) to finite horizon stochastic games in. in section 4, we give the regret upper bounds (expected regret and regret with high probability), along with their proofs. the first provably-efficient qlearning appears in, where the authors add an optimism term to q-learning and prove that the algorithm achieves low regret performances in finite-horizon mdps. the v-learning algorithm is proposed infor centralized learning in finite-horizon sg, and then is adapted into design an efficient algorithm for decentralized learning when the agents do not observe the action of their opponent. oomd : for infinite-horizon average-reward mdp, the authors ofpropose the oomd algorithm that achieves a regret bound of order √ when they assume the ergodicity assumption. plus, we do not study bayesian regret but worst-case expected regret or worst-case high probability regret.in this paper we establish low regret bounds for the undiscounted setting where players play indefinitely and try to maximize their average-reward over time.this assumption is intuitively necessary to get low regret because if this assumption does not hold, it means that the opponent may have a way to lock the learning agent in "bad stage" while at the early stage of the learning process, the agent could not have enough information to avoid this.in this paper, we consider a decentralized learning algorithm that has a low regret in a two-player zero-sum stochastic game. 
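as a concrete illustration of the optimistic q-learning building block referred to above, the sketch below shows a tabular q-learning update with a count-based optimism bonus, run in an artificial discounted setting; the constants, the bonus shape, and the learning-rate schedule are illustrative assumptions and not the exact choices of the algorithm discussed in this paper.

```python
import math
from collections import defaultdict

# Tabular optimistic Q-learning in an artificial discounted setting: a count-based
# optimism bonus is added to the target so that rarely visited pairs look good.
gamma = 0.99                      # discount of the artificial discounted proxy
H = 1.0 / (1.0 - gamma)           # effective horizon
c = 1.0                           # bonus scale (assumption)
Q = defaultdict(lambda: H)        # optimistic initialization
N = defaultdict(int)              # state-action visit counts

def update(state, action, reward, next_state, next_actions):
    """One optimistic Q-learning update after observing a transition."""
    N[(state, action)] += 1
    n = N[(state, action)]
    alpha = (H + 1.0) / (H + n)                 # decaying learning rate
    bonus = c * H * math.sqrt(1.0 / n)          # optimism term
    target = reward + gamma * max(Q[(next_state, a)] for a in next_actions) + bonus
    Q[(state, action)] = (1.0 - alpha) * Q[(state, action)] + alpha * target
```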
nash equilibria in this setting provide players with an optimal way to play whatever their opponent does: if the max-player plays their nash policy, it guarantees them an average reward of at least the game value against any opponent, and we want to measure the online performance of the learning agent compared to this nash policy. unfortunately, it has been shown that, for the finite-horizon setting, it is not possible to design an algorithm that has a sublinear regret with the two above learning objectives. the main idea of our decentralized optimistic nash q-learning (that we present in algorithm 1) is to learn the game in a well-chosen discounted setting using optimistic q-learning techniques introduced in earlier work. the algorithm also takes as parameter a confidence level δ ∈ (0, 1), which is set either to obtain a high-probability regret bound or to obtain a sublinear expected regret. this bound is similar to the regret bound of theorem 2 with δ chosen inversely proportional to the number of steps, but is significantly smaller because the second term of the regret bound of theorem 2 disappears. by setting a parameter that scales as a 1/3 power of the number of steps T, we can balance the bias introduced by the discounted proxy against the square-root-type term coming from the regret bound for the discounted setting, and obtain a regret bound in expectation of order T^(2/3): intuitively, a smaller discount gap reduces the bias of the discounted proxy but inflates the discounted-setting regret, and a 1/3 power equalizes the two contributions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/106.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/106.txt new file mode 100644 index 0000000000000000000000000000000000000000..433dd9d17f6d0d1d679bd01ec45c31da89755a49 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/106.txt @@ -0,0 +1 @@ +mutation testing is a white box testing method that aims to inject artificial changes based on real faults in order to evaluate a test suite's capability to reveal faults. mutation testing has been extensively studied and used in traditional software engineering to assess the quality of test suites. a fundamental hypothesis of mutation testing is the coupling effect hypothesis, which posits that "complex mutants are coupled to simple mutants in such a way that a set of test cases that detects all simple mutants in a program will detect a large percentage of the complex mutants". as such, uncovering unkilled complex mutants is of interest, with one way of generating such complex mutants being to combine simple mutants, called first order mutations (fom), together. this is the concept of higher order mutations (hom) introduced by jia et al. such established testing techniques and concepts could be useful for deep learning systems, in an effort to increase their reliability, since such systems are notoriously hard to test because of their peculiar nature. because of the paradigm differences between deep learning-based systems and traditional software systems, it is only recently that researchers have started proposing mutation testing frameworks tailored for deep learning-based systems, in particular for supervised learning, to assess the quality of test datasets at revealing faults. yet, supervised learning is not the only sub-paradigm in machine learning, and (deep) reinforcement learning (rl), one of the other main sub-paradigms with a wide range of applications, is increasingly being adopted in practice. rl differs deeply from supervised learning: while in supervised learning a model learns from a training dataset in order to generalize to any new data from the input distribution, rl is based on the idea of training an agent using its interaction with an environment through a feedback system.
for instance, a robot (agent) evolving in a room (environment) with a goal to go from a to b with some traps on the way. as such, previously introduced frameworks might present several limitations if applied to rl, for instance, the mutation operators defined in supervised learning to obtain mutant models might not apply to rl. in parallel, to the best of our knowledge, is the only research work that tackled mutation testing in rl; proposing a fault detection approach that is based on the manual crafting of relevant environments. in particular, they introduced the idea that traditional test cases used in mutation testing applied to traditional software systems could be translated to the notion of test environments in rl. however, their study is limited to only one type of rl algorithm and does not explore real fault-based operators nor the potential usefulness of combining existing operators to form hom which could prove useful to assess test environments' capacity to find subtle faults in rl systems.in this paper, we propose a framework, rlmutation, for mutation testing of deep rl programs leveraging hom adapted to rl. we defined mutation operators for rl motivated by existing taxonomized faults. we then analyzed how they fared on different rl environments and algorithms by using and comparing a number of mutation killing definitions adapted from previous works. in order to leverage hom power to highlight more complex faults, we adapt existing work on hom to the rl task specificity. namely, we conceive a simple heuristic tailored to the rl problem to systematically generate some test environments in order to obtain test cases to assess hom usefulness. thus, we aim to provide some insights into how mutation testing could be applied to rl while acknowledging existing differences with supervised learning.our contribution is the proposed rl framework composed of the following: 11 mutation operators based on real taxonomized faults, a comparison of the impact of mutation killing definition design over the fom killed, and a heuristic to generate relevant test environments to study both fom and hom.the remainder of the paper is structured as follows: section ii gives relevant background knowledge about mutation testing, hom, and rl. section iii presents the mutation operators introduced, as well as the procedure to determine how to generate relevant hom when using mutation testing for rl. section iv reports about our experiments and results. section v discusses threats to the validity of our work. section vi reviews the related literature, while section vii concludes the paper. in particular, they introduced the idea that traditional test cases used in mutation testing applied to traditional software systems could be translated to the notion of test environments in rl. 
however, their study is limited to only one type of rl algorithm and does not explore real fault-based operators nor the potential usefulness of combining existing operators to form hom which could prove useful to assess test environments' capacity to find subtle faults in rl systems.our contribution is the proposed rl framework composed of the following: 11 mutation operators based on real taxonomized faults, a comparison of the impact of mutation killing definition design over the fom killed, and a heuristic to generate relevant test environments to study both fom and hom.studied the different types of hom, by classifying them based on two main properties: 1) a hom is coupled if, for a given set of test cases killing the hom t h = ∅ and a given union of sets of test cases killing its constituent fom ∪.applying mutation testing to deep learning, similarly to how it is done in traditional software engineering, raises several questions with the most prominent one being: is a test case killing a mutation, or is it just an artifact of the stochasticity of the model's training? indeed, given a test case x, a neural network n and a mutant m , having n (x) = m (x) does not necessarily mean that x killed the mutation m , as the mutation being killed could only be due to the stochasticity inherent to the training of the neural network.to assess the effectiveness of mutation testing for rl, we formulate the following research questions (rq): rq1: what are the limitations of existing mutation killing definitions when applied to rl? rq2: how are different agents and environments affected by the different mutations? rq3: do the hom generated from our fom possess the subsuming property similarly to traditional software engineering? 1) rq1.2) rq2.nonetheless, for the same test environment, the distributionwise test r still does not allow the detection of a high number of mutations, even mutations for which there are a sizable amount of ratios declaring the mutation killed with the avg definition, such as lunarlander/a2c/ndf.secondly, we see the test environments generated on cart-pole are relatively more likely to catch mutations with most of the mutations being killed by at least half the test environments compared to the ones generated on lunarlander.rq2-1 contrary to using only the initial test environment, generating additional test environments allows us to roughly evaluate which mutations might be trivial and which are more interesting based on the number of environments killing them.in a second step, it is possible to go beyond the raw number of test environments killing a certain mutation and, instead, to inspect which test environment kills the mutation. while it might not be possible to draw meaningful information for all mutations, especially on a reduced set of test environments, it shows nonetheless that generating test environments in that way also allows us to explore a potential link between parameters of the environments and their impact on the mutation.rq2-2 by mapping, for a given mutation, which generated test environments kill it or not, we can analyze which of the parameters of the test environments affect the decision to kill the mutation. 
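to make the killing definitions compared above concrete, the sketch below shows two simple ways a mutant could be declared killed on a given test environment: an average-return criterion and a distribution-wise statistical test over repeated training runs; the 10% threshold and the use of welch's t-test are illustrative assumptions, not the exact definitions used by rlmutation.

```python
from statistics import mean
from scipy.stats import ttest_ind

# healthy_returns / mutant_returns: final returns of agents trained with the
# original vs. the mutated code, one value per independent training run,
# all evaluated on the same test environment.

def killed_avg(healthy_returns, mutant_returns, rel_drop=0.10):
    """'avg'-style definition: killed if the mutant's mean return drops by more
    than rel_drop relative to the healthy agents (threshold is an assumption)."""
    return mean(mutant_returns) < (1.0 - rel_drop) * mean(healthy_returns)

def killed_distributionwise(healthy_returns, mutant_returns, alpha=0.05):
    """Distribution-wise definition: killed if the two return distributions
    differ significantly (Welch's t-test is an illustrative choice)."""
    _, p_value = ttest_ind(healthy_returns, mutant_returns, equal_var=False)
    return p_value < alpha
```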
we then trained new mutated agents based on the gathered hom in the same way as fom.overall this study showed that, while mutation testing applied to rl raises numerous challenges to consider from the mutation killing definition design to the generation of relevant test environments and hom, it is an interesting venue to improve testing of rl-based software systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/107.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/107.txt new file mode 100644 index 0000000000000000000000000000000000000000..44d32420848b9ccb0be9185218105e5279121ee5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/107.txt @@ -0,0 +1 @@ + inspired by the idea of capturing orientation information in capsule network, this paper proposed a novel method called switchable lightweight anti-symmetric process (slap), a protocol to produce the same output given different transformation variants, with the main research question: can symmetry variants be exploited directly by slap to improve and combine with cnn for machine learning?. this research tried to kill two birds by one stone, slap, by applying with cnn in reinforcement learning (of gomoku), challenging the widely used practice of data augmentation, aiming at reducing the sample size and improving the learning speed. if transfer learning in cnn is analogous to reusing a chair by cutting the legs and installing new legs to fit another, such 'switchable learning' in slap is analogous to turning the switch of an adjustable chair to fit certain symmetries. indeed memory plays an important role in reinforcement learning as well by episodic memory, an explicit record of past events to be taken as reference for making decisions, improving both sample efficiency and speed in reinforcement learning as experience can be used immediately for making decisions. note that slap can be used upon any function or model, and even if some (types) of the outputs are not invariant but follow the same transformation, these may be broken down and use the transformation information output from slap to make appropriate transformation back later for these parts only. if a machine learning problem is to be split into stages or parts by specified symmetry as a guide, slap might help by wrapping certain parts of a function or neural network model.to decouple from reinforcement learning dynamics, synthetic states of gomoku were created for testing neural network learning with slap vs with typical data augmentation (by rotation and reflection), the latter of which had 8 times the number of training samples.reinforcement learning required much more computation than neural network learning, so to save computation, the same neural network will be used and the testing of hyperparameters would be based on best models in neural network learning by synthetic gomoku states, with some deviations to be tested by grid search.despite the widely use of data augmentation to increase the variety of transformation variants in samples to improve machine learning, we proved that using slap to decrease the variety could achieve the same performance of typical data augmentation with sample size reduced by 87. while slap exploited only reflection and rotation symmetries in learning gomoku, the general concept of slap and the proof of invariance could apply to other symmetries. 
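the exact construction of slap is not reproduced here; the sketch below shows one plausible way to obtain the same output for all transformation variants of a gomoku-like board: canonicalize the input over the eight rotation/reflection variants before calling the wrapped model, and remember which transform was applied so that board-shaped outputs (e.g., a move distribution) can be mapped back; the canonical-form rule and the function names are illustrative assumptions.

```python
import numpy as np

def dihedral_variants(board: np.ndarray):
    """All 8 rotation/reflection variants of a square board, with their keys."""
    variants = []
    for flipped in (False, True):
        b = np.fliplr(board) if flipped else board
        for k in range(4):
            variants.append(((flipped, k), np.rot90(b, k)))
    return variants

def canonicalize(board: np.ndarray):
    """Pick a canonical variant (here: lexicographically smallest byte string,
    an arbitrary but deterministic choice)."""
    key, canon = min(dihedral_variants(board), key=lambda kv: kv[1].tobytes())
    return key, canon

def uncanonicalize(plane: np.ndarray, key):
    """Map a board-shaped output (e.g., a policy plane) back to the original frame."""
    flipped, k = key
    out = np.rot90(plane, -k)
    return np.fliplr(out) if flipped else out

# Usage: value outputs of model(canon) are invariant by construction; a policy
# plane predicted on canon is mapped back with uncanonicalize(policy, key).
```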
shortcomings: in gomoku reinforcement learning, slap tended to decrease learning rate multiplier more frequently, implying more frequent significant increase of validation loss. it might imply that slap would need quite different hyperparameters in reinforcement learning (as opposed to sharing the same hyperparameters of baseline models in the neural network learning experiment), and more or better searches of hyperparameters for reinforcement learning would be required, though it was constrained by computation resources. another plausible explanation for not speeding up reinforcement learning was the insignificant portion (~1%-2%) of neural network learning in the whole reinforcement training, implying that the time saved in neural network learning would be insignificant for the whole reinforcement learning in our chosen setting (which used a relatively simple cnn), and enough neural network learning iterations would have been allowed if hyperparameters were optimal. since no domain specific features or knowledges were used in slap, it should also benefit neural network learning generally for domains that are symmetry invariant, especially for reflection and rotation symmetries.as future work, slap may be applied in domains that are not fully symmetry invariant, by breaking down the neural network layers into two partsfirst learning as if it were fully symmetry invariant. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/108.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/108.txt new file mode 100644 index 0000000000000000000000000000000000000000..c082e56a5d556e482964bfb0780e0baa69c3d5b6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/108.txt @@ -0,0 +1 @@ +t he usage of wearable internet of things (iot) devices in healthcare is rising. due to their availability and sensing capability, these devices collect physiological data from patients and provide real-time diagnosis . wearable iot devices have caused remote healthcare to make a paradigm shift into predictive diagnosis and reliable early detection. the data collected by these devices partnered with different learning techniques aid in predictive healthcare services. doctors can analyze data such as their patient's activities and accurately predict anomalies and threats against their health . they can also prescribe treatments for preventing and addressing these detected concerns. however, this breakthrough has limitations in its technology. challenge within a network that employs wearable iot devices cause impasses in predictive healthcare.an open issue for wearable iot devices in predictive healthcare is the amount of data it needs to be effective. a large amount of personal data is collected, resulting in security and privacy concerns due to the nature of the data used for analysis . the wearable devices' limitations on processing capabilities lead to vulnerabilities and potential leakages in sensitive patient information . another issue is the integrity and reliability of the service . structuring healthcare to prioritize certain aspects can cause trade-offs in others. service integrity is crucial for this field in remote healthcare that relies on wearable data accuracy and predictive model precision. one more issue is the adaptability of the network that deploys and serves these predictive healthcare services . wearable iot device standardization is a significant concern to iot networks due to the heterogeneity it introduces. 
this diversity results in demands for continuous maintenance and updates to the medical server to ensure that it is up to date with every newly introduced wearable iot device. as a result, concerns about adaptability limit the healthcare network from fully remaining relevant and sustainable over long stretches of its service.in this work, we propose a fog-iot platform to address these issues. we use federated learning to preserve patient data privacy and the integrity of the network's predictive services . also, we incorporate blockchain technology to address the security issues in wearable iot devices through its access control and cryptographic structure , . finally, we combine these technologies within a fog-based iot architecture to enforce decentralized servers and resource reallocation to improve the adaptability and sustainability of the overall network . the main contributions of this work are:• we present a fog-based iot platform using federated learning and blockchain technology to preserve patient data privacy and improve the security of data within the network.• we designed a testbed that simulates and evaluates the proposed implementation. we used model accuracy to observe the platform's ability to preserve the integrity of a predictive service.the rest of this paper is organized as follows. a discussion on the background of our study and a brief literature review are in section ii. our proposed design and the methodology followed are in section iii. the presentation of a developed testbed and a discussion of the results are in section iv. finally, our conclusions are in section v.• we present a fog-based iot platform using federated learning and blockchain technology to preserve patient data privacy and improve the security of data within the network.wearable iot devices can form a network of sensing devices that collect data from points of interest for predictive analysis.1) network security and data privacy: introducing wearable devices to collect patient data adds more endpoints to the server of the healthcare service. therefore, network security and data privacy concerns grow as the network expands with more wearable devices.2) data integrity and precision: an advantage of using wearable iot devices in healthcare is the real-time diagnosis and early detection of illness and medical anomalies within a patient.3) network structure adaptability and flexibility: wearable iot devices introduce different sensors and technologies that collect and process data. instead of specialization through prototypes, we plan to use fog-iot paired with decentralized technologies such as blockchain and federated learning to develop a modular iot network for wearable devices in healthcare.healthcare services that involve wearable iot devices revolve around continuous data sensing, learning, and analysis. the result is a decentralized iot network that can maximize the potential of federated learning in keeping data analysis in healthcare services secure and sustainable. for wearable iot devices in healthcare, the network can use blockchains as a tamper-proof database for patient data storage.we propose a fog-iot-based approach to secure data exchange from wearable iot devices used for predictive healthcare services. integrating federated learning improves the sustainability of the predictive healthcare service by providing a system that can organize the model data and provide a global model that represents the overall knowledge obtained through all the local training. 
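to make the federated-learning component more concrete, the following is a minimal sketch of federated averaging on the server side: each device trains locally on its own patient data, and only model parameters (weighted by local sample counts) are aggregated, so raw data never leaves the device; this is a generic illustration rather than the specific aggregation or blockchain logic of the proposed platform.

```python
import numpy as np

def federated_average(client_weights, client_sizes):
    """Aggregate per-client parameter lists into a global model.

    client_weights: list of models, each a list of numpy arrays (layer weights)
    client_sizes:   number of local training samples per client (weighting)
    """
    total = float(sum(client_sizes))
    n_layers = len(client_weights[0])
    global_weights = []
    for layer in range(n_layers):
        layer_avg = sum(w[layer] * (n / total)
                        for w, n in zip(client_weights, client_sizes))
        global_weights.append(layer_avg)
    return global_weights

# One communication round: each wearable/fog node trains locally, sends weights,
# the server averages them and broadcasts the new global model back.
```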
our focus is on the effectiveness of our platform in securing wearable iot device data and ensuring the integrity and flexibility of the healthcare service, and not maximizing the accuracy of the classifier. removing this constraint helps the server keep up with the constant introduction of new wearable iot devices by simplifying the data flow within the network.we propose a platform that addresses the issues of data privacy, service integrity, and network structure adaptability of wearable iot devices in predictive healthcare. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/109.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/109.txt new file mode 100644 index 0000000000000000000000000000000000000000..e66de034cfae5dd93375d5303045941a1de44d38 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/109.txt @@ -0,0 +1 @@ +deep learning with large neural networks has seen tremendous success in solving a wide range of tasks in recent years, including image classification (he et al., 2016;dosovitskiy et al., 2021;zhai et al., 2021), language processing (vaswani et al., 2017;devlin et al., 2019;brown et al., 2020), interacting with openended environments (silver et al., 2016;arulkumaran et al., 2019), and code synthesis (chen et al., 2021).recent empirical studies have shown that neural networks that incorporate multiplication operations between intermediate neurons (durbin & rumelhart, 1989;urban & van der smagt, 2015;trask et al., 2018), such as self-attention layers (vaswani et al., 2017) and hypernetworks (ha et al., 2016;krueger et al., 2017;littwin & wolf, 2019), are particularly effective. for example, self-attention layers have been widely successful in computer vision (dosovitskiy et al., 2021;zhai et al., 2021) and language processing (cheng et al., 2016;parikh et al., 2016;paulus et al., 2017;vaswani et al., 2017). it has also been shown that one can achieve reasonable performance with transformers even without applying non-linear activation functions (levine et al., 2020). additionally, hypernetworks, which use multiplication to generate network weights seem to improve the performance of neural networks on various meta-learning tasks (von oswald et al., 2020;littwin & wolf, 2019;bensadoun et al., 2021). however, the benefits of multiplication layers are not well understood from a theoretical perspective.in this work, we study the expressive power of neural networks with multiplication layers. specifically, we want to evaluate the number of neurons and layers needed to approximate a given function within a given error tolerance using a specific architecture. a classic result in the theory of deep learning shows that neural networks can approximate any smooth target function, known as the universal approximation property, with as few as one hidden layer (cybenko, 1989;hornik et al., 1989;ichi funahashi, 1989;leshno et al., 1991). however, these papers do not provide specific information about the type of architecture and number of parameters required to achieve a given level of accuracy. this is a crucial question, as a high requirement for these resources could limit the universality of neural networks and explain their limited success in some practical applications.previous work has demonstrated that functions in sobolev spaces can be approximated by a one-hidden layer neural network with analytic activation functions (mhaskar, 1996). 
however, the number of neurons required to approximate these functions with an error of at most ǫ in the l ∞ norm scales as o(ǫ -d/r ), where d is the input dimension, r is the smoothness degree of the target function, and ǫ > 0 is the error rate. this raises the question of whether the curse of dimensionality, the phenomenon whereby the complexity of a model grows exponentially with the input dimension, is inherent to neural networks.on the other hand, devore et al. (1989) proved that any continuous function approximator that approximates all sobolev functions of order r and dimension d within error ǫ requires at least ω(ǫ -d/r ) parameters in the l ∞ norm. this result meets the bound of mhaskar (1996) and confirms that neural networks cannot avoid the curse of dimensionality for the sobolev space when approximating in the l ∞ norm. a key question is whether neural networks can overcome this curse of dimensionality for certain sets of target functions, and what kind of architectures provide the best guarantees for approximating these functions.to overcome the curse of dimensionality, various studies (mhaskar et al., 2017;poggio et al., 2020;kohler & krzyżak, 2017;montanelli & du, 2019;blanchard & bennouna, 2022b;galanti & wolf, 2020) have investigated the approximation capabilities of neural networks in representing other classes of functions with weaker notions of distance, such as the l 2 distance. for example, mhaskar et al. (2017); poggio et al. (2020) showed that smooth, compositionally sparse functions with a degree of smoothness r can be approximated with the l ∞ distance up to error ǫ using deep neural networks with o(dǫ -2/r ) neurons. other structural constraints have been applied to functions with structured input spaces (mhaskar, 2010;nakada & imaizumi, 2022;schmidt-hieber, 2019), compositions of functions (kohler & krzyżak, 2017), piecewise smooth functions (petersen & voigtländer, 2017;imaizumi & fukumizu, 2018). a different line of research has focused on understanding the types of functions that certain neural network architectures can implement with regularity constraints. for example, e et al. (2021) showed that the space of 2-layer neural networks is equivalent to the barron space when the size of their weights is restricted. they further showed that barron functions can be approximated within ǫ using 2-layer networks with o(ǫ -2 ) neurons. another line of research has considered spectral conditions on the function space, allowing functions to be expressed as infinite-width limits of shallow networks (barron, 1991;klusowski & barron, 2016). in (blanchard & bennouna, 2022b) they considered the space of korobov functions, which are functions that are practically useful for solving partial differential equations (pdes). they showed any korobov function can be approximated up to error ǫ in l 2 distance with a 2-layer neural network with relu activation function with o(ǫ -1 log(1/ǫ) 1.5(d-1)+1 ) and with a o(log(d))-depth network with o(ǫ -0.5 log(1/ǫ) 1.5(d-1)+1 ) neurons.in a recent paper, montanelli et al. (2021) provided approximation guarantees were established for generalized bandlimited functions. these functions are commonly used to model signals that have a finite range of frequencies (e.g., waves, video, and audio signals), which is known as a finite bandwidth. the solutions to many pdes in physics are bandlimited functions, as the physical phenomena modeled by these pdes typically have a finite range of frequencies. 
for example, the solutions to the wave equation, which models the propagation of waves, are bandlimited functions. in (montanelli et al., 2021), it was shown that any bandlimited function can be approximated to within error ǫ using a relu neural network of depth o(log 2 (1/ǫ)) with o(ǫ -2 log 2 (1/ǫ)) neurons with the l 2 distance.in this paper, we study the approximation abilities of multiplicative neural network architectures with the l 2 distance. in particular, we prove that a multiplicative neural network of depth o(logneurons can approximate any generalized bandlimited function up to an error of ǫ (with constants depending on the dimension and on the band). additionally, we also study the approximation guarantees of neural networks for approximating functions in sobolev-type balls of order r. we show that for the same error tolerance ǫ, multiplicative neural networks can approximate these functions with depth o(d 2 ǫ -1/r ) and o(d 2 ǫ -(1+1/r) ) neurons, while standard relu neural networks require depth o(d 2 ǫ -2/r ) and o(d 2 ǫ -(2+2/r) ) neurons. these results demonstrate the superior performance of multiplicative gates compared to standard fully-connected layers. in table 1 we contrast our new bounds with preexisting bounds on the approximation power of neural networks for the sobolev space, bandlimited functions, and the sobolev-type ball. barron, 1993) , 2017;poggio et al. we show that for the same error tolerance ǫ, multiplicative neural networks can approximate these functions with depth o(d 2 ǫ -1/r ) and o(d 2 ǫ -(1+1/r) ) neurons, while standard relu neural networks require depth o(d 2 ǫ -2/r ) and o(d 2 ǫ -(2+2/r) ) neurons. a multiplicative neural network is a function f = y l,1 : r p0 → r defined by a set of univariate functions l i=1 {y i,j } pi j=1 .with this lemma in hand, we can show how to use multiplicative networks to approximate analytic kernel functions k : r → c by leveraging their ability to represent polynomials.the following theorem shows that a deep multiplicative network can approximate in b = d , a bandlimited function up to a given error tolerance using a relatively small number of neurons and depth.the above theorem shows that one can approximate bandlimited functions up to error ǫ using multiplicative neural networks of depth l ǫ = o(log(1/ǫ)) using g ǫ = o ǫ -2 log(1/ǫ) neurons. we use the results on bandlimited functions shown in theorem 3 to approximate functions in b 2r,2 w r,2 . we would like to approximate f using a bandlimited function f m and then approximate f m using a multiplicative neural network f bl = f ms . using the results of theorem 3, there exists a deep multiplicative neural network f ms that approximates the bandlimited function f m in l 2 (b) with error bounded by ǫ/2 and depth. 
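as a small illustration of the multiplication operations discussed above, the sketch below implements a single multiplicative (gating) layer whose output is the element-wise product of two affine maps of the input; this is one common way to realize multiplication between intermediate neurons and is not the paper's exact parameterization.

```python
import numpy as np

def multiplicative_layer(x, W1, b1, W2, b2):
    """Element-wise product of two affine maps: out = (W1 @ x + b1) * (W2 @ x + b2)."""
    return (W1 @ x + b1) * (W2 @ x + b2)

# A single multiplicative unit represents the monomial x1 * x2 exactly,
# something a single ReLU unit can only approximate.
W1 = np.array([[1.0, 0.0]]); b1 = np.zeros(1)
W2 = np.array([[0.0, 1.0]]); b2 = np.zeros(1)
print(multiplicative_layer(np.array([3.0, -2.0]), W1, b1, W2, b2))  # [-6.]
```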
lastly, we show that for the same error tolerance ǫ, multiplicative neural networks can approximate a target function f ∈ b 2r,2 with a depth of o(d 2 ǫ -1/r ) and o(d 2 ǫ -(1+1/r) ) neurons, while standard relu neural networks require a depth of o(d 2 ǫ -2/r ) and o(d 2 ǫ -(2+2/r) ) neurons.previous papers have studied the approximation guarantees of standard fully-connected neural networks to approximate functions in the barron space b 1,1(barron, 1993), the space of bandlimited functions(montanelli et al., 2021), and the korobov space(blanchard & bennouna, 2022a).in this paper, we extend these results by exploring the approximation guarantees of both multiplicative neural networks and standard fully-connected networks to approximate bandlimited functions and members of the sobolev-type ball b 2r,2 . in addition, we show that, unlike the barron space and the space of bandlimited functions, b 2r,2 is a subset of the sobolev space w r,2 .to see that b 2r,2 is a proper subspace of w r,2 , let us define as an example, the function f ∈ w r,2 r d through its fourier transform:. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/11.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/11.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa26bb4e6ab2c43c1568c013b907e13b50b3f9f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/11.txt @@ -0,0 +1 @@ +multivariate time series data are ubiquitous in various domains such as industry, finance and healthcare (bagnall et al. 2018;ruiz et al. 2021;rambhatla, che, and liu 2022;belkhouja, yan, and doppa 2022). such data record the changing trends of multiple variables over time. time series representation learning is a way to transform the complex raw time series data into semantic-rich representations (eldele et al. 2021;deldari et al. 2022). since time series data are usually collected through a large number of sensors or devices, there are no obvious visual patterns that can be easily recognized by humans. as a result, extensive efforts on data annotations are required before supervised learning approaches can be performed (qian, pan, and miao 2021). such an expensive manual labeling process greatly limits the utilization of time series data. therefore, unsupervised learning has emerged and gained the favor of researchers. there are unsupervised learning methods for time series that adopt auto-encoders (malhotra et al. 2017) and seq-to-seq models (lyu et al. 2018) to reconstruct the raw time series through joint training with decoders. however, robust reconstruction of complex time series is challenging in many cases, especially for the high-frequency physiological signals (tonekaboni, eytan, and goldenberg 2021;sarkar and etemad 2020). in this case, self-supervised learning which uses the self-generated supervised signals obtained by pretext tasks has been developed as well (misra and van der maaten 2020;kolesnikov, zhai, and beyer 2019).in this paper, we focus on contrastive learning, which is a special form of self-supervised learning with instance discrimination as the pretext task. contrastive learning has achieved remarkable advantages in diverse applications such as image (chen et al. 2020;grill et al. 2020;he et al. 2020;chen et al. 2021;dave et al. 2022) and time series classification (franceschi, dieuleveut, and jaggi 2019;eldele et al. 2021;yue et al. 2022;bagnall et al. 2017). 
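for reference, the instance-level contrast discussed here can be written as a small infonce-style loss: the augmented view of the same series is the unique positive and every other view is treated as a negative; the implementation below is a generic illustration, not the specific loss of any of the cited methods.

```python
import numpy as np

def info_nce_loss(z_anchor, z_positive, z_negatives, temperature=0.1):
    """Instance-level contrastive loss for one anchor: pull its augmented view
    close, push all other (assumed negative) views away."""
    def cos(a, b):
        return float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))
    logits = np.array([cos(z_anchor, z_positive)] +
                      [cos(z_anchor, z_neg) for z_neg in z_negatives]) / temperature
    log_probs = logits - np.log(np.exp(logits).sum())
    return -log_probs[0]   # the positive pair sits at index 0
```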
nevertheless, the aforementioned approaches mainly focus on instancelevel contrast that treats instances independently. they usually regard the augmented view of the same instance as the unique positive and the remaining views as negatives. then, instances are distinguished by pulling representations of the positive pair together and pushing those of negative pairs apart. as the unique positive pair is composed of augmented views generated from the same instance, other instances with similar higher-level implicit semantics are misjudged as negative ones. in this case, the false negative instances will be pushed away in subsequent contrastive learning, which causes an adverse effect on instance discrimination.recently, cluster-wise contrastive learning breaks the independence between instances by exploiting the latent cluster information among instances. the learned representations are expected to retain higher-level semantic information by taking advantage of additional prior information brought by clustering (li et al. 2021a). however, existing cluster-wise contrastive learning approaches (caron et al. 2020;li et al. 2021a;zhang et al. 2021) usually i) adopt flat clustering algorithms that only capture a single hierarchy of semantic clusters, and ii) require prior knowledge to pre-specify the number of clusters, which are non-trivial for unlabeled time series. what's worse, these approaches still suffer from fake negatives and limited positives, as they construct only one positive pair with its corresponding prototype, while treating all remaining prototypes as negative candidates. such clustering is prone to noise and fails to fully take advantage of the hierarchical semantic information behind the entire set of instances. without ground-truth labels, it is challenging to guarantee the accurate constructions of such positive and negative pairs. for example, two instances may be divided into different clusters at a finegrained level, but belong to the same cluster at a coarsegrained level. the fine-grained prototypes preserve higher purity, while the coarse-grained ones reflect higher-level semantics. therefore, the implicit hierarchical semantics are all valuable and should be taken into account.in this paper, we propose a masked hierarchical clusterwise contrastive learning (mhccl) 1 model for time series representation learning. mhccl is facilitated with hierarchical clustering to enable more informative positive and negative pairs. it is motivated by the observation of multigranularity of clustering, i.e., clustering with a larger number of clusters preserves high purity within each small cluster, while the one with a smaller number of clusters can better reflect high-level semantics. we propose novel downward and upward masking strategies to improve constructed pairs for multi-level contrast, which is achieved by incorporating information from the remaining hierarchy at each partition of clustering. downward masking utilizes the information from upper partitions to lower partitions in the hierarchy, which helps supplement latent positive pairs and filter out fake negative pairs for effective contrastive learning. in addition, upward masking utilizes the information from lower partitions to upper partitions, which helps remove outliers and refine prototypes to improve the clustering quality.the main contributions of this work are as follows. 
firstly, we propose a novel downward masking strategy to incorporate the implicit multi-granularity semantic information obtained by hierarchical clustering into the construction of positive and negative pairs for contrastive learning. secondly, we reserve the representative instances that can characterize the cluster adequately, and filter the outliers that may bring side effects on contrastive learning by a novel upward masking strategy during hierarchical clustering. thirdly, we conduct extensive experiments to evaluate the proposed mhccl on seven benchmark datasets, with results demonstrating the effectiveness of mhccl. 2021a).in this paper, we propose a masked hierarchical clusterwise contrastive learning (mhccl) 1 model for time series representation learning. we propose novel downward and upward masking strategies to improve constructed pairs for multi-level contrast, which is achieved by incorporating information from the remaining hierarchy at each partition of clustering. downward masking utilizes the information from upper partitions to lower partitions in the hierarchy, which helps supplement latent positive pairs and filter out fake negative pairs for effective contrastive learning. firstly, we propose a novel downward masking strategy to incorporate the implicit multi-granularity semantic information obtained by hierarchical clustering into the construction of positive and negative pairs for contrastive learning. secondly, we reserve the representative instances that can characterize the cluster adequately, and filter the outliers that may bring side effects on contrastive learning by a novel upward masking strategy during hierarchical clustering. all three methods adopt flat clustering that only captures a single hierarchy of semantics, and the number of clusters needs to be manually specified, which could not capture the natural hierarchies in data. the goal of unsupervised representation learning is to train an encoder f q (•), which maps each raw time series x i into next, the hierarchical clustering module generates multiple partitions, and upward masking is conducted iteratively with hierarchical clustering to remove outliers and refine prototypes, which helps improve the clustering quality. the hierarchical clustering module generates m partitions, where p (p) denotes the p-th partition, and each partition contains k p clusters. the core of effective contrastive learning is the construction of positive and negative pairs, and mhccl introduces two novel masking strategies to improve the quality of pairs. in this way, after each partition of clustering, mhccl first calculates mean prototypes at the current partition, and then removes outliers based on the threshold through upward masking. 2020) usually construct one positive pair consisting of the anchor and the prototype of the cluster it belongs to, while all remaining prototypes are treated as negative candidates, causing fake negatives. to this end, mhccl utilizes the hierarchical structure information from upper partitions to lower partitions, to filter out false negatives by the novel downward masking strategy. h all = h pos + h neg is the total number of cluster-wise contrastive pairs, where h pos and h neg are the number of selected positive and negative prototypes respectively. 
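the sketch below illustrates, in a simplified form, how multi-granularity partitions, prototypes, and masked prototype pairs of the kind described above could be constructed: agglomerative clustering at several granularities, mean prototypes per cluster, and a downward-masking-style rule that drops from the negative set any fine-grained prototype whose members fall in the anchor's coarse-grained cluster; the clustering backend and the masking rule are illustrative choices, not the paper's exact strategies.

```python
import numpy as np
from sklearn.cluster import AgglomerativeClustering

def build_partitions(Z, cluster_counts=(64, 16, 4)):
    """Cluster representations Z at several granularities (fine -> coarse)."""
    return [AgglomerativeClustering(n_clusters=k).fit_predict(Z)
            for k in cluster_counts]

def prototypes(Z, labels):
    """Mean prototype of each cluster in one partition."""
    return {c: Z[labels == c].mean(axis=0) for c in np.unique(labels)}

def masked_pairs(Z, anchor_idx, fine_labels, coarse_labels):
    """Positive: the anchor's fine-grained prototype.
    Negatives: fine-grained prototypes, except those whose members share the
    anchor's coarse-grained cluster (a simplified 'downward masking')."""
    protos = prototypes(Z, fine_labels)
    anchor_fine = fine_labels[anchor_idx]
    anchor_coarse = coarse_labels[anchor_idx]
    positive = protos[anchor_fine]
    negatives = [p for c, p in protos.items()
                 if c != anchor_fine
                 and coarse_labels[fine_labels == c][0] != anchor_coarse]
    return positive, negatives
```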
motivated by the observation that fine-grained clustering preserves higher purity, and coarse-grained clustering reflects higher-level semantics, mhccl proposes to utilize such information for better constructions of pairs in contrastive learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/110.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/110.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebaeddea94a9b8826baa9a2f69e6d4a1d20f6702 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/110.txt @@ -0,0 +1 @@ +"whether ai will help us reach our aspirations or reinforce the unjust inequalities is ultimately up to us." -joy buolowini, 'facing the coded gaze' ai: more than human 1 whether you're driving your car using a gps system, call on alexa or siri to turn on your favorite tune, go on social media to perform a well-earned scroll down memory lane, or go to google search to find a gift to buy for a friend, you have encountered a machine learning model. these models collect data to make predictions that can make our lives easier, or harder, depending on the care taken during the machine learning life cycle process. the machine learning life cycle refers to the multistep process that begins with understanding project objectives and ends at model deployment and maintainability. each step in the process is as follows: define project objectives, acquire and explore data, model data, interpret and communicate, and implement, document, deploy and maintain the model. 2 the reason it is referred to as a life cycle is simple: at each step of the process, the ml practitioner is likely to go back a step or two to validate results by adjusting original project objectives, retrieving more or different data, tuning the model, or documenting their process for model maintenance. in ai ethics, researchers focus heavily on the first three steps in the process to find ways to mitigate bias downstream. this work focuses on the last step of the life cycle: implementing, documenting, and maintaining the model. this step is also referred to as machine learning 1 barbican centre. "joy buolamwini: examining racial and gender bias in facial analysis software".operations or mlops as referred to in this paper. before getting into my argument as to why a focus on mlops is an important addition to the ai ethical lexicon, i will first discuss past work and work my way into the importance of bias mitigation in the mlops step of the ml lifecycle. batya friedman, peter kahn jr. and alan borning take a particular interest in part 1 of the cycle, defining project objectives, in their article, "value sensitive design: theories and methods".3 within the article, friedman et al propose a tripartite method to designing a system keeping human values in mind. this system can be an application that makes decisions that affect the public and for the purposes of this paper, readers can think of machine learning as a type of system. the tripartite method considers the conceptual, empirical, and technical components of a system's design to formulate its main objectives and the way practitioners on june 29, 2015, twitter user, jacky alciné, tweeted "google photos, y'all up. my friend's not a gorilla." 9 (@jackyalcine) the post went viral with people around the world criticizing google's latest classification algorithm. 
10 (wired) scholarship on algorithmic fairness soon followed with ai ethicists and researchers from around the world highlighting bias 5 noble, algorithms of oppression: how search engines reinforce racism. perpetuated by machine learning models. joy boulawini and timnit gebru releasing the article "gender shades: intersectional accuracy disparities in commercial gender classification" exposing the misclassification of black women in large classification models. they set out to supplement the lack of image data of black women using their own facial analysis dataset and incorporate it into three commercial classification models, ibm, microsoft, and face++. what they found was an outsized error rate when classifying black women compared to lighter completed men (8% compared to 34.7%). while supplementation improved performance on black women, the classifier was still a long way from being equitable. boulawini and gebru conclude their work by advocating for transparency and accountability through "inclusive benchmark datasets and subgroup accuracy reports".11 in race after technology, ruha benjamin suggests a similar approach as the one prescribed by gebru and buolawini, increasing transparency using, what she calls, a dataset nutrition label. much like a nutrition label on the back of packaged food, the dataset nutrition label will describe the contents of each model and their performance for tasks across subgroups.12 if google would have had a dataset nutrition label and been more transparent in its data collection process, solutions to the google misclassification problem may have been found more quickly. unfortunately, instead of finding a solution to the problem, three years later, google simply deleted "gorilla" and "chimpanzee" tags from the offending model completely. 13 (wired) while dataset transparency is an important step in mitigating bias in the ml lifecycle, it forgets a very important reality of deploying ai in the real world: ai, by its nature, is built to adapt to new situations and, many times, can cover its biased assumptions creating far reaching problems down the line. this takes us into a common tension in model building observed by ml practitioners in industry: the difference between the work done in part 3 of the life cycle, model building, and part 5 of the life cycle, deploying and maintaining the model.an example of this is discussed in you look like a thing and i love you by janelle shane. within it, she gives the example of ai being trained in a "dream world" with the skills acquired there transferred to the "real world". 14 unfortunately, the rules that bind us in reality are not present in the "dream world" and the ai was able to exploit the glitches it learned in the dream world and tried to apply this in the real world. this caused the model to subsequently fail. 15 if you change the term "dream world" and replace it with "training environment", you quickly see how models can perpetuate biased reasoning even with balanced training data. ai can adapt to cover its mistakes by learning to exploit gaps in the training set for higher reward in the real world.microsoft learned this first hand in 2016 when they unveiled "tay" -a conversational ai bot deployed on twitter. made to mine public data and incorporate the data it gathers from conversations with people online to become more conversational, the chatbot quickly picked up racist and homophobic attitudes even going so far as to praise hitler in one of its tweets within 24 hours of being online. 
16 microsoft never trained its bot on such divisive language. the bot was trained in a dream world where the interactions it had and the data it collected was upbeat 14 shane, you look like a thing. 15 shane, you look like a thing. and positive. once deployed, it was no longer within the confines of its dreamlike training environment and like a new bowler bowling without guardrails, the bot's language quickly went into the gutter. while the details of the bots training environment was not made public, the bot seemed to be rewarded through its learning of new ways of interacting with human respondents. this led to the bot exploiting a gap between its training and what it was observing in the real world to gain maximal reward by engaging in divisive behavior-behavior that, undoubtedly, had more reactions online than positive ones based on the popularity it gained after sending hateful tweets.17 dataset nutrition labels and data transparency may have helped in google's case, but it would not have helped fix tay. tay would have needed tools put in place during the deployment step of ml lifecycle management to ensure that poor behavior is not exploited for the model to receive greater reward.mlops is the final line of defense between the model and the user interacting with the model. mlops covers the last step in the ml lifecycle: implementation, deployment, and model maintenance. 18 by creating a path to bias mitigation within the mlops function of the ml lifecycle, we can better ensure that the work that has been done to mitigate bias in parts 1 -3 of the lifecycle are fortified and continued in the real world through a robust framework created with the last step of the ml lifecycle prioritized. each step in the process is as follows: define project objectives, acquire and explore data, model data, interpret and communicate, and implement, document, deploy and maintain the model.2the reason it is referred to as a life cycle is simple: at each step of the process, the ml practitioner is likely to go back a step or two to validate results by adjusting original project objectives, retrieving more or different data, tuning the model, or documenting their process for model maintenance. this takes us into a common tension in model building observed by ml practitioners in industry: the difference between the work done in part 3 of the life cycle, model building, and part 5 of the life cycle, deploying and maintaining the model.concept and data drift can both cover biases showing up in a model that has been deployed and should be looked at not only from a perspective of model performance but also of bias mitigation. data drift occurs when the distribution of some input within the model is different in the wild than during model training. after model deployment, however, the ml practitioner finds that the population of people interacting the model have a different distribution.while data drift has been identified in the aforementioned model by the change in model input, practitioners benefit from looking at the distribution within each label and the features shared by each label and judge the variance of each population within the sample to establish true model confidence.internal data leakage is when the data source, data pipeline, query, model, dashboard, or other data based process has broken down. 
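as a concrete example of the kind of monitoring this step calls for, the sketch below checks a live input feature for data drift against the training distribution with a two-sample kolmogorov-smirnov test, and computes per-subgroup accuracy so that drift can be examined from a bias-mitigation perspective as well as a performance one; the specific test and thresholds are illustrative choices.

```python
import numpy as np
from scipy.stats import ks_2samp

def drift_alert(train_feature, live_feature, alpha=0.01):
    """Flag data drift when the live distribution of a feature differs
    significantly from the training distribution (illustrative KS test)."""
    _, p_value = ks_2samp(train_feature, live_feature)
    return p_value < alpha

def subgroup_accuracy(y_true, y_pred, groups):
    """Accuracy per subgroup, so performance changes can be traced to groups."""
    return {g: float(np.mean(y_pred[groups == g] == y_true[groups == g]))
            for g in np.unique(groups)}

# A monitoring job might alert when drift_alert(...) fires or when the gap
# between the best- and worst-served subgroup exceeds an agreed tolerance.
```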
consider an example of a data practitioner monitoring performance of a model that has been deployed on a simple prediction model that is attempting to predict who would be more or less likely to place a bet. perhaps, during model training, the data scientist notices that gender is a strong predicting factor of who would be likely to place bets so the ml practitioner monitoring the model may stratify model output by gender to see if there is a divergence of scores by that feature. this can allow observers to gain extra confidence in the model as strong features may cause model performance to be favored in the direction of one subgroup over another causing overall scores to be an incomplete picture of true model performance. when the ml practitioner uses canary deployment they have the ability to test if model performance degradation has occurred as a result of bias that has been introduced to the model "in the wild". in practice, this will look like taking a representative sample of some percentage n of a population and unveiling the model to this group over some t time and testing the interactions of the model across each group while analyzing model parity. if the model performs up to an acceptable standard, then the ml practitioner can safely extend the deployed model to a larger population n + 1 until model stability has been achieved. ml practitioners can test the efficiency of model 1 versus model 2 and examine how much bias is present in each model as a kpi then move forward with the model that has the best parity performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/111.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/111.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ae2ca58557b7f6240e1c06342569b3e028d892f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/111.txt @@ -0,0 +1 @@ +estimating the empirical distribution of a scalar-valued data set is a basic and fundamental task. for example, estimating quantiles of a data stream is one of the oldest and most well-studied problems in streaming algorithms (greenwald and khanna, 2001;karnin et al., 2016;manku et al., 1998;munro and paterson, 1980), with applications to databases (greenwald and khanna, 2001), network health monitoring (cormode et al., 2004), and wireless sensor networks (shrivastava et al., 2004), among others. ideally, a data analyst would like to be able to assume that the data values are independent and identically distributed, and that they are directly observable. however, these assumptions might be violated in applications of interest.1. in many settings, samples can only be evaluated indirectly by comparing them to specified thresholds and learning whether or not each sample is less than or equal to its corresponding threshold. this is the case, for instance, when a seller experiments with varying posted prices in order to estimate the distribution of consumers' willingness to pay for a product or service. other examples arise when eliciting information about the distribution of individuals' abilities using pass-fail tests with a variable level of difficulty (e.g. swimming tests) or when evaluating the quality of a new product by asking consumers to compare it against products of known quality. 
(there is ample evidence in the behavioral sciences that human subjects' quality judgments can be elicited more reliably with ordinal comparisons than with subjective numerical ratings (ali and ronaldson, 2012;chapelle et al., 2012;larichev et al., 1995;moshkovich et al., 2002).)2.the assumption that samples are independent and identically distributed may also be violated. returning to the posted-pricing application, early adopters of a product might differ markedly from late adopters in their willingness to pay for the product, and the late adopters' willingness to pay may even depend on the rate of adoption by earlier consumers, which in turn depends on the posted prices they were offered. a similar application arises in an auction setting with repeated bidding -an internet advertiser estimates the distribution of winning bids by varying their own bid. to account for complex influences on the behavior of other bidders, assuming a worst-case input sequence rather than i.i.d. is very useful (weed et al., 2016).in this work we tackle the problem of estimating the empirical distribution of a sequence of numbers using threshold queries, in a non-stochastic setting that makes no assumptions about the process by which the sequence is generated. our model even allows the sequence to be constructed by an adaptive adversary. we assume the algorithm asks one threshold query about each element of the sequence, and the query and its answer are revealed to both parties before the next element of the sequence is generated by the adversary.the key question we aim to resolve is: what is the sample complexity of estimating the empirical cdf of a distribution on = {1, 2, . . . , n} to within ε? in more detail, what is the smallest t such that there exists a randomized algorithm that succeeds, with probability at least 3/4, in learning an estimate of the empirical cdf of an arbitrary sequence x 1 , . . . , x t that differs from the true empirical cdf (in l ∞ norm) by at most ε? in this paper, we resolve the question to within a constant factor, by proving asymptotically matching upper and lower bounds. in fact, our lower bound is valid even in a stochastic setting where the elements x 1 , . . . , x t are i.i.d. samples from a distribution on . hence our results reveal, perhaps surprisingly, that up to a constant factor, there is no difference in the sample complexity of cdf estimation in the stochastic and non-stochastic settings. , x t is restricted to be a constant sequence, cdf estimation and median estimation both become equivalent to binary search: the empirical cdf is a {0, 1}-valued step function with a step at some x ∈ and x is the unique approximate median, so both tasks become equivalent to identifying the value of x using queries of the form x ? ≤ q t .a deterministic algorithm for the k-tqm chooses queries q 1,t ≤ q 2,t ≤ • • • ≤ q k,t at time t, and receives feedback y 1,t ≤ y 2,t ≤ • • • ≤ y k,t , where y i,t = v t (q i,t ).for j ∈ let q j,t = min q q i=1 a i,t ≥ j k+1 ;. one natural approach would be to simulate the deterministic algorithm from the previous section; that is, run one step of algorithm 1 to obtain a set of k query points, and over the course of the next o k log n ε 2 time-steps, randomly sample one of the k points to query to get an ε-accurate estimate of the cdf at each of the k query points. if p, q are any two probability distributions on the same set, then d kl (p q) ≥ 0. 
for two probability distributions p, q on ω, let p f and q f denote the distributions of f (ω) when ω is sampled from p or from q, respectively. then d kl (p q) ≥ d kl p f q f .let p, q denote the distributions of (ω, f (ω)) when ω is sampled from p or from q, respectively. the function ω → (ω, f (ω)) is a probabilitypreserving bijection between the probability spaces (ω, p) and (γ, p) and also between the probability spaces (ω, q) and (γ, q). the first is an upper bound on d kl (p q) when neither q(0) nor q(1) is close to zero. the second is a lower bound on d kl (p q) when q(0) is close to zero and p(0) is far from zero. since we are assuming p(0) ≤ 1 2 ≤ q(0), as p(0) varies over the range the minimum of d kl (p q) occurs at p(0) = 1 2 , when.7 we know that d kl (p q) = k i=1 d kl (p(x i ) q(x i )) and d kl (q p) = k i=1 d kl (q(x i ) p(x i )).if q s = n then y s is deterministically equal to 1 under both distributions, p and p, so d kl (p s ps ) = 0 when q s = n. we will show that after every timestep t, for any point p ∈ where p 1 , p 2 are adjacent active points, then | f -1 t v t | ≤ log n-log(p 2 -p 1 ) 2t . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/112.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/112.txt new file mode 100644 index 0000000000000000000000000000000000000000..58f46e8f4c2452108ccac263416b672ba28e0ec1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/112.txt @@ -0,0 +1 @@ +over the past decade, the amount of available digital data has exponentially increased. thanks to the advances in computing capabilities, deep neural networks (dnns) have been successfully employed to solve challenging image and natural language processing tasks. however, state-of-the-art dnns are known to be highly vulnerable to adversarial examples , . these small but malicious perturbations of the network input can manipulate the trained model to produce incorrect predictions with high confidence, and some perturbations can even fool different network models . since adversarial attacks might lead to disastrous implications in critical areas like healthcare , climate and finance , defending against them is critical.so far, adversarial training is the most effective approach to mitigate the effect of strong attacks like the projected gradient descent (pgd) attack , deepfool , and autoattack . training the dnn with perturbed versions of the original samples makes it possible to improve the accuracy on unseen adversarial examples, also known as robustness accuracy . however, generating adversarial examples during training can be highly computationally intense since each sample is usually built with several steps in the direction of the gradient as this study was financed in part by the coordenac ¸ão de aperfeic ¸oamento de pessoal de nível superior -brasil (capes) -finance code 001. this work was also supported by the swiss government excellence scholarships for foreign students.the model is trained. moreover, adversarial training generally decreases the standard accuracy, that is, the accuracy on clean samples . this robustness-accuracy tradeoff is reported to be highly data-dependent, especially regarding the data distribution and its quality . furthermore, we only have access to a training dataset which is not necessarily representative for the problem we aim to learn. in this case, we could avoid using the entire training data. 
since the dataset is reduced, we can save several computations during backpropagation and speed-up training. this hypothesis was already investigated for standard training in , . in this work, we extend the work in , and apply it to the adversarial training case. from each mini-batch composed of both clean and adversarial samples, the proposed data selection algorithm selects the most relevant samples based on the cross-entropy loss. since only the selected samples are used to update the model parameters in the backpropagation, the training time is reduced. the selection also balances the necessary amount of clean and adversarial samples required to yield satisfactory robustness and standard accuracy.the paper is organized as follows. section ii presents a brief overview of the adversarial training method and some notations. in section iii, we propose a data selection technique for adversarial training. the proposed approach is tested via simulation results in section iv. finally, section v includes some conclusion remarks. training the dnn with perturbed versions of the original samples makes it possible to improve the accuracy on unseen adversarial examples, also known as robustness accuracy. moreover, adversarial training generally decreases the standard accuracy, that is, the accuracy on clean samples. from each mini-batch composed of both clean and adversarial samples, the proposed data selection algorithm selects the most relevant samples based on the cross-entropy loss. since only the selected samples are used to update the model parameters in the backpropagation, the training time is reduced. the selection also balances the necessary amount of clean and adversarial samples required to yield satisfactory robustness and standard accuracy. by using pgd, b adversarial examples are generated from the samples in the set b using equation(5). we then propose to select a portion p up of the samples in b based on the higher error values in equation (7), forming a selection set s. since only a portion p up of the samples are used to update the parameters, we can save some computations and we alleviate the training burden. we need more samples in the mini-batch to improve learning when the accuracy is low, whereas fewer samples are required to continue the learning process when the accuracy increases.as it will be shown in the simulations, updating the p up using equation(9)accelerates the convergence for p (0) up = 1 because, in this case, it selects more samples in the first epochs. the ds robust method is trained with the selection set s of size b = 256, which is composed of both clean and adversarial samples, and it is obtained using our selection strategy with update model parameters by back propagation using only the data samples in s; p up fixed or varying. the random robust method is trained with a mini-batch of size b = 256, composed of clean and adversarial samples selected at random.5, we slightly outperform the approach that consider all the samples (p up = 1) in terms of standard accuracy, with the benefit of requiring only 50% of the samples in the batch. therefore, the varying p up strategy can be applied if an early stopping method is also employed. although adversarial training is able to improve the robustness accuracy, it usually sacrifices standard accuracy in its way. 
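a minimal pytorch-style sketch of the loss-based selection step described above; `make_pgd_examples` is a hypothetical stand-in for the pgd attack of equation (5), and p_up is the selected fraction. this illustrates the idea only and is not the authors' implementation.

```python
import torch
import torch.nn.functional as F

def selected_update(model, optimizer, x_clean, y, make_pgd_examples, p_up=0.5):
    x_adv = make_pgd_examples(model, x_clean, y)          # adversarial counterparts
    x = torch.cat([x_clean, x_adv], dim=0)                # mixed mini-batch of clean + adversarial
    t = torch.cat([y, y], dim=0)

    with torch.no_grad():                                  # rank samples by per-sample loss first
        per_sample = F.cross_entropy(model(x), t, reduction="none")
    k = max(1, int(p_up * x.size(0)))
    idx = torch.topk(per_sample, k).indices               # keep the hardest (most relevant) samples

    optimizer.zero_grad()
    loss = F.cross_entropy(model(x[idx]), t[idx])         # backpropagate only on the selection
    loss.backward()
    optimizer.step()
    return loss.item()
```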
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/113.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/113.txt new file mode 100644 index 0000000000000000000000000000000000000000..011ec8875bf51e17fbb8d1205f32742d05ac3273 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/113.txt @@ -0,0 +1 @@ +typical machine learning tasks, in the field of natural language processing , speech , computer vision , and etc, usually handle a mapping from source data x to target data y . for example, x is image and y is class label in image classification ; x is style tag and y is sentence in style-controlled text generation ; x is text and y is speech in text-to-speech synthesis .depending on the relative amount of information that x and y contain, these mappings can be divided into data understanding , data generation , and the combination of data understanding and generation . figure 1 shows the three types of tasks and the relative information between x and y : • data understanding tasks, in which x contains much more information than y (e.g., image classification , objective detection , sentence classification , machine reading comprehension ).• data generation tasks, in which y contains much more information than x (e.g., text generation or image synthesis from class label).• data understanding/generation tasks, in which x contains no significantly more or less information than y (e.g., image transfer , text-to-image synthesis , neural machine translation , text-to-speech synthesis , automatic speech recognition ). in this case, we need both data understanding capability on the source x and data generation capability on the target y .the information mismatch between x and y leads to different strategies for solving different tasks. for data understanding tasks, x is usually high-dimensional, complex, and redundant compared to y , and the key is to learn highly abstractive or discriminative representations (sometimes need to remove unnecessary information) for x in order to better predict y . thus, representation learning 2 and especially self-supervised pre-training have become some of the hottest topics in deep learning research in the past years. for data generation tasks, y is usually high-dimensional, complex, and redundant compared to x, and the key is how to better represent the distribution of y and better generate y from x. for data understanding/generation tasks, they need the capability in both understanding and generation, i.e., extract good representations from x and fully generate the information in y .typical machine learning tasks, in the field of natural language processing, speech, computer vision, and etc, usually handle a mapping from source data x to target data y .for the data generation tasks and the generation part of the data understanding/generation tasks (we call both the two types as data generation tasks in the remaining of this paper), they face distinctive challenges that cannot be addressed by the traditional formulation of representation learning. instead of directly generating target data y from source data x, regeneration learning first generates y (a representation of y ) from x and then generates y from y . 
regeneration learning extends the concept of representation learning to data generation tasks and learns a good representation (y ) of the target data y to ease the generation: 1) x → y mapping will be less one-to-many than x → y since y is a compact/representative version of y ; 2) y → y mapping can be learned in a self-supervised way (y is obtained from y ) and can be empowered by large-scale pre-training that is similar to that in traditional representation learning for data understanding tasks (e.in the rest of this paper, we first introduce the basic formulation of regeneration learning and its connection to other learning methods and paradigms in section 2, then summarize the applications of regeneration learning in section 3, and finally list some research opportunities on regeneration learning in section 4.regeneration learning extends the concept of representation learning to data generation, and thus it can be regarded as a special type of representation learning for data generation. furthermore, we can regard regeneration learning as a counterpart of traditional representation learning, since 1) regeneration learning handles the abstraction (y ) of the target data y for data generation, while traditional representation learning handles the abstraction (x ) of source data x for data understanding; 2) both the processes of y → y in regeneration learning and x → x in traditional representation learning can be learned in a self-supervised way (e., pre-training); 3) both the mappings from x to y in regeneration learning and from x to y in traditional representation learning are simpler than the direct mapping from x to y .we discuss some research opportunities to make regeneration learning more powerful to solve a variety of data generation tasks, and list some corresponding research questions (rq) both in each subsection and in table3, mainly from three perspectives: 1) how to get y ; 2) how to learn the mapping x → y and y → y ; 3) how to reduce the training-inference mismatch in regeneration learning pipeline. we further take source data x as the input of the source encoder to get x = f x (x) and encourage x and y to contain information with each other by using some losses such as contrastive loss or l1/l2 loss:.however, there is a trade-off on the difficulties between x → y and y → y , since considering more x when learning y will ease the learning of x → y while making the learning of y → y harder. regeneration learning decomposes a conditional data generation task x → y into data conversion and data rendering processes. literally, it means generating the data two times: first generates an intermediate representation y from source data x, and then generates a target data y from y . regeneration learning can be regarded as a counterpart of traditional representation learning: regeneration learning handles the abstraction (y ) of the target data y for data generation while representation learning handles the abstraction (x ) of source data x for data understanding, and both the processes of y → y in regeneration learning and x → x in representation learning can be learned in a self-supervised way (it is also a counterpart in literally: presentation→representation vs.we discuss the connections of regeneration learning to other methods, demonstrate a variety of data generation tasks that can benefit from regeneration learning, and further point out some research opportunities on regeneration learning. 
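a minimal sketch of the decomposition described above, writing the intermediate representation as y_prime (the paper's y′): a converter learns x → y′ while a renderer learns y′ → y in a self-supervised way, since y′ is obtained from y alone. all module names and the mse losses are illustrative assumptions.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class RegenerationPipeline(nn.Module):
    def __init__(self, abstractor, converter, renderer):
        super().__init__()
        self.abstractor = abstractor   # y  -> y'  (defines the intermediate representation)
        self.converter = converter     # x  -> y'  (conditional "data conversion")
        self.renderer = renderer       # y' -> y   (self-supervised "data rendering")

    def training_losses(self, x, y):
        with torch.no_grad():
            y_prime = self.abstractor(y)                       # target for the conversion stage
        conv_loss = F.mse_loss(self.converter(x), y_prime)     # x -> y' is less one-to-many than x -> y
        rend_loss = F.mse_loss(self.renderer(y_prime), y)      # learned from y alone (self-supervised)
        return conv_loss, rend_loss

    @torch.no_grad()
    def generate(self, x):
        return self.renderer(self.converter(x))               # generate "two times" at inference
```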
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/114.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/114.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ba1eec17aab6c6dcbf2c5a252192276b2447dbe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/114.txt @@ -0,0 +1 @@ +in recent years, the problem of privacy-preserving data analysis has become increasingly important and differential privacy (dwork et al., 2006) appears as the foundation of data privacy. differential privacy (dp) techniques are widely adopted by industrial companies and the u.s. census bureau (johnson et al., 2017;erlingsson et al., 2014;nguyên et al., 2016; the u.s. census bureau, 2020; abowd, 2018).one important method to protect data privacy is differentially private data synthesis (dpds). in the setting of dpds, a synthetic dataset is generated by some dp data synthesis algorithms from a real dataset. then, one can release the synthetic dataset and the real dataset will be protected. recently, national institutes of standards and technology (nist) organized the differential privacy synthetic data competition (nist, 2018;2019;2020-2021). in the nist competition, the state-ofthe-art algorithms are marginal-based (mckenna et al., 2021), where the synthetic dataset is drawn from a noisy marginal distribution estimated by the real dataset. to deal with high-dimensional data, the distribution is usually modeled by the probabilistic graphical model (pgm) such as the bayesian networks or markov random fields (jordan, 1999;wainwright et al., 2008;zhang et al., 2017;mckenna et al., 2019;cai et al., 2021).despite its empirical success in releasing high-dimensional data, as far as we know, the theoretical guarantee of marginal-based dpds approaches is rarely studied in literature. in this paper, we focus on a dpds algorithm based on the bayesian networks (bn) known as the privbayes (zhang et al., 2017) that is widely used in synthesizing sparse data (sparsity measured by the degree of a bn that will be defined later). a bn is a directed acyclic graph where each vertex is a low-dimensional marginal distribution and each edge is the conditional distribution between two vertices. it approximates the high-dimensional distribution of the raw data with a set of well-chosen low-dimensional distributions. random noise is added to each low-dimensional marginal to achieve differential privacy. we aim to analyze the marginal-based approach from a statistical perspective and measure the our contributions. our contributions are three-fold. first, we theoretically analyze the marginalbased synthetic data generation and derive an upper bound on the tv distance and l 2 distance between real data and synthetic data. the upper bounds show that the bayesian network structure mitigates the "curse of dimensionality". an upper bound for the sparsity of real data is also derived from the accuracy bounds. second, we evaluate the utility of the synthetic data from downstream supervised learning tasks theoretically. precisely, we bound the tstr error between the predictors trained on real data and synthetic data. third, we establish a lower bound for the tv distance between the synthetic data distribution and the real data distribution.one important method to protect data privacy is differentially private data synthesis (dpds). in the setting of dpds, a synthetic dataset is generated by some dp data synthesis algorithms from a real dataset. 
our contributions are three-fold. second, we evaluate the utility of the synthetic data from downstream supervised learning tasks theoretically. precisely, we bound the tstr error between the predictors trained on real data and synthetic data. precisely, privbayes(zhang et al. by the term "accuracy", we mean the tv or the l 2 distance between the synthetic distribution and the raw data distribution, respectively. we assume the raw data distribution can be represented by a bayesian network with d vertices that satisfies assumption 3. since we assume that the real data and synthetic data share the same k, we conclude that privbayes achieve better performance on sparser real datasets. an alternative is to release differentially private synthetic data instead of the raw data. a central problem is the "utility" of synthetic training data, which means the evaluation of synthetic data in downstream tasks., 2020;hittmeir et al. in other words, private synthetic data generated by privbayes performs similarly to raw data in downstream learning tasks.in this section, we complete the picture by deriving a lower bound for the tv-distance between synthetic private distribution and the raw data distribution. then by adding and contracting s j=1 a l a j , we rewrite and bound s 1 as s j=1,j =l a l (b j -a j ) + a j (a l -b l ). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/115.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/115.txt new file mode 100644 index 0000000000000000000000000000000000000000..84acac1b2c9c1b66f83251e5920d8c5ad10da2f6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/115.txt @@ -0,0 +1 @@ +in deep learning, there are often good results with little justification and good justifications with few results. network pruning exemplifies the former: we can easily prune half or more of the connections of a neural network without affecting the resulting accuracy, but we may have difficulty explaining why we can do that. the theory of linear regions exemplifies the latter: we can theoretically design neural networks to express very nuanced functions, but we may end up obtaining much simpler ones in practice. in this paper, we posit that the mysteries of pruning and the wonders of linear regions can complement one another.when it comes to pruning, we can reasonably argue that reducing the number of parameters improves generalization. while denil et al. show that the parameters of neural networks can be redundant, it is also known that the smoother loss landscape of larger neural networks leads to better training convergence . curiously, jin et al. argue that pruning also smooths the loss function, which consequently improves convergence during fine tuning -the additional training performed after pruning the network. however, it remains unclear to what extent we can prune without ultimately affecting accuracy, which is an important concern since a machine learning model with fewer parameters can be deployed more easily in environments with limited hardware.the survey by hoefler et al. illustrates that a moderate amount of pruning typically improves accuracy while further pruning may lead to a substantial decrease in accuracy, whereas liebenwein et al. show that this tolerable amount of pruning depends on the task for which the network is trained. in terms of what to prune, another survey by blalock et al. 
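a minimal sketch of the marginal-based step discussed above: a single low-dimensional marginal (here a 1-d histogram over a small domain) is perturbed with laplace noise and synthetic records are sampled from it. the noise calibration and helper names are illustrative assumptions rather than the privbayes mechanism, which noises a whole set of bayesian-network marginals.

```python
import numpy as np

def noisy_marginal(values, domain_size, epsilon, rng):
    counts = np.bincount(values, minlength=domain_size).astype(float)
    counts += rng.laplace(scale=1.0 / epsilon, size=domain_size)   # laplace mechanism on the counts
    counts = np.clip(counts, 0, None)                              # post-process back to a distribution
    return counts / counts.sum()

def synthesize(values, domain_size, epsilon, n_synth, seed=0):
    rng = np.random.default_rng(seed)
    p = noisy_marginal(np.asarray(values), domain_size, epsilon, rng)
    return rng.choice(domain_size, size=n_synth, p=p)              # draw synthetic records

real = [0, 0, 1, 2, 2, 2, 3, 3, 3, 3]
print(synthesize(real, domain_size=4, epsilon=1.0, n_synth=10))
```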
observes that most approaches consist of either removing parameters with the smallest absolute value ; or removing parameters with smallest expected impact on the output , to which we can add the special case of exact compression .while most work on this topic has helped us prune more with a lesser impact on accuracy, fairness studies recently debuted by hooker et al. have focused instead on the impact of pruning on recall -the ability of a network to correctly identify samples as belonging to a certain class. recall tends to be more severely affected by pruning in classes and features that are underrepresented in the dataset , which tran et al. attribute to differences across such groups in gradient norms and hessian matrices of the loss function. in turn, good et al. showed that such recall distortions may also occur in balanced datasets, but in a more nuanced form: moderate pruning leads to comparable or better accuracy while reducing differences in recall, whereas excessive pruning leads to lower accuracy while increasing differences in recall. hence, avoiding a significant loss in accuracy due to pruning is also relevant for fairness.overall, network pruning studies have been mainly driven by one question: how can we get away with more network pruning? before we get there with our approach, let us consider the other side of the coin in our narrative.when it comes to the theory of linear regions, we can reasonably argue that the number of linear regions may represent the expressiveness of a neural network -and therefore relate to its ability to classify more complex data. we have learned that a neural network can be a factored representation of functions that are substantially more complex than the activation function of each neuron. this theory is applicable to networks in which the neurons have piecewise linear activations, and consequently the networks represent a piecewise linear function in which the number of pieces -or linear regions -may grow polynomially on the width and exponentially on the depth of the network . when the activation function is the rectified linear unit (relu) , each linear region corresponds to a different configuration of active and inactive neurons. for geometric reasons that we discuss later, not every such configuration is feasible.the study of linear regions bears some resemblance to universal approximation results, which have shown that most functions can be approximated to arbitrary precision with sufficiently wide neural networks . these results were extended in to the currently more popular relu activation and later focused on networks with limited width but arbitrarily large depth . in comparison to universal approximation, the theory of linear regions tells us what piecewise linear functions are possible to represent -and thus what other functions can be approximated with them -in a context of limited resources translated as both the number of layers and the width of each layer.most of the literature is focused on fully-connected feedforward networks using the relu activation function, which will be our focus on this paper as well. 
nevertheless, there are also adaptations and extensions of such results for convolutional networks by and for maxout networks by .several papers have shown that the right choice of parameters may lead to an astronomical number of linear regions , while other papers have shown that the maximum number of linear regions can be affected by narrow layers , the number of active neurons across different linear regions , and the parameters of the network . despite the exponential growth in depth, serra et al. observe that a shallow network may in some cases yield more linear regions among architectures with the same number of neurons. whereas the number of linear regions among networks of similar architecture relates to the accuracy of the networks , hanin and rolnick show that the typical initialization and subsequent training of neural networks is unlikely to yield the expressive number of linear regions that have been reported elsewhere.these contrasting results lead to another question: is the network complexity in terms of linear regions relevant to accuracy if trained models are typically much less expressive in practice? now that you have read both sides of our narrative, you may have guessed where we are heading.we posit that these two topics -network pruning and the theory of linear regions -can be combined. namely, that the latter can guide us on how to prune neural networks, since it can be a proxy to model complexity.but we must first address the paradox in our second question. as observed by hanin and rolnick , perturbing the parameters of networks designed to maximize the number of linear regions, such as the one by telgarsky , leads to a sudden drop on the number of linear regions. our interpretation is that every architecture has a probability distribution for the number of linear regions. if by perturbing these especially designed constructions we obtain networks with much smaller numbers, we may infer that these constructions correspond to the tail of that distribution. however, if certain architectural choices lead to much larger numbers of linear regions at best, we may also conjecture that the entire distribution shifts accordingly, and thus that even the ordinary trained network might be more expressive if shaped with the potential number of linear regions in mind. hence, we conjecture the architectural choices aimed at maximizing the number of linear regions may lead better performing networks.that brings us to a gap in the literature: to the best of our understanding, there is no prior work on how network pruning affects the number of linear regions. we take the path that we believe would bring the most insight, which consists of revisiting -under the lenses of sparsity -the factors that may limit the maximum number of linear regions based on the neural network architecture.in summary, this paper presents the following contributions: (i) we prove an upper bound on the expected number of linear regions over the ways in which weight matrices might be pruned, which refines the bound in to sparsified weight matrices (section 3). (ii) we introduce a network pruning technique based on choosing the density of each layer for increasing the potential number of linear regions (section 4). 
(iii) we propose a method based on mixed-integer linear programming (milp) to count linear regions on input subspaces of arbitrary dimension, which generalizes the cases of unidimensional and bidimensional inputs; this milp formulation includes a new constraint in comparison to for correctly counting linear regions in general (section 5). , n l }. let w l be the n l × n l-1 matrix where each row corresponds to the weights of a neuron of layer l, w l i the i-th row of w l , and b l the vector of biases associated with the units in layer l. the hyperplane w l i h l-1 + b l i = 0 defined by the parameters of the neuron separate the inputs in h l-1 into two half-spaces. (ii) the hyperplane arrangement: with every neuron in layer l partitioning h l-1 into two half-spaces, our first guess could be that the intersections of these half-spaces would lead the neurons in layer l to partition h l-1 into a collection of 2 n l regions. given the number of activation hyperplanes in layer l as n l and assuming for now that the size of the input space h l-1 is n l-1 , then the number of linear regions defined by layer l, or n l , is such that. in other words, the activation hyperplanes define the boundaries of the linear regions and within each linear region the points are such that either w l i h l-1 + b l i > 0 or w l i h l-1 + b l i < 0 with respect to each neuron i in layer l. hence, this bound ignores cases in which we would regard w l i h l-1 + b l i = 0 as making the neuron inactive when w l i h l-1 + b l i ≥ 0 for any possible input in h l-1 , and vice-versa when w l i h l-1 + b l i ≤ 0, since in either case the linear region defined with w l i h l-1 + b l i = 0 would not be full-dimensional and would actually be entirely located on the boundary between other full-dimensional regions. if layer l + 1 or any subsequent layer has more than n l neurons, that would not imply that the dimension of the image from any linear region is greater than n l since the output of any linear region after layer l is contained in a space with dimension at most min{n 0 , n 1 , . let r(l, d) be the expected maximum number of linear regions that can be defined from layer l to layer l with the dimension of the input to layer l being d; and let p (k|r, c, s) be the probability that a weight matrix having rank k with r rows, c columns, and probability s of each element being nonzero. namely, let r(l, d) be the maximum number of linear regions that can be defined from layer l to layer l with the dimension of the input to layer l being d, and let n n l ,d,j be the maximum number of regions from partitioning a space of dimension d with n l activation hyperplanes such that j of the corresponding neurons are active in the resulting subspaces. since n l j = n l n l -j , we may conservatively assume that n l 0 linear regions have n l active neurons, n l 1 linear regions have n l -1 active neurons, and so on. for the base case of l = l, we replace n l from the end of the summation range with the rank k of the weight matrix w l , and then we calculate the expected maximum number of linear regions using the probabilities of rank k having any value from 0 to n l as. 
, l -1}, we similarly replace n l from the end of the summation range with the rank k of the weight matrix w l , and then we calculate the expected maximum number of linear regions using the probabilities of rank k having any value from 0 to n l as.in these formulations, the parameters w l and b l of each layer l ∈ l are constant while the decision variables are the inputs of the network (x = h 0 ∈ x), the ouputs before and after activation of each feedforward layer (g l ∈ r n l and h l ∈ r n l + for l ∈ l), and the state of the neurons in each layer (z l ∈ {0, 1} n l for l ∈ l). for each layer l ∈ l and neuron i ∈ n l , the following constraints associate the input h l with the outputs g l i and h l i as well as with the neuron activation z l i :. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/116.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/116.txt new file mode 100644 index 0000000000000000000000000000000000000000..f3536683e22f61cc9cbc461b7887494625a57daa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/116.txt @@ -0,0 +1 @@ +this study focuses on human activity recognition based on inertial sensor data collected using smartphone sensors. there are already a lot of wellness wearables in the market which rely on sensor data. most of them are using static pre-build algorithms, and thus, they are not able to adapt to unseen situations. in fact, the most common way to build a machine learning prediction model is to rely on data that are given prior to training the model. the problem of this approach is that it assumes that structure of the data remains static. however, this is not the case in the real-world problems as the world around us constantly changes. moreover, while pre-build algorithms provide high recognition rates on most of the people, but not for all . due to this, the recognition should be based on adaptive personalized models and not on static user-independent models.in the case of wellness applications, incremental learning can be used to personalize and improve recognition models, and adapt them to new environments. when it comes to ensemble-based incremental learning models, like the ones studied in this article, personalizing by learning new personal base models from streaming data, and adding these to ensemble . therefore, in the case of incremental learning, the recognition model is updated, and model re-training is not needed. in fact, it has been shown that wearable sensor data based human activity recognition models benefit from incremental learning as there are some studies where incremental learning is used to recognize human activities based on wearable sensor data ( , and ). these articles do not focus on personalizing human activity recognition models, but it has shown in related studies that incremental learning can be used for that as well.in and unsupervised methods to personalize human activity recognition models without user-interruption based on incremental learning were presented. for instance, is based on ensemble method which processes the incoming streaming data as batches. for each batch, a new group of weak base models are trained, and combined to a group of previously trained models. in the context of human activity recognition, the problem is that true labels are not available. therefore, when it comes to incremental learning, there are two options for obtaining labels for the online data: ask labels from the user, or train new models based on predicted labels. 
however, considering predicted labels as true labels is risky as it is not sure if they are correctly classified. due to this, it is possible that the labels used to train new models are incorrect, and this easily leads to learning wrong things and concept drift . in this study, the concept drift is caused by changes in the observed data, and in addition, because of the limitations of the initial training data and machine learning model performance. due to these, the structure of data used to update the recognition models in not similar to initial training data causing mislabelled data and inaccurate drifting models.when manual labelling is used, similar problems do not exist. in , , , and it was shown that models benefit from user inputs and with them concept drift can mostly be avoided. for instance, in and , human intelligence was used to label instances in such cases where posterior value of the prediction was low. it was noted that already by replacing a small number of uncertain labels with true labels, the accuracy of the online learning model can be improved significantly. however, as the data labelling is always burdensome, user should be bothered as seldom as possible and need for user inputs should be minimized. therefore, as user inputs are needed in the model personalizing process to avoid concept drift, it needs to be studied is what is the best and the most effective way for human ai collaboration in the labelling process, and when user inputs really are needed and when predicted labels can be trusted.in this article, concept drift detection is based on feature relevance, a.k.a. feature importance, analysis which is not a new idea but in this article it is used for the first time in the context of human activity recognition. when it comes to explaining the reason for concept drift, there are articles where the type of drift is detected, for instance if the detected drift was virtual or real concept drift , but not many studies to explain the actual reason for the drift. however, in the concept drift related security applications were detected, and the reason drifting samples were explained by identifying a feature set having values that are different to affected samples and normal samples, and using this information to find the reason for the drift.in this article, it is studied how feature relevance analysis is not only used to detect when drift is happening, but also to detect the actual reason for the drift when a limited number of possible reasons for the concept drift are predefined. with the help of this information, human-ai collaboration can be made more effective as ai can explain what was wrong with the training data and suggest which instances human needs to label manually. human activity recognition is used as a case study.the article is organized as follows: the data used in the experiments is explained in section ii, section iii introduces the idea of the proposed method, section iv explains the experimental setup, and results and discussion. finally, conclusions and future work are in section v. therefore, in the case of incremental learning, the recognition model is updated, and model re-training is not needed.in this article, it is studied how feature relevance analysis is not only used to detect when drift is happening, but also to detect the actual reason for the drift when a limited number of possible reasons for the concept drift are predefined. 
according to the results provided in, this method improves the recognition rates and depending on the used base classifier, user needs to hand label about 10% of the observations. instead, new model is trained based on the obtained training data and predicted labels, and after this feature relevance analysis is used to analyze the quality of the trained personal base model based on how relevant different features are for this base model. then if it is noted that base model is not reliable, the reason for unreliability can be explained based on feature relevance analysis as well when typical reasons for concept drift are known, and true labels for needed observations are asked from the user. secondly, three reasons for the concept drift that are studied in this article are pre-defined (step 2): (s1) each class of the training data contains approximately equal amount of false labels, (s2-s3) and for s3, all the training samples of walking upstairs are mislabeled as walking downstairs.to compare feature relevance of clean and worst-case models (step 4), feature relevance for the models were obtained using matlab's predictorimportance-function, and tableishows the average relevance of each feature of clean model when personal model was trained for each subject.the results of figure2show how the relevance sum of the selected features (see tableii) for each scenario behave when the data quality used in the training process of base model reduces more and more, and therefore, the base model performance starts to gradually drift. the results from scenario s1 (figure2(a)) show that if the recognition rate of the model used to predict labels for the training data drops equally for each class, and therefore the training data of the studied model has approximately equal amount of false labels for each class, it has surprisingly small impact on the the model performance on test set (green bars). still, when the distance between feature relevance differences obtained from a model trained using correct labels and a model trained using false labels is calculated, there is a clear difference in the relevance sum of the selected features when the number falsely labeled training samples increases (blue line). figure2(b) shows how based on feature relevance analysis it is possible to detect situation where the labels of biking and walking are mixed in the training data, and figure2(c)shows results when the labels of walking upstairs are mixed with walking downstairs in the training data. the sum of relevance differences is smaller in s3 than in s2 (see right yaxis in figures2(b) and 2(c)), but also in the case of s2 false labels has a clear effect to the relevance sum of the selected features when more and more false labels are used in the model training process. this is studied in figure3where it is compared how the relevance sum of the features selected to detect concept drift in certain scenario behaves compared to relevance sum of the features selected for other two scenarios. in the figure3(a), as assumed based on tableii, only the sum of features selected for s1 is positive and goes up (blue line) (and the sum of features selected for s2 & s3 goes down) when the concept drift of scenario s1 is affecting to training label quality. each studied reason for the concept drift has a unique effect to the relevance of the selected features, and features selected to detect concept drift in certain scenario behaves differently compared to relevance sum of the features selected for other two scenarios. 
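a minimal sketch of the feature-relevance comparison described above: importances from a clean reference model and from a newly trained personal base model are compared, and each pre-defined drift scenario is scored by the summed importance shift on its selected features. the random-forest importances, the scenario-to-feature mapping, and the alert threshold are illustrative assumptions (the study itself uses matlab's predictorimportance).

```python
import numpy as np
from sklearn.ensemble import RandomForestClassifier

def feature_importances(X, y, seed=0):
    """Fit a forest and return per-feature relevance scores."""
    return RandomForestClassifier(n_estimators=100, random_state=seed).fit(X, y).feature_importances_

def drift_scores(clean_imp, new_imp, scenario_features):
    """Sum the importance shift over the features selected for each pre-defined scenario."""
    diff = np.asarray(new_imp) - np.asarray(clean_imp)
    return {name: float(diff[idx].sum()) for name, idx in scenario_features.items()}

def explain_drift(scores, threshold=0.1):
    """Return the most likely drift scenario, or None if the new base model looks trustworthy."""
    name, score = max(scores.items(), key=lambda kv: abs(kv[1]))
    return name if abs(score) >= threshold else None   # None -> predicted labels can be trusted
```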
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/117.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/117.txt new file mode 100644 index 0000000000000000000000000000000000000000..76fa00445472ad8b9dedafce8ef73053c8ef4aa8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/117.txt @@ -0,0 +1 @@ +the rapid and dynamic pace of artificial intelligence (ai) and machine learning (ml) developments is revolutionizing the insurance sector, especially in property and casualty insurance. in 2018, the eiopa's big data analytics thematic review in motor and health insurance reported that 55% of the firms who participated in the survey were using ai, and 24% were at a "proof of concept" stage. (eiopa, 2019). eiopa itself, after noting that data doubles in very short intervals, gives a positive judgment on its use: "technology is finally making artificial intelligence (ai) into a relevant tool to improve our societies. insurance has been a heavy user of data from practically early days of its existence. the collection of data, even when available, has been expensive. analysis of this data has been expensive too and often inaccurate. the emergence of big data (bd) and ai are changing this, making it possible to have more exact knowledge and changing the ways insurers interact with policyholders." (eiopa, 2021) the use of ai in insurance helps companies better serve their customers by customizing insurance products and services to fit individual needs, in a more precise and timely manner. ai enters directly as a tool to guarantee the customer centricity of underwriters' actions: an accurate exam of the characteristics, needs and preferences of customers, conducted by filtering their data through ai systems, permits to adhere to them much better than a simplified market analysis did in the past. services to customers, in all phases of the value chain, can be offered promptly avoiding the mistakes inherent in human actions. indeed, the advantage of ai does not consist only in the amount of information that it can quickly process, but also in the fact that it can potentially outperform humans in a wide range of tasks, such as contentbased processing of information, text analytics, recognition of patterns, trends, and preferences, and improve operational efficiency, as clearly argued in (eling, nuessle, and staubli, (2021). as a consequence, revenues can increase, costs are reduced, and profits and value can go up. it is by now clear that, when ai is appropriately implemented, it has advantages in processing customer information and handling the relationship with clients. ai can handle the enormous amount of information on clients that is collected to exploit predictive analytics.at the same time, ai generates challenges for underwriters. unfortunately, the massive use of web-based and computer-based instruments makes every firm and individual, but especially insurance companies, the potential target of cyberattacks. a thorough, clear picture of the risks is in eling (2020).among cyberattacks, insurance companies' use of ai can be the target of socalled adversarial ai, which causes ai and ml tools to misinterpret the information provided to them and give output (recommendations, predictions, decisions, categorizations) favourable to the attacker. 
information about a category of customers that is collected and processed through an "automatic" ai tool can be distorted by adversarial ai.we demonstrate that not only a claim model can be defrauded, but predictions based on an attacked system may have better statistical properties and prediction accuracy than the non-attacked ones. high caution and expert judgment based on multiple sources of information are then needed when using ai.we give general suggestions to prevent attacks and focus on the ability of zero-label countermeasures to prevent them and build resilience against a pervasive phenomenon.to understand where and how adversarial ai can be exerted, in section 2 we review some typical uses of ai in property and casualty insurance. in section 3, we provide a definition of adversarial ai and some examples of attacks.in section 4 we categorize attacks based on whether they act on images, audio or text. in section 5 we discuss how to anticipate and prevent attacks, while in section 6 we study the issues raised by zero-labelling. section 7 contains a final discussion.the rapid and dynamic pace of artificial intelligence (ai) and machine learning (ml) developments is revolutionizing the insurance sector, especially in property and casualty insurance. in 2018, the eiopa's big data analytics thematic review in motor and health insurance reported that 55% of the firms who participated in the survey were using ai, and 24% were at a "proof of concept" stage."(eiopa, 2021)the use of ai in insurance helps companies better serve their customers by customizing insurance products and services to fit individual needs, in a more precise and timely manner. ai enters directly as a tool to guarantee the customer centricity of underwriters' actions: an accurate exam of the characteristics, needs and preferences of customers, conducted by filtering their data through ai systems, permits to adhere to them much better than a simplified market analysis did in the past. indeed, the advantage of ai does not consist only in the amount of information that it can quickly process, but also in the fact that it can potentially outperform humans in a wide range of tasks, such as contentbased processing of information, text analytics, recognition of patterns, trends, and preferences, and improve operational efficiency, as clearly argued in(eling, nuessle, and staubli, (2021).among cyberattacks, insurance companies' use of ai can be the target of socalled adversarial ai, which causes ai and ml tools to misinterpret the information provided to them and give output (recommendations, predictions, decisions, categorizations) favourable to the attacker. 
information about a category of customers that is collected and processed through an "automatic" ai tool can be distorted by adversarial ai.to understand where and how adversarial ai can be exerted, in section 2 we review some typical uses of ai in property and casualty insurance.we distinguish the use of ai in designing new products, from the one in risk assessment, from the substantial help in easing the underwriting process and the whole customer journey, including claim processing.first of all, ai can be used to improve the design of new products, by tailoring them to the perceived needs of (different categories of) potential customers, differentiated by, say, age, gender, occupation, level of education, experience with the same company, but also by more specific data that they offer through questionnaires or more information that they willingly reveal through their use of social media and friendships.adversarial ai consists in receiving text or photos or voice messages, depending on the type of ai interaction with the underwriter and its intermediaries (agents, brokers, . adversarial ai can be used and can damage insurance companies in all ai applications. when a product is designed to be tailored to the customer's needs, for instance, adversarial ai can be used to intercept the submission of the clients' features and therefore provide a picture of the client that is different from the true one.during the life of the contract, adversarial ai can interfere with chatbots and conversational ai, for instance by affecting the quality and precision of the answers to the customer, or by distorting his questions to the chatbot.the topics discussed in the paper require proper preparation and resilience building by insurance companies in domains enveloping the development and deployment of ai and ml, given the pervasiveness of adversarial ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/118.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/118.txt new file mode 100644 index 0000000000000000000000000000000000000000..fa898ebfc05a567adf763eb6a805181f66b03f55 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/118.txt @@ -0,0 +1 @@ +neural networks are currently widely used in various fields, such as image processing , pattern recognition , adaptive control , unmanned vehicles and aircraft collision avoidance systems , etc., demonstrating their powerful capabilities in solving complex and challenging problems that traditional approaches fail to address. as neural networks are further investigated, the size and complexity of their models continue to increase in order to improve their performance and accuracy to cope with complex and difficult tasks and changing environments. however, more complex large-scale neural network models also imply larger computational resources, such as larger memory, higher computational power and more energy consumption in applications . as a result, many neural network model reduction methods have been developed, such as parameter pruning and sharing, low-rank factorization, transfer/compact convolution filters, and knowledge distillation . more results on neural network model reduction can be found in a recent survey .on the other hand, due to the black-box nature of neural networks, neural networks are vulnerable in the face of resistance to interference/attacks. 
it has been observed that neural networks trained on large amounts of data are sometimes sensitive to updates and react to even small changes in parameters in unexpected and incorrect ways . when neural networks are applied as controllers onto dynamical systems, they will inevitably suffer from safety problems due to the inevitable disturbances and uncertainties in the control process, further affecting the stability and safety of the whole closed-loop system. therefore, when integrating neural networks into safety-critical control systems, the safety of the neural network needs to be guaranteed at all times, i.e., the safety verification of the neural network needs to be implemented. however, due to the sensitivity of neural networks to perturbations and the complex structure of neural networks, the verification of neural networks is extremely difficult. it has been demonstrated that the verification of simple properties of a small-scale neural network is an uncertainty polynomial (np) complete problem . a few results have been reported in the literature for the formal verification of systems consisting of neural networks, readers are referred to the recent survey . specifically, reachability analysis is one of the promising safety verification tools such as in - , a simulation-based approach is proposed that transforms the difficulty of over-approximating the neural network's output set into a problem of estimating the neural network's maximal sensitivity, which is formulated as a series of convex optimization problems , . polytope-operation-based approaches were developed in , , for dealing with a class of neural networks with activation functions of rectified linear units (relu). however, the scalability issue is the major barrier preventing applying these methods to large-scale neural networks as well as neural network control systems active in a long period of time which means a large amount of reachable set computation is required during the time of interest.in this paper, we propose a guaranteed model reduction method for neural network controllers based on the neural network reachability analysis and apply it to enhance the scalability of the reachability-based safety verification of closed-loop systems. firstly, a concept of model reduction precision is proposed to accurately measure the distance between the outputs of an original neural network and its reduced-size version, and an approach to compute the model reduction precision is proposed, which ensures that the difference between the outputs obtained from two neural networks for a given input interval, chosen with any identical input, is within the model reduction precision. this algorithm is then applied to the model reduction of the neural network control system, enabling computationally efficient verification processes based on the reduced-size neural network controller. finally, the correctness and feasibility of our approach are verified by applying it to the safety verification through the adaptive cruise control (acc) case study.the remainder of the paper is organized as follows: preliminaries are given in section ii. the guaranteed model reduction of neural networks is presented in section iii. the reachable set computation and safety verification algorithm for the neural network control system are presented in section iv. the evaluation on the adaptive cruise control system is given in section v. 
the conclusion is given in section vi.in this paper, we propose a guaranteed model reduction method for neural network controllers based on the neural network reachability analysis and apply it to enhance the scalability of the reachability-based safety verification of closed-loop systems. firstly, a concept of model reduction precision is proposed to accurately measure the distance between the outputs of an original neural network and its reduced-size version, and an approach to compute the model reduction precision is proposed, which ensures that the difference between the outputs obtained from two neural networks for a given input interval, chosen with any identical input, is within the model reduction precision. this algorithm is then applied to the model reduction of the neural network control system, enabling computationally efficient verification processes based on the reduced-size neural network controller. in actual applications, the neural network receives input and generates output in a fraction of the computation time, so the control input generated by the neural network is generally discrete, generated only at each sampling time point t k , k ∈ n, and then remains a constant value between two successive sampling time instants.where η denotes the output of the -th layer of the neural network, and in particular η 0 ∈ r n0 is the input to the neural network and η l ∈ r n l is the output produced by the neural network, respectively. definition 2: a set y e is called an output reachable set over-approximation of neural network(5), if y ⊆ y e holds, where y is the output reachable set of neural network(5).definition 5: safety specification s formalizes the safety requirements for state x(t) of neural network control system (1), and is a predicate over state x(t) of neural network control system(1)., guaranteed neural network model reduction, so that the reachable set computation can be efficiently performed on a significantly reduced-size neural network and then mapped back to the original neural network to reach safety verification conclusions.in this section, we apply neural network model reduction and model reduction precision to a neural network control system.proposition 1: given neural network φ, its reduced-size version φ with output set ŷ, and model reduction precision ρ(φ, φ, u), the output reachable set of original neural network φ satisfies.the reachable set estimation for a sampled-data neural network control system in the form of (4) generally involves two parts: 1) output set computation for neural network controllers denoted by., 5 layers with 20 neurons in each layer, through neural network model reduction, we replace the original neural network controller with a reduced-size neural network of hidden layer size 2 × 5, i. in summary, the simulations show that the closed-loop system with the reduced-size neural network can be used for safety verification of the original system as long as the model reduction precision can be provided.this paper investigates the problem of simplifying the safety verification of neural network control systems, proposes a concept of model reduction precision that characterizes the minimum upper bound on the outputs between a neural network and its reduced-size one, and proposes an algorithm to calculate the model reduction precision. by using a reduced-size neural network as the neural network controller and introducing the model reduction precision in the computation of the output reachable. 
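the model reduction precision described above is a guaranteed bound obtained through reachability analysis; the sketch below only probes it empirically, by sampling the input interval and recording the largest observed output deviation between the original network and its reduced-size version. it is therefore a lower estimate for illustration, not the proposed computation of the precision itself, and the function names are assumptions.

```python
import torch

@torch.no_grad()
def sampled_reduction_gap(phi, phi_hat, lower, upper, n_samples=10000):
    """lower/upper are tensors bounding the input interval; returns max_i ||phi(x_i) - phi_hat(x_i)||_inf."""
    worst = 0.0
    for _ in range(n_samples):
        x = lower + (upper - lower) * torch.rand_like(lower)   # sample the input interval
        gap = (phi(x) - phi_hat(x)).abs().max().item()         # output deviation for this input
        worst = max(worst, gap)
    return worst
```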
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/119.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/119.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf0079e1c33505284fc9c6376d795c114ceea15e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/119.txt @@ -0,0 +1 @@ +in today's data-driven world, deep learning techniques have become the predominant approach for computer vision tasks (such as image classification and object detection). most state-of-the-art (sota) deep learning models use large-scale labeled datasets (e.g., imagenet , jft-3b , instagram-3.5b ), a few of which are proprietary and cannot be leveraged by the public. it is challenging in practice to curate and annotate large labeled realworld datasets across different data domains and learning tasks. however, it is much easier to collect large quantities of unlabeled data in real-world domains (e.g., remote sensing imagery , medical imagery ). semi-supervised learning (ssl) techniques are designed to jointly leverage small labeled datasets along with large unlabeled datasets to improve model performance.self-training (st) is an iterative ssl method where a "teacher" model trained on the labeled data annotates the unlabeled data with pseudo-labels. the subsequent learning of the "student" model uses both the labeled and pseudo-labeled data. this process is iterated, as shown in fig. 1. the major caveat of pseudo-labeling is the introduction of noisy pseudo-labels from incorrect predictions by the teacher. these noisy pseudo-labels accumulate over time resulting in the model developing a bias toward incorrectly predicted pseudo-labels. this issue is known as the "confirmation bias" problem .ssl techniques that learn from limited labeled data employ consistency regularization techniques to reduce confirmation bias. another popular method for reducing confirmation bias when enough labeled data is available is the noisystudent (ns) pseudo-labeling approach that uses softmax confidence thresholding to filter out under-confident pseudo-label predictions. this approach also found that training a student model larger than the initial teacher made the student more robust to handle noisy pseudo-labels. to reduce confirmation bias, we explore multiple design choices and variations to the ns iterative learning pipeline.in this paper, we analyze existing methods and propose additional novel modifications that include using calibrated teacher models, entropy-based pseudo-label thresholding, and custom splitbatch sampling. the proposed enhancements are modular and can be adapted to work with multiple existing st methods. we demonstrate the use of the modifications to enhance st across multiple benchmark datasets. lastly, we present a practical scenario using realworld open set unlabeled data that contains both data be- longing to the target training classes and data from additional/unwanted classes. we demonstrate our enhanced st technique using an open set recognition approach integrated with our st pipeline to improve performance even when trained with challenging open set data.self-training (st)is an iterative ssl method where a "teacher" model trained on the labeled data annotates the unlabeled data with pseudo-labels. lastly, we present a practical scenario using realworld open set unlabeled data that contains both data be- longing to the target training classes and data from additional/unwanted classes. 
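as a small illustration of the entropy-based pseudo-label thresholding mentioned above, the following is a hedged numpy sketch, not the authors' exact pipeline: unlabeled examples whose teacher predictions have entropy above a threshold are discarded before student training. the threshold value and array shapes are illustrative assumptions.

```python
# sketch: filter pseudo-labels by prediction entropy (illustrative threshold).
import numpy as np

def entropy(probs, eps=1e-12):
    """shannon entropy of each row of an (n, num_classes) softmax output."""
    return -np.sum(probs * np.log(probs + eps), axis=1)

def select_pseudo_labels(teacher_probs, max_entropy=0.5):
    """keep only unlabeled examples whose teacher prediction is confident enough."""
    ent = entropy(teacher_probs)
    keep = ent <= max_entropy
    pseudo_labels = teacher_probs.argmax(axis=1)
    return keep, pseudo_labels[keep]

if __name__ == "__main__":
    # toy teacher outputs for 4 unlabeled images over 3 classes
    probs = np.array([[0.97, 0.02, 0.01],   # confident -> kept
                      [0.40, 0.35, 0.25],   # uncertain -> dropped
                      [0.05, 0.90, 0.05],   # confident -> kept
                      [0.34, 0.33, 0.33]])  # uncertain -> dropped
    keep, labels = select_pseudo_labels(probs, max_entropy=0.5)
    print(keep, labels)
```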
we demonstrate our enhanced st technique using an open set recognition approach integrated with our st pipeline to improve performance even when trained with challenging open set data.consistency regularization approachesfollow the data manifold assumption that perturbations applied to the training data, such as data augmentation, should not increase the likelihood of the predicted target labels switching classes. in the nsapproach, the initial teacher model is trained on labeled data and then used to generate pseudo-labels for the unlabeled data. st commonly uses a randomly collected (uniform) minibatch (used in ns) that tends to overfit due to their bias towards selecting a larger number of noisy pseudo-labeled than labeled data during training, as the unlabeled subset sizes are usually much larger than the labeled subset.lastly, the ns approach uses a smaller-sized initial teacher model trained on the clean labels and a larger student model (and thus a larger subsequent teacher) trained jointly on the labeled and pseudo-labeled examples. we found that the ns approach of using a smaller teacher (r18 for svhn and wrn28-2 for cifar-10 and cifar-100) and a larger student (r34 for svhn and wrn40-2 for cifar-10 and cifar-100) with model noise (dropout) is unnecessary once we add our selected design choices from previous experiments to reduce confirmation bias. the results in table9compare the ns approach with the smallersamesized (sss: using the smaller teacher model size as the student model size) and largersamesized (lss: using the larger teacher model size as the student model size) approaches. our est approach provided significant improvements in the small labeled scenario while providing slight improvements to the large labeled scenario (as the teacher model already has enough labeled examples for better learning performance).real-world unlabeled data can be from an open set that contains data belonging to the target classes (classes from labeled training data) and data from additional nontarget classes. we used simclrto learn a contrastive feature space from the labeled target data and the unlabeled data (contains target and non-target class examples). the labeled subset is made up of a closed set with 10 target classes (cifar-10 subset consisting of 4k images), and the unlabeled subset contains 110 total classes with 10 target classes (cifar-10 subset consisting of different 42k images) and 100 non-target classes (cifar-100 subset consisting of 42k images).9 led to the best closed set validation accuracy for ns and est, respectively. from the results in table12, we observed that our est approach performed better than the ns approach showing that our proposed enhancements for handling noisy pseudo-labels extend to open set data as well by providing some basic filtering of pseudo-labels belonging to non-target classes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/12.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/12.txt new file mode 100644 index 0000000000000000000000000000000000000000..144fb74967fae6931358cf6235d8dd1a6b75c468 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/12.txt @@ -0,0 +1 @@ +the traffic alert collision avoidance system (tcas) has been an integral part of the increased safety of air transport since it was federally mandated in the 1991 for all passenger carrying aircraft with more than 30 seats flying in u.s. airspace . 
tcas led to a dramatic reduction in the occurrence of mid air collisions in modern aviation; however the heuristic based approach undertaken in tcas has made it difficult to adapt the system to the evolving complexity of the national airspace system (nas), which includes new cooperative surveillance systems (e.g., ads-b) and new vehicle entrants. in response, the federal aviation administration (faa) commissioned the development of a replacement for tcas. this new system, referred to as the next generation airborne collision avoidance system x (acas x), which is currently in development at mit lincoln laboratory and john hopkins applied physics laboratory, is expected to integrate into multiple aircraft platforms and reduce nuisance alerts as well as reduce the risk of near mid air collisions (nmac) . acas x introduced several variants designed to reduce the risk of nmac for a particular operation, such as commercial aviation (acas xa) , large uncrewed aerial systems (acas xu) , smaller uncrewed aerial vehicles (acas sxu) , and acas xr which is under development for advanced air mobility and helicopter operations. each variant adds capabilities and design considerations for the operational environment and platforms that will be commonly seen by the acas x equipped vehicle. for example, acas sxu introduced vehicle to vehicle surveillance to accommodate a future link that suas may use to interrogate and coordinate with each other. while, acas xu added remain well clear alerting due to its use in remotely piloted or autonomous uas. core to the acas x family of collision avoidance systems is the approach of modeling the collision avoidance problem as a partially observable markov decision processes (pomdp), which encompasses the state space of interest, allowable actions, the probability of transitioning between states, and reward model dictating the tuning objective.the pomdp is solved through value iteration, a dynamic programming (dp) algorithm, to calculate a q-function representing the value gained from taking an action, 𝑎, from the current state, 𝑠. in prior collision avoidance solutions the q-function has been represented as a lookup table that approximates the continuous collision avoidance problem as a set of discretized states and their optimal action . a shortcoming of the table representation is the memory footprint required to store the combination of all state variables and associated actions. previous work has attempted to tackle the memory footprint problem of a horizontal collision avoidance logic by representing similar table regions with a single equivalence class and achieving a 50% reduction in memory footprint with minimal degradation in safety . the second approach taken by used a representative horizontal logic lookup table as the training objective for a neural network to compress the gigabyte size table into a megabyte size file, but failed to run at the desired 1 hz cycle required by acas x. thus, the lookup table approach continues to be the solution to maintain the execution constraint in avionics hardware; however the state space variables needed to represent the three dimensional collision avoidance problem would result in an intractable lookup table encompassing terabytes of memory. to overcome the dimensionality curse, previous work has focused on dividing the collision problem into two sub-problems, horizontal and vertical collision avoidance with separate lookup tables. 
the independent pomdp solutions produce collision avoidance systems that are found to be safe, operationally suitable, and robust to increased airspace complexity. nevertheless assuming compute power will increase, representing the pomdp as a neural network may allow an integrated three dimensional logic to reduce alerting complexity and increase safety.an alternative solution used this approach and solved the pomdp using deep reinforcement learning . the multi-layer perceptron (mlp) neural network representation eliminated the need to separate the collision problem into two sub-problems, allowing for a greater level of coordination between the two axes. although this approach used the pomdp specified by the dp approach its performance fell short of the dp approach, partially because the parameters for the reward model were not properly optimized. this work attempts to improve the performance of the model developed by by introducing a surrogate optimization technique to optimize the parameters of the reward model. the remainder of the paper is organized as follows. section ii provides background information relevant to this work. section iii describes the structure of the reward model. section iv applies the surrogate optimizer to the reward model. section v discusses the results from the modified reward model. this new system, referred to as the next generation airborne collision avoidance system x (acas x), which is currently in development at mit lincoln laboratory and john hopkins applied physics laboratory, is expected to integrate into multiple aircraft platforms and reduce nuisance alerts as well as reduce the risk of near mid air collisions (nmac). acas x introduced several variants designed to reduce the risk of nmac for a particular operation, such as commercial aviation (acas xa), large uncrewed aerial systems (acas xu), smaller uncrewed aerial vehicles (acas sxu), and acas xr which is under development for advanced air mobility and helicopter operations. core to the acas x family of collision avoidance systems is the approach of modeling the collision avoidance problem as a partially observable markov decision processes (pomdp), which encompasses the state space of interest, allowable actions, the probability of transitioning between states, and reward model dictating the tuning objective. in prior collision avoidance solutions the q-function has been represented as a lookup table that approximates the continuous collision avoidance problem as a set of discretized states and their optimal action. the second approach taken byused a representative horizontal logic lookup table as the training objective for a neural network to compress the gigabyte size table into a megabyte size file, but failed to run at the desired 1 hz cycle required by acas x. thus, the lookup table approach continues to be the solution to maintain the execution constraint in avionics hardware; however the state space variables needed to represent the three dimensional collision avoidance problem would result in an intractable lookup table encompassing terabytes of memory. although this approach used the pomdp specified by the dp approachits performance fell short of the dp approach, partially because the parameters for the reward model were not properly optimized. formulating an mdp requires an environment state definition, actions the actor is able to implement, a transition function that describes how the state evolves given an action, and a reward model that defines the desirability of the agent's action. 
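to make the q-function / lookup-table idea above concrete, here is a minimal value-iteration sketch on a made-up discretized mdp; the states, actions, transition probabilities and rewards are placeholders, not the acas x model. it produces the kind of q-table that the text says is later stored or compressed.

```python
# sketch: value iteration producing a q-table for a toy discretized mdp.
import numpy as np

def value_iteration(P, R, gamma=0.95, tol=1e-6, max_iter=1000):
    """P[a, s, s'] = transition probability, R[s, a] = immediate reward.
    iterates Q[s, a] = R[s, a] + gamma * sum_s' P[a, s, s'] * max_a' Q[s', a']."""
    n_actions, n_states, _ = P.shape
    Q = np.zeros((n_states, n_actions))
    for _ in range(max_iter):
        V = Q.max(axis=1)                               # optimal value per state
        Q_new = R + gamma * np.einsum("ast,t->sa", P, V)
        if np.max(np.abs(Q_new - Q)) < tol:
            return Q_new
        Q = Q_new
    return Q

if __name__ == "__main__":
    # toy 3-state, 2-action problem (placeholder numbers, action 1 = "alert")
    P = np.array([[[0.9, 0.1, 0.0], [0.0, 0.9, 0.1], [0.0, 0.0, 1.0]],   # action 0
                  [[0.1, 0.9, 0.0], [0.0, 0.1, 0.9], [0.0, 0.0, 1.0]]])  # action 1
    R = np.array([[0.0, -1.0], [0.0, -1.0], [10.0, 10.0]])               # small cost for alerting
    Q = value_iteration(P, R)
    print("q-table:\n", Q, "\npolicy:", Q.argmax(axis=1))
```

in the real systems described above the discretized state space is far larger, which is exactly why the resulting table becomes gigabytes or terabytes in size and motivates the neural network representations discussed in the text.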
however, we cannot assume that each actor has a perfect model of the current state, so we modify the mdp into a pomdp, which allows the actors to make decisions with a probabilistic model of the current state. this modification is useful for aircraft collision avoidance because the ownship, the aircraft which contains the collision avoidance logic, cannot know the exact locations of intruder aircraft (i.when the surrogate optimizer is applied to the reward model, we optimize three reward parameters: alert, reversal, and cease alert, where each parameter has the domain . we then train a model using the drl training pipeline and evaluate the performance of that model in our simulation framework using a set metrics that report how often each undesirable state or action occurs. if the reward model parameters are unstable, then the performance of the trained model is partially attributable to the initialization of the random seed and thus is not a good candidate for the final reward model.to compare our final results, we test the best model from iteration 46 against both the default reward model parameters and acas xu.when comparing this model to the untuned version, we can see that it issues an alert in a much larger region of space, causing the tuned alert rate to be about 35% higher than the untuned model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/120.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/120.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab1c57ba3fec5f1ad0a0ea16b439c95e561be73b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/120.txt @@ -0,0 +1 @@ +data mining is a powerful technique for uncovering valuable insights and knowledge hidden within large sets of data. it utilizes a combination of methods from machine learning, statistics, and database systems to extract patterns and models from the data. as a crucial aspect of machine learning, data mining plays a key role in actionable knowledge discovery (akd), which is the process of extracting actionable insights from large datasets.one of the key advancements in data mining is the shift from data-driven to domain-driven methods. this approach focuses on applying data mining techniques within specific business domains, making the process more relevant and valuable to those specific industries. this approach also makes the process more technically significant and allows for the implementation of data mining in real-world applications.this paper aims to explore the various methods used in actionable knowledge extraction for different domains of usage. different techniques and approaches will be examined and discussed, with an emphasis on their strengths and limitations. additionally, the paper will provide a thorough analysis of the current state of the field, including a review of related research and existing datasets used in the field. the paper will also cover the main methods used by various research studies, evaluations of their advantages and disadvantages, and a discussion of any novel or new solutions presented in the field.the studyaimed to extract actionable knowledge about domestic violence (dv) from twitter data using data mining techniques. the study used pattern mining, mapreduce architecture and clustering to process the data and improve the classification accuracy and interpretability of the data. 
business management aims to improve productivity and effectiveness of work, and data mining techniques are a bridge to the gap of company data and actionable knowledge. each case study provides detailed information on the problem at hand, the data used, the methods applied, and the results obtained, making it a valuable resource for practitioners and researchers interested in the application of data mining techniques in different industries. the algorithm involves data collection, data cleaning, data preprocessing, and building customer profiles using an improved decision tree learning algorithm.the proposed conceptual framework integrates knowledge management and data mining to create a robust decisionmaking model for organizations by making knowledge actionable. data mining is used in the knowledge stage of the knowledge management process, where descriptive algorithms and business rules are applied using insight gained through environmental scanning, swot analysis, strategic planning, and cost-benefit analysis.descriptive analytics is a group of techniques within data mining that enables organizations to gain insights and feedback about their internal performance, operations, and organizational effectiveness by summarizing, generalizing and describing data. additionally, sophisticated multivariate data analysis statistical methods such as analysis of variance (anova), k-means clustering, correlation and regression analysis can be used to identify patterns and anomalies in data. it uses statistics and sophisticated multivariate data analysis statistical methods to summarize, generalize and describe a set of data, identify patterns, and anomalous data, which can help organizations to make data-driven decisions and improve their operations. fuzzy logic is particularly useful in data mining because it allows for the use of qualitative knowledge in the data mining process. this makes fuzzy logic particularly useful in data mining, as it allows for the use of qualitative knowledge such as human intuition, expert judgment, and natural language descriptions in the data mining process. it provides the ability to utilize qualitative knowledge in the data mining process, which can help organizations to make more accurate predictions, identify patterns, and make better decisions based on their data. it can identify key words and provide pattern recognition within text fields in data sets, which can help organizations to make more accurate predictions, identify trends, and make better decisions based on their data. this process can be applied to various types of data, including structured and unstructured data, and can be used in a wide range of applications, such as customer segmentation, fraud detection, and risk management. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/121.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/121.txt new file mode 100644 index 0000000000000000000000000000000000000000..531065783a23448aa1f6a8b672ba840edc42feac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/121.txt @@ -0,0 +1 @@ +t he development of ai technologies is of strategic sig- nificance to the semiconductor industry because of their potential for reducing the capital footprint and improving cycle time and yields. there is a significant opportunity to innovate novel machine learning techniques to make our manufacturing processes and controls smarter and more efficient. 
soft sensing, being one of those emerging techniques, plays an important role in monitoring industrial processes. in general terms, soft sensing models can be defined as inferential models that use easy-to-measure variables (e.g. online available sensors) for online estimation of hard-to-measure variables (e.g. quality variables). however, the development of ai-based soft sensing models is extremely sluggish and erratic. the major concern is that most semiconductor manufacturing systems are customized for specific applications with limited scalability. the ai model development in semiconductor industries suffered from poor flexibility and always involved extensive prior knowledge of semiconductor manufacturing mechanisms. in this paper, we aim at developing a purely data-driven machinelearning-based soft sensing model for regression applications, specifically for semiconductor metrology, providing a practicable solution for semiconductor industries.soft sensing for regression applications are predictive models that use online available sensors recordings (e.g. pressure, voltage, etc.) to predict quality indicators that cannot be automatically measured at all, or can only be measured at the follow-up metrology stage which is time-consuming, high cost, or sporadically. during the wafer manufacturing process at seagate factories, there are a lot of sensor data stored from the processes. this is the basis for the subsequent use of such data for the development of related data-driven softsensing models. the semiconductor manufacturing process is becoming increasingly more complex and longer, resulting in a significant increase in cost and difficulty in measuring the key quality indicators. after a wafer is manufactured, the engineers use metrology tools to do the time-consuming laboratory analysis. it is highly desirable to develop a softsensing system for defective wafer detection, to save the time and capacity those metrology tools.there are two categories of soft-sensing models, white-box physical models and black-box data-driven models. modeling by physical approach requires prior solid knowledge and usually focuses on the ideal steady-state processing, which is not feasible for complex systems. the data-driven models are based on historical observations of the process and are able to predict the real conditions of the process while physical models are unable to do. according to the sensor data collected in seagate factories, it's impossible to have a physical model for regression application in metrology. therefore, the remaining difficulties are related to the difficulty of choosing the correct model type and structure necessary for the development.there are several difficulties in soft sensing regression. first, the dependencies between sensor readings and measurements might be very complicated. second, there is variability in the engineering decision of whether a wafer is pass or fail. third, there are a lot of missing data and even measurement errors in the dataset. due to the above characteristics in data, a nonlinear data-driven model should be the best choice.our approach to dealing with soft sensing regression is based on deep learning methods. for a wafer, its sensor readings include one or multiple time steps, and we call them sensor time steps. more details are explained in section iv. deep learning models, such as long short-term memory (lstm) network, gru , and transformer , are popular tools to tackle sequential data. 
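the following pytorch sketch illustrates the kind of lstm-based soft-sensing regressor described here, including an embedding layer for a categorical field in each sensor time step (a detail discussed further below); the layer sizes, feature counts and categorical vocabulary are invented placeholders, not the paper's architecture.

```python
# sketch: lstm regressor over sensor time steps, with an embedding for one
# categorical field per time step (all sizes are illustrative placeholders).
import torch
import torch.nn as nn

class SoftSensingLSTM(nn.Module):
    def __init__(self, n_numeric=16, n_categories=32, emb_dim=8, hidden=64):
        super().__init__()
        self.embed = nn.Embedding(n_categories, emb_dim)        # categorical sensor field
        self.lstm = nn.LSTM(n_numeric + emb_dim, hidden, batch_first=True)
        self.head = nn.Linear(hidden, 1)                         # predicted measurement

    def forward(self, numeric, categorical):
        # numeric: (batch, steps, n_numeric); categorical: (batch, steps) int ids
        x = torch.cat([numeric, self.embed(categorical)], dim=-1)
        out, _ = self.lstm(x)
        return self.head(out[:, -1, :]).squeeze(-1)              # use last time step

if __name__ == "__main__":
    model = SoftSensingLSTM()
    numeric = torch.randn(4, 10, 16)                  # 4 wafers, 10 sensor time steps
    categorical = torch.randint(0, 32, (4, 10))
    target = torch.randn(4)
    loss = nn.functional.l1_loss(model(numeric, categorical), target)  # l1-style loss
    loss.backward()
    print("prediction shape:", model(numeric, categorical).shape, "loss:", float(loss))
```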
the recurrent structure in lstm is capable of handling sequential data of different lengths. the input gate, output gate and forget gate in lstm prevent the vanishing gradient problem to some degree. lstm-based models are widely applied in natural language processing (nlp) and related tasks, such as text classification , machine translation and speech recognition . since our sensor time steps are also sequential data like sentences in nlp, it's a natural idea to apply these nlp tools into our task. a sensor time step corresponds to a word in the sentence.the main contributions of this work include the following: first, we present the wafer soft sensing regression dataset in detail, and provide a way of understanding and preprocessing it. second, we design two loss functions to handle the different precision requirements of the wafers. third, we formulate the somewhat subjective evaluation criteria by engineers into several mathematical evaluation criteria.the rest of this article is organized as follows. section ii summarizes some existing works related to our soft sensing regression problem. section iii introduces our model architecture and loss functions. our data and our data preprocessing method are explained in section iv. experiments are shown in section v. finally, we have our discussions and conclusion in section vi. for a wafer, its sensor readings include one or multiple time steps, and we call them sensor time steps. since our sensor time steps are also sequential data like sentences in nlp, it's a natural idea to apply these nlp tools into our task. second, soft sensing regression can tell us how far the measurement deviates from acceptable values, but soft sensing classification only tells us the wafer is pass or fail.to deal with sequential data like sensor time steps, we choose lstm to build our model as explained in section iii-a. the embedding layer between the sensor time steps and the lstm structure can help dealing with the categorical variables in the sensor time steps. in the sensor dataset, except the hard sensor types of data, it also contains the soft contextual sensor data -textual reports, such as textual information of the multi-stage manufacturing process, tools, processing modules, etc. the other wafers' measurements data inherit the metrology measurements from that monitor wafer's measurements data.we thus use monitor data for training the regression model and use non-monitor (product wafer) data for checking the pass/fail results of wafers. data preprocessing 1) before joining sensor and measurements data: the data is split into training, validation and test set by ratio 7:2:1.in order to deal with date and time information in the sensor data, we convert it into two features: time in the current day and date in the current year.2) join sensor and measurements data: during model training, for every sample we need to use both the sensor data and measurements data, so it would be helpful if we can join them before model training. note that we only need nonmonitor measurements data in our regression task, we only need to join sensor data and non-monitor measurements data. in our evaluation, we use the monitor data in measurements data and define a fail wafer as a wafer which satisfies all the following conditions:. 
for example, after the same normalization, we can easily replace the l1 loss by l2 loss or huber loss so normalized l1 loss is more general.using the lstm-large model trained on all (kqi, type), and we predict a wafer to be a fail wafer if and only if the predicted meas med ŷ is outside the interval (b * 1 , b * 2 ), we get recall rate as 0. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/122.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/122.txt new file mode 100644 index 0000000000000000000000000000000000000000..35e72f1007db40ba28160b4d5c1a4ca4532c98b1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/122.txt @@ -0,0 +1 @@ +in steel strip manufacturing, monitoring the surface texture resulting from the galvanizing and temper rolling process inline (i.e. during production) allows for fast feedback for line parameter adjustments. conventional slow measurements performed during post-production using a stylus are essential for ensuring the strip steel produced has the correct properties and satisfies customer requirements. however, post-production measurements give slow feedback which is only useful for later rolls. therefore, fast feedback control is required for real time changes to steel surface. this has the potential to reduce value loss by reducing the likelihood of the surface texture being found inadequate during postproduction testing. however, for adjustments to be made based on fast feedback, it is essential that the inline measurements are accurate. compared to conventional stylus methods of post-production measurements, the method of inline measurement we are using are not as consistent.the method for inline measurements we use involves firing a laser at the steel surface and measuring the reflected angle. these angles represent the gradient of the steel in the measurement positions and can be used to calculate the surface profile and therefore, the surface properties. however, as previously mentioned, this method produces inconsistent results. the exact reason is unknown, though we speculate that it may be because the sensors measuring the angle are discrete, or because they are lined up in one dimension. the steel surface can reflect light in two dimensions; thus, the array might not capture sufficient information for stitching the gradients when the light reflects away from the measurement dimension. another potential reason is that the data does not have a high enough resolution in the time dimension, with not enough sampling. to overcome these issues, we propose the use of machine learning to learn the unexplained transformation from our raw reflected light intensity measurements to surface properties, thus allowing us to make use of fast feedback control. we demonstrate the effectiveness of this approach by predicting the ra roughness parameter (defined in section 3), however the same method can be applied for predicting many other surface parameters, either together in the same model, or separately. for the use in fast feedback control, accuracy is not the only consideration when modelling the transformation; the speed of the model chosen is also of great importance. each sample from the dataset used in this study is relatively large. it has a high resolution with 5cm of sample reading having 20 channels and 50,000 timesteps. the line speed is typically between 50cm/s and 300cm/s, meaning that to perform prediction at line speed we need to be able to run 60 examples per second. 
the combination of high resolution and fast line speed compounds the need for a faster model for our use case.we have chosen to interpret this problem as a time series extrinsic regression (tser) problem due to the possibility for each of the feature dimensions having independent partial occlusion, as each is provided by a separate sensor. however, this problem could also be interpreted as an image regression problem due to the spatial relation in two dimensions. the decision was made in hope of a more robust model, able to deal with variation between sensors. more is written about this in the data normalization section below.time series classification (tsc) problems are related to tser problems . both tsc and tser take input data and transform it into a representation which is used by an output model or layer to predict either a class, in the case of tsc, or continuous value, in the case of tser. unlike in time series forecasting, where the predicted value is a future value of the series or a value of a different data type which is more dependent on recent values, tser is a more general form where the output data can take any type and be related to any part of the input series, such as in our use case.the rocket model performs well while being exceptionally fast on univariate tsc . it has also been shown to work on tser problems . it has both a univariate and multivariate version, with success on multivariate problems documented . for its success when applied to both multivariate problems and tser, the rocket model is a good choice for our use case.computation speed is an important aspect when producing a model. with increased speed and efficiency, it becomes feasible to create more complicated models with more calculations. the highly influential "alexnet" , can attribute a lot of its success to the gpu speed up. the paper showed that the model beat other state of the art models by a significant margin on the imagenet dataset , showing that increased depth and complexity of the model improves performance. this was made feasible through gpu computation.in this paper we have chosen to reimplement rocket on gpu for the purpose of performing predictions at a pace more in line with the steel production line speed of our use case. however, the benefits of the gpu implementation go beyond our use case, as rocket is a well-established baseline, so improving its speed offers a better baseline for comparisons; also, given its increased speed, further improvements to its accuracy could be made by increasing the default number of kernels. therefore, this work has the possibility for use in the wider tsc and tser communities, with the potential to speed up the model generally or by making adaptations feasible to increase model complexity in ways that might improve performance in future work.we also show that the gpu implementation is faster than the cpu-based minirocket model which was later introduced as an improvement to the rocket model in terms of computational efficiency while maintaining similar accuracy. this version has the added benefit of allowing the potential for gradient flow and training, but we do not make use of this in the paper. both tsc and tser take input data and transform it into a representation which is used by an output model or layer to predict either a class, in the case of tsc, or continuous value, in the case of tser. 
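as a rough sketch of the rocket idea discussed above (random convolutional kernels, ppv pooling, then a ridge regressor), here is a small numpy / scikit-learn version for univariate series; the kernel sampling is simplified relative to the published method (no dilation or max pooling) and the data is synthetic.

```python
# sketch: simplified rocket-style transform (random kernels + ppv pooling)
# feeding a ridge regressor; kernel sampling is simplified vs. the real rocket.
import numpy as np
from sklearn.linear_model import Ridge

def random_kernels(n_kernels, rng, lengths=(7, 9, 11)):
    kernels = []
    for _ in range(n_kernels):
        length = rng.choice(lengths)
        w = rng.normal(size=length)
        w -= w.mean()                        # centred weights, as in rocket
        b = rng.uniform(-1.0, 1.0)           # random bias
        kernels.append((w, b))
    return kernels

def transform(X, kernels):
    """X: (n_series, series_length). returns one ppv feature per kernel:
    the proportion of convolution outputs that are positive."""
    feats = np.empty((len(X), len(kernels)))
    for j, (w, b) in enumerate(kernels):
        for i, x in enumerate(X):
            conv = np.convolve(x, w[::-1], mode="valid") + b
            feats[i, j] = np.mean(conv > 0)
    return feats

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.normal(size=(100, 200))                        # synthetic "surface" series
    y = X.std(axis=1) + 0.01 * rng.normal(size=100)        # toy roughness-like target
    kernels = random_kernels(200, rng)
    model = Ridge(alpha=1.0).fit(transform(X, kernels), y)
    print("train r^2:", model.score(transform(X, kernels), y))
```

because the transform is just many independent convolutions and threshold counts, it maps naturally onto batched gpu tensor operations, which is the basis of the gpu reimplementation described here.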
unlike in time series forecasting, where the predicted value is a future value of the series or a value of a different data type which is more dependent on recent values, tser is a more general form where the output data can take any type and be related to any part of the input series, such as in our use case. however, the benefits of the gpu implementation go beyond our use case, as rocket is a well-established baseline, so improving its speed offers a better baseline for comparisons; also, given its increased speed, further improvements to its accuracy could be made by increasing the default number of kernels. therefore, this work has the possibility for use in the wider tsc and tser communities, with the potential to speed up the model generally or by making adaptations feasible to increase model complexity in ways that might improve performance in future work.we also show that the gpu implementation is faster than the cpu-based minirocket modelwhich was later introduced as an improvement to the rocket model in terms of computational efficiency while maintaining similar accuracy.as "exceptionally fast and accurate", in contrast, most other state of the art models are computationally expensive. they also remove the max pooling feature, only doing the ppv pooling operation. later, chang wei tan et al. this results in ~50,000 features compared to the ~10,000 of minirocket. the model consists of three main parts: (1) random convolutional transformation: here, convolutional kernels that have been initialized with random parameters and weights are applied to the input data, performing a linear transformation. the output from this model is then fed into a ridge regressor, however, a neural network based linear regressor could also be used to have an end-to-end pytorch model on the gpu. since the authors of the rocket model have provided minirocket as a faster alternative to rocket with similar accuracy, we also include timing experiments for minirocket.the minirocket experiments are faster that the rocket experiments when both are using very small batch sizes; however, the gpu rocket implementation overtakes sktime minirocket at a batch size of 20 as shown in in figure3a. relating to the usability of the model for our use case, we find that the results produced by the model are within the stylus measurement error, and that we are able to perform slightly above 5 transformations a second. given the success of ppv in the rocket model, experiments could be done to judge the viability and accuracy when using softppv as the global pooling operation as opposed to the normal gap layer in current deep learning models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/123.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/123.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d0b87360f28f7e313014699a06f32f0e408aedf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/123.txt @@ -0,0 +1 @@ +any machine learning model has learned something about the training data, so the leakage of a trained model may cause privacy concerns. if the training data contains private information, adversaries can leverage the visible information and capture the private data they want. several studies have explored this possibility. for example, the membership inference attack (mia) compromises privacy by predicting whether a data point is a part of the model's training data. 
another study proposed a reconstruction attack which aims to reconstruct an exact data point in the training set. however, it requires the attacker to have more knowledge beyond the trained model, such as all the data points, neural networks' weight initialization, and learning rate schedule. this work focuses on the reconstruction attack as in , but it requires less prior knowledge. more specifically, given a trained classification model and a distribution of the attack target, our adversary can generate the portrait of a previously invisible class (as shown in figure 1). this type of reconstruction attack can also threaten the privacy of the training data. for example, imagine we have a face recognition model which classifies the image input as a specific person. if a malicious party retrieves this model, the party can generate a sample image for each person.a human knows what his/her best friend looks like, but if this human is asked to draw his/her best friend a picture, this human may fail due to poor drawing skills. this phenomenon also applies to the trained neural network models. a trained classification model has learned how to process an image and do the classification. suppose we query this model for a sample image of a specific class by fine-tuning the model's input. in that case, the tuned input will be a noise (figure 2), regardless it grants 100% classification confidence on the attack target. this happens because the classification model is not perfectly robust. therefore, we use the distribution of attack targets to guide image generation.there are two requirements to evaluate a reconstructed image. first, this image should maximize the classification confidence of the attack target. second, it should belong to the distribution of the attack target. since the distribution of the attack target is invisible, we can approximate this using other distributions (for example, for a face recognition model, any distribution of face images can approximate the distribution of the attack target).to satisfy the requirement of the generated distribution, we will use latent variable generative models (lvgm) to learn from a distribution. there are various types of lvgms, and the options include the generative adversarial network (gan) , the variational autoencoder (vae) , and the diffusion related models , , and . for different lvgms, the methodologies for this reconstruction are also different, and the diffusion model is the focus of this work. notice that the goal of this work is different than , where they aimed to reconstruct the exact image in the training set. however, here we want to reconstruct an image with the same class label and a similar distribution to the attack target.moreover, the difference between this task and conditional image generation is that conditional image generation provides the entire data set, giving full knowledge of all classes. in contrast, for this work, a trained neural network model provides the knowledge of target classes in an implicit manner. more specifically, given a trained classification model and a distribution of the attack target, our adversary can generate the portrait of a previously invisible class (as shown in figure1). since the distribution of the attack target is invisible, we can approximate this using other distributions (for example, for a face recognition model, any distribution of face images can approximate the distribution of the attack target). 
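to illustrate the guided-generation idea above (as opposed to naively optimizing raw pixels, which yields noise), the sketch below optimizes the latent input of a frozen generator so that the decoded image maximizes a frozen classifier's confidence on the attack target; `generator` and `classifier` here are placeholder modules standing in for whatever pretrained lvgm and target model an attacker would actually hold.

```python
# sketch: optimize a generator latent so the decoded image maximizes the
# classifier's confidence on a target class ("generator" and "classifier"
# below are placeholder modules standing in for real pretrained models).
import torch
import torch.nn as nn

latent_dim, n_classes, target_class = 64, 10, 3

generator = nn.Sequential(nn.Linear(latent_dim, 256), nn.ReLU(),
                          nn.Linear(256, 28 * 28), nn.Tanh())        # placeholder lvgm
classifier = nn.Sequential(nn.Linear(28 * 28, 128), nn.ReLU(),
                           nn.Linear(128, n_classes))                # placeholder target model
for p in list(generator.parameters()) + list(classifier.parameters()):
    p.requires_grad_(False)                                           # both models stay frozen

z = torch.zeros(1, latent_dim, requires_grad=True)                    # only the latent is tuned
optimizer = torch.optim.Adam([z], lr=0.05)
for step in range(200):
    optimizer.zero_grad()
    image = generator(z)
    logits = classifier(image)
    loss = nn.functional.cross_entropy(logits, torch.tensor([target_class]))
    loss.backward()
    optimizer.step()

confidence = torch.softmax(classifier(generator(z)), dim=1)[0, target_class]
print("confidence on attack target:", float(confidence))
```

because the image is always produced by the generator, it stays on (an approximation of) the target data manifold, which is the role the distribution of the attack target plays in the discussion above.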
however, here we want to reconstruct an image with the same class label and a similar distribution to the attack target.moreover, the difference between this task and conditional image generation is that conditional image generation provides the entire data set, giving full knowledge of all classes. the figure2: this image on the right is generated by maximizing the classification possibility on an attack target (image on the left). the discriminator d takes an image as input and outputs the prediction of whether this image is fake (generated by the generator) or real (belonging to the target distribution). in the first part of this loss function, the generator's objective is to create an image, input this image to the classifier, and maximize the confidence that this generated image to be classified as the attack target. notice that this objective function is similar to (1), while the only difference is that the generated image is specified to maximize the classification result of the attack target.to train this generator, we first generate a temporary data set for the discriminator, which includes both generated images and the images from the target distribution. the well-trained vae generator and diffusion model have learned how to generate an image to the target domain, so when interpolating two images, the generated interpolations will also belong to the target domain. for this reconstruction attack, the insight can be interpreted from the perspective of image interpolation: would there be an interpolation of images that happens to be our attack target?. we sample the diffusion model using this noise and input the denoised image into the classifier, and we want this specific noise to maximize the prediction of the attack target.different from the reconstruction attack in, we cannot directly compute the difference between the generated result and the training set because we are not reconstructing any specific image but a class. to quantitatively analyze the generation of the target class, we input the generated images into the c 2 classifier and collect the confidence that the generated images are classified as the attack target.in this work, we explored the potential approaches to generate an image for a target class when provided with a trained classification model and an approximate distribution of the attack target. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/124.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/124.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d111ef0bc6fbc46e74b947cfe572671b5964f5e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/124.txt @@ -0,0 +1 @@ +the rising popularity of modern mobile devices such as smartphones, tablets or wearables and their now ubiquitous availability led to a vast amount of collected data. additionally, such devices can determine the user's current location and may offer positiontailored information and services. however, resource (energy) limitations apply. in contrast to energy saving, embedded systems, smartphones are typically at the upper hardware limit. when dealing with gps information, the data analysis is being outsourced to data centers, often. a common way to transmit the data from the mobile devices to central servers is via cellular or satellite networks. 
according to meratnia and de by (meratnia and de by, 2004), around 100mb of storage size are needed, if just 400 objects collect gps information every ten seconds for a single day. considering large vehicle fleets or mobile applications with millions of users tracking objects for long time spans, it is obvious that there is a need to optimize the transmission and storage of trajectories through compression.in this paper, we investigate and evaluate the approach of autoencoders for compression and reconstruction of gps trajectories. for evaluation, we identify adequate distance metrics that measure the similarities between trajectories to evaluate the method and compare it to existing handcrafted compression algorithms, such as douglas-peucker. as main contribution, we will answer the following research questions: 1) how well does an autoencoder model perform in trajectory compression and reconstruction compared to traditional line simplification methods? 2) how well does an autoencoder model perform for different compression ratios and different trajectory lengths?the paper is structured as follows: section 2 holds related work. section 3 presents our methodology. evaluation is done in section 4, and finally, section 5 concludes the paper. as main contribution, we will answer the following research questions: 1) how well does an autoencoder model perform in trajectory compression and reconstruction compared to traditional line simplification methods? 2) how well does an autoencoder model perform for different compression ratios and different trajectory lengths?. all the points of the trajectories as well as the rescaling values and the latent variable of the autoencoder are stored in 32 bit each, so that the number of values in the original and compressed representation can be directly used to calculate the compression ratio.trajectories of sequence length 20 the autoencoder reconstructs the intermediate compression back to the original sequence length of 20 points, while douglas-peucker yields 10, 5 and 3 points, respectively. for the autoencoder reconstruction the mean euclidean distance between points can be calculated intuitively, while the trajectories that are compressed with douglas-peucker have to be interpolated first in order to equalize the sequence lengths.we notice, that the autoencoder performs worse than douglas-peucker for low compression ratio, which is the opossite for higher compression ratios. we assume, this is due to the way douglas-peuker selects a subset of the original trajectory: for a compression ratio of 2, half of the points in the compressed trajectory stay exactly the same. therefore, the comparison method favors douglas-peucker, but the autoencoder model still performs better, at least for higher compression ratios within our settings.when analyzing the mean discrete fréchet distances, the autoencoder performs significantly better than douglas-peucker across all compression ratios. while the autoencoder performs slightly better in terms of fréchet distance for a compression ratio of 2, it performs worse for the other two ratios. the autoencoder model performs worse for a compression to half of the original size, while higher compression ratios produce similar results with a tendency in favor of the autoencoder. the fréchet distance to the autoencoder reconstruction is less than the distance between original and interpolated douglas-peucker compression for a ratio of 2, but for higher ratios the autoencoder error grows faster. 
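the following pytorch sketch shows the sort of fixed-length trajectory autoencoder compared against douglas-peucker above: a 20-point (lat, lon) trajectory is squeezed through a small latent vector and reconstructed. the layer sizes and latent width, which together determine the compression ratio, are illustrative choices rather than the paper's exact model.

```python
# sketch: autoencoder compressing a fixed-length gps trajectory (20 points,
# 2 coordinates) into a small latent vector; sizes are illustrative only.
import torch
import torch.nn as nn

class TrajectoryAutoencoder(nn.Module):
    def __init__(self, n_points=20, latent_dim=10):
        super().__init__()
        flat = n_points * 2                               # (lat, lon) per point
        self.encoder = nn.Sequential(nn.Linear(flat, 64), nn.ReLU(),
                                     nn.Linear(64, latent_dim))
        self.decoder = nn.Sequential(nn.Linear(latent_dim, 64), nn.ReLU(),
                                     nn.Linear(64, flat))

    def forward(self, traj):                              # traj: (batch, n_points, 2)
        z = self.encoder(traj.flatten(1))                 # compressed representation
        return self.decoder(z).view_as(traj), z

if __name__ == "__main__":
    model = TrajectoryAutoencoder(n_points=20, latent_dim=10)    # 40 values -> 10 values
    traj = torch.rand(8, 20, 2)                                   # rescaled toy trajectories
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    for _ in range(50):                                           # tiny training loop
        optimizer.zero_grad()
        recon, _ = model(traj)
        loss = nn.functional.mse_loss(recon, traj)                # pointwise reconstruction error
        loss.backward()
        optimizer.step()
    print("reconstruction mse:", float(loss))
```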
the autoencoder distance here is always higher than the interpolated douglas-peucker distance, but the difference between both methods grows slower for higher compression ratios compared to the fréchet distance in figure3a. the tendency that the autoencoder performs better than the interpolated td-tr compressions for lower compression ratios in terms of fréchet distance can be replicated for the t-drive dataset. the autoencoder performs generally worse than the synchronized td-tr compression, but the difference is smaller for higher compression ratios. with equalized sequence lengths through interpolation of the douglas-peucker compressions, our autoencoder still produces a lower fréchet error for low compression ratios. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/125.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/125.txt new file mode 100644 index 0000000000000000000000000000000000000000..9d34f683722ca27472209e2aaa9c476d47c1e016 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/125.txt @@ -0,0 +1 @@ +edge computing (ec) is an emerging technology deployed in mobile edge networks, which coordinates computing and memory resources in the edge to support low-latency and high-bandwidth-demanding applications for end-side mobile and internet of things (iot) devices. with the increasing concern for intelligent services and data privacy protection of devices, federated edge learning (fel) is proposed to jointly train machine learning (ml) models with decentralized data at the edge of the network. unlike conventional centralized training paradigms, fel keeps private data on devices and only transmits model parameters or encrypted data information to the edge server, which is a promising solution for privacy-preserving edge intelligence. however, fel faces severe challenges related to resources, personalization, network environments, etc . shortage of individual device resources and imbalance of resources among devices significantly increase the difficulty of utilizing end-side resources. besides, personalized needs of users call for differentiated on-device models, while the uniform models trained by conventional parameter-averaging-based fel methods cannot generalize to all devices. moreover, non-ideal communication channels and network topology constrain the system design of fel.as an ml technique that both enables knowledge transfer and model collaborative training, knowledge distillation (kd) transfers knowledge from one ml model to another, allowing interactive learning among heterogeneous ml models to achieve constructive optimization. due to flexibility and effectiveness, kd has been applied to solve numerous ml problems, such as model compression , domain adaptation , distributed learning , etc. recent trends suggest the great potential to address the above challenges in the context of fel. previous works integrating kd into the training process of fel have been successful in tackling constrained device resources , adapting to heterogeneous devices and user requirements , and adapting to complex communication channels as well as network topologies . therefore, a survey is urgently needed to review how kd applies to fel.to the best of our knowledge, this paper is the first work to investigate the application of knowledge distillation in federated edge learning. 
different from existing surveys , we take the challenges faced by fel as the main clue, introducing existing fel approaches based on diverse forms of kd techniques and providing guidance for both future research directions and real deployment. specifically, the reminders of this paper are organized as follows. section 2 provides preliminary knowledge of related research directions, including fel and kd, and elaborates the reasons for concerning kd in fel. section 3 investigates fel based on kd in addressing resource-constrained, resource-heterogeneous, personalization, non-ideal channels and decentralization challenges in mobile edge networks. section 4 summarizes the limitations of existing methods, raises open problems in kd-based fel research, and provides guidance for real deployment. section 5 summarizes the whole paper.as a practical way to realize edge intelligence, federated edge learning (fel) implements federated learning (fl) systems in mobile edge networks, where massively distributed mobile and internet of things (iot) devices jointly train machine learning models without sharing private data on devices.kd can be customized to solve various machine learning problems, such as model compression via knowledge transfer from bulky models to compact models, distributed model training via knowledge exchange between models, etc. specifically, the technical characteristics of kd meet the core demands of fel, and the roles it can play include but not limited to compressing large-scale edge models for on-device deployment, transferring local adaptive knowledge to on-device models for personalization, and helping establish novel fl frameworks for enabling heterogeneous device supports. ds-fl achieves efficient communication via on-device local models' outputs exchange between heterogeneous devices and optimizes local models based on kd with common inputs of an open dataset, thus avoiding model parameters exchange conducted by conventional fel approaches. specifically, heand chengestablish alternating minimization fel frameworks to transfer knowledge from compact on-device models to the large edge model via kd, after which the on-device models are optimized based on the knowledge transferred back from the edge.existing works employ heterogeneous on-device models to adapt to the computing power of heterogeneous devices' hardware, and leverage kd to transfer knowledge from the shared edge model to heterogeneous on-device models. after that, pseudo-data generated by the trained generator is applied to a bi-directional kd process across the edge and the devices, aiming to integrate the knowledge from heterogeneous on-device models to the edge server and guide the on-device models to achieve more generalized training performance in turn. we suggest that a practical fel system with kd could include but not limited to resource-aware fel architecture, model-agnostic representation exchange protocols without open datasets, knowledge adaptation and refinement, kd-specific knowledge compression, privacy-preserving knowledge transfer, techniques for learning from knowledge, solutions for complex communication channels in mobile edge networks, etc.taking the challenges faced by federated edge learning (fel) as the main clue, this paper surveys prior works on applying knowledge distillation (kd) to fel, and classifies the role of kd in fel into four types of knowledge transfer, model representation protocol, component of backbone algorithm and dataset distillation. 
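as a concrete illustration of the knowledge-transfer role of kd described above, here is a standard (hinton-style) distillation loss sketch in pytorch, where a student mimics a teacher's temperature-softened outputs alongside the usual supervised loss; the temperature and mixing weight are illustrative defaults, not values taken from any surveyed fel system.

```python
# sketch: standard soft-label knowledge distillation loss (temperature-scaled
# kl divergence mixed with the ordinary cross-entropy on ground-truth labels).
import torch
import torch.nn.functional as F

def distillation_loss(student_logits, teacher_logits, labels, T=2.0, alpha=0.5):
    """alpha weights the distillation term against the supervised term."""
    soft_teacher = F.softmax(teacher_logits / T, dim=1)
    log_soft_student = F.log_softmax(student_logits / T, dim=1)
    kd = F.kl_div(log_soft_student, soft_teacher, reduction="batchmean") * (T * T)
    ce = F.cross_entropy(student_logits, labels)
    return alpha * kd + (1.0 - alpha) * ce

if __name__ == "__main__":
    torch.manual_seed(0)
    student_logits = torch.randn(8, 10, requires_grad=True)    # e.g. compact on-device model
    teacher_logits = torch.randn(8, 10)                        # e.g. larger edge model
    labels = torch.randint(0, 10, (8,))
    loss = distillation_loss(student_logits, teacher_logits, labels)
    loss.backward()
    print("distillation loss:", float(loss))
```

in an fel setting, the "teacher" outputs exchanged this way can replace raw parameter exchange, which is why kd-based schemes such as ds-fl can support heterogeneous on-device models.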
ds-fl achieves efficient communication via on-device local models' outputs exchange between heterogeneous devices and optimizes local models based on kd with common inputs of an open dataset, thus avoiding model parameters exchange conducted by conventional fel approaches. specifically, heand chengestablish alternating minimization fel frameworks to transfer knowledge from compact on-device models to the large edge model via kd, after which the on-device models are optimized based on the knowledge transferred back from the edge.existing works employ heterogeneous on-device models to adapt to the computing power of heterogeneous devices' hardware, and leverage kd to transfer knowledge from the shared edge model to heterogeneous on-device models. after that, pseudo-data generated by the trained generator is applied to a bi-directional kd process across the edge and the devices, aiming to integrate the knowledge from heterogeneous on-device models to the edge server and guide the on-device models to achieve more generalized training performance in turn. we suggest that a practical fel system with kd could include but not limited to resource-aware fel architecture, model-agnostic representation exchange protocols without open datasets, knowledge adaptation and refinement, kd-specific knowledge compression, privacy-preserving knowledge transfer, techniques for learning from knowledge, solutions for complex communication channels in mobile edge networks, etc.taking the challenges faced by federated edge learning (fel) as the main clue, this paper surveys prior works on applying knowledge distillation (kd) to fel, and classifies the role of kd in fel into four types of knowledge transfer, model representation protocol, component of backbone algorithm and dataset distillation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/126.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/126.txt new file mode 100644 index 0000000000000000000000000000000000000000..78b304bc7cafd202d927542f2e43614113449f67 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/126.txt @@ -0,0 +1 @@ +the healthcare industry could save up to $300 billion by focusing more on iomt devices, especially when dealing with chronic illnesses ; therefore, we can expect to see iomt as common place in the healthcare industry proving an undeniable incentive to utilise these ubiquitous and wide spread devices in a distributed, secure and intelligent manner.to this end, distributed machine learning on the edge is a persuasive solution that leverages the trend technology in the healthcare industry is following; unfortunately, current systems fail to consolidate these abilities and instead focus on distinct aspects. in this paper, we propose a novel approach to address the challenge of training machine learning systems, in particular neural networks (nns), on the devices themselves, which we refer to as learning on the edge (lote).our system provides enhanced, if not total, privacy and security, which given the sensitive nature of the patient data is essential, in a distributed and robust manner by default. this is achieved by combining federated learning, a paradigm that aggregates individually trained networks, and blockchain to remove the need for a centralised server. 
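the "aggregates individually trained networks" step mentioned above is, in its simplest form, federated averaging; the sketch below shows a plain, data-size-weighted fedavg aggregation of client state dicts in pytorch. the blockchain-based replacement for the central server that the paper proposes is not modelled here, and the model and client sizes are toy placeholders.

```python
# sketch: plain federated averaging of client model parameters, weighted by
# each client's local data size (the blockchain coordination layer proposed
# in the paper is not modelled here).
import copy
import torch
import torch.nn as nn

def fedavg(client_state_dicts, client_sizes):
    """weighted average of parameter tensors across clients."""
    total = float(sum(client_sizes))
    avg = copy.deepcopy(client_state_dicts[0])
    for key in avg:
        avg[key] = sum(sd[key] * (n / total)
                       for sd, n in zip(client_state_dicts, client_sizes))
    return avg

if __name__ == "__main__":
    def make_model():
        return nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 1))

    # three "devices" that each trained locally (here: just random weights)
    clients = [make_model().state_dict() for _ in range(3)]
    sizes = [120, 80, 200]                       # local dataset sizes per device
    global_model = make_model()
    global_model.load_state_dict(fedavg(clients, sizes))
    print(global_model(torch.randn(2, 4)))
```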
1 james calo is with the hamyln centre / department of computing, imperial college london, london, uk jam414@ic.ac.uk 2 benny lo is with the hamyln centre / department of surgery and cancer, imperial college london, london, uk benny.lo@imperial.ac.ukthere is a clear hierarchy of architectural archetypes; on one end there is cloud computing - containing vast resources with increased computational power. this, however, requires communication between the local system and the cloud. therefore issues such as loss of connection, network congestion, cyber security, etc. will affect the system's performance. on the other end is edge computing , with restricted resources but unparalleled access to the device as both the system and the backing computations are on the same (or physically close) device. in between these archetypes is fog computing , ; having the same structure as cloud computing but instead using a local server as shown in fig. 1. current solutions target either cloud and fog based computing or do in fact run on the edge; however, they either lack machine learning or only perform inferencing, and not training, which is the most computational demanding task.one ideal use of iomt devices is mobile health (mhealth). in developing countries, this has been shown as an effective method to monitor patients; unfortunately, these systems are often unintelligent relying on basic mobile phone functionality . these approaches purposefully avoid internet connection, since it is unreliable in many developing countries, yet the required components are in place to leverage the power of machine learning.on the other hand, the focus of mhealth in developed countries is for smart wearable devices, often paired with an app, yet these too are unintelligent and use a fraction of the ability of modern iot systems running their computations via the cloud; this allows less capable hardware to run complex computations but suffers from latency issues and must be connected to the internet to work which is not ideal.whilst there exists a handful of iot systems that aim to leverage machine learning on the edge, they only support inferencing and not training.for example, the stm32cubeai converts neural networks to run on stm32 arm cortex-m-based microcontrollers and has been used to create a human activity recognition (har) fitness tracker embedding a convolutional neural network (cnn) in a wrist worn, low power, mcu for inferencing . frameworks such as these are a step in the right direction but suffer from the need to train the models on a dedicated system or the cloud.simultaneously, there have been advances in the hardware required to infer, and potentially train, on the edge. gpus are better adapted to machine learning methods than cpus but are rarely found in embedded devices and not all gpus were created equal; the majority of the frameworks utilise the cuda language which is designed specifically for nvidia gpus. furthermore, gpus may be usurped by ai accelerator application-specific integrated circuits such as google's tpu (tensor processing unit) and fpgas (field-programmable gate array), which are used in microsoft's project brainwave to improve real-time deep neural network (dnn) inferencing , . the requirement for specific hardware increases the physical size, power draw and cost of devices; this is counterproductive for iomt where smaller and less obtrusive devices are preferred. 
by moving the learning to the edge on a cpu one can upgrade existing devices whilst keeping the footprint of newer devices smaller and focus more on efficiency.the infrastructure required to take iomt and edge/fog computing to the next level is already in place in a hospital. the users only move within a set area and data collection happens in the same location meaning federated learning is ideally suited to edge learning in a hospital ; multiple surgeries happen simultaneously and can all learn together to train models to increase generalizability, improving the model's overall performance by treating each patient or surgery as a decentralised dataset whilst still allowing for bespoke training on a per patient basis . this is ideally suited to clinical settings as federated learning never shares data thereby keeping data private and allowing training on previously inaccessible tasks such as that of anastomotic leak detection where the existing data, of which there is little, is severely unbalanced.the healthcare industry could save up to $300 billion by focusing more on iomt devices, especially when dealing with chronic illnesses; therefore, we can expect to see iomt as common place in the healthcare industry proving an undeniable incentive to utilise these ubiquitous and wide spread devices in a distributed, secure and intelligent manner. in this paper, we propose a novel approach to address the challenge of training machine learning systems, in particular neural networks (nns), on the devices themselves, which we refer to as learning on the edge (lote). current solutions target either cloud and fog based computing or do in fact run on the edge; however, they either lack machine learning or only perform inferencing, and not training, which is the most computational demanding task.on the other hand, the focus of mhealth in developed countries is for smart wearable devices, often paired with an app, yet these too are unintelligent and use a fraction of the ability of modern iot systems running their computations via the cloud; this allows less capable hardware to run complex computations but suffers from latency issues and must be connected to the internet to work which is not ideal. by moving the learning to the edge on a cpu one can upgrade existing devices whilst keeping the footprint of newer devices smaller and focus more on efficiency. the users only move within a set area and data collection happens in the same location meaning federated learning is ideally suited to edge learning in a hospital; multiple surgeries happen simultaneously and can all learn together to train models to increase generalizability, improving the model's overall performance by treating each patient or surgery as a decentralised dataset whilst still allowing for bespoke training on a per patient basis. this is ideally suited to clinical settings as federated learning never shares data thereby keeping data private and allowing training on previously inaccessible tasks such as that of anastomotic leak detection where the existing data, of which there is little, is severely unbalanced. this additionally shifts the logical architecture from the cloud/fog, which essentially comprises of devices connected to a server, to the edge, where every device is independent and autonomous; the system will work even with only one node and even if all nodes go down, the system can recover fully since each node contains a copy of the accepted blockchain, this may not be possible if the central server lost its data. 
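a minimal federated-averaging sketch of the per-patient set-up described above, with each patient or surgery treated as a disjoint local dataset whose raw data never leaves the participant; this is not the paper's blockchain-backed system, and the logistic model, data sizes and numpy implementation are assumptions for illustration.

# minimal federated-averaging sketch (no blockchain, no networking); each participant holds its own
# private data, trains locally, and only the weight vectors are aggregated by simple averaging.
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def local_update(w, x, y, lr=0.1, steps=50):
    # plain logistic-regression gradient steps on one participant's private data
    w = w.copy()
    for _ in range(steps):
        grad = x.T @ (sigmoid(x @ w) - y) / len(y)
        w -= lr * grad
    return w

rng = np.random.default_rng(0)
n_participants, n_features = 4, 8
global_w = np.zeros(n_features)

# each participant (patient / surgery) holds its own decentralised dataset
local_data = []
for _ in range(n_participants):
    x = rng.normal(size=(100, n_features))
    y = (x[:, 0] + 0.5 * x[:, 1] + rng.normal(scale=0.1, size=100) > 0).astype(float)
    local_data.append((x, y))

for round_ in range(10):
    # all nodes train locally in parallel, then the averaged weights become the new shared model
    local_ws = [local_update(global_w, x, y) for x, y in local_data]
    global_w = np.mean(local_ws, axis=0)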
moreover, since the blockchain is being utilized as a trust mechanism for federated learning, the mining target difficulty can remain lower, reducing the computational cost and increasing the rate at which blocks are added to the chain; this results in lower powered devices having enough computing resources to generate hashes competitively whilst still providing the same protection. by using a pair of udp sockets, we not only parallelize the communication we can split the two cases across different devices; for example, a hospital may have many iomt devices but none with networking capabilities, just bluetooth; they could therefore connect all iomt devices to a single or pair of network enabled iot devices which would handle the networking and correct forwarding, much like network address translation (nat) with regards to wifi routers. additionally, if one set of devices are all training on the same set of data, only one device needs to connect to the outbound udp connection and, as long as everyone connects to the inbound connection, all devices gain the benefits. using both standard and federated training paradigms we trained the model five times using 10%, 25%, 50%, 75% and 100% of the training data; in the federated case the data was shared equally amongst all participating models, such that no two models saw the same data points, as would be the case in a live system (especially when using image data as the input). furthermore, when training using federated updates, with each "'local training"' round being performed sequentially, the training process runs quicker compared with the standard method due to each participant operating on smaller subsets of the data, allowing for optimisations such as better caching; this is especially apparent on smaller devices.a new distributed learning approach is proposed with the aim of allowing learning on the edge by designing a light weight, distributed, autonomous system that is a natural fit for iot devices that are abundant, particularly in a hospital environment. however, there are a few components that we would like to address in future work: in order to allow spare computing resources to be shared to distribute the "'local training"' there needs to be a way to secure the training data either by working on an encrypted form (homomorphic encryption) or by converting the data into a non-reversible representation, for example fourier or wavelet transformation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/127.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/127.txt new file mode 100644 index 0000000000000000000000000000000000000000..49fe463d74d6cad80a7dcb461d55cc066582d47c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/127.txt @@ -0,0 +1 @@ +in many applications of reinforcement learning, the underlying probability transition matrix is known but the size of the state space is large so that one uses approximate dynamic programming methods to obtain the optimal control policy. examples of such applications include game-playing rl agents for playing games such as chess and go. abstracting away the details, in essence what alphazero does is the following : it evaluates the current policy using a monte carlo rollout and obtains a new policy using the estimate of the value function of the old policy by using lookahead. we note that alphazero collects and uses monte carlo returns for all states in each rollout . 
thus, effectively the algorithm performs policy iteration using monte carlo estimates of the value function. if one ignores the monte carlo aspect of policy evaluation but is interested in the tree search of aspects of rollout and lookahead, there are several recent works which quantify the impact of the depth of rollout and lookahead on the performance of algorithm . however, to the best of our knowledge, there is no analysis of monte carlo policy evaluation when the estimates of the value function are obtained from trajectories simulated from the policy. to the best of our knowledge, the only analysis of such algorithms assume that, at each iteration, either one estimates the value function starting from every single state of the underlying mdp or from a subset of fixed states . in fact, studying monte carlo policy evaluation using a single trajectory from each policy at each step of policy iteration is a known open problem . in this paper, we take a significant step in solving this problem: we prove that, with sufficient lookahead, policy iteration and monte carlo policy evaluation does indeed converge provided we use sufficient lookahead during the policy improvement step. using v k , just as in policy iteration, the algorithm obtains a trajectory corresponding to the lookahead policy (see section 2) corresponding to v k , µ k+1 , where. in order to obtain ĵµ k+1 (i) for i ∈ d k , we perform an m-step rollout (see section 2) with policy µ k+1 by obtaining a discounted sum of m costs beginning at each i for i ∈ d k . using t m µ k+1 t h-1 v k (i) + w k (i) for i ∈ d k , we obtain the next iterate as follows:.where i denotes the s × s identity matrix, p k,µ k (i) is the probability that state i is ever visited by the trajectory under policy µ k , p k,µ k is the diagonal matrix where diagonal entries of the matrix correspond to the values of p k,µ k (i) for all i ∈ s, and z k satisfies the same properties as w k . we can then subtract v k+1 from both sides and easily obtain the stochastic approximation paradigm that allows us to show that lim sup k→∞ t v k -v k ≤ 0 :. the purpose of showing that lim sup k→∞ t v k -v k ≤ 0 is that asymptotically, we can use monotonicity to show that j * ≤ j μk+1 ≤ t v k . we note that in equation (1), we have written µ k+1 as a function of v k since it is the lookahead policy with respect to v k . just as in the previous section, for all states i ∈ d k , we obtain t m µ k+1 t h-1 φθ k (i) + w k (i). additionally, we do not need to compute φθ k -we only need to compute φθ k (i) = φ(i) ⊤ θ k for states i visited by the trajectory or involved in the tree search. now, instead of updating v k (i) for i ∈ d k , the case in section 3, we instead obtain θ µ k+1 ∈ r d , which uses t m µ k+1 t h-1 φθ k (i) + w k (i) for i ∈ d k to construct an estimate of θ µ k+1 . 
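a simplified numerical sketch of the lookahead-plus-rollout loop discussed above, using one-step lookahead (h = 1) and a single simulated trajectory per iteration; the small random mdp, the cost convention, the rollout depth and the update of only visited states are illustrative assumptions, not the paper's exact scheme or constants.

# toy sketch of approximate policy iteration with lookahead and m-step monte carlo rollout;
# the mdp, horizon and step counts are illustrative assumptions.
import numpy as np

rng = np.random.default_rng(1)
S, A, alpha, m = 6, 3, 0.9, 30               # states, actions, discount, rollout depth
P = rng.dirichlet(np.ones(S), size=(A, S))    # P[a, s] = transition distribution over next states
c = rng.uniform(size=(A, S))                  # c[a, s] = one-step cost

def lookahead_policy(v):
    # one-step lookahead: greedy (cost-minimizing) action against the current value estimate v
    q = c + alpha * P @ v                     # q[a, s]
    return q.argmin(axis=0)

def rollout_return(policy, s, depth):
    # discounted sum of costs along an m-step simulated trajectory that follows the policy from s
    total, discount = 0.0, 1.0
    for _ in range(depth):
        a = policy[s]
        total += discount * c[a, s]
        s = rng.choice(S, p=P[a, s])
        discount *= alpha
    return total

v = np.zeros(S)
for k in range(20):
    mu = lookahead_policy(v)
    # simulate one trajectory under mu and collect the states it visits (the set d_k)
    s, visited = rng.integers(S), set()
    for _ in range(15):
        visited.add(s)
        s = rng.choice(S, p=P[mu[s], s])
    # monte carlo policy evaluation only at visited states; other entries keep their old values
    for i in visited:
        v[i] = rollout_return(mu, i, m)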
since m k is a matrix which uses the feature vectors corresponding to states in d k to construct an estimate of θ µ k+1 based on samples of ĵµ k+1 (i) for states i ∈ d k , it is easy to see that δ app is a measure of the ability of the feature vectors to approximate the value functions corresponding to the lookahead policies.in order to compute t m µ k+1 t h-1 φθ k (i) + w k (i) for i ∈ d k , we do not need to compute φθ k ; we need only compute φ(i) ⊤ θ k for states i ∈ d k and states i involved in the computation of the tree search at states visited by the trajectory.we denote by γk the γ k p k,µ k (i) corresponding to a maximizing i in the above expression.we define v k := φθ k and write the sequence of iterates {v k } ∞ k=0 as follows:.-t j µ k+1 t h-1 v k + t h-1 v k ≤ -j ℓ=1 α ℓ-1 α h-1 (α + 1)(∆ + ε)e. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/128.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/128.txt new file mode 100644 index 0000000000000000000000000000000000000000..2bd980fe094d4165d4c0f83fd85e406535bf2b64 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/128.txt @@ -0,0 +1 @@ +learning to produce an effective vector representation of input data has long been a central question for the field of deep learning. early proponents argued that a significant advantage of neural networks was that they could form distributed representations, where each input is represented by multiple neurons, and each neuron is involved in the representation of multiple different inputs (hinton et al., 1986). compared to using a separate neuron for each input, distributed representations are exponentially more compact (bengio, 2009). an extension of distributed representations is the idea of disentangled representations. these are a particular type of distributed representation in which each neuron represents a single human-interpretable feature of the input data, such as colour, size or shape. in the disentanglement literature, these features are often referred to as 'generative factors' or 'factors of variation'. intuitively, a disentangled vector representation is one in which each factor of variation is represented by a distinct subset of neurons. e.g., a certain subset of neurons represents shape and shape only, another distinct subset represents size and size only etc, and changing the size of the input but not the shape, will mean that the size neurons change their activation value, but the shape neurons remain unchanged. the difficulty, and the subject of this paper, comes when we try to translate this intuition into quantifiable metrics. in the strongest case, each factor is represented by a single neuron so that, e.g., changing the colour of the object in the image would cause a single neuron to change its value while all other neurons remain unchanged. (we discuss further below the ambiguity as to whether this stronger condition is required.) disentanglement (de) was originally formulated by (bengio, 2009) (see also (bengio et al., 2013;bengio, 2013)). more recently, beginning with (higgins et al., 2016), there have been many unsupervised de methods proposed based on autoencoders.in this paper, we examine the commonly used metrics to assess disentanglement. firstly, we show how they fail to pick up certain forms of entanglement, and that representations can score highly on such metrics while being entangled. 
specifically, we expose two problems with existing metrics: that they incorrectly align ground-truth factors to neurons, as they do not require distinct variables to be assigned to distinct factors; and that they only consider the effect of single latent variables at a time, and so fail to detect entanglements spread over multiple neurons. to address these problems, we present two new de metrics, based on the ability of a classifier to predict the generative factors from the encoded representation. if a representation is truly disentangled, then all the relevant information should be contained in a single neuron (or possibly a few neurons, see discussion in section 2), and so a classifier using only this/these neuron(s) should be just as accurate as one using all neurons, and one using all other neurons should be very inaccurate. our first metric is the accuracy of the single-neuron classifier, this should be high. our second metric is the accuracy of the classifier using all other neurons (normalized by that of a classifier on all neurons), this should be low.as well as showing the theoretical flaws in existing metrics, we also establish the superiority of our proposed metrics empirically, using a downstream compositional generalization task of identifying novel combinations of familiar features. humans could clearly recognize a purple giraffe, even if we have never seen one or even heard the phrase "purple giraffe" before, because we have disentangled the concepts of colour and shape, so could recognize each separately. the ability to form and understand novel combinations is a deep, important aspect of human cognition and is a direct consequence of humans being able to disentangle the relevant features of the objects we encounter. this is the basis for our proposed task. for example, we test whether a network trained to identify blue squares, blue circles and yellow squares can, at test time, correctly identify yellow circles. if it had learned to disentangle colour from shape, then it could simply identify "yellow" and "circle" separately, each of which is familiar. we show that existing de models generally perform poorly at this task, suggesting they are further from de than previous analyses have implied. we also show that a high score on de metrics is predictive of performance on this task, and that our proposed de metrics are the most predictive in this respect. our contributions are briefly summarized as follows.• we identify and describe two shortcomings of existing de metrics: incorrect alignment of neurons to factors and failure to pick on distributed entanglements.• we propose two alternative metrics, single-neuron classification and neuron knockout, that do not suffer from the problems that existing metrics suffer from.• we introduce the task of identifying novel combinations of familiar features to measure the compositional generalization of the encoding, and describe why it is suitable for evaluating de models.• we show empirically that existing models generally perform badly at this task, that their performance correlates with most de metrics, and that the strongest correlation is with our proposed metrics.the rest of this paper is organized as follows. section 2 discusses related work. section 3 describes the shortcomings of existing de metrics, section 4 proposes our new metrics. section 5 introduces the task of classifying novel combinations. 
section 6 presents the results of applying our metrics and proposed task to existing de models, and section 7 summarizes our work. intuitively, a disentangled vector representation is one in which each factor of variation is represented by a distinct subset of neurons. specifically, we expose two problems with existing metrics: that they incorrectly align ground-truth factors to neurons, as they do not require distinct variables to be assigned to distinct factors; and that they only consider the effect of single latent variables at a time, and so fail to detect entanglements spread over multiple neurons. if a representation is truly disentangled, then all the relevant information should be contained in a single neuron (or possibly a few neurons, see discussion in section 2), and so a classifier using only this/these neuron(s) should be just as accurate as one using all neurons, and one using all other neurons should be very inaccurate. as well as showing the theoretical flaws in existing metrics, we also establish the superiority of our proposed metrics empirically, using a downstream compositional generalization task of identifying novel combinations of familiar features. we also show that a high score on de metrics is predictive of performance on this task, and that our proposed de metrics are the most predictive in this respect. • we show empirically that existing models generally perform badly at this task, that their performance correlates with most de metrics, and that the strongest correlation is with our proposed metrics. the majority of existing metrics are based on aligning the set of factors g with the set of neurons z; that is, for each factor, finding the neuron that it is represented by. we observe that cases like this, where one neuron strongly encodes two or more factors, often occur in practice. existing metrics incorrectly align both factor 2 and factor 5 to neuron 2, whereas we correctly align factor 2 to neuron 1 instead. decreasing is incorrect, as the second model is clearly closer to the desired disentangled representation in which z 1 represents colour and colour only, and z 2 represents shape and shape only (wlog on ordering of the neurons). the first model needs z 2 to learn shape and z 1 to unlearn shape, the second model just needs z 1 to unlearn shape. in this case, i(g 0 ; z 1 ) = i(g 0 ; z 2 ) = 0, but i(g 0 ; z 1 , z 2 ) ≠ 0; in fact, i(g 0 ; z 1 , z 2 ) could be near maximal: i(g 0 ; z 1 , z 2 ) ≈ h(g 0 ). the brain has been shown to be highly fault-tolerant (yu, 2016). there are three obvious ways in which one could use the given accuracies in our example to quantify feature importance: we could use the accuracy values themselves, we could use the normalized accuracy values so that the random noise neuron is measured as being of zero importance, and we could use the mutual information scores (as used for mig). our metrics, restricted to the two novel feature types of size and shape, are more predictive of performance on compositional generalization than are existing metrics.
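a small sketch of the two classifier-based checks described above (single-neuron accuracy should be high; normalized all-other-neurons accuracy should be low); the synthetic representation, the factor values and the choice of logistic regression are illustrative assumptions rather than the paper's exact protocol.

# sketch of the single-neuron and neuron-knockout metrics on a toy representation where neuron 0
# encodes a 3-valued factor and the remaining neurons are noise.
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
n, d = 2000, 6
factor = rng.integers(0, 3, size=n)                  # a ground-truth generative factor with 3 values

z = rng.normal(size=(n, d))                          # synthetic "representation"
z[:, 0] = factor + 0.1 * rng.normal(size=n)          # neuron 0 carries the factor

def accuracy(features, labels):
    x_tr, x_te, y_tr, y_te = train_test_split(features, labels, test_size=0.3, random_state=0)
    clf = LogisticRegression(max_iter=1000).fit(x_tr, y_tr)
    return clf.score(x_te, y_te)

acc_all = accuracy(z, factor)                                        # baseline: all neurons
acc_single = accuracy(z[:, [0]], factor)                             # metric 1: aligned neuron only
acc_knockout = accuracy(np.delete(z, 0, axis=1), factor) / acc_all   # metric 2: all other neurons, normalized

print(f"single-neuron accuracy: {acc_single:.3f} (want high)")
print(f"normalized knockout accuracy: {acc_knockout:.3f} (want low)")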
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/129.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/129.txt new file mode 100644 index 0000000000000000000000000000000000000000..1693bb7360c313ce05b90a0677f9be200ba311cd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/129.txt @@ -0,0 +1 @@ +the human heart sends blood to the lungs to acquire oxygen, then returns it to the heart, which carries it throughout the body. the leading cause of death worldwide is cardiovascular disease (cvds). the who (world health organization) found that nearly 17.9 million deaths, almost 32% of global deaths, were caused by cvds in 2019. cvds can be classified into three major groups: electrical (irregular heartbeats owing to malfunctions of the heart's electrical system, or arrhythmia), structural (heart muscle disease, or cardiomyopathy), and circulatory (high blood pressure and coronary artery disease). the coronary artery blockage of the heart is responsible for the heart attack shown in fig. 1. in order to examine the myocardial electrical transmissions of the human heart in the waveform, electrocardiography (ecg) is the preferred approach. this task is done by an instrument called an electrocardiogram, known as ecg for short. an ecg is a graph that shows the voltage versus time of the electrical activity of the heart. this graph is obtained using electrodes placed on the skin, which detect the electric signals of the heart each time it beats. ecg is generally used to detect diseases of the human heart such as coronary artery disease, arrhythmias, cardiomyopathy, and heart attacks. in this study, we discussed arrhythmia classification using long short-term memory (lstm), deep convolutional neural networks (dcnns), and machine learning (ml). ecg signal usually provides some particular human heart information, such as the position of the heart and the heart chamber size. it also reveals the source and spread of impulses. the ecg can visualize cardiovascular rhythm and conduction disorders. the medication effects on the heart can also be identified. arrhythmia is common among other heart diseases. an irregular heartbeat is known as arrhythmia. according to the speed of the heartbeat, arrhythmia is grouped into two categories. tachycardia: tachycardia is the medical term for rapid heartbeats, i.e., a heart rate of more than 100 beats per minute. bradycardia: bradycardia is the medical term for slow heartbeats, i.e., a heart rate of fewer than 60 beats per minute. arrhythmia can be caused by damage from illness or injury, or can be inherited genetically. doctors suggest the ecg as a common test for diagnosing the rhythm of the heart. without treatment of the arrhythmia, the heart may not pump enough blood to the body, which can harm the heart, brain, and other parts of the body. therefore, it is essential that cardiologists correctly identify abnormal heartbeats. the human heart sends blood to the lungs to acquire oxygen, then returns it to the heart, which carries it throughout the body. cvds can be classified into three major groups: electrical (irregular heartbeats owing to malfunctions of the heart's electrical system, or arrhythmia), structural (heart muscle disease, or cardiomyopathy), and circulatory (high blood pressure and coronary artery disease). the coronary artery blockage of the heart is responsible for the heart attack shown in fig.
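a minimal pytorch sketch of the kind of lstm-based beat classifier referred to above; the segment length of 187 samples, the hidden size, the five output classes and the random stand-in batch are assumptions for illustration, not a reproduction of any specific study on the mit-bih data.

# minimal sketch of an lstm heartbeat classifier; replace the random batch with real ecg beat segments.
import torch
import torch.nn as nn

class BeatLSTM(nn.Module):
    def __init__(self, hidden=64, num_classes=5):
        super().__init__()
        self.lstm = nn.LSTM(input_size=1, hidden_size=hidden, batch_first=True)
        self.head = nn.Linear(hidden, num_classes)

    def forward(self, x):                  # x: (batch, time, 1) single-lead ecg segment
        _, (h_n, _) = self.lstm(x)         # h_n: (1, batch, hidden), last hidden state
        return self.head(h_n.squeeze(0))   # class logits

model = BeatLSTM()
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

# stand-in batch: 32 segments of 187 samples each with random labels
x = torch.randn(32, 187, 1)
y = torch.randint(0, 5, (32,))

optimizer.zero_grad()
loss = criterion(model(x), y)
loss.backward()
optimizer.step()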
in order to examine the myocardial electrical transmissions of the human heart in the waveform, electrocardiography (ecg) is the preferred approach. ecg is one kind of graph that shows voltage versus time of electrical movement of the heart. this graph is achieved by using electrodes placed on the skin, which detects electric signals of the heart which it beats each time. ecg is generally used to detect diseases of the human heart such as a coronary artery, arrhythmias, cardiomyopathy, heart attacks. in this study, we discussed arrhythmia classification using long short-term memory (lstm), deep convolutional neural networks (dcnns), and machine learning (ml). ecg signal usually provides some particular human heart information, such as the position of the heart and the heart chamber size. without treatment of the arrhythmia, enough blood may not pump by the heart to the body and it can harm the heart, brain, and other parts of the body. when it came to beating the hearts of the people they were studying, they classified them into five categories: fusion(f), non ectopic(n), v, s, and unknown beats (q). the cae model was used to compress raw ecg signal beats in order to extract coded features from each one and then these were utilized in an lstm network to classify the arrhythmia class in the following phase.with the further evolution of machine and deep learning, mit-bih dataset has been used on ecg arrhythmia classification. in this review work, we have discussed the arrhythmia, mit-bih ecg signals database, and research works on arrhythmia detection. researchers applied various attempts for detecting arrhythmia problems using the mit-bih ecg dataset, worked with various techniques and achieved promising results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/13.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/13.txt new file mode 100644 index 0000000000000000000000000000000000000000..80bef1100bb10aba93c485a73b67d177a837ae90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/13.txt @@ -0,0 +1 @@ +as a major federal agency of the department of health and human services, food and drug administration, also known as the fda, has started to protect public health in the united states since the start of the last century . one major component of the fda's responsibilities is to regulate the marketing of new medical devices by ensuring the safety and efficacy of these premarket devices. tests and evaluations were stringently done on these devices before they are allowed for transactions. however, as technology progresses, more and more healthcare experts and organizations start to adopt artificial intelligence in their process of research and development, and an increasing number of devices are created to handle a variety of tasks. in the fiscal year 2021, fda reported receiving more than 18000 premarket submissions annually .due to the great variety and quantity of emerging medical devices, the difficulty and complexity of fulfilling the mission of the fda escalate. the average review time of a 510(k)-submission increased from 100 days to 180 days from 2000 to 2014 . in response to this change, fda started to turn to the assistance of machines in its process of regulation. according to mason marks , fda is replacing physical laboratory and clinical experiments with computer model and simulation that depends on artificial intelligence, which greatly increases the efficacy of the reviewing process. 
however, there is still much debate around the legitimacy and applicability of utilizing such an automatic approach without human supervision.on the other hand, artificial intelligence is used in healthcare to perform more accurate diagnoses and improve the healthcare delivery process to patients. there have already been several cases in which computer diagnosis exceeds the performance of human experts such as spotting malignant tumors . the increasing market of artificial intelligence in healthcare is further escalated by the covid-pandemic, which brought public concerns for healthcare to a much higher altitude and a sheerly dropped amount in the workforce. artificial intelligence in healthcare alone generates 6.9 billion usd in the year 2021 and is continuously growing with a compound growth rate of 46.2% . the expected market size of this booming market is anticipated to reach 194.4 billion by the year 2030.like any fast-growing industry, ai in healthcare poses potential risks which require regulation. however, as stated by simon chesterman , such regulation is complicated by the pace of change, the wariness of constraining innovation, and the immense difference it will bring to the traditional regulatory models. among all the changes that challenge the conventional regulation system, one of the most controversial is probably using ai for regulation. algorithms are built in a way that eliminates unwanted human judgment variability in the regulation process. while they can also introduce discrimination independently from the dataset, they are trained on . another property that might be interesting to explore is whether this bias that the ai model exhibits is accumulative; that's it, if ai is used in ai regulation, will the discriminations transfer, or even worse, add up? although many preliminary trials have been performed like the experiment of the "smart court" in hangzhou, china in late 2019, such risks should always be wary of when conducting research in pertinent fields.with these backgrounds, this publication will propose a new method that helps the fda simplify its process of regulating the marketing of new medical devices. we will examine the possibility of applying deep learning networks, especially pre-trained word embedding and sentence embedding to the regulation process. the first section is going to be an introduction to relevant information. the second section will describe different sets of experiments and the applied methods, which include gpt-3 pre-trained embedding, fasttext pre-trained embedding with subword information, and sentence embedding siamese network. the third section will be an analysis of the experiment results. the final section will discuss the limitations of the methods used and ways to improve the results. because this paper is almost solely based on pre-trained models and pre-trained embeddings, it also provides insight into how much we can exploit transfer learning and domain shifting in low-resource tasks. the second section will describe different sets of experiments and the applied methods, which include gpt-3 pre-trained embedding, fasttext pre-trained embedding with subword information, and sentence embedding siamese network.for the cfr device description dataset, all types of devices, 2585 in total, and their corresponding genres and descriptions are crawled from title 21 of the cfr dataset on the fda classification panel. 
moreover, from the randomly selected 510 (k) devices description, some data points are spotted with inaccurate regulation numbers because their described device types do not match their device descriptions. however, this paper also tries to train a fasttext supervised model initialized with fasttext pre-trained embeddings on the descriptions from the cfr device description dataset, which includes one example for each device type.after similarity scores are obtained between each device type in the cfr and the target device, the similarity rank of the original label of the target device will be recorded.pre-trained models and dynamic word embedding, on the other hand, provide an improvement upon the representations of static pre-trained word embedding by incorporating context, documents, and language information using methods like autoencoder and autoregression.when constructing the embeddings for the device descriptions, pre-trained embeddings are found for each word in the descriptions and the average of these word embeddings is taken with tf-idf weighting to form the sentence embeddings for the description. in this sense, the sentence embeddings of the device description can focus more on the words that contain more information about the classification of the device than less relevant words that provide little help to the semantic meanings of vector representations. another problem with averaging fasttext pre-trained embedding is that it has no clues about the positions of the words in the sentences, which is especially tricky because the descriptions of medical devices include a lot of important verb-noun pairs that describe the utilities of the devices like "drain urine" and "clean vasculature", which will induce great amounts of information lost by mixing them up.while bert is trained on sentence paired and employed next sentence prediction during its training, sentence bert is also trained on the snli dataset, which contains 570,000 sentence pairs and the logical relations between them like entailment, contradiction, and neutral. however, unlike models that are based on permuted language models and able to encode stronger contextual information, bert is a masked language model and less powerful as compared to models that encode both contextual and positional information like mpnet. out of the three device descriptions that the author identified from the 510(k) dataset, one device is completely erroneously labeled, which means the original device label in the 510(k) document has no relation to the device description.with the new model that is built in this paper, applicants can simply type in brief descriptions of the types and functionalities of their devices, and the model will automatically display 10-15 most similar device types which, according to the performance of the sentence bert and gpt-3 semantic search embedding, will contain the correct label with a very high chance. so instead of having to identify their device classifications from 2000 device types, applicants now only need to look through at most 20 types of devices to determine the classification of their device and which premarketing pathway they should adopt. then, inspired by the idea of position embedding, this information can be incorporated into the classification model by adding an extra embedding layer that encodes the subject of the sentence, which is especially important in device classification. 
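a sketch of the tf-idf-weighted averaging of word vectors into a description embedding and the cosine-similarity ranking against cfr device types described above; the tiny vocabulary, the random "pre-trained" vectors and the two example descriptions are stand-ins for real fasttext or gpt-3 embeddings and for the actual cfr dataset.

# sketch of tf-idf-weighted word-vector averaging and similarity ranking of device descriptions.
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer

cfr_types = {
    "urological catheter": "a device to drain urine from the bladder",
    "vascular brush": "a brush used to clean vasculature during procedures",
}
query = "tube inserted to drain urine"

vectorizer = TfidfVectorizer()
tfidf = vectorizer.fit_transform(list(cfr_types.values()) + [query])
vocab = vectorizer.get_feature_names_out()
rng = np.random.default_rng(0)
word_vecs = {w: rng.normal(size=50) for w in vocab}   # stand-in for pre-trained word embeddings

def embed(row):
    # tf-idf weighted average of word vectors -> one normalized sentence embedding per description
    weights = row.toarray().ravel()
    vec = sum(weights[i] * word_vecs[vocab[i]] for i in np.nonzero(weights)[0])
    return vec / (np.linalg.norm(vec) + 1e-12)

doc_embs = [embed(tfidf[i]) for i in range(len(cfr_types))]
query_emb = embed(tfidf[-1])

# rank cfr device types by cosine similarity to the applicant's description
scores = sorted(zip(cfr_types, (e @ query_emb for e in doc_embs)), key=lambda t: -t[1])
for name, score in scores:
    print(f"{score:+.3f}  {name}")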
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/130.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/130.txt new file mode 100644 index 0000000000000000000000000000000000000000..185b8dd3af8914de92acdc4a9b211ea8628ea2be --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/130.txt @@ -0,0 +1 @@ +our paper addresses the issue of slow computation time for batchbald, a method for active learning that finds the most informative points to label in a pool set using bayesian neural networks. we propose a new approximation, k-bald, which uses k-wise mutual information terms to approximate batchbald, making it much less expensive to compute. as future work, we propose that the acquisition batch size or the order of approximation could be dynamically chosen based on its quality, making it more efficient for larger datasets.the goal of active learning (cohn et al., 1994) is to identify the most informative points in an unlabeled pool set to be labeled and added to the training set, in order to improve the performance of the model. one commonly used technique for active learning is batchbald (kirsch et al., 2019), which uses bayesian neural networks to find the most informative points to label in a pool set. however, batchbald can be very slow to compute, especially for larger datasets. in this paper, we propose a new approximation, k-bald, which uses k-wise mutual information terms to approximate batchbald, making it much less expensive to compute.from an information-theoretic point of view, active learning means finding the unlabeled points in the pool set with the highest expected information gain, which is also referred to as bald score (houlsby et al., 2011). bald score is often used to capture the epistemic uncertainty of the model for a given point. when using bayesian neural networks, bald scores measure the disagreement between (monte-carlo) parameter samples, similar to 'query by committee' (seung et al., 1992). to be more specific, the bald scores look as follows, where q(ω) is an empirical distribution of the parameters of the ensemble members, or an approximate parameter distribution, e.g., using monte-carlo dropout (gal & ghahramani, 2015):(1) batchbald, proposed in kirsch et al. (2019), is an extension of the bald algorithm to handle batch acquisition of multiple points at once. in practice, batchbald computes the joint bald score and uses the greedy algorithm from submodular optimization theory to build an acquisition batch that is 1 -1 /e-optimal:labels for the points in the acquisition batch are then queried and added to the training set. in practice, however, computing batchbald can be very slow, especially for large datasets.in this paper1 , we propose a new family of approximations for batchbald, called k-bald, which uses up to k-wise mutual information terms, leading to a much less expensive approximation. for example, on mnist (deng, 2012), 2-bald takes 1 min to select an acquisition batch of size 5, and at acquisition batch size 10, 2-bald takes 2 min while it still performs as well as batchbald, while batchbald takes 1 min for acquisition batch size 5 and already 30 min for acquisition batch size 10, see also figure 1. importantly, we could use this family of approximations to dynamically choose the acquisition batch size by estimating the quality of our approximation-a first in active learning.the rest of the paper is organized as follows. 
in section §2, we will describe the k-bald method, including the explanation of k-wise mutual information terms, how k-bald approximates batchbald using these terms, and the dynamic choice of k based on approximation quality. in section §3, we will present our experiment results, including the comparison of computation time between batchbald and k-bald on mnist dataset and the comparison of performance between batchbald and k-bald. finally, we will conclude in section §4, summarizing our results and discussing future work and potential extensions. as future work, we propose that the acquisition batch size or the order of approximation could be dynamically chosen based on its quality, making it more efficient for larger datasets., 2019), which uses bayesian neural networks to find the most informative points to label in a pool set.in this paper1, we propose a new family of approximations for batchbald, called k-bald, which uses up to k-wise mutual information terms, leading to a much less expensive approximation. for example, on mnist(deng, 2012), 2-bald takes 1 min to select an acquisition batch of size 5, and at acquisition batch size 10, 2-bald takes 2 min while it still performs as well as batchbald, while batchbald takes 1 min for acquisition batch size 5 and already 30 min for acquisition batch size 10, see also figure1. in section §2, we will describe the k-bald method, including the explanation of k-wise mutual information terms, how k-bald approximates batchbald using these terms, and the dynamic choice of k based on approximation quality. that is instead of trying to compute the joint entropy in the batchbald score exactly, we can approximate it using pairwise mutual information terms, leading to a new approximation, we call 2-bald, or generally, following the inclusion-exclusion principle, using up to k-wise mutual information terms, leading to what call the k-bald family of approximations for batchbald.with 1-bald, we simply recover the well-known top-k bald, where we greedily maximize over the possible candidates for the acquisition batch using individual bald scores.so while 1-bald does not take the total correlation into account at all; 2-bald takes the total correlation into account up to pairwise terms; and k-bald takes the total correlation into account up to k-wise terms.for example, we could compute both 2-bald and 3-bald and stop the batch acquisition once the scores of 2-and 3-bald diverge too much.we first evaluate the performance of 2-bald using an acquisition batch size of 5 for batchbald and 10 for 2-bald. as can be seen from the figure, 2-bald performs as well as batchbald in terms of both accuracy and performs much better in regards to computation time: in table1, 2-bald takes 1 min to select an acquisition batch of size 5, and at acquisition batch size 10, 2-bald takes 2 min while still performs as well as batchbald, at least in the proof of concept experiment on mnist, while batchbald takes 1 min for acquisition batch size 5 and already 30 min for acquisition batch size 10. this means that while 2-bald might be a viable alternative to batchbald when using comparable acquisition batch sizes, it does not allow for scalability to larger acquisition batch sizes.in this paper, we introduced a new family of approximations for batchbald, k-bald, that use k-wise mutual information terms to approximate batchbald. 
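a small numpy sketch of the individual bald score computed from monte-carlo samples of the predictive distribution, followed by greedy top-k selection; as noted above, this corresponds to the 1-bald / top-k bald baseline rather than batchbald's joint score or the pairwise 2-bald approximation, and the pool size, sample count and random probabilities are illustrative assumptions.

# bald score I(y; omega) = H[E_omega p(y|x,omega)] - E_omega H[p(y|x,omega)] from mc samples.
import numpy as np

rng = np.random.default_rng(0)
n_pool, n_mc, n_classes = 1000, 20, 10

# probs[j, i, c] = p(y_i = c | x_i, omega_j) for mc parameter sample omega_j (e.g. mc dropout)
logits = rng.normal(size=(n_mc, n_pool, n_classes))
probs = np.exp(logits) / np.exp(logits).sum(axis=-1, keepdims=True)

def entropy(p, axis=-1):
    return -(p * np.log(p + 1e-12)).sum(axis=axis)

mean_probs = probs.mean(axis=0)                           # predictive distribution averaged over omega
bald = entropy(mean_probs) - entropy(probs).mean(axis=0)  # mutual information per pool point

acquisition_batch = np.argsort(-bald)[:10]                # greedily take the 10 highest-scoring points
print(acquisition_batch)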
additionally, it may be worthwhile to investigate the divergence of 3-bald from 2-bald as a way to dynamically set the acquisition batch size and catch approximation issues. specifically, they do not examine how the scores change within an acquisition round and the quality of the approximation (see figure3), and we do not consider the dynamic setting of the acquisition batch size or conservative acquisition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/131.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/131.txt new file mode 100644 index 0000000000000000000000000000000000000000..a981c74f75545afbd4b82f5c1ea192891c2a0a7b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/131.txt @@ -0,0 +1 @@ +recurrent neural networks (rnns) have a long history of being used to solve a wide range of tasks involving sequential data. they were the most common choice for natural language processing, but have since been largely replaced by transformers in recent years. however, there has been a recent resurgence of interest in the theoretical aspects of rnns, as seen in studies such as merrill et al. (2020). another study found that rnns with squashing and non-squashing (i.e. unbounded) activation functions exhibit qualitative differences in their counting abilities (weiss et al., 2018). this, along with the findings of el-naggar et al. (2022), suggests that even rnns with unbounded activation functions struggle to learn accurate counting on very long sequences. it is therefore crucial to understand why rnns, despite having the capacity, often fail to accurately count in practice.in this study, we examine the behaviour of the simplest form of rnns: a linear single-cell rnn. our goals are to: a) theoretically identify the necessary conditions for a linear rnn to have the ability to count, and b) explore how these conditions relate to the empirical behaviour of trained linear rnn models. the primary contributions of this paper are: a) we identify two conditions that indicate counting behaviour in linear rnns; b) we prove that these indicator conditions are necessary and sufficient for exact counting behaviour to be achieved in linear rnns; c) we then show empirically that linear rnns generally do not learn exact counting and do not meet the indicator conditions; and finally, d) we show empirical relationships between the length of the training sequences and the indicator value distributions. our goals are to: a) theoretically identify the necessary conditions for a linear rnn to have the ability to count, and b) explore how these conditions relate to the empirical behaviour of trained linear rnn models. the primary contributions of this paper are: a) we identify two conditions that indicate counting behaviour in linear rnns; b) we prove that these indicator conditions are necessary and sufficient for exact counting behaviour to be achieved in linear rnns; c) we then show empirically that linear rnns generally do not learn exact counting and do not meet the indicator conditions; and finally, d) we show empirical relationships between the length of the training sequences and the indicator value distributions.the generalisation of formal language tasks to very long sequences is not often addressed, as it requires an exact or near exact behaviour of the neural network in order avoid accumulation of errors, e. 
it is not clear what the general conditions are for an rnn to perform exact counting, which is necessary for developing a deeper understanding of the behaviour of rnns. in this section we formally define the balanced bracket language, balanced bracket counter and linear recurrent network, and we identify conditions on the network weights that indicate that the linear recurrent network will behave as a balanced bracket counter. previous work, such as suzgun et al. (linear recurrent network) a linear recurrent network (lrn) is a network which receives an input x t at every timestep t, which is used along with the activation from the previous timestep h t-1 and weights w, u, and w b to produce activation h t, which is then passed on to the next timestep with the update function h t = w h t-1 + u x t + w b . in theorem 1, we relate the balanced bracket counter behaviour of an lrn to specific conditions on its weights. we define two indicator conditions and show that they are necessary and sufficient for exact counting behaviour to be achieved in an lrn. the following two indicator conditions are necessary and sufficient for a linear recurrent network to accept the balanced bracket language bb. we prove that the counting indicator conditions in theorem 1 are necessary and sufficient to accept the balanced bracket language with a linear recurrent network. part 1: we prove that the counting indicator conditions in theorem 1 are satisfied if a linear recurrent network accepts the balanced bracket language by using different input sequences (table 1), from which we derive the indicator conditions. part 2: we prove by induction that if the counting indicator conditions listed in theorem 1 hold, a linear recurrent network accepts the balanced bracket language. therefore, we prove that if the counting indicator conditions listed in theorem 1 are satisfied in a linear recurrent network, it accepts the balanced bracket language. although linear rnns have the theoretical capacity for counting behaviour, previous research has shown that these models often struggle to effectively generalise counting behaviour to long sequences. in this study, we present a set of necessary and sufficient conditions that indicate counting behaviour in linear rnns and provide proof that meeting these conditions is equivalent to counting behaviour. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/132.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/132.txt new file mode 100644 index 0000000000000000000000000000000000000000..a981c74f75545afbd4b82f5c1ea192891c2a0a7b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/132.txt @@ -0,0 +1 @@ +explanations for predictions of machine learning models act as reasons for a predictive model's behavior and are desirable for trustworthy and transparent machine learning (molnar 2022). with this being said, there is not much agreement in the machine learning community on exactly what counts as an explanation (doshi-velez and kim 2017; burkart and huber 2021). machine learning practitioners have developed a large set of domain-specific explanation methods in recent years. these methods are tailored either to the specific task the model is seeking to perform, e.g. regression, classification, object detection, etc., or to the type of inputs and outputs of the model, e.g., tabular features, images, sentences, etc. (islam et al. 2022). a promising technique for explaining the predictions of structured or tabular classifiers is rule-based explanations.
a rule-based explanation is a predicate defining a simple region in the feature space that is sufficient for classifying a given point. in this paper, we take advantage of the connection between the inherent definability of rule-based explanations and definability in topology to develop a general framework to represent varieties of explanations based on existing explanation algorithms.to summarize this paper, we make the following contributions:• we present a novel framework of explainability for rulebased classifiers based on existing explanation algorithms.• we characterize explainability as a topological property relative to an explanation scheme i.e. relative to a choice of explanation shape and a measure of explanation size.we conjecture that all classifiers "in the wild" satisfy this notion of explainability. • employing our framework, we identify two principles for explanation algorithms that apply both theoretically and in practice. the first is that rule-based explanations can take nearly any desired shape. the second holds that if no probability measure is known over the feature space and at least one feature is not bounded, then explanations should be bounded, i.e. include all unbounded features.this paper proceeds as follows. in section 2, we discuss various existing explanation algorithms and provide a brief introduction to topology. we introduce explanation schemes as a framework for explainability and characterize explainability as a topological property in sections 3, 4, respectively. in section 5, we derive principles for both formal and practical explanation algorithms. in sections 6, 7, we conclude by discussing limitations and open problems and surveying related work. given a classifier and a point in the feature space, a rule-based explanation algorithm generates a rule defined in terms of the features of the classifier that both covers the given point and is sufficient for its classification.given black-box access to a classifier and a point, these algorithms evaluate the classifier on a collection of points sampled in a neighborhood of the given point and return a rule such that the points satisfying the rule (or at least some proportion of points above a given threshold) evaluate to the same label as the given point. a rule-based explanation for x ∈ x is a well-defined region of the feature space containing x whose classification is invariant within the region, i. similarly, if a probability measure is unknown, then the counting measure is typical for discrete feature spaces, lebesgue measure is typical for continuous feature spaces, and a product measure is typical for spaces with both discrete and continuous features. an explanation scheme is a tuple (x, ϕ, µ) where x is the feature space, ϕ is a scalable rule generating the explanation topology t ϕ on x, and µ is a measure on x representing coverage. a classifier f : x → y is explainable for scheme (x, ϕ, µ) if each x ∈ x has an explanation except on a set of edge cases. suppose f : → y is explainable for (x, ϕ, µ). if ϕ is the predicate for open intervals, then for x ∈ r if x is rational (irrational) then every rule satisfying ϕ that covers x contains an irrational (rational) number. 
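a sketch of the sampling check used by the rule-based explanation algorithms described above: a candidate rule, here an axis-aligned box around the point x, is accepted if the classifier's label is invariant for at least a threshold fraction of points sampled inside it; the stand-in black-box classifier, the box widths and the 0.95 threshold are illustrative assumptions.

# anchor-style precision check for a candidate rule around a point.
import numpy as np

rng = np.random.default_rng(0)

def classifier(points):
    # stand-in black-box tabular classifier
    return (points[:, 0] + points[:, 1] > 0).astype(int)

def rule_precision(x, half_width, n_samples=2000):
    # sample uniformly from the box {z : |z_j - x_j| <= half_width_j for all j} that covers x
    lo, hi = x - half_width, x + half_width
    samples = rng.uniform(lo, hi, size=(n_samples, len(x)))
    return np.mean(classifier(samples) == classifier(x[None, :])[0])

x = np.array([1.0, 1.0])
for w in (0.5, 1.0, 2.0):
    prec = rule_precision(x, np.full(2, w))
    keep = prec >= 0.95
    print(f"box half-width {w}: precision {prec:.3f} -> {'accept' if keep else 'reject'} as explanation")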
scalable rules ϕ, ψ generate equivalent explanation topologies if for every potential explanation a ⊆ x satisfying ϕ and x ∈ a, there is some potential explanation b ⊆ x satisfying ψ such that x ∈ b and b ⊆ a, and vice versa. if two explanation schemes share a coverage measure and their respective scalable rules each generate the same explanation topology, then they share the same class of explainable models. from this perspective, one can substitute any scalable rule that generates the standard topology in place of open balls or open rectangles in an explanation scheme without affecting which models are explainable for the new scheme. suppose the user does not know a probability measure over a continuous feature space, a single feature f is not bounded, and coverage is a monotone non-decreasing function of lebesgue measure. the inclusion of a coverage guarantee extends explainability in the sense of definition 3, which, for an explainable model, guarantees that an explanation exists, not that an explanation of a given coverage exists. for explanation scheme (x, ϕ, µ), the fidelity of explanation a for classifier f : x → y. unlike the case with coverage guarantees, extending topological explainability to fuzzy explanations satisfying a given level of fidelity does not compromise the structure of the explanation topology t ϕ ; rather, the characterization of an explainable classifier requires modification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/133.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/133.txt new file mode 100644 index 0000000000000000000000000000000000000000..a7be70b4424dfc6be82ad00893e8d06520b204ff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/133.txt @@ -0,0 +1 @@ +many real-world applications involve monitoring and analyzing a constant stream of data. a fundamental task in such applications is to monitor whether a change has occurred. for example, the goal may be monitoring the performance of a classifier over time, and triggering retraining if the quality degrades too much. we can also use change point detection techniques to detect anomalous behavior in the data stream. as the data flow may be significant, it is important to develop efficient algorithms. in this paper we study detecting change in a stream of binary numbers, that is, we are interested in detecting whether the underlying distribution has recently changed significantly. to test the change we will use a standard likelihood ratio statistic. namely, assume that we have already observed n samples from the last time we have observed change. in our first model, we fit a single bernoulli variable to these samples. in our second model, we split these samples into two halves, say at point i, and fit two bernoulli variables to these halves. once this is done we compare the likelihood ratio of the models. if the ratio is large enough, then we deem that change has occurred. in our setting, index i is not fixed. instead we are looking for the index that yields the largest likelihood. this can be done naively in o(n) time by testing each candidate. this may be too slow, especially if n is large and we do not have the resources to do this before a new sample arrives. our main technical contribution is to show how we can achieve a (1 - ǫ)-approximation of the optimal i in o(ǫ^-1 log^2 n) time. to achieve this we will first reduce the number of candidates for the optimal index i.
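the naive o(n) baseline mentioned above can be written in a few lines: for every split index i, compare the log-likelihood of two bernoulli models (before and after i) with a single bernoulli fit to all n samples, and keep the split with the largest log-likelihood ratio; the synthetic stream below is an illustrative assumption.

# naive o(n) likelihood-ratio scan for a single bernoulli change point.
import numpy as np

def bernoulli_loglik(ones, total):
    if total == 0:
        return 0.0
    p = ones / total
    if p in (0.0, 1.0):
        return 0.0                        # maximized likelihood is 1 when the sample is constant
    return ones * np.log(p) + (total - ones) * np.log(1 - p)

def best_change_point(s):
    n, ones = len(s), int(np.sum(s))
    null = bernoulli_loglik(ones, n)      # single bernoulli over all n samples
    best_i, best_stat = None, -np.inf
    prefix_ones = 0
    for i in range(1, n):                 # candidate split: s[:i] vs s[i:]
        prefix_ones += s[i - 1]
        stat = (bernoulli_loglik(prefix_ones, i)
                + bernoulli_loglik(ones - prefix_ones, n - i)
                - null)
        if stat > best_stat:
            best_i, best_stat = i, stat
    return best_i, best_stat

rng = np.random.default_rng(0)
stream = np.concatenate([rng.binomial(1, 0.2, 300), rng.binomial(1, 0.6, 200)])
i, stat = best_change_point(stream)
print(f"best split at i={i}, log-likelihood ratio {stat:.2f}")   # expect i near 300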
we say that index j is a border if each interval ending at j -1 has a smaller proportion of 1s than any interval that starts at j. a known result states that the optimal change point will be among border indices. using border indices already reduces the search time greatly in practice, with the theoretical running time being o(n 2/3 ). to obtain even smaller bounds we show that we can find the optimal index among the border indices for fixed model parameters, that is, the parameters for the two bernoulli variables, in o(log n) time. we then construct a list of o(ǫ -1 log n) candidates for these parameters. moreover, this list will contain model parameters that are close enough to the optimal parameters, so testing them yields a (1 -ǫ) approximation guarantee in o(ǫ -1 log 2 n) time. the remaining paper is organized as follows. in section 2 we introduce preliminary notation and define the problem. in section 3 we introduce border points. we present our main technical contribution in sections 4-5: first we show how to find the optimal index for fixed model parameters, and then show how to select candidates for these parameters. we present related work in section 6 and empirical evaluation in section 7. finally, we conclude with discussion in section 8. our first step for a faster change point discovery is to reduce the number of possible change points. we obtain a block sequence b from a binary sequence s by grouping the entries between border points: the counter u i indicates the number of 1s while the counter v i indicates the number of 0s. given a block sequence b, find a change point i that maximizes q(i; b). assume that we have already observed n entries, and we have a block sequence of k blocks b induced by the border points. we then check whether av (k + 1, k + 1) ≤ av (k, k), that is, whether the average of the last block is smaller than or equal to the average of the second last block. given a block sequence b, an index i, and two parameters p 1 and p 2 , we define. assume a block sequence b = (u j , v j ) and two parameters 0 ≤ p 1 < p 2 ≤ 1. let t j = u j + v j , and write x = log p 1 -log p 2 and y = log(1 -p 1 ) -log(1 -p 2 ). we have shown that if we know the optimal p 1 and p 2 , then we can use binary search as described in the previous section to find the change point. instead of testing every p ∈ p , we will construct an index set c, and define r = {av (i, k) | i ∈ c}, such that for each p ∈ p there is r ∈ r such that eq. holds. algorithm 1: findcands(b, ǫ), given a block sequence b of k entries and an estimation requirement ǫ > 0, constructs a candidate index set c that is used to estimate the model parameter p 2 . assume a block sequence b with k entries generated from a binary sequence s with n entries, and let ǫ > 0. for each algorithm 2: findcands ′ (b, ǫ), given a block sequence b of k entries and an estimation requirement ǫ > 0, constructs a candidate index set c that is used to estimate the model parameter p 1 . in figure 1a, we show the average delay of discovering the true change point, that is, how many entries are needed, on average, before a change is discovered after each true change. we see from the results that the delay grows linearly with τ , whereas the number of false change points is significant for small values of τ but drops quickly as τ increases. recall that we say that change occurs if it is larger than σ = τ + log n.
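to make the baseline concrete, the following is a minimal sketch of the naive o(n) scan described above: for each candidate split i it fits one bernoulli model to the whole window and two bernoulli models to the halves, and keeps the split with the largest log-likelihood ratio. the approximate o(ǫ -1 log 2 n) search of this paper is not reproduced here, and the function names are illustrative only.

import numpy as np

def bernoulli_loglik(ones, total):
    # log-likelihood of `total` bernoulli samples containing `ones` ones, at the mle rate
    if total == 0 or ones == 0 or ones == total:
        return 0.0
    p = ones / total
    return ones * np.log(p) + (total - ones) * np.log(1.0 - p)

def naive_change_point(s):
    s = np.asarray(s)
    n, ones = len(s), int(s.sum())
    base = bernoulli_loglik(ones, n)          # single-bernoulli model over the whole window
    prefix = np.cumsum(s)
    best_i, best_stat = 0, 0.0
    for i in range(1, n):                     # o(n): try every split point
        left = bernoulli_loglik(int(prefix[i - 1]), i)
        right = bernoulli_loglik(ones - int(prefix[i - 1]), n - i)
        stat = left + right - base            # log-likelihood ratio for a change at i
        if stat > best_stat:
            best_i, best_stat = i, stat
    return best_i, best_stat

a change would then be reported when the returned statistic exceeds a threshold such as σ = τ + log n, as in the text.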
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/134.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/134.txt new file mode 100644 index 0000000000000000000000000000000000000000..64a83f4d4676f8c3c86b21bd0c927276f1de1663 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/134.txt @@ -0,0 +1 @@ +practical use cases for early sequence classification exist in many domains. holding your smartphone's microphone up to a speaker, in seconds a music recognition app can tell which song is being played. there are two competing objectives with respect to the app making a real-time classification from audio. on one hand, a longer sequence from the song may yield a more accurate classification. on the other hand, the user may not have the patience to wait very long.generally, we are interested in scenarios in which a classifier receives elements of a sequence over time. this kind of ongoing flow of data immediately suggests a need for a real-time ability to stop waiting for new elements and classify given the received elements at this point in time at sufficient accuracy. we call this early classifying to differentiate from classification after a 'complete' sequence or a pre-set number of sequence elements is received. optimally deciding when one has received enough data, and then making an accurate classification from that data, is the crux of the problem we are investigating.to this end, we introduce our novel classifier-induced stopping (cis) in this paper. previous methods depend on exploration during training (when there is access to the entire sequence) to learn (i) a policy to decide when to stop waiting for new elements and classify and (ii) the classifier itself. exploration, in an early sequence classification context, means the policy affects how much of the sequence is ingested or used to learn. in contrast, cis learns both policy and classifier in a more direct, supervised approach inspired by imitation learning . cis learns to classify as accurately as possible at every time step, after receiving a new element. concurrently, it learns to stop and classify at the optimal time (based off a reward) induced from its own classifications at each time step. cis removes notions of exploration and learns to follow the ideal decision-making based off its own classification predictions; hence, we call it classifier-induced. the main contributions of our work are as follows. we introduce a novel, supervised framework to learn a stopping time for early classifiers that avoids exploration. instead, it learns when to stop from its own classifications. we demonstrate that cis outperforms benchmarks in terms of a pareto frontier auc measure across diverse experiments.our paper is structured as follows. in section 2, we establish notation and review related work, specifically the two benchmark methods used in experiments. following in section 3, we discuss cis in detail. section 4 presents results from three sets of experiments on a variety of problems and data. section 5 gives a summary. this kind of ongoing flow of data immediately suggests a need for a real-time ability to stop waiting for new elements and classify given the received elements at this point in time at sufficient accuracy. previous methods depend on exploration during training (when there is access to the entire sequence) to learn (i) a policy to decide when to stop waiting for new elements and classify and (ii) the classifier itself. 
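the stopping problem above can be made concrete with a minimal, deliberately simplistic baseline: run the classifier after every new element and stop as soon as its predicted class probability clears a confidence threshold. this is not the cis training procedure described here, which learns the stopping decision from the classifier's own per-step predictions and a reward; per_step_probs and threshold are illustrative placeholders.

import numpy as np

def early_classify(per_step_probs, threshold=0.9):
    # per_step_probs: numpy array of shape (T, num_classes); row t holds the classifier's
    # class probabilities after seeing elements 0..t of the sequence
    for t, probs in enumerate(per_step_probs):
        if probs.max() >= threshold:                 # confident enough: stop and classify now
            return t, int(probs.argmax())
    last = len(per_step_probs) - 1                   # otherwise wait for the full sequence
    return last, int(per_step_probs[last].argmax())

lowering the threshold trades accuracy for earlier decisions, which is exactly the accuracy-versus-delay trade-off that the pareto-frontier comparisons in this text refer to.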
exploration, in an early sequence classification context, means the policy affects how much of the sequence is ingested or used to learn. concurrently, it learns to stop and classify at the optimal time (based on a reward) induced from its own classifications at each time step. the entire sequence is wholly used in training and we are able to directly learn the optimal classification time in a supervised manner. they give the holder the right to buy a stock at a specified strike price only on a given expiration date (betting the stock will go up). however, after buying the option, if the option holder could predict that the stock price will not be above the strike price on the option expiration date, then the holder could attempt to sell the option in the secondary market to recoup the original cost of the option. from , it is reasonable to assume a strike price equal to the stock price on the option origination purchase date. for the training set, we divide each technology stock into disjoint 30-day stock price samples, through 2016. we consider a binary classification of whether the stock closing price on day 30 is greater than or less than the stock closing price on day 1 (proxying strike price). the assumption is that we will have a year-long stock price sequence to continually roll out early classifiers and 'purchase' new options the day after stopping and deciding what to do with the current one. in all experiments, we holistically compare early classifiers from ppo, larm, and cis by their pareto frontiers. for a given µ value, we roll out the early classifier over the validation set and compute the mean classification time and accuracy after each training epoch. while stock price movements are complex random walks, cis is able to discern recognizable patterns better than larm and ppo. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/135.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/135.txt new file mode 100644 index 0000000000000000000000000000000000000000..9d2ede2159035482271c4fcb2bb1ac2cfaf4908f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/135.txt @@ -0,0 +1 @@ +boosting is one of the most successful ideas in machine learning, allowing one to "boost" the performance of a base learning algorithm with rather poor accuracy into a highly accurate classifier, with recent applications in adversarial training , reinforcement learning , and federated learning , among many others. the classic boosting algorithm, known as adaboost , achieves this by iteratively training classifiers on the training data set. after each iteration, the data set is reweighted and a new classifier is trained using a weighted loss function. the weights intuitively guide the attention of the base learning algorithm towards training samples that the previous classifiers struggle with. after a sufficiently large number of iterations, the produced classifiers are combined by taking a weighted majority vote among their predictions. both the classic adaboost algorithm, as well as more modern gradient boosters , all have this highly sequential behaviour, where the algorithm runs in multiple iterations that adjust the learning problem based on previously trained classifiers/regressors. indeed, the best performance of gradient boosters on benchmark data sets is often obtained after hundreds, or even thousands of iterations . this may be appropriate when the base learning algorithm has a small training time.
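the sequential dependence just described is visible in a minimal adaboost-style loop: the weights used in round t are produced by round t-1, so the rounds cannot be reordered or run concurrently. this is a standard textbook sketch, not the construction analyzed in this paper; weak_learner is a placeholder that trains a hypothesis under a weighted loss.

import numpy as np

def adaboost(X, y, weak_learner, rounds=50):
    # y takes values in {-1, +1}; weak_learner(X, y, w) returns a callable h with h(X) in {-1, +1}
    n = len(y)
    w = np.full(n, 1.0 / n)
    hypotheses, alphas = [], []
    for _ in range(rounds):
        h = weak_learner(X, y, w)                    # trained on the current weights
        err = np.clip(w[h(X) != y].sum(), 1e-12, 1 - 1e-12)
        alpha = 0.5 * np.log((1 - err) / err)
        w *= np.exp(-alpha * y * h(X))               # misclassified samples gain weight,
        w /= w.sum()                                 # so round t depends on round t-1
        hypotheses.append(h)
        alphas.append(alpha)
    return lambda Xq: np.sign(sum(a * h(Xq) for a, h in zip(alphas, hypotheses)))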
however, it prevents the use of boosting in combination with e.g. medium-sized neural networks as the base learning algorithm, or if one wishes to use all available training data in a large data set. here the sequential nature of boosting algorithms is particularly critical, as it is not possible to simply distribute the training task to many machines. this shortcoming of boosting algorithms was also highlighted in the survey by natekin and knoll when discussing drawbacks of gradient boosting.in light of the above concerns, it would have a huge practical impact if a highly parallel boosting algorithm could have been developed. unfortunately, our main result shows that parallelizing boosting cannot be done without an exponential increase in the total work needed for training! weak to strong learning. to formalize the above claim that boosting cannot be parallelized, we need to introduce the theoretical framework in which we prove our impossibility result.boosting was introduced to address a theoretical question by kearns and valiant , asking whether a so-called weak learner can always be converted to a strong learner. in the following, we define these notions formally. first, let x be an input domain and c : x → {-1, 1} an unknown concept that we want to learn.a γ-weak learner for a concept c : x → {-1, 1}, is an algorithm that given some constant number of samples m 0 from any unknown distribution d over x , with constant probability returns a hypothesis t ≥ min{exp(ω(d/γ 2 )), exp(exp(ω(d)))} or l d (a s,c(s),w ) ≥ exp(-o(p max{γ, ln(tp)γ 2 /d})) in expectation over s and any random choices of a. here x , c and w depends on m, γ and d.let us discuss the implications of the theorem in detail. first, let us consider the simplest case where one wants only a constant number of parallel rounds p = o(1). then the lower bound states that either t ≥ exp(ω(d/γ 2 )), t ≥ exp(exp(ω(d))) or the error probability is at least exp(-o(max{γ, ln(t)γ 2 /d})). to make this error probability comparable to adaboost (1) requires t = exp(ω(dγ -2 ln m)). thus there is no way around either an exponential dependency on dγ -2 or a double-exponential dependency on d.if one is willing to use a super-constant number of rounds p, then there are a couple of possibilities for obtaining an error probability comparable to (1). first, one could have either p = exp(ω(d)), t = exp(ω(dγ -2 )) or t = exp(exp(ω(d))). these bounds are all exponential in either the vc-dimension or γ -2 or both. finally, there is the possibility of making the error e -o(pγ) small. to make it comparable to (1) requires p = ω(γ -1 ln m). this is only a γ factor less than adaboost. thus there is unfortunately not much hope for parallelizing boosting, and certainly not to a near-constant number of rounds. in all circumstances, if the number of rounds is significantly less than γ -1 ln m, then it requires an exponential number of queries t per round.finally, let us remark that it is sometimes stated that the unknown concept c belongs to some concept class c. here we implicitly assume that it belongs to the class of all concepts that may be γ-weak learned using the hypothesis set of the weak learner. alon et al. proved that this concept class has vc-dimension at most o d (γ -2+2/(d+1) ) when the hypothesis set of the γ-weak learner has vc-dimension d. here o d (•) hides factors depending only on d.a parallel boosting algorithm. to demonstrate the tightness of our lower bound, we also present a single-round boosting algorithm theorem 2. 
there is a weak-to-strong learner a, such that for any concept c, any γ-weak learner w for c using a hypothesis set of vc-dimension d, and any distribution d, when given m samples s ∼ d m , a has parallel complexity (1, exp(o(dγ -2 ln m))) and with probability 1 -δ over s, it outputs a hypothesis h withthe generalization error of our single-round algorithm thus matches that of adaboost (1), although making exponentially many queries to the weak learner. as shown by our lower bound, this is inevitable.let us also remark that using techniques in the two works , we can also remove the two logarithmic factors ln(m) ln(m/d) from the upper bound. as the logarithmic factors are not essential to our contribution, we merely comment here that the logarithmic factors can be removed by creating a logarithmic number of bootstrap sub-samples of the training data, running our algorithm in parallel on all sub-samples, and outputting a majority vote among the resulting classifiers. the resulting algorithm is then an optimal weak-to-strong learner by the previously mentioned sample complexity lower bound .finally, let us comment that there are previous boosting algorithms, based on branching programs, that invoke a weak learner in parallel , however none of these works use o(γ -2 ln m) rounds of boosting.previous lower bounds for parallelizing boosting. let us conclude by discussing related work by long and servedio . in their work, they also study the parallel complexity of boosting, however under somewhat different assumptions. concretely, they prove a lower bound showing that any weak-to-strong learner must have parallel complexity (p, t) satisfying p = ω(γ -2 ln m), regardless of the number of calls per round t. this strengthens a previous result by freund and is quantitatively a stronger lower bound than ours. however, they also model the problem in a way that makes their result weaker than ours. first, they make no assumption on the complexity/vc-dimension of the hypothesis set used by the weak-learner. on close inspection of their construction, their input domain x is the full cube {-1, 1} k with k = θ(γ -2 ln m) and they have one hypothesis h i for each coordinate i, making the prediction h i (x) = x i . the vc-dimension of this hypothesis set is ⌊lg 2 k⌋ = θ(ln(1/γ) + ln ln m), i.e. growing with m.secondly, and just as crucially, their lower bound assumes that the query distributions fed to the weak learner are obtained by "filtering". concretely, this means that the weight/probability mass put on each sample point x ∈ s is determined solely from the vector of predictions made by previously obtained hypotheses on x as well as the label c(x) (see their definition 2). in our lower bound, we make no assumption on how query distributions are chosen other than being computable from the hypotheses seen so far and the training data. this is a crucial difference, and in fact, our 1-round boosting algorithm explicitly queries for distributions that are not defined solely from labels and predictions while also using the bounded vc-dimension to define these distributions, thereby circumventing their lower bound. on close inspection of their construction, their input domain x is the full cube {-1, 1} k with k = θ(γ -2 ln m) and they have one hypothesis h i for each coordinate i, making the prediction h i (x) = x i . 
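the logarithmic-factor removal sketched above can be illustrated as follows, under the assumption that train_weak_to_strong stands in for the single-round weak-to-strong learner (this is a hedged sketch, not the paper's exact procedure): train on logarithmically many bootstrap sub-samples, which are independent of each other and could therefore run in parallel, and return the majority vote.

import numpy as np

def bagged_majority(X, y, train_weak_to_strong, n_models=None, rng=None):
    rng = np.random.default_rng(rng)
    m = len(y)
    n_models = n_models or max(1, int(np.ceil(np.log(m))))   # logarithmically many sub-samples
    models = []
    for _ in range(n_models):
        idx = rng.integers(0, m, size=m)                      # bootstrap sub-sample of the data
        models.append(train_weak_to_strong(X[idx], y[idx]))   # independent jobs: parallelizable
    def predict(Xq):
        votes = np.stack([model(Xq) for model in models])     # each model outputs labels in {-1, +1}
        return np.sign(votes.sum(axis=0))
    return predict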
for each of these groups, starting with h 1 , we have a random subset x i of x .the weak learner w does the following upon being queried for a distribution d ′ over x : it searches through the hypothesis sets h i , starting with i = 1, and returns the first hypothesis h it sees with. we think of the hypothesis sets h i and corresponding subsets x i as being responsible for one parallel round each, where h 1 , x 1 is responsible for the first round. since this subset is unknown to a deterministic a, if it queries w with a distribution d ′ that is somewhat uniform over x , then about a 1 -β -1 fraction of the mass is on points x ∈ x with x / ∈ x 1 . it suddenly becomes rather likely that less than a 1 -β -1 fraction of the mass is on points x ∈ x with x / ∈ x i . the intuition in the proof of lemma 1 is that whenever e occurs, the algorithm a has no knowledge of c inside x p \ s, hence it can only guess these labels, resulting in an error probability of ω(|x p |/m) = ω(β -p ). but conditioned on the outcomes of these random variables, c is still uniform random inside x p ∩ (x \ s) and thus its conditional entropy is at least |x p ∩ (x \ s)|. we then evaluate h on x p and let z denote the set of x ∈ x p for which h(x) = c(x). .assume x i avoids d ′ and let h be one of the 2 d/2 random hypotheses from h i that equal c for all x / ∈ x i .define an indicator i x for every x ∈ x \ y , taking the value 1 if x / ∈ x i and 0 otherwise. for such d ′ , we consider the 2 d/2 random h ∈ h i that return a uniform random value for every x ∈ x . concretely, given a γ-weak learner w and m samples s ∼ d m , our goal is to produce a voting classifier f (x) = sign(g(x)) with g(x) = (1/k) k i=1 h i (x) such that for every training sample (x, c(x)) ∈ s, it holds that c(x)g(x) ≥ γ/16. for a concept c : x → {-1, 1}, a hypothesis set h and a distribution d over x , a set of samples t is an ε-approximation for (c, d, h) if for all h ∈ h, it holds that the following classic result shows that a small random sample t ∼ d n is an ε-approximation with good probability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/136.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/136.txt new file mode 100644 index 0000000000000000000000000000000000000000..98c13fee74ccd570e6663f3c810b78b61b150946 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/136.txt @@ -0,0 +1 @@ +the application of the machine learning approach random forest (rf) (breiman 2001) has become very popular for the analysis of high-dimensional data, e.g. generated in genomics (x. chen and ishwaran 2012) and metabolomics (t. chen et al. 2013) experiments or genome-wide association studies (gwas) (nicholls et al. 2020). the reason for this popularity are specific advantages over other methods, such as the flexibility in terms of input and output variables, since both quantitative and qualitative variables can be used to build classification, regression (strobl, malley, and tutz 2009) and survival models (ishwaran et al. 2011). another advantage is the ability to generate variable importance measures (vims) that are utilized to select the relevant features for parsimonious prediction models or to identify and interpret differences between the samples. the most common vims are the permutation importance and the impurity importance, so called since it is calculated from the impurity gain that the variable contributes to the random forest. 
another importance measure is minimal depth, which is based on the position of the variables in the decision trees (ishwaran et al. 2010). minimal depth and the impurity importance, however, are known to be biased in favour of features with many categories (strobl et al. 2007) and high category frequencies (k. k. nicodemus 2011), which is particularly important in gwas (boulesteix et al. 2012). since it is not affected by these biases, the permutation importance has been preferred and various selection techniques based on this importance measure have been developed (szymczak et al. 2016;janitza, celik, and boulesteix 2018;kursa and rudnicki 2010) and compared (degenhardt, seifert, and szymczak 2019). a few years ago, a corrected, unbiased impurity importance measure, the actual impurity reduction (air), was introduced (nembrini, könig, and wright 2018). air is computed faster than the permutation importance, which is why this importance measure is very useful for application to high-dimensional data. all of the rf based importance measures are affected by the correlation structure of the features (kristin k. nicodemus et al. 2010), and conditional variable importance has been proposed to determine the corrected, individual impact of the features (strobl et al. 2008;debeer and strobl 2020). we have taken a different approach to this issue because we think that the relations between the features should be included into the analysis treating them as interacting components. therefore, we have deliberately included feature relations to improve variable importance calculation, the power of feature selection and interpretation of differences between samples (seifert, gundlach, and szymczak 2019). we have achieved this by the exploitation of surrogate variables that have been introduced to compensate for missing values in the data, representing the features that can replace another feature in a split as best as possible (breiman et al. 1984). based on surrogate variables, we developed surrogate minimal depth (smd), an importance measure that incorporates surrogate variables into the concept of minimal depth, and mean adjusted agreement, a relation parameter that is determined by the split agreement of the features across the random forest. since this relation parameter considers the mutual impact of the features on the random forest model, this parameter goes beyond the analysis of ordinary correlation coefficients enabling a comprehensive analysis of the complex interplay of features and outcome. we applied this relation analysis to reveil relations between features in gene expression (seifert, gundlach, and szymczak 2019), metabolomics (wenck et al. 2022), and various spectroscopic data sets (shakiba et al. 2022;seifert 2020), e.g. to illuminate the interaction of drugs with proteins and lipids in living cells (zivanovic et al. 2019). however, since both smd and mean adjusted agreement are affected by the previously described biases, their application has so far been limited. here we introduce two novel approaches for the analysis of feature relations and mutual importance called mutual forest impact (mfi) and mutual impurity reduction (mir). we will show that they are not affected by these biases and compare their performance with existing approaches by applying them to different simulated data sets. 
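neither smd, mfi nor mir is available in scikit-learn, but the two baseline importance measures discussed above can be contrasted on simulated data with standard tooling; the toy simulation below is a hypothetical example, not one of the simulation designs used in this work.

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.inspection import permutation_importance

rng = np.random.default_rng(0)
X = rng.normal(size=(500, 20))                       # 20 features, only the first two are causal
y = (X[:, 0] + 0.5 * X[:, 1] + rng.normal(scale=0.5, size=500) > 0).astype(int)

rf = RandomForestClassifier(n_estimators=500, random_state=0).fit(X, y)
impurity_vim = rf.feature_importances_               # impurity (gini) importance; air corrects its biases
perm = permutation_importance(rf, X, y, n_repeats=10, random_state=0)
permutation_vim = perm.importances_mean              # permutation importance

print("top features by permutation importance:", np.argsort(permutation_vim)[::-1][:5])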
for the selection of the predefined number s of surrogate splits, the surrogates with the highest values for the adjusted agreement adj are chosen. with p xi n and q xj n denoting the primary split based on variable x i and the surrogate split on variable x j of the node n and nodes(x i ) denoting the total number of nodes based on x i . in addition, 10 correlated variables (denoted as cx 1 , cx 2 , cx 3 , cx 7 , cx 8 , cx 9 ) were generated for each of x 1 , x 2 , x 3 , x 7 , x 8 and x 9 utilizing the simulatemodule function of the r package wgcna. (figure panels: (a) mean adjusted agreement, (b) mutual forest impact (mfi), (c) mean adjusted agreement, plotted against group sizes n(2) to n(20).) s2. smd with 5 and 10 surrogate variables (red circles and squares) shows the highest selection frequencies for all relevant variables, reaching 100% for x 1 , x 2 , x 3 , x 4 and cx 1 , more than 90% for x 5 , x 6 and cx 2 , as well as between 35 and 60% for cx 3 . (figure panels: mean adjusted agreement, surrogate minimal depth (smd), and actual impurity reduction (air), plotted against group sizes n(2) to n(20).)
(figure: variable selection frequency for air, smd (s = 5, 10, 20, 100) and mir (s = 5, 10, 20, 100), shown separately for relevant and non-relevant variables.) for the basic variables (x 1 -x 9 ), the selection frequencies are averaged across all 50 replicates, whereas for the six groups of correlated variables (cx 1 -cx 3 and cx 7 -cx 9 ) as well as the non-causal variables (ncv) the average frequencies across all replicates and variables in the respective group are shown. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/137.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/137.txt new file mode 100644 index 0000000000000000000000000000000000000000..36598063a73fee9e4c259425f70fd6a9e47fb117 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/137.txt @@ -0,0 +1 @@ +the compression of dnns targets mainly the inference side, but recent works aim to squeeze the training process. however, it remains challenging. the iclr 2021 haet challenge is an annual competition that evaluates the performance of classification models given a limited training time and data. the time budget for training is 10 minutes on an nvidia gpu v100 with 32 gb memory, running with an intel(r) xeon(r) gold 6230 cpu @ 2.10ghz processor, with 12 gb of ram. the dataset is a tiny unknown dataset of 10 classes containing 5k images for training and 1k images for testing during the development phase. the inputs to the model are 32×32 rgb images like cifar-10. a subset of mini-imagenet with 80×80 images is used to evaluate the performance of candidate models during the evaluation phase. applicants are allowed to use their own optimizer, training loop, and data augmentation process. introducing such limitations to the training process creates two main challenges. first, most classifiers need much more than 10 minutes to converge due to a large number of parameters. the general trend in most state-of-the-art dnns is to go deeper and wider and increase the model's capacity (simonyan & zisserman, 2014; szegedy et al., 2016; 2017; he et al., 2016). a larger network requires more time and computational resources to achieve its best accuracy. in this paper, we used a compressed model by applying neural architecture search to reduce the computation operations of the model. second, a small dataset leads the model to overfit. in this case, the model learns the training images too well and performs accurately on the training dataset but poorly on real-world images. since data augmentation (da) is an effective technique to generate additional data for training (taylor & nitschke, 2017), we added a combination of several da policies to our pipeline to overcome the overfitting problem.
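as a concrete illustration of such an augmentation pipeline (the exact policies the authors combined are not specified here, and the normalization statistics below are the commonly used cifar-10 values, i.e. assumptions), a 32×32 training transform in torchvision could look like the following.

from torchvision import transforms

train_transform = transforms.Compose([
    transforms.RandomCrop(32, padding=4),            # pad-and-crop: small random translations
    transforms.RandomHorizontalFlip(),               # mirror images with probability 0.5
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465),   # assumed cifar-10 channel means
                         (0.2470, 0.2435, 0.2616)),  # assumed cifar-10 channel stds
])

such label-preserving transforms effectively enlarge the 5k-image training set without extra data collection, which is the overfitting mitigation described above.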
the dataset is a tiny unknown dataset of 10 classes containing 5k images for training and 1k images for testing during the development phase. in this paper, we used a compressed model by applying neural architecture search to reduce the computation operations of the model. in the preprocessing module, we added a data sampler to build a dataset requested by the competition: 5k images for the train set and 1k images for the validation set. the model building module provides a designed model as we previewed a known model's nas variant. the goal of this third step was to find a smaller variant of the model that can perform equal to or better than the baseline model. we followed howard et al. the input resolution multiplier scales the input image and subsequently the internal representation of every layer. since the dataset is tiny, the model does not gain enough performance during training by using the basic techniques of image augmentation. cifar-10 is a collection of 60k color images of 32×32, divided as follows: 50k for the train set and 10k for the validation set. by applying nas on senet-18 and resnet-18, we were able to find a smaller variant for each model that performs similarly to its baseline model. although the input resolution is 18% bigger than its original model, the nomad-nas-resnet-18 has 22% fewer mac operations and 47% fewer parameters than resnet-18. table 3 shows the validation accuracy of our candidate model, nomad-nas-senet-18, after applying hpo.7% to 87. we used nomad, which implements a derivative-free optimization algorithm for both nas and hpo. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/138.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/138.txt new file mode 100644 index 0000000000000000000000000000000000000000..83a9160dfa94410bd13c88599825781864815ec9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/138.txt @@ -0,0 +1 @@ +online convex optimization (oco) has been a very attractive research problem for the last two decades, because of its versatility in modelling rich optimization problems. with oco, at each time t, an online algorithm selects an action a t , after which the loss incurred f t (a t ) is revealed. knowing all f t 's, 1 ≤ t ≤ t ahead of time, an optimal offline algorithm chooses action x ⋆ = arg min x t t=1 f t (x), and the static regret of an online algorithm is defined as r s = max ft,t=1,...,t t t=1 f t (a t )-t t=1 f t (x ⋆ ), i.e., an adversary can choose the functions f t . the name static comes from the fact that the optimal offline algorithm is constrained to use a single action. a large body of work is known for static regret - , where if the functions f t are convex, the optimal regret is θ( √ t ), while if functions f t are strongly convex then algorithms with regret at most o(log t ) are known. when functions f t are chosen by an adversary, but the arrival order is randomized, algorithms with better guarantees are also known . a natural generalization of the static regret is the dynamic regret , , where the regret for a comparator sequence u = (u 1 , . . . , u t ) is defined as r d (u) = t t=1 f t (a t ) -t t=1 f t (u t ). for this general dynamic regret definition, sub-linear (in t ) regret is not always possible, unless some structure is enforced on the comparator sequence.
for example, for a sequence u, defining v u = t t=2 ||u tu t-1 ||, the online gradient descent (ogd) algorithm was shown to achieve dynamic regret r d (u) = o( √ t (1 + v u )) , which has been improved to o( t (1 + v u ))in , matching the lower bound ω( t (1 + v u )) .a special case of (1) that is popularly studied - is by restricting u = x ⋆ = (x ⋆ 1 , . . . , x ⋆ t ), where x ⋆ t = arg min x f t (x), the sequence of local optimizers. moreover, in this case, once the action a t is chosen, the only information available is ∇f t (a t ).for this case, the best known bound on the dynamic regret has been shown to be o(v x ⋆ ) using the online projected gradient descent (opgd) algorithm, when functions f t are strongly convex, and smooth. under the special case that the minimizersx ⋆ t lie in the interior of the feasible set, o(v x ⋆ ) regret can be achieved when functions f t are just convex, and smooth .for strongly convex and smooth functions, defining s x ⋆ = t t=2 ||x ⋆ tx ⋆ t-1 || 2 , showed that o(min{v x ⋆ , s x ⋆ }) is also achievable as long as at each time t, gradient of f t at as many points required is available. most recently, improved this guarantee to o(min{v x ⋆ , s x ⋆ , f v }), whereis the maximum function variation over the feasible set χ in successive time slots.in this paper, we focus on the constrained version of the oco that has been considered more recently in - , where at each time t, the objective is to minimize the loss function f t subject to a constraint g t (x) ≤ 0. for applications of constrained oco, we refer to prior work - . similar to the unconstrained oco, even with constraints, the typical information structure is that information about f t , g t is revealed after the action a t is chosen. the optimizer at time t is, and the objective is to choose actions to minimize the dynamic regret (1) with u = x ⋆ , where the constraint is already absorbed in the definition of x ⋆ t , and the maximization in ( 1) is with respect to both f t and g t . in addition to the dynamic regret, in this constrained oco, an additional metric of interest is the constraint violation penalty, which can be defined in many different ways. for example,which measures the gap between the function g t evaluated at the optimal point and the chosen action orwhich just counts the overall constraint violation. we use p g (x ⋆ ) rather than p ′ g (x ⋆ ) since it is a stronger measure as p g (x ⋆ ) ≥in prior work, starting from , where functions f t , g t are assumed to be convex, lipschitz and smooth, an algorithm has been proposed that achieves), which was improved in , to getx ⋆ t 3/4 ), and most recently in , an algorithm based on the drift plus penalty method haswhere v g is as defined in (2) with f = g.however, notably considers the full information setting, where once a t is chosen, full functions f t and g t are revealed, and hence x ⋆ t can be computed. clearly, obtaining this information is highly imposing. moreover, also needs to know the diameter d of the feasible set. in comparison, the result of requires the knowledge of v x ⋆ instead of individual x ⋆ t , which is relatively less demanding, however, still very difficult to obtain in practice, as well as the knowledge of t and d.in this paper, we consider an alternate information structure that is less imposing than considered in , . the full feasible set at time t is χ t = {x ∈ χ : g t (x) < 0}. 
we assume that once the current action a t is chosen, for a fixed constant dist > 0 that is independent of t , a subset of χ t , set χ t (a t ) = {x : g t (x) ≤ 0} ∩ b(a t , dist) is made available, where b(x, r) is a ball with radius r centered at x. set χ t (a t ) captures the feasible set in the neighborhood of the current action. with full information, e.g., in , dist = ∞. we will show that our results hold for any dist > 0.with this new information structure, we consider the problem of simultaneously minimizing the dynamic regret and constraint violation penalty when f t , g t are strongly convex, lipschitz and smooth. generalizing the results when f t , g t are only convex, is part of ongoing work.towards this end, we propose an algorithm that uses the projected gradient descent (pgd) algorithm as a black box, and depending on the chosen action a t being feasible g t (a t ) < 0, on the boundary g t (a t ) = 0 or infeasible g t (a t ) > 0, executes pgd over a suitably chosen subset that may or may not be contained in the feasible region of g t . the main concept that the algorithm relies on is the property of the pgd algorithm when executed over a convex set i and starting point a t , is that the next action a t+1 satisfiesfor a constant c < 1, where x ⋆ i = min x∈i f (x) when f is strongly convex and smooth.if the whole feasible region χ t = {x ∈ χ : g t (x) < 0} was known, then using i = χ t , (3) will imply that the algorithm is making 'quick' progress towards the optimal point x ⋆ t . unfortunately only local information about the feasible region χ t is known. in particular, only χ t (a t ) = χ t ∩ b(a t , dist) is available for a constant dist. thus, we proceed in two steps. we identify a small region i t at time t around a t that is contained in χ t and use (3) to claim that we are making progress towards the optimal point in this subset i t (which could be far away from the global optimal). next, exploiting the strong convexity and the smoothness of the functions, we extend the same claim to the optimal point x ⋆ t which need not be in i t .since we have only local information about g t around a t , it can happen that the size of i t is arbitrarily small or i t is empty in case g t (a t ) > 0 (current choice is infeasible). for both these cases, we show that the algorithm makes progress of a finite distance towards the optimal point x ⋆ t in χ t , and establish a relation similar to (3). once we have (3), a simple application of the triangle inequality and the lipschitz condition, implies the result.our contributions.• we show that under the defined information structure, the proposed algorithm simultaneously achieves• as a function of information variable dist > 0, both r d (x ⋆ ) and p g (x ⋆ ) scale inverse polynomially, which is natural to expect since for any algorithm as information availability is decreased, (smaller value of dist), the regret should worsen.we do not know at this point if the algorithm achieves the best scaling in terms of dist.• in remark 3, we also argue that our result is the best one can hope for, given the minimal information structure.notation: for the rest of the paper, we follow the notation described as follows. for a set i ∈ r n , its interior is defined as int(i), while its boundary as boundary(i). b(x, r) is the ball of radius r centered at x. for a discrete set of points s, convex hull(x ∈ s) represents the convex hull of points x ∈ s. proj(x, s) is the projection of point x on set s, i.e. proj(x, s) = arg min y∈s ||x -y||. 
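the projected-gradient primitive that the algorithm builds on can be sketched as follows (a minimal illustration, not the full constrained algorithm described here); the projection is shown for a euclidean ball b(center, radius) only because that projection has a closed form, and grad_f_t is a placeholder for the gradient oracle.

import numpy as np

def project_ball(x, center, radius):
    # proj(x, b(center, radius)): the closest point of the ball to x
    d = x - center
    norm = np.linalg.norm(d)
    return x if norm <= radius else center + radius * d / norm

def pgd_step(a_t, grad_f_t, step_size, center, radius):
    # one projected gradient step over a set i, here illustrated with i = b(center, radius)
    return project_ball(a_t - step_size * grad_f_t(a_t), center, radius)

for strongly convex and smooth f t, iterating such steps contracts the distance to the minimizer of f t over i, which is the 'quick progress' property the text appeals to.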
knowing all f t 's, 1 ≤ t ≤ t ahead of time, an optimal offline algorithm chooses action x ⋆ = arg min x t t=1 f t (x), and the static regret of an online algorithm is defined as r s = max ft,t=1,. , x ⋆ t ), where x ⋆ t = arg min x f t (x), the sequence of local optimizers.for strongly convex and smooth functions, defining s x ⋆ = t t=2 ||x ⋆ tx ⋆ t-1 || 2 ,showed that o(min{v x ⋆ , s x ⋆ }) is also achievable as long as at each time t, gradient of f t at as many points required is available. we use p g (x ⋆ ) rather than p ′ g (x ⋆ ) since it is a stronger measure as p g (x ⋆ ) ≥. the full feasible set at time t is χ t = {x ∈ χ : g t (x) < 0}.1compared to prior work,that assume that f t and g t are convex, we assume that f t and g t are strongly convex with strong convexity parameters ν f , ν g , respectively. moreover, the algorithm can also access ∇f t (x), ∇g t (x) for at most one point x of its choice. p ′ g (x ⋆ ) can be negative, while p g (x ⋆ ) is always positive, and p g (x ⋆ ) ≥ p ′ g (x ⋆ ) since g t (x ⋆ t ) can be negative. a t is on the boundary of the feasible region for g t , and finally, iii) g t (a t ) > 0, i. when both f t , g t are strongly convex, lipschitz, and smooth (∇f t , ∇g t are lipschitz) for all t ≤ t and 1 ≤ t ≤ t, sup x∈χ ∇f t (x) ≤ g and sup x∈χ ∇g t (x) ≤ g, with information structure as defined, with algorithm 1, simultaneously,. in particular, at time t, after a t has been chosen, algorithm 1 requires only g t (a t ), χ t (a t ) and ∇f t (x), ∇g t (x), at x = a t or some x ∈ χ t (a t ). in, knowledge of v x ⋆ , d, t is needed over and above ∇f t (a t ), ∇g t (a t ), g t (a t ). when both f t , g t are strongly convex and smooth for all t ≤ t with information structure as defined, with algorithm 1, for the case when g t (a t ) < 0. thus, the update of the algorithm (16) is equivalent to executing subroutine optimize with starting point x t = a t , set i = i ′ t with step size µ = l g , for function h = g t .given the assumption that ∇g t (a t ) ≤ g, we get that (22) is equivalent to executing optimize with set i t = convex hull(a t , ât , x ⋆ t ), h = g t , and µ = max{g/dist, l g }. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/139.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/139.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e46d3cec804eb77b643338809fa170c3e0d950e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/139.txt @@ -0,0 +1 @@ +anomaly detection systems, able to discriminate abnormal, unexpected patterns and adapt to novel expected patterns in data, are known to be an essential part of risk-averse systems. in particular, anomaly detection systems assess the normal operational conditions allowing internet of things (iot) devices to stream high-fidelity data into control units.in their highly influential paper, chandola et al. review former research efforts spanning diverse application domains . recent studies highlight the need to develop holistic methods with general application and accessible tunability for operators , , .cook et al. denote substantial aspects that pose challenges to anomaly detection on iot, namely the context information of the measurement being temporal, spatial, and external, multivariate character, noise, and nonstationarity . feature engineering methods allow encoding contextual properties and increase the performance . 
however, extensive feature engineering may significantly increase dimensionality, requiring sizeable data storage and high computational resources .moreover, nonstationarity resulting from concept drift, an alternation in the pattern of data due to a change in statistical distribution, and change points, permanent changes to the system's state, represents a difficulty of a significant extent. in real-world scenarios, those changes are frequently unpredictable. therefore, the ability of an anomaly detection method to adapt to changes in the data structure is crucial for long-term deployments. the former scalability problem now introduces a significant latency in detector adaptation . incremental learning methods allowed adaptation while restraining the storage of the whole dataset. the supervised operator-in-the-loop solution offered by pannu et al. showed the detector's adaptation to data labeled on the flight. others approached the problem as sequential processing of bounded data buffers in univariate signals and multivariate systems .lastly, recent efforts to extend anomaly detection tasks to root cause isolation governed the development of explanatory methods capable of diagnosing and tracking faults across the system. studies can be split into two groups. the first group approaches explainability as the importance of individual features , , . those studies allow an explanation of novelty by considering features independently. the second group uses statistical learning creating models explainable via probability. yang et al. recently proposed a bayesian network (bn) for fault detection and diagnosis task. individual nodes of the network represent normally distributed variables, whereas the multiple regression model defines weights and relationships. using the predefined structure of the bn, the authors propose an offline-trained model with online detection and diagnosis . offline training, however, as we wrote earlier, do not allow adaptation to expected novel pattern and, therefore, to our knowledge, is not suitable for long-term operation on real iot devices.this paper emphasizes the importance of such adaptability in anomaly detection and proposes a method that addresses this challenge. here we report the discovery and characterization of an adaptive anomaly detection method for streaming iot data. the ability to diagnose multivariate data while providing root cause isolation, inherent in the univariate case, extends our previous contribution to the field as presented in . the proposed algorithm represents a general method for a broad range of safety-critical systems where anomaly diagnosis and identification is crucial.two case studies show that the proposed method based on dynamic joint normal distribution gives the capacity to explain novelties and isolate the root cause of anomalies and allow adaptation to change points advancing recently developed anomaly detection techniques to the long-term deployment of the service and cross-domain usage. we observe similar detection performance for the cost of lower scalability.the main contribution of the proposed solution to the developed body of research is that it:• provides both adaptability and interpretability • identifies systematic outliers and root cause • uses self-learning approach on streamed data • utilizes existing it infrastructure • establishes dynamic limits for signals ii. preliminaries in this section, we present the fundamental ideas that form the basis of the developed approach. 
subsection ii-a explains welford's online algorithm, which can adjust distribution to changes in real time. subsection ii-b proposes a two-pass implementation that can reverse the impact of expired samples. the math behind distribution modeling in subsection ii-c establishes the foundation for the gaussian anomaly detection model discussed in the final subsection ii-d of the preliminaries.anomaly detection systems, able to discriminate abnormal, unexpected patterns and adapt to novel expected patterns in data, are known to be an essential part of risk-averse systems. in particular, anomaly detection systems assess the normal operational conditions allowing internet of things (iot) devices to stream high-fidelity data into control units.moreover, nonstationarity resulting from concept drift, an alternation in the pattern of data due to a change in statistical distribution, and change points, permanent changes to the system's state, represents a difficulty of a significant extent.two case studies show that the proposed method based on dynamic joint normal distribution gives the capacity to explain novelties and isolate the root cause of anomalies and allow adaptation to change points advancing recently developed anomaly detection techniques to the long-term deployment of the service and cross-domain usage. the math behind distribution modeling in subsection ii-c establishes the foundation for the gaussian anomaly detection model discussed in the final subsection ii-d of the preliminaries.the cumulative distribution function (cdf) of a multivariate gaussian distribution describes the probability that all components of the random matrix x take on a value less than or equal to a particular point x in space, and can be used to evaluate the likelihood of observing a particular set of measurements or data points.global log-cdf of multivariate gaussian distribution computed, using the numerical algorithm proposed in, for process observation vector x i at time instance i, serves for the establishment of anomalous/normal behavior of the system as whole.input: expiration period t e output: system anomaly y g i , signal anomalies y s i , sampling anomaly y t i , change-point y c i , lower thresholds x l,i , upper thresholds x u,i , initialisation : 1: i ← 1; n ← 1; q ← 0.the deployment and operation of the anomaly detection system were successful as shown by its adaptation of changepoint on 7 th march 2022 that appeared due to the relocation of the battery storage system outdoors. the dynamics of the systems are elaborated in the model using welford's online algorithm with the capacity to update and revert sufficient parameters of multivariate gaussian distribution in time making it possible to elaborate non-stationarity in the process variables. at the second level, dynamic process limits based on ppf at threshold probability, given multivariate distribution parameters, help isolate the root cause of anomalies.the ability to detect and identify anomalies in the system, isolate the root cause of anomaly to specific signal or feature, and identify signal losses is shown in two case studies on real data. unlike many anomaly detection approaches, the proposed raid method does not require historical data or ground truth information about anomalies, relieving general limitations.the first case study performed on benchmark industrial data showed the ability to provide comparable results to other self-learning adaptable anomaly detection methods allowing, in addition, the root cause isolation. 
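as an illustrative aside, the welford-style updates described above can be sketched in a few lines of python (numpy assumed; the class and variable names are hypothetical and the actual raid implementation is not reproduced here): the forward update absorbs a new observation into a running mean and covariance, and the reverse step removes an expired observation, which is the primitive that lets the distribution model follow concept drift and change points.

import numpy as np

class OnlineGaussian:
    """welford-style running estimate of the mean and covariance of streaming vectors."""
    def __init__(self, dim):
        self.n = 0
        self.mean = np.zeros(dim)
        self.m2 = np.zeros((dim, dim))      # running sum of outer products of deviations

    def update(self, x):
        # absorb one observation x (shape: dim,)
        self.n += 1
        delta = x - self.mean
        self.mean = self.mean + delta / self.n
        self.m2 += np.outer(delta, x - self.mean)

    def revert(self, x):
        # remove the contribution of an expired observation x (assumes n > 1)
        mean_before = self.mean.copy()
        self.mean = (self.n * self.mean - x) / (self.n - 1)
        self.m2 -= np.outer(x - self.mean, x - mean_before)
        self.n -= 1

    def covariance(self):
        return self.m2 / (self.n - 1)

the running mean and covariance can then be fed into the multivariate gaussian log-cdf and ppf computations mentioned above to obtain anomaly scores and dynamic limits.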
the second case study, performed on real operation data of bess, examined the battery energy storage system and demonstrated the ability to capture system anomalies and provide less conservative limits to signals and extracted features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/14.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/14.txt new file mode 100644 index 0000000000000000000000000000000000000000..d651b3c10c7b9c1419f643f4677a8c111a7d673f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/14.txt @@ -0,0 +1 @@ +deep learning methods that use physics-based knowledge as inductive bias have recently shown promise in learning dynamical system models that respect physical laws and that generalize beyond the training dataset (djeumou et al., 2022a;menda et al., 2019;gupta et al., 2020;cranmer et al., 2020;greydanus et al., 2019;finzi et al., 2020;zhong et al., 2021a). these methods, which often use neural networks to parametrize select terms in differential operators, are able to learn complex relationships from data while also yielding models that are compact and interpretable.however, there remain barriers to the deployment of such algorithms in engineering applications. many systems, from robots interacting with their surroundings to large-scale multiphysics systems, involve large numbers of interacting components. these interactions between subsystems hamiltonian neural networks (phnns) on data generated by individual subsystems, presented in §4. we then compose these submodels to construct another phnn that models the composite system, presented in §5.can increase the complexity of the overall system's dynamics, rendering monolithic approaches to learning-which capture the entire system using a single model learned from data-challenging.we present a framework and algorithms for learning and composing neural network models of dynamical systems. the framework models individual subsystems independently, and uses physicsinformed interfaces between these submodels to capture their interactions. this compositional approach to learning provides a number benefits and novel capabilities that would not otherwise be possible. firstly, it simplifies the learning problems to be solved. submodels are trained on trajectory data generated by relatively simple subsystems. the dynamics of more complex composite systems are then predicted without requiring additional training. secondly, it provides a modular framework for data-driven modeling. previously learned component models can be composed in new ways to simulate different composite systems. finally, it provides a natural way to compose data-driven models with models derived from first principles.we achieve this compositionality by representing the system of interest, as well as each of its subsystems, as a port-hamiltonian neural network (phnn)-a class of deep learning models that use the port-hamiltonian systems formalism (duindam et al., 2009;van der schaft et al., 2014) to inform the model's structure. more specifically, phnns parametrize each subsystem's hamiltonian function, as well as how it dissipates energy, interacts with other subsystems, and how it responds to control inputs. we enforce known properties of the dissipation and interaction terms through the model's construction. 
the phnn's output is obtained by numerically integrating a differential equation involving all of these terms to predict the system's state at a future time.using the physics-informed structure provided by the phnn, we present a method to compose collections of phnns in order to obtain models of the corresponding composite systems, and we provide upper bounds on this composite model's prediction errors. figure 1 illustrates the approach. the composite system's hamiltonian, dissipation term, and control input term are all obtained by combining the corresponding terms from the learned submodels. interactions between the subsystems are captured by the interconnection structure of the composite system, which may be known a priori, or may itself be learned from data. in many cases this interconnection structure is given by a constant linear operator; we accordingly present a method to learn it via linear regression. in the general case, it may instead be parameterized using a neural network.we demonstrate the novel capabilities of the proposed framework through numerical examples involving interacting spring-mass-damper systems. models of these systems, which include nonlin-ear energy dissipation and control inputs, are learned independently. the dynamics of the composite system are accurately predicted without additional training. if the system's interconnection structure is unknown, we demonstrate that an accurate composition may be learned using an amount of training data that is negligible in comparison with that required to train a new model from scratch. finally, we empirically observe that the proposed compositions of phnns exhibit the property of passivity-a property of port-hamiltonian systems that is useful for control purposes. similarly, let x 1 , x 2 , u 1 , and u 2 denote the subsystem states and control inputs, and let x c := (x 1 , x 2 ), u c := (u 1 , u 2 ) denote the state and control input of the composite system.) and control input g c matrices are obtained by stacking the matrices r i (x i ) and g i diagonally, and the composite interconnection term j c is obtained by stacking j i diagonally and by including an additional pair of off-diagonal interaction terms.the hamiltonian of the composite system is defined as the sum of the subsystem hamiltonians, h c,θ (x c ) := h 1,θ (x 1 ) + . , r k,θ (x k )) and g c,θ (x c ) := diag(g 1,θ (x 1 ), . , j k (x k )) + c(x c ), where c(x c ) ∈ r nc×nc is a skew symmetric composition matrix encoding energy-conserving interactions between the state variables of the various subsystems. , τ c |dc| } of trajectories τ c = {(x c,0 , u c,0 , t 0 ), . , (x c,|τ | , u c,|τ | , t |τ | )}, where x c,i , and u c,i represent the composite state and control input at time t i , respectively.to learn the parameters φ using the dataset d c , we define a loss function l comp (φ, θ c , d c ) similarly to as in equation 3, where θ c denotes the collection of all of the parameter vectors θ i from the subsystem phnns. in general, c φ (x c ) will be a function of x c and we parametrize its entries as the output of a neural network.however, when the interconnection term j c (x c ) is a constant matrix, c φ may be parameterized as constant skew symmetric matrix. furthermore, suppose the composite system of interest may be represented as a composition p c c (x c , u c ) of the port-hamiltonian subsystems defined by the composition term c(x c ). 
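to make the composition step concrete, the following numpy sketch (the function name and the assumption of constant j, r, g matrices are simplifications for illustration, not the paper's implementation) stacks the learned subsystem terms block-diagonally, adds a skew-symmetric coupling matrix c, and sums the subsystem hamiltonians.

import numpy as np
from scipy.linalg import block_diag

def compose_port_hamiltonian(J_blocks, R_blocks, G_blocks, H_funcs, C):
    """compose per-subsystem terms into a single port-hamiltonian vector field."""
    J_c = block_diag(*J_blocks) + C          # interconnection: block-diagonal terms plus coupling
    R_c = block_diag(*R_blocks)              # dissipation, block-diagonal
    G_c = block_diag(*G_blocks)              # control input matrix, block-diagonal
    sizes = [J.shape[0] for J in J_blocks]

    def H_c(x_c):
        # composite hamiltonian: sum of the subsystem hamiltonians
        parts = np.split(x_c, np.cumsum(sizes)[:-1])
        return sum(H(x) for H, x in zip(H_funcs, parts))

    def dynamics(x_c, u_c, eps=1e-6):
        # finite-difference gradient of H_c; a learned phnn would use automatic differentiation
        grad = np.array([(H_c(x_c + eps * e) - H_c(x_c - eps * e)) / (2 * eps)
                         for e in np.eye(len(x_c))])
        return (J_c - R_c) @ grad + G_c @ u_c

    return dynamics

integrating the returned vector field with any ode solver then gives the composite model's state predictions without retraining the submodels.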
let c φ (x c ) denote the learned composition term and let c φ (x c ) i,j denote the submatrix that defines the interactions between subsystems i and j. furthermore, suppose the composite system of interest may be represented as a composition p c c (x c , u c ) of the port-hamiltonian subsystems defined by the composition term c(x c ). let c φ (x c ) denote the learned composition term and let c φ (x c ) i,j denote the submatrix that defines the interactions between subsystems i and j. equation 10 follows from equation 9 again by the triangle inequality and due to the block-diagonal structure of jc (x c ), r c (x c ), and g c (x c ). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/140.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/140.txt new file mode 100644 index 0000000000000000000000000000000000000000..dbc3135f7a4a3b713331b73a6438e0f8fa76b5d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/140.txt @@ -0,0 +1 @@ +graph learning has found extensive use in various real-world applications, including recommendation systems , traffic prediction , and molecular property prediction . real-world graph data typically exhibit diverse graph structures and heterogeneous nodes and edges. in graph-based recommendation systems, for instance, a node can represent a product or a customer, while an edge can indicate different interactions such as view, like, or purchase. similarly in biochemistry tasks, datasets can comprise molecules with various biochemistry properties and thherefore various graph structures. moreover, purposefully increasing the diversity of graph data structures in training sets has become a crucial aspect of gnn training. techniques such as graph data augmentations and large-scale pre-training on diverse graphs have been widely adopted to allow gnns for extracting more robust and generalizable features. meanwhile, many real-world gnn applications, such as recommendation systems and molecule virtual screening, usually involve processing a vast number of candidate samples and therefore demand computational efficiency. that invites the key question: can one effectively scale a gnn model's capacity to leverage larger-scale, more diverse graph data, without compromising its inference efficiency?a common limitation of many gnn architectures is that they are essentially "homogeneous" across the whole graph, i.e., forcing all nodes to share the same aggregation mechanism, regardless of the differences in their node features or neighborhood information. 2 that might be suboptimal when training on diverse graph structures, e.g, when some nodes may require information aggregated over longer ranges while others prefer shorter-range local information. our solution is the proposal of a novel gnn architecture dubbed graph mixture of experts (gmoe). it comprises multiple "experts" at each layer, with each expert being an independent message-passing function with its own trainable parameters. the idea establishes a new base to address the diversity challenges residing in graph data.throughout the training process, gmoe is designed to intelligently select aggregation experts tailored to each node. consequently, nodes with similar neighborhood information are guided towards the same aggregation experts. this fosters specialization within each gmoe expert, focusing on specific subsets of training samples with akin neighborhood patterns, regardless of range or aggregation levels. 
in order to harness the full spectrum of diversity, gmoe also incorporates aggregation experts with distinct inductive biases. for example, each gmoe layer is equipped with aggregation experts of varying hop sizes. those with larger hop sizes cater to nodes requiring information from more extended ranges, while the opposite holds true for those with smaller hop sizes.we have rigorously validated gmoe's effectiveness through a range of comprehensive molecular property prediction tasks, underscoring our commitment to deliberate diversity modeling. moreover, our analysis demonstrates that gmoe surpasses other gnn models in terms of inference efficiency, even when they possess similar-sized parameters, thanks to the dynamic expert selection. this efficiency proves crucial in real-world scenarios, such as virtual screening in libraries of trillion-scale magnitude or beyond. the potency of our approach is corroborated by extensive experiments on ten graph learning datasets within the ogb benchmark. for instance, gmoe enhances the roc-auc by 1.81% on ogbg-molhiv, 1.40% on ogbg-molbbbp, 0.95% on ogbn-proteins, and boosts hits@20 score by 0.89% on ogbl-ddi, when compared to the single-expert baseline. to gain deeper insights into our method, we conduct additional ablation studies and comprehensive analyses. for example, each gmoe layer is equipped with aggregation experts of varying hop sizes. in contrast, each layer of gmoe constitutes a mixture of experts, with each expert being a gcn/gin layer featuring different aggregation step sizes.our study takes a significant stride forward by introducing sparse moe to scale graph neural networks in an end-to-end fashion, enabling efficient learning on datasets featuring diverse graph structures. the gmoe layer comprises multiple experts, each utilizing either the hop-1 or hop-2 aggregation function. this allows for similar nodes to be assigned to the same experts when learning with diverse graph structures, thereby enabling each expert to specialize in a particular structure type. the gmoe layer's adaptive selection between the hop-1 and hop-2 experts enables the model to dynamically capture short-range or long-range information aggregation for each node.additional loss functions to mitigate gmoe collapse nonetheless, if this model is trained solely using the expectation-maximization loss, it may succumb to a trivial solution wherein only a single group of experts is consistently selected. the computation cost of gating functions in gmoe is meanwhile negligible compared to the cost of selected experts, since both w g ∈ r n×s and w n ∈ r n×s is in a much smaller dimension than w (i) ∈ r s×s given n ≪ s. for gmoe models, as described in section 3, we select k experts out of a total of n experts for each node, where m out of n experts are hop-1 aggregation functions and the rest n -m are hop-2 aggregation functions.observation 1: larger graphs prefer larger hop sizes we conducted an ablation study on two prominent molecule datasets, namely ogbg-molhiv and ogbg-molfreesolv, which exhibit the largest and smallest average graph sizes, respectively, among all molecular datasets in the ogb benchmark. however, after gmoe has undergone enough epochs of training, all experts have received ample updates, enabling them to effectively model their respective subgroups of data.in this work, we propose the graph mixture of experts (gmoe) model, aiming at addressing the challenges posed by diverse graph structures. 
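a rough numpy sketch of the per-node expert routing described above (hop_aggregate and the dense gating are hypothetical simplifications; the actual gmoe experts are gcn/gin layers with a learned sparse gate):

import numpy as np

def hop_aggregate(A, X, hops):
    """mean-style aggregation of node features X over a `hops`-step neighborhood of adjacency A."""
    P = A + np.eye(A.shape[0])                 # add self-loops
    P = P / P.sum(axis=1, keepdims=True)       # row-normalise
    out = X.copy()
    for _ in range(hops):
        out = P @ out
    return out

def gmoe_layer(A, X, expert_weights, gate_W, hop_sizes, k=2):
    """mixture-of-experts aggregation: every node routes to its top-k experts."""
    scores = X @ gate_W                        # per-node gating logits, shape (nodes, experts)
    # each expert aggregates with its own hop size and projection matrix
    agg = [hop_aggregate(A, X, h) @ W for h, W in zip(hop_sizes, expert_weights)]
    out = np.zeros_like(agg[0])
    for v in range(X.shape[0]):
        top = np.argsort(scores[v])[-k:]       # sparse top-k expert selection for node v
        w = np.exp(scores[v, top]); w /= w.sum()
        for weight, e in zip(w, top):
            out[v] += weight * agg[e][v]
    return out

because only the k selected experts are evaluated per node in the real model, inference cost stays close to that of a single-expert network, which is the efficiency argument made above.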
by incorporating multiple experts at each layer, each equipped with its own trainable parameters, gmoe introduces a novel approach to modeling graph data. through intelligent expert selection during training, gmoe ensures nodes with similar neighborhood information are directed towards the same aggregation experts, promoting specialization within each expert for specific subsets of training samples. another open question stemming from this work is whether we can apply gmoe on gnns with heterogeneous aggregation mechanisms, such as gat and graphtransformer, which may potentially further improve the performance on diverse graph data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/141.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/141.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ad86d848a2de1b5f12f69fc701b0b7525bc04ed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/141.txt @@ -0,0 +1 @@ +machine learning methods have achieved impressive results in recent years. besides famous applications in areas like chess (silver et al., 2017a) and go (silver et al., 2017b), ai plays a critical role in advances to autonomous driving (grigorescu et al., 2020), protein structure prediction (jumper et al., 2021), cancer identification (sammut et al., 2022) and in cybersecurity (dasgupta et al., 2022). however, in order for ai methods to take the next step and be commonly employed for critical applications without any humans in the loop, we want to be able to understand the decision making process. a critical component towards explainable ai is understanding which parts of the input data are utilised by the model in its decision making. in neural networks, the most popular approach is to study the outgoing weights and gradients from an individual input node. larger weights are reasonably assumed to indicate a greater significance of the particular input, and indeed, an entire class of interpretability metrics, namely gradient-based methods, are founded on this idea (simonyan et al., 2013;linardatos et al., 2020). yet, given the immense complexity of overparameterised, deep neural networks, current methods are in practice often insufficient to appropriately explain a model. using methods from the physical sciences, we propose a novel approach as a next step towards interpretable neural networks., 2015)., 2017)consider the derivative of the output (loss) with respect to individual input nodes. in particular, it will be relevant below to understand that any group of minima close together, perhaps separated from other groups of minima via high-lying transition states, may share commonalities., 2015), which are binary classification problems. firstly, we identify groups of minima, that are separated from other groups by a higher-lying transition state. the auc of the best solutions is > 0.95 for both problems. hence, these networks provide a realistic solutions to the set problems. in figure2, the funnel containing the global minimum (red) conserves 3 weights, all related to one specific input node. coloured edges indicate that for all minima in the specific group, these particular weights are conserved, i. (2022), the magnitude of individual weights must always be viewed with caution due to permutational isomers. we have shown that groups of minima share conserved weights and importantly, that these weights are critical to model performance. 
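a minimal sketch of the kind of check reported here (evaluate and the index sets are placeholders; this is illustrative, not the authors' code): permute the conserved weights of a trained network and compare the resulting performance drop with that from permuting an equally sized random set of weights.

import numpy as np

def permutation_test(weights, conserved_idx, evaluate, n_trials=100, rng=None):
    """compare performance after permuting conserved weights vs. a random index set of equal size."""
    rng = rng or np.random.default_rng(0)

    def score_with_permuted(idx):
        w = weights.copy()
        w[idx] = rng.permutation(w[idx])       # shuffle the selected weights among themselves
        return evaluate(w)                     # e.g. auc of the network with weight vector w

    conserved = [score_with_permuted(conserved_idx) for _ in range(n_trials)]
    random_sets = []
    for _ in range(n_trials):
        idx = rng.choice(len(weights), size=len(conserved_idx), replace=False)
        random_sets.append(score_with_permuted(idx))
    return float(np.mean(conserved)), float(np.mean(random_sets))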
randomly permuting the conserved weights strongly decreases model performance, much more so than permuting any other random set of weights s of equivalent cardinality |s|., 2015), we are unable to say which specific feature it is that helps the model in making a decision, but we can say where it can be found. in figure1, we know that both input nodes are relevant, which is confirmed by studying the conserved weights for the three given examples. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/142.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/142.txt new file mode 100644 index 0000000000000000000000000000000000000000..ff97f82a0f2f80659773a814fded9ef21b2df9a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/142.txt @@ -0,0 +1 @@ +in several environments, the agent must avoid both potential hazards while simultaneously optimizing its total accumulated reward. the existing literature in safe planning uses a constrained mdp formulation (altman 2021) and represents safety specifications as safety constraints that are derived from the immediately observable state and/or action (achiam et al. 2017;tessler, mankowitz, and mannor 2018;stooke, achiam, and abbeel 2020;chow et al. 2019;dalal et al. 2018;simão, jansen, and spaan 2021). however, obtaining a perfect description of the target environment becomes practically infeasible as autonomous agents are increasingly deployed in the real world (dietterich 2017). as a result, operating on such incomplete models may produce undesirable side effects, also called negative side effects (nses), which are often discovered after agent deployment (amodei et al. 2016;alizadeh alamdari et al. 2022;krakovna et al. 2019;saisubramanian, kamar, andzilberstein 2020, 2022). therefore, addressing such nses has be-come a key challenge to increase the safety of deployed ai agents (saisubramanian, kamar, and zilberstein 2022).one popular method used to solve mdps with safety constraints is the lagrange multiplier method (tessler, mankowitz, and mannor 2018;stooke, achiam, and abbeel 2020). in this approach, the lagrange multiplier (which acts as a penalty for constraint violation) is adapted slowly as the learning proceeds, converging to constraint-satisfying policy while also optimizing the primary reward objective. other approaches include lyapunov functions (chow et al. 2019), trust region methods (achiam et al. 2017), and constraint safety layer (dalal et al. 2018) methods. notably, all these methods model safety requirements as functions of safety cost functions which are assumed to be known and markovian and hence functions of immediate state and action. our work focuses on settings without such modelling assumptions, where we learn a classifier trained on trajectories labeled with different categories of safety labels for nses, hence modeling non-markovian safety side effects. we then integrate this classifier with our safe planning approach.a closely related line of work for addressing nses is presented in (saisubramanian, kamar, andzilberstein 2020, 2022;shah and krasheninnikov 2019). saisubramanian, kamar, and zilberstein (2022) define nse as undesired, unmodeled effects due to incomplete mdp model specifications. since not all undesirable effects can be foretold in advance, the model specification may lack sufficient fidelity to represent different types of nses. 
their work proposed a supervised learning model to learn about nses through various types of human feedback data about nses, including human demonstration. other research works propose different ways to infer nses, for instance through initial state configuration (shah and krasheninnikov 2019), reachability of other states (krakovna et al. 2018) or attainable utility (turner, hadfield-menell, and tadepalli 2020) after performing an action, ability to perform future task different from the current task (krakovna et al. 2020). bayes reasoning was also used in (hadfield-menell et al. 2017) to infer the true reward specification from a number of candidate reward functions. one common theme behind all these works is that not all nses can be anticipated precisely at design time and they need to be dynamically learned through human feedback or inferred. furthermore, all these works focus on markovian nses, rather than a more general model where nses are associated with trajectories. it is a critical gap in previous approaches, such as (saisubramanian, kamar, and zilberstein 2020), as they decompose the penalty associated with an nse into additive penalties associated with each state-action pair, which may not always be the case in a real world setting. alternatively, one can expand the state space definition to make nses markovian, however this may make the agent's primary task computationally challenging due to complex state space, and knowledge about different types of nses is also not known apriori.another line of work (junges et al. 2016;alshiekh et al. 2018;jansen et al. 2020;jothimurugan et al. 2021) has explored using logic specifications, such as temporal logic, to specify safety constraints which can be non-markovian. in these methods, safety criteria need to be pre-specified. as nses are side effects unmodeled at the design stage, it is not feasible to pre-specify the nse criteria and these methods are not suitable for nse setting. in contrast, our method only requires trajectory labels for nse trajectories. in addition, temporal logic methods typically assume a safety-relevant abstraction of the original mdp (i.e. safetyrelevant mdp) (alshiekh et al. 2018;jansen et al. 2020) which requires significant efforts from domain experts to construct, as opposed to just using the nse labels. lastly, our method learns to optimize reward and satisfy constraint jointly, whereas temporal logic methods synthesize a safety shield in-silos (alshiekh et al. 2018;jansen et al. 2020). the shield either pre-specifies allowable actions or post-corrects the actions selected by the agent. this may lead to a suboptimal strategy with reduced action space.our main contributions are the following. first, we formulate safe mdp planning problem as constrained mdp (altman 2021). unlike previous methods, we do not assume a markovian safety cost function is given. we utilize a supervised learning model to learn safety characteristics of a trajectory from the nse data. second, we integrate the learned safety model and the mdp model in a single computation graph, and develop a lagrange multiplier (bertsekas 1999) method to optimize the policy while respecting trajectory-based safety constraints. we (a) proposed a method applicable to both markovian and non-markovian nses, and discrete and continuous domains, with much higher scalability than the previous method (saisubramanian, kamar, and zilberstein 2020); (b) developed a modelfree approach and show that our model-based method is significantly better. 
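a rough sketch of that lagrangian scheme (plain python; policy_loss, nse_classifier and the budget are placeholders, and the actual method optimises a neural policy with gradients through the learned rnn classifier): the multiplier scales the trajectory-level safety penalty and is raised whenever the constraint is violated.

def lagrangian_step(policy_params, lam, trajectories, policy_loss, nse_classifier,
                    budget=0.1, lr_policy=1e-3, lr_lambda=1e-2):
    """one primal-dual update with a trajectory-level (non-markovian) safety signal."""
    # predicted probability that each whole trajectory causes a negative side effect
    nse_probs = [nse_classifier(traj) for traj in trajectories]
    expected_nse = sum(nse_probs) / len(nse_probs)

    # primal step: minimise reward loss plus the lambda-weighted safety penalty
    _, grad = policy_loss(policy_params, trajectories, penalty=lam * expected_nse)
    policy_params = [p - lr_policy * g for p, g in zip(policy_params, grad)]

    # dual step: increase lambda if the constraint (expected nse <= budget) is violated
    lam = max(0.0, lam + lr_lambda * (expected_nse - budget))
    return policy_params, lam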
finally, our empirical results on a variety of discrete and continuous domains show that our highly scalable approach can satisfy complex non-markovian safety constraints, while optimizing agent's total returns, and outperform previous best approach for markovian nses. 2019), trust region methods(achiam et al. 2017), and constraint safety layer(dalal et al. one common theme behind all these works is that not all nses can be anticipated precisely at design time and they need to be dynamically learned through human feedback or inferred. second, we integrate the learned safety model and the mdp model in a single computation graph, and develop a lagrange multiplier (bertsekas 1999) method to optimize the policy while respecting trajectory-based safety constraints. we (a) proposed a method applicable to both markovian and non-markovian nses, and discrete and continuous domains, with much higher scalability than the previous method (saisubramanian, kamar, and zilberstein 2020); (b) developed a modelfree approach and show that our model-based method is significantly better. finally, our empirical results on a variety of discrete and continuous domains show that our highly scalable approach can satisfy complex non-markovian safety constraints, while optimizing agent's total returns, and outperform previous best approach for markovian nses. to address these issues, next we discuss a formulation in which data about safety aspects is collected in the form of negative side effects (nses), and a safety model is learned from such a dataset. we consider non-markovian nses as nse severity can depend on the entire trajectory, in contrast to markovian nses which depend on a single state-action pair(saisubramanian, kamar, and zilberstein 2020). non-markovian nses are more general than markovian nses, and can model complex nses without the need to expand the state representation of the mdp. as more nse data is gathered, we can update our nse classifier with additional data, without the need to always change the underlying state space and reward function each time new nses are discovered.as we are using markovian nses for the grid-world experiments, we report the nse values using the same nse penalty as markov-ha-s: fixed numerical value 5 and 10 for mild and severe nses respectively. the agent incurs no nse if it passes through the dirty zone fewer than two timesteps throughout the entire trajectory (mild nse: 2 -3 timesteps, severe nse: ≥ 4 timesteps in the dirty zone).75 • c).we have presented a method for safe mdp planning that avoids negative side effects (nses), which may arise during policy execution based on an incomplete model of a complex real world environment. unlike previous works that require knowledge of numerical safety cost functions, our method learns a rnn-based classifier that learns to label state-action trajectories with different safety categories based on collected nse dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/143.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/143.txt new file mode 100644 index 0000000000000000000000000000000000000000..c55faf2c7d18be4d38aa7a77a9e290723c5bb0ee --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/143.txt @@ -0,0 +1 @@ +bounded model checking (bmc) has been successfully applied to prove whether a piece of software meets the expected requirements. 
however, when the software features concurrent execution, the main challenge becomes exploring the exponentially growing program state space. for this reason, there have been many efforts to improve bmc for concurrent software, including sequentialization, dedicated theories for multi-threaded programs, or combining bmc with fuzzing. a state-of-the-art example of such efforts is esbmc, an efficient smt-based bounded model checker for c and c++ programs. more specifically, esbmc explores each thread interleaving up to a maximum number of context switches. in order to control the program state explosion, bmc tools like esbmc can be executed with different settings (flags). these flags control the various mathematical techniques used by the bmc tool. unfortunately, expert knowledge is required to set the optimal flags for a given concurrent program. as a consequence, bmc tools are often executed with their default settings, which leads to compromises in their performance. in this paper, we introduce the metaverifier tool lf-checker (learn-from-the-checker), which is able to predict the optimal flags for a given concurrent program. specifically, lf-checker uses esbmc as a back-end verifier, and predicts its optimal settings with a decision tree. the decision tree is trained to predict the best flag configuration for a given program. 2 software architecture the general workflow of lf-checker is shown in fig. 1. first, we analyse the source code of the program-under-test (put), and extract useful features. then, we pass these features to the ml model, which predicts the best flags for esbmc. finally, we run esbmc on the put with these flags, with the objective of proving or disproving a given safety property. since our contribution is centered around the ml model, we share the details of its design below. for this reason, there have been many efforts to improve bmc for concurrent software, including sequentialization, dedicated theories for multi-threaded programs, or combining bmc with fuzzing. a state-of-the-art example of such efforts is esbmc, an efficient smt-based bounded model checker for c and c++ programs. in order to control the program state explosion, bmc tools like esbmc can be executed with different settings (flags). unfortunately, expert knowledge is required to set the optimal flags for a given concurrent program. in this paper, we introduce the metaverifier tool lf-checker (learn-from-the-checker), which is able to predict the optimal flags for a given concurrent program. specifically, lf-checker uses esbmc as a back-end verifier, and predicts its optimal settings with a decision tree. then, we pass these features to the ml model, which predicts the best flags for esbmc. finally, we run esbmc on the put with these flags, with the objective of proving or disproving a given safety property. esbmc exposes a large number of flags that regulate its verification strategy for concurrent programs. for each of the 153 training benchmarks, we run esbmc for 3 minutes with 240 different combinations of flags. the decision tree learns to predict the 0-5 output class given the features of the put and a given choice of flags. at runtime, we use the decision tree to predict the optimal set of flags for esbmc. this shows that, by optimising the flags of esbmc for different programs with our predictor, esbmc can output 34% more correct results compared to using its default settings. 
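a minimal sketch, assuming scikit-learn, of the flag-prediction step just described (the feature extraction, the encoding of flag combinations, and the meaning of the output classes are placeholders; the real lf-checker pipeline is not reproduced here).

from sklearn.tree import DecisionTreeClassifier

def train_flag_predictor(features, flag_combinations, outcome_classes):
    # each training row pairs program features with one candidate flag combination;
    # the label is the outcome class observed when running esbmc with that pairing.
    X = [list(f) + list(c) for f, c in zip(features, flag_combinations)]
    clf = DecisionTreeClassifier(max_depth=8, random_state=0)
    clf.fit(X, outcome_classes)
    return clf

def best_flags(clf, program_features, candidate_flag_combinations):
    # score every candidate flag combination for the new program and keep the best one,
    # assuming (hypothetically) that a larger class index means a better verification outcome
    scored = [(clf.predict([list(program_features) + list(c)])[0], c)
              for c in candidate_flag_combinations]
    return max(scored, key=lambda t: t[0])[1]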
this is because of an issue in the latest version of esbmc which leads to wrong verdicts in a small number of concurrent benchmarks when we merge goto statements after a context switch. crucially, these benchmarks have similar program features to those that are handled correctly by esbmc, which leaves no opportunity for the ml model to filter them out. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/144.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/144.txt new file mode 100644 index 0000000000000000000000000000000000000000..1aec65d7b813e08bd68a9a03fbba13552381b0d9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/144.txt @@ -0,0 +1 @@ +m any of the latest efforts in machine learning are focused on bringing learning as close to data collection as possible. this is of practical interest in a diverse array of applications, sensor networks in particular. in sensor networks, nodes are often communication-constrained, operate distributively, and can send only a few bits to their neighbors.there are several papers in the literature that consider the problem of designing distributed learning methods for online multi-kernel learning (omkl). in particular, contains the design and regret analysis of quantized and distributed omkl for a fully connected (complete) graph. although the fully connected case is important, it is not applicable in many sensor networks. this letter designs a new algorithm that expands the theoretical guarantees of the distributed and quantized omkl in to an arbitrary general non-fully connected graph. unlike , our method uses a novel hidden state quantization scheme in conjunction with a gossip algorithm. one major advantage, compared with , is that our algorithm does not need to communicate the loss function. another example of a distributed omkl that works for a non-fully connected graph is presented in . it requires communicating an unlimited number of bits between neighbors, and its assumptions are stricter than those of . to manage the required communication throughput, which is often a performance bottleneck , , this letter provides the properties of under looser assumptions and while communicating only a limited number of bits between neighboring nodes.in , a distributed stochastic gradient descent scheme with quantized communication is discussed. the proposed algorithm uses an extra shared "hidden" state and can be used for the single-kernel case, but not multi-kernel settings. in addition to extending to general non-fully connected networks, our work can be considered as the extension of to multi-kernel cases.there are a variety of learning tasks that omkl is suited for, such as multi-kernel classification, regression, and clustering , , . for a more in-depth review of the literature, see . at each instant of time t, node j receives the data string x j t ∈ r d and the desired response y j t ∈ r., where κ(x, x j t ) : r d × r d → r is a kernel function that measures the similarity between x and x j t . the weights are normalized as wj p,t = w j p,t / p p=1 w j p,t to have. thus, the weights define the gossip matrix w , which is j × j and doubly stochastic,,. for ease of notation, we will denote l(θ j p,t z vp (x j t ), y t ) as l(θ j p,t ) from now on. for the gossip step, we define a hidden state h j p,t ∈ r 2d for each node j that is known for all neighbors in s j because it is updated by the same quantized values known to all neighbors. 
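before the update is spelled out in the algorithm listing that follows, here is a simplified numpy sketch of one round at node j for a single kernel (the sign quantiser and the assumption that the hidden state advances by the quantised difference are illustrative simplifications, not the paper's exact scheme).

import numpy as np

def quantize(v, scale=0.1):
    # crude fixed-scale sign quantiser standing in for the paper's limited-bit quantisation
    return scale * np.sign(v)

def omkl_round(theta_aux, hidden, W, j, grad_fn, gamma=0.5, eta=0.1):
    """theta_aux: auxiliary local states (J, d); hidden: shared hidden states (J, d);
    W: doubly stochastic gossip matrix (J, J); grad_fn: local loss gradient at a point."""
    # gossip step using the commonly known hidden states
    theta_j = theta_aux[j] + gamma * sum(W[i, j] * (hidden[i] - hidden[j])
                                         for i in range(W.shape[0]))
    # quantised update to the hidden state, broadcast to the neighbours
    hidden[j] = hidden[j] + quantize(theta_j - hidden[j])
    # local gradient step produces the next auxiliary state
    theta_aux[j] = theta_j - eta * grad_fn(theta_j)
    return theta_j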
the new local state, θ^j_{p,t}, will be a sum of θ̂^j_{p,t-1}, which is an auxiliary variable where we store the local learning, plus a gossip step of size γ with the information from the hidden states. subsequently, each node j prepares the update to the hidden state by quantizing the difference between its local state θ^j_{p,t} and the common hidden state h^j_{p,t}. algorithm 1 (gossiped and quantized omkl at node j): initialize w^j_p = 1/p and h^j_p = θ^j_p = θ̂^j_p = 0 for all p; then, at each time t and for all p: obtain data x^j_t and construct z_p(x^j_t); compute l(f^j_{p,t}(x^j_t)); update using the hidden states θ^j_{p,t} = θ̂^j_{p,t-1} + γ Σ_{i=1}^J w_ij (h^i_{p,t} − h^j_{p,t}); compress q^j_{p,t} = q(θ^j_{p,t} − h^j_{p,t}); as in (7), update θ̂^j_{p,t} = θ^j_{p,t} − η∇l(θ^j_{p,t}). define θ̄_{p,t} = (1/J) Σ_{j=1}^J θ^j_{p,t}, and the corresponding averages of θ̂^j_{p,t} and ∇l(θ^j_{p,t}) analogously. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/145.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/145.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f81d57eb437620f44f65a1df8447d7d93fef4f3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/145.txt @@ -0,0 +1 @@ +looking at the figures, it is obvious that the labelproportionlocal or labelproportiontodense algorithm always achieves better results than the knn as a centralized approach. this is probably due to the fact that the integrated lstm in our approach is better able to represent temporal components. another noticeable aspect is the increasing mse value for increasing privacy guarantees by reducing ε. this is also an expected degradation in terms of accuracy, since adding noise reduces the information content of the neighbors. but it is interesting to see that the performance becomes worse than that of the labelproportionlocal algorithm, which does not use additional neighbor information. only when using no noise is the performance of labelproportiontodense the best on metr-la and pems-bay in comparison to the other algorithms. based on these general results, one can say that our approach of sending histograms between direct nodes improves the general prediction performance. by using the lstm as the central learning model for local data, we were able to outperform the centralized knn algorithm. using ε-differential privacy to ensure privacy of exchanged data, we have measured a significant increase of the mse error, which results in worse performance than when using no neighbor information. 2) prediction curve pems-bay - overview: because a single metric is not very meaningful, we plotted the predicted values of the different algorithms on the pems-bay dataset in figure 4. the actual measured data is depicted by the solid grey line in the background. in the foreground the three different algorithms are compared to each other. this chart contains no privacy-preserving approach with privacy parameter ε. predictions of the algorithms are split into sections on the test set, for better visibility. therefore, the predictions of the knn, shown as the blue line, are only plotted for the first 3500 steps. following this, the prediction of the labelproportionlocal has been plotted in orange up to time step 7500. finally, the predicted values of the labelproportiontodense approach are shown in black. as seen on the time scale, we have predictions of approximately 10500 time steps, which is equal to 10500 * 5min = 52500min = 875h ≈ 36 days. therefore, only the general prediction shape can be seen, which is quite accurate. 
at most times, the knn and our approaches nearly met the actual prediction curve. there are some exceptions, where the data cannot be fitted very well. especially peaks in the actual car speed measurements are not recognized by the approaches. for example, the knn does not predict the sharp drop in speed at around time step 150. the same issue occurs, when looking at the labelproportionlocal approach around time step 5800. the only algorithm, that fits the curve nearly perfectly by viewing this in large scale is the labelproportiontodense approach. in this overview, the cycles where traffic speed is going down during rush hour are very good to see. during the normal day, the density is oscillating around 70 which is the normal expected traffic speed curve. this can be the reason why our approaches reach better prediction accuracy, because time dependent features are extracted by the lstm.3) predictioncurve pems-bay -detailed: in the plot over the entire test period, you can see tendencies, indicating the prediction accuracy. for better details, we have cut out a section of 200 time steps and displayed it in figure 5. this plot represents 200 * 5min = 1000min ≈ 17h, which is part of a day and night phase. it can also be seen by assuming that rush-hour is visible in the time steps starting from 6535. the night phase then begins around time step 6610, where a near constant speed is driven. we chose the slice by analyzing the dataset for the area, where the mse was the lowest. therefore, we chose the concrete slice from time frame 6468 to 6668.the actual measurement values are visible in gray. the knn, as well as the labelproportionlocal and labelpro-portiontodense are shown solid in the same color as the previous chart. additionally there are two more variations plotted. those are our approaches (labelproportiontodense), where noise is applied by ε-differential privacy. variations with noise are highlighted by dashed (ε = 0.5) or dotted (ε = 0.1) lines.the figure indicates, it is clear that the prediction is not as accurate as indicated in figure 4. here, all little deviations in density prediction are getting noticed.by focusing on the knn it looks like it is mostly under predicting the real world measurements. only for steep drops, visible at around time step 6535, the prediction is not close to the real curve. in general, the prediction of knn looks somewhat like a step function. therefore, all the little variations are not predicted well.compared to this, our approach without histogram transfer (labelproportionlocal) fits the real world measurements quite well. especially the steep drop, which the knn could not handle, is fitted well. for most predictions it is just slightly above the real world data and at some peaks, like in time step 6590, it is shifted along the time axis. at those points, the peak is predicted a bit later. but in general, this prediction curve is quite close to the original measurements.the only better approach than this is the labelproportion-todense approach. variations between the prediction of the approach and the real data is almost not visible. only slight jumps of the original data, which are not relevant for the general traffic speed, are not predicted. by looking at figure 5, this algorithm is approaching the best results from all tested ones.however no privacy guarantee can be given for the histograms. therefore we added differentially private variations. for ε = 0.5, the dashed curve shows the predictions. 
those predictions are also quite good and fit the real world data well. sometimes peaks are predicted with no real speed peak. this can be seen at time step 6485.for the labelproportiontodense with ε = 0.1, the dotted line shows the predictions. those are much worse than all other algorithms. when analyzing the curve, one can see that it is mostly a prediction around a traffic density of 66. it varies in the prediction value, but not by much. therefore, it looks like the approach has learned to just predict the mean value. the reason for this can be the applied noise to the histograms. the noise could have resulted in nearly equally distributed histograms, so that no information is included. when this happened, the model could also learn only the mean distribution, or mean value. for this reason, it looks like a privacy factor of ε = 0.1 is too high to gain useful information from neighboring histograms that are afterwards normalized again over all neighbors. especially with the introduction of the general data protection regulation (gdpr), a lot has changed in terms of data privacy, which must be implemented by everyone using sensitive data.the two proposed fully distributed deep learning algorithms will ensure flexible data privacy by setting a hyperparameter to balance both aspects: privacy and prediction accuracy. for example, the dp-llp algorithm, introduced by sachweh et al. the authors introduced a variant of label proportions, originally developed in-, extended by differential privacy to ensure that data transfer is protected. recent work that combines learning from label proportions with differential privacy (compare next section)is promising, and we therefore utilize data aggregation for our approach aswell.data aggregation methods, which are applied on very pure data are problematic, because resulting histograms contain all information, although data points were aggregated. each data point contains information about the traffic density and a normalized time value. data transfer is happening by peer-to-peer communication and without disclosing private information by using noise as described in the differential privacy subsection.based on these general results, one can say that our approach with sending histograms between direct nodes is improving the general prediction performance. by using the lstm as the central learning model for local data, we achieved to outperform the centralized knn algorithm. using ε-differential privacy to ensure privacy of exchanged data, we have measured a significant increase of the mse error, which results in worse performance, than when using no neighbor information.2) prediction curve pems-bay -overview: because a single metric is not very meaningful, we plotted the predicted values of the different algorithms on the pems-bay dataset in figure4. for most predictions it is just slightly above the real world data and at some peaks, like in time step 6590, it is shifted along the time axis.in conclusion, the general approach adopted fromresults in very good prediction accuracy on spatio-temporal data, as used in our evaluation with pems-bay, metr-la and lust. we could show, that the local approach of using an lstm combined with relu and dense-layer results in a very good prediction, because temporal information is extracted well by the lstm model. 
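a small sketch of how the exchanged histograms can be protected with ε-differential privacy as discussed above (laplace mechanism with unit sensitivity assumed; the exact calibration and normalisation used in the paper are not reproduced):

import numpy as np

def privatize_histogram(counts, epsilon, rng=None):
    """add laplace noise to per-bin counts, then clip and renormalise to a valid distribution."""
    rng = rng or np.random.default_rng()
    noisy = counts + rng.laplace(scale=1.0 / epsilon, size=len(counts))
    noisy = np.clip(noisy, 0.0, None)           # bin counts cannot be negative
    total = noisy.sum()
    return noisy / total if total > 0 else np.full(len(counts), 1.0 / len(counts))

hist = np.array([12.0, 30.0, 5.0, 3.0])
print(privatize_histogram(hist, epsilon=0.5))   # moderate noise
print(privatize_histogram(hist, epsilon=0.1))   # heavy noise, tends towards a flat histogram

the ε = 0.1 case illustrates why the strongly privatised variant degrades towards predicting the mean: the noisy histograms carry little information beyond a near-uniform distribution.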
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/146.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/146.txt new file mode 100644 index 0000000000000000000000000000000000000000..5b5b301f06195a981c0a48ac63ac518dabad387d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/146.txt @@ -0,0 +1 @@ +iot devices bring a number of vulnerabilities that make them attractive to cyber criminals. weak (or default) passwords and open insecure service ports are considered the top two exploited vulnerabilities of iot devices . vulnerable iot devices are now becoming a risk challenge for isps . there exist commercial solutions like that help isps tackle the challenge of vulnerable iot devices. the first step in every risk assessment and security analysis is obtaining visibility by discovering iot devices connected to the network and determining their type (e.g., camera, tv, speaker) using characteristics known a priori.iot devices often show identifiable patterns in their network behaviors, making them relatively distinguishable from each other (though behavioral overlaps are common too ). however, these patterns may change by the time and context of their use across various networks. the behavioral change is more pronounced when iot traffic inference is the objective of an isp tasked to serve and manage tens of thousands of home networks, each with a unique composition of assets and users, all distributed across sizable geography (city, state, or country). several existing research works studied different methods that isps can leverage to detect iot devices in residential networks . prior works tend to train a global model with machine learning algorithms and finetuned it by traffic data (testing and training) collected from a testbed (representing a single context). due to the limitation of data or evaluation scenarios, they did not encounter context variations and could not highlight and/or address their impacts.context-aware or contextualized modeling is an alternative to global modeling that consists of three stages: (1) a set of training contexts are identified; (2) models are trained on a per-context basis; and, (3) a given testing sample is predicted by the "closest" model selected from all available trained models . note that each contextualized model is trained by data collected from a single context (narrow and relatively tighter knowledge). in contrast, the global model captures data from multiple contexts (broad and relatively loose knowledge). limiting a model to learn a single context may increase the chance of over-fitting, while exposing a model to a diversified data set may not necessarily result in better performance, especially when the data is noisy. though contextualized modeling has been studied in other domains , to the best of our knowledge, no relevant study is found in the area of iot traffic inference.this paper compares global and contextualized modeling for classifying iot devices in home networks. specifically, we aim to answer the following question: "given a labeled dataset (training) from homes and an unlabeled dataset (testing) from other homes, which of the global versus contextualized modeling does yield better performance in classifying devices during the testing phase?" our first contribution highlights the presence of concept drifts in iot traffic behavior by analyzing more than 6m flow records ( §2). 
for our second contribution, we develop global and contextualized models (aiming to manage concept drifts in the space domain) and compare their performance ( §3). for the third contribution, we demonstrate that a dynamic inference can be applied to a combination of global and contextualized models to address concept drifts in the time domain ( §4).context-aware or contextualized modeling is an alternative to global modeling that consists of three stages: (1) a set of training contexts are identified; (2) models are trained on a per-context basis; and, (3) a given testing sample is predicted by the "closest" model selected from all available trained models. limiting a model to learn a single context may increase the chance of over-fitting, while exposing a model to a diversified data set may not necessarily result in better performance, especially when the data is noisy. specifically, we aim to answer the following question: "given a labeled dataset (training) from homes and an unlabeled dataset (testing) from other homes, which of the global versus contextualized modeling does yield better performance in classifying devices during the testing phase?" our first contribution highlights the presence of concept drifts in iot traffic behavior by analyzing more than 6m flow records ( §2). this challenge can be perceived differently in the time domain (when the model was trained on data collected some time ago) versus the space domain (when the model was trained on data of different homes with slight variations in context). contextualized modeling, instead, trains an isolated model specific to a newly added/updated home, which is relatively faster and less expensive computationally; and (2) although this paper selects the best model by leveraging ground truth labels, a practical approach like what we will explain in §4 may not always be able to select the best model from a set of available models (hence, affecting the inference performance).evaluating inference strategies: for our evaluation, we assume labeled data of five 1 homes is available for training (seen), and data of the remaining seven homes is used for testing (unseen).it is important to note that contextualized modeling requires the best model assigned to an unseen home before the testing phase. given an unseen home, the best model (one of five 's in our evaluation) is selected and assigned based on the highest accuracy obtained by applying 's to the labeled data of unseen homes (which is assumed to be available in this paper). in fact, in nine runs (out of ten), we found at least one unseen home where the ideal model for its training period differs from that of the testing period due to temporal concept drifts discussed in §2.overall, in half of the ten runs, we found cases of unseen homes whereby the contextualized model outperforms the global model. one may argue that the dynamic approach does not offer a significant advantage compared to the global model, especially when the global model is easier to develop. 
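a small sketch of the selection rule used for the contextualized strategy (scikit-learn-style estimators and an accuracy callable assumed; as noted in the text, the labelled data of the unseen home may not be available in practice):

def pick_model(per_home_models, global_model, X_unseen, y_unseen, accuracy):
    """assign the best available model to an unseen home using its labelled data."""
    candidates = list(per_home_models) + [global_model]
    scores = [accuracy(m.predict(X_unseen), y_unseen) for m in candidates]
    best = max(range(len(candidates)), key=lambda i: scores[i])
    return candidates[best], scores[best]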
however, at the scale of an isp with thousands of home networks serving a larger variety of device types, the dynamic approach seems attractive, potentially outperforming the global model.4helps us have a closer look at the performance of static versus dynamic approaches for three representative homes unseen to our models, each from a certain run, across the testing days.model selection in absence of labeled data: as stated earlier, this paper assumed that the best model is somewhat given (by leveraging ground truth labels of an unseen dataset) for the contextualized modeling approach. given concept drifts, we next quantitatively compared the performance of two broad inference strategies: global (one model trained on aggregate data from all seen homes) versus contextualized (one model per seen home) when predicting traffic data of unseen homes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/147.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/147.txt new file mode 100644 index 0000000000000000000000000000000000000000..cf835a681f93c1d4881f8c2cfe9136b4f33968fc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/147.txt @@ -0,0 +1 @@ +there has been a growing research effort in deep learning to develop neural network architectures that can be used to learn efficiently from data that possesses an underlying symmetry. these architectures guarantee that the functions that are learned are subject to a geometric property, known as equivariance, that is connected with the symmetry group. group equivariant neural networks are important due to the additional advantages that they offer over traditional multilayer perceptron models. for example, they commonly display high levels of parameter sharing within each layer, resulting in significantly fewer parameters overall. this often leads to models that show improved prediction performance on new data.the symmetry group that has received the most attention, in terms of it being explicitly incorporated into neural network architectures, is the group of all permutations on some fixed number of objects, called the symmetric group. creating neural networks that are equivariant to permutations is highly desirable as many data structures, such as sets and graphs, exhibit natural permutation symmetry. it is easily understood that the labelling of the elements of a set or the vertices of a graph is arbitrary; hence, it is crucial to ensure that the functions that are learned from such data do not depend on how the data is labelled.in this paper, we look instead at how to construct neural networks that are equivariant to the alternating group. 
the alternating group is an index two subgroup of the symmetric group consisting solely of all of the even permutations.alternating group symmetry has proven to be particularly useful when learning from spherical image data that has been discretely represented on an icosahedron (zhang et al., 2019); in constructing convolutional neural networks on an icosahedron (cohen et al., 2019); and in estimating polynomials that are invariant to the action of the alternating group (kicki et al., 2020).specifically, we give a full characterisation of all of the possible alternating group equivariant neural networks whose layers are some tensor power of r n by finding a basis of matrices for the learnable, linear, alternating group equivariant layer functions between such tensor power spaces in the standard basis of r n .our approach is similar the one presented in the papers written by pearce-crump (2022a;2022b). they used different sets of set partition diagrams to characterise all of the learnable, linear, group equivariant layer functions between tensor power spaces in the standard basis of r n for the following groups: the symmetric group s n ; the orthogonal group o(n); the symplectic group sp(n); and the special orthogonal group so(n). we will show that in the case of the alternating group a n , the layer functions can also be characterised by certain sets of set partition diagrams.to do this, we use a concept that was first introduced by comes (2020), namely, so-called jellyfish. in their paper, they largely determined the theory of alternating group equivariance; however, they relied heavily on the language of category theory in their exposition. we simplify their approach, and provide proofs that are more accessible to the machine learning community.the main contributions of this paper, which appear in section 6 onwards, are as follows:1. we are the first to show how the combinatorics underlying set partition diagrams, together with some jellyfish, serves as the theoretical foundation for constructing neural networks that are equivariant to the alternating group when the layers are some tensor power of r n .2. in particular, we find a basis for the learnable, linear, a n -equivariant layer functions between such tensor power spaces in the standard basis of r n .3. we extend our approach to show how to construct neural networks that are equivariant to local symmetries.in this section, we recall the technique of using set partitions to find a basis of hom sn ((r n ) ⊗k , (r n ) ⊗l ) in the standard basis of r n since it will feature heavily in what follows for the alternating group.as the number of set partitions in π l+k having exactly t blocks is the stirling number l+k t of the second kind, we see that the number of elements in π l+k is equal to b(l + k), the (l + k) th bell number, and that the number of elements in π l+k,n is therefore equal to the n-restricted (l + k) th bell number. 
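for reference, the counting statement above can be written compactly; this is a standard identity restating the sentence above, with the stirling number of the second kind counting the set partitions with exactly t blocks:

$$ |\Pi_{l+k}| \;=\; B(l+k) \;=\; \sum_{t=1}^{l+k} \left\{ {l+k \atop t} \right\}, \qquad |\Pi_{l+k,\,n}| \;=\; B_n(l+k) \;=\; \sum_{t=1}^{n} \left\{ {l+k \atop t} \right\}. $$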
the basis elements of hom sn ((r n ) ⊗k , (r n ) ⊗l ), in the standard basis of r n , correspond bijectively with all set partitions π in π l+k having at most n blocks, which correspond bijectively with the orbits coming from the action of s n on l+k .we form a basis element of hom sn ((r n ) ⊗k , (r n ) ⊗l ), denoted by x π , by adding together all matrix units whose indexing pair (i, j) appears in the orbit o sn ((i π , j π )); that is,.however, the major difference between the symmetric group and the alternating group is that the a n orbits on l+k , and consequently the basis elements of hom an ((r n ) ⊗k , (r n ) ⊗l ), are not necessarily in bijective correspondence with the set partitions of having at most n blocks.if π is a set partition in π l+k,n having n -2 blocks or fewer, then π corresponds bijectively to a basis element of hom an ((r n ) ⊗k , (r n ) ⊗l ),.3 tells us that finding a basis of hom an ((r n ) ⊗k , (r n ) ⊗l ) in the standard basis of r n is very similar to finding a basis of hom sn ((r n ) ⊗k , (r n ) ⊗l ) in the standard basis of r n , since finding the basis amounts once again to considering all of the set partitions of having at most n blocks.indeed, if a set partition π in π l+k,n has at most n -2 blocks, then we see that x π given in (22) is a basis element of hom an ((r n ) ⊗k , (r n ) ⊗l ) since, in this case, o sn ((i π , j π )) = o an ((i π , j π )), by theorem 6.the question remains as to how to take a set partition π in π l+k,n having either n -1 or n blocks and use it to obtain the two basis elements of hom an ((r n ) ⊗k , (r n ) ⊗l ) that it corresponds to. the determinant map is an element of hom an ((r n ) ⊗n , (r n ) ⊗0 ), but it is not an element of hom sn ((r n ) ⊗n , (r n ) ⊗0 ).however, since the determinant map is a map (r n ) ⊗n → r, and the elements of o sn ((i π , j π )) are elements of l+k , to use the determinant map to try to identify the a n orbits in o sn ((i π , j π )), it would be useful to create a map g π : (r n ) ⊗l+k → (r n ) ⊗n that corresponds bijectively with the set partition π, since such a map would project standard basis elements of (r n ) ⊗l+k onto, at the very least, a linear combination of basis elements of (r n ) ⊗n .consequently, we can define the following two sets: )), we obtain the two basis elements of hom an ((r n ) ⊗k , (r n ) ⊗l ) from the one set partition π ∈ π l+k,n that has either n -1 or n blocks, namely.in algorithm 1, we present some pseudocode for how to explicitly construct the weight matrix for an a n -equivariant linear layer mapping (r n ) ⊗k → (r n ) ⊗l in the standard basis of r n .since any c ∈ (r n ) ⊗l satisfying (48) can be viewed as an element of hom an (r, (r n ) ⊗l ), to find the matrix form of c, all we need to do is to find a basis for hom an (r, (r n ) ⊗l ). they used various sets of set partition diagrams to characterise all of the learnable, linear, equivariant layer functions in hom g ((r n ) ⊗k , (r n ) ⊗l ) when g is any of the following groups: the symmetric group s n , the orthogonal group o(n), the symplectic group sp(n), and the special orthogonal group so(n). 
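restating the construction described above in display form (the formula following "that is," appears to have been lost in extraction, so this is a reconstruction from the surrounding text rather than a quotation): the basis element attached to a set partition π sums the matrix units over the s_n orbit of its indexing pair,

$$ X_\pi \;=\; \sum_{(I,\,J)\,\in\,O_{S_n}\!\left((I_\pi,\,J_\pi)\right)} E_{I,J}, $$

where E_{I,J} denotes the matrix unit with a 1 in row I ∈ [n]^l and column J ∈ [n]^k and zeros elsewhere.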
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/148.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/148.txt new file mode 100644 index 0000000000000000000000000000000000000000..43908c88c292ef3add460f441b5be6ea7e9f82a3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/148.txt @@ -0,0 +1 @@ +with the development of neural architecture search (nas) techniques, it becomes popular to design deep neural network architectures automatically . it is a critical part to design a performance predictor to analyze the relationship between model architecture and its accuracy evaluated on certain tasks for nas algorithms, as training a large net can be fairly expensive .gp-nas-ensemble model is proposed as a novel predictor in this paper. based on gp-nas model , we make several improvements to make it more accurate and robust. the validity of our method is verified in the performance prediction track of cvpr2022 second lightweight nas challenge.with the development of neural architecture search (nas) techniques, it becomes popular to design deep neural network architectures automatically. it is a critical part to design a performance predictor to analyze the relationship between model architecture and its accuracy evaluated on certain tasks for nas algorithms, as training a large net can be fairly expensive. both academia and industry have recently been more interested in neural architecture search (nas). later work aims to lower the cost of searching while increasing performance, which can be divided into three categories: one-shot nas, gradient-based approaches, and predictor nas, which differ in the network architecture modeling process. gradient-based methods incorporate the architecture parameters for each operator and use backpropagation to jointly optimize them and the weights of the network. predictor nas approaches attempt to accurately and effectively forecast the performance of a particular neural network. some worksextend this line of thought by training a predictor to extrapolate the nas learning curves.uses bayesian regression as a proxy model to select candidates, andreplaces a strong predictor with a set of weaker predictors. recent neural network architecture search methods and benchmarksfor multi-task and cross-task have attracted a lot of attention from the community.uses continuous learning to find a single cell structure that can generalize well to unknown tasks via multi-task architecture search based on the weight sharing technique.used gradient-based nas to find the best cell structure for a variety of autonomous driving tasks.in this paper, we introduce the gp-nas-ensemble model, which ranked 2nd in the cvpr 2022 nas competition: performance estimation track.gp-nas is a powerful method to predict the performance of a neural network given its architecture, especially when the size of training data is small. to be more specific, the gp-nas model uses the gaussian process regression model to predict the accuracy of a neural network model under the assumption that the joint distribution between the training observations y and the test function values f.unlike most other competitors of this competition who use deep-learning method or other classical supervised machine learning to achieve a high score, we fully explored the potential of gp-nas model with only small modifications on the model architecture and the feature engineering pipeline. 
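as an illustration of the kind of accuracy predictor discussed above, the sketch below fits a generic gaussian process regressor on (architecture encoding, accuracy) pairs and scores unseen candidates; it uses scikit-learn as a stand-in and is not the gp-nas-ensemble implementation, and the random toy features are assumptions.

    # hedged sketch: a generic gp regression accuracy predictor (not gp-nas itself).
    import numpy as np
    from sklearn.gaussian_process import GaussianProcessRegressor
    from sklearn.gaussian_process.kernels import RBF, WhiteKernel

    # assumed toy data: each row encodes one architecture (e.g. depth/width choices),
    # and y holds the accuracy measured after training that architecture.
    X_train = np.random.rand(64, 10)
    y_train = np.random.rand(64)

    gp = GaussianProcessRegressor(kernel=RBF() + WhiteKernel(), normalize_y=True)
    gp.fit(X_train, y_train)

    X_candidates = np.random.rand(5, 10)
    pred_mean, pred_std = gp.predict(X_candidates, return_std=True)
    # candidates can then be ranked by pred_mean (optionally penalized by pred_std).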
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/149.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/149.txt new file mode 100644 index 0000000000000000000000000000000000000000..b038af3ff29252395d4d7f0e5fef231060bc01d9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/149.txt @@ -0,0 +1 @@ +the origin of convolution neural networks (cnns) dates to 1950, when perceptron algorithm was invented . however, deep learning neural networks, especially, cnns became extremely popular after the introduction of imagenet competition by university of toronto in 2012. cnns have shown promising results in the task of image classification and image segmentation in a variety of applications ranging from student attendance system using face recognition to medical image processing.a cnn typically consists of the following layers: convolutional layers, pooling layers and fully connected layers. the problem in designing a neural network involves manually selecting several parameters including number of hidden layers, number of neurons in each hidden layer, objective function to be minimized, learning rate, dropout rates, activation function to be used and hyperparameters such as stride, padding and filter size in case of cnn . as the range of values that can be used for these parameters is huge, there are infinite number of combinations that can be made, and this makes the manual selection quite difficult. therefore, there are state-of-the-art architectures that are handcrafted by the experts who have expertise in the field , cross domain knowledge of deep learning, computer science and optimization. however, a cnn which is performing well on one dataset, may not perform well on the other. therefore, there is a high demand for the automated framework that takes the data as input and gives a well performing architecture as an output .designing of a neural network architecture can be categorized as a model selection problem . direct solution can be applying hyperparameter optimization, that is, getting the optimized values for parameters such as number of layers, activation function, etc. hyperparameter optimization is also known as black box optimization problem because there is no explanation of the mapping done between the architecture that has been created, the performance achieved and the learning task. three meta-modelling aspects that are used for automatic generation of cnn architecture are hyperparameter optimization, evolutionary algorithms and reinforcement learning.in the past few years, there have been many fully automated algorithms and frameworks that have been developed for this task, such as: 1) metaqnn 2) blockqnn 3) genet 4) autokeras and many more. most of the research is focused on using evolutionary methods and reinforcement learning.the paper is organized as follows: section ii contains theoretical explanation of different algorithms used for this task. section iii illustrates drawbacks faced by currently existing methodologies. section iv briefly summarizes the working and performance of different frameworks integrated for this research and finally section v comprised of the conclusion which concludes the paper. 
the problem in designing a neural network involves manually selecting several parameters including number of hidden layers, number of neurons in each hidden layer, objective function to be minimized, learning rate, dropout rates, activation function to be used and hyperparameters such as stride, padding and filter size in case of cnn. each method has its own drawbacks: layer-by-layer search makes the search space too large, and it takes a lot of time for the searching algorithm to find a good architecture, whereas block- and cell-based search spaces face the drawback of limited options. search strategy: the optimization algorithm that should be used to explore the search space for finding the best possible architecture faster while maintaining a good accuracy is decided in this phase. there are many techniques that are proposed, such as extrapolation of the accuracy curve and predicting the final accuracy, training the models for a smaller number of epochs, training the model on a small dataset, or sometimes limiting the size of the neural network to a particular number of hidden layers. as discussed earlier, we have seen the huge time complexity of grid search and random search, so researchers started working on bayesian optimization methods for hyperparameter optimization, and it showed impressive results in the earlier phase of neural architecture search. they also have used skip connections in the search space, but irrespective of the high accuracy achieved using multi-branch and skip connections, they are computationally expensive, time expensive and require a lot of rule-based systems for creating valid architectures (as they are susceptible to compilation failure during generation of cnn). some constraints are put on the values that can be used for designing the architecture to reduce the search space; nevertheless, the enormous number of possible combinations leads to high time complexity and huge computational resources required for running the algorithm even on a small dataset. one line of work deployed a learning agent that used an epsilon greedy algorithm and experience replay for efficient navigation through the search space and generation of high performing cnn architectures. another proposed monas, a multi-objective neural architecture search that uses reinforcement learning for generation of cnn architectures. this was achieved by using a strategy called the sequential model-based optimization (smbo) process, which instead of leading to a direct search in the search space, performs a block-by-block search; that is, first a 1-block cell is created, which is then expanded by adding another block in that cell, and this cycle goes on until the threshold of the maximum number of blocks a cell can contain is reached. in other work, block architectures were created that are purely based on convolutional layers and dense layers; the exclusion of the pooling layer has decreased the search space, leading to faster generation of cnn architectures, but also had a negative impact on accuracy. this section is divided into 4 subparts: (a) algorithm for dqnas, (b) search space: description of different layers and parameters used for creation of the search space, (c) search strategy and action space: model used to navigate through the search space and take action (selection of layers), and (d) evaluation metrics: metrics used for the evaluation of the generated cnn.
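the epsilon-greedy navigation mentioned above can be sketched as follows; the layer vocabulary, the q-table keying, and the reward source are simplified assumptions for illustration, not the cited agent's actual design.

    import random

    # hedged sketch of epsilon-greedy layer selection over a tiny search space.
    LAYERS = ["conv3x3", "conv5x5", "maxpool", "dense"]   # assumed action space

    def choose_layer(q_values, state, epsilon=0.1):
        """pick the next layer for `state`, exploring with probability epsilon."""
        if random.random() < epsilon:
            return random.choice(LAYERS)                          # explore
        scores = q_values.get(state, {})
        return max(LAYERS, key=lambda a: scores.get(a, 0.0))      # exploit best known layer

    # usage: q_values is a dict of dicts filled in by whatever reward signal
    # (e.g. validation accuracy of the generated cnn) the search assigns.
    q = {("depth", 0): {"conv3x3": 0.71, "maxpool": 0.65}}
    print(choose_layer(q, ("depth", 0), epsilon=0.2))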
when a new model is generated that has some common combination of layers with the previously trained models, then one shot training is used to transfer the parameter weights to this new model, this imparts some knowledge to the new generated model, and it does not start with some random initialization of weights. these models contain complex layers and connections and takes huge time and large computational resources for training, whereas models generated using proposed methodology are simple, takes 1 hour for training and have accuracy difference of mere 5% as compared to complex deep learning cnn architectures. there are a lot of meta-modelling approaches used for automatic generation of neural network architecture, but either they have a requirement of training the algorithm on the dataset using a lot of gpus on several days or they restrict the search space make force the algorithm to follow a pattern. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/15.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/15.txt new file mode 100644 index 0000000000000000000000000000000000000000..ff705af77aade216f6a8b0a7f7cae32095f55591 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/15.txt @@ -0,0 +1 @@ +in digital health, a human user and a mobile health (mhealth) application work together to acheive user specified behavioral goals. for example, the user may own a physical therapy (pt) app that guides them through a physicianrecommended daily exercise routine. to plan effective intervention, the app agent maintains a model of the user's behavior. in our paper, the app agent models the user's decision making process as that of a reinforcement learning (rl) agent. as a result, there are two rl agents that operate in this scenario. the first rl agent is the autonomous app agent, whose policy provides personalized interventions (e.g. a push notification about the importance of exercising daily) to the user in order to maintain healthy behavior (e.g. the pt routine). the second rl agent is the user, whose action space is binary -they choose whether or not to engage in the suggested behavior (e.g. do the pt routine).even though the user and app agents share the same goallong-term behavior change -without intervention, the user's default decision may not be to engage in the target behavior, due to systematic impairments in the human's decision making. for example, the user may be myopic -that is, they may heavily discounts future rewards (story et al. 2014). in our pt example, a fully rehabilitated shoulder may seem too distantly located in the future to motivate the user towards the goal. prior work has tried to infer the user's impairment from demonstration. in this paper, we explore a complementary problem: assuming we know the user's impairment, what should the app agent do about it?our contributions. in this work, we explore effective ways for the app gent to intervene to maintain users' goaloriented progress, in situations where the user would have otherwise disengaged. to do this, we propose a formal framework that represents the user as an rl agent, wherein different parametrizations of the user's markov decision process (mdp) capture a range of commonly observed user behaviors in mhealth -state-dependent motivation, disengagement, and difficulty of adherence. for example, a myopic user is represented as an agent planning with a small discount factor γ. 
furthermore, our framework formalizes the mechanism through which the app agent's interventions affect the user's decisions; namely, the app agent intervenes on the user's mdp parameters. for example, intervening on a myopic user corresponds to increasing the user's discount factor γ. finally, as a precursor to user studies, we use our framework to extract concrete intervention strategies that are expected to work well for a given type of user. we end with a discussion of the interesting behavioral and computational open questions that arise within this framework. related work. reinforcement learning is frequently used to model the complex mechanisms underlying human behavior, from the firing of dopaminergic neurons in the brain (niv 2009; shteingart and loewenstein 2014) to disorders in computational psychiatry (maia and frank 2011; chen et al. 2015). in digital health, rl has been used to model maladaptive eating behaviors (taylor et al. 2021). although these settings use rl to produce models of human decision making, the models themselves are not used to enrich planning for an autonomous agent. in some settings, such as in human robot interaction (tejwani et al. 2022; xie et al. 2021; losey and sadigh 2019) or assistive ai (chen, fong, and soh 2022; reddy, dragan, and levine 2018), the human is modeled to inform the decisions of another rl algorithm. though these applications can be formalized as a multi-agent rl problem (i.e. a two-player cooperative game), we formalize this as a single-agent problem, where we must solve for the mhealth agent's policy. this choice is motivated by our setting, where the strongest detectable effect of the app's intervention on the user tends to be immediate and transitory. modeling the actions of an external, human rl agent requires inferring the parameters that drive their policy. in inverse reinforcement learning (irl), this means inferring the user agent's reward function (ziebart et al. 2008). irl has been applied in digital health to infer user preferences: zhou et al. (2018) infer the user's utility function in order to set adaptive daily step goals. similar to herman et al. (2016) and reddy, dragan, and levine (2018)'s goals of inferring the transition dynamics, we are interested in modeling the user's entire decision making process, beyond the rewards. despite evidence of humans demonstrating systematic behavioral impairments (e.g. myopic planning), most irl approaches assume that human agents act optimally, or near-optimally, with respect to a task. to improve on collaboration with humans, prior work has focused on representing and inferring these impairments from human actions (evans, stuhlmüller, and goodman 2016; laidlaw and dragan 2022; shah et al. 2019; jarrett, hüyük, and van der schaar 2021). however, the goal of prior work has been for the app agent to function in these collaborative settings despite these impairments, but not to intervene on them. the app indirectly interacts with the world through the user; by influencing the user's decision making process, the app influences how the user moves in the world. at each time step, the user decides to perform (a_user = 1) or not perform (a_user = 0) the behavior. to recap, the user's behavior is governed by the following parameters: θ = {b_user, g_user, d_user, p_user, d_real, γ_user}. this accounts for some degree of randomness in the user's life: although they had planned to start pt for the day (a_user = 1), the user may receive an urgent phone call that prevents it from happening (â_user = 0).
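to make the parameterization above concrete, here is a minimal sketch of the user parameter vector θ and an intervention that raises a myopic user's discount factor γ; the field names follow the notation above, but the default values, the meaning attached to d_real, and the additive form of the intervention are illustrative assumptions.

    from dataclasses import dataclass, replace

    # hedged sketch of θ = {b_user, g_user, d_user, p_user, d_real, γ_user}.
    @dataclass
    class UserParams:
        b_user: float      # per-step burden of engaging in the behavior
        g_user: float      # perceived value of reaching the goal
        d_user: float      # perceived consequence of disengagement
        p_user: float      # user's believed probability of making progress
        d_real: float      # assumed here: the true consequence of disengagement
        gamma_user: float  # discount factor; a small gamma models a myopic user

    def intervene_on_gamma(theta: UserParams, boost: float = 0.2) -> UserParams:
        """illustrative intervention on a myopic user: increase gamma_user (capped at 1)."""
        return replace(theta, gamma_user=min(1.0, theta.gamma_user + boost))

    myopic = UserParams(b_user=-1.0, g_user=10.0, d_user=0.0,
                        p_user=0.6, d_real=0.0, gamma_user=0.3)
    print(intervene_on_gamma(myopic).gamma_user)  # 0.5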
the app receives a negative reward if the user takes no action and a positive reward if the user takes an action following intervention: r app (s app , a app ) = 2â user -1. the app experiences transitions according to the true probability of making progress (p world instead of p user ) and the true probability that the user executes an action (â user instead of a user ).at a high level, componentis the burden the user would accumulate in order to reach the goal (and so depends on b user ), componentis the temporally-discounted value of the goal g user , and component is the relative consequence of disengagement. interventions on b user , g user , and d user are technically "unbounded" in effectiveness because they can be any real number. an intervention that makes g user = 10, 000 could make the inequality hold (thus a user = 1) for any relatively small values of b user and d user .6, g user = 10, b user = -1, d user = 0, p world = 0. interventions in this window target user parameters that can affect user behavior regardless of distance from the goal state.window 2: depending on user type, interventions on p user or γ user . although intervening on p user , b user , or d user is valid, the interventions on b user and d user would require a high level of effectiveness near 100%, while the intervention on p user must be at least 75% effective. we fix the value of g user = 10 because we do not consider interventions on g user and because the value of g user is meaningful only in relation to the value of b user , which we do sample. the underconfident and farsighted users will require intervention on b user or d user in window 1 and then intervention on p user in window 2 (the sizes of these windows will vary). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/150.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/150.txt new file mode 100644 index 0000000000000000000000000000000000000000..3fcd2f3451002b55161bd739a567d741aa52407a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/150.txt @@ -0,0 +1 @@ +graph neural networks (gnns) are neural network models designed to adapt and perform inference on graph domains . while a few models were already proposed in-between 2005 and 2009 , the interest in gnns has increased dramatically over the last few years, thanks to the broader availability of data, processing power, and automatic differentiation frameworks. now, gnns are the state-of-the-art solution in a comprehensive set of scenarios. nevertheless, regulations require high task performance and a transparent decision process .for this reason, several researchers have investigated techniques to explain gnns' predictions, primarily identifying the most critical portions of the graph that contributed to producing a particular inference. the vast majority of these techniques provide a post-hoc explanation, thus inferring the reasons that led to a specific outcome by a trained model. however, recent efforts toward "explainable-by-design" gnns rather than post-hoc explainers are opening up new, interesting approaches. for example, in , the authors introduce an explainability term to let the network converge to a "interpretable" local minimum to facilitate the work of post-hoc explanation algorithms. 
solutions like completely discard the notion of post-hoc algorithms providing explanations directly in the main model output.on a separate line of research, recent studies demonstrated that by providing the gnn with subgraphs that give different views of the same graph, it is possible to increase the expressive power of the standard message-passing framework.we propose to connect these two topics and build an explainable by-design subgraph-enhanced gnn. we use a data-driven approach to learn small and representative subgraphs that increase the expressive power for the downstream task and that can be used as explanations.on a separate line of research, recent studiesdemonstrated that by providing the gnn with subgraphs that give different views of the same graph, it is possible to increase the expressive power of the standard message-passing framework.of particular interest for this work is pgexplainer, which uses a small network to parametrize the probability of each edge ω ij of being part of the explanatory subgraph, and sample from this distribution to obtain the final explanation subgraph characterized by edges e ij . it has recently been shown that it is possible to create more expressive gnns using standard architectures that process several subgraphs of the input graph., all graphs obtainable by removing one edge (edge deleted strategy) or one node (node deleted strategy) from the original graph. the encoder implements a module l 1 consisting of several message-passing layer that process each subgraph independently, and then a second message-passing module l 2 preprocesses the aggregation of the subgraphs working as an information-sharing module for the subgraphs. finally, in the third step, we train the whole sgnn framework, fine-tuning the backbone using the explanatory subgraphs. our model outputs the predicted label and an explanation obtained by combining all the subgraphs used during training. this task requires a global pooling operation, such as mean, max, or min pooling, to build a graph representation from the node representations produced by stacking the layers described in equation (2). our goal is to develop a framework that jointly predicts the graph class and the explanation masks, highlighting the parts of the graph that contribute the most to the prediction. we expect such subgraphs to be more informative than post-hoc explanation masks since they are directly generated by the model to maximize the classification performance. in the first step, we train the backbone of the sgnn, which is a classifier that processes the original graph. we adapt the binarization threshold to obtain a set of explanations with k edges, where k ranges from 5% to 75% of the initial number of edges in the original graph. a new message-passing module preprocesses the aggregation of the explanations working as an information-sharing module for the subgraphs. finally, a new set learning moduleaggregates subgraph representations obtained after a global pooling operation into a single one used in downstream tasks. besides being used by the information-sharing module, the subgraph obtained by aggregating all the explanations is used to explain the model's prediction. 
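the adaptive binarization of edge scores into an explanation subgraph, as described above, can be sketched as follows; the flat score array and the top-fraction rule are assumptions standing in for the model's learned edge probabilities ω_ij.

    import numpy as np

    # hedged sketch: keep the top fraction of edges by learned score as the explanation.
    def explanation_mask(edge_scores: np.ndarray, keep_fraction: float = 0.25) -> np.ndarray:
        """binarize per-edge scores so that roughly `keep_fraction` of edges are kept."""
        k = max(1, int(round(keep_fraction * edge_scores.size)))
        threshold = np.sort(edge_scores)[-k]          # adaptive threshold for k edges
        return edge_scores >= threshold               # boolean mask over edges

    omega = np.array([0.9, 0.1, 0.4, 0.8, 0.05, 0.7])    # assumed edge probabilities ω_ij
    print(explanation_mask(omega, keep_fraction=0.5))    # -> [ True False False  True False  True]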
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/151.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/151.txt new file mode 100644 index 0000000000000000000000000000000000000000..b1bed5af4bde22412d45da1ecb56cd95b02d3a2c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/151.txt @@ -0,0 +1 @@ +diffusion models are powerful generative models that generate high-quality and diverse data. these methods inject gaussian noise into the data through a forward diffusion process (fdp), and they learn to reverse the process to go from noise to data. there are many ways to define diffusion models: score-based models (sbms) learn to predict the score (gradient log density), while (nonscored based) diffusion models (dm) learn to predict the added gaussian noise in order to remove it from the noisy data.sbms and dms generally rely on gaussian noise. a priori, there is no apparent reason why gaussian noise would be needed as opposed to other types of noise. very recent works have started exploring non-gaussian noise. bansal et al. and anonymous devise their own diffusion-like frameworks to sample from arbitrary distributions by going from dataset 1 to dataset 2; in both papers, they find that using non-gaussian distributions as the second dataset (instead of gaussian noise) significantly worsen the quality of the generated data. more related to our work, deasy et al. shows that sbm, where we learn the score of a generalized normal (gn) distribution, leads to significantly worse results when moving away from the gaussian distribution (which corresponds to the gn distribution with β = 2). in this paper, we aim to answer the question of whether there exist non-gaussian distributions that perform better than the gaussian distribution in (nonscored based) dms. our work generalizes the dms with learnable mean and variance by bao et al. to location-scale family noise distributions, and we test this framework on a variety of noise distributions. let ᾱt = t s=1 α s ; then by the property of gaussian distribution, this means that x t = √ ᾱt x 0 + √ 1ᾱt z and we approximately have that x t ∼ n (0, 1).from the markov property, we know that q(x t |x t-1 ) = q(x t |x t-1 , x 0 ).it can be shown that the variational lower bound optimization can be reduced to minimizing d kl (q(x t-1 |x t , x 0 )|| p θ (x t-1 |x t )) for all t ∈ where q(x t-1 |x t , x 0 ) is a closed-form gaussian distribution depending on x t and x 0 . given that q(x t-1 |x t , x 0 ) is gaussian distributed with known variance, its mean is the only parameter left to be estimated. , nichol anddhariwal , directly minimizing the kl divergence works poorly.contrary to ddpm and other diffusion models, our generalized framework directly samples from q(x t |x 0 ) rather than sample from q(x t+1 |x t ) one step at a time. however, as mentioned, we do not know q(x t |x t-1 ), so we cannot try to match this term; it also means that we cannot get the close-form solution for q(x t-1 |x t , x 0 ) using bayes rule as it depends on the unknown transition probability q(x t |x t-1 ).we show below how estimating the distribution of the noise z given x t allows us to directly sample from q(x t-1 |x t ) by plugging the sample from q(z|x t ) into a deterministic equation.thus, if we could sample from that z conditional on x t , we could effectively sample from q(x 0 |x t ). 
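the one-shot forward sampling step written above, x_t = √ᾱ_t x_0 + √(1 − ᾱ_t) z, can be sketched as follows for a generic location-scale noise family; the linear beta schedule and the choice of (variance-rescaled) laplace noise as the non-gaussian example are illustrative assumptions.

    import numpy as np

    rng = np.random.default_rng(0)

    # assumed linear schedule: alpha_t = 1 - beta_t, alpha_bar_t = prod_{s<=t} alpha_s
    T = 1000
    betas = np.linspace(1e-4, 0.02, T)
    alpha_bars = np.cumprod(1.0 - betas)

    def forward_sample(x0: np.ndarray, t: int, noise: str = "gaussian") -> np.ndarray:
        """one-shot sample from q(x_t | x_0) with f(0, 1) noise from a location-scale family."""
        if noise == "gaussian":
            z = rng.standard_normal(x0.shape)
        else:  # laplace rescaled to unit variance (illustrative non-gaussian choice)
            z = rng.laplace(0.0, 1.0 / np.sqrt(2.0), size=x0.shape)
        a = alpha_bars[t]
        return np.sqrt(a) * x0 + np.sqrt(1.0 - a) * z

    x0 = rng.standard_normal((3, 32, 32))
    xt = forward_sample(x0, t=500, noise="laplace")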
since we know that z is a sample from the distribution f (0, 1) in the forward process, we propose to estimate it as z(x t ) ∼ f (µ θ (x t ), σ θ (x t )) in the reverse process; this is a generalization of the variational approximation done in extended-ddpm and v ar. thus µ θ1 (x t ) = μθ1 (x t ) and σ 2 θ2 (x t ) = 1 √ 2 σ2 θ1 (x t ).contrary to other works, our theoretical framework explicitly defines the one-shot forward process q(x t |x 0 ), but not q(x t |x t-1 ). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/152.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/152.txt new file mode 100644 index 0000000000000000000000000000000000000000..dcf976285855bd977957831f62a77ad04e997e3b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/152.txt @@ -0,0 +1 @@ +the success of deep neural networks (dnn) significantly depends on the cheap computation of gradients using back-propagation enabling gradient based minimization of the loss functions. as the parameter count of dnn ranges between several tens of thousand in small classification networks to several billion in large scale generative models, there seems to be no alternative to the use of gradients. however, gradient based optimization is beset with the problem of local minima, of which the energy landscape of dnn offers plenty. exploitation of a local minimum with gradient descent comes with guarantees for progress relative to previous optimization steps, but does not guarantee a decent level of performance. in order to go more global, momentum methods have therefore been introduced to overcome local minima.as compared to gradient descent, momentum based methods have more parameters to adjust. besides the strength of the inertial forces controlled by the 'mass' parameter, a 'friction' parameter has to be determined, which is responsible for slowing down the search motion and bringing it to rest, ultimately. finally, the learning rate needs to be controlled throughout the progress of the optimization process, like in gradient descent.the complexity in setting and controlling the aforementioned hyper-parameters can be alleviated by an interpretation of the optimization process in physical terms as already indicated by the physical connotations of 'mass' and 'friction'. it has been recently proposed to cast the optimization process in a port hamiltonian framework, which makes the convergence of the optimization process to a stationary point transparent via energy based considerations, where loss is connected to potential and momentum to kinetic energy, whereas 'friction' accounts for energy dissipation and interdicts motion at high pace for unlimited time. it is clear that the friction / energy dissipation parameter is essential for the (non) locality of the optimization process: if high, friction essentially damps out all momentum and the procedure essentially 'just flows down the hill' as for gradient descent, resulting in low exploration and high exploitation. if low, the motion will go on essentially un-damped and not rest and thereby explore all of the accessible parameter space. exploration is high, and exploitation is low in this setting.then, parameter settings can be modified over time or controlled adaptively as a part of the optimization algorithm is a familiar thought. the physics based intuition of port hamiltonian systems can be helpful in the design of such adaptive strategies. 
here we suggest a simple, event based adaptive parameter selection strategy that starts the optimization in an exploratory phase with low friction and turns over to exploitation by 'heavy breaking', once the potential energy (i.e. the loss function) is sufficiently reduced. sufficiency is pre-defined as the minimum reduction goal of the optimization, which can be set, e.g., as the reduction of the loss obtained in previous trials.in this paper, we show that the proposed strategy actually works for some classical examples in deep learning and improves the optimization loss and also the test accuracy for a standard, le-net-5 based architectures on two well known academic classification tasks solved by deep learning, namely the cifar10 and the fashionmnist data-sets.in order to focus on the optimization only, we do not employ data augmentation or pre-training and thereby do not achieve sota performance in our experiments. we however consistently achieve an advantage over the widely used stochastic gradient descent as a benchmark. we also observe consistent gains in performance after 'heavy breaking' is finally triggered.our paper is organized as follows: in section 2 we give an overview over related work and in section 3 we present the port hamiltonian view on gradient based optimization with momentum and energy dissipation. our experimental setup as well as our results are documented in section 4. in the final section 5 we present our conclusions and give an outlook to future research.the success of deep neural networks (dnn) significantly depends on the cheap computation of gradients using back-propagation enabling gradient based minimization of the loss functions. however, gradient based optimization is beset with the problem of local minima, of which the energy landscape of dnn offers plenty. it has been recently proposed to cast the optimization process in a port hamiltonian framework, which makes the convergence of the optimization process to a stationary point transparent via energy based considerations, where loss is connected to potential and momentum to kinetic energy, whereas 'friction' accounts for energy dissipation and interdicts motion at high pace for unlimited time. it is clear that the friction / energy dissipation parameter is essential for the (non) locality of the optimization process: if high, friction essentially damps out all momentum and the procedure essentially 'just flows down the hill' as for gradient descent, resulting in low exploration and high exploitation. here we suggest a simple, event based adaptive parameter selection strategy that starts the optimization in an exploratory phase with low friction and turns over to exploitation by 'heavy breaking', once the potential energy (i.in this paper, we show that the proposed strategy actually works for some classical examples in deep learning and improves the optimization loss and also the test accuracy for a standard, le-net-5based architectures on two well known academic classification tasks solved by deep learning, namely the cifar10and the fashionmnistdata-sets.our paper is organized as follows: in section 2 we give an overview over related work and in section 3 we present the port hamiltonian view on gradient based optimization with momentum and energy dissipation. 
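a minimal sketch of the event-based 'heavy breaking' strategy described above: momentum (heavy-ball) updates run with low friction until the loss falls below a preset reduction goal, after which the friction parameter is raised sharply; the parameter values and the plain quadratic toy loss are assumptions for illustration, not the paper's experimental setup.

    import numpy as np

    def heavy_ball_with_braking(grad, theta0, loss, steps=500, lr=0.05,
                                mu_explore=0.05, mu_exploit=0.9, loss_goal=1e-2):
        """momentum descent whose friction jumps from low to high once loss <= loss_goal."""
        theta, p, mu = np.asarray(theta0, dtype=float), 0.0, mu_explore
        for _ in range(steps):
            if mu == mu_explore and loss(theta) <= loss_goal:
                mu = mu_exploit                       # event: switch exploration -> exploitation
            p = (1.0 - mu) * p - lr * grad(theta)     # friction mu dissipates the momentum p
            theta = theta + p
        return theta

    # toy usage on a quadratic loss L(theta) = ||theta||^2 / 2
    loss = lambda th: 0.5 * float(np.dot(th, th))
    grad = lambda th: th
    print(heavy_ball_with_braking(grad, [3.0, -2.0], loss))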
while learning rate schedulesdetermine the setting for one specific parameter upfront, it has also been proposed to modify the dissipation parameter in momentum based optimization.the simple gradient descent algorithm to minimize a differentiable loss function l (θ), namely θ k+1 = θ k -α∇ θ l (θ k ) can be seen as a first order euler discretization of the gradient flow.as mentioned in the introduction, the problem with gradient descent in the context of highly non-convex loss functions l (θ), as especially in the context of the training of deep neural networks, lies in the fact that gradient flows and gradient descent algorithms get stuck in local minima.to over come the strict locality of gradient flow and gradient descent, momentum based methods have been introduced.using the chain-rule, (4) and ∇ x h(x(τ )) j∇ x h(x(τ )) = 0 by the skew-symmetry of j, it is now easy to see that the following inequality holds for the dissipated total 'energy' measured by h(x), where p 2 2m takes the role of kinetic energy and the loss l (θ) the role of potential energy. furthermore, for x * = θ * p * , we find p * = 0 and ∇ θ l (θ * ) = 0, hence the θ-component of stationary points are in one to one correspondence to the critical points of the original optimization problem. by the physics-motivated interpretation of momentum in a port hamiltonian framework, we explained how different settings for the friction / dissipation correspond to an exploration or exploitation phase in the progress of optimization. by switching from exploration to exploitation when a certain minimal reduction of the loss function of a deep neural network is achieved, we obtain improved classification accuracy of image classification networks as compared with simple stochastic gradient descent or a momentum based optimization with fixed friction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/153.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/153.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9d3d0a20fa94208c442127c7862848cfe4c35ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/153.txt @@ -0,0 +1 @@ +deep neural networks (dnns) have demonstrated impressive performance in a broad range of applications . however, deploying dnns on resource-constrained devices, such as iot devices, poses significant challenges. to address this issue, researchers have proposed various model compression techniques, including model quantization and pruning . among these techniques, model quantization has become a critical approach for compressing dnns due to its ability to maintain network structure and achieve comparable performance. this technique involves mapping the network parameters from 32-bit floating-point numbers to low-bit integers, resulting in reduced memory usage and faster inference.despite their impressive performance, dnns are highly susceptible to adversarial examples . adversarial examples are perturbations that are designed to be undetectable to human vision but can easily deceive dnns, posing a significant threat to practical deep learning applications. in addition, dnns are vulnerable to natural corruptions such as snow and motion blur, which are common in real-world scenarios and can significantly reduce the accuracy of dnn models. moreover, system noises resulting from the mismatch between software and hardware can also have a detrimental impact on model accuracy . 
these phenomena demonstrate that quantized networks deployed in safety-critical applications are unreliable when faced with various perturbations in the real world. therefore, it is critical to conduct a comprehensive evaluation of the robustness of quantized models before deploying them, to identify potential weaknesses and unintended behaviors. while numerous studies have extensively investigated the robustness of floating-point networks against various attacks and metrics, research on the robustness of quantized models remains inadequate. these studies lack diversity in terms of noise sources and rely solely on small datasets, leading to inconclusive findings regarding the robustness of quantized models. we build the robustness evaluation benchmark of quantized models. our benchmark assesses the robustness of quantized models using 3 popular quantization methods (dorefa, pact, and lsq) and 4 classical architectures (resnet18, resnet50, regnetx600m, and mobilenetv2). for each method, we evaluate 4 commonly used bit-widths. our analysis includes 3 progressive adversarial attacks, 15 natural corruptions, and 14 systematic noises on the imagenet benchmark. our empirical results demonstrate that lower-bit quantized models display better adversarial robustness but are more susceptible to natural corruptions and systematic noises. we identify impulse noise and nearest neighbor interpolation as the most harmful sources of natural corruptions and systematic noises, respectively. while numerous studies have extensively investigated the robustness of floating-point networks against various attacks and metrics, research on the robustness of quantized models remains inadequate. these studies lack diversity in terms of noise sources and rely solely on small datasets, leading to inconclusive findings regarding the robustness of quantized models. our benchmark assesses the robustness of quantized models using 3 popular quantization methods (dorefa, pact, and lsq) and 4 classical architectures (resnet18, resnet50, regnetx600m, and mobilenetv2). our empirical results demonstrate that lower-bit quantized models display better adversarial robustness but are more susceptible to natural corruptions and systematic noises. in this paper, we aim to thoroughly evaluate the robustness of quantized models against multiple noises for several quantization methods, architectures, and quantization bits. for specific adversarial attacks, we measure adversarial robustness (ar) using model accuracy, where higher ar indicates a stronger model. for the union of different attacks, we adopt the worst-case adversarial robustness (wcar) to measure adversarial robustness (higher indicates a stronger model). we adopt the average accuracy of the quantized model on all corruptions (denoted as c) to measure natural robustness, denoted as nr. we adopt the model stability on different systematic noises (denoted as s) to measure systematic robustness (sr). from these results, we could make the following observations: (1) unlike the decrease in clean accuracy, lower-bit models exhibit higher worst-case adversarial robustness and are almost always better than the floating-point network; (2) at the same quantization bit, pact presents the best adversarial robustness compared to other quantization methods. furthermore, we compare the adversarial robustness (ar) of quantized models against specific attacks.
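the three aggregate metrics described above can be computed as follows from per-setting accuracies; wcar as the worst case over attacks and nr as the mean over corruptions follow the verbal descriptions above, while the exact stability measure used for sr is an assumption since the displayed formulas are not reproduced in this text.

    import numpy as np

    def worst_case_adversarial_robustness(acc_by_attack: dict) -> float:
        """wcar: accuracy under the strongest attack in the union (higher is stronger)."""
        return min(acc_by_attack.values())

    def natural_robustness(acc_by_corruption: dict) -> float:
        """nr: average accuracy over all natural corruptions c."""
        return float(np.mean(list(acc_by_corruption.values())))

    def systematic_robustness(acc_by_noise: dict) -> float:
        """sr (assumed form): stability measured as 1 minus the accuracy spread over s."""
        accs = np.array(list(acc_by_noise.values()))
        return float(1.0 - (accs.max() - accs.min()))

    # toy usage with made-up numbers
    print(worst_case_adversarial_robustness({"fgsm": 0.41, "pgd": 0.28, "autoattack": 0.22}))
    print(natural_robustness({"snow": 0.55, "motion_blur": 0.49, "impulse_noise": 0.31}))
    print(systematic_robustness({"pillow_nearest": 0.68, "opencv_bilinear": 0.73}))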
though performing similar clean accuracy with the 32-bit model, quantized models are more vulnerable under natural corruptions, especially in the 2-bit models. moreover, we find that among 14 systematic noises, the nearest neighbor interpolation methods in pillow and opencv have the greatest impact on the model performance, which induce nearly a 6% decrease in performance for the 2-bit models.we presents a benchmark for evaluating the robustness of quantized models under various perturbations, including adversarial attacks, natural corruptions, and systematic noises. the results reveal that lower-bit quantized models have higher adversarial robustness than their floating-point counterparts, but are more vulnerable to natural corruptions (especially impulse noise) and systematic noises (especially the nearest neighbor interpolation). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/154.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/154.txt new file mode 100644 index 0000000000000000000000000000000000000000..8aa5d1ad32aa760746f5d5aa05bc49b4d1ca6cf7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/154.txt @@ -0,0 +1 @@ +personal data is being regarded as a new economic asset. our smartphone's personal data can be utilized for a variety of purposes, including identification, recommendation systems, predicting personalities by analyzing patterns in human behavior, and logging human health data with sensors. our research will focus on mobile health for human behavior analysis. artificial intelligence (ai) gives us insight into data that can be used to revolutionize and transform different industries that will propel mankind to new heights. artificial intelligence, specifically in the healthcare industry, can have a major impact on saving lives by providing solutions to pressing issues seen in healthcare. mobile health is considered one of the main drivers that are seeking to bring this transformative change. according to m-health: fundamentals and applications book, mobile health was first defined as ‘mobile computing, medical sensors, and communication technologies for healthcare . the technological breakthrough connected with the release of the first smartphone had a significant impact on the evolution of m-health. this also allowed for the creation of powerful embedded computational tools in smartphones and other technological devices(e.g., smart watches, wearable monitors, and sensors) to produce massive volumes of mobile health data. this innovative move ushered in the smartphonecentric m-health age along with beginning of the smartphonefocused m-health era. the patient-centered approach is starting to take center stage in the paradigm. smartphone m-health is becoming a reality. we can get real-time data from apps by connecting them to a variety of wearable sensors. recent developments have led to the emergence of m-health 2.0, which is described as "the convergence of m-health with emerging development in smart sensors, 5g communications systems with the functional capabilities of web 2.0, cloud computing, and social networking technologies, toward personalized patient-centered healthcare delivery services. this advancement, however, is exacerbating the issues and risks connected with big data. daily, billions of data are being generated from these applications and devices. however, not a lot is being translated into meaningful data. 
there are lots of advancements in ai in the healthcare industry; the most common applications are critical health diagnosis, robots that perform or assist in surgical operations, and textual feature observation for psychological status detection. this paper will attempt to shine a light on other not-so-well-known applications of mobile health data. the data collected with mobile health applications with the aid of sensors will be used to make decisions and predictions based on the findings of the model. the models used in this research can be used in the sports industry. this study explores the utilization of mobile applications on smartphones and tablets to gather information from people working in different sports and fitness environments, such as coaching effectiveness, strength and conditioning, and fitness training. after further exploration, we believe m-health can be implemented at a commercial level to determine which player will be fit to play games based on the data collected with sensors before a match. it will assist coaches, physicians, or any entity to make decisions using the model we have proposed. our smartphone's personal data can be utilized for a variety of purposes, including identification, recommendation systems, predicting personalities by analyzing patterns in human behavior, and logging human health data with sensors. blockchain technology has improved the authenticity of the sports industry sector and structural adjustment policies for creating sports health programs, which is extremely beneficial for the advancement of the sports industry. one prior work proposed myocardial disease prediction by applying a classification model.
the superior model was then used to identify the major aspects influencing anticipated performances and effectiveness of women's handball players was predicted using linear regression, decision trees, support vector regression, radial-basis function neural networks, backpropagation neural networks, and long short-term memory neural network models in handball agility tests, 10-and 20-meter sprints, hands-onhips and hands-free dynamic balance leaps, a 20-meter shuttle run test, and 10-and 20-meter sprints. our study used the collected data to analyze and monitor the subject's health and make predictions from patterns observed by using several machine learning algorithms. we are illustrating the raw data, filtered data, and changing the label 'activity' exists a gap of ~1000 samples (~20sec in sample rate 50hz) graph below which gives the audience an idea of how data is actually formed through the sensor.in this paper, table1depicts the evaluation of multiple machine learning models' performance used to predict physical fitness for athletics, including xgboost, decision tree, logistic regression, random forest, and naive bayes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/155.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/155.txt new file mode 100644 index 0000000000000000000000000000000000000000..9638f4bc81895e66f25b6c9bd20a934c670f6040 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/155.txt @@ -0,0 +1 @@ +due to the prevalence and popularity of online social networks, network data has grown significantly, both in quantity and quality, over the years . such rich data can be exploited to gather information at both the individual and community levels. the influx of data having inter-personal connections (represented as graphs) has served as motivation to develop several unsupervised learning algorithms for various tasks on graphs . these methods leverage node features along with neighborhood information to learn node representations that do not depend on the domain of the underlying graph or the desired task at hand.it is essential that these node representations are generated with appropriate fairness measures, especially in the context of real-world deployments, to minimize bias induced by these graph learning frameworks on downstream tasks. accordingly, fairness in the context of trained decision-making systems has increased in popularity recently due to the numerous social distresses caused when systems not incorporating adequate fairness measures were deployed in the wild . the job platform xing is an extreme example that exhibited gender-based discrimination . on the node classification task, the accuracy increases steadily with the degree (slope=0.0051). after the introduction of cafin, the slope decreases significantly (slope=0.0041, a 20% reduction), leading to lower performance disparity between high and low degree nodes with negligible reduction (-0.3%) in the overall accuracy.previous works aimed to mitigate such unfairness, in this context, focus on ensuring minimal disparity in performance among individuals or groups defined by some membership criteria. although sensitive node attributes generally decide these group memberships, a recent uptick in research considers intrinsic node properties, specifically node degree, to evaluate the fairness of graph neural networks (gnns). 
for example, recent work provides theoretical proof that a popular subclass of graph neural networks, graph convolutional networks (gcns), are biased (in performance) towards high-degree nodes. they propose a degree-specific gcn layer targeting degree unfairness in the model and design a self-supervised learning algorithm for attaching pseudo labels to unlabelled nodes, which further helps low-degree nodes to perform better. later, rawlsgcn reveals the root cause of this degree-related unfairness by analyzing the gradients of weight matrices in gcn and proposes techniques to mitigate this bias. gnns refine node embeddings by aggregating information from their neighbors. so, the efficacy of a node's representation is bound to be correlated to its abundance of structural information. this correlation creates a disparity in the richness of embeddings between structurally rich nodes (highly central) and the rest (less central). figure 1 empirically corroborates this claim. this disparity is even more concerning as the centralities (degrees) of most real-world graphs follow a power-law distribution. this implies that a major fraction of nodes have low centrality scores and hence deficient representations compared to a small fraction of nodes having high centrality. most of the works in the literature focus on imposing fairness concerning sensitive attributes but often overlook the more inherent centrality-induced disparity. recent works also probe into how masking the sensitive attributes may not be enough, as some of the characteristics can seep into the inherent network structure. our work in this paper focuses exclusively on reducing the performance disparity induced among groups of nodes due to skewed centrality distributions. towards this end, we propose a generalized (additive) modification to the loss function of well-known unsupervised gnns to impose group fairness constraints while minimizing the cost induced by the same. to formally demonstrate our approach, we consider graphsage, a popular unsupervised graph learning framework that is widely adopted in many domains, and then show how we extend its objective function with fairness constraints. graphsage, as studied empirically, focuses more on the less frequent higher-degree nodes than on the more frequent lower-degree nodes, leading to a performance disparity between the two groups of nodes. we remedy this limitation of graphsage through our work. note that these fairness constraints can be added to any underlying graph learning algorithm at three different stages: before learning (pre-processing), during learning (in-processing), and after learning (post-processing). in-processing is considered robust and generalizable and finds its application across various domains as it directly adds a secondary objective to the original; hence we adopt this technique in our proposed framework. in particular, we propose a framework, centrality aware fairness inducing in-processing (cafin), that focuses on augmenting the unsupervised version of graphsage to induce centrality-based (e.g., degree) group fairness as an objective while maintaining similar performance on downstream tasks. to the best of our knowledge, cafin is the first work to deal with centrality-driven fairness for unsupervised graph learning, as all other methods work in the supervised or semi-supervised setting (and also largely do not tackle centrality-based fairness aspects).
thus, our primary contribution is a novel in-processing technique to achieve centrality-aware group fairness for unsupervised graph node representation learning.it is essential that these node representations are generated with appropriate fairness measures, especially in the context of real-world deployments, to minimize bias induced by these graph learning frameworks on downstream tasks. although sensitive node attributes generally decide these group memberships, a recent uptick in research considers intrinsic node properties, specifically node degree, to evaluate the fairness of graph neural networks (gnns).note that these fairness constraints can be added to any underlying graph learning algorithm at three different stages: before learning (pre-processing), during learning (in-processing), and after learning (post-processing).in particular, we propose a framework, centrality aware fairness inducing in-processing (cafin), 1 that focuses on augmenting the unsupervised version of graphsage to induce centrality based (ex: degree) group fairness as an objective while maintaining similar performance on downstream tasks.this section briefly reviews relevant literature on (unsupervised) graph representation learning and existing fairness measures for these graph representation learning algorithms. instead of training feature representations for each node in the graph, it learns a set of functions that aggregate feature information from the neighborhood of a node to update the node representation, helping it learn node feature embeddings while accounting for information flow from neighbors.previous work seeks to make graph algorithms fair by (a) preprocessing the original graph to remove potential bias, for example, fair-dropthat adds and removes edges to induce fairness, thereby altering graph structure, (b) in-processing during the training phase, for example,that modifies the gradient used in the optimization, and (c) postprocessing the node embeddings to remove bias.previous works utilize in-processing techniques for fairer results, likethat uses the rawlsian difference principle to mitigate unfairness across the degree of graph convolutional networks (gcn) and, which learns robust tail node (low-degree) representations by transferring information from central nodes. since most graph data does not possess explicit sensitive attributes, we utilize the connectivity structure of the graph using centrality measures to naturally categorize nodes into groups -the group of popular (centrality greater than the median) and unpopular (centrality less than the median) nodes. as the size of the computation graph determines how much information the chosen node aggregates and learns from its neighborhood, the representations of central nodes encode much more information, giving them an advantage over less central nodes.where inc represents the imparity for the task of node classification, fc represents the count of nodes labeled with class c in the input graph, and a c i represents the average accuracy of nodes labeled with class c in the i th group (either popular or unpopular nodes). based on our prior division of nodes based on popularity, edges are divided into three groups -between two popular nodes (p -p), between one popular and one unpopular node (p -up), and between two unpopular nodes (up -up). 
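as a concrete illustration of the grouping and evaluation idea described above, the following is a minimal python sketch (not from the original paper) that splits nodes into popular and unpopular groups at the median degree and computes an average per-class accuracy gap between the two groups, in the spirit of the imparity measure; the toy inputs at the bottom are hypothetical.

from collections import defaultdict

def degree_groups(degrees):
    """Split node ids into 'popular' (degree above the median) and 'unpopular' nodes."""
    ds = sorted(degrees.values())
    median = ds[len(ds) // 2]
    popular = {n for n, d in degrees.items() if d > median}
    return popular, set(degrees) - popular

def classwise_accuracy_gap(y_true, y_pred, node_ids, popular):
    """Average over classes of |acc(popular) - acc(unpopular)| (an imparity-style score)."""
    stats = defaultdict(lambda: [0, 0, 0, 0])  # class -> [pop_hits, pop_total, unpop_hits, unpop_total]
    for n, t, p in zip(node_ids, y_true, y_pred):
        s = stats[t]
        if n in popular:
            s[0] += int(t == p); s[1] += 1
        else:
            s[2] += int(t == p); s[3] += 1
    gaps = [abs(ph / pt - uh / ut) for ph, pt, uh, ut in stats.values() if pt and ut]
    return sum(gaps) / len(gaps) if gaps else 0.0

# hypothetical toy usage
degrees = {0: 1, 1: 5, 2: 2, 3: 8, 4: 3}
popular, unpopular = degree_groups(degrees)
print(classwise_accuracy_gap([0, 0, 1, 1, 1], [0, 1, 1, 1, 0], [0, 1, 2, 3, 4], popular))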
for both the downstream tasks (node classification and link prediction), we sample three subgraphs (g1, g2, and g3) from the original graph: one for training graphsage (g1), one for training the downstream task classifier (g2), and the other for evaluating the classifier's performance in the downstream task (g3).where deg(u) represents the degree of node u, zu the embedding of node u, d(zu, zv) the distance between the node embeddings of nodes u and v, and d(u, v) the distance between the nodes in the graph. l f , the final modified loss function, converges to 0 when the ratio between the two distances is 1, and our loss formulation tries to make the node embedding distance equal to the actual (normalized) distance between the nodes in the graph. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/156.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/156.txt new file mode 100644 index 0000000000000000000000000000000000000000..4b7c132d01c400188f5239b061a00a5b741affcc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/156.txt @@ -0,0 +1 @@ +knowledge distillation is a method of transferring the knowledge from a complex model, called the teacher, to a smaller and simpler model, called the student. in recent years, several variants of knowledge distillation have been proposed, including teaching assistant distillation, curriculum distillation, mask distillation, and decoupling distillation. this literature review summarizes the recent developments in these variants of knowledge distillation and discusses their strengths and limitations.knowledge distillation is a method of compressing a complex deep neural network (dnn) into a smaller and faster dnn while preserving its accuracy. the process of knowledge distillation involves training a smaller dnn, called the student, to imitate the predictions of a larger and more complex dnn, called the teacher. the student network is trained to produce similar results as the teacher network, but with fewer parameters and lower computational cost. knowledge distillation has been widely used for model compression and acceleration, and has shown great promise in various applications , such as computer vision and natural language processing.in recent years, several variants of knowledge distillation have been proposed and explored to improve the performance of knowledge distillation . one of these variants is teaching assistant distillation, which introduces an intermediate model, called the teaching assistant, between the teacher and the student. the teaching assistant is trained to mimic the behavior of the teacher, and the student is trained to imitate the outputs of the teaching assistant. this approach has been shown to provide better performance than traditional knowledge distillation, as it can better capture the knowledge learned by the teacher.another variant of knowledge distillation is curriculum distillation, which designs the learning process to follow a curriculum, similar to human education. the curriculum is designed to present easy examples first and gradually increase the difficulty of the examples as the student improves. this approach has been shown to provide better performance than traditional knowledge distillation, especially for tasks that require a lot of prior knowledge.mask distillation is a variant of knowledge distillation that focuses on transferring the attention mechanism learned by the teacher to the student. 
in mask distillation, the teacher is trained to produce a mask that indicates the importance of each input feature for a particular prediction. the student is then trained to imitate the predictions of the teacher while using the mask to weigh the importance of the input features. this approach has been shown to provide better performance than traditional knowledge distillation, as it can better capture the attention mechanism learned by the teacher. finally, decoupling distillation is a variant of knowledge distillation that decouples the distillation loss from the task loss. in decoupling distillation, the student is trained to imitate the outputs of the teacher on a validation set, while being trained on the task loss on the training set. this approach has been shown to provide better performance than traditional knowledge distillation, as it can better balance the trade-off between preserving the knowledge of the teacher and adapting to the task. in conclusion, knowledge distillation is a widely used method for compressing and accelerating deep neural networks. 2. teacher assistant. when knowledge distillation is performed, increasing the model size of the teacher network can instead make the knowledge distillation worse. in the reported experiments, the student network is a 2-layer cnn while the teacher networks are 4- to 10-layer cnns, and it can be seen from the results that continuing to increase the depth of the teacher network does not necessarily improve the performance of knowledge distillation. to further prove that the gap does affect knowledge distillation, the authors conducted a small comparison experiment by fixing the teacher network as a 10-layer cnn and changing the number of layers of the student network. based on the above observations, it is natural to add a medium-sized network (teacher assistant, ta) between the large network and the small network: the teacher network is first used to distill the ta, and then the student network is distilled by the ta. when the ta was added, the distillation effect on the student network was significantly better than direct distillation from the teacher network (nokd in the reported tables indicates no kd, blkd indicates baseline kd, and takd indicates kd with the ta method described above). the reported analysis also shows that a ta distilled from the teacher network (kd-ta) is better than a ta trained directly from scratch (fs-ta). with multiple stages of distillation, more stages tend to improve the effect, but at a greater cost in time and space. in conclusion, the ta approach, with layers of distillation where the next layer absorbs the knowledge of the previous layer and passes it on, is a good solution to the problem that an overly strong teacher network provides knowledge beyond what the student network can learn, resulting in lower efficiency, and it helps us to select a teacher network of the right size.
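a minimal pytorch-style sketch of the teacher-assistant idea described above is given below: the same soft-target distillation loss is applied twice, first teacher to ta, then ta to student. the temperature, the weighting, and the toy model factory in the commented usage are illustrative assumptions, not the original authors' code.

import torch
import torch.nn.functional as F

def kd_loss(student_logits, teacher_logits, labels, T=4.0, alpha=0.9):
    """Soft-target distillation: alpha * KL(teacher || student) at temperature T plus (1 - alpha) * CE."""
    soft = F.kl_div(
        F.log_softmax(student_logits / T, dim=1),
        F.softmax(teacher_logits / T, dim=1),
        reduction="batchmean",
    ) * (T * T)
    hard = F.cross_entropy(student_logits, labels)
    return alpha * soft + (1 - alpha) * hard

def distill(teacher, student, loader, epochs=1, lr=1e-3):
    """Train `student` to mimic a frozen `teacher`; reused for teacher->TA and TA->student."""
    opt = torch.optim.Adam(student.parameters(), lr=lr)
    teacher.eval()
    for _ in range(epochs):
        for x, y in loader:
            with torch.no_grad():
                t_logits = teacher(x)
            loss = kd_loss(student(x), t_logits, y)
            opt.zero_grad()
            loss.backward()
            opt.step()
    return student

# hypothetical two-stage takd pipeline: large teacher -> medium ta -> small student
# teacher, ta, student = make_cnn(10), make_cnn(6), make_cnn(2)   # make_cnn is an assumed helper
# ta = distill(teacher, ta, train_loader)
# student = distill(ta, student, train_loader)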
decoupled knowledge distillation (dkd) is a new approach to knowledge distillation, which divides the traditional knowledge distillation loss into two parts: target class knowledge distillation (tckd) and non-target class knowledge distillation (nckd). dkd reformulates knowledge distillation (kd) as a weighted sum of these two parts, one related to the target class and the other unrelated to the target class. in summary, this new approach to interpreting logit distillation decomposes the classical kd loss into target class knowledge distillation (tckd) and non-target class knowledge distillation (nckd). there are various approaches to knowledge distillation, which include transferring knowledge from complex teachers to smaller students through a combination of cross-entropy and distillation loss. the concept of knowledge distillation (kd), which involves transferring knowledge from a pre-trained teacher model to a student model by minimizing the difference between prediction scores, is discussed here, highlighting the importance of balancing raw classification loss and kd loss in training students, and also discussing the challenges of using stronger teachers in kd, which may lead to greater differences between teachers and students and make it difficult to achieve exact matching. experimental setups and results for image classification using the dist method are described here; dist outperforms previous knowledge distillation (kd) methods in handling large differences between teacher and student models. the proposed dist significantly outperformed vanilla knowledge distillation without ground-truth labels when students were trained only on the knowledge distillation loss. in summary, the new knowledge distillation method, dist, aims to improve knowledge distillation from a stronger teacher's perspective. a new approach, called curriculum temperature for knowledge distillation (ctkd), is proposed here, which improves the performance of knowledge distillation by gradually increasing the learning difficulty of students through dynamic and learnable temperatures. they also investigated the effect of using different teachers on knowledge distillation and found that stronger teachers with similar architectures were better suited for feature-based distillation, while teachers with high accuracy but different architectures were not as effective.
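the tckd/nckd split described above can be sketched as follows. this follows the general dkd formulation as commonly presented (a kl term over the binary target/non-target split plus a kl term over the non-target classes only, weighted by alpha and beta); it is an approximation written for this review, not the original authors' reference implementation, and the default weights are assumptions.

import torch
import torch.nn.functional as F

def dkd_loss(student_logits, teacher_logits, target, alpha=1.0, beta=8.0, T=4.0):
    """Decoupled KD: weighted sum of target-class (TCKD) and non-target-class (NCKD) terms."""
    mask = F.one_hot(target, student_logits.size(1)).bool()

    p_s = F.softmax(student_logits / T, dim=1)
    p_t = F.softmax(teacher_logits / T, dim=1)

    # TCKD: KL between the binary (target vs. all non-target) probability pairs
    b_s = torch.stack([p_s[mask], 1.0 - p_s[mask]], dim=1)
    b_t = torch.stack([p_t[mask], 1.0 - p_t[mask]], dim=1)
    tckd = F.kl_div(b_s.log(), b_t, reduction="batchmean") * (T * T)

    # NCKD: KL over the non-target classes only (target logit pushed to -inf-like value)
    nt_s = F.log_softmax(student_logits / T - 1000.0 * mask.float(), dim=1)
    nt_t = F.softmax(teacher_logits / T - 1000.0 * mask.float(), dim=1)
    nckd = F.kl_div(nt_s, nt_t, reduction="batchmean") * (T * T)

    return alpha * tckd + beta * nckd

setting beta larger than alpha emphasizes the non-target knowledge, which is the lever the decoupled formulation exposes that the classical kd loss does not.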
here, by analyzing the limitations of the existing method and focusing on its improvement, a new method, logit distillation, is proposed, and the knowledge distillation loss function is re-represented as a weighted sum of two components, target class knowledge distillation (tckd) and non-target class knowledge distillation (nckd). we discussed four variants of knowledge distillation, including teaching assistant distillation, curriculum distillation, mask distillation, and decoupling distillation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/157.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/157.txt new file mode 100644 index 0000000000000000000000000000000000000000..f8df2736c227a9e5745ae53a11709e9f3c7c8994 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/157.txt @@ -0,0 +1 @@ +the mrt3, also known as the metro rail transit line 3, is a rapid transit system in the philippines. it runs along edsa, a major thoroughfare in metro manila, and serves approximately 7% of the total population of the philippines (philippine statistics authority, 2021). the mrt3 was built in the late 1990s and early 2000s to alleviate traffic congestion in the city and provide a more efficient public transportation option for commuters. it is operated by the metro rail transit corporation and serves millions of passengers annually. the mrt3 has become an integral part of the transportation infrastructure in metro manila and is an essential part of daily life for many residents. unfortunately, the mrt3 is often overcrowded, with long lines and packed trains becoming common. 70% of passengers in metro manila rely on public transport, and commuters have complained about the difficulty of finding a seat or even standing room on the trains, leading to a frustrating and uncomfortable travel experience (ito, 2022). despite efforts to improve capacity and efficiency, the mrt3 remains a crowded and congested transportation option for many filipinos. this research will focus on the mrt3, which links to one of the country's premier central business areas, makati.the mrt3 operates on a single line stretching 13.8 kilometers from the north avenue station in quezon city to the taft avenue station in pasay city. it has 13 stations along its route, including notable stops at edsa, ayala, and makati. the trains run on a standard gauge track and are powered by an overhead catenary system. each train consists of four cars, with a total capacity of up to 1,000 passengers. the mrt3 has a maximum speed of 80 kilometers per hour. however, it typically operates at a slower speed due to the high volume of passengers and the densely populated areas it serves. the system also has a communication-based train control system, which helps improve the trains' reliability and safety. despite these technical features, the mrt3 has faced numerous challenges recently, including issues with its aging infrastructure, maintenance problems, and overcrowding (dalmacio, et al., 2019).transportation in the philippines has been significantly impacted by the covid-19 pandemic (chuenyindee, et al., 2022). to curb the spread of the virus, the government implemented various measures, including the suspension of public transportation and strict health protocols. despite this, the international association of public transportation stresses the need for public transportation to promote mobility during the crisis (des transport publics, 2020). 
this has significantly impacted commuters, particularly those who rely on public transportation to get to work or school. in response, the government has implemented alternative transportation options, such as shuttle services and bikesharing programs. however, these options are often limited and must thoroughly address all commuters' needs. in addition, implementing health protocols, such as the requirement for face masks and the reduction of capacity on public transportation, has further exacerbated the already crowded conditions on many modes of transportation. overall, the covid-19 epidemic has disrupted transportation in the philippines and presented substantial obstacles to commuters since many filipinos simultaneously continue to go to work, school, and home in metro manila. the roads become highly crowded with public and private cars; hence, the mrt replaces buses and jeepneys (guno, et al., 2021). consequently, the mrt might become busy at certain hours of the day. thousands of commuters have difficulty fitting onto mrt vehicles, and the number of passengers regularly surpasses the mrt's optimal capacity. this study intends to produce a data-driven forecast of mrt3 ridership at particular stations and at certain times of the day. the suggested model must incorporate light gbm, gradient boosting, and extreme random trees to provide reliable predictions. by offering a data-driven forecast of the volume of passengers at a particular moment, commuters may plan for potential rush hours and explore alternative ways of getting around.there are several benefits to applying time series forecasting in predicting the ridership of the mrt3 in the philippines. first, time series forecasting can provide a reliable estimate of future ridership based on past data, which can be used to plan for resources and infrastructure needs. by analyzing patterns and trends in the data, time series forecasting can help to identify factors that may influence ridership, such as the time of day, weather conditions, or holidays. this can allow decision-makers to make informed decisions about optimizing the mrt3 system to meet the needs of commuters. additionally, time series forecasting can help to identify potential problems or issues in advance, such as overcrowding or maintenance needs, allowing for proactive solutions to be implemented. overall, time series forecasting can provide valuable insights and support decision-making for improving the efficiency and effectiveness of the mrt3 system. this paper is structured as follows: section ii summarizes relevant studies and efforts on machine learning approaches to ridership forecasting. the third section gives background information on the algorithm and approach utilized to construct the current study. in section iv, the findings of the experiment are presented. section v concludes with a summary and recommendations for further study. first, time series forecasting can provide a reliable estimate of future ridership based on past data, which can be used to plan for resources and infrastructure needs. by analyzing patterns and trends in the data, time series forecasting can help to identify factors that may influence ridership, such as the time of day, weather conditions, or holidays.time series forecasting is a method of predicting future values based on past data points collected at regular intervals over some time. time series forecasting uses statistical models to analyze patterns and trends in the data and predict future values. 
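to make the forecasting setup concrete, here is a small self-contained sketch (not the paper's code) that builds lag features from a synthetic hourly ridership series and evaluates a gradient-boosting regressor from scikit-learn with expanding-window folds; the column names, lags, and synthetic data are illustrative assumptions, and the paper's lightgbm / extremely-randomized-trees models would slot in the same way.

import numpy as np
import pandas as pd
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.metrics import mean_absolute_error
from sklearn.model_selection import TimeSeriesSplit

def make_lag_features(series, lags=(1, 2, 3, 24)):
    """Turn a univariate ridership series into a supervised-learning table of lagged values."""
    df = pd.DataFrame({"y": series})
    for lag in lags:
        df[f"lag_{lag}"] = series.shift(lag)
    df["hour"] = series.index.hour          # time-of-day effect (rush hours)
    df["dayofweek"] = series.index.dayofweek
    return df.dropna()

# hypothetical hourly ridership for one station
idx = pd.date_range("2019-01-01", periods=24 * 90, freq="h")
ridership = pd.Series(
    3000 + 1500 * np.sin(idx.hour / 24 * 2 * np.pi) + np.random.randn(len(idx)) * 200,
    index=idx,
)

data = make_lag_features(ridership)
X, y = data.drop(columns="y"), data["y"]

# walk-forward evaluation: each fold trains on the past and tests on the future
maes = []
for train_idx, test_idx in TimeSeriesSplit(n_splits=5).split(X):
    model = GradientBoostingRegressor(n_estimators=200, max_depth=3)
    model.fit(X.iloc[train_idx], y.iloc[train_idx])
    maes.append(mean_absolute_error(y.iloc[test_idx], model.predict(X.iloc[test_idx])))
print("mean MAE across folds:", np.mean(maes))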
many algorithms can be used to train and predict time series data, such as arima, prophet, and sarima; all these algorithms are used for specific use cases, like if there are trends, seasonality, and irregularities in the data. (2018)presents a machine learning model for predicting ridership in metro systems. the model combines convolutional neural networks (cnns) and long short-term memory (lstm) networks in a parallel architecture to analyze data from multiple sources, including passenger flow, weather, and economic indicators. the model combines two techniques: seasonal and trend decomposition using loess (stl) and long short-term memory (lstm) neural networks. the stl technique identifies and removes trends and seasonal patterns from the data, while the lstm networks are used to analyze the remaining data and make predictions. (2020)overcome the difficulty of processing multivariate long stationary timer series, such as electroencephalogram (eeg) data. the phases of data collection, data cleansing, and machine learning model development are covered. the platform also offers a range of features to improve the accuracy and performance of the models, including data preprocessing, feature engineering, and model selection. azure automl also provides real-time performance monitoring and the ability to deploy the models to various environments, such as azure machine learning, azure kubernetes service, or azure functions. it involves dividing the available data into a specified number of "folds," or subsets, and training the model on a different subset each time while evaluating its performance on the remaining data. the train data was used in the first part of the dataset to help build the prediction model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/158.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/158.txt new file mode 100644 index 0000000000000000000000000000000000000000..7937504e3ce79ad8380011c8cc23587e68a66de3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/158.txt @@ -0,0 +1 @@ +program comprehension is a critical part of developing and maintaining large software systems. many analysis and comprehension tools operate on program source code, such as code similarity comparison , automated program repair , and fault localization . however, when dealing with legacy systems or proprietary software, researchers and reverse engineers often have access only to the distributed compiled binaries. binary files are frequently viewed as more difficult to analyze than source code files, in part because assembly and machine language lack the semantics and structure present in a higher-level programming language. although debugging formats such as dwarf allow compilers to build binaries with debugging symbols to integrate semantic information within the binary, release builds often omit these symbols to improve performance and conceal intellectual property.to analyze binary files, reverse engineers use decompilers to transform a binary file (or single function) into an equivalent source code representation, which in turn helps engineers comprehend the original semantics of the file or function in question. this transformation augments the sequential, typeagnostic nature of assembly programming with abstractions such as variables names and types. 
however, without the original semantic information, decompilers are unable to provide meaningful names to these variables, instead assigning names and types based on implementation-specific naming conventions (e.g., 'uvar1'). with the success of machine learning models in the domains of natural language processing and programming language analysis, researchers are proposing machine learning models to recover missing semantic information in binaries. these models leverage the insight that semantic information present in source code is context-dependent: variables that appear and are used in similar contexts tend to be assigned similar names and types. one such approach is the dirty (decompiled variable retyper) model proposed by chen et al., which adapts the transformer architecture to predict variable names and types in decompiler outputs. due to its focus on variable types, dirty leverages both the decompiled source code tokens as well as the object layout for all variables found during decompilation to improve model performance. this architecture, combined with the novel use of variable layouts, resulted in a model capable of identifying correct variable types 75.8% of the time. one main concern with the dirty model is its ability to generalize to different decompiler outputs, as a swath of tools are commonly used in reverse engineering. the authors tested their implementation solely using the commercial hex-rays decompiler, whose expensive licensing restrictions are outside the budget of many researchers and hobbyist reverse engineers. for this reproducibility study, we address this concern by training the dirty architecture with a dataset decompiled using the open-source ghidra decompiler. we observe that the performance of this neural architecture is comparable for both hex-rays decompiler outputs and ghidra decompiler outputs. the use of ghidra is addressed in the dirty paper, where the authors concluded that the inability of ghidra to reliably obtain correct ground truth semantic information from debugging symbols in the data processing stage harmed the performance of the model. while we do not address specific decompilation algorithms in this paper, we adapt the dataset used in dirty by including only those binaries decompiled by ghidra that contain corresponding dwarf symbolic information that can serve as adequate ground truth. we show that this dataset permits the dirty architecture to generalize to decompiler output produced with ghidra for variable naming and typing tasks. we hope that this reproducibility study will aid future researchers seeking to incorporate ghidra and other reverse engineering tools into datasets and evaluations. in summary, we demonstrate that dirty's architecture for the task of variable type prediction generalizes to our newly curated ghidra dataset. for future reference, we refer to dirty hex-rays as the original model implementation in the dirty paper and dirty ghidra as our model trained on ghidra-decompiled functions. the original authors were kind enough to provide the underlying collection of binaries used to select the dirt binaries.
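the ground-truth construction idea described here, matching decompiler-produced variables to dwarf debug variables, can be sketched roughly as below. the data structures and storage encodings ("RDI", "stack:-0x18") are hypothetical stand-ins chosen for illustration; this is not the ghidra api or the dirty preprocessing code, and variables with no matching storage location are simply reported rather than merged, which mirrors the caution discussed next.

from dataclasses import dataclass

@dataclass(frozen=True)
class Var:
    name: str
    storage: str   # e.g. a register name or stack offset (hypothetical encoding)
    type_: str

def align_variables(decompiled_vars, dwarf_vars):
    """Match decompiler variables to DWARF variables that share a storage location.

    Unmatched variables (e.g. decompiler temporaries like uVar1) are kept but carry
    no ground-truth name or type.
    """
    by_storage = {v.storage: v for v in dwarf_vars}
    matched, unmatched = {}, []
    for v in decompiled_vars:
        gt = by_storage.get(v.storage)
        if gt is not None:
            matched[v.name] = gt
        else:
            unmatched.append(v.name)
    return matched, unmatched

# hypothetical usage
ghidra_vars = [Var("uVar1", "stack:-0x18", "undefined4"), Var("param_1", "RDI", "long")]
dwarf_vars = [Var("count", "RDI", "int")]
matched, unmatched = align_variables(ghidra_vars, dwarf_vars)
# matched -> {'param_1': Var(name='count', storage='RDI', type_='int')}; unmatched -> ['uVar1']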
while ghidra recovers the cpu_flags dwarf variable and includes it in the decompiled function source, ghidra fails to assign values to this variable, instead assigning cpu_flags information to a decompiler-created uvar1 variable, which does not contain any debugging information. although reverse engineers reading this function might be able to see that uvar1 aliases the cpu_flags variable and thus combine the two variables to create a cleaner, more accurate decompiled function, these two variables do not reside in the same memory location in ghidra, which prevents us from confidently merging these two variables during preprocessing. indeed, we see that, for some functions, ghidra correctly uses variables found in dwarf debugging sections, and hex-rays suffers from a similar problem in variables suddenly appearing when decompiling stripped binaries compared to binaries with debugging information. secondly, we note that this inability to correctly assign variables with dwarf types actually conveys semantic information, since it signifies that the variable is either a temporary or was combined with other variables in the source code during analysis.thus, we do not remove such "bad" dwarf variables from the training set because they are variables encountered by ghidra during analysis, and we do not want to artificially bias the results of our replicated model by introducing a bad data exclusion step not present in the original published results.in this section, we aim to answer the following research question: is the transformer architecture employed in dirty as effective at predicting variable names and types for a dataset decompiled with ghidra instead of hex-rays?. due to the low amount of structure variables and high levels of disappear variables in the ghidra dataset, we isolate these variable types and obtain their accuracies separately. while dirty hex-rays and dirty ghidra have similar accuracies for retyping variables in functions encountered during training, dirty ghidra performs slightly better than dirty hex-rays when retyping variables in functions not seen during training.for variables without corresponding dwarf variable information, dirty ghidra outperforms dirty hex-rays by 5%. while the increase in prediction rates can be attributed to the increased proportion of these variables in the ghidra dataset, the high rate of accurate predictions suggests that for both the hex-rays and ghidra decompilers, there are certain underlying patterns that unify cases in which dwarf semantic information is unrecoverable.for variables with corresponding dwarf variable information, dirty ghidra performs similarly to dirty hex-rays , correctly predicting the dwarf types 53. conclusion in this paper, we extend the work of chen et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/159.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/159.txt new file mode 100644 index 0000000000000000000000000000000000000000..ceee2d6363d9fd34c2f7e626bbd45e00b17cf64f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/159.txt @@ -0,0 +1 @@ +recent advances in the field of artificial intelligence (ai) have improved the accuracy of ai models to a point where they exceed the performance of human experts on an increasing number of tasks (farrell 2021;rajpurkar et al. 2017;topol 2019). however, various application domains remain where ai models can not consistently outperform human experts (cremer 2021;raghu et al. 2019;kühl et al. 2022). 
motivated by this observation, prior work has started to explore how the capabilities of human experts and ai models can be combined. in this context, learning to defer algorithms have demonstrated promising results by enabling the ai model not to make a prediction for all instances but rather to learn to defer a subset of them to the human expert-based on the ai model's and the expert's capabilities (mozannar and sontag 2020;okati, de, and rodriguez 2021;raghu et al. 2019). non-identical capabilities of the human expert and the ai can originate, e.g., from limited training data, limited model capacity, or the availability of side information only accessible to the human expert (hemmer et al. 2022b;charusaie et al. 2022). by compensating for the weaknesses of both the human expert and the ai through effective instance allocation, learning to defer algorithms can achieve a higher performance level than either the human or the ai model can achieve individually (raghu et al. 2019;wilder, horvitz, and kamar 2020).however, large amounts of labeled data are required to train learning to defer algorithms. in fact, they require not only ground truth labels to train the ai model but also additional human expert predictions that accurately reflect the expert's capabilities (leitão et al. 2022). in this context, any new human expert aiming to collaborate with the ai model would have to provide expert predictions so that the learning to defer algorithms can understand the expert's individual capabilities. it can become particularly costly in application domains with frequently changing human experts or where data labeling is time-consuming and requires the knowledge of highly trained specialists, e.g., in medicine. due to this impediment, the usage of learning to defer algorithms can become infeasible in these cases-despite their potential for decision-making tasks with high costs of errors.to reduce the number of human expert predictions required for the training of learning to defer algorithms, we propose a novel three-step approach capable of generating artificial expert predictions from only a small number of human expert predictions accurately reflecting the expert's capabilities. (1) we train an embedding model with ground truth labels to extract feature representations which (2) serve as a basis for training an expertise predictor model that learns to approximate the human expert's capabilities with only a small number of human expert predictions. it is trained on the available expert predictions while simultaneously leveraging instances for which no human expert pre-dictions are available by drawing upon an interchangeable semi-supervised learning component. (3) the expertise predictor generates artificial expert predictions for instances not labeled by the human expert. both human and artificial expert predictions combined can then be used to train a variety of learning to defer algorithms.we empirically demonstrate the efficiency of our approach on the cifar-100 dataset (krizhevsky 2009) using "synthetically" generated human expert predictions and on the nih chest x-ray dataset (majkowska et al. 2020;wang et al. 2017) that provides real-world individual radiologists' predictions. in addition, it also contains adjudicated ground truth labels provided by a panel of radiologists that serve as a "gold standard". 
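the three-step procedure described above can be summarized with the following scikit-learn sketch. the identity embedding, the choice of a k-nearest-neighbors expertise predictor, and the toy data are illustrative assumptions rather than the authors' implementation, which trains an embedding model and uses an interchangeable semi-supervised component.

import numpy as np
from sklearn.neighbors import KNeighborsClassifier

def generate_artificial_expert_labels(embed, X_all, expert_idx, expert_labels, n_neighbors=5):
    """
    1) embed all instances with a model trained on ground-truth labels (`embed`),
    2) fit an expertise predictor on the few instances labeled by the human expert,
    3) predict artificial expert labels for every instance, keeping real ones where available.
    """
    Z = embed(X_all)                                  # step 1: feature representations
    predictor = KNeighborsClassifier(n_neighbors=n_neighbors)
    predictor.fit(Z[expert_idx], expert_labels)       # step 2: learn the expert's behavior
    artificial = predictor.predict(Z)                 # step 3: artificial expert predictions
    artificial[expert_idx] = expert_labels            # do not overwrite real predictions
    return artificial

# hypothetical usage with a toy embedding
rng = np.random.default_rng(0)
X_all = rng.normal(size=(1000, 32))
expert_idx = rng.choice(1000, size=60, replace=False)   # e.g. six expert labels per class, ten classes
expert_labels = rng.integers(0, 10, size=60)
identity_embed = lambda x: x                            # stand-in for a trained embedding model
artificial = generate_artificial_expert_labels(identity_embed, X_all, expert_idx, expert_labels)

the combined set of real and artificial expert predictions can then be fed to any learning to defer algorithm in place of a fully labeled expert dataset.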
for example, with our proposed approach, six expert predictions per class suffice to enable all evaluated learning to defer algorithms to outperform both the ai model and the radiologist compared to either conducting the task alone. on average, this corresponds to 98.96% of the achieved accuracy compared to the training with a complete set of human expert predictions.to summarize, the contribution of our work is threefold: first, we propose a novel approach that learns the capabilities of individual human experts from a small set of human expert predictions. second, we demonstrate our approach's ability to enable state-of-the-art learning to defer algorithms to function with only a minimum of required human expert predictions while maintaining their desirable properties of achieving superior team performance. third, we show its real-world applicability in the context of the medical domain with a dataset providing individual radiologists' predictions together with high-quality ground truth labels. in this context, learning to defer algorithms have demonstrated promising results by enabling the ai model not to make a prediction for all instances but rather to learn to defer a subset of them to the human expert-based on the ai model's and the expert's capabilities(mozannar and sontag 2020;okati, de, and rodriguez 2021;raghu et al. 2022).to reduce the number of human expert predictions required for the training of learning to defer algorithms, we propose a novel three-step approach capable of generating artificial expert predictions from only a small number of human expert predictions accurately reflecting the expert's capabilities. (1) we train an embedding model with ground truth labels to extract feature representations which (2) serve as a basis for training an expertise predictor model that learns to approximate the human expert's capabilities with only a small number of human expert predictions. it is trained on the available expert predictions while simultaneously leveraging instances for which no human expert pre-dictions are available by drawing upon an interchangeable semi-supervised learning component., due to different levels of knowledge or biases(lampert, stumpf, and ganc ¸arski 2016), this requires the availability of individual predictions for each expert working with the ai model, which impedes the applicability of learning to defer algorithms, especially in application domains with frequently changing human experts or high acquisition costs for expert predictions.our approach addresses this limitation through the generation of artificial expert predictions only from a small number of predictions provided by an individual human expert. 2018;liao, kar, and fidler 2021;welinder and perona 2010).as it is often accompanied by high costs to acquire a large number of individual human expert predictions, particularly when the expert that collaborates with the ai model changes frequently, we assume individual human expert predictions to be available only for a small subset of instances l ⊆ n with |l| |n |.as both the cifar-100 and the nih datasets are balanced across classes, we use the system accuracy to evaluate the performance of the learning to defer algorithms trained on both the available human expert predictions l and the artificial expert predictions. 
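system accuracy, as used above, can be computed with a small helper like the one below: for each instance the team prediction is the expert's label when the algorithm defers and the classifier's prediction otherwise. this is a generic sketch, not tied to any specific learning-to-defer algorithm, and the deferral mask in the toy example is a hypothetical rule.

import numpy as np

def system_accuracy(y_true, clf_pred, expert_pred, defer_mask):
    """Accuracy of the human-AI team: the expert answers deferred instances, the classifier the rest."""
    team_pred = np.where(defer_mask, expert_pred, clf_pred)
    return float(np.mean(team_pred == y_true))

# toy example: defer exactly the instances the classifier gets wrong (hypothetical oracle rule)
y_true      = np.array([0, 1, 1, 0, 1])
clf_pred    = np.array([0, 0, 1, 0, 0])
expert_pred = np.array([0, 1, 0, 0, 1])
defer_mask  = np.array([False, True, False, False, True])
print(system_accuracy(y_true, clf_pred, expert_pred, defer_mask))  # -> 1.0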
furthermore, we consider the performance of the human expert and the ai alone as two lower boundaries (human expert alone and classifier alone) that have to be exceeded to demonstrate our approach's applicability to learning to defer algorithms. for the remaining weakness sub-2021) trained on different numbers of human expert predictions l and artificial expert predictions for h 60 and h 90 on cifar-100. figure3displays the results of the learning to defer algorithms trained on human and artificial expert predictions for different numbers of available human expert predictions on the nih dataset. the result of our empirical evaluation shows that even a small number of expert predictions per class are sufficient for generating artificial expert predictions that enable learning to defer algorithms to exceed the individual performance of both human and ai. however, generating artificial expert predictions that are used together with a small number of human expert predictions to train learning to defer algorithms can entail the risk of potentially resulting in an overall system that is discriminatory against certain protected groups. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/16.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ce88c20cd4bbe0bc9f2e3850ff44a4a142256417 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/16.txt @@ -0,0 +1 @@ +optimization is a vast field and is arguably one of the most useful tools for scientists and engineers. with applications in almost any industry, from operations research to climate analysis to process control to robotics, the need to further our understanding of optimization and develop efficient algorithms to solve optimization problems is clear. the mathematical structure and geometric interpretations of optimization make it an exciting academic research area. it is interesting for its own sake. so it is fortunate that optimization also happens to be extremely useful in solving real problems and developing real technology. another fortunate feature of optimization is that it has a rich history of remarkable leaps in understanding. one discovery of particular importance was the realization that the distinction between complex and easy optimization problems does not hinge on linearity, but rather, convexity . rockafellar published this historical paper in 1993. the date of his seminal discovery is interesting to note when put into context. humans first stepped on the moon in 1969. so it wasn't until over 20 years later that we realized the fundamental importance of convexity in optimization problems. nowadays, we are consistently sending rockets to space and back, which would not be possible without numerical optimization, in particular, convex optimization . how many more discoveries of the same magnitude as rockafellar's are left to make? currently, it seems that the theory behind convex optimization is nearly complete. so what developments are necessary to further our understanding of optimization and increase its utility? optimization that includes uncertainty is a research frontier that is ripe for research.in this survey paper, we review deterministic optimization and optimization under both aleatoric and epistemic uncertainty. from our past research about modeling uncertainty and optimization under uncertainty, we start applying them to the artificial intelligence (ai) domain. 
we have realized that optimization under uncertainty is one of the important fields in ai. we start by first doing a literature study and explaining what (important) methods have been used in optimization through this survey paper. the structure of this paper is as follows: in section 2 we briefly review optimization without uncertainty, convex and nonconvex. in section 3, we review the state-of-the-art methods for optimization under aleatoric and epistemic uncertainty. in section 4, we discuss optimization under uncertainty broadly and compare the different approaches. in section 5, we conclude with a brief summary and possible research directions. throughout the paper, we provide specific applications of optimization, where many of the applications are focused on optimal control. it is important to remember, however, that these applications are just one of many use cases for the techniques discussed in this survey paper. we provide these applications for concreteness. for a complete survey of optimization as applied to optimal control specifically, we refer the reader to the excellent review paper. one example of how sum of squares optimization is used for optimization under uncertainty is in the analysis and synthesis of lyapunov stable systems with bounded uncertainty in either the system dynamics or the operating environment. therefore, optimization problems utilizing intervals on some of the variables are a form of optimization under epistemic uncertainty. in the resulting min-max formulation, the inner loop is a search over the epistemic variables w for the upper bound on the objective and the outer loop is the true optimization problem at hand for chosen values of the epistemic variables. a nested loop optimization problem is computationally intensive due to the need to solve the inner optimization problem at every step of the outer optimization problem. in this decoupled formulation, the two optimization problems are solved iteratively, with the epistemic variables w being fixed in optimization problem (29) and the decision variables x being fixed in optimization problem (30). the approach is to first obtain a unique distribution for the random variables via a nested optimization problem: max_p min_w log(l(p; w)) subject to w_lb ≤ w ≤ w_ub (31), where p denotes the parameters of a multivariate normal distribution, µ and σ. after solving the above optimization problem, the resulting pdf that represents the random variables under interval uncertainty can then be used in a single-loop optimization formulation, such as (29), where w* is chosen to be the worst-case maximum likelihood estimate. in related work, a confidence measure ζ(µ_x) is defined and a multi-objective optimization problem for reliability-based optimization under epistemic uncertainty is proposed. the general approach of taking an uncertain optimization problem and transforming it into a deterministic one accounts for many of the methods in epistemic optimization. for generic optimization problems, it is not surprising that reducing an epistemic parameter down to a fixed parameter via a statistical function may lead to an optimization problem that uses many inaccurate parameters. constrained optimization under uncertainty can also be approached using decision theory: one of the most general models of epistemic uncertainty, even more general than interval and belief function models, is imprecise probability.
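the nested formulation discussed above (an inner maximization over the epistemic variables inside an outer minimization over the decision variables) can be prototyped with scipy as below. the quadratic toy objective and the interval bounds are assumptions chosen only to make the structure explicit; as the text notes, solving the inner problem at every outer step is what makes this approach expensive.

import numpy as np
from scipy.optimize import minimize

def objective(x, w):
    """Toy cost depending on decision variables x and epistemic variables w."""
    return (x[0] - w[0]) ** 2 + (x[1] + w[1]) ** 2 + 0.1 * np.sum(x ** 2)

def worst_case(x, w_lb, w_ub):
    """Inner loop: maximize the objective over the epistemic interval [w_lb, w_ub]."""
    res = minimize(lambda w: -objective(x, w), x0=(w_lb + w_ub) / 2,
                   bounds=list(zip(w_lb, w_ub)))
    return -res.fun

w_lb, w_ub = np.array([-1.0, -0.5]), np.array([1.0, 0.5])

# outer loop: minimize the worst-case cost over the decision variables x
outer = minimize(lambda x: worst_case(x, w_lb, w_ub), x0=np.zeros(2))
print("robust decision:", outer.x, "worst-case cost:", outer.fun)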
if an optimization problem only considers aleatoric uncertainty, then the optimization problem can typically be categorized as a stochastic optimization program. many epistemic optimization methods optimize for the worst-case realization of the epistemic variables, but one can also use bayesian inference methods to enforce levels of reliability/robustness in the problem. a separate class of optimization problems, fuzzy optimization, exists for optimization problems using fuzzy sets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/160.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/160.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c7282aaa1ada0d508f3c11cbc1532e360a33647 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/160.txt @@ -0,0 +1 @@ +nutrition is a key part of the performance of a rider. until 2020, the dutch cycling team jumbo-visma, winner of the tour de france 2022, would start preparing their calorie estimates up to three weeks in advance to ensure they had adequate estimates per cyclist and per stage. this is a time-consuming task. to improve team performance, we built regression models to predict the calories burned by a rider without needing any human computation. the models use information like the stage profile, the body mass index of a cyclist, or the race tactics, but also unforeseen factors such as the weather conditions. (figure 1: energy forecast procedure for team jumbo-visma coaches. machine learning provides prediction intervals for speed and power; coaches pick a value from each interval and forecast energy consumption as energy = power • race distance / speed.) following our forecasts, the nutritionists and cooks prepare meals for each rider per race day using the jumbo foodcoach app. this automated process ensures riders are provided with their exact nutrition needs, leading to a considerable advantage on race days. despite a significant improvement in calorie prediction over the manual predictions of coaches (r² score of 0.55 for the prediction by coaches versus 0.82 for the regression models), coaches still tune the output predictions. this means coaches tend to increase or decrease the models' outputs based on knowledge and previous experience with specific races. given this tendency for coaches to adjust the model predictions, instead of predicting a single outcome, it would be more beneficial to predict a range of possibilities. this can be achieved through prediction intervals. these intervals are calibrated based on the probability of encompassing the true output. by quantifying the reliability of the model predictions in estimating the speed and power of team jumbo-visma riders, coaches can adapt predictions based on the uncertainty of the forecasts. to achieve this, we employ methods from the conformal prediction framework introduced by vovk et al. (2005), providing valid and efficient prediction intervals. each interval is computed given a significance value α. this means that if we take, for instance, 100 tour de france races and predict the calorie intake for a specific rider per race, then in the long run the true value will be outside the prediction bounds for only about α · 100 of those races or fewer. figure 1 illustrates our approach. after we compute prediction intervals for both the race speed and the rider's power output for a specific race, coaches combine both to obtain an energy forecast.
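the combination step amounts to a simple calculation once the coach has picked values from the two intervals: the speed pick yields a race time, which together with the power pick gives mechanical work. converting that work into food calories requires an efficiency assumption; the rule of thumb used in the sketch below (mechanical kilojoules roughly equal to dietary kilocalories, i.e. about 25% gross efficiency) is our assumption, not a value stated by the team, and the distance/speed pair is hypothetical.

def energy_forecast_kcal(power_watts, race_distance_km, speed_kmh, kj_per_kcal_equiv=1.0):
    """Energy forecast from a picked power value and a picked race speed.

    race time [s] = distance / speed; mechanical work [kJ] = power * time / 1000.
    With the common ~1 kJ of work ~ 1 kcal of food energy approximation (assumption),
    the forecast in kilocalories equals the mechanical work in kilojoules.
    """
    race_time_s = race_distance_km / speed_kmh * 3600.0
    work_kj = power_watts * race_time_s / 1000.0
    return work_kj / kj_per_kcal_equiv

# 250 W over a 384-minute race, as in the example discussed in the text
# (384 min at an assumed 40 km/h corresponds to a 256 km stage)
print(energy_forecast_kcal(power_watts=250, race_distance_km=256, speed_kmh=40))  # -> 5760.0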
as a concrete example, for one of the races of the 2022 season, the long-term power forecast bounds were for a specific rider, with a predicted power of 245.17 (true value of 238.13). given the planned tactic and the previous experience of coaches with this race, the coach decided to round the power to 250 watts. combined with the predicted race time of 384 minutes, computed from the speed forecast, this resulted in a calorie forecast of 5760 kilocalories.in 2018, hilmkil et al. (2018) predicted the heart-rate response of a cyclist during a training session. the promising results led to a number of papers to predict the power performance of professional riders at the tour de france (kataoka and gray (2019)), to predict the winner of the tour de france (hobson and goff (2017)), to identify the next top cyclist (janssens et al. (2022)), and to athlete monitoring (leeuw et al. (2022)). nevertheless, none of those methods quantify uncertainty in their predictions. the data science team of visma connect started working with team jumbo-visma coaches and nutritionists in 2020 to improve the performance of the team using machine learning and mathematical methods. previously, the calorie intakes were computed manually by the coaches using only domain knowledge from previous similar races and experience. but out on the track, unforeseen factors impact how much energy the cyclists burn. the weather, for instance, can cause cyclists to exert themselves more, or perhaps the tactics of the team needs to change due to other circumstances. this means coaches would often have to review their estimates several times before each stage of the race, a time-consuming exercise that had to be done for each rider for all races of the season.we present the data that we received from team jumba-visma in section 2. we introduce our baseline prediction model and review current conformal prediction methods in section 3. we benchmark conformal methods on data from the giro d'italia and the tour de france in section 4. finally, we interpret our findings and give recommendations on how coaches can fine-tune conformal methods in practice in section 5. to achieve this, we employ methods from the conformal prediction framework introduced byvovk et al. each interval is computed given a significance value α. figure1illustrates our approach. for this project, we only retrieve the race name, race date, distance, race type (whether it is a one day race, stage race or grand tour) and name of the rider.the speed and power datasets use 8 and 10 features, respectively, ranging from the race type (one day race, stage race, or grand tour), the stage profile with the ascent/descent and distance, the weather conditions with the temperature, humidity, negative wind-effect and rainfall, attributes of the riders (body mass index (bmi)), and the tactics with the role of each rider (helper, climber or leader).we investigate the following conformal regression methods: jackknife and its variations (jackknife+, jackknife-minmax, jackknife+-after-bootstrap, and jackknife-afterbootstrapminmax), cross-validation (cv) and its variations (cv+ and cv-minmax), conformalized quantile regression (cqr) and inductive conformal prediction (icp).all prediction intervals outputted by the jackknife and cv methods (and their respected refined methods) have constant width for all features across the input space. 
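among the methods compared here, inductive conformal prediction (icp) is the simplest to show end to end. the sketch below hand-rolls split-conformal regression with scikit-learn on synthetic data rather than the team's datasets; note that this plain variant produces constant-width intervals, whereas the icp referred to in this study appears to be a normalized variant (scores divided by a per-instance difficulty estimate), which is what yields the non-constant widths discussed next.

import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
X = rng.normal(size=(1200, 8))                       # stand-in for race/rider features
y = X[:, 0] * 3 + np.sin(X[:, 1]) + rng.normal(scale=0.5, size=1200)

X_train, X_rest, y_train, y_rest = train_test_split(X, y, test_size=0.5, random_state=0)
X_cal, X_test, y_cal, y_test = train_test_split(X_rest, y_rest, test_size=0.5, random_state=0)

model = RandomForestRegressor(n_estimators=200, random_state=0).fit(X_train, y_train)

alpha = 0.1                                           # target error rate
scores = np.abs(y_cal - model.predict(X_cal))         # nonconformity scores on the calibration set
n = len(scores)
q = np.quantile(scores, np.ceil((n + 1) * (1 - alpha)) / n, method="higher")

pred = model.predict(X_test)
lower, upper = pred - q, pred + q
coverage = np.mean((y_test >= lower) & (y_test <= upper))
print(f"empirical coverage: {coverage:.3f} (target {1 - alpha:.2f}), interval width: {2 * q:.2f}")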
we compare the error rate and interval width of all methods as a function of the significance α (figures2and3) of the cv+, cv-minmax, jackknife+-after-bootstrap, jackknife-minmaxafter-bootstrap, jackknife-minmax, jackknife+, cqr, and icp methods for the speed and power response variables. to differentiate constant and non-constant interval size prediction intervals methods, the two methods computing non-constant interval size prediction intervals (cqr and icp methods) are depicted by dashed lines. the jackknife+, jackknife+after-bootstrap, cv+, icp, and cqr methods compute prediction intervals close to the target value, particularly for the power response variable. the jackknife+, cv+ and jackknife+-after-bootstrap method produce the tightest intervals, particularly for the power response variable, followed by the minmax methods.to illustrate the benefits of our approach, we compare in figure4the manual predictions of coaches for two grand tour races of 2019 (grand tours are considered the most prestigious races of the season, typically spanning over three weeks) with our own forecasts, both single-point and prediction intervals, and the true response in predicting the race speed. the prediction of coaches tend to often fall outside the prediction intervals, while the error rate of our prediction interval is close to the target value.the cqr performs worst in terms of width of prediction intervals for the speed response variable, leading to wider intervals compared to other methods and an error rate under the target value α. this may result in more conservative prediction intervals for our small dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/161.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/161.txt new file mode 100644 index 0000000000000000000000000000000000000000..e02544f6ed39950f0101a30807cb5d254c9f97c9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/161.txt @@ -0,0 +1 @@ +multi-task learning (mtl) addresses multiple machine learning tasks simultaneously by creating a single multitask deep neural network (dnn) . due to parameter sharing, a multi-task dnn model is more computation and memory efficient compared to multiple single-task models. the mtl framework is extremely useful for resourceconstrained devices like smartphones, wearables, and selfdriving cars that host ai-powered applications but have low memory resources and strict latency requirements. for example, in self-driving cars, the model needs to recognize traffic lights, objects, and lanes based on the input signal .on the other hand, network pruning is a longstanding and effective method to compress dnns. it aims to detect the importance of model parameters and remove the ones that tend to have the least significance on the performance. approaches for network pruning can be classified as unstructured pruning methods that mask individual weights in the network, and structured pruning methods that remove complete filters and directly lead to efficient deep neural models without requiring specialized hardware for sparse structures. network pruning is studied rigorously in literature for single-task models and there exist many pruning criteria based on weight magnitude , connection sensitivity , and even learning-based pruning methods .however, the study on the effectiveness of structured pruning methods on multi-task models is sparse. 
a few works like propose weight sharing and merging strategies for constructing a multi-task model from multiple single-task models such that there is a minimum conflict between tasks. then the constructed multi-task model could be effectively pruned with single-task pruning methods. but these works show very similar results when comparing their proposed pipeline with the baseline single-task pruning methods. another work , proposed a method to directly prune mtl networks but while pruning those models with single-task pruning baselines, they did not try different hyperparameter settings to retrain/fine-tune the pruned models. it could be possible that different hyperparameter settings would work better because different pruning methods lead to different architectures.in this work, we investigate the effectiveness of structured pruning on multi-task dnns. we apply two structured pruning methods to prune multi-task models and show that regardless of the method used, we can obtain similar results from the pruned models with the same number of param-eters. specifically, we use an existing single-task pruning method as well as introduce another mtl-based pruning criterion. the proposed criterion is called cosprune and it identifies and prunes the convolutional filters that have conflicts between tasks. it uses pairwise cosine similarity between the task-specific gradients that flow through the filter during back-propagation. we accumulate this similarity score for some training iterations for every filter in the multi-task model. the filters with the least accumulated scores are pruned away. in contrast, the single-task pruning method is called taylor pruning which is a popular gradient-based pruning method. it determines the importance of the filter by looking at the increase in the loss function if that filter is removed. the taylor pruning importance score is also accumulated over some training iterations before pruning.we start our analyses by using the iterative pruning and fine-tuning strategy which repeatedly prunes a small proportion of filters and fine-tunes the multi-task model to gain back the lost performance . using this strategy we get consistently better results with cosprune against taylor pruning across all the tasks. the multi-task model achieved higher gflops/parameter reduction with cosprune without performance loss across all the tasks. this shows that the proposed cosprune criterion coupled with iterative pruning is a reasonable method for pruning multi-task models.however, when we re-train the pruned models independently with random initialization, we observe that they can give relatively better results on all the tasks when compared to the corresponding fine-tuning stage of iterative pruning. the key is to determine good learning rates for each of the pruned models. using the same learning rates is not the best strategy to compare different pruned architectures. this is because every model has different layer-wise configurations and so, an optimal hyperparameter setting for one model may not be best for the other models with different architectural designs. this type of analysis is generally not done in the existing literaturethe same training settings are used for the dense model as well as the pruned models. 
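the two filter-importance criteria discussed above can be sketched roughly as follows; this is a schematic reading of taylor pruning and of a cosprune-style cosine-similarity conflict score, with hypothetical tensor shapes, not the authors' implementation.

# sketch: first-order taylor importance and a cosprune-style conflict score (illustrative)
import torch

def taylor_importance(weight, grad):
    # |w * dL/dw| summed per output filter approximates the loss change if the filter is removed
    return (weight * grad).abs().flatten(1).sum(dim=1)

def cosine_conflict_score(per_task_grads):
    # per_task_grads: list of gradients of the same conv weight, one per task,
    # each of shape (out_channels, in_channels, k, k); lower score = more conflict between tasks
    flat = [g.flatten(1) for g in per_task_grads]
    score = torch.zeros(flat[0].shape[0])
    for i in range(len(flat)):
        for j in range(i + 1, len(flat)):
            score += torch.nn.functional.cosine_similarity(flat[i], flat[j], dim=1)
    return score

# hypothetical usage: accumulate scores over training iterations, then prune the lowest-scoring filters
w = torch.randn(64, 32, 3, 3)
g_task_a, g_task_b = torch.randn_like(w), torch.randn_like(w)
taylor_scores = taylor_importance(w, g_task_a + g_task_b)
conflict_scores = cosine_conflict_score([g_task_a, g_task_b])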
a study on single-task structured pruning also shows that re-training the pruned models from random initialization can lead to better results than fine-tuning the pruned architectures in the iterative pruning setting.furthermore, the pruned architectures from different pruning methods give similar results to each other at the same parameter level after re-training. there are no consistent winners with respect to different pruning criteria, which is contrary to what we observe in the case of iterative pruning. there are also some recent works in the context of single-task neural networks where random channel pruning is able to match the results of the dense model under appropriate settings. similarly, another work used different pruning methods on various architectures to show that randomly re-initializing the pruned models can match the performance of the respective unpruned models but they did not compare those different pruning strategies on the same model.we go beyond existing works and apply different structured pruning methods to the same multi-task model. we emphasize that the pruned mtl model obtained from any reasonable pruning method can perform well if it is trained from random initialization with its optimal learning rate. and we would like researchers to pay attention to the re-training comparisons with sufficient hyperparameter tuning when they try to propose a new pruning method. approaches for network pruning can be classified as unstructured pruning methodsthat mask individual weights in the network, and structured pruning methodsthat remove complete filters and directly lead to efficient deep neural models without requiring specialized hardware for sparse structures. network pruning is studied rigorously in literature for single-task models and there exist many pruning criteria based on weight magnitude, connection sensitivity, and even learning-based pruning methods. another work, proposed a method to directly prune mtl networks but while pruning those models with single-task pruning baselines, they did not try different hyperparameter settings to retrain/fine-tune the pruned models. we apply two structured pruning methods to prune multi-task models and show that regardless of the method used, we can obtain similar results from the pruned models with the same number of param-eters. a study on single-task structured pruningalso shows that re-training the pruned models from random initialization can lead to better results than fine-tuning the pruned architectures in the iterative pruning setting. 
similarly, another workused different pruning methods on various architectures to show that randomly re-initializing the pruned models can match the performance of the respective unpruned models but they did not compare those different pruning strategies on the same model.taylor pruning: it is a structured pruning method for single-task convolution neural networks which serves as a popular baseline for modern gradient-based pruning methods.model pruning: after the model is trained on the nyuv2 dataset, we apply iterative pruning with different filter pruning criteria (cosprune and taylor pruning) to get pruned mtl models.iterative pruning results: we applied the iterative pruning strategy with both the taylor pruning criterion and the cosprune importance criterion separately on the model initialized with the trained nyuv2 weights and ran 6 sets of experiments to accommodate for the variance in each method.from the results in table2, we can see that the models obtained from different pruning methods lead to very similar performance across all the tasks when the number of parameters is roughly equal. it can be seen that at very low parameters, the iterative pruning results become worse than the results of the models that were trained solely from scratch except in the case of normal estimation where the angle mean is slightly better in the case of the iterative cosprune method.analyzing the results: we observed that if we consider the pruned models obtained from two different pruning methods, randomly initialize them and re-train them with their optimal learning rate, they give similar results for the same number of parameters. at some parameter levels, the pruned model obtained from taylor pruning performed better but at some other parameter levels, the pruned model obtained from cosprune gave better results. furthermore, we saw that the structured iterative pruning was biased towards one pruning criterion over the other as figure1shows that the cosprune models outperformed taylor pruned models at the same parameter level. this might be happening because, in the case of iterative structured pruning, the inherited trained weights from previous pruning iterations may act as bad initialization for the pruned models at successive iterations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/162.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/162.txt new file mode 100644 index 0000000000000000000000000000000000000000..adaae813bbf6a6fbcc6091e4abe8b2755c7afd4e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/162.txt @@ -0,0 +1 @@ +time series are ubiquitous in most of the practical applications from meteorological forecasting, healthcare monitoring, to financial predictions. while recent advances in deep learning have made a huge impact on the field, one of the most commonly seen issues with time-series data are missing values. real world time series data is messy as it suffers from problems like sensor failures, data drops during transmission, malfunctioning sensors.missing data can have a serious impact on downstream models for time series classification, or forecasting. given the scarcity of labeled data, especially in domains like healthcare, it is not feasible to discard entire time series data. hence, practitioners need to create models while handling missing time series data, appropriately. 
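a minimal sketch of the masked-imputation training signal and the diagonal self-attention mask that are introduced further below in this text; the shapes, masking rate and the plain transformer encoder are hypothetical stand-ins, not the st-impute code.

# sketch: diagonal attention mask + masked imputation modeling loss (illustrative)
import torch

def diagonal_attention_mask(seq_len):
    # True entries are disallowed: each position may not attend to itself,
    # so a value cannot be reconstructed by trivially copying it
    return torch.eye(seq_len, dtype=torch.bool)

def masked_imputation_loss(model, series, observed_mask, mask_rate=0.15):
    # series: (batch, seq_len, features); observed_mask: 1 where a value is actually present
    artificial = (torch.rand_like(series) < mask_rate) & observed_mask.bool()
    corrupted = series.masked_fill(artificial, 0.0)
    recon = model(corrupted)              # the model applies the diagonal attention mask internally
    err = (recon - series) ** 2
    return err[artificial].mean()         # score only the artificially removed values

# hypothetical usage with a standard transformer encoder layer
encoder = torch.nn.TransformerEncoderLayer(d_model=16, nhead=4, batch_first=True)
attn_mask = diagonal_attention_mask(50)
x = torch.randn(8, 50, 16)
loss = masked_imputation_loss(lambda t: encoder(t, src_mask=attn_mask), x, torch.ones_like(x))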
to counter this issue, various imputation methods have been proposed in the literature.note, the main utility of imputation methods is to help increase the accuracy on downstream tasks like classification or regression. these tasks suffer if the imputation method used inserts a value which alters the distribution of the time-series, leading the downstream model to make errors. time series imputation methods can be categorized into two: unsupervised imputation methods and supervised imputation methods. unsupervised imputation methods learn statistical patterns in the observed time series to interpolate the missing values. methods in classical machine learning and statistics literature are mostly based on nearest neighbors to missing values or spline fitting or using state space models . recent methods using deep learning have been proposed to impute missing values, either use unsupervised learning or supervised learning from downstream tasks . supervised learning based methods use the downstream tasks like time series classification, as the primary teaching signal while imputing the missing values. these supervised learning methods have shown state-of-the-art performance on imputation accuracy .however, in practical settings, especially in domains like healthcare labeled data is limited. as labeling is expensive as well as sensitive in domains like healthcare where access to data is highly restricted due to regulatory concerns. unlabeled data on the other hand is relatively easier to access due as access to collecting and storing data has become cheaper over the last decades. with this observation, it is important to design imputation methods that can use both unlabeled and labeled data, i.e., semi-supervised methods.in this work, we propose a novel semi-supervised method, sparse transformer based imputation (st-impute), to impute missing time series values. transformers initially introduced in the area of natural language processing , have significantly improved the performance of time series forecasting methods as well . we modify the transformer architecture for time series imputation task by using diagonal self-attention masking and sparse activation functions. we train the model on the task of masked imputation modeling (mim) by imputing artificially removed values to mimic the imputation task. our modifications with the diagonal self-attention mask allows the model to train on time series reconstruction for non-missing values, alongside the supervised downstream task objective. st-impute improves imputation by 2%-9% over the most competitive baselines. to summarize, this work makes the following contributions:• we propose a novel semi-supervised learning algorithm for time-series imputation based on transformer architecture. • we make modifications to self-attention blocks by using a diagonal self-attention masking and sparse connections. this improves performance considerably over the vanilla transformer architecture. • we propose a masked imputation modeling loss to mimic the imputation task.• our results on three public datasets show that our method beats baselines over the imputation task, as well as downstream tasks like time series classification/regression.we organize the rest of the paper as follows. section 2 places our work in the context of the existing literature. section 3 discusses background concepts like self-attention. section 4 describes st-impute architecture and training objectives in detail. section 5 lays out our experimental settings. 
we present our results and analysis in section 6. finally, we conclude in section 7. time series imputation methods can be categorized into two: unsupervised imputation methods and supervised imputation methods. unsupervised imputation methods learn statistical patterns in the observed time series to interpolate the missing values. supervised learning based methods use the downstream tasks like time series classification, as the primary teaching signal while imputing the missing values.in this work, we propose a novel semi-supervised method, sparse transformer based imputation (st-impute), to impute missing time series values. these involve assumptions using polynomial fits over the missing values based on previously observed time series values. applying it to time-series would mean that time series that are similar in local patterns (time) would be used to fill up the missing values for a time series.some of the earliest deep learning methodson time series classification/regression tasks in presence of missing data concatenated missing data timestamps as a feature instead of imputing the missing the values. then, the model uses gru to learn to compute (impute) these missing values.generator is used to learn the distribution of time series by generating missing time series values.diagonal self-attention masking : since we take a reconstruction based approach in this work, we found that using a vanilla transformer makes it trivial for the encoder to reconstruct the observed time series values as it has access to these values through self-attention, with the attention scores lighting up on the diagonal.• mean imputation: imputes the missing values with the mean value of the time series. while the assumption may not hold true, it can provide a directional signal on quality of imputation as it would signify that the imputation method was able to capture patterns in the time series to be able to impute it correctly, helping the downstream classifier in the signals that can discriminate between time series classes. we want to see how effective it is for imputation for completely random missing values (as reported in previous sections) versus missing blocks of values. it is expected, as with the block missingness, it becomes harder for the model to impute values as (1) any error made in imputing one value would affect immediate missing neighbouring values; (2) the missing context around a missing value becomes larger, hence degrading the performance. we propose a masked imputation modeling task to train the model, which mimics time series imputation from the non-missing values. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/163.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/163.txt new file mode 100644 index 0000000000000000000000000000000000000000..f633aea21c7334f5835cb0818b6d1ce523de9637 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/163.txt @@ -0,0 +1 @@ +dynamic graph neural networks (dgnns) effectively handle real-world scenarios where the networks are dynamic with evolving features and connections. in general, dynamic gnn models can be classified into discrete-time and continuoustime models. the discrete-time dynamic graph networks(dtdgns) first generate a sequence of discrete snapshot graphs from the original dynamic graph data and then fuse information extracted from different snapshots . despite their utility in various applications, dtdgns suffer from information loss due to time discretization. 
to address this issue, continuous-time dynamic graph networks (ctdgns) have been developed, which directly take continuously occurring events as input, mitigating the information loss caused by time discretization in dtdgns. to model the continuity, ctdgn models based on ordinary differential equation (ode), random walk (rw), temporal point processes (tpp) and recursive neural network (rnn) have been proposed in the literature . due to their flexibility, generality, and ability to model complex time-varying relationships, this paper focuses on ctdgns.ctdgns have proven effective for modeling temporal graph data due to their ability to capture intricate temporal-spatial dependencies in long-term forecasting (ltf) tasks. however, the increased model complexity of ctdgns may lead to overfitting, resulting in capturing random noise rather than essential semantic information. moreover, the risk of overfitting in ctdgns is exacerbated by the common issue of inconsistent distribution between training and test data in ltf tasks. intuitively, we think the core problem of ltf is the lack of training data which has long time distance from the training data to forecast. consequently, data augmentation can be emerged as a direct and efficient solution to address the long-term forecasting challenges in continuous-time dynamic graphs. by generating additional training samples that capture the complex temporal patterns, data augmentation can enhance the model's generalization capabilities and mitigate overfitting issues.in the literature, there are already studies on data augmentation for graphs. rong et.al proposed dropedge to augment the training data by randomly removing a certain number of edges from the input graph. used mixup based techniques to augment the graph data so as to improve the training performance. however, these methods primarily target at static graphs and can not be easily applied to dynamic graphs with complex temporal dependency. dynamic graphs, unlike their static counterparts, exhibit vast changes in nodes and edges over time, with events occurring at non-uniform intervals and complex structural and temporal information complicating the data augmentation process. therefore, the challenging problem lies in generating data with valuable complex time-varying features instead of introducing random noise or simplistic synthesis. wang et.al proposed a memory tower augmentation (meta) module which stacks a few levels of memory modules to augment temporal graphs of different magnitudes on separate levels. however, meta has defined augmentation strategies in advance, such as perturb time, removing edges and adding edges with perturbed time. these strategies limit the intensity and direction of data enhancement to some extent. moreover, it can only be used for ctdgns with memory modules. considering these limitations, it is necessary to propose an efficient, simple and universal data augmentation method for continuous-time dynamic graphs.in this study, we propose a data augmentation method called uncertainty masked mix-up (ummu) that incorporates uncertainty estimation and masked mix-up to boost the performance of long-term forecasting for ctdgns. our main contributions are as follows:(1) we propose a plug-and-play module called uncertainty masked mix-up (ummu) to enhance the performance of ltf on ctdgns by data augmentation. ummu can be easily integrated into existing dgnns without bringing additional model complexity. 
furthermore, it does not induce any extra inference cost since it only applies to the training phase.(2) we incorporate uncertainty estimation to inject uncertainty into the embedding of the intermediate layer of ctdgns, using masked mix-up to further enhance this uncertainty, enabling the model to learn more representations of the complex relationships in the dynamic graph data.(3) we demonstrate the effectiveness of ummu by comprehensive experiments on three widely used ctdg datasets, showing significant improvements in long-term forecasting task. the discrete-time dynamic graph networks(dtdgns) first generate a sequence of discrete snapshot graphs from the original dynamic graph data and then fuse information extracted from different snapshots. dynamic graphs, unlike their static counterparts, exhibit vast changes in nodes and edges over time, with events occurring at non-uniform intervals and complex structural and temporal information complicating the data augmentation process.in this study, we propose a data augmentation method called uncertainty masked mix-up (ummu) that incorporates uncertainty estimation and masked mix-up to boost the performance of long-term forecasting for ctdgns.(1) we propose a plug-and-play module called uncertainty masked mix-up (ummu) to enhance the performance of ltf on ctdgns by data augmentation.(2) we incorporate uncertainty estimation to inject uncertainty into the embedding of the intermediate layer of ctdgns, using masked mix-up to further enhance this uncertainty, enabling the model to learn more representations of the complex relationships in the dynamic graph data.al proposed autoformeras a novel decomposition architecture with an auto-correlation mechanism. as graph neural networks (gnns) continue to evolve and gain prominence in various applications, an increasing number of researchers are focusing on data augmentation techniques for graph-structured data. these methods play a crucial role in improving the performance and robustness of gnn models by generating additional training samples that capture the inherent complexity and diversity present in graph data. nodeaugis proposes to augment and utilize the unlabeled data in semi-supervised learning, and graphcropcrops the subgraphs to augment the input features for the static graph classification. however, models employing these methods often demonstrate poor generalization ability for future data unless the entire model's dynamics and data dynamics are holistically described.considering the above, we propose ummu, unlike other da methods that enhance raw input graph data, ummu can be integrated into any layer of the model to enhance data for intermediate embedding in an easy-to-implement way. although the underlying distribution of the data shifts is unpredictable, the uncertainty estimation based on the training data observations can provide an appropriate and meaningful variation range for each event's feature, which does not harm model training but can simulate diverse and reasonable potential shifts. when using 10% of the dataset for training, incorporating the ummu module generally yields better results than adding the gsnop module for all models. despite this, ummu can still bolster the performance of models that already include gsnop.we propose a data augmentation method named ummu to boost the performance of long-term forecasting on continuous-time dynamic graph networks. 
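the description above does not pin down the exact operation, so the following is only one plausible reading of an uncertainty-aware masked mix-up applied to intermediate embeddings during training; the names, the noise model and the mixing scheme are assumptions, not the ummu paper's definition.

# sketch: one possible masked mix-up with uncertainty injection on embeddings (speculative)
import torch

def masked_mixup_with_uncertainty(emb, training=True, mix_prob=0.5, alpha=0.2):
    # emb: (batch, dim) intermediate event embeddings from a ctdgn layer (hypothetical)
    if not training:
        return emb                                  # training-time only: no extra inference cost
    batch, dim = emb.shape
    # uncertainty estimate: per-dimension std over the batch scales the injected noise
    noise = torch.randn_like(emb) * emb.std(dim=0, keepdim=True)
    perturbed = emb + noise
    # masked mix-up: mix only a random subset of dimensions with a shuffled batch partner
    lam = torch.distributions.Beta(alpha, alpha).sample()
    mask = (torch.rand(batch, dim, device=emb.device) < mix_prob).float()
    partner = perturbed[torch.randperm(batch)]
    return mask * (lam * perturbed + (1 - lam) * partner) + (1 - mask) * emb

emb = torch.randn(32, 64, requires_grad=True)
augmented = masked_mixup_with_uncertainty(emb)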
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/164.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/164.txt new file mode 100644 index 0000000000000000000000000000000000000000..3fcf49d12d4ed9efac6c965df39dcec6a103e0a5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/164.txt @@ -0,0 +1 @@ +knowledge tracing is an important field of research in educational data mining, as it can help to improve the effectiveness and efficiency of learning. the first application is personalized learning. surveys that by tracking the histories of individual students over time, instructors can tailor instructional materials to meet specific needs of each student. the second application is early intervention. recognizes that knowledge tracing enables instructors to intervene early and offer extra assistance or resources to students who need it in order to succeed by identifying those who are having difficulty with specific concepts or skills. knowledge tracing can also facilitate adaptive learning, which can change the level of difficulty and pace of instruction based on the understanding and performance of individual students. shows that learning and engagement can be enhanced since students are appropriately challenged can can avoid frustration or boredom.recent advances in artificial intelligence (ai) have shown great promise for improving educational outcomes. in particular, deep learning models have been developed to predict student performance on standardized tests, such as the sat and toeic, based on a variety of factors, including their previous academic records, socio-economic background, and personal characteristics. one of the key challenges in building such models is capturing the dynamic and complex nature of student behavior, which can vary widely over time and across individuals.in this paper, we propose a transformer that leverages users' histories for educational performance prediction. the transformer is a state-of-the-art neural network architecture that has achieved impressive results in various natural language processing tasks, such as machine translation, question answering, and text generation. our approach extends the transformer to model student performance by incorporating their past academic records, study habits, and other relevant information.to evaluate the effectiveness of our approach, we conducted experiments on real-world educational datasets, including the kaggle riiid aied challenge dataset. we demonstrate that converting temporal features into multiple categorical features with different granularities can greatly improve model's performance. the results even show that the lecture information is irrelevant in the present of the multi-granularity temporal features. our results also demonstrate that our model outperforms traditional lightgbm, achieving state-of-the-art performance in predicting student performance on standardized tests. moreover, we show that our model can be used to provide personalized recommendations to students based on their historical data, enabling them to improve their academic performance. bkt is a probabilistic model where student knowledge is modeled as a latent variable and other observed context information and learning performance are used to identify the latent structure represented by a hidden markov model. 
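as a concrete reference point for the bkt model mentioned above, the standard knowledge-update equations can be written out as a small sketch; the parameter values here are made up.

# sketch: standard bayesian knowledge tracing update (illustrative parameter values)
def bkt_update(p_know, correct, p_slip=0.1, p_guess=0.2, p_learn=0.15):
    # posterior probability of mastery given the observed response
    if correct:
        num = p_know * (1 - p_slip)
        den = p_know * (1 - p_slip) + (1 - p_know) * p_guess
    else:
        num = p_know * p_slip
        den = p_know * p_slip + (1 - p_know) * (1 - p_guess)
    posterior = num / den
    # account for the chance of learning the skill on this practice opportunity
    return posterior + (1 - posterior) * p_learn

p = 0.3  # prior probability the student already knows the skill
for response in [1, 1, 0, 1]:
    p = bkt_update(p, bool(response))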
the decoder in our model consists of 6 components: position embeddings, re-sponse embeddings, prior-elapsed-time embeddings, lag-time-1 categorical embeddings, lag-time-2 categorical embeddings, lag-time-3 categorical embeddings. this allows the model to capture the temporal dynamics of the data and learn representations that can account for changes in student learning over time.it can model complex interactions: the use of multiple embedding layers in both the encoder and decoder components of the model allows for the modeling of complex interactions between different factors that can affect student learning. for example, the model can learn to capture the relationship between a student's prior response and their subsequent performance on a related question, or the interaction between elapsed time and lag time in predicting student learning outcomes.the dataset contains over 100 million rows of data from student interactions with the platform, including information on the questions, answers, and explanations provided, as well as metadata such as the time elapsed since the previous interaction and the student's performance history. the goal of the elapsed time embedding is to assist the neural network in learning meaningful representations of the temporal information and in capturing the amount of time that has passed between the current taks or question ant the previous one. the network may learn to distinguish between various time periods and capture patterns and correlations between elapsed time and student performance by expressing elapsed time using an embedding layer. 5) lag-time-2 categorical embeddings: this feature encodes lag time for representing lag time in minutes, with a vocabulary size of 1441. 6) lag-time-3 categorical embedding: this feature encodes lag time for representing lag time in days, with a vocabulary size of 366.we did three experiments for comparison purposes: transformer without lecture information, transformer with lecture information, and lightgbm tableishows that the multiple temporal granularities are significant such that even lecture information is ignored, they can capture the complex interactions between question difficulty and student performance. to avoid data leakage, which entails using future information to predict past information, each feature at a given time point in the model had to be computed based on its information up to that time point. our architecture includes a sophisticated encoder component that captures information about the questions, parts of questions, and presence or absence of explanations, as well as a decoder component that incorporates temporal information about the sequence of interactions between the student and the learning materials. notably, we included three separate embedding layers to represent lag time in different temporal granularities, allowing the model to capture the temporal dynamics of the data.our architecture also has the ability to model complex interactions between different factors that can affect student learning, including the relationship between a student's prior response and their subsequent performance on a related question, or the interaction between elapsed time and lag time in predicting student learning outcomes. 
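a rough sketch of how lag time can be bucketed at several granularities and embedded, following the vocabulary sizes quoted above (1441 minute buckets, 366 day buckets); the embedding size, the seconds bucket and the clipping choices are assumptions.

# sketch: multi-granularity lag-time categorical embeddings (illustrative)
import torch
import torch.nn as nn

class LagTimeEmbeddings(nn.Module):
    def __init__(self, dim=64):
        super().__init__()
        self.lag_seconds = nn.Embedding(301, dim)    # hypothetical 0-300 second buckets
        self.lag_minutes = nn.Embedding(1441, dim)   # 0-1440 minutes, as in the text
        self.lag_days = nn.Embedding(366, dim)       # 0-365 days, as in the text

    def forward(self, lag_ms):
        sec = torch.clamp(lag_ms // 1000, 0, 300)
        minute = torch.clamp(lag_ms // 60_000, 0, 1440)
        day = torch.clamp(lag_ms // 86_400_000, 0, 365)
        # sum the three granularities into one temporal representation
        return self.lag_seconds(sec) + self.lag_minutes(minute) + self.lag_days(day)

lags_ms = torch.tensor([5_000, 240_000, 3 * 86_400_000])  # 5 s, 4 min, 3 days
emb = LagTimeEmbeddings()(lags_ms)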
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/165.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/165.txt new file mode 100644 index 0000000000000000000000000000000000000000..d286246943b647ec17b347323825bca40cf7a6d1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/165.txt @@ -0,0 +1 @@ +the intensive caring unit (icu) is an essential unit for the management of critically ill patients. however, many of these patients are at risk of readmission within 30 days of discharge, which can result in additional burdens on the patient, their family, and the healthcare system , . as a result, the development of a model that accurately predicts readmission risk can assist clinicians in identifying high-risk patients and intervening early to prevent readmission. however, there is a possibility of bias in such models due to factors such as race, gender, and socioeconomic status, which can lead to healthcare disparities and exacerbate existing health inequities . research , has shown that some predictive models may be less accurate in predicting health outcomes for certain racial and ethnic groups, exacerbating existing health disparities. in this context, bias , refers to the presence of systematic errors in the data or model that lead to unfair, inaccurate, or unequal treatment of certain groups of patients.to explore these issues, we have considered the hospital readmission use case on the mimic iii dataset . this dataset contains comprehensive records of over 60,000 icu admissions for approximately 40,000 patients over a ten-year period, making it an useful resource for developing and testing predictive models for readmission , . recent studies , , have discussed that the datasets used for predictive healthcare models often contain biases related to factors such as race, gender, and insurance status. these biases have the potential to negatively impact the accuracy of machine learning (ml) models and contribute to healthcare disparities. in this study, we want to explore the existence of systematic biases in a clinical dataset that may impact the readmission outcomes among the patients.our study aims to identify and quantify the biases, particularly those that are replicated by ml models. by doing so, we can develop strategies to address these biases and create more equitable predictive models for icu readmission. the findings of this paper can be valuable to clinicians and researchers working to develop predictive models, as well as those working to reduce healthcare disparities and promote health equity. proportional parity (pp)ensures proportional representation. 
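the group-metric disparity check referenced in what follows (a band of roughly 80% to 125% relative to a reference group) can be sketched like this; the dataframe columns, groups and values are hypothetical.

# sketch: disparity of a group metric relative to a reference group (illustrative)
import pandas as pd

def false_positive_rate(df):
    negatives = df[df.label == 0]
    return (negatives.pred == 1).mean() if len(negatives) else float("nan")

def disparity_table(df, attribute, reference, low=0.8, high=1.25):
    rates = df.groupby(attribute).apply(false_positive_rate)
    ratio = rates / rates[reference]
    return pd.DataFrame({"fpr": rates, "disparity": ratio,
                         "within_band": ratio.between(low, high)})

# hypothetical predictions for two insurance groups
data = pd.DataFrame({
    "insurance": ["medicare"] * 6 + ["self pay"] * 6,
    "label":     [0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0],
    "pred":      [0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0],
})
print(disparity_table(data, "insurance", reference="medicare"))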
following the industry standard for fairness-, if disparity for a group is within 80% and 125% of the value of the reference group on a group metric (e.16x lower than the reference group (medicare), the government group is 0.63x higher than the reference group (x represents the degree of disparity between the reference and other groups).for the attribute gender, with the reference group being male, the ep disparity for the female group is found to be 0.for the ethnicity group, with the reference group being white, the ep disparities for the other, black/african american, hispanic, and asian groups are found to be 0.for the attribute language group, with the reference group being english speaker, the ep disparity for the non-english group was found to be 0.25x, indicating that the non-english group was underrepresented in the selected set compared to the english speaker group. for insurance group, the self pay group is underrepresented with a disparity of 0. for language group, the non-english group is underrepresented with a disparity of 0.67x disparity, meaning they have a higher fpr than the reference group (male).76x disparity, meaning they have a higher fpr than the reference group (white).48x disparity, meaning they have a higher false positive error rate than the reference group (english speaker).40x disparity, meaning that they are 40% less likely to be correctly identified as negative compared to the reference group, white.74x disparity, indicating that they are 26% less likely to be correctly identified as negative compared to the reference group, english speakers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/166.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/166.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ec1de1120eb5a831be95a11c240468a522bbb1b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/166.txt @@ -0,0 +1 @@ +there are already numerous edge devices such as smartphones and iot devices that can collect valuable raw data, and ones expect to use these data to complete some intelligent tasks such as image recognition or text generation. however, deep learning, the most effective algorithm for accomplishing these tasks, requires huge data to train the model, making it challenging to learn a good enough model from the data owned by a single edge device. besides, due to data privacy, data protection regulations (voigt and von dem bussche, 2017), and the massive overhead of data transmission, it is unrealistic to aggregate data from different clients (edge devices) in a server for training. therefore, federated learning (fl) (kairouz et al., 2019) has emerged to solve the problem of jointly learning a global model without sharing the private data.although federated learning has shown good performance in many applications (kaissis et al., 2020;liu et al., 2020), there are still several important challenges that require researchers to pay attention to, namely privacy, communication cost, and statistical heterogeneity (ji et al., 2021). statistical heterogeneity means that client data is non-iid (independent and identically distributed). zhao et al. (2018) show that the accuracy of the federated learning algorithm has decreased significantly in the case of non-iid data. there are many methods proposed to address the challenge of statistical heterogeneity. 
fedprox (li et al., 2020) introduces a proximal term to constrain the update of the local model, and scaffold (karimireddy et al., 2020) corrects the gradient of each local update to reduce the variance. however, these methods do not bring significant improvement because they only implicitly deal with the fundamental dilemma caused by statistical heterogeneity, that is, the optimal objective of local update is inconsistent with the optimal objective of global update.in this work, we propose an approach fedshift to explicitly solve the above fundamental dilemma in the statistical heterogeneity challenge. fedshift is a simple and effective approach which adds the shift on the classifier output calculated by the client category distribution and makes the local optimal models satisfy the global optimum. we also prove the convergence results of fedshift in the strongly convex and non-convex cases and compare with fedavg, which does not have the classifier shift. numerous experiments are conducted to evaluate the effectiveness of fedshift, which demonstrate that fedshift outperforms the other state-of-the-art federated learning algorithms in test accuracy and communication efficiency on various datasets, including cifar10, cinic10 and tiny-imagenet. (2020)propose the fedprox algorithm, which adds a regular term to the loss function of the client.where l i (w) = e (x,y)∼di is the empirical loss of the i -th client that owns the local dataset d i , and d n i=1 d i is a virtual entire dataset that includes all client's local data. f (w; x) is the output of the model w when the input x is given, and i denotes the loss function of the i-th client.however, due to the inability to communicate local data, each client usually learns a local model w i on its local dataset by minimizing the experience loss l i (w i ). in fedavg algorithm(mcmahan et al.client drift as mentioned in(karimireddy et al.let w * be the global optimum of l(w) and w * i be the optimum of each client's empirical loss l i (w). the distribution of training data can be expanded as p (x, y) = p (x|y)p (y) and p i (x, y) = p i (x|y)p i (y), where p (x|y) is the conditional probability distribution of class y.note that w * i arg min w e (x,y)∼pi is not the optimum of the global optimization problem (min w e (x,y)∼p ), because of the statistical heterogeneity (p = p i ), although the global evaluation function is consistent with the local evaluation function ( i = ).different from reweighting, fedshift modifies the local optimization objective of each client to satisfy that w * i = arg min w e (x,y)∼pi ˜ i (f (w; x), y) also is the optimum of the global optimization problem min w e (x,y)∼p . for fedshift, by add shift s i in the output of model, the local optimum w * i satisfies the global optimum of min w e (x,y)∼p .1 y=k log e fk (w;x) k j=1 e fj (w;x) (6) let q i (y = k|x; w) e fk (w;x) k j=1 e fj (w;x) , we can derive that the optimal model w * i should satisfy q i (y = k|x; w * i ) = p i (y = k|x).which means that w * i satisfies min w e (x,y)∼p , which is the global optimum. 
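the text above does not spell out the exact form of the classifier shift, so the following is only one plausible instantiation in which each client adds the log of its local class prior to the logits during local training; it is a speculative sketch, not taken from the fedshift paper.

# sketch: a class-prior logit shift for heterogeneous clients (speculative)
import torch
import torch.nn.functional as F

def shifted_cross_entropy(logits, targets, class_counts):
    # class_counts: number of local samples per class on this client
    prior = class_counts.float() / class_counts.sum()
    shift = torch.log(prior.clamp_min(1e-8))           # shift derived from the local label distribution
    return F.cross_entropy(logits + shift, targets)     # applied only during local training

logits = torch.randn(16, 10)
targets = torch.randint(0, 10, (16,))
counts = torch.tensor([50, 5, 5, 40, 0, 10, 10, 30, 20, 5])
loss = shifted_cross_entropy(logits, targets, counts)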
theorem 2 shows us that under the strongly convex assumption of the function, benefiting from the classifier shift in fedshift, the global model can converge to the global optimum when there are enough iterations and communication rounds and a decayed learning rate.focusing on the class imbalance in the statistical heterogeneity of federated learning, we propose fedshift in this paper, which is a simple and effective method that adds the shift on the classifier output based on the client class distribution in the local training phase. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/167.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/167.txt new file mode 100644 index 0000000000000000000000000000000000000000..226a784a89db80112ff51625743ec5328178906f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/167.txt @@ -0,0 +1 @@ + wav2vec 2.0 has swept the speech processing community like a storm with the surprising effectiveness of transformerbased self-supervised models . self-supervision leverages large quantities of unlabeled data, which is much easier to obtain than labeled data. many task designs exist, but one of the most common is to mask the input and train the neural network to reconstruct the original input based on the surrounding information . the task definition enables the neural network to model the input distribution so that it can yield a general representation useful for various downstream tasks.after building a general self-supervised model, supervised training ensues to fine-tune the model to each specific need. one often attaches the penultimate fully connected layer to the frozen shared model, acting as the prediction head . however, input lengths often vary for both speech and text, a fundamental characteristic of sequential data. transformer's output representation length linearly increases as the input size gets longer, while the final prediction head requires the fixed-size input representation. it raises the problem of representation pooling; given a variable number of sequential representations, one has to summarize the representation to have a fixed size.the pooling task is often addressed as the unsupervised or paraphrastic sentence embedding problem in the nlp literature , especially after the success of word embeddings on various tasks. it aims to yield a general-purpose sentence embedding independent of each downstream task. by starting * equal contribution. from simply averaging the word representations , many unparameterized methods were introduced, such as considering the word frequency while averaging , whitening the representations , or utilizing the singular value transformation . however, existing methods are often dependent on the underlying tokenizer, making it nontrivial to apply to speech. also, simply averaging the speech representations poses a fundamental problem; each phone has a different length, so vowels will be overrepresented compared to consonants.in the meantime, many pooling methods have been introduced for summarizing the frame-wise speech representations, notably for speaker recognition. for example, statistics pooling (sp) concatenates the first-and second-order statistics of representations. also, the attention layer is often used to obtain which representation matters more for each downstream task . however, there has been limited interest in developing an unsupervised way of pooling. 
modern methods, except for sp, are often heavily parameterized, requiring labeled samples. in contrast, unsupervised pooling removes the training procedure altogether, being more closer to the idea of general speech embedding.to avoid parameterization while being speech-friendly, we shed light on vector quantization (vq) to substitute the tokenizer. decades of research have been conducted on vq , where it crept into the modern self-supervised models, such as vq-wav2vec . vq focuses on translating the real-valued vectors into countable indices, segmenting the latent embedding space with well-chosen centroids. given that essential components comprising speech, such as frequency, amplitude, and formants, are embedded inside the model representation , we expect the clusters to successfully gather similar phone representations to have the same cluster indices. we design various intuitive approaches that utilize vq to effectively summarize the speech representations, and experimentally demonstrate its effectiveness in a wide range of settings.to further boost the research on unsupervised pooling methods, we devise a benchmark that evaluates various pooling methods across multiple backbone networks and a wide range of tasks, namely, keyword spotting , speaker identification , intent classification , and emotion recognition . to avoid supervision altogether, we closely follow the recent contrastive learning literature to evaluate the effectiveness of the pooled representation directly via the nearestneighbor approach . also, we modify the existing unsupervised sentence embedding methods to compare their downstream performance. we further conduct various analyses that accurately depict the effectiveness and behaviors of our method by comparing with the parameterized methods via supervised learning, visualizing the weights directly, and exploring different settings for benchmark evaluation. it raises the problem of representation pooling; given a variable number of sequential representations, one has to summarize the representation to have a fixed size.the pooling task is often addressed as the unsupervised or paraphrastic sentence embedding problem in the nlp literature, especially after the success of word embeddings on various tasks. from simply averaging the word representations, many unparameterized methods were introduced, such as considering the word frequency while averaging, whitening the representations, or utilizing the singular value transformation.in the meantime, many pooling methods have been introduced for summarizing the frame-wise speech representations, notably for speaker recognition. given that essential components comprising speech, such as frequency, amplitude, and formants, are embedded inside the model representation, we expect the clusters to successfully gather similar phone representations to have the same cluster indices. we design various intuitive approaches that utilize vq to effectively summarize the speech representations, and experimentally demonstrate its effectiveness in a wide range of settings.to further boost the research on unsupervised pooling methods, we devise a benchmark that evaluates various pooling methods across multiple backbone networks and a wide range of tasks, namely, keyword spotting, speaker identification, intent classification, and emotion recognition. 
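to make the pooling discussion concrete, here is a small sketch of statistics pooling and of a cluster-then-average pooling in the spirit of the vq idea, where long segments such as vowels do not dominate the summary; it is an illustration, not the paper's method, and the cluster count is arbitrary.

# sketch: statistics pooling and a cluster-then-average pooling (illustrative)
import numpy as np
from sklearn.cluster import KMeans

def statistics_pooling(frames):
    # frames: (time, dim) -> concatenation of mean and std, shape (2 * dim,)
    return np.concatenate([frames.mean(axis=0), frames.std(axis=0)])

def cluster_average_pooling(frames, n_clusters=8):
    # average frames within each cluster first, then average the cluster means,
    # so each acoustic cluster contributes equally regardless of its duration
    labels = KMeans(n_clusters=n_clusters, n_init=10, random_state=0).fit_predict(frames)
    centroids = np.stack([frames[labels == k].mean(axis=0) for k in np.unique(labels)])
    return centroids.mean(axis=0)

frames = np.random.randn(200, 32)   # hypothetical frame-wise speech representations
sp_vector = statistics_pooling(frames)
vq_like_vector = cluster_average_pooling(frames)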
we further conduct various analyses that accurately depict the effectiveness and behaviors of our method by comparing with the parameterized methods via supervised learning, visualizing the weights directly, and exploring different settings for benchmark evaluation. let us denote the raw speech input as x, latent speech representations from the 1d convolutional features as z, context representations from the transformer as c, and the quantized representation as q:.we closely follow the superb benchmark that measures the efficacy of self-supervised speech models, but with a key difference in the evaluation for unsupervised pooling methods (section 3.we design our benchmark considering the unsupervised setting: not allowing additional trainable parameters to fairly compare the effectiveness of unsupervised pooling methods. we selected the parameterized pooling methods often used for speaker recognition: self attentive pooling (sap), attentive statistics pooling (asp), and vector-based attentive pooling (vap). additionally, we visualize a sample speech and its pooling weights induced by our vq-lp method in figure3from speech commands v2, where our method detects speech while suppressing areas with ambient noise.in this paper, we design an unsupervised yet effective way of pooling variable-length speech features obtained from selfsupervised speech models. further, we compare our unsupervised pooling method with supervised pooling methods based on attention mechanisms to understand its inner workings. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/168.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/168.txt new file mode 100644 index 0000000000000000000000000000000000000000..972f4f1a6675b1f26d309028c2619aa59c132c80 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/168.txt @@ -0,0 +1 @@ +heart disorder, which affects the heart and arteries, is one of the most devastating human diseases. the heart is unable to pump the required volume of blood toward other parts of the body when it suffers from cardiac problems. in the case of heart disease, the valves and heart muscles are particularly affected. cardiac illness is also referred to as cardiovascular disease. the cardiovascular framework comprises all blood vessels, including arteries, veins, and capillaries, that constitute an intricate system of the bloodstream throughout the organ. cardiovascular infections include cardiac illnesses, cerebrovascular infections, and artery illnesses. heart disease may be a hazard, usually unavoidable and an imminent reason for casualty. heart disease is currently a prominent issue with all other well-being ailments since many people are losing their lives due to heart disease. cardiovascular disease kills 17.7 million people per year, accounting for 31% of all deaths globally, as per the world health organization (who). heart attacks and strokes account for 85% of these cases.heart-related disorders have also become the major cause of death in india . in the united states, one person is killed every 34 seconds. . heart diseases killed 1.7 million indians in 2016, concurring to the 2016 worldwide burden of disease report, released on september 15, 2017 . according to a who report published in 2018, nearly 6 million people died globally in 2016 because of heart infections. . controlling heart disorders costs approximately 3% of total healthcare spending . the world health organization's projections provided the impetus for this project. 
the who predicts that roughly 23.6 million people will die from heart disease by 2030.the expanding rate of heart infections has raised worldwide concern. heart failure is tougher to diagnose because of diabetes, hypertension, hyperlipidemia, irregular ventricular rate, and other pertinent diagnosable conditions. as cardiac illness becomes increasingly common, data on the condition is getting more nonlinear, non-normal, association-structured, and complicated. as a result, forecasting heart illness is a major difficulty in medical data exploration, and clinicians find it extremely difficult to properly forecast heart disease diagnosis. several studies have endeavored to use advanced approaches to analyze heart disease data. if the bagging is not adequately represented in the ensemble approach, it might result in excessive bias and consequently under-fitting. the boosting is also difficult to apply in real time due to the algorithm's increasing complexity. on the other hand, our proposed approach may combine the skills of several high-performing models on a classification or regression job to provide predictions that outperform any single model in the ensemble while also being simpler to build. our suggested system hasn't received much attention; so we've attempted to build it correctly and come up with a nice outcome, and a superior prediction system. the organization of the paper is explained as follows. in section ii, we have made an effort to state related research contributions, state their major contributions and compare with our work. we also provided a table with the underlying overview of the related works and comparison analytics for readers. with section iii, we have provided an outline of the system methodology and outlined the architecture. in section iv, implementations, and experimental results are described. section v, we speak on our limitation in section and we conclude the paper as a result, forecasting heart illness is a major difficulty in medical data exploration, and clinicians find it extremely difficult to properly forecast heart disease diagnosis. on the other hand, our proposed approach may combine the skills of several high-performing models on a classification or regression job to provide predictions that outperform any single model in the ensemble while also being simpler to build. et al. despite the fact that several studies have been undertaken on the issue, prediction accuracy still needs to be improved.implemented m5p, random tree, and reduced error pruning using the random forest ensemble method were presented and investigated as tree-based classification algorithms. the fundamental purpose of this study is to establish a robust intelligent heart disease prediction system (rihdps) by combining 3 data mining modelling techniques into an ensemble method: nb, lr, and nn. using existing datasets of heart disease patients as from the uci repository's cleveland database, the performance of decision tree algorithms is examined and validated. the aim of the research is to utilize data mining tools that uncover hidden patterns in cases of heart problems as well as to forecast the existence of heart disorders in individuals, ranging from no existence to likely existence. this research also consists of a visualization module in which the heart disease datasets are displayed in a diagrammatic representation using different data visualization techniques for user convenience and better understanding. 
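a compact sketch of the kind of stacked ensemble described above (naive bayes, logistic regression and a neural network combined by a meta-learner); the dataset, split and hyperparameters are placeholders, not the paper's configuration.

# sketch: stacked ensemble of nb, lr and a small neural network (illustrative)
from sklearn.datasets import load_breast_cancer
from sklearn.ensemble import StackingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.neural_network import MLPClassifier
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

X, y = load_breast_cancer(return_X_y=True)   # stand-in for a heart-disease dataset
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=0)

stack = StackingClassifier(
    estimators=[
        ("nb", GaussianNB()),
        ("lr", make_pipeline(StandardScaler(), LogisticRegression(max_iter=1000))),
        ("nn", make_pipeline(StandardScaler(), MLPClassifier(max_iter=500, random_state=0))),
    ],
    final_estimator=LogisticRegression(max_iter=1000),
    cv=5,
)
stack.fit(X_tr, y_tr)
print("held-out accuracy:", stack.score(X_te, y_te))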
the research design is shown in section a, the data collection and preprocessing are summarized in section b, and the ml classification techniques and stacked ensemble approach are explained in section c of this study. this is a decision tree ensemble comparable like bootstrap aggregation and random forest, among other decision tree ensemble, approaches. adaboost classifier: the algorithms, shorthand for adaptive boosting, is a boosting approach used in machine learning as ensemble learning. the stacked ensemble approach would be a supervised ensemble classification strategy that stacks many prediction algorithms to find the optimum combination. using the stacked ensemble classifier, we have shown an improved heart disease prediction method. as our study is based on recorded data from the statlog, cleveland, and hungary datasets, for future research possibilities, we will aim to train and test on a large medical data set using many ensemble methods in the future to see if we can enhance their performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/169.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/169.txt new file mode 100644 index 0000000000000000000000000000000000000000..f49b0cd324b5222feafe3f88c2397d01cab469ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/169.txt @@ -0,0 +1 @@ +sepsis is a major public health concern. it is a lifethreatening disease caused by a host's failed response to an infection . the immune system of a sepsis patient becomes aggressive in its protection against infection in the body, which causes organ dysfunction and potential organ damage. sepsis is still a common problem in modern medical settings, particularly intensive care units (icus). according to a global survey conducted in 2018 , roughly 13.6% to 39.3% of patients admitted to icu are impacted by sepsis. this share is 29.5% worldwide. patients with sepsis also experience longer and more expensive hospital stays. for patients that survive, sepsis can cause an increased risk of permanent organ damage, and physical disability .early treatment before the formation of sepsis in patients has been shown to improve the chances of successfully treating and preventing the disease . early care can prevent 80% of sepsis-related deaths, and the chances of survival drop by 8% every hour if action is not taken . in particular, studies have shown that treating sepsis 6 hours earlier before the onset significantly improves the patient's chance of recovering . therefore, this work focuses on the problem of early sepsis prediction, that is, 6 hours in advance. several clinical scores such as sofa have been developed to indicate the onset of sepsis with clinical-based data , . these measures are helpful when sepsis is onset already but have been limited for early prediction. for early sepsis onset prediction, predictions derived using supervised machine learning models such as random forest or long short-term memory (lstm) models have vastly outperformed clinical scores .because of this, there has been ongoing research in developing machine learning predictive models to detect the onset of sepsis early before it is suspected clinically. some studies have used traditional machine learning models such as logistic regression and random forest , . however, these studies did not capture the progressing temporal pattern that can be useful for early sepsis prediction. 
recently, deep learning models have also been applied in sepsis prediction, such as recurrent neural networks (rnns) and convolutional neural networks (cnns) . compared with traditional machine learning methods, deep learning often provides better prediction performance. however, training deep models can be tedious to obtain the optimal subsets and parameters. in addition, none of them has considered the fact that 6 hours is not a small gap. without the progressing information for the next 6 hours before onset, it is challenging to make an accurate prediction, which is explored in this work.first, it should be noted that training successful deep neural networks (dnns) is expensive in terms of time, computational resources, and data. we aim to avoid using dnns but still capture the progressing temporal change pattern for early sepsis prediction. we accomplish this by computing the hourly changes in feature values like heart rate within a given observation window. doing this will allow us to consider the temporal changes in the feature even in a traditional machine learning model, where features are often considered independently. second, we propose a multi-subset approach to close the 6hour gap. the contributions of this paper can be summarized as follows.• we propose to generate a series of delta values to capture the temporal change trend, which is incorporated into the feature set for training and prediction. • we develop a multi-subset approach in which the likeli-hood of sepsis arising earlier than 6 hours are generated from the previous subset and provided to the target subset as additional features to close the big 6-hour gap.• based on the proposed method, we apply an economical machine learning algorithm, that is xgboost , to the trauma patients from the year 2012 to 2019 at icus of uw harborview center, and obtain an auroc of 0.7906 for early sepsis prediction and that of 0.9199 for early septic shock prediction, better than a deep learning model due to limited amount of sepsis cases. for early sepsis onset prediction, predictions derived using supervised machine learning models such as random forest or long short-term memory (lstm) models have vastly outperformed clinical scores. • we develop a multi-subset approach in which the likeli-hood of sepsis arising earlier than 6 hours are generated from the previous subset and provided to the target subset as additional features to close the big 6-hour gap.however, these models did not consider that the temporal change trend can be helpful for early sepsis onset prediction, and missing 6 hours of progressing information is challenging to make an accurate prediction. in order to train the model to predict the onset of a sepsis incident h hours into the future, for each patient, we first shift the values in ŷ earlier by h hours and call this ŷh . 
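a small pandas sketch of the hourly delta features and the h-hour label shift just described; the column names, the toy values and the horizon are hypothetical.

# sketch: hourly delta features and an h-hour shifted onset label (illustrative)
import pandas as pd

def add_deltas_and_shifted_label(df, feature_cols, horizon_hours=6):
    # df: one patient's hourly records with a binary sepsis onset column 'y'
    out = df.copy()
    for col in feature_cols:
        out[f"delta_{col}"] = out[col].diff()                        # hour-to-hour change
    out[f"y_in_{horizon_hours}h"] = out["y"].shift(-horizon_hours)   # onset label h hours ahead
    return out

patient = pd.DataFrame({
    "heart_rate": [80, 82, 90, 95, 110, 120, 125, 128],
    "y":          [0,  0,  0,  0,  0,   0,   0,   1],
})
print(add_deltas_and_shifted_label(patient, ["heart_rate"], horizon_hours=3))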
from this example, we can observe that if we want to predict sepsis event onset in 3 hours, using observed patient data up to t = 1, the onset prediction for t = 4 should be 0, and using data up to t = 2, the prediction of the onset of a sepsis incident for t = 5 should be 1.after shifting the label for h hours to predict the onset of an incident in h hours, for a certain hour t, we can use only the x t,f values across all of the features, and the new ground truth label ŷh to train a classification model for the onset of a sepsis incident that will be able to make a prediction on new patients and provide the probability of the event in h hours as y h .1), we use the observed data up to the prediction time to determine the probabilities that a patient will experience a sepsis incident between the current time t and the sepsis incident onset time t + 6. for example, we can use the § the onset of a sepsis incident label has been given using the cdc's adult sepsis surveillance criteria with prior modifications utilizing readily obtainable emr data to improve specificity for the trauma population. we can further shrink the gap by providing sepsis incident probabilities of each hour in-between to feed more features in the target subset, which is using multiple subsets to close the 6-hour gap.to sufficiently demonstrate the proposed method, we apply it to two important sepsis-related tasks, that is, early sepsis onset prediction and early septic shock prediction.• what are the benefits of using multi-subsets in the development of sepsis and septic shock prediction models? • how do delta and statistic values incorporate the temporal change trend affect the effectiveness of the prediction models for sepsis and septic shock? • which features are more important for early sepsis or septic shock prediction?. in the comparison, we use '1 subset' to denote the method using only the target subset, '2 subsets' to denote the method adding the probability of sepsis in 3 hours to the target subset, and '6 subsets' to denote the method adding probabilities of sepsis in 1 hour, 2 hours, 3 hours, 4 hours, and 5 hours to the target subset. we can observe that our multi-subset approach is definitely helpful for the task of early septic shock prediction, where adding the probability of shock in 3 hours improves the performance compared to using only the target subset and adding more probabilities can further improve. the results show that the probability of sepsis occurring in 3 hours contributed significantly to the prediction of sepsis onset for the method using 2 subsets.in this work, we present an economical machine learning approach for sepsis related early prediction, which is way less expensive in terms of data, resources, and training time compared to deep learning models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/17.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/17.txt new file mode 100644 index 0000000000000000000000000000000000000000..a24be01130c4c4b95ac337d0b632fd9e8dc06b42 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/17.txt @@ -0,0 +1 @@ +feedback controllers form the core of any safety-critical cyber-physical systems (cpss). the traditional approaches for synthesizing feedback controllers rely on the availability of a mathematical model of the dynamical system whose behaviour the controller is supposed to regulate. 
however, for complex dynamical systems, the creation of a faithful mathematical model poses a tremendous challenge to the control engineers. reinforcement learning (rl) provides an alternative for synthesizing feedback controllers in the form of a controller agent for complex systems without precise mathematical models. the agent interacts with the dynamical system in a simulation environment providing a faithful but complex system model that is not suitable for controller synthesis using traditional control-theoretic procedures.a deep neural network can be used as the agent in rl, and in that case, the learning procedure is called the deep reinforcement learning (drl). in recent times, deep reinforcement learning (drl) has been extremely popular in solving highly complex problems such as solving atari (mnih et al. 2015) and go (silver et al. 2017), making bipedal robots walk (lillicrap et al. 2016), learning visuomotor controllers for robots (levine et al. 2016), and many more. the success of drl in solving those complex problems is attributed to well-defined reward functions. thus, designing correct reward functions (sutton and barto 2018) is extremely important for synthesizing drl-based controllers.several papers have considered the rl-based methods for synthesizing feedback controllers (e.g. (lillicrap et al. 2016;levine and koltun 2013;deisenroth, neumann, and peters 2013;fulton and platzer 2018;hafner and riedmiller 2011)). in this approach, the reward function is designed by a control engineer having complete knowledge of the system dynamics. as the system becomes high-dimensional and highly nonlinear, designing a correct reward function in this manner becomes prohibitively hard. for control engineers, it would have been significantly more convenient if they could write the specification of the closed-loop system in a formal language, and the reward could be generated automatically from this specification.in the recent past, signal temporal logic (stl) (donzé and maler 2010) has been used widely in capturing realtime specifications for synthesizing controllers for complex cpss (singh and saha 2021;raman et al. 2014;raman et al. 2015). the robustness semantics of stl makes it a potential candidate for being used for specification-based reward generation in controller synthesis through drl. an stlbased reward can efficiently perform temporal aggregation, which is difficult to achieve in a hand-crafted reward function. however, the classical quantitative semantics (donzé and maler 2010), (jaksic et al. 2018) of stl often leads to improper rewards, which in turn may lead to the synthesis of sub-optimal controllers. this is because the pointbased classical semantics suffer from the shadowing problem (várnai and dimarogonas 2020), where an increase in the robustness of an individual sub-specification does not influence the robustness of the full specification computed by the and operator, except for the case when it is minimum.of late, researchers have proposed several new semantics of stl that attempt to incorporate aggregate no-tions of robustness instead of point-based estimates provided by the classical semantics. some of these semantics such as agm (mehdipour, vasile, and belta 2019) and softmax (várnai and dimarogonas 2020) were designed to address the shadowing problem. however, as the robustness functions for these semantics are not smooth, they are not well-suited for rl-based controller synthesis. 
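as a rough, self-contained illustration of the shadowing problem mentioned above, and of why a smooth aggregate gives a more informative learning signal than the classical min-based conjunction, the sketch below compares the min semantics with an lse-style soft minimum; it is illustrative only and does not reproduce the exact sss semantics proposed in this paper.

```python
import numpy as np

def and_min(rhos):
    """classical semantics: robustness of a conjunction is the minimum of its parts."""
    return np.min(rhos)

def and_lse(rhos, beta=5.0):
    """smooth (log-sum-exp) approximation of the minimum, as in lse-style semantics."""
    rhos = np.asarray(rhos, dtype=float)
    return -np.log(np.sum(np.exp(-beta * rhos))) / beta

# shadowing: improving one conjunct does not change the min-based reward at all,
# while an aggregate value (here the smooth lse approximation) does reflect the improvement.
before = [0.1, 0.5, 0.5]
after = [0.1, 0.9, 0.9]   # two sub-specifications became much more robust
print(and_min(before), and_min(after))   # identical: 0.1 and 0.1
print(and_lse(before), and_lse(after))   # the aggregate value increases
```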
another semantics lse (li, ma, and belta 2018;pant, abbas, and mangharam 2017) aims to provide a smooth approximation to the robustness function, but it does not address the shadowing problem. due to the limitations of the existing aggregation-based semantics, stl has not yet been adopted for drl-based controller synthesis despite its tremendous potential.in this work, we propose a new aggregate-based semantics for stl, called sss (smoothened-summation-subtraction). our semantics is not sound, but we show that soundness is not an essential requirement of an stl semantics when it is used for reward generation in the drl process. rather, our semantics have all the essential properties for being qualified as a reward generation mechanism -it offers smoothness, addresses the shadowing problem, and ensures min-max boundedness.we implement our semantics in the online monitoring tool rtamt (ničković and yamaguchi 2020) and evaluate it on several challenging continuous control benchmarks available in the gym environment (brockman et al. 2016). for each of those benchmarks, we introduce a suitable stl specification that captures the safety and liveness requirements of the system. we compare the performance of the controller synthesized using our semantics with those synthesized with other aggregate-based semantics available in the literature. experimental results establish that our semantics consistently outperforms all the other semantics by a significant margin. an stlbased reward can efficiently perform temporal aggregation, which is difficult to achieve in a hand-crafted reward function.for online robustness ρ computation, we change the definition of u i operator in the classical robustness semantics of stl (equation3).in this paper, the symbol ρ is used to denote robustness function only for online setting whereas ρ to define the general robustness function, i.to solve problem 1 using reinforcement learning, we represent the model-free environment m = x, u, t , ρ , where x denotes the set of continuous states x ∈ r p of the system, u denotes the set of continuous control actions u ∈ r q and ρ(φ, ω) ∈ r represents the robustness of trace ω w. 2018)as it leads to smooth policies(shen et al.we formulate the approximation for and in expression (16) because of two reasons -firstly, we want to use an aggregate measure of all the robustness in the and expression, and secondly, we want to penalize the largest difference between any two robustness. with learning, we will eventually get the desirable behavior in both cases -former with negative robustness (not sound) and latter with positive robustness (sound). nevertheless, our semantics satisfies the aggregate notion of soundness (theorem 2), and we provide the bounds on the robustness values generated by the operator in all cases.depending on the robustness values being positive or negative, we can split it into three cases -all positive, all negative, and lastly, some positive and some negative. in the theorem below, we prove that if all individual robustnesses are positive (negative), then the sss semantics will certainly give us a positive (negative) robustness (satisfies soundness). we provide a bound on the robustness for both these cases as well (here, traditional soundness requires negative robustness for a in both cases). in part 3, traditional soundness would have required robustness of a to always remain negative since ρ min is negative. 
however, aggregate soundness takes into account the fact that the overall sum is positive, and hence robustness of a can be positive if the magnitude of positive robustness values outweigh the negative ones. these two extreme values will always be positive (negative) if all the robustness values are positive (negative). hence, if all ρ i > 0 or all ρ i < 0, then the range of robustness of operator a in equation (23) will always contain positive or negative robustness values, respectively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/170.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/170.txt new file mode 100644 index 0000000000000000000000000000000000000000..803f67587d072c3d247ac8047d890abc02b4f010 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/170.txt @@ -0,0 +1 @@ +health equity is a crucial principle in public health, which aims to eliminate disparities in health outcomes and healthcare access among various populations. the world health organization (who) and the united nations (un) both prioritize health equity as a key aspect of their missions to improve global health outcomes. however, despite these efforts, disparities in health outcomes continue to persist, particularly among marginalized and underserved populations .machine learning (ml) has the potential to transform the way we approach health and healthcare with its advanced analytical and predictive capabilities. ml can aid in comprehending complex health systems, identifying disease patterns and trends, and improving patient outcomes . however, it is essential to exercise caution when employing ml and consider potential biases and inequalities that may be present in the data used to train these models , . such biases can lead to discrimination and unjust outcomes for specific populations, exacerbating existing health disparities .this study aims to promote health equity through ml by reviewing the literature on ml fairness and presenting a novel ml pipeline approach to integrate fairness into various stages of a standard ml pipeline. although fair ml has been explored in artificial intelligence (ai) literature , , its implementation in public health has received limited attention. this study endeavors to provide the public health community with accessible methods for ensuring equitable outcomes when using ml. the specific contributions of this research are:-summarizing the concepts of fair ml and presenting an ml pipeline approach for public health use to achieve equitable outcomes.-providing examples that demonstrate the importance of the pipeline approach and how disparities can be amplified and mitigated through ml. -offering straightforward and accessible methods for the public health community to incorporate fairness in their use of ml. unlike previous studies , - that have primarily focused on specific applications of ml in healthcare, this review adopts a different approach by presenting a methodology for incorporating fairness during various stages of a standard ml pipeline. this pipeline idea is proposed based on a thorough review of pertinent literature on fair ml. the study primary focus is on promoting health equity for marginalized and underserved populations and providing straightforward and accessible methods for the public health community.health equityis a crucial principle in public health, which aims to eliminate disparities in health outcomes and healthcare access among various populations. 
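the kind of fairness check that such a pipeline would run at the evaluation stage can be illustrated with a small sketch. the metrics below (demographic parity gap and equal opportunity gap) are standard group-fairness quantities and only illustrate the idea, not the specific methods proposed in this paper; the arrays are assumed toy data.

```python
import numpy as np

def demographic_parity_gap(y_pred, group):
    """difference in positive-prediction rates between groups (0 means parity)."""
    rates = [y_pred[group == g].mean() for g in np.unique(group)]
    return max(rates) - min(rates)

def equal_opportunity_gap(y_true, y_pred, group):
    """difference in true-positive rates between groups."""
    tprs = []
    for g in np.unique(group):
        mask = (group == g) & (y_true == 1)
        tprs.append(y_pred[mask].mean())
    return max(tprs) - min(tprs)

# assumed toy data: binary predictions, labels, and a sensitive attribute
rng = np.random.default_rng(1)
y_true = rng.integers(0, 2, 1000)
group = rng.integers(0, 2, 1000)
y_pred = rng.integers(0, 2, 1000)
print(demographic_parity_gap(y_pred, group))
print(equal_opportunity_gap(y_true, y_pred, group))
```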
the world health organization (who)and the united nations (un)both prioritize health equity as a key aspect of their missions to improve global health outcomes.this study aims to promote health equity through ml by reviewing the literature on ml fairness and presenting a novel ml pipeline approach to integrate fairness into various stages of a standard ml pipeline.-summarizing the concepts of fair ml and presenting an ml pipeline approach for public health use to achieve equitable outcomes.health equity,is a fundamental principle in public health that emphasizes the importance of ensuring equal access to healthcare resources, opportunities, and outcomes for all individuals, regardless of their background or social status. the authors also emphasize the importance of addressing health equity and disparities through ml methods that focus on algorithmic fairness.by incorporating fairness methods at relevant stages of the ml pipeline, the proposed framework can help address the biases identified in the precision medicine and covid-19 outcome examples, ensuring that the resulting models are fair, equitable, and produce accurate, unbiased predictions for all individuals, irrespective of their race or ethnicity.fairness in ml plays a vital role in promoting public health equity by addressing biases in healthcare models and ensuring that predictive algorithms are both accurate and equitable. furthermore, by integrating interdisciplinary knowledge from fields such as computer science, statistics, and public health, researchers can develop robust fairness-aware ml algorithms that uncover hidden patterns, enabling a deeper understanding of the complex relationship between demographic factors and health outcomes. however, addressing public health equity requires a comprehensive approach that includes analyzing health disparities, targeting interventions, addressing social determinants of health,, and promoting equal access to healthcare services. by incorporating fair ml models into public health decision-making processes, we can ensure that predictions and decisions are fair and do not perpetuate or exacerbate existing health disparities. finally, using fair ml models that align with the principles of distributive justice, such as equity, representation, and accountability, can help ensure that predictions and decisions made by the models are fair and do not perpetuate or exacerbate existing health disparities.the connection between fair ml and public health equity is critical, as the use of unbiased ml models can aid in the reduction of existing health disparities and promote equal access to healthcare services. furthermore, incorporating fair ml into public health decision-making processes aids in the elimination of potential biases in predictive models, preventing the perpetuation or exacerbation of existing health disparities. through the examination of various case studies in the public health domain, this paper has demonstrated the practical applications and potential impact of fair ml models on healthcare outcomes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/171.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/171.txt new file mode 100644 index 0000000000000000000000000000000000000000..18c19e09587001446c58ee4c34f195898b28678c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/171.txt @@ -0,0 +1 @@ +a model is a probing device used to explain a phenomenon through data. 
in most cases, a true model for this phenomenon exists but cannot be specified at all . this setting indicates that all plausible models, though useful, can be deemed as misspecified . can we use a plausible explainable model, while correcting for its misspecification implicitly? unlike the prescriptive generative modeling dogma, predominant in the statistical community, the implicit generative modeling view taken by the machine learning community lays emphasis on predictive ability rather than on explainability . implicit deep generative models have witnessed tremendous success in domains such as computer vision. however, their opaqueness and lack of explainability has made the injection of subjective knowledge into them a highly specialized and experimental task. in this work, our proposal is to reconcile implicit and explicit generative models into a single framework in the misspecified setting. we do that by taking gans and abc as representative of the two fields respectively.the introduction of gans in 2014 by goodfellow et al. marked a very decisive point in the field of generative deep learning. since then, deep learning based generative models like gans and variational autoencoders have been extensively worked on, with the main intention of addressing issues with likelihood estimation based methods and related strategies. the crux of these issues lies in complex or intractable computations that arise during maximum likelihood estimation or evaluation of the likelihood function. a gan uses two adversarial modules -the generator and the discriminator, essentially playing a zero sum min-max game with each other, with the competition between them driving both modules to improve and reach a stage where the generator is able to produce counterfeit data which is indistinguishable from the real data. although gans have been shown to address the issues mentioned above well by leveraging the benefits of using piece-wise linear units, there are some inherent issues with the gan paradigm. these include the inherent difficulty in achieving convergence, stability issues during training and the necessity of large amounts of data. an active area of research in this direction is to apply gans in different settings and also to improve stability .another older, but equally interesting generative paradigm is approximate bayesian computation (abc) . abc finds its roots in bayesian inference, and aims to bypass likelihood evaluation by approximating the posterior distribution of model parameters. this method is extremely useful in cases when the likelihood estimation is computationally intensive, or even intractable. the likelihood-free aspect of this paradigm allows the data generative model to be as complex as it can get. however, there are some drawbacks, such as low acceptance rates at higher dimensions, the difference between the prior distribution from the posterior distribution, identification of lower dimensional statistics to summarize and compare the datasets and the model selection problem.abc and gan complementarity: looking at these two paradigms, it becomes clear that both abc and gans try to solve the same problem -learning the data generation mechanism by capturing the distribution of the data, but they approach the problem in different ways. by studying these two paradigms, their similarities and differences become apparent. with respect to the data generation model, abc uses a user-specified model, whereas the generator in a gan is non-parametric. 
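to make the abc side of the comparison concrete, here is a minimal rejection-abc sketch for inferring the mean of a gaussian: parameters are drawn from a prior, data are simulated, and a candidate is accepted when user-chosen summary statistics of the simulated data fall within a tolerance of the observed ones. the example is generic and hypothetical, not taken from this paper, but it exposes the user-specified simulator, summary statistics, and distance that the gan formulation later tries to avoid choosing by hand.

```python
import numpy as np

rng = np.random.default_rng(0)
observed = rng.normal(3.0, 1.0, size=200)   # data whose generating mean we pretend not to know

def summary(x):
    """hand-picked summary statistics (their choice is one of the abc drawbacks noted above)."""
    return np.array([x.mean(), x.std()])

def rejection_abc(observed, n_samples=20_000, eps=0.1):
    s_obs = summary(observed)
    accepted = []
    for _ in range(n_samples):
        theta = rng.uniform(-10, 10)                     # draw a candidate parameter from the prior
        simulated = rng.normal(theta, 1.0, size=observed.size)
        if np.linalg.norm(summary(simulated) - s_obs) < eps:   # user-specified distance + threshold
            accepted.append(theta)
    return np.array(accepted)

posterior = rejection_abc(observed)
print(posterior.mean(), posterior.size)   # approximate posterior mean and acceptance count
```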
looking at the discriminative model for both, abc uses an explicit, user-specified discriminator which often uses euclidean distance or some other distance measure on a set of summary statistics to measure the difference between real and simulated datasets. for gans, the discriminative model is specified through a function like kl divergence or js divergence as the discriminator's objective function. another key difference here is that the feedback from the discriminator in a gan flows back to the generator, thereby making them connected, while in abc, these two modules are disconnected. further, in abc, model selection is followed by model inference, but in gans, since the generator and discriminator are connected, this occurs implicitly during the learning process. we now see that abc and gan appear to be at two ends of the data generation spectrum, with each having its own advantages and disadvantages. can we get rid of making choices about the summary statistics, distance metrics, and model selection in the context of abc? further, can we deal with model misspecification either in the likelihood or the prior or both in the bayesian context? gans, in particular the adversarial mini-max formulation, can address these questions. skipgan: the gan generator takes as inputs the features and the simulated data from abc, and the discriminator also takes a weighted combination of the abc generator and the gan generator. we consider several standard, interpretable models such as linear models, gradient boosted trees (gbt) and a combination of deep learning and gradient boosted trees (tabnet) as abc models under various misspecification settings. we consider three datasets (one simulated and two real), three prior generative models, two basic abc-gan architectures, and two gan generator architectures - leading to a total of 36 experiments. we consider three families of models - linear models, gradient boosted trees (gbts), and transformers - as explicit generative models. linear models: standard linear regression models are implemented in statsmodels, a python module that provides classes and functions for the estimation of many different statistical models, as well as for conducting statistical tests and statistical data exploration. in order to test the hypothesis that abc-gan models perform no worse than the prior models, we take the boston dataset, synthetically inject model misspecification, as described earlier, and report the mae of g π (sampler) and g γ (a deterministic transformation). as can be seen, the proposed abc-gan models outperform the prior models in almost all cases - different priors, different abc-gan models, and different levels of model misspecification. next, we investigate how these models perform at specific levels of model misspecification by prior, model architecture, and dataset. in tables 1-9, each row corresponds to a level of model misspecification as indicated by the (variance, bias) columns, and the entries under the columns - prior model, mgan, tab-mgan, skipgan, tab-skipgan - indicate the mae of the models named in the column header. a skip connection has been added in some models, as explained earlier, to take a weighted average of the prior model and the gan model. however, for models with lower complexity (such as linear models), skipgan performs better in correcting the model misspecification.
our abc-gan models outperform prior models with the same amount of misspecification, and perform equivalent or better than these priors even in the ideal situation of perfectly specified models.how is our experimentation on regression any different than the other existing work, among the wide variety of literature that exists on regression, including non-parametric approaches such as gaussian process regression? while being useful in the ml community, these methods don't solve the problems of (1) correcting likelihood misspecification in the models or data and (2) performing equivalent or better than to the prior models under perfect condition (no noise condition). our model caters mainly to correcting misspecification in the prior models, and performs equivalently or better than the prior models in the ideal case in several regression tasks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/172.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/172.txt new file mode 100644 index 0000000000000000000000000000000000000000..6fd71f59cdfda7314cecb210100bce55dced2c27 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/172.txt @@ -0,0 +1 @@ +constrained closed-loop control systems are critical to a wide range of applications, and optimizing their online performance by tuning control parameters is a common challenge. this paper studies the time-varying black-box optimization problems that arise in optimizing system performance with constraints, where both the objective and the constraints are unknown. for instance, consider building control, which aims to minimize energy consumption while meeting occupant comfort requirements . solving the controller tuning problem in such scenarios is challenging for several reasons. firstly, it is hard to model the mapping from control parameters to the performance metric of the closedloop system. secondly, unknown constraint violations need to be taken care of during the optimization process. finally, evaluating the system's performance is often expensive in practice.to tackle these challenges, bayesian optimization has shown promise as a sample-efficient, derivative-free blackbox optimization method . bayesian optimization constructs a surrogate model using gaussian process regression and uses this model to guide the sampling of the black-box functions. variants of bayesian optimization methods have been proposed to handle constrained optimization problems. one common method is to maximize constrained expected improvement (cei) , to select the next this work was supported by the swiss national science foundation under the nccr automation (grant agreement 51nf40 180545).w. xu, y. jiang, and c. n. jones are with the automatic control laboratory, epfl, switzerland. w. xu and b. svetozarevic are with swiss federal laboratories for materials science and technology (empa), switzerland. {wenjie.xu, yuning.jiang, colin.jones}@epfl.ch, bratislav.svetozarevic@empa.ch sample in each step. another line of research developed safe bayesian optimization (safe bo) methods by restricting the sampling to only feasible points - . these methods have found wide applications in control system optimization. for example, cei method is applied to trajectory optimization for path following control . safe bo is applied to tune the pi controller of a room heating system subject to a group of safe constraints . 
however, the cei method may not guarantee constraint feasibility and can suffer from severe constraint violations, while safe bayesian optimization methods can be too cautious and lose performance due to the strict requirement of sampling feasible points.in the context of controller tuning, one additional challenge that needs to be considered is that the unknown objective and constraints are time-varying . to address this challenge, several time-varying variants of bayesian optimization have been proposed in the literature - . in practice, these variations can be due to the change of 'contextual variables' observed by the decision-maker before selecting a new set of parameters . affected by the time-varying contextual disturbances, guaranteeing constraint satisfaction at every time instant can be challenging and may not be necessary . but rather, it can be more of interest to satisfy the time-average constraint , especially when it represents some economic cost that accumulates over time. examples include performance optimization of a machine subject to fatigue constraints (see, e.g., ) and data center cooling subject to the constraints on the number of delayed queries per unit of time .motivated by the aforementioned observations, we formulate the time-varying black-box optimization problem as a constrained contextual bayesian optimization problem. to solve it, we extend the primal-dual bayesian optimization framework to the contextual setting . in the proposed algorithm, the contextual variable that impacts the response surface of the black-box functions can be observed at each step before selecting a new set of parameters, and then the system performance is optimized over the current context. the detailed contribution is summarized as follows:• we propose a primal-dual contextual bayesian optimization (pdcbo) algorithm for time-varying constrained black-box optimization problems. in contrast to the regret with respect to a static optimal value in the non-contextual setting , we provide bounds on the contextual regret with respect to the time-varying optimal solutions. furthermore, our algorithm can achieve zero time-averaged constraint violations, even under the adversarial time-varying contextual setting;• the proposed method is deployed to both sampled instances from gaussian processes and to a continuous stirred-tank reactor parameter tuning problem. compared to other state-of-the-art methods, our method can simultaneously achieve the lowest cumulative objective while satisfying all the constraints on average.this work aims to sequentially optimize the control parameters θ t ∈ θ ⊂ r n θ after observing some contextual variable z t ∈ z ⊂ r nz in each step t, where θ is the candidate set of the control parameters and z is the set of possible contextual variables. we use g to denote the concatenation (g i ) n i=1 and g(θ, z) to denote the concatenation (g i (θ, z)) n i=1 . the lagrangian of the original problem is l(θ, z, φ) = f (θ, z) + φ t g(θ, z) and the dual function is d(z, φ) = min θ l(θ, z, φ). 
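the primal-dual structure described here can be sketched compactly: at each round the context is observed, the primal step picks the control parameters by minimizing an optimistic surrogate of the lagrangian built from gaussian-process models of f and g, and the dual step performs a projected gradient update on λ using the observed constraint value. the code below is a highly simplified illustration with made-up one-dimensional black boxes and scikit-learn gps over a discrete candidate grid, not the authors' pdcbo implementation.

```python
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor

# toy black boxes: objective f and a single constraint g (feasible when g <= 0), both context-dependent
def f(theta, z): return (theta - 0.3 * z) ** 2
def g(theta, z): return 0.4 - theta - 0.1 * z

thetas = np.linspace(0.0, 1.0, 101)          # discrete candidate set of control parameters
gp_f, gp_g = GaussianProcessRegressor(), GaussianProcessRegressor()
X, yf, yg = [], [], []
lam, eta = 0.0, 0.5                          # dual variable and dual step size
rng = np.random.default_rng(0)

for t in range(30):
    z = rng.uniform(-1, 1)                   # observed context for this round
    if X:
        gp_f.fit(np.array(X), np.array(yf)); gp_g.fit(np.array(X), np.array(yg))
        cand = np.column_stack([thetas, np.full_like(thetas, z)])
        mf, sf = gp_f.predict(cand, return_std=True)
        mg, sg = gp_g.predict(cand, return_std=True)
        # primal step: minimise an optimistic (lower-confidence-bound) lagrangian
        theta = thetas[np.argmin((mf - 2 * sf) + lam * (mg - 2 * sg))]
    else:
        theta = rng.choice(thetas)
    fv, gv = f(theta, z), g(theta, z)
    X.append([theta, z]); yf.append(fv); yg.append(gv)
    lam = max(0.0, lam + eta * gv)           # dual step: projected gradient ascent on the constraint
print(np.mean(yg))                           # time-averaged constraint value (target: <= 0)
```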
to measure the violations, we introduce the cumulative constraint value t τ =1 g i (θ τ , z τ ), i ∈ , and the timeaverage constraint value.we consider the problem (1) with only one constraint, where θ ∈ θ = , z ∈ z = , and both f and g are unknown black-box functions sampled from a gaussian process.we want to adaptively tune the feed rate f b of the component b and the reaction temperature t r to maximize the cumulative economic profit from the reaction while managing x a and x g .where j(f b , t r , p ) is the minimization objective that is opposite to the net economic profit, p ∈ r 4 is the price vector of the product and the raw materials, g 1 (f b , t r ) and g 2 (f b , t r ) are threshold constraints on the residual mass fractions. the method is applied to both sampled instances from gaussian processes and to a continuous stirred tank reactor feedrate and reaction temperature tuning problem; simulation results on both problems show that the method simultaneously provides closeto-optimal performance and maintains constraint feasibility on average.where the first inequality follows by the optimality of π * ǫ , the equality follows by the definition of π ǫ and the last inequality follows by assumption 2, which implies that |f.1) bound cumulative regret: we have the following lemma to bound f t (θ t , z t ) -eπ * ǫ (zt) (f t (θ, z t )), which approximates the single-step regret.where the first inequality follows by the inequality (15), the second inequality follows by adding and subtracting 1 η f t (θ t , z t ) and the projection operation to as shown in(8), the third inequality follows by the optimality of θ t for the primal update problem (9) and the assumption that ǫ ≤ c i , and the last inequality follows by the feasibility of π * ǫ for the problem(16).where the last inequality follows by that relaxed optimal value eπ * (zt) (f (θ, z t )) is smaller or equal to the original optimal value, and the equality splits the original term into four terms. 1, the second inequality follows by the monotonicity of β 1/2 0,t , and the last inequality follows by lem., where the first inequality follows by the optimality of θ t for the primal update problem, the second inequality follows by that both η and λ t are non-negative, and the third inequality follows by lem.where the first inequality follows by the inequality(15), the second inequality follows by that ǫ ≤ ξ 2 , the third inequality follows by that λ t ≥ 0 and the lem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/173.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/173.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef5eb439a168e71d2b92724db0761c6eda61fdc4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/173.txt @@ -0,0 +1 @@ +most supervised learning methods are designed to estimate conditional probability p (y | x = x) where x is known as the set of features (variables) and y the outcome (or the target variable). a large number of such predictive models have been developed for outcome predictions and they can model conditional probabilities accurately in various data sets.however, many decision making problems require answers to what-if questions regarding any feature and the outcome. if x i ∈ x is changed from 0 to 1, how will y be changed? for example, will a job applicant get the job if the applicant has a college degree? will a customer buy the product if the customer receives the coupon? 
answering such what-if questions needs the estimation of the causal effect of the corresponding feature (e.g. college degree or coupon) on the outcome (e.g. getting the job or buying the product). the causal effect of a feature (as a treatment) on the outcome means the change of the outcome due to the change of the feature .causal effect estimation is a main topic in the causal inference area, and most causal effect estimation methods assume a fixed treatment variable and a given set of covariates . several conditional average treatment effect (cate) estimation methods have been proposed for personalised decision making , and they all deal with a fixed treatment variable and a given set of covariates.in many problem settings, there is not a fixed treatment variable, and therefore, these causal effect estimation methods cannot be applied to such a problem. for example, with a data set containing people's lifestyles, diets and etc. (features), and heart attacks in their 60s (the outcome). a predictive model can be trained on the data set to predict the risk of a heart attack in their 60s based on their feature values. furthermore, one may wish to know what lifestyle change or diet change she/he should undertake now to reduce the risk of a heart attack in future. each of lifestyle and diet features can be a treatment and a covariate when it is not a treatment.identifying the best possible treatment for an individual is a key part for the solution in personalised decision making. during the search for the best possible treatment for an individual, features are alternated as treatments to have their respective causal effects evaluated. when a feature is considered as the treatment, an appropriate covariate set for the treatment and the outcome needs to be determined. this makes an existing causal effect estimation method not applicable since it assumes a fixed treatment variable and a given covariate set for the treatment and the outcome . existing treatment effect estimation methods need to be extended to a predictive machine learning setting where each feature can be a treatment and there is not a designation of a fixed treatment and a covariate set.in this paper, firstly we deal with the challenge to causal effect estimation when alternating features as treatments. it is not a straightforward extension of existing causal effect estimation methods when alternating treatment variables. we show the challenges in the following example. consider a traditional setting where a data set contains the treatment variable t , a covariate variable x, and the outcome y . this implies that the causal graph underlying the data set is t ← x → y and t → y . in the problem setting of this paper, both (t, x) are features and can be treatments alternatively. to estimate the causal effect of t on y conditioned on x = x, the conditional average causal effect (cate) of t on y conditioned on x = x is used. however, when x is alternated to be the treatment, t is not a covariate of the variable pair (x, y ) but a mediator between x and y . in this case, a cate estimator is not applicable for estimating the causal effect of x on y conditioned on t since, when changing x as the treatment, t will be changed too. instead, the controlled direct effect (cde) of x on y when t is controlled to t = t is appropriate.we face two challenges as shown in the above example. firstly, when alternating features as treatments, the causal effects for different features may not be of the same type. 
secondly, the choice of the right type of causal effect for a treatment needs the underlying causal graph. so, in general, we cannot estimate the causal effect of individual features on the outcome by alternating them as treatments if we do not know the underlying causal graph. in practice, the causal graph is rarely known. in this paper, we reduce the requirement of knowing the causal graph to the requirement of knowing that the data set contains all direct causes of the outcome and no variables affected by the outcome. hidden variables which are not the direct causes are allowed. this makes our proposed approach more practical than methods assuming knowing the causal graph for estimating the causal effects of any feature on the outcome.secondly, the theoretical results of the above solution naturally link a predictive model to causal effect estimation. i.e., when the conditions are satisfied, causal effects can be directly estimated from a predictive model. the results imply that we can have a causally interpretable predictive model where causal effects of individual features can be derived from the model. the causal interpretation does not depend on the transparency of the predictive model, but depends on what inputs to the model are. the predictive model itself can be black-box, but the effect of each feature on the outcome can be derived from the model and hence the model is causally interpretable.the connection between predictive models and causal interpretation contributes to model explanation. it is desirable to interpret a predictive model causally, but it is generally impossible. as a predictive model, the coefficients in linear regression models are often interpreted as causal effects of the corresponding features on the outcome. however, such an interpretation is valid only when the underlying causal structure is known and the regression follows the causal structure properly. this is because the coefficients may represent different types of causal effects, or even may be biased estimations . furthermore, even if the causal interpretation is valid, the coefficients in linear regression models indicate the average treatment effects (ate) at population level, and are not suitable for personalised decision making discussed in this paper.thirdly, we further study the property of the above causally interpretable model, and show that it is robust when the environment is changed from which the model is trained.in summary, this work makes the following contributions.1. for causal effect estimation of any feature on the outcome in a given circumstance, normally the causal graph underlying the data set is needed. this paper reduces the requirement of knowing the complete causal graph to that of knowing that the data set contains all direct causes of the outcome and no variables affected by the outcome. hidden variables are allowed as long as they are not direct causes of the outcome. the relaxed assumptions lead to more practical methods for causal effect estimation of any feature on the outcome.2. the work links a predictive model to causal effect estimation. when the conditions identified in the paper are satisfied, a predictive model can be used for causal effect estimation and hence causally interpretable.we use experiments to show that various types of predictive models estimate causal effects as accurately as state-of-the-art causal effect estimation methods.3. 
we analyse and demonstrate that a causally interpretable model is more robust than other predictive models using all features in a new environment that is different from the one in which the model is trained. to estimate the causal effect of t on y conditioned on x = x, the conditional average causal effect (cate) of t on y conditioned on x = x is used. we use y to denote y = 1, x i to represent x i = x i and do(x i ) to represent do(x i = x i ) when the context is clear. for a personalised decision, there needs to estimate the causal effect of each x i ∈ x on y when other variable values in x are kept unchanged for all x j = x i . for individual x = (x 1 , x 2 ), we aim at estimating causal effect of x 1 on y (x 1 is the treatment) when. let a data set include variables (x 1 , x 2 , y ) where x 1 and x 2 are direct causes of y . let g be a dag, g x1 be a dag by removing from g outgoing edges from x 1 , and g x1x2 be a dag by removing from g incoming edges to x 1 and outgoing edges from x 2 .,x k-1 ,x k be a dag removing from g all incoming edges into x 1 , x 2 , . firstly, let x = {x a , x d , x i } where x a includes all ancestors of y , x d includes all descendants of y , and x i includes all other variables (i.let x a = x a \x i and x a be a value of x a . {x a , x d , x i } = x where x is a value of x = x\x i .we consider three cases, x j ∈ x i , x j ∈ x a , and x j ∈ x d ., the opposite value of x i (or x i = x i + δ for a continuous treatment) given other values of x are kept constant?. x 3 and x 4 belong to type 1; x 6 and x 7 belong to type 2; and x 1 and x 2 belong to type 3. edges y → x 6 and x 5 → x 6 form a collider at x 6 , and hence y and x 5 are associated when conditioning on x 6 . let the feature set x include {x a , x d , x i } that are associated with y . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/174.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/174.txt new file mode 100644 index 0000000000000000000000000000000000000000..38cdf24e7fd85886d1429a0a1f3b3d5e4e679607 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/174.txt @@ -0,0 +1 @@ +stochastic convex optimization (sco) is a fundamental framework, that captures several classical machine learning (ml) problems, such as linear regression, logistic regression and svms (support vector machines); amongst others. in the past two decades, sco has been extensively explored and highly influenced the field of ml: it has popularized the use of stochastic gradient descent (sgd) as the standard workhorse for training ml models; see e.g. (shalev-shwartz et al., 2007;welling and teh, 2011;mairal et al., 2009;recht et al., 2011); as well as has lead to the design of sophisticated sgd variants that play a central role in training modern large scale models (duchi et al., 2011;kingma and ba, 2015).one practical difficulty in applying sgd-type methods is the need to tune its learning rate among other hyperparameters, and it is well known that the performance of such algorithms crucially relies on the right choice of the learning rate. 
as a remedy, several adaptive sgd variants have been designed throughout the years (duchi et al., 2011;kingma and ba, 2015;levy et al., 2018;kavis et al., 2019;jacobsen and cutkosky, 2022); however, in practice such methods still require hyperparameter tuning, which might be very costly in huge scale scenarios. in this paper we focus on the prevalent sco setting where the objective (expected loss) is an expectation over smooth losses (sco-eos); this applies e.g. to linear and logistic regression problems (but not to svms). in this case, it is well known that sgd requires a careful tuning of the learning rate to obtain the optimal performance. for example, in the noiseless case, sgd (or gd in this case) should employ a learning rate of η offline = 1/l, where l is the smoothness parameter of the objective. nevertheless, if we apply this η offline in the noisy setting, the guarantees of sgd become vacuous. to obtain the optimal sgd guarantees, we should roughly decrease the learning rate by a factor of σ√t, where t is the total number of sgd iterates (and samples), and σ is the variance of the noise in the gradient estimates. this illustrates the sensitivity of sgd to the choice of η. the same applies to stochastic accelerated methods such as (lan, 2012;hu et al., 2009;xiao, 2010). contributions. we introduce a novel gradient estimate for sco-eos problems, and show that its squared error ‖ε_t‖² shrinks with the number of updates, ‖ε_t‖² ∝ 1/t, where t is the iterate. this is in contrast to the standard sgd estimator, where usually ‖ε_t‖² = variance_t = o(1). our new estimate blends two recent mechanisms that are related to the notion of momentum: anytime averaging, which is due to (cutkosky, 2019); and a corrected momentum technique (cutkosky and orabona, 2019). we therefore denote our estimate by µ², which stands for momentum². we further design an sgd variant called µ²-sgd, as well as an accelerated version called µ²-extrasgd, that employ our new estimate, and demonstrate their stability with respect to the choice of the learning rates η. concretely, • upon using the exact same learning rate of η offline = 1/8lt (where t is the total number of iterates/data-samples), µ²-sgd enjoys a convergence rate of o(l/t) in the noiseless case, and a rate of o(l/t + σ/√t) in the noisy case. moreover, in the noisy case, µ²-sgd enjoys the same convergence rate as the optimal sgd, o(l/t + σ/√t), for a wide range of learning rate choices i.e. η ∈ , such that the ratio η offline /η noisy ≈ (σ/l)√t. • upon using the exact same learning rate of η offline = 1/2l, µ²-extrasgd enjoys an optimal convergence rate of o(l/t²) in the noiseless case, and an optimal rate of o(l/t² + σ/√t) in the noisy case. moreover, in the noisy case, µ²-extrasgd enjoys the same optimal convergence of o(l/t² + σ/√t), for an extremely wide range of learning rate choices i.e. η ∈ , such that the ratio η offline /η noisy ≈ (σ/l)t^(3/2). the independence between samples implies that g_t is an unbiased estimate of ∇f(x_t) in the following sense: e[g_t] = ∇f(x_t).
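the two momentum mechanisms named above can be sketched together in a few lines. the snippet below is a schematic numpy illustration on a simple quadratic objective, assuming a running weighted average for the anytime (query-point) averaging and a storm-style correction in which the same fresh sample is used to evaluate the gradient at both the new and the previous query point; the exact weights, projection, and learning-rate schedule of µ²-sgd are simplified away.

```python
import numpy as np

rng = np.random.default_rng(0)
dim, T, eta, sigma = 5, 2000, 0.05, 1.0
x_star = np.ones(dim)          # minimiser of f(x) = 0.5 * ||x - x_star||^2

w = np.zeros(dim)              # iterate that the sgd step actually updates
x = w.copy()                   # anytime-averaged query point
d = (x - x_star) + sigma * rng.normal(size=dim)   # initial noisy gradient estimate at x
sum_alpha = 1.0

for t in range(1, T):
    w = w - eta * d                                       # sgd step with the current estimate
    alpha = t + 1.0
    x_prev, x = x, (sum_alpha * x + alpha * w) / (sum_alpha + alpha)   # anytime averaging
    sum_alpha += alpha
    noise = sigma * rng.normal(size=dim)                  # one fresh sample, evaluated at both points
    g_new, g_prev = (x - x_star) + noise, (x_prev - x_star) + noise
    beta = 1.0 / (t + 1.0)                                # simplified momentum weight
    d = g_new + (1.0 - beta) * (d - g_prev)               # storm-style corrected momentum

print("distance to optimum:", np.linalg.norm(x - x_star))
```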
it is often comfortable to think of the computation of g t = ∇f (x t ; z t ) as a (noisy) gradient oracle that upon receiving a query point x t ∈ k outputs a vector g t ∈ r d , which is an unbiased estimate of ∇f (x t ).update corrected momentum (storm style): draw z t+1 ∼ d, compute g t+1 := ∇f (x t+1 ; z t+1 ), and gt := ∇f (x t ; z t+1 ) and update,.where we have used α t = t + 1 implying α t /α 1:t-1 ≤ 4/t for any t ≥ 2, we also used w t -x t ≤ d which holds since w t , x t ∈ k, finally we use α t-1 = t.initialize: set x 0 = 0, and x1 = y 0 , draw z 1 ∼ d and set d 0 = g0 = d1 = ∇f (x 1 , z 1 ) for t = 1, .draw a fresh sample z t+1 ∼ d and compute, gt = ∇f (x t ; z t+1 ) , ĝt+1 = ∇f (x t+1 , z t+1 ). let f : k → r be a convex function and k be a convex set with diameter d, and denote w * ∈ arg min w∈k f (w). (18); and the third line follows by the definitions of g t , ĝt , as well as from w t -w * ≤ d, which holds since w t , w * ∈ k; the fourth line follows by our assumption in eq. (2) implies that for any z ∈ support{d} there exists l x,y;z ∈ such that, ∇f (x; z) -∇f (y; z) = l x,y;z x -y .where we have used e(∇f (x; z) -∇f (y; z)) = (∇f (x) -∇f (y)), and we denote σ 2 l {x, y} := e(l x,y;z -l x,y ).where we have used α t = t + 1 implying α t /α 1:t-1 ≤ 4/t for any t ≥ 2, we also used w t -x t ≤ d which holds since w t , x t ∈ k, finally we use α t-1 = t. 1: g t := ∇f (x t , z t ) ; gt-1 := ∇f (x t-1 , z t ), and we will also denote, ḡt := ∇f (x t ) ,and.where the first inequality follows from cauchy-schwartz; the second inequality holds since w t -w * ≤ d, as well as from using a+b 2 ≤ 2 a 2 +2 b 2 which holds for any a, b ∈ r d , the third inequality follows by the self bounding property for smooth functions (see lemma c.1 below) implying that ∇f (x τ ) 2 ≤ 2l(f (x τ ) -f (w * )) := 2l∆ τ ; and the fourth inequality follows due to α 2 τ ≤ 2α 1:τ which holds since α τ = τ + 1.(levy et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/175.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/175.txt new file mode 100644 index 0000000000000000000000000000000000000000..2435e06b39b3ca42464c65d514074c0cf17648d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/175.txt @@ -0,0 +1 @@ +iot devices are becoming increasingly prevalent in our daily lives. however, iot devices have inherent security vulnerabilities making them a prime target for attackers. prior work has shown how these devices can be easily compromised , , which has led to the deployment of network intrusion detection systems (nids) that detect any malicious network activity. this provides an early-warning system and enable system administrators to detect compromise.nids systems work by monitoring traffic patterns and detecting any malicious activities within the network. it employs an intrusion detection model to identify traffic patterns that deviate from normal behavior. recently, deep learningbased techniques have been proposed to train the intrusion detection model, which are trained to classify network traffic and identify the type of attack, if any. 
since distributed devices may see different types of attack, to effectively capture the heterogeneity of the devices, recent studies have proposed a federated learning approach, where the model learns a common intrusion pattern that captures the behavior of different in the non-iid data, dp scenario, the accuracy drops when the privacy budget of users with strict privacy exhausts because the network forgets past experience when new information arrives.iot devices . the key benefit of federated approach is that it enables aggregation of intrusion patterns from distributed iot devices such that data remains local to the devices.although a decentralized federated approach remains local to the device, it is not privacy-preserving as information may leak from the trained model . recently, differential privacy (dp) has emerged as a technique to train models to prevent such leak . dp mechanisms provide statistical guarantees to privacy by perturbing the data using random noise. however, prior work mostly assumes homogeneous privacy requirements across all users. that is, all users have uniform privacy expectations and thus share an almost equal amount of information. however, in a realistic scenario, it is quite probable that different users have different privacy budgets. moreover, providers can incentivize users to share more data. thus, it is natural to assume that a user will be willing to share more information and be less conservative about their privacy.but, distributed users with heterogeneous privacy budgets where data is not independent and identically distributed exacerbate the problem of the training model. in a dp-based deep learning framework, an accountant tracks the overall privacy loss at each access to the data to provide a bound on the privacy guarantee . and, the neural network model training progresses until the privacy budget is exhausted. as shown in figure 1, when the privacy budget is homogeneous (the case assumed in prior work), the performance accuracy remains stable. however, in a heterogeneous privacy budget scenario, the performance drops as it continues to learn. this is because, with new training updates, the model tends to quickly forget information learned from past users with stricter privacy budgets. thus, a key challenge is to ensure that the model does not forget past experiences of users having stricter privacy when new data from users with moderate privacy requirement arrives.to address the above challenges, in this paper, we design a continual learning based approach that ensures that the network remembers past experiences even when the privacy budget of a group of users are spent. in doing so, we make the following key contributions:• we formulate a real world problem of federated learning for intrusion detection on client-cohorts with heterogeneous privacy budgets and non-identical data distribution. we define the notion of cohort-based (ǫ, δ)-differential privacy for the aforementioned application. • we adapt current federated (ǫ, δ)-dp training methods for our cohort-based (ǫ, δ)-dp setting and study the challenges introduce by the heterogeneous setting. 
• we design two novel differentially private continual learning based methods, dp-r and dp-si, that can effectively train networks with heterogeneous privacy requirements.to the best of our knowledge, this is the first work that uses continual learning to improve federated (ǫ, δ)-dp training.• we provide an extensive evaluation that studies the performance, flexibility and hyper-parameter sensitivity of our cohort-based federated (ǫ, δ)-dp sgd methods. our evaluation is done on a real world cse-cic-ids2018 dataset . our results show that continual learning based federated dp approaches outperform the baseline dp-sgd methods in a heterogeneous privacy setting. the improvement in performance for both our proposed methods is robust to hyperparameter changes. additionally, we show that dp-si also provides flexibility in adapting to post-hoc relaxations to client privacy requirements. since distributed devices may see different types of attack, to effectively capture the heterogeneity of the devices, recent studies have proposed a federated learning approach, where the model learns a common intrusion pattern that captures the behavior of different in the non-iid data, dp scenario, the accuracy drops when the privacy budget of users with strict privacy exhausts because the network forgets past experience when new information arrives. thus, a key challenge is to ensure that the model does not forget past experiences of users having stricter privacy when new data from users with moderate privacy requirement arrives. a privacy accountant keeps track of the privacy loss and stops training when the privacy loss reaches a certain threshold. in comparison to homogeneous differential privacy, where a single ǫ value controls the privacy loss, the vector corresponds to each user's privacy. a cohort privacy mapping g : u → c maps users to a cohort, where each cohort has the same privacy preference.3 (cohort differential privacy): given the cohort privacy mapping g and e, a randomized mechanism m : d → r is said to be cohort differentially private if for all users u ∈ u, for any two neighboring inputs d 1 , d 2 ∈ d that only differ by a single record u, and for all possible outputs. given that the privacy requirement at each cohort, our goal is to enable a trusted curator learn an intrusion detection model in a decentralized manner that satisfies the privacy budget within each cohort. in this scenario, the privacy budget of the cohort with stricter privacy requirements will end sooner than that of the more relaxed cohort.initialize: w 0 , accountant(ǫ c , k c ), l si = 0, j = 0 ⊲ initialize weights, the privacy accountant and si loss function and j is the number of cohort's privacy budget exhausted in communication round t for each round t = 1, 2, .. in addition, we use the moments account to track the privacy loss, and the training stops when the privacy budget for both cohorts is spent. as shown, at cr=23, we see a significant drop in accuracy (and loss) in the dp-only approach, where the privacy budget of cohort 1 finishes and all new updates to the model come from cohort 2, which results in the drop. 
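a schematic sketch of the ingredients discussed here -- dp-sgd updates with per-example clipping and gaussian noise, cohorts whose privacy budgets expire at different times, and a simple quadratic pull toward the weights learned while the strict cohort was still active (a crude stand-in for si-style regularization) -- is given below. it is a toy numpy illustration on logistic regression with synthetic data, not the paper's dp-r or dp-si implementation, and the budgets are expressed simply as allowed numbers of steps rather than via a moments accountant.

```python
import numpy as np

rng = np.random.default_rng(0)
dim, clip, noise_mult, lr, reg = 10, 1.0, 1.1, 0.1, 0.05

def per_example_grads(w, X, y):
    """logistic-regression gradients, one row per example."""
    p = 1.0 / (1.0 + np.exp(-X @ w))
    return (p - y)[:, None] * X

def dp_step(w, X, y):
    """dp-sgd step: clip each per-example gradient, average, then add gaussian noise."""
    g = per_example_grads(w, X, y)
    g = g / np.maximum(np.linalg.norm(g, axis=1, keepdims=True) / clip, 1.0)
    noisy = g.mean(axis=0) + rng.normal(0, noise_mult * clip / len(X), size=dim)
    return w - lr * noisy

# two client cohorts with heterogeneous privacy budgets, modelled here as allowed update steps
budgets = {"strict": 50, "relaxed": 200}
data = {c: (rng.normal(size=(512, dim)), rng.integers(0, 2, 512).astype(float)) for c in budgets}

w, anchor = np.zeros(dim), None
for t in range(200):
    for cohort, steps in budgets.items():
        if t < steps:                                  # a cohort stops contributing once its budget is spent
            X, y = data[cohort]
            idx = rng.choice(len(X), 64, replace=False)
            w = dp_step(w, X[idx], y[idx])
    if anchor is None and t >= budgets["strict"]:
        anchor = w.copy()                              # remember what was learned from the strict cohort
    if anchor is not None:
        w -= lr * reg * (w - anchor)                   # si/ewc-like pull against forgetting (simplified)

print("drift from anchor:", np.linalg.norm(w - anchor))
```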
however, in dp-si, the training performance remains stable and does not drop even when the privacy budgets are spent. a value of 0.5 indicates that half the privacy budget is used in the first half of the training phase, and the remaining half of the privacy budget is uniformly distributed over the rest of the training phase to ensure that the model is periodically updated. learning and differential privacy: differential privacy was proposed by dwork et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/176.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/176.txt new file mode 100644 index 0000000000000000000000000000000000000000..54d480538dd25864e98170d0a0fe5880a40198b2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/176.txt @@ -0,0 +1 @@ +grid computing can provide services that effectively distribute tasks to suitable resources connected in a network. for example, we can improve the processing speed of intensive scientific calculations by installing work-load management software, which utilizes idle pcs (personal computers) of public libraries or administration systems. however, studies into high-performance computing still lack research on public computer facilities, which have a lot of idle time. the idle times of generic personal computers have increased over time due to the generalization of computer usage and cloud computing environments in educational institutions, national administrative agencies, and public institutions. figure 1 shows an example of used and idle times of 300 dual-core pcs in a university library during a typical weekday. the unused idle times are 28% (2,672 hours) of the 9,550 hours that the pcs are powered on. the remainder of this paper is organized as follows. section ii describes the compilation cost due to the evolution of software. section iii addresses the design and implementation of the proposed techniques. section iv shows the experimental results. related work is described in section v. finally, section vi concludes the paper. the distributed compilation system (distcom) is a technique designed for implementing a distributed compilation platform in order to speed up the compilation of large software by using the resources of idle pcs. then, the (2) distcom manager distributes the compilation commands to the distributed pcs to run the distributed compilation of the software code. the approach is practical and useful because the proposed system supports a retry mechanism that executes recompilation at the object-file level whenever a compilation failure happens on a distributed pc during the distributed compilation. when distcom uses the distributed pc resources, the (2) distcom manager uses two methods to control the cpu resources in order to optimize the compilation performance of tasks running on the distributed computers. when we executed a distributed compilation, we set the maximum available computers to 9 machines (cpu: intel core2duo, mem: ddr2 1g, network interface card: intel 100 mbps ethernet controller) and the dedicated resource scheduling policy as the default cpu scheduling policy. figure 11 shows how the pc resources connected by the network for building the mobile platform source affect the processing speed of the distributed compilation. from our analysis, we found that the compilation performance of the shared resource scheduling method largely depends on the cpu usage of the pc resources, in comparison with the dedicated resource scheduling method.
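as a rough illustration of the object-file-level distribution and retry behaviour described above, the sketch below dispatches one compile command per idle pc and retries locally on failure. the host names, the gcc invocation and the ssh transport are assumptions made for the sketch; this is not the actual distcom implementation.

import subprocess
from concurrent.futures import ThreadPoolExecutor

# illustrative sketch: compile one object file per remote pc, and retry
# locally whenever the remote compilation of that object file fails.
sources = ["a.c", "b.c", "c.c"]
hosts = ["pc01", "pc02", "pc03"]   # idle pcs reachable over ssh (assumed)

def compile_remote(host, src, obj):
    cmd = ["ssh", host, "gcc", "-c", src, "-o", obj]
    return subprocess.run(cmd).returncode == 0

def compile_with_retry(host, src):
    obj = src.replace(".c", ".o")
    if compile_remote(host, src, obj):
        return src, "remote"
    subprocess.run(["gcc", "-c", src, "-o", obj], check=True)   # local retry on this object file
    return src, "local-retry"

with ThreadPoolExecutor(max_workers=len(hosts)) as pool:
    for src, where in pool.map(compile_with_retry, hosts, sources):
        print(f"{src}: compiled via {where}")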
moreover, in case those available pc resources are more than 10 distributed pcs, the compilation speed of dedicated resource scheduling method was improved against a high-performance computing server (8-core intel xeon e5 processor, 12gb memory). the existing distccallocates one work-load per pc without one work-load per cpu and does not support shared resource scheduling like distcom. however, our proposed system executes multi-core aware distributed compilation that allocates one work-load per cpu after calculating the number of cpus. the results show that the dedicated resource scheduling method with 40 vms consumed 20 minutes and the high-performance cloud server on its native operating system consumed 17 minutes. in this section, we discuss existing distributed compilation schemes, as well as existing high-performance computing, which served for us as a prelude to the proposed distcom system. in this paper, we proposed a distributed compilation system, which consists of a distributed server and client model, a resource manager for scheduling distributed computers, and a cross-compiler infrastructure to support heterogeneous architectures. we have verified that our proposed distcom system can significantly improve compilation speeds using existing idle pc resources by proposing a distributed compiler system of compatible heterogeneous cpu architectures. moreover, the proposed distcom system minimizes performance degradation of the distributed compilation by executing resource scheduling of remote computers based on object file units. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/177.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/177.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6f9f25d4a6bb75bdcadc2619a5bbf821d22ad1d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/177.txt @@ -0,0 +1 @@ +mainstream gpu programming as exemplified by cuda and opencl employ a "single instruction multiple threads" (simt) programming model. the cpu host code in an opencl application defines an n-dimensional computation grid where each index represents an element of execution called a "work-item". an opencl kernel describes the algorithm that will be executed on gpu for one work-item. work-items are grouped together into independent "work-groups" that execute concurrently. work-items inside one work-group may communicate through fast on-chip shared local memory (slm) and barrier synchronization.opencl's programming model is a powerful paradigm to express data parallelism, as developers can write purely scalar code for their kernels without knowing the details of how the work-items are mapped to the hardware execution units. this abstraction has profound performance implications, however, as the intel gpu architecture (also called gen) and the underlying instruction set architecture (isa) is "single instruction multiple data" (simd). intel gpus feature an expressive instruction set that supports variable simd-sizes as well as powerful regioning capabilities that allow for fast cross-lane data sharing. an execution unit (eu) on gen has a fixed number of hardware threads, and each thread executes simd instructions on its dedicated 4kb byte-addressable register file. the opencl compiler is responsible for vectorizing the kernel into one of the three simd sizes (8,16,32) for thread dispatch, and work-items execute the same instructions on one thread in lock-step. 
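the mapping from scalar work-items to one simd instruction can be pictured with a small numpy sketch; numpy here merely stands in for the vectorized code an opencl compiler would emit, so this is purely illustrative.

import numpy as np

a = np.arange(16, dtype=np.float32)
b = np.ones(16, dtype=np.float32)

# simt view: each of 16 work-items runs the same scalar kernel body on its own index
out_scalar = np.empty(16, dtype=np.float32)
for i in range(16):
    out_scalar[i] = a[i] * 2.0 + b[i]

# simd view: the compiler packs those 16 work-items into one simd-16 operation
out_simd = a * 2.0 + b
assert np.allclose(out_scalar, out_simd)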
simd size selection is thus the most important optimization decision for the compiler, as it affects thread occupancy, instruction-level parallelism (ilp), simd-lane utilization due to divergence, and register spill.a high-performance program on gen needs to exploit a thread's dedicated register file to cut down memory traffic while avoiding register spill, which is often fatal for performance. this can be surprisingly difficult to achieve for opencl programs, however, as in order to stay portable the language offers no mechanism for direct register file control. register pressure estimate at the source level is often wildly inaccurate due to the various compiler optimizations and transformations that must happen to lower opencl c into gen isa.since under the simt model each work-item executes independently, opencl programs also lose control of data sharing among the cooperative items in the same thread. furthermore, the simt model prevents opencl programs from directly accessing gen isa's powerful regioning mechanisms, which allows one simd lane to access another lane's data at no additional cost. the introduction of subgroups in opencl 2.0 partially alleviates the gaps by exposing some of the underlying hardware capabilities through builtin functions, but getting close to the metal performance with opencl on intel gpus remains challenging.this paper presents the c-for-metal (cm) development framework, an explicit simd programming model designed specifically for coding to the metal on intel gpus. the cm language is an extension to c/c++ that provides an intuitive interface to express explicit data-parallelism at a high level of abstraction. at the core of the language are two special vector and matrix types that form the foundation of its programming model. vector and matrix variables are to be allocated in registers, which makes it much easier to control register usage at the source level. a cm kernel describes the algorithm for an entire hardware thread instead of a single work-item through builtin operations on vectors and matrices; of particular importance is the select operator that supports efficient registergather of elements in a variable and is mapped directly to the gen isa regions. programmers explicitly control an instruction's simd size by varying the number of elements returned in a select operation, and different simd sizes may be used based on considerations such as register demand and divergence.the cm compiler (cmc) is based on the llvm infrastructure and is responsible for generating gen isa simd instructions from the high-level vector and matrix operations. a number of cm-specific intrinsics are introduced to effectively represent such operations in the llvm intermediate representation (ir). a sequence of cm-specific optimizations and transformations are developed around those intrinsics. one unique challenge in developing this compiler is that we need to strike a careful balance between compiler optimizations and what-you-write-is-what-you-get. cm kernels are fully compatible with the intel gpu opencl runtime and oneapi level zero and can be launched directly as if they are written in opencl. while gen is cm's native architecture, cm kernels may also be executed on cpu for debugging purposes. the cm development framework is open source and can be found in .we present a comprehensive experimental evaluation of representative applications from different domains implemented in cm and opencl. 
for each workload we provide an implementation sketch on how to code to the metal on gen using cm. we show that cm kernels achieve up to 2.7x speedup compared to the best-known opencl implementations that use available intel-specific gpu extensions . the speedup offered by cm does not mean a sacrifice to productivity; while opencl may allow for rapid prototyping of sequential code, this advantage is often negated by the subsequent tuning efforts required to obtain good performance on gpus. results from the development process of several compute kernels indicate that cm provides 2-3x more productivity in terms of the development effort than opencl.the rest of the paper is organized as follows: section ii briefly covers the related work; section iii discusses the main motivations of cm as an efficient simd programming model; section iv describes the cm programming language; section v describes the cm compiler; section vi presents several applications implemented in cm and their experimental evaluation; and finally section vii concludes this paper. a cm kernel describes the algorithm for an entire hardware thread instead of a single work-item through builtin operations on vectors and matrices; of particular importance is the select operator that supports efficient registergather of elements in a variable and is mapped directly to the gen isa regions.the cm compiler (cmc) is based on the llvm infrastructureand is responsible for generating gen isa simd instructions from the high-level vector and matrix operations. cm kernels are fully compatible with the intel gpu opencl runtimeand oneapi level zeroand can be launched directly as if they are written in opencl.the rest of the paper is organized as follows: section ii briefly covers the related work; section iii discusses the main motivations of cm as an efficient simd programming model; section iv describes the cm programming language; section v describes the cm compiler; section vi presents several applications implemented in cm and their experimental evaluation; and finally section vii concludes this paper. even though simd lanes in a thread share the register file, the simt abstraction prevents one lane from accessing another lane's register data, and this invariably leads to redundant computation and memory operations. in section iv we present the cm implementation to showcase the language's key features, while section v explains how the cm kernel is compiled into the base isa. in section vi, we evaluate the performance of our cm kernel against an optimized opencl kernel that uses intel-specific extensions, and show that even this optimized version can only reach less than 50% of cm's performance. since a cm kernel describes the algorithm for one thread, it can naturally store the data for the 2d block read/write in a matrix, and it can also choose the best matrix size without being constrained by the dispatch size. this example shows the power of cm programming on gen; programmers express their algorithms using high-level matrix operations, and the compiler generates them into multiple simd instructions while taking advantage of the region-based address scheme to efficiently access register data.to avoid excessive global memory access and global synchronizations, our cm kernel takes advantage of the large register space to hold 256 data elements in registers, processing several split steps locally. 
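the select/regioning idea can be approximated with strided array views. the sketch below is a numpy analogy of cm's matrix type and select operator, not actual cm code, and the region parameters are chosen arbitrarily for illustration.

import numpy as np

# an 8x32 "register block", as if loaded with a 2d block read
block = np.arange(8 * 32, dtype=np.float32).reshape(8, 32)

# roughly analogous to a cm select of 4 rows (stride 2) and 8 columns (stride 4)
# starting at row 0, column 1: a strided sub-matrix gathered without copying data
sel = block[0:8:2, 1:33:4]
print(sel.shape)   # (4, 8)

# varying how many elements an operation touches is, in spirit, how the kernel
# picks the simd width of the generated instruction (e.g. 8 vs 16 lanes)
simd8 = block[0, :8] + 1.0
simd16 = block[0, :16] + 1.0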
even though opencl and cm gemm kernels employ a similar register-blocking strategy -opencl is able to do so by using the cl intel subgroup extensionand mimicking the cm implementation, the cm kernel is able to process more data per thread thanks to more efficient management of the register file. cm has been extensively used for high-performance library development inside intel, however, and user experiences overwhelmingly suggest that programmers are much more productive using cm once performance tuning efforts are considered. the simt abstraction makes it difficult for even expert gpu programmers to control a kernel's full optimization needs, and their opencl implementation suffers from poor performance predictability; an innocuous one-line change could result in significant variation in generated code if it causes the kernel to spill or copy moves to not be coalesced. major features are illustrated for how to expose underlying hardware capabilities: vector/matrix variables represent registers and express simd parallelism, select operation maps to register regioning, block read/write enables efficient memory access, and divergent control flow constructs allow for mixing simt and simd models. for instance, dpc++-esimdintegrates some cm language features into dpc++, and ispcalso generates cm vector intrinsics and relies on cm optimizations and code generation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/178.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/178.txt new file mode 100644 index 0000000000000000000000000000000000000000..4cfb694f4d2c8b89ee960092ab307b945d9fec6e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/178.txt @@ -0,0 +1 @@ +real-time computer vision applications are currently bound to traditional camera sensors that transfer each pixel at each frame to a host where it is processed. this requires high-performance buses between the sensors and hosts, especially where high frame-rates are required. a self-driving car may need to receive new information for every 1cm travelled to be vigilant of unexpected scenarios, so at 80 km/hr a frame rate of 2222 hz would be required. a 2 mega-pixel camera, with 10-bit pixel depth, running at such a frame rate, requires a bus capable of 45.6 gbit/s -which is currently only possible with devices such as a pci-e x8 gen3 interface . for many applications, however, streaming data at such volumes is too demanding -both in power and computation time -hence requiring an alternative solution.codesign of hardware and software for computer vision applications is an emerging research field to address the limitations of conventional systems . focal-plane sensor-processors (fpsps) are a promising avenue for reducing the data transfer between the camera and the processing unit. fpsps, often synonymous with cellular processor arrays (cpas) and pixel processor arrays (ppas), perform processing on the sensor chip itself and are often designed for tasks which require high frame rates or low latency . the principle behind them is that a small processor is embedded directly with each pixel of the sensor. while fpsps come in various forms for specific applications, we in this paper we explore a general-purpose fine-grain architecture scamp-5 , but one can imagine alternatives that could be designed for various use cases.one of the most widely used methods for image analysis is convolution kernels. 
from edge detection using sobel filters to document recognition using convolutional neural networks , convolutional kernels are the foundation for many complex computer vision applications. traditionally, application of the convolutional kernels to the image data occurs on a cpu, but more recently gpus and fpgas are used to accelerate the computations in parallel , . several systems have been designed to optimise the processing of convolutional kernels on gpus and fpgas, leading to a vast array of techniques to reduce the number of operational cycles needed to apply kernels to input data. while this significantly increased throughput, these methods are still bounded in latency as the image must make its way from the camera through to the host system. as for fpsps, the ability to process the data on the focal plane enables the kernels to be applied to the image data at very low latency. furthermore, the unique ability to select the data which is transferred from the device to the host reduces the data volume, which allows for high frame rates. however, the technology is comparatively new. by design, they offer novel ways to interact with the data, and while work has been done to provide a domain-specific-language and associated tools to program such hardware , there has been less work done so far to produce code generation systems to make efficient use of their architectural features when applying convolutional kernels in particular.one such system that does exist, however, is auke . given an n ×n convolutional kernel, auke's reverse-split algorithm generates code for scamp-5 which applies the kernel efficiently to the captured image on the focal-plane using analogue computation. auke is, however, limited to compiling just a single convolutional kernel at a time using a reduced instruction set that omits the more powerful instructions available in scamp-5.in this work, we present an improved alternative to auke, with the ability to produce code for applying multiple convolutional kernels at a time. the problem is presented as a dynamic graph search problem in which we must efficiently generate and traverse possible processor states to find a path that describes the relevant convolutional computation. by incorporating instruction selection and instruction scheduling into the core of search process, we enable the use of more novel features of cpa architectures than auke is able to use. by optimising the code for multiple kernels simultaneously, common sub-expressions between kernels can be exploited and produced only once rather than for each kernel. this reduces the computational expense of applying the kernels, enabling applications to run at a faster frame rate. the primary objective of this work is to push the boundary of code generation for fpsp devices through simultaneous kernel optimisation. we offer the following contributions:-cain: a code generation algorithm which effectively makes use of common sub-expressions across filters consisting of multiple convolutional kernels. our graph search strategy -which enables cain to efficiently search large graphs -combines instruction scheduling, instruction selection and registerallocation constraints into the core of the search to make better use of specific hardware capabilities in simd processors. 
-we show how this search can be tractable for problems of interest through a problem formulation based on auke's multi-set-of-atoms problem representation, combined with a ranking heuristic and a hybrid graph-generatorgraph-search exploration strategy. -we show how this approach allows flexible exploitation of hardware capabilities (such as three-operand adds and multi-step shifts), and generates very efficient use of additions to avoid multiplies. -evaluation of the effectiveness of cain on the scamp-5 focal-plane sensorprocessor. we compare against auke and test the effectiveness of simultaneous kernel optimisation. we conclude by exploring how our simultaneous kernel optimisation extends to future devices with more registers per pixel.the remainder of the paper is organised as follows. section 2 describes the scamp-5 and its instruction sets, section 3 explains our proposed code generation algorithm cain, and in section 4 detailed comparison is made between cain and auke, together with an evaluation of the effectiveness of simultaneous kernel optimisation. section 5 reviews the related work auke in detail. finally, section 6 concludes our work, with a discussion about potential future research. by design, they offer novel ways to interact with the data, and while work has been done to provide a domain-specific-language and associated tools to program such hardware, there has been less work done so far to produce code generation systems to make efficient use of their architectural features when applying convolutional kernels in particular. given an n ×n convolutional kernel, auke's reverse-split algorithm generates code for scamp-5 which applies the kernel efficiently to the captured image on the focal-plane using analogue computation. our graph search strategy -which enables cain to efficiently search large graphs -combines instruction scheduling, instruction selection and registerallocation constraints into the core of the search to make better use of specific hardware capabilities in simd processors. section 2 describes the scamp-5 and its instruction sets, section 3 explains our proposed code generation algorithm cain, and in section 4 detailed comparison is made between cain and auke, together with an evaluation of the effectiveness of simultaneous kernel optimisation. for example; the macro instruction add(a, b, c) means a := b + c and is made up of two bus instructions: bus(news, b, c) meaning the news register now contains the value of -(b+c); and then bus(a, news) so that reg-ister a contains b + c.cain is a framework for compiling convolutional filters, designed to search through a configurable cellular processor array (cpa) instruction set to find efficient code. to do this cain searches backwards, starting with a set of final kernels, these are the convolutional filter, and applying instructions in reverse to simplify the kernels until only the identity kernel 2 is left.cain is designed such that different definitions could be used without changing the fundamental search algorithm but the definitions we use here for scamp-5 are based largely on auke's, which provides an elegant way to conceptualise the convolutional kernels without multiplication. while dfs performs well in auke, it struggles in cain because the number of child nodes at every level is far greater, since each edge is only one instruction and there are multiple kernels to consider. 
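to illustrate the shift-and-add view of convolutional kernels and the value of a sub-expression shared across two filters, here is a small numpy sketch; it mimics the idea only and is unrelated to cain's actual search or the scamp-5 instruction set.

import numpy as np

def shift(img, dy, dx):
    """return img shifted by (dy, dx), zero-filled at the borders."""
    out = np.zeros_like(img)
    h, w = img.shape
    out[max(dy, 0):h + min(dy, 0), max(dx, 0):w + min(dx, 0)] = \
        img[max(-dy, 0):h + min(-dy, 0), max(-dx, 0):w + min(-dx, 0)]
    return out

img = np.random.rand(16, 16)

# common sub-expression: a horizontal 1d sum used by both target kernels
horiz = shift(img, 0, -1) + img + shift(img, 0, 1)

# kernel 1: unnormalised 3x3 box blur, built as a vertical sum of the shared term
box3 = shift(horiz, -1, 0) + horiz + shift(horiz, 1, 0)

# kernel 2: prewitt-style vertical gradient, reusing the same shared term
grad_y = shift(horiz, -1, 0) - shift(horiz, 1, 0)

compiling both kernels together lets the horizontal sum be produced once instead of once per kernel, which is the saving simultaneous kernel optimisation targets.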
the aim of this algorithm is to ensure that the search does not end up 'trapped' in one small part of the graph, but can effectively search traverse many children of many of the nodes that are found where dfs will search all of the children of nodes at the extent of the paths it searches before searching the second children of nodes earlier in the graph. not only this, the search strategy used by cain is better than auke's, as shown in 5 × 5 gaussian kernel, were using the same set of instructions (basic), code generated by cain is half in length when compared to output of auke's. for example when combining 3 × 3 and 5 × 5 gaussian, unlike auke, cain is implemented to utilise the common sub-expressions between the kernels, thus, generating shorter code than naively concatenating the code for each of the gaussian kernels.if cain has an effective heuristic we will quickly see a point of diminishing returns in code length, as cain continues to search new nodes and takes more time. automatic kernel code generation for analogue simd (auke) is an algorithm for generating code given a single convolutional kernel created by t. this is not needed in cain since cain searches instruction by instruction, and so any optimisations found via graph relaxation are already a part of the search space. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/179.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/179.txt new file mode 100644 index 0000000000000000000000000000000000000000..9853956a0d9ff2c8fb557fe423558401a837d88e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/179.txt @@ -0,0 +1 @@ +one of the aims of pactole is to stay as simple and as close as possible to the definitions of the robotic swarm community. thus, a state of the overall system, a configuration is defined as the collection of the states of all robots, conveniently combined into a map from robot names to robot states:a robot state can be anything (to accurately describe reality) but must at least contains its location, accessible through a function get_location : state → location, where the type location denotes the space where robots evolve.an execution is an infinite sequence of configurations:executions are usually built by executing a protocol (called a robogram) against an environment, represented as a demon, that is, an infinite sequence of decisions called demonic actions. this observation denotes a degraded version of the configuration centred on the observing robot, depending on its sensors. it is a parameter of the model and its computation from a (local) configuration is performed by an obs_from_config function, which hides the information unavailable to robots and takes as input the configuration and the state of the observing robot. this function is specified by a logical formula obs_is_ok relating any configuration to its possible observation from any robot state. to represent the fact that robots observe from a personal point of view, they have their own frame of reference that need not be consistent in time or with other robots (other orientation, other scale, other origin, etc.). this frame of reference allows to create a local configuration (by opposition to the point of view of the demon denoted as global) from which the observation is computed, it depends on the underlying space and it is picked by the demon.in such an execution, the robogram corresponds to one look-compute-move cycle and the demonic action to the reaction of the environment. 
their interaction is described by a function round so that the resulting execution is simply repeatedly calling this function with the robogram, the demon and the starting configuration.the round function is the heart of the model, implementing the look-compute-move cycle and computing the configuration obtained after one round. note that this function is the same for all variants, fsync/ssync/a-sync synchronization, all spaces, all sensors, etc. this is done in the following consecutive steps for each robot name id:1. if the robot id is not activated, its state may undergo some change by the inactive function to represent an ongoing action or the effect of the environment.2. if id is a byzantine robot, it is relocated by the demonic action da.3. use the local frame of reference provided by da to compute the local configuration.4. transform this local configuration into an observation.5. apply the robogram on this observation.6. if moves are flexible, compute the new position of id using information given by da. • exists_at_less_than_dp expressing that if all robots of lower identifier in range of an alive robot r have their light on, then one of them is not in the pursuit zone of r (at most d p away from r). for the property executed_means_light_on, remark that if a robot r alive in cf withdraws in cf , it is either because, in cf , there is no robot in range or there is one too close (at most d away). since the light is on when move_to returns false, we know this function returns true in cf, and r thus performs the move chosen by choose_new_pos between cf and cf . thus r cannot lose contact with its target: by the specification of choose_new_pos r moved at a distance no greater than d p to the cf-location of its target, that is at a distance no greater than d p + d = d max to the cf -location of the target. moreover, r cannot withdraw due to another robot at a distance less than d in cf : by the specification of choose_new_pos it moved at a distance more than 2d apart from the cf-location of any other robot of lower identifier, that is more than 2d -d = d apart from the cf -location of any other robot of lower identifier.by the property executed_means_light_on a robot r alive in cf that withdraws in cf has its light on and did not move between cf and cf . if r stays alive in cf and withdraws in cf , then some other robot r is at a distance from r that is greater than d in cf and at most d in cf'. let us consider an alive robot r in cf such that all robots in range have their light on. note that as r is alive in cf , there was a robot in range in cf and r had a target r in cf. since r has its light on in cf , it did not move between cf and cf . if r was out of the pursuit zone of r in cf (that is, at most d p away from r), we can conclude because r either did not move or moved to a position not farther that d p away from r . if r was inside the pursuit zone if r in cf, by the specification of choose_target, so were all robots in range of r. in particular, r could move towards r as no robot was inside its danger zone and since r did not move, r and r are at most d p away in cf .if move_to is true and r has its light off, then by property executioner_means_light_off r cannot withdraw, and cannot get out of range of r since r moves closer to r .if move_to is true and r has its light on, the invariant exists_at_less_than_dp and the specification of choose_target entail that all robots in range of r have their light on and that r is out of the pursuit zone. 
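the round function's steps can be mirrored in a small executable sketch. this is plain python rather than the coq development: the dictionary-based encoding of the demonic action is invented for illustration, and obs_from_config is reduced to a multiset of locations.

def obs_from_config(local_config, observer_state):
    # degraded view of the configuration: here, just the multiset of observed locations
    return sorted(state["loc"] for state in local_config.values())

def round_step(robogram, da, config):
    """compute the configuration obtained after one round (look-compute-move)."""
    new_config = dict(config)
    for rid, state in config.items():
        if rid not in da["activated"]:
            new_config[rid] = da["inactive"](state)            # step 1: not activated
        elif rid in da["byzantine"]:
            new_config[rid] = da["relocate"](rid)              # step 2: byzantine relocation
        else:
            frame = da["frame_of"](rid)                        # step 3: local frame of reference
            local = {r: frame(s) for r, s in config.items()}
            obs = obs_from_config(local, frame(state))         # step 4: observation
            target = robogram(obs)                             # step 5: compute
            new_config[rid] = da["move"](rid, target, state)   # step 6: (possibly flexible) move
    return new_config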
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/18.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/18.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a676a963ce377c69d5511dd9565587a20b31bee --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/18.txt @@ -0,0 +1 @@ +one of the most fundamental responsibilities of a legal professional is to leverage analytical problem solving skills in applying a seemingly endless universe of law to an equally endless universe of scenarios. such laws can be confusing and contradictory, well-reasoned rationales may be far from straightforward, and application may be inconsistent. accordingly, the passing rates for the bar examination -a critical step toward becoming a practicing attorney in many countries -range from about 80% in the united states in 2021 1 to 39.2% in japan in 2020 (widely regarded as one of the most difficult of all bar examinations) 2 . the best attorneys are not only able to achieve these tasks with ease, but they are also able to effectively explain the basis for their work. more often than not, a simple binary response to a given legal question is far from acceptable without the appropriate rationale to conclude it, perhaps accompanied by a chain of references to applicable statutes or a legal reasoning technique such as issue, rule, application, conclusion (burton, 2017).application of reason-based prompting mechanisms with large language models is becoming an increasingly prevalent focus area in natural language processing research. models like openai's gpt-3 achieve satisfactory results -81% accuracy with a zero-shot approach and 82.8% accuracy with a few-shot approach -on commonsense reasoning tasks drawn from the physicalqa dataset (brown et al., 2020), but, as we observe, struggle significantly with more specialized domain data. further research builds on these basic queries by implementing a variety of so-called prompt engineering approaches, which range from soliciting a model to "think step by step" in producing incremental reasoning to underscore a given response (kojima et al., 2022), to leveraging an iterative cycle of model generated rationales to bootstrap its own ability to produce more elaborate reasoning approaches (zelikman et al., 2022); these approaches have demonstrated a nomimal improvement in a language model's ability to rationalize a correct response to a particular baseline query.our research aims to explore the effects of such approaches on highly specialized domain data, namely that in the legal field drawn from the japanese bar exam. we frame our approach around the university of alberta's annual competition on legal information extraction/entailment -or col-iee -event (rabelo et al., 2022), in which certain subtasks are devoted to reasoning through legal hypotheses given contextual articles; the coliee data itself solicits a yes/no response following a proposed hypothesis and supporting legal statutes.we first explore zero through few-shot approaches using pretrained llms, coupled with prompts either drawn from existing work (e.g., "let's think step by step" (kojima et al., 2022)) or generated by ourselves (e.g., "please determine if the following hypothesis is true or false based on the given premise").additionally, we assess the impacts of fine tuning an llm to infer binary responses both with and without explanations (either machine generated or extracted from the supporting premise). 
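a sketch of how the zero-shot legal prompt and a two-stage reasoning-then-answer flow could be assembled is shown below; query_llm is a placeholder for whichever completion api is used, and any prompt wording beyond the strings quoted in the text is illustrative.

def classify(premise, hypothesis, query_llm):
    prompt = ("please determine if the following hypothesis is true or false "
              "based on the given premise.")
    # stage 1: reasoning extraction, {prompt} + {premise} + {hypothesis} + {cot trigger}
    stage1 = (f"{prompt}\npremise: {premise}\nhypothesis: {hypothesis}\n"
              "let's think step by step.")
    reasoning = query_llm(stage1)
    # stage 2: answer extraction conditioned on the generated reasoning
    stage2 = stage1 + "\n" + reasoning + "\ntherefore, the answer (true or false) is:"
    return "true" in query_llm(stage2).lower()

# usage with any callable that maps a prompt string to a completion string:
# label = classify(article_text, hypothesis_text, query_llm=my_model)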
the best results on the coliee 2021 test set, though, result from a zero-shot, legal-prompt approach, which surpasses state of the art coliee performance by 15.79%. similarly, an 8-shot approach yields the best performance on 2022 coliee test data, with an overall accuracy improvement of 9.46%.our experiments show that few-shot and finetuning with explanation approaches show good and consistent results for the two test sets of the col-iee competition we used for evaluation. zero-shot and a fine-tuning approach using the labels only show more inconsistent results across the two years. the zero-shot with legal reasoning approach shows the best result for one year only and may be more prone to overfitting to a specific test set indicating that further research of those prompting approaches is needed.premise: article 18 (1) if the grounds prescribed in the main clause of article 15, paragraph (1) cease to exist, the family court must rescind the decision for commencement of assistance at the request of the person in question, that person's spouse, that person's relative within the fourth degree of kinship, the guardian of a minor, the supervisor of a minor's guardian, the assistant, the assistant's supervisor, or a public prosecutor. be aware that the accuracy 2021 coliee winner obtained was 0.premise: article 117 (1) a person who concludes a contract as an agent of another person is liable to the counterparty for the performance of the contract or compensation for loss or damage, as chosen by the counterparty, unless the person proves the authority to represent or the principal ratifies the contract. as shown in figure2, in the first stage, which is the reasoning extraction process, we provide gpt-3 with {prompt} + {premise} + {hypoth-esis} + {cot} as input, where the prompt is the prompt2 described in section 4. premise: article 117 (1) a person who concludes a contract as an agent of another person is liable to the counterparty for the performance of the contract or compensation for loss or damage, as chosen by the counterparty, unless the person proves the authority to represent or the principal ratifies the contract. premise: article 117 (1) a person who concludes a contract as an agent of another person is liable to the counterparty for the performance of the contract or compensation for loss or damage, as chosen by the counterparty, unless the person proves the authority to represent or the principal ratifies the contract., 2020), compute the cosine similarity score between each sentence and the hypothesis, then choose the sentence with the highest similarity score as the explanation. in our approach, we prompt gpt-3 to generate explanations for each hypothesis-premise-answer triplets by providing it with "please explain why the following hypothesis is" + {label} + "based on the given premise.premise: article 509 the obligor of either of the following obligations may not duly assert a set-off against the obligee; provided, however, that this does not apply if the obligee acquires a claim corresponding to the relevant obligation from another person: (i) an obligation for compensation for loss or damage based on a tort committed in bad faith; or (ii) an obligation for compensation for loss or damage for death or injury to person (excluding the one set forth in the preceding item). 
the approach is extracted from the legal reasoning approaches summarized ft1 refers to fine-tuning gpt-3 with binary answers prompt, ft2 refers to fine-tuning gpt-3 with pseudo-explanation.premise: article 566 if the subject matter delivered by the seller to the buyer does not conform to the terms of the contract with respect to the kind or quality, and the buyer fails to notify the seller of the non-conformity within one year from the time when the buyer becomes aware of it, the buyer may not demand cure of the non-conformity of performance, demand a reduction of the price, claim compensation for loss or damage, or cancel the contract, on the grounds of the non-conformity; provided, however, that this does not apply if the seller knew or did not know due to gross negligence the non-conformity at the time of the delivery.premise: article 15 (1) the family court may decide to commence an assistance in respect of a person whose capacity to appreciate their own situation is inadequate due to a mental disorder, at the request of the person in question, that person's spouse, that person's relative within the fourth degree of kinship, the guardian, the guardian's supervisor, the curator, the curator's supervisor, or a public prosecutor; provided, however, that this does not apply to a person with respect to whom there are grounds as prescribed in article 7 or the main clause of article 11. article 11 the family court may decide to commence a curatorship in respect of a person whose capacity to appreciate their own situation is extremely inadequate due to a mental disorder, at the request of the person in question, the person's spouse, the person's relative within the fourth degree of kinship, the guardian, the guardian's supervisor, the assistant, the assistant's supervisor, or a public prosecutor; provided, however, that this does not apply to a person in respect of whom a cause set forth in article 7 exists.premise: article 117 (1) a person who concludes a contract as an agent of another person is liable to the counterparty for the performance of the contract or compensation for loss or damage, as chosen by the counterparty, unless the person proves the authority to represent or the principal ratifies the contract.premise: article 18 (1) if the grounds prescribed in the main clause of article 15, paragraph (1) cease to exist, the family court must rescind the decision for commencement of assistance at the request of the person in question, that person's spouse, that person's relative within the fourth degree of kinship, the guardian of a minor, the supervisor of a minor's guardian, the assistant, the assistant's supervisor, or a public prosecutor. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/180.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/180.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f5483c131a8fab2dda208dbd6cee4493ce7aa23 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/180.txt @@ -0,0 +1 @@ +according to nist, cloud computing is a model where computing resources can be provisioned and release on-demand by consumer with self-service options and very little help from service providers . the advancement of networking and other areas of computer hardware has made the cloud computing model a reality. amazon launched the first public accessible cloud computing infrastructure with ec2 and s3 in 2006 . this is a milestone in the cloud computing era. 
since then, the competition between major cloud providers has resulted in major technology innovations and standardization initiatives. today, the major public cloud service providers are amazon web services, google cloud platform, microsoft azure, ibm bluemix and salesforce. this paper will discuss the cloud computing roots then followed by its concept.the future of cloud computing and recent development trend will also be discussed. therefore, the paper will provide a comprehensive overview of what cloud computing is, how its services are provided and what are the different service models available for providing cloud computing services to customers according to their requirements.according to nist, cloud computing is a model where computing resources can be provisioned and release on-demand by consumer with self-service options and very little help from service providers. therefore, the paper will provide a comprehensive overview of what cloud computing is, how its services are provided and what are the different service models available for providing cloud computing services to customers according to their requirements.while discussing about various cloud computing services and associate service delivery models, it is important to have a clear idea regarding roots of cloud computing to understand fundamental nature and characteristics of cloud computing that makes it different from traditional computing services. it is important to identify that cloud computing integrates technologies from different areas of computing and networking and it is integration of these computing areas that makes it possible to deliver fundamental services offered by cloud computing technology.cloud computing is a model developed from several other distributed computing research areas such as hpc, virtualization, utility computing and grid computing. cloud computing has fundamentally changed the way computing services are delivered to the customers, but without advancement in these technologies, it was not possible. the cloud computing in its initial phase was not as diverse and as efficient as it is today, but over the years as new bola abimbola cloud computing concept and roots technologies have been introduced and existing technologies have been improved, it has revolutionized how computing services fundamentally work and delivered to customers as new service delivery models have been introduced to ensure access to cloud computing services is seamless and easy while ensuring that it is consistent with client requirements as well. the popularity of cloud computing has increased significantly since its initial market release and many vendors saw opportunity of marketing this technology by offering various cloud computing related services.one of the major reasons why cloud computing has become so popular among business and organizations is because it provides opportunity to access high quality computing without investing in complex infrastructure. the whole the idea of cloud computing service is to simply operations and management of computing services that organizations need to enhance their business processes. compare to grid computing, the dominant distributed computing model before cloud model emerged, cloud computing has clear advantages. 
each service delivery models has its own benefits and limitations and therefore choice of service delivery model is essential while considering to invest in cloud computing for accessing computing resources and benefits offered by cloud computing technology as well.as in cloud computing model depending on whether it is private cloud or private cloud model, many organizations are accessing computing resources. therefore, elasticity is an important concept in context of cloud computing for optimization of resources and enhancing quality of cloud computing services.although cloud security is one of the biggest issues for many organizations to adopt cloud computing, it is clear that major cloud platforms are taking security seriously and working hard to comply with international security compliance policies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/181.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/181.txt new file mode 100644 index 0000000000000000000000000000000000000000..df2048f8398ed7e18a6dfd19a1f9da3efe5ddd09 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/181.txt @@ -0,0 +1 @@ +high-performance computing (hpc) clusters are widely used in-house at scientific and academic research institutions . for some users, the transition from running their analyses on a single workstation to running them on a complex, multi-tenanted cluster, usually employing some degree of parallelism, can be challenging, if not bewildering, especially for users whose role is not predominantly computational in nature. on the other hand, there are more experienced users, who can benefit from pointers on how to get the best from their use of hpc. this ten simple rules guide is aimed at helping you identify ways to improve your utilisation of hpc, avoiding common pitfalls that can negatively impact other users and will also help ease the load (pun intended) on your hpc sysadmin. it is intended to provide technical advice common to the use of hpc platforms such as lsf, slurm, pbs/torque, sge, loadleveler and yarn, the scheduler used with hadoop/spark platform. this ten simple rules guide is aimed at helping you identify ways to improve your utilisation of hpc, avoiding common pitfalls that can negatively impact other users and will also help ease the load (pun intended) on your hpc sysadmin. he was referring to the automatic emails sent out by the department's hpc cluster's job-scheduler, ibm lsf (load sharing facility) -it was configured to automatically email the job-submitting user an email report on job completion. it transpired that a certain user -yes, you've guessed correctly, it was me -submitted a very large number of jobs (one for each entry in the protein data bank), but had not overridden the default setting to email the job report back to the user.there are often one or more login nodes which are reserved to facilitate users logging on to the cluster via ssh and for submitting jobs to the job-scheduler, as well as performing non-intensive tasks, such as editing source-code and job submission scripts (figure1). for example, anaconda is a ready to use distribution of datascience packages together with a package manager called conda for any programming language (most commonly python and r), which is frequently used with linux and often installed on hpc clusters. these parameters should be set according to the resources allocated for the job and will be dependent on the architecture of the hpc system in use. 
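one practical way to set such parameters is to read the allocation from the scheduler's environment instead of assuming a whole node. the snippet below uses slurm's variable name as an example; other schedulers expose equivalents.

import os
import multiprocessing

# size the job's parallelism from what was actually allocated
allocated = os.environ.get("SLURM_CPUS_PER_TASK")
n_workers = int(allocated) if allocated else multiprocessing.cpu_count()

# keep threaded numerical libraries from oversubscribing the allocation
os.environ["OMP_NUM_THREADS"] = str(n_workers)
print(f"using {n_workers} workers")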
ideally, the report should show high % cpu utilisation -the time during which the cpu executed user commands, and a low % system usage -the time during which the system had spent kernel mode executing system processes, e. [incidentally, this system time overhead is the reason care should be taken whilst submitting large numbers of very small jobs. this is simply the time interval between the job's start and finish -if your job is taking a long time to run on hpc, it is possible that something might be awry, and it is worth investigating further. the linux time command can be prepended to individual programs or commands in a batch-script and used to monitor the time taken (real-time, user-time, system-time) for that task to execute.whilst hpc nodes are connected with high-speed network interconnects, the available bandwidth is finite and shared by all of the users, their jobs, the operating system and the cluster-management software. for this reason, hpc systems often have designated nodes -typically called gateway nodes (see figure1) -specifically for file transfers into and out of the hpc cluster, with defined procedures or commands to use.rule 9: be aware of any recharge model! some institutions operate a recharge model whereby the computing department that runs the hpc service bills research groups or departments who use the cluster. in these cases cpu usage is usually charged in cpu/hours for cpu time used -not wall-time requested on job submission -and storage charges are usually billed in, e. hpc jobs are therefore often submitted with a project-code to identify the research group or project, particularly when they are billed for use. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/182.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/182.txt new file mode 100644 index 0000000000000000000000000000000000000000..5fe0927105250d042578a75fab8ce8e32f1e7b18 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/182.txt @@ -0,0 +1 @@ +edsc is built on the event-driven execution model using the publish/subscribe communication paradigm. in the publish/subscribe interaction scheme, components subscribe to events of interest, or to a pattern of events, and are subsequently asynchronously notified by the system when any event published matches their registered interest. in order to incorporate this paradigm into a smart contract platform, the platform design should provide the following basic features to the participating smart contracts and external accounts:• event definition: any external account or smart contract in the system is able to define/register new and unique event types in the system. this is analogous to defining a class in an object-oriented programming paradigm.• event subscription: any smart contract in the system is able to subscribe or unsubscribe to a particular event type that is already defined in the system. at the time of subscription, the subscriber contract may specify additional logic that will be used by the system to evaluate whether to invoke it in response to the event of interest's occurrence in the system.• event publishing: any smart contract is able to publish an event that has already been defined in the system. in order to provision the three fundamental features mentioned above, the smart contract system needs to incorporate the following functionality specific to the event-driven execution model:• event definition maintenance: the event templates are saved immutably in the system. 
this may be achieved in practice by referencing the event definitions on the blockchain itself, similar to how smart contract code is stored on-chain by reference in ethereum.• subscription information maintenance: the subscription information is also saved immutably in the system. this can also be achieved in practice by referencing the subscription information on the blockchain itself, similar to how smart contract code is stored on-chain by reference in ethereum.• event matching: every time a published event is processed, the system determines all the smart contracts which are subscribed to that particular event. the system also evaluates the corresponding subscription logics of all those subscriptions to determine which smart contracts to invoke in response to the publishing event.• event queueing: based on the event matching, the system queues all the matching subscribed smart contracts for execution. since the system is asynchronous, there are no guarantees as to when the subscription triggers will be executed. the system guarantees the queueing of these executions. since the publish/subscribe method is an anonymous and indirect communication paradigm, the system decouples the communicating entities i.e., the smart contracts in space and execution flow:• space decoupling: the publishing and the subscribing smart contracts do not need to know each other since they are not required to address each other for communication. hence, the event publisher does not maintain a record of all the smart contracts which will be evoked in response to its event publication. likewise, the subscriber may subscribe to events from multiple sources without specifying them individually.• time decoupling: there is no provision for the publisher or the subscriber to run within any time constraint. the subscriber execution can be queued for a later time window (depending on future events).• execution flow decoupling: the inherently asynchronous communication decouples execution flow from inter-contract communication. a smart contract is not blocked when sending a notification to an external contract. the system can handle the subscriber execution in response to the notification by running it concurrently or queueing it for later. the subscriber and publisher of events do not have to be synchronized in their execution. their implementation allows three distinct kinds of events to interact with the blockchain network, namely: (i) block event, which occurs when a new block is added to the ledger; (ii) transaction event and; (iii) chaincode event, which can hold conditions for triggering events.• event subscription: any smart contract in the system is able to subscribe or unsubscribe to a particular event type that is already defined in the system.• event matching: every time a published event is processed, the system determines all the smart contracts which are subscribed to that particular event.like event definition, event subscription also occurs through a special type of event that is predefined in the system. smart contracts post this event to signal their desire to subscribe to a particular event type, which is passed as the payload of this predefined subscription event. 
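the definition / subscription / matching / queueing flow can be summarised with a small in-memory sketch, where plain python dictionaries stand in for the on-chain state; all names are illustrative.

event_definitions = {}   # event type -> metadata (stand-in for the definition store)
subscriptions = {}       # event type -> list of (subscriber address, subscription logic)
execution_queue = []     # matched invocations awaiting asynchronous execution

def define_event(event_type, owner):
    event_definitions[event_type] = {"owner": owner}

def subscribe(event_type, subscriber, logic=lambda payload: True):
    subscriptions.setdefault(event_type, []).append((subscriber, logic))

def publish(event_type, payload):
    if event_type not in event_definitions:
        raise ValueError("unknown event type")
    # event matching: evaluate each subscription's logic against the payload
    for subscriber, logic in subscriptions.get(event_type, []):
        if logic(payload):
            # event queueing: only the queueing, not the execution time, is guaranteed
            execution_queue.append((subscriber, event_type, payload))

define_event("price_update", owner="0xfeed")
subscribe("price_update", "0xcontractA", logic=lambda p: p["price"] > 100)
publish("price_update", {"price": 150})
print(execution_queue)   # [('0xcontractA', 'price_update', {'price': 150})]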
every time a smart contract makes a subscription to an event type, an entry against that event type is added in the subscription trie with the subscriber smart contract's address and all the subscription parameters provided.in addition to event definition and subscription/unsubscription events, there are two other special types of events in the system: the transaction event and the deploy event. a transaction event is an event to which every smart contract is subscribed by default and is triggered if the event contains that smart contract's address in its payload. subscription fee this is the fee that any contract which subscribes to this event must pay to the event publisher when it runs in response to the event.the root of the transaction trie, state trie(includes storage trie), event state trie (includes event definition and event subscription trie) and receipts trie is referenced on chain. to further mitigate the risk of event publishers spamming the system, edsc allows subscribers to use variables like the event rate and block rate as well as the subscription logic expression to control their frequency of subscription execution. for instance, when a node observes an event update where financial value can be extracted, the node may send a shortcut message that registers to the event or updates its event registration to boost its priority in the event buffer. there are dedicated messages for event creation, event updates, event subscription, event unsubscription, and event subscription updates. a gas fee is charged for operations such as creating an event, making an event subscription, or updating an existing event subscription. when a node receives a new event message, its protocolmanager module first sends the new message to the event manager for validation, including verifying signatures and checking other constraints and security requirements such as event update rates. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/183.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/183.txt new file mode 100644 index 0000000000000000000000000000000000000000..dbe67b8d3ceb029fe45bcf53bfd20faae63561f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/183.txt @@ -0,0 +1 @@ +a lease is a contract issued by a resource owner to give control to a holder over the protected resource for a certain time duration. this duration is defined using a lease term parameter. a lease term might have any length, from zero to infinity. in practice, however, the lease term is typically set to a limited amount of time. when the lease term expires, the holder usually has to renew the lease.typically, classical distributed systems assume trusted environments in which they rely on the system time sources, like clock_gettime to enforce the lease term. it provides resolution up to nanoseconds and has an extremely low overhead on modern linux systems that use vdso.compared to the classical systems, distributed systems built with tees assume a more privileged attacker who can affect the lease term by manipulating the system time resources. hence, in this paper, we introduce a novel concept of trusted leases to tackle this challenge.• security: it always maintains the lease correctness invariant; that is, the lease duration at the granter must be a superset of the lease duration at the holder. 
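the lease correctness invariant can be pictured with a toy sketch; the two function names follow the text, but the bodies, the guard value and the use of a single monotonic clock for both sides are assumptions made purely for illustration.

import time

GUARD = 0.5            # safety margin in seconds (illustrative)
granter_state = {}     # lease id -> expiry time at the granter
holder_state = {}      # lease id -> expiry time at the holder

def update_lease_client(lease_id, term):
    """granter side: record or extend the lease for `term` seconds."""
    granter_state[lease_id] = time.monotonic() + term
    return granter_state[lease_id]

def update_renew_lease(lease_id, term):
    """holder side: request or renew the lease, keeping a strictly shorter view."""
    granter_expiry = update_lease_client(lease_id, term)
    holder_state[lease_id] = granter_expiry - GUARD
    # invariant: the holder's lease duration is a subset of the granter's
    assert holder_state[lease_id] <= granter_state[lease_id]

def holder_still_holds(lease_id):
    return time.monotonic() < holder_state.get(lease_id, 0.0)

update_renew_lease("lease-1", term=5.0)
print(holder_still_holds("lease-1"))   # true until roughly term - GUARD has elapsed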
then, two core functions are used to maintain a correct t-lease protocol state: update_renew_lease, called by the holder to request a lease and update the lease state, and update_lease_client, called primarily by the granter to update the state of the lease at its own side. next, the lease holder initializes the lease: it sets the requested lease term and the lease identifier. while the lease itself has an infinite term, to activate a lease configuration, each node must exchange a non-infinite lease with a majority of the nodes in the lease group. this system relies on the invariant that the lease duration at the lease holder is shorter than the lease duration at the granter. [figure 8: frequency with which the client can check the t-lease for expiration (interrupt frequency in hz vs. unaccounted cycles in percent; local and remote cases).] because interrupt delivery may cause additional lease requests and affect performance, we measure the average duration of a lease depending on the guard and lease intervals, set to the same value (figure 14). [figure 14: duration of the timer interval with an active lease for the pql case study (native vs. t-lease); other recovered axis labels: duration of lease (sec), granter requests per second, retries per lease update, local/remote/tpm time sources.] \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/184.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/184.txt new file mode 100644 index 0000000000000000000000000000000000000000..9dbabf1a0abf9799e2086f45817f64a59a1d5b30 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/184.txt @@ -0,0 +1 @@ +the complexity of multi-layered it infrastructures such as the internet of things, distributed processing frameworks, databases and operating systems is constantly increasing. to meet the consumers' expectations of fluent service with low response-time guarantees and availability, the service providers rely heavily on high volumes of monitoring data. the massive volumes of data lead to maintenance overhead for the operators and require the introduction of data-driven tools to process the data. a crucial task for such tools is to correctly identify the symptoms of deviation of the current system behaviour from the expected one. due to the large volumes of data, the anomaly detector should produce a small number of false-positive alarms, thus reducing the effort of the operators, while at the same time producing a high detection rate. timely detection allows the prevention of potential failures and increases the opportunity window for a successful reaction from the operator. this is especially important if urgent expertise and/or administration activity is required. the symptoms often appear whenever there are performance problems or system failures and usually manifest as fingerprints within the monitored data: logs, metrics or distributed traces. the monitored system data represent the state of the system at any time point. they are grouped into three categories (modalities): metrics, application logs, and distributed traces.
the metrics are time-series data that represent the utilization of the available resources and the status of the infrastructure. typically they involve measuring of the cpu, memory and disk utilization, as well as data as network throughput, and service call latency. application logs are print statements appearing in code with semi-structured content. they represent interactions between data, files, services, or applications containing a rich representative structure on a service level. service, microservices, and other systems generate logs which are composed of timestamped records. distributed traces chains the service invocations as workflows of execution of http or rpc requests. each part of the chain in the trace is called an event or span. a property of this type of data is that it preserves the information for the execution graph on a (micro)service level. thus, the information for the interplay between the components is preserved.the log data can produce a richer description on a service level since they are fingerprints of the program execution within the service. on the other side, the traces do not have much information on system-level information but preserve the overall graph of request execution. referring to the different aspects of the system, the logs and traces provide orthogonal information for the distributed systems behaviour. building on this observation in this work, we introduce an anomaly detection multi-source approach that can consider the data from both the traces and logs, jointly. we demonstrate the usability of time-aligned log and tracing data to produce better results on the task of anomaly detection as compared to the single modalities as the main contribution to this work. the results show that the model build under the joint loss from both the logs and trace data can exploit some relationship between the modalities. the approach is trainable end-to-end and does not require the building of separate models for each of the modalities. as a second contribution, we consider the introduction of vector embeddings for the spans within the trace. the adopted approach allows the definition of the span vectors as a pooling over the words they are composed of. we refer to these vector embeddings as span2vec. building on this observation in this work, we introduce an anomaly detection multi-source approach that can consider the data from both the traces and logs, jointly.the most common approaches for anomaly detection from log data roughly follows a two-step composition -log parsing followed by a method for anomaly detection. ina multimodal variational autoencoder approach is adopted for effectively learning the relationships among cross-domain data which provide good results for anomaly detection build on the logs and metrics as modalitites. since the log templates can have a different number of tokens, for the uniform representation of the log templates a special token is added, such that each of the logs has an equal number of tokens.represented in this form the spans have very similar representation as to the logs, with additional constraints that the spans are further bounded by the operation executed within the trace. the granularity representation of a log message is on a single time interval, on one side, and the spans span across multiple time stamps. 
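the span2vec idea of representing a span (or a log template) as a pooling over the vectors of its tokens can be sketched as mean pooling. the token vectors below are random stand-ins, not trained embeddings, and the function names are illustrative.

# toy sketch of span2vec-style embeddings: a span (or a log template) is
# represented by pooling the vectors of the tokens it is composed of.
# the token vectors here are random stand-ins, not trained embeddings.
import numpy as np

rng = np.random.default_rng(0)
token_vectors = {}          # token -> d-dimensional vector
D = 16

def token_vec(token):
    if token not in token_vectors:
        token_vectors[token] = rng.normal(size=D)
    return token_vectors[token]

def span2vec(tokens):
    # mean pooling over the tokens of the span or template
    return np.mean([token_vec(t) for t in tokens], axis=0)

span = ["http", "get", "/api/users", "200"]
log_template = ["request", "completed", "with", "status", "<*>"]
print(span2vec(span).shape, span2vec(log_template).shape)   # (16,) (16,)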
the block of log messages is created in a way that, the log messages that come from the start time of the first and the end time of the last span in the window of spans are joined into one block.ntp is utilized for anomaly detection for logs, however, the anomaly detection in the traces require additional anomaly detection procedure. the anomaly detection procedure for the single modality log model considers a log as normal if the prediction for the log is in the next top k logs.baselines the main aim of this work is to demonstrate that the shared information between the logs and traces can improve anomaly detection in comparison to anomaly detection methods build from single modalities. firstly, one can observe that the results from the single modalities methods show that for the logs and traces, individually the approach can provide good results. one explanation of this behaviour is that the granularity of the information from the logs is truncated on the level of the data source with a lower frequency of generation -the trace is harder for the information in the trace to be transferred to the logs. the information that the multimodal method is receiving from the logs when it is aiming to predict the next relevant span complements the information as obtained just from the sequence of spans individually. it uses data from two complementary different modalities describing the behaviour of the distributed system -logs and traces.the results show that the multimodal approach can improve the scores for anomaly detection for multiple modalities in comparison to the single modalities of logs and traces. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/185.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/185.txt new file mode 100644 index 0000000000000000000000000000000000000000..65e18df93ec0ec748219a22920e68f3fa91d1d9a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/185.txt @@ -0,0 +1 @@ +with ever-increasing dataset sizes, remote analysis paradigms are becoming increasingly popular. in such systems (e.g. jurić et al. 2017;taghizadeh-popp et al. 2020;nikutta et al. 2020;stetzler 2020), the users access data and computing resources through science platforms -rich gateways exposing server-side code editing, management, execution and result visualization capabilities -usually implemented as notebooks such as jupyter (kluyver et al. 2016), or zeppelin. a challenge of this model is that the data provider (e.g., an archive facility) now bears both the cost of dataset storage and that of computing resources -including those used for running the users' jupyter notebooks. this cost can balloon quickly, especially on cloud resources: left unmanaged, a 24/7 run for a 100 users reaches $300, 000+ range. this can be reduced by terminating inactive instances, but the price is a poor user experience.in this contribution we present a solution: the ability to checkpoint (freeze) a user's running jupyter notebook server to disk, and restore it to memory on-demand (including on a different host). this c/r functionality can dramatically reduce the cost, while fully maintaining the user experience. it also enables novel capabilities, such as uninterrupted migration of work based on resource needs.in this contribution we present a solution: the ability to checkpoint (freeze) a user's running jupyter notebook server to disk, and restore it to memory on-demand (including on a different host). 
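the checkpoint/restore capability can be sketched with podman's criu-based checkpoint and restore commands. this is a minimal sketch assuming a hypothetical container name and archive path on shared storage; it is not the spawner code of the system described below.

# minimal sketch of freezing and thawing a per-user notebook container with
# podman/criu. container name and archive path are hypothetical; error
# handling and vm allocation are omitted.
import subprocess

CONTAINER = "user-notebook"            # hypothetical per-user container name
ARCHIVE = "/home/shared/checkpoints/user-notebook.tar.gz"

def checkpoint():
    # freeze the running container and export its state to a file on shared storage
    subprocess.run(["podman", "container", "checkpoint",
                    "--export", ARCHIVE, CONTAINER], check=True)

def restore():
    # restore the container from the exported state, possibly on a different vm
    # that mounts the same shared /home
    subprocess.run(["podman", "container", "restore",
                    "--import", ARCHIVE], check=True)

if __name__ == "__main__":
    checkpoint()   # when the user goes inactive
    restore()      # when the user comes back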
with elsa, the user logs into the jupyter-hub aspect of the science platform and starts jupyter on a machine with desired capabilities (e.g., …). the spawner finds a vm where the notebooks run, and starts and stops jupyter on that vm. as kubernetes has no support for pod c/r, we could not use it at this time; instead, our spawner directly allocates one new vm per user from the cloud provider. although each user gets their own vm, per-user jupyter is still run from a container. this i) means the linux distribution of the vm doesn't matter, as long as podman/criu are available, ii) allows us to use the standard notebook server container, iii) makes deployment significantly easier (a simple pull, rather than an os-level install), and iv) allows for secure re-use of vms between different users (as users are sandboxed by their container). isolation between users at the vm level leads to predictable user experience; using vms allows us to add swap, making out-of-memory conditions a "soft" fail; and bare vms are faster to provision relative to speeds from common k8s cluster autoscalers. (the source code is available at https://github.) a shared /home elegantly solves the problem of how to keep users' data identical on an inode level if/when they restore a checkpoint on a different machine (a requirement for checkpointing). given we use do apis in our spawner to manage the vm instances, elsa will not run on other providers out-of-the-box. for on-prem science platforms, our work lets the platform operator checkpoint rather than terminate inactive instances, resulting in significantly better user experience. as we show in table 1, with our c/r work, running a typical user's jupyter instance may cost as little as $200/yr, with no degradation to user experience relative to running 24/7. the second row shows the cost of running for 15% of that time (a typical duty cycle we observed with our users), storing a checkpoint while the user is inactive. finally, we show the cost of running on spot instances, which is now possible as users' work can be transparently migrated to a new instance if the spot vm is to be terminated. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/186.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/186.txt new file mode 100644 index 0000000000000000000000000000000000000000..bdf2b1c21fd1e390c5af8881ab414bd2ecd7f0c4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/186.txt @@ -0,0 +1 @@ +mergeability of data summaries is an important property since it allows parallel and distributed processing of datasets. in general, given two summaries on two datasets, mergeability means that there exists an algorithm to merge the two summaries into a single summary related to the union of the two datasets, simultaneously preserving the error and size guarantees. big volume data streams (or big data) can therefore be compressed and fused by means of a suitable, mergeable sketch data structure. to formally define the concept of mergeability, we shall denote by s() a summarization algorithm, by d a dataset, by ǫ an error parameter and by s(d, ǫ) a valid summary for d with error ǫ produced by s().
the summarization algorithm s() is mergeable if there is an algorithm a that, given two input summaries s(d 1 , ǫ) and s(d 2 , ǫ), outputs a summary s(d 1 ⊎ d 2 , ǫ) (here ⊎ stands for the multiset sum operation ).even though mergeability is a fundamental property of data summary, merging algorithms may not be necessarily simple or may be complex to formally prove correct. in particular, merging algorithms for the problems of heavy hitters and quantiles were not known until a few years ago.regarding heavy hitters, cormode and hadjieleftheriou presented in 2009 a survey of existing algorithms, classifying them as either counterbased or sketch-based. in the concluding remarks, cormode and hadjieleftheriou stated that "in the distributed data case, different parts of the input are seen by different parties (different routers in a network, or different stores making sales). the problem is then to find items which are frequent over the union of all the inputs. again due to their linearity properties, sketches can easily solve such problems. it is less clear whether one can merge together multiple counter-based summaries to obtain a summary with the same accuracy and worst-case space bounds".the first merging algorithm for summaries ob-tained by running the misra-gries algorithm (rediscovered and improved by and and also known as frequent) was published in 2011 . one year later, provided a new merge algorithm for frequent and space saving ,showing that the summaries of these algorithms are isomorphic. the same paper also provided a merging algorithms for greenwald-khanna quantile summaries. later, improved merging algorithms for both misra-gries and space saving summaries were presented . we formally prove that our uddsketch data summary for tracking quantiles is mergeable, design and analyze a corresponding parallel algorithm and provide extensive experimental results showing the excellent scalability and accuracy achieved. this result enables parallel and distributed processing of big volume data streams (or big data), that can be compressed and fused for accurate quantile tracking and analysis.the rest of this paper is organized as follows. we recall related work in section 2. the merge procedure is presented in section 3 and it is formally proved to be correct in section 4. experimental results are provided and discussed in section 5. finally, we draw our conclusions in section 6.we shall denote by {b i k } k the set of buckets of the sketch s i , and by m the maximum number of buckets related to the size of a sketch.the computational complexity of the parallel uddsketch algorithm is therefore o( n p + log p) since each process p i spends o( n p ) to insert its share of the input items in its sketch, and the parallel reduction requires o(log p) (there are log p steps, each one costing o(1)). 
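as an illustration of such a merge algorithm, the following sketch performs the counter-wise merge of two misra-gries (frequent) summaries: counters are summed item-wise and, if more than k counters remain, the (k+1)-st largest counter value is subtracted and non-positive counters are dropped. this is a simplified illustration of the published merge idea, not the code of the cited works.

# sketch of merging two misra-gries (frequent) summaries with at most k counters
# each: sum the counters item-wise, then, if more than k counters remain,
# subtract the (k+1)-st largest counter value and drop the non-positive ones.
from collections import Counter

def merge_misra_gries(s1, s2, k):
    merged = Counter(s1) + Counter(s2)
    if len(merged) <= k:
        return dict(merged)
    # value of the (k+1)-st largest counter
    kth = sorted(merged.values(), reverse=True)[k]
    return {item: c - kth for item, c in merged.items() if c - kth > 0}

s1 = {"a": 5, "b": 3, "c": 1}   # summary of the first sub-stream, k = 3
s2 = {"a": 2, "d": 4, "e": 1}   # summary of the second sub-stream, k = 3
print(merge_misra_gries(s1, s2, k=3))   # {'a': 6, 'b': 2, 'd': 3}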
let i γ : ∆ → z : i γ (x) = ⌈log γ x⌉ denote the function which maps each item x ∈ ∆ to the corresponding bucket in the sketch built by udds-ketch processing d and assume that the sketch can grow unbounded.moreover, for each bucket key k ∈ i γ (∆), the preimage of k under i γ , denoted by i -1 γ (k), is the set of items assigned to the bucket b k , and we can compute the value of a bucket b k as the sum of the multiplicities of its items in the input dataset, i.when the sketch is allowed to grow unbounded, the value of γ and consequently the accuracy of the sketch is not constrained; it can be set arbitrarily and is not modified by uddsketch.in fact, the collapsing procedure of uddsketch is equivalent to a change of the value of γ, which is squared in each collapse operation, and a sketch reconstruction through the mapping function using the new γ value. when a limit of m buckets is imposed to the size of the sketch and that limit is exceeded with the current value of γ, uudsketch squares that value and reconstructs the sketch until the constraint |i γ (∆)| ≤ m is satisfied.the characterization of the sketch as the multiset (i γ (∆), β) continues to hold even if collapsing operations are executed with γ set to the value needed to respect the sketch size constraint, and the sketch remains invariant with regard to the order in which the items are processed or the order in which the collapsing operations are executed, thus proving that uddsketch is permutation invariant when processing insertion-only streams. let d 1 = (∆ 1 , µ 1 ) and d 2 = (∆ 2 , µ 2 ) be two multisets and s 1 and s 2 the sketches produced by uddsketch respectively processing d 1 and d 2 with a limit to the number of buckets, m, and an initial value of γ = γ 0 . denote by s m the sketch obtained by merging s 1 and s 2 on the basis of the uddsketch merge procedure and denote by s g the sketch that uddsketch would produce on the multiset d = (∆, µ) = d 1 d 2 with the same size limit m and the same initial value of γ = γ 0 .t is the sketch that we obtain processing through uddsketch the dataset d in a particular order of insertions and collapses, but we know from lemma 1 that the order of insertions and collapses is not relevant, therefore we can conclude that t = s g which finally proves the thesis s m = s g . let σ 1 and σ 2 be two streams including insertions and deletions of items drawn from the universe set u = ⊂ r + and s 1 and s 2 be the sketches produced by uddsketch processing respectively σ 1 and σ 2 with the sketch size limited to m buckets, and an initial value of γ = γ 0 . denote by s m the sketch obtained by merging s 1 and s 2 on the basis of the uddsketch merge procedure and denote by s g the sketch that uddsketch would produce on the stream σ = σ 1 σ 2 with the sketch size limited to the same number of buckets, m, and the same initial value of γ = γ 0 . the value of γ during the execution of ud-dsketch can only grow due to the collapses of the sketch and its final value depends on the order in which deletions are interleaved with insertions. this particular order of insertions and deletions, in turn, produces a sketch with the same final value of γ that one would obtain by processing only the insertions of the input stream and completely ignoring the deletions. 
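a simplified sketch of the bucket-level merge and collapse just described: counters of buckets with equal keys are summed and, while more than m buckets remain, the sketch is collapsed by squaring γ, which maps a bucket key k to ⌈k/2⌉. both input sketches are assumed to share the same initial γ; this is an illustration, not the authors' implementation.

# simplified illustration of a uddsketch-style merge: buckets with the same key
# are summed, and while the sketch exceeds m buckets it is collapsed by squaring
# gamma, which maps bucket key k to ceil(k / 2).
import math
from collections import Counter

def collapse(buckets, gamma):
    new = Counter()
    for k, count in buckets.items():
        new[math.ceil(k / 2)] += count
    return new, gamma ** 2

def merge(b1, b2, gamma, m):
    merged = Counter(b1) + Counter(b2)          # key-wise sum of bucket counters
    while len(merged) > m:
        merged, gamma = collapse(merged, gamma)
    return merged, gamma

# two sketches built with the same initial gamma over disjoint sub-streams
b1 = Counter({1: 4, 2: 1, 5: 2, 9: 1})
b2 = Counter({2: 3, 6: 2, 10: 1, 12: 1})
print(merge(b1, b2, gamma=1.01, m=4))   # (Counter({1: 8, 3: 4, 5: 2, 6: 1}), 1.0201)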
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/187.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/187.txt new file mode 100644 index 0000000000000000000000000000000000000000..796bd546979c739485c4ad564611adfefc2f9828 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/187.txt @@ -0,0 +1 @@ +the advent of fog and edge computing has prompted predictions that they will take over the traditional cloud for information processing and knowledge extraction at a large scale. notwithstanding the fact that fog and edge computing have undoubtedly large potential, these predictions are probably oversimplified and wrongly portray the relations between fog, edge and cloud computing. concretely, fog and edge computing have been introduced as an extension of the cloud services towards the data sources, thus forming the computing continuum.the computing continuum enables the creation of a new type of services, spanning across distributed infrastructures, for au-tonomous vehicles, smart cities, and content delivery, among other applications. these services have a large spectrum of requirements, burdensome to meet with "distant" cloud data centers. for instance, they may need low-latency connections for fast decision making close to the data sources and substantial computing resources for complex data analysis. the computing continuum provides a vast heterogeneity of computational and communication resources, which have the potential to meet these demands.the heterogeneity of the computing continuum raises multiple application management challenges, such as where to offload an application from the cloud to the fog or to the edge. these issues primarily concern the large diversity of the devices, which range from single-board computers such as raspberry pis to powerful multi-processor servers. this poses the following dilemma of many practitioners and researchers: should we use devices accessible with low latency and with limited resource availability, or a high-performance cloud at the expense of high communication delay?to answer this question it is essential to characterize the performance of the resources. existing literature , , including the defog benchmark suite, addresses this problem by conducting performance analysis of cloud services and to some extent of edge infrastructures. nevertheless, these approaches (i) consider the edge and the cloud resources in isolation, (ii) provide only quantitative analysis of the performance without offloading recommendations, (iii) evaluate a limited number of devices, and (iv ) do not consider the environmental impact in terms of co 2 emissions for executing the applications.we present in this article a performance characterization and an analysis of the co 2 emissions of the resources across the computing continuum. our main goal is to support the decision process for offloading an application to fog or edge resources by considering the application characteristics. for this purpose, we deployed a real testbed named carinthian computing continuum (c3 ) that aggregates a large set of heterogeneous resources. we base the analysis on three complementary applications widely utilized by industry and research: video encoding, machine learning and in-memory data analytics. we conclude by providing recommendations on where to compute applications across the computing continuum. we employ a bootstrapping script that automatically configures the resources in the testbed 3 . 
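the offloading dilemma above (low-latency but resource-limited devices versus a powerful but distant cloud) can be made concrete with a toy decision rule over per-layer profiles. all numbers below are invented placeholders, not measurements from the c3 testbed.

# toy illustration of the offloading trade-off: pick the layer with the lowest
# total time (transfer + execution), or the one minimising transferred bytes
# when network traffic is the main concern. the numbers are invented.
profiles = {
    # layer: (transfer_s, execution_s, transferred_mb)
    "edge":  (0.5, 30.0, 50),
    "fog":   (1.0, 12.0, 120),
    "cloud": (4.0,  4.0, 400),
}

fastest = min(profiles, key=lambda l: profiles[l][0] + profiles[l][1])
least_traffic = min(profiles, key=lambda l: profiles[l][2])
print("lowest total time:", fastest)            # cloud in this toy example
print("least network traffic:", least_traffic)  # edge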
table 1 summarizes the resource characteristics of the c3 testbed. we classify the resources in the c3 testbed into three layers: cloud layer, fog layer and edge layer. figure 2 depicts the average encoding time and transfer time, from the video source (located at the university of klagenfurt) to the encoding device or instance, for a single raw video segment in the three resolutions. we observe that the older-generation single-board computers (raspberry pi 3b) have a significantly higher encoding time than the other resources. however, the raspberry pi 3b devices provide lower transfer times than the cloud instances and are suitable for video-on-demand services employing offline encoding. the remaining cloud and fog resources showed similar video encoding performance in the range between 0.5 s to 1. overall, the egs achieved the lowest encoding and transfer time due to its low utilization rate and its high computing and networking capabilities. we formulate the recommendations based on a systematic performance and carbon footprint analysis of a selected set of applications on a heterogeneous set of devices and cloud instances across the computing continuum. our results revealed that to reduce the network traffic over the computing continuum it is recommended to offload to edge and fog resources, while we advocate the cloud for lower execution times. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/188.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/188.txt new file mode 100644 index 0000000000000000000000000000000000000000..9f8b9b41c584f76d46436eefab32a0e50d1e51a7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/188.txt @@ -0,0 +1 @@ +it is not difficult to point out the weaknesses of neural nets and deep learning. simply put, neural nets are too weak to support general ai. they receive inputs (numbers), perform simple arithmetic operations and produce outputs (numbers). consequently, they provide only primitive services such as object classification. although object classification has some interesting applications, the power of classification is in fact not much compared to all the complex services a human can provide. complex services - making a coffee, withdrawing money from an atm, etc. - are not well supported by neural nets. in addition, their classification services are not perfect, as they are only approximate. a human can provide complex services to others. the notion of services and how to complete them thus play a key role for an ai to imitate a human.
in other words, the right move towards general ai would be to find (a) a mathematical notion for services, and (b) how an ai automatically generates a strategy for completing the service calls.fortunately, japaridze developed a theory for services/games involving complex ones. computability logic (col) - , is an elegant theory of (multi-)agent services. in col, services are seen as games between a machine and its environment and logical operators stand for operations on games. it understands interaction among agents in its most general -game-based -sense.in this paper, we discuss a web programming model based on col and implement an ai atm. an ai atm is different from a regular atm in that the former automatically generates a strategy for a service call, while the latter does not.we assume the following in our model:• each agent corresponds to a web site with a url. an agent's knowledgebase(kb) is described in its homepage.• agents are initially inactive. an inactive agent becomes activated when another agent invokes a query for the former.• our model supports the query/knowledge duality, also known as querying knowledge. that is, knowledge of an agent can be obtained from another agent by invoking queries to the latter.to make things simple, we choose cl9-a fragment of col -as our target language. cl9 includes sequential operators: sequential disjunction ( ▽ ) and sequential conjunction (△) operators. these operators model knowledgebase updates. imagine an atm that maintains balances on kim. balances change over time. whenever kim presses the deposit button for $1, the machine must be able to update the balance of the person. this can be represented by balance($0)△blance($1)△ . . . △.in this paper, we present cl9 φ which is a web-based implementation of cl9. this implementation is straightfoward and its correctness is rather obvious. what is interesting is that cl9 is a novel web programming model with possible database updates. it would provide a good starting point for future high-level web programming. for a subformula f ′ of the above f ω , we say that ω is the matching environment of both f ′ and f .the class of cl9 φ -formulas is defined as the smallest set of expressions such that (a) for any cl9-formula f and any agent ω, f ω are in it and, (b) if h and j are in it, then so are ¬h, h ∧ j, h ∨ j, h → j.3 given a cl9 φ -formula j, the skeleton of j -denoted by skeleton(j) -is obtained by replacing every occurrence f ω by f . 
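the balance($0)△balance($1)△... example above can be pictured as a sequential conjunction whose active (underlined) component advances by a switch move on every deposit. the toy sketch below models only that bookkeeping, under assumed class and method names, and not cl9's game semantics.

# toy bookkeeping sketch for the balance($0) △ balance($1) △ ... example: a
# sequential conjunction is modelled as a list of components plus the index of
# the currently active (underlined) one, advanced by a switch move on deposit.
class SequentialConjunction:
    def __init__(self, components):
        self.components = components
        self.active = 0                      # index of the underlined component

    def switch(self):
        # a switch move makes the next component the active one
        if self.active + 1 < len(self.components):
            self.active += 1

    def current(self):
        return self.components[self.active]

balance = SequentialConjunction(["balance($0)", "balance($1)", "balance($2)"])
balance.switch()                              # kim deposits $1
print(balance.current())                      # balance($1)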
and f is uniformly valid iff there is an hpm h, called a uniform solution for f , such that h wins f * for every interpretation * .the elementarization of a cl9-formula f means the result of replacing in the capitalization of f every surface occurrence of the form g 1 ⊓ .choose: h → f , where h is the result of replacing in f a surface occurrence of a subformula g 1 ⊔ .switch: h → f , where h is the result of replacing in f a surface occurrence of a subformula.match: h → f , where h is the result of replacing in f two -one positive and one negative -surface occurrences of some general atom by a nonlogical elementary atom that does not occur in f .a surface occurrence of a subexpression in a given hyperformula f means an occurrence that is not in the scope of a choice operator, such that, if the subexpression occurs within a component of a sequential subformula, that component is underlined or occurs earlier than the underlined component.the elementarization f of a hyperformula f is the result of replacing, in the capitalization of f , every surface occurrence of the form g 1 ⊓ . ⊔ g n by ⊥, every positive surface occurrence of each general literal by ⊥, and every surface occurrence of each hybrid atom by the elementary component of that atom.a hyperformula f is said to be balanced iff, for every hybrid atom p q occurring in f , the following two conditions are satisfied:. the elementary atom q does not occur in f , nor is it the elementary component of any hybrid atom occurring in f other than p q ., where h has two -a positive and a negative -active surface occurrences of some hybrid atom p q , and f is the result of replacing in h both occurrences by p .if e is derived by match • from h through replacing the two (active surface) occurrences of a hybrid atom p q in h by p , then the machine finds within ω and copies, in the positive occurrence of p q , all of the moves made so far by the environment in the negative occurrence of p q , and vice versa. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/189.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/189.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf77046fd9b4021bc1569b0bdabc5b931399df3a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/189.txt @@ -0,0 +1 @@ +distributed computing has attracted significant interests as it enables complex computing tasks to process in parallel across many computing nodes to speed up the computation. however, due to massive data and limited communication resources, the distributed computing systems suffer from the communication bottleneck . many previous works have shown that using coding can greatly reduce communication load (see, e.g., - ).in li et al. considered a mapreduce-type framework consisting of three phases: map, shuffle and reduce, and proposed the coded distributed computing (cdc) scheme. in the cdc scheme, k servers first map their stored files into intermediate values in the map phase, and then based on the mapped intermediate values, the servers multicast coded symbols to other servers in the shuffle phase, and finally, each server computes output functions based on the local mapped intermediate values and the received coded symbols. 
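the redundant storage underlying such schemes can be illustrated with a cdc-style symmetric file placement, in which each batch of files is stored at a distinct r-subset of the k servers so that every intermediate value is mapped by exactly r servers. the coded shuffle itself is omitted, and the parameter names are illustrative.

# simplified illustration of a cdc-style symmetric file placement: with k nodes
# and computation load r, files are assigned in equal batches to every r-subset
# of nodes, so each intermediate value is available at exactly r nodes.
from itertools import combinations

def symmetric_placement(k, r, files_per_batch=1):
    nodes = range(k)
    placement = {node: [] for node in nodes}
    file_id = 0
    for subset in combinations(nodes, r):     # one batch of files per r-subset
        for _ in range(files_per_batch):
            for node in subset:
                placement[node].append(file_id)
            file_id += 1
    return placement, file_id                 # file_id == files_per_batch * C(k, r)

placement, n_files = symmetric_placement(k=4, r=2)
print(n_files)                                # 6 files for k=4, r=2
print(placement[0])                           # files mapped by node 0: [0, 1, 2]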
the cdc scheme was generalized to the cascaded case where each reduce function is computed by s ≥ 1 servers, which this work is supported by nsfc grant nsf61901267.is helpful in reducing the communication load of the nextround data shuffling when the job consists of multiple rounds of computations. based on the cdc scheme of case s = 1, an asymmetric coded distributed computing (acdc) scheme was proposed in which allows a set of servers serving as "helper" to perform map and shuffle operations, but not reduce operation. the acdc scheme achieves the minimum execution time when the total number of computing servers and size of storage memories are sufficiently large. this is, however, impractical in some real distributed systems which only have limited computing and storage resources.in this paper, we try to answer the following questions. given a mapreduce-type task with an arbitrary amount of computing servers and storage memories, 1) is it always good to use all available computing servers? if not, how many servers should be exactly used? 2) how to efficiently utilize the storage memories and allocate files to servers? 3) how to efficiently exchange information among computing servers? we answer these questions by establishing an optimal coding scheme and resource allocation strategy. in more detail, we first show that neither the cdc nor acdc scheme is optimal, and whether the cdc or acdc scheme is better depends on system parameters (e.g., number of computing servers and size of storage memories) and task parameters (e.g., number of input files). then, we propose a hybrid coding scheme for the case s ≥ 1 by combining the ideas of cdc and acdc, and show that this hybrid scheme strictly outperforms cdc and acdc. the generalized acdc scheme for s ≥ 1 is similar in spirit to the coded caching schemes in , . by deriving an information-theoretic converse on the execution time, we prove that for any mapreduce task using a weakly symmetric reduce assignment which includes the assignments of cdc and acdc as special cases, our scheme achieves the minimum execution time. the optimality result holds for an arbitrary amount of computing servers and storage memories.2) shuffle phase: a message x k ∈ f 2 k , for some k ∈ n, is generated by each node k ∈ k, as a function of intermediate values computed locally, i. denote w k ⊆ q as the assignment indices of reduce functions on the node k ∈ k, with k∈k w k = q.note that here only the nodes in k s produce reduce functions, and when k s = k, the reduce design w symmetric turns to be the same as that in the cdc scheme. , k s }}, and l * 2 (r 2 , s, k s ) is the lower convex envelope of the points {(r 2 , l 2 (r 2 , s, k s )) : r 2 ∈ {0, .divide k nodes into two disjoint sets k s and k h , with k s ⊆ k and k h ⊂ k. consider a distributed computing task with n input files, q reduce functions, a file placement {m k } k k=1 and a reduce design {w k } k k=1 that uses k nodes.let k h |k h | and k s |k s |. 
note that k h and k s are fixed once the reduce design {w k } k k=1 is given, independent of the map and shuffle operations.for any scheme with a file placement {m k } k k=1 , let n 1 be the assignment indices of files on nodes in k s , but not on any node in k h , i.let b j,1 be the number of files which are stored at j nodes in k s , but not at any node in k h , then we have |n similar to, we introduce an enhanced distributed computing system by merging all nodes in k h into a super node such that all files in n 2 can be evenly mapped by nodes in k h in parallel, and the mapped intermediate values can be shared without data shuffle.for this enhanced system, let a 1 j,d be the number of intermediate values that are known by j nodes in k s , not mapped by the super node, and needed by (but not available at) d nodes; a 2 j,d be the number of intermediate values that are mapped both by j nodes in k s and the super node, and needed by (but not available at) d nodes in k s . the lower bound in (26) is valid for all schemes with any file placement {m k } k k=1 and reduce design {w k } k k=1 . although (26) has a form similar to the time sharing scheme, the α is defined in (25) and can not be changed once {w k } k k=1 and {m k } k k=1 are given. since each node k ∈ k s requires the intermediate values v q,n , for all q ∈ w k , n / ∈ m k , and.for the general case s ≥ 1, recall that the reduce design w symmetric assigns all q reduce functions symmetrically to nodes in k s , and each node k ∈ k s computes the reduce functions whose indices are in the batch d p if k ∈ p ⊆ k s . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/19.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/19.txt new file mode 100644 index 0000000000000000000000000000000000000000..02f51c93ffdaa0c726415776c4ce0f6f1003e846 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/19.txt @@ -0,0 +1 @@ +there is increased attention to the problem of learning generalist agents (as opposite to specialist) in a way that the same representation can be used in a range of tasks, even if it does not excel at any specific task . while a specialist should be expected to excel at its one task, a generalist is expected to be good at many problems. in this paper, we focus on building a generalist model for processing a special type of short text: question.the development of online communities produces a massive amount of text every day. for example, in the question domain, with the rise of commercial voice assistants such as siri and alexa and communities such as quora, numerous questions are asked on a daily basis. processing these questions can provide a new perspective on understanding communities and people's interests.in this paper, we define the generalist model as a question-processing model that targets analysing the semantic and syntactic information in the question. more specifically, this generalist model can process the questions in terms of question topic classification, equivalent question recognition, and question answering, which will be explained in section 2. some state-of-the-art deep learning models like transformer are widely used in natural language processing (nlp). they resulted in leading performance for various tasks . train a language model requires lots of training data. therefore, researchers had to create pre-trained language models using large-scale unsupervised tasks and then fine-tune them with labelled task-specific data. 
however, labelled data for a specific task are always limited and hard to obtain. besides, a language model can have a size of millions or billions of parameters. it is usually expensive to train and use a separate network for each task. a generalist model can help address these problems by applying multi-task learning, a learning approach that improves generalisation by adding inductive bias such as tasks and domain information .there are two main strategies for multi-task learning. one standard approach is adding extra tasks, also referred to as auxiliary tasks, to improve the performance of the target task. empirically, adding auxiliary tasks to a pre-trained network is more similar to transfer learning, which improves primary tasks with additional tasks. another is learning all the tasks jointly without identifying the primary task so that all the tasks can achieve balanced performance, which can be leveraged for training a generalist agent.we fine-tune the pre-trained language model with all the tasks jointly without identifying primary and auxiliary tasks. these tasks share the same domain, which is referred to as inductive bias multi-task learning . research shows that multi-task learning and pre-trained language models are complementary and can be combined to generate better performance on learning text representations.there are many different types of tasks included in multi-task natural language understanding. for example, single sentence classification like sentiment analysis, pairwise classification like natural language inference, and regression task like sentence similarity. mt-dnn trains their multi-tasking model with the transformer encoder and task-specific layer so that it can apply to classification and regression tasks. to adapt to various tasks, some researchers re-frame all the datasets into the same format. mqan formulates all the datasets into question answering over context. t5 creates a sequence-to-sequence format for the tasks. all these models focus on general language understanding tasks like glue , and decanlp .in contrast, we focus on a range of different tasks for processing questions. and we report here on a generalist network called qbert to solve three processing tasks we defined in the question domain. qbert intends to work as a "generalist" language model that can perform multiple question tasks rather than a "specialist" who is only trained to maximise the performance on one specific task.qbert is based on sentence-bert (sbert) , a siamese bert (bidirectional encoder representations from transformers) that projects the sentences into high-dimensional vector space. this process is known as embedding. the sentence embeddings with similar semantic meanings are close to each other in the vector space. note that our intention is not to design a new algorithm but to fine-tune sbert in a multi-task way so that the same representations can be used for processing questions in multiple ways. after fine-tuning sbert, the embeddings generated from the input sequence can be used for both classification and retrieval tasks.a previous study on the question-related multi-tasking model shows that the training curriculum is critical. they reported that one certain curriculum could obtain a balanced performance on all the tasks. however, one of the limitations of the previous study is that the model lacked consistency on different question tasks. reference performed topic classification with a single bert structure, others with siamese bert. 
to improve this, we re-frame the single sentence classification into a retrieval task.during inference, qbert produces the representation of the input sequence without any task-specific modules. instead, it contains a threshold filter to determine the cosine similarity of the embedding pairs. compared to the standard multi-task structure, reducing task-specific layers simplifies the complexity of the network. the network shares all the weights between tasks, also known as hard parameter sharing. more details of qbert will be explained in section 3.after that, we compare qbert with sbert and the single-task version of sbert in section 4. the results in section 5 also show how the training curriculum affects the performance of qbert. more specifically, this generalist model can process the questions in terms of question topic classification, equivalent question recognition, and question answering, which will be explained in section 2. empirically, adding auxiliary tasks to a pre-trained network is more similar to transfer learning, which improves primary tasks with additional tasks. for example, single sentence classification like sentiment analysis, pairwise classification like natural language inference, and regression task like sentence similarity. qbert intends to work as a "generalist" language model that can perform multiple question tasks rather than a "specialist" who is only trained to maximise the performance on one specific task.qbert combines 3 different types of tasks: question topic classification, equivalent question recognition, and question answering. these tasks target common natural language understanding problems such as single sentence classification, pairwise classification, and information retrieval.question topic classification (qt): given a question, the model labels the topic of the question. in classification, the model aims at classifying if the question pairs are similar or not, and based on the outcome, retrieve all similar questions from a question corpus with the given question.the three question tasks we defined in the previous section include three kinds of machine learning tasks: single sentence multiclass classification, pairwise classification, and information retrieval. to perform these three tasks with one siamese model, we consider the topic classification as pairwise classification by taking the (question, topic) as the pairwise input.we apply two different loss functions for different types of data: online contrastive loss for binary classification tasks that have both positive and negative sample, and multiple negatives ranking loss for information retrieving datasets that does not contain positive nor negative label. qbert calculates the d cosine (u, v ), the cosine similarity between embeddings u and v , and applies different similarity thresholds for each task to determine if two sequences are related in terms of topic, equivalent question, or corresponding answer. for example, a question might be unique in the corpus so that the closest question to the given question is not equivalent to the given question if it has a smaller cosine similarity than the threshold; or a question might not have a high-confidence answer from the candidate corpus, the closest candidate with a cosine similarity smaller than the threshold will not be considered as the right answer. 
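the threshold filter can be sketched directly on top of cosine similarity, with one threshold per task. the vectors and threshold values below are placeholders, not trained qbert outputs.

# sketch of a per-task threshold filter: embeddings are compared with cosine
# similarity and a task-specific threshold decides whether a candidate counts
# as the same topic, an equivalent question, or a valid answer.
import numpy as np

def cosine(u, v):
    return float(np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v)))

THRESHOLDS = {"topic": 0.5, "equivalent_question": 0.7, "answer": 0.6}

def related(u, v, task):
    return cosine(u, v) >= THRESHOLDS[task]

q = np.array([0.9, 0.1, 0.3])          # placeholder question embedding
candidate = np.array([0.8, 0.2, 0.4])  # placeholder candidate embedding
print(related(q, candidate, "equivalent_question"))   # True for these vectors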
although qbert-rr does not excel in any task compared to the single task model, it is able to generate a representation that can be used to perform a range of question tasks.in this paper, we propose a generalist model to process questions in a variety of tasks, namely question topic classification, equivalence question recognition, and question answering. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/190.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/190.txt new file mode 100644 index 0000000000000000000000000000000000000000..e9440d3f3c2817292f66132cd10ef624ef099c39 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/190.txt @@ -0,0 +1 @@ +the two primary models of locality, local and congest, share most of the same features: the nodes are connected in the form of an undirected graph, time proceeds in synchronous rounds, and in each round, each node can exchange different messages with each of its neighbors. the difference is that the messages can be of arbitrary size in local, but only logarithmic in congest. a question of major current interest is to what extent message sizes matter in order to achieve fast execution.random sampling is an important and powerful principle with extensive applications to distributed algorithms. in its basic form, the nodes of the network compute their random samples and share it with their neighbors in order to reach collaborative decisions. when the samples are too large to fit in a single congest message, then the local model seems to have a clear advantage. the goal of this work is to overcome this handicap and derive equally efficient congest algorithms, particularly in the context of coloring problems.graph coloring is one of the most fundamental topics in distributed computing. in fact, it was the subject of the first work on distributed graph algorithms by linial . the task is to either color the vertices or the edges of the underlying communication graph g so that adjacent vertices/edges receive different colors. the most basic distributed coloring question is to match what is achieved by a simple centralized algorithm that colors the vertices/edges in an arbitrary order. thus, our primary focus is on the (∆ + 1)-vertex coloring and the (2∆ -1)-edge coloring problems, where ∆ is the maximum degree of g.randomized distributed coloring algorithms are generally based on sampling colors from the appropriate domain. the classical and early algorithms for vertex coloring, e.g. , involve sampling individual colors and operate therefore equally well in congest. the more recent fast coloring algorithms, both for vertex and edge coloring , all involve a technique of schneider and wattenhofer that uses samples of up to logarithmic number of colors. in fact, there are no published sublogarithmic algorithms (in n or ∆) for these coloring problems in congest, while there are now poly(log log n)-round algorithms in local. a case in point is the (2∆ -1)-edge coloring problem when ∆ = log 1+ω (1) n, which can be solved in only o(log * n) local rounds . the bottleneck in congest is the sampling size of the schneider-wattenhofer protocol.we present here a technique for sampling a logarithmic number of colors and communicating them in only o(1) congest rounds. we apply the technique to a number of coloring problems, allowing us to match in congest the best complexity known in local.the sampling technique is best viewed as making random choices with a limited amount of randomness. 
this is achieved by showing that sampling within an appropriate subfamily of all color samples can retain some of the useful statistical properties of a fully random sample. it is inspired by newman's theorem in communication complexity , where dependence on shared randomness is removed through a similar argument.we apply the sampling technique to a number of coloring problems where the nodes/edges to be colored have a large slack : the number of colors available exceeds by a constant fraction the number of neighbors. we particularly apply the technique to settings where the maximum degree ∆ is superlogarithmic (we shall assume ∆ = ω(log 1+1/ log * n n)).we obtain a superfast o(log * ∆)-round algorithm for (2∆ -1)-edge coloring when ∆ = ω(log 1+1/ log * n n). independent of ∆, we obtain a poly(log log n)-round algorithm. this shows that coloring need not be any slower in congest than in local.we obtain similar results for vertex coloring, for the same values of ∆ (∆ = ω(log 1+1/ log * n n)). we obtain an o(log * ∆)-round algorithm for (1 + ǫ)∆-coloring, for any ǫ > 0. for graphs that are locally sparse (see sec. 2 for definition), this gives a (∆ + 1)-coloring in the same time complexity. matching results also hold for the distance-2 coloring problem, where nodes within distance 2 must receive different colors. within o(log ∆) rounds we are then in the setting where the maximum uncolored degree of each node is logarithmic.if the nodes are all able to try θ(log n) colors in o(1) rounds, and all colors have an independent, ω(1) probability of success, o(1) rounds suffice to color all nodes w. the o(log * n) algorithms work through increasing the ratio of slack to uncolored degree, trying more and more colors as this ratio increases, allowing nodes to try θ(log n) colors each with constant probability over the course of o(log * n) rounds.however, all these algorithms have nodes send θ(log n) colors during the algorithm's execution, which requires θ(log n • log ∆) bits, i.while θ(log n • log ∆) bits are needed to describe an arbitrary choice of θ(log n) colors in a color space of size θ(∆), being able to describe any choice of θ(log n) colors can be unnecessary.with a shared source of randomness, instead of sending log ∆ bits to specify a color, a node can use the shared random source as a source of random colors and send indices of colors in the random source.let us apply this with ψ v , the set of colors not currently used by neighbors of v, and t good , the set of colors that are neither already used nor tried in this round by nodes adjacent to v.therefore, assuming that the above holds and that there are at least x colors in s iv ∩ ψ v , when v picks x random colors in s iv ∩ ψ v , the colors picked each have a chance at least 1/6 of being in t good . let x = ǫ∆•log i/ log * n n 2(1+ǫ)cc log n denote the number of colors tried in our application of multitrials. since ∆ ≥ log 1+1/ log * n n and x ≥ ǫ 2(1+ǫ)cc log (i+1)/ log * n n, we have q • x ∈ ω(log n). 
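the limited-randomness trick, in which all nodes derive the same pseudo-random sequence of candidate colors from a shared seed and then communicate only short indices into that sequence instead of the colors themselves, can be sketched as follows; the seed, palette size and sequence length are illustrative assumptions.

# sketch of the shared-randomness sampling idea: all nodes derive the same
# pseudo-random sequence of candidate colors from a shared seed, so a node can
# communicate a sampled color by sending its (short) index in the sequence
# rather than the color itself.
import random

def shared_color_sequence(seed, palette_size, length):
    rng = random.Random(seed)                 # same seed -> same sequence everywhere
    return [rng.randrange(palette_size) for _ in range(length)]

SEED, PALETTE, LENGTH = 42, 1 << 20, 4096     # ~1M colors, 4096-entry shared table

sequence = shared_color_sequence(SEED, PALETTE, LENGTH)

# sender: sample a color by picking a random index (12 bits instead of 20)
index = random.randrange(LENGTH)
# ... index is what gets sent in the small congest message ...
# receiver: recover the very same color from its own copy of the sequence
assert shared_color_sequence(SEED, PALETTE, LENGTH)[index] == sequence[index]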
its repeated application yields that after the first log * n -1 first phases of this step, each node is either already colored or tries ω(log n) colors in each run of multitrials, which colors all remaining nodes w.lower ∆ and concluding remarks when ∆ ∈ o(log 1+1/ log * n n), a simple use of the shattering techniquetogether with the recent deterministic algorithm of(using o(log 2 c log n) rounds with o(log c) bits to compute a degree+1 list-coloring of a n-vertex graph whose lists are subsets of) is enough to solve the problem in o(log 3 log n) congest rounds, which combined with our previous o(log * (n)) algorithm for ∆ ∈ o(log 1+1/ log * n n) means there exists an algorithm for all ∆ that solves the (1 + ǫ)∆ coloring problem in o(log 3 log n) congest rounds w. the representative sets and the slack at the edges' disposal further allow us to sample not just θ(log n/ log ∆) colors (represented in log ∆ bits each) in o(1) rounds but θ(log n) colors by sampling pseudoindependent colors. as before, the representative sets guarantee that for any uncolored edge e, whatever colors other edges adjacent to e are trying, the chosen representative set s ie has a large intersection with the set of unused and untried colors, as long as this set represents a constant fraction of the color space (which slack and a good choice of x guarantee).the first results are based on the observations that with slack of ∆ log (c) n (where log (c) n is the c-iterated logarithm), it suffices to run multitrial for o(c) rounds to reduce the uncolored degree to o(log n), and that when ∆ = ω(log 1+1/c ′ n), if suffices to run multitrial for o(c ′ ) rounds to color all remaining nodes in the last phase of our algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/191.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/191.txt new file mode 100644 index 0000000000000000000000000000000000000000..569177130c924f6847602afaf8b7b31db021b980 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/191.txt @@ -0,0 +1 @@ +in this paper, we study gossip in smartphone peer-to-peer networks, an interesting emerging networking platform that makes use of the peer-to-peer libraries included in standard smartphone operating systems (for examples of these networks in practice, see: , , , , , , ). we begin by improving the best-known synchronous gossip algorithms in this setting, and then build on these results to describe and analyze the first efficient asynchronous solution. the model in which we study this latter algorithm captures the interfaces and guarantees of an actual peer-to-peer networking library used in ios, meaning that our gossip solution can be directly implemented on commodity iphones. to emphasize this practicality, in appendix d we provide the swift code that implements this algorithm in ios-a rare instance in the study of distributed algorithms for wireless networks in which the gap between theory and practice is minimal.below we briefly summarize the models we study and the relevant existing bounds in these models, before describing the new results proved in this paper.the mobile telephone model (mtm). the mobile telephone model (mtm) extends the well-studied telephone model of wired peer-to-peer networks (e.g., , , , , , ) to better capture the dynamics of the peer-to-peer network libraries implemented in existing smartphone operating systems. 
in recent years, multiple distributed algorithm problems have been studied in the mtm setting, including: rumor spreading , load balancing , leader election , network capacity , and gossip , .as we elaborate in section iii, in the mtm, time proceeds in synchronous rounds. at the beginning of each round, each wireless device (which we will call a node) can advertise a small amount of information to its neighbors in the peer-topeer network topology (defined by an undirected graph). after receiving advertisements, nodes can attempt local connections. in more detail, in each round, each node can send and accept at most one connection proposal. if a node u's proposal is accepted by neighboring node v, then u and v can perform a bounded amount of reliable communication using this connection before the round ends.this scan-and-connect network architecture-in which nodes can broadcast small advertisements to all of their neighbors, but form pairwise connections with only a limited number at a time-is a defining feature of existing smartphone peer-to-peer libraries. in the peer-to-peer libraries that depend on bluetooth, for example, the advertisements are implemented as low energy beacons that contain at most tens of bytes, whereas the pairwise connections are implemented as reliable, high throughput links that can achieve up to 2 mbits/sec . these libraries, therefore, allow devices to broadcast advertisements to all neighbors, but severely restrict the number of concurrent pairwise connections allowed. in ios, for example, this limit is 7 (the mtm typically reduces this bound to 1 to simplify the model description and analysis).mobile telephone model vs. classical telephone model. the mtm can be understood as a modification of the classical telephone model of peer-to-peer networks , , , , , . the mtm differs from its predecessor in two ways: (1) it allows nodes to broadcast small advertisements to all neighbors; and (2) it bounds the numbers of concurrent connections allowed at each node. as elaborated in , , this second difference prevents existing telephone model results from applying to the mobile telephone setting, as the best-known telephone model analyses specifically depend on the ability of nodes to service an unbounded number of incoming concurrent connections (the standard analysis of push-pull rumor spreading, for example, depends on the ability of many nodes to simultaneously pull the rumor from a common informed neighbor). on the other hand, the addition of advertisements to the mtm means that results in this new model do not apply to the classical telephone setting, which not include this behavior. fundamentally new techniques are needed to study the mtm.the asynchronous mobile telephone model (amtm). the mobile telephone model includes synchronized rounds. this assumption simplifies analyses that probe the fundamental capabilities of scan-and-connect style peer-to-peer networks. it also introduces, however, a gap between theory and practice, as real smartphone peer-to-peer networks are not synchronized. to help close this gap, in , the authors introduced the asynchronous mobile telephone model (amtm), which, as we elaborate in section iv, eliminates the synchronous round assumption from the mtm, and allows communication events to unfold with unpredictable delays, controlled by an adversary. 
to increase the practicality of the amtm, the authors of also provide a software wrapper around the network libraries offered in ios that matches the interface from the formal specification of the amtm-simplifying the task of directly implementing algorithms analyzed in the amtm on iphones.existing results. work on the mtm began with , which studied rumor spreading, and described a strategy that uses a 1-bit advertisement to compensate for connection bounds to spread a rumor in at most o((1/α) log2 n log 2 ∆) rounds, with high probability, in a network with n nodes, maximum degree ∆, and vertex expansion α (see section ii). the paper also proved that there exist graphs with good graph conductance, φ, for which efficient rumor spreading is impossible. this creates a separation from the classical telephone model where both vertex expansion and conductance are known to be good measures of the ability to spread a rumor efficiently in a graph. in the classical model, for example, the canonical push-pull rumor spreading strategy requires θ((1/α) log 2 n) rounds for graphs with vertex expansion α , and θ((1/φ) log n) rounds for graphs with conductance φ .the more general problem of gossiping k rumors in the mobile telephone model was first studied in , which described an algorithm that spreads the rumors in o((k/α) log 5 n) rounds, 1 with high probability. this algorithm was one-shot, in the sense that it cannot accommodate on-going rumor arrivals, or detect when it has terminated. in recent work , a simpler gossip algorithm was described and analyzed that improves 1 in , the algorithm is listed as requiring o((k/α) log 6 n) rounds, but that result assumes a single bit advertisements in each roundrequiring devices to spell out control information over many rounds of advertising. to normalize with this paper, in which tags can contain log n bits, this existing strategy's time complexity improves by a log factor.this bound to o((k/α) log 2 n log 2 ∆) rounds, and can handle on-going rumor arrivals.by comparison, the best-known gossip solution in the classical telephone model requires o(d + polylog(n)) rounds . this result was considered a breakthrough as it removed the dependence on graph properties such as expansion or conductance. the solution in , however, requires unbounded concurrent connections and unbounded message size (allowing all rumors in the set difference between two nodes to be delivered during a given one-round connection 2 ).the amtm was introduced in , which analyzes a basic asynchronous rumor spreading algorithm, and prove it requires o( √ (n/α) • log 2 nα • δ max ) time, with high probability, where δ max is a sum of the maximum delays on the relevant communication events (as is standard in asynchronous models, δ max is unknown to the algorithm and can change from execution to execution). for gossip, however, the paper establishes only a crude deterministic bound of o(n • k • δ max ) time to gossip k rumors. finding an efficient gossip algorithm in the amtm was left as the core open question of , as such an algorithm could be directly deployed as an information spreading routine in real smartphone peer-to-peer networks. new result #1: improved synchronous gossip. our ultimate goal in this paper is to design and analyze an efficient and simple gossip strategy for the amtm. the first step toward this goal is to identify an efficient synchronous strategy that can be adapted to asynchrony. 
the existing synchronous gossip algorithm from is not a good candidate for this purpose because it requires nodes to advertise whether or not they were involved in a connection at any point during the previous log n rounds. this behavior cannot be easily adapted to an environment with no rounds.in section iii, we overcome this issue by describing a simpler strategy we call random diffusion gossip that does not depend on round history. this algorithm has each node continually advertise two pieces of information about its current rumor set: a hash of the set and its size. when faced with multiple neighbors with different rumor set hash values, a node will randomly select a recipient of a connection proposal from among those with the smallest rumor set sizes. this strategy is easily adapted to asynchrony as it does not explicitly use rounds.as we show, in addition to being both round-independent and pleasingly straightforward in its operation, random diffusion gossip is more efficient than the solution from , requiring only o((k/α) log n log 2 ∆) rounds to spread k rumors. the source of this speed-up is a new and improved version of the core technical lemma from , which bounds the performance of a random matching strategy in bipartite graphs. notice that this gossip result also improves the best known result for rumor spreading (i.e., for k = 1).finally, we note that these synchronous gossip bounds are all of the form õ(k/α) (where õ suppresses polylogarithmic factors in n and ∆). as argued in the previous work on gossip, it might be possible to leverage pipelining to achieve results in õ(k + (1/α)), which would make the existing gossip strategies for this model far from optimal in certain cases. in section iii, we resolve this open question by proving that ω(k/α) is indeed a lower bound for spreading k rumors in the mobile telephone model. new result #2: asynchronous gossip. our synchronous random diffusion gossip algorithm's operation is easily adapted to our asynchronous model. adapting its analysis, however, is more complicated. like most algorithms studied in the mtm, our synchronous analysis of random diffusion gossip relies on the synchronized behavior of the devices in the network: fixing for each round a set of potentially productive connections, and then arguing that a reasonable fraction of these connections will succeed in parallel during the round.our first step toward enabling an asynchronous analysis is to divide time into intervals of a length proportional to δ max . these phases are not used by the algorithm (as δ max is a priori unknown), but instead meant only to facilitate our analysis. as in the synchronous setting, we fix a set of potential connections at the beginning of each interval. we show that amidst all the chaotic, asynchronous behavior that occurs during the interval, for each such connection from some node u to some node v in this set, one of two things will happen: there will be a point at which u selects a connection from a set that includes v and that is not too large (keeping the probability of v's selection reasonable), or some other node will end up connecting with v before u even gets a chance to learn about v.to make use of this probabilistic analysis, we leverage a rebuilt version of the core randomized matching lemma from (discussed above), that we make not only more powerful but also significantly more friendly to asynchrony. in more detail, this new version includes two crucial changes. 
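a minimal python sketch of the advertise/select rule of random diffusion gossip as described above (hash the local rumor set, advertise the hash and the set size, and propose to a uniformly random neighbor among those with a differing hash and the smallest advertised size); the helper names and the use of sha-256 are my own assumptions.

import hashlib
import random

def rumor_tag(rumors):
    """Advertisement for a rumor set: (short hash of the set, its size)."""
    digest = hashlib.sha256(",".join(sorted(rumors)).encode()).hexdigest()[:8]
    return digest, len(rumors)

def pick_connection_target(my_rumors, neighbor_tags):
    """Return the neighbor to send a connection proposal to, or None.
    neighbor_tags maps neighbor id -> (hash, size) from its advertisement."""
    my_hash, _ = rumor_tag(my_rumors)
    useful = {v: size for v, (h, size) in neighbor_tags.items() if h != my_hash}
    if not useful:
        return None                               # all neighbors hold the same rumor set
    smallest = min(useful.values())
    candidates = [v for v, size in useful.items() if size == smallest]
    return random.choice(candidates)              # uniform among smallest rumor sets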
first, the original lemma follows the behavior of a randomized matching strategy over multiple rounds to achieve the needed result. our new version, by contrast, requires only a single round, which is necessary to apply to our interval structure, as in the asynchronous setting too much can change in the network between intervals to enable a coherent multi-interval graph analysis. second, the original version relied on the precise probabilities of particular connections occurring, using both upper and lower bounds on these values to prove its claim. our new version only requires the loose lower bounds on connection probabilities established by our asynchronous analysis.combining these techniques, we are able to translate the synchronous complexity bound directly to the asynchronous setting, proving that k rumors spread in at most o((k/α) log n log 2 ∆ • δ max ) time. at the beginning of round r, let t u (r) be the token set of node u and let s u (r) be the minimum token set size among u's neighbors. furthermore, for a fixed topology graph g = (v, e), let n(u) be the neighbors of u in g and let n u (r) be the productive neighbors for u at the beginning of round r, where we define. for convenience, let s min (r) = s j (r) and n * min (r) = n * j (r) for j = i min (r). this allows us to analyze the cut between these nodes in v \ s min (r) and the nodes that still possess exactly i min (r) tokens, s min (r).furthermore, note that if for some rounds r 1 and r 2 such that c(r 1 ) > 1, c(r 2 ) = 1, and r 1 < r 2 it must be the case that every node in s min (r 1 ) has participated in a productive connection.32•log ∆ , and 4) for every u ∈ l, every neighbor of u in r min (r) is in r. for the matching m of size at least m • c over our original graph g(l, r), we denote a node v ∈ r as the original match of a node u ∈ l if {u, v} ∈ m. therefore if condition c holds such that u∈l ′ deg l ′ ,r ′ (u) ≤ m∆ 1-i 32•log ∆ , the second objective of the lemma is already satisfied by setting l ′′ = l ′ , r ′′ = r ′ , and r ′ = r.in g(l ′ , r ′ ) let r ′ b ⊆ r ′ be the bad nodes in r ′ and let r ′ g ⊆ r ′ be the good nodes, where good and bad are defined with respect to. now remove every node from r ′ that is selected in round r and denote the remaining set r ′′ , and remove from l ′ every node u for which u's original match was removed from r ′ . since we know from the above that each node v ∈ r ′ g is removed from r ′ with probability at least 1/4 and the probability that any edge {u, v} is removed from g(l ′ , r ′ ) is at least the probability that v is removed from r ′ , the probability that an edge {u, v} incident on r ′ g is removed is at least 1/4.as is noted in, this implies that at least a 1/ log ∆ fraction of nodes in l ′ had their original match removed in round r which means that at least |l ′ |/ log ∆ nodes of r ′ were selected and therefore participated in a productive connection.note that once again by our construction, for every node u ∈ l ′′ , every neighbor of u in r min (r + 1) is in r ′′ . furthermore by our construction of g(l ′′ , r ′′ ), l ′′ ⊆ l ∩ l min (r + 1) and r ′′ ⊆ r ∩ r min (r + 1). for example, let t u (t) be the token set of node u at time t in the same way t u (r) was u's token set at the beginning of round r. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/192.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/192.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e6541147b6f22a6b5a8881c9d02b5330f4a0555 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/192.txt @@ -0,0 +1 @@ +static code analysis tools are highly specialized to detect one or more defects, typically categorized into similar types of defects. these tools fulfill a group of specific needs of software developers. it is only recently that heterogeneous multicore systems have been adopted in a wide-range of hardware in industrial sectors such as automotive, wireless communication and embedded vision. therefore it is increasingly important to develop new static code analyses that address the fundamental problem of concurrency, which means that many tasks running at the same time on the same hardware can lead to unpredictable and incorrect behaviour. identifying and fixing issues related to concurrency and parallelism is one of the most time-consuming and costly aspects of parallel programming.however, static code analysis tools that detect defects related to parallel programming are at a very early stage.this papers presents an experimental evaluation of appentra's parallelware static code analysis tools on power systems, which go beyond the state of the art by addressing the problem of concurrency and parallelism from three different perspectives: finding concurrency issues in the code, discovering new opportunities for parallelization in the code, and generating parallel-equivalent code that enables tasks to runs faster. in the rest of the paper, section 2 describes the current set of parallelware tools, namely, the parallelware development library, parallelware analyzer (beta) and parallelware trainer. next, section 3 presents early results from the analysis of the snu npb suite , a c version of the nas parallel benchmarks , using power systems available at the jülich supercomputing centre and at appentra headquarters. finally, section 4 presents conclusions and future work.this papers presents an experimental evaluation of appentra's parallelware static code analysis tools on power systems, which go beyond the state of the art by addressing the problem of concurrency and parallelism from three different perspectives: finding concurrency issues in the code, discovering new opportunities for parallelization in the code, and generating parallel-equivalent code that enables tasks to runs faster. in the rest of the paper, section 2 describes the current set of parallelware tools, namely, the parallelware development library, parallelware analyzer (beta) and parallelware trainer. next, section 3 presents early results from the analysis of the snu npb suite, a c version of the nas parallel benchmarks, using power systems available at the jülich supercomputing centre and at appentra headquarters.appentra is a deep tech global company that delivers products based on the parallelware technology, a unique approach to static code analysis for concurrent and parallel programming. -parallelware analyzer (beta)is designed to speed up the development of parallel applications and to enforce best practice in parallel programming for heterogeneous multicore systems. 
the structure of the report is as follows: benchmark, the software application; files, number of source code files; sloc, source lines of code calculated by the sloccount tool; time, runtime of the parallelware analyzer tool in milliseconds; software issues, number of issues found in the code related to concurrency and parallelism; and opportunities, number of loops found in the code that have opportunities for parallelization using multithreading and simd paradigms.the current tool setup reports five software issues related to concurrency and parallelism: global, use of global variables in the body of a function; scope, scalar variables not declared in the smallest scope possible in the code; pure, pure functions free of side effects not marked by the programmer; scoping, variables in an openmp parallel region without an explicit data scoping; and default, openmp parallel region without the default(none) clause.parallelware analyzer successfully analyzed a total of 192 source files of code, containing 39890 lines of code written in the c programming language, in less than 13 seconds.the parallelware trainer tool was used to automatically generate several parallel versions of a code that computes the mandelbrot sets.1, which contains directives #pragma omp parallel for ); taskwait, parallel version using openmp 3.2, which contains directives #pragma omp task and #pragma omp taskwait ); taskloop, parallel version using openmp 4. its structure is as follows: version, serial or parallel version of the code, one of sequential, multithreading, taskwait and taskloop; no.on the other side, parallelware trainer provides a gui that facilitates the generation of parallel version of a code, as well as the testing of those version for correctness and performance. in less than one hour, a software engineer with little experience in parallel programming generated several openmp-enabled parallel versions of the mandelbrot algorithm using multithreading and tasking paradigms. we also plan to extend the number of software issues related to concurrency and parallelism detected by the parallelware tools and run them on a wider set of scientific and engineering software. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/193.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/193.txt new file mode 100644 index 0000000000000000000000000000000000000000..50109b625ef4cbe1eebb4b518e5b5d59f2703148 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/193.txt @@ -0,0 +1 @@ +one of the most important notions affecting our everyday social life is trust. most of the interactions in a society depend on it and it is a very fundamental concept in our daily life . for example, when we are shopping online, first we evaluate the trustworthiness of the website to be sure if shopping is secure. trustworthiness in this context is based on our previous knowledge about the website. if we do not have enough knowledge to evaluate the trustworthiness, then we check the information of others, or the reputation of the website. we also use trust and reputation for example to choose the product that we want to buy on the website. 
the comments for the product, our trust in the brand, or whether we used the product before play an important role in the decision-making process.trust not only affects our social life but it also has repercussions in a variety of fields such as economy and politics.as trust provides us with the internal security in an uncertain environment in which we lack the necessary information , it is an essential notion in our lives. trust also plays an essential role in virtual communities such as self-organized systems. in these communities, healthy communication between agents is established with the help of trust and reputation. so it is required to transfer these concepts from social life to the field of informatics. however, it is not that easy to define and represent them mathematically for computation.the first problem is that, because trust is an abstract concept, it does not have a universal definition that can be used in all research areas. the researchers in the social sciences have proposed different approaches to define trust in a social sense. our first goal is to find a suitable definition based on the literature in this field. secondly, we have to formulate this definition mathematically in a trust model which determines the trustworthiness of the other side. until today, various trust models have been developed by researchers for the computational area. they have used different definitions of trust and reputation and improved different strategies for their model. for the purposes of this paper, we have chosen two different fundamental studies in this area. the first one, which we will accept as the base article for our research, is the system of abdul-rahman et al. . they approach trust and reputation from a social point of view and provide a model in this sense. we have also used this method in our trust model. the second study which we will accept as the reference point is the article of tomforde et al. . they have developed a very advanced model which inspired us when we are developing our model. for convenience, the article "supporting trust in virtual communities" is referred as the "base article", the article "representation of trust and reputation in self-managed computing systems" is referred as the "reference article" for the remainder of this paper.the outline of this paper is as follows: in section ii, definitions for trust, computational trust, and reputation are given to provide a common perspective when evaluating these concepts. section iii discusses the two articles that we mentioned above. in this section, we have analyzed the base article in detail and compared it with the reference article. in section iv, we have introduced our trust model. lastly, section v concludes the paper. for convenience, the article "supporting trust in virtual communities"is referred as the "base article", the article "representation of trust and reputation in self-managed computing systems"is referred as the "reference article" for the remainder of this paper. 
trust (or, symmetrically, distrust) is a particular level of the subjective probability with which an agent assesses that another agent or group of agents will perform a particular action, both before he can monitor such action (or independently of his capacity ever to be able to monitor it) and in a context in which it affects his own action". based on this explanation, we can say trust is the subjective prediction of a trustor that a trustee behaves in a certain way. 1) input (knowledge about the agent) is given to the trust model (the input can be based on previous interactions or the reputation of the agent in the community). so the trust degree td of an agent a about an agent b should have a context c. an agent uses two different data structures to store trust information, namely sets q and r. also, the outcome of an experience e is graded with one of the four degrees in the set e = {vg, g, b, vb}, respectively very good (vg), good (g), bad (b), very bad (vb). q is for direct experiences and it holds agents' names (set a), experience results (set s) and contexts of trust (set c). in set s, there are four values s = (s_vg, s_g, s_b, s_vb) for the experiences with each agent, so s = {(s_vg, s_g, s_b, s_vb)}. for each type of experience e ∈ e there is a semantic distance value in the tuple sd = (sd_vg, sd_g, sd_b, sd_vb). this means evaluation of the trust degree is done within the system of an agent and the result matters only to the agent itself. for example, the binary metric measures trust with two degrees (-1, 1), namely good and bad, while the continuous metric uses continuous values to measure trust in a certain range, as in . the general trust degree td_gen holds the trustworthiness of an agent, which is calculated using (i) the experience set and (ii) the latest td_gen. ex_med = td_((s+1)/2) when s is odd, and ex_med = (td_(s/2) + td_(s/2 + 1))/2 when s is even (7). we ignore extreme values in an experience set by using the median because we want to calculate the trust degree according to the general behaviors of an agent. so the new general trust degree will be td_gen_m and the latest general trust degree td_gen_(m-1). if we cannot get any reputation value from others, the general trust degree td_gen will be equal to the threshold value td_th, which will be introduced in the next section. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/194.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/194.txt new file mode 100644 index 0000000000000000000000000000000000000000..b1652c45fec5d3b4bebf465a193168d49d90c0cd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/194.txt @@ -0,0 +1 @@ +distributed systems have to handle a lot of changes during their run-time. they especially have to deal with malicious behaviour and failures. for detecting those, the authors kantert et al. established two requirements for trust metrics in , in the following named the "reference article". they also introduce the weighted trust metric (wtm) and the weighted simple exponential smoothing trust metric (wses).
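a small python sketch of the median step in eq. (7) above and of a possible update of the general trust degree from it; the 50/50 combination with the previous td_gen is an illustrative assumption, since the base article's exact weighting is not restated here.

def experience_median(trust_degrees):
    """ex_med from eq. (7): the median of the per-experience trust degrees."""
    td = sorted(trust_degrees)
    s = len(td)
    if s % 2 == 1:
        return td[(s + 1) // 2 - 1]                     # td_{(s+1)/2} with 1-based indexing
    return (td[s // 2 - 1] + td[s // 2]) / 2.0          # mean of the two middle values

def update_general_trust(prev_td_gen, trust_degrees, weight=0.5):
    """td_gen_m computed from the experience set and td_gen_(m-1);
    the weight is a placeholder, not the paper's exact rule."""
    return weight * experience_median(trust_degrees) + (1.0 - weight) * prev_td_gen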
the same group of researchers gives a low-power sensor network and the simple trust metric in their research article , in the following as "end-to-end trust", where the simple trust metric is adjusted to the network's task.resulting on reading both the articles we wondered if the simple trust metric in end-to-end trust fulfils the requirements made in the reference article and how it differs from the weighted trust metric and the weighted exponential smoothing trust metric.this paper is organised as follows: section ii introduces as well into the definitions of trust and reputation from the reference article as into the model of the low-power sensor network from end-to-end trust. afterwards, section iii defines the three used trust metrics from the literature. based on section ii and iii, section iv discusses whether the metric from end-to-end trust fulfils the requirements from the definition in section ii and how the simple trust metric differs from wtm and wses. lastly, section v concludes the paper with the results. they also introduce the weighted trust metric (wtm) and the weighted simple exponential smoothing trust metric (wses). the same group of researchers gives a low-power sensor network and the simple trust metric in their research article, in the following as "end-to-end trust", where the simple trust metric is adjusted to the network's task.resulting on reading both the articles we wondered if the simple trust metric in end-to-end trust fulfils the requirements made in the reference article and how it differs from the weighted trust metric and the weighted exponential smoothing trust metric. based on section ii and iii, section iv discusses whether the metric from end-to-end trust fulfils the requirements from the definition in section ii and how the simple trust metric differs from wtm and wses.in this metric a node gets a new rating r ∈ r in a range between -1 and 1, such that r w := the positive ratings are stored in the fifo-queue r w + , the negative ratings are represented in the queue r w -, but there also is a fifoqueue r w n that contains all ratings. the simple trust metric has only one floating-point value for the last reputation value where t n,t = r t t for a specific node n. while the effect of a rating in the wses trust metric is related to the fixed value of 0, the effect of a rating in the simple trust metric is related to the floating value of t n,t-1 , which is the reputation value of the last trust round. in contrast to the wses trust metric, the simple trust metric always gets its new ratings with a new trust round.for calculating the new reputation value both the simple trust metric and wses trust metric need the previous reputation value. while the simple trust metric uses only one floating-point number, the wses trust metric needs a pair of them with the first element for the positive reputation values and the second element for the negative reputation values. that the simple trust metric only needs one value is caused by the fact, that the simple trust metric has no negative ratings and due to that doesn't need to have a storage for them. we also see that the first case and the last case of the wses trust metric are nearly the same as in the simple trust metric. 
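to make the contrast concrete, here is a hedged python sketch of the two update shapes as read from the comparison above: the simple trust metric keeps a single float whose change is relative to the previous reputation value, while wses keeps a (positive, negative) pair smoothed toward the fixed value 0; the smoothing factor and the exact formulas are assumptions, the articles define them precisely.

def simple_trust_update(prev_reputation, rating, alpha=0.1):
    """Simple trust metric: one float, moved from the previous reputation
    value toward the new rating (exact rule assumed)."""
    return prev_reputation + alpha * (rating - prev_reputation)

def wses_update(state, rating, alpha=0.1):
    """WSES-style update: separate exponentially smoothed components for
    positive and negative ratings, each anchored at 0."""
    pos, neg = state
    pos = (1.0 - alpha) * pos + alpha * max(rating, 0.0)
    neg = (1.0 - alpha) * neg + alpha * min(rating, 0.0)
    return pos, neg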
the second case of the wses trust metric is not listed for the simple trust metric because it is not relevant.also, the authors of the reference article show, that the wses trust metric fulfils their requirements of a trust metric.wses trust metric r t ∈ (0, 1] r s ∈ t n,t-1 is float r s n is a tuple of floats good rating higher than ξ good rating higher than 0 bad rating lower than ξ bad rating lower than 0 τ t = t n,t τ s t s (r s n ) weighted weighted fulfils requirement (r1) fulfils requirement (r1) fulfils requirememnt (r2) fulfils requirement (r2). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/195.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/195.txt new file mode 100644 index 0000000000000000000000000000000000000000..e9868f087ed8cdea00b35b828e429caa07aeec73 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/195.txt @@ -0,0 +1 @@ +self-stabilization is a fault tolerance approach that allows distributed systems to achieve a global correct configuration starting from an unknown initial configuration. without external intervention, a self-stabilizing algorithm is able to correct the global configuration of the distributed system in a finite time. various selfstabilizing distributed algorithms have been proposed in the literature using graph theory such as leader election, nodes coloring, domination problem, identifying the independent set, constructing the spanning tree. these algorithms have many benefits in the real-life applications, for example independent sets have been used as cluster heads in ad hoc and sensor networks .in graphs, independence is commonly defined as follow: let g = (v, e) be a graph, where v is the set of nodes and e is the set of edges. an independent set is a subset of nodes s ⊂ v such that there is no two adjacent nodes in s. the distance between any two nodes in s is greater than 1. an independent set s is said maximal, if there is no superset of s that could be an independent set. in other words, there is no node outside the maximal independent set (mis) that may join mis. it is well known in graph literature that mis is considered also as dominating set because every node out of mis has at least a neighbor in mis (every node outside mis is dominated by a node of mis).in this paper, we deal with a particular case of independent set. we call s maximal distance-2 independent set (md2is), if of s nodes are independent and the distance between any two nodes among them is strictly greater than 2. figure 1 illustrates difference between mis and md2is where green nodes are independent. observe that in mis (a), distance of 2 could be found between independent nodes. however, the distance between green nodes in md2is (b) is strictly greater than 2. nodes of mis are used as servers (cluster heads) in ad hoc and wireless sensor networks to provide important services for other nodes. each cluster head has to guarantee services for nodes connected to it, that are called members of the cluster. cluster members represent nodes outside of mis. a cluster head could serve its members by routing information, providing keys of encryption, giving names for members,... figure 1(b) shows that elements of md2si could be used as cluster heads where members connected to the head could be within distance of 2. however, using mis, members could not be located within distance more than 1. obviously, md2is provides a more reduced number of clusters than mis. 
the choice of the cluster heads is important in order to contribute in extending lifetime of wireless sensor and ad hoc networks. using md2is rather than mis as cluster heads could provide a good alternative in this sense especially that lifetime is the major problem of these networks. in addition to that and in order to deal with any possible failure, we use self-stabilizing algorithm that ensures reconstructing cluster heads, after the failure occurs, allowing the network still operational.finding the maximal independent set (mis) in graphs using self-stabilization paradigm was studied in literature for the first time by shukla et al. in 1995 . authors have used a straightforward idea based on two rules: (1) a node v joins the set s (which is under construction) if v has no neighbor in s, and (2) a node v leaves the set s if at least one of its neighbors is in s. other variants of self-stabilizing algorithms constructing independent set have been introduced to deal with particular problems which try to minimize the algorithm complexity or to be suitable for distributed daemon1 . reader can refer to the survey for more details on mis self-stabilizing algorithms. other self-stabilizing algorithms have been proposed for independent sets imposing additional constraints besides to the independence condition. for example, has presented an algorithm to discover the independent set where each node u out of s has at least a neighbor v in s such that deg(v) > deg(u). in authors propose a distributed self-stabilizing algorithm to find mis using two-hop (distance-2) information in wireless sensor networks.1.2 related works has proposed a self-stabilizing algorithm to find the independent dominating set imposing a distance greater than k between any two nodes of the independent set. work is an improvement of the memory management regarding the first one . every node outside the independent set is within distance k. presented a self-stabilizing algorithm to compute a dominating set s (which is not independent) where every node out of s has to be distant from s at most by k. although the precedent algorithms have bounded complexity o(n + k) in rounds, authors indicate that these algorithms might still never converge under the distributed daemon, since the daemon could ignore an enabled nodes. it is known in literature that: if the round complexity of a self-stabilizing algorithm is finite, this does not mean it converges . therefore, the computation of the convergence time still an open question for independent (or dominating) set at distance k ≥ 2. various selfstabilizing distributed algorithms have been proposed in the literature using graph theory such as leader election, nodes coloring, domination problem, identifying the independent set, constructing the spanning tree.in graphs, independence is commonly defined as follow: let g = (v, e) be a graph, where v is the set of nodes and e is the set of edges. an independent set is a subset of nodes s ⊂ v such that there is no two adjacent nodes in s. it is well known in graph literature that mis is considered also as dominating set because every node out of mis has at least a neighbor in mis (every node outside mis is dominated by a node of mis). we call s maximal distance-2 independent set (md2is), if of s nodes are independent and the distance between any two nodes among them is strictly greater than 2. 
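the distance-2 independence condition above can be checked directly; a minimal python sketch follows (the graph is given as a dict of neighbor sets, names are illustrative).

from itertools import combinations

def is_distance2_independent(s, neighbors):
    """True iff no two nodes of s are adjacent or share a common neighbor,
    i.e. every pair of nodes in s is at distance strictly greater than 2."""
    for u, v in combinations(s, 2):
        if v in neighbors[u]:                       # distance 1
            return False
        if neighbors[u] & neighbors[v]:             # distance 2 via a common neighbor
            return False
    return True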
networks and distributed systems are generally modelled as an undirected graph g = (v, e), where the units of processing represent the set of nodes v and the links are the set of edges e. generally, a set s of nodes is distance-k independent if every node in s is at distance at least k + 1 from any other node of s. consequently, a distance-2 independent set is a subset s of v where every two nodes of s are at distance > 2. definition: a distance-2 independent set of a graph g(v, e) is a subset s of v such that the distance between any two nodes of s is strictly greater than 2. once the system reaches the legitimate configuration, all the nodes are disabled and the set s = {v ∈ v : v.state = in} forms the maximal distance-2 independent set. in our algorithm, r1 states that if a node v out of s reads the state and the exp of all its neighbors and finds that all the state = out and all the exp = 0, hence all the neighbors at distance 2 are out, then node v has to join the set s. lemma 2: if a node v executes r1 becoming independent, it remains independent, and every node in its neighborhood at distance 2 stays out of s and cannot be enabled. theorem 1: md2is is a self-stabilizing algorithm that constructs a maximal distance-2 independent set in o(n) moves under the expression model using a central daemon.
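a hedged python sketch of rule r1 under a central daemon, following the description above; the exp variable is modelled here as a counter that a node's neighbors can read to learn whether it has a neighbor in s, and the other rules of md2is are omitted.

def r1_enabled(v, state, exp, neighbors):
    """Rule R1: v may join S when v is OUT, every neighbor is OUT, and every
    neighbor reports exp = 0, so no node within distance 2 of v is in S."""
    return state[v] == "OUT" and all(
        state[u] == "OUT" and exp[u] == 0 for u in neighbors[v]
    )

def central_daemon_step(state, exp, neighbors):
    """One move of a central daemon: execute R1 at a single enabled node."""
    for v in state:
        if r1_enabled(v, state, exp, neighbors):
            state[v] = "IN"                         # v joins the distance-2 independent set
            for u in neighbors[v]:
                exp[u] += 1                         # u now reports a neighbor in S
            return v
    return None                                     # no enabled node: legitimate configuration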
we implement our design in tendermint , a library for state machine replication (smr), and show how it can be applied to the representation of reverse mortgage transactions on the blockchain. as a practical matter, we note that this work presents a protocol for managing credits and debts in an efficient manner and does not prevent debtholders from abandoning their debt. we expect that this could be done through existing legal frameworks, or through collateralization schemes being pioneered in defi technology .the literature has given some consideration to debt representation in the blockchain. one existing approach is the implementation of a debt token and logic to handle debt creation and destruction . another approach uses the blockchain as a shared data layer to record loans . there have been attempts at the representation of debt in a multi-blockchain setting , in which debt is represented implicitly by locking tokens on multiple blockchains and not within a single blockchain. other work has used smart contracts to represent debt , but it does not improve the transparency of aggregate debt within the system. apart from work that represents debt in a blockchain, there is also at least one effort that is moving the home equity loan process onto the blockchain , but it does not put mortgage transactions themselves onto the blockchain. to our knowledge, the existing literature has not considered representing debt as unpaid transaction inputs.the rest of the paper is structured as follows. section 2 discusses transactions in the utxo model. section 3 presents a novel way of representing debt in the utxo model. section 4 presents our prototype for representing reverse mortgage transactions on the blockchain. section 5 is the conclusion. in our design, debt is represented analogously to the way in which credit is represented; where utxos represent credit as unspent transaction outputs, we represent debt in something akin to unpaid transaction inputs, i. the transaction hash is a hash of the transaction containing the transaction output from where the value is to be drawn; transaction outputs that are not matched to a transaction input therefore are unspent. it is also possible to represent debt using smart contracts, but such an implementation would be opaque and require additional computation to query the amount of debt issued or owed by a debtor, and it would not allow a straightforward lookup on the blockchain by parsing transactions. to enable this representation, we introduce two new types of transactions: debt transactions and outstanding debt transactions. in a debt transaction input, the erstwhile transaction hash field is re-purposed to act as a public key field that records a public key belonging to the creditor; this enables parties involved in the debt issuance to be recorded on the blockchain.the rest is similar to a coinbase transaction in that the debt transaction output is a normal transaction output. since the transaction output of the debt transaction is a standard utxo, it is included into the utxo pool like conventional utxos after the debt transaction has been accepted by the network. however, since one needs a mechanism to keep track of outstanding debts and their repayment, we introduce outstanding debt transactions, which are created simultaneously with debt transactions, as shown in fig. 
outstanding debt transactions are transactions with unmatched inputs and a transaction output matched with the corresponding debt transaction and the creditor's public key.after an outstanding debt transaction is created and broadcast to all other nodes, it is inserted into a debt pool, which is similar to a utxo pool, but which holds outstanding debt transactions instead of utxos. an outstanding debt transaction is removed from the debt pool when a debt owner repays the remainder of a debt, i. once an outstanding debt transaction has a matched input, it is no different from a normal transaction and is removed from the debt pool and inserted into the transaction pool to be eventually accepted by the network. in the case of partial repayments, we create two new transactions from the original outstanding debt transaction: the first is a normal transaction that records the transfer of value of the repayment amount from the debtor to the creditor; the second is a new outstanding debt transaction with the remaining debt amount, which is similar to the way change transactions are handled in utxo blockchains. the issuance of two new transactions in the case of repayments ensures that funds allocated to repayment and outstanding debt balances are finalized by the network and not held in debt pools, akin to splitting a utxo when a transfer of credit is made. simultaneously, the local state of the node is updated and the utxo pool is updated with the utxos belonging to the debtor, while the debt pool is updated with an outstanding debt transaction belonging to the creditor. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/197.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/197.txt new file mode 100644 index 0000000000000000000000000000000000000000..3f35c52760f49158e7b45359cb51b7db655d113a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/197.txt @@ -0,0 +1 @@ +i n the past decade, machine learning models have demon- strated unprecedented data processing capability in numerous applications. to well train machine learning models, it is unavoidable to massively collect data samples from users, which gives rise to the concern on user privacy leakage , . this concern has significantly hindered the wide application of machine learning techniques. in light of this, federated learning (fl) has been proposed to allow decentralized clients to collaboratively train machine learning models by merely exchanging intermediate computations (i.e., gradients) yao fu and di wu are with the department of computer science, sun yatsen university, guangzhou, 510006, china, and guangdong key laboratory of big data analysis and processing, guangzhou, 510006, china (e-mail: fuyao7@mail2.sysu.edu.cn; wudi27@mail.sysu.edu.cn).yipeng zhou is with the department of computing, fse, macquarie university, australia, 2122 (e-mail: yipeng.zhou@mq.edu.au).shui yu is with the school of computer science, university of technology sydney, australia (e-mail: shui.yu@uts.edu.au).yonggang wen is with the school of computer science and engineering, nanyang technological university, singapore (e-mail: ygwen@ntu.edu.sg).chao li is with tencent technology (shenzhen) co. ltd, china.(e-mail: ethancli@tencent.com).with the parameter server (ps). raw data samples are locally kept on clients. however, it has been pointed out that the leaked gradient information can still be exploited by malicious entities to crack user privacy , , , , . 
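a minimal python sketch of the repayment handling described above (field names and types are illustrative, not the tendermint prototype's actual structures): a repayment always produces a normal value-transfer transaction, and a partial repayment additionally produces a new outstanding debt transaction for the remainder, much like change when a utxo is split.

from dataclasses import dataclass
from typing import Optional

@dataclass
class OutstandingDebt:
    debtor: str
    creditor: str
    amount: int            # remaining unpaid amount

def repay(debt: OutstandingDebt, payment: int):
    """Return (transfer_tx, new_outstanding_debt_or_None)."""
    paid = min(payment, debt.amount)
    transfer_tx = {"from": debt.debtor, "to": debt.creditor, "value": paid}
    remaining = debt.amount - paid
    new_debt: Optional[OutstandingDebt] = (
        OutstandingDebt(debt.debtor, debt.creditor, remaining) if remaining > 0 else None
    )
    return transfer_tx, new_debt   # full repayment removes the entry from the debt pool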
1 to further enhance the protection of user privacy, differential privacy (dp) has been employed to add additional noises to client gradients before they are uploaded to the ps , , .unfortunately, the adoption of dp may lower the model accuracy significantly, which has become a major obstacle for the application of dp in fl , , . taking the popular fedavg algorithm as an example, the naive adoption of dp in fedavg may cause 2-10 times training loss than the original model . for many real-world applications, such high performance deterioration can significantly reduce the effectiveness of advanced machine learning models, and is somewhat unacceptable at all. as fedavg represents a large family of gradient descent (gd) based algorithms widely used in fl, it is questionable how practical dp will be when being applied to fl.typically, clients and the ps in fl works together with the following manner: clients conduct a number of local iterations (li) before their gradients plus dp noises are uploaded to the ps for aggregation. by receiving the computations from multiple selected clients, the ps conducts a round of aggregation and distributes new results to clients for the next round of global iteration (gi) . apparently, it is only necessary to add the dp noises to clients' gradients after the last round of lis before they are disclosed to the ps. the noises distort the disclosed gradients against attacks, which meanwhile impair the convergence of fl , , . 2in this work, we investigate how to improve the practicality of dp in fl through tuning the number of conducted local or global iterations. intuitively, we can improve the model accuracy if the number of lis is tuned properly since there is no need to add noises after each li except the last li. firstly, we derive the convergence rate of the fedavg algorithm distorted by the dp noises. secondly, we derive the conditions for the dp based fedavg to converge, and formally derive the formula to set the number of lis according to the dp mechanism to minimize the negative influence of dp noises. at last, we conduct case study by using the laplace mechanism and the gaussian mechanism, through which we theoretically unveil that: 1) the learning error of the dp based fedavg with the gaussian mechanism can finally converge to a constant number if we use a fixed number of lis per gi; 2) the dp based fedavg with the laplace mechanism will diverge with the number of gis, but the divergence rate can be lowered substantially by setting the number of lis according to our method.overall, our contributions in this paper can be summarized as below:• to the best of our knowledge, we are the first to theoretically study the practicality of dp in fl through tuning the numbers of lis and gis, which paves the way towards a better understanding of the complicated relation between the random noises introduced by dp and the final fl model accuracy. unfortunately, the adoption of dp may lower the model accuracy significantly, which has become a major obstacle for the application of dp in fl,,.typically, clients and the ps in fl works together with the following manner: clients conduct a number of local iterations (li) before their gradients plus dp noises are uploaded to the ps for aggregation. secondly, we derive the conditions for the dp based fedavg to converge, and formally derive the formula to set the number of lis according to the dp mechanism to minimize the negative influence of dp noises. 
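a hedged python sketch of a client's work in one global iteration under this scheme: the local iterations are run without noise, and the dp noise (gaussian or laplace) is added once, just before the update is disclosed to the ps; the learning rate and noise scale are placeholders and sensitivity clipping is omitted, since the paper derives the proper settings.

import numpy as np

def client_update(theta, grad_fn, E, lr=0.1, noise_scale=1.0, mechanism="gaussian"):
    """Run E local iterations, then perturb the disclosed model once."""
    theta = np.asarray(theta, dtype=float).copy()
    for _ in range(E):                          # local iterations, no noise added here
        theta -= lr * grad_fn(theta)
    if mechanism == "gaussian":                 # (epsilon, delta)-DP mechanism
        noise = np.random.normal(0.0, noise_scale, size=theta.shape)
    else:                                       # laplace: (epsilon, 0)-DP mechanism
        noise = np.random.laplace(0.0, noise_scale, size=theta.shape)
    return theta + noise                        # only this noisy value reaches the PS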
at last, we conduct case study by using the laplace mechanism and the gaussian mechanism, through which we theoretically unveil that: 1) the learning error of the dp based fedavg with the gaussian mechanism can finally converge to a constant number if we use a fixed number of lis per gi; 2) the dp based fedavg with the laplace mechanism will diverge with the number of gis, but the divergence rate can be lowered substantially by setting the number of lis according to our method.• to the best of our knowledge, we are the first to theoretically study the practicality of dp in fl through tuning the numbers of lis and gis, which paves the way towards a better understanding of the complicated relation between the random noises introduced by dp and the final fl model accuracy. in fl, there exist two schemes to add dp noises: the global dp scheme and the local dp scheme. their theoretical analysis validated that the number of global iterations and the number of engaged clients in each global iteration should be neither too large nor too small to minimize the final loss function.other than the above ldp mechanism that can guarantee the privacy budget over all rounds of global iteration, there exist a number of other works that designed and analyzed a loose ldp mechanism that can only guarantee the privacy budget for a single round of global iteration,,.relaxed the local ǫ-dp in wireless fl settings, which gave fixed noises in each iteration and used advanced composition rule to track the total privacy leakage over t iterations. let t g denote the total number of gis to be executed, e denote the number of lis to be executed by each engaged client per gi and b denote the number of clients engaged by the ps in each round of gi.the traditional laplace mechanism gives (ǫ, 0)-dp, while the gaussian mechanism gives (ǫ, δ)-dp. then, the client updates the model on the local dataset for e rounds before the client returns the updated model with noises added with the rule θl. for k + 1 ∈ c e , local parameters represented by θ l k+1 will be aggregated with other engaged clients and dp noises.2, we can draw a similar conclusion that the model accuracy can be impaired by the dp noises significantly such that implementing dp in fl is challenge when ǫ is small. through tuning the number of local/global iterations, we show from both theoretical analysis and experimental study that it is very important to set the number of iteration times properly such that the influence of the dp noises can be minimized. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/198.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/198.txt new file mode 100644 index 0000000000000000000000000000000000000000..aba791c5714eef3120ca0d198739b288b5cbbdec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/198.txt @@ -0,0 +1 @@ +the reconstruction of charged particle trajectories (tracking) is a pivotal element of the reconstruction chain in compact muon solenoid (cms) as it measures the direction and momentum of charged particles, which is then also used as input for nearly all high-level reconstruction algorithms: vertex reconstruction, b-tagging, lepton reconstruction, jet isolation and missing transverse momentum reconstruction. tracking by far is the most time consuming step of event construction and its time scales poorly with the detector occupancy. 
this brings a computing challenge to the upcoming upgrade of the accelerator from the large hadron collider (lhc) to the high-luminosity lhc (hl-lhc), where the instantaneous luminosity will increase by an order of magnitude due to the increased number of overlapping proton-proton collisions. to address this challenge, the parallel kalman filter tracking project mkfit was established in 2014 with the goal of enabling efficient tracking on modern computing architectures. over the last 6 years, we have made significant progress towards developing a parallelized and vectorized implementation of the combinatoric kalman filter algorithm for tracking . this allows the efficient global reconstruction of the entire event within the projected online cpu budget. this also opens the possibility of deploying mkfit into the cms high level trigger (hlt), where the performance requirements are particularly strict. the current goal is to test the algorithm online in run 3 of the lhc. global reconstruction necessarily entails the unpacking and clustering of the hit information from all silicon strip tracker modules before the hits are processed by mkfit. the current cms hlt, on the other hand, performs hit and track reconstruction on demand, i.e., only for software-selected regions of the detector. therefore, we have recently begun to investigate how to implement the unpacking and clustering steps efficiently for the entire detector at once. this document highlights the latest developments in enabling parallelization of unpacking and clustering on modern computing architectures. we start from a standalone version of the unpacker, which uses simulated raw data and calibration data, along with simplified versions of many related cmssw classes. the cms strip tracker data are organized by front end drivers (feds). in zero suppression mode, the measured signal within a fed is stored like a variant of the compressed sparse row (csr) format, where the channel and strip numbers correspond to the row and column numbers (figure 1). besides the event-based strip tracker data, pre-measured calibration data are also needed. the unpacking step transforms all event-based strip tracker data and pre-measured calibration data to the soa format in order to provide optimal performance for the clustering step. we then unpack the data to soa format, which can be done concurrently for each channel on a multicore cpu or gpu. unpacking is the most time consuming step since it involves irregular data access patterns and is particularly costly on gpu. first, only strips with a signal-to-noise ratio larger than the "channel threshold" are recorded, and we call these strips active strips. our parallel implementation is based on the fact that every candidate cluster will have at least one active strip with a signal-to-noise ratio larger than the "seed threshold", which we call seed strips.
we then form a cluster around a seed strip by determining the left and right boundaries, computing the cluster charge, and checking it against the "cluster threshold". the seed seeking stage can be parallelized over all strips and the cluster forming stage can be parallelized over all seed strips. the input data for these tests is a simulated data sample of tt events with an average pileup per event of 70 using the phase 1 cms geometry in 2018 with realistic detector conditions. for this ttbar pu70 data sample, the strip number and the seed strip number are roughly 800,000 and 150,000, respectively.we have enabled parallelization of unpacking and clustering of cms silicon strip detector data and demonstrated its performance on both multi-core cpus and many-core gpus. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/199.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/199.txt new file mode 100644 index 0000000000000000000000000000000000000000..04b760e2f77f81c16aed8171ad04314664f2e839 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/199.txt @@ -0,0 +1 @@ +a mobile pervasive environment consists of users interacting with mobile devices connected to stationary devices, desktops, servers or other mobile devices wirelessly. due to mobility of users, frequent network disconnections have become a normal characteristic, and as a consequence, this results in failure of any mobile distributed system running in such environment affecting negatively the reliability of the latter. several fault tolerance mechanisms have been proposed to solve the reliability problem in distributed computing systems. almost all proposed techniques consider environments with wired homogeneous computational devices. thus, they are difficult to adapt in a wireless heterogenous mobile computing environment as opposed to grid computing systems. fault tolerance is a process of reinstating the normal or an acceptable behavior of a system. in pervasive computing environments, network disconnection in the middle of the execution of a task is frequent. it is due to, mainly, mobility of users. for example, if user a's device act as a participating device in a cluster of devices collaborating to execute an application partitioned and offloaded from user b's device and the former moves away from the cluster, then a failure is generated. hence, this paper proposes a fault tolerant algorithm, using reactive fault tolerant methods, which is an independent component that can be added to any offloading framework. the fault tolerant component takes as input the different tasks offloading schedules from the existing offloading systems and ensure the complete application execution by the application of different fault tolerant policies. the rest of the paper is structured as follows: section 2 discusses some existing works in the area of fault tolerance in distributed computing systems; section 3 presents the modeling of the application, device and reliability models; section 4 describes in detail the calculation of reliability level of devices, presents an algorithm to cluster devices based on reliability level, discusses the replication and checkpointing policies and describes a possible implementation. the fault tolerant algorithm is also presented and discussed along with an analysis of the time complexity of the latter. 
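a hedged, sequential python sketch of the two clustering stages described above (seed seeking and cluster forming); the thresholds, the quadrature noise sum, and the boundary rule are simplified assumptions, and the real implementation also handles detector geometry, bad strips, and parallel execution.

def find_clusters(adc, noise, chan_thr=2.0, seed_thr=3.0, clus_thr=5.0):
    """adc[i] / noise[i] is the signal-to-noise ratio of strip i."""
    n = len(adc)
    active = [adc[i] > chan_thr * noise[i] for i in range(n)]        # active strips
    seeds = [i for i in range(n) if adc[i] > seed_thr * noise[i]]    # parallel over strips
    clusters = []
    for s in seeds:                                                  # parallel over seed strips
        lo = s
        while lo - 1 >= 0 and active[lo - 1]:
            lo -= 1                                                  # extend left boundary
        hi = s
        while hi + 1 < n and active[hi + 1]:
            hi += 1                                                  # extend right boundary
        charge = sum(adc[lo:hi + 1])
        cluster_noise = sum(x * x for x in noise[lo:hi + 1]) ** 0.5  # assumed quadrature sum
        if charge > clus_thr * cluster_noise:                        # cluster threshold check
            clusters.append((lo, hi, charge))
    return clusters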
section 5 describes the experimental setup, the design and implementation of a simulator and the evaluation and analysis of the results; and finally, section 6 summarizes the paper. for example, if user a's device acts as a participating device in a cluster of devices collaborating to execute an application partitioned and offloaded from user b's device and the former moves away from the cluster, then a failure is generated. the fault tolerant component takes as input the different task offloading schedules from the existing offloading systems and ensures complete application execution by applying different fault tolerant policies. the rest of the paper is structured as follows: section 2 discusses some existing works in the area of fault tolerance in distributed computing systems; section 3 presents the application, device and reliability models; section 4 describes in detail the calculation of the reliability level of devices, presents an algorithm to cluster devices based on reliability level, discusses the replication and checkpointing policies and describes a possible implementation. thus, tasks found along the critical path should be subject to the application fault tolerant policies to make sure the application completes on time. the three criteria used to determine the reliability of devices, to which either the replication or the checkpointing fault tolerant policy will be applied, are the computing capability of the device, its availability and its data throughput in the network. the task replication is offloaded to the same cluster as the selected participating device, originally scheduled to process the offloaded task as per the code offloading module, in order to maintain the offloading benefits in terms of execution time, energy consumption and load balancing. for each task found in the critical path, its corresponding scheduled offloading device 𝑚 is checked to determine whether it is in the low reliability or high reliability cluster. for the low reliability cluster, the score of all devices in that cluster is calculated and the device ~𝑚 with the lowest score is assigned the task replica 𝑆. since each device has only one attribute, which is 𝑅, and the number of clusters is 2 (that is, the low reliability and high reliability clusters), the time complexity can be reduced to 𝑂(𝑚 * 𝐼) and hence to 𝑂(𝑚). three metrics are considered for this evaluation: the application completion time, the fault tolerant algorithm overhead cost and the number of control messages for fault tolerance. to simulate failures of devices, a 2-parameter weibull distribution is used to generate random times between failures, and the failure time points are computed by adding the times generated from the distribution to the device start time. the workload generator gets the tasks on the critical path and the list of high and low reliability device clusters and assigns each task to a device. each task is bound to a data size and an amount of computation. the results from the different experiments demonstrate that the proposed fault tolerant algorithm can adapt its policies to different execution conditions, such as different device availability and different task computation requirements. to permit the usage of different fault tolerant policies such as replication and checkpointing, the devices are grouped into high and low reliability clusters.
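a minimal python sketch of the failure generation and policy assignment described above; the score() callback, the mapping of checkpointing to the high reliability cluster, and all names are our illustrative assumptions rather than the paper's exact formulas.

import random

def weibull_failure_times(shape, scale, start, horizon):
    """failure time points: device start time plus weibull-distributed times between failures."""
    t, points = start, []
    while True:
        t += random.weibullvariate(scale, shape)   # 2-parameter weibull sample
        if t > horizon:
            return points
        points.append(t)

def assign_policies(critical_path_tasks, schedule, low_cluster, high_cluster, score):
    """replicate tasks scheduled on low-reliability devices; checkpoint the others (assumed mapping)."""
    actions = {}
    for task in critical_path_tasks:
        device = schedule[task]                    # device m chosen by the offloading module
        if device in low_cluster:
            backup = min(low_cluster, key=score)   # device with the lowest score receives the replica
            actions[task] = ("replicate", backup)
        else:
            actions[task] = ("checkpoint", device)
    return actions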
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/2.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/2.txt new file mode 100644 index 0000000000000000000000000000000000000000..501b98249ca0b5ce30b5a9488caf276cf423ab54 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/2.txt @@ -0,0 +1 @@ +we are increasingly surrounding ourselves with artificial intelligence-controlled (ai) autonomous systems such as self-driving cars. these systems must be trained by some mechanism, which is usually reinforcement learning, deep reinforcement learning, or sometimes neuroevolution. regardless of the optimization method, performance must be described by an objective function. with self-driving cars, the objective is to deliver the passengers safely from point a to point b, not damaging the car or others in the process, maybe saving energy, and to a large degree prioritizing arrival time - the same objectives human drivers have. however, these objectives are selfish in that they ignore the success of others. a self-driving car optimized by these criteria would not be considerate in lane transition planning, intersections, or other potentially contentious situations in which humans have expressed consideration. this poses a simple question: how do we optimize autonomous decision-making ais beyond their selfish objectives? one way could be to include more criteria in the objective function. however, this leads to an explosion of criteria and does not necessarily address system efficiency. for example, each car yielding right of way to one waiting behind a stop sign is inefficient. similarly, it might not be clear which extra objectives affect overall performance. neither does it sound possible to quantitatively encode "being considerate and optimal" into an objective function. nature faces a similar problem during evolutionary optimization of decision-making. evolution selects on short-term rewards, making it hard for the complex behavior of cooperation to evolve. thus, different mechanisms have been proposed that could help cooperation to emerge. the most obvious ones are kin selection, group selection, and inclusive fitness. kin selection is a concept that is hard to transfer into the domain of reinforcement learning and thus we leave it out of the discussion here. in biology, group-level selection requires all members of the group to replicate together, let alone receive the same payoff. inclusive fitness stipulates that the performance of one agent is dependent on another agent. both these mechanisms find a way to reward organisms both individually and through the mutual support of other agents. group-level and inclusive fitness selection improve cooperation, and emphasize the success of the group over the success of the individual. thus, it should be possible to counter the emergence of selfish ai behavior by using group-level selection or inclusive fitness. multiple examples of group-level selection exist, and have been identified as drivers of major transitions in evolution, such as the transition from single- to multi-cellular organisms or social insects. while driving cooperation, group-level selection often favors efficient division of labor. examples include the specialization of soma and germline in multicellularity, or the specialization of queens and workers in social insects.
even in less strict situations where groups of organisms are working together in a synergistic fashion to receive higher benefits, the rewards may not be distributed equally. this division of labor from group-level selection creates a situation where members receive more than they would alone, but still encounter unequal rewards - typically described as a despotic distribution. interestingly, the fitness function for optimization in a genetic algorithm can also be applied for optimization in reinforcement learning. the concept of group-level selection and inclusive fitness can thus be transferred to reinforcement learning. however, inheritance only plays a role in the context of a genetic algorithm, wherein populations of agents compete and the fitter ones replicate proportionally more often. this is different from reinforcement learning, wherein agents do not replicate. instead of optimizing a single agent, a group of agents can be trained using an objective function rewarding group performance. this is either accomplished with one policy controlling the actions of all agents at the same time, or by training independent agents that share information or experiences. thus, inclusive fitness is more akin to using independent policies, while group-level selection is closer to the optimization of a single policy. group performance for foraging agents can be assessed by taking the summation of the individually collected rewards or by taking the maximum of those rewards. this typically leads to poor overall performance, diverse behavior, and a higher despotic index. also, the distributed nature of learning poses problems for exploration and learning schedules. on the other hand, this heterogeneous outcome might be desired to solve other tasks. it has also been argued that global reward schemes do not scale to larger groups and that using individual reward schemes remedies this problem. this credit assignment difficulty can be summarized as an act of finding the balance between local individual reward, which can cause counterproductive interference, and maximizing global group reward, which can lead to self-sacrificial inefficiencies at the local scale. for this reason, many adaptive methods under the name "shaped reinforcement learning" have been proposed, to name only a few. it seems that the literature suggests that a global reward scheme evaluating the maximal or joint effort of a group neither leads to optimal performance, nor does it flatten the despotic index. regardless, here we show that assessing the performance of a group by its weakest member leads to optimal performance, while also resulting in a fair distribution of labor and reward - a flat despotic index. we show this for both genetic algorithms and reinforcement learning. the task used here is a foraging task, and performance is optimized by either a genetic algorithm or by reinforcement learning of policies controlling groups of individuals or the entire group. three different rewarding schemes are compared: • mean: the resources are pooled and then fairly distributed among the four agents; • minimum: each agent gets the same score, defined by the agent who collected the least food; • maximum: each agent gets the same score, defined by the agent who collected the most food (a control). we will show that the minimum reward scheme indeed leads to high performance while also satisfying a low despotic index.
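to make the three reward schemes concrete, here is a short python sketch (the function and variable names are ours, not the paper's) that converts the individually collected food counts of a four-agent group into the per-agent scores each scheme assigns.

def group_rewards(collected, scheme):
    """collected: food gathered by each agent; returns the per-agent reward under the chosen scheme."""
    if scheme == "mean":        # resources are pooled and shared equally
        r = sum(collected) / len(collected)
    elif scheme == "minimum":   # everyone scores what the weakest forager collected
        r = min(collected)
    elif scheme == "maximum":   # everyone scores what the best forager collected (control)
        r = max(collected)
    else:
        raise ValueError(scheme)
    return [r] * len(collected)

# an unequal group under the three schemes
print(group_rewards([1, 2, 3, 10], "mean"))     # [4.0, 4.0, 4.0, 4.0]
print(group_rewards([1, 2, 3, 10], "minimum"))  # [1, 1, 1, 1]
print(group_rewards([1, 2, 3, 10], "maximum"))  # [10, 10, 10, 10]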
instead of optimizing a single agent, a group of agents can be trained using an objective function rewarding group performance. regardless, here we show that assessing the performance of a group by its weakest member leads to optimal performance, while also resulting in a fair distribution of labor and reward - a flat despotic index. objective function for the genetic algorithm: the four agents to forage in the environment are either chosen randomly from the population (without replacement) and evaluated once (individual), or each agent in the population is cloned three times to create a group of four identical agents that are evaluated in the environment (clone). we determined which group-selection method and reward scheme combination leads to the highest performance of a group for the reward schemes mean, maximum, and minimum. we find that all three reward schemes generally result in high performance, but that clonal groups using mean or minimum reward schemes outperform the others (see figure 1). as expected, the despotic index was highest when using the maximum reward scheme regardless of group-level selection or inclusive fitness (see figure 2.3). the next steepest despotic index can be found when using the mean reward scheme, with group-level selection leading to a flatter hierarchy than inclusive fitness (see figure 2.1). finally, group-level selection with the minimum reward scheme resulted in a nearly equal resource distribution, indicating the most fair behaviors and outcomes for all agents in the group (see figure 2.2). using inclusive fitness and the minimum reward scheme results in a flatter distribution of resources compared to mean and maximum, but is still steeper than group-level selection when using the minimum reward scheme. as discussed before, the concepts of group-level selection and inclusive fitness do not perfectly translate to reinforcement learning, since neither policies nor agents "replicate". rewards received by one agent only directly affect the policy of that agent, while the actions of other agents are only indirectly included through the reward scheme and lifetime interaction state changes. in all cases (mean, minimum, and maximum) centralized control outperforms decentralized control, and mean and minimum reward schemes yield better-performing agents than maximum when using centralized control. notably, the despotic index is flattest when using the minimum reward scheme and steepest when using the maximum reward scheme (see figure 4). group performance under this minimum reward scheme is on par with rewarding the average, and is better than rewarding the best, at least for group-level selection. however, this is exactly the result of this research: using the minimum reward scheme encourages an increase in every agent's performance in an equal fashion, avoiding an unfair distribution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/20.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/20.txt new file mode 100644 index 0000000000000000000000000000000000000000..52319012caa7f5c93951e1ba7acbdc434128b2cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/20.txt @@ -0,0 +1 @@ +imagine a robot that can work as a butler. it can handle many tasks and talk with other butler robots to do even more tasks. alas, one cannot buy or build one today even with an unlimited budget. the reason a butler robot is not available is that we do not know how to program it.
current approaches require huge amounts of data to teach a single skill, and the data requirement grows exponentially with the number of tasks. while we have made remarkable progress in solving tasks with well-defined structures such as when explicit rewards or ground truth exist, we do not know how we can generalize this capability for a single task to multiple tasks. turing suggested: instead of trying to produce a programme to simulate the adult mind, why not rather try to produce one which simulates the child's? humans are born with a vast blank memory and a mechanism for filling it. let us call this mechanism the learning mechanism in this paper. with diverse experiences as input, the mechanism fills the contents of the memory as shown in fig. 1. after a few years, we can do many things in multiple domains such as perception, motor, social, language, and physics. we claim that there were the following issues in previous approaches that made the search for the learning mechanism difficult and propose a new approach to mitigate those issues. • targeting a single skill rather than diverse skills (the main issue) - while a human child can learn to do many things simultaneously, we have focused largely on developing models that can do only a single task. this approach has resulted in overfitted solutions that cannot be generalized to diverse tasks. • use of refined and focused datasets rather than diverse and noisy datasets (the first common pattern) - because the focus is to teach one skill, we tend to build a refined dataset or an environment that contains only task-relevant information. this resulted in spoon-fed, human-edited sensory data. compare this with how humans learn from unstructured data such as visual and auditory senses and find underlying structures and apply these structures to many domains. • relying on explicit rewards rather than on other mechanisms (the second common pattern) - while operant conditioning is a powerful mechanism, we tend to rely on explicit rewards to guide learning. designing a reward mechanism might be easy for a single task. however, it becomes exponentially difficult as the number of target tasks increases. if we compare the language acquisition abilities of humans and robots, robots can learn to navigate according to verbal instructions quickly, but do not know how to generalize this to other tasks such as cooking. on the contrary, human infants cannot follow verbal instruction for a very long time. unfortunately, you cannot give a treat to an 8-month infant for toilet training when he goes to a bathroom himself. but slowly, around two years, when they acquire language, they can do many tasks with it. one key difference is that while robots are trained using explicit rewards, it is not the case with infants. • too many necessary components rather than a sufficient set of the learning mechanism (the third common pattern) - finally, we tend to find individual necessary mechanisms rather than suggesting a set of the sufficient mechanisms. the learning mechanism is a system of multiple components. some might classify the components into two different categories: 1) innate or built-in mechanisms versus 2) universal principles that drive learning. examples of innate mechanisms are reflexes, the hippocampus, or limbic systems. universal principles explain the driving force behind learning and can usually be written as a succinct mathematical formulation such as intrinsic motivation, bayesian statistics, or the free energy principle.
as we can see, there are many candidate components, and we anticipate that the learning mechanism will be a set of multiple components. however, for a single application, a single component or a small subset of these components might do the job. the problem is that we cannot linearly concatenate the solutions from multiple domains because they are not independent. therefore, a more critical but neglected question is what is a sufficient set of components for all problems humans can solve. as a summary, we tend to build models for single tasks, resulting in overfitted solutions that cannot be generalized to multiple tasks. in this perspective, we need a regularization. regularization by sharing is an effective pattern, as demonstrated in the convolutional neural network (cnn) or the recurrent neural network (rnn). we claim that we need to regularize by enforcing the use of the same learning mechanism to conduct multiple tasks, as allen newell suggested in his unified theories of cognition. then why has the focus of past research been on developing models for individual tasks? imagine that a researcher has decided to build an agent that can perform many tasks like a human can. the first problem she encounters is that there is no simulated environment that can provide the diverse experiences required to acquire skills across multiple domains. to solve this problem, we introduce our ongoing effort to build a simulated environment for developmental robotics (sedro). sedro provides diverse experiences similar to those of human infants from the stage of a fetus to 12 months of age. sedro also simulates developmental psychology experiments to evaluate the progress of intelligence development in multiple domains. there are two generalizable lessons in our work. first, we point out that the learning environment should provide experiences for multiple tasks, and provide a proof-of-concept example. fig. 2 shows screenshots of sedro. in our environment, the learning agent has to rely on interactions with other characters such as a mother character, who teaches language as a human mother does. other characters have to intelligently react to the random babbling of the baby in a diverse but reasonable way. programming a mother character for all situations is intractable, and it becomes increasingly challenging to provide an experience for open-ended learning when social learning is involved. in our paper, we address this issue by focusing on the earlier stage of development, from the stage of a fetus to 12 months of age, when a few words are acquired. it is more tractable as the conversations between the mother and the baby tend to be one-directional rather than interactive back-and-forth conversations. the second generalizable lesson is that we can build upon prior research in developmental psychology to evaluate the developmental progress of a non-verbal artificial agent. because our environment cannot provide sufficient language exposure beyond the first 12 months, the agent cannot acquire advanced language beyond the first few words. consequently, we cannot evaluate the developmental progress of the agent based on its ability to follow verbal instructions or answer questions correctly. we overcome this challenge by using studies from developmental psychology. there are many experiments revealing developmental milestones for non-verbal infants. we can simulate and make use of those experiments in sedro for developmental assessments.
as a concrete example, kellman and spelke found that babies acquire perceptual completion around four months using the habituation-dishabituation paradigm. with sedro, models can be computationally evaluated by simulating and running experiments to compare behaviors of the agent to the intellectual progress of human infants. fig. 3 explains these experiments in more detail and shows screenshots of our simulated environment. the rest of this paper is arranged in the following manner. in section ii, we survey related works which cover different types of simulated environments for developing ai and various evaluation methods for non-verbal agents. then, in section iii, we illustrate our proposed environment, sedro. finally, we draw the conclusion in section iv by pointing out some major limitations of the current version of sedro, along with a future plan of action to resolve these issues. while we have made remarkable progress in solving tasks with well-defined structures such as when explicit rewards or ground truth exist, we do not know how we can generalize this capability for a single task to multiple tasks. • targeting a single skill rather than diverse skills (the main issue) - while a human child can learn to do many things simultaneously, we have focused largely on developing models that can do only a single task. • use of refined and focused datasets rather than diverse and noisy datasets (the first common pattern) - because the focus is to teach one skill, we tend to build a refined dataset or an environment that contains only task-relevant information. • relying on explicit rewards rather than on other mechanisms (the second common pattern) - while operant conditioning is a powerful mechanism, we tend to rely on explicit rewards to guide learning. • too many necessary components rather than a sufficient set of the learning mechanism (the third common pattern) - finally, we tend to find individual necessary mechanisms rather than suggesting a set of the sufficient mechanisms. as we can see, there are many candidate components, and we anticipate that the learning mechanism will be a set of multiple components. as a summary, we tend to build models for single tasks, resulting in overfitted solutions that cannot be generalized to multiple tasks. we claim that we need to regularize by enforcing the use of the same learning mechanism to conduct multiple tasks, as allen newell suggested in his unified theories of cognition. then why has the focus of past research been on developing models for individual tasks? imagine that a researcher has decided to build an agent that can perform many tasks like a human can. the first problem she encounters is that there is no simulated environment that can provide the diverse experiences required to acquire skills across multiple domains. first, we point out that the learning environment should provide experiences for multiple tasks, and provide a proof-of-concept example. because our environment cannot provide sufficient language exposure beyond the first 12 months, the agent cannot acquire advanced language beyond the first few words. in sedro, we simulate human infant experiences, but an alternative is to use a completely artificial environment that is not relevant to human experience but still requires skills in many domains.
through similar research, though we might find clues about the underlying human learning mechanism, it might be challenging to apply them to human-robot interaction because language is a set of arbitrary symbols shared between members. we expect researchers in the ai and robotics community to discover the learning mechanism for artificial general intelligence by testing different cognitive architectures using the open-ended learning environment developed in our project. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/200.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/200.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae638a1943c38ed1b752e3e59bb92cc1dbd2ebe6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/200.txt @@ -0,0 +1 @@ +qiskit aer is an open source framework for quantum circuit simulations and we implemented a parallel quantum computing simulator for high-performance computers as a backend of qiskit aer. qiskit aer is one of the components of qiskit that provides seamless access to quantum hardware and simulators with libraries to help development of quantum software. in addition to ideal (noiseless) simulation, qiskit aer supports a noise model for noisy simulation, which allows users to develop quantum algorithms for realistic noisy environments. the frontend of qiskit aer translates such qiskit-unique apis to basic matrix operations, as written in section ii, for backends. depending on the characteristics and usage of quantum circuits, users select one of the backends from the state vector, unitary matrix, density matrix, stabilizer state, super operator, and matrix product state (mps) simulators. our backend extends the state vector simulator with gpu acceleration and support for distributed environments. hundreds of qubits in quantum computers are becoming realistic in the near future, and existing quantum computers are available for development and evaluation of quantum applications. universal quantum computing simulations are now available for developing quantum applications with smaller numbers of qubits (around 20 qubits) on classical computers, even on desktop or laptop personal computers. to simulate rather more qubits (around 50 qubits), parallel simulators must store the quantum state in the huge distributed memory of parallel-processing computers. though such a system can provide enough memory, inefficient memory usage in state vector simulators is a critical problem when simulating many qubits; the communication overheads over hybrid parallel computers become inhibitors to scaling simulation performance. to parallelize a state vector simulator on a distributed memory space, probability amplitudes of the quantum state must be exchanged across different memory spaces. quantum circuits run quantum computing programs by applying quantum gates to qubits. data exchange is needed when: • copying data from one gpu to another gpu; • copying data to other nodes. cache blocking is a well-known technique in high-performance computing research to avoid repeatedly fetching data from main memory to cpu caches. we map such qubits to qubits larger than or equal to nc, and the other qubits to qubits smaller than nc, in the simulation. similar to the cache memory on a classical computer, a chunk whose size is nc qubits on a quantum computer can be kept in fast memory. gates on the smaller qubits (smaller than nc) can be calculated without moving data, so the memory is very fast, like a cache memory on a classical computer.
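a toy numpy sketch of the cache-blocking idea just described, under the simplifying assumption of a single in-memory state vector: gates on qubits below nc touch only nearby amplitudes, and a swap gate first brings a higher qubit into the "cached" low positions; the function names and the little-endian layout are our assumptions, not qiskit aer internals.

import numpy as np

def apply_1q_gate(state, gate, q, n):
    """apply a 2x2 gate to qubit q of an n-qubit state vector (little-endian layout)."""
    psi = np.moveaxis(state.reshape([2] * n), n - 1 - q, 0)
    psi = np.tensordot(gate, psi, axes=([1], [0]))
    return np.moveaxis(psi, 0, n - 1 - q).reshape(-1)

def apply_swap(state, q1, q2, n):
    """swap two qubits; used to fetch a high qubit into the cache and swap another one out."""
    psi = state.reshape([2] * n)
    return np.swapaxes(psi, n - 1 - q1, n - 1 - q2).reshape(-1).copy()

n, nc = 5, 3
state = np.zeros(2 ** n, dtype=complex); state[0] = 1.0
h = np.array([[1, 1], [1, -1]]) / np.sqrt(2)
# to act on qubit 4 (>= nc), swap it with a cached low qubit, apply the gate cheaply, swap back
state = apply_swap(state, 4, 0, n)
state = apply_1q_gate(state, h, 0, n)
state = apply_swap(state, 4, 0, n)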
the qubits in excess of nc can be assigned to slower memory, and by applying a swap gate to a pair of qubits, one qubit is fetched to the cache and the other qubit is swapped out from the cache at the same time (see fig.). the execution order of two gates can be swapped if each gate is applied to different qubits, because these two gates can be executed independently on a real quantum computer. one of the circuits we used is quantum volume, which is a randomly generated quantum circuit that uses all the given qubits, with all the qubits entangled by cnot gates. the second step from 30 to 32 qubits shows the simulation times when using six gpus; here, the performance deteriorates for the baseline of quantum volume because of data exchange overheads; with cache blocking, on the other hand, we can decrease the data exchange overheads, and we measured better performance. moreover, by storing chunks in both the memory of the gpus and the memory of the cpu, we can use the memory of the gpus as a cache memory to accelerate simulations using the chunks stored in the cpu. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/201.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/201.txt new file mode 100644 index 0000000000000000000000000000000000000000..57ff5b98f57e36918b625559330f7b62f9bb5f44 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/201.txt @@ -0,0 +1 @@ +the goal of local certification is to verify that a network, represented by a connected graph g in which each vertex has a unique identifier, satisfies some given property. the constraint is that each node of the network has a local view of the network (its neighborhood) and has to make its decision based only on this local view. if the graph satisfies the property, we want all vertices to accept the instance, while if the graph does not satisfy the property, at least one vertex has to reject the instance. this is a significant restriction and it only allows the verification of local properties (related to the degrees, for instance), so each vertex is given in addition some small certificate, and each vertex can now base its decision on its certificate and the certificates of its neighbors. for any property p, the goal is to produce a protocol to certify p locally while using certificates of minimal size. such a protocol is called a one-round proof labelling scheme with complexity f(n), where f(n) is the maximum number of bits in the certificate of a vertex in an n-vertex graph satisfying p (a formal definition of proof labelling schemes will be given in section 2). proof labelling schemes are a natural component of self-stabilizing algorithms, and are a particular form of distributed interactive protocols (with a single interaction). more broadly, proof labelling schemes with compact certificates (of logarithmic or polylogarithmic size) can be seen as a distributed version of the class np, for which certificates of polynomial size exist (and can be checked in polynomial time in the centralized setting). the euler genus of a surface σ is denoted by eg(σ) (see section 3 for more on surfaces and graph embeddings). in particular, the orientable surfaces of euler genus 0, 2 and 4 are respectively the sphere (or equivalently the plane), the torus and the double torus.
the non-orientable surfaces of euler genus 1 and 2 are respectively the projective plane and the klein bottle. motivated by recent work on distributed interactive protocols in classes with linear time recognition algorithms, it was recently proved that graph planarity has a one-round proof labelling scheme with complexity o(log n), and that this complexity is the best possible. more recently, the same authors built upon their previous work to extend their result to graphs embeddable on any fixed surface. theorem 1.1. for any (orientable or non-orientable) surface σ, the class of graphs that are embeddable on σ has a one-round proof labelling scheme with complexity o(log n). the proof of the planar case (i.e., the case eg(σ) = 0) and its extension to general surfaces are fairly intricate, with the two papers totaling 65 pages. the proof of the planar case reduces the problem to graphs that are closer and closer to trees (for which compact proof labelling schemes are known), while the proof for general surfaces works by carefully cutting the surface along non-contractible cycles, thus reducing the problem to planar graphs. in this short note, we give a simple and direct proof of theorem 1.1, based on rotation systems together with a distributed computation of the euler genus using euler's formula along a rooted spanning tree. we believe that our simplified approach is an important step towards an extension of this work to more general classes, such as minor-closed classes. in addition, we want to emphasize that surfaces are central in the study of distributed algorithms in planar graphs, as these graphs are locally indistinguishable from graphs on surfaces (see for instance for applications of this observation to obtain lower bounds on distributed coloring of planar graphs). soundness: if g ∉ f, then for every possible choice of certificates (c_g(v))_{v∈v(g)} and distinct identifiers (id(v))_{v∈v(g)}, there exists a vertex v ∈ v(g) such that the verifier at v rejects. a half-edge of g is a pair (v, e), where v ∈ v(g) and e is an edge incident to v. for any edge e of g, we glue the two polygons containing e together on e (if a single polygon contains e twice, we glue the two sides corresponding to e together), by respecting the natural orientation of e (that is, if e = uv, the vertex u of one polygon is identified with the vertex u of the other polygon, and similarly for v), see figure 3. the face f associated to the orbit of σ • α containing (v, e) is said to be the face bounding the half-edge (v, e), and we say that (v, e) is bounded by f. note that if an edge e of g is incident to a single face f, the two half-edges of e are bounded by f, while if e is incident to two distinct faces f_1, f_2, one half-edge of e is bounded by f_1 and the other is bounded by f_2. for each face f of g, the prover considers an arbitrary half-edge bounded by f and sets it as the root of f (in the remainder, if the root of f is (v, e), we say that f points to v). the prover then assigns integers to the half-edges bounded by f as follows: for any half-edge (v, e) bounded by f, the f-index of (v, e) is the smallest integer i ≥ 0 such that (v, e) = (σ • α)^i (u, e_0), where (u, e_0) denotes the root half-edge of f. now, consider any edge uv, and let f_u be the face bounding the half-edge (u, uv) and let f_v be the face bounding the half-edge (v, uv).
then the edge uv is also given as certificate the f_u-index of (u, uv) together with the identifier of the root half-edge of f_u, and similarly the f_v-index of (v, uv) together with the identifier of the root half-edge of f_v. after having collected these certificates, both u and v are supposed to have all the information concerning the edge uv, namely: the identifiers id(u) and id(v), the u-index i of v, the v-index j of u, the identifiers of the root half-edges of the faces f and f′ bounding u_i and v_j respectively, the f-index of u_i and the f′-index of v_j. note that for any edge uv where i is the u-index of v and j is the v-index of u, and f is the face bounding u_i in σ, the next half-edge on f with respect to u_i is v_{j+1} (see figure 4). the verifier at v checks that the half-edges u_i and v_{j+1} agree on the identifier of the root half-edge of the face f bounding them, so that the knowledge of the root half-edge of f is consistent along the face f. in order to make sure that this root half-edge of f is actually bounded by f, the verifier at v simply checks that the f-index of v_{j+1} is equal to 0 if v_{j+1} is the root half-edge of f, or equal to 1 plus the f-index of u_i otherwise. since the face f is finite and circularly ordered, some half-edge (u, e) bounded by f must have f-index at least the f-index of the next half-edge (u′, e′) on f, and by definition this is only possible if (u′, e′) has f-index 0. once the rooted spanning tree t has been certified, each vertex of g knows its children in t. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/202.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/202.txt new file mode 100644 index 0000000000000000000000000000000000000000..3308a9f078d61ffaddb62042db6fb324bbe53813 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/202.txt @@ -0,0 +1 @@ +to reduce the carbon impact of datacenters, operators have pledged to rely on renewable energy. sometimes they have placed datacenters near cheap, renewable generation, such as google's facility at the dalles, oregon. they also timeshift workloads to use periodically-abundant renewable power. however, few cloud service providers have demonstrated that they can achieve zero-carbon compute that uses only renewable energy and hardware whose embodied energy - the energy used in mining and manufacturing - is zero or near zero. zero-carbon compute avoids the waste of (1) renewable energy that is thrown away or sold at a negative price and (2) embodied energy of hardware that is thrown away when machines are retired. it is this challenge we tackle in this paper. the electric grid is often unable to collect and store energy or transport it to where it could be used. these issues are both fundamental (e.g., intermittent renewable sources, losses due to transport inefficiency, lack of grid interconnection) and practical (e.g., high energy storage costs). while it is hard to move remote power to data centers, moving data and compute near renewable generation is feasible. at the same time, compute, storage, and networking gear within data centers have a fast hardware refresh cycle. since the vast majority of a computer's carbon footprint stems from its manufacture, short lifespans waste significant embodied energy. researchers and industry have found that bulk-compute workloads are the easiest to run on such zero-carbon compute.
such workloads have loose deadlines and can be paused, which allows them to move toward renewable power and stop when it becomes unavailable. although bulk workloads are not particularly efficient on older compute, they make use of hardware that would otherwise go to waste. however, prior work has considered the hardware and software abstractions neither to support zero-carbon compute for general-purpose cloud compute nor to tolerate the data movement and intermittent power inherent in this context. carrying these ideas to fruition gives license to a seemingly-fantastical dream: that there is no fundamental barrier to building (distributed) hyper-scale data centers that have zero or near-zero carbon footprint. since there is often surplus power available somewhere, such data centers could provide high availability (e.g., the wind is blowing in the us midwest at night while the sun is shining in the us west during the day). to realize this dream we must rethink the systems and networking abstractions that underpin the infrastructure of this new type of cloud platform. in this paper we imagine the construction and deployment of terrawatt, a geographically-distributed, zero-carbon compute infrastructure, based on the trends highlighted above. this infrastructure is instantiated as individual shipping containers called sundrops with predominantly old hardware. the three central challenges in designing and building terrawatt are (1) designing abstractions and infrastructure for distributed, intermittently-powered compute that expose some but not all of the vagaries of the underlying power availability (figure 1: gas-fired plants can be adjusted to meet demand, e.g., produce more when prices are higher; wind energy is weather-dependent and often not in phase with demand), (2) addressing the systems challenges in using legacy compute infrastructure to still service meaningful workloads, and (3) designing a framework and metrics for evaluating the energy and carbon footprint of terrawatt at macro-scale and individual tasks run on it at micro-scale. however, few cloud service providers have demonstrated that they can achieve zero-carbon compute that uses only renewable energy and hardware whose embodied energy - the energy used in mining and manufacturing - is zero or near zero. zero-carbon compute avoids the waste of (1) renewable energy that is thrown away or sold at a negative price and (2) embodied energy of hardware that is thrown away when machines are retired. while it is hard to move remote power to data centers, moving data and compute near renewable generation is feasible. however, prior work has considered the hardware and software abstractions neither to support zero-carbon compute for general-purpose cloud compute nor to tolerate the data movement and intermittent power inherent in this context. power availability, (2) addressing the systems challenges in using legacy compute infrastructure to still service meaningful workloads, and (3) designing a framework and metrics for evaluating the energy and carbon footprint of terrawatt at macro-scale and individual tasks run on it at micro-scale. renewables have periods of underproduction, during which not enough renewable energy is available to meet demands, and periods of overproduction, during which too much renewable energy is available, to the point that the excess must be discarded (curtailed) or sold at a negative price point.
negative-price power and curtailed power are together referred to as opportunity power. with 5 twh of opportunity power in caiso in 2017 (a conservative estimate) and a constant growth rate, caiso alone could provide 22 twh of opportunity power by 2025, enough to power the city of los angeles. with estimates of opportunity power in caiso and miso sitting at 7-20 twh available per year, opportunity power in just these two regions has the potential to provide between 10-30% of the power needed by data centers. because each region has ephemeral availability of curtailed power, we must support dynamically relocating customer data and workloads to other locations when a site's available power changes. we argue that new reliability metrics must be created and communicated to users in terrawatt; we hypothesize that we may face the reality of fluctuating power generation by dynamically shifting workloads from one location to another. more concretely, if site a loses renewable power while site b gains power, we need a way to transparently move customer jobs from site a to b. using power production data similar to that in figure 3, we can predict how available power will change over time. even if site b has more power, it may not have an equivalent amount of compute relative to that power. sundrops do not have constant power, so we must scale the network to both take advantage of opportunity power and to scale back when power is scarce. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/203.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/203.txt new file mode 100644 index 0000000000000000000000000000000000000000..7d1b565f6975fad23c4420c0f9c41511064c5c72 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/203.txt @@ -0,0 +1 @@ +the questions of what can be computed by independent computational entities working together and how fast it can be computed serve as a high level motivation for research that spans a broad variety of fields such as population protocols, mobile robots, and programmable matter, among others. mobile robots on graphs, as a paradigm, restricts this question to situations where the entities are restricted in their movement and communication capabilities. within this area, problems that are studied take on the form of either having the robots work together to find something in the graph (e.g., exploration, treasure hunting) or form a certain configuration (e.g., gathering, scattering, pattern formation, convergence). dispersion is one such problem of the latter category. introduced in this setting by augustine and moses jr., it asks the following question. given n robots initially placed arbitrarily on an n node graph, devise an algorithm such that the robots reach a configuration where exactly one robot is present on each node. this problem can be used as an abstraction for the large swath of problems where computational entities must share resources with the constraint that sharing the same resource is much more expensive than searching for an unused resource. the original paper looked at the trade-offs between the time taken to reach this configuration and the memory required by each robot. subsequent papers have expanded the scope of this problem. however, only the recent papers of molla et al. expand the scope of this problem to capture the issue of byzantine faults. more specifically, how can the problem of dispersion be adapted and solved when some fraction of the robots may act in a byzantine manner.
in the real world, where systems must be fault tolerant and errors are the norm instead of the exception, more work must be done to understand and deal with them. the previous work sought to study this problem on a ring and proposed time and memory optimal algorithms for that setting. in this paper, we expand on that work and broaden it in a variety of ways. given n robots, up to f of which are byzantine, initially placed arbitrarily on a graph of n nodes, the non-byzantine robots must re-position themselves autonomously to reach a configuration where each node has at most one non-byzantine robot on it and subsequently terminate. we first develop an algorithm that runs on any graph when robots start in an arbitrary configuration, that takes o(n^4 |λ_good| x(n)) rounds, where λ_good is the length of the largest id among all non-byzantine robots and x(n) is the number of rounds required to explore any graph of n nodes, but tolerates up to ⌊n/2 - 1⌋ byzantine robots. there is an algorithm to solve byzantine dispersion of n robots, when up to n - 1 of them are weak byzantine robots, on an n node graph in the class of graphs where the graph is isomorphic to its quotient graph. since each robot performs find-map independently, without relying on any information from the other robots or their movements, no number of byzantine robots can prevent a non-byzantine robot from generating a quotient graph. from the remaining robots, let the smallest ⌊k/3⌋ id robots form group b and the remaining robots form group c. each robot remembers the set of the other k - 1 ids of the collocated robots and also remembers which robot belongs to which group among those k gathered robots. because if two groups have ⌊k/6 + 1⌋ byzantine robots each, then this contradicts the fact that the total number of byzantine robots is less than or equal to ⌊k/3 - 1⌋ among those k robots. note that, if some byzantine robots change groups and join the byzantine robots in b, making the number of byzantine robots more than ⌊k/6 + 1⌋, then the non-byzantine robots in a ∪ c recognize it, as each robot knows who is in which group. also, in a ∪ c there are more than ⌊k/3 + 1⌋ non-byzantine robots, and the non-byzantine robots of b accept as a token only a group consisting of at least ⌊k/3 + 1⌋ robots from a ∪ c. so, the non-byzantine robots of b correctly find an isomorphic map of the graph and pass this information to other robots such that the robots in a ∪ c also have this map. even if robots change groups, both groups would still have a majority of non-byzantine robots since the total number of byzantine robots we allow is no more than ⌊n/4 - 1⌋. byzantine dispersion can be solved deterministically in an exponential number of rounds in the presence of at most ⌊n/4 - 1⌋ strong byzantine robots when robots start in any arbitrary initial configuration and f is known to all the robots. suppose that instead of restricting the number of robots to n, we now have k robots that are trying to solve byzantine dispersion on an n node graph, where up to f of the robots are byzantine. we show that, even if the robots know the values of n, k, and f, and the byzantine robots are weak, if there are too many byzantine robots, then deterministically solving byzantine dispersion is impossible.
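as a small illustration of the id-based grouping used above, here is a python sketch that splits the k gathered robots into three groups by identifier; the text only states how groups b and c are formed from the robots remaining after group a, so forming group a from the overall smallest ⌊k/3⌋ ids is our assumption.

def split_into_groups(ids):
    """split the gathered robots' ids into groups a, b, c (a's construction is assumed)."""
    ids = sorted(ids)
    k = len(ids)
    third = k // 3
    group_a = ids[:third]                 # assumption: the smallest floor(k/3) ids form group a
    group_b = ids[third:2 * third]        # next smallest floor(k/3) ids, as stated above
    group_c = ids[2 * third:]             # the remaining robots form group c
    return group_a, group_b, group_c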
depending on whether the byzantine robots are weak or strong, whether the robots are initially gathered or not, and the upper bound on the number of byzantine robots present, we have developed several algorithms that exploit those conditions to solve the problem as quickly as possible. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/204.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/204.txt new file mode 100644 index 0000000000000000000000000000000000000000..f27198a8a2218f6e0b89571f5d7dac28ff4ce3eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/204.txt @@ -0,0 +1 @@ +large-scale acoustic modeling for automatic speech recognition (asr) usually relies on speech data from multiple sources. for industrial asr products, oftentimes the acoustic models are customized for one or more target clients. the traditional approach to training acoustic models aggregates the speech data from the clients, along with other public and/or internal speech data if necessary. the training is typically carried out in a centralized fashion on the servers of the asr service provider. with data privacy and protection becoming a crucial issue in information technology, most clients will require that their data stay on-premises, precluding its release to the provider for training. a new approach to training the models under this circumstance is needed. in this paper, we investigate a federated acoustic modeling strategy where the acoustic model is trained collaboratively among the clients with each client having its data locally stored. the clients only exchange their local model updates with the central server at the service provider. this model exchange takes place at a minimal frequency to reduce the communication cost. federated learning (fl) has been widely used in applications such as healthcare and finance where data privacy is a constraint. in the setting of fl, multiple entities collaborate with each other to optimize a machine learning problem under the orchestration of a central server. a global model is learned with each client keeping its data private, in local storage. even though fl was proposed initially in scenarios with a huge number of mobile or edge devices, it was later generalized to a much broader spectrum of applications. in , the initial fl setting with an emphasis on a large number of devices, each with a relatively small amount of data, is referred to as "cross-device" fl, while the setting that we are about to use in this work for federated acoustic modeling is referred to as "cross-silo" fl. cross-silo fl deals with a federation of a few data providers, each with a relatively large amount of siloed data. in the speech community, fl-related efforts have also been reported recently. in , federated learning was used to train an embedded wake word detector on crowdsourced speech. in , various federated averaging schemes and data augmentation techniques have been studied to improve keyword spotting models with data not independent and identically distributed (iid) at the edge.
an interactive system was built in to demonstrate how fl can help transfer learning on acoustic models using edge device data. in , a federated transfer learning platform is introduced with improved performance using enhanced federated averaging via hierarchical optimization and gradient selection. in this paper, we introduce a cross-silo fl framework for joint acoustic modeling with heterogeneous data from multiple clients. its configuration is shown in fig. 1. each client has a local server for data storage and computation. the clients only communicate with the central server of the service provider. model parameters, not raw data, are exchanged between the clients and the central server in each round of communication. local model parameters are uploaded to the central server and aggregated into a global model that is then transmitted back to each client. the communication is synchronous and takes place at a minimal frequency to reduce the communication cost. since client data may come from distinct domains with unbalanced amounts, a fundamental issue with federated acoustic modeling in the real world is dealing with non-iid data. in this work, we propose a client adaptive federated training (caft) strategy to mitigate data heterogeneity. experiments are conducted on 1,150 hours of speech data from multiple domains including public data, internal data, and real-world client data. we compare the performance of the federated strategy under various settings and also compare it with the traditional centralized training. the remainder of the paper is organized as follows. section 2 gives the mathematical formulation of federated acoustic modeling. section 3 and section 4 present the algorithms and implementation of federated training and client adaptive federated training of acoustic models. experimental results are reported in section 5, followed by a discussion in section 6. finally, we conclude the paper with a summary in section 7. the local risk of client i is fi(w) = E_{x∼di}[f(w, x)], where w is the parameters, x ∼ di the data samples from distribution di, and f(w, x) the loss function. federated modeling optimizes the weighted global risk Σi pi fi(w) over w, where pi > 0 and Σi pi = 1 are weights on the local risk functions. it is typical to choose pi = 1/l to make the clients contribute equally. in conventional distributed training, data from multiple sources are first mixed and then distributed to learners. each learner has equal access to the mixed data and therefore the local distribution across learners is iid. in federated learning, however, the data from different sources cannot be mixed and hence the local data distribution is non-iid, which is different from the conventional distributed training. this is a fundamental issue in federated learning. in addition, the amounts of data from the clients could be unbalanced. as a result, the weights in the global loss function in eq. 2 are sometimes set to pi = ni/n, where n = Σ_{i=1}^{l} ni, to make the loss function of each client proportional to its amount of data. the traditional approach to training acoustic models aggregates the speech data from the clients, along with other public and/or internal speech data if necessary. with data privacy and protection becoming a crucial issue in information technology, most clients will require that their data stay on-premises, precluding its release to the provider for training.
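as a minimal, framework-agnostic illustration of the weighted aggregation described above (pi = ni/n) combined with one synchronous round of local mini-batch sgd on a client's data chunk, here is a python sketch; the function names, the gradient callback and the hyperparameters are our assumptions, not the paper's implementation.

import numpy as np

def local_update(w, chunk, grad_fn, lr=0.1, batch_size=32):
    """multi-step mini-batch sgd on one client's local data chunk."""
    w = w.copy()
    for start in range(0, len(chunk), batch_size):
        w -= lr * grad_fn(w, chunk[start:start + batch_size])
    return w

def federated_round(w_global, client_chunks, grad_fn):
    """one communication round: each client updates locally, the server averages
    the returned models with weights p_i = n_i / n proportional to client data size."""
    sizes = np.array([len(c) for c in client_chunks], dtype=float)
    weights = sizes / sizes.sum()
    local_models = [local_update(w_global, c, grad_fn) for c in client_chunks]
    return sum(p * w for p, w in zip(weights, local_models))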
in this paper, we investigate a federated acoustic modeling strategy where the acoustic model is trained collaboratively among the clients with each client having their data locally stored. in, the initial fl setting with an emphasis on a large number of devices, each with a relatively small amount of data, is referred to as "cross-device" fl, while the setting that we are about to use in this work for federated acoustic modeling is referred to as "cross-silo" fl. since client data may come from distinct domains with unbalanced amounts, a fundamental issue with federated acoustic modeling in the real world is dealing with non-iid data. experiments are conducted on 1,150 hours of speech data from multiple domains including public data, internal data, and real-world client data. in each communication round, clients receive the global model from the central server and update it locally using their own local data before sending it back to the central server. the local client data is evenly divided into t chunks and one chunk of data after randomization is used for multi-step mini-batch sgd with a batch size b in each communication round:. it consists of 420 hours of broadcast news (bn) data, 450 hours of internal dictation data, 100 hours of internal meeting data, 140 hours of hospitality (travel and hotel reservation) data and 40 hours of accented data, respectively. it represents a good coverage of public data (bn), internal data (dictation and meeting) and real- world client data (hospitality and accented). the baseline is considered an oracle model as it is trained by mixing all the training data such that training is carried out on iid data. in federated training, the optimization of the local models for each client follows a recipe similar to the baseline except the training uses local data.9, and a batch size of 1,024. at test time, we use the transform estimated from the training data as we assume the training and test data is matched for a given client.in this paper we investigated cross-silo federated acoustic modeling to protect data privacy where the asr service provider collaboratively trains a global acoustic model across multiple clients. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/205.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/205.txt new file mode 100644 index 0000000000000000000000000000000000000000..3308a9f078d61ffaddb62042db6fb324bbe53813 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/205.txt @@ -0,0 +1 @@ +motivated by the ever-growing number of frameworks for parallel computations, we address the complexity of executing multiple jobs in such settings. such frameworks, e.g., mapreduce , typically need to execute a long queue of jobs. a fundamental goal of such systems is to schedule many jobs in parallel, for utilizing as much of the computational power of the system as possible. ideally, this is done by the system in a black-box manner, without the need to modify the jobs and, more importantly, without the need to know their properties and specifically their communication patterns beforehand.in their seminal work, leighton, maggs, and rao studied the special case where each of the to-be-scheduled jobs is a routing protocol that routes a packet through a network along a given path. the goal in their work is to schedule t jobs such that the length of the schedule, i.e., the overall runtime until all t packets have reached their destination, is minimized. 
they showed that there exists an optimal packet-routing schedule of length o(congestion + dilation), where congestion is the maximum number of packets that need to be routed over a single edge of the network and dilation is the maximum length of a path that a packet needs to travel. clearly, both parameters are lower bounds on the length of any schedule, implying that the above schedule is asymptotically optimal. further, leighton, maggs, and rao showed that assigning a random delay to each packet gives a schedule of length o(congestion + dilation • log (t • dilation)).in his beautiful work, ghaffari raised the question of running multiple jobs in the distributed congest model on n nodes. applying the random delays method , he showed a randomized algorithm which after o(dilation•log 2 n) rounds of pre-computation, runs a given a set of jobs in o(congestion+dilation•log n) rounds. here, in a similar spirit to , congestion is the maximum number of messages that need to be sent over a single edge and dilation is the maximum round complexity of all jobs. further, ghaffari showed that this is nearly optimal, by constructing an instance which requires ω(congestion + dilation • log n/ log log n) rounds to schedule.in this paper, we address the t-scheduling problem in the (congested) clique model , in which each of n machines can send o(log n)-bit messages to any other machine in each round. our goal is thus to devise scheduling algorithms that run t jobs in a black-box manner, such that they complete in a number of rounds that beats the trivial solution of simply running the jobs sequentially one after the other, and, ideally, reaches inherent lower bounds that we discuss later. we emphasize that we schedule all jobs' actions on-the-fly during their execution. throughout the paper, we use the terminology that a job is a protocol that n nodes, v 0 , . . . , v n-1 , need to run on some input, and we use the notion of an algorithm for the scheduling procedure that the n machines, p 0 , . . . , p n-1 , execute. each machine p i is given the inputs of the nodes v j i for all jobs j, and the machines run an algorithm which simulates the protocols of their assigned nodes.our contributions are three algorithms for scheduling t jobs in the clique model, which exhibit trade-offs based on the parameters of dilation, localcongestion, and globalcongestion of the set of jobs, which we formally define below. our scheduling algorithms complete within round complexities that are nearly optimal w.r.t. the appropriate parameters. we stress that any black-box scheduling algorithm in which each machine only simulates the nodes that are originally assigned to it must inherently suffer from another type of congestion as a lower bound on its round complexity, namely, the maximum number of messages that all nodes assigned to a single machine have to send or receive, normalized by the bandwidth n that each machine has per round. , n -1 }. assume that for each i ∈ and j ∈ , machine ϕ s (i, j) holds the state s i,j,r of node v i,j after its computation step in round r.forming buckets (locally): each machine p i for each j ∈ uses s i,j,r to locally compute s i,j and t i,j , the number of messages each node v i,j sends and receives in round r, respectively. then, each machine executes the sending step of round r for each of its newly assigned nodes, where a message from v i,j to v i ′ ,j is sent from p ϕ(i,j) to p ϕ(i ′ ,j) . 
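The random-delays idea referenced above can be illustrated with a small simulation: each packet draws an independent delay before it starts moving along its given path, and the worst per-step congestion on any edge is then measured. This is a toy illustration of the general technique, not the LMR or Ghaffari construction itself; the paths and delay range are made up.

```python
import random
from collections import Counter

def schedule_with_random_delays(paths, max_delay, seed=0):
    """Assign each packet an independent random delay and simulate the walk.

    paths: list of node sequences, one per packet (the given routing paths).
    Returns (schedule_length, worst per-step edge congestion).
    """
    rng = random.Random(seed)
    delays = [rng.randint(0, max_delay) for _ in paths]
    usage = Counter()                       # (time_step, edge) -> packets on it
    length = 0
    for path, d in zip(paths, delays):
        for hop, (u, v) in enumerate(zip(path, path[1:])):
            t = d + hop                     # packet traverses edge (u, v) at time t
            usage[(t, (u, v))] += 1
            length = max(length, t + 1)
    return length, max(usage.values())

# congestion = max packets per edge, dilation = longest path
paths = [[0, 1, 2, 3], [4, 1, 2, 5], [6, 1, 2, 7]]   # all share edge (1, 2)
print(schedule_with_random_delays(paths, max_delay=4))
```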
for each i ∈ and j ∈ the machine p f (i,j) receives the state s i,j,r and executes the sending step of round r, the receiving step of round r + 1, and the computation step of round r + 1 for node v i,j .forming chunks: first, each machine p i , for each job j, uses s i,j,r to locally compute the number of messages s i,j that node v i,j sends in round r. otherwise, if we do not find j 0 , the nodes of the jobs in s send less than 2n 2 messages in round r, so we obtain the last chunk and set j k = j k ′ = s. first, each machine p i reads the input for each node v i,j for each j ∈ , and executes the computation step of round r = 0, as a result of which it holds the state s i,j,0 for each of its nodes.6, the round complexity of executing round r for all jobs is o(⌈m r /n 2 + m • t/n⌉), where m r is the number of messages sent in round r.1: input shuffling 2: execution: run dilation many phases, where in phase r each machine p i runs the protocol for its nodes {v π -1 j (i),j | j ∈ }, and messages are routed via claim 2. to this end, the machines jointly compute the value of m r = j∈ i∈ s r i,j , where s r i,j is the number of messages that node v i,j sends in round r. denote by s r i,j = i ′ ∈ s r i ′ ,j •½ π j (i ′ )=i the random variable whose value is the number of messages sent by machine p i for job j (note that there is a single i ′ = π -1 j (i) for which i = π j (i ′ ), but this i ′ is also a random variable).1: sample delays: independently uar pick d j ∈ and broadcast the values 2: execution : run o(d + dilation) phases, where in phase r progress each job j that satisfies r ≥ d j by one round where the messages of all jobs are routed with claim 2. denote by s i,j = r ′ ∈ s r ′ i,j • ½ d j +r ′ =r the random variable whose value is the number of messages sent by machine p i for job j on round r. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/206.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/206.txt new file mode 100644 index 0000000000000000000000000000000000000000..39020bb927832970c4698e2618757fc8a0f01ced --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/206.txt @@ -0,0 +1 @@ +during the past decade, the analysis of distributed systems has seen a dramatic rise in interest. fundamental limitations and structural properties of distributed systems such as limited memory, communication bandwidth, and lack of a central coordinator require coordination by distributed information sharing. analysis of social networks as well as sensor networks - , distributed inference - , and multi-agent control , are applications of distributed learning.we consider the problem of decision-making in a network, where agents observe a stream of private signals and exchange their beliefs to agree on hypothesis set that best describes their observations. fully bayesian solutions require agents to have a complete knowledge of the whole distributed system, such as each other's likelihood functions , . on the other hand, locally bayesian or non-bayesian methods - alternatively suggest agents (i) update their beliefs internally using the bayes rule and (ii) combine their beliefs locally among neighbors using a fusion rule.a broad line of research has been developed to address different aspects of social/cooperative learning - , among which efficient communication is one of the most fundamental challenges - . in such decentralized problems, agents are restricted only to access the information from their local neighbors. 
several works investigated non-bayesian learning from various perspectives for fixed undirected networks. likewise, other communication setups such as directed, non-connected, and time-varying networks - , as well as adaptive update rules , have been explored thoroughly.conventional non-bayesian algorithms require agents to share their beliefs on all hypotheses with their neighbors , , . this, however, imposes large communication loads should the set of hypotheses be large. similarly, the idea of quantized communication has been extensively studied previously - . several methods have been recently proposed to address the communication bottlenecks over the networks - . in social learning moreover, it may not be crucial that agents exchange all their beliefs with each other; instead, a set of compressed messages could be shared through the network. works in and propose algorithms with a compressed message sharing with the assumption of a unique common parameter locally optimal for all agents. furthermore, these algorithms consider unweighted mixing matrices for communication besides specific sparsification and quantization methods. more importantly, no non-asymptotic analysis is available even under those stronger assumptions. in this paper, we work with milder assumptions such as weighted networks and conflicting hypotheses, i.e., the set of parameters that best describes all agents' observations (on average) may not be locally optimal for all agents. we furthermore seek to provide the first non-asymptotic analysis for non-bayesian learning with compressed communication. besides, our algorithm provides a unified framework that accommodates a wide range of compression operators (section ii). in , authors study the possibility of answering binary questions about a particular hypothesis by sending a subset of beliefs. they propose an algorithm with partial information sharing to reduce communication. in contrast, we will present a more general approach that contains various quantization and sparsification operators.this paper proposes a distributed non-bayesian algorithm for social learning where agents exchange their compressed beliefs. the core of our algorithm is inspired by choco-gossip , but we develop a modified version of their results to show convergence in our algorithm. our proposed algorithm inherits choco's benefits like arbitrary compression rate and mild assumptions on the quantizer.in summary, the contributions of this paper are threefold:• we propose a novel algorithm for non-bayesian distributed learning with (possibly) arbitrary compressed communication per round. our algorithm follows a unified consensus mechanism covering various compression operators, including sparsification and quantization func-tions. thus, it provides a simple and general communication rule for agents to leverage a proper compression operator as a black-box module. we also show a memoryefficient version of the algorithm. • we provide a non-asymptotic, explicit, and linear convergence rate of beliefs for our algorithm in probability. we work under the conflicting hypotheses setup, where optimal hypotheses of each agent locally need not be the optimal hypotheses of the network. we also prove exponential asymptotic convergence of the beliefs around the set of optimal hypotheses almost surely. • finally, we show the communication advantages of our algorithm through numerical experiments on various compression operators and multiple network topologies. the remainder of this paper is organized as follows. 
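The compressed belief sharing described here can be sketched as a CHOCO-gossip-style round: agents broadcast a compressed correction to the public copies kept of their beliefs, fuse beliefs through the weighted mixing matrix with a stepsize, and then apply the local Bayesian update. The code below is a schematic reading of that description under assumed choices (a top-k compressor and log-linear fusion); it is not the paper's exact update rule.

```python
import numpy as np

def top_k(v, k):
    """A generic sparsification operator: keep the k largest-magnitude entries."""
    out = np.zeros_like(v)
    idx = np.argsort(np.abs(v))[-k:]
    out[idx] = v[idx]
    return out

def one_round(log_beliefs, public_copies, log_likelihoods, A, gamma, k):
    """One schematic round of non-Bayesian learning with compressed sharing.

    log_beliefs:     (n_agents, n_hyp) private log-beliefs
    public_copies:   (n_agents, n_hyp) approximations everyone holds of each agent
    log_likelihoods: (n_agents, n_hyp) log-likelihood of each agent's new signal
    A:               (n_agents, n_agents) weighted, row-stochastic mixing matrix
    """
    # (1) each agent broadcasts only a compressed correction to its public copy
    q = np.array([top_k(lb - pc, k) for lb, pc in zip(log_beliefs, public_copies)])
    public_copies = public_copies + q
    # (2) consensus step in log space using the (inexact) public copies
    mixed = log_beliefs + gamma * (A @ public_copies - public_copies)
    # (3) local Bayesian update with the newly observed signal, then renormalize
    updated = mixed + log_likelihoods
    updated -= np.log(np.exp(updated).sum(axis=1, keepdims=True))
    return updated, public_copies

# toy run: 3 agents, 4 hypotheses, only 2 coordinates communicated per round
rng = np.random.default_rng(1)
beliefs = np.log(rng.dirichlet(np.ones(4), size=3))
copies = np.zeros_like(beliefs)
A = np.array([[.5, .25, .25], [.25, .5, .25], [.25, .25, .5]])
for _ in range(5):
    loglik = np.log(rng.dirichlet(np.ones(4), size=3))
    beliefs, copies = one_round(beliefs, copies, loglik, A, gamma=0.5, k=2)
print(np.exp(beliefs))      # each row is a probability vector over hypotheses
```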
in section ii, we describe the problem setup and state our main algorithm and results. in section iii, we prove the almost sure asymptotic exponential convergence rate for the proposed algorithm. likewise, we provide the non-asymptotic convergence rate in probability for our algorithm in section iv. in section v, we illustrate the proposed algorithm via numerical experiments. finally, we end with concluding remarks and discussing future works in section vi.we consider the problem of decision-making in a network, where agents observe a stream of private signals and exchange their beliefs to agree on hypothesis set that best describes their observations. on the other hand, locally bayesian or non-bayesian methods-alternatively suggest agents (i) update their beliefs internally using the bayes rule and (ii) combine their beliefs locally among neighbors using a fusion rule.this paper proposes a distributed non-bayesian algorithm for social learning where agents exchange their compressed beliefs. • finally, we show the communication advantages of our algorithm through numerical experiments on various compression operators and multiple network topologies.distributed non-bayesian learning: each agent i ∈ starts with a prior probability distribution, namely the set of prior beliefs μ0 i = such that (i, j) ∈ e, using a bayesian-type update rule.i , then at each round t + 1 for all t ≥ 0, observing a new signal and exchanging (some information of) its beliefs with the neighbors, seeks to update its beliefs using a non-bayesian rule.algorithm 1 distributed non-bayesian learning with compressed communication input: initial beliefs μ0 i ∈ r m , mixing matrix a, compression ratio ω ∈ (0, 1], and learning stepsize γ ∈ (0, 1] procedure :. memory-efficient algorithm: algorithm 1 requires all agents to keep the approximation of their neighbors' beliefs locally.our analysis in section iv suggests that with a high probability and after a sufficient time, the agents' beliefs linearly concentrate around the set of optimal hypotheses, under the update rule proposed in our algorithm (eq. (5)). in this section, we quantify the performance through a series of empirical experiments. first, we fix a sequence of observations, sampled from f i , for each agent i ∈ , and apply our algorithm with one of the compression operators. second, we beliefs evolution: we first compare the performance of our algorithm using the three compression operators introduced in tableiversus the update rule with perfect communication[26,eq. our main results show that the beliefs generated by our proposed algorithm exponentially concentrates around the set of optimal hypotheses. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/207.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/207.txt new file mode 100644 index 0000000000000000000000000000000000000000..515bce20352d67da09a0660117cd2a02327848b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/207.txt @@ -0,0 +1 @@ +embedded devices that control machines in the physical world have been part of industrial processes as well as home and automotive appliances for decades - . in contrast to general-purpose computing, these devices need to fulfill timing constraints. to this end, real-time operating systems (rtoss) are used, which are lightweight and make guarantees towards the timing predictability of tasks . 
usually, a preemptive task scheduler allows to configure different priorities for different concurrently active tasks, so that the most timecritical tasks always take precedence over less critical ones.interrupt requests (irqs) are generated by the hardware and inevitable for systems to function. at the same time, they introduce a level of unpredictability to the process flow. since the corresponding interrupt service routines (isrs) are handled by the processor, the scheduler of an operating system (os) has no control over their execution. however, by keeping the execution times of isrs minimal and con-sidering worst-case scenarios during the development, most traditional embedded systems can handle irqs without missing deadlines. yet, in the past, the environment controlled by embedded systems tended to be self-contained, the number of environmentally-triggered irqs was typically small, and their impact, therefore, predictable. with the advent of the internet of things (iot) in industrial applications this premise has changed. iot microcontrollers come with built-in network chips and are increasingly often network-connected for the sake of remote control, monitoring, and maintenance . iot networks are, however, open by design and thus less policed . especially for critical real-time tasks on networked embedded microcontrollers, this is a threat: the embedded systems have to handle the additional resource consumption of the non-critical networking tasks and the necessary network interface controller (nic) introduces a new source of unpredictability as incoming packets trigger irqs that disturb the flow of scheduled tasks . this might lead to a critical load of interrupts and triggered network tasks in the rtos, invalidating real-time guarantees and thereby lowering the system's dependability.this paper analyzes the impact of network loads on critical real-time tasks running on state-of-the-art microcontrollers with modern rtoss used in the iot. in an initial study, we evaluate timing measurements of critical tasks on microcontrollers running vendor-supplied rtoss, network drivers, and network stack tasks under different network-triggered irq loads. to expose any existing mitigation in the hardware and closed source drivers, a pseudo network driver is designed to serve as a second irq source. measurements are taken and compared between real and pseudo network packet processing. building on our methodology, we perform the following contributions:• measurement of isr-induced delay to real-time tasks.• evaluation of overhead induced by networking tasks under different network loads. • preliminary analysis of the feasibility of iot programming frameworks and ip networking in real-time scenarios.outline. the remainder of the paper is structured as follows. section ii presents the problem statement. section iii introduces our evaluation methodology. section iv presents our empirical results with two different setups. section v discusses the results. section vi describes the related work, while section vii concludes the paper. especially for critical real-time tasks on networked embedded microcontrollers, this is a threat: the embedded systems have to handle the additional resource consumption of the non-critical networking tasks and the necessary network interface controller (nic) introduces a new source of unpredictability as incoming packets trigger irqs that disturb the flow of scheduled tasks. 
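The effect described above (packet ISRs preempting a critical periodic task and inflating its completion time) can be made concrete with a toy model in which every incoming packet steals a fixed slice of CPU time from whatever is running. The parameters below are assumptions for illustration, not the measurements reported later in the text.

```python
def simulate_lateness(sim_time_us, period_us, task_wcet_us, isr_cost_us, pkt_per_s):
    """Toy model of ISR-induced lateness for one periodic critical task.

    Every incoming packet costs `isr_cost_us` of CPU time (ISRs always preempt).
    Returns the worst observed completion lateness of the task in microseconds.
    """
    pkt_interval = 1_000_000 / pkt_per_s if pkt_per_s else float("inf")
    worst = 0.0
    release = 0.0
    while release + period_us <= sim_time_us:
        t, remaining = release, task_wcet_us
        while remaining > 0:
            next_pkt = (int(t / pkt_interval) + 1) * pkt_interval
            run = min(remaining, max(next_pkt - t, 0.0))
            t += run
            remaining -= run
            if remaining > 0:          # preempted by the packet ISR
                t += isr_cost_us
        worst = max(worst, max(t - (release + period_us), 0.0))
        release += period_us
    return worst

for load in (100, 1_000, 10_000):      # packets per second
    print(load, simulate_lateness(1_000_000, period_us=1_000,
                                  task_wcet_us=800, isr_cost_us=30, pkt_per_s=load))
```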
this might lead to a critical load of interrupts and triggered network tasks in the rtos, invalidating real-time guarantees and thereby lowering the system's dependability.this paper analyzes the impact of network loads on critical real-time tasks running on state-of-the-art microcontrollers with modern rtoss used in the iot. in an initial study, we evaluate timing measurements of critical tasks on microcontrollers running vendor-supplied rtoss, network drivers, and network stack tasks under different network-triggered irq loads. this includes the execution of isrs, network drivers, and network stack tasks, as well as the robustness of the entire system under high network loads.1) real network packets: in the first setup, network packets are sent to the devices over a wi-fi connection and are handled by the framework-supplied driver and networking tasks (figure2a). a networking task simulation largely performs the same actions a packet received over a network triggers: upon registering the interrupt which in this case is triggered via an input pin, a short isr is called that preempts the currently running process to copy a packet descriptor to a freertos queue. both show a linear increase in lateness with rising packet load once lateness occurs for priorities chosen below the driver priory with real ip packet impact reaching 50% lateness increase per packet per second. once the packet load is high enough that the network driver needs half of the computing resources for itself, the operating system's scheduler distributes the processing resources equally between the driver and the observed (critical) task. with real network packets the system crashes at 980 packets per second when giving the observed task a lower priority than the wi-fi driver and at 2,000 packets per second with the same priority.network driver tasks, which are responsible for a large share of the impact on timing predictability and hence lateness, are very highly prioritized in the tested systems' frameworks.network driver tasks are currently given a priority level in rtoss that is not suitable for critical real-time systems. by running network handlers with a priority lower than the one used for real-time tasks, windows ce effectively is able to implement isolation and call admission for incoming network activities. we therefore analyzed the impact of network packet floods to the lateness and performance of real-time tasks on two state-of-the-art iot mcus. our results show that the execution of network stack tasks on iot devices can pose a significant threat to realtime guarantees and that the isr executions themselves have a similar, yet less severe impact on critically prioritized tasks in comparison to the entire packet handling. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/208.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/208.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d97d9da39c59bf08e2a80bd134245de1fc9f275 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/208.txt @@ -0,0 +1 @@ +blockchain is known as a disruptive technology that would eliminate the necessity of trusting centralized entities, running, and sometimes abusing, the financial and information networks. for this promise of blockchain systems to be fulfilled, the fundamental problems regarding the underlying technology should be addressed. in that sense, there has been a remarkable effort in the community to solve the blockchain trilemma. 
this trilemma highlights the challenge of simultaneously achieving three main requirements of a blockchain network: (1) scalability, i.e., the throughput of the blockchain with n nodes should scale with n , (2) security, i.e. the blockchain network should function properly even if a constant fraction of the nodes is controlled by adversaries, (3) decentralization, i.e. the storage, communication, and computation resources needed by each node remain constant, as n increases. for a survey on approaches to this problem, refer to .one of the promising approaches to blockchain scalability problem is sharding. in sharding, the system exploits the gain of parallel processing by splitting the nodes into several shards, where each shard forms and maintain a subchain, in parallel with other shards. the drawback is that as the number of nodes maintaining a subchain becomes small, it becomes vulnerable to security attacks. recently, in , a scheme, named polyshard, has been proposed that claims to achieve linear scaling in throughput and security threshold. in addition, the storage and computation resources per node remain constant. this would be a remarkable step toward outlining a solution to the blockchain trilemma. the main idea of polyshard is incorporating coded computing techniques in the context of blockchain.coded computing is based on running the desirable computation tasks on some linear combinations of the inputs, rather than each input individually - . this would help the system to detect and correct the results of adversarial nodes, ignore the results of straggles, and even protect the input data against curious nodes. in particular, in , lagrange coded computing (lcc) has been introduced to compute a general polynomial function of several inputs on a cluster of servers, where some of them are adversaries. in polyshard, lagrange coded computing is used to run the function, verifying the validity of the blocks, on some linear combinations of the blocks produced by the subchains. it is claimed that this would entangle the security of all shards together and improve the security of the blockchain, without increasing the computation and storage cost at each node. blockchain systems can benefit from coding in other ways as well. for example, , , use coding to reduce storage in nodes, uses coding for easing the bootstrap process of new nodes, and , use coding to tackle the availability problem in blockchain.in this paper, we introduce a fundamental attack on polyshard that undermines its security. the attack is based on an adversarial behavior in the system that is unobserved in . in essence, the adversarial nodes can take the control of a few shards and transmit inconsistent blocks to different nodes. the heavy load of communication does not allow nodes to resolve the inconsistencies. these inconsistent versions make the set of equations used for decoding inconsistent. we prove that this inconsistency cannot be resolved by linear decoding, unless the number of nodes n grows at least linear with k d 2 , where k is the number of shards and d is the degree of the verification function of the blockchain system. this prevents the system from tolerating o(n ) adversaries. the general discrepancy attack that results from some malicious nodes distributing inconsistent data, can happen in many systems and is not limited to blockchains. we have studied the fundamental problem of distributed encoding in .in the following, first, we summarize polyshard in section ii. 
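The Lagrange coded computing idea that PolyShard builds on can be sketched in a few lines: the shard blocks are interpolated by a polynomial u(z) at shard points ω_k, each node stores and verifies the coded block u(α_n), and any d(K-1)+1 results suffice to recover f(X_k) for every shard. The sketch below uses scalar stand-ins for blocks and a squaring function as the verification polynomial; all parameters are illustrative.

```python
# Minimal sketch of Lagrange coded computing over a prime field (illustrative
# parameters; scalar "blocks" stand in for each shard's block data).
P = 2_147_483_647                       # a prime modulus for the field F_P

def lagrange_basis(points, z):
    """Evaluate all Lagrange basis polynomials for `points` at z (mod P)."""
    vals = []
    for k, wk in enumerate(points):
        num, den = 1, 1
        for j, wj in enumerate(points):
            if j != k:
                num = num * (z - wj) % P
                den = den * (wk - wj) % P
        vals.append(num * pow(den, -1, P) % P)
    return vals

K, N = 3, 8                             # shards and nodes
d = 2                                   # degree of the verification polynomial f
f = lambda x: x * x % P                 # stand-in for the block verification function
blocks = [11, 22, 33]                   # X_k, one (scalar) block per shard
omegas = list(range(1, K + 1))          # omega_k assigned to shards
alphas = list(range(K + 1, K + 1 + N))  # alpha_n assigned to nodes

# encoding: node n stores u(alpha_n), where u interpolates (omega_k, X_k)
coded = [sum(b * l for b, l in zip(blocks, lagrange_basis(omegas, a))) % P
         for a in alphas]
results = [f(c) for c in coded]         # each node verifies its coded block

# decoding: f(u(z)) has degree d(K-1), so d(K-1)+1 results suffice to recover
# f(X_k) = f(u(omega_k)) by re-interpolating at the shard points omega_k
m = d * (K - 1) + 1
recovered = [sum(r * l for r, l in zip(results[:m], lagrange_basis(alphas[:m], w))) % P
             for w in omegas]
print(recovered, [f(b) for b in blocks])   # the two lists match
```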
then, we explain the attack in section iii, and analyze it in section iv. in this section, we review polyshard , and adopt the notation in . each shard k ∈ the new blocks should be verified by a verification function f t : u t → v with x k (t) and y t-1 k as inputs, where v is a vector space over f.), so that we can compute e t k = 1(h t k ∈ w), and then the verified block y k (t) = e t k x k (t), where w ⊆ v denotes the set of function outputs that affirm x k (t), and 1 is the indicator function.in polyshard, each shard k ∈ and each node n ∈ are associated with distinct constant values ω k ∈ f, and α n ∈ f, respectively. , p t-1 (ω k )) for all k ∈ , node n should determine the polynomial f t (q t (z), p 1 (z), . when a certain number of nodes in shard k ∈ are under the control of the adversary, they can produce more than one new block x k (t), say x.denoting the versions of the received adversarial messages, and ω k ∈ f, k ∈ are distinct constants assigned to message producer nodes.the honest nodes should be able to recover f (x k ), k ∈ h k , after they receive enough number of y's.and we know which polynomial each node has evaluated, we can pick out at least (d(k -1) + 1) consistent evaluations of one polynomial using which we can decode f (x k ), k ∈ h k .we choose an arbitrary set n ⊆ n of size n = rank(m lagrange ) -1, and show that under some adversarial behaviours, f (x k ), k ∈ h k cannot be decoded from y n , n ∈ n , even if we know the adversarial behaviour, i. in other words, we assume that we know the adversarial behaviour, and want to decode f (x k ), k ∈ h k .for the above equations, we used the fact that f (x k ) = f (q v1 (ω k )), k ∈ h k , therefore the first d(k -1)+ 1 variables in u are present in (18). , f (q (v v β ′ ) (z)), have the same value at each point ω k , k ∈ h k . we knew what each node have received from each adversarial node, the bound given in theorem 1 would be tight, and rank(m lagrange ) equations would be enough for recovering f (x k ), k ∈ h k . consider a blockchain system based on polyshard with n = k ln k ∈ n nodes, k ∈ n shards, s := ln k ∈ n nodes in a shard, and β = ck ∈ n adversarial nodes. in a block associated with v, the subcolumns associated with monomials that contrast v are zeros. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/209.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/209.txt new file mode 100644 index 0000000000000000000000000000000000000000..60b184e5860dc0c3121dfc54582ae30d4c0948e4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/209.txt @@ -0,0 +1 @@ +although cryptocurrencies like bitcoin and ethereum are quite popular today, they still lag behind centralized payment systems like visa in terms of transaction rates and time to finality. as of october 2020, bitcoin's and ethereum's network processes an average of 3-4 and 10 transactions per second (tps), respectively. in contrast, visa's global payment system handled a reported 1,700 tps and claimed to be capable of handling more than 24,000 tps . for a cryptocurrency to be adopted universally, it must be able to scale to process transactions at much higher throughput, i.e., tps rate. hence, blockchain protocols must be scalable to be suitable for widespread adoption.however, there are many challenges on the road to scaling blockchain based dls. garay et al. and kiyayas et al. show that existing blockchain protocols suffer from a loss of security properties as we scale the system. 
these security properties are fundamental to the operation of a robust dl. and consider a model with two types of agents, honest and adversarial where the adversary tries to attack the ledger by strategically forking the blockchain. a successful fork would allow the adversary to perform a double-spending attack.in this paper, first, we consider a setting in which all agents are honest and show that disparities in the connection to the peer-topeer network can make the system unfair. in such a case, nodes with a better internet connection will be able to grab a larger share of the reward while those with slower connections might lose out. we show that this disparity significantly increases as we increase the throughput of the system. notice that improving the quality of the overlay network may be more complicated than making protocol-level changes that may be implemented by merely updating the software clients.in literature, it is typically assumed that all the agents have equal access to the network, albeit with some finite delay. however, this is seldom the case in practice where some nodes may have better internet connections than others. for the first time, we introduce asymmetry in modeling network connections by assuming different delays for different nodes. hence, faster nodes would have shorter delays, while slower nodes would have longer delays which in turn results in asymmetry in the rewards collected by these agents. we first analyse consequences of this model in a setting with honest agents and then extend our discussion to rational agents.in order to analyze and quantify network fairness, we introduce two measures of fairness based on network events associated with broadcasting a transaction and broadcasting a block. first, we introduce frontrunning, an event associated with a node receiving a transaction. frontrunning (that we deal with in this paper) occurs when a node confirms a transaction before someone else hears about the transaction. we measure , the probability of this event happening between two fractions of the network. if is high, the faster nodes would consistently be able to grab high-value transactions while the slower ones would only be able to pick low-value ones left out by others. thus, a high would negatively impact some agents' revenue. we show that if we try to scale a bitcoinlike system to the throughput offered by the likes of visa, approaches to nearly 1, which implies that the slower nodes in the system will rarely be able to mine any high-value transactions that would result in these nodes receiving minimal reward in exchange for their mining efforts.we then consider the process of broadcasting a block through the network. publishing fairness quantifies the advantage a node might have over other nodes in broadcasting a block. if a node is able to propagate its block faster than other nodes, in case of an eventual fork, its fork would have a higher probability of being accepted. since we know that at higher throughputs forks become more common, faster nodes would be able to get more blocks accepted while those of slower nodes would frequently be orphaned. thus, the slower nodes, would not be able to even gather the fixed block rewards.as both of these measures deteriorate as with increased throughput, small variations in network access may lead to the system becoming unfair for the slower nodes. this would result in some agents gaining more than their fair share of reward while some agents earn less. 
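The frontrunning measure introduced above can be illustrated with a toy Monte-Carlo model: a fresh transaction reaches fast nodes earlier than slow ones, blocks arrive after memoryless waiting times, and frontrunning occurs when the transaction is confirmed before the slow nodes have even heard of it. The delays, hash-power split, and block intervals below are assumed values, not the paper's model or parameters.

```python
import random

def frontrunning_probability(fast_frac, delay_fast, delay_slow,
                             block_interval, trials=100_000, seed=0):
    """Toy estimate of the chance that a transaction is mined into a block
    before the slower fraction of the network has even received it.

    delay_fast/delay_slow: time for the transaction to reach fast/slow nodes.
    block_interval: mean time between blocks (memoryless mining).
    """
    rng = random.Random(seed)
    hits = 0
    for _ in range(trials):
        t_block = rng.expovariate(1.0 / block_interval)
        miner_is_fast = rng.random() < fast_frac   # hash power ~ node fraction
        t_seen = delay_fast if miner_is_fast else delay_slow
        # frontrunning: the tx is included while slow nodes still have not seen it
        if t_seen <= t_block < delay_slow:
            hits += 1
    return hits / trials

# shrinking the block interval (i.e. raising throughput) makes frontrunning common
for interval in (600.0, 10.0, 1.0):         # seconds between blocks
    print(interval, frontrunning_probability(0.5, delay_fast=0.1,
                                             delay_slow=2.0, block_interval=interval))
```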
this could certainly impact the profitability of the agents that earn less since they still need to pay for the costs associated with mining. thus, it may lead to drop in the agents maintaining the dl since agents that are unable to accumulate enough reward to break even the mining costs might shut down their mining operation or they might adopt strategic behavior to collect more rewards than that obtained by following the protocol honestly, either of which would reduce the security of the blockchain.we discuss possible behavior that a lack of network fairness could elicit from rational agents. their behavior could potentially hurt the stability of the system and reduce the effective throughput of the system. we use simulations to show that as the fairness reduces, the default strategy mining on top of the longest chain does not remain the dominant strategy which means that rational agents gain more reward by intentionally forking the longest chain. this could have adverse effect on the resilience of the blockchain against byzantine adversaries, making it less secure2 . hence, even though we scale the system to increase the throughput, we might not find much practical advantage due to these issues.thus, the potential of blockchain technology is hindered by the capabilities of the underlying networking infrastructure. hence, faster nodes would have shorter delays, while slower nodes would have longer delays which in turn results in asymmetry in the rewards collected by these agents.in order to analyze and quantify network fairness, we introduce two measures of fairness based on network events associated with broadcasting a transaction and broadcasting a block. we show that if we try to scale a bitcoinlike system to the throughput offered by the likes of visa, approaches to nearly 1, which implies that the slower nodes in the system will rarely be able to mine any high-value transactions that would result in these nodes receiving minimal reward in exchange for their mining efforts. if a node is able to propagate its block faster than other nodes, in case of an eventual fork, its fork would have a higher probability of being accepted. since we know that at higher throughputs forks become more common, faster nodes would be able to get more blocks accepted while those of slower nodes would frequently be orphaned. thus, it may lead to drop in the agents maintaining the dl since agents that are unable to accumulate enough reward to break even the mining costs might shut down their mining operation or they might adopt strategic behavior to collect more rewards than that obtained by following the protocol honestly, either of which would reduce the security of the blockchain. by increasing the block creation rate, we risk a node mining a block before it receives the latest block mined by the network. a lack of publishing fairness implies that not only slower nodes are less likely to receive reward transaction fees in the mined block but the are also less likely to receive the fixed block reward associated with mining a new block.01 for the bitcoin network. by broadcasting a transaction to other nodes, a agent is potentially increasing the number of nodes competing to include the transaction in their blocks and collect the corresponding transaction fees. 
a slow node that does not have enough high-value transactions in its mempool might have an incentive to either fork the block mined by a frontrunner (undercutting) or given a fork pick the fork that offers an opportunity to collect a higher transaction fee (petty mining). a slower node could fork a block mined by a faster node containing many high-value transactions due to frontrunning and include those transactions in its own block while leaving some of the transactions for others to include. secondly, even if a node receives the block mined by a slower node later, it would drop the previous block and mine on top of this instead since it offers a higher reward.we also discussed that not only does a lack of fairness impacts the revenue of some agents, it might also create an incentive for them to deviate from the honest mining strategy, which might impact the security of the blockchain system and further exacerbate lack of fairness in rewards. this implies that if a block is mined in a chain having lower _ than another chain, the block might end up earlier in the total block ordering than a block that has already been mined. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/21.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/21.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3e8e12b74d08fd7db8ca4a9ad5037434766aa4a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/21.txt @@ -0,0 +1 @@ +speech quality is a key performance metric of modern telecommunication systems. subjective speech quality can be measured by asking human judges to rate the perceived quality of a speech sample on a 5-point likert scale ranging from 1 ("bad") to 5 ("excellent") and then averaging several ratings to derive a mean opinion score (mos) . while crowd-sourcing offers a way to obtain mos ratings at scale , it remains a costly and time-consuming approach to measure speech quality. recently, there has been substantial research interest in estimating speech quality automatically and blindly using computational models , including a special challenge at interspeech 2022 on non-intrusive objective speech quality assessment (nisqa) . while computational models can be a cost-effective way to obtain speech quality estimates, they are typically data-driven and thus require large amounts of labeled training data, usually obtained via crowd-sourcing . here we investigate whether mos training data can be augmented using complementary labels and data, and whether the quality of crowd-sourced mos ratings can be improved by estimating and correcting rater bias. our approach builds on prior work on multi-task learning with partially labeled data as well as research into addressing quality issues in crowd-sourced data. we assume that related labels e.g., acoustic parameters affecting speech perception (reverberation time and clarity) or additional mos labels rating signal and background quality, may aid mos estimation. here we investigate whether mos training data can be augmented using complementary labels and data, and whether the quality of crowd-sourced mos ratings can be improved by estimating and correcting rater bias., acoustic parameters affecting speech perception (reverberation time and clarity) or additional mos labels rating signal and background quality, may aid mos estimation. 
however, in practice, data sets may exhibit missing labels, or there may be opportunities to increase training data by combining multiple data sets that share a subset of labels. we aim to learn speech mos in a multi-task learning (mtl) framework to leverage useful information in related tasks, increase the available training data size by combining heterogeneous data sets, and improve model generalization. our contribution in this paper is two-fold: (i) we propose a semi-supervised multi-task framework for combining disjoint data sets or data sets with partially missing labels to estimate multiple labels jointly, and (ii) we show preliminary results for two empirical methods to estimate and correct rater bias in speech mos labels. the mos training set consists of speech samples labeled by human raters using crowd-sourcing. the purpose of the proposed multi-task framework is to combine multiple heterogeneous datasets in training, specifically a dataset containing a single mos label per sample (dmos), a dataset containing separate overall, signal, and background quality mos labels per sample (dovr,sig,bak), and a dataset containing a reverberation time (t60) and clarity (c50) label per sample (dt60,c50). similarly to prior work, we hypothesize that speech mos rater bias can be estimated and corrected by comparing the ratings of an individual rater with the average ratings (or moss) of the samples rated by that rater. we propose two methods for addressing speech mos rater bias: a simple bias removal, and a rating correction using a least squares linear fit. to test the proposed rater effect estimation and correction, we carried out experiments on mos labels of 29,294 speech samples, obtained from at least 10 raters each. the remaining ns - 5 raters are used to estimate a second hold-out mos estimate ms from a minimum of five raters, excluding the rater j and the four random raters used to calculate ms,j. fig. 2 shows the distribution of improvements of the unbiased ratings rj over the raw ratings rj with respect to mos estimates ms,j (blue) and ms (red), for the proposed bias correction methods and overall, signal, and background quality mos labels (ovr, sig, bak). as can be seen, both methods seem to improve rater performance for most raters in terms of the root-mean-squared error rmsej = sqrt((1/sj) Σ_s (rs,j - ms)^2). to gauge the potential effect of bias correction on mos estimation performance, we evaluated using unbiased mos estimates separately in training and testing, applied to 392 raters in the mos training set and 2,279 raters in the mos test set. the proposed bias and linear model corrections show promise for improving the quality of crowd-sourced mos labels as well as mos estimation performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/210.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/210.txt new file mode 100644 index 0000000000000000000000000000000000000000..90df3229f229f16ee927c1db51462800395ca47f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/210.txt @@ -0,0 +1 @@ +in leading german print media one can find statements that the bitcoin system consumes about as much electricity as the federal republic of germany, tendency rising (frankfurter allgemeine zeitung, 2020-06-06 ).
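The two rater-correction methods described above (simple bias removal and a least-squares linear fit against hold-out MOS estimates) can be sketched as follows. Which hold-out MOS estimate is used, and the toy rater below, are assumptions for illustration rather than the paper's exact estimator.

```python
import numpy as np

def bias_removal(rater_scores, sample_mos):
    """Simple bias removal: subtract the rater's mean offset from the
    (hold-out) MOS of the samples that rater scored."""
    bias = np.mean(rater_scores - sample_mos)
    return rater_scores - bias

def linear_correction(rater_scores, sample_mos):
    """Least-squares linear fit: model mos ~ a * rating + b for this rater
    and map the raw ratings through the fitted line."""
    A = np.vstack([rater_scores, np.ones_like(rater_scores)]).T
    (a, b), *_ = np.linalg.lstsq(A, sample_mos, rcond=None)
    return a * rater_scores + b

# toy rater who is consistently 0.5 too harsh and slightly compressed
mos = np.array([2.0, 3.0, 3.5, 4.2, 4.8])
raw = 0.8 * mos - 0.5 + np.array([0.05, -0.03, 0.02, 0.0, -0.04])
print(np.round(bias_removal(raw, mos), 2))
print(np.round(linear_correction(raw, mos), 2))
```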
on the other hand, an article was published in the magazine nature climate change in 2018, according to which if bitcoin is adopted on a large scale, the emissions caused by it alone could lead to global warming of more than 2 • c in the next three decades . the faz article was modified on our initiative shortly after its publication in the online version, and the nature article was followed by a controversial scientific discussion about the sense of the underlying assumptions. nevertheless, such publications lead to an incorrect impression in the public regarding the ecological consequences of bitcoin and to an even more problematic generalisation to blockchains.in essence, the statement that bitcoin and also many other cryptocurrencies cause an enormous power consumption is correct and important and has been analysed in detail in numerous publications, including the journals joule and nature sustainability . frequently, however, it is precisely these striking statements that remain present, are taken out of context, are incorrectly generalised or used for lines of argumentation that testify to a lack of understanding of the fundamental interrelation between the high electricity consumption of some cryptocurrencies and economic as well as technical parameters. for example, bitcoin's electricity consumption does not necessarily increase steadily nor does it grow significantly with the number of transactions processed per time unit. moreover, blockchain technology is mentioned in the same breath as bitcoin so frequently, both in public reporting and, to some extent, in the scientific community, that certain prejudices regarding the power consumption of blockchain technology have become generally established.in fact, there are now numerous cryptocurrencies based on technically significantly modified blockchain variants with completely different characteristics with regard to their power consumption. the situation is similar for a large number of implementations of blockchain-based platforms for cross-organisational processes in business and the public sector. in germany, for example, there are projects by automobile manufacturers in the supply chain or the federal office for migration and refugees . as the topic of sustainability is rightly very present in politics and economy , the question of electricity consumption and the sustainability of blockchain technology in general is very often asked in the context of blockchain-related projects for the reasons described above. the presence of this electricity consumption stigma could therefore significantly impede the adoption of blockchain technology and, thus, the exploitation of its benefits .accordingly, in this paper we want to give a comprehensive overview of the electricity consumption of blockchain technology in general in order to provide a solid basis for the general discourse. to this end, we first describe well-known estimates for the energy consumption of bitcoin, but expand on these estimates by a detailed discussion of the recent bitcoin halving, which reveals many of the fundamental interrelations. in the bitcoin halving event, which periodically takes place approximately every four years, the number of new bitcoins created per block and serving as a reward for the miner is halved. this ensures that the number of existing bitcoins remains limited (geometric series). the aim of this construction is to reduce inflation. 
on the other hand, we are also investigating a larger part of the very heterogeneous spectrum of blockchains than just some cryptocurrencies that are technically closely related to bitcoin. with this we extend an article published by us in the magazine business & information systems engineering on the energy consumption of blockchains, which already discusses some of the issues addressed in this article and focuses more on the sustainability discussion of blockchain technol-ogy applications beyond cryptocurrencies. in comparison, we will go into more detail on some aspects only briefly discussed there. in particular, we will quantitatively analyse the implications of using so-called zk-rollups on the power consumption of blockchains in addition to bitcoin halving.despite the fact that blockchain technology is used in a much wider range of applications than in bitcoin and cryptocurrencies, bitcoin also plays a central role in this article. this is due to its problematic high energy consumption. we believe, however, that other applications of blockchain technology are much more important in the long run.for other well-known pow blockchains, such as ethereum, bitcoin cash, bitcoin sv and litecoin (these are the largest pow-based cryptocurrencies after bitcoin in terms of market capitalisation), the same estimation formulas apply as for bitcoin, except that there are other hashing algorithms, specialised mining hardware and parameters such as average block times and block rewards. as the market capitalisation of bitcoin is higher than the cumulative market capitalisation of all other cryptocurrencies, it can be assumed that the cumulative electricity consumption of all pow cryptocurrencies is not much more than twice that of bitcoin, and a "best guess" is at a factor of approximately 1.an important observation for pow cryptocurrencies is that their power consumption cannot be reduced in the long term by increasing the energy efficiency of hardware: on the one hand, this can be seen from the fact that the estimation of the upper bound depends only on electricity prices and not on total computing power. since the "slowest" permitted node dictates the performance of the system, bitcoin and other cryptocurrency systems can only process a few transactions per second -currently, the storage space required for the complete bitcoin blockchain requires just under 300 gb and is growing by about 60 gb per year; a multiple of transactions per time unit would also multiply the growth accordingly. hence, simply dividing the power consumption by the number of transactions in pow-based cryptocurrencies yields an enormous amount of energy on a per transaction basis: for bitcoin, the electricity consumption for a single transaction would then amount to several hundred kwh and thus correspond to the electricity consumption of an average german household in several weeks to months.in the following, a more detailed analysis of bitcoin's electricity consumption will be carried out by analysing the recent bitcoin halving and deriving implications for the long-term development of electricity consumption. 
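The economic upper bound discussed above (rational miners will not spend more on electricity than the mining rewards are worth, so the bound depends on prices rather than on total computing power) and the often-quoted per-transaction figure can be written down directly. The numbers below are illustrative orders of magnitude, not the article's estimates.

```python
def upper_bound_twh_per_year(btc_price_usd, block_reward_btc, fees_btc_per_block,
                             electricity_price_usd_per_kwh, blocks_per_day=144):
    """Economic upper bound on PoW electricity use: miners will not spend more
    on electricity than the block rewards and fees are worth."""
    revenue_per_day = blocks_per_day * (block_reward_btc + fees_btc_per_block) * btc_price_usd
    kwh_per_day = revenue_per_day / electricity_price_usd_per_kwh
    return kwh_per_day * 365 / 1e9          # kWh -> TWh per year

def kwh_per_transaction(twh_per_year, tx_per_second):
    """Energy attributed to one transaction when total consumption is simply
    divided by throughput (the often-quoted per-transaction figure)."""
    kwh_per_year = twh_per_year * 1e9
    tx_per_year = tx_per_second * 3600 * 24 * 365
    return kwh_per_year / tx_per_year

# illustrative numbers only (roughly mid-2020 order of magnitude)
bound = upper_bound_twh_per_year(btc_price_usd=9_000, block_reward_btc=6.25,
                                 fees_btc_per_block=0.5,
                                 electricity_price_usd_per_kwh=0.05)
print(round(bound, 1), "TWh/a upper bound")
print(round(kwh_per_transaction(bound, tx_per_second=4)), "kWh per transaction")
```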
the comparison of the development of bitcoin prices and hash rate over the past 12 months, as shown in figure1, suggests that the upper bound described above is indeed a fairly good estimate of actual electricity consumption: with relatively stable bitcoin prices until march 2020, the observed hash rate increased continuously; apparently, the initiation or expansion of mining activities, which is associated with the procurement of appropriate hardware, was considered worthwhile. this could be explained by the fact that due to the reduction in the value of bitcoin and thus the level of the mining incentive, miners with higher variable costs, for example due to obsolete hardware or high electricity prices, were forced out of mining here for a short period of time.05 usd/kwh, the halving can force old, less energy-efficient hardware out of the market in the short term, whereas more modern, more energy-efficient hardware remaines profitable and, at lower electricity prices, mining with older hardware also makes economic sense.05 usd/kwh and that after the halving, due to the increased competition from cheap electricity tariffs, mining hardware that was initially forced out of the market was also used again and thus the actual electricity consumption even rose above the upper limit of 0., the prices for bitcoin and electricity as well as the transaction fees, this orientation of the actual electricity consumption at the upper limit also means that, due to the periodic halvings, bitcoin's electricity consumption will decrease significantly in the long term. although the power consumption of a blockchain is generally much higher than that of a corresponding centralized solution (here by a factor of around 50) due to the redundancy (and to some extent also due to the consensus and generally the more extensive use of cryptographic methods), it may still only account for a very small part of the power consumption of the entire it solution or the entire process, even if clients and backups are included. zkrollups were primarily developed to solve scalability and performance problems of blockchains. the total power consumption of all these pow cryptocurrencies is still mainly caused by bitcoin and amounts to between 20 -50 % of the german power consumption, with a best guess for bitcoin at about 100 twh/a or 20 % of the german power consumption. the driving force behind the electricity consumption is the price of bitcoin and not the number of transactions, and if the economic environment remains the same, the periodic halving of block rewards in many pow-based cryptocurrencies would in the long term lead to a significant reduction in electricity consumption. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/211.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/211.txt new file mode 100644 index 0000000000000000000000000000000000000000..cef286ac20cd3f3c693133f562ad911e146db1ec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/211.txt @@ -0,0 +1 @@ +internet of things (iot) is a revolutionary technology that allows users to interact with smart and connected devices in novel ways. smart farming has recently emerged as a popular iot domain that involves data-driven and automated farm processes using a variety of iot sensors from soil sensors, temperature/humidity sensors, ph probe to actuators such as robots and drones . 
with increased agricultural yield, reduced cost, and optimal resource usage, smart farming can solve the significant issue of meeting the global food demand by 2050 . cloud and fog computing paradigms have been leveraged to satisfy the computational need of iot applications. while cloud supports resource-intensive applications, fog nodes can run delay-sensitive applications by bringing virtualized resources to the network edge .distributed data flow (ddf) is an effective approach to model iot applications as a collection of interdependent modules that can be deployed in a distributed computing environment . however, the efforts have been mostly limited to smart city , and smart healthcare domains. since smart farming involves different functionalities, it is necessary to investigate the ddf approach for modelling smart farming applications. in this paper, we propose a ddf based model for soil management application that consists of a number of modules with varying resource needs. moreover, the proposed model has multiple application loops, each of which generates a specific type of result (e.g., status, alerts, and recommendation) for the end-user. we evaluate the performance of the proposed model considering both cloudbased and fog-based deployment.the remainder of this paper is organized as follows. section 2 presents the iot application model. section 3 discusses the experimental results. we draw conclusions and outline some future works in section 4. smart farming has recently emerged as a popular iot domain that involves data-driven and automated farm processes using a variety of iot sensors from soil sensors, temperature/humidity sensors, ph probe to actuators such as robots and drones. in this paper, we propose a ddf based model for soil management application that consists of a number of modules with varying resource needs. the iot application receives data from the soil sensors and send the processed data to the end-user device denoted as display in fig. the application consists of five modules: 1) sense: this module collects data (tuple t 1 ) from sensors, removes incomplete and noisy information, and sends the filtered data (tuple t 2 ) to the data aggregation module, 2) data aggregation: this module combines the filtered data by using temporal/spatial aggregation to remove redundancy. it sends tuples t 3 , t 4 , and t 5 to status generation, event detection, and the soil analytics modules, respectively, 3) status generation: it provides users with real-time status of soil being monitored in the desired format, 4) event detection: this module processes the tuple t 4 and sends critical alerts (tuple t 7 ) to the end-user on detecting abnormal soil condition such as low moisture, 5) soil analytics: this is a lightweight analytic module that provides recommendations for optimal usage of water. the tier-2 fog server has more capacity than the tier-1 fog servers. we increase the number of sensors per tier-1 fog server from 3 to 21. in fog-based deployment, we place the 'sense" and "data aggregation" module on tier-1 fog servers. the modules "status generation", "event detection", and "soil analytics" are placed on the tier-2 fog server. network usage represents the number of data bytes transmitted through the network during the execution of the application. we observe that network usage of cloud-based strategy is much higher than that of fog-based strategy irrespective of the number of sensors. 
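The latency advantage of the fog-based placement can be illustrated with a small back-of-the-envelope model of the alert loop (sense, data aggregation, event detection, display). The link latencies and placements below are assumed values for illustration, not the simulation parameters used in the experiments.

```python
# Illustrative latency comparison of cloud-based vs. fog-based placement for
# the alert loop: sense -> data aggregation -> event detection -> display.
LINK_MS = {("sensor", "fog1"): 2, ("sensor", "cloud"): 112,
           ("fog1", "fog2"): 10, ("fog2", "user"): 12, ("cloud", "user"): 110}

def hop_latency(src, dst):
    if src == dst:                         # modules co-located on one host
        return 0
    return LINK_MS.get((src, dst), LINK_MS.get((dst, src), 0))

def loop_latency(hosts):
    """Total one-way network latency along a chain of module hosts."""
    return sum(hop_latency(a, b) for a, b in zip(hosts, hosts[1:]))

cloud_hosts = ["sensor", "cloud", "cloud", "cloud", "user"]  # all modules in cloud
fog_hosts = ["sensor", "fog1", "fog1", "fog2", "user"]       # tier-1/tier-2 split
print("cloud-based alert latency:", loop_latency(cloud_hosts), "ms")
print("fog-based   alert latency:", loop_latency(fog_hosts), "ms")
```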
also, each strategy experiences a slight increase in the end-to-end latency with an increase in the number of sensors.in this paper, we propose a model for iot based soil management application and evaluate the model using cloudbased and fog-based deployments. our experimental results show that fog-based strategy outperforms the cloud-based strategy both in terms of latency and network usage. although fog servers offer lower delay and save bandwidth, application modules with higher resource requirements need to be placed on the cloud data center. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/212.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/212.txt new file mode 100644 index 0000000000000000000000000000000000000000..6d291be0cf2163d0ddb2f1248b40140079aab198 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/212.txt @@ -0,0 +1 @@ +in many areas of the fourth industrial revolution, high-performance computing (hpc) provides important infrastructures for enabling large-scale data analytics. reliable computing performance is vital for cloud computing, data storage and management, and optimization (sakellariou et al., 2018). thus, the investigation of performance variability of hpc has drawn great attention in recent research (cameron et al., 2019). the variability of hpc performance exists in several aspects, of which the input/output (io) variability is of great interest. the io performance is usually measured by the io throughput (i.e., data transfer speed), which can vary from run to run. the variability of io throughput can be affected by various system factors such as cpu frequency, the number of threads, io operation mode, and io scheduler, through a complicated relationship (cameron et al., 2019).to configure an hpc system with reliable io performance, one important task is to find an optimal configuration (i.e., a certain level combination of system factors) that optimizes the io performance measure. the search for the optimized configuration is a challenging task since the functional relationship between io performance measure and system factors is unknown and complicated, especially for the hpc system containing both quantitative and qualitative inputs. to address this challenge, sequential designs in computer experiments (sacks et al. 1989;santner et al. 2003;fang et al. 2005) can be used. it is a novel application of sequential designs of experiments for the hpc performance optimization.the execution of computer experiment of hpc is time consuming. for example, it can take hours or days to collect the hpc io performance in a single run under certain system configurations. therefore, statistical surrogates are often adopted for statistical analysis and uncertainty quantification (sacks et al. 1989;bingham et al. 2014). one fundamental issue is the design of experiments, i.e., how to choose the settings of input variables to run computer experiments to obtain the output responses for the objectives of interest. the commonly used designs are space-filling designs (lin and tang 2015;joseph 2016;xiao and xu 2017;wang et al. 2018). to entertain both qualitative and quantitative inputs, space-filling designs such as sliced latin hypercube designs and marginally coupled designs have been introduced (qian 2012; deng et al. 2015;he et al. 2017;he et al. 
2019).however, these designs are proposed with the aim of building an accurate emulator and thus they are not designated for other objectives such as the optimization problem we consider here. an objective-oriented design approach is to use sequential designs which find the new input setting sequentially for the objective of interest (picheny et al. 2016;sauer et al. 2020). such an approach has appeared being efficient and advantageous as indicated in many applications (gramacy 2020). for example, bingham et al. (2014) adopted sequential designs for choosing input settings of a computer simulator for the maximization of the tidal power in the bay of fundy, nova scotia, canada (ranjan et al. 2011). one popular approach in the sequential design framework is to use an expected improvement (ei) criterion (jones et al. 1998;schonlau et al. 1998;sóbester et al. 2005;ponweiser et al. 2008). an ei criterion was initially introduced for the global optimization of black box functions (computer simulators) by jones et al. (1998). since then, various ei criteria have been proposed for other objectives such as contour estimation (ranjan et al. 2008), quantile estimation (roy 2008), estimating the probability of rare events and system failure (bichon et al. 2009), and prediction (yang et al. 2020). other criteria in the sequential design framework include the upper confidence bound (srinivas et al. 2010)), the knowledge gradient method (frazier et al. 2008;scott et al. 2011), and hierarchical expected improvement (chen et al. 2019).however, to the best of our knowledge, these sequential design approaches including those using ei criteria have exclusively focused on computer experiments with only quantitative inputs. these approaches may not be directly applicable to computer experiments, such as the hpc experiment, with both qualitative and quantitative factors.in this article, our scope is to develop a sequential design approach for efficient optimization of computer experiments with both qualitative and quantitative (qq) factors. in the hpc example, the io operation mode is a qualitative variable, while the cpu frequency is a quantitative variable. we propose an adaptive composite exploitation and exploration (cee) method for the global optimization for computer experiments with qq factors. a new criterion combining the predictive mean and standard deviation based on the additive gaussian process (agp) (deng et al. 2017) is introduced to search for follow-up design points.similar to the ei and other criteria, the proposed criterion also aims to achieve the balance between exploitation and exploration when searching for the next input setting. what is fundamentally different and makes this criterion novel is that the search design region at each stage via the new criterion is adaptive in the sense that the design region changes with the data collected. theoretical justifications are provided to support the choice of the adaptive design region. in addition, the proposed cee criterion has a simple expression with meaningful interpretation to choose the next design point sequentially based on the agp as the surrogate. the sequential design procedure with the proposed criterion appears to be efficient in both computation and finding the optimal setting, i.e., the setting of optimizing the response output.the remainder of this paper is organized as follows. section 2 briefly reviews the additive gaussian process model. 
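for orientation, the ei criterion of jones et al. (1998) referred to above selects, for a minimization problem, the next input maximizing

EI(x) = (y_min - mu_n(x)) * Phi(z) + sigma_n(x) * phi(z),   with   z = (y_min - mu_n(x)) / sigma_n(x),

where mu_n and sigma_n are the gaussian-process predictive mean and standard deviation after n runs, y_min is the best response observed so far, and Phi and phi are the standard normal cdf and pdf. this is the standard closed form; the cee criterion proposed in the paper instead combines the predictive mean and standard deviation over an adaptive design region, as described below.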
section 3 presents the details of the proposed adaptive cee method and its theoretical justification on the choice of adaptive design region. in section 4, several numerical examples are conducted to illustrate the effectiveness of the proposed method. section 5 presents the case study of hpc experiments, where the proposed method is demonstrated to efficiently find the optimal setting for hpc performance optimization.we conclude this work with some discussion in section 6.in this section, we describe the proposed adaptive cee sequential design based on the additive gaussian process model for computer experiments with quantitative and qualitative factors. that is, given n collected data points (w t t , y t ), t = 1, • • • , n, the key interest is to find the next design point w n+1 ∈ a for the computer experiment such that we can promptly find the optimal setting of w * to reach the smallest value of output y(w). the design point with a small value of µ 0|n (w 0 ) will support local exploitation, while the design point with a large value of σ 0|n (w 0 ) will encourage the exploration. therefore, we propose an adaptive cee criterion for finding the next design point where the design region is adaptive in each iteration of the sequential procedure. the design region a n consists of the points μ0|n (w 0 ) -β 0|n σ0|n (w 0 ) is less than the minimum of μ0|n (w 0 ) + β 0|n σ0|n (w 0 ). moreover, lemma 2 below shows that min w 0 ∈a y(w 0 ) belongs to the interval with the probability greater than 1 -α.this section provides some connections between the proposed adaptive cee criterion and the ei criterion for the sequential design of computer experiments with quantitative and qualitative factors. the adaptive design region of the proposed method also makes the search of the next design point more efficient.the first three approaches are sequential designs, each of which chooses the next design point by the given method, gets its response and updates the model estimation, and then continues to choose the next design point until the stopping criterion is met.to start the proposed adaptive cee sequential design, we obtain an initial training data of three points, where a three-level full factorial design(wu and hamada 2009)is used for the qualitative factor and a random latin hypercube design(mckay et al. in figure2, the small solid dots are the initial three points, and the points which are labeled "4" to "9" are six sequential points. in each simulation, a 9-run initial design is adopted, where a three-level fractional factorial design is used for the qualitative factors and a random latin hypercube design is used for the quantitative factors. the functions f i , g j and h k have the expression as follows: for this example, a 9-run initial design is adopted, where a three-level fractional factorial design is used for the qualitative factors and a random latin hypercube design is used for quantitative factors. table1summarizes the input factors, of which the quantitative factors are the cpu clock frequency (x 1 ) and the number of threads (x 2 ), and the qualitative factors are the io operation mode (z 1 ) with three levels, the io scheduler (z 2 ) with three levels and the vm io scheduler (z 3 ) with three levels.we apply the proposed adaptive cee sequential design to find the optimal configuration to achieve the maximum of y sn , the ratio between the mean and sd of the throughput and the ei method, respectively. 
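the adaptive-region construction quoted above can be sketched as follows. this is a plausible reading under stated assumptions (lower/upper confidence bounds built from the additive gaussian process posterior, and a pure-exploration pick inside the region); it is not the paper's exact cee rule.

import numpy as np

def cee_next_point(mu, sigma, beta, candidates):
    # mu, sigma: GP predictive means / standard deviations at the candidate settings
    #            (assumed to come from an additive GP over the qq inputs)
    # beta:      confidence-width multiplier (beta_{0|n} in the paper's notation)
    lcb = mu - beta * sigma
    ucb = mu + beta * sigma
    in_region = lcb <= ucb.min()          # adaptive design region A_n from the text
    idx = np.where(in_region)[0]
    best = idx[np.argmax(sigma[idx])]     # assumption: explore (largest sigma) within A_n
    return candidates[best]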
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/213.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/213.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5f29baec536f04e0bf7e0de3a84e0923c39d6ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/213.txt @@ -0,0 +1 @@ +the internet of things (iot), cloud computing, and machine learning will allow for more adaptive cities, houses, and infrastructures - . however, the vision of intelligent cyberphysical systems will not be implemented with centralized cloud resources alone as they are too far away from sensorequipped iot devices, yielding high latencies, network bottlenecks, unnecessary energy consumption through wide-area communication, and in many use cases also considerable privacy concerns. addressing these limitations of central clouds, new distributed computing paradigms for the iot such as edge and fog computing promise resources in closer proximity to sensor-equipped edge devices .while continuous software testing is commonly applied in cloud environments, this is not yet the case for the emerging distributed computing environments of the iot. today's cloud applications are tested through extensive use of virtualization, cluster orchestration tools, and ci/cd pipelines, allowing engineers to continuously deploy and test new software versions in so called staging environments. these environments are set up to replicate the production environment as closely as possible to assure testing under realistic circumstances before application deployment. however, creating such staging environments is much more challenging for iot architectures, which are significantly more heterogeneous, distributed, and dynamically changing. these challenges manifest themselves in a lack of adequate tools.at the same time, continuous testing in realistic test environments is absolutely essential for many iot applications. for instance, if applications are to continuously optimize the operation of critical urban infrastructures such as transport systems, water infrastructures, and energy grids on the basis of collected sensor data, new versions of such applications must be tested thoroughly before they can be deployed. it needs to be verified that an application does meet key non-functional requirements such as for its dependability and performance. therefore, the application behavior has to be tested under the expected distributed computing environment conditions and also variations of these conditions, given the dynamic nature of iot environments.in this paper, we present a first glimpse of marvis1 , a new framework that combines hybrid testbeds with domain-specific simulations to allow testing distributed iot applications in adequate test environments. specifically, we are developing marvis with the following requirements in mind.1) representativity: enable testing of iot applications in realistic conditions, so non-functional requirements such as the responsiveness and dependability can be verified.while continuous software testing is commonly applied in cloud environments, this is not yet the case for the emerging distributed computing environments of the iot. 
therefore, the application behavior has to be tested under the expected distributed computing environment conditions and also variations of these conditions, given the dynamic nature of iot environments.in this paper, we present a first glimpse of marvis1, a new framework that combines hybrid testbeds with domain-specific simulations to allow testing distributed iot applications in adequate test environments. 3) versatility: enable testing of applications in various specific environments, so different iot architectures and application deployments can be evaluated. 5) usability: enable efficient specification, provisioning, and monitoring of the testing environments, so users can quickly test new versions of their distributed iot applications. we demonstrate marvis in this paper with a scenario, in which containers, a network simulator, and a traffic simulator are integrated automatically to test a distributed iot application.to test the behavior of distributed iot applications, software engineers currently make use of various different tools such as physical testbeds, emulated environments, and simulators. however, these approaches usually fall short when large-scale iot environments need to be tested with specific environment conditions, when the testing of non-functional requirements demands a certain degree of realism, or when the actual application code needs to be tested. this model-based approach focuses on a single 'thing' under test and is therefore not comparable to marvis, which aims to be a hybrid testbed for distributed iot applications incorporating many devices, emulations, and simulations. however, in this work only a single simulation tool is addressed, whereas marvis is a generalized framework.our approach in marvis combines existing domain-specific simulators and emulators with hardware testbeds to create an environment that resembles the production environment as close as possible, thus enabling a realistic evaluation of distributed iot applications. as presented in figure1, distributed iot applications are executed on virtual or physical nodes that are connected via a network simulation or physical networks.virtual nodes, physical nodes, and networks can be integrated in a single scenario, allowing the testing of applications in large-scale networks while also observing their physical effect on single nodes.marvis integrates multiple simulators and testbeds for different domain environments and facilitates the exchange of data between these simulators, testbeds, the network simulation and the nodes running the applications under test, as illustrated in figure2. marvis integrates virtual and physical nodes and co-simulated domain environments around a central network simulation, aiming to provide a full staging environment for testing distributed iot applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/214.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/214.txt new file mode 100644 index 0000000000000000000000000000000000000000..00015210142c8caa064b87a102f5f784c24ba601 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/214.txt @@ -0,0 +1 @@ +transformer-based architectures such as bert have recently lead to breakthroughs in a variety of language-related tasks, such as document classification, sentiment analysis, question answering, and various forms of text-mining (vaswani et al., 2017;devlin et al., 2019;adhikari et al., 2019;sun et al., 2019a;yang et al., 2019;lee et al., 2020). 
these models create semantic representations of text, which can subsequently be used in many downstream tasks (devlin et al., 2019). the training process for transformers typically includes two phases: during pre-training, the model learns to extract semantic representations from large, taskindependent corpora. the pre-training is followed by task-specific fine-tuning on a separate dataset to optimize model performance further.in this paper, we study the effects of fine-tuning transformer-based architectures in a federated learning (fl) setting. in fl, models are trained in a decentralized fashion on a number of local compute instances, called clients, and intermittently aggregated and synchronized via a central server. as such, fl is a solution for distributed compute, as well as distributed data, and provides a level of privacy with regards to the sharing of personal or otherwise sensitive data. model aggregation is commonly performed via averaging of the weights of the individual client models, called federated averaging (fedavg) (mcmahan et al., 2017a).depending on the application, the number of clients in an fl setting can differ wildly. in instances where smartphones are used as clients, their number can reach into the millions (hard et al., 2018), whereas settings with higher compute requirements and more data per client will often range between a handful and a few dozens of clients. here, we focus on the latter, as training large language models requires a lot of compute. a potential application of this is the medical field, in which automated analyses of electronic health records yield enormous potential for diagnostics and treatment-related insights (zeng et al., 2018).our contribution: we provide a comprehensive overview of the applicability of the federated learning setting to large language models. to this end, we work with a fixed computation budget for each task, and use a fixed total amount of data while varying the number of clients between which the data is split up. this way, we isolate the effects of distributing data over several clients for distributed compute. we leave comparisons with a fixed amount of data per client and varying noni.i.d. data distributions between clients for future work. the main contributions of this paper are the following: (1) we provide a comparison of three popular transformer-based language models in the federated learning setting, using the imdb, yelp f, and ag news datasets. ( 2) we analyze how the number of clients impacts task performance across tasks and model architectures. in fl, models are trained in a decentralized fashion on a number of local compute instances, called clients, and intermittently aggregated and synchronized via a central server. in instances where smartphones are used as clients, their number can reach into the millions(hard et al. to this end, we work with a fixed computation budget for each task, and use a fixed total amount of data while varying the number of clients between which the data is split up. the main contributions of this paper are the following: (1) we provide a comparison of three popular transformer-based language models in the federated learning setting, using the imdb, yelp f, and ag news datasets., 2015). data.we construct several experiments to evaluate how well federated learning scales to an exponentially increasing number of clients. 
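the fedavg aggregation step mentioned above (mcmahan et al., 2017) reduces to a data-size-weighted average of the client weights; a minimal sketch, with an illustrative list-of-arrays layout for the model parameters:

import numpy as np

def federated_averaging(client_weights, client_sizes):
    # client_weights: one list of numpy arrays (layers) per client
    # client_sizes:   number of local training examples per client
    total = float(sum(client_sizes))
    n_layers = len(client_weights[0])
    return [
        sum((n / total) * w[layer] for w, n in zip(client_weights, client_sizes))
        for layer in range(n_layers)
    ]

# usage: global_layers = federated_averaging([clientA_layers, clientB_layers], [1200, 800])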
finally, since runs with a larger number of clients converge more slowly, we allow those runs to continue to a second threshold and report the number of rounds required to reach 90% of the baseline performance, similar tomcmahan et al. while all models perform worse with more clients, albert and bert mostly reach the target accuracy within the allocated number of rounds until 32 clients are used. distilbert on the other is unable to reach the target accuracy at 16 clients for yelp f, and as low as 4 clients for imdb. with bert) show a gradual degradation with the number of clients, others configurations are more adversely affected by the increasing number of clients. in some instances the accuracy stays constant over a large period, sometimes even at the random classifier baseline for the whole (distilbert on imdb) or part (distilbert on ag news) of the experiment when the number of clients is high. we demonstrated that bert and albert scale well up to 32 clients with no sharp decline in performance (figure1), but found distilbert to struggle at 16 clients in the yelp f and ag news tasks, and with as low as 4 clients in the imdb task, with a substantial drop in performance compared to the baseline. this may be related to the variability in the movie review data, adding to a larger interclient difference in data distribution when data is put into smaller partitions, resulting in a larger difference between the client models taking part in the federated averaging.in conclusion, we have demonstrated the applicability of the federated learning paradigm and evaluated it on a number of transformer-based models up to 32 clients. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/215.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/215.txt new file mode 100644 index 0000000000000000000000000000000000000000..59640a9e15214e02d5f4970f98a1dd980bec92cb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/215.txt @@ -0,0 +1 @@ +the advent of blockchain technology and, more in general, of the distributed ledger technologies (dlts), has attracted and focused the attention on the study of distributed systems. it has been revised the consensus problem, with new proposals being analysed and tested, because of the necessity of agreement on the publication of block, to ensure integrity, and to handle faulty nodes in blockchains . analogously, the increasing interest in quantum computation is leading to a deeper understanding of quantum distributed systems, and eventually it will be implemented a world wide quantum network, already know as quantum internet -the quantum counterpart of the world wide web. in this scenario, the study of quantum distributed systems is fundamental for the correct functioning of such a network . 
to ensure agreement in a quantum network, thus, it is vital the development of quantum consensus protocols: from a formal definition, to actual proposals.our paper is structured as follows: an introduction to computational complexity theory will show the importance of quantum computation, then a brief presentation about quantum computing theory will give an insight about quantum formalism to a reader unfamiliar with quantum mechanics, finally an overview about quantum consensus is shown.computation theory was born and it is developing following the evolution of information technology: from the first computers, through the transistors, arriving to the today's supercomputers and the enormous amount of data produced every day . even though the computational power of our devices nowadays is unbelievable high, if compared with the devices from a few years ago, classical computers seem not to be fit for the next generation of problems in almost any field: chemistry, biology, medical technology, cryptography, optimisation, finance, etc. because of those and other applications, in last forty years, the possibility to build a computer based on quantum mechanics is focusing the attention and efforts of researchers, governments and companies. the history of quantum computation started in 1980, when paul benioff proposed the first quantum mechanical model of a computer . the next year, richard feynman gave a talk at the first conference on the physics of computation, in which he stated that quantum mechanical phenomena cannot be efficiently simulated on a classical computer, proposing a basic model for a quantum computer .since then, a lot of discoveries have been done. among the most important ones, there are the description of the first universal quantum turing machine, i.e. the definition of the first universal quantum computer, that can simulate any other quantum turing machine with at most a polynomial slowdown , and the ground-breaking discover of the shor's algorithm, that allows to solve in polynomial time the factoring and the discrete logarithms problem . shor's algorithm sparked a tremendous interest in quantum computers. the reason lies with the fact that shor's algorithm can theoretically break many of the cryptosystems in use today .therefore, the discover of quantum algorithms solving some problems believed to be computationally hard, triggered a rephrase of the computational complexity theory. a problem is said to be computationally easy, hence it belongs to the computational class p, if it can be solved by a classical computer in polynomial time, respect to the number of bits needed to describe the problem. a problem is said to be computationally hard if the required resources (time) for solving it rises super polynomially fast (often exponentially) with the input size. such problems belongs to the computational complexity class np (non-deterministic polynomial time). regarding those problems classes as sets, it is "strongly believed" that p⊂np. "strongly believed" means that nowadays it is only know that p⊆np, but it is strong the trust in the fact that np problems cannot be solved in polynomial time by a classical computer.in this framework quantum computers play their role. shor's algorithm (and other algorithms, e.g. deutsch's algorithm ) demonstrated that certain problems in the np set could be solved efficiently (i.e. in polynomial time) using a quantum computer. 
it leads to a new complexity class, bqp (bounded-error quantum polynomial time), that includes all the problems in p and some problems in np (it is still not clear how many).in figure 1 the relationship between the different complexity classes is shown.figure 1: the relationship between classical and quantum complexity classes. while it is strongly assumed that bqp is larger than p and encompasses some problems in np, it remains unclear how the classes are exactly related. figure from .in this context, scientists started talking about the possible "quantum supremacy" for what concerns computation. it was conjectured that indeed quantum computers would solve hard problems efficiently, but an experimental proof was needed. in october 2019, google and nasa claimed to have achieved and proved the quantum supremacy , . the result was impressive, even though there was scepticism, because some researchers pointed out that the problem solved by this quantum annealer (not an universal quantum turing machine) could have been solved by a classical supercomputer in a comparable amount of time; moreover, since the machine is not a quantum computer, strictly speaking, they believe it is improper to address the achievement of quantum supremacy in this particular context . analogously, the increasing interest in quantum computation is leading to a deeper understanding of quantum distributed systems, and eventually it will be implemented a world wide quantum network, already know as quantum internet-the quantum counterpart of the world wide web. to ensure agreement in a quantum network, thus, it is vital the development of quantum consensus protocols: from a formal definition, to actual proposals.our paper is structured as follows: an introduction to computational complexity theory will show the importance of quantum computation, then a brief presentation about quantum computing theory will give an insight about quantum formalism to a reader unfamiliar with quantum mechanics, finally an overview about quantum consensus is shown. the next year, richard feynman gave a talk at the first conference on the physics of computation, in which he stated that quantum mechanical phenomena cannot be efficiently simulated on a classical computer, proposing a basic model for a quantum computer. the definition of the first universal quantum computer, that can simulate any other quantum turing machine with at most a polynomial slowdown, and the ground-breaking discover of the shor's algorithm, that allows to solve in polynomial time the factoring and the discrete logarithms problem. the result was impressive, even though there was scepticism, because some researchers pointed out that the problem solved by this quantum annealer (not an universal quantum turing machine) could have been solved by a classical supercomputer in a comparable amount of time; moreover, since the machine is not a quantum computer, strictly speaking, they believe it is improper to address the achievement of quantum supremacy in this particular context. in general, a definition of consensus for quantum systems has to take into account the fact that a quantum network, however it may be built, is intrinsically different respect to a classical network.the definition of quantum consensus may be, in principle, based on its classical counterpart: even though, a classical probabilistic consensus must be considered because of the inherent stochastic nature of quantum measurement. 
they define four classes of consensus, namely σexpectation consensus, reduced state consensus, symmetric state consensus, and single σ-measurement consensus, are basing their definitions on symmetries and invariants of the system.quantum protocols and algorithms to achieve consensus over a quantum network have been proposed by researchers. a categorisation of such algorithms leads to the identification of four categories, based on the quantum mechanical feature used to reach consensus: state invariance respect to permutations, correlations due to entangled states, state evolution by means of quantum measurements, and by means of quantum key distribution (qkd) protocols. authors prove that a quantum consensus algorithm makes quantum states converge to a symmetric-state consensus from arbitrary initial states preserving purity; authors also show that quantum consensus algorithms can generate a w-state entanglement. jafarizadehstudies the optimisation of the convergence rate of the quantum consensus algorithm over quantum network with n qudits.as prescribed by the postulates of quantum mechanics, the operation of measuring a quantum state determines the collapse of the quantum state in an eigenstate of the operator involved into the measurement.it is important to notice that the literature about quantum consensus is still scarce, especially if compared to the amount of works on quantum distributed systems and networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/216.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/216.txt new file mode 100644 index 0000000000000000000000000000000000000000..12fc4cf31d9619a19dd4df41fc47e4e45513992f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/216.txt @@ -0,0 +1 @@ +on the one hand, due to the emergence of the general data protection regulation, more and more people are paying attention to privacy protection in machine learning. on the other hand, in real situations, more and more data island appears, making traditional machine learning difficult to achieve. generally speaking, ai service needs data provided by users to train on a server. however, in this process, the data may come from various institutions, and although the institution wants to get a perfect model, it does not like leaking its own data. therefore, in order to break data island and achieve privacy protection, google proposed federated learning in 2016. in fl, ai services can perform machine learning without collecting data from various institutions. fl allows the model to be trained locally and send encrypted information to the center server. then the center server aggregates received data and send back to every client. finally client could update parameter by themselves. for the method of updating parameters, there are gd, sgd, mini-batch sgd methods, but these methods are all first-order accuracy. therefore, we consider a higherorder accuracy method, the newton method, but in the newton method, the hessian matrix may be irreversible and even if it does be a inverse matrix, it is also extremely difficult to compute it. therefore, we consider adopting the quasi-newton method. among them, dfp and bfgs are two representative algorithms. yang et al. implemented bfgs under the algorithm architecture of logistic regression and applied it to vertical federated learning. but in terms of communication, there are still problems. 
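for reference, the two representative quasi-newton updates named above maintain an approximation H of the inverse hessian from the step s = x_{k+1} - x_k and the gradient difference y = g_{k+1} - g_k; the sketch below shows only the textbook forms. how the paper combines dfp and bfgs, and how these quantities are split and encrypted between parties in the vertical federated setting, is not reproduced here.

import numpy as np

def bfgs_inverse_update(H, s, y):
    # standard BFGS update of the inverse-Hessian approximation
    rho = 1.0 / float(y @ s)
    I = np.eye(len(s))
    V = I - rho * np.outer(s, y)
    return V @ H @ V.T + rho * np.outer(s, s)

def dfp_inverse_update(H, s, y):
    # standard DFP update of the inverse-Hessian approximation
    Hy = H @ y
    return H - np.outer(Hy, Hy) / float(y @ Hy) + np.outer(s, s) / float(y @ s)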
therefore, we combined dfp and bfgs to propose a new algorithm, which is used in the logistic regression algorithm of vertical federated learning. in the end, compared to other algorithm, our algorithm can achieve better results with less communication times.on the one hand, due to the emergence of the general data protection regulation, more and more people are paying attention to privacy protection in machine learning. on the other hand, in real situations, more and more data island appears, making traditional machine learning difficult to achieve. however, in this process, the data may come from various institutions, and although the institution wants to get a perfect model, it does not like leaking its own data. therefore, in order to break data island and achieve privacy protection, googleproposed federated learning in 2016. therefore, we consider a higherorder accuracy method, the newton method, but in the newton method, the hessian matrix may be irreversible and even if it does be a inverse matrix, it is also extremely difficult to compute it. yanget al. but in terms of communication, there are still problems. however, the convergence of the first-order gradient descent method is lower than that of the second-order newton method. the calculation is very large when calculating the inverse of the hesian matrix, so the quasi-newton method came into being, bfgs and dfp, as the two representative methods. a series of works on horizontal federated learning has been proposed,, each client has a part of the sample, but has all the data attributes. in vertical federated learning, each client holds part of the data attributes, and the samples are overlapped. yangand others use l-bfgs to implement logistic regression algorithm of vertical federated learning.in this work, inspired by bfgs in logistic regression of vertical federated learning, we exlore a broader framework, bdfl, that is capable of managing heterogeneous federated environments when ensuring privacy security.the basic idea of newton's method is to use the first-order gradient and the second-order gradient(hessian) at the iteration point to approximate the objective function with the quadratic function, and then use the minimum point of the quadratic model as the new iteration point.in this article, we use the quasi-newton method to replace the gradient descent method on the purpose of exchanging a larger amount of calculation for a smaller communication cost. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/217.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/217.txt new file mode 100644 index 0000000000000000000000000000000000000000..14c965207aefba680d644b6cd7ae025ed4937fe6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/217.txt @@ -0,0 +1 @@ +the internet of things (iot) is an important emerging technological paradigm whereby billions of ubiquitous sensor and actuator devices are connected to enable the development of applications across a wide number of domains. an increasing number of these applications are expected to perform in a capacity where the services they provide must meet certain minimum quality of service (qos) requirements. 
this is especially relevant for applications used in the real-time monitoring and control of critical infrastructures, such as: human health-care, transportation systems, electrical generation, natural disaster prediction, and telecommunications, to name but a few - .as the number of internet-connected devices increases yearon-year, so does the volume of data being produced. in order to process these large data streams, distributed stream processing frameworks (dspf) such as storm , and flink allow for the deployment of analytics pipelines which utilize the processing power of a cluster of commodity nodes. therefore, these frameworks are being utilized increasingly for the processing of iot data streams - . applications developed within these systems are, in principle, required to operate indefinitely on an unbounded stream of continuous data in an environment where partial failures are to be expected as these applications scale. consequently, dspfs feature high availability modes, implement fault tolerance mechanisms by default, and expose a rich set of continually evolving features. the end result being that the way in which these systems are composed has a high level of complexity and number of configuration options. a quick scan of the official documentation reveals that flink has over 300 options across 28 categories 1 , and spark closer to 400 across 26 categories 2 .system configuration has an impact on performance and reliability. yet, with the vast number of options available for tuning, i.e. framework settings, job parameters, resource selections, etc., the effects of which are not always well understood or straightforward to determine. that is, finding the best combination of resource selections and system configurations is difficult to estimate upfront both by experts and automatically by optimization tools as it is highly dependent on a number of key factors: the analytics application which exhibits its own unique operational characteristics; the cluster environment which is often not known before deployment and may vary over time, i.e. network topologies and physical hardware; and the data which is variable based on the characteristics of the input data, loads from other applications, and ingestion rates. this is especially true in environments consisting of multiple connected distributed systems making up larger application architectures, such as: resource managers, messaging queues, distributed file systems, scalable databases, etc. at the same time, critical iot applications typically have defined qos requirements with regards to performance, reliability, etc, which a configuration should meet .currently, the most common way of tuning configuration parameters is for it to be done manually by performance engineers, usually requiring several hours of investigation and testing . these engineers require detailed knowledge of the specific dspf itself and the cluster environment in order to find a system configuration that falls inline with the aforementioned qos constraints. approaches have been proposed at finding more precise and less time-consuming methods for the automatic tuning of dspf parameters - . these typically focus on only a limited number of settings, while there are numerous points of configurations in practice with many dependencies between them. 
a solution is needed which is complementary to these existing perfor- mance modelling approaches, which provides an approach for gathering analytics data through testing and monitoring.for this purpose, we propose an approach for the effective testing of system configurations for critical iot analytics pipelines in realistic conditions. we implemented our approach using a prototype called timon which allows for the testing of multiple different versions of system configurations in parallel within an environment that behaves like production using real streaming data. in this way, operators can safely and efficiently experiment with potential system configurations to understand what impact these will have when used in production.the remainder of the paper is structured as follows: section ii discusses the related work with regards to configuring dspfs, section iii presents a typical architecture for critical iot analytics pipelines, section iv presents our approach to configuration testing, section v describes our evaluation where we present our experiments and findings, and section vi discusses our findings with conclusions. in order to process these large data streams, distributed stream processing frameworks (dspf) such as storm, and flinkallow for the deployment of analytics pipelines which utilize the processing power of a cluster of commodity nodes. that is, finding the best combination of resource selections and system configurations is difficult to estimate upfront both by experts and automatically by optimization tools as it is highly dependent on a number of key factors: the analytics application which exhibits its own unique operational characteristics; the cluster environment which is often not known before deployment and may vary over time, i.for this purpose, we propose an approach for the effective testing of system configurations for critical iot analytics pipelines in realistic conditions. we implemented our approach using a prototype called timon which allows for the testing of multiple different versions of system configurations in parallel within an environment that behaves like production using real streaming data.the remainder of the paper is structured as follows: section ii discusses the related work with regards to configuring dspfs, section iii presents a typical architecture for critical iot analytics pipelines, section iv presents our approach to configuration testing, section v describes our evaluation where we present our experiments and findings, and section vi discusses our findings with conclusions.from a high level perspective, determining the best system configuration for any particular stream processing application can be found by comparing it to: the same application executing in the same environment while ingesting the same data but using alternate variations of the configuration set. in this diagram we can see the virtual cluster environment where both the production pipeline and shorterlived configuration testing pipelines exist and are managed by the container orchestrator. each configuration testing pipeline is composed in the same way as the production pipeline and would be executing the same iot analytics application. importantly, all configuration testing pipelines will also process the same input data as the production pipeline. 
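as a minimal illustration of the parallel configuration-testing idea, the snippet below builds a few flink configuration variants next to a baseline; the option keys are real flink settings, but the values, the variant set, and the deploy helper are hypothetical.

BASELINE = {"parallelism.default": 4, "taskmanager.numberOfTaskSlots": 2}

VARIANTS = [
    {**BASELINE, "parallelism.default": 8},
    {**BASELINE, "taskmanager.numberOfTaskSlots": 4},
]

# for cfg in VARIANTS:
#     deploy_testing_pipeline(cfg)   # hypothetical orchestration call; each variant
#                                    # consumes the same input stream as production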
when notifications and alarms produced by a configuration testing pipelines needs to be written back to the distributed streaming platform, they will each have their own unique messaging queues.as part of the assumed iot stream processing architecture described in the previous section, each iot analytics pipeline records metrics in a time series database.• an apache flink (high availability) cluster of 11 instances (1 jobmanagers and 10 taskmanagers); • an apache zookeeper cluster of 3 instances for distributed coordination; • an apache cassandra cluster of 3 instances for archival of processed data; and • a single influxdb time series database instance for collection of performance measurements.this paper presented an approach which allows for the effective testing of system configuration of critical iot analytics pipelines in realistic conditions. for this, we assume a typical distributed architecture for critical iot analytics pipelines and utilize containerization as well as container-orchestration in order to replicate instances of this architecture in parallel, each with their own configuration set. we showed how using such a testing approach in the production environment can capture the runtime behaviors of stream processing applications in order to investigate the individual performance of each configuration set. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/218.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/218.txt new file mode 100644 index 0000000000000000000000000000000000000000..d238a86b41dbc0bfcfaf70d80069b226f279ef64 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/218.txt @@ -0,0 +1 @@ +with new coming technological instruments in all fields of science the need to improve computational algorithms, fully utilise hardware architectures, improve softwares, and compete in upcoming data challenges , is becoming ever more important. today, by looking to the worlds most powerful machines , we can be certain that to reach exaflops performance, heterogeneous computing is necessary. clear leaders in this area are systems using the power of gpus, which provide excellent energy efficiency . to efficiently use such a system we need to ensure that the applications ideally execute functions (kernels) concurrently and the data transfers are hidden by computations. beginning with cuda 7, we can manage this asynchronous behaviour by introducing "streams". one of the simplest guides to using this functionality is provided by harris ].in this paper we study the implementation of streams into the astroaccelerate (aa) project. aa is a gpuenabled software package that focuses on enabling realtime processing of time-domain radio-astronomy data. it uses the cuda programming language for nvidia gpus and can perform tasks such as dedispersion, single pulse searching and fourier domain acceleration searches (fdas) in real time on very large data-sets which are comparable to those which will be produced by next generation radiotelescopes such as the square kilometre array (ska).the aa code can be divided into few main parts as show in fig. 1. the first part performs the preparation of system and reading user data. the second part consists of mapping tasks to suitable resources and allocation of all necessary memory. the third part, which is the part of the code in which we have implemented streams, is responsible for the dedispersion of data and single pulse searching. 
the fourth part offers optional features like fdas (fourier domain acceleration search) or periodicity searching.to achieve the desired asynchronous behaviour (as shown in fig. 2, bottom right) of data transfers and computing, we split the input signal to n time chunks (these chunks represent the amount of signal that can fit to gpu memory), and again divide them by the number of desired cuda streams into smaller chunks. these smaller chunks are then associated with a stream id. this process is repeated for all time chunks until all data are processed. care has to be taken to distribute the correct chunk of memory to the correct cuda stream. it uses the cuda programming language for nvidia gpus and can perform tasks such as dedispersion, single pulse searchingand fourier domain acceleration searches (fdas)[dimoudi et al. the third part, which is the part of the code in which we have implemented streams, is responsible for the dedispersion of data and single pulse searching.2, bottom right) of data transfers and computing, we split the input signal to n time chunks (these chunks represent the amount of signal that can fit to gpu memory), and again divide them by the number of desired cuda streams into smaller chunks.to successfully obtain overlapping data transfers and coherent execution of kernels we perform the following steps: 1) create cuda streams; 2) pinning host memory; 3) substitute the commands cudamemcpy to cudamemcpyasync; 4) associate streams id to kernels and memory transfers; 5) appropriately change all other explicit (wait event commands) and implicit (e. memory set, memory allocation) synchronisation commands to non-blocking ones.the host memory is pageable by default, which means that the gpu cannot address the data directly. to be able to overlap kernel execution and data transfers the host memory involved must be pinnedin cuda cudamallochost() or cudahostalloc() is used to allocate pinned memory, to deallocate use cudafreehost().when applying the above mentioned points we find that an increase in the throughput of the memory transfers by ∼30 % can be achieved, along with the benefit of partially overlapping kernels execution. the gaps are caused by the fact that when the copy is invoked the driver must allocate a temporary page-locked (pinned) host array and transfer the data there (see fig. to be precise the time saved is just moved to the allocation and deallocation of memory where we see significant increase (note the increase will be even higher for systems with larger host memory). to decrease time caused by the allocation/deallocation of the host memory we create smaller temporary buffers to move the data from host (big pageable memory) to host (small pinned memory). using this approach significant time can be saved in the preparation of the host memory.however, the host to host copies block the cuda streams. as dynamic allocation and deallocation of memory is used in this step which gives rise to synchronous behaviour, we have moved memory allocations from the computation phase to the preparation/memory allocation phase of the code. we have run into several issues such as stream event barriers not working with atomic operations or significant increases in allocation time for pinned host memory. 
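the splitting scheme described above (time chunks that fit gpu memory, each divided further among the cuda streams and tagged with a stream id) can be sketched in python; variable names are illustrative and the real astroaccelerate code differs.

def assign_chunks_to_streams(n_samples, chunk_len, n_streams):
    # chunk_len: number of samples that fit in GPU memory at once
    plan = []
    for start in range(0, n_samples, chunk_len):
        chunk = range(start, min(start + chunk_len, n_samples))
        sub = max(1, len(chunk) // n_streams)
        for i, s in enumerate(range(chunk.start, chunk.stop, sub)):
            plan.append({"stream_id": i % n_streams,       # round-robin over streams
                         "begin": s,
                         "end": min(s + sub, chunk.stop)})
    return plan

# usage: plan = assign_chunks_to_streams(n_samples=1_000_000, chunk_len=250_000, n_streams=4)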
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/219.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/219.txt new file mode 100644 index 0000000000000000000000000000000000000000..e19f066a07767adcde20dc8bf7bf5b158a489333 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/219.txt @@ -0,0 +1 @@ +clustering is the most important learning task in unsupervised learning, which aims at identifying meaningful subgroups in data by exploiting dissimilarity or distance between individual observations. a lot of different algorithms have been proposed, that introduce various notions of what clusters are and how they may be discovered in data. procedures like dbscan or optics derive clusters directly from data by inspecting the neighborhood of individual points, whereas e.g. k-means clustering minimizes a specific loss function by employing an em-style algorithm. the optimization of submodular functions constitutes another option for identifying clusters in data. from a practical point of view, these functions aim at measuring the "utility" or the "representativity" of a particular subset of data. furthermore, they maintain a property of diminishing returns, which during optimization leans towards the identification of rather compact summarizations of the observed data. found representative data points in summaries may then be exploited to derive clusters, serving as their respective cluster "exemplars". clustering using submodular functions might be more favorable than conventional methods, as optimization is possible with strong theoretical guarantees and is also feasible in streaming data settings that require inherently real-time processing.submodular functions like the informative vector machine , have been proposed, but require the specification of a positive-definite mercer kernel, which, depending on the concrete data, might be cumbersome. exemplar-based clustering, as a submodular function, is more flexible as it allows for arbitrary, non-negative dissimilarity functions and enables end-users to include domain knowledge they might have from "traditional" clustering. however, this particular function is expensive to compute as the time complexity is o(n•k) for datasets of size n and subsets (i.e. the set of desired cluster exemplars) of size k. this problem gains additional importance as optimizers for submodular functions usually evaluate l sets for their function value when choosing the next optimization step, which leads to a total time complexity of o(n • k • l).in this work, we want to present a novel method to accelerate the evaluation of exemplar-based clustering. to accomplish this acceleration we employ graphics processing units (gpu), which are wide-spread co-processors and well-established to deal with massively parallelizable tasks. we also discuss, how the necessities of submodular function optimizers might be incorporated into the algorithm design and which hardware pitfalls have to be kept in mind to exploit the computational power of gpus at best.our contributions are as follows:• we present the first algorithm to evaluate exemplarbased clustering on gpus and discuss, how this procedure exploits hardware features like shared memory and coalesced access to minimize runtime. 
• we show, how the procedure is capable of running under low-memory conditions, which are common among gpus.• we conduct a series of experiments to compare different cpu implementations with our gpu algorithm and determine the possible benefits w.r.t. the achievable run-time and speedup. this paper is organized as the following: in section ii we give a short overview of practical applications to submodular functions and of current acceleration strategies for clustering. in section iii, we formally establish submodular functions and the important cardinality-constrained optimization problem. furthermore, we introduce the greedy optimizer. in section iv we will establish the submodular function of exemplar-based clustering, briefly explain how it measures representativity, discuss, how an implementation for cpus might look like and which acceleration possibilities are feasible, before introducing our gpu algorithm. in section v we present our experiments and the achieved results. in section vi we summarize our work and give an outlook on how this work might be refined. every thread from a single block has access to comparably small shared memory, which in contrast to global memory represents on-chip memory.we establish a formal framework to discuss the mapping into the grid-block structure, as follows: let c = (d g , d b ) be a specific kernel configuration to solve a particular problem, with d g = (g x , g y , g z ) and d b = (b x , b y , b z ) the dimensioning of the grid and the block respectively. hence, we load vectors v i ∈ v from global memory into shared memory, that serves as a low latency, user-managed cache. let γ be the number of bytes every v i ∈ v requires to be stored, then we can determine d b = (b x , b y , b z = 1) to be as follows:. for our algorithm, we store the v matrix in a columnwise fashion and copy it in a single memory transaction to the gpu. we do not consider further optimization, because no further memory accesses on the gpu, beyond loading vectors from global to shared memory, are conducted, as discussed above. the threads t 1 , t 2 and t 3 are assigned to the evaluation matrices s 1 , s 2 and s 3 and successively access the information of vectorized matrix, which leads to coalesced access and to as few memory transactions as possible. if threads of a single warp access a particular memory segment, then these memory accesses become coalesced into a single memory transaction, which is beneficial to runtime and data throughput. conversely, if threads of a single warp access different memory segments then more memory transactions are needed to access the same data. since v i is already loaded into shared memory, we focus on the various memory accesses to s i from different gpu kernel threads. to exploit coalesced memory access, we have to optimize loading vectors from s j , whereas j varies for different threads of the same warp and the same block. nevertheless, this is convenient for the algorithm, as no variable evaluation set sizes need to be considered, which simplifies addressing the needed data in gpu memory.3) chunking: while for cpu computations we can usually assume that enough memory is available to solve a specific problem, this assumption may not hold for gpus due to lack of memory expandability. this might be the case, when v is already very large, which suggests either the use of lower floating-point precision (reducing the required memory to solve problem instances) or better suited hardware with larger memory. 
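to make the cost structure concrete, a plain cpu reference for the greedy optimization of the exemplar-based clustering utility might look as follows; squared euclidean dissimilarity and the simplification of the auxiliary "phantom" exemplar are assumptions. each greedy step evaluates o(n) candidate sets, each costing o(n*k), which is exactly the work the gpu algorithm parallelizes.

import numpy as np

def exemplar_loss(X, exemplar_idx):
    # L(S): mean dissimilarity of every point to its nearest exemplar in S
    d = ((X[:, None, :] - X[exemplar_idx][None, :, :]) ** 2).sum(-1)
    return d.min(axis=1).mean()

def greedy_exemplars(X, k):
    # greedily grow S, at each step adding the candidate that lowers L(S) the most
    S = []
    for _ in range(k):
        best = min((exemplar_loss(X, S + [j]), j)
                   for j in range(len(X)) if j not in S)
        S.append(best[1])
    return S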
we especially took care of choosing an appropriate memory layout and established a way to deal with scarce gpu memory by chunking a given problem into smaller sub-problems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/22.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/22.txt new file mode 100644 index 0000000000000000000000000000000000000000..70520b4c1055bf8d9849752787742e264ec09ae9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/22.txt @@ -0,0 +1 @@ +despite the popularity of neural network based and boosting models, there is still a big interest to logic based methods of machine learning, which support explicit representation of learned hypotheses. the inherent interpretability of these methods is what makes them particularly useful in critical domains such as, e.g., information security, medicine, automated control, etc.of particular interest in the field of logic based machine learning is probabilistic law discovery (pld) , which is a variant of probabilistic rule learning. it allows for balancing between the completeness of the set of the learned hypotheses and computational expenses, and in the limit it guarantees learning the complete set of hypotheses true on data.while having some similarities with decision tree/random forest methods, pld based models uniquely combine ensembling features with the property of being inherently interpretable. the explicitness of hypotheses learned by pld allows for building glass-box classification, clusterization, regression, or adaptive control models, which also support straightforward integration of domain knowledge. the transparency of pld based models makes them accessible for post-hoc meta-analysis to support transfer learning, conceptual abstraction, symmetry detection, etc. similarly to some other logic based ml methods, the disadvantages of pld are due to the complexity of rule learning, which is related to np-hard problems and thus, direct implementations of pld face efficiency problems when applied to datasets with big numbers of features.currently several implementations of pld are known, which combat the dimensionality problem with the help of heuristics. they have been benchmarked on different ml tasks against other well-known models, e.g., decision tress, neural networks, associative rules, in domains such as medicine , finance , bioinformatics , adaptive control .the aim of this paper is to provide a concise and accessible introduction into probabilistic law discovery, which covers the base learning algorithms, optimization techniques, and application guidelines. the exposition is based on the latest implementation of pld, which provides a reasonable balance between the completeness of the learned hypotheses and computational complexity. , p n ) is defined similarly). 
, p n (x) → r(x) is said to be a probabilistic law on a given dataset if it has a non-zero probability p and the following holds: the probability of any other rule with the same conclusion and a premise given by a proper subset of predicates p 1 , .for a given dataset and a predicate language, it outputs a set of rules in this language, which are probabilistic laws on this dataset in general the algorithm does not guarantee to find all the probabilistic laws on the dataset it allows for balancing between the completeness of the obtained set of laws and computation time in the limit (having unbounded computational resources) the algorithm computes the complete set of probabilistic laws on the given data.after that, the algorithm selects from the obtained probabilistic laws those ones, which have exactly d predicates in the premise, and it starts refining only these rules (by successively adding predicates one by one to the premises), while checking whether their probability increases (i. , p 4 → r.the heuristic used in the algorithm is based on the assumption that probabilistic laws are typically arranged into chains, in which each subsequent law is obtained from the previous one by refinement with a single predicate, for example: { p 1 , p 2 → r , p 1 , p 2 , p 3 → r , p 1 , p 2 , p 3 , p 4 → r }.probabilistic prediction of features (the classification task)identification of features/specific feature values that an object must have in order to be assigned to a particular class (abductive classification) combining features into subsets closed wrt probabilistic laws and computing subsets of objects corresponding to these closed subsets (hierarchical object and feature clusterization) prediction of value intervals for numeric features (interval regression) anomaly detection and time series analysisbuilding self-learning agent systems that interact with environments (reinforcement learning) control of modular systems with many degrees of freedom, in particular, adaptive robotic controlwe comment on solutions to these tasks in section 5. the nodes in this graph are the rules enumerated by the algorithm and there is an edge from a rule r to r if r is a subrule of r , i. in this example illustration, all rules up to the depth d = 2 are enumerated and it turns out that there are probabilistic laws a, c → r and b, c → r of depth 2. to compute a set of probabilistic laws with conclusion r, the derivation graph is used by the pld algorithm as follows.statistics for n is calculated -n is connected by an edge to each subrule of n from the previous graph layer it is verified whether n is a probabilistic law; the set reg k is defined to consist of all probabilistic laws from n odes k , which meet a statistical criterion at step k > d (additional enumeration), the set of all single-predicate refinements of probabilistic laws from reg k-1 is computed., there are no probabilistic laws at level k in the graph); the number k equals to the maximal rule size m axsize (a hyperparameter for setting the maximal number of predicates in premises of rules considered by the algorithm).base rule enumeration depth d maximum rule size (maximal enumeration depth) m axsize probability threshold for rules confidence (statistical significance) threshold for rules probability gain threshold (global threshold) for laws probability gain thresholds for each level of the derivation graph and for each law size (level/size specific threshold). 
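the definition above translates directly into a check over subrules; the sketch below uses an illustrative boolean-dict encoding of the data and ignores the statistical-significance and threshold criteria that the full algorithm also applies.

from itertools import combinations

def rule_probability(data, premise, target):
    # conditional frequency P(target | premise) on the dataset
    # data: list of dicts mapping predicate name -> bool (illustrative encoding)
    covered = [row for row in data if all(row[p] for p in premise)]
    if not covered:
        return 0.0
    return sum(row[target] for row in covered) / len(covered)

def is_probabilistic_law(data, premise, target):
    # the rule's probability must be non-zero and strictly larger than that of
    # every rule whose premise is a proper subset of the same predicates
    p = rule_probability(data, premise, target)
    if p == 0.0:
        return False
    for r in range(len(premise)):
        for sub in combinations(premise, r):
            if rule_probability(data, sub, target) >= p:
                return False
    return True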
in situation when pld cannot enumerate laws up to a given size m axsize we have a choice: either find shorter probabilistic laws by reducing m axsize, or implement enumeration up to the required size by dropping some shorter laws rules by adjusting other thresholds. those probabilistic laws learned by pld are selected which have probability above a certain threshold (for example, these can be laws with a probability greater than 0. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/220.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/220.txt new file mode 100644 index 0000000000000000000000000000000000000000..d1133f6496affba84e2ae8273d8c7826597ab80d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/220.txt @@ -0,0 +1 @@ +as users require a high performance computer for mass computation, applications need a high-performance computing system to execute jobs more quickly. the demand for high performance has meant cpu architecture has developed from a single cpu to a multicore cpu. recently, cpu architecture has evolved to a multicore cpu architecture based on nonuniform memory architecture as shown in figure 1. in response to this, the design of the modern computer faces a very challenging software assignment called thread scheduling. the technique is used to control the memory nodes at the operating system level, and manages the memory usage of tasks to avoid the problem where tasks lean towards one memory node.however, in the technique of using the operating system level the relative importance among user applications cannot be recognized. therefore, it is important that we find an automatic memory scheduling method in the user-space. moreover, users need to have in-depth system knowledge to obtain high performance applications and effective memory utilization in the existing systems. therefore, users cannot utilize the multicore system based on non-uniform memory architecture (numa) , because they need to have an in-depth numa architecture knowledge. this paper presents a novel memory scheduler that removes unnecessary memory latency and supports high-performance execution of the application. our proposed system schedules memory nodes after monitoring numa architecture automatically in the user-space. recently, cpu architecture has evolved to a multicore cpu architecture based on nonuniform memory architecture as shown in figure1. the technique is used to control the memory nodes at the operating system level, and manages the memory usage of tasks to avoid the problem where tasks lean towards one memory node. therefore, users cannot utilize the multicore system based on non-uniform memory architecture (numa),because they need to have an in-depth numa architecture knowledge. our proposed system schedules memory nodes after monitoring numa architecture automatically in the user-space.schednumaoptimizes memory locality in the numa system by placing tasksinto the same numa node. however, this technique damages the effective memory utilization of tasks because the proposed idea statically fixes tasks into a specific numa node.our proposed technique maintains an ideal memory locality to help the high-performance execution of the application by removing the possibility of memory latency. to reach this goal, the proposed system automatically executes (re)allocation of jobs by finding the best ideal numa node in the user-space with the collected information after monitoring the numa bus topology and run-time memory usage. 
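The passage above describes a user-space scheduler that picks an ideal NUMA node from the monitored bus topology and run-time memory usage. Below is a minimal sketch of that node-selection step, assuming a monitor has already collected node distances and free-memory figures (for instance by parsing /sys or numactl output); the data structures and names are hypothetical, not the paper's implementation.

```python
# Hypothetical sketch of user-space NUMA node selection from monitored data.
def pick_numa_node(cpu_node, distance, free_mem, required_mem):
    """distance[a][b]: relative access cost between nodes; free_mem[n]: bytes free on node n."""
    candidates = [n for n in free_mem if free_mem[n] >= required_mem]
    if not candidates:
        # fall back to the node with the most free memory if nothing fits
        return max(free_mem, key=free_mem.get)
    # prefer the candidate closest to the CPU node the task will run on
    return min(candidates, key=lambda n: distance[cpu_node][n])

# example: node 0 is local but nearly full, node 1 is one hop away with room
distance = {0: {0: 10, 1: 21}, 1: {0: 21, 1: 10}}
free_mem = {0: 2 * 2**30, 1: 8 * 2**30}
print(pick_numa_node(cpu_node=0, distance=distance, free_mem=free_mem,
                     required_mem=4 * 2**30))   # -> 1
```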
if the distribution status of the memory node is not balanced or the execution flow of tasks is changed by the operating system level memory scheduler, the reporter in 2) executes the assignment to find a suitable memory node for the high-performance of important applications. second, in 2), the reporter calculates runtime high-performance factors, re-sorting processes of the numa list, and the ideal memory node for new tasks. at this time, in 3), the user-space scheduler considers the static cpu affinity information required by the server administrator as well as the numa specific information received by 2), the reporter. however, the system administrator needs to perform tuning work with optimization tools because optimal numa tuning using the operating system is very difficult. it is impractical for the administrator to understand the memory architecture of the numa architecture to obtain stable memory utilization and high-performance. the proposed system reallocates tasks into an ideal memory node with collected information after monitoring the characteristics of the numa topologyin the user-space without a kernel space. in other words, our proposed system is a new user-level numa aware memory scheduler considering the memory utilization and optimization of performance without the processor affinity technique that damages the memory utilization. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/221.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/221.txt new file mode 100644 index 0000000000000000000000000000000000000000..577877ef954280c89f4ea4249ac10288de402eed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/221.txt @@ -0,0 +1 @@ +since the introduction of bitcoin and the concept of a decentralized, tamperproof database -a blockchain -a number of different paradigms have been developed to design such databases.recently, the idea of building such systems based on pos (proof of stake) has gained significant popularity. while in the original pow (proof of work, as used in bitcoin) mechanism that is used for incentivizing participation and securing the system, the voting power of a participant is proportional to the amount of computational power possessed, in pos the voting power is proportional to the amount of tokens (digital currency specific to this system). a popular choice in such systems is then to periodically delegate a fixed size committee of participants which then is responsible for running the consensus on which blocks to add to the blockchain. this way of building a blockchain has two substantial advantages over vanilla pow systems such as bitcoin: 1) it allows to run one of the classical permissioned consensus protocols that have been developed over the last 4 decades, 2) it allows to not only reward nodes for participation but also penalize misbehavior, by slashing security deposits of the offending committee members.there has been recently tremendous progress in the design of permissioned consensus protocols that can be used as core engines in such pos blockchains . a vast majority of them are designed in the partially synchronous bft model which asserts that communication between nodes becomes eventually synchronous and that no more than a given fraction of nodes, say 1∕3 (which is optimal in this model), are dishonest and may violate the protocol in an arbitrary way. 
state-of-the-art protocols such as hotstuff , tendermint and streamlet come close to optimality with respect to bandwith, latency of finalization and, also importantly, simplicity. however, there are several practical properties of such blockchain systems that are not captured by this classical model, and consequently, significant room for improvement remains. one such important aspect is that the partition of nodes into honest and byzantine might not accurately reflect their true attitude. in fact, according to the model, even "honest" nodes that have missed several protocol messages because of a ddos attack or even a temporary network failure, are considered byzantine. in a situation where more than 1∕3 of nodes suffered (even for a few seconds) from such a network issue, protocols in the classical bft model are not guaranteed to function properly.on the other hand, besides these occasional offline periods, it is fair to assume that in a real-world system an overwhelming fraction, if not all, of the nodes honestly follow the protocol rules. this is a consequence of the financial incentives for honest participation. indeed, it is in the best interest of committee members to make sure they actively participate in the consensus protocol, as they are paid a salary for honest work and are penalized for being offline or not contributing enough to the protocol progress. in fact, because of penalties for protocol offences, it is highly unlikely that an adversary tries an attack which is not guaranteed to succeed, as otherwise it risks significant losses. therefore, with the only exception of large-scale, coordinated attacks that are intended to bring down the whole system, one should always expect almost all nodes to behave honestly.motivated by this realization there have been several works that design protocols which are safe in the classical sense while at the same time trying to offer better guarantees in "typical" scenarios. in this paper we propose a new protocol -highway -that contributes to this line of work. the security of highway is still formalized on grounds of the partially synchronous bft model, thus in particular it achieves safety and liveness in the most demanding setting when 1∕3 of all nodes are byzantine. however, on top of that, highway offers the following two features that make it particularly attractive in real-world deployments. first of all, in periods of honest participation of a large fraction of nodes, it allows to reach finality of blocks with "confidence" much higher than the typical threshold of 1∕3. to give an example, if a block reaches finality confidence of 0.8 (which is possible in highway) then at least 80% of the nodes would need to violate the protocol in order to revert the block from the chain. this stands in contrast with the classical notion of finalization that is binary: either a block is finalized (this means finality confidence of 1∕3) or it is not. the second practical improvement in highway is that it achieves flexibility akin to the notion defined in . the nodes participating in highway might be configured with different security trade-offs between the allowed number of byzantine and crashing nodes (nodes that might go offline but are otherwise honest) in the protocol. flexibility then means that despite these differences in configuration, all the nodes run a single version of the protocol and perform the same actions, only the finality decisions they make depend on the chosen parameters. 
a practical consequence is that nodes with lower security thresholds might reach finality much faster than nodes with higher thresholds, but as long as both these nodes' assumptions are satisfied they finalize the same blocks and stay in agreement.technically, highway can be categorized as a dag-based protocol , in which nodes jointly maintain a common history of protocol messages, forming a directed acyclic graph representing the causality order. in its design, highway derives from the cbc-casper approach and significantly improves upon it by the use of a new finality mechanism, message creation schedule and spam prevention mechanism. we believe that the conceptual simplicity of dag-based protocols along with the desirable practical features of the highway protocol make it a solid choice for a consensus engine in a proof of stake-based blockchain. the nodes participating in highway might be configured with different security trade-offs between the allowed number of byzantine and crashing nodes (nodes that might go offline but are otherwise honest) in the protocol. besides eliminating the need to make an additional consensus on this particular hyperparameter, one important implication of such feature is that it allows validators to play slightly different roles in the ecosystem -for example some validators may deal mainly with finalizing relatively small transactions, in which case small latency is more important than very high security (and, as will become apparent after the protocol is presented, reaching higher thresholds usually takes more time), while others can prioritize safety over latency3. if an honest validator reaches finality with confidence threshold ≥ for a given valid block , then no honest validator will ever reach finality with confidence threshold for a block competing with . flexible bft also introduces, as the name suggests, certain flexibility for the nodes when it comes to choosing the parameters related to the finality -each of the nodes can have independent assumptions about number of faulty nodes of each kind, and it is guaranteed that two honest nodes with correct assumptions can't finalize competing blocks, and if all nodes have correct assumptions, the protocol will continue making progress. first, in highway validators can update their confidence thresholds and they are able to recompute finality of all of the blocks without the need of communicating with other validators, while in case of flexible bft that would require rerunning the whole protocol.to illustrate this difference, consider the scenario in which there is a big group of overly-conservative honest nodes incorrectly assuming that 90% of the nodes are honest -in such scenario in flexible bft even less conservative honest nodes will not be able to finalize blocks, while less conservative honest validators in highway will not be influenced by such a choice of other validators, as it does not influence the communication between them in any way -in fact, validators doesn't even have explicit means of checking confidence thresholds chosen by the others. thus in every round, the round leader creates 2 units: the proposal unit, and the witness unit, and a non-leader creates either 1 (the witness unit) or 2 units (the confirmation and the witness unit). 
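The Highway discussion above says that each validator applies its own confidence threshold locally and can re-evaluate finality of already-received blocks without any extra communication. The toy illustration below shows that local check; the direct mapping from supporting weight to a confidence value is a simplifying assumption of this sketch, not the paper's finality detector.

```python
# Toy illustration: per-validator finality decisions from one shared set of observations.
def finality_confidence(supporting_weight, total_weight):
    return supporting_weight / total_weight

def is_final(supporting_weight, total_weight, threshold):
    """threshold: fraction of total weight that would have to misbehave to revert the block."""
    return finality_confidence(supporting_weight, total_weight) >= threshold

support, total = 85, 100
for threshold in (1 / 3, 0.8, 0.9):
    print(threshold, is_final(support, total, threshold))
# validators with thresholds 1/3 and 0.8 already consider the block final, the
# 0.9 validator does not yet; all of them act on the same protocol messages.
```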
what is perhaps worse here is that although it will be easy for the honest validators receiving these units to tell that something is going wrong (given the extraordinary number of units and equivocations), it is non-trivial to determine exactly who is responsible and where to start cutting off the bad units. this means that a validator receiving this pattern will be left with the choice of either ignoring these messages (which means that if 1 and 2 were honest that these validators are being permanently cut off from each other), or forwarding them (which can be a problem if a version of this with different equivocating messages was sent to the other honest validators).we note that one equivocating validator ∈  might send equivocating units to each of the other -1 validators and if they do not coordinate about which units to include in their next downset, then all these may end up part of the dag, and thus each honest validator will be forced to include ω( ) chains of units from in order to incorporate each others' units. importantly, the status of a unit for a given validator can change only one way -if a given unit satisfies lnc, it will never cease to satisfy it, if a unit is endorsed, it will never cease being endorsed, and if a unit is not cited naively by another unit , it will never be considered to be cited naively by .while the application of limited naivety criterion ensures that an attacker cannot force the honest nodes to process too many units, it is no longer clear that it can be made to work with our original liveness strategy as that required validators to create units above all units they are aware of, which may well violate the limited naivety criterion.first of all we increase the length of a round from = 3δ to = 6δ, the reason for that is that for a unit sent by an honest validator at time 0, ∕3 or 2 ∕3 we want not only the unit but also its endorsements to reach each other honest validator by time ∕3, 2 ∕3 or respectively.rapid endorsement spread: "whenever a unit is created by an honest validator after gst, after ∕3 time, each unit in ( ) that was endorsed in 's local view at the time of creating is also endorsed in the local view of every honest validator.note that while the finality in such a modified scenario is purely a function of validator weights, the communication complexity remain dependent on the total number of validators -every validator needs to download the units of every other validator, no matter how small its weight is. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/222.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/222.txt new file mode 100644 index 0000000000000000000000000000000000000000..1575e6703ff8ad031bc085b7bbfc85e8f34f9a56 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/222.txt @@ -0,0 +1 @@ +a hashgraph is a directed graph that summarizes who has said what to whom. each peer maintains a hashgraph reflecting the communications it is aware of. in general, each peer knows a different subset of the true graph, but because of digital signatures and cryptographic hashes, they cannot disagree about the information they have in common.the nodes of a hashgraph are events. each event is created by a particular peer. except for each peer's initial event, each event has two parents. one parent has the same creator (we call that one the self-parent), and the other has a 1 3200 transactions per second in 2015 . 2 65,000 transactions per second in 2017 . 
3 as it happens, none of the minor errors we found appear to affect the implemented online algorithm. different creator (we call it the other-parent). honest peers do not create forks, where a fork is defined as two events with the same creator in which neither is a self-ancestor of the other. 4 in other words, the events created by an honest peer will form a chain.we can visualize a hashgraph as shown in figure 1. in this example, all peers are behaving honestly.in the hashgraph network, each peer periodically chooses another at random and sends that peer its latest event. the recipient then creates a new event, with its own latest event as self-parent and the event it just received as other-parent. every event is digitally signed, so there can be no dispute over who created it. each event also contains the hashes of its two parents, so there is no dispute over parentage either. the recipient of the event will request from the sender any of the event's ancestors that it does not have. (for simplicity, we will ignore that part of the protocol in what follows.) the recipient also puts a timestamp into the event, which is ultimately used to determine a consensus timestamp for other events.finally, each event contains a payload of transactions. when a peer wishes to inject a new transaction into the network, it stores it in a buffer of outgoing transactions. the next time it creates an event (this happens multiple times per second), it uses the contents of its buffer as the new event's payload. transactions are just along for the ride in the consensus algorithm, so we will discuss them little.in the example, dave sent d1 to cathy, resulting in cathy creating c2. then cathy sent c2 back to dave, resulting in dave creating d2. bob sent b1 to both alice and cathy, resulting in a2 and c3. at about the same time, alice sent a1 to bob, resulting in b2. alice sent a2 to bob, resulting in b3. cathy sent c3 to bob, resulting in b4. finally, dave sent d2 to bob, resulting in b5.the algorithm partitions events into rounds, in a manner that is easy for all peers to agree on. the first event created by a peer in a round is called a witness. (note that dishonest peers may have multiple witnesses in a single round.) a witness that is quickly propagated to most peers is called famous. identifying the famous witnesses is the main job of the consensus algorithm.each round, the algorithm selects one famous witness from each peer that has one to be a unique famous witness.an honest peer will have at most one witness per round, so if that witness turns out to be famous, it will also be unique. if a dishonest peer happens to have multiple famous witnesses, one of them is selected.we say that an event has been received by the network in the first round in which it is an ancestor of all the unique famous witnesses. the round received is the primary determiner of an event's place in the order. ties are broken using a consensus timestamp that is computed using the unique famous witnesses. any remaining ties are broken in an arbitrary but deterministic way. finally, the ordering of transactions is determined by the ordering of the events in which they reside, with transactions in the same event ordered by their position in the payload.we say that y sees x (written x y) if(1)x ≤ y and (2) there does not exist any fork z, z ′ such that z, z ′ ≤ y and creator(x) = creator(z).1 we say that y strongly sees x (written x ≪ y) if there exists a supermajor set z, such that for all z ∈ z, x z ≤ y. 
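The hashgraph relations quoted above ("sees" and "strongly sees") can be checked mechanically on a toy event DAG. The sketch below follows those definitions directly; the dictionaries for parents and creators are hypothetical data structures for illustration, not the hashgraph implementation.

```python
# Minimal sketch of ancestor / sees / strongly-sees over a toy event DAG.
def ancestors(e, parents):
    seen, stack = set(), [e]
    while stack:
        cur = stack.pop()
        if cur in seen:
            continue
        seen.add(cur)
        stack.extend(parents.get(cur, ()))
    return seen  # includes e itself

def sees(x, y, parents, creator):
    """y sees x: x is an ancestor of y and y's ancestry holds no fork by x's creator."""
    anc_y = ancestors(y, parents)
    if x not in anc_y:
        return False
    same_creator = [z for z in anc_y if creator[z] == creator[x]]
    for z in same_creator:
        for z2 in same_creator:
            if z != z2 and z not in ancestors(z2, parents) and z2 not in ancestors(z, parents):
                return False  # two events by x's creator, neither an ancestor of the other: a fork
    return True

def strongly_sees(x, y, parents, creator, peers):
    """y strongly sees x: a supermajority of peers have an event that sees x and is an ancestor of y."""
    supermajority = 2 * len(peers) // 3 + 1
    intermediaries = {creator[z] for z in ancestors(y, parents) if sees(x, z, parents, creator)}
    return len(intermediaries) >= supermajority
```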
therefore there exist events z ∈ z and z ′ ∈ z ′ such z and z ′ share the same creator, and that creator is honest. then x belongs to round i, unless x strongly sees events in round i on a supermajority of peers, in which case x belongs to round i + 1.every c rounds (a parameter at least d + 3), the election will employ a coin round: every voter who sees a nearly unanimous result the previous round will continue to vote with the majority.8 (voting) suppose x is a round i witness and y is a round j witness, with i + d ≤ j. the round x is received by the network is the earliest round i for which all the round i unique famous witnesses are descendants of x. unlike here, to advance to the next round an event would need to strongly see many witnesses in the current round, while we require it only to strongly see many events in the current round., give x the round max(m, n) + 1) if there exists a set of peers a, comprising a supermajority of the peers, such that for each peer a ∈ a, there exists w created by a in round max(m, n) where w ≪ x. then election (vote s x) (pred n) y t f says that when y collects votes on x from the previous round, it receives t yeas and f nays.recall that, in the proof, we obtained events z and z ′ , sharing the same honest creator, where x z ≤ v and y z ′ ≤ w. we concluded that either z ⊑ z ′ or z ′ ⊑ z, since otherwise z and z ′ would constitute a fork on a honest peer.16), which we can state thus: lemma good_coins : forall s i x, rwitness i x -> exists j y t f b, i < j /\ (j -i) mod coin_freq = 0 /\ member (global s) y /\ rwitness j y /\ election (vote s x) (pred j) y t f /\ ((t >= f /\ b = true) \/ (f > t /\ b = false)) /\ forall w, member (global s) w -> rwitness j w -> honest (creator w) -> coin w s = b. we will say that two samples are similar up to the ith spawn if their first i spawned events are the same, and they give the same coin flip for all of them except possibly the last: definition similar (s s' : sample) i := (forall j, j <= i -> spawn s j = spawn s' j) /\ (forall j, j < i -> coin (spawn s j) s = coin (spawn s j) s'). since w is arbitrary, every round i + d witness votes yes, so x will be decided to be famous in round i + d + 1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/223.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/223.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a90ca1672af67c01af92eab02b039674975863d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/223.txt @@ -0,0 +1 @@ +spiking neural networks (snns), first formalized in 1997 , have experienced a renaissance in recent years due to the rise in popularity of deep learning and the widespread availability of gpgpu hardware. while we are still waiting for the breakthrough that will let snns overtake 2nd generation anns, the research community remains highly active, working to improve performance, biological fidelity, support for complex models and topologies, and user friendliness. our own prior work surpassed the state of the art on a couple of these criteria.we believe that snn simulation is fundamental to snn design: faster simulation means faster iteration and thus quicker progress for the field as a whole. not only speed is important, but so is size, especially considering that we are currently limited to simulating a modest 1% of a rat's visual cortex . 
while gains are still to be made from algorithmic and data-structural improvements, we must solve multi-gpu (and eventually multi-node) scaling. towards this goal we present a snn simulator called "spice" (/spaik/) which scales to millions of neurons, billions of synapses, and 8 gpus. 1 this is made possible by three key contributions:• a novel, cache-aware spike transmission algorithm allows linear scaling with network size in the face of millions of neurons that do not fit into cache. • our parallelization scheme distributes both computations and storage across multiple gpus. • a simple neuron partitioning strategy achieves perfect load balancing in practice albeit being completely static. the result is a snn simulator that makes it possible to run ten different experiments of 100k iterations on a 24b-synapse 1 we use "gpu" to refer to an entire gpu pcie board throughout the paper. model spanning 8 gpus in the same time it takes competing simulators to create a single network. the top row of each gpu's timeline depicts kernel invocations (simulation steps), the bottom row depicts memory transfers (spike synchronizations). as soon as half the batch has completed on all gpus, we download spike counts (required for address calculations), synchronize the spikes (according to fig. we remind ourselves that a network, in its entirety, is defined by the neuron pool, synapse pool, adjacency list, and user callbacks. in order to split such a network into, say, two slices given a pivot one would: 1) split the neuron pool into ranges ). if, additionally, we can achieve perfect load balancing (all gpus completing the batch simultaneously), the simulation will scale linearly with the number of gpus.the goal of load balancing is to distribute the simulation load across gpus as evenly as possible so as avoid the system stalling for a long-running gpu. the partitioning of the adjacency list and synapse pool is determined by that of the neuron pool since a gpu must always store the incoming edges to all of its neurons. the effectiveness of this strategy depends on the assumption that the number of neurons far outweighs the dynamic range2of their costs, which is a necessary condition for any load balancing strategy (with neuron-granularity): if the dynamic range tended towards infinity, the possibility of finding a balanced partition would go to zero.our simulator scales with multiple gpus in both space and time, allowing one to increase network size while maintaining simulation time ("scaleup", fig. both scenarios suffer from a natural limit: spike synchronization time grows linearly with the number of gpusadd enough and any simulation will eventually be bottlenecked by it, leading to sub-linear or even negative scaling. this effect sets in much earlier in the "speedup" scenario where per-gpu simulation time goes down with the number of gpus, shortening the spike synchronization window. instead of transmitting all spikes from delay steps ago, one would (in a loop) transmit 1-step old spikes via the first adjacency list, 2-steps old spikes via the second adjacency list, and so on. more importantly, this feature would work completely orthogonally to the existing pipeline: each per-delay adjacency list would be split across multiple gpus just as it is now. 
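The SNN passage above credits perfect load balancing in practice to a completely static neuron partitioning. Below is a rough sketch of one such scheme, splitting a contiguous neuron range so each GPU receives roughly equal estimated cost; the cost model and names are assumptions of this illustration rather than the simulator's actual partitioner.

```python
# Rough sketch: split a contiguous neuron range into per-GPU slices of similar estimated cost.
def partition_neurons(costs, n_gpus):
    """costs[i]: estimated per-neuron cost. Returns one (start, end) slice per GPU."""
    total = sum(costs)
    target = total / n_gpus
    slices, start, acc = [], 0, 0.0
    for i, c in enumerate(costs):
        acc += c
        # close the current slice once its share of the total cost is reached
        if acc >= target * (len(slices) + 1) and len(slices) < n_gpus - 1:
            slices.append((start, i + 1))
            start = i + 1
    slices.append((start, len(costs)))
    return slices

print(partition_neurons([1, 1, 4, 1, 1, 4, 1, 1], n_gpus=2))
# -> [(0, 4), (4, 8)]  (cost 7 vs. 7; slices stay contiguous, balance is approximate)
```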
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/224.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/224.txt new file mode 100644 index 0000000000000000000000000000000000000000..14b66bde4336296eedf47fa9537e684108cad5ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/224.txt @@ -0,0 +1 @@ +multicore processing has great potential benefits to reduce size, weight, and power (swap) at lower costs. however, that benefit can easily be squandered if interference between cores is not sufficiently mitigated. the focus of this paper is on techniques for accurately measuring the impact of interference on the worst case execution time (wcet) for flight software.in this introduction, we cover the basic concepts of multicore processing, including isolation techniques and the importance of measuring the impact of multicore interference channels on wcet. the rest of the paper then covers our v&v framework in section ii, our test methodology in section iii, and a discussion on the analysis of completeness in section iv. the paper wraps up with a brief description of future directions for our research. this complicates control and adds a significant challenge to assurance analysis because software applications can now run simultaneously on different cores and thus might interfere with one another more directly than on unicore systems where partitions must run one at a time. also known as separation or partitioning, isolation prevents one software function from impacting the behavior of other independent software functions present on the system, so that each software function deterministically meets its requirements regardless of the presence or absence of others. thus, temporal partitioning allocates the entire resource for a fraction of time while spatial partitioning allocates a fraction of the resource for the entire time. most partitioned avionics systems use temporal partitioning to share the cpu, spatial partitioning to share the memory, and bandwidth partitioning to share the i/o. the us federal aviation administration (faa) released the cast-32 position paper on flight certification of systems with multicore processors in 2014 and then produced an updated cast-32adocument in 2016 with an intent "to identify topics that could impact the safety, performance, and integrity of a software airborne system executing on multi-core processors. for example, if it were determined by inspection that contention on a specific resource could not affect the software (for example, software which doesn't require use of an particular peripheral should be unaffected by contention on it), then no further measurements concerning usage of the resource would be necessary. 
this is in agreement with the guidance for cast-32a objective mcp_resource_usage_3, which states: "if the applicant identifies interference channels that cannot affect the software applications in the intended final configuration, then those interference channels do not need to be mitigated and no verification of mitigation is needed." in another example, if it had been demonstrated using interference generators that a partitioning system prevents any measurable interference on a shared l2 cache, then it is reasonable to argue that it is unnecessary to carry out any specific tests on the software under test concerning l2 cache interference. similarly, if it can be demonstrated that the software under test will never use a resource, then it may be maintained that the impact of interference on that resource need not be experimentally confirmed for that software. for example, if testing a piece of software in isolation shows that the software fits entirely onto a shared l2 cache and never makes requests for data from main memory, it would be invalid to assume that interference on main memory is therefore irrelevant for this software. other non-obvious modes of interference may also exist, particularly when applications are using more than one resource simultaneously, and this can create couplings between different interference channels. if no effect is seen from executing with the additional rapidaemons, this suggests that the interference channel is not present in the platform. if a rapidaemon that utilizes a resource very heavily cannot be interfered with by other rapidaemons accessing the same resource very intensively, it is possible to argue that a real application generating far less intensive resource accesses will not interfere with other real applications. for example, if the peak use rate of a given resource were found to be less than that seen in a rapidaemon configuration that didn't yield any interference, then it is reasonable to conclude that no effect is likely to be seen for this software; therefore further testing against that resource could be deemed unnecessary. for example, if the wcet increase due to multicore interference is 5% for shared resource a and 7% for shared resource b, but the resources a and b are accessed by the cores through entirely independent interconnects, then the appropriate margin must be the maximum of the two, 7%. it is still early in the history of certification of fully active multicore systems hosting mixed-criticality software, but thus far techniques for verification such as we propose seem to be the most promising way to provide solid certification evidence to assure that multicore interference has been sufficiently mitigated. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/225.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/225.txt new file mode 100644 index 0000000000000000000000000000000000000000..1575e6703ff8ad031bc085b7bbfc85e8f34f9a56 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/225.txt @@ -0,0 +1 @@ +distributed data-parallel processing systems such as mapreduce, spark, flink, and dataflow/beam enable users to take advantage of clusters of bare-metal or virtual machines for analysis of large datasets. these systems have become popular tools for workloads that range from data aggregation and search to relational queries, graph processing, and machine learning.
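The margin rule stated in the multicore-interference passage above (take the maximum of per-resource WCET increases when the resources sit on entirely independent interconnects) can be written down directly. In the sketch below, summing the increases of channels that do share an interconnect is an added assumption for illustration, not CAST-32A guidance.

```python
# Small illustration of combining per-channel WCET interference increases into one margin.
def wcet_margin(increases_by_interconnect):
    """increases_by_interconnect: {interconnect_name: [fractional WCET increases]}"""
    per_path = [sum(channels) for channels in increases_by_interconnect.values()]  # sum within a path (assumption)
    return max(per_path) if per_path else 0.0                                      # max across independent paths

margin = wcet_margin({"ddr": [0.07], "pcie": [0.05]})
print(f"apply a {margin:.0%} margin")   # -> apply a 7% margin
```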
jobs from these diverse domains stress different resources, while the resource demands typically also fluctuate significantly over the runtime of jobs . therefore, multiple jobs usually share cluster resources without isolation, so they can benefit from statistical multiplexing . this is implemented by using resource management systems like yarn and mesos . these systems allow users to reserve fractions of cluster nodes via the notion of containers, in which users then run one or multiple jobs using the frameworks of their choice. by default the resource management systems use simple scheduling methods such as round-robin, fifo, greedy approaches, and other reservation-based methods such as dominant-resource fairness , while low resource utilization remains a major problem in industry . yet, since jobs differ considerably in which resources they stress and how much utilization fluctuates, schedulers should actively co-locate jobs that share resources efficiently. the benefits of such approaches have been demonstrated before, including by the authors , with multiple schedulers that explicitly take combined resource utilization and interference among co-located workloads into account or learn the impact of this indirectly , taking advantage of the recurrence of a majority of jobs . however, previous efforts fall short in at least one of the following dimensions:-learning efficiency: multiple systems require extensive training data as they learn the sharing efficiency on the level of individual jobs or using completely generic learning methods . -continuous learning: some systems do not update their models continuously and therefore do not adapt to changes in workloads . -solution practicality: some systems do not incorporate objectives besides throughput , while others assume control over more than just job order or require instrumentation not generally supported .addressing these limitations, we present hugo, a cluster scheduler that efficiently learns from collected resource usage metrics to co-locate those jobs that have complementary resource demands and therefore share resources efficiently, building on our previous work . hugo first clusters jobs by their resource utilization, yielding multiple groups of jobs that contain jobs with similar resource demands. subsequently, our scheduler uses reinforcement learning to continuously evolve its knowledge on which groups of jobs are sharing the resources of a particular cluster environment efficiently. that is, the scheduler learns for each workload and cluster from the experiences of scheduling particular job combinations onto the same cluster nodes, assessing which groups of jobs produce a high resource utilization yet low interference when co-located. this combination of generalization across a fixed number of groups of jobs with reinforcement learning of co-location benefits provides learning efficiency, a reduced scheduling complexity, and adaptation to changes in workloads. furthermore, we show how additional scheduling requirements are integrated into hugo with the example of balanced waiting times.addressing these limitations, we present hugo, a cluster scheduler that efficiently learns from collected resource usage metrics to co-locate those jobs that have complementary resource demands and therefore share resources efficiently, building on our previous work. hugo first clusters jobs by their resource utilization, yielding multiple groups of jobs that contain jobs with similar resource demands. 
that is, the scheduler learns for each workload and cluster from the experiences of scheduling particular job combinations onto the same cluster nodes, assessing which groups of jobs produce a high resource utilization yet low interference when co-located.hugo is an adaptive cluster job scheduler that utilizes resource usage profiles of jobs to select and co-locate combinations of jobs that efficiently share the available resources. for example, there could be groups for jobs that predominantly stress the cpu, memory, disks, or network, while jobs of others groups could also exhibit mixed high usage of multiple resources such as both, cpu and memory. jobs in the queue can be recurring jobs or new jobs. the co-location goodness measure assesses how specific combinations of job groups utilize resources, using metrics that capture the resource utilization and interference among co-located jobs.typically, we have multiple jobs queued and with these jobs also multiple job groups. its elements h eg contain the preference of job group e when co-locating jobs of it with jobs of group g.where π e (g) = πe(g) i∈q πe(i) , c is the set of job groups with jobs currently running on the cluster, q is the set of job groups with jobs currently in the queue and s is the set of all groups.where α is the learning rate, ω n is the set of job groups containing jobs placed on the node n, r n represents the co-location goodness for node n, and r i is the mean goodness across all nodes containing jobs in the job group i. however, if the co-location goodness preference of the job group itself is low compared to most of the other job groups, the jobs in that group are still at risk of not getting selected.this paper presented hugo, a cluster scheduler for distributed data-parallel processing workloads that selects jobs based on the resource usage of co-located jobs. to efficiently generalize its knowledge and thus co-locate even new jobs effectively, the approach learns preferences not for single jobs but for groups of jobs that exhibit similar resource demands. hugo selects among the queued jobs using these learned preferences, choosing types of jobs that complement the jobs currently running on the shared infrastructure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/226.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/226.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9c653675fa629d5e0b182d38e06d834b6a26eb2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/226.txt @@ -0,0 +1 @@ +a real-time system is a system where the missing of a deadline may lead to a catastrophe and thus warrants to formally verify the temporal behaviour of the system to ensure safety. in the last decade real-time systems have shifted from uniprocessor to multiprocessor systems in order to deal with the computational, thermal and energy constraints of modern complex applications. to that end, a lot of research has been conducted with regards to the challenge of how to make use of the parallelism provided by multiprocessors for task sets with inter-and intra-task parallelism whilst satisfying deadline constraints. inter-task parallelism refers to the potential concurrent execution of distinct tasks that execute sequentially, whereas intra-task parallelism refers to tasks that allow for parallel execution. 
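The HUGO passage above defines a preference matrix with entries h_eg and an update driven by a learning rate α, a per-node co-location goodness r_n, and a per-group mean goodness, but the excerpt does not reproduce the exact update formula. The sketch below is one plausible reading of that description; the function, data structures, and the precise form of the update are assumptions of this illustration.

```python
# Hedged sketch of a preference-matrix update driven by observed co-location goodness.
def update_preferences(h, placements, goodness, alpha=0.1):
    """h[e][g]: preference for co-locating groups e and g;
    placements[node]: set of job groups running on that node; goodness[node]: r_n."""
    # mean goodness per group across the nodes hosting it (the r-bar term in the text)
    mean = {}
    for grp in h:
        hosting = [n for n, groups in placements.items() if grp in groups]
        mean[grp] = sum(goodness[n] for n in hosting) / len(hosting) if hosting else 0.0
    # nudge preferences of group pairs that actually shared a node
    for node, groups in placements.items():
        for e in groups:
            for g in groups:
                if e != g:
                    h[e][g] += alpha * (goodness[node] - mean[e])
    return h
```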
fork/join models , synchronous parallel task models, real-time scheduling algorithms and responsetime analyses thereof have been published, e.g., , and dag (directed-acyclic graph) based task models , , , , . these models enable tasks with higher execution demands and inherent parallelism such as computer vision, radar tracking or video applications to be scheduled with tighter deadlines.besides the different approaches and justifications to represent intra-task parallelism using the above models, parallel applications in the domain of autonomous driving and image processing are subject to multiple conditional branches and control flow instructions as stated by melani et. al . moreover, the execution times of the subjobs of parallel algorithms in these domains are highly varying due to varying sensor inputs, e.g., images for object detection in autonomous vehicles. beyond that, it was shown that the multicore architecture complicates the worst-case timing analysis. this is due to interference effects from contention on shared resources, e.g., caches, memory etc. the authors in argue that the arbitration delay and state perturbation caused by resource sharing must be captured in the worst-case bounds. all these uncertainties eventually lead to pessimistic responsetime analyses in real-time systems and thus lead to resource underutilization. these architectural impacts on the worst-case execution time analysis have been thoroughly researched by e.g., cache partitioning or bandwidth sharing mechanisms for memory accesses .another approach to this problem is to accept the uncertain execution behaviour of the parallel tasks and to focus on the probabilistic response-time characteristics. for many applications, e.g., closed-loop feedback controllers, hard real-time system engineering (with a safe but very pessimistic upper bound) is not required due to the inherent controller robustness towards timing non-idealities like jitter and deadline misses. in fact, if only a limited number of deadlines of a control application are missed, the required quality of control can still be satisfied.recently, many research efforts have been focused on formalizing and analyzing relaxations of deadline constraints , e.g., weakly hard systems where m out of k task instances must meet the deadlines. moreover, maggio et al. investigate the closed-loop control system stability under consecutive deadline-miss constraints, which further motivates the need for scheduling algorithms that can guarantee probabilistic bounds on consecutive deadline misses to the application.in order to formally describe and verify quantitive guarantees of deadline misses, some quantifications are of importance for soft real-time systems: probability of a deadline miss, probability for k consecutive deadline misses, maximum tardiness of a job. despite the guarantees are soft, the precise quantification of such deadline misses are hard and challenging even for the ordinary sequential real-time task models that are scheduled upon a uniprocessor system. a summary of the literature in this research direction is provided in section ii. they can only be derived under strict model assumptions, e.g., that a job is aborted whenever a job exceeds its deadline in the state-of-the-art analyses. 
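The weakly-hard "m out of k" constraint mentioned in the passage above has a simple operational reading: every window of k consecutive job instances must contain at least m met deadlines. A small illustrative check (assuming the observed run is at least k jobs long):

```python
# Illustrative check of a weakly-hard (m, k) deadline constraint.
def satisfies_m_out_of_k(met, m, k):
    """met: booleans per job instance, True where the deadline was met (len(met) >= k assumed)."""
    return all(sum(met[i:i + k]) >= m for i in range(len(met) - k + 1))

print(satisfies_m_out_of_k([True, True, False, True, True, False, True], m=2, k=3))  # True
print(satisfies_m_out_of_k([True, False, False, True, True, True, True], m=2, k=3))  # False
```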
the reason for this complexity is partly due to inter task interference, i.e., the preemption and interference patterns of the task system due to higher-priority jobs, which results in a large number of system states that must be considered in a response-time analysis.we aim to analyze, optimize and verify the schedulability of probabilistic conditional parallel dag tasks on identical multiprocessors with respect to quantities such as deadline-miss probabilities, consecutive deadline-miss probabilities and tardiness constraints. when considering the scheduling and analysis of probabilistic parallel dag tasks, not only inter-task, but also intra-task interference, and multiprocessor scheduling anomaly effects (the early completion of jobs may lead to longer response-times) must be considered, which complicate the analyses for the above mentioned quantities. contributions: we propose scheduling algorithms based on reservations, i.e., service provisioning, for the probabilistic analysis of parallel dag tasks to avoid inter-task interference induced complexities and anomaly effects and are thus firstly able to solve the stated objective. more precisely, we make the following contributions:• we propose a probabilistic version and formal description of the widely used conditional parallel dag task model in section iii. • we contribute scheduling algorithms and response-time analyses for probabilistic conditional parallel dag tasks based on resource reservation. the reservations can be scheduled along side real-time workloads using any existing scheduling paradigm. in addition, we provide design rules to devise reservations that guarantee probabilistic characteristics such as bounded tardiness, stability, and probabilistic upper-bounds for k-consecutive deadline misses. our approach is anomaly-free because any early completions due to scheduling or dynamic dag structures are handled by the adoption of resource reservation and the abstraction of the workload model. to the best of our knowledge, this is the first paper that addresses the analysis and optimization for probabilistic conditional parallel dag task sets with quantitive guarantees. each conditional parallel dag task τ i ∈ t is defined by a conditional dag structure g i (to be defined later), a relative deadline d i and a minimal inter-arrival time t i , which denotes the minimal distance between two job releases. a conditional dag is composed of finitely many dags, each of which consist of a tuple (v, e), where v denotes the finite set of subjobs and the relation e ⊆ v × v denotes the precedence constraints of these subjobs such that there are no directed circles in the underlying graph.each probabilistic conditional dag task is described by the tuple τ i = (g i , d i , t i ) where g i denotes a probabilistic conditional dag structure, d i denotes the relative deadline and t i denotes the minimal inter-arrival time between two job releases. for each task τ i ∈ t a cumulative distribution function (cdf) is inferred from the conditional dag structure, where f i (u, v) describes the probabilistic behaviour of the volume and length of a dag instance.we use a reservation system to handle the scheduling of the dag tasks and use any partitioned scheduling algorithm to schedule the reservation system and other tasks in the system. 
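The reservation-based approach described above runs each DAG job on m_i in-parallel reservation servers with a greedy list scheduler. As a hedged illustration, the classical list-scheduling bound len + (vol - len)/m can be combined with sampled (vol, len) pairs to estimate a deadline-miss probability. The paper's own analysis additionally accounts for backlog and the shape of the provided service, which this sketch ignores, and the sample values below are made up.

```python
# Crude Monte Carlo estimate of a deadline-miss probability from a classical makespan bound.
import random

def makespan_bound(vol, length, m):
    """List-scheduling bound on m greedy servers: critical path plus remaining work shared by m."""
    return length + (vol - length) / m

def miss_probability(samples, m, deadline, trials=100_000, seed=0):
    """samples: (vol, len) pairs assumed drawn from the conditional DAG's distribution."""
    rng = random.Random(seed)
    misses = sum(makespan_bound(*rng.choice(samples), m) > deadline for _ in range(trials))
    return misses / trials

samples = [(12.0, 4.0), (20.0, 6.0), (30.0, 8.0)]   # hypothetical (vol, len) draws
print(miss_probability(samples, m=4, deadline=12.0))  # roughly 1/3 with these made-up numbers
```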
in a list schedule on m i in-parallel reservation servers a subjob of a given dag job g = (v, e) is executed on any reservation server that is idle and scheduled for execution and as soon as all preceding subjobs have executed until completion.based on this definition, the worst-case response time of a job τ i,ℓ of a dag task τ i that was released at t i,ℓ is given by the smallest t ′ ≥ t i,ℓ such that work s i (t i,ℓ , t ′ ) ≥ vol(g ℓ i ) + backlog s i (t i,ℓ ), where backlog s i (t i,ℓ ) is the amount of unfinished work at time t i,ℓ of jobs of τ i released before t i,ℓ .proof: in the proof we split the work at time s kq and estimate each summand of work s i (f kq-1 , f kq ) = work s i (f kq-1 , s kq ) + work s i (s kq , f kq ) on its own., work s i (f kq-1 , s kq ) = serv s i (f kq-1 , s kq ) holds for all q ∈ {2, . given the workload conserving properties of list-scheduling used to dispatch subjobs to the service, an eligible subjob is scheduled whenever service is available. additionally, given the provided service serv s i (s kq , f kq ) due to sequential execution of v kq , at most m i -1 reservations of duration ℓen(v kq ) may be unused.by decomposing work s i (r g , r g + t), we obtain that it can be written as the sum of p q=1 work s i (µ t (2q -1), µ t (2q)) and of p q=1 work s i (µ t (2q), µ t (2q + 1)). the first summand is lower bounded by the sum of the corresponding service values p q=1 serv s i (µ t (2q -1), µ t (2q)), and the second summand from above is lower bounded by p q=1 serv s i (µ t (2q), µ t (2q + 1)) -(m -1)ℓen(v kq ) . for each probabilistic constrained-deadline conditional dag task the algorithm determines all feasible configurations (m i , e i ) by iterating through the number of in-parallel reservations m i ∈ and search for the smallest required reservation service to still comply with the consecutive deadline-miss constraints.in this paper we proposed a probabilistic version and formal description of the widely used conditional parallel dag task model and proposed a resource reservation system that allows for scheduling anomaly free scheduling whilst provably guaranteeing probabilistic quantities such as bounded tardiness, stability, and probabilistic upper-bounds of k consecutive deadline misses. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/227.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/227.txt new file mode 100644 index 0000000000000000000000000000000000000000..55453a06c999484437aadaaa10509e7a733f88aa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/227.txt @@ -0,0 +1 @@ +in recent years, the rapid advancements in hardware technologies such as graphics processing units (gpus) and tensor processing units (tpus) allow compute-intensive workloads to be offloaded from the central processing units (cpus) by introducing parallelism . there is a wide variety of areas that can benefit from parallelization , one of which is state estimation.state estimation is a common task that arises in various areas of science and engineering . it aims at combining the noisy measurements and the model to estimate the hardto-measure states. 
a frequent and classical method for solving this problem is based on bayesian filtering and smoothing which inherently provides a sequential solution with linear complexity in the number of time steps.in order to tackle the computational burden of kalman type of filters and smoothers, provide sub-linear computational methods by taking advantage of the sparse structures of the matrices appearing in the batch forms of the problems. in other works, using an ensemble formulation of kalman filter has been used to speed up the matrix computations through parallelization . the primary focus of these works was the efficient computation of the covariance matrices either by introducing sparse or sample covariance matrices rather than considering the temporal state-space structure per se.the authors would like to thank academy of finland for funding.while in the aforementioned works, parallelization of the subproblems in the area of bayesian filtering and smoothing were considered, presented a general parallelizable formulations specifically designed for parallelizing state-estimation problems in the temporal direction. moreover, for the special case of linear gaussian model, parallel equations for computing kalman filter and raugh-tung-striebel smoother solutions were derived.overcoming the computational burden in the case of nonlinear dynamical systems with additive gaussian noise is also of paramount importance. in these types of models, various linearization approaches can be used. taylor series expansion based iterated extended kalman smoother (ieks) methods and sigma-point based methods are well-established techniques in literature. iterated sigma-point methods have been proposed, for example, in . despite the capabilities of the aforementioned methods in state estimation in nonlinear gaussian models, they lack a framework which enables the computations in a more efficient way when using parallelization.the contribution of this paper is to present a set of parallelizable formulas for filtering and smoothing in nonlinear gaussian systems, in particular, ieks and sigma-point based methods using a scan algorithm . the proposed methods reduce the linear span complexity of the state estimation methods to logarithmic with respect to the number of measurements.this paper is organized as follows: section 2 briefly reviews the generic parallel framework for bayesian filters and smoothers. sections 3 and 4 are concerned with presenting the formulation of the problem and proposing our method. section 5 analyzes the efficiency and the computational complexity of the proposed method through one numerical example, and section 6 concludes the paper.the goal of the filtering problem is to find the posterior distributions p(x k | y 1:k ) for k = 1, . this distribution is a probabilistic representation of the available statistical information on the state x k ∈ r nx given the measurements y 1:k = {y 1 , . , y k } with y k ∈ r ny . the following strategies are used inso as to particularize a k and the binary associative operator ⊗ which provide a parallel framework for solving the aforementioned sequential filtering and smoothing problem.having considered the aforementioned general formulations, in this paper, we aim to extend the element a k and the binary associative operator ⊗ to linear approximations of non-linear gaussian systems, specifically, to the extended kalman filter and smoother, and sigma-points methods.) are nonlinear functions.) 
and (h k , d k , ω k ) using sigma-point-based statistical linear regression (slr) methodas follows. then, in order to find the parameters (f k-1 , c k-1 , λ k-1 ), transformed sigma-points are obtained as z j = f k-1 (x (i) j,k-1 ) for j = 1, . in this case, ω and λ are selected as zeros, and (f k-1 , c k-1 ) and (h k , d k ) are obtained by analytical linearization at the previous posterior (smoother) mean estimate of x 0:n . by expanding f k-1 (x k-1 ) and h k (x k ) in the first-order taylor series utilizing the previous posterior means xk , the parameters of (6) are:. aiming to specify the element a k for obtaining parallel filtering equations according to (3), we apply kalman filter update step to the density p(x k | x k-1 ) with measurement y k .it is worth noticing that in order to find parameters of (13) at k = 1 and given m 0 and p 0 , conventional formulations of the kalman filter method with the linearized parameters are applied directly for prediction and update steps. also, using the information form of kalman filter, the distribution g ′ k (x k-1 ) = p(y k | x k-1 ) ∝ n i (x k-1 ; η k , j k ) can be obtained as follows:. assume that the filtering means x * k and covariance matrices p * k for the model (11) have been acquired as described above.in the smoothing step, the parameters a k = (e k , g k , l k ) can be calculated in parallel. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/228.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/228.txt new file mode 100644 index 0000000000000000000000000000000000000000..ffd7f4bc6d26ead5f660940dd524914e8775b697 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/228.txt @@ -0,0 +1 @@ +at the time of writing, the world is dominated by a worldwide pandemic called covid-19. developing a vaccine against it is one of the most important possibilities to fight this virus. nevertheless, time is rare, as the pandemic already caused more than 2.15 million deaths in the last 12 months. to speed up this process, vast processing power is needed to simulate the folding of the virus proteins. this simulated folding process helps scientists in finding new possibilities for a vaccine. however, this processing power amount cannot be reached with a single supercomputer or a server farm without enormous costs. to solve this problem, a more sophisticated approach can be applied: volunteer-based distributed computing. it is called volunteer-based distributed computing, which is used by the folding@home project. by combining the idle power of a large portion of computers worldwide, enormous processing power can be formed. at the time of writing, the combined processing power reached 0.22 exaflops . in peak time (2020-03), the processing power even exceeded 1.5 exaflops which is even higher than the currently fastest computer globally with about 0.44 exaflops . furthermore, in contrast to the world's fastest computer, the folding@home project does not need to take all its clients' operating costs into consideration because this cost is donated by the participants who get credits in exchange.in the following, we introduce the concept and architecture behind the folding@home project. in contrast, we present a trust-based approach and discuss whether such an approach including trust communities is applicable to a volunteer-based distributed computing system like folding@home. 
the work unit's result is then sent back to the work server, or, if this server is not available, to a collection server which acts as a buffer and sends the information to the work server as soon as it comes back online.3) the replication factor describes how many other agents should receive the same work unit to get a trustable result. an agent with a good reputation will have a low replication factor as this agent's result is trustable and has to be checked by only a small amount of other agents or eventually no agents at all. a higher maximum limit for the replication factor means more reliability but less throughput, as more agents have to work on the same work unit. the minimum replication factor represents the number of other agents that calculate the same work unit even if the agent has the highest reputation. the second work unit is then distributed to the next agent with the lowest f min with no work unit. this is an etc with a tcm distributing wus to the members and inviting a new agent to join, trying to separate the agents with a good reputation from those with a bad one. in the preorganization phase, the first step of forming an etc, all the agents are unrated and begin to rate each other based on the work results. as mentioned in ii-b, the assignment server balances the clients' incoming work requests between the work servers, which then forward the work units to the clients. to make both approaches comparable, the software of the agents, previously known as the clients, has to be limited to only accept work units and not also send new ones to the grid, except they become the trust community manager. we will call agents with the ability to distribute wus work agents. aiming for the highest performance, we have to consider that too egoistic agents might try to solve the wus independently, so they do not need to share credits with other agents. each agent randomly distributes its work units to the other agents and rates them afterward by criteria like correctness, time to compute, or rejection. as a result, the work agents begin to invite the agents with the best reputation to join their etc. therefore, the work agent that invited the other agents to join the etc becomes the first tcm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/229.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/229.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d5efe53fc7e0922ee2bcabe898dd270806239b6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/229.txt @@ -0,0 +1 @@ +since the birth of bitcoin in 2008 as a peer-to-peer electronic cash system, blockchain technologies have spread far beyond the sole cryptocurrency domain, in particular after the implementation of general purpose smart contracts introduced by ethereum . besides a growing number of applications ranging from de-fi, healthcare, music industry, government, identity, to cite but a few, blockchain technology has recently started to catalyse the attention of the scientific community as well with the promising potential of being a 'game changer' in outdated and broken scientific practices and leading towards open science . indeed, scholars have pointed out how the intrinsic characteristics of blockchain technology set the basis for a open science infrastructure in which decisional processes are transparent and therefore more democratically accessible to all the stakeholders (researchers, reviewers, funders, taxpayers). 
those are: the consensus algorithm , a deterministic computational trust that allows for decentralization, for which there are no trusted third parties; the proof of existence (poe) that via cryptographic hashing and timestamping creates a digital footprint able to keep a traceable chronological record of research objects that cannot be altered or retrieved (due to its property of immutability or append-only) . in particular, a 'blockchainified science' could 'reduce waste' , by disclosing each step in the research cycle to 'scientific self-correction' way before the final publication step, and therefore help fixing the current reproducibility crisis in science. a thorny issue in the academic system that can -and we think it should -be tackled by blockchain concerns the status and accreditation of peer review, the core process of scientific validation currently facing a crisis . in this paper we propose a solution to the problem of reviewers recognition based on the principles of tokenomics and in line with the values of open science. a thorny issue in the academic system that can -and we think it should -be tackled by blockchain concerns the status and accreditation of peer review, the core process of scientific validation currently facing a crisis. some studies have reported an improvement in the transparency and civility of the review process when open reports are released according to the standards of open peer review. we imagine a final paper originating from the peer review process as a complex system that emerges from the interactions between the authors and the reviewers, a whole that is more than the sum of its parts.the peer review history, including reviewers' recommendations and authors' replies, should be openly and permanently accessible to the community (in the form of 'open reports' of open peer reviews) even before articles' publication in order to make editorial decisions more democratic and prevent waste of knowledge. following the example of models offered by journals peer review consortia, such as the neuroscience peer review consortium4and independent companies like researchsquare5and peerage of science6, that provide a scientific peer review service, peer reviews in ants-review will be transferable across journals (like in 'cascading' or 'portable peer reviews').the ants-review protocol is divided into different modules responsible for the following functionalities, as shown in the flow-chart (see figure1): antsreview, which manages access management and the core system (see figure1,(a,b,f,h)); privacy, which maintains the anonymity of the system via aztec protocol (see figure1, (e)); tokenomics, which manages the incentive mechanism of the system (see figure1,(c,d,e,h)).• upload of the files containing the requirements of the peer review and the paper to be reviewed into ipfs, whose hash is stored into the ethereum blockchain as poe;. bob (peer-reviewer) can download the files relative to alice's paper and the requirements of the peer review by leveraging on the content-addressing feature of ipfs that allows anyone to find the document using an ipfs explorer; subsequently, bob can submit a peer review before the deadline by fulfilling the antreview created by alice, with the function fulfillantreview(), by uploading the peer review on ipfs, whose hash is stored into ethereum blockchain as poe. 
ted (approver) can accept the peer review submitted by bob with the function acceptantreview(), by specifying the amount of ants that will be transferred as reward from the contract to bob. if alice's antreview does not receive any peer review and the deadline expires, anters can get a refund with the function refund() for their contributions. finally, a protocol upgrade inspired by discover 17 , a web3 browser by status 18 and still under investigation would allow anters to validate peer reviews via an upvote/downvote system that will consent the protocol to automatically pay out the reward to the reviewers based on the votes associated with their peer reviews. this dissociation of initial scientific dissemination and scientific validation will force the publishing industry to adapt in order to keep up with the higher quality scientific process offered by those alternative peer review platforms and justify their added value. in our proposal we also decoupled the peer review process from the publishers giving it back to the scientific community and applying incentives from tokenomics. we foresee that the future will evolve towards community-driven peer reviews: peer reviews will be more and more independent from publishers, and researchers will be the ones seeking the papers to review to build reputation within the community and not journals.enlarging the pool of reviewers to potentially an entire scientific community and accelerating the whole process requires a standard for peer reviews: for example some aspects might be taken over by ai assistants (such as the artificial intelligence review assistant (aira)leaving to the reviewers the sole task of evaluating the content of a paper. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/23.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/23.txt new file mode 100644 index 0000000000000000000000000000000000000000..701ca9abe823cbdf1d4e9a180fef6ba85aac0f36 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/23.txt @@ -0,0 +1 @@ +pddl is the leading specification language used by the ai planning community in expressing (classical) planning problems and solving them by domain-independent planners. most of the planners for the classical planning problem (with a unique initial state and actions that are deterministic) implement boolean state variables only. some additionally support numerical state variables, integers or reals. while this is theoretically sufficient to express a broad class of complex planning problems, the practical limits of pddl are sometimes encountered. this is particularly evident when pddl specification are not written by hand, and need to be generated by programs written in conventional programming languages. there are numerous examples of this both in standard pddl benchmark problem sets as well as in the planning literature. pddl is in these cases used an intermediate language to interface with domain-dependent planners.the idea of more powerful specification language, allowing the easier expression of complex planning problems, and to reduce the need for ad hoc problem generators, is not new. a prominent example is functional strips (geffner 2000). if pddl's predicates p pt 1 , . . . , t n q are viewed as arrays as in programming languages, pddl limits to arrays indexed by object names or by actions' parameter variables only, and only boolean values as elements. 
functional strips goes further, allowing more complex array indexing, including with object-valued expressions and nesting of array indexing. specifications in this kind of language can often outperform pddl in elegance and succinctness.the goal of this work is, similarly to functional strips, to increase the expressivity of specification languages. we believe that a planning specification language should support a broad collection of different data types, including scalars like booleans, numeric types, and enumerated types, as well as compound types such as records, unions, arrays, sets and lists. this has been motivated by our work on using existing planning technology in the creation of intelligent software systems, which handle complex structured and relational data, as found in almost any software application.in this work, we will first present an expressive language that supports several complex data types, and then we will give reductions of that language to an intermediate boolean representation. this representation can be used as a basis of implementations in different types of intermediate and lower level modeling languages.as an obvious language to reduce the booleanized representation to is pddl (ghallab et al. 1998), due to the existence of dozens of implementations of pddl in scalable, robust planners. the front-end of our planning system uses the extended specification language that supports complex datatypes. the expressions in this language, and the actions based on them, are reduced to a purely boolean representation. the final step is the generation of pddl, so that existing planners can be used for finding plans.intuitively it is clear that reducing complex data types to pddl is possible, but it is not clear how practical those reductions are, and how efficiently existing planners for pddl can solve the resulting pddl specifications. many existing planners support a large fragment of pddl, but may translate some parts of pddl representations to specific normal forms, and these normal form translations may increase the size of the representations. for instance, planner backends may require actions' preconditions to be conjunctions of (positive) literals. for this reason, to support many stateof-the-art planners better, we develop techniques to further process the pddl to forms that are better digestible by existing planners.in the experimental part of the work, we try out the resulting planner front-end with different state-of-the-art pddl 1: data types planners as back-ends. we demonstrate their ability to solve complex problems that would be tedious and unintuitive to express in pddl.the experiments help identify bottle-necks in existing planners, which could aid in developing existing planning technology to better handle more complex problem specifications. we believe that a planning specification language should support a broad collection of different data types, including scalars like booleans, numeric types, and enumerated types, as well as compound types such as records, unions, arrays, sets and lists.in this work, we will first present an expressive language that supports several complex data types, and then we will give reductions of that language to an intermediate boolean representation.intuitively it is clear that reducing complex data types to pddl is possible, but it is not clear how practical those reductions are, and how efficiently existing planners for pddl can solve the resulting pddl specifications. 
many existing planners support a large fragment of pddl, but may translate some parts of pddl representations to specific normal forms, and these normal form translations may increase the size of the representations.we define a planning problem by a quadruple of p " xv, s 0 , g, ay, where v is the set of state variables, s 0 defines the initial state by specifying the values of all state variables in v, g is a boolean formula describing the set of goal states (i.theoretically, variables of v could be of any type, but in this work we limit to boolean state variables, which is the most commonly used type in planners that support pddl." translation notice that recursive data types, with a field of a record or a union type pointing to another value of the same type, could not be handled by this reduction without rather strict additional restrictions, due to there being no size bounds of values of recursive data types. for each w i φ , we add the set of txψ j , w i φ :" jy | 1 ď j ď nu to the effects of the action b a i ; moreover, we add xj, w i φ :" ky to the effects of action a, and initialize all auxiliary variables with the value of k in the initial state. , b a n to be executed before the execution of action a, by introducing variables of p 0 , p a i , 1 ď i ď n, and: • adding p 0 and txj, p 0 :" ky, xj, p a 1 :" jyu to the precondition and effects of b a 1 , respectively, • adding p a i´1 and txj, p a i´1 :" ky, xj, p a i :" jyu to the precondition and effects of b a i , 2 ď i ď n, respectively, • adding p a n and txj, p a n :" ky, xj, p 0 :" jyu to the precondition and effects of action a, respectively, and • initializing p 0 with j and p a i , 1 ď i ď n, with k in the initial state. since the structures of both vrw t and vew t are the same, to reduce these assignments to boolean assignments suitable for pddl, we can just find the boolean variables in vrw t and their corresponding boolean formula in vew t , and create conditional boolean assignments based on them. moreover, since pddl only supports j and k as the assignment values (add effects and delete effects), we further reduce it to xcond ^φ ^eb , v b :" jy and xcond ^φ ^ e b , v b :" ky. the first goal was to evaluate the practicality of the proposed method to translate problems with complex data types into the most common version of pddl that supports only boolean variables. moreover, our second and more specific goal was to evaluate the effectiveness and improvement of using complex data types compared to the cases that we can also describe our problems with scalar type values without too much difficulty.to better evaluate the effects of using complex data types, we conducted other experiments to compare the performance of solving identical problems in two cases: in one case, we use complex data types, and in another case, we use only scalar data types to describe the same problem.we have proposed a very expressive modeling language for planning, with a rich collection of data types, and devised a translation of this language first to boolean logic, and then further to the planning domain description language pddl. 
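The reduction sketched above, from complex data types down to a purely boolean representation, can be illustrated on its simplest case, an enumerated-type state variable. The sketch below is an editorial illustration in Python, not the paper's translator: it uses the standard one-proposition-per-value encoding, where an assignment v := d becomes one add effect plus a delete effect for every other value, so that exactly one of the propositions holds in any reachable state.

def booleanize_enum(var, domain):
    # one boolean proposition per value of the enumerated variable
    return {value: f"{var}-is-{value}" for value in domain}

def assignment_effect(var, domain, new_value):
    # translate "var := new_value" into PDDL-style add and delete effects;
    # if the initial state makes exactly one proposition true, this effect
    # pattern preserves that invariant
    props = booleanize_enum(var, domain)
    add = [props[new_value]]
    delete = [p for value, p in props.items() if value != new_value]
    return add, delete

add, delete = assignment_effect("light", ["red", "green", "blue"], "green")
print("add:", add)      # ['light-is-green']
print("del:", delete)   # ['light-is-red', 'light-is-blue']

Records and arrays reduce in the same spirit, roughly one boolean per (field, value) or (index, value) pair, which is why the size of the booleanized representation, and how well existing planners digest it, is exactly the practical question the experiments address.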
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/230.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/230.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a1e84f1233b4a0cf3e0d6641db8f58ff9545d4c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/230.txt @@ -0,0 +1 @@ +in the industrial sector, production planning leans on efficient decision-making strategies in order for the company to gain an edge on the competitive market. production planning aims at optimizing the resources of a company to meet its long, medium and short-term objectives. roughly speaking, long-term decisions belong to the strategic level of production planning, when a company's global goals are defined. tactical planning is responsible for efficiently using the available resources in order to fulfill the goals of the strategic planning. in the short-term planning level, known as operational planning, decisions related to the company's routine are made.production planning for a variety of realistic scenarios involves deciding the number of lots of items to be produced (lot sizing), and the planning and sequencing of tasks to be performed for the production of the items (scheduling) . one may define a production plan by considering lot sizing and scheduling separately, where the lot sizing problem is solved first so that the tasks can be scheduled. this approach is suitable for problems whose costs and machine setup time do not depend on the sequence of tasks performed during the production of the items. nevertheless, when there is a dependence between machine setup and sequence of tasks, e.g, when capacity use violates an imposed limit, the results of the scheduling also affect the lot sizing. therefore, it is not possible to approach lot sizing and scheduling in a hierarchical and separate manner.better decision-making strategies to solve problems with sequence-dependent setup costs and times may be reached by simultaneously integrating the production plan considering lot sizing and scheduling (integrated) . the resulting problem is known as integrated lot sizing and scheduling problem (ilssp), which is recognized as being computationally challenging. classic ilssps aim at defining a production plan that minimizes the total costs of production in a finite planning horizon. the ilssp is an extension of the single item capacitated lot sizing problem (clsp) with setup costs. therefore, the problem of determining an optimal solution to the ilssp is n p-hard . moreover, the feasibility problem of ilssp is n p-complete .heuristic methods efficiently solve several variants of the ilssp, even the significantly hard instances. furthermore, the advances in computational power predicted by moore's law enable the incorporation of characteristics in ilssp models that better approximate the mathematical formulations to real scenarios. considering these facts, we propose solution methods to efficiently solve a relevant problem that has not yet been extensively investigated by the literature, the ilssp on parallel machines with non-triangular sequence-dependent setup1 costs and times and setup carry-over, referred to here as ilssp-nt on parallel machines.important applications of the ilssp-nt on parallel machines can be found in the food processing industry , e.g., in the animal feed industry. 
in the last five years the production of animal feed increased by an average of 2.49% a year and, according to the ifif (international feed industry federation), it tends to soar until 2050 . the growth in the animal feed industry is one of the consequences of the rise in consumption of raw materials such as meat, milk and eggs provided by the livestock industry.to the best of our knowledge, a few solution methods to approach the ilssp-nt on parallel machines can be found in the literature. kang et al. proposed a branch-and-price-based approach to solve the ilssp-nt on parallel machines without setup times. meyr introduced four heuristics, which are combinations of the threshold acceptance and simulated annealing heuristics with dual reoptimization. on the other hand, many problems related to the ilssp-nt on parallel machines have been investigated .mathematical programming-based heuristics, also known as matheuristics, e.g., the relax-and-fix and fix-and-optimize heuristics, have been widely suggested to approach problems related to the ilssp-nt . moreover, the results presented in indicate that local search-based methods play an important role in improving the quality of the solutions. therefore, in this paper we propose solution methods that combine the relax-and-fix and fix-and-optimize heuristics and that use intensificationbased heuristics known as path-relinking and kernel search to refine the solutions. the in-troduced intensification strategies were designed to consider the particularities of the non-triangular setups to better approach the ilssp-nt.computational experiments were carried out with the proposed heuristics using instance sets obtained from studies in the literature. in the first experiment, we employed the introduced solution methods to approach the ilssp-nt on a single machine on the instances introduced in . the results obtained by our solution methods are contrasted to those found by the cplex v. 12.10 solver with an imposed time limit of 3600 seconds. in this experiment, the proposed heuristics were very competitive with cplex, achieving better average gaps in the problems tested. in the second experiment, we tested the heuristics presented here to address the ilssp-nt on parallel machines. in this experiment, we used instances proposed in but modified the setup costs and times of some items so that they were shortcut items, as introduced in , and compared the solutions obtained using the proposed heuristics with the solutions found by the cplex v. 12.10 software time limited to 3600 seconds. the results of this experiment demonstrate the outstanding performance of the proposed matheuristics with the intensification strategies in comparison to cplex.this paper is organized as follows: section 2 presents a review of problems related to the ilssp; section 3 presents the main characteristics of the ilssp-nt on parallel machines as well as the investigated mathematical formulation; section 4 provides a thorough description of all the methods proposed to solve the ilssp-nt on parallel machines; section 5 presents a comparative analysis of the results obtained by the proposed heuristics and the cplex v. 
12.10 solver when solving the ilssp-nt on a single and parallel machines; and finally, section 6 presents the final remarks and directions for future research.this paper is organized as follows: section 2 presents a review of problems related to the ilssp; section 3 presents the main characteristics of the ilssp-nt on parallel machines as well as the investigated mathematical formulation; section 4 provides a thorough description of all the methods proposed to solve the ilssp-nt on parallel machines; section 5 presents a comparative analysis of the results obtained by the proposed heuristics and the cplex v.in the proposed rfo, if the rf heuristic returns a feasible solution to the ilssp-nt on parallel machines, we apply the fo heuristic to the solution. the first solution, represented by (x rf , y rf , z rf ), is the solution obtained in iteration θ -3 of the rf heuristic, if the method reaches such an iteration. the second solution, called (x rf * , y rf * , z rf * ), is the best solution found by the rf heuristic. if (x rf * , y rf * , z rf * ) is feasible, the fo heuristic uses it as starting solution to obtain an improved solution (x f o , y f o , z f o ), which is returned by the method along with (x rf , y rf , z rf ) and (x rf * , y rf * , z rf * ). the first solution, referred to as (x e (1) , y e (1) , z e(1) ), is the solution obtained by the rf heuristic in the iteration where the variables associated to the periods t ′ θ-2 to p have their domain relaxed. the second solution of e, referred to as (x e (2) , y e (2) , z e(2) ), is the best solution between those found by the rf and pr heuristics. on the other, our goal in employing (x e (2) , y e (2) , z e(2) ) and (x e (3) , y e (3) , z e(3) ) in pr is to ensure the intensification in the neighborhood of the feasible solutions found by the rf and fo heuristics. solution (x e (2) , y e (2) , z e(2) ) is updated with solution (x (m 1) , y (m 1) , z (m 1) ), in case z e(2) > z (m 1) .if either elapsedtime-pr is greater than or equal to timelimit-pr or the objective function values z e (2) and z e(3) respectively associated to the solutions (x e (2) , y e (2) , z e(2) ) and (x e (3) , y e (3) , z e(3) ) are equal, the algorithm stops and returns e.data: instance, (x rf , y rf , z rf ), (x rf * , y rf * , z rf * ), (x f o , y f o , z f o ), availtime-rfo, timelimit-pr. 1 (x e (1) , y e(1)) , z e(1) ) ← (x rf , y rf , z rf ); 2 (x e (2) , y e(2)) , z e(2) ) ← (x rf * , y rf * , z rf * ); 2) , y e (2) , z e(2) ), availtime-pr); 11 update availtime-pr; 2) , y e (2) , z e(2) ). in the loop, the pr and fo heuristics are performed respectively while availtime-pr is greater than zero and the objective function values associated with (x e (2) , y e (2) , z e(2) ) and (x e (3) , y e (3) , z e(3) ) are different. if a solution with a better objective function value in comparison to (x e (2) , y e (2) , z e (2) ) is found by the pr heuristic, the fo heuristic is applied to such a solution.let (x rf * , y rf * , z rf * ) and (x f o , y f o , z f o ) be the best solutions found by the rf and fo heuristics, respectively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/231.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/231.txt new file mode 100644 index 0000000000000000000000000000000000000000..9370414305c2fc346d4d01a6623d09df87f8b811 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/231.txt @@ -0,0 +1 @@ +figure 2 shows the architecture of deal. 
deal consists of a global layer that provides an selection optimization process with the mab algorithm, and a local layer that manages the local decremental learning through incremental and decremental updates, and the associated energy control. the details are as follows. global layer. deal supports federated learning in a clientserver manner. in the global selection layer in the component as shown in figure 2, when a learning job is created from the server, deal selects a worker subset n = {1, 2, ...n } from all live candidates. in this subset, all workers shall have required training data d, sufficient computation resource to complete the learn job with a reward x. the whole selection process shall maximize this reward as an optimization process. as the device column in figure 2, deal initializes the federated learning setup in a pub/sub model. all selected workers are notified by the server via the pub method, as well as receiving the models to be trained. gradually, each worker finishes its local training, and sends back model gradients via sub methods. in this process, workers can leave. deal allows the server to communicate with workers via the sub method periodically, and starts the convergence process when receiving the majority signals from all selected workers or a time to live (ttl) is violated. local layer. in the local layer for learning and system control, each worker introduces a hyperparameter θ, meaning how much one worker shall "forget" its data . therefore, though we still compute similarity models between users, as shown in figure 1, after some epochs, deal overwrites the model with newly arrived data and forgets the deleted data, as well as their impact in the model. in this way, we not only allow the balance between model training and local energy reduction, but also enable a better privacy preserved for each worker.in summary, deal exhibits a two-level design, globally and locally, to optimize the energy efficiency and privacy for federated learning. next, we introduce our system modeling within these two layers. in this way, all previous relentless effort on system energy management techniques, such as dynamic voltage and frequency scaling (dvfs), process migration, and ic thermal shutdown, can be adopted into the federated learning process, in order to save training energy.based on the above findings, we propose a decremental energy-aware learning framework (deal) that provides an energy efficient design from decremental learning and energy saving techniques. when the learning starts, deal develops a local middleware layer that carefully manages the local learning process as incremental and decremental updates, based on specific models. federated learning is designed to train a shared model collaboratively with the data generated on edge devices while preserving data privacy, in a mobile federation. deal consists of a global layer that provides an selection optimization process with the mab algorithm, and a local layer that manages the local decremental learning through incremental and decremental updates, and the associated energy control. 
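The global selection layer is described only as a mab-based optimization that picks a worker subset so as to maximize a reward; this excerpt does not fix the bandit variant. The sketch below is therefore a hedged illustration using UCB1-style scores to rank candidate workers, with the reward signal (for example, useful work per unit of energy) and the subset size left as placeholders rather than deal's actual design.

import math

class WorkerSelector:
    # bandit-style worker selection: keep a running mean reward per worker
    # and rank workers by an upper-confidence score
    def __init__(self, worker_ids):
        self.counts = {w: 0 for w in worker_ids}    # times each worker was picked
        self.values = {w: 0.0 for w in worker_ids}  # running mean reward
        self.rounds = 0

    def _ucb(self, w):
        if self.counts[w] == 0:
            return float("inf")                     # explore unseen workers first
        bonus = math.sqrt(2.0 * math.log(self.rounds) / self.counts[w])
        return self.values[w] + bonus

    def select(self, n):
        # pick the n workers with the highest upper-confidence scores
        self.rounds += 1
        return sorted(self.counts, key=self._ucb, reverse=True)[:n]

    def update(self, worker, reward):
        # fold the observed reward for one learning job into the running mean
        self.counts[worker] += 1
        self.values[worker] += (reward - self.values[worker]) / self.counts[worker]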
therefore, though we still compute similarity models between users, as shown in figure1, after some epochs, deal overwrites the model with newly arrived data and forgets the deleted data, as well as their impact in the model.cpu freq(-1) //tune down dvfs 14: return top-k items from l j regularization, which are widely used in the real-time mobilebased machine learning, in order to highlight the design and implementation of deal in the local layer, and show that deal can be easily adapted to effectively support other algorithms and systems. if we still have access to the similarity matrix l calculated from the original matrix y, then we can calculate the corresponding similarity matrix l from the updated matrix ŷ and compare it with the stale similarity matrix l. concretely, as can be seen from figure4, in the movielens dataset, 92% of the simulated devices show that the advantage of deal is faster in training the converged model compared with the original, and the median values of convergence time for deal and the original are 158ms and 94,988ms (normalized to 0.18 and 0. among these datasets, the housing dataset has the largest accuracy reduction, which is reduced by 12%. there is currently no existing approach that can effectively quantify privacy on mobile phones,, so we measure privacy by observing the proportion of data objects. for original, because it needs to train all the data (10 newly added data and the previous old data), as the number of training increases, its proportion value continues to decrease. federated learning is proposed to make multiple mobile devices collaboratively train a shared deep learning model while guaranteeing the data privacy,,-.this paper proposes an energy efficient learning framework, deal, that achieves energy saving with a decremental learning design. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/232.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/232.txt new file mode 100644 index 0000000000000000000000000000000000000000..bc7a35f8403d588579dc9e3791d16c3d4bd9736e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/232.txt @@ -0,0 +1 @@ +with the advances in artificial intelligence (ai), we are seeing a rapid growth in the number of aidriven applications as well as the volume of data required to train them. however, a large proportion of data used for machine learning are often generated outside the data centers by distributed resources such as mobile phones and iot (internet of things) devices. it is predicted that the data generated by iot devices will account for 75% of the total in 2025 . under this circumstance, it will be very costly to gather all the data for centralized training. more importantly, moving the data out of their local devices (e.g., mobile phones) is now restricted by law in many countries, such as the general data protection regulation (gdpr) 1 enforced in eu.we face three main difficulties to learn from decentralized data: i) massive scale of end devices; ii) limited communication bandwidth at the network edge; and iii) uncertain data distribution and data quality. as an promising solution, federated learning (fl) is a framework for efficient distributed machine learning with privacy protection (i.e., no data exchange). a typical process of fl is organized in rounds where the devices (clients) download the global model from the server, perform local training on their data and then upload their updated local models to the server for aggregation. 
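The round structure just described (clients download the global model, train locally, upload, the server aggregates) is the plain federated-averaging loop. The following is a minimal generic sketch of one such round, weighted by local data size; it is not the profile-based scheme developed later in this text, and local_sgd is only a stand-in for whatever optimizer a real client would run.

import numpy as np

def local_sgd(global_weights, data, lr=0.1):
    # stand-in for the client's local training: one pass of least-squares
    # SGD over (x, y) samples, returning the updated local weights
    weights = global_weights.copy()
    for x, y in data:
        residual = weights @ x - y
        weights -= lr * residual * x
    return weights

def fedavg_round(global_weights, selected_clients):
    # selected_clients: list of (local_data, n_samples); the server averages
    # the uploaded local models weighted by each client's data size
    total = sum(n for _, n in selected_clients)
    new_global = np.zeros_like(global_weights)
    for data, n in selected_clients:
        new_global += (n / total) * local_sgd(global_weights, data)
    return new_global

w = np.zeros(3)
clients = [
    ([(np.array([1.0, 0.0, 0.0]), 1.0)], 1),
    ([(np.array([0.0, 1.0, 0.0]), 2.0)], 1),
]
print(fedavg_round(w, clients))   # global model after one round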
compared to traditional distributed learning methods, fl is naturally more communication-efficient at scale . nonetheless, several issues stand out. the global model's convergence on different data conditions. we ran the fl process with 100 clients to learn a cnn model on the mnist dataset, which is partitioned and allocated to clients in four different ways where the data are 1) original (black line): noiseless and evenly distributed across the clients, 2) biased (magenta line): locally class-imbalanced, 3) noisy (blue line): blended with noise, or 4) biased and noisy (red line). the noise (if applied) covers 65% of the clients; the dominant class accounts for >50% of the samples for biased local data. the fraction of selected clients is 0.3 for each round. the lyapunov's condition can be intuitively explained as a limit on the overall variation (with |z kµ k | 2+δ being the (2 + δ)-th moment of z k ) of a set of random variables. let rp k denote the local profile from client k and rp b denote the baseline profile (generated in model evaluation) on the server.where rp k and rp b are generated by an identical global model; α k is the penalty factor deciding how biased the strategy needs to be against client k. using partial aggregation and our selection strategy that satisfies α k = -ln(λρ k ) div(rp k ,rp b ) , the global model θ(t) converges in expectation given an aggregation interval τ ≥ 1 and a decreasing step size (learning rate).aggregation method rule of selection fedavgfull aggregation random selection cfcfmfull aggregation submission order fedavg-rppartial (scheme ii) random selection fedproxpartial aggregation weighted random by data ratio fedadampartial with momentum random selection aflpartial with momentum local loss valuation fedprof (ours) full/partial aggregation weighted random by score (nox) emission prediction model over a network of 50 sensors.(11)according to, where bw k is the downlink bandwidth of device k (in mhz); snr is the signal-to-noise ratio of the communication channel, which is set to be constant as in general the end devices are coordinated by the base stations for balanced snr with the fairness-based policies; msize is the size (in mb) of the (encrypted) model; the model upload time is twice as much as that for model download since the uplink bandwidth is set to 50% of the downlink bandwidth.where t rp gen k is estimated as the time cost of one epoch of local training; t rp up k is computed in a similar way to the calculation of t comm k in eq. in other words, h k = x 1 w k,1 + x 2 w k,2 + . + x v w k,v + b k is a normally distributed variable since w k,i and b k (k = 1, 2, .where ∇f k (θ k (t -1), ξ k,t-1 ) is the stochastic gradient computed over a batch of data ξ k,t-1 drawn from d k with regard to θ k (t -1). we also define two virtual sequences v(t) = n k=1 ρ k v k (t) and θ(t) = aggregate({v k (t)} k∈s(t) ) for every time step t (note that the actual global model θ(t) is only updated at the aggregation steps t a = {τ, 2τ, 3τ, .for ease of presentation, we also define two virtual gradient sequences: ḡ(t) = n k=1 ρ k ∇f k (θ k (t)) and g(t) = n k=1 ρ k ∇f k (θ k (t), ξ k,t ).where γ = f * -n k=1 ρ k f * k . .let q k = ρ k , take the expectation of θ(t) over s(t) and notice that v(t) = k∈u ρ k v k (t):. 
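The run-on sums at the end of this passage are equations whose sub- and superscripts were flattened during extraction. Read with the symbols used elsewhere in the text (ρ_k the aggregation weight of client k, ξ_{k,t} the mini-batch drawn from D_k at step t), they appear to be the following, offered here as a reading from context rather than a verbatim restoration:

\bar{v}(t) = \sum_{k=1}^{N} \rho_k \, v_k(t), \qquad
\theta(t) = \mathrm{aggregate}\big( \{ v_k(t) \}_{k \in S(t)} \big),

\bar{g}(t) = \sum_{k=1}^{N} \rho_k \, \nabla f_k\big( \theta_k(t) \big), \qquad
g(t) = \sum_{k=1}^{N} \rho_k \, \nabla f_k\big( \theta_k(t), \xi_{k,t} \big),

with the local update step written as
\theta_k(t) = \theta_k(t-1) - \eta_t \, \nabla f_k\big( \theta_k(t-1), \xi_{k,t-1} \big),
where \eta_t is a name chosen here for the decreasing step size (learning rate) mentioned in the text.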
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/233.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/233.txt new file mode 100644 index 0000000000000000000000000000000000000000..837cb978315c28fd81cd6d072de9eac6bd42aed9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/233.txt @@ -0,0 +1 @@ +the distributed ledger technology (dlt) enables a set of independent untrusted nodes to establish an agreement on the state of a shared ledger. blockchain, a type of dlt, is mostly known for its use cases in cryptocurrencies such as bitcoin , ethereum , and xrp , among others. however, the technology can be used for more diverse applications and industries. some examples are biomedical and health care , , internet of things (iot) , , public administration , , and cloud computing , . since each industry has its own unique sets of requirements, many isolated permissioned and permissionless blockchains have been introduced, posing limits regarding its interoperability.currently, developers commit to a single blockchain solution, and they cannot use the capabilities of more than one blockchain (vendor lock-in). these isolated, incompatible networks have resulted in silos of data and assets, which cannot be used from other networks. blockchain interoperability solutions are needed to enable asset and information transfer this project has been supported by the linux foundation as part of the hyperledger summer internships program under the towards blockchain interoperability with hyperledger project. from one blockchain to another. however, interoperability for blockchains encounters challenges that make it different from interoperability for other software networks.first, each interoperability solution should take into account the differences in the architecture of blockchain networks. although all blockchains have an immutable ledger that stores the history of assets, they usually reach a consensus on the order of transactions using different algorithms. as a result, their underlying network and their validation mechanisms can be different from other blockchains. each blockchain network that participates in the interoperation is independent and in full control of their assets and information. moreover, the interoperability solutions should not require significant changes in the underlying blockchain networks, and it should be usable with minimal effort for existing blockchains.this paper aims to tackle this problem by proposing a blockchain interoperability solution based on the publish/subscribe architecture across permissioned blockchains. we have implemented a broker blockchain that acts as a middleman in the interoperability process between the source and the destination networks. it is worth noting that since the broker is itself a blockchain network, it is not a central authority and peers from the source and destination blockchains can also participate in the governance of this network. the broker blockchain keeps a copy of the information that needs to be shared in the form of a topic. a topic has a name, message, publisher, and a set of subscribers. the publisher is the source blockchain network that wants to share the information. it is responsible for creating the topic on the broker and publishing it to the corresponding topic whenever the information needs an update. the subscribers are the destination networks that need some information from the source network. 
as soon as the subscriber network subscribes to a topic, the broker network notifies it whenever a change happens. this solution enables interoperability between blockchains with minimal effort.we used a performance benchmark tool to analyze the performance of the implemented prototype of the platform. the throughput and average latency for different functionalities of the broker network were investigated. the results indicate that our network can handle hundreds of transactions per second. moreover, the evaluations identified the publishtotopic functionality to be the broker network's bottleneck.the rest of this paper is organized as follows. section ii gives a summary of the related work on blockchain interoperability and blockchain-based publish/subscribe protocols. section iii introduces the system design details for the proposed interoperability solution. section iv demonstrates the implementation and deployment details of the platform, while section v presents its performance evaluation. section vi outlines some discussions on the design and evaluation of the platform and section vii concludes the paper. it is worth noting that since the broker is itself a blockchain network, it is not a central authority and peers from the source and destination blockchains can also participate in the governance of this network.a recent survey classifies blockchain interoperability studies in three categories: cryptocurrency-directed interoperability approaches, blockchain engines, and blockchain connectors. instead of enabling blockchain interoperability for currently running blockchains, blockchain engines propose blockchain networks that are interoperable by design.blockchain interoperability aims to enable applications to use the assets and information available on blockchains other than their main blockchain network.the publisher blockchain is the blockchain network that sends the data, also referred to as the source network.the subscriber blockchain is the blockchain network that received the data, also referred to as the destination network. a detailed explanation of each step in figure1 follows:1) for any blockchain network to interact with the broker blockchain, it must enroll in the connector smart contract. 3) similar to the publisher blockchain, the subscriber blockchain should also enroll in the connector smart contract. 7) as soon as a publish request is received by the topics smart contract, the smart contract fetches the information about all the subscribers of the topic from the connector smart contract. finally, the smart contracts that need to be implemented on the broker blockchain are complicated, and the blockchain needs to support this kind of smart contract. each topic has one publisher, the blockchain network that has registered the topic on the broker, which is the only blockchain that can make changes to the topic.the subscriber, or destination blockchain, is the blockchain that requires information from another blockchain to run a task.the publisher, or the source blockchain, is the blockchain network that needs to send information to other blockchains.the topics smart contract has five important functionalities: create a topic, query a topic, publish to a topic, subscribe to a topic, and unsubscribe from a topic. the relay network is also a blockchain network; while exploiting all desirable features offered by blockchain, it runs smart contracts implementing the interoperability functional-ity. 
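The topics smart contract is specified above through five functionalities (create, query, publish, subscribe, unsubscribe), with each topic carrying a name, a message, one publisher and a set of subscribers, and with every publish triggering a notification to all subscribers. The sketch below is a plain-Python illustration of that registry logic, not the actual on-chain smart contract; notify() stands in for the broker's cross-chain message to a subscriber network.

class TopicRegistry:
    def __init__(self):
        # name -> {"publisher": ..., "message": ..., "subscribers": set()}
        self.topics = {}

    def create_topic(self, name, publisher, message=""):
        if name in self.topics:
            raise ValueError("topic already exists")
        self.topics[name] = {"publisher": publisher,
                             "message": message,
                             "subscribers": set()}

    def query_topic(self, name):
        return self.topics[name]

    def subscribe(self, name, network):
        self.topics[name]["subscribers"].add(network)

    def unsubscribe(self, name, network):
        self.topics[name]["subscribers"].discard(network)

    def publish(self, name, publisher, message, notify):
        topic = self.topics[name]
        if publisher != topic["publisher"]:
            # only the source network that registered the topic may update it
            raise PermissionError("only the topic's publisher may publish")
        topic["message"] = message
        for network in topic["subscribers"]:
            notify(network, name, message)   # broker pushes the update out

registry = TopicRegistry()
registry.create_topic("asset-prices", publisher="chainA")
registry.subscribe("asset-prices", "chainB")
registry.publish("asset-prices", "chainA", "updated state",
                 notify=lambda net, topic, msg: print(net, topic, msg))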
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/234.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/234.txt new file mode 100644 index 0000000000000000000000000000000000000000..fbd4e54ae5222727cdcea2307951eed4767bdbb2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/234.txt @@ -0,0 +1 @@ +we present the streamlet protocol by completing the prototype in section 2.1, as shown in figure 10. the main motivation of streamlet is to achieve simplicity. recall that we say a chain is certified if all its blocks are certified. the protocol advances in rounds, and each round has a duration 2∆ where ∆ is the assumed maximum network delay. in each round, the leader proposes a block extending the longest certified chain it knows. the replicas will vote for the first proposal by the leader, if the proposed block extends one of the longest certified chains it has seen. when there exist three the streamlet protocol can be defined by the prototype in figure 1 with the following components. for simplicity, the streamlet also assumes a message echo mechanism, where an honest process forwards a message to all other processes when it receives a previously unseen message.• state. each replica locally keeps all certified blocks it has seen. 2) replicas always propose and vote for the longest chain, thus the proposing/voting/locking/commit rules are different. in other words, the rules depend on the height of the block in the blockchain. we call such approach height-based as different from hotstuff/diembft which is round-based. (3) in each round, the replicas send vote messages to all other replicas instead of just the next leader. replicas also forward any unseen message it received to others, leading to a message complexity o(n 3 ) per round, and hence amortized o(n 3 ) cost per block decision. as can be seen above, streamlet sacrifices performance such as message complexity and commit latency to achieve protocol simplicity. when receiving the leader's proposal, replicas send a vote message containing the block (in practice only the hash digest of the block) to the next leader, if the proposal's round number is larger than its highest voted round number r vote and the proposal extends a block of a round number ≥ r lock . however, the (f + 1)strong commit of b does not rule out a conflicting f -strong committed block b ′ , as the assumption of the f -strong commit is violated (number of faults is f + 1) and the f -strong guarantee for b ′ does not hold anymore. a strong-vote with a marker for block b ′ can "vote for" block b, if b = b ′ or the round number of b is larger than the value of marker and b ′ extends the sft-diembft protocol modifies the original diembft protocol in figure2with the following changes. to strong commit a block b with higher confidence, the sft-diembft protocol uses a strong 3-chain rule as the strong commit rule, which requires every block in the 3-chain to have a set of x + f + 1 endorsers, instead of 2f + 1 direct votes from a qc in the regular commit rule. if a block b with round number r has e endorsers, then no other conflicting block with round number r can be certified under t ≤ e -f -1 byzantine replicas. if a block b of round number r is x-strong committed at some honest replica by the strong commit rule and the number of byzantine faults is ≤ x, then any certified block with round number ≥ r must extend b. 
since block b is x-strong committed, there exists three blocks b = b k , b k+1 , b k+2 with consecutive round numbers r, r + 1, r + 2, and each of the block b k , b k+1 , b k+2 has ≥ x + f + 1 endorsers.where r h is the largest round number of any conflicting block b ′ that h voted for on f, and r l is the largest round number of the common ancestor block of both b and b ′ . after gst, if the actual number of byzantine faults is t ≤ f and rounds r to r + 5 have honest leaders, then the block b proposed by the round r leader is (2f -t)-strong committed within at most n + 2 rounds. the replica x-strong commits a block b k and all its ancestors, if and only if there exists three adjacent blocks b k , b k+1 , b k+2 in the chain with consecutive round numbers, and each of b k , b k+1 , b k+2 has at least x + f + 1 distinct signed vote messages., b f +1 vote for b r . in round r + 1, the leader is byzantine, it proposes two conflicting blocks b r+1 (extending b r ) and b ′ r+1 (extending an earlier block b r-1 ). thus, both b r+1 and b ′ r+1 get certified., h f , h f +1 and f byzantine replicas b 1 , . endorsement now has an additional parameter k, and a strong-vote for block b ′ k-endorses a block b if and only if b = b ′ , or b ′ extends b and marker < k. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/235.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/235.txt new file mode 100644 index 0000000000000000000000000000000000000000..5390a2cbe797061dc51b916baa42a28b01c07e25 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/235.txt @@ -0,0 +1 @@ +an algorithm is distributed if its execution takes place on several entities of a system that communicate and coordinate their actions to achieve a common goal. such a paradigm covers a wide range of topics and settings. in some settings, the feasibility of the distributed task is certain, and the main question is the cost at which the task can be realized, i.e., its complexity in terms of time and communication. when feasibility is not certain, the natural question is to characterize the conditions under which the task is feasible, and understand what are the main obstructions.many sources of impossibility in distributed computing stem from symmetries in the system, which prevent the entities (also called nodes or processors) from making proper decisions. a classical example is the impossibility of consensus in fully asynchronous systems in presence of a single crash . when the nodes of the system communicate through a network, the structure of the network itself is a possible source of symmetry. in some settings, these symmetries can be broken using randomness and/or unique identifiers. without such features, i.e., when one considers deterministic algorithms in anonymous networks, it is crucial to understand the kinds of symmetry that prevent a problem from being solved.in a seminal work, angluin initiates the study of impossibility results in anonymous networks. in particular, she shows that no deterministic algorithm exists that solves leader election and related problems in presence of certain symmetries. here, the symmetries are not global ones, but a form of local similarity which prevents the nodes from distinguishing between several execution scenarios. the key concept in these characterizations is that of a covering relation, i.e., a homomorphism ϕ from the actual communication graph g to a (possibly smaller) graph h, which preserves neighborhood. 
angluin's lifting lemma establishes that, if such a relation is proper, then given the execution of an algorithm in h, there exists a possible execution of the same algorithm in g which acts indistinguishably on those vertices of g that ϕ sends to a same vertex of h. for instance, if a leader is elected in h, then several leaders may be elected in g, which contradicts the existence of an election algorithm for g.a few years later, yamashita and kameda started a systematic study of four representative problems in anonymous networks through a series of articles . these problems are leader election, edge election, spanning tree construction, and topology recognition. in particular, they obtain in a rich set of characterizations pertaining to different types of symmetries.the key ingredient in these works is that of a view. the view of a vertex v is an infinite tree rooted at v, which unfolds recursively the structural information available to v (through its neighbors, the neighbors of its neighbors, etc.). the symmetricity of a graph in general is then defined as the maximum multiplicity among similar views (over all port numberings). finally, they define a concept of quotient graph in which every dissimilar view is represented by a single vertex, and which plays a similar role as a minimum graph for the covering relation.while both families of tools-those based on views and those based on coverings-serve the same essential purpose, the way they relate to each other is not straightforward. one source of confusion is that the computational models considered in and are different. angluin considers a coarse-grain pairwise interaction model in which the local symmetry is broken upon interaction between two neighboring nodes (in the same spirit as the more recent population protocols ). in contrast, yamashita and kameda consider an asynchronous message passing model with neither atomicity nor local symmetry-breaking facilities. (both use locally-unique port numbering.) for one of the problems considered, topology recognition, yamashita and kameda nonetheless express a sufficient condition in terms of angluin's coverings, warning the reader that the condition is not necessary.in , boldi and vigna establish a fundamental link between views and directed graph coverings, considering the notion of "fibrations" (a homomorphism between two directed graphs which preserves the outgoing arcs): two vertices have the same view if and only if they lie in the same fibre with respect to a minimal fibration. they also give algebraic characterizations of computability in anonymous networks, where fibrations play the central role (see for the particular case of election and for a general overview). a key contribution in these works is that asynchronous message passing can indeed be studied through the lenses of coverings, but on the condition that the communication graph be handled as a symmetric directed graph. this subtle, and somewhat counterintuitive aspect, of using directed graphs to deal with undirected networks, is what enables (among others) a proper treatement of loops in the minimum graph, which posed some problems in the case of quotient graphs. in particular, the homomorphism that sends a graph to its quotient graph in does not always induce a bijection among local port numbers due to some of these loops. 
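The view described above is easiest to see on a small port-numbered graph. The sketch below computes a depth-bounded view as a nested tuple, labelling each edge with the port numbers at both ends so that two vertices with indistinguishable neighbourhoods produce equal values; the encoding (children sorted by local port, tuples as trees) is one concrete convention chosen here for illustration, not the notation of the papers discussed.

def view(adj, v, depth):
    # adj[v] is a list of (local_port, neighbour, port_back) triples;
    # the depth-0 view is empty, deeper views unfold the neighbours recursively
    if depth == 0:
        return ()
    return tuple((p, q, view(adj, u, depth - 1))
                 for p, u, q in sorted(adj[v]))

# A 4-cycle with a rotation-invariant port numbering: every vertex has the
# same view at every depth, which is exactly the kind of symmetry that makes
# deterministic leader election impossible on such an instance.
adj = {
    0: [(1, 1, 2), (2, 3, 1)],
    1: [(1, 2, 2), (2, 0, 1)],
    2: [(1, 3, 2), (2, 1, 1)],
    3: [(1, 0, 2), (2, 2, 1)],
}
print(view(adj, 0, 4) == view(adj, 1, 4) == view(adj, 2, 4))   # True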
the exact formulation of these characterizations relies on the symmetric directed version of a graph g, dir(g), obtained by replacing every edge of g by two opposite arcs between the same pair of vertices. [bcg + 96,bv02,cha06,cm0 the communication graph in this case is better seen as a directed symmetric graph dir(g) canonically obtained from g by replacing each edge {u, v} by two arcs (u, v) and (v, u). upon termination, the collection of configurations define a directed symmetric graph d such that (1) dir(g) is a symmetric covering of d, and (2) d may be any symmetric directed graph of which dir(g) is a symmetric covering, depending on the particular ordering of events in the execution. there exists a distributed algorithm which computes a spanning tree of g for all port numberings δ if and only if either dir(g) is minimal for the symmetric covering relation, or dir(g) is a 2-sheeted symmetric covering of a symmetric graph d having at least one loop and it is not a q-sheeted symmetric covering of a symmetric digraph with q > 2. if all the symmetric digraphs d such that dir(g) is a q-sheeted symmetric covering of d admit a unique q-sheeted symmetric covering that is simple, connected, and has n vertices, then there exists a distributed algorithm solving topology recognition for all port numberings δ in (g, δ) with the knowledge of n = |v |. there exists a distributed algorithm solving the topology recognition problem for all port numberings δ in (g, δ) with the knowledge of n = |v | if and only if any symmetric digraph d such that dir(g) is a q-sheeted symmetric covering of d admits exactly one simple connected q-sheeted symmetric covering. if m computes g 1 , then g is also successfully recognized because g 4 is the only symmetric covering of g 1 on 4 vertices which is simple and connected., if g is g 5 , g 6 , or g 7 , then recognition may fail because all these graphs are symmetric coverings of g 1 (with the same number of sheets). given a graph g, if there is no graph h with the same size as g such that g and h have a finite common covering then there exists a distributed algorithm for computing the topology of g. the possible graphs defined by four vertices in the subgraph c 1 imply that at least one of the images, by the covering mapping, of the edges {v 0 , v 1 }, {v 0 , v 2 }, {v 0 , v 3 } is not a bridge in d, thus one of the edges {v 0 , v 1 }, {v 0 , v 2 }, {v 0 , v 3 } is not a bridge in g 1 (contradiction).i : {the vertex v leader or co-leader owns and initializes the spanning tree computation} begin send via each port (different from co-leader if any) r 1 : {upon receipt by v via the port q} begin if v owns for the first time then t reev := t reev ∪ {q}; send via the port q send via each port of v different from q else send via the port q otherv := otherv ∪ {q} and the set of its children (port numbers through which it receives the message in-the-tree). at the end of the computation of m on a graph g, each vertex has a number and can, thanks to some local information (the set of local views of the vertices of g), build a graph ρ(g, δ) such that (dir(g), δ) is a symmetric covering of ρ(g, δ). 
thus, in the case where dir(g) is minimal for the symmetric covering relation, each vertex can build g, and every vertex of g will have a unique number between 1 and the size of g: the algorithm is an enumeration (naming) algorithm.in the case where (dir(g), δ) is not minimal and ρ(g, δ) is not, in general, isomorphic to (dir(g), δ), then (dir(g), δ) is a symmetric covering of ρ(g, δ), each vertex can build ρ(g, δ) and each vertex of ρ(g, δ) (and of g) will have a unique number between 1 and the size of ρ(g, δ). if a vertex v has a neighbor u such that δ u (v) = p and δ v (u) = q, then (m, p, q) ∈ n (v) if the last message that v got from u indicated that n(u) = m; -m (v) ∈ n × p(n 3 ) is the mailbox of v. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/236.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/236.txt new file mode 100644 index 0000000000000000000000000000000000000000..1d1e20112bdca179e898645466e296530e42b717 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/236.txt @@ -0,0 +1 @@ +internet of vehicles (iovs) and connected vehicles require vehicular communication which is facilitated by the vehicular ad hoc network (vanet). vanet has some unique features such as high mobility that requires specialized software to simulate. moreover, vanet also has some specialized protocols such as clustering which requires complex calculation for a hierarchical structure. to simulate a vanet scenario, specifically two parts are simulated. the first part is to generate traffic that is provided by traffic simulator like simulation for urban mobility (sumo) , move , etc. the second part is the main part of the vanet simulation what is simulated by a traditional network simulator or vanet simulator.internet-of-things (iots), internet-of-vehicles (iovs), etc. are advancing rapidly. at the same time, simulation platforms for vanet are not advancing at a steady rate that can meet the growing demand for advanced simulation tools such as vanet clustering which requires the use of a large database, real-time data processing, complex multi-level hierarchical calculation, etc. moreover, the platform should be accessible through the internet, preferably, a cloudbased platform with a user-friendly environment. some simulation platforms have been developed over the years; however, while vanet research is advancing rapidly, the simulation platforms are not advancing concurrently. for example, no simulator is getting more popularity than ns-2 while ns-2 has not been updated in a decade, since 2011 . some attempts have been made to develop specialized vanet simulators such as trans , veins , netsim , etc.; however, these platforms are either proprietary or need a specific operating system and specialized setup. also, they do not have a clustering module or accessibility over the internet.as part of our research, we needed a vanet simulation platform that is equipped with a fully functional clustering module, easy access of data through the database server, machine-independent, and easily accessible over the internet. moreover, a cloud based vanet simulation platform will facilitate a path providing an application for real-life vehicular communication.our objective was to develop a fully functional vanet simulator with a complete clustering module. our aim is to develop the simulator which will be using a database server for large data and will be accessible over the internet. 
cloud based vanet simulator (cvanetsim) will be the first of its kind. the following features will be available for cvanetsim:➢ all necessary features of a vanet simulator, ➢ fully functional clustering module, ➢ using a database to provide efficient access and update on data, ➢ cloud based platform to access over the internet.in the next section (section ii), cvanetsim has been described and section iii represents a conclusion with a future plan on cvanetsim. the second part is the main part of the vanet simulation what is simulated by a traditional network simulator or vanet simulator. at the same time, simulation platforms for vanet are not advancing at a steady rate that can meet the growing demand for advanced simulation tools such as vanet clustering which requires the use of a large database, real-time data processing, complex multi-level hierarchical calculation, etc. some simulation platforms have been developed over the years; however, while vanet research is advancing rapidly, the simulation platforms are not advancing concurrently.as part of our research, we needed a vanet simulation platform that is equipped with a fully functional clustering module, easy access of data through the database server, machine-independent, and easily accessible over the internet. moreover, a cloud based vanet simulation platform will facilitate a path providing an application for real-life vehicular communication.our objective was to develop a fully functional vanet simulator with a complete clustering module.➢ all necessary features of a vanet simulator, ➢ fully functional clustering module, ➢ using a database to provide efficient access and update on data, ➢ cloud based platform to access over the internet.the discrete-event cloud based vanet simulator, cvanetsim, is developed using java programming language and mysql database in a tomcat web server.vanet requires analysis of big data as well as clustering protocol demands complex multi-level calculation. moreover, to build a real-life application for vanet, we need a web based vanet simulator that is capable of handling big data using a database. cvanetsim is developed especially for vanet scenarios and fulfilled all these requirements including a vanet clustering protocol. since cvanetsim is accessible from the internet, cvanetsim does not serve as a simulator only, any kind of real-life vanet application can be developed using this simulator without storing or processing any data on a local machine. cvanetsim is a discreteevent simulator which uses mysql database to use sumo data and then process data to generate a cluster. cvanetsim processes sumo data and analyzes various features of vehicles such as degree, transmission range, velocity, relative velocity, distance, position, relative distance, angle, etc. if clustering protocol is needed to be implemented, the clustering module receives all analyzed data and process further to create cluster and cluster head (ch) depending on a particular algorithm and transmission range. 
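as an illustration of the clustering step sketched above (a simplified toy version in python, not cvanetsim's actual java/mysql implementation and not any particular published clustering protocol), vehicles can be grouped greedily by transmission range and the vehicle covering the most unassigned neighbours chosen as cluster head (ch):

    import math

    def cluster(vehicles, tx_range):
        # vehicles: dict id -> (x, y) position; returns a list of (ch, members)
        in_range = lambda a, b: math.dist(vehicles[a], vehicles[b]) <= tx_range
        unassigned, clusters = set(vehicles), []
        while unassigned:
            # the vehicle reaching the most unassigned vehicles becomes cluster head
            ch = max(unassigned, key=lambda v: sum(in_range(v, u) for u in unassigned))
            members = {u for u in unassigned if in_range(ch, u)}
            clusters.append((ch, members))
            unassigned -= members
        return clusters

    print(cluster({1: (0, 0), 2: (50, 0), 3: (300, 0)}, tx_range=100))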
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/237.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/237.txt new file mode 100644 index 0000000000000000000000000000000000000000..b83d0bc119ea3809ad6f28b03c131e98a8874ee9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/237.txt @@ -0,0 +1 @@ +this paper is the fourth instalment in a series of works devoted to describing the semantics of extensions of petri nets using categorical tools.category theory has been applied to petri nets starting in the nineties ; see also . the main idea is that we can use different varieties of free monoidal categories to describe the executions (or runs) of a net . these works have been influential since they opened up an avenue of applying high-level methods to studying petri nets and their properties. for instance, in the categorical approach allowed to describe glueing of nets leveraging on colimits and double categories, while category-theory libraries such as can be leveraged to implement nets in a formally verified way. these libraries implement category theory directly, so that one could translate the categorical definitions defining some model object directly and obtain an implementation.in , we started another line of research, where we were able to define a categorical semantics for coloured nets employing monoidal functors. the grothendieck construction was then used to internalize this semantics, obtaining the well-known result that coloured nets can be "compiled back" to petri nets.in , we extended these ideas further, and we were able to characterize bounded nets and mana-nets -a new kind of nets useful to model chemical reactions -in terms of generalized functorial semantics.this approach, based on the correspondence between slice categories and lax monoidal functors to the category of spans , has still a lot to give. in this paper, we show how it can be used to model hierarchical nets.there are a lot of different ways to define hierarchical nets , which can be seen as a graph-based model. it means that we have one "parent" petri net and a bunch of "child" nets. a transition firing in the parent net corresponds to some sort of run happening in a corresponding child net. the main net serves to orchestrate and coordinate the executions of many child nets in the underlayer. this paper will contain very little new mathematics. instead, we will reinterpret results obtained in to show how they can be used to model hierarchical nets, moreover, in a way that makes sense from an implementation perspective.it is worth noting that category theory in this paper is used in a way that is slightly different than the usage in graph transformations research: we won't be using category theory to generalize definitions and proofs to different classes of graph(-related) objects. instead, we will employ categorical concepts to actually build a semantics for hierarchical petri nets. a lax functor is defined by the same data, but both the 2-cells µ : f f • fg ⇒ f( f g) and η : id fa ⇒ f(id a ) can be non-invertible; the same coherence diagrams in definition 3 hold.the main insight of categorical semantics for petri nets is that the information contained in a given net is enough to generate a free symmetric strict monoidal category representing all the possible ways to run the net. 
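the parent/child orchestration described in this passage can be sketched very concretely (a toy encoding with our own naming, not the categorical span construction itself): each transition of the parent net carries a child net together with an initial and an accepting marking, and firing the parent transition is accepted only if the supplied child run leads from the initial to the accepting marking.

    from collections import Counter

    def fire(net, marking, transition):
        # net: {t: (consumed, produced)} with multisets of places as Counters
        consumed, produced = net[transition]
        if any(marking[p] < n for p, n in consumed.items()):
            raise ValueError(f"{transition} is not enabled")
        return marking - consumed + produced

    def fire_hierarchical(parent, children, marking, transition, child_run):
        # children[transition] = (child_net, initial_marking, accepting_marking)
        child_net, start, accept = children[transition]
        m = start
        for t in child_run:            # replay the proposed execution of the child net
            m = fire(child_net, m, t)
        if m != accept:                # the child run must end in the accepting marking
            raise ValueError("child run does not reach the accepting marking")
        return fire(parent, marking, transition)

in this reading, a transaction sent to the parent net must also carry a proper execution of the corresponding child net, which mirrors the remark at the end of this entry.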
connecting input and output places of a transition in the parent net with certain places in the corresponding child, we can represent the orchestration by saying that each time a transition in the parent net fires, its input tokens are transferred to the corresponding child net, that takes them across until they reach a place connected with the output place in the parent net. our strategy is to consider a hierarchical net as an extension of a petri net: the parent net will be the petri net we extend, whereas the children nets will be encoded in the extension.we sometimes consider the span as a function f : s f → a × b, thus we may write f (s) = (a, b) for s ∈ s f with f 1 (s) = a and f 2 (s) = b. given a guarded petri net with side effects n, n , a marking for n, n is a pair (x, x) where x is an object of f (n) and x ∈ n x. we say that a marking (y, y) is reachable from (x, x) if there is a morphism f : x → y in f (n) and an element s ∈ s f such that n f (s) = (x, y).the fssmc n f at the center of the span is called the child net associated to f ; the morphisms f and f are called play n f and stop n f , respectively.unrolling the definition, we are associating to each generating morphism of f of f (n) -the parent net -a fssmc n f -the child net. as the feet of the spans corresponding to the child nets will, in general, be varying with the net themselves, we need to pre and post-compose them with other spans to ensure composability: f and f represent morphisms that select the initial and accepting states of n f , that is, markings of n f in which the computation starts, and markings of n f in which the computation is considered as concluded. notice how this also solves the problems highlighted in section 3, as f and f mediate between the shape of inputs/outputs of the transition f and the shape of n f itself. nets n, n in the category petri span with n having the shape of definition 13 form a subcategory, denoted with petri , and called the category of hierarchical petri nets.• a morphism from (x, x) to (y, y) in m is a pair ( f , s) where f : x → y in f (m) and s ∈ s m f in the apex of the corresponding span that connects x to y.clearly, now we can define hierarchical nets with a level of hierarchy higher than two by just mapping a generator f of the parent net to a span where n f is in the form n for some other hierarchical nets n, and the process can be recursively applied any finite number of times for each transition.from this point of view, a hierarchical net would work exactly as a standard petri net, with the exception that in sending a transaction to the parent net, the user also has to specify, in the transaction data, a proper execution of the child net corresponding to the firing transition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/238.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/238.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ef01c48703dbfc368f44efb4b3bb8407aa7c2ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/238.txt @@ -0,0 +1 @@ +tightly coupled processor arrays (tcpas) are loop accelerators with the goal to be energy efficient by offering comprehensive loop acceleration, meaning they handle all parts of loop execution: computation, control, and communication. 
for this purpose, tcpas have a grid of numerous, simple processing elements (pes) to exploit task- (multiple loops in parallel), loop- (multiple parts of a loop in parallel), iteration- (multiple subsequent iterations in parallel), and instruction-level parallelism; they have global controllers to centrally compute control flow and unburden the pes, a circuit-switched interconnect network to locally communicate intermediate data, and i/o buffers with address generators to autonomously stream i/o data only at the borders. synchronization and efficient utilization of these components rely on a parallelizing compiler, in particular cycle-accurate scheduling of operations as well as high-quality register allocation and routing, all of which are np-complete problems. because of this tight synchronization, the components require distinct programs and configuration data for any distinct combination of loop bounds and number of allocated pes, but these two parameters are in general unknown a priori. the number of allocated pes is unknown because multiple applications may dynamically allocate regions of pes sized in accordance with, for example, non-functional properties such as latency and energy consumption. we face a conundrum: both programs and configuration data must be generated at runtime despite the np-complete problems compilation involves. this renders just-in-time compilation unsuitable. instead, we propose to split compilation into two phases, as illustrated in figure 1: (1) symbolic mapping (section 5) is performed off-line and solves the involved np-complete problems, generating a symbolic configuration. a symbolic configuration is a novel compact representation of configurations parameterized in the loop bounds and number of pes. here, we contribute the first solution to allocating and representing routes on the interconnect network (section 5.4) despite not yet knowing the number of allocated pes. the pe programs are represented symbolically and compactly by a polyhedral syntax tree. (2) instantiation (section 6) is performed once the parameter values are known and generates a concrete configuration from a symbolic configuration. in particular, we show for the first time how to instantiate pe programs from a polyhedral syntax tree and that it is possible to instantiate them in polynomial time independently of the number of pes (sections 6.1-6.3). in section 7, we present experimental results showing the time and space efficiency of this hybrid compilation approach for a number of real-world loop programs-but first, we distinguish our work from related approaches to loop parallelization. mathematically, tiling decomposes an iteration space i into an intra-tile iteration space j and an inter-tile iteration space k, i.e. i ⊆ j ⊕ k. for example, the assembly instruction addi rd0 rd1 10 can be structured into five fragments: the mnemonic addi, the registers rd0 and rd1, the literal 10, and finally the entire instruction itself. given a condition space after tiling i* ⊆ j ⊕ k, the function split: (κ, i*) ↦ j_κ = { j ∈ j | (j, κ) ∈ i* } maps a tile κ ∈ k to its intra-tile domain j_κ within i*, that is, the set of iterations within tile κ that lie in i*. given a polyhedral syntax tree t with condition space after tiling i* = domain(t) ⊆ j ⊕ k, specialization for a tile κ ∈ k, denoted t ⊲ κ, recursively maps i* to the intra-tile domain of κ within i*, i.e. domain(t ⊲ κ) = split(κ, i*). given a condition space after tiling i* ⊆ k ⊕ j, the set of tiles with the same intra-tile domain j_κ, called its inter-tile domain, is given by the function tiles: (j_κ, i*) ↦ { κ ∈ k | split(κ, i*) = j_κ }.
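on a small explicit example, the split and tiles functions above can be mimicked with plain set comprehensions (an illustrative sketch with our own variable names, not the paper's formal machinery):

    # iteration space after tiling: pairs (j, k) of intra-tile iteration j and tile k
    I_star = {(j, k) for k in range(3) for j in range(4) if not (k == 2 and j == 3)}

    def split(k, I_star):
        # intra-tile domain of tile k: the iterations inside tile k that lie in I*
        return {j for (j, kk) in I_star if kk == k}

    def tiles(J_k, I_star):
        # inter-tile domain: all tiles whose intra-tile domain equals J_k
        ks = {k for (_, k) in I_star}
        return {k for k in ks if split(k, I_star) == J_k}

    print(split(2, I_star))             # {0, 1, 2}: the last tile is one iteration short
    print(tiles({0, 1, 2, 3}, I_star))  # {0, 1}: the two full tiles share one intra-tile domain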
△ definition 8 implies that if two tiles κ1 and κ2 are part of the same intra-tile pattern i = (j, k) of a leaf's condition space, that is, if both κ1 ∈ k and κ2 ∈ k, specialization maps them to the same intra-tile pattern j, making domain(t ⊲ κ1) = domain(t ⊲ κ2). for each pe in k, there is a partitioning into two subsets: k+, containing all k such that the pe lies in k, and k-, containing all k such that it does not. since an instruction is issued whenever the current iteration is in its leaf's condition space j = domain( ), the instruction therefore occupies the slot of the kernel issued in all iterations that are the ⌊ / ⌋-th successor of an iteration in j (compare the red instructions in figure 6), given by succ. [figure 6: example pe instruction schedules built from shri, andi, and nop kernels; listing omitted.] in the following, j pc,fu = j ∪ e′ pc,fu denotes the intra-tile iteration space including the epilog space, and e the union of all individual epilog spaces. the folded polyhedral syntax tree ′ pc,fu of a functional unit gives rise to a set qc pc,fu of kernel classes, that is, a partition of j ∪ e′ pc,fu into subsets q of iterations in which the same kernel is issued. [figure: example kernel schedule omitted.] the constructed control flow graph represents the functional unit program to be generated: for any iteration in j pc,fu, there is exactly one node whose kernel class q contains it, representing the kernel to be issued. clearly, the execution time of program instantiation is roughly linear in the number of processor classes and not in the number of pes, as is, for example, evident for the matrix multiplication example: program instantiation for both 4 × 4 = 16 and 32 × 32 = 1024 pes takes about equally as long because both have two processor classes, meaning two programs need to be instantiated.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/239.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/239.txt new file mode 100644 index 0000000000000000000000000000000000000000..9953eed6710956c2e84639f628b19288580ea0a8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/239.txt @@ -0,0 +1 @@ +aspect-based sentiment analysis (absa) aims at identifying the sentiment polarity towards the specific aspect in a sentence.an target aspect refers to a word or a phrase describing an aspect of an entity.for example, in the sentence "the decor is not special at all but their amazing food makes up for it", there are two aspect terms "decor" and "food", and they are associated with negative and positive sentiment respectively.recently, neural network methods have dominated the study of absa since these methods can be trained end-to-end and automatically learn important features.(wang et al., 2016) proposed to learn an embedding vector for each aspect, and these aspect embeddings were used to calculate the attention weights to capture important information with regard to the given aspects.(tang et al., 2016b) developed the deep memory network to compute the importance degree and text representation of each context word with multiple attention layers.(ma et al., 2017) introduced the interactive attention networks (ian) to interactively learn attentions in contexts and targets, and generated the representations for target and context words separately.(xue and li, 2018) proposed to extract sentiment features with convolutional neural networks and selectively output aspect related features for classification with gating mechanisms.subsequently, transformer (vaswani et al., 2017) and bert based methods (devlin et al., 2018) have shown high potentials on absa task.there are also several studies attempting to simulate the process of human reading cognition to further improve the performance of absa (lei et al., 2019;.so far, several absa datasets have been constructed, including semeval-2014 restaurant review dataset, laptop review dataset (pontiki et al., 2014) and twitter dataset (dong et al., 2014).although these three datasets have since become the benchmark datasets for the absa task, most sentences in these datasets consist of only one aspect or multiple aspects with the same sentiment polarity (see table 1) 2 , which makes aspect-based sentiment analysis degenerate to sentence-level sentiment analysis.based on our empirical observation, the sentence-level sentiment classifiers without considering aspects can still achieve competitive results with many recent absa methods (see textcnn and lstm in table 3).on the other hand, even advanced absa methods trained on these datasets can hardly distinguish the sentiment polarities towards different aspects in the sentences that contain multiple aspects and multiple sentiments.with the goal of advancing and facilitating research in the field of aspect-based sentiment analysis, in this paper, we present a new multi-aspect multi-sentiment (mams) dataset.in mams dataset, each sentence consists of at least two aspects with different sentiment polarities, making the proposed dataset more challenging compared with existing absa datasets.considering merely the sentence-level sentiment of the sentence will fail to achieve good performance on mams dataset.we empirically evaluate the stateof-the-art absa methods on mams dataset, the poor results demonstrate that the proposed mams dataset is more 
challenging than the semeval-2014 restaurant review dataset.we analyze the properties of recent absa methods, and propose new capsule networks (denoted as capsnet and capsnet-bert) to model the complicated relationship between aspects and contexts, which combine the strengths of recent nlp advances.experimental results show that the proposed methods achieve significantly better results than the state-of-the-art baseline methods on mams and semeval-14 restaurant datasets.our main contributions are summarized as follows: (1) we manually annotate a large-scale multi-aspect multi-sentiment dataset, preventing absa degenerating to sentence-level sentiment analysis.the release of it would push forward the research of absa.(2) we propose a novel capsule network based model to learn the complicated relationship between aspects and contexts.(3) experimental results show that the proposed method achieves significantly better results than the stateof-the-art baseline methods.aspect-based sentiment analysis (absa) aims at identifying the sentiment polarity towards the specific aspect in a sentence., 2014)and twitter dataset(dong et al., 2014).although these three datasets have since become the benchmark datasets for the absa task, most sentences in these datasets consist of only one aspect or multiple aspects with the same sentiment polarity (see table1) 2 , which makes aspect-based sentiment analysis degenerate to sentence-level sentiment analysis.on the other hand, even advanced absa methods trained on these datasets can hardly distinguish the sentiment polarities towards different aspects in the sentences that contain multiple aspects and multiple sentiments.in mams dataset, each sentence consists of at least two aspects with different sentiment polarities, making the proposed dataset more challenging compared with existing absa datasets.experimental results show that the proposed methods achieve significantly better results than the state-of-the-art baseline methods on mams and semeval-14 restaurant datasets.our main contributions are summarized as follows: (1) we manually annotate a large-scale multi-aspect multi-sentiment dataset, preventing absa degenerating to sentence-level sentiment analysis.data annotation we create two versions of mams dataset for two subtasks of aspect-based sentiment analysis: aspect-term sentiment analysis (atsa) and aspect-category sentiment analysis (acsa).for atsa, we invited three experienced researchers who work on natural language processing (nlp) to extract aspect terms in the sentences and label the sentiment polarities with respect to the aspect terms.three experienced nlp researchers were asked to identify the aspect categories described in given sentences and determine the sentiment polarities towards these aspect categories., w a m } or an aspect category a c , aspect-level sentiment classification aims to predict the sentiment polarity y ∈ {1, ..in this paper, we present mams, a challenge dataset for aspect-based sentiment analysis, in which each sentence contains multiple aspects with different sentiment polarities.the proposed mams dataset could prevent aspect-level sentiment classification degenerating to sentence-level sentiment classification, which might push forward the researches on aspect-based sentiment analysis. 
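the defining filter of the dataset, keeping only sentences that mention at least two aspects with differing sentiment polarities, can be written in a few lines (a sketch over a hypothetical list of annotated sentences, not the authors' annotation pipeline):

    def is_mams(sentence):
        # sentence: {"text": ..., "aspects": [{"term": ..., "polarity": ...}, ...]}
        polarities = {a["polarity"] for a in sentence["aspects"]}
        return len(sentence["aspects"]) >= 2 and len(polarities) >= 2

    corpus = [
        {"text": "the decor is not special at all but their amazing food makes up for it",
         "aspects": [{"term": "decor", "polarity": "negative"},
                     {"term": "food", "polarity": "positive"}]},
        {"text": "great service and great pasta",
         "aspects": [{"term": "service", "polarity": "positive"},
                     {"term": "pasta", "polarity": "positive"}]},
    ]
    mams = [s for s in corpus if is_mams(s)]   # keeps only the first sentence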
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/24.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/24.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e30f7ac8c485742305a78c99d88022bd3fd43eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/24.txt @@ -0,0 +1 @@ +uavs have become increasingly popular in recent years due to their versatility and potential for a wide range of applications, from surveillance and monitoring to delivery and transportation. however, safe and efficient operation of uavs in complex environments remains a significant challenge, particularly when multiple uavs are involved. a key issue is the need to optimize the trajectories of the uavs to achieve various objectives, such as minimizing travel time, avoiding collisions, and maximizing coverage. traditional methods for trajectory planning and control are often limited in their ability to handle the complexity and uncertainty of real-world scenarios and may not be scalable to large fleets of uavs.prior research, exemplified by lai (2020), xue (2017), and qiu and duan (2020), has demonstrated the efficacy of leveraging non-linear optimization techniques. recently, xu et al. (2024) have used multi-objective optimization for trajectory generation. when quick trajectory changes are required, the optimization routine is too slow and not adaptive, and hence, ai techniques are preferred. ai techniques, particularly those based on machine learning and neural networks, have shown great promise in addressing these challenges by enabling uavs to learn from data and adapt to changing conditions lai (2020).these studies involved training ffnn with a single hidden layer, utilizing activation functions such as the hyperbolic tangent function (tanh), sigmoid, etc. the activation functions used do not predict the path with much accuracy, and hence, we first improve this aspect. we systematically apply a diverse set of activation functions to an ffnn with a single hidden layer, conducting a comprehensive comparative analysis. besides sigmoid and tanh, we use rectified linear unit (relu), leaky relu, swish, elliot, and maxout.in our pursuit of enhanced trajectory accuracy, second, we introduce a novel activation function, adaptoswelligauss, which surpasses commonly used counterparts in the same neural network architecture. this function combines the swish activation function, which captures smooth transitions and maintains trajectory continuity, with the elliot activation function, which captures abrupt shifts in direction and velocity, as well as a scaled and shifted gaussian, which makes the activation function robust against noisy data.in autonomous uavs, the crucial components of collision detection and avoidance play a paramount role in ensuring the safety and efficiency of their operations, which is our third focus. the significance of these features becomes even more pronounced when considering multiple uavs taking off simultaneously. collision detection between uavs is straightforward, however, there are many ways to avoid collisions by changing their trajectories. guo et al. (2021) introduces one such popular approach, the circular arc trajectory geometric method (ctga). assuming two uavs are colliding at a point in the path, this technique adds a small perturbation to the path of one of these uavs. a drawback of this algorithm is its susceptibility to getting stuck in a manipulation loop. 
any alteration in the trajectory of one uav may inadvertently create collision candidates with other uavs, leading to a challenging situation. additionally, frequent manipulations in a uav's trajectory can result in a convoluted flight path, compromising the overall efficiency of the uav swarm.another complementary technique to avoid uav collisions is to change their starting times. sastre et al. (2022a) and sastre et al. (2022b) propose such a popular approach. they employ a batching mechanism, creating groups of uavs with non-colliding trajectories to facilitate safe flight. however, the creation of multiple batches introduces a time-intensive process, delaying the overall launch of the uav swarm.in this paper, we introduce an advanced collision detection and avoidance algorithm, referred to as the icdab algorithm. here, we first improve the ctga algorithm from guo et al. (2021), and then we integrate this avoidance algorithm with the batching mechanism leading to our algorithm.the remainder of the paper is organized as follows: section 2 reviews the literature, section 3 describes our proposed algorithms and methodology, section 4 presents the results, and section 5 concludes the paper and suggests directions for future work. this function combines the swish activation function, which captures smooth transitions and maintains trajectory continuity, with the elliot activation function, which captures abrupt shifts in direction and velocity, as well as a scaled and shifted gaussian, which makes the activation function robust against noisy data. a drawback of this algorithm is its susceptibility to getting stuck in a manipulation loop. the most common technique used is detailed inguo et al. swish activation: as uavs navigate through various trajectory phases, the swish component seamlessly adjusts its activation levels to accommodate both gradual and subtle changes. on a broader level, a collision is defined when any uav collision sphere intersects with the collision sphere of another uav. let we be given a list of uavs, their respective trajectories in terms of x, y, and z coordinates, their timestamps, and predefined point on the trajectory of each uav where collision is to be checked (also called waypoints). if a collision is detected, then instead of the standard procedure of building a list of colliding pairs of uavs, we apply the collision avoidance strategy discussed below. this standard algorithm takes the following inputs: precise collision point of the two uavs, and an array for padding adjustments to uav1. next, uav1 is checked for collision with all the remaining uavs, and if there is one, then again collision avoidance strategy as above is applied. this 1 st uav is checked for a collision with all the subsequent uavs in the list (2 nd uav onwards).wlog, let the i th uav does not collide with the 1 st uav, then 1 st and i th uavs are added to this next batch, and both are further checked for collision from (i + 1) th uav in the batching list. as shown in table9, increasing the safe radius initially results in a decrease in the number of colliding uavs, decrease in number of batches, and increase in the maximum number of uavs per batch, achieving optimal results at a safe radius of 2. at this radius, the number of colliding uavs is reduced to 33, the number of batches is minimized to 4, and the maximum number of uavs per batch increases to 396. 
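the detection-and-batching logic described above can be sketched as follows (an illustrative reconstruction in python, not the authors' icdab code): two uavs are flagged as colliding when their collision spheres intersect at any shared timestamp, and batches are then formed greedily so that each batch contains only mutually non-colliding uavs.

    import math

    def collide(traj_a, traj_b, safe_radius):
        # traj: dict timestamp -> (x, y, z); the collision spheres intersect when the
        # centres are closer than twice the safe radius at some common timestamp
        common = traj_a.keys() & traj_b.keys()
        return any(math.dist(traj_a[t], traj_b[t]) < 2 * safe_radius for t in common)

    def make_batches(trajs, safe_radius):
        # trajs: dict uav_id -> trajectory; greedy batching of non-colliding uavs
        pending, batches = list(trajs), []
        while pending:
            batch = []
            for uav in pending:
                if all(not collide(trajs[uav], trajs[b], safe_radius) for b in batch):
                    batch.append(uav)
            batches.append(batch)
            pending = [u for u in pending if u not in batch]
        return batches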
however, beyond this optimal point, further increases in the safe radius lead to a reverse trend, where both the number of colliding uavs and the number of batches begin to increase again, while the maximum number of uavs per batch decreases. as in the given figure, when the trajectory of uav1 is significantly altered to avoid uav2, it may inadvertently collide with uav3, which was previously not on a collision course with uav1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/240.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/240.txt new file mode 100644 index 0000000000000000000000000000000000000000..c840396de883f5fcf28150add13b421e0562c231 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/240.txt @@ -0,0 +1 @@ +coconut sugar, or coco sugar (see fig. 1), is a natural sweetener that has become a popular alternative for people struggling with diabetes mellitus. with a glycemic index (gi) of 35, this sugar is categorized as a low gi food that claims to help prevent obesity and support weight maintenance. the on-growing demand for coconut sugar has led to the development of the philippine national standard for the coconut sap sugar. in 2010, the standard released established a system of grading and classifying the said commodity. the physical characteristics of the coconut sugar involve color, odor, taste and purity. chemical properties include water activity, glucose, fructose, sucrose and ash. as listed in table 1, coconut sugar is classified in terms of quality. in light of this, the proponents strive to provide a novel way which can assess the quality of the coconut sugar with high accuracy using a machine learning approach. rf is a robust machine learning technique used both for classification and regression that uses an ensemble learning algorithm. a total of 350 images of coconut sugar were acquired from two coconut sugar production agencies including philippine coconut authority (pca) and united coconut association of the philippines (ucap). these rgb values were used as input and the classification of the images as "superior", "good", and "reject" was used as the output. cross-validation was used to evaluate the performance of the model using different classification methods. in classifying the coconut sugar quality, numbers 1, 2, and 3 were used and they refer to superior, good, and reject qualities, respectively. the accuracy achieved for all 10 folds was averaged to evaluate the model that used a certain classification method. the accuracy and running time were averaged to compute the overall evaluation of each of the machine learning models. [fig. 6: running times (sec) achieved using different classification methods.] figure 7 describes the plot of the accuracy versus the running time of the models tested, in which models having higher accuracies tend to have higher running times. the values were fed into the algorithms using python and the scikit-learn library. according to the accuracy versus running time graph, sgd is the preferred method in this study due to its high accuracy and ample running time. similarly, this approach can be integrated with computer vision to allow the real-time assessment of coconut sugar.
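a minimal version of the evaluation pipeline described above could look as follows with scikit-learn (hedged sketch: the rgb features and labels below are random placeholders, only the sgd-classifier-plus-10-fold-cross-validation recipe follows the text):

    import numpy as np
    from sklearn.linear_model import SGDClassifier
    from sklearn.model_selection import cross_val_score
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler

    # X: mean r, g, b value per coconut sugar image; y: 1 = superior, 2 = good, 3 = reject
    rng = np.random.default_rng(0)
    X = rng.uniform(0, 255, size=(350, 3))        # placeholder for the 350 images
    y = rng.integers(1, 4, size=350)              # placeholder quality labels

    model = make_pipeline(StandardScaler(), SGDClassifier(max_iter=1000, random_state=0))
    scores = cross_val_score(model, X, y, cv=10)  # 10-fold cross-validation, as in the study
    print(scores.mean())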
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/241.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/241.txt new file mode 100644 index 0000000000000000000000000000000000000000..a9be3882d64ff71a289988a7cbc375eb16d97a5c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/241.txt @@ -0,0 +1 @@ +artificial neural networks (anns) have been demonstrated to be effective for many cases of supervised learning , but programming an ann manually can be a challenging task. frameworks such as tensorflow and pytorch have been created to simplify the creation and use of anns.one of the major uses of artificial neural networks is natural language processing one aspect of which is sentiment analysis. to compare the two machine learning frameworks, the first step was to develop, train, and evaluate the same neural network model in both frameworks. in theory we should be able to obtain the same accuracy in both frameworks. since the same underlying model was being implemented. the second step was to compare the model implementations in the two frameworks based on execution time, memory usage, and ease of development.the data set that was chosen to test the frameworks was a publicly-available set of amazon reviews for video games. due to the nuances and bias involved in what each individual feels a certain rating should be, the data set was then broken down into only positive and negative reviews. the positive reviews consisted of the reviews with ratings of 4 or 5, whereas the negative reviews consisted of the reviews with ratings 1, 2, or 3.neural networks use mathematical calculations, so the textual reviews needed to be converted into numerical information. each occurrence of each word in the textual reviews was then replaced by the numerical index of that word in the common word list if that word occurred in the common word list. any word that were not common enough to be found in the common word list was given the value of 0. each sentence is reduced to a total size of 250 indices. any sentence over 250 words used just the first 250 words found in the vocabulary. if a sentence is shorter than 250 words, then the rest of list is padded with 0's. this meant that 13 words in the original sentence were converted to their numerical representation and the rest of the list was filled with 0's.this sentence highlights some of the issues that were found with the data set. these errors made it so that those words were not common enough to be included in the final sentence, removing some of the important information. in this case, words like 'dvd', 'collection', 'son' and 'wanted' are left out from the tokenized sentence because of errors present in the review. since the words that occur before a certain word in a sentence add importance to the current word being analyzed, rnns are often used in natural language processing. the embedding layer takes the list of 200 numbers representing the review sentence, and changes them into vector representations that are stored in a list of size 32. 
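the preprocessing described above, replacing each word by its index in the common-word list, truncating, and zero-padding to a fixed length, can be reproduced with a short helper (a sketch with a tiny illustrative vocabulary; the text uses sequences of length 250):

    def encode(review, word_index, max_len=250):
        # word_index: word -> index (>= 1) in the common-word list; 0 = unknown / padding
        ids = [word_index.get(w, 0) for w in review.lower().split()]
        ids = ids[:max_len]                      # keep at most the first max_len tokens
        return ids + [0] * (max_len - len(ids))  # pad the remainder with zeros

    vocab = {"game": 1, "fun": 2, "great": 3, "boring": 4}
    print(encode("Great game but boring ending", vocab, max_len=8))
    # -> [3, 1, 0, 4, 0, 0, 0, 0]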
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/242.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/242.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebfc39ae989d319f21f482a399172392b8e59e2d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/242.txt @@ -0,0 +1 @@ +this paper brings together new algorithms and insights to construct a framework for robust and extremely rapid object detection. this framework is demonstrated on, and in part motivated by, the task of face detection. toward this end we have constructed a frontal face detection system which achieves detection and false positive rates which are equivalent to the best published results . this face detection system is most clearly distinguished from previous approaches in its ability to detect faces extremely rapidly. operating on 384 by 288 pixel images, faces are de-tected at 15 frames per second on a conventional 700 mhz intel pentium iii. in other face detection systems, auxiliary information, such as image differences in video sequences, or pixel color in color images, have been used to achieve high frame rates. our system achieves high frame rates working only with the information present in a single grey scale image. these alternative sources of information can also be integrated with our system to achieve even higher frame rates.there are three main contributions of our object detection framework. we will introduce each of these ideas briefly below and then describe them in detail in subsequent sections.the first contribution of this paper is a new image representation called an integral image that allows for very fast feature evaluation. motivated in part by the work of papageorgiou et al. our detection system does not work directly with image intensities . like these authors we use a set of features which are reminiscent of haar basis functions (though we will also use related filters which are more complex than haar filters). in order to compute these features very rapidly at many scales we introduce the integral image representation for images. the integral image can be computed from an image using a few operations per pixel. once computed, any one of these harr-like features can be computed at any scale or location in constant time.the second contribution of this paper is a method for constructing a classifier by selecting a small number of important features using adaboost . within any image subwindow the total number of harr-like features is very large, far larger than the number of pixels. in order to ensure fast classification, the learning process must exclude a large majority of the available features, and focus on a small set of critical features. motivated by the work of tieu and viola, feature selection is achieved through a simple modification of the adaboost procedure: the weak learner is constrained so that each weak classifier returned can depend on only a single feature . as a result each stage of the boosting process, which selects a new weak classifier, can be viewed as a feature selection process. adaboost provides an effective learning algorithm and strong bounds on generalization performance .the third major contribution of this paper is a method for combining successively more complex classifiers in a cascade structure which dramatically increases the speed of the detector by focusing attention on promising regions of the image. 
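the integral image mentioned above is just a two-dimensional cumulative sum; once built, the sum over any rectangle, and hence any haar-like feature assembled from rectangle sums, costs only a handful of array look-ups (a numpy sketch for illustration, not the authors' implementation):

    import numpy as np

    def integral_image(img):
        # ii[y, x] = sum of img over the rectangle [0..y, 0..x]
        return img.cumsum(axis=0).cumsum(axis=1)

    def box_sum(ii, y0, x0, y1, x1):
        # inclusive corners; four look-ups regardless of the rectangle size
        total = ii[y1, x1]
        if y0 > 0: total -= ii[y0 - 1, x1]
        if x0 > 0: total -= ii[y1, x0 - 1]
        if y0 > 0 and x0 > 0: total += ii[y0 - 1, x0 - 1]
        return total

    img = np.arange(16).reshape(4, 4)
    ii = integral_image(img)
    # a two-rectangle haar-like feature: left half minus right half of the window
    print(box_sum(ii, 0, 0, 3, 1) - box_sum(ii, 0, 2, 3, 3))   # -> -16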
the notion behind focus of attention approaches is that it is often possible to rapidly determine where in an image an object might occur . more complex processing is reserved only for these promising regions. the key measure of such an approach is the "false negative" rate of the attentional process. it must be the case that all, or almost all, object instances are selected by the attentional filter.we will describe a process for training an extremely simple and efficient classifier which can be used as a "supervised" focus of attention operator. the term supervised refers to the fact that the attentional operator is trained to detect examples of a particular class. in the domain of face detection it is possible to achieve fewer than 1% false negatives and 40% false positives using a classifier constructed from two harr-like features. the effect of this filter is to reduce by over one half the number of locations where the final detector must be evaluated.those sub-windows which are not rejected by the initial classifier are processed by a sequence of classifiers, each slightly more complex than the last. if any classifier rejects the sub-window, no further processing is performed. the structure of the cascaded detection process is essentially that of a degenerate decision tree, and as such is related to the work of geman and colleagues .an extremely fast face detector will have broad practical applications. these include user interfaces, image databases, and teleconferencing. in applications where rapid frame-rates are not necessary, our system will allow for significant additional post-processing and analysis. in addition our system can be implemented on a wide range of small low power devices, including hand-helds and embedded processors. in our lab we have implemented this face detector on the compaq ipaq handheld and have achieved detection at two frames per second (this device has a low power 200 mips strong arm processor which lacks floating point hardware).the remainder of the paper describes our contributions and a number of experimental results, including a detailed description of our experimental methodology. discussion of closely related work takes place at the end of each section. toward this end we have constructed a frontal face detection system which achieves detection and false positive rates which are equivalent to the best published results. in other face detection systems, auxiliary information, such as image differences in video sequences, or pixel color in color images, have been used to achieve high frame rates. in order to ensure fast classification, the learning process must exclude a large majority of the available features, and focus on a small set of critical features. in the domain of face detection it is possible to achieve fewer than 1% false negatives and 40% false positives using a classifier constructed from two harr-like features. in our lab we have implemented this face detector on the compaq ipaq handheld and have achieved detection at two frames per second (this device has a low power 200 mips strong arm processor which lacks floating point hardware). initial experiments demonstrated that a frontal face classifier constructed from 200 features yields a detection rate of 95% with a false positive rate of 1 in 14084. a positive result from the first classifier triggers the evaluation of a second classifier which has also been adjusted to achieve very high detection rates. 
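the cascade just described amounts to a short-circuiting conjunction of increasingly expensive classifiers: a sub-window is reported as a detection only if every stage accepts it, and most sub-windows are discarded by the cheapest stages (a schematic sketch, with toy stages standing in for trained boosted classifiers):

    def cascade_classify(window, stages):
        # stages: list of (classifier, threshold), ordered from cheapest to most complex
        for classifier, threshold in stages:
            if classifier(window) < threshold:
                return False          # rejected early; later stages are never evaluated
        return True                   # survived every stage: report a detection

    stages = [
        (lambda w: sum(map(sum, w)) / (len(w) * len(w[0])), 60),  # cheap attentional filter
        (lambda w: max(map(max, w)), 200),                        # stand-in for a boosted stage
    ]
    dark_window = [[10] * 24 for _ in range(24)]
    print(cascade_classify(dark_window, stages))   # -> False, rejected by the first stage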
further processing can take any form such as additional stages of the cascade (as in our detection system) or an alternative detection system. in most cases classifiers with more features will achieve higher detection rates and lower false positive rates. in principle one could define an optimization framework in which: i) the number of classifier stages, ii) the number of features in each stage, and iii) the threshold of each stage, are traded off in order to minimize the expected number of evaluated features. each stage is trained by adding features until the target detection and false positives rates are met ( these rates are determined by testing the detector on a validation set).3a notion similar to the cascade appears in the face detection system described by rowley et al.'s two network face system is the fastest existing face detector. while this basic insight is very valuable, in their implementation it is necessary to first evaluate some feature detector at every location.each classifier in the cascade was trained with the 4916 training faces (plus their vertical mirror images for a total of 9832 training faces) and 10,000 non-face sub-windows (also of size 24 by 24 pixels) using the adaboost training procedure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/243.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/243.txt new file mode 100644 index 0000000000000000000000000000000000000000..e356cdfdf8ce7e817d31ecc998f8e73e68c65cb1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/243.txt @@ -0,0 +1 @@ +the holdout method is central to empirical progress in the machine learning community. competitions, benchmarks, and large-scale hyperparameter search all rely on splitting a data set into multiple pieces to separate model training from evaluation. however, when practitioners repeatedly reuse holdout data, the danger of overfitting to the holdout data arises .despite its importance, there is little empirical research into the manifested robustness and validity of the holdout method in practical scenarios. real-world use cases of the holdout method often fall outside the guarantees of existing theoretical bounds, making questions of validity a matter of guesswork.recent replication studies demonstrated that the popular cifar-10 and imagenet benchmarks continue to support progress despite years of intensive use. the longevity of these benchmarks perhaps suggests that overfitting to holdout data is less of a concern than reasoning from first principles might have suggested. however, this is evidence from only two, albeit important, computer vision benchmarks. it remains unclear whether the observed phenomenon is specific to the data domain, model class, or practices of vision researchers. unfortunately, these replication studies required assembling new test sets from scratch, resulting in a highly labor-intensive analysis that is difficult to scale.in this paper, we empirically study holdout reuse at a significantly larger scale by analyzing data from 120 machine learning competitions on the popular kaggle platform . kaggle competitions are a particularly well-suited environment for studying overfitting since data sources are diverse, contestants use a wide range of model families, and training techniques vary greatly. 
moreover, kaggle competitions use public and private test data splits which provide a natural experimental setup for measuring overfitting on various datasets.to provide a detailed analysis of each competition, we introduce a coherent methodology to characterize the extent of overfitting at three increasingly fine scales. our approach allows us both to discuss the overall "health" of a competition across all submissions and to inspect signs of overfitting separately among the top submissions. in addition, we develop a statistical test specific to the classification competitions on kaggle to compare the submission scores to those arising in an ideal null model that assumes no overfitting. observed data that are close to data predicted by the null model is strong evidence against overfitting.overall, we conclude that the classification competitions on kaggle show little to no signs of overfitting. while there are some outlier competitions in the data, these competitions usually have pathologies such as non-i.i.d. data splits or (effectively) small test sets. among the remaining competitions, the public and private test scores show a remarkably good correspondence. the picture becomes more nuanced among the highest scoring submissions, but the overall effect sizes of (potential) overfitting are typically small (e.g., less than 1% classification accuracy). thus, our findings show that substantial overfitting is unlikely to occur naturally in regular machine learning workflows. moreover, kaggle competitions use public and private test data splits which provide a natural experimental setup for measuring overfitting on various datasets.before we delve into the analysis of the kaggle data, we briefly define the type of overfitting we study and then describe how the kaggle competition format naturally lends itself to investigating overfitting in machine learning competitions.considering the danger of overfitting to the test set in a competitive environment, kaggle subdivides each test set into public and private components. in particular, we can view the public test split s public as the regular test set and use the held-out private test split s private to approximate the population loss. these four competitions are the accuracy competitions with the largest number of submissions and serve as representative examples for a typical accuracy competition in the metakaggle dataset (see table1for information about these competitions). in an ideal competition with a large test set and without any overfitting, the public and private accuracies of a submission would all be almost identical and lie near the y = x diagonal.these scatter plots can be seen as indicators of overall competition "health": in case of pervasive overfitting, we would expect a plateauing trend where later points mainly move on the x-axis (public accuracy) but stagnate on the y-axis (private accuracy). in contrast, the four plots in figure1show that as submissions progress on the public test set, they see corresponding improvements also on the private test set. 
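the first-level analysis described above boils down to comparing each submission's public and private score; a small helper makes the check explicit (a sketch on made-up scores, not the metakaggle analysis code):

    def overfitting_summary(submissions):
        # submissions: list of (public_accuracy, private_accuracy) pairs
        gaps = [pub - priv for pub, priv in submissions]
        best_public = max(submissions, key=lambda s: s[0])
        return {"mean public-private gap": sum(gaps) / len(gaps),
                "gap of best public submission": best_public[0] - best_public[1]}

    scores = [(0.81, 0.80), (0.84, 0.84), (0.88, 0.87), (0.90, 0.89)]
    print(overfitting_summary(scores))
    # small, roughly constant gaps indicate little adaptive overfitting; public scores
    # that keep rising while private scores plateau would indicate the opposite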
so at least on the coarse scale of the first analysis level, there are little to no signs of adaptive overfitting: it is easier to make genuine progress on the data distribution in these competitions than to substantially overfit to the test set.while the scatter plots discussed above give a comprehensive picture of an entire competition, one concern is that overfitting may be more prevalent among the submissions with the highest public accuracy since they may be more adapted to the public test set. very small (effective) test sets make it easier to reconstruct the public / private split (and then to overfit), and also make the public and private scores more noisy.figure5: mean accuracy differences versus test set size (public and private combined) for 32 accuracy competitions with at least 1,000 submissions and available test set size (the test set sizes for two competitions with at least 1,000 submissions were not available from the metakaggle dataset). although a reliable recommendation for applied machine learning will require broader investigation, our results for accuracy competitions suggest that at least 10,000 examples is a reasonable minimum test set size to protect against adaptive overfitting. so while overfitting may have occurred to a small extent, it did not invalidate the overall conclusions from the competitions such as which submissions rank among the top or how well they perform on the private test split. due to the limited size of the public and private test sets, the public scores are only approximately equal to the private scores (even in the absence of any adaptive overfitting), which can lead to substantial rank changes even though all score deviations are small and of (roughly) equal size. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/244.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/244.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc49bbd811c11e6b6da852cc547ded4e7c2c3f82 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/244.txt @@ -0,0 +1 @@ +chronic kidney disease (ckd) is a global public health problem affecting approximately 10% of the world's population , . the percentage of prevalence of ckd in china is 10.8% , and the range of prevalence is 10%-15% in the united states . according to another study, this percentage has reached 14.7% in the mexican adult general population . this disease is characterised by a slow deterioration in renal function, which eventually causes a complete loss of renal function. ckd does not show obvious symptoms in its early stages. therefore, the disease may not be detected until the kidney loses about 25% of its function . in addition, ckd has high morbidity and mortality, with a global impact the associate editor coordinating the review of this manuscript and approving it for publication was hao ji.on the human body . it can induce the occurrence of cardiovascular disease , . ckd is a progressive and irreversible pathologic syndrome . hence, the prediction and diagnosis of ckd in its early stages is quite essential, it may be able to enable patients to receive timely treatment to ameliorate the progression of the disease.machine learning refers to a computer program, which calculates and deduces the information related to the task and obtains the characteristics of the corresponding pattern . this technology can achieve accurate and economical diagnoses of diseases; hence, it might be a promising method for diagnosing ckd. 
it has become a new kind of medical tool with the development of information technology and has a broad application prospect because of the rapid development of electronic health record . in the medical field, machine learning has already been used to detect human body status , analyze the relevant factors of the disease and diagnose various diseases. for example, the models built by machine learning algorithms were used to diagnose heart disease , , diabetes and retinopathy , , acute kidney injury , , cancer and other diseases , . in these models, algorithms based on regression, tree, probability, decision surface and neural network were often effective. in the field of ckd diagnosis, hodneland et al. utilized image registration to detect renal morphologic changes . vasquez-morales et al. established a classifier based on neural network using large-scale ckd data, and the accuracy of the model on their test data was 95% . in addition, most of the previous studies utilized the ckd data set that was obtained from the uci machine learning repository. chen et al. used k-nearest neighbor (knn), support vector machine (svm) and soft independent modelling of class analogy to diagnose ckd, knn and svm achieved the highest accuracy of 99.7% . in addition, they used fuzzy rule-building expert system, fuzzy optimal associative memory and partial least squares discriminant analysis to diagnose ckd, and the range of accuracy in those models was 95.5%-99.6% . their studies have achieved good results in the diagnosis of ckd. in the above models, the mean imputation is used to fill in the missing values and it depends on the diagnostic categories of the samples. as a result, their method could not be used when the diagnostic results of the samples are unknown. in reality, patients might miss some measurements for various reasons before diagnosing. in addition, for missing values in categorical variables, data obtained using mean imputation might have a large deviation from the actual values. for example, for variables with only two categories, we set the categories to 0 and 1, but the mean of the variables might be between 0 and 1. polat et al. developed an svm based on feature selection technology, the proposed models reduced the computational cost through feature selection, and the range of accuracy in those models was from 97.75%-98.5% . j. aljaaf et al. used novel multiple imputation to fill in the missing values, and then mlp neural network (mlp) achieved an accuracy of 98.1% . subas et al. used mlp, svm, knn, c4.5 decision tree and random forest (rf) to diagnose ckd, and the rf achieved an accuracy of 100% . in the models established by boukenze et al., mlp achieved the highest accuracy of 99.75% . the studies of , focus mainly on the establishment of models and achieve an ideal result. however, a complete process of filling in the missing values is not described in detail, and no feature selection technology is used to select predictors as well. almansour et al. used svm and neural network to diagnose ckd, and the accuracy of the models was 97.75% and 99.75%, respectively . in the models established by gunarathne et al., zero was used to fill out the missing values and decision forest achieved the best performance with the accuracy was 99.1% .to summarize the previous ckd diagnostic models, we find that most of them suffering from either the method used to impute missing values has a limited application range or relatively low accuracy. 
therefore, in this work, we propose a methodology to extend application range of the ckd diagnostic models. at the same time, the accuracy of the model is further improved. the contributions of the proposed work are as follows.1) we used knn imputation to fill in the missing values in the data set, which could be applied to the data set with the diagnostic categories are unknown.2) logistic regression (log), rf, svm, knn, naive bayes classifier (nb) and feed forward neural network (fnn) were used to establish ckd diagnostic models on the complete ckd data sets. the models with better performance were extracted for misjudgment analysis.3) an integrated model that combines log and rf by using perceptron was established and it improved the performance of the component models in ckd diagnosis after the missing values were filled by knn imputation.knn imputation is used to fill in the missing values. to our knowledge, this is the first time that knn imputation has been used for the diagnosis of ckd. in addition, building an integrated model is also a good way to improve the performance of separate individual models. the proposed methodology might effectively deal with the scene where patients are missing certain measurements before being diagnosed. in addition, the resulting integrated model shows a higher accuracy. therefore, it is speculated that this methodology might be applicable to the clinical data in the actual medical diagnosis.the rest of the paper is organized as follows. in section ii, we describe the preliminaries. the establishments of the individual model and the integrated model are described in section iii. in section iv, we evaluate and discuss the performance of the integrated model. in section v, we summarize the work and its contributions, including future works.1) we used knn imputation to fill in the missing values in the data set, which could be applied to the data set with the diagnostic categories are unknown.2) logistic regression (log), rf, svm, knn, naive bayes classifier (nb) and feed forward neural network (fnn) were used to establish ckd diagnostic models on the complete ckd data sets.3) an integrated model that combines log and rf by using perceptron was established and it improved the performance of the component models in ckd diagnosis after the missing values were filled by knn imputation. for the numerical variables, the missing values are filled using the median of the corresponding variable in k complete samples, and for the category variables, the missing values are filled using the category that has the highest frequency in the corresponding variable in k complete samples. in each calculation of the model training, the algorithm selects the best combination of parameters to establish the model by grid search. for the nb and the knn, the performance of the models when using knn imputation is not very ideal compared to using random imputation or mean and mode imputation in tables5 and 6. 
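the knn imputation step described above (the median for numerical variables and the most frequent category for categorical variables, taken over the k most similar complete samples) can be sketched directly in pandas. the snippet below is a minimal illustration rather than the authors' code: the toy dataframe, the column names, and the choice to measure similarity only over the numeric features that a row actually has are assumptions made for the example.

import numpy as np
import pandas as pd

def knn_impute(df, numeric_cols, categorical_cols, k=11):
    """Fill missing values from the k most similar complete rows:
    numeric gaps get the neighbours' median, categorical gaps the
    neighbours' most frequent category."""
    complete = df.dropna()
    std = complete[numeric_cols].std().replace(0, 1)   # scale for comparable distances
    filled = df.copy()
    for idx, row in df[df.isna().any(axis=1)].iterrows():
        obs = [c for c in numeric_cols if pd.notna(row[c])]   # observed numeric features
        d = (((complete[obs] - row[obs]) / std[obs]) ** 2).sum(axis=1) ** 0.5
        neighbours = complete.loc[d.nsmallest(k).index]
        for c in row.index[row.isna()]:
            if c in numeric_cols:
                filled.at[idx, c] = neighbours[c].median()
            else:
                filled.at[idx, c] = neighbours[c].mode().iloc[0]
    return filled

# hypothetical miniature of the ckd data: two numeric and one categorical column
df = pd.DataFrame({
    "age":    [48, 53, 63, 60, np.nan],
    "bp":     [80, 90, 70, np.nan, 80],
    "anemia": ["no", "no", "yes", "yes", np.nan],
})
print(knn_impute(df, ["age", "bp"], ["anemia"], k=2))

because no diagnostic label is used anywhere in the imputation, the same routine applies to samples whose diagnosis is still unknown, which is the point made in the text.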
the above result also proves the validity of the knn imputation, since knn imputation does improve the accuracy of some models, such as log, rf and svm (table5).the probability distribution of the samples in the complete ckd data set (at k = 11), the horizontal axis and the vertical axis represent the probabilities that the samples were judged as notckd by the log and the rf, respectively.in order to verify whether the integrated model can improve the performance of the component models, we first used the same random number seed 1234 to establish and evaluate the integrated model on each complete data, and the confusion matrices returned are shown in table9. the average results of the integrated models and two component models are shown in table10, and the integrated model has the best performance in detecting the two categories because table9. we also compared the methodology in this study (log, rf and integrated model) with the other models on the same data in previous studies (called contrast models), and the comparison result is shown in table11. by the use of knn imputation, log, rf, svm and fnn could achieve better performance than the cases when the random imputation and mean and mode imputation were used. in addition, the ckd data set is composed of mixed variables (numeric and category), so the similarity evaluation methods based on mixed data could be used to calculate the similarity between samples, such as general similarity coefficient. after unsupervised imputation of missing values in the data set by using knn imputation, the integrated model could achieve a satisfactory accuracy. in addition, due to there are only two categories (ckd and notckd) of data samples in the data set, the model can not diagnose the severity of ckd. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/245.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/245.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a03d51e415feeb344513f44162c4cd0bcb72ffe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/245.txt @@ -0,0 +1 @@ +over the past years, improving the vehicle traffic flow has been the focal point of many researchers . a large portion of the research and innovations aim to minimize, manage, and mitigate disruptions to traffic arising from traffic congestions . to deal with the inevitable effect of traffic flow on road safety, the economy, the environment and social life, it is prudent to design traffic systems such that they can cater for the ever-growing number of vehicles and all the potential influencers of traffic flow . recent studies that address the issue of optimizing traffic lights timing and reducing delays can be classified into the following three categories : 1. time-series approach: this approach attempts to predict traffic light phases by determining patterns of the temporal variation of traffic flow . 2. stochastic approach: the stochastic approach utilizes probabilistic models to forecast traffic flow. 3. nonparametric approaches: the algorithms of nonparametric approaches have no (or very little) prior knowledge about the form of the true function that is being modelled .the work of this paper falls under nonparametric approaches with deep neural networks being utilized to address the factors that influence traffic delay at signalized intersections. 
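returning to the integrated ckd model above: one plausible reading of "combines log and rf by using perceptron" is a simple stacking scheme in which the two component probabilities become the two inputs of a perceptron that makes the final decision. the scikit-learn sketch below illustrates that idea on synthetic data (the synthetic dataset stands in for the uci ckd data; the seed 1234 echoes the text, the remaining settings are assumptions).

import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression, Perceptron
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=400, n_features=24, random_state=1234)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=1234)

log = LogisticRegression(max_iter=1000).fit(X_tr, y_tr)
rf = RandomForestClassifier(n_estimators=100, random_state=1234).fit(X_tr, y_tr)

# the two component probabilities become the 2-d input of the combining perceptron
P_tr = np.column_stack([log.predict_proba(X_tr)[:, 1], rf.predict_proba(X_tr)[:, 1]])
P_te = np.column_stack([log.predict_proba(X_te)[:, 1], rf.predict_proba(X_te)[:, 1]])

combiner = Perceptron(random_state=1234).fit(P_tr, y_tr)
print("integrated accuracy:", combiner.score(P_te, y_te))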
furthermore, those influencing factors are injected into the deep neural network as input features.the contributions of this paper can be summarised as follows:1. to the best of authors knowledge, this paper is the first attempt to utilize dl in optimizing traffic signals through accurate prediction of vehicle flow at signalized intersections. 2. the experiments conducted in researches for this paper take into account all the past and future states of input features that are known to be impacting traffic flow of arterial networks. some of the input features are: time, weather, events, car population, congestion etc. 3. contrary to the conventional techniques of traffic prediction that are used in setting traffic signal phases, the method proposed in this paper has two interdependent regression tasks; one for vehicle flow, and the other one for phases of intersection movements.the rest of this paper is organized as follows: sect. 2 reviews the studies on traffic light phase prediction. section 3 sets the scene on the general architecture of the network while sect. 4 discusses the experimental results. concluding remarks are described in sect. 5. to deal with the inevitable effect of traffic flow on road safety, the economy, the environment and social life, it is prudent to design traffic systems such that they can cater for the ever-growing number of vehicles and all the potential influencers of traffic flow. contrary to the conventional techniques of traffic prediction that are used in setting traffic signal phases, the method proposed in this paper has two interdependent regression tasks; one for vehicle flow, and the other one for phases of intersection movements. ge et al. to make the regression model more effective, this paper suggests using multitask learning (mtl)such that tasks (traffic lights phase times and traffic congestion) are trained separately.because a poorly designed traffic light can lead to increased vehicle delay and road incidents, causing traffic congestion, and may encourage drivers to take alternative routes that may not be suitable for traffic (such as routes through residential neighbourhoods).where d is the average signal delay per vehicle in seconds, d1 is the average delay per vehicle due to uniform arrivals in seconds, pf is the progression adjustment factor used to account for the effect of signal progression on traffic flow, d2 is the average delay per vehicle due to random arrivals in seconds, and d3 is the average delay per vehicle due to initial queue at start of analysis time period, in seconds.where v is the traffic volume in veh/h and c is capacity (the maximum hourly volume that can pass through an intersection from a lane or group of lanes under prevailing roadway, traffic and control conditions) in veh/h.to calculate the ideal green time, real-time traffic volume is practised using the deep learning network, and the value is plugged into the aforementioned delay formulae.since the determining factor for the optimization of traffic signals is congestion reduction in essence traffic volume and the analysis is specific for an isolated intersection, a few assumptions are made:. due to lack of data, this experiment was conducted using time series, weather condition and traffic volume of the same intersection as inputs of the ann. 
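the delay description above follows the usual hcm-style decomposition, commonly written as d = d1 * pf + d2 + d3; the exact equation is not reproduced in the text, so the helper below is a hedged sketch of that standard form together with the v/c degree of saturation, using made-up numbers purely for illustration.

def control_delay(d1, pf, d2, d3):
    """Average signal delay per vehicle (s): uniform-arrival delay adjusted
    for progression, plus incremental delay from random arrivals, plus
    initial-queue delay, as described in the text."""
    return d1 * pf + d2 + d3

def degree_of_saturation(v, c):
    """v/c ratio: hourly traffic volume over capacity (both in veh/h)."""
    return v / c

# hypothetical values: 20 s uniform delay, good progression (pf = 0.9),
# 12 s incremental delay, no initial queue at the start of the period
print(control_delay(20.0, 0.9, 12.0, 0.0), "s per vehicle")
print(degree_of_saturation(720, 1800))

in the workflow sketched by the paper, the predicted traffic volume from the neural network would feed v (and hence d2), and traffic engineers would sweep candidate green times to find the one with the lowest resulting delay.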
to make it computationally easier and provide meaningful time stamps, the date and hours are merged into a separate cell called date time which corresponds to the traffic volume of all the movements in a period of five minutes (300 s).the relationship between metrological conditions is a well know phenomenon noticed by road users, and backed by many researchers includingin which it proves the reduction of traffic flow due to weather conditions (rain, fog, mist, haze, or snow); however, this experiment does not attempt to establish this association. out of that dataset, the hourly ratio (in millimetres) of rain was however, since our traffic volume data was structured at a 5 min frequency, the hourly rain data needed to be divided by five so it could be fed into the neural network along with traffic volume in parallel. table2in this experiment, traffic volume data, time and weather condition (raining) were the only inputs for the analysis, even though other factors such as incidents and events could have been used as well.to determine the lowest accessible delay, traffic engineers can plug in predicted traffic volume and series of green times into eq. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/246.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/246.txt new file mode 100644 index 0000000000000000000000000000000000000000..6311903df1f8db5bcd221f1bf78074af09f2a848 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/246.txt @@ -0,0 +1 @@ +sentiment analysis is process of identifying and analyzing data based upon the person feelings, reviews and thoughts.in this paper, we have used different machine learning classifiers to predict peoples sentiment regarding delhi corporation election and their feelings about different political parties namely bhartiya janta party(bjp), indian national congress(inc), aam aadmi party(aap).we have used seven machine learning methods namely naïve-bayes classifier , multinomialnbvclassifier , bernoullinb classifier , logistic regression classifier , sgdc (stochastic gradient decent classifier) , svc (support vector classifier) : linearsvc , and nusvc , to classify citizens sentiment for the three major political parties in delhi corporation election.we have used python and nltk to implement this research.python3.5 version was used.in python, there is no compilation step therefore testing and debugging happens quickly.python provides a large number of open source libraries.natural language toolkit (nltk) is library in python, which provides a base for building programs and classification of data.in short, this research is done to identify the best machine learning algorithm to find the polarity of tweets of these political parties and predict the sentiment of people in upcoming elections.this paper is organized in different sections as follows.section 2 highlights the already published literature in the area of sentiment analysis.section 3, explains the detail description of data, various machine learning classifiers and measures for performance evaluation on the said methods.the results of all machine learning methods along with the accuracy, is presented in section 4.section 5 concludes with summary of results eventually leading to the future directions.section 6 give details of future work.sentiment analysis is process of identifying and analyzing data based upon the person feelings, reviews and thoughts.in this paper, we have used different machine learning classifiers to predict 
peoples sentiment regarding delhi corporation election and their feelings about different political parties namely bhartiya janta party(bjp), indian national congress(inc), aam aadmi party(aap).we have used seven machine learning methods namely naïve-bayes classifier, multinomialnbvclassifier, bernoullinb classifier, logistic regression classifier, sgdc (stochastic gradient decent classifier), svc (support vector classifier): linearsvc,and nusvc,to classify citizens sentiment for the three major political parties in delhi corporation election.in short, this research is done to identify the best machine learning algorithm to find the polarity of tweets of these political parties and predict the sentiment of people in upcoming elections.section 3, explains the detail description of data, various machine learning classifiers and measures for performance evaluation on the said methods.the past and current research papers on sentiment analysis of twitter data have been studied., eg theresa wilson 2011) to classifying the sentiment of twitter messages on sentence-level (apoorvagarwal (2011)by experimenting with three types of models: unigram model, a feature based model and a tree kernel based model and document level classification (pang and lee 2008) have been studied.we have used machine learning classifiers to classify tweets which come under supervised machine learning approach for which a detailed paper of sentiment analysis algorithms and applications: a survey (walaa medhat , ahmed hassan and hoda karashy 2014)has been taken as base.we have built machine learning classifiers by writing a script in python, trained them by training data (movie reviews), tested them and thus created a sentiment module for classifying tweets which is slightly a different approach.however, humans use an informal way to write tweets that too in different languages which makes the task to extract features and analyzing the sentiment accurately a difficult task.sentiment analysis is used to identifying people s opinion, attitude and emotional states.to do the sentiment analysis of tweets, the proposed system first extracts the twitter posts from twitter by user.we have used machine learning supervised approach to obtain the results.we saw different party have different sentiment results according to their progress and working procedure.we also saw how any social event, speech or rally cause a fluctuation in sentiment of people. 
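a minimal version of the classifier benchmark described above (training on the nltk movie_reviews corpus and comparing several scikit-learn classifiers wrapped for nltk) might look like the sketch below; the boolean bag-of-words features, the 1600/400 split, and the particular classifier settings are assumptions for the example rather than the authors' exact configuration.

import random
import nltk
from nltk.corpus import movie_reviews
from nltk.classify import NaiveBayesClassifier
from nltk.classify.util import accuracy
from nltk.classify.scikitlearn import SklearnClassifier
from sklearn.linear_model import LogisticRegression, SGDClassifier
from sklearn.naive_bayes import MultinomialNB, BernoulliNB
from sklearn.svm import LinearSVC

nltk.download("movie_reviews", quiet=True)

# boolean bag-of-words features per review, labelled pos/neg
docs = [(dict.fromkeys(movie_reviews.words(fid), True), cat)
        for cat in movie_reviews.categories()
        for fid in movie_reviews.fileids(cat)]
random.seed(0)
random.shuffle(docs)
train, test = docs[:1600], docs[1600:]

classifiers = {
    "naive bayes": NaiveBayesClassifier.train(train),
    "multinomial nb": SklearnClassifier(MultinomialNB()).train(train),
    "bernoulli nb": SklearnClassifier(BernoulliNB()).train(train),
    "logistic regression": SklearnClassifier(LogisticRegression(max_iter=1000)).train(train),
    "sgd": SklearnClassifier(SGDClassifier()).train(train),
    "linear svc": SklearnClassifier(LinearSVC()).train(train),
}
for name, clf in classifiers.items():
    print(name, round(accuracy(clf, test), 3))

the trained classifiers can then score tweet featuresets the same way, which is the "sentiment module" role described in the text.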
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/247.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/247.txt new file mode 100644 index 0000000000000000000000000000000000000000..8424ba06fa3c95b094ae02847cfe5ba9705583f6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/247.txt @@ -0,0 +1 @@ +electrocardiography (ecg) and photoplethysmograph (ppg) provide common ways for measuring heart activities.these two types signals are important for healthcare applications since they provide the measurement of both basic average heart rate (hr) and more detailed information like heart rate variability (hrv).however, these signals are mostly measured from skin-contact ecg/bvp sensors, which may cause discomfort and are inconvenient for long-term monitoring.to solve this problem, remote photoplethysmography (rppg) , which targets to measure heart figure 1.rppg measurement from highly compressed videos.due to video compression artifact and rppg information loss, the rppg in(a) has very noisy shape and inaccurate peak counts which lead to erroneous heart rate measures, while after video enhancement by stven, the rppg in(b) shows more regular pulse shape with accurate peak locations comparing to the ground truth ecg.activity remotely and without any contact, has been developing rapidly in recent years .however, most previous rppg measurement works did not take the influence of video compression into consideration, whereas the fact is that most videos captured by commercial cameras are compressed through different compression codecs with various bitrates.recently, two works pointed out and demonstrated that the performance of rppg measurement dropped to various extents when using compressed videos with different bitrates.as shown in fig.1(a), rppg signals measured from highly compressed videos usually suffer from noisy curve shape and inaccurate peak locations due to information loss caused by both intra-frame and inter-frame coding of the video compression process.video compression is inevitable for remote services considering the convenient storage and transmis-sion in internet.thus it is of great practical value to develop rppg methods that can work robustly on highly compressed videos.however, no solution has been proposed yet to counter this problem.to address this problem, we propose a two-stage, endto-end method using hidden rppg information enhancement and attention networks, which can counter video compression loss and recover rppg signals from highly compressed facial videos.figure 1(b) illustrates the advantages of our method on rppg measurement from highly compressed videos.our contributions include:• to our best knowledge, we provide the first solution for robust rppg measurement directly from compressed videos, which is an end-to-end framework made up of a video enhancement module stven (spatio-temporal video enhancement network) and a powerful signal recovery module rppgnet.• the rppgnet, featured with a skin-based attention module and partition constraints, can measure accurately at both hr and hrv levels.compared with previous works which only output simple hr numbers , the proposed rppgnet produces much richer rppg signals with curve shapes and peak locations.moreover, it outperforms state-of-art methods on various video formats of a benchmark dataset even without using the stven module.• the stven, which is a video-to-video translation generator aided with fine-grained learning, is the first video compression 
enhancement network to boost rppg measurement on highly compressed videos.• we conduct cross-dataset test and show that the stven can generalize well to enhance unseen, highly compressed videos for rppg measurement, which implies promising potential in real-world applications.due to video compression artifact and rppg information loss, the rppg in(a) has very noisy shape and inaccurate peak counts which lead to erroneous heart rate measures, while after video enhancement by stven, the rppg in(b) shows more regular pulse shape with accurate peak locations comparing to the ground truth ecg.however, most previous rppg measurement works did not take the influence of video compression into consideration, whereas the fact is that most videos captured by commercial cameras are compressed through different compression codecs with various bitrates.1(a), rppg signals measured from highly compressed videos usually suffer from noisy curve shape and inaccurate peak locations due to information loss caused by both intra-frame and inter-frame coding of the video compression process.to address this problem, we propose a two-stage, endto-end method using hidden rppg information enhancement and attention networks, which can counter video compression loss and recover rppg signals from highly compressed facial videos.• to our best knowledge, we provide the first solution for robust rppg measurement directly from compressed videos, which is an end-to-end framework made up of a video enhancement module stven (spatio-temporal video enhancement network) and a powerful signal recovery module rppgnet.• the stven, which is a video-to-video translation generator aided with fine-grained learning, is the first video compression enhancement network to boost rppg measurement on highly compressed videos.fueled by the high performance of deep learning, several works introduce it to enhance the quality of compressed videos and get promising results, including arcnn, deep residual denoising neural networks (dncnn), generative adversarial networksand multi-frame quality enhancement network.the other onetried to address the rppg issue on compressed videos, but the ap- proach was only on bio-signal processing level after the rppg was extracted, which has nothing to do with video enhancement.to the best of our knowledge, no video enhancement method has ever been proposed for the problem of rppg recovery from highly compressed videos.in order to overcome the above-mentioned drawbacks and fill in the blank, we propose a two-stage, end-to-end deep learning based method for rppg measurement from highly compressed videos.inspired by, we design an advanced joint training strategy to ensure that stven can enhance the video specifically in favor of rppg recovery, which boosts the performance of rppgnet even on highly compressed video.in this training strategy, we take away the cycle-loss part since we expect stven to recover richer rppg signals instead of irrelevant information loss during video compression.in the joint training, we use the rppg signals recovered from high quality videos as a softer target for the updating of stven, and it converges faster and more steadily than using the ecg signals, which might be too far-fetched and challenging as the target for highly compressed videos, as our prior tests proved.9(bottom) shows less objective quality (psnr) fluctuation of the highly compressed videos with stven enhancement, which seems to help recover smoother and robust rppg signals.in this paper, we proposed an end-to-end deep 
learning based method for rppg signals recovery from highly compressed videos. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/248.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/248.txt new file mode 100644 index 0000000000000000000000000000000000000000..428b97aa43e1eb26ae88cebec1b80a292e82273d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/248.txt @@ -0,0 +1 @@ +recurrent neural networks, long short-term memory and gated recurrent neural networks in particular, have been firmly established as state of the art approaches in sequence modeling and transduction problems such as language modeling and machine translation . numerous efforts have since continued to push the boundaries of recurrent language models and encoder-decoder architectures .recurrent models typically factor computation along the symbol positions of the input and output sequences. aligning the positions to steps in computation time, they generate a sequence of hidden states h t , as a function of the previous hidden state h t−1 and the input for position t. this inherently sequential nature precludes parallelization within training examples, which becomes critical at longer sequence lengths, as memory constraints limit batching across examples. recent work has achieved significant improvements in computational efficiency through factorization tricks and conditional computation , while also improving model performance in case of the latter. the fundamental constraint of sequential computation, however, remains.attention mechanisms have become an integral part of compelling sequence modeling and transduction models in various tasks, allowing modeling of dependencies without regard to their distance in the input or output sequences . in all but a few cases , however, such attention mechanisms are used in conjunction with a recurrent network.in this work we propose the transformer, a model architecture eschewing recurrence and instead relying entirely on an attention mechanism to draw global dependencies between input and output. the transformer allows for significantly more parallelization and can reach a new state of the art in translation quality after being trained for as little as twelve hours on eight p100 gpus.attention mechanisms have become an integral part of compelling sequence modeling and transduction models in various tasks, allowing modeling of dependencies without regard to their distance in the input or output sequences.in this work we propose the transformer, a model architecture eschewing recurrence and instead relying entirely on an attention mechanism to draw global dependencies between input and output.self-attention, sometimes called intra-attention is an attention mechanism relating different positions of a single sequence in order to compute a representation of the sequence.to the best of our knowledge, however, the transformer is the first transduction model relying entirely on self-attention to compute representations of its input and output without using sequencealigned rnns or convolution. 
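as a concrete illustration of the self-attention mechanism referred to above, the numpy sketch below computes scaled dot-product attention, attention(q, k, v) = softmax(q k^T / sqrt(d_k)) v, for a toy sequence; the sequence length, model dimension, and random projection matrices are arbitrary stand-ins for learned parameters.

import numpy as np

def scaled_dot_product_attention(Q, K, V):
    """softmax(Q K^T / sqrt(d_k)) V for 2-d query/key/value matrices."""
    d_k = Q.shape[-1]
    scores = Q @ K.T / np.sqrt(d_k)                       # (n_q, n_k)
    weights = np.exp(scores - scores.max(axis=-1, keepdims=True))
    weights /= weights.sum(axis=-1, keepdims=True)        # row-wise softmax
    return weights @ V                                    # (n_q, d_v)

# toy self-attention over a sequence of 4 positions with d_model = 8
rng = np.random.default_rng(0)
x = rng.normal(size=(4, 8))
W_q, W_k, W_v = (rng.normal(size=(8, 8)) for _ in range(3))
out = scaled_dot_product_attention(x @ W_q, x @ W_k, x @ W_v)
print(out.shape)  # (4, 8)

multi-head attention, discussed next, simply repeats this computation h times with separate learned projections and concatenates the head outputs.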
in addition to the two sub-layers in each encoder layer, the decoder inserts a third sub-layer, which performs multi-head attention over the output of the encoder stack.while for small values of d k the two mechanisms perform similarly, additive attention outperforms dot product attention without scaling for larger values of d k.instead of performing a single attention function with d model -dimensional keys, values and queries, we found it beneficial to linearly project the queries, keys and values h times with different, learned linear projections to d k , d k and d v dimensions, respectively.• in "encoder-decoder attention" layers, the queries come from the previous decoder layer, and the memory keys and values come from the output of the encoder.similarly to other sequence transduction models, we use learned embeddings to convert the input tokens and output tokens to vectors of dimension d model .in this section we compare various aspects of self-attention layers to the recurrent and convolutional layers commonly used for mapping one variable-length sequence of symbol representations (x 1 , . hence we also compare the maximum path length between any two input and output positions in networks composed of the different layer types.as noted in table1, a self-attention layer connects all positions with a constant number of sequentially executed operations, whereas a recurrent layer requires o(n) sequential operations. in terms of computational complexity, self-attention layers are faster than recurrent layers when the sequence length n is smaller than the representation dimensionality d, which is most often the case with sentence representations used by state-of-the-art models in machine translations, such as word-pieceand byte-pairrepresentations. to improve computational performance for tasks involving very long sequences, self-attention could be restricted to considering only a neighborhood of size r in the input sequence centered around the respective output position.in this work, we presented the transformer, the first sequence transduction model based entirely on attention, replacing the recurrent layers most commonly used in encoder-decoder architectures with multi-headed self-attention. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/249.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/249.txt new file mode 100644 index 0000000000000000000000000000000000000000..e356cdfdf8ce7e817d31ecc998f8e73e68c65cb1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/249.txt @@ -0,0 +1 @@ +the holdout method is central to empirical progress in the machine learning community. competitions, benchmarks, and large-scale hyperparameter search all rely on splitting a data set into multiple pieces to separate model training from evaluation. however, when practitioners repeatedly reuse holdout data, the danger of overfitting to the holdout data arises .despite its importance, there is little empirical research into the manifested robustness and validity of the holdout method in practical scenarios. real-world use cases of the holdout method often fall outside the guarantees of existing theoretical bounds, making questions of validity a matter of guesswork.recent replication studies demonstrated that the popular cifar-10 and imagenet benchmarks continue to support progress despite years of intensive use. 
the longevity of these benchmarks perhaps suggests that overfitting to holdout data is less of a concern than reasoning from first principles might have suggested. however, this is evidence from only two, albeit important, computer vision benchmarks. it remains unclear whether the observed phenomenon is specific to the data domain, model class, or practices of vision researchers. unfortunately, these replication studies required assembling new test sets from scratch, resulting in a highly labor-intensive analysis that is difficult to scale. in this paper, we empirically study holdout reuse at a significantly larger scale by analyzing data from 120 machine learning competitions on the popular kaggle platform. kaggle competitions are a particularly well-suited environment for studying overfitting since data sources are diverse, contestants use a wide range of model families, and training techniques vary greatly. moreover, kaggle competitions use public and private test data splits, which provide a natural experimental setup for measuring overfitting on various datasets. to provide a detailed analysis of each competition, we introduce a coherent methodology to characterize the extent of overfitting at three increasingly fine scales. our approach allows us both to discuss the overall "health" of a competition across all submissions and to inspect signs of overfitting separately among the top submissions. in addition, we develop a statistical test specific to the classification competitions on kaggle to compare the submission scores to those arising in an ideal null model that assumes no overfitting. observed data that are close to data predicted by the null model are strong evidence against overfitting. overall, we conclude that the classification competitions on kaggle show little to no signs of overfitting. while there are some outlier competitions in the data, these competitions usually have pathologies such as non-i.i.d. data splits or (effectively) small test sets. among the remaining competitions, the public and private test scores show a remarkably good correspondence. the picture becomes more nuanced among the highest-scoring submissions, but the overall effect sizes of (potential) overfitting are typically small (e.g., less than 1% classification accuracy). thus, our findings show that substantial overfitting is unlikely to occur naturally in regular machine learning workflows. before we delve into the analysis of the kaggle data, we briefly define the type of overfitting we study and then describe how the kaggle competition format naturally lends itself to investigating overfitting in machine learning competitions. considering the danger of overfitting to the test set in a competitive environment, kaggle subdivides each test set into public and private components. in particular, we can view the public test split s_public as the regular test set and use the held-out private test split s_private to approximate the population loss. these four competitions are the accuracy competitions with the largest number of submissions and serve as representative examples for a typical accuracy competition in the metakaggle dataset (see table 1 for information about these competitions).
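a quick way to see how large public/private score deviations should be in the absence of adaptive overfitting is to treat both splits as i.i.d. samples and compute the binomial standard deviation of the accuracy gap. the sketch below does this for a hypothetical submission with 85% true accuracy and a 50/50 public/private split; both numbers are assumptions chosen only for illustration.

import numpy as np

def accuracy_gap_sd(acc, n_public, n_private):
    """Standard deviation of (public - private) accuracy for a submission
    with true accuracy `acc`, assuming i.i.d. test examples and no
    adaptive overfitting (the idealised null model)."""
    var_pub = acc * (1 - acc) / n_public
    var_priv = acc * (1 - acc) / n_private
    return np.sqrt(var_pub + var_priv)

# with roughly 10,000 examples overall, random fluctuation stays well below 1%
for n in (1_000, 10_000, 100_000):
    print(n, round(accuracy_gap_sd(0.85, n // 2, n // 2), 4))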
in an ideal competition with a large test set and without any overfitting, the public and private accuracies of a submission would all be almost identical and lie near the y = x diagonal.these scatter plots can be seen as indicators of overall competition "health": in case of pervasive overfitting, we would expect a plateauing trend where later points mainly move on the x-axis (public accuracy) but stagnate on the y-axis (private accuracy). in contrast, the four plots in figure1show that as submissions progress on the public test set, they see corresponding improvements also on the private test set. so at least on the coarse scale of the first analysis level, there are little to no signs of adaptive overfitting: it is easier to make genuine progress on the data distribution in these competitions than to substantially overfit to the test set.while the scatter plots discussed above give a comprehensive picture of an entire competition, one concern is that overfitting may be more prevalent among the submissions with the highest public accuracy since they may be more adapted to the public test set. very small (effective) test sets make it easier to reconstruct the public / private split (and then to overfit), and also make the public and private scores more noisy.figure5: mean accuracy differences versus test set size (public and private combined) for 32 accuracy competitions with at least 1,000 submissions and available test set size (the test set sizes for two competitions with at least 1,000 submissions were not available from the metakaggle dataset). although a reliable recommendation for applied machine learning will require broader investigation, our results for accuracy competitions suggest that at least 10,000 examples is a reasonable minimum test set size to protect against adaptive overfitting. so while overfitting may have occurred to a small extent, it did not invalidate the overall conclusions from the competitions such as which submissions rank among the top or how well they perform on the private test split. due to the limited size of the public and private test sets, the public scores are only approximately equal to the private scores (even in the absence of any adaptive overfitting), which can lead to substantial rank changes even though all score deviations are small and of (roughly) equal size. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/25.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/25.txt new file mode 100644 index 0000000000000000000000000000000000000000..dae7dbe22b0ca8f741bd9c5a2e194940a2357c44 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/25.txt @@ -0,0 +1 @@ +many machine learning (ml) problems reduce to the question of comparing and summarizing probability measures. for example, problems in domain adaptation, adversarial training and distributed learning fall withing this setting. measuring the distance between two probability measures can generally be addressed using any divergence function solomon et al. . however, such functions often do not consider the spatial properties and the physical distances between these measures. 
optimal transport on the other hand, converts the distance between samples to a distance between probability measures, and in doing so endows the space of measures with a topology: if the underlying space is euclidean, the concepts of interpolation, barycenters and gradient of functions are naturally extended to the space of measures peyré and cuturi .notwithstanding this important property, when it comes to the practical requirement of incorporating probabilistic constraints in the ot problem, or processing uncertainty in the marginals, there exists no systematic or generic methodology. this latter observation motivates our interest in fully probabilistic design (fpd). fpd is the axiomatic framework for designing probability measures under uncertainty, while being consistent with particular knowledge constraints, imposed by the designer karny . these constraints express any additional information about the unknown probability measure, in the form of a prior density, a set membership, some physical laws, etc. quinn et al. . in this paper, we prove that regularised ot is a special case of fpd. indeed, by formulating the ot problem as a constrained design of a joint probability density, the connection with fpd emerges naturally. we argue that this connection yields a structured and robust fpd-ot framework.our paper is structured as follows. we begin in section 2 by reviewing the key concepts of ot, emphasising the mathematical objects used later in the document. in section 3, we introduce fpd and establish in section 4 the connection between ot and fpd. the key conclusions follow in section 5. in this paper, we prove that regularised ot is a special case of fpd. we overload µ and ν to denote the probability density functions (pdf) in the continuous case (or probability mass functions (pmf) in the discrete case).c : ωs × ωt → r + is a measurable cost function, π(x, y) denotes an unknown (variational) pdf with support in: ωs × ωt and π k denotes the set of joint pdfs π(x, y|k) with support in: ωs × ωt , on which some knowledge constraints k are imposed.towards defining the entropy-regularised ot, let us define the kullback-leibler divergence (kld) from a variational probability density, π, to a fixed probability density, ζ, as follows:.by introducing an entropy term, the kantorovitch problem becomes strongly convex and efficient iterative scaling algorithms can be used to compute an approximate transport plan (namely, sinkhorn-knopp in the discrete case, fortet in the continuous case)cuturi .in addition to the statistical similarity embedded in the transport cost function c, we may need to design a more structured joint pdf π(x, y).in the following section, we introduce fully probabilistic design (fpd), which we argue is a generalization of regularised ot. we believe that relaxing the ot formalism into the more generic fpd framework may enable a new set of probabilistic knowledge constraints to be processed in ot. it is, in fact, a generalisation of the classical bayesian conditioning, allowing the processing of probabilistic knowledge constraints k in the design of the pdf π(x, y|k), without requiring a specified joint model π(x, y, k)quinn et al.the axiomatic formulation of fpd, as an optimal distributional design problem, was first established inkarny , where the authors proved that it is an extension of bayesian decision making.more formally, fpd seeks a pdf π(x, y) which satisfies predefined design constraints, formalized as membership of a set, π k , of knowledge-constrained pdfs:. 
to compute the optimal solution, a utility (loss) function is then used to compare and rank the candidate probability densities, based on their degree of closeness to the ideal design.given that both the kantorovitch and the fpd problems seek the optimal design of a joint pdf, it is natural to investigate possible connections between the two frameworks, which we shall study in the next section.remark 3: the ideal design π i is composed of two factors: (i) the gibbs kernel, which is a function of the transportation cost c(x, y) and (ii) φ, corresponding to a reference pdf which encodes the designer's preferences about the optimal pdf π o . the entropy-regularised ot problem is a specialization of fpd, where the ideal design π i reduces to the boltzmann distribution:. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/250.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/250.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa503af2033d99a88538862328d1a3e19037056d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/250.txt @@ -0,0 +1 @@ +automatically generating images according to natural language descriptions is a fundamental problem in many applications, such as art generation and computer-aided design. it also drives research progress in multimodal learning and inference across vision and language, which is one of the most active research areas in recent years most recently proposed text-to-image synthesis methods are based on generative adversarial networks (gans) . a commonly used approach is to encode the whole text description into a global sentence vector as the condition for gan-based image generation . although * work was performed when was an intern with microsoft research this bird is red with white and has a very short beak 10:short 3:red 11:beak 9:very 8:a 3:red 5:white 1:bird 10:short 0:this figure 1. example results of the proposed attngan. the first row gives the low-to-high resolution images generated by g0, g1 and g2 of the attngan; the second and third row shows the top-5 most attended words by f attn 1 and f attn 2 of the attngan, respectively. here, images of g0 and g1 are bilinearly upsampled to have the same size as that of g2 for better visualization.impressive results have been presented, conditioning gan only on the global sentence vector lacks important finegrained information at the word level, and prevents the generation of high quality images. this problem becomes even more severe when generating complex scenes such as those in the coco dataset .to address this issue, we propose an attentional generative adversarial network (attngan) that allows attentiondriven, multi-stage refinement for fine-grained text-toimage generation. the overall architecture of the attngan is illustrated in figure 2. the model consists of two novel components. the first component is an attentional gener-ative network, in which an attention mechanism is developed for the generator to draw different sub-regions of the image by focusing on words that are most relevant to the sub-region being drawn (see figure 1). more specifically, besides encoding the natural language description into a global sentence vector, each word in the sentence is also encoded into a word vector. the generative network utilizes the global sentence vector to generate a low-resolution image in the first stage. 
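returning to the entropy-regularised optimal transport discussed above: the sinkhorn-knopp scaling mentioned for the discrete case fits in a few lines of numpy. the marginals, cost matrix, regularisation strength, and iteration count below are toy values chosen only to illustrate the iteration, not settings from the paper.

import numpy as np

def sinkhorn(mu, nu, C, eps=0.1, n_iter=500):
    """Entropy-regularised OT via Sinkhorn-Knopp scaling (discrete case).
    mu, nu: marginal pmfs; C: cost matrix; eps: regularisation strength."""
    K = np.exp(-C / eps)                  # Gibbs kernel
    u = np.ones_like(mu)
    for _ in range(n_iter):
        v = nu / (K.T @ u)
        u = mu / (K @ v)
    return u[:, None] * K * v[None, :]    # approximate transport plan

mu = np.array([0.5, 0.5])
nu = np.array([0.25, 0.75])
C = np.array([[0.0, 1.0],
              [1.0, 0.0]])
P = sinkhorn(mu, nu, C)
print(P)
print(P.sum(axis=1), P.sum(axis=0))       # rows ~ mu, columns ~ nu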
in the following stages, it uses the image vector in each sub-region to query word vectors by using an attention layer to form a word-context vector. it then combines the regional image vector and the corresponding word-context vector to form a multimodal context vector, based on which the model generates new image features in the surrounding sub-regions. this effectively yields a higher resolution picture with more details at each stage. the other component in the attngan is a deep attentional multimodal similarity model (damsm). with an attention mechanism, the damsm is able to compute the similarity between the generated image and the sentence using both the global sentence level information and the fine-grained word level information. thus, the damsm provides an additional fine-grained image-text matching loss for training the generator.the contribution of our method is threefold. (i) an attentional generative adversarial network is proposed for synthesizing images from text descriptions. specifically, two novel components are proposed in the at-tngan, including the attentional generative network and the damsm. (ii) comprehensive study is carried out to empirically evaluate the proposed attngan. experimental results show that the attngan significantly outperforms previous state-of-the-art gan models. (iii) a detailed analysis is performed through visualizing the attention layers of the attngan. for the first time, it is demonstrated that the layered conditional gan is able to automatically attend to relevant words to form the condition for image generation. the first row gives the low-to-high resolution images generated by g0, g1 and g2 of the attngan; the second and third row shows the top-5 most attended words by f attn 1 and f attn 2 of the attngan, respectively. the first component is an attentional gener-ative network, in which an attention mechanism is developed for the generator to draw different sub-regions of the image by focusing on words that are most relevant to the sub-region being drawn (see figure1). it then combines the regional image vector and the corresponding word-context vector to form a multimodal context vector, based on which the model generates new image features in the surrounding sub-regions. with an attention mechanism, the damsm is able to compute the similarity between the generated image and the sentence using both the global sentence level information and the fine-grained word level information.as shown in figure2, the proposed attentional generative adversarial network (attngan) has two novel components: the attentional generative network and the deep attentional multimodal similarity model.the attention model f attn (e, h) has two inputs: the word features e ∈ r d×t and the image features from the previous hidden layer h ∈ rd ×n .the damsm learns two neural networks that map subregions of the image and words of the sentence to a common semantic space, thus measures the image-text similarity at the word level to compute a fine-grained loss for image generation.where v ∈ r d×289 and its i th column v i is the visual feature vector for the i th sub-region of the image; and v ∈ r d is the global vector for the whole image. (11), (12) and (13), we can obtain loss functions l s 1 and l s 2 (where 's' stands for "sentence") using the sentence vector e and the global image vector v., we use generated images to query their corresponding text descriptions. 
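the word-level attention used by the attentional generative network can be sketched as follows: word features are projected into the image feature space, each of the n sub-regions scores all t words with a softmax, and the weighted word vectors form one word-context vector per sub-region. the numpy snippet below is a simplified illustration with arbitrary dimensions, not the attngan implementation.

import numpy as np

def word_attention(e, h, U):
    """Simplified f_attn: e (d x T) word features, h (d_hat x N) sub-region
    features, U (d_hat x d) projection. Returns d_hat x N word-context vectors."""
    e_proj = U @ e                                   # (d_hat, T)
    scores = h.T @ e_proj                            # (N, T) region-word similarities
    alpha = np.exp(scores - scores.max(axis=1, keepdims=True))
    alpha /= alpha.sum(axis=1, keepdims=True)        # softmax over words per region
    return e_proj @ alpha.T                          # (d_hat, N)

rng = np.random.default_rng(0)
e = rng.normal(size=(256, 12))    # 12 words in the caption
h = rng.normal(size=(32, 64))     # 64 image sub-regions
U = rng.normal(size=(32, 256))
print(word_attention(e, h, U).shape)   # (32, 64)

each word-context column is then combined with the corresponding regional image vector to form the multimodal context from which the next-stage image features are generated.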
our "attngan1" architecture has one attention model and two generators, while the "attngan2" architecture has two attention models stacked with three generators (see figure2). as shown in table 2 and figure3, stacking two attention models in the generative networks not only generates images of a higher resolution (from 128×128 to 256×256 resolution), but also yields higher inception scores on both cub and coco datasets. the final λ of the coco dataset turns out to be much larger than that of the cub dataset, indicating that the proposed l dam sm is especially important for generat-the bird has a yellow crown and a black eyering that is round this bird has a green crown black primaries and a white bellyand our attngan on cub and coco test sets. for those sub-regions, the attention is equally allo-this bird has wings that are black and has a white belly this bird has wings that are red and has a yellow belly this bird has wings that are blue and has a red belly cated to all words and shown to be black in the attention map (see figure4). another observation is that our second attention model f attn 2 is able to attend to some new words that were omitted by the first attention model f attn 1 (see figure4). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/251.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/251.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c518ab9d3fb55cc22063e9473aadc8b0a6669c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/251.txt @@ -0,0 +1 @@ +the fixed camera feed is (frame by frame) given to an image processing unit at the base which tries to: 1) recognize the preregistered face of the instructor and 2) identify his/her skeleton for pose detection. once the face is recognized and the associated skeleton is detected, tracking the instructor's position begins. a set of features are also extracted from the identified skeleton (which is the output of the deep neural network). more specifically, 12 features are measured from the network output and are fed to a pretrained pose classifier. the result of classification is the instructor's pose which can be handed over to the camera controller to take appropriate actions. however, since current cnns mostly take still images and classification of consecutive frames does not always yield similar results, to have a smooth shooting experience, we designed a markov model for instructor's poses. similar to markov chains, the instructor's pose is memoryless and transition to the next pose only depends on the current pose. some transitions are impossible and some have low probabilities. for example, one cannot sit right after being detected to be facing the blackboard. but transition to sitting is possible when the instructor is facing the students.at each markov state, the received sequence of images (frames) can be considered as evidences. given the current state, one can find the conditional probability of each potentional state happening, and based on the map estimation, change the state/pose. details of the markov model will be given in the following sections.the summary of the above process is presented as a flowchart in figs. 2 and 3. note that fig. 3 shows parts of the high-level flowchart of fig. 2 in more detail. recording starts with sensing the wearable gadget of the instructor (and possibly cross correlating it with the class timetable). 
then, the fixed camera feed is searched for the instructor's face and once it is detected and the associated skeleton is extracted, position tracking is triggered. the loop continuously tracks the instructor and scans for any pose changes. the result is used by the ptz camera controller to direct the recording. it is worth mentioning that pose detection is a straightforward task and the flow of the algorithm in fig. 3 will not be interrupted or stuck. if tracking is successful but the skeleton is missed, the previous pose is used until the algorithms catch up. the pose detection result as well as instructor's position tracking information are handed over to the ptz camera controller to orchestrate the filming process.the contributions of this article can be summarized as follows: 1) designing a low-cost two-camera framework for automatic recording of classes; 2) employment of deep neural networks for skeleton detection and defining a set of features for instructor's pose classification; 3) enhancement of classification accuracy by using a corrective markov chain and a bayesian map estimator; 4) creation of pose training datasets and implementation of the whole solution based on openpose library. in section ii, we present the proposed camera management framework including the deep pose detection, the markov model, and the bayesian estimator.the fixed camera feed is (frame by frame) given to an image processing unit at the base which tries to: 1) recognize the preregistered face of the instructor and 2) identify his/her skeleton for pose detection. similar to markov chains, the instructor's pose is memoryless and transition to the next pose only depends on the current pose. given the current state, one can find the conditional probability of each potentional state happening, and based on the map estimation, change the state/pose. given a lecture hall video dataset of enough duration, one can build a markov transition matrix with prior probabilities for the four defined states.a pure markov model might be a good presentation of the states and their transition probabilities; however, it does not help with the improvement of pose detection results per se. one can make a dataset of transitions from θ i to θ j given a video database of classroom recordings and by using the law of large numbers (llns), find the pmf of each sequence of w.in order to find p(θ i |θ j ) at every state, we went through 50 h of classroom videos and followed the method explained in section ii-b to count the number of transitions from every pose to every other.the next step to enhance the accuracy of the proposed system was applying the markov model and bayesian map estimator to the nn classifier output and find the most probable pose in practice.9(a) and (c) shows the output of the pose detection system when only an nn classifier is used after the skeletondetecting deep neural network. in this article, we presented a low-cost two-camera solution which relies on off-the-shelf deep pose detection libraries and is equipped with an extra processing layer with bayesian map estimators and markov models for better detection accuracy and promotion of output videos. the frames received from the fixed camera were used to track instructor's position also determine his/her pose. we used openpose deep network to detect the skeleton of instructor and extracted 12 features from that to help a neural network classifier detect his/her pose. 
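the markov correction described above condenses to a one-line map rule: multiply the per-frame classifier likelihoods by the transition probabilities out of the current pose and take the argmax. the transition matrix and pose labels below are invented for illustration (the real counts were estimated from the 50 h of classroom video), but they show how an impossible transition, such as blackboard to sitting, suppresses a momentary misclassification.

import numpy as np

# illustrative poses: 0 facing students, 1 facing blackboard, 2 sitting, 3 writing
T = np.array([[0.70, 0.15, 0.05, 0.10],
              [0.20, 0.70, 0.00, 0.10],   # blackboard -> sitting set to impossible
              [0.30, 0.00, 0.60, 0.10],
              [0.25, 0.15, 0.05, 0.55]])

def map_pose(prev_pose, frame_likelihoods):
    """argmax_j p(evidence | pose_j) * p(pose_j | prev_pose): the per-frame
    classifier output corrected by the Markov transition prior."""
    posterior = frame_likelihoods * T[prev_pose]
    return int(np.argmax(posterior))

# the classifier briefly 'sees' sitting while the instructor faces the board,
# but the prior keeps the state at facing-blackboard (index 1)
print(map_pose(prev_pose=1, frame_likelihoods=np.array([0.2, 0.35, 0.4, 0.05])))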
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/252.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/252.txt new file mode 100644 index 0000000000000000000000000000000000000000..c57e471f847b0f27107caf59b4bf77aa2785b5f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/252.txt @@ -0,0 +1 @@ +ever since the introduction of credit cards and online payments, many scammers have found ways to exploit people and steal their credit card information to use them for unauthorized purchases. this leads to a huge amount of fraudulent purchases every day. banks and ecommerce websites are trying to identify these fraudulent transactions and stop them from happening again. with machine learning and deep learning methods, they are trying to stop the fraudsters before the transaction is approved.machine learning is one of the hottest topics of this decade and a subset of artificial intelligence. more and more companies are looking to invest in machine learning to improve their services. machine learning is a combination of various computer algorithms and statistical modeling to allow the computer to perform tasks without hard coding. the acquired model would be learning from the "training data". predictions can be made or actions can be performed from stored experiential knowledge. deep learning models are a part of machine learning techniques which involves artificial neural networks. convolutional neural networks, deep belief network, auto-encoders, recurrent neural network, and restricted boltzmann machine are all various methods. a properly trained nn would have the capability to capture unique relationships over the whole dataset.credit card fraud is a form of fraud involving the use of fake or stolen credit card information and causing financial harm to account holders or merchants involved. the total number of credit card fraud in single euro payments area (sepa) in 2016 was 1.8 billion euros out of the total 4.38 trillion euros transaction, which is 0.4% lower than the previous year . in 2015, according to the nelson report, the total loss from the credit cards in the world was $21.84 billion and projected that in 2020 it would be $32 billion. in this paper, we will be looking into 3 data sets. they are the european dataset , the australian dataset and the german dataset . in this work we aim to benchmark different ml and dl techniques. an ensemble of the best 3 performing models is also applied the all 3 datasets. we present our conclusions based on an empirical study comparing different ml and deep learning models.the paper is organized as follows. section 2 summarizes the related work and the background of the models implemented. section 3 provides details on implementation and experimental setup. in section 4 we present and discuss the results. conclusions and future work are summarized in sections 5 and 6 respectively. as follows we review some of the most relevant studies that have applied machine learning and deep learning models in the area of fraud detection. in their paper, they discuss the various concerns while handling this problem, such as there is a non-availability of real-world data which forces researches to work on faux data as the banks and other financial institutions don't make their data public due to privacy concerns as it is sensitive data. the methodology they used was splitting the datasets into 4 different sets which contained 20% fraud, 15% fraud, 10% fraud and 3% fraud. 
hence, they used true positive rate (fraud catching rate), true negative rate, false positive rate (false alarm rate) and false negative rate.randhawa et al's paperon credit card fraud detection using adaboost and majority voting explores many different machine learning algorithms such as naïve bayes, random forest, gradient boosted tree, etc.in this study we present an empirical comparison of various machine learning and deep learning models inspired by the previous studies.in particular we compare the performance of svm, knn and random forest to deep learning methods such as autoencoders, rbm, dbn and cnn.our experiments try to investigate the effectiveness of different machine learning and deep learning models using data set with varying size and complexities.the idea behind the cost of failure is that each of the false negatives (frauds detected as normal) would have a cost of $1000 and false positives (normal instances detected as fraud) would have a cost of $100 to the company/entity. here, the recommendation would be to choose svm instead of the ensemble if the company is looking to reduce the cost as much as possible as the ensemble method would take longer time in terms of both training and testing while svm has the least in terms of testing and training. a second ensemble (ensemble 2) is build using the models with the least cost of failures: knn, svm and random forest. ensemble 2 (knn, svm, random forest) on the other hand, which was based on combining classifiers with the least cost of failure methods, classifier achieved a higher mcc, auc, and lower cost value. examining the results of the table we can see that svm, random forest and cnn are the best models in terms of performance (auc and mcc values). convolutional neural networks was found to be the best deep learning method as it produces good results for both european and german dataset, while its performance on the australian dataset was the 4th best and it cost of failure was similar to knn.this paper provides an empirical investigation comparing various machine learning and deep learning models on different data sets for the detection of fraudulent transaction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/253.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/253.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2a9c5f96aa2a9067bbb04053179accab1e894ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/253.txt @@ -0,0 +1 @@ +there has been explosive growth in both artificial intelligence and digital currencies over the past few years. as technology evolves at a torrid pace, the collateral benefits continue to thrive. within the ai space, the hurdles to employ advanced statistics or even deep learning have been reduced significantly. incredibly user friendly api's can be called upon and tweaked in very little time. as such, more and more fields are using these methods to help derive meaning from years or even decades worth of previously recorded data.cryptocurrencies have also risen to the mainstream by way of continued adoption. it remains to be seen yet whether, as some claim, the digital currency will one day replace physical monies, but their usefulness has already been thoroughly demonstrated. more importantly, the transaction data has already been recorded and logged for many years. this helps demonstrate that the system does indeed work. 
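the cost-of-failure criterion used above to compare the fraud models (each missed fraud costing $1000 and each false alarm costing $100, figures taken from the text) reduces to a small helper such as the one below; the toy label vectors are for illustration only.

import numpy as np

def cost_of_failure(y_true, y_pred, fn_cost=1000, fp_cost=100):
    """Total cost: false negatives (frauds passed as normal) at $1000 each,
    false positives (normal transactions flagged as fraud) at $100 each."""
    y_true, y_pred = np.asarray(y_true), np.asarray(y_pred)
    fn = np.sum((y_true == 1) & (y_pred == 0))
    fp = np.sum((y_true == 0) & (y_pred == 1))
    return fn * fn_cost + fp * fp_cost

# one missed fraud and one false alarm -> 1000 + 100
print(cost_of_failure([1, 1, 0, 0, 0, 1], [1, 0, 0, 1, 0, 1]))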
in some cases, cryptocurrencies rely on a decentralized documentation process, such as the blockchain, which aids in accuracy. both the ubiquity of digital currency adoption as well as the transparent nature of its underlying blockchain support our claim that the data provided is objective and correct.not all digital coins take this approach, but by far, the largest, bitcoin does indeed. thus bitcoin is a natural choice for examining how its historical data can properly feed modern data science approaches. we first look at which features can be extracted from the blockchain, and test their usefulness. next, we look at the trading prices themselves in order to determine patterns needed for price movements forecasting. similar techniques have been applied in the past, such as greaves et al. , but their work was done back in 2015 and a limited feature selection was elaborated on the blockchain network dataset. our hope herein is to leverage their guidance and generate new and robust approaches to a variety of circumstances using readily available api's and publicly available datasets. we will be using a kaggle dataset which provides transaction details for many different cryptocurrencies. specifically, we will extract the bitcoin records to help test and train our models. there are over 1,500 trading days available, which we believe is sufficient to generate usable algorithms.the remaining of this paper is organized as follows. in section ii, we give a summary of existing solutions. in section iii we discuss the background of our research problem along with related challenges. building upon the detailed research problem, we present our contributing feature selection methodology and time series-based price prediction in sections iv and v, respectively. evaluation setups, implementations, and findings are also discussed in each corresponding section iv and v. lastly, we conclude this work in section vi. this study builds upon existing efforts to leverage features about blockchain as a baseline to perform machine learning-based price predictions of bitcoin.aimed at forecasting the global computing power of blockchain system by leveraging changes in bitcoin prices, jang and leeutilized blockchain information features along with time series and bayesian neural networks (bnns) to predict and analyze changes in bitcoin prices. besides price forecasting, saad et al.besides past price features, in this work, we identify other features to explore patterns and correlations among varying prices. based upon these price features, the model is trained and tested in order to attain an efficient prediction accuracy. this work tries to build upon existing machine learning techniques for price forecasting to customize and generate novel and efficient methods while considering a variety of circumstances using readily available api's and publicly available datasets. there are several columns which, in the context of bitcoin, provide the same result: price in usd and price in bitcoin do not yield different or better information. price in bitcoin is simply the ratio of a given currency's daily price divided by the same day's price of bitcoin. because the target variable, price, is a continuous variable, a regression approach would be needed. each model predicts the price one day in advance and compares the results to the actual price over the relevant timeframe. the ridge regression did perform slightly better than the standard linear regression, most likely due to simply standardizing the data. 
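a minimal sketch of the one-day-ahead regression comparison described above; the kaggle column layout is an assumption (a stand-in price series is generated here), and standardization is included on the ridge side because the small improvement above is attributed to it.

import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.metrics import mean_absolute_error
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

# assumed input: one row per trading day with a 'price' column for bitcoin
rng = np.random.default_rng(0)
df = pd.DataFrame({"price": 10000 + np.cumsum(rng.normal(0, 100, 1500))})  # stand-in series

for lag in (1, 2, 3):                       # predict tomorrow's price from the last few closes
    df[f"lag_{lag}"] = df["price"].shift(lag)
df["target"] = df["price"].shift(-1)
df = df.dropna()

X, y = df[["lag_1", "lag_2", "lag_3"]].values, df["target"].values
split = int(0.8 * len(df))                  # keep time order: train on the past, test on the future
models = {
    "linear regression": LinearRegression(),
    "ridge (standardized)": make_pipeline(StandardScaler(), Ridge(alpha=1.0)),
}
for name, model in models.items():
    model.fit(X[:split], y[:split])
    print(name, "mae:", round(mean_absolute_error(y[split:], model.predict(X[split:])), 2))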
support vector regression price prediction finally, we introduced a "simple" deep learning approach, a sequential neural network (snn) which can be seen in figure8. the snn is also suited for a continuous variable like price, but what we found most useful was the ability to focus the model on both mse and mae. we set up a dense neural network with a single hidden layer, rectified linear unit (relu) activation function for the input, and linear activation for the hidden one. if the data has a continuous mapping from one space to only one other space (such as price), a single hidden layer is sufficient. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/254.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/254.txt new file mode 100644 index 0000000000000000000000000000000000000000..8380a9e7756f54be0d9404de51adee2d99b9b8ce --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/254.txt @@ -0,0 +1 @@ +manipulation of visual content has now become ubiquitous, and one of the most critical topics in our digital society.for instance, deepfakes has shown how computer graphics and visualization techniques can be used to defame persons by replacing their face by the face of a different person.faces are of special interest to current manipulation methods for various reasons: firstly, the reconstruction and tracking of human faces is a well-examined field in computer vision , which is the foundation of these editing approaches.secondly, faces play a central role in human communication, as the face of a person can emphasize a message or it can even convey a message in its own right .current facial manipulation methods can be separated into two categories: facial expression manipulation and facial identity manipulation (see fig. 2).one of the most prominent facial expression manipulation techniques is the method of thies et al. 
called face2face.it enables the transfer of facial expressions of one person to another person in real time using only commodity hardware.follow-up work such as "synthesizing obama" is able to animate the face of a person based on an audio input sequence.figure 2: advances in the digitization of human faces have become the basis for modern facial image editing tools.the editing tools can be split in two main categories: identity modification and expression modification.aside from manually editing the face using tools such as photoshop, many automatic approaches have been proposed in the last few years.the most prominent and widespread identity editing technique is face swapping, which has gained significant popularity as lightweight systems are now capable of running on mobile phones.additionally, facial reenactment techniques are now available, which alter the expressions of a person by transferring the expressions of a source person to the target.identity manipulation is the second category of facial forgeries.instead of changing expressions, these methods replace the face of a person with the face of another person.this category is known as face swapping.it became popular with wide-spread consumer-level applications like snapchat.deepfakes also performs face swapping, but via deep learning.while face swapping based on simple computer graphics techniques can run in real time, deepfakes need to be trained for each pair of videos, which is a timeconsuming task.in this work, we show that we can automatically and reliably detect such manipulations, and thereby outperform human observers by a significant margin.we leverage recent advances in deep learning, in particular, the ability to learn extremely powerful image features with convolutional neural networks (cnns).we tackle the detection problem by training a neural network in a supervised fashion.to this end, we generate a large-scale dataset of manipulations based on the classical computer graphics-based methods face2face and faceswap as well as the learningbased approaches deepfakes and neuraltextures .as the digital media forensics field lacks a benchmark for forgery detection, we propose an automated benchmark that considers the four manipulation methods in a realistic scenario, i.e., with random compression and random dimensions.using this benchmark, we evaluate the current state-of-the-art detection methods as well as our forgery detection pipeline that considers the restricted field of facial manipulation methods.our paper makes the following contributions:• an automated benchmark for facial manipulation detection under random compression for a standardized comparison, including a human baseline,• a novel large-scale dataset of manipulated facial imagery composed of more than 1.8 million images from 1,000 videos with pristine (i.e., real) sources and target ground truth to enable supervised learning,• an extensive evaluation of state-of-the-art hand-crafted and learned forgery detectors in various scenarios,• a state-of-the-art forgery detection method tailored to facial manipulations.current facial manipulation methods can be separated into two categories: facial expression manipulation and facial identity manipulation (see fig.face manipulation methods: in the last two decades, interest in virtual face manipulation has rapidly increased.presented an image-based approach called video rewrite to automatically create a new video of a person with generated mouth movements.with video face replacement, dale et al.demonstrated the first 
real-time expression transfer for facial reenactment.several other works explicitly refer to detecting manipulations related to faces, such as distinguishing computer generated faces from natural ones, morphed faces, face splicing, face swappingand deepfakes.however, early experiments with all manipulation methods showed that the target face had to be nearly front-facing to prevent the manipulation methods from failing or producing strong artifacts.a face in a target sequence is replaced by a face that has been observed in a source video or image collection.to create a fake image, the trained encoder and decoder of the source face are applied to the target face.face2face face2faceis a facial reenactment system that transfers the expressions of a source video to a target video while maintaining the identity of the target person.postprocessing -video quality to create a realistic setting for manipulated videos, we generate output videos with different quality levels, similar to the video processing of many social networks.to track the face in the video and to extract the face region of the image.this incorporation of domain knowledge figure5: our domain-specific forgery detection pipeline for facial manipulations: the input image is processed by a robust face tracking method; we use the information to extract the region of the image covered by the face; this region is fed into a learned classification network that outputs the prediction.we evaluated various variants of our approach by using different state-of-the-art classification methods.the constrained convolutional layer is specifically designed figure6: binary detection accuracy of all evaluated architectures on the different manipulation methods using face tracking when trained on our different manipulation methods separately. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/255.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/255.txt new file mode 100644 index 0000000000000000000000000000000000000000..78729e1c15ee62973357eb67505465ff55f2e8a2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/255.txt @@ -0,0 +1 @@ +water and fire managers in the western united states (u.s.) rely on subseasonal forecasts-forecasts of temperature and precipitation two to six weeks in advance-to allocate water resources, manage wildfires, and prepare for droughts and other weather extremes . while purely physics-based numerical weather prediction dominates the landscape of short-term weather forecasting, such deterministic methods have a limited skillful (i.e., accurate) forecast horizon due to the chaotic nature of their differential equations . prior to the widespread availability of operational numerical weather prediction, weather forecasters made predictions using their knowledge of past weather patterns and climate (sometimes called the method of analogs) . the current availability of ample meteorological records and high-performance computing offers the opportunity to blend physics-based and statistical machine learning (ml) approaches to extend the skillful forecast horizon.this data and computing opportunity, coupled with the critical operational need, motivated the u.s. bureau of reclamation and the national oceanic and atmospheric administration (noaa) to conduct the subseasonal climate forecast rodeo , a yearlong real-time forecasting challenge, in which participants aimed to skillfully predict temperature and precipitation in the western u.s. 
two to four weeks and four to six weeks in advance. to meet this challenge, we developed an ml-based forecasting system and a subseasonalrodeo dataset suitable for training and benchmarking subseasonal forecasts. ml approaches have been successfully applied to both short-term (< 2 week) weather forecasting and longer-term climate prediction, but mid-term subseasonal outlooks, which depend on both local weather and global climate variables, still lack skillful forecasts. our subseasonal ml system is an ensemble of two nonlinear regression models: a local linear regression model with multitask feature selection (multillr) and a weighted local autoregression enhanced with multitask k-nearest neighbor features (autoknn). the multillr model introduces candidate regressors from each data source in the subseasonalrodeo dataset and then prunes irrelevant predictors using a multitask backward stepwise criterion designed for the forecasting skill objective. the autoknn model extracts features only from the target variable (temperature or precipitation), combining lagged measurements with a skill-specific form of nearest-neighbor modeling. for each of the two rodeo target variables (temperature and precipitation) and forecast horizons (weeks 3-4 and weeks 5-6), this paper makes the following principal contributions: (1) we release a new subseasonalrodeo dataset suitable for training and benchmarking subseasonal forecasts. (2) we introduce two subseasonal regression approaches tailored to the forecast skill objective, one of which uses only features of the target variable. (3) we introduce a simple ensembling procedure that provably improves average skill whenever average skill is positive. (4) we show that each regression method alone outperforms the rodeo benchmarks, including a debiased version of the operational u.s. climate forecasting system (cfsv2), and that our ensemble outperforms the top rodeo competitor. (5) we show that, over 2011-2018, an ensemble of our models and debiased cfsv2 improves debiased cfsv2 skill by 40-50% for temperature and 129-169% for precipitation. we hope that this work will expose the ml community to an important problem ripe for ml development (improving subseasonal forecasting for water and fire management), demonstrate that ml tools can lead to significant improvements in subseasonal forecasting skill, and stimulate future development with the release of our user-friendly python pandas subseasonalrodeo dataset. the subseasonal climate forecast rodeo was a year-long, real-time forecasting competition in which, every two weeks, contestants submitted forecasts for average temperature (°c) and total precipitation (mm) at two forecast horizons, 15-28 days ahead (weeks 3-4) and 29-42 days ahead (weeks 5-6). the official contest cfsv2 forecast for t, an average of 32 operational forecasts based on 4 model initializations and 8 lead times, was debiased by adding the mean observed temperature or precipitation for monthday(t) over 1999-2010 and subtracting the mean cfsv2 reforecast, an average of 8 lead times for a single initialization, over the same period. for each target variable (temperature or precipitation) and horizon (weeks 3-4 or 5-6), our forecasting system relies on two regression models trained using two sets of features derived from the subseasonalrodeo dataset. specifically, the training data for a given target date is restricted to a 56-day (8-week) span around the target date's day of the year (s = 56).
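as a small illustration of the 56-day training window just described, the snippet below keeps only historical dates whose day of year falls within 28 days of the target date's day of year; the date range and example target date are assumptions, not the released subseasonalrodeo layout.

import numpy as np
import pandas as pd

def in_day_of_year_window(dates, target_date, span_days=56):
    # boolean mask for dates whose day-of-year lies in a span_days window
    # centred on the target date's day-of-year (wrapping around the new year)
    doy = dates.dayofyear.values
    target_doy = pd.Timestamp(target_date).dayofyear
    diff = np.abs(doy - target_doy)
    circular = np.minimum(diff, 365 - diff)   # distance on the calendar circle
    return circular <= span_days // 2

dates = pd.date_range("1990-01-01", "2010-12-31", freq="D")
mask = in_day_of_year_window(dates, "2011-04-18")
print(mask.sum(), "of", len(dates), "historical days fall inside the training window")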
when predicting weeks 3-4, we include lagged temperature or precipitation anomalies from 29 days, 58 days, and 1 year prior to the target date; when predicting weeks 5-6, we use 43 days, 86 days, and 1 year. these lags are chosen because the most recent data available to us are from 29 days before the target date when predicting weeks 3-4 and 58 days before the target date when predicting weeks 5-6. our measure of similarity is tailored to the cosine similarity objective: similarity between a target date and another date is measured as the mean skill observed when the historical anomalies preceding the candidate date are used to forecast the historical anomalies of the target date. to predict a given target date, we regress onto the three fixed lags, the constant intercept feature ones, and either knn1 through knn20 (for temperature) or knn1 only (for precipitation), treating each grid point as a separate prediction task. for weeks 3-4, the eight forecasts came from 15 and 16 days prior to the target date; for weeks 5-6, we used 29 and 30 days prior. for each date t, we then reconstructed the debiased cfsv2 forecast by subtracting the long-term cfs average and adding the observed target variable average over 1999-2010 for monthday(t) to the reconstructed cfsv2 forecast. while the official contest cfsv2 baseline averages the forecasts of four model initializations, the cfsv2 operational forecast dataset only provides the forecasts of one model initialization (the remaining model initialization forecasts are released in real time but deleted after one week). to mimic the actual real-time use of the forecasting system to produce forecasts for a particular target date, we train our models using only data available prior to the forecast issue date; for example, the forecasts issued on april 18, 2011 are only trained on data available prior to april 18, 2011. the ensemble improves over the debiased cfsv2 average skill by 23% for temperature weeks 3-4, by 39% for temperature weeks 5-6, by 123% for precipitation weeks 3-4, and by 157% for precipitation weeks 5-6. the figure shows that when predicting precipitation, the top neighbor for a target date is generally from the same time of year as the target date: for summer target dates, the top neighbor tends to be from a summer month and similarly for winter target dates. each dataframe or series contributes data variables (features or target values) falling into one of three categories: (i) spatial (varying with the target grid point but not the target date); (ii) temporal (varying with the target date but not the target grid point); (iii) spatiotemporal (varying with both the target grid point and the target date). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/256.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/256.txt new file mode 100644 index 0000000000000000000000000000000000000000..0ab8885349a30b288cd5114ec5170f987ee72c9c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/256.txt @@ -0,0 +1 @@ +e-learning has been established as an effective means for learning because of various features like "learning at your own pace" and "getting certified from home". however, the concept is not very common in the field of education for learning disabled children.
moodle is a very popular lms used in various institutions for facilitating e-learning due it's easy interface and apt features for an online classroom environment.ld is a disorder affecting the nervous system. to put it simply, it is the different way in which a person's brain works. children having lds have difficulty in reading, writing, reasoning, recalling, organizing information or understanding on their own. they also find it hard to learn at a normal or regular pace like others. we are focusing on detecting and attempting to solve some aspects of dyslexia which is difficulty in reading and is one of the more commonly known lds .the study of ld focuses on identifying the conditions that affect the student's personal development and justify the provision of certain aids or special services, such as adaptations to the tools they can use for certain processes, for example, adaptations to access, assistance, intervention, and learning .learning disability(ld) according to the who's international classification of diseases the ld are known as "specific developmental disorders of scholastic skills" and are classified as follows : specific reading disorder. specific spelling disorder. specific disorder of arithmetical skills.other developmental disorders of scholastic skills.developmental disorder of scholastic skills.mixed disorder of scholastic skills.data handling & analysis -this group of components deals with fetching all the users data from the moodle through the admin user, analysing the data using python which is outside the moodle environment. to find whether the user has a miscue problem, word by word comparison is done between the actual passage and the converted text from the audio response. word substitutions, omissions, additions, reversals, mispronunciations, prefix/suffix additions are computed in the response (audio) by comparing every word, it's preceding, succeeding word accordingly or comparing letter inversions (reversals).the picture description is used to analyse the visual and the audio processing skills of the student which is done by providing a picture of a garden and asking them to describe the image in their own words by recording their speech on the system which is later obtained from moodle. to understand whether the user has the ability to describe the picture well or not we capture 5 recordings each capturing a particular sentence which is converted to a keyword list after tokenizing and stopwords removal. we have calculated the similarity by using cosine similarity technique by vectorizing the two lists and the percentage is stored in another list.we then simply check if the maximum percentage value in the list is greater than the threshold of 40% then, the recorded sentence is classified as similar to the corpus else, it is classified as not similar. if more than 60% (3 out of 5) of the sentences have the cosine similarity of less than 40% with the corpus, then it means that the user has difficulty in describing the picture, else he does not have this difficulty.the topic description mainly helps us understand whether the student is able to frame correct sentences and gauge the understanding of the topic. to find out whether the student has this difficulty or not, we have created an assessment in moodle where a topic is provided and the students are made to speak 5 sentences on the topic given. the keyword list is generated for the recording and corpus similar to picture description and cosine similarity is computed. 
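a minimal sketch of the keyword-similarity check described above, using a bag-of-words vectorizer with stopword removal, cosine similarity, and the 40% / 60% thresholds mentioned; the example sentences and the reference corpus below are made up for illustration.

from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity

def has_description_difficulty(sentences, corpus, sim_threshold=0.40, fail_fraction=0.60):
    # flag difficulty if too many recorded sentences are dissimilar to the reference corpus
    vectorizer = CountVectorizer(stop_words="english")   # tokenization + stopword removal
    dissimilar = 0
    for sentence in sentences:
        vectors = vectorizer.fit_transform([sentence, corpus])
        similarity = cosine_similarity(vectors[0], vectors[1])[0, 0]
        if similarity < sim_threshold:
            dissimilar += 1
    return dissimilar / len(sentences) >= fail_fraction

corpus = "a garden with green grass, flowers, trees and children playing"
recordings = ["the garden has many flowers and trees",
              "children are playing on the green grass",
              "my favourite food is pizza",
              "the school bus arrived late today",
              "these shoes are red and new"]
print("difficulty detected:", has_description_difficulty(recordings, corpus))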
if 60% of the sentences have a cosine similarity close to 0, then it means that the user has difficulty in describing the topic, else he does not have this difficulty. the short term memory problem is assessed by recording the speech input of the user obtained via the courses attempted on moodle. to determine whether the user has short term memory loss, we provide an audio recording containing a small passage including sequences of specific objects to be memorized. the confusion matrix tells us that out of 10 test cases, 5 times the model predicts a tuple belonging to the ld class correctly (true positives), 0 times non-ld is predicted as ld (false positives), 1 time ld is predicted as non-ld (false negatives), and 4 times non-ld is correctly classified (true negatives). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/257.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/257.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4b45d88dabfd1dc6e8b1e300f339065bf633d2e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/257.txt @@ -0,0 +1 @@ +deep learning is a subclass of machine learning. compared with traditional machine learning, deep learning is more suitable for big data processing: algorithm performance increases as the data volume grows. unlike traditional machine learning, deep learning does not rely on the manual determination of application features. instead, it attempts to directly obtain higher-level features from the data and achieve a deep machine learning model through multiple transformations of features. deep learning has experienced decades of development since its beginning. from the proposal of the mp model and the hebb learning rule to the discovery of perceptron learning by the american scientist rosenblatt, the proposal of the perceptron was a milestone in the development of neural networks. however, because the single-layer perceptron could not solve nonlinear classification problems, the development of artificial neural networks fell into a trough; it was not until 1986 that the back propagation (bp) algorithm, suitable for training multilayer perceptrons, was proposed, which solved the nonlinear problem and brought artificial neural networks out of this plight and back into attention. due to the limited computing power of the time, however, large models could not be trained; it was not until 2006 that the deep learning approach of "layer-by-layer unsupervised pre-training followed by supervised fine-tuning with back propagation" was proposed, which pushed deep learning into a stage of explosive growth. in 2016, alphago beat lee se-dol 4-1, which caused a worldwide sensation. in 2017, alphago zero, an upgraded version of alphago based on reinforcement learning algorithms, was launched.
by taking the hidden layer of one restricted boltzmann machine as the visible layer of the next, several restricted boltzmann machines are stacked to form a deep belief network. a convolutional neural network is a feedforward neural network that includes convolution operations and has a deep structure. in the process of image recognition and processing, feature extraction is performed by the convolution layers, and the number of weights is reduced through the network structure of weight sharing and pooling, so as to reduce the complexity of image feature extraction and data reconstruction and to greatly reduce the number of parameters from the image input layer to the hidden layers. the basic structure of a convolutional neural network includes convolution layers, pooling layers and a fully connected layer. convolution layers and pooling layers constitute multiple convolution units that extract features layer by layer, and image classification is finally completed through the later fully connected layer. with a tree-like hierarchical structure, the recursive neural network is an artificial neural network in which network nodes recursively process a whole input sequence according to their connection order, and it is one of the algorithms of deep learning. recursive neural networks are divided into temporal recursive (recurrent) neural networks and structural recursive neural networks. the neurons of a temporal recursive neural network are connected to form a directed graph, while a structural recursive neural network recurses over a similar network structure to construct a more complex neural network. the major difference between a recurrent neural network and a feedforward neural network is that the recurrent neural network has a certain "memory". as the pioneering work on convolutional neural networks, lenet was put forward in the 1990s; it determined the basic structure of the convolutional neural network's convolution layer, pooling layer and fully connected layer, and laid the foundation of convolutional neural networks. the inception module was proposed to let a convolutional neural network perform multiple convolution and pooling operations on the input image in parallel, so as to avoid the problem that convolution layers are stacked deeper and deeper merely for better performance. for deep learning, when a network has many layers and is trained by back propagation, using an activation such as the sigmoid function means that the chain rule multiplies many small derivatives, so the error signal from the output layer decays as it propagates back layer by layer; the lower layers then receive almost no effective training, which affects the normal operation of the network. at present, image classification algorithms based on deep learning, especially convolutional neural networks, can achieve very good results on simple image classification, but in more complex image processing that contains variation, such as face recognition where faces change constantly with time, lighting and angle, how to identify complex images better, faster and more accurately has become the focus of current research.
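a small numeric sketch of the vanishing-gradient effect described above: the derivative of the sigmoid is at most 0.25, so back propagation multiplies one such factor per layer and the error signal reaching the early layers shrinks quickly; the layer count and weight scale are arbitrary illustrative choices.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

rng = np.random.default_rng(0)
x, grad = 0.5, 1.0                 # grad is the error signal arriving at the output layer
for layer in range(1, 11):
    w = rng.normal(0, 1)           # one scalar weight per "layer" in this toy chain
    a = sigmoid(w * x)
    grad *= a * (1 - a) * w        # chain rule: sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z))
    x = a
    print(f"after layer {layer}: gradient magnitude = {abs(grad):.2e}")
# relu activations and skip connections are common ways to keep this product from
# collapsing toward zero in deep networks.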
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/258.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/258.txt new file mode 100644 index 0000000000000000000000000000000000000000..39cf45c1f32866761ed3757d4a68ae3a88caa2ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/258.txt @@ -0,0 +1 @@ +agriculture plays a very important role in the economic growth of any country. it is a field which highly affects the gdp of countries; the agriculture sector contributes around 16% of the gdp of india. there are various factors that affect the quality and quantity of crops cultivated. due to different weather and local conditions these plants are exposed to various diseases, and if these diseases remain undetected they may cause serious losses. in india itself around 15-25 percent of crops are lost due to diseases, pests and weeds. we can also take reference of the incident in georgia (usa) in 2007 in which there was a loss of around 540 million usd due to plant diseases. as the cultivated fields are quite large and contain a very large number of plants, it becomes very difficult for the human eye to properly inspect and classify each and every plant, and doing so is very important as even a single infected plant can spread the disease. also, most of the farmers do not have proper knowledge of those diseases and the actual cure for them; hiring experts may cost them heavily and use of pesticides without knowledge will harm the land. hence, in order to solve this problem we have developed an artificial intelligence based solution. accuracy and speed are the two main factors that will decide the success of the automatic plant leaf disease detection and classification model. the suggested model will help the farmers to correctly detect and classify the disease by scanning the leaf and alert the farmers about the disease before it starts spreading. the model is mainly divided into four steps or phases. in the first one, we collect the dataset of different plant leaves, infected as well as healthy; all these images are color images. in the second step, noise is removed from the images and a color transformation structure is created for them. in the third step we segment the images using available clustering techniques; this step is performed to easily extract the foreground, that is, the leaf, so that an image set of leaves on a black background is obtained. in the final step, different machine learning and deep learning algorithms like logistic regression, knn, svm and cnn are trained and compared on the basis of accuracy, and the algorithm that performs best in training as well as testing is taken into account. s, r. in their approach collected 75 images of different diseased plant leaves such as bacterial blight and more.
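a minimal sketch of the clustering-based segmentation step described earlier (isolating the leaf on a black background); it clusters pixel colours with k-means, and the cluster count and the "greenest cluster" heuristic are assumptions made for illustration, with a random stand-in image instead of the real leaf dataset.

import numpy as np
from sklearn.cluster import KMeans

def segment_leaf(image_rgb, n_clusters=3, seed=0):
    # cluster pixel colours and keep the cluster with the strongest green response
    h, w, _ = image_rgb.shape
    pixels = image_rgb.reshape(-1, 3).astype(float)
    labels = KMeans(n_clusters=n_clusters, n_init=10, random_state=seed).fit_predict(pixels)
    means = np.array([pixels[labels == k].mean(axis=0) for k in range(n_clusters)])
    leaf_cluster = np.argmax(means[:, 1] - 0.5 * (means[:, 0] + means[:, 2]))  # assumed heuristic
    mask = (labels == leaf_cluster).reshape(h, w)
    segmented = np.zeros_like(image_rgb)
    segmented[mask] = image_rgb[mask]          # leaf pixels kept, background left black
    return segmented

fake_image = np.random.randint(0, 256, size=(64, 64, 3), dtype=np.uint8)  # stand-in image
print(segment_leaf(fake_image).shape)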
there were total of 5 classes that include 4 disease classes and one normal healthy leaf class.another paper named "plant leaf disease detection and classification based on cnn with lvq algorithm" clarifies that they have used cnn model for the leaf disease classification. three matrixes for r, g, b channels were used as input to cnn model and the output was feed into neural network known as lvq (learning vector quantization)."plant disease classification using image segmentation and svm techniques" by k. after that image was cropped and with image preprocessing techniques noise was removed and smoothening was done and converted into greyscale images. cnn is a neural network which comprises of four layers namely-convolutional layer, pooling layer, activation function layer and fully connected layer as shown in figure) pooling layer this is the layer which is majorly responsible for size reduction of output of previous layer. the cnn model was able to give accuracy of 98% on testing set that is so far best among all classifiers with 0. we are able to achieve the accuracy of 98% in detecting and classifying the plant leaf disease. the accuracy score achieved by all classifiers are as follows: - the presented model used the dataset that consists of more than 20,000 images with 19 total classes. the following model can be extended by using even more large dataset with more categories of diseases and the accuracy can also be improved by tuning the hyperparameters. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/259.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/259.txt new file mode 100644 index 0000000000000000000000000000000000000000..69d3342029cdcf6832e408e8317cd2474325ebad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/259.txt @@ -0,0 +1 @@ +in recent years, deep learning has become an important methodology in many informatics fields such as vision recognition, natural language processing, and bioinformatics . deep learning is also a strong analytic tool for huge volumes of data. in the internet of things (iot), one open problem is how to reliably mine real-world iot data from a noisy and complex environment that confuses conventional machine learning techniques. deep learning is considered as the most promising approach to solving this problem . deep learning has been introduced into many tasks related to iot and mobile applications with encouraging early results. for example, deep learning can precisely predict the home electricity power consumption with the data collected by smart meters, which can improve the electricity supply of the smart grid . because of its high efficiency in studying complex data, deep learning will play a very important role in future iot services.edge computing is another important technology for iot services . due to data transferring with limited network performance, the centralized cloud computing structure is becoming inefficient for processing and analyzing huge amounts of data collected from iot devices . as edge computing offloads computing tasks from the centralized cloud to the edge near iot devices, transferred data are enormously reduced by the preprocessing procedures. the edge computing can perform well when the intermediate data size is smaller than the input data size.a typical deep learning model usually has many layers in the learning network. the intermediate data size can be quickly scaled down by each network layer until enough features are found. 
therefore, the deep learning model is very appropriate for the edge computing environment since it is possible to offload parts of learning layers in the edge and then transfer the reduced intermediate data to the centralized cloud server.another advantage of deep learning in edge computing is the privacy preserving in intermediate data transferring. intermediate data generated in traditional big data systems, such as mapreduce or spark, contains the user privacy since the preprocessing remains as data semantics. the intermediate data in deep learning usually have different semantics compared to the source data. for example, it is very hard to understand the original information with the features extracted by a convolutional neural network (cnn) filter in the intermediate cnn layer.thus, in this article, we introduce deep learning for iot into the edge computing environment to improve learning performance as well as to reduce network traffic. we formulate an elastic model that is compatible with different deep learning models. thus, because of the different intermediate data size and preprocessing overhead of different deep learning models, we state a scheduling problem to maximize the number of deep learning tasks with the limited network bandwidth and service capability of edge nodes. we also try to guarantee the quality of service (qos) of each deep learning service for iot in the scheduling. we design offline and online scheduling algorithms to solve the problem. we perform extensive simulations with multiple deep learning tasks and given edge computing settings. the experimental results show that our solution outperforms other optimization methods on deep learning for iot.the main contributions of this article are summarized as follows. we first introduce deep learning for iot into the edge computing environment. to the best of our knowledge, this is an innovative work focusing on deep learning for iot with edge computing. we formulate an elastic model for varying deep learning models for iot in edge computing. we also design an efficient online algorithm to optimize the service capacity of the edge computing model. finally, we test the deep learning model for iot with extensive experiments in a given edge computing environment. we also compare our edge computing method to traditional solutions.the remainder of this article can be outlined introducing deep learning into more iot applications is another important research issue . the efficiency of deep learning for iot has been evaluated in many important iot applications. for example, some works focus on the applications in wearable iot devices deployed in dynamic and complex environments that often confuse the traditional machine learning methods. bhattacharya et al. proposed a new deep learning model for wearable iot devices that improves the accuracy of audio recognition tasks.most existing deep learning applications (e.g., speech recognition) still need to be cloud-assisted. alsheikh et al. proposed a framework to combine deep learning algorithms and apache spark for iot data analytics. the inference phase is executed on mobile devices, while apache spark is deployed in cloud servers for supporting data training. this two-layer design is very similar to edge computing, which shows that it is possible to offload processing tasks from the cloud. as edge computing offloads computing tasks from the centralized cloud to the edge near iot devices, transferred data are enormously reduced by the preprocessing procedures. 
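a toy sketch of the scheduling idea stated above: tasks are considered one by one and a task is placed on an edge server only if that server still has service capacity and the reduced intermediate data still fits the remaining bandwidth; the first-fit rule, field names and numbers are invented for illustration and are not the paper's exact formulation.

def greedy_schedule(tasks, servers):
    # assign each deep learning task to the first edge server that can still host it;
    # tasks: dicts with 'compute' (edge-layer cost) and 'intermediate' (data to upload)
    # servers: dicts with 'capacity' (remaining compute) and 'bandwidth' (remaining uplink)
    assignments = []
    for t_idx, task in enumerate(tasks):            # single pass over the tasks
        for s_idx, server in enumerate(servers):
            if server["capacity"] >= task["compute"] and server["bandwidth"] >= task["intermediate"]:
                server["capacity"] -= task["compute"]
                server["bandwidth"] -= task["intermediate"]
                assignments.append((t_idx, s_idx))
                break
    return assignments                              # tasks that fit nowhere stay unassigned

tasks = [{"compute": 2, "intermediate": 1}, {"compute": 1, "intermediate": 3},
         {"compute": 4, "intermediate": 2}, {"compute": 1, "intermediate": 1}]
servers = [{"capacity": 5, "bandwidth": 3}, {"capacity": 3, "bandwidth": 4}]
print(greedy_schedule(tasks, servers))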
therefore, the deep learning model is very appropriate for the edge computing environment since it is possible to offload parts of learning layers in the edge and then transfer the reduced intermediate data to the centralized cloud server.thus, in this article, we introduce deep learning for iot into the edge computing environment to improve learning performance as well as to reduce network traffic. thus, because of the different intermediate data size and preprocessing overhead of different deep learning models, we state a scheduling problem to maximize the number of deep learning tasks with the limited network bandwidth and service capability of edge nodes. since we focus on general iot devices without enough energy supplement and high-spec chips, the edge servers are deployed in iot gateways, which have enough service capacity for executing deep learning algorithms.in this section, we first introduce the scenario of deep learning for iot and then present the edge computing framework of deep learning for iot. since the deep learning technology improves the efficiency of processing multimedia information, more and more works have begun to introduce deep learning into multimedia iot services.the most important benefit of deep learning over machine learning is the better performance with large data scale since many iot applications generate a large amount of data for processing. the collected multimedia data size is much larger than traditional structured data size, but it is hard to improve the performance of the network for transferring collected data from iot devices to the cloud service. therefore, edge computing is efficient for deep learning tasks, since the size of extracted features is scaled down by the filters in deep learning network layers. the edge servers load the intermediate data from the lower layers and then transferred data to the cloud server as the input data for the higher layers. meanwhile, as different deep learning networks and tasks have different sizes of intermediate data and computational overhead, efficient scheduling is needed to optimize deep learning for iot in the edge computing structure.the problem of scheduling iot deep learning network layers in edge computing: given an edge computing structure, the scheduling problem attempts to assign maximum tasks in the edge computing structure by deploying deep learning layers in iot edge servers such that the required transferring latency of each task can be guaranteed, denoted by. since the number of tasks is much larger than the number of edge servers and deep learning network layers, the complexity of the proposed algorithm is o(|t|), which is good enough for practical scheduling.in this article, we introduce deep learning for iot into the edge computing environment to optimize network performance and protect user privacy in uploading data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/26.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/26.txt new file mode 100644 index 0000000000000000000000000000000000000000..1410890402940709916d813bb67e85a96638311b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/26.txt @@ -0,0 +1 @@ +federated learning (fl) has emerged as a distributed computing paradigm where multiple agents train a machine learning model in a collaborative manner under the coordination of a server 1 without transferring or disclosing their raw data . 
the agents compute and communicate local updates (typically stochastic gradients) to the model, and then the server performs a global update . the agents can be different organizations, such as medical or financial corporations, which cannot share their confidential data with the server due to legal or business related concerns. although fl has the potential to alleviate privacy risks associated with centralized machine learning, its operation counts on the assistance of the agents who incur privacy, computation, communication and energy costs for their efforts. therefore, compensation of their losses in an effective fashion is vital to ensure their cooperation.in this study, we consider strategic agents, i.e., they are self-interested and rational entities and they seek to maximize their utilities. during each round, each agent locally samples a minibatch of training data points, and sends their stochastic gradients. as a function of his minibatch size choice, each agent incurs a cost and the server compensates him through a reward mechanism. note that the server cannot directly 1 we refer to the server as "she" and an agent as "he".observe or verify the minibatch size choices of the agents. therefore, an agent may attempt to reduce his cost by collecting less data points, while still claiming the same reward from the server. this can increase the noise levels of the stochastic gradients and can severely hamper the training efforts.the described framework diverges from a classical fl setup where the participants are assumed to be submissive clients, who always carry out their dictated tasks according to a pre-established protocol. to address this challenge, we design a reward mechanism which constructs a reference gradient based on the stochastic gradients collected from every participating agent and then rewards each agent based on the distance between each agent's gradient update and the constructed reference gradient. we show that the proposed reward mechanism has a cooperative nash equilibrium where the agents follow the lead of the server. this approach enables a feasible fl training in the presence of strategic agents by ensuring the reliability of local model updates. as a function of the minibatch size choice s k,t , agent k incurs a cost captured by h : r ≥0 → r ≥0 . in addition to the time and power expenses due to computation and transmission of the stochastic gradient x k,t (1) and its communication to the server, h(s k,t ) may also incorporate the effort of data acquisition and associated privacy risks for the agent. ν t ] k×1 .we call an agent's minibatch size choice s k,t cooperative if s k,t = ν t . , s k,t ]. agent k's best response to the minibatch size vector s -k,t is s * k,t ∈ s such that u k,t (s k,t = s * k,t , s -k,t ) ≥ u k,t (s k,t = s k,t , s -k,t ) for all s k,t ∈ s. in particular, the proposed reward mechanism, for x k,t = ⊥ and k -k,t = ∅, has the following form:.if an agent does not participate in distributed learning during round t (x k,t = ⊥ and s k,t = 0), then his reward is equal to 0.ideally, the server would want to determine the reward of agent k according to the distance between his gradient x k,t with the population gradient m t (2) since the expected value of this distance would be inversely proportional to the minibatch size choice of the agent:. the following result demonstrates that the best minibatch size choice for agent k is to be cooperative, s k,t = ν t , given that other agents are also cooperative. 
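before that result, a small numeric sketch of the reward idea above may help: each agent's reward shrinks with the distance between his stochastic gradient and a reference gradient, built here as the leave-one-out mean of the other agents' gradients; the quadratic penalty, the linear cost and all constants are illustrative assumptions rather than the paper's exact mechanism.

import numpy as np

rng = np.random.default_rng(0)
d, n_agents, requested = 5, 4, 8
true_grad = np.ones(d)
unit_cost, lam = 1.0, 10.0             # assumed cost per sample and penalty weight

def stochastic_gradient(minibatch_size):
    # noisy gradient whose variance decreases with the minibatch size
    return true_grad + rng.normal(0, 1.0 / np.sqrt(minibatch_size), size=d)

def reward(own_grad, other_grads, baseline):
    # pay the cooperative cost, minus a penalty for being far from the others' mean
    reference = np.mean(other_grads, axis=0)
    return unit_cost * requested - lam * (np.linalg.norm(own_grad - reference) ** 2 - baseline)

# baseline: expected squared distance to the reference when everyone is cooperative
baseline = d * (1.0 / requested + 1.0 / (requested * (n_agents - 1)))

for s_k in (2, 4, 8, 16):              # agent k deviates while the others stay cooperative
    utilities = []
    for _ in range(2000):
        others = [stochastic_gradient(requested) for _ in range(n_agents - 1)]
        own = stochastic_gradient(s_k)
        utilities.append(reward(own, others, baseline) - unit_cost * s_k)
    print(f"s_k = {s_k:2d}   average utility: {np.mean(utilities):6.2f}")
# in this toy version the average utility peaks at the requested minibatch size,
# mirroring the cooperative behaviour the mechanism above is meant to induce.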
according to theorem 1, the proposed reward mechanism can incentivize the agents to follow the requested minibatch size sequence ν t without rewarding the agents more than their cost, despite the fact that the server can never verify the minibatch size choices of the agents.3depicts the reward of agent k for different minibatch size choices, s k,t ∈ {2, 4, 8, 16}, when ν t = 8 and the other agents are cooperative (s i,t = ν t for i = k). in particular, when the agent follows the request of the server (s k,t = ν t = 8), his reward is equal to his cost (r k,t = 9), which verifies the budget balanced property of the proposed reward mechanism. consequently, it is verified that the proposed reward mechanism has a cooperative equilibrium, since s k,t = ν t is the best response minibatch size choice when the other agents follow the server. conditioned on a strategy vector s t such that s -k,t = 0 and s k,t = 0, it follows that e x k,t -m -k,t. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/260.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/260.txt new file mode 100644 index 0000000000000000000000000000000000000000..52bfedb4745806312f77ed85fab1c5365a508354 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/260.txt @@ -0,0 +1 @@ +stock exchanges are the financial institutions which allow exchange of different types of goods between stock broker components. stock market prediction is the method of determining the future value of a stock or other financial instrument traded on an exchange. a misconception is also associated with people that buying and selling of the stocks/shares in the market is an act of gambling. this misconception can be changed and bringing awareness among people for this.over the past few years, 90 percent of the data in the world has been created as a result of the creation of 2.5 quintillion bytes of data on a daily basis. a very large amount of data is generated by financial market. it's very difficult for a trader to recognize a pattern and then devise an optimal strategy for making decisions. predicting how the stock market will perform is one of the most difficult things to do. there are so many factors involved in the predictionphysical factors vs physiological, rational and irrational behavior, etc. all these aspects combine to make share prices volatile and very difficult to predict with a high degree of accuracy. machine learning can be used as a game changer in predicting the values of stock prices. machine learning techniques have the potential to unearth patterns and insights we didn't see before, and these can be used to make unerringly accurate predictions. the machine learning is growing at a phenomenal pace in today's world. a study which gave a theoretical approch to predict stock prices with the help of basic regresion models was developed by ashish sharma and his colleagues .the study explained the regression models and how their application can be useful in price prediction. this study gave us an idea about how chosing appropriate factors affecting stock price as variables can give some predictions. this work needed an appropriate set of features to predict most accurate value. a study by amit kumar sirohi gave a detailed explanation of an 2-tier model in which first tier gave information about feature selection like opening price, closing price of a stock, etc and the second tier build various kernels on the extracted features. 
this gave us a fair idea about applying suitable features to models. later a study by pushkar khanal and shree raj shakya stated that the support vector machine algorithm gave the best prediction results, with effective accuracy compared to most other machine learning algorithms and traditional technical methods; this study effectively explained how classification can be useful in prediction. a study by yaojun wang and yaoqing wang explained the effect of stock comment information from social media, obtained with the help of social media mining, on the stock price. bringing this factor into consideration along with other important factors led to more accurate results; this study applied svm with an emotion index effectively. later it was realized that deep learning models could improve the accuracy significantly. rohit verma and his colleagues proposed a theoretical approach for using an artificial neural network to predict stock prices. here the results were obtained from the nifty stock index dataset on the basis of values from past days, and an accuracy of 96% was obtained from this study. the s&p 500 is an american stock market index with the market capitalizations of 500 large companies that have common stock listed on nasdaq, nyse, etc., and it is a market-cap weighted index, meaning that the companies with larger stock value have more influence on the index. the data contains the highest value, lowest value, opening value, closing value and volume of traded stocks for a given date, for example the high stock price (highest value of that stock during that day) and the low stock price (lowest value of that stock during that day). using this single value, the future stock price of a company can be predicted using various regression models available in machine learning. here, the x-axis will contain the date of the stock and the y-axis will contain the closing price of a stock. however, the closing price of a stock alone does not give the user a lot of information about the future prices of that particular stock, so a momentum feature is also built from two values: the current day's stock closing price and the previous day's stock closing price. the stock price and momentum of the apple stock dataset were predicted and the accuracies of the various machine learning models were compared and analyzed.
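a minimal sketch of the closing-price / momentum setup described above: the momentum feature is taken as the ratio of the current close to the previous close, and two standard regressors are compared on next-day closing price; the price series here is a random stand-in rather than the apple dataset, and feature scaling is omitted for brevity.

import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score
from sklearn.svm import SVR

rng = np.random.default_rng(1)
df = pd.DataFrame({"close": 150 + np.cumsum(rng.normal(0, 1, 1000))})   # stand-in closing prices

df["momentum"] = df["close"] / df["close"].shift(1)   # current close over previous close
df["target"] = df["close"].shift(-1)                  # next-day closing price
df = df.dropna()

X, y = df[["close", "momentum"]].values, df["target"].values
split = int(0.8 * len(df))                            # train on earlier days, test on later ones
for name, model in {"linear regression": LinearRegression(), "svr": SVR(kernel="rbf", C=10.0)}.items():
    model.fit(X[:split], y[:split])
    print(name, "r2 on held-out days:", round(r2_score(y[split:], model.predict(X[split:])), 3))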
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/261.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/261.txt new file mode 100644 index 0000000000000000000000000000000000000000..155870c2705a6ac040843e30b2a870d67601c8ff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/261.txt @@ -0,0 +1 @@ +in sections 4.2 and 4.3 we will discuss the main theoretical results of this work.here we give a brief preview of them.• in theorem 1 and corollary 1 we establish equivalence between the predictors obtained using ne of eirm game s eirm and invariant predictors s iv .we establish this equivalence for a large class of representations and classifiers, where both can be nonlinear.• in theorem 2, we borrow the generalization result from (arjovsky et al., 2019) and show that same generalization guarantees continue to hold for our setting.following (arjovsky et al., 2019), we assume both classifiers and representations are linear.• in theorem 3, we discuss the role of representation and how in some cases we can reduce the computational expense that one may incur in searching for the representations.we establish this result for a large class of classifiers and invertible representations, where both can be nonlinear.• in theorem 4, we discuss the existence of both the nash equilibria of eirm game and the invariant predictors.we restrict the classifiers to be linear but representations may be nonlinear.in the supplement, we extend the result to nonlinear classifiers.the goal of irm is to use these multiple datasets to construct a predictor f that performs well across many unseen environments e all , where e all ⊇ e tr .define the risk achieved by f in environment e as r e (f ) = e x e ,y e `(f (x e ), y e ) , where ` is the loss when f (x) is the predicted value and y is the corresponding label.invariant predictor: we say that a data representation φ : x → z ⊆ r d elicits an invariant predictor w • φ across environments e ∈ e if there is a classifier w : z → r k that achieves the minimum risk for all the environments w ∈ arg min w∈hw r e (w • φ), ∀e ∈ e. (2016)..where x s e * is the vector x e with indices in s * , g : r |s * | → r is a function to describe the conditional expectation and e ∼ f e , e = 0, e ⊥ x s e * .since the optimal classifier does e not vary across environments, w * • φ * = g is an invariant predictor across all e all (assume g ∈ h w ). (2019)states that if representations and classifiers are from the class of linear models, i.the set of actions of the where w = {w q } q6 =e is the set of choices of all environments but e.if we replace u e with −r e (w , φ), we obtain the inequalities in (4).• finite sum: if w 1 ∈ h w and w 2 ∈ h w , then w 1 + w 2 ∈ h w , where for every z ∈ z, (w 1 + w 2 )(z) = w 1 (z) + w 2 (z) • scalar multiplication: for any c ∈ r and w ∈ h w , cw ∈ h w , where for every z ∈ z, (cw)(z) = c×w(z).for environment e, y e ← z et γ + e , where e is independent 1 of z 1 e , z e ∈ r c and γ ∈ r c .we observe x e , which is a 1 scrambled version of z e and z e , where z e can be correlated.if at least n − r + training r environments e tr ⊆ e all lie in linear general position of degree r, then any predictor obtained from eirm game over the training environments in ŝ eirm is invariant across all the testing environments e all .suppose there exists a φ, {w q } q=1 q such that∀q ∈ e tr w is in the interior of h w , then the corresponding ensemble predictor. 
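as a toy check of the invariance definition quoted above (a representation elicits an invariant predictor if one classifier is simultaneously optimal in every environment), the sketch below fits a per-environment least-squares classifier on top of two candidate representations; the synthetic environments, noise levels and feature layout are assumptions made purely for illustration.

import numpy as np

rng = np.random.default_rng(0)

def make_environment(n, spurious_noise):
    # toy environment: feature 0 causes y, feature 1 is spuriously correlated with y
    z_causal = rng.normal(size=n)
    y = z_causal + 0.1 * rng.normal(size=n)
    z_spurious = y + spurious_noise * rng.normal(size=n)
    return np.column_stack([z_causal, z_spurious]), y

envs = [make_environment(5000, 0.1), make_environment(5000, 1.0)]

def per_environment_classifiers(phi):
    # least-squares classifier on top of representation phi, fitted separately per environment
    return [np.linalg.lstsq(phi(X), y, rcond=None)[0] for X, y in envs]

keep_both = lambda X: X                    # representation keeping both features
keep_causal = lambda X: X[:, :1]           # representation keeping only the causal feature

print("classifiers on both features :", [np.round(w, 2) for w in per_environment_classifiers(keep_both)])
print("classifiers on causal feature:", [np.round(w, 2) for w in per_environment_classifiers(keep_causal)])
# the per-environment optimal classifiers differ when the spurious feature is kept, but
# approximately coincide for the causal-only representation, i.e. that representation
# elicits an invariant predictor in the sense defined above.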
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/262.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/262.txt new file mode 100644 index 0000000000000000000000000000000000000000..cb7bb0e0b22b72e5836eb49416e8c2dedce2766b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/262.txt @@ -0,0 +1 @@ +cyber bullying is when someone uses technology to send threatening or embarrassing messages to another person. bullying on social media can be even worse due to its quick spread to the wider audience. research shows that such behaviour frequently occurred on facebook and twitter sites . out of 80.83 million internet users in bangladesh , more than 90% of social media users are active on facebook where majority is young, vulnerable and in dare need of protection. with being 7th most spoken language and with the popularity of unicode system and growing use of internet , the use of bangla over social media is increasing. but very few works have been done on bangla text for social media activity monitoring due to a lack of a large number of annotated corpora, named dictionaries and morphological analyzer , which demands in-depth analysis on bangladesh's perspective. due to its quick spread to the wider audience, cyber bullying has been receiving a profound interest to the researchers over the past few years. however, bangla, the seventh most widely spoken language in the world and the second most used language in the indian subcontinent, is lagging far behind in bangla cyber bullying research. meanwhile, with progressive affordability of technology and incentives by the concerned governments, bangla document analysis is more relevant now than ever before.in general, the data available on social media are mostly short, noisy, unstructured and sometimes a mix-up of multiple languages, which means that the use of traditional bullying detection methods like guidelines, human moderations, and keyword searches will fall short in social media data . research shows use of machine learning algorithms and sentiment analysis for social media data has better accuracy than the keyword search and textual analysis of contents . but proposed machine learning technique in literature are very content specific. due to linguistic differences between english and non-english contents, performance may vary. besides, the socio-emotional behaviour and user specific information of the study population also has significant impact for cyber bullying detection. for instance, support vector machine (svm), a popular learning method for english text was less accurate on arabic texts compared to nb. hence, this project intends to investigate cyber bullying further to apply on bangla text. the objective of this work is to develop a cyber bullying detection and monitoring scheme suitable for bangla text on social media network. hence, targets of this paper are:1.to design a novel scheme for analysing bangla content on social media by combining text analytics and machine learning algorithms.2.to compare the performance of the module with other available techniques.the rest of the paper is organized as follows. in section ii, existing cyber bullying detection methods are discussed. section iii provides the motivation of the current approach and methodologies used for developing the cyber bullying detection scheme for bangla text. section iv presents the classification outcome of the proposed method and section v contains the concluding remarks of the overall method. 
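The passage above proposes combining text analytics with machine learning (NB, J48, SVM, KNN) to flag bullying in Bangla social-media posts after separating special characters and emoji from the text. Below is a minimal scikit-learn sketch of such a pipeline, assuming a list of already-collected posts with binary labels; the cleaning regex, the TF-IDF settings and the placeholder strings are illustrative assumptions, not the paper's actual preprocessing.

import re
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import cross_val_score
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import LinearSVC

def clean_post(text: str) -> str:
    """Strip emoji and special characters; \w matches Unicode word characters, so Bangla letters are kept."""
    return re.sub(r"[^\w\s]", " ", text)

# hypothetical labelled data: 1 = bullying, 0 = not bullying
posts = ["example bangla post one", "example bangla post two",
         "another harmless post", "an abusive example post"] * 10
labels = [1, 0, 0, 1] * 10

vectorizer = TfidfVectorizer(preprocessor=clean_post, ngram_range=(1, 2))
X = vectorizer.fit_transform(posts)

for name, clf in [("linear svm", LinearSVC()), ("naive bayes", MultinomialNB())]:
    scores = cross_val_score(clf, X, labels, cv=5)
    print(name, "mean cv accuracy:", scores.mean())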
with being 7th most spoken language and with the popularity of unicode system and growing use of internet, the use of bangla over social media is increasing. due to its quick spread to the wider audience, cyber bullying has been receiving a profound interest to the researchers over the past few years.in general, the data available on social media are mostly short, noisy, unstructured and sometimes a mix-up of multiple languages, which means that the use of traditional bullying detection methods like guidelines, human moderations, and keyword searches will fall short in social media data. the objective of this work is to develop a cyber bullying detection and monitoring scheme suitable for bangla text on social media network.to design a novel scheme for analysing bangla content on social media by combining text analytics and machine learning algorithms. section iii provides the motivation of the current approach and methodologies used for developing the cyber bullying detection scheme for bangla text. among the available classification techniques, svm gets notable attention due to better performance in various text classifications. hence, the aim of this research was to explore various machine learning algorithms on bangla text and identify the best performing one for detection of cyber bullying on social media contents.extracted bangla text content was pre-processed and tokenized by separating special character and emoji from text data. the experiments included 2,400 bangla texts collected from social media posts where of the data contained 10% bullying text. overall, the training and testing process was carried in two phases: i) including only text based features (posts/comments) ii) including both text based features and user information. hence, the accuracy of svm, which construct a hyper plane or set of hyper planes in a high or infinite dimensional space, on bangla text justifies the superior performance. conclusion though a notable amount of work has been performed for cyber bullying detection on english text, very few work have been done on bangla text. in this paper revisited four state-ofthe-art supervised machine learning algorithms including nb, j48, svm and knn on bangla text and empirically compared their classification performance. due to high-dimensional input space, few irrelevant features and linearly separable nature of text dataset, svm performs better than other classification algorithm for text classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/263.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/263.txt new file mode 100644 index 0000000000000000000000000000000000000000..965abf4ff025b8d530f9177815255461db129e71 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/263.txt @@ -0,0 +1 @@ +machine learning is about prediction on unseen data or testing data. in machine learning a computer first learn to perform a task by training dataset. then the computer perform the same task with the testing data . in supervised learning we pass both input and output data and the result is already known. supervised learning is of two types classification based and regression based. in this paper we are using classification based supervised learning . knn is a simple algorithm that stores all available cases and classifies based on a similarity measures (e.g distance function) .the implementation of the model includes six basic steps of machine learning that are:1. collect data/prepare data 2. 
choose an algorithm, 3. create the model object, 4. train the model on the training dataset, 5. make predictions on unseen (testing) data, 6. evaluate the model. the dataset contains 150 samples belonging to 3 classes, each with 50 samples. to train the machine we split the dataset into two parts, a training and a testing dataset; the machine is trained on the training dataset and then tested on the testing dataset. we then evaluate the model to see whether it recognizes the iris species accurately or not. machine learning is about prediction on unseen or testing data. in machine learning a computer first learns to perform a task from a training dataset; then the computer performs the same task on the testing data. in supervised learning we pass both input and output data, and the result is already known. collect/prepare data; make predictions on unseen or testing data. the dataset contains 150 samples belonging to 3 classes, each with 50 samples. to train the machine we split the dataset into training and testing parts; the machine is trained on the training dataset and then tested on the testing dataset. for that we collected/prepared the data, which involves data preprocessing and splitting of the data. data preprocessing involves handling missing data, handling categorical data and handling feature scaling. categorical data involves nominal and ordinal data, which can be handled by pandas as well as machine learning tools; missing data and feature scaling are handled with pandas and machine learning respectively. splitting of the dataset produces training data and testing data. we shuffle the data so that there is no particular sequence in either the training or the testing dataset. k-nearest neighbors is the simplest supervised machine learning algorithm; it classifies a data point based on how its neighbors are classified. initially we have some data (training data), which classifies coordinates into groups identified by an attribute, and another set of data points (testing data) that is allocated by analyzing the training data set, with the unclassified points marked as white. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/264.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/264.txt new file mode 100644 index 0000000000000000000000000000000000000000..e0de66ce4020e9bb3403a106b5c5839c25aa9cf0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/264.txt @@ -0,0 +1 @@ +the adoption of e-commerce in modern times has led to higher profitability for merchants and brought more satisfaction to consumers. this has also had a positive impact on the economies of countries worldwide by improving gross domestic product (gdp). pricewaterhousecoopers (pwc) in south africa has reported that online retail shopping sales surpassed a trillion rand (south african currency) for the first time in history, increasing to 1.46 trillion rand in 2016. in addition, the same source reported that, because of the global surge in e-commerce, the collective gdp of the african continent is expected to rise by us$1 trillion by the year 2020, up from us$1.6 trillion in 2010. moreover, the economist intelligence unit (eiu) has forecast real gdp growth of 4.9% from 2012 to 2016 for the african continent, which is well above the average global growth. e-commerce is playing a key role in global economic growth, and the need to keep satisfying customers cannot be over-emphasized.
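Returning to the iris/KNN passage above (file 263.txt): it lists six steps — prepare data, choose an algorithm, create the model object, train on the training split, predict on the test split, evaluate — for the 150-sample, 3-class iris dataset. Here is a minimal scikit-learn sketch of exactly those steps; the 80/20 split ratio and k = 5 are illustrative defaults, not values stated in the paper.

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score

# 1. collect/prepare data: 150 samples, 3 classes, 50 samples each
X, y = load_iris(return_X_y=True)

# 2-3. choose the algorithm and create the model object
knn = KNeighborsClassifier(n_neighbors=5)

# shuffle and split so neither subset keeps any particular class ordering
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, shuffle=True, random_state=0)

# 4. train the model on the training dataset
knn.fit(X_train, y_train)

# 5. make predictions on unseen (testing) data
y_pred = knn.predict(X_test)

# 6. evaluate whether the model recognizes the iris species accurately
print("test accuracy:", accuracy_score(y_test, y_pred))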
however, abundance of e-commerce information in recent years has become a serious challenge for shoppers, because of the inherent difficulty in information discovery. this has led to the emergence of recommendation systems to assist a user in the information discovery. a content-based image recommendation system is an application that uses image features to filter information from all the available sources and displays the appropriate information based on personal preferences that are kept in the user profile . the origin of recommendation systems can be traced to methods in cognitive science, management science, approximation theory and information retrieval, which have since been applied in various human endeavours.in the e-commerce application domain, many methods and principles have been applied to implement recommendation systems of which classification methods have been identified as an important component. product classification, which involves the association of classes with related products from a large number of merchants, is one of the most important processing tasks of content-based recommendation systems. apart from user profiling , classification methods have been found useful in many recommendation applications such as product image retrieval , product taxonomy browsing , increase scalability and improve overall recommendation accuracy.meanwhile, most of the accuracies reported on product classification studies rely heavily on text tagging, which is a traditional product data representation method . however, product classification based on text tagging is plagued with several hiccups such as overlapping text across product classes , labour intensiveness , discrepancy in vocabulary usage , spelling error and undescriptive nature of texts . while incremental improvement can be achieved by trying new methods that exploit textual features , current research efforts have shifted focus to image based product classification models with diverse applications in various fields of life . according to the pattern recognition theory, feature extraction and feature recognition methods play major roles in the classification process. as accuracy is one of the common assessments of performances, several researchers in the recommendation domain have carried out interesting studies to enhance the performance of product classification.in contrast to text tagging, image based product classification involves the use of images for product representation and classification model. the high dimensionality of extracting image features, limited content analysis, artefacts, inhomogeneity and other nuance factors often inhibit image-based classification performance . a significant volume of researches have been channelled towards this direction, nevertheless, the number of image-classes investigated and accuracies reported still leave much to be desired for real-time applications. in acknowledging the complexity of these problems, an initial attempt is made to solve a simpler sub-problem of image content-based recommendation by proposing enhanced product image classification architecture. among the core algorithms used in the proposed architecture is the ensembles of artificial neural network trained with the eigen-based colour features. 
the propose architecture is used to classify product images into 100 classes.specifically, effort in this work is channelled toward actualising an effective image-based classification model that majorly relies on an ensemble of artificial neural network (ann) and potential of efficient eigen-based image feature representation. the extraction of colour product features from a product image is performed using the histogram of oriented gradient (hog) and uniform linear binary pattern (ulbp) feature extraction methods. the eigenbased algorithm is then applied to extract a dimensionality reduced product image features. we applied data partitioning (cross validation) scheme that splits the initial data into training, validation and testing subsets in the proportion of 70%:15%: 15% (70% of the dataset is used for training the network, while 15% each of the remaining datasets is used for validation and testing), were experimentally explored on the pi100 categorization data set . we chose this database because it has been widely used in ecommerce research for product image classification and it is freely available for research purpose.the overarching objective of the study at hand is to generate an efficient image-based classification model that can be used to deliver an effective user-centric categorical preferences or be integrated with any conventional recommendation system to improve its quality. in realising this singular objective, four different experiments were performed to establish the appropriateness of the product image classification models for the proposed architecture. this paper makes three major contributions enunciated as follows:(1). propose an eigen-based product image feature extraction algorithm that delivers an effective product image representation for large product classes. the results of this method can serve as an image segmentation approach in e-commerce applications such as recommender systems to resolve inherent limited content analysis problem. (2). the final classification model obtained from this work can be easily integrated with any other decision supporting applications in e-commerce domain to improve its quality. (3). the proposed eigen-based product image feature extraction algorithm is evaluated quantitatively on experimental images acquired from publicly available product images using the standard accuracy and mean squared error metrics.this work further stresses the fact that an ensemble of machine learning classifiers gives better results than any classifier used in isolation. the rest of this paper is structured as follows. in section 2, relevant literature is discussed. section 3 describes in detail our proposed architecture as well as materials and methods. then we describe the experiments carried out to validate the performance of the various methods for the tasks of product image classification in section 4. in section 5 discussions of results are presented. the paper is finally concluded in section 6 by giving a brief discussion about the future directions. apart from user profiling, classification methods have been found useful in many recommendation applications such as product image retrieval, product taxonomy browsing, increase scalabilityand improve overall recommendation accuracy. 
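The architecture described above extracts HOG and uniform LBP colour features, reduces them with an eigen-based step, and classifies with an ensemble of artificial neural networks under a 70/15/15 split. The sketch below roughly follows that flow on random stand-in images: PCA stands in for the eigen-based reduction, a handful of small MLPs averaged together stand in for the ANN ensemble, and the PI100 loading, ULBP features and exact network sizes are omitted or assumed.

import numpy as np
from skimage.feature import hog
from sklearn.decomposition import PCA
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
# stand-in for PI100 product images: 200 grayscale 64x64 images over 4 classes
images = rng.random((200, 64, 64))
labels = rng.integers(0, 4, size=200)

# HOG descriptor per image (ULBP features would be concatenated here as well)
features = np.array([hog(img, pixels_per_cell=(8, 8), cells_per_block=(2, 2)) for img in images])

# eigen-based dimensionality reduction (plain PCA used as a stand-in)
reduced = PCA(n_components=50).fit_transform(features)

# 70% train / 30% held out (the paper further splits the held-out part into validation and test)
X_train, X_test, y_train, y_test = train_test_split(reduced, labels, test_size=0.3, random_state=0)

# ensemble of small neural networks: average their class probabilities
ensemble = [MLPClassifier(hidden_layer_sizes=(64,), max_iter=500, random_state=s).fit(X_train, y_train)
            for s in range(5)]
proba = np.mean([m.predict_proba(X_test) for m in ensemble], axis=0)
y_pred = proba.argmax(axis=1)
print("held-out accuracy:", (y_pred == y_test).mean())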
while incremental improvement can be achieved by trying new methods that exploit textual features, current research efforts have shifted focus to image based product classification models with diverse applications in various fields of life.in contrast to text tagging, image based product classification involves the use of images for product representation and classification model. in acknowledging the complexity of these problems, an initial attempt is made to solve a simpler sub-problem of image content-based recommendation by proposing enhanced product image classification architecture. the extraction of colour product features from a product image is performed using the histogram of oriented gradient (hog) and uniform linear binary pattern (ulbp) feature extraction methods. propose an eigen-based product image feature extraction algorithm that delivers an effective product image representation for large product classes. (3). artificial neural network (ann) and support vector machine (svm) are two popular classifiers that have been applied to e-commerce product image classification and decision supporting tasks with some degree of success.to further improve image based product classification accuracy, many researchers have investigated the technique of combining the predictions of multiple classifiers which is known as ensemble learning.1% respectively. according to the authors, combining the classifiers improved the overall accuracy to 86. apparently, some of the foregoing research efforts have led to significant improvements in the performance of product classification methods. the goal of any supervised multiclass classification algorithm is to classify an unseen image i as belonging to one class out of all the existing k image classes using the feature vector obtained from the image i.in this paper, we proposed an enhanced colour image based product classification architecture, which comprises of a median filter, the newly developed eigen colour feature (chog-ecf and culbp-ecf) and artificial neural network ensemble for product classification in e-commerce domain. we have been able to show that it is possible to push low classification accuracy that characterizes existing product image classifiers to a useable level with ensemble strategy and dimensionally reduced colour features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/265.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/265.txt new file mode 100644 index 0000000000000000000000000000000000000000..a26e1630d87065a7ad1df05b1246a6bf5647d309 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/265.txt @@ -0,0 +1 @@ +keratoconus is a noninflammatory corneal disorder which often affects both eyes. keratoconus affects approximately 45 per 100,000 individuals in the us .in clinics, more advanced stages of keratoconus cases can usually be detected easily because of the manifestation of obvious signs, however, detecting early stage and suspect keratoconus cases is challenging due to unclear manifestation of disease, requiring a more comprehensive assessment of corneal characteristics , . keratoconus emerges in all races and genders.keratoconus involves the deformation of the cornea to a conical shape, followed by the thinning of the stroma. the thinned cornea determines the emergence of an uneven astigmatism which is often challenging to be managed and typically leads to the worsening of sight. 
as can be seen in figure 1, the keratoconus cornea may have a cone shaped structure with uneven thinning at some regions. progressive keratoconus may cause a gradual decline in vision and eventually impacts the quality of the patient's life.the distortion of the cornea results in irregular astigmatism along with myopia, leading to decreased visual accuracy. keratoconus usually develops during puberty and becomes stabilized in the fourth decade of life .the possible mechanisms, including the genetic and enzymatic ones, have been studied on a large scale; even so, the etiology of keratoconus currently remains unclear and the major factors that trigger the disease are largely unknown .keratoconus symptoms may vary partly due to the disease stage. when the symptoms are obvious, keratoconus can be simply diagnosed by an ophthalmologist. however, since the symptoms are not obvious in suspect keratoconus cases or those in the early stages of the disease, the diagnosis becomes challenging.in clinics, more advanced stages of keratoconus cases can usually be detected easily because of the manifestation of obvious signs, however, detecting early stage and suspect keratoconus cases is challenging due to unclear manifestation of disease, requiring a more comprehensive assessment of corneal characteristics,.however, developing such models is challenging since there is a significant variability in corneal parameters of normal eyes and eyes with early stage keratoconus.included in our results below, we provide a comprehensive summary of previous work on major machine learning models including multi-layer perceptron, support vector machine (svm), unsupervised machine learning algorithms, artificial neural networks, radial basis networks, convolutional neural networks and decision tree techniques that have been developed to detect keratoconus (figure2). the algorithm was built on these previous findings in order to target the development and validation of machine learning algorithms that identify early stage keratoconus at high accuracy levels using large scale multicenter datasets collected from multiple corneal clinics. to avoid confusing the machine learning classifiers with correlated or non-discriminative corneal parameters (features), we performed feature selection prior to applying machine learning models.83, respectively. we compared the accuracy of the machine learning models and selected a subset of corneal parameters to achieve the best performing model for keratoconus detection. these machine learning models can augment clinical practice and aid ophthalmologist to better identify keratoconus particularly those at higher risk of developing the disease or those currently at the early stages of the disease. most promising corneal parameters in detecting keratoconus were higher order irregular astigmatism, maximum keratometric power (in 10 mm region), best fit sphere (vertical axis), highest irregularity parameter (in 5 mm region), standard deviation of pachymetry (in 5 mm region), higher-order aberrations (in 4 mm region), and aberration parameters (in coma and sphere orders 5). we implemented all these models and ultimately selected an svm model, as the best performing learning scheme for detecting keratoconus from oct-based corneal parameters. the proposed model using only normal and suspect keratoconus eyes and only normal and keratoconus eyes and achieved an accuracy of 93% and 98. 
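The keratoconus passage describes selecting a subset of discriminative corneal parameters before fitting several classifiers and ultimately choosing an SVM. Below is a small sketch of that two-stage idea (univariate feature selection followed by an RBF SVM under cross-validation); the synthetic arrays merely stand in for the OCT-derived corneal parameters, which are not reproduced here, and k = 8 is an arbitrary illustrative choice.

import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.svm import SVC
from sklearn.model_selection import cross_val_score

rng = np.random.default_rng(0)
X = rng.normal(size=(300, 40))      # 300 eyes, 40 corneal parameters (stand-in values)
y = rng.integers(0, 2, size=300)    # 0 = normal, 1 = keratoconus / suspect keratoconus

pipe = Pipeline([
    ("scale", StandardScaler()),
    ("select", SelectKBest(score_func=f_classif, k=8)),  # keep the 8 most discriminative parameters
    ("svm", SVC(kernel="rbf", C=1.0)),
])
scores = cross_val_score(pipe, X, y, cv=5)
print("cross-validated accuracy:", scores.mean())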
this reflects that the model is highly accurate in detecting keratoconus eyes as well as highly proficient in detecting suspect keratoconus eyes.in summary, we developed several machine learning models and performed feature selection to identify both best performing model and a subset of most promising corneal parameters in detecting both preclinical and clinical keratoconus. the selected machine learning model had a high accuracy in identifying both suspect and early stage keratoconus eyes. the automated keratoconus model we developed can augment clinical practice aid corneal specialists in identifying those at higher risk of developing keratoconus or at the early stage of the disease. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/266.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/266.txt new file mode 100644 index 0000000000000000000000000000000000000000..c21c4af7b688f477baf12d5eb7580a107b73d71c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/266.txt @@ -0,0 +1 @@ +today, very large amounts of information are available in on-line documents. as part of the effort to better organize this information for users, researchers have been actively investigating the problem of automatic text categorization.the bulk of such work has focused on topical categorization, attempting to sort documents according to their subject matter (e.g., sports vs. politics). however, recent years have seen rapid growth in on-line discussion groups and review sites (e.g., the new york times' books web page) where a crucial characteristic of the posted articles is their sentiment, or overall opinion towards the subject matter -for example, whether a product review is positive or negative. labeling these articles with their sentiment would provide succinct summaries to readers; indeed, these labels are part of the appeal and value-add of such sites as www.rottentomatoes.com, which both labels movie reviews that do not contain explicit rating indicators and normalizes the different rating schemes that individual reviewers use. sentiment classification would also be helpful in business intelligence applications (e.g. mindfuleye's lexant system 1 ) and recommender systems (e.g., terveen et al. (1997), tatemura (2000)), where user input and feedback could be quickly summarized; indeed, in general, free-form survey responses given in natural language format could be processed using sentiment categorization. moreover, there are also potential applications to message filtering; for example, one might be able to use sentiment information to recognize and discard "flames" (spertus, 1997).in this paper, we examine the effectiveness of applying machine learning techniques to the sentiment classification problem. a challenging aspect of this problem that seems to distinguish it from traditional topic-based classification is that while topics are often identifiable by keywords alone, sentiment can be expressed in a more subtle manner. for example, the sentence "how could anyone sit through this movie?" contains no single word that is obviously negative. (see section 7 for more examples). thus, sentiment seems to require more understanding than the usual topic-based classification. 
so, apart from presenting our results obtained via machine learning techniques, we also analyze the problem to gain a better understanding of how difficult it is.our aim in this work was to examine whether it suffices to treat sentiment classification simply as a special case of topic-based categorization (with the two "topics" being positive sentiment and negative sentiment), or whether special sentiment-categorization methods need to be developed.despite its simplicity and the fact that its conditional independence assumption clearly does not hold in real-world situations, naive bayes-based text categorization still tends to perform surprisingly well(lewis, 1998); indeed,domingos and pazzani (1997)show that naive bayes is optimal for certain problem classes with highly dependent features. because training maxent is expensive in the number of features, we limited consideration to (1) the 16165 unigrams appearing at least four times in our 1400document corpus (lower count cutoffs did not yield significantly different results), and (2) the 16165 bigrams occurring most often in the same data (the selected bigrams all occurred at least seven times). however, the definition of the maxent feature/class functions f i,c only reflects the presence or absence of a feature, rather than directly incorporating feature frequency. in order to investigate whether reliance on frequency information could account for the higher accuracies of naive bayes and svms, we binarized the document vectors, setting n i (d) to 1 if and only feature f i appears in d, and reran naive bayes and sv m light on these new vectors. note that bigrams and unigrams are surely not conditionally independent, meaning that the feature set they comprise violates naive bayes' conditional-independence assumptions; on the other hand, recall that this does not imply that naive bayes will necessarily do poorly(domingos and pazzani, 1997).line (3) of the results table shows that bigram information does not improve performance beyond that of unigram presence, although adding in the bigrams does not seriously impact the results, even for naive bayes. this would not rule out the possibility that bigram presence is as equally useful a feature as unigram presence; in fact,pedersen (2001)found that bigrams alone can be effective features for word sense disambiguation. 12 this serves as a crude form of word sense disambiguation(wilks and stevenson, 1998): for example, it would distinguish the different usages of "love" in "i love this movie" (indicating sentiment orientation) versus "this is a love story" (neutral with respect to sentiment). however, the effect of this information seems to be a wash: as depicted in line (5) of figure3, the accuracy improves slightly for naive bayes but declines for svms, and the performance of maxent is unchanged. indeed, line (7) shows that simply using the 2633 most frequent unigrams is a better choice, yielding performance comparable to that of using (the presence of) all 16165 (line (2)).position an additional intuition we had was that the position of a word in the text might make a difference: movie reviews, in particular, might begin with an overall sentiment statement, proceed with a plot discussion, and conclude by summarizing the author's views.on the other hand, we were not able to achieve accuracies on the sentiment classification problem comparable to those reported for standard topic-based categorization, despite the several different types of features we tried. 
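The sentiment passage reports that binarized unigram-presence vectors worked at least as well as frequency counts for movie-review polarity with naive Bayes and SVMs. The sketch below contrasts the two representations (CountVectorizer's binary flag gives the presence encoding); the toy reviews are placeholders for the 1400-document corpus, so the printed numbers are not the paper's results.

from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import LinearSVC
from sklearn.model_selection import cross_val_score

docs = ["i loved this movie", "how could anyone sit through this movie",
        "a wonderful story", "a dull and tedious film"] * 25
labels = [1, 0, 1, 0] * 25   # 1 = positive, 0 = negative

for feat_name, binary in [("frequency", False), ("presence", True)]:
    X = CountVectorizer(binary=binary).fit_transform(docs)
    for clf_name, clf in [("naive bayes", MultinomialNB()), ("svm", LinearSVC())]:
        acc = cross_val_score(clf, X, labels, cv=5).mean()
        print(f"{feat_name:9s} features, {clf_name:11s}: {acc:.3f}")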
unigram presence information turned out to be the most effective; in fact, none of the alternative features we employed provided consistently better performance once unigram presence was incorporated. but i loved it. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/267.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/267.txt new file mode 100644 index 0000000000000000000000000000000000000000..1454f08e67cfc614d2fea5d8cd53ec900bb0bf61 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/267.txt @@ -0,0 +1 @@ +android smartphone have become more and more popular since they has many uses and functionalities, where 2013 android grew to a very large number: 87% . also, the number of android malware is growing too, malware smartphone increase 26% in 2014 and 97% of smartphone malware is on android . in this paper we purpose to detect malware using machine learning and apis class feature in android operating system. this research uses some features to detect malware, there are 16 apis classes and 51 packages that used in this research. this feature is used by machine learning to decide benign and malware application, because more feature is better than few feature for output precision rate. there are three algorithms that used in this paper: svm, j48, and random forest because they generate excellent rate as reported by previous researches. also, the number of android malware is growing too, malware smartphone increase 26% in 2014 and 97% of smartphone malware is on android. in this paper we purpose to detect malware using machine learning and apis class feature in android operating system. this feature is used by machine learning to decide benign and malware application, because more feature is better than few feature for output precision rate.also some methods has been developed to detect malware using apis, kim et al.also proposed to get both information from manifest file and information of api calls combined with different components as feature set.this paper propose to detect malware using apis, where apis form malware and benign application or games will be classified by android software categories (see tableiiandroid software categories for more information). using of machine learning methods could detect malware quite effectively and efficiently, and machine learning methods can be used to create more effective antimalware software, capable of detecting even zero-day attacks. machine learning method will be affected by the algorithm, so we compare correlation for malware detection using machine learning (tablev). from tablev, we can see that support vector machine, tree j48, and tree random forest are algorithms which often give the best result in research related to malware detection, in this paper we use them to process dataset.implementation is performed by using 412 sample android apps with 205 benign applications and 207 malware applications (tableiv), using 16 apis class (tableiii) and 51 packages from that apis class.7% or 200 of 207 sample malware) use telephonymanager, where package can be used by malware to steal identity of smartphone, such as device id, serial number, phone number, etc.this apis package class data will be used by machine learning to see rate rating classification between benign and malware app. in this paper, we use three machine learning algorithms: support vector machine (svm), j48, and random forest, and use two test option: cross validation and percentage split. 
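The malware-detection passage above builds feature vectors from 16 API classes / 51 packages per app and compares SVM, J48 and random forest using both cross-validation and a percentage split. A hedged scikit-learn sketch of that comparison follows: DecisionTreeClassifier stands in for Weka's J48, the 66/34 split is an assumed percentage split, and the feature matrix is random stand-in data rather than the 412 real applications.

import numpy as np
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score, train_test_split
from sklearn.metrics import precision_score

rng = np.random.default_rng(0)
X = rng.integers(0, 2, size=(412, 51))   # one row per app, one column per API package (present/absent)
y = rng.integers(0, 2, size=412)         # 0 = benign, 1 = malware

classifiers = {
    "svm": SVC(kernel="linear"),
    "j48-like decision tree": DecisionTreeClassifier(),
    "random forest": RandomForestClassifier(n_estimators=100),
}
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.34, random_state=0)
for name, clf in classifiers.items():
    cv_acc = cross_val_score(clf, X, y, cv=10).mean()                       # 10-fold cross-validation
    prec = precision_score(y_test, clf.fit(X_train, y_train).predict(X_test))  # precision on the split
    print(f"{name}: cv accuracy {cv_acc:.3f}, split precision {prec:.3f}")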
output machine learning is precision rate, where this rate provides a level of precision in the classification benign and malware app. summary in this paper, we present that detection malware using apis class and package can detect malware, and using machine learning with three algorithm: svm, j48, and random forest to get precision rate. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/268.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/268.txt new file mode 100644 index 0000000000000000000000000000000000000000..ce15742b785917ee1fc54e75d6ad9244d9d412f4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/268.txt @@ -0,0 +1 @@ +the people communicate through language, whether written or spoken. they often use this language to describe the visual world around them. images, signs are another way of communication and understanding for the physically challenged people. the generation of descriptions from the image automatically in proper sentences is a very difficult and challenging task , but it can help and have a great impact on visually impaired people for better understanding of the description of images on the web. a good description of an image is often said for 'visualizing a picture in the mind'. the creation of an image in mind can play a significant role in sentence generation. also, human can describe the image after having a quick glance at it. the progress in achieving complex goals of human recognition will be done after studying existing natural image descriptions.this task of automatically generating captions and describing the image is signifi-cantly harder than image classification and object recognition. the description of an image must involve not only the objects in the image, but also relation between the objects with their attributes and activities shown in images . most of the work done in visual recognition previously has concentrated to label images with already fixed classes or categories leading to the large progress in this field. eventually, vocabularies of visual concepts which are closed, makes a suitable and simple model for assumption.these concepts appear widely limited after comparing them with the tremendous amount of thinking power which human possesses. however, the natural language like english should be used to express above semantic knowledge, that is, for visual understanding language model is necessary. in order to generate description from an image, most of the previous attempts have suggested to combine all the current solutions of the above problem. whereas, we will be designing a single model which takes an image as an input and is trained for producing a sequence of words where each word belongs to the dictionary that describes the image suitably as shown in fig. 1.the relation between visual importance and descriptions moves to the text summarization problem in natural language processing (nlp) . the important goal of text summarization is selecting or generating an abstract for document. in problem of image captioning, for any image we would like to generate a caption which will describe various features of that image .this paper proposes a model capable of generating novel descriptions from images. for this task, we have used flickr 8k dataset consisting of 8000 images and five descriptions per image. the fig. 2 illustrates the dataset structure in which an image is having five natural language captions. in this work, we are using cnn as well as rnn. 
pre-trained convolutional neural network (cnn) is used for the image classification task. this network acts as an image encoder. the last hidden layer is used as an input to recurrent neural network (rnn). this network is a decoder which generates sentences. sometimes, the generated sentence seems to lose track or predict wrong sentence than that of the original image content. this sentence is generated from description that is common in dataset and the sentence is weakly related to input image. this task of automatically generating captions and describing the image is signifi-cantly harder than image classification and object recognition. the description of an image must involve not only the objects in the image, but also relation between the objects with their attributes and activities shown in images. whereas, we will be designing a single model which takes an image as an input and is trained for producing a sequence of words where each word belongs to the dictionary that describes the image suitably as shown in fig. in problem of image captioning, for any image we would like to generate a caption which will describe various features of that image.have suggested multimodal recurrent neural network model which is used for description generation of an image. the pretrained cnn is used for image classification and the last layer of network is used as input to rnn decoder. in this approach, a neural model is designed which generates descriptions for image in natural language. the variable i l represents the input gate, f l represents the forget gate, o l represents the output gate in the lstm cell, c l represents state of memory cell unit and m l represents hidden state which is the output of the block generated after processing in lstm, x l represents a parameter of sequence at time step l and variable w [. applying this principle while generating the captions, the target is to maximize the amount of the image caption generated given an image, namely.where x i denotes an image, s 1:li represents group of words in properly formed sentence of length l i and θ represents model parameters.the image caption model has three sub models, first one is image model which repeats the image feature vector 28 times having dimension 28 x 4096 here 28 represents the maximum number of words in a caption. the second one is language model consisting of single lstm unit and outputs the matrix having dimension 28 x 256, 256 is the output size of lstm unit and the final model merge these two vectors and pass it to another lstm unit having output dimension 28 x 915. for training we pass same encoded text vector as target vector but while testing we just encode "sol" to feature vector along with test image feature vector and we get matrix of dimension 28 x 915 and we decode that matrix into sequence words.this work presents a model, which is a neural network that can automatically view an image and generate appropriate captions in natural language like english.• description without errors • description with minor errors • description somewhat related to image • description unrelated to image the categories in results are due to neighborhood of some particular words, i. 
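The captioning passage above feeds the last hidden layer of a pretrained CNN encoder into an LSTM decoder that emits one caption word per step. Below is a skeletal PyTorch sketch of that encoder-decoder wiring; the vocabulary size (915), embedding width and hidden size are placeholders echoing the dimensions mentioned in the text, the backbone is an untrained ResNet-18 standing in for the paper's pretrained CNN, and Flickr8k training plus the "sol"/end-of-sentence handling are omitted.

import torch
import torch.nn as nn
from torchvision import models

class CaptionModel(nn.Module):
    def __init__(self, vocab_size=915, embed_dim=256, hidden_dim=256):
        super().__init__()
        cnn = models.resnet18()                                   # pretrained weights would be loaded in practice
        self.encoder = nn.Sequential(*list(cnn.children())[:-1])  # drop the classification head
        self.img_proj = nn.Linear(512, embed_dim)                 # project CNN features to the embedding size
        self.embed = nn.Embedding(vocab_size, embed_dim)
        self.lstm = nn.LSTM(embed_dim, hidden_dim, batch_first=True)
        self.out = nn.Linear(hidden_dim, vocab_size)

    def forward(self, images, captions):
        feats = self.encoder(images).flatten(1)            # (batch, 512)
        img_token = self.img_proj(feats).unsqueeze(1)      # treat the image as the first "word"
        words = self.embed(captions)                       # (batch, seq_len, embed_dim)
        seq = torch.cat([img_token, words], dim=1)
        hidden, _ = self.lstm(seq)
        return self.out(hidden)                            # scores over the vocabulary at each step

# shape check on dummy data: 2 images, 27 caption tokens (plus the image token = 28 steps)
model = CaptionModel()
logits = model(torch.randn(2, 3, 64, 64), torch.randint(0, 915, (2, 27)))
print(logits.shape)   # torch.Size([2, 28, 915])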
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/269.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/269.txt new file mode 100644 index 0000000000000000000000000000000000000000..87de51ef7374cc534a2c2dc6f3537ef82cae5e23 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/269.txt @@ -0,0 +1 @@ +observing progressive improvement in various fields of pattern recognition with end-to-end deep learning based methods (krizhevsky et al., 2012;girshick, 2015), self-driving researchers try to revolutionize autonomous car field with the help of end-to-end deep learning techniques (bojarski et al., 2016b;chen et al., 2015;. impressive results have been acquired by mapping camera images directly to driving control commands (bojarski et al., 2016b) with simple structure similar to ones for image classfication task (simonyan & zisserman, 2014). further researches were conducted to improve the performance of deep learning based autonomous driving system, for example, conditional imitation learning approach has been proposed to solve the ambigious action problem. however, two crutial problems failed to be spotted: (1) poor generalization ability of unobserved driving environment given limited diversity of training scenerios. for example, though addressed the driving direction selection problem, it showed poor generalization ability in unseen test town which has different map and building structure than training town's. this generalization problem is extremely important since collected driving dataset always has limitation of diversity (2) current end-to-end autonomous approaches lack of accident explanation ability when these models behave unexpectedly. although saliency map based visualization methods (smilkov et al., 2017;sundararajan et al., 2017;springenberg et al., 2014;bojarski et al., 2016a) have been proposed to dig into the 'black box', the only information these methods could bring is the possible attention of the model instead of the perception process of the model.we proposed a new driving approach to solve the two aforementioned problems by using multi-task basic perception knowledge. we argue that when end-to-end model is trained to address a specific difficult task, it's better to train the model with some basic knowledge to solve relevant easier tasks before (pan et al., 2010). an analogy for this can be observed when human beings learn a difficult knowledge. for example, to solve a complex integration problem, compared with students without basic math knowledge, students who know about basic knowledge of math are able to learn the core of intergration more quickly and solve other similar integration problems instead of memorizing the solution of the specific problem.our proposed model consists of two modules: perception module and driving module as in fig. 1. the perception module is used for learning easier driving-related perception knowledge, which we refer as ability of pixel level understanding of input including what & where and how far knowledge. we trained perception module with segmentation map and depth map first, while the former serves as what & where knowledge and the latter serves as how far knowledge. by visualizing inferenced segmentation and depth results whether perception process works well or not could be inferred. 
after the perception module was trained to have ability of pixel level understanding of its image input, we freezed the perception module weights and trained driving module with driving dataset. this decomposition of end-to-end driving network strucuture is considered to be mediated perception approach (ullman, 1980). with our proposed driving structure and stepwise training strategy, the generalization and accident explanation problems were addressed to a certain extent. after the perception module was trained to have ability of pixel level understanding of its image input, we freezed the perception module weights and trained driving module with driving dataset.one problem is that aforementioned methods were tested in dissimlar driving scenerios using different driving dataset, thus it's hard to determine if model itself is the source of the better driving behavior instead of effectiveness of data(sun et al. though saliency-map based visualization methods(bojarski et al. the perception module is aimed at extracting encoded feature map containing pixel level understanding information for driving module and qualitative explanation when proposed model doesn't work as expected by visualizing the predicted segmentation and depth maps to determine if the driving problem is caused by percept process or driving process. for each driving guidance direction, there is a driving branch(which predicts the value of driving controls) corresponding to it, therefore there are four driving guidance branches totally. the output of second last layer in perception module is inputted to the driving module, therefore the training of which could benefit from the multi-knowledge extracted by the perception module.dataset for training our model could be categorized into 2 items: (1) perception module training dataset (2) driving module training dataset. for fair comparison, we use same driving dataset published by conditional imitation learning ) except that we collected extra segmentation and depth maps in train town for training our proposed perception module.we trained the whole system using a step-wise training method which is firstly we trained the perception module with multi-task basic perception knowledge, then we freezed the weights of perception module and train driving module with driving dataset.since we observed our training model has better generalization ability in unseen town comparing with other methods when almost the same driving dataset were used to train (except that we collected extra depth maps and segmentation maps in same training environments), we want to investigate the origin of the better generalization ability.• our original proposal: firstly train perception module with basic knowledge, after training perception module, freeze its weights and train driving module with driving dataset • compared method: train the encoder of perception module and driving module together with driving dataset. basic knowledge of segmentation map and depth map are output from the perception module during test phase, therefore how the driving module percepts the current scenario could be known by simply visualizing the outputs of segmentation and depth from perception module. 
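The driving passage above trains a perception module on segmentation and depth first, then freezes its weights and trains the driving module on top of the perception features. Here is a minimal PyTorch illustration of that step-wise strategy; the tiny convolutional encoder, the three-output control head and the dummy tensors are placeholders for the real modules and dataset.

import torch
import torch.nn as nn

perception = nn.Sequential(nn.Conv2d(3, 16, 3, padding=1), nn.ReLU(),
                           nn.AdaptiveAvgPool2d(1), nn.Flatten())        # stands in for the trained perception encoder
driving = nn.Sequential(nn.Linear(16, 64), nn.ReLU(), nn.Linear(64, 3))  # e.g. steer / throttle / brake head

# step 1 (not shown): train `perception` on segmentation + depth targets

# step 2: freeze the perception weights and train only the driving module on the driving dataset
for p in perception.parameters():
    p.requires_grad = False
perception.eval()
optimizer = torch.optim.Adam(driving.parameters(), lr=1e-4)

images = torch.randn(8, 3, 64, 64)   # dummy camera frames
controls = torch.randn(8, 3)         # dummy control labels
with torch.no_grad():
    feats = perception(images)       # frozen multi-knowledge features
loss = nn.functional.mse_loss(driving(feats), controls)
loss.backward()                      # gradients reach only the driving module
optimizer.step()
print("driving loss:", loss.item())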
in this comparison we achieved counter-intuition results: after fine-tune the weights of the perception module the driving model achieved worse results than original method which freeze the weights of perception module when training the driving module.one possible reason is that the generalization ability lies in the perception module instead of the driving module, therefore when we train the perception module again with driving dataset, the ability of generating compressed multi-knowdege information is destoryed. one interesting result we acquired by comparing different train strategies is that the generalization ability of driving origins from basic knowledge and lies in weights of the perception module which should not be modified during training with driving dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/27.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/27.txt new file mode 100644 index 0000000000000000000000000000000000000000..55e6cc9bd031542cd1f80d3585220069be03d9cb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/27.txt @@ -0,0 +1 @@ +graphs are a great data type that can perfectly model entities and their interactions. therefore, they are everywhere in our daily life, such as social networks and natural science. nowadays, graph neural networks (gnns) have been drawing increasing research attention because of their great success in various applications . however, in many real-world scenarios, such as chemistry and health care, it could be very expensive to collect a sufficient amount of labeled data to facilitate the training of gnns, which limits the performance of gnns.active learning is a framework to resolve this challenge, which allows dynamic query of new node labels from unlabeled node sets given a limited budget, and has been shown to be very helpful in various learning problems . al algorithm generally consists of two main components: a query system to select the most valuable instances for the downstream task, and an oracle to label the selected sample. in recent years, several frameworks like proposes al algorithms for graphs in order to handle the unique challenge when applying active learning to graph data: graph data is not independent and identically distributed, which requests us to consider how to incorporate graph structure into the query system design to capture the correlation among different nodes.however, existing graph active learning query functions only use properties of the unlabeled nodes to determine the next label to be queried . in many real-world data sets, however, we often do not have control over the distribution of the available labeled set of nodes. the available labeled node set could be biased (e.g. most of them belong to a single class) or isolated (e.g. all of them are in a single isolated sub-graph). if we need to query a new label, the label which is not as valuable given an unbiased labeled set could turn out to be very valuable given a biased labeled set. 
hence, designing label query functions that consider the influence of the current labeled set on the value of candidates for label queries will be promising in selecting the most valuable labels.in this work, inspired by the idea in that a more diverse training set improves the performance of active learning, we propose three novel active learning scores for graphs based on node dissimilarity: feature dissimilarity score (fds), structure dissimilarity score (sds), and embedding dissimilarity score (eds). those scores directly evaluate how dissimilar a candidate node for label queries is with respect to existing labeled nodes. we conduct extensive experiments to demonstrate that those scores improve the performance of node classification tasks of gcn by about 2.1% when added to the other conventionally-used active learning scores. meanwhile, we also conduct some ablation studies where we replace gcn backbone with other well-known graph neural network variants such as gat and sgc . results show that our methods are generalizable to those gnn variants.active learning is a framework to resolve this challenge, which allows dynamic query of new node labels from unlabeled node sets given a limited budget, and has been shown to be very helpful in various learning problems. in recent years, several frameworks likeproposes al algorithms for graphs in order to handle the unique challenge when applying active learning to graph data: graph data is not independent and identically distributed, which requests us to consider how to incorporate graph structure into the query system design to capture the correlation among different nodes.in this work, inspired by the idea inthat a more diverse training set improves the performance of active learning, we propose three novel active learning scores for graphs based on node dissimilarity: feature dissimilarity score (fds), structure dissimilarity score (sds), and embedding dissimilarity score (eds). this work shows that uncertainty-based query scores need to be combined with other metrics to select the most valuable nodes at the early training stage, since the model could not provide sufficient probability information at the early training stage. we find out that, by directly comparing the labeled node set and the unlabeled node set, we are able to involve the influence of the labeled set on the value of unlabeled candidates into consideration, boosting our al performance. given an attributed graph g = (v, e) with node set v and edge set e, v would be the union set of the labeled node set l 0 and unlabeled node set u 0 at epoch 0. at every training epoch t, if the labeling budget b is not reached, the al query strategy module selects the best candidate in the unlabeled nodes set at the current epoch (denoted as ut) with the query strategy, queries its label with the oracle, and adds it into the labeled nodes set at current epoch.inspired by the well-recognized idea in active learning that a more diverse training set can benefit the performance, we propose three node dissimilarity-based metrics to augment the query strategy and improve the active graph embedding system.feature dissimilarity score (fds): to evaluate how dissimilar a candidate node is compared to the nodes that have already been labeled in terms of their node feature, we propose fds, which applies the commonly used cosine similarity for feature distance measurement. 
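The feature dissimilarity score (FDS) described above measures how different a candidate node's features are from the already-labelled nodes using cosine similarity. The numpy sketch below shows one way such a score could be computed; taking one minus the maximum cosine similarity to any labelled node is our own illustrative aggregation, and the paper may combine the similarities differently.

import numpy as np

def feature_dissimilarity_score(features: np.ndarray, labeled_idx, candidate_idx) -> float:
    """Higher means the candidate's features look less like any labelled node's features."""
    normed = features / np.linalg.norm(features, axis=1, keepdims=True)
    sims = normed[labeled_idx] @ normed[candidate_idx]   # cosine similarity to each labelled node
    return 1.0 - sims.max()

rng = np.random.default_rng(0)
X = rng.normal(size=(10, 8))        # node feature matrix for a toy 10-node graph
labeled = [0, 1, 2]
for v in [3, 4, 5]:
    print("node", v, "FDS:", round(feature_dissimilarity_score(X, labeled, v), 3))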
on a high level, the time-sensitive hyper-parameter set increases the weight of φ centrality (v i ) at the early training stage because we want to collect more representative nodes from the graph, and decreases the weight of φ centrality (v i ) at the later training stage since we want to include more dissimilar nodes to our existing labeled set.• density score and embedding dissimilarity score: the effectiveness of the density score will not be affected if we change the assumption of homophily.• feature dissimilarity score: the effectiveness of fds will be severely affected by changing the assumption of homophily, since fds is a score that completely neglects the structural information of a graph. while this is not a significant problem in homophilic graphs where nodes in different sub-graph clusters usually have different features, in heterophilic graphs, neglecting the structural information might lead to node selection within a single sub-graph cluster.• structural dissimilarity score: the effectiveness of sds will also be severely affected by changing the assumption of homophily, since the assumption "two nodes are similar if they share the same neighbors" will break under heterophilic setting. it is also notable that none of those scores performs better than gcc, which means that there is still room for performance improvement by finding specifically targeted types of active learning score functions for heterophilic graphs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/270.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/270.txt new file mode 100644 index 0000000000000000000000000000000000000000..f2b5b95f90cbc60d7c862d6ce063c1083331b5ba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/270.txt @@ -0,0 +1 @@ +as the technology is advancing, devices are generating large amount of data every. there is a global outburst in the availability of data for researchers. the complexity, huge size and heterogeneity of data require one to search, discover and adopt new software tools and mechanisms in order to successfully manage, analyse, and visualize the data . in , author have obtained results from google scholar for the term "big data" from year 2008-2015. these results shows how this field is evolved through years and the increasing rate of publications in the field of big data. this exponential growth in the field of big data started from 2012 and still this area of research is attracting more and more researchers. a report by mckinsey states that 50% of americans are the victim of one or more chronic diseases, and they spend around 80% of american medical care fee on treatment of these chronic diseases . around 2.7 trillion usd are being spent on the treatment of those chronic diseases annually. this amount of 2.7 trillion usd contains 18% of the annual gross domestic product (gdp) of the united states. many other countries are also suffering from these chronic diseases. according to a chinese report published in 2015 86.6% of deaths are caused by these chronic diseases in china . considering the annual growth of data generation, by 2020 data we generate annually will reach 44 trillion gigabytes which is ten times the size of the data generated in 2013 . big data in healthcare industry refers to electronic health datasets so large and complex for traditional software tools to process. 
healthcare analytics refers to the systematic use of these healthcare datasets for business insights, decision making, planning, learning, and early prediction and detection of diseases by using different statistical, predictive and quantitative models and techniques. figure 1 shows the fast increase in the number of publications referring to "predictive analytics in healthcare" from 2005 to 2017. healthcare analytics needs a technology that helps to perform real-time analysis on massive datasets. in the healthcare industry the applications of predictive analytics are significantly high. predictions can be made about which patients, areas or geographic regions will be affected by some disease. due to these applications in the healthcare industry, predictive analytics has received a huge amount of interest from researchers in the past few years. recent developments in machine learning have radically enhanced the capability of computers to identify and label images, identify and translate speech, play games that involve skill and higher iq, predict diseases and improve decision making over data. in these applications of machine learning, the objective is usually to train a computer to perform as well as or better than a human. traditionally, supervised learning algorithms are used to train the model with labelled data, and testing data is then used for evaluation. in recent years, many researchers around the globe have worked on big data analytics and predictive analytics in healthcare and other domains, to predict or forecast future challenges and opportunities. a taxonomy of big data and analytics is presented in figure 2. this taxonomy is adopted from earlier work and extended in this work. there are different big data sources from which data is coming, and then different components and big analytics technologies are given. in this paper we will focus on machine learning for predictive analytics. research work by different authors is studied as a basis for our research and understanding. in this regard a few research papers are discussed below. pisapia et al. used image analysis and machine learning for the prediction of hydrocephalus. they used cerebral ventriculomegaly images and extracted 77 imaging features. the support vector machine algorithm was applied to the ventricular features of 25 children. the question was who needed shunts and who did not. results were obtained and compared, and show that 3 out of every 4 children needed shunts, with 75% sensitivity and 95% specificity. a new fuzzy rule-based classifier has also been proposed, with algorithms designed on the basis of expectation-maximization and a fuzzy rule-based classifier for applying analytics and cluster formation. the proposed scheme is compared with existing schemes, and the results were analysed on the basis of accuracy, response time, false positive rate and computation cost. results show that the proposed technique performs better than bayes networks, multi-layer perceptrons and decision tables. other authors predicted diabetes types, complications and the type of treatment which can be given to patients. a predictive analysis algorithm and hadoop mapreduce were used for the prediction and the treatment types. a large data set gathered from different laboratories, clinics, ehrs and phrs was processed in hadoop, and the final results were then distributed over different servers according to geographical location. jiang zheng and aldo dagnino presented a comprehensive survey of the literature on big data and analytics.
the focus of the authors was to apply machine learning algorithms to industrial power systems and applications for the prediction of faults and power load. another work presents a healthcare prediction system based on the naïve bayes algorithm. the proposed system discovers and extracts hidden data related to different diseases from a disease database. this system allows users to share their health-related problems and then uses naive bayes to predict the correct illness. for better prediction of heart disease in communities with frequent chronic disease outbreaks, other authors streamlined the machine learning algorithms. they proposed a new convolutional neural network based multimodal disease risk prediction algorithm. for evaluation of the proposed algorithm, real-life hospital data was collected from central china for the period 2013-2015. the experiment was done on the chronic disease of cerebral infarction. experimental results show that for structured data naive bayes performs better, and when structured and text data are combined the proposed algorithm performs better. a proof-of-concept study has also been presented. because of the clinical importance of sepsis, its authors used sepsis mortality as the prediction use case. data was acquired from four emergency departments over a period of 12 months. processing and clustering of the data was done using k-means clustering, and the random forest technique was used for prediction. a logistic regression model and cart were used as the traditional prediction models in emergency care. results show that random forest gives more accurate predictions than the other models. das et al. studied cases of dengue and malaria in delhi, india and performed predictive analysis on the data. simi et al. explored the importance of early detection of female infertility. in their research work the authors used 26 variables and 8 classes of female infertility; the results identified that the random forest technique outperformed the other techniques and provided 88% accuracy. lafta et al. proposed an intelligent recommender system that assists patients and practitioners with the short-term risk assessment of heart failure. the authors proposed a heart disease prediction model, and according to the results the system also provides recommendations to patients about the need to take certain tests and visit a doctor. the main component of the recommendation system is based on a time series data analysis algorithm. for evaluation of the proposed system, real-life data was used. the authors conducted a pilot study on a group of heart failure patients and gathered data using daily medical readings. there were 7147 records of patient data collected for the period of may to november 2012. according to the results, the recommendation accuracy of the proposed system ranges between 75% and 100%. the dataset used contains only a few patients, and the data and readings taken were only numerical values. another author focused on diabetes mellitus, a disease in which the body cannot maintain the level of glucose in the blood. the system performs prediction in a hadoop/mapreduce environment using different algorithms. the proposed system predicts the type of diabetes mellitus a person may have. for prediction of a patient's status as ckd or non-ckd, machine learning classification algorithms have been used. a dataset consisting of 400 data records with 25 attributes was taken from the uci repository. the authors used a reduced dataset consisting of 14 attributes related to ckd. using microsoft azure machine learning studio, different machine learning algorithms were applied to the dataset.
results verify that the multiclass decision forest algorithm performs better and provides 99.1% accuracy. other authors focused on predicting the survivability of patients with breast cancer. a dataset consisting of more than 683 cases was obtained from the uci machine learning repository. there were 26 variables related to the disease in each dataset record. the authors used five machine learning algorithms on the dataset. according to the results, the support vector machine performed better than the other algorithms and provided 97% accuracy. telemonitoring data has also been used for the prediction of asthma exacerbations before their occurrence using machine learning algorithms. 7001 records submitted by asthma patients were used for training and testing of the algorithm. three algorithms were used: adaptive bayesian network, naive bayesian classifier and support vector machines. the study showed that machine learning techniques have significant potential in predicting asthma exacerbations over tele-monitoring data. the next section will describe the dataset and attributes we used for disease predictive analytics. considering the annual growth of data generation, by 2020 the data we generate annually will reach 44 trillion gigabytes, which is ten times the size of the data generated in 2013. healthcare analytics refers to the systematic use of these healthcare datasets for business insights, decision making, planning, learning, and early prediction and detection of diseases by using different statistical, predictive and quantitative models and techniques. recent developments in machine learning have radically enhanced the capability of computers to identify and label images, identify and translate speech, play games that involve skill and higher iq, predict diseases and improve decision making over data. traditionally, supervised learning algorithms are used to train the model with labelled data, and testing data is then used for evaluation. there are different big data sources from which data is coming, and then different components and big analytics technologies are given. the experiment was done on the chronic disease of cerebral infarction. the proposed system predicts the type of diabetes mellitus a person may have. a dataset consisting of more than 683 cases was obtained from the uci machine learning repository. data mining is one of the major and important technologies currently being used in industry for performing data analysis and gaining insight into data. data mining uses different techniques such as artificial intelligence, machine learning and statistical analysis. it is used mostly for predictive analytics as it builds a model from data, and this data also includes the outcomes or responses. as this research work evaluates the performance of machine learning algorithms for predictive analytics in healthcare, supervised learning is used. in the next step, the data was divided into two sets, training data and testing data. once the model trains itself using the training data, the testing data is used for predicting the responses and checking the accuracy, and lastly the model is evaluated. the data was divided into two portions, training data and testing data, consisting of 70% and 30% of the data respectively.
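The 70/30 supervised workflow described above can be sketched with scikit-learn; the file name, column names and the choice of random forest below are illustrative assumptions, not details taken from the study:

```python
# minimal sketch of the 70/30 train/test workflow described above, assuming a
# CSV with feature columns and a binary "disease" label (names are hypothetical).
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score

df = pd.read_csv("health_records.csv")          # hypothetical dataset file
X = df.drop(columns=["disease"])                # predictor attributes
y = df["disease"]                               # outcome / response

# 70% training data, 30% testing data, as in the study
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.30, random_state=42, stratify=y)

model = RandomForestClassifier(n_estimators=100, random_state=42)
model.fit(X_train, y_train)                     # train on the labelled portion
print("test accuracy:", accuracy_score(y_test, model.predict(X_test)))
```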
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/271.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/271.txt new file mode 100644 index 0000000000000000000000000000000000000000..53e01bfa608d09aa8f6a5e55ba509eab71abef3b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/271.txt @@ -0,0 +1 @@ +deep neural networks (dnns) are extremely powerful machine learning models that achieve excellent performance on difficult problems such as speech recognition and visual object recognition . dnns are powerful because they can perform arbitrary parallel computation for a modest number of steps. a surprising example of the power of dnns is their ability to sort n n -bit numbers using only 2 hidden layers of quadratic size . so, while neural networks are related to conventional statistical models, they learn an intricate computation. furthermore, large dnns can be trained with supervised backpropagation whenever the labeled training set has enough information to specify the network's parameters. thus, if there exists a parameter setting of a large dnn that achieves good results (for example, because humans can solve the task very rapidly), supervised backpropagation will find these parameters and solve the problem.despite their flexibility and power, dnns can only be applied to problems whose inputs and targets can be sensibly encoded with vectors of fixed dimensionality. it is a significant limitation, since many important problems are best expressed with sequences whose lengths are not known a-priori. for example, speech recognition and machine translation are sequential problems. likewise, question answering can also be seen as mapping a sequence of words representing the question to a sequence of words representing the answer. it is therefore clear that a domain-independent method that learns to map sequences to sequences would be useful.sequences pose a challenge for dnns because they require that the dimensionality of the inputs and outputs is known and fixed. in this paper, we show that a straightforward application of the long short-term memory (lstm) architecture can solve general sequence to sequence problems. the idea is to use one lstm to read the input sequence, one timestep at a time, to obtain large fixeddimensional vector representation, and then to use another lstm to extract the output sequence from that vector (fig. 1). the second lstm is essentially a recurrent neural network language model except that it is conditioned on the input sequence. the lstm's ability to successfully learn on data with long range temporal dependencies makes it a natural choice for this application due to the considerable time lag between the inputs and their corresponding outputs (fig. 1).there have been a number of related attempts to address the general sequence to sequence learning problem with neural networks. our approach is closely related to kalchbrenner and blunsom who were the first to map the entire input sentence to vector, and is related to cho et al. although the latter was used only for rescoring hypotheses produced by a phrase-based system. graves introduced a novel differentiable attention mechanism that allows neural networks to focus on different parts of their input, and an elegant variant of this idea was successfully applied to machine translation by bahdanau et al. . 
the connectionist sequence classification is another popular technique for mapping sequences to sequences with neural networks, but it assumes a monotonic alignment between the inputs and the outputs .figure 1: our model reads an input sentence "abc" and produces "wxyz" as the output sentence. the model stops making predictions after outputting the end-of-sentence token. note that the lstm reads the input sentence in reverse, because doing so introduces many short term dependencies in the data that make the optimization problem much easier.the main result of this work is the following. on the wmt'14 english to french translation task, we obtained a bleu score of 34.81 by directly extracting translations from an ensemble of 5 deep lstms (with 384m parameters and 8,000 dimensional state each) using a simple left-to-right beamsearch decoder. this is by far the best result achieved by direct translation with large neural networks. for comparison, the bleu score of an smt baseline on this dataset is 33.30 . the 34.81 bleu score was achieved by an lstm with a vocabulary of 80k words, so the score was penalized whenever the reference translation contained a word not covered by these 80k. this result shows that a relatively unoptimized small-vocabulary neural network architecture which has much room for improvement outperforms a phrase-based smt system.finally, we used the lstm to rescore the publicly available 1000-best lists of the smt baseline on the same task . by doing so, we obtained a bleu score of 36.5, which improves the baseline by 3.2 bleu points and is close to the previous best published result on this task (which is 37.0 ). surprisingly, the lstm did not suffer on very long sentences, despite the recent experience of other researchers with related architectures . we were able to do well on long sentences because we reversed the order of words in the source sentence but not the target sentences in the training and test set. by doing so, we introduced many short term dependencies that made the optimization problem much simpler (see sec. 2 and 3.3). as a result, sgd could learn lstms that had no trouble with long sentences. the simple trick of reversing the words in the source sentence is one of the key technical contributions of this work.a useful property of the lstm is that it learns to map an input sentence of variable length into a fixed-dimensional vector representation. given that translations tend to be paraphrases of the source sentences, the translation objective encourages the lstm to find sentence representations that capture their meaning, as sentences with similar meanings are close to each other while different sentences meanings will be far. a qualitative evaluation supports this claim, showing that our model is aware of word order and is fairly invariant to the active and passive voice. in this paper, we show that a straightforward application of the long short-term memory (lstm) architecturecan solve general sequence to sequence problems. the idea is to use one lstm to read the input sequence, one timestep at a time, to obtain large fixeddimensional vector representation, and then to use another lstm to extract the output sequence from that vector (fig. note that the lstm reads the input sentence in reverse, because doing so introduces many short term dependencies in the data that make the optimization problem much easier. 
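A compact sketch of the encoder-decoder idea described above, including the source-reversal trick; the layer sizes, single-layer LSTMs and toy vocabulary handling are simplified assumptions, not the configuration reported in the paper:

```python
# compact sketch of an LSTM encoder-decoder with the source read in reverse;
# sizes and the toy batch below are assumptions made only for illustration.
import torch
import torch.nn as nn

class Seq2Seq(nn.Module):
    def __init__(self, src_vocab, tgt_vocab, emb=32, hidden=64):
        super().__init__()
        self.src_emb = nn.Embedding(src_vocab, emb)
        self.tgt_emb = nn.Embedding(tgt_vocab, emb)
        self.encoder = nn.LSTM(emb, hidden, batch_first=True)
        self.decoder = nn.LSTM(emb, hidden, batch_first=True)
        self.out = nn.Linear(hidden, tgt_vocab)

    def forward(self, src, tgt_in):
        src = torch.flip(src, dims=[1])               # read the source in reverse
        _, state = self.encoder(self.src_emb(src))    # fixed-size summary (h, c)
        dec_out, _ = self.decoder(self.tgt_emb(tgt_in), state)
        return self.out(dec_out)                      # logits per target position

model = Seq2Seq(src_vocab=1000, tgt_vocab=1000)
src = torch.randint(0, 1000, (2, 7))                  # toy batch: 2 sentences, 7 tokens
tgt_in = torch.randint(0, 1000, (2, 5))               # shifted target tokens
logits = model(src, tgt_in)                           # shape (2, 5, 1000)
```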
surprisingly, the lstm did not suffer on very long sentences, despite the recent experience of other researchers with related architectures. a useful property of the lstm is that it learns to map an input sentence of variable length into a fixed-dimensional vector representation. given that translations tend to be paraphrases of the source sentences, the translation objective encourages the lstm to find sentence representations that capture their meaning, as sentences with similar meanings are close to each other while different sentence meanings will be far apart. the simplest strategy for general sequence learning is to map the input sequence to a fixed-sized vector using one rnn, and then to map the vector to the target sequence with another rnn (this approach has also been taken by cho et al.). first, we used two different lstms: one for the input sequence and another for the output sequence, because doing so increases the number of model parameters at negligible computational cost and makes it natural to train the lstm on multiple language pairs simultaneously. so for example, instead of mapping the sentence a, b, c to the sentence α, β, γ, the lstm is asked to map c, b, a to α, β, γ, where α, β, γ is the translation of a, b, c. while the lstm is capable of solving problems with long term dependencies, we discovered that the lstm learns much better when the source sentences are reversed (the target sentences are not reversed). normally, when we concatenate a source sentence with a target sentence, each word in the source sentence is far from its corresponding word in the target sentence. however, lstms trained on reversed source sentences did much better on long sentences than lstms trained on the raw source sentences (see sec. 2 and 3.3). while the decoded translations of the lstm ensemble do not outperform the best wmt'14 system, it is the first time that a pure neural translation system outperforms a phrase-based smt baseline on a large scale. [table: method vs. test bleu score (ntst14); baseline system: 33.30] our work is closely related to kalchbrenner and blunsom, who were the first to map the input sentence into a vector and then back to a sentence, although they map sentences to vectors using convolutional neural networks, which lose the ordering of the words. we were initially convinced that the lstm would fail on long sentences due to its limited memory, and other researchers reported poor performance on long sentences with a model similar to ours. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/272.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/272.txt new file mode 100644 index 0000000000000000000000000000000000000000..1d00e0a3d67d11252ed68a3e7df5d8d1d1dd0027 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/272.txt @@ -0,0 +1 @@ +unlike simple object classification such as distinguishing cats from dogs, flower recognition and classification is a challenging task due to the wide range of flower classes that share similar features: several flowers from different types share similar colour, shape and appearance. furthermore, images of different flowers usually contain similar surrounding objects such as leaves, grass etc. there are more than 250,000 known species of flowering plants classified into about 350 families.
a wide range of various applications including content-based image retrieval for flower representation and indexing , plants monitoring systems, floriculture industry , live plant identification and educational resources on flower taxonomy depend on successful flower classification. manual classification is possible but time consuming and tedious to use with a large number of images and potentially erroneous in some flower classes especially when the image background is complex. thus, robust techniques of flower segmentation, detection and classification have great value.conventional flower classification techniques use a combination of features extracted from the flower images with the aim of improving classification performance . colour, texture, shape and some statistical information are among the main sources of features that are widely used to identify the different flower species . some methods rely on human interaction to further enhance the classification results . in addition, support vector machines (svms) are among the most commonly used types of classifiers . many flower classification techniques rely on learning their features from a segmented flower region to improve accuracy .hand-crafted traditional discriminative features that can be used in a classification task such as histogram of oriented gradients (hogs), scale-invariant feature transform (sift), speeded up robust features etc. cannot be easily applied to the flower classification problem due to the problem complexity as well as the numerous flower classes. in addition, the robustness of a flower classification technique applied to one flower dataset is not guaranteed on a different flower dataset. this is mainly because conventional methods rely heavily on specific hand-made features, which might not be generalisable to other flower images or similar flower images with different conditions such as change of lightning, flower pose or variation of surrounding objects.deep learning techniques, especially convolutional neural networks (cnns), have recently gained wide interest due to superior accuracy compared with classical machine learning methods, which rely on hand-crafted features. in addition, the advance of hardware capabilities, particularly with the use of graphics processing units (gpus), sped up the processing time of deep learning techniques significantly .in this work, we show how we utilise recent development of deep learning methods such as cnn alongside the existence of reasonable size flower datasets to tackle the flower classification task robustly. our automatic method detects the region around the flower in an image, and then uses the cropped images to learn a strong cnn classifier to distinguish different flower classes. the detection is performed by finding the minimum bounding box around an automatically segmented flower. the segmentation is achieved as a binary classification task within a fully convolutional network (fcn) framework. our robust method is evaluated on different known flower datasets and results show that the proposed technique achieves at least 97% classification accuracy (ca) on all datasets.the rest of this paper is organised as follows: in section 2 we present the background and related work. section 3 presents the proposed method. the experimental setup is described in section 4, followed by results and comparisons in section 5. 
we then conclude our work in section 6. unlike simple object classification such as distinguishing cats from dogs, flower recognition and classification is a challenging task due to the wide range of flower classes that share similar features: several flowers from different types share similar colour, shape and appearance. a wide range of various applications including content-based image retrieval for flower representation and indexing, plant monitoring systems, the floriculture industry, live plant identification and educational resources on flower taxonomy depend on successful flower classification. conventional flower classification techniques use a combination of features extracted from the flower images with the aim of improving classification performance. many flower classification techniques rely on learning their features from a segmented flower region to improve accuracy. in addition, the robustness of a flower classification technique applied to one flower dataset is not guaranteed on a different flower dataset. this is mainly because conventional methods rely heavily on specific hand-made features, which might not be generalisable to other flower images or similar flower images with different conditions such as changes of lighting, flower pose or variation of surrounding objects. in this work, we show how we utilise recent developments in deep learning methods such as cnns alongside the existence of reasonably sized flower datasets to tackle the flower classification task robustly. our automatic method detects the region around the flower in an image, and then uses the cropped images to learn a strong cnn classifier to distinguish different flower classes. our robust method is evaluated on different known flower datasets and results show that the proposed technique achieves at least 97% classification accuracy (ca) on all datasets. in one approach, the concept of bi-level co-segmentation (bicos) and bicos-multi-task (bicos-mt) was used in an svm classifier, whereas in another, tri-level cos (tricos) was used to tackle flower segmentation and classification; an svm model was used with sift, the lab colour model, principal component analysis, fisher vectors (fv) and a gaussian mixture model (gmm). other approaches in flower classification have been proposed such as pairwise rotation invariant co-occurrence local binary pattern (pricolbp); metric forests with gmm; generalised max pooling (gmp) with fv and power normalisation; visual adjectives (vas) with sift and improved fv; saliency driven image multi-scale nonlinear diffusion filtering; heterogeneous co-occurrence features; generalised hierarchical matching (ghm) with saliency map (locsaliency); contextual exemplar classifier (cec); fisher discrimination dictionary learning (fddl) with frequent local histograms (flhs); grid-specific bag-of-flh (grid-flh); colour attention-based bag-of-words; haar-like transformation of local features; and graph-regularised robust late fusion (grlf). since our classification method is not sensitive to very accurate segmentation because it relies on a rough detection of the flower region, we evaluated the accuracy of flower detection. to demonstrate the effect of the flower detection step on the accuracy of the proposed method, we report the result of flower classification using cnn with no segmentation step in the tables.
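The detection step described above (a minimum bounding box around a segmented flower, then a crop fed to the classification CNN) can be sketched as follows, assuming the binary mask has already been produced by the segmentation network; the margin parameter is an illustrative assumption:

```python
# sketch of the detection step: find the minimal bounding box around a binary
# segmentation mask and crop the image to it; the mask is assumed given.
import numpy as np

def crop_to_mask(image, mask, margin=0):
    """image: HxWx3 array, mask: HxW array of 0/1 -> crop around the flower."""
    ys, xs = np.nonzero(mask)
    if len(ys) == 0:
        return image                      # nothing segmented: keep the full image
    y0, y1 = max(ys.min() - margin, 0), min(ys.max() + margin + 1, image.shape[0])
    x0, x1 = max(xs.min() - margin, 0), min(xs.max() + margin + 1, image.shape[1])
    return image[y0:y1, x0:x1]

# toy usage
img = np.zeros((64, 64, 3), dtype=np.uint8)
msk = np.zeros((64, 64), dtype=np.uint8)
msk[20:40, 25:45] = 1
crop = crop_to_mask(img, msk, margin=2)   # crop passed on to the classification cnn
```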
segmentation and detection of the minimal flower region allow for a more accurate classification because they allow the classification cnn to focus on the region of interest while excluding non-discriminative regions. finally, although we show the applicability of the proposed method on the flower classification problem, our method can be applied to other applications which share similar challenges with flower classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/273.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/273.txt new file mode 100644 index 0000000000000000000000000000000000000000..613401c01514ccb58a1e20d57c541ca07399e2a5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/273.txt @@ -0,0 +1 @@ +it is observed that amongst the most prevalent diseases, lower spine problems rank first, and their affliction rate is higher than that of human immunodeficiency virus, road accidents, tuberculosis, and lung cancer, as per the global burden of disease study 2010. back pain from the lower lumbar region can be caused by any body part in the connected network of spinal cord, nerves, bones, discs or tendons in the lumbar spine. typical circumstances of lbp include: a) irritation in the nerves joining the spinal nerve to the legs, b) irritation of the small nerves that supply the lower back, c) strain of the spinal muscles, and d) damage to bones, ligaments or joints. these different causes of lbp lead to various types of diseases such as spondylolysis, spondylolisthesis, stenosis, herniated disc, etc. the variety of causes behind lbp makes their identification difficult, which leads to a gap in its management, and this gap makes lbp chronic. so it is very important to identify the cause of lbp at an early stage, which will prevent it from becoming chronic. it is observed that amongst the most prevalent diseases, lower spine problems rank first, and their affliction rate is higher than that of human immunodeficiency virus, road accidents, tuberculosis, and lung cancer, as per the global burden of disease study 2010. back pain from the lower lumbar region can be caused by any body part in the connected network of spinal cord, nerves, bones, discs or tendons in the lumbar spine. typical circumstances of lbp include: a) irritation in the nerves joining the spinal nerve to the legs, b) irritation of the small nerves that supply the lower back, c) strain of the spinal muscles, and d) damage to bones, ligaments or joints. an expert system is an application of artificial intelligence (ai) in which domain knowledge is fed into the system, interpreted by a knowledge engineer, and used to infer an answer to a query in that domain. in the medical field, an es plays various roles, amongst which diagnosis and classification are significant; when it is combined with an artificial neural network, it gives the best results in many cases such as cardiovascular diseases, diabetes and cancer. a knowledge-base-driven decision support system has been developed for clinicians to diagnose lower back pain.
sullivan compared different models available for classifying chronic lower back pain (clbp) and observed that they would not be sufficient, as 85% of clbp disorders are still not labelled because this group of disorders is due to 'tissue strains' and 'sprains'. an expert system (es) is one of the application areas of artificial intelligence; it has expert knowledge of a particular domain and uses this knowledge to respond properly (here, domain refers to the area of interest in which the task is being performed). here an es is designed to classify whether the spine condition is normal or abnormal from the standard lower back pain dataset having twelve numeric attributes. svm finds the hyperplane that separates input elements with maximal margin in the n-dimensional space by mapping training vectors through a product known as the kernel function. the kernel function is a dot product of variables in the input space that maps them to the output space. the svm algorithm is implemented on this dataset with different kernel functions, and accuracy is compared for the classification of lower back pain. after that, the newly designed weighted kernel function is implemented for the same dataset, and it is observed that the attributes sacral_slope, pelvic_radius, degree_spondylolisthesis and pelvic_slope affect the classification of lower back pain the most, as shown in the figure. the designed weighted kernel is used to identify the attributes that most affect the classification of the spine condition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/274.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/274.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb2eb3fca87551cbb7b42e242f31ec6f9c8510e1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/274.txt @@ -0,0 +1 @@ +most experienced users know how to detect a phishing site, but due to the speed of life a user may ignore the oddities in urls, and hence end up leaking sensitive information to a phishing site. on the other side, most users do not have detailed knowledge about urls, and they tend to fall into the trap of phishers' sites. phishing attacks have increased greatly as technology has advanced. the first phishing attack in the world targeted aol (america online) in the early 1990s. ii. types of phishing. spear phishing: spear phishing is a direct attack on a corporation or a selected website. it principally steals the basic information of that company's users. it is a customized attack on a specific employee, group or company.
clone phishing: the phisher has to make a nearly identical replica of a legitimate website or message, which can easily trick the victim, who eventually falls into the trap laid by the phishers. whaling: whaling is a common cyber-attack that happens when an attacker utilizes spear phishing methods to go after a large, high-profile target. most experienced users know how to detect a phishing site, but due to the speed of life a user may ignore the oddities in urls, and hence end up leaking sensitive information to a phishing site. ii. figure 1: classification diagram of phishing and its detection techniques. drive-by-download: this is a delivery technique that injects into a machine a small piece of harmful virus or shell code simply by the user visiting the website or viewing an html message. software classification approaches: the aim of this arrangement is to recognize phishing and authentic sites on behalf of the user; engineers create software tools that protect against and detect phishing attacks even when the user ignores the warning signs. phishing software classification methodologies include, for example, blacklists, white-lists, heuristics, visual similarity and machine learning. similarity-based phishing identification checks for likeness between the content pieces and pictures inserted in the malicious and legitimate sites. xiang hong used a feature-rich machine learning framework and came up with 2 vital features of phishing scams: (i) a website that is a likeness of a legitimate site. guang xiang used a feature-rich machine learning approach to detect phishing websites. this approach submits fake information into html forms, rather than showing alert messages to the user, whenever a phishing website is visited. the rate of phishing attacks is increasing rapidly day by day through many channels. user education or training aims to develop the technical awareness level of the end-user to lower their vulnerability to phishing attacks. as a solution, many phishing detection techniques have been developed and many attacks are detected. the major issue in implementing these algorithms is finding the right feature set for a particular phishing attack. therefore, we present a paper which surveys different phishing detection techniques and algorithms. this survey provides a better understanding of phishing detection techniques and of many possible solutions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/275.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/275.txt new file mode 100644 index 0000000000000000000000000000000000000000..5120f8c3f5aa0b1a609e540a3088d4aa34388772 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/275.txt @@ -0,0 +1 @@ +text communicates not only informative content, but also attitudinal information, including emotional states. the following reports on an empirical study of text-based emotion prediction. section 2 gives a brief overview of the intended application area, whereas section 3 summarizes related work. next, section 4 explains the empirical study, including the machine learning model, the corpus, the feature set, parameter tuning, etc. section 5 presents experimental results from two classification tasks and feature set modifications. section 6 describes the agenda for refining the model, before presenting concluding remarks in section 7.
first, what emotion or emotions most appropriately describe a certain text passage, and second, given a text passage and a specified emotional mark-up, how to render the prosodic contour in order to convey the emotional content (cahn, 1990). while the users preferred the emotional client, this evaluation does not reveal emotion classification accuracy, nor how well the model generalizes on a large data set. another work (2004) addresses sentence-level emotion recognition for japanese tts. their model uses a composition assumption: the emotion of a sentence is a function of the emotional affinity of the words in the sentence. while the authors actually address the two fundamental problems of emotional tts, their approach is impractical and most likely cannot scale up for a real corpus. again, while lexical items with clear emotional meaning, such as happy or sad, matter, emotion classification probably needs to consider additional inference mechanisms. furthermore, if multiple emotion classes can characterize s, then given e' ⊂ e, the target of the mapping function becomes the ordered pair (s, e'). whereas our goal is to predict finer emotional meaning distinctions according to emotional categories in speech, in this study we focus on the basic task of recognizing emotional passages and on determining their valence (i.e., positive versus negative). semantic role labeling (koomen, punyakanok, roth and yih, 2005). the current data set consisted of a preliminary annotated and tie-broken data set of 1580 sentences, or 22 grimms' tales. this feature intends to provide an idea about the story's general affective personality (picard, 1997), whereas the feature reflecting the story progress is hoped to capture that some emotions may be more prevalent in certain sections of the story. group 12 uses lexical lists extracted from wordnet (fellbaum, 1998), on the basis of the primary emotion words in their adjectival and nominal forms.• given that the data set is currently small, for the condition named same-tune-eval, tuning was performed automatically on all data using a slightly smaller set of combinations, and then manually adjusted against the 10-fold cross-validation process. third and finally, the emotion class is combined from basic emotion labels, rather than an original annotated label. we also plan to explore finer emotional meaning distinctions, by using a hierarchical sequential model which better corresponds to different levels of cognitive difficulty in emotional categorization by humans, and to classify the full set of basic level emotional categories discussed in section 4. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/276.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/276.txt new file mode 100644 index 0000000000000000000000000000000000000000..eda92671d8356b8c473bd5ee89cf52aff93cb4bc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/276.txt @@ -0,0 +1 @@ +various business sectors, government agencies and individual travellers require precise and appropriate traffic flow information. it helps riders and drivers make better travel judgements to alleviate traffic congestion, improve traffic operation efficiency, and reduce carbon emissions. the development and deployment of intelligent transportation systems (itss) provide better accuracy for traffic flow prediction. it is regarded as a crucial element for the success of advanced traffic management systems, advanced public transportation systems, and traveller information systems.
traffic flow prediction depends on real-time traffic and historical data collected from various sensor sources, including inductive loops, radars, cameras, mobile global positioning systems, crowd sourcing and social media. traffic data is exploding due to the vast use of traditional sensors and new technologies, and we have entered the era of large-volume transportation data. transportation control and management are now becoming more data-driven. however, there are already lots of traffic flow prediction systems and models; most of them use shallow traffic models and still fall short due to the enormous dataset dimension. recently, deep learning concepts have attracted many people, including academicians and industrialists, due to their ability to deal with classification problems, natural language understanding, dimensionality reduction, object detection and motion modelling. dl uses multi-layer neural networks to mine the inherent properties in data from the lowest level to the highest level. they can identify massive volumes of structure in the data, which eventually helps us to visualize and make meaningful inferences from the data. most its departments and researchers in this area are also concerned with developing autonomous vehicles, which can make transportation systems much more economical and reduce the risk to lives. also, saving time is an additional benefit of this idea. in recent decades a lot of attention has been paid to safe automatic driving. it is necessary that information be provided in time through driver assistance systems (das), autonomous vehicles (avs) and traffic sign recognition (tsr). although many algorithms have already been developed for predicting traffic flow information, these algorithms are not accurate, since traffic flow involves data having a vast dimension, so it is not very easy to predict accurate traffic flow information with low complexity. we intend to use genetic, deep learning, image processing, machine learning and also soft computing algorithms for the prediction of traffic flow, since a lot of journals and research papers suggest that they work well when it comes to big data. it helps riders and drivers make better travel judgements to alleviate traffic congestion, improve traffic operation efficiency, and reduce carbon emissions. the development and deployment of intelligent transportation systems (itss) provide better accuracy for traffic flow prediction. it is regarded as a crucial element for the success of advanced traffic management systems, advanced public transportation systems, and traveller information systems. traffic flow prediction depends on real-time traffic and historical data collected from various sensor sources, including inductive loops, radars, cameras, mobile global positioning systems, crowd sourcing and social media. traffic data is exploding due to the vast use of traditional sensors and new technologies, and we have entered the era of large-volume transportation data. it is necessary that information be provided in time through driver assistance systems (das), autonomous vehicles (avs) and traffic sign recognition (tsr).
but these algorithms are not accurate, since traffic flow involves data having a vast dimension, so it is not very easy to predict accurate traffic flow information with low complexity. we intend to use genetic, deep learning, image processing, machine learning and also soft computing algorithms for the prediction of traffic flow, since a lot of journals and research papers suggest that they work well when it comes to big data. by using these control strategies, expenditure is also reduced, making the models cost-effective for the government or traffic managers. deep learning is a part of machine learning algorithms, and it is a compelling tool to handle a large amount of data. dl uses the concepts of a neural network, and by using this feature it is beneficial for finding network dynamics (such as spectrum availability, congestion points, hotspots and traffic bottlenecks). svms support linear and nonlinear regression, which we can refer to as support vector regression, instead of trying to fit the largest possible margin between two classes while limiting margin violations. outlier detection is another critical step for an accurate result, and for this we have used support vector machines (svms), a set of supervised learning methods that can also be used for classification and regression. 6) predict the 45-minute interval parameters through the machine learning algorithm; 7) draw conclusions about the traffic congestion. following the above steps we can implement this algorithm and obtain a model which gives higher accuracy than the existing machine learning models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/277.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/277.txt new file mode 100644 index 0000000000000000000000000000000000000000..6c961e68011153987075cc42344a11f8f6dd7fd2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/277.txt @@ -0,0 +1 @@ +over the last decades, the world has witnessed a real-time flow of different kinds of structured and unstructured data received from social media, transportation, communication, devices and sensors. the international data corporation forecasts that nearly 180 zettabytes of data will be generated by 2025. this huge explosion of data has given rise to a new economy for the world, the data economy. in today's digital world, data can be considered the new oil: important, but only of worth once it has been cleaned and processed. this incremental growth in the data economy has driven a rise in the usage of the concept of iot, or the internet of things, and also incites the existence of data science. iot is a hybrid combination of physical-world objects and embedded devices that are connected through the internet to establish communication. with the day-by-day involvement of iot in daily life as well as in business and industry, more and more data is generated with the help of sensors. rather than keeping the whole of the newly generated data, it is better to extract from the iot data the smart data that can act as its best representative. data science is also continuously moving ahead in the direction of a new paradigm, artificial intelligence, which makes it possible to teach machines by learning through data and propels a diversity of effective insights. generally, the huge rise in the amount of data to exchange makes it impossible to analyze using traditional techniques.
so, machine learning is computation-intensive and needs an enormous amount of data for training, including repetitive training, in order to refine the ability of the applied algorithms to learn and take decisions. machine learning helps iot devices and applications make more intelligent decisions with respect to the smart data. this huge explosion of data has given rise to a new economy for the world, the data economy. this incremental growth in the data economy has driven a rise in the usage of the concept of iot, or the internet of things, and also incites the existence of data science. with the day-by-day involvement of iot in daily life as well as in business and industry, more and more data is generated with the help of sensors. rather than keeping the whole of the newly generated data, it is better to extract from the iot data the smart data that can act as its best representative. data science is also continuously moving ahead in the direction of a new paradigm, artificial intelligence, which makes it possible to teach machines by learning through data and propels a diversity of effective insights. so, machine learning is computation-intensive and needs an enormous amount of data for training, including repetitive training, in order to refine the ability of the applied algorithms to learn and take decisions. machine learning helps iot devices and applications make more intelligent decisions with respect to the smart data. along with the addition of ever more data, machine learning refines the automatic learning process through training and leads towards adaptation of its algorithms. guided learning: when learning is applied on labeled data, or the desired outcomes are known, it is called guided learning. if the learning is applied on unlabeled datasets, or the data is not known in advance, then this type of learning is known as unguided learning. the semi-guided or semi-supervised learning approach is a hybrid combination of guided and unguided learning with both labeled and unlabeled data. reinforcement learning is a type of machine learning which gives a learning system efficient observation of its surroundings and consistent learning behaviour in order to enhance the cumulative incentives or rewards. different researchers have already illustrated their research in this field with some machine learning algorithms in order to make proper utilization of the huge amount of generated data and to provide some specific points of view. for data security analysis addressing the man-in-the-middle attack (mitm), a support vector machine (svm) based on the supervised (guided) learning method is deployed at the edge to classify the traffic data as a raw dataset. the recent successes of machine learning along with iot prove their fusion to be a wonderful resource in the field of data science. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/278.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/278.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc49bbd811c11e6b6da852cc547ded4e7c2c3f82 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/278.txt @@ -0,0 +1 @@ +chronic kidney disease (ckd) is a global public health problem affecting approximately 10% of the world's population. the prevalence of ckd in china is 10.8%, and the range of prevalence is 10%-15% in the united states.
according to another study, this percentage has reached 14.7% in the mexican adult general population. this disease is characterised by a slow deterioration in renal function, which eventually causes a complete loss of renal function. ckd does not show obvious symptoms in its early stages. therefore, the disease may not be detected until the kidney loses about 25% of its function. in addition, ckd has high morbidity and mortality, with a global impact on the human body. it can induce the occurrence of cardiovascular disease. ckd is a progressive and irreversible pathologic syndrome. hence, the prediction and diagnosis of ckd in its early stages is quite essential; it may enable patients to receive timely treatment to ameliorate the progression of the disease. machine learning refers to a computer program which calculates and deduces the information related to the task and obtains the characteristics of the corresponding pattern. this technology can achieve accurate and economical diagnoses of diseases; hence, it might be a promising method for diagnosing ckd. it has become a new kind of medical tool with the development of information technology and has a broad application prospect because of the rapid development of electronic health records. in the medical field, machine learning has already been used to detect human body status, analyze the relevant factors of a disease and diagnose various diseases. for example, the models built by machine learning algorithms were used to diagnose heart disease, diabetes and retinopathy, acute kidney injury, cancer and other diseases. in these models, algorithms based on regression, trees, probability, decision surfaces and neural networks were often effective. in the field of ckd diagnosis, hodneland et al. utilized image registration to detect renal morphologic changes. vasquez-morales et al. established a classifier based on a neural network using large-scale ckd data, and the accuracy of the model on their test data was 95%. in addition, most of the previous studies utilized the ckd data set that was obtained from the uci machine learning repository. chen et al. used k-nearest neighbor (knn), support vector machine (svm) and soft independent modelling of class analogy to diagnose ckd; knn and svm achieved the highest accuracy of 99.7%. in addition, they used a fuzzy rule-building expert system, fuzzy optimal associative memory and partial least squares discriminant analysis to diagnose ckd, and the range of accuracy in those models was 95.5%-99.6%. their studies have achieved good results in the diagnosis of ckd. in the above models, mean imputation is used to fill in the missing values, and it depends on the diagnostic categories of the samples. as a result, their method could not be used when the diagnostic results of the samples are unknown. in reality, patients might miss some measurements for various reasons before diagnosis. in addition, for missing values in categorical variables, data obtained using mean imputation might have a large deviation from the actual values. for example, for variables with only two categories, we set the categories to 0 and 1, but the mean of the variables might be between 0 and 1. polat et al.
developed an svm based on feature selection technology; the proposed models reduced the computational cost through feature selection, and the range of accuracy in those models was 97.75%-98.5%. j. aljaaf et al. used a novel multiple imputation method to fill in the missing values, and then a multilayer perceptron neural network (mlp) achieved an accuracy of 98.1%. subas et al. used mlp, svm, knn, the c4.5 decision tree and random forest (rf) to diagnose ckd, and the rf achieved an accuracy of 100%. in the models established by boukenze et al., mlp achieved the highest accuracy of 99.75%. these studies focus mainly on the establishment of models and achieve ideal results. however, a complete process of filling in the missing values is not described in detail, nor is any feature selection technology used to select predictors. almansour et al. used an svm and a neural network to diagnose ckd, and the accuracy of the models was 97.75% and 99.75%, respectively. in the models established by gunarathne et al., zero was used to fill in the missing values, and decision forest achieved the best performance with an accuracy of 99.1%. to summarize the previous ckd diagnostic models, we find that most of them suffer from either a missing-value imputation method with a limited application range or relatively low accuracy. therefore, in this work, we propose a methodology to extend the application range of ckd diagnostic models. at the same time, the accuracy of the model is further improved. the contributions of the proposed work are as follows. 1) we used knn imputation to fill in the missing values in the data set, which can be applied to data sets where the diagnostic categories are unknown. 2) logistic regression (log), rf, svm, knn, a naive bayes classifier (nb) and a feed-forward neural network (fnn) were used to establish ckd diagnostic models on the complete ckd data sets. the models with better performance were extracted for misjudgment analysis. 3) an integrated model that combines log and rf by using a perceptron was established, and it improved the performance of the component models in ckd diagnosis after the missing values were filled by knn imputation. knn imputation is used to fill in the missing values. to our knowledge, this is the first time that knn imputation has been used for the diagnosis of ckd. in addition, building an integrated model is also a good way to improve the performance of separate individual models. the proposed methodology might effectively deal with the scenario where patients are missing certain measurements before being diagnosed. in addition, the resulting integrated model shows a higher accuracy. therefore, it is speculated that this methodology might be applicable to clinical data in actual medical diagnosis. the rest of the paper is organized as follows. in section ii, we describe the preliminaries. the establishment of the individual models and the integrated model is described in section iii. in section iv, we evaluate and discuss the performance of the integrated model.
in section v, we summarize the work and its contributions, including future works.1) we used knn imputation to fill in the missing values in the data set, which could be applied to the data set with the diagnostic categories are unknown.2) logistic regression (log), rf, svm, knn, naive bayes classifier (nb) and feed forward neural network (fnn) were used to establish ckd diagnostic models on the complete ckd data sets.3) an integrated model that combines log and rf by using perceptron was established and it improved the performance of the component models in ckd diagnosis after the missing values were filled by knn imputation. for the numerical variables, the missing values are filled using the median of the corresponding variable in k complete samples, and for the category variables, the missing values are filled using the category that has the highest frequency in the corresponding variable in k complete samples. in each calculation of the model training, the algorithm selects the best combination of parameters to establish the model by grid search. for the nb and the knn, the performance of the models when using knn imputation is not very ideal compared to using random imputation or mean and mode imputation in tables5 and 6. the above result also proves the validity of the knn imputation, since knn imputation does improve the accuracy of some models, such as log, rf and svm (table5).the probability distribution of the samples in the complete ckd data set (at k = 11), the horizontal axis and the vertical axis represent the probabilities that the samples were judged as notckd by the log and the rf, respectively.in order to verify whether the integrated model can improve the performance of the component models, we first used the same random number seed 1234 to establish and evaluate the integrated model on each complete data, and the confusion matrices returned are shown in table9. the average results of the integrated models and two component models are shown in table10, and the integrated model has the best performance in detecting the two categories because table9. we also compared the methodology in this study (log, rf and integrated model) with the other models on the same data in previous studies (called contrast models), and the comparison result is shown in table11. by the use of knn imputation, log, rf, svm and fnn could achieve better performance than the cases when the random imputation and mean and mode imputation were used. in addition, the ckd data set is composed of mixed variables (numeric and category), so the similarity evaluation methods based on mixed data could be used to calculate the similarity between samples, such as general similarity coefficient. after unsupervised imputation of missing values in the data set by using knn imputation, the integrated model could achieve a satisfactory accuracy. in addition, due to there are only two categories (ckd and notckd) of data samples in the data set, the model can not diagnose the severity of ckd. 
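The KNN imputation scheme described above (median of the k nearest complete samples for numerical variables, most frequent category for categorical variables) can be sketched as follows. This is a minimal illustration rather than the authors' code: the min-max scaling, the Euclidean distance over numeric columns only, and the treatment of a row's own missing numeric entries are assumptions that the text does not spell out.

import numpy as np
import pandas as pd

def knn_impute(df, numeric_cols, categorical_cols, k=11):
    """Unsupervised KNN imputation for a mixed-type data frame.

    Numeric gaps are filled with the median, categorical gaps with the
    mode, taken over the k nearest complete samples.  Distances are
    computed on min-max scaled numeric columns only (an assumption)."""
    complete = df.dropna()
    lo, hi = complete[numeric_cols].min(), complete[numeric_cols].max()
    scaled_complete = (complete[numeric_cols] - lo) / (hi - lo + 1e-9)

    filled = df.copy()
    for idx, row in df[df.isna().any(axis=1)].iterrows():
        scaled_row = (row[numeric_cols] - lo) / (hi - lo + 1e-9)
        # Euclidean distance over the numeric part; the row's own missing
        # entries contribute zero difference (an assumption).
        diffs = (scaled_complete - scaled_row).fillna(0.0)
        dist = np.sqrt((diffs ** 2).sum(axis=1))
        neighbours = complete.loc[dist.nsmallest(k).index]
        for col in numeric_cols:
            if pd.isna(row[col]):
                filled.at[idx, col] = neighbours[col].median()
        for col in categorical_cols:
            if pd.isna(row[col]):
                filled.at[idx, col] = neighbours[col].mode().iloc[0]
    return filled

Because the fill values come only from the k most similar complete samples and never from the class label, the procedure stays usable when the diagnostic category is unknown, which is the point made above.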
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/279.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/279.txt new file mode 100644 index 0000000000000000000000000000000000000000..a9be3882d64ff71a289988a7cbc375eb16d97a5c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/279.txt @@ -0,0 +1 @@ +artificial neural networks (anns) have been demonstrated to be effective for many cases of supervised learning , but programming an ann manually can be a challenging task. frameworks such as tensorflow and pytorch have been created to simplify the creation and use of anns.one of the major uses of artificial neural networks is natural language processing one aspect of which is sentiment analysis. to compare the two machine learning frameworks, the first step was to develop, train, and evaluate the same neural network model in both frameworks. in theory we should be able to obtain the same accuracy in both frameworks. since the same underlying model was being implemented. the second step was to compare the model implementations in the two frameworks based on execution time, memory usage, and ease of development.the data set that was chosen to test the frameworks was a publicly-available set of amazon reviews for video games. due to the nuances and bias involved in what each individual feels a certain rating should be, the data set was then broken down into only positive and negative reviews. the positive reviews consisted of the reviews with ratings of 4 or 5, whereas the negative reviews consisted of the reviews with ratings 1, 2, or 3.neural networks use mathematical calculations, so the textual reviews needed to be converted into numerical information. each occurrence of each word in the textual reviews was then replaced by the numerical index of that word in the common word list if that word occurred in the common word list. any word that were not common enough to be found in the common word list was given the value of 0. each sentence is reduced to a total size of 250 indices. any sentence over 250 words used just the first 250 words found in the vocabulary. if a sentence is shorter than 250 words, then the rest of list is padded with 0's. this meant that 13 words in the original sentence were converted to their numerical representation and the rest of the list was filled with 0's.this sentence highlights some of the issues that were found with the data set. these errors made it so that those words were not common enough to be included in the final sentence, removing some of the important information. in this case, words like 'dvd', 'collection', 'son' and 'wanted' are left out from the tokenized sentence because of errors present in the review. since the words that occur before a certain word in a sentence add importance to the current word being analyzed, rnns are often used in natural language processing. the embedding layer takes the list of 200 numbers representing the review sentence, and changes them into vector representations that are stored in a list of size 32. 
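A minimal sketch of the preprocessing and embedding steps described above: each word is replaced by its index in the common word list, words not in the list become 0, every review is truncated or padded to a fixed length of 250 indices, and an embedding layer maps each index to a 32-dimensional vector. The toy vocabulary and the use of PyTorch here are illustrative assumptions; the study implements the same model in both TensorFlow and PyTorch.

import torch
import torch.nn as nn

MAX_LEN = 250          # fixed review length used in the text
EMBED_DIM = 32         # embedding vector size mentioned in the text
PAD_INDEX = 0          # also used for words missing from the common word list

def encode_review(words, word_to_index):
    # Replace each word with its index in the common word list; unknown words become 0.
    indices = [word_to_index.get(w, PAD_INDEX) for w in words]
    # Truncate long reviews and pad short ones with 0's up to MAX_LEN.
    indices = indices[:MAX_LEN]
    return indices + [PAD_INDEX] * (MAX_LEN - len(indices))

# Hypothetical vocabulary for illustration only.
word_to_index = {"great": 1, "game": 2, "boring": 3}
batch = torch.tensor([encode_review("a great game".split(), word_to_index)])

# The embedding layer turns each index into a learned 32-dimensional vector.
embedding = nn.Embedding(num_embeddings=len(word_to_index) + 1, embedding_dim=EMBED_DIM)
vectors = embedding(batch)      # shape: (1, 250, 32)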
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/28.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/28.txt new file mode 100644 index 0000000000000000000000000000000000000000..1d1c5f78345a1e6ab4c0e5283be4f9c3db60012d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/28.txt @@ -0,0 +1 @@ +the ability to predict the weather is of great importance in a variety of fields and applications where decision-making processes need to account for and heavily rely on upcoming meteorological conditions. some examples include the transportation, energy, and agriculture industries . however, it is challenging to accurately predict the weather in the future, especially when the weather condition changes rapidly and unexpectedly. in addition to the traditional time-series data (e.g., air temperature, wind speed, precipitation, etc.), it also requires the consideration of the geographic context. the weather forecast in one region may have a strong correlation with that of other nearby regions . for example, heavy snow in the northeast of china may be related to light rain in the southwest of china. weather forecasting requires the consideration of not only temporal correlations but also spatial correlations.while recently a great amount of work has been focusing on weather nowcasting , many socio-economic needs must be aware of future weather conditions that span several hours, and not just a couple. the weather4cast challenge fills this gap by providing competitors with a tough challenge: predict rainfall events for the future eight hours given the preceding hour as a context.in particular, the aim of the 2022 edition of the weather4cast competition is to predict future highresolution rainfall events from lower-resolution satellite radiances. while radar data is more precise, accurate, and of higher resolution than satellite data, they are expensive to obtain and not available in many parts of the world. we thus want to learn how to predict these high-value rain rates from radiation measured by geostationary satellites. competition participants should predict rainfall locations for the next 8 hours in 32 time slots from an input sequence of 4 time slots of the preceding hour. the input sequence consists of four 11-band spectral satellite images. these 11 channels show slightly noisy satellite radiances covering so-called visible (vis), water vapor (wv), and infrared (ir) bands. each satellite image covers a 15-minute period and its pixels correspond to a spatial area of about 12km x 12km. the prediction output is a sequence of 32 images representing rain rates from ground-radar reflectivities. output images also have a temporal resolution of 15 minutes but have a higher spatial resolution, with each pixel corresponding to a spatial area of about 2km × 2km. so in addition to predicting the weather in the future, participants also have to deal with a super-resolution task due to the coarser spatial resolution of the satellite data.in contrast to the previous competition , there is only one target variable -rainfall events. the rainfall events data provided by the operational program for exchange of weather radar information (opera) 1 and the satellite images are given by european organisation for the exploitation of meteorological satellites (eumetsat) 2 .motivated by the recent success of transformer-based architectures for vision , we decide to investigate them for the given task. 
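As a concrete reference for the task set-up quoted above, one training sample can be written as a pair of tensors with the following shapes; the batch size and the target's spatial size are assumptions made purely for illustration.

import torch

batch_size = 2                      # arbitrary
sat_bands, in_steps = 11, 4         # 11 spectral bands, 4 preceding 15-minute slots
out_steps = 32                      # 8 hours ahead at 15-minute resolution

# Input: an hour of multi-band satellite radiances (spatial size assumed to be
# 252 x 252, the model input size quoted later in the text).
x = torch.randn(batch_size, sat_bands, in_steps, 252, 252)

# Target: rainfall occurrence per pixel for the next 32 slots on the finer
# radar grid (spatial size here is an assumption).
y = torch.randint(0, 2, (batch_size, out_steps, 252, 252)).float()

print(x.shape, y.shape)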
in particular, we conduct experiments with the swin-unetr transformer and an adaptation of the vivit model. our results allow us to achieve 3 rd place in the challenge and suggest that this is certainly an interesting research direction to pursue in future work.in particular, the aim of the 2022 edition of the weather4cast competition is to predict future highresolution rainfall events from lower-resolution satellite radiances. while radar data is more precise, accurate, and of higher resolution than satellite data, they are expensive to obtain and not available in many parts of the world. competition participants should predict rainfall locations for the next 8 hours in 32 time slots from an input sequence of 4 time slots of the preceding hour. output images also have a temporal resolution of 15 minutes but have a higher spatial resolution, with each pixel corresponding to a spatial area of about 2km × 2km. so in addition to predicting the weather in the future, participants also have to deal with a super-resolution task due to the coarser spatial resolution of the satellite data. the rainfall events data provided by the operational program for exchange of weather radar information (opera)1and the satellite images are given by european organisation for the exploitation of meteorological satellites (eumetsat)2. then, for each sample of the batch, it uses the 4 obtained class tokens (from the 4 images constituting the training sample) to model the temporal information (extracting the time dimension out of the batch dimension) through a second transformer, which gives rise to a new final classification token for each sequence of images (i. to adapt this architecture for the weather prediction task, several techniques are being explored: data transformation, upsampling, and channel convolution. also since a size 2 and stride 2 pooling operation is applied 5 times in the original architecture, all spatial dimensions of the input image must be divisible by 32.upsample another approach to overcome discrepancies between the original swin-unetr task and the weather prediction task is to modify the network architecture. in this experiment, we feed the data using its original shape to the network (except the height and width dimensions that were interpolated to 256) and change the last 2 layers of the swin encoder so they have an identical number of input and output channels. the convolution module consists of 2 convolutional layers: the first one has 4 input channels and 32 output channels with kernel size 3, and for the second one input and output channels are 32 with the same kernel size of 3. the shape of an input to a model is(11,4,252,252), where 11 is the number of bands spectral satellite images, 4 is the time dimension (1 preceding hour × 4 step, i. although the dimensions of the regions for input and output are the same, the spatial resolution of the satellite images is about six times lower than the resolution of the ground radar. this means that the entire rainfall ground radar region resembles only a 42 × 42 center's patch in the coarser satellite resolution, making this task a super-resolution task too. 
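A rough sketch of the channel-convolution adaptation and the resizing constraint mentioned above. The text specifies the two convolutional layers (4 input channels to 32, then 32 to 32, both with kernel size 3) and the interpolation of the spatial dimensions so that they stay divisible by 32 after the five stride-2 pooling steps; how the 11 spectral bands are routed through the module is not fully specified, so folding them into the batch dimension is an assumption.

import torch
import torch.nn as nn
import torch.nn.functional as F

class TimeChannelConv(nn.Module):
    """Sketch of the two-layer convolution module described in the text:
    4 time slots treated as input channels -> 32 channels -> 32 channels,
    kernel size 3.  Folding the 11 bands into the batch is an assumption."""
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(4, 32, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(32, 32, kernel_size=3, padding=1)

    def forward(self, x):                        # x: (B, 11, 4, 252, 252)
        b, bands, t, h, w = x.shape
        x = x.reshape(b * bands, t, h, w)        # fold bands into the batch
        # Interpolate to 256 x 256 so every spatial size remains divisible by 32
        # after the five stride-2 pooling operations mentioned in the text.
        x = F.interpolate(x, size=(256, 256), mode="bilinear", align_corners=False)
        x = F.relu(self.conv1(x))
        x = self.conv2(x)                        # (B*11, 32, 256, 256)
        return x.reshape(b, bands, 32, 256, 256)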
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/280.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/280.txt new file mode 100644 index 0000000000000000000000000000000000000000..c716c36d7fa75e395a4d8f09744c249ebac35412 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/280.txt @@ -0,0 +1 @@ +utilizing audio-visual cues together to recognize a person's identity has been studied in various fields from neuroscience (hasan et al., 2016;tsantani et al., 2019) to practical machine learning applications (nagrani et al., 2018b;a;wen et al., 2019a;shon et al., 2019). for example, some neurological studies have found that in some cortical areas, humans recognize familiar individuals by combining signals from several modalities, such as faces and voices (hasan et al., 2016). in conjunction with the neurological studies, it is also a well known fact that a human speech production system is directly related to the shape of the vocal tract (mermelstein, 1967;teager & teager, 1990).inspired by the aforementioned scientific evidence, we would like to ask three related questions from the perspective of machine learning: 1) is it possible to match the identity of faces and voices? (inference) 2) if so, is it possible to generate a face image from a speech signal? (generation) 3) can we find the relationship between the two modalities only using cross-modal self-supervision with the data "in-the-wild"? to answer these questions, we design a two-step approach where the inference and generation stages are trained sequentially. first, the two inference networks for each modality (speech encoder and face encoder) are trained to extract the useful features and to compute the cross-modal identity matching probability. then the trained inference networks are transferred to the generation stage to pass the information about the speech, which helps the generation network to output the face image from the conditioned speech.we believe, however, that it is impossible to perfectly reconstruct all the attributes in the image of a person's face through the characteristics of the voice alone. this is due to factors that are clearly published as a conference paper at iclr 2020 unrelated to one's voice, such as lighting, glasses, and orientation, that also exist in the natural face image. to reflect the diverse characteristics presented in the face images "in-the-wild", we therefore model the generation process by incorporating two latent factors into the neural network. more specifically, we adopted conditional generative adversarial networks (cgans) (mirza & osindero, 2014; so that the generator network can produce a face image that is dependent not only on the paired speech condition, but also on the stochastic variable. this allows the latent factors that contribute to the overall facial attributes to be disentangled into two factors: one that is relevant to the voice and the other that is irrelevant.adopting cgans negligently still leaves a few problems. for example, the condition in a cgans framework is typically provided as embedded conditional vectors through the embedding look-up table for one-hot encoded labels (brock et al., 2019;. the raw signals such as speech, however, cannot be taken directly from the embedding look-up table, so an encoder module is required. 
therefore, the trained speech encoder from the inference step is reused to output a pseudo conditional label that is used to extract meaningful information relevant to the corresponding face. then the generator and the discriminator are trained in an adversarial way by utilizing the pseudo-embedded conditional vectors obtained from the trained speech encoder in the first step.another problem with applying the conventional cgans for generating faces from voice arises from the fact that the distinction between different speakers can be quite subtle, which calls for a need for a more effective conditioning method. to mitigate this problem, we propose a new loss function, relativistic identity cgans (relidgans) loss, with modification of the relativistic gans (jolicoeur-martineau, 2019), allowing us to generate the face with a more distinct identity. each step will be described in greater detail in section 3.our contributions can be summarized as follows:1. we propose simple but effective end-to-end inference networks trained on audio-visual data without any labels in a self-supervised manner that perform a cross-modal identity matching task.inspired by the aforementioned scientific evidence, we would like to ask three related questions from the perspective of machine learning: 1) is it possible to match the identity of faces and voices? (inference) 2) if so, is it possible to generate a face image from a speech signal? (generation) 3) can we find the relationship between the two modalities only using cross-modal self-supervision with the data "in-the-wild"? to answer these questions, we design a two-step approach where the inference and generation stages are trained sequentially.where s denotes a speech segment, f denotes a face image, se denotes a speech embedding vector and f e denotes a face embedding vector. more specifically, randomly sampled gaussian noise z ∈ r 128 and speech condition c ∈ r 128 are concatenated and used as input for the generator function f f ake = g(z, c) to sample the face image conditioned on the speech.where f real and c + denotes the paired face and speech condition from data distribution, f f ake denotes the generated face sample conditioned on c + , and c − denotes the speech condition with mismatched identity to f real using negative sampling. random samples from (z, c) plane: the generated face images from diversely sampled z and fixed speech condition c is shown in fig. correlation between the generated samples and speech conditions: we performed a quantitative analysis to investigate the relationship between the speech condition and the face image it generated. to this end, we first sampled two different random variables z 1 , z 2 and two different speech condition vectors c 1 , c 2 of different speakers. then the two generated samples were encoded using the trained inference network f(•) to extract the face embeddings f e 1 = f(f f ake 1 ), f e 2 = f(f f ake 2 ). we then calculated the cosine distance between the speech condition vectors (cd(c 1 , c 2 )), and the cosine distance between the two face embeddings (cd(f e 1 , f e 2 )).furthermore, we examined whether the cd between two speech condition vectors gets closer when controlling the gender of the speaker, and also the face images generated from the two speech condition vectors. 
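The conditioning and the correlation analysis described above reduce to two small operations: concatenating a 128-dimensional Gaussian noise vector with the 128-dimensional speech condition to form the generator input, and comparing cosine distances between speech conditions and between the face embeddings of the images generated from them. A minimal sketch follows; the random tensors are placeholders, not real encoder outputs.

import torch
import torch.nn.functional as F

def generator_input(speech_condition):
    """Concatenate z ~ N(0, I) in R^128 with the 128-d speech condition,
    as described, to form the generator input (sketch only)."""
    z = torch.randn(speech_condition.size(0), 128)
    return torch.cat([z, speech_condition], dim=1)   # (batch, 256)

def cosine_distance(a, b):
    # cd(a, b) = 1 - cosine similarity, used to compare speech conditions
    # and the face embeddings of the corresponding generated images.
    return 1.0 - F.cosine_similarity(a, b, dim=-1)

# Hypothetical analysis step: two speakers' condition vectors c1, c2 and the
# face embeddings fe1, fe2 of the images generated from them.
c1, c2 = torch.randn(1, 128), torch.randn(1, 128)
fe1, fe2 = torch.randn(1, 128), torch.randn(1, 128)
print(cosine_distance(c1, c2), cosine_distance(fe1, fe2))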
qta 2.where 1 denotes an identity function, n denotes a index of test sample, f n,1 denotes the generated sample g(z, s(s n ))), f n,2 denotes the ground truth image paired with the speech s n , and n denotes the total number of test samples.where f n denotes a generated face image from a speech segment s n,1 , and s n,2 denotes a negatively selected speech segment.lastly, we conducted a face retrieval experiment in which the goal was to accurately retrieve a real face image for the speaker using the generated image from their speech segment as a query. to retrieve the closest face image out of the 5,000 samples, the trained face encoder was used to measure the feature distance between the generated image and each of the 5,000 images. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/281.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/281.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc49bbd811c11e6b6da852cc547ded4e7c2c3f82 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/281.txt @@ -0,0 +1 @@ +chronic kidney disease (ckd) is a global public health problem affecting approximately 10% of the world's population , . the percentage of prevalence of ckd in china is 10.8% , and the range of prevalence is 10%-15% in the united states . according to another study, this percentage has reached 14.7% in the mexican adult general population . this disease is characterised by a slow deterioration in renal function, which eventually causes a complete loss of renal function. ckd does not show obvious symptoms in its early stages. therefore, the disease may not be detected until the kidney loses about 25% of its function . in addition, ckd has high morbidity and mortality, with a global impact the associate editor coordinating the review of this manuscript and approving it for publication was hao ji.on the human body . it can induce the occurrence of cardiovascular disease , . ckd is a progressive and irreversible pathologic syndrome . hence, the prediction and diagnosis of ckd in its early stages is quite essential, it may be able to enable patients to receive timely treatment to ameliorate the progression of the disease.machine learning refers to a computer program, which calculates and deduces the information related to the task and obtains the characteristics of the corresponding pattern . this technology can achieve accurate and economical diagnoses of diseases; hence, it might be a promising method for diagnosing ckd. it has become a new kind of medical tool with the development of information technology and has a broad application prospect because of the rapid development of electronic health record . in the medical field, machine learning has already been used to detect human body status , analyze the relevant factors of the disease and diagnose various diseases. for example, the models built by machine learning algorithms were used to diagnose heart disease , , diabetes and retinopathy , , acute kidney injury , , cancer and other diseases , . in these models, algorithms based on regression, tree, probability, decision surface and neural network were often effective. in the field of ckd diagnosis, hodneland et al. utilized image registration to detect renal morphologic changes . vasquez-morales et al. established a classifier based on neural network using large-scale ckd data, and the accuracy of the model on their test data was 95% . 
in addition, most of the previous studies utilized the ckd data set that was obtained from the uci machine learning repository. chen et al. used k-nearest neighbor (knn), support vector machine (svm) and soft independent modelling of class analogy to diagnose ckd, knn and svm achieved the highest accuracy of 99.7% . in addition, they used fuzzy rule-building expert system, fuzzy optimal associative memory and partial least squares discriminant analysis to diagnose ckd, and the range of accuracy in those models was 95.5%-99.6% . their studies have achieved good results in the diagnosis of ckd. in the above models, the mean imputation is used to fill in the missing values and it depends on the diagnostic categories of the samples. as a result, their method could not be used when the diagnostic results of the samples are unknown. in reality, patients might miss some measurements for various reasons before diagnosing. in addition, for missing values in categorical variables, data obtained using mean imputation might have a large deviation from the actual values. for example, for variables with only two categories, we set the categories to 0 and 1, but the mean of the variables might be between 0 and 1. polat et al. developed an svm based on feature selection technology, the proposed models reduced the computational cost through feature selection, and the range of accuracy in those models was from 97.75%-98.5% . j. aljaaf et al. used novel multiple imputation to fill in the missing values, and then mlp neural network (mlp) achieved an accuracy of 98.1% . subas et al. used mlp, svm, knn, c4.5 decision tree and random forest (rf) to diagnose ckd, and the rf achieved an accuracy of 100% . in the models established by boukenze et al., mlp achieved the highest accuracy of 99.75% . the studies of , focus mainly on the establishment of models and achieve an ideal result. however, a complete process of filling in the missing values is not described in detail, and no feature selection technology is used to select predictors as well. almansour et al. used svm and neural network to diagnose ckd, and the accuracy of the models was 97.75% and 99.75%, respectively . in the models established by gunarathne et al., zero was used to fill out the missing values and decision forest achieved the best performance with the accuracy was 99.1% .to summarize the previous ckd diagnostic models, we find that most of them suffering from either the method used to impute missing values has a limited application range or relatively low accuracy. therefore, in this work, we propose a methodology to extend application range of the ckd diagnostic models. at the same time, the accuracy of the model is further improved. the contributions of the proposed work are as follows.1) we used knn imputation to fill in the missing values in the data set, which could be applied to the data set with the diagnostic categories are unknown.2) logistic regression (log), rf, svm, knn, naive bayes classifier (nb) and feed forward neural network (fnn) were used to establish ckd diagnostic models on the complete ckd data sets. the models with better performance were extracted for misjudgment analysis.3) an integrated model that combines log and rf by using perceptron was established and it improved the performance of the component models in ckd diagnosis after the missing values were filled by knn imputation.knn imputation is used to fill in the missing values. 
to our knowledge, this is the first time that knn imputation has been used for the diagnosis of ckd. in addition, building an integrated model is also a good way to improve the performance of separate individual models. the proposed methodology might effectively deal with the scene where patients are missing certain measurements before being diagnosed. in addition, the resulting integrated model shows a higher accuracy. therefore, it is speculated that this methodology might be applicable to the clinical data in the actual medical diagnosis.the rest of the paper is organized as follows. in section ii, we describe the preliminaries. the establishments of the individual model and the integrated model are described in section iii. in section iv, we evaluate and discuss the performance of the integrated model. in section v, we summarize the work and its contributions, including future works.1) we used knn imputation to fill in the missing values in the data set, which could be applied to the data set with the diagnostic categories are unknown.2) logistic regression (log), rf, svm, knn, naive bayes classifier (nb) and feed forward neural network (fnn) were used to establish ckd diagnostic models on the complete ckd data sets.3) an integrated model that combines log and rf by using perceptron was established and it improved the performance of the component models in ckd diagnosis after the missing values were filled by knn imputation. for the numerical variables, the missing values are filled using the median of the corresponding variable in k complete samples, and for the category variables, the missing values are filled using the category that has the highest frequency in the corresponding variable in k complete samples. in each calculation of the model training, the algorithm selects the best combination of parameters to establish the model by grid search. for the nb and the knn, the performance of the models when using knn imputation is not very ideal compared to using random imputation or mean and mode imputation in tables5 and 6. the above result also proves the validity of the knn imputation, since knn imputation does improve the accuracy of some models, such as log, rf and svm (table5).the probability distribution of the samples in the complete ckd data set (at k = 11), the horizontal axis and the vertical axis represent the probabilities that the samples were judged as notckd by the log and the rf, respectively.in order to verify whether the integrated model can improve the performance of the component models, we first used the same random number seed 1234 to establish and evaluate the integrated model on each complete data, and the confusion matrices returned are shown in table9. the average results of the integrated models and two component models are shown in table10, and the integrated model has the best performance in detecting the two categories because table9. we also compared the methodology in this study (log, rf and integrated model) with the other models on the same data in previous studies (called contrast models), and the comparison result is shown in table11. by the use of knn imputation, log, rf, svm and fnn could achieve better performance than the cases when the random imputation and mean and mode imputation were used. in addition, the ckd data set is composed of mixed variables (numeric and category), so the similarity evaluation methods based on mixed data could be used to calculate the similarity between samples, such as general similarity coefficient. 
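A sketch of the integrated model described above, in which a perceptron combines the notckd-probabilities produced by LOG and RF. Only the LOG + RF + perceptron structure and the random seed 1234 come from the text; the split ratio, the use of the same training data for the perceptron, and the scikit-learn estimators are assumptions.

from sklearn.linear_model import LogisticRegression, Perceptron
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

def fit_integrated_model(X, y, seed=1234):
    """LOG and RF are fitted first; a perceptron then combines their
    predicted notckd-probabilities into the final decision (sketch)."""
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=seed)
    log = LogisticRegression(max_iter=1000).fit(X_tr, y_tr)
    rf = RandomForestClassifier(random_state=seed).fit(X_tr, y_tr)

    def probs(X_part):
        # Two meta-features per sample: P(notckd) from each component model.
        return list(zip(log.predict_proba(X_part)[:, 1], rf.predict_proba(X_part)[:, 1]))

    combiner = Perceptron(random_state=seed).fit(probs(X_tr), y_tr)
    return combiner.score(probs(X_te), y_te)   # accuracy of the integrated model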
after unsupervised imputation of missing values in the data set by using knn imputation, the integrated model could achieve a satisfactory accuracy. in addition, due to there are only two categories (ckd and notckd) of data samples in the data set, the model can not diagnose the severity of ckd. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/282.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/282.txt new file mode 100644 index 0000000000000000000000000000000000000000..c840396de883f5fcf28150add13b421e0562c231 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/282.txt @@ -0,0 +1 @@ +coconut sugar, or coco sugar (see fig. 1), is a natural sweetener that has become a popular alternative for people struggling with diabetes mellitus. with a glycemic index (gi) of 35, this sugar is categorized as a low gi food that claims to help prevent obesity and support weight maintenance . the on-growing demand for coconut sugar has led to the development of the philippine national standard for the coconut sap sugar. in 2010, the standard released established a system of grading and classifying the said commodity. the physical characteristics of the coconut sugar involve color, odor, taste and purity. chemical properties include, water activity, glucose, fructose, sucrose and ash . as listed in table 1, coconut sugar is classified in terms of quality. the on-growing demand for coconut sugar has led to the development of the philippine national standard for the coconut sap sugar. in light with this, the proponents strive to provide a novel way, which can assess the quality of the coconut sugar with high accuracy using machine learning approach.rf is a robust machine learning technique used both for classification and regression that uses ensemble learning algorithm.a total of 350 images of coconut sugar were acquired from two coconut sugar production agencies including philippine coconut authority (pca) and united coconut association of the philippines (ucap). these rgb values were used as input and the classification of the images as "superior", "good", and "reject" was used as the output. cross-validation was used to evaluate the performance of the model using different classification methods. in classifying the coconut sugar quality, numbers 1, 2, and 3 were used and they refer to superior, good, and reject qualities, respectively. the accuracy achieved for all 10 folds were averaged to evaluate the model that used a certain classification method. the accuracy and running time were averaged to compute the overall evaluation of each of the machine learning models.05% accuracies, respectively. fig.6running times (sec) achieved using different classification methods figure7describes the plot of the accuracy versus the running time of models tested; in which models that having higher accuracies tend to have higher running times. the values were fed into the algorithms using python and the scikitlearn library.according to the accuracy versus running time graph, sgd is the preferred method in this study due to its high accuracy and ample running time. similarly, this approach can be integrated with computer vision to allow the real time assessment of coconut sugar. 
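The evaluation protocol described above (RGB features, quality labels 1-3 for superior, good and reject, 10-fold cross-validation with scikit-learn, SGD as the preferred classifier) can be sketched as follows; the random feature matrix merely stands in for the 350 acquired images.

import numpy as np
from sklearn.linear_model import SGDClassifier
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

# Hypothetical feature matrix: one RGB triple per coconut sugar image with
# labels 1 (superior), 2 (good), 3 (reject).  Real features would be computed
# from the 350 images; random values stand in here.
rng = np.random.default_rng(0)
X = rng.uniform(0, 255, size=(350, 3))
y = rng.integers(1, 4, size=350)

# 10-fold cross-validation of an SGD classifier, the method the study prefers
# for its balance of accuracy and running time.
model = make_pipeline(StandardScaler(), SGDClassifier(random_state=0))
scores = cross_val_score(model, X, y, cv=10)
print(scores.mean())

Averaging the ten fold accuracies, and timing the same loop for each candidate classifier, reproduces the kind of accuracy-versus-running-time comparison described above.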
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/283.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/283.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e1b7d63ef6726c94e60f9c1e8a9669b3fe81303 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/283.txt @@ -0,0 +1 @@ +david marr has defined vision as the process of discovering from images what is present in the world, and where it is . the combination of what and where captures the essence of an image at the semantic level and therefore, also plays a crucial role when defining the desired output of image synthesis tools.in this work, we employ scene graphs with per-object location and appearance attributes as an accessible and easyto-manipulate way for users to express their intentions, see fig. 1. the what aspect is captured hierarchically: objects are defined as belonging to a certain class (horse, tree, boat, etc.) and as having certain appearance attributes. these attributes can be (i) selected from a predefined set obtained by clustering previously seen attributes, or (ii) copied from a sample image. the where aspect, is captured by what is often called a scene graph, i.e., a graph where the scene objects are denoted as nodes, and their relative position, such as "above" or "left of", are represented as edge types.our method employs a dual encoding for each object in the image. the first part encodes the object's placement and captures a relative position and other global image features, as they relate to the specific object. it is generated based on the scene graph, by employing a graph convolution net-work, followed by the concatenation of a random vector z. the second part encodes the appearance of the object and can be replaced, e.g., by importing it from the same object as it appears in another image, without directly changing the other objects in the image. this copying of objects between images is done in a semantic way, and not at the pixel level.in the scene graph that we employ, each node is equipped with three types of information: (i) the type of object, encoded as a vector of a fixed dimension, (ii) the location attributes of the objects, which denote the approximate location in the generated image, using a coarse 5 × 5 grid and its size, discretized to ten values, and (iii) the appearance embedding mentioned above. the edges denote relations: "right of", "left of", "above", "below", "surrounding", and "inside". the method is implemented within a convenient user interface, which supports a dynamic placement of objects and the creation of a scene graph. the edge relations are inferred automatically, given the relative position of the objects. this eliminates the need for mostly unnecessary user intervention. rendering is done in real time, supporting the creation of novel scenes in an interactive way, see fig. 1 and more examples in the supplementary.the neural network that we employ has multiple subparts, as can be seen in fig. 2: (i) a graph convolutional network that converts the input scene graph to a per-object embedding to their location. (ii) a cnn that converts the location embedding of each object to an object's mask. (iii) a parallel network that converts the location embedding to a bounding box location, where the object mask is placed. (iv) an appearance embedding cnn that converts image information into an embedding vector. 
this process is done off-line and when creating a new image, the vectors can be imported from other images, or selected from a set of archetypes. (v) a multiplexer that combines the object masks and the appearance embedding information, to create a one multidimensional tensor, where different groups of layers denote different objects. (vi) an encoder-decoder residual network that creates the output image.our method is related to the recent work of , who create images based on scene graphs. their method also uses a graph convolutional network to obtain masks, a mul-figure 1. an example of the image creation process. (top row) the schematic illustration panel of the user interface, in which the user arranges the desired objects. (2nd row) the scene graph that is inferred automatically based on this layout. (3rd row) the layout that is created from the scene graph. (bottom row) the generated image. legend for the gui colors in the top row: purple -adding an object, green -resizing it, red -replacing its appearance. (a) a simple layout with a sky object, a tree and a grass object. all object appearances are initialized to a random archetype appearance. (b) a giraffe is added. (c) the giraffe is enlarged. (d) the appearance of the sky is changed to a different archetype. (e) a small sheep is added. (f) an airplane is added. (g) the tree is enlarged.tiplexer that combines the layout information and a subsequent encoder-decoder architecture for obtaining the final image. there are, however, important differences: (i) by separating the layout embedding from the appearance embedding, we allow for much more control and freedom to the object selection mechanism, (ii) by adding the location attributes as input, we allow for an intuitive and more direct user control, (iii) the architecture we employ enables better quality and higher resolution outputs, (iv) by adding stochasticity before the masks are created, we are able to generate multiple results per scene graph, (v) this effect is amplified by the ability of the users to manipulate the resulting image, by changing the properties of each individual object, (vi) we introduce a mask discriminator, which plays a crucial role in generating plausible masks, (vii) another novel discriminator captures the appearance encoding in a counterfactual way, and (viii) we introduce feature matching based on the discriminator network and (ix) a perceptual loss term to better capture the appearance of an object, even if the pose or shape of that object has changed.in the scene graph that we employ, each node is equipped with three types of information: (i) the type of object, encoded as a vector of a fixed dimension, (ii) the location attributes of the objects, which denote the approximate location in the generated image, using a coarse 5 × 5 grid and its size, discretized to ten values, and (iii) the appearance embedding mentioned above. 
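A minimal sketch of the per-node encoding described above: a learned class embedding, a one-hot location vector over the coarse 5 x 5 grid plus ten size bins, and an appearance embedding. Apart from the 25 grid cells and 10 size bins, all dimensionalities are assumptions chosen for illustration.

import torch
import torch.nn as nn

NUM_CLASSES, CLASS_DIM = 172, 128      # illustrative sizes, not from the text
GRID_CELLS, SIZE_BINS = 25, 10         # 5 x 5 location grid and 10 size bins
APPEARANCE_DIM = 64                    # appearance embedding size (assumed)

class NodeEncoder(nn.Module):
    """Per-node encoding: learned class embedding + one-hot location/size
    attributes + appearance embedding (sketch only)."""
    def __init__(self):
        super().__init__()
        self.class_embedding = nn.Embedding(NUM_CLASSES, CLASS_DIM)

    def forward(self, class_id, grid_cell, size_bin, appearance):
        o = self.class_embedding(class_id)            # (CLASS_DIM,)
        loc = torch.zeros(GRID_CELLS + SIZE_BINS)
        loc[grid_cell] = 1.0                          # coarse 5 x 5 position
        loc[GRID_CELLS + size_bin] = 1.0              # discretised size
        return torch.cat([o, loc, appearance])        # full node feature vector

encoder = NodeEncoder()
node = encoder(torch.tensor(3), grid_cell=12, size_bin=4,
               appearance=torch.randn(APPEARANCE_DIM))
print(node.shape)    # CLASS_DIM + 25 + 10 + APPEARANCE_DIM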
there are, however, important differences: (i) by separating the layout embedding from the appearance embedding, we allow for much more control and freedom to the object selection mechanism, (ii) by adding the location attributes as input, we allow for an intuitive and more direct user control, (iii) the architecture we employ enables better quality and higher resolution outputs, (iv) by adding stochasticity before the masks are created, we are able to generate multiple results per scene graph, (v) this effect is amplified by the ability of the users to manipulate the resulting image, by changing the properties of each individual object, (vi) we introduce a mask discriminator, which plays a crucial role in generating plausible masks, (vii) another novel discriminator captures the appearance encoding in a counterfactual way, and (viii) we introduce feature matching based on the discriminator network and (ix) a perceptual loss term to better capture the appearance of an object, even if the pose or shape of that object has changed. also related is the synthesis of images from a given input layout of bounding boxes (and not one that is inferred by a network from a scene graph), which was very recently studied byfor small 64x64 images.each object i in the input scene graph is associated with a single node n i = , where o i ∈ r d1 is a learned encoding of the object class and l i ∈ {0, 1} d2+d3 is a location vector. the object appearance a i ∈ r d5 of object i seen during training, is obtained by applying a cnn a to a (ground truth) cropped image i ′ i of that object, resized to a fixed resolution of 64 × 64. the second one is obtained by incorporating the same ground truth bounding box and mask in a counterfactual way, by replacing a i with a k , where a k is an appearance embedding of an object image i ′ k of a different object from the same class c i , i. the perceptual losscompares the generated image with the ground truth training image p ′ , using the activations f u of the vgg networkat layer u in a set of predefined layers u .the goal of the compound loss is to make sure that the generated image p, given a ground truth layout tensor t ′ is indistinguishable from the real image p ′ , and that this is true, even if the layout tensor t is based on estimated bounding boxes and masks (unlike t ′ ). recall that i ′ i are ground truth crops of images, obtained from the ground truth image p ′ , using the ground truth bounding boxes b ′ . the other feature matching loss l fm-image compares the activations of d image (t, p) with those of the ground truth layout tensor and image d image (t ′ , p ′ ). on the test image, we report the accuracy of this classifier applied to the object images that are generated, using the bounding box of the image's layout. each row presents the scene layout, the ground truth image from which the layout was extracted, our method's results, where the object attributes present a random archetype and the location attributes are zeroed (l i = 0), our results when using the ground truth layout of the image (including masks and bounding boxes), our results where the appearance attributes of each object are copied from the ground truth image and the location vectors are zero, and our results where the location attributes coarsely describe the objects' locations and the appearance attributes are randomly selected from the archetypes. 
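Among the loss terms listed above, the perceptual loss compares VGG activations of the generated and ground-truth images at a set of predefined layers. A minimal sketch follows, assuming an L1 distance and an arbitrary choice of layers, neither of which is specified in the text; a real perceptual loss would also load pretrained ImageNet weights rather than the uninitialised network used here to keep the sketch self-contained.

import torch
import torch.nn.functional as F
from torchvision.models import vgg19

class PerceptualLoss(torch.nn.Module):
    """L1 distance between VGG feature maps of the generated and ground-truth
    images at a few fixed layers (sketch; layer choice is an assumption)."""
    def __init__(self, layer_indices=(3, 8, 17)):
        super().__init__()
        self.features = vgg19(weights=None).features.eval()   # no pretrained weights in this sketch
        for p in self.features.parameters():
            p.requires_grad_(False)
        self.layer_indices = set(layer_indices)

    def forward(self, generated, target):
        loss, x, y = 0.0, generated, target
        for i, layer in enumerate(self.features):
            x, y = layer(x), layer(y)
            if i in self.layer_indices:
                loss = loss + F.l1_loss(x, y)
        return loss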
using ground truth location and appearance attributes, the resulting image better matches the test image.5presents results in which the appearance is fixed to the mean appearance vector for all objects of that class and the location attribute vectors l i are sampled from the gaussian distributions mentioned above. user study following, we perform a user study to compare with the baseline method the realism of the generated image, the adherence to the scene graph, as well as to verify that the objects in the scene graph appear in the output image. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/284.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/284.txt new file mode 100644 index 0000000000000000000000000000000000000000..0dfc1a86f3d0156a69add0cc9c9f09a160041f36 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/284.txt @@ -0,0 +1 @@ +with the rapid development of electronic information, computer input has become more and more common, but handwriting is still an irreplaceable way for people to transfer information. as a link combining handwritten characters and computer input, handwriting recognition has received more and more attention for its practicability. handwriting recognition technology is the basis of handwriting interpolation and handwriting identification. in the past decade, machine learning and pattern recognition have extended many highly intelligent handwriting recognition classifications, including artificial neural networks (ann) , support vector machine (svm) , modified quadratic discriminant function (mqdf) and hidden markov model , etc.arabic numerals are the only universal symbols in the world, and they have an indelible contribution to the development of world science and culture. convolutional neural networks have advantages in image processing in all neural networks, so this paper designs a handwritten digit recognition system based on convolutional neural networks.handwritten digit recognition is divided into online recognition and offline recognition. online recognition refers to letting the computer recognize the characters written on the handwriting device, and recognize it on the basis of handwriting strokes and stroke order. the theoretical research of this technology is very mature. offline recognition is the recognition of characters written on paper by the computer. there is no stroke and stroke order as the basis. only the picture recognition provides less information, so it is still facing greater challenges. the system designed in this paper is a handwritten digit recognition system that can realize offline recognition. the flow chart of the handwritten digit recognition system based on convolutional neural network designed in this paper is shown in figure 1. in the past decade, machine learning and pattern recognition have extended many highly intelligent handwriting recognition classifications, including artificial neural networks (ann), support vector machine (svm), modified quadratic discriminant function (mqdf)and hidden markov model, etc. convolutional neural networks have advantages in image processing in all neural networks, so this paper designs a handwritten digit recognition system based on convolutional neural networks.handwritten digit recognition is divided into online recognition and offline recognition. the system designed in this paper is a handwritten digit recognition system that can realize offline recognition. 
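A minimal sketch of a LeNet-5 style network of the kind the system is built on, with alternating convolution (C) and 2 x 2 pooling (S) layers followed by fully connected layers; the channel and unit counts follow the classic LeNet-5 layout, which the text cites but does not restate in detail.

import torch
import torch.nn as nn

class LeNet5Like(nn.Module):
    """LeNet-5 style network for 28 x 28 MNIST digits (sketch)."""
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 6, kernel_size=5, padding=2), nn.ReLU(),   # C1
            nn.AvgPool2d(2),                                        # S2: 2 x 2 pooling
            nn.Conv2d(6, 16, kernel_size=5), nn.ReLU(),             # C3
            nn.AvgPool2d(2),                                        # S4
        )
        self.classifier = nn.Sequential(
            nn.Flatten(),
            nn.Linear(16 * 5 * 5, 120), nn.ReLU(),
            nn.Linear(120, 84), nn.ReLU(),
            nn.Linear(84, 10),                                      # 10 digit classes
        )

    def forward(self, x):            # x: (batch, 1, 28, 28)
        return self.classifier(self.features(x))

print(LeNet5Like()(torch.randn(1, 1, 28, 28)).shape)   # torch.Size([1, 10])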
the flow chart of the handwritten digit recognition system based on convolutional neural network designed in this paper is shown in figure1. digit recognition module includes convolutional neural network (cnn) and recognition. the handwritten digit recognition system extracts the features of the handwritten digit pictures in the minst data set, performs recognition training, and then inputs the handwritten digits to be recognized for identification.the structure diagram of the artificial neural network composed of multiple perceptrons is shown in figure3: output input the artificial neural network was inspired by the biological neural network, and it simplifies the biological neural network. artificial neural network is optimized through a learning method based on mathematical statistics, so artificial neural network is also a practical application of mathematical statistics. the deep learning network consists of three layers, namely the input layer, the hidden layer and the output layer. it can be seen from figure5that the convolutional neural network is mainly composed of the convolutional layer c and the pooling layer s. the role of the pooling operation in the convolutional neural network is to map the weighted average of 4 consecutive pixels (2 * 2 area) in the convolutional layer c to the point of s, which reduces the dimension of the matrix. the model is a convolutional neural network designed by yann lecun in 1998 for handwritten digit recognition.this paper designs a handwritten digit recognition system based on convolutional neural network. the training and recognition process of this system is completed by lenet-5 based convolutional neural network repeated convolution operation and pooling operation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/285.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/285.txt new file mode 100644 index 0000000000000000000000000000000000000000..9953eed6710956c2e84639f628b19288580ea0a8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/285.txt @@ -0,0 +1 @@ +aspect-based sentiment analysis (absa) aims at identifying the sentiment polarity towards the specific aspect in a sentence.an target aspect refers to a word or a phrase describing an aspect of an entity.for example, in the sentence "the decor is not special at all but their amazing food makes up for it", there are two aspect terms "decor" and "food", and they are associated with negative and positive sentiment respectively.recently, neural network methods have dominated the study of absa since these methods can be trained end-to-end and automatically learn important features.(wang et al., 2016) proposed to learn an embedding vector for each aspect, and these aspect embeddings were used to calculate the attention weights to capture important information with regard to the given aspects.(tang et al., 2016b) developed the deep memory network to compute the importance degree and text representation of each context word with multiple attention layers.(ma et al., 2017) introduced the interactive attention networks (ian) to interactively learn attentions in contexts and targets, and generated the representations for target and context words separately.(xue and li, 2018) proposed to extract sentiment features with convolutional neural networks and selectively output aspect related features for classification with gating mechanisms.subsequently, transformer (vaswani et al., 2017) and bert based methods (devlin et al., 2018) have 
shown high potentials on absa task.there are also several studies attempting to simulate the process of human reading cognition to further improve the performance of absa (lei et al., 2019;.so far, several absa datasets have been constructed, including semeval-2014 restaurant review dataset, laptop review dataset (pontiki et al., 2014) and twitter dataset (dong et al., 2014).although these three datasets have since become the benchmark datasets for the absa task, most sentences in these datasets consist of only one aspect or multiple aspects with the same sentiment polarity (see table 1) 2 , which makes aspect-based sentiment analysis degenerate to sentence-level sentiment analysis.based on our empirical observation, the sentence-level sentiment classifiers without considering aspects can still achieve competitive results with many recent absa methods (see textcnn and lstm in table 3).on the other hand, even advanced absa methods trained on these datasets can hardly distinguish the sentiment polarities towards different aspects in the sentences that contain multiple aspects and multiple sentiments.with the goal of advancing and facilitating research in the field of aspect-based sentiment analysis, in this paper, we present a new multi-aspect multi-sentiment (mams) dataset.in mams dataset, each sentence consists of at least two aspects with different sentiment polarities, making the proposed dataset more challenging compared with existing absa datasets.considering merely the sentence-level sentiment of the sentence will fail to achieve good performance on mams dataset.we empirically evaluate the stateof-the-art absa methods on mams dataset, the poor results demonstrate that the proposed mams dataset is more challenging than the semeval-2014 restaurant review dataset.we analyze the properties of recent absa methods, and propose new capsule networks (denoted as capsnet and capsnet-bert) to model the complicated relationship between aspects and contexts, which combine the strengths of recent nlp advances.experimental results show that the proposed methods achieve significantly better results than the state-of-the-art baseline methods on mams and semeval-14 restaurant datasets.our main contributions are summarized as follows: (1) we manually annotate a large-scale multi-aspect multi-sentiment dataset, preventing absa degenerating to sentence-level sentiment analysis.the release of it would push forward the research of absa.(2) we propose a novel capsule network based model to learn the complicated relationship between aspects and contexts.(3) experimental results show that the proposed method achieves significantly better results than the stateof-the-art baseline methods.aspect-based sentiment analysis (absa) aims at identifying the sentiment polarity towards the specific aspect in a sentence., 2014)and twitter dataset(dong et al., 2014).although these three datasets have since become the benchmark datasets for the absa task, most sentences in these datasets consist of only one aspect or multiple aspects with the same sentiment polarity (see table1) 2 , which makes aspect-based sentiment analysis degenerate to sentence-level sentiment analysis.on the other hand, even advanced absa methods trained on these datasets can hardly distinguish the sentiment polarities towards different aspects in the sentences that contain multiple aspects and multiple sentiments.in mams dataset, each sentence consists of at least two aspects with different sentiment polarities, making the proposed dataset more 
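For reference, aspect-level sentiment classification takes a sentence together with one target aspect and predicts a polarity for that aspect alone. The sketch below is a plain attention-pooling baseline that makes this input/output contract concrete; it is not the capsule network (CapsNet / CapsNet-BERT) proposed in the text, and all dimensions are assumptions.

import torch
import torch.nn as nn
import torch.nn.functional as F

class AspectAttentionClassifier(nn.Module):
    """Context word vectors are pooled with attention weights computed against
    an aspect embedding; the pooled vector is mapped to three polarity classes
    (positive / negative / neutral).  Illustrative baseline only."""
    def __init__(self, dim=128, num_polarities=3):
        super().__init__()
        self.score = nn.Linear(2 * dim, 1)       # scores each (word, aspect) pair
        self.out = nn.Linear(dim, num_polarities)

    def forward(self, context, aspect):
        # context: (batch, seq_len, dim); aspect: (batch, dim)
        expanded = aspect.unsqueeze(1).expand_as(context)
        weights = F.softmax(self.score(torch.cat([context, expanded], dim=-1)), dim=1)
        pooled = (weights * context).sum(dim=1)  # aspect-aware sentence vector
        return self.out(pooled)                  # polarity logits

model = AspectAttentionClassifier()
logits = model(torch.randn(2, 20, 128), torch.randn(2, 128))
print(logits.shape)    # torch.Size([2, 3])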
challenging compared with existing absa datasets.experimental results show that the proposed methods achieve significantly better results than the state-of-the-art baseline methods on mams and semeval-14 restaurant datasets.our main contributions are summarized as follows: (1) we manually annotate a large-scale multi-aspect multi-sentiment dataset, preventing absa degenerating to sentence-level sentiment analysis.data annotation we create two versions of mams dataset for two subtasks of aspect-based sentiment analysis: aspect-term sentiment analysis (atsa) and aspect-category sentiment analysis (acsa).for atsa, we invited three experienced researchers who work on natural language processing (nlp) to extract aspect terms in the sentences and label the sentiment polarities with respect to the aspect terms.three experienced nlp researchers were asked to identify the aspect categories described in given sentences and determine the sentiment polarities towards these aspect categories., w a m } or an aspect category a c , aspect-level sentiment classification aims to predict the sentiment polarity y ∈ {1, ..in this paper, we present mams, a challenge dataset for aspect-based sentiment analysis, in which each sentence contains multiple aspects with different sentiment polarities.the proposed mams dataset could prevent aspect-level sentiment classification degenerating to sentence-level sentiment classification, which might push forward the researches on aspect-based sentiment analysis. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/286.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/286.txt new file mode 100644 index 0000000000000000000000000000000000000000..19630303f2212ee9f9881494c0e58713dfb95b7e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/286.txt @@ -0,0 +1 @@ +coronavirus disease 2019 (covid-19) is a highly contagious viral disease, and the world health organization (who) declared that the covid-19 was an international public health emergency . first described covid-19 in december 2019 in wuhan, hubei province, china. the ongoing outbreak of covid-19 is affecting multiple countries in the world . until mar 11th, 2020, 118,326 cases of covid-19 were diagnosed worldwide, including 80,955 cases in china and 37,371 cases outside china. additionally, 4,292 deaths have been triggered by covid-19 . many countries are facing increased pressures on health care resources. up to now, a great deal of studies is focused on using traditional statistical methods to identify risk factors of covid-19 patients. as an example, older age, pre-existing diseases, abnormal liver function, and tlymphocyte count were correlated closely with covid-19 progression and prognosis . however, traditional statistical methods could not rapidly identify changes in covid-19 patient's status during the outbreak. therefore, there is a crucial need to progress a useful forecasting tool for covid-19 and quickly categorize illness severity.currently, the importance of computational, mathematical, and surveillance-based methods for the study of infectious diseases continues to increase . machine learning-based methods are increasingly being used to diagnose disease, develop prediction models, and identify risk factors . 
the benefits of machine learning include improving health professionals' ability to establish diagnosis or prognosis; it will replace much of the work of radiologists and anatomic/clinical pathologists; it will improve the accuracy of diagnosis . therefore, machine learning is an indispensable tool for clinicians seeking to understand patient-individualized treatment better. we use a machine learning strategy to identify covid-19 patients at high risk for severe illness and prioritize their hospitalization. it may contribute to reduce patient mortality and reduce the burden on healthcare resources.optimization depends on the form of the problem we deal with. it is almost possible to reach any form according to the decision-makers preferences. these problems can be modeled as many-objective , memetic , robust , multiobjective , large scale , fuzzy , and single-objective optimization. these forms and the growing demand for their solvers have raised many challenges in data science. core problems demanding optimization are not limited to healthcare systems, but technologies such as the neural networks , water-energy optimization , image boost optimization , decision-making systems , temperature optimization , deployment optimization in sensor networks , sustainable development , parameter optimization , optimal resource allocation , deep learning tasks , mechanical parameters optimization , and many other potentials and connected domains . one of the main classes are solvers with an evolutionary basis. this optimization algorithm works based on an evolving swarm with stochastic updating rules. they have found a good application effect in many scenarios . at present, as a new single-objective algorithm, harris hawks optimizer (hho) 1 has been widely concerned like other optimizers and their applications such as particle swarm optimizer (pso) , whale optimizer (woa) , differential search (ds) , differential evolution (de) , slime mould algorithm (sma) 2 , monarch butterfly optimization (mbo) , and moth search algorithm (msa) .hho not only has strong plasticity but also has been widely used in other fields. many hho variants have been proposed recently . elaziz et al. proposed an improved harris-hawks optimizer (hho) to solve a multilevel image segmentation problem's global optimization problem and determine the optimal threshold. a large number of results and comparisons show that ssa has a strong ability to improve hho. shubham et al. put four effective strategies into the traditional hho, such as putting forward a nonlinear prey energy parameter, different fast diving, greedy selection mechanism, and learning based 1 download the codes at https://aliasgharheidari.com/hho.html on opposites. experimental results show that the proposed m-hho can be a useful optimization tool for solving global optimization problems. shao, kaixuan et al. proposed a new rolling bearing fault diagnosis method based on variational mode decomposition (vmd), time-shifting multiscale discrete entropy (tsmde), and support vector machine (svm) optimized by vibration harris hawks optimization algorithm. the outcomes show that this routine has better diagnostic performance than other comparison methods. tikhamarine et al. combined multi-layer perceptron (mlp) neural network and least squares support vector machine (lssvm) data-driven technology with advanced natural heuristic optimizer (hho) to simulate rainfall-runoff relationship. 
the experimental results show that the mixture of hho and lssvm can obtain high accuracy of runoff prediction.machine learning is widely used in the medical field. for example, abbasi, babak et al. proposed a new method for solving large-scale stochastic operation optimization problems (sops) using a machine learning model and applying the proposed decision-making method of blood unit transportation in the hospital network. the results show that compared with the current strategy, the average daily cost is reduced by 29% with the trained neural network model. in comparison, the average daily cost can be reduced by 37% with the proper optimal strategy. amiri, yupapin et al. used photonic crystal structure and machine learning technology to calculate the concentration of potassium chloride, urea, and glucose (pug) in human blood to achieve accurate measurement. moreover, at the paper's finale, a mathematical model is revealed to obtain the output power and potassium chloride changes, urea, and glucose concentrations. hakan ayyıldız et al. used red blood cell index and machine learning technology, including support vector machine (svm) and k-nearest neighbor (knn), to differentiate ida from î -thalassemia. instead, it employs the neighborhood component analysis feature selection (nca) technique to select the dataset's features. their obtained results point to that the rbc indices can result in higher efficacy than those described in the other works. banerjee, abhirup et al. used machine learning (ml), artificial neural network (ann) , and a humble statistical test to recognize sars-cov-2 positive patients. this new method can significantly improve initial screening for patients with limited pcr based diagnostic tools. rammurthy et al. designed the whho by combining whale optimization algorithm (woa) with hho. they applied it to the tumor automatic classification model. experiments show that the method of deep cnn based on whho is better than other methods.this study aims to develop efficient frameworks using the harris hawk's optimizer (hho), which trains a fuzzy knearest neighbor (fknn) model. then, the optimized hho-fknn is substantiated for the first time to diagnose the severity of covid-19. the active model is built using the info about patients' necessary information, pre-existing diseases, symptoms, immune indexes, and complications. in the developed method (hho-fknn), hho was employed to train an fknn model and to explore the critical risk factors of covid-19 infected people at the same time. in the experiment, hho-fknn is compared with the other machine learning methods like grey wolf optimizer (gwo)based fknn (gwo-fknn), support vector machines (svm), and random forest (rf). it is shown that the established hho-fknn method performs much better than its peers in terms of four evaluation metrics, including the classification accuracy (acc), sensitivity, specificity, and matthews correlation coefficients (mcc). hence, the main contributions of this study can be listed as follows:a we use a machine learning strategy to identify covid-19 patients at high risk for severe illness and prioritize their hospitalization. 
core problems demanding optimization are not limited to healthcare systems, but technologies such as the neural networks, water-energy optimization, image boost optimization, decision-making systems, temperature optimization, deployment optimization in sensor networks, sustainable development, parameter optimization, optimal resource allocation, deep learning tasks, mechanical parameters optimization, and many other potentials and connected domains. at present, as a new single-objective algorithm, harris hawks optimizer (hho)1has been widely concerned like other optimizers and their applications such as particle swarm optimizer (pso), whale optimizer (woa), differential search (ds), differential evolution (de), slime mould algorithm (sma) 2, monarch butterfly optimization (mbo), and moth search algorithm (msa).proposed a new rolling bearing fault diagnosis method based on variational mode decomposition (vmd), time-shifting multiscale discrete entropy (tsmde), and support vector machine (svm) optimized by vibration harris hawks optimization algorithm.proposed a new method for solving large-scale stochastic operation optimization problems (sops) using a machine learning model and applying the proposed decision-making method of blood unit transportation in the hospital network. in the developed method (hho-fknn), hho was employed to train an fknn model and to explore the critical risk factors of covid-19 infected people at the same time.in terms of these specific evaluation indexes, hho-fknn without the fs model has the best results and the smallest variance, followed by gwo-fknn with fs and hho-fknn with fs are the same. by further observing the curve in figure4, it was found that the hho-fknn without the fs model is liable to fall into a local optimum, and the accuracy is not as high as that of hho-fknn with fs. with the spread of covid-19 and an increase in the number of cases, mortality was higher among covid-19 infected individuals with comorbidities such as hypertension, diabetes mellitus, coronary heart disease, cerebral infarction, and chronic lung disease. reported that 20% of covid-19 patients have diabetes mellitus. a study with many patients from multiple centers in china suggested that compared to the non-severe patients, alt and ast levels were significantly increased in the severe patients. in a study of intensive care unit (icu) patients, it was shown that ast levels increased significantly in patients with icu compared with non-severe patients.based on patients' necessary information, pre-existing diseases, symptom, immune index, and complication, this study established a useful hho-fknn model to distinguish the severity of covid-19, of which innovations are as follows: on the one hand, it is proposed for the first time to use the immune index to distinguish the severity of covid-19, and on the other hand, the hho algorithm is used for the first time to screen the parameters and features of the fknn simultaneously. according to the experimental results, the proposed method shows higher prediction accuracy and more stable performance than other machine learning algorithms on the covid-19 severity prediction problem to select the key factors with more discriminating ability simultaneously.for future work, the proposed hho can be wrapped with other popular learning methods such as extreme learning machines, support vector machines, and convolutional neural networksfor the covid-19 diagnosis task. 
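The HHO-FKNN framework above wraps a fuzzy k-nearest neighbor classifier inside the optimizer, which tunes k, the fuzzy strength m, and a feature subset. Below is a minimal, hedged sketch of only the FKNN prediction rule (a standard fuzzy-membership formulation; the paper's exact variant and its coupling with HHO may differ):

```python
import numpy as np

def fknn_predict(X_train, y_train, X_test, k=5, m=2.0, eps=1e-12):
    """Fuzzy k-NN sketch: neighbours weighted by 1 / distance^(2/(m-1))."""
    X_train, y_train = np.asarray(X_train, float), np.asarray(y_train)
    classes = np.unique(y_train)
    preds = []
    for x in np.asarray(X_test, float):
        d = np.linalg.norm(X_train - x, axis=1)
        nn = np.argsort(d)[:k]                                # k nearest neighbours
        w = 1.0 / (d[nn] ** (2.0 / (m - 1.0)) + eps)
        memberships = {c: w[y_train[nn] == c].sum() / w.sum() for c in classes}
        preds.append(max(memberships, key=memberships.get))   # class with largest membership
    return np.array(preds)
```

In a wrapper such as HHO-FKNN, the optimizer's candidate vector would encode (k, m) together with a binary feature mask, and its fitness would be a cross-validated score of this classifier.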
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/287.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/287.txt new file mode 100644 index 0000000000000000000000000000000000000000..50f919674481f16c00aa76673670e5797eacf527 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/287.txt @@ -0,0 +1 @@ +the aim of a recommender system is to provide suggestions to a set of users on items that might be interesting for them. recommendation systems are commonly found in e-commerce , (where users purchase goods like books, clothes or games online), usually implemented through collaborative filtering methods . these work by comparing similar items or similar users based on user ratings. if two users like the same items they are likely similar, and if two items are liked by the same users, those items are probably similar as well. however, as this method does not take into account the contents, new items cannot be recommended. content-based recommenders can be used to overcome some of these issues by looking at the item in question and finding similarity between items based on inherit properties . a hybrid approach can also be taken, to combine e.g. collaborative information, content features and demographics . a more detailed study into the current limitations and possible extensions of recommendation systems can be found in . † these two authors contributed equally to the work.the integration of recommendation systems into video games is a relatively new area of research. previous work has mostly focused on game recommendation engines, which present players with suggestions on alternative titles based on the games they have already played , . but it is also possible to use recommendation systems to increase player engagement in a game. in modern free-to-play games, users can buy a wide range of virtual items with real money (in-app purchases, iaps). however, sometimes they can be overwhelmed by the number of items offered and the diversity of playstyles, and this can lead to an increase in the churn rate-as players start to find the contents too difficult and are unable to progress within the game. item recommendation systems can help prevent this problem by offering players a more direct route to the items that could be appealing or useful for them, thereby improving their purchasing and general ingame experience. this may ultimately result into increased revenue by increasing player retention, iaps and the conversion rate from free to paying users.to achieve these goals, it is essential to recommend each player the right item-one that fits both their current state and their playing behavior-at the right time. and this is possible because (in contrast to other applications where very limited information is available) every action performed by a player within the game gets recorded. this offers a unique opportunity not only to obtain accurate predictions on the player's in-game behaviour (for example on when and at what level they will leave the game, see and ) but also to offer them personalized recommendations of items that are likely relevant to them.there are previous papers related to item recommendation systems. introduces a recommendation system for the massively multiplayer online first-person shooter game destiny, where players get suggestions on those items that best fit their play style and might improve their performance. they apply similarity measures to global descriptors like total kill count or kill/death ratio. 
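The collaborative-filtering idea referenced above treats items liked by the same users as similar. As a hedged illustration of that principle (not the method of any specific cited system), a minimal item-based cosine-similarity recommender over a user-item rating matrix:

```python
import numpy as np

def item_similarity(ratings):
    """Cosine similarity between item columns of a (users x items) rating matrix."""
    norms = np.linalg.norm(ratings, axis=0, keepdims=True)
    normed = ratings / np.clip(norms, 1e-12, None)
    return normed.T @ normed                     # (items x items) similarity matrix

def recommend(ratings, user_idx, top_n=3):
    """Score unseen items by their similarity to items the user already rated."""
    sim = item_similarity(ratings)
    user = ratings[user_idx]
    scores = sim @ user                          # similarity weighted by the user's ratings
    scores[user > 0] = -np.inf                   # do not re-recommend items already rated
    return np.argsort(scores)[::-1][:top_n]
```

Because the scores depend only on co-rating patterns, items that nobody has rated yet get no score, which is the cold-start limitation the passage notes content-based methods are used to address.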
clusters for the player "base" and "cooldown" stats were derived through k-means clustering, whereas archetypal analysis , (which clusters by extreme values rather than centroids ) was used to find distinct playstyles. similar analyses were done for the massively multiplayer online role-playing game tera and the multiplayer strategy game battlefield 2: bad company 2 or the game tomb raider: underworld . in all these cases, arxiv:1806.04900v2 14 aug 2018 players were clustered by their playing behaviour; although no recommendation system was built, behavioral profiling via clustering may be very useful in offering recommendations based on similarity between users.however, unsupervised clustering methods remains a challenge. in particular, a significant amount of game-specific knowledge, is required to find adequate features that can separate players into the right number of clusters. if two users like the same items they are likely similar, and if two items are liked by the same users, those items are probably similar as well. previous work has mostly focused on game recommendation engines, which present players with suggestions on alternative titles based on the games they have already played,. however, sometimes they can be overwhelmed by the number of items offered and the diversity of playstyles, and this can lead to an increase in the churn rate-as players start to find the contents too difficult and are unable to progress within the game. item recommendation systems can help prevent this problem by offering players a more direct route to the items that could be appealing or useful for them, thereby improving their purchasing and general ingame experience. this offers a unique opportunity not only to obtain accurate predictions on the player's in-game behaviour (for example on when and at what level they will leave the game, seeand) but also to offer them personalized recommendations of items that are likely relevant to them.introduces a recommendation system for the massively multiplayer online first-person shooter game destiny, where players get suggestions on those items that best fit their play style and might improve their performance. similar analyses were done for the massively multiplayer online role-playing game tera and the multiplayer strategy game battlefield 2: bad company 2or the game tomb raider: underworld.ml] 14 aug 2018 players were clustered by their playing behaviour; although no recommendation system was built, behavioral profiling via clustering may be very useful in offering recommendations based on similarity between users.while there are several approaches to the problem of developing recommendation systems, here we will explore a different avenue: our aim is to provide a method that predicts the next items a player will purchase, and use this information to recommend them other items.additionally, the model should be able to scale to millions of players; however, if we generate very large feature vectors (with thousands of features) and sample multiple labels per user, we could end up with datasets with over a billion samples (a thousand samples per user). as the model is trained over all players, once players are in a similar state the model can learn to predict and recommend the right item at the right time for each individual player. 
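The approach described here frames recommendation as predicting the next item a player will purchase from their logged in-game state, and is evaluated by whether the purchased item appears among the top one, two, or three predicted items (as checked next). A hedged sketch of that framing; the feature set and the random-forest choice are placeholders rather than the paper's exact model:

```python
import numpy as np
from sklearn.ensemble import RandomForestClassifier

def topk_hit_rate(model, X_test, y_test, k=3):
    """Fraction of players whose actually purchased item is among the k most probable items."""
    proba = model.predict_proba(X_test)
    topk = np.argsort(proba, axis=1)[:, ::-1][:, :k]      # column indices into model.classes_
    hits = [y in model.classes_[row] for y, row in zip(y_test, topk)]
    return float(np.mean(hits))

# X: per-player state features (level, currency, past purchases, ...); y: next item bought.
# model = RandomForestClassifier(n_estimators=200).fit(X_train, y_train)
# print(topk_hit_rate(model, X_test, y_test, k=3))
```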
we check whether the player actually purchased the item that had the highest probability, any of the two items with the two highest probabilities or any of the three items with the three highest probabilities, as per the prediction.1. discussion an item recommendation system for games is essential to provide players with individual rewards or incentives to increase engagement, to maximize in-app purchases and to increase cross-selling and up-selling.while predictions were made only for a small set of items, the model is trivially extendable to run on hundreds of items, and can be used both for items purchased with real money and for in-game virtual purchases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/288.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/288.txt new file mode 100644 index 0000000000000000000000000000000000000000..41dc8df65f8bb881c4cbd7c04c57b55c3c3aa20e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/288.txt @@ -0,0 +1 @@ +as far as india is concern, the agriculture sector has a significant contribution to the gdp of india. the agriculture sector employs a large number of people all across the world. for food, a person has to depend on agriculture products. as we see in the past, to determine the type of plant diseases, farmers use the expert's people in the agriculture field, but that process is time-consuming and not always give the correct result. because it is always true in the majority of cases that a model gives correct results as compare to humans. as we see today, work is done in the area of the agriculture sector using image processing to improve the quality of products and help farmers to protect their crops from diseases, and it can extract relevant information from it. there are various methods for plant disease detection based on artificial neural network (ann) , support vector machine (svm) , k-nearest neighbors (knn) , convolutional neural network (cnn) , fuzzy logic (fl) and its combination. cnn based plant disease detection gives good accuracy, but it needs a large amount of data for training, which increases computational cost. svm based plant disease detection is useful for both accurate separable and non-definite separable data but cannot return an anticipation credence value like logistic regression does. knn based plant disease detection is useful for inputs where the probability distribution is unknown, but it is sensitive to localized data and takes considerable long computational time due to lazy learning. fuzzy logic-based plant disease detection is a heuristic modular way for defining any non-definite hegemony system and can attain a higher degree of cybernation but is not clearly understood and has no standard tuning and no stability criteria. neural network (nn) is a supervised learning approach, and it recognizes the relationship among data by a process that behaves as the human brain operates. neural network adjusts itself as input changes to produce the best result without the need to change the output criteria. neural network contains layers having inter-connected neurons or perceptrons, and the output layer has to classify input features into various classes.ann is a non-argument model, while most emblem methods are argument models that need a higher backcloth of emblem. in nn based bani-ahmed et al., proposed plant disease detection method using color transformation scheme his and segmentation by kmean with four clusters . 
other, varthini et al., detected plant diseases using color transformation scheme his, threshold masking for green pixel of leaf image followed by segmentation of input image into patch size of 32*32, and classification is completing by combination of the ann and svm . kamlu et al., proposed the spot and catalog of plant diseases using the threshold, k-means clustering, and ann . moreover, pradhan et al., review the paper based on major types of neural networks with hyperspectral data . so, nn based methods are resilient and can be accessible for both regression and catalog problems. it is good to miniature with no accurate data with a large number of inputs. therefore we proposed an improved nn based plant disease identification approach, based on cie lab, k-means, elbow, and augmentation. the paper is sort as follows: the proposed approach for improved nn describes in section 2 and section 3, the results are discussing for the robustness of the proposed approach over the considered plant disease methods. lastly, the paper cum in the conclusion section. there are various methods for plant disease detection based on artificial neural network (ann), support vector machine (svm), k-nearest neighbors (knn), convolutional neural network (cnn), fuzzy logic (fl)and its combination. cnn based plant disease detection gives good accuracy, but it needs a large amount of data for training, which increases computational cost. svm based plant disease detection is useful for both accurate separable and non-definite separable data but cannot return an anticipation credence value like logistic regression does. knn based plant disease detection is useful for inputs where the probability distribution is unknown, but it is sensitive to localized data and takes considerable long computational time due to lazy learning. fuzzy logic-based plant disease detection is a heuristic modular way for defining any non-definite hegemony system and can attain a higher degree of cybernation but is not clearly understood and has no standard tuning and no stability criteria. in nn based bani-ahmed et al., proposed plant disease detection method using color transformation scheme his and segmentation by kmean with four clusters. moreover, pradhan et al., review the paper based on major types of neural networks with hyperspectral data.our proposed approach consists of various steps, firstly we do image acquisition and color transformation by rgb to cie lab for making images device-independent such that images captured with different color space like standard-rgb and adobe-rgb gives same results, secondly segmentation is used to extract lesion region using k-means and elbow, thirdly apply the augmentation in direction of 90°, 180° and 270° to increase the dataset and avoid the over-fitting, fourthly extracted the feature using 1st order and 2nd order statistics, fifthly prepared the training and testing data set and finally classify the plant diseases using improved nn. 
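A condensed, hedged sketch of the first two steps of the pipeline just described (device-independent CIE Lab conversion followed by k-means lesion segmentation), using OpenCV and scikit-learn; the cluster count and the choice of channels are illustrative assumptions:

```python
import cv2
import numpy as np
from sklearn.cluster import KMeans

def segment_lesion(image_bgr, n_clusters=4):
    """BGR image (as loaded by cv2.imread) -> CIE Lab, then k-means on the a*b* channels."""
    lab = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2LAB)
    ab = lab[:, :, 1:3].reshape(-1, 2).astype(np.float32)          # colour channels only
    labels = KMeans(n_clusters=n_clusters, n_init=10, random_state=0).fit_predict(ab)
    return labels.reshape(lab.shape[:2])                           # per-pixel cluster map

# The elbow step would sweep n_clusters and keep the value where the drop in within-cluster
# inertia levels off; the lesion cluster is then picked (for example by its mean colour)
# before first- and second-order statistical features are extracted for the classifier.
```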
in our proposed approach, we are used nn as classifier to identify the disease from leaf image and use various training algorithms in our improved nn is given below:.simulation of our proposed approach and existing plant disease detection on,andperformed on five general plant disease, namely, bacterial leaf spot, early scorch, frog-leaf spot, fungal disease, and sunburn disease (fig.table7represents the accuracy of the species considered plant disease detection method,, and the proposed approach.in this paper, we proposed an improved nn based plant disease identification. further accuracy can be improved for disease detection in plant leaf with the help of the fusion of various classifiers and integration of the fuzzy logic to the neural network. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/289.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/289.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebfc39ae989d319f21f482a399172392b8e59e2d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/289.txt @@ -0,0 +1 @@ +this paper brings together new algorithms and insights to construct a framework for robust and extremely rapid object detection. this framework is demonstrated on, and in part motivated by, the task of face detection. toward this end we have constructed a frontal face detection system which achieves detection and false positive rates which are equivalent to the best published results . this face detection system is most clearly distinguished from previous approaches in its ability to detect faces extremely rapidly. operating on 384 by 288 pixel images, faces are de-tected at 15 frames per second on a conventional 700 mhz intel pentium iii. in other face detection systems, auxiliary information, such as image differences in video sequences, or pixel color in color images, have been used to achieve high frame rates. our system achieves high frame rates working only with the information present in a single grey scale image. these alternative sources of information can also be integrated with our system to achieve even higher frame rates.there are three main contributions of our object detection framework. we will introduce each of these ideas briefly below and then describe them in detail in subsequent sections.the first contribution of this paper is a new image representation called an integral image that allows for very fast feature evaluation. motivated in part by the work of papageorgiou et al. our detection system does not work directly with image intensities . like these authors we use a set of features which are reminiscent of haar basis functions (though we will also use related filters which are more complex than haar filters). in order to compute these features very rapidly at many scales we introduce the integral image representation for images. the integral image can be computed from an image using a few operations per pixel. once computed, any one of these harr-like features can be computed at any scale or location in constant time.the second contribution of this paper is a method for constructing a classifier by selecting a small number of important features using adaboost . within any image subwindow the total number of harr-like features is very large, far larger than the number of pixels. in order to ensure fast classification, the learning process must exclude a large majority of the available features, and focus on a small set of critical features. 
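Returning to the integral image introduced as the first contribution above: it lets any rectangular pixel sum, and hence any Haar-like feature, be evaluated in constant time. A small generic sketch (not the authors' implementation):

```python
import numpy as np

def integral_image(img):
    """ii(x, y) = sum of all pixels above and to the left of (x, y), inclusive."""
    return img.cumsum(axis=0).cumsum(axis=1)

def box_sum(ii, r0, c0, r1, c1):
    """Sum of img[r0:r1+1, c0:c1+1] using at most four array references."""
    s = ii[r1, c1]
    if r0 > 0:
        s -= ii[r0 - 1, c1]
    if c0 > 0:
        s -= ii[r1, c0 - 1]
    if r0 > 0 and c0 > 0:
        s += ii[r0 - 1, c0 - 1]
    return s

# A two-rectangle Haar-like feature is then simply box_sum(light region) - box_sum(dark region),
# at any scale or position, without rescanning the pixels.
```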
motivated by the work of tieu and viola, feature selection is achieved through a simple modification of the adaboost procedure: the weak learner is constrained so that each weak classifier returned can depend on only a single feature . as a result each stage of the boosting process, which selects a new weak classifier, can be viewed as a feature selection process. adaboost provides an effective learning algorithm and strong bounds on generalization performance .the third major contribution of this paper is a method for combining successively more complex classifiers in a cascade structure which dramatically increases the speed of the detector by focusing attention on promising regions of the image. the notion behind focus of attention approaches is that it is often possible to rapidly determine where in an image an object might occur . more complex processing is reserved only for these promising regions. the key measure of such an approach is the "false negative" rate of the attentional process. it must be the case that all, or almost all, object instances are selected by the attentional filter.we will describe a process for training an extremely simple and efficient classifier which can be used as a "supervised" focus of attention operator. the term supervised refers to the fact that the attentional operator is trained to detect examples of a particular class. in the domain of face detection it is possible to achieve fewer than 1% false negatives and 40% false positives using a classifier constructed from two harr-like features. the effect of this filter is to reduce by over one half the number of locations where the final detector must be evaluated.those sub-windows which are not rejected by the initial classifier are processed by a sequence of classifiers, each slightly more complex than the last. if any classifier rejects the sub-window, no further processing is performed. the structure of the cascaded detection process is essentially that of a degenerate decision tree, and as such is related to the work of geman and colleagues .an extremely fast face detector will have broad practical applications. these include user interfaces, image databases, and teleconferencing. in applications where rapid frame-rates are not necessary, our system will allow for significant additional post-processing and analysis. in addition our system can be implemented on a wide range of small low power devices, including hand-helds and embedded processors. in our lab we have implemented this face detector on the compaq ipaq handheld and have achieved detection at two frames per second (this device has a low power 200 mips strong arm processor which lacks floating point hardware).the remainder of the paper describes our contributions and a number of experimental results, including a detailed description of our experimental methodology. discussion of closely related work takes place at the end of each section. toward this end we have constructed a frontal face detection system which achieves detection and false positive rates which are equivalent to the best published results. in other face detection systems, auxiliary information, such as image differences in video sequences, or pixel color in color images, have been used to achieve high frame rates. in order to ensure fast classification, the learning process must exclude a large majority of the available features, and focus on a small set of critical features. 
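The cascade described above chains progressively more complex classifiers and rejects a sub-window as soon as any stage says no, so most of the image is discarded cheaply. A hedged sketch of the evaluation logic only; the stage scoring functions and thresholds are placeholders that would come from the AdaBoost training of each stage:

```python
def cascade_detect(subwindow, stages):
    """stages: list of (score_fn, threshold) pairs, cheapest first."""
    for score_fn, threshold in stages:
        if score_fn(subwindow) < threshold:
            return False          # rejected early; later, costlier stages are never evaluated
    return True                   # survived every stage: report a detection

# Each score_fn would be a boosted sum of single-feature weak classifiers computed from the
# integral image; thresholds are set so each stage keeps nearly all true faces while
# discarding a large share of non-face sub-windows.
```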
in the domain of face detection it is possible to achieve fewer than 1% false negatives and 40% false positives using a classifier constructed from two harr-like features. in our lab we have implemented this face detector on the compaq ipaq handheld and have achieved detection at two frames per second (this device has a low power 200 mips strong arm processor which lacks floating point hardware). initial experiments demonstrated that a frontal face classifier constructed from 200 features yields a detection rate of 95% with a false positive rate of 1 in 14084. a positive result from the first classifier triggers the evaluation of a second classifier which has also been adjusted to achieve very high detection rates. further processing can take any form such as additional stages of the cascade (as in our detection system) or an alternative detection system. in most cases classifiers with more features will achieve higher detection rates and lower false positive rates. in principle one could define an optimization framework in which: i) the number of classifier stages, ii) the number of features in each stage, and iii) the threshold of each stage, are traded off in order to minimize the expected number of evaluated features. each stage is trained by adding features until the target detection and false positives rates are met ( these rates are determined by testing the detector on a validation set).3a notion similar to the cascade appears in the face detection system described by rowley et al.'s two network face system is the fastest existing face detector. while this basic insight is very valuable, in their implementation it is necessary to first evaluate some feature detector at every location.each classifier in the cascade was trained with the 4916 training faces (plus their vertical mirror images for a total of 9832 training faces) and 10,000 non-face sub-windows (also of size 24 by 24 pixels) using the adaboost training procedure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/29.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/29.txt new file mode 100644 index 0000000000000000000000000000000000000000..b466401dc93d7b2137febcccb214899d6e07ee8e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/29.txt @@ -0,0 +1 @@ +the international energy agency (iea) has estimated that buildings were responsible for 28% of global emissions in 2019 . commercial buildings are a significant energy consumer, responsible for more than 7% of global final energy consumption and up to 18% in some industrialised economies . a typical commercial building has been shown to waste 30% of its energy consumption . incorporating digital technologies in buildings can reduce energy consumption, increase operational efficiency , and improve occupant comfort and productivity .a building automation system (bas) consists of 'points' representing 'sensors', 'actuators', 'settings' and 'setpoints'. these points are described by short text fields containing unstructured metadata that may provide information such as sensor type and location and what equipment it is associated with. novel artificial intelligence (ai) and machine learning (ml) solutions for managing buildings are being developed, such as occupancy detection , fault detection and diagnosis , and grid support services . however, deployment of these applications and algorithms is hamstrung because the bas point metadata was not designed to support these tasks . 
due to device memory limitations and to reduce input keystrokes, point text metadata is often heavily abbreviated, and the abbreviated forms can vary significantly from building to building. mapping to one of many machine-readable schemas, such as brick or haystack , is required, and this is usually performed manually.machine learning approaches to automate metadata mapping show promise. however, they require large amounts of data from many buildings in order to learn the multitude of ways engineers can express the same concept, and they're prone to making unrecoverable mistakes . in this paper, we use a hybrid approach of slot rules defined by finite-state transducers (fsts) combined with a statistical language model. this approach allows us to more easily include expert knowledge in our model whilst retaining the power of statistical techniques to learn from data. in this paper, we use a hybrid approach of slot rules defined by finite-state transducers (fsts) combined with a statistical language model. these approaches all train a local model, a model that is trained on a single building only.in this work, we perform a preliminary investigation into the use of fsts combined with a statistical language model to extract semantic information from bas text metadata. this introduces inefficiencies that we address by using a finite state transducer slot model with a language model. we use this property to generate a lattice that contains multiple potential output sequences and then use a language model to select the most likely sequence. the slot weights are either fixed, or dynamically assigned by a language model depending on the slot class. the matched text can also include delimiter characters, and the output is a sequence of expanded tags; for example inputting saf results in the output supply air fan.a language model is a probabilistic model that enables us to score a tagset based on the likelihood of the sequence of symbols. the tagset language model is trained on hvac phrases such as zone temperature setpoint and the equipment language model is a character model trained on equipment names such as ahu-01. in both cases, a count-based language model of order=2 was used, and perplexity used to compute the slot weights.in order to evaluate the feasibility of using fsts to model bas point metadata, we manually labelled 3,000 randomly selected points from 10 commercial buildings. finally, we tested the model on air handling unit points from five buildings that were not part of the dataset used to build the model. a secondary issue was due to equipment such as 'raf_19_2' being labelled as a tagset (return air fan) rather than as equipment.in this study, we have demonstrated that it is possible to use a hybrid of rules with a language model to train a global model to extract semantic information from bas text metadata.a promising alternative is continual learning, where a single model trained on multiple buildings is continuously updated with new information. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/290.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/290.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c943aff89d9610613f85e892eac289054c46cac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/290.txt @@ -0,0 +1 @@ +summarization is the task of condensing a piece of text to a shorter version that contains the main information from the original. 
there are two broad approaches to summarization: extractive and abstractive. extractive methods assemble summaries exclusively from passages (usually whole sentences) taken directly from the source text, while abstractive methods may generate novel words and phrases not featured in the source text -as a human-written abstract usually does. the extractive approach is easier, because copying large original text (truncated): lagos, nigeria (cnn) a day after winning nigeria's presidency, muhammadu buhari told cnn's christiane amanpour that he plans to aggressively fight corruption that has long plagued nigeria and go after the root of the nation's unrest. buhari said he'll "rapidly give attention" to curbing violence in the northeast part of nigeria, where the terrorist group boko haram operates. by cooperating with neighboring nations chad, cameroon and niger, he said his administration is confident it will be able to thwart criminals and others contributing to nigeria's instability. for the first time in nigeria's history, the opposition defeated the ruling party in democratic elections. buhari defeated incumbent goodluck jonathan by about 2 million votes, according to nigeria's independent national electoral commission. the win comes after a long history of military rule, coups and botched attempts at democracy in africa's most populous nation.baseline seq2seq + attention: unk unk says his administration is confident it will be able to destabilize nigeria's economy. unk says his administration is confident it will be able to thwart criminals and other nigerians. he says the country has long nigeria and nigeria's economy.pointer-gen: muhammadu buhari says he plans to aggressively fight corruption in the northeast part of nigeria. he says he'll "rapidly give attention" to curbing violence in the northeast part of nigeria. he says his administration is confident it will be able to thwart criminals.pointer-gen + coverage: muhammadu buhari says he plans to aggressively fight corruption that has long plagued nigeria. he says his administration is confident it will be able to thwart criminals. the win comes after a long history of military rule, coups and botched attempts at democracy in africa's most populous nation.figure 1: comparison of output of 3 abstractive summarization models on a news article. the baseline model makes factual errors, a nonsensical sentence and struggles with oov words muhammadu buhari. the pointer-generator model is accurate but repeats itself. coverage eliminates repetition. the final summary is composed from several fragments.chunks of text from the source document ensures baseline levels of grammaticality and accuracy. on the other hand, sophisticated abilities that are crucial to high-quality summarization, such as paraphrasing, generalization, or the incorporation of real-world knowledge, are possible only in an abstractive framework (see figure 5).due to the difficulty of abstractive summarization, the great majority of past work has been extractive (kupiec et al., 1995;paice, 1990;saggion and poibeau, 2013). however, the recent success of sequence-to-sequence models (sutskever figure 2: baseline sequence-to-sequence model with attention. the model may attend to relevant words in the source text to generate novel words, e.g., to produce the novel word beat in the abstractive summary germany beat argentina 2-0 the model may attend to the words victorious and win in the source text. 
et al., 2014), in which recurrent neural networks (rnns) both read and freely generate text, has made abstractive summarization viable rush et al., 2015;zeng et al., 2016). though these systems are promising, they exhibit undesirable behavior such as inaccurately reproducing factual details, an inability to deal with out-of-vocabulary (oov) words, and repeating themselves (see figure 1).in this paper we present an architecture that addresses these three issues in the context of multi-sentence summaries. while most recent abstractive work has focused on headline generation tasks (reducing one or two sentences to a single headline), we believe that longer-text summarization is both more challenging (requiring higher levels of abstraction while avoiding repetition) and ultimately more useful. therefore we apply our model to the recently-introduced cnn/ daily mail dataset (hermann et al., 2015;, which contains news articles (39 sentences on average) paired with multi-sentence summaries, and show that we outperform the stateof-the-art abstractive system by at least 2 rouge points.our hybrid pointer-generator network facilitates copying words from the source text via pointing (vinyals et al., 2015), which improves accuracy and handling of oov words, while retaining the ability to generate new words. the network, which can be viewed as a balance between extractive and abstractive approaches, is similar to gu et al.'s (2016) copynet and miao and blunsom's (2016) forced-attention sentence compression, that were applied to short-text summarization. we propose a novel variant of the coverage vector (tu et al., 2016) from neural machine translation, which we use to track and control coverage of the source document. we show that coverage is remarkably effective for eliminating repetition.in this section we describe (1) our baseline sequence-to-sequence model, (2) our pointergenerator model, and (3) our coverage mechanism that can be added to either of the first two models. note that the pointer and the coverage mechanism introduce very few additional parameters to the network: for the models with vocabulary size 50k, the baseline model has 21,499,600 parameters, the pointer-generator adds 1153 extra parameters (w h * , w s , w x and b ptr in equation 8), and coverage adds 512 extra parameters (w c in equation 11).figure9: the baseline model incorrectly substitutes dutch for new zealand (perhaps reflecting the european bias of the dataset), fabricates irish, and struggles with out-of-vocabulary words saili and aucklandbased. louis van gaal is close to delivering his first-season aim of returning man united into champions league . louis van gaal is close to delivering his unk aim of returning man united into the premier league top four . louis van gaal is close to delivering his unk aim of returning man united into champions league .pointer-generator, no coverage: louis van gaal is close to delivering his first-season aim of returning man united into champions league. louis van gaal is close to delivering his first-season aim of returning man united into champions league. louis van gaal is close to delivering his first-season aim of returning man united into champions league .figure10: in this example, both our baseline model and final model produce a completely abstractive first sentence, using a novel word beat. 
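Before the final example, a hedged numerical sketch of the pointer-generator mixing step and the coverage penalty discussed above: the final word distribution blends the vocabulary distribution with copy probabilities from attention via a generation probability p_gen, and coverage penalizes re-attending to already-covered source positions. Shapes are illustrative and the extended-vocabulary handling of out-of-vocabulary source words is omitted:

```python
import numpy as np

def final_distribution(p_vocab, attention, src_token_ids, vocab_size, p_gen):
    """P(w) = p_gen * P_vocab(w) + (1 - p_gen) * sum of attention over source positions with token w."""
    p_final = p_gen * p_vocab                                  # generation part
    copy = np.zeros(vocab_size)
    np.add.at(copy, src_token_ids, (1.0 - p_gen) * attention)  # scatter copy probabilities
    return p_final + copy

def coverage_penalty(attention, coverage):
    """Per-step coverage loss: sum_i min(a_i, c_i), where c accumulates past attention."""
    return np.minimum(attention, coverage).sum()
```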
star striker jackson martinez and ricardo quaresma were one of many porto players who looked perplexed as they were hugged by fans before they making their way on to the team bus -set upon a cacophony of fiercely-proud chanting . it was the first time that porto , who had been unbeaten in this season 's tournament up until tuesday night , had reached the quarter-finals of the champions league since the 2008-09 season .pointer-generator, no coverage: porto star striker jackson martinez was one of many players to look perplexed by their warm reception.pointer-generator, with coverage: porto star striker jackson martinez was one of many players to look perplexed by their warm reception . figure11: the baseline model makes several factual inaccuracies: it claims porto beat bayern munich not vice versa, the score is changed from 7-4 to 2-0, jackson is changed to james and a heroes reception is replaced with a trophy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/291.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/291.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c518ab9d3fb55cc22063e9473aadc8b0a6669c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/291.txt @@ -0,0 +1 @@ +the fixed camera feed is (frame by frame) given to an image processing unit at the base which tries to: 1) recognize the preregistered face of the instructor and 2) identify his/her skeleton for pose detection. once the face is recognized and the associated skeleton is detected, tracking the instructor's position begins. a set of features are also extracted from the identified skeleton (which is the output of the deep neural network). more specifically, 12 features are measured from the network output and are fed to a pretrained pose classifier. the result of classification is the instructor's pose which can be handed over to the camera controller to take appropriate actions. however, since current cnns mostly take still images and classification of consecutive frames does not always yield similar results, to have a smooth shooting experience, we designed a markov model for instructor's poses. similar to markov chains, the instructor's pose is memoryless and transition to the next pose only depends on the current pose. some transitions are impossible and some have low probabilities. for example, one cannot sit right after being detected to be facing the blackboard. but transition to sitting is possible when the instructor is facing the students.at each markov state, the received sequence of images (frames) can be considered as evidences. given the current state, one can find the conditional probability of each potentional state happening, and based on the map estimation, change the state/pose. details of the markov model will be given in the following sections.the summary of the above process is presented as a flowchart in figs. 2 and 3. note that fig. 3 shows parts of the high-level flowchart of fig. 2 in more detail. recording starts with sensing the wearable gadget of the instructor (and possibly cross correlating it with the class timetable). then, the fixed camera feed is searched for the instructor's face and once it is detected and the associated skeleton is extracted, position tracking is triggered. the loop continuously tracks the instructor and scans for any pose changes. the result is used by the ptz camera controller to direct the recording. 
it is worth mentioning that pose detection is a straightforward task and the flow of the algorithm in fig. 3 will not be interrupted or stuck. if tracking is successful but the skeleton is missed, the previous pose is used until the algorithms catch up. the pose detection result as well as instructor's position tracking information are handed over to the ptz camera controller to orchestrate the filming process.the contributions of this article can be summarized as follows: 1) designing a low-cost two-camera framework for automatic recording of classes; 2) employment of deep neural networks for skeleton detection and defining a set of features for instructor's pose classification; 3) enhancement of classification accuracy by using a corrective markov chain and a bayesian map estimator; 4) creation of pose training datasets and implementation of the whole solution based on openpose library. in section ii, we present the proposed camera management framework including the deep pose detection, the markov model, and the bayesian estimator.the fixed camera feed is (frame by frame) given to an image processing unit at the base which tries to: 1) recognize the preregistered face of the instructor and 2) identify his/her skeleton for pose detection. similar to markov chains, the instructor's pose is memoryless and transition to the next pose only depends on the current pose. given the current state, one can find the conditional probability of each potentional state happening, and based on the map estimation, change the state/pose. given a lecture hall video dataset of enough duration, one can build a markov transition matrix with prior probabilities for the four defined states.a pure markov model might be a good presentation of the states and their transition probabilities; however, it does not help with the improvement of pose detection results per se. one can make a dataset of transitions from θ i to θ j given a video database of classroom recordings and by using the law of large numbers (llns), find the pmf of each sequence of w.in order to find p(θ i |θ j ) at every state, we went through 50 h of classroom videos and followed the method explained in section ii-b to count the number of transitions from every pose to every other.the next step to enhance the accuracy of the proposed system was applying the markov model and bayesian map estimator to the nn classifier output and find the most probable pose in practice.9(a) and (c) shows the output of the pose detection system when only an nn classifier is used after the skeletondetecting deep neural network. in this article, we presented a low-cost two-camera solution which relies on off-the-shelf deep pose detection libraries and is equipped with an extra processing layer with bayesian map estimators and markov models for better detection accuracy and promotion of output videos. the frames received from the fixed camera were used to track instructor's position also determine his/her pose. we used openpose deep network to detect the skeleton of instructor and extracted 12 features from that to help a neural network classifier detect his/her pose. 
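A minimal, hedged sketch of the corrective step described above, in which the per-frame classifier output is combined with a Markov prior over pose transitions and the MAP pose is selected; the state names and transition probabilities below are illustrative placeholders, not the values estimated from the 50 hours of classroom video:

```python
import numpy as np

POSES = ["facing_students", "facing_board", "sitting", "walking"]   # illustrative four states

# transition[i, j] = P(next pose = j | current pose = i); impossible moves get (near-)zero mass,
# e.g. no direct transition from facing the board to sitting.
TRANSITION = np.array([[0.70, 0.15, 0.10, 0.05],
                       [0.20, 0.75, 0.00, 0.05],
                       [0.15, 0.00, 0.80, 0.05],
                       [0.30, 0.20, 0.10, 0.40]])

def map_pose(current_state, classifier_probs):
    """MAP estimate: weight the neural classifier's evidence by the Markov prior."""
    posterior = TRANSITION[current_state] * np.asarray(classifier_probs)
    return int(np.argmax(posterior))

# current = POSES.index("facing_students")
# smoothed = map_pose(current, [0.20, 0.50, 0.25, 0.05])
```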
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/292.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/292.txt new file mode 100644 index 0000000000000000000000000000000000000000..007501c07a68b32ae3fc7f9e5d53bf1668a5318a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/292.txt @@ -0,0 +1 @@ +with the high-speed development of the internet, e-commerce has already penetrated as a part of our daily life. currently most information on the internet exists in unstructured or semi-structured forms, and presented the explosive growth. besides, along with the development of e-commerce, the more appeared reviews not only help potential consumers to make decisions of the products in a certain extent, but also provide some good feedback for merchant. for instance, when a consumer plans to select comfortable hotel for his trip, he will surf on the bbs or review sites to read the opinions from experienced consumers.however, for a popular product, the number of reviews can be in hundreds or even thousands. this makes it difficult for a potential customer to read them to make an informed decision on whether to purchase the product. it also makes it difficult for the merchant of the product to keep track and to manage customer opinions. moreover, there is also some noise such as misleading articles often appearing in the first few pages, which would affect the comprehensiveness of browsers' information acquisition and correctness of their judgment. nowadays, some websites have made quantized expressions for the sentiment orientation (so) of their local review information, such as amazon.com, which has coarse-grained rating (5-star scale) for each review on its website, and the 5-star is the best, while the 1-star is the worst, then giving the total rating.in the past few years, many researchers transfer their interests from text classification to sentiment analysis . current researches mainly focused on proposing novel analyzing and processing technologies based on different domains, according to the large scale review data acquired from internet, such as 1) using part-of-speech (pos) to tag the sentences, several researchers summarized some rules to focus the object of opinion and sentiment items , and utilized the distribution rules of pos to extract the corresponding template for sentiment analysis ; 2) some scholars also started to make researches in sentiment orientation of different sentence structures ; 3) kim and hovy used the technology of semantic role labeling (srl), which was mainly utilized in news and public's opinion analysis, to help identify two main components of opinions: opinion holder and topic . of course there were also some scholars aiming at the reviews in a special website, and proposing special processing methods, so as to obtain the sentiment orientation of reviews .this article mainly has two contributions: 1) to propose a supervised machine learning approach to realize sentiment classification of online hotel reviews; 2) to utilize tf-idf information to set up unigram feature, this information is more effective than frequency evaluated by our experiment results.the rest of this paper is organized as follows. section 2 presents related work. section 3 introduces the theory of machine learning of support vector machine. section 4 describes the experiment set and shows the experimental results. finally, we conclude and prospect our work in section 5. 
current researches mainly focused on proposing novel analyzing and processing technologies based on different domains, according to the large scale review data acquired from internet, such as 1) using part-of-speech (pos) to tag the sentences, several researchers summarized some rules to focus the object of opinion and sentiment items, and utilized the distribution rules of pos to extract the corresponding template for sentiment analysis; 2) some scholars also started to make researches in sentiment orientation of different sentence structures; 3) kim and hovy used the technology of semantic role labeling (srl), which was mainly utilized in news and public's opinion analysis, to help identify two main components of opinions: opinion holder and topic. of course there were also some scholars aiming at the reviews in a special website, and proposing special processing methods, so as to obtain the sentiment orientation of reviews.this article mainly has two contributions: 1) to propose a supervised machine learning approach to realize sentiment classification of online hotel reviews; 2) to utilize tf-idf information to set up unigram feature, this information is more effective than frequency evaluated by our experiment results. as such, sentiment classification (sentiment analysis or opinion mining) can perform the tasks of automatically understanding the online reviews.sentiment classification aims to extract the text of written reviews of customers for certain products or services by classifying the reviews into positive or negative opinions according to the polarity of the review. the method has been attempted in different domains such as movie reviews, product reviews, customer feedback reviews, and legal blogs. other potential applications include extracting opinions or reviews from discussion forums such as blogs, and integrating automatic review mining with search engines to automatically provide useful statistical data of search results or to build sentiment analysis systems for specific products or services. this method is generally applied to the document-level sentiment analysis, such as tsou makes statistics for the sentiment orientation of news articles and measures the opinions of celebrities from the public through calculating the sentiment orientation of the words and comprehensively considers the spread, density and semantic intensity of the polarity elements. although the sentiment analysis based on the simple statistic belongs to coarse-grained orientation classification, because of its simple realization and not bad accuracy, it occupies a particular weight in the beginning of the orientation study.the second one is based on machine learning, generating orientation classification model through the training of numerous labeled corpuses, and then classifying the test texts using generated model. pang adopted the technology of standard bag-of-words and three machine learning methods (naive bayes, maximum entropy classifications and support vector machine (svm)) to make text orientation classification for the film reviews, and respectively compared them with the outcome of manual classification. chaovalit and zhou also used the methods of machine learning and sentiment orientation to deeply mine the film reviews. whitelaw presented a method for sentiment classification based on extracting and analyzing appraisal groups which is represented as a set of attributes values in several task-independent semantic taxonomies. 
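A compact, hedged sketch of the setup proposed in this paper, an SVM over unigram features weighted either by raw frequency or by TF-IDF, using standard scikit-learn components; the two example reviews are placeholders for the hotel-review corpus:

```python
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.svm import LinearSVC
from sklearn.pipeline import make_pipeline

reviews = ["the room was spotless and the staff were friendly",
           "noisy, dirty and the service was terrible"]            # placeholder reviews
labels = [1, 0]                                                     # 1 = positive, 0 = negative

tfidf_model = make_pipeline(TfidfVectorizer(ngram_range=(1, 1)), LinearSVC())
freq_model = make_pipeline(CountVectorizer(ngram_range=(1, 1)), LinearSVC())

# With a real corpus, cross-validating both pipelines (e.g. sklearn.model_selection.cross_val_score)
# reproduces the paper's comparison of frequency versus tf-idf unigram weighting.
```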
in this research, we applied svm-based supervised machine learning models for sentiment classification of online hotel reviews.in this paper, we first analyze previous research on sentiment classification, and then propose a supervised machine learning approach using unigram feature with two types of information (frequency and tf-idf) to realize polarity classification of documents. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/293.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/293.txt new file mode 100644 index 0000000000000000000000000000000000000000..3dfb5410b69f7927f95048065e6f8057b8486700 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/293.txt @@ -0,0 +1 @@ +in recent years, credit card users suffer a huge amount of loss because of frauds. many researchers are working on the early detection of credit card frauds so. k. modi and r. dayma review on methods for fraud detection specially in credit card transactions. a. agrawal et al. proposed a novel approach for credit card fraud detection. the main tools used by researchers for credit card fraud detection includes, ml algorithms, neural networks, classification and clustering methods.neural networks are based on human brain and the learning capability of neural networks solved many problems in science and engineering. c. wang et al. proposed a neural network based whale algorithm for credit cards fraud detection. f. ghobadi and m. rohani proposed a cost sensitive model for credit card fraud detection using neural network. n. k. gyamfi and j. abdulai applied support vector machine for detecting fraud using credit cards.classification and clustering are the techniques to divide the data into several classes or clusters. these techniques are also very useful in solving many problems. m. kavitha and m. suriakala applied meta classifier on huge data to detect frauds in credit card transactions. h. wang et al. applied partitioning and clustering techniques based framework for credit cards fraud detection. a. mishra and c. ghorpade worked on skewed data and applied various classification techniques for credit cards fraud detection. alex g.c. de sá et al. proposed a classification technique for credit cards fraud detection.machine learning algorithms are ai techniques which are used in various disciplines to solve problems mainly deals with large amount of data. many researchers applied machine learning and deep learning techniques to detect frauds in credit cards. however, there is still a need to analyze and apply the power of ml algorithms to detect frauds in credit card transactions. the areas in which machine learning algorithms are in use are as follows:classification -classification find some conclusions from a huge amount of data. when given some input values from the data, the classification algorithms attempt to select one or more outputs on the basis of the input data. machine learning algorithms are very useful in classification. the main tools used by researchers for credit card fraud detection includes, ml algorithms, neural networks, classification and clustering methods.proposed a neural network based whale algorithm for credit cards fraud detection. rohaniproposed a cost sensitive model for credit card fraud detection using neural network.applied partitioning and clustering techniques based framework for credit cards fraud detection. 
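[Editor's illustrative sketch] The fraud-detection passage here compares tree-based classifiers (decision tree, random forest, xgboost) on a highly imbalanced transaction dataset. Below is a hedged sketch of such a comparison; the synthetic data stands in for the real credit card records, the class imbalance ratio is an assumption, and the xgboost import assumes the xgboost package is available.

# Sketch: compare decision tree, random forest and xgboost on an imbalanced dataset.
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from xgboost import XGBClassifier

# synthetic stand-in for the transaction table, with ~0.5% positive ("fraud") class
X, y = make_classification(n_samples=20000, n_features=30, weights=[0.995], random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, stratify=y, random_state=0)

models = {
    "decision tree": DecisionTreeClassifier(random_state=0),
    "random forest": RandomForestClassifier(n_estimators=100, random_state=0),
    "xgboost": XGBClassifier(n_estimators=200, eval_metric="logloss"),
}
for name, model in models.items():
    model.fit(X_tr, y_tr)
    print(name)
    print(classification_report(y_te, model.predict(X_te), digits=4))

Because the classes are so skewed, per-class precision and recall (as printed above) are more informative than raw accuracy when comparing the three models.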
ghorpade worked on skewed data and applied various classification techniques for credit cards fraud detection.machine learning algorithms are ai techniques which are used in various disciplines to solve problems that mainly deal with large amounts of data. many researchers applied machine learning and deep learning techniques to detect frauds in credit cards.in this paper machine learning algorithms are applied on detection of frauds using credit card transactions.in this paper three machine learning algorithms namely decision tree, random forest and xgboost are applied on a real data set having data of more than one lakh credit cards.machine learning (ml) algorithms are applied on the data set of more than one lakh credit cards.in this paper machine learning algorithms are used for credit card fraud detection. the power of machine learning is used to detect credit card frauds and the performance of different machine learning algorithms is compared. three machine learning algorithms, decision tree, random forest and xgboost, are applied on a data set having the data of 284,808 credit cards. the performance of other machine learning algorithms can be checked for credit card fraud detection. the accuracy of the xgboost machine learning algorithm should also be tested on other data sets for credit card fraud detection. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/294.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/294.txt new file mode 100644 index 0000000000000000000000000000000000000000..7cab7ec00b85ae8b5dda572a1fc98c3795b12ac9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/294.txt @@ -0,0 +1 @@ +slight changes in pose and illumination produce large changes in object appearance. recognition of objects under various classes of geometric transformations or under various viewpoints was previously studied in . however, these methods offer no solution for the problem of illumination variability in natural images. in the problem of varying illumination and fixed pose was addressed. recognition under large variation in pose and illumination has recently been introduced in . in this method each "cone" models only a 4x4 degree patch of the visibility sphere, hence large variability in pose is accomplished by calculation of the distance to each cone, which is much more computationally expensive than our approach.appearance-based methods can recognize the object under a particular pose and lighting, if the object has been previously seen under similar circumstances. to extend these methods to handle illumination variability, a large set of images of the object under varying illumination should be used for the learning stage, which is highly inefficient. the following observations allow us to alleviate this problem, by modeling the object appearance under a wide range of illuminations, instead of physically creating them.consider a convex object with a lambertian reflectance function, which is illuminated by a single point light source at infinity. let b ∈ r^(n×3) be a matrix where each row is the product of the albedo with the inward pointing unit normal for a point on the surface corresponding to a particular pixel in the image. let s ∈ r^3 denote the product of the light source intensity with the unit vector in the direction of the light source. the resulting image x ∈ r^n is then given by x = max(bs, 0). the pixels set to zero correspond to the surface points lying in an attached shadow. convexity of the object is assumed to avoid cast shadows.
when no part of the object is shadowed, x lies in the 3-d subspace l, called the illumination space, given by the span of the matrix b, where l = {x : x = bs, s ∈ r^3}. hence the illumination subspace can be constructed from just three basis images .it was shown in that the set c of all possible images of a convex lambertian surface, created by varying the direction and strength of an arbitrary number of point light sources at infinity, is defined as follows: c = {x : x = Σ_i max(bs_i, 0), s_i ∈ r^3}, and c is a convex cone in r^n . furthermore, it was shown in that any image in the cone c can be represented as a convex combination of extreme rays given by x_ij = max(bs_ij, 0), where s_ij = b_i × b_j and b_i, b_j are rows of b. it was proved in that the number of shadowing configurations is at most m(m − 1) + 2, where m ≤ n is the number of distinct normals. hence there are at most m(m − 1) extreme rays.since there is a finite number of extreme rays, the convex cone is polyhedral. the illumination subspace method offers a way to construct the illumination cone. gather three or more images of the object (with a fixed pose) under varying illumination without shadowing, and use these images to estimate the three-dimensional illumination subspace l by normalizing the images to unit length, and then using singular value decomposition (svd) to estimate the optimal three-dimensional orthogonal basis b' in the least-squares sense.
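[Editor's illustrative sketch] The subspace-construction step just described (normalize three or more shadow-free images to unit length, then take the best rank-3 basis via SVD) can be sketched as below. The random images are placeholders for real aligned grayscale images of the object in a fixed pose.

# Sketch of estimating the 3-d illumination subspace L in the least-squares sense.
import numpy as np

rng = np.random.default_rng(0)
images = rng.random((5, 64 * 64))          # 5 images, each flattened to a pixel vector

X = images / np.linalg.norm(images, axis=1, keepdims=True)   # unit-length normalization
U, S, Vt = np.linalg.svd(X, full_matrices=False)
B_prime = Vt[:3].T                          # n x 3 orthogonal basis spanning the subspace L

# any non-shadowed image of the object should be well approximated by its projection onto L
x = X[0]
x_hat = B_prime @ (B_prime.T @ x)
print("reconstruction error:", np.linalg.norm(x - x_hat))

With B' in hand, the extreme rays of the illumination cone can then be generated from it, as the surrounding text describes.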
the anti-face method offers an attractive solution, which proceeds by modeling the effects of different illumination conditions in the training set; this automatically voids the illumination effects, allowing fast illumination invariant detection, without having to create a large training set.section 2 focuses on applying the anti-face method to the illumination space and illumination cone, and presents novel applications for detection of objects with a lambertian surface under both varying illumination and pose. section 3 introduces an extension of the presented algorithms for ambient light.to extend anti-faces to handle illumination variability we could sample the entire illumination space of the object, and use it as a training set.the set of images under an arbitrary number of point light sources at infinity is a convex polyhedral cone in r^n, which can be expressed as a convex combination of extreme rays.from proposition 1 it follows that if the three basis images for the illumination subspace are used as the training set for the detector, it will detect the entire illumination subspace if the threshold is properly chosen. for the normalized image, the illumination cone is the intersection between the illumination cone for the non-normalized image and the unit sphere in r^n.as mentioned in section 1, the number of extreme rays is m(m − 1), where m ≤ n is the number of distinct normals, which is usually large, hence the number of extreme rays needed for construction of the illumination cone can run into the millions.the illumination space and illumination cone models assume one or more point light sources at infinity.the following method offers a way to construct the illumination cone for a convex object with a lambertian reflectance function, illuminated by an arbitrary number of point light sources at infinity and ambient light.since it is very difficult to simulate the light conditions that result in images with significant attached shadows, we tested the algorithm on 200 random samples from the illumination cone of the tiger with one and two light sources. the key element of our approach was to model the effects of different illumination conditions that can be learned from a small set of images in the training set of the anti-face detectors; this automatically voids the illumination effects, allowing fast illumination invariant detection. the method was successfully applied to detect an object under variable illumination and rotations in real images with complicated background and simulated images with significant attached shadows. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/295.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/295.txt new file mode 100644 index 0000000000000000000000000000000000000000..c7fb86b7f4234e7f84951f4c847db03e164f61d8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/295.txt @@ -0,0 +1 @@ +the human visual system can easily identify perceptually salient edges in an image. endowing machine vision systems with similar capabilities is of interest as edges are useful for diverse tasks such as optical flow , object detection , and object proposals . however, edge detection has proven challenging. early approaches relied on low-level cues such as brightness and color gradients.
reasoning about texture markedly improved results; nevertheless, accuracy still substantially lagged human performance.the introduction of the bsds dataset , composed of human annotated region boundaries, laid the foundations for a fundamental shift in edge detection. rather than rely on complex hand-designed features, dollár et al. proposed a data-driven, supervised approach for learning to detect edges. (figure 1: our goal is to train an edge detector given only semidense matches between frames (a). while motion discontinuities imply the presence of image edges (b), the converse is not necessarily true as distinct image regions may undergo similar motion (c). in this work we exploit the sparsity of edges to overcome the latter difficulty. we show that the signal obtained from matches computed over a large corpus of video data is sufficient to train top-performing edge detectors.) modern edge detectors have built on this idea and substantially pushed the state-of-the-art forward using more sophisticated learning paradigms . however, existing data-driven methods require strong supervision for training. specifically, in datasets such as bsds , human annotators use their knowledge of scene structure and object presence to mark semantically meaningful edges. moreover, recent edge detectors use image-net pre-training . in this paper, we explore whether this is necessary: is object-level supervision indispensable for edge detection? moreover, can edge detectors be trained entirely without human supervision? (figure 2: the only input to our approach is semi-dense matching results from . during training we alternate between: (1) computing flow based on the matches and edge maps (initialized to simple gradients), (2) computing motion edges from the flow fields (green: positive edge samples; blue: discarded motion edges), (3) training an edge detector using the motion edges as supervision, and (4) recomputing image edges using the new detector. the process is iterated on a large corpus of videos leading to increasingly accurate flow and edges.) we propose to train edge detectors using motion in place of human supervision. motion edges are a subset of image edges, see figure 1. therefore motion edges can be used to harvest positive training samples. on the other hand, locations away from motion edges may also contain image edges. fortunately, as edges are sparse, simply sampling such locations at random can provide good negative training data with few false negatives. thus, assuming accurate motion estimates, we can potentially harvest unlimited training data for edge detection.while it would be tempting to assume access to accurate motion estimates, this is arguably an unreasonably strong requirement. indeed, optical flow and edge detection are tightly coupled. recently, revaud et al. proposed epicflow : given an accurate edge map and semidense matches between frames , epicflow generates a dense edge-respecting interpolation of the matches. the result is a state-of-the-art optical flow estimate.this motivates our approach. we begin with only semidense matches between frames and a rudimentary knowledge of edges (simple image gradients). we then repeatedly alternate between computing flow based on the matches and most recent edge maps and retraining an edge detector based on signal obtained from the flow fields. specifically, at each iteration, we first estimate dense flow fields by interpolating the matching results using the edge maps obtained from the previous iteration.
given a large corpus of videos, we next harvest highly confident motion edges as positives and randomly sample negatives, and use this data to train an improved edge detector. the process is iterated leading to increasingly accurate flow and edges. an overview of our method is shown in figure 2.we perform experiments with the structured edge (se) and holistic edge (he) detectors. se is based on structured forests, he on deep networks; se is faster, he more accurate. both detectors achieve state-ofthe-art results. the main result of our paper is that both methods, trained using our unsupervised scheme, approach the level of performance of fully supervised training. finally, we demonstrate that our approach can serve as a novel unsupervised pre-training scheme for deep networks . specifically, we show that when finetuning a network for object detection , starting with the weights learned for edge detection improves performance over starting with a network with randomly initialized weights. while the gains are modest, we believe this is a promising direction for future exploration. during training we alternate between: (1) computing flow based on the matches and edge maps (initialized to simple gradients), (2) computing motion edges from the flow fields (green: positive edge samples; blue: discarded motion edges), (3) training an edge detector using the motion edges as supervision, and (4) recomputing image edges using the new detector. for each image i j , we use e t j and g t j to denote its image edges and motion edges at iteration t. next, for training our new edge detector e t , we harvest positives instances using a high threshold on g t j and sample random negatives away from any motion edges.motion edge detection: detecting motion edges given optical flow estimates can be challenging, see figure3. we use an edge detector trained on image edges for motion edge estimation by applying the (image) edge detector to a color-coded flow map. running an edge detector e on the colored flow map gives us a simple mechanism for motion edge detection (3c). moreover, in our iterative scheme, as both our edge detector e t−1 and flow estimate f t improve with each iteration t, so do our resulting estimates of motion edges g t = e t−1 (f lowt orgb(f t )).motion edge alignment: motion edges computed from flow exhibit slight misalignment with their corresponding image edges. to align the motion edges we apply a simple heuristic: after applying non-maximum suppression and thresholding, we align the motion edges to superpixels detected in the color image. specifically, we utilize slic super-pixels, which cover over 90% of all image edges, and match motion and superpixel edge pixels using bipartite matching (also used in bsds evaluation) with a tolerance of 3 pixels. matched motion edge pixels are shifted to the superpixel edge locations and unmatched motion edges are discarded.our method produces motion edges g t , image edges e t , and optical flow f t at each iteration t.while our focus is not on motion edge detection, identifying motion edges reliably is important as motion edges serve as our only source of supervision. while our goal is not motion edge detection per-se, this result is important as it enables us to obtain high quality positive samples for training an image edge detector. we developed an iterative process that alternated between updating optical flow using edge results, and learning an edge detector based on the flow fields, leading to increasingly accurate edges and flows. 
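[Editor's illustrative sketch] One iteration of the harvesting step described above (color-code a flow field, run the current edge detector on it, keep confident motion edges as positives, and sample random locations away from them as negatives) can be sketched as below. The flow field is synthetic, and the "edge detector" is the simple gradient-magnitude detector the method starts from; a learned detector would replace it in later iterations.

# Sketch: harvest positive/negative edge-training samples from one synthetic flow field.
import numpy as np

def gradient_edges(img):
    # stand-in edge detector: gradient magnitude, normalized to [0, 1]
    gy, gx = np.gradient(img)
    mag = np.hypot(gx, gy)
    return mag / (mag.max() + 1e-8)

# synthetic flow: the left half of the image moves, the right half is static
h, w = 100, 100
flow = np.zeros((h, w, 2))
flow[:, : w // 2, 0] = 5.0

flow_mag = np.linalg.norm(flow, axis=2)       # crude "color coding" of the flow field
motion_edges = gradient_edges(flow_mag)       # motion edges = edges of the coded flow map

pos_mask = motion_edges > 0.9                 # confident motion edges -> positive samples
far_from_edges = motion_edges < 0.01          # everywhere well away from motion edges
neg_idx = np.flatnonzero(far_from_edges)
neg_sample = np.random.default_rng(0).choice(neg_idx, size=min(500, neg_idx.size), replace=False)
print("positives:", int(pos_mask.sum()), "sampled negatives:", neg_sample.size)

Because image edges are sparse, random negatives drawn this way contain few false negatives, which is the property the passage relies on.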
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/296.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/296.txt new file mode 100644 index 0000000000000000000000000000000000000000..4f351dfeb5d76c93565484d4abb6d2d55b7e072f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/296.txt @@ -0,0 +1 @@ +improving comprehension of strategic performance and success in team competition is an important goal in sports science . data-driven methods can effectively overcome the subjective limitations (manual analysis) of the match and offer better results for football clubs. quantitative analysis can provide players and coaches with such insight, by allowing them to improve their match and assessment of the event beyond what personal observation can accomplish . traditionally, methods of performance analysis push the study of one-dimensional and discrete performance indicators towards probabilistic and correlational approaches . however, this results in somewhat limited functional information as it lacks the understanding of the player-to-player interactions that support the actions of players and overall team behaviour.it is reasonable to expect an analysis of such one-versus-one dynamics in team sports to be insufficient as multiplayer interactions are important in determining success and failure . therefore, in order to quantify and explain performance, it has been advocated that performance analysis in team sports must also focus on the interactions between players that sustain the overall team behaviour , . from the dynamical systems view, the understanding of how the coordination emerges from the interaction among the system components, that is, the player-to-player interaction, is the key to performance analysis , . in team sports, performance analysis approaches that consider the interactions of the players in many multiplayer team competitions like football are not well explored .inspired by empirical studies of networked systems, researchers have recently developed a variety of techniques and models to help us understand player interaction network in sports - . interaction or passing networks can be constructed from the observation of ball transfer between players. a key challenge is to leverage the interaction networks to gain a functional understanding of the underlying team strategies. for example, by examining the structure of interaction networks, recurrent pass sequences can be identified and linked to a team's playing style , . when the emphasis is put at the player level, duch et al. used the interaction networks to quantify and rank player's contribution relative to the overall team activity.due to dissimilarity and diversity in real-world sports data, there is no systematic program for predicting network structure. in addition, there are no particular subsets of diagnostics that are universally accepted . since team networks are intrinsically subjective and dynamic objects, it is often hard to determine a suitable way of network characterisation that governs team formation . in team sports like football, quantifying player-to-player interaction is the key for understanding the dynamic patterns that generate a scoring opportunity . this motivated us to develop an approach that quantitatively characterises players' interaction in team sports.
in this study, a data-driven approach to the study of complex player interactions from event stream data generated during football matches (henceforth referred to as soccer) is employed. the proposed framework can be used to quantify player interactions and connect that with the outcome using a machine learning approach.data-driven approaches for soccer analytics are given importance with the availability of the event stream data (e.g., opta, wyscout, stats, secondspectrum, scisports, and statsbomb). cintia et al. in their work, extracted pass-based performance measures to learn the correlation to match outcome using a machine learning approach. more recently, pappalardo et al. in their work employed a machine learning approach to rank players. their approach is based on computing statistical features from the event stream data for each player, which are then utilised to learn feature weights in a supervised learning framework i.e., relative to the match outcome. the authors then use the learned weights to compute the rating of a player. in another recent study by decroos et al. , the authors have performed a segmental analysis of different match states to extract several associative features of player performance, which are then used to determine the scoring or conceding probability using an ensemble classifier. in contrast to the above-mentioned studies that consider individual player's actions or cumulative team statistics, the proposed study describes a segment of a match using a set of activity and entropy-based quantifiable markers that capture both inter-and intra-player interactions.to quantify interaction among players in team sports conceived as dynamical systems unfolding in time, it is important to use appropriate measures , . the proposed study considers the behaviour of multiple players and the emergent nature of performance to develop pattern-forming dynamics, that is, the dynamic physical relationships that a player may establish with the teammates and opponents to make a goal. we developed a coarse-grain activity model of player-toplayer interaction from the possession chain data, that can be used to quantify the dynamic patterns underlying the interaction among players. we used the concepts of information theory retrieval to quantify the complexity of a pattern representing player interactions during sub-segments of the match.another key challenge from the analytics perspective is the format of the soccer log data, as different vendors use different data formats . therefore, an analyst has to develop complex pre-processors specific to a dataset. to tackle the challenges posed by the variety of event stream formats and to benefit the data-science community, we propose an approach that uses only a limited amount of information. the proposed approach only uses the possession information, such as player, team, action type, and result from the event stream data. the segmental analysis was thus performed using only the possession information to quantify the team performance and stability in team-dynamics during a specific module, that is, a match segment. furthermore, based on the derived performance measures we developed a machine learningenabled decision support system for automated prediction of a team's likelihood of a successful attempt at goal. therefore, in order to quantify and explain performance, it has been advocated that performance analysis in team sports must also focus on the interactions between players that sustain the overall team behaviour,. 
in team sports, performance analysis approaches that consider the interactions of the players in many multiplayer team competitions like football are not well explored., the authors have performed a segmental analysis of different match states to extract several associative features of player performance, which are then used to determine the scoring or conceding probability using an ensemble classifier. using the possession information corresponding to every segment in the match, we propose a coarsegrain model to find quantifiable measures of performance that demonstrate an associationship with the outcome of that segment, that is, which team (team-1 or team-2) makes an attempt to score by taking a ''shot '' at the opposition's goal. we analysed the interaction between players based on the following approach: a: unit increment each element m i,j of the interaction matrix is incremented by 1 for an interaction between the i th and j th player of the same team (ball passed) or players of the different team (ball recovery, tackle, ball lost etc.the possession chain data from each segment in a match was quantified using the proposed measures, which were then used as features for predicting the team that makes the ''shot '' during the segment. to calculate the estimate of complexity and non-linear dynamics in a match of soccer using the proposed coarse-grain model of teams' activity, we introduced four quantitative measures of team performance (tai , sei , kci , and dei ).we first explain the quantitative measures of performance derived from the proposed coarse-grain model of player interactions network, (a) total activity index (tai ), (b) shannon entropy index (sei ), (c) kolmogorov complexity index (kci ), and (d) distribution entropy index (dei ), followed by (e) the performance of the proposed machine learning approach and future work.3 (a)). this was further corroborated by the minimum and the maximum values of tai as seen, for example, in match g 3 (atlanta united fc (team-1) vs.furthermore, to elaborate the efficacy of the proposed quantifiable markers of team performance, we compared the results of the proposed approach with studies that employ a machine learning approach for evaluating performance,,. the rectangular box in histogram plot for team-1 (subplot (a)) indicates the players (p 2 , p 5 , p 8 , p 9 , p 10 , and p 11 ) who maintain a ball possession activity that is higher than the team's mean. across this match segment, team-1 performs better than team-2, because the segment ends with team-1 having a successful attempt at scoring i. six players of team-1 (p 2 , p 5 , p 8 , p 9 , p 10 , and p 11 ) maintain a level of interaction (as indicated by the rectangular box in 6a) above the team's mean, which is higher than team-2, where only three players are above the team's mean (as indicated by the rectangular box in fig. finally, the study demonstrates that the analysis we present can help uncover the pattern dynamics of a team's network derived using possession chain data, by quantitatively analysing measures of performance that have a specific distribution and that can be used to predict the performance of a team. 
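[Editor's illustrative sketch] The coarse-grained model described above builds an interaction matrix by unit increments per pass or duel and then summarizes it with activity and entropy measures. The snippet below is one plausible reading of that idea; the event list is made up, and the exact definitions of the paper's tai/sei/kci/dei indices may differ in detail.

# Sketch: unit-increment interaction matrix and an entropy-style summary for one segment.
import numpy as np

n_players = 11
m = np.zeros((n_players, n_players))

# (passer, receiver) pairs observed during one match segment (illustrative only)
events = [(1, 4), (4, 7), (7, 9), (9, 7), (7, 4), (4, 1), (1, 4), (4, 9)]
for i, j in events:
    m[i, j] += 1                      # unit increment per interaction

tai = m.sum()                         # total activity in the segment
p = m.flatten() / tai                 # interaction distribution over player pairs
p = p[p > 0]
sei = -(p * np.log2(p)).sum()         # Shannon-entropy style index over the interactions

print("total activity:", tai, " entropy index:", round(float(sei), 3))

Segment-level features of this kind can then be fed to a classifier to predict which team attempts a shot, as the surrounding text outlines.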
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/297.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/297.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca18449e667cdc3a7279ba8fa6b2b4bcbf5f3469 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/297.txt @@ -0,0 +1 @@ +many studies have been performed to determine the attentiveness of students in an instructional setting. many of these studies relied on qualitative techniques rather than a quantitative approach to identifying and measuring attentiveness , , . some researchers have also investigated quantitative approaches to monitoring student attentiveness. biometric bracelets are being investigated as an indicator of student attentiveness . eye and head pose tracking have also been used to determine student attentiveness . facial expressions have been used to infer student attentiveness for computer network courses .classifying students as attentive or inattentive can be helpful to the instructor by providing feedback as to which teaching style a particular student responds most favorably to. there are four learning orientations a learner will likely fall into: an innovator; an implementer; a sustainer; or a resistant learner . if an instructor is able to classify a student as attentive or inattentive when the student is exposed to the associated teaching styles of each of these orientations, students can be separated into course sections that implement their optimal learning style or can be assigned online teaching modules that use the teaching style a particular student will perform optimally with.researchers have developed personalized e-learning systems based on genetic algorithms (ga) and case-based reasoning (cbr) . adaptive user interfaces have also been developed based on personalized learning . these approaches focus on online learning settings. the proposed system in this research can be applied to online as well as classroom instructional settings. this article describes a system that uses a commercial rgb-d camera to monitor, count, and record student gestures, postures, facial expressions, and verbalizations in order to produce data for determining student attentiveness. machine learning algorithms are then used to cluster, label, and classify the data for the purpose of classifying subsequent students as attentive or inattentive. this system is an imperative step towards developing the proposed personalized learning system described in this article. first, an rgb-d sensor is used to observe a single student. an algorithm that is running in real time then detects and counts various behaviors that indicate attentiveness. these data for this student are stored into a database. when every student in the study has been observed, the data from the database are clustered into two groups using the k-means algorithm . after the data are clustered and then labeled as attentive or inattentive, the svm algorithm is used to create a decision boundary for the two groups of data.classifying students as attentive or inattentive can be helpful to the instructor by providing feedback as to which teaching style a particular student responds most favorably to. 
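[Editor's illustrative sketch] The pipeline described here (randomly generated behavior counts in [0, 12], k-means with two clusters, labeling the cluster nearest the origin as inattentive, then fitting an SVM decision boundary) can be sketched as follows. The number of behavior features (here 5) and the sample count are placeholders.

# Sketch: k-means labeling of simulated attentiveness counts, followed by an SVM boundary.
import numpy as np
from sklearn.cluster import KMeans
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
X = rng.integers(0, 13, size=(200, 5)).astype(float)   # counts of 5 attentiveness behaviors

km = KMeans(n_clusters=2, n_init=10, random_state=0).fit(X)
dist_to_origin = np.linalg.norm(km.cluster_centers_, axis=1)
inattentive_cluster = int(np.argmin(dist_to_origin))    # cluster nearest the origin
y = (km.labels_ != inattentive_cluster).astype(int)     # 1 = attentive, 0 = inattentive

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.25, random_state=0)
svm = SVC(kernel="linear").fit(X_tr, y_tr)
print("held-out accuracy:", svm.score(X_te, y_te))

New students observed by the RGB-D sensor would then be classified with the trained SVM rather than re-clustered.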
if an instructor is able to classify a student as attentive or inattentive when the student is exposed to the associated teaching styles of each of these orientations, students can be separated into course sections that implement their optimal learning style or can be assigned online teaching modules that use the teaching style a particular student will perform optimally with. this article describes a system that uses a commercial rgb-d camera to monitor, count, and record student gestures, postures, facial expressions, and verbalizations in order to produce data for determining student attentiveness. machine learning algorithms are then used to cluster, label, and classify the data for the purpose of classifying subsequent students as attentive or inattentive. when every student in the study has been observed, the data from the database are clustered into two groups using the k-means algorithm. after the data are clustered and then labeled as attentive or inattentive, the svm algorithm is used to create a decision boundary for the two groups of data.in this system, an rgb-d sensor would be used to detect various student behaviors.the data of each student are saved into the database separately, since each rgb-d sensor is used for one student.when every student in the study has been observed, the data are clustered into two clusters using the k-means algorithm. the clusters of data are labeled as attentive or inattentive depending on the distance from the origin of the data; the cluster with a centroid closest to the origin is labeled inattentive and the other cluster is labeled attentive.the data used in this analysis were created by randomly generating data within a fixed range of values (from zero to twelve); these values represented the number of times a student was observed exhibiting a specific attentiveness behavior.this research lays the groundwork for building a system that can automatically classify a student as attentive or inattentive in an instructional setting. these labeled data were then used as training and testing data in a supervised learning algorithm (svm) to establish a decision boundary.the results from the system can be used to determine the learning style of a particular student. when the optimal teaching style is found for a specific student, that student can be placed in an instructional setting that exclusively uses that style. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/298.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/298.txt new file mode 100644 index 0000000000000000000000000000000000000000..996d41f54039f5668155b3fd05a94afff2495ad8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/298.txt @@ -0,0 +1 @@ +for many real-time applications, an agent must be capable of immediate online learning of each training instance, without the ability to loop through the entire dataset and while being subject to severe resource constraints. the ability to do online learning under these constraints from non-stationary data streams in a single pass is known as streaming learning . this training paradigm presents unique challenges to agents including limited access to computational resources in terms of memory and compute time, and inference must be able to be performed at any time during training . (footnote 1: https://github.com/tyler-hayes/deep_slda. figure 1: learning curve for incremental imagenet. our deep slda approach achieves the best final top-5 accuracy, while running over 100 times faster and using 1,000 times less memory than the icarl and end-to-end models.)
deep neural networks (dnns) are the dominant approach in computer vision for inferring semantic information from sensors such as cameras, but conventional dnns are not capable of being incrementally updated or learning quickly from individual instances. incrementally updating a dnn is challenging due to the stability-plasticity dilemma . to learn, a dnn must alter its weights, but altering weights that are critical for retaining past knowledge can cause forgetting. when a dnn is incrementally updated with a temporal stream of data that is not independent and identically distributed (iid), this dilemma typically manifests as catastrophic forgetting . rather than gradually losing the ability to work well on past information, catastrophic forgetting refers to how learning only a small amount of new information can cause the complete loss of ability to operate on previously learned tasks.in the past few years, much effort has been directed at creating modified dnns that can be incrementally updated without catastrophic forgetting. (figure 2: streaming learning requires agents to learn sample-by-sample in real time, making it better suited for embedded applications than incremental batch learning. for example, in our experiments on core50, each incremental batch consists of 1,200 samples (2 classes with 600 samples each). for imagenet, each incremental batch consists of ∼13,000 images (100 classes with ∼1,300 samples each). all examples must be seen by the model multiple times before inference can be performed. in contrast, for streaming learning, new information can be learned and used immediately. images are from core50.) the vast majority of these systems operate in the incremental batch learning framework . in this setting, the dnn receives a series of large batches of new labeled samples.after a batch has been received, the dnn loops over the batch until it is adequately learned, and then the dnn can be tested on information in that batch and previous batches. most incremental batch learning methods utilize partial rehearsal or pseudo-rehearsal . streaming learning has been little studied with dnns. numerous streaming classifiers have been explored in the data mining community, but these methods have primarily been assessed with low-dimensional data streams and most are slow to train . here, we explore the use of deep streaming linear discriminant analysis (slda) for training the output layer of a convolutional neural network (cnn) incrementally, which has not been done before. we validate performance on large-scale image classification datasets under multiple data orderings that cause catastrophic forgetting in conventional dnns. since slda only trains the output layer of a cnn and does not store any previous data, it is a lightweight classifier that can be easily deployed on embedded platforms.this paper makes the following contributions: 1. we describe the deep slda algorithm. we are the first to use slda for the classification of features from a deep cnn on large-scale image classification datasets. 2.
we demonstrate that deep slda can surpass state-ofthe-art streaming learning algorithms.while much progress has been made in mitigating catastrophic forgetting for neural networks in the incremental batch learning paradigm, there is still a large gap between incremental batch learners and offline models, and much less progress has been made in the streaming paradigm. due to the maintenance of one covariance matrix per class, sqda requires more memory and compute resources as compared to slda, making it less suitable for on-device learning. for example, using sqda with embeddings from a resnet-18architecture on a 1,000 class dataset such as imagenet would require storing 1,000 covariance matrices of dimension 512×512, whereas slda would only require storing a single 512×512 covariance matrix. while many recent incremental batch learning methods perform multiple loops over a data batch, slda is a streaming method that learns per instance. • icarl -icarlis a popular incremental batch learning method designed for incremental class learning, where each batch must contain two or more categories, and these classes are not seen in later batches. based on the subset of core50 that we use, each class consists of exactly 600 training samples, so for the class iid and class instance orderings, 1,200 samples table1: ω all classification results on imagenet and core50. the icarl and end-to-end incremental batch learning models are trained on batches of 100 classes at a time for im-agenet and two classes at a time for core50, where they may loop over the batches until they have learned them. slda with a plastic covariance matrix outperforms slda with a fixed covariance matrix, exstream, icarl, and the streaming model without a replay buffer. this is likely due to the base initialization for imagenet having 100 classes, whereas the base initialization for core50 only had 1,200 samples, meaning the initial covariance matrix was not representative of the entire training set. although icarl is a top performer for imagenet, exstream and both variants of slda performed better on the class iid and class instance orderings of core50. while using 100 classes for initialization with imagenet is the standard approach, we find that the representations learned from only 50 classes provide slda with robust enough features to outperform icarl and using 75 classes allows slda to outperform both icarl and end-to-end. we conducted four variants of the experiment: 1) g(•) was initialized on imagenet and σ was initialized to a matrix of ones, 2) both g(•) and σ were initialized on imagenet, 3) g(•) was initialized on imagenet and then fine-tuned on the first 1,200 samples of core50 and σ was initialized to a matrix of ones, and 4) g(•) was initialized on imagenet and then fine-tuned on the first 1,200 samples of core50 and σ was initialized on the first 1,200 samples of core50, which is consistent with our main experiments. although performing a base initialization phase with core50 often yielded higher results, table3suggests that slda is capable of domain transfer from imagenet to core50, without requiring a base initialization phase. 
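[Editor's illustrative sketch] A streaming LDA output layer of the kind discussed above maintains per-class running means and a single shared covariance that are updated one sample at a time, and predicts with a shrinkage-regularized precision matrix. The sketch below captures that idea over fixed "deep features"; the exact covariance update and shrinkage value are illustrative, not the paper's code, and the random features stand in for CNN embeddings.

# Sketch: streaming LDA over fixed feature vectors, updated one sample at a time.
import numpy as np

class StreamingLDA:
    def __init__(self, dim, n_classes, shrinkage=1e-4):
        self.mu = np.zeros((n_classes, dim))     # per-class running means
        self.counts = np.zeros(n_classes)
        self.sigma = np.zeros((dim, dim))        # shared running covariance
        self.n = 0
        self.shrinkage = shrinkage

    def fit_sample(self, x, y):
        delta = x - self.mu[y]
        # covariance update uses the deviation from the class mean seen so far
        self.sigma = (self.n * self.sigma + np.outer(delta, delta)) / (self.n + 1)
        self.counts[y] += 1
        self.mu[y] += delta / self.counts[y]
        self.n += 1

    def predict(self, X):
        d = self.sigma.shape[0]
        lam = np.linalg.inv((1 - self.shrinkage) * self.sigma + self.shrinkage * np.eye(d))
        W = lam @ self.mu.T                                  # dim x classes
        b = -0.5 * np.sum((self.mu @ lam) * self.mu, axis=1)
        return np.argmax(X @ W + b, axis=1)

# toy usage with random stand-ins for CNN features
rng = np.random.default_rng(0)
n_classes, dim = 10, 64
labels = rng.integers(0, n_classes, size=2000)
feats = rng.normal(size=(2000, dim)) + labels[:, None]      # class-dependent shift
clf = StreamingLDA(dim=dim, n_classes=n_classes)
for x, y in zip(feats, labels):
    clf.fit_sample(x, y)
print("train accuracy:", (clf.predict(feats) == labels).mean())

Because only the means and one covariance are stored, memory stays constant regardless of how many samples have streamed past, which is what makes this attractive for embedded deployment.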
while our offline results indicate greater performance is achievable by training the hidden layers after base initialization, we urge developers of future incremental learning algorithms to test simply training the output layer after base initialization to ensure gains are being realized.while we initialized slda using the standard base initial-ization procedure used by icarl and others, the covariance matrix could instead be initialized using large amounts of unlabeled imagery (i. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/299.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/299.txt new file mode 100644 index 0000000000000000000000000000000000000000..c3d7093b1a0b0b6a0ef358de600076ef9a0d7f2c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/299.txt @@ -0,0 +1 @@ +recently, neural machine translation (nmt) (bahdanau et al., 2015;hassan et al., 2018;he et al., 2017;xia et al., 2016wu et al., 2018b,a) has become more and more popular given its superior performance without the demand of heavily hand-crafted engineering efforts. it is usually trained to maximize the likelihood of each token in the target sentence, by taking the source sentence and the preceding (ground-truth) target tokens as inputs. such training approach is referred as maximum likelihood estimation (mle) (scholz, 1985). although easy to implement, the token-level objective function during training is inconsistent with sequence-level evaluation metrics such as bleu (papineni et al., 2002).to address the inconsistency issue, reinforcement learning (rl) methods have been adopted to optimize sequence-level objectives. for example, policy optimization methods such as reinforce (ranzato et al., 2016;wu et al., 2017b) and actorcritic (bahdanau et al., 2017) are leveraged for sequence generation tasks including nmt. in machine translation community, a similar method is proposed with the name 'minimum risk training' (shen et al., 2016). all these works demonstrate the effectiveness of rl techniques for nmt models .however, effectively applying rl to real-world nmt systems has not been fulfilled by previous works. first, most of, if not all, previous works verified their methods based on shallow recurrent neural network (rnn) models. however, to obtain state-of-the-art (sota) performance, it is essential to leverage recently derived deep models (gehring et al., 2017;vaswani et al., 2017), which are much more powerful.second, it is not easy to make rl practically effective given quite a few widely acknowledged limitations of rl method (henderson et al., 2018) such as high variance of gradient estimation (weaver and tao, 2001), and objective instability (mnih et al., 2013). therefore, several tricks are proposed in previous works. however, it remains unclear, and no agreement is achieved on how to use these tricks in machine translation. for example, baseline reward method (weaver and tao, 2001) is suggested in (ranzato et al., 2016;nguyen et al., 2017; but not leveraged in (he and deng, 2012;shen et al., 2016).third, large-scale datasets, especially monolingual datasets are shown to significantly improve translation quality (sennrich et al., 2015a;xia et al., 2016) with mle training, while it remains nearly empty on how to combine rl with monolingual data in nmt.in this paper, we try to fulfill these gaps and study how to practically apply rl to obtain strong nmt systems with quite competitive, even state-ofthe-art performance. 
several comprehensive studies are conducted on different aspects of rl training to figure out how to: 1) set efficient rewards; 2) combine mle and rl objectives with different weights, which aims to stabilize the training procedure; 3) reduce the variance of gradient estimation.in addition, given the effectiveness of leveraging monolingual data in improving translation quality, we further propose a new method to combine the strength of both rl training and source/target monolingual data. to the best of our knowledge, this is the first work that tries to explore the power of monolingual data when training nmt model with rl method.we obtain some useful findings through the experiments on wmt17 chinese-english (zh-en), wmt17 english-chinese (en-zh) and wmt14 english-german (en-de) translation tasks. for instance, multinomial sampling is better than beam search in reward computation, and the combination of rl and monolingual data significantly enhances the nmt model performance. our main contributions are summarized as follows.• we provide the first comprehensive study on different aspects of rl training, such as how to setup reward and baseline reward, on top of quite competitive nmt models.• we propose a new method that effectively leverages large-scale monolingual data, from both the source and target side, when training nmt models with rl.• combined with several of our findings and method, we obtain the sota translation quality on wmt17 zh-en translation task, surpassing strong baseline (transformer big model + back translation) by nearly 1.5 bleu points. furthermore, on wmt14 en-de and wmt17 en-zh translation tasks, we can also obtain strong competitive results.we hope that our studies and findings will benefit the community to better understand and leverage reinforcement learning for developing strong nmt models, especially in real-world scenarios faced with deep models and large amount of training data (including both parallel and monolingual data). towards this end, we open source all our codes/dataset at https://github.com/ apeterswu/rl4nmt to provide a clear recipe for performance reproduction.third, large-scale datasets, especially monolingual datasets are shown to significantly improve translation quality(sennrich et al.in addition, given the effectiveness of leveraging monolingual data in improving translation quality, we further propose a new method to combine the strength of both rl training and source/target monolingual data. for instance, multinomial sampling is better than beam search in reward computation, and the combination of rl and monolingual data significantly enhances the nmt model performance.we hope that our studies and findings will benefit the community to better understand and leverage reinforcement learning for developing strong nmt models, especially in real-world scenarios faced with deep models and large amount of training data (including both parallel and monolingual data). therefore, for each source-side monolingual sentence, we use the nmt model trained from the bilingual data to beam search a target sentence and treat it as the pseudo target reference y. for each target-side monolingual sentence, using the reverse nmt model, we back translate it to get its pseudo source sentence x. 
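[Editor's illustrative sketch] The training objective discussed above combines the token-level MLE loss with a sequence-level REINFORCE-style loss in which a baseline reward reduces gradient variance. The snippet below shows that combination arithmetically for one sentence pair; the log-probabilities, BLEU-like reward, baseline, and the interpolation weight alpha are placeholder numbers, not values from the paper.

# Sketch: weighted combination of MLE and RL (REINFORCE with baseline) objectives.
import numpy as np

def combined_loss(mle_logprobs, sampled_logprob, reward, baseline, alpha=0.7):
    # alpha * MLE loss + (1 - alpha) * RL loss for one sentence pair
    mle_loss = -np.sum(mle_logprobs)                    # negative log-likelihood of reference tokens
    rl_loss = -(reward - baseline) * sampled_logprob    # REINFORCE surrogate with baseline reward
    return alpha * mle_loss + (1.0 - alpha) * rl_loss

# one hypothetical example: per-token log-probs of the reference translation, the total
# log-prob of a multinomially sampled translation, and its sentence-level BLEU reward
mle_logprobs = np.array([-0.3, -0.8, -0.2, -1.1])
sampled_logprob = -6.5
reward = 0.42            # BLEU of the sampled translation against the reference
baseline = 0.35          # e.g. an average of recent rewards

print("combined loss:", combined_loss(mle_logprobs, sampled_logprob, reward, baseline))

For monolingual data, the same loss applies once a pseudo reference (beam-searched target or back-translated source) has been attached to each sentence, as the surrounding text describes.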
the rl training strategies are evaluated on bilingual datasets from three translation tasks, wmt14 english-german (en-de), wmt17 english-chinese (en-zh) and wmt17 chinese-english (zh-en), and we further conduct the experiments to leverage monolingual data in wmt17 zh-en translation.for the monolingual dataset on zh-en translation task, similar to(sennrich et al. during training, roughly 4, 096 source tokens and 4, 096 target tokens are paired in one mini batch. "b" denotes bilingual data, "ms" denotes sourceside monolingual data and "mt" denotes target-side monolingual data, "&" denotes data combination. we consider several settings for rl training: 1) only source-side monolingual data; 2) the combination of bilingual and source-side monolingual data. we first train an mle model using the augmented dataset combining the genuine bilingual data with the pseudo bilingual data generated from the monolingual data, and then perform rl training on this combined dataset.with target-side monolingual data for target-side monolingual data, we first pre-train a translation model from english to chinese 4 , and use it to back translate target-side monolingual sentence y to get pseudo source sentence x.similarly, we consider several settings for rl training: 1) only target-side monolingual data; 2) the combination of bilingual data and target-side monolingual data. we train an mle model using both the genuine and the generated pseudo bilingual data, and then perform rl training on this data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/3.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/3.txt new file mode 100644 index 0000000000000000000000000000000000000000..e1ae4a0d26c356c647786a27de4f315ea990932f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/3.txt @@ -0,0 +1 @@ +autonomous robotic systems are of particular interest for many fields, especially those that can be dangerous for human intervention like search and rescue, and maintenance on rigs. however, motion planning in unstructured environment is still a hard problem for legged robots and their success depends largely on their ability to plan their paths robustly. moreover, the method in which a controller deals with obstacles has great consequences on the planned trajectory, and these optimizations are quintessential in generating agile motions for real-world robots.trajectory optimization is a common practice for generating motion for legged systems , since it can produce optimal trajectories which satisfy the physical and environmental constraints of the robot. however, the solution from trajectory optimization is only valid for a particular pair of initial and target positions, and one needs to re-plan if the pair changes. due to high-dimensionality and complexity, solving such an optimization problem for legged robots is infeasible in real-time.previous work simplified the problem by using a reduced-order model and refining the trajectory using model predictive control . however, the issue is exacerbated in the presence of obstacles, since collision avoidance constraints are non-linear algebraic constraints and so harder to solve.in recent years, imitation learning and reinforcement learning have become the dominant focus in the research community. the data-driven approach offers a global solution and removes the hurdle of re-planning. 
on the other hand, collecting data for imitation learning is labour intensive work, which can be done by using motion capture or using animal data , which is extremely difficult on legged robots. reinforcement learning does not require any data, but it is extremely timeconsuming to learn a policy.for planning with obstacles, most work focuses on modelling the environment as a 2-dimensional grid that represents the height of the obstacles . the collision avoidance method finds the traversable paths in the plane . however, the paths may be sub-optimal, since completely circumventing an obstacle is time consuming at best, and completely impossible at worst.to mitigate the limitations of optimal control and imitation learning, we propose a self-supervised learning approach for efficient 3d collision avoidance in real-time. specifically, we generate a set of motion data from optimal control with a reduced model to create a rough plan and learn a policy that reproduces the motion data. the learned policy is refined through whole-body model predictive control which satisfies the physical constraints of the robot. let x k , u k represent the states and actions of the robot at time-step k, the goal of optimal control is to find a trajectory, a set of x, u, such that a given cost function is minimized. assuming that x i , x t are the initial and target state of the robot specified by the user, a typical problem can be formulated as the following minfor legged robots, motion planning is normally done through optimization. it is well-known that the states of legged robots drift, and predicting a long trajectory is not ideal. in addition, trajectory optimization, especially for long horizon, is not feasible in real-time. this is particularly an issue in the presence of obstacles, since collision constraints are generally non-linear and thus require nonlinear solvers. most people combine trajectory optimization for long horizon planning with model predictive control for short horizon planning real time planning and control.in the proposed work, we will use trajectory optimization to plan a rough path for the robot torso while avoiding collisions with the environment. the outcome of trajectory optimization is generated using an approximated model, which may not be realistic for the robot. therefore, we use modelpredictive control to refine the path from the reduced model. however, motion planning in unstructured environment is still a hard problem for legged robots and their success depends largely on their ability to plan their paths robustly.trajectory optimization is a common practice for generating motion for legged systems, since it can produce optimal trajectories which satisfy the physical and environmental constraints of the robot.previous work simplified the problem by using a reduced-order modeland refining the trajectory using model predictive control. on the other hand, collecting data for imitation learning is labour intensive work, which can be done by using motion captureor using animal data, which is extremely difficult on legged robots.to mitigate the limitations of optimal control and imitation learning, we propose a self-supervised learning approach for efficient 3d collision avoidance in real-time. specifically, we generate a set of motion data from optimal control with a reduced model to create a rough plan and learn a policy that reproduces the motion data. 
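[Editor's note] The optimization problem this passage refers to ("formulated as the following min ...", and the later "where f defines the dynamic equation ...") appears to have been lost during text extraction. A plausible reconstruction from the surrounding description is given below; the stage cost \ell and the safety margin d_min are assumptions introduced here, and the exact cost and constraint set in the source may differ.

\begin{aligned}
\min_{x_{0:N},\, u_{0:N-1}} \quad & \sum_{k=0}^{N-1} \ell(x_k, u_k) \\
\text{s.t.} \quad & x_{k+1} = f(x_k, u_k), \quad x_0 = x_i, \quad x_N = x_t, \\
& x_{\min} \le x_k \le x_{\max}, \quad u_{\min} \le u_k \le u_{\max}, \\
& d(p_k, p_o) \ge d_{\min} \quad \text{for all } k.
\end{aligned}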
the learned policy is refined through whole-body model predictive control which satisfies the physical constraints of the robot. let x k , u k represent the states and actions of the robot at time-step k, the goal of optimal control is to find a trajectory, a set of x, u, such that a given cost function is minimized. most people combine trajectory optimization for long horizon planning with model predictive control for short horizon planning real time planning and control.in the proposed work, we will use trajectory optimization to plan a rough path for the robot torso while avoiding collisions with the environment. the outcome of trajectory optimization is generated using an approximated model, which may not be realistic for the robot. given the initial position of the robot state s i , the task is to find the a sequence of states {s k } n k=1 that guides the robot from its initial pose s i ∈ r 4 to its target pose s t ∈ r 4 while minimizing the time t and avoiding the obstacles at position s o .where f defines the dynamic equation of the system, x min , x max , u min ,u max are the lower and upper bounds of states and actions, and d(p k , p o ) denotes the distance between the robot and the obstacles. we use optimal control to generate a rough plan and then use supervised learning to learn a predictive model. the learned model provides the desired base motion and then it is refined using model predictive control for whole-body control. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/30.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/30.txt new file mode 100644 index 0000000000000000000000000000000000000000..627694586167081c6bb81f3694bd4573322a2aef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/30.txt @@ -0,0 +1 @@ +partially observable markov decision processes (pomdps) provide a mathematical framework for planning under uncertainty (sondik 1978;kochenderfer, wheeler, and wray 2022). an optimal policy for a pomdp maximizes the longterm reward that an agent accumulates while considering uncertainty from the agent's state and dynamics. planning, however, is often multi-objective, as agents will typically trade-off between maximizing multiple rewards and minimizing multiple costs. though this multi-objectivity can be handled explicitly (roijers et al. 2013), often times, multiple objectives are scalarized into a single reward function and penalties are captured through soft constraints. the drawback to this approach, however, is the need to define the parameters that weight the rewards and costs.constrained pomdps (cpomdps) model penalties through hard constraint budgets that must be satisfied, but are often harder to solve than pomdps with scalarized reward functions. policies for cpomdps with discrete state, action, and observation spaces can be generated offline through point-based value iteration (kim et al. 2011), with locally-approximate linear programming (poupart et al. 2015), or projected gradient ascent on finite-state controllers (wray and czuprynski 2022). additional work develops an online receding horizon controller for cpomdps with large state spaces by combining monte carlo tree search (mcts) (silver and veness 2010) with dual ascent to guarantee constraint satisfaction (lee et al. 2018). 
however, this method is limited to discrete state and action spaces.in this work, we extend mcts with dual ascent to algorithms that leverage double progressive widening (sunberg and kochenderfer 2018; couëtoux et al. 2011) in order to develop online solvers for cpomdps with large or continuous state, action, and observation spaces. specifically we extend three continuous pomdp solvers (pomcp-dpw, pomcpow and pft-dpw) (sunberg and kochenderfer 2018) to create the constrained versions (cpomcp-dpw, cpomcpow, and cpft-dpw). in our experiments, we a) compare our method against an unconstrained solver using reward scalarization, b) compare the algorithmic design choice of using simulated versus minimal cost propagation, and c) compare the rewards and costs accumulated by our three constrained solvers. policies for cpomdps with discrete state, action, and observation spaces can be generated offline through point-based value iteration(kim et al.in this work, we extend mcts with dual ascent to algorithms that leverage double progressive widening (sunberg and kochenderfer 2018;couëtoux et al. specifically we extend three continuous pomdp solvers (pomcp-dpw, pomcpow and pft-dpw)(sunberg and kochenderfer 2018)to create the constrained versions (cpomcp-dpw, cpomcpow, and cpft-dpw).pomdps formally, a pomdp is defined by the 7-tuple (s, a, o, t , z, r, γ) consisting respectively of state, action, and observation spaces, a transition model mapping states and actions to a distribution over resultant states, an observation model mapping an underlying transition to a distribution over emitted observations, a reward function mapping an underlying state transition to an instantaneous reward, and a discount factor.offline pomdp planning algorithms(spaan and vlassis 2005;kurniawati, hsu, and lee 2008) yield compact policies that act from any history but are typically limited to relatively small state, action, and observation spaces. in continuous spaces)(couëtoux et al.constrained planning constrained pomdps augment the pomdp tuple with a cost function c that maps each state transition to a vector of instantaneous costs, and a cost budget vector ĉ that the expected discounted cost returns must satisfy.where b 0 is the initial state distribution, and belief-based reward and cost functions return the expected reward and costs from transitions from states in those beliefs. early offline cpomdp solvers use an alpha vector formulation for value and perform cost and reward backups(isom, meyn, and braatz 2008)or augment the state space with cost-to-go and perform point-based value iteration(kim et al.to generate good actions online, undurti and how (2010) perform look-ahead search up to a fixed depth while using a conservative constraint-minimizing policy learned offline to estimate the cost at leaf nodes and prune unsafe branches. in algorithms 1 and 2, these approaches for pomdp planning in continuous spaces are combined with dual ascent in order to perform cpomdp planning in continuous spaces. we note that these algorithms are amenable to methods that implement better choices for actions(mern et al. we see that with normal cost propagation, this pessimistic cost-value distributes the search towards overly conservative actions, while with minimal cost propagation, the search focuses around and picks the 5 action. previous work performs online cpomdp planning for large state spaces, but small, discrete action and observation spaces by combining mcts with dual ascent(lee et al. 2018). 
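a compact sketch of the dual-ascent outer loop described above: the tree search plans against the scalarized objective r - lambda^T c with the multipliers held fixed, the expected discounted costs of the resulting plan are compared against the budget c_hat, and lambda is updated by projected gradient ascent. `run_search` is a hypothetical stand-in for whichever constrained solver (e.g. a cpomcpow- or cpft-dpw-style search) is actually used; the step size and toy search below are illustrative only.

```python
import numpy as np

def dual_ascent(run_search, belief, c_hat, n_iters=50, lr=0.1):
    """Outer loop: run_search(belief, lam) -> (best_action, expected_costs)."""
    lam = np.zeros_like(c_hat, dtype=float)
    action = None
    for _ in range(n_iters):
        # inner step: plan greedily w.r.t. the scalarized objective r - lam @ c
        action, exp_costs = run_search(belief, lam)
        # outer step: projected gradient ascent on the dual; multipliers grow
        # on violated constraints (exp_costs > c_hat) and shrink otherwise
        lam = np.maximum(0.0, lam + lr * (np.asarray(exp_costs) - c_hat))
    return action, lam

# toy stand-in search: pretend the plan's expected cost shrinks as lam grows
toy_search = lambda b, lam: ("some_action", np.array([1.0 / (1.0 + lam[0])]))
best_action, lam = dual_ascent(toy_search, belief=None, c_hat=np.array([0.5]))
```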
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/300.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/300.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b8f4808af588b6f3b4daf1be2f22102cb7ade49 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/300.txt @@ -0,0 +1 @@ +manipulation of visual content has now become ubiquitous, and one of the most critical topics in our digital society. for instance, deepfakes has shown how computer graphics and visualization techniques can be used to defame persons by replacing their face by the face of a different person. faces are of special interest to current manipulation methods for various reasons: firstly, the reconstruction and tracking of human faces is a well-examined field in computer vision , which is the foundation of these editing approaches. secondly, faces play a central role in human communication, as the face of a person can emphasize a message or it can even convey a message in its own right .current facial manipulation methods can be separated into two categories: facial expression manipulation and facial identity manipulation (see fig. 2). one of the most prominent facial expression manipulation techniques is the method of thies et al. called face2face. it enables the transfer of facial expressions of one person to another person in real time using only commodity hardware. follow-up work such as "synthesizing obama" is able to animate the face of a person based on an audio input sequence. figure 2: advances in the digitization of human faces have become the basis for modern facial image editing tools. the editing tools can be split in two main categories: identity modification and expression modification. aside from manually editing the face using tools such as photoshop, many automatic approaches have been proposed in the last few years. the most prominent and widespread identity editing technique is face swapping, which has gained significant popularity as lightweight systems are now capable of running on mobile phones. additionally, facial reenactment techniques are now available, which alter the expressions of a person by transferring the expressions of a source person to the target.identity manipulation is the second category of facial forgeries. instead of changing expressions, these methods replace the face of a person with the face of another person. this category is known as face swapping. it became popular with wide-spread consumer-level applications like snapchat. deepfakes also performs face swapping, but via deep learning. while face swapping based on simple computer graphics techniques can run in real time, deepfakes need to be trained for each pair of videos, which is a timeconsuming task.in this work, we show that we can automatically and reliably detect such manipulations, and thereby outperform human observers by a significant margin. we leverage recent advances in deep learning, in particular, the ability to learn extremely powerful image features with convolutional neural networks (cnns). we tackle the detection problem by training a neural network in a supervised fashion. 
to this end, we generate a large-scale dataset of manipulations based on the classical computer graphics-based methods face2face and faceswap as well as the learningbased approaches deepfakes and neuraltextures .as the digital media forensics field lacks a benchmark for forgery detection, we propose an automated benchmark that considers the four manipulation methods in a realistic scenario, i.e., with random compression and random dimensions. using this benchmark, we evaluate the current state-of-the-art detection methods as well as our forgery detection pipeline that considers the restricted field of facial manipulation methods.our paper makes the following contributions:• an automated benchmark for facial manipulation detection under random compression for a standardized comparison, including a human baseline,• a novel large-scale dataset of manipulated facial imagery composed of more than 1.8 million images from 1,000 videos with pristine (i.e., real) sources and target ground truth to enable supervised learning,• an extensive evaluation of state-of-the-art hand-crafted and learned forgery detectors in various scenarios,• a state-of-the-art forgery detection method tailored to facial manipulations.current facial manipulation methods can be separated into two categories: facial expression manipulation and facial identity manipulation (see fig.face manipulation methods: in the last two decades, interest in virtual face manipulation has rapidly increased.presented an image-based approach called video rewrite to automatically create a new video of a person with generated mouth movements. with video face replacement, dale et al.several other works explicitly refer to detecting manipulations related to faces, such as distinguishing computer generated faces from natural ones, morphed faces, face splicing, face swappingand deepfakes. however, early experiments with all manipulation methods showed that the target face had to be nearly front-facing to prevent the manipulation methods from failing or producing strong artifacts. a face in a target sequence is replaced by a face that has been observed in a source video or image collection. to create a fake image, the trained encoder and decoder of the source face are applied to the target face.face2face face2faceis a facial reenactment system that transfers the expressions of a source video to a target video while maintaining the identity of the target person.postprocessing -video quality to create a realistic setting for manipulated videos, we generate output videos with different quality levels, similar to the video processing of many social networks.to track the face in the video and to extract the face region of the image. this incorporation of domain knowledge figure5: our domain-specific forgery detection pipeline for facial manipulations: the input image is processed by a robust face tracking method; we use the information to extract the region of the image covered by the face; this region is fed into a learned classification network that outputs the prediction. we evaluated various variants of our approach by using different state-of-the-art classification methods. 
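a rough sketch of the domain-specific pipeline described above (track the face, crop the facial region, classify real vs. manipulated). the paper relies on a robust face tracker and an xception-style classifier trained on the manipulation dataset; here an opencv haar-cascade detector and an untrained torchvision resnet-18 stand in purely for illustration and would need to be replaced and fine-tuned in practice.

```python
import cv2
import torch
import torch.nn as nn
from torchvision import models, transforms

# placeholder face detector; the paper uses a more robust face tracking method
detector = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_default.xml")

# placeholder binary classifier (real vs. fake); weights are untrained here and
# would be learned on the forgery dataset in a supervised fashion
classifier = models.resnet18(weights=None)
classifier.fc = nn.Linear(classifier.fc.in_features, 2)
classifier.eval()

preprocess = transforms.Compose([
    transforms.ToPILImage(),
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
])

def predict_frame(frame_bgr):
    """Return (label, confidence) for the largest detected face, or None."""
    gray = cv2.cvtColor(frame_bgr, cv2.COLOR_BGR2GRAY)
    faces = detector.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
    if len(faces) == 0:
        return None
    x, y, w, h = max(faces, key=lambda f: f[2] * f[3])          # largest face
    crop = cv2.cvtColor(frame_bgr[y:y + h, x:x + w], cv2.COLOR_BGR2RGB)
    with torch.no_grad():
        probs = torch.softmax(classifier(preprocess(crop).unsqueeze(0)), dim=1)[0]
    label = "fake" if probs[1] > probs[0] else "real"
    return label, float(probs.max())
```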
the constrained convolutional layer is specifically designed figure6: binary detection accuracy of all evaluated architectures on the different manipulation methods using face tracking when trained on our different manipulation methods separately.6shows the results of a binary forgery detection task using all network architectures evaluated separately on all four manipulation methods and at different video quality levels. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/301.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/301.txt new file mode 100644 index 0000000000000000000000000000000000000000..97ea78887b70bb0e634160e6627387f7d52cabc7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/301.txt @@ -0,0 +1 @@ +web application as a medium for exchanging information with the client-server concept. the web has an essential role in the development of technology at this time, almost all levels of society have used the web in everyday life, such as social media, e-commerce, online courses, advertising, and internet banking . this situation also makes many people interested in taking advantage illegally by exploiting weaknesses in web technology. actors who carry out these activities are called hackers. according to the open web application security project (owasp), one of the most frequent attacks by hackers to attack web applications is cross-site scripting (xss) , and according to bridgewater, as many as 68% of web applications worldwide are vulnerable to xss attacks . xss is an attack that utilizes a security hole to enter malicious scripts into a web page. the script will then direct the user to a website that has been designed to be able to retrieve cookies or sessions that the user has. xss is generally divided into 2, namely reflected xss and xss stored . reflected xss is an xss attack that is done by inserting malicious javascript scripts into the url. in comparison, stored xss is an xss attack done by inserting malicious javascript scripts into the database.one effort to prevent this xss attack is by using machine learning detection methods. machine learning is a method for analyzing patterns from existing data based on distinguishing parameters or commonly called features. this method uses patterns that have been registered to recognize malicious scripts commonly used in xss attacks.the previous research only checked the existence of script tags in the url; therefore, machine learning has difficulty when several strings infiltrate the script tag. therefore, to improve detection, the writer combines machine learning with the n-gram method. the machine learning method used is svm, naive bayes, and knn. the characteristics of this method are to find an optimal classifier function that can separate two data sets from two different classes . the n-gram method is a method for detecting similarities between 2 sentences . with the n-gram method, the author will look for similarities between the urls in training data and malicious scripts. the addition of the n-gram method is expected to strengthen the detection of special xss attacks in the script tag feature, which is increasingly varied but has the same basis.the paper is organized as follows. section ii explains background material and related study, while section iii discusses the research method and the evaluation scenario. section iv describes the result and discussion. finally, the conclusion is presented in section v. 
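a small sketch of the idea outlined above: represent urls with character n-grams and train an svm to separate xss payloads from benign urls. scikit-learn's SVC wraps libsvm; the tiny url list below is illustrative only and is not the study's dataset or its exact seven/five feature sets.

```python
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.pipeline import make_pipeline
from sklearn.svm import SVC

# toy training data: 1 = xss, 0 = benign
urls = [
    "http://site.com/search?q=<script>alert(1)</script>",
    "http://site.com/page?id=%3Cscript%3Edocument.cookie%3C/script%3E",
    "http://site.com/products?id=42",
    "http://site.com/profile?name=alice",
]
labels = [1, 1, 0, 0]

# character n-grams (n = 2..4) capture obfuscated variants of the script tag
model = make_pipeline(
    TfidfVectorizer(analyzer="char", ngram_range=(2, 4)),
    SVC(kernel="linear"),
)
model.fit(urls, labels)

print(model.predict(["http://site.com/item?id=7",
                     "http://site.com/q=<script>evil()</script>"]))
```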
the previous research only checked the existence of script tags in the url; therefore, machine learning has difficulty when several strings infiltrate the script tag. the addition of the n-gram method is expected to strengthen the detection of special xss attacks in the script tag feature, which is increasingly varied but has the same basis. in a study conducted by vishnu and jevita, the solution offered was detecting xss with machine learning with seven features for reflected xss and five features for stored xss. in contrast to research conducted by vishnu and jevita, which separates reflected xss and stored xss, this study combines the two types of xss and predicts xss with three feature categories. the resampling procedure utilized to evaluate machine learning with limited sample data or statistical methods that can be used to evaluate the performance of models or algorithms where data is separated into two subsets, namely learning process data and validation/evaluation data. the author will use the same machine learning features, namely seven features for reflected xss and five features for stored xss 7 features for reflected xss are:.the n-gram method is used to strengthen the detection of xss attacks carried out using the machine learning method. url 1, when weighted, has a value of 1-1-1-8-10-0-0-0, which in the training data, this url is considered detected as xss, with the value of the script feature being 10. the same is done in url 2, where this url is the same as url 1 has the script tag inside, meaning this url is detected as xss. url 1, when weighted, has a value of 1-1-1-8-10-0-0-0, which in the training data, this url is considered detected as xss, with the value of the script feature being 10. the same is done in url 2, where this url is the same as url 1 has the script tag inside, meaning this url is detected as xss.url data is utilized to evaluate the performance of the machine learning method in detecting reflected xss. it can be seen the results obtained from each method that the performance of the machine learning algorithm is increasing when added by the n-gram method in it, especially on the script feature. in terms of recall, the combination of svm and n-gram also achieves the highest recall by 99%, followed by nb with n-gram (91%), and knn with n-gram (86%), respectively this study presents a comparison of a machine learning method svm, knn, and naive bayes in detecting xss attacks. what distinguishes this research from previous research is that in this study, the authors added the n-gram method to each machine learning method specifically on the script feature. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/302.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/302.txt new file mode 100644 index 0000000000000000000000000000000000000000..ba12752b089e51968a03fbebae01cab26cd8d7e2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/302.txt @@ -0,0 +1 @@ +offensive posts on social media result in a number of undesired consequences to users. they have been investigated as triggers of suicide attempts and ideation, and mental health problems (bonanno and hymel, 2013;bannink et al., 2014). one of the most common ways to cope with offensive content online is training systems to be capable of recognizing offensive messages or posts. once recognized, such offensive content can be set aside for human moderation or deleted from the respective platform (e.g. 
facebook, twitter), preventing harm to users and controlling the spread of abusive behavior in social media.there have been several recent studies published on automatically identifying various kinds of offensive content such as abuse (mubarak et al., 2017), aggression (kumar et al., 2018, cyber-bullying (rosa et al., 2019), and hate speech . while there are a few studies published on languages such as arabic and greek (pitenis et al., 2020), most studies and datasets created so far include english data. data augmentation (ghadery and moens, 2020) and multilingual word embeddings (pamungkas and patti, 2019) have been applied to take advantage of existing english resources to improve the performance in systems dealing with languages other than english. to the best of our knowledge, however, state-of-the-art cross-lingual contextual embeddings such as xlm-r (conneau et al., 2019) have not yet been applied to offensive language identification. to address this gap, we evaluate the performance of cross-lingual contextual embeddings and transfer learning (tl) methods in projecting predictions from english to other languages. we show that our methods compare favorably to state-of-the-art approaches submitted to recent shared tasks on all datasets. the main contributions of this paper are the following:1. we apply cross-lingual contextual word embeddings to offensive language identification. we take advantage of existing english data to project predictions in three other languages: bengali, hindi, and spanish.2. we tackle both off-domain and off-task data for bengali. we show that not only can these methods project predictions for different languages but also for different domains (e.g. twitter vs. facebook) and tasks (e.g. binary vs. three-way classification).3. we provide important resources to the community: the code, and the english model will be freely available to everyone interested in working on low-resource languages using the same methodology. there is a growing interest in the development of computational models to identify offensive content online. early approaches relied heavily on feature engineering combined with traditional machine learning classifiers such as naive bayes and support vector machines (xu et al., 2012;dadvar et al., 2013). more recently, neural networks such as lstms, bidirectional lstms, and grus combined with word embeddings have proved to outperform traditional machine learning methods in this task (aroyehun and gelbukh, 2018;majumder et al., 2018). in the last couple of years, transformer models like elmo (peters et al., 2018) and bert (devlin et al., 2019) have been applied to offensive language identification achieving competitive scores and topping the leaderboards in recent shared tasks (liu et al., 2019;ranasinghe et al., 2019). most of these approaches use existing pretrained transformer models which can also be used as text classification models.the clear majority of studies on this topic deal with english (malmasi and zampieri, 2017;yao et al., 2019;ridenhour et al., 2020) partially motivated by the availability english resources (e.g. corpora, lexicon, and pre-trained models). 
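a condensed sketch of the cross-lingual transfer recipe evaluated in this work: fine-tune an xlm-r classifier on english offensive-language data, save the weights, then continue training from those weights on a lower-resource target language. it uses the hugging face `transformers` api; the unbatched training loop, the example sentences and the output paths are placeholders, not the authors' actual training setup.

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

def train(model, tokenizer, texts, labels, epochs=3, lr=2e-5):
    # toy fine-tuning loop, one example at a time (no batching, for brevity)
    opt = torch.optim.AdamW(model.parameters(), lr=lr)
    model.train()
    for _ in range(epochs):
        for text, label in zip(texts, labels):
            enc = tokenizer(text, return_tensors="pt", truncation=True)
            loss = model(**enc, labels=torch.tensor([label])).loss
            loss.backward()
            opt.step()
            opt.zero_grad()

tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
model = AutoModelForSequenceClassification.from_pretrained(
    "xlm-roberta-base", num_labels=2)

# step 1: train on english offensive/non-offensive data (placeholder examples)
train(model, tokenizer, ["you are an idiot", "have a nice day"], [1, 0])
model.save_pretrained("xlmr-en-offensive")

# step 2: initialise from the saved english weights and keep training on the
# target-language data, projecting the english knowledge across languages
model = AutoModelForSequenceClassification.from_pretrained("xlmr-en-offensive")
train(model, tokenizer, ["target-language example text"], [0])
```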
in recent years, a number of studies have been published on other languages such as arabic , danish (sigurbergsson and derczynski, 2020), dutch (tulkens et al., 2016), french (chiril et al., 2019), greek (pitenis et al., 2020), italian (poletto et al., 2017), portuguese (fortuna et al., 2019), slovene (fišer et al., 2017), and turkish (çöltekin, 2020) creating new datasets and resources for these languages.recent competitions organized in 2020 such as trac and offenseval have included datasets in multiple languages providing participants with the opportunity to explore cross-lingual learning models opening exciting new avenues for research on languages other than english and, in particular, on low-resource languages. the aforementioned deep learning methods require large annotated datasets to perform well which is not always available for low-resource languages. in this paper, we address the problem of data scarcity in offensive language identification by using transfer learning and crosslingual transformers from a resource rich language like english to three other languages: bengali, hindi, and spanish., 2018, cyber-bullying(rosa et al. while there are a few studies published on languages such as arabic and greek(pitenis et al.recent competitions organized in 2020 such as trac and offenseval have included datasets in multiple languages providing participants with the opportunity to explore cross-lingual learning models opening exciting new avenues for research on languages other than english and, in particular, on low-resource languages. in this paper, we address the problem of data scarcity in offensive language identification by using transfer learning and crosslingual transformers from a resource rich language like english to three other languages: bengali, hindi, and spanish., 2019)was used in semeval-2019 task 5 (hateval). s labels bengali 4,000 f overtly aggressive, covertly aggressive, non aggressive english 14,100 t offensive, non-offensive hindi 8,000 t hate offensive, non hate-offensive spanish 6,600 t hateful, non-hateful table1: instances (inst., 2019)and although bert-m model showed some cross-lingual characteristics it has not been trained on crosslingual data(karthikeyan et al. the main idea of the methodology is that we train a classification model on a resource rich, typically english, using a crosslingual transformer model, save the weights of the model and when we initialise the training process for a lower resource language, start with the saved weights from english.inter-language transfer learning we first trained the xlm-r classification model on first level of english offensive language identification dataset (olid)(zampieri et al.inter-task and inter-language transfer learning similar to the inter-language transfer learning strategy, we first trained the xlm-r classification model on the first level of english offensive language identification dataset (olid)(zampieri et al. 
we compared our results to the best systems in trac-2 for bengali, hasoc for hindi, and hateval for spanish in terms of weighted and macro f1 score according to the metrics reported by the task organizers - trac-2 reported only macro f1, hateval reported only weighted f1, and hasoc reported both. the results for bengali deserve special attention because the bengali data is off-domain with respect to the english data (facebook instead of twitter), and it contains three labels (covertly aggressive, overtly aggressive, and not aggressive) instead of the two labels present in the english dataset (offensive and non-offensive). similar to hindi and spanish, transfer learning with xlm-r cross-lingual embeddings provided the best results for bengali, achieving 0. this paper is the first study to apply cross-lingual contextual word embeddings in offensive language identification, projecting predictions from english to other languages using benchmarked datasets from shared tasks on bengali, hindi (mandl et al.). the results obtained by our models confirm that olid's general hierarchical annotation model encompasses multiple types of offensive content such as aggression, included in the bengali dataset, and hate speech included in the hindi and spanish datasets, allowing us to model different subtasks jointly using the methods described in this paper. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/303.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/303.txt new file mode 100644 index 0000000000000000000000000000000000000000..17f1cf92198d718eb997f3ae61d40b3394aa77ac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/303.txt @@ -0,0 +1 @@ +breast cancer is one of the main reasons for the demise of women. it is the second most dangerous cancer after lung cancer. like any other cancer, breast cancer begins when healthy cells change and start to grow in a disordered manner, forming a mass of cells called a tumor. a tumor can be benign or cancerous. a cancerous tumor is called malignant and it can grow and spread to other parts of the body of the patient. a benign tumor is a tumor that can grow in a particular part of the body but does not spread to the other parts of the body. many challenges are faced by a woman fighting against breast cancer, including pain during radiation therapy and chemotherapy and the huge cost in terms of money that comes with it, and much more, so it becomes very essential to predict breast cancer as soon as possible. there are pieces of evidence (as suggested by who) that state that women who consume drinks that contain alcohol, have above-average birth weight and above-average height attained as adults are more at risk of developing breast cancer. it is also suggested that women who are physically active, eat whole grains, vegetables and fruits, and consume less red meat, alcoholic drinks and sugar-sweetened drinks are at lower risk of developing breast cancer. the following graphs explain the statistics related to breast cancer, such as the most vulnerable age for developing breast cancer, average cases per year by age and much more. the following are some images that represent the difference between a normal breast and a cancerous breast. by the use of machine learning algorithms and techniques, whether the cancer is benign or malignant will be predicted by looking at the symptoms and attributes of a person suffering from any one of them and comparing them to the symptoms and attributes of the potential victim. malignant
cancer is a more dangerous type of breast cancer and detecting it at an early phase will result in better treatment hence less harm to the patient.also, early treatment is less expensive.machine learning, with its ability to extract main features from complex datasets, is largely used to predict breast cancer.application of these machine learning techniques in the medical field is of great importance as a disease can be predicted in an initial stage which can help us reduce the cost of medication that goes with it, help aid people's health, predict mostly accurate outcomes and help upgrade the healthcare value and save people's lives.many challenges are faced by a woman fighting against breast cancer which includes pain during radiation therapy and chemotherapy, the huge cost in terms of money that comes with it and, much more so it becomes very essential to predict breast cancer as soon as possible.the following graphs explain the statistics related to breast cancer such as most vulnerable age of developing breast cancer, average cases per year to the age and much more: the following are some images that represent the difference between a normal breast and a cancerous breast.machine learning, with its ability to extract main features from complex datasets, is largely used to predict breast cancer.they proposed a method of box counting of fractal algorithms to extract the features of the image and then cluster the data obtained from those breast cancer thermography images to determine the stage of cancer, using the fuzzy c-means clustering algorithm.another research paper called breast cancer diagnosis using deep learning algorithmused deep learning algorithms to diagnose breast cancer.deep learning, which is a subgroup of machine learning where neural networks algorithms like the human brain are used to learn from a large amount of data which helps us solve complex problems.to preprocess the data they used techniques such as label encoder method which is used to convert the labels which are non-numeric into the numeric form so it can be used in machine learning models.then they used a deep learning neural network algorithm which contains a series of algorithms that tries to recognize some useful relationships in the dataset that copies the working of the human brain to diagnose cancer.they use the concept of ensemble method for diagnosing breast cancer using neural networks and logistic algorithms.another paper called the breast cancer diagnosis using an unsupervised feature extraction algorithm based on deep learning.the main objective of this research was to predict breast cancer using unsupervised deep learning based on feature extraction strategy.by studying the above 5 research papers on breast cancer, understood that our algorithm can provide us with better accuracy to predict the cancer as well as can do it faster than them as the algorithm used is not that complex to deal with.machine learning will be used to predict breast cancer.550 of our data entries will be used to train our machine and use the rest to test it so can be sure that our machine predicts the type of cancer correctly. conclusion in this research, two machine learning algorithms namely decision tree classifier and logistic regression is implemented for prediction of breast cancer, and compared the accuracies of both to find which one of the two will be best suited for the prediction. 
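a small sketch of the comparison described above, using scikit-learn's bundled wisconsin diagnostic breast cancer data as a stand-in for the study's dataset: roughly 550 records train the two models and the remainder tests them, then the accuracies are compared. the split size and the specific classifiers mirror the description in the text; everything else is illustrative.

```python
from sklearn.datasets import load_breast_cancer
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

X, y = load_breast_cancer(return_X_y=True)          # 569 samples, 30 features
X_tr, X_te, y_tr, y_te = train_test_split(
    X, y, train_size=550, random_state=0, stratify=y)

for clf in (DecisionTreeClassifier(random_state=0),
            LogisticRegression(max_iter=5000)):
    clf.fit(X_tr, y_tr)
    print(type(clf).__name__, "test accuracy:", round(clf.score(X_te, y_te), 3))
```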
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/304.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/304.txt new file mode 100644 index 0000000000000000000000000000000000000000..f4e2194f599407eed72627909c8b93f85da4d7ba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/304.txt @@ -0,0 +1 @@ +video captioning is referred to as the problem of generating a textual description for a given video content. the interdisciplinary nature of this problem opens vast new possibilities for interacting with video collections and there has been increased research effort on this topic observable over the past years. this interdisciplinary nature, however, also poses significant research challenges at the intersection between the fields of natural language processing and computer vision. typically, these challenges have been pursued as extrapolations of the solutions proposed earlier for image captioning. these solutions perform classification in the visual domain with the goal to generate salient regions, linking these regions with some predefined textual attributes and then synthesize a sentence by completing a predefined generative model using the recognized attribute. different from static pictures, the content of a video is significantly richer and unlikely to be fully captured. an illustration of the modular structure of the proposed video captioning model, characterized by an interplay of the generator g that generates text sentences and the discriminator d (adversary) that verifies the sentences. while the generator has the objective to make the generated sentences as close to its existing generative model as possible, the discriminator has the objective to ensure that generated sentences are reasonable and natural for people to understand. the generative model tries to generate a sentence for the video as accurately as possible, but the discriminative model tries to distinguish whether the input sentence is a reference sentence or a generated sentence. the orange input sentences for the discriminative model represent the reference sentences, otherwise badly constructed sentences or uncorrelated sentences generated by the generative model. prior work used both 2-d cnn and 3-d cnn to process video clips and an average pooling operation over all the clips to generate a single d_v-dimensional vector which is then used to generate descriptions for the video. although the lstm scheme has shown promising performance for handling the temporal nature of video data, its critical deficiency is that it accumulates grammatical errors exponentially, which may decrease the association among the generated words as the video length increases.
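a minimal pytorch sketch of the encoder-decoder lstm generator discussed above: an encoder lstm consumes pre-extracted per-clip cnn features and a decoder lstm emits a word sequence under teacher forcing. the dimensions, vocabulary size and toy tensors are illustrative assumptions, and the adversarial discriminator is omitted entirely.

```python
import torch
import torch.nn as nn

class CaptionGenerator(nn.Module):
    def __init__(self, feat_dim=2048, hidden=512, vocab_size=10000, emb=300):
        super().__init__()
        self.encoder = nn.LSTM(feat_dim, hidden, batch_first=True)
        self.embed = nn.Embedding(vocab_size, emb)
        self.decoder = nn.LSTM(emb, hidden, batch_first=True)
        self.out = nn.Linear(hidden, vocab_size)

    def forward(self, feats, captions):
        # feats: (batch, n_clips, feat_dim); captions: (batch, seq_len)
        _, state = self.encoder(feats)           # video summary -> initial state
        emb = self.embed(captions)
        dec_out, _ = self.decoder(emb, state)    # teacher-forced decoding
        return self.out(dec_out)                 # (batch, seq_len, vocab_size)

model = CaptionGenerator()
feats = torch.randn(2, 8, 2048)                  # 2 videos, 8 clips each (toy)
caps = torch.randint(0, 10000, (2, 12))          # toy token ids
logits = model(feats, caps)
loss = nn.CrossEntropyLoss()(logits.reshape(-1, 10000), caps.reshape(-1))
```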
the generative model is used for encoding the video clips and generates sentences, while the discriminative model is trying to distinguish whether the input sentences are from reference sentence or generated sentences (as in fig.generative model: as mentioned before, we use a joint recurrent neural networks, also called encoder-decoder lstm similar to sequence-to-sequence models, as the generative model. given a video feature f ∈ r h extracted from the last hidden layer in encoder of generator, where h is the dimension of hidden layer, we concatenate it with its corresponding textual feature out ∈ r h1 , where h1 is the dimension of out. in order to reduce the instability in training process, we initialize the lstm parameters for generator and cnn parameters for discriminator by pre-training a standard encoder-decoder lstm model and a standard cnn classification model aforementioned. for example, when a captioning model uses "a child is running on the ground" to describe a picture, the attention model will focus on the area where the child located in the picture when the model generate the word "child", while generating the word "ground", the model will focus on the ground in the picture.i shows the experimental result compared to other methods. by incorporating with optical flow features, the model may generate more relevant descriptions to the video.2%, outperforming all the existing methods including smt, lstm, visual-labelsand s2vt.lstm: a woman is cooking lstm-gan: a woman is frying some food ground-truth: she is cooking on the fish lstm: a man is dancing lstm-gan: a group of men are dancing on the stage ground-truth: people are dancing on stage lstm: a man is cutting a bread lstm-gan: a man is cutting a loaf of bread ground-truth: a man is cuts a loaf of bread lstm: a man is pouring tomato into a pot lstm-gan: a man is pouring some sauce into a pot ground-truth: a person pours tomato sauce in a pot lstm: a man is jumping on a motorcycle lstm-gan: a man is riding a motorcycle ground-truth: a man is riding a motorcycle lstm: a man is cooking a pot lstm-gan: a person is making some food ground-truth: a men is preparing some food fig. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/305.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/305.txt new file mode 100644 index 0000000000000000000000000000000000000000..e08b6cfb98a1b77e0188b028fa5dba1f5c20da98 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/305.txt @@ -0,0 +1 @@ +in the development of software, code clones are generated by use of copy and paste activity of the codes in repetition. the copy and paste activity may lead to renaming of variables, e.g., a programmer may paste a part of the code to another location in the system such that it remains syntactically similar to the original code, but not exactly similar. the presence of code clones is undesirable as they generate the problem of redundancy in the program. the review of previous research work in the field of code clone detection revealed that their main focus was on finding the identical clones. the identification of similar clones is sometimes important and hence, the present research work is focusing on development of new technique for finding the similar code blocks and to quantify their similarity. 
based on the review of previous research works and the present challenge in the code clone detection, this paper suggest a technique to find clone clusters which are a set of code blocks having similarity amongst each other. by using the technique given in this paper for one code block, all other similar blocks can be detected. to make the tool efficient, support vector machine (svm) method is used, so that the class of clones can be identified. based on the review of previous research works and the present challenge in the code clone detection, this paper suggest a technique to find clone clusters which are a set of code blocks having similarity amongst each other. in their algorithm they proposed code clone detection based on similar changes made on the previous code snippets. their algorithm does not detect code clone in real time but it hypothesized that refractoring of code clones is not always a good option because most of the clone eventually shows different behaviours and hence diverge on their own.classification based on used source code representation model was presented and they also described some concrete examples of clone detection techniques highlighting main distinctive features and problems that are present in practical clone detection.based on the need of extending the knowledge in the field of code clone detection, the proposed work emphasize on the detection of code clones using a given set of programs as input and producing the percentage of similarity between the two code snippets. in a program the existence of code clones increases with the increase in percentage of similarity amongst the code snippets. the detection of code clones can be achieved by generating the feature sets for the code fragments and then matching for the similarity. the working of code clone detection can be made efficient with the use of intelligent methods of machine learning by classifying the code clones to the one of the appropriate class. figure1show the problem flow in which parser is used to generate the feature sets and machine learning tool is used for classification of the algorithm in the desired class. this step is also used to find the similarity between the code snippets taken as input and the already existing code fragments in the program. the feature generated corresponding to first for loop will be: loop: for identifier: i constants: 0,5 operator: assignment(i=0) lval=i rval=0 operator: binary(i ≤ 5) lval=i rval= 5 operator: unary(i++) the feature generated for second for loop will be: loop: for identifier: i constants: 0,5 operator: assignment(i=0) lval=i rval=0 operator: binary(i ≤ 5) lval=i rval= 5 operator: assignment(i=i+1) lval=i rval= i+1 operator: binary(i+1) lval=i rval= 1 thus, it is clear from above that some of the attributes matches, while some of them do not match. after generation of feature sets in phase 1 they are used as input in phase 2 and passed through the libsvm tool for the classification of the code snippet. hence, in the present work the results of classification of the algorithm are presented in the form whether the given code snippet belongs to sorting class or non-sorting class. it is clear from table1that the code clones can be detected by using the class labels produced by the tool and the accuracy of the results increases as the number of instances increases. in the second stage the feature sets used as input are passed through libsvm tool for the classification of code snippet. 
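a rough analogue of the two-phase approach described above: a parser turns each code snippet into a small feature vector (counts of loops, identifiers, binary/unary/assignment operators), a similarity percentage is computed between feature sets, and an svm classifies the snippet. python's `ast` module stands in for the paper's parser and scikit-learn's SVC (which wraps libsvm) for the classification stage; the snippets, labels and similarity measure are toy illustrations.

```python
import ast
import numpy as np
from sklearn.svm import SVC

def features(code: str) -> np.ndarray:
    """Phase 1: parse a snippet and count simple structural attributes."""
    tree = ast.parse(code)
    counts = {"loops": 0, "names": 0, "binops": 0, "unaryops": 0, "assigns": 0}
    for node in ast.walk(tree):
        if isinstance(node, (ast.For, ast.While)):
            counts["loops"] += 1
        elif isinstance(node, ast.Name):
            counts["names"] += 1
        elif isinstance(node, ast.BinOp):
            counts["binops"] += 1
        elif isinstance(node, ast.UnaryOp):
            counts["unaryops"] += 1
        elif isinstance(node, (ast.Assign, ast.AugAssign)):
            counts["assigns"] += 1
    return np.array(list(counts.values()), dtype=float)

def similarity(a: str, b: str) -> float:
    """Percentage of matching feature mass between two snippets."""
    fa, fb = features(a), features(b)
    return 100.0 * np.minimum(fa, fb).sum() / max(np.maximum(fa, fb).sum(), 1.0)

# Phase 2: feed feature sets to an svm (toy classes: loop-based vs. other)
snippets = ["for i in range(5):\n    s = s + i",
            "while i < 5:\n    i += 1",
            "x = 1\ny = 2",
            "print('hello')"]
labels = [1, 1, 0, 0]
clf = SVC(kernel="linear").fit([features(s) for s in snippets], labels)

print(similarity(snippets[0], snippets[1]))
print(clf.predict([features("for j in range(3):\n    t = t + j")]))
```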
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/306.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/306.txt new file mode 100644 index 0000000000000000000000000000000000000000..2c497c52e43185be25f782d920e66cfc77df585f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/306.txt @@ -0,0 +1 @@ +there is substantial statistical evidence that points to driver drowsiness as a primary cause of road accidents all over the world. driving for lengthy periods of time can lead to accidents if rest is not taken. the world health organization (who) have shown that south africa among african regions has the highest road traffic accident fatalities of about 26.6 % per 100 000 population . moreover, 1,700 people died on south african roads in the festive season of 2016 alone, a 5% increase on 2015 season. the transport minister of south africa released a report on the statistics of 2014-2015 annual year, which reveals that 80% of road accidents involve adult males between the ages of 19 and 34 . furthermore, the minister added that women are most likely to die in road accidents as passengers, especially on public transport. in addition, statistics showed that the top three causes of road accidents in south african roads include distracted drivers (for example, a driver on a phone call), speeding, and driving under the influence of alcohol . statistics of road accidents as per category of road crashes and casualties are shown in fig. 1.these incidents have led researchers around the world to investigate methods for early warning drowsiness detection and warning. in addition, many countries and government officials are paying attention to the implementation of solutions to improve driving safety. drowsiness or sleepiness can be described as a biological state where the body is in-transition from an awake state to a sleeping state. at this stage, a driver can lose concentration and be unable to take actions such as avoiding head-on collisions or braking timeously. there are obvious signs that suggest a driver is drowsy, such as: frequently yawning. they covered general measures that can be used to detect drowsiness in a driver and provided a comparative analysis of various drowsiness detection systems. this included an analysis of signs of drowsiness and various techniques used to measure these signs, with a range of driver drowsiness detection systems reviewed. they focused on reviewing existing (2015) drowsiness detection techniques, with an emphasis on pre-processing techniques that can be used on different systems, for example, the circular hough transform and the lab color space.while a large number of reviews have been conducted around driver drowsiness detection, the field has advanced and there is a need for a review of machine learning approaches applied to drowsiness detection. this paper attempts to address this need by assessing behavioural methods that are based on machine learning techniques for the classification stage of drowsiness detection, as shown in fig. this paper provides information on a set of machine learning techniques that one can use to make reliable and precise decisions for driver drowsiness detection systems. the remainder of this paper is organized as follows: a general framework for behavioral driver drowsiness detection using machine learning techniques is described in section ii. 
provides a review of metrics that are used in driver drowsiness detection and decision-making techniques. behavioral methods measure levels of drowsiness through the use of mounted cameras in the car to observe facial features such as eye state, head movement, blinking rate and yawning. machine learning techniques to classify different levels of drowsiness are now discussed, along with a review of measures that form a driver drowsiness detection system. in the driver drowsiness field, svms are primarily used to learn to classify different states of the driver from labelled data. tests were performed on reported accuracies obtained on the ulg multimodality drowsiness database (drozy), the zju eye blink database, the yawn detection dataset (yawndd), eye-chimera and the nthu-drowsy driver detection video dataset. there are many techniques that are based on behavioral methods and machine learning that can be utilized for the purpose of driver drowsiness detection. this paper presented a survey of approaches to driver drowsiness detection using machine learning techniques and discussed the range of features and measures used for classification. although there are different methods that can be used to measure the level of drowsiness (vehicle-based, physiological, and behavioral methods), this review has focused on behavioral methods because they are non-invasive, work in various light conditions and do not necessarily require vehicle modifications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/307.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/307.txt new file mode 100644 index 0000000000000000000000000000000000000000..77d8be6faa69484e8af7eeadfa083eeb4a84acb9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/307.txt @@ -0,0 +1 @@ +the idea of our model is mostly derived from the inception architecture of google. the architecture can be described as: a multi-layer feed-forward network consisting of 200 intermediate nodes with another l2 weighted regularization layer with a convolutional layer in between. as the model is trained with a triplet loss function, it learns an embedding f(x) of image x such that the relative distance between dissimilar pairs of samples is maximized whereas that between similar pairs of samples is minimized. the purpose of the loss function is to bring the anchor record, i.e. the current image x_i^a, close to a positive image x_i^p, while the negative record x_i^n (some other person) is separated by a considerably large distance, with margin α: l = sum_{i=1}^{m} [ ||f(x_i^a) - f(x_i^p)||^2 - ||f(x_i^a) - f(x_i^n)||^2 + α ]_+, where [z]_+ means max(z, 0) and m is the number of triplets in the training set. the model was not trained end-to-end from scratch as that is quite expensive in terms of time and effort; thus, we have used pre-trained models. face detection and recognition have been significant interaction tools used in security systems and access control, which have gained enough popularity in the last few decades. thus many computer vision security techniques such as face recognition, gesture detection and retina detection have gained importance, as they are non-intrusive in nature and have proved to be quite useful for uniquely determining one's identity. over the years, quite a number of face detection and recognition techniques have been proposed and have shown promising results.
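a short pytorch sketch of the triplet objective written out above: anchor and positive embeddings are pulled together while the negative is pushed away by at least the margin α. the 200-dimensional embedding follows the architecture described in the text, but the encoder network, image size, margin value and matching threshold are illustrative placeholders rather than the actual pre-trained model.

```python
import torch
import torch.nn as nn

def triplet_loss(f_a, f_p, f_n, alpha=0.2):
    # mean over the m triplets of [ ||f_a - f_p||^2 - ||f_a - f_n||^2 + alpha ]_+
    d_pos = (f_a - f_p).pow(2).sum(dim=1)
    d_neg = (f_a - f_n).pow(2).sum(dim=1)
    return torch.clamp(d_pos - d_neg + alpha, min=0.0).mean()

# toy stand-in encoder mapping face crops to unit-length 200-d embeddings
encoder = nn.Sequential(nn.Flatten(), nn.Linear(3 * 96 * 96, 200))

def embed(images):
    return nn.functional.normalize(encoder(images), dim=1)

# toy batch of m = 4 triplets of 96x96 rgb face crops
anchor, positive, negative = (torch.randn(4, 3, 96, 96) for _ in range(3))
loss = triplet_loss(embed(anchor), embed(positive), embed(negative))

# at verification time, a stored identity matches when the euclidean distance
# between embeddings falls below a chosen threshold (0.8 here is arbitrary)
match = torch.dist(embed(anchor)[0], embed(positive)[0]) < 0.8
```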
usually a face recognition module comprises of the following : detecting the face, its alignment detection, extracting out the features, finding out the similarity and its measurement. in this paper we have proposed a face recognition based on the features extracted by a convolutional neural network from the captured image of a voter. in this paper, the above-stated security problem during elections in india has been dealt properly with face recognition based electronic voting system. most face recognition methods used aligned faces as the input, it had been shown that adopting alignment in the test stage could have 1% recognition accuracy improvement on the lfwdataset. first, the entire module of face recognition has been explained such as: a) overview of the deep cnn architecture, b) detection and transformation on input images which ensures that the faces are correctly aligned prior to feeding them into the cnn. c) use the cnn to extract 200-dimensional representations, or embeddings, of faces from the aligned input images as euclidean distance directly relates to a measure of face similarity d) finally compute the embeddings to formalize the similarity.the face recognition based electronic voting system has been evaluated on a standard dataset and a custom dataset prepared for the purpose of testing real-time.if the system can correctly identify the image of the voter, or in other words, if any of the aligned image of the voter is present inside the database, then the electronic voting machine will allow the voter to cast his vote, after the voting has been done correctly, all the aligned images of that particular identity will be automatically detected from the database.the classic results of the face recognition system has been tested both on lfw dataset and custom dataset.3) if the face verification result turns out to be positive, or in other words, if the voter is a pre-registered candidate, then he will be asked to cast the vote for a political party, otherwise the result will show that his face cannot be identified and he will be informed that he is not allowed to cast vote. despite the fact that a large number of techniques have proved to work superiorly in terms of detection and recognition of human faces, still it remains challenging to develop a computationally efficient algorithm to match the human face with those present in the large database. in this paper, a secure and hassle-free face recognition based electronic voting machine has been proposed which is intended to solve the tamparability and security issues faced during the elections in india. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/308.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/308.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ce1c0b34bc03eae67dbd2f695d1c5ffc92c5f65 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/308.txt @@ -0,0 +1 @@ +football being one of the most popular sports around the globe has a huge amount of fan following the day to day events in the game. the people associated with the football teams as well as the followers of the teams often come across instances where it is hard to guess how their team will perform in a particular game, this is where game result prediction systems come into play. soccer score prediction can be a helpful means to assess the readiness of a team before going into a game. 
the prediction can also help the management of different football associations in getting their teams ready for the upcoming matches so as to get the best result possible out of the fixture. accurate football match outcome prediction is valuable for small and big sports telecommunication companies as it can help them increase their revenue and make the game a lot more interesting for the viewers.there have been several attempts at trying to predict the outcome of a football match but none of them has been able to match the accuracy of the human prediction. humans are still by far superior in predicting the outcome of the match as they take into account the technicality as well as the emotional factors that affect the outcome of the game, this helps them in predicting well but it has its drawbacks as well. humans let their emotions overpower them in the prediction which more or less leads to a wrong prediction of the outcome. various factors affect the outcome of a football match such as the number of goals scored in previous matches by a team, the winning streak that the team is coming to play with in the present game, the environment in which the team is playing the current fixture, the number of cards that the players involved in the game are playing with and many more other factors associated with the current form of the players in the team as well as the current form of the team itself. the aim of this paper is to predict the result of a football match. for the prediction process, we make use of recurrent neural networks(rnns) and long short term memory(lstm). we aim at increasing the accuracy of the prediction by taking into consideration all the events that take place during a football match and their effects on the outcome of the match.football being one of the most popular sports around the globe has a huge amount of fan following the day to day events in the game. the people associated with the football teams as well as the followers of the teams often come across instances where it is hard to guess how their team will perform in a particular game, this is where game result prediction systems come into play. the prediction can also help the management of different football associations in getting their teams ready for the upcoming matches so as to get the best result possible out of the fixture. accurate football match outcome prediction is valuable for small and big sports telecommunication companies as it can help them increase their revenue and make the game a lot more interesting for the viewers.there have been several attempts at trying to predict the outcome of a football match but none of them has been able to match the accuracy of the human prediction. humans are still by far superior in predicting the outcome of the match as they take into account the technicality as well as the emotional factors that affect the outcome of the game, this helps them in predicting well but it has its drawbacks as well. humans let their emotions overpower them in the prediction which more or less leads to a wrong prediction of the outcome. 
various factors affect the outcome of a football match such as the number of goals scored in previous matches by a team, the winning streak that the team is coming to play with in the present game, the environment in which the team is playing the current fixture, the number of cards that the players involved in the game are playing with and many more other factors associated with the current form of the players in the team as well as the current form of the team itself. we aim at increasing the accuracy of the prediction by taking into consideration all the events that take place during a football match and their effects on the outcome of the match. the approach of this project is to deduce better features from the results of the previous matches that the team has played and taken into consideration the current form of the team predict the accurate result to the game.a match is played in a particular environment and the events happening on the field have a huge effect on the outcome of the match. for example: the values from the 'ftag'(full time away goals) and 'fthg'(full time home goals), two new attributes 'htgs'(home team goals scored) and 'atgs'(away team goals scored) were calculated by summing over the attributes through all the rows. with respect to football match outcome prediction, the dataset is a relational database as mentioned in section 4.an lstm network performs wells on classification and has the potential to perform well on predictions of the outcome of football games. however, the increased prediction accuracy over minutes played in a match indicates that the network is able to learn about football. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/309.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/309.txt new file mode 100644 index 0000000000000000000000000000000000000000..e39053b452d25ebfce66e3953efb6aba81f8ad0b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/309.txt @@ -0,0 +1 @@ +game ai has been studied for many years, which refers to the techniques for controlling npc (non-player character) in games. these techniques include script , finite state machine which have been applied in games and evolutionary algorithm , neural network , dynamic bayesian network , case-based reasoning , fuzzy logic , support vector machine , etc. by using these techniques, there are good results reported in some applications such as chess game and path finding .pac-man is an arcade game developed by namco in 1980 and has developed lots of versions. in the version of ms.pac-man used here, player controls pac-man to avoid npc ghosts' chase and try to complete its task of eating all pellets. since ghost is controlled by script which totally relies on game designer's mind, it may suffer from some disadvantages as stiff action and unstable performance in different maps. in this case, we build a neural network to control the ghost, improve its capacity through self-learning in evolutionary process.evolutionary neural network is a method of machine learning that uses evolutionary algorithms to train artificial neural network. stanley has used it to build agent for npc. there are mainly two kinds of evolutionary neural networks. the first one only evolves the values of the connection weights for a network while the second evolves both the values of the connection and the topology of the network. 
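the weights-only flavour of neuroevolution described above (fixed topology, only connection weights evolved) can be sketched in a few lines of numpy; the topology, mutation scheme and stand-in fitness below are illustrative assumptions rather than the paper's exact setup.

import numpy as np

rng = np.random.default_rng(1)

def make_network(n_in=8, n_hidden=6, n_out=4):
    # fixed topology: only the connection weights are evolved
    return {"w1": rng.normal(size=(n_in, n_hidden)), "w2": rng.normal(size=(n_hidden, n_out))}

def forward(net, x):
    return np.tanh(x @ net["w1"]) @ net["w2"]   # scores for the four move directions

def mutate(net, sigma=0.1):
    return {k: v + rng.normal(scale=sigma, size=v.shape) for k, v in net.items()}

def fitness(net):
    # placeholder for a game-based fitness; here we simply reward picking
    # direction 0 on random game-state vectors so the loop has something to optimise
    states = rng.normal(size=(50, 8))
    return float(np.mean(forward(net, states).argmax(axis=1) == 0))

population = [make_network() for _ in range(20)]
for generation in range(30):
    population.sort(key=fitness, reverse=True)
    parents = population[:5]                                   # truncation selection
    children = [mutate(parents[rng.integers(len(parents))]) for _ in range(15)]
    population = parents + children
population.sort(key=fitness, reverse=True)
print("best fitness:", fitness(population[0]))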
as the numbers of input and output are fixed in our method, we use the first kind of evolutionary neural network which is simple and efficient.previous research on pac-man is mostly focused on the role of pac-man exploring ways that pac-man can eat more pellets , rarely about npc character ghost. yannakakis proposed an evolutionary neural network to control ghosts, but the pac-man game he used is a much simplified version far away from the popular version: the map is simple that only contains a few channels; there is no power pellets; pac-man's speed is always double as ghosts' while their speed is almost the same in popular versions. at the same time, his approach ignores the original design which is a famous setting that four ghosts have specific characters leading to different behaviors. in this paper, we use the more popular version of pac-man, ms. pac-man, which is more complex and interesting than that used in . correspondingly, the evolutionary neural network should be different due to the more complex and real environment. the inputs and outputs of the network are changed and a fitness function is proposed to describe the capture ability of the red ghost in ms. pac-man.this paper is organized in six main sections. section two describes ms. pac-man's simulation environment. section three explains about neural network's structure and parameters. section four is the evolution process. section five presents our experimental results and the conclusion is given in section six. these techniques include script, finite state machine which have been applied in games and evolutionary algorithm, neural network, dynamic bayesian network, case-based reasoning, fuzzy logic, support vector machine, etc.pac-man used here, player controls pac-man to avoid npc ghosts' chase and try to complete its task of eating all pellets.evolutionary neural network is a method of machine learning that uses evolutionary algorithms to train artificial neural network.previous research on pac-man is mostly focused on the role of pac-man exploring ways that pac-man can eat more pellets , rarely about npc character ghost. yannakakisproposed an evolutionary neural network to control ghosts, but the pac-man game he used is a much simplified version far away from the popular version: the map is simple that only contains a few channels; there is no power pellets; pac-man's speed is always double as ghosts' while their speed is almost the same in popular versions. orange's behavior remains the same, always move randomly, give people the impression that it's stupid and don't known to chase pac-man; table2presents the move rules for all four ghosts. in other cases it would select the direction that reduce the gap with the abscissa orange always move randomly pac-man is a classic chasing and evading game so that ghosts' chasing is an important part. later we call red ghost controlled by neural network as nred while call red ghost controlled by traditional script as red as before. in ms. the position and entering maze time are respectively equal to the ghosts of red, pink, cyan and orange which help to avid the situation that all neural network controlled ghosts' crow together. y pac-man eats all the pellets y ghost catches pac-man y game is more than 3000 frames. one is the times ghost catch pac-man in n games, the other is the number of pellets left in the map when pac-man is captured. 
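the two performance measures just mentioned (captures over n games and pellets left at capture) can be folded into a single fitness value; the weighting below is illustrative, and the 25%-of-pellets reference point anticipates the "perfect ghost" assumption stated next.

def ghost_fitness(games, total_pellets=240):
    """games: list of (captured, pellets_left) tuples, one per game.
    a perfect ghost always captures and leaves 75% of the pellets uneaten."""
    capture_rate = sum(1 for captured, _ in games if captured) / len(games)
    avg_left = sum(left for _, left in games) / len(games)
    pellet_score = avg_left / (0.75 * total_pellets)           # 1.0 matches the perfect ghost
    return 0.5 * capture_rate + 0.5 * min(pellet_score, 1.0)   # equal weighting (illustrative)

print(ghost_fitness([(True, 200), (True, 150), (False, 0)]))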
we assume the perfect ghost always succeeding in catching pac-man and do it efficiently that only keep pac-man eat 25% of all pellets.this paper builds an evolutionary neural network for red ghost's chasing behavior in ms. future work of our research will focus on two parts: build evolutionary neural network for other ghosts to chase coordinate with nred and try to use other machine learning method to build agent for ghost. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/31.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/31.txt new file mode 100644 index 0000000000000000000000000000000000000000..7ecc964fbb60966062dcf6fe71d76b792d3e07a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/31.txt @@ -0,0 +1 @@ +long-short-term memory is a special kind of recurrent neural network that has better performance than traditional recurrent neural networks with many benefits, lstm can partially solve the vanishing gradient problem or long-term dependence issue. 3 is a module from the app named yitutushi shows the inside architecture of the lstm cell, the 𝑥 𝑡 is the input vector to the lstm unit, 𝑓 𝑡 is the forget gate's activation, it is the input gate's activation vector, 𝑜 𝑡 is the output gate's activation vector, ℎ 𝑡 is the hidden state vector or output vector of the lstm unit, 𝑐 𝑡 is the cell state vector 𝑐̃𝑡 is the cell input activation vector. and the equations of an lstm cell are below:the 𝜎 𝑔 is the sigmoid function 𝜎 𝑐 and 𝜎 ℎ are the hyperbolic tangent function.cvs format in the data management block on the website, and we can easily edit, add the people number in the period in the same form, this function save our time on the data collection and processing.for input data, we can directly download it from the netatmo weather website, but for output data, the number of people, we have to count by ourselves when the class is beginning. there is a notification function in the netatmo mobile app that can notify us when the co2 exceeds a scheduled level, which means there will be people in the classroom, we can go to the classroom when we get the notification and don't have to wait for the whole day since sometime maybe some people come to the classroom for a short meeting. we have collected several days of data for machine learning training that have people in the classroom, since the class schedule limitation, we set seven days as training data, two days as validation data, and two days as test data. for output data, since we just have one output, we can assume the people number value is categorical data and use one-hot encoding to encode the output data to get better performance.for the output layer, we know that the output data is one item with one hot encoding format, so we should use the softmax activation function to shape the output index range from 0 to 1, and the sum from all one hot index is 1. 
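a minimal keras-style sketch of the categorical formulation described above (one-hot encoded people counts, softmax output layer); the window length, layer width and the use of tensorflow/keras are assumptions made for illustration only.

import numpy as np
import tensorflow as tf

# toy data: sensor windows (e.g. co2, temperature, humidity, noise) and an
# integer people count per window; all sizes here are illustrative
n_samples, timesteps, n_features, max_people = 200, 12, 4, 10
x = np.random.rand(n_samples, timesteps, n_features).astype("float32")
counts = np.random.randint(0, max_people + 1, size=n_samples)
y = tf.keras.utils.to_categorical(counts, num_classes=max_people + 1)   # one-hot targets

model = tf.keras.Sequential([
    tf.keras.layers.LSTM(32, input_shape=(timesteps, n_features)),
    tf.keras.layers.Dense(max_people + 1, activation="softmax"),         # one class per count
])
model.compile(optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"])
model.fit(x, y, epochs=2, batch_size=16, verbose=0)

# map the most probable class index back to a people count, as described above
predicted_counts = model.predict(x[:5], verbose=0).argmax(axis=1)
print(predicted_counts)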
this output gives the probability of each class, that is, of each possible people number generated by the model; we then transform the index with the largest probability back into a people number, so the model reports its prediction of the number of people based on the input data. we choose two days of values as the test data and obtain predictions with our model; we first build the model without one-hot encoding for the output data, then extend the model with one-hot encoding as an optimization, and the performance clearly shows that one-hot encoding is essential. in figs. 4 and 5, the orange line is the real number of people that we counted, and the blue line is the predicted number of people generated by the model; the number of people is not continuous like other data such as price, score, and weight, so treating it as such is not good at showing the tendency of the people-number variation. third, about data collection, we can only count a static number of people in the classroom by hand; if some people go outside to the restroom for ten minutes we cannot notice, because it is hard to stay in the classroom to count the dynamic number. in our model, we ignore the weekend values because there are no people in the classroom, but when we apply this model in the classroom, we cannot ignore the weekend values. we will try to collect more data and improve our collection method, for example by using a camera in the classroom to count people at different time stamps, and we can also collect data next semester, since the class this semester is ending, to enlarge the dataset. we can change the brightness of the lights and schedule the hvac working time via bemoss; combined with our model, we want to achieve automatic control of the lights and hvac in the classroom, for example, the model detects the occupancy in the classroom, and bemoss controls the lighting or hvac power to meet the demand of the classroom. with more people in the room, we can increase the lighting and set the hvac to create a comfortable environment for students during study; when there are no people or too few people, there is no reason to turn on all lights in the room at 100 percent power. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/310.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/310.txt new file mode 100644 index 0000000000000000000000000000000000000000..123f063f5a658f1a9a7f35fdec80b8c41945ceb6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/310.txt @@ -0,0 +1 @@ +the heart is one of the main parts of the human body after the brain. the primary function of the heart is to pump blood to all parts of the body. any disorder that disturbs the functionality of the heart is called heart disease. several types of heart disease exist in the world; coronary artery disease (cad) and heart failure (hf) are the most common. the main reason behind coronary heart disease (cad) is blockage or narrowing of the coronary arteries; the coronary arteries are also responsible for supplying blood to the heart. cad is the leading cause of death: over 26 million people are suffering from coronary heart disease (cad) around the world, and the number is increasing by 2% annually; 17.5 million deaths happened globally in 2005 due to cad. in the developing world, 2% of the population is suffering from cad, and 10% of the people are older than 65 years.
approximately 2% of the annual healthcare budget is spent only to treat cad. the usa government spent 35 billion dollars on cad in 2018. different factors can raise the risk of heart failure. medical scientists have classified those factors into two different categories: risk factors that cannot be changed, and risk factors that can be changed. family history, sex, and age come under risk factors that cannot be changed. high cholesterol, smoking, physical inactivity, and high blood pressure come under risk factors that can be changed. heart disease is a significant issue, so there is a need for diagnosis or prediction of heart disease. there are several methods to diagnose heart disease; among them, angiography is the trending method used by most physicians across the world. however, there are some drawbacks associated with the angiography technique. it is an expensive procedure, and physicians have to analyze many factors to diagnose a patient, which makes the physician's job very difficult; these limitations motivate the development of a non-invasive method for the prediction of heart disease. these conventional methods deal with the medical reports of the patients; moreover, they are time-consuming and may give erroneous results because they are performed by humans. to avoid these errors and to achieve better and faster results, we need an automated system. over the past years, researchers have found that machine learning algorithms perform very well in analyzing medical data sets. these data sets are directly given to machine learning algorithms, the algorithms perform according to their nature, and they give some outputs. there are some common attributes which are used to predict heart disease, such as: gender (a binary attribute, 1 for female, 0 for male). several types of heart disease exist in the world; coronary artery disease (cad) and heart failure (hf) are the most common heart diseases. the main reason behind coronary heart disease (cad) is blockage or narrowing of the coronary arteries; the coronary arteries are also responsible for supplying blood to the heart. heart disease is a significant issue, so there is a need for diagnosis or prediction of heart disease; there are several methods to diagnose heart disease, among them angiography is the trending method used by most physicians across the world. these data sets are directly given to machine learning algorithms, the algorithms perform according to their nature, and they give some outputs. if you give any labeled training data to a support vector machine algorithm, it will produce a classifier that will divide the labeled data into different classes. as the name itself suggests, a forest is a group of trees; similarly, in a random forest algorithm we have trees, and these trees are decision trees. the random forest algorithm works this way: first it collects random samples from the dataset, then it creates a decision tree for each sample, and from those available trees we select the tree which produces the best prediction results. developed a data mining model to predict heart disease efficiently. proposed an expert system based on two support vector machines (svm) to predict heart disease efficiently.
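the support vector machine and random forest behaviour described above can be illustrated with a small scikit-learn sketch; the synthetic tabular data below (13 stand-in features) merely takes the place of real patient records and is not the dataset used in the surveyed work.

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# synthetic stand-in for tabular heart-disease records (age, sex, cholesterol, ...)
rng = np.random.default_rng(42)
X = rng.normal(size=(500, 13))
y = (X[:, 0] + 0.5 * X[:, 4] + rng.normal(scale=0.5, size=500) > 0).astype(int)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

for name, clf in [("svm (rbf)", SVC(kernel="rbf")),
                  ("random forest", RandomForestClassifier(n_estimators=100))]:
    clf.fit(X_train, y_train)
    print(name, accuracy_score(y_test, clf.predict(X_test)))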
these two svms have their own purposes: the first is used to remove unnecessary features, and the second is used for prediction. proposed predictive analytics to prevent and control chronic disease with the help of machine learning techniques such as naïve bayes, support vector machine, decision tree, and artificial neural network, and they used uci machine learning repository datasets to calculate the accuracy. developed a model to improve the prediction of heart disease by overcoming the problem of overfitting; overfitting means the proposed model performs well and gives better accuracy on the training data but gives poor accuracy on the testing data while predicting heart disease. that model consists of two algorithms: one is ras (random search algorithm), and the other is a random forest algorithm that is used for prediction. here the authors proposed a system that predicts heart disease with the help of data mining techniques as well as machine learning techniques such as decision tree, naïve bayes, k-nearest neighbours, support vector machines, and artificial neural network. so, in the future, it is better to use search algorithms for selecting the features and then apply machine learning techniques for prediction, which will give us better results in the prediction of heart disease. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/311.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/311.txt new file mode 100644 index 0000000000000000000000000000000000000000..e39053b452d25ebfce66e3953efb6aba81f8ad0b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/311.txt @@ -0,0 +1 @@ +tuberculosis (tb) is a global epidemic that causes the death of 1.8 million people worldwide annually. recent data from the world health organization (who, 2016) indicate that there are more than 9 million new cases found all over the world, among which over 80% are found in south-east asia, the western pacific, and africa. tb is curable and preventable, but in resource-poor and marginalized communities with weak healthcare infrastructure, it is difficult to diagnose because of the high prevalence and constrained resources for better diagnosis and effective treatment follow-up. the global tb report from who declares that it is necessary to eliminate the tb epidemic using innovative methods such as mhealth to improve the diagnosis process and promote patient adherence to the medical treatment. the modern development of computer techniques has accelerated tb diagnosis in resource-poor areas; however, there is still a wide gap between the technological advancement and clinical practice. the gap is mainly caused by two major barriers: 1) the lack of a large-scale, real-world, well-annotated and publicly available x-ray image database. even though some databases like imageclef, the jsrt digital image database and the anode grand challenge database contain tb images, they are mostly focused on one or two tb manifestations (e.g., pulmonary nodule); 2) the lack of a high-performance computing system for accurate diagnosis by analyzing the chest x-ray images. the use of computer-aided chest radiography for tb screening and diagnosis has been limited due to the modest sensitivity and specificity, and high inter- and intra-observer differences in reporting shapes of tuberculosis on radiographic images.
how to use precise computer algorithm for automatic tb screening and detection still remains to be a challenging problem for researchers.efforts described here are part of a mobile health (mhealth) integrative project aimed at reducing patient wait time to be diagnosed with tb by implementing a socio-technical solution to optimize the diagnosis process in a high-burden tb area in lima, the capital of perú. in this paper, we propose a novel deep learning method with cnn and transfer learning for classifying tb manifestations in chest x-ray images. our algorithm and training protocol show outstanding accuracy and are proven to be practical and stable for various cnn architectures(e.g., alexnet , googlenet ). experimental results show a wide potential for medical images analysis and tb diagnosis. the global tb report from who declares that it is necessary to eliminate tb epidemic using innovative methods such as mhealth to improve the diagnosis process and promoting patient adherence to the medical treatment. even though some databases like imageclef, jsrt digital image databaseand anode grand challenge database contain tb images, they are mostly focused on one or two tb manifestations(e. in this paper, we propose a novel deep learning method with cnn and transfer learning for classifying tb manifestations in chest x-ray images. due to the limited training image and specific image characteristics in the medical image, even though some progress has been made in the various fields, it remains to be a challenging problem for accurate and efficient diagnosis using deep learning. in this paper, we explore the feasibility of applying deep learning in tb diagnosis and detection using chest x-ray images. several efforts have been explored: 1) we study and evaluate various cnn architectures and training parameters for tb x-ray image dataset; 2) we apply the transfer learning techniques in chest x-ray images. we finetune the pretrained cnn model from natural image dataset(imagenet) to our medical x-ray image dataset to detect tb and find its effectiveness and efficiency than training from scratch. we adopt this schema and use various cnn models and smaller size of kernels to find the best fit for the tb chest x-ray images. we inherently adopt this training strategy, combine with shuffle sampling and cross-validation, and creatively apply it to the chest x-ray images for classifying tb manifestations.we conducted several experiments on a private tb x-ray image dataset from perú and followed the standard evaluation protocol, used multiple training models and got the average as the final accuracy. this dataset contains 4701 images, 453 of them are labeled as normal(which means the patients don't have the tb) and 4248 are labeled as abnormal that contain various tb manifestations. since the chest x-ray images are still not abundant for training large complex model, we use the pretrained model from imagenet that are public available in model zoo among the caffe community and then finetune on our chest x-ray image database. use alexnet for binary classification and googlenet for full classification dataset, then finetune using the same network structure with chest x-ray images.02% in. we will also deploy a user-centered, mobile device-based computing system to expedite the tb diagnosis process and conduct the fieldtesting in tb clinics in a high-burden tb area in lima, the capital of perú. 
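a sketch of the transfer-learning recipe described above (start from an imagenet-pretrained network and fine-tune it for normal vs. abnormal chest x-rays); it uses pytorch/torchvision and a resnet backbone purely as stand-ins for the caffe alexnet/googlenet models mentioned in the text, and random tensors in place of the x-ray data.

import torch
import torch.nn as nn
from torchvision import models

# load an imagenet-pretrained backbone and replace the head with a 2-way output
model = models.resnet18(weights=models.ResNet18_Weights.IMAGENET1K_V1)
model.fc = nn.Linear(model.fc.in_features, 2)

# optionally freeze early layers so only the later layers are fine-tuned
for name, param in model.named_parameters():
    if not (name.startswith("layer4") or name.startswith("fc")):
        param.requires_grad = False

optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, model.parameters()),
                            lr=1e-3, momentum=0.9)
criterion = nn.CrossEntropyLoss()

# one illustrative training step on random tensors shaped like 224x224 rgb images
images, labels = torch.randn(8, 3, 224, 224), torch.randint(0, 2, (8,))
optimizer.zero_grad()
loss = criterion(model(images), labels)
loss.backward()
optimizer.step()
print(float(loss))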
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/312.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/312.txt new file mode 100644 index 0000000000000000000000000000000000000000..9267e76696b1e0f5496a11e4168f59ca58ae0e6b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/312.txt @@ -0,0 +1 @@ +generating realistic images that semantically match given text descriptions is a challenging problem and has tremendous potential applications, such as image editing, video games, and computer-aided design. recently, thanks to the success of generative adversarial networks (gans) in generating realistic images, text-to-image generation has made remarkable progress by implementing conditional gans (cgans) , which are able to generate realistic images conditioned on given text descriptions.however, current generative networks are typically uncontrollable, which means that if users change some words of a sentence, the synthetic image would be significantly different from the one generated from the original text as shown in fig. 1. when the given text description (e.g., colour) is changed, corresponding visual attributes of the bird are modified, but other unrelated attributes (e.g., the pose and position) are changed as well. this is typically undesirable in real-world applications, when a user wants to further modify the synthetic image to satisfy her preferences.the goal of this paper is to generate images from text, and also allow the user to manipulate synthetic images using natural language descriptions, in one framework. in particular, we focus on modifying visual attributes (e.g., category, texture, and colour) of objects in the generated images by changing given text descriptions. to achieve this, we propose a novel controllable text-to-image generative adversarial network (controlgan), which can synthesise high-quality images, and also allow the user to manipulate objects' attributes, without affecting the generation of other content.our controlgan contains three novel components. the first component is the word-level spatial and channel-wise attention-driven generator, where an attention mechanism is exploited to allow the 33rd conference on neural information processing systems (neurips 2019), vancouver, canada. this bird has a yellow back and rump, gray outer rectrices, and a light gray breast. (original text) this bird has a red back and rump, yellow outer rectrices, and a light white breast. (modified text) text ours original figure 1: examples of modifying synthetic images using a natural language description. the current state of the art methods generate realistic images, but fail to generate plausible images when we slightly change the text. in contrast, our method allows parts of the image to be manipulated in correspondence to the modified text description while preserving other unrelated content.generator to synthesise subregions corresponding to the most relevant words. our generator follows a multi-stage architecture that synthesises images from coarse to fine, and progressively improves the quality. the second component is a word-level discriminator, where the correlation between words and image subregions is explored to disentangle different visual attributes, which can provide the generator with fine-grained training signals related to visual attributes. 
the third component is the adoption of the perceptual loss in text-to-image generation, which can reduce the randomness involved in the generation, and enforce the generator to preserve visual appearance related to the unmodified text.to this end, an extensive analysis is performed, which demonstrates that our method can effectively disentangle different attributes and accurately manipulate parts of the synthetic image without losing diversity. also, experimental results on the cub and coco datasets show that our method outperforms existing state of the art both qualitatively and quantitatively. the second component is a word-level discriminator, where the correlation between words and image subregions is explored to disentangle different visual attributes, which can provide the generator with fine-grained training signals related to visual attributes.3(a)) takes two inputs: the word features w and hidden visual features v k ∈ r c×(h k * w k ) , where h k and w k denote the height and width of the feature map at stage k. the word features w are first mapped into the same semantic space as the visual features v k via a perception layer f k , producingw k = f k w, where.then, we calculate the channel-wise attention matrix m k ∈ r c×l by multiplying the converted word featuresw k and visual features v k , denoted as m k = v kwk .the attention weight α k i,j represents the correlation between the i th channel in the visual features v k and the j th word in the sentence s, and higher value means larger correlation.2 (a)) used in the generator, where w and w denote word features encoded from the original text s and a randomly sampled mismatched text, respectively, and 2) visual features n real , n fake , both encoded by a googlenet-basedimage encoder from the real image i and generated images i , respectively. (5) contains an adversarial loss l g k , a textimage correlation loss l corre , a perceptual loss l per , and a text-image matching loss l damsm.where k is the number of stages, i k is the real image sampled from the true image distribution p data at stage k, i k is the generated image at the k th stage sampled from the model distribution p g k , λ 2 , λ 3 , λ 4 are hyper-parameters controlling different losses, l per is the perceptual loss described in sec.4, which puts constraint on the generation process to reduce the randomness, the l damsmis used to measure text-image matching score based on the cosine similarity, and l corre reflects the correlation between the generated image and the given text description considering spatial information. (6): the unconditional adversarial loss is applied to make the synthetic image be real, and the conditional adversarial loss is utilised to make the generated image match the given text s.the adversarial loss l d k contains two components: the unconditional adversarial loss determines whether the image is real, and the conditional adversarial loss determines whether the given image matches the text description s:.to further evaluate whether the model can generate controllable results, we compute the l 2 reconstruction errorbetween the image generated from the original text and the one from the modified text shown in table1. in contrast, our approach is able to accurately manipulate parts of the image generation corresponding to the modified text, while preserving the visual attributes related to unchanged text. 
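the channel-wise word attention described above (word features projected by a perception layer into the visual feature space, correlated with each channel, then normalised) can be sketched as follows; the tensor shapes, the softmax placement and the final attended-feature step are an illustrative reading of the description rather than the paper's exact code.

import torch
import torch.nn as nn
import torch.nn.functional as F

class ChannelWiseWordAttention(nn.Module):
    """word-level channel-wise attention, loosely following the description above."""

    def __init__(self, word_dim, spatial_dim):
        super().__init__()
        # "perception layer": word feature (word_dim) -> visual space (H*W)
        self.perception = nn.Linear(word_dim, spatial_dim, bias=False)

    def forward(self, words, visual):
        # words: (batch, L, word_dim)    visual: (batch, C, H*W)
        w_tilde = self.perception(words)                  # (batch, L, H*W)
        m = torch.bmm(visual, w_tilde.transpose(1, 2))    # (batch, C, L) correlation matrix
        alpha = F.softmax(m, dim=-1)                      # attention over words, per channel
        return torch.bmm(alpha, w_tilde)                  # (batch, C, H*W) attended features

# illustrative shapes: 16 words of dim 256, visual features with 32 channels on an 8x8 map
attn = ChannelWiseWordAttention(word_dim=256, spatial_dim=64)
out = attn(torch.randn(2, 16, 256), torch.randn(2, 32, 64))
print(out.shape)   # torch.Size([2, 32, 64])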
without perceptual loss, images generated from modified text are hard to preserve content that are related to unmodified text, which indicates that the perceptual loss can potentially introduce a stricter semantic constraint on the image generation and help reduce the involved randomness. three novel components are introduced in our model: 1) the word-level spatial and channel-wise attention-driven generator can effectively disentangle different visual attributes, 2) the word-level discriminator provides the generator with fine-grained training signals related to each visual attribute, and 3) the adoption of perceptual loss reduces the randomness involved in the generation, and enforces the generator to reconstruct content related to unmodified text. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/313.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/313.txt new file mode 100644 index 0000000000000000000000000000000000000000..896933519a0746492c4cc2509e3ab87271ef6d63 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/313.txt @@ -0,0 +1 @@ +w ith the active adoption of infocomm technologies to better support the inter-connectivity of things, we have seen significant effort put forth that results in the enhancement of efficiency in the work environment and standard of living worldwide, in both smart cities or from a wider perspective, a smart nation . an important building block of a smart city is the smart home, which is essentially where people spend a major part of their lives. a key enabler which supports the wireless device connectivity in smart homes is the ieee 802.11 standards. the ieee 802.11 standards specify the media access control (mac) and physical layer (phy) for the implementation of wireless local area networks (wlans), and provide the basis for the more commonly known technology, the wi-fi protocol. the ieee 802.11 is the de-facto standard to achieve the required short to medium range wireless device connectivity in smart homes, and is ranked the most popular and has the highest market adoption as the wireless technology deployed .in contrast to the wired network technologies where a device needs to be physically connected to the network to obtain network resource accessibility, a device requiring connectivity to the wlan only needs to be within the signal range of the wireless access point. with the removal of the need for wires to be deployed and the requirement for physical access, many security issues have surfaced and security considerations become essential for the wireless network protocols.over the years, high profile security vulnerability exploitation and attacks have been observed across different versions of the wi-fi protocol. examples of vulnerabilities and attacks that plague the protocol include those targeting the wired equivalent protection (wep) - and the wi-fi protected access (wpa) and wpa2 , . as such, even though the wi-fi protocol provides good mobility support and connectivity compatibility due to the adoption by many smart devices, especially smart phones, tablets and laptops, the risks that accompany the wide adoption of this protocol has also been brought into question.in light of these potential threats, attack detection has an important role to play as it can be seen that protection mechanisms are simply not adequate in defending against attacks in the ieee 802.11 networks. 
in addition, it is not sufficient to only view the attack detection as a binary class problem (that is, a traffic flow constitutes an attack or not). to facilitate the attack analysis, response, mitigation and recovery stages, and potentially also attack attribution, it is also necessary to classify the attacks at a finer granularity in an accurate manner, so as to enable an easier and more streamlined effort in the subsequent stages of work for the security analysts.in this paper, we analyse the threats and attacks targeting the ieee 802.11 network by utilizing a publicly available dataset. the data was collected by the authors in from a lab which was set up with various smart devices (e.g. smart tv, smart phones) to realistically emulate a typical soho infrastructure. various attacks using different set of tools were carried out in the lab, and both attack and legitimate wi-fi signals' measurements were collected. the attacks fall into the categories of flooding, injection and impersonation. we then propose a deep learning approach which self-learns the features necessary to detect network anomalies and perform attack classification accurately. to the best of our knowledge, this is the first work that proposes a deep learning approach to perform ieee 802.11 network anomaly detection and attack classification. in our experiments, we considered the classification task as a multi-class (that is, legitimate traffic, flooding type attacks, injection type attacks and impersonation type attacks) classification problem, and achieved an overall accuracy of 98.6688% in classifying the attacks accordingly.the rest of the paper is organised as follow. a discussion on existing work is provided in section ii. section iii provides readers with the challenges of attack detection and classification, as well as the background and description of the dataset used in this work. we describe the proposed deep learning approach in section iv, and provide the experimental results and analysis in section v. finally, we conclude the paper in section vi. to facilitate the attack analysis, response, mitigation and recovery stages, and potentially also attack attribution, it is also necessary to classify the attacks at a finer granularity in an accurate manner, so as to enable an easier and more streamlined effort in the subsequent stages of work for the security analysts. in our experiments, we considered the classification task as a multi-class (that is, legitimate traffic, flooding type attacks, injection type attacks and impersonation type attacks) classification problem, and achieved an overall accuracy of 98. however, manual feature selection can be a very tedious and timeconsuming process. the 15 attack types are the fragmentation, chopchop and arp injection attacks; deauthenticaton, authorization request, beacon, cts, rts, disassociation, fake power saving, probe request and probe response flooding attacks; and the evil twin, hirte and caffe latte impersonation attacks. the categories are (a) injection attacks: generate a high number of correctly encrypted data frames (b) flooding attacks: generate a high volume of management frames per unit time, and (c) impersonation attacks: introduce an access point to broadcast beacon frames to advertise a pre-existing network (that is, the victim's network). of the 15 attack types (with their variations), the train dataset contains 8 of the attack types, while the test dataset contains attacks of all 15 types. 
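a minimal pytorch sketch of the kind of 2-hidden-layer prelu classifier discussed above, mapping a per-record feature vector to the four traffic classes; the layer widths and feature count are illustrative, and random tensors stand in for the real wi-fi records.

import torch
import torch.nn as nn

NUM_CLASSES = 4        # normal, flooding, injection, impersonation
NUM_FEATURES = 154     # illustrative stand-in for the per-record feature count

model = nn.Sequential(
    nn.Linear(NUM_FEATURES, 128), nn.PReLU(),
    nn.Linear(128, 64),           nn.PReLU(),   # second hidden layer
    nn.Linear(64, NUM_CLASSES),                 # logits for the four classes
)

criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

# one training step on random stand-in data
x, y = torch.randn(32, NUM_FEATURES), torch.randint(0, NUM_CLASSES, (32,))
optimizer.zero_grad()
loss = criterion(model(x), y)
loss.backward()
optimizer.step()
print(float(loss))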
the 7 novel attack types are the chopchop injection attacks; cts, rts, disassociation, fake power saving and probe request flooding attacks; and the hirte impersonation attacks. tableishows the breakdown of the train dataset records, and figure3and figure4show the test dataset classification results of the 2-hidden-layer model and 3-hidden-layer model, respectively, when using different activation functions for the neurons. tableiireferring to figure3, our experimental results demonstrated that the generated 2-hidden-layer model using the prelu activation function was able to obtain a consistently high and balanced classification rate for all categories, compared to the other three activation functions, and the prior art's best classifier using the j48 algorithm. our 2-hidden-layer prelu based model showed a significant improvement in the impersonation attack classification at an accuracy of 98. nonetheless, when compared to the j48 classifier, our 2-hidden-layer prelu based model showed a drop in the classification of normal, injection and flooding traffic, by 0.2701% and 10.for both the 2-hidden-layer and 3-hidden-layer frameworks, the sigmoid based models did not manage to attain the highest classification accuracy for any of the traffic class, while the lrelu and relu based models attained the highest classification accuracy for the normal and flooding attack traffic, respectively. the prelu based models, on the other hand, attained the highest classification accuracies for both the injection and impersonation attacks. taking an overall view, we observed that both the 2-hidden-layer and 3-hidden layer prelu based models were able to provide a well-balanced classification for all four traffic types, with the 2-hidden-layer model having a better performance over the 3-hidden-layer model, as shown in figure3 and 4. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/314.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/314.txt new file mode 100644 index 0000000000000000000000000000000000000000..394ec4f23d4972fa1332cf3d9caf80533637be9f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/314.txt @@ -0,0 +1 @@ +image completion with generative adversarial networks (gans) is a highly recognized topic in computer vision. with image exchange becoming a common medium in daily communication in the present day, there is an increase of demand for realism in generated image over a minimal image completion feature. such demand is reflected on social media statistics. however, most image editing softwares require expertise such as knowing which specific tools to use at certain circumstances to effectively modify the image the way we want. instead, an image completion method which responds to user input would allow novice to easily modify images as desired. similarly, our proposed system has an ability to easily produce high quality face images, provided a sketch and color input is given even with the presence of erased parts in the image.in recent works, deep learning based image completion methods have been used to restore erased portion of an image. the most typical method used an ordinary (square) mask, then restored the masked region with an encoderdecoder generator. then global and local discriminator was figure 1. face image editing results by our system. it can takes free-form input consist of mask, sketch and color. 
for each example, it shows that our system make users can easily edit shape and color of face even if user wants completely change hairstyle and eye (third row). interestingly, user can edits earring by our system (fourth row). used to make an estimation on whether the result was real or fake . however, this system is limited to low resolution images, and the generated image had awkward edges on of the masked regions. in addition, the synthesized images on the restored region often fell short of the users expectation as the generator was never given any user input to utilize as a guide. several works that improved on this limitation include deepfillv2 , a work that utilized user's sketch as an input, and guidedinpating , which took a part of another image as an input to restore the missing parts. however, since deepfillv2 does not use color input, the color in the synthesized image is gerated by inference from the prior distribution learned from the training data set. the guided-inpating used parts of other images to restore deleted regions. however it was difficult to restore in detail because such process required inferring the user's preferred reference image. another recent work ideepcolor proposed a system that accepts color of user input as reference to create a color image for black and white images. however, the system in ideepcolor does not allow editing of the object structures or restore deleted parts on image. in another work, a face editing system faceshop which accepts sketch and color as user input was introduced. however, faceshop has some limitations to be used as an interactive system for synthetic image generation. firstly, it utilized random rectangular rotable masks to erase the regions that are used in local and global discriminator. this means that local discriminator must resize the restored local patch to accept fitting input dimensions, and the resizing process would distort the information in both the erased and remaining portions of the image. as a result, the produced image would have awkward edges on the restored portion. secondly, faceshop would produce an unreasonable synthetic image if too much area is wiped out. typically, when given an image with the entire hair image erased, system restores it with distorted shape.to deal with the aforementioned limitations, we propose a sc-fegan with a fully-convolutional network that is capable of end-to-end training. our proposed network uses a sn-patchgan discriminator to address and improve on the awkward edges. this system is trained with not only general gan loss but also concurrently with style loss to edit the parts of the face image even if a large area is missing. our system creates high quality realistic composite images with the user's free-form input. the free-form domain input of sketch and color also has an interesting additive effects, as shown in figure 1. in summary, we make the following contributions:• we suggest a network architecture similar to unet with gated convolutional layers . such architecture is easier and faster for both training and inference stages.it produced superior and detailed result compared to the coarse-refined network in our case.• we created a free-form domain data of masks, color and sketch. this data is used for making incomplete image data for training instead of stereotyped form input.• we applied sn-patchgan discriminator and trained our network with additional style loss. 
this application covers cases with large portions erased and has shown robustness at managing the edges of the masks. it also allowed production of details on the produced image such as high quality synthetic hair style and earring. with image exchange becoming a common medium in daily communication in the present day, there is an increase of demand for realism in generated image over a minimal image completion feature. similarly, our proposed system has an ability to easily produce high quality face images, provided a sketch and color input is given even with the presence of erased parts in the image. several works that improved on this limitation include deepfillv2, a work that utilized user's sketch as an input, and guidedinpating, which took a part of another image as an input to restore the missing parts. another recent work ideepcolorproposed a system that accepts color of user input as reference to create a color image for black and white images. because most commercial image editing softwares use defined operations, a typical image modification task requires expert knowledge to strategically apply a combination of transformations for an image.in this section, we discuss several works in the field of image completion and image translation among prevalent image editing methods that use deep learning. pix2pixproposed a system used dataset which consists of pair of images that can be used to create models that convert segmentation labels to the original image, or convert a sketch to an image, or a black and white image to color image. the ideepcoloris introduced as a system that convert a monochrome image into a color image by taking a user's desired color as a mask. in these works, image transformation that interacts with user input has shown that user input can be learned by feeding it to the generator with images.image completion field has two main challenges: 1) filling deleted area of the image, 2) proper reflection of users input in the restored area. the generator receives incomplete images with user input to create an output image in the rgb channel, and inserts the masked area of the output image into the incomplete input image to create a complete image. the encoder of our generator receives input tensor of size 512×512×9: an incomplete rgb channel image with a removed region to be edited, a binary sketch that describes the structure of removed parts, a rgb color stroke map, a binary mask and a noise (see figure2). we replaced the remaining parts of image outside the mask with the input image before applying the loss functions to it. the user can modify the face image intuitively through sketch and color input to obtain a realistic synthetic image that reflects shadows and shapes in detail. in addition, our sc-fegan can generates face image with only sketch and color free-form input even if the input image is erased totally (see figure10). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/315.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/315.txt new file mode 100644 index 0000000000000000000000000000000000000000..316dae621322b794744ba33ea7a6016bf9ab83c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/315.txt @@ -0,0 +1 @@ +intrusion is a severe issue in security and a prime problem of security breach, because a single instance of intrusion can steal or delete data from computer and network systems in a few seconds. intrusion can also damage system hardware. 
furthermore, intrusion can cause huge losses financially and compromise the it critical infrastructure, thereby leading to information inferiority in cyber war. therefore, intrusion detection is important and its prevention is necessary.different intrusion detection techniques are available, but their accuracy remains an issue; accuracy depends on detection and false alarm rate. the problem on accuracy needs to be addressed to reduce the false alarms rate and to increase the detection rate. this notion was the impetus for this research work. thus, support vector machine (svm), random forest (rf), and extreme learning machine (elm) are applied in this work; these methods have been proven effective in their capability to address the classification problem.intrusion detection mechanisms are validated on a standard dataset, kdd. this work used the nsl-knowledge discovery and data mining (kdd) dataset, which is an improved form of the kdd and is considered a benchmark in the evaluation of intrusion detection methods.the remainder of the paper is organized as detailed below. the related work is presented in section ii. the proposed model of intrusion detection to which different machine learning techniques are applied is described in section iii. the implementation and results are discussed in section iv. the paper is concluded in section v, which provides a summary and directions for future work.the accuracy of svm (linear), svm (rbf), rf, and elm on 20% testing and 80% training data samples is shown in figure5. elm performs better compared with svm (linear), svm (rbf) and rf on full data samples, whereas svm (rbf) indicates improved accuracy over rf and elm on half data samples. svm (linear) outperforms other techniques on 1/4 data samples, as depicted in figure5. the precision of svm (linear), svm (rbf), rf, and elm on 20% testing and 80% training data samples is shown in figure6. the precision of elm is better than that of svm linear and rbf on the full data samples, and it also outperforms that of rf. on half data samples, the precision of svm (linear) is higher than that of svm (rbf), elm, and rf. on 1/4 th data samples, the precision of svm (linear) is equal to that of svm (rbf).the recall of svm (linear), svm (rbf), rf, and elm on 20% testing and 80% training data samples is shown in figure7. on full data samples, the recall of elm performs better than those of svm (linear), svm (rbf), and rf. the recall of svm (linear) is greater than those of svm (rbf), elm, and rf. the ranking of recall on 1/4 of data samples is as follows: first for svm (rbf), second for svm (linear), third for rf, and fourth for elm.the accuracy of svm (linear), svm (rbf), rf, and elm on 10% testing and 90% training data samples is shown in figure8. on the full data samples, the accuracy of elm is better than that of svm (linear), svm (rbf), and rf. the svm (rbf) outperforms svm (linear), elm, and rf. furthermore, the svm indicated better results than other datasets in half of the data samples and in 1/4 of the data samples. 
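the comparison described above (svm with linear and rbf kernels, random forest, evaluated by accuracy, precision and recall on an 80/20 split) can be reproduced in outline with scikit-learn; synthetic data stands in for nsl-kdd, and elm is omitted because scikit-learn has no built-in extreme learning machine.

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score, precision_score, recall_score
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# synthetic stand-in for nsl-kdd style records (41 features, normal vs. intrusion)
rng = np.random.default_rng(7)
X = rng.normal(size=(1000, 41))
y = (X[:, :5].sum(axis=1) + rng.normal(scale=1.0, size=1000) > 0).astype(int)

# 80% training / 20% testing, mirroring the split discussed in the text
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=0)

classifiers = {
    "svm (linear)": SVC(kernel="linear"),
    "svm (rbf)": SVC(kernel="rbf"),
    "random forest": RandomForestClassifier(n_estimators=100),
}
for name, clf in classifiers.items():
    clf.fit(X_tr, y_tr)
    pred = clf.predict(X_te)
    print(name, accuracy_score(y_te, pred), precision_score(y_te, pred), recall_score(y_te, pred))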
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/316.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/316.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0b872dbdce4c7c2156bf48bcf734b3f9dd370a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/316.txt @@ -0,0 +1 @@ +along of various biometrics techniques, in the past few decades, human-beings have been addicted to various technologies such as captured photos, scanned signatures, bar code systems, verification id & so on. also, biometrics is one of the applications in image processing which refers to technologies that used physiological or behavioral characteristics of human body for the user authentication. the biometric authentication system based on two modes: enrolment and recognition. in the enrolment mode, the biometric data is acquired from the sensor and stored in a database along with the person's identity for the recognition. in the recognition mode, the biometric data is re-acquired from the sensor and compared to the stored data to determine the user identity.biometric recognition based on uniqueness and permanence. the uniqueness means that there is no similarity of feature between two different biometrics data. for example, there are no two humans having the same fingerprint feature even if they are twins. and when the features of biometrics do not change over the lifetime or aging, it is called permanence. biometrics can have physiological or behavioral characteristics. the physiological characteristics are included in the physical part of body such as (fingerprint, palm print, iris, face, dna, hand geometry, retina... etc). the behavioral characteristics are based on an action taken by a person such as (voice recognition, keystroke-scan, and signature-scan). any biometrics system including two phases first phase is enrollment phase and second is recognition phase. the recognition phase divided to two things which is verification and identification. during the enrollment phase the biometrics data are captured and generate digital image then preprocessing apply to digital image for removing unwanted data and apply the post-processing than store this data in database.in the case of identification process the fingerprint acquired from one person is compared with all the fingerprints which store in database. also it is known as (1:n) matching. it is used in the process of seeking the criminals.in the verification process the person's fingerprint is verified from the database by using matching algorithms. also it is known as (1:1) matching. it is the comparison of a claimant fingerprint against enroll fingerprint, initially the person enrolls his/her fingerprint into verification system, and the result show whether the fingerprint which take from the user is matching with the fingerprint store as a template in database or not match. the enrollment, identification and verification process shown in the fig. the fingertip surface is a combination of many of ridges and many of valleys. in case of the ridge which declare as black lines and the valleys declare as white lines are show in fig. 2. the minutiae points are the points where the ridge structure changes such as bifurcation and end point. ii. fingerprint fingerprints are graphical patterns of ridges and valleys on the surface of fingertips, the ridge ending and ridge bifurcation is called minutiae as shown in fig. 3. 
there are many methods based on minutiae-based fingerprint representation were proposed in , . every person has a unique fingerprint from any other person. the fingerprint identification is based on two basic assumptions: -invariance and singularity invariance: means the fingerprint characteristics do not change along the life. singularity: means the fingerprint is unique and no two persons have the same pattern of fingerprint. the system fail to detect a match between input and template in database .the main stages of fingerprint recognition system are shown in fig. 4 fig. 4. fundamental steps of fingerprint recognition systemin the verification process the person's fingerprint is verified from the database by using matching algorithms. it is the comparison of a claimant fingerprint against enroll fingerprint, initially the person enrolls his/her fingerprint into verification system, and the result show whether the fingerprint which take from the user is matching with the fingerprint store as a template in database or not match. the fingerprint identification is based on two basic assumptions: -invariance and singularity invariance: means the fingerprint characteristics do not change along the life. in the online fingerprint identification the optical fingerprint reader is used to capture the image of fingerprint. the offline fingerprint identification is obtained by ink in the area of finger and then put a sheet of white paper on the fingerprint and finally scans the paper to get a digital image. in fig6shows the preprocessing steps.they are do an automatic fingerprint identification system the acquisition and pre-processing system with a fixed point dsp, tms320vc5509a and a fingerprint sensor, matbf200. a fingerprint feature extraction program is to locate, measure and encode ridge endings and bifurcations in the fingerprint. inthey are used an advanced method for extract feature from fingerprint which done by extract minutiae directly from original gray-level images without use binarization and thinning and they use gabor filter methods to extract features from fingerprint.,in their research the extract the minutiae from fingerprint image after apply the binarized, thinned and segmented version of a fingerprint image. in this system the fingerprint classification for indexing during fingerprint matching. they combining minutiae matching and image based fingerprints verification methods and the matching done by using the shapes of fingerprint such as cross(diamond and dispersed) and square .,in this work the matching stage perform by integrate three fingerprint matching algorithms which is hough transform, string distance and 2d dynamic programming based matching using the logistic regression method.1996 hough transform-based approaches -----------1997 ridge-based relative pre-alignment -----------2004 minutiae matching thu2005 global matching of clusters of minutiae ----------2006 invariant moment fingercode and lvq fvc20022006 global minutiae matching with image correlation ----------2007 minutiae matching, vector matching ,weight modification and local area matching process fvc20022008 minutiae matching ,which find the similartiy between two images and by calculating the correlation between these images. we also referred to the image acquisition stage, image pre-processing stage, feature extraction stage and matching stage for recognition purpose in details with some previous work. 
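a toy sketch of the minutiae-based matching idea running through this section: minutiae are treated as (x, y, orientation) triples, and a match score counts corresponding points that fall within distance and angle tolerances; the tolerances are arbitrary and pre-alignment of the two prints is assumed.

import numpy as np

def match_score(minutiae_a, minutiae_b, dist_tol=12.0, angle_tol=0.35):
    """naive minutiae matching: count points in a that have a counterpart in b
    within a distance and ridge-orientation tolerance (alignment assumed done)."""
    matched, used = 0, set()
    for (xa, ya, ta) in minutiae_a:
        for j, (xb, yb, tb) in enumerate(minutiae_b):
            if j in used:
                continue
            close = np.hypot(xa - xb, ya - yb) < dist_tol
            similar_angle = abs(np.arctan2(np.sin(ta - tb), np.cos(ta - tb))) < angle_tol
            if close and similar_angle:
                matched += 1
                used.add(j)
                break
    return matched / max(len(minutiae_a), len(minutiae_b))

# toy minutiae as (x, y, orientation); the probe is a slightly perturbed copy
template = [(100, 120, 0.3), (140, 80, 1.1), (60, 200, 2.0), (180, 160, 0.7)]
probe = [(x + 3, y - 2, t + 0.05) for (x, y, t) in template]
print(match_score(probe, template))    # close to 1.0 for a genuine match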
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/317.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/317.txt new file mode 100644 index 0000000000000000000000000000000000000000..a07a3e442226a17f373a4a914aba1293c888c050 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/317.txt @@ -0,0 +1 @@ +unsupervised pre-training played a central role in the resurgence of deep learning. starting in the mid 2000's, approaches such as the deep belief network (hinton et al., 2006) and denoising autoencoder (vincent et al., 2008) were commonly used in neural networks for computer vision (lee et al., 2009) and speech recognition (mohamed et al., 2009). it was believed that a model which learned the data distribution p (x) would also learn beneficial features for the subsequent supervised modeling of p (y |x) (lasserre et al., 2006;erhan et al., 2010). however, advancements such as piecewise linear activation functions (nair & hinton, 2010), improved initializations (glorot & bengio, 2010), and normalization strategies (ioffe & szegedy, 2015;ba et al., 2016) removed the need for pre-training in order to achieve strong results. other research cast doubt on the benefits of deep unsupervised representations and re-ported strong results using a single layer of learned features (coates et al., 2011), or even random features (huang et al., 2014;may et al., 2017). the approach fell out of favor as the state of the art increasingly relied on directly encoding prior structure into the model and utilizing abundant supervised data to directly learn representations (krizhevsky et al., 2012;graves & jaitly, 2014). retrospective study of unsupervised pre-training demonstrated that it could even hurt performance in modern settings (paine et al., 2014).instead, unsupervised pre-training flourished in a different domain. after initial strong results for word vectors (mikolov et al., 2013), it has pushed the state of the art forward in natural language processing on most tasks (dai & le, 2015;peters et al., 2018;howard & ruder, 2018;radford et al., 2018;devlin et al., 2018). interestingly, the training objective of a dominant approach like bert, the prediction of corrupted inputs, closely resembles that of the denoising autoencoder, which was originally developed for images.as a higher dimensional, noisier, and more redundant modality than text, images are believed to be difficult for generative modeling. here, self-supervised approaches designed to encourage the modeling of more global structure (doersch et al., 2015) have shown significant promise. a combination of new training objectives (oord et al., 2018), more recent architectures (gomez et al., 2017), and increased model capacity (kolesnikov et al., 2019) has allowed these methods to achieve state of the art performance in low data settings (hénaff et al., 2019) and sometimes even outperform supervised representations in transfer learning settings (he et al., 2019;misra & van der maaten, 2019;chen et al., 2020).given that it has been a decade since the original wave of generative pre-training methods for images and considering their substantial impact in nlp, this class of methods is due for a modern re-examination and comparison with the recent progress of self-supervised methods. 
we re-evaluate generative pre-training on images and demonstrate that when using a flexible architecture (vaswani et al., 2017), a tractable and efficient likelihood based training objective (larochelle & murray, 2011;oord et al., 2016), and significant compute resources (2048 tpu cores), generative pre-training is competitive with other self-supervised approaches and learns figure 1. an overview of our approach. first, we pre-process raw images by resizing to a low resolution and reshaping into a 1d sequence. we then chose one of two pre-training objectives, auto-regressive next pixel prediction or masked pixel prediction. finally, we evaluate the representations learned by these objectives with linear probes or fine-tuning.representations that significantly improve the state of the art in low-resolution unsupervised representation learning settings. this is especially promising as our architecture uses a dense connectivity pattern which does not encode the 2d spatial structure of images yet is able to match and even outperform approaches which do. we report a set of experiments characterizing the performance of our approach on many datasets and in several different evaluation settings (low data, linear evaluation, full fine-tuning). we also conduct several experiments designed to better understand the achieved performance of these models. we investigate how representations are computed inside our model via the performance of linear probes as a function of model depth as well as studying how scaling the resolution and parameter count of the approach affects performance., 2015)have shown significant promise. we re-evaluate generative pre-training on images and demonstrate that when using a flexible architecture(vaswani et al. we investigate how representations are computed inside our model via the performance of linear probes as a function of model depth as well as studying how scaling the resolution and parameter count of the approach affects performance. furthermore, linear probes help disentangle feature quality from model architecture: in fine-tuning, one model may outperform another because its architecture is more suited for the downstream task rather than because of better pretraining.no data augmentation is used when pre-training on web images, and lightweight data augmentation is used when pre-training or fine-tuning on imagenet.we also train igpt-m, a 455m parameter model with l = 36 and d = 1024 and igpt-s, a 76m parameter model with l = 24 and d = 512 to study the effect of model capacity on representation quality in a generative model. latent variable models such as vaes can avoid this issue by explicitly learning a representation of the input data, but deep autoregressive generative models have the same width and connectivity pattern at every layer. using the linear probe as a tool for measuring representation quality, we investigate whether better generative models (as measured by log-prob on held-out data) also learn better representations.note that our model is trained at the same input resolution (ir) as cifar, whereas models trained at the standard im-agenet ir may experience distribution shock upon linear evaluation. on cifar-10, we observe that linear probe accuracy at every layer is worse than that of the autoregressive model, with best-layer performance more than 1% lower.6% we see that auto-regressive models produce much better features than bert models after pre-training, but bert models catch up after fine-tuning. 
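as a concrete illustration of the linear-probe evaluation discussed above, the sketch below freezes a feature extractor and fits only a linear classifier on the extracted features. the extractor shown (a fixed random projection with a relu) is just a stand-in for a layer of the pre-trained model, the data is synthetic, and the sklearn call is only one convenient way to fit the probe.

# linear probe sketch: freeze a feature extractor, train only a linear classifier.
# pretrained_features is a hypothetical stand-in for a real frozen model layer.
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
W = rng.normal(size=(32 * 32 * 3, 128))          # frozen "pre-trained" projection

def pretrained_features(images):
    # in a real setup this would be activations from a chosen layer of the
    # frozen pre-trained network; here it is a fixed random projection + relu
    return np.maximum(images.reshape(len(images), -1) @ W, 0.0)

images = rng.normal(size=(500, 32, 32, 3))       # toy stand-in for low-resolution images
labels = rng.integers(0, 10, size=500)           # toy class labels

feats = pretrained_features(images)
x_tr, x_te, y_tr, y_te = train_test_split(feats, labels, random_state=0)
probe = LogisticRegression(max_iter=1000).fit(x_tr, y_tr)   # only the probe is trained
print("linear probe accuracy:", probe.score(x_te, y_te))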
instead, we work directly on a subset of the raw supervised dataset, extracting features using our pre-trained model, and training a linear classifier on those features.many self-supervised approaches focus on designing auxiliary objectives which support the learning of useful representations without attempting to directly model the input data.finally, our results, considered together withdonahue & simonyan (2019), suggest revisiting the representation learning capabilities of other families of generative models such as flows(dinh et al. this motivates our procedure of sequentially searching learning rates from large to small and explains why larger models use lower learning rates than smaller models at fixed input resolution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/318.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/318.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8954b208acba2939cbad82f0f0c87df7f5635ac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/318.txt @@ -0,0 +1 @@ +images containing faces are essential to intelligent visionbased human computer interaction, and research efforts in face processing include face recognition, face tracking, pose estimation, and expression recognition, etc.however, many reported methods assume that the faces in an image or an image sequence have been identified and localized.to build fully automated systems that analyze the information contained in face images, robust and efficient face detection algorithms are required.given an image, the goal of face detection is to identify all image regions which contain a face regardless of its three-dimensional position, orientation, and lighting conditions.such a problem is challenging because faces are not rigid and have a high degree of variability in size, shape, color, and texture.numerous techniques have been developed to detect faces in a single image .most face detection methods are based on knowledge, features, template matching, neural network, or their combination.among them, feature-based methods provide a good solution for detecting faces in different poses and sizes.the colorbased approaches give useful cues for face detection .however, these approaches face difficulties in robustly detecting skin colors in the presence of complex backgrounds and different lighting conditions.images containing faces are essential to intelligent visionbased human computer interaction, and research efforts in face processing include face recognition, face tracking, pose estimation, and expression recognition, etc.given an image, the goal of face detection is to identify all image regions which contain a face regardless of its three-dimensional position, orientation, and lighting conditions.based on the compact distribution of human skin colors in ycbcr color space, the proposed method detects skin regions over the entire image, and then generates face candidates based on these skin regions' borders.the algorithm uses a fuzzy reasoning engine with fuzzy factors such as the shape, the size of a face, and the presence of eyes and lips on a face to verify each face candidate.the outside borders which truly enclose the skin regions are face candidates, whereas the inside ones just enclose nonskin regions.the shape, the size of a face, and the presence of eyes, lips or noses become the fuzzy factors of the fuzzy reasoning.the survey also gives some special cases such as a face connected with background or faces 
connected via the skin color, and a skin region containing no hole (inside border).-if the fuzzy factors of a face candidate are suitable and the candidate contains one or more inside borders that might be facial features, then we conclude that it is a face.-if the fuzzy values of the shape and the size of a face candidate make it be one connected region where its inside borders are of one face, then we cut the background through process a in 2.-if the fuzzy values of the shape and the size of a face candidate make it be a connected region where its inside borders are of two faces, then we separate it into two faces through process b in 2.-if a suitable face candidate does not contain any inside border, then we should find absent features which have the same skin color.after verifying face candidates, outside borders of true faces may be concave at some positions which may contain some important information because of face poses and lighting conditions.a true face may not contain any inside border since its features have the same skin color.a new approach is presented that does the detection only on border pixels for face detection based on skin color and facial features.we propose a face detection algorithm for color images in complex backgrounds, using fuzzy logic, fast marching method and basic image processing techniques. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/319.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/319.txt new file mode 100644 index 0000000000000000000000000000000000000000..158f7151a796d8f93cf4b796fccb98fc516663f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/319.txt @@ -0,0 +1 @@ +convolutional neural network has had ground breaking results over the past decade in a variety of fields related to pattern recognition; from image processing to voice recognition. the most beneficial aspect of cnns is reducing the number of parameters in ann . this achievement has prompted both researchers and developers to approach larger models in order to solve complex tasks, which was not possible with classic anns; . the most important assumption about problems that are solved by cnn should not have features which are spatially dependent. in other words, for example, in a face detection application, we do not need to pay attention to where the faces are located in the images. the only concern is to detect them regardless of their position in the given images . another important aspect of cnn, is to obtain abstract features when input propagates toward the deeper layers. for example, in image classification, the edge might be detected in the first layers, and then the simpler shapes in the second layers, and then the higher level features such as faces in the next layers as shown in fig. 1, as in . for example, in image classification, the edge might be detected in the first layers, and then the simpler shapes in the second layers, and then the higher level features such as faces in the next layers as shown in fig. color image of a cifar-10 dataset with a width and height of 32×32 pixels, and a depth of 3 which rgb channel) or a video (gray scale video whose height and width are the resolution, and the depth are the frames) or even an experimental video, which has width and height of (l × l) sensor values, and the depths are associated with different time frames, as in . 
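to make the skin-colour step in the face-detection passage above more concrete, the sketch below thresholds an image in ycbcr space and keeps connected skin regions as face candidates. the cb/cr ranges are commonly quoted illustrative values, not the thresholds of the cited method, and the fuzzy verification of candidates (shape, size, eyes and lips) is deliberately omitted.

# skin-region face-candidate sketch in ycbcr space; the thresholds are only
# commonly used illustrative values, and candidate verification is omitted.
import cv2
import numpy as np

def face_candidates(bgr_image, min_area=400):
    ycrcb = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2YCrCb)    # opencv stores y, cr, cb
    lower = np.array([0, 133, 77], dtype=np.uint8)          # 133 <= cr, 77 <= cb
    upper = np.array([255, 173, 127], dtype=np.uint8)       # cr <= 173, cb <= 127
    mask = cv2.inRange(ycrcb, lower, upper)
    mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, np.ones((5, 5), np.uint8))
    n, labels, stats, _ = cv2.connectedComponentsWithStats(mask)
    boxes = []
    for i in range(1, n):                                   # label 0 is the background
        x, y, w, h, area = stats[i]
        if area >= min_area:
            boxes.append((x, y, w, h))                      # candidate face regions
    return boxes

img = np.zeros((240, 320, 3), dtype=np.uint8)               # stand-in image
print(face_candidates(img))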
in the hidden layer in the multi-layer perceptron), as an example, there should be 32×32×3 weight connections for the cifar-10 dataset. if we add one more neuron into the hidden layer, then we will need another 32×32×3 weight connection, which will become in total, 32×32×3×2 parameters. to make it more efficient, we can connect the input image to the neurons in the next layer with exactly the same values for the height and width. in other words, the hidden neurons in the next layer only get inputs from the corresponding part of the previous layer. thus, if we want to have 32×32 neurons in the next layer, then we will have 5×5×3 by 32x32 connections which is 76,800 connections (compared to 3,145,728 for full connectivity), as in. another assumption for simplification, is to keep the local connection weights fixed for the entire neurons of the next layer. this will connect the neighbor neurons in the next layer with exactly the same weight to the local region of the previous layer. therefore, it again drops many extra parameters, and reduces the number of weights to only 5×5×3=75 to connect 32×32×3 neurons to 32×32 in the next layer. secondly, and a more interesting concept, is that fixing the weights for the local connections is similar to sliding a window of 5×5×3 in the input neurons and mapping the generated output to the corresponding place.is the output in the next layer, is the input image and is the kernel or filter matrix and is the convolution operation. therefore, each node in a fully-connected layer is directly connected to every node in both the previous and in the next layer as shown in fig.13, from this figure we can note that each of the nodes in the last frames in the pooling layer are connected as a vector to the first layer from the fully-connected layer. the most important layer in cnn is convolution layer which takes most of the time within the network. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/32.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/32.txt new file mode 100644 index 0000000000000000000000000000000000000000..a369bb175e8352ad8892c7e89c805115a3485e2c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/32.txt @@ -0,0 +1 @@ +automatic speech recognition (asr) plays a vital role in human-computer interactions, particularly in voice assistants. it allows such intelligent devices to translate a speech signal into textual information to obtain semantic comprehension before taking action. due to the rapid growth of related disciplines, asr systems have remarkably improved and are extensively used in several sectors, significantly improving work efficiency and reducing human demands.most state-of-the-art asr systems based on hidden markov model (hmm) or deep learning characterize the speech signal with acoustic features derived from the absolute frequency measurements such as mel-frequency cepstral coefficients (mfccs), perceptual linear predictive cepstrum (plp) and mel-filter bank . how-ever, it is well known that the frequency space of speakers varies greatly, especially if we compare that of men with that of women or children: women and children produce speech with higher frequencies than men . 
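the parameter-count arithmetic in the cnn passage above (full connectivity versus local 5x5x3 receptive fields versus shared weights, for a 32x32x3 cifar-10 input) can be checked with a few lines; this is simply the arithmetic from the text written out, not new material.

# worked example of the parameter counts discussed for a 32x32x3 cifar-10 input
h, w, c = 32, 32, 3          # input height, width, channels
k = 5                        # kernel size of the local receptive field

full_per_neuron = h * w * c                      # 3,072 weights for one hidden neuron
full_layer = full_per_neuron * (h * w)           # 3,145,728 for a 32x32 hidden layer
local_layer = (k * k * c) * (h * w)              # 76,800 with local 5x5x3 connections
shared = k * k * c                               # 75 once the local weights are shared

print(full_per_neuron, full_layer, local_layer, shared)   # 3072 3145728 76800 75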
furthermore, if the physiological, emotional, or sociological factors (i.e., age, speaking rate, accent) are considered, the acoustic space varies significantly due to the inter-speaker and intra-speaker differences .to take into account the large variability of speech (interspeaker variability, environmental variability, etc.), current asr systems require a very large amount of data for their training. this is even more true in the case of systems based on deep learning. this is a major handicap for developing such systems for poorly endowed or endangered languages. it is, in fact, often quite challenging and very costly to develop an asr system for minority or low-resource languages when a large dataset of the transcribed speech does not exist . one research direction to reduce the training data size is to make the asr systems intrinsically speaker-independent. if this is possible for the acoustic part of the systems, only one speaker would be sufficient for the learning phase. this research proposes to describe the speech signal based on "acoustic gestures", with the hypothesis that it is more robust and capable of improving the performance of asr systems, particularly with respect to the natural variability of the speech. we suggest defining the acoustic gestures of the speech signal using polar coordinates rather than the transition angles on the acoustic space of the spectral subband centroid frequency (sscf). we anticipate that the polar coordinates provide a better representation of the speech because they can define not only the transition direction but also the acoustic trajectory of the speech signal.this paper is structured as follows: section 2 includes related work. the proposed method is described in section 3. the experiments are introduced in section 4. section 5 contains the results and discussions. finally, section 6 presents the conclusion and future work.most state-of-the-art asr systems based on hidden markov model (hmm) or deep learning characterize the speech signal with acoustic features derived from the absolute frequency measurements such as mel-frequency cepstral coefficients (mfccs), perceptual linear predictive cepstrum (plp) and mel-filter bank. how-ever, it is well known that the frequency space of speakers varies greatly, especially if we compare that of men with that of women or children: women and children produce speech with higher frequencies than men. this research proposes to describe the speech signal based on "acoustic gestures", with the hypothesis that it is more robust and capable of improving the performance of asr systems, particularly with respect to the natural variability of the speech. we suggest defining the acoustic gestures of the speech signal using polar coordinates rather than the transition angles on the acoustic space of the spectral subband centroid frequency (sscf). we anticipate that the polar coordinates provide a better representation of the speech because they can define not only the transition direction but also the acoustic trajectory of the speech signal. the ssc frequencies (sscfs) were computed by dividing the frequency band into a number of subbands and computing the centroid of each subband using power spectrum of the speech signal (equation1).in each sscf i -sscf i+1 plane, the angle between a transition window of n frames are defined as in equation2. 
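a minimal numeric sketch of the sscf transition angles and their polar-coordinate variant described above is given below. since equation 2 is not reproduced in this text, the sketch assumes that the transition between frames j and j+n in the sscf_i - sscf_{i+1} plane is the vector of the two centroid differences, and that the polar representation keeps both the angle and the radius of that vector instead of the angle alone; treat it as an approximation, not the paper's exact formulation.

# illustrative sketch: transition angle vs. polar coordinates on an sscf_i-sscf_{i+1}
# plane; the exact form of equation 2 is assumed, not reproduced from the paper.
import numpy as np

def transitions(sscf_i, sscf_next, n=2):
    dx = sscf_i[n:] - sscf_i[:-n]           # change along the sscf_i axis
    dy = sscf_next[n:] - sscf_next[:-n]     # change along the sscf_{i+1} axis
    angles = np.arctan2(dy, dx)             # transition direction only
    radii = np.hypot(dx, dy)                # polar coordinates: direction + magnitude
    return angles, radii

sscf1 = np.array([500.0, 520.0, 560.0, 580.0, 590.0])    # toy centroid track (hz)
sscf2 = np.array([1500.0, 1480.0, 1450.0, 1440.0, 1430.0])
ang, rad = transitions(sscf1, sscf2)
print(np.degrees(ang), rad)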
our experiments showed that using only arctan angles to define the acoustic trajectory of the speech signal is probably not a very good idea because the jumps will produce a kind of "noise" in the data. we assert that they give a more accurate representation because they are continuous variables and can specify not only the transition direction but also the tra-2 figure3: the sscf angles on the sscf1-sscf2 plane at fast and normal rates produced by a vietnamese male and female (source).where j represents a frame at index j; sscf i and sscf i+1 represent the axis of the sscf plane, respectively.in this study, the angles and the polar coordinates were evaluated for characterizing the spectral trajectory of the speech signal on the sscf planes and compared to the classical mfcc, the most widely used acoustic feature for speech recognition.overall, we still find that recognition rates are similar between the polar parameters and the mfcc (particularly for the 6 mfcc) compared to the angles. moreover, calculating a trajectory angle between sscf0 and sscf1 (f0 and f1) is probably not judicious because it does not correspond to real physical phenomena that sscf0 (f0) characterizes the vibration of vocal cords, whereas the other sscf characterize the resonances of the vocal tract.in this work, we investigated the use of polar coordinates in sscf planes to describe the speech signal based on its acoustic trajectory. the finding showed that they significantly outperformed angles in both normal and crossgender recognitions, demonstrating that the polar representation provides better information in characterizing the acoustic elements of the speech signal. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/320.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/320.txt new file mode 100644 index 0000000000000000000000000000000000000000..cf3649b47ea034c64bde85bca52c9da83f628ffa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/320.txt @@ -0,0 +1 @@ +stroke is a major cause of mortality and high morbidity in causing disability in many countries. it is important to receive a correct diagnosis before stroke treatment starts, because treatment for stroke depends on the type of stroke suffered. this study classifies stroke patients into ischemic stroke and hemorrhage stroke based on ct scan image data. ischemic stroke is generally caused by a blockage in a blood vessel. while hemorrhage stroke is caused by bleeding in brain tissue.research related to the diagnosis and prediction of stroke was carried out by chiun-li chin, et al. who developed an early ischemic stroke detection system automatically using the cnn deep learning algorithm. the cnn architecture used uses two convolutional layers, namely the pooling layer and the fully connected layer. the main purpose of the pooling layer is down-sampling, which means the layer will compress the amount of data and parameters to reduce the problem of overfitting. the classification results obtained an accuracy value of 90%. other researchers using cnn's deep learning architecture are marbun, jt. et al. used an open stroke dataset obtained from www.radiopaedia.org to classify patient data into three classes: normal, ischemic stroke, and hemorrhagic stroke through ct scan images. the accuracy obtained is 90%. while other researchers using the same dataset, badriyah, tessy et al. do hyperparameter optimization on deep learning algorithm to improve stroke diagnosis accuracy. 
the random search optimization algorithm and bayesian search for hyperparameter tuning in deep learning can increase accuracy to 100%. the support vector machine algorithm also gave satisfying results in research conducted by jenna r.s and dr. sukesh kumar, who predicted stroke using the performance of various kernel functions in the support vector machine method. the best experimental results were obtained with the linear kernel function at 91.7% and the polynomial kernel at 89.0%. apart from studies using particular methods to classify medical datasets, research conducted by gur amrit pal singh and p.k. gupta detected and classified lung cancer using several machine learning algorithms, namely k-nearest neighbor (knn), support vector machine (svm), decision tree, naïve bayes, stochastic gradient descent (sgd), random forest and multilayer perceptron (mlp), which is one type of deep learning architecture. from the classification of a medical image dataset of 15,750 samples, with a distribution of 6,910 for the benign class and 8.84% for the malignant class, the highest accuracy was 88.55% with the mlp approach. research that combines several methods was conducted by hima haridas and aswathy wilson, who compared three approaches. the first approach uses a neural network (nn) algorithm; the second approach uses a combination of two algorithms, namely principal component analysis (pca) for dimension reduction and a neural network for classification; while the third approach uses three algorithms, namely a decision tree for feature selection, pca for dimension reduction and a neural network for stroke prediction. the comparisons made using the three approaches give the following results: 95.0%, 95.2%, and 97.7%, where the best results are given by the third approach. it is important to receive a correct diagnosis before stroke treatment starts, because treatment for stroke depends on the type of stroke suffered. this study classifies stroke patients into ischemic stroke and hemorrhage stroke based on ct scan image data. who developed an early ischemic stroke detection system automatically using the cnn deep learning algorithm. radiopaedia. while other researchers using the same dataset, badriyah, tessy et al. gupta detected and classified lung cancer using several machine learning algorithms, namely k-nearest neighbor (knn), support vector machine (svm), decision tree, naïve bayes, stochastic gradient descent (sgd), random forest and multilayer perceptron (mlp), which is one type of deep learning architecture. the first approach uses a neural network (nn) algorithm; the second approach uses a combination of two algorithms, namely principal component analysis (pca) for dimension reduction and a neural network for classification. while the third approach uses three algorithms, namely a decision tree for feature selection, pca for dimension reduction and a neural network for stroke prediction. the data collection used in this study is a ct scan dataset of the patient's brain consisting of ischemic stroke image data and hemorrhagic stroke image data. data pre-processing is a step to improve the quality of the image data, consisting of data conversion, cropping, scaling (which regulates the size of the pixels used), grayscale conversion (which makes the image's gray level uniform), and noise removal (which eliminates noise and applies blur), besides feature extraction. 
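the classifier comparison described in this section can be outlined as below. the feature matrix is synthetic (real inputs would be features extracted from the pre-processed ct images), the toy class counts mirror the 226 ischemic / 7 hemorrhagic images reported for this study, and the metric and splits are illustrative rather than the study's actual protocol.

# illustrative comparison of the classifiers named in the text on synthetic features;
# real features would come from the pre-processed ct scan images.
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.svm import SVC

rng = np.random.default_rng(0)
X = rng.normal(size=(233, 64))                   # 226 ischemic + 7 hemorrhagic (toy)
y = np.array([0] * 226 + [1] * 7)

models = {
    "knn": KNeighborsClassifier(),
    "naive bayes": GaussianNB(),
    "logistic regression": LogisticRegression(max_iter=1000),
    "decision tree": DecisionTreeClassifier(random_state=0),
    "random forest": RandomForestClassifier(random_state=0),
    "mlp": MLPClassifier(max_iter=500, random_state=0),
    "svm": SVC(),
}
for name, model in models.items():
    scores = cross_val_score(model, X, y, cv=5)  # plain accuracy; see caveat below
    print(f"{name:>20s}: {scores.mean():.3f}")

note that with 226 versus 7 samples plain accuracy is dominated by the majority class, which is the same imbalance issue raised later for the malware dataset; per-class recall or balanced accuracy would be a more informative summary here.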
the classification in this study produces ischemic stroke, hemorrhagic stroke using machine learning algorithm.data collection used in this study is the ct scan data of patients suffering from stroke obtained from hajj hospital in surabaya, indonesia where the data obtained came from 102 patients with 99 data distribution suffering from ischemic stroke and 3 patients suffering from hemorrhage stroke. some patients have multiple images, so that the distribution of class data for ischemic stroke is 226 image data and there are 7 image data obtained for hemorrhage stroke.classification performance is done using 8 machine learning algorithms to make comparisons, namely k-nearest neighbor (knn), naïve bayes, logistic regression (lr), decision tree (dt), random forest (rf), neural network mlp, deep learning (dl ), support vector machine (svm). the results of experiments conducted showed that the classification algorithm using the random forest method obtained the best validation results with the accuracy value of 95. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/321.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/321.txt new file mode 100644 index 0000000000000000000000000000000000000000..977e2bcae3b4ac3835b53cdb3ea618a4ece140c4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/321.txt @@ -0,0 +1 @@ +sporting events have always been very interesting to a wide range of population. one of the most popular sports is football and the champions league is the toughest and the most prestigious club football competition in the world.there are three possible outcomes of a match: home win, draw and away win. due to its popularity and the small number of possible outcomes of games, predicting results is a very interesting and seemingly simple challenge. however, it is very difficult to predict the final outcome because the way the team plays on a particular day depends on many factors, such as the current form, the last team meetings, rivalries, offensive and defensive skills, individual abilities of key players and even the psychological impact of fans in the stands. football is a game where the average sum of scored goals is pretty little (two to three per game), which means that a moment of brilliance or stupidity of an individual can decide the final outcome. for this reason, it is a big challenge to choose the features and the way of classification which would facilitate the prediction.in this paper, we will be developing a software system that can predict the outcome of champions league matches with around 60% accuracy. the champions league consists of two phases. in the first stage of the competition teams are divided into groups. each group has four teams, who each play two games, one at home and one away. two best teams from each group pass to the second round where they play in a knock-out tournament system.the outcome of a match depends on a number of features and that requires a large number of experiments to determine the best subset which has the greatest impact on the final outcome of the match. in order to achieve the best possible properties of prediction we have tested a large number of classifiers. the final part of the project was the choice of classifiers which showed the best results in predicting the outcome of matches.the initial set of more than 30 features was reduced to 20. the accuracy of the most successful classifier has reached a limit of 65%.the next section gives an overview of related articles. 
the third section describes the procedure for determining the system through the selection of the most important features and the most successful learning algorithms. the next section presents the achieved results. the article ends with the conclusion and references. the outcome of a match depends on a number of features, and that requires a large number of experiments to determine the best subset, the one with the greatest impact on the final outcome of the match. the obtained results are compared with the results achieved by the naive bayesian classifier, the k-nn algorithm, and j48 (a member of the decision tree family). the selected set of features consists of the number of played matches, points achieved at a certain point, the outcomes of the meetings played at home and away, and the current form. the set of classifiers used in this paper covers a large part of the above classifiers, and the feature set contains all the features used in the paper, with the addition of several important attributes that can affect the final outcome, such as the number of injured players. the features are: the current form of the teams, based on the results achieved in the last six games; the outcome of the previous meeting of the teams playing the game; the current position in the rankings; the number of injured players from the first team; and the average number of scored and conceded goals per game. testing has shown, however, that separating the form feature into three features (the number of wins, losses and draws) actually gives better results, as computed in the sketch below. in order to achieve better prediction results we used many learning algorithms to determine which give the best results: naive bayes, bayesian networks, logitboost, the k-nearest neighbors algorithm, random forest, and artificial neural networks. three splits were used: training and validation sets containing matches from the first 3 rounds with the remaining 3 rounds as the testing set; training and validation sets containing matches from the first 4 rounds with the remaining 2 rounds as the testing set; and training and validation sets containing matches from the first 5 rounds with the remaining round as the testing set. if we observe the results from the point of view of feature selection, we can see that our basic set (the one without subjective estimates of team quality) actually shows better results than the expertly constructed set. such poor results, comparable to the reference method, are quite unexpected if we take into account that the feature set contains minimally interdependent features. the last claim is confirmed by the f1 measure results, where each and every classifier (including the naïve bayesian) shows significantly better results than the reference method. the bayesian network shows slightly better results than the naïve bayesian classifier, which is consistent with the results obtained in . one possible cause for such results is the property of the k-nn algorithm of taking into account all the features, in other words its inability to filter out nonessential features. the random forest classifier shows excellent performance on the basic set of features and relatively average performance on the expertly built set. 
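the current-form feature discussed above (results over the last six games, split into win/draw/loss counts) could be computed along these lines; the match-record format and function name are made up for illustration and are not taken from the paper.

# sketch of the "current form" feature as win/draw/loss counts over the last six
# games; the match-record format ('w'/'d'/'l' strings) is hypothetical.
def form_features(results, window=6):
    # results: chronological list of 'w', 'd' or 'l' outcomes for one team
    recent = results[-window:]
    return {
        "wins": sum(r == "w" for r in recent),
        "draws": sum(r == "d" for r in recent),
        "losses": sum(r == "l" for r in recent),
    }

print(form_features(["w", "l", "d", "w", "w", "l", "d"]))   # {'wins': 2, 'draws': 2, 'losses': 2}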
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/322.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/322.txt new file mode 100644 index 0000000000000000000000000000000000000000..667de2b4f86f4b49fb8097567bd97f13fdb081d3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/322.txt @@ -0,0 +1 @@ +analyzing stock market performance and using the analysis for short and long term predication of stock market movement is an important and difficult problem. stock market movement can be easily affected by political strategy, economy stability, trade war, unemployment rate, the expectations of investors and some unprecedented event such as the recent pandemic, covid-19, etc. therefore, it is always essential and challenging for the stockholder to be capable of accurately predicting the stock values. the magnitude of change of each influencing factor, and the nature of change (positive or negative change) create an intricate dependency between the factor and the stock market movement. this research area is broad and includes risk assessment and portfolio management, but we will focus on prediction of dow jones industrial average (or dow index) that tracks and combines performance on stock markets for 30 large, publicly-owned companies trading on the us stock market.we will adopt dow index as an indicator of stock market movement. to illustrate the stock movement we plot distribution of weekly, monthly and annual change of dow index in figures 1, 2, and 3, respectively. the aggregate data for the figures are scraped from during years 2008 -2020 and the 1 shows percentage of days, with respect to total number of days in 2008 -2020., when the stock market went down or up, during each day of the week. similarly, in figure 2, and 3, each bar represents cumulative percentage of occurrences within a month and within each year (2008,2009, ...), respectively. we can observe that movement is distributed pretty evenly across all data instances of the inputs, and the complex movement patterns are not easily noticeable in these visualizations. additionally, we propose to use the headlines to predict the movement of the dow index. in this study, we will use top-5 highest rated news headlines from reddit . figures 5 and 4 visualize frequent words from the headlines in word-cloud. as seen in the figures, the most influential words for the down and up movement are very similar.in this paper, we will be focusing on comparing the shallow machine learning (ml) and deep leaning (dl) approaches to predict the dow index movement based on the news headlines. additionally, we will explore and compare use of two different text representation methods as an addition to all the approaches. finally, we will consider two data sets: one that includes dates before the covid-19 pandemic and the other that includes the dates during the covid-19 pandemic up to the june 17th 2020 date, to study how our models handle unprecedented events due to pandemic. 
finally, we will consider two data sets: one that includes dates before the covid-19 pandemic and the other that includes the dates during the covid-19 pandemic up to the june 17th 2020 date, to study how our models handle unprecedented events due to pandemic.performed experimental evaluation of ensemble methods (random forest, adaboost and kernel factory) and single classifier models (neural networks, logistic regression, support vector machines and k-nearest neighbor), and found that random forest outperforms the other models.uses nonlinear svr to build the model where web time series information is mapped to stock price time series, and showed that the use of web information was effective in improving forecasting accuracy when compared with single svr.we utilize the daily world news headlines from reddit via pushshift apito predict the movement of dow jones industrial average such as "up" and "down", where the "up" movement means that closing value of dow jones industrial average is larger than the opening values of the dow jones industrial average in the same day, and the "down" movement means that closing value of the dow jones industrial average is less than the opening values of the dow jones industrial average in the same day.c) recurrent neural network: recurrent neural networks (rnns)-have been widely used to recognize patterns in sequences of data such as time-series data, text data, and biological data which contain sequential dependencies among the features. at each time step, rnn uses recurrent computation (recurrent layer) to calculate the hidden state of the sequence, which preserves the historical information of the sequence up to the current time step. forget gate decides what previous information is to be thrown away, then the input gate decides what information is to be stored. gated recurrent unit (gru), it is a simplified version of lstm, it only uses update gate and reset gate interactively to update hidden states. the reset gate decides how much information to take from previous information, then the update gate decides the strength of contribution of lower layers.the entire dataset is created using the data from february 7th, 2008 to june 17th, 2020, totaling 3112 data points that were divided into two subsets: 1) subset "before pandemic": feb 7th 2008 to dec 31st 2019 contains 2996 samples and 2) subset "during pandemic": jan 1st 2020 to jun 17th 2020 there are 6 experiments in our study, that were performed as follows:.• to evaluate our models "before pandemic" we use only the "before pandemic" data subset and explore use of tf-idf and glove (tables iii and iv, respectively). • to evaluate our models "during pandemic" we re-train our models using "before pandemic" data subset for training, and validate the models using "during pandemic" data subset for validation. we also evaluate both tf-idf and glove, but only utilize the same day's headlines for prediction, due to the relatively small number of data points in "during pandemic" data subset. we can prove that by using the time series data, recurrent neural networks and convolutional neural networks can capture more dependency features than logistic regression, support vector machine and random forest. in particular, we propose novel convolutional neural network and recurrent neural network architectures to find out the effect of global big events to the stock market movements. 
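the up/down label construction and the tf-idf text pipeline described in this passage can be sketched as follows; the headline strings and price numbers are fabricated placeholders, and logistic regression is only one of the shallow baselines mentioned, not the paper's proposed architecture.

# sketch: build up/down labels from open/close prices and classify concatenated
# daily headlines with tf-idf + logistic regression (toy data only).
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import make_pipeline

days = [
    {"headlines": ["markets rally on strong earnings", "tech stocks surge"], "open": 100.0, "close": 102.0},
    {"headlines": ["trade war fears grow", "unemployment rises"], "open": 102.0, "close": 99.5},
    {"headlines": ["central bank cuts rates", "investors optimistic"], "open": 99.5, "close": 101.0},
    {"headlines": ["pandemic disrupts supply chains", "markets slide"], "open": 101.0, "close": 97.0},
]

texts = [" ".join(d["headlines"]) for d in days]
labels = [1 if d["close"] > d["open"] else 0 for d in days]   # 1 = "up", 0 = "down"

model = make_pipeline(TfidfVectorizer(), LogisticRegression(max_iter=1000))
model.fit(texts, labels)
print(model.predict(["stocks surge as investors turn optimistic"]))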
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/323.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/323.txt new file mode 100644 index 0000000000000000000000000000000000000000..bee683705893469e5582ce35175601cdb11bdbd4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/323.txt @@ -0,0 +1 @@ +with the growth of the internet, the growth of malware has also increased. the term malware means malicious software; it is an umbrella term used for various types of software programs which tends to have malicious activities on the user's system. there are various tools like zesus which can be used to obfuscate malware and even to develop new malware. moreover, mostly malware codes are reused and various variants of the same malware are available. this causes the similarity in malware code. even if new malware is written the patterns of malware can match up to some extent. these similarities can be detected using machine learning techniques.machine learning mainly involves two kinds of tasks: predictive methods and descriptive methods as shown in fig. 1. predictive tasks involve classification and regression. descriptive tasks involve clustering and association. in the case of classification, it provides discrete results. for example, classification can be used to tell if an email is a spam or not. in regression, the results are continuous or in integers or floating-point value while in classification results lie in some predetermined category. for example, if we have to calculate how many centimeters will it rain today based on past weather conditions like temperature, humidity, wind speed, etc. it will come under regression.in the case of clustering, the same kinds of objects are placed in one cluster. this means the task of clustering is to make groups of a set of objects such that these objects are similar to each other. for example, in the supermarket, there are large numbers of objects in baskets. and grouping the same kind of objects in a basket is called clustering. association means a task to find a relation between an object and a large amount of data. for example, in a supermarket, there are many baskets loaded with various objects. to predict which object will likely to occur if one object is present in the basket is called association. in the case of malware classification and clustering are important methods. in machine learning there is a major issue related to a dataset is of an imbalanced dataset. in an imbalanced dataset, there is a huge difference between positive and negative values. the details of a balanced, imbalanced dataset, a problem with an imbalanced dataset and a solution to convert it into a balanced dataset are provided in section iv of this paper. the rest paper is organized as, after a literature survey, a detailed explanation of machine learning, its tasks, supervised, unsupervised, and reinforcement machine learning is provided followed by details of a balanced and imbalanced dataset. afterward, the methodology is explained and then the results of two experiments on a balanced and imbalanced dataset using api calls are shown followed by the conclusion of the paper. the details of a balanced, imbalanced dataset, a problem with an imbalanced dataset and a solution to convert it into a balanced dataset are provided in section iv of this paper. 
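one straightforward way to turn an imbalanced malware/benign dataset of the kind described in this section into a balanced one is random undersampling of the majority class; the sketch below shows that idea on placeholder arrays (the class counts match those reported later in the text, but the feature vectors are synthetic).

# random undersampling sketch: keep all minority (benign) samples and draw an
# equally sized random subset of the majority (malware) samples; data is synthetic.
import numpy as np

rng = np.random.default_rng(0)
X_malware = rng.normal(size=(42797, 300))     # e.g. api-call count vectors (toy)
X_benign = rng.normal(size=(1079, 300))

keep = rng.choice(len(X_malware), size=len(X_benign), replace=False)
X_bal = np.vstack([X_malware[keep], X_benign])
y_bal = np.concatenate([np.ones(len(keep)), np.zeros(len(X_benign))])
print(X_bal.shape, y_bal.mean())              # (2158, 300), 0.5 -> balanced classes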
the rest paper is organized as, after a literature survey, a detailed explanation of machine learning, its tasks, supervised, unsupervised, and reinforcement machine learning is provided followed by details of a balanced and imbalanced dataset. they used a dataset of 4266 samples which consists of 3265 malware and 1001 benign samples.the process of malware detection using machine learning can be classified into two parts namely feature extraction and implementation of machine learning classifiers. afterward, the dataset is partitioned into 2 ways namely a balanced and imbalanced dataset and finally, machine learning classifiers are implemented on these datasets. this dataset was imbalanced dataset consists of 42797 malware samples and 1079 benign samples. in this technique, excess malware samples eliminated randomly keeping 1079 malware samples and 1079 benign samples in a balanced dataset. balanced dataset consists of 2158 samples (1079 malware and 1079 benign samples) while imbalanced dataset, consists of 43876 samples (42797 malware and 1079 benign samples). this is imbalanced dataset consists of 42797 malware samples and 1079 benign samples. in the present paper, two experiments are performed, in experiment 1, a balanced dataset is taken with 1079 malware and 1079 benign samples. the result of machine learning classifiers namely k-nearest neighbors, gaussian naive bayes, multi naive bayes, decision tree, and random forest on a balanced and imbalanced dataset with training and testing ratio of 80:20,70:30, and 60:40 is shown in tablei5, 6, and 7 respectively. in the case of static features, features are extracted without execution of sample, and while in the case of dynamic features, the malware sample is executed in an isolated environment and after that features are extracted. to study the difference in overall accuracies and effect of an imbalanced dataset, two experiments were performed for balanced and imbalanced data on a previously build a dataset of malware detection on api calls. in the case of balanced data 1079 malware and 1079 benign samples are taken while in imbalanced data 42797 malware and 1079 benign samples are taken. as per our observation, in case the dataset is imbalanced, the resulting accuracy is too much which seems to be fabricated and on the other hand in the case of balanced data set, the accuracy is more reliable as it does not fall in the trap of data imbalance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/324.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/324.txt new file mode 100644 index 0000000000000000000000000000000000000000..630fcde975f3c0086a198d749c2c6ab7d13fa241 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/324.txt @@ -0,0 +1 @@ +driven by the exponential growth in the amount of online videos in recent years, research in video summarization has gained increasing attention, leading to various methods proposed to facilitate large-scale video browsing (gygli et al. 2014;gygli, grabner, and van gool 2015;zhang et al. 2016a;panda and roy-chowdhury 2017;mahasseni, lam, and todorovic 2017;potapov et al. 2014).recently, recurrent neural network (rnn), especially with the long short-term memory (lstm) cell (hochreiter and schmidhuber 1997), has been exploited to model the sequential patterns in video frames, as well as to tackle the end-to-end training problem. zhang et al. (zhang et al. 
2016b) proposed a deep architecture that combines a bidirectional lstm network with a determinantal point process (dpp) module that increases diversity in summaries, referring to as dpp-lstm. they trained dpp-lstm with super-vised learning, using both video-level summaries and framelevel importance scores. at test time, dpp-lstm predicts importance scores and outputs feature vectors simultaneously, which are together used to construct a dpp matrix. due to the dpp modeling, dpp-lstm needs to be trained in a two-stage manner.although dpp-lstm (zhang et al. 2016b) has shown state-of-the-art performances on several benchmarks, we argue that supervised learning cannot fully explore the potential of deep networks for video summarization because there does not exist a single ground truth summary for a video. this is grounded by the fact that humans have subjective opinions on which parts of a video should be selected as the summary. therefore, devising more effective summarization methods that rely less on labels is still in demand. mahasseni et al. (mahasseni, lam, and todorovic 2017) developed an adversarial learning framework to train dpp-lstm. during the learning process, dpp-lstm selects keyframes and a discriminator network is used to judge whether a synthetic video constructed by the keyframes is real or not, in order to enforce dpp-lstm to select more representative frames. although their framework is unsupervised, the adversarial nature makes the training unstable, which may result in model collapse. in terms of increasing diversity, dpp-lstm cannot benefit maximally from the dpp module without the help of labels. since a rnn-based encoder-decoder network following dpp-lstm for video reconstruction requires pretraining, their framework requires multiple training stages, which is not efficient in practice.in this paper, we formulate video summarization as a sequential decision-making process and develop a deep summarization network (dsn) to summarize videos. dsn has an encoder-decoder architecture, where the encoder is a convolutional neural network (cnn) that performs feature extraction on video frames and the decoder is a bidirectional lstm network that produces probabilities based on which actions are sampled to select frames. to train our dsn, we propose an end-to-end, reinforcement learning-based framework with a diversity-representativeness (dr) reward function that jointly accounts for diversity and representativeness of generated summaries, and does not rely on labels or user interactions at all.the dr reward function is inspired by the general criteria of what properties a high-quality video summary should have. specifically, the reward function consists of a diversity reward and a representativeness reward. the diversity reward measures how dissimilar the selected frames are to each other, while the representativeness reward computes distances between frames and their nearest selected frames, which is essentially the k-medoids problem. these two rewards complement to each other and work jointly to encourage dsn to produce diverse, representative summaries. the intuition behind this learning strategy is closely concerned with how humans summarize videos. to the best of our knowledge, this paper is the first to apply reinforcement learning to unsupervised video summarization.the learning objective of dsn is to maximize the expected rewards over time. the rationale for using reinforcement learning (rl) to train dsn is two-fold. 
firstly, we use rnn as part of our model and focus on the unsupervised setting. rnn needs to receive supervision signals at each temporal step but our rewards are computed over the whole video sequence, i.e., they can only be obtained after a sequence finishes. to provide supervision from a reward that is only available in the end of sequence, rl becomes a natural choice. secondly, we conjecture that dsn can benefit more from rl because rl essentially aims to optimize the action (frame-selection) mechanism of an agent by iteratively enforcing the agent to take better and better actions. however, optimizing action mechanism is not particularly highlighted in a normal supervised/unsupervised setting.as the training process does not require labels, our method can be fully unsupervised. to fit the case where labels are available, we further extend our unsupervised method to the supervised version by adding a supervised objective that directly maximizes the log-probability of selecting annotated keyframes. by learning the high-level concepts encoded in labels, our dsn can recognize globally important frames and produce summaries that highly align with human-annotated summaries.we conduct extensive experiments on two datasets, summe (gygli et al. 2014) and tvsum , to quantitatively and qualitatively evaluate our method. the quantitative results show that our unsupervised method not only outperforms other state-of-the-art unsupervised alternatives, but also is comparable to or even superior than most of published supervised methods. more impressively, the qualitative results illustrate that dsn trained with our unsupervised learning algorithm can identify important frames that coincide with human selections.the main contributions of this paper are summarized as follows: (1) we develop an end-to-end, reinforcement learning-based framework for training dsn, where we propose a label-free reward function that jointly accounts for diversity and representativeness of generated summaries. to the best of our knowledge, our work is the first to apply reinforcement learning to unsupervised video summarization.(2) we extend our unsupervised approach to the supervised version to leverage labels. (3) we conduct extensive experiments on two benchmark datasets to show that our unsupervised method not only outperforms other state-of-the-art unsupervised methods, but also is comparable to or even superior than most of published supervised approaches.driven by the exponential growth in the amount of online videos in recent years, research in video summarization has gained increasing attention, leading to various methods proposed to facilitate large-scale video browsing(gygli et al.although dpp-lstm(zhang et al.mahasseni et al. dsn has an encoder-decoder architecture, where the encoder is a convolutional neural network (cnn) that performs feature extraction on video frames and the decoder is a bidirectional lstm network that produces probabilities based on which actions are sampled to select frames. to train our dsn, we propose an end-to-end, reinforcement learning-based framework with a diversity-representativeness (dr) reward function that jointly accounts for diversity and representativeness of generated summaries, and does not rely on labels or user interactions at all. 
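the diversity and representativeness rewards are described above in words; the sketch below writes them out for frame features, reading "how dissimilar the selected frames are" as mean pairwise cosine dissimilarity and representativeness as the mean distance of every frame to its nearest selected frame. the exact normalisation used in the paper may differ, so treat this as an approximation rather than the published reward.

# sketch of a diversity + representativeness reward for a set of selected frames;
# the exact normalisation of the paper's reward may differ from this approximation.
import numpy as np

def dr_reward(features, selected):
    # features: (t, d) frame features; selected: indices of chosen keyframes
    f = features / np.linalg.norm(features, axis=1, keepdims=True)
    s = f[selected]
    n = len(selected)
    # diversity: mean pairwise dissimilarity (1 - cosine similarity) among selections
    sim = s @ s.T
    r_div = 1.0 - (sim.sum() - np.trace(sim)) / (n * (n - 1)) if n > 1 else 0.0
    # representativeness: how close every frame is to its nearest selected frame
    d = np.linalg.norm(features[:, None, :] - features[selected][None, :, :], axis=2)
    r_rep = np.exp(-d.min(axis=1).mean())
    return r_div + r_rep

feats = np.random.default_rng(0).normal(size=(20, 8))
print(dr_reward(feats, [2, 9, 15]))

in the framework described above, a reward of this kind would be fed back through a policy-gradient update to the frame-selection network; the snippet only computes the reward itself.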
the diversity reward measures how dissimilar the selected frames are to each other, while the representativeness reward computes distances between frames and their nearest selected frames, which is essentially the k-medoids problem.the main contributions of this paper are summarized as follows: (1) we develop an end-to-end, reinforcement learning-based framework for training dsn, where we propose a label-free reward function that jointly accounts for diversity and representativeness of generated summaries. (3) we conduct extensive experiments on two benchmark datasets to show that our unsupervised method not only outperforms other state-of-the-art unsupervised methods, but also is comparable to or even superior than most of published supervised approaches.(zhang et al. in particular, we develop a deep summarization network (dsn) to predict probabilities for video frames and make decisions on which frames to select based on the predicted probability distributions.during training, dsn will receive a reward r(s) that evaluates the quality of generated summaries, and the objective of dsn is to maximize the expected rewards over time by producing high-quality summaries. to verify that dsn can benefit more from reinforcement learning than from supervised learning, we add another baseline as the dsn trained with the cross entropy loss using keyframe annotations, where a confidence penalty(pereyra et al. clearly outperforms d-dsn and r-dsn on both datasets, which demonstrates that by using r div and r rep collaboratively, we can better teach dsn to produce high-quality summaries that are diverse and representative.although our reward functions are analogous to the objectives of gan dpp in concepts, ours directly model diversity and representativeness of selected frames in the feature space, which is more useful to guide dsn to find good solutions. extensive experiments on two benchmark datasets showed that using reinforcement learning with our unsupervised reward function outperformed other state-of-the-art unsupervised alternatives, and produced results comparable to or even superior than most supervised methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/325.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/325.txt new file mode 100644 index 0000000000000000000000000000000000000000..821deacf8628c6be334e3042240c5edbf0da552a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/325.txt @@ -0,0 +1 @@ +spatiotemporal data related to the public security have been growing at an exponential rate during the recent years. however, not all data have been effectively used to tackle real-world problems. in order to facilitate crime prevention, several scholars have developed models to predict crime . most used historical crime data alone to calibrate the predictive models.the research on crime prediction currently focuses on two major aspects: crime risk area prediction , and crime hotspot prediction , . the crime risk area prediction, based on the relevant influencing factors of criminal activities, refers to the correlation between criminal activities and physical environment, which both derived from the ''routine activity theory'' . traditional crime risk estimation methods usually detect crime hotspots from the historical the associate editor coordinating the review of this manuscript and approving it for publication was tallha akram . 
distribution of crime cases, and assume that the pattern will persist in the following time periods . for example, considering the proximity of crime places and the aggregation of crime elements, the terrain risk model tends to use crime-related environmental factors and crime history data, and is relatively effective for long-term, stable crime hotspot prediction . many studies have carried out empirical research on crime prediction in different time periods, combining demographic and economic statistics data, land use data, mobile phone data and crime history data. crime hotspot prediction aims to predict the likely location of future crime events and hotspots where the future events would concentrate . a commonly used method is kernel density estimation - . a model that considers temporal or spatial autocorrelations of past events performs better than those that fail to account for the autocorrelation . recently machine learning algorithms have gained popularity. the most popular methods include k-nearest neighbor(knn), random forest algorithm, support vector machine (svm), neural network and bayesian model etc. . some compared the linear methods of crime trend prediction , some compared bayesian model and bp neural network , , and others compared the spatiotemporal kernel density method with the random forest method in different periods of crime prediction . among these algorithms, knn is an efficient supervised learning method algorithm , . svm is a popular machine learning model because it can not only implement classification and regression tasks, but also detect outliers , . random forest algorithm has been proven to have strong non-linear relational data processing ability and high prediction accuracy in multiple fields - . naive bayes (nb) is a classical classification algorithm, which has only a few parameters and it is not sensitive to missing data , . convolutional neural networks (cnn) has strong expansibility, and can enhance its expression ability with a very deep layer to deal with more complex classification problems , . long short-term memory (lstm) neural network extracts time-series features from features, and has a significant effect on processing data with strong time series trends - . this paper will focus on the comparison of the above six machine learning algorithms, and recommend the best performing one to demonstrate the predictive power with and without the use of covariates.the research on crime prediction currently focuses on two major aspects: crime risk area prediction,and crime hotspot prediction,. for example, considering the proximity of crime places and the aggregation of crime elements, the terrain risk model tends to use crime-related environmental factors and crime history data, and is relatively effective for long-term, stable crime hotspot prediction. many studies have carried out empirical research on crime prediction in different time periods, combining demographic and economic statistics data, land use data, mobile phone data and crime history data. some compared the linear methods of crime trend prediction, some compared bayesian model and bp neural network,, and others compared the spatiotemporal kernel density method with the random forest method in different periods of crime prediction. 
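as a concrete illustration of this kind of comparison, the sketch below fits several of the classifiers named above (knn, svm, random forest, naive bayes) on hypothetical grid-level features, i.e. recent per-cell crime counts plus two built-environment covariates, and reports plain test accuracy; the synthetic features, the hotspot labelling rule and the metric are assumptions, not the paper's pipeline, and the cnn/lstm variants would instead consume the per-cell count sequence directly.

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB

rng = np.random.default_rng(1)
# one row per grid cell: counts in the last four periods + poi density + road density
X = rng.poisson(2.0, size=(1000, 6)).astype(float)
X[:, 4:] = rng.random((1000, 2))
# hypothetical label: the cell is a "hotspot" next period if recent counts are high
y = (X[:, :4].sum(axis=1) + rng.normal(0, 1, 1000) > 9).astype(int)

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=0)
models = {
    "knn": KNeighborsClassifier(n_neighbors=5),
    "svm": SVC(kernel="rbf"),
    "random forest": RandomForestClassifier(n_estimators=200, random_state=0),
    "naive bayes": GaussianNB(),
}
for name, m in models.items():
    m.fit(X_tr, y_tr)
    print(name, round(m.score(X_te, y_te), 3))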
point of interests (pois) data and road network density data are considered as covariates in the crime prediction model.the variable data needed for the prediction model is mainly divided into two parts: one is the historical case data; the other is the covariate data representing the surrounding environment. we count the number of cases occurred in each grid in each period, take this part of data as the basic data of the prediction model, and select the data of the corresponding period as the training data according to the prediction target period. in the experiment of this paper, city poi density and road network density are used to obtain the density surface of covariate in the study area through spatial interpolation of covariate spatial point data, which is used as covariate of the prediction model.taking the two weeks from january 1 to january 14, 2018 as the prediction target, the historical data of crime hotspots prediction is divided as shown in the table below. in prediction experiments in the first half of 2018, consisting of 13 time units, the overall prediction performance of the lstm model (model-d) is the best among the four different prediction models (tables2 & 3). this advantage can help lstm model save a part of the time of weight correction in the process of crime hot spot prediction, and has a certain applicability for the prediction of hotspot grids. model-f is lstm prediction model based on historical data, and model-f is lstm prediction model based on historical crime data and built environment covariates. according to the experimental results, we found that the prediction accuracy of the prediction accuracy of the lstm model was also improved after adding built environment covariates, and the average prediction index-hitra of 13 experimental periods increased by percentage points increased by 12. it can be seen that the lstm model and the lstm model with covariates have higher prediction accuracy based on their own high self-learning and advantages of processing time series data. in empirical research on the prediction of crime hotspots, rummens et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/326.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/326.txt new file mode 100644 index 0000000000000000000000000000000000000000..321e1ca7b8c970d190506723b71ac85530d88e8b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/326.txt @@ -0,0 +1 @@ +users may post explicitly their location on the tweet text they post, whereas in certain cases the location may be available implicitly by including certain relevant criteria. tweets are not a strongly typed language, in which users may post casual with emotion images. abbreviated form of text, misspellings, and extra characters of emotional words makes tweet texts noisy. the techniques applied for normal documents are not suited for analysing tweets. the character limitations of tweets about 140 characters may make the tweet uneasy to understand, if the tweet context is not studied.the issue of location prediction related named as geolocation precition is examined for wikipedia and web page documents. entity recognition from these formal documents has been researched for years. different types of content and context handling on these documents are also studied extensively. however, the location prediction problem from twitter depends highly on tweet content. 
users living in specific regions, locations may examine neighborhood tourist spots, landmarks and buildings and related events.home location: user's residential address given by user or location given by user on account creation is considered as home location. home location prediction can be used in various application namely recommendation systems, location based advertisements, health monitoring, and polling etc. home location can be specified as administrative location, geographical location or co-ordinates.tweet location: tweet location refers to the region from where the tweet is posted by user. by construing tweet location, one can get tweet person's mobility. usually home location collected from user profile, whereas tweet location can be arrived from user's geo tag. because of the first perspectives on tweet location, pois are comprehensively received as representation of tweet regions.mentioned location: when composing tweets, user may make reference to the names of a few locations in tweet texts. referenced location prediction may encourage better understanding of tweet content, and advantage applications like recommendation systems, location based advertisements, health monitoring, and polling etc. in this study, we include two sub-modules of mentioned location: first one is recognizing the mentioned location in tweet text, which can be achieved by extracting text content from a tweet that refers to geography names. second one is identifying the location from tweet text by solving them to entries in a geographical database.users may post explicitly their location on the tweet text they post, whereas in certain cases the location may be available implicitly by including certain relevant criteria.home location: user's residential address given by user or location given by user on account creation is considered as home location. home location prediction can be used in various application namely recommendation systems, location based advertisements, health monitoring, and polling etc. home location can be specified as administrative location, geographical location or co-ordinates.tweet location: tweet location refers to the region from where the tweet is posted by user. usually home location collected from user profile, whereas tweet location can be arrived from user's geo tag. because of the first perspectives on tweet location, pois are comprehensively received as representation of tweet regions.mentioned location: when composing tweets, user may make reference to the names of a few locations in tweet texts. referenced location prediction may encourage better understanding of tweet content, and advantage applications like recommendation systems, location based advertisements, health monitoring, and polling etc. in this study, we include two sub-modules of mentioned location: first one is recognizing the mentioned location in tweet text, which can be achieved by extracting text content from a tweet that refers to geography names. they aimed to identify automatically by ranking the local words by their location, and they find their degree of association of location words associated to particular location or cities. the aim of proposed system is to predict the user location from twitter content considering user home location, tweet location and tweet content. thus from this work, we can conclude that decision tree is the suitable algorithm for location prediction problem in tweet texts v. 
conclusion three locations are considered from twitter data, namely home location, mentioned location and tweet location. our experiment analysis concluded that decision tree is suitable for tweet text analysis and location prediction problem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/327.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/327.txt new file mode 100644 index 0000000000000000000000000000000000000000..94f0d90e98f45b4f53d3a961fdfc645e44c38438 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/327.txt @@ -0,0 +1 @@ +in recent years, time series classification has attracted lots of attention as its application can be seen in many aspects such as heart disease prediction , weather forecasting , automatic device classification , etc. for classification, how to sufficiently utilize the temporal information of time series data is the essential part. machine learning algorithms are popular for data mining thanks to its solid statistical theory. researchers extract features from the raw data as feature vectors and they are harnessed as the input of the subsequent machine learning algorithm. but traditional machine learning algorithms are based on the handcrafted features. it is obviously that designing the features manually is timeconsuming. besides, extracting appropriate features for the task relies on the expertise of the researchers. comparing with traditional machine learning methods, deep learning methods are able to extract features automatically thus allowing the the associate editor coordinating the review of this manuscript and approving it for publication was tie qiu. researchers to build an end-to-end model for concrete tasks. with the arrival of the era of big data, more and more data are available to train a deep learning model and it turns out the more data that are used to train the model, the stronger performance of the model is. so, deep learning algorithms outperforms the traditional machine learning algorithms and have played an important role in data mining, image processing and natural language processing , etc.deep learning algorithms are mainly about artificial neural network (ann) . neural network with multiple layers, also called multi-layer perception (mlp), is capable for matching sophisticated function to solve different tasks. images commonly contains much of spatial information that are tricky for mlp to extract and utilize effectively. to better harness the spatial information of the input data, convolutional neural network (cnn) is developed. convolutional layers work like filters that have a significant output when facing a particular kind of spatial information such as edges or corners in the images which results in a better ability to utilize spatial information comparing with mlp.thanks to its ability of taking advantage of spatial information of images, cnn has remarkable achievements in image processing related areas such as image classification , object detection - , image caption , etc.the success of cnn in image processing also inspired researches in applying cnn in time series data as time series data contains temporal information that are kind of similar to spatial information of the images. for time series data, cnn is a filter that is sensitive to a certain kind of changing regulation. 
thanks to that trait, cnn can extract features based temporal information of time series data and makes a fast and effective end-to-end time series data classification model possible. however, a robust and reliable based cnn model which is adaptive for a various kind of time series data is still lacking.recurrent neural network (rnn) is a special structure that aims to take advantage of temporal information of the input data. as mlp suffers for ignoring the relationship of the current input data with the former and latter input data, rnn is developed. comparing with mlp, rnn can pass the temporal information through the network. it plays an important role in the time series data related tasks such as machine translation , speech recognition , emotion classification , etc. but training a deep rnn is extremely time-consuming. therefore, we consider a cnn-rnn cascade model for time series data classification.the contributions of this paper are in the following two aspects:firstly, we propose a model based on dual path convolutional neural network to extract features based on temporal information of the time series data. then we use rnn layers and fully connected layers to learn the map between the features and the output. our model is named as dual path cnn-rnn cascade network (dpcrcn). the dpcrcn is able to extract features automatically and effectively and analyze not only the temporal information of a single variable but the interactions of a group of variables. in addition, we use adaptive region of interest (roi) pooling to make our model adaptive to a flexible length of time series data.secondly, we conduct experiments on activity recognition system based on multisensor data fusion (arem) dataset to assess our model and compare with both the methods used in and other popular machine learning methods. what's more, we divide the validation data into pieces which contain different number of sequences. we examine our model on five different validation datasets to test the generalization of our model. besides, we provide the details of the dataset, the model and the training procedure.the rest of this paper is organized as follows. section ii presents related surveys and techniques. section iii introduces the proposed algorithm. section iv demonstrates the result of the experiment and details of the training process. section v presents the discussion. the conclusion and future work are shown in section vi. with the arrival of the era of big data, more and more data are available to train a deep learning model and it turns out the more data that are used to train the model, the stronger performance of the model is.the success of cnn in image processing also inspired researches in applying cnn in time series data as time series data contains temporal information that are kind of similar to spatial information of the images. thanks to that trait, cnn can extract features based temporal information of time series data and makes a fast and effective end-to-end time series data classification model possible.firstly, we propose a model based on dual path convolutional neural network to extract features based on temporal information of the time series data.tried a structure called multiscale convolutional neural network (mcnn) which applies different transformations to time series data. 
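a rough pytorch sketch of a cascade in this spirit is given below: two 1-d convolutional paths extract temporal features, an adaptive pooling layer (standing in for the one-dimension roi pooling described here) maps variable-length sequences to a fixed size, and an lstm plus a fully connected layer produce the class scores; the channel widths, kernel sizes and pooled length are illustrative assumptions, not the dpcrcn configuration reported in the paper.

import torch
import torch.nn as nn

class CNNRNNCascade(nn.Module):
    def __init__(self, n_vars, n_classes, pooled_len=32, hidden=64):
        super().__init__()
        # two convolutional paths with different receptive fields over the time axis
        self.path_a = nn.Sequential(nn.Conv1d(n_vars, 32, kernel_size=3, padding=1), nn.ReLU())
        self.path_b = nn.Sequential(nn.Conv1d(n_vars, 32, kernel_size=7, padding=3), nn.ReLU())
        # adaptive pooling plays the role of 1-d roi pooling: any input length -> pooled_len
        self.pool = nn.AdaptiveMaxPool1d(pooled_len)
        self.rnn = nn.LSTM(input_size=64, hidden_size=hidden, batch_first=True)
        self.fc = nn.Linear(hidden, n_classes)

    def forward(self, x):                                        # x: (batch, n_vars, time)
        h = torch.cat([self.path_a(x), self.path_b(x)], dim=1)   # (batch, 64, time)
        h = self.pool(h).transpose(1, 2)                         # (batch, pooled_len, 64)
        _, (hn, _) = self.rnn(h)
        return self.fc(hn[-1])                                   # class logits

model = CNNRNNCascade(n_vars=7, n_classes=6)   # e.g. 7 sensor channels, 6 activities
print(model(torch.randn(4, 7, 40)).shape)      # works for any sequence length -> [4, 6]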
although deep rnns have made huge achievements for time series data analysis, it is extremely time-consuming to train a deep rnn model thus leading to consider a combination of cnn and rnn for time series data classification.proposed an rnn-cnn cascade model for time series data classification aims to utilize the advantage of rnn and cnn to faster the training process and better utilize the temporal information of the data. after that, we use a onedimension roi pooling layer to adjust the length of the data in order to make our model practical for different shape of the input data.to evaluate the generalization and reliability for different shape of time series data, we also divide the validation data into pieces that contains 38 sequence, 40 sequence, 44 sequence and 128 sequence. and the cascade structure has a positive affection on the performance on time series data classification because the cascade structure outperforms dual path convolutional network and lstm. even facing the time series data contains different number of sequences and haven't been used to train our model, the performance is still satisfied. the results show that cascade model can make use of the advantages of cnn and rnn to build a reliable and powerful end-to-end classifier to address time series data classification problems. to solve that problem, we proposed a deep learning architecture which is feasible for both multi variate time series data and single variate time series data. in our model, we use a dual path convolutional neural network to extract the features of the input data and use lstm and the fully-connected layers to learn the map between the extracted features and the output. in the future, we are going to investigate more novel architectures both for time series data classification and time series data prediction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/328.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/328.txt new file mode 100644 index 0000000000000000000000000000000000000000..1298c3d6a7478d1f29142d00cf3917083f9f6e3c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/328.txt @@ -0,0 +1 @@ +current approaches to object recognition make essential use of machine learning methods. to improve their performance, we can collect larger datasets, learn more powerful models, and use better techniques for preventing overfitting. until recently, datasets of labeled images were relatively small -on the order of tens of thousands of images (e.g., norb , caltech-101/256 , and cifar-10/100 ). simple recognition tasks can be solved quite well with datasets of this size, especially if they are augmented with label-preserving transformations. for example, the currentbest error rate on the mnist digit-recognition task (<0.3%) approaches human performance . but objects in realistic settings exhibit considerable variability, so to learn to recognize them it is necessary to use much larger training sets. and indeed, the shortcomings of small image datasets have been widely recognized (e.g., pinto et al. ), but it has only recently become possible to collect labeled datasets with millions of images. the new larger datasets include labelme , which consists of hundreds of thousands of fully-segmented images, and imagenet , which consists of over 15 million labeled high-resolution images in over 22,000 categories.to learn about thousands of objects from millions of images, we need a model with a large learning capacity. 
however, the immense complexity of the object recognition task means that this problem cannot be specified even by a dataset as large as imagenet, so our model should also have lots of prior knowledge to compensate for all the data we don't have. convolutional neural networks (cnns) constitute one such class of models . their capacity can be controlled by varying their depth and breadth, and they also make strong and mostly correct assumptions about the nature of images (namely, stationarity of statistics and locality of pixel dependencies). thus, compared to standard feedforward neural networks with similarly-sized layers, cnns have much fewer connections and parameters and so they are easier to train, while their theoretically-best performance is likely to be only slightly worse.despite the attractive qualities of cnns, and despite the relative efficiency of their local architecture, they have still been prohibitively expensive to apply in large scale to high-resolution images. luckily, current gpus, paired with a highly-optimized implementation of 2d convolution, are powerful enough to facilitate the training of interestingly-large cnns, and recent datasets such as imagenet contain enough labeled examples to train such models without severe overfitting.the specific contributions of this paper are as follows: we trained one of the largest convolutional neural networks to date on the subsets of imagenet used in the ilsvrc-2010 and ilsvrc-2012 competitions and achieved by far the best results ever reported on these datasets. we wrote a highly-optimized gpu implementation of 2d convolution and all the other operations inherent in training convolutional neural networks, which we make available publicly 1 . our network contains a number of new and unusual features which improve its performance and reduce its training time, which are detailed in section 3. the size of our network made overfitting a significant problem, even with 1.2 million labeled training examples, so we used several effective techniques for preventing overfitting, which are described in section 4. our final network contains five convolutional and three fully-connected layers, and this depth seems to be important: we found that removing any convolutional layer (each of which contains no more than 1% of the model's parameters) resulted in inferior performance.in the end, the network's size is limited mainly by the amount of memory available on current gpus and by the amount of training time that we are willing to tolerate. our network takes between five and six days to train on two gtx 580 3gb gpus. all of our experiments suggest that our results can be improved simply by waiting for faster gpus and bigger datasets to become available. until recently, datasets of labeled images were relatively small -on the order of tens of thousands of images (e. luckily, current gpus, paired with a highly-optimized implementation of 2d convolution, are powerful enough to facilitate the training of interestingly-large cnns, and recent datasets such as imagenet contain enough labeled examples to train such models without severe overfitting.the specific contributions of this paper are as follows: we trained one of the largest convolutional neural networks to date on the subsets of imagenet used in the ilsvrc-2010 and ilsvrc-2012 competitionsand achieved by far the best results ever reported on these datasets. 
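an alexnet-style layout with five convolutional and three fully-connected layers can be sketched in pytorch as below; the exact channel counts, the two-gpu split, local response normalization and the training-time data augmentation are simplified away, so this is only an illustrative approximation of the network described here, not a reproduction of it.

import torch
import torch.nn as nn

# rough five-conv / three-fc layout in the spirit of the network described above
features = nn.Sequential(
    nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2), nn.ReLU(), nn.MaxPool2d(3, 2),
    nn.Conv2d(64, 192, kernel_size=5, padding=2), nn.ReLU(), nn.MaxPool2d(3, 2),
    nn.Conv2d(192, 384, kernel_size=3, padding=1), nn.ReLU(),
    nn.Conv2d(384, 256, kernel_size=3, padding=1), nn.ReLU(),
    nn.Conv2d(256, 256, kernel_size=3, padding=1), nn.ReLU(), nn.MaxPool2d(3, 2),
)
classifier = nn.Sequential(
    nn.Flatten(),
    nn.Dropout(0.5), nn.Linear(256 * 6 * 6, 4096), nn.ReLU(),   # dropout fights overfitting
    nn.Dropout(0.5), nn.Linear(4096, 4096), nn.ReLU(),
    nn.Linear(4096, 1000),                                      # 1000 imagenet classes
)
net = nn.Sequential(features, classifier)
print(net(torch.randn(1, 3, 224, 224)).shape)   # torch.Size([1, 1000])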
we wrote a highly-optimized gpu implementation of 2d convolution and all the other operations inherent in training convolutional neural networks, which we make available publicly1. our final network contains five convolutional and three fully-connected layers, and this depth seems to be important: we found that removing any convolutional layer (each of which contains no more than 1% of the model's parameters) resulted in inferior performance.2 million training images, 50,000 validation images, and 150,000 testing images. since we also entered our model in the ilsvrc-2012 competition, in section 6 we report our results on this version of the dataset as well, for which test set labels are unavailable. on imagenet, it is customary to report two error rates: top-1 and top-5, where the top-5 error rate is the fraction of test images for which the correct label is not among the five labels considered most probable by the model.,).in our implementation, the transformed images are generated in python code on the cpu while the gpu is training on the previous batch of images.7% cnn 37. figure4shows five images from the test set and the six images from the training set that are most similar to each of them according to this measure. notice that at the pixel level, the retrieved training images are generally not close in l2 to the query images in the first column. this should produce a much better image retrieval method than applying autoencoders to the raw pixels, which does not make use of image labels and hence has a tendency to retrieve images with similar patterns of edges, whether or not they are semantically similar.our results show that a large, deep convolutional neural network is capable of achieving recordbreaking results on a highly challenging dataset using purely supervised learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/329.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/329.txt new file mode 100644 index 0000000000000000000000000000000000000000..f6c8ee33298de57670f178dc75bf36a73cf58408 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/329.txt @@ -0,0 +1 @@ +twitter 1 , a popular micro-blogging service, has received much attention recently. it is an online network used by millions of people around the world to stay connected to their friends, family members, and co-workers through their computers and mobile telephones (milstein et al., 2010).nowadays, twitter users have increased rapidly. its community estimated as 120 million worldwide, 1 http://twitter.com/ posts more than 5.5 million messages (tweets) every day (reported by twitter.com in march 2011). twitter can potentially serve as a valuable information resource for various applications. huberman et al. (2009) analyzed the relations among friends. boyd et al. (2010) investigated commutation activity. sakaki et al. (2010) addressed the detection of earthquakes. among the numerous potential applications, this study addresses the issue of detecting influenza epidemics, which presents two outstanding advantages over current methods. large scale: more than a thousand messages include the word "influenza" each day (nov. 2008-oct. 2009). such a huge data volume dwarfs traditional surveillance resources. real-time: twitter enables real-time and direct surveillance. 
this characteristic is extremely suitable for influenza epidemic detection because early stage detection is important for influenza warnings.although twitter based influenza warnings potentially offer the advantages noted above, it might also expose inaccurate or biased information from tweets like the following (brackets indicate the comments): although these tweets include mention of "influenza" or "flu", they do not indicate that an influenza patient is present nearby. we regard such messages (merely suspicions/questions, general news, etc.) as negative influenza tweets. we call others positive influenza tweets. in our experiments, 42% of all tweets that include "influenza" are negative influenza tweets. the huge volume of such negative tweets biases the results. this paper presents a proposal of a machinelearning based classifier to filter out negative influenza tweets. first, we build an annotated corpus of pairs of a tweet and positive/negative labels. then, a support vector machine (svm) (cortes and vapnik, 1995) based sentence classifier extracts only positive influenza tweets from tweets. in the experiments, the results demonstrated the high correlation (0.89 of the correlation), which is equal performance to that of the state-of-the-art method.the specified research point of this study is twofold:(1) this report describes that an svm-based classifier can filter out the negative influenza tweets (f-measure=0.76).(2) experiments empirically demonstrate that the proposed method detects the influenza epidemics with high accuracy (correlation ratio=0.89): it outperforms the state-of-the-art method. (2010)investigated commutation activity. this characteristic is extremely suitable for influenza epidemic detection because early stage detection is important for influenza warnings.although twitter based influenza warnings potentially offer the advantages noted above, it might also expose inaccurate or biased information from tweets like the following (brackets indicate the comments): although these tweets include mention of "influenza" or "flu", they do not indicate that an influenza patient is present nearby.) as negative influenza tweets. we call others positive influenza tweets. in our experiments, 42% of all tweets that include "influenza" are negative influenza tweets. this paper presents a proposal of a machinelearning based classifier to filter out negative influenza tweets. then, a support vector machine (svm)(cortes and vapnik, 1995)based sentence classifier extracts only positive influenza tweets from tweets.(1) this report describes that an svm-based classifier can filter out the negative influenza tweets (f-measure=0.(2) experiments empirically demonstrate that the proposed method detects the influenza epidemics with high accuracy (correlation ratio=0. while they focus on a word distribution, this paper employs a sentence classification (discrimination of negative influenza tweets).as described in section 1, it is necessary to filter out negative influenza tweets to infer precise amounts of influenza epidemics.this paper proposed a new twitter-based influenza epidemics detection method, which relies on the natural language processing (nlp). our proposed method could successfully filter out the negative influenza tweets (f-measure=0. the experiments with the test data empirically demonstrate that the proposed method detects influenza epidemics with high correlation (correlation ratio=0. 
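a minimal sketch of such a filter is shown below, using tf-idf features over the tweet text and a linear svm to separate positive from negative influenza tweets; the example tweets and labels are made up, and the real system relies on the annotated corpus and feature design described above rather than this toy pipeline.

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.svm import LinearSVC
from sklearn.pipeline import make_pipeline

# 1 = positive influenza tweet (an actual nearby case), 0 = negative (news, suspicion, etc.)
tweets = [
    "i caught the flu, fever since yesterday",
    "my son has influenza, staying home today",
    "news: influenza vaccine shipments delayed this winter",
    "is this headache the flu? probably just tired",
]
labels = [1, 1, 0, 0]

clf = make_pipeline(TfidfVectorizer(ngram_range=(1, 2)), LinearSVC())
clf.fit(tweets, labels)
print(clf.predict(["half my office is out with influenza this week"]))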
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/33.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/33.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a2eba75d0adf0ba4725a626a1f3e68f07258e97 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/33.txt @@ -0,0 +1 @@ +in-context learning (icl) is the alternative to the conventional training of large language models (llms) for specific task(s), where models are expected to learn a new task solely from the input text. in few-shot in-context learning that we focus on, the input text contains a set of demonstrations, i.e. the input-output examples of the task to be learned figure 1: in this work, we assess in-context learners' ability to improve when presented with demonstrations using a reasoning concept applicable in the prediction ( §2). we extract these concepts from human explanations ( §3.2) and assess models' ability to learn to use these concepts, as reflected in improving their prediction quality. and theoretical implications, both of which are of great significance; understanding free-form user requests allow applying llms in applications of restricted, or limited data availability without overspecialization (goodfellow et al., 2014). in-context learning can provide a handle of models' behaviour, enabling the model to avoid specific erroneous predictions. in theory, a training process resulting in an accurate new-task learner defines the sufficient conditions for the emergence of a specific level of generalization.recent llms trained on vast mixtures of tasks (sanh et al., 2022a;wang et al., 2022b;chung et al., 2022) show a certain level of new-task icl and gradually bring more attention and expectations in this direction. however, counter-arxiv:2212.01692v4 19 jul 2023 intuitively to the overall evaluations, in-context learners (icls) also expose surprising behavioural artefacts; liu et al. (2022) show icls' sensitivity to the ordering of in-context demonstrations. similarly, lu et al. (2022) find surprising sensitivity of icls to the specific wording of the prompts. min et al. (2022b) show that most of the model performance is persisted even when the contents of the demonstrations are randomly swapped. contrary to the ability to learn from input, wei et al. (2023) propose to attribute this to the over-reliance of incontext learners on semantics of the label tokens, especially in smaller models.we hypothesize that the discrepancy between the expected and the perceived abilities of icls might be attributed to their limited evaluation, commonly performed with a random set of task demonstrations. however, for many open-ended tasks, such as question answering, or translation, randomlychosen demonstrations rarely present a reasoning pattern which can help with the prediction of new input (figure 1). we argue that the evaluation with mostly non-informative contexts also can not reflect on the ability of learning, as observed in humans1 , as the gain of extrapolating associations presented in non-informative demonstrations can only bring little benefit to the practice.we also note that in the absolute numbers, the random-demonstrations evaluation may favour some llms, such as ones with a capacity to remember a wider variety of input distributions from pretraining, that can be used for modulating their behaviour in icl. 
however, note that such behaviour differs from learning new association(s) from the context and makes the model prone to adversaries.hence, in section 2, we propose to evaluate models' in-context learning ability primed with the demonstrations that exhibit a reasoning analogical to the one required for a robust prediction of the predicted sample (figure 1). we measure how well can the recent few-shot learners utilize identified concepts for more accurate predictions ( §3) and find large discrepancies among the models and concepts.our main contributions are following: (i) we introduce a task of concept-sharing few-shot learning, disentangling models' ability to learn a new reasoning concept from other aspects of prediction quality. we show how such reasoning concepts can be extracted from human explanations. (ii) for a wide variety of recent in-context learners, we measure the ability to benefit from presented reasoning concepts. we show that while some models are better at learning concepts on average, this ability can not be attributed to the models' size or training strategy.problem definition given a dataset d : {(x 1 → y 1 ), .., (x i → y i )} ∈ d containing pairs of input x j with associated label y j , an in-context few-shot learner θ(x) → y aims to predict a correct label y k+1 = y k+1 given a sequence of k input-output demonstrations, and the predicted input x k+1 :we expect in-context few-shot learner θ to model the relation of x i and y i by (i) identifying and (ii) applying the relations of input and output presented in demonstrations. each such relation is modelled by one or more latent concepts c:we broadly define a concept c as any function c(x, y) → {0, 1}, constraining a space of valid outputs y to the ones where c(x, y) = 1. thus, if θ learns a concept c, it will never predict for x such y that c(x, y) = 0. in a composition {c} = {c 1 , .., c j }, all c i ∈ {c} must evaluate to 1.given that modelling of each c valid for the task of d restrain a set of possible predictions of θ exclusively from incorrect predictions, extending a set of concepts learned in-context with complementary one(s) should never decrease the performance of the model θ on d.in-context learning (icl) is the alternative to the conventional training of large language models (llms) for specific task(s), where models are expected to learn a new task solely from the input text. the input-output examples of the task to be learned figure1: in this work, we assess in-context learners' ability to improve when presented with demonstrations using a reasoning concept applicable in the prediction ( §2).hence, in section 2, we propose to evaluate models' in-context learning ability primed with the demonstrations that exhibit a reasoning analogical to the one required for a robust prediction of the predicted sample (figure1). 
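the setup can be illustrated in code as below: demonstrations sharing a reasoning concept are packed into a k-shot prompt for some generate() call (a placeholder for any in-context learner), and a concept is represented as a predicate c(x, y) -> {0, 1} that constrains which outputs count as consistent; the toy concept, the demonstrations and the generate() interface are all illustrative assumptions, not the paper's evaluation harness.

def build_prompt(demonstrations, query):
    # demonstrations: list of (x, y) pairs that share the reasoning concept of interest
    shots = "\n".join(f"input: {x}\noutput: {y}" for x, y in demonstrations)
    return f"{shots}\ninput: {query}\noutput:"

def concept_holds(x, y):
    # toy concept c(x, y) -> {0, 1}: the output must name the larger of the two numbers in x
    nums = [int(t.strip("?,.")) for t in x.split() if t.strip("?,.").isdigit()]
    return int(str(max(nums)) in y)

demos = [("which is larger, 3 or 8?", "8"), ("which is larger, 12 or 5?", "12")]
query = "which is larger, 4 or 9?"
prompt = build_prompt(demos, query)
# prediction = generate(prompt)            # placeholder for any in-context learner
prediction = "9"                           # stand-in model output for this example
print(concept_holds(query, prediction))    # 1 -> the prediction respects the concept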
we measure how well can the recent few-shot learners utilize identified concepts for more accurate predictions ( §3) and find large discrepancies among the models and concepts.our main contributions are following: (i) we introduce a task of concept-sharing few-shot learning, disentangling models' ability to learn a new reasoning concept from other aspects of prediction quality., (x i → y i )} ∈ d containing pairs of input x j with associated label y j , an in-context few-shot learner θ(x) → y aims to predict a correct label y k+1 = y k+1 given a sequence of k input-output demonstrations, and the predicted input x k+1 :.given that modelling of each c valid for the task of d restrain a set of possible predictions of θ exclusively from incorrect predictions, extending a set of concepts learned in-context with complementary one(s) should never decrease the performance of the model θ on d.we reformulate in-context few-shot learning (1) to a concept-sharing few-shot learning, evaluating the ability of a few-shot learner θ to identify and apply a user-chosen reasoning concept c shown in demonstrations. subsequently, in concept-sharing few-shot learning, we let the learner to infer a prediction for input x k+1 by presenting it with demonstrations (x j → y j ) 1.informative concepts extraction constructing a scaled evaluation with annotated reasoning concepts c is challenging since the annotations of such concepts among datasets are very rare.this section overviews few-shot in-context learners that we evaluate for concept-sharing few-shot learning and the datasets with explanations allowing us to extract shared reasoning concepts.we extract the shared concepts c as pairs of (r, e 2 ); hence, conceptual few-shot will prime the prediction with questions and contexts presenting the entities in analogical relations to the ones the model should understand for correct prediction.this work introduces a concept-sharing few-shot learning task that reflects on in-context learners' ability to extract a specific reasoning concept from demonstrations and apply it in a prediction. some important concepts' features that we identify are following: (i) a number of premises or reasoning inference steps needed to map the input to output, (ii) the granularity of the reasoning steps, (iii) a type of the premises; for instance, whether the familiarity with a given concept requires a memorization of an entity property (such as "sun emits light"), or a reasoning mechanics such as analogical reasoning ("if animals can run and cat is an animal, then a cat can run").the presented evaluation of models' sensitivity to demonstrated reasoning concepts introduces a technical framework for quickly assessing models' compliance with our expected functioning; however, a selection of a comprehensive set of concepts, that we can agree our models should be able to learn, remains a subject of open discussion. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/330.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/330.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4a2fef395cbfcdc6a111bf77aec6cc24572e22e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/330.txt @@ -0,0 +1 @@ +accurate weather forecasting is important for agriculture, transportation and management as it influences decision making processes and optimal settings in these sectors. weather prediction can also be used to predict natural disasters, e.g. 
heatwaves or hurricanes and therefore contribute to saving human lives. moreover, accurate weather elements prediction can shed light in analysis regarding the climate change . previous approaches involve using numerical weather prediction (nwp) . nwp uses physical assumptions about the weather elements and mathematically models the atmosphere as a fluid. weather variables are then predicted by means of simulation of partial differential equations . however, this approach requires high computing power and processing the data might take up to several hours . furthermore, due to the weather assumptions mentioned above as well as the noise present in the data, the accuracy of nwp can degrade over time . machine learning data driven based models on the other hand, do not make any assumptions about the atmosphere. instead, these data driven based models take historical weather data as an input and train a model aiming to predict the target values for some time step ahead as an output.in recent years literature has witnessed the success of deep machine learning models in many application domains . in particular, deep learning based models learn their own features during the training, so that the features are the most optimal for network and they do not require to be hand-crafted . deep learning has already been used for predicting climate data . further, deep learning methodologies based on convolutional neural networks (cnns) have already been successfully applied to the weather forecasting problem . however, cnn-based approaches do not incorporate spatial relations between the weather stations. the authors in , casted the historical data in a tensorial format (weather stations, weather variables, time steps) which was then passed to the model and convolution operation was performed over the data volume. in this way, the neighborhood relation between the weather stations is only based on their order in the dataset.graph convolution networks (gcns) can generalize cnns to work on graphs rather than on regular grids . in particular, it enables incorporating the neighbor relation information, e.g. through an adjacency matrix of a graph. gcns have already been applied to various domains, e.g. computer vision , natural language processing or natural sci-ences . the application of gcns to weather prediction has not been yet extensively discovered.in this work, we treat weather stations and their corresponding weather variable values from different time steps as a spatiotemporal graph, as presented in fig 2(b). here, we develop our own novel models from st-gcn and 2s-agcn architectures which were successful on skeleton-based action recognition tasks. our models take as an input tensor data containing values of different weather variables (e.g. temperature, wind speed, air pressure among the others) for several cities and historical time steps and return values wind speed for selected cities and for selected time step ahead. furthermore, the adjacency matrix containing the values of relations between the cities is optimized together with other parameters during the training phase. in this way, the network is able to learn and decide about the city relations based on the given training data.the proposed models are trained on two datasets containing the weather data from danish and dutch datasets. these data have been previously introduced in and . we evaluate our models using mean absolute error and mean squared error, which are the original metrics used for evaluation of the baseline models . 
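a compact pytorch sketch of this idea is shown below: the adjacency matrix is an ordinary learnable parameter, each block mixes information across stations through a row-normalised version of it and across time with a 1-d convolution, and a final linear layer regresses wind speed per station; the layer sizes, the softmax normalisation and the readout are assumptions for illustration rather than the exact st-gcn / 2s-agcn derived models.

import torch
import torch.nn as nn

class SpatioTemporalBlock(nn.Module):
    def __init__(self, n_stations, c_in, c_out):
        super().__init__()
        # learnable station-to-station relation matrix, optimised with the other weights
        self.adj = nn.Parameter(torch.eye(n_stations) + 0.01 * torch.randn(n_stations, n_stations))
        self.theta = nn.Linear(c_in, c_out)                              # feature transform
        self.tconv = nn.Conv1d(c_out, c_out, kernel_size=3, padding=1)   # temporal mixing

    def forward(self, x):                                    # x: (batch, time, stations, c_in)
        a = torch.softmax(self.adj, dim=1)                   # row-normalise learned relations
        h = torch.einsum("ij,btjc->btic", a, self.theta(x))  # spatial (graph) convolution
        b, t, n, c = h.shape
        h = self.tconv(h.permute(0, 2, 3, 1).reshape(b * n, c, t))       # convolve over time
        return torch.relu(h.reshape(b, n, c, t).permute(0, 3, 1, 2))

n_stations, n_vars = 5, 4                                    # e.g. temp, pressure, wind, direction
block = SpatioTemporalBlock(n_stations, n_vars, 16)
head = nn.Linear(16, 1)                                      # wind speed per station at the target step
x = torch.randn(8, 24, n_stations, n_vars)                   # 24 historical hours
print(head(block(x)[:, -1]).shape)                           # torch.Size([8, 5, 1])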
the obtained results show that the performances of our models surpass those of the baseline models. prediction plots and error values are provided. furthermore, since the learnable adjacency matrix is the only element deciding about the relations between the cities, we visualize what network has learnt and provide some interpretation insights.this paper is organized as follows. related work is provided in section 2. section 3 presents the proposed models and relevant details. section 4 describes the conducted experiments with corresponding results and section 5 includes discussion. finally, conclusions are drawn in section 6. instead, these data driven based models take historical weather data as an input and train a model aiming to predict the target values for some time step ahead as an output. the authors in, casted the historical data in a tensorial format (weather stations, weather variables, time steps) which was then passed to the model and convolution operation was performed over the data volume.in this work, we treat weather stations and their corresponding weather variable values from different time steps as a spatiotemporal graph, as presented infig 2(b). temperature, wind speed, air pressure among the others) for several cities and historical time steps and return values wind speed for selected cities and for selected time step ahead. the authors inintroduced weighted graph convolutional lstm architecture, which combines lstm with matrix multiplications replaced with graph convolutions with a single (one for the whole model), learnable adjacency matrix.based on the methodologies discussed above, we develop new novel models and apply them to the task of weather forecasting to predict wind speed in the selected cities (weather stations) in denmark and the netherlands. briefly, the learned and transformed values in the adjacency matrix entries decide how much of information coming from one weather station has to be included in the processed information of another weather station.the danish dataset contains hourly measurements with the values of the following weather variables: temperature, air pressure, wind speed and wind direction. the obtained results show that by treating the weather stations as the nodes in a graph, the proposed models better learn the underlying spatial relations between the weather stations which result in improving the prediction accuracy. in our proposed models we make the adjacency matrix learnable, so that the network can decide about the strength of the relations on its own based on the historical weather data observations.we also visualize the learnt (and transformed as described in section 3) adjacency matrices of the proposed models for the 2h ahead wind speed prediction for the dutch cities dataset. each entry of adjacency matrix controls how much of information coming from one node is to be included in the information constituting another node, considering all node attributes.g, in fig.in this paper, new models based on gcn architecture are proposed for wind speed prediction using historical weather data. thanks to the applied spatial-temporal convolutional operations on the built graph, the network learns the underlying relations between weather stations through a learnable adjacency matrix of the graph. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/331.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/331.txt new file mode 100644 index 0000000000000000000000000000000000000000..70596098eb32b9d8ba2612db53a7f2c70b74e9f9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/331.txt @@ -0,0 +1 @@ +intelligent transport system (its) is a system that manages transportation from traffic management to law enforcement. one important object that widely explored by its is a vehicle and their properties, including type, color, and license plate. vehicle color is an important property for vehicle identification and provide visual cues for fast action law enforcement. recognize vehicle color is very challenging task because several factors including weather condition, quality of video/image acquisition, and strip combination of the vehicle. the first factor, weather condition, may dramatically change the color illumination of the acquisition image. for example, if the image/video taken at haze condition then there a lot of "soft" white noise added to the image. soft white noise means that the noise is not random but continues and blended with the foreground and background objects. the quality of video/image acquisition is affected the final decision of the vehicle color recognition system and its depends of the optical sensor in the camera. camera that can capture object at high speed is recommended for its, but not all installed camera in the road can do that. a lot of cameras installed in the road only used to monitor the traffic, pedestrians, and street conditions. the last factor is strip combination of the vehicle, which is very affected to the vehicle recognition system. region selection is very important to tackle the problem.there are some research paper published to tackle vehicle color recognition problem, like in , , , , . chen et al. use feature context and linear svm classifier to tackle the problem. feature context is a collection of histogram that build with several areas, like spatial pyramid structure but with different region configuration. in other paper , they try to tackle vehicle color recognition problem using 2d histogram with some roi configuration as features and neural network as classifier. baek et al. also use 2d histogram but without roi configuration and svm as classifier. another approach is described by son et al. which using convolution kernel to extract similarity between positive and negative images and then feed up those similarity score to svm classifier.color spaces are very important to color recognition applications, like vehicle color recognition. the selection of color space will impact the recognition performance. the most usable color space in digital photography is rgb color space, but rgb color space has problem to color recognition because channel of rgb color space contribute equal for each channel so to distinct color is more difficult. usually, researcher will not use rgb as their primary color space and convert it to other color spaces that separate illumination and color, like cie lab or hsv , , . another approach is to make 2d histogram of two channels, like h and s channel in hsv color space, and do classification using those 2d histogram.in this paper, we present vehicle color recognition method using convolutional neural network (cnn). 
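the 2d-histogram route mentioned just above can be sketched as follows with opencv and scikit-learn: each image is converted to hsv, a joint histogram over the h and s channels (ignoring illumination) is flattened into a feature vector, and an svm is trained on those vectors; the bin counts and the toy solid-colour patches are assumptions standing in for real cropped vehicle images.

import cv2
import numpy as np
from sklearn.svm import SVC

def hs_histogram(bgr_image, h_bins=30, s_bins=32):
    hsv = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2HSV)
    # joint histogram over hue and saturation, dropping the value (illumination) channel
    hist = cv2.calcHist([hsv], [0, 1], None, [h_bins, s_bins], [0, 180, 0, 256])
    return cv2.normalize(hist, None).flatten()

# toy stand-ins for cropped vehicle images: solid red and solid blue patches
red = np.zeros((64, 64, 3), np.uint8);  red[:, :, 2] = 255
blue = np.zeros((64, 64, 3), np.uint8); blue[:, :, 0] = 255
X = np.stack([hs_histogram(img) for img in (red, blue, red, blue)])
y = ["red", "blue", "red", "blue"]

clf = SVC(kernel="rbf").fit(X, y)
print(clf.predict([hs_histogram(red)]))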
cnn is type of neural network but instead of using fully connected layer, cnn use layer called convolution layer to extract features from data. the training mechanism is very similar to normal neural network and use stochastic gradient descent as training algorithm. cnn is become very popular after winning the ilsvrc (imagenet large scale visual recognition challenge) 2012 . in those paper, they use more than 600,000 neuron and 7 hidden layer to provide good model of the data. to avoid overfitting krizhevsky et al. employed regularization method called dropout to the fully connected layer . the krizhevsky model is huge and as reported in the paper, the model trained in six day for 450,000 iteration in gpu hardware. before going into details, in section two we describe detils related works in color recognition. section two describe details architecture of our cnn model. section three reports the experiments we have done and discuss the results. in other paper, they try to tackle vehicle color recognition problem using 2d histogram with some roi configuration as features and neural network as classifier.color spaces are very important to color recognition applications, like vehicle color recognition. the most usable color space in digital photography is rgb color space, but rgb color space has problem to color recognition because channel of rgb color space contribute equal for each channel so to distinct color is more difficult. usually, researcher will not use rgb as their primary color space and convert it to other color spaces that separate illumination and color, like cie lab or hsv,,. chen et al. baek et al. first layer use 11x11@3 kernel with total 48 kernels, second layer use 3x3@48 kernel with total 128 kernels, third use 3x3@128 kernel with total 192 kernels, fourth layer use 3x3@192 kernel with total 192 kernels, and fifth layer use 3x3@192 with total 128 kernels. the confusion matrix shows that the most worst accuracy of our model is in green and gray color class. some examples of green class is misclassified as gray class and its above 10%. as seen in the dataset, some green color class examples has color that very close to gray, more like greengray color than green, so the classifier may have it wrong classified as a gray color class. for the practical implementation, we recommend using the client server mechanism, send the vehicle detection result to the server, do the vehicle color classification in server backend using gpu hardware, and send back the result to the intelligent transportation system for further processing. cyan-like color that appears in the kernel may contribute to the red color class or cyan color class. another color that appears repeatedly in the kernel are red-blue color, greengray color, and orange-like color. figure4cshow that for yellow color class a lot of the green-like color kernel neuron is active and its looks like our models learned that color can be recognize from the hood color or the top color of the car. from the experiment, the best accuracy is achieve using rgb color space and this is contradictive with several papers that not recomend rgb color space for color recognition and using another color space like hsv or yuv. 
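read literally, the layer configuration quoted above (48, 128, 192, 192 and 128 kernels) can be sketched in pytorch as below; the strides, pooling, input resolution, number of colour classes and classifier head are not specified in the text, so they are assumptions, and the sketch ignores the dropout and training details of the full model.

import torch
import torch.nn as nn

# conv stack following the kernel/channel counts quoted above (other choices assumed)
conv = nn.Sequential(
    nn.Conv2d(3, 48, kernel_size=11, stride=4, padding=2), nn.ReLU(), nn.MaxPool2d(3, 2),
    nn.Conv2d(48, 128, kernel_size=3, padding=1), nn.ReLU(), nn.MaxPool2d(3, 2),
    nn.Conv2d(128, 192, kernel_size=3, padding=1), nn.ReLU(),
    nn.Conv2d(192, 192, kernel_size=3, padding=1), nn.ReLU(),
    nn.Conv2d(192, 128, kernel_size=3, padding=1), nn.ReLU(), nn.MaxPool2d(3, 2),
)
head = nn.Sequential(
    nn.Flatten(), nn.Dropout(0.5),
    nn.LazyLinear(512), nn.ReLU(),
    nn.Linear(512, 8),                              # e.g. 8 vehicle colour classes (assumed)
)
model = nn.Sequential(conv, head)
print(model(torch.randn(1, 3, 227, 227)).shape)     # torch.Size([1, 8])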
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/332.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/332.txt new file mode 100644 index 0000000000000000000000000000000000000000..e478344735079fdd98f07b609e9f77c18304fdda --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/332.txt @@ -0,0 +1 @@ +machine learning (ml) has proved itself as a prominent field of study over the last decade by solving many very complex and sophisticated real-world problems. the application areas included almost all the real-world domains such as healthcare, autonomous vehicle (av), business applications, natural language processing (nlp), intelligent robots, gaming, climate modeling, voice, and image processing. ml algorithms' learning is typically based on trial and error method quite opposite of conventional algorithms, which followsthe associate editor coordinating the review of this manuscript and approving it for publication was utku kose.the programming instructions based on decision statements like if-else . one of the most significant areas of ml is forecasting , numerous standard ml algorithms have been used in this area to guide the future course of actions needed in many application areas including weather forecasting, disease forecasting, stock market forecasting as well as disease prognosis. various regression and neural network models have wide applicability in predicting the conditions of patients in the future with a specific disease . there are lots of studies performed for the prediction of different diseases using machine learning techniques such as coronary artery disease , cardiovascular disease prediction , and breast cancer prediction . in particular, the study is focused on live forecasting of covid-19 confirmed cases and study is also focused on the forecast of covid-19 outbreak and early response. these prediction systems can be very helpful in decision making to handle the present scenario to guide early interventions to manage these diseases very effectively.this study aims to provide an early forecast model for the spread of novel coronavirus, also known as sars-cov-2, officially named as covid-19 by the world health organization (who) . covid-19 is presently a very serious threat to human life all over the world. at the end of 2019, the virus was first identified in a city of china called wuhan, when a large number of people developed symptoms like pneumonia . it has a diverse effect on the human body, including severe acute respiratory syndrome and multi-organ failure which can ultimately lead to death in a very short duration . hundreds of thousands of people are affected by this pandemic throughout the world with thousands of deaths every coming day. thousands of new people are reported to be positive every day from countries across the world. the virus spreads primarily through close person to person physical contacts, by respiratory droplets, or by touching the contaminated surfaces. the most challenging aspect of its spread is that a person can possess the virus for many days without showing symptoms. the causes of its spread and considering its danger, almost all the countries have declared either partial or strict lockdowns throughout the affected regions and cities. medical researchers throughout the globe are currently involved to discover an appropriate vaccine and medications for the disease. 
since there is no approved medication so far for killing the virus, the governments of all countries are focusing on the precautions which can stop the spread. out of all precautions, "be informed" about all the aspects of covid-19 is considered extremely important. to contribute to this aspect of information, numerous researchers are studying the different dimensions of the pandemic and producing results to help humanity. to contribute to the current human crisis, our attempt in this study is to develop a forecasting system for covid-19. the forecasting is done for three important variables of the disease for the coming 10 days: 1) the number of new confirmed cases, 2) the number of death cases, and 3) the number of recoveries. this problem of forecasting has been considered as a regression problem in this study, so the study is based on some state-of-the-art supervised ml regression models such as linear regression (lr), least absolute shrinkage and selection operator (lasso), support vector machine (svm), and exponential smoothing (es). the learning models have been trained using the covid-19 patient stats dataset provided by johns hopkins. the dataset has been preprocessed and divided into two subsets: a training set (85% of records) and a testing set (15% of records). the performance evaluation has been done in terms of important measures including the r-squared score (r2), adjusted r-squared score (adjusted r2), mean square error (mse), mean absolute error (mae), and root mean square error (rmse). this study has some key findings which are listed below: • es performs best when the time-series dataset has very limited entries. • different ml algorithms seem to perform better in different class predictions. • most of the ml algorithms require an ample amount of data to predict the future; as the size of the dataset increases, the model performances improve. • ml model based forecasting can be very useful for decision-makers to contain pandemics like covid-19. the rest of the paper consists of six sections. section i presents the introduction, section ii contains the description of the dataset and methods used in this study, section iii presents the methodology, section iv presents the results, and section v summarizes the paper and presents the conclusion. one of the most significant areas of ml is forecasting; numerous standard ml algorithms have been used in this area to guide the future course of actions needed in many application areas including weather forecasting, disease forecasting, stock market forecasting as well as disease prognosis. this problem of forecasting has been considered as a regression problem in this study, so the study is based on some state-of-the-art supervised ml regression models such as linear regression (lr), least absolute shrinkage and selection operator (lasso), support vector machine (svm), and exponential smoothing (es). the aim of this study is the future forecasting of covid-19 spread, focusing on the number of new positive cases, the number of deaths, and the number of recoveries. this learning method may use regression techniques and classification algorithms for predictive model development; four regression models have been used in this study of covid-19 future forecasting. in this study, we evaluate the performance of each of the learning models in terms of the r-squared (r2) score, adjusted r-squared (adjusted r2), mean square error (mse), mean absolute error (mae), and root mean square error (rmse).
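The passage above names the regression models (LR, Lasso, SVM), the 85/15 train/test split, and the error metrics used. Below is a minimal scikit-learn sketch of that evaluation loop, not the authors' code: the synthetic cumulative-case series, the simple day-index feature, and the model hyperparameters are assumptions for illustration, and exponential smoothing is omitted for brevity.

```python
# A sketch of fitting LR / Lasso / SVM regressors on a day-index feature and
# scoring them with R2 / MSE / MAE / RMSE on an 85/15 chronological split.
# The synthetic case counts are made up for illustration.
import numpy as np
from sklearn.linear_model import LinearRegression, Lasso
from sklearn.svm import SVR
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error

rng = np.random.default_rng(0)
days = np.arange(1, 101).reshape(-1, 1)                             # day index since outbreak start
cases = 50 * np.exp(0.05 * days.ravel()) + rng.normal(0, 25, 100)   # fake cumulative cases

split = int(0.85 * len(days))                                       # 85% train / 15% test, time ordered
x_train, x_test = days[:split], days[split:]
y_train, y_test = cases[:split], cases[split:]

models = {
    "lr": LinearRegression(),
    "lasso": Lasso(alpha=1.0),
    "svm": SVR(kernel="rbf", C=100.0),
}

for name, model in models.items():
    model.fit(x_train, y_train)
    pred = model.predict(x_test)
    mse = mean_squared_error(y_test, pred)
    print(f"{name:6s} r2={r2_score(y_test, pred):7.3f} "
          f"mse={mse:12.1f} mae={mean_absolute_error(y_test, pred):9.1f} "
          f"rmse={np.sqrt(mse):9.1f}")
```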
to contribute to the control of this pandemic situation, this study attempts to perform future forecasting of the death rate, the number of daily confirmed infected cases, and the number of recovery cases in the upcoming 10 days. the dataset used in the study contains daily time series summary tables, including the number of confirmed cases, deaths, and recoveries over the days since the pandemic started. the dataset used for the study contains information about the daily reports of the number of newly infected cases, the number of recoveries, and the number of deaths due to covid-19 worldwide. four machine learning models, lr, lasso, svm, and es, have been used to predict the number of newly infected cases, the number of deaths, and the number of recoveries. all other models perform poorly; the order of performance from best to worst is es, followed by lr, lasso, and svm. however, comparing the current recovery statistics (figure 19) with our models' predictions, the es prediction follows trends that are very close to the actual situation. figures 14 and 15 show that our model predictions are quite promising, because the models predict that in the upcoming days the death rate will increase, and the graph of the mortality rate shows the same pattern; in the recovery scenario, the models predict that the recovery rate will slow down, and the recovery graph in figure 15 follows the same pattern, which supports the model predictions. as shown in the previous sections, es performed best in all three cases, namely death rate forecasting, the number of new confirmed cases forecasting, and recovery rate forecasting. considering the best performance given by the es model in all three forecasting cases among all four models, this model has been used for further analysis with interval prediction. in the second model training interval, the models were trained on the dataset from 22 jan 2020 to 02 mar 2020; data for 15 more days were added to the training set to predict the outcome of the upcoming 10 days from 02 mar 2020. to evaluate this uncertainty we compute prediction intervals on lr, because among the three regression models (lr, lasso, and svm), in general, lr performs better in all three cases (death rate forecasting, new confirmed cases forecasting, recovery rate forecasting). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/333.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/333.txt new file mode 100644 index 0000000000000000000000000000000000000000..6deee8b12d845bb1df10f18d21d28e877d90a1ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/333.txt @@ -0,0 +1 @@ +there is rapidly growing interest in using bayesian optimization (bayesopt) to tune model and inference hyperparameters for machine learning algorithms that take a long time to run (snoek et al., 2012). tuning algorithms by grid search is a time consuming task. tuning by hand is also time consuming and requires trial, error, and expert knowledge of the model. to capture this knowledge, bayesopt uses a performance model (usually a gaussian process) as a guide to regions of hyperparameter space that perform well. bayesopt balances exploration and exploitation to decide which hyperparameter to evaluate next in an iterative procedure. bayesopt for machine learning algorithms is a form of model selection in which some objective, such as predictive likelihood or root mean squared error, is optimized with respect to hyperparameters η.
thus, it is an empirical bayesian procedure where the marginal likelihood is replaced by a proxy objective. empirical bayes optimizes the marginal likelihood of data set x (a summary of symbols is provided in table 1),ηthen uses p(θ | x,η) as the posterior distribution over the unknown model parameters θ (carlin and louis, 2000). empirical bayes is applied in different ways, e.g., gradient-based optimization of gaussian process kernel parameters, optimization of hyperparameters to conjugate priors in variational inference. what is special about bayesopt is that it performs empirical bayes in a way 31st conference on neural information processing systems (nips 2017), long beach, ca, usa. unseen data that requires calculating the posterior p(θ | x, η (s) ) for each member in a sequence 1, . . . , s of candidate hyperparameters η (1) , η (2) , . . . , η (s) . often these posteriors are approximate, such as a point estimate, a monte carlo estimate, or a variational approximation. nonetheless, these operations are usually expensive to compute.therefore, what is surprising about bayesopt for approximate inference is that it disregards most of the computed posteriors and keeps only the posterior p(θ | x,η) that optimizes the marginal likelihood. it is surprising because the intermediate posteriors have something to say about the data, even if they condition on hyperparameter configurations that do not maximize the marginal likelihood.in other words, when we harbour uncertainty about η, should we be more bayesian? we argue for this approach, especially if one believes there is a danger of overfitting η on the validation set, which is especially the case as the dimensionality of the hyperparameters grows. as an illustrative example, figure 1 shows the predictive performance of a set of 115 posteriors (each corresponding to a different hyperparameter) of latent dirichlet allocation on validation data and testing data. overfitting validation means that the single best posterior would not be selected as the final answer in bayesopt.bayes empirical bayes (carlin and louis, 2000) extends the empirical bayes paradigm by introducing a family of hyperpriors p(η | λ) indexed by λ and calculates the posterior over the model parameters by integrating,this leads to the question of how to select the hyper-hyperparameter λ. a natural answer is a hierarchical empirical bayes approach where λ is maximized 1 ,and p(θ | x,λ) is used as the posterior. comparing eq. 3 to eq. 1 highlights that we are adding an extra layer of marginalization that can be exploited with the intermediate posteriors in hand. note the distinction between marginalizing the hyperparameters to the model vs. hyperparameters to the gaussian process of model performance. eq. 3 describes the former; the latter is already a staple of bayesopt (osborne, 2010).in this paper, we present empirical bayes for hyperparameter averaging (eb-hyp), an extension to bayesopt that makes use of this hierarchical approach to incorporate the intermediate posteriors in an approximate predictive distribution over unseen data x * .the train-marginalize-test pipeline eb-hyp is an alternative procedure for evaluating and deploying machine learning algorithms that reduces the need for a separate validation data set.validation data is typically used to avoid overfitting. overfitting is a danger in selecting both parameters and hyperparameters. 
the state of the art provides sophisticated ways of regularizing or marginalizing over parameters to avoid overfitting on training data. but there is no general method for regularizing hyperparameters and typically there is a requirement of conjugacy or continuity in order to simultaneously fit parameters and hyperparameters in the same training procedure.therefore, the standard practice for dealing with the hyperparameters of machine learning models and algorithms is to use a separate validation data set (murphy, 2012). one selects the hyperparameter that results in the best performance on validation data after fitting the training data. the best hyperparameter and corresponding posterior are then applied to a held-out test data set and the resulting performance is the final estimate of the generalization performance of the entire system. this practice of separate validation has carried over to bayesopt.eb-hyp avoids overfitting training data through marginalization and allows us to train, marginalize, and test without a separate validation data set. it consists of three steps:1. train a set of parameters on training data x train , each one conditioned on a choice of hyperparameter. 2. marginalize the hyperparameters out of the set of full or approximate posteriors. 3. test (or deploy) the marginal predictive distribution on test data x test and report the performance.in this paper, we argue in favour of this framework as a way of simplifying the evaluation and deployment pipeline. we emphasize that the train step admits a broad category of posterior approximation methods for a large number of models, including maximum likelihood, maximum a posteriori, variational inference, or markov chain monte carlo.in summary, our contributions are the following:• we highlight the three main shortcomings of the current prevalent approach to tuning hyperparameters of machine learning algorithms (computationally wasteful, potentially overfitting validation, added complexity of a separate validation data set) and propose a new empirical bayes procedure, eb-hyp, to address those issues. • we develop an efficient algorithm to perform eb-hyp using monte carlo approximation to both sample hyperparameters from the marginal posterior and to optimize over the hyper-hyperparameters. • we apply eb-hyp to two models and real world data sets, comparing to random search and bayesopt, and find a significant improvement in held out predictive likelihood validating the approach and approximation in practice.therefore, what is surprising about bayesopt for approximate inference is that it disregards most of the computed posteriors and keeps only the posterior p(θ | x,η) that optimizes the marginal likelihood. as an illustrative example, figure1shows the predictive performance of a set of 115 posteriors (each corresponding to a different hyperparameter) of latent dirichlet allocation on validation data and testing data.in this paper, we present empirical bayes for hyperparameter averaging (eb-hyp), an extension to bayesopt that makes use of this hierarchical approach to incorporate the intermediate posteriors in an approximate predictive distribution over unseen data x * . one selects the hyperparameter that results in the best performance on validation data after fitting the training data. 
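The train-marginalize-test steps above hinge on weighting the intermediate posteriors rather than keeping only the evidence-maximizing one. The toy sketch below illustrates just that averaging principle on a conjugate Gaussian model; the model, the flat hyperprior, and the evidence-based weights are assumptions chosen so everything has a closed form, and this is not the paper's EB-Hyp algorithm (which samples hyperparameters via a Gaussian-process performance model).

```python
# Toy contrast between (a) empirical Bayes: keep only the posterior whose
# hyperparameter maximizes the marginal likelihood, and (b) an averaging
# scheme: weight every intermediate posterior by its (normalized) evidence.
# Model: x_i ~ N(theta, 1), prior theta ~ N(0, eta); eta is the hyperparameter.
import numpy as np
from scipy import stats

rng = np.random.default_rng(1)
x_train = rng.normal(2.0, 1.0, size=20)      # observed data
x_test = rng.normal(2.0, 1.0, size=200)      # held-out data
etas = [0.01, 0.1, 1.0, 10.0, 100.0]         # candidate hyperparameters

def posterior_and_evidence(x, eta, noise_var=1.0):
    """Posterior over theta and log marginal likelihood (closed form)."""
    n = len(x)
    post_var = 1.0 / (n / noise_var + 1.0 / eta)
    post_mean = post_var * x.sum() / noise_var
    cov = eta * np.ones((n, n)) + noise_var * np.eye(n)   # marginal covariance of x
    log_evidence = stats.multivariate_normal(np.zeros(n), cov).logpdf(x)
    return post_mean, post_var, log_evidence

def log_predictive(x, post_mean, post_var, noise_var=1.0):
    """Log posterior-predictive density of held-out points."""
    return stats.norm(post_mean, np.sqrt(post_var + noise_var)).logpdf(x).sum()

results = [posterior_and_evidence(x_train, eta) for eta in etas]
log_ev = np.array([r[2] for r in results])

# (a) empirical Bayes: argmax-evidence posterior only
best = int(np.argmax(log_ev))
print("eb  test log-lik:", log_predictive(x_test, results[best][0], results[best][1]))

# (b) averaging: per test point, mix the predictive densities of all posteriors
weights = np.exp(log_ev - log_ev.max())
weights /= weights.sum()
dens = np.zeros_like(x_test)
for w, (m, v, _) in zip(weights, results):
    dens += w * stats.norm(m, np.sqrt(v + 1.0)).pdf(x_test)
print("avg test log-lik:", np.log(dens).sum())
```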
the best hyperparameter and corresponding posterior are then applied to a held-out test data set and the resulting performance is the final estimate of the generalization performance of the entire system.eb-hyp avoids overfitting training data through marginalization and allows us to train, marginalize, and test without a separate validation data set. test (or deploy) the marginal predictive distribution on test data x test and report the performance.• we highlight the three main shortcomings of the current prevalent approach to tuning hyperparameters of machine learning algorithms (computationally wasteful, potentially overfitting validation, added complexity of a separate validation data set) and propose a new empirical bayes procedure, eb-hyp, to address those issues. • we apply eb-hyp to two models and real world data sets, comparing to random search and bayesopt, and find a significant improvement in held out predictive likelihood validating the approach and approximation in practice.performance model sampling performance model sampling is based on the idea that the set of simulated rewards r (s) can themselves be treated as a probability distribution of hyperparameters, from which we can also draw samples., gaussian process marginal given kernel hyperparameters) or estimated using methods that approximate the posterior p(θ | x, η(s)) such as maximum likelihood estimation, markov chain monte carlo sampling, or variational inference. simulationp (s) (η | x) from the posterior of p conditioned on the evaluation history has non-zero density wherever the prior p(η) is non-zero by the definition of p (s) (η | x) in eq. this affects asymptotic convergence but discontinuities in the 1 inputs training data x train and inference algorithm a : (x, η) → p(θ | x, η) 2 output predictive density p(x * | x train ) 3 initialize evaluation history v = {} 4 while v not converged do 5 draw performance function from gp posteriorp(s). to assess the necessity of a separate validation set we consider two scenarios: (1) training and validating on the train+validation data, (2) training on the train data and validating on the validation data. for finite validation data, there is no way of knowing how the optimal hyperparameter will behave on test data before seeing it, motivating an averaging approach like eb-hyp. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/334.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/334.txt new file mode 100644 index 0000000000000000000000000000000000000000..c57e471f847b0f27107caf59b4bf77aa2785b5f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/334.txt @@ -0,0 +1 @@ +ever since the introduction of credit cards and online payments, many scammers have found ways to exploit people and steal their credit card information to use them for unauthorized purchases. this leads to a huge amount of fraudulent purchases every day. banks and ecommerce websites are trying to identify these fraudulent transactions and stop them from happening again. with machine learning and deep learning methods, they are trying to stop the fraudsters before the transaction is approved.machine learning is one of the hottest topics of this decade and a subset of artificial intelligence. more and more companies are looking to invest in machine learning to improve their services. machine learning is a combination of various computer algorithms and statistical modeling to allow the computer to perform tasks without hard coding. 
the acquired model would learn from the "training data"; predictions can then be made or actions performed from the stored experiential knowledge. deep learning models are a part of machine learning techniques which involve artificial neural networks. convolutional neural networks, deep belief networks, auto-encoders, recurrent neural networks, and restricted boltzmann machines are all examples of such methods. a properly trained nn would have the capability to capture unique relationships over the whole dataset. credit card fraud is a form of fraud involving the use of fake or stolen credit card information and causing financial harm to the account holders or merchants involved. the total value of credit card fraud in the single euro payments area (sepa) in 2016 was 1.8 billion euros out of a total of 4.38 trillion euros in transactions, which is 0.4% lower than the previous year. in 2015, according to the nelson report, the total loss from credit cards in the world was $21.84 billion, and it was projected that in 2020 it would be $32 billion. in this paper, we will be looking into 3 data sets. they are the european dataset, the australian dataset and the german dataset. in this work we aim to benchmark different ml and dl techniques. an ensemble of the best 3 performing models is also applied to all 3 datasets. we present our conclusions based on an empirical study comparing different ml and deep learning models. the paper is organized as follows. section 2 summarizes the related work and the background of the models implemented. section 3 provides details on the implementation and experimental setup. in section 4 we present and discuss the results. conclusions and future work are summarized in sections 5 and 6 respectively. in what follows we review some of the most relevant studies that have applied machine learning and deep learning models in the area of fraud detection. in their paper, they discuss the various concerns while handling this problem, such as the non-availability of real-world data, which forces researchers to work on faux data, as banks and other financial institutions don't make their data public due to privacy concerns over sensitive data. the methodology they used was splitting the datasets into 4 different sets which contained 20% fraud, 15% fraud, 10% fraud and 3% fraud. hence, they used the true positive rate (fraud catching rate), true negative rate, false positive rate (false alarm rate) and false negative rate. randhawa et al.'s paper on credit card fraud detection using adaboost and majority voting explores many different machine learning algorithms such as naïve bayes, random forest, gradient boosted trees, etc. in this study we present an empirical comparison of various machine learning and deep learning models inspired by the previous studies. in particular we compare the performance of svm, knn and random forest to deep learning methods such as autoencoders, rbm, dbn and cnn. our experiments try to investigate the effectiveness of different machine learning and deep learning models using data sets with varying sizes and complexities. the idea behind the cost of failure is that each of the false negatives (frauds detected as normal) would have a cost of $1000 and false positives (normal instances detected as fraud) would have a cost of $100 to the company/entity.
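The cost-of-failure metric just described is easy to compute from a confusion matrix. A small sketch follows, assuming the $1000 false-negative and $100 false-positive costs mentioned above; the toy labels and predictions are made up for illustration.

```python
# Cost of failure from a confusion matrix: each false negative (missed fraud)
# costs $1000, each false positive (false alarm) costs $100.
from sklearn.metrics import confusion_matrix

FN_COST = 1000  # fraud predicted as normal
FP_COST = 100   # normal predicted as fraud

y_true = [0, 0, 1, 1, 0, 1, 0, 0, 1, 0]   # 1 = fraud, 0 = normal (toy data)
y_pred = [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]

tn, fp, fn, tp = confusion_matrix(y_true, y_pred, labels=[0, 1]).ravel()
cost = fn * FN_COST + fp * FP_COST
print(f"tp={tp} fp={fp} fn={fn} tn={tn} -> cost of failure = ${cost}")
# tp=2 fp=1 fn=2 tn=5 -> cost of failure = $2100
```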
here, the recommendation would be to choose svm instead of the ensemble if the company is looking to reduce the cost as much as possible, as the ensemble method takes longer in terms of both training and testing while svm takes the least time for testing and training. a second ensemble (ensemble 2) is built using the models with the least cost of failure: knn, svm and random forest. ensemble 2 (knn, svm, random forest), on the other hand, which was based on combining the classifiers with the least cost of failure, achieved a higher mcc, a higher auc, and a lower cost value. examining the results of the table we can see that svm, random forest and cnn are the best models in terms of performance (auc and mcc values). the convolutional neural network was found to be the best deep learning method as it produces good results for both the european and german datasets, while its performance on the australian dataset was the 4th best and its cost of failure was similar to knn. this paper provides an empirical investigation comparing various machine learning and deep learning models on different data sets for the detection of fraudulent transactions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/335.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/335.txt new file mode 100644 index 0000000000000000000000000000000000000000..f93dd1f7057c77821e9d264eaa43c89fdbf186f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/335.txt @@ -0,0 +1 @@ +recently, customer relationship management (crm) has gained an important role in marketing decision strategies. data mining in crm aims at learning available knowledge from the customer relationship by machine learning or statistical methods to guide strategic behavior so as to obtain the most profit. in the traditional method for data mining in crm, machine learning is a powerful tool for the problem, and is widely used in the corresponding fields. the neural network, a popular machine learning method based on empirical risk minimization (erm), is a popular algorithm in data mining for crm. however, there exist some drawbacks in the algorithm: if the training set is very large, the training process is very long; and because the network is trained based on empirical risk minimization, the generalization performance of the approach is poor. how to improve the testing accuracy on a given problem remains an open and difficult task. in recent years, support vector machines (svms) have been introduced for solving pattern recognition problems because of their superior performance. svms are developed based on the idea of structural risk minimization (srm), which guarantees the good generalization performance of the method. in this approach, one needs to map the original data into a feature space and construct an optimal separating hyperplane with maximal margin in this space. this paper uses a classifier based on the support vector machine to solve a real problem. the algorithm is applied to the distribution achievement dataset of a certain company; the results show that the training accuracy of the support vector machine classifier is very high, and its generalization performance is excellent. the rest of this paper is organized as follows: in section 2, we first briefly review the standard support vector machine. experimental results are shown in section 3 and some conclusions are given in section 4.
data mining in crm aims at learning available knowledge from the customer relationship by machine learning or statistical methods to guide strategic behavior so as to obtain the most profit. in the traditional method for data mining in crm, machine learning is a powerful tool for the problem, and is widely used in the corresponding fields. the neural network, a popular machine learning method based on empirical risk minimization (erm), is a popular algorithm in data mining for crm. however, there exist some drawbacks in the algorithm: if the training set is very large, the training process is very long; and because the network is trained based on empirical risk minimization, the generalization performance of the approach is poor. this paper uses a classifier based on the support vector machine to solve a real problem. the algorithm is applied to the distribution achievement dataset of a certain company; the results show that the training accuracy of the support vector machine classifier is very high, and its generalization performance is excellent. in this section, we use the classifier based on the support vector machine to solve the real problem. in order to evaluate the validity of the support vector machine, the algorithm is applied to the distribution achievement dataset of a certain company. in order to train the support vector machine classifiers, we first transform all variables to the corresponding values. in order to evaluate the validity of the support vector machine for the problem, we split the data set into a training set and a testing set. from the results of the experiments above, we can easily conclude that the training and testing accuracies of the support vector machine classifier are very high, and the support vector machine plays a key role in data mining for customer relationship management. because the support vector machine is aimed at machine learning with small samples, it has been successfully used in many practical problems. data mining in crm aims at learning available knowledge from the customer relationship by machine learning or statistical methods to guide strategic behavior so as to obtain the most profit. the support vector machine, as a powerful tool in machine learning, has been widely used in data mining for crm. from the results of the practical problem with the support vector machine, we can clearly see the good generalization performance of the support vector machine. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/336.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/336.txt new file mode 100644 index 0000000000000000000000000000000000000000..a75069c82f9130ed0a9398cf9473c5ef63978852 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/336.txt @@ -0,0 +1 @@ +the rapid development of information technology has created a new vision for network learning, and its influence has already spread over the world to facilitate educational innovation. therefore, many countries have been paying attention to computer technology and expect it to facilitate education reform in an effective and efficient way. it is well known that the application of computer and internet teaching to traditional teaching requires some kind of transformation.
consequently, the research and development of proper learning model has to seriously consider the mutual interaction between the users and the computers, the instructor and the learners, and the interaction among the learners. embed the related research issues to the above process, the splendid research results then can be expected. the theme-based learning is to learn an integrated knowledge by defining a central "theme" at the very start and compose related knowledge surrounds the cen-tral theme from various aspects. such a learning model emphasizes the training of the learners with the competency of knowledge integration. compared with traditional teaching, which teaches fragmentary information within the limitation of subjects, units, chapters, and sections, the intention of theme-based learning is to take a theme as a starting point and stretch out of it based on the learners' interests. accordingly, the learners can voluntarily construct their own knowledge since the theme is strongly connected with our daily life and developed from learners' willingness.a theme-based learning process can be divided into exterior circulation and interior circulation as illustrated in figure 1 . exterior circulation activities are 1) identify a central theme, 2) identify related subject domains based on learner's interest, 3) collect information for the specific topics, 4) integrate collected information to build shared knowledge, and 5) exhibit learning outcomes and share with others. the activities of the exterior circulation are explicit learning behaviors. on the other hand, the interior circulation consists of implicit mental activities, which are plan, action, and introspection, respectively. when learners engage in the themebased learning processes on web, they are experiencing the activities of exterior and interior circulation synchronously. since the explicit feature of the learning processes can be controlled or guided effectively by the careful design and implementation of the web-based learning environment, it is expected that the interior circulation, which represents the invisible mental behavior of the learners, can make great progress simultaneously.the exterior circulation of the theme-based learning model, as figure 1 illustrates, can be implemented as a web based system that helps to manage the learning processes. the learning activities with regard to student learning can be divided into five stages as follows:(1) identify a central theme the learners engaged into the theme-based learning can propose their own interested topics to ask for feedback from other team members. meanwhile, every learner can also join other member's proposed topic. after interaction and brain storming, the ones who are interested in the same topic are formed as a learning team, and this topic is the central theme that this team would investigate. the motivation of such an arrangement is that "a student can learn better if he/she was interested in the learning topic". the theme should be closely connected with the learners' daily life and an extensive range of survey which is not limited in a specific field is encouraged.1. identify a central theme.( at this stage, the theme is defined and the learning team for each theme is formed. based on the learner's own specific interest, each team member tries to find the issues in the related subject domain derived from the theme. 
notably, the interaction of learners on the learning platform can influence the relatively inactive learners to trigger their interests effectively on some specific topics through the events and activities originated by their team members.plan introspection 3. collect information for the specific topics. exterior circulation activities are 1) identify a central theme, 2) identify related subject domains based on learner's interest, 3) collect information for the specific topics, 4) integrate collected information to build shared knowledge, and 5) exhibit learning outcomes and share with others. since the explicit feature of the learning processes can be controlled or guided effectively by the careful design and implementation of the web-based learning environment, it is expected that the interior circulation, which represents the invisible mental behavior of the learners, can make great progress simultaneously.besides putting the learning activities that correspond to the exterior circulation of the theme-based learning model into practice, an intelligent diagnosis system is also incorporated in the proposed web-based thematic learning platform. notably, a fuzzy expert system and a composite classifier are used to give the learning guidance to the learners, and assist the instructor in grading each learner's online class participation and predicting the performance of each learner's final written report.the learners can login the theme-based learning system through user interface agent to participate in the learning activities such as data searching, data managing, discussing with the colleagues and the teacher online, posting and replying the articles, etc. the system can generates the learners' learning profiles, including the total time that the learners stayed in the platform, the frequency of login sessions, the learning materials collected by the learners, the articles posted or replied by the learners, and the online group discussion time spent by the learners, etc.the learners are expected to make progress based on certain proper learning advices given by the learning diagnosis system. meanwhile, the system can also predict the performance of the learners' final report so that the teacher can use this predicted achievement for further analysis on the learner's study behavior when there is a gap between expected result and the learner's actual performance. one is a fuzzy expert system which not only gives appropriate diagnosis messages to the learners but also delivers each learner's online participation assessment to the teacher at the end of each learning activity stage based on the learners' profile. the motivation of using a fuzzy expert system to give diagnosis and class participation assessment whereas using a composite classifier to predict the learners' final accomplishment is that the fuzzy expert system can function more like human experts who explain the reasoning processes behind their recommendation. the learning diagnosis system will not only give the teacher a summary report of suggestion messages that the learners received, but also offer the teacher each learner's online participation assessment based on the non-fuzzy output of the defuzzifier. 
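The paragraph above describes fuzzifying learner-profile features, firing rules, and defuzzifying into a participation score. The sketch below illustrates that general fuzzify-infer-defuzzify pipeline on two made-up features; the membership functions, rules, and output levels are invented for illustration and are not the rule base of the paper's fuzzy expert system.

```python
# Illustrative Mamdani-style fuzzy scoring of "online participation" from two
# learner-profile features. All memberships, rules, and output levels are
# hypothetical.
def tri(x, a, b, c):
    """Triangular membership function with feet a, c and peak b."""
    if x <= a or x >= c:
        return 0.0
    return (x - a) / (b - a) if x <= b else (c - x) / (c - b)

def participation_score(logins_per_week, posts_per_week):
    # Fuzzification of the two profile features.
    logins = {"low": tri(logins_per_week, -1, 0, 4),
              "high": tri(logins_per_week, 2, 7, 12)}
    posts = {"low": tri(posts_per_week, -1, 0, 3),
             "high": tri(posts_per_week, 1, 5, 9)}
    # Rule evaluation (min for AND, max to combine rules firing the same output).
    poor = min(logins["low"], posts["low"])
    fair = max(min(logins["high"], posts["low"]),
               min(logins["low"], posts["high"]))
    good = min(logins["high"], posts["high"])
    # Defuzzification: weighted average of representative output levels.
    levels = {"poor": 30.0, "fair": 60.0, "good": 90.0}
    weights = {"poor": poor, "fair": fair, "good": good}
    total = sum(weights.values())
    if total == 0.0:
        return 0.0
    return sum(weights[k] * levels[k] for k in levels) / total

print(participation_score(logins_per_week=6, posts_per_week=4))  # active learner -> 90.0
print(participation_score(logins_per_week=1, posts_per_week=0))  # inactive learner -> 30.0
```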
this is also one of the major reasons that the prediction capability of a fuzzy expert system is worse than some advanced machine learning algorithms since the inputs to the fuzzy expert system are always chosen by the human experts and these selected attributes might not be the most promising ones for the fuzzy expert systems. one of the two classes was experimented with the proposed web-based thematic learning platform in a natural science course, wherein a fuzzy expert system is incorporated into the intelligent diagnosis system to grade students' class participation and learning guidance to the learners based on learning profiles so that the students can receive just-in-time support or suggestion to help them gain better learning achievement. the second experiment was conducted in another natural science course for the other fifth grade class whereas the diagnosis system was removed from the web-based thematic learning platform in order to demonstrate the performance of the proposed diagnosis system. it can be inferred from the results that the learning guidance given by the fuzzy expert system significantly affected the learners' study behavior and boosted the quality of the learners' final reports further. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/337.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/337.txt new file mode 100644 index 0000000000000000000000000000000000000000..06bc7b8ea7533b6cdcb027e23edca66f36ea7a81 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/337.txt @@ -0,0 +1 @@ +as the name suggests, conflict-free replicated data types (crdts) provide powerful properties: in particular, updates can be applied without further coordination of replicas, and recovery from network partitions can be done with ease. while crash-fault environments are typically assumed for crdts, it is natural to investigate whether or under which conditions these desired crdt properties also hold in byzantine environments.recent works on crdts in byzantine environments have followed different paths. the work stretches from classical assumptions of an honest two-thirds majority introducing coordination (e.g., byzantine-tolerant causal-order broadcast ) to coordination-free, sybil-resistant crdts using broadcast based on the happened-before relation as directed, acyclic graphs .in this brief announcement, we relate the notion of equivocation to crdts and show under which conditions a subclass of crdts is equivocation-tolerant in byzantine environments. we show that, due to equivocation tolerance, all statebased and certain operation-based crdts tolerate any number of byzantine faults while depending on rather mild assumptions on the communication layer.equivocation is the act of a byzantine replica sending different updates, which appear valid on their own, to different recipients where it should have sent the same update . while equivocation can only be detected globally or with two equivocated updates, a valid update is an update that is protocol-conforming when viewed on its own. omission occurs if a crash-or byzantine-faulty replica sends an update to only a strict subset of all protocol-intended recipients.a system provides non-equivocation if it prevents byzantine replicas from performing equivocation. we say that an algorithm is equivocation-tolerant if it neither needs to detect, prevent, nor remedy equivocation to ensure its provided guarantees beyond what is needed to cope with omission. 
it follows that correct replicas need to be able to locally detect an invalid update.in this brief announcement, we relate the notion of equivocation to crdts and show under which conditions a subclass of crdts is equivocation-tolerant in byzantine environments. we show that, due to equivocation tolerance, all statebased and certain operation-based crdts tolerate any number of byzantine faults while depending on rather mild assumptions on the communication layer.crdts provide strong eventual consistency (sec), consisting of strong convergence ("correct replicas that have delivered the same updates have an equivalent state"), eventual delivery ("an update delivered at some correct replica is eventually delivered to all correct replicas"), and termination ("all method executions terminate"). intuitively, equivocation tolerance for crdts means that validity of updates is defined in a way that any two valid updates are conflictfree, which allows applying any valid update directly, without threatening strong convergence. to avoid inconsistencies due to equivocation on operation ordering, we also require that valid operations have an inherent ordering: either all operations are commutative and no ordering is needed, or the datatype semantics inherently records a happened-before relation, i. an equivocation that breaks sec and leads to a permanent inconsistent state must either break inherent identity or inherent order, breaking an assumption of their mechanisms: if ensured by hashing, the equivocation can be reduced to a hash collision.without periodic gossiping of state-based crdts, operation-based crdts need to be able to handle omissions to ensure eventual delivery. we note that this approach essentially uses a state-based set crdt to synchronize all operations, benefiting from the byzantine tolerance of all state-based crdts shown in lemma 1. omission-handling, equivocation-tolerant operation-based crdts ensure strong eventual consistency for all correct replicas in an environment with n replicas of which an arbitrary number f exhibit byzantine faults, i.we showed that some crdts can be used in byzantine environments with an arbitrary number of faults by leveraging their equivocation tolerance.in environments with byzantine majorities, a crdt with non-commutative operations has to record the happened-before relation in the payload to ensure the causal order independently of the broadcast order.state-based crdts are easy to deploy in byzantine environments, as shown in lemma 1, based on their unconditional equivocation tolerance.in contrast to state-based crdts, as shown in lemma 2, operation-based crdts require additional properties to provide sec in byzantine environments. in summary, state-based grow-only crdts can play out their strengths in public, permissionless byzantine systems, while operations-based crdts have more restrictions, but allow for permissions with finer granularity of replicas.in this brief announcement, we analyzed the reasons why and under which circumstances a subclass of crdts can be moved from the crash fault model to a byzantine fault model using the notion of equivocation. 
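The argument above for state-based CRDTs rests on merges being commutative, associative, and idempotent, so replicas converge no matter the order or duplication of received states. Below is a minimal grow-only set (G-Set) sketch of that property; the Python class is illustrative and is not the paper's formal model.

```python
# A minimal state-based grow-only set (G-Set) CRDT: merge is set union, which
# is commutative, associative, and idempotent, so replicas converge regardless
# of the order (or repetition) in which they receive each other's states.
class GSet:
    def __init__(self, elements=()):
        self._elements = frozenset(elements)

    def add(self, element) -> "GSet":
        """Local update: grow-only, elements are never removed."""
        return GSet(self._elements | {element})

    def merge(self, other: "GSet") -> "GSet":
        """Join of two replica states (least upper bound = set union)."""
        return GSet(self._elements | other._elements)

    def value(self) -> frozenset:
        return self._elements


# Two replicas apply different updates, then exchange full states.
a = GSet().add("x").add("y")
b = GSet().add("z")

# Merge order does not matter, and re-merging a state changes nothing.
assert a.merge(b).value() == b.merge(a).value() == frozenset({"x", "y", "z"})
assert a.merge(b).merge(b).value() == a.merge(b).value()
print(sorted(a.merge(b).value()))  # ['x', 'y', 'z']
```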
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/338.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/338.txt new file mode 100644 index 0000000000000000000000000000000000000000..31b632f2501ef41728c11696246dfdfbe9bfb4a8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/338.txt @@ -0,0 +1 @@ +the concept of blockchain and the use of consensus algorithm was initially proposed in 1982 , but a quantum leap was made with the introduction of bitcoin, a peer-to-peer electronic cash system in 2009. the main idea behind blockchain was to create a decentralized and growing list of blocks or records which are integrated with the help of cryptography protocols. as blockchain functions as a decentralized immutable ledger which records the transaction in the real-time, it gives rise to a complexity to validate and verify the transaction and make the system fault-tolerant, this can be achieved with the help of a consensus algorithm which acts as an instrument using which nodes in the network can coordinate and come to a common consensus or single truth without a central authority, thus a consensus algorithm is an essential element of a blockchain network that maintains the integrity and security of these distributed computing systems. many consensus algorithms developed with the time, for instance initially classical consensus protocols were the only known protocols that use all-to-all voting for obtaining single truth, then satoshi proposed longest-chain consensus protocols after that, there were various variants of consensus protocols that came into existence like the snow family of consensus protocols . the primary goal of all these developments in consensus protocols is to make the blockchain network more secure and increase the transaction per second in the blockchain, as the properties of an ideal consensus algorithm are:1. scalability: a consensus algorithm should maintain a smooth operation of the network by eliminating the possibility of slow processing time, system bloating, lags, etc. as blockchain functions as a decentralized immutable ledger which records the transaction in the real-time, it gives rise to a complexity to validate and verify the transaction and make the system fault-tolerant, this can be achieved with the help of a consensus algorithm which acts as an instrument using which nodes in the network can coordinate and come to a common consensus or single truth without a central authority, thus a consensus algorithm is an essential element of a blockchain network that maintains the integrity and security of these distributed computing systems. many consensus algorithms developed with the time, for instance initially classical consensus protocols were the only known protocols that use all-to-all voting for obtaining single truth, then satoshi proposed longest-chain consensus protocols after that, there were various variants of consensus protocols that came into existence like the snow family of consensus protocols. the primary goal of all these developments in consensus protocols is to make the blockchain network more secure and increase the transaction per second in the blockchain, as the properties of an ideal consensus algorithm are:. no permissions are required to be a node in a public blockchain network thus there is no guarantee of integrity from the node thus a node can alter the transaction data and hence it creates a fork in the network. 
are developed and become the mainstream consensus protocols, but there are some drawbacks of these consensus algorithms thus to reduce these flaws poa is a feasible alternative consensus algorithm. liveliness of the network: as the framework of poa depends on the reputation of the sealer hence the liveliness of the network implies that the reputation system of the network does not provide a possibility to reach a state at which the bad behaviour of the sealer no longer damages their reputation.in a poa based blockchain network, the validating node or sealers should be incentivised to retain their reputation or gain subsequently as this will refrain sealers from associating with any malicious activity on the network. as pow is a computationally intensive approach it slows down the tps (transaction per second) of the network and the time lag in accepting the new block may lead another miner to find the optimum solution as a proof of work thus creating a fork in the network which increase the time taken to reach on a consensus. in this consensus model, instead of solving a complex mathematical problem, users who want to add new blocks in the network must lock a share of their stakes in the network. poa in contrast is a reputation-based consensus model that relies on a limited number of validating nodes for adding new blocks in the network.• poa is less resource-consuming as compared to other consensus models as there is no direct competition among the nodes to add new blocks because of the limited number of validatory nodes in the network.• this consensus model can achieve more efficiency and throughput relative to other consensus models due to its centralized reputation-based approach to reach a consensus.• because of less number of forks generated in the network, the poa based consensus protocols have better transaction rate as compared to pos or pow.• the poa consensus model relinquishes some degree of decentralization because of its mechanism to reach a consensus using validatory nodes thus we can conclude poa sacrifices decentralization in order to achieve high throughput and scalability.• poa is a reputation-based consensus model, but if a validatory node in the network is intended to do malicious activities in the network, the threat of harming the reputation does not shield the network from malicious activities and third-party involvement. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/339.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/339.txt new file mode 100644 index 0000000000000000000000000000000000000000..c69b27245ba19255238a7a5d5c56a4099af1adcc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/339.txt @@ -0,0 +1 @@ +many fields of science are experiencing increases in the volume of datasets available to researchers. neuroscience in particular is experiencing a rapid growth in data due to technical advances, scientific breakthroughs, and sociological trends towards open data sharing. this increase in data is providing the basis for new discoveries about brain structure and function, but it also presents technical challenges. to deal with the deluge of available data, neuroscientists are increasingly adopting cloud platforms for data storage and processing. however, inefficient use of cloud services can lead to needlessly longer processing time and cost. 
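The discussion above centers on PoA's small, permissioned set of reputation-bound sealers taking turns to add blocks. The sketch below shows a round-robin sealer rotation in that spirit; the block layout, hash chaining, and strict in-turn rule are simplified assumptions for illustration, not any specific PoA protocol.

```python
# Illustrative round-robin block sealing by a fixed list of authorized sealers:
# no work or stake is required, only being the in-turn authority.
import hashlib
import json


def block_hash(block: dict) -> str:
    return hashlib.sha256(json.dumps(block, sort_keys=True).encode()).hexdigest()


def seal_block(chain: list, sealers: list, sealer: str, txs: list) -> dict:
    height = len(chain)
    expected = sealers[height % len(sealers)]      # whose turn is it?
    if sealer != expected:
        raise PermissionError(f"out-of-turn sealer {sealer}, expected {expected}")
    block = {
        "height": height,
        "parent": block_hash(chain[-1]) if chain else None,
        "sealer": sealer,
        "txs": txs,
    }
    chain.append(block)
    return block


sealers = ["alice", "bob", "carol"]                # permissioned validator set
chain: list = []
seal_block(chain, sealers, "alice", ["tx1"])
seal_block(chain, sealers, "bob", ["tx2"])
try:
    seal_block(chain, sealers, "alice", ["tx3"])   # not alice's turn -> rejected
except PermissionError as err:
    print(err)
print([(b["height"], b["sealer"]) for b in chain])  # [(0, 'alice'), (1, 'bob')]
```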
we aim to investigate methods to reduce the impact of data transfers for neuroimaging workflows on cloud services.data prefetching is a well-established technique for the reduction of data access-related costs , , . traditionally, prefetching was used to reduce memory latency, as memory accesses were significantly slower than cpu processing. however, since the rise of big data, prefetching has also been shown to be beneficial to the processing of large datasets located on remote storage . during the execution of an application, data required for future tasks are copied from the remote storage device to compute-local storage, such that when the application requires the data, it can read it from local storage.a recent example of the effectiveness of prefetching on the cloud is netco , a prefetching extension integrated into the hadoop distributed file system (hdfs). future data is prefetched based on two measures: 1) size of the data to be processed and 2) task deadline. netco demonstrated superior performance compared to other file systems, which importantly motivates our study. however, it remains tightly bound to hdfs while cloud applications generally use different file systems. a more versatile solution is needed that would broadly apply to cloud data analysis and storage.the present study focuses on neuroscience data that describe long-range connections between different parts of the human brain, a central research topic in contemporary neuroscience . the three-dimensional trajectory of the major neural pathways, composed of millions of neuronal axons, are inferred from measurements of diffusion mri and processed using computational tractography algorithms. these algorithms generate "streamlines": 3d curves that approximate the trajectories of the major pathways. a single human brain measurement may contain several millions of these streamlines, with their coordinates assessed at sub-millimeter resolution. subsequent analyses of these streamlines usually access streamlines sequentially and entirely within the files in which they are stored. such an access pattern creates an excellent opportunity for prefetching.this paper investigates the benefits of prefetching for cloudbased processing of neuroscience streamlines. through both theoretical analysis and experimentation, we characterize the speed-up provided by prefetching compared to sequential data transfers for neuroscience data processing deployed on the amazon web services cloud. more specifically, this paper makes the following contributions:• formalization and performance analysis of a "rolling prefetch" data scheme for cloud-based applications; • implementation based on the s3fs python library to access data on amazon s3; • experimental evaluation in the amazon cloud with a 500-gb dataset of streamlines derived from dmri data.• formalization and performance analysis of a "rolling prefetch" data scheme for cloud-based applications; • implementation based on the s3fs python library to access data on amazon s3; • experimental evaluation in the amazon cloud with a 500-gb dataset of streamlines derived from dmri data. 
rolling prefetch consists of three threads: (1) the reading thread loads data blocks from local storage and marks them for eviction, (2) the prefetching thread transfers data blocks from cloud storage to local storage, and (3) the eviction threads deletes the blocks that have been marked for eviction.where n b is the number of data blocks, f is the total size of the file to transfer, l c is the cloud latency, b cr is the cloud read bandwidth, and c is the compute time per byte consumed. in other words, there will be less opportunities to prefetch data with very small datasets, unless the block size is proportionally smaller, in which case latency will be penalizing both reads from s3fs and rolling prefetch, extending processing time.3) parallel processing: since perceived throughput may be affected by the number of threads used, we aimed to determine if the performance difference between s3fs and rolling prefetch remains proportional to the amount of data being processed per thread. we expect that the benefits of rolling prefetch can be maximized with an increased compute time, due to greater opportunities to prefetch data during compute. however, we also expect that there is a peak ratio between data transfer time and compute time (data-to-compute ratio), where the benefits of rolling prefetch will be highest.since the benefits of rolling prefetch may vary according to the ratio between data transfer and compute time, we have selected two use cases with varying computation times: 1) histogram distribution of streamline lengths; and 2) bundle recognition.as we increase the file size we observe that the disparity between s3fs and rolling prefetch increases (figure2), with rolling prefetch significantly outpacing (∼1.at a very large number of blocks, the performance of both s3fs and rolling prefetch is degraded due to increased impacts of both s3 storage latency and additionally, in the case of rolling prefetch, local storage latency (figure4). s3fs exceeds the performance of rolling prefetch at a single block, where no actual prefetching can take place and we pay the cost of implementation overheads, such as writing the data to local storage and reading it from local storage rather than directly from memory. the ratio of datato-compute was found to be approximately 1/7, where the compute took, on average around 9000s. although our model dictates that the upper bound of speedup that can be obtained with rolling prefetch is 2×, the observed speedups obtained by these two applications never reach that boundary.our theoretical analysis and experimental results demonstrate that there is a substantial processing time gain to be obtained from using rolling prefetch, particularly in the case of mixed workloads, where there is a significant cost associated with time spent on compute and data transfers. since local storage on compute can become quite costly, researchers must decide between processing only a subset of the data, paying hefty fees related to storing large amounts of data or incorporating data management logic directly into their workflows. 
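The three-thread scheme described above overlaps block transfer with computation. The sketch below is a simplified two-thread version of that idea using a bounded local buffer; the fake latency and compute delays, the missing eviction thread, and the absence of real S3 access are assumptions made purely for illustration.

```python
# Simplified rolling prefetch: a background thread fetches blocks from
# (simulated) remote storage into a bounded local buffer while the main thread
# consumes them in order, so transfer time overlaps with compute time.
import queue
import threading
import time

N_BLOCKS = 8
FETCH_DELAY = 0.05     # pretend remote latency + transfer per block (s)
COMPUTE_DELAY = 0.05   # pretend per-block compute time (s)


def prefetcher(buffer: queue.Queue) -> None:
    for i in range(N_BLOCKS):
        time.sleep(FETCH_DELAY)              # simulate cloud read of block i
        buffer.put(f"block-{i}")             # blocks if the local buffer is full
    buffer.put(None)                         # sentinel: no more blocks


def consume(block: str) -> None:
    time.sleep(COMPUTE_DELAY)                # simulate processing of the block


# Sequential baseline: fetch a block, then process it, one after the other.
start = time.perf_counter()
for i in range(N_BLOCKS):
    time.sleep(FETCH_DELAY)
    consume(f"block-{i}")
sequential = time.perf_counter() - start

# Rolling prefetch: fetching block i+1 overlaps with processing block i.
start = time.perf_counter()
buffer: queue.Queue = queue.Queue(maxsize=2)   # bounded local cache
threading.Thread(target=prefetcher, args=(buffer,), daemon=True).start()
while (block := buffer.get()) is not None:
    consume(block)
prefetched = time.perf_counter() - start

print(f"sequential: {sequential:.2f}s  rolling prefetch: {prefetched:.2f}s")
```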
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/34.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/34.txt new file mode 100644 index 0000000000000000000000000000000000000000..1fef4f080b42303839d7f092ea87679a1d7c7f1c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/34.txt @@ -0,0 +1 @@ +in recent years, automatic speech recognition (asr) and natural language processing (nlp) models have converged to utilize common components like transformers and encoder/decoder modules. they increasingly rely on large amounts of data, large model sizes and large amounts of compute resources. this is a substantial departure from a previous era when asr and nlp utilized different modeling architectures, chosen to inject domain-specific knowledge and constraints. this shift to a common paradigm has stimulated research in fusing audio and text modalities to substantially improve performance in tasks such as audio intent classification and speech-to-speech translation. the fusion of modalities in many cases allows the direct optimization of the endtask, overcoming the hurdles of the older cascaded approaches that often led to accumulation of errors.despite the general trend to develop end-to-end models for various tasks, spoken dialog systems stick out as a sore thumb. most practical systems utilize a cascaded approach where the output of a general asr system is fed into a dialog model trained separately on written domain. this mismatch between written and spoken inputs to the dialog models is not well-studied, largely due to the lack of a public task with spoken user inputs.research into combined audio-text models is limited by the lack of paired data. while the paired data requirement can be relaxed to some extent for training data via un-or self-supervised training techniques, test sets with paired data are crucial for model evaluation. in addition to an evaluation task, a training set with spoken input would also be helpful in quantifying the gains from supervised learning and recent advances in self-supervised learning.the focus of our effort was to bring most benefit to the community with the limited resources available. while a wizard-of-oz style data collection in spoken domain would have been ideal to fully investigate all the phenomena of spoken domain, that would be extremely labor-intensive especially in annotating the dialog states and was beyond the scope of our effort. instead, we chose to create a spoken version of a well-studied writtendomain task, the multiwoz task. one advantage of this approach was that we could directly compare the performance of the spoken version with the continuing advances in the written domain. knowing that the current text-to-speech (tts) systems have come a long way, we also chose to create a tts-version of the multiwoz training corpus as a surrogate for the human spoken version.in the speech-aware dstc11 challenge, participants are asked to infer the dialog states from the sequence of agent (text-input) and user (audio-input) turns. we evaluated the performance on three versions of audio inputs -tts-verbatim, human-verbatim (humans speaking the written user inputs), and human-paraphrased (humans paraphrasing the written user inputs). aside from the audio-inputs, we provided transcripts from a state-of-the-art asr system trained on 33k hours of people's speech corpus to encourage participation from teams that did not have an easy access to asr systems. 
the transcripts were accompanied with timestamps of the words and the latent representations of the acoustic encoder, which could be used by participants to train joint audio-text encoders.in the course of developing this challenge, we developed a cascaded baseline system with data augmentation and report performances on a few variants of cascaded systems. in the process, we uncovered a bias in the multiwoz evaluation task, the slot values in the evaluation set have a substantial overlap with those of the training set. to address this bias, we created a new version of the multiwoz evaluation set, the dev-dsct11 and eval-dstc11. we observed that the new task surfaces many of the challenges in practical spoken dialog systems associated with mismatch in modalities, inability to recover from asr errors, and more generally difficulty of extracting semantically relevant information from audio signals.before launching into the data collection and in the process of developing baseline systems, we noticed that there is a substantial overlap in slot values between the training and evaluation sets, leading to overestimation of performance of the models that memorize the slot values, as reported elsewhere(song et al. we chose a model that achieves close to the state-of-the-art performance on the task with jga of 55.4%(zhao et al.we measured the impact of the redesigned evaluation sets, henceforth referred to as dev-dstc11 and test-dstc11, using two dialog models -a seq-to-seq model(zhao et al. many teams employed an explicit asr error correction model and re-trained their dst models on asr hypotheses together with various forms of tts-based data augmentation.8/1. we redesigned the evaluation sets (dev-dstc11 and eval-dstc11) by sampling new non-overlapping slot values and show that the new sets captures the weakness of the written dialog models better. while the performance improves with model size and data augmentation, even the best models (d3st-xxl) show substantial drop in performance when switching from written version to the spoken version (53.before launching into the data collection and in the process of developing baseline systems, we noticed that there is a substantial overlap in slot values between the training and evaluation sets, leading to overestimation of performance of the models that memorize the slot values, as reported elsewhere(song et al. we chose a model that achieves close to the state-of-the-art performance on the task with jga of 55.4%(zhao et al.we measured the impact of the redesigned evaluation sets, henceforth referred to as dev-dstc11 and test-dstc11, using two dialog models -a seq-to-seq model(zhao et al.8/1. we redesigned the evaluation sets (dev-dstc11 and eval-dstc11) by sampling new non-overlapping slot values and show that the new sets captures the weakness of the written dialog models better. while the performance improves with model size and data augmentation, even the best models (d3st-xxl) show substantial drop in performance when switching from written version to the spoken version (53. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/340.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/340.txt new file mode 100644 index 0000000000000000000000000000000000000000..c9d4208e2a1d9e0a473fd03a566889ad9b013179 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/340.txt @@ -0,0 +1 @@ +to reap the full potential of fog computing and enable emerging application domains such as the internet of things (iot) or connected driving, platform architectures need to be redesigned for an increasing degree of geo-distribution.in, e.g., a fog data store, full replication is infeasible as it requires constant communication between all nodes, especially when considering consistency guarantees. newer systems shard data and route client requests to the node that is concerned with a particular data item . gfs and nebula use centralized master servers that control replica placement. this is practical in a tightly coupled cluster but routing requests to a central server negates any qos improvements in a geo-distributed fog deployment. pastry , oceanstore , or cassandra use hashing, which scales well and is easily implemented, but cannot take data movement based on proximity into account.instead, the close relationship between physical and virtual world in fog computing invites a new approach where data is sharded based on network topology and geographic distribution of application clients. most importantly, different clients rarely access the same data at different locations, e.g., an ehealth sensor is bound to a specific person and is independent from sensors on another person.a fog data management system needs to ensure that the data the client needs is available at its closest node. in ifogstor , a centralized cloud node calculates optimal data placement based on optimizing latency between clients and nodes, yet placement is static and as such not useful for mobile clients. fbase , allows applications to control replica placement directly, which optimizes efficiency by moving data replicas with clients but places a burden on application developers. we extend this approach with a component to predict application client movement that alleviates work needed on the application side. the idea is that a client always accesses the same kind of data, so constantly moving this data to a node near the client leads to optimal resource allocation. crucially, this needs to happen without fully replicating all data, which is inefficient considering the limited network and storage resources available at the fog and edge. newer systems shard data and route client requests to the node that is concerned with a particular data item.a fog data management system needs to ensure that the data the client needs is available at its closest node. in ifogstor, a centralized cloud node calculates optimal data placement based on optimizing latency between clients and nodes, yet placement is static and as such not useful for mobile clients. the idea is that a client always accesses the same kind of data, so constantly moving this data to a node near the client leads to optimal resource allocation. we see two options for this: storage nodes keep track of data access and communicate among each other to coordinate data movement, or a clientside middleware initiates data movement. 
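The client-side option just mentioned could work roughly as in the hedged sketch below: a small middleware records which fog node served recent requests and, when the trace shows monotone movement along the topology, asks the storage layer to replicate the client's data to the next node in that direction. The node layout, window size, and the replicate() hook are hypothetical, not the system's actual API.

# Sketch (assumed design, not the actual system) of client-side movement
# prediction driving proactive replica placement.
from collections import deque
from typing import Optional

NODES = ["node-a", "node-b", "node-c", "node-d", "node-e"]  # assumed topology: nodes along a road

class MovementPredictor:
    def __init__(self, window: int = 3):
        self.history = deque(maxlen=window)  # metadata collected by the middleware

    def observe(self, node: str) -> None:
        self.history.append(NODES.index(node))

    def predict_next(self) -> Optional[str]:
        """Extrapolate continued movement in the same direction, if any."""
        if len(self.history) < 2:
            return None
        hist = list(self.history)
        steps = [b - a for a, b in zip(hist, hist[1:])]
        if all(s > 0 for s in steps):        # moving "forward" along the topology
            nxt = min(hist[-1] + 1, len(NODES) - 1)
        elif all(s < 0 for s in steps):      # moving "backward"
            nxt = max(hist[-1] - 1, 0)
        else:
            return None                      # no clear trend: do nothing
        return NODES[nxt]

def replicate(client_id: str, target_node: str) -> None:
    # placeholder for the storage system's replica-placement call (hypothetical)
    print(f"replicating data of {client_id} to {target_node}")

predictor = MovementPredictor()
for accessed in ["node-a", "node-b", "node-c"]:   # simulated access trace
    predictor.observe(accessed)
    target = predictor.predict_next()
    if target is not None:
        replicate("client-42", target)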
the second approach places a lesser burden on the edge node as prediction logic travels with the client, and sensitive information about data access and location can be kept on the client. as shown in figure1, the client contains a small middleware that collects metadata about data access and, optionally, hints about future movement or data access from the application in the form of geo-hints. this data is then used by a prediction component in the edge node that predicts future client location and initiates data movement. data about a particular client is kept mostly on the client so it does not have to be propagated throughout the storage system. hence, predicting data placement means predicting data access. this prediction can be based on the times a client accessed particular data in the past, on the physical or logical location of that access, on inferred metadata, or on client hints about future data access and location. when data shows that the client continuously moves between nodes in the same direction, future access is likely to happen at nodes further in this direction.to that end, we present an approach towards predictive replica placement that predicts movement of application clients based on data such as past data access patterns and proactively replicates data to likely future locations. we plan to evaluate the effectiveness of different sources for data access and client location patterns, as well as different prediction methods. furthermore, we plan to compare the performance of our proposed system to other replica placement approaches, in particular with regards to access latency, overhead, and privacy in the context of data movement restrictions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/341.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/341.txt new file mode 100644 index 0000000000000000000000000000000000000000..825f9c42cc889d1fd7ccfa8952fe972b6eb0355e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/341.txt @@ -0,0 +1 @@ +our approach tries to aid the full engineering workflow, from the desinging process, throught the system verification to the deployment, while providing a fault-tolerant architecture and automating most of the work, so the engineer can focus on the system design. following the sos approach, the model provides hierarchical composition of the components and describes the basic capabilities, interfaces and connections of the subsystems in the system. • communication and behaviour model: in the middle, we model the internal operation of our systems and the communication between them. this level consists of two kinds of models, the communication model and the behaviour model. the communication model can be used to describe the communication interfaces of the system and subsystems, and their qos characteristics, while the behaviour model can be used to describe the internal operation of the subsystems. the deployment model can be used to describe the physical components in the system and which physical components the subsystems should be installed on. the purpose of model transformations is to avoid the need to recreate relevant parts of the system at different levels of modeling, as they have already been designed at an earlier level, so they can be used in the design process of the next level. 
from the behaviour and communication model we generate the executable code, which is run in the containerized services, and from the deployment model we generate the deployment artifacts, which is used by the kubernetes to run these services. the behaviour model is also used to verify the functional correctness of the system.the purpose of the high-level model is to define system components and the dataflow between those components, which can later be used to generate the skeletons of the precise behaviour models.the high-level model is suitable for expressing systemsubsystem relationships between the components, thereby supporting the hierarchical composition of the system.the communication model describes the communication interfaces of the system services. therefore if we have a statechart describing the behaviour of the system, we can transform the model to a timed automata.first, the engineer designs the high-level model, defining the components of the system, their interfaces and the relations between them.the deployment model describes the hardware components of the system and the needs of the services to be installed.the presented modeling approach starts with the high-level model and leads to the deployment of the system, while integrating a verification framework to prove the functional correctness of the system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/342.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/342.txt new file mode 100644 index 0000000000000000000000000000000000000000..e4227c90d1538e30b6ee612d3ec56cae16c19f72 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/342.txt @@ -0,0 +1 @@ +ics communication provides proper functioning of critical infrastructure systems. these systems are naturally exposed to external threats including cyber attacks . traditionally, industrial systems are well protected against external threats through the use of firewalls and ids devices that filter communication between ics systems and internet traffic, making direct attacks on smart grid communication a rare case. however, attackers can gain access to the system by sending malware to a user via an infected e-mail attachment .the importance of the research is motivated by recent cyber attacks against critical infrastructure systems. one of the attacks against ukrainian power company happened in december 23, 2015 when blackenergy malware caused power disruption to 225,000 customers, lasting up to 6 hours . this attack included multiple stages starting with "spear phishing" e-mails targeting a staff to gain access to the corporate network of the power company. once inside the power company network, attackers gathered credentials and used vpns to get access to the internal network. more recent ransomware attack against the colonial gas pipeline in the u.s. happened in may 2021 . similarly to blackenergy attack, it was also initiated from an infected internal station.compared to standard information and communication systems, smart grid communication exhibits stable, periodical, and regular communication patterns since the communication occurs between devices with no or little human interference. 
typically, a controlling station periodically requests status data from a field device like the programmable logical controller (plc) or remote terminal unit (rtu) in order to provide a real-time view on industrial processes.to detect internal threats, we need (i) to regularly monitor smart grid communication and (ii) observe suspicious patterns that occur in the network traffic. one solution is to employ ics monitoring using extended ipfix protocol that retrieves monitoring data about active ics communication. to detect unknown adverse events or unusual behavior, we can observe statistical patterns using ics flow data.in this research, we closely examine statistical distribution of inter-arrival times of iec 104 packets. this is a part of my phd study that is focused on anomaly detection of ics communication using statistical methods. the main idea behind the research comes out from stable communication patterns that are typical for ics communication and can be observed on packet and flow levels. within the phd. research, we plan to apply various statistical methods on smart grid communication and evaluate their efficiency for covering common ics attack vectors defined by mitre att&ck for ics matrix 1 .outlier detection is a statistical technique that discovers data points that are inconsistent with the rest of the data. the fundamental idea behind unsupervised outlier detection is to score data points solely on the essential characteristics of the dataset. in general, density or distance are used to determine whether a data point is an inlier (normal) or outlier (anomaly). second, the data model should be designed to handle outlier data points accurately. the purpose of outlier detection is to detect rare events or unusual activities that differ from the majority of data points in a dataset. for a global outlier, outlier detection considers all data points, and the data point is considered an outlier if it is far away from all other data points. the local outlier detection covers a small subset of data points at a time. a local outlier is based on the probability of data point being an outlier as compared to its local neighborhood which is measured by the k-nearest neighbors (knn) algorithm. lof is a densitybased unsupervised anomaly detection method that computes a given data point's local density deviation with respect to its neighbors. lof scores are computed for all data points according to parameter k (the number of nearest neighbors) as follows:. for a data point p, the k-distance (p) is the distance d(p,o) between p and the farthest neighbor data point o by the following conditions: 1) at the least, k data points (records.the meaning of k-nearest neighbors of p is any data point q whose distance to the p data point is not greater than the k-distance(p). we used lof algorithm to learn inter-arrival time distribution of the transmitted iec 104 packets by computing the k-distance, reachability distance and density of data points. we then applied the lof on time windows where each data block contains 5000 inter-arrival time records. also, this algorithm is able to detect outliers regardless of the data distribution of normal behavior, since it does not make any assumptions about the distribution of data records. 
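A minimal sketch of the windowed LOF analysis described above, assuming scikit-learn's LocalOutlierFactor and synthetic inter-arrival times in place of real IEC 104 captures; the window size of 5000 records follows the text, while k and the injected anomalies are assumptions.

# Hedged sketch: LOF over inter-arrival times in fixed-size windows.
import numpy as np
from sklearn.neighbors import LocalOutlierFactor

rng = np.random.default_rng(0)
# synthetic "stable, periodic" traffic: ~1 s polling with small jitter,
# plus a few injected gaps standing in for anomalous events
inter_arrival = rng.normal(loc=1.0, scale=0.02, size=20_000)
inter_arrival[rng.choice(inter_arrival.size, 20, replace=False)] += rng.uniform(5, 30, 20)

WINDOW = 5000   # records per data block, as in the text
K = 20          # number of nearest neighbours (assumed)

for start in range(0, inter_arrival.size, WINDOW):
    block = inter_arrival[start:start + WINDOW].reshape(-1, 1)
    lof = LocalOutlierFactor(n_neighbors=K)
    labels = lof.fit_predict(block)              # -1 = outlier, 1 = inlier
    scores = -lof.negative_outlier_factor_       # larger score = more anomalous
    outliers = np.flatnonzero(labels == -1)
    print(f"window starting at {start}: {outliers.size} outliers, "
          f"max LOF score {scores.max():.2f}")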
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/343.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/343.txt new file mode 100644 index 0000000000000000000000000000000000000000..82772f06e5b7ded4c1f3f39b4a6606d642139bfc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/343.txt @@ -0,0 +1 @@ +the reference model, ernest , is based on linear regression, with coefficients estimated through non-negative least squares (nnls). this is a variant of the least squares algorithm with the added restriction that coefficients must be non-negative. this restriction ensures that each term provides a non-negative contribution to the overall execution time, preventing estimation of negative execution times.as alternative to ernest, we consider four classic ml models for regression: 1 -regularized linear regression (lasso) 5 , neural network, decision tree, and random forest. linear regression (lr) was chosen for being easily interpretable. decision tree (dt) and random forest (rf), in turn, capture non-linear relationships in the data besides allowing interpretability as well. lastly, neural networks (nn) can capture non-trivial interactions among the input features albeit less interpretable. investigating different models is important to analyzing the performance differences across applications and identifying the solution that is best for each scenario. we refer to for a description of the aforementioned regression techniques. our first approach, referred to as black box, relies only on features that capture knowledge available prior to the application execution, similarly to the ernest model (but, as mentioned, using different ml methods). we evaluate both approaches, comparing them against the ernest model, on different scenarios, covering different application workloads and platform configurations, and also investigating the impact of the size of the training set on prediction accuracy.proposed a prediction platform for spark sql queries and machine learning applications, which, similarly to our gray box models, also exploits features related to each stage of the spark application.the ernest and the black box models use only features which are based on the number of cores and on the input data size, which are available a priori (before execution starts).we emphasize that while full information for all data points in the training set was used to learn the gray box models, we use only the information available before the runs to evaluate these models on the test set. for this reason, also the tensorflow number of cores (which corresponds to the number of cores available in the cluster) was included in the feature set of both black and gray box models while its inverse is used in the gray box models only. for each workload, we evaluate the accuracy of the prediction models in terms of core interpolation and data size extrapolation capabilities, acknowledging the fact that the data available for learning the models (training set) might have been obtained via experiments on setups different from the one for which the prediction is needed (test set). figure1shows the various scenarios (y-axis) of core interpolation built for each workload based on different splits of the data into training and test sets: in each row (case number), blue boxes represent configurations for which the data were used as part of the training set (and crossvalidation) and red crosses indicate configurations used as part of the test set 10 . 
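The sketch below contrasts an Ernest-style non-negative least squares fit with the lasso, decision tree, random forest, and neural network regressors named above, on synthetic (cores, data size, runtime) samples. The exact Ernest feature terms and all of the data here are assumptions for illustration, not the evaluated workloads.

# Illustrative comparison of an Ernest-like NNLS model and generic ML regressors.
import numpy as np
from scipy.optimize import nnls
from sklearn.linear_model import Lasso
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.neural_network import MLPRegressor

rng = np.random.default_rng(1)
cores = rng.integers(4, 64, size=200).astype(float)
data_gb = rng.uniform(1, 100, size=200)
# synthetic completion time: serial part + parallel part + coordination overhead
runtime = 30 + 5 * data_gb / cores + 2 * np.log(cores) + rng.normal(0, 1, 200)

def ernest_features(cores, data_gb):
    # assumed Ernest-like terms: bias, data/cores, log(cores), cores
    return np.column_stack([np.ones_like(cores), data_gb / cores,
                            np.log(cores), cores])

X_raw = np.column_stack([cores, data_gb])          # a-priori "black box" features
train, test = slice(0, 150), slice(150, None)

coef, _ = nnls(ernest_features(cores[train], data_gb[train]), runtime[train])
pred_ernest = ernest_features(cores[test], data_gb[test]) @ coef

models = {"lasso": Lasso(alpha=0.1),
          "decision tree": DecisionTreeRegressor(max_depth=5),
          "random forest": RandomForestRegressor(n_estimators=100, random_state=0),
          "neural network": MLPRegressor(hidden_layer_sizes=(32,), max_iter=2000, random_state=0)}

def mape(y, p):
    return 100 * np.mean(np.abs(y - p) / y)

print("ernest-like NNLS  MAPE:", round(mape(runtime[test], pred_ernest), 2))
for name, model in models.items():
    model.fit(X_raw[train], runtime[train])
    print(f"{name:>16}  MAPE:", round(mape(runtime[test], model.predict(X_raw[test])), 2))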
since there is a large difference in the application completion time in the runs where the number of cores is small, we always included the data for the smallest number of cores in the training set.in the data size extrapolation scenarios, we put the runs with the largest dataset size (spanning across all available cores configurations) in the test set while the runs with the other dataset sizes in the training data, as shown in the two rightmost columns of tableii. for example, in the data extrapolation experiments with k-means, the best gray box model is the one with lr as regression technique in cases c1, c2, c3 and c4, with rf in cases c5 and c6, and with dt in the last case (c7)., query 26 and sparkdl with fixed data size), ernest yields very good results with mape values smaller than 10%, whereas the best gray box model generally achieves worse performance (mape of best models is in the range 7.8%).however, recall that gray box models do use dag-related features which are not available for the test instance at prediction time (a priori), and thus are replaced by the averages from the training data. also, our results show that the best black model always outperforms the results of the ernest method (possibly due to the use of more effective ml techniques) and almost always outperforms the results of the gray box model, despite using fewer features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/344.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/344.txt new file mode 100644 index 0000000000000000000000000000000000000000..5123f9e9afdd07ab1002ae2b66ef8a73a0f8f3d4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/344.txt @@ -0,0 +1 @@ +following the covid-19 pandemic, humanity is increasingly demanding a more efficient and equitable use of resources . one of the key issues is the allocation of communication resources. the demand for the internet of things (iot) applications has recently been rapidly increased remarkably. the use of long-distance and power-saving communications, particularly low-power wide-area networks (lpwa), is expected in iot communication. however, due to its characteristics, frequency congestion has become a serious problem, necessitating the use of a dynamic multi-channel selection method.using lightweight reinforcement-learning, called "tug-of-war (tow) dynamics ," ma et al. proposed an autonomous channel selection method in our previous study . all nodes learned the channel status by acknowledgement (ack) in a repeated cycle of sleep, wake, data transmission, and sleep. we also implemented this algorithm in ieee802.15.4e/4g-iot devices and experimented with 50 iot devices dynamically selecting five channels. however, this method did not address time-scheduling optimization for overall node transmission timings.for efficient use of space (channel) and time resources, we present the first model toward achieving "an economic allocation of resources." in this study, we propose a simple channelallocation method based on tow dynamics, combined with the time scheduling based on the synchronization of nonlinear oscillators. the synchronization is named "tow-based synchronization" because of the competing pushes and pulls that occur in phase space. in other words, the tow behaviors are active in both space and time in our new model. regardless of whether there is any interaction between the nodes, each node's phase. 
each node can transmit a packet at every discrete time step through the channel selected via tow dynamics. θ i =0), each node not only advances by angle ω but also interacts with other nodes by the interaction term ±k sin(φ) which represents "push (-)" or "pull (+)," where k represents the coupling strength (parameter), and φ represents the phase difference. each node receives a reward (+1 or -ω) according to the success (winning at the slot machine of selected channel (say #k), whose reward probability p k under the condition that there is no node using the same channel within φ th ) or failure (otherwise).in this study, we propose the following channel-allocation method using the tow-based synchronization which is based on the kuramoto's de-synchronization model; for node i =1, • • • , m , each phase θ i (t) is updated by the following equation:.here, ξ(t) is an arbitrary random noise, α is the memory parameter, and r k (t) is the reward from each slot machine whose reward probability is p k .figure1shows the interaction term between nodes for each phase difference. the phase difference between two nodes increases if the same channel is selected by both nodes (desynchronization), whereas the phase difference decreases if different channels are selected (synchronization). this tow-like "push and pull dynamics" between nodes in phase space can organize grouping (synchronization group) of various selection nodes. the groups can be distributed in the phase space such that every distance between groups should be larger than φ th .figure1: the interaction term between nodes for each phase difference.in this study, we fix ω =φ th = π/4 for all nodes, and only once when crossing the θ i = 0 (once in eight time steps), the interaction term ±k sin(φ) works. synchronization groups, particularly those where each node selects a different channel, are non-uniformly distributed in phase space such that every distance between groups is larger than φ th (area of influence), as expected.2(b). we can use more nodes until m = 40 (independent 5 channels × 8 phases = 40 nodes). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/345.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/345.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb465de4ffc4aa449e45a86018eb07ce32c0bd81 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/345.txt @@ -0,0 +1 @@ +in recent years, cyber-physical systems (cps) has evolved to become more complex in the attempt to provide them with increasingly sophisticated capabilities . this trend motivates industry and academia to investigate solutions to handle this increasing complexity. many of today's cps applications are developed as collaborative systems in which the systems interact with other systems, forming systems-of-systems (sos). sos is commonly characterised by five principal features; operational independence, managerial independence, geographic distribution, evolutionary development, and emergent behavior . furthermore, the systems interact more with their environment. the systems may also work autonomously most of the time by relying on artificial intelligence (ai). these mentioned aspects provide new challenges for collaborative cps, because their structure and behavior are exposed and could be influenced by external parties. in this paper we are mainly concerned with industrial cps that are mission or safety critical. 
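A rough simulation sketch of the tug-of-war channel selection with oscillator-based time scheduling described in the preceding passage: phases advance by ω = π/4 each step, a node transmits when its phase crosses zero, colliding transmitters are pushed apart and non-colliding ones pulled together by ±K sin(φ), and channel preferences are reinforced with rewards of +1 or -ω. The coupling strength K, the memory update, and the success rule are assumptions; the paper's exact update equation is not reproduced here.

# Hedged simulation sketch of ToW channel selection with phase-based scheduling.
import numpy as np

rng = np.random.default_rng(2)
M, CHANNELS, STEPS = 10, 5, 4000
OMEGA = np.pi / 4          # phase advance per step (from the text)
PHI_TH = np.pi / 4         # interference threshold on phase difference (from the text)
K, ALPHA = 0.1, 0.9        # coupling strength and memory parameter (assumed values)

theta = rng.uniform(0, 2 * np.pi, M)   # node phases
q = np.zeros((M, CHANNELS))            # tug-of-war preference per channel

def wrapped(d):
    """Smallest signed phase difference for d (radians)."""
    return np.angle(np.exp(1j * d))

for t in range(STEPS):
    new_theta = (theta + OMEGA) % (2 * np.pi)
    crossing = new_theta < theta                      # nodes that cross theta = 0 transmit now
    choices = q.argmax(axis=1) if t else rng.integers(0, CHANNELS, M)
    active = np.flatnonzero(crossing)
    for i in active:
        near = [j for j in active if j != i and abs(wrapped(theta[i] - theta[j])) < PHI_TH]
        success = all(choices[j] != choices[i] for j in near)
        reward = 1.0 if success else -OMEGA           # reward of +1 or -omega (from the text)
        q[i, choices[i]] = ALPHA * q[i, choices[i]] + reward   # assumed ToW-style update
        for j in near:
            phi = wrapped(theta[i] - theta[j])        # phase of i as seen from j
            sign = -1.0 if choices[j] == choices[i] else 1.0   # push (-) on collision, pull (+) otherwise
            new_theta[j] = (new_theta[j] + sign * K * np.sin(phi)) % (2 * np.pi)
    theta = new_theta

print("channel preferred by each node:", q.argmax(axis=1))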
for such systems, it is a challenge to establish a trustworthy collaborative cps (i.e. a cpsos) that can ensure key properties such as safety and security, while also relying on ai to provide new functionalities and enhance performance.the emergence of edge computing may have a major impact in expediting the development of collaborative cps. the advancement of network devices (e.g., switch, router, bridge), embedded devices equipped with ai chips, telecommunication technology (5g), micro-services, containers, etc. make it feasible to support collaboration by computational tasks, data and models at the edge instead of the cloud . edge computing can reduce network latency compared to cloud computing due to its locality. furthermore, edge computing can enable ai to bring its peak potential for collaborative cps applications.trustworthiness is frequently raised as an essential aspect for future systems, and collaborative cps is not the exception. trustworthiness has for example been put forward from the perspectives of ai, human-machine interaction and cps. just like the concept of dependability, trustworthiness is an umbrella term. however, trustworthiness tends to be used as an even more multifaceted property, referring to qualities such as security, safety, and predictability and a set of ethical properties, e.g., transparency and human oversight . nonetheless, the definition of trustworthiness and its attributes are still evolving. the broad scope of trustworthiness and the interdependencies (and trade-offs) between the involved qualities makes it difficult to determine proper trustworthiness attributes for realizing collaborative cps. we believe that architecture guidelines represent one important means to improve on this situation. the architectural guidance can be used to ensure the trustworthiness attributes are addressed from the early process of system development.architectural guidance has a relation to all relevant stakeholders (e.g., business, consumer, and technical) involved in the development and integration of collaborative cps. the need for architectural guidance becomes even more important with the increasing system complexity, shorter time-tomarkets, and system development involving multiple teams and organizations . reference architectures have the purpose to provide such architectural guidance during the development of novel system architectures . reference architectures accomplish this by containing essential architectural patterns, the use of standards and often implicitly domain knowledge that constrains system design . therefore, reference architectures can be used to guide the alignment over the system and subsystem interactions and integration as well as help to integrate all stakeholders who are involved in the system development (in terms of establishing shared views of the same system).in order to utilize the most of it, reference architectures should be able to provide the same architecture understanding to all stakeholders. for instance, the architect can communicate effectively with other architects or other stakeholders during the system development when they share the same architectural understanding. furthermore, the work can be more efficient and flexible when the architects or other stakeholders recognize the same boundaries between functions, processes, and subsystems, speak the same language and use the same standards. this cooperation can moreover be achieved through the use of a common language, standards, viewpoints, and architecture patterns. 
we are interested in the role and use of reference architectures for realizing trustworthy collaborative cps. we consider intelligent transportation systems (its) as our main domain since it is well representative for collaborative cps. our research involves two steps; (1) studying how people and organizations understand and make use of reference architectures, and how they treat trustworthiness attributes when working with reference architectures, and (2) investigating how to represent knowledge, especially the trustworthiness attributes in the reference architecture. after conducting these two steps, we will summarize our research findings and prioritize follow up research, to be evaluated in an its setting. as part of our first step, we currently investigate the role of reference architecture as boundary objects for realizing trustworthy collaborative cps. reference architectures have the purpose to provide such architectural guidance during the development of novel system architectures. reference architectures accomplish this by containing essential architectural patterns, the use of standards and often implicitly domain knowledge that constrains system design. therefore, reference architectures can be used to guide the alignment over the system and subsystem interactions and integration as well as help to integrate all stakeholders who are involved in the system development (in terms of establishing shared views of the same system). we are interested in the role and use of reference architectures for realizing trustworthy collaborative cps. our research involves two steps; (1) studying how people and organizations understand and make use of reference architectures, and how they treat trustworthiness attributes when working with reference architectures, and (2) investigating how to represent knowledge, especially the trustworthiness attributes in the reference architecture. as part of our first step, we currently investigate the role of reference architecture as boundary objects for realizing trustworthy collaborative cps. recently, the study inutilized reference architecture as boundary objects for the alignment of cross-domain stakeholders' for the development of collaborative sos. unlike existing studies, we are focusing on how people in practices deal with trustworthiness attributes to create reference architectures for trustworthy collaborative cps. therefore, the reference architecture can be used as a practical boundary objects to align stakeholder's ideas in realizing trustworthy collaborative cps. we also wanted to collect information about the challenges of using reference architectures as well as what attributes should be captured to realize trustworthy systems.• sq1: what is the definition of reference architectures? • sq2: who are the intended users of reference architectures?.• sq5: what aspect should be captured in the reference architecture for trustworthy collaborative cps? in the interview, we also asked our respondents about their personal information and experience related to systems engineering and reference architectures.2) the intended users of reference architectures: we got valuable inputs from our respondents about the intended users of reference architectures.3) design reference architectures: in this aspect, the person who is responsible for designing reference architectures is the architect.we also questioned our respondents on how to manage reference architectures to keep it "alive" such that it can be used for the future system development. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/346.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/346.txt new file mode 100644 index 0000000000000000000000000000000000000000..6e19be0393f6d069415470a46e2c974d6146206b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/346.txt @@ -0,0 +1 @@ +in this paper we consider the model of probabilistic population protocols. it was originally intended to model large systems of agents with limited resources . in this model, the agents are prompted to interact with one another towards a solution of a common task. the execution of a protocol in this model is a sequence of pairwise interactions between agents chosen uniformly at random . during an interaction, each of the two agents, called the initiator and the responder (the asymmetry assumed in ), updates its state in response to the observed state of the other agent following the predefined (global) transition function. the efficiency of population protocols is typically expressed in terms of the number of states used by agents and the number of interactions required by solutions (e.g., with high probability (w.h.p.) or in the expectation).there is a vast literature on population protocols, especially for such basic problems as majority and leader election .in the literature on population protocols , the concept of parallel time, which is the number of required interactions divided by the number n of agents, is widely spread. in other words, one divides the sequence of interactions in an execution of a population protocol into consecutive subsequences of n interactions called rounds. then one estimates the expected number of required rounds or the number of required rounds w.h.p.population protocols for any non-trivial problem require ω(n log n) interactions . hence, the expressions resulting from dividing those on the number of interactions by n are not only simpler but also more focused on the essentials. fast population protocols are commonly identified with those having poly-logarithmic parallel time. also, for example, when improving a polynomial upper bound on the number of interactions to o(n log n), one can refer to the improvement as an exponential one in terms of the parallel time, which sounds impressive.clearly, the average number of interactions that an agent takes part is a lower bound on the actual parallel time when the transition function of a population protocol is a black box. however, calling this trivial lower bound for parallel time may mislead readers not familiar with or not recalling the definition. they may start to believe that by the random choice of a pair of agents for each interaction in a round, there should be a lot of independent interactions in the round that could be implemented in parallel. consequently, they could believe that the whole protocol could be implemented in parallel in time proportional to the number of rounds. unfortunately, this intuition appears too optimistic.it is obvious that one can construct a sequence of n interactions that requires n parallel steps when the transition function of a population protocol is treated as a black box. more importantly, we show that the expected maximum length of a dependency chain of interactions in a single round of n interactions is θ( log n log log n ). 
the lower bound implies that when the transition function is treated as a black box and the update of the states of interacting agents requires Ω(1) time steps then the expected maximum number of parallel steps necessary to implement a round of n interactions is Ω(log n / log log n). the upper bound opens for the possibility of a matching, fast parallel implementation of a single round of n interactions in the average case under additional assumptions. more importantly, we show that the expected maximum length of a dependency chain of interactions in a single round of n interactions is Θ(log n / log log n). the lower bound implies that when the transition function is treated as a black box and the update of the states of interacting agents requires Ω(1) time steps then the expected maximum number of parallel steps necessary to implement a round of n interactions is Ω(log n / log log n). the expected maximum length of a directed path in the dependency dag of a round of n interactions is Ω(log n / log log n). consequently, when the transition function is treated as a black box and the update of the states of interacting agents requires Ω(1) time steps then the expected number of parallel time steps required to implement the round is Ω(log n / log log n). we shall show that the expected maximum number of interactions in s that a single agent participates in is Ω(log n / log log n). therefore, the expected maximum load of a bin where no two balls correspond to the same interaction is still Ω(log n / log log n). hence, the expected maximum number of interactions that the same agent participates in in a round of n interactions is Ω(log n / log log n). the bound in theorem 1 follows from the fact that one expects that at least one agent will be involved in Ω(log n / log log n) interactions, which immediately implies that the expected maximum length of a directed path in the dependency dag of a round of n interactions is Ω(log n / log log n). however, if one considers concurrently more agents, then perhaps the expected maximum length of a directed path in the dependency dag can be significantly larger, that is ω(log n / log log n)? in this section we prove that this is not the case, implying that the lower bound in theorem 1 is asymptotically tight. in order to derive our upper bound on the expected maximum length of a directed path in the dependency dag of a round consisting of n interactions, we shall identify interactions with labeled edges in k_n. then, for k = ⌈(3+c) log n / log log n⌉, with probability at least 1 - 1/n^c, the obtained multigraph has no monotone interference path of length k. since e_k is the event that g_{n,p} has a monotone interference path of length k, the bound above implies that with probability at least 1 - n^(-c) the random labeled graph g_{n,p} has no monotone interference path of length k = ⌈(c+3) log n / log log n⌉ = Θ(log n / log log n). the expected maximum length of a directed path in the dependency dag of a round of n interactions is O(log n / log log n). it follows that the number of levels is O(log n / log log n). our almost logarithmic lower bound on the expected maximum length of a dependency chain in the dependency dag of a round in theorem 1 is implied by the lower bound on the expected maximum number of interactions sharing a single agent in a round of n interactions.
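A quick empirical sanity check (not a proof) of these bounds: sample a round of n uniformly random interactions, track how many interactions the busiest agent joins and the longest dependency chain, and compare against log n / log log n. The dependency rule used here, each interaction depending on the latest earlier interaction of either participant, is the natural reading of the dependency dag above.

# Empirical check of max agent load and longest dependency chain in one round.
import math
import random

def round_stats(n: int, seed: int = 0):
    rng = random.Random(seed)
    depth = [0] * n          # longest dependency chain ending at each agent
    load = [0] * n           # number of interactions each agent takes part in
    longest = 0
    for _ in range(n):       # one round = n interactions
        u = rng.randrange(n)
        v = rng.randrange(n - 1)
        if v >= u:
            v += 1           # ensure two distinct agents
        d = max(depth[u], depth[v]) + 1   # depends on both agents' latest interactions
        depth[u] = depth[v] = d
        load[u] += 1
        load[v] += 1
        longest = max(longest, d)
    return longest, max(load)

for n in (10_000, 100_000, 1_000_000):
    chain, busiest = round_stats(n)
    ref = math.log(n) / math.log(math.log(n))
    print(f"n={n:>9}: longest chain={chain:3d}, busiest agent={busiest:3d}, "
          f"log n / log log n ≈ {ref:.1f}")

The measured values stay within a small constant factor of log n / log log n, consistent with the stated asymptotics.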
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/347.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/347.txt new file mode 100644 index 0000000000000000000000000000000000000000..08e75072297a1c2fccff262315bffa58be6e9029 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/347.txt @@ -0,0 +1 @@ +with a market capitalization of more than 1.2 trillion usd and a price of around 68,000 usd per coin in november, 2021 , bitcoin has always astounded people with its steadily increasing value and long-term activity since its inception in 2009 . the clout of bitcoin ushered researchers and investors into blockchain, which is a new technology underpinning cryptocurrencies . based on this technology, an assortment of tokens were created (e.g. ether) and their market value peaked at 3 trillion usd approximately on august, 2021 .during the period when the market value of cryptocurrencies skyrocketed, cryptokitties, the first blockchain-based game, has garnered widely recognized and financial interest in early december 2017 . by hybridizing cats with different genes, each new-born cat is unique and, if there is a gene mutation, extremely rare. players in cryptokitties can trade these cartoon cats with varying monetary values which is based on each of them being unique . it can be said that cryptokitties is the prototype of nft in the true sense because it has the following characteristics: uniqueness, indivisibility and non-interchangeability. unlike traditional tokens like bitcoin and ether, which are standard coins that all the tokens are equivalent and indistinguishable , each nft is unique and cannot be exchanged. additionally, due to indivisible nature of nfts, buying 0.1 token, which occurs frequently in bitcoin trade, is not permitted. in contrast to the conventional token such as bitcoin which is just a name, nfts exhibit more humanistic values because they contain more information and serve as a culture symbol.ethereum, a platform that issues ether with the second largest market value in cryptocurrency and is currently the largest nft trading platform, provides two main protocols for creating nfts: erc721 and erc1155. as with erc1155, erc721 is a token standard that defines an interface to allow nft to be managed, owned, and traded by a smart contract . however, there are a large quantity of differences in the ways of creating and transferring nfts between them. erc721 needs to create a new smart contract (e.g., cryptokitties contract) to create a new kind of nft, whereas erc1155 can deploy infinite kinds of nft in one smart contract. moreover, erc721 permits only one-nft transfer in a transaction whereas a batch of nfts being transferred in a transaction is sanctioned in erc1155 which can save many gas fees. these features of the two protocols make the corresponding trading, the characteristics of nfts, and trading participants diverse in many ways.undoubtedly, uniqueness, non-interchangeability and humanistic values which are the merits of nft and systematic protocols make the creation and trade of nft a hit. as the market capitalization of traditional tokens has increased steadily in the initial stage of its development, the market capitalization of nfts has jumped from only 70 million usd to near 25 billion usd in 2021 which is called "the first year" of nft . meanwhile, the market capitalization of cryptocurrencies reached a plateau at 2.5 to 2.9 trillion usd in november 2021. 
just when it seemed that cryptocurrencies were very prosperous, the subsequent consecutive plummet made their market capitalization one-third than its peak. the same to the market capitalization of nft, after it peaked at around 36 trillion usd in january 28th 2022, a fluctuation appearing and brought it to, fortunately, 23 trillion usd in september 5th 2022. no one knows whether the fate of nft is similar to cryptocurrencies, nevertheless, beeple, who created the most valuable (more than 69 million usd) nft called "everydays : the first 5000 days" said nft art is absolutely a bubble and exchanged all his ether to usd . as the clout of nft increasing, the dark side of nft was uncovered which included unauthorized nfts, wash trading and scams. for example, a cryptopunk nft was sold for 532 million usd in december 2021 and it was proved to be traded between many wallets of a single user . some rumors mixed with real events were reported by the mindless media, making the nft even more elusive.consequently, some questions come up and should be solved urgently: is nft truly (or to what extent) a bubble hidden behind the prosperous facade, and how to discern the bubble in the trading ecosystem of nft? unfortunately, little is known about the ecosystem of nft because most of the studies focus on traditional tokens or just analyze the characteristics of users and contracts, not the nfts . furthermore, measuring the activeness of nfts is a challenging task due to the fact that there is only one nft with the same token id, which is significantly less than the traditional tokens. assorted humanistic values contained in nfts also make the trend of their prices much more intangible than traditional tokens, which are just a name. in an attempt to fill the gap in research and disclose the characteristics of nft ecosystem, this paper proposes an approach for analyzing the erc721 and erc1155 nft ecosystems. the frame of our work can be seen in figure 1, we divide our work into four phases. the data collection phase, which is the first phase, collects all nft transaction records and event logs on ethereum. by analyzing this data, we classify the actions in nft trade into three categories chronologically: creation, transfer, and hold. then, we construct three graphs to model the nft trading, i.e., nft creation graph (ncg), nft transfer graph (ntg) and nft hold graph (nhg). in the third phase, we conduct a systematic analysis of the three graphs and extract new findings from them. moreover, by analysing the nft trade data, we have an overview of the behavior of nfts and propose new indicators to measure them. based on the statistics and indicators, we propose a new approach to detect wash trading issues in the nft trade network finally.in summary, we make the following contributions.(1) to the best of our knowledge, we are the first to conduct a comprehensive investigation on the whole ethereum erc721 and erc115 nft ecosystem. we outline the characteristics of nft traders via graph analysis and propose new nft indicators to quantify their activeness and value. we also summarize the trends of some quantitative criteria of the whole nft ecosystem. (2) using graph analysis and other methodologies, we acquire novel observations and findings about both nft traders and nfts in the ethereum nft ecosystem. they can help us gain a more comprehensive understanding of this ecosystem.in particular, we find that some anomalies like automatic programs, scam projects, and wash trades also exist. 
(3) by combining the new indicators with graph analysis, we propose an algorithm to detect the bubble nfts. the reported cases show the feasibility and effectiveness of our algorithm. we will release all the relevant data and codes after publication. the rest of the paper is organized as follows. after reviewing related work in section 2, we detail the data collection method in section 3. section 4 answers 3 important questions about the users in the nft trade network. section 5 analyzes the characteristics of nfts and proposes some new indicators to measure their activeness. after presenting the new approach for detecting bubble nfts mainly based on the two new indicators in section 6, we conclude the paper and discuss future work in section 7. the same to the market capitalization of nft, after it peaked at around 36 trillion usd in january 28th 2022, a fluctuation appearing and brought it to, fortunately, 23 trillion usd in september 5th 2022.consequently, some questions come up and should be solved urgently: is nft truly (or to what extent) a bubble hidden behind the prosperous facade, and how to discern the bubble in the trading ecosystem of nft? unfortunately, little is known about the ecosystem of nft because most of the studiesfocus on traditional tokens or just analyze the characteristics of users and contracts, not the nfts. it is worthy mentioning that paperanalyzed the nft market by examining the number of nft sales, nft trade volume and the number of unique blockchain wallets that traded nfts. unexpectedly, despite the fact that erc1155 can produce limitless kinds of nfts in a single smart contract, the number of creators who use it to make nfts is substantially lower than that of erc721 users, suggesting the characteristic "uniqueness" is better embodied in erc721 nfts.by analyzing the number of nodes and edges of ntg, we know that 5,078,013 accounts transfer nfts 56,641,999 times, where erc 721 nfts are transferred 48,429,314 times by 4,442,408 accounts and erc1155 nfts are transferred 8,212,685 times (17% of erc721) by 1,433,861 (32% of erc721) accounts. by examining the edges and nodes belonging to different protocols, we find that 3,616,144 accounts hold erc721 nfts and 842,380 accounts hold erc1155 nfts and 575,538 accounts hold both kinds of nfts. furthermore, the massive difference in average ownership between erc721 nfts and erc1155 nfts may be caused by the fact that the number of accounts that collect erc1155 nfts is much lower than that of erc721 nfts. by classifying the leading nft, we find that among the 15,983,602 nfts in the 964 nft series, the gaming has 8,893,870 nfts in 496 series, and the ens has 1,602,738 nfts in only 5 series, with more details shown in the appendix. the nft turnover ratio is calculated by dividing the number of nft transactions in the series by the total number of nfts in the series. the ranking of market capitalization proportion of leading nfts grouped by different categories is collectibles (67%), gaming (11%), decentraland (9%), art (8%), music, ens and sports, which roughly corresponds to the market capitalization ranking of all nfts indicating that our choice of nfts is without loss of generality. similarly, to identify the single bubble nft in the series, we also propose the fratio, which divides the price of this nft by the floor price in the same nft series. 
if the volume and fratio of this nft are very high and its p value is modest, suggesting that its activeness and value are extravagant when compared to other nfts in the same series, we consider it a bubble nft. if the volume and fratio of the nft are high but the p value is large, indicating that the transactions of the nft are not centralized, we examine the number of its transferors to determine whether it is a bubble nft. moreover, we proposed new indicators to measure the activeness of nfts and a new approach to find bubble nfts, especially wash trade nfts. in the future, we plan to enhance our bubble nft detection algorithm by adding more comprehensive characteristics and making it capable of detecting additional types of bubble nfts like scam nfts and nfts created by automatic programs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/348.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/348.txt new file mode 100644 index 0000000000000000000000000000000000000000..67f7b0d7b7b1f823a8b9eac25f532fe2421cb7dc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/348.txt @@ -0,0 +1 @@ +the convergence of disruptive digital technologies, such as artificial intelligence (ai), internet of things (iot), cloud computing, high-speed wireless communication, and blockchain, among others, has driven the world into a new wave of a technology revolution, known as the "fourth industrial revolution" (4ir) (schwab, 2017), also known as the "digital revolution" for the central role of digitalization in the 4ir. in 2011, germany first launched their industrie 4.0 initiative aiming at leveraging cyber-physical-systems (cps) to build smart factories and strengthen their leadership in manufacturing. the us launched a national advanced manufacturing program in 2014 and declared manufacturing to be a national priority. recent five-year plans of china since the 2010s actively promote critical technologies towards 4ir. industry 4.0 (i4.0) is a term frequently used as an interchangeable term for 4ir; however, they have some subtle differences in their focuses. in this article, we use the 4ir to address the revolutionary change in paradigms and tools, and we use industry 4.0 to address the targeted industrial systems (with manufacturing in the core as the fundamental component) in the 4ir. briefly, industry 4.0 is the target or goal, and the 4ir is the movement or process toward that goal.a defining feature of the fourth industrial revolution is digitalization. associated with this feature, a pervasive and profound digital transformation is ongoing in almost every aspect of human society globally. digital engineering is the digital transformation in the field of engineering, and digital engineering transformation is at the center of 4ir. in 2018, the us dod launched their digital engineering strategy (us dod, 2018;zimmerman, gilbert, & salvatore, 2019). this strategy requires using and sharing formal models and digital data across engineering lifecycle and organizational boundaries through a trusted "authoritative source of truth". this move will impact the us defense industry and propagate to other industry sectors and change how engineering is conducted. digital engineering transformation is about engineering paradigm shifts in the 4ir, and ai is a critical enabling technology in the digital engineering transformation.ai has achieved remarkable progress in recent years. 
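For the indicators discussed above, the following toy sketch computes a per-series turnover ratio, a per-token fratio against the series floor price, and a crude wash-trading flag for tokens traded repeatedly among very few wallets. The records, field names, and thresholds are invented for illustration and are not the paper's detection algorithm.

# Toy sketch of NFT activity indicators and a naive wash-trade flag.
from collections import defaultdict

# toy transfer records: (series, token_id, seller, buyer, price_eth)
transfers = [
    ("punks", 1, "0xA", "0xB", 60.0),
    ("punks", 1, "0xB", "0xA", 62.0),
    ("punks", 1, "0xA", "0xB", 65.0),   # same pair trading back and forth
    ("punks", 2, "0xC", "0xD", 5.0),
    ("cats",  7, "0xE", "0xF", 1.0),
]
tokens_in_series = {"punks": 3, "cats": 10}   # total tokens minted per series (assumed)

by_series = defaultdict(list)
by_token = defaultdict(list)
for s, tid, seller, buyer, price in transfers:
    by_series[s].append(price)
    by_token[(s, tid)].append((seller, buyer, price))

for s, prices in by_series.items():
    turnover = len(prices) / tokens_in_series[s]   # transactions / tokens in the series
    print(f"series {s}: turnover ratio = {turnover:.2f}")

for (s, tid), trades in by_token.items():
    floor = min(by_series[s])                      # series floor price (toy proxy)
    last_price = trades[-1][2]
    fratio = last_price / floor                    # price relative to the series floor
    parties = {w for sell, buy, _ in trades for w in (sell, buy)}
    volume = sum(p for _, _, p in trades)
    washy = len(trades) >= 3 and len(parties) <= 2 and fratio > 5   # assumed thresholds
    print(f"{s}#{tid}: fratio={fratio:.1f}, volume={volume:.1f} eth, "
          f"distinct wallets={len(parties)}, wash-trade suspect={washy}")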
at the same time, the disruptive impacts and fast growth of applications of ai systems (particularly ml) also raise broad concerns about an ai system's reliability, safety, security, privacy-preserving, fairness (or bias-free), explainability, traceability, transparency, and accountability, among other qualities. research on those concerns has been ongoing under the umbrella of "trustworthy ai" (aaai presidential panel on long-term ai future, 2009;eu ai hleg, 2019;h. liu et al., 2021;nsf, 2019;stone et al., 2016;wing, 2020). trustworthy ai has two primary aspects: societal effects (ethics) and technical performance (dependability). the ethics of ai concerns the long-term impacts of ai on humans and human society. the central principle is to use ai for good or for purposes beneficial to humans. dependability concerns the competency of ai systems on technical matters. given the critical role of ai in the 4ir, the recent concerns about trustworthy ai also propagate into industry 4.0 systems. the ai community has been striving for many decades to push the boundary of machine intelligence. the engineering community will bring ai technologies and ethical principles together to deliver trustworthy cyber-physical-social smart systems to human society. this article will discuss the following research questions:(1) what are the fundamental changes brought by the fourth industrial revolution?(2) more specifically, what are the fundamental changes in engineering? what is digital engineering?(3) what are the main uncertainties associated with the 4ir? what is trustworthy ai? (4) what are emerging engineering paradigm shifts in the 4ir? the contents of this article are organized as follows. section 2 discusses the answer to the first question by investigating the patterns of the four industrial revolutions, focusing on the ongoing fourth revolution. section 3 continues with the second question to discuss digital engineering transformation in the 4ir. section 4 discusses the uncertainties in the 4ir and trustworthy ai. section 5 discusses some open issues and focuses on the emerging engineering paradigm shifts. finally, section 6 concludes the article.the convergence of disruptive digital technologies, such as artificial intelligence (ai), internet of things (iot), cloud computing, high-speed wireless communication, and blockchain, among others, has driven the world into a new wave of a technology revolution, known as the "fourth industrial revolution" (4ir)(schwab, 2017), also known as the "digital revolution" for the central role of digitalization in the 4ir. digital engineering transformation is about engineering paradigm shifts in the 4ir, and ai is a critical enabling technology in the digital engineering transformation., 2013). as a result, the level of automation and autonomous functioning will stay at a slightly higher level than in industry 3. (2) the uncertainties caused by ai systems: the fast growth and broad applications of ai systems or embedded ai components in various systems have triggered much concern in two aspects: technical competence (such as reliability and security) and ethical concern (such as transparency, bias towards groups of people, human-machine relations, and others). 
although the advances of ai today are still domain-specific (or narrow ai) rather than general ai, the remarkable achievements, the disruptive impacts, and the fast growth of applications of ai systems triggered concerns and research on trustworthy ai systems.(1) ethical ai, focusing on social effects, mainly ethical considerations, essentially, the profound effects ai systems bring to humans, human groups, and human society, and advocating using ai to benefit humans as a central principle. the 23 asilomar principles (future of life institute, 2017) and the eu helg ethical ai guide (eu ai hleg, 2019) reflect a broad range of concerns about ethical issues of ai systems. naturally, this aspect of concern brings the focused attention partially back to more classical trustworthiness properties of engineering systems but with a focus on ai systems or ai components and their impact on the larger systems.in digital engineering transformation, it is an excellent opportunity for the engineering design community at large to bring the new capabilities of ai and the trustworthy ai principles together in various engineering systems design for human society to leverage the power of ai and at the same time to avoid or minimize the potential negative impacts.in previous sections, we have discussed the characteristics of 4ir and their fundamental impacts, digital engineering transformation -the manifestation of 4ir in engineering, and trustworthy ai -the leading technology in the digital transformation for i4. the need for engineering paradigm shifts in the 4ir is driven by the engineering environment, the disruptive digital technologies, the associated higher socialeconomic needs, and the new challenging problems, such as the trust issues of ai systems (as discussed in section 4).for the digitalization of engineering, basically, we need (i) to digitalize engineering artifacts, engineering processes, and enterprises; (ii) enable the sharing and interoperability of digitalized artifacts across the engineering lifecycle; (iii) to develop digital model-based engineering. just name a few, in digitalization, should the standard forms for digital representation and augmentation be supported by a centralized standardization (such as creating international standards) or a distributed evolutionary standardization (such as many ontologies competing to be standards at a fine-grained level and evolving gradually and naturally)? how do we achieve trustworthy ai systems in 4ir? what are digital trust mechanisms for digital engineering? many trust mechanisms used in cloud service(huang & nicol, 2013)are applicable to digitalized products, systems, and services, while still, what new mechanisms should be introduced? should be the sharing of digital engineering models and data in a centralized way or distributed way? there are many issues and challenges ahead as we conduct engineering in a very different new digital environment(coatanéa, nagarajan, panicker, & mokhtarian, 2022;horváth, 2022;huang et al. the engineering design community at large is facing an excellent opportunity to bring the new capabilities of ubiquitous machine intelligence and trustworthy ai principles, as well as digital trust, together in various engineering systems design to ensure the trustworthiness of systems in industry 4. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/349.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/349.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3d930e08317f9ce5e503081e57313c7e526763a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/349.txt @@ -0,0 +1 @@ +before the advent of social media, brand crises were largely caused by journalists' contributions. nowadays, a firestorm is a cluster of consumers' digital word of mouth that highlights some communication error, or some terrible mistake made by a company. the cambridge dictionary defines the firestorm as "a sudden, and sometimes violent reaction" and the shitstorm as "a wildly chaotic and unmanageable situation, controversy, or sequence of events". in this paper, i will use both these terms interchangeably. during the last years, many firestorms took place on the internet, mainly due to the increase in the number of users on social networks. in some cases, firestorms have been formally studied to better understand this phenomenon. in 2007, several researchers debated over firestorms, and one of the main outcomes is that "a natural science model of the research process is suitable for studying the social world but a central issue remaining of whether the social world can, and should be, studied according to the same principles, procedures, and philosophy as the natural sciences". this is relevant because today i am actually able to study and evaluate social dynamics by using the massive amount of data coming from the digital world, with particular emphasis on social networks. firestorms are not made of a single event with a standard behaviour; instead they are caused by non-linear dynamics leading to complex behaviours. due to this, companies must have appropriate procedures to respond to various crisis situations. lehtonen's theory shows that a firestorm develops in five stages: (1) latent stage, where weak signals of the upcoming crisis are received; (2) triggering event, where the subject becomes the target of news and social media attention; (3) the subject is in the top-news and the media attention spikes; (4) the media attention calms down to the level of general philosophical and ethical discussion; and (5) there are only minor media hits and attention is guided to other issues. as firestorms begin when there is a service failure, a social failure or when a company fails to communicate properly, this kind of error can be reduced by following appropriate procedures. however, most of the existing quality and security procedures, such as the ones suggested by iso 9001:2015 and iso/iec 27002:2022, are not adequate for a multi-domain cyber and social attack: with regard to iso/iec 27002:2022, social attacks are outside its scope, while iso 9001:2015, even though it focuses on better business process quality and thus on lowering the firestorm risk from the public, does not mitigate a firestorm started by an attacker. hence, in this paper i theorise that it is possible for an attacker to intentionally cause a firestorm attack to undermine the reputation of a company, with the side-effect of advantaging the competitors. 
i argue that self-organised firestorm attacks require a high number of bots that are already active on social media: in this case, bots start the firestorm on the target company, spreading fake news (or magnifying a certain event, e.g., a mistake made by the company in the past) that will cause a high volume of real people to react negatively and continue the social attack, unknowingly on behalf of the adversary. additionally, i argue that open source intelligence (os-int) could allow an adversary to identify weak spots in the organization, namely people who most likely cannot react properly or defend themselves from the firestorm, hence not being able to timely mitigate its impact. many workers have a linkedin, facebook, or twitter account: moving the firestorm on the social media accounts of people who work for the target company can lead to an extremely stressful situation for workers. this could be even worse for people who do not often deal with public relations, and could cause confusion, panic and distress. in fact, when a firestorm arises, even people who work on communication processes and managers can panic, and the fear of losing customers and partners can be very detrimental for any company. when people working in the target firm are in this altered status, i argue it is possible to elaborate a social engineering strategy to capture protected information: in this case, firestorms not only serve the purpose of undermining the corporate image, but are also used as a diversion for a social engineering attack. in fact, while most important organisations adhere to best-practices listed in security standards like iso/iec 27002:2022, during a social attack like firestorms, some best-practices and procedures may be distorted or bypassed, either intentionally or by mistake, due to the pressure applied to people who are in charge of complying with such procedures. contributions. the paper makes these contributions: 1) i explain how to make an automated and organized firestorm attack, with only a few manual operations such as the choice of a topic and of a hashtag; 2) i introduce a taxonomy of possible actions that the attacker could perform while doing the firestorm; 3) i illustrate how the author of a firestorm can evade detection for their attack by targeting single workers instead of the company profiles, while increasing the damage done to the firm; 4) i show possible long and short term procedures that a company can implement to mitigate the effect of firestorm attacks. 
the paper makes these contributions: 1) i explain how to make an automated and organized firestorm attack, with only a few manual operations such as the choice of a topic and of a hashtag; 2) i introduce a taxonomy of possible actions that the attacker could perform while doing the firestorm; 3) i illustrate how the author of a firestorm can evade detection for their attack by targeting single workers instead of the company profiles, while increasing the damage done to the firm. identifying the people who are most proud to work for the attacked company can also be helpful in exerting more pressure on the company (since they have more to do with the value of the company). shifting the attack on employees has another side-effect, which is beneficial to the attacker: the organisations that are responsible for the public cyber security in every country cannot see the firestorm attack on the company page, because the firestorm is focused on workers only such organisations will hardly be able to detect all comments and posts focused on workers, allowing the attacker to create a smoky form of the attack, which can bypasses conventional security measures, procedures and strategies.1) the value of the company on the financial market could rapidly decrease;2) people who worked in the company during the firestorm might be subject to discrimination in future, especially if the firestorm was caused by a (supposedly) unacceptable mistake that could have been avoided,.when many workers in the company are panicking, the organisation's cco (chief communication officer) will elaborate and react to firestorm on company pages, however, this cannot stop the social attack on the individual profiles of the employees.b) defence as a service: the adversary contacts the attacked firm, but instead of showing they are in charge of running the attack and asking money to stop it, they try to sell a fire(storm)fighter service to the victim, supposedly consisting on bots defending the reputation of the firm: this is basically a reversed firestorm, in which those same bots that built the latent state now defend the company: to avoid drawing excessive attention, the attacker might slowly change the proportion of attacking bots versus defending ones, until they are all defending the company. i can see that the company regains more than half the value lost during the next two months, however, the ransomware attack causes another drop in the financial value of the company due to customers losing trust in the company again, this time from a security perspective. financial value of cd projekt red and critical events tacting allied/partner companies for help with the various attacks on social media; 3) create in advance supporting bots that will defend the company automatically; 4) create an international database of accounts that have made firestorm.if a company has done something enormously wrong in the past, it is possible that every time the same company does something wrong, there is a chance that another firestorm can restart, either for the recent event or also for the past one. a) social failure: if the firestorm is linked to a partner company, or only a certain sector of the company is under attack, immediately distance yourself from them. 
even if it is complicated given the number of partners, quality standards and corporate continuity, this action, if done in time, creates a good defensive shield at the communication level, as people can understand that the company itself has also understood the problem, limiting the damage; timing is essential during firestorms, first of all to understand whether the type of firestorm is real or artificial (you can tell by the date of creation of the accounts that do the firestorm - if the initial accounts were born recently, they are probably bots, hence artificial); secondly for improving the cyber defence and being prepared for a possible cyber attack; thirdly for the public reaction, because it means that the affected company has noticed the failure faster or as fast as other people (who are doing the firestorm on social networks) and will promptly react to the problem, reassuring customers that it will be solved. i introduce a novel model allowing researchers and companies to (1) understand when companies and organisations have a fragile defence against a social-cyber attack, (2) illustrate how companies and organisations can defend themselves from a firestorm, (3) prove that a social-cyber attack must be treated as a possible high-risk, multi-domain event, and (4) show a new model of cyber attack, with a multidisciplinary sociological approach that increases the potential of common cyber attacks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/35.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/35.txt new file mode 100644 index 0000000000000000000000000000000000000000..4187958ac8c6ba42ea19eb86f906c86859019825 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/35.txt @@ -0,0 +1 @@ +summarization is an important natural language generation (nlg) task. a problem that goes hand in hand with it is summary evaluation, which quantifies the quality of a summarizer or a system summary it generates. the traditional approach to automated † summary quality assessment is reference-based, such as rouge (lin, 2004), bertscore (zhang* et al., 2020) and mover-score (zhao et al., 2019), which assesses a system summary against one or a plurality of human-written reference summaries. † the ground truth is still human evaluation. requiring highly educated human labor, reference summaries are very costly to obtain. therefore, many reference-free metrics have emerged recently (scialom et al., 2019;vasilyev et al., 2020;bao et al., 2022), which directly compute a score between a system summary and its source document. however, the performance of reference-free metrics has historically lagged behind that of reference-based metrics because a human-written reference summary serves as a fluent and comprehensive representation of the key facts in the input document and thus gives reference-based metrics an advantage. recently, large language models (llms) have shown promise in building reference-free summary quality metrics. metrics based on llms like gpt-3.5/4 (liu et al., 2023;wang et al., 2023;gao et al., 2023) have outperformed both reference-free and reference-based baselines. however, llms are computationally expensive, and the closed nature of gpt-3+ restricts their usage with legal and reproducibility ‡ limitations. 
a more viable solution that uses much more cost-effective language models is highly expected. to build an accurate but efficient metric, we revisit the reference-based metrics and hypothesize that they can be repurposed into reference-free metrics by directly comparing a summary with its source document. after being repurposed, bertscore outperforms not only its original reference-based version, but also most existing reference-free metrics across the summeval, newsroom, and tac2010 datasets on both semantic and linguistic aspects. notably, the repurposed bertscore achieves superior or comparable performance to gpt-3.5-based summarization evaluators (‡ https://hackingsemantics.xyz/2023/closed-baselines/). it is worth noting that these results are achieved using foundation models with significantly fewer parameters (<0.5b) compared to gpt-3.5's extensive 175 billion parameters. we hope this paper can inspire more work into zero-shot summarization or nlg evaluation using cost-effective (e.g., <1b parameters) lms. our source code is at https://github.com/sigmawe/docasref. in summary, the key findings of this paper include: 1. the proposed reference-free repurposing does improve performance for transformer-based metrics including bertscore and bleurt. 2. the repurposed bertscore can significantly outperform all non-gpt-3.5 baselines using underlying lms of similar capacity. 3. with lms hundreds of times smaller, the repurposed bertscore can further match the performance of those based on gpt-3.5 in most of the cases. metrics for summary evaluation fall into two categories: the reference-based (short as ref-based) ones, which are functions comparing a candidate summary and a human-written reference summary, f (system summary, reference), and the reference-free (short as ref-free) ones, which are functions that evaluate a candidate summary based solely on the input document. although ref-based metrics were originally designed to compare a system summary against a reference summary, we hypothesize that they can still be effective in directly comparing the system summary with the document. tac2010 reports three scores, including pyramid (nenkova et al.). the answer is yes! despite that ref-based metrics historically perform better than ref-free metrics, table 1 shows that the three modern metrics, moverscore, bertscore, and bleurt, gain their performances after being repurposed, on nearly all aspects of all datasets. in this case, the original document may be better than the reference summary to compare with for judging the summary quality. despite the challenges, the repurposed bertscore outperforms all baselines except suenes, which is finetuned using data explicitly augmented for the summary evaluation task, on all aspects. 
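The repurposing idea described above amounts to passing the source document where a reference summary would normally go. A minimal sketch with the off-the-shelf bert_score package follows; the texts are toy placeholders, the language setting is a default choice, and this is not the authors' released code (their repository is linked above).

```python
# Minimal sketch of reference-free "repurposed" BERTScore: the source document
# is placed in the reference slot instead of a human-written reference summary.
from bert_score import score

document = "the city council met on tuesday and approved a new budget for road repairs ..."
summary = "the council approved a road-repair budget on tuesday."

# Reference-based use would be score([summary], [reference_summary], ...).
# Repurposed, reference-free use compares the summary directly with its document.
P, R, F1 = score([summary], [document], lang="en", verbose=False)
print(f"precision={P.item():.3f} recall={R.item():.3f} f1={F1.item():.3f}")
```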
comparing a summary with a document is theoretically more challenging than comparing it with a reference, because information is more sparse in a document than in a reference. extraneous information in a summary causes the summary's context to diverge from that of the original document, resulting in a reduction of semantic similarity, even when comparing the same token in the summary to its counterpart in the document. because the document and the summary are independently embedded in bertscore, the bertscore can be high only when important information in the document and the summary aligns. in this paper, we explore repurposing summary evaluation metrics that were originally designed or trained for reference-based use as reference-free metrics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/350.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/350.txt new file mode 100644 index 0000000000000000000000000000000000000000..cd51047b963f87e0d098dbdfe33580205601f4a8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/350.txt @@ -0,0 +1 @@ +the increasing use of artificial intelligence (ai) technology in various domains has the potential to cause individual and social harm. examples of bias and discrimination in ai applications include court decisions, job hiring, online ads, and many other areas prone to bias. these algorithmic decisions have economic and personal implications for individuals. therefore, fairness, accountability, transparency and ethics (fate) in ai must be properly regulated for responsible use cases, particularly in high-stakes domains. studies have shown that machine learning models can discriminate based on race and gender. fate in ai is intended to address the social issues caused by digital systems, but the current discourse is largely shaped by more economically developed countries (medc), raising concerns about neglecting local knowledge, cultural pluralism, and global fairness. as ai systems become more integrated into various products, they are a major driver of the fourth industrial revolution (4ir) and transformation. therefore, it is essential to understand the fate-related needs of different communities, as ai affects a wide range of people. ensuring effective transparency cannot be a one-size-fits-all approach, as this could disproportionately affect different communities. to this end, more contextualised and interdisciplinary research is needed to inform algorithmic fairness and transparency. additionally, diversity and sociodemographics must be taken into account when designing and governing algorithms that affect the public. the most effective approach is to involve the affected public and ai developers to incorporate community-specific fate needs. ai practitioners must adhere to social values to ensure responsible ai for the public good, and focus on more representative values of the community affected by ai. through cooperative, inclusive, and community-led design of ai applications, algorithmic disparities can be effectively addressed, and relevant stakeholders within the community can ensure better policing of ai operations. this study aims to explore the areas of ai that are under-served in terms of responsibility and accountability. 
it seeks to evaluate the effectiveness of transparency methods in relation to fate-related issues, as well as to explore ways in which local communities can be involved in the design and development of ai systems that affect them. additionally, it will provide useful insights and recommendations to stimulate action towards representative and responsible ai. to this end, a community of 73 online users in nigeria from the global south was chosen as a case study to examine the public's views on fate in ai. nigeria was selected due to its large population and the increasing use of ai-powered products and services. in addition, the country is ranked 8th in the world for internet users , indicating a growing ai workforce in africa.the paper is divided into the following sections. section 2 provides the background and related studies. section 3 outlines the method and details of user studies. sections 4 and 5 present the results and a discussion, respectively. finally, section 6 concludes the study. ai practitioners must adhere to social values to ensure responsible ai for the public good, and focus on more representative values of the community affected by ai. through cooperative, inclusive, and community-led design of ai applications, algorithmic disparities can be effectively addressed, and relevant stakeholders within the community can ensure better policing of ai operations. ai is also used in journalism for newsgathering, production, and distribution processes, and in public administrations to explore the role and implications of ai. despite the potential of ai to address various social issues, the development and deployment of ai systems requires a significant amount of energy. in the agi era, will ai be able to determine what is right and wrong if its intelligence surpasses that of the human user? what would be the impact or risk of jobs being replaced by ai due to automation in areas with already high unemployment and other social problems? addressing current ai challenges in underserved communities will help address the most pressing ai issues. consequently, fairness, accountability, transparency, and ethics in ai are aimed at developing and ensuring responsible ai that incorporates moral behaviour and avoids encoding bias in ai decisions. additionally, ethical frameworks such as unesco's recommendation on the ethics of ai, the world economic forum's blueprint for inclusive ai, and the organisation for economic co-operation and development (oecd)have been proposed to address the human rights implications of ai systems. explainable ai (xai) provides explanations to humans to help them better understand ai systems, and can be used to identify and address fairness issues. 
this study is focused on how the communities can lend their voice and influence fate in ai discourse, in line with existing initiatives that support ai developers to factor in community-specific needs.noting that ethical ai deals with incorporating moral behaviour to avoid encoding bias in ai's decisions, the public can dictate norms, values and other ethical requirements to be reflected in ai.recommendation relevance construct relates to the user's perception of the relevance of the recommendation to the user awareness of ai's role construct includes questions about general knowledge about ai and its role in recommendation services need for transparency construct deals with the openness or relatable the explanation is to the user privacy concern construct includes privacy and willingness to share data for better recommendation services.this part of the study aims to inform how best to improve fate in ai and mitigate bias that could be encoded in ai. therefore ensuring accessible and inclusive ai technology will require (1) sound and inclusive policies from governments to foster useful ai development and (2) technology companies to implement ai systems that will operate within the remit of regulation, sociodemographics, and other context-specific considerations. for instance, in nigeria geolocation information is required to balance or inform the decision to take noting that ethical ai deals with incorporating moral behaviour to avoid encoding bias to ai's decisions, the public can dictate norms, values, and other ethical requirements to be reflected in ai.-the study examines the prevailing issues in ai applications and how fate in ai might better serve in places not traditionally served by ai systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/351.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/351.txt new file mode 100644 index 0000000000000000000000000000000000000000..59b0738a979e9ca8eba624e1c76cc53a32c5eec8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/351.txt @@ -0,0 +1 @@ + among which, mooc course quality evaluation is one of the vital tasks in mooc platform management for helping improve the course materials, promote students' learning efficiency(jiang et al.current studies in mooc quality evaluation lie in two aspects: (1) manual evaluation(wang et al.second, although multi-view node embedding can be obtained by performing representation learning on mooc hin, how guaranteeing the validity of the learned course representations remains a challenge. specifically, the validity of course representations lies in three aspects: (1) the course representations should preserve the same semantics as the raw course portfolio; (2) the representations in each view should be consistent with the unified representations of the course; (3) the course representations should be aligned with the overall representations of the mooc platform. therefore, to ensure validity, we aim to maximize the three correlations between the pair of course representations and the raw course portfolio, the pair of unified course representations and each view, and the pair of course representations and platform representations. 
formally, we aim to find a mapping function f : g → h that takes the mooc hin g as input, and outputs information-aware representations h = {h_1, h_2, ...}. a_pos and a_neg are derived from the positive course node pairs and the negative course node pairs respectively, based on equation 3. att(•) indicates the self-attention function, and h indicates the unified course representation, which has integrated the self-attention weights of different meta-paths. we use mutual information (mi) to quantify the agreement between the representation h learned by the course node in the unified view and the representation x of the raw features of the mooc. although we deconstruct heterogeneous graphs into different views, the course representations in each view are expected to be consistent with the unified course representations in semantics, which is defined as multi-view consistency. first, we get the multi-view representation and the unified course representation h. then, we use a neural network to estimate mi(h; h) and maximize the mutual information between the unified course representation and the multi-view representation. we can maximize the mutual information between the platform representation and the unified course representation using the binary cross-entropy loss of the discriminator as follows. in this paper, we study the problem of mooc course quality evaluation with mooc heterogeneous information networks and propose an information-aware graph representation learning framework for multi-view mooc quality evaluation. third, we identify three types of validity of course representations, with (i) the agreement on expressiveness between the raw course portfolio and the learned course representations; (ii) the consistency between the representations in each view and the unified representations; and (iii) the alignment between the course and mooc platform representations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/352.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/352.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee8ce388cee8944ea274b5623aa62596b711487c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/352.txt @@ -0,0 +1 @@ +the aquaculture concept is a farming approach that comprises a similar method as agriculture but involves farming aquatic organisms such as fish rather than plants. farming fish not only helps reduce the seafood supply gap but also provides a way to acquire an environmentally friendly protein option. moreover, compared to other protein resources, it is also an efficient option for consumers. aquaculture can comprise either extensive or intensive production approaches. extensive aquaculture can have very little monitoring over the environment of the cultured organism, whereas intensive aquaculture is based on a highly controlled environment, which may include monitoring several requirements such as temperature, dissolved oxygen, and diet conserved within particular desired levels. a fish farm, which is water-based agriculture, is a subset of aquaculture. fish farming is increasing rapidly in order to sustain the growth of fish as a protein source. about 62.5% of the world's farmed fish are produced by utilising rivers, lakes, and fish farms, whereas the core functionalities of a fish farm can include breeding and hatching fish. a fish farm can use fresh water, sea, salt water, or brackish water to perform its operation. 
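The binary cross-entropy discriminator objective mentioned in the MOOC quality-evaluation passage above can be illustrated with a small, assumed sketch in the spirit of Deep Graph Infomax: a bilinear discriminator scores (course representation, platform representation) pairs, and positive pairs are separated from corrupted ones. Dimensions, batch size, and the corruption scheme are assumptions, not the paper's implementation.

```python
# Hypothetical sketch of a discriminator-based mutual-information objective
# between unified course representations and a platform-level summary vector.
import torch
import torch.nn as nn

class BilinearDiscriminator(nn.Module):
    def __init__(self, dim: int):
        super().__init__()
        self.weight = nn.Parameter(torch.empty(dim, dim))
        nn.init.xavier_uniform_(self.weight)

    def forward(self, h: torch.Tensor, s: torch.Tensor) -> torch.Tensor:
        # score_i = h_i^T W s_i, one logit per (course, platform) pair
        return torch.einsum("bd,de,be->b", h, self.weight, s)

dim, batch = 64, 32
disc = BilinearDiscriminator(dim)
course = torch.randn(batch, dim)                    # unified course representations
platform = torch.randn(1, dim).expand(batch, dim)   # shared platform summary
corrupted = torch.randn(batch, dim)                 # stand-in for a corrupted-graph encoder output

logits = torch.cat([disc(course, platform), disc(corrupted, platform)])
labels = torch.cat([torch.ones(batch), torch.zeros(batch)])
loss = nn.BCEWithLogitsLoss()(logits, labels)       # BCE loss of the discriminator
loss.backward()
print(float(loss))
```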
there are various factors that aquaculture needs to ensure when farming fish. food is an essential substance as it supplies energy inputs to contain proper growth . likewise, the demand for feed constantly changes in the fish farm ecosystem as the species continue to evolve. however, a traditional fish farm fails to generate continuous allocation of food, thus resulting in vast numbers of mortality. moreover, water is also a crucial element in a fish farm and the key parameter required for the survival of major species. however, it may not always be possible to maintain the water quality variables at proper levels, in order to ensure maximal growth. to tackle those challenges, collecting accurate data from multiple different sensors across the fish farm ecosystem is very important.overall, the demand for seafood continues to increase, and seafood consumption has doubled over the past five decades . on top of that, around 15% of the protein-calorie intake worldwide is related to seafood.the seafood industry can also support economic development within rural areas. in scotland, for example, the scottish government has defined aquaculture as a critical area of economic development . this includes areas around fish farming, especially in the north and west of scotland. the key objective is supporting a healthy and sustainable scottish aquaculture industry through world-leading science and research .while many fish farms provide local data gathering capabilities, sharing the gathered data is often not supported. additionally, the remote nature of farms makes gathering data difficult due to the expense involved in setting up remote communication channels. satellite-gathered data fed directly into a cloud environment through satellite communications can thus offer many benefits to localised data gathering. however, the privacy of this type of communication is challenging and often questioned , . additionally, the security and privacy of the collected data is an ongoing challenge that can only be assured via fundamentally secure digital technologies and approaches .the rapid adoption of blockchain has transformed the operations of aquaculture, resolving many insoluble challenges, whereas, at the same time, it helps store trusted data in an immutable way while accelerating the overall processing of the endorsed task. our work thus outlines the creation of a private-permissioned blockchain infrastructure for the collection of data from multiple sensors within a fish farm environment. while many fish farms provide local data gathering, there is often a lack of sharing of the gathered data and multiple security and privacy concerns . the remote nature of farms often makes gathering data difficult due to the introduced expenses involved in setting up remote communication channels. our work manages to thoroughly investigate the state-of-the-art approaches, finally proposing a modern blockchain-accelerated connected fish farm system within scotland. about 62.5% of the world's farmed fish are produced by utilising rivers, lakes, and fish farms, whereas the core functionalities of a fish farm can include breeding and hatching fish. 
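As a language-agnostic illustration of the "immutable, trusted storage" idea behind the private-permissioned blockchain proposed above (not the paper's implementation, and not a real permissioned ledger), the sketch below chains each sensor reading to the hash of the previous record, so later tampering breaks verification. Field names are invented.

```python
# Toy hash-chained log of fish-farm sensor readings; illustrative only.
import hashlib, json, time

class SensorChain:
    def __init__(self):
        self.records = []

    def append(self, reading: dict) -> None:
        prev_hash = self.records[-1]["hash"] if self.records else "0" * 64
        body = {"reading": reading, "timestamp": time.time(), "prev_hash": prev_hash}
        digest = hashlib.sha256(json.dumps(body, sort_keys=True).encode()).hexdigest()
        self.records.append({**body, "hash": digest})

    def verify(self) -> bool:
        prev_hash = "0" * 64
        for rec in self.records:
            body = {k: rec[k] for k in ("reading", "timestamp", "prev_hash")}
            digest = hashlib.sha256(json.dumps(body, sort_keys=True).encode()).hexdigest()
            if rec["prev_hash"] != prev_hash or rec["hash"] != digest:
                return False
            prev_hash = rec["hash"]
        return True

chain = SensorChain()
chain.append({"sensor": "dissolved_oxygen", "value": 7.9, "unit": "mg/L"})
chain.append({"sensor": "temperature", "value": 12.4, "unit": "C"})
print(chain.verify())  # True
```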
sangirova et al. defined an automated environment for fish farming consisting of a number of different sensors, such as temperature sensors, dissolved oxygen, ph sensors and water level sensors to monitor fish within a tank. the key elements of maintaining the health of the fish within a fish farm relate directly to the quality of the water environment provided, and can be seen as follows. in terms of outputs, the main actuators were: i) an rgb light modulation system, to control light outputs by driving different colours of light and different intensities, ii) heaters to heat the water to the required temperature, iii) inflators to add oxygen into the tanks whenever the dissolved oxygen value falls below a given value, iv) feeders to feed fish at any given times and v) power supplies to support the sensor infrastructure and act as a fail-safe in case a power issue emerges. another work defined a data set of iot-related fish farm data focusing on monitoring the water quality, and yang et al. addressed fish identification. an important element within breeding and production efficiency is the feeding level given to the fish, which can be one of the most costly elements in the fish farming environment. another study also implemented a cloud-based infrastructure for a fish farm in oman, by integrating it into a wi-fi network with esp8266 and the thingspeak cloud database to gather temperature, water level, ph and turbidity. the data gathered for the fish farm contract included: outlier filtering, water level, temperature level, and oxygen level, which then controlled a water pump. elements of the transactions are: collect water level, predicted water level, energy consumption, control water pump, user management farm, sensor management farm, actuator management, predicted water level history, energy consumption history, and water pump history. these sensitive details may include information about specific fish farm metrics that the fish farm can further utilise, commercialise, or use to receive governmental funding, and more. the sensor providers organisation has access only to the non-private data ledger, whereas the fish farm organisation also has access to the private data ledger. in the world of malicious data brokers, a malicious sensor provider may engage in the exfiltration of critical fish farm data, either selling it to other third parties or even tampering with the data to potentially influence the fish farm to make unnecessary buying decisions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/353.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/353.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef3282fff002a3a180172cf6b4a0c1e377f0525a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/353.txt @@ -0,0 +1 @@ +aggressive behaviors traditionally are regarded as intentional acts of harming others with an aroused physical state in face-to-face settings. 1 in the current cyber era, the explosion of information technologies and social media has changed the nature of social communication interactions. the use of social media has been a global phenomenon in human's daily life. users will have greater spontaneity with the perceived anonymity mechanism and are prone to express their feelings in the context of social networks. in this vein, social media create a new channel for users to engage in aggressive behaviors. cyber aggression refers to intentional harmful behaviors towards others through mobile phones, computers, and other electronic devices. 
2,3 it includes hostile behaviors such as social exclusion, malicious humour, and guilt induction. 3,4 past studies have explored aggression behaviors on popular social network platforms (e.g., facebook and twitter) and demonstrated that users' characteristics, such as age and gender, have an influence. 5 for example, researchers found that males were more likely to engage in facebook aggression than females, for example by sending insulting messages and posting aggressive comments. 6 meanwhile, previous studies focused on specific groups like adolescents and children. pabian et al. 7 investigated the relationships between the dark triad personality traits and aggression among adolescents on facebook and found that facebook intensity significantly relates to adolescents' aggression behaviors. it is obvious that cyber aggression results in negative consequences, including broken relationships, substance use, rule-breaking behaviors, and even major criminal activity. 3,8 due to the negative influence, researchers have been trying to identify and detect cyber aggression. 8,9 as a stable individual psychological behavior, cyber aggression behaviors were traditionally measured directly by self-reporting. 3 however, self-reporting weakens the data quality and the validity of conclusions due to several limitations, such as cost, subjectivity, and low flexibility. 10 the popularity of social media provides a new way to explore users' personality and psychological behaviors through large amounts of data. 11 for example, sharif and hoque 9 proposed an aggressive text classification system to classify aggressive bengali text into religious, gendered, verbal and political aggression classes. sadiq et al. 8 extracted comments from social media and classified the comment text into three distinct classes of aggression. however, there is a dearth of research that strives to predict users' cyber aggression behaviors from social media activities and personal characteristics combined with deep learning methods. little research has explored cyber aggression through textual analysis. 12,13 in that, chavan and shylaja 12 used traditional feature extraction techniques like tf-idf and n-grams to detect cyberbullying comments on social networks. al-garadi et al. 13 elaborated the feature selection and tested various machine learning algorithms for the prediction of cyberbullying behaviors. however, conventional machine-learning techniques were restricted in processing natural data in their raw form and required careful engineering and domain expertise. 14 with the current development of deep neural networks, deep learning has made major advances in solving the above problems. specifically, language model pre-training in deep learning has shown effective performance for processing natural language. 15 thus, the advanced bert (bidirectional encoder representations from transformers) model is employed in this study to predict users' cyber aggression. cyber aggression plays a key role in social media platforms' operations and interaction environment. the mass data of social media and computational social science pave the way to delineate individuals' personalities and behaviors. by collecting social media users' self-reports and online data, we proposed a new methodology of employing deep learning models to objectively predict individuals' cyber aggression behaviors. 
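The passage above describes employing BERT to predict cyber-aggression categories (e.g., low / neutral / high for a given aggression type such as social exclusion). A minimal fine-tuning sketch with the Hugging Face transformers library is given below; the model name, the remapping of labels to 0/1/2, and the toy examples are assumptions rather than the study's actual setup.

```python
# Minimal sketch (assumed setup, not the study's code) of fine-tuning BERT for
# a three-way cyber-aggression label.
import torch
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=3)

texts = ["nobody should invite them anywhere", "had a nice quiet day", "great job everyone"]
labels = [2, 1, 1]  # toy examples only: 0 = low, 1 = neutral, 2 = high

enc = tokenizer(texts, padding=True, truncation=True, max_length=128, return_tensors="pt")
loader = DataLoader(TensorDataset(enc["input_ids"], enc["attention_mask"], torch.tensor(labels)),
                    batch_size=2, shuffle=True)

optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
model.train()
for input_ids, attention_mask, y in loader:
    out = model(input_ids=input_ids, attention_mask=attention_mask, labels=y)
    out.loss.backward()
    optimizer.step()
    optimizer.zero_grad()
print("finished one pass over toy data")
```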
this study contributes to detecting this phenomenon and optimizing platforms' organization. 320 active subjects are selected for this research on cyber aggression in social media, including 74 males and 246 females. we employ the indirect aggression scale (aggressor version) 4,5 to measure participants' cyber aggression behaviors, covering three types: social exclusion (10 items), malicious humour (9 items), and guilt induction (6 items). specifically, social exclusion refers to behaviors that work by socially excluding the victim, such as withholding information, leaving someone out of activities, and turning people against someone (social manipulation); malicious humour largely refers to behaviors in which humour is used to harm the victim, such as the use of sarcasm as an insult, intentional embarrassment, and practical joke playing; guilt induction refers to behaviors whereby guilt is intentionally induced, such as the use of emotional blackmail, undue pressure, and coercion. as an example, we label high social exclusion as category 1, neutral social exclusion as category 0 and low social exclusion as category -1. we argue that leveraging an advanced deep learning pretrained model such as bert could potentially improve cyber aggression prediction and increase the potential of automating textual coding. however, we focus on bert model building and cyber aggression prediction, rather than model interpretability for behavior prediction, which will be a promising direction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/354.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/354.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c0ade850b1677cf4970e49c6819f1572841f3a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/354.txt @@ -0,0 +1 @@ +ever-powerful computational ability, the reduced cost of communications infrastructure, and the ever-diminishing size of sensors have enabled the pervasive placement of technologies into the fabric of urban spaces, birthing a movement of the "smart city." 
for many in the so-called smart cities movement, the trend has been towards the instrumentalization of cities, finding greater efficiencies, and the problematization of many facets of urban living (hollands, 2008). for technologists working in this field, the enumeration game is being applied to all domains ranging from mobility and infrastructure to public safety and democratic participation (eagle & pentland, 2009;goldsmith & crawford, 2014;jiang et al., 2013). nevertheless, the defining social characteristics of urban space defy a reduction to a mere optimization problem. these digital technologies and their resultant models and data outcomes have the ability to shape our perspective of the built environment. no different than the well-publicized challenges of bias prevalently found in other algorithmic processes (crawford, 2018;kirchner et al., 2016), the black box methodologies and opaque outcomes can likewise unduly influence our reading of the places we inhabit (schwarzer, 2017). despite this conflict between reductivism and complexity, there is an urgent need to understand how new models and tools may open new avenues for research into public space and urban form in light of rapid urbanization and the increased privatization of urban space (e.g., talen & ellis, 2002). as if it were an iteration from the modernist use of photography in urban planning, the growing ubiquity of deep learning and computer vision applications has created new opportunities to understand cities through imagery (lecun et al., 2015). while there is optimism in how these emerging technologies can allow for a more precise (and perhaps, broad) method for understanding cities, questions remain. within this computational milieu, this paper focuses specifically on the nascent but growing ways these algorithmic tools are being applied to urban planning and management. in one sense, they quantify human behavior in urban space, which offers decision-makers the ability to base policy in more informed ways. in another, this numerical reductivism applied to urban space is blind to the specificity and essential character that makes cities unique places of inhabitation. as such, this paper argues that in addition to situated technologies' reductivist orientation, there is a need for a distinct approach to their use in the understanding of how people inhabit and use these public urban spaces. further, with the increased proliferation of computer-vision and image-based approaches toward the instrumentalization of cities, an urban-specific lexicon is needed for the training, implementation, and adoption of urban technologies. while the use of computer vision technologies has spanned many facets of urban management, such as infrastructure utilization and public safety, this paper considers explicitly using these technologies to understand how people inhabit and use public space. this paper argues that image-based artificial intelligence is a natural progression in the modernist use of photo imagery to capture data on the city. this paper also reviews, in brief, how artificial intelligence is being applied to image-based data to illustrate the potential weaknesses in creating thicker, domain-specific ontologies about the occupants and the spaces they inhabit. further, this paper discusses how thin data is being marketed by both public and private actors to the potential detriment of planning human-centric spaces. 
however, this paper also discusses potential approaches to reconceptualize the methodologies currently used to get toward an urban-semantic description of public space using these algorithmically-based methodologies despite these limitations., 2016), the black box methodologies and opaque outcomes so too can unduly influence our reading of the places we inhabit(schwarzer, 2017).as if it were an iteration from the modernist use of photography in urban planning, the growing ubiquity of deep learning and computer vision applications have created new opportunities to understand cities through imagery(lecun et al. as such, this paper argues that in addition to situated technologies' reductivist orientation, there is a need for a distinct approach to their use in the understanding of how people inhabit and use these public urban spaces.while the use of computer vision technologies has spanned many facets of urban management, such as infrastructure utilization and public safety, this paper considers explicitly using these technologies to understand how people inhabit and use public space. however, this paper also discusses potential approaches to reconceptualize the methodologies currently used to get toward an urban-semantic description of public space using these algorithmically-based methodologies despite these limitations.(jemielniak, 2020)as city officials, urbanists, and urban planners analyze the figurative reams of data available; imagery has also been leveraged as a means of understanding larger urban dynamics.salesses et al. particularly in light of the inability of city bureaucrats to develop or implement new data collection means, greater attention has been paid to technology companies, startups, and academics to create and implement these new technologies within the urban domain., 2020), startups such as miovision, currexvision, and vivacitylabs, and incumbent automotive companies such as honda have developed different algorithms and sensor packages to count and track automotive traffic on roadways, each with slightly distinct approaches informed by the countries from which the companies are from.while not fundamentally opposed to the potential to understand public behavior, these camera technologies can also be used for policing and security purposes at large using the same algorithms, whose ethical rationales have conflicted with civil libertarians.the use of these technologies has come with much criticism, including from the industry itself(shepardson, 2020), for the lack of transparency around the use of these technologies-including their questionable accuracy(hill, 2020;lee, 2020)-and the ethical concerns around their use. as duarte and desouza (2020) argue, urban technologies must consider more fundamentally how the epistemologies behind extensive data methodologies as well as how they shape ontologies and heuristics about cities, as ultimately there will be lasting transformations to both society and space due to the specific responsibility of urban planning in shaping communities. 
these findings would notably lead to new york city passing zoning regulations that required plazas and publicly-owned public spaces to no more than three feet above or three feet below street level to allow for visibility and easy access, and the complete revitalization of bryant park, which in the 1980s saw little activity, to increase its visibility and to add more seating, including its now-familiar moveable furniture.many of the vision-based smart cities technologies that seek to understand how people use space lack specificity for how people use urban space within urbanism discourse." when these technologies are implemented within the complex milieu of cities, the application thus far has erred toward reductive quantifications at the sacrifice of the dynamic characteristics of public space that draw billions of people to live, work, and play. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/355.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/355.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f34d213ee82fbd23aa1e84154b6b556da2750a8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/355.txt @@ -0,0 +1 @@ +the impacts of science gateways on research outputs across diverse fields are well documented . gateway infrastructures can be used to support emerging practices that emphasize the interdisciplinary, collaborative, open, and computationally driven nature of science. however, the adoption of a science gateway as a research platform can require significant adjustments to existing workflows and challenge one's assumptions about how to manage a research program. gateways that do not engage their user communities to build trust and support adoption are less likely to establish a robust research community and therefore limit their potential scientific impacts . the acceptance and use of scientific gateways, like the diffusion of any innovation, depends in large part on the perceived usefulness of the resource. developing gateways collaboratively, with input from user communities may accelerate the adoption of new disciplinary practices afforded by the gateway platform. centralized resources and online collaboration are becoming more widely adopted in many contexts where we believe the "gateway model" provides a context for addressing hard problems and broadening access to key resources.in this manuscript we describe an effort to build a science education gateway to accelerate undergraduate stem education reform. we define reform very broadly to embrace the diversity and dynamic nature of the landscape across which reform happens. effective teaching must be supported in a wide range of institutional settings, student populations, and delivery modalities. there is broad recognition that teaching and learning strategies should evolve to emphasize the adoption of evidence-based teaching methods, student engagement with authentic scientific practices, and broaden participation among traditionally underrepresented communities. these challenges are further complicated by the need to continuously integrate new topics and skills to connect classrooms to contemporary scientific practices and help prepare students to participate in the technical workforce. even when innovations are developed it is a non-trivial undertaking to support the broad implementation of those strategies so that the potential benefits reach as many learners as possible. 
considered at this scale, the acceleration of stem education reform is a wicked problem that will require the development of diverse overlapping strategies that can be applied flexibly across the landscape. furthermore, given the certainty that scientific practices and computational resources will continue to evolve, education reform should adopt a continuous quality improvement process in order to treat reform as ongoing and context specific.in 2014 nsf funded the project "supporting faculty in quantitative undergraduate biology education and synthesis (qubes)" which was designed to "address the nation's growing need to better prepare undergraduate biologists with the quantitative and computational skills needed to be successful in the workplace or in graduate school." given the long history of quantitative biology education reform efforts, the project was organized in part to highlight the visibility of ongoing but isolated reform communities and coordinate faculty access to a diverse collection of existing teaching and learning resources. we adopted the hubzero platform and worked with the science gateways community institute (sgci) to design and deploy a gateway to support quantitative biology education innovation and classroom implementation. over time our mission has evolved to serve the stem education reform community more broadly. at the conclusion of the initial nsf funding the management of the qubes platform was moved into the bioquest curriculum consortium, a well established 501(c)(3) nonprofit, where it is sustained as an open resource for the reform community.in this paper we describe the conceptualization and implementation of the qubes platform as a science education gateway (seg). after an overview of the technical infrastructure (tools) we describe the ways that faculty use of the gateway is facilitated using social infrastructure (practices). we end with a call to action for the undergraduate stem education reform community to explore the potential use of science education gateways as a means to accelerate the reform of teaching and learning."given the long history of quantitative biology education reform efforts, the project was organized in part to highlight the visibility of ongoing but isolated reform communities and coordinate faculty access to a diverse collection of existing teaching and learning resources. we end with a call to action for the undergraduate stem education reform community to explore the potential use of science education gateways as a means to accelerate the reform of teaching and learning. as a science education gateway qubes is designed to lower barriers to faculty participation in stem education reform by making it easier to engage in scholarship around teaching and learning. our target audiences include faculty whose scholarship centers on teaching and learning, with the platform designed to facilitate, document, and disseminate faculty work as they participate in diverse professional activities.the qubes platform is a shared online space that can be used to publish and disseminate open education resources, host distributed meeting and workshop activities, participate in professional learning, and support education reform projects. 
the four platform services (professional learning; oer library access; project support; and customizable workspaces) support overlapping faculty user communities, provide multiple points of entry, and enable manifold use case scenarios.the qubes platform has an open, self-publishing platform (qubes oer library) that uses a git-like version control system for tracking versions, adaptations, attribution, and use metrics. to support autonomy of hosted projects on the qubes platform, we increased group customization on the platform by allowing overrides of components, plugins and modules, including fully autonomous oer libraries (e. these resources are available within a partner support group on qubes for onboarding of new projects, which includes demonstrations of the effective use of the platform to support the creation, maintenance, and sustainability of project workspaces. since the inception of the qubes platform in 2014, we have emphasized working closely with the user community to develop a shared vision for the ways that a gateway can support faculty scholarship around teaching and learning reflected in this quote. here we briefly introduce four high priority areas including: scalable and robust hosting of software tools, including tight integration of these tools with oer; design of teacher portfolios, akin to a linkedin or researchgate for educators; a custom publishing platform that supports peer-reviewed education journal tools, beyond the self-publishing of oer already supported on the platform; and support for discipline-based education research (dber), which is constantly expanding our knowledge on evidence-based teaching strategies (see figure2).as faculty participate in reform projects, publish oer materials, engage with professional learning, and access materials from the oer library, they are building a record of their activity on the qubes platform. we imagine qubes as a platform where curriculum specialists, education researchers, and teaching faculty could collaborate to scale up data collection and explore the impacts of interventions in diverse teaching contexts and across student audiences. we believe that the gateway environment can play an important role facilitating research on faculty change, professional learning, project management, as well as documenting emerging practices as communities adopt gateway infrastructures to evolve their professional practices. our four platform services (oer library access; professional learning; partner support; and customizable workspaces) provide multiple points for faculty engagement and address key aspects of accelerating reform practices. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/356.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/356.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a70cbd9e8cfd57162e4583ab143c7cb587da851 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/356.txt @@ -0,0 +1 @@ +the covid-19 pandemic has greatly changed societies in almost every aspect of life since the end of 2019. one area that has been heavily affected by this global crisis is education. the year 2020 served as a turning point for educators across the globe, causing them to reevaluate their pedagogy and look for innovative ways to overcome the challenge of limiting in-person meetings for teaching and learning. the effects varied depending on the subject being taught, the cultural setting, and the technologies available. 
this paper hopes to contribute to this fast-growing body of knowledge on context-specific innovations by presenting a study that describes the process of prototyping a technology-based innovation in the context of what is known in the educational literature as emergency remote teaching (ert), which was defined as the sudden and temporary shift of delivery to a distance or remote mode of instruction as a result of extremely disruptive phenomena (hodges et al., 2020;mohmmed et al., 2020). results from this study may be of interest not only to researchers in the field of education, but also in other fields like psychology, information technology, and cognitive science as they have implications on the dynamics of immersion, presence, and satisfaction in multi-sensory experiences brought about by immersive technologies. however, an exploratory study on first-year students' perception of university online lessons reported that the efficiency of technological innovation brought about a positive response to online learning as well as changed the feeling of loneliness of in-home study to being able to concentrate on individual learning efficiently through online lessons(hirabayashi, 2020). aside from a recent study reporting some motivational effects of vr tours in a university in japan(figueroa et al., 2020), no article regarding vr tours for teaching the filipino language in japan in the context of remote teaching has been found as of this writing.this study aimed to provide insights as to how an intervention using vr tours could have an impact on japanese students of a filipino language class in the context of ert during the time of the covid-19 outbreak in japan. more specifically, it aims to answer three research questions (rq): rq1: how different are the levels of satisfaction between students who experienced vr tours with varying levels of immersion? rq2: how are the levels of satisfaction among students related to presence in the vr tours?.this study investigated the impact of vr tours on japanese students of a filipino language class in the context of ert during the time of the covid-19 outbreak in japan using both quantitative and qualitative analysis. by the end of the six vr tours, the students were expected to be able to introduce popular japanese tourist spots in a vr tour of their own using the vocabulary that they had learned from the tours and to be able to introduce the places using the correct pronunciation and rhythm. students assigned to the high immersion group participated in all the vr tours using their phones and vr goggles delivered to their homes. those assigned to the moderate immersion group participated in all the vr tours using their computers or smartphones, but without using vr goggles. 
the students were grouped according to the compatibility of their mobile devices with vr tours and their willingness to try vr tours.categorizing the reasons given by the participants regarding the rating of their vr tour experience produced 20 codes which were further categorized into 7 sub-themes and finally into two main themes -the benefits of vr tours and the problems and challenges encountered while doing the vr tours.however, in this instance, the cognitive innovation of incorporating novel vr tours into an online lesson that involved iteratively brainstorming lesson design and structure-related ideas, selecting which of these ideas to exploit, and constantly reflecting on learning points and things to improve each week, was enabled by limitations brought about by the state of emergency in japan during the covid-19 outbreak. in the same way, because of the sudden closure of borders that prevented learners from learning filipino vocabulary in a more immersive environment through field trips, vr was considered to be a possible means of allowing learners to somehow experience certain places in the philippines while learning the language without having to go there.while the challenges and difficulties encountered in the exploratory study supported recent findings(graeske & sjöberg, 2021;phoon et al. however, vr is progressively recognized as a powerful tool for learning and productivity by institutions and governments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/357.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/357.txt new file mode 100644 index 0000000000000000000000000000000000000000..9cb8fcaf07c21e8f8decec106df29502ff573a03 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/357.txt @@ -0,0 +1 @@ +rapid advances in artificial intelligence (ai) over the past several years have raised new questions about the role that machines might play in both promoting and impeding humanity. the field of education has been no different. emerging ai capabilities are enabling machines to fuse and make sense of larger, more diverse datasets in increasingly efficient ways. while these affordances of scale, diversity, and efficiency might help generate insights and guide actions to improve educational opportunities and outcomes, they also come with several technical limitations and related practical risks-like failures to generalize and identify causal relationships-that threaten to perpetuate unfair or harmful applications. thus, and rightfully so, the reemergence of ai has sparked new debates about the political, pedagogic, and practical implications of its application in educational contexts (shum & luckin, 2019). these debates are critical, especially if we wish for machines to be able to better-serve the human actors-teachers, learners, administrators, and others in education-who may benefit from their emerging capabilities.engaging productively in these debates, however, requires one to understand some of the methodological paradigms and practices specific to artificial intelligence in education (aied). however, researchers and practitioners not trained in computer science or engineering may find the rapidly advancing field of ai inaccessible. in this article, we try to address this gap, providing an overview of the meanings, methods, and limitations of ai as a re-emerging field and how these intersect with ai's applications in education. 
in doing so, we hope to build on previous introductions to this topic (e.g., luckin, 2018;holmes et al., 2019) and critical works that connect data models with ethical and social debates (perrotta & williamson, 2018;perrotta & selwyn, 2020). by opening up the "black box" of ai for those outside of the field, we hope to further humancentered ai in education by empowering all stakeholders, regardless of disciplinary background, to contribute to the development of ai that recognizes and champions human capabilities (li & etchemendy, 2018;yang et al., 2021).a third paradigm of machine learning is "reinforcement learning," which has recently been used, among other applications, to develop powerful gameplay systems (e. neural networks take inspiration from connectionist philosophies of cognitive science(elman et al. for example, early warning systems to detect students likely to drop out may be developed for districts that lack a breadth or depth of historical data by "borrowing" the predictive capacities of models pre-trained on data from larger school settings as a starting point(coleman et al. unlike machine learning systems, rule-based models will not necessarily make more accurate decisions with a larger scale or diversity of data., the problems they haven't yet answered that they are most likely to answer correctly, given their prior history of answers. as machine learning technologies continue to offer new opportunities for personalizing instruction, it will be important to identify the precise elements of these systems that offer the greatest promise for enhancing student learning.proponents of ai, particularly machine-learning based systems that seek to infer students' knowledge states from the growing scale and diversity of data available on digital learning platforms like khan academy, argue these systems have the capacity to obviate the need for explicit formative and summative assessments, by seeking to infer students' knowledge states from the growing scale and diversity of data available on such digital learning platforms(piech et al." these systems, often using different forms of regression, mine large troves of historical student data to predict which students are most at risk of failing an exam, dropping out of high school or college, etc., a simple rule-based system could trigger a warning if a student's gpa falls below a certain level-machine learning-based systems have the potential to identify and exploit patterns of which school leaders may not be aware. for example, small school districts might face a "cold start" problem: they simply do not have enough historical data to train an accurate machine learning model-requiring them to "borrow" data from other school districts to improve accuracy (e. school leaders may also struggle to calibrate interventions based on the outputs of a model., 2017)showed how an object recognition system that could classify an image as containing a banana with high confidence could easily be fooled into making an incorrect classification simply by adding a small sticker of a toaster to the image.• what kind of ai is it? the examples contained in this paper illustrate how different types of ai can (and cannot) help solve different problems in education, and may help educationalists form a judgement about their applicability and risks within their own contexts.• does the ai enable something that would be difficult or impossible to achieve without it? 
unpacking any benefits of the scale or diversity of data that the ai operates on, or any efficiencies it enables and weighing them against associated risks or limitations may help justify its usefulness.• how equitably are the anticipated benefits and risks distributed across different groups of students and families? ai, especially machine learning-based systems, can "learn," replicate, and scale bias and inequity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/358.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/358.txt new file mode 100644 index 0000000000000000000000000000000000000000..03c5b7f05b205074e978ee279175a9d65270b274 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/358.txt @@ -0,0 +1 @@ +during lockdowns caused by the covid-19 pandemic, universities raced to implement emergency remote teaching (ert) strategies in japan. some have explored innovative interventions other than webinar platforms and learning management systems to bridge the gap caused by restricted mobility among teachers and learners. one such innovation is virtual reality (vr). vr has been changing the landscape of higher education because of its ability to "teleport" learners to various places by simulating real-world environments in the virtual world. to fill in the gap brought by geographical limitations due to heightened restrictions in 2020, educators at tokyo university of foreign studies (tufs) explored integrating vr in teaching the filipino language to first year japanese students (figueroa et al., 2022).the filipino language was first taught in japan at the osaka university of foreign studies, now osaka university in 1983 followed by tufs in 1992 (laranjo, 2020). these universities offer an entire major course in the filipino language and philippine-related courses. before the pandemic, classes were held using traditional in-person classroom-based or blended pedagogy using a learning management system (lms). students were encouraged to join short-term language classes abroad during the long spring and summer vacations or to join one-year student exchange programs with affiliated universities abroad. these programs not only provided a more immersive experience for the learners as they used the language and interacted with the native speakers of the language they were studying, but they also increased their motivation to apply and experience first-hand what they learned inside the classroom.therefore, when the short-term visits and student exchange programs were canceled due to the stricter rules at the height of the pandemic in 2020, a photo-based vr tour lessons on filipino vocabulary at tufs was created to provide students with an immersive way of learning filipino language and experience the filipino culture at the comfort of their homes while being unable to physically visit the philippines (figueroa et al., 2022). however, rules started relaxing in 2021 and 2022 when vaccines were introduced. thus, some fully online classes in japan shifted to blended learning. the same happened at tufs. 
with favorable feedback from students in 2020, the photo-based vr tour lessons on filipino vocabulary were consequently integrated even in the blended offering of the course in 2021 and 2022.therefore, when the short-term visits and student exchange programs were canceled due to the stricter rules at the height of the pandemic in 2020, a photo-based vr tour lessons on filipino vocabulary at tufs was created to provide students with an immersive way of learning filipino language and experience the filipino culture at the comfort of their homes while being unable to physically visit the philippines(figueroa et al. how different were the satisfaction, presence, and interest felt and experienced by learners between groups who used vr tours and those who did not in each tour in 2022? 2. in the same year, students were divided into three groups -high immersion group (used vr goggles), moderate immersion group (did not use vr goggles but used the vr tours) and low immersion group (did not use vr goggles and vr tours; only used photo-based powerpoint tours). in the 2022 implementation, the students were only divided into two groups, but both groups were able to experience the photobased vr tours while using vr goggles and the photo-based tours presented in powerpoint presentations. the high immersion group consisted of students who experienced vr tours using their smart phones with vr goggles delivered to their homes. the medium immersion group consisted of students who experienced vr tours without the vr goggles. the low immersion group consisted of students who experienced powerpoint-based tours with the same content as the vr tours. as illustrated in fig.2, in the first three lessons, group 1 experienced vr tours with goggles (vr group) while group 2 experienced powerpoint-based tours (non-vr group).rq 1: how different were the satisfaction, presence, and interest felt and experienced by learners between groups who used vr tours and those who did not in each tour in 2022?.table3compares the medians of the satisfaction, presence, and interest ratings of students in the vr and non-vr groups in lesson 1. the mann whitney u test indicated that satisfaction ratings were greater for students in the vr group (mdn =10) than those in the non-vr group (mdn = 8) ,u = 35, p = . the mann whitney u test indicated that presence ratings were greater for students in the vr group (mdn =10) than those in the non-vr group (mdn = 7) ,u = 31, p = .table7compares the medians of the satisfaction, presence, and interest ratings of students in the vr and non-vr groups in lesson 5.table8compares the medians of the satisfaction, presence, and interest ratings of students in the vr and non-vr groups in lesson 6. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/359.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/359.txt new file mode 100644 index 0000000000000000000000000000000000000000..8adf59153de49525e43e3628a8cf72fff299424b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/359.txt @@ -0,0 +1 @@ +robustness is an important property of deep learning models. it refers to the model's ability to produce expected outputs in cases when input data differs from the data the model has been trained on . literature distinguishes between adversarial and non-adversarial robustness also known as natural robustness. 
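A minimal sketch of the Mann-Whitney U comparison used in the VR study above; the two rating arrays are hypothetical stand-ins for the per-student satisfaction scores, not the study's data.

from scipy.stats import mannwhitneyu
import numpy as np

# Hypothetical 0-10 satisfaction ratings for the VR and non-VR groups.
vr_group = np.array([10, 10, 9, 10, 8, 10, 9])
non_vr_group = np.array([8, 7, 9, 8, 6, 8, 7])

u_stat, p_value = mannwhitneyu(vr_group, non_vr_group, alternative="two-sided")
print("medians:", np.median(vr_group), np.median(non_vr_group))
print("U =", u_stat, " p =", round(p_value, 3))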
adversarial robustness refers to the model's ability to accurately classify input data that has been intentionally corrupted in a way that aims to fool the model into making an incorrect prediction with high confidence. this type of robustness relies on exploiting model and data properties . while adversarial robustness prevents the model malfunction under intentional attacks , non-adversarial robustness preserves the model performance under naturally-induced data transformations such as environmental and sensory transformations . natural robustness is a desirable model property, as deployed models often encounter test data that differs from training data. for example, a model trained to recognize traffic signs on images collected in an area where rarely snows might generate an unpredictable output for a test image displaying the same sign under severe snowing conditions . differences in training and test datasets stem from distribution shifts that are common in practice due to the changing nature of data distributions, which can be both temporal or non-temporal (i.e., changes across locations, camera choice, image artifacts) . in a standardized machine learning pipeline, a training dataset is collected by sampling from theoretically infinite, high-dimensional data space with a priori unknown distribution d. a set of drawn samples dictates a source distribution of a training dataset, denoted with an orange curve in fig. 1, that the model learns through training. trained model inferences on test data having a target distribution, denoted by a blue curve in fig. 1, shifted relative to the source distribution. identifying the causes of distribution shift can be straightforward in some cases, such as in , where a model trained on sharp images was tested on blurry images that were not present in the training set. however, in many cases, the causes for distribution shift are not obvious and cannot be easily identified . in the authors acquire new test datasets for cifar-10 and imagenet following dataset acquisition guidelines and observe significantly decreased accuracy on a broad range of models when compared to original datasets. assessing the causes of distribution shifts accurately, especially in highdimensional data, poses a challenge to training models that can generalize well to an arbitrary sample from distribution d .when a model performs well on instances from a different distribution compared the the one it was trained on, it demonstrates out-of-distribution (ood) robustness. if the model performs consistently well on instances from the same distribution, it demonstrates in-distribution (id) robustness, ensuring stable performance on unseen data from the same distribution. while id robustness does not explore the model's generalization under distribution shifts, it is used as a standard method to detect overfitting and underfitting during the training. it is also used as a control metric for methods boosting ood generalization, since it has been shown that some of the methods can increase ood while simultaneously hurt model performance id .in this paper, we overview recent advancements in nonadversarial robustness, or shorter robustness, as we will refer to in this paper. while there are many papers addressing wellexplored adversarial robustness - , non-adversarial robustness has received significantly less attention. 
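A hypothetical sketch of the in-distribution versus out-of-distribution evaluation described above: one model, one held-out split from the training (source) distribution, and a synthetically shifted copy of that split standing in for a target-distribution test set.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
X = rng.normal(size=(2000, 20))
y = (X[:, :5].sum(axis=1) > 0).astype(int)            # toy labels
X_train, X_id, y_train, y_id = train_test_split(X, y, test_size=0.3, random_state=0)

model = LogisticRegression(max_iter=1000).fit(X_train, y_train)

# Synthetic covariate shift: displace the held-out features relative to the source distribution.
X_ood = X_id + rng.normal(loc=1.0, scale=0.5, size=X_id.shape)

print("ID accuracy :", model.score(X_id, y_id))
print("OOD accuracy:", model.score(X_ood, y_id))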
robustness improvement techniques have been proposed and discussed in multiple papers in the context of proposing a novel robustness benchmark dataset , robustness method - , or both . however, we were unable to find a systematic overview of these methods that could serve as an entry point to the nonadversarial robustness research field. thus, this paper gives a broad overview of robustness improvement methods based on domain adaptation and domain generalization approaches. we discuss the advantages and disadvantages of the approaches and complement the study with an overview of publicly available robustness benchmark datasets in computer vision.the paper is organized as follows. section ii gives a brief overview of the main deep learning principles that might help in following the content of sections iii and iv where we discuss major robustness improvement methodologies in domain generalization and domain adaptation fields. in section v we list major publicly available robustness benchmarking datasets, discuss the findings in section vi, and conclude in section vii. while adversarial robustness prevents the model malfunction under intentional attacks, non-adversarial robustness preserves the model performance under naturally-induced data transformations such as environmental and sensory transformations. if the model performs consistently well on instances from the same distribution, it demonstrates in-distribution (id) robustness, ensuring stable performance on unseen data from the same distribution.domain generalization is a machine learning field that aims to train the model on source distribution data so that it can generalize well to arbitrary target distribution.one approach to anticipate distribution shifts is to increase source distribution diversity and simultaneously enlarge the dataset by applying transformations on data sample copies. at the moment, studies suggest that optimal data augmentation choice is specific for a training dataset and task combination, with inappropriate choice leading to reduced model generalization such as infor deepfashion remixed benchmark dataset and training instabilityeven when applied augmentations seem to be visually almost unnoticeable. the results fromsuggest that natural robustness to synthetic distribution shifts does not apply to real-world distribution shifts, unless the model is trained on large and varied datasets. domain adaptation is a field of machine learning aiming to modify a model trained on a source distribution to adapt to data from a target distribution.using transfer learning in domain adaptation settings is based on the assumption that distribution invariant feature representations can be learned from source and target distribution data. the approach utilizes learning on multiple tasks and datasets to increase knowledge transfer by reducing the distance between learned tasksand training simultaneously on source and target distribution datasets. whileadapts the model on target data and uses adapted model weights further in inference,starts from the model trained solely on source distribution and adapts its weights online as target distribution data arrives. while synthetic benchmarks provide testing in controlled conditions under known distribution shifts, recent work shows that demonstrating robustness on synthetic benchmarks does not necessarily guarantee robustness on real-world distribution shifts. 
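A hedged illustration of the augmentation-based domain generalization idea discussed above: the source distribution is broadened by applying label-preserving transformations to copies of each training image. The specific transforms are assumptions for the sketch, not the choices of any benchmark cited in the excerpt.

import torchvision.transforms as T

augment = T.Compose([
    T.RandomResizedCrop(224, scale=(0.8, 1.0)),
    T.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4),
    T.RandomHorizontalFlip(),
    T.GaussianBlur(kernel_size=5),
    T.ToTensor(),
])
# Each epoch sees a differently transformed copy of every training image,
# enlarging the effective source distribution the model learns from.

As the excerpt notes, the right transform set is dataset- and task-specific, and a poor choice can reduce generalization or destabilize training.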
for this dataset, distribution shifts cannot be clearly identified, but studyhas shown degraded performance for a range of classifiers trained on imagenet and tested on imagenetv2, indicating clear existence of natural distribution shifts in the imagenetv2 dataset. while domain adaptation benchmarks can be assembled in this manner for any downstream task, in this paper we further discuss domain adaptation benchmarks for object recognition task, as it is a well-established field in deep learning for computer vision. the literature on data augmentation agrees that the right choice of data augmentation can boost natural ood robustness, but that multiple augmentations can boost robustness even more,,. in this review we have covered recent progress in domain generalization and domain adaptation techniques where the first approach aims to prepare the model for unknown data distribution shifts in training time and the second to adapt the model for known distribution shift in inference time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/36.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/36.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9872553ea9c2d829dbc157367082bbbd70c49e4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/36.txt @@ -0,0 +1 @@ +according to the world health organization (who), physical inactivity is one of the leading risk factors for noncommunicable diseases and death worldwide, inducing substantial personal and societal cost . on a personal level, it significantly increases the risk of cancer, heart disease, and diabetes, and it is estimated that up to five million deaths per year could be prevented if the global population were sufficiently active. deteriorating population health also comes with a growing societal burden in terms of medical care and loss of productivity. recent estimates show that physical inactivity has led to us$ 54 billion expense for the health system and us$ 14 billion in indirect economic losses in the us alone, while worldwide, 1-3% of national health care expenditures can be attributed to physical inactivity .a significant challenge towards reducing the global prevalence of physical inactivity is encouraging individuals who are not sufficiently active to alter their behavior and include physical activity in their daily routine. to this end, research has shown that interactive technology can be strategically designed to motivate desirable behavior change, such as regular exercise and healthy nutrition , for better population health and wellness. at the same time, investing in technological tools and resources for promoting regular physical activity can directly contribute to many of the united nations' 2030 sustainable development goals . overall, across all settings, there are opportunities for digital mhealth innovations to harness the potential of data to promote, support, monitor, and sustain health behavior change, focusing on physical activity.the u.s. department of health and human services recommends the equivalent of at least 150 minutes of moderateintensity aerobic activity each week to rip the benefits of regular exercise . simplifying these guidelines into something concrete and relatable, such as daily step counts, is an easy way for the majority of the population to understand and achieve them. 
specifically, daily step counts between 7,000 and 9,000 steps can result in health benefits similar to achieving the recommended amounts of moderate-to-vigorous exercise . to this end, considerable research efforts have been made for developing effective technological interventions to help people achieve the recommended step counts. amongst the most common and successful persuasive design techniques utilized in such interventions is goal-setting, i.e., setting a daily number of target steps for the user to achieve .the most straightforward goal-setting approach, used in most commercial physical activity trackers, is the fixed goal approach, where the system sets a fixed goal for the user. however, this approach may lead to unrealistic goals, as it does not consider the singularity of an individual's behavior. at the same time, research shows that a personalized and adaptive goal approach performs better in increasing adherence and physical activity levels , by tailoring the system to enhance motivational appeal. in this approach, the system monitors the daily behavior of the user, such as physical activity, sleep, and stress levels, and then personalizes their step goal based on a combination of factors. at the same time, it adapts over time to incorporate possible changes in user behavior.previous works have attempted to tackle the task of personalized and adaptive goal-setting by employing statistical models for time-series forecasting or domain-expertise-based estimates of step goals tailored to different user groups (see section ii). however, the focus is currently shifted towards more intelligent approaches, which provide state-of-the-art (sota) performance for the task of physical activity prediction . nevertheless, the field of physical activity prediction for adaptive goal-setting is still in its infancy, and sota approaches suffer from various limitations discussed below:small-scale & subjective experimental data (l1): prior work relies on small-scale datasets that are constructed and designed according to conceptual or theoretical interests with articulated research questions, and collected by users participating in ongoing research, which can lead to a distorted reflection of the actual levels of their daily activity ;challenging data wrangling (l2): there are only scattered guidelines concerning preprocessing techniques suitable for handling the idiosyncrasies of real-world, noisy timeseries data generated by personal informatics self-tracking systems, contrary to popular domains, such as natural language processing or computer vision;lack of physical activity prediction benchmarking (l3): limited studies exist in the area of predicting physical activity levels by exploiting machine learning and deep learning approaches for enabling personalized and adaptive goal-setting. even then, the exact architectures and hyperparameters utilized remain unspecified, and a comprehensive benchmarking of relevant approaches is yet to be published;reproducibility & reusability issues (l4): due to the limitations introduced by closed-source data and code repositories, reproducibility of published results and reusability of existing code and models for the task of personalized goalsetting is close to infeasible at this moment.motivated by the issues above, this work proposes ubi-wear, a five-step, data-driven framework for the task of physical activity prediction with the end goal of enabling personalized and adaptive goal-setting interventions via selftracking. 
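A minimal sketch of the personalized, adaptive goal-setting idea described above, assuming a simple rule: base tomorrow's goal on a rolling window of the user's recent daily steps and nudge it upward (the 10% increase echoes the adjustment mentioned later in this excerpt; the floor and cap are arbitrary).

from statistics import mean

def adaptive_step_goal(recent_daily_steps, window=7, increase=0.10, floor=2000, cap=12000):
    # Personalize: use only this user's recent history, not a fixed population-wide target.
    history = recent_daily_steps[-window:]
    baseline = mean(history) if history else floor
    goal = baseline * (1 + increase)               # adapt: challenge slightly above recent behavior
    return int(min(max(goal, floor), cap))

print(adaptive_step_goal([3500, 4200, 5100, 4800, 3900, 6000, 4500]))  # -> 5028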
the framework comprises the complete process from physical activity data acquisition to end-user applications. figure 1 depicts ubiwear's pipeline. our contributions are structured around the framework's steps:c1 -in-the-wild data exploitation: we discuss the availability of open datasets for physical activity prediction, and the idiosyncrasies of self-tracking data. ultimately, we utilize the openly accessible "myheart counts" dataset , containing real-world, large-scale physical activity patterns for thousands of users, allowing us to better capture different segments of the population and build generalizable prediction models. the dataset has been collected in-the-wild, presenting an objective view of the users' behavior (l1).c2 -self-tracking data processing guidelines: we introduce a set of prescriptive guidelines on how to process aggregated data from self-tracking devices, accompanied by a python library release1 . derived from our exhaustive experimentation, we propose specific adaptation methodologies for traditional preprocessing techniques explicitly designed to handle self-tracking data idiosyncrasies. our goal is for fig. 1. the ubiwear framework covers the path of physical activity data from its acquisition to its application scenarios.these guidelines to provide a more standardized definition of processing self-tracking data (l2).c3 -physical activity prediction benchmarking & evaluation: we experiment with six different learning paradigms for physical activity prediction, from machine learning to advanced deep learning architectures, and benchmark their performance for this complex learning task (l3). through the experimentation with more advanced architectures, ubi-wear achieves a mae (mean absolute error) of 1087 steps, 65% lower in terms of absolute error than that of the sota model , proving the feasibility of intelligent physical activity prediction.c4 -open access data and code: we purposefully work with an open-access dataset and publicly share our codebase2 to enable the reproducibility of our results. to further facilitate future interdisciplinary research, we also adopt containerization and offer step-by-step guides on how to download, store and preprocess the data, as well as on how to reproduce our results with the uploaded pre-trained models (l4).we structure the remaining of this paper as follows: section ii discusses the related literature in physical activity prediction and personalized goal-setting. sections iii-vii introduce the ubiwear framework from data acquisition to self-tracking data processing, modeling and evaluation to applications of insights. 
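A small sketch of the mean-absolute-error metric the benchmark above reports; the step counts here are hypothetical, not the paper's results.

import numpy as np

y_true = np.array([6200, 8100, 4300, 9500, 7000])   # hypothetical actual daily steps
y_pred = np.array([5800, 9000, 5000, 8800, 7600])   # hypothetical model predictions
mae = np.mean(np.abs(y_true - y_pred))
print(f"MAE = {mae:.0f} steps")                      # the excerpt reports ~1087 steps for its best model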
finally, section viii concludes the paper and delineates ideas for future work.small-scale & subjective experimental data (l1): prior work relies on small-scale datasets that are constructed and designed according to conceptual or theoretical interests with articulated research questions, and collected by users participating in ongoing research, which can lead to a distorted reflection of the actual levels of their daily activity;.challenging data wrangling (l2): there are only scattered guidelines concerning preprocessing techniques suitable for handling the idiosyncrasies of real-world, noisy timeseries data generated by personal informatics self-tracking systems, contrary to popular domains, such as natural language processing or computer vision;.lack of physical activity prediction benchmarking (l3): limited studies exist in the area of predicting physical activity levels by exploiting machine learning and deep learning approaches for enabling personalized and adaptive goal-setting.motivated by the issues above, this work proposes ubi-wear, a five-step, data-driven framework for the task of physical activity prediction with the end goal of enabling personalized and adaptive goal-setting interventions via selftracking.c1 -in-the-wild data exploitation: we discuss the availability of open datasets for physical activity prediction, and the idiosyncrasies of self-tracking data.c2 -self-tracking data processing guidelines: we introduce a set of prescriptive guidelines on how to process aggregated data from self-tracking devices, accompanied by a python library release1.c3 -physical activity prediction benchmarking & evaluation: we experiment with six different learning paradigms for physical activity prediction, from machine learning to advanced deep learning architectures, and benchmark their performance for this complex learning task (l3).prediction for personalized goal-setting an emerging body of work is trying to tackle the lack of intelligent goal-setting systems by publishing studies of statistical, rule-based, machine learning-or deep learning-based physical activity prediction solutions that take into account historic data to provide realistic and achievable exercise goals.it is evident that only a limited number of studies exploit machine learning or deep learning techniques for the task of physical activity prediction for enabling personalized and adaptive goal-setting, even though sota models provide significantly superior performance in similar tasks, such as human activity recognition or sleep stage classification. on top of the above-mentioned peculiarities, data acquisition for physical activity prediction is challenging; collecting proprietary data requires a significant monetary and time investment, while open datasets are limited and include diverse data modalities. popular small-scale (in terms of sample size) datasets include the extrasensory dataset (n=60 users) containing a wide range of sensor data combined with activity and location labels; and the studentlife dataset (n=48) comprised of continuous sensor data, self-reports, and various pre-post surveys combined with activity, mental health and academic performance labels. data processing for self-tracking data as discussed in section i, there are no concrete guidelines for the preprocessing of aggregated self-tracking data, despite the idiosyncrasies of the task. 
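A hedged sketch of the kind of preprocessing guideline discussed above for noisy self-tracking time series; the concrete steps (daily resampling, short-gap imputation, outlier clipping) are illustrative assumptions, not the paper's published guidelines.

import pandas as pd

def preprocess_steps(raw: pd.Series) -> pd.Series:
    # raw: per-record step counts indexed by a DatetimeIndex
    daily = raw.resample("D").sum(min_count=1)    # aggregate to daily totals, keep empty days as NaN
    daily = daily.interpolate(limit=2)            # impute gaps of up to two missing days
    upper = daily.quantile(0.99)                  # treat extreme spikes as sensor noise
    return daily.clip(lower=0, upper=upper)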
in other words, by extending the feature space with the date features for the same timestamps but different user behavior, we essentially introduce noise to our machine learning models by diverting attention from the actual activity data.first and foremost, by predicting personalized physical activity levels from the unique users' data, we can infer adaptive and challenging future step goals by increasing the predicted step goal by 10%. this led to the design of a set of prescriptive guidelines and the release of an open-sourced python library to help researchers and practitioners with the highly demanding task of data processing of ubiquitous selftracking data (c2). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/360.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/360.txt new file mode 100644 index 0000000000000000000000000000000000000000..7788d5bcb208f61b1945086a1663fc6c69315229 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/360.txt @@ -0,0 +1 @@ +currency is a universally accepted medium of exchange that enables the trade of goods and services. typically, in the form of paper and coins, it represents a government-issued monetary system and holds a face value that serves as a means of payment. the establishment of currency has greatly facilitated global trade and has been in existence for over 3,000 years. while its origins can be traced back to ancient egypt, the evolution of currency has led to its current form and widespread usage .currently, there are more than 200 national currencies in circulation worldwide. remarkably, 42 countries either utilize the u.s. dollar or have their currencies directly pegged to it. the u.s. dollar holds a significant position in the global economy, accounting for 58.8% of foreign exchange reserves, according to the international monetary fund (imf). most nations have their own official currencies, such as the swiss franc for switzerland and the yen for japan. however, the euro stands as an exception, having been adopted by the majority of european union member states . in terms of value, the top five highest-valued currencies globally are the kuwaiti dinar, bahraini dinar, omani rial, jordanian dinar, and cayman islands dollar .afghanistan, like any other country, possesses its official currency to facilitate domestic transactions. in contrast to widely used currencies like the euro (eur) or dollar (usd), afghanistan has its distinct national currency called the afghani (afn). the issuance of the afghani is overseen by the country's central bank, da afghanistan bank, and its adoption and popularity have been shaped by a significant historical background. the afghani is available in the form of bills and coins. bill denominations include 10, 20, 50, 100, 500, and 1,000 afn, while coin denominations include 1, 2, and 5 afn .the usage of currency has brought numerous conveniences to society. however, along with its benefits, the challenge of identifying genuine currency arises due to the existence of counterfeit or fake money. this problem extends beyond afghanistan and is faced by countries worldwide, as counterfeit denominations pose a significant issue in currency recognition. counterfeiting has a long-standing history and is often referred to as the world's second-oldest profession. counterfeit money refers to currency created without the legal authorization of a government or state, typically with the intention of imitating genuine currency and deceiving its recipients. 
the production or usage of counterfeit money is considered a form of forgery and is strictly prohibited by law .the presence of counterfeit currency circulating in an economy has a significant impact on the stability and value of legitimate currency, posing a threat to its overall integrity. with advancing technology, the risk of increased production and dissemination of counterfeit banknotes grows, which can have detrimental effects on a nation's economy. therefore, it is imperative to develop methods to detect counterfeit currency. afghanistan, in particular, faces a high prevalence of counterfeit currency, causing numerous challenges for its people. while banks and commercial establishments in afghanistan employ authentication machines, these systems are not accessible to the public. to address this issue, we propose a method to identify counterfeit afghan banknotes by examining specific security features using image processing and machine learning approach.as previously mentioned, afghani banknotes are available in six different denominations. however, it has been observed that counterfeit banknotes predominantly occur in the 500 and 1000 afn denominations . in the market, various banknotes have been issued and are in circulation in afghanistan, including those issued in 1387, 1389, 1391, and 1395. in this study, our focus was specifically on the 1000 afn banknotes issued in 1391.the remaining sections of this paper are organized as follows: in section ii, we discuss the related work pertaining to our study. the methodology employed in our work is described in section iii. section iv presents the results obtained from our analysis, and in section v, a comprehensive discussion of these results is provided. concluding remarks are presented in section vi, along with an overview of our future research directions. to address this issue, we propose a method to identify counterfeit afghan banknotes by examining specific security features using image processing and machine learning approach.in recent years, notable advancements have been achieved in the field of counterfeit currency detection, leading to the implementation of dedicated detection systems and software by various countries worldwide. the system utilized image processing techniques and focused on extracting six key features from the banknote image: an identification mark, security thread, watermark, numeral, floral design, and micro lettering. texture features were extracted using the gray-level co-occurrence matrix (glcm), while shape features were obtained using a set of common properties that characterize connected image regions. in another study, a fake currency detection system was developed using matlab's image processing capabilities. the system effectively detected counterfeit currency in the newly introduced denominations of 500 and 2000, employing a comprehensive process from image acquisition to feature intensity calculation. in a distinct study, researchers presented a system that verifies the genuineness of banknotes by analyzing color, texture, shape, and other distinct characteristics, as per the guidelines set by the reserve bank of india (rbi).in a related study an algorithm was developed using image processing techniques to detect counterfeit libyan banknotes. another study introduced a currency detection system for indian banknotes, achieving a high accuracy rate of 90% using digital image processing techniques with opencv. 
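A minimal sketch of the GLCM texture-feature extraction mentioned above, using scikit-image; the file name is a placeholder and the distance/angle choices are assumptions.

import numpy as np
from skimage.io import imread
from skimage.color import rgb2gray
from skimage.feature import graycomatrix, graycoprops   # spelled 'greycomatrix' in older releases

img = (rgb2gray(imread("banknote_1000afn.jpg")) * 255).astype(np.uint8)
glcm = graycomatrix(img, distances=[1], angles=[0, np.pi / 2],
                    levels=256, symmetric=True, normed=True)
features = {prop: graycoprops(glcm, prop).mean()
            for prop in ("contrast", "homogeneity", "energy", "correlation")}
print(features)   # texture features that could feed the classifiers mentioned above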
furthermore, another study was conducted to detect counterfeit currency by extracting first-order and secondorder statistical features from currency images. they employed statistical-based features and employed edge detection methods for accurate feature extraction.previous studies have extensively utilized image processing techniques and machine learning algorithms to detect counterfeit banknotes. consequently, the aim of this study was to address this research gap and contribute by identifying and proposing an improved machine learning algorithm specifically tailored for the detection of counterfeit afghan banknotes.2) data resampling: the dataset was imbalanced due to the rarity of fake banknotes, the number of fake notes were less than real notes, which can leas to a biased model that was unable to accurately classify banknotes. to achieve this, we utilized image processing techniques to extract key texture features from images of new 1000 afn banknotes issued in 1391, and built models for fr, naïve bayes, and part classifiers using the weka machine learning tool. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/361.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/361.txt new file mode 100644 index 0000000000000000000000000000000000000000..7b553470b0262dc243eafc098a032e92163e51dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/361.txt @@ -0,0 +1 @@ +while the language models (lms) demonstrate outstanding efficiency in working with unstructured language data, they struggle with problems that require exact computations (patel et al., 2021b). on the other hand, symbolic systems, such as a calculator, can perform arithmetics without errors. thus, combining the strengths of both neural and symbolic systems can yield significant benefits in tackling tasks that require arithmetics (schick et al., 2023;gao et al., 2023).given a sufficient amount of supervised data, the interaction with symbolic systems can be learned. however, obtaining texts demonstrating the interaction in relevant situations and in a consistent structure is non-trivial. consequently, a substantial effort of most related work addresses the data scarcity problem through semi-supervised learning, heuristics, prompting or few-shot, and reinforcement- based approaches (section 2) with compromises in the quality and reproducibility.to support future research in developing opensource tool-assisted language models, we curate a calc-x collection of over 300,000 samples for mathematical reasoning. calc-x transforms several existing datasets into a unified format that can be used to train and evaluate lms for the correct use of a calculator. we survey existing chain-ofthought (cot) datasets for arithmetical reasoning (section 2) and pick a subset suitable for integration into a consistent collection. to enable efficient integration of lms with independent tools, we propose a unified format of a fully parseable html-like markup language (section 3). for each dataset, we describe the curation process of its calculatoraugmented (calc-x) version (section 4). finally, we show that training on a full mixture of calc-x datasets enables lms to use a symbolic system during inference and largely improves the accuracy on held-out math problems (section 5). we make all our building tools, datasets and models publicly available (appendix a.4). 
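The excerpt does not reproduce the markup itself, so the tag names below are assumptions; the sketch only illustrates the idea of a fully parseable calculator call embedded in a reasoning chain, whose expression an external tool can re-evaluate during decoding.

import re

chain = "She buys 3 boxes of 12 eggs: <calc>3*12</calc><output>36</output> so she has 36 eggs."

def run_calculator_calls(text: str) -> str:
    # Re-evaluate each embedded expression and rewrite its output tag.
    def repl(match: re.Match) -> str:
        expr = match.group(1)
        return f"<calc>{expr}</calc><output>{eval(expr)}</output>"   # toy evaluator, hypothetical tags
    return re.sub(r"<calc>(.*?)</calc><output>.*?</output>", repl, text)

print(run_calculator_calls(chain))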
calc-x transforms several existing datasets into a unified format that can be used to train and evaluate lms for the correct use of a calculator. finally, we show that training on a full mixture of calc-x datasets enables lms to use a symbolic system during inference and largely improves the accuracy on held-out math problems (section 5).talm(parisi et al. however, the availability of an extensive, standardized collection of toolassisted datasets like the one presented by calc-x will allow future work to substantially simplify the methods needed for creating tool-assisted models.out of the datasets reviewed in section 2, we create the first version of calc-x collection from these datasets: gsm8k, aqua-rat, mathqa, ape210k, mawps, svamp and asdiv.the resulting calc-x collection is designed to simplify the correct usability of the whole collection in both training and evaluation while persisting the maximum of datasets' original information. this results in data splits composed of subsets of the original datasets, but thanks to this step, the whole calc-x collection can be used to perform both validation and tests over all datasets when all datasets are also used in training. train on the original datasets: use all of the selected datasets (see section 4) to train a baseline: a generative model that produces an associated output reasoning chain on a given input sequence. train calcformers on the calc-x datasets: train the model for an identical objective, but on the corresponding calc-x datasets, to demonstrate the interaction with a symbolic system.results table1compares the performance of the conventional generative models and calculatorsupported models trained on calc-x datasets.this paper introduces a calc-x dataset collection, transforming over 300,000 samples of arithmetic reasoning datasets into a unified chain-of-thought format with explicit annotation of interaction with a calculator. calc-x enables integration of a simple symbolic system in the reasoning chains of language models via traditional supervised learning, easily allowing the models to offload mathematical computation to an external tool.we support the correct use of the calc-x collection for both training and evaluation by unifying the format of all included datasets and eliminating the datasets' mutual data leakages, making calc-x a convenient default for any future research addressing models' arithmetic reasoning.finally, we demonstrate the potential of calc-x by utilizing the whole unified collection in training and adjusting the models' inference process for the use of the calculator. we make the calc-x collection and newly-created calculator-supported calcformer models publicly available to facilitate further research in the fastpaced area of tool-using language models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/362.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/362.txt new file mode 100644 index 0000000000000000000000000000000000000000..b98b53c9bdd4a13dc27ce1fadaed4e279446c953 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/362.txt @@ -0,0 +1 @@ +recently, openai's chatgpt (openai 2023b) has demonstrated outstanding performance on text generation, sparking a research frenzy in large language models (llms). some of the most famous llms include gpt series like gpt-3 (brown et al. 2020), gpt-4 (openai 2023a), and palm (chowdhery et al. 2022), ernie (zhang et al. 2019). open-sourced ones like glm (du et al. 
2021), bloom (laurenc ¸on et al. 2022), opt (zhang et al. 2022) and llama series (touvron et al. 2023) have remarkably accelerated the development of the community. in essence, llms are generative models are trained on excessively large amounts of text data that mimics how humans use language, and they exhibit superior zero-shot performance in a large range of natural language processing (nlp) tasks, including language translation, sentiment analysis, text classification, and question answering, etc. they are increasingly be- ing used in applications such as chatbots, language understanding, and speech recognition systems. nevertheless, due to the large scale (normally tens of billions or even trillions of parameters) of large language models, it causes large resource consumption even for deployment. taking gpt-3 as an example, it has 175 billion parameters and uses fp16 for inference, occupying approximately 350 gb of gpu memory, which means at least 8 nvidia a100 gpus are needed to support the deployment of a single model. therefore, it is more than a necessity to reduce the cost.model quantization, as a classic method of model compression, can effectively reduce the memory consumption of llms. for example, when using 4-bit quantization, gpt-3 can be deployed on 2 a100 gpus due to one-fourth of memory reduction. gptq (frantar et al. 2022) is currently the most prominent low-bit weight-only quantization method, which can compress some llms to 4-bit while maintaining acceptable precision degradation. smoothquant (xiao et al. 2023) could achieve 8-bit quantization for both weights and activations, by equivalently transferring the multiplication factors in weights and activations. however, these methods suffer from significant accuracy loss when applied to lower-bit quantization, such as 2-bit weight-only quantization using gptq or w4a8(4-bit for weights and 8-bit for activation) quantization using smoothquant. according to zeroquant-v2 (yao et al. 2023), llama-65b with gptq 2-bit quantization, the accuracy on the lambada dataset (paperno et al. 2016) decreased from 79% to 57%, for which reason it proposes a quantization-aware training method based on low-rank compensation. however, it not only requires additional training costs but also introduces additional parameters, which is not a viable choice for efficient deployment.to improve the lower-bit performance of quantized models, we first draw an intuition that llms have sufficient noise resilience, such that it calls a tender solution for precision recovery. it is demonstrated in prompt quantization (xu et al. 2023) that for a compressed llm, providing an appropriate prompt can yield high-precision generation without updating parameters. zeroquantv2 (yao et al. 2023) indicates that the larger parameter a model has, the less degradation will the quantization have. next, we explore why llms behave poorly on lower-bit quantization from a numerical perspective. we observe that the distribution of the quantized model's output tensor deviates significantly from that of the original float model, and it accumulates layer by layer to become intractable, see figure 1. therefore a question is raised: could we improve the performance of the quantized model by simply matching its activation distribution to that of the float model?to achieve this goal, we propose a method called norm-tweaking to enhance the quantized model by slightly adjusting the parameters of the layernorm layer to tweak the quantized distribution. 
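A hedged PyTorch-style sketch of the norm-tweaking idea introduced above: freeze every weight, leave only the LayerNorm affine parameters trainable, and nudge the quantized layer's outputs toward the float layer's outputs on a few calibration batches. The MSE loss stands in for the distribution-matching objective, whose exact form is truncated in this excerpt; layer and batch handling are simplified assumptions.

import torch

def tweak_layernorms(quant_layer, float_layer, calib_batches, lr=1e-5, iters=1):
    for p in quant_layer.parameters():
        p.requires_grad_(False)                    # freeze all weights ...
    ln_params = []
    for m in quant_layer.modules():
        if isinstance(m, torch.nn.LayerNorm):
            m.weight.requires_grad_(True)          # ... except LayerNorm affine parameters
            m.bias.requires_grad_(True)
            ln_params += [m.weight, m.bias]
    opt = torch.optim.SGD(ln_params, lr=lr)
    for _ in range(iters):
        for hidden in calib_batches:               # calibration activations entering this layer
            with torch.no_grad():
                target = float_layer(hidden)       # float model's output distribution
            loss = torch.nn.functional.mse_loss(quant_layer(hidden), target)
            opt.zero_grad()
            loss.backward()
            opt.step()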
this method can be widely applied to a variety of quantization methods, achieving significant accuracy improvement with only minimal additional computational cost. our method is evaluated on various models and datasets, and the results indicate that norm-tweaking consistently improves the performance of gptq and smoothquant on different large language models. for llama models, norm-tweaking demonstrates a general performance enhancement over gptq on diverse datasets, with a notable accuracy improvement of approximately 10% on the lambada dataset. moreover, during subjective evaluations of quantized models, we observe that norm-tweaking excels in preserving the general semantic ability of extremely low-bit quantized models. in a nutshell, our contribution is three-fold, 1. firstly, we discover that large language models in general are robust against weight distortion, merely slight partial weight adjustment could recover its accuracy even in extreme low-bit regime. it is unnecessary to adopt heavy quantization-aware training or other sophisticated techniques.2. secondly, we carefully devise an llm tweaking strategy composed of three parts (1) adjusting only the parameters of layernorm layers while freezing other weights, which can be applied to nearly all llms since it is pervasively used;(2) constrained data generation enlightened by llm-qat (liu et al. 2023) to obtain the required calibration dataset, which effectively reduces the dependence on specific datasets during model quantiza- (frantar et al. 2022) compresses and stores weight parameters, and decompresses them to fp16 for inference during calculation. this approach can effectively reduce the proportion of memory access time during inference while maintaining model accuracy. llm.int8() (dettmers et al. 2022) proposes to use float calculation or to adjust the multiplication operations of layernorm to reduce quantization loss. smoothquant (xiao et al. 2023) proposes a method to reduce the activation ranges by equivalently transferring the multiplication factors in weights and activations. gptq (frantar et al. 2022) reconstruct weights based on the method in obs (hassibi, stork, and wolff 1993) via hessian matrix to reduce quantization error. gptq has been widely applied in many scenarios where some llms could achieve high precision at 4-bit quantization. rptq (yuan et al. 2023) and awq (lin et al. 2023) further improve this method.quantization-aware training. another method to improve the performance of the quantized models is quantization-aware training (qat), which is to fine-tune the quantized models to match the original float models. qat is widely studied in convolutional networks, but it encounters significant setbacks in large language model quantization. as the training process of llms consumes a huge amount of text data (usually in the order of trillions of tokens), how to efficiently fine-tune the quantized llms while maintaining their general knowledge and generalization ability remains an open question. to name a few attempts, llm-qat (liu et al. 2023) requires the update the whole parameters of the llms on a set of at least 100k sampled data. zero-quantv2 (yao et al. 2023) introduces a low rank com-pensation to achieve parameter-efficient fine-tuning, but this approach neither eliminates the need for a large amount of training data nor avoids the introduction of additional parameters. 
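The mechanism described above, freezing all weights and updating only the LayerNorm affine parameters so that the quantized model's layer outputs track the float model's, can be illustrated with a short PyTorch sketch. This is a toy illustration under our own assumptions: a small two-layer block stands in for a transformer layer, and a plain MSE loss between layer outputs stands in for the paper's actual per-layer calibration loss.

import torch
import torch.nn as nn

torch.manual_seed(0)

# Toy stand-ins: a "float" block and a degraded ("quantized") copy of it.
def make_block():
    return nn.Sequential(nn.Linear(64, 64), nn.GELU(), nn.LayerNorm(64))

float_block = make_block()
quant_block = make_block()
quant_block.load_state_dict(float_block.state_dict())
with torch.no_grad():                        # crude stand-in for weight quantization error
    quant_block[0].weight.add_(0.02 * torch.randn_like(quant_block[0].weight))

# Norm-tweaking: freeze everything except the LayerNorm affine parameters (module index 2 here).
for name, p in quant_block.named_parameters():
    p.requires_grad = "2." in name

calib = torch.randn(256, 64)                 # calibration batch (the paper generates such data)
opt = torch.optim.SGD([p for p in quant_block.parameters() if p.requires_grad], lr=1e-2)

for step in range(100):
    opt.zero_grad()
    with torch.no_grad():
        target = float_block(calib)          # float model's layer output distribution
    loss = nn.functional.mse_loss(quant_block(calib), target)
    loss.backward()
    opt.step()
print(f"final distribution-matching loss: {loss.item():.4f}")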
2022), ernie (zhang et al.). model quantization, as a classic method of model compression, can effectively reduce the memory consumption of llms. smoothquant (xiao et al.). next, we explore why llms behave poorly on lower-bit quantization from a numerical perspective. to achieve this goal, we propose a method called norm-tweaking to enhance the quantized model by slightly adjusting the parameters of the layernorm layer to tweak the quantized distribution. our method is evaluated on various models and datasets, and the results indicate that norm-tweaking consistently improves the performance of gptq and smoothquant on different large language models. another method to improve the performance of the quantized models is quantization-aware training (qat), which is to fine-tune the quantized models to match the original float models. as the training process of llms consumes a huge amount of text data (usually in the order of trillions of tokens), how to efficiently fine-tune the quantized llms while maintaining their general knowledge and generalization ability remains an open question. based on the observation shown in figure 1, the difference between the output tensors of each layer in the quantized model and its floating-point counterpart accumulates, while the output of the quantized model gradually deviates from the quantization-friendly zero-mean distribution. complete fine-tuning of the quantized model through qat is a direct approach, but the large number of parameters in the llm and the huge amount of required training data make qat unacceptable. therefore, our core objective can be summarized as adjusting the parameters of layernorm to make the output distribution of the quantized model approach that of the float model, which can be expressed formally as an arg-min problem over the layernorm parameters. motivated by the above analysis, we propose a ptq method for llms, called norm-tweaking, to quickly restore models' performance by slightly tweaking the layernorm layers of the quantized model. we then use stochastic gradient descent to update the parameters of layernorm in this layer, forcing the activation distribution of the quantized model to mimic that of the float model. in 2-bit quantization, the gptq algorithm caused significant accuracy loss for most models, making the results almost unusable. nevertheless, adopting the quantization method proposed in this paper, the quantized model obtained under the same settings does not have these obvious errors in the output results, suggesting the robustness of our quantization method. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/363.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/363.txt new file mode 100644 index 0000000000000000000000000000000000000000..c296e48ba514b6a579faaeccc712257107d0db7b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/363.txt @@ -0,0 +1 @@ +k-means clustering is a popular tool in data analysis and an important objective in statistics, data mining, unsupervised learning, and computational geometry. the objective of k-means clustering is to find k centers that minimize the sum of squared distances of all the points to their nearest centers. given a set x ⊆ r d of n data points and an integer k > 0, the k-means objective is to find a set c ⊆ r d of k centers that minimizes cost(c) = Σ_{p ∈ x} min_{c ∈ c} ||p − c||² (1). finding an optimal solution of the k-means objective stated in equation 1 is np-hard aloise et al. (2009).
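As a quick illustration of the objective in equation (1), the following NumPy snippet evaluates the k-means cost of a candidate set of centers; the data and the choice of centers are arbitrary toy values.

import numpy as np

def kmeans_cost(X, C):
    """Sum of squared distances from each point to its nearest center (equation (1))."""
    d2 = ((X[:, None, :] - C[None, :, :]) ** 2).sum(-1)   # (n, k) squared distances
    return d2.min(axis=1).sum()

X = np.random.default_rng(0).normal(size=(100, 2))
C = X[:3]                                                  # any 3 points as candidate centers
print(kmeans_cost(X, C))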
this problem is np-hard even for a restricted instance when all points are in the plane mahajan et al. (2012). however, several efficient approximation algorithms and heuristics have been developed to address this. the most popular algorithm for the k-means remains lloyd's k-means method lloyd (2006), which is a simple, fast heuristic that starts with any given initial solution and iteratively converges to a locally optimal solution.although the k-means problem is well-studied, the algorithms developed for the problem can perform poorly on real-world data. the reason is that the real-world datasets contain outliers, and k-means objective functions and algorithms are extremely sensitive to outliers. outliers can drastically change the quality of the clustering solution, and therefore, it is important to consider them while designing algorithms for the k-means objective. we state the objective function of k-means with outliers as follows: given a set x ⊆ r d of n points, an integer k > 0, and the number of outliers z, the objective of the k-means with outliers is to find a set c ⊆ r d of k centers that minimizes(2)problem statement: in this work, we aim to design efficient and practical approximation algorithms for k-means clustering with outliers problem, which solves the optimization problem stated in equation 2. d 2 -sampling for k-means: the k-means++ or d 2 -sampling arthur and vassilvitskii (2007) suggests an adaptive sampling algorithm for k-means problem (stated in equation (1)). in this sampling approach, the first point is sampled uniformly at random from the given points, and the sampled point is designated as a cluster center. then the second point is sampled with probability proportional to its squared distance from the first center, and designated as the second cluster center. in general, in each step, a new point is sampled with probability proportional to its square distance to the nearest cluster center sampled so far. if we sample k cluster centers following this distribution, the clustering obtained is o(log k)-approximation to the global optimum, in expectation. however, a limitation of d 2 sampling distribution is that it is extremely sensitive to outliers. consider a scenario where 99% of points are well clustered and the remaining 1% of points are very far away from these clusters. the d 2 sampling on this dataset is likely to pick outliers as cluster centers, and the final clustering results obtained is likely to be very far away from the optimal clustering. in this work we propose a simple tweak to d 2 -sampling, making it robust to outliers. both these algorithms can be made to output exactly the same number of outliers as the optimal solution, unlike previous algorithms that need to discard extra points than the optimal number of outliers. (2019). however, the algorithms are not designed to be practical. the algorithm ofkumar et al. they propose a sampling distribution consisting of a uniform and d 2 sampling mixture. note that this is the quantity we want to estimate in k-means with outlier problems.we present our algorithm -seeding algorithm for k-means with outliers in algorithm 1. our sampling (stated in line 5 of algorithm 1) is a simple modification of k-means++ sampling distribution which is computed by taking minimum of k-means++ sampling distribution and the term η • ρ x (s i-1 )/z, where η is a parameter, z is the number of outliers. 
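The thresholded sampling rule described above (line 5 of algorithm 1) can be sketched as follows. In this reading of the text, each point's k-means++ weight (its squared distance to the nearest chosen center) is capped at η times the current total cost divided by z, so a handful of far-away outliers cannot dominate the distribution. The exact definition of ρ in the paper may differ, so treat this as an illustrative approximation rather than the authors' algorithm.

import numpy as np

def robust_seeding(X, k, z, eta=1.0, rng=np.random.default_rng(0)):
    """k-means++-style seeding where each sampling weight is capped at eta * cost / z."""
    centers = [X[rng.integers(len(X))]]                    # first center uniformly at random
    for _ in range(k - 1):
        d2 = np.min([((X - c) ** 2).sum(1) for c in centers], axis=0)  # distance to nearest center
        cap = eta * d2.sum() / z                           # cap keeps outliers from dominating
        w = np.minimum(d2, cap)
        centers.append(X[rng.choice(len(X), p=w / w.sum())])
    return np.array(centers)

X = np.vstack([np.random.default_rng(1).normal(size=(99, 2)),
               np.array([[50.0, 50.0]])])                  # 1% of points are far-away outliers
print(robust_seeding(X, k=3, z=1))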
(2019)is that we can set our parameter η = 1, which allows us to discard exactly z points as outliers, the same as what the optimal solution discards. this thresholding is controlled by the parameter η (line 5 of algorithm 1) that ensures that a small number of outliers points are sampled.summing the right-hand inequality over x ∈ a, we get the lemma below shows that in each iteration of algorithm 1, if we pick a point from an optimal inlier cluster a, then we get a 64-approximation guarantee for it, in expectation.in other words, if we sample x ∈ a as in algorithm 1 with probability proportional to min{φ {x} (s i ), ηρ x (s i )/z} then, up to a multiplicative factor of 8, we get a similar upper bound on any expectation calculated for sampling with probability proportional to φ {x} (s i ), i. let w i be the number of wasted iterations, i.baseline algorithms: we study the performance of our algorithm (algorithm 1) to find k initial cluster centers with the following baselines:2019) requires one parameter -an initial guess of optimal clustering cost -to derive the probability distribution on each data point. we also gave a bi-criteria approximation algorithm, where our algorithm samples slightly more than k points as cluster centers, which consist of a set of k points (as inlier cluster centers) that gives o(1) approximation guarantee. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/364.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/364.txt new file mode 100644 index 0000000000000000000000000000000000000000..d784a15a67f4568f79639a9e0a34e9fb9660bac8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/364.txt @@ -0,0 +1 @@ +integrating radar technology into internet of things (iot) applications and personal computing devices offers advantages over traditional camera-based solutions, including weather and lighting independence and low power consumption while maintaining highresolution data generation - .to achieve state-of-the-art prediction performance for radar data processing, deep learning (dl) techniques are required , , . however, deploying dl workloads on low-powered microcontrollers (mcus) commonly used in iot products presents challenges. early exit neural networks (eenns) - provide a potential solution by incorporating additional classifiers between hidden layers, known as early exits (ees). eenns terminate the inference when an ee provides sufficient results, thus saving computational resources. the the project on which this report is based was funded by the german ministry of education and research (bmbf) under the project number 16me0542k. the responsibility for the content of this publication lies with the author. selection of the appropriate classifier is typically based on available compute resources or the input sample.existing approaches have limitations that can lead to reduced accuracy or excessive compute resource usage and do not leverage the properties of streaming data resulting in non-optimal decisions - . this paper introduces two novel techniques for runtime decision-making: difference detection (dd) eenns and temporal patience (tp) eenns. these techniques improve termination decisions by leveraging ee output similarity over time to select the best exit for the current sample, thus focusing the similarity metric on the relevant information of the input data, which is based on features extracted by the already executed neural network (nn) layers. 
this creates a simple similarity metric that focuses on relevant features for the nn's task. additionally, it improves the efficiency by sharing computations between the network inference and the similarity computation and reduces the memory footprint compared to input filtering approaches which need to store the reference input data that is often significantly larger than the classification output vector.to the best of our knowledge, this is the first paper that leverages the temporal correlation of the sensor data to guide the termination decision process. these techniques improve termination decisions by leveraging ee output similarity over time to select the best exit for the current sample, thus focusing the similarity metric on the relevant information of the input data, which is based on features extracted by the already executed neural network (nn) layers. additionally, it improves the efficiency by sharing computations between the network inference and the similarity computation and reduces the memory footprint compared to input filtering approaches which need to store the reference input data that is often significantly larger than the classification output vector.early exit neural networks (eenns) improve inference efficiency and speed by incorporating multiple output branches, called early exits (ees), at different depths. template-matching is similar to our approach but lacks temporal components and its input similarity calculation would create too much overhead for the mcus due to the large radar data samples.the dd-eenn calculates the change in the classifier's output vector as the euclidean distance between the current classification output vector (⃗ o) and the vector of a previous sample (⃗ ot initial ). this change is defined as the distance (d) between the output vectors of the first ee classifier (⃗ ot,exit 0 ) between samples from two time-steps (t1 and tinitial) (see eq. the initial sample of a scene is labeled based on the majority vote of all classifiers, eliminating reliance on unreliable confidence-based metrics (see eq. if the change exceeds the threshold, the prediction is based on the majority vote of all classifiers, indicating the start of a new scene and setting the current sample as new reference (see eq. the use of the early classifier to calculate the change metric improves efficiency by reusing operations between the dd and the inference task.the dd-eenn relies on the similarity between samples to reuse previous predictions, making its efficiency and accuracy dependent on the defined threshold for an acceptable change. instead of always using the first classifier, this variant uses the first classifier that agrees with the majority vote of the initial sample of the current sequence for the following inputs of that sequence (see eq. the efficiency improvement of the dd and tp solutions came from their execution strategy, where only the earliest or previously selected exit classifier was executed as long as the change between the current and initial samples remained below the threshold. comparing the accuracy of the majority voting approach to the confidence-based solution, we found that the majority vote achieved an accuracy of 71. the majority vote and the optimally tuned confidence-based eenn yielded similar accuracies for this use case, indicating the potential for even greater efficiency gains when combining confidence-based labeling for new scenes with the dd approach. 
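A compact sketch of the difference-detection rule described above: the cheapest exit always runs; if the Euclidean distance between its output vector and the stored reference from the start of the scene stays below a threshold, the previous prediction is reused; otherwise all exits run, a majority vote is taken, and the reference is reset. The exit heads, the threshold value, and the state handling are placeholders of ours, not the paper's implementation.

import numpy as np

def dd_eenn_step(x, state, exits, threshold=0.5):
    """One inference step of a difference-detection early-exit network (sketch).

    `exits` is a list of callables; exits[0] is the cheapest classifier head.
    `state` holds the reference output vector and label from the scene start.
    """
    o_first = exits[0](x)                                  # always-run early exit
    if state is not None and np.linalg.norm(o_first - state["ref"]) < threshold:
        return state["label"], state                       # same scene: reuse previous prediction
    votes = [np.argmax(o_first)] + [np.argmax(e(x)) for e in exits[1:]]
    label = max(set(votes), key=votes.count)               # majority vote over all exits
    return label, {"ref": o_first, "label": label}         # new scene: reset the reference

# toy usage with random "exit heads"
rng = np.random.default_rng(0)
exits = [lambda x, W=rng.normal(size=(4, 3)): x @ W for _ in range(3)]
state = None
for t in range(5):
    x = rng.normal(size=4)
    label, state = dd_eenn_step(x, state, exits)
    print(t, label)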
future work can focus on utilizing the similarity measurement for additional use cases like monitoring or could evaluate the approach on other data modalities like audio or image data exploring various eenn architectures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/365.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/365.txt new file mode 100644 index 0000000000000000000000000000000000000000..b7791d17cb17de86ce2baf6a8fbcf4457f3ccdd4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/365.txt @@ -0,0 +1 @@ +geospatial science has developed as a vibrant field characterised by intellectual vigour, conceptual expansion, and improved analytical skills as a consequence of the quantitative revolution in the subject of geography through a spatially integrated socio-environmental science that outshines prior disciplinary ties, borders, and limitations (berry et al., 2008). geospatial science, commonly referred to as geomatics (aina 2012), is a multidisciplinary discipline that focuses on comprehending, analysing, and visualising spatial data about the earth's surface using information technology to describe the connections between geography, individuals, places, and earth processes. technologies like global positioning system (gps), geographic information systems (gis), and remote sensing are frequently used as observational, measuring, and analytical tools, helping in the understanding of numerous events by providing the information with a spatial context. geospatial technology is being used increasingly in every industry today, including resource management, disaster management, forestry, logistics, infrastructure planning, and the study of climate change and other environmental issues (dangermond and goodchild, 2020). geospatial technology and the information created are becoming increasingly significant in all economic sectors, making the economy, society, and the environment an indispensable pillar of sustainable development. (scott and rajabifard, 2017). thus, geospatial science and technology support disaster management (ghosh and mukherjee, 2023), infrastructure planning, environmental monitoring (sanyal and chowdhury, 2023), as well as location-based services. digital twin's concept involves creating virtual models of physical objects and systems to reproduce their real-world counterparts as accurately as possible. they capture both the static and dynamic behaviour of objects or systems. digital twin finds their applications in various fields, starting from real-time monitoring of objects, maintenance and optimisation of systems, designing prototypes virtually before building them to simulating and predicting climate change and monitoring the performance of aircraft and grid systems. in the backdrop of facing the change and modernisation of manufacturing sectors and changing to smart manufacturing, the digital twin, as a novel technological tool for implementing smart manufacturing, has drawn numerous scholars' research and discussion. although this notion has been offered for some time, there are few project uses of a digital twin as a technical instrument, the referenceable expertise is barely any, and the reference content is the primarily theoretical and conceptual study (zhou et al., 2022). 
geospatial digital twin (gdt) emphasizes the geospatial attributes of the geographical settings, incorporating precise location and spatial layers for building a comprehensive knowledge of the spatial environment and its entities. therefore, the geospatial concept and network between the entities are one of the core parts of gdt. thus, implementing gdt is not a straightforward process and needs a variety of spatial computing technologies (eunus et al., 2023), including graph theory.graph algorithms are used extensively in location-based services and analysis (wang et al., 2019). graph algorithms analyse spatial connections and relationships between two points (demšar et al. 2008) or locations. graph theory is the study of the mathematical structures known as graphs, which are used to represent pairwise interactions between objects (singh 2014) (table 1). graphs may be spatial or non-spatial graphs, which further contain both directed and undirected graphs with weighted and unweighted components (table 2). real spatial or non-spatial networks possess characteristics specific to one of the following four graph types: regular, random, small world, and scale-free (anderson and dragićević 2020). an adjacency matrix can be used to depict the organisation of these graphs (anderson et al., 2020).the initial use of graph theory (euler et al., 1741), 200 years earlier, was a location-based problem known as "seven bridges of konigsberg", where euler demonstrated that it was impossible to travel over all seven of the bridges that connect the islands without ever using the same bridge twice. this approach, also known as network science, is shown by the shortest path routing algorithms (table 3) between two points (dijkstra et al., 1959). the theorem was developed as a result of euler's discoveries, which served as the cornerstone of network science. the findings also led to the conclusion that graph theory may be used to uncover and represent many structural properties (anderson et al., 2020). in the 19 th century, cayley's studies formed the beginning of enumerative graph theory, using trees as the types of graphs, and focused on calculating the number of certain types of graphs (bell et al., 2015). social network analysis, one of the earliest fields of application (de nooy et al., 2005), was where the three types of centrality metrics were initially established (freeman et al., 1979). in social networks, vertices stand in for people or institutions, and edges show their connections to one another. in social network analysis, a person's reachability-or how readily information can go to that personis described by their degree and closeness centralities. journey from then till today, graph theory has extended its applications from social media network cybersecurity to fields of bioinformatics and cryptography. 
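Since shortest-path routing recurs throughout these geospatial applications (evacuation planning, network analysis, location-based services), here is a self-contained Dijkstra implementation over a small weighted adjacency dictionary; the toy road network and its edge weights are invented purely for illustration.

import heapq

def dijkstra(graph, source):
    """Shortest-path distances from `source` over a dict-of-dicts weighted graph."""
    dist = {source: 0.0}
    pq = [(0.0, source)]
    while pq:
        d, u = heapq.heappop(pq)
        if d > dist.get(u, float("inf")):
            continue                          # stale queue entry
        for v, w in graph[u].items():
            nd = d + w
            if nd < dist.get(v, float("inf")):
                dist[v] = nd
                heapq.heappush(pq, (nd, v))
    return dist

# toy road network: travel times between junctions, in minutes
roads = {
    "a": {"b": 4, "c": 2},
    "b": {"a": 4, "c": 1, "d": 5},
    "c": {"a": 2, "b": 1, "d": 8},
    "d": {"b": 5, "c": 8},
}
print(dijkstra(roads, "a"))   # e.g. shortest evacuation times from junction "a"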
with the increase in data connectedness and breakthroughs in graph technology, valuable insights are obtained when integrated with queries, statistics, algorithms, ml, and ai (anderson et al., 2020).the objective of this article is to understand some fundamental concepts and examples of graph algorithms, their applications in geospatial science (figure 1 and table 4), digital twin, and the methods by which geographic data, network sciences, and graph algorithms can be used to represent, analyse, and simulate complex geographical systems for better decision-making.the objective of this article is to understand some fundamental concepts and examples of graph algorithms, their applications in geospatial science (figure1and table4), digital twin, and the methods by which geographic data, network sciences, and graph algorithms can be used to represent, analyse, and simulate complex geographical systems for better decision-making. this method is a graph transformation algorithm that has the freedom to use any shortest path algorithm. the methodology applies a unique structure of semantics graphs and efficient data processing technologies. current research trends include understanding the spatial dynamics of objects operating on spatial network structures, including network theory as gas, and comprehending the intricately intertwined relationship between spatial structure and space-type dynamics. determining a bgi network with a preference for stormwater management, detecting via satellite pictures patches of blue and green, and identifying prospective bgi corridors using graph theory were the three stages that were taken. the study informed us of the challenges in allocating resources into a successful supply chain., 2018)according to the research, a precise and effective evacuation plan that includes numerous evacuation routes while promptly considering changing road conditions is critical to limiting damage. as a result, dijkstra's algorithm is performed from one place within the buffer to every recognised safe destination, the shortest time to travel path and several alternate paths are estimated.(demšar et al. for visualisation purposes, the geometry of the network's elements (i. a graph signal reconstruction model is superimposed on a graph that was learned from the data in this article's proposed signal reconstruction framework for air pollution monitoring data.the study of social phenomena using social network analysis, which uses network and graph theory, has been proven to be extremely useful in fields like criminology. the applications of graph theory in geospatial research are anticipated to rise further with the continuous development of technology and the increasing complexity of geographical data, making it a dynamic and essential topic for addressing the linked spatial concerns of our world. this article underscores the critical role of graph theory algorithms in addressing real-world geospatial challenges, emphasising their practical significance and potential for future innovations in spatial analysis and management, including the geospatial digital twin concept.[kruskal et al. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/366.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/366.txt new file mode 100644 index 0000000000000000000000000000000000000000..8edb1192dece30829eafca3c22f027a3e3602ab2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/366.txt @@ -0,0 +1 @@ +recent research has shown a growing interest in using concepts from differential geometry and topology to improve learning algorithms (bortoli et al., 2022;hensel et al., 2021;chamberlain et al., 2021;huang et al., 2022;barbero et al., 2022b;a). while euclidean spaces have been commonly used in machine learning, it has been found that using geometries that align better with the underlying data structure can lead to significant improvements. one approach is to use constant curvature model spaces like the poincaré ball (mathieu et al., 2019), hyperboloid (chami et al., 2019), or hypersphere (zhao et al., 2019) to encode latent representations of data. another approach is to use product spaces (gu et al., 2018;skopek et al., 2019;sáez de ocáriz borde et al., 2023b;a;zhang et al., 2020), which combine multiple model spaces, allowing for more complex representations while maintaining computational tractability. however, determining the optimal configuration of product manifolds for representing data is currently done heuristically, lacking efficiency and a principled framework. in this work, we propose a computational approximation of the gromov-hausdorff distances between product manifolds of model spaces. this measure of closeness between manifolds can be useful for machine learning applications where finding an appropriate manifold to represent the embedding space of data or latent representations is crucial. this quantity is infinite, therefore we will restrict our calculations to compact subsets a and b. next, considering two metric spaces a and b, we denote by es(a, b), the "embedding spaces of a and b", the triples (x, f, g) where x is a metric space and f : a → x and g : b → x are isometric embeddings. now we can summarise our strategy to estimate d_gh(b_e2, b_h2) as follows (for the other pairs of model spaces it will be entirely analogous). all our computations will be done in dimension two, so we will simply make precise how to generate points in b_e2, b_s2 and b_h2, using the descriptions b_e2 = {(r cos(t), r sin(t)) : r ∈ [0, 1], t ∈ [0, 2π)} and b_s2 = {(sin(β) cos(α), sin(β) sin(α), cos(β)) : α ∈ [0, 2π), β ∈ [0, 1]}. as we anticipated above in the outline of our strategy to estimate d_gh(b_e2, b_h2), in order to give an explicit well-distributed collection of points in b_h2, it is enough to give a well-distributed collection of points in b_e2 and consider its image under the exponential map exp_0 : b_e2 → b_h2, where we have identified b_e2 with the ball of radius one of r^2 ≅ t_0(h^2), i.e. the tangent plane to h^2 at the origin. for the euclidean plane, we sample b_e2 = {(r cos(t), r sin(t)) : r ∈ [0, 1], t ∈ [0, 2π)}, with a discretization of 10,000 points in both r and t. in order to generate points in b_h2 (hyperbolic space), we will use the points in b_e2 (euclidean space) as a reference. we employ the following functions to embed b_e2 (euclidean space) and b_s2 (spherical space) into e^(6n-6). to optimize for f_k, we explore all possible permutations of the basis vectors of e^6: e_1, e_2, e_3, e_4, e_5, e_6.
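The point-generation step described above can be made concrete with a small NumPy sketch: the unit disk b_e2 is discretized in polar coordinates, the same grid is pushed into the hyperboloid model of h^2 via the radius-preserving exponential-type map, and b_s2 is sampled from the spherical parameterisation quoted in the text. The hyperboloid-model formula and the reduced grid resolution (100 x 100 rather than the 10,000-point discretizations mentioned above) are our own choices for the sketch.

import numpy as np

r = np.linspace(0.0, 1.0, 100)                    # radius in the unit disk
t = np.linspace(0.0, 2 * np.pi, 100, endpoint=False)
R, T = np.meshgrid(r, t)

# points of b_e2 (unit disk in the euclidean plane), as in the text
b_e2 = np.stack([R * np.cos(T), R * np.sin(T)], axis=-1).reshape(-1, 2)

# image of the same polar grid under exp_0 into the hyperboloid model of h^2:
# points (x, y, z) satisfying x^2 + y^2 - z^2 = -1
b_h2 = np.stack([np.sinh(R) * np.cos(T),
                 np.sinh(R) * np.sin(T),
                 np.cosh(R)], axis=-1).reshape(-1, 3)

# points of b_s2 (spherical cap of radius one), as parameterised in the text
A, B = np.meshgrid(t, r)                          # alpha in [0, 2*pi), beta in [0, 1]
b_s2 = np.stack([np.sin(B) * np.cos(A),
                 np.sin(B) * np.sin(A),
                 np.cos(B)], axis=-1).reshape(-1, 3)

print(b_e2.shape, b_h2.shape, b_s2.shape)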
for instance, e^2 × h^2 and e^2 × s^2 would be connected with edge weighting w_{h^2,s^2}, while s^2 × s^2 and e^2 × e^2 would have no connection in the graph. however, there would be no connection between e^2 and e^2 × e^2 × e^2, or between h^2 and e^2 × s^2. as an illustration, the connectivity strength between e^2 × h^2 and s^2 × h^2 is w_{e^2,s^2}, since d_gh(e^2 × h^2, s^2 × h^2) = d_gh(e^2, s^2) + d_gh(h^2, h^2) = d_gh(e^2, s^2), given that d_gh(h^2, h^2) = 0. given a_0 ∈ a and b_0 ∈ b, we can define the isometric embeddings f : a → a × b and g : b → a × b given by f(a) = (a, b_0) and g(b) = (a_0, b). with exactly an analogous argument as before, we can notice that given two compact balls of radius one, b and b', with centres x_0 and x'_0, we can embed b into b × b' by the mapping f : b ↦ (b, x'_0). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/367.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/367.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef7282a63825b5e503df1285923a461c86be8abe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/367.txt @@ -0,0 +1 @@ +the notion of individual fairness is a formalization of an ethical principle, "treating like cases alike," which has been argued for by, for example, aristotle. in a fairness-aware machine learning context, dwork et al. first formalized the notion. in their formalization, a similar pair of data in an unfair space should be mapped to similar positions in a fair space. we propose to re-formalize individual fairness as statistical independence conditioned on individuals. this reformalization has the following merits. first, our formalization is compatible with that of dwork et al. second, our formalization makes it possible to combine individual fairness with the fairness notions equalized odds and sufficiency, as well as statistical parity. third, though their formalization implicitly assumes a pre-process approach for making fair predictions, our formalization is applicable to an in-process or post-process approach. the notion of individual fairness is a formalization of an ethical principle, "treating like cases alike," which has been argued for by, for example, aristotle. first, our formalization is compatible with that of dwork et al. after showing the formalization of individual fairness by dwork et al. individual fairness is one of the fairness criteria, and dwork et al. formalized it as follows. an instance, x, is assumed to contain the information relevant to an individual except for the individual's sensitive information, and x is considered as a representation of the individual. we finally show that our formalization of individual fairness is compatible with that of dwork et al. (4), and our formalization of individual fairness is compatible with fairness through unawareness. hence, they implicitly omit the sensitive information in prediction, and their method satisfies a condition of fairness through unawareness.
they are similar to non-sensitive features, but it is assumed that all the features are legal if they are not sensitive. if we extend the notion of individual fairness by treating it as conditioning a fairness criterion by x, we can create an individualized version of the criterion. conditioning by x enables these two fairness criteria to be converted into individual versions of them. as described above, criteria designed so that fairness is maintained at a group level can be converted to the corresponding criteria at an individual level by conditioning on non-sensitive features. the approach of dwork et al. is a kind of pre-process, and their formalization of individual fairness presupposes that approach. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/368.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/368.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a7d5c35a7b3c638897cda567ff23a654012b1fe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/368.txt @@ -0,0 +1 @@ +back pain is a leading cause of occupation-induced disability and results in large amounts of lost productivity annually. most occupational back pain is caused by roles requiring repetitive tasks such as heavy lifting of objects. there are many ergonomic guidelines for lifting with the goal of reducing the risk of back pain, but it is difficult for workers to consistently follow these guidelines in all situations. many may be unable to lift in the required manner due to poor workstation designs or physical limitations. monitoring a lifting workstation can ensure compliance with safe lifting techniques as well as aid in determining if workers are consistently lifting in an unsafe manner to perform the work the job requires. however, this type of monitoring requires significant overhead and doesn't provide sufficient data to create an objective methodology to assess risk. an automated system would allow users to classify risk with minimal overhead and provide real-time feedback to workers in an attempt to reduce long-term risk. both lift classification and lift detection are known as human activity recognition (har), which has been extensively studied in machine learning under various sensor modalities. har systems have been successful in video-based deployments. however, automatic lift assessment would require cameras to be placed in every lift location, which is prohibitively expensive and raises concerns for the privacy of the workers. furthermore, it is impractical for temporary workstations such as construction zones and contract work. in contrast, inertial measurement unit (imu) sensors are wearable, can detect motion regardless of workplace location, and don't have the privacy concerns associated with cameras. imu sensors are widespread in common consumer devices, such as smart watches and phones, and therefore provide a significant advantage for har problems. previous work was done to develop a model that could identify a lifting event from a subset of laboratory-gathered data with an f1 score of 97%. f1 is the harmonic mean of precision and recall and is commonly used to evaluate binary classifiers. the evaluation data was randomly sampled using k-fold cross validation. the model trained was able to reliably identify lifting and non-lifting events from data gathered under the same conditions as its training data.
however, when this model was applied to a real-world environment (data collection in lab and real world is discussed in section ii), the f1 score dropped to 32.8%, showing that while the model could reliably identify lifting events within a dataset environment, it failed to identify more general lifting events.poor general performance on a model is not uncommon, and there is a significant amount of general guidance published . however, most of this work pertains to datasets where the capture device use does not significantly affect the data. for example, creating an object recognition model from images, the camera settings do not majorly change the underlying dataset, since all reasonable cameras produce clear images, either with or without the relevant recognition object. however, in har models, the location of the imu sensor majorly changes the data, and it is not currently possible to standardize imu data from sensors placed in different locations on the body. this makes it unfeasible to use some common techniques for improving model performance, such as transfer learning and generalizing training data from public datasets. therefore, adjustments to previous techniques were developed to make them suitable for har model applications. the model trained was able to reliably identify lifting and non-lifting events from data gathered under the same conditions as its training data. however, in har models, the location of the imu sensor majorly changes the data, and it is not currently possible to standardize imu data from sensors placed in different locations on the body. to identify the start and stop lift time for training and testing the imu sensor model, code was written to synchronize the imu data with the mocap data. the time of the lift, in hh:mm:ss:ms format, was stored, from mocap data, as metadata for each lift trial and each imu sensor data point had a unix epoch timestamp associated with it.to perform model evaluation, the team sampled a small segment of phase 3 data and used subjective analysis to determine if the model was performing well. we defined lifting with these parameters because the majority of the risk associated with the lifting motion is within these two time intervalsthe lifting labeling in phase 1&2 training data differed slightly from this definition. when it was discovered the model performed poorly on real-world data, the first step was to perform a detailed examination of the training data (phase 1&2). fortunately, the incorrect orientation was consistent, meaning that we could use data from other sensors to infer if the sensor was incorrectly placed and restructure the data mathematically to emulate the correct sensor placement. the combination of frame-by-frame data and synthetic imu data generation makes this dataset a feasible avenue for incorporating additional data. however, while the tooling provides a simulation of accelerometer data, gyroscopic data is provided as orientations, requiring modifications to provide angular velocities as in the niosh data set. 
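The last point above, that the synthetic-data tooling outputs orientations while the NIOSH-style IMU data needs angular velocities, can be handled with a standard finite-difference conversion. The sketch below uses SciPy's rotation utilities; it is a generic conversion of ours, not the authors' tooling, and the sampling rate and rotation axis are made up for the example.

import numpy as np
from scipy.spatial.transform import Rotation as R

def orientations_to_angular_velocity(quats, timestamps):
    """Approximate body-frame angular velocity (rad/s) from an orientation time series.

    `quats` is an (n, 4) array of unit quaternions (x, y, z, w), `timestamps` in seconds.
    The relative rotation between consecutive samples is converted to a rotation vector
    and divided by the time step, a standard finite-difference approximation.
    """
    rots = R.from_quat(quats)
    omega = np.zeros((len(quats) - 1, 3))
    for i in range(len(quats) - 1):
        delta = rots[i].inv() * rots[i + 1]                 # rotation from frame i to i+1
        omega[i] = delta.as_rotvec() / (timestamps[i + 1] - timestamps[i])
    return omega

# toy example: a slow rotation of 0.5 rad/s about the z axis, sampled at 100 Hz
ts = np.arange(0, 1, 0.01)
quats = R.from_euler("z", 0.5 * ts).as_quat()
print(orientations_to_angular_velocity(quats, ts)[:3])      # each row is ~[0, 0, 0.5]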
in our process, the model did not have a general understanding of what a lifting event was, so finishing training with a small sample of real-world data did not improve performance because the model was already poorly generalized.it was noted inthat filtering the niosh data set was considered but ultimately not used due to both better performance with raw data and reduced pre-processing time; although it is unclear what filtering methods were evaluated, the results we have obtained seem to align with these preliminary findings.hiding data from a machine learning model typically does not produce better results, however in this situation it did because the sensors we removed were the least representative of real-world lifting motions. the authors ofhave successfully emulated imu sensor data from motion-capture data by placing virtual sensors on models, which could significantly expand the reach of future training data sets. furthermore, the authors have provided their tooling for generating synthetic imu data, although modifications may be necessary to reconcile differences between synthetic and real-world data sets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/369.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/369.txt new file mode 100644 index 0000000000000000000000000000000000000000..0bccbdbce5d29883c2e6c5b1f8df01876e9d3a0e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/369.txt @@ -0,0 +1 @@ +molecular property prediction has been an important topic of study in fields such as physical chemistry, physiology, and biophysics . it can be defined as a graph label prediction problem and addressed by machine learning. however, the graph learning models such as graph neural networks (gnns) must overcome issues in data scarcity, as the creation and testing of real-world molecules is an expensive endeavor . to address labeled data scarcity, model pre-training has been utilized as a fruitful strategy for improving a model's predictive performance on downstream tasks, as pre-training allows for the transfer of knowledge from large amounts of unlabeled data. the selection of pre-training strategy is still an open question, with contrastive tasks and predictive/generative tasks being the most popular methods.attribute reconstruction is one predictive method for graphs that utilizes masked autoencoders to predict node or edge features . masked autoencoders have found success in vision and language domains and have been adopted as a pre-training objective for graphs as the reconstruction task is able to transfer structural pattern knowledge , which is vital for learning specific domain knowledge such as valency in material science. additional domain knowledge which is important for molecular property prediction is that of functional groups, also called chemical motifs . the presence and interactions between chemical motifs directly influence molecular properties, such as reactivity and solubility . therefore, to capture the interaction information between motifs, it is important to transfer inter-motif structural knowledge during the pre-training of graph neural networks.unfortunately, the random attribute masking strategies used in previous work for graph pre-training were not able to capture the inter-motif structural knowledge . that is because they rely on neighboring node feature information for reconstruction . 
notably, leveraging the features of local neighbors can contribute to learning important local information, including valency and atomic bonding. however, gnns heavily rely on the neighboring node's features rather than graph structure , and this over-reliance inhibits the model's ability to learn from motif structures as message aggregation will prioritize local node feature information due to the propagation bottleneck . for example, as shown on the left-hand side of figure 1, if only a (small) partial set of nodes were masked in several motifs, the pre-trained gnns would learn to predict the node types (i.e., carbon) of two atoms in the benzene ring based on the node features of the other four carbon atoms in the ring.because a random masking strategy is not guaranteed to transfer inter-motif structural knowledge into downstream, we propose a novel masking strategy that forces the transfer of inter-motif knowledge. first, because all features of the nodes within the motif are masked, our strategy reduces the amount of feature information being passed within the motif and relieves the propagation bottleneck, allowing for the greater transfer of inter-motif feature and structural information.graph property prediction given a graph g = (v, e) ∈ g with the node set v for atoms and the edge set e ⊂ v × v for bonds, we have a d-dimensional node attribute matrix x ∈ r |v|×d that represents atom features such as atom type and chirality.g must adhere to two criteria: (1) each node within the motif must be within a k-hop neighborhood (k equals number of gnn layers) of an inter-motif node, and (2) sampled motifs may not be adjacent.for motif-aware masking, we have the choice of masking the features of all nodes within the motif or choosing to only mask the features of a percentage of nodes within each sampled motif.a traditional assumption was that a node would receive stronger influence from intra-motif nodes than from inter-motif nodes, due to shorter distance on the graph. while we have observed the accuracy advantages of motif-aware masking, we are curious about whether the assumption was broken by this novel strategy -the inter-motif influence may play a significant role in predicting node attributes and molecular graph pre-training.to measure the influence generally from (either intra-motif or inter-motif) source nodes on a target node v, we must design a measure that quantifies the influence from any source node u in the same graph g, denoted by s(u, v).while the infratio measurements are able to compare general inter-and intra-motif influences, these measures combine all inter-motif nodes into one set and do not consider the number of motifs within each graph. from this ranking, we define our score for inter-motif node influence averaged at the node, motif, and graph levels, derived from a similar score measurement used in information retrieval, mean reciprocal rank (mrr):.similar to the infratio measurements, mrr node directly captures the impact of the influence ranks for each node within the full graph set, whereas mrr graph alleviates bias on the number of nodes within a graph by averaging across in information retrieval, mrr scores are used to quantify how well a system can return the most relevant item for a given query. 
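The MRR-style score can be sketched as follows under our reading of the definition above: for every target node, all source nodes are ranked by their influence, the reciprocal rank of the best-ranked intra-motif source is taken, and the values are averaged. A lower score then means intra-motif sources sit lower in the ranking, i.e. greater inter-motif knowledge transfer. The influence matrix and motif assignment are random toy inputs, and the exact node-, motif- and graph-level averaging in the paper may differ.

import numpy as np

def mrr_intra_motif(influence, intra_mask):
    """Node-level MRR of intra-motif influence (sketch).

    `influence[v, u]` is the influence of source node u on target node v;
    `intra_mask[v, u]` is True when u lies in the same motif as v.
    """
    rr = []
    for v in range(influence.shape[0]):
        order = np.argsort(-influence[v])                 # sources, most influential first
        ranks = np.nonzero(intra_mask[v][order])[0]
        if ranks.size:
            rr.append(1.0 / (ranks[0] + 1))               # reciprocal rank of best intra-motif source
    return float(np.mean(rr))

rng = np.random.default_rng(0)
influence = rng.random((6, 6))
intra = np.zeros((6, 6), bool)
intra[:3, :3] = intra[3:, 3:] = True                      # two toy motifs of 3 nodes each
print(mrr_intra_motif(influence, intra))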
however, for our measurement, lower scores are preferred, as a lower intra-motif influence rank indicates greater inter-motif node influence. for the sake of clear visualization, we define an inter-motif score which indicates inter-motif knowledge transfer according to the number of motifs n within a graph. the relatively low values indicate that the intra-motif node influence is still highly important for the pre-training task, but our method demonstrates the highest inter-motif knowledge transfer amongst the baselines. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/37.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/37.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d254ad35a5138d5bf790c15b67542e943c99aec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/37.txt @@ -0,0 +1 @@ +in speech translation (st), end-to-end (e2e) neural approaches have gained traction as a promising line of research towards systems with lower latency and less error propagation. however, developing models of this type can be challenging because the aligned speech-to-translation data is scarce (wang et al. 2020b;dong et al. 2021b;zheng et al. 2021;tang et al. 2021a). this leads researchers to explore methods that resort to large-scale unlabeled data. a simple one is to use pre-trained models to encode acoustic and/or textual input (pino et al. 2020;ye, wang, and li 2021), whereas others train st models using additional data of either automatic speech recognition (asr) or machine translation (mt), or both (wang et al. 2020c;xu et al. 2021;indurthi et al. 2021). such a paradigm provides an opportunity to make use of both labeled and unlabeled data, say, the speech, text, speech-to-transcription, text-to-text, and speech-to-text data. for example, one can feed all available data into an autoencoder to train st models (zheng et al. 2021). more recently, it has been found that stronger results can be achieved by using an explicit text encoder to ease the training on the mt data (li et al. 2021). here, we take a further step towards more effective use of both labeled and unlabeled data in st. we claim that the source-language text encoder plays an important role in leveraging asr and mt although it is not involved in standard end-to-end st. we then develop a method (named multi-step pre-training for speech translation, or msp-st for short) to expose the text encoder to both asr and mt learning processes, and force them to assist each other. (figure 1: model architectures. dotted boxes mean that the items are dropped in st tuning and inference. s = acoustic signal sequence, t = transcription, x = source-language word sequence, and y = target-language word sequence.) having the text encoder as the bridge between asr and mt is perhaps helpful: the resulting st system can learn acoustic and textual encoding simultaneously (see figure 1). note that such a design also addresses the role mismatch problem wherein the pre-trained asr encoder does not behave like what the target-language decoder expects (wang et al. 2020b;xu et al. 2021). to our knowledge, this is the first work to discuss the problem in large-scale pre-training with all asr, mt and st data on end-to-end st tasks. another improvement is that we denoise the text encoder so that it is robust to the noisy transcription-like input.
having the text encoder as the bridge between asr and mt is perhaps helpful: the result st system can learn acoustic and textual encoding simultaneously (see figure1). this is beneficial when the text encoder is used to supervise the learning of the st encoder, where the speech-to-transcription data is the input. broadly speaking, like any encoder-decoder model, one can train this architecture in a standard pre-training + fine-tuning fashion(lewis et al.but such a model does not accept source-language text as input, and it is non-trivial to learn the model on sourcelanguage text data.• the text encoder can supervise the training of the st encoder so that the behavior of the st encoder is more consistent with that of a standard mt encoder. to make use of these intuitions, we improve the st encoder and develop a contrastive training method to incorporate the text encoder into the asr-based training.we propose to use the text encoder to supervise the training of the st encoder. a common way is to add the ctc-based loss to the acoustic model. here we choose contrastive training as a way to connect the st encoder and the text encoder. more formally, let a(s) be the output of the st encoder given the speech s, and m(t) be the output of the pre-trained text encoder given the transcription t. so we run the asr training process after the mt training process and freeze the text encoder during asr training.there are two jobs for the text encoder: 1) encoding real source-language sentences in mt training, 2) encoding transcriptions in asr training. 2017;bérard et al. 2021), because there is a great length difference between acoustic sequence and word sequence, and the two models have different scopes of encoding, i. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/370.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/370.txt new file mode 100644 index 0000000000000000000000000000000000000000..e37fe9600e6b4df0b824b10d3f45ba4a6d0a8222 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/370.txt @@ -0,0 +1 @@ +synthetic data generation is vital in various industries, like finance, telecommunication, and healthcare where data-driven decision-making is crucial jordon et al. . it resolves data scarcity and quality concerns by providing synthetic data that preserves statistical properties and relations with the original data. synthetic data enables testing new ideas without compromising real data, blending multiple sources, and protecting individual privacy voigt and von dem bussche . however, using synthetic data may cause performance degradation in modeling hittmeir et al. where utility degradation depends on the fidelity of the data generation process and the downstream task. to address this issue and maintain synthetic data quality, the development of a framework to mitigate the degradation is indispensable.the majority of existing approaches are "unsupervised" in the sense that they do not take into account the downstream task. for instance, the methods discussed in patki et al. treat the output this work proposes a novel synthetic data generation framework, the supervised and composed generative optimization approach for tabular data (sc-goat), to address the aforementioned issues. the framework comprises two key steps. firstly, we incorporate a supervised component customized for the specific downstream task. 
to achieve this, we leverage a bayesian optimization approach to fine-tune the hyperparameters related to the neural networks. additionally, apart from optimizing the traditional loss of gan or tvae through their parameters, we also tune the hyperparameters based on a loss function specific to the downstream task on the validation set. subsequently, we evaluate the model's performance based on the test error to accurately assess its effectiveness.in the second step, we adopt a meta-learning approach, leveraging bayesian optimization, to identify the optimal mixture distribution of existing synthetic data generation methods. to the best of our knowledge, this approach is the first to generate synthetic data based on a mixture of multiple synthetic data generation methods. from each method we learned in the first step, we explored multiple data generation techniques and tuned the proportion of data sets sampled. this approach is motivated by the quest to discover the projection of the true underlying data distribution onto the set encompassing various synthesizers.employing supervised bayesian optimization, we search for the ideal mixture that optimizes the downstream performance metric. by dynamically combining the strengths of different data generation methods, we aim to enhance the overall synthetic data quality and its suitability for downstream tasks. . synthetic data enables testing new ideas without compromising real data, blending multiple sources, and protecting individual privacyvoigt and von dem bussche . for instance, the methods discussed inpatki et al. to the best of our knowledge, this approach is the first to generate synthetic data based on a mixture of multiple synthetic data generation methods. by dynamically combining the strengths of different data generation methods, we aim to enhance the overall synthetic data quality and its suitability for downstream tasks.the synthetic data vault (sdv) project, utilized for conducting most of the experimentspatki et al.by employing bayesian optimization in this manner, we can efficiently fine-tune the synthesizer models and enhance the overall performance in generating data that closely resembles the real data set boosting the downstream performance. to evaluate synthetic generation models for tabular data, various benchmarking approaches are available, allowing flexibility in adapting the loss function to suit the specific objectives of synthetic data generation.we evaluate our approach on three diverse data sets discussed in the previous section: the adult data set 1 , the balanced credit card data set4, and the imbalanced credit card data set2. this is achieved by training an xgboost classifierchen and guestrin on the training data set and subsequently evaluating its performance on a separate validation data set.to comprehensively evaluate the effectiveness and improvements of our method, as well as the quality of the generated data compared to the original real data set, we fitted xgboost on the original data and assessed the model's performance in terms of auc. by comparing our results against the baseline xgboost model fitted on real data, we gain valuable insights into the efficiency of our approach and the similarity between the generated synthetic data and the real data.considering real-life scenarios often involve highly imbalanced data, we evaluated the performance of our approach on an imbalanced credit card data set. . 
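The second step described above, searching for the mixture proportions over several synthesizers that maximize a downstream validation metric, can be sketched with stand-ins. To keep the sketch dependency-light we use two toy "synthesizers", a logistic-regression classifier instead of XGBoost, plain random search instead of Bayesian optimization, and AUC on a held-out real validation split as the objective; only the overall structure mirrors the approach described in the text.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score

rng = np.random.default_rng(0)
X = rng.normal(size=(600, 2))
y = (X[:, 0] + 0.5 * X[:, 1] + 0.3 * rng.normal(size=600) > 0).astype(int)
X_train, y_train, X_val, y_val = X[:400], y[:400], X[400:], y[400:]

def make_synth(noise):
    """Stand-in 'synthesizer': resample real training rows and perturb them with Gaussian noise."""
    def sample(n):
        idx = rng.integers(0, len(X_train), n)
        return X_train[idx] + noise * rng.normal(size=(n, 2)), y_train[idx]
    return sample

synthesizers = [make_synth(0.1), make_synth(0.8)]   # stand-ins for two generative models

def downstream_auc(weights, n=400):
    """Mix synthetic samples according to `weights`, train a classifier, score AUC on real validation data."""
    counts = np.maximum((np.asarray(weights) * n).astype(int), 1)
    parts = [s(c) for s, c in zip(synthesizers, counts)]
    Xs = np.vstack([p[0] for p in parts])
    ys = np.concatenate([p[1] for p in parts])
    clf = LogisticRegression().fit(Xs, ys)
    return roc_auc_score(y_val, clf.predict_proba(X_val)[:, 1])

# random search over mixture proportions (the paper tunes these with Bayesian optimization)
candidates = rng.dirichlet([1.0, 1.0], size=20)
best_w = max(candidates, key=downstream_auc)
print("best mixture:", np.round(best_w, 2), "val AUC:", round(downstream_auc(best_w), 3))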
comparing the values from tables3and5shows that the auc test scores for the xgboost fitted only on real data are very close to the auc test scores for the xgboost fitted only on synthetic data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/371.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/371.txt new file mode 100644 index 0000000000000000000000000000000000000000..993060b6299fa250d49186ccf7ed9365acc06c37 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/371.txt @@ -0,0 +1 @@ +digitalization has brought a plethora of possibilities for health and social care operations, including early health interventions , which support individuals by enabling self-care practices . one of the most impactful applications of this digital transformation is the early identification of patterns that indicate an individual might dropout (discontinue) a treatment program. such applications can significantly enhance individuals' quality of life, e.g., by minimizing the risk of disease development and identifying signs of depression .building on these advances, the implementation of smart health and social care applications empowers individuals, especially the elderly people, to enhance their overall wellbeing . such digital tools are particularly crucial for elderly people, as they monitor their physical and cognitive activities, having a vital role in improving their health status.by integrating machine learning (ml) techniques into these applications, intelligent services can be developed, resulting in substantial advantages for individuals and reduced economic this publication has been partially funded by the european union's horizon 2020 research and innovation programme under the terminet project with grant agreement no. 957406. costs for health operators. however, ml algorithms require large amounts of training data to provide highly accurate predictions. the transmission of data that, in the context of health and social care, contain personally identifiable information (pii) to a central server (cs) as well as data sharing among organizations, face limitations due to the increased privacy concerns and the fragmentation posed by regulations such as the gdpr . hence, research efforts have recently turned towards the decentralization of ml, with federated machine learning (fml) emerging as an attractive research area that enables model training without data transmission.fml, introduced by mcmahan et al. , allows the training data to remain distributed on edge devices while a shared model is learned by securely aggregating client-based computed updates . unlike traditional ml, in fml, a cs is responsible for coordinating participating clients and collecting model updates instead of raw data. this distributed approach ensures that the data remains on the clients' devices, minimizing privacy and security concerns.although fml has received much attention in recent years, the final model's predictive accuracy is affected by several challenges introduced by the distributed data, with the most common problem being data heterogeneity - . since fml clients are likely to own different data distributions, careful selection of clients per federated round is crucial for a federated model to converge towards the global objective . 
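A minimal federated-averaging loop in the spirit of the FedAvg scheme introduced by McMahan et al. and referenced above: in each round a subset of clients is selected, each client runs a few local gradient steps on its own data, and the server aggregates the resulting models weighted by client data size. The logistic-regression model, the heterogeneous toy clients, and the uniform client selection are simplifications of ours, not the paper's setup.

import numpy as np

rng = np.random.default_rng(0)

def local_update(w, X, y, lr=0.1, epochs=5):
    """A few local gradient steps of logistic regression on one client's data."""
    for _ in range(epochs):
        p = 1.0 / (1.0 + np.exp(-X @ w))
        w = w - lr * X.T @ (p - y) / len(y)
    return w

# toy clients with heterogeneous label distributions (mimicking non-IID dropout data)
clients = []
for bias in (-1.0, 0.0, 1.0):
    X = rng.normal(size=(200, 3))
    y = (X[:, 0] + bias + 0.5 * rng.normal(size=200) > 0).astype(float)
    clients.append((X, y))

w = np.zeros(3)
for _ in range(20):                                             # federated rounds
    selected = rng.choice(len(clients), size=2, replace=False)  # simple client selection
    updates = [local_update(w.copy(), *clients[i]) for i in selected]
    sizes = np.array([len(clients[i][1]) for i in selected], dtype=float)
    w = np.average(updates, axis=0, weights=sizes)              # FedAvg: size-weighted aggregation
print("global model weights:", np.round(w, 3))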
however, determining which clients should be included or excluded during specific federated training rounds is a complex task that involves addressing various issues and considerations such as resource allocation, data quality and quantity. motivated by the above, we propose the application of fml-based training to predict early dropouts from active and healthy ageing applications. timely, accurate predictions of dropouts enable early interventions by health and social care operators, reduce the economic costs associated with hospitalization and, ultimately, improve the quality of life of patients, while protecting them from disease development or mood changes. in this paper, we focus on elderly people and train a deep neural network using fml to minimize privacy-related issues. the main contributions of this paper are as follows: • we explore the effectiveness of fml for predicting early dropouts from monitored interactions within a healthy ageing mobile application, a novel and very promising approach that enables early interventions for improving the overall well-being of individuals. • we comprehensively evaluate the impact of data heterogeneity and class imbalance on the predictive performance of fml. • we compare several data selection and class imbalance handling techniques. our findings indicate that by employing data selection and resampling techniques, fml approaches can achieve comparable or even superior performance compared to their centralized counterparts. the rest of this paper is structured as follows: section ii presents the related work. section iii introduces the dataset, defines the problem and fml architecture and illustrates the use-case scenarios. section iv presents the experimental details and compares the fml to centralized learning on different scenarios. finally, section v summarizes the key outcomes. the transmission of data that, in the context of health and social care, contain personally identifiable information (pii) to a central server (cs), as well as data sharing among organizations, face limitations due to the increased privacy concerns and the fragmentation posed by regulations such as the gdpr. although fml has received much attention in recent years, the final model's predictive accuracy is affected by several challenges introduced by the distributed data, with the most common problem being data heterogeneity. since fml clients are likely to own different data distributions, careful selection of clients per federated round is crucial for a federated model to converge towards the global objective. our findings indicate that by employing data selection and resampling techniques, fml approaches can achieve comparable or even superior performance compared to their centralized counterparts. in our experiments, we evaluated three distinct scenarios: i) centralized learning, ii) cross-device fml, where each client represents a single individual, and iii) cross-silo fml, where each client holds observations from multiple individuals. nonetheless, centralized learning often exhibits superior predictive accuracy to distributed learning settings and serves as the upper bound for fml. • cross-device federated learning: given the highly sensitive nature of the personal and confidential data involved in the application, we employ fml as a sophisticated approach to enhance privacy and ensure secure data handling. • centralized learning: when applied to raw data, the centralized approach achieved an accuracy of 86%, a precision of 76%, and a geometric mean of 79%.
• cross-device federated learning: in cross-device fml, the results varied from those observed in the centralized learning setting. more precisely, the application of over-under sampling in cross-device fml demonstrated a slight improvement of 10% in recall, 11% in f1 score and 7% in geometric mean compared to the centralized counterpart. despite the significant improvements achieved through the use of under-sampling or data selection techniques in cross-silo fml, the predictive accuracy still falls short compared to the centralized and cross-device fml settings. notably, when under-sampling is integrated into cross-device fml, the federated setting surpasses the predictive performance of the centralized approach. by focusing on users with data of high quality and optimizing data or user selection techniques, fml-based training can surpass centralized approaches. we compared two distributed learning settings, namely cross-device and cross-silo fml, to traditional centralized learning. this finding indicates that federated learning not only minimizes privacy concerns but, with the application of intelligent data selection or re-sampling algorithms, it can outperform centralized approaches, thereby achieving highly accurate predictions in the context of predicting early user dropouts from healthy ageing applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/372.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/372.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b972efd1ea73389cdfdc7b25643b359f3f6509e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/372.txt @@ -0,0 +1 @@ +with the growth of densely populated cities, increased traffic, and pollution from energy production and industrial activities, air pollution has emerged as a pressing concern, exerting a negative impact on both the environment and human health. various substances such as tropospheric ozone (o3), nitrogen dioxide (no2) and carbon monoxide (co) are sources of air pollution. among these pollutants, tropospheric ozone stands out as a particularly detrimental factor. hence, accurate monitoring of air quality, particularly ozone levels, is crucial for effective pollution management and public health protection. air pollution monitoring platforms have emerged as a result of the increasing utilization of the internet of things (iot), enabling the collection of real-time data from multiple sensors placed in various locations. however, the accuracy of the ozone sensors built into these iot-based solutions is a concern. manufacturers often neglect the calibration process and overlook how environmental conditions, such as air temperature and humidity, can impact sensor performance. to enhance the reliability and functionality of gas sensors, air quality monitoring platforms commonly incorporate low-cost temperature and relative humidity sensors in addition to gas sensors. this redundancy in measuring pollutant concentration helps improve the calibration quality of gas sensors. ferrer-cid et al.
studied the application of multi-sensor data fusion techniques using machine learning and weighted averaging methods in air pollution monitoring platforms. their study demonstrated the effectiveness of traditional machine learning algorithms, including support vector regression (svr), random forest (rf), and k-nearest neighbors (knn), in fusing sensor data and performing calibration compared to weighted averaging. however, it was observed that the performance of these models heavily relies on feature engineering (e.g., applying partial least squares (pls) regression to mitigate multicollinearity), emphasizing the importance of carefully selecting and engineering the relevant features, such as applying the fast fourier transform. additionally, these methods usually neglect the temporal and spatial interdependencies present in the sensor readings, which could potentially provide valuable information for further improving calibration accuracy. graph neural networks (gnns) have proven effective in capturing spatial-temporal interrelationships in data, not only in cases where a graph structure is already present but also in scenarios involving spatially distributed data subject to spatial and temporal correlations. as a result, they offer significant potential in diverse domains, including traffic data analysis, recommendation systems, and biological networks. however, despite their wide-ranging applications, there exists a notable research gap regarding the utilization of gnns in the context of iot air pollution monitoring platforms. this gap presents an exciting opportunity to explore the potential advantages and challenges associated with leveraging the spatial and temporal relationships in sensor networks. in this paper, we propose a gnn-based model to address the challenge of low-cost multi-sensor fusion for calibrating ozone sensors in iot air pollution monitoring platforms. gnns have the ability to effectively capture sensor interdependencies, allowing them to learn representations that incorporate information from neighboring nodes and the overall graph topology. our approach particularly incorporates the graph attention network (gat) into the calibration process to enhance data fusion and improve the accuracy of ozone sensor calibration. the main objective of this research is to overcome the limitations of conventional machine learning-based calibration methods by utilizing the benefits of the gat. by fusing data from multiple sensors and effectively capturing the underlying spatial-temporal relationships, our approach provides a cost-effective solution for calibrating ozone sensors in iot air pollution monitoring platforms. the proposed method has the potential to improve the reliability and accuracy of ozone measurements, thereby enabling more effective air quality management strategies. the remainder of this paper is organized as follows. section ii outlines the methodology, including the details of the gat and the calibration process. in section iii, experimental results and performance evaluation are presented. finally, section iv concludes the paper. air pollution monitoring platforms have emerged as a result of the increasing utilization of the internet of things (iot), enabling the collection of real-time data from multiple sensors placed in various locations.
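a minimal sketch of gat-based sensor fusion for calibration, assuming each graph node is one sensor (gas, temperature, or humidity) carrying a short window of recent readings as features; the layer sizes, two-layer depth, and regression head are illustrative choices rather than the authors' exact architecture.

import torch
import torch.nn as nn
from torch_geometric.nn import GATConv

class GATCalibrator(nn.Module):
    def __init__(self, in_feats, hidden=32, heads=4):
        super().__init__()
        self.gat1 = GATConv(in_feats, hidden, heads=heads)   # attention over the sensor graph
        self.gat2 = GATConv(hidden * heads, hidden, heads=1)
        self.head = nn.Linear(hidden, 1)                     # calibrated ozone estimate per node

    def forward(self, x, edge_index):
        # x: [num_sensors, in_feats] sensor readings, edge_index: [2, num_edges] connectivity
        h = torch.relu(self.gat1(x, edge_index))
        h = torch.relu(self.gat2(h, edge_index))
        return self.head(h).squeeze(-1)

# model = GATCalibrator(in_feats=16)
# y_hat = model(x, edge_index)   # trained with, e.g., mse against reference-station ozone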
to enhance the reliability and functionality of gas sensors, air quality monitoring platforms commonly incorporate low-cost temperature and relative humidity sensors in addition to gas sensors. ferrer-cid et al. studied the application of multi-sensor data fusion techniques using machine learning and weighted averaging methods in air pollution monitoring platforms. their study demonstrated the effectiveness of traditional machine learning algorithms, including support vector regression (svr), random forest (rf), and k-nearest neighbors (knn), in fusing sensor data and performing calibration compared to weighted averaging. graph neural networks (gnns) have proven effective in capturing spatial-temporal interrelationships in data, not only in cases where a graph structure is already present but also in scenarios involving spatially distributed data subject to spatial and temporal correlations. however, despite their wide-ranging applications, there exists a notable research gap regarding the utilization of gnns in the context of iot air pollution monitoring platforms. in this paper, we propose a gnn-based model to address the challenge of low-cost multi-sensor fusion for calibrating ozone sensors in iot air pollution monitoring platforms. our approach particularly incorporates the graph attention network (gat) into the calibration process to enhance data fusion and improve the accuracy of ozone sensor calibration. by fusing data from multiple sensors and effectively capturing the underlying spatial-temporal relationships, our approach provides a cost-effective solution for calibrating ozone sensors in iot air pollution monitoring platforms. building upon the successful application of temporal and spatial graph attention layers as powerful feature extractors for multivariate time series in prior work, we propose a novel model, named spatial-temporal graph attention fuser (stgat-fuser), that incorporates these attention layers to capture long-term dependencies, temporal relationships, and correlations among different sensor inputs. this incorporation is beneficial for sensor fusion and calibration tasks as it allows the system to consider and leverage the extended historical context, thereby enhancing the understanding of complex temporal patterns and improving the accuracy and reliability of the fusion and calibration processes. this node comprised four mox sensors, one ec sensor, as well as an air temperature sensor and an air relative humidity sensor. this paper proposes an approach for multi-sensor fusion and calibration in iot air pollution monitoring systems. by incorporating graph attention networks, cnn, and lstm, the proposed method successfully captures the spatial and temporal relationships among sensors, resulting in improved calibration accuracy. overall, the findings presented in this paper contribute to the advancement of sensor fusion techniques and calibration methods, enhancing the performance of iot air pollution monitoring systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/373.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/373.txt new file mode 100644 index 0000000000000000000000000000000000000000..987954c2023f4a2be799d1b174a4867d1f44eca5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/373.txt @@ -0,0 +1 @@ +reproducing network research results has both significant educational and research value.
for education, it completes students' learning process on computer networks with lecture attendance and textbook reading, in accordance with the usual process of science study worldwide, e.g., educators at stanford university assign reproduction projects in their networking classes . for research, it ensures the results are accurate and trustworthy, gives researchers a hands-on opportunity to understand the pros and cons of these results, and motivates more innovations, e.g., imc 2023 introduces a replicability track for submissions that aims to reproduce or replicate results previously published at imc .the best practice for people to reproduce a published network research typically involves one of three approaches. first, rerun a publicly available prototype provided by the authors (e.g., ) or other people who implement it based on the publication (e.g., ). second, if no public prototype is available, people may contact the authors to ask for a private prototype. third, if no prototype is available, people need to manually implement one following the publication.the best practice to reproduce network research results has limitations. all three approaches in the best practice are limited for various reasons. first, not much published research comes with a publicly available prototype. our study shows that even in prominent networking conferences such as sigcomm and nsdi, only a small number of papers provide publicly available prototypes from the authors. from 2013 to 2022, only 32% and 29% of papers in sigcomm and nsdi, respectively, provide open-source prototypes. although some non-authors implement prototypes and release them to the public , the number of such prototypes is even smaller. second, the authors sometimes are reluctant to share a private prototype for various reasons (e.g., patent filing, commercial product, policy, and security).as such, without ready-made prototypes, the dominant way for people to reproduce the results of a published networking paper is to manually implement its proposed design. although this "getting-hands-dirty" approach provides people precious experience in understanding the details of the paper, in particular the pros and cons of the proposed design, the whole process is both time and labor consuming and errorprone. in the long run, it is unsustainable because network research results are becoming more and more complex. if people are spending more time trying to reproduce the published results, they will have less time for critical thinking and innovation. one may think this is not a unique issue for the networking community, but a prevalent one for the whole computer science discipline. however, the situation is more severe for networking research. for example, it may take a fresh graduate student one week to reproduce a machine learning paper by manual implementation, but one or two months to reproduce a networking paper.proposal: reproducing network research results using large language models (llms). in this position paper, we make a bold proposal to reproduce network research results by prompt engineering the emerging llms. such a proposal, if built successfully, can benefit the networking community from multiple perspectives, including (1) substantially simplifying the reproduction process, (2) efficiently identifying missing details and potential vulnerabilities (e.g., hyperparameters and corner-case errors) in network research results, and (3) motivating innovations to improve published research. 
it could even help improve the peer review process of networking conferences, partially realizing the vision of a sigcomm april fools' day email in 2016. our proposal is backed by the recent success of applying llms to both general and domain-specific code intelligence tasks as evidence. for general programming, copilot can provide effective code completion suggestions. chatgpt can complete and debug simple coding tasks when given proper prompts. rahmani et al. integrate an llm and content-based synthesis to enable multi-modal program synthesis. for domain-specific programming, in particular the network domain, sage uses the logical form of natural-language sentences to identify ambiguities in rfcs and automatically generate rfc-compliant code. nassim uses an llm to parse network device manuals and generate corresponding configurations for devices. they both focus on well-formatted inputs with a limited range of topics (i.e., rfcs and manuals). in this paper, we go further and take a first step to thoroughly investigate the feasibility and challenges of reproducing network research results by prompt engineering llms. a preliminary experiment ( §3). we conduct a small-scale experiment, where we choose four networking systems published in prominent networking conferences and journals and ask four students with essential knowledge of networking to each reproduce one system by prompt engineering the free chatgpt, a publicly available chatbot built on gpt-3.5, a representative llm. the results verify the feasibility of our proposal, i.e., each student successfully reproduces the system assigned to her/him via chatgpt. their correctness is validated by comparing the results of small-scale test cases with those of the corresponding open-source prototype. the efficiency is evaluated using large-scale datasets. results show that their efficiency is similar to that of their open-source prototypes. we learn several lessons from the experiment on how to use llms to reproduce networking research results more efficiently. first, provide llms with separate, modular prompts to build different components of a system and then put them together, rather than provide monolithic prompts to build the whole system. second, ask llms to implement components with pseudocode first to avoid unnecessary data type and structure changes later. third, data preprocessing is important to reproduction, but is often missed in the paper. we also learn some guidelines for debugging llm-generated code, including sending error messages / error test cases to llms, and specifying the correct logic in more detailed prompts. open research questions ( §4). we identify several key open research questions regarding llm-assisted network research results reproduction and elaborate on opportunities to tackle them. they include: (1) how to handle the diversity of these results? (2) how to design a (semi-) automatic prompt engineering framework to reproduce these results? (3) how to identify and handle the missing details and vulnerabilities of these results? (4) how to develop a domain-specific llm for reproducing these results? (5) how to use llms to discover optimization opportunities for these results? (6) how to apply this approach of reproduction to promote computer networking education and research? reproducing network research results has both significant educational and research value.
in this position paper, we make a bold proposal to reproduce network research results by prompt engineering the emerging llms. such a proposal can (1) substantially simplify the reproduction process, (2) efficiently identify missing details and potential vulnerabilities (e.g., hyperparameters and corner-case errors) in network research results, and (3) motivate innovations to improve published research. in this paper, we go further and take a first step to thoroughly investigate the feasibility and challenges of reproducing network research results by prompt engineering llms. we conduct a small-scale experiment, where we choose four networking systems published in prominent networking conferences and journals and ask four students with essential knowledge of networking to each reproduce one system by prompt engineering the free chatgpt, a publicly available chatbot built on gpt-3.5. we identify several key open research questions regarding llm-assisted network research results reproduction and elaborate on opportunities to tackle them. they include: (1) how to handle the diversity of these results? (2) how to design a (semi-) automatic prompt engineering framework to reproduce these results? (3) how to identify and handle the missing details and vulnerabilities of these results? (4) how to develop a domain-specific llm for reproducing these results? (5) how to use llms to discover optimization opportunities for these results? (6) how to apply this approach of reproduction to promote computer networking education and research? given the recent success of llms on code intelligence tasks (e.g., code completion, debugging, and synthesis), we propose to prompt engineer the emerging llms to reproduce network research results. despite being a simple example, it shows that an llm can implement network programs and gives us the confidence to experiment with more complex network research results. one design is to follow the top-down approach of system development with the following steps: (1) describe to the llm the key components of the system, (2) describe how components interact and ask the llm to define the interfaces, (3) provide the llm with the details of each component to generate the code, (4) test and debug the llm-implemented component, (5) repeat (3) and (4) for each component, and (6) test and debug the complete system. although the human-llm interaction in manual prompt engineering is beneficial for people to better understand the details of published research results, the efficiency of reproducing can be substantially improved by (semi-) automatic prompt engineering. we propose to specifically build a network research reproduction llm by using network research materials. third, although it may still be a long shot, it is theoretically feasible to build a deep learning model with open-source and reproduced prototypes of networking research as datasets to predict networking innovations, similar to the recent building of alphafold in the area of biology. in the era where ai is the predominant computer science research area, reproducing network research results using llms could draw students' attention and motivate their interest in networking education and research. last but not least, llm-assisted network research results reproduction could help improve the peer review process of prominent networking conferences.
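a minimal sketch of the top-down, modular prompting workflow outlined above; call_llm and run_tests are hypothetical stand-ins for an llm chat api and a local test harness, and the prompts are illustrative.

def call_llm(prompt: str) -> str:
    raise NotImplementedError("wire this to whichever chat llm api is being used")

def run_tests(code: str, tests: list[str]) -> tuple[bool, str]:
    raise NotImplementedError("execute the component's test cases, return (passed, error_log)")

def reproduce_system(system_description: str, components: dict[str, str], tests: dict[str, list[str]]):
    # steps (1)-(2): describe the components and ask the llm to define the interfaces
    interfaces = call_llm(
        f"system: {system_description}\ncomponents: {list(components)}\n"
        "define the interfaces between these components as python function signatures."
    )
    implementations = {}
    for name, spec in components.items():
        # lesson: ask for pseudocode first to avoid later data type / structure changes
        pseudo = call_llm(f"interfaces:\n{interfaces}\nwrite pseudocode for component '{name}': {spec}")
        code = call_llm(f"turn this pseudocode into runnable python:\n{pseudo}")
        passed, log = run_tests(code, tests.get(name, []))
        while not passed:
            # debugging lesson: feed error messages / failing test cases back to the llm
            code = call_llm(f"this component fails with:\n{log}\nfix the code:\n{code}")
            passed, log = run_tests(code, tests.get(name, []))
        implementations[name] = code
    return implementations  # step (6), integrating and testing the full system, is omitted here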
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/374.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/374.txt new file mode 100644 index 0000000000000000000000000000000000000000..82035a88f804f38e19299a818443a24956334258 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/374.txt @@ -0,0 +1 @@ +over the last several years, deep learning has witnessed a rapid paradigm shift towards large foundational models trained on massive datasets (brown et al., 2020; chowdhery et al., 2022). these models learn representations which often extend to diverse downstream tasks. however, when pretraining data is distributed across a large number of devices, it becomes impractical to train models using centralized learning. in these cases, federated learning (fl; konečnỳ et al., 2016) allows participating clients to train a model together without exchanging raw data. this privacy-preserving property makes fl a popular choice for a range of applications, including face recognition (mei et al., 2022a), autonomous driving (li et al., 2021), recommendation systems (ning et al., 2021), and self-supervised representation learning (vemulapalli et al., 2022). in self-supervised learning, simclr (chen et al., 2020), byol (grill et al., 2020), and simsiam (chen & he, 2021) are widely used approaches that can be adapted for use in fl settings using algorithms like federated averaging (fedavg; mcmahan et al., 2017). representation learning benefits from large models due to their capacity to learn more nuanced and reliable representations of the data (chen et al., 2022; tran et al., 2022). however, in cross-device fl settings, the limited resources of edge devices (including memory, computation capacity, and network bandwidth) impede the development of large models (wang et al., 2021; kairouz et al., 2021). in this work, we focus on federated training of large representation learning models on a large number of edge devices under resource constraints. typically in fl, clients' models share a single global architecture and perform end-to-end training in each communication round (mcmahan et al., 2017). however, many edge devices (e.g., internet of things (iot) devices, mobile phones, tablets, and personal computers) lack sufficient memory and compute to train most existing large ml models. for example, the google pixel 6 has 12 gb of memory, which is insufficient to naively train a multi-billion parameter model. communication of such a model and its gradient updates during every round of fl is also prohibitively data-intensive and time-consuming. these resource constraints create obstacles for real-world federated learning applications with large-scale models. one direction to manage resource constraints for federated learning on edge devices is to carefully select model architecture and hyperparameters (cheng et al., 2018; horvath et al.). as shown in fig. 2(b), our experimental evaluation demonstrates that federated layer-wise learning (fll) can significantly reduce the resource usage of a single client compared to federated end-to-end learning (fel) in all aspects. in addition, we demonstrate that depth dropout is an effective complementary strategy in federated layer-wise learning, which further reduces resource usage upper bounds without degrading model performance.
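a minimal sketch of layer-wise training with depth dropout, assuming a stack of blocks that share input/output dimensions; only the newest block is trainable, and a fraction of the frozen blocks is randomly skipped during training. the 50% rate and the block structure are illustrative assumptions, not the authors' exact implementation.

import random
import torch.nn as nn

class LayerwiseEncoder(nn.Module):
    def __init__(self, blocks, active_idx, depth_dropout=0.5):
        super().__init__()
        self.blocks = nn.ModuleList(blocks)      # blocks assumed to share input/output dims
        self.active_idx = active_idx             # index of the block currently being trained
        self.depth_dropout = depth_dropout
        for i, blk in enumerate(self.blocks):
            for p in blk.parameters():
                p.requires_grad = (i == active_idx)   # all earlier blocks stay frozen

    def forward(self, x):
        for i, blk in enumerate(self.blocks[: self.active_idx + 1]):
            frozen = i < self.active_idx
            if frozen and self.training and random.random() < self.depth_dropout:
                continue                         # depth dropout: skip this frozen block
            x = blk(x)
        return x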
we can make the following observations: (i) both models pre-trained by federated layer-wise learning and federated end-to-end learning can significantly outperform the model without pre-training, indicating the effectiveness of self-supervised representation learning in federated settings. (ii) while the federated layer-wise learning approach is an approximation of federated end-to-end learning, it can achieve performance on par with the end-to-end method in downstream evaluation tasks. (iii) we found that intermediate representations from the federated layer-wise learning model performed better than those from the federated end-to-end learning model in certain downstream tasks. for example, in a linear downstream task using the representation from layer 3, the federated layer-wise learning model achieved 28. we conducted additional experiments to further investigate the effect of model size (number of layers) and number of training rounds per layer on the performance of federated layer-wise and end-to-end learning. we also found that the difference in performance between the two approaches was minimal when the number of training rounds per layer was small (4k) but became more pronounced when the number of training rounds per layer was increased (12k). based on these results, it appears that layer-wise learning may require slightly more training rounds per layer to reach the same performance as end-to-end learning. we conducted two sets of experiments: applying depth dropout to a 6-layer model and a 12-layer model, with a fixed dropout rate of 50% (meaning half of the fixed layers were dropped). our study presents federated layer-wise learning for devices with limited resources, which simultaneously reduces the demands on memory, computation, and communication for individual clients without significantly compromising performance in comparison to end-to-end training. we demonstrate that our proposed depth dropout technique is an effective complement to federated layer-wise learning, as it further reduces resource usage across all categories with minimal loss of performance, even when dropping half of the fixed layers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/375.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/375.txt new file mode 100644 index 0000000000000000000000000000000000000000..bd308e46155c55e6a8d5a0901377959f1a4ff87e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/375.txt @@ -0,0 +1 @@ +the majority of machine learning techniques utilize the empirical risk minimization framework. within this framework, the optimization objective is to minimize empirical risk, which is the average risk over a finite set of training samples. in practice, the value of interest is the population risk, representing the expected risk across a population. generalization error is the difference between the optimization objective (empirical risk) and the value of interest (population risk). the prevalence of machine learning techniques makes it essential to comprehend generalization error. previous studies (russo & zou, 2016; 2019; xu & raginsky, 2017) have established a relationship between mutual information, i(w ; s n ), and the generalization error, where s n is a set containing n samples from a distribution µ, serving as the algorithm's input, and w represents the model's weights after training, serving as the algorithm's output.
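for reference, the input-output mutual information bound from xu & raginsky (2017) that this line of work builds on can be written, up to notation, as

\[
\bigl|\,\mathbb{E}\bigl[L_{\mu}(W) - L_{S_n}(W)\bigr]\,\bigr| \;\le\; \sqrt{\frac{2R^{2}}{n}\, I(W; S_n)},
\]

where \(L_{\mu}(W)\) is the population risk, \(L_{S_n}(W)\) is the empirical risk on the \(n\) training samples, and the loss is assumed \(R\)-sub-gaussian under \(z \sim \mu\).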
information-theoretic tools are well-suited for analyzing iterative learning algorithms, as the chain rule of mutual information allows for a simple decomposition of i(w ; s n ) across iterations (i.e., bounding it by the sum of the per-iteration terms i(w t ; s n |w t-1 )). leveraging this technique, xu & raginsky (2017) study the generalization properties of stochastic gradient langevin dynamics (sgld). sgld can be considered as introducing noise to sgd in each update step. since most commonly used algorithms in practice, such as sgd and adam (kingma & ba, 2014), do not incorporate noise injection during the update process, recent research efforts are focused on integrating information-theoretic methods into these iterative algorithms without added noise. the challenge in this approach is that the value of i(w t ; s n |w t-1 ) will become infinite when w t is determined by s n and w t-1 . a potential solution involves utilizing surrogate processes (negrea et al., 2020; sefidgaran et al., 2022). neu et al. (2021) derive generalization bounds for sgd by employing a "virtual sgld" surrogate process, in which noise is introduced during each update step of (s)gd. their generalization bound consists of two components: the generalization bound for the surrogate process and the bound for the difference between the generalization errors of the surrogate and original processes. this paper examines the mutual information i(s n ; w ) from alternative perspectives and reformulates the mutual information to relate to the uncertainty of the update. existing works on generalization theory can be roughly divided into two categories: function-space-based methods and learning-algorithm-based methods. the function-space-based methods study the generalization behavior based on the complexity of the function space. many methods for measuring the complexity of the function space have been proposed, e.g., vc dimension (vapnik & chervonenkis, 2015), rademacher complexity (bartlett & mendelson, 2002) and covering number (shalev-shwartz & ben-david, 2014). these works fail when applied to overparameterized models, where the number of parameters is larger than the number of data samples, because the function space is too large to deliver a non-trivial result (zhang et al., 2021) in this case. to overcome this problem, recent works leverage the properties of the learning algorithm to analyze the generalization behavior. the most popular methods are the stability of the algorithm (hardt et al., 2016) and information-theoretic analysis (xu & raginsky, 2017; russo & zou, 2016). among them, the stability of the algorithm (bousquet & elisseeff, 2002) measures how one sample change of the training data impacts the model weights finally learned, and the information-theory-based generalization bounds (russo & zou, 2016; 2019; xu & raginsky, 2017) rely on the mutual information of the input (training data) and output (weights after training) of the learning algorithm. although both the stability method and the information-theoretic method are general, obtaining the generalization bound for practical learning algorithms is non-trivial. most of the stability-based generalization bounds focus on sgd (hardt et al., 2016; bassily et al., 2020; nikolakakis et al., 2022). applying the stability-based method outside sgd is very complex and non-trivial (nguyen et al., 2022; ramezani et al., 2018).
most information-theoretic generalization bounds are applied to stochastic gradient langevin dynamics (sgld), i.e., sgd with noise injected in each step of the parameter updates (pensia et al., 2018; negrea et al., 2019; haghifam et al., 2020). neu et al. (2021) extend the information-theoretic generalization bounds to sgd by leveraging a surrogate process. our work advances the field by extending the information-theoretic method to learning algorithms beyond sgd in a simple way. this represents a significant step towards developing practically useful generalization theories. previous studies (russo & zou, 2016; 2019; xu & raginsky, 2017) have established a relationship between mutual information, i(w ; s n ), and the generalization error, where s n is a set containing n samples from a distribution µ, serving as the algorithm's input, and w represents the model's weights after training, serving as the algorithm's output. existing works on generalization theory can be roughly divided into two categories: function-space-based methods and learning-algorithm-based methods. the most popular methods are the stability of the algorithm (hardt et al.) and information-theoretic analysis. the gradient and the hessian matrix of f (w, z) are denoted as ∇f (w, z) and ∇ 2 f (w, z), respectively. our primary result is a bound on the generalization error of the weights w generated by a learning algorithm with bounded updates. suppose f (w, z) is r-sub-gaussian with respect to z ∼ µ for every w ∈ w. (bounded updates) a learning algorithm is said to have bounded updates with respect to function f (•) and data distribution µ, if for all s n ∼ µ ⊗n , there exists a constant l, such that u t ≤ l for all t ≤ t , when the learning algorithm is operated on f (•) and s n . comparison between the bounded updates assumption and the l-lipschitz assumption: the l-lipschitz assumption is widely used to analyze the convergence or generalization behavior of learning algorithms. test loss = training loss + generalization error, where the training loss refers to the loss on the data set used as input to the learning algorithm. interpreting our results in a practical situation: if the training error can decrease to a significantly lower value than the generalization error, or if the training error's vanishing rate is faster than the generalization error's, and ∆ σ is not in the worst-case scenario, then the iterative learning algorithm with bounded updates can achieve a vanishing test loss at a rate of o(1/n^(1/3)) in the worst-case scenario. additionally, using learning trajectory information could potentially enhance the bounds of iterative learning algorithms with bounded updates. algorithm 1 (adam). input: the loss function f (w, z), the initial point w 1 ∈ r d , the batch size b, learning rates {η t } t t=1 , m 0 = 0, v 0 = 0, and hyperparameters β = (β 1 , β 2 ). algorithm 2 (adagrad). input: the loss function f (w, z), the initial point w 1 ∈ r d , the batch size b, learning rates {η t } t t=1 , m 0 = 0, v 0 = 0. given the dataset s n = {z i } n i=1 , the variance of the gradient at weights w is calculated as (1/n) ∑_{i=1}^{n} ‖∇f (w, z i ) − ∇f s (w)‖².
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/376.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/376.txt new file mode 100644 index 0000000000000000000000000000000000000000..450cda604ba0713558a311f24d70b8d657b031ec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/376.txt @@ -0,0 +1 @@ +language models are increasingly being incorporated into web applications and other user-facing tools. 1these applications typically do not provide direct access to the underlying language model or the decoding configuration used for generation. in this paper, we show how even in this blackbox setting, it is possible to identify the decoding strategy employed for generation. we consider the case where one only has access to a system that inputs a prompt and outputs a generated response. we present algorithms for distinguishing the two most popular decoding strategies, top-k and nucleus sampling (a.k.a. top-p), and estimating their respective hyperparameters (k and p).the choice of decoding strategy-the algorithm used to sample text from a language model-has a profound impact on the randomness of generated text, introducing biases toward some word choices. for example, when openai's chatgpt,2 a chatbot built with large language models, is repeatedly passed a prompt asking it to report the outcome of rolling a twenty-sided die, we found that it only returns 14 of the 20 options, even though all should be equally likely.prior work has shown that knowing the decoding method makes it easier to detect whether a writing sample was generated by a language model or else was human-written (ippolito et al., 2020). as generated text proliferates on the web, in student homework, and elsewhere, this disambiguation is becoming increasingly important.concurrent work to ours by naseh et al. (2023) has developed similar strategies for detecting decoding strategy from a blackbox api: however, they focus more on identifying hybrid decoding strategies (including beam search), whereas we focus more on prompt engineering to produce close-to-uniform token distributions that reduce the number of queries needed. our proposed methods complement but are not comparable to those of tay et al. (2020). their method trains classifiers that input a generated text sequence and output a prediction for the decoding strategy used to generate it. in contrast, our method interacts with an api and does not require any data or ml training. suppose we know for a fact the prompt does induce a near-uniform distribution on all publicly-available language models: then the more likely explanation would be that the sampling algorithm itself truncated this distribution-either with top-k or top-p sampling. by measuring what fraction of the words we would expect to get generated actually do get generated for prompts with known output distributions, we can estimate values for k and p and distinguish between these two techniques.in practice, we use algorithm 1 (see appendix), which repeatedly estimates a lower bound for k using two different prompts m 1 and m 2 for increasing numbers of trials until (1) the two estimates match and (2) the x (k) token appears at least twice in both generations (to prevent spurious matching). 
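a minimal sketch of the k lower-bound estimation loop just described; gen is a hypothetical stand-in for the blackbox system that returns a single response token, and the trial schedule and the reading of condition (2) as "the rarest observed token appears at least twice" are illustrative assumptions.

def gen(prompt: str) -> str:
    raise NotImplementedError("query the blackbox system and return its single response token")

def rarest_count(samples):
    counts = {}
    for s in samples:
        counts[s] = counts.get(s, 0) + 1
    return min(counts.values())

def estimate_k(m1: str, m2: str, start_trials: int = 50, max_trials: int = 5000) -> int:
    n = start_trials
    while n <= max_trials:
        samples1 = [gen(m1) for _ in range(n)]
        samples2 = [gen(m2) for _ in range(n)]
        k1, k2 = len(set(samples1)), len(set(samples2))
        # accept when the two lower bounds agree and the rarest observed token
        # showed up at least twice in both runs (to avoid spurious matches)
        if k1 == k2 and rarest_count(samples1) >= 2 and rarest_count(samples2) >= 2:
            return k1
        n *= 2                    # otherwise, double the number of trials and retry
    return max(k1, k2)            # fall back to the larger lower bound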
because our guessed distributions are not perfect, instead of relying on just one prompt for our estimate, we instead average over two upper bounds of p derived from two different prompts. all of the estimates reported in this section could be improved with additional queries to the model. for both p and k estimation, we average over the estimates from just two prompts, but using more prompts would lead to better estimates. however, this combination can be detected by comparing the empirical distribution against a set of known distributions for common models; if the distribution does not match any of them, then we can conclude that either it is not using any known model, or that other distribution shaping such as temperature has been applied. given a system gen : m → r that takes an input prompt m and outputs a single response token r, let m1 and m2 be two prompts that with high probability return responses from large (≫ k) sets of different sizes. [pseudocode fragment of algorithm 1: … samples2.insert(gen(m2)); end for; k1 ← # unique items in samples1; k2 ← # unique items in samples2; minsamples ← (samples1 …] as we mentioned in the main paper text, suppose, for a given prompt m, we call gen(m) n times, each time keeping just the first token of the output. in practice, we use algorithm 1, which repeatedly estimates a lower bound for k using two different prompts m 1 and m 2 for increasing numbers of trials until (1) the two estimates match and (2) the x (k) token appears at least twice in both generations (to prevent spurious matching). consider a language model f θ : (m, r) → r that scores a prompt/response pair and a system gen : m → r that takes an input prompt m and outputs a single response token r using f θ and top-p sampling. because our guessed distributions are not perfect, instead of relying on a single distribution to bound our estimate, we instead average over the two upper bounds of p derived from the different prompts and return that as our guess. figure 5 shows the variance in output distributions for two exemplar-based prompts, digits and abc (table 4), across different numbers of exemplars and different random selections of exemplars. for prompts to be used in top-p estimation, one additional challenge is that ideally the prompt should yield a similar distribution when inputted to all popular language models. as discussed in the paper, our estimates for p are worse when there is a greater mismatch between the known distribution used for top-p estimation and the true distribution of the language model underlying the blackbox system being attacked. the best approach (and the one we used to attack chatgpt) is to choose the known distribution to use for top-p estimation by keeping around a database of distributions from a bunch of different models, and then comparing the output distribution from the blackbox system to each distribution in the database. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/377.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/377.txt new file mode 100644 index 0000000000000000000000000000000000000000..5ec063f98ffe250d4ac075f3ca36abbe01c2496b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/377.txt @@ -0,0 +1 @@ +during the past few years, deep learning models have become larger and larger with millions or even billions of trainable parameters. training such huge models is a computationally expensive and time-consuming process which takes a huge amount of memory as well (dean et al., 2012; hajimolahoseini et al., 2023).
on edge devices and smart phones, memory consumption and computational complexity are even more concerning because they can cause several issues regarding memory and battery life (hajimolahoseini et al., 2018, 2019, 2022b). however, due to high redundancy in ai models, they could be compressed to the point that the accuracy is preserved close to that of the original model (chen et al., 2019). the computational complexity and memory consumption of deep learning models are dominated by the convolutional and fully connected layers, respectively (cheng et al., 2018). the existing techniques for training/inference acceleration of ai models can be categorized into four different groups including low rank decomposition (lrd), pruning, quantization and knowledge distillation (kd) (cheng et al., 2017b). in all of these techniques except kd, the original architecture of the model is preserved and only the training layers are compressed so that the memory usage and/or computational complexity is minimized. in kd, however, the compressed model may have a totally different architecture (hinton et al., 2015; rashid et al., 2021). for more information about knowledge distillation, the reader is referred to (hinton et al., 2015; rashid et al., 2021). in pruning, the sparsity of the model is increased by removing the parts that are not contributing much to the performance of the network (luo et al., 2018). it can be applied at different levels including filters, kernels, vectors or weights of the layers (zhang et al., 2018; zhuang et al., 2018; mao et al., 2017). pruning is applied according to a heuristic ranking measure which is introduced manually based on experiments. although it can be used as a compression technique, one big challenge of pruning is that it may take a long time for sequences of pruning and fine-tuning to reach the desired performance, which may cause a significant overhead during training (cheng et al., 2017b). in the quantization approach, on the other hand, the weights or activations of the model are quantized using scalar or fixed-point quantization techniques (cheng et al., 2017a). in fixed-point weight quantization, the weights are represented using a lower precision, e.g. 16-bit, 8-bit or binary (prato et al., 2019; bie et al., 2019). in scalar quantization, the weights are represented using a codebook of centres and codes assigned to them (cheng et al., 2017b). knowledge distillation (kd) uses a teacher-student framework in order to transfer the knowledge from a larger network (teacher) into a compact and efficient one (student) by adding an auxiliary loss to imitate softmax outputs or logits from the teacher as a representation of class distributions (hinton et al., 2015; rashid et al., 2021). a common issue with most of the aforementioned techniques is that these methods are based on some heuristics and therefore have poor or no mathematical support, and thus a closed-form solution does not exist. furthermore, they could face some serious limitations at high compression ratios. in contrast with the aforementioned methods, low rank decomposition (lrd) decomposes the weight tensors using a tensor decomposition algorithm, e.g. singular value decomposition (svd) (hajimolahoseini et al., 2022a; van loan, 1987; li et al., 2021). in this approach, each fully connected layer is replaced with two consecutive fully connected layers whose weights are calculated from the original matrix using the svd algorithm.
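a minimal sketch of replacing one fully connected layer with two consecutive layers obtained from a truncated svd, as described above; the rank argument is the knob whose selection is discussed later.

import torch
import torch.nn as nn

def decompose_linear(layer: nn.Linear, rank: int) -> nn.Sequential:
    # truncated svd of the weight matrix: W ≈ U_r @ V_r with U_r: [out, rank], V_r: [rank, in]
    W = layer.weight.data
    U, S, Vh = torch.linalg.svd(W, full_matrices=False)
    U_r = U[:, :rank] * S[:rank]                 # absorb singular values into one factor
    V_r = Vh[:rank, :]
    first = nn.Linear(layer.in_features, rank, bias=False)
    second = nn.Linear(rank, layer.out_features, bias=layer.bias is not None)
    first.weight.data.copy_(V_r)                 # y = U_r @ (V_r @ x) ≈ W @ x
    second.weight.data.copy_(U_r)
    if layer.bias is not None:
        second.bias.data.copy_(layer.bias.data)
    return nn.Sequential(first, second)

# fc = nn.Linear(2048, 1000); fc_lr = decompose_linear(fc, rank=128)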
on the other hand, for convolutional layers, a higher-order version of svd, e.g. tucker decomposition, is applied in order to decompose them into multiple components (de lathauwer et al., 2000a,b; ahmed et al., 2022). despite several benefits that lrd provides, including a strong mathematical foundation and one-shot knowledge distillation from the original to the decomposed model, the high number of new layers generated by applying tensor decomposition prevents it from being considered as a training/inference acceleration method in terms of frames per second. therefore, it is mostly considered as a type of model compression technique which helps in terms of memory consumption. a naive way of improving the training/inference speed is to reduce the decomposition ranks to the point that the acceleration is achieved, but this may harm the accuracy of the model to the point that it could not be recovered close to the original model. in this paper, we show that appropriate rank selection and sequential freezing of the decomposed layers can help improve the efficiency of the lrd-decomposed models without requiring a significant decrease in the rank of decomposition. in the rank optimization technique, we search for the optimal rank around the calculated decomposition ranks in order to find the most efficient decomposed architecture. a sequential layer freezing of the decomposed layers is also proposed for saving time during back propagation. we also show that the proposed techniques are platform-agnostic and could be used on different ai processors, e.g. nvidia's gpus and huawei's ascend npus, to improve the training/inference speed. despite several benefits that lrd provides, including a strong mathematical foundation and one-shot knowledge distillation from the original to the decomposed model, the high number of new layers generated by applying tensor decomposition prevents it from being considered as a training/inference acceleration method in terms of frames per second. in this paper, we show that appropriate rank selection and sequential freezing of the decomposed layers can help improve the efficiency of the lrd-decomposed models without requiring a significant decrease in the rank of decomposition. therefore, we propose to freeze one of the two decomposed layers in the svd decomposition, and the first and last 1 × 1 layers in the tucker decomposition, and only fine-tune the weights of the other layer in svd and the core tensor in tucker decomposition. we call this technique sequential freezing to distinguish it from the regular freezing described in the previous paragraph, in which the freezing is applied only once to some fixed layers. in this pseudo-code, the decomposed layers are shown as l r (i) where i = 0, 1, 2 for regular convolutional layers and i = 0, 1 for 1 × 1 convolutional and fully connected layers. algorithm 2 (sequential freezing of decomposed layers). input: decomposed layers l r (i), epoch number e. if e % 2 = 0 then: if tucker, then freeze l r (0) and l r (2), unfreeze l r (1). however, by applying the rank optimization algorithm on top of lrd, the training (inference) speed improves by 25% (27%), 36% (37%) and 38% (36%) for resnet-50, -101 and -152 models, respectively.
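a minimal python sketch of the alternating schedule in algorithm 2 above, assuming the decomposed factors are passed as a list ([first, second] for svd, [1x1, core, 1x1] for tucker); the even/odd convention is an illustrative reading of the pseudocode.

def apply_sequential_freezing(decomposed_layers, epoch):
    # decomposed_layers: [first, second] for svd, or [1x1, core, 1x1] for tucker
    n = len(decomposed_layers)
    train_inner = (epoch % 2 == 0)   # even epochs: freeze the outer factor(s), train the core / second factor
    for i, layer in enumerate(decomposed_layers):
        is_outer = (i == 0) or (i == n - 1 and n > 2)
        trainable = (not is_outer) if train_inner else is_outer
        for p in layer.parameters():
            p.requires_grad = trainable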
as shown in table 1, when combined, rank optimization and freezing can improve the training speed by 46%, 60% and 60% for resnet-50, -101 and -152 models, respectively. in order to show the computational complexity of the lrd, the decomposition time of the vanilla lrd with and without rank optimization and freezing techniques is compared in table 2 for resnet-50, -101 and -152 models. as seen in this table, vanilla lrd takes around 232 seconds to decompose the resnet-152 model while the rank optimization technique takes 716 seconds for calculating the optimal ranks. as seen here, the accuracy could be recovered easily after applying these techniques. for example, the sequential freezing reaches an accuracy of 95% at epoch 20 while regular freezing reaches that accuracy at epoch 26, resulting in around 30% faster convergence. in this work, we proposed two techniques for accelerating lrd models: rank optimization and layer freezing. specifically, layer freezing and its advanced version, sequential layer freezing, accelerate the training, while rank optimization could be applied for accelerating both training and inference. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/378.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/378.txt new file mode 100644 index 0000000000000000000000000000000000000000..fcdfdde4e2f0432c6a4cea1375b062e1e02f0b3f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/378.txt @@ -0,0 +1 @@ +recent years have seen a plethora of works dealing with the analysis of graph-structured data. while the analysis of static graphs is a well-studied field, the analysis of dynamic graphs is still nascent. in this work, we will focus on the task of dynamic graph classification, where the goal is to predict the class of a process, which cannot be observed from a single snapshot of the graph, but only from the way the graph evolves over time. although various methods exist for dynamic graph classification, none, to our knowledge, concurrently (i) offers scalability, (ii) accommodates a changing node set, and (iii) considers edge weight information (table 1). in this work, we explore an approach based on filtrations, a concept from topological data analysis typically associated with persistent homology. specifically, we extend prior work from o'bray et al. on filtration curves to a dynamic setting. we refer to our method as filtration surfaces and find that it remedies the aforementioned shortcomings of existing methods. in this work, we will focus on the task of dynamic graph classification, where the goal is to predict the class of a process, which cannot be observed from a single snapshot of the graph, but only from the way the graph evolves over time. finally, temporal graph kernels present an entirely different approach to dynamic graph classification by lifting standard graph kernels to the temporal domain. more specifically, a filtration can be obtained using an edge weight function w : e → r such that g i is induced by all edges with weights less than or equal to w i , where w i is the ith smallest weight. to build a filtration curve, we need to choose (i) an edge weight function w : e → r that assigns a weight to every edge, and (ii) a graph descriptor function f : g → r d that takes a (sub)graph and returns a value in r d .
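a minimal sketch of computing one filtration curve with a node-label histogram descriptor using networkx, following the two ingredients just listed; a filtration surface then simply stacks one such matrix per timestep. the function names and the dict-based label encoding are illustrative assumptions.

import numpy as np
import networkx as nx

def node_label_histogram(graph, labels, num_labels):
    hist = np.zeros(num_labels)
    for node in graph.nodes:
        hist[labels[node]] += 1
    return hist

def filtration_curve(graph, labels, num_labels, weight="weight"):
    thresholds = sorted({d[weight] for _, _, d in graph.edges(data=True)})
    curve = []
    for w_i in thresholds:
        edges = [(u, v) for u, v, d in graph.edges(data=True) if d[weight] <= w_i]
        sub = graph.edge_subgraph(edges)          # subgraph induced by edges with weight <= w_i
        curve.append(node_label_histogram(sub, labels, num_labels))
    return np.stack(curve)                        # shape: [num_thresholds, num_labels]

# a filtration surface for a dynamic graph g_1, ..., g_n is then
# np.stack([filtration_curve(g_t, labels_t, num_labels) for g_t, labels_t in snapshots])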
by building the graph filtration f g in order of increasing edge weight and evaluating the graph descriptor function on every subgraph g i of the filtration, one obtains a sequence of vectors which can be modeled as a matrix p g := [f (g 1 ); . . . ; f (g m )] ∈ r m×d , the structure that o'bray et al. propose. the filtration curve p g is a compact representation of the graph g that can be used for downstream tasks such as graph classification. the graph descriptor function f can be thought of as a feature extractor, which, when evaluated alongside a filtration, yields a multi-scale representation of the graph. o'bray et al. propose the following edge weight functions to define a filtration over the graph: (i) the native edge weights, (ii) the max degree function w xy = max{degree(x), degree(y)}, (iii) the ollivier-ricci curvature with α = 0. intuitively, we calculate filtration curves p gi for all dynamic graph timesteps g i ∈ g and therefore extend the curve to another dimension, yielding a surface. formally, we model the sequence of filtration curves as a tensor r g := [p g1 ; . . . ; p gn ] ∈ r n×m×d , where n is the length of the dynamic graph, m is the number of weight thresholds in the filtration, and d is the dimensionality of the graph descriptor function. just like filtration curves are step functions because the graph descriptor function does not change in-between thresholding weights, filtration surfaces can be thought of as step-like surfaces when assuming that the filtration curve does not change in-between timestamps of the dynamic graph. in contrast to existing approaches, filtration surfaces can handle a changing node set, since the filtration curves are computed independently for each graph and the shared weight index guarantees interoperability between graphs. furthermore, our method is suitable for the online temporal graph setting, since a new timestamp can be added to the filtration surface by simply appending a new filtration curve to the tensor. our investigation aims to address two pivotal questions: (i) how does the scalability of filtration surfaces compare against state-of-the-art methods? (ii) how do filtration surfaces perform in relation to baselines on datasets that rely on edge weight information? for the real-world datasets, we use node label histogram filtration surfaces with ricci curvature as our edge weight function (fs-rc), since the dynamic graphs of the datasets do not have native edge weights. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/379.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/379.txt new file mode 100644 index 0000000000000000000000000000000000000000..312f97f4eacf9d871676b4d25c8e99dc3c45e8e2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/379.txt @@ -0,0 +1 @@ +polycystic kidney disease (pkd) is a prevalent, yet under-researched hereditary renal disorder characterized by the formation of numerous cysts in the kidneys. over time, these cysts can enlarge and disrupt the standard kidney structure, impairing kidney function and thus leading to various complications, such as high blood pressure, kidney stones, and, in severe cases, kidney failure. even though this disease is regarded as one of the top kidney ailments of patients in the us, it is still relatively unexplored, hence leading to late detection and ineffective care.
therefore, early and accurate diagnosis of pkd is crucial for timely intervention and effective management of the condition to improve patient outcomes, however, the various patient-specific factors that play a role in the diagnosis make it an intricate puzzle for nephrologists and clinicians to solve .in recent years, the progress made in artificial intelligence and machine learning, specifically deep learning, has opened up new possibilities in healthcare for both detection and prediction . neural networks, a cornerstone of deep learning, have demonstrated exceptional proficiency in image recognition, feature extraction, and classification tasks . with the incredible capabilities of these algorithms and models, healthcare professionals now have powerful tools that can aid in analyzing vast amounts of medical data with unparalleled precision and efficiency, thereby increasing the efficacy of medical prescriptions. however, certain domains have yet to be immersed in the frontiers of ai and ml, with pkd being one such disease that has had no previous research using machine learning done in it .therefore, in this research we aim to leverage the power of deep learning by utilizing neural networks to aid in pkd detection for accurate and early diagnosis. utilizing methods such as synthetic data creation and data preprocessing, an mlp algorithm and stacking ensemble were trained to see if they could learn whether or not a patient had pkd. furthermore, using a gene ontology tool, we found robust results indicating the processes and functions of the gene expressions that the model found highly correlated with pkd to gain deeper insights into the underlying molecular mechanisms that are affected by the disease. utilizing methods such as synthetic data creation and data preprocessing, an mlp algorithm and stacking ensemble were trained to see if they could learn whether or not a patient had pkd.1) data preprocessing: the algorithm started with the mice-based data getting pre-processed, allowing an algorithm to be used to learn the data. they are good at learning the relevant features and data, and they stack a layer of their neurons, where each layer is learning different parts of the data, allowing the neural network to get a hierarchical representation of the data.an mlp was used for this data rather than a stacking ensemble due to its ability to understand the complexity of data better than stacking ensembles. synthetic data based algorithm 1) data creation: in this study, we also used syntehtic data for our model. synthetic data is more accessible for the data augmentation needed to see if a machine learning algorithm could learn whether or not a patient has pkd based on gene expression data due to the data for pkd being hard to collect and analyze.2) data preprocessing: data preprocessing was done to use the dataset and to split the data. then, a train test split happened, splitting the data into 80% training data and 20% testing data. a more robust model is created by utilizing all of the strengths of the independent machine learning algorithms to accurately predict if a patient has pkd based on synthetic data. one significant difference between the datasets of the two different algorithms is that synthetic data is generated data, so it may be that the data being created is not indicative of anything particularly related to pkd, making it more prone to lowered accuracy than the standard clinically tested mice data. 
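As a rough illustration of the pipeline sketched above (an 80/20 split, an MLP for the expression data, and a stacking ensemble for the synthetic-data variant), the scikit-learn snippet below uses randomly generated placeholder arrays in place of the real gene-expression and synthetic datasets; the layer sizes and base estimators are assumptions, not the study's configuration.

```python
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.neural_network import MLPClassifier
from sklearn.ensemble import StackingClassifier, RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

# X: gene-expression matrix (samples x genes), y: 1 if PKD, 0 otherwise (placeholder data)
rng = np.random.default_rng(0)
X = rng.normal(size=(200, 500))
y = rng.integers(0, 2, size=200)

# 80% training / 20% testing split, as described in the text
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=0)
scaler = StandardScaler().fit(X_tr)
X_tr, X_te = scaler.transform(X_tr), scaler.transform(X_te)

# MLP for the (mice-derived) expression data
mlp = MLPClassifier(hidden_layer_sizes=(64, 32), max_iter=500, random_state=0)
mlp.fit(X_tr, y_tr)

# stacking ensemble, as used for the synthetic-data pipeline
stack = StackingClassifier(
    estimators=[("rf", RandomForestClassifier(random_state=0)),
                ("lr", LogisticRegression(max_iter=1000))],
    final_estimator=LogisticRegression(max_iter=1000))
stack.fit(X_tr, y_tr)

print("MLP accuracy:     ", accuracy_score(y_te, mlp.predict(X_te)))
print("stacking accuracy:", accuracy_score(y_te, stack.predict(X_te)))
```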
on the other hand, although the synthetic data may not be helping the algorithm, it is possible that the mice data needs to be more generalizable to humans due to the differences between the two species.additionally, due to a lack of adequate human clinical data on polycystic kidney disease (pkd), the research team had to rely heavily on introspection studies of pkd in mice models as well as synthetic data generated from computational modeling. without access to sufficient real-world data from human pkd patients to train machine learning algorithms, the use of these alternative data sources like mouse models and synthetic data was absolutely vital. the multimodal approach, combining real human data where available with data from mouse models and synthetic data generation, provided a robust overall training dataset that allowed the research team to make meaningful progress in applying ai to better understand pkd. moreover, exploring ways to overcome data scarcity through data augmentation techniques or synthetic data generation methods that accurately simulate human data can provide valuable insights into the robustness of the models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/38.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/38.txt new file mode 100644 index 0000000000000000000000000000000000000000..41fcdf4f34e2cc53743bace35f41391b80703a70 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/38.txt @@ -0,0 +1 @@ +similarity indexes like canonical correlation analysis (cca, hotelling, 1936) or centered kernel alignment (cka, kornblith et al., 2019) aim to find a similarity between parallel sets of different representations of the same data. the deep learning community adapted these indexes to measure similarity between representations that come from different models (raghu et al., 2017;morcos et al., 2018;kornblith et al., 2019). another line of work used the same methods to measure similarity between different languages which come from a single multilingual model (kudugunta et al., 2019;singh et al., 2019a;conneau et al., 2020;muller et al., 2021).in this paper, we argue that while cca/cka methods are a good fit for the first case, they are a suboptimal choice for the second scenario.first, we employ a real-world motivating example to demonstrate that cka can fail to capture the notion of similarity that we consider helpful in a cross-lingual context. we also discuss the general problems of cka/cca indexes and conclude that they are not well aligned with some of the goals of cross-lingual analysis (section 4).next, we propose and verify an averaged neuron-wise correlation (anc) as a straightforward alternative. it exploits the fact that representations from the same model have apriori-aligned neurons, which is the desired property in a crosslingual setup (section 5).finally, muller et al. (2021) demonstrated the so-called "first align, then predict" representational pattern in a multilingual model: the model first aligns representations of different languages together, and then (starting from the middle layers) makes them more language-specific again (to accompany the language-specific training objective). the finding is insightful but only considers mbert (wu and dredze, 2019) which is a masked language model (mlm) with 110m parameters. thus, it is unclear if the "first align, then predict" pattern is specific to this model or more general. 
in this study, we use anc to show that the pattern generalizes to the gpt-style (brown et al., 2020) causal language models (clms, lin et al., 2021) and extends to large-scale mlms and clms (section 6).in this paper we are interested specifically in the scenario of measuring the strength of cross-lingual similarity of representations that come from a single multilingual language model. this scenario is very common in the field as it is often not feasable to train a separate models for each language and we present a method that allows for better representational similarity analysis then cka/cca. in summary, our contributions are three-fold:• conceptual and empirical critique of cka/cca for cross-lingual similarity analysis (section 4);• average neuron-wise correlation as a simple alternative method designed specifically for cross-lingual similarity (section 5);• scaling laws of cross-lingual similarity in both multilingual mlms and clms (section 6).2 related work hotelling (1936) introduced cca as a method for measuring canonical correlations between two sets of random variables. in a cross-lingual setting, we have a single network, and we compare representations that come from different languages. following the introduction of svcca, kudugunta et al. (2019) used it to compare language representations (at different layers) in a multilingual neural machine translation system. the method we present in this work applies to the seq2seq models, but in this work, we focus on models trained with clm and mlm objectives while leaving seq2seq for future work. singh et al. (2019a) performed a similar study where they focused on the multilingual bert model2 and employed pwcca as a similarity index. the conclusion was that language representations diverge with network depth.on the other hand, conneau et al. (2020) and muller et al. (2021) used cka and behavior analysis to show that the opposite pattern takes place: language representations align with the network depth and only moderately decrease towards the end. in other words, representations first converge towards language neutrality and then recover some language-specificity. the alignment makes zeroshot cross-lingual transfer possible, and slight divergence accompanies language-specific training objectives (such as english downstream prediction task or predicting words in the particular language as in masked language modeling objective). following muller et al. 2021, we call this phenomenon the "first align, then predict" pattern.eventually, del and fishel (2021) showed that the similarity analysis was different because singh et al. (2019a) used cls-pooling while muller et al. (2021) used mean-pooling to convert token embeddings into a sentence representation. they also showed that mean-pooling is a better option.finally, li et al. (2015) aligned most correlated neurons between layers of two different networks and then computed similarity from the recovered correspondence. 
the method we propose in this paper is similar in spirit to this one, except we focus on the cross-lingual analysis of multilingual models and thus have no need to find the alignment between neurons.in this work, we build on these studies in three ways: we demonstrate that even cka can fail to provide relevant cross-lingual similarity, we propose another method to compare multilingual representations, and we reveal that the "first align, then predict" pattern generalizes across training objectives and holds for models of large sizes.similarity indexes like canonical correlation analysis (cca,hotelling, 1936)or centered kernel alignment (cka,kornblith et al.finally,muller et al.in this paper we are interested specifically in the scenario of measuring the strength of cross-lingual similarity of representations that come from a single multilingual language model. (2021)used cka and behavior analysis to show that the opposite pattern takes place: language representations align with the network depth and only moderately decrease towards the end.in this work, we build on these studies in three ways: we demonstrate that even cka can fail to provide relevant cross-lingual similarity, we propose another method to compare multilingual representations, and we reveal that the "first align, then predict" pattern generalizes across training objectives and holds for models of large sizes.specifically, we want to check if different normalization choices of the transformer(vaswani et al.experiment 2: cka similarirty after having the xnli zero-shot cross-lingual transfer scores, we extract sentence representations from all layers of each model and compare layers using the cka similarity index. next, we compare two parallel sets of sentence representations using the cka similarity index to get a similarity score for each layer. while the similarity for the first half of the layers increases (layers 0-5), the cka score drops dramatically at the middle layer of the network and continues to hang around zero for all remaining layers (layers 6-12).in summary, similarity indexes value different aspects of representations and correspond to different concepts of similarity. first, it aligns well with the goal that motivated most cross-lingual similarity analysis works: zero-shot cross-lingual transfer learning.the figure shows that unlike cka (figure1), the anc is able to reveal the "first align, then predict" pattern for the scale_normformer and better explains the evidence we provided in table1andfigurein summary, this section demonstrated that our method passes the sanity checks of both related literature and the section 4 experiment (that made cka fail). in this section, we present an application of anc to the analysis of representational similarity scaling in cross-lingual language models.results figures 5 and 6 reveal that the crosslingual similarity of multilingual representations in all the networks we study follows the same "first align, then translate" pattern.in this study, we introduced an example where cka drastically fails to reveal the cross-lingual similarity between language representations across the deeper layers of the multilingual model. 
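A small sketch of the two similarity indexes contrasted above. The linear CKA formula is standard; the ANC function reflects our reading of the text (neurons are already aligned because both representations come from the same model, so each neuron pair is correlated across parallel sentences and the correlations are averaged). The exact normalization, and whether absolute values are taken, are assumptions rather than the paper's definition.

```python
import numpy as np

def linear_cka(X, Y):
    """Linear CKA between two representation matrices of shape (n_sentences, n_neurons)."""
    X = X - X.mean(axis=0)
    Y = Y - Y.mean(axis=0)
    num = np.linalg.norm(X.T @ Y, "fro") ** 2
    den = np.linalg.norm(X.T @ X, "fro") * np.linalg.norm(Y.T @ Y, "fro")
    return num / den

def averaged_neuron_wise_correlation(X, Y):
    """ANC sketch: Pearson correlation of each aligned neuron across parallel
    sentences in two languages, averaged over neurons."""
    X = X - X.mean(axis=0)
    Y = Y - Y.mean(axis=0)
    corrs = (X * Y).sum(axis=0) / (
        np.linalg.norm(X, axis=0) * np.linalg.norm(Y, axis=0) + 1e-12)
    # averaging absolute correlations is an assumption; signed averaging is also plausible
    return float(np.abs(corrs).mean())
```

Because ANC never mixes neurons, it cannot be fooled by rotations of the representation space in the way the text argues CKA can be in this setting.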
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/380.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/380.txt new file mode 100644 index 0000000000000000000000000000000000000000..df66c1d378595aa98be022a5d16534cf0e71c450 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/380.txt @@ -0,0 +1 @@ +generative semantic communication is an emerging topic that merges the fresh semantic communication area and the cutting-edge field of deep generative modeling. the next 6g communication systems will be ai-based and will rely on the transmission of the semantic information, trying to regenerate a semantically equivalent content, rather than exactly recovering the original bit sequence . semantic communication allows greater transmission efficiency due to the extraction and compression of semantic information leveraging effective deep learning models. as a consequence, the transmitted semantics are generally smaller and more robust to communication distortions, allowing to reduce latency and bandwidth while preserving reliability. this new paradigm is gaining interest in several fields of application, ranging from image compression , to video transmission , speech , point clouds , and the metaverse . with the increasing amount of connected devices and produced multimedia content, next-generation communication systems open corresponding author's email: eleonora.grassucci@uniroma1.it. new challenges. in the near future, we expect an explosion of demands that will require high compression ratios of transmitted data, leading to the study of how to properly extract semantic data, how to handle and exploit the received information or make receivers robust to channel corruptions and distortions.in this scenario, deep generative models assume crucial importance due to their ability to extract and exploit the received semantic information through a semantic-guided generation process. such models are trained to learn the original data distribution with the aim of generating new points by sampling from it. conditioning this generation process allows the user to control the output of the generation and generate content starting from a textual prompt, a low-resolution image, or a low-dimensional map. for these reasons, generative models can significantly enhance semantic communication frameworks, solving most of the learning challenges this new paradigm sets. very recently, a few examples have been proposed, including generative adversarial networks (gans) that have been involved to generate content from the received semantic vector , or from the semantic map . similarly, variational autoencoders (vaes) have been exploited for joint source-channel coding and image transmission . more recently, the power of state-of-the-art diffusion models has been exploited to generate photorealistic cityscape images starting from the corrupted received semantic maps with a very robust framework or from a textual prompt to reduce energy consumption . together with their promising solutions, generative semantic communication frameworks open new challenges and research pathways concerning the definition of semantic communication-tailored training losses and generative models. 
in particular, the sustainability of large state-of-the-art generative models will be crucial, together with novel methods to exploit the received noisy semantics and the assessment of the quality of service.while several overviews exist on semantic communication, they usually face the topic from a communication perspective. on the contrary, the aim of this icassp special session overview paper is to present a novel viewpoint on se- generative semantic communication is an emerging topic that merges the fresh semantic communication area and the cutting-edge field of deep generative modeling. semantic communication allows greater transmission efficiency due to the extraction and compression of semantic information leveraging effective deep learning models.in this scenario, deep generative models assume crucial importance due to their ability to extract and exploit the received semantic information through a semantic-guided generation process. for these reasons, generative models can significantly enhance semantic communication frameworks, solving most of the learning challenges this new paradigm sets. together with their promising solutions, generative semantic communication frameworks open new challenges and research pathways concerning the definition of semantic communication-tailored training losses and generative models. several works already investigate communication challenges, while we focus on the unexplored machine learning perspective since ai-based semantic communication frameworks have to deal with the following issues. both the sender and the receiver networks are subjected to resource constraints due to different aspects: i) they may have hardware limitations while current machine learning models have high computational demands; ii) the sender should reduce as much as possible the bandwidth usage and compress the semantic information. often, while modeling this latent space or learning the data distribution, generative models build a semantic representation of the data.moreover, deep generative models perfectly fit the semantic communication scenario due to their ability to exploit semantic information. an example is semantic image synthesis in which the generative model is conditioned by the semantic map and generates images according to this information. indeed, state-of-the-art models are not trained with noisy data, while directly instructing such models by simulating channel noise in the training data may result in more robust generative models. similarly, a challenge generative semantic communication frameworks have to face is exploiting noisy semantic information since stateof-the-art deep generative models consider clean conditioning. building a trade-off between the accuracy of the semantic information, the transmission cost/bandwidth, and the goal of the receiver is a challenging task that novel generative semantic communication frameworks have to face.another possible direction for the next generative semantic communication frameworks is defining novel loss functions that consider the preservation of the semantics in the regenerated content rather than the bitstream recovery. we highlight how deep generative models can significantly enhance the next semantic communication frameworks and we provide new research opportunities and directions for the generative semantic communication research topic. 
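The point about simulating channel noise during training can be illustrated with a toy additive white Gaussian noise channel applied to a semantic latent vector; the snippet below is a generic illustration under that assumption, not a component of any of the cited frameworks.

```python
import numpy as np

def awgn_channel(z, snr_db):
    """Corrupt a semantic latent vector z with additive white Gaussian noise at a
    given signal-to-noise ratio; training a generative decoder on such corrupted
    latents is one way to make it robust to channel distortions."""
    signal_power = np.mean(z ** 2)
    noise_power = signal_power / (10 ** (snr_db / 10))
    return z + np.random.normal(0.0, np.sqrt(noise_power), size=z.shape)

# example: a hypothetical 256-dimensional semantic embedding transmitted at 10 dB SNR
z = np.random.randn(256)
z_received = awgn_channel(z, snr_db=10)
```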
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/381.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/381.txt new file mode 100644 index 0000000000000000000000000000000000000000..4f5f881e560bbdf1636801a03a0cfa8a099943c2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/381.txt @@ -0,0 +1 @@ +centrifugal compressors' design is an extensive computational process as it requires the optimization of numerous 1 artificial intelligence team -baker hughes, florence, italy 2 software engineering team -baker hughes, florence, italy. correspondence to: shadi ghiasi .accepted after peer-review at the 1st workshop on synergy of scientific and machine learning modeling, syns & ml icml, honolulu, hawaii, usa. july, 2023. copyright 2023 by the author(s). design variables which are the starting point of software simulations of complex dynamical equations (ju et al., 2021).while engineering-powered software simulations are reliable solution to the end user due to the established technology, they have more complex formulations due to higher inter-dependency of system variables (garg et al., 2010).surrogate modeling with machine learning (ml) models for computer simulations enables reducing the computational cost and time required for a design simulation while maintaining a desired performance in industrial applications (bicchi et al., 2022;owoyele et al., 2022;kim et al., 2010). however, generating sufficient data points for training ml models is a daunting task since it requires running extensive software simulations. therefore, without any strategic sampling, the possibility to explore a larger design space is limited.under such circumstances, the active learning (al) strategy is a powerful framework to alleviate the problem of high quality annotation scarcity (settles, 2012). al is a ml technique that allows the model to interact with an oracle by queering the most important data for learning (monarch, 2021). in industrial applications, al can make the most of resources by significantly reducing the amount of labeled data for training ml models (brevault et al., 2022).utilizing ml surrogate models in the industrial simulation design setting has been explored by previous research. kim et al. implement surrogate modeling for optimization of a centrifugal compressor impeller (kim et al., 2010). however, without any strategic sampling, the research is done for a limited design space. automl-ga (owoyele et al., 2022) is an application of an automated machine learning-genetic algorithm coupled with computational fluid dynamics simulations for rapid engine design optimization. chabanet et al. (chabanet et al., 2021) apply al in industry 4.0 context. moreover, murugesan et al. (murugesan et al., 2022) propose an al framework for estimating the operating point of a modular multi pump used in energy field. wang et al. (wang & nalisnick, 2022) apply al for multilingual finger spelling corpora. finally, see also (reker, 2019) for some practical considerations for active ml in drug discovery. however, an al based framework has not been explored arxiv:2309.02818v1 6 sep 2023 for design optimization of centrifugal compressors. 
moreover, most research have focused on offline evaluation of al strategies, while, in industrial settings the data is acquired in real-time (cacciarelli et al., 2022) and a deployable streaming based al framework is needed.in this study, we present the activecompdesign framework for deployable al based design optimization of centrifugal compressors. we leverage on gaussian processes (gps) as deep surrogates of centrifugal compressor dynamics coupled with the al strategy with a design goal to reach the optimal power absorbed by the machine. we perform extensive computer simulations using our internal thermodynamics-based model integrated with an optimization algorithm to generate sufficient data samples for surrogate model training. we then use this data to perform an offline al algorithm with gp surrogates as a proof of concept. we further deploy our framework through an online al simulation environment in which the thermodynamics-based model and the ml-based model interact in real-time using a stream-based al strategy. our framework is currently in production. to the best of our knowledge, no other study have combined compressor's thermodynamics-based models and ml to propose a production-ready ai enhanced design optimization framework for design optimization of centrifugal compressors.centrifugal compressors' design is an extensive computational process as it requires the optimization of numerous 1 artificial intelligence team -baker hughes, florence, italy 2 software engineering team -baker hughes, florence, italy.surrogate modeling with machine learning (ml) models for computer simulations enables reducing the computational cost and time required for a design simulation while maintaining a desired performance in industrial applications(bicchi et al., 2022)propose an al framework for estimating the operating point of a modular multi pump used in energy field.in this study, we present the activecompdesign framework for deployable al based design optimization of centrifugal compressors. we leverage on gaussian processes (gps) as deep surrogates of centrifugal compressor dynamics coupled with the al strategy with a design goal to reach the optimal power absorbed by the machine. we perform extensive computer simulations using our internal thermodynamics-based model integrated with an optimization algorithm to generate sufficient data samples for surrogate model training. we further deploy our framework through an online al simulation environment in which the thermodynamics-based model and the ml-based model interact in real-time using a stream-based al strategy. to the best of our knowledge, no other study have combined compressor's thermodynamics-based models and ml to propose a production-ready ai enhanced design optimization framework for design optimization of centrifugal compressors.we do this within the activecompdesign framework by integrating a regression surrogate model of the physical process throughout the optimization process to benefit from a faster calculation of the system's response. however, due to the uncertainties produced by the surrogate model, we want to rely on the actual physical process equations to obtain reliable outputs when the ml surrogate model produces high uncertainties in prediction. 
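A minimal sketch of the uncertainty-gated surrogate described above: the GP answers when its predictive standard deviation is low, and otherwise the expensive thermodynamics-based simulator is queried and the new sample is added to the training set, which is essentially one stream-based active learning step. The simulator stub, kernel choice, and thresholds are placeholders, not the ActiveCompDesign implementation.

```python
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern

def expensive_simulator(x):
    # stand-in for the thermodynamics-based compressor simulation (hypothetical)
    return np.sin(3.0 * x[0]) + 0.5 * x[1] ** 2

gp = GaussianProcessRegressor(kernel=Matern(nu=2.5), normalize_y=True)
X_seen, y_seen = [], []

def predict_power(x, std_threshold=0.1, min_samples=5):
    """Answer with the GP surrogate when it is confident; otherwise query the
    simulator and refit the GP on the enlarged sample."""
    if len(X_seen) >= min_samples:
        mu, std = gp.predict(np.asarray([x]), return_std=True)
        if std[0] < std_threshold:
            return float(mu[0])               # cheap, confident surrogate prediction
    y = expensive_simulator(x)                # reliable but expensive evaluation
    X_seen.append(list(x)); y_seen.append(y)
    gp.fit(np.asarray(X_seen), np.asarray(y_seen))
    return y

# example stream of candidate designs (placeholder 2-d design variables)
for x in np.random.default_rng(0).uniform(-1, 1, size=(20, 2)):
    predict_power(x)
```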
in the following subsections we provide formulation for surrogate modeling of the design optimization process and the implemented offline and online al framework.with this approach we design a simulation environment where the thermodynamics-based simulator, the optimizer and the surrogate model can interact in real time after each data streaming.since our optimization goal is to reach minimum power (p) with less expensive computational effort but keeping the reliability of the output, we rely on the gp surrogate model only as an alternative model in case the uncertainty of the prediction is high.to generate the dataset for training and evaluating the gp surrogate model, we perform a bayesian optimization using gps (from the scikit-optimize library(louppe, 2017)) on top of our thermodynamics-based simulator of compressor design.the benefits of combining active ml methods with physical models underlying compressor's dynamics are large for design optimization applications including faster computations and more accurate design solutions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/382.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/382.txt new file mode 100644 index 0000000000000000000000000000000000000000..4918e1a88e4e54c3234de4636084cf59c69659a3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/382.txt @@ -0,0 +1 @@ +in clustering and classification, there have been numerous distance-based algorithms proposed. while the euclidean metric is the "standard" notion of distance between numerical vectors, it does not always result in accurate learning. this can be e.g. due to the presence of many dependent features, noise, or features with large ranges that dominate the distances . mahalanobis metric learning aims at lessening this caveat by linearly transforming the feature space in a way that properly weights all important features, and discards redundant ones. in its most common form, metric learning focuses on learning a mahalanobis metric - .metric learning algorithms can be divided into two types based on their purpose . distance-based metric learning aims to increase the distances between instances of different classes (inter-class distances) and decrease the distances inside the same class (intra-class distances). on the other hand, classifier-based metric learning focuses on directly improving the performance of a particular classification algorithm, and is therefore dependent on the algorithm in question.despite the success of mahalanobis metric learning, highdimensionality of the data is a provable bottleneck that arises fairly often in practice. the work of has shown, through both upper and lower bounds, that, in the absence of assumptions or constraints, the sample complexity of mahalanobis metric learning, increases linearly with the data dimension. in addition, so does the computational complexity of learning. compounding this, high-dimensionality is known to quickly degrade the performance of machine learning algorithms in practice. this means that, even if a suitable distance metric is found, the subsequent algorithm might still perform poorly. all these issues are collectively known as the curse of dimensionality .it has been observed, however, that many real-world data sets do not fill their ambient spaces evenly in all directions, but instead their vectors cluster along a low-dimensional subspace with less mass in some directions, or have many redundant features . 
we refer to these data sets, in a general sense, in a broad sense, as having a low intrinsic dimension (low-id). due to their lower information content, it is intuitively expected that learning from such a data set should be easier, both statistically and computationally. one of the most popular ways to take advantage of a low-id is to compress the original data set into a low-dimensional space and then proceed with learning in this smaller space .random projections is a widely used compression method with attractive theoretical guarantees. these are universal in the sense of being oblivious to the data being compressed. all instances are subjected to a random linear mapping without significantly distorting euclidean distances, and reducing subsequent computing time. there has been much research on controlling the loss of accuracy with random projections for various learning algorithms, see e.g. , . another advantage, is that no pre-processing step is necessary beforehand, making random projections simple to implement . in the case of mahalanobis metric learning, an additional motivation is to reduce the number of parameters to be estimated.• a high-probability upper bound on the excess empirical error of the learnt metric, relative to the empirical error of the metric learnt in the original space. for a set t , we write diam(t ) := sup x,x ′ ∈t ∥x -x ′ ∥ for its diameter, and t -t := {x -x ′ : x, x ′ ∈ t }. let x × y be the instance space, where x ⊂ r d is the feature space and y = {0, 1} is the set of labels.the goal of mahalanobis metric learning is to learn a matrix m ∈ r d×d , such that the mahalanobis distance between any two instances x, x ′ , i. ∥m x -m x ′ ∥, is larger if x, x ′ have different labels and smaller if x, x ′ share the same label. for the purpose of dimensionality reduction, given a fixed k, where k ≤ d, we let r ∈ r k×d be our random projection (rp) matrix. to see why, let r 1 , r 2 ∈ r k×d be fixed matrices (which can also be seen as vectors in r kd ), and note that. from n (0, 1/k), t ⊂ (x × y) 2 be the training set defined in (3), m k be the hypothesis class defined in (2), l r d be the compressed true error defined in(5), and lr t be the compressed empirical error defined in(6).proof: let p be a probability measure induced by the random variable (x, y ), where x := (x, x ′ ) and y := i{y = y ′ }, for ((x, y), (x ′ , y ′ )) ∼ d 2 .also, for all i ∈ , let x i := (x 2i-1 , x 2i ) and y i := i{y 2i-1 , y 2i } be "regrouped" versions of the elements of t , defined in (3).lemma 7 (rademacher bound): let d be a distribution over x × {0, 1} and let {(x i , y i )} n i=1 be a sample of size n drawn i. this implies that, unless the data support fills the whole ambient space, the empirical error calculated in the compressed space is closer to the true error in the compressed space. from n (0, 1/k), t ⊂ (x × y) 2 be the training set defined in (3), m and m k be the hypothesis classes defined in (1) and (2) respectively, lt be the empirical error defined in(8), and lr t be the compressed empirical error defined in(6). this means that if the empirical error in the ambient space is small, the empirical error in the compressed space scales with the stable dimension, instead of the ambient dimension.to summarise, a gaussian random projection incurs a lower generalisation gap for mahalanobis metric learning, but induces an excess empirical error, compared to learning the metric in the ambient space. 
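A short sketch of the compression step described above: a Gaussian random projection R in R^{k x d} with i.i.d. N(0, 1/k) entries maps the data to R^k, and a Mahalanobis-type metric is then learned and evaluated in that compressed space. The data and the identity stand-in for the learned matrix M_k are placeholders.

```python
import numpy as np

def gaussian_rp(d, k, rng):
    """Random projection matrix R in R^{k x d} with i.i.d. N(0, 1/k) entries."""
    return rng.normal(0.0, 1.0 / np.sqrt(k), size=(k, d))

def mahalanobis_dist(M, a, b):
    """Mahalanobis-type distance ||M a - M b|| induced by a (learned) matrix M."""
    return float(np.linalg.norm(M @ (a - b)))

rng = np.random.default_rng(0)
d, k = 1000, 50
X = rng.normal(size=(200, d))          # high-dimensional data (placeholder)
R = gaussian_rp(d, k, rng)
X_compressed = X @ R.T                 # each instance now lives in R^k

# a metric would now be learned on X_compressed (k x k parameters instead of d x d);
# the identity below is only a stand-in for the learned M_k
M_k = np.eye(k)
print(mahalanobis_dist(M_k, X_compressed[0], X_compressed[1]))
```

The practical appeal is visible in the shapes: the metric to be estimated shrinks from d x d to k x k parameters, which is the sample-complexity motivation discussed above.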
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/383.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/383.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5ce5e5c15c0655ddd5cf22e92cd542e25411c47 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/383.txt @@ -0,0 +1 @@ +in contemporary society, electrical energy has emerged as a pivotal resource propelling the economic and societal progress of nations worldwide. it is extensively utilized in industries, including manufacturing, mining, construction, and healthcare, among others. the provision of consistent and high-quality electrical power is not merely a convenience; rather, it is imperative to sustain investor confidence in economies and foster further development . with the advent of new technological advancements, electricity demand has surged, creating an urgent need for more costeffective and reliable power supply solutions .the current energy infrastructure lacks substantial energy storage capabilities in the generation, transmission, and distribution systems . this deficiency necessitates a precise balance between electricity generation and consumption. the maintenance of balance is contingent upon the utilization of an accurate load forecasting approach. adapting electricity generation to dynamically meet shifting demand patterns is paramount; since failure to do so puts the stability of the entire power system at risk .moreover, as the world pivots towards the increased adoption of renewable energy sources , power grids have witnessed a substantial transformation in their composition and structure. this integration of renewable energy sources, such as wind and solar power, introduces a degree of unpredictability into energy generation due to the stochastic nature of these sources . consequently, ensuring a stable and secure power system operation becomes an even more complex endeavor, demanding meticulous power planning and precise load forecasting.electric load forecasting is the practice of predicting electricity demand within a specific region. this process can be categorized into three distinct groups: short-term, medium-term, and long-term forecasting, depending on the forecasting horizon. short-term load forecasting (stlf), which focuses on predicting electricity demand for upcoming hours, a day, or a few days, serves as the foundation for effective power system operation and analysis. it facilitates the optimization of the operating schedules of generating units, including their start and stop times, and their expected output. the accuracy of stlf is of critical importance, as it directly influences the efficient utilization of generating units . the absence of accurate short-term load forecasting can lead to many operational challenges, including load shedding, partial or complete outages, and voltage instability. these issues can have detrimental effects on equipment functionality and pose potential risks to human safety.short-term load forecasting methods are pivotal in achieving this precision. these methods can be broadly classified into two main categories: statistical methods and machine learning methods , . machine learning-based load forecasting methods, such as the autoregressive integrated moving average model (arima) , long shortterm memory (lstm) , generative adversarial network (gan) , and convolutional neural network (cnn) , have gained prominence. 
these machine learning methods excel at capturing complex nonlinear data features within load patterns . they leverage the ability to discern similarities in electricity consumption across diverse power supply areas and customer types, allowing for more accurate and feasible load forecasting through the consideration of spatial-temporal coupling correlations. machine learning-based load forecasting methods, such as the autoregressive integrated moving average model (arima), long shortterm memory (lstm), generative adversarial network (gan), and convolutional neural network (cnn), have gained prominence. 2) attention-augmented hybrid model: given that power demand is impacted by short-term fluctuations and long-term trends in data, a hybrid model is used to detect both temporal and extended dependencies, improving accuracy.a convolutional neural network (cnn) is a deep learning model designed primarily for image-related tasks, but it can also be applied to other grid-like data, such as audio or time series data. fully connected layer: after multiple convolutional and pooling layers, the spatial dimensions are reduced, and the network connects to one or more fully connected layers, also known as dense layers.the lstm network is a type of recurrent neural network (rnn) architecture that is designed to capture and model sequential data while addressing the vanishing gradient problem that plagues traditional rnns. it takes the previous cell state and the current input (x t ) as input and produces a forget gate output. it takes the previous cell state and the current input and produces an input gate output.candidate cell state ( ct ): this is a candidate new cell state, computed using the current input and a tanh activation function.cell state update (c t ): the cell state is updated by combining the information retained from the previous cell state (f t • c t-1 ) and the new candidate cell state (i t • ct ). it takes the current input and the updated cell state and produces an output gate output.hidden state (h t ): the hidden state is the output of the lstm cell, which is used as the prediction and is also passed to the next time step. meanwhile, the cell state (c t ) acts as a memory buffer, accumulating and passing relevant information across time steps, thus enabling the model to recognize and exploit long-term patterns within input sequences. following the input layer, a convolutional layer is used to capture temporal spatial patterns in the data. subsequently, a bidirectional lstm layer is employed to model long-term dependencies both forward and backward, enabling the capture of historical data through time. the crucial multi-head attention module operates on the output of the first bidirectional lstm layer, enabling the model to focus on the most relevant features and learn their importance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/384.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/384.txt new file mode 100644 index 0000000000000000000000000000000000000000..4a398a8c702f850001e15c05b0d4f90099809cbc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/384.txt @@ -0,0 +1 @@ +deep learning models have become larger and larger over the past few years which leads to models with huge numbers of training parameters. training such a huge model requires a large amount of memory and computational power hajimolahoseini et al. (2022); hajimolahoseini et al.;walid ahmed (2022); ahmed et al. 
(2022) as well as big datasets. however, for some applications, especially on the edge devices with online learning capabilities, the memory and training time could be limited and a large dataset may not be available too li et al. (2021); hajimolahoseini et al. (2023;2019). in cases like this, the model is highly prone to the issue of overfitting and may also not be able to generalize well. hence, having a model which is able to learn fast on a small dataset and generalize well is highly beneficial hajimolahoseini et al. (2018).there are some techniques proposed in the literature which try to improve the training efficiency using different approaches during training yong et al. (2020); foret et al. (2020); zhang et al. (2018). for a detailed review of related work the reader is referred to coleman et al. (2017). in this competition, the goal is to reach to the highest possible accuracy in an image classification task in less than 10 minuets. the training is performed on a 10% subset of cifar10 dataset including 10 classes, each with 500 training samples and 100 test samples of 32 × 32 pixels rgb images krizhevsky et al. (2009). the evaluation is performed on mini-imagenet dataset (hidden at development time) of size similar to the development dataset deng et al. (2009). no pre-trained weights are allowed so the models are trained from scratch. more details about the proposed methodology is presented in the following sections.deep learning models have become larger and larger over the past few years which leads to models with huge numbers of training parameters. training such a huge model requires a large amount of memory and computational powerhajimolahoseini et al.;walid ahmed (2022);ahmed et al. (2022)as well as big datasets. however, for some applications, especially on the edge devices with online learning capabilities, the memory and training time could be limited and a large dataset may not be available tooli et al.there are some techniques proposed in the literature which try to improve the training efficiency using different approaches during trainingyong et al. (2018). (2016)as our baseline.during the training of the baseline model, we observed that the generalization of first-order optimizers such as sgd was sub-par, since the test accuracy of resnet-9 trained on the 10% of cifar10 dataet could not reach higher than 76.for models with a very large capacity, the value of training loss does not necessarily guarantee the ability of the model to generalize wellforet et al. this method converts the minimization problem into a min-max optimization on which sgd could be performed more efficiently. this is done by creating a projected gradient descent method with a constrained loss function.as a mechanism for generalizing the knowledge learned over many few-shot learning tasks, metalearning is a promising training approach for few-shot learning problems.this meta-learning approach has been reframed as a single-task algorithm for training on small dataset (10 classes of mini-imagenet) -named meta-learning based training procedure (mltp).in this paper, we adopted some techniques for improving the generalization of resnet-9 model when training on a small dataset in a very short time. 
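The min-max, perturbed-gradient idea referenced above (sharpness-aware minimization, Foret et al., 2020) can be sketched in a few lines of PyTorch: climb to an approximately worst-case nearby point in weight space, compute the gradient there, and apply it at the original weights. This is a generic SAM-style step under our own default rho, not the exact training recipe used for the ResNet-9 experiments.

```python
import torch

def sam_step(model, loss_fn, x, y, base_optimizer, rho=0.05):
    # 1) gradient at the current weights
    loss_fn(model(x), y).backward()
    params = [p for p in model.parameters() if p.grad is not None]
    grad_norm = torch.norm(torch.stack([p.grad.norm() for p in params]))
    # 2) perturb to the (approximate) worst-case nearby weights: w + rho * g / ||g||
    eps = []
    with torch.no_grad():
        for p in params:
            e = rho * p.grad / (grad_norm + 1e-12)
            p.add_(e)
            eps.append(e)
    model.zero_grad()
    # 3) gradient at the perturbed weights
    loss = loss_fn(model(x), y)
    loss.backward()
    # 4) move back to the original weights and step with the perturbed gradient
    with torch.no_grad():
        for p, e in zip(params, eps):
            p.sub_(e)
    base_optimizer.step()
    model.zero_grad()
    return loss.item()
```

Here base_optimizer would be a plain first-order optimizer, for example torch.optim.SGD(model.parameters(), lr=0.1).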
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/385.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/385.txt new file mode 100644 index 0000000000000000000000000000000000000000..3debff7f9f62d82f1d96c9901c48f55ca6eb4de9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/385.txt @@ -0,0 +1 @@ +designing neural network architectures that correctly account for the symmetry of physical laws is an important requirement for applications of artificial intelligence in science. in particular, for dynamical systems in physics, the relevant properties transform invariantly or equivariantly under euclidean transformations. this is also the case when modelling particles or atomistic systems, for which machine learning simulators are already widely used.there now exist a wide variety of equivariant neural network architectures leveraging a diverse set of mathematical formulations, among which we highlight two important classes. first, some architectures apply spherical harmonics mappings to incorporate directional information in an equivariant way (thomas et al., 2018;fuchs et al., 2020). these architectures have the advantage of being highly expressive but are also computationally expensive and challenging to implement. second, we highlight models that fall under the equivariant multilayer perceptron (e-mlp) paradigm (finzi et al., 2021). the idea behind this paradigm is to simply generalize standard multilayer perceptrons by composing equivariant linear layers with appropriate non-linear functions. these architectures are much simpler to work with and more computationally efficient than those using spherical harmonics, but in principle, require lifting input quantities to high-order tensors to achieve high expressivity (finkelshtein et al., 2022). prior works, however, show that one can achieve satisfactory modelling performance without requiring higher-order representations. one such example is the vector neurons (deng et al., 2021) model, which can be seen as an equivariant multilayer perceptron with order-1 vector features. this architecture also leverages the fact that the number of vector channels (neurons) in each layer can be arbitrary to increase expressivity.the e(n)-equivariant graph neural network (egnn) model by satorras et al. (2021) is an example of a model that does not clearly fit into one of the categories above. nevertheless, egnn has become widely applied mainly due to efficiency and simple model design. egnn uses the message-passing framework, which captures the inductive bias that sparse interactions between entities should lead to better generalization. egnn also has the advantage of separating equivariant features into a separate channel that only follows equivariant operations. the work of (brandstetter et al., 2022) extends egnn by using ideas inspired by spherical-harmonics-type architectures. their steerable e(n)-equivariant graph neural network (segnn) achieves better performance across some benchmarks but suffers from similar conceptual shortcomings in addition to increased computational complexity.in this paper, we explore the direction of generalizing egnn by drawing from e-mlp-type architectures. egnn only updates a single vector for each node in the graph over each layer. a natural way to increase the expressivity of this model is to make the number of vector channels arbitrary. 
in our experiments, we show that this change alone leads to an important increase in performance for some physical modelling tasks. this multi-channel extension also retains the simplicity and computational efficiency of the original architecture and makes intuitive physical sense: the network may use the different channels to store additional physical quantities relevant to the prediction task.we note that gmn (huang et al., 2022) proposes to use multiple channels as part of a generalized egnn-like model, as does gvp-gnn (jing et al., 2021). however, since this is one contribution amongst several others in gmn, and gvp-gnn the advantage of using multiple channels is not clear. here we show that we can obtain significant benefits only with the additional channels. in this short paper, we highlight that simply adding multiple channels to egnn can lead to a significant performance increase compared to much more expensive methods such as segnn. we believe this result should be of use to practitioners looking to preserve the advantages of egnn. the idea behind this paradigm is to simply generalize standard multilayer perceptrons by composing equivariant linear layers with appropriate non-linear functions. these architectures are much simpler to work with and more computationally efficient than those using spherical harmonics, but in principle, require lifting input quantities to high-order tensors to achieve high expressivity(finkelshtein et al.the e(n)-equivariant graph neural network (egnn) model bysatorras et al. egnn uses the message-passing framework, which captures the inductive bias that sparse interactions between entities should lead to better generalization. this multi-channel extension also retains the simplicity and computational efficiency of the original architecture and makes intuitive physical sense: the network may use the different channels to store additional physical quantities relevant to the prediction task. however, since this is one contribution amongst several others in gmn, and gvp-gnn the advantage of using multiple channels is not clear.we first conducted a hyperparameter search using a one vector-channel egnn to maximize its performance on the validation set, and then used those hyperparameters when testing the egnn models with 2, 3, and 5 vector channels. while using 2 vector channels improves over using 1, it takes 3 vector channels for the model to achieve its highest performance.table1: performance on the solar system prediction task using differing number of vector channels.the results, shown in table2, demonstrate that using just 1 more vector channel (mc-egnn-2) yields greatly improves performance over the single-vector egnn and matches the performance of more sophisticated models such as segnn(brandstetter et al. the vector channels are only used in the intermediate layers, but as we report hereafter, they still contribute to increased performance on all targets compared to the standard egnn. the results, shown in table3, demonstrate that performance is also comparable to segnn when using 8 vector channels.we show here that adding multiple channels to the egnn model leads to performance improvements in prediction tasks on physical systems, sometimes matching more complicated architectures. this is achieved without a significant increase in the forward runtime of the model because only a small number of vector channels are needed to obtain improvements. 
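For concreteness, here is a loose sketch of what a multi-channel EGNN-style layer can look like: invariant node features are updated from messages built on pairwise channel distances, and the C vector channels are updated by invariant-gated relative vectors followed by a learned linear mixing of channels. This is our own simplified, fully connected construction for illustration, not the architecture evaluated in the paper.

```python
import torch
import torch.nn as nn

class MultiChannelEGNNLayer(nn.Module):
    """h: (N, F) invariant node features; v: (N, C, 3) equivariant vector channels."""
    def __init__(self, feat_dim, n_channels, hidden=64):
        super().__init__()
        self.edge_mlp = nn.Sequential(
            nn.Linear(2 * feat_dim + n_channels, hidden), nn.SiLU(),
            nn.Linear(hidden, hidden), nn.SiLU())
        self.node_mlp = nn.Sequential(
            nn.Linear(feat_dim + hidden, hidden), nn.SiLU(),
            nn.Linear(hidden, feat_dim))
        self.vec_gate = nn.Linear(hidden, n_channels)                     # invariant scalar gates
        self.channel_mix = nn.Linear(n_channels, n_channels, bias=False)  # mixes vector channels

    def forward(self, h, v):
        N = h.shape[0]
        diff = v.unsqueeze(1) - v.unsqueeze(0)            # (N, N, C, 3) relative vectors
        dist2 = (diff ** 2).sum(-1)                       # (N, N, C) invariant distances
        hi = h.unsqueeze(1).expand(N, N, -1)
        hj = h.unsqueeze(0).expand(N, N, -1)
        m = self.edge_mlp(torch.cat([hi, hj, dist2], dim=-1))            # (N, N, hidden)
        # invariant feature update from aggregated messages
        h_new = h + self.node_mlp(torch.cat([h, m.sum(dim=1)], dim=-1))
        # equivariant update: gate relative vectors with invariant scalars, then mix channels
        delta = (self.vec_gate(m).unsqueeze(-1) * diff).mean(dim=1)       # (N, C, 3)
        delta = self.channel_mix(delta.transpose(1, 2)).transpose(1, 2)
        v_new = v + delta
        return h_new, v_new
```

Because only invariant scalars multiply the relative vectors and the channel mixing is linear, rotations commute with the update (and translations are preserved for channels holding positions), so the layer stays equivariant while storing several vector quantities per node.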
one possible downside we noticed with the multi-channel model was that training could be less stable when more vector channels were used. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/386.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/386.txt new file mode 100644 index 0000000000000000000000000000000000000000..6618387fede1acf4c7e28827ad944538b9c5e603 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/386.txt @@ -0,0 +1 @@ +logistic regression has been applied widely in many areas as a method of classification. the goal of logistic regression is to maximize the likelihood based on the observation of training samples, with its objective function formulated as follows with a natural and meaningful probabilistic interpretation:where x i and y i denote the i-th sample and its label.though logistic regression is straightforward and effective, its performance can be diminished due to over-fitting , especially when the dimensionality of features is very high compared to the number of available training samples. therefore, regularization term is usually introduced to alleviate overfitting issue . also, in applications with high-dimensional data, it's desirable to obtain sparse solutions, since in this way we are conducting classification and feature selection at the same time. therefore, ℓ 1 -regularized logistic regression has received more attention with the sparsity-inducing property and its superior empirical performance .more recent studies show that ℓ 1 -norm regularization may suffer from implicit bias problem that would cause significantly biased estimates, and such bias problem can be mitigated by a nonconvex penalty . therefore, nonconvex regularization has also been studied to induce sparsity in logistic regression . solving sparse logistic regression with convex ℓ 1 -norm regularizer or with nonconvex term using a unified algorithm has been studied in . however, it imposes strong regularity condition on the nonconvex term and it transfers the nonconvex problem into an ℓ 1 -norm regularized surrogate convex function, which limits its generality. as a contribution, we extend the scope of nonconvex penalties to a much weaker assumption and compare the performance of different regularization terms with a unified optimization framework.in this paper, we solve ℓ 1 -regularized (sparse) logistic regression by proposing a novel framework, which can be applied to non-convex regularization term as well. the idea of our proposed method stems from the well know iterative shrinkage thresholding algorithm (ista) and its accelerated version fast iterative shrinkage thresholding algorithm (fista) , upon which we modify the step-size setting and line search criteria to make the algorithm applicable for both convex and nonconvex regularization terms with empirical faster convergence rate. to be clear, we call any logistic regression with regularization term that can produce sparse solutions as sparse logistic regression, therefore, the term is not only limited to ℓ 1 -norm regularization. the idea of our proposed method stems from the well know iterative shrinkage thresholding algorithm (ista) and its accelerated version fast iterative shrinkage thresholding algorithm (fista), upon which we modify the step-size setting and line search criteria to make the algorithm applicable for both convex and nonconvex regularization terms with empirical faster convergence rate. 
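A minimal ISTA sketch for ℓ1-regularized logistic regression, using the soft-thresholding proximal operator and a fixed step size 1/L, where L upper-bounds the Lipschitz constant of the logistic-loss gradient; the backtracking, Barzilai-Borwein, and reverse step-size rules discussed in the text would replace the fixed step. Variable names and the iteration budget are illustrative.

```python
import numpy as np

def soft_threshold(z, t):
    # proximal operator of t * ||.||_1
    return np.sign(z) * np.maximum(np.abs(z) - t, 0.0)

def ista_l1_logreg(X, y, lam=0.01, n_iter=500):
    """ISTA for min_beta  mean logistic loss(X @ beta, y) + lam * ||beta||_1, with y in {0, 1}."""
    n, d = X.shape
    # fixed step 1/L, with L = sigma_max(X)^2 / (4n) an upper bound on the gradient's Lipschitz constant
    L = np.linalg.norm(X, 2) ** 2 / (4.0 * n)
    step = 1.0 / L
    beta = np.zeros(d)
    for _ in range(n_iter):
        p = 1.0 / (1.0 + np.exp(-X @ beta))     # predicted probabilities
        grad = X.T @ (p - y) / n                # gradient of the mean logistic loss
        beta = soft_threshold(beta - step * grad, step * lam)
    return beta
```

FISTA adds a momentum extrapolation between iterations, and a nonconvex penalty would swap the soft-thresholding step for the corresponding proximal map.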
to be clear, we call any logistic regression with regularization term that can produce sparse solutions as sparse logistic regression, therefore, the term is not only limited to ℓ 1 -norm regularization. in, the momentumized iterative shrinkage thresholding (mist) algorithm is proposed to minimize the nonconvex criterion for linear regression problems, and similar ideas can be applied to logistic regression as well. we use ista and fista with backtracking line search to solve eq (2), which are described in algorithm 1 and 2 respectively, where p l (β) represents the proximal operator defined as.in algorithm 2, we also initialize the step size as 1 l to avoid too small step size.besides the step size setting in bb rule aforementioned, another option proposed by us is to find the largest step-size by searching reversely: we start by setting the step-size to 1 l in each iteration and keep enlarging it until the line search condition is not satisfied and take the last step-size satisfying the criterion. from figure2we can see that for fista, when we initialize the step-size with 1 l (fista-lipschitz), it has better convergence performance than initialized with a random number (fista-random), which might be smaller than 1 l . for ista, the two algorithms proposed by us (ista-bb and ista-reverse) have similar performance and they obviously outperform vanilla ista with backtracking line search, either algorithm 3 ista-reverse: ista with lipschitz constant and reverse step size searching to solve eq (2). initialize β 0 randomly, step size 1 l0 as 1 l , where l is the lipschitz constant of ∇l(β), set η > 1; repeat 1) find the smallest nonnegative integer i k such that with.while the ℓ 1 -norm regularization is convenient since it's convex, several studies show that sometimes nonconvex regularization term can have better performancethough it turns the objective to nonconvex and even nonsmooth, which is challenging to obtain optimal solution.initialize β 0 , step size 1 l0 as 1 l , where l is the lipschitz constant of ∇l(β), set η > 1; repeat 1) start from k = 2, update the step size 1 l k using the barzilai-borwein (bb) rule 2) find the smallest nonnegative integer i k such that with.algorithm 5 ista-reverse: ista with lipschitz constant and reverse step size searching to solve eq(12). initialize β 0 randomly, step size 1 l0 as 1 l , where l is the lipschitz constant of ∇l(β), set η > 1; repeat 1) find the smallest nonnegative integer i k such that with l = l 0 /η i k we have f (pl. all the other parameters are set as suggested in the original papers. the proximal newton method is not included in the figure because its running time is way higher than the others, making it hard to visualize the time in the same figure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/387.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/387.txt new file mode 100644 index 0000000000000000000000000000000000000000..aac0f3a8e4e3f8b3d56152943bd025ce7dd2b0ac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/387.txt @@ -0,0 +1 @@ +tisr is written in the programming language julia developed by bezanson et al., 2017. it utilizes a genetic programming (see koza, 1994) algorithm, i. e., a modified version of nsga-ii (see deb et al., 2002) with an island model population structure (see gorges-schleuter, 1991).3.1. overview. 
tisr's main loop consists of expression mutation (section 3.4), individual instantiation (described next), and selection (section 3.3). every iteration of this main loop constitutes a generation of the genetic algorithm.each individual contains one expression and a number of related attributes, which are determined in the "individual instantiation", which is divided in the following steps:(1) unnecessary parameter removal• for example: parameter + parameter → parameter, function(parameter) → parameter, . . . • values of parameters are not adjusted, as parameters are identified after this step (2) randomly trimming expressions that exceed the size limit (3) reordering of operands of + and •• for example:• reorder according to following rules (< means before): parameter < variable < unary operator < binary operator (4) grammar checking (described below) (5) parameter identification (section 3.5) (i) calculate residual-related measures (see table 3.1) (ii) calculate constraint violations (coming soon) (6) singularity prevention (coming soon) (7) determination of attributes unrelated to the residual (see table 3.1) at the "grammar checking", "parameter identification" and the "singularity prevention" steps, individuals may be deemed invalid, resulting in their termination and removal. the use of grammar may increase the algorithm's performance by filtering out individuals before parameter identification. currently, two grammar options are available. the user may prohibit certain operator compositions, e. g., cos(cos(x)) or exp(log(x)). these are also enforced during the random creation of expressions. the second currently implemented grammar option prohibits parameters in exponents, i. e., (x + 1) 3 would be allowed but 3 (x+1) would not. the latter grammar is not enforced at the "grammar checking" step above, but rather throughout tisr in the individual creation and mutations. we plan to introduce more grammar options in the future.the attributes of each individual, their brief description, and whether the attributes are related to the residual can be seen in table 3.1. currently, the ability to add custom attributes by the user is not implemented but will be added in the future.for parallelization, we currently employ multithreading on the "individual instantiation" step which includes parameter identification. as the parameter identification is by far the most expensive step, parallelizing the complete generational loop offers only a small additional performance benefit.• for example: parameter + parameter → parameter, function(parameter) → parameter, . this in lasso regression, an ℓ 1 -norm of the parameter vector is added to the squared residual norm as a regularization term, which incentives potentially zero parameter values.usually, sr searches for an expression f(x) which satisfies y = f(x) for the given data x and y. if, for example, we search an expression f(x) and presuppose that y = exp(f(x)) holds, we could rearrange the expression as log(y) = f(x) and search for the f part of the expression directly. this however, does change the residual to log(y) -f(x) and thus the minimization objective, which may lead to inferior results. the expression y = f(x)^2 + exp(f(x)) cannot be solved for f(x). in tisr it is possible to define pre_residual_processing! function, which is applied to the output of the expression f(x) before residual is calculated. for the second example, for the evaluation of each expression f(x) proposed by tisr, first, the output of the expression is calculated. 
the pre_residual_processing! function performs y_pred = f(x)^2 + exp(f(x)) to calculate the prediction of the proposed expression, and the residual is calculated by y -y_pred. in early stopping, the parameter identification is conducted for a fraction of the data, while the residual norm is also calculated for the remainder. one method to perform early stopping, as described inprechelt, 1998, is to terminate the parameter estimation as soon as the residual norm for the remaining data increases monotonically for a number of iterations. first, the parameter identification, which is currently by far the most expensive part of the algorithm, may be stopped after fewer iterations for candidate expressions which do not appear to capture behavior underlying the data. second, the jacobian for the levenberg-marquardt algorithm is only calculated for a fraction of the data, which increases the performance for large data sets.• protected evaluation of power and similar functions, avoiding the need for constructions such as abs(x)^y ▷ improved simplification possibilities and more sensible expressions • many residual pre-and post-processing options ▷ weighting ▷ custom post-processing ▷ custom processing of the prediction before residual calculation to, i., search for sub-expressions • removal of expression parts, which do not contribute much ▷ targeted simplification • early stopping ▷ mitigate overfitting and improve generalization performance ▷ improve performance by stopping earlier for the future, the most notable feature of tisr is its flexibility and extensibility, which allows us to implement major changes comparably easily and fast. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/388.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/388.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebfc4f422bc48db50a576da19d1351b1b42f97ed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/388.txt @@ -0,0 +1 @@ +ranking problems arise in many applications including search engines, recommendation systems, and online advertising (see, e.g., the book by liu ). the output space in ranking problems consists of permutations of objects. given the true relevance scores of the objects, the accuracy of a ranked list is judged using ranking measures, such as pairwise loss (pl), discounted cumulative gain (dcg), and precision@n (p@n). many ranking algorithms are offline, i.e., they are designed to operate on the entire data in a single batch. however, interest in online algorithms, i.e., those that process the data incrementally, is rising for a number of reasons. first, online algorithms often require less computation and storage. second, many applications, especially on the web, produce ongoing streams of data making them excellent candidates for applying online algorithms. third, basic online algorithms, such as the ones developed in this work, make excellent starting points for developing more sophisticated online algorithms that can deal with non-stationarity. non-stationarity is a major issue in ranking problems since user preferences can easily change over time.the basic full feedback setting assumes that the relevance scores, typically obtained via human annotation, provide the correct feedback for each item in the ranking. since the output in ranking problems is a permutation over a potentially large set of objects, it becomes practically impossible to get full feedback from human annotators. 
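For concreteness, two of the ranking measures named above (DCG and Precision@n) can be computed directly from the relevance scores of a ranked list. A minimal numpy sketch with illustrative helper names; the log2 discount used here is the common convention and may differ in detail from the exact definitions in the cited work.

```python
import numpy as np

def dcg(ranked_relevances):
    """Discounted cumulative gain of relevance scores given in ranked order."""
    rel = np.asarray(ranked_relevances, dtype=float)
    discounts = 1.0 / np.log2(np.arange(2, rel.size + 2))
    return float(np.sum(rel * discounts))

def precision_at_n(ranked_relevances, n, threshold=1):
    """Fraction of the top-n ranked items whose relevance meets the threshold."""
    top = np.asarray(ranked_relevances[:n])
    return float(np.mean(top >= threshold))

# toy example: relevance scores of items in the order the system ranked them
ranked = [3, 2, 3, 0, 1, 2]
print(dcg(ranked), precision_at_n(ranked, n=3))
```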
therefore, researchers have looked into weak supervision or partial feedback settings where the correct relevance score is only partially revealed to the learning algorithm. for example, chaudhuri and tewari developed a model for online ranking with a particular case of partial feedback called top-k feedback. in this model, the online ranking problem is cast as an online partial monitoring game between a learner and an oblivious adversary (who generates a sequence of outcomes before the game begins), played over t rounds. 2 at each round, the learner outputs a ranking of objects whose quality with respect to the true relevance scores of the objects, is judged by some ranking measure. however, the learner receives limited feedback at the end of each round: only the relevance scores of the top-k ranked objects are revealed to the learner. here, k ≤ m (in practice k ≪ m) and m is the number of objects.the goal of the learner is to minimize its regret. the goal of regret analysis is to compute the upper bounds of the regret of explicit algorithms. if lower bounds on regret that match the upper bounds up to constants can be derived, then the minimax regret is identified, again up to constants. previous work considered two settings: non-contextual (objects to be ranked are fixed) and contextual (objects to be ranked vary and get encoded as a context, typically in the form of a feature vector). our focus in this work will be on the non-contextual setting where six ranking measures have been studied: pl, dcg, p@n, and their normalized versions area under curve (auc), normalized discounted cumulative gain (ndcg) and average precision (ap). chaudhuri and tewari showed that the minimax regret rates with the top-k feedback model for pl, dcg and p@n are upper bounded by o(t 2/3 ) for all 1 ≤ k ≤ m. in particular, for k = 1, the minimax regret rates for pl and dcg are θ(t 2/3 ). moreover, for k = 1, the minimax regret rates for auc, ndcg, and ap are θ(t ).one of the open questions, as described by chaudhuri and tewari , is to find the minimax regret rates for k > 1 for the six ranking measures.it is worth noting that the top-k feedback model is neither full feedback (where the adversary's move is uniquely determined by the feedback) nor bandit feedback (where the loss is determined by the feedback); the model falls under the framework of partial monitoring . recent advances in classification of finite partial-monitoring games have shown that the minimax regret of any such game is 0, θ(t 1/2 ), θ(t 2/3 ), or ω(t ), and is governed by two important properties: global observability and local observability . in particular, bartók et al. gave an almost complete classification of all finite partial-monitoring games by identifying four regimes: trivial, easy, hard, and hopeless games, which correspond to the four minimax regret rates mentioned before, respectively. what was left from the classification is the set of games in oblivious adversarial settings with degenerate actions which are never optimal themselves, but can provide useful information. lattimore and szepesvári finished the characterization of the minimax regret for all partial monitoring games.our contributions: we establish the minimax regret rates for all values of k, i.e., 1 ≤ k ≤ m and for ranking measures pl, dcg, and p@n. we obtain these results by showing that the properties of global observability and local observability hold in the appropriate cases. 
in addition, we provide an algorithm based on the neighborhoodwatch2 algorithm of lattimore and szepesvári . our algorithm achieves the minimax rate for p@n and has per-round time complexity polynomial in m (for any fixed n). again, the global observability condition holds if every pair of neighboring actions is globally observable, and the local observability condition holds if every pair of neighboring actions is locally observable.to explain intuitively, note that σ k ′ and h k ′ ,l ′ contain the same information as s k ′ and e l ′ because observing h k ′ ,l ′ is equivalent to observing s k ′ e l ′ . showed an algorithm neighborhoodwatch2 that achieves θ(t 1/2 ) minimax regret for all finite partial monitoring games with local observability, including games with duplicate or degenerate actions. for each pair of neighboring actions a, b for the ranking loss measure (negated) p@n, there exists a function v ab : σ × h → r such that definition 8 is satisfied and moreover, v ab ∞ = max σ∈σ,s∈h | v ab (σ, s) | can be upper bounded by 4. then,lattimore and szepesvári provided an algorithm so that for locally observable games without degenerate actions (as is the case in our current setting), its regret upper bound matches the best-known information-theoretical upper bound shown inlattimore and szepesvári .in this work, we have successfully closed one of the most interesting open questions proposed bychaudhuri and tewari : we have established a full characterization of minimax regret rates with top-k feedback model for all k for ranking measures pairwise loss (pl), discounted cumulative gain (dcg) and precision@n gain (p@n). definition 6 says that a pair of actions σ i and σ j is globally observable if l il j ∈ ⊕ 1≤k ′ ≤m! col(s ⊤ k ′ ). without loss of generality, assume σ i and σ j differ in positions k and k ′ where k ′k > 1.where (s ⊤ k ′ ) k ′′ ,l ′ is the element in row k ′′ and column l ′ of s ⊤ k ′ , for l ′ = 1, 2. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/389.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/389.txt new file mode 100644 index 0000000000000000000000000000000000000000..f7e552e4488a96bc7f5f4a6031c05049b072cfe6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/389.txt @@ -0,0 +1 @@ +deep neural networks (i.e., networks with many nonlinear layers) are widely considered to be the most appropriate architecture for mapping complex dependencies such as those arising in artificial intelligence tasks. their potential to map intricate dependencies has advanced their widespread use.for example, the study (meir et al., 2023) compares the first deep convolutional network for image classification with two sequential convolutional layers lenet (lecun et al., 1989) to its deeper evolution vgg16 (simonyan and zisserman, 2015) with 13 sequential convolutional layers. while the performance gain in this comparison was significant, further increasing the depth resulted in very small performance gains. adding three additional convolutional layers to vgg16 improved the validation error slightly from 25.6 % to 25.5 % on the ilsvrc-2014 competition dataset (russakovsky et al., 2015), while increasing the number of trainable parameters from 138m to 144m.however, training these networks remains a significant challenge, often navigated through numerical optimization methods based on the gradient of the loss function. 
in deeper networks, the gradient can significantly diminish particularly for parameters distant from the output, leading to the well-documented issue known as the "vanishing gradient".a breakthrough in this challenge is the concept of residual connections: using an identity function parallel to a layer (he et al., 2016). each residual layer consists of an identity mapping copying the layer's input to its output and a conventional weighted layer with a nonlinear activation function. this weighted layer represents the residue after applying the identity. the output of the identity and the weighted layer are summed together, forming the output of the residual layer. the identity function plays the role of a bridge-or "highway" (srivastava et al., 2015)transferring the gradient w.r.t. layer output into that of the input with unmodified size. in this way, it increases the gradient of layers remote from the output.the possibility of effectively training deep networks led to the widespread use of such residual connection networks and to the belief that this is the most appropriate architecture type (mhaskar et al., 2017). however, extremely deep networks such as resnet-1000 with ten times more layers than resnet-101 (he et al., 2016) often demonstrate a performance decline.although there have been suggestions for wide architectures like efficientnet (tan and le, 2019), these are still considered "deep" within the scope of this paper.this paper questions the assumption that deep networks are inherently superior, particularly considering the persistent gradient problems. success with methods like residual connections can be mistakenly perceived as validation of the superiority of deep networks, possibly hindering exploration into potentially equivalent or even better-performing "shallow" architectures.to avoid such premature conclusions, we examine in this paper the relative performance of deep networks over shallow ones, focusing on a parallel or "shallow" architecture instead of a sequential or "deep" one. the basis of the investigation is the mathematical decomposition of the mapping materialized by a stack of convolutional residual networks into a structure that suggests the possibility of being approximated by a shallow architecture. by exploring this possibility, we aim to stimulate further research, opening new avenues for ai architecture exploration and performance improvement.for example, the study(meir et al., 2016).the possibility of effectively training deep networks led to the widespread use of such residual connection networks and to the belief that this is the most appropriate architecture type(mhaskar et al.to avoid such premature conclusions, we examine in this paper the relative performance of deep networks over shallow ones, focusing on a parallel or "shallow" architecture instead of a sequential or "deep" one. if acceptable, it would be possible to substitute a deep residual network of h sequential layers with a "shallow" network with a single layer consisting of h individual modules in parallel, summing their output vectors.the analysis of section 2 suggests that the expressive power of a network architecture in which stacked residual connection layers of a deep network are reorganized into a parallel operation in a single, broad layer, may be close to that of the original deep network.it is important to point out that residual connection layers are restricted to partial stacks of equally sized layers (otherwise the unity mapping could not be implemented). 
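The residual layer just described (an identity bridge summed with a weighted, nonlinearly activated layer of the same size) is easy to write down, as is one reading of the parallel reorganization the paper investigates. A minimal PyTorch sketch; the layer sizes, activation, and the parallel variant are illustrative choices, not the authors' exact architecture.

```python
import torch
import torch.nn as nn

class ResidualBlock(nn.Module):
    """y = x + F(x): identity mapping summed with one weighted, nonlinear layer."""
    def __init__(self, channels):
        super().__init__()
        self.conv = nn.Conv2d(channels, channels, kernel_size=3, padding=1)
        self.act = nn.ReLU()

    def forward(self, x):
        # the identity path carries the gradient unchanged to earlier layers
        return x + self.act(self.conv(x))

class ParallelBlock(nn.Module):
    """One reading of the 'shallow' counterpart: h modules in parallel, outputs summed."""
    def __init__(self, channels, h):
        super().__init__()
        self.branches = nn.ModuleList(
            nn.Conv2d(channels, channels, kernel_size=3, padding=1) for _ in range(h)
        )

    def forward(self, x):
        return x + sum(torch.relu(b(x)) for b in self.branches)

deep = nn.Sequential(*[ResidualBlock(16) for _ in range(8)])   # sequential ("deep") stack
shallow = ParallelBlock(16, h=8)                               # parallel ("shallow") layer
out_deep = deep(torch.randn(1, 16, 32, 32))
out_shallow = shallow(torch.randn(1, 16, 32, 32))
```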
the possibility of making the consecutive layer stack parallel concerns only the middle part with residual connections of identically sized layers. to keep the architecture simple and reduce the possibility of additional side effects, the input is flattened into a one-dimensional vector before the dense classification layer with ten linear output units. parallel filters are popular means of extending a straightforward convolution layer architecture: instead of each layer being a single convolution of the previous layer, it consists of multiple convolution filters in parallel.due to their identical layout and equal random initialization, training the two networks with one convolutional layer and one filter each resulted consequently in equal loss values. they show an interesting development for cifar10: the training loss decreases by raising the number of filters while the validation loss largely increases for more than four filters.altogether, the parallel architecture shows better performance on the validation set despite the slightly inferior loss on the training set.as an additional view to the relationship between the depth and the width of the network, a group of experiments is analyzed in which the product of the number of filters (f) and the number of convolution layers (c) are kept constant. for example, an architecture with 32 filters and a single convolutional layer has a ratio of 1 /32 while the ratio with one filter and 32 layers is 32 /1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/39.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/39.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb25b80b629f81830cb06f245330c02c5b12d31e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/39.txt @@ -0,0 +1 @@ +in supervised classification tasks, a machine learning model is provided with an input, and after the training phase, it outputs one or more labels from a fixed set of classes (mitchell, 1997). recent developments of large pre-trained language models (llms), such as bert (devlin et al., 2019), t5 (raffel et al., 2020) and gpt-3 (brown et al., 2020), gave rise to a novel approach to such tasks, namely prompting (liu et al., 2021a). in prompting (see fig. 1), there is usually no further training required (although fine-tuning is still an option), and instead, the input to the model is extended with an additional text specific to the task -a prompt. prompts can contain questions about the current sample, examples of input-output pairs or task descriptions (fig. 1, the long legal document & legal question are the input). using prompts as clues, a llm -a foundation model (bommasani et al., 2021) can infer from its implicit knowledge the intended outputs (fig. 1, the completion) in a zero-shot fashion (yin et al., 2019;sanh et al., 2021). legal prompt engineering is the process of creating, evaluating, and recommending prompts for legal nlp tasks. it would enable legal professionals to perform nllp tasks, such as data annotation, search or question-answering, by simply querying llms in natural language. 
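As an illustration of the kind of prompt being described (the concrete step-by-step template is detailed just below), here is a small sketch that assembles a zero-shot judgement-prediction prompt from a document and a question. The question wording, template layout, and the query_llm call are hypothetical placeholders for whichever LLM API is used.

```python
def build_prompt(document: str, question: str) -> str:
    """Assemble a zero-shot judgement-prediction prompt from a document and a question."""
    return (
        f"{document}\n\n"
        f"Question: {question}\n"
        "A, Yes\n"
        "B, No\n"
        "Answer:"
    )

prompt = build_prompt(
    document="<long legal document text>",
    question="Is there a violation of the Convention in this case?",
)
# completion = query_llm(prompt)  # hypothetical call to whichever LLM API is used
```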
in this presentation, we investigate prompting for the task of legal judgement prediction (strickson and de la iglesia, 2020;zhuopeng et al., 2020) step 1step 2step 3step 3step 4step 5 figure 2: an example prompt template in english for the echr task.in supervised classification tasks, a machine learning model is provided with an input, and after the training phase, it outputs one or more labels from a fixed set of classes(mitchell, 1997)., 2020)and gpt-3(brown et al. prompts can contain questions about the current sample, examples of input-output pairs or task descriptions (fig. using prompts as clues, a llm -a foundation model(bommasani et al.step 5 figure2: an example prompt template in english for the echr task.analog to many established nlp tasks, the legal domain has a diverse set of benchmarks, ranging from binary classification(alali et al., 2022)-an open-source framework for prompt-learning, and promptsource(bach et al.the legal judgement prediction (ljp) task (strickson and de la iglesia, 2020;zhuopeng et al., , 2022b)), maps to real (human readable) words.step 1: we use just the long legal document (one at a time) as the only input to the model. this improved the output of the model, but it was still not working for many cases, where the model continued with a list of other questions.step 4: we included the answer options "a, yes" and "b, no" and finally step 5: the special gptmodel indicator "<|endoftext|>", to separate the document from the prompt. we also optimized the model output sequence length based on the performance on the validation set. however, as we discovered, restricting the model generation to just one token yielded not the best overall performance. it is interesting to observe completions that could be more informative about the ljp, especially those that either listed the violated articles, or those that contained some form of explanation for the decision. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/390.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/390.txt new file mode 100644 index 0000000000000000000000000000000000000000..9bd12dec943300a2a6aa09903255f647f2688534 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/390.txt @@ -0,0 +1 @@ +predicting the dynamics of complex systems exhibiting high-dimensional spatiotemporal chaos is a challenging machine learning problem with important applications in: physics, biology, medicine, economics, meteorology etc. another problem of interest, is the inverse problem of inferring the connectivity network of such a system from input-output measurements. such an example is the case of inferring the connectivity of genetic regulatory networks from the measurements of gene expression data. here we explore the feasibility of these problems using a complex system corresponding to a non-linear network model we previously discussed in . this is a continuous model of non-linear random networks (nlrn), which exhibits a phase transition from ordered to chaotic dynamics as a function of the average network connectivity (in-degree). in the chaotic regime, these networks show strong sensitivity to initial conditions, quickly forgetting their past states, making them harder to predict and to infer their connectivity. in our approach we use the tensorflow library , which is the state of the art for deep neural networks training and prediction. our numerical results show that the dynamics of the considered system can be successfully predicted for short times. 
however, we also indirectly discovered that longer term predictions of the chaotic system quickly deteriorate and blow up due to an unexpected behavior of the tensorflow library. here we provide numerical evidence of the short time prediction ability, and of the longer term predictability blow up. however, we also indirectly discovered that longer term predictions of the chaotic system quickly deteriorate and blow up due to an unexpected behavior of the tensorflow library. the problems we would like to solve are: (1) to predict the chaotic dynamics of the system, and (2) to infer the connectivity matrix u and the bias vector a, using only the observed activity vector x(t).the tensorflow model used here is probably the simplest one, since it consists of a single layer neural network with a tanh() activation function here, 'ne' is the number of epochs used for training, and 'nb' is the number of samples included in each training batch.in a first example, we iterated an instance of the chaotic network with n = 50 nodes for t = 1000 steps, collecting the input-output pairs, then we trained the tensorflow network for nepochs=1000 and nbatch=100. on top we have the real 'future' dynamics of the chaotic system (simulated using the standard numpy library), in the middle we have the predicted dynamics of the tensorflow learned neural network, and at the bottom we have the difference of the two systems. this observation triggered the following question: if the inference errors are so small then why the prediction deteriorates after such a small number of steps? in fact we observed this unexpected phenomenon even when the inference errors were zero, which obviously should not happen, since in this case the chaotic and the learned systems should be identical. the first system was iterated using the numpy library, while the second system was iterated using the tensorflow library.shows the system iterated using the numpy library, the middle figure shows the system iterated with the tensorflow library, while the bottom figure shows the dynamics difference of the two systems. in this case the tensorflow system blow up takes a bit longer to set up, since in the previous example in figure1, the systems were not quite identical, as a result of the learning process being present. we repeated the experiment with a double number of nodes, n = 100, and surprisingly the ten-sorflow system iterations blow up even earlier, and more abruptly, suggesting that this undesirable and unexpected phenomenon also depends on the number n of nodes in the neural network. in one can see that by increasing the number of nodes n the tensorflow system iteration blows up earlier, and more abruptly, suggesting that larger systems cannot be reliably predicted even for a small number of steps, which further amplifies our previously disturbing conclusion. 
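To make the experimental setup concrete, here is a numpy-only sketch of iterating a random tanh network of this kind and collecting the input-output pairs used for training; the connectivity scaling and parameter values are our own illustrative choices rather than the exact NLRN construction of the cited work.

```python
import numpy as np

rng = np.random.default_rng(0)
n, t_steps = 50, 1000

# random connectivity matrix u and bias vector a (illustrative scaling)
u = rng.normal(0.0, 1.0, size=(n, n)) / np.sqrt(n)
a = rng.normal(0.0, 0.1, size=n)

x = rng.uniform(-1.0, 1.0, size=n)
inputs, outputs = [], []
for _ in range(t_steps):
    x_next = np.tanh(u @ x + a)     # one step of the network dynamics
    inputs.append(x.copy())
    outputs.append(x_next.copy())
    x = x_next

X = np.stack(inputs)   # training inputs  x(t)
Y = np.stack(outputs)  # training targets x(t+1)
```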
we have shown that these objectives could be achieved for a shorter prediction interval and relatively small size chaotic systems using the tensorflow library.predict()' method increases very fast with the size of the model, which could have very bad effects on attempts to predict the behavior of important high-dimensional spatiotemporal systems arising in pretty much every scientific field: physics, biology, medicine, economics, meteorology etc.in conclusion, we should stress that many scientific problems require the prediction of chaotic systems, which by the way are deterministic and should not include sources of randomness, unless a random perturbation is required by the problem to be solved, and in these cases the tensorflow 'model.predict()' method does not seem to be the appropriate method to use, since we end up predicting a chaotic system with another chaotic system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/391.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/391.txt new file mode 100644 index 0000000000000000000000000000000000000000..353d15ec143cc1d199f54c96a731d6c94e3670ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/391.txt @@ -0,0 +1 @@ +predictive process monitoring (ppm), or process prediction, is a branch of process mining that is concerned with the forecasting of how a running process instance will unfold in the future . for example, ppm approaches may predict what the outcome of the process instance will be, how long it will take to complete, or which activities will be executed next. in contrast to techniques like process discovery or conformance checking, process prediction is forward-facing, and aims to identify process execution problems like delays or compliance violations before they occur, thus enabling an organization to preemptively take preventive counteractions .whereas older approaches to process prediction relied on explicit models of process behavior, such as transition systems or probabilistic automata , recent research has almost exclusively tackled the problem with neural networks . the majority of this research has also focused on control-flow predictions, specifically the prediction of the next activity in a trace . at a high level, all existing contributions approach next activity prediction as a self-supervised machine learning problem : an existing event log is randomly split into a training and a test set. a machine learning model, typically a deep neural network, is shown incomplete traces from the training set, such that it learns to predict the next activity in that trace. the performance of the trained model is then evaluated by predicting the next activity for incomplete traces of the unseen test set and computing performance measures. almost all existing publications train and evaluate their models on a relatively small collection of event logs for their evaluation. this includes the helpdesk event log and the logs from the business process intelligence challenges (bpic) 2012, 2013, and/or 2017.in this paper, we argue that this current way of training and evaluating next activity prediction models is biased in the sense that it does not evaluate how well these models would generalize to unseen data. we argue that, in order to design reliable evaluation procedures, it is necessary to first engage with the topic of next-activity prediction on a more conceptual level. 
our line of argument is based on several observations about the aforementioned event logs: first, the next-activity label is almost entirely determined by the control-flow of the prefix. second, when only considering the control-flow perspective, there is an enormous amount of example leakage in all logs, so that most predictions are made on prefixes that were already seen during training. third, as other research has already shown , incomplete traces can often continue in different ways, so that the maximal achievable accuracy in this evaluation setting is unknown and probably much lower than 100%.after introducing basic concepts in section 2, we provide empirical evidence for each of these observations and demonstrate that the next-activity prediction task in these event logs is a rather trivial one that can be solved by a naive baseline (section 3). section 4 presents various scenarios for generalization in process prediction which are grouped into three types of generalization. finally, we discuss related work in section 5 and conclude the paper in section 6 at a high level, all existing contributions approach next activity prediction as a self-supervised machine learning problem: an existing event log is randomly split into a training and a test set.in this paper, we argue that this current way of training and evaluating next activity prediction models is biased in the sense that it does not evaluate how well these models would generalize to unseen data. we employ five commonly used event logs (helpdesk, bpic12, bpic13 incidents, bpic17 offer, and mobis) and generate six splits for each log: five in which we randomly allocate traces so that 80% of them are part of the training set and 20% are part of the test set, and one in which the split is time-based so that the 20% of traces with the most recent start timestamps end up in the test set. this means that most of the predictions made on the test set are trivial ones, and consequently, that one cannot draw valid conclusions about how well a prediction model would perform on unseen data from this evaluation setting.we can further illustrate this issue by demonstrating that the prediction accuracy of state-of-the-art models lies in a relatively narrow corridor that is bounded by a naive baseline with little to no generalization capacity on the lower end, and by the maximal accuracy that can be achieved with only control-flow features on the upper end. if a prediction model that predicts a single next-activity label is tasked with classifying a label-ambiguous prefix, the best prediction in terms of the resulting overall accuracy it can make is the activity that is most frequently associated with that prefix.figure2shows the prediction accuracy achieved by the baseline prediction model described above and the mppn, a state-of-the-art neural network predictor that includes contextual attributes for its prediction. this means that, although generalization is a characteristic of interest for machine learning in general and process prediction in particular, the generalization capabilities of ppm algorithms have so far not been explicitly evaluated, in the sense of applying an algorithm on a test log that has little to no overlap with the training data3.generalization over unseen control-flow constructs involves dealing with unseen control-flow variants in the prefix as shown in the scenarios in event logs l1, l2 and l3 in table1, table2and table3. 
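The naive baseline referred to above (for a prefix seen in training, predict the activity that most frequently followed it) fits in a few lines. A minimal sketch with illustrative helper names; a real implementation would need a more careful fallback for prefixes never seen in training.

```python
from collections import Counter, defaultdict

def fit_baseline(training_traces):
    """training_traces: list of activity sequences, e.g. [['A', 'B', 'C'], ...]."""
    counts = defaultdict(Counter)
    for trace in training_traces:
        for i in range(1, len(trace)):
            prefix = tuple(trace[:i])
            counts[prefix][trace[i]] += 1
    # for every prefix seen in training, remember the most frequent next activity
    return {p: c.most_common(1)[0][0] for p, c in counts.items()}

def predict_next(model, prefix, fallback="<unknown>"):
    return model.get(tuple(prefix), fallback)

model = fit_baseline([["A", "B", "C"], ["A", "B", "D"], ["A", "B", "C"]])
print(predict_next(model, ["A", "B"]))  # -> 'C', the most frequent continuation
```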
when evaluating process prediction methods with point-measures like top-1 accuracy, which consider only the single most probable prediction, one cannot assess generalization properly as it does not take into account whether the model has learned that more than one option can follow.for generalization over unseen context combinations, the prediction model must be able to interpret the context attributes and to distinguish between those scenarios where the context attributes have influence on the next activity to be predicted and those scenarios where they do not. for instance, an unseen sequence of activities in the prefix can also come with unseen combination of context attributes or new attribute values which makes generalization in process prediction a challenging task. they used this splitting strategy to evaluate whether prediction models can learn process model structure of the unknown system behind the log, focusing mainly on concurrent activities in process models.although we have focused on next-activity prediction and other prediction situations were out of scope for this work, there might be more scenarios in nextactivity prediction that are not yet covered. following that, prediction models that take context information into account might actually be able to generalize with respect to the scenarios of unseen attribute value combination, as they reach comparable or higher accuracy as control-flow only models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/392.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/392.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebe057fa20ac3891b7d0bfdd3a164c85a9edff8f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/392.txt @@ -0,0 +1 @@ +table transformer (tabtransformer) is a novel deep tabular data modeling for various scenarios, such as supervised and semi-supervised learning. its main contribution is to transform regular categorical embeddings into contextual ones, thus achieving higher accuracy compared to previous state-of-the-art methods. on the other hand, differential privacy (dp) is a frequently used technique to ensure privacy for individual data points in a training dataset. dp-sgd , which combines dp with stochastic gradient descent (sgd), is one of the most frequently used optimization techniques in machine learning (ml) to train models on sensitive data while safeguarding individual privacy.in the literature, dp-sgd techniques either fine-tune a pre-trained model or train a model from scratch. however, almost none of them have focused on tabtransformer. in this paper, we implement various recent parameter-efficient finetuning techniques, such as lora , adapter , and prompt tuning (both shallow tuning and deep tuning), so as to explore the benefits of differentially private pre-training and fine-tuning for tabtransformers. to summarize, our key contributions are as follows: 1) we study an unexplored scenario for transfer learning in tabtransformers with dp, i.e., implementing various kinds of parameter-efficient techniques in the fine-tuning stage instead of full tuning. 2) different from previous tabular learning methods which mainly exploited dp at the fine-tuning stage, we study the use of dp-sgd for both pre-training and fine-tuning, thus ensuring end-toend privacy. 
3) our experiments on the acsincome dataset showed that the accuracy outperforms the baselines in most cases, while the parameter efficiency improves by more than 97.86%. in addition, we report the best advantageous peft setting to inform and inspire the future design of dp-aware pre-training and fine-tuning for tabtransformers. in this paper, we implement various recent parameter-efficient finetuning techniques, such as lora, adapter, and prompt tuning(both shallow tuning and deep tuning), so as to explore the benefits of differentially private pre-training and fine-tuning for tabtransformers. the most popular peft techniques include lora, adapter, and (deep/shallow) prompt tuning. experimental results clearly show that peft methods ensure high parameter efficiency without the loss of accuracy, thus outperforming basic approaches in terms of accuracy, parameter efficiency, and privacy. the degree of parameter efficiency in a peft technique hinges on the number of parameters that remain trainable during fine-tuning. when we make a comparison between the peft methods listed in the table and the baseline methods (full tuning and train from scratch), it becomes evident that all the peft approaches have substantially decreased the value of n by at least 206,193-4,408 206,193 = 97. for example, when ϵ p , ϵ f are both set to 32, the acc of deep tuning, adapter, lora, and shallow tuning is 0.7452, respectively. meanwhile, when ϵ = 32, the acc of train from scratch and zero-shot inference are 0.7099 and 0.7098, respectively. these values suggest that when compared to train from scratch and zero-shot inference, the peft techniques increase the acc by at least 4. hence, to sum up, peft techniques achieve excellent levels of accuracy (acc) while demonstrating a remarkably high degree of parameter efficiency.736, respectively, while the acc of full tuning is 0.in this paper, we presented a pilot study exploring the benefits of combining differentially private pre-training and parameter-efficient fine-tuning (peft) for tabtransfomrers with a variety of fine-tuning methods, including adapter, lora, deep/shallow tuning. hence, compared to three baselines which are either parameter-consuming or ineffective, peft techniques achieve a significantly improved trade-off among privacy, accuracy, and parameter efficiency. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/393.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/393.txt new file mode 100644 index 0000000000000000000000000000000000000000..dfe2aaebe94a8c8e279833e94e4347e35094cfca --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/393.txt @@ -0,0 +1 @@ +data-driven ai systems and neural networks in particular have shown tremendous successes across a wide range of applications, including automotive, healthcare, gaming, marketing, and more recently natural language processing. fuelled by high and growing rates of adoption of the new technology across sectors, robustness and stability are vital characterisations of ai performance.the importance of ai stability and robustness is exemplified by the discovery of adversarial perturbations -imperceptible changes of input data leading to misclassifications. these perturbations can be universal (i.e. triggering misclassifications for many inputs), limited to a single attribute , or masquerading as legitimate inputs . sometimes, such ai instabilities can be typical , . 
moreover, instabilities can also be induced by perturbations of the ai structure .the issue of ai robustness is non-trivial and cannot be considered in isolation from other measures of ai performance: a model returning the same output regardless of the inputs is perfectly robust yet useless. a theoretical framework to approach the problem has recently been proposed in . it has been shown in that (i) there is an uncountably large family of distributions such that for an appropriately large data sample drawn from a distribution from this family there is a feed-forward neural network showing excellent performance on this sample, although (ii) this same network becomes inevitably unstable on some subset of the training and validation sets. moreover, (iii) for the same distribution and the same data, there is a stable network possibly having a different architecture.here we show that the stability-accuracy issues have other unexplored dimensions and could be significantly more pronounced than previously thought. our main result, theorem 1 shows that there exist large families of well-behaved data distributions for which even networks achieving zero training and validation error may be highly unstable with respect to almost any small perturbation on nearly half of the training or validation data. yet, for the same data samples and distributions, there exist stable networks with the same architecture as the unstable network which also minimise the loss function. strikingly, there exist infinitely many pairs of networks, in which one network is stable and accurate and the other is also accurate but unfortunately unstable, whose weights and biases could be made arbitrarily close to each other. what is even more interesting, all this happens and persists when the values of weights and biases are made small. this result reveals a fundamental issue at the heart of current data-driven approaches to learning driven by minimising empirical risk functions, even in the presence of weight regularisation, in distribution-agnostic settings. the issues is that such learning algorithms could be structurally incapable of distinguishing between stable and unstable solutions.the rest of the paper is organised as follows. in section 2 we introduce notation and problem setting. in section 3 we state our main results along with discussion, interpretation, and comparison to the literature. section 4 concludes the paper. it has been shown inthat (i) there is an uncountably large family of distributions such that for an appropriately large data sample drawn from a distribution from this family there is a feed-forward neural network showing excellent performance on this sample, although (ii) this same network becomes inevitably unstable on some subset of the training and validation sets. our main result, theorem 1 shows that there exist large families of well-behaved data distributions for which even networks achieving zero training and validation error may be highly unstable with respect to almost any small perturbation on nearly half of the training or validation data. yet, for the same data samples and distributions, there exist stable networks with the same architecture as the unstable network which also minimise the loss function. 
strikingly, there exist infinitely many pairs of networks, in which one network is stable and accurate and the other is also accurate but unfortunately unstable, whose weights and biases could be made arbitrarily close to each other.we will suppose that all data are drawn from some unknown probability distribution belonging to a family f , and each element d ∈ f of this family is supported on n × {0, 1}.moreover, there exist pairs of unstable and robust networks, f λ , fλ and f λ , fλ , satisfying the statements above such that the maximum absolute difference between their weights and biases is either arbitrarily small or arbitrarily large.(iii) however, for the above robust solution f , a) there exists an uncountably large family of distributions dδ ∈ f on which f correctly classifies both the training and test data, yet fails in the same way as stated in (i). b) there exists an uncountably large family of distributions dδ ∈ f such that the map f is robust on t ∪ v (with respect to perturbations.according to statement (i) of theorem 1, not only are instabilities to be expected, but they can also be remarkably widespread: for sufficiently large data sets they may occur, with high probability, for nearly half of all data. the technical point that the statement of theorem 1 holds with probability one is due to the fact that the proof constructs data distributions which assign probability zero to certain sets, so there may exist training samples with probability zero for which the construction does not apply.indeed, consider just the first two layers of the network f constructed in the proof of the theorem, remove the sign(•) activation function, and introduce an arbitrarily small positive factor β (cf.the final and perhaps the most interesting point in relation to the problem of verifiability is statement (iii), which can be related to challenge of the "dark data" -the data which exists but to which we don't have accessor, more generally, the missing data and the data which we don't have.proof of statement (ii) of the theorem the argument used in the proof of statement (i), part 2, implies that there exists a network f ∈ n n n,l such that f (x) takes value 1 when the inequalities. however, since for any α ∈ (0, ε/2) the function f is constant within a ball of radius α/ √ n around any data point x ∈ t ∪ v, we can conclude that f is insusceptible to the instabilities affecting f . to show that there exists a pair of unstable and stable networks, f and f (the network f is stable with respect to perturbations ζ : ζ ≤ α/ √ n), consider systems of inequalities (12), (14) with both sides multiplied by a positive constant κ > 0. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/394.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/394.txt new file mode 100644 index 0000000000000000000000000000000000000000..51d165de990bc27b27af0518314d7f757bf658ec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/394.txt @@ -0,0 +1 @@ +reinforcement learning (rl) has witnessed remarkable advancements in recent years, fueling innovations across diverse fields such as robotics, finance, aviation, and intelligent transportation systems , , , . while traditional rl methods are focused on maximizing cumulative rewards, real-world applications often demand a more comprehensive approach that considers the inherent risks associated with decision-making. 
specifically, in scenarios where actions may lead to high-stake consequences or where the environment is intrinsically uncertain, simply aiming for reward maximization without considering risk can lead to suboptimal or even catastrophic outcomes .safety in rl is instrumental to its advancements. prominent techniques include model-based strategies for assessing action safety , , , shielding mechanisms to counter unsafe decisions , , , constrained optimization for policy adherence , , , and formal methods underpinning rigorous safety with mathematical constructs , , . amid this landscape, risk-aware rl stands out. techniques for risk-aware rl range from incorporating financial risk metrics like value-at-risk (var) and conditional value-at-risk (cvar) , , , to embracing distributional rl that models the entire return distribution , , , to formulating risk-sensitive policies that inherently favor safer actions , . this adaptation in strategy ensures that agents are not only aiming for high rewards but are also cautious of rare yet consequential adverse events, striking a balance between reward-seeking and prudence in complex environments. 1 ali baheri is with the department of mechanical engineering at rochester institute of technology. akbeme@rit.edu building on the foundations of risk-sensitive rl, our work proposes a novel perspective by leveraging the powerful mathematical framework of optimal transport (ot). the ot provides tools to measure the distance between probability distributions in a geometrically meaningful way . in the context of rl, this allows us to treat risk as a divergence between the desired (or target) distribution of outcomes and the distribution induced by the agent's policy. by framing risk management as an ot problem, we can inherently consider the entire distribution of returns, capturing both the expected rewards and the associated risks. at its core, our approach aims to minimize the ot distance between the state distribution generated by the policy and a predefined target risk distribution. such a formulation fosters a balanced trade-off between reward maximization and risk mitigation. it accounts for the variability in outcomes, promoting policies that not only achieve high expected rewards but also align closely with the desired risk profile. the contributions of this paper are twofold:• we present a formulation for risk-aware rl, harnessing the capabilities of ot theory. this formulation integrates risk considerations into the rl paradigm, charting a novel direction for risk-sensitive decisionmaking.• we elucidate this framework with a series of theorems that highlight the interplay between risk distributions, value functions, and policy dynamics. these theorems reveal the balance between seeking rewards and navigating risks, emphasizing that the minimization of ot costs can pave the way for the derivation of policies that optimize rewards while maintaining safety. our objective is to find a policy that not only maximizes the expected return but also minimizes the ot cost between the state distribution under the policy and the risk distribution. the ot cost between the state distribution p π and the risk distribution p r is defined as:.where π(p π , p r ) is the set of all joint distributions on s × s with p π and p r as marginals, and c : s × s → r is a cost function that measures the cost of transporting probability mass from state s to state s ′ . 
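For a finite state space, the OT cost defined above can be evaluated exactly as a small linear program over the couplings in π(p_π, p_r). A minimal scipy sketch; the cost matrix, state space, and the two distributions are toy placeholders, not quantities from the paper.

```python
import numpy as np
from scipy.optimize import linprog

def ot_cost(p_pi, p_r, cost):
    """Exact discrete OT cost between two distributions over the same finite state set.

    p_pi, p_r: probability vectors of length m; cost: (m, m) matrix c(s, s')."""
    m = len(p_pi)
    c = cost.reshape(-1)                      # objective: sum_ij gamma_ij * c_ij
    a_eq = np.zeros((2 * m, m * m))
    for i in range(m):
        a_eq[i, i * m:(i + 1) * m] = 1.0      # row marginal:    sum_j gamma_ij = p_pi[i]
        a_eq[m + i, i::m] = 1.0               # column marginal: sum_i gamma_ij = p_r[i]
    b_eq = np.concatenate([p_pi, p_r])
    res = linprog(c, A_eq=a_eq, b_eq=b_eq, bounds=(0, None), method="highs")
    return res.fun

states = np.arange(4)
cost = np.abs(states[:, None] - states[None, :]).astype(float)   # c(s, s') = |s - s'|
p_pi = np.array([0.7, 0.2, 0.1, 0.0])   # state distribution induced by the policy
p_r = np.array([0.4, 0.3, 0.2, 0.1])    # target risk distribution
print(ot_cost(p_pi, p_r, cost))
```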
the objective is to provide a comprehensive understanding of how risk, as captured by ot metrics, interacts with fundamental concepts in rl: safety of policy (theorem 1): theorem 1 postulates the relationship between the policy that minimizes ot costs and its intrinsic safety. by focusing on states proximate to a target risk distribution, this theorem bridges the gap between policy safety and state distribution, highlighting how an optimal policy in the ot sense also maximizes the expectation of visiting states that align closely with the risk distribution. given an mdp and a risk distribution p r , the policy π that minimizes the ot cost d ot (p π , p r ) is a "safer" policy in the sense that it induces a state distribution closer to the risk distribution., the state distribution p π ′ induced by π ′ is closer to the risk distribution p r than p π , but π minimizes the ot cost d ot (p π , p r ).this formalizes the intuition that if a policy π minimizes the ot cost between the state distribution under the policy and the risk distribution, then the state distribution under the policy must be closer to the risk distribution (in the sense of the ot cost) than any state distribution induced by a different policy.) given an mdp and a risk sensitivity parameter λ, the optimal value function v * that incorporates the ot cost as a part of the objective function, is less than or equal to the optimal value function v * 0 that does not consider the ot cost, i. this theorem could be proved by showing that a higher λ leads to a higher penalty for deviation from the risk distribution in the objective function, thus leading to a policy that induces a state distribution closer to the risk distribution. given an mdp, let p r be a target risk distribution, and let b δ (p r ) = {s : d ot (p s , p r ) ≤ δ} be the set of states that are within ot distance δ of the risk distribution. then a policy π that minimizes d ot (p π , p r ) also maximizes e π , the expected number of visits to states in b δ (p r ), where the expectation is taken over trajectories generated by policy π. now, suppose π * minimizes the ot distance d ot (p π , p r ) to the risk distribution p r . by the properties of the ot distance, we have: contraction property: d ot (p t π , p r ) ≤ d ot (p π , p r ) for all t . now, let b δ (p r ) = s : d ot (p s , p r ) ≤ δ be the set of states that are within ot distance δ of the risk distribution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/395.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/395.txt new file mode 100644 index 0000000000000000000000000000000000000000..665a0c8f58a43103bbab9733512c2f6222883b62 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/395.txt @@ -0,0 +1 @@ +dimensionality reduction (dr) methods map high-dimensional data to a low-dimensional embedding, which enables data visualization. dr methods for visualization have played a critical role to gain insights into high-dimensional data, and the toolkit of dr methods has been rapidly growing in recent years (mcinnes et al. , sainburg et al. , amid and warmuth , wang et al. ). only equiped with a comprehensive understanding of these dr methods, can make informed decision based on data visualization from them, can substantially improve upon them.the state of the art for unsupervised dr relies on the stochastic neighbor embedding (sne) framework (hinton and roweis ), where t-sne (van der maaten and hinton , van der maaten ), umap (mcinnes et al. 
, sainburg et al. ) are the two most popular example methods, with impressive visualization performance on real-world data. understanding how these sne methods work has thus drawn massive attention (böhm et al. , wang et al. , damrich et al. ). wang et al. exploited a unified insight into the loss functions of t-sne, umap, trimap, and pacmap using graphs. damrich et al. furthermore generalized negative sampling in the graph construction, and uncovered the conceptual connection between ne and self-supervised contrastive learning. in this work, built on the discovered connection (damrich et al. ), we propose a unified pytorch framework that breaks down the components of t-sne, umap, trimap, and pacmap, and allows these sne methods to be reimplemented in a self-supervised contrastive setup. furthermore, given the success of self-supervised contrastive learning, or more generally, contrastive learning (van den oord et al. , chen et al. , chen et al. , khosla et al. ), we propose a supervised extension to the contrastive sne methods by leveraging label information, which provides a unified loss function that can be used for either unsupervised or supervised learning. our main contributions are summarized as below: • a unified pytorch framework for (un)supervised (non-)parametric contrastive ne methods • a generalized unified loss function • analytic results. sne methods usually preserve neighboring information by extracting a high-dimensional similarity distribution p over pairs ij of input samples i, so-called anchors, and their corresponding nearest neighbors j, so-called positives, where 0 < i, j ≤ n, and then minimizing the loss function between the high-dimensional similarity distribution p and the low-dimensional similarity distribution q θ. the estimation of the high-dimensional similarity distribution p differs among the sne methods. t-sne transforms the euclidean distances dist(z i , z j ) or d ij to the similarities sim(x i , x j ) or p ij with a gaussian kernel, while umap transforms them with a laplacian kernel. prior work showed that t-sne and umap lead to largely the same results when using the binary symmetric nearest-neighbor graph in high-dimensional space. 
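The high-dimensional kernels just mentioned, and the Cauchy kernel used for the low-dimensional similarities described next, are straightforward to write out. A minimal numpy sketch with a single global Gaussian bandwidth; it omits the per-point bandwidth calibration (perplexity) that t-SNE actually uses, so it is only an illustration of the transforms.

```python
import numpy as np

def pairwise_sq_dists(z):
    """Squared euclidean distances between all rows of z."""
    sq = np.sum(z ** 2, axis=1)
    return sq[:, None] + sq[None, :] - 2.0 * z @ z.T

def high_dim_similarities(x, sigma=1.0):
    """Gaussian-kernel similarities p_ij (single global bandwidth for simplicity)."""
    d2 = pairwise_sq_dists(x)
    p = np.exp(-d2 / (2.0 * sigma ** 2))
    np.fill_diagonal(p, 0.0)
    return p / p.sum()

def low_dim_similarities(z):
    """Cauchy-kernel similarities phi_ij = 1 / (d_ij^2 + 1), normalized over all pairs."""
    phi = 1.0 / (pairwise_sq_dists(z) + 1.0)
    np.fill_diagonal(phi, 0.0)
    return phi / phi.sum()   # division by the partition function z

x = np.random.default_rng(0).normal(size=(100, 50))   # high-dimensional points
z = np.random.default_rng(1).normal(size=(100, 2))    # embedding coordinates
p, q = high_dim_similarities(x), low_dim_similarities(z)
kl = float(np.sum(p * np.log((p + 1e-12) / (q + 1e-12))))   # t-SNE-style KL loss
```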
therefore, here we transform the distances dist(z i , z j ) or d ij into low-dimensional similarities sim(z i , z j ) or q θ,ij using a cauchy kernel ϕ(d ij ) = 1/(d 2 ij + 1), or ϕ ij , for all sne methods by default. t-sne's loss function measures the kullback-leibler divergence of the high-dimensional similarity distribution p from the low-dimensional similarity distribution q θ , which requires the normalized low-dimensional similarities q θ,ij = ϕ ij / z, where z = ∑ k≠l ϕ kl is the partition function. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/396.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/396.txt new file mode 100644 index 0000000000000000000000000000000000000000..2c4bb418a0cc2098f9bd1f68a0b5cfd23e86177b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/396.txt @@ -0,0 +1 @@ +the significance and proliferation of reinforcement learning (rl) across various disciplines are by now self-evident . rl is an umbrella of algorithms that are rooted in the concept of stochastic approximation , , . at its core, it tries to solve an optimal control problem by maximizing some notion of a cumulative reward. yet, with this promise of rl algorithms, a tough challenge arises when applying learned policies to real-world applications . these policies, trained in lab simulations or controlled environments, may suffer a degradation in performance or exhibit unsafe behavior . this stems from modeling mismatches and discrepancies between the training environment and real-world conditions. the field of stochastic optimal control (soc) , also known as dual control , yields two key behaviors: caution and probing. (fig. 1 gives a graphical overview of stochastic optimal control: the controller is not only concerned with regulating the state estimate (mean, mode, etc.), but also with regulating the state uncertainty (or the state estimate quality) by driving the system through highly observable regions, for instance, regions of better signal-to-noise ratio (snr) and/or of more/better sensors. the green and pink regions correspond to state uncertainty propagation along two different trajectories. the trajectory in pink, resembling a trajectory under stochastic optimal control, takes regulating uncertainty into consideration, and hence, on its path to the origin, it chooses the path of higher observability.) caution refers to the control actions that prevent undesirable outcomes when the system is under uncertainty, while probing involves actions aimed at gathering information about the system's uncertain parameters and states. these concepts play a central role in ensuring safety while enhancing the system's learning capabilities and state observability. in general, soc is computationally prohibitive, except for the simplest cases. relying on dynamic programming to solve such high-dimensional problems is hopeless, due to the curse of dimensionality. the potential of both rl and soc is limited by their inherent challenges. here, we utilize each to address the limitations of the other. specifically, the modeling mismatch problems inherent to rl can be alleviated by the caution and probing effects of soc. caution imposes restrictions on the rl agent's behavior under uncertainty and modelling mismatch, acting as a safeguard against false perception. probing, moreover, aims to correct the modelling uncertainty and to create an accurate perception of the environment.
on the other hand, rl, possibly together with a neural net as a function approximator, can mitigate the computational burden of soc. these hypothesized mutual benefits serve as the motivation for the work we present here.early work in the rl community acknowledged the need for stochastic policies when the agent has limited or distorted access to the states . the randomness intro-arxiv:2309.10831v3 26 feb 2024 duced by stochastic policies diversifies chosen actions and hence achieves "artificial probing" in a sense analogous to persistence of excitation in control theory and system identification . while this method diversifies actions to enhance learning, it may also compromise system safety and stability. in this paper, we propose a learning architecture that addresses rl's vulnerability to modeling uncertainties and aims to alleviate the computational burdens of soc. therefore, we seek a controller which, unlike those with stochastic policies, seeks deliberate probing when information gathering is required, and does so cautiously: respecting safety and performance conditions.where x k ∈ r rx is the state vector, u k ∈ r ru is the control input, y k ∈ r ry is the output signal, and w k ∈ r rx , v k ∈ r ry are exogenous disturbances., a control law that is only dependent upon the data accessible up until the moment of evaluating the control action, or, u k = u k (z k ), where z k = {y 0 , .the disturbance w k ⊂ w ⊂ r rx and the control input u k ∈ u ⊂ r ru belonging to the bounded sets w, u, result in an invariant compact set x, i. moreover, the observation y k is conditionally independent when conditioned on x k ; {v k } k in (1) is also white. under these conditions, the state x k cannot be directly accessed; it can only be inferred through the observation y k , which typically is not equal to x k .1) the bayesian filter: the equivalent stochastic representation of system (1), ∀k ∈ n: the transition kernel x k+1 ∼ p(x k+1 | x k , u k ) and the measurement likelihood y k ∼ p(y k | x k ), can be achieved, similar to, due to the whiteness of w k and v k . to simplify the notation, we denote π k|k = p(x k | z k ) and π k+1|k = p(x k+1 | u k , z k ), and define the mapping. (the separation principle): if the system (1) is linear, the minimizing control law is u k = k k xk|k , where k k is the time-varying lqr gain of the deterministic (state fully observed and noise is zero) version of the problem and xk|k is the conditional mean of the kalman filter.in the nonlinear case however, σ k|k 's evolution depends on the jacobians f k , h k , which in turn depend on u k and xk|k . by the formulation in section ii and the hypothesis above: (i) the transition density p(π k+1 | πk , u k ), induced by the dynamics(10), is twice differentiable in u k , and continuous in π k+1 and π k , (ii) the policy, being a neural net, is twice differentiable with respect to its parameters (for most activation functions), (iii) the reward function r, being quadratic, is differentiable in all of its arguments, (iv) the reward and the transition densities, being continuous, themselves and their jacobians in u k , in πk and u k over the compact set x × u, are bounded.apply u k in (1a) to sample the true x k+1 ; using x k+1 in (1b), sample the true y k+1 ; using πk , u k and y k+1 , evaluate πk+1 using(10); calculate the reward r(π k , u k ); store the tuple.where x k (1) is the first entry of x k , w k and v k obey the assumptions listed under (1), and moreover, w k ∼ n (0, σ w ) and w k ∼ n (0, σ v )1, where σ v = 0. 
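as a rough illustration only (the concrete system, filter, and reward below are simplifications and assumptions, not the paper's implementation), the data-collection loop described above can be sketched as follows, with an extended-kalman-filter-style update standing in for the belief propagation of eq. (10):

import numpy as np

def rollout(f, h, ekf_update, policy, reward, x0, belief0, steps, rng):
    """collect (belief, action, reward, next_belief) tuples for rl training.

    f, h        : true dynamics x_{k+1} = f(x_k, u_k, w_k) and observation y_k = h(x_k, v_k)
    ekf_update  : belief propagation (mean, cov, u, y) -> (mean', cov'), stand-in for eq. (10)
    policy      : maps the information state (mean, cov) to a control u_k
    reward      : r(belief, u), e.g. a quadratic cost on the estimate and its covariance
    """
    x, belief, buffer = x0, belief0, []
    for _ in range(steps):
        u = policy(belief)
        w = rng.normal(scale=0.1, size=x.shape)       # process noise w_k
        x_next = f(x, u, w)                           # sample the true next state
        v = rng.normal(scale=0.1)                     # measurement noise v_k
        y_next = h(x_next, v)                         # sample the true observation
        belief_next = ekf_update(*belief, u, y_next)  # propagate the information state
        buffer.append((belief, u, reward(belief, u), belief_next))
        x, belief = x_next, belief_next
    return buffer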
the critic network, approximating the action value function, takes three input values: both of the information state elements as well as the corresponding control action u k , and it outputs q 0 (π k , u k ).we use mini-batch learning, with batches of size 64 of tuples (π k|k , u k , r k , t (π k , u k , y k+1 )), and with learning rate 10 -3 . an lqg control is first applied: u k = k xk|k , where xk|k is provided by the ekf and k is the lqr gain of the deterministic version of the system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/397.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/397.txt new file mode 100644 index 0000000000000000000000000000000000000000..e42d12c8c68e0df163f84c0ee4687303dd153f09 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/397.txt @@ -0,0 +1 @@ +a number of factors, including job market competition and personal preferences, lead to people changing jobs over the course of their careers.however, changing jobs is a difficult decision that may be influenced by a variety of elements, including pay, job description, and location. a successful professional career requires making smooth job changes.the objective of this work is to accurately predict if an applicant will move to a new job or not using supervised machine learning (ml) models , , .to evaluate the model, several different implementations of classification were compared to determine which model suits this type of data best. these were trained on a subset of data originating from available person profiles collected through web scraping.the different steps of our approach are as follows:• data preprocessing (data cleaning): in this step, null values are removed from the training dataset. input values have been changed to specific required data types. categorical variables are encoded to dummy variables. • model building: in this step, several mlas have been performed including random forest (rf), logistic regression (lr), decision tree (dt), and extreme gradient boosting (xgboost). finally, smote is used to improve the performance of mlas. • model evaluating: in this step, models are assessed using decision support metrics such as precision, recall, f1-score, and accuracy.the remainder of this paper is organized as follows. the methodology adopted is detailed in section ii. section iii evaluates the proposed predictive model. conclusions and directions for future work are given in section iv. the first class contains all those employees who want to move to a new job and the second class consists of all those who did not seek a new job. as you need to fit and evaluate your model, you need to code the categorical data and convert all input and output variables to numeric values. for example, rf model struggles to capture information from categorical variables with a large number of categories if they are processed with the one-hot encoding technique.we have used different methods and tricks to manage the categorical variables present in the dataset used, namely:.• one-hot encoding: it consists of coding each categorical variable with different boolean variables (also called dummy variables) which take the values 0 or 1, indicating whether a category is present in an observation. consider a categorical variable x which admits k modalities m 1 , m 2 ,. a vector of size k which has 0s everywhere and a 1 at position i corresponding to modality m i . the categorical variable, therefore, is replaced with k numerical variables. 
• impact encoding: when the number of categories becomes very large, encoding by dummy variables can become inconvenient. an alternative method to clustering or truncation of categories consists in characterizing the categories by the link they maintain with the target variable y: this is the encoding impact.for a regression problem with target variable y, let x be a categorical variable with k categories m 1 , m 2 ,. for a training set of size n containing samples {(x i , y i )} independent and identically distributed, the estimator of this expectation is the mean of the values of y i for which the modality x i is equal to m k :.where s k is the set of indices i of the observations such that x i is equal to m k and n k the cardinality of this set.the nearmiss-1 method selects the elements of the majority class which have the smallest average distance with respect to the k closest examples of the minority class (k being a build the set a 1 .• lr: lr is a statistical model used to study the relationships between a set of qualitative variables x i and a qualitative variable y . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/398.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/398.txt new file mode 100644 index 0000000000000000000000000000000000000000..f3eb3c342f34d5c39e26ea3a5dbf8e1758800e85 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/398.txt @@ -0,0 +1 @@ +even though many debilitating diseases such as alzheimer's disease (ad) and parkinson's disease (pd) have catastrophic consequences for our human bodies, there is still no significant cure or development in early detection methods for these diseases. this significantly underscores the urgency of understanding the underlying principles behind these diseases. the people who suffer from ad not only have physical symptoms but are also affected emotionally. the inability to think, communicate and perform various tasks daily is often experienced by people suffering from ad . on the other hand, pd impacts one's movements and causes anxiety and depression . prior research has nailed down that several disruptions in the protein folding process can lead to misfolded proteins, which form insoluble long linear or fibrillar aggregates in several body parts. this process is identified to be the primary cause of ad and pd .protein folding kinetics (pfk) becomes vital for studying protein dynamics and behavior. the pfk's magnitude reflects a protein's propensity to undergo these transitions between folded and unfolded states or the association and dissociation of protein complexes . various factors influence pfk, such as ph, temperature, electric and magnetic fields, etc. pfk can be determined using spectroscopy, nuclear magnetic resonance (nmr), and fluorescence. however, these techniques often require specialized equipment and expertise. nmr experiments, for example, demand access to high-field spectrometers and advanced pulse sequences, which are not easily affordable. furthermore, spectroscopic methods, including uv-vis and infrared spectroscopy, have limitations when capturing fast kinetic events due to slower data acquisition rates. overcoming these limitations requires precise optimization of various parameters, development of specialized protocols, and integration of complementary techniques. 
several advancements in instrumentation, data acquisition, and analysis methods are also intended to address these limitations and improve the sensitivity and precision of protein kinetic studies utilizing spectroscopy, nmr, and fluorescence techniques.pfk predictions have been done prior to this research with machine learning and deep learning models. however, these approaches have encountered various difficulties due to the protein structure's intricate and irregular nature. these techniques have shown great potential in predicting, but their effectiveness is often inhibited by their inability to capture the fundamental nature of the protein structure between various parameters, thus being inaccurate .protein folding kinetics (pfk) becomes vital for studying protein dynamics and behavior. several advancements in instrumentation, data acquisition, and analysis methods are also intended to address these limitations and improve the sensitivity and precision of protein kinetic studies utilizing spectroscopy, nmr, and fluorescence techniques.in the field of computational biology, numerous methods have been developed to predict protein folding kinetics (pfk).protein folding database (pfdb)was considered for protein folding kinetics (pfk) prediction.the data is first passed to the data pre-processing module, which consists of an outlier remover (α), temperature standardization (β), feature extractor (γ), and encoder (δ). as the data consists of various features, α ensures the removal of outliers using the percentile-based outlier detection method, specifically the interquartile range (iqr) approach. temperature standardization is critical when dealing with protein folding data collected at various temperatures. β collects the rate constants of folding and unfolding (ln(k f ) and ln(k u ), respectively) at a reference temperature of 25°c using the eyring-kramers equationwhich facilitates precise assessments of folding kinetics and enabling more robust conclusions in protein folding research.as the number of features directly influences the efficiency of the model, finding the optimal number of features is essential to maximize the performance of the regressor while downsizing the number of feature samples and memory footprint. specifically, the batches d aa , d bb , d ab , and d ba were trained on the prominent features f a best , f b best , f a best ∪ f b best , and f a best ∩ f b best , respectively. the basis behind choosing an optimal feature subset is based on the performance, memory footprint, and inference time while availing the feature subsets. f a 2 feature subset is the most compact and computes in less time comparatively, but, when taking performance into account, other feature subsets deliver better performance. if a feature subset that strikes a balance between model size and performance had to be selected, f a 6 or f b 6 would be the best pick. these models were trained on the most prominent feature set f b 9 , as bonsai is designed to minimize computational resources, taking the best balance model into account would not be an appropriate comparison. the best regressor model (lgbm), in terms of performance, was evidently outperformed by the bonsai model, which consumed only 9% of the memory consumed by lgbm while being 6% more accurate when evaluated using r2 score. 
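to make the pre-processing module described above concrete, here is a minimal sketch assuming a pandas dataframe with a ln_kf column and a temperature column; the iqr rule is standard, while the temperature correction is only a placeholder for the eyring-kramers adjustment used in the paper, with an assumed sensitivity for illustration:

import pandas as pd

def remove_outliers_iqr(df, column, k=1.5):
    # keep rows whose value lies within [q1 - k*iqr, q3 + k*iqr]
    q1, q3 = df[column].quantile([0.25, 0.75])
    iqr = q3 - q1
    lo, hi = q1 - k * iqr, q3 + k * iqr
    return df[(df[column] >= lo) & (df[column] <= hi)]

def standardize_temperature(df, rate_col="ln_kf", temp_col="temp_c", ref_temp=25.0):
    # placeholder linear correction toward the 25 degree c reference temperature;
    # the paper uses the eyring-kramers equation for this step
    slope = 0.01  # assumed sensitivity of ln(k) to temperature, for illustration only
    out = df.copy()
    out[rate_col] = out[rate_col] - slope * (out[temp_col] - ref_temp)
    return out

df = pd.DataFrame({"ln_kf": [2.1, 2.5, 9.9, 2.3], "temp_c": [20.0, 25.0, 37.0, 30.0]})
clean = standardize_temperature(remove_outliers_iqr(df, "ln_kf"))
print(clean)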
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/399.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/399.txt new file mode 100644 index 0000000000000000000000000000000000000000..acce44ac8b07bcfb5eaed705a47938c2e4b68af4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/399.txt @@ -0,0 +1 @@ +federated learning (fl) framework allows multiple clients to collaboratively learn a model with the help of a parameter server (ps), without sharing their local datasets . instead, clients share their local updates with the ps after local training round and the ps broadcasts the aggregated model back to the clients. given the size of modern neural network architectures, this brings a massive amount of communication overhead. hence, one core challenge in fl is the communication cost.privacy is a significant concern when it comes to machine learning (ml) since the solutions depend on data that can reveal sensitive information about its owner. hence, while training ml models, it is vital to prevent any sensitive features from the training set from leaking. privacy is one of the core promises of fl since data never leaves clients and only the local model updates are shared. unfortunately, it has been shown that such updates, and even the final trained model are enough to reveal sensitive information about the training set . hence, privacy protection is still a concern in fl and has been the topic of ongoing research.in this paper, we use differential privacy (dp) to measure the privacy leakage, which has become the gold standard since its introduction. when an algorithm receives two adjacent datasets as inputs, dp measures the degree of similarity between the resulting outputs, and hence, it is a measure of indistinguishability. we give its formal definition as follows.definition 1. a randomized algorithm m satisfies (ε, δ)-dp for ε > 0 and δ ∈ . another application area of our scheme is distributed learning, such as the settings in . in such settings, a massive amount of data belonging to the same entity is used to train a model. so, the training task is distributed across many gpus or servers, called worker terminals owned by the same entity. in order to avoid privacy leakage from the final deployed model, which can be accessed by third parties either via white-box or black-box access, the training procedure must satisfy dp. in such cases, our scheme significantly reduces the communication cost from gpus or the worker terminals to the ps. hence, the trusted aggregator model in this paper is only an abstraction but not a stringent system requirement, and applies to many scenarios encountered in practice.communication-efficient fl has been an active research area . however, when it comes to dp guarantees, directly extending these techniques is suboptimal since compression for communication efficiency and privacy introduce separate errors. works such as consider tackling these two problems jointly. however, they mostly aim at guaranteeing local dp, in which each update from clients is separately protected. unfortunately, such a stringent requirement considerably hurts the final model's performance. 
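the formal statement referenced as definition 1 above appears to have been truncated in this extract; the standard (ε, δ)-dp definition it refers to reads:

\[
\Pr[\mathcal{M}(D) \in S] \;\le\; e^{\varepsilon}\,\Pr[\mathcal{M}(D') \in S] + \delta
\]

for every pair of adjacent datasets \(D, D'\) and every measurable set of outputs \(S\), with \(\varepsilon > 0\) and \(\delta \in [0, 1)\).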
moreover, due to the use of specially designed mechanisms to satisfy local dp, the aforementioned methods are not directly extendable to central dp.the main idea of our proposed method is that the randomization required for privacy hurts the accuracy, and hence, it may not be necessary for clients to send full precision updates since they will be already destroyed by the ps to some extent after adding noise. instead, we propose using subtractive dithering quantization on the client updates prior to sending them to the ps. this reduces the communication cost while keeping the same accuracy. we show that if the quantization step size is randomly generated following a particular distribution, with the help of shared randomness between each client and the ps, the noise in the reconstructed update at the ps follows a normal distribution for any third party that does not have access to the common randomness. by employing dithered quantization, we simulate the normal noise addition process to ensure dp and avoid adding noise twice for quantization and dp, while significantly reducing the communication overhead.federated learning (fl) framework allows multiple clients to collaboratively learn a model with the help of a parameter server (ps), without sharing their local datasets. instead, clients share their local updates with the ps after local training round and the ps broadcasts the aggregated model back to the clients. privacy is one of the core promises of fl since data never leaves clients and only the local model updates are shared. hence, we aim to provide central dp guarantees; that is, we want the average of the client updates to satisfy the dp guarantees. this can be the case when the clients give their data to a trusted organization, such as a government agency, an independent regulator, or a research institution while they do not want to reveal their data to other clients via model updates, or to third parties via the final deployed model. in order to avoid privacy leakage from the final deployed model, which can be accessed by third parties either via white-box or black-box access, the training procedure must satisfy dp.the main idea of our proposed method is that the randomization required for privacy hurts the accuracy, and hence, it may not be necessary for clients to send full precision updates since they will be already destroyed by the ps to some extent after adding noise. we show that if the quantization step size is randomly generated following a particular distribution, with the help of shared randomness between each client and the ps, the noise in the reconstructed update at the ps follows a normal distribution for any third party that does not have access to the common randomness. hence, one of our goals is to protect the privacy of each client's local dataset from other clients since the updated model across rounds may reveal important sensitive information. 
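for intuition only, here is a bare-bones numpy sketch of subtractive dithered quantization with a dither generated from randomness shared between client and server; the paper's scheme additionally draws the step size itself from a particular distribution so that the reconstruction error looks gaussian to any third party without the shared randomness, which this toy version does not reproduce:

import numpy as np

def client_quantize(grad, step, rng):
    # dither u is generated from randomness shared with the server
    u = rng.uniform(-step / 2.0, step / 2.0, size=grad.shape)
    m = np.round((grad + u) / step)        # integer messages sent to the server
    return m, u

def server_reconstruct(m, u, step):
    # subtractive dithering: subtract the same dither after dequantization
    return m * step - u

rng_shared = np.random.default_rng(seed=0)  # stands in for the client/server shared randomness
grad = np.random.randn(10)
step = 0.05
m, u = client_quantize(grad, step, rng_shared)
recon = server_reconstruct(m, u, step)
print(np.max(np.abs(recon - grad)))         # reconstruction error is bounded by step/2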
hence, we also aim to protect privacy leakage from the final deployed model, which makes our model and solution relevant even when the clients are trusted, as in distributed learning.to transmit the quantized gradients to the ps, client i encodes each element (∇ℓ t i ) j using b i,j log 2 2 • c ∆i,j + 1 bits since |(∇ℓ t i ) j | ≤ c, resulting in j∈ b i,j bits of communication from client i to the ps in round t.using the common randomness shared between client i and the ps, the same realizations of v i,j 's and u i,j 's generated by client i can also be obtained by the ps.receive mi,j and decode as q (∇ℓ t i )j + ui,j estimate ( ∇ℓ t i )j = q (∇ℓ t i )j + ui,j -uj end for end for average gradients gt = 1 n i∈ (∇ℓ t i )j update the model wt+1 = wt -ηgt broadcast wt+1 to all clients end for since every client divides the sum of its sample gradients by b, and the ps also averages them over n clients, the effect of the gradient of a single sample is at most c/(bn ). fortunately, we observe that the communication cost scales logarithmically with the number of clients; and hence, even with a very large numbers of clients, our scheme still uses significantly less communication.through both theoretical analysis and experimental demonstrations, we have shown that using subtractive dithering quantization in the trusted aggregator model of fl can produce the same level of dp and accuracy guarantees as gaussian noise addition, while utilizing fewer communication resources. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/4.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/4.txt new file mode 100644 index 0000000000000000000000000000000000000000..04be40b7fd0ecde220c41c30961878d5a932c5eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/4.txt @@ -0,0 +1 @@ +in this paper we study the fundamental problem of learning an optimal policy in a markov decision processes (mdp), which is the most fundamental model for reinforcement learning (rl). in this model, there is a finite set of states, s, for each of which, there is a set of actions a. an agent is able to interact with the mdp by playing actions and then the environment will transition to a new state based on some fixed but unknown probability and at the same time return a scalar reward to the agent. for different variants of the mdp, the agent aims to maximize different objectives. for instance, in the discounted setting (dmdp), the agent aims to maximize the expected cumulative rewards, e , where r t is the reward collected at time t and γ ∈ , puterman , sutton and barto ).in the learning setting, the transition probability is unknown and hence an agent needs to interact with the mdp to collect data and solve the mdp. in order to characterize the learning sample complexity, we study the problem with the fundamental generative model kearns and singh , in which the agent is able to query each state-action pair for arbitrary number of samples (of the next state and the reward). then the goal is to 1 output a good stationary and deterministic policy π : s → a1 such that the value ρ π := limt=0 r t is approximately maximized, where r t is the reward collected at time t by following the actions of π.recently, there is a line of works kearns and singh , kakade , gheshlaghi azar et al. , jin et al. , sidford et al. , sidford et al. , wainwright , tu and recht , agarwal et al. , li et al. , settling the learning complexity of dmdp with generative model. 
the state-of-theart sample upper bound is provided in li et al. : o(|s||a|(1 -γ) -3 ε -2 ) 2 for an ε-optimal policy. a matching lower bound is provided in gheshlaghi azar et al. . however, for amdp, despite a number of recent advances wang , jin and sidford , jin and sidford , similar characterizations have not been achieved yet. the tightest upper bound so far is o(t mix ε -3 ) by jin and sidford , where t mix is the worst-case mixing time for a policy (i.e., the mixing time of the markov chain induced by playing a policy). they also showed a minimax lower bound ω(t mix ε -2 ). yet, whether the characterization using the worst-case mixing time is stringent remains illusive. one may simply question: why the worst policy can affect the complexity of learning the best policy?in the online setting (no generative model), however, auer et al. shows that the learning complexity (regret) is depending on a notion called the diameter, which measures the best hitting time from one state to another. such a diameter can be small even if there are very bad policies in the mdp. recently, the regret bound was additionally improved in zhang and ji by substituting the diameter d in the upper bound into another even smaller parameter h, the span 3 of bias of a gain-optimal policy. nevertheless, as these online algorithms only produce non-stationary policies4 , they cannot be applied in our setting. to best of our knowledge, we are not aware of any algorithms having a sample complexity only depending on the diameter d or the bias h of the amdp. a list of recent results are presented in table 1.in this paper, we revisit the learning question in amdp with a generative model and achieve nearly tight complexity characterizations . in particular, we show that if an amdp is weakly communicating and an upper bound of h, the span of bias of any optimal policy, is known, there is an algorithm that learns an ε-optimal policy with o(|s||a|hε -3 ) samples. we complement our upper bound by a nearly matching lower bound ω(|s||a|hε -2 ). our result improves the result in jin and sidford , where the bound depends on t mix , since t mix is always larger than h (see appendix 5) and can be considerably larger in certain mdps. such a diameter can be small even if there are very bad policies in the mdp. in particular, we show that if an amdp is weakly communicating and an upper bound of h, the span of bias of any optimal policy, is known, there is an algorithm that learns an ε-optimal policy with o(|s||a|hε -3 ) samples. proves a minimax lower bound of n = ω(|s||a|(1 -γ) -3 ε -2 ) to promise to find an ε-optimal policy.wang first gives an upper bound of n = o(τ 2 |s||a|t 2 mix ε -2 ), where τ is an upper bound of the ergodicity of all invariant distribution of all stationary policies, by a primal-dual method to the linear programming version of amdp problems. recently,jin and sidford further improves the upper bound to n = o(|s||a|t mix ε -3 ) by reducing amdps to dmdps, and also proves a lower bound of n = ω(|s||a|t mix ε -2 ), showing a tight dependence on t mix .when it comes to the online setting, where we pursue a low total regret instead of an ε-optimal stationary policy,auer et al. first gives an online algorithm with t -step regret upperbounded by ∆(t ) = o(d|s| |a|t ) where d is the diameter of the mdp, and also proves a lower bound ∆(t ) = ω( |s||a|dt ). 
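restating, purely for readability, the sample-complexity bounds quoted in the surrounding text (logarithmic factors as given there):

\[
\begin{aligned}
\text{DMDP (Li et al.):}\quad & O\!\left(|S|\,|A|\,(1-\gamma)^{-3}\,\varepsilon^{-2}\right) \\
\text{AMDP (Jin and Sidford):}\quad & O\!\left(|S|\,|A|\,t_{\mathrm{mix}}\,\varepsilon^{-3}\right) \ \text{vs. lower bound}\ \Omega\!\left(|S|\,|A|\,t_{\mathrm{mix}}\,\varepsilon^{-2}\right) \\
\text{AMDP (this paper):}\quad & O\!\left(|S|\,|A|\,H\,\varepsilon^{-3}\right) \ \text{vs. lower bound}\ \Omega\!\left(|S|\,|A|\,H\,\varepsilon^{-2}\right)
\end{aligned}
\]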
to reduce from amdps to dmdps, we only need to bound the rhs in lemma 6 for two special policies: π = π * , the optimal policy in the amdp; and π = π, a near-optimal policy in the dmdp. .markov decision process a markov decision process (mdp) is a tuple m = (s, a, p, r), where s is a finite set of states, a is a finite set of actions, p : s × a → r s is the transition probability to any state when taking any action at any state, and r : s × a → is the reward function.average reward mdp an average reward mdp (amdp) is a mdp running infinite steps but with an average gain function instead of a discounted value function.li et al.algorithm 1 reducing to dmdp and solving with algorithm 1 inli et al.the intuition is that, since m 1 and m k,l disagree with the (unique) ε-optimal action at state x k , any (ε, δ)correct algorithm must distinguish between m 1 and m k,l , whose only difference is the transition probability at (s k , a l ).in this work, we establish an o(|s||a|hε -3 ln 1 δ ) sample complexity upper bound of the ε-optimal policy learning problem in amdps, and an ω(|s||a|dε -2 ln 1 δ ) lower bound (with a corallary of ω(|s||a|hε -2 ln 1 δ ) lower bound), matching in all parameters of (|s|, |a|, h, ln 1 δ ) up to some logarithmic factors. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/40.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/40.txt new file mode 100644 index 0000000000000000000000000000000000000000..f063401c84cf403059e78ece541692cda2de77eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/40.txt @@ -0,0 +1 @@ +in the field of constraint programming, hard to solve instances are highly sought after because they present an opportunity of improvement to solvers, by testing and challenging them. valuable insights can be gained by running solvers on difficult instances, whether these benchmarks come from industrial applications or were specifically crafted to this aim . various competitions provide regular opportunities for studying strngths and weaknesses of the latest constraint solvers. competitions for satisfaction problems have been done for two decades , while competitions for optimization problems have existed for almost as long . in contrast, the search for challenging model counting instances is a more recent endeavor, with the first edition of the model counting competition only taking place in 2020 . because model counting has a wide range of applications, especially in the field of probabilistic reasoning , studying difficult model counting instances is of great interest.it is preferable for benchmark instances to exhibit certain properties in order to return useful feedback. first, generating them should be considerably faster than solving them. if it takes as long to build them than to solve them, then the number of available instances, and therefore the number of opportunities the solvers have to test themselves, will be limited. second, the instances should be as diverse as possible, covering a wide array of sizes and structures. this is to ensure that the feedback obtained by running solvers on the instances is not restricted to a specific subset of the problem. lastly, to better capture the challenging areas of model counting, it is desirable that the hardness of difficult instances comes from actually having to count the models, and not from the question of satisfiability. 
if a constraint instance has no solution and its proof of unsatisfiability is hard to establish for satisfaction solvers, then it represents a useful tool to improve satisfaction solvers, but is not as helpful for model counters because it does not address the distinct particularities of the model counting problem.the instances produced by our generator fulfill all three above conditions. they are fast to build, in linear time in the number of variables times the number of literals. their parameters (arity, number of variables, number of clauses) can be set to any value, even the arity of each individual clause can be adjusted independently of the others. finally, while it is difficult to count their number of models, finding a single solution is easy. this means that whatever challenge the instances present is indeed related to the model counting problem.not all constraint instances are equally hard to solve, even when they belong to the same problem and contain the same number of variables. for random conjunctive normal form instances, increasing the number of clauses while fixing the arity and the number of variables exposes the existence of two distinct phases, one where most instances are satisfiable, and the other where on the contrary most instances are unsatisfiable. the transition between the two phases is sharp and corresponds to a peak in the difficulty of determining whether an instance has a solution , with the hardness of the problem decreasing as the number of clauses deviates from the transition position.basing the definition of the phase transition on satisfiability probability can of course only be done for problems where not all instances have a solution. however, other definitions have been adopted, revealing that the phase transition behavior occurs even for problems where all instances are satisfiable . for random model counting instances, phase transitions that depend on the probability for the number of solutions to be above a given threshold have been observed . while the particular numbers picked by the authors in their experiments did not allow them to notice any peak of difficulty associated to these phase transitions, we will empirically show that these peaks do in fact exist for some other threshold values, at least for ternary instances.the outline of our paper is as follows. in the next section, we recall the definitions related to the general satisfiability problem and describe our generation algorithm in details. in section 3, we compare the performance of our instances against other benchmarks in the most recent model counting competition, highlighting how our generator produced the smallest challenging instances of the benchmark pool. in section 4, we present the results of two sets of experiments. first, we show that when increasing the number of clauses, there emerges a peak in the difficulty of model counting instances, corresponding for ternary instances to a phase transition associated with a specific number of solutions threshold. second, we show that we can use this data to reliably predict where in the constrainedness map will difficult model counting instances appear, without having to generate and solve them beforehand. finally, we conclude in section 5. for random conjunctive normal form instances, increasing the number of clauses while fixing the arity and the number of variables exposes the existence of two distinct phases, one where most instances are satisfiable, and the other where on the contrary most instances are unsatisfiable. 
in section 3, we compare the performance of our instances against other benchmarks in the most recent model counting competition, highlighting how our generator produced the smallest challenging instances of the benchmark pool. first, we show that when increasing the number of clauses, there emerges a peak in the difficulty of model counting instances, corresponding for ternary instances to a phase transition associated with a specific number of solutions threshold. we had noticed in earlier tests that instances built upon balanced instances seemed to be slightly harder, so we generated slightly smaller instances of the second type, with values of 80 and 90 for n. in addition of reporting the total number of instances solved for each model counter, we also distinguish the results between the instances that we submitted and instances submitted by others, as well as between small and large instances.while none of our instances manages to beat all the solvers, it is clear from the plots that our benchmarks are the most challenging instances of their size, especially when size is defined in terms of number of clauses. none of our thirteen instances in the track was solved by more than two model counters, while all sixteen other small instances were solved by at least six of the seven model counters.• random: completely random k-cnf instances with n variables, where the set of k variables for each clause with k literals is picked uniformly at random among the n k possibilities, and the polarity of each literal is determined by an independent coin toss.random instances were chosen to have a control dataset with instances that we expected to be easy, or at least not as challenging as our ran-dom+cluster instances. random+solution instances are not as structured as random+cluster instances, but unlike completely random instances they cannot contain a small subset that is trivially unsatisfiable. this would seem to indicate that the peak of difficulty for counting the number of models of 3-cnf instances is indeed associated with some phase transition that is independent of the model counter used.an interesting observation that can be made from figure3(as well as from figures4and9), is that while our own random+cluster instances are more difficult to solve than random and random+solution instances, the latter two give nearly identical runtimes, no matter which model counter is used.6 , then there is a sharp transition between a phase where all instances have more solutions than this threshold, and a phase where none of the instances does, and that this transition corresponds exactly to where the hardest instances are. generating instances for many constrainedness values, solving them all and keeping the most challenging ones would take too long to be feasible, so instead we use our results to immediately select the number of clauses m most likely to yield hard instances. we also empirically proved that for arity k = 3 the difficulty of both our instances and random instances peaks at a precise location in the constrainedness map. 
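as a point of reference, a uniform random k-cnf generator of the kind used as the baseline above can be written in a few lines; the list-of-signed-integers clause representation is an assumption for illustration:

import random

def random_kcnf(n_vars, n_clauses, k, seed=None):
    """generate a random k-cnf instance: each clause picks k distinct variables
    uniformly at random and flips each literal's polarity with a fair coin toss."""
    rng = random.Random(seed)
    clauses = []
    for _ in range(n_clauses):
        variables = rng.sample(range(1, n_vars + 1), k)
        clause = [v if rng.random() < 0.5 else -v for v in variables]
        clauses.append(clause)
    return clauses

# example: 3-cnf with 100 variables at clause/variable ratio 4.0
instance = random_kcnf(n_vars=100, n_clauses=400, k=3, seed=42)
print(instance[:3])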
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/400.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/400.txt new file mode 100644 index 0000000000000000000000000000000000000000..95f9ac595128e6aac81235b01841091f9db50aae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/400.txt @@ -0,0 +1 @@ +in recent years, deep neural networks (dnns) have achieved remarkable success in perception tasks such as vision, speech, and language . however, heterogeneous tabular data in industrial scenarios still poses significant challenges, making it the "last unconquered castle" for dnns . in tabular modeling tasks, shallow decision tree ensembles like random forests and gradient boosting trees remain the preferred tools for data mining specialists . however, these methods lack the ability to learn deep representations, which limits their potential to benefit from larger models and more training data .drawing on the core principles of deep learning-layerby-layer processing, intra-model feature transformations, and sufficient capacity-zhou et al. introduced deep forest (df), a tree-based deep learning approach . deep forest constructs a cascading structure by training random forests layer by layer. in classification tasks, each layer's forests generate class probability distributions, which are then merged with original features for the subsequent layer. upon reaching the maximum layer depth, optimal layer selected by crossvalidation is used to produce final output. when sufficiently deep, deep forest aims to achieve robust and powerful feature representations through this iterative refinement .however, due to its supervised greedy multi-layer architecture, deep forest is prone to overfitting , and such issue in one lay can propagate to subsequent layers . while increased depth should theoretically enhance representational power , it frequently leads to severe overfitting in practice, undermining generalizability . moreover, the similar learning objectives across layers diminish the benefits of building deeper models.overfitting in deep forest stems from its intrinsic learning procedure, which is difficult to suppress through hyperparameter tuning . reducing the model size, on the other hand, would compromise its design principle. data augmentation offers an alternative for regularization without limiting model capacity . however, tabular data augmentation remains underexplored due to the absence of invariances in heterogeneous features . furthermore, uniform augmentation intensity across layers result in similar feature representations and introduce high variance, complicating selection of the optimal layer based on validation error.in this work, we propose to improve deep forest with learnable data augmentation policy schedules. firstly, we introduce a simple yet potent data augmentation technique called cmt (cut mix for tabular data) to regularize deep forest. furthermore, a population-based algorithm is proposed to globally search for augmentation policy schedules, enabling layer-specific adjustments in augmentation intensity. 
finally, we utilize outputs from intermediate layers to construct a checkpoint ensemble, serving as a variance reducer to ensure more stable and robust results.our contributions are summarized as follows: • our method sets sota benchmarks in tabular classification and enables policy transfer to df variants.drawing on the core principles of deep learning-layerby-layer processing, intra-model feature transformations, and sufficient capacity-zhou et al. introduced deep forest (df), a tree-based deep learning approach. furthermore, uniform augmentation intensity across layers result in similar feature representations and introduce high variance, complicating selection of the optimal layer based on validation error.in this work, we propose to improve deep forest with learnable data augmentation policy schedules. firstly, we introduce a simple yet potent data augmentation technique called cmt (cut mix for tabular data) to regularize deep forest. furthermore, a population-based algorithm is proposed to globally search for augmentation policy schedules, enabling layer-specific adjustments in augmentation intensity. we thus reformulate the problem to identify an optimal sequence of policy combinations, establishing a layerwise policy schedule for deep forest.neighbour search: given a selected policy θ i,j = (p i , m j ) from the set p × m , a neighboring policy θ x,y = (p x , m y ) is chosen with probability of 1 |i-x|+|j-y|+1 . we initialize the best policy θ 0 for the first layer with grid search and acquire 2k -1 neighboring policies via neighbour search.input : list of policies p, list of models m, number of models (policies) l, max layer k output: train each model in m corresponding to p, evaluate the accuracy of each model, and sort m and p by accuracy in descending order; result: m,p evaluations. in augdf, we harness the hierarchical diversity introduced by the augmentation policy schedule to construct a checkpoint ensemble, as illustrated in figure1, which incurs neither additional training cost nor significant inference overhead. interestingly, even the extensive automl ensemble, auto-gluon, fails to match augdf's performance, underscoring the efficacy of augdf's deep representation and the benefits derived from its learnable data augmentation policy schedule. as illustrated in figure3, the policy schedules are universally beneficial, with all df variants exhibiting positive improvements across all datasets, albeit less so than the gains seen with augdf over df due to augdf's exhaustive policy search. through the integration of cmt data augmentation technique, population-based augmentation policy schedule learning and checkpoint ensemble, we successfully mitigate overfitting and elevate model performance. notably, the learned augmentation policy schedules are not only effective but also transferable, allowing them to be applied to variants of deep forest. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/401.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/401.txt new file mode 100644 index 0000000000000000000000000000000000000000..297f73a3654f3012fbd8beeddeacad739d6d4a9e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/401.txt @@ -0,0 +1 @@ +deep neural networks in the world of machine learning are widely used to obtain good results for use cases in industry, research and various fields. 
however, for highly complex cases we have the problem of needing a large number of parameters, with very large layers of neurons, which can be seen as having to apply very large matrices. reducing the number of parameters in various ways has been extensively studied in the literature, such as by decomposing matrices into tensor networks or by directly training the tensor network itself (fig. 1). our focus of analysis will be on methods where a tensor network is generated to model the layer tensor and trained directly, rather than using a full matrix, for example, when we try to use tensorized physics-informed neural networks to solve differential equations for big industrial cases, such as the heat equation of an engine or fluids in a turbine. in this case the initialization problem is often encountered, which we will see in the next section. if we initialize the elements of each tensor with a certain distribution, when we contract the tensor network to obtain the tensor it represents, some of its elements are too large (infinite) or too small (null) for the computer. we want to eliminate precisely these problems. a first proposal could be to contract the tensor network and eliminate these elements. however, in certain very large layers we cannot store all the tensor elements in memory, so we need another way. one way is to re-initialize the tensor network by changing to a distribution with better hyperparameters, changing the mean and standard deviation. nevertheless, many of these methodologies are not easy to apply or are not efficient at all. our method consists of iteratively calculating the frobenius norm for different sections of the tensor network until a condition is met, at which point we divide all the parameters of the tensor network by the calculated factor in a particular way. this allows us to gradually make the frobenius norm of the layer tend to the number we want, without having to repeatedly re-initialize. this method is remarkably interesting for hierarchical tree-form layers, especially in tensor train (tt), tensor train matrix (tt-m) and projected entangled pair states (peps). this can also be used in other methods with tensor networks, such as combinatorial optimization, to determine hyperparameters, and it can be combined with other initialization methods. when we have a tensor network of n nodes, the elements of the tensor representing the tensor network are given by the sum of a set of values, each given by the product of n elements of the different nodes. for a tensor network a, the shape of the elements of the layer is given accordingly: we see that for 5 indices in the tensor we have to multiply 5 tensor elements, but in the general case with n indices we have the analogous product of n node elements.
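a simplified numpy sketch of the normalization idea described here, for a tensor-train layer (an assumption made for concreteness; the paper also covers tt-m and peps): the frobenius norm of the represented tensor is accumulated core by core, giving the partial square norms, and each of the n cores is then divided by the n-th root of the total norm so that the represented tensor has the target norm. the overflow/underflow handling that stops at an intermediate partial norm is omitted for brevity.

import numpy as np

def tt_frobenius_norm(cores):
    # cores[k] has shape (r_{k-1}, d_k, r_k); boundary ranks are 1
    env = np.ones((1, 1))
    partial_sq = []
    for core in cores:
        # contract the physical index of the core with itself and absorb it into the environment
        env = np.einsum('ab,aic,bid->cd', env, core, core)
        partial_sq.append(np.trace(env))   # partial square norm after this node
    return np.sqrt(partial_sq[-1]), partial_sq

def tt_normalize(cores, target=1.0):
    norm, _ = tt_frobenius_norm(cores)
    factor = (target / norm) ** (1.0 / len(cores))  # spread the rescaling over all nodes
    return [core * factor for core in cores]

rng = np.random.default_rng(0)
cores = [rng.normal(size=(1, 4, 3)), rng.normal(size=(3, 4, 3)), rng.normal(size=(3, 4, 1))]
cores = tt_normalize(cores, target=1.0)
print(tt_frobenius_norm(cores)[0])   # ~1.0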
for a general case with bond dimension b, the dimension of the index that is contracted between each pair of nodes, n nodes and a constant elements of the nodes we would have.to avoid that the elements of the layer are too big or too small, and therefore we have too big or too small outputs in the initialization, we will normalize these elements so that the norm of the tensor is a number that we choose, for example 1.p ||a|| n,n , the partial square norm at n nodes of a tensor network a with n nodes, will be defined as the norm of the tensor network a n defined by the first n nodes of a.as we can see, in this case we would only have to do the same process as when calculating the total norm of the total tensor network, but stopping at step n and contracting the bond index of the two final tensors of the chain.if we have a tensor network a, representing a n a × m a matrix whose frobenius norm ||a|| f is infinite, zero or outside a certain range of values, we will want to normalize the elements of our tensor network so that the norm ||b|| f of the new tensor network b is equal to a certain number. to normalize the norm of the a tensor with n nodes, we will only have to divide the elements of each of its nodes by ||a|| 1/n f . if the total norm is infinite (zero), there will exist a partial square norm of n nodes whose value is finite and non-zero such that the partial square norm of n + 1 nodes is infinite (zero). this is because each step we add a new node to the partial square norm, we multiply by a new value, so infinity (zero) will appear after a certain number of nodes, being the partial square norm with one node less a valid number to divide by.we want a tensor network b with frobenius norm f , with n nodes and we set as tolerance range (a, b). however, this energy could be recovered if we perform the method, we save the scale factor by which we are multiplying the elements of the tensor network, and we multiply the values of the resulting tensor network by this factor. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/402.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/402.txt new file mode 100644 index 0000000000000000000000000000000000000000..139a14b85c37863abb5f3898764c0e2cb8253c16 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/402.txt @@ -0,0 +1 @@ +each layer of a neural network (nn) can be represented as a matrix of weights acting on a vector of input data or activation functions. specifically, the input to each layer is a vector of activation values from the previous layer, which is multiplied by a weight matrix to produce a new vector of weighted inputs. at each neuron, the weighted inputs are then passed through an activation function to produce the output of the layer.consider a feedforward nn of depth l, with l th layer width nl ≤ l. let w l ij be the weight matrix connecting layer l-1 to layer l (absorb all bias terms bl into additional rows of the weight matrix w l ij).let x be the input space, y be the target space. let d = {(xi, yi)} be a representative training dataset with xi ∈ x, yi ∈ y. for some input vector a ∈ r n , the propagation of this input through the nn is given for activation functions φ : r → r by y l i(a) = ∑ nl-1 w l ij φ(y l-1 j(a)) (1) as is common to the nn literature , we assume: 1) x is a compact metric space with distance function dx.2) activation functions φ(z) are continuous and bounded on their domain (e.g. 
relu, tanh).3) the loss function l(w) is continuous and differentiable in w.4) the nn trained on d converges stably: ||w* -w|| ≤ m*||∇l(w*)|| for some m*>0. let w l ij be the weight matrix connecting layer l-1 to layer l (absorb all bias terms bl into additional rows of the weight matrix w l ij). for some input vector a ∈ r n , the propagation of this input through the nn is given for activation functions φ : r → r by y l i(a) = ∑ nl-1 w l ij φ(y l-1 j(a))(1)as is common to the nn literature, we assume: 1) x is a compact metric space with distance function dx.proof: from eqn 1 and theorem 1, the nn l th layer's activation function output vector φ(y l-1 ) multiplied by the weighted adjacency matrix w l ij encodes part of the function f mapping the i-dimensional input manifold to the o-dimensional output manifold f : r i → r o .according to the svd theorem, every m × n matrix can be factorized into m = u × s × v t (2) where u is an m × m orthogonal matrix, s is an m × n diagonal matrix with non-negative real numbers on the diagonal, and v is an n × n orthogonal matrix. approximating an f': r i → r o , where u and v are orthogonal matrices of dimensions o × o and i × i respectively, s' is an n' × n' diagonal matrix with the largest n' singular values of w l ij in descending order, odist and idist are o × n' and n' × i rectangular matrices. in contrast, we can interpret s' in the truncated svd of s in m=u×s×v t as a linear approximation of the latent space within the mapping f: r i → r n' → r o represented by the w l ij matrix. such a linear approximation to compress the r i × r o manifold representing f into a lower n'-dimensional subspace or latent space manifold, is possible according to the manifold learning hypothesis. every data point {(xi,yi)} need only be represented by their coordinates, and thus a fully-connected product graph can be constructed between the input x graph and output y graph, and between the p data points in r i × r o space. however, if there is a different topology between x, y and the manifold, then every data point {(xi,yi)} in the original r i × r o space is actually not equally connected in the latent space, and we can construct a locally-connected graph between the p data points (weighted by unequal adjacency) as a discretization of the original xy manifold.where d is the degree matrix, a is the adjacency matrix in the case of the fully-connected graph, one can prove that solving the eigendecomposition of l and taking the largest n' eigenvectors of l is equivalent to solving for the truncated svd of xy to get s'.similarly, in the case of the locally-connected graph, one can prove that the l matrix is equivalent to a discretization of the laplace-beltrami operator on the (i + o)-dimensional manifold defined by xy, such that l's eigen-decomposition allows us to obtain a n'-dimensional latent space manifold to define s'. 
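a small numpy illustration of the truncated factorization described above: keeping the n' largest singular values of a layer's weight matrix gives a rank-n' linear map through an n'-dimensional latent space (the graph-laplacian-based idist/odist construction discussed next is not reproduced here):

import numpy as np

def truncated_svd(w, n_latent):
    # w: (out_dim, in_dim) weight matrix; keep the n_latent largest singular values
    u, s, vt = np.linalg.svd(w, full_matrices=False)
    u_r, s_r, vt_r = u[:, :n_latent], s[:n_latent], vt[:n_latent, :]
    w_approx = u_r @ np.diag(s_r) @ vt_r          # rank-n_latent approximation of w
    return w_approx, (u_r, s_r, vt_r)

rng = np.random.default_rng(0)
w = rng.normal(size=(64, 128))
w_hat, _ = truncated_svd(w, n_latent=16)
print(np.linalg.norm(w - w_hat) / np.linalg.norm(w))   # relative approximation error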
instead, they can be theoretically calculated by: idist = c(l × lin -1 ) odist = c'(lout × l -1 )(6)where l is the graph laplacian matrix for the latent manifold, lin and lout are the laplacian matrices for the input graph x and output graph y respectively, l -1 is the pseudo-inverse matrix of l (and where the c, c' matrices are merely used to reshape the graph laplacian matrices for n', i, o dimensions via hypergraph spectral clustering, another eigen-decomposition), such that if we denote the i/odist matrices as a mapping function g: metrica → metricb, it satisfies metricb = integral g(derivative of metrica) = integral g(la) integral(lb) = integral g(la) lb = g(la); so g = lb × la -1(7)note: in truncated svd, g is an identity matrix (eqn 4), implying lb and la are equivalent, and their corresponding graphs are equivalent in topology. then we can interpret v t as the first isometric transformation on x, idist = c*l*lin -1 as a metric transformation from the input manifold to the latent manifold (via reversed diffusion then re-diffusion of the signal on their respective graphs) to produce x', s' as the latent mapping function/manifold that maps x' to an approximated y' in latent space, odist = c'*lout*l -1 as another graph-based metric transformation from the latent manifold to the output manifold, and u as the second isometric transformation.computationally, lmd obtains the n'-dimensional latent space s' by computing the diagonal matrix of the r i × r o data space with svd and learning the eigen-decomposible graph laplacian matrices of the input, latent, and output manifolds.in our lmd factorization, we provide a more mathematically grounded formulation of the transition weight matrix of every nn, where the similarity function is analogous to the idist metric transformation matrix based on the graph laplacians, instead of using a trial-and-error approach with different metrics, from dot product to manhattan distance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/403.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/403.txt new file mode 100644 index 0000000000000000000000000000000000000000..68eed87878be8f64d44b95ba4e2452639183e630 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/403.txt @@ -0,0 +1 @@ +federated learning is distributed learning framework where data are used locally (i.e., local devices or local institutions) instead of sharing or storing data through centralized server . existing studies in federated learning have shown that learning good global model from local models is possible without sharing or storing the entire data. therefore, federated learning can effectively address privacy concerns related to training deep learning models on large datasets, which gained attention to federated learning.one of major challenges in federated learning is heterogeneity among local clients. showed that simply averaging local models often fail to obtain good global model with heterogeneous clients. numerous studies have been conducted to address the challenge of various types of heterogeneity in federated learning. for example, fedbn used batch normalization to mitigate heterogeneity of label distribution and fedprox used l 2 regularization to control deviance of heterogeneous local models from global model.the recent success of machine learning in various domains is largely attributed to the use of sequential data containing temporal information. 
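the "simply averaging local models" aggregation mentioned above can be sketched as a sample-size-weighted average of client parameters (fedavg-style); the parameter names, client sizes and toy values below are illustrative assumptions rather than details of any cited method.

import numpy as np

def fedavg(client_weights, client_sizes):
    """Weighted average of per-client parameter dicts (plain FedAvg-style aggregation).

    client_weights: list of {param_name: np.ndarray}, identical keys and shapes per client.
    client_sizes:   number of local samples per client, used as the averaging weights.
    """
    total = float(sum(client_sizes))
    return {
        name: sum(w[name] * (n / total) for w, n in zip(client_weights, client_sizes))
        for name in client_weights[0]
    }

# Illustrative: three clients with different amounts of local data.
clients = [{"w": np.full((2, 2), v), "b": np.full(2, v)} for v in (1.0, 2.0, 4.0)]
global_model = fedavg(clients, client_sizes=[10, 10, 20])
print(global_model["w"])   # pulled toward the client holding more data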
for example, large language models heavily rely on the training on the massive natural language data and disease prediction models require a large number of electronic health records. while sequential data contain innate non-iidness due to varying-length, there have been lack of studies that seek to explore using sequential data. in this paper, we discussed the effect of temporal heterogeneity in sequential data on federated learning. we made the following contributions:• we observed that averaging local models where each local model was trained with the same length of sequences quickly converges than local models with varying-length sequences. • we proposed approaches to mitigate temporal heterogeneity due to varying-length sequences based on the observation. existing studies in federated learning have shown that learning good global model from local models is possible without sharing or storing the entire data. therefore, federated learning can effectively address privacy concerns related to training deep learning models on large datasets, which gained attention to federated learning. for example, fedbnused batch normalization to mitigate heterogeneity of label distribution and fedproxused l 2 regularization to control deviance of heterogeneous local models from global model.• we observed that averaging local models where each local model was trained with the same length of sequences quickly converges than local models with varying-length sequences.our aim is to train a global model f * from local models f i under temporal heterogeneity. local training epochs were set to 1 and communication to the global model was conducted after the training of all local models (i. this dataset contains both temporal heterogeneity within clients (varying-length of sequences in a sigle client) and between clients (different distribution of sequence lengths). we refer this dataset as varying-length dataset (vl dataset).the second dataset also has the five subsets showing the same label distribution and the number of images, but all images in the same subset were re-sized into one of . while temporal heterogeneity exists both inside and across clients in the first dataset, temporal heterogeneity only exists across clients in the second dataset. we refer the second dataset as fixed-length dataset (fl dataset).figure1shows test accuracy and average training loss per communication round on vl dataset and fl dataset. we observed that fedavg on fl dataset quickly converges than vl dataset. figure2shows l 2 -distance between each local model and global model in fedavg on the two datasets. while fedavg on vl dataset and fl dataset both shows convergence to the global model, fedavg on fl dataset shows different behavior from fedavg on vl dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/404.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/404.txt new file mode 100644 index 0000000000000000000000000000000000000000..2b0fc662ae145edf9ba3d6cd5ac718f050f77a49 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/404.txt @@ -0,0 +1 @@ +deep learning has established itself as a foundational technique across various applications, primarily due to its capability to learn complex patterns and relationships. one of the crucial aspects influencing the efficacy of deep learning models is the initialization of their weights. proper weight initialization can lead to faster model convergence and enhanced performance . 
while the reliance on large datasets and extensive computational resources is vital for determining feature quality and model versatility, correct initialization can offset some of the dependencies on these resources. this offset is especially crucial in domains with limited data and computational capabilities, underlining the importance of leveraging deep learning's potential without a heavy reliance on large datasets and extensive resources. to cater to such scenarios, fins offer an intuitive approach where neural networks are initialized to imitate specific statistical proper-ties. by doing so, fins provide a more informed starting point, making neural networks less opaque and offering a hint of interpretability in what is often dubbed a "black box." the beauty of fins lies in their simplicity, allowing researchers to directly incorporate domain-specific knowledge into the model's architecture, fostering both efficacy and understandability. moreover, when applied to eeg data for fatigue and drowsiness detection, a fin based on shannon's entropy consistently outperformed baselines, while certain models like vgg proved ineffective. in our study, we train a fin to imitate tsallis entropy. tsallis entropy, a non-extensive generalization of the traditional shannon entropy, measures the uncertainty or randomness of a system. the influence of q on tsallis entropy the distinguishing characteristic of tsallis entropy is its reliance on the parameter q. the shannon entropy becomes a special case of tsallis entropy when q = 1. in the context of our work, τ is initialized with a default value of 1, but like q, it's also trainable within our fin, allowing the network to adjust it adaptively during the learning phase.training to approximate the tsallis entropy using neural networks, we generated synthetic signals with uniform random values between 0 and 1. the output regression values for the fin were the tsallis entropy values, which were computed directly on the synthetic signals using the defined closed-form expression in equation 1. the model that showcased the best performance on the validation set was subsequently chosen for comparison against the tsallis entropy fin-powered networks.we hypothesize that we can achieve enhanced predictive accuracy over traditional baselines by initializing certain neural network weights to imitate tsallis entropy, followed by finetuning during training. unlike the previous experiment, where the input data was fed directly into the fin, here, we utilize a latent representation of the data-a condensed, yet informative, representation derived from previous layers of a deep neural network. our hypothesis posits that by feeding this latent representation through the fin, specifically designed to imitate the tsallis entropy, and further fine-tuning it during training, we can achieve superior recognition performance. crucially, after obtaining the 32-unit latent representation from the penultimate layer, the fin is integrated to compute the tsallis entropy of this representation. we hypothesize that embedding a neural network with the fin, specifically designed to imitate the tsallis entropy, will improve cnp detection performance compared to traditional models.in our experiments, integrating a feature imitating network (fin) designed to imitate tsallis entropy consistently enhanced predictive model performances across diverse domains. 
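for reference, a minimal sketch of tsallis entropy using its standard closed form s_q(p) = (1 - Σ_i p_i^q)/(q - 1), which reduces to shannon entropy as q → 1; the paper's own expression (its equation 1, which also involves the trainable parameter τ) is not reproduced here, and the probability vector is illustrative.

import numpy as np

def tsallis_entropy(p, q):
    """Standard Tsallis entropy S_q(p) = (1 - sum_i p_i**q) / (q - 1); Shannon in the limit q -> 1."""
    p = np.asarray(p, dtype=float)
    p = p[p > 0]
    if np.isclose(q, 1.0):
        return -np.sum(p * np.log(p))          # Shannon entropy as the q -> 1 limit
    return (1.0 - np.sum(p ** q)) / (q - 1.0)

p = np.array([0.5, 0.25, 0.25])
print(tsallis_entropy(p, 0.5), tsallis_entropy(p, 2.0))
print(tsallis_entropy(p, 1.0), tsallis_entropy(p, 1.0 + 1e-6))   # the two values nearly coincide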
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/405.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/405.txt new file mode 100644 index 0000000000000000000000000000000000000000..8bd1aad6f2610779c47990887890d4228884163b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/405.txt @@ -0,0 +1 @@ +large scale vision models over the years have shown impressive results in various computer vision tasks . however, the use of such models in practice is often limited by available computational resources. knowledge distillation is a popular compression technique that can be used to improve both memory and computational time efficiency of large neural networks or neural network ensembles. kd transfers the knowledge from a cumbersome teacher model to a more lightweight student model by training the student to match the teacher behavior on the available data instead of predicting the true labels.domain generalization (dg) focuses on another challenge that often arises in machine learning applications: domain shift. how well do our models perform when training and test data distributions differ? an example can be a vision model encountering a weather condition or a time of day unseen during training. in dg benchmarks, models are trained on a number of source domains and their performance is evaluated on a different target domain that is unavailable during training to emulate the domain shift setting.the focus of this paper lies at the intersection of these two fields. how well does the teacher's ability to generalize to unseen domains is transferred to the student during kd? this question naturally arises in the situation when we have a large model that performs well under domain shift and want to compress it into a smaller model, preserving the ability to generalize to unseen domains. thus the scenario of our interest is when both the teacher training and the distillation are done on source domains, while the performance metric is calculated on the target domain. despite the large amount of existing research on kd and dg, this setting has received little attention in the existing literature.in this work, we apply weight averaging techniques to improve knowledge distillation performance under distribution shift. averaging model weights was proposed to improve neural network training in swa , where weights of multiple models from a single training trajectory are averaged to construct a better generalizing model (not to be confused with ensembling, where predictions of multiple models are averaged). a number of swa modifications were created and successfully applied to domain generalization, such as swad and sma , showing that weight averaging improves out-of-distribution generalization as well. we apply swad and sma to knowledge distillation, showing that they improve kd performance across two domain generalization datasets (pacs and office-home ) and two teacher-student neural network architectures (resnet and vit). in addition, we introduce a simplistic weight averaging strategy by modifying sma to include all the network weights up to the end of the training trajectory and show that it performs on par with its counterparts. the advantage of this strategy over swad and sma is that it does not need to calculate the validation set performance of the student to choose the averaging segment, thus reducing the computational cost of the whole procedure. 
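a minimal sketch of the simplified averaging strategy just described: keep a running average of every checkpoint along the training trajectory, with no validation-based segment selection as in swad or sma; the parameter names and the toy trajectory are illustrative.

import numpy as np

class RunningWeightAverage:
    """Running average of model parameters over a training trajectory.

    Unlike SWAD/SMA, which pick an averaging segment using validation performance,
    this simplified variant just averages every checkpoint seen so far.
    """

    def __init__(self):
        self.avg, self.count = None, 0

    def update(self, params):
        self.count += 1
        if self.avg is None:
            self.avg = {k: v.astype(float).copy() for k, v in params.items()}
        else:
            for k, v in params.items():
                self.avg[k] += (v - self.avg[k]) / self.count   # incremental mean

# Illustrative trajectory of three checkpoints of a single parameter tensor.
averager = RunningWeightAverage()
for value in (1.0, 2.0, 3.0):
    averager.update({"layer.weight": np.full((2, 2), value)})
print(averager.avg["layer.weight"])   # element-wise mean of the trajectory: 2.0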
we coin such knowledge distillation approach weight-averaged knowledge distillation (wakd), constructing a simple yet powerful baseline for future research of knowledge distillation under domain shift. our source code is available at https://github.com/vorobeevich/ distillation-in-dg. kd transfers the knowledge from a cumbersome teacher model to a more lightweight student model by training the student to match the teacher behavior on the available data instead of predicting the true labels. in dg benchmarks, models are trained on a number of source domains and their performance is evaluated on a different target domain that is unavailable during training to emulate the domain shift setting. how well does the teacher's ability to generalize to unseen domains is transferred to the student during kd? this question naturally arises in the situation when we have a large model that performs well under domain shift and want to compress it into a smaller model, preserving the ability to generalize to unseen domains. averaging model weights was proposed to improve neural network training in swa, where weights of multiple models from a single training trajectory are averaged to construct a better generalizing model (not to be confused with ensembling, where predictions of multiple models are averaged). we apply swad and sma to knowledge distillation, showing that they improve kd performance across two domain generalization datasets (pacsand office-home) and two teacher-student neural network architectures (resnet and vit). we coin such knowledge distillation approach weight-averaged knowledge distillation (wakd), constructing a simple yet powerful baseline for future research of knowledge distillation under domain shift.it is important to note that the standard dg setting forbids any use of target domain data during both training and model selection, meaning that both training and validation data come from source domains.weight averaging approaches follow the idea of picking a subset of models from a training trajectory instead of a single model and averaging their weights to create a final better generalizing model.swad evaluates the current model during training with some frequency, picking a segment of training iterations for weight averaging based on validation loss values of the evaluated models.in the next section, we perform knowledge distillation with swad and sma, showing that they produce students with better target domain accuracy in comparison to simply picking an individual model with the best validation performance from a training trajectory.we train teacher models using swad for 5,000 iterations (the same amount as in swad paper), as well as baselines for student models, where a model of student architecture is trained to predict hard labels independently of the teacher using swad. to allow fair comparison, all averaging strategies are compared on the same training trajectories, meaning that for each seed (and for each choice of target domain) both teacher training and distillation are done only once. the distillation without weight averaging (erm) outperforms the baseline of independently trained student networks across both datasets and both architectures, indicating that the teacher's ability to generalize outof-domain is transferred to the student to some extent and kd can perform well under domain shift. 
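the distillation objective referred to throughout can be sketched with the standard soft-label formulation, where the student matches the temperature-softened teacher distribution through a kl term; the temperature and logits below are illustrative assumptions, not the exact loss used in these experiments.

import numpy as np

def softmax(z, T=1.0):
    z = z / T
    z = z - z.max(axis=-1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=-1, keepdims=True)

def kd_loss(student_logits, teacher_logits, T=2.0):
    """Soft-label distillation loss: KL(teacher_T || student_T), scaled by T**2."""
    p_t = softmax(teacher_logits, T)
    p_s = softmax(student_logits, T)
    kl = np.sum(p_t * (np.log(p_t + 1e-12) - np.log(p_s + 1e-12)), axis=-1)
    return (T ** 2) * kl.mean()

teacher = np.array([[4.0, 1.0, 0.0]])
print(kd_loss(np.array([[3.5, 1.2, 0.1]]), teacher))   # small: student matches the teacher
print(kd_loss(np.array([[0.0, 0.0, 4.0]]), teacher))   # large: student disagrees with the teacher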
when compared to the other two strategies, wakd shows similar performance to distillation with swad and slightly outperforms distillation with sma, while being simpler and lacking the need to compute validation performance during training. we showed that weight averaging techniques from the domain generalization literature, namely swad and sma, can improve the performance of distillation students on unseen domains. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/406.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/406.txt new file mode 100644 index 0000000000000000000000000000000000000000..cd208c62f8d66bd3feefbaea09983ad8474a47c2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/406.txt @@ -0,0 +1 @@ +the transformer architecture was introduced in the landmark 2017 paper attention is all you need (vaswani et al., 2023) and traditionally consists of alternating attention and multilayer-perceptron (mlp) sublayers. although initially used for machine translation, transformers have been used across a wide range of tasks, including language modeling (radford et al., 2018;devlin et al., 2019;liu et al., 2018), computer vision (khan et al., 2022;cornia et al., 2020), and image generation (parmar et al., 2018). the widespread deployment of transformers has led to increasing interest in mechanistic interpretability (wang et al., 2022;conmy et al., 2023), which seeks to convert the computations of transformers into human-understandable explanations. some interpretability efforts, such as elhage et al. (2021), focused on attention-only transformers, finding that mlp layers were harder to interpret.this work seeks to supplement those mechanistic interpretability methods by showing that mlp layers in transformers are equivalent to a sum of masked attention heads and therefore can be subjected to interpretability techniques that work on attention-only transformers. in theorem 3 we show that by including a "bias token" akin to the persistent memory vectors in sukhbaatar et al. (2019) and using a slightly unusual attention-masking pattern, an mlp layer of size ℓ can be written as the sum of ℓ attention heads with internal dimension 1. we show in theorem 6 that one can apply this process throughout the entire transformer, converting the typical mlp-and-attention transformer into an attention-only transformer. we then show in theorems 7 and 8 that attention heads can implement row-wise linear transformations and matrix-level activation functions separately. finally, we show in theorem 9 that a slightly augmented network is capable of approximating any masking pattern to within arbitrary error. in theorem 3 we show that by including a "bias token" akin to the persistent memory vectors insukhbaatar et al. for a real-valued function f and matrix x, we will write f (x) for the entry-wise application of that function to the matrix. additionally, softmax can easily play the role of the sigmoid part of silu since softmax() = rownorm() = . let f (x) = α(xv 1 )v 2 be an mlp on m n,d with no biases and one hidden layer of size ℓ, and suppose α is a generalized silu function α(x) = a 1 silu(a 2 x). 
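the remark that softmax can play the role of the sigmoid inside silu can be checked numerically: a two-way softmax over [z, 0] equals sigmoid(z), so silu(z) = z · softmax([z, 0])[0]. the sketch below only verifies this identity; it does not reproduce the full attention-head construction of theorem 3.

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def silu(z):
    return z * sigmoid(z)

def silu_via_softmax(z):
    """SiLU written with a two-way softmax: softmax([z, 0])[0] == sigmoid(z)."""
    logits = np.stack([z, np.zeros_like(z)], axis=-1)
    e = np.exp(logits - logits.max(axis=-1, keepdims=True))
    p = e / e.sum(axis=-1, keepdims=True)
    return z * p[..., 0]

z = np.linspace(-4.0, 4.0, 9)
print(np.allclose(silu(z), silu_via_softmax(z)))   # True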
note the use of layer normalization(ba et al.to prove that there is a transformer t ′ that satisfies x ′ j = x j ⊕ on every sublayer, we proceed by induction.then, in the j = 2 sublayer, the construction in 3 would fail for lack of this bias term, as, without it, the pre-attention matrix (x ′ )w qk (x ′ ) t is 0.theorem 3 shows that attention heads can implement an mlp layer, but can they separately implement the components of an mlp, a linear transformation and an activation function? in this section we show that the answer is yes. this follows immediately from applying theorem 3 to the mlp f (x) = α(xi n )i n = α(x), whose hidden layer is of size ℓ = d. thus, to get the transformation x → α(x), one can combine these two theorems, using d + 1 attention heads to produce sublayer(x) = α(x) -x, in which case x → x + sublayer(x) = α(x). then for any mask matrix λ 2 satisfying λ 1 ≤ λ 2 entrywise, there is a family of masked attention heads h ω , parameterized by ω ∈ r, that use λ 2 as their mask matrix and such that h.our first task is to show that the attention pattern a 1 := msoftmax(xw qk x t + ωλ 1 , λ 2 ) converges to the corresponding attention pattern a 2 := msoftmax(xw qk x t , λ 1 ) entrywise as ω → ∞. first is the quantity of attention heads: we use one attention head per dimension of the hidden layer, which can easily increase the number of attention heads by several orders of magnitude, partially offset by the new attention heads having smaller internal dimension. for example, each layer of gpt-3 has 96 attention heads with internal dimension 128(brown et al. however, this is itself a useful new perspective on the difficulty of interpreting mlp layers: mlp layers in a model like gpt-3 are larger than attention layers by a 2:1 margin if one measures by number of parameters but by 500:1 if one measures by number of attention heads. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/407.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/407.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc751e7f87528f8944846631ba1d935d24b2a911 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/407.txt @@ -0,0 +1 @@ +federated learning (fl) is a widely studied distributed machine learning paradigm that enables participants to collaboratively learn a shared model f without collecting data x and output y from local clients . however, fl faces the challenge of heterogeneity in real-world scenarios, such as cross-domain caused by cross-region, cross-industry, and other factors (for different clients, data label distribution of y is the same, but the input features of data x are different), and different data collection processes lead to quantity skew. additionally, varying technical levels or computing capabilities result in the use of different model architectures . in this work, we focus on addressing the cross-domain problem and further attempt to explore solutions for quantity and model architecture heterogeneity.personalized fl (pfl) offers multiple paradigms for handling heterogeneity, such as parameter decoupling and knowledge distillation (kd) , which are often employed in situations where model parameters need flexible updates. some works focus on parameter decoupling by separating model feature extractors and task heads , channels , and adapters . 
for instance, fe-drod establishes a dual-head architecture on the client to learn generic representations from different domains and simultaneously improves the global generic and personalized performance of the model by introducing regularization terms. however, many existing works negatively impact the training of cross-domain scenarios, as shown in table 1. we find that sharing task header parameters through partialavg yields better performance, as seen in table 2. the kd-based fl method aims to transfer knowledge, such as model parameters , generative models , data embedding , or prototypes , to guide parameter updating. fedgen improves the model's generalization across various domains by sharing generators and generating latent space data on each client. however, our experiments reveal that setting inappropriate generators or sharing unsuitable model parameters can adversely affect model performance. fedpac performs refined classifier updates among all clients by sharing global feature representation prototypes and addresses cross-domain issues through feature alignment. however, computing prototypes incurs high computational costs and may expose statistical information of local datasets. kd also typically requires additional datasets , which complicates dataset preparation and is not conducive to real-world scenarios. in contrast, our approach only requires sharing task header parameters and performing kd on each client's local dataset.we propose a novel algorithm named unideal, standing for "curriculum knowledge distillation federated learning" by investigating cross-domain scenario. building upon the concept of parameter decoupling to address cross-domain inputs and enable further model heterogeneity, our key insight is that such training tasks are particularly challenging at the beginning of the training process. to overcome this difficulty, we employ curriculum learning (cl) based kd loss, which encourages clients and the server to find the right training direction through mutual evaluation. by aligning different domains from easy to hard, our approach achieves better convergence and effectively tackles the challenges of cross-domain scenarios. results in sec.3.2 show that in the heterogeneous scenario of cross-domain, unideal achieves the best results in terms of accuracy, communication overhead and running time compared with other sota (state-of-the-art) baselines. at the same time, it achieves a convergence rate of o( however, fl faces the challenge of heterogeneityin real-world scenarios, such as cross-domain caused by cross-region, cross-industry, and other factors (for different clients, data label distribution of y is the same, but the input features of data x are different), and different data collection processes lead to quantity skew.personalized fl (pfl)offers multiple paradigms for handling heterogeneity, such as parameter decoupling and knowledge distillation (kd), which are often employed in situations where model parameters need flexible updates. the kd-based fl method aims to transfer knowledge, such as model parameters, generative models, data embedding, or prototypes, to guide parameter updating. however, our experiments reveal that setting inappropriate generators or sharing unsuitable model parameters can adversely affect model performance.here, u k represents the model parameters excluding the header, and v k denotes the parameters of the head. 
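a minimal sketch of the decoupling just described, where u_k (everything except the head) stays on the client and only the head parameters v_k are shared and averaged; the parameter-name prefix and the uniform average are illustrative assumptions, and the curriculum-based kd loss of unideal is not shown.

import numpy as np

def split_params(params, head_prefix="head."):
    """Parameter decoupling: u_k = everything except the head, v_k = the task head."""
    v_k = {k: p for k, p in params.items() if k.startswith(head_prefix)}
    u_k = {k: p for k, p in params.items() if not k.startswith(head_prefix)}
    return u_k, v_k

def aggregate_heads(client_params, head_prefix="head."):
    """Share only the task-head parameters v_k across clients (uniform average here)."""
    heads = [split_params(p, head_prefix)[1] for p in client_params]
    return {k: np.mean([h[k] for h in heads], axis=0) for k in heads[0]}

clients = [
    {"encoder.w": np.ones((2, 2)) * i, "head.w": np.ones(2) * i} for i in (1, 2, 3)
]
shared_head = aggregate_heads(clients)
print(shared_head["head.w"])   # averaged head parameters; encoder weights never leave the client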
this greatly reduces communication overheads and privacy leak risks.in addition to addressing data heterogeneity, unideal can also handle fl challenges with heterogeneous model architectures a k , as it only requires transmission of header parameters during communication process. in homogeneous settings, we use the same mlp model, while in heterogeneous model architecture settings, we generate mlp models randomly with different layers and hidden unit numbers for each client. (a) fedavgis a pioneering fl method; (b) fedrep, fed-babu, and fedrodfocus on parameter decoupling; (c) fedproto, fedkd, fedpcl, fedpac, and fed-genemploy knowledge distillation with model parameters or prototypes. partialkd uses global model head parameters to guide local head parameter updates through knowledge distillation. as observed from table2: (1) compared with local training, fedrep, which only transmits the model feature extractor parameters, results in a greater performance drop than fe-davg, which transmits all model parameters. (2) partialkd, which uses global header parameters to guide model parameter updates only through knowledge distillation, performs better than the other two partialavg algorithms that directly replace local header parameters at each round. this demonstrates the effectiveness of the adjustable teacher-student mutual evaluation curriculum learning approach in balancing both model performance and communication efficiency, making it a superior solution for cross-domain scenarios. the final model results show that in heterogeneous model architecture fl scenarios, unideal-hete can indeed achieve performance improvements consistent with local(hete), demonstrating the effectiveness of carefully designed models in enhancing fl outcomes. notice that for any approximate solution w t+1 k satisfies l(w t+1 k ; w t ) ≤ l(w t ; w t ), then l(w t+1 k ; w t ) ≤ l(w t+1 k ; w t ) ≤ l(w t ; w t ) = l(w t ; w t ) which implies that a solution optimizing l also satisfied l. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/408.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/408.txt new file mode 100644 index 0000000000000000000000000000000000000000..470088c2c8652f8e3c63fb86bde928ebc39536b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/408.txt @@ -0,0 +1 @@ +regression is sensitive to dataset shift , where the training and testing sets come from different distributions. dataset shift can arise due to sample selection bias, where a sample is non-uniformly chosen from a population for training a model. this type of bias can cause a subset of training samples to be partially observed, where any of the covariates or outcome of a sample is missing, or completely unobserved. consequently, the performance of the model after training on this biased set will be degraded when the model is deployed. most approaches such as , , and handle the missing-atrandom (mar) setting, where the selection of training samples is assumed to be independent from the outcome given the covariates. however, these approaches cannot properly account for the missing-not-at-random (mnar) setting, where the selection of training samples is assumed to not be independent from the outcome given the covariates.in this work, we focus on the problem of mnar sample selection bias on the outcome. 
as a motivating example, consider the relationship between sat score (feature) and the amount of scholarship offered by a certain university (outcome), where some students have missing values of scholarship. there could be some hidden mechanism behind the missing outcomes. for instance, amounts of scholarship offered to students who have not declared their majors are not collected. when the undeclared students are omitted from the training, a biased model is produced and could be very different from the ground truth model that would have been trained had scholarship amounts of all students been collected. however, under mnar sample selection on the outcome, we leverage the observed feature information of records with missing outcomes to train a model that is close to the ground truth model.the heckman selection model1 is a nobel prize winning approach that addresses mnar sample selection bias on the outcome. the method involves two equations: the prediction equation and the selection equation. the prediction equation describes the relationship between the covariates and the outcome of interest. the selection equation specifies the probability that a sample is selected in the training data. estimation of the selection equation requires an exclusion restriction, where the selection equation includes one or more variables that are covariates. to handle mnar sample selection bias on the outcome, the heckman model considers the respective noise terms of the prediction and selection equations, which follow a bivariate normal distribution.although the presence of an exclusion restriction avoids multicollinearity for the prediction model , the process to identify a valid exclusion restriction is often difficult in practice. this is first due to the lack of clear theoretical knowledge on which selection features should be included in the prediction model . moreover, using the heckman selection model with an invalid exclusion restriction can lead to non-robust regression on the biased training set . choosing from features that affect the selection, one way to address these challenges is to search through all combinations of selection features to find a suitable set of prediction features for the heckman selection model. however, this search process becomes computationally expensive as we deal with a large number of selection features in real-world data. choosing from features that affect the selection, one way to address these challenges is to search through all combinations of selection features to find a suitable set of prediction features for the heckman selection model.in this work, we present the heckman selection model with feature assignment (heckman-fa) as a framework that finds a suitable assignment of prediction features for the heckman model to robustly handle mnar sample selection bias on the outcome. second, using the parameters of the trained assignment function, heckman-fa extracts prediction features for the heckman model based on goodness-of-fit and the correlation between the noise terms of the prediction and selection equations. the heckman selection model assumes that the selection features consist of every prediction feature and additional features that do not affect the outcome. the framework then uses this assignment of prediction features to run the heckman model and compute the mean absolute error (mae) of predictions on d s when using the heckman selection model. 
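for context, a minimal sketch of the classic heckman two-step correction that this framework builds on: a probit selection equation, the inverse mills ratio computed from it, and an outcome regression on the selected sample augmented with that ratio. the data-generating process, variable names and the use of statsmodels are illustrative assumptions; the gumbel-softmax feature-assignment step of heckman-fa is not reproduced.

import numpy as np
import statsmodels.api as sm
from scipy.stats import norm

rng = np.random.default_rng(0)
n = 5000

# Selection features: the prediction feature x plus an extra instrument z_extra that
# affects selection but not the outcome (the exclusion restriction).
x = rng.normal(size=n)
z_extra = rng.normal(size=n)
eps, u = rng.multivariate_normal([0.0, 0.0], [[1.0, 0.6], [0.6, 1.0]], size=n).T  # correlated noise

y = 1.0 + 2.0 * x + eps                                # prediction (outcome) equation
selected = (0.5 + 1.0 * x + 1.5 * z_extra + u) > 0     # selection equation; corr(eps, u) makes it MNAR

# Step 1: probit of the selection indicator on the selection features.
Z = sm.add_constant(np.column_stack([x, z_extra]))
probit = sm.Probit(selected.astype(int), Z).fit(disp=0)
zb = Z @ probit.params
imr = norm.pdf(zb) / norm.cdf(zb)                      # inverse Mills ratio

# Step 2: OLS on the selected sample only, with the inverse Mills ratio as an extra regressor.
X = sm.add_constant(np.column_stack([x[selected], imr[selected]]))
ols = sm.OLS(y[selected], X).fit()
print(ols.params)   # intercept and slope should land near 1.0 and 2.0 despite the biased sample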
using the selection and extracted prediction features, we run the heckman model to fit a robust prediction model under mnar sample selection bias on the outcome.1 kth selection feature is assigned 0 kth selection feature is not assigned(12)in general, an assignment function determines which of the k selection features are also prediction features. following the steps of, a sample z •k , the kth column of z, is drawn from a categorical distribution with class probabilities π 1k and π 2k , where π 2k (π 1k ) is the probability that the kth selection feature is (not) assigned to the prediction model.i , s i = 0)} n i=m+1 , selection feature matrix x (s) , number of selection features k, estimated parameters π, correlation threshold number of gumbel-softmax samples b output: prediction feature matrix x (p) 1: r 2 * a ← -∞ 2: for b iterations do 3:.(17).0082 lower than naive on the crime and compas datasets, respectively.in this paper, we introduced heckman-fa, a novel datadriven approach that obtains an assignment of prediction features for the heckman selection model to robustly handle mnar sample selection bias. heckman-fa finds prediction features for the heckman model by drawing a number of gumbel-softmax samples using the learned probability of assignment for each selection feature. in other words, when ranking the selection features based on their correlation with the outcome using the crime dataset, there is no set of j features from the ranking that ensures the robustness of heckman-c under mnar sample selection bias. this indicates that when assigning prediction features for the heckman model, ranking selection features based on π2k after training ψ is more effective than ranking based on the correlation of features with the outcome. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/409.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/409.txt new file mode 100644 index 0000000000000000000000000000000000000000..312f1e44c38a6df2f2173cb1731c803771bbe781 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/409.txt @@ -0,0 +1 @@ +the problem of energy efficiency and sustainability of machine learning (ml) systems is becoming increasingly important within the scientific community , as also highlighted by the onu's sustainable development goals (e.g., goal 9 or goal 12) . generally, the energy consumption of ml models is directly related to the training phase time complexity. this means that the longer it takes to train a model, the more energy is required by the system. for this reason, predicting a priori the training time of an ml model will be a significant advance in such direction, enabling the automatic selection of the efficient ml model. the training time prediction of ml models also becomes highly relevant in the context of mlops and, in general, continuous learning or learning-enabled systems, where the ml model is constantly re-trained with new data . as highlighted in , engineering such kind of system is always very challenging since the development processes are often ad-hoc and specific to the use case. for this reason, having an a priori estimation of the training time can help in standardizing some phases of the development process in contexts where, for instance, the computational power for training the model is very limited (e.g.,, iot devices ). 
in addition, selecting the most efficient ml model can help stakeholders satisfy other relevant quality properties of software architectures, like performance .in this paper, we present the work we are conducting towards a prediction of ml training time. in particular, we present an extensive empirical evaluation of the full parameter time complexity (fptc) approach proposed by zheng et al. in , which is, to the best of our knowledge, the only approach so far that formulates the ml training time as a function of dataset's and ml model's parameters. specifically, differently from what has been done in , where the authors use only one dataset, we use the fptc approach to predict the training time of a logistic regression and random forest classifier on a heterogeneous set of data, and we compare the predicted time with the actual training time of the method, highlighting the main strengths and weaknesses of the approach 1 .the paper is structured as follows: in section 2 we discuss some related works in the context of training time prediction; section 3 describes in detail the fptc approach; section 4 presents the conducted experiment and the research questions we want to answer; section 5 shows the experiment's results and discuss them w.r.t. the research questions; finally section 6 presents some future works and concludes the paper. in particular, we present an extensive empirical evaluation of the full parameter time complexity (fptc) approach proposed by zheng et al. in, which is, to the best of our knowledge, the only approach so far that formulates the ml training time as a function of dataset's and ml model's parameters.in this section, we describe in detail the fptc methodwhere the training time of several ml models is defined as a function of different parameters of the dataset, of the model itself, and of a coefficient (ω) that reflects the influence given by the execution environment on the actual training time of the model.where n is the number of rows of the dataset, v is the number of dataset's features, m is the number of classes of the dataset, q is the number of model's iterations during the training phase, and ω logreg is the slope of a regression function computed comparing the results of the first part of the equation 1 with the actual training time of a logistic regression model using a subset of the training datasets. ω rf is again defined as the slope of a regression function computed comparing the results of the first part of the equation 2 with the actual training time of a random forest classifier on a set of synthetic datasets. after computing the training times and the fptc predictions for each sub-dataset d ′′ s , the training times and the fptc predictions are used to train a linear regression model and to get its slope ω.to answer the rq2, we conducted a set of experiments to predict, using the fptc method, the training time of a logistic regression and a random forest classifier using 7 heterogeneous datasets.from this analysis of the slope variations, we can conclude how, differently from what is stated in, the slopes do not change only when the execution environment changes, but they are also related to the number of features of the dataset used to compute them, in particular when using a random forest classifier.figures2and3report the errors in the predictions of the fptc method compared to the actual training time of the logistic regression and random forest classifier, respectively, for all the datasets described in section 4. 
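the fptc idea can be sketched as: compute a complexity term from dataset and model parameters, calibrate the slope ω against measured training times on sub-datasets, and use it to predict the time for a new dataset. the n·v·m·q term below follows the parameters listed for logistic regression, but the exact fptc expression and the timings are illustrative assumptions.

import numpy as np

def logreg_fptc_term(n, v, m, q):
    """Complexity term for logistic regression training time, proportional to n * v * m * q
    (rows, features, classes, iterations); the exact FPTC expression may include extra factors."""
    return n * v * m * q

# Calibration: measure actual training times on a few sub-datasets and fit the slope omega.
terms = np.array([logreg_fptc_term(n, v=50, m=2, q=100) for n in (1_000, 5_000, 10_000)])
measured_seconds = np.array([0.12, 0.61, 1.18])           # illustrative timings
omega = np.polyfit(terms, measured_seconds, deg=1)[0]     # slope of the linear regression

# Prediction for a larger dataset in the same execution environment.
predicted = omega * logreg_fptc_term(n=50_000, v=50, m=2, q=100)
print(f"predicted training time: {predicted:.2f} s")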
concerning the logistic regression classifier, it can be seen from figure2how the fptc method can predict the training time of the model under some datasets while it fails in the prediction of others. in particular, the fptc method can predict the training time of the logreg under the antivirus dataset (with an rmse and mape almost equal to 0 using the slope computed with 9,009 features of the synthetic dataset), arcene (with an rmse and mape almost equal to 0 using the slope computed with 6,006 features), compas (with an rmse and mape almost equal to 0 using the slope computed with 4,004 features), and dexter (with an rmse and mape almost equal to 0 using the slope computed with 501 features). from this table, it can be seen how the fptc method tends to underestimate the real training time, especially in adult (with a delta of almost 2 seconds between the actual training time and the predicted one), and aps (with a delta of almost 50 seconds between the actual training time and the predicted one). in addition, it can be seen from the charts that the fptc method can always predict real training time under a specific slope value achieving a value of zero for both rmse and mape. the only dataset on which the fptc method is not able to correctly predict the training time is the aps dataset, with the lowest mape of around 15 points.from this analysis, we can conclude how the fptc method is able to predict the training time of a logistic regression and random forest classifier under certain circumstances (i. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/41.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/41.txt new file mode 100644 index 0000000000000000000000000000000000000000..c707805e021caadddc855efe2d75d6a52e18d5b7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/41.txt @@ -0,0 +1 @@ +submodularity is an important property that models a diminishing return phenomenon, i.e., the marginal value of adding an element to a set decreases as the set expands. it has been extensively studied in the literature, mainly accompanied with maximization or minimization problems of set functions (nemhauser and wolsey 1978;khuller, moss, and naor 1999). this is called submodularity. mathematically, a set function f : 2 v → r is submodular if for any two sets a ⊆ b ⊆ v and an element v ∈ v \b, we have. such property finds a wide range of applications in machine learning, combinatorial optimization, economics, and so on. (krause 2005;lin and bilmes 2011;li et al. 2022;shi et al. 2021;kirchhoff and bilmes 2014;gabillon et al. 2013;kempe, kleinberg, and tardos 2003;wang et al. 2021).further equipped with monotonicity, i.e., f (a) ≤ f (b) for any a ⊆ b ⊆ v , a submodular set function can be maximized by the cardinality constrained classic greedy algorithm, achieving an approximation ratio up to 1 -1/e (nemhauser and wolsey 1978) (almost the best). since then, the study of submodular functions has been extended by a variety of different scenarios, such as non-monotone scenario, adaptive scenario, and continuous scenario, etc (feige, mirrokni, and vondrak 2007;golovin and krause 2011;das and kempe 2011;bach 2019;shi et al. 2019).the above works focus on the set functions. in real applications, the order of adding elements plays an important role and affects the function value significantly. recently, the submodularity has been generalized to sequence functions (zhang et al. 
2015;tschiatschek, singla, and krause 2017;streeter and golovin 2008;zhang et al. 2013). considering sequences instead of sets causes an exponential increase in the size of the search space, while allowing for much more expressive models.in this paper, we consider that the elements are networked by a directed graph. the edges encode the additional value when the connected elements are selected in a particular order. such setting is not given a specific name before. to distinguish from the classic submodularity, we in this paper name it as networked submodularity (net-submodularity for short). more specifically, the net-submodular function f (σ) is a sequence function, which is not submodular on the induced element set by σ but is submodular on the induced edge set by σ. the net-submodularity is first considered in (tschiatschek, singla, and krause 2017), which mainly focuses on the case where the underlying graph is a directed acyclic graph. general graphs and hypergraphs are considered in (mitrovic et al. 2018).recently, robust versions of the submodular maximization problem have arisen (orlin, schulz, and udwani 2018;mitrovic et al. 2017;bogunovic et al. 2017;sallam et al. 2020) to meet the increasing demand in the stability of the system. the robustness of the model mainly concerns with its ability in handling the malfunctions or adversarial attacks, i.e., the removal of a subset of elements in the selected set or sequence. sample cases of elements removal in real world scenarios include items sold out or stop production in recommendation (mitrovic et al. 2018), web failure of user logout in link prediction (mitrovic et al. 2019) and equipment malfunction in sensor allocation or activation (zhang et al. 2015). in this paper, we take one step further and study a new problem of robust sequence networked submodular maximization (rosenets). we show an example in figure 1 to illustrate the importance of rosenets problem. see in figure 1. suppose all edge weights in sequence a are 0.9, in sequence b are 0.5, in sequence c are 0.4. let the net-submodular utility function f of the sequence be the summation of all the weights of the induced edge set by the sequence. such utility function is obviously monotone but not submodular.1 however, it is submodular on the edge set. now we can see, the utility of sequence a, b and c are 2.7 (largest), 2.5 and 2.4 respectively. we can easily check that if one node would be removed in each sequence, the worst utility after removal of sequence a, b and c is 0.9, 1.0 (largest), and 0.8. if we remove two nodes in each sequence, the utility of a, b and c becomes 0, 0, and 0.4 (largest). with different number of nodes removed, the three sequences show different robustness. existing non-robust algorithm may select sequence a since it has the largest utility. however, sequence b and c are more robust against node removal.given a net-submodular function and the corresponding network, the rosenets problem aims to select a sequence of elements with cardinality constraints, such that the value of the sequence function is maximized when a certain number of the selected elements may be removed. as far as sequence functions and net-submodularity are concerned, the design and analysis of robust algorithms are faced with novel technical difficulties. the impact of removing an element from a sequence depends both on its position in the sequence and in the network. this makes the existing robust algorithms inapplicable here. 
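the utility and robustness notions in the example can be written down directly: f(σ) sums the weights of the edges induced by the order of σ, and robustness looks at the worst utility over all removals of τ elements. the chain graph below is an illustrative stand-in that reproduces the "2.7 before removal, 0.9 after one removal" behaviour of sequence a, not the exact graphs of the example.

from itertools import combinations

def sequence_utility(seq, edge_weights):
    """f(sigma): sum of the weights of directed edges (u, v) with u appearing before v in sigma."""
    pos = {v: i for i, v in enumerate(seq)}
    return sum(w for (u, v), w in edge_weights.items()
               if u in pos and v in pos and pos[u] < pos[v])

def worst_case_utility(seq, edge_weights, tau):
    """Utility after an adversary removes the worst possible tau elements from the sequence."""
    return min(
        sequence_utility([v for v in seq if v not in set(removed)], edge_weights)
        for removed in combinations(seq, tau)
    )

# Illustrative chain a -> b -> c -> d with equal edge weights of 0.9.
edges = {("a", "b"): 0.9, ("b", "c"): 0.9, ("c", "d"): 0.9}
seq = ["a", "b", "c", "d"]
print(sequence_utility(seq, edges))            # 2.7
print(worst_case_utility(seq, edges, tau=1))   # 0.9: removing b or c destroys two edges at once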
it is unclear what conditions are sufficient for designing efficient robust algorithm with provable approximation ratios for rosenets problem. we aim to take a step for answering this question in this paper. our contributions are summarized as follows.1. to the best of our knowledge, this is the first work that considers the rosenets problem. combining robust optimization and sequence net-submodular maximization requires subtle yet critical theoretical efforts. 2. we design a robust greedy algorithm that is robust against the removal of an arbitrary subset of the selected sequence. the theoretical approximation ratio depends both on the number of the removed elements and the network topology. 3. we conduct experiments on real applications of recommendation and link prediction. the experimental results demonstrate the effectiveness and robustness of the proposed algorithm, against existing sequence submodulargiven a net-submodular function and the corresponding network, the rosenets problem aims to select a sequence of elements with cardinality constraints, such that the value of the sequence function is maximized when a certain number of the selected elements may be removed.given a directed graph g = (v, e), a non-negative monotone submodular set function h : 2 e → r ≥0 , and a parameter k, the objective is to select a non-repeating sequence σ of k unique elements that maximizes the objective function:. given a directed graph g = (v, e), a networked submodular function f (•) and robustness parameter τ , the rosenets problem aims at finding a sequence σ such that it is robust against the worst possible removal of τ nodes:. let the edge weights of (a, b), (b, c), (b, e), (b, f ) be 0. let the net-submodular utility function f of the selected sequence be the summation of all weights of the induced edge set by the sequence.step 2 (the second while loop), we select another sequence σ 2 of kτ elements from v \σ 1 , again in a greedy manner as in sequence greedy. when k = 5 and τ = 2, the rosenets algorithm will select the sequence σ 1 = a, b and sequence σ 2 = c, d, g . for convience, we denote f (v|σ) and f (σ ′ |σ) as the marginal gain of attending v and σ ′ to sequence σ respectively.we compare the performance of our algorithms rosenets to the non-robust version sequence greedy (sequence for short)(mitrovic et al. suppose that the sequence selected by the sequence greedy algorithm is σ with |σ| = k and that there exists a sequence. the above inequalities hold due to the fact that the function f (•) is monotone, sequence greedy algorithm select the edge of largest marginal value each time, and adding σ * (v, k, 0) to σ 1 ⊕ σ i-1 2 may add at most d in k edges.note the sequence σ * (v, k, τ ) -z does not contain any element in z and has kτ ′ elements. by the monotonicity of function f (•) and the above inequality, we have f (σ -z τ (σ)) = f ((σ 1 -z 1 τ (σ)) ⊕ (σ 2 -z 2 τ (σ)) ≥ f (σ 1 -z 1 τ (σ)) = f (σ τ2 1 ) ≥ τ 2 max{h(e in z ), h(e out z )} ≥ qf (σ 2 ) f (σ -z τ (σ)) = f ((σ 1 -z 1 τ (σ)) ⊕ (σ 2 -z 2 τ (σ)) ≥ f (σ 2 -z 2 τ (σ)) ≥ (1 -(d in + d out )q)f (σ 2 ) then we have inequality 1 as below.(k-τ 2 -1)d in -1 e k-τ -τ 2 -1 d in (k-τ 2 -1) -q 1-e -(1-1/k) τ 2 α and ℓ 2 (q) = (1 -(d in + d out )q) e k-τ -τ 2 -1. note when q = 1 β , ℓ 1 (q) = ℓ 2 (q) = β(e k-τ -τ 2 -1 (k-τ 2 -1)d in -1) e k-τ -τ 2 -1 d in (k-τ 2 -1) -β 1-e -(1-1/k) τ 2 α. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/410.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/410.txt new file mode 100644 index 0000000000000000000000000000000000000000..c023b47118f81c1ba43dd8b71bcd098ed2bc3e4b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/410.txt @@ -0,0 +1 @@ +data mining has been widely adopted for analysis and knowledge discovery in databases. this process involves data management, data preprocessing, modeling, and results in inferences and the extraction of latent data patterns. early warning systems based on data mining have enabled applications to perform risk analysis, monitoring and warning, and a response capability. using existing knowledge or a set of indicators can assist domains such as healthcare and education to design decision support systems. radanliev et al. used data mining to investigate the scientific research response to the covid-19 pandemic and to review key findings on how early warning systems developed in previous epidemics responded to contain the virus. traditional unsupervised learning techniques discover underlying patterns for knowledge discovery in unlabelled data using association rule mining and clustering techniques. supervised learning strategies learn from labeled data to classify or predict patients' physical activities and vital signs. however, these methodologies are highly dependent on data and can only observe the data and present possible decisions in response; they cannot take actions based on observations. reinforcement learning (rl) deploys a learning agent in an uncertain, complex environment that explores or exploits the environment with its actions and learns the data based on its experience. this allows the learning agent to gain rewards based on learning and its actions. rl is used in dynamic domains such as stock-market trading, traffic prediction, and weather forecasting to tackle decision-making problems using agent-environment interaction samples and potentially delayed feedback, which could also be applied to healthcare applications. in the healthcare domain, chronically diseased patients, such as those with parkinson's disease, and critical care patients often require long-term dynamic treatment regimes with the timely intervention of clinicians to avoid unwanted outcomes. zeng et al. proposed an rl algorithm to optimize post-operation warfarin anticoagulation dosage. the rl results outperformed conventional clinical practice using rule-based algorithms.
existing patient monitoring applications based on rl primarily focus on prescribing the timing and dosage of medications so that patients are administered take the right medication at the right time . chen et al. described probabilistic machine learning models such as rl using the analogy of an icu clinician (learning agent) to monitor a patient (environment) via actions like ventilation and observing the changes in the environment (patient's state) to make subsequent decisions that achieve the goal of discharging the patient successfully.the research problem addressed here is that of being able to monitor the predicted state of an environment and take appropriate actions to avoid an emergency. traditional supervised learning strategies can classify or predict based on their training but cannot monitor and alert the appropriate team for timely interventions. to assist with tracking the environment state and monitor certain parameters we have designed a virtual generic forecasting environment with observation space, actions, and rewards policy with multiple deep learning agents. deploying a single learning agent to monitor all the parameters would complicate the environment as there are different thresholds set up for each of the parameters in an environment. for example, the learning agents learn different threshold levels of each vital sign in modified early warning scores (mews) based on previous iterations and rewards being accumulated for its actions. well-trained rl agents are capable of monitoring a patient's vital signs such as heart rate, respiration rate, and temperature, and alerting the corresponding clinical team if the vital signs fall outside any of the predefined thresholds .modeling forecasting applications, such as vital signs prediction, traffic prediction, and weather prediction, as an rl environment can enable rl agents to monitor tasks. rl agents can learn from historical data and interact with the environment to make real-time decisions based on the predicted states or actions. this approach can be used to develop intelligent monitoring systems that can adapt to changing conditions, optimize actions, and make informed decisions in complex and dynamic environments. by using rl for monitoring, it is possible to automate and optimize monitoring processes in various domains, leading to more efficient and effective monitoring outcomes. in this study, the rl environment is configured with a deep learning model to predict future states, which are then monitored by an rl agent.the aim of this research is to create a multi-agent framework that utilizes deep reinforcement learning (drl) agents to monitor and learn data patterns for various parameters. each parameter will have its own drl agent, responsible for monitoring, learning, and alerting respective teams if the parameters deviate from predefined thresholds as shown in fig. 1. conventional rl methodology is an agent performing a task for a transition from one state to another state, where this action might reward the agent either positively or negatively. in this study, a novel approach is taken to assign rewards so that the rl agents learn data patterns. an agent gets rewarded for predicting an action and performing the action in its current state. the rewards are designed in such a way that the learning agents are penalized for predicting the wrong actions. to learn behaviors we follow the reward-is-enough hypothesis being that the learning agent always tries to maximize the rewards based on their previous actions. 
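the reward policy described above, where an agent is rewarded for predicting the right action relative to predefined thresholds and penalized otherwise, can be sketched as follows; the heart-rate thresholds and the three-band action space are illustrative assumptions, not the actual mews bands used in the study.

def band(value, thresholds):
    """Map a forecasted vital sign onto a severity band: 0 = normal, 1 = below, 2 = above."""
    low, high = thresholds
    if value < low:
        return 1
    if value > high:
        return 2
    return 0

def reward(forecast, action, thresholds):
    """+1 when the agent's alert action matches the band of the forecasted state, -1 otherwise."""
    return 1.0 if action == band(forecast, thresholds) else -1.0

# Illustrative heart-rate thresholds (beats per minute) and a short forecast horizon.
hr_thresholds = (60, 100)
forecasts = [72, 118, 54]
actions = [0, 2, 0]    # agent's chosen actions: no alert, high alert, no alert
print([reward(f, a, hr_thresholds) for f, a in zip(forecasts, actions)])   # [1.0, 1.0, -1.0]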
the contributions of this study are as follows:• a generic monitoring environment accommodates multiple agents to monitor the states of a forecasting environment.• proposed a model-free gaming agent to learn the existing knowledge and monitor underlying data patterns adaptively. • transfer learning approach for time series forecasting applications such as patients' health status, traffic, and weather using the multi-agents in the pdrl environment. the paper is organized as follows: section ii presents the related works in the rl community to learning human behavior patterns and application in the healthcare domain. research problem formulation and the proposed multi-agent pdrl framework have been detailed in section iii. in section iv, the proposed methodology is evaluated on 10 different subject vital signs, and baseline models are discussed. in section v, the results of the proposed approach are compared with baseline models, and hyper-parameter optimization of the learning rate and discount factor are discussed. the comparison between the supervised approach and the rl approach is discussed in section vii. section viii concludes the paper with limitations and future work. reinforcement learning (rl) deploys a learning agent in an uncertain, complex environment that explores or exploits the environment with its actions and learns the data based on its experience,.described probabilistic machine learning models such as rl using the analogy of an icu clinician (learning agent) to monitor a patient (environment) via actions like ventilation and observing the changes in the environment (patient's state) to make subsequent decisions that achieve the goal of discharging the patient successfully. to assist with tracking the environment state and monitor certain parameters we have designed a virtual generic forecasting environment with observation space, actions, and rewards policy with multiple deep learning agents.modeling forecasting applications, such as vital signs prediction, traffic prediction, and weather prediction, as an rl environment can enable rl agents to monitor tasks.to formulate this problem, a customized rl forecasting environment needs to be configured with an innovative reward policy that links the current state and agent actions to learn data patterns while maximizing their rewards.2: pdrl monitoring framework a t ϵa denoting the action taken by the agent at time t, p is a markovian transition function as shown in equation1, which denotes how the agent transits from state s to state s ′ while performing an action a, r is a reward function, which returns an immediate reward r(s, a) for the action a taken in a state s defined in equation2, γ is a discount factor that focuses on immediate rewards instead of future rewards.in this section, forecasting applications are modeled as a customized rl forecasting environment based on mdp has been designed with observation space s, and action space a for learning agents to take appropriate actions, and it rewards r for the agents' actions. furthermore, considering a single agent to monitor the multiple states of a complex environment might lead to a sparse rewards challenge where the environment rarely produces a useful reward and limits agent learning.3) action space: defining actions for the rl agent in the environment is the most critical part of the rl process as it directly reflects the capacity of rl agents in adaptive learning. 
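Since the environment is formulated with the usual MDP ingredients (states, actions, a transition function, a reward function, and a discount factor γ), a single tabular Q-learning update makes the role of γ explicit. The sketch below is generic textbook Q-learning, shown only to illustrate the components named above; the paper's agents use deep function approximators rather than a lookup table, and the state names and hyper-parameter values here are assumptions.

```python
from collections import defaultdict

# Generic tabular Q-learning step, purely illustrative of the MDP components.
Q = defaultdict(float)           # Q[(state, action)] -> estimated return
alpha, gamma = 0.1, 0.9          # learning rate and discount factor (assumed values)

def q_update(state, action, reward, next_state, actions):
    """One temporal-difference step:
    Q(s,a) <- Q(s,a) + alpha * (r + gamma * max_a' Q(s',a') - Q(s,a))."""
    best_next = max(Q[(next_state, a)] for a in actions)
    td_target = reward + gamma * best_next
    Q[(state, action)] += alpha * (td_target - Q[(state, action)])

# Example with the alert levels 0-3 as the action space.
actions = [0, 1, 2, 3]
q_update(state="hr_111_129", action=2, reward=1.0, next_state="hr_51_101",
         actions=actions)
print(Q[("hr_111_129", 2)])      # -> 0.1 after a single positive reward
```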
the proposed framework utilizes deep learning for forecasting the states of the rl environment, while the rl agent monitors the forecasted states. based on the observation space, action space, and reward policy defined for a customized gym environment for human behavior monitoring, the learning agents were run for 10 episodes shown on the x-axis, and the cumulative rewards have been awarded as scores for each episode shown on the y-axis.in the healthcare forecasting and monitoring experiment, vital signs such as heart rate, respiration, and temperature are predicted based on the time series data in the forecasting rl environment, and the drl agents monitored the predicted vital signs to communicate with the appropriate medical emergency team in adverse situations. in the traffic dataset, a drl agent is deployed for monitoring the traffic forecasting process by customizing the observation space, action space, and rewards in the rl environment. based on the reward policy defined in the forecasting environment, the drl learning agents predicted the right action, that is, the right medical emergency team (met) to communicate the emergency of each vital sign. the learning agents were compelled to learn the behavior of the data patterns based on the reward policy for all possible actions in the action space for each state in the continuous observation space. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/411.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/411.txt new file mode 100644 index 0000000000000000000000000000000000000000..373334bc65f2c056439771793b7a1607b38ccd19 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/411.txt @@ -0,0 +1 @@ +in recent times, clustering has been the centerpiece of major fields such as data science, machine learning, knowledge discovery, statistics and data mining. in the information age, due to the presence of a plethora of uncleaned, unlabeled data, extraction of insights from this data is very essential in many applications.clustering is the process of breaking down data into meaningful subdivisions called clusters based on the similarity between data points. the points in a cluster have a higher similarity than the ones across clusters.there is always room for improvement in the clustering paradigm where a newer algorithm is more efficient and effective for a certain distribution of data. one of the most important problems faced while designing a clustering algorithm is choosing the parameters of the algorithm. if the algorithm is susceptible to a tiny change in those parameters, the robustness of the algorithm is affected. due to this problem, most of the time is spent on selecting the ideal parameters for the given data rather than on clustering itself. algorithms such as dbscan use parameters that are hard to estimate in a short period of time.partitioning clustering algorithms are the simplest kind of clustering algorithms. the idea is to break down the entire data into arbitrary k clusters where the partitions optimize a given function. for every cluster, a representative in the form of a centroid, medoid, etc. is used to iteratively optimize the clusters with the addition of new data points into the cluster.the advantage of using these algorithms lies in the efficiency of their linearity. but, due to the reliance on the initial configuration of clusters, these algorithms lack robustness. 
also, they are not suitable for non-convex data or data with noise.hierarchical clustering algorithms produce a nested structure of clustering data points. they contain two types: topdown and bottom-up. in top-down algorithms, initially, the entire data set is taken as a single cluster and it is sequentially broken down into smaller clusters until they are singleton clusters. on the other hand, bottom-up clusters consider every point as a singleton cluster and sequentially combines the data points into bigger clusters than in the previous level. the advantages of using these algorithms lie in the flexibility of choosing the most appropriate number of clusters and their sizes from different levels of clusters. like the partitioning cluster algorithms, they are very sensitive to the presence of noise. also, they might encounter difficulties in handling convex and large data. hence, they can prove to be ineffective for real data.density based clustering algorithms group objects / data points based on the density of the locality rather than the proximity between data points. the high density regions are considered as clusters and low density ones as noise. with the advent of density based algorithms, clustering performance was boosted due to the provision of dealing with noise and non-convex data. but, these algorithms are very sensitive to the input parameters as small changes in the values of the parameters can completely shift the structure of clusters. nevertheless, the performance of density based algorithms is generally greater than partitioning algorithms.distribution-based clustering algorithms group data based on likelihood of data points belonging to a distribution (or cluster). objects / data points which most likely belong to the same distribution are clustered together. though their theoretical foundation is sound, they suffer from over fitting as complex models are generated easily. hence, estimation of the complexity of the model is difficult. moreover, real data may not belong to a precise distribution model and the presence of such models will lead to poor performance in these algorithms . however, distribution-based algorithms work well on complex, spatial data.with a plethora of clustering algorithms with their own advantages and disadvantages, a general algorithm is desired. in this paper, we introduce a modular design to our model to accommodate these various needs of clustering algorithms. in this design, we obtain hyper-parameters for the novel algorithm by pre-clustering a fraction of data with the best standard algorithm for that distribution and fine-tuning these results with our algorithm. along with this design, the model contains a salient feature to specify the amount of noise to be excluded by the algorithm.this paper is organized as follows: related work on clustering, modular algorithms are briefly discussed in section 2. in section 3, the design and implementation of the new algorithm are comprehensively explained. in section 4, the performance evaluation of the algorithm when compared to k-means and dbscan is presented. section 5 concludes the paper and some ideas for future research are discussed.clustering is the process of breaking down data into meaningful subdivisions called clusters based on the similarity between data points. 
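As a concrete instance of the partitioning family discussed above, the following toy sketch runs k-means on synthetic 2-D data with scikit-learn; the data and the choice of k are assumptions made purely to ground the terminology (centroid, assignment, iterative refinement), not anything taken from the paper.

```python
import numpy as np
from sklearn.cluster import KMeans

# Toy 2-D data: two well-separated blobs (illustrative data only).
rng = np.random.default_rng(0)
X = np.vstack([
    rng.normal(loc=[0, 0], scale=0.5, size=(50, 2)),
    rng.normal(loc=[5, 5], scale=0.5, size=(50, 2)),
])

# Partitioning clustering: fix k, then iteratively refine centroids and assignments.
kmeans = KMeans(n_clusters=2, n_init=10, random_state=0).fit(X)
print("centroids:\n", kmeans.cluster_centers_)
print("first ten labels:", kmeans.labels_[:10])
```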
on the other hand, bottom-up clusters consider every point as a singleton cluster and sequentially combines the data points into bigger clusters than in the previous level.density based clustering algorithms group objects / data points based on the density of the locality rather than the proximity between data points.distribution-based clustering algorithms group data based on likelihood of data points belonging to a distribution (or cluster). in this design, we obtain hyper-parameters for the novel algorithm by pre-clustering a fraction of data with the best standard algorithm for that distribution and fine-tuning these results with our algorithm. referenceintroduces st-dbscan which incorporates extensions of dbscan to discover clusters for spatial, non-spatial and temporal data as opposed to just spatial data by its parent algorithm. once the standard algorithm clusters the sample, we extract two parameters from the result -the clustering centroids and the proportion of data points in each of the clusters. iteratively, new points are added to the cluster and the front-runners are constantly updated till the exit condition -the number of points in the cluster is equal to the threshold of that cluster (calculated in the first phase) -is satisfied.with the flexibility to specify the number of points a cluster can include in itself, a unique property is observed: the difference between the total number of points in the data and the sum of the number of points clustered, can be defined as the noise in the data. when x percent of data is specified as noise to the model, the model excludes x percent of the total data when the proportions of points in each cluster are calculated.input data set df, number of front-runners n f r , noise factor n output centroids centroids, thresholds thresholds sample = take a sample of data from the data set df.a small a(i) means that a datum i is closely matched with other data in the same cluster and a large b(i) indicates that the datum i is poorly matched with data present in other clusters. epsilon distance specifies how close the points should be to a core point to consider those points as a part of the cluster and minpts specifies how many points should be in the epsilon distance from a point for it to become a core point.4) ability to cluster data of arbitrary shape: spatial databases may contain convex, non-convex and other data of arbitrary shape, and good clustering algorithms can cluster any data sufficiently well. with different "underlying" algorithms suited for different distributions and types of data, suitable parameters of the data are transferred to the "core" bacteria-farm algorithm which uses a novel approach to effectively cluster the data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/412.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/412.txt new file mode 100644 index 0000000000000000000000000000000000000000..efa0b621318f984fabbf8bda181b2cfdfe94c57e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/412.txt @@ -0,0 +1 @@ +the emergence of machine learning (ml) in healthcare signifies a paradigm shift towards automating clinician tasks and augmenting patient care capabilities . amidst the evolving ml landscape, federated learning has gained traction for preserving data privacy while constructing sophisticated server models . 
reinforcement learning (rl), another ml strategy, has demonstrated substantial improvements in prediction performance and decision-making tasks , . rl's application is particularly noteworthy in controlling autonomous systems, such as robots and drones, training them to make optimal decisions in real-time based on environmental sensor data.thanveer shaik and xiaohui tao are with the school of mathematics, physics & computing, university of southern queensland, toowoomba, queensland, australia (e-mail: thanveer.shaik@usq.edu.au, xiaohui.tao@usq.edu.au).haoran xie is with the department of computing and decision sciences, lingnan university, tuen mun, hong kong (e-mail: hrxie@ln.edu.hk) lin li is with the school of computer and artificial intelligence, wuhan university of technology, china (e-mail: cathylilin@whut.edu.cn) jianming yong is with the school of business at the university of southern queensland, queensland, australia (e-mail: jianming.yong@usq.edu.au) yuefeng li is with the school of computer science, queensland university of technology, brisbane, australia (e-mail: y2.li@qut.edu.au).• s represents a finite state space, where s t ∈ s signifies the state of an agent at a specific time t, • a is the set of actions available to each agent, and a t ∈ a represents the action taken by the agent at time t, • p is a markovian transition function p (s, a, s ′ ) that quantifies the probability of the agent transitioning from state s to state s ′ while executing action a, • r is a reward function r : s × a → r that provides an immediate reward r(s, a) for the action a performed in state s, • γ is a discount factor, ranging between 0 and 1, which emphasizes immediate rewards over future rewards. in this algorithm, the q-learning functions are approximated using the proposed t-gcn model, and the learning agent is rewarded based on the graph network prediction of the right action for the current state. 3) graphrl agent algorithm: algorithm 2 introduces the graphrl agent, presents the functionality of the graphrl agent within a complex action-state environment, utilizing t-gcn for q-function approximation.4) implementation algorithm: algorithm 3 serves as the comprehensive implementation of the graphrl framework, intricately combining the predictive graphrl environment (algorithm 1) with the graphrl agent (algorithm 2). the los angeles (la) traffic dataset, sourced from the los angeles department of transportation (ladot), provides real-time urban traffic data like traffic counts and speeds, while the large-scale traffic and weather events (lstw) dataset, with data across the united states, uniquely combines traffic conditions and weather events, posing a multifaceted challenge for the framework. the observation space of the vital sign, action space of different emergency teams and rewards for the agent actions in the predictive graphrl environment are defined based on the modified early warning scores (mews). for the evaluation process, the wesad dataset traffic forecasting: the goal of the proposed framework is to predict traffic using the predictive graphrl environment.weather forecasting: in weather forecasting, the goal of the proposed framework is to use past weather data to predict future weather events and to optimise actions based on those predictions.the proposed rl agent was enabled with t-gcn and its performance is compared with other traditional rl agents as shown in tab. 
the table provides a comparison of different ai agents and their performance on three different datasets: wesad, lam traffic forecasting, and us weather forecasting. from the table, it can be seen that the proposed graphrl agent is the most efficient agent on the wesad dataset, as it scored the highest score. on the lam traffic forecasting dataset, the q-learning agent scored the lowest, and the proposed graphrl agent scored the highest. on the us weather forecasting dataset, the a2c agent scored the lowest, while the graphrl agent scored the highest. the performance of the graphrl agent is measured by the episode score, which appears to be the total score achieved by the agent after a certain number of episodes. rigorous evaluations, utilizing an array of datasets such as wesad, la traffic forecasting, and us weather forecasting, have substantiated the framework's enhanced performance compared to conventional rl agents. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/413.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/413.txt new file mode 100644 index 0000000000000000000000000000000000000000..0795f57a3a474dbac6f3d70e1d0c85003e1cbdd1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/413.txt @@ -0,0 +1 @@ +product mapping or product matching (pm) is the process of matching identical products from different e-shops, where each product can be described by different graphical and textual data. it has an important application in e-commerce as it allows for general marketplace analysis and price comparison among different e-shops. product mapping is challenging as there is no general identification of products available on all the websites. therefore, models for measuring similarity of the products need to be trained to identify matching pairs of products based on as much textual and image data describing each product as available. existing freely available datasets , , , are limited as they often do not provide all data describing each product. moreover, they consist only of very distant non-matches. therefore, training the predictive models to distinguish matches and non-matches is very simple. to fill this gap, we create a new group of freely available datasets for product matching by manually scraping selected e-shops and searching for matching and non-matching pairs of products that create a good challenge for further research. in this paper, we present a dataset for czech product mapping -promapcz and a dataset for english product mapping -promapen.we created these datasets for product mapping by selecting some products from different categories from one e-shop and manually searching for matching pairs in another e-shop. we scraped all available product information including name, price, images, long and short descriptions, specification and source url. the comparisons were made manually without any type of id by comparing the available information and then again manually checked.the promapcz dataset consists of 1,495 matching and non-matching pairs from different categories for training models for czech product mapping from alza.cz and mall.cz e-shops. 34 percent of products are matching, 30 percent are close non-matches, and the rest are medium non-matches. all the available product information is in the czech language.promapen consists of 1,555 matching and non-matching pairs from different categories for training models for english product mapping from walmart.com and amazon.com e-shops. 
32.7 percent of products are matching, 32.7 percent are close non-matches, and the rest are medium non-matches. all the available product information is in the english language. both datasets are available at https://github.com/kackamac/product-mapping-datasets.as a baseline for further research, we preprocessed these datasets and trained several machine-learning models to solve product mapping tasks in english and czech. we reached the best results with neural network-based models -f1 scores of 0.777 and 0.706 on promapcz and promapen respectively. we also preprocessed and trained the models on two existing datasets -amazon-walmart and amazon-google and compared the results to show that the newly proposed datasets are indeed more challenging and provide a good benchmark for product mapping models.in the rest of the paper, we first discuss existing product matching datasets and their shortcomings in the next section. in section 3, we describe in detail, how the promap datasets were created. section 4 details the preprocessing performed on these datasets and the following section contains results obtained by a number of different machine learning methods.product mapping or product matching (pm) is the process of matching identical products from different e-shops, where each product can be described by different graphical and textual data. therefore, models for measuring similarity of the products need to be trained to identify matching pairs of products based on as much textual and image data describing each product as available. in this paper, we present a dataset for czech product mapping -promapcz and a dataset for english product mapping -promapen. a deduplication algorithm was performed to obtain 1,154 pairs of matching products between the two data sources creating a dataset for training product mapping models. the selected common product attributes are product name, product description, manufacturer and price. the dataset was created by selecting products from predefined categories on amazon and based on the selected products, the google products dataset was generated by sending queries on the product name. the selected product attributes are product name, product description and product price. there is another type of related dataset such as shopmania, that contains product information organized in a three-level hierarchy comprised of hundreds of categories, or amazon review datathat contains millions of reviews and product metadata (descriptions, category information, price, brand, and image features) from five categories. these datasets contain a lot of product information but they do not contain any product pairs as they are intended for product categorization and not for product mapping. we see two deficiencies in the existing datasets -they either are not complete and do not contain all available product data, or they contain only distant non-matches and, therefore, they are not useful for product mapping in real life, as in practice it is necessary to decide if two very close-looking products are matches or not. in this paper, we therefore created two new datasets having scraped all available product information including images and by selecting not only matching but also two levels of close non-matching products for further model training. for each image in the source product image set, we computed the hamming distance of all images in the target product image set and we selected the most similar image. 
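The image-pairing step just described can be sketched as follows, assuming each product image has already been reduced to a fixed-length binary hash (the excerpt does not say how the images are represented, so the hashing step and the distance cut-off used for filtering are assumptions).

```python
# Sketch of the image-pairing step, assuming each product image has been reduced
# to a fixed-length binary hash (e.g. a perceptual hash); hashing and the
# max_distance cut-off are assumptions, not the paper's exact procedure.

def hamming(a: str, b: str) -> int:
    """Hamming distance between two equal-length bit strings."""
    return sum(x != y for x, y in zip(a, b))

def closest_images(source_hashes, target_hashes, max_distance=10):
    """For every source image, return the most similar target image (or None if too distant)."""
    pairs = []
    for i, s in enumerate(source_hashes):
        j, dist = min(enumerate(hamming(s, t) for t in target_hashes),
                      key=lambda x: x[1])
        pairs.append((i, j) if dist <= max_distance else (i, None))
    return pairs

src = ["1010110010101100", "1111000011110000"]
tgt = ["1010110010101101", "0000111100001111", "1111000011110001"]
print(closest_images(src, tgt))   # -> [(0, 0), (1, 2)]
```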
by finding the most similar image and filtering away too-distant images, we obtained the closest image from the second product image set for each image from the first product image set.we created two datasets for the product mapping task by manual matching and automated extraction of all possible attributes characterizing products from different categories from two different e-shops in the czech and english languages. additionally, even the medium non-matches are pairs of much more similar products than non-matches in existing product mapping datasets, making the new datasets more challenging for product mapping models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/414.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/414.txt new file mode 100644 index 0000000000000000000000000000000000000000..81be6daa73f3c5f7176ead4070aae0d85b5f04d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/414.txt @@ -0,0 +1 @@ +the difficulty to represent many biological, social, and technological networks arises from the complexity of their inherent relational structures and the nuanced definitions of the associated data. importantly, the data in such networks is often defined on higher-order components like edges and triangles, thus demanding an approach that incorporates interactions beyond the pairwise paradigm . simplicial complexes (scs) have been proposed to model such dependencies in higher-order networks . amongst others, scs have shown particular efficacy in edge flow applications (e.g., mass, energy, information, or trajectories) for which traditional graphbased techniques do not induce a good inductive bias . notably, they have been leveraged to alleviate the curse of dimensionality in autoregressive flow prediction for water networks and to remove arbitrage opportunities in currency exchange markets .an important property of scs is that they enjoy an algebraic representation via the hodge laplacian matrices, ultimately allowing for spectral analysis. the latter is achieved through the hodge decomposition of the spectrum and has been used to develop hodgeaware signal processing and learning techniques for edge flows and other simplicial data . recent advances include developing a simplicial fourier transform , convolutional filters , and neural networks that are trained in a (semi-)supervised manner. .while these supervised learning (sl) methods for simplicial data have their merits, they also come with limitations such as a over-reliance on labeled examples and suboptimal performance in data-scarce scenarios. sl methods are also mainly designed for a specific task and the resulting output is generally not reusable in different applications. therefore, we propose a contrastive learning (cl) approach for simplicial data that addresses these issues. to this end, we employ a simplicial convolutional neural network (scnn) to produce embeddings and optimise it in a self-supervised manner using the infonce loss . the resulting cl approach can use both labeled and unlabeled examples to create robust and reusable representations for simplicial data. 
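For readers unfamiliar with the InfoNCE objective mentioned above, here is a minimal NumPy sketch of the loss for a single anchor with one positive and several negatives; it deliberately omits the simplicial convolutional encoder, the augmentations, and the Hodge-aware reweighting of negatives, which are the paper's actual contributions.

```python
import numpy as np

def cosine(u, v):
    return float(u @ v / (np.linalg.norm(u) * np.linalg.norm(v)))

def info_nce(z_anchor, z_pos, z_negs, tau=0.1):
    """InfoNCE for one anchor:
    -log( exp(sim(a,p)/tau) / (exp(sim(a,p)/tau) + sum_m exp(sim(a,n_m)/tau)) )."""
    pos = np.exp(cosine(z_anchor, z_pos) / tau)
    negs = sum(np.exp(cosine(z_anchor, z_neg) / tau) for z_neg in z_negs)
    return float(-np.log(pos / (pos + negs)))

rng = np.random.default_rng(0)
anchor = rng.normal(size=8)
positive = anchor + 0.05 * rng.normal(size=8)        # augmented view of the anchor
negatives = [rng.normal(size=8) for _ in range(4)]    # other samples in the batch
print(round(info_nce(anchor, positive, negatives), 3))
```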
these representations can subsequently be employed in various downstream tasks, offering improved accuracy especially in label-scarce scenarios.to further enhance our method, we propose stochastic augmentations and introduce information about the hodge decomposition into the embeddings by i) optimizing the parameters of the augmentations so that they generate positive examples that respect the spectral properties of the data; and ii) weighting the negative samples in the infonce loss by a hodge-aware distance to the anchor (true datum). this approach results in a spectrally organized embedding space and facilitates downstream learning. we corroborate the latter in two edge flow classification settings and show superior performance compared with a fully-supervised model. related to this, our contribution is threefold: c1) we propose simplicial contrastive learning (scl), design related augmentations and experimentally validate all our approaches;c2) we show how augmentations in the simplicial domain can be optimized with respect to the hodge decomposition;c3) we introduce a reweighing of the negative examples based on the similarity of their hodge components to encourage a spectrally organized embedding space. the latter is achieved through the hodge decomposition of the spectrum and has been used to develop hodgeaware signal processing and learning techniques for edge flows and other simplicial data.while these supervised learning (sl) methods for simplicial data have their merits, they also come with limitations such as a over-reliance on labeled examples and suboptimal performance in data-scarce scenarios.to further enhance our method, we propose stochastic augmentations and introduce information about the hodge decomposition into the embeddings by i) optimizing the parameters of the augmentations so that they generate positive examples that respect the spectral properties of the data; and ii) weighting the negative samples in the infonce loss by a hodge-aware distance to the anchor (true datum). a simplicial complex x k of order k is a collection of simplices such that it contains at least one ksimplex and if s k ∈ x k we have that all subsets of s k are also elements of x k . accordingly, we can decompose any edge flow x, into three parts x = xg + xc + xh each living in an orthogonal subspace known as the gradient space xg ∈ im(b ⊤ 1 ), the curl space xc ∈ im(b2), and the harmonic space xh ∈ ker(l1). our problem statement reads as: given a set of unlabeled edge flows, we want to train a simplicial convolutional neural network in a self-supervised manner to generate embeddings that reflect the hodge-properties of the data and that can be used in a downstream task. in the simplicial setting, this principle suggests that for each edge flow datum x we create both positive and negative examples and train the scnn (a. 
then, we create a positive pair for x via two topological augmentations x ′ i = t1(x) and x ′ j = t2(x) with respective representations z ′ i = gh(fh(x ′ i )) and z ′ j = gh(fh(x ′ j )).where p is the set of all positive pairs in the data, sim(u, v) = u ⊤ v/∥u∥2∥v∥2 is the cosine similarity, τ is a temperature parameter, and m is the number of negative examples xm with representations zm.simplicial data often have particular properties in one of the three hodge embeddings, which may be wrongly affected by augmentations if ignored., where p is a matrix with entries p i,j = x i x j p i p j for i ̸ = j and p i,j = (x i ) 2 p i for i = j.we corroborate the proposed approach and compare it with supervised alternatives on two edge flow classification tasks: i) a synthetic task considering trajectories on a map; and ii) a real-data case that contains ocean drifters moving around the madagascar island. we train the unsupervised simplicial contrastive learner (scl) on all available unlabled data points and fit a linear support vector machine (svm) on the obtained embeddings. the spectral simplicial contrastive learner (ssclspec) trained with reweighted negative samples and spectrally optimized probabilities achieves the best downstream accuracy on both datasets.we show that a contrastive learning framework, when coupled with a simplicial neural network, is effective for generating representations for edge flow data that contain hodge-related information. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/415.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/415.txt new file mode 100644 index 0000000000000000000000000000000000000000..897eddc6294760f0152783b17415dba419c8fa41 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/415.txt @@ -0,0 +1 @@ +knowledge graphs (kgs) are graph-structured knowledge bases where knowledge about the world is encoded in the form of relationships of various kinds between entities. kgs are an extremely flexible and versatile knowledge representation formalism, and are currently being used in a variety of domains, including bioinformatics , recommendation , linguistics , and industry applications . moreover, relational data from such domains is often temporal; for example, the action of buying an item or watching a movie is associated with a timestamp, and some medicines might interact differently depending on when they are administered to a patient. we refer to kgs augmented with temporal information as temporal knowledge graphs (tkgs).when reasoning with tkgs, one of the most crucial tasks is finding or completing missing links in the temporal knowledge graph at a precise time point, often referred to as the temporal link prediction task. one class of models for tackling the problem of identifying missing links in large static or temporal kgs is neural link predictors , i.e. differentiable models which map entities and relationships into a d-dimensional embedding space, and use entity and relation embeddings for scoring missing links in the graph.recently, lacroix et al. and sadeghian et al. proposed two state-of-the-art approaches to temporal link prediction for temporal knowledge graphs. the key idea is to extend factorisation-based neural link prediction models with dense representations for each timestamp and to regularise such representations by enforcing them to change slowly over time. 
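The smoothing idea described here, penalising large changes between the representations of adjacent timestamps, can be written in a few lines. The sketch below is a generic p-norm smoothing penalty over a timestamp-embedding matrix, with the exponent and weight left as free hyper-parameters; it is not the paper's exact regulariser.

```python
import numpy as np

def temporal_smoothing_penalty(T, p=2, weight=1.0):
    """Penalise differences between adjacent timestamp embeddings.

    T is a (num_timestamps, d) matrix; the penalty is
    weight * sum_l || T[l+1] - T[l] ||_p^p, so slowly changing rows are cheap.
    """
    diffs = T[1:] - T[:-1]
    return weight * float(np.sum(np.abs(diffs) ** p))

rng = np.random.default_rng(0)
T_smooth = np.cumsum(0.01 * rng.normal(size=(365, 32)), axis=0)  # slowly drifting
T_rough = rng.normal(size=(365, 32))                              # uncorrelated
print(temporal_smoothing_penalty(T_smooth) < temporal_smoothing_penalty(T_rough))  # True
```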
however, the impact of the choice of temporal regularisation terms is still not well understood.hence, in this work, we systematically analyse a comprehensive array of temporal regularisers for neural link predictors based on tensor factorisation. starting from the temporal smoothing regulariser proposed in , we also consider a wide class of norms in the l p and n p families which allow to control the strength of the smoothing. we further extend the set of temporal regulariser by taking into account the regulariser proposed in , defining it as an explicit modelling of the temporal dynamic between adjacent timestamps. lastly, we propose to adopt a recurrent architecture as an implicit temporal regulariser that can generate timestamp embeddings sequentially and learn the temporal dynamic by updating its parameters during the training phase.we conducted the experimental evaluation over three well-known benchmark datasets: icews14, icews05-15, and yago15k. icews14 and icews05-15 are subsets of the widely used integrated crisis early warning system knowledge graph , while yago15k is a dataset derived from yago covering the entities appearing in fb15k , which adds occurssince and occursuntil timestamps to each triple.our results show that using our proposed temporal regularisers, neural link predictors based on tensor factorisation models can significantly improve their predictive accuracy in temporal link prediction tasks. by carefully selecting a temporal regulariser and regularisation weight, our version of tntcomplex produces more accurate results than all of the baselines on icews14, icew05-15, and yago15k. overall, linear regularizers for temporal smoothing that introduce smaller loss penalties for closer timestamp representations achieve the best performance. in contrast, recurrent architecture struggles to generate a long sequence of timestamps.a knowledge graph g ⊆ e × r × e contains a set of subject-predicate-object ⟨i, j, k⟩ triples, where each triple represents a relationship of type j ∈ r between the subject i ∈ e and the object k ∈ e of the triple. more formally, neural link predictors are defined by a parametric scoring function ϕ θ : e × r × e → r, with parameters θ that, given a triple ⟨i, j, k⟩, produces the likelihood that entities i and k are related by the relationship j. for example, in transe, the score of a triple ⟨i, j, k⟩ is given by ϕ transe θ (i, j, k) = -∥i + j -k∥ 2 , where i, j, k ∈ r d denote the embedding representations of i, j, and k, respectively. in distmult, the scoring function is defined as ϕ distmult θ (i, j, k) = ⟨i, j, k⟩ = d z=1 i z j z k z , where ⟨ • , • , • ⟩ denotes the tri-linear dot product. canonical tensor decompositionis similar to distmult, with the difference that each entity e ∈ e has two representations, e head ∈ r d and e tail ∈ r d , depending on whether it is being used as a head (subject) or tail (object): ϕ cp θ (i, j, k) = ⟨i head , j, k tail ⟩. in rescal, the scoring function is a bilinear model given by ϕ rescal θ (i, j, k) = i ⊤ jk, where i, k ∈ r d is the embedding representation of i and j, and j ∈ r d×d is the representation of j. 
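As a quick reference, the scoring functions just listed translate almost verbatim into code; the small NumPy sketch below evaluates the TransE, DistMult, and RESCAL scores for a single triple of random toy embeddings, purely to make the formulas concrete.

```python
import numpy as np

def score_transe(i, j, k):
    """TransE: -|| i + j - k ||_2 (distance is negated so that higher is better)."""
    return -float(np.linalg.norm(i + j - k))

def score_distmult(i, j, k):
    """DistMult: tri-linear dot product sum_z i_z * j_z * k_z."""
    return float(np.sum(i * j * k))

def score_rescal(i, J, k):
    """RESCAL: bilinear form i^T J k, with a full d x d matrix J per relation."""
    return float(i @ J @ k)

d = 4
rng = np.random.default_rng(0)
i, j, k = rng.normal(size=d), rng.normal(size=d), rng.normal(size=d)
J = rng.normal(size=(d, d))
print(score_transe(i, j, k), score_distmult(i, j, k), score_rescal(i, J, k))
```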
i, j, k ∈ c d -and the scoring function is given by ϕ complex θ (i, j, k) = ℜ(⟨i, j, k⟩), where ℜ(x) represents the real part of x, and x denotes the complex conjugate of x.a temporal knowledge graph (tkg) is referred to a set of quadruples k = {(i, j, k, l) | i, k ∈ e, j ∈ r, l ∈ t }., i, j, k, t l ∈ r d ) and a scoring function ϕ θ (i, j, k, l) ∈ r, such that true quadruples receive high scores.where i and k are the embeddings of entities i, k ∈ e, j is the embedding for the relation j ∈ r, and t l is the embedding for the timestamp l ∈ t .in heterogeneous knowledge bases, where only part of the relation types are temporal, they introduce an embedding representation -j t -whether the relation j is temporal and an embedding j otherwise.where i, k ∈ r n×d denote the entity embeddings obtained by vertically concatenating the embedding of the entity i or k in each training tuple, and q j,t l represents the (row-wise) linear operator parameterised by the relation and timestamp embeddings j and t l . specifically, q is parameterized by j, t l by simply concatenating the embeddings to get q j,t l = , where j ∈ r n j ×d and t l ∈ r n l ×d are the representations of the fact's relation type and time elements, and n j + n l = n. for each quadruple (i, j, k, l) in the test set, we fill (i, j, ?, l) and (?, j, k, l) by scoring and sorting all possible entities in e.model mrr hit@1 hits@3 hit@10 mrr hit@1 hit@3 hit@10 mrr hit@1 hit@3 hit@10 model mrr hit@1 hits@3 hit@10 mrr hit@1 hit@3 hit@10 mrr hit@1 hit@3 hit@10 chronor + n3 (2021) table3: evaluation on the yago15k, icews14, and icews05-15 datasets for tntcomplex and chronor models using different temporal regularisers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/416.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/416.txt new file mode 100644 index 0000000000000000000000000000000000000000..ff8c670941d4f7ef375d338507a06b4249221636 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/416.txt @@ -0,0 +1 @@ +in recent years, the abundance of text data and its crucial role in various applications, such as natural language processing, information retrieval, and sentiment analysis, has garnered significant attention. however, one key challenge that researchers and practitioners face when working with text data is the presence of covariate drift. covariate drift refers to the phenomenon where the underlying distribution of the data changes over time, leading to a mismatch between the training and test data.detecting and addressing covariate drift is of paramount importance as it can have detrimental effects on the performance and reliability of text analysis models. when drift occurs, models trained on historical data may become obsolete or yield inaccurate results when applied to current data. thus, developing effective methods to identify and mitigate covariate drift is crucial for maintaining the efficacy of text data analysis.in this research, our objective is to identify which document embeddings, dimensionality reduction techniques, and drift detection methods work best for detecting covariate drift in text data. specifically, we explore the effectiveness of three widely used document embeddings: term frequency-inverse document frequency (tf-idf), doc2vec, and bert embeddings. 
additionally, we investigate the impact of dimensionality reduction techniques on drift detection, such as principal component analysis (pca) and latent semantic analysis(lsa).to evaluate the performance of the different approaches, we employ two popular drift detection methods: the kolmogorov-smirnov (ks) statistic and the maximum mean discrepancy (mmd) test. these methods provide statistical measures to quantify the divergence between the training and test data distributions.by conducting comprehensive experiments and comparative analyses, we aim to identify the most effective combination of embeddings, dimensionality reduction techniques, and drift detection methods for detecting and monitoring covariate drift in text data. the insights gained from this research will contribute to enhancing the robustness and reliability of text analysis models, enabling their effective deployment in dynamic environments where data distributions evolve over time.the remainder of this paper is organized as follows: section 2 provides background information and reviews related work on covariate drift detection, document embeddings, and dimensionality reduction techniques. section 3 presents the methodology, including the datasets used, document embeddings, dimensionality reduction techniques, and drift detection methods. section 4 details the experimental setup, while section 5 presents the results and analysis, followed by concluding remarks in section 6.in this research, our objective is to identify which document embeddings, dimensionality reduction techniques, and drift detection methods work best for detecting covariate drift in text data.by conducting comprehensive experiments and comparative analyses, we aim to identify the most effective combination of embeddings, dimensionality reduction techniques, and drift detection methods for detecting and monitoring covariate drift in text data.covariate drift detection in text data poses a significant challenge in maintaining the reliability and performance of text analysis models.applied the ks statistic to detect covariate drift in text data, specifically in the context of detecting concept drift in text classification.employed the mmd test to detect concept drift in sentiment analysis tasks, demonstrating its effectiveness in capturing changes in the sentiment distribution of text data.in their study,wang et al. they utilized a sentiment lexicon to detect changes in sentiment distributions and successfully identified covariate drift in sentiment analysis models. (2021)investigated drift detection in text data using distributional shifts in word embeddings.by investigating the performance of tf-idf, doc2vec, and bert embeddings, with and without dimensionality reduction using pca and lsa, and utilizing the ks statistic and mmd test for drift detection, we aim to provide insights into the best strategies for detecting and addressing covariate drift in text data.to detect covariate drift in the text data, we employ two drift detection methods: maximum mean discrepancy (mmd) and kolmogorov-smirnov (ks) statistic. it is commonly used for detecting changes in data distributions and can be applied to identify covariate drift in text data. 
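To illustrate how such a two-sample test is typically applied to document embeddings, the sketch below vectorises a reference batch and a current batch with TF-IDF, reduces them with PCA, and runs a Kolmogorov-Smirnov test per retained component; the toy texts, the number of components, and the significance threshold are assumptions rather than the paper's exact setup.

```python
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import PCA
from scipy.stats import ks_2samp

reference = ["the market rallied on strong earnings", "central bank holds interest rates",
             "shares climb after upbeat forecast", "investors weigh inflation data"]
current = ["new striker scores twice in derby", "coach praises defensive display",
           "team seals title with late goal", "injury forces captain off the pitch"]

# Shared TF-IDF vocabulary, then PCA fitted on the reference batch.
vec = TfidfVectorizer().fit(reference + current)
pca = PCA(n_components=2).fit(vec.transform(reference).toarray())
ref_z = pca.transform(vec.transform(reference).toarray())
cur_z = pca.transform(vec.transform(current).toarray())

# One KS test per component; a small p-value signals a shift in that component.
for c in range(ref_z.shape[1]):
    stat, p = ks_2samp(ref_z[:, c], cur_z[:, c])
    print(f"component {c}: ks statistic = {stat:.2f}, p-value = {p:.3f}")
```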
by comparing the ks statistic between the distributions of the reference and current data, we can determine if there is a significant change in the data distribution, indicating covariate drift.by combining these document embeddings, dimensionality reduction techniques, and drift detectors, we aim to evaluate the performance of different approaches in detecting covariate drift in text data.by employing the ag-news subset, we aim to evaluate the effectiveness of our proposed approaches in detecting covariate drift and capturing distributional shifts in text data. further analysis and experimentation can be conducted to explore additional models, dimensionality reduction techniques, and drift detection methods to improve the accuracy and efficiency of drift detection in various text analysis tasks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/417.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/417.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d7824efc8027ba9852a9284024e4d9661a1fddd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/417.txt @@ -0,0 +1 @@ +continual learning (cl) recently emerged as a new paradigm for designing artificial intelligent (ai) systems that are adaptive and selfimproving over time . in cl, ai agents continuously learn from non-stationary data streams and adapt to dynamic environments. as such, cl can be applied to many real-time ai applications such as autonomous vehicles or digital twins . for these applications to be effectively deployed, it is important to guarantee robustness to unseen environments while retaining past knowledge. however, modern deep neural networks often forget previous experiences after learning new information and struggle when faced with changes in data distributions . although cl can mitigate forgetting issues, ensuring both memorization and robust generalization to unseen environments is still a challenging problem.to handle forgetting issues in cl, many practical approaches have been proposed using memory and regularization . for memory-based methods , a memory is deployed to save past data samples and to replay them when learning new information. regularization-based methods use regularization terms during model updates to avoid overfitting the current environment. this research was supported by a grant from the amazon-virginia tech initiative for efficient and robust machine learning.the source code is available on https://github.com/news-vt however, these approaches did not consider or theoretically analyze generalization performance on unseen environments even though they targeted non-stationary data streams.recently, a handful of works studied the generalization of cl agents. in , the authors theoretically analyzed the generalization bound of memory-based cl agents. the work in used game theory to investigate the tradeoff between generalization and memorization. in , the authors analyzed generalization and memorization under overparameterized linear models. however, the works in require that an ai agent knows when and how task/environment identities, e.g., labels, will change. in practice, such information is usually unavailable and unpredictable. hence, we focus on more general cl settings without such assumption. meanwhile, the works in empirically analyzed generalization under general cl settings. 
however, they did not provide a theoretical analysis of the tradeoff between generalization and memorization performance.the main contribution of this paper is a novel cl framework that can achieve robust generalization to dynamic environments while retaining past knowledge. in the considered framework, a cl agent deploys a capacity-limited memory to save previously observed environmental information. then, a novel optimization problem is formulated to minimize the worst-case risk over all possible environments to ensure the robust generalization while balancing the memorization over the past environments. however, it is generally not possible to know the change of dynamic environments, and deriving the worst-case risk is not feasible. to mitigate this intractability, the problem is relaxed with probabilistic generalization by considering risks as a random variable over environments. then, data points are sampled from the memory to estimate the distribution of risks over environmental change so as to obtain predictors that are robust with unseen changes. we then provide theoretical analysis about the generalization and memorization of our framework with new insights that a tradeoff exists between them in terms of the memory size. experiments show that our framework can achieve robust generalization performance for unseen target environments while retaining past experiences. the results show up to a 10% gain in the generalization compared to memory-based cl baselines. although cl can mitigate forgetting issues, ensuring both memorization and robust generalization to unseen environments is still a challenging problem.com/news-vt however, these approachesdid not consider or theoretically analyze generalization performance on unseen environments even though they targeted non-stationary data streams.the main contribution of this paper is a novel cl framework that can achieve robust generalization to dynamic environments while retaining past knowledge. then, a novel optimization problem is formulated to minimize the worst-case risk over all possible environments to ensure the robust generalization while balancing the memorization over the past environments. experiments show that our framework can achieve robust generalization performance for unseen target environments while retaining past experiences. to this end, we use a memory mt of limited capacity 0 ≤ |mt| ≤ |m| so that the agent can save the observed data samples {(.as the agent continuously experiences new environments, it is important to maintain the knowledge of previous environments (memorization) and generalize robustly to any unseen environments (generalization). since we use data samples in mt to estimate problem(6), the size of mt naturally represents the richness of estimation f r. to capture this tradeoff between the generalization and memorization, next, we analyze the impact of the memory size |mt| on the memorization and generalization performance.from theorem 1, we observe that as the memory size |mt| increases, the probability that the difference between risks of θt and θ * mt is smaller than ǫ decreases. however, as we have multiple environments in mt, θt will more likely deviate from θ * mt . next, we present the impact of the memory size |mt| on the generalization performance. 
as we have more information about the environments in mt, the model θt becomes more robust to the dynamic changes of the environments, thereby improving generalization.in this paper, we have developed a novel cl framework that provides robust generalization to dynamic environments while maintaining past experiences. the experimental results show that our framework can achieve robust generalization to unseen target environments during training while retaining past experiences. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/418.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/418.txt new file mode 100644 index 0000000000000000000000000000000000000000..c834b8eebd07cb696191126d6365c94b7cd9f99a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/418.txt @@ -0,0 +1 @@ +cancer is a complex genetic disease that originates from the accumulation of gene mutations within a cell and is ranked as the second leading cause of death in the united states according to the american cancer society 1 . given the tumor heterogeneity arising from the genetic variations among patients even with the same cancer type, substantial differences in the anti-cancer drug response can be expected, thereby highlighting the urgent need for targeted therapies. owing to the high cost and time associated with developing and validating anti-cancer drugs in clinical trials which is further exacerbated by the 96% failure rate, the development of preclinical computational models that can accurately predict whether a cell line is sensitive or resistant to a particular drug is imperative. the availability of large-scale pharmacogenomics datasets collected via high-throughput screening technologies offers feasible resources to develop robust drug response models and identify the important biomarkers predictive of drug sensitivity.large language models (llm), such as the generative pre-trained transformer (gpt-3) from openai, are "taskagnostic models" pre-trained on large textual corpora crawled from the web that have exhibited unprecedented capabilities on a broad array of nlp tasks. recent studies have noted the potential of gpt-3 in the biomedical domain 2,3 ; however, these studies focus on processing nlp datasets that include unstructured text and their applicability to biomedical tasks with structured data (e.g., pharmacogenomics data) remains unexplored. to this end, this work aims to investigate gpt-3's potential for anti-cancer drug sensitivity prediction on the genomics of drug sensitivity in cancer (gdsc) 4 database containing tabular pharmacogenomic information. the main contributions of this work include: (1) task-specific prompt engineering of the structured data, (2) evaluating and comparing the performance of gpt-3 for drug sensitivity prediction in the zero-shot and fine-tuning settings, (3) analyzing the effect of simplified molecular input line entry specification (smiles) sequences of drugs and genomic mutation features of cancer cell lines on the model's generalization and (4) we release a web app for using the gpt-3 variant fine-tuned on the gdsc data for drug sensitivity classification at https://huggingface.co/spaces/shaikachy/sensitivecancergpt. 
owing to the high cost and time associated with developing and validating anti-cancer drugs in clinical trials which is further exacerbated by the 96% failure rate, the development of preclinical computational models that can accurately predict whether a cell line is sensitive or resistant to a particular drug is imperative. the availability of large-scale pharmacogenomics datasets collected via high-throughput screening technologies offers feasible resources to develop robust drug response models and identify the important biomarkers predictive of drug sensitivity. to this end, this work aims to investigate gpt-3's potential for anti-cancer drug sensitivity prediction on the genomics of drug sensitivity in cancer (gdsc)4database containing tabular pharmacogenomic information. the main contributions of this work include: (1) task-specific prompt engineering of the structured data, (2) evaluating and comparing the performance of gpt-3 for drug sensitivity prediction in the zero-shot and fine-tuning settings, (3) analyzing the effect of simplified molecular input line entry specification (smiles) sequences of drugs and genomic mutation features of cancer cell lines on the model's generalization and (4) we release a web app for using the gpt-3 variant fine-tuned on the gdsc data for drug sensitivity classification at https://huggingface.we utilized the drug-cancer cell lines pairs and their corresponding drug response data (i., the half maximal inhibitory concentration (ic50)) from the new version of gdsc database (gdsc2) across 5 tissue types -lung adenocarcinoma (luad), breast invasive carcinoma (brca), colon and rectum adenocarcinoma (coread), thyroid carcinoma (thca) and brain lower grade glioma (lgg)which in total cover 288 unique drugs and 183 unique cell lines. we created dataset per tissue type which resulted in 16378, 20372, 35138, 1132 and 2516 drug-cell line pairs for the luad, brca, coread, thca and lgg cohorts, respectively; each cohort was trained and evaluated using gpt-3 separately with 80%-20% stratified split for the training and test sets. in addition, to inspect the effect of integrating additional context with the input in the form of drug's chemical structure (smile) and gene mutation information on the model's performance, we create ablated datasets wrt different input combinations for luad as the illustrative tissue type. the following are the ablated input combinations and their data sizes: drug + cell line + smile (12003), drug + cell line + mutation (4469), drug + cell line + smile + mutation (3500). to convert the ic50 drug response values to binary labels, we check if the 'feature delta mean ic50' value is negative (sensitive) or positive (resistant)., "drug name", "drug target", "drug smile", "gene mutation", "drug response"), we first convert the structured cell values in each row to a natural language text t using the corresponding column names (e. the prompt p is directly inputted into the gpt-3 ada model via openai completions api to generate the response r corresponding to the model's drug response prediction., 'drug name:', 'drug target:', 'gene mutation:') before the respective cell values and concatenate them together using new line as the delimiter (e. the results reveal that gene mutation features alone and also in combination with the drug's smile representations are more informative of drug response, with 24% and 29% performance gains in f1 respectively. 
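The prompt-engineering step described above essentially serialises each table row using its column names; one plausible way to do this in plain Python is sketched below. The example row values, the column set, and the completion suffix are illustrative assumptions, and the call to the fine-tuned model itself is omitted.

```python
# Serialise one GDSC-style row into a natural-language prompt by prefixing each
# cell value with its column name and joining with newlines (illustrative values only).
def row_to_prompt(row: dict) -> str:
    lines = [f"{column}: {value}" for column, value in row.items()]
    return "\n".join(lines) + "\n\ndrug response:"

example_row = {
    "drug name": "Erlotinib",
    "drug target": "EGFR",
    "cell line": "A549",
    "tissue": "lung adenocarcinoma (LUAD)",
    "gene mutation": "KRAS p.G12S",
}
print(row_to_prompt(example_row))
# The prompt would then be sent to the (fine-tuned) language model, and its completion
# ("sensitive" or "resistant") taken as the predicted drug response.
```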
comparative analysis was performed to demonstrate gpt-3's drug response generalizability in the zero-shot vs finetuning settings, where the fine-tuning performance was further enhanced with the use of gene mutation and drug's smile features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/419.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/419.txt new file mode 100644 index 0000000000000000000000000000000000000000..3f24abdd1e60132cf4dd13b6c6c5d15b404bc27c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/419.txt @@ -0,0 +1 @@ +applications of shallow and deep neural networks have rapidly become indispensable in a vast range of research disciplines, and their technological impact in recent years has been enormous. however, despite of those successes, the fundamental conceptual reasons underlying their functioning are currently insufficiently understood, and remain the subject of intense investigation, . the common approach to the minimization of the cost (loss) function is based on the gradient descent flow generated by it, and a variety of powerful algorithms have been developed for this purpose that can be highly successful in suitable application scenarios; but nevertheless, even if a global or sufficiently good local minimum is obtained, the properties of the minimizing parameters (weights, biases) remain quite mysterious.in this paper, we address the cost minimization problem via explicit construction of upper bounds on the global minimum of the cost function, without invoking gradient descent at all. the inspiration for our approach is drawn from methods in mathematical physics aimed at determining the ground state energy for complex manybody quantum systems (such as large molecules), see for instance . our main goal here is to obtain a rigorous mathematical understanding of the geometric structure of (approximate) cost minimizers; we do not focus on the development of efficient algorithms, or exploration of application cases in the present work. to make our exposition broadly accessible, we will mostly employ standard mathematical terminology, while minimally referring to specialized nomenclatures specific to the neural networks literature.the current paper is the first in a series of works in which we investigate the geometric structure of cost minimizers in neural networks (that is, weights and biases that minimize the cost). here, we analyze underparametrized shallow neural networks in the context of supervised learning, with one hidden layer, a relu activation function, an l 2 schatten class (or hilbert-schmidt) cost function, input space r m , output space r q with q ≤ m , and training input sample size n > qm that can be arbitrarily large -therefore, we are considering the underparametrized regime. we prove an upper bound on the minimum of the cost function of order o(δ p ) where δ p measures the signal to noise ratio of training inputs. first, we derive an upper bound on the minimum of the cost function with an explicit construction of suitable weights and biases. as a by-product of our proof, we obtain a constructively trained shallow network with explicit parameters. in the special case m = q, we explicitly determine an exact degenerate local minimum of the cost function, and show that the sharp value differs from the upper bound obtained for q ≤ m by a relative error o(δ 2 p ). 
the above assumptions allow for a succinct exposition of core geometric aspects of the problem; an analysis of more general situations is left for future work. for some thematically related background, see for instance and the references therein. we present the explicit construction of local and global l 2 cost minimizers in underparametrized deep learning (that is, multilayer) networks in a separate work, , which will invoke results developed in the paper at hand, combined with inspiration drawn from rigorous renormalization group analysis in quantum field theory, as for instance in .in this introductory section, we summarize the main results of this paper. we consider a shallow network for which y j ∈ r q denotes the j-th output vector, and define the output matrixwhich we assume to be invertible. we assume the input space to be given by r m with m ≥ q, and for j = 1, . . . , q, we letdenote the matrix of training input vectors x 0,j,1 , i = 1, . . . , n j , which belong to the output y j . letting n := q j=1 n j , the full matrix of training inputs is given by(1.3)we assume that q ≤ m ≤ m q < n where n can be arbitrarily large. we define the average of all training input vectors belonging to the output y j ,for j = 1, . . . , q, and ∆x 0,j,i := x 0,j,ix 0,j .(1.5)moreover, we letandmoreover, we definewhere the superscript indicates that this is the column-wise reduction of the matrix x 0 in (3.4), below. we may then define the orthoprojector p = p 2 = p t onto the span of the family {x 0,j } q j=1 which we assume to be linearly independent. we let p ⊥ := 1 -p . we consider a shallow network with hidden layerdetermined by a weight matrix w 1 ∈ r m×m , bias vector b 1 ∈ r m , and relu activation function σ, which acts component-wise as a ramp function (a) + := max{0, a}. we note that the property of σ acting component-wise singles out the given coordinate system in r m as distinct.we define the output layer with the map r m → r q given bywhere w 2 ∈ r q×m and b 2 ∈ r q . here, we do not include an activation function.then, the l 2 cost function c is defined aswhere | • | r q denotes the euclidean norm on r q . in theorem 3.1, we prove an upper bound on the minimum of the cost function with the following construction.let r ∈ o(m ) denote an orthogonal matrix that diagonalizes p . that is, the orthoprojectorsare diagonal in the given coordinate system; this is important for compatibility with the fact that σ acts component-wise. namely, r rotates the input data in such a way that range(p ) is made to align with the coordinate axes; this decouples the action of σ on the rotated range(p ) from its action on the rotated range(we may choose β 1 ≥ 0 sufficiently large (β 1 ≥ 2 max j,i |x 0,j,i | is sufficient), so that the projected, translated, and rotated training input vectors rp x 0,j,i + β 1 u m are component-wise non-negative for all i = 1, . . . , n j , and j = 1, . . . , q.then, we construct an upper bound on the cost function by use of the following weights and biases.we choosewith β 1 ≥ 0 large enough to ensure component-wise non-negativity of rp x 0 +(1.17)on the other hand, the m -q rows of rp ⊥ x 0 + p ⊥ r b * 1 are eliminated by way ofwe thus obtain a reduction of the dimension of the input space from m to q.passing to the output layer, we require w * 2 to solvewhich yieldsdenotes the penrose inverse of x red 0 , which satisfiesfinally, we find thatthat is, it reverts the translation by p r b * 1 in the previous layer. 
a geometrically intuitive way of thinking about this construction is that w * 1 = r orients the training input data with respect to the given coordinate system, in order to align it with the component-wise action of σ. this allows for a maximal rank reduction via σ, whereby the maximal possible amount of insignificant information is eliminated: p r b * 1 pulls the significant information (in the range of p r ) out of the kernel of σ, while p ⊥ r b * 1 pushes the insignificant information (in the range of p ⊥ r ) into the kernel of σ whereby the latter is eliminated. subsequently, b * 2 places the significant information back into its original position, and w * 2 matches it to the output matrix y in the sense of least squares, with respect to the l 2 -norm.we refer to the shallow network defined with these specific weights and biases w * i , b * i , i = 1, 2, as the constructively trained network. the parameterpenp ∆x 0,j,i , (1.24) measures the relative size between ∆x 0 and x 0,j , j = 1, . . . , q; in particular, penp ∆x 0,j,i scales like |∆x| |x| , and is scaling invariant under x 0 → λx 0 .we then obtain min wj ,bjin order to match an arbitrary test input x ∈ r m with one of the output vectors y j , j ∈ {1, . . . , q}, we let for x ∈ r m ,with the weights and biases of the constructively trained network. then,implies that x matches the j * -th output y j * . we define the metricwhere w 2 := w * 2 r, on the q-dimensional linear subspace range(p ) ⊂ r m . in theorem 3.3, we prove that(1.29) therefore, matching an input x ∈ r m with an output y j * via the constructively trained network is equivalent to solving the metric minimization problem j * = argmin j∈{1,...,q} (d w2 (p x, x 0,j )) (1.30) on the range of p . in other words, x ∈ r m is matched with the output y j * ∈ r q by determining that p x is closest to x 0,j * among all {x 0,j ∈ range(p ) ⊂ r m | j = 1, . . . , q} in the d w2 metric.in theorem 3.2, we focus on the special case m = q, and present the explicit construction of a degenerate local minimum of the cost function (more precisely, a weighted version of the cost function), and obtain an improvement on the upper bound (1.25) by a factor 1 -c 0 δ 2 p for some constant c 0 ≥ 0. here, degeneracy means that for all weights and biases w 1 , b 1 satisfying (1.17), the corresponding minimum of the cost function attains the same value. in particular, this implies that in this range of weights and biases, (1.25) differs from the sharp value with a relative error of order o(δ 2 p ), when q = m . in theorem 3.5, we prove a result for q = m closely related to theorem 3.2, but for the case where (1.17) is not satisfied.we expect our results for the case q = m to be extensible to q < m , but leave a detailed analysis for future work.this paper is organized as follows. in section 2, we give a detailed introduction of the mathematical model describings the shallow network. in section 3, we present the main results, namely theorem 3.1, which we prove in section 4, theorem 3.3, which we prove in section 7, and theorem 3.5. in section 6, we provide a detailed description of the constructively trained network. 
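a rough numerical sketch of the flavour of this construction (weights and biases chosen explicitly rather than by gradient descent); the dimensions, noise level and the argmax readout below are simplifying assumptions for illustration, not the paper's exact parameters or its metric-based matching:

import numpy as np

rng = np.random.default_rng(0)
M, q, n_per = 5, 3, 50
means = rng.normal(size=(q, M))                       # class means, assumed linearly independent
X = np.vstack([m + 0.05 * rng.normal(size=(n_per, M)) for m in means])
labels = np.repeat(np.arange(q), n_per)
Y = np.eye(q)                                         # one output vector y_j per class

# orthoprojector P onto the span of the class means, and an orthogonal R diagonalizing P
Q_basis, _ = np.linalg.qr(means.T)                    # M x q orthonormal basis of range(P)
P = Q_basis @ Q_basis.T
eigvals, eigvecs = np.linalg.eigh(P)
order = np.argsort(eigvals)[::-1]                     # put the q eigenvalue-1 directions first
R = eigvecs[:, order].T

beta1 = 2 * np.linalg.norm(X, axis=1).max()           # "large enough" shift
b1 = np.concatenate([beta1 * np.ones(q), -10 * beta1 * np.ones(M - q)])
W1 = R                                                # first-layer weights: the rotation R

H = np.maximum(W1 @ X.T + b1[:, None], 0.0)           # relu eliminates the M-q "insignificant" rows
H_red = H[:q, :]                                      # reduced q-dimensional representation

# least-squares output layer matched on the class means via the pseudoinverse
H_means = np.maximum(W1 @ means.T + b1[:, None], 0.0)[:q, :]
W2 = Y @ np.linalg.pinv(H_means)

pred = np.argmax(W2 @ H_red, axis=0)
print("accuracy of the constructively 'trained' network:", (pred == labels).mean())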
here, we analyze underparametrized shallow neural networks in the context of supervised learning, with one hidden layer, a relu activation function, an l 2 schatten class (or hilbert-schmidt) cost function, input space r m , output space r q with q ≤ m , and training input sample size n > qm that can be arbitrarily large -therefore, we are considering the underparametrized regime.4), below. this allows for a maximal rank reduction via σ, whereby the maximal possible amount of insignificant information is eliminated: p r b * 1 pulls the significant information (in the range of p r ) out of the kernel of σ, while p ⊥ r b * 1 pushes the insignificant information (in the range of p ⊥ r ) into the kernel of σ whereby the latter is eliminated. , q; in particular, penp ∆x 0,j,i scales like |∆x| |x| , and is scaling invariant under x 0 → λx 0 .29) therefore, matching an input x ∈ r m with an output y j * via the constructively trained network is equivalent to solving the metric minimization problem j * = argmin j∈{1,. in other words, x ∈ r m is matched with the output y j * ∈ r q by determining that p x is closest to x 0,j * among all {x 0,j ∈ range(p ) ⊂ r m | j = 1, . the projector property p 2 = p is thus easily verified, and orthogonality with respect to the euclidean inner product (•, •) on r m holds due to p t = p , whereby (v, p w) = (p v, w) for any v, w ∈ r m .which measures the relative size between x red 0 and p ∆x 0 , as penp ∆x 0,j,i scales like the noise to signal ratio |∆x| |x| of training inputs. let and matching an input x ∈ r m with an output y j * via the constructively trained shallow network is equivalent to the solution of the metric minimization problem j * = argmin j∈{1,.first of all, we note that w 2 p = w 2 has full rank q, therefore x → | w 2 p x| 2 is a non-degenerate quadratic form on the range of p .50) or (3. given a test input x ∈ r m , the constructively trained network metrizes the linear subspace range(p ) of the input space r m , and determines to which equivalence class of training inputs p x is closest.7) implies that where by construction of p , we have p x 0,j = x 0,j . therefore, since also w * 2 p r = w * 2 , we have for x, y ∈ range(p ), defines a metric in the range of p , which is a q-dimensional linear subspace of the input space r m .we can therefore reformulate the identification of an input x ∈ r m with an output y j * via the constructively trained shallow network as the solution of the metric minimization problem j * = argmin j∈{1,. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/42.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/42.txt new file mode 100644 index 0000000000000000000000000000000000000000..14c31907cce2a35189d2837fcc8402044cecbed5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/42.txt @@ -0,0 +1 @@ +university departmental tasks are the most crucial parts of a university system as it is interconnected to institutional management, efficiency and effectiveness. it includes inventory management, generating class schedules, assigning teaching assistants, handling the process of changing labs of students" due to clashes in their schedule etc. the work shown in this paper focuses on solving the university course allocation problem (ucap). this is known as an np-hard problem and it involves assignment of faculty members to their respective courses and timeslots, while satisfying a set of constraints. 
the task of course allocation is quite time consuming when it is done manually. the integration of artificial intelligence (ai) has been instrumental in tackling this problem. however, there is no universal solution to this problem since different institutions have different requirements. researchers have approached this problem by using various optimization algorithms. in this paper, the authors have used a hybrid algorithm (ha) which is a combination of local repair algorithm (lra) and modified genetic algorithm (mga). using this algorithm, an optimal solution can be found in the least possible time.research in course allocation has been carried out for a long time. various algorithms such as graph coloring methods, constraint-based methods, population-based methods, metaheuristic methods, variable neighborhood search, hybrid and hyperheuristic approaches have been used by researchers. population-based methods consist of genetic algorithm (ga), ant-colony optimization, memetic algorithm and metaheuristic methods include tabu search (ts) and simulated annealing (sa) . in recent years, both population based and local-area based methods have been used to solve this problem. among them, ga, which is a population based method, has gained popularity because of its high optimization efficiency. also, sa has been preferred by many researchers since it can avoid getting stuck at the local optimum . ga has some certain advantages over other algorithms. it does not require complex mathematical formulation. although ga provides globally optimal solutions for complex search spaces , it needs more time to execute . several researchers have reduced this execution time by modifying genetic and heuristic operators and integrating local search (ls) techniques . by improving the configuration of several resources, time consumed to reach the optimal solution has been significantly lower compared to conventional ga . another way of approaching the ucap is to use modified ga and cooperative ga. modified ga can produce results at a shorter time and cooperative ga can reduce the cost value . quality of generated timetable increases during the initial phase of ga according to . the authors proposed to use the generated output of ga as input to another optimizing algorithm to enhance the efficiency. more recent works on ga have proposed modifications to genetic operators such as the crossover operator . another variation of ga has been introduced by that uses multiple levels of ga computation which the authors have called mdga (multi-depth ga). these levels were based on the depth of the objective function such as shallow, medium and deep. their goal was to divide the problem to smaller ones to solve the timetabling problem while reducing its time consumption.taking the good properties of local and global area based algorithms, have integrated ga with ls algorithms such as sa, ts, randomized iterative local search (rils). their target was to take advantage of the exploration ability of ga and exploitation ability of ls. the ls algorithms help ga to get out from the local optimum. additionally, fuzzy logic was implemented to check soft constraint violation of the fitness function. considering the fitness and execution time, hybrid of ga and ts have generated the best optimal solution even when the dataset was large. the hybrid of ga and sa have performed worse than the hybrid of ga and ts, but better than the hybrid of ga and rils. recently, has presented a hybrid of parallel genetic algorithm with ls. 
the parallel ga was used to increase the convergence speed and to diversify the population. the solution provided by ga has been improved by minimizing soft constraint violation using ls and the elitism operator. these prevent the ga from getting stuck in the local optimum and lead to better performance.this paper presents a solution to the ucap using a hybrid algorithm. section ii contains the methodology of this research. section iii discusses the implementation results and analysis of the proposed algorithm. finally, section v concludes the paper with the limitations and future scopes of this work. in this paper, the authors have used a hybrid algorithm (ha) which is a combination of local repair algorithm (lra) and modified genetic algorithm (mga). various algorithms such as graph coloring methods, constraint-based methods, population-based methods, metaheuristic methods, variable neighborhood search, hybrid and hyperheuristic approaches have been used by researchers. population-based methods consist of genetic algorithm (ga), ant-colony optimization, memetic algorithm and metaheuristic methods include tabu search (ts) and simulated annealing (sa). by improving the configuration of several resources, time consumed to reach the optimal solution has been significantly lower compared to conventional ga. another way of approaching the ucap is to use modified ga and cooperative ga. modified ga can produce results at a shorter time and cooperative ga can reduce the cost value. the authors proposed to use the generated output of ga as input to another optimizing algorithm to enhance the efficiency. another variation of ga has been introduced bythat uses multiple levels of ga computation which the authors have called mdga (multi-depth ga).taking the good properties of local and global area based algorithms,have integrated ga with ls algorithms such as sa, ts, randomized iterative local search (rils). considering the fitness and execution time, hybrid of ga and ts have generated the best optimal solution even when the dataset was large. the hybrid of ga and sa have performed worse than the hybrid of ga and ts, but better than the hybrid of ga and rils.first of all, in lra, this initial solution was assigned to the solution variable and the faculty members were allocated in the courses based on the solution individuals. the maximum score was updated if the score found in the current iteration appeared to be better than the previous maximum score and the solution was also updated.while current_iteration < total_iteration do selected ← a random individual from current_solution for f in all possible faculties of selected course do selected ← replace previous selected with f if evaluate(current_solution) > evaluate(previous_solution) then solution ← current_solution end if end for end while return solution end function next in mga, the algorithm took the intermediate solution as a parameter.to determine the efficiency of this algorithm more precisely, a comparison with some other common algorithms such as conventional population based algorithms (genetic algorithm, memetic algorithm) and metaheuristic approaches (stochastic hill climbing, simulated annealing, tabu search) was required. 
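an illustrative python rendering of the local-repair loop sketched above; the solution encoding (a course-to-faculty mapping), the eligibility map and the evaluate score used here are assumptions made for the sketch, not the authors' implementation:

import random

def local_repair(solution, eligible, evaluate, total_iterations=1000, seed=0):
    rng = random.Random(seed)
    best = dict(solution)
    best_score = evaluate(best)
    current = dict(best)
    for _ in range(total_iterations):
        course = rng.choice(list(current))        # a random individual from the current solution
        for faculty in eligible[course]:          # try every faculty allowed for that course
            candidate = dict(current)
            candidate[course] = faculty
            if evaluate(candidate) > best_score:  # keep improvements only
                best, best_score = candidate, evaluate(candidate)
        current = dict(best)
    return best

# toy usage: two courses, three faculty members, score = number of distinct faculty used
courses = {"cse101": "f1", "cse202": "f1"}
eligible = {"cse101": ["f1", "f2"], "cse202": ["f1", "f3"]}
print(local_repair(courses, eligible, lambda s: len(set(s.values())), total_iterations=50))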
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/420.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/420.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2c2c408733b6126b5c5b9d4e06f8d8f70f067ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/420.txt @@ -0,0 +1 @@ +with the extraordinary ability to encode complex relationships among entities in a system, graph data are widely observed in many application domains, such as social networks , biological networks , recommender systems , transportation networks , etc. graphs normally model entities as nodes and then construct edges between node pairs to represent underlying relationships. in addition, node attributes are represented as graph signals. traditional deep feed-forward neural networks (nns) only consider the propagation of features (i.e., columns of the graph signal matrix), which leaves the connectivity among nodes unexploited.©2023 ieee. personal use of this material is permitted. permission from ieee must be obtained for all other uses, in any current or future media, including reprinting/republishing this material for advertising or promotional purposes, creating new collective works, for resale or redistribution to servers or lists, or reuse of any copyrighted component of this work in other works.to overcome this limitation, graph neural networks (gnns) are designed to additionally aggregate neighbouring node features in the direction of rows, contributing to better graph representation learning (grl) and eventually outstanding predictive performance in various tasks .framing nns as an optimization problem is a wellestablished research topic in the machine learning community . likewise, numerous recent research on gnns focuses on the optimization formulation of gnn layers or the end-to-end gnn training. some works have shown that grl can be approximated by the solution of some optimization problem with the smoothness assumption of neighbouring node representations . it has also been proven that the end-to-end training for gnns can be formulated as a bilevel optimization problem, or alternatively, a faster multi-view single-level optimization framework . in this work, we will consider the bilevel optimization formulation of gnns, in which the upper-level problem shares the same purpose as optimizing the objective function, and the lowerlevel problem conducts grl.unifying gnns as optimization problems provides a new perspective to understanding and analyzing existing methods. for example, considering gnns in node classification tasks, the smoothness assumption, which tends to homogenize the labels of connected nodes, can lead to several adverse effects such as over-smoothing and inappropriate message-passing for heterophilic graphs . specifically, the so-called over-smoothing issue appears when node features become indistinguishable after several propagations of gnn layers. this phenomenon is more evident in the graphs where connected nodes are often with the same label, known as homophily. on the other hand, with heterophilic graphs where connected nodes have different labels, the smoothing effect induced by gnns can even lead to worse classification outcomes, because the model is prone to assign similar labels to connect nodes with similar features after smoothing.the above-mentioned issues can be mitigated with the concept of "skip connection" . 
for example, appnp combines the original node feature with the representation learned by each layer, which effectively preserves local information and helps mitigate over-smoothing issues. such methods are also helpful with heterophilic graphs because they arxiv:2309.06645v1 12 sep 2023 mitigate the effect of smoothing in representation learning. it has been shown that designing nns as a bilevel optimization problem with penalty on the bregman distance between representations from each two consecutive layers is reminiscent of and even better than applying skip connection . this method simplifies the network architecture by employing a set of invertible activation functions. however, it has no direct extension to gnns as the problem design is limited by the feature propagation of traditional nns.in this paper, we aim to propose a novel bilevel optimization framework for gnns enlightened by the notion of bregman distance that can effectively alleviate the adverse effects of smoothing. similar to other bilevel designs, we develop the upper-level problem to optimize the overall objective function, and the lower-level problem for grl. we show that the optimization framework can be easily applied to the computational format of gnns by introducing the same set of activation functions for bregman nns , and we name such architectures as bregman gnns.the contributions of this work include (1) a novel bilevel optimization framework for designing gnns with bregman distance; (2) an alternative solution to the adverse effects of smoothing with a set of specially-designed activation functions sharing a similar purpose with skip connection; (3) solid numerical experiment results to validate the effectiveness of the new framework. z l ∈ r n×d l denotes the node feature matrix at layer l, where n is the number of nodes, and d l is the embedding size.where b l , c l ∈ r d l+1 and δ l ∈ r. z i ∈ r d l and z ∈ r d l+1 are the feature vectors of sample i at layer l and l+1, respectively. finally, the matrix m l ∈ r d l ×d l+1 is the weight matrix, and e l ∈ r d l+1 ×d l+1 is the parameter matrix presenting the feature correlation. the second term measures the closeness between the feature vectors in layer l and l + 1., relu and arctan) can be written as the inverse gradient of strongly convex legendre functions ϕ, and for some particular choice of g and ϕ, the bregman proximity7).since tr((az l m l )e l z ⊤ ) = ⟨(az l m l )e l , z⟩, eq.×d l+1 be the weight matrix, and b l ∈ r d l+1 be the bias, then eq. if z l and z l+1 share the same dimension i., n × d l , then m l ∈ r d l ×d l . w l ∈ r d l ×d l+1 represents the weights in layer l. b l ∈ r d l+1 ×d l+1 represents the biases in layer l.hence, the parameters that the model should learn are m l , w l , and b l . regarding the term ρ -1 (az l m l ) in the derivation of eq. (6), the utilization of inverse activation function for az l m l brings the feature representation of the previous layer to the present layer. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/421.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/421.txt new file mode 100644 index 0000000000000000000000000000000000000000..c403676b313e98a0ac57dd4af2d1831f12715abd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/421.txt @@ -0,0 +1 @@ +rapid advances in artificial intelligence and machine learning have led to an increase in the deployment of robots and other embodied agents in real-world, safety-critical settings - . 
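as a concrete illustration of the appnp-style skip connection discussed in the preceding excerpt, a minimal numpy sketch of a propagation step that mixes the neighbourhood-averaged representation with the original node features (the normalized adjacency, features and teleport weight alpha are toy assumptions):

import numpy as np

def appnp_propagate(A_hat: np.ndarray, H: np.ndarray, alpha: float = 0.1, steps: int = 10):
    Z = H.copy()
    for _ in range(steps):
        Z = (1.0 - alpha) * (A_hat @ Z) + alpha * H   # propagate, but retain the original features
    return Z

# tiny 3-node example with a row-normalized adjacency matrix
A_hat = np.array([[0.5, 0.5, 0.0],
                  [0.5, 0.5, 0.0],
                  [0.0, 0.0, 1.0]])
H = np.array([[1.0], [0.0], [2.0]])
print(appnp_propagate(A_hat, H))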
as such, it is vital that practitioners - who may be laypeople that lack domain expertise or knowledge of machine learning - are able to query such agents for explanations regarding why a particular prediction has been made, broadly referred to as explainable ai. while progress has been made in this area, prior works tend to focus on explaining agent behavior in terms of rules, vision-based cues, semantic concepts, or trajectories. however, it has been shown that laypeople benefit from natural language explanations, since they do not require specialized knowledge to understand, leverage human affinity for verbal communication, and increase trust under uncertainty. in this work, we seek to develop a framework to generate natural language explanations of an agent's behavior given only observations of states and actions. by assuming access to only behavioral observations, we are able to explain behavior produced by any agent policy, including deep neural networks. unlike prior methods which exhibit limited expressivity due to utilizing language templates, or assume access to a large dataset of human-generated explanations, we propose an approach in which large language models (llms) can be used to generate free-form natural language explanations in a few-shot manner. while llms have shown considerable zero-shot task performance and are well-suited to generating natural language explanations, they are typically applied to commonsense reasoning as opposed to explaining model behavior, and are prone to hallucination - a well-known phenomenon in which false information is presented as fact. it is an open question as to how llms can be conditioned on an agent's behavior in order to generate plausible explanations while avoiding hallucination. our solution, and core algorithmic contribution, is the introduction of a behavior representation (br), in which we distill an agent's policy into a locally interpretable model that can be directly injected into a text prompt and reasoned with, without requiring fine-tuning. a behavior representation acts as a compact representation of an agent's behavior around a specific state and indicates what features the agent considers important when making a decision. we show that by constraining an llm to reason about agent behavior in terms of a behavior representation, we are able to greatly reduce hallucination compared to alternative approaches while generating informative and plausible explanations. an additional benefit of our approach is that it enables interactive explanations; that is, the user can issue follow-up queries such as clarification or counterfactual questions. this is particularly valuable, as explanations are social interactions conditioned on a person's own beliefs and knowledge and thus, are highly individual and may require additional clarification to be comprehensible and convincing. our approach is a three-stage process (see figure 1) in which we, 1) distill an agent policy into a decision tree, 2) extract a decision path from the tree for a given state which serves as our local behavior representation, and 3) transform the decision path into a textual representation and inject it into a pre-trained llm via in-context learning to produce a natural language explanation.
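a minimal sketch of the first two stages of the pipeline described above (policy distillation into a decision tree and extraction of a decision path as the behavior representation); the feature names and the stand-in "policy" are hypothetical, and the real approach feeds the extracted rules into an llm prompt for the final stage:

import numpy as np
from sklearn.tree import DecisionTreeClassifier

feature_names = ["dist_to_victim", "health_kits", "rooms_explored"]   # hypothetical features
rng = np.random.default_rng(0)
states = rng.uniform(0, 1, size=(500, 3))
actions = (states[:, 0] < 0.3).astype(int)            # stand-in "policy" to be distilled

tree = DecisionTreeClassifier(max_depth=3).fit(states, actions)

def decision_path_rules(tree, x):
    # collect the split conditions along the path taken by state x
    node_ids = tree.decision_path(x.reshape(1, -1)).indices
    rules = []
    for node in node_ids:
        feat = tree.tree_.feature[node]
        if feat < 0:                                   # leaf node, no split condition
            continue
        thr = tree.tree_.threshold[node]
        op = "<=" if x[feat] <= thr else ">"
        rules.append(f"{feature_names[feat]} {op} {thr:.2f}")
    return rules

state = states[0]
print("behavior representation:", decision_path_rules(tree, state))
print("predicted action:", tree.predict(state.reshape(1, -1))[0])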
in this work we show how our framework can be applied to multi-agent reinforcement learning (marl) policies -a particularly relevant setting given the complex dynamics and decision-making resulting from agent-agent interactions. through a series of participant studies, we show that, a) our approach generates modelagnostic explanations that are significantly preferred by laypeople over baseline methods, and are preferred at least as much as those generated by a human domain expert; b) when an agent policy is sub-optimal, participants find the ability to interact with our explanations helpful and beneficial; and c) our approach yields explanations with significantly fewer hallucinations than alternative methods of encoding agent behavior. . overview of our three-step pipeline to explain policy actions: left: a black-box policy is distilled into a decision tree; middle: a decision path is extracted from the tree for a given state which contains a set of decision rules used to derive the associated action; right: we utilize an llm to generate an easily understandable natural language explanation given the decision path. lastly, a user can ask further clarification questions in an interactive manner. unlike prior methods which exhibit limited expressivity due to utilizing language templates,,or assume access to a large dataset of humangenerated explanations,, we propose an approach in which large language models (llms) can be used to generate free-form natural language explanations in a fewshot manner. a behavior representation acts as a compact representation of an agent's behavior around a specific state and indicates what features the agent considers important when making a decision. we show that by constraining an llm to reason about agent behavior in terms of a behavior representation, we are able to greatly reduce hallucination compared to alternative approaches while generating informative and plausible explanations.our approach is a three-stage process (see figure1) in which we, 1) distill an agent policy into a decision tree, 2) extract a decision path from the tree for a given state which serves as our local behavior representation, and 3) transform the decision path into a textual representation and inject it into pre-trained llm via in-context learningto produce a natural language explanation. through a series of participant studies, we show that, a) our approach generates modelagnostic explanations that are significantly preferred by laypeople over baseline methods, and are preferred at least as much as those generated by a human domain expert; b) when an agent policy is sub-optimal, participants find the ability to interact with our explanations helpful and beneficial; and c) our approach yields explanations with significantly fewer hallucinations than alternative methods of encoding agent behavior. overview of our three-step pipeline to explain policy actions: left: a black-box policy is distilled into a decision tree; middle: a decision path is extracted from the tree for a given state which contains a set of decision rules used to derive the associated action; right: we utilize an llm to generate an easily understandable natural language explanation given the decision path.natural language explanations: outside of explaining agent behavior, natural language explanations have received considerable attention in natural language processing areas such as commonsense reasoning,and natural language inference. 
while recent works have investigated the usage of llms in explaining another model's behavior by reasoning directly over the latent representation, this approach has yielded limited success thus far and motivates the usage of an intermediate behavior representation in our work. our approach consists of three steps: 1) we distill the agent's policy into a decision tree, 2) we generate a behavior representation from the decision tree, and 3) we query an llm for an explanation given the behavior representation.the last step in our approach is to define a prompt that constrains the llm to reason about agent behavior with respect to a given behavior representation., state and action descriptions, b) a description of what information the behavior representation conveys, c) in-context learning examples, and d) the behavior representation and action that we wish to explain.we conduct an irb-approved human-subject study with 32 participants in which we present ten environment states and actions -5 each for the medic and engineer -to each participant alongside explanations generated by a) our proposed framework, b) an explanation generated via language templates directly from the behavior representation, and c) a human-generated explanation. we compared our explanations with those generated by an llm when the behavior representation was replaced by a list of state-action pairs randomly sampled from the agent's trajectories τ , and when the behavior representation was removed entirely. an interesting observation is that the explanations generated without a behavior representation (no br) actually produces fewer factually wrong hallucinations than a behavior representation consisting of state-action why did the medic not consider the victim in room (2, 2)?. through construction of a behavior representation, we are able to prompt an llm to reason about agent behavior in a way that produces plausible and useful explanations, enables a user to interact and issue follow-up queries, and results in a minimal number of hallucinations, as measured through two participant studies and empirical experiments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/422.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/422.txt new file mode 100644 index 0000000000000000000000000000000000000000..c427ca165a588ba3e74d45f48795c1ace9669da6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/422.txt @@ -0,0 +1 @@ +causality plays an important role in enhancing not only the prediction power of a model but also its interpretability . causal explanations are more appropriate for human understanding than purely statistical explanations . accordingly, comprehending the causal connections between the variables of a system can enhance the interpretability of interpretable machine learning (iml) methods themselves.interpretable models such as linear regression or decision trees do not, despite their name, always lend themselves to causal interpretations. to illustrate this point, consider running multilinear regression on the predictors x 1 , x 2 and outcome y within a system whose variables are causally related as depicted in the graph of figure 1. the regression coefficients β 1 and β 2 of x 1 and x 2 might yield large values, which may be (and are often in practice) interpreted as suggesting a causal relationship. however, a causal interpretation of β 1 would not be appropriate. 
although x 1 might provide predictive power over y , this does not imply a causal relationship, since this predictive power is due to the confounder w . consequently, intervening on x 1 would not impact the outcome y .in current model-agnostic methods, a causal interpretation is often desirable but rarely possible. in partial dependence plots (pdps) , the partial dependence of a model outcome ŷ on a variable x i coincides with the backdoor criterion formula when the conditioning set encompasses all the other covariates x j =i . consequently, there is a risk of disregarding statistical dependence or, conversely, finding spurious dependence, by conditioning on causal descendants of x i . therefore, pdps (along with the closely related individual conditional expectation (ice) lines ) generally lack a causal interpretation. similarly, when utilizing (local interpretable model-agnostic explanations) lime to evaluate the importance of a feature for an individual, a causal interpretation cannot be guaranteed. lime fits a local model around the point of interest and assesses which features, when perturbed, would cause the point to cross the decision boundary of the model. however, intervening on a feature in such a way as to cross the model's decision boundary does not guarantee an actual change in the outcome in reality. this is because the model was trained on observational data, and that feature may merely be correlated with the outcome through a confounding factor, for example, rather than having a causal effect on the outcome.in both cases just described, it is the presence of confounders, selection bias, or an incorrect direction of causality seemingly implied by the model that can lead to misleading predictions and interpretations. we need a way to select which features are causally relevant -i.e. give us control over the chosen outcome variable. information theoretical quantities such as mutual information are often used to assess the relevance of a feature with respect to a given outcome variable , but this relevance is still purely statistical. this is a common issue when using standard information theoretical quantities in situations that require consideration of the underlying causal relationships. a version of mutual information which takes into account the causal structure of the system would solve this problem. this is what we set out to develop in this work.in our research, we extend traditional conditional entropy and mutual information to the realm of interventions, as opposed to simple conditioning. this extension drew inspiration from the conceptual and philosophical work presented in 1 . we dub these constructs "causal entropy" and "causal information gain". they are designed to capture changes in the entropy of a given variable in re- 1 the reader is referred to section 6 for a detailed discussion about this.sponse to manipulations affecting other variables. we derive fundamental results connecting these quantities to the presence of causal effect. we end by illustrating the use of causal information gain in selecting a variable which allows us to control an outcome variable, and contrast it with standard mutual information.the novelty of our work consists of providing rigorous definitions for causal entropy and causal information gain, as well as deriving some of their key properties for the first time. 
these contributions set the foundations for the development of methods which correctly identify features which provide causal control over an outcome variable.this paper is organized as follows. in section 2, we introduce the definitions of quantities from the fields of causal inference and information theory that will be used throughout the rest of the paper. section 3 includes a simple example of a structural causal model where standard entropy and mutual information are inadequate for obtaining the desired causal insights. in section 4, we define causal entropy and explore its relation to total effect. section 5 discusses the definition of causal information gain and investigates its connection with causal effect. furthermore, it revisits the example from section 3, showing that causal entropy and causal information gain allow us to arrive at the correct conclusions about causal control. in section 6, we compare the definitions and results presented in this paper with those of previous work. finally, in section 7, we discuss the obtained results and propose future research directions.one can model the causal structure of a system by means of a "structural causal model", which can be seen as a bayesian networkwhose graph g has a causal interpretation and each conditional probability distribution (cpd) p (x i | pa xi ) of the bayesian network stems from a deterministic function f xi (called "structural assignment") of the parents of x i . , n xn ) whose variables we call exogenous or noise. a set s of n structural assignments f xi for x i from (pa xi , n xi ), where pa xi ⊆ x are called parents of x i . furthermore, we write x := f x (x, n x ) to mean that f x (x, n x ) is a structural assignment for x. its entailed distribution p c x is the unique joint distribution over x such that ∀x i ∈ x, x i = f xi (pa xi , n xi ). , x n ). formally 2: xi=x) , where c do(xi=x) is the scm that differs from c only in that the structural assignment f xi (pa xi , n xi ) is replaced by the structural assignment fxi ( ñxi ) = ñxi , where ñxi is a random variable with range r xi and3p ñx i (x i ) = 1 x (x i ) for all x i ∈ r xi . additionally, we introduce a discrete variable x 1 to represent the number of individuals wearing shorts, which can be categorized as few (x 1 = 0), some (x 1 = 1), or many (x 1 = 2). it will be the average uncertainty one has about y if one sets x to x with probability p x ′ (x), where x ′ is a new auxiliary variable with the same range as x but independent of all other variables, including x. let y , x and x ′ be random variables such that x and x ′ have the same range and x ′ is independent of all variables in c.if there is a total causal effect of x on y , there cannot be a total causal effect of y on x (if x is a cause of y , y cannot be a cause of x). this view of mutual information allows for a straightforward analogous definition in the causal case, so that one can take causal information gain i c (y | do(x ∼ x ′ )) to signify the average reduction in uncertainty about y if one sets x to x with probability p x ′ (x). let y , x and x ′ be random variables such that x ′ is an intervention protocol for x. the causal information gain i c (y | do(x ∼ x ′ )) of y for x given the intervention protocol x ′ is the difference between the entropy of y w. and since both post-intervention distributions have the same entropy h y ∼bern(q) (y ) = h y ∼bern(1-q) (y ), then the causal entropy will also be h c (y | x ∼ x ′ ) = h y ∼bern(1-q) (y ) = h(y ) (for any chosen of x ′ ). 
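a small python sketch of the two quantities defined above, written directly from the stated definitions; the distributions and the intervention protocol are toy inputs, and all post-intervention distributions p(y | do(x = x)) are assumed to be given:

import numpy as np

def entropy(p):
    p = np.asarray(p, dtype=float)
    p = p[p > 0]
    return -np.sum(p * np.log2(p))

def causal_entropy(p_y_do_x, protocol):
    # average uncertainty about y when x is set to x with probability protocol[x]
    return sum(w * entropy(row) for w, row in zip(protocol, p_y_do_x))

def causal_information_gain(p_y, p_y_do_x, protocol):
    # reduction in uncertainty about y caused by intervening on x
    return entropy(p_y) - causal_entropy(p_y_do_x, protocol)

# toy numbers: intervening on x leaves y's distribution unchanged, so the gain is ~0
p_y = [0.5, 0.5]
p_y_do_x = [[0.5, 0.5], [0.5, 0.5]]
print(causal_information_gain(p_y, p_y_do_x, protocol=[0.5, 0.5]))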
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/423.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/423.txt new file mode 100644 index 0000000000000000000000000000000000000000..012ccc2d98fc84237607e47a19541627fffd8d40 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/423.txt @@ -0,0 +1 @@ +cubesats have transformed the space industry, providing a cost-effective and efficient way to conduct diverse space missions, from scientific observations to advanced communications . a rising focus is on equipping spacecraft with advanced autonomous decision-making capabilities . achieving this relies on using automated planning tools to reduce human involvement and effectively handle complex and uncertain environments. implementing on-board planning mechanisms in spacecraft missions brings substantial benefits, including increased spacecraft availability, heightened reliability, and reduced ground segment operational costs. however, despite their potential, cubesats face significant task scheduling challenges in distributed systems due to processing limitations . efficient energy management is a primary concern, given their reliance on limited solar panel-derived energy. ensuring they operate within these constraints while maintaining high reliability in space underscores the importance of fault tolerance in satellite operations .in cubesat operations, the criticality of energy management is accentuated by their inherent power limitations . the complexity of the energy consumption issue is compounded by task-dependent variability, especially in observation missions with sophisticated sensor payloads like high-resolution cameras, adaptive sampling, and data transmission. solving planning problems in this context typically involves a constrained optimization . however, the inherent uncertainties and complexities of space environments, combined with task variability and unpredictability, often surpass the capabilities of traditional tools .one promising solution gaining attention involves applying artificial intelligence to dynamic task scheduling . artificial intelligence benefits from declining computational costs, abundant data, and advanced algorithms, with deep learning (dl) and reinforcement learning (rl) playing pivotal roles . rl, a subset of machine learning (ml), focuses on training agents to make sequential decisions by interacting with an environment to maximize cumulative rewards .rl has emerged as a crucial paradigm in ml with diverse applications and untapped potential across various domains. one promising application is dynamic task scheduling, where rl algorithms offer significant advantages. the repetitive nature of scheduling decisions aligns well with the dataintensive training methods of rl . moreover, rl's unique feature is its ability to adaptively make decisions in real-time without requiring a comprehensive environmental model .in the literature, the satellite task allocation field's primary focus is on earth observation (eo) missions, which present a classic example of a challenging multi-objective combinatorial problem . this complexity makes them suitable candidates for solutions using deep reinforcement learning (drl) methods. huang, et al. formulate eo task scheduling problems by introducing the concepts of visible window (vw) and observation window (ow). the decision variables in this context typically involve continuous mahya ramezani, m. 
amin alandihallaj, jose luis sanchez-lopez, and andreas hein specifically, it determines task acceptance without specifying ows for task execution and it assumes complete knowledge of the resource requirements for each task, rendering it impractical for realworld implementation.in the method, an encoder first prioritized tasks, emphasizing critical attributes that influence the success rate in task scheduling and energy consumption like task duration, spatial constraints, memory, and computational requirements. by employing similarityattention-based mechanisms and referencing data from prior task executions stored in the high-level reinforcement learning replay experience, this estimator predicts the energy consumption for each task based on each task id.the encoder is designed based on the attention-based mechanism by focusing on the failure and similarity of past tasks for feature extraction and task prioritization to assist the reinforcement learning algorithm and the energy consumption estimator in making more accurate decisions.2) task classification a task's complexity score incorporates computational demands 𝐶 𝑑 , similar task historical failure in estimation energy consumption 𝐻 𝑓 , and duration 𝑑.to accurately predict energy consumption before task execution, our methodology leverages an mlp network, informed by outputs from the encoder and the high-level experience replay including actual energy consumption of previous tasks.to address the cubesat task scheduling challenge, we propose a hierrl framework with two primary layers: a highlevel policy for global task distribution based on broader constraints, and a low-level policy for real-time monitoring and system adjustment.the state of the system consists of the state of the cubesat includes remind storage and computational resource, remind energy levels of each cubesat, the temperature of each cubesat, cubesats' orientation, required time for each task 𝑡 𝑟 , and the queue of tasks waiting for execution (𝑃 𝑠 , computational requirement, ow, location, status of processed and estimated energy consumption from estimator).actions include assigning and skipping a task, assigning a new task from the task queue to a specific cubesat and skip task, deciding not to assign a task to any cubesat, and keeping it in the queue for later assignment. given 𝐸 𝑐 𝑖 (𝑡) = 𝐸 𝑒 𝑖 𝐸 max be representing the normalized energy consumed by 𝐶 𝑖 for the task, where 𝐸 𝑒 𝑖 is energy consumed, and 𝐸 max is the maximum energy capacity of the cubesat, when 𝐸 𝑐 𝑖 > 1, the penalty is quantified as 𝑅 𝑃 𝑖,1 = 𝛽(𝐸 𝑐 𝑖 (𝑡) -1). moreover, the action space consists of two primary actions: keeping the task (𝑎 = 0) and reallocating the task (𝑎 = 1), which involves returning it to the global task queue for potential reassignment to another cubesat.for the energy-based component of the reward, when the remaining energy surpasses the estimated energy requirement for the task a positive reward is conferred if the decision is to keep the task.the low-level task reassignment is simulated when a cubesat either failed or is deemed unsuitable for task execution.to ascertain the superiority of our proposed hierrl methodology, we compared it with the task scheduling algorithm using maddpg and random scheduling, assessing metrics like task completion rate and adaptability.3highlights the average task success count achieved by the three methodologies (hierrl, random task scheduling, and maddpg) under examination. 
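a tiny sketch of the energy-overrun penalty term described above, in which consumed energy is normalized by the cubesat's capacity and a penalty proportional to the excess applies only when the normalized consumption exceeds 1; beta and the numbers are assumed values:

def energy_penalty(energy_consumed: float, energy_capacity: float, beta: float = 1.0) -> float:
    e_c = energy_consumed / energy_capacity      # normalized consumption E_c
    return beta * (e_c - 1.0) if e_c > 1.0 else 0.0

print(energy_penalty(12.0, 10.0, beta=2.0))      # overrun  -> positive penalty
print(energy_penalty(8.0, 10.0, beta=2.0))       # in budget -> 0.0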
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/424.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/424.txt new file mode 100644 index 0000000000000000000000000000000000000000..d50773ecd625f45faef62052178a9dcdbe3083dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/424.txt @@ -0,0 +1 @@ +to address privacy concerns in distributed learning, federated learning (fl) has emerged as a viable solution, enabling multiple local clients to collaboratively train a model while retaining their private data and without sharing it . however, in real-world scenarios, data across different devices is not identically and independently distributed (non-iid), which poses challenges in model training and convergence .significant efforts have been made to improve performance and analyze convergence in non-iid fl , but few have provided theoretical guarantees by establishing generalization bounds. most existing fl generalization analyses rely on the probably approximately correct (pac) bayesian theory, first formulated by mcallester . building on the mcallester's bound, these analyses typically compute local bounds or apply existing pac-bayesian bounds directly, overlooking the non-iid nature of fl. this approach is flawed, as the pac-bayesian framework assumes that each data point is iid, ignoring non-iid data and directly employing the pac-bayesian theory, which potentially results in inaccurate or overly relaxed bounds. consequently, techniques developed for the pac-bayesian framework are not directly applicable to non-iid fl. therefore, this work aims to advance the theoretical underpinnings of non-iid fl.related works. the pac-bayesian framework has been extensively researched in recent years , yielding tighter and non-vacuous bounds. however, there has been limited exploration in the context of fl. some studies have proposed information theoretic-based pac-bayesian bounds using ratedistortion theory to prove generalization bounds , providing an information-theoretic perspective on enhancing generalization capacity. others have followed mcallester's approach, attempting to directly apply the fl paradigm to the bound. for example, the authors in to address privacy concerns in distributed learning, federated learning (fl) has emerged as a viable solution, enabling multiple local clients to collaboratively train a model while retaining their private data and without sharing it. building on the mcallester's bound, these analyses typically compute local bounds or apply existing pac-bayesian bounds directly, overlooking the non-iid nature of fl. this approach is flawed, as the pac-bayesian framework assumes that each data point is iid, ignoring non-iid data and directly employing the pac-bayesian theory, which potentially results in inaccurate or overly relaxed bounds.mcallester's bound in a multi-step fl scenario; omni-fedgeused the pac-bayesian learning framework to construct a weighted sum objective function with a penalty, considering only a local client bound instead of the entire system, which precludes obtaining global information; and fedpacemployed pac learning to balance utility, privacy, and efficiency in fl. first, we derive a federated pac-bayesian learning bound for non-iid local data, providing a unified perspective on federated learning paradigms. moreover, let ℓ : z × w → r + be a given loss function and h k ∈ h is a stochastic estimator on client k where h is the hypothesis class. 
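for reference, a hedged python sketch of one common (mcallester/maurer-style) pac-bayes bound, evaluated for a diagonal-gaussian posterior q and prior p over model weights; this is a generic illustration of the kind of bound discussed above, not the non-iid federated bound derived in the paper, and all numbers are toy values:

import numpy as np

def kl_diag_gaussians(mu_q, sigma_q, mu_p, sigma_p):
    # kl(q || p) for diagonal gaussians, summed over dimensions
    var_q, var_p = sigma_q ** 2, sigma_p ** 2
    return 0.5 * np.sum(np.log(var_p / var_q) + (var_q + (mu_q - mu_p) ** 2) / var_p - 1.0)

def mcallester_bound(emp_risk, kl, n, delta=0.05):
    # empirical risk of q plus a complexity term that grows with kl(q || p)
    return emp_risk + np.sqrt((kl + np.log(2.0 * np.sqrt(n) / delta)) / (2.0 * n))

mu_p, sigma_p = np.zeros(10), np.ones(10)             # prior (e.g. the previous global posterior)
mu_q, sigma_q = 0.1 * np.ones(10), 0.9 * np.ones(10)  # a client's learned posterior
kl = kl_diag_gaussians(mu_q, sigma_q, mu_p, sigma_p)
print(mcallester_bound(emp_risk=0.12, kl=kl, n=500))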
in the pac-bayesian framework, each client holds a tailored prior distribution p k . the objective of each client is to furnish a posterior distribution q k ∈ m, where m denotes the set of distributions over h., where p and q are the global prior and posterior, respectively, and the averaging weight p = (p(1), . note that for each client k ∈ , p i is independent of s 1 , . since the prior p t+1 k = q t k and q t k is equal to the aggregated global posterior at epoch t -1, the prior can be viewed as the global knowledge. in particular, the prior p k and posterior q k are defined as follows:.calculating the gradients ∇ µ q k ,i and ∇ µ q k ,i directly can be intricate, but the re-parameterization trick is capable of tackling this issue. specifically, the aggregation weight p(k) is defined as the sample ratio of client k relative to the entire data size across all clients.1, 0. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/425.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/425.txt new file mode 100644 index 0000000000000000000000000000000000000000..54401f3c2c26ae675773c847022b4d9aa5f6eda1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/425.txt @@ -0,0 +1 @@ +the covid-19 pandemic has emerged as a global health crisis, affecting millions of individuals worldwide. while the disease primarily presents as a respiratory infection, accumulating evidence suggests that covid-19 can also have significant neurological implications in both the acute and post-recovery phases. neurological sequelae in post-covid-19 patients have become a subject of growing concern among healthcare professionals and researchers alike . covid-19 is caused by the severe acute respiratory syndrome coronavirus 2 (sars-cov-2) and was first reported in december 2019 in wuhan, china . since then, the virus has rapidly spread across the globe, leading to widespread morbidity and mortality . initially, the focus was on the respiratory manifestations of the disease, such as pneumonia and acute respiratory distress syndrome (ards). however, as the pandemic evolved, numerous reports of neurological complications in covid-19 patients surfaced . neurological symptoms associated with covid-19 encompass a broad spectrum, ranging from mild symptoms like headache and anosmia to more severe manifestations, including encephalopathy, stroke, and guillain-barré syndrome . these neurological complications may occur during the acute phase of the infection or persist during the post-recovery phase, leading to what is now commonly referred to as post-covid-19 syndrome or long covid . the long-term neurological sequelae in post-covid-19 patients pose unique challenges to healthcare providers. there is a pressing need to understand the underlying mechanisms responsible for these neurological manifestations and to identify potential risk factors that may predict the development of neurological complications. additionally, early recognition of neurological sequelae is crucial for timely intervention and the implementation of appropriate treatment strategies . given the complexity of covid-19 and its diverse neurological implications, this study seeks to explore the long-term neurological sequelae in a cohort of post-covid-19 patients. the primary objective is to utilize a machine learning approach to predict neurological outcomes based on a wide array of clinical data and neuroimaging parameters. 
by identifying predictive factors, this research aims to contribute to the development of personalized treatment plans and improved management of post-covid-19 neurological complications . in the pursuit of this research, we draw upon a substantial body of previous studies conducted by our team and other researchers. notably, studies such as yousif et al. investigated hematological changes in covid-19 patients, providing insights into the systemic effects of the virus. additionally, hadi et al. explored the role of inflammatory pathways in conditions such as atherosclerosis, shedding light on potential mechanisms relevant to our study. other research, such as hasan et al. and yousif et al. , delved into microbiological and genetic aspects that might intersect with neurological manifestations in covid-19. furthermore, our team has undertaken research into the interaction between covid-19 and various health conditions. sadiq et al. investigated the effect of anesthesia on maternal and neonatal health during cesarean section, an area that touches upon the broader impact of covid-19 on healthcare systems. yousif explored the potential role of cytomegalovirus as a risk factor for breast cancer, demonstrating our commitment to understanding the multifaceted aspects of viral infections. moreover, our research extends to areas of immunology and oncology. yousif et al. delved into the association between natural killer cell cytotoxicity and non-small cell lung cancer progression, highlighting the relevance of immune responses in viral infections and cancer. sadiq et al. examined the correlation between c-reactive protein levels and preeclampsia with or without intrauterine growth restriction, indicating our dedication to investigating clinical markers in complex health conditions. our studies also include microbiological investigations. yousif et al. conducted phylogenetic characterization of staphylococcus aureus isolated from women with breast abscesses, showcasing our expertise in microbiological research. mohammad et al. explored the effect of caffeic acid on doxorubicin-induced cardiotoxicity, which demonstrates our commitment to studying therapeutic interventions in the context of viral infections. finally, our recent investigations, such as those by al-jibouri et al., sadiq et al. , and sahai et al. , have touched upon the psycho-immunological status of recovered covid-19 patients, the impact of hematological parameters on pregnancy outcomes among pregnant women with covid-19, and the application of machine learning in predicting insurance risk, respectively. these studies underscore our dedication to comprehensively understanding the implications of covid-19 and related factors . by building upon this body of research and leveraging our expertise, we aim to contribute valuable insights into the long-term neurological consequences of covid-19, ultimately benefiting patient care and healthcare strategies in the face of this ongoing global health challenge. while the disease primarily presents as a respiratory infection, accumulating evidence suggests that covid-19 can also have significant neurological implications in both the acute and post-recovery phases. neurological sequelae in post-covid-19 patients have become a subject of growing concern among healthcare professionals and researchers alike. however, as the pandemic evolved, numerous reports of neurological complications in covid-19 patients surfaced. 
neurological symptoms associated with covid-19 encompass a broad spectrum, ranging from mild symptoms like headache and anosmia to more severe manifestations, including encephalopathy, stroke, and guillain-barré syndrome. these neurological complications may occur during the acute phase of the infection or persist during the post-recovery phase, leading to what is now commonly referred to as post-covid-19 syndrome or long covid. the long-term neurological sequelae in post-covid-19 patients pose unique challenges to healthcare providers. there is a pressing need to understand the underlying mechanisms responsible for these neurological manifestations and to identify potential risk factors that may predict the development of neurological complications. given the complexity of covid-19 and its diverse neurological implications, this study seeks to explore the long-term neurological sequelae in a cohort of post-covid-19 patients. by identifying predictive factors, this research aims to contribute to the development of personalized treatment plans and improved management of post-covid-19 neurological complications.and yousif et al. sadiq et al., sadiq et al., have touched upon the psycho-immunological status of recovered covid-19 patients, the impact of hematological parameters on pregnancy outcomes among pregnant women with covid-19, and the application of machine learning in predicting insurance risk, respectively.outcome prediction and evaluation: machine learning models will be utilized to predict long-term neurological sequelae in post-covid-19 patients. future prospective studies with larger sample sizes and diverse populations are warranted to validate the predictive models and explore additional factors contributing to neurological complications in post-covid-19 patients. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/426.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/426.txt new file mode 100644 index 0000000000000000000000000000000000000000..36aca2c6672a21afc2d6abd0bd1f3b6c4a85eb01 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/426.txt @@ -0,0 +1 @@ + a tca can transform data with modal clusters into data with a uni-modal distribution if the activation function approximates the cumulative distribution of the input data. subject to mild constraints, any fixed dimension-reducing transformation, y = t (x), together with the known or assumed feature distribution g(y), corresponds to a probability density function (pdf) on the input data-given by g(x) = p0,x(x) p0,x(y) g(y), where p 0,x (x) is a prior distribution and p 0,x (y) is its mapping to y. in the notation "g" represents the given feature distribution, "g" is its projection to the higher-dimensional range of x, and subscript "0" is a reminder that p 0,x (x) can be seen as a a reference distribution. therefore p 0,x (y) has region of support on the range of y, but is derived from the reference distribution on x. note that if p 0,x (x) is selected for maximum entropy, then g(x) is unique.to draw a sample of g(x), we first draw a sample y from g(y), then draw x randomly from the set {x : t (x) = y}, weighted by p 0,x (x). note that p 0,x (y) and p 0,y (y) are two different distributions with support on the range of y. while p 0,x (y) = t 1 , p 0,y (y) is a canonical maxent prior distribution for y. in any given layer of a ffnn, the hidden variable y ∈ r m is computed from the layer input x ∈ r n , where we assume n > m , i. 
as an estimate of x, we use the conditional mean x = e 0 (x|z), i.e. the conditional mean under the prior p 0,x (x). the solution to (1) is not guaranteed to exist unless z = w ′ x for some x in the range of x, denoted by x n. to use tcas in the initial stacked rbm, the tcas are initialized to a neutral state so that each operates similarly to a simple activation function (the base activation function, see section ii-c), and simple activation functions are used in the return (synthesis) path. where the functions f k (x) are simple activation functions, and w = {w k }, a = {a k } and b = {b k } are scale, weights, and bias parameters, respectively. as a convention, all activation functions except the first activation function f 1 (x) are sigmoid-like activations (see ted activation in table i). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/427.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/427.txt new file mode 100644 index 0000000000000000000000000000000000000000..39d22c115c5474a605aef00f702cbf5e7d624aca --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/427.txt @@ -0,0 +1 @@ +physics models encoded in partial differential equations (pde) are crucial in the understanding of many natural phenomena such as sound, heat, elasticity, fluid dynamics, quantum mechanics, etc. learning these physics models from data is essential to enhance our understanding of the world, accelerate scientific discovery, and design new technologies. deep neural networks (e.g., physics-informed neural nets raissi et al. , hamiltonian neural nets greydanus et al. ) have been successfully deployed in this domain. in these approaches, the spatial and temporal updates of a pde model are matched with ground truth experimental observations. a loss function is defined based on the mismatch between the simulation and the ground truth, and then the physics model is learned by back-propagation of error gradients of the loss function xue et al. , raissi et al. . nevertheless, such learning processes are expensive because of the need to back-propagate gradients over spatial and temporal simulations involving millions of mutually interacting elements. one line of successful approaches to accelerate the learning of pde models exploits the sparse nature of system changes over time. for example, during the microstructure evolution of many engineering materials, only the boundary of the microstructure changes while a large portion of the system remains unchanged. it is also assumed that the corresponding pde models can be decomposed into an affine function of parameter functions and feature functions nasim et al. , sima and xue . the combination of decomposability of the pde model and sparse changes/updates over time together creates opportunities for efficient algorithms which handle learning in compressed spaces using random projections and/or locality sensitive hashing. nevertheless, such decomposability structure applies to a limited class of pdes, and sparsity structures may change with varying initial and boundary conditions (bc/ic). this paper proposes a more general approach for efficiently learning pde models via random projection, by exploiting sparsity in both the value domain and the frequency domain, and also approximating nondecomposable functions with decomposable polynomials. we observe that systems modeled by pdes often have slow and gradual updates across wide regions in addition to a few rapid changes concentrated in small "interfacial" regions.
such systems are frequently found in the real world. for example, during manufacturing processes such as laser sintering of powder materials into dense solids, the grain boundary changes sharply at the interface area (sparse local change), while temperature rises gradually around the whole material (dense global change). systems with dense global change and sharp interface change limit the application of existing approaches nasim et al. , sima and xue for efficiently learning relevant pde models. however, we observe that these temporal change signals can again become sparse if they are decomposed in value and frequency domains. the fourier uncertainty principle folland and sitaram implies that a signal sparse in the value domain should be dense in the frequency domain and vice versa. as a result, this decomposition can capture the sparse side of signals, whether they are in the value or the frequency domain. we propose random projection based efficient learning (reel), a general approach to expedite the learning of pdes, using signal decomposition into value and frequency domains, polynomial approximation with taylor series, and compression via random projection. the key innovation of reel is the inclusion of a signal decomposition step in the pde learning framework. with this step, we convert dense value domain system updates into sparse signal components in the value and frequency domains. we also use polynomial approximation with taylor series to approximate pde models, which otherwise cannot be written in the decomposable form of parameter functions and feature functions. an example is the phase field model of sintering of powder compacts zhang and liao . after decomposition, the sparse signal components in the value and frequency domains are compressed to smaller dimensions by random projection. the learning of pde models is then carried out in the compressed space. notice that both the signal decomposition and compression steps are carried out once as a preprocessing step and in parallel, thus adding little computation overhead. an overview of reel is shown in figure 1. theoretically, we show that the sparse projection into the value and frequency domains biases learning in a limited way. we derive a constant factor approximation bound between the projected loss function and the original one with a poly-logarithmic number of projected dimensions. experimentally, we evaluate our approaches in several real-world problems. the first is laser sintering of materials, which involves both grain boundary and temperature changes. a second application is nanovoid defect evolution in materials under irradiation and high temperature, in which both void surface movement and the emergence of interstitial and vacancy densities are considered.
we demonstrate that using our reel algorithm leads to a 70-98% reduction in training times when the data is compressed to 1% of its original size, and the learned models' performance is comparable to the baseline. our contributions can be summarized as follows: 1) we propose an efficient method to learn pdes that have both sparse and dense feature functions, 2) we extend the applicability of random projection on sparse functions to both sparse and dense functions, using an appropriate decomposition of representation into both the value and frequency domains, 3) we extend the applicability of random projection for learning pde models that are not readily decomposable, by using taylor series approximation, and 4) we show empirical evidence that our learning method reel can greatly accelerate the current state of pde model learning. this paper proposes a more general approach for efficiently learning pde models via random projection, by exploiting sparsity in both the value domain and the frequency domain, and also approximating nondecomposable functions with decomposable polynomials. we propose random projection based efficient learning (reel), a general approach to expedite the learning of pdes, using signal decomposition into value and frequency domains, polynomial approximation with taylor series, and compression via random projection. our contributions can be summarized as follows: 1) we propose an efficient method to learn pdes that have both sparse and dense feature functions, 2) we extend the applicability of random projection on sparse functions to both sparse and dense functions, using an appropriate decomposition of representation into both the value and frequency domains, 3) we extend the applicability of random projection for learning pde models that are not readily decomposable, by using taylor series approximation, and 4) we show empirical evidence that our learning method reel can greatly accelerate the current state of pde model learning. ∂u/∂t in equation (1) can be formulated as a combination of sparse feature functions of system state variables u, multiplied by functions of learnable pde model parameters . here, u is the system state variable of interest, w i are feature functions of u, often sparse in the value domain, and independent of pde model parameters . the ground truth system state change ∆u gt is also compressed to p ∆u gt by one-time random projection with p . second, for decomposed pde models, the system change ∆u and the feature functions w i in equation 3 may not be sparse in value domains; however, a change of representation domains, i.e. a fourier transform to the frequency domain, can make them sparse. using these two techniques of polynomial approximation and signal decomposition with fourier transform, our reel algorithm transforms pde models into decomposable pdes with sparse value and sparse frequency domain feature functions, and then uses random projection to compress the sparse signals. we use value and frequency domain signal decomposition as outlined in algorithm 1 to convert dense ∆u gt (t) signals into a combination of sparse value and sparse frequency signals. after value and frequency domain decomposition, ∆u gt (t) is separated into a sparse frequency component ∆u gt freq (t) and a sparse value component ∆u gt val (t). after signal decomposition into sparse value and sparse frequency domain components, we use one-time random projection with a random matrix p to compress all the sparse signal components for both the system state change (yielding p ∆u gt val , p ∆u gt freq ) and the feature functions (yielding p w i(val) , p w i(freq) ).
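as a rough illustration of the decomposition-and-projection idea described above (not the paper's algorithm 1), the following python sketch splits a dense update signal into a thresholded sparse value-domain part and the fourier coefficients of the smooth remainder, then compresses both with random gaussian projection matrices; the threshold and the projected dimensions are assumptions made for the example.

import numpy as np

def decompose_value_frequency(du, value_thresh):
    # keep large, localized entries as the sparse value-domain component
    du_val = np.where(np.abs(du) > value_thresh, du, 0.0)
    # the smooth remainder is dense in the value domain but sparse in frequency
    du_freq = np.fft.rfft(du - du_val)
    return du_val, du_freq

def random_projection(dim_in, dim_out, seed=0):
    rng = np.random.default_rng(seed)
    return rng.normal(0.0, 1.0 / np.sqrt(dim_out), size=(dim_out, dim_in))

# toy signal: a gradual global drift plus a sharp interfacial spike
n = 1024
x = np.linspace(0.0, 1.0, n)
du = 0.05 * np.sin(2 * np.pi * x) + np.where(np.abs(x - 0.5) < 0.005, 1.0, 0.0)

du_val, du_freq = decompose_value_frequency(du, value_thresh=0.5)
p_val = random_projection(n, 64)                  # compress the value-domain component
p_freq = random_projection(du_freq.size, 64)      # compress the frequency coefficients
compressed = (p_val @ du_val, p_freq @ du_freq)   # learning would proceed on these compressed signals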
the rst part inside the summation penalizes the difference between the predicted compressed value domain change and ground truth compressed value domain change, while the second part penalizes the difference between the frequency domain counterparts. after value and frequency domain decomposition as outlined in algorithm 1, let ∆u gt val (t) and ∆u val (t) have at most k 1 non-zero elements, and all ∆u gt f req (t) and ∆u f req (t) have at most k 2 non-zero elements.2 implies that random projection in value and frequency domain has limited effect on learning provided that the signals are sufciently sparse after value and frequency domain decomposition, and we only require poly-logarithmic number of projected dimensions for constant factor approximation. after value and frequency domain decomposition as outlined in algorithm 1, let ∆u gt val (t) and ∆u val (t) have at most k 1 non-zero elements, and all ∆u gt f req (t) and ∆u f req (t) have at most k 2 nonzero elements. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/428.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/428.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c343c0c3863d55197f4df3c04672ab8b8c893cd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/428.txt @@ -0,0 +1 @@ +neural networks achieved outstanding performance in many signal and image processing tasks, especially with the advent of the deep learning paradigm , . despite their many successful applications, traditional neural networks are theoretically designed to process real-valued or, at most, complex-valued data. accordingly, signals and images are represented by (possibly multidimensional) arrays of real or complex numbers , , . furthermore, traditional neural networks a priori do not consider possible intercorrelation between feature channels. the relationship between features is expected to be learned from the training data. consequently, besides relying on appropriate loss functions and effective optimizers, traditional deep learning models usually have too many parameters and demand a long training time.in contrast, vector-valued neural networks (v-nets) are designed to process arrays of vectors. they naturally take into account the intercorrelation between feature channels. hence, v-nets are expected to have fewer parameters than traditional neural networks. furthermore, they should be less susceptible to being trapped in a local minimum of the loss function surface during the training. hypercomplex-valued neural networks are examples of robust and lightweight v-nets for dealing with vector-valued data , , , .this paper aims to present a detailed framework for v-nets, making plain and understandable their relationship with traditional networks and hypercomplex-valued neural networks. precisely, we first present the mathematical background for vector-valued neural networks. then, we address the relationship between real and hypercomplex-valued neural networks, focusing on dense and convolutional layers.on the one hand, hypercomplex-valued neural networks are regarded as vector-valued models with additional algebraic properties. on the other hand, v-nets can be viewed as traditional neural networks with restrictions to take into account the intercorrelation between the feature channels. 
using these relationships, we show how to emulate vector-valued (and hypercomplex-valued) neural networks using traditional models, allowing us to implement them using current deep-learning libraries.the paper is structured as follows. section ii provides the mathematical background for v-nets, including hypercomplex algebras and examples , , . basic vector-valued matrix operations and their relationship with traditional linear algebra are briefly reviewed in section iii. section iv introduces v-nets, with a focus on dense and convolutional layers. this section also addresses the approximation capability of shallow dense networks and explains how to implement v-nets using the current deeplearning libraries designed for real-valued data. the paper finishes with concluding remarks in section v.as a bilinear operation, the multiplication of x, y ∈ v, denoted by the juxtaposition xy, satisfies (x + y)z = xz + yz and z(x + y) = zx + zy, ∀x, y, z ∈ v, and α(xy) = (αx)y = x(αy), ∀α ∈ f and x, y ∈ v. in computational applications, x ∈ v is given by its coordinates relative to the ordered basis e = {e 1 , . , e n }, the algebra is commutative if and only if e i e j = e j e i , ∀i, j = 1, .thus, the algebra is associative if and only if (e i e j )e k = e i (e j e k ), ∀i, j, k = 1, . using the distributive law and the multiplication table, the product of x = n i=1 x i e i and y = n j=1 y j e j satisfies xy = n i=1. the multiplication of x = n i=1 x i e i and y = n j=1 y j e j satisfies.where b k : v × v → r is a bilinear form whose matrix representation in the ordered basis e is.yields a linear operator a l : v → v defined by a l (x) = ax, for all x ∈ v.in words, m l : v → r n×n maps a vector a ∈ v to its matrix representation in the multiplication by the left with respect to the ordered basis e.note that m l (x) = x 0 p 0: + x 1 p 1: + x 2 p 2: + x n p n: , where p 0: = i 4×4 is the identity matrix and.as in the traditional matrix algebra, the product of two vector-valued matrices a ∈ v m ×l and b ∈ v l×n results in a new matrix c ∈ v m ×n with entries defined by. , x n ∈ v are the vector-valued inputs, w ij ∈ v represents the synaptic weighted from input j to neuron i, b i ∈ v denotes the bias, and ψ : v → v is a vector-valued activation function. consider a finite-dimensional non-degenerate algebra v, let ψ : v → v be the split activation function derived from ψ r , and let k ⊂ v n be a compact set. precisely, using the isomorphism ϕ given by (10), we consider ϕ(x) ∈ r nn and ϕ(y) ∈ r nm instead of x ∈ v n and y ∈ v m , respectively.where ϕ(x) ∈ r nn is a real-valued input vector, m l (w ) ∈ r nm ×nn is a real-valued synaptic weight matrix, ϕ(b) ∈ r nm is a real-valued bias vector, and ϕ(y) ∈ r nm is the real-valued output. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/429.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/429.txt new file mode 100644 index 0000000000000000000000000000000000000000..47ed8e382ab6c2d75c7c999463ca95f3d8f8b47f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/429.txt @@ -0,0 +1 @@ +effective data representation is vital in various machine learning tasks as it captures the underlying structure and context of the data . it enables accurate modeling and decision-making . 
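to make the emulation concrete, here is a minimal python sketch for the simplest hypercomplex algebra, the complex numbers: the block matrix below plays the role of the matrix representation m l (w) described above, and the map phi stacks real and imaginary parts, so the vector-valued dense layer is computed with purely real arithmetic. the general construction for other algebras follows the bilinear forms above; this snippet is only an assumed illustration.

import numpy as np

def complex_dense_as_real(W, b, x):
    # compute y = W x + b for complex W, b, x using only real arithmetic
    Wr, Wi = W.real, W.imag
    phi = lambda z: np.concatenate([z.real, z.imag])   # isomorphism onto stacked real coordinates
    # real block matrix acting on phi(x) exactly as W acts on x
    M = np.block([[Wr, -Wi],
                  [Wi,  Wr]])
    y_real = M @ phi(x) + phi(b)
    m = W.shape[0]
    return y_real[:m] + 1j * y_real[m:]                # map back to the complex output

rng = np.random.default_rng(0)
W = rng.normal(size=(3, 5)) + 1j * rng.normal(size=(3, 5))
b = rng.normal(size=3) + 1j * rng.normal(size=3)
x = rng.normal(size=5) + 1j * rng.normal(size=5)
assert np.allclose(complex_dense_as_real(W, b, x), W @ x + b)   # matches the direct complex computation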
embeddings, as low-dimensional vector representations, have gained prominence for data representation due to their ability to capture meaningful relationships and semantic information in the data .while embeddings are widely used, assessing their quality and capacity to preserve structural and contextual information is challenging. existing evaluation methods often focus on specific aspects and fail to provide a comprehensive assessment of representation capacity. current evaluation methods may lack holistic evaluation criteria, focusing on individual tasks or aspects of embeddings. this leads to a limited understanding of their overall effectiveness. knowing the strengths and limitations of embeddings is crucial for selecting appropriate models, improving performance, and avoiding unintended biases or inaccuracies in downstream applications . the quality of embeddings directly affects the performance and reliability of machine learning systems, making it essential to have reliable metrics to assess representation capacity .the primary objective of this research is to develop a robust and comprehensive evaluation framework to measure the representation capacity of embeddings. the proposed framework combines extrinsic evaluation methods, such as classification and clustering, with neighborhood analysis and trustworthiness using t-sne to provide a holistic assessment. by integrating classification and clustering tasks with t-sne-based neighborhood analysis and trustworthiness, we capture both functional and structural aspects of the embeddings. prior research has proposed various evaluation techniques, including intrinsic evaluation measures like word similarity and analogy tasks , as well as extrinsic evaluation through downstream tasks . however, current evaluation methods often lack a comprehensive evaluation framework that combines multiple metrics and fails to address the full representation capacity of embeddings. the proposed method overcomes the limitations of existing approaches by providing a comprehensive assessment of representation capacity, considering both functional and structural aspects.an alternative approach to embedding design in sequence classification is the utilization of a kernel (gram) matrix. kernel-based machine learning classifiers, such as support vector machines (svm) , can leverage kernel matrices for effective classification. these methods have shown promising results compared to feature engineering-based techniques . in this approach, the kernel matrix is computed by evaluating the similarity (kernel) values between sequences based on the number of matches and mismatches between k-mers . the resulting kernel matrix can be employed not only with kernel-based classifiers like svm but also with non-kernel-based classifiers such as decision trees using kernel principal component analysis (pca) . however, the kernel-based approach faces two main challenges:-computation of pairwise sequence similarity: computing the pairwise sequence similarity required for constructing the kernel matrix can be computationally expensive. as the number of sequences increases, the computational cost grows significantly, limiting the scalability of the kernel-based method. -memory storage of large kernel matrices: storing a kernel matrix of dimensions n × n, where n represents the number of sequences, can be challenging, especially when n is very large. 
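for illustration, a bare-bones k-mer spectrum kernel (counting shared k-mers rather than the exact match/mismatch kernel cited above) shows how the n × n kernel matrix is built and why its cost grows quadratically with the number of sequences; the sequences and the choice of k are assumptions for the example.

from collections import Counter
import numpy as np

def kmer_counts(seq, k=3):
    return Counter(seq[i:i + k] for i in range(len(seq) - k + 1))

def spectrum_kernel(a, b, k=3):
    ca, cb = kmer_counts(a, k), kmer_counts(b, k)
    return sum(ca[m] * cb[m] for m in ca if m in cb)   # dot product of k-mer count vectors

seqs = ["ACGTACGT", "ACGTTTGT", "GGGTACGA"]
n = len(seqs)
K = np.zeros((n, n))                                   # the n x n gram matrix, stored explicitly
for i in range(n):
    for j in range(i, n):
        K[i, j] = K[j, i] = spectrum_kernel(seqs[i], seqs[j])
print(K)                                               # usable with a kernel svm or kernel pca downstream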
the memory requirements for such matrices can become prohibitive, making it difficult to scale the kernelbased method to handle a large number of sequences effectively.due to the problems discussed above for kernel-based methods, feature engineering and deep learning-based methods for embedding design are more popular among researchers. for this purpose, we only focus on those types of embeddings in this paper. the proposed evaluation framework incorporates classification tasks to assess the discriminative power of embeddings and clustering analysis to evaluate their ability to capture inherent data clusters. the neighborhood structures of embeddings in high-dimensional and low-dimensional spaces are compared using t-sne to evaluate the preservation of local relationships. the bayesian optimization approach is employed to optimize the weights assigned to different evaluation metrics including classification, clustering, neighborhood agreement, and trustworthiness, ensuring a balanced assessment of representation capacity. the proposed method offers a comprehensive and holistic understanding of embedding quality. the incorporation of an optimization approach, which allows for the automatic selection of weights, ensures an objective and data-driven approach to measuring the importance of different evaluation metrics. the proposed method equips researchers and practitioners with a robust and quantifiable measure to assess the effectiveness of embeddings in preserving structural and contextual information, enabling informed decision-making in selecting appropriate embedding models. our contributions to this paper are summarized as follows:1. development of a comprehensive evaluation framework: we propose a novel evaluation framework to measure the representation capacity of embeddings. unlike existing methods that focus on specific aspects, our framework integrates classification, clustering, t-sne-based neighborhood analysis, and trustworthiness to provide a holistic assessment. this comprehensive approach enables a thorough understanding of the effectiveness of embeddings in preserving structural and contextual information. however, current evaluation methods often lack a comprehensive evaluation framework that combines multiple metrics and fails to address the full representation capacity of embeddings. the bayesian optimization approach is employed to optimize the weights assigned to different evaluation metrics including classification, clustering, neighborhood agreement, and trustworthiness, ensuring a balanced assessment of representation capacity. this approach automatically selects optimal weights for different evaluation metrics, including classification, clustering, neighborhood agreement, and trustworthiness. by analyzing the representation capacity of four embedding methods from the literature (spike2vec, spaced k-mers, pwm2vec, and autoencoder), we demonstrate the practicality and effectiveness of our approach in assessing different embedding models. by combining multiple evaluation metrics, including classification, clustering, and neighborhood analysis, the proposed framework captures both the discriminative power and the preservation of structural and contextual information. our approach combines classification and clustering evaluation with neighborhood analysis using t-sne and leverages bayesian optimization approach for weight optimization. 
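a hedged sketch of how such a combined score could be computed with off-the-shelf tools is given below: classification accuracy, a clustering score, a t-sne-based trustworthiness value, and a crude neighborhood-agreement proxy are blended with fixed example weights standing in for the bayesian-optimized ones; none of this reproduces the exact metrics or optimizer of the proposed framework.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score
from sklearn.manifold import TSNE, trustworthiness

def representation_capacity(X, y, weights=(0.3, 0.3, 0.2, 0.2), seed=0):
    w_clf, w_clu, w_nbr, w_trust = weights
    clf_acc = cross_val_score(LogisticRegression(max_iter=1000), X, y, cv=3).mean()
    labels = KMeans(n_clusters=len(set(y)), n_init=10, random_state=seed).fit_predict(X)
    clu_score = silhouette_score(X, labels)
    X_low = TSNE(n_components=2, random_state=seed).fit_transform(X)
    trust = trustworthiness(X, X_low, n_neighbors=5)       # preservation of local neighborhoods
    nbr_agree = trustworthiness(X_low, X, n_neighbors=5)    # crude stand-in for neighborhood agreement
    return w_clf * clf_acc + w_clu * clu_score + w_nbr * nbr_agree + w_trust * trust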
if we break down the performance for different metrics, the spike2vec embedding shows the best classification accuracy, autoencoder shows the best clustering performance as well as the best trustworthiness value. from the "optimal weights (performance values)", we can observe that classification, clustering, and trustworthiness got almost equal weight based on the bayesian optimization. because of this reason, despite pwm2vec showing the best performance for neighborhood agreement, its representation capacity score is the lowest among all embedding methods (because of the 0. if we break down the performance for different metrics, the spaced k-mers embedding shows the best classification accuracy (with an optimal weight of 0.3136). autoencoder shows the best neighborhood agreement performance while pwm2vec shows the highest performance in the case of trustworthiness. again, from the "optimal weights (performance values)", we can observe that classification, clustering, and trustworthiness got almost equal weight based on the bayesian optimization. the results for the protein subcellular dataset are reported in table3. in terms of representation capacity, we can observe that spaced kmers-based embedding achieves the highest performance despite achieving the best individual performance for only classification metric. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/43.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/43.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc1f3290ed7971a99882f4b0d6492c9b738be4aa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/43.txt @@ -0,0 +1 @@ +when working on optimization problems, it is often difficult to pick one single objective to optimize: in most real applications different parties care about many different objectives at the same time. for example, consider the classic setting when we are given n clients and m possible facility locations in some metric space, and want to choose a location to build a facility. note that while the setting we consider is for facility location problems, the exact same setting also arises in spatial social choice (see e.g., ), where voters are the clients and the goal is to choose a candidate or outcome located in some metric space, where the distance from a voter to an outcome represents the cost of this outcome for the voter (e.g., based on their ideological differences). when choosing where to build a facility (or which candidate to select) for the public good (e.g., where to build a new post office, supermarket, etc.), we may care about minimizing the average distance from users to the chosen facility (a utilitarian measure), or the maximum distance (an egalitarian measure), or many other measures of fairness or happiness. focusing on just a single measure may not be useful for actual policy makers, who often want to satisfy multiple objectives simultaneously and in fact refuse to commit themselves to a single one, as many objectives have their own unique merits. in this paper we instead attempt to simultaneously minimize multiple objectives. for example, what if we care about both the average and the maximum distance to the chosen facility, and not just about some linear combination of the two? what if we want to choose a facility so that it is close to optimum in terms of the average distance from the users, and at the same time is also close to optimum in terms of the maximum distance? 
is this even possible to do?more specifically, we consider l-centrum problems , where we are given a set of possible facilities f and a set of n clients c in an arbitrary metric space with distance function d. for each client i ∈ c there is a cost d(i, j) if we choose to build facility j ∈ f. then the goal is to pick one facility from f such that it minimizes the sum of the l most expensive costs induced by the choice of facility location. such problems generalize minimizing the total client cost (l = n), as well as the maximum client cost (l = 1). the latter may be considered a measure which is more fair to all the clients (since it makes sure that all clients have small cost, not just on average), but would have the drawback that a solution where all except a single client have low cost would be considered the same as a solution where they all have high cost, as long as the maximum cost stays the same. because of this, some may argue that an objective where we consider only the costs of the worst 10 percent of the clients may be better. in this work, we side-step questions about which objective is best entirely. since each of the l-centrum objectives has its own advantages, our goal is to simultaneously approximate multiple such objectives. this idea of simultaneously approximating l-centrum problems as a method of creating "fair" outcomes was previously discussed in , and was adapted from the idea of approximate majorization in .note that our approach is very different from combining several objectives into a single one (e.g., by taking a weighted sum); we instead want to make sure that the chosen outcome is good with respect to each objective we are interested in simultaneously. more formally, for 1 ≤ l ≤ n, we define the cost function for choosing facility a ∈ f to be c l (a), which is the sum of the top l distances from a to each client in c. the l-centrum problem asks to minimize c l (a) with a fixed l value; denote the optimal facility location for this objective by o l . now suppose we have q such objectives that we want to optimize, such that l ∈ k = {k 1 , k 2 , • • • , k q }. we then say a facility a ∈ f is a simultaneous α-approximation for all of the q objectives iff c l (a) ≤ α • c l (o l ) for all l ∈ k. now suppose we have q such objectives that we want to optimize, such that l ∈ k = {k 1 , k 2 , • • • , k q }. we slightly abuse notation and refer to k as the set of objectives, and say that an objective c k is in k when k ∈ k.the above theorem indicates that by picking either o k or o p , the values of α k (o p ) and α p (o k ) cannot be simultaneously large. by definition, c p (a) = p i=1 d(a i , a) is the summation of the largest p distances from any clients to facility location a and c k (a) = k i=1 d(a i , a) is the summation of the largest k distances from any clients to facility location a. naturally, as the difference between k and p becomes smaller, we would expect that both α p (o k ) and α k (o p ) would also become smaller. in other words, assume that k p ≤ 1 2 , as the difference between k and p becomes smaller, the upper bound of α k (o p ) would also becomes smaller given a fixed value of α p (o k ). moreover, note that as the value of p k approaches 1, the value of β also approaches f (1) = 1.1 indicates that when the difference between any two objectives c k and c p is sufficiently small, both α p (o k ) and α k (o p ) would also be small.1 we must have for any k ∈ k \ {2k}, α k (o 2k ) ≤ 2. 
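the following small python sketch, using an assumed distance matrix, computes the l-centrum cost c l (a) as the sum of the l largest client distances and then searches for the facility with the best simultaneous approximation ratio over a set of objectives k; it only illustrates the definitions above, not the paper's algorithm.

import numpy as np

def centrum_cost(dists, l):
    # c_l(a): sum of the l largest client distances for one facility
    return np.sort(dists)[::-1][:l].sum()

def simultaneous_ratio(D, facility, objectives):
    # largest ratio c_l(facility) / min_a c_l(a) over all l in the objective set
    ratios = []
    for l in objectives:
        costs = np.array([centrum_cost(D[a], l) for a in range(D.shape[0])])
        ratios.append(centrum_cost(D[facility], l) / costs.min())
    return max(ratios)

rng = np.random.default_rng(0)
D = rng.uniform(1, 10, size=(4, 20))            # 4 candidate facilities, 20 clients (assumed data)
K = [1, 5, 20]                                   # max distance, top-5 sum, total sum
best = min(range(D.shape[0]), key=lambda a: simultaneous_ratio(D, a, K))
print(best, simultaneous_ratio(D, best, K))      # facility that is simultaneously closest to optimal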
first, for each k ∈ k, we will make a node representing o k , which is the optimal facility location for objective c k . then there must exist some k ∈ k such that choosing o k would be at worst a β q approximation for every other objective in k. recall that γ k p ≥ α p (o k ) -2 for any p, k, but note that for at least one of the γ kx ky values above, it must be that k x < k y , since otherwise we would get that k 1 > k 1 , a contradiction. recall that in the graph representation g, for some node o k , k ∈ k, every edge that goes out of o k into some o p has weight representing the approximation ratio using o k with respect to objective c p , denoted by α p (o k ). since the graph g is complete, we will show the above theorem by showing that there must exist some o k , k ∈ k, |k| = q such that all of the edges leaving o k have weight less than or equal to β q . therefore, we can conclude that there must exist some o k , k ∈ k, such that all of the edges leaving o k have weight less than or equal to β q . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/430.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/430.txt new file mode 100644 index 0000000000000000000000000000000000000000..799ecfd7a027ea3e198811f4f892aeebcdc04253 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/430.txt @@ -0,0 +1 @@ +when analyzing the relationship between concurrent signals, covariance matrices are a useful tool, with applications in fields like brain-computer interfaces (bci) and evolutionary computation . by construction, they are rich in information, illustrating the relationship between signals while still encoding signal-wise information on their diagonal. such matrices are at least positive semi-definite, and often fully symmetric positive definite (spd). the set of n × n spd matrices (spd(n)) is a non-euclidean, riemannian (i.e. metric) manifold, and the regular euclidean operations of most neural network (nn)-based models seldom preserve that geometric structure, introducing deformations such as the "swelling effect" . structure-preserving nn-based approaches have been introduced , , deriving their layers from one of two geodesic-defining metrics on spd(n). affine invariant metrics offer the best properties, but present computational challenges (e.g. no closed-form formula for averaging) . log-euclidean metrics are less isotropic, but still prevent swelling while being easier to compute . with a, b ∈ spd(n), we chose this log-euclidean distance: ∥log mat (a) -log mat (b)∥ 2 . this metric relies on the matrix logarithm log mat (•), bijectively and isometrically mapping spd(n) onto sym(n), the vector space of n × n symmetric matrices (with exp mat (•) being its inverse). here, ∥x∥ 2 , x ∈ sym(n), is the l 2 norm applied to the upper triangular part of x. log-euclidean operations are thus the riemannian equivalent of euclidean operations on sym(n). in this paper, we present a structure-preserving self-attention mechanism applicable to sequences of spd matrices, derived from the aforementioned log-euclidean metric. we embed said mechanism into a transformer-based architecture, and apply it to a biomedical classification problem.
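as a quick numerical illustration of the log-euclidean distance above, the sketch below computes the matrix logarithm of an spd matrix through its eigendecomposition and takes the l2 norm of the upper-triangular difference; the random spd(8) matrices are assumptions for the example.

import numpy as np

def spd_logm(A):
    w, V = np.linalg.eigh(A)            # A = V diag(w) V^T with w > 0 for spd matrices
    return (V * np.log(w)) @ V.T        # matrix logarithm, mapping spd(n) onto sym(n)

def log_euclidean_distance(A, B):
    diff = spd_logm(A) - spd_logm(B)
    iu = np.triu_indices(diff.shape[0]) # l2 norm over the upper-triangular entries, as described above
    return np.linalg.norm(diff[iu])

rng = np.random.default_rng(0)
M = rng.normal(size=(8, 8))
A = M @ M.T + 8 * np.eye(8)             # a random spd(8) matrix
N = rng.normal(size=(8, 8))
B = N @ N.T + 8 * np.eye(8)             # another random spd(8) matrix
print(log_euclidean_distance(A, B))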
transformer-based technology has exploded in popularity ever since its introduction in , with self-attention mechanisms being applied to very different problems. with regards to riemannian geometry, innovations seem centered around the computation and application of attention maps, specifically. for instance, konstantinidis et al. combine the standard attention maps with grassmann and spd manifold-valued maps, to enrich their computer vision model's descriptive capabilities. by contrast, both he et al. and li et al. developed architectures to analyze 2d-manifold-valued data in 3d space, the former achieving rotational equivariance with respect to surfaces on the manifold and the latter developing two geodesic distances applicable to point clouds, and building attention maps from these distances. more generally, kratsios et al. provide a mathematical framework to apply attention mechanisms on a variety of constrained sets, including manifolds. while the latter approaches share our interest in preserving geometric information, little to no focus is given to a transformer's other components. to the best of our knowledge, our approach is the only one applying structure-preserving transformers to spd manifold-valued data.as seen in a recent survey by phan et al. most eeg datasets are heavily imbalanced, with the n1 stage often underrepresented (section iii-c) -models optimized for high overall accuracy may thus sacrifice n1 classification if it improves global performance. while the survey states that a sequence-to-sequence classification scheme (classifying each epoch in the input sequence) might lead to better performance, having multilabel inputs is nonsensical for this rebalancing -hence their use of a sequence-to-epoch scheme (classifying one epoch per sequence).beyond sleep staging, eeg signals are also utilized in bci (section i), where they are often analyzed through the lens of functional connectivity -the activation correlations between different brain regions. automatic sleep staging through functional connectivity was first investigated by jia et al. each input epoch is described as a multichannel timeseries of spd matrices, which are then tokenized bijectively. however, their approach does not guarantee the preservation of their data's spd structure, as they operate a channel-wise concatenation of their tokens, in addition to the concatenations found within their encoders (section ii-a). finally, the t tokens corresponding to the central epoch (of index ℓ + 1 in figure2) go through two fc blocs1, and are mapped onto ŷℓ+1 ∈ r c by a final classification linear map, with c the number of classes.we ensure structure preservation by using the sp-mha bloc in all transformer encoders, and choosing all linear maps within said encoders' feed-forward (ff) componentsand the aforementioned fc blocs to be triangular (section ii-b).to estimate functional connectivity from those signals, we apply the same preprocessing pipeline as2 , computing s × c = 30 × 7 covariance matrices in sp d(8), with s the sequence length and c the number of frequencybased channels. m ∈ {26, 27}) 3 ; the h parameter of each transformer encoder, in {3, 9} 3 ; and the number of epoch feature tokens t (section iii-b), chosen among {1, 3, 5, 7, 10} -with in particular t = 1 akin to describing each epoch with a single token, and t = 7 corresponding to one token being preserved per channel.as shown in tablei, we obtain the best mf1 and n1 f1 scores for l = 21, whereas the best macro-averaged accuracy is obtained for l = 13. 
while our model favors the smaller token size of d(m) = 351 for all values of l, it seems that having a large number of tokens to describe each epoch (at least t = 5) is necessary for best performance. we compare ourselves to five models: deepsleepnet, often used as a benchmark, with a pre-trained epoch-wise global feature map submodel followed by a sequence-to-sequence rnn; iitnet, the source of our 31 folds, extracting multiple features per epoch through cnns and comparing them through sequence-wise rnns; graphsleepnet, making epoch-wise functional connectivity explicit through graph learning; and dequidt et al. furthermore, comparing our best results (line 2) to those of lines 4 and 9 indicates that the structural preservation of our sp-mha improves our model's performance, with or without the influence of our new whitening (section iii-c). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/431.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/431.txt new file mode 100644 index 0000000000000000000000000000000000000000..fbc09d91063d5c77eb672a2277a4f7002379a0ba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/431.txt @@ -0,0 +1 @@ +we study unconstrained online linear optimization (olo) with lipschitz losses, which is a repeated game between us (the learner) and an adversarial environment denoted by env. in each (the t-th) round, with a mutually known lipschitz constant g: 1. we make a decision x t ∈ r d based on the observations before the t-th round. 2. the environment env reveals a loss gradient g t ∈ r d dependent on our decision history x 1 , . . . , x t , which satisfies the lipschitz condition with respect to the euclidean norm, ∥g t ∥ ≤ g. 3. we suffer the linear loss ⟨g t , x t ⟩. the game ends after t rounds, and then, our total loss is compared to that of an arbitrary fixed decision u ∈ r d . without knowing the time horizon t , the environment env and the comparator u, our goal is to guarantee low regret, defined as regret t (env, u) := Σ t t=1 ⟨g t , x t -u⟩. in a nutshell, this paper proposes a novel and practical strategy to achieve the tightest known regret upper bound (even including a near-optimal leading constant) that depends simultaneously on the loss gradients g 1 , . . . , g t and the comparator u. to be concrete, we now introduce a bit more of the background. existing research on this problem started from the minimax regime: under the additional assumption of ∥u∥ ≤ d, it has been long known that online gradient descent (ogd) guarantees the optimal upper bound on the worst case regret, sup env; ∥u∥≤d regret t (env, u) ≤ o(dg √ t). refining such worst case optimality by instance optimality, improvements have been achieved under the notion of adaptive online learning, with gradient adaptivity and comparator adaptivity being the two prominent types. • gradient adaptivity aims at bounding sup ∥u∥≤d regret t (env, u) by a function of the observed gradient sequence g 1 , . . . , g t . using learning rates dependent on past observations, ogd can achieve the optimal second order gradient adaptive bound sup ∥u∥≤d regret t (env, u) ≤ o(d √ v t ), where v t := Σ t t=1 ∥g t ∥ 2 is the (uncentered) gradient variance.
this has been a hallmark of practical online learning algorithms, popularized by the massive success of adagrad .• comparator adaptivity aims at bounding sup env regret t (env, u) by a function of the comparator u.without imposing the extra bounded-u assumption, one could use a dual space framework (which differs considerably from ogd) to achieve the optimal bound sup env regret t (env, u) ≤ o u g t log u .(2)due to the absence of learning rates, such algorithms are also called "parameter-free" in the literature. they have exhibited the potential to reduce hyperparameter tuning in the modern deep learning workflow .while both types of adaptivity are well-studied separately, achieving them simultaneously is an active research direction, which we call simultaneous adaptivity. again without the bounded-u assumption, a series of works have proposed drastically different approaches to obtain simultaneously adaptive regret bounds like 2 regret t (env, u)combining the strengths of eq.(1) and eq.(2) above. a remaining issue is that compared to the lower bound ora13,zcp22a], eq.( 3) is still suboptimal due to a √ log v t multiplicative factor. that is, in terms of the dependence on the gradient variance v t alone (which is typically the emphasis of the field), eq.(3) is o √ v t log v t rather than the standard optimal rate o √ v t as in eq.(1). the first goal of this paper, on the quantitative side, is to close this gap. 3to this end, we will take a detour through the continuous time (ct), first solving a ct analogue of the problem, and then converting the solution back to discrete time (dt). quantitatively, our goal above can be seen as the gradient adaptive refinement of -without considering gradient adaptivity, presents an algorithm designed in ct that natively achieves the optimal comparator adaptive bound eq.(2), while earlier algorithms designed in dt do not (unless they resort to the impractical doubling trick ) . broadly speaking, such a result exemplifies a higher level observation: while various benefits of the ct approach have been demonstrated in online learning before , it remains unsatisfactory that no existing work (to the best of our knowledge) has used it to obtain dt gradient adaptive regret bounds, even though the ct analogue of gradient adaptivity is often natural4 and fairly standard to achieve . in other words, one would expect the ct approach to make gradient adaptivity (hence, simultaneous adaptivity) easier as well, but such a benefit has not been demonstrated in the literature.the key reason of this limitation appears to be the crudity of existing discretization arguments, i.e., the modification applied to a ct algorithm and its analysis to make them work well in dt. the state-of-the-art technique, due to , replaces the continuous derivative in potential-based ct algorithms (i.e., ftrl and dual averaging ) by the discrete derivative, and consequently, the standard itô's formula in the ct regret analysis by the discrete itô's formula. applying the discrete derivative amounts to implicitly assuming the worst case gradient magnitude ( g t = g), therefore any gradient adaptivity in ct is lost in dt by construction. the second goal of this paper, on the technical side, is to propose a refined discretization argument that preserves such gradient adaptivity. without knowing the time horizon t , the environment env and the comparator u, our goal is to guarantee low regret, defined as regret t (env, u) := t t=1 g t , x t -u . 
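for a concrete feel of gradient adaptivity, here is a small sketch of ogd on a bounded domain with a step size that shrinks with the running gradient variance, the mechanism behind the o(d √ v t ) bound mentioned above; it is not the parameter-free or simultaneously adaptive algorithm developed in the paper, and the step-size constant is an assumption.

import numpy as np

def adaptive_ogd(gradients, dim, D=1.0, eps=1e-12):
    x = np.zeros(dim)
    v = 0.0                                   # running sum of squared gradient norms
    iterates = []
    for g in gradients:
        iterates.append(x.copy())
        v += float(np.dot(g, g))
        eta = D / (np.sqrt(v) + eps)          # adaptive step size ~ D / sqrt(v_t)
        x = x - eta * g
        norm = np.linalg.norm(x)
        if norm > D:                          # project back onto the ball of radius D
            x *= D / norm
    return iterates

rng = np.random.default_rng(0)
grads = [rng.normal(scale=0.5, size=3) for _ in range(100)]    # stand-in for the adversary
xs = adaptive_ogd(grads, dim=3, D=2.0)
regret_vs_zero = sum(np.dot(g, x) for g, x in zip(grads, xs))  # regret against the comparator u = 0
print(regret_vs_zero)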
existing research on this problem started from the minimax regime: under the additional assumption of u ≤ d, it has been long known that online gradient descent (ogd)guarantees the optimal upper bound on the worst case regret, sup env; u ≤d regret t (env, u) ≤ o dg √ t .• gradient adaptivity aims at bounding sup u ≤d regret t (env, u) by a function of the observed gradient sequence g 1 , .without imposing the extra bounded-u assumption, one could use a dual space framework (which differs considerably from ogd) to achieve the optimal boundsup env regret t (env, u) ≤ o u g t log u . quantitatively, our goal above can be seen as the gradient adaptive refinement of-without considering gradient adaptivity,presents an algorithm designed in ct that natively achieves the optimal comparator adaptive bound eq. applying the discrete derivative amounts to implicitly assuming the worst case gradient magnitude ( g t = g), therefore any gradient adaptivity in ct is lost in dt by construction.• excluding the recent arxiv update, this is the first simultaneously adaptive regret bound matching the optimal o( √ v t ) rate (with respect to v t alone), improving a series of prior works .t log |u| , which is the desirable ct simultaneously adaptive bound, analogous to the o |u| v t log |u| bound we aim for in dt. let us assume the lipschitz constant g is unknown, but at the beginning of each (the t-th) round we have access to a hint h t which satisfies h t ≥ h t-1 and g t ≤ h t (initially, assume h 0 = h 1 > 0 w. in comparator adaptive online learning, regret bounds of this form are said to characterize the loss-regret tradeoff: with a small ε, one could ensure that the cumulative loss regret t (env, 0) is low, while only sacrificing a log(ε -1 ) penalty on the leading term of the regret bound.the key strength of this bound is that, the dependence on v t alone is o( √ v t ), matching the optimal gradient adaptive bound achieved by ogd. furthermore, in the regime of large u and v t , the leading order term is 4αv t log( u ε -1 ), where the multiplying constant √ 4α almost matches the √ 2 lower bound from. in the special case with bounded domain ( x t , u ≤ d), a simplified variant of this procedure guarantees an even smaller bound, regret t (env, u) ≤ r(u, v t , g) + 2dg.we aim to show the bracket on the rhs is negative at x = v + c 2 + z + k(s + c) and y = s + c, where k = 2g. ≤ u s + ε α(v + z t + k t s), where s = 4αk t 1 + log(2uε -1 + 1) 2 + 4α(v + z t ) 1 + log(2uε -1 + 1) . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/432.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/432.txt new file mode 100644 index 0000000000000000000000000000000000000000..fd150a7388435bdaaf7116ceb9103c53a0e0588c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/432.txt @@ -0,0 +1 @@ +low-bit neural networks that use limited precision quantization for inference can make stateof-the-art models much more practical. however, perturbations induced by quantization of weights and activations can change dnn behaviour in non-trivial ways. in some cases, state-of-the-art performance can have significant degradation after quantization of weights and activations . so how can we find high-performant, quantization robust cnn parameters? recent works by and have shown remarkable performance using graph hypernetworks (ghn) to predict all trainable parameters of unseen dnns in a single forward pass. 
preliminary research in has explored the use of ghns to predict quantization-robust parameters for 8-bit and 4-bit quantized cnns. however, this work trained ghns to predict parameters for full-precision float32 candidate cnns and only quantized them for testing. building on this, we explore quantizationspecific training and find that quantization-aware training of ghns (which we refer to as ghn-qat) can significantly improve quantized accuracy for ghn predicted parameters of 4-bit quantized cnns and even achieve greater-than-random accuracy for 2-bit cnns. more specifically, we simulated quantization (simquant) in sampled cnns such that ghn-qat adapts to the quantization errors induced by quantizing ghn-predicted models (see fig. 1). by finetuning ghns on a mobile-friendly, quantized cnn architecture space, ghns learn representations specifically for efficient quantized cnns. so how can we find high-performant, quantization robust cnn parameters? recent works byandhave shown remarkable performance using graph hypernetworks (ghn) to predict all trainable parameters of unseen dnns in a single forward pass. preliminary research inhas explored the use of ghns to predict quantization-robust parameters for 8-bit and 4-bit quantized cnns. however, this work trained ghns to predict parameters for full-precision float32 candidate cnns and only quantized them for testing. building on this, we explore quantizationspecific training and find that quantization-aware training of ghns (which we refer to as ghn-qat) can significantly improve quantized accuracy for ghn predicted parameters of 4-bit quantized cnns and even achieve greater-than-random accuracy for 2-bit cnns. more specifically, we simulated quantization (simquant) in sampled cnns such that ghn-qat adapts to the quantization errors induced by quantizing ghn-predicted models (see fig. by finetuning ghns on a mobile-friendly, quantized cnn architecture space, ghns learn representations specifically for efficient quantized cnns.we first investigate simquant based quantization training (commonly referred to as quantizationaware training/qat) on a target design space for limited precision quantization. to establish the benefits of qat, we also include results fromwhere the authors used full-precision float32 training and only quantized cnns for testing. the parameters predicted by ghn-qat are remarkably robust and the qat finetuning results (see table1) show a significant improvement over the full-precision float32 finetuning used in. additional possibilities/challenges of leveraging quantization-aware training, such as learned quantization thresholds or reducing qat, should be explored to further improve ghn-qat, especially for "extreme" low bitwidths.from ghn-qat, we can see that introducing quantization into our ghn training allows for greater use of ghns for quantization-specific neural network parameter prediction. besides leveraging ghn-qat for quantized versions of floating point operations, we should be able to encode quantization information such as bit-width and quantization scheme into the graphs. if used as a form of quantized accuracy prediction, ghn-qat could greatly accelerate the process of searching for accurate, quantized cnns. additionally, ghn-qat could be a useful weight initialization for quantized cnn training. 
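a minimal sketch of simulated quantization of the kind described above is shown below: weights are quantized and dequantized in the forward pass so the network sees quantization error, while a straight-through estimator keeps gradients flowing to the float weights; the 4-bit symmetric per-tensor scheme is an assumption rather than the exact ghn-qat setup.

import torch

def fake_quantize(w, num_bits=4):
    qmax = 2 ** (num_bits - 1) - 1                        # symmetric signed range, e.g. [-8, 7] for 4 bits
    scale = w.detach().abs().max().clamp(min=1e-8) / qmax
    q = torch.clamp(torch.round(w / scale), -qmax - 1, qmax) * scale
    return w + (q - w).detach()                           # straight-through estimator: forward is quantized, backward is identity

w = torch.randn(16, 8, requires_grad=True)
loss = (fake_quantize(w, num_bits=4) ** 2).sum()
loss.backward()                                            # gradients still reach the float weights
print(w.grad.shape)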
if ghn-qat-predicted parameters can be used as initialization for quantizationaware training rather than first training models to convergence in full float precision before additional qat, then the training time of quantized models would be significantly reduced. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/433.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/433.txt new file mode 100644 index 0000000000000000000000000000000000000000..69ba9eb24f3a974fecf1423bf29e00fa7bc5955f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/433.txt @@ -0,0 +1 @@ +on the 10th of september 2023, tropical storm daniel caused unprecedented flooding in libya, resulting the loss of thousands of lives and many more reported missing. natural disasters profoundly impacts communities, economies, and infrastructure, making the prediction vital for preparedness and mitigation . as climate changes and urban areas expand, the need for holistic, global-scale predictions becomes paramount. while single-modal methods can capture specific aspects, such as how landslides are influenced by weather, they might overlook other critical factors like soil type and geographical topography. transitioning to a multimodal approach, which considers multiple facets of disaster prediction, presents its own challenges. the broader the scope, the harder it becomes to obtain comprehensive data for each modality. given these complexities, there's a noticeable gap in research dedicated to global-scale, multimodal disaster prediction.different from single-modal data, multimodal data is introduced to have better potential, for each modality can influence the training of each other . integrating diverse data types can provide valuable potentials in many applications . weather stands as a paramount external factor, wielding significant influence in the facilitation of specific natural calamities. in numerous instances, its indispensability and substantial impact remain unequivocal. concurrently, geographical information harbors the potential to elucidate latent triggers, particularly in the context of disasters profoundly shaped by topographical factors, exemplified by phenomena like landslides.this study introduces a multimodal disaster prediction framework, integrating diverse data sources including weather information, geographical information in the form of satellite imagery, and textual descriptions. by synthesizing these varied sources of information, our approach provides a unique solution for predicting different types of natural disasters. the results emphasize the critical role of combining multiple data streams to enhance the accuracy and comprehensiveness of disaster predictions. while single-modal methods can capture specific aspects, such as how landslides are influenced by weather, they might overlook other critical factors like soil type and geographical topography.different from single-modal data, multimodal data is introduced to have better potential, for each modality can influence the training of each other. concurrently, geographical information harbors the potential to elucidate latent triggers, particularly in the context of disasters profoundly shaped by topographical factors, exemplified by phenomena like landslides.this study introduces a multimodal disaster prediction framework, integrating diverse data sources including weather information, geographical information in the form of satellite imagery, and textual descriptions. 
by synthesizing these varied sources of information, our approach provides a unique solution for predicting different types of natural disasters. the results emphasize the critical role of combining multiple data streams to enhance the accuracy and comprehensiveness of disaster predictions.to discern a sequential trend in weather variations, we gathered weather data spanning the past five years for each designated city. given our objective to forecast disasters in 2021, the weather data pertains to the annual intervals from 2016 to 2020.as a multimodal prediction task, we collect data from various sources for each sample and process them through distinct pipelines. in the context of landslide prediction, the model encompassing weather, textual, and satellite images shows an improvement of 30% in f1 score over the performance of the model relying solely on weather and textual data. for example, in flood predictions, the model uses all three types of data has a slightly lower auroc score than the one uses only weather and text data.in conclusion, this research underscores the significance of diverse data in offering tailored insights for specific disaster types. incorporating remote sensing imagery within the domain of multimodal disaster prediction framework enriches the analysis with vital geographical and hydrological details; while for disasters heavily influenced by meteorological factors, statistical weather data becomes indispensable. our study advocates for a comprehensive strategy in global disaster prediction, seamlessly merging varied data types and harnessing the strengths of both computer vision and natural language processing techniques. however, given the restricted temporal range and the suboptimal quality of available weather and imagery datasets, the application of more intricate models like lstm, designed for time series data, becomes inherently challenging. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/434.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/434.txt new file mode 100644 index 0000000000000000000000000000000000000000..0154412bafe7c3bbecaae5c4c570373b87eb0684 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/434.txt @@ -0,0 +1 @@ +the federal trade commission has recently reported an alarming increase in credit card fraud reports and the revenue lost due to such frauds in the past few years . one of the key factors of such an increase in credit card fraud is the digitalization of commerce because of the covid-19 outbreak and the shutdown of the whole world , . credit card fraud has existed since the invention of payment cards, and different policies were formulated and brought into practice from time to time to reduce the losses incurred by such frauds. the address verification system, keeping a scoring record of positive and negative lists to identify and prevent high-risk transactions , and the use of card verification value (cvv) by visa and card verification code (cvc) by mastercard are a few of the examples of the preventive policies. as for the detective approach, many machine learning (ml) models, such as support vector machines (svm), logistic regression, random forest , artificial neural networks, knearest neighbors (knn) and self-organizing maps (som) , have been implemented for this cause. 
the uptrend in fraud cases and lost revenue, despite these policies, clearly shows that the previous set of measures, both preventive and detective, is not enough.in order to have a better solution that can effectively and efficiently mitigate the losses due to these frauds, we have to understand the shortcomings of the previous approaches as well as the challenges in the credit card fraud detection problem in general. credit card fraud detection is a binary classification problem having two classes: normal (or positive class) and fraudulent (or negative class). a very basic property and one of the main issues in these problems is that the data is highly imbalanced , owing to the fact that billions of card transactions take place every month worldwide, and a significantly smaller amount of transactions are fraudulent. to deal with the data imbalance issue, the ml models that are in practice have used sampling techniques; that is, a sample from the majority class, based on some sampling criterion, is taken or instances for the minority class are synthetically generated based on some criterion so that the number of instances in both classes is made equal. in some cases, an approach based on both of the sampling techniques is used to have a balanced dataset .another property of fraud detection problems is that the fraudulent activities and techniques evolve with time . any method used by the fraudsters is identified by the antifraud team of the respective organization, and efforts are made to stop further losses through the same fraudulent technique. consequently, personnel with the aim of gaining unlawful advantage of people or systems (or both) try to come up with new ideas and techniques. the ml algorithms that have been implemented for this purpose can only model the fraudulent techniques that are already in practice; that is, they cannot model, and hence, detect, the fraud that will be carried out by methods that are not existent and are yet to be invented. therefore, we need a model that can also detect, predict, and stop fraud by the methods that will be invented in the future.one-class classification (occ) algorithms, on the other hand, take data from only a single (positive or the normal) class for training, which is usually available in abundance, and form a boundary around the positive class (or between the two classes). these algorithms classify everything that lies outside the inferred boundary as a negative class object. these algorithms have been implemented in many different domains and have proved to be a good solution with good performance for the respective problem. the examples of such domains include but are not limited to bot detection on twitter , spoofing detection , , video surveillance , machine fault detection for predictive maintenance , hyperspectral image analysis and classification , and myocardial infarction (mi) detection .to address the above-mentioned challenges in fraud detection problems and to resolve the curse of dimensionality by embedding the feature extraction into the training phase of the algorithm and letting the model extract a discriminative set of features, we propose to use a set of occ algorithms that are ideal for the highly imbalanced dataset and can effectively model and detect the fraudulent transactions carried out by to-be-invented techniques. for this purpose, we experimented with several occ models to find a more efficient way to reduce the losses by such frauds. 
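To make the one-class setup concrete, the sketch below trains scikit-learn's OneClassSVM on normal transactions only and flags everything outside the learned boundary as potential fraud; the data is synthetic and the hyperparameters are illustrative, not those of the models evaluated in the paper.

```python
import numpy as np
from sklearn.svm import OneClassSVM
from sklearn.preprocessing import StandardScaler

rng = np.random.default_rng(0)
X_normal = rng.normal(0.0, 1.0, size=(5000, 10))            # stand-in for normal transactions
X_test = np.vstack([rng.normal(0.0, 1.0, size=(100, 10)),   # unseen normal traffic
                    rng.normal(4.0, 1.0, size=(10, 10))])   # a "novel" fraud pattern

scaler = StandardScaler().fit(X_normal)
occ = OneClassSVM(kernel="rbf", nu=0.01, gamma="scale")
occ.fit(scaler.transform(X_normal))                         # training uses only the positive class

pred = occ.predict(scaler.transform(X_test))                # +1 = normal, -1 = outlier (fraud)
print("flagged as fraud:", int((pred == -1).sum()))
```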
to deal with the data imbalance issue, the ml models that are in practice have used sampling techniques; that is, a sample from the majority class, based on some sampling criterion, is takenor instances for the minority class are synthetically generated based on some criterionso that the number of instances in both classes is made equal.one-class classification (occ) algorithms, on the other hand, take data from only a single (positive or the normal) class for training, which is usually available in abundance, and form a boundary around the positive class (or between the two classes).to address the above-mentioned challenges in fraud detection problems and to resolve the curse of dimensionality by embedding the feature extraction into the training phase of the algorithm and letting the model extract a discriminative set of features, we propose to use a set of occ algorithms that are ideal for the highly imbalanced dataset and can effectively model and detect the fraudulent transactions carried out by to-be-invented techniques.in the occ setting, data from the target (positive) class is used to develop an optimal boundary between the target data and outliers. for instance, the one-class support vector machine (ocsvm) has a hyperplane, the support vector data description (svdd) has a hyper-sphere, and the ellipsoidal subspace support vector data description (essvdd) has an ellipsoidal boundarydifferentiating the two class (fraudulent and normal transaction) data from each other. the subspace support vector data description (ssvdd), is the svdd-based model where the data is projected, using a projection matrix q, from the original d dimensions to the lower d-dimensional subspace iteratively during the training. q, incorporated with the matrix s q , representing the geometric information of the data in the subspace, is employed to find the optimized set of features in the graph-embedded subspace support vector data description (gessvdd). if an α value falls between 0 and the regularization parameter c, such data point, denoted by s, lies on the boundary of the hyper-sphere and is known as a support vector.(3) to classify any test data vector x * into its respective class, it must first be transformed to the lower d-dimensional subspace using the same q and s q and the distance of x * from u in transformed feature subspace is calculated and checked if it is greater or smaller than the r given in(3).the matrix s q , having geometric information in the data in the transformed feature subspace, is mathematically represented as:.where x ∈ r n ×d is the data matrix and l is the matrix representation of the graph. • in third variant, denoted by gessvdd-knn, the l x is replaced with the knn graph l kn n , where l kn n = d kn n -a kn n . the elements of the a kn n matrix are set to 1 if data points x i or x j are in each other's neighborhood, and 0 otherwise, mathematically expressed as. these models are either biased towards the positive class (in case of high values), or the boundary formed by these models is very small, and consequently, the normal transactions are forced out of the boundary and classified as fraudulent (in case of low values). in the future, we plan to adapt multi-modal subspace support vector data description (mssvdd)for credit card fraud detection. 
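As a small illustration of the kNN graph used in the GESSVDD-kNN variant described above, the sketch below builds the symmetric adjacency matrix A_knn and the Laplacian L_knn = D_knn - A_knn; it follows the textual definition rather than any reference implementation.

```python
import numpy as np

def knn_graph_laplacian(X: np.ndarray, k: int = 5) -> np.ndarray:
    """Build the symmetric kNN adjacency A and return L = D - A.

    A[i, j] = 1 if x_i is among the k nearest neighbours of x_j or vice versa.
    """
    n = X.shape[0]
    d2 = ((X[:, None, :] - X[None, :, :]) ** 2).sum(-1)   # pairwise squared distances
    np.fill_diagonal(d2, np.inf)                           # a point is not its own neighbour
    A = np.zeros((n, n))
    for i in range(n):
        A[i, np.argsort(d2[i])[:k]] = 1.0                  # mark i's k nearest neighbours
    A = np.maximum(A, A.T)                                 # "x_i or x_j in each other's neighbourhood"
    D = np.diag(A.sum(axis=1))
    return D - A

L_knn = knn_graph_laplacian(np.random.default_rng(0).normal(size=(20, 4)), k=3)
print(L_knn.shape, np.allclose(L_knn, L_knn.T))            # (20, 20) True
```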
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/435.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/435.txt new file mode 100644 index 0000000000000000000000000000000000000000..cecd73de853cd2109130730f00dc3a780f091d37 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/435.txt @@ -0,0 +1 @@ +cosine (dis)similarity is a popular measure for data that can be characterised by a collection of token frequencies, such as texts, because it only takes into account the relative frequency of each token. cosine dissimilarity is particularly relevant for distance-based algorithms like classical (weighted) nearest neighbours (nn) and fuzzy rough nearest neighbours (frnn). in the latter case, cosine dissimilarity has been used to detect emotions, hate speech and irony in tweets .a common way to calculate cosine dissimilarity is to normalise each record (consisting of a number of frequencies) by dividing it by its euclidean norm, and then considering the squared euclidean distance between normalised records. euclidean distance can be seen as a special case of a larger family of minkowski p-distances (namely the case p = 2). it has previously been argued that in high-dimensional spaces, classification performance can be improved by using minkowski p-distance with fractional values for p between 0 and 1 .in light of this, we propose angular minkowski p-distance: a natural generalisation of cosine dissimilarity obtained by substituting other minkowski p-distances into its definition. the present paper is a case study of angular minkowski p-distance using the well-known 20-newsgroups classification dataset. in particular, we investigate the relationship between the hyperparameter p, the dimensionality m, the number of neighbours k, and the choice of classification algorithm and weights.to the best of our knowledge, this topic has only been touched upon once before in the literature. unlike the present paper, the authors of do not evaluate classification performance directly, but rather the more abstract notion of 'neighbourhood homogeneity', and they only consider a limited number of values for p and m.the remainder of this paper is organised as follows. in section 2, we motivate and define angular minkowski p-distance. in section 3, we recall the definitions of nn and frnn classification. then, in section 4, we describe our experiment, and in section 5 we present and analyse our results, before concluding in section 6.the rootless p-size is not a norm for any p (other than p = 1, for which it coincides with the ordinary 1-norm); rootless p-distance is a metric for p ≤ 1.thus, cosine dissimilarity corresponds to rootless angular minkowski 2-distance, and we can consider angular minkowski p-distance with different values for p as alternatives to cosine dissimilarity.in order to evaluate the behaviour of nn and frnn with angular minkowski p-distance, we systematically vary different values for p, m as well as the number of nearest neighbours k. this qualitative difference is somewhat surprising, but it can perhaps be explained by the fact that for the upper approximation, neighbours are drawn from a uniform concept (each decision class), whereas for the lower approximation and nn, neighbours are drawn from across decision classes. -the upper approximation is a better classifier (in terms of auroc) than the lower approximation and nn for the 20-newsgroups dataset. 
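A minimal sketch of the angular Minkowski p-distance defined above: records are normalised by their Euclidean norm and a (rootless) Minkowski p-distance is applied to the normalised records; for p = 2 this recovers cosine dissimilarity up to a constant factor. The function name and the factor-of-2 check are my additions, not the paper's notation.

```python
import numpy as np

def angular_minkowski_p(x: np.ndarray, y: np.ndarray, p: float, rootless: bool = True) -> float:
    """Normalise by the Euclidean norm, then apply the (rootless) Minkowski p-distance."""
    xn = x / np.linalg.norm(x)
    yn = y / np.linalg.norm(y)
    s = np.sum(np.abs(xn - yn) ** p)
    return s if rootless else s ** (1.0 / p)

x = np.array([3.0, 1.0, 0.0])
y = np.array([1.0, 2.0, 1.0])
cosine_dissim = 1.0 - x @ y / (np.linalg.norm(x) * np.linalg.norm(y))
# Rootless angular 2-distance equals twice the cosine dissimilarity.
print(np.isclose(angular_minkowski_p(x, y, p=2), 2 * cosine_dissim))  # True
```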
given the relatively poor performance of the lower approximation, it is surprising that the mean approximation produces even better results than the upper approximation. not only is a good choice for p necessary to make use of the potential performance increase from adding more dimensions, choosing p poorly can actually cause performance to decrease with dimensionality. higher values for k lead to higher auroc, and within the range of investigated values, the relationship appears to be similar to the relationship between auroc and m: each doubling of k leads to an increase in auroc that is slightly smaller than the previous increase. -in contrast, for the upper and mean approximations, auroc starts out quite high for high values of p, and increases only little thereafter. howewer, from k = 8 upwards, auroc starts to strongly increase for lower values of p, eventually surpassing the auroc obtained with higher values of p from k = 64 upwards. this means that the good performance of the mean and upper approximations around p = 1 is only realised for high values of k. in summary (table1), we obtain the best classification performance on the 20-newsgroups dataset with the upper and mean approximation and angular minkowski p-distance with values of p around 1, but only when k is high enough (≥ 64). in an exploratory case study of the large 20newsgroups text dataset, we showed that the choice of p can have a large effect on classification performance, and in particular that the right choice of p can increase classification performance over cosine dissimilarity (which corresponds to p = 2).we have also examined the interaction between p and the dimensionality m of a dataset, the choice of classification algorithm (nn or frnn), the choice of weights (linear or reciprocally linear), and the choice of the number of neighbours k. under optimal circumstances (high k and high m), the best-performing values for p are in the neighbourhood of 1 (frnn with upper or mean approximation) and around 4 (nn and frnn with lower approximation). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/436.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/436.txt new file mode 100644 index 0000000000000000000000000000000000000000..97480beed1349aebab47179c2a28c4ddaf17a17f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/436.txt @@ -0,0 +1 @@ +explainable ai (xai) is gaining more and more prominence in recent years. one of the notable directions is to estimate the attribution/importance/saliency of input variables (ribeiro et al., 2016;zhou et al., 2016;selvaraju et al., 2017;sundararajan et al., 2017;lundberg & lee, 2017). in particular, the shapley value (weber, 1988) is widely considered as a standard attribution method with a relatively solid theoretical foundation, because it is proven that the shapley value is a unique attribution metric that satisfies the axioms of symmetry, dummy, additivity and efficiency. the shapley value can roughly be considered to be computed based on the marginal difference of the outputs when we mask an input variable, so as to calculate the i-th input variable's attribution ϕ(i) which represents its numerical effects on the ai model output.however, these game-theoretic attribution methods have two core challenges: (1) how to define baseline values, which are used to represent the masked state of input variables (ancona et al., 2019;sundararajan et al., 2017;ren et al., 2023). 
(2) how to define the partition of input variables, e.g., taking pixels or local regions as input variables for image classification and taking words or tokens as input variables for natural language processing. the above challenges are rooted in the conflict of attributions computed under different partitions of input variables, which is a common problem shared by different attribution metrics. when we consider each i-th individual input variable as a basic input unit, we can compute the attribution of the i-th input variable, denoted by ϕ(i). alternatively, we can apply a new partition by grouping a set of input variables in s, and we consider the entire set as a singleton variable, s. we can call s a coalition of input variables.in this way, the attribution method may estimate the attribution of the coalition s, ϕ(s). thus, the conflict of attributions means that the attribution of the coalition s is not equal to the sum of attributions of the input variables in s. for example, in natural language processing tasks, we can either use words or tokens as input variables. in this case, if a word contains two tokens, then the attribution of the word is not equal to the attribution of two tokens within the word. more crucially, there is often no broad consensus on the partition of input variables in different tasks. thus, there is still no standard solution to the conflict problem. in spite of that, in this paper, we revisit the conflict between individual variables' attributions and their coalition's attribution from a fully new perspective. we derive the fundamental mechanism behind the conflict and develop a new method to estimate the attribution of coalitions, as follows.• first, as the theoretical foundation to analyze the conflicts of attributions, we reformulate the shapley value as the allocation of harsanyi interactions encoded by the ai model. the harsanyi interaction between input variables in s is a typical metric to measure the non-linear relationship between these variables encoded by an ai model. as a toy example in figure 1(a), the ai model models the interaction in s = {x 1 , x 2 , x 3 , x 4 , x 5 } as an inference pattern for the face. the interaction between image regions in s makes a numerical effect i(s) on the output score for face detection. in this way, it is proven that the attribution (the shapely value) of the i-th input variable can be reformulated as the allocation of interactions i-th input variable participates.• second, the above perspective of interaction enables us to extend the definition of the shapley value to define the attributions of coalitions. from the new perspective of reallocating interaction effects, the essence of the shapley value of the i-th input variable ϕ(i) can be considered as the accumulation of effects allocated from different interactions. thus, just like the shapley value, the attribution of any coalition s can be computed as an allocation of the interactions containing the coalition s. specifically, for example, in figure 1(b), the attribution of the coalition s, φ(s), receives 1 of the effect from interaction i({x 1 , x 2 }) and 2 3 of the effect from interaction i({x 1 , x 2 , x 3 }). • third, we further prove the essential mechanism that causes the conflict between individual variables' attributions and the attribution of the coalition s. 
essentially, for any coalition s, the computation for the attribution of the coalition and the shapley value of each input variable always use three types of interactions, i.e., the interactions covering all variables in s, the interactions covering partial variables in s, and interactions independent with variables in §. specifically, the interactions covering all variables in s correspond to the joint parts of the attributions of coalition s and shapley value of each variable i ∈ s. the lack of interactions covering partial variables in s is responsible for the conflicts between the attributions of coalition s and the shapley value of each variable i ∈ s., 2016;selvaraju et al. (2) how to define the partition of input variables, e. as a toy example in figure1(a), the ai model models the interaction in s = {x 1 , x 2 , x 3 , x 4 , x 5 } as an inference pattern for the face. in this way, it is proven that the attribution (the shapely value) of the i-th input variable can be reformulated as the allocation of interactions i-th input variable participates. specifically, for example, in figure1(b), the attribution of the coalition s, φ(s), receives 1 of the effect from interaction i({x 1 , x 2 }) and 2 3 of the effect from interaction i({x 1 , x 2 , x 3 }). in this way, v(∅) represents the model output when all input variables are masked, and v(n ) denotes the model output on the original input sample x. for example, we consider the slang term s = {x 1 = raining, x 2 = cats, x 3 = and, x 4 = dogs} in the input sentence "it was raining cats and dogs outside.given an ai model v with n input variables in n , the estimation of the numerical attribution/importance of each i-th input variable phi(i) depends on the partition of input variables. for example, in natural language processing, for the input "raining cats and dogs", some people choose to take each token as an input variable and compute the attributions of different tokens, n = {rain, -ing, cats, and, dogs}, while other people use words as input variables, p = { , cats, and, dogs}.theorem 2 (reformulation of the shapley value) the shapley value ϕ(i) of each input variable x i can be explained as ϕ(i) = s⊆n,i∈s theorem 2 explains the internal mechanism of the shapley value.", the dnn may encode the and interaction s 1 = {x 1 = raining, x 2 = cats, x 3 = and, x 4 = dogs} to measure the and relationship between variables in s 1 . besides, the word x 2 = cats may also be involved in other interactions, such as i and (s 2 = {x 1 , x 2 }) and i or (s 3 = {x 2 , x 3 , x 4 }). then the shapley value of the word x 2 = cats can be computed as the accumulation of all allocated effect ϕ(x1) = 1 it means that the shapley value is computed by evenly allocating each interaction effect to all its compositional input variables. let the model encode an and interaction t = {x1 = rain, x2 = -ing, x3 = cats, x4 = and, x5 = dogs}., an ai model's output on the input x can be accurately mimicked by the sum of the coalition s's attribution, the shapley value of each variable in n \ s, and the numerical effects of the interaction covering partial variables in s. 
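The reformulation above can be checked numerically: the sketch below computes Harsanyi interactions I(S) for a toy set function and verifies that allocating each interaction evenly over its members, phi(i) = sum over S containing i of I(S)/|S|, reproduces the classical Shapley value. The toy game v is arbitrary; only the identity being illustrated comes from the text.

```python
from itertools import combinations
from math import factorial

def subsets(items):
    """All subsets of `items` as sorted tuples (including the empty tuple)."""
    for r in range(len(items) + 1):
        yield from combinations(items, r)

# Toy set function v(S) over N = {0, 1, 2}; v(()) = 0 by construction.
N = (0, 1, 2)
v = {S: len(S) ** 2 + (2.0 if {0, 1} <= set(S) else 0.0) for S in subsets(N)}

# Harsanyi interaction (dividend): I(S) = sum_{T subset of S} (-1)^(|S|-|T|) v(T)
I = {S: sum((-1) ** (len(S) - len(T)) * v[T] for T in subsets(S))
     for S in subsets(N) if S}

# Shapley value as an even allocation of every interaction containing i.
phi_from_interactions = {i: sum(I[S] / len(S) for S in I if i in S) for i in N}

def shapley(i):
    """Classical Shapley value of player i, for comparison."""
    total = 0.0
    others = tuple(j for j in N if j != i)
    for T in subsets(others):
        w = factorial(len(T)) * factorial(len(N) - len(T) - 1) / factorial(len(N))
        total += w * (v[tuple(sorted(T + (i,)))] - v[T])
    return total

print(all(abs(phi_from_interactions[i] - shapley(i)) < 1e-9 for i in N))  # True
```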
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/437.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/437.txt new file mode 100644 index 0000000000000000000000000000000000000000..d67eaf18e99524240f0c2c0ccbcbd56bfd6f72bf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/437.txt @@ -0,0 +1 @@ +diabetes continues to be one of the most challenging noncommunicable diseases worldwide. it is a chronic metabolic disorder characterized by high blood sugar levels caused by problems in insulin production, sensitivity of cells' response to * corresponding author insulin, or both . there are four types of diabetes, namely type-1, type-2, gestational type, and other types. however, type-2 diabetes (t2dm) dominates all other diabetes types , accounting for more than 90% of all diabetes cases. the high prevalence of t2dm is strongly associated with the unhealthy modern lifestyle, including unhealthy eating habits, smoking, obesity, and a lack of physical activity, as well as internal predisposition factors such as race and family history .the predominant challenge associated with diabetes stems from the array of complications that can arise when diabetes is not adequately controlled. among these unwanted complications, one particularly notable issue is kidney complication, which falls under the category of microvascular complications, affecting the smaller blood vessels , . this specific complication is commonly referred to as diabetic nephropathy and accounts for approximately 14.0% of diabetes-related complications .diabetic nephropathy is considered as a type of chronic kidney disease (ckd). according to the kidney disease improving global outcomes (kdigo) 2012 guidelines, ckd is established when there are markers of kidney damage and/or a glomerular filtration rate (gfr) < 60 ml/min/1.73m 2 that lasts for at least ≥ 3 months. the kidney damage markers for ckd include the presence of pathologically high quantities of urinary albumin excretion (albuminuria), the presence of urine sediment abnormalities, structural abnormalities detected by imaging, and a history of kidney transplantation .as mentioned in the preceding paragraph, the presence of albuminuria can be indicative of a kidney problem. albumin in urine can signal an issue with the kidney filtration function. albuminuria can be divided into two categories: microalbuminuria and macroalbuminuria. microalbuminuria is diagnosed when the albumin-creatinine ratio is > 30 mg/24h and < 300 mg/24h, while macroalbuminuria is diagnosed when the albumin excretion is > 300 mg/24h in a 24-hour urine collection sample . as albuminuria can serve as a signal of kidney problems, it becomes essential for diabetes patients to be aware of their risk of developing this condition.therefore, the primary objective of this study is to develop a supervised learning model capable of predicting the risk of albuminuria development in diabetes patients, particularly those with t2dm. the primary contributions of this paper can be summarized as follows:• development of a supervised model capable of predicting early albuminuria in patients with type 2 diabetes mellitus (t2dm). 
• identification of the optimal supervised algorithm for early albuminuria detection in t2dm patients.therefore, the primary objective of this study is to develop a supervised learning model capable of predicting the risk of albuminuria development in diabetes patients, particularly those with t2dm.in their study used machine learning approaches to predict the risk of albuminuria in person with diabetes. they conducted their study on 1330 subjects and used a variety of machine learning algorithms, including random forest, gradient boost, logistic regression, support vector machines, multilayer perceptron, and a stacking classifier.this study aimed to evaluate the performance of supervised learning algorithms in predicting the risk of developing albuminuria in patients with t2dm patients. we evaluated several supervised learning algorithms, including 6 machine learning algorithms and 1 deep learning algorithm. the machine learning algorithms used were naïve bayes, support vector machine (svm), decision tree, random forest, adaboost, and xgboost. among these machine learning algorithms, random forest, adaboost, and xgboost are ensemble algorithms. the use of deep learning in this experimental design, as compared to machine learning algorithms, was intended to evaluate its potential performance considering the limited size of the dataset.we train each supervised learning algorithm with its defined parameters, as shown in tableii, using the training dataset, which results in trained models. the superior results were obtained from the deep learning algorithm, specifically the multi-layer perceptron (mlp), which achieved an accuracy and f1-score of 0.71, respectively. for example, there could be a patient with uncon-trolled diabetes, indicated by high blood glucose and high lipid profile, but not developing albuminuria, while another patient with normal glucose levels and normal lipid profile developing albuminuria. therefore, one possible solution to improve the model's accuracy is to increase the dataset size and variety, allowing the learning algorithms to better understand the complex patterns present in such data. this enables the mlp to solve complex problems relatively easily compared to other traditional machine learning algorithms when dealing with such complex data. among the various supervised learning models examined in this study, the mlp algorithm demonstrated superior performance in terms of precision, recall, accuracy, and f1-score. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/438.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/438.txt new file mode 100644 index 0000000000000000000000000000000000000000..d3ba352b2c1232816dbbcef5b7df129e5465ca9e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/438.txt @@ -0,0 +1 @@ +multiple classifier systems (mcs) are often used to improve the accuracy and reliability of machine learning models . mcs has three main phases: generation, selection, and combination. in the generation phase, a pool of classifiers is created by training base classifiers with techniques such as bagging , different models, and variations of learning algorithms .the selection phase selects the most competent classifiers in the pool to predict a given query sample. two main approaches for selecting classifiers are static selection and dynamic selection (ds) . 
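A minimal sketch of the kind of evaluation pipeline described above, using an MLP classifier and accuracy/F1 on held-out data; the synthetic features, network sizes, and split are placeholders rather than the study's dataset or tuned hyperparameters.

```python
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score, f1_score

# Synthetic stand-in for tabular clinical features (e.g. glucose, HbA1c, lipid profile).
rng = np.random.default_rng(42)
X = rng.normal(size=(600, 8))
y = (X[:, 0] + 0.5 * X[:, 1] + rng.normal(scale=1.5, size=600) > 0).astype(int)  # noisy label

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, stratify=y, random_state=0)
clf = make_pipeline(StandardScaler(),
                    MLPClassifier(hidden_layer_sizes=(32, 16), max_iter=500, random_state=0))
clf.fit(X_tr, y_tr)
pred = clf.predict(X_te)
print(f"accuracy={accuracy_score(y_te, pred):.2f}  f1={f1_score(y_te, pred):.2f}")
```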
in the static approach, the selection is performed during training and works on the classifiers' overall performance on a validation set. in contrast, the ds approach selects the classifiers on the fly based on their competence in predicting a specific query sample. when only one classifier is selected, it is called dynamic classifier selection (dcs), and when more than one classifier is selected, it is called dynamic ensemble selection (des). examples of des techniques include meta-des , dynamic ensemble selection performance (des-p) , and k-nearest oracles union (knora-u) . the last phase of an mcs is combination, also called integration. in this phase, the output of all the classifiers selected is combined to produce a final prediction. the combination or integration of the predictions can be done in various ways, including voting and weighting .currently, research efforts in des are focused on proposing new methods to improve phases, such as generation , selection , and combination . additionally, there have been attempts to apply des to other areas of knowledge . despite the progress that has been made, no des technique is suitable for all problems. this is in line with the statistical rationale for mcs , which suggests that combining multiple classifiers increases the likelihood of finding the optimal result for any given problem. however, to the authors' knowledge, the field still lacks techniques that work on evaluating the ensembles selected by des methods and explores the advantages of pre-selected ensembles to obtain better performance.aiming to evaluate this gap in des' research field, we pose the following research question: "how to analyze ensembles selected by different des techniques and choose the one having the highest correct prediction potential?" to investigate this question, we propose the post-selection dynamic ensemble selection (ps-des) approach. ps-des is based on the assumption that different selection criteria may lead to different selected ensembles, and the best criteria used to select an ensemble may differ on an instance-toinstance basis. ps-des aims to analyze and choose the best ensemble from a set of ensembles generated by various des techniques to obtain more reliable predictions. therefore, our proposal work as a post-selection scheme, i.e., it performs after the selection phase of different des methods and before the combination phase.moreover, the best ensemble is selected based on a new concept of ensemble potential proposed in this work. in contrast to the selection criteria employed in many des methods such as meta-des that work by estimating the quality or competence of each model, the proposed ensemble potential evaluates whether the final selected ensemble of classifiers is reliable. we propose three approaches based on classical performance estimation metrics for measuring the ensemble potential: accuracy, f-score, and matthew's correlation coefficient.experiments over 20 classification datasets and considering three different performance evaluation metrics demonstrate that the post-selection scheme based on the ensemble potential leads to systematic improvement in classification performance over state-of-the-art des methods. thus, the evaluation of the pre-selected ensemble capabilities should not be neglected. the rest of the paper is organized as follows: section ii shows a literature review on des. section iii presents our proposal. section iv shows the experimental setup. 
the results are discussed in section v, and section vi presents the conclusions. when only one classifier is selected, it is called dynamic classifier selection (dcs), and when more than one classifier is selected, it is called dynamic ensemble selection (des).aiming to evaluate this gap in des' research field, we pose the following research question: "how to analyze ensembles selected by different des techniques and choose the one having the highest correct prediction potential?" to investigate this question, we propose the post-selection dynamic ensemble selection (ps-des) approach. ps-des is based on the assumption that different selection criteria may lead to different selected ensembles, and the best criteria used to select an ensemble may differ on an instance-toinstance basis. in contrast to the selection criteria employed in many des methods such as meta-desthat work by estimating the quality or competence of each model, the proposed ensemble potential evaluates whether the final selected ensemble of classifiers is reliable.experiments over 20 classification datasets and considering three different performance evaluation metrics demonstrate that the post-selection scheme based on the ensemble potential leads to systematic improvement in classification performance over state-of-the-art des methods.the second phase of developing a des is selection, which aims to choose a subset of classifiers (p ′ ⊆ p ), also known as an ensemble. in contrast, the dynamic approach, called dynamic ensemble selection (des), involves selecting a subset of the pool for each query sample x q .the final phase of a des is integration, also called aggregation or combination, which involves combining the classifiers selected in the selection phase when multiple classifiers are chosen.the proposed post-selection dynamic ensemble selection (ps-des) is based on the notion of potential, the capability of an ensemble selected by a given ds technique to make a correct prediction. in addition, this proposal aims to evaluate the quality or potential of a selected ensemble, which contrasts with the current des methods that build an ensemble by selecting multiple competent classifiers individually without trying to characterize the selected dynamic ensemble. all des approaches, des set = {des 1 , des 2 , . given a query sample (x q ), the validation dataset dsel, and a set of des techniques (des set ), this phase involves selecting several dynamic ensembles p ′ , each one created using a different des method, and assessing their effectiveness in order to determine which ensemble is most likely to perform well for the given x q .then, for each technique in des set , a set of classifiers is selected according to its competence estimation and selection criterion, forming the ensemble p ′ (lines 5 and 6).this work proposed a new dynamic ensemble selection (des) method: post-selection dynamic ensemble selection (ps-des). additionally, ps-des was consistently superior to the existing state-of-the-art des techniques, which implies that evaluating the selected ensembles as a collective is more important than assessing and choosing each base classifier separately, as is the trend in most des methods. 
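Under one plausible reading of the post-selection scheme described above, the sketch below scores each pre-selected ensemble on the query's nearest neighbours in DSEL (an accuracy-based "potential"), keeps the best ensemble, and combines it by majority vote. The interface, the accuracy-based potential, and the assumption of integer-coded labels are mine, not the authors' implementation.

```python
import numpy as np
from sklearn.metrics import accuracy_score

def ps_des_predict(x_q, des_methods, X_dsel, y_dsel, k=7):
    """Post-selection over ensembles for a single query sample.

    des_methods: dict name -> callable(x_q) returning a list of fitted classifiers
                 (the ensemble P' pre-selected by that DES technique).
    """
    # Region of competence: k nearest DSEL samples to the query.
    idx = np.argsort(((X_dsel - x_q) ** 2).sum(axis=1))[:k]
    X_loc, y_loc = X_dsel[idx], y_dsel[idx]

    best_ensemble, best_potential = None, -np.inf
    for name, select in des_methods.items():
        ensemble = select(x_q)
        votes = np.array([clf.predict(X_loc) for clf in ensemble])
        ens_pred = np.apply_along_axis(lambda c: np.bincount(c).argmax(), 0, votes)
        potential = accuracy_score(y_loc, ens_pred)          # estimated ensemble potential
        if potential > best_potential:
            best_ensemble, best_potential = ensemble, potential

    # Combination phase: majority vote of the chosen ensemble on the query.
    votes = np.array([clf.predict(x_q.reshape(1, -1))[0] for clf in best_ensemble])
    return np.bincount(votes).argmax()
```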
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/439.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/439.txt new file mode 100644 index 0000000000000000000000000000000000000000..e979863672b131f25fba7f36c385152231f1a97c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/439.txt @@ -0,0 +1 @@ +soccer, also known as "association football" or "football", is widely recognized as the most popular sport worldwide in terms of both spectatorship and player numbers. as a generally low-scoring sport, especially at the professional level, small goal margins, competitive leagues, and draws being a common outcome make predicting soccer match results a challenging task (bunker & susnjak, 2022;yeung, bunker, & fujii, 2023), especially when only goals are available in the dataset (berrar, lopes, & dubitzky, 2019). the inherent unpredictability of outcomes is, of course, one of the primary reasons soccer attracts such a large number of fans. despite its challenging nature, given the popularity of the sport, there is a wide range of stakeholders who are interested in the prediction of soccer match results including fans, bookmaking companies, bettors, media, as well as coaches and performance analysts.models for match result prediction were historically proposed from disciplines such as statistics (dixon & coles, 1997;maher, 1982), operations research, and mathematics. however, with the advent of machine learning (ml), which is a sub-discipline of computer science that uses many techniques from statistics, over the past two decades, ml models have become a popular approach to predict match outcomes in soccer. the lack of publicly-available benchmark datasets has, however, meant that it has been challenging for researchers to evaluate their results against other studies. match features used in models, which are derived from events that occur within matches, are often contained in vendor-specific event data streams that are generally only available to professional teams (decroos, bransen, van haaren, & davis, 2019). the open international soccer database (dubitzky, lopes, davis, & berrar, 2019), despite not containing such match features, has enabled researchers to compare their models in a like-for-like manner on a large number of soccer matches (over 216,000 matches across 52 leagues). the 2017 soccer prediction challenge (berrar, lopes, davis, & dubitzky, 2019) was held, with participants using the open international soccer database to predict 206 unplayed matches. some of the top-ranked participants in the 2017 soccer prediction challenge used gradient-boosted tree models and/or rating features (berrar, lopes, & dubitzky, 2019;constantinou, 2019;hubáček, šourek, & železnỳ, 2019), which suggested that condensing a wide range of historical match information into ratings was of benefit, as was using the accuracy-enhancing benefits of boosting. subsequently, other studies (m.n. razali, mustapha, mostafa, & gunasekaran, 2022;n. razali, mustapha, arbaiy, & lin, 2022;robberechts & davis, 2019) have used the open international soccer database, in some cases improving upon the 2017 challenge results (m.n. razali et al., 2022).deep learning has, over the past few years, gained in popularity for the prediction of match results in soccer, given its success in many domains including computer vision, trajectory analysis, and natural language processing. 
in soccer, deep learning has been helpful in predicting the locations and types of subsequent events (simpson, beal, locke, & norman, 2022;yeung, sit, & fujii, 2023), the outcomes of shots (yeung & fujii, 2023), and in using match video to detect and track players and/or the ball, to detect events, and to analyze matches (akan & varlı, 2023). two types of models have thus emerged as potential state-of-the-art models for sports match result prediction: deep learning and boosted decision tree models.a subsequent soccer prediction challenge competition was held in 2023,1 using a similar dataset. however, unlike the 2017 soccer prediction challenge, the 2023 competition required two tasks, the first of which, "exact score prediction," involved predicting match results in terms of exact goals scored (for each team), and the second, "probability prediction," involved prediction in terms of the probabilities for a win, draw, and loss.in the current study, initially, consistent with m.n. razali et al. (2022), a catboost model (prokhorenkova, gusev, vorobev, dorogush, & gulin, 2018) with pi-ratings (constantinou & fenton, 2013) as the model features was found to be the bestperforming model for win/draw/loss probability prediction. however, to explore the potential of deep learning models for match result prediction in soccer, we then developed a deep learning-based model for win/draw/loss probability prediction that utilizes a combination of cutting-edge techniques. specifically, the proposed method incorporates modules from the timesnet time series model (wu et al., 2022), transformer, a neural language processing model (vaswani et al., 2017), and a neural network. the model was trained using the most recent five years of data available in the challenge dataset. furthermore, to compute the prediction set features, we augmented the matches and features in the training set provided as part of the competition with additional matches that were played between 4 april and 13 april 2023. this nine-day period represented an "in-between period": the dates after the end of the training set but prior to the first match date in the evaluation set to be predicted. a grid search was used to select optimal hyperparameters by training the models with these various hyperparameters on three training sets and evaluating their performance on three validation sets. the results from the validation sets show that our model outperformed all previously published models from the 2017 soccer prediction challenge for win/draw/loss probability prediction.the main contributions of this study are as follows. first, we investigate whether our developed deep learning models are superior to existing state-of-the-art models. second, we examine how deep learning models can be applied for match result prediction in terms of learning the time series nature of the data and engineering additional features. finally, the proposed deep learning-based models are compared with existing models and real-world data are used to evaluate the effectiveness of the approach.the remainder of this paper is organized as follows. in section 2, we detail the model used in this study for soccer match result prediction. then, the experimental results are presented and discussed in section 3. following this, we discuss research related to the current study in section 4, including the existing literature related to the 2017 soccer prediction challenge, as well as studies subsequent to the competition that also used the open international soccer database. 
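For reference, the ranked probability score (the standard metric for these win/draw/loss probability forecasts, discussed further below) can be computed as in the following sketch; the outcome ordering (win, draw, loss) and the example probabilities are illustrative.

```python
import numpy as np

def ranked_probability_score(probs, outcome_index):
    """RPS for a single match.

    probs: predicted probabilities over ordered outcomes, e.g. [P(win), P(draw), P(loss)].
    outcome_index: index of the observed outcome in that ordering.
    """
    probs = np.asarray(probs, dtype=float)
    observed = np.zeros_like(probs)
    observed[outcome_index] = 1.0
    cum_diff = np.cumsum(probs) - np.cumsum(observed)
    return float(np.sum(cum_diff[:-1] ** 2) / (len(probs) - 1))

# A confident, correct forecast scores lower (better) than a vague one.
print(ranked_probability_score([0.80, 0.15, 0.05], 0))  # ~0.021
print(ranked_probability_score([0.34, 0.33, 0.33], 0))  # ~0.272
```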
finally, the paper is concluded in section 5. the open international soccer database(dubitzky, lopes, davis, & berrar, 2019), despite not containing such match features, has enabled researchers to compare their models in a like-for-like manner on a large number of soccer matches (over 216,000 matches across 52 leagues). some of the top-ranked participants in the 2017 soccer prediction challenge used gradient-boosted tree models and/or rating features(berrar, lopes, & dubitzky, 2019;constantinou, 2019;hubáček, šourek, & železnỳ, 2019), which suggested that condensing a wide range of historical match information into ratings was of benefit, as was using the accuracy-enhancing benefits of boosting. two types of models have thus emerged as potential state-of-the-art models for sports match result prediction: deep learning and boosted decision tree models. however, unlike the 2017 soccer prediction challenge, the 2023 competition required two tasks, the first of which, "exact score prediction," involved predicting match results in terms of exact goals scored (for each team), and the second, "probability prediction," involved prediction in terms of the probabilities for a win, draw, and loss. (2022), a catboost model(prokhorenkova, gusev, vorobev, dorogush, & gulin, 2018)with pi-ratings(constantinou & fenton, 2013)as the model features was found to be the bestperforming model for win/draw/loss probability prediction. following this, we discuss research related to the current study in section 4, including the existing literature related to the 2017 soccer prediction challenge, as well as studies subsequent to the competition that also used the open international soccer database. while some recent studies(berrar, lopes, & dubitzky, 2019;hubáček et al. additionally, models from earlier literature were integrated, including the best-performing model from the 2017 soccer prediction challenge -berrar ratings coupled with xgboost(berrar, lopes, & dubitzky, 2019) -alongside the best-performing model published in a study after the conclusion of the 2017 challenge, which applied a catboost model to pi-ratings features (m.in this section, we review related research on deep learning for match results prediction in soccer and also studies that have used the open international soccer database(dubitzky et al.danisik, lacko, and farkas (2018)used an lstm model to predict soccer match outcomes, comparing classification, numeric prediction, and dense approaches, and also with baselines based on the average random guess, bookmaker odds-derived predictions, and home win (the majority class).the standard evaluation metric used in the 2017 soccer prediction challenge, and subsequent studies that have used the open international soccer database, has been the ranked probability score (rps)(constantinou & fenton, 2012;epstein, 1969).constantinou (2019)proposed a hybrid bayesian network, using dynamic ratings based on the pi-rating system developed in previous work(constantinou & fenton, 2013)but that also incorporated a team form factor to identify continued over-or under-performance.1925), which was better than the results achieved by the 2017 soccer prediction challenge participants.1956, which was still better than the 2017 soccer prediction challenge participants.in this study, our objective was to assess the performance of a deep learning model and determine the optimal feature set for a gradient-boosted tree model in predicting soccer match results in terms of win/draw/loss (w/d/l) probabilities as well as exact 
scores. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/44.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/44.txt new file mode 100644 index 0000000000000000000000000000000000000000..ddf2ff45a353de68a5c6d1e410b2d56b55952dbc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/44.txt @@ -0,0 +1 @@ +intelligence-based systems brought better insights into decision-making tasks and withdrew part of the humans' burden in recurring tasks, where most of these advances have arisen from research fostered by artificial intelligence (ai) and machine learning (ml) . they have been incorporated in a wide range of applications, such as autonomous driving , text classification , image and object recognition , and medical analysis , among others.the increasing demand for more complex tasks and the ability to solve unprecedented problems strengthened an ml sub-area, denoted as deep learning (dl) . dl algorithms are known for employing deep neural networks and millions of parameters to model the intrinsic nature of the human brain , i.e., learn how humans can process information through their visual system and how they can communicate between themselves. nevertheless, such learning is conditioned to the training data and the model's parameters and often does not reproduce the real-world environment, leading to undesired behavior, known as underfitting/overfitting .even though proper training usually accompanies underfitting/overfitting, it is common to perceive a poor performance when the model is collated with unseen data. this discrepancy lies in the fact that the model "memorized" the training data instead of learning its patterns, thus not reproducing the desired outputs when applied to slightly-different data (test data). the best approach to overcome this problem would be to employ combinations of all possible parameters and verify whether they are suitable or not when applied to the testing data. nevertheless, such an approach is unfeasible regarding dl architectures due to their vast number of parameters and exponential complexity .on the other hand, a more feasible approach stands for optimization procedures, where parameters are optimized according to an objective function instead of being joined in all possible combinations. a recent technique, denoted as metaheuristic, has attracted considerable attention in the last years, mainly due to its simple heuristics and ability to optimize nondifferentiable functions. for instance, rosa et al. used the harmony search algorithm for fine-tuning convolutional neural networks (cnn) hyperparameters, achieving improved results over the benchmark architectures. at the same time, rodrigues et al. explored single-and multi-objective meta-heuristic optimization in machine learning problems, such as feature extraction and selection, hyperparameter tuning, and unsupervised learning. furthermore, wang et al. presented a fast-ranking version of the particle swarm optimization algorithm to fine-tune cnn hyperparameters and remove the fitness function evaluation cost.nevertheless, most of the literature works focus on only optimizing the model's hyperparameters (learning rate, number of units, momentum, weight decay, dropout) - instead of optimizing its parameters (layers' weights and biases) . 
usually, parameters are optimized during the learning procedure through gradient-based approaches, such as the stochastic gradient descent, yet they might benefit from the metaheuristic techniques' exploration and exploitation capabilities. this work proposes an additional fine-tuning after the model's training, aiming to explore unknown search space regions that gradient-based optimizers could not find. such an approach is conducted by exploring weights under predefined bounds and evaluating them according to an objective function (accuracy over the validation set). therefore, the main contributions of this work are three-fold: (i) to introduce meta-heuristic optimization directly to the model's parameters, (ii) to provide insightful analysis of whether gradient-based optimizers achieved feasible regions or not, and (iii) to fill the lack of research regarding meta-heuristic optimization applied to machine learning algorithms.the remainder of this paper is organized as follows. section ii presents a theoretical background regarding the employed ml architectures, e.g., multi-layer perceptrons and recurrent neural networks. section iii introduces a brief explanation about meta-heuristic optimization, as well as the genetic algorithm and particle swarm optimization. section iv presents the mathematical formulation of the proposed approach, its complexity analysis, the employed datasets, and the experimental setup. finally, section v discusses the experimental results while section vi states the conclusions and future works. nevertheless, such learning is conditioned to the training data and the model's parameters and often does not reproduce the real-world environment, leading to undesired behavior, known as underfitting/overfitting. this discrepancy lies in the fact that the model "memorized" the training data instead of learning its patterns, thus not reproducing the desired outputs when applied to slightly-different data (test data).on the other hand, a more feasible approach stands for optimization procedures, where parameters are optimized according to an objective function instead of being joined in all possible combinations.explored single-and multi-objective meta-heuristic optimization in machine learning problems, such as feature extraction and selection, hyperparameter tuning, and unsupervised learning.nevertheless, most of the literature works focus on only optimizing the model's hyperparameters (learning rate, number of units, momentum, weight decay, dropout)-instead of optimizing its parameters (layers' weights and biases). usually, parameters are optimized during the learning procedure through gradient-based approaches, such as the stochastic gradient descent, yet they might benefit from the metaheuristic techniques' exploration and exploitation capabilities. therefore, the main contributions of this work are three-fold: (i) to introduce meta-heuristic optimization directly to the model's parameters, (ii) to provide insightful analysis of whether gradient-based optimizers achieved feasible regions or not, and (iii) to fill the lack of research regarding meta-heuristic optimization applied to machine learning algorithms. 
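A minimal sketch of the proposed post-training step under simplifying assumptions: a basic particle swarm search perturbs a flat weight vector within predefined bounds and keeps the position with the best fitness, which stands in for accuracy on a validation set. The inertia/acceleration constants and the toy quadratic fitness are illustrative, not the paper's GA/PSO configuration.

```python
import numpy as np

def pso_finetune(w0, fitness, bound=0.1, n_particles=20, iters=50, seed=0):
    """Fine-tune a flat weight vector w0 with a basic PSO; fitness is maximised."""
    rng = np.random.default_rng(seed)
    dim = w0.size
    pos = w0 + rng.uniform(-bound, bound, size=(n_particles, dim))   # search around w0
    vel = np.zeros_like(pos)
    pbest, pbest_fit = pos.copy(), np.array([fitness(p) for p in pos])
    gbest = pbest[pbest_fit.argmax()].copy()

    for _ in range(iters):
        r1, r2 = rng.random((2, n_particles, dim))
        vel = 0.7 * vel + 1.5 * r1 * (pbest - pos) + 1.5 * r2 * (gbest - pos)
        pos = np.clip(pos + vel, w0 - bound, w0 + bound)             # stay within the bounds
        fit = np.array([fitness(p) for p in pos])
        improved = fit > pbest_fit
        pbest[improved], pbest_fit[improved] = pos[improved], fit[improved]
        gbest = pbest[pbest_fit.argmax()].copy()
    return gbest

# Toy usage: negative squared error to a target stands in for validation accuracy.
target = np.ones(10)
w_trained = target + 0.05 * np.random.default_rng(1).normal(size=10)  # gradient-trained weights
w_refined = pso_finetune(w_trained, fitness=lambda w: -np.sum((w - target) ** 2))
print(np.sum((w_refined - target) ** 2) <= np.sum((w_trained - target) ** 2))  # True (typically)
```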
section iii introduces a brief explanation about meta-heuristic optimization, as well as the genetic algorithm and particle swarm optimization., stochastic gradient optimization across a training set, followed by a fine-tuning using meta-heuristic optimization across a validation set (posttrained).the proposed approach intends to provide an additional optimization step after the network's pre-training, where agents will encode the pre-trained weights and biases as their positions, search for better solutions throughout the space and evaluate the fitness function (validation). regarding the meta-heuristic techniques, we opted to use an evolutionary-based algorithm denoted as genetic algorithm and a swarm-based one, known as particle swarm optimization. additionally, considering both datasets, α-ga-mlp obtained the best accuracy and recall metrics amongst the evaluated models (underlined cells), yet every evaluated model was statistically similar according to the wilcoxon signed-rank test (bolded cells). essentially, after training architectures through standard gradient descent algorithms, meta-heuristic algorithms attempt to explore the trained search space and find more suitable positions.regarding the sentiment analysis task conducted by the lstms, one can perceive that none meta-heuristic could obtain better metrics than the standard architecture, yet all results were statistically similar according to the wilcoxon signed-rank test. such behavior might be explained due to only fine-tuning the last fully-connected layer (layer before the softmax activation) instead of fine-tuning the recurrent layer and ignoring the biases fine-tuning, which may help metaheuristics in achieving more competitive results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/440.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/440.txt new file mode 100644 index 0000000000000000000000000000000000000000..5563f93f925d6182ea9f146fb2e2b71568e6a838 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/440.txt @@ -0,0 +1 @@ +counterfactual explanations (ces) have become a major methodology to explain nns due to their simplicity, compliance with the regulations , and alignment with human thinking . given an input point to a classifier, a ce is a modified input classified with another, often more desirable, label. consider a customer that is denied a loan by the machine learning system of a bank. a ce the bank provided for this customer could be, the loan application would have been approved, had you raised your annual salary by $ 6000. several desired properties of ces have been identified in the literature, the most fundamental of which is validity, requiring that the ce needs to be correctly classified with a specified label . proximity refers to the closeness between the ce and the input measured by some distance metric, which translates to a measure of the effort the end user has to make to achieve the prescribed changes . the ces should also lie on the data manifold of the training dataset and not be an outlier, which is assessed via plausibility . most recently, the robustness of ces, amounting to their validity under various types of uncertainty, has drawn increasing attention due to its real-world importance. in this work, we consider robustness to the model parameter changes occurring in the classifier on which the ce was generated. 
continuing the loan example, assume the bank's machine learning model is retrained with new data, while, in the meantime, the customer has achieved a raise in salary (as prescribed by the ce). the customer may then return to the bank only to find that the previously specified ce is now invalidated by the new model. in this case, the bank could be seen as being responsible by the user and could potentially be legally liable, risking financial and reputational damage to the organisation. the quality of such unreliable ce is also questionable: have shown that ces found by existing non-robust methods are prone to such invalidation due to their closeness to the decision boundary.various methods have been proposed to tackle this issue. focus on building heuristic methods using model confidence, lipschitz continuity, and quantities related to the data distribution. consider optimising the validity of ces under bounded model parameter changes, which are also empirically shown to be robust to the unbounded parameter changes scenarios. among the existing methods, only provides robustness guarantees in a formal approach, which are known to be lacking in the explainable ai (xai) literature in general, aside from some notable examples, e.g. as introduced in . their method generates such provably robust ces via iteratively tuning the hyperparameters of an arbitrary non-robust ces method and testing for robustness. however, this method cannot always guarantee soundness and is not complete, which is also the case for the method in . another limitation in the current literature is that the methods targeting this form of robustness guarantee do not find plausible ces, limiting their practical applicability.such limitations have motivated this work. after discussing relevant studies in section 2, we introduce the robust optimisation problem for computing ces with proximity property as the objective, and robustness and plausibility properties as constraints (section 3). in section 4, we then present provably robust and plausible ces (proplace), a method leveraging on robust optimisation techniques to address the limitation in the literature that no method optimises for proximity and plausibility while providing formal robustness guarantees. we show the (conditional) soundness and completeness of our method, and give a bi-level optimisation procedure that will converge and terminate. finally, in our experiments, we compare proplace with six existing ce methods, five of which target robustness, on four benchmark datasets. the results show that our method achieves the best robustness and plausibility, while demonstrating superior proximity among the most robust baselines., 2022;dutta et al.took the approach of augmenting the training data with previously generated ces.other forms of ces' robustness have also been investigated, for example, robustness against: input perturbations ). the method addresses the limitation in the literature that existing methods lack formal robustness guarantees to bounded model parameter changes and do not generate plausible ces. 
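as a point of reference for the discussion above, the sketch below searches for a plain (non-robust) counterfactual on a differentiable logistic classifier by trading validity against an l1 proximity term. it illustrates only the baseline notion of a ce, not the proplace bi-level robust optimisation; the model, loss weights and step size are illustrative assumptions.

```python
import numpy as np

def find_counterfactual(x, w, b, target=1, lam=0.1, lr=0.1, steps=500):
    """Gradient search for a counterfactual x' close to x that a logistic
    classifier sigmoid(w.x' + b) assigns to `target`:
        minimize  BCE(f(x'), target) + lam * ||x' - x||_1
    """
    x_cf = x.astype(float).copy()
    for _ in range(steps):
        p = 1.0 / (1.0 + np.exp(-(w @ x_cf + b)))
        grad_validity = (p - target) * w            # d BCE / d x'
        grad_proximity = lam * np.sign(x_cf - x)    # subgradient of the L1 term
        x_cf -= lr * (grad_validity + grad_proximity)
    return x_cf

# toy loan-style example with two hypothetical features (salary in $1000s, debt ratio)
w, b = np.array([0.08, -3.0]), -2.0
x = np.array([40.0, 0.6])                           # originally denied (class 0)
x_cf = find_counterfactual(x, w, b, target=1)       # a nearby accepted input
```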
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/441.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/441.txt new file mode 100644 index 0000000000000000000000000000000000000000..00a97178d180ced1afbe1ea4d9295430dd685f50 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/441.txt @@ -0,0 +1 @@ +different kinds of cyber-security vulnerabilities can potentially render vehicles prone to several types of attacks, such as taking control of the vehicle, denial of service (dos) attacks, and spoofing attacks . as emerging technologies push devices across all domains to become more connected than ever before, the internal infrastructure of modern vehicles is still based upon antiquated standards and regulations from a time long before this interconnectivity was thought possible. within the core of all vehicles lies the same internal communication protocol, the controller area network (can) protocol, which was designed before the advent of wifi and bluetooth-enabled vehicles. the vehicles' can networks were designed for quick and efficient transmission of messages with no cyber-security features because they were not necessary for the time. but in a modern vehicle, this same protocol is used when it is exposed to external networks such as the internet. these modern features open vehicles' networks to new vectors of attack that they were never designed for. this openness leaves vehicles as prime targets for well-known and widely used cyber-security attacks - . the gravity of these threats is exhibited by researchers who performed hacks on tesla vehicles that disabled critical safety features . as these threats become more apparent, automobile manufacturers are implementing countermeasures such as data encryption and message authentication in order to mitigate the likelihood of a successful attack. these additions would make newer models of vehicles more resilient to these attacks, but vehicles that were manufactured before these changes would still have no protection against these threats.traditional machine learning techniques have been employed for the purpose of detecting injection-based cybersecurity attacks. these techniques read the traffic of can messages on the vehicle's can bus in order to make a decision about whether the traffic is normal or anomalous. often, several different characteristics of the bus's traffic are extracted, such as the inter-signal arrival time, the frequency of messages on the bus, and appearances of message sequences - . however, these techniques rely on training data containing samples of both normal and anomalous data, meaning that prior knowledge of the specific type of attack is required in order to detect it in the future. these models and techniques perform well in scenarios where the models were given sufficient training data of the type of attack used, but their performance is affected when the models are tasked with detecting attacks for which they were given little to no training data. this poses an issue where in order for these models to remain updated and secure against new and emerging threats, they must be retrained with new data of these attacks. research has been done on trying to mitigate this issue of detecting anomalies consisting of "unknown" attacks. in 2016, researchers bezemskij et al. proposed a knowledge-based approach to detect anomalies on a robotic vehicle . 
the researchers attempted to improve the detection of unknown attacks by adjusting weights that were trained on known attacks to accentuate the features of the data that indicated anomalous activity. the researchers reported that this technique proved effective in detecting unknown attacks within their experimental setup, but this technique may prove ineffective against unknown attacks whose features do not follow the trend of features of known attacks. researchers in proposed another self-supervised technique to detect unknown attacks. the researchers create a pseudo-normal data generator to generate data that mimics normal operating data on a can network but lies just outside of the normal data within a given feature space. a binary classification model is then trained with the noised data labeled as anomalous data to make a decision boundary between the normal data and noised data. for these techniques to be useful, either data of these attacks must be available, or a method for generating data that mimics attack data must be available.we propose the use of one-class classification methods for the purpose of detecting anomalous data on a vehicle's can bus. one-class classification methods have been used successfully for identifying data that falls outside the bounds of normal or expected behavior across multiple domains, such as the medical field with detecting myocardial infractions and taxa identification with identifying rare benthic macroinvertebrates . these approaches use oneclass classification for identifying data outside the norm using features from their respective datasets. our proposed approach follows this paradigm, where we only train the model on data collected from the can bus under normal behavior. this methodology differs from already established techniques of intrusion detection on can buses by only requiring normal data rather than training a model on both normal and anomalous data. the main contribution of this work is that because the model will be trained without anomalous data from any specific type of cyber-attack, we claim that this approach will be more suited to detecting unknown attacks within a vehicular network.the structure of this paper is as follows: section ii provides an overview of work associated with intrusion detection in the context of cyber-security on can buses as well as the application of one-class classification techniques for the purpose of intrusion detection. section iii provides the methodologies used for the various one-class classification techniques that were applied to our dataset. section iv shows our experimental setup, which includes the physical layout of our data collection apparatus, the feature generation, and the methods used for generating anomalous data for testing purposes. section vi then presents the conclusions that were made from our experiments as well as potential improvements and future work. however, these techniques rely on training data containing samples of both normal and anomalous data, meaning that prior knowledge of the specific type of attack is required in order to detect it in the future. these models and techniques perform well in scenarios where the models were given sufficient training data of the type of attack used, but their performance is affected when the models are tasked with detecting attacks for which they were given little to no training data. proposed a knowledge-based approach to detect anomalies on a robotic vehicle. 
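a minimal sketch of the normal-data-only paradigm proposed above: extract simple per-window traffic features from can message timestamps and fit a one-class svm on normal captures only, so that at test time windows deviating from normal behaviour are flagged. the feature choices, window length and hyperparameters are illustrative assumptions, and the synthetic timestamps stand in for real can captures.

```python
import numpy as np
from sklearn.svm import OneClassSVM
from sklearn.preprocessing import StandardScaler

def window_features(timestamps, window=1.0):
    """Per-window CAN traffic features: message count and mean inter-arrival time."""
    t = np.sort(np.asarray(timestamps))
    feats = []
    for start in np.arange(t[0], t[-1], window):
        w = t[(t >= start) & (t < start + window)]
        if len(w) > 1:
            feats.append([len(w), np.diff(w).mean()])
    return np.array(feats)

# synthetic stand-ins for real CAN captures (hypothetical data)
rng = np.random.default_rng(0)
normal_timestamps = np.cumsum(rng.exponential(0.01, size=20_000))
test_timestamps = np.cumsum(rng.exponential(0.005, size=5_000))  # e.g. a flooding-like burst

X_normal = window_features(normal_timestamps)
scaler = StandardScaler().fit(X_normal)
clf = OneClassSVM(nu=0.01, kernel="rbf", gamma="scale").fit(scaler.transform(X_normal))

# -1 flags windows that deviate from the learned normal behaviour
pred = clf.predict(scaler.transform(window_features(test_timestamps)))
```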
the researchers attempted to improve the detection of unknown attacks by adjusting weights that were trained on known attacks to accentuate the features of the data that indicated anomalous activity. researchers inproposed another self-supervised technique to detect unknown attacks. the researchers create a pseudo-normal data generator to generate data that mimics normal operating data on a can network but lies just outside of the normal data within a given feature space. a binary classification model is then trained with the noised data labeled as anomalous data to make a decision boundary between the normal data and noised data. one-class classification methods have been used successfully for identifying data that falls outside the bounds of normal or expected behavior across multiple domains, such as the medical field with detecting myocardial infractionsand taxa identification with identifying rare benthic macroinvertebrates. this methodology differs from already established techniques of intrusion detection on can buses by only requiring normal data rather than training a model on both normal and anomalous data. the main contribution of this work is that because the model will be trained without anomalous data from any specific type of cyber-attack, we claim that this approach will be more suited to detecting unknown attacks within a vehicular network. section iv shows our experimental setup, which includes the physical layout of our data collection apparatus, the feature generation, and the methods used for generating anomalous data for testing purposes. most of these techniques utilize models that rely on data from both normal and anomalous data. this paper focuses on detecting faults within the vehicle's infrastructure rather than cyber-security attacks but still uses one-class models for intrusion detection based on data extracted from can messages. • subspace support vector data description (s-svdd): this method builds upon svdd by transforming the data from the given feature space to an optimized lower-dimensional feature space. data was collected from the primary and electric vehicle(ev) can buses on the vehicles, but the methods used for feature extraction and intrusion detection are not specific to any can bus. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/442.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/442.txt new file mode 100644 index 0000000000000000000000000000000000000000..75a0f82605f4426f43771532753687aebeeb68df --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/442.txt @@ -0,0 +1 @@ +learning algorithms consider the availability of both positive and negative examples in common binary classification tasks. sometimes a strong requirement like this is needed but it does not work in the context of an application for real-world application. in actuality, labeling data is an expensive, timeconsuming task that necessitates a high level of domain knowledge. in some cases, this operation is quick, but usually, defining reliable labels for each data example is a hard task .the goal of one-class classification (occ) methods is to create classification models when the negative class is either nonexistent, poorly sampled, or poorly specified . thus, this technique creates class boundaries only with the knowledge of positive class. 
examples of one-class classification include outlier detection and novelty detection, where the outlier elements are identified independently of all the other data elements. this one-class classification problem happens in a variety of situations, including:• outlier detection: the objective is to find samples from an unlabeled dataset that are outliers. outliers in the training set are observations that deviate significantly from the others. outlier estimators ignore the deviating observations and attempt to fit the majority of the training data under the region. an alternative name for it is unsupervised anomaly detection. • novelty detection: consider training data where there are no outliers, and we want to determine whether the incoming or fresh observation is an outlier or not. the outlier can be referred to as a novelty in this situation. anomaly detection that is semi-supervised might be said to be involved. • information retrieval: the purpose of this classification is to find samples in the unlabeled dataset that are related to the ones provided by the user. • one-vs-rest: this is taken into account in this case when the negative class is too diversified and it is difficult to collect and label numerous negative samples .this technique can find outliers that deviate from the training set in some way. it is highly helpful in resolving a classification problem where samples for one class are in great abundance and samples for the other classes are scarce . the primary objective of occ is to estimate the support of the data distribution, which is very helpful in unsupervised learning, particularly in high-dimensional feature spaces where doing density estimation is exceedingly challenging . several real problems, including novelty discovery, outlier detection, and others. among the first authors to develop occ algorithms were and . a classifier that finds the least radius hypersphere enclosing data is proposed by whereas specifically suggests a classifier that finds the hyperplane separating data from the origin with the largest margin. establishes that despite these two approaches having differences between them, they both yield the same results for translation-invariant kernels like the gaussian kernel. with the aim of addressing large-scale non-linear optimization problems, first-order approaches for non-linear optimization problem solving have been developed and used. several methods that require the solution of linear systems of equations, such as the exterior-point method (epm) and interior-point methods (ipm) have been used to address quadratic programming issues. one of these techniques, for instance, works well for medium-sized situations with a few thousand variables in recent years, algorithms like machine learning and others like as have been used to solve nonlinear optimization problems involving very large scale variables or datasets. suggested combining the fast projected gradient approach (fpgm) with the argumented lagrangian (al) method to train svms that simulate large-scale convex quadratic optimization problems with linear constraints and straightforward bounds. the study omitted the al-convergence fpgm's analysis, despite the encouraging results of the model. 
on the other hand, the convergence of the al-fpgm was theoretically investigated in paper .the three main contributions of this paper are (i) applying a technique based on fast projected gradient for training dual soft margin occ-svms, (ii) creating and implementing an al-fpgm-based qp solver in python for training occ-svms, and (iii) testing the qp solver by training occ-svms on few datasets used under pu-learning problems the remaining parts of this paper is organized as follows: section 2 describes the dual soft margin occ-svms training problem, section 3 describes the augmented lagrangian method, section 4 describes the fast projected gradient method, section 5 presents numerical results for training the occ-svms with the al-fpgm and section 6 presents concluding remarks.2 the dual soft-margin svm problem developed a method called "one-class classification" that extends the svm methodology to handle training with just positive input. only positive data can be used with the suggested schölkopf mechanism. the algorithm checks for "outliers" within the positive instances and uses them as negative examples . after changing the feature via a kernel, the one-class classification method of treats the origin as the unique member of the second class. the image of one class is then separated from the origin using "relaxation parameters." following that, the conventional occ-svms algorithms are used . the following quadratic programming problem must be solved in order to separate the data set from the origin:subject to:here, v ∈ (0, 1) is a parameter whose meaning will become clear later. since nonzero slack variables ζ i are penalized in the objective function, we can expect that if w and β solve this problem, then the decision function f (x) = sign((w • φ(x))β) will be positive for most examples x i contained in the training set, while the sv type regularization term w will still be small. the actual tradeoff between these two goals is controlled by v. it is possible to demonstrate that the solution has an sv expansion by deriving the dual problem and applying the kernel transformation. using lagrange multiplier, constraint optimization problem can further be expressed as equation (2).the lagrange multiplier α must be greater or equal to zero. for the purpose of simplification, we expressed the 1 vn as c. we further find the partial derivatives of the loss function l with respect to ζ, w and βthe optimal value will be obtained using ζ, w and β from the equations above, and this gives rise to equation 6.where x t x = k(x, x) is a kernel to be computed. minimizing equation (8) gives:equation 9 can further be expressed in a more compact form as:the patterns x i with nonzero α i are called svs, where the coefficients are found as the solution of the dual problem:and the bounded set:then the optimization problem 11 can be rewritten as follows:the augmented lagrangian can be written as follows:where µ ∈ r is the unknown langrage multiplier that corresponds to the equality constraint and c > 0 is the scaling parameter.3 augmented lagrangian method algorithm followed by updating the lagrange multiplier µ. we use the following function, which measures the first-order optimality conditions for problem (14), as the stopping criteria: algorithm 1 provides a preliminary sketch of an augmented lagrangian technique.the goal of one-class classification (occ) methods is to create classification models when the negative class is either nonexistent, poorly sampled, or poorly specified. 
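the display equations referenced above (the quadratic program, its constraints, the lagrangian and the dual) did not survive extraction. for readability, the standard schölkopf ν-one-class-svm formulation that the surrounding text describes is reproduced below as a hedged reconstruction, with c = 1/(νn) as in the text; the notation may differ slightly from the original paper.

```latex
% primal: separate the mapped data from the origin with offset \beta
\min_{w,\,\zeta,\,\beta}\ \tfrac{1}{2}\|w\|^{2} + \tfrac{1}{\nu n}\sum_{i=1}^{n}\zeta_{i} - \beta
\quad \text{s.t.}\quad (w \cdot \phi(x_{i})) \ge \beta - \zeta_{i},\qquad \zeta_{i} \ge 0,

% dual, with C = 1/(\nu n) and kernel K(x_{i},x_{j}) = \phi(x_{i}) \cdot \phi(x_{j}):
\min_{\alpha}\ \tfrac{1}{2}\sum_{i,j}\alpha_{i}\alpha_{j}K(x_{i},x_{j})
\quad \text{s.t.}\quad 0 \le \alpha_{i} \le C,\qquad \sum_{i}\alpha_{i} = 1,

% decision function (patterns with \alpha_{i} > 0 are the support vectors)
f(x) = \operatorname{sign}\Big(\sum_{i}\alpha_{i}K(x_{i},x) - \beta\Big).
```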
examples of one-class classification include outlier detection and novelty detection, where the outlier elements are identified independently of all the other data elements. • novelty detection: consider training data where there are no outliers, and we want to determine whether the incoming or fresh observation is an outlier or not. a classifier that finds the least-radius hypersphere enclosing the data is proposed in one work, whereas another specifically suggests a classifier that finds the hyperplane separating the data from the origin with the largest margin. with the aim of addressing large-scale non-linear optimization problems, first-order approaches for non-linear optimization problem solving have been developed and used. one of these techniques, for instance, works well for medium-sized situations with a few thousand variables. in recent years, machine learning algorithms and others have been used to solve nonlinear optimization problems involving very large numbers of variables or very large datasets. one study suggested combining the fast projected gradient method (fpgm) with the augmented lagrangian (al) method to train svms that simulate large-scale convex quadratic optimization problems with linear constraints and straightforward bounds. the three main contributions of this paper are (i) applying a technique based on the fast projected gradient for training dual soft-margin occ-svms, (ii) creating and implementing an al-fpgm-based qp solver in python for training occ-svms, and (iii) testing the qp solver by training occ-svms on a few datasets used in pu-learning problems. the remaining parts of this paper are organized as follows: section 2 describes the dual soft-margin occ-svms training problem, section 3 describes the augmented lagrangian method, section 4 describes the fast projected gradient method, section 5 presents numerical results for training the occ-svms with the al-fpgm and section 6 presents concluding remarks. 2 the dual soft-margin svm problem: schölkopf et al. developed a method called "one-class classification" that extends the svm methodology to handle training with just positive input. since nonzero slack variables ζ i are penalized in the objective function, we can expect that if w and β solve this problem, then the decision function f(x) = sign((w • φ(x)) - β) will be positive for most examples x i contained in the training set, while the sv-type regularization term ‖w‖ will still be small. in this section, we report on the numerical results of training occ-svms with the al-fpgm on datasets that were used to solve pu-learning problems. this manuscript presents numerical results on training dual soft-margin occ-svms with the al-fpgm. we developed and implemented a qp solver based on the al-fpgm and tested it by training on medium-sized data sets from the usmo paper. the numerical results demonstrate that, beyond a sample size of a few data points, the al-fpgm consistently outperforms the original occ-svm solver in python on almost all the datasets. it is important to note that the presented al-fpgm can still be used as a starting point for the development of a more complex algorithm for training occ-svms with a large amount of data (hundreds of thousands of data points), despite the fact that it does not use any type of sequential incorporation of the training data.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/443.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/443.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e65c3b4e05f1877600ef6e2af6ff0baa055d651 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/443.txt @@ -0,0 +1 @@ +the feature selection problem is a very important part of data preprocessing that crucially affects the overall performance in predictive analysis . given a large dataset, statistical tests are typically performed to estimate the correlation between pairs and subsets of features and a subset of the total feature set is then chosen using standard feature selection methods like filters and wrappers . this is done to reduce data redundancy and also improve the performance of the statistical or machine learning methodology to which the resulting data is fed .in this paper, we use a probabilistic approach to the data feature redundancy problem by defining a random graph model that allows for both collinearity and multicolllinearity among features. we use an auxiliary result regarding the size of mutually good constrained sets to obtain a lower bound on the minimum size of a feature set that has low collinearity and low multicollinearity.in the following section, we state and prove our main result regarding the size of feature sets with low collinearity and multicollinearity, using mutually good constrained sets. we also prove a lemma regarding the size of mutually good constrained sets, that is of independent interest.we use the notation f (∅) = u and say that f (s) is the set of elements that are f -good or simply good with respect to s.for example, if u is the set of vertices in a graph, then the function f 0 (s) that determines the set of all vertices not adjacent to any vertex of s is a goodness function. any stable set i is a constrained set and the set h(i) is the set of all vertices adjacent to some vertex in i.any single set in u is assumed to be a mutually good set and so we always set p 1 = 1 = 1q 1 . , x i } is a mutually good set and let h i be the event that {x 1 , . clearly j i+1 ⊆ j i for 1 ≤ i ≤ l -1 and suppose that the event j l-1 occurs. , x l-1 } is known to be a b-constrained set, due to the occurrence of the event j l-1 . we now use the properties (i) -(ii) in the definition 1 to show that if x l ∈ f ({x 1 , . summarizing we have that if j l-1 occurs and x l ∈ f ({x 1 , . , x l-1 } is already a constrained set and so the probability that {x l } does not satisfy the b-constraint with respect to {x 1 , .using p(a ∩ b) ≥ p(a) -p(b c ) with a = e l and b = h l , we get from (2.20) and (2.21) that the conditional probability of both e l and h l happening is at least p l-1q l-1 .taking expectations and using the fact that j l ⊂ j l-1 we get that p(j l ) ≥ (p l-1q l-1 ) • p(j l-1 ).(p jq j ) • p(j 1 ) = l-1 j=2 (p jq j ) • (1q 1 ) (2. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/444.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/444.txt new file mode 100644 index 0000000000000000000000000000000000000000..4d804237c87e1c3f6f807fda9011f47122ce1797 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/444.txt @@ -0,0 +1 @@ +t here have been increasing research efforts in lever- aging the powerful approximation capabilities of deep neural networks (dnns) to learn the high dimensional loadto-solution mappings of the important optimal power flow (opf) problems. high-quality near-optimal opf solutions can be instantly obtained from well trained dnn models, significantly accelerating the solution process compared to traditional solvers , . however, most existing dnn models were built and trained only for a specific power network with a given topology. with the expansion of buses, lines, loads, and distributed energy resources (ders) and the corresponding change of power network topologies, the existing dnn models need to be rebuilt through repeated trainings, incurring heavy storage and computation burdens.solving alternating-current (ac) opf problems across multiple networks is challenging, due to the difference in network topologies, line admittances, lengths of load and solution vectors, et cetera. several methods have been developed recently to partly address the topology variation issue. for instance, resampled the training data and retrained the dnns in real time to adapt to the emerging new topologies, which is often computationally expensive. reference integrated the topology labels into the training process, while encoded discrete topology representations with line admittances into the dnn input. these methods, without retraining, can directly predict opf solutions under flexible topologies. however, they are still limited to a fixed number of loads and generators and incapable of incorporating plug-and-play ders in a network expansion setting. reference learned a linear controller for an expanding radial network, which is not applicable to networks with general topologies.in this paper, we propose deepopf-u, a novel approach that utilizes one unified dnn to learn the load-to-solution mappings of ac opf problems across multiple and expanding networks with different numbers of buses, lines, loads, and generators. the contribution of this work includes:• we design elastic dnn input and output layers with plugand-play neurons, to adapt to the varying lengths of load and opf solution vectors in different networks. where n g k denotes the set of buses with dispatchable generators. p g i and q g i , p d i and q d i represent the active and reactive power generation, active and reactive power consumption at bus i, respectively. v i denotes the complex voltage at bus i. y ij is the (i, j)-th entry of the network admittance matrix y . constants x and x are the lower and upper limits of variable x. the generation cost at bus problem p k varies significantly across different networks k. prior methods , , solved opf problems in a specific network by using a dedicated dnn. as a new network emerges or the network successively expands, a new dnn needs to be built and trained, which lacks generalizability.to overcome the limitation above, we design one unified dnn to learn the load-to-solution mappings of ac opf problems across multiple and expanding networks. 
the proposed approach, called deepopf-u, is applicable to:• multiple networks with different numbers of buses, lines, loads, ders, and different topologies; • an expanding network with increasing numbers of buses, lines, loads, and ders.t here have been increasing research efforts in lever- aging the powerful approximation capabilities of deep neural networks (dnns) to learn the high dimensional loadto-solution mappings of the important optimal power flow (opf) problems. with the expansion of buses, lines, loads, and distributed energy resources (ders) and the corresponding change of power network topologies, the existing dnn models need to be rebuilt through repeated trainings, incurring heavy storage and computation burdens.solving alternating-current (ac) opf problems across multiple networks is challenging, due to the difference in network topologies, line admittances, lengths of load and solution vectors, et cetera.in this paper, we propose deepopf-u, a novel approach that utilizes one unified dnn to learn the load-to-solution mappings of ac opf problems across multiple and expanding networks with different numbers of buses, lines, loads, and generators.• we design elastic dnn input and output layers with plugand-play neurons, to adapt to the varying lengths of load and opf solution vectors in different networks. p g i and q g i , p d i and q d i represent the active and reactive power generation, active and reactive power consumption at bus i, respectively. the generation cost at bus problem p k varies significantly across different networks k.to overcome the limitation above, we design one unified dnn to learn the load-to-solution mappings of ac opf problems across multiple and expanding networks.• multiple networks with different numbers of buses, lines, loads, ders, and different topologies; • an expanding network with increasing numbers of buses, lines, loads, and ders.we design a single unified dnn to learn the load-to-solution mappings of ac opf problems in multiple networks.where u k and x k are the increments of input and output vector lengths from network (k -1) to k.the unified dnn is trained to minimize the following loss function corresponding to power network k: where | vi | and θi are the voltage magnitude and phase angle at bus i predicted by the dnn; |v i | and θ i are their ground truth; factor γ tunes the relative importance of the two terms. the second set consists of 73-bus, 90-bus, 106bus, and 118-bus feeders, which (except the 118-bus feeder itself) are all formed by removing buses (and loads and ders on them) and lines from the ieee 118-bus feeder, to emulate a successively expanding case. deepopf-u can predict ac opf solutions in three feeders using a single dnn without compromising solution quality, while deepopf-v can only predict solutions in each single feeder with a separate dnn.we proposed deepopf-u, a unified dnn to learn the loadto-solution mappings of ac opf problems across multiple and expanding power networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/445.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/445.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc987a7bc8a62eb4891776777b41a55856122175 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/445.txt @@ -0,0 +1 @@ +even before the common era, the concept of viewing an object as the collection of its appearances, has already sprouted in early philosophy. 
the buddha, in the 'diamond sutra', separated the essence of universe from various modalities such as sight, sound, smell, taste and touch. two centuries ago, immanuel kant made a further step, positing that humans perceive only the representations of 'noumena' from the empirical world. he wrote:"and we indeed, rightly considering objects of sense as mere appearances, confess thereby that they are based upon a thing in itself, though we know not this thing in its internal constitution, but only know its appearances, viz., the way in which our senses are affected by this unknown something.-prolegomena" from this perspective, human cognition of the world, may therefore be considered effectively equivalent to the multiple modalities of the underlying objects. the importance of multimodality extends beyond metaphysics to everyday life: children learning languages often rely on illustrations, and even mathematicians benefit from visual aids. however, machine learning, which could be seen as the cognition of computer systems, has not fully harnessed the power of multimodality. multimodal machine learning, which processes and learns from data with multiple modalities, remained relatively under-explored until recently. despite the impressive success of multimodal learning in empirical applications, such as gato and gpt-4 , the corresponding theoretical understanding is largely absent, often limited to heuristics.a fascinating observation from empirical multimodal learning is that a model trained with multiple modalities can outperform a finely-tuned unimodal model, even on population data of the same unimodal task. it's not immediately clear why multimodality offers such an advantage, considering that the trained model's focus is spread across different modalities.while it seems challenging to outperform unimodal learning asymptotically when sufficient data is available, multimodal learning can still provide an edge under a fixed data budget. different modalities might focus on different aspects of an object, and for a specific classification problem, one modality may require a smaller sample complexity. this phenomenon often occurs with large models handling many tasks and a vast amount of training data, suggesting that:training across tasks learns a common connection between modalities efficiently, allowing the model to adapt to the modality with the smallest sample complexity.an intuitive example of how multiple modalities help is learning parametric sine functions. the samples come in the form ofx ∈ (0, 1], y = θx, z = sin(1/y), where x, y are the two modalities and z is the label. given data from both modalities the learning problem is trivial, even with a single training data point, while learning solely on x is hard albeit there is a bijective mapping between x, y. from a perspective of vc-dimension, there is a gap between the class of linear functions {θx} and the class of parametric sine functions {sin(1/θx)}, in that the former one has vc-dimension 1 while the latter one has infinite vc-dimension. more details will be provided later.the theory problem we study in this paper, is thus how to formalize the above heuristic with provable guarantees. to this end, we examine generalization bounds of a simple multimodal erm algorithm, which involves two parallel stages: learning a predictor f ∈ f based on multimodal training data, and learning a connection ĝ ∈ g that maps one modality to another with potentially unlabeled data. 
during inference, the composition f • ĝ is used to perform prediction on unimodal population data.in this setting, we prove that the learnt unimodal predictor f • ĝ can achieve vanishing generalization error against the best multimodal predictor f * as if given multiple modalities, whenever g is expressive enough to realize the training data. in addition, such generalization bound depends on the complexities of both hypothesis classes f , g separately, better than unimodal approaches which typically involve the complexity of f • g or a worst-case complexity of f , up to an o( √ n) factor where n denotes the size of training data. on the other hand, we show a separation between multimodal and unimodal learning, by constructing a hard instance learnable by multimodal learning, in which no matter what hypothesis class is chosen for the unimodal learning problem, it's either under-expressive or over-expressive and thus incurs constant error. putting the two pieces together, our theory suggests that with both connection and heterogeneity, multimodal learning is provably better than unimodal learning.the paper is organized as follows. in section 2 we formalize the setting of multimodal learning and provide a motivating example. section 3 proves a generalization upper bound of the two-stage multimodal erm algorithm on semi-supervised multitask learning problems. the lower bound on the separation between multimodal and unimodal learning is given in section 4, then we discuss the limitations of this paper and future directions in section 5.x ∈ (0, 1], y = θx, z = sin(1/y), where x, y are the two modalities and z is the label. given data from both modalities the learning problem is trivial, even with a single training data point, while learning solely on x is hard albeit there is a bijective mapping between x, y. to this end, we examine generalization bounds of a simple multimodal erm algorithm, which involves two parallel stages: learning a predictor f ∈ f based on multimodal training data, and learning a connection ĝ ∈ g that maps one modality to another with potentially unlabeled data. on the other hand, we show a separation between multimodal and unimodal learning, by constructing a hard instance learnable by multimodal learning, in which no matter what hypothesis class is chosen for the unimodal learning problem, it's either under-expressive or over-expressive and thus incurs constant error. here, each element s ∈ s constitutes a pairing of inputs from both modalities x ∈ x ⊂ r q , y ∈ y ⊂ r k and their associated label z ∈ r, thus forming a tuple (x, y, z).in particular, we will consider learning algorithms as a composition of functions a(x, y) = f (x, g(x)), where f ∈ f and g ∈ g represent the hypothesis classes for both functions. 
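a toy numpy sketch of the two-stage recipe and the parametric-sine example above: first fit the connection ĝ : x → y from paired data, then use a predictor on the modality where the task is easy, and predict on unimodal inputs through f(ĝ(x)). the least-squares fit and sample sizes are illustrative.

```python
import numpy as np

rng = np.random.default_rng(0)
theta = 3.7                                   # unknown ground-truth parameter

# paired multimodal training data: x, y = theta * x, label z = sin(1 / y)
x = rng.uniform(0.01, 1.0, size=100)
y = theta * x
z = np.sin(1.0 / y)

# stage 1: learn the connection g(x) ~ y (here a one-parameter least-squares fit)
theta_hat = (x @ y) / (x @ x)
g = lambda x_new: theta_hat * x_new

# stage 2: the predictor on modality y, where the task is trivially realisable
f = lambda y_new: np.sin(1.0 / y_new)

# inference on unimodal data: only x is observed, predict via f(g(x))
x_test = rng.uniform(0.01, 1.0, size=5)
print(np.allclose(f(g(x_test)), np.sin(1.0 / (theta * x_test)), atol=1e-6))
```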
let g be a function class from the domain x to r k , and x = {x 1 , .thereby, the superiority of multi-modality can be naturally decomposed into two parts: a model trained with multi-modal data performs comparably on uni-modal population data as if multi-modal data is provided (connection), a model trained and tested with multi-modal data outperforms any model given only uni-modal data (heterogeneity).we note that both connection and heterogeneity are crucial to achieving such advantage: connection allows efficiently learning of y from x , while heterogeneity guarantees that learning with x is harder than learning with both x , y.on the other hand, the population loss of any f (x, g(x)) composition is clearly at least 1 2 because the label z is independent of x, while the population loss of {f t (x, y)} with the choice of f t (x, y) = b t (y) is zero.so far we have demonstrated that as long as a good connection indeed exists, learning with multimodal training data using a simple erm algorithm yields a unimodal model which is guaranteed to perform as well as the best model f * t (x, y) with both modalities.we are going to show that, either the intrinsic gap of risk between unimodality and multimodality e (x,y,z)∼µ ℓ(g * (x), z) -e (x,y,z)∼µ ℓ(f * (x, y), z).to sum up, our theory demonstrates that the superiority of multi-modality can be explained as the combined impact of connection and heterogeneity: when connection (theorem 4) and heterogeneity (theorem 7) both exist, multimodal learning has an edge over unimodal learning even if tested on unimodal data, providing an explanation to the empirical findings. failing to exploit the "explicit representations" y from the training data requires learning a composition f • g from scratch, which typically leads to a worst case gaussian average term, for example inthey have max g∈g g(f (s(g))) instead where s(g) = {(x ti , g(x ti ))} is a random set induced by g. notice such lower bound is on the intrinsic gap e (x,y,z)∼µ ℓ(g * (x), z) -e (x,y,z)∼µ ℓ(f * (x, y), z), it directly translates to a lower bound on the actual risk as well. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/446.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/446.txt new file mode 100644 index 0000000000000000000000000000000000000000..37da45711d43cafa2719bda2818591352d50a1bb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/446.txt @@ -0,0 +1 @@ +bearings are a crucial component of machines used across various industries, and their reliable operation is critical for an organization to robustly maintain its supply chain. unplanned downtime of machines leads to revenue loss, lowered productivity, and missed production targets which in turn adversely affects an organization's ability to meet its obligations leading to a reputation and other risks. however, bearings are prone to failure for a variety of reasons, including material defects, corrosion, wear, and poor installation (howard, 1994). predicting bearing failures in advance, and replacing the bearing to avoid inflicting significant damage to the machine will help lower both maintenance and capital costs. vibration signals obtained using a variety of accelerometers are widely used to monitor and assess the health of rotatory systems (murphy, 2020). the scale at which machines are deployed across various industries requires automated ways to monitor failures arising in these machines and alert maintenance personnel (fausing olesen & shaker, 2020). 
automated detection of bearing failures (howard, 1994) and the estimation of a component's remaining usable life (schwendemann et al., 2021) have been active research topics for many years, however the technologies to monitor and detect failures at scale has become possible by advances in internet of things (iot) and artificial intelligence (ai) (z. zhao et al., 2020). monitoring the health of thousands of machines has become possible, with a wide variety of vibration sensors (both piezoelectric and micro electro mechanical systems (mems) accelerometers) using a variety of signal acquisition characteristics, battery life, and data communication capabilities to on-premise or cloud servers. automated detection approaches have evolved from basic statistical approaches to more modern learning based approaches. statistical methods in the time domain use energy level (root mean square value, crest factor), kurtosis, peak, and shape of the amplitude probability distribution, to detect failures (yazdi et al., 2019). spectral features provide a complementary view of the vibration waveform that readily shows differences between various bearing failures (xu, lu, jia, & jiang, 2020). timefrequency approaches that are effective in non-stationary signals,such as the short-time fourier transform (stft) and wavelet transform (wt) have also been used to analyze vibration signals (p. wang et al., 2017). in addition, other orthogonal transforms such as continuous wavelet transform (cwt), discrete wavelet transform (dwt), and empirical mode decomposition (emd) (buchaiah & shakya, 2022) have been explored. condition monitoring approaches used heuristics/thresholds (e.g. iso 20816) on these features to detect and classify failure, however the performance of these approaches is limited as these thresholds don't necessarily generalize to different operating settings. machine learning (ml) approaches have helped overcome these limitations by learning to discriminate the failures automatically from data.ml approaches have used features extracted from vibration signals along with support vector machine (svm, random forest (rf), xgboost, and various other classifiers (tyagi, 2008), and demonstrated improved performance in detecting various faults. recently, deep learning (dl) approaches, which have demonstrated superior performance across several large-scale benchmarks in computer vision and natural language processing (lecun et al., 2015), have garnered increased attention in the prognostics and health monitoring (phm) community for detecting failures of rotating machinery. a variety of dl architectures including convolutional neural networks (cnn) and its variants, recurrent neural networks (rnn), auto encoder (ae), generative adversarial networks (gans) have been used for detecting failures. however there is a lack of consensus on choice of how the machine learning problem is formulated, model is trained, or evaluated across these studies.bearing fault classification problems are formulated as binary or multi-class classification problems with outputs as bearing labels (z. zhao et al., 2020), failure/no-failures (cui et al., 2022), or different failure classes (b. zhao & yuan, 2021). several studies that were developed to detect failures have leveraged popular open source datasets (b. wang et al., 2018;berghout et al., 2021;hendriks et al., 2022). 
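for concreteness, the sketch below computes the classical time-domain statistics mentioned above (rms, crest factor, kurtosis, peak) over fixed-length vibration windows; such features would then feed the svm / random forest / xgboost classifiers discussed in the text. the sampling rate and window length are illustrative assumptions.

```python
import numpy as np
from scipy.stats import kurtosis

def time_domain_features(signal, fs, window_s=1.0):
    """RMS, crest factor, kurtosis and peak for fixed-length vibration windows."""
    n = int(fs * window_s)
    feats = []
    for start in range(0, len(signal) - n + 1, n):
        w = np.asarray(signal[start:start + n], dtype=float)
        rms = np.sqrt(np.mean(w ** 2))
        peak = np.max(np.abs(w))
        crest = peak / rms if rms > 0 else 0.0
        feats.append([rms, crest, kurtosis(w), peak])
    return np.array(feats)
```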
vibration data available in these datasets have been gathered with experimental setups, using accelerated run-to-failure of overloaded bearings or manually damaged bearings, under a variety of operating conditions. in order to accurately assess performance of these bearing classification models, input data has to be split into training, validation and test partitions without any information leakage between the splits (abu-mostafa et al., 2012). information leakage leads to over-optimistic performance estimates of models whose performance fails to hold in real-world scenarios (abu-mostafa et al., 2012;riley, 2019). several fault classification studies (y. zhao et al., 2020;ruan et al., 2023) that have reported high performance however assign waveform recordings from the same bearing to both training and test partitions. we demonstrate such assignment of a bearing data across partitions leads to high performance estimates. in the run-to-failure experiments, the rate of degradation of the bearings is variable. lesser number of data samples are collected from bearings that fail faster (assuming a fixed vibration data acquisition rate) which in turn results in lesser amount of data available to train the failure detection models. further the criteria used by various studies to segment the run-to-failure data into failure and normal operating region, such as considering only last few samples in the wave file (z. zhao et al., 2020), or using a principal component analysis (pca) approach (juodelyte et al., 2022) adds to the variation and imbalance in the data available across classes. the number of bearings with particular injected failures further influences the amount of data available to train fine grained failure detection models. in spite of these differences in the number of bearings associated with a failure, and the amount of data available in the failure region of a bearing, accuracy has been widely used as performance metric by theses studies (schwendemann et al., 2021;neupane & seok, 2020), a choice not ideal in evaluating classifiers with unbalanced datasets and multi-class classifiers (davis & goadrich, 2006). our work considers the effect of these choices on the failure prediction models.in this work, we take a closer look at the formulation of the ml problem, the dataset apportioning choice for model development, and the metrics to evaluate failure classification models. we formulate the failure classification problem as a coarse failure/no-failure binary classification, or as more fine grained failures/no-failure multi-class classification to study their efficacy on different datasets. we investigate this using three bearing failure datasets (both run-to-failure and injected failures). we also demonstrate how the choice of segmenting the run-to-failure datasets using a threshold or an unsupervised pca followed by a k-means clustering method influences the amount of data available for training the model. we further investigate the influence of training, validation, and test dataset splits that considers the bearing information, and importance of using metrics (precision, recall, f-score, and f mac ) in addition to accuracy. this work helps underscore the importance of several key choices in reliably developing models, and deploying them in practical settings for phm.the rest of this paper is organized as follows. in section 2 we briefly describe the datasets that are explored in this study followed by the proposed approach in section 3. 
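a short sketch of the bearing-aware partitioning discussed above, using scikit-learn's GroupShuffleSplit so that every window from a given bearing lands in exactly one partition; the synthetic arrays below are placeholders for real feature matrices.

```python
import numpy as np
from sklearn.model_selection import GroupShuffleSplit

def split_by_bearing(X, y, bearing_ids, test_size=0.2, seed=0):
    """Split samples so that no bearing contributes windows to both partitions."""
    gss = GroupShuffleSplit(n_splits=1, test_size=test_size, random_state=seed)
    train_idx, test_idx = next(gss.split(X, y, groups=bearing_ids))
    return train_idx, test_idx

# a plain random split over windows would mix windows of the same bearing into both
# partitions and inflate the reported accuracy; grouping by bearing avoids this leakage.
X, y = np.random.rand(100, 8), np.random.randint(0, 2, 100)
bearings = np.repeat(np.arange(10), 10)         # 10 windows per bearing (placeholder ids)
train_idx, test_idx = split_by_bearing(X, y, bearings)
```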
in section 4, we demonstrate through experiments the influence of various choices, and finally summarize the effect of these choices in section 5.zhao et al.ml approaches have used features extracted from vibration signals along with support vector machine (svm, random forest (rf), xgboost, and various other classifiers(tyagi, 2008), and demonstrated improved performance in detecting various faults. several studies that were developed to detect failures have leveraged popular open source datasets (b.wang et al., 2012). several fault classification studies (y.zhao et al. we formulate the failure classification problem as a coarse failure/no-failure binary classification, or as more fine grained failures/no-failure multi-class classification to study their efficacy on different datasets. we also demonstrate how the choice of segmenting the run-to-failure datasets using a threshold or an unsupervised pca followed by a k-means clustering method influences the amount of data available for training the model. the proportion of samples available for each split is not consistent across datasets as our splits are based on the bearings, and the number of observations available for each bearing may vary due to differences in either time to failure or duration of the recording.in this section we investigate the approaches to identify failures in run-to-failure experiments, performance of binary and multi-class fault classifiers, and the influence of the splits, we generated the labels for run-to-failure data using a naive threshold and pca approaches in figure2. the results of training various multi-class classification models on the data partitioned using the 10g threshold, and the pca approach in xjtu dataset is shown in tables4and6respectively. we observe the performance of the binary classification is in general better across the methods than that of the the number of samples available for both the binary and multi-class classification is highly imbalanced as shown table3, however the metric used to assess models widely in the literature is just accuracy (z. we also show vibration datasets are heterogenous in the amount of data available for different failures, and its important to look at metrics that handles the multi-class scenario and imbalance in the dataset. we also show the usefulness of baseline dummy classifiers that uses class statistics, well known strategy used in practical ml settings, for developing bearing fault classification approaches, and recommend other research studies report their performance on this baseline along with other baselines. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/447.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/447.txt new file mode 100644 index 0000000000000000000000000000000000000000..edf503bcfacafbd02c08eb7cad678eb76a5db7cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/447.txt @@ -0,0 +1 @@ +in the past two decades, researchers have been working on innovative ways to make significant improvements in security and comfort in intelligent transportation systems (its) . safety is one of the major design goals for autonomous driving as it allows autonomous vehicles to navigate roads independently with fewer human interventions. this will eventually lead to fewer accidents compared to human drivers who are mostly impaired due to many reasons such as sickness and fatigue after a long drive to name a few. 
a multitude of onboard sensors is used in most current autonomous driving systems to gather relevant data . sharing these relevant data among multiple autonomous cars will be beneficial to have an efficient its. the advent of advanced communication technologies such as vehicle-to-vehicle (v2v), vehicle-to-infrastructure (v2i), and vehicle-to-everything (v2x) allows for highly efficient wireless communication within the its domain . this is very important for autonomous vehicles to communicate with one another, central servers, and off-board road side units (rsus).an essential aspect of autonomous vehicle operation in complex driving scenarios is having a decision-making control module that is accurate and that can be executed almost instantly. this module is responsible for sending instructions to the vehicle's action execution module to perform numerous actions like following cars, avoiding obstacles, changing lanes, and overtaking . driving maneuvers such as following a car in front and changing lanes are the two most important driving situations that occur most frequently. consequently, multiple research works have emerged with various car-following models and lane-changing models - . however, most of these research works studied and proposed car-following models and lane-changing models individually. the effects of lane-changing behavior on vehicles in the opposite lane have been pointed out by many scholars. to this end, the importance of joint research on vehicle driving systems that considers both lane-changing behavior and car-following behavior cannot be overstated.recently, machine learning (ml) based car-following models have yielded outstanding performance as reported in the literature. specifically, reinforcement learning (rl) and deep reinforcement learning (drl) stand out among other ml approaches adopted - . similarly, lane-changing models are seeing similar research trends in the literature (i.e., researchers are adopting rl and drl such as deep deterministic policy gradient (ddpg) and deep q-network (dqn) algorithms to solve the lane-changing maneuver problem) - . however, only a few research works have been reported in the literature .for any its to be effective, vehicle-to-everything (v2x) applications must be deployed to enable vehicles to exchange information with nearby vehicles and infrastructure to coordinate maneuvers. it is very common to execute several intensive computational operations in a very short period of time in the its domain to have safe and effective coordination among vehicles. this necessitates the deployment of servers with high computational resources along the road. as part of the v2x infrastructure deployment, operators install rsus to reduce communication delays between vehicles and central servers, which improves the coverage range.in response to concurrent delay and computational requirements, the european telecommunications standards institute (etsi) introduced the multi-access edge computing (mec) concept . with mec, computational resources are moved closer to the vehicles. mec-assisted its applications are being heavily investigated in the literature.the contributions of this paper are summarized as follows:• develop a cohesive decision control framework for carfollowing and lane-changing operations, utilizing drl techniques. this is specifically tailored for scenarios involving abrupt highway construction work. 
• formulate the given scenario as an mdp and employ the deep q-network (dqn) algorithm during the experimentation phase to train an rl agent in making optimal decisions. • integrate a mec-assisted architecture to address latency and computational demands associated with drl algorithms. • evaluate and contrast two distinct decision-making policies, namely boltzmann and epsilon-greedy, to ascertain the superior approach in enhancing traffic flow efficiency within the simulation environment. the structure of the rest of the paper is as follows: section ii presents the background knowledge and the relevant related works in the literature. section iii presents the proposed system model and its description. section iv presents the description of the implementation of the environment and model used for training the agent. the section concludes with results and a discussion of the performance analysis of the proposed model. section v gives concluding remarks and highlights the intended future work. a typical mdp is characterized as a tuple <s, a, p, r, γ> where s is the state space, a is the action space, p is the probability transition function, r is the reward function, and γ is a discount factor used to adjust immediate and future rewards. the authors demonstrated that during the course of the lane-changing decision, an objective quantification of lane-changing intention must be used in order to determine the lane-changing intentions of avs. a duelling double deep q-network (d3qn) algorithm is used to distinguish the value of selected lane-changing actions and the potential value of the environment in the upper layer model. • state space definition: we model each state s ∈ s as a tuple <d_c, d_f, v_eh, x_p, y_p> where d_c represents the distance of the target car from the roadblock, d_f represents the distance of the target car to the next car in its path, v_eh represents the speed of the target car, and x_p and y_p represent the location of the target car along the x-axis and y-axis respectively. note that the action space consists of 12 discrete actions, which correspond to the positions in the lane with the roadblock where a lane-changing action can be initiated. • reward definition: the reward is given as a tuple <r_m+, r_m-, -r_m+> where r_m+ represents a positive reward when the target car successfully changes lane and merges at average speed before reaching the roadblock, r_m- represents a negative reward when the target car fails to change lane after reaching the roadblock (i.e., the end of the lane in this case), and -r_m+ represents a negative reward (the negation of r_m+) when the target car changes lane and merges at minimum speed before reaching the roadblock. we employ two policies, the ϵ-greedy policy and the boltzmann policy, as explained in the dqn algorithm's pseudocode in algorithm 1. the off-board rsus forward the state space to the central mec edge server, which uses the dqn algorithm to determine the correct actions to execute. the gym environment allows us to define the parameters of our problem such as the state space, action space, and observation space. algorithm 1 (the dqn algorithm on the mec server) initializes the weights w randomly and a replay buffer d of capacity n, sets the policy π(s) to either the ϵ-greedy or the boltzmann policy, initializes the state s_0, and then iterates over timesteps t = 0 to t.
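As a rough illustration of the DQN loop and the two action-selection policies summarized above, the following generic sketch (not the paper's implementation) assumes the 5-dimensional state <d_c, d_f, v_eh, x_p, y_p>, 12 discrete actions, and illustrative hyperparameters:

```python
# Generic DQN-style sketch with epsilon-greedy / Boltzmann action selection.
import random
import numpy as np
import torch
import torch.nn as nn

q_net = nn.Sequential(nn.Linear(5, 64), nn.ReLU(), nn.Linear(64, 12))
target_net = nn.Sequential(nn.Linear(5, 64), nn.ReLU(), nn.Linear(64, 12))
target_net.load_state_dict(q_net.state_dict())
opt = torch.optim.Adam(q_net.parameters(), lr=1e-3)
gamma = 0.99

def select_action(state, eps=0.1, boltzmann=False, tau=1.0):
    """Epsilon-greedy or Boltzmann (softmax over Q-values) action selection."""
    with torch.no_grad():
        q = q_net(torch.as_tensor(state, dtype=torch.float32))
    if boltzmann:
        probs = torch.softmax(q / tau, dim=-1).numpy()
        probs = probs / probs.sum()
        return int(np.random.choice(len(probs), p=probs))
    if random.random() < eps:
        return random.randrange(12)
    return int(q.argmax())

def td_update(batch):
    """One gradient step on a replay-buffer mini-batch (s, a, r, s', done)."""
    s, a, r, s2, done = [torch.as_tensor(np.array(x), dtype=torch.float32) for x in batch]
    q_sa = q_net(s).gather(1, a.long().unsqueeze(1)).squeeze(1)
    with torch.no_grad():
        target = r + gamma * (1 - done) * target_net(s2).max(dim=1).values
    loss = nn.functional.mse_loss(q_sa, target)
    opt.zero_grad(); loss.backward(); opt.step()
    return float(loss)
```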
figure3shows that for this environment the greedy policy is actually taking more diverse actions whereas boltzmann is taking the more consistent actions. the boltzmann policy takes more consistent actions that result in the same rewards, while the greedy q starts taking different actions to increase rewards as the simulation runs. this is because the boltzmann q operates on a range of actions that it determines based on q values, while the greedy q takes actions with the intention of increasing rewards and therefore q value. it also shows that greedy policy is taking a little more time and this can be because the algorithm includes memory for its previous steps before making a new action whereas boltzmann is making different actions every step. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/448.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/448.txt new file mode 100644 index 0000000000000000000000000000000000000000..827d305d6112229f0a63dbd932a5aecdde6732e5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/448.txt @@ -0,0 +1 @@ +the global trend in machine learning networks is that larger models yield more accurate results. consequently, new models have been designed with an ever-increasing number of parameters. however, training such models can be consuming in term of computing power, which is why recent work has been aimed towards alternatives to the single-precision arithmetic (fp32). most recent models are so large they cannot fit on a single gpu using a traditional training framework: llama (meta ai's latest large language model) would require for instance 560 gb of memory, which is far more than state of the art gpu can offer.as neural networks grow larger and larger, the need to reduce their memory footprint has become increasingly imperative. while prior research has focused on increasing speed, there remain room for improvements in the way gpu memory is used. indeed, the common approach to maintaining some accuracy on the parameters while training on a half precision (fp16) model is to keep a master copy of them in full floating-point precision. the drawback of doing so is that every parameter now has to be saved in memory in both fp16 and fp32, further expending the charge for gpu memory.typical mixed precision training uses half precision values during the forward pass and single precision for the parameters' update. for each parameter, the components stored on gpu memory are then: the single precision value of the model weight and its half precision copy (6 bytes), the optimizer state (dependent on the optimizer), the gradient (usually 4 bytes), and some additional values like the forward activations whose size may vary depending on the model. to lower the memory pressure, solutions have already been developed towards smaller optimizer footprint. indeed, memory requirements for modern model training are often dictated by the optimizer state, with up to twice as much memory required for each parameter. alternative optimizers, such as adafactor or 8bit-adam, already offer a remedy to this problem by changing the way state memory is stored. where a standard adam optimizer would require 8 bytes of state memory by parameter, these optimizers respectively uses 4 bytes and 2 bytes only. 
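Using the byte counts quoted above (fp32 master weight, fp16 copy, fp32 gradient, plus optimizer state), a back-of-the-envelope accounting can be sketched as follows; the 7B-parameter model size is an illustrative assumption, and activations are excluded:

```python
# Rough per-parameter memory accounting for classic mixed-precision training,
# following the byte counts quoted in the text (forward activations excluded).
def training_bytes_per_param(optimizer_state_bytes):
    fp32_master = 4   # full-precision master weight
    fp16_copy = 2     # half-precision working copy
    gradient = 4      # gradient, usually kept in full precision
    return fp32_master + fp16_copy + gradient + optimizer_state_bytes

for name, opt_bytes in [("adam", 8), ("adafactor", 4), ("8bit-adam", 2)]:
    per_param = training_bytes_per_param(opt_bytes)
    print(f"{name}: {per_param} B/param -> "
          f"{per_param * 7e9 / 2**30:.0f} GiB for a 7B-parameter model")
```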
the model's parameters and their gradients then become the main challenge to enhance memory use. our work aims at removing the additional memory cost incurred by the fp32 copy of the parameter by keeping in memory only the difference between the original parameter and its fp16 value. we also get rid of the gradient value by directly applying the update during the backward pass, thus relieving the need to keep those values at all times. for a given parameter, this leads to at least 6 bytes less having to remain stored in memory. our method does not necessitate any alterations to the hyperparameters or the training framework. it is designed to fit models that require an extensive amount of memory for their training, such as large language models, image classification or generation, natural language processing, and object detection. as stated earlier, a classic implementation of mixed precision optimization typically stores in memory both an fp16 and an fp32 value, whereas our approach consists of storing only the difference between the two formats, thus resulting in at least a third less memory dedicated to the storage of a model's parameters. the classic 16-bit floating point format (fp16) contains 10 bits of significand, whereas the alternative brain floating point format (bf16) contains only 7 bits. to do so, we have developed an overload of the arithmetic operators used in the parameters' update (element-wise add, multiply, divide, and their classic combinations) that performs the operation in full precision using the extra bits saved separately and outputs both the updated 16-bit float and its extra bits. (figure 1, storage example for 12 extra bits: the extra storage is created and accessed using twelve 32-bit integers; the bits stored to keep some accuracy, 12 bits per parameter, are distributed among 32 slots.) to validate the ability of our setup to maintain training performance comparable to an fp32 setup while using less memory, we carried out several model trainings from scratch. fusing the back-propagation and the optimization provides on its own an 11% decrease in peak memory while fine-tuning a t5 model on the glue benchmark, without significantly slowing down the training (detailed results are presented in ...). this hardware does not natively support fp8, therefore we emulated 8-bit precision on fp16 values (e5m2 fp8 is equivalent to a truncated fp16), which is why execution time and memory usage are not relevant in that case.
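A minimal numpy illustration of the split-storage idea described above: the working value keeps the top 16 bits of the fp32 parameter (a bf16-style truncation) and k extra mantissa bits are stored separately so a near-full-precision value can be reconstructed during the update. The choice k = 8 and the reconstruction code are illustrative, not the paper's operator overloads:

```python
import numpy as np

def split_fp32(x, k=8):
    """Split fp32 values into a bf16-like part (top 16 bits) and k extra
    mantissa bits kept separately."""
    bits = x.astype(np.float32).view(np.uint32)
    hi = (bits >> 16).astype(np.uint16)                                # working value
    extra = ((bits >> (16 - k)) & ((1 << k) - 1)).astype(np.uint16)    # residual bits
    return hi, extra

def merge_fp32(hi, extra, k=8):
    """Reconstruct (approximately) the fp32 value from the two stored parts."""
    bits = (hi.astype(np.uint32) << 16) | (extra.astype(np.uint32) << (16 - k))
    return bits.view(np.float32)

x = np.random.randn(5).astype(np.float32)
hi, extra = split_fp32(x)
print(x)
print(merge_fp32(hi, extra))   # close to x; only the lowest 8 mantissa bits are lost
```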
we also observe that keeping only part of the mantissa (for instance 8bits on a fp16 training) can be enough to maintain the accuracy while further reducing the memory pressure (see fig. on the recommendation model we trained, we match the accuracy of the full precision training by adding only 8bits of precision. on the dc-gan model, our optimizer produces significantly better results than a standard bf16 training, but it does not exactly match the results of full precision training. the results displayed in figure5show that our solution does not perform better than fused-fp32 when using 8 extra-bits, however in this case bf16 training is slower than full precision training (likely because the input data is in fp32 format). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/449.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/449.txt new file mode 100644 index 0000000000000000000000000000000000000000..db1bf56456443aa518b3bf78c35fb55bae071a39 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/449.txt @@ -0,0 +1 @@ +random reshuffling(rr), which samples training data examples without replacement, has become the de facto example ordering method in modern deep learning libraries. however, recent research on online gradient balancing (grab) (lu et al., 2022) reveals that there exist permutationbased example orderings that are guaranteed to outperform random reshuffling(rr), and the follow-on work shows that grab is theoretically optimal (cha et al., 2023). grab connects permuted-order sgd to the herding problem (harvey and samadi, 2014) that greedily chooses data orderings depending on per-sample gradients to further speed up the convergence of neural network training empirically. empirical study shows that not only does grab allow fast * preprint. under review. cornell cs m.eng. project. package publicly available at https://pypi.org/project/grab-sam pler/ † project advisor: chris de sa (cdesa@cs.cornell.edu) of cornell university minimization of the empirical risk, but also lets the model generalize better on multiple deep learning tasks.the herding problem requires all vectors to be precentered to ensure they sum to zero (lu et al., 2022). the original work of grab proposes a herding-based online gradient balancing algorithm that uses stale gradient means to stimulate the running average of the gradients in the current epoch (mean balance). more recent work in cd-grab (cooper et al., 2023) proposed pair balance that further reduces dependencies on stale gradient means and works efficiently under distributed and parallel settings. in this work, we propose more variants of the balancing subroutine that attempts to solve various issues, namely: batch balance, recursive balance, and recursive pair balance.grab algorithm requires per-sample gradients while solving the herding problem. in general, it's hard to implement it in the vanilla pytorch automatic differentiation (ad) framework (paszke et al., 2017) because the c++ kernel of pytorch averages the per-sample gradients within a batch before it is passed back to python or forwarded to the next layer. the previous implementation of grab was based on backpack (dangel et al., 2019), a thirdparty library that builds on the top of pytorch to compute quantities other than the gradient efficiently. for other implementations, no efficient solution exists. 
one goal of this project is to get rid of third-party library dependencies other than pytorch and to provide the community with a simple, efficient, and off-the-shelf solution of grab.to make it easier for the entire community to use grab algorithm in their code, my work implements a python library, grab-sampler, that allows users to use grab with a minimum of 3 line changes to their training script.as a cs m.eng. (a) deterministic balancing (b) probabilistic balancing with logarithm bound (lu et al., 2022;alweiss et al., 2020) and other functional requirements that will be discussed in section 3. 2. reproduce the lenet on cifar-10 experiments and performance of the original paper. 3. benchmark the performance of all balance algorithms. the library is now released on pypi., 2022). the original work of grab proposes a herding-based online gradient balancing algorithm that uses stale gradient means to stimulate the running average of the gradients in the current epoch (mean balance). more recent work in cd-grab(cooper et al.while the vanilla grab algorithm refers to the herdingbased online gradient balancing algorithm using the stale mean of sample gradients(lu et al.mean balance (vanilla grab) mean balance is the vanilla grab algorithm that uses the stale gradient means to stimulate the running average of gradients and then solves the herding problem by assigning all examples with either + orsign while computing the balancing. mean balance takes o(n) computation and o(d) memory overhead for storing the stale mean and accumulating vector.pair balance (cd-grab) instead of using a precentered vector, the centralized version of pair balance uses the difference between 2 vectors to compute the balancing and assigns + to one example and assignsto the other one.batch balance batch balance is designed the seek more parallelism while computing the balancing across the batch. batch balance delays updating the accumulator vector until the balancing is calculated for a full batch, which makes all example gradients within the same batch relatively independent of each other while computing the balancing, bringing the potential of parallelism.recursive balance one issue of using grab in practice is that grab requires various epochs (usually ≥ 10) of training and reordering to witness the performance gain compared with rr. recursive balance, inspired bydwivedi and mackey (2021), taking advantage of a binary-tree structure of accumulator-balancing computation, balances each example d times, where d is the depth of the recursive tree, within a single epoch.recursive pair balance recursive pair balance is designed to seek all the goods from all variants. recursive pair balance uses the difference between 2 example gradients to compute the balancing sign, so there is less memory overhead without the need to store the stale mean vector.however, recursive pair balance requires the batch size to be a perfect power of 2, which is not a big issue in practice because people used to choose small batch sizes like 16, 64, or large batch size 1024, which are all power of 2. among the 5 variants, batch balance, recursive balance, and recursive pair balance are newly proposed. 
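A schematic sketch of the herding-style mean-balance reordering described above, operating on precomputed per-example gradient vectors; this is a simplified illustration rather than the grab-sampler library code (the real algorithm uses the stale mean from the previous epoch instead of the exact current mean assumed here):

```python
import numpy as np

def mean_balance_order(grads):
    """Schematic mean balance: center each gradient, greedily pick a +/- sign
    that keeps the running accumulator small, and send '+' examples to the
    front of the new order and '-' examples to the back."""
    n = len(grads)
    centered = grads - grads.mean(axis=0)      # stand-in for the stale mean
    acc = np.zeros(grads.shape[1])
    order, front, back = [None] * n, 0, n - 1
    for i, g in enumerate(centered):
        if np.linalg.norm(acc + g) <= np.linalg.norm(acc - g):
            acc += g
            order[front] = i; front += 1
        else:
            acc -= g
            order[back] = i; back -= 1
    return order

grads = np.random.randn(8, 4)                  # toy per-example gradients
print(mean_balance_order(grads))
```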
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/45.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/45.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c15e17748718708882616b0c118b4613a000494 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/45.txt @@ -0,0 +1 @@ +analogical proportions (ap) are statements of the form "a is to b as c is to d". they compare the pairs of items (a, b) and (c, d) in terms of their differences and similarities. the explicit use of aps in analogical reasoning has contributed to a renewal of its applications, leading to many developments, especially in the last decade; see for a survey. however, even if much has been already done both at the theoretical and at the practical levels, the very nature of aps may not yet be fully understood and their full potential explored.in the following, we survey recent works on aps along three directions:• their role in classification tasks ;• their use for providing explanations ;• their relation with multi-valued dependencies .this just intends to be an introductory paper, and the reader is referred to the above references for more details on each issue. we first start with a background, before addressing the three questions above in turn.which along with a : b :: a : b (reflexivity), are the basic ap postulates (e.g., ).these properties mimic the behavior of arithmetic proportions (i.e., ab = cd) or geometric proportions (i.e., we first consider the case of one boolean attribute applied to each of the four items (n = 1). in that boolean case, the following logical expression has been proposed for an ap :this formula expresses that a differs from b as c differs from d and b differs from a as d differs from c. it is only true for 6 valuations, namely 0 : 0 :: 0 : 0; 1 : 1 :: 1 : 1; 0 : 1 :: 0 : 1; 1 : 0 :: 1 : 0; 0 : 0 :: 1 : 1; 1 : 1 :: 0 : 0. this is the minimal boolean model agreeing with the three postulates of an ap .boolean aps enjoy a code independence property: a : b :: c : d ⇒ ¬a : ¬b :: ¬c : ¬d. in other words, encoding truth (resp. falsity) with 1 or with 0 (resp. with 0 and 1) is just a matter of convention, and does not impact the ap.this easily extends to nominal or categorical values where a, b, c, d belong to a finite attribute domain a. in that case, a : b :: c : d holds true only for the three following patterns (a, b, c, d) ∈ {(g, g, g, g), (g, h, g, h), (g, g, h, h)}, g, h ∈ a, g = h. this generalizes the boolean case where a = {0, 1}.in the boolean and nominal case n = 1, the equation a : b :: c : x where x is unknown does not always have a solution. indeed neither 0 : 1 :: 1 : x nor 1 : 0 :: 0 : x have a solution (since 0111, 0110, 1000, 1001 are not valid patterns for an ap). similarly, g : h :: h : x has no solution in the nominal case when g = h. the boolean solution exists if and only if (a ≡ b) ∨ (a ≡ c) is true. if the solution exists, it is unique and given by x = c ≡ (a ≡ b). in the nominal case, the solution exists (and is uniquenote that, assuming that the ap a : b :: c : d is true, one can indeed recalculate d from a, b, c.more generally, analogical inference amounts to an analogical jump stating that if an ap holds between four items for n attributes, an ap may also hold for an attribute n + 1:∀i ∈ {1, ..., n}, a i : b i ::if a n+1 , b n+1 , c n+1 are known, this enables the prediction of d n+1 , provided that a n+1 : b n+1 :: c n+1 : x is solvable. 
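The boolean definition of an analogical proportion and the solvability condition quoted above can be written down directly; a small sketch (the 0/1 encoding is illustrative):

```python
def ap_holds(a, b, c, d):
    """Boolean analogical proportion a:b::c:d, per the logical definition above:
    a differs from b as c differs from d, and b from a as d from c."""
    a, b, c, d = map(bool, (a, b, c, d))
    return ((a and not b) == (c and not d)) and ((b and not a) == (d and not c))

def ap_solve(a, b, c):
    """Solve a:b::c:x when possible: a solution exists iff (a == b) or (a == c),
    and is then x = c ≡ (a ≡ b)."""
    a, b, c = map(bool, (a, b, c))
    if not (a == b or a == c):
        return None            # e.g. 0:1::1:x has no solution
    return int(c == (a == b))

# enumerate the six valid boolean patterns listed in the text
valid = [(a, b, c, d) for a in (0, 1) for b in (0, 1)
         for c in (0, 1) for d in (0, 1) if ap_holds(a, b, c, d)]
print(valid)                   # 6 patterns
print(ap_solve(0, 1, 0))       # -> 1, since 0:1::0:1 holds
```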
when attribute n + 1 is a class label, analogical inference is the basis for analogical proportion-based classification .there are usually many triplets (a, b, c) in a data set that enable us, using the above inference scheme, to predict a value for d n+1 from the fact that a : b :: c : d holds and from the knowledge of a n+1 , b n+1 , and c n+1 . it is also assumed that the ap p : q :: r : s is true, which means that p = q and r = s, or that p = r and q = s, in column c.let us examine the case where p = r and q = s with p = q (the other case p = q and r = s can be obtained by exchanging b and c). the tilting of the c value from p to q between a and b and between c and d can be explained, in view of the attributes considered, only by the change of values of the attributes from a ℓ to a n (which is the same for pair (a, b) and pair (c, d)).the two pairs (a, b) and (c, d), which correspond to different contexts (described by the attributes a 1 to a ℓ-1 , and differentiated by values of attributes a j to a ℓ-1 ), suggest to see these pairs as instances of a rule expressing that the change on the attributes a ℓ to a n determines the change on c whatever the context. indeed nothing forbids that there exist items a ′ and b ′ such that a ′ : b ′ :: c : d holds on attributes a 1 to a n and c(a ′ ) = c(b ′ ) = p, which would lead to the analogical prediction c(d) = s = r = p (remember we assumed p = r), contradicting the fact that s = q (since p = q). first, it is easy to build ap examples with a database flavor, as in moreover if we go back to table5, a database reader may notice that i) if p = q, a case left aside, it would suggest that the rule x = s → result = p may hold, and even that a functional dependency x → result might hold; ii) using notations in table5, if for all a, b, c in a relational table r there exists d in r, the weak multivalued dependencies x ։ w y and x ։ w z hold in r; iii) if for all a, d in r there exist b, c in r, the multivalued dependencies x ։ y and x ։ z hold in r.departing from a functional dependency, the definition of a multivalued dependency requires the existence of particular tuples in the data base, under some conditions: the multivalued dependency(see also) x ։ y (which can be read as "x multidetermines y ") holds on r if, for all pairs of tuples t 1 and t 2 in r such that t 1 = t 2 , there exists some tuple t 3 in r such that t.a more simple, equivalent version of the above conditions can be expressed as follows: if we denote by (x, y, z) the tuple having values x, y, z for subsets x, y , r \ (x ∪ y ) respectively, then whenever the tuples (p, q, r) and (p, s, u) exist in r, the tuples (p, q, u) and (p, s, r) should also exist in r. note that in the definition of x ։ y , not only the attributes in x and in y are involved, but also those in r \ (x ∪ y ), which departs from functional dependencies (where only the attributes in x and in y are involved).a multivalued dependency x ։ y is trivial if y is a subset of x, or if x ∪ y is the whole set of attributes of the relation (then r \ (x ∪ y ) is empty).multivalued dependencies are of interest in databases since decomposition of a relation r into (x, y ) and (x, r \ y ) is a lossless-join decomposition if and only if x ։ y holds in r.this can be easily generalized to nominal attributes, as shown in table5, where a, b, c, d are equal on the subset of attributes x, where a = b = c = d on the subset of attributes y , and where the same change take place between a and b and between c and d for attributes in z. 
indeed x ։ y holds as soon as whenever the tuples (p, q, r) and (p, s, u) exist in r on subsets x, y , r \ (x ∪ y ), the tuples (p, q, u) and (p, s, r) also exist in r on subsets x, y , r \ (x ∪ y ).x y r \ (x ∪ y ) t 1 p q r t 2 p s u t 3 p q u t 4 p s r. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/450.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/450.txt new file mode 100644 index 0000000000000000000000000000000000000000..4d4bee450e09e44f292f2123e82aaced4b64f915 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/450.txt @@ -0,0 +1 @@ +the aftermath of the covid-19 pandemic has drawn attention to the long-term health consequences experienced by individuals who have recovered from the acute phase of the disease . among these consequences, persistent inflammation has emerged as a prominent concern, affecting various aspects of health and well-being . detection and monitoring of persistent inflammatory biomarkers in post-covid-19 patients have become crucial in providing timely medical intervention and personalized care. while the post-acute phase of covid-19 can be characterized by a range of symptoms and complications, the presence of persistent inflammation is of particular significance. inflammation is a complex immune response that can be triggered by viral infections, including sars-cov-2, the virus responsible for covid-19 . in some individuals, this inflammatory response may persist long after the acute infection has resolved, leading to a spectrum of health issues. understanding the dynamics of persistent inflammation in post-covid-19 patients is a multifaceted challenge. factors contributing to persistent inflammation may include individual variations in immune response, comorbidities, and genetic predispositions . therefore, a comprehensive approach is required to identify and track these inflammatory biomarkers effectively. this study aims to leverage the power of machine learning techniques to automate the detection of persistent inflammatory biomarkers in post-covid-19 patients . the analysis incorporates a diverse dataset encompassing clinical parameters, demographic information, comorbidities, and treatment histories. this comprehensive dataset was meticulously curated from medical records of 290 post-covid-19 patients treated in hospitals across iraq . the utilization of machine learning algorithms offers a promising avenue to analyze and interpret complex data patterns related to persistent inflammation . by doing so, it enables the development of accurate predictive models capable of identifying patients at risk of persistent inflammation. early detection of such patients is crucial for facilitating timely interventions and personalized treatment plans, ultimately improving the quality of care and long-term outcomes . in the following sections, we will delve into the methodologies employed in this study, the results obtained, and the implications for healthcare in the post-covid-19 era. the integration of machine learning into medical research, as demonstrated in this study, highlights its potential to revolutionize the field of healthcare and contribute to more effective post-acute covid-19 management. detection and monitoring of persistent inflammatory biomarkers in post-covid-19 patients have become crucial in providing timely medical intervention and personalized care. understanding the dynamics of persistent inflammation in post-covid-19 patients is a multifaceted challenge. 
this study aims to leverage the power of machine learning techniques to automate the detection of persistent inflammatory biomarkers in post-covid-19 patients. the utilization of machine learning algorithms offers a promising avenue to analyze and interpret complex data patterns related to persistent inflammation. by doing so, it enables the development of accurate predictive models capable of identifying patients at risk of persistent inflammation. early detection of such patients is crucial for facilitating timely interventions and personalized treatment plans, ultimately improving the quality of care and long-term outcomes. the integration of machine learning into medical research, as demonstrated in this study, highlights its potential to revolutionize the field of healthcare and contribute to more effective post-acute covid-19 management.machine learning models were used to automate the detection of persistent inflammatory biomarkers based on patient data.by implementing a combination of data preprocessing, statistical analysis, and machine learning techniques, this study aimed to develop accurate and reliable models for the automated detection of persistent inflammatory biomarkers in post-covid-19 patients.the study analyzed data from 290 post-covid-19 patients to detect persistent inflammatory biomarkers using machine learning techniques.the present study focused on the automated detection of persistent inflammatory biomarkers in post-covid-19 patients using machine learning techniques.post-covid-19 patients often experience persistent inflammation, which can have detrimental effects on various organs and systems.to address the challenge of early detection of persistent inflammation, we employed machine learning models. in our study, we evaluated their performance in identifying post-covid-19 patients with persistent inflammation, thereby aiding in timely intervention and management.in conclusion, our study leveraged machine learning techniques to automate the detection of persistent inflammatory biomarkers in post-covid-19 patients. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/451.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/451.txt new file mode 100644 index 0000000000000000000000000000000000000000..9e08e439ec2f8cb1b91d1680a3f44e5395a9acc5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/451.txt @@ -0,0 +1 @@ +in recent decades, machine learning (ml) models have achieved many successes. in comparison with traditional methods, machine learning models are often capable of increasing accuracy at the expense of black-box functionality. the importance of model explanation is particularly important for highly regulated industries such as the financial sector . as an example, the consumer financial protection bureau (cfpb) confirmed that anti-discrimination law requires companies to provide detailed explanations when denying an application for credit when using machine learning methods 1 . in response to the growing regulatory requirements, researchers are investigating explainable machine learning methods.explainable machine learning (xml) methods have been successfully used in the past to achieve great success in machine learning. popular used methods include shapley additive explanations (shap) , local interpretable model-agnostic explanations (lime) , integrated gradients (ig) , anchors , and sensitivitybased methods . 
as a result of these methods, we have gained a better understanding of how ml models function .in this paper, we address the attribution problem, which involves allocating the prediction score of a model for a given input to its base features. the attribution to a base feature can be understood as the importance of the feature in the prediction. credit scoring, for instance, can utilize attribution to understand how each feature contributes to the credit score. the shap and ig have been successfully applied to the attribution problem. further, their use has been demonstrated to comply with a number of theoretical properties, as discussed in . while extensive analyses have been conducted, the majority of results have been based on blackbox machine learning models without domain knowledge.modeling and ensuring conceptual soundness require domain knowledge: the true model should be consistent with the underlying theories. a number of studies have demonstrated that physicsinformed machine learning (piml) improved black-box machine learning models in terms of interpretation and accuracy by enforcing conservation laws, for example. finance and other applications often require monotonicity. a person's credit score should be decreased when there is one more past due balance on the account, for example. it is possible to achieve better generalization and interpretation when monotonicity is successfully enforced . such models can be categorized as finance-informed machine learning models (fiml) or more generally science-informed machine learning models (siml). in addition, monotonicity is often associated with fairness. as an example, with other factors being equal, a person with more past dues should have a lower credit score. the violation of monotonicity may result in unfair consequences that could cause damage to our society.in this paper, we ask the following question. can attribution methods deliver consistent scientific explanations if siml models contain certain scientific knowledge? if so, to what extent? specifically, do attribution methods preserve monotonicity for monotonic ml models?in the past, monotonicity has been considered for xml methods. in , it is shown that among different shap methods, the baseline shapley value (bshap) method, which is a generalization of shapley-shubik , preserves demand individual monotonicity. however, as recently highlighted in 10], individual monotonicity is not the only problem to be addressed; pairwise monotonicity is just as important. the concept of pairwise monotonicity refers to the comparison of features within a given pair. for instance, a past due of more than 60 days should be considered more serious than one of less than 60 days. pairwise monotonicity is a requirement of fairness that is informed by domain knowledge. it is unfortunate that pairwise monotonicity has been neglected in the existing literature.this paper extends monotonicity results to a broader range of cases. as a summary, we have made the following contributions:(1) we propose three new axioms concerning the average preservation of three types of monotonicity. accordingly, a good attribution method should satisfy these axioms if the underlying model is monotonic.(2) as we show, ig preserves all average monotonicity axioms, but failing to preserve demand individual monotonicity. (3) while bshap preserves individual monotonicity and weak pairwise monotonicity, it fails to preserve average strong pairwise monotonicity.a number of examples are used to illustrate our results. 
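A minimal numpy sketch of the integrated-gradients attribution discussed above, using a toy differentiable model and a zero baseline (both are illustrative assumptions, not the paper's setup); the final print checks the completeness property, i.e. attributions summing to f(x) - f(baseline):

```python
import numpy as np

W = np.array([2.0, -1.0, 0.5])          # toy model weights

def f(x):
    """Toy differentiable model: softplus of a linear score."""
    return np.log1p(np.exp(W @ x))

def grad_f(x):
    return W / (1.0 + np.exp(-(W @ x)))  # sigmoid(W.x) * W

def integrated_gradients(x, baseline, steps=64):
    """Riemann-sum approximation of IG: (x - baseline) times the mean gradient
    along the straight path from the baseline to the input."""
    alphas = (np.arange(steps) + 0.5) / steps
    grads = np.stack([grad_f(baseline + a * (x - baseline)) for a in alphas])
    return (x - baseline) * grads.mean(axis=0)

x = np.array([1.0, 2.0, 3.0])
baseline = np.zeros(3)
attr = integrated_gradients(x, baseline)
print(attr, attr.sum(), f(x) - f(baseline))   # sum of attributions ~ f(x) - f(baseline)
```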
bshap is appropriate when only individual monotonicity is considered and is better suited if demand individual monotonicity is required. on the other hand, when strong pairwise monotonicity is involved, ig provides reasonable explanations on average.the remainder of the paper is organized as follows. section 2 introduces attributions, integrated gradients, (baseline) shapley value, axioms, and types of monotonicity. in section 3, we propose three monotonic-related axioms and analyze ig and bshap for these axioms. in section 4, we present an empirical example. section 5 concludes. can attribution methods deliver consistent scientific explanations if siml models contain certain scientific knowledge? if so, to what extent? specifically, do attribution methods preserve monotonicity for monotonic ml models?. in, it is shown that among different shap methods, the baseline shapley value (bshap) method, which is a generalization of shapley-shubik, preserves demand individual monotonicity. however, as recently highlighted in10], individual monotonicity is not the only problem to be addressed; pairwise monotonicity is just as important.(2) as we show, ig preserves all average monotonicity axioms, but failing to preserve demand individual monotonicity. (3) while bshap preserves individual monotonicity and weak pairwise monotonicity, it fails to preserve average strong pairwise monotonicity. bshap is appropriate when only individual monotonicity is considered and is better suited if demand individual monotonicity is required. on the other hand, when strong pairwise monotonicity is involved, ig provides reasonable explanations on average. suppose is the set of all individual monotonic features and ¬ its complement, then the input x can be partitioned into x = (x , x ¬ ). lastly, we require that all features exhibiting pairwise monotonicity also exhibit individual monotonicity.in addition, there is a stronger condition of pairwise monotonicity, known as strong pairwise monotonicity, which is independent of the condition that = . in light of example 3. for ′ = ′ = 0, the weak pairwise monotonicity in guarantees that math is more important than verbal when they are equal when we determine the general importance of features. we demonstrate the application of ig and bshap to black-box fcnns and then provide a more detailed analysis of transparent mgnams regarding pairwise monotonicity. in the case of pairwise monotonicity revealed by ig, however, average attributions of 1 are lower than those of 2 . by analyzing and illustrating, we demonstrate that bshap provides satisfactory explanations when only individual monotonicity is involved, but ig provides reasonable explanations when strong pairwise monotonicity is involved. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/452.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/452.txt new file mode 100644 index 0000000000000000000000000000000000000000..5ed858529d581917b7bd291e6e8fe204ccef042a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/452.txt @@ -0,0 +1 @@ +dr, an inflammatory condition affecting the retina (the light-sensitive tissue in the back of the eye), is one of the most prevalent and potentially blinding complications of diabetes mellitus. degenerative conditions such as diabetic retinal disease are characterized by vascular injury to the retina, primarily brought on by prolonged high blood sugar levels. 
more than one-third of individuals with diabetes experience some kind of diabetic retinopathy; if not treated suitably, this condition can worsen from moderate non-proliferative abnormalities to proliferative diabetic retinopathy (pdr), which can be blindness, and diabetic macular edoema (dme). by 2030, over 11 million people worldwide are predicted to suffer from advanced dr due to the rising incidence of diabetes. when aberrant blood vessels appear in dr, it is an indication of a complex pathological process that progresses through various phases. these phases range from moderate non-proliferative modifications to more severe and advanced proliferative modifications. ensuring an early diagnosis and accurately classifying the severity of dr are critical goals. accurate assessments of this kind are essential to enable effective therapeutic interventions and putting preventative measures in place to avoid irreversible sight loss.the convergence of medical science and artificial intelligence has resulted in a paradigm shift in the diagnosis and management of dr in recent years. traditional procedures frequently struggle to provide complex severity classifications, and more advanced methodologies are desperately needed. this study aims to bridge the gap by introducing a novel categorization approach that overcomes the constraints of existing automated grading systems. in this work, the potential of deep learning, specifically convolutional neural networks (cnns), is leveraged to offer a robust and efficient approach for distinguishing diabetic retinopathy severity stages. this method not only offers improved accuracy, but it also aims to address the clinical care spectrum, which includes the five expert-defined severity stages: no apparent retinopathy, mild, moderate, severe non-proliferative diabetic retinopathy (npdr), and proliferative diabetic retinopathy (pdr). the goal of this study is to delve into the intricate aspects of retinal images and make use of the capabilities of modern technology to contribute to the refining of diagnostic techniques and, in the end improve the standard of care for patients with dr.the advancement of deep learning technology, particularly cnns, has made it feasible to automate the analysis of retinal imagery. this has made the process of detecting dr more accurate and efficient.the main goal of this project is to develop a methodology for categorizing retinal images that will be more effective at discriminating between the five different severity classifications of dr than the automated grading systems that are currently in use. currently, most published models simplify the disease category for binary identification of diabetic retinopathy without considering the degree of development. although some studies classified three grades, few studies distinguish between the five expertdefined severity stages that directly affect clinical care. this paper's novelty is found in the methodical research and assessment of deep learning models with various filters, with the goal of utilizing the best filters for maximum efficiency. a high accuracy of 96% is achieved by inceptionnetv3, by using the gaussian filter.the remaining sections of the paper is organized as follows: section ii describes the literature survey. section iii introduces the data and how it was gathered. section iv discusses the proposed methodology. this is followed by discussion of the results and their analysis and lastly, section vi summarizes about the conclusion and future scope. 
more than one-third of individuals with diabetes experience some kind of diabetic retinopathy; if not treated suitably, this condition can worsen from moderate non-proliferative abnormalities to proliferative diabetic retinopathy (pdr), which can be blindness, and diabetic macular edoema (dme). in this work, the potential of deep learning, specifically convolutional neural networks (cnns), is leveraged to offer a robust and efficient approach for distinguishing diabetic retinopathy severity stages. this method not only offers improved accuracy, but it also aims to address the clinical care spectrum, which includes the five expert-defined severity stages: no apparent retinopathy, mild, moderate, severe non-proliferative diabetic retinopathy (npdr), and proliferative diabetic retinopathy (pdr). to train retinal illness recognition algorithms for the first time, the authors employed two internal and two external datasets, one of which included over a million fundus pictures covering over 50 retinal diseases. in order to identify diabetic retinopathy, the work done inuses a methodology that includes picture preprocessing, feature extraction, and classification using svm. the approach described inoutperforms previous methods in datasets related to malaria and diabetic retinopathy by using intensity and compactness features in a multi-level superpixel architecture to detect retinal leakage.yang inpresented a two-phase deep convolutional neural network method for assessing the severity of diabetic retinopathy and identifying lesions. compared to using the original images without pre-processing, the results demonstrate that the pre-processed images increase the accuracy of the cnn model as well as other pre-trained models like alexnet, vgg16, and resnet50.a cnn approach is proposed by the study into identify diabetic retinopathy in fundus images with 92% accuracy. in contrast to traditional filters, the gabor filter improves the accuracy of diabetic retinopathy diagnosis in retinal pictures by highlighting these particular patterns, which leads to a more sophisticated and sensitive categorization technique. these models are intended to evaluate the information retrieved by the filters and forecast whether or not diabetic retinopathy will be present in the retinal images, as well as how severe it will be.the performance of multiple deep learning models on the task of diabetic retinopathy severity classification using retinal pictures with different filters are shown in table1. relu activation functions can improve feature learning in convolutional neural networks by encouraging non-linearity, which helps the model capture complex patterns essential for differentiating between phases of diabetic retinopathy and improve the severity classification of the condition.the proposed work focuses on three different filters that were applied to the original greyscale images, the gaussian filter emerged as the most promising filter among those that were used. this is due to the fact that features improved by gaussian filtering in conjunction with the inceptionnetv3 model shown a greater ability to capture the crucial features of diabetic retinopathy. 
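A minimal sketch of the Gaussian-filter preprocessing plus InceptionV3 transfer-learning pipeline described above; the kernel size, input resolution, frozen backbone, and training call are illustrative assumptions rather than the paper's exact configuration:

```python
import cv2
import numpy as np
import tensorflow as tf

def preprocess(img_bgr):
    """Resize a fundus image, apply a Gaussian filter, scale to [0, 1]."""
    img = cv2.resize(img_bgr, (299, 299))
    img = cv2.GaussianBlur(img, (5, 5), 0)
    return img.astype(np.float32) / 255.0

base = tf.keras.applications.InceptionV3(include_top=False, weights="imagenet",
                                          input_shape=(299, 299, 3))
base.trainable = False                              # freeze the pretrained backbone
model = tf.keras.Sequential([
    base,
    tf.keras.layers.GlobalAveragePooling2D(),
    tf.keras.layers.Dense(5, activation="softmax"), # 5 DR severity stages
])
model.compile(optimizer="adam", loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
# model.fit(train_images, train_labels, ...)        # images produced by `preprocess`
```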
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/453.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/453.txt new file mode 100644 index 0000000000000000000000000000000000000000..465318a39199fdf9e761684f4aad147ac2b7847d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/453.txt @@ -0,0 +1 @@ +the development of autonomous driving (ad) technologies has brought transportation systems to the next level, promising safer and more efficient roadways. among the various techniques used to enable autonomous vehicles (avs), imitation learning (il) has emerged as a promising approach due to its ability to learn complex driving behaviors from expert demonstrations. by leveraging large-scale driving datasets and deep neural networks (dnns), il has demonstrated remarkable success in training autonomous agents to emulate humans' driving behaviors (pan et al., 2017;2020;zhang & cho, 2016). despite its impressive performance, dnn-based il (dil) inherits the critical limitations of dnns, hindering its widespread adoption in real-world autonomous systems. a prominent drawback is the lack of interpretability in the learned driving policies due to the black-box nature of dnns (zhang et al., 2021), making the decision-making process of ad challenging to understand. this lack of interpretability not only limits our ability to diagnose failures or errors but also hampers the trust and acceptance of autonomous systems by society and regulatory bodies (kim & canny, 2017). moreover, the generalizability of dil driving policies remains a concern. although they can learn to imitate expert drivers in specific scenarios (with a given state distribution), adapting these policies to unseen situations (out of the state distribution) can be problematic since dil can just learn the demonstrated behaviors (ghasemipour et al., 2020), thereby having little knowledge about novel situations when test and train state distributions mismatch (zhu et al., 2020). the rigid nature of learned policies often leads to suboptimal or unsafe behavior when encountering unfamiliar conditions, such as adverse weather or complex traffic scenarios. finally, though more sample-efficient compared to reinforcement learning, dil methods still suffer from sample inefficiency since they need millions of state-action pairs to learn efficiently (fang et al., 2019;yu et al., 2023). recently, state-of-the-art explainable ai (xai) methods seek to improve the transparency and interpretability of dnns by providing various criteria (došilović et al., 2018;tjoa & guan, 2020), the most cutting-edge of which is neuro-symbolic learning, which aims at combining the learning capabilities of dnns with symbolic reasoning (sarker et al., 2021;hitzler & sarker, 2022). since most neuro-symbolic methods employ symbolic logic-based reasoning to extract domain-specific first-order logical (fol) rules using different rule-generation techniques (kimura et al., 2021;zimmer et al., 2021), they are identified as sample-efficient techniques that have a credible sense of generalizability (hitzler & sarker, 2022). for instance, zimmer et al. (2021) proposed a differentiable logic machine (dlm), as a neural-logic approach, to extract fol rules using inductive logic programming (ilp) (cropper & dumančić, 2022), a technique aiming at inducing new rules from human-provided examples. similarly, dlm was employed by song et al.
(2022) to learn an interpretable controller for ad in the form of a first-order program. likewise, our work aims to replace dil with ilp-based programs, proposing a sample-efficient and transparent method while transferable to unseen scenarios.in this paper, we propose a novel solution to address the challenges of the current dil methods by introducing the concept of sil, as an approach aiming to convert black-box driving policies into explainable policies by incorporating ilp to extract symbolic rules from humans' background knowledge. using ilp, we seek to unlock the potential for transparent, interpretable, and adaptive driving policies, laying the foundation for safer and more reliable ad systems. the primary objective of this paper is twofold: first, to enhance the transparency of learned driving policies by inducing human-readable and interpretable rules that capture essential safety, legality, and smoothness aspects of driving behavior. second, we strive to improve the generalizability of these policies, enabling avs to handle diverse and challenging driving scenarios with reliability.the rest of the paper is divided into several sections. section ii introduces the prerequisites of the method. section iii describes the proposed method in general and sention iv particularly discusses it for the ad system. section v presents the simulation environment, results, and discussion. finally, section vi draws a conclusion. in the knowledge acquisition phase, we provide essential inputs using our background knowledge about the environment for ilp, including b, bk, and e, facilitating the induction of a single rule in the rule induction phase. for all required rules, this process is repeated to accumulate all necessary rules induced by ilp, gradually assembling individual rules at each stage to constitute h set. keep note that when the av executes lane changes, it typically employs one of three actions: lane keeping (lk), right lane change (rlc), or left lane change (llc).for instance, to instruct an ilp program in deriving unsafe rlc rules, states wherein taking rlc action is unsafe receive positive labels; otherwise, such scenarios receive negative labels.after obtaining sufficient amount of knowledge-driven data for each unknown rule, we elaborate on the rule induction stage of the sil framework, discussing the extracted rules for autonomous highway driving. we predominantly focus on safety rules concerning lane change scenarios, which can be categorized into two primary parts: unsafe rules and dangerous rules.employing ilp, we have deduced two rules, one relating to unsafe rlc (rule h1) and the other to unsafe llc (rule h2). the first one (rule h3) indicates that if there is a vehicle in the backright section with a velocity exceeding that of the av, or if there is a vehicle in the frontright section with a velocity lower than that of the av, then executing the rlc action is dangerous:.this rule asserts that if there is a tv in the back section, the distance between the av and the tv is critical, and the tv's velocity exceeds that of the av, staying in the current lane (lk) is considered dangerous and could lead to a collision. to establish corresponding rules to each phase, we consider the relative distance between the av and the front tv and classify states as positive or negative examples for each rule, as the rules specifications shown in table1. 
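To make the rule-pruning step concrete, a toy encoding of a dangerous right-lane-change rule in the spirit of rule h3 is sketched below; the predicate names and state fields are assumptions for illustration, not the paper's induced clauses:

```python
from dataclasses import dataclass

@dataclass
class State:
    back_right_occupied: bool
    back_right_faster: bool      # vehicle behind-right is faster than the AV
    front_right_occupied: bool
    front_right_slower: bool     # vehicle ahead-right is slower than the AV

def dangerous_rlc(s: State) -> bool:
    """Toy mirror of rule h3: a right lane change is dangerous if a faster vehicle
    sits in the back-right section or a slower vehicle sits in the front-right."""
    return (s.back_right_occupied and s.back_right_faster) or \
           (s.front_right_occupied and s.front_right_slower)

def allowed_actions(s: State):
    """Prune the action space with the safety rules before choosing an action."""
    actions = {"LK", "LLC", "RLC"}
    if dangerous_rlc(s):
        actions.discard("RLC")
    return actions

print(allowed_actions(State(True, True, False, False)))   # RLC removed
```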
three rules have been induced for adjusting a x in each phase; the first rule, for instance, indicates that when the front section is unoccupied (free), accelerations should be adjusted to obtain the desired velocity.the second rule indicates that when the front section is busy and the relative longitudinal distance between the two vehicles is safe, then the av should change the acceleration to reach the front tv's speed.finally, the third extracted rule indicates that when there is a vehicle in front of the av and the relative distance between them is lower than the allowed distance, then the av should brake to avoid collision with the front tv.to identify the best action for state s t , we initially employ h1 to h5 rules to eliminate unsafe or dangerous actions, thereby reducing the action space.1), we utilize the current state s t to identify the rule among h8, h9, and h10 rules where the head predicate holds true. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/454.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/454.txt new file mode 100644 index 0000000000000000000000000000000000000000..bc3984c3b1f1344a59cc7a4f906c73cbf6b8409f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/454.txt @@ -0,0 +1 @@ +despite being introduced over 50 years ago, exponential smoothing methods remain among the most widely used methods for forecasting (goodwin 2010). their enduring popularity stems primarily from their relative simplicity, robustness, flexibility, and good forecasting performance (gardner jr. 2006). for example, exponential smoothing models were able to outperform several more sophisticated and complex algorithms in the influential m3 forecasting competition (makridakis & hibon 2000), and exponential smoothing models were successfully used as building blocks in many of the methods used in the recent m4 competition (makridakis et al. 2018).exponential smoothing was originally created as a simple heuristic, in which the next point forecast is given by the previous point forecast plus a correction term that is proportional to the error made by the previous forecast. over the years, the basic exponential smoothing technique has been extended to include additional aspects such as seasonality (additive and multiplicative), local trends (additive, multiplicative, and damped versions of both), and more complex stochastic error term specification (additive and multiplicative). exponential smoothing techniques are frequently referred to in the literature by the initialism "ets", which may stand for either exponential smoothing, or error, trend, and seasonality. hyndman et al. (2002) and hyndman, koehler, ord & snyder (2008) systematically consolidated the various exponential smoothing models, and gave them a solid theoretical base, by reformulating them within a state-space framework with a single source of error. subsequently, this theoretical contribution was followed by the creation of the forecast package (hyndman & khandakar 2008) in the r programming language (r core team 2018), which implements ets models in an accessible and user-friendly way within an open-source framework. the package has since become the de facto standard tool for the forecasting of time series in many areas. a more recent continuation of this development is via the fable package (o' hara-wild et al. 2021), which reimplements and extends the functionality of the forecast package. 
apart from these main lines of research, other authors have developed various robust versions of exponential smoothing, e.g., (cipra 1992, gelper et al. 2010, crevits & croux 2016, crevits et al. 2018). svetunkov et al. (2022) developed the complex exponential smoothing method, and sbrana & silvestrini (2020) present a damped trend model that uses a structural approach, i.e., a multiple sources of error formulation.the ets models that are currently in use (as described in hyndman, koehler, ord & snyder (2008), and implemented in the forecast package), are either linear or exponential in their trend, with the only option currently available to potentially bridge this gap being a damped trend. so, the choice is limited especially if the underlying growth rate cannot be suitably modeled by these available options. one way to currently address this shortcoming is by using data transformations as a preprocessing step, such as a logarithm or box-cox transformation (box & cox 1964). however, the logarithm is a very strong transformation that can lead to explosive behaviour. the strength of the box-cox transformation can be adjusted using its parameter λ, but choosing this parameter is not trivial and we observe in the experiments of our ablation study that standard methods for this task also often lead to explosive forecasts. attempts have been made in adjacent fields to develop more flexible trend formulations, for example, parzen (1982) presents a model where the differenc-ing of arima is replaced by other, slightly more complicated non-stationary ar models, thus allowing for more flexible trend choices. apart from the relatively unflexible trend, the ets models assume normality of the errors, homoscedasticity of the errors for additive models, and have some further shortcomings described in details in section 3.the ets models that are currently in use (as described inhyndman, koehler, ord & snyder (2008), and implemented in the forecast package), are either linear or exponential in their trend, with the only option currently available to potentially bridge this gap being a damped trend. for example, an ets model with additive error, additive damped trend and multiplicative seasonality, which is commonly known as the holt-winters' damped method, would be denote as an "aadm model" under the common nomenclature. for the classical ets models, the parameter vector θ usually consists of (a mix of) the following parameters: a damping coefficient, an error variance, initial trend terms, initial seasonality factors, and smoothing factors for the level, trend, and seasonality.one particular model from the ets model family, the non-seasonal (damped) linear trend model (aadn), is given by.where y t denotes the value of the dependent variable of interest at time t, ŷt+1 is the conditional expectation of y t+1 given the information up to time t, σ denotes the standard deviation of the error distribution, l t denotes the level at time t, and b t the local trend at time t. similar to equation (4), the updating process of the trend is based on the weighted average of the current trend value (l t+1 -l t ) and the previous estimate of the trend b t .we note that when using a linear trend, we are making the assumption that we expect that in addi-tion to the level changing over time, we also expect that the trend will change over time. 
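For reference, the standard single-source-of-error formulation of the non-seasonal additive-error damped-trend model (the textbook form of the AAdN model discussed above; the paper's own equations, lost in extraction, may differ slightly in notation or indexing):

```latex
\begin{align}
  y_t    &= \ell_{t-1} + \phi\, b_{t-1} + \varepsilon_t,
            \qquad \varepsilon_t \sim \mathcal{N}(0, \sigma^2) \\
  \ell_t &= \ell_{t-1} + \phi\, b_{t-1} + \alpha \varepsilon_t \\
  b_t    &= \phi\, b_{t-1} + \beta \varepsilon_t
\end{align}
```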
the weighted average estimate for the trend value usually works best if the trend changes gradually over time in the corresponding time series data.to address the shortcomings identified in section 3, we propose the non-seasonal local and global trend model (lgt, note that "local" and "global" refers here to parts of the series or the whole series, as opposed to models that learn across time series and are also oftentimes called global models, see, e. while this generalisation of additive and multiplicative trend model is interesting, we found in practice that its performance was typically worse than that of the alternative global trend model:.the one-step-ahead forecast given by equation19, is given by the linear combination of the level l t , the global nonlinear trend γl ρ t , and the damped local linear trend λb t . if ρ is close to zero, the contribution of the global trend to the one-step-ahead prediction becomes near-constant (a drift term), and the resulting trend becomes close to linear, leading to an overall (global) upward or downward trend. this combination of local and global trend can be especially useful, for example, in situations like the covid-19 pandemic, as the model is able in such a situation to model both longterm overall trend and short-term changes in the behaviour of the series.as our proposed models are extensions of the traditional exponential smoothing model family, they are per-series univariate models in nature and do not here, f is the frequency, #s the amount of time series, l min and l max are the minimum and maximum length of the series, and h is the forecast horizon. furthermore, for non-seasonal data we run our custom gibbs sampler that is available for the nonseasonal case, and further run ets/aan, ets/aan (bayesian version) -ets (aan) fitted with stan using our bayesian approach, and an implementation of the lgt method within the forecast package in r, which is thus fitted using the standard optimisation method which is used to fit ets methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/455.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/455.txt new file mode 100644 index 0000000000000000000000000000000000000000..eada9fd8665152a2f13a5073c1eba2449830341f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/455.txt @@ -0,0 +1 @@ +the gnnhls framework, as depicted in figure 1, comprises two primary components: data generation and hls fpga. the former is designed to generate input and output files and measure baselines on a cpu and a gpu, while the latter is designed to implement the optimized hls applications on an fpga. the data generation component mainly consists of the training system and the inference system, which are based on pytorch and dgl. to account for the impact of graph topology on gnn model performance, it uses graph datasets with various topologies, including those from open graph benchmarks . in addition, six commonly used dgl gnn models obtained from a previous gnn benchmark are incorporated. thus, realistic model parameters, generated in the training phase, are utilized in inference.the hls fpga component implements the gnn kernels on the fpga. these kenels match the functionality of the dgl baselines and are optimized with several optimization techniques . the optimized hls kernels, with associated 1 released as a benchmark suite and also available at https://github. 
com/chenfengzhao/gnnhls figure 2 illustrates the dataflow diagrams of the gnnhls kernels, in which memory and computation operations are divided and pipelined based on the complexity of each kernel. to mitigate the cost of dataflow, we also (1) tune the location of fifo accesses to achieve better throughput, (2) apply vectors for fifo widening and associated operations, and (3) split loops to optimize the fifo properties of loop indices. despite these promising results, hdl design methodology is not suitable for widespread adoption for gnn implementations due to the conflict between the non-trivial development efforts with hdl and the rapid emergence of new gnn models.• we propose gnnhls, a framework to evaluate gnn inference acceleration via hls, containing: (a) a software stack based on pytorch and dgl for data generation and baseline deployment, and (b) fpga implementation including 6 well-tuned gnn hls kernels with host and configuration files which can also be used as benchmarks. • we provide a comprehensive evaluation of our gnn hls implementations on 4 graph datasets, assessing both performance improvement and energy reduction. note that the weight parameter ⃗ a t is decomposed into a l src and a l dest ∈ r d in the dgl library, because it is more efficient in terms of performance and memory footprint by transferring vmm between u k,l and h l from edge-wise to node-wise operations, especially for sparse graphs where the edge number is larger than the vertex number. considering the irregularity and large maximum |n i | of graphs, we divide the gat model into 2 hls kernels linked to the same memory banks for shared intermediate results: kernel 1 is designed to perform vmm with u and h, and multi-headed element-wise multiplication (mhewm) with a src and a dest , respectively, in(6). these graphs represent two classes of graphs with distinct topologies used in the gnn community: mh and mt consist of multiple small dense graphs, while ax and pt each consist of one single sparse evaluation methods: to perform evaluation, we use a xilinx alveo u280 fpga card, provided by the open cloud testbed, to execute the hls kernels.we next examine the performance improvement by showing the overall speedup, defined as the execution time of the gnn hls kernels relative to cpu-dgl (using all 10 cores on the cpu), in figure5.turning our attention to how the performance benefit of hls implementations varies across graph datasets, we observe that the speedup of isotropic kernels relative to dgl-cpu on regular-like graphs (i., ax and pt) because (1) the edgewise operations are less computation-intensive than nodewise operations in these kernels, making the baselines more computationally efficient on powerlaw-like graphs containing more edges than nodes; and (2) the edge-wise aggregation operations in hls implementations are executed sequentially without leveraging edge-level parallelism, making these hls kernels less computationally efficient for powerlawlike graphs. distinct from isotropic kernels, the speedup of anisotropic kernels on powerlaw-like graphs is higher than regular-like graphs because the edge-wise operations of these kernels are more computation-intensive than isotropic kernels, making baselines less efficient on powerlaw-like graphs. in spite of the promising gpu performance, there are still some drawbacks of gpu compared with hls implementations. 
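The decomposition of the GAT attention vector into a_src and a_dest mentioned above can be checked with a small numpy sketch: splitting the vector lets the dot products be computed once per node instead of once per edge, leaving only an addition per edge. The shapes, random features, and edge list below are illustrative assumptions.

import numpy as np

# The attention logit a^T [W h_src || W h_dst] is split into per-node scores that sum per edge.
rng = np.random.default_rng(0)
num_nodes, d = 5, 4
Wh = rng.normal(size=(num_nodes, d))          # transformed node features W h_i
a_src, a_dst = rng.normal(size=d), rng.normal(size=d)
edges = [(0, 1), (1, 2), (3, 4)]              # (src, dst) pairs

# edge-wise formulation: one 2d-dimensional dot product per edge
e_edgewise = [np.concatenate([Wh[s], Wh[t]]) @ np.concatenate([a_src, a_dst])
              for s, t in edges]

# node-wise formulation: two d-dimensional dot products per node, one addition per edge
s_src, s_dst = Wh @ a_src, Wh @ a_dst
e_nodewise = [s_src[s] + s_dst[t] for s, t in edges]

print(np.allclose(e_edgewise, e_nodewise))    # True: both give the same attention logits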
it is speculated that the gpu is designed to achieve high throughput in the cost of latency which plays a more important role for small-scale graphs than large-scale graphs.since gengnnalso discusses 3 of the gnn models included in this paper (gcn, gin, and gat), we can make a limited comparison of our gnn hls implementations with theirs. the two are not directly comparable for a number of reasons: (1) the feature dimensions of our gnn hls kernels are higher, (2) we use off-chip memory instead of on-chip memory, (3) our general-purpose gnn hls kernels focus. it is because of the low power of fpga logic, low clock frequency, and efficient pipeline structure of hls implementations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/456.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/456.txt new file mode 100644 index 0000000000000000000000000000000000000000..70ee9e967032ee6d701e02e6f49fb078e2e1dca9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/456.txt @@ -0,0 +1 @@ +the covid-19 pandemic, caused by the novel coronavirus sars-cov-2, has not only posed a significant threat to global public health but has also brought to light various indirect consequences affecting individuals' mental wellbeing . as healthcare systems around the world grapple with the immediate challenges of treating covid-19 patients, it has become increasingly evident that there is a pressing need to understand and address the potential long-term mental health repercussions of this global crisis. numerous studies have reported a spectrum of mental health issues emerging in the wake of covid-19 recovery, including anxiety, depression, post-traumatic stress disorder (ptsd), and other neuropsychiatric disorders .these conditions, often collectively referred to as post-covid-19 mental health disorders, can be debilitating and require comprehensive evaluation, risk assessment, and timely intervention. to effectively mitigate these mental health challenges, it is imperative to identify the risk factors contributing to their development. machine learning, with its capacity to analyze vast datasets and extract intricate patterns, presents an invaluable tool for this purpose . by leveraging data-driven insights, we can gain a deeper understanding of the variables and circumstances that predispose individuals to post-covid-19 mental health disorders. in this study, we utilize a machine learning perspective to identify key risk factors associated with the onset of mental health disorders in individuals recovering from covid-19. our dataset comprises medical information from 669 patients collected across various healthcare facilities in iraq. by applying advanced analytical techniques, we aim to pinpoint the predictors that significantly influence the likelihood of developing mental health complications following covid-19 infection. this research contributes to the growing body of knowledge on post-covid-19 mental health and provides a foundation for the development of targeted interventions and support strategies for at-risk individuals . through a comprehensive understanding of these risk factors, healthcare professionals and policymakers can work towards implementing proactive measures to safeguard the mental well-being of covid-19 survivors. 
in this study, we utilize a machine learning perspective to identify key risk factors associated with the onset of mental health disorders in individuals recovering from covid-19.this methodology allowed for a comprehensive analysis of risk factors associated with post-covid-19 mental health disorders, combining traditional statistical methods and machine learning techniques for a more accurate and predictive assessment. showing that patients with mental health disorders were slightly older on average and had a higher percentage of pre-existing mental health conditions. these symptoms encompassed anxiety, depression, and posttraumatic stress disorder, among others(16-18). moreover, (18,19) conducted a comprehensive analysis of covid-19 patients and found a clear relationship between the severity of respiratory symptoms and the prevalence of subsequent mental health issues. other studies (20,21) investigated the complex interplay between covid-19 severity, mental health, and substance use disorders. an intriguing gender difference emerged from our data, with females manifesting a higher prevalence of mental health disorders compared to males. their insights corroborate the importance of addressing gender disparities in mental health outcomes, which our study underscores.the study conducted a comprehensive study that examined the mental health impact of the covid-19 pandemic, including individuals with pre-existing mental health conditions. the study explored the long-term effects of pre-existing mental health conditions on the mental well-being of covid-19 survivors. while not directly related to pre-existing mental health conditions, their work underscores the broader relevance of data analytics in healthcare, including mental health, and aligns with our study's emphasis on tailored care approaches. the study presented a comprehensive analysis of post-covid-19 mental health disorders, and several key findings emerged from this research, with implications that extend beyond the scope of the study. these findings underscore the substantial psychological toll associated with severe covid-19 illness experiences and emphasize the importance of recognizing and addressing the heightened mental health needs of such patients. furthermore, the study unveiled a compelling link between pre-existing mental health conditions and the likelihood of post-covid-19 mental health disorders. these findings emphasize the importance of continued mental health support for individuals with a history of mental health issues and the need for a holistic approach to healthcare. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/457.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/457.txt new file mode 100644 index 0000000000000000000000000000000000000000..e0811ddec9635be8ffe135073b93789a8b7b85ae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/457.txt @@ -0,0 +1 @@ +in recent years, integrating stochastic variational inference into deep neural networks (dnns) forms a new paradigm -deep variational learning (dvl). dvl jointly characterizes dependencies between hidden neural features and between their distributions, going beyond deep neural principles and synergizing analytical statistical principles. variational autoencoders (vaes) represent a typical milestone for dvl, which transforms point-based autoencoders into process-oriented vae learning. 
various vaes have been proposed in recent years to robustly fit the likelihoods of diverse data, such as tabular data , images , and sequences . by estimating the likelihood over all data points, a vae learns a smooth representation space under certain manifold hypotheses. it characterizes variational low-dimensional distributions corresponding to the input feature space and produces analytical results leveraging deep features and relations learned by dnns. consequently, vaes further enhance representation learning for more challenging learning tasks such as out-of-domain detection , time series anomaly detection , multi-task learning , domain adaptation , and continual learning . however, a significant gap remains in vaes, i.e., exploring the distribution dependency between hidden features of dnns, which has shown beneficial for leveraging stochastic factor interactions and downstream tasks .on the other hand, to enable more explainable variational reconstruction, a recent interest and challenge in vae studies are to enable their unsupervised disentangled learning. disentangled learning has been widely explored in supervised representation learning and classification to learn single hidden units sensitive to single generative factor change but invariant to other factors' variances. however, unsupervised disentangled learning in vaes is more challenging. a common approach involves the total correlation (tc) to remedy the insufficient expressive posterior in the surrogate loss of vanilla vaes. tc is a variant of mutual information to quantify the redundancy in multivariate dimensions . for vaes, tc is incorporated into their evidence lower bounds (elbo) to induce factorized variational distributions with a loss 𝑇𝐶 (𝒁) capturing the divergence between estimated posterior 𝑞 𝜃 ( 𝒁) and prior 𝑝( 𝒁) over hidden features 𝒁:𝑞 𝜃 (𝒛 1 , 𝒛 2 , . . . , 𝒛 𝑑 ) 𝑝 (𝒛 1 ) 𝑝 (𝒛 2 ) . . . 𝑝 (𝒛 𝑑 ) = 𝐾 𝐿 (𝑞 𝜃 ( 𝒁)|| 𝑝( 𝒁)).(1) however, factorizing the prior, i.e., 𝑝( 𝒁) := 𝑑 𝑗=1 𝑝 𝒛 𝑗 involves strong iid assumption between hidden features {𝒛 𝑗 }. further, enforcing tc does not guarantee to capture dependent structures by the posterior distribution, no matter what the estimator is, by either mutual information estimators or density ratio tricks . this is because the dependencies between hidden features may vary, where some are coupled more strongly than others, resulting in more (we call explicit) vs less (implicit) explanatory hidden features. for example, high cholesterol may be more affiliated with dietary habits and exercises than with age and gender. while the tc-based factorization ensures the independence between features, more explanatory (explicit) features may still be coupled with other less explanatory (implicit) ones in the hidden feature space. hence, the tc factorization only guarantees the independence between those disentangled explicit features but ignores the dependencies in the entire hidden space. this forms another important gap in vaes. this work addresses both aforementioned gaps in modeling distribution dependency in the hidden neural space and further differentiates strongly coupled hidden features from weakly coupled features for improving unsupervised disentangled representations. to this end, we build a contrastive copula variational autoencoder (c 2 vae). first, as copula functions have been demonstrated powerful in learning high-dimensional dependence , a neural gaussian copula function learns the dependence between hidden features and identifies coupled representations. 
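As a hedged illustration of the total correlation term TC(Z) = KL(q_theta(z_1, ..., z_d) || p(z_1) p(z_2) ... p(z_d)) written above, the sketch below evaluates it in the special case of a zero-mean Gaussian, where it reduces to 0.5 * (sum_j log Sigma_jj - log det Sigma). The toy covariance matrices standing in for weakly and strongly coupled hidden features are assumptions.

import numpy as np

# Closed-form total correlation of a zero-mean Gaussian with covariance Sigma.
def gaussian_total_correlation(cov):
    cov = np.asarray(cov, dtype=float)
    return 0.5 * (np.sum(np.log(np.diag(cov))) - np.linalg.slogdet(cov)[1])

weakly_coupled   = [[1.0, 0.1], [0.1, 1.0]]   # nearly independent hidden features
strongly_coupled = [[1.0, 0.9], [0.9, 1.0]]   # strongly coupled hidden features

print(gaussian_total_correlation(weakly_coupled))    # close to 0
print(gaussian_total_correlation(strongly_coupled))  # clearly positive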
then, a self-supervised contrastive classification mechanism contrasts the disentangledly factorized representations with these coupled representations sampled from a neural gaussian copula function. further, c 2 vae filters those strongly dependent hidden features captured by the copula function and induces an optimal posterior distribution characterizing more factorizable hidden features for improved disentangled representations.the main contributions include:• we disclose the existence of different degrees of dependencies between hidden features in the deep feature space, where some features are more strongly coupled than others. a neural copula function is incorporated into vae to learn high-dimensional feature dependencies and differentiate strongly vs weakly coupled representations over features.• we enhance disentangled representations in tc-based factorization by contrasting the weakly with strongly coupled representations. a contrastive loss is incorporated into vae, which differentiates those strongly vs weakly dependent features and encourages more disentangled representations, thus filtering more dependent features with coupled representations.• our work thus learns a more expressive posterior with more explanatory features, where we extract more independent features for disentanglement but filter more coupled representations. c 2 vae thus improves disentangled representations, the instability of tc-based vaes, and the trade-off between reconstruction and representation.we evaluate c 2 vae on four synthetic and natural image datasets: two grayscale (dsprites, small-norb) and two colored (3d shapes, 3d cars). it demonstrates the effect of the c 2 vae design and mechanisms in outperforming the existing tc-based models in terms of four intervention, prediction, and information based disentanglement performance measures. this is because the dependencies between hidden features may vary, where some are coupled more strongly than others, resulting in more (we call explicit) vs less (implicit) explanatory hidden features. while the tc-based factorization ensures the independence between features, more explanatory (explicit) features may still be coupled with other less explanatory (implicit) ones in the hidden feature space. this work addresses both aforementioned gaps in modeling distribution dependency in the hidden neural space and further differentiates strongly coupled hidden features from weakly coupled features for improving unsupervised disentangled representations. first, as copula functions have been demonstrated powerful in learning high-dimensional dependence, a neural gaussian copula function learns the dependence between hidden features and identifies coupled representations. further, c 2 vae filters those strongly dependent hidden features captured by the copula function and induces an optimal posterior distribution characterizing more factorizable hidden features for improved disentangled representations.• we disclose the existence of different degrees of dependencies between hidden features in the deep feature space, where some features are more strongly coupled than others. a neural copula function is incorporated into vae to learn high-dimensional feature dependencies and differentiate strongly vs weakly coupled representations over features. 
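A minimal sketch of the Gaussian-copula coupling referred to above, under illustrative assumptions (the correlation value and the chosen marginals are not taken from the text): dependence is introduced through a correlated Gaussian whose CDF values are pushed through arbitrary marginal inverse CDFs, so a strongly coupled latent pair stays dependent regardless of its marginals.

import numpy as np
from scipy.stats import norm, expon

rng = np.random.default_rng(0)
corr = np.array([[1.0, 0.8], [0.8, 1.0]])      # "strongly coupled" latent pair (assumption)
g = rng.multivariate_normal(mean=[0.0, 0.0], cov=corr, size=5000)
u = norm.cdf(g)                                 # copula step: uniform marginals, Gaussian dependence
z1 = norm.ppf(u[:, 0])                          # standard normal marginal
z2 = expon.ppf(u[:, 1])                         # exponential marginal, coupled to z1
print(np.corrcoef(z1, z2)[0, 1])                # clearly positive: dependence survives the marginals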
a contrastive loss is incorporated into vae, which differentiates those strongly vs weakly dependent features and encourages more disentangled representations, thus filtering more dependent features with coupled representations.unsupervised disentangled learning in vaes aims to learn hierarchical distribution dependencies between hidden features toward inducing hidden units independently discriminative to generative factor variance, thus capturing those explanatory features in the hidden space. in contrast, c 2 vae involves a new attempt for disentangled learning to differ disentangled from coupled features and representations.copula functions are introduced to dvl neural networks including vaes and variational lstm (vlstm), where copula learns the dependencies between hidden features.as shown in figure1, the encoder output in c 2 vae is converted to two sets of representations: (1) the neural factorized posterior distribution 𝑞 𝜙 (𝒛|𝒙) as a multivariate gaussian with a diagonal covariance structure; and (2) a copula coupled representation by a new encoder branch as a covariance encoder, which shares the same framework as the posterior encoder.when trained by a stochastic gradient variational bayes (sgvb) estimator, vae optimizes: to learn disentangled representations by vaes for explanatory hidden generative factors, under the factorizable assumption, the posterior distribution 𝑞 𝜙 (𝒛|𝒙) is estimated by decomposing it into several independent and identically distributed (iid) conjugate distributions.this paper presents a novel tc-based vae c 2 vae, which is trained with contrastive disentangled learning by differentiating and removing coupled features and their representations. consequently, c 2 vae learns more factorizable representations for disentanglement while eliminating those strongly coupled features and representations by copula-based dependency learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/458.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/458.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0d0feff9c366d0394d542b4f559687c6324ec6a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/458.txt @@ -0,0 +1 @@ +additive manufacturing (am) refers to a family of manufacturing processes where materials are directly joined to manufacture new parts from 3d modeling data . this technology has revolutionized the manufacturing of complex parts by enabling direct material joining and offers several advantages such as cost-effective manufacturing of complex parts, reducing manufacturing waste, and opening new possibilities for manufacturing automation . am also enables mass part customization and eliminates the need for hard-tooling of machining setups, making it suitable for on-demand manufacturing, thus mitigating supply chain challenges . nevertheless, am also presents certain limitations. these encompass a lack of inherent repeatability and a shortage of widespread design knowledge and tools . also, another challenge in the acceptance of metal additive manufacturing is the lack of a streamlined process qualification methodology . additionally, the quality of am-produced parts can be lower, since defects negatively influence the structural integrity of the parts, due to the complex physics of am processes .to leverage the benefits of am, the design, processing, and production have become more complex in recent years. 
accordingly, these complexities require significant knowledge for selecting and optimization of the am process parameters. although pivotal for achieving highquality products and minimizing material and time losses, the selection of process parameters can prove time-consuming and expensive. moreover, process inconsistencies in am can exist due to complex physics and there should be knowledge-backed consensus on these methods to avoid defects . the complex multi-physics and multi-scale nature of am processes and the influence of processing parameters on quality have resulted in a shift in am from purely physicsbased approaches to a combination of physics-based and data-driven approaches .one group of materials for which am holds great potential for enhancing component performance and properties is functionally graded materials (fgms). fgms are an important group of composite advanced materials that exhibit smoothly varying properties making them desirable for aerospace, automobile, biomedical, and defense industries . such composition differs from traditional composite materials, since the location-dependent composition changes gradually in fgms, leading to enhanced properties.among the different am methods, directed energy deposition (ded) assumes a vital role in the fabrication of metallic fgms and finds extensive application in various industries for the fabrication of high-degree precision components with advanced properties . furthermore, laser-assisted directed energy deposition (ded) emerged as an advanced technique for fabricating fgms where the processing parameters have substantial influence over microstructure and mechanical properties . moreover, functionally graded lattice structures (fglss) are an important class of materials in am due to their adaptability and multifunctionality. the methods for designing fglss vary and include rule-based approaches, topology optimization, and generative design, each offering distinct advantages .to address the stated challenges in am, machine learning (ml) techniques have emerged as a promising means for optimizing processing parameters, improving product quality, and detecting manufacturing defects . ml techniques also offer a potential solution to challenges related to material development . by establishing connections between composition, microstructure, process, and performance, these techniques enable the discovery of new materials with improved properties . notably, traditional topology optimization methods have been augmented by deep learning-based approaches, particularly in the optimization of composite structures . also, techniques involving in-situ monitoring contributed to real-time defect detection, microstructure classification, and property prediction .in the following text, we first provide a brief literature review of works related to fgm fabrication, followed by reviewing works on employing ml in am. section 4 is the main section of the document and it presents published works in the literature related to the application of ml techniques in ded and for fgm fabrication. the last section concludes the paper. the researchers selected 48 papers from the comprehensive collection of works and provided a systematic literature review to assess the potential of ai applications in optimizing am processes and techniques. 
an artificial neural network (ann) was used to predict the effective material properties based on given design variables, which was trained using data generated from rve simulations, enabling quick and accurate predictions of material properties for different configurations.developed an approach for coupling physics-based process modeling with ml and optimization methods to accelerate searching the am processing space for suitable sets of processing parameters in laser powder bed fusion (lpbf). the thermal model served as a tool for predicting the thermal histories of fabricated components and understanding how processing parameters influence the material's properties. by optimizing scan parameters locally, the researchers achieved more uniform part processing and significantly reduced the variation in thermal histories, which led to improved mechanical properties and enhanced overall performance of the printed components, as well as reduced heterogeneity in lpbf. the experiments were performed by varying the p and v using an l-ded system and they were afterward used to generate a process map of melt pool the results from the different p-v combinations were used to generate a process map that was used as a validation tool to verify the optimal solutions returned by the algorithm and the results proved the effectiveness of the model in predicting optimal combinations of power and velocity.in order to establish the relationship between the build height and density of ded samples and fabricate defect-free and high-degree precision components, narayanadesigned an ann to optimize scan speed, powder feed rate, and layer thickness processing parameters for ded. to optimize the model parameters of ded, the laser power, scan speed, powder feed rate, and layer thickness were considered as inputs, and the density and build height of the samples were outputs of the model. to examine the processing parameter influence, 3d plots (process maps) were developed based on the ann predictions displaying the relationship between speed rate and power, and also feed rate with thickness for density and build height separately, highlighting the optimum regions in each. identifying the relationship between process parameters and lof porosity is a challenging problem due to the high dimensionality of the process design space and the large number of simulations or experiments required to evaluate all processing conditions. to establish the relationship between processing conditions and lof porosity, the authors proposed an nn architecture that employed symbolic regression and predicted the lof porosity using a physics-based thermo-fluid model. specifically, the porosity was predicted using the thermo-fluid model and afterward, the predicted porosity data for different processing conditions were used to show the efficacy of an active learning framework to reduce the number of simulations for effectively mapping the process design space. the researchers formulated a constrained optimization problem using dimensional analysis and nonlinear regression to identify equations that relate the process parameters to the melt pool depth, width, and length. furthermore, the process quality classification method for ded exhibited promising performance, achieving a median f1macro score of over 90% across distinct datasets acquired from various ded machines, emphasizing its practical suitability for real-world applications. 
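A hedged sketch of the kind of ANN surrogate described above, mapping laser power, scan speed, powder feed rate, and layer thickness to density and build height; the synthetic data-generating equations, the network size, and the train/test split are assumptions standing in for measured DED samples.

import numpy as np
from sklearn.neural_network import MLPRegressor
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline

rng = np.random.default_rng(0)
# columns: laser power, scan speed, powder feed rate, layer thickness (toy ranges)
X = rng.uniform([200, 5, 2, 0.2], [1000, 20, 10, 1.0], size=(200, 4))
density = 0.9 + 1e-4 * X[:, 0] - 0.003 * X[:, 1] + rng.normal(0, 0.01, 200)   # toy relation
height  = 0.5 * X[:, 3] + 0.02 * X[:, 2] + rng.normal(0, 0.02, 200)           # toy relation
y = np.column_stack([density, height])

model = make_pipeline(StandardScaler(),
                      MLPRegressor(hidden_layer_sizes=(32, 32), max_iter=2000, random_state=0))
model.fit(X[:150], y[:150])
print(model.score(X[150:], y[150:]))   # R^2 on held-out parameter combinations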
during the printing of these samples, in-situ process data, including laser power, melt pool size, scan line energy density, powder concentration gas, and temperature were collected. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/459.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/459.txt new file mode 100644 index 0000000000000000000000000000000000000000..dbdf69feea0c221cec5f1b0037643e74e7edc1ad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/459.txt @@ -0,0 +1 @@ +in the past several years, dnns have become a common tool in many scientific fields and real-world applications. as their use becomes more widespread, it is more important now than ever to better our understanding of these models. one way this can be accomplished is by studying their learned representations. this topic has been explored by many papers in recent years, including methods such as linear probing ( ), studying the dimensionality of the manifold underlying the activations ( ), and studying the geometry of the learned representations ( ).in this paper, we return to a classical tool for data analysis, principal component analysis, to help us better understand the learned representations present in dnns. while several papers have used pca to study learned representations (e.g. ), we are the first to study in depth the performance of multiple surrogate models using varying number of pcs across an entire cnn. we train a k-nearest neighbors classifier (k-nn), a nearest class-center classifier (ncc), and a support vector machine (svm) on each residual block's activations after projecting down to the first d principal components (pcs) and make qualitative observations based on the results. studying a pretrained resnet-18 on the cifar10 dataset, we observed that: 4. in the latter half of the network, the pcs necessary for 90% of the classification accuracy account for only 20%-40% of the variance.), we are the first to study in depth the performance of multiple surrogate models using varying number of pcs across an entire cnn. we train a k-nearest neighbors classifier (k-nn), a nearest class-center classifier (ncc), and a support vector machine (svm) on each residual block's activations after projecting down to the first d principal components (pcs) and make qualitative observations based on the results. first defined by, neural collapse is a phenomenon observed in the last layer activations of deep neural networks characterized by several properties, two of which are: (nc1) within-class variability collapses to zero and the activations collapse towards their class means and (nc4) the dnn classifies each activation using the ncc decision rule. bothandstudy the performance of the ncc classifier across the layers of a neural network and observe an increase in performance the deeper the layer is in the network and the more training epochs used. for each model and layer, we also found the minimum number of pcs needed to attain at least 90% of the best accuracy attained at that layer and by that model, as well as the variance explained by those pcs.looking at figure2, we see that up until block 4, each of our three models exhibits different behaviors as we increase the number of pcs, and that from block 5 onwards, all three models exhibit qualitatively identical behavior. the svm (figure2c) performs similarly to the k-nn for the first ∼100 pcs, but continues to improve in accuracy as the number of pcs increases. 
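The probing procedure described above (k-NN, nearest-class-centre, and SVM surrogates fitted on the first d principal components of a layer's activations) can be sketched as follows; the random class-dependent "activations" are an assumption standing in for real residual-block features of a ResNet-18.

import numpy as np
from sklearn.decomposition import PCA
from sklearn.neighbors import KNeighborsClassifier, NearestCentroid
from sklearn.svm import LinearSVC

rng = np.random.default_rng(0)
labels = rng.integers(0, 10, size=1000)
activations = rng.normal(size=(1000, 512)) + labels[:, None] * 0.3   # toy class-dependent features

for d in (2, 10, 100):
    projected = PCA(n_components=d).fit_transform(activations)       # first d principal components
    for name, clf in [("k-NN", KNeighborsClassifier()),
                      ("NCC", NearestCentroid()),
                      ("SVM", LinearSVC(max_iter=5000))]:
        clf.fit(projected[:800], labels[:800])
        print(d, name, clf.score(projected[800:], labels[800:]))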
on blocks 5 onwards, all three models see a sharp, almost identical spike up to the true accuracy of the dnn between one and ten pcs, followed by no change in accuracy beyond that. we observe that, just as in figure2, all three models exhibit identical profiles for blocks 5 through 8 and that, excluding block 5, they require only 2-3 pcs to attain 90% of the accuracy of the dnn.while the performance of the k-nn and ncc models is determined by the first ∼100 pcs, the svm's performance increases with the number of pcs up to using the whole space. when considered along with the observations of intermediate neural collapse of, this could perhaps point to there being a "partially collapsed" subspace in each layer that determines the behavior of the k-nn and ncc models, while the svm also accounts for information helpful to classification in the low variance subspaces. while all three models contribute to our intuitive understanding of how the representation is changing across the network, the svm's accuracy suggests that applications using learned representations might benefit most from simpler models. the notion of intermediate neural collapse is further supported by the fact that the number of pcs needed for good classification with the svm decreases monotonically across the network and that the variance necessary for accurate classification (by all models) decreases until block 5, which is where we see "full collapse". additionally, since the non-linear methods required only ∼100 pcs or less throughout the network, this implies that the curved manifold underlying the activations most likely lives within a relatively low-dimensional subspace, which can be found using pca.lastly, while it is common to select the number of pcs to keep using metrics such as accounting for 90% of variance-as seen inand-figure3bshows that this may not be the best approach for analyzing learned representations, as the majority of the variance is not necessary for classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/46.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/46.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a80409d1ee77cc97df4e6b19de5f0171415f553 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/46.txt @@ -0,0 +1 @@ +causality is the science of cause and effect . as identifying causal mechanisms is often regarded as a fundamental purist in most sciences, causality becomes elemental in advancing our knowledge. while causality is more profound in some research areas, the concept of causality is often vague or forgotten in others . with the advent of data science and the ready accessibility of big data, there is a rising potential to leverage such data in pursuit of unlocking previously unknown, hidden mechanisms or perhaps confirming ongoing hypotheses and empirical knowledge .traditionally, researchers would collect data pertaining to a phenomenon and then analyze this data to describe it, creating a model that could be used to predict such a phenomenon and/or causally infer (or explain/understand) interesting questions about such a phenomenon (see table 1) .when the primary goal is to describe the data on hand, the researcher simply aims to visualize the data to tell its story. such visualization can take a number of fronts, i.e., examine data distribution, display and quantify the magnitude or existence of relationships and associations within the data, etc. 
for example, one can report statistical insights (mean, median, etc.), correlations between variables, etc. when the primary goal is to describe data, we rarely delve into how and why such relationships/associations exist.to be able to predict, on the other hand, some variables from the data are to be identified as predictors, and at least one variable is selected as an outcome (or response). for example, one can think of regression as a mapping function wherein a list of predictors is tied together to realize an outcome. by doing so, the researcher declares the existence of a form of relationship between all variables, and such assignment can be made via assumptions and/or from domain and expert knowledge . such an assignment implicitly assumes that all data conditions have been met; hence, our view of the world does not change . simply put, the form of a regression equation is identical to the essence of the innings of the phenomenon at hand. finally, to causally infer, explain, or understand a phenomenon, the researcher ought to go beyond mere descriptions and predictions. such a researcher would hope to uncover the data generating process (dgp) responsible for the phenomenon at hand. in some instances, one may postulate a theory with a/series of hypotheses to identify and lay out how variables can become causes or are, in fact, causes and not effects 1 .here, a question may arise as to how a regression formula differs from a dgp? shmueli presents a brief historical review of such a question and then outlines that the type of uncertainty associated with explanations and that with predictions are different . further, a causal analysis ties a variable to a cause in support of a causal theory (dgp model), whereas a predictive analysis captures associations between a predictor and an outcome as obtained from the data at hand. unlike a causal model, which is retrospective, a predictive model aims to attain high accuracy in forecasting new and future observations. thus, in the latter, a researcher focuses on selecting high quality predictors with sufficiently available and quality data rather than on the role of such predictors or the mechanics behind their relationships. by contrast, in the former, the same researcher strives to study the causal role of predictors and may, in fact, opt to retain a variable with strong causal ties despite being statistically insignificant.traditionally, researchers would collect data pertaining to a phenomenon and then analyze this data to describe it, creating a model that could be used to predict such a phenomenon and/or causally infer (or explain/understand) interesting questions about such a phenomenon (see table1). further, a causal analysis ties a variable to a cause in support of a causal theory (dgp model), whereas a predictive analysis captures associations between a predictor and an outcome as obtained from the data at hand. by contrast, in the former, the same researcher strives to study the causal role of predictors and may, in fact, opt to retain a variable with strong causal ties despite being statistically insignificant.show the following example, "thus, if an observational study can demonstrate that the cause always precedes the effect, that the effect is consistently close to the cause, and that the association is repeatedly and constantly observed, we can in fact still claim causation in a humean sense. 
implicitly, the counterfactual approach to causality allows us to reason without having to actually observe the cause and effect (since, by definition, a counterfactual is an event that we cannot observe). o a more general example with a theme on policy is, "policy analysts are often interested in answers to questions with the structure 'what would have happened to y (the 'target variable'), had x been x (the 'control variable' and its value, respectively)?"." similarly, for sobel"view the cause as an event or state that can be manipulated (or at least potentially manipulated); under this view, causation resides in the existence of a one-to-one correspondence between the state of the manipulated cause and the state of the effect.hillproposes nine criteria to determine causation, namely, strength (strong association), consistency (regularity/repeated observations), specificity (a cause leads to a single effect), temporality (a cause precedes its effect in time), biological gradient (presence of a unidirectional dose-related response), plausibility, coherence (interpretations of a cause and effect does not conflict with what is known), experiment, and analogy. while this limits our estimation of possible causal effects at the unit level, such an estimation can be aggregated at the group level to yield the average causal effect.• "… zero-level causality suffers from the criticism that their is no intervention involved to observe the causal effect of doing something on the system.• "… first-level causal estimation that mostly involves randomization experiments may make the conclusions of the study more secure, but fails to reveal the … processes working behind the effect observed. in a way, an intervention describes the distribution of the outcome conditional on setting the value of a variable, x = x., p(y|x) = p stating that: the probability of event y = y given that we observed event x = x is equal to p." "at the interventional layer we find sentences of the type p(y|do(x), z), which denotes "the probability of event y = y given that we intervene and set the value of x to x and subsequently observe event z = z." "at the counterfactual level, we have expressions of the type p(yx|x′, y′) which stand for "the probability that event y = y would be observed had x been x, given that we actually observed x to be x′ and y to be y′. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/460.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/460.txt new file mode 100644 index 0000000000000000000000000000000000000000..fd07b9a0bd1694bcbabd9d35d4d75e780983dff2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/460.txt @@ -0,0 +1 @@ +electric mobility service (ems) refers to the use of electricpowered vehicles, including e-bikes, e-scooters, hybrid electric vehicle (hev), and plug-in hybrid electric vehicle (phev), for transportation needs. ems has rapidly transformed the transportation landscape, offering sustainable alternatives to traditional combustion engine vehicles. these electric vehicle (ev)s not only address environmental concerns but also contribute to the development of an interconnected transportation ecosystem, advancing intelligent transportation systems (its). by embracing ems, we promote a future where interconnected vehicles, advanced data analytics, and smart infrastructure combine to create a safer, more efficient, and sustainable transportation network. 
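A hedged toy simulation of the distinction drawn above between conditioning, p(y | x), and intervening, p(y | do(x)): the structural equations below, with a single confounder z driving both x and y and no causal effect of x on y, are an illustrative assumption, not a model taken from the text.

import numpy as np

rng = np.random.default_rng(0)
n = 200_000
z = rng.random(n) < 0.5                         # confounder
x = rng.random(n) < np.where(z, 0.9, 0.1)       # z strongly drives x
y = rng.random(n) < np.where(z, 0.8, 0.2)       # z drives y; x itself has no effect on y

# conditioning on x=1 picks out mostly z=1 units, so y looks strongly associated with x
print("p(y | x=1)     =", y[x].mean())          # about 0.74
# intervening sets x=1 for everyone but leaves y's mechanism untouched, so p(y|do(x=1)) = p(y)
print("p(y | do(x=1)) =", y.mean())             # about 0.50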
energy management is crucial in ems to ensure the efficient operation of electric vehicles and their charging infrastructure. it involves controlling and optimizing energy flow to meet specific requirements. three key concerns in ems energy management include ensuring a reliable range (often referred to as range anxiety), optimizing charging rates, and maximizing energy storage lifespan. achieving this requires coordinating electrical energy resources like charging stations, renewable energy sources, and energy storage systems to facilitate electric vehicle charging.effective energy management is crucial for multiple reasons. one important aspect is ensuring the availability of charging infrastructure to meet the rising demand for electric vehicle charging . as the number of evs continues to grow, the charging load on the power grid can become substantial. therefore, meticulous management is essential to prevent overloading the grid and potential blackouts. another key benefit of energy management is optimizing the utilization of energy resources, minimizing wastage, and maximizing the efficiency of the charging process. this not only helps reduce operational costs but also enhances the overall sustainability of ems systems. additionally, energy management facilitates grid integration and empowers evs to contribute to the grid by providing ancillary services or participating in vehicleto-grid systems, thus strengthening the grid's stability and responsiveness.artificial intelligence (ai) technologies offer a transformative solution to the limitations of traditional energy management techniques in ems . conventional methods, which are primarily based on predetermined charging schedules and basic load balancing algorithms, struggle to meet the dynamic optimization requirements and growing complexity of modern ems . in contrast, ai leverages advanced algorithms and real-time data analysis to optimize charging strategies intelligently. by adapting to changing conditions, utilizing predictive modeling, and employing multi-objective optimization, ai enables more efficient and effective energy management in ems, addressing the demand for optimal charging solutions.the potential of ai in transforming energy management for ems lies in its computational techniques, including machine learning (ml) and deep learning (dl). ai algorithms and data-driven approaches enable intelligent systems to adapt to varying conditions, optimize charging operations, predict user behavior, and manage energy resources in real time. ai facilitates dynamic load balancing, efficient energy allocation, and demand-response strategies, resulting in improved charging infrastructure utilization, reduced energy costs, and enhanced grid integration. this paper comprehensively reviews ai technologies and techniques for energy management in ems, covering energy consumption modeling, estimation, and prediction. it also discusses current challenges and proposes a research roadmap for future advancements. by assessing the state of ai-based energy management, this paper contributes to the development of effective and sustainable ems solutions.the paper is organized as follows. section ii presents the methodology used in our paper, proposes the research questions we plan to investigate and summarizes and compares other existing surveys. section iii provides an overview of conventional energy management systems, discussing their advantages and limitations. 
section iv focuses on ai approaches for energy management, delving into the current state of affairs in this domain. section v provides some discussion and introduces challenges to ai-based energy management methods. finally, section vi offers a brief conclusion summarizing the entire paper and presents future research directions. energy management is crucial in ems to ensure the efficient operation of electric vehicles and their charging infrastructure. three key concerns in ems energy management include ensuring a reliable range (often referred to as range anxiety), optimizing charging rates, and maximizing energy storage lifespan. achieving this requires coordinating electrical energy resources like charging stations, renewable energy sources, and energy storage systems to facilitate electric vehicle charging. another key benefit of energy management is optimizing the utilization of energy resources, minimizing wastage, and maximizing the efficiency of the charging process. by adapting to changing conditions, utilizing predictive modeling, and employing multi-objective optimization, ai enables more efficient and effective energy management in ems, addressing the demand for optimal charging solutions.the potential of ai in transforming energy management for ems lies in its computational techniques, including machine learning (ml) and deep learning (dl). ai facilitates dynamic load balancing, efficient energy allocation, and demand-response strategies, resulting in improved charging infrastructure utilization, reduced energy costs, and enhanced grid integration. this paper comprehensively reviews ai technologies and techniques for energy management in ems, covering energy consumption modeling, estimation, and prediction.2) how are ai-based approaches employed in energy consumption modeling, estimation, and prediction in ems? 3) what are the current challenges and limitations of ai methods in energy management for ems? 4) what is the future research roadmap for advancements in ai-based energy management for ems? 5) how does the use and focus of ai approaches vary among different ems?.the literature search process was conducted using a set of specific keywords, including "energy management", "electric mobility service", "machine learning", "ev", "e-bike", "escooter" and "energy consumption prediction".as the complexity of energy management systems continue to rise, conventional approaches are being surpassed by more advanced ai methods, offering enhanced energy management capabilities. in the subsequent discussion, we review ai strategies deployed within ems energy management, examining them from two vantage points: traditional ml methods and dl methods. various ml approaches, including linear regression (lr), multiple linear regression (mlr)-, support vector machine (svm) or support vector regression (svr)-, decision tree (dt),, random forest (rf),,, extreme gradient boosting (xgb),, light gradient boosting machine (lgbm), k-nearest neighbor (knn),,, and artificial neural networks (ann),-, have been widely employed to address the challenges associated with energy consumption modeling or prediction for ems. 
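A hedged sketch of the kind of model comparison listed above for trip-level energy consumption prediction; the synthetic features (distance, mean speed, ambient temperature), the generating equation, and the chosen regressors are assumptions standing in for real EV or e-bike trip logs.

import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
from sklearn.model_selection import cross_val_score

rng = np.random.default_rng(0)
distance = rng.uniform(1, 50, 500)              # km
speed = rng.uniform(10, 90, 500)                # km/h
temperature = rng.uniform(-5, 35, 500)          # degrees C
energy = (0.15 * distance + 0.002 * distance * speed
          + 0.01 * np.abs(temperature - 20) + rng.normal(0, 0.3, 500))   # kWh, toy process
X = np.column_stack([distance, speed, temperature])

for name, model in [("linear regression", LinearRegression()),
                    ("random forest", RandomForestRegressor(random_state=0)),
                    ("support vector regression", SVR())]:
    r2 = cross_val_score(model, X, energy, cv=5, scoring="r2").mean()
    print(f"{name}: mean R^2 = {r2:.2f}")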
we can employ techniques like probabilistic forecasting, optimization algorithms, and energy management systems to optimize renewable energy use, minimize grid strain, and reduce carbon emissions.in summary, future challenges in e-mobility energy management include data availability and quality, model complexity and scalability, real-time prediction, renewable energy integration, and uncertainty management. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/461.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/461.txt new file mode 100644 index 0000000000000000000000000000000000000000..f61739f489bb5be0b2cd58df7e8ab3f4221f7e45 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/461.txt @@ -0,0 +1 @@ +cancer is one of the leading causes of death worldwide. over the past decades, the world has seen a dramatic increase in yearly cases of oncology detection . on the one hand, this is due to factors of worsening environment . paradoxically, on the other hand, this is due to the general tendency of an increase in life expectancy and substantial improvement in medical diagnostic methods . while oncology remains a quire the use of specific or deep medical data. it allows the model to be sensitive to the target disease but also imposes restrictions on mass implementation.for these reasons, it is a disruptive change for healthcare to apply ai methods for mass personalized assessment of the cancer risk among patients based on the existing volume of electronic health records (ehrs) . by utilizing ai methods in the background, it becomes possible to estimate the instant risk of each patient without their active participation or additional examinations. during patient interactions with the healthcare system, high-risk individuals can be tested explicitly in a preventive manner to verify the presence of cancer. if the disease is confirmed, early detection allows for more time and increased chances of successful treatment. implementing such methods on a population level can lead to a substantial improvement in early-stage cancer detection. additionally, this method enables a more refined patient ranking within existing cancer screening protocols, allowing for prioritized testing of patients with a high cancer risk while staying within the same budget.in this study, we propose a novel method for personalized cancer risk prediction based on an ensemble of survival analysis and machine learning models. among other methods, our one stands out by the minimum data greedy policy, requiring only a history of medical service codes and diagnoses from ehr. such a feature greatly expands the method's applicability among clinics with varying data completeness. furthermore, we present a comprehensive set of numerical and medical experiments to demonstrate the effectiveness of the proposed method and identify the key factors contributing to cancer development. additionally, this method enables a more refined patient ranking within existing cancer screening protocols, allowing for prioritized testing of patients with a high cancer risk while staying within the same budget.the problem of cancer risk assessment based on ehr data evokes excellent interest among researchers where the ehr is a personal storage containing a sequence of medical events of the patient in complete detail. 
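A hedged sketch of the label construction described above: given a patient's sequence of (date, code, type) medical events, the target is 1 if any C-diagnosis occurs within t months after the prediction date, and simple count features are built from the history before that date. The example events, the field layout, and the 30-day month approximation are illustrative assumptions.

from datetime import date

def make_target(events, t_pred, t_months):
    horizon_days = t_months * 30                      # rough month length (assumption)
    return int(any(etype == "diagnose" and code.startswith("C")
                   and 0 < (d - t_pred).days <= horizon_days
                   for d, code, etype in events))

def make_features(events, t_pred):
    history = [code for d, code, _ in events if d <= t_pred]
    return {"n_events": len(history),                 # minimal code-count features
            "n_distinct_codes": len(set(history))}

events = [(date(2020, 1, 5), "B01", "diagnose"),      # hypothetical EHR records
          (date(2020, 3, 2), "B03.016", "medical service"),
          (date(2020, 9, 20), "C18", "diagnose")]
print(make_features(events, date(2020, 4, 1)), make_target(events, date(2020, 4, 1), 12))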
for example, in, to train the model, 17 specific laboratory variables are required, among which are 25-oh vitamin d and bicarbonate values; in, the breast cancer assessment model relies on the breast cancer risk assessment tool (bcrat) parameters from ehr; in, a patient's psa analysis over last 10 years is needed to predict the risk of prostate cancer, and so on.where id i is the personal id of the i-th patient; sex i is the sex of the i-th patient (0 -female, 1 -male); birth i is the birth date of the i-th patient; e i = {e i j } m i j=1 is a sequence of medical events for the i-th patient, m i is the number of medical events for the i-th patient in ehr q i .where date i j is the date of the j-th medical event; code i j is the j-th medical event code; type i j is the type of the j-th medical event ("diagnose" or "medical service"); description i j is the text record of the j-th medical event (if not empty).at the time t pred , we compute the patient's risk that a medical event e x occurs in the following t months where x is a target diagnosis. according to the world health organization (who), an ehr usually contains a patient's medical history, diagnoses, treatments, medications, allergies, vaccinations, x-ray images, medical examination reports, laboratory and instrumental examination results, etc. we assign target = 1 if any diagnosis in the range c00-c99 (note as c-diagnosis) occurs and assign target = 0 otherwise; otherwise (target = 0), the time t end is limited to 1 year before the date of the last medical event in the sequence (to exclude the occurrence of the c-diagnosis less than 1 year after the last record in the ehr); 3.is the number of patients diagnosed with cancer in t j and n j is the number of patients not yet diagnosed with cancer before t j .let any medical event (diagnosis or medical service) be a patient's visit to a medical facility.since one patient visit generates one observation in the sample, each patient can undoubtedly have some previous visits and, consequently, some values of the c-diagnosis risk (risk = probability) at the t pred . the auc roc values among the baseline method, proposed survival ensemble, and aft regression are indistinguishable but significantly higher than for the model without survival models. table12presents the actual medical statistical data describing how many patients need to be examined (on average) to detect one patient diagnosed with cancer. these findings suggest that the current screening system is far from perfect, and ai-based methods like the proposed survival ensemble have the potential to significantly increase the early detection of patients diagnosed with cancer. we propose the novel method to address such a problem on the primary data from ehr commonly available in most medical facilities, namely codes of medical diagnoses and services. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/462.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/462.txt new file mode 100644 index 0000000000000000000000000000000000000000..fbcedb089849f25d807a620f567219ff7305a3ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/462.txt @@ -0,0 +1 @@ +graph neural networks (gnns) have proven to be a powerful deep learning architecture for processing relational data. more specifically, gnns operate in graph domains comprised of pairwise relations between nodes. topological neural networks (tnns) extend gnns by operating on domains featuring higher-order relations. 
such domains, * equal contribution 1 called topological domains, feature part-whole and/or settype relations (fig. 1) (hajij et al., 2023), allowing a more expressive representation of the data. by operating on a topological domain, a tnn leverages the intricate relational structure at the heart of the data. topological deep learning (bodnar, 2022;hajij et al., 2023) has shown great promise in many applications, ranging from molecular classification to social network prediction. however, the adoption of its architectures has been limited by the fragmented availability of open-source algorithms and lack of benchmarking between topological domains.the challenge described in this white paper aims to fill that gap by implementing models in a unifying open-source software. in doing so, the challenge contributes to fostering reproducible research in topological deep learning. participants were asked to contribute code for a published tnn, following topomodelx's api (hajij et al., 2023) and computational primitives, and implement a training mechanism for the algorithm's intended task. this white paper is organized as follows. section 2 describes the setup of the challenge, including its guidelines and evaluation criteria. section 3 lists all qualifying submissions to the challenge and its winners.graph neural networks (gnns) have proven to be a powerful deep learning architecture for processing relational data. topological neural networks (tnns) extend gnns by operating on domains featuring higher-order relations. such domains, * equal contribution 1 called topological domains, feature part-whole and/or settype relations (fig. by operating on a topological domain, a tnn leverages the intricate relational structure at the heart of the data. topological deep learning(bodnar, 2022;hajij et al. in doing so, the challenge contributes to fostering reproducible research in topological deep learning. participants were asked to contribute code for a published tnn, following topomodelx's api(hajij et al.the challenge 1 was held in conjunction with the workshop topology and geometry in machine learning of the international conference on machine learning (icml) 2023 2 . does the submission implement the chosen model correctly, specifically in terms of its message passing scheme? (the training schemes do not need to match that of the original model). rather, these criteria aimed to reward clean code and accurate model architectures that will foster reproducible research in topological deep learning. upon voting, participating teams and reviewers were each asked to select the best and second best model implementation in each topological domain, thus making eight choices in total. all four topological domains are represented in this set of models: 12 hypergraph implementations, 11 simplicial model implementations, 3 cellular implementations, and 2 combinatorial implementations.this white paper presented the motivation and outcomes of the organization of the topological deep learning challenge hosted through the icml 2023 workshop on topology, algebra and geometry in machine learning. challenge submissions implemented a wide variety of topological neural networks into the open-source package topomodelx. we hope that this community effort will foster reproducible research and further methodological benchmarks in the growing field of topological deep learning. 
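To make the notion of message passing on a higher-order domain concrete, here is a minimal, generic sketch of a hypergraph layer (hypergraphs being one of the four topological domains mentioned above): node features are aggregated into hyperedges and then sent back to nodes. It is only an illustration of the idea; it does not follow TopoModelX's API or computational primitives, and the layer name, tensor shapes, and normalisation are assumptions.

```python
import torch
import torch.nn as nn

class HypergraphMessagePassing(nn.Module):
    """Two-step message passing: nodes -> hyperedges -> nodes.

    `incidence` is the (num_nodes, num_edges) 0/1 incidence matrix of the
    hypergraph; degree-normalised aggregation keeps feature scales stable.
    """
    def __init__(self, in_dim: int, out_dim: int):
        super().__init__()
        self.node_to_edge = nn.Linear(in_dim, out_dim)
        self.edge_to_node = nn.Linear(out_dim, out_dim)

    def forward(self, x: torch.Tensor, incidence: torch.Tensor) -> torch.Tensor:
        edge_deg = incidence.sum(dim=0).clamp(min=1).unsqueeze(-1)    # nodes per hyperedge
        node_deg = incidence.sum(dim=1).clamp(min=1).unsqueeze(-1)    # hyperedges per node
        edge_msg = incidence.t() @ self.node_to_edge(x) / edge_deg    # aggregate into hyperedges
        node_msg = incidence @ self.edge_to_node(edge_msg) / node_deg # send back to nodes
        return torch.relu(node_msg)

# toy usage: 4 nodes, 2 hyperedges
x = torch.randn(4, 8)
incidence = torch.tensor([[1., 0.], [1., 1.], [0., 1.], [1., 1.]])
out = HypergraphMessagePassing(8, 16)(x, incidence)   # shape (4, 16)
```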
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/463.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/463.txt new file mode 100644 index 0000000000000000000000000000000000000000..469a1cbfde61034e0531fc9ec3e7913124ef2591 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/463.txt @@ -0,0 +1 @@ +time-series (ts) forecasting is a fundamental task in contemporary data analysis, aiming to predict future events based on historical data. its broad applicability across various domains, including strategic planning, resource allocation, risk management, and control design, underscores its importance. consequently, extensive research efforts have been dedicated to developing efficient and accurate long-term time-series forecasting (ltsf) methods within the machine learning (ml) and applied statistics communities. the ideal ltsf method should provide an end-to-end solution capable of handling diverse time-series data characteristics across domains. however, determining the optimal default method for a specific ltsf task and ensuring reliable out-of-the-box performance remains a challenge. the landscape of available methods encompasses deep learning-based approaches, continuous time models, statistical techniques, and more. each method class has its advantages and an active research community. notably, deep neural network (nn) models have recently experienced rapid advancements in the context of the ltsf problem , motivating our focus on evaluating such models within our benchmark. in ltsf research, the availability of large and diverse datasets is crucial for training and benchmarking contemporary ml models. while natural language processing benefits from abundant data, timeseries datasets still lag behind in quantity and diversity. for example, training foundational models for time-series analysis necessitates access to substantial and varied data. as highlighted by zhou et al. , the largest existing dataset for time-series analysis falls below 10gb, whereas our dataset exceeds 100 gb.existing ltsf datasets can be broadly classified into two categories. first, real-life datasets are sourced from diverse domains, such as temperature records, traffic data, and weather observations. however, these datasets often suffer from being univariate or treated dimension by dimension, limiting their suitability for comprehensive evaluation. second, researchers resort to custom synthetic 3. introduction of new hand-crafted models: we recognized the need to explore and evaluate new models not tested before in the context of ltsf. our research introduces two models: the latent nlinear model and deepar enhanced with curriculum learning (cl). these models have not been previously applied to ltsf tasks, and their inclusion in the benchmark is intended to highlight their performance and potential. we observed significant improvements across the entire dataset when using these models, suggesting their effectiveness as baselines for ltsf.having the unified dataset, we perform a thorough benchmark of a suite of nn-based methods, including classical approaches like lstm, deepar, and latent ordinary differential equation (ode) with recently published newer approaches demonstrated to improve the forecasting accuracy over the existing methods: spacetime, n-hits, ltsf nlinear, and patch transformer. 
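For intuition about the linear baselines named in the list above (the LTSF NLinear family), the following is a minimal sketch of a one-layer, channel-independent linear forecaster with last-value normalisation, in the spirit of the "embarrassingly simple" linear models discussed later in this text. It is not the benchmarked implementation; the class name, window lengths, and normalisation details are illustrative.

```python
import torch
import torch.nn as nn

class NLinearBaseline(nn.Module):
    """One-layer linear forecaster with last-value normalisation (NLinear-style).

    Each channel is projected independently from `context_len` past steps to
    `horizon` future steps; subtracting the last observation before the linear
    map and adding it back afterwards removes the level of the series.
    """
    def __init__(self, context_len: int, horizon: int):
        super().__init__()
        self.proj = nn.Linear(context_len, horizon)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, context_len, channels)
        last = x[:, -1:, :]                        # per-series level
        y = self.proj((x - last).transpose(1, 2))  # (batch, channels, horizon)
        return y.transpose(1, 2) + last            # (batch, horizon, channels)

# toy usage: 336-step context, 96-step horizon, 3 channels
model = NLinearBaseline(context_len=336, horizon=96)
forecast = model(torch.randn(8, 336, 3))           # (8, 96, 3)
```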
last but not least, we share an open-source library with state-of-the-art implementations as a toolkit in the pytorch framework to help accelerate progress in the field. notably, deep neural network (nn) models have recently experienced rapid advancements in the context of the ltsf problemand climate data (ushcn) the united states historical climatology network (ushcn) dataset. it evaluates statistical approaches for ltsf,bauer et al. the dataset is multivariate (three variables), and the main difficulty of the lorenz dataset lies in modeling the chaotic dynamics exhibiting high sensitivity to the initial history; the forecasting problem is especially challenging for shorter context windows. there are two main difficulties of the forecasting task for the ks dataset: the equation is chaotic and exhibits high sensitivity to the initial condition; the spatial dimension is large (100), requiring an efficient state encoding in a forecasting method.our dataset comprises two main components: real-life datasets commonly used in the recent literature, the pems-bay traffic dataset, contrary to the standard datasets, characterized by a large number of interacting channels, and a collection of clean, diverse synthetic datasets on top of that. by combining reallife and synthetic datasets, we believe our benchmark dataset provides a comprehensive evaluation framework to assess the performance of nn and, in general, ml models in time-series forecasting. as described by the authors, an 'embarrassingly simple one-layer linear model' was demonstrated to outperform the sophisticated transformer-based models in real-life datasets. in order to circumvent the limitations of ltsf linear models about restricted state-space dimension and modeling of nonlinear dependencies between the state components, we introduce the latent ltsf nlinear model.our benchmark datasets consist of a relatively large number of training trajectories (all synthetic datasets consists of 20k trajectories). we performed an experiment in which the training dataset was reduced to only 1000 trajectories and evaluated the nn models in such a scenario for a selection of datasets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/464.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/464.txt new file mode 100644 index 0000000000000000000000000000000000000000..9ae8790121b204fa805abb89b72aa01464571f5b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/464.txt @@ -0,0 +1 @@ +the covid-19 pandemic has ushered in an era of unparalleled challenges and critical questions regarding the virus's multifaceted impacts on human health and well-being (1)(2)(3)(4)(5)(6)(7)(8). among the myriad aspects that researchers worldwide have been diligently investigating are the intricate relationships between covid-19 and various aspects of human physiology and pathology (9)(10)(11)(12)(13)(14)(15)(16)(17)(18)(19)(20). this pursuit is critical, as it contributes significantly to our understanding of the virus's pathogenesis and informs strategies for effective management and mitigation of its effects. consumer behavior during the pandemic has been a subject of keen interest, as observed in research by murugan et al. (2022) . their work focuses on predicting consumer behavior during pandemic conditions using sentiment analytics. understanding how consumers respond to such crises is critical for businesses and policymakers. 
another area of research has been the detection of insincere questions on platforms like quora, as explored by chakraborty et al. (2022) . their attention-based model for classifying insincere questions can help maintain the quality of online discussions during these times. advancements in medical research during the pandemic have been essential. yousif (2022) conducted a comprehensive review of emerging insights in medical research in iraq, shedding light on important developments. the health benefits of specific foods have also come under scrutiny. al-amrani and yousif (2022) explored the potential of pomegranates as a superfood with numerous health benefits. in the realm of microbiology, shahid (2022) investigated the prevalence of the chua gene virulence factor in escherichia coli isolated from clinical samples in al-diwaniyah province. such studies contribute to our understanding of infectious diseases, including those exacerbated by the pandemic. finally, the impact of covid-19 on comorbidities has been a major area of investigation. yousif et al. examined comorbidities associated with covid-19, shedding light on the complex interplay between the virus and pre-existing health conditions. one of the noteworthy areas of research has centered around hematological changes in covid-19 patients (27). hematological alterations, ranging from changes in white blood cell counts to coagulation abnormalities, have been frequently reported among individuals infected with the sars-cov-2 virus (28). these changes can hold crucial diagnostic and prognostic value, guiding healthcare practitioners in assessing disease severity and predicting patient outcomes (29). in the realm of data-driven research, machine learning and data science have emerged as invaluable tools for understanding and combatting covid-19 (36). other studies highlighted the role of machine learning in insurance risk prediction, underlining the adaptability of these technologies in addressing healthcare challenges during a pandemic (37)(38)(39). it involves collecting data from a group of post-covid-19 patients who have experienced cardiovascular complications and a control group of post-covid-19 patients who did not experience such complications. these patients are divided into two groups: a case group with cardiovascular complications and a control group without complications.case group (n=200) control group (n=200) age (years) 45.the findings of this study shed light on the significant cardiovascular complications that may arise in patients recovering from covid-19. these findings are consistent with research suggesting that covid-19 may lead to arrhythmias, possibly due to myocardial inflammation or direct viral effects on cardiac tissue(42,43). furthermore, heart failure occurred in 9% of the case group compared to 1. stroke was another concerning complication, with 4% of the case group experiencing it compared to 0. troponin levels, a marker of myocardial injury, were significantly higher in the case group compared to the control group (table4). this elevation is consistent with previous studies linking elevated troponin levels with severe covid-19 and adverse cardiovascular outcomes(48,49). these results are in agreement with research indicating that while lipid metabolism may be altered in covid-19 patients, the clinical significance remains unclear (50,51). 
inflammatory markers, such as crp and il-6, were notably elevated in the case group (table4).echocardiogram abnormalities were more prevalent in the case group, affecting 20% of participants, compared to 5% in the control group (table5). angiogram results further substantiate the presence of cardiovascular complications in the case group, with 30% exhibiting abnormalities compared to 7. the high specificity (90% for the case group, 94% for the control group) suggests that these models are effective at correctly identifying individuals without cardiovascular complications (table6). however, the sensitivity of 75% for the case group and 89% for the control group indicates that there is room for improvement in correctly identifying individuals with cardiovascular complications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/465.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/465.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab4ac27afadff8fa1eba04ba0f011e5d0b0c1caf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/465.txt @@ -0,0 +1 @@ +support vector machines (svms) are computationally powerful tools for supervised learning, and therefore frequently utilized in classification and regression problems . svms have been effectively used to solve a range of real world problems, including intrusion detection system , facial expression recognition , speaker identification , text classification , and seizure detection . the methodical approach of svms is driven by the statistical learning theory. finding the best-separating hyperplane between the positive and negative instances is the main goal of svm. however, despite their effectiveness, svms have some limitations, such as higher time complexity, sensitivity to noise and outliers, and inability to handle class imbalance problems .to address the limitations mentioned above, several variants of svm have been proposed, such as generalized eigenvalue problem-based svm (gepsvm) and fuzzy support vector machine (fsvm) . gepsvm solves a generalized eigenvalue problem which is computationally efficient and provides a better solution than svm. jayadeva et al. proposed a twin support vector machine (twsvm) that constructs two non-parallel hyperplanes like gepsvm. however, unlike gepsvm, twsvm solves a pair of smaller-sized quadratic programming problems (qpps) instead of a generalized eigenvalue problem. twsvm is efficient with respect to time complexity in comparison to svm. to reduce the computational complexity of twsvm, the least squares twin support vector machine (lstsvm) was proposed as an alternative to the convex qpps in twsvm. lstsvm utilizes the squared loss function instead of the hinge loss function, resulting in lower training time . however, the lstsvm model needs the hyperplane at exactly one distance from the other class, making it sensitive to noise and outliers.to address the sensitivity to noise near the hyperplanes, nasiri et al. developed the energy-based lstsvm (els-tsvm) model, which introduces energy parameters to relax the constraints for the hyperplanes. furthermore, tanveer et al. developed a robust energy-based least square twin support vector machine (rels-tsvm) by incorporating regularization terms in els-tsvm, making it more robust to noise. recent studies have shown that the rels-tsvm model and its variants outperform other twsvm-based models in binary class problems because they can handle noise and outliers effectively . 
recently, laxmi et al. suggested another twsvm variant, intuitionistic fuzzy least square twin support vector machine (iflstsvm). iflstsvm incorporates the concept of the intuitionistic fuzzy set , which handles the uncertainties in the data more precisely.one of the frequently encountered problems in real world classification tasks is class imbalance . in case of rels-tsvm, the classifier gives equal weightage to each sample, causing the learned decision surface to get biased toward the majority class. to address this issue, we propose two improved variants of rels-tsvm algorithm, known as robust energy-based intuitionistic fuzzy least squares twin support vector machine (if-relstsvm) and robust energy-based fuzzy least squares twin support vector machine (f-relstsvm) for class imbalance learning. unlike twsvm, lstsvm, els-tsvm, and rels-tsvm models, the proposed if-relstsvm introduces intuitionistic fuzzy scores to both classes to reduce the negative effect of noise and outliers. this intuitionistic fuzzy score is achieved by introducing a pair of membership and non-membership degrees for every data point. the proposed if-relstsvm model also incorporates a regularization term to reduce over-fitting. this extra component encapsulates the marrow of statistical learning theory and implements the structural risk minimization (srm) principle in the proposed formulation.furthermore, in the proposed f-relstsvm model, we present another approach where the projection on proximal hyperplanes is utilized for the fuzzy memberships. both the proposed if-relstsvm and f-relstsvm models solve systems of linear equations instead of qpps as in twsvm. therefore, the proposed models are robust compared to the twsvm, lstsvm, and els-tsvm. moreover, the proposed models can handle the class imbalance problem by introducing a weight parameter to the objective function, which balances the influence of the minority and majority classes in the learning process. to evaluate the performance of the proposed algorithms, we conducted experiments on several benchmark datasets. experimental results show that the performance of the proposed f-relstsvm model is better than other state-of-the-art algorithms, including lstsvm, els-tsvm, rels-tsvm, and iflstsvm in terms of auc .the proposed if-relstsvm and f-relstsvm models have the following appealing features:1. unlike twsvm, lstsvm, els-tsvm, and rels-tsvm, the pro-posed if-relstsvm model introduces intuitionistic fuzzy score to both classes that reduce the effect of noise and outliers in samples. if-relstsvm introduces a pair of membership and non-membership degrees for every data point. moreover, the membership degree uses the imbalance ratio (ir) as a multiplier to deal with class imbalance issues in the datasets. so, the problems of class imbalance and noisy data are handled in this approach. 2. the proposed if-relstsvm algorithm provides solution to two types of noise: (a) noise near the hyperplane which is dealt with energy-based approach. (b) noise away from the hyperplane which is dealt with the intuitionistic fuzzy-based approach in this work. 3. the proposed f-relstsvm model utilizes the projection on proximal hyperplanes to measure the membership leading to proper fuzzy memberships to the data points before formulating the final classifier. 4. if-relstsvm and f-relstsvm involve the regularisation term to each objective function to maximize the margin. this involves the structural risk minimization principle in the proposed formulations. 5. 
the proposed formulations involve the solution of system of linear equations instead of qpps, leading to lesser computation time.to address the limitations mentioned above, several variants of svm have been proposed, such as generalized eigenvalue problem-based svm (gepsvm)and fuzzy support vector machine (fsvm). to reduce the computational complexity of twsvm, the least squares twin support vector machine (lstsvm)was proposed as an alternative to the convex qpps in twsvm. to address this issue, we propose two improved variants of rels-tsvm algorithm, known as robust energy-based intuitionistic fuzzy least squares twin support vector machine (if-relstsvm) and robust energy-based fuzzy least squares twin support vector machine (f-relstsvm) for class imbalance learning. unlike twsvm, lstsvm, els-tsvm, and rels-tsvm models, the proposed if-relstsvm introduces intuitionistic fuzzy scores to both classes to reduce the negative effect of noise and outliers. experimental results show that the performance of the proposed f-relstsvm model is better than other state-of-the-art algorithms, including lstsvm, els-tsvm, rels-tsvm, and iflstsvm in terms of auc. unlike twsvm, lstsvm, els-tsvm, and rels-tsvm, the pro-posed if-relstsvm model introduces intuitionistic fuzzy score to both classes that reduce the effect of noise and outliers in samples. the proposed if-relstsvm algorithm provides solution to two types of noise: (a) noise near the hyperplane which is dealt with energy-based approach. the proposed f-relstsvm model utilizes the projection on proximal hyperplanes to measure the membership leading to proper fuzzy memberships to the data points before formulating the final classifier.proposed an energy-based least square twin support vector machine that incorporates an energy parameter for each hyperplane to limit the effect of noise and outliers. one such drawback observed in the class center-based fuzzy membership is that these function assigns membership values solely based on the distance from the class center, disregarding whether the data point is closer to the proximal hyperplane of its own class or not.proposed an energy-based least square twin support vector machine lstsvm (els-tsvm) that reduces the influence of noise and outliers by incorporating an energy component for each hyperplane.in this section, we perform numerical experiments to compare the proposed f-relstsvm and if-relstsvm methods with the baseline models and also perform statistical analysis to demonstrate the significance of the proposed models.we demonstrate the effectiveness of the proposed fuzzy membership assignment by comparing the proposed f-relstsvm and if-relstsvm with other baseline algorithms such as lstsvm, els-tsvm, rels-tsvm, and iflstsvmon different synthetic and real world imbalanced datasets.to assess the performance of the proposed if-relstsvm and f-relstsvm algorithms, we compare proposed algorithms to lstsvm, els-tsvm, iflstsvm, and rels-tsvm on 29 datasets from the keel repository. to deal with the class imbalance and noise simultaneously, we presented a projection based fuzzy membership assignment (pfma), and used in our proposed robust fuzzy energy-based least square twin support vector machine (f-relstsvm). 
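The computational point made above — that least-squares twin-SVM variants replace the QPPs of TWSVM with systems of linear equations, and that fuzzy-style per-sample weights can temper noise and class imbalance — can be sketched as follows. This is a generic weighted LSTSVM-style solver, not the exact F-RELSTSVM or IF-RELSTSVM formulation; the weighting scheme, regularisation term, and function names are assumptions.

```python
import numpy as np

def weighted_lstsvm_planes(A, B, sA, sB, c1=1.0, c2=1.0, reg=1e-6):
    """Fit two non-parallel hyperplanes in the least-squares twin-SVM style.

    A, B   : samples of the positive / negative class (rows are points).
    sA, sB : per-sample weights in [0, 1], standing in for fuzzy memberships
             that down-weight noisy points and rebalance the classes.
    Each hyperplane comes from a regularised linear system, not a QPP.
    """
    G = np.hstack([A, np.ones((A.shape[0], 1))])     # [A  e]
    H = np.hstack([B, np.ones((B.shape[0], 1))])     # [B  e]
    SA, SB = np.diag(sA), np.diag(sB)
    I = reg * np.eye(G.shape[1])

    # plane 1: close to (weighted) class A, pushed away from class B
    z1 = -np.linalg.solve(G.T @ SA @ G / c1 + H.T @ SB @ H + I,
                          H.T @ SB @ np.ones(H.shape[0]))
    # plane 2: close to (weighted) class B, pushed away from class A
    z2 = np.linalg.solve(H.T @ SB @ H / c2 + G.T @ SA @ G + I,
                         G.T @ SA @ np.ones(G.shape[0]))
    return z1, z2   # each vector stacks (w, b)

def predict(X, z1, z2):
    """Assign each point to the class whose hyperplane is nearer."""
    Xe = np.hstack([X, np.ones((X.shape[0], 1))])
    d1 = np.abs(Xe @ z1) / np.linalg.norm(z1[:-1])
    d2 = np.abs(Xe @ z2) / np.linalg.norm(z2[:-1])
    return np.where(d1 <= d2, 1, -1)
```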
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/466.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/466.txt new file mode 100644 index 0000000000000000000000000000000000000000..6fb071056d29bff90dff53c14eb64824eca1510c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/466.txt @@ -0,0 +1 @@ +advances in machine learning (ml) during the past decade have led to a significant performance improvement in a wide range of application with human-level performance being reached in many tasks. to a large extent, this success is a result of breakthroughs in deep learning (dl). however, adopting dl in most applications relies on the availability of large annotated datasets for training deep neural networks. the emergence of crowdsourcing data labeling platforms like amazon turk has made data annotation feasible for general applications. these platforms allow to benefit from the wisdom of crowd by hiring individuals from diverse backgrounds to contribute to the data annotation process with minimal training procedures. we can often correct mislabels using aggregation techniques and reach expert-level annotation accuracies rostami et al. . however, generating annotation datasets remains mohammad rostami is with the university of southern california. contact: rostamim@usc.edu. a challenging, expensive, and complex task in many applications. for instance, in the field of medicine, data annotation requires the expertise of trained physicians who are generally busy with vital tasks avramidis et al. , sun and rostami . similarly, in non-natural image modalities such as synthetic aperture radar (sar), data interpretation and annotation can only be effectively performed by trained experts who have years of experience rostami et al. . when dealing with graph-structured data, human intuition would become weak which makes model interpretability more challenging pope et al. ]. additionally, there are domains where data cannot be readily shared due to concerns related to security and privacy ?. as a result, crowdsourcing is not a viable solution for data annotation in these fields. the sensitive nature of the data and the associated risks in these applications necessitate alternative approaches to data annotation that ensure privacy and data confidentiality while making ml feasible.even if we are able to generate a high-quality annotated training dataset and then train a good model, its performance would degrade if we face distributional ai magazine. 00(000): 1-18; doi:10.1002/aaai.0001 the authors. association for the advancement of artificial intelligence.changes or drifts during the testing time after the initial training phase rostami . distributional drifts lead to a distribution discrepancy on the testing dataset, necessitating expensive, time-consuming, and everlasting model retraining which in tern requires continual data annotations. despite some behavioral similarities between deep neural networks and the nervous system morgenstern et al. , the human nervous system can tackle these challenges considerably better. these similarities suggest that perhaps, we can equip deep learning with mechanism that the nervous system uses for improved performance. for example, humans have the ability to learn many diverse problems efficiently in terms of training data points and, in some applications, much faster. 
in many cases, humans can learn a novel class by observing only a handful of examples and sometimes with no training instance, just by relying on high-level descriptions. moreover, they adapt to changes fast and can generalize their knowledge to unexplored domains more straightforwardly. in contrast, current ai approaches primarily do not offer such abilities. these abilities seem feasible because humans do not start learning from scratch every time they learn and use acquired and accumulated knowledge from past experiences when learning new problems. inspired by these abilities of natural intelligence, an important research direction in ai is to explore how an agent can store knowledge when it faces and learns multiple problems and then how it can use the acquired knowledge to improve learning quality/speed, and to overcome data scarcity in other related problems rostami . the hope is to broaden the applicability of ml to more domains and applications and develop intelligent agents that are more autonomous.to improve ai, we need to go beyond the classic setting of learning a single problem in isolation. in my research, i explore the concept of learning not just a single isolated machine learning problem, but leveraging the relationships between multiple problems in order to enhance the learning process and avoid redundant learning. traditionally, learning has been viewed as the search for a predictive function within the context of a single problem. however, this perspective fails to capture the potential benefits that can be gained from transferring knowledge between related problems. we can consider a probabilistic framework, where the learning ability is acquired through an identically distributed dataset consisting of samples drawn from an unknown distribution. in this context, the aim for transfer learning is to exploit the connections between distributions of different problems and utilize the acquired knowledge to improve learning outcomes across the problems. as a result, we leverage knowledge gained from one task to benefit another through updating the ml learning pipeline rostami .a prominent approach for knowledge transfer is to map data points originating from diverse distributions into a shared latent embedding space such that the acquired knowledge is more interpretable rostami . in this shared space, the relationships and similarities between the distributions are encoded as geometric distances between the data representations. in other words, the embedding space represents data in terms of meaningful features that allow transferring knowledge across different problems. by doing so, knowledge can be effectively transferred across different distributions, encompassing tasks rostami et al. , domains stan and rostami , and agents rostami et al. , rostami and eaton . the intermediate data representations capture the similarities between the distributions in a higher level, allowing for the transfer of knowledge based on high-level similarities or descriptions rostami et al. . for example, two classification tasks that have different input spaces but both share the same classes, may have aligned distributions in the embedding space. 
geometric notions play a vital role in modeling similarities, enabling the formulation of coupled optimization problems that incorporate objective functions based on geometric distances of representations.the above approach allows for a problem-level analysis, where multiple probability distributions can capture the underlying relationships between the problems. for example, if the embedding space is designed to be task-agnostic when two problems share the same classes, a classifier trained on one problem can generalize well to other related problems due to sharing similar distributions in the embedding space. additionally, by simultaneously learning several problems with a limited number of annotated data points, it becomes possible to leverage all available data across the domains effectively, leading to requiring less data to achieve a similar performance compare to single task learning or improve the learning speed rostami et al. , mirtaheri et al. . the shared embedding can also be used to relate the current task to past obtained experiences to learn it more efficiently, rather learning the task from scratch. by considering the interplay between various problems and leveraging the relationships between them, we can enhance the learning speed, generalization, and overall performance of the learning process using less data. this approach extends the traditional isolated problem learning paradigm and opens up possibilities for knowledge transfer and improved learning across diverse problems.in my research, i examine the utilization of representation learning in embedding spaces as a means of transferring knowledge to address a set of significant machine learning problems. specifically, i focus on the development of embedding spaces that effectively capture and represent data in a desired format. through my investigation, i found that despite variations in the transfer learning scenarios, such as the direction of knowledge transfer and the organization of data, they can benefit from this common perspective. my contributions to the field encompass two main areas of "continual learning" and "learning with limited labeled data" which will be survey in what follows. additionally, by simultaneously learning several problems with a limited number of annotated data points, it becomes possible to leverage all available data across the domains effectively, leading to requiring less data to achieve a similar performance compare to single task learning or improve the learning speedrostami et al. by leveraging the labeled samples from the target domain, the goal is to bridge the domain gap and facilitate knowledge transfer from the labeled target domain to the unlabeled source domain. .the goal in an unsupervised domain adaptation (uda) setting is to transfer knowledge from a source domain, where labeled data is accessible, to a distinct target domain, where labeled data is scarce. by aligning the statistical properties of the features for the source and target domains in the shared domain-agnostic embedding space, uda methods aim to reduce the distribution discrepancy and enable generalization of a classifier that receives its input from the shared embedding space and is trained using the source domain annotated data on the target domain.in classic uda, the goal is to adapt a model trained on labeled data from a source domain to perform well on an unlabeled target domain when data for both domains is accessible at the same time. 
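A minimal sketch of the alignment idea just described: source and target batches are embedded into a shared space, a classifier is trained on the labelled source features, and a discrepancy penalty pulls the two feature distributions together. The simple moment-matching loss below is a stand-in for richer metrics (MMD, optimal transport, or the GMM-based internal distributions mentioned elsewhere in this text); the network sizes and loss weight are arbitrary.

```python
import torch
import torch.nn as nn

def moment_matching_loss(fs: torch.Tensor, ft: torch.Tensor) -> torch.Tensor:
    """Penalise the gap between source and target feature statistics.

    fs, ft: (batch, dim) embeddings of a source batch and a target batch.
    Matching means and per-dimension variances is a crude proxy for the
    distribution discrepancy that UDA methods minimise.
    """
    mean_gap = (fs.mean(0) - ft.mean(0)).pow(2).sum()
    var_gap = (fs.var(0) - ft.var(0)).pow(2).sum()
    return mean_gap + var_gap

encoder = nn.Sequential(nn.Linear(64, 128), nn.ReLU(), nn.Linear(128, 32))
classifier = nn.Linear(32, 10)
opt = torch.optim.Adam(list(encoder.parameters()) + list(classifier.parameters()), lr=1e-3)

def adaptation_step(xs, ys, xt, lam=0.1):
    """One UDA step: supervised loss on source data + alignment loss on both domains."""
    fs, ft = encoder(xs), encoder(xt)
    loss = nn.functional.cross_entropy(classifier(fs), ys) \
         + lam * moment_matching_loss(fs, ft)
    opt.zero_grad()
    loss.backward()
    opt.step()
    return loss.item()
```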
although source-free domain adaptation presents additional challenges due to the lack of labeled source data, it also presents opportunities for the development of innovative algorithms and techniques that can adapt source-trained models to new domains using only target domain data. by aligning the target domain distribution with the estimated gmm, we enable effective knowledge transfer from the source domain to the target domain without direct access to the source domain data. instead of treating each task in isolation, cl takes into account the temporal nature of the data, allowing ai systems to adapt and incorporate new knowledge while preserving previously learned information when encountering new situations. transfer learning allows the model to transfer knowledge and representations learned from previous tasks to accelerate the learning process for new tasks. to implement experience replay, we store a representative subset of training datasets for each past learned tasks in a memory buffer and then replay them back along with the new task data when a model is updated to learn a new task. when updating the model during the learning process, we specifically aim to align the internal distribution of the current task with the estimated gmm obtained from the previous time step. by incorporating this approach into our algorithm, we enable learning and expanding knowledge about continually emerging concepts in an incremental learning scenario. the experimental results we obtained demonstrate the effectiveness of our proposed strategies in adapting the model to new events while mitigating the occurrence of catastrophic forgetting. clif is designed to address the learning process where a model sequentially learns from a diverse range of nlp tasks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/467.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/467.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e0a40d4af3917751d7f4056b00e3bd2c5aab92e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/467.txt @@ -0,0 +1 @@ +the covid-19 pandemic, caused by the novel coronavirus sars-cov-2, has brought about a multitude of health challenges and continues to be a subject of extensive research worldwide. beyond its immediate respiratory manifestations, covid-19 has been associated with a wide array of health issues that extend into the post-acute phase, affecting various organ systems . among these, the cardiovascular system has garnered significant attention due to its susceptibility to infection and the potential for severe complications, including myocardial ischemia and atherosclerosis . additionally, various medical conditions, such as cancer , preeclampsia , and infectious diseases like urinary tract infections , continue to be prevalent, further complicating the healthcare landscape. this introduction serves as a gateway to the exploration of the intricate relationship between covid-19 and various health conditions, with a specific focus on the cardiovascular system, cancer, and infectious diseases. it also sets the stage for understanding the broader context of our research endeavors. the objective of this study is to investigate the effects of covid-19 on cardiovascular health, cancer incidence, and the prevalence of infectious diseases in the context of the iraqi population. 
to achieve this, we have leveraged a range of data sources, including clinical trials , longitudinal studies , and molecular investigations . our research encompasses a diverse array of medical conditions and seeks to shed light on the multifaceted consequences of covid-19. as we embark on this exploration, we will draw upon a rich body of literature that delves into the pathophysiology and clinical outcomes of these health conditions . in this context, we aim to provide a comprehensive overview of the impact of covid-19 on cardiovascular health, cancer incidence, and infectious diseases. our investigation spans several years, primarily focusing on data collected during 2021, 2022, and 2023, to offer a current and evolving understanding of these interrelated health domains. this research is underpinned by an array of scientific studies and clinical trials conducted within iraq , ensuring the relevance and applicability of our findings to the local healthcare landscape. in the following sections, we will delve into the details of our research methodology, data sources, analytical techniques, and key findings. through this comprehensive examination, we endeavor to contribute to the growing body of knowledge surrounding covid-19's far-reaching impact on human health.the covid-19 pandemic, caused by the novel coronavirus sars-cov-2, has brought about a multitude of health challenges and continues to be a subject of extensive research worldwide. beyond its immediate respiratory manifestations, covid-19 has been associated with a wide array of health issues that extend into the post-acute phase, affecting various organ systems. among these, the cardiovascular system has garnered significant attention due to its susceptibility to infection and the potential for severe complications, including myocardial ischemia and atherosclerosis. this introduction serves as a gateway to the exploration of the intricate relationship between covid-19 and various health conditions, with a specific focus on the cardiovascular system, cancer, and infectious diseases. the objective of this study is to investigate the effects of covid-19 on cardiovascular health, cancer incidence, and the prevalence of infectious diseases in the context of the iraqi population. in this context, we aim to provide a comprehensive overview of the impact of covid-19 on cardiovascular health, cancer incidence, and infectious diseases.inferential statistics: inferential statistical methods, such as t-tests and chi-square tests, were utilized to compare the prevalence of specific health conditions among covid-19 patients and non-covid-19 control groups.the covid-19 pandemic has had a profound impact on public health worldwide, leading to a surge in research across various domains to understand the virus's implications and its aftermath. in this discussion, we delve into the findings of our study, focusing on the effects of covid-19 on patients' health conditions, considering factors such as age, comorbidities, and specific complications post-infection. post-covid-19, a substantial proportion of patients experienced health complications, including respiratory, cardiac, neurological, hematological, and renal complications (table3). these findings corroborate existing literature on the diverse health impacts of covid-19, which extend beyond the acute phase of the infection. 
the prevalence of these complications underscores the need for comprehensive post-covid-19 care and monitoring to address the long-term health implications(40)(41)(42)(43)(44). these comorbidities have been consistently associated with increased susceptibility to severe covid-19 and its complications, including respiratory issues(46)(47)(48).in conclusion, our study highlights the prevalence of comorbidities among covid-19 patients and the significant burden of post-covid-19 health complications, particularly among older individuals.the study contributes to the evolving body of knowledge on the long-term health effects of covid-19 and provides valuable insights for healthcare professionals and policymakers in developing comprehensive strategies for post-covid-19 care and support. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/468.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/468.txt new file mode 100644 index 0000000000000000000000000000000000000000..312459346d49c661ccddca0fd26d61ed6c8d99b5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/468.txt @@ -0,0 +1 @@ +quantization is a method for mapping continuous values to a set of discrete values. the goal of neural network quantization is to perform computations and store tensors at lower bit-widths than floating point precision to reduce model size and inference latency while maintaining model accuracy, which allows for deploying dnns on platforms with constrained computational resources, e.g.: real time inference on mobile devices. quantization can be performed during training or inference. in this paper we focus on quantized inference, specifically post-training quantization, which quantizes a full precision trained model without the need for re-training or fine-tuning.quantized inference can be either simulated or integer-only, and in this paper we focus on simulated quantization, where the quantized model parameters are stored in low-precision, but the mathematical operations on them (e.g. matrix multiplications and additions) are performed with floating point arithmetic . in tensorflow, pytorch, and huggingface (qdqbert model), simulated quantization is referred to as fake quantization. this means that the dnn parameters are first quantized from f32 to, for example, int4, and then dequantized back to f32 to perform the forward pass executed during inference. we show that the roundtrip process of quantizing and dequantizing the model parameters leads to roundoff error, which may lead to numerical instability.similarly to prior works, which have shown that both biases and activations are more sensitive to quantization and are best kept in full precision or quantized with higher bit-widths , we show that some weights are more sensitive than others which should be reflected on their quantization bit-width. to that end we propose mixquant, a search algorithm that finds the optimal quantization bit-width from int2, int3, int4, int5, int6, int7, and int8 for each layer weight based on roundoff error and can be combined with any quantization method as a form of pre-processing optimization. we show that combining mixquant with brecq , a state-of-the-art quantization method, yields better quantized model accuracy than brecq alone. additionally, we combine mixquant with vanilla asymmetric quantization to show that mixquant has the potential to optimize the performance of any quantization technique.mixquant has three main benefits. 
first, mixquant is a component of the quantization process, which can be leveraged to find optimal quantization mixed precision bit-widths that can be plugged into any quantization method to optimize its performance. second, mixquant is linear and runs in a matter of seconds, which makes it practical. third, combining mixquant with brecq, a state-of-the-art quantization method yields better quantized model accuracy than brecq alone, omse , adaround , adaquant , and bit-split . to that end we propose mixquant, a search algorithm that finds the optimal quantization bit-width from int2, int3, int4, int5, int6, int7, and int8 for each layer weight based on roundoff error and can be combined with any quantization method as a form of pre-processing optimization. this approach is similar to mixquant ; however, mixquant finds the optimal quantization bit-widths based on quantization error (qe) minimization, while adaquant treats the bit-width as a constant and quantizes all weights and activations using the same bit-width (either int8 or int4 ).develop a method based on constraining all quantization levels as the sum of powers-of-two terms,propose a bit-split and stitching framework (bitsplit),study the effect of quantization on the structure of the loss landscape,develop aciq-mix, a 4 bit convolutional neural network quantization, andperform zero-shot quantization zeroq based on distilling a dataset that matches the input data distribution. there are only a few prior works that focus on mixed precision quantization since most focus on single precision quantization, where the quantization bit-width of all weights are uniform and therefore; treated as a constant. q = ( t o r c h . t e n s o r ( [ -1 .mixquant is a quantization scheme that relies on mixed precision to find the bit-widths of individual layer weights that minimize roundoff error and therefore, minimize model accuracy degradation due to quantization. the layer-wise qe is calculated as the mean squared error (mse) between the f32 model weights and the weights that have been dequantized following an int quantization (any quantization method can be used at line 8 in algorithm 1) to capture the information loss due to roundoff error caused by quantization.approximating roundoff error we use the quantization error, qe, (measured as the mse between f32 and dequantized weights) to approximate the impact of quantization on model accuracy for three reasons. mixquant with asymmetric quantization in addition to brecq, we combine mixquant with asymmetric quantization and compare its quantized model accuracy with f32 and int8 baselines.5 seconds, which represents a linear increase in runtime. lowering the quantization bit-width of conv layers with a 3x3 kernel has the most adverse impact on top-1 accuracy in shallower resnet architectures, while in deeper ones it is the conv layers with a 1x1 kernel followed by conv layers with a 3x3 kernel that impacts model accuracy the most. while the reason that the conv layers with a 3x3 kernel and 1x1 kernel are the most sensitive is the fact that those layer types account for the highest number of layers in resnet, we can still conclude that different layer types have different sensitivity to quantization bit-width measured as the impact on the overall model quality. (c) resnet50 (e) resnet152 (f) resnext50 32x4d weights quantization sensitivity by layer position in addition to the layer type, we investigate if the position of a layer has an impact on quantization sensitivity of weights. 
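A minimal sketch of the simulated ("fake") quantization roundtrip and of the layer-wise quantization error described above, i.e. the MSE between the f32 weights and their quantize–dequantize counterparts. Uniform asymmetric quantization is assumed, and the toy bit-width selection rule at the end (smallest width meeting an error budget) is only illustrative of the idea of per-layer bit-width search, not MixQuant's actual algorithm.

```python
import torch

def fake_quantize(w: torch.Tensor, bits: int) -> torch.Tensor:
    """Uniform asymmetric quantize-dequantize roundtrip (simulated quantization).

    The weights are mapped onto a `bits`-bit integer grid and immediately
    mapped back to float32, so the forward pass stays in floating point but
    carries the roundoff error introduced by the low-precision grid.
    """
    qmin, qmax = 0, 2 ** bits - 1
    scale = (w.max() - w.min()).clamp(min=1e-8) / (qmax - qmin)
    zero_point = qmin - torch.round(w.min() / scale)
    q = torch.clamp(torch.round(w / scale + zero_point), qmin, qmax)
    return (q - zero_point) * scale          # dequantized f32 weights

def layer_qe(w: torch.Tensor, bits: int) -> float:
    """Quantization error of one layer: MSE between f32 and dequantized weights."""
    return torch.mean((w - fake_quantize(w, bits)) ** 2).item()

# toy per-layer choice: smallest bit-width whose QE stays under a budget
w = torch.randn(256, 256)
qe_by_bits = {b: layer_qe(w, b) for b in (2, 3, 4, 5, 6, 7, 8)}
budget = 1e-4
feasible = [b for b, qe in sorted(qe_by_bits.items()) if qe <= budget]
chosen = min(feasible) if feasible else 8    # fall back to int8 if nothing meets the budget
```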
we measure the relative quantization error (rqe) of individual layers for the following bit-widths: 8, 7, 6, 5, 4, 3, 2, and define the rqe as rqe = avg(( f 32 wdequantized w)/ f 32 w), where w is the weights vector and the avg operation returns a scalar that represents the mean of all elements in a vector. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/469.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/469.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ae6e6e53a0b714b60beb21cfeeac58f87f31ddc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/469.txt @@ -0,0 +1 @@ +historically, when developing algorithms for the classification task of machine learning, the main focus has been on predictive performance, with a moderate emphasis on the simplicity of classification models (classifiers) through metrics such as the size of models produced -as a proxy to the more complex concept of comprehensibility or interpretability , . recently, the fairness of machine learning algorithms has come under increasing scrutiny , , . for example propublica investigated the compass recidivism algorithm that was used to categorise the risk of criminals going on to re-offend, finding that the algorithm was much more likely to classify black defendants as higher risk .fairness is a complex concept and no single measure can capture all the nuances of what it means to be fair. because of this, the field has created a large number of measures that each capture different notions of what it means to be fair . in fact some of these measures contradict each other and optimising for one measure can be at the detriment of another , .in this paper, we tackle the feature selection task, which consists of selecting a subset of features for a classifier in a pre-processing step, i.e., before learning the final classifier.the objective is to select a subset of features that optimises both the predictive accuracy and the fairness of the models (classifiers) learned by the subsequent classification algorithm.the main contribution of the paper is to compare two types of recently proposed genetic algorithms (gas) for multiobjective feature selection (optimising accuracy and fairness): one based on the pareto dominance concept and another based on the concept of lexicographic optimisation . this type of comparison is new and important because, although the pareto dominance approach is much more often used in the multi-objective ga literature than the lexicographic approach, the latter produced better results overall in our experiments, which suggests that the lexicographic approach deserves more attention in the literature.the rest of this paper is organised as follows. first, section ii presents the background on fairness and how it applies to machine learning, and some background on multi-objective optimisation. section iii discusses the two genetic algorithms (gas) for fair feature selection compared in this work. section iv describes the experimental setup to perform a controlled comparison between these two gas. section v reports the experimental results, before an analysis from both the lexicographic and pareto perspectives in section vi. 
section vii presents the conclusions.this paper compares two recently proposed multi-objective genetic algorithms (gas) for fair feature selection in the classification task: our lexicographic ga for fair feature selection (lgaffs)and a pareto dominance-based feature selection gathat uses the well known nsga-ii algorithm as its base.however, in order to make the comparison between pgaffs and lgaffs as controlled and as "fair" as possible, we modified the original ga inin two ways: (a) we replaced its original accuracy and fairness measures (fitness functions) by the same accuracy and fairness measures used by lgaffs; and (b) we extended the ga into use the same population initialisation procedure used by lgaffs, which promotes higher diversity of individuals.note that comparing the performance of these two gas in a "fair" way is not trivial because these two gas are based on two different assumptions about the relative importance of different objectives: lgaffs's lexicographic approach assumes that predictive accuracy has priority over fairness, whilst pgaffs's pareto approach does not make this assumption, implicitly treating all objectives with the same priority. in this scenario, where predictive accuracy has priority over fairness, we reduce the pareto set of solutions output by pgaffs to a single solution (individual) through a post-processing lexicographic filter, where that pareto set undergoes the same lexicographic ranking that is performed for implementing elitism during each iteration of lgaffs. then, we measure the number of solutions in the pareto set that are dominated (in the pareto sense) by the single lexicographic solution and, vice-versa, the number of solutions in the pareto set that dominate the single lexicographic solution. so, despite the prioritisation of accuracy over fairness incorporated in lgaffs, it is interesting that this lexicographic approach still managed to achieve fairness results statistically equivalent to (and even slightly better regarding number of wins) the fairness results obtained by the pareto approach used by pgaffs.as shown in tableiii, in 18 of the 21 classification problems (combinations of a dataset and sensitive feature), the number of pareto solutions produced by pgaffs that are dominated by the lexicographic solution produced by lgaffs (column lgaffs domination) is greater than the number of pareto solutions that dominate the lexicographic solution (column pgaffs domination).it should be noted that in nearly all classification problems (except the combination of dataset adult and sensitive feature race), the number of solutions in the pareto set returned by pgaffs that neither are dominated by nor dominate the lexicographic solution (column no domination) is greater than the final column of tableiiireports the proportion of individuals in the pareto front returned by pgaffs that have an accuracy equal to or better than the accuracy of the individual returned by the lexicographic ga (lgaffs).we have compared two gas for fair feature selection (in the classification task of machine learning) using two different multi-objective optimisation approaches: (a) the lgaffs algorithm, using the lexicographic approach; and (b) the pgaffs algorithm, which is a somewhat modified version of the nsga-ii proposed in, using the pareto approach.we compared the results of these two gas from two perspectives: a lexicographic perspective, representing the scenario where the user considers that accuracy has priority over fairness (a prioritisation 
incorporated into lgaffs); and a pareto perspective, where the user does not assign any priority to the objectives of accuracy and fairness (the principle underlying pgaffs).recall that, intuitively, the lexicographic evaluation perspective was expected to lead to better results for the lexicographic ga, whilst the pareto evaluation perspective was expected to lead to better results for the pareto ga, since each of those gas was designed specifically with the concepts of the corresponding evaluation in mind.in the evaluation of the results from the pareto perspective, we evaluated the results in terms of optimising both objectives together, based on the concept of pareto dominance (without prioritising any objective), by counting how often the solutions produced by one version of the ga dominates or are dominated by the solutions produced by the other version of the ga. in these cases, the solution returned by the lexicographic ga dominated (in the pareto sense) the solutions returned by the pareto ga much more often than the solutions returned by the pareto ga dominated the solutions returned by the lexicographic ga.in summary, in the large majority of cases, the lexicographic ga and the pareto ga have equivalent performance in terms of pareto dominance, but in the minority of cases where their pareto-dominance performance differs, surprisingly, the lexicographic ga outperformed the pareto ga.we hypothesise that the lexicographic approach was a more efficient search algorithm as its additional knowledge of the relative importance of the objectives concentrated its search in one area of the pareto front, whereas the pareto approach tried to find the entire pareto front, compromising its ability to find optimal solutions in all areas. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/47.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/47.txt new file mode 100644 index 0000000000000000000000000000000000000000..dfd924cbb01bcfd42dd21e5f2bb1dc9b335d443a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/47.txt @@ -0,0 +1 @@ +consider the data in table 1. they represent the results of an artificial case study from mueller and pearl (2022), where patients affected by a deadly disease test a drug that might help them. the table reports two independent studies carried out on different groups of patients: a randomised control trial (first eight rows) and an observational study (last eight rows). from those data, mueller and pearl (2022) compute a typical counterfactual such as the probability of necessity and sufficiency (pns), which is the proportion of people that would recover if treated and die otherwise. they can do this analytically by conditioning on gender. for males they eventually obtain that pns ∈ if only the randomised trial is considered, whereas the sharp value pns = 0.49 is obtained when observational data are also taken into account. analogous results are obtained for females. this example illustrates the main traits of the problem we are going to address in this paper: first and foremost that the computation of counterfactuals is often only partially identifiable, in the sense that we can at best obtain probabilistic bounds for them; but also that joining observational and randomised studies (under the guidance of a so-called structural causal model) can strengthen the results, i.e., narrow the bounds. 
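For the bounding step mentioned in the passage above, the sketch below computes the standard Tian–Pearl interval for PNS from interventional quantities alone; as the passage notes, combining randomised and observational data under a structural causal model can narrow such an interval further, but that tightening is not shown here. The numeric inputs are illustrative and are not the values of table 1.

```python
def pns_bounds_experimental(p_y_do_x: float, p_y_do_xprime: float):
    """Tian-Pearl bounds on the probability of necessity and sufficiency (PNS)
    from interventional quantities only:

        max(0, P(y|do(x)) - P(y|do(x')))  <=  PNS  <=  min(P(y|do(x)), 1 - P(y|do(x'))).
    """
    lower = max(0.0, p_y_do_x - p_y_do_xprime)
    upper = min(p_y_do_x, 1.0 - p_y_do_xprime)
    return lower, upper

# illustrative recovery probabilities under treatment / no treatment from a randomised trial
print(pns_bounds_experimental(p_y_do_x=0.49, p_y_do_xprime=0.21))
```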
such an se induces the degenerate cpt p (y |x ) via p (y|x) := f (x) = y for each x ∈ ω x and y ∈ ω y , where • denotes the iverson brackets that take value one if the statement inside the brackets is true and zero otherwise. a bn induces a joint pmf p (x ) that factorises as p (x) = n i =1 p (x i |pa x i ), for each x ∈ ω x , where (x i , pa x i ) ∼ x, i. a collection of ses { f v } v ∈v such that, for each v ∈ v the input variables of f v are in (u ,v ), is called a partially specified structural causal model (pscm).for each u ∈ ω u and v ∈ ω v , (u, v, pa v ) ∼ (u, v ) and where pa v are the parents of v according to g (i., the inputs of se f v ), while θ u denotes the true but unknown chances for u = u, to be considered for each u ∈ ω u and u ∈ u . the c-components of m are the elements of the partition {v (c) } c∈c of v , where v (c) denotes the endogenous nodes in g c , for each c ∈ c(tian, 2002). moreover, for each c ∈ c , let w (c) denote the union of the endogenous parents of the nodes in v (c) and v (c) itself. finally, for each v ∈ v (c) , obtain w v by removing from w (c) the nodes topologically following v and v itself (note that in the notation we dropped the index c as this can be implicitly retrieved from v ). in an fscm or pscm m, given v ∈ v and v ∈ ω v , do(v = v ) simulates a physical action on m forcing v to take the value v . notation m v is used for such a modified model, whose graph is obtained by removing from g the arcs entering v , and for which evidence v = v is considered. in an fscm m, given v,w ∈ v and v ∈ ω v , p (w|do(v )) denotes the conditional probability of w = w in the post-intervention model, i. in mathematical parlance, if w are the queried variables, v ′ the observed ones and v ′′ the intervened ones, we write the query by p (w v ′′ |v ′ ) with possibly v ′ ∩v ′′ = . the exogenous variable u 1 acts as a confounder for v 1 and v 3 , this being sufficient to have results consistent with those ofmueller and pearl (2022), which are obtained by conditioning on v 2 .with (u, v, pa(v )) ∼ (u, v ) and θ u ∼ θ u .for each pa v ∈ ω pa v , w ∈ ω w , and v ∈ v , where f v is the se of v in m, and, k(w) is the index of the interventional dataset associated with w, for each w = w , and v w ∈ ω v is the state of v appearing in w. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/470.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/470.txt new file mode 100644 index 0000000000000000000000000000000000000000..590eea730135fc99f5ad174cf8874713b707a466 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/470.txt @@ -0,0 +1 @@ +in this historical period, more and more online platforms, websites, social networks, and forums are being used to share and discuss various topics. these can be a supportive tool where people with illnesses interact, presenting their daily problems, symptoms, and difficulties, by speaking through the forum, they feel free to express their thoughts without fear of judgement from the outside world. in fact, a simple search on the internet is enough to realise the huge number of existing forums on the most diverse topics, which, especially in the medical field, relating to illnesses and discomforts, can lead to a huge number of first-hand testimonies. these data remain unused even though they have great potential and could also find application in the medical field.many applications of natural language processing (nlp) techniques for data mining can be applied. 
it can be observed that in the current literature the application of question answering models (qa) is widely used. qa models can answer questions given some context, and sometimes even without any context in an open-domain way. they can extract answer phrases from paragraphs, paraphrase the answer generatively, or choose one option out of a list of given options. it all depends on the dataset that was used for training, on the problem it was trained for and to some extent the neural network architecture .the qa models need to understand the structure of the language, have a semantic understanding of the context and the questions, have an ability to locate the position of an answer phrase, and much more. it is difficult to train models that perform these tasks. google's bert is a paradigm shift in natural language modelling, in particular because of the introduction of the pre-training and fine-tuning paradigms: after pre-training in an unsupervised way on a massive amount of text data, the model can be rapidly fine-tuned on a specific downstream task with relatively few labels. domain adaptation for a qa model is possible by creating a specific qa dataset, that contains many paragraphs of text, several questions related to the paragraphs, their answers, the index of the beginning of the answers in the paragraph and training the model on this domain-specific data . through bert models pre-training on schizophrenia data, it is possible to develop a qa model that can be used to obtain specific answers to questions concerning the limitations, problems, symptoms and specific information of patients with schizophrenia who periodically participate in forums dedicated to them.the construction of a qa system in the medical field presents unique challenges: i) obtaining large datasets related to the domain of interest. obtaining medical data for analysis or extraction is not easy, given the difficulty of collecting this type of data, the specificity required for certain diseases, privacy regulations and the presence of bias in them. ii)addressing the creation of an effective qa dataset. developing a qa dataset presents several complexities, such as the identification of topics of interest in the corpus, the annotation of questions and answers relevant to the domain of interest, with the possibility of having multiple answers. iii) train a qa model in an efficient way. to achieve good results with a qa model, it is crucial to obtain a good quality qa dataset. generally, for this task it is relied on domain experts who manually annotate relevant questions and answers, but this process can be time-consuming and laborious. failing to obtain a trained qa model with a high variance of relevant questions and answers. inadequate pre-processing of the corpus could be problematic for data extraction via qa model. domain adaptation for a qa model is possible by creating a specific qa dataset, that contains many paragraphs of text, several questions related to the paragraphs, their answers, the index of the beginning of the answers in the paragraph and training the model on this domain-specific data. through bert models pre-training on schizophrenia data, it is possible to develop a qa model that can be used to obtain specific answers to questions concerning the limitations, problems, symptoms and specific information of patients with schizophrenia who periodically participate in forums dedicated to them. 
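as a concrete illustration of the squad-style format implied above (paragraphs of text, questions, answer texts and answer start indices), a single record of such a domain-specific qa dataset could look as follows; the field names follow the widely used squad convention, and the forum text, question and identifiers are invented for illustration.

```python
# Hypothetical example of one SQuAD-style record for a domain-specific QA dataset.
# Each answer stores its text and the character index where it starts in the context.
context = ("i have been taking my medication for two months and the voices "
           "are quieter, but i still struggle to concentrate at work.")
answer_text = "struggle to concentrate at work"

example_record = {
    "context": context,
    "qas": [{
        "id": "schizo-forum-0001",
        "question": "what symptom does the user still struggle with?",
        "answers": [{
            "text": answer_text,
            # Computed rather than hard-coded, so the index always matches the span.
            "answer_start": context.find(answer_text),
        }],
    }],
}
```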
developing a qa dataset presents several complexities, such as the identification of topics of interest in the corpus, the annotation of questions and answers relevant to the domain of interest, with the possibility of having multiple answers. we additionally show how to set up a pipeline for the use of a qa model, how whit the use of the retriever, a filter that can quickly examine the entire archive of documents and pass the most relevant documents to the response, a significant improvement in efficiency of the model can be achieved. we empirically demonstrate how it is possible to boost the performance in the schizophrenia field of those model by finetuning with the schizophrenia qa dataset, reaching state-of-the-art in the field of mental issues qa models.the reminder of the paper is structured as follows: in section 2, recent relevant works are discussed, showing the problems related to the medical domain datasets and the data mining of medical data by qa models.relevant to our work is work offering methods for data mining in the medical field, in particular work demonstrating techniques for obtaining large, non-biased medical datasets, setting up qa datasets and implementing qa models. one of the most interesting methods is presented in, which used qa forums to create a qa dataset, selected questions in the medical-biological field on "reddit" and used the answers with the highest score. it was therefore decided to base the construction of our qa dataset specific to the schizophrenia domain on the identification of questions and answers to be annotated based on the results of the lda method for topics analysis. as shown ina qa dataset construction was performed for the covid-19 and trained the covid-qa model, but it use of very long text documents and none n-way answers to singles questions. we illustrate how the training of bert models was conducted for the schizophrenia domain and the experiments conducted using three different models and fine-tuned version for the schizophrenia domain, including a model already pre-trained on bio-medical tasks: distilbert, roberta-base-squad2and biobert.in this way, it was possible to base the annotation of questions and answers in the qa dataset on the results of topics analysis, speeding up and making the annotation process faster. the roberta model was pretrained on the reunion of five datasets: bookcorpus a dataset consisting of 11,038 unpublished books; english wikipedia excluding lists, tables, and headers; cc-news a dataset containing 63 million english news articles crawled between september 2016 and february 2019; openwebtext, an opensource recreation of the webtext dataset; stories, a dataset containing a subset of commoncraw data filtered to match the story-like style of winograd schemas. the models were trained on 900 questions and answers annotated on the specific domain qa dataset for schizophrenia.the use of a noise-free data source specific not only to the medical macro domain, but also to the individual disease (in this case schizophrenia), it is of crucial relevance for the qa model. 
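the retriever-plus-reader pipeline mentioned above can be sketched in a few lines: a lightweight lexical retriever first narrows the archive of forum posts down to the most relevant documents, and only those are passed to the extractive qa model. the sketch below uses a tf-idf retriever and the hugging face pipeline api with the roberta-base-squad2 checkpoint named in the text; the forum posts are placeholders, and in the paper's setting the reader would be the checkpoint fine-tuned on the schizophrenia qa dataset.

```python
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from transformers import pipeline

# Toy document store standing in for the scraped forum posts.
documents = [
    "since starting the new medication i sleep better but feel very tired at work.",
    "my brother was diagnosed with schizophrenia last year and hears voices at night.",
    "the support group meets every tuesday and it really helps with my anxiety.",
]

# Simple lexical retriever: rank documents by TF-IDF cosine similarity to the question.
vectorizer = TfidfVectorizer().fit(documents)
doc_matrix = vectorizer.transform(documents)

def retrieve(question, top_k=2):
    scores = cosine_similarity(vectorizer.transform([question]), doc_matrix)[0]
    ranked = scores.argsort()[::-1][:top_k]
    return [documents[i] for i in ranked]

# Extractive reader; a fine-tuned domain checkpoint would replace the generic one here.
reader = pipeline("question-answering", model="deepset/roberta-base-squad2")

question = "what symptom does the brother experience?"
for context in retrieve(question):
    print(reader(question=question, context=context))
```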
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/471.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/471.txt new file mode 100644 index 0000000000000000000000000000000000000000..3e5136c2bcdbe9b0f4f083c98200b2d3356df195 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/471.txt @@ -0,0 +1 @@ +deep neural networks perform well at inference time when test data comes from the same distribution as training data. however, they become inaccurate when there is a distribution shift . this distribution shift can be caused by natural variations or corruptions . test-time adaptation (tta) aims at addressing this problem by adapting a model pre-trained on source data to make better predictions on shifted target data . in this work, we focus on the particular case of fully test-time adaptation (fully tta) . in this setting, the adaptation is done source free and relies only on: i) a model pre-trained on data from a source domain and ii) unlabeled test data from a shifted target domain. separating the training phase from the adaptation phase is particularly relevant for privacy-oriented applications where the training data is not available or can not be disclosed. fully tta is also online. test data is received as a continuous stream and the in this work, we choose to focus on small batches (16 and below, white zone). as the batch size decreases, the model performances remain stable until a batch size of 32 and then drops significantly for methods running on resnet50-bn. results reported are averaged over 15 corruptions and 3 runs. confidence intervals are too small to be displayed.model adaptation is done on-the-fly as data is received. this makes the setup more realistic and closer to real-world "inthe-wild" scenarios where information about potential distribution shifts or about the quantity of data to be received is not necessarily available. most of the recent solutions proposed to address fully tta are follow-ups of seminal work tent and aim at solving problems inherent to the online and unsupervised aspect of fully tta. for example, deal with the problem of the class imbalance data stream, improve the quality of the predictions used to adapt a model by selecting samples with a low entropy or leveraging the predictions of augmented samples and investigate different normalization to stabilize the adaptation arxiv:2310.02416v2 9 nov 2023 process. however, most of the tricks and techniques are presented in combination with others, which makes it difficult to identify their impact on the final model performance. some techniques might already help when applied alone whereas others might only work or work better in combination with other tricks. as this area of research is very active and developing fast, we aim in this study at disentangling the impact of some techniques recently proposed and evaluate objectively their contribution to the performance of fully tta models. we also propose possible improvements in specific cases.contribution. to address the fully test-time adaptation problem, we analyzed the following techniques: i) usage of batch renormalization or batch-agnostic normalization ii) class re-balancing iii) entropy-based sample selection iv) temperature scaling. those analyses were made considering small batch sizes (16 and below), which are closer to the potentially uncontrollable batch sizes of real-world scenarios. 
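most of the tricks analysed below build on the tent-style adaptation step mentioned above, which can be sketched as follows: at test time, only the affine parameters of the normalisation layers are updated, using the entropy of the model's own predictions on the incoming batch as the loss. this is a minimal pytorch sketch under those assumptions, not the authors' exact implementation.

```python
import torch
import torch.nn as nn

def collect_norm_params(model):
    """Return the affine parameters of normalisation layers (the only ones adapted)."""
    params = []
    for m in model.modules():
        if isinstance(m, (nn.BatchNorm2d, nn.GroupNorm, nn.LayerNorm)):
            for p in (m.weight, m.bias):
                if p is not None:
                    p.requires_grad_(True)
                    params.append(p)
    return params

def prediction_entropy(logits):
    log_probs = logits.log_softmax(dim=1)
    return -(log_probs.exp() * log_probs).sum(dim=1)

def tent_step(model, batch, optimizer):
    """One online adaptation step: minimise prediction entropy on the test batch."""
    logits = model(batch)
    loss = prediction_entropy(logits).mean()
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    return logits.detach()

# Usage sketch (model and test_stream are placeholders):
# params = collect_norm_params(model)
# optimizer = torch.optim.SGD(params, lr=1e-3, momentum=0.9)
# for batch in test_stream:
#     predictions = tent_step(model, batch, optimizer)
```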
our experimental results show that those techniques are already boosting the performance at test time when used alone, but that combining all of them leads to the best classification accuracy compared to a vanilla tent method and 2 recent state-of-the-art methods on 4 different datasets. additionally, to the accuracy improvement, the selected techniques also bring other interesting benefits like higher and more stable performance with small batch sizes and a reduced computational load by adapting the model with a reduced set of selected data.the remainder of the paper is structured as follows. we conduct a literature review in section 2. then we analyze each trick separately in a different section: architecture design in sec. 4, class rebalancing in sec 5, sample selection in sec. 6 and network calibration in sec. 7 before showing results on combinations of tricks in sec. 8 and results on other datasets in sec. 9. finally, we conclude about the presented work in sec. 10.in our experiments, we followand use the following architectures: i) a resnet50 with batchnorm layers (resnet50-bn) ii) a resnet50 with groupnorm layers (resnet50-gn) iii) a vitbase/16 with layernorm layers (vitbase-ln) iv) to complete our pool of models to compare, we also include a variant of resnet50-bn where batch normalization is replaced by batch renormalization (resnet50-bren).2, we observe that the performance of tent method on a resnet50-bn architecture is dropping when the batch size is becoming small, with a particularly low performance when the batch size is 2 (5. however, we see that using batch renormalization instead of standard batch normalization improves the performance of a resnet50 model and avoids a complete collapse of the model when the batch size is 1.2, we observe that tent performance on using a batch renormalization layer leads to better performance than using a vanilla batch normalization. in this case, there is no guarantee that classes will appear in a balanced way or that different classes will appear in a given batch, especially when the batch size becomes much smaller than the total number of classes in the dataset. we consider small batch sizes already as a factor of online class imbalance as not all classes can be present in the same batch. batch normalization does not seem to be a suitable normalization method when the test set is unbalanced ii) the performances of tent and sar are more stable when the imbalance factor varies on the resnet50-gn architecture. performances are stable for all methods when the imbalance factor increases for a batch size of 16 or 8 but decrease when the imbalance factor increases for lower batch sizes.3are that group normalization and layer normalization are less sensitive than batch normalization to imbalance classes and that even if dot and sar are both performing better than tent, the sample selection of sar yields more stable performances in the case of small batch sizes and stronger class imbalance factor. on resnet50-gn and vitbase-ln, the classification accuracy remains stable when the batch size decreases for all models, dot yielding the best results except when the batch size is 1.4are that architectures with group or layer normalization are more suitable to handle small batch sizes and that the class rebalancing method dot is performing better than the sample selection method sar for small batch sizes greater than 1. so, when the batch size is 1, the sum of the weights of the batch is equal to the weight of the single sample of the batch. 
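the entropy-based sample selection discussed above (adapting only on confident, low-entropy samples, in the spirit of sar) can be written compactly: samples whose prediction entropy exceeds a margin are simply excluded from the adaptation loss. the margin value used below (0.4 times the log of the number of classes) is the one commonly cited for sar; everything else is a simplified sketch rather than the original method.

```python
import math
import torch

def selective_entropy_loss(logits, num_classes, margin_factor=0.4):
    """Entropy loss restricted to confident samples (sketch of SAR-style selection).

    Samples whose prediction entropy exceeds margin_factor * ln(num_classes)
    are dropped from the adaptation objective.
    """
    log_probs = logits.log_softmax(dim=1)
    entropy = -(log_probs.exp() * log_probs).sum(dim=1)
    margin = margin_factor * math.log(num_classes)
    keep = entropy < margin
    if keep.sum() == 0:
        # Nothing reliable in this batch: contribute no adaptation signal.
        return logits.new_zeros(())
    return entropy[keep].mean()

# Example: with 1000 classes the margin is roughly 0.4 * 6.91 = 2.76 nats.
logits = torch.randn(16, 1000)
print(selective_entropy_loss(logits, num_classes=1000))
```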
when considering the vitbase-ln architecture, we can see that the two pairs of tricks class rebalancing and temperature and class rebalancing and sample selection are close over all the batch sizes and yield the best results of the pairs of tricks.in this work, we addressed the fully test-time adaptation problem when dealing with small batch sizes by analyzing the following tricks and methods: i) usage of batch renormalization or batch-agnostic normalization ii) class re-balancing iii) entropy-based sample selection iv) temperature scaling. furthermore, the selected tricks bring additional benefits concerning the computational load: i) using group normalization instead of batch normalization in resnet50 yields more stable results for the same number of total parameters ii) using the entropy-based sample selection improves the adapted model performance by using fewer samples. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/472.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/472.txt new file mode 100644 index 0000000000000000000000000000000000000000..2669af5040d762f2d4dea21e4c914b154001d5d5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/472.txt @@ -0,0 +1 @@ +the potential for artificial intelligence (ai) models to exhibit bias, or disparate performance for different protected groups, has been demonstrated in a range of computer vision and more recently medical imaging applications. for example, biased performance has been reported in ai models for diagnostic tasks from chest x-rays , cardiac magnetic resonance (mr) image segmentation , brain mr image analysis and dermatology image analysis . in response, the field of fair ai has emerged to address the challenge of making ai more trustworthy and equitable in its performance for protected groups .a common cause of bias in ai model performance is the combination of a distributional shift between the data of different protected groups and demographic imbalance in the training set. for example, in chest x-rays there is a distributional shift between sexes due to the presence of breast tissue lowering the signal-to-noise ratio of images acquired from female subjects . however, more subtle distributional shifts can also exist which cannot be perceived by human experts, and recent work has shown that race-based distributional shifts are present in a range of medical imaging modalities, including breast mammography . this raises the possibility of race bias in ai models trained using imbalanced data from these modalities.most work on ai bias to date has focused on deep learning techniques, in which the features used for the target task are optimised as part of the training process. in the presence of distributional shift and training set imbalance this learning process can lead to bias in the features and potentially in model performance. classical ai approaches are trained using fixed hand-crafted features such as radiomics, and so might be considered to be less susceptible to bias. however, despite these approaches still being widely applied, little experimental work has been performed to assess the potential for, and presence of, bias in these features and the resulting models.in this paper, we investigate the potential for bias in a classical ai model (random forest) based on radiomics features. 
our chosen application is potential race bias in random forest models trained using radiomics features derived from dynamic contrast enhanced magnetic resonance imaging (dce-mri) of breast cancer patients. this application is of interest because there have been reported differences in breast density and composition between races , as well as tumour biology , indicating a possible distributional shift in (imaging) data acquired from different races, and hence the possibility of bias in ai models trained using these data. our target task is the prediction of tumour molecular subtype from the radiomics features. this is a clinically useful task because different types of tumour are commonly treated in different ways (e.g. surgery, chemotherapy), and tumour molecular subtype is normally determined through an invasive biopsy. therefore, development and validation of an ai model to perform this task from imaging data would obviate the need for such biopsies.this paper makes two key contributions to the field of fair ai. first, we present the first thorough investigation into possible bias in ai models based on radiomics features. second, we perform the first investigation of bias in ai models based on features derived from breast dce-mri imaging. our chosen application is potential race bias in random forest models trained using radiomics features derived from dynamic contrast enhanced magnetic resonance imaging (dce-mri) of breast cancer patients. this application is of interest because there have been reported differences in breast density and composition between races, as well as tumour biology, indicating a possible distributional shift in (imaging) data acquired from different races, and hence the possibility of bias in ai models trained using these data. we chose to omit this step because one of our objectives was to analyse which specific radiomics features (if any) could lead to bias in the trained models, so we did not want to exclude any features prior to this analysis. the presence of such information is a known potential cause of bias in trained models as it would be indicative of a distributional shift in the data between races, not just in the imaging data but in the derived (hand-crafted) radiomics features. to investigate this, we trained rf classifiers to predict race (white or black) from the entire radiomics feature set, and also for the whole breast, fgt and tumour features individually. the training set consisted of 325/101 white/black subjects and 274/152 luminal a/non-luminal a subjects, and the test set consisted of 326/102 white/black subjects and 275/153 luminal a/non-luminal a subjects. all, white-only and black-only), we trained rf models for the task of classifying luminal a vs non-luminal a tumour molecular subtype and evaluated their performance for the entire test set as well as for the white subjects and the black subjects in the test set individually. we performed this experiment using all radiomics features, just the whole breast features, just the fgt features and just the tumour features. we can see that in terms of overall performance, the models trained using all data and the white-only data had higher accuracy than the models trained using black-only data, reflecting the impact of different training set sizes. 
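the experimental protocol just described (training random forests on radiomics features and comparing test accuracy separately for white and black subjects, for each choice of training set) follows a standard subgroup-evaluation pattern, sketched below with scikit-learn; the feature matrix, labels and race attribute are random placeholders, not the actual breast dce-mri radiomics data.

```python
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score

rng = np.random.default_rng(0)

# Placeholder data standing in for the radiomics feature matrix, the tumour
# molecular subtype label (luminal A vs non-luminal A) and the race attribute.
n_train, n_test, n_features = 426, 428, 50
X_train = rng.normal(size=(n_train, n_features))
y_train = rng.integers(0, 2, size=n_train)
race_train = rng.choice(["white", "black"], size=n_train, p=[0.76, 0.24])
X_test = rng.normal(size=(n_test, n_features))
y_test = rng.integers(0, 2, size=n_test)
race_test = rng.choice(["white", "black"], size=n_test, p=[0.76, 0.24])

def fit_and_report(train_mask, label):
    clf = RandomForestClassifier(n_estimators=200, random_state=0)
    clf.fit(X_train[train_mask], y_train[train_mask])
    pred = clf.predict(X_test)
    print(f"training set: {label}")
    print("  overall accuracy:", accuracy_score(y_test, pred))
    for group in ("white", "black"):
        m = race_test == group
        print(f"  accuracy on {group} subjects:", accuracy_score(y_test[m], pred[m]))

# Train on all subjects, then on single-race subsets, mirroring the protocol above.
fit_and_report(np.ones(n_train, dtype=bool), "all")
fit_and_report(race_train == "white", "white-only")
fit_and_report(race_train == "black", "black-only")
```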
with the exception of this last result, in general there was not a noticeable difference in bias between the models trained using all radiomics features, just whole breast features, just fgt features and just tumour features, which is consistent with the similar race classification results reported in section 4. from topto-bottom: results computed using all radiomics features, just whole breast features, just fibroglandular tissue (fgt) features and just tumour features.the main contribution of this paper has been to present the first investigation focused on potential bias in ai models trained using radiomics features.our key findings are that: (i) radiomics features derived from breast dce-mri data contain race-identifiable information, leading to the potential for bias in ai models trained using such data, and (ii) rf models trained to predict tumour molecular subtype seem to exhibit biased behaviour when trained using race-imbalanced training data.these findings show that the process of producing hand-crafted features such as radiomics features does not remove the potential for bias from the imaging data, and so further investigation of the performances of other similar models is warranted.in this work we have focused on distributional shift in imaging data (and derived features) as a cause of bias, but bias can also arise from other sources, such as bias in data acquisition, annotations, and use of the models after deployment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/473.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/473.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5ea1c5a5a503215501d2b179e0e6672b5171c10 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/473.txt @@ -0,0 +1 @@ +reinforcement learning (rl) is a powerful paradigm for interactive or sequential recommender systems (rs), since it can maximize users' long-term satisfaction with the system and constantly adapt to users' shifting interest (state). however, training optimal rl algorithms requires large amounts of interactions with environments (kakade 2003), which is impractical in the recommendation context. performing online interaction (on-policy) learning would hurt users' experiences and the revenue of the platform. since logged feedbacks of users are often abundant and cheap, an alternative is to make use of them and to learn a near-optimal recommendation policy offline before we deploy it online. although there are some offline (off-policy) rl algorithms have been proposed in the continuous robot control domain (fujimoto, meger, and precup 2019;kumar et al. 2019), the problem that how to build an effective offline rl framework for recommendation involving large numbers of discrete actions and logged feedbacks remains an open one. learning an effective recommendation policy from logged feedbacks faces the following challenges simultaneously:(a) discrete stochastic policy. in the testing, instead of recommending only one item, the rs requires generating a topk item list according a discrete policy. training a deterministic policy violates the simple machine learning principle: test and train conditions must match (see §4 for details). (b) extrapolation error. extrapolation error is an error in offpolicy value learning which is introduced by the mismatch between the dataset and true state-action visitation of the current policy (fujimoto, meger, and precup 2019;siegel et al. 2019). 
this problem is even more serious for the recommendation involving large numbers of discrete actions. (c) unknown logging policy. the feedbacks typically come from a unknown mixture of previous policies. the method that requires estimating logging policy are limited by their ability to accurately estimate the unknown logging policy. recent effort (chen et al. 2019a) for off-policy recommendation tries to alleviate the problem (a) by utilizing the inverse propensity score (ips) with model-free policy gradient algorithm. however the ips method suffers from high variance (swaminathan and joachims 2015a) and will still be biased if the logging policy does not span the support of the optimal policy (sachdeva, su, and joachims 2020;liu et al. 2019). moreover, it cannot address other problems mentioned above, i.e., (b) and (c). prior works (zheng et al. 2018;zhao et al. 2018b) try to build rl-based recommendation algorithms by directly utilizing the vanilla q-learning (sutton and barto 2018), an off-policy rl algorithm. however the q-learning is a deterministic policy method and also suffers from the extrapolation error due to no interaction with online environments (fujimoto, meger, and precup 2019). to address the aforementioned defects, in this paper, we propose a general and effective offline learning framework for interactive rs. we first formalize the interactive recommendation as a probabilisitic inference problem, and then propose a discrete stochastic actor-critic algorithm to maximize cumulative rewards based on the probabilistic formulation. in order to reduce the extrapolation error, we propose five regularization techniques: support constraints, supervised regularization, policy constraints, dual constraints and reward extrapolation for offline learning, which can constrain the mismatch between the recommendation policy and the unknown logging policy. our approaches can be viewed as a combination of supervised learning and off-policy reinforcement learning for recommendation with discrete ac-tions. we show that such combination is critical for improving the performance of recommendation in the offline setting. we highlight that we are the first to systemically study the offline learning problem in the interactive recommendation context. our contributions can be summarized as:(1) we propose a discrete stochastic rl algorithm to maximize cumulative rewards for interactive recommendation.(2) we propose a general offline learning framework for interactive recommendation with logged feedbacks, including support constraints, supervised regularization, policy constraints, dual constraints and reward extrapolation.(3) we conduct extensive offline experiments on two realworld public datasets, empirically demonstrating the proposed methods can achieve superior performance over existing learning methods for recommendation. although there are some offline (off-policy) rl algorithms have been proposed in the continuous robot control domain(fujimoto, meger, and precup 2019;kumar et al. the feedbacks typically come from a unknown mixture of previous policies. 2018;zhao et al. 2018b) try to build rl-based recommendation algorithms by directly utilizing the vanilla q-learning(sutton and barto 2018), an off-policy rl algorithm.(2) we propose a general offline learning framework for interactive recommendation with logged feedbacks, including support constraints, supervised regularization, policy constraints, dual constraints and reward extrapolation. 
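the combination described above (a discrete stochastic actor-critic whose policy improvement step is kept close to the unknown logging policy) can be sketched as follows. this is a minimal, generic pytorch sketch of one possible instantiation, using a kl penalty towards a behaviour-cloned estimate of the logging policy; it is not the authors' exact objective, and the network definitions, batch format and hyper-parameters are placeholders.

```python
import torch
import torch.nn.functional as F

def actor_critic_losses(policy_net, q_net, behavior_net, batch, gamma=0.99, beta=0.1):
    """One offline update for a discrete stochastic actor-critic.

    behavior_net is a behaviour-cloning estimate of the unknown logging policy;
    the KL term keeps the learned policy close to its support (sketch only).
    """
    s, a, r, s_next, done = batch  # states, actions, rewards, next states, done flags

    # Critic: one-step TD target using the stochastic policy at the next state.
    with torch.no_grad():
        next_probs = policy_net(s_next).softmax(dim=1)
        next_q = q_net(s_next)
        target = r + gamma * (1.0 - done) * (next_probs * next_q).sum(dim=1)
    q = q_net(s).gather(1, a.unsqueeze(1)).squeeze(1)
    critic_loss = F.mse_loss(q, target)

    # Actor: maximise expected Q under the policy while penalising divergence
    # from the estimated logging policy.
    log_probs = policy_net(s).log_softmax(dim=1)
    probs = log_probs.exp()
    with torch.no_grad():
        behavior_log_probs = behavior_net(s).log_softmax(dim=1)
        q_detached = q_net(s)
    expected_q = (probs * q_detached).sum(dim=1).mean()
    kl = (probs * (log_probs - behavior_log_probs)).sum(dim=1).mean()
    actor_loss = -expected_q + beta * kl

    return critic_loss, actor_loss
```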
model-based methods(zou et al.3 and demonstrated in our experiments, enforcing a specific constraint between learning policy and logging policy is critical for good performance.although the methods proposed in previous subsections can effectively control the divergence of optimizing policy and data logging policy, both of them only consider one-step regularization, and thus can not avoid actions that may lead to higher deviation at future time steps. to address this problem, instead of explicitly adding constraint for policy improvement at time t, we consider the logging policy as a prior and incorporating it directly into our original probabilistic model (eq. specifically, we first estimate the logging policy πb (a t | s t ) via supervised learning using the logged feedbacks as the proxy of unknown logging π b (a t | s t ), and then incorporate it as the action prior into eq.6, this method not only adds prior constraint on the policy improvement step, but also adds it to the target q value, which can avoid actions that are far from logging policy at future time steps. we conduct experiments on two public real-world datasets and investigate the following research questions: (rq1) how do the proposed methods perform compared with existing methods for interactive recommendation? (rq2) are the proposed learning methods robust to different types of neural architectures and sparse logged feedbacks? (rq3) can the adaptive update step improve performance of the supervised regularization? (rq4) how sensitive is the performance of the proposed learning methods with respect to the trade-off parameters?.we compare these baselines with our proposed stochastic discrete actor critic (sdac), support constraints (sc), supervised regularization (sr), policy constraints (pc), dual constraints (dc) and reward extrapolation (re).(2) different from the table1, re becomes a competitive method and outperforms sr, which demonstrates that control the divergence of recommendation policy and logging policy at future time steps is helpful on sparse feedbacks.to perform offline learning effectively, we proposed a general offline learning framework to minimize the distribution mismatch between the logging policy and learning policy, including support constraints, supervised regularization, policy constraints, dual constraints and reward extrapolation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/474.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/474.txt new file mode 100644 index 0000000000000000000000000000000000000000..dfd9c482506a852de9ea733a9d27e6f83c8d77d2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/474.txt @@ -0,0 +1 @@ +the emergence of automated essay scoring (aes) represents a paradigm shift in the realm of educational assessment, heralding cutting-edge technologies that transcend traditional grading methodologies. aes leverages a diverse array of scoring techniques to address multifaceted purposes, ranging from academic evaluation to personalized learning experiences.at the objective of this research lies a profound investigation into the user's understanding of recently acquired knowledge a quest that transcends conventional evaluation methodologies. unlike conventional aes, which predominantly targets holistic essay scoring, this research delves into a more specific and granular domain, the evaluation of argumentative essays. 
however, its ambitions extend beyond mere assessment, it aspires to empower users with the ability to deconstruct and analyze their comprehension of intricate subject matter, offering insights into the fabric of their learning journey. the research capitalizes on a carefully crafted scoring index, a product of large language models, which serves as a beacon of enlightenment, guiding users in comparing and contrasting their understanding of topics recently assimilated into their intellectual repertoire.it endeavours to bridge the chasm between knowledge acquisition and comprehension through a meticulously structured methodology. it capitalizes on the capabilities of the langchain tool, an instrument renowned for its prowess in summarizing complex pdf documents while meticulously extracting essential information. this tool becomes the linchpin in a process aimed at gauging the depth of a user's understanding of the summarized content. to this end, metrics of paramount importance employed are cosine similarity, sorensen similarity, jaccard similarity, and bert embedding similarity. this mathematical measure serves as the arbiter of similarity between the user's interpretation of the pdf content and the compact summary generated by langchain. it quantifies the congruence between these two sources of information, affording users a tangible score that reflects their grasp of the topic. furthermore, the research delves even deeper by comparing the user's understanding with the original pdf content, affording yet another score that quantifies the extent of their comprehension. in culmination, these two scores converge to produce a single, definitive value-a percentage that acts as a beacon, shining light on the user's understanding of the pdf topic. a higher percentage signifies a superior level of comprehension, while a lower one points the way to potential areas of improvement, illuminating the path to enriched learning and enhanced educational experiences.furthermore, the integration with the similarity based developer analyzer research promises to enhance candidate selection by incorporating advanced summarization and evaluation techniques. this integration holds the potential to refine the recruitment process, ensuring candidates align not only with job requirements but also with the organization's culture and values. this research marks a significant step forward, uniting technology, linguistic analysis, and educational empowerment. it leverages the prowess of large language models and advanced scoring techniques to unlock the hidden dimensions of comprehension and elevate the user's learning journey to new heights.the emergence of automated essay scoring (aes) represents a paradigm shift in the realm of educational assessment, heralding cutting-edge technologies that transcend traditional grading methodologies. unlike conventional aes, which predominantly targets holistic essay scoring, this research delves into a more specific and granular domain, the evaluation of argumentative essays.the field of automated essay scoring (aes) has a rich history spanning over 50 years, drawing substantial interest within the nlp community. their paper applies a financial risk management scoring method to assess university students' study skills and learning styles using computer-based methods, resulting in a single score. 
it reveals significant differences in study skill scores among students with various learning styles, emphasizing the potential of unsupervised machine learning for understanding and enhancing educational strategies. the evaluation of this method on discourse coherence and automated essay scoring tasks demonstrates its superior performance, establishing a new state-of-the-art in both domains. their study addresses the need for scalable automated essay scoring (aes) in online education by investigating three active learning methods. these approaches, including uncertainty-based, topological-based, and hybrid methods, were evaluated for essay selection and classification using a scoring model trained on transformer-based language representations. the problem of automated essay scoring (aes) has been exacerbated by the proliferation of online learning platforms like coursera, udemy, and khan academy. unlike conventional automated writing evaluation, which concentrates on holistic essay scoring, this niche field emphasizes assessing argumentative essays, offering specific feedback encompassing argumentation structures and argument strength trait scores. in the domain of automated essay scoring (aes), the utilization of machine learning, particularly natural language processing, has attracted significant attention for its ability to alleviate manual scoring burdens and provide prompt feedback to learners.recent advances in automated essay scoring (aes) have leveraged deep learning techniques, particularly neural networks, to achieve state-of-the-art solutions. this paper presents a framework that enhances the validity and accuracy of a baseline neural-based aes model, focusing on trait evaluation and scoring. this study focuses on ukara, an automatic essay scoring system combining nlp and machine learning, using datasets a and b from the ukara challenge.this literature review explores the use of similarity techniques in automated essay scoring (aes) systems over the past decade (2010-2020). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/475.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/475.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e954c73273c69ccd9cbf6d64525d8c955f5ebad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/475.txt @@ -0,0 +1 @@ +adversarial examples are amongst the most discussed malicious phenomenon in training robust machine learning models. the phenomenon refers to a situation where a trained model is fooled to return an undesirable output on particular inputs that an adversary carefully crafts. the ease of generating these examples and its abundance in most applications of supervised machine learning have posed serious challenges in adoption of machine learning in a safe and secure manner. for example, the phenomenon would allow a malicious actor to alter the predictions of a cloud-based machine learning model , or bypass a face-detection model through makeup . thus, many studies on the possible causes and effects of this phenomenon have been conducted.while there is no consensus on the reasons behind the emergence of these examples, many facets of the phenomenon have been revealed. for example, szegedy et al. show that adversarial perturbations are not random and they generalize to other models , or goodfellow et al. 
indicate that linear approximations of the model around a test sample is an effective surrogate for the model in the generation of adversarial examples . a known aspect of adversarial examples is that a hypothesis can be trained on its own adversarial examples and improve in robustness. the efficient implementation of an adversarial attack by goodfellow et al. was the first method that made adversarial training feasible . many related proposals and improvements have been proposed since, and adversarial training is regarded as the most effective defence method in the community .however, it has been shown that adversarial training suffers from a few shortcomings. as an example, it has been observed that adversarial training has a negative effect on the standard performance of the model . there are conflicting views regarding this observation. a popular explanation is that robustness and accuracy are in opposition, and binary classification problems have been synthesized in which standard accuracy and robustness are at odds with each other . there is a study that finds negative correlation between the robustness and standard accuracy of adversarially trained models . in contrast, there are others that challenge this idea. for example, there is evidence that some of the standard training sets are separable , and that robust linear classifiers appear to be possible . moreover, adversarially trained hypotheses do not generalize to unseen attacks . this issue has severely hindered the effectiveness of adversarial training . as a result, studies have been conducted on mixing different adversarial attacks in hope of training a more robust hypothesis .many ideas in the related literature are implicitly or explicitly based on the assumption that the optimal robust hypothesis, if existing, is bounded and lipschitz continuous - . there is a sentiment that a complete hypothesis space such as artificial neural networks (ann) should be able to represent the robust hypothesis given enough training samples , , and others are hopeful that an inept understanding of deep anns would bring about a solution for the phenomenon . the assumption of existence of a continuous robust and accurate hypothesis is an idea that conforms to intuition. we believe however that this assumption might not be warranted. we argue that continuity of a robust and accurate classifier is a strong assumption and it is not true in common applications. in this paper, we are aiming to describe a framework for the study of the discontinuities of a robust and accurate hypothesis. learning rules that regularize the norm of the gradient of the hypothesis play an important role in our framework. a part of the framework is concerned with detecting adversarial regularization of the norm of the gradient of the hypothesis is a technique that have been shown to be effective in training robust hypothesis - . the idea is mostly based on the fact that if we approximate a hypothesis around a test sample by a linear function, that linear function should be as constant as possible. in for example, jacobian norm has been used to improve the robustness of the linear approximation. an empirical study showed that regularized artificial neural networks exhibit robustness to transferred adversarial examples and that human volunteers find the misclassifications more interpretable . the concentration of the distribution of the the gradient of the loss function has been found to be related to the robustness of stochastic neural networks . 
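this kind of gradient-norm (jacobian) regularisation can be added to an ordinary training loop with a double-backward pass: the input gradient of the loss is computed with create_graph=True so that its norm can itself be penalised. a minimal pytorch sketch follows, with an arbitrarily chosen regularisation weight.

```python
import torch
import torch.nn.functional as F

def gradient_regularized_loss(model, x, y, lam=0.1):
    """Cross-entropy plus a penalty on the squared norm of the input gradient."""
    x = x.clone().requires_grad_(True)
    logits = model(x)
    ce = F.cross_entropy(logits, y)
    # create_graph=True keeps the graph so the penalty can be backpropagated.
    grad_x, = torch.autograd.grad(ce, x, create_graph=True)
    penalty = grad_x.pow(2).sum(dim=tuple(range(1, grad_x.dim()))).mean()
    return ce + lam * penalty

# Usage sketch (model, images and labels are placeholders):
# loss = gradient_regularized_loss(model, images, labels)
# loss.backward(); optimizer.step()
```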
gradient regularization has been used to decrease the effect of transferability of adversarial examples between models .there are also approaches that try to detect adversarial examples, a task that proved to be harder than it seemed at first . an empirical study of different methods of detection can be found in . it was shown in that the distribution of adversarial examples differs substantially from the distribution of natural training samples. this difference was shown by performing statistical testing and reinforced by the observation that a classifier can be trained to detect the adversarial examples produced by an attack. the activation of convolutional layers has been used to detect the adversarial examples . similarly, in the histogram created from the activations of the hidden layer of an artificial neural network is used to train a classifier for detecting adversarial examples. it was shown by that correctly classified examples tend to have greater maximum softmax probabilities compared to erroneously classified and out-of-distribution examples.a summary of our contributions are:• we introduce weakly-harmonic learning rules that use the dirichlet energy of the hypothesis as a reguralizer. by doing so, we are able to apply the methods of variational calculus and turn the learning problem into a partial differential equation. we will use this construct as a model to study the effects of regularization of the norm of the gradient of the hypothesis. • we further introduce the space of weakly-harmonic hypotheses as an abstraction for a solution to a weaklyharmonic learning problem, and provide a convenient representation for these hypotheses.• by extending the domain of weakly-harmonic hypotheses into the complex space, we present discontinuities in the optimal hypothesis as an explanation for the existence of adversarial regions in the domain of analytic hypothesis. we introduce the space of holomorphic hypotheses and propose a systematic approach for determining the discontinuity of an optimal classifier. • we propose a convolutional architecture for constructing weakly-harmonic and holomorphic classifiers and empirically show that there is a fundamental limit on the performance of continuous hypotheses in common image classification tasks. we can summarize the significance of our contributions in two parts. first, our results suggests that a robust and accurate hypothesis is a collection of hypotheses that are specialized for different regions of the domain. we further lay a theoretical basis for constructing such hypotheses. second, by relating the adversarial regions to established concepts in analysis of holomorphic functions, we open a path to a rigorous analysis of the adversarial examples phenomenon that can freely move between the study of the geometrical and the algebraic properties of the hypothesis. 2) the second smoothing problem of {(h k , ω k )} k k=1 is the problem of finding a set {g k } k k=1 of continuous nonvanishing functions g k : ω k → c for which. consider a finite holomorphic pac covering {(h k , ω k )} k k=1 of a domain of holomorphy ω ⊂ c n which is valid for a smoothing problem. consider some ϵ, δ ∈ (0, 1), a compact domain ω ⊂ c n , a sequence of pac learnable hypothesis spaces {f k : ω → c} ∞ 1 , f k (ω) ⊆ f k+1 (ω) with sample complexity functions m k : (0, 1) 2 → n, and a sequence of training sets. 
now that we have an extra dimension for every real dimension, we only have to separate each φ again between x j and y j , ψ(x j , y j ; α j ) = φ(x j ; α j )θ(y j ; α j ),. however, another function ψ † exists for which ψ † (x j , 0; α j ) = 0, ψ † (x j , y j ; α j ) = sin(α j x j ) sinh(α j y j ),.called the conjugate harmonic of ψ(x j , y j ; α j ) and we have cos(α j z j ) = ψ(x j , y j ; α j ) + iψ † (x j , y j ; α j ),.consider the first smoothing problem, and define functions g lk : ω k ∩ ω l → c such that g kl (z) = g l (z) -g k (z) z ∈ ω k ∩ ω l .it can be checked that g kl (z) + g lk (z) = 0 z ∈ ω k ∩ ω l , (90) g jk (z) + g kl (z) + g lj (z) = 0 z ∈ ω j ∩ ω k ∩ ω l .it can be checked that g kl (z)g lk (z) = 1 z ∈ ω k ∩ ω l , (93) g jk (z)g kl (z)g lj (z) = 1 z ∈ ω j ∩ ω k ∩ ω l .is an open hypercube, h k ∈ h(ω k ), and v(h k ) ∩ ω l = v(h l ) ∩ ω k .7 h k can be holomorphically extended to the tube domain t ω k . since x is compact, we can choose a finite holomorphic pac covering {(h k , ω k )} k k=1 of x . then a choice of a pac covering {(h k , ω k )} k k=1 of x exists that is valid for a smoothing problem, and the second smoothing problem of {(h k , ω k )} k k=1 is feasible by choosing g ′ k (z) = h k (z) hc(z) .1, a holomorphic solution {g k (z)} k k=1 exists as well, and denote the smoothed holomorphic function by h o . then {g k (z)} k k=1 is a holomorphic solution for the corresponding second smoothing problem of {(h k , ω k )} k k=1 for which g k (z) = h k (z) ho(z) . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/476.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/476.txt new file mode 100644 index 0000000000000000000000000000000000000000..4031517778aa3c9cfe486f839e88681e49dc2467 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/476.txt @@ -0,0 +1 @@ +transformer models have achieved great success in various machine-learning tasks and are the foundation for modern large-scale pre-trained language models such as bert , roberta , xlnet , transformer-xl , and the gpt family . these models have enabled leaps in performance on many nlp tasks, as for example experienced by the millions that use the chatgpt application . similarly, the vision-transformer (vit) , and similar models outperform cnns on many computer vision tasks such as image classification and object detection .the impressive gains in performance, however, come at a price. state-ofthe-art transformers are extremely large, having parameters in the hundreds of billions. the requirements for memory and computational power have become prohibitive, not only for deployment on resource-constrained embedded systems, but sometimes even in data centers, and their considerable energy usage is widely discussed in society. neural network quantization is a method to address this problem, as it reduces memory usage by employing lower-bit precision for weight and activation tensors. this not only decreases inference time but also increases energy efficiency through the adoption of lowbit fixed-point arithmetic instead of floating-point operations . other approaches to improve transformer's scalability and environmental sustainability include pruning , knowledge distillation and neural architecture search . 
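as a reminder of what the lower-bit precision mentioned above amounts to in practice, the sketch below shows plain symmetric per-tensor int8 quantisation of a weight tensor: a single floating-point scale maps weights to 8-bit integers, so matrix multiplications can be carried out in integer arithmetic and rescaled afterwards. this is a generic illustration, not any particular library's quantisation scheme.

```python
import numpy as np

def quantize_int8(w):
    """Symmetric per-tensor int8 quantisation: w is approximated by scale * q."""
    scale = np.abs(w).max() / 127.0
    q = np.clip(np.round(w / scale), -127, 127).astype(np.int8)
    return q, scale

def dequantize(q, scale):
    return q.astype(np.float32) * scale

w = np.random.randn(4, 4).astype(np.float32)
q, scale = quantize_int8(w)
print("max abs quantisation error:", np.abs(w - dequantize(q, scale)).max())
```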
in this work, we propose changes to the architecture's fundamental building blocks to take steps towards the same end.transformers use an attention mechanism to identify connections between different elements in a sequence, which conventionally takes the dot-product between the query and key matrices before passing the results through a softmax function. many alternative formulations have been proposed, such as the fastformer based on additive attention, or the reluformer that uses relu activation in place of softmax. these architectures share elements with what is proposed in this work, however, they still apply attention by a matrix multiplication with the value matrix. extending our earlier work with relu and addition-based gated rnns , we here propose the inhibitor attention mechanism, which is designed to not rely on variable-to-variable multiplication and softmax activation. these operations are more costly than constant-to-variable multiplications (also known as literal multiplication) and relu on many computing hardware. even more so, they are particularly challenging operations under homomorphic encryption, an arithmetic system that permits calculation with encrypted variables without access to the secret key .transformers use an attention mechanism to identify connections between different elements in a sequence, which conventionally takes the dot-product between the query and key matrices before passing the results through a softmax function. to make it easier to obtain a zero inhibition score, which allows entries of the value matrix, v , to pass unmodified through equation 4 we test using a slightly modified inhibition score, z ′ = (z -α) + , which applies a constant shift α ≥ 0 to the manhattan score. we carried out numerical experiments that trained transformer models based on the new inhibitor and the conventional dotproduct attention on four standard tasks. the aim was not to achieve sota results but rather to examine if the inhibitor mechanism would perform comparably on a set of familiar tasks, which is why we used simple set-ups without hyperparameter tuning. from table1, which reports the results, we note that for each task, the two alternative attention mechanisms score very similarly. the ground truth is simply the dot-product of the two inputs as vectors, which, rather obviously, is not a challenging task for a conventional (dot-prod-based) transformer. we also train one-layer transformers for the mnist handwritten digit recognition taskand imdb movie-review sentiment analysis task, which are simple go-to benchmark task for image classification and text analysis, respectively. although far from sota, we note that our simple models achieve decent accuracy for both attention mechanisms, where the differences in performance are not significant. next, we wanted to examine and compare the scaling properties for the proposed mechanism under two identified scenarios: i) quantization with integer arithmetics and ii) homomorphic encryption. 
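before turning to those scaling experiments, the inhibition scores described above can be written explicitly: each query/key pair receives a manhattan (l1) distance score z, and a constant shift followed by a relu gives the modified score z' = (z - alpha)+, so that sufficiently close pairs receive zero inhibition. the sketch below computes only these scores; how they are combined with the value matrix (the paper's equation 4) is not reproduced here.

```python
import torch

def inhibition_scores(q, k, alpha=0.0):
    """Pairwise Manhattan (L1) scores between queries and keys, shifted and clipped.

    q: (seq_q, d) query matrix, k: (seq_k, d) key matrix.
    Returns z' = relu(z - alpha) with z[i, j] = sum_m |q[i, m] - k[j, m]|.
    """
    z = (q.unsqueeze(1) - k.unsqueeze(0)).abs().sum(dim=-1)  # shape (seq_q, seq_k)
    return torch.relu(z - alpha)

q = torch.randn(5, 8)
k = torch.randn(7, 8)
print(inhibition_scores(q, k, alpha=1.0).shape)  # torch.Size([5, 7])
```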
therefore, the two alternative attention mechanisms were implemented directly in lowlevel code rather than high-level ml libraries, where built-in optimizations for conventional models and design choices would bias a comparison.for the plaintext experiment, we used integer 16-bit arithmetics implemented in the rust programming language, which gives detailed low-level control over circuits and supports advanced time benchmarking through the criterion package.the results indicate that the proposed inhibitor mechanism can have a significant advantage, with i) 30%-50% saving for the plaintext implementation on cpu as per table2, and ii) a factor 3-6 under encryption with tfhe as per table3. timing estimates are averaged over 20 repeated experiments and are significant at the 95% while experiment results are promising on simple training tasks, for future work, it is necessary to examine performance under more challenging settings, for example, by pre-training a much larger transformer model and testing on modern nlp and image recognition benchmark tasks.the ml software that we used for this study supports fused operations for certain operations that avoid memory bloat and make the execution of the inhibition much more efficient on cpu and gpu hardware.where the first two terms are simple sums that do not expand memory usage, and the last term can benefit from efficient fused pairwise manhattan distance implementations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/477.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/477.txt new file mode 100644 index 0000000000000000000000000000000000000000..9e7781032859b3ce892619983df1ac6a43a1acf4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/477.txt @@ -0,0 +1 @@ +numerous frameworks in learning theory and statistics formalize the intuitive insight that "smooth functions are easier to learn than rough ones" . the various measures of smoothness that were studied in a statistical context include the popular lipschitz or hölder seminorms; the bounded variation norm ; sobolev, sobolev-slobodetskii and besov norms ; averaged modulus of continuity ; and probabilistic lipschitzness in the context of classification .in particular, a recent line of work studied a notion of average smoothness with respect to an arbitrary measure. informally, the average smoothness is defined by considering the "local" hölder (or lipschitz) smoothness of a function at each point of the instance space, averaged with respect to the marginal distribution over the space; see figure 1 for a simple illustration, and section 2.1 for a formal definition. the main conclusion of the aforementioned works is that it is possible to guarantee statistical generalization solely in terms figure 1: illustration of a function and a measure µ exhibiting a large gap between "worst-case" smoothness (occurring in low density regions) and average-smoothness with respect to µ; figure taken from .of the average smoothness for any underlying measure, effectively replacing the classic hölder (or lipschitz) constant with a much tighter distribution-dependent quantity. in particular, hanneke et al. proved a uniform convergence bound for the class of on-average-smooth functions in the realizable (i.e. noiseless) case, and complemented this result with an efficient realizable learning algorithm. with regard to the the general case of agnostic learning, the results of hanneke et al. had some limitations. 
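before turning to those limitations, the notion of average smoothness referred to above can be made concrete with a simple empirical estimate: for each sample point, take the largest ratio |f(x) - f(x')| / ||x - x'|| over the other sample points as a proxy for the local lipschitz constant at x, and average these local constants over the sample, i.e. over the empirical measure. the sketch below only illustrates the worst-case versus on-average gap; it is not the exact definition used in the paper.

```python
import numpy as np

def local_lipschitz(f_vals, xs):
    """For each point, the largest slope |f(x_i) - f(x_j)| / ||x_i - x_j|| over j != i."""
    diffs_f = np.abs(f_vals[:, None] - f_vals[None, :])
    dists = np.linalg.norm(xs[:, None, :] - xs[None, :, :], axis=-1)
    np.fill_diagonal(dists, np.inf)  # ignore j == i
    return (diffs_f / dists).max(axis=1)

# A function that is steep only in a region carrying little probability mass:
rng = np.random.default_rng(0)
xs = rng.uniform(0.0, 1.0, size=(2000, 1)) ** 3  # mass concentrated near 0
f_vals = np.where(xs[:, 0] < 0.9, 0.1 * xs[:, 0], 10.0 * (xs[:, 0] - 0.9) + 0.09)

loc = local_lipschitz(f_vals, xs)
print("worst-case (empirical) Lipschitz constant:", loc.max())
print("average smoothness (mean local constant):", loc.mean())
```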
in particular, the general reduction from agnostic to realizable learning deployed therein left two unfulfilled desiderata. from a statistical perspective, it remained open whether a function class with bounded average smoothness under some distribution µ is µ-glivenko-cantelli, namely that the excess risk decays uniformly over the class; only the existence of some returned predictor with small excess risk was established. on the computational side, the agnostic algorithm is highly inefficient: its runtime complexity is exponential in the sample size, in contrast with the polynomial-time realizable algorithm., 2021, hanneke et al. for any measurable function f : ω → we associate its l 1 risk l d (f ) := e (x,y )∼d |f (x) -y |, and its empirical risk with respect to a sample.more generally, we associate to any measurable function its l 1 norm ∥f ∥ l 1 (µ) := ex∼µ |f (x)|, and given a sample (x 1 , . , x n ) ∼ µ n , we denote its l 1 norm with respect to the empirical measure by ∥f ∥ l 1 (µn. having recalled two notions that quantify the "size" of a normed function space (f, ∥ • ∥) -namely, its covering and bracketing numbers -it is useful to note they are related through n f (ε) ≤ n (f, ∥ • ∥, 2ε), though no converse inequality of this sort holds in general. on the other hand, the main advantage of using bracketing numbers for generalization bounds is that it suffices to bound the ambient bracketing numbers with respect to the distribution-specific metric, as opposed to the empirical covering numbers which are necessary to guarantee generalization (f, l 1 (µ), α) due to eq. (9). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/478.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/478.txt new file mode 100644 index 0000000000000000000000000000000000000000..ddb4e8321083e57df28f228245366a4c4bd10f8a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/478.txt @@ -0,0 +1 @@ +a new age in health care is rapidly approaching, one in which the wealth of biological data will play an increasingly significant role. by considering various aspects of a patient's data, such as variability in molecular traits, environment, electronic health records (ehrs), and lifestyle, precision medicine, for instance, aims to "ensure that the right treatment is delivered to the right patient at the right time." the abundance of biomedical data presents both enormous potential and difficulties for health care research. to build trustworthy medical solutions based on data-driven techniques and machine learning, a major difficulty is examining the relationships among all the many bits of information in these data sets. previous research has attempted to combine various data sources in order to create collaborative knowledge bases that can be utilized for discovery and predictive analysis .in the medical field, which places enormous demands on human life, the healthcare service system is crucial. healthcare professionals in developing nations are using intelligent technology, such as artificial intelligence (ai) and machine learning methods, to advance their professions. healthcare innovation has influenced research on intelligent healthcare systems that are oriented on people. ai technologies have an impact on how intensive care and administrative tasks are developed in hospitals and clinics. since 2019, jafar abdollahi has conducted extensive research in the area of disease diagnosis with artificial intelligence. 
according to his findings, artificial intelligence, including machine learning and deep learning, has been successfully used in medical image and healthcare analysis for diseases and settings like diabetes, breast cancer, the healthcare system, forecasting, stroke, types of epidemic, and heart disease. deep learning techniques haven't, however, been thoroughly examined for a wide variety of medical issues that can benefit from their capabilities. deep learning has numerous features that might be used in the healthcare industry, including its better performance, end-to-end learning model with integrated feature learning, capacity to handle complicated and multi-modality data, and more. the deep learning research community as a whole needs to address a number of issues related to the characteristics of health care data (i.e., sparse, noisy, heterogeneous, and time-dependent), as well as the need for improved techniques and tools that allow deep learning to interface with clinical decision support workflows. in this article, we cover current and upcoming deep learning applications in medicine, emphasizing the crucial elements needed to have a substantial influence on health care. we do not aim to provide a thorough foundation on the technical aspects or widespread applications of deep learning. as a result, in the sections that follow, we'll give a brief overview of the general deep learning framework, examine some of its applications in the medical field, and discuss the advantages, drawbacks, and potential uses of these techniques in the context of precision medicine and next-generation health care. the taxonomy of popular deep learning architectures for hcs data analysis is shown in fig. 1, along with a few hcs applications, particularly one for illness detection. deep learning and machine learning have brought evolutionary changes to various fields such as industry, companies, schools, colleges, and healthcare systems, and more changes are seen in medicine through many types of online and offline facilities. for many years, building a machine learning system needed meticulous engineering and subject-matter knowledge to convert the raw data into an appropriate internal representation from which the learning subsystem, often a classifier, could find patterns in the data set. despite the encouraging outcomes produced by deep architectures, there are still a number of issues that need to be resolved before deep learning in healthcare may be used clinically.
this is also one of the explanations for why deep learning is so effective in fields like computer vision, voice, and natural language processing where massive amounts of data are amenable to easy collection. therefore, in comparison to other media, the quantity of medical data required to train an efficient and reliable deep learning model would be much more from a big data viewpoint. • data quality: health care data are very varied, confusing, noisy, and incomplete, in contrast to other fields where the data are clean and well-structured. it is difficult to train an effective deep learning model with such large and diverse data sets and must take into account a number of factors, including data sparsity, redundancy, and missing values. the creation of unique solutions will be necessary for designing deep learning methods that can handle temporal health care data.deep learning techniques are potent tools that enhance conventional machine learning and enable computers to learn from the data in order to find ways to develop smarter applications. the findings from every study that has been published in the literature demonstrate how deep learning can also be used to analyze health care data. furthermore, given the emphasis on representation learning and not just classification accuracy, deep architectures have the ability to integrate various data sets across disparate data types and provide higher generalization due of their hierarchical learning structure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/479.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/479.txt new file mode 100644 index 0000000000000000000000000000000000000000..347f9415e1a2057fb63dfe1dc00f9a19f3293637 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/479.txt @@ -0,0 +1 @@ +time series analysis is crucial in the internet of things (iot), encompassing tasks such as environmental monitoring and trend prediction in smart cities. however, the complexity of real-world phenomena, amplified by factors like human activities and global climate change, poses challenges for traditional linear models to capture the non-linear and intricate patterns in sensor data. machine learning models , particularly transformer models , have garnered significant attention for their effective time series modelling, efficient handling of long sequences, and ability to capture global dependencies. despite efforts to reduce model complexity , the overhead associated with transformers hampers their deployment on iot devices with limited resources.this work uses model quantisation to alleviate the overhead, particularly when deploying transformers on low-power embedded field-programmable gate arrays (fpgas). model quantisation involves mapping computations from highresolution floating-point numbers (e.g., 32-bit) to lower-resolution integer/fixedpoint numbers (e.g., 8-bit, 4-bit, or 2-bit) through the application of quantisation schemes. previous research used post-training quantisation (ptq) to select the optimal resolution per layer, resulting in mixed-precision quantisation. in contrast, this work focuses on the selection of quantisation schemes. specifically, we assess the effects of symmetric and asymmetric quantisation schemes on transformer models and suggest enhanced quantisation-aware training (qat) to select the optimal quantisation scheme for each object dynamically. 
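to make the difference between the two schemes concrete, here is a minimal numpy sketch of uniform 8-bit quantisation under a symmetric scheme (zero-point fixed at zero) and an asymmetric scheme (scale and zero-point derived from the observed [min, max] range); the function names and the tensor are illustrative, not part of the paper.

import numpy as np

def quantise(x, n_bits=8, symmetric=True):
    # uniform quantisation of a float tensor to signed integers
    qmax = 2 ** (n_bits - 1) - 1
    qmin = -(2 ** (n_bits - 1))
    if symmetric:
        # symmetric (sq): zero-point fixed at 0, scale from the max magnitude
        scale = np.abs(x).max() / qmax
        zero_point = 0
    else:
        # asymmetric (aq): scale and zero-point from the full [min, max] range
        scale = (x.max() - x.min()) / (qmax - qmin)
        zero_point = int(np.round(qmin - x.min() / scale))
    q = np.clip(np.round(x / scale) + zero_point, qmin, qmax).astype(np.int8)
    return q, scale, zero_point

def dequantise(q, scale, zero_point):
    return (q.astype(np.float32) - zero_point) * scale

# usage: the asymmetric scheme tracks a skewed activation range more tightly,
# at the cost of zero-point correction terms during integer matrix multiplication
x = np.random.randn(4, 8).astype(np.float32) + 1.5
q_sym, s_sym, z_sym = quantise(x, symmetric=True)
q_asym, s_asym, z_asym = quantise(x, symmetric=False)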
we evaluate this approach with mixed-precision quantisation and examine the trade-off between the computational overhead reduction and the prediction precision loss.the subsequent sections are structured as follows: section 2 discusses related work, followed by background information on time series transformer models in section 3. section 4 presents considerations for quantising the linear layers in the transformer model. section 5 elaborates on our approach for conducting this study. the experimental setup and results are provided in section 6. finally, section 7 concludes the paper, summarises our findings, and outlines future research. specifically, we assess the effects of symmetric and asymmetric quantisation schemes on transformer models and suggest enhanced quantisation-aware training (qat) to select the optimal quantisation scheme for each object dynamically. letting the model utilise seven feature variables from the preceding 24 observations to predict the target variable for the subsequent observation, the model consists of 56,449 parameters, with 50,305 parameters residing in the linear layers, accounting for 89.while quantising a linear layer, weights (w f loat ), biases (b f loat ), inputs (i f loat ), and outputs (o f loat ) could be chosen as quantisation objects.our research aims to explore the feasibility of quantising the transformer model by selecting the optimal quantisation scheme for each quantisation object in the linear layers.to assess the computational overhead introduced by the aq scheme compared to the sq scheme during quantised inference in the transformer model, it is important to analyse and quantify this overhead.taking the linear layer (l 1 ) of the model input layer as an example, with inputs i int with dimensions of (n, m) and outputs o int with dimensions of (n, d model ), the aq scheme applied to both the inputs and outputs introduce a computational overhead of n • m + n • d model operations.to explore the impact of different combinations of quantisation schemes (referred to as quantisation configuration) on the model performance, it is critical to have a comprehensive coverage of samples representing these configurations.all-aq aq scheme applied to all parameters and feature vectors all-sq sq scheme applied to all parameters and feature vectors sq+aq sq scheme for parameters, aq scheme for feature vectors sq+apq sq scheme for parameters, apq scheme for feature vectors. we then present results for two study phases: evaluating model precision and computational overhead with 8-bit quantisation, and extending the analysis to mixedprecision quantisation.our next goal is to evaluate the impact of different quantisation configurations in lower-bit quantisation scenarios in pursuit of smaller model size, improving their deployment possibilities on embedded fpgas.to understand the sensitivity of each linear layer to 4-bit quantisation, we conducted ablation studies to quantise one linear layer to 4 bits while quantising the remaining layers at 8 bits. the linear layer (l 8 ) in the model output layer exhibits the highest sensitivity to 4-bit quantisation, contributing significantly to the degradation in rmse when all linear layers were quantised to 4 bits.this paper studies the impact of different quantisation schemes on the linear layers of the transformer model for time series forecasting. we propose an adaptive method that dynamically adjusts the quantisation scheme during training, selecting the most suitable scheme for each quantisation object. 
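the overhead expression quoted above can be made concrete with a tiny helper; the window length n = 24 and the m = 7 feature variables come from the model description, while d_model = 64 is an assumed width used only for illustration.

def aq_overhead_ops(n, m, d_model):
    # zero-point corrections applied element-wise to the layer inputs (n x m)
    # and outputs (n x d_model), as described for the linear layer l1 above
    return n * m + n * d_model

# e.g. with n = 24 observations, m = 7 features and an assumed d_model = 64,
# the asymmetric scheme adds 24 * 7 + 24 * 64 = 1704 extra operations
print(aq_overhead_ops(24, 7, 64))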
our method effectively quantises the transformer model through both pure 8-bit quantisation and mixed-precision quantisation, achieving a balance between reduced computational overhead and improved precision. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/48.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/48.txt new file mode 100644 index 0000000000000000000000000000000000000000..a4ee5c2401aa821ba8d4cdb32cd890fd3e979189 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/48.txt @@ -0,0 +1 @@ +higher education has seen a significant shift towards online learning in recent years, and this trend has been accelerated by the covid-19 pandemic . many higher education institutions (heis) have had to quickly adapt to the challenges posed by the pandemic by transitioning to online classes and exams . it is unlikely that these trends towards online education will reverse in the near future notwithstanding the challenges encountered, since the benefits of remote learning have become appreciated by both heis and students alike .as the sector has increasingly moved online, concerns around academic integrity have also been amplified . the transition to online exams, in particular, has raised concerns about the potential for cheating and other forms of academic misconduct . this is due, in part, to the anonymity and lack of direct supervision that are inherent to online exams, as well as the ease with which students may be able to access and share resources during the exam.while concerns around academic integrity in online exams have been raised, there is a lack of definitive research with no consolidated literature reviews yet conducted quantifying the extent of dishonest practices in online assessments . indications are that the prevalence is on the rise. in earlier studies, fask et al. , corrigan-gibbs et al. , alessio et al. detect that significant rates of cheating occurred in online assessments, while in general, arnold mention that there is a belief among educators that academic misconduct is on the rise and that online assessment is particularly conducive to cheating. more recently, noorbehbahani et al. reported that cheating in online exams covering more than a decade of research, found that dishonesty in online exams is more prevalent than in traditional face-to-face exams.to preserve academic integrity in online exams, heis have implemented revised recommendations for formulating assessments , various technological strategies such as proctored exams , plagiarism detection software, exam security measures, as well as revisions of institutional academic integrity policies and educational campaigns to deter misconduct and honor codes corrigan-gibbs et al. . while these strategies individually or in tandem, may be effective in mitigating the risk of academic misconduct, there is currently insufficient evidence regrading their overall effectiveness in preserving academic integrity in online exams. meanwhile, ethical concerns surrounding the use of proctoring software on personal computers and their recent challenges on legal grounds have gained momentum.an additional measure that heis have explored is a shift towards using more challenging exam questions that require greater degrees of critical thinking. whisenhunt et al. 
1 note that these types of assessments comprising essays and short-answer responses are generally perceived by educators to be more suitable at measuring critical thinking as well as facilitating deeper learning . the underlying intention behind them is to move away from multiple-choice and simple information-retrieval questions since these types of questions are regarded as more susceptible to cheating when encountering misconduct involving unauthorized web access.however, a new threat to the academic integrity of online exams, even ones requiring high-order reasoning has emerged. with the recent2 public release of chatgpt by openai , the world has seen a significant leap in ai capabilities that involve natural language processing and reasoning. this publicly 3 available technology is not only able to engage in sophisticated dialogue and provide information on virtually all topics. it is also able to generate compelling and accurate answers to difficult questions requiring an advanced level of analysis, synthesis, and application of information, as will be demonstrated in this study. it can even devise critical questions itself, the very questions that educators in different disciplines would use for their students' evaluation of competencies. assuming that high-stakes exams will continue to be perceived as valuable and will continue to be used in education, this development may spell the end of the academic integrity of online examinations. it is therefore imperative that the capabilities of this ai agent be examined.finally, when chatgpt was posed the question: "how would you suggest that students are prevented from using chatgpt from cheating in online university exams?", the following response was given: "there are a few steps that universities could take in order to prevent students from using chatgpt or any other ai tool to cheat on online exams. firstly, universities could implement measures to prevent students from using chatgpt or any similar tool during the exam. secondly, universities could also implement measures to detect and deter students who attempt to use chatgpt or any other ai tool to cheat on the exam. for example, they could use plagiarism detection software to identify and flag any exam answers that are suspiciously similar to answers generated by chatgpt or any other ai tool. they could also use machine learning algorithms to analyze the exam answers and identify any patterns or anomalies that may indicate the use of unauthorized tools or resources. thirdly, universities could also educate students about the ethical implications of using chatgpt or any other ai tool to cheat on exams. this could help to prevent students from attempting to use chatgpt or any similar tool to cheat on the exam, and could encourage them to approach the exam honestly and ethically. in addition to the steps mentioned above, universities could also consider using ai to help prevent students from using chatgpt or any other ai tool to cheat on online exams. for example, they could use machine learning algorithms to analyze the exam answers and identify any patterns or anomalies that may indicate the use of unauthorized tools or resources. they could also use natural language processing (nlp) algorithms to detect any answers that are suspiciously similar to answers generated by chatgpt or any other ai tool. by using ai in this way, universities could improve their ability to detect and deter students who attempt to use chatgpt or any other ai tool to cheat on the exam. 
for example, they could use ai to improve the security of the exam platform, and to prevent students from accessing unauthorized tools or resources during the exam. they could also use ai to improve the user experience of the exam platform, and to make it easier for students to take the exam from any location. overall, there are several ways in which universities could use ai to help prevent students from using chatgpt or any other ai tool to cheat on online exams.• use multi-modal channels for exam questions: embedding images to exam questions can make it more difficult for students to cheat and for chatgpt to generate accurate responses, as the technology relies on text input only. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/480.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/480.txt new file mode 100644 index 0000000000000000000000000000000000000000..59edd6880af610300d27fd0b5da44fe68f539a11 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/480.txt @@ -0,0 +1 @@ +people plan hierarchically-from planning the next holiday to their long-term career. previous studies have investigated the principles that guide how people structure their behavioral hierarchies based on how they can simplify representational and computational costs (solway et al. 2014;correa et al. 2023) in simple and unstructured planning domains. we build on a task decomposition framework introduced by correa et al. (2023), which was able to predict hierarchical behavior in people in graph-structured planning tasks. while the framework accounts for the complexity of planning, it does not explain how people can plan so efficiently despite limited cognitive resources. one explanation could be the incorporation of structural information about the task to facilitate planning. binder et al. (2021) suggest that people exploit visual structure to inform their planning. we thus extended the framework of task decomposition to include structured information about the planning task through a heuristic search based on spatial distance and applied it to a more complex spatial navigation task. we tested our novel framework on a navigation planning experiment that consists of a large family of tasks, where participants must choose between two paths to take in order to solve a maze (see figure 1). our results suggest that our framework can predict the navigation choice of the majority of the participants in an online experiment. while the framework accounts for the complexity of planning, it does not explain how people can plan so efficiently despite limited cognitive resources. one explanation could be the incorporation of structural information about the task to facilitate planning.our modeling approach builds on the framework of task decomposition by minimizing the computational cost of plan-ning while maximizing task utility(correa et al. we formalize the task as (s, t, s 0 , g), which is defined by the set of possible states, s; an initial state, s 0 ; a goal state, g; and the set of possible state transitions t ⊆ s × s, so that s can transition to s ′ when (s, s ′ ) ∈ t . in our setting, we assume the task has been decomposed into a fixed set of subtasks z for simplicity, consisting of the two states adjacent to s 0 , a simple approach for formalizing the choice between the two halves of the maze. 
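a minimal sketch of this decomposition, assuming a grid maze with unit step costs, the manhattan-distance heuristic described in the next passage, and the number of nodes expanded by a* as a stand-in for planning cost (the excerpt does not state exactly how planning cost is counted); all names are illustrative.

import heapq

def manhattan(a, b):
    return abs(a[0] - b[0]) + abs(a[1] - b[1])

def a_star(start, goal, walls):
    # walls is a set of blocked cells and should include the maze border.
    # returns (path length, number of expanded nodes); the expansion count
    # is used here as a proxy for planning cost.
    frontier = [(manhattan(start, goal), 0, start)]
    best_g = {start: 0}
    expanded = 0
    while frontier:
        f, g, s = heapq.heappop(frontier)
        if g > best_g.get(s, float("inf")):
            continue
        expanded += 1
        if s == goal:
            return g, expanded
        for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            nxt = (s[0] + dx, s[1] + dy)
            ng = g + 1
            if nxt not in walls and ng < best_g.get(nxt, float("inf")):
                best_g[nxt] = ng
                heapq.heappush(frontier, (ng + manhattan(nxt, goal), ng, nxt))
    return float("inf"), expanded

def choose_subtask(s0, goal, walls):
    # the subtasks are the free states adjacent to s0; pick the one whose
    # total cost (planning cost + step cost) is lower
    candidates = [(s0[0] + dx, s0[1] + dy)
                  for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1))
                  if (s0[0] + dx, s0[1] + dy) not in walls]
    scores = {}
    for z in candidates:
        steps, plan_cost = a_star(z, goal, walls)
        scores[z] = plan_cost + steps
    return min(scores, key=scores.get), scores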
subtask-level planning decides which subtask to choose based on the expected reward and computational cost of visiting it en route to the goal.we assume the overall task of reaching g can also be a subtask for the purpose of action-level planning. note that while we fixed subgoals above, the choice between subtasks here still balances task reward and computational cost. action-level planning finds a sequence of states from a start state s until the subtask z is completed by reaching z sg , π = ⟨s 0 , s 1 , .we incorporate structural information by using a*(hart, nilsson, and raphael 1968) with a spatial heuristic cost based on the manhattan distance defined as h(s; g) = |s xg x | + |s y -g y | given states with coordinates s = (s x , s y ). the subtask associated with the red path has a lower planning cost (planning cost 53, step cost 26, total cost: 79) than the other subtask with the blue path (planning cost 63, step cost 26, total cost 89). each of the resulting 48 mazes can be decomposed into two subtasks, where one subtask was designed to have a higher planning cost than the other subtask. our model hypothesizes that people would choose the subtask with the lower overall cost, which is the sum of the optimal path length and planning cost.to test this hypothesis, we recruited 41 participants (one participant was excluded due to not completing the task) on prolific to each navigate through 12 different mazes (two mazes were randomly sampled from each set to ensure that participants saw two of each base maze).we observed that after the third trial, the majority of participants chose the subtask which, according to our model, is more favorable due to a lower planning cost (see figure2). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/481.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/481.txt new file mode 100644 index 0000000000000000000000000000000000000000..39ba9dace1f6e01d08154d4d03967dbb172c8548 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/481.txt @@ -0,0 +1 @@ +modern deep neural networks have demonstrated remarkable performance in various complex tasks, including image classification and face recognition, among others. however, prior works have pointed out that deep learning models are sensitive to small changes in the input and can be fooled by carefully chosen and imperceptible perturbations szegedy et al. (2014); goodfellow et al. (2015); papernot et al. (2016b); madry et al. (2018). these adversarial attacks can be generally classified into white-box and black-box attacks. in a white-box setting, strong attacks such as projected gradient descent (pgd) madry et al. (2018) can generate effective adversarial examples by levering the information inside the model. however, in practical scenarios such as machine learning as a service (mlaas), the well-trained models and the training datasets are often inaccessible to the users, especially in the era of large models. hence, query-based black-box attacks become the primary threats in most real-world applications, where the adversary is assumed to have no knowledge of the model architecture and parameters. this paper proposes a lightweight, plug-and-play defensive method that can significantly decrease the success rate of query-based black-box attacks, including both score-based and decision-based attacks ilyas et al. (2018;2019); andriushchenko et al. (2020); guo et al. (2019); al-dujaili & o'reilly (2020); liu et al. (2019); chen & gu (2020); chen et al. 
(2020b); rahmati et al. (2020). adversarial examples generated through query-based attacks involve iterative procedures that rely on either local search techniques involving small incremental modifications to the input or optimization methods leveraging estimated gradients of the adversary's loss concerning the input. however, the process of requesting numerous queries is time-consuming and sometimes may raise suspicions with the presence of multiple similar queries. hence, the objective of defense is to perplex the adversary when attempting to generate adversarial examples. our proposed method accomplishes this by introducing noise into the feature space. unlike previous randomized defense approaches that solely rely on empirical evaluations to showcase effectiveness, this paper provides both theoretical analysis and empirical evidence to demonstrate improved robustness. our analysis also highlights the importance of strategically introducing noise to specific components of the model based on the gradient of the adversarial objective function, which can be dynamically adjusted throughout the attack process.our contributions can be summarized as follows:• we investigate the impact of randomized perturbations in the feature space and its connection to the robustness of the model to black-box attacks.• we design a simple yet effective and lightweight defense strategy that hampers the attacker's ability to approximate the direction toward adversarial samples. as a result, the success rate of the attacks is significantly reduced.• we extensively evaluate our approach through experiments on both score-based and decision-based attacks. the results validate our analysis and demonstrate that our method enhances the robustness of the randomized model against query-based attacks.small noise defense (snd)byun et al. for the query-based attack, an attacker can only access the input and output of the model; thus, the attacker cannot compute the gradient of the objective function with respect to the input x. mean of the randomized model f rand with input x is exactly the prediction of the original model for x. for random-search attacks, when the sign of l(f rand (x + ηu), y) -l(f rand (x), y) and the sign of l(f (x + ηu), y) -l(f (x), y) are different, the attacker chooses the opposite action to the optimal one.this theorem states that the robustness of the randomized model is controlled by both (i) the ratio between the defense and attack noises and (ii) the ratio of the norm of the gradient with respect to the feature h(x) and the norm of the gradient with respect to the input x. intuitively, the perturbations added by the attacker and by the defense induce a corresponding noise in the output; if the attack noise is dominated by the defense noise, the attacker cannot perceive how its update affects the model. in contrast, if the gradient norm with respect to the randomized layer is large for samples that have large l, the robustness of the model for the correctly classified samples will be high; thus, adding noise to this layer makes the model more robust against black-box attacks., without any defense), both randomized feature and input defenses significantly improve the models' robustness against score-based attacks. 
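a minimal sketch of the randomized feature defense idea, assuming a single injection point where fresh gaussian noise is added to an intermediate feature h(x) at every forward pass; the module and layer sizes are illustrative, not the paper's architecture.

import torch
import torch.nn as nn

class RandomizedFeatureDefense(nn.Module):
    # wraps a backbone and a head; at every forward pass, gaussian noise is
    # added to the intermediate feature before the final layers
    def __init__(self, backbone, head, noise_std=0.05):
        super().__init__()
        self.backbone = backbone
        self.head = head
        self.noise_std = noise_std

    def forward(self, x):
        h = self.backbone(x)
        h = h + self.noise_std * torch.randn_like(h)   # fresh noise per query
        return self.head(h)

# usage sketch: two identical queries now yield slightly different scores, so
# the sign comparisons used by score-based attacks become unreliable whenever
# the induced output noise dominates the attacker's probe step
backbone = nn.Sequential(nn.Flatten(), nn.Linear(784, 128), nn.ReLU())
head = nn.Linear(128, 10)
model = RandomizedFeatureDefense(backbone, head, noise_std=0.05)
x = torch.randn(1, 1, 28, 28)
print(model(x) - model(x))   # nonzero: repeated queries differ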
furthermore, for square attack and signhunt, which are strong adversarial attack baselines, randomized feature defense consistently achieves better performance on all 6 models, which supports our theoretical analysis in section 3.1% ro-bust accuracy under square attack, while the robust accuracy of the randomized input defense is 2% lower. in summary, randomized feature defense consistently achieves high robustness on most models except resnet50 where the robustness is similar to randomized input defense. similar to imagenet, randomized feature defense achieves significantly better robustness than randomized input defense in most experiments. for square attacks on resnet50 and deit, while the best robustness is achieved by randomized input defense, randomized feature defense is more robust when the defender sacrifices 2% clean-data accuracy. however, at the selected defense noise scales corresponding to the same clean accuracy drop, our defense is still more robust than randomized input defense; this improved robustness again can be explained by the analysis in section 3. while aaa is optimized for score-based attacks directly and thus is successful in fooling these attacks (as seen in table3in supplementary), the results show that aaa is not effective in defending against decision-based attacks, while randomized feature and input defenses improve the robustness. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/482.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/482.txt new file mode 100644 index 0000000000000000000000000000000000000000..e30e5d4ccf3290a6480df3bc5fc946ca37d1469c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/482.txt @@ -0,0 +1 @@ +w e consider the problem of reconstructing an unknown function f , defined over a set x, from noisy outputs y i collected over input locations x i . a typical measurements model iswhere the errors ν i are zero-mean random variables and µ is also called the regression function.given any x ∈ x, it represents the optimal predictor of future data in the mean squared sense.in supervised learning, the input locations are also seen as random variables and the couples {x i , y i } n i=1 are realizations from a (typically unknown) probability distribution. this particular version of the problem is also known as nonparametric regression under random design in statistical literature . many forms of regularization have been studied in the literature for supervised learning. examples include spectral regularization methods, like early stopping and boosting , and kernel-based regularization which will be the focus of this paper. within this framework, the function f is assumed to belong to a reproducing kernel hilbert space (rkhs) h, a particular hilbert space associated with positive-definite kernels . adopted estimators try to balance data fit and a regularizer built using the rkhs norm. they assume the form μn = arg min f ∈h j (f (x 1 ), f (x 2 ), . . . , f (x n ), f h ) (2) with j monotonically increasing w.r.t. the last argument and f h typically enforces smooth solutions. such formulation includes as special cases support vector machines and kernel ridge regression . in particular, this last instance, known in the literature also as regularization nework or regularized least squares , will be the focus of this paper. 
it arises when quadratic losses are used to measure the discrepancy between the data y i and the predictions f (x i ).kernel-based estimators (2) have been widely studied in the literature, with many years of theoretical developments. one central theme is the derivation of conditions under which μn converges to the regression function f as the data set size n grows to infinity. the literature on this subject is enormous and we cite e.g. where convergence rates and generalization properties of kernel-based estimates are obtained.a common feature of all of the above mentioned works is that the sampling distribution p of the input locations is assumed time-invariant. hence, the input locations x i form a stationary stochastic process. an extension can be found in where p is replaced with a convergent sequence of probability measures, so that stationarity of x i holds asymptotically. in this paper we instead assume that the input locations distribution may vary freely in time over an arbitrary set p. this point is important in many engineering applications, e.g. in exploration-exploitation problems and coverage control where a set of agents has to simultaneously explore the environment and reconstruct a sensorial field . note that in this setting the stochastic mechanism underlying the input locations establishes how the agents move inside the domain of interest. hence, it is convenient to update the movements rules on the basis of the acquired knowledge. this latter can consist of the current filed estimate, which can suggest the most interesting areas to monitor, or the agent could also follow external directives, independent of the field, requiring occasionally to inspect regions where an event is reported. the time-varying distributions might be gaussian whose means and variances change over time on the basis of such directives. under this complex scenario, our aim is to derive conditions that ensure convergence of kernelbased estimators, in particular regularized least-squares, to the regression function. this will be obtained through a non trivial extension of the statistical learning estimates derived in . we will see that the convex hull cop of the set p plays a special role. in fact, the learning rate is related to smoothness of the unknown function and absolute summability of some covariances computed over cop.the paper is organized as follows. in section 2, we first recall the specific form of the kernelbased estimator studied in the paper and state our assumptions on the unknown function and the data generator. in section 3, the main convergence result is reported while section 4 contains a numerical experiment. conclusions then end the paper.w e consider the problem of reconstructing an unknown function f , defined over a set x, from noisy outputs y i collected over input locations x i.in supervised learning, the input locations are also seen as random variables and the couples {x i , y i } n i=1 are realizations from a (typically unknown) probability distribution. within this framework, the function f is assumed to belong to a reproducing kernel hilbert space (rkhs) h, a particular hilbert space associated with positive-definite kernels. they assume the formμn = arg min f ∈h j (f (x 1 ), f (x 2 ), .let h be the rkhs induced by the mercer kernel k : x × x → r, with norm denoted by • h .the kernel k inducing the rkhs h is assumed to be mercer (continuous) and the input space x is a compact set of the euclidean space. 
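for concreteness, a small numpy sketch of the regularized least-squares (kernel ridge regression) estimator with a gaussian (mercer) kernel; the regularizer scaling λn is one common convention, and the data-generating function is purely illustrative.

import numpy as np

def gaussian_kernel(A, B, gamma=1.0):
    # k(x, x') = exp(-gamma * ||x - x'||^2), a mercer kernel on a compact domain
    d2 = ((A[:, None, :] - B[None, :, :]) ** 2).sum(-1)
    return np.exp(-gamma * d2)

def fit_regularized_ls(X, y, lam=1e-2, gamma=1.0):
    # representer theorem: the minimizer is f(x) = sum_i c_i k(x, x_i),
    # with c obtained from the linear system (K + lam * n * I) c = y
    n = len(X)
    K = gaussian_kernel(X, X, gamma)
    c = np.linalg.solve(K + lam * n * np.eye(n), y)
    return lambda Xnew: gaussian_kernel(Xnew, X, gamma) @ c

# usage: input locations drawn from a sampling distribution over a compact set
rng = np.random.default_rng(0)
X = rng.uniform(0.0, 1.0, size=(200, 1))
y = np.sin(6 * X[:, 0]) + 0.1 * rng.standard_normal(200)
mu_hat = fit_regularized_ls(X, y, lam=1e-3, gamma=30.0)
print(mu_hat(np.array([[0.5]])))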
to introduce it, let p be a probability density function on x while l 2 p denotes the lebesque space with the measure induced by p.then, the kernel operator is l p : l 2 p → h, and maps any f ∈ l 2 p into h where.the level of smoothness of h is now measured computing the norm in l 2 p of l -r p , r > 0. each input location x i is a random vector randomly drawn from a sequence of probability densities p i ∈ p. this just implies that any selected p allows the agent to visit all the input space x, a minimal condition to have consistency for any possible choice of p. for any time instant t, let s t : h → r t be the sampling operator defined by s t (f ) = {f (x i )} t i=1 with its adjoint s ⊤ t : r t → h given by s ⊤ t c = t i=1 c i k(x i , ).where the expectation takes into account all the randomness of the data {x i , y i } t i=1 and possibly also that underlying the operators l p i since they depend on the choice of p 1 , .since the noises ν i have all zero-mean and independent of the input locations x i , and recalling the definition of l p i , taking the expectation we also obtain.1: the input locations {x i } 2000 i=1001 are drawn from the blue curve while the last set {x i } 3000 i=2001 is generated using the red one. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/483.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/483.txt new file mode 100644 index 0000000000000000000000000000000000000000..f064000b51651ecc4fe51925e6d56883b9e9cae8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/483.txt @@ -0,0 +1 @@ +in this paper we consider universality of almost periodicity. more precisely, without any use of the fourier transform, we construct a suitable recursive formula which approximates the corresponding bounded discrete time series in a local time interval, and the corresponding explicit formula tends to an almost periodic function as time tends to infinity.let y : z → be a bounded discrete time series arbitrarily chosen, and first let us discretize the range as follows: for k ∈ z ≥1 , we choose {a• sup 1≤k≤k+1 |a k k-1 -a k k | ≤ ∃ c/k. note that the above second condition is almost always satisfied in the following sense.proposition 1. there is a closed set z of k whose lebesgue measure is zero such that the above second condition is satisfied as soon asproof. we drop the superscipt k for simplicity. regard a i (1 ≤ i ≤ k) as indeterminates. clearly the measure of the set where a i = 0 is zero. under the assumption that any a i is not zero, the conditionℓ for some ℓ (then the term ℓ a k ℓ does not vanish). hence by the following simple fact, the inequality holds for almost all values of a i :• let f be a nonzero polynomial in m variables. then the lebesgue measure of the zero point set {x ∈ r m : f (x) = 0} is zero.we now discretize y as follows:(1) ȳk (t) := arg minwhere a -0 := a -ε for any sufficiently small ε > 0. in what follows we sometimes omit the index k unless confusion occurs (for example, ȳ = ȳk and ū = ūk ).to define a suitable statistical feature, we need to classify patterns in ȳ. let σ n (n = 1, 2, • • • , n ) be a permutation operator, namely,), and we impose the following two conditions for determining n :(2)note that n ≤ k l due to the sequence with repetition. the crucial statistical feature is the following: for any y : z → , we have the following conditional probability: there exists a δ k ∈ such that the following inequality holds:for any k ∈ z ≥1 and t ∈ z ≥0 . 
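one reading of the range-discretisation step in eq. (1) is simply mapping each observation of the bounded series to the nearest grid level a_k; a small sketch under that assumption, with an illustrative series and grid:

import numpy as np

def discretise(y, levels):
    # map each observation to the nearest grid value a_k (one reading of eq. (1));
    # the grid is assumed to be a fixed, sorted set of k levels
    levels = np.asarray(levels)
    idx = np.abs(y[:, None] - levels[None, :]).argmin(axis=1)
    return levels[idx]

y = np.sin(0.3 * np.arange(50)) + 0.05 * np.random.randn(50)   # bounded series
grid = np.linspace(-1.2, 1.2, 9)                               # k = 9 levels
y_bar = discretise(y, grid)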
moreover there exists a discretized almost periodic function apremark 1. deriving the inequality (6) itself is rather obvious and quite natural due to (3). the nontrivial discovery is explicit construction of u (see ( 10)), and it tends to the discretized almost periodic function, without any use of fourier transform.corollary 3. let us mention the dynamical system case (c.f. ). with the aid of takens' embedding theorem (see also ), it suffices to consider in a delay coordinate. letwhere ū is generated by (10), and assume there exists a map φ :, we give the simplest example of φ). then we see that, for any t ∈ z ≥0 , there exists t * ∈ z ≥0 such thatletand assume β ∈ r ≥1 . by applying the above inequalities inductively, then we havethis means thatin other words,note that the case β < 1 is trivial, since φ becomes a contraction mapping. again, there exists a discretized almost periodic function ap more precisely, without any use of the fourier transform, we construct a suitable recursive formula which approximates the corresponding bounded discrete time series in a local time interval, and the corresponding explicit formula tends to an almost periodic function as time tends to infinity.let y : z → be a bounded discrete time series arbitrarily chosen, and first let us discretize the range as follows: for k ∈ z ≥1 , we choose {a.• sup 1≤k≤k+1 |a k k-1 -a k k | ≤ ∃ c/k. there is a closed set z of k whose lebesgue measure is zero such that the above second condition is satisfied as soon as. then the lebesgue measure of the zero point set {x ∈ r m : f (x) = 0} is zero. the crucial statistical feature is the following: for any y : z → , we have the following conditional probability: there exists a δ k ∈ such that the following inequality holds:.for any k ∈ z ≥1 and t ∈ z ≥0 . the nontrivial discovery is explicit construction of u (see (10)), and it tends to the discretized almost periodic function, without any use of fourier transform. x is a regular matrix for almost all h : r → in the following sense. then there is a closed set z of r m whose lebesgue measure is zero such that x is regular as soon as (h(γ 1 ), .there is a smooth function h : r → r such that the absolute value of any eigenvalue of the w and w t w are strictly less than 1 (c. now we show that ūk (t) tends to a discretized periodic function for any k. since the number of permutation operators {σ n } n n=1 is finite, then there exist m, m ∈ z ≥2 (m < m ) such that (σ jm , a k(jm) ) = (σ jm , a k(jm ) ). this means that there exists t k such that ūk (t) is a periodic chain for t > t k . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/484.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/484.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c02c846aa77f84378dee368470bc4427b77727c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/484.txt @@ -0,0 +1 @@ +an ultimate goal of deep learning theories is to characterize the network parameters obtained by deep learning. we may formulate this characterization problem as a functional equation problem: let h denote a class of data generating functions, and let dnn denote a certain deep neural network with parameter γ. 
given a function f ∈ h, find an unknown parameter γ so that network dnn represents function f , i.e.by haar's theorem, if g is a locally compact group, then there uniquely exist left and right invariant measures d l g and d r g, satisfying for any s ∈ g and.the definition of koopman operators seems a trivial rewriting, but the strength is the so-called linearization effect that in the raw form ψ • g the dependence on g is nonlinear, whereas in the operator form k g the dependence on k g is linear, i.lemma 5 let g be a group of invertible maps g : x → x with product gh = g • h and left action g • x = g(x). then k : g → u (l 2 (x)) is a unitary representation of g acting from right on l 2 (x, d l x). namely, for any g, h ∈ g and ψ, ϕ ∈ l 2 (x, d l x),. the key concept is to identify each hidden layer, say g, with an element of a group g acting on the input space x, and the composite of hidden layers, say g • h, with the group operation gh. since a group is closed under its operation by definition, the proposed network can represent literally any depth such as a single hiden layer g, double hidden layers g • h, triple hidden layers g • h • k, and infinite hidden layers g • h • • • • .let g be a locally compact group equipped with an invariant measure dg, let x be a gspace equipped with invariant measure dx (induced from dg), and let l 2 (x) be the hilbert space of square integrable functions on x. however, the obtained solution is (1) less informative because hidden layer g f can be independent of f thus remain as a hyper-parameter, and (2) less feasible because computing pseudo-inverse k † g is in general another hard problem.the integration over g means that the entire network dnn is a γ-weighted parallel connection of (at most infinite) subnetworks {ψ • g | g ∈ g}. for example, if γ has a high intensity at g 0 ∈ g, then the subnetwork g 0 is considered to be essential for the entire network to express given f . we remark that the integral form is another linearization trick, since in the single operator form k g the dependence on raw g is still nonlinear, whereas in the integral form ⟨γ, k.in the following, we use right invariant measure d r g for l 2 (g) and left invariant measure d l x for l 2 (x) so that the koopman operator k becomes a unitary representation of g acting from right on l 2 (x, d l x) (as discussed in lemma 5).in particular, it leads to a constructive proof without handcrafting feature maps because the closed-form expression (10) of the ridgelet transform explicitly indicates which feature map ψ • g to use (from the pool of candidate subnetworks {ψ • g | g ∈ g}) by weighting on them.therefore, k g commutes with dnn ψ • r ψ for all g ∈ g as below. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/485.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/485.txt new file mode 100644 index 0000000000000000000000000000000000000000..8228b0fb4c49b8a7b1e6964a576de4b4cea9f790 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/485.txt @@ -0,0 +1 @@ +human vision has the innate capability of recognizing new categories when a person is shown just a few samples of that category. for instance, when a person is shown a couple of images of an unseen person or an unseen category, he can recognize the new face quickly by implicitly drawing connections from the acquired prior knowledge. 
although deep neural networks trained on millions of images have in some cases exceeded human performance in large-scale image recognition, under an open-world setting with emerging new categories it remains a challenging problem how to continuously expand the capability of an intelligent agent from limited new samples, also known as few-shot learning. moreover, in many machine learning applications, training data and labels are limited and require collection/annotation of new data, which can be prohibitive. however, modern machine learning models, including deep neural networks, often require large amounts of training data to learn good representations needed for downstream tasks. as a result, models that can learn how to solve tasks with little training data are desirable. meta-learning essentially aims to solve this issue without having to re-train a base model on the data from the new classes. few-shot classification is an instantiation of meta-learning in the field of supervised learning. after splitting into training and testing sets (with different classes), each dataset d is split into two parts, a support set s for learning and a query/prediction set b for training or testing, d = ⟨s, b⟩. often we consider a k-shot n-way classification task: the support set contains k labelled examples for each of n classes. intuitively, the goal of the model is to learn from the small subset of data, i.e., the support set s, so that it can classify the data points in the query set b effectively. in computer vision, metric-based learning algorithms are the most commonly used, where a distance metric is learned between the support set and the query set features to perform the required task at hand. an episode consists of the support set s, which is the 'training set' for the episode, while the query set b is the 'testing set' for the episode. rather than training over a mini-batch of training data examples, in meta-learning an epoch comprises a mini-batch of episodes, allowing the model to 'learn how to learn'. recent work by papyan et al. identified the neural collapse phenomenon, in which the last-layer features of each class concentrate around their class means. this in turn simplifies the behaviour of the last-layer classifier to that of a nearest-class-center decision rule (nc4).
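a minimal sketch of metric-based episodic classification with class prototypes and an ℓ2 nearest-center rule, matching the prototype construction spelled out in the next passage (eq. (1)); the embedding network and episode sizes are illustrative.

import torch

def prototypes(embed, support_x, support_y, n_classes):
    # class prototype v_c = mean of the embedded support points of class c
    z = embed(support_x)                                   # (n_support, d)
    return torch.stack([z[support_y == c].mean(dim=0) for c in range(n_classes)])

def classify_queries(embed, query_x, protos):
    # nearest-class-center rule: assign each query to the closest prototype in l2
    zq = embed(query_x)                                    # (n_query, d)
    d = torch.cdist(zq, protos)                            # (n_query, n_classes)
    return d.argmin(dim=1), -d                             # labels and (-distance) logits

# usage sketch for a 2-shot 5-way episode with an illustrative embedding
embed = torch.nn.Sequential(torch.nn.Flatten(), torch.nn.Linear(784, 64))
support_x = torch.randn(10, 1, 28, 28)
support_y = torch.arange(5).repeat_interleave(2)
query_x = torch.randn(10, 1, 28, 28)
protos = prototypes(embed, support_x, support_y, n_classes=5)
pred, logits = classify_queries(embed, query_x, protos)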
while neural collapse has been evaluated for the transfer learning setting for few-shot learning, the majorly used meta-learning frameworks have not been analyzed for neural collapse phenomenon.where f θ is the embedding model, s c is the labeled support set data points and |s c | is the number of support set data points for the respective class (equal to k, i.the query set vectors in u ∈ b are embedded by the model and the ℓ 2 distance is taken between u and v c (from equation1) for c ∈ c where c = {1, 2, . secondly, the query set and support set collapse metrics are quite similar, suggesting that the model learns the representation space of the class feature vectors quickly (allowing it to produce a similar structure across both sets of vectors.another interesting observation is that the classification decision in prototypical networks is identical to n c 4 , raising the question of whether these models 'force' the model to learn structures with distinct decision boundaries across all combinations of classes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/486.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/486.txt new file mode 100644 index 0000000000000000000000000000000000000000..e8a0192c503525c2efcf40e73cc1522e49357751 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/486.txt @@ -0,0 +1 @@ +the popularity of high-speed railways is on the rise, driven by their many benefits, including lower energy consumption, increased transport capacity, reduced pollution, time efficiency, safety, and comfort , . to meet growing demand for mobile wireless services, broadband communication has become essential in high-speed train systems , . the utilization of millimeter wave (mmwave) frequency bands in fifth-generation (5g) and beyond (b5g) cellular networks offer the potential for accommodating exponentially increasing data rates through new spectrum bands . nevertheless, mmwave bands come with challenges such as high path loss and susceptibility to rapidly changing channel conditions. to this end, dense deployment of mmwave cells is needed to ensure sufficient coverage and capacity, and the integration of edge computing further complicates these challenges.high-speed rail networks experience frequent handovers (hos) due to the dense deployment of 5g mmwave base stations (bss) and the high speeds of trains. this results in increased signalling overhead and latency, highlighting the significance of effective ho management and resource allocation during user association. load balancing through user association has emerged as a promising approach to handle higher data rates, manage cell congestion, and optimize wireless resource allocation across multiple links , . load imbalance leads to an unfair distribution of data rates among users, resulting in inconsistent quality of experience (qoe) for users. to handle this issue, load balancing is employed to transfer the users from overloaded cells to lightly-loaded cells, ensuring a fair distribution of the network load among cells.ho is a critical process in high-speed rail networks, providing seamless call switching between cells as the users move within the cellular coverage area. proper ho management is vital to maintain quality of service (qos), prevent reductions in service interruptions and average throughput. key ho control parameters (hcp), such as ho margin (hom) and time-totrigger (ttt), play a crucial role in load balancing. 
hom adjusts the cell coverage by modifying the pilot power value of the cells, making the underloaded cells more appealing for user association. while this improves overall system traffic handling and increases user throughput, it can lead to lower signal-to-interference-plus-noise ratio (sinr) for users at the cell edge. as a result, setting the hom value appropriately becomes crucial for ensuring efficient ho processes. another important parameter is the ttt interval, which regulates the duration for testing received signal strength before executing a ho. reducing the ttt time allows for earlier handovers from overloaded cells to underloaded cells, leading to improved average throughput and a lower call dropping ratio (cdr). however, low ttt settings may increase ho probability (hop) and ho ping-pong (hopp), while high ttt settings can impede offloading from overloaded cells, thereby causing delays in hos for users experiencing poor communication quality. this study presents insight into the deployment of ml methods for kpi prediction in the high-speed train scenario in 5g and b5g mobile networks. ml prediction with nested cross validation may enhance prediction performance by increasing the generalization ability of ml methods. this will pave the way for future research and applications. since the performance of the boosting methods has greatly improved with the nested validation scheme, more efficient load balancing algorithms may be proposed with this ml modelling of high-speed train system scenarios. in addition, this ml modelling may be applied to different kpi prediction scenarios in 5g and b5g network systems as well. this paper is structured as follows: section ii surveys the relevant literature on this topic. section iii introduces the system and simulation models. section iv provides an overview of the ml techniques employed. section v discusses data collection, processing, and the practical application of our methods to address the problem. section vi presents the outcomes obtained by employing the ml methods. finally, section vii concludes the paper with a summary of the key findings and concluding remarks. these methods were used in a previous study with the same data utilized in this paper as well, and this study aims to extend the previous work by approaching the same problem with a different scheme, so we use the same ml methods as the previous study for better comparison of the results. in the deployment in this paper, we use nested cross validation rather than conventional cross validation for training the ml methods. we present the obtained results of the ml deployment and discuss them against the results previously obtained with the same data in ref. in addition, we discuss the performance of ml methods with nested cross validation (denoted by *) with respect to the conventional cross validation results. in our deployment of nested cross validation in this paper, we use a 6-fold split for the outer cross validation set and a 4-fold split for the inner cross validation set. in the non-nested cross validation scheme, 10-fold cross validation was performed by the ml methods used in ref. boosting methods that use nested cross validation yield promising results when compared to their non-nested cross validation counterparts.
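a minimal sketch of the nested scheme described above, with a 4-fold inner loop for hyper-parameter selection and a 6-fold outer loop for error estimation; the boosting model, parameter grid, and synthetic data are illustrative stand-ins for the paper's kpi dataset.

from sklearn.datasets import make_regression
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import GridSearchCV, KFold, cross_val_score

X, y = make_regression(n_samples=300, n_features=8, noise=5.0, random_state=0)

# inner loop (4-fold) tunes hyper-parameters; outer loop (6-fold) estimates
# generalization error on folds never seen during tuning
inner_cv = KFold(n_splits=4, shuffle=True, random_state=0)
outer_cv = KFold(n_splits=6, shuffle=True, random_state=0)
param_grid = {"n_estimators": [100, 300], "max_depth": [2, 3]}
model = GridSearchCV(GradientBoostingRegressor(random_state=0),
                     param_grid, cv=inner_cv,
                     scoring="neg_mean_absolute_error")
nested_mae = -cross_val_score(model, X, y, cv=outer_cv,
                              scoring="neg_mean_absolute_error")
print(nested_mae.mean())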
however, svr, mlp, knnr, and krr methods with nested scheme do not produce fully superior results over the results of non-nested scheme.78, 0.051 for the same features with svr. in the deployment stage, dataset used is processed for the relevant ml methods, and this is followed by deployment of ml methods with nested cross validation scheme. results of the methods with nested cross validation scheme are compared with its non-nested scheme with the same methods in a previous study in terms of mae and mse metrics. on the other hand, svr, mlp, knnr, and krr methods achieve the best results for some outputs with nested cross validation scheme. hence, nested cross validation scheme has produced superior performance for the boosting methods with nested scheme, however, it has not produced fully superior performance with svr, knnr, krr, and mlp. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/487.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/487.txt new file mode 100644 index 0000000000000000000000000000000000000000..773a7fb299770d8f1d1c1d6cf14fdfc8294f1f1d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/487.txt @@ -0,0 +1 @@ +the fifth-generation (5g) of mobile networks has revolutionized wireless communication with its promise of high data rates, increased efficiency, and improved network performance - . to fully leverage the capabilities of 5g, accurate path loss prediction plays a vital role in tasks such as network planning, coverage estimation, and optimization.the path loss prediction in wireless communication systems traditionally relies on two main types of models: deterministic models , and empirical models , . deterministic path loss models are based on the physical principles of wave propagation and use computational techniques such as ray tracing or finite-difference time-domain (fdtd) simulations to predict the path loss. these models take into account detailed environmental information, including geometry, material properties, and electromagnetic characteristics, to predict the path loss. by accurately modelling the physical mechanisms of wave transmission, reflection, and diffraction, deterministic models offer the potential for high accuracy in path loss prediction. however, their drawbacks include the need for extensive computational resources and time, making them less practical for real-time applications. additionally, deterministic models often require subjective parameter settings and assumptions, which may introduce uncertainties and challenges in capturing the full complexity of wireless propagation in different scenarios.in contrast, empirical models are statistical models that derive their parameters from measured data collected in specific propagation environments. these models rely on the analysis of large-scale measurement data to establish statistical relationships between the path loss and various propagation parameters. these models are relatively simple and computationally efficient, making them attractive for quick estimations and initial network planning. however, their accuracy may be limited when applied to generalized wireless environments that differ significantly from the conditions in which the models were derived. empirical models often assume simplified conditions, and may not capture the full complexity of wave propagation effects, such as scattering, diffraction, or variations in environmental characteristics. 
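As a concrete instance of the empirical models discussed above, the classic log-distance model PL(d) = PL(d0) + 10·n·log10(d/d0) fits a reference loss and a path loss exponent n to measurements. The sketch below uses synthetic stand-in data; the numeric values are illustrative only.

```python
# A minimal example of an empirical path loss model: the log-distance model fitted
# to (synthetic) measurements by ordinary least squares.
import numpy as np

rng = np.random.default_rng(0)
d = rng.uniform(10, 500, size=200)                                        # link distances in metres
pl_meas = 32.0 + 10 * 3.2 * np.log10(d) + rng.normal(0, 4, size=d.size)  # dB, with shadowing

# Design matrix [1, 10*log10(d)] -> coefficients [reference loss, path loss exponent n]
A = np.column_stack([np.ones_like(d), 10 * np.log10(d)])
(pl0, n), *_ = np.linalg.lstsq(A, pl_meas, rcond=None)
print(f"fitted reference loss {pl0:.1f} dB, path loss exponent {n:.2f}")
```

Its simplicity is exactly the point made in the text: the fit is cheap, but a single exponent cannot capture scattering, diffraction, or site-specific effects.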
both deterministic and empirical models have their strengths and weaknesses in the path loss prediction . the deterministic models can provide high accuracy when detailed environmental information is available, but their computational complexity and reliance on assumptions may restrict their practicality. on the other hand, the empirical models offer simplicity and efficiency but they may lack accuracy in generalized scenarios. in recent years, machine learning (ml) has emerged as a promising alternative approach for path loss prediction. ml models have potential to overcome the limitations of traditional models by leveraging comprehensive datasets and powerful algorithms to learn patterns and relationships directly from data, leading to accurate and efficient path loss predictions in diverse wireless propagation environments.by harnessing the capabilities of ml algorithms and leveraging large-scale datasets, ml-based models have the potential to surpass traditional methods in terms of accuracy and computational efficiency - . unlike empirical models and deterministic methods, ml models can automatically learn complex patterns and relationships from data, enabling them to capture the intricacies of wireless propagation in a more precise and adaptable manner. moreover, ml-based approaches can handle diverse environmental conditions, frequency bands, and propagation scenarios, making them suitable for the complex and dynamic nature of wireless networks. in the existing literature, several research studies have explored the application of ml methods for path loss prediction in different wireless communication environments.ref. presents a study on predicting path loss models in urban cellular networks using ml techniques. the research evaluates the performance of three machine learning algorithms, support vector regression (svr), random forest (rf), and k-nearest neighbor (knn), for path loss prediction in a long term evolution (lte) network scenario at 2.1 ghz. the evaluation encompasses both line-of-sight (los) and non-lineof-sight (nlos) propagation conditions, utilizing a path loss dataset generated from ray tracing simulations. the study compares the performance of the ml methods with the widely used cost231 walfisch-ikegami empirical model. the findings highlight the better performance achieved by all ml algorithms in path loss prediction, with root-mean-square errors ranging from 2.1-2.2 db for los and 3.4-4.1 db for nlos locations. ref. introduces application of various ml-based approaches for the path loss prediction in a smart campus environment. the study demonstrates better performance of ml algorithms, including artificial neural network (ann) and rf, compared to the conventional cost-231 hata model.ref. presents a path loss prediction method for urban areas using convolutional neural networks (cnn) and data extracted from online sources, such as openstreetmap and other geographical information systems. the approach incorporates top-view images and building footprint data to improve the accuracy of path loss estimation. the proposed model is compared with ray tracing and exhibited faster prediction speed while maintaining accuracy in path loss prediction.ref. studies the use of ml algorithms to predict path loss in tropical regions. the study includes a measurement campaign at broadcasting stations in akure metropolis, nigeria, and demonstrates the accuracy and feasibility of ml-based models. 
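The SVR / random forest / k-NN comparison cited above (evaluated by RMSE against ray-tracing or measured data) follows a standard regression recipe. The sketch below runs on synthetic stand-in features and is not the cited studies' dataset or tuning.

```python
# Sketch of an SVR / random forest / k-NN path loss comparison on toy data.
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsRegressor
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVR

rng = np.random.default_rng(1)
# toy features: distance [m], frequency [GHz], LOS flag; target: path loss in dB
X = np.column_stack([rng.uniform(10, 500, 1000), rng.uniform(2, 30, 1000), rng.integers(0, 2, 1000)])
y = 32 + 22 * np.log10(X[:, 0]) + 20 * np.log10(X[:, 1]) + 8 * (1 - X[:, 2]) + rng.normal(0, 3, 1000)

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=1)
models = {
    "svr": make_pipeline(StandardScaler(), SVR(C=10.0, epsilon=0.5)),
    "rf": RandomForestRegressor(n_estimators=200, random_state=1),
    "knn": make_pipeline(StandardScaler(), KNeighborsRegressor(n_neighbors=7)),
}
for name, model in models.items():
    model.fit(X_tr, y_tr)
    rmse = np.sqrt(mean_squared_error(y_te, model.predict(X_te)))
    print(f"{name}: RMSE = {rmse:.2f} dB")
```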
the study also proposes a data expansion framework that combines ml algorithms and classical models to improve prediction performance in scenarios with limited measured data at new frequencies. the findings of this study show the potential of ml algorithms for path loss prediction in tropical regions. however, the study also identifies the need for additional research to address data availability issues, improve accuracy, and enhance computational efficiency.ref presents a study on the path loss prediction in aircraft cabin environments using different ml methods. the models are compared with measured data at frequencies of 2.4 ghz, 3.52 ghz, and 5.8 ghz. the results demonstrate that the ml-based approaches, including bpnn, svr, rf, and adaboost, outperform the classical log-distance model in terms of prediction accuracy at these frequencies. the proposed data expansion method further enhances prediction accuracy, especially when there are limited measurement samples at the 5.8 ghz.ref. proposes an ml-based path loss prediction model for urban environments at millimeter wave frequencies. the model utilizes 28 ghz measurements from manhattan, incorporating street clutter features from lidar data and compressed building information. the suggested approach achieves improved prediction accuracy compared to statistical and 3gpp models, with a root-mean-square error (rmse) of 4.8 ± 1.1 db. the model employs linear ml algorithms, such as elastic-net regression and svr with radial basis function (rbf) kernel, to enhance performance in extrapolation and robustness against overfitting.in nearly most of the studies that used ml methods for path loss prediction problem utilize conventional cross validation scheme in their ml deployments. in this scheme, ml method tunes its parameters and evaluates model performance with the same data by using the same validation dataset, and thereby leaking the learned information from the same data into the model. consequently, the chosen parameters make the model biased towards the dataset used, resulting in overfitting where the model excessively adapts to the specific dataset. however, nested cross validation scheme differs from the conventional one in the manner that it does not use the same dataset for both model selection and parameter selection, thereby avoiding the information leakage from the dataset into the model. this study is, to our knowledge, the first to use this novel approach for ml deployment in the path loss prediction.by advancing the field of the path loss prediction through the application of ml with a novel cross validation scheme, this research aims to facilitate more accurate network planning, resource optimization, and performance improvement in wireless communication systems.this paper is motivated by the advantages of ml over traditional approaches for path loss prediction and the growing need for accurate path loss prediction in the context of 5g networks. it focuses on the development and evaluation of an ml-based path loss prediction model. in the rest of the paper, we briefly introduced path loss prediction through ml methods in section ii, and ml deployment stages in section iii. accordingly, we discuss results obtained in section iv, and finally make concluding remarks in section v.the path loss prediction in wireless communication systems traditionally relies on two main types of models: deterministic models,and empirical models,. 
deterministic path loss models are based on the physical principles of wave propagation and use computational techniques such as ray tracing or finite-difference time-domain (fdtd) simulations to predict the path loss. by accurately modelling the physical mechanisms of wave transmission, reflection, and diffraction, deterministic models offer the potential for high accuracy in path loss prediction. ml models have potential to overcome the limitations of traditional models by leveraging comprehensive datasets and powerful algorithms to learn patterns and relationships directly from data, leading to accurate and efficient path loss predictions in diverse wireless propagation environments. in the existing literature, several research studies have explored the application of ml methods for path loss prediction in different wireless communication environments. the proposed model is compared with ray tracing and exhibited faster prediction speed while maintaining accuracy in path loss prediction. the study also proposes a data expansion framework that combines ml algorithms and classical models to improve prediction performance in scenarios with limited measured data at new frequencies.refpresents a study on the path loss prediction in aircraft cabin environments using different ml methods.in nearly most of the studies that used ml methods for path loss prediction problem utilize conventional cross validation scheme in their ml deployments.by advancing the field of the path loss prediction through the application of ml with a novel cross validation scheme, this research aims to facilitate more accurate network planning, resource optimization, and performance improvement in wireless communication systems.this paper is motivated by the advantages of ml over traditional approaches for path loss prediction and the growing need for accurate path loss prediction in the context of 5g networks. in the rest of the paper, we briefly introduced path loss prediction through ml methods in section ii, and ml deployment stages in section iii.in this paper, we try to make the path loss prediction with ml deployment which , which overcome conventional path loss prediction models drawbacks. since the path loss prediction has landmark importance in optimizing wireless network performance, we approach to this problem with a novel scheme through deployment of ml methods which are svr, cbr, xgb, ann, and rf. in this study, we first acquire a publicly available dataset obtained through a comprehensive measurement campaign in an urban macro-cell scenario, enabling us to train and evaluate our novel ml scheme for path loss prediction in 5g wireless networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/488.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/488.txt new file mode 100644 index 0000000000000000000000000000000000000000..91fc9d299db69708f986e16d966b1fabfe21409a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/488.txt @@ -0,0 +1 @@ +neural networks have achieved remarkable success across various domains , but their predictions often lack reliability and satisfactory explanations, leading to low trust . one observed issue contributing to this problem is prediction churn, where models with similar performance exhibit significant variability in their predictions . this churn occurs even among models with identical hyperparameters differing only in their random initializations . 
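The churn phenomenon described above (identically configured models, different random initializations, diverging predictions) is easy to reproduce outside the GNN setting. The sketch below uses a generic classifier on synthetic data purely to illustrate the metric; it is not the paper's node-classification setup.

```python
# Illustrative sketch of prediction churn: two identically configured models that
# differ only in their random seed can disagree on a noticeable fraction of test points.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier

X, y = make_classification(n_samples=2000, n_features=20, n_informative=8, random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.5, random_state=0)

preds = []
for seed in (1, 2):
    clf = MLPClassifier(hidden_layer_sizes=(32,), max_iter=300, random_state=seed)
    clf.fit(X_tr, y_tr)
    preds.append(clf.predict(X_te))

churn = np.mean(preds[0] != preds[1])          # fraction of flipped predictions
acc = [np.mean(p == y_te) for p in preds]      # both models perform similarly
print(f"accuracies: {acc[0]:.3f} / {acc[1]:.3f}, churn: {churn:.3f}")
```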
when large portions of correctly classified data are misclassified upon model retraining, the models' reliability, trustworthiness, and explainability are reduced. churn has been observed across various domains and data structures, but understanding its underlying cause remains challenging. this work focuses on node classification with graph neural networks (gnns), a domain where churn is particularly prevalent. our work investigates the reason behind prediction churn in gnns by comparing the influence of context nodes on predictions. we propose a novel metric, the influence difference (id), which allows us to compare the exploited features for a given prediction between a pair of models and empirically verify this prevalence. contrary to previous investigations, we hypothesize that stable and unstable predictions exhibit similar differences in their utilized features. we further hypothesize that stable nodes possess redundant features, allowing their stable prediction even when the features utilized for the prediction change. to empirically validate our hypotheses, we introduce additional metrics based on id. knowledge distillation (kd) is a promising technique to transfer knowledge from a well-performing teacher model to a newly trained student. this enables model compression for a computationally expensive teacher or regular model updates using new data. users expect consistent behavior after each update, so closely matching the teacher's predictions is crucial. previous work formulated the goal of kd as directly minimizing churn. based on our findings, we propose to extend kd by also matching the influences of predictions as a regularization. as the exact formulation is computationally prohibitive, we introduce dropdistillation (dd), an efficient approximation that mimics the influence of adjacent nodes by removing random edges equally from both the teacher and the student model. our empirical analysis validates our hypotheses for several benchmark datasets, further motivating the need for transferring the reasons behind predictions. comparing dropdistillation with several state-of-the-art methods, our approach improves prediction churn between teachers and students and overall performance. we summarize our key contributions: • we investigate the reason behind prediction churn in gnns by comparing the influence of context nodes on predictions using a novel metric. we also connect differences in prediction stability with the availability of redundant features (section 3). • based on our findings, we extend knowledge distillation to minimize churn directly and match the features exploited for predictions as a regularization technique and propose an efficient approximation, namely dropdistillation (dd) (section 4). • our empirical evaluation validates our claims and confirms the effectiveness of dropdistillation for knowledge distillation for various benchmark datasets (section 5). these contributions aim to enhance our understanding of prediction churn in gnns, and propose a novel direction to address churn in knowledge distillation, resulting in increased reliability and overall performance. churn has been observed across various domains and data structures, but understanding its underlying cause remains challenging. • we investigate the reason behind prediction churn in gnns by comparing the influence of context nodes on predictions using a novel metric.
• based on our findings, we extend knowledge distillation to minimize churn directly and match the features exploited for predictions as a regularization technique and propose an efficient approximation, namely dropdistillation (dd) (section 4). these contributions aim to enhance our understanding of prediction churn in gnns, and propose a novel direction to address churn in knowledge distillation, resulting in increased reliability and overall performance. axiom 1 (a1): prediction churn is a consequence of models utilizing different features for their predictions. as all neighboring nodes can affect the prediction to some degree, we propose to view each neighboring node as one potential feature the root node can utilize. when a root node relies on different neighbors for distinct models, this indicates that the models learn disjoint feature subsets, and id would thus be more meaningful than the churn c. a key property of id is that even when the prediction churn is small, it can still provide meaningful information about the differences between the knowledge acquired by the two models. to verify this statement empirically, we calculate the correlation between the average influence differences id ∈ r^n for each node and the stability s ∈ {0, 1}^n of each node's prediction. we provide the results for accuracy or f1-score, churn c, influence difference id, the correlation corr(id, s), and the correlation corr(h, s) based on pairwise models with the same hyperparameters in table 2. this work investigated the reasons behind prediction churn in graph neural networks by quantifying differences in the influence on a prediction between models based on our proposed influence difference (id) metric. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/489.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/489.txt new file mode 100644 index 0000000000000000000000000000000000000000..568f916d55d761f91cf3f2ba7538c7f523bba665 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/489.txt @@ -0,0 +1 @@ +many digital platforms now offer organized events through the internet, where users can be organizers or participants. for example, the platform meetup allows people to organize offline gatherings through online registration, and there are flash sales platforms such as gilt that offer limited-time product discounts. moreover, retweeting viral messages of the moment on social media platforms such as twitter can also be considered a type of event. effectively predicting event participants can provide many benefits to event organizers and participants. for example, organizers can send out invitations more effectively, while potential participants can receive better recommendations. some previous research has found that the problem of event participant prediction can be solved with recommendation techniques, such as matrix factorization. indeed, if one considers events as items and participants as users, then recommending events to users can be performed similarly to recommending products to users with an e-commerce recommender system.
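The recommendation view just described (events as items, participants as users, matrix factorization) can be sketched in a few lines. This is a generic SGD-based factorization on a toy binary participation matrix, not the method proposed in the paper.

```python
# Minimal matrix factorization sketch: users and events get low-dimensional embeddings
# whose dot product scores how likely a user is to participate in an event.
import numpy as np

rng = np.random.default_rng(0)
n_users, n_events, dim = 50, 30, 8
R = (rng.random((n_users, n_events)) < 0.1).astype(float)   # observed participations

U = 0.1 * rng.standard_normal((n_users, dim))
V = 0.1 * rng.standard_normal((n_events, dim))
lr, reg = 0.05, 0.01

for _ in range(200):
    for u, e in zip(*np.nonzero(R)):                         # observed positive pairs
        err = 1.0 - U[u] @ V[e]
        U[u] += lr * (err * V[e] - reg * U[u])
        V[e] += lr * (err * U[u] - reg * V[e])
    # a few sampled negatives per pass keep scores of non-participations low
    for u, e in zip(rng.integers(0, n_users, 100), rng.integers(0, n_events, 100)):
        if R[u, e] == 0:
            err = 0.0 - U[u] @ V[e]
            U[u] += lr * (err * V[e] - reg * U[u])
            V[e] += lr * (err * U[u] - reg * V[e])

scores = U @ V.T                                             # higher score = more likely participant
print(scores.shape)
```

The sparsity of R in this toy setup mirrors the data-scarcity problem raised next: with few observed participations, such a factorization has little signal to learn from.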
unlike a product-based e-commerce platform, though, which has thousands of items, each purchased by thousands of users, events are organized and participated with much smaller frequency.therefore, one problem with many event-based platforms is that they have not collected enough data to effectively learn a model of user preferences.on the other hand, social media platforms such as twitter nowadays are generating huge amounts of data that are accessible publicly . a particular activity, that is retweeting, in which social media users repeat a popular tweet, can be seen as a type of event participant . we argue that eventbased platforms can use data of such activity to support their own prediction models even though some restrictions are required. for example, due to privacy concerns, it is assumed that users in the target domain will not offer their social media account information. this condition invalidates many cross-domain recommendation solutions that rely on linked accounts , , . nevertheless, even if the users are not linked to social media accounts, we can still have some useful information from social media. for examples, the interaction data that consists of user retweeting records of past tweets, and the tweet texts that are written in the same natural language. retweeting data are useful for event participant prediction because the act of retweeting generally reveals a user's preference towards what is described in the tweet text , .in this paper, we propose a method to utilize social media retweeting data during the learning of an event participant prediction model of a target domain, which has limited training data. as mentioned, we do not assume there are linkable users across social media and the target domain. instead, we only assume that the event descriptions in the target domains are written in the same language as the social media tweets. this will become our basis for linking two domains. we generate a joint graph using data from two domains, and learn crossdomain users embeddings in the same embedding space. in this way, we can increase training data by adding social media retweeting data, and train more accurate models. to the best of our knowledge, this is the first work to use social media retweeting to enhance event participant prediction.in this paper, we propose a method to utilize social media retweeting data during the learning of an event participant prediction model of a target domain, which has limited training data. note that we can apply the same graph technique to learn embeddings in single domains as well, denoted as l s (u) and l t (u) respectively for the retweeting data and target domain. a problem is that the graph embeddings l j (u) and l t (u) are only available for a small number of target domain users, because they are learned from limited participation data. these users have base embeddings l b (u) but not graph embeddings l j (u) and l t (u).we need to map base embedding l b (u) to the embedding space of l j (u) when making the prediction. after obtaining m from users who have both base embeddings and graph embeddings, we can map the base user embedding to graph user embedding l j ′ (u) = m × l b (u) for those users who have no graph embedding. this would then require us to map graph user embedding to target domain base user embedding. unlike mapping base embedding to graph embedding, where some target domain users have both embeddings, we do not have social media users with base embeddings. 
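The linear map m described above, fitted on users that have both a base embedding l_b(u) and a graph embedding l_j(u) and then applied to users that only have a base embedding, can be sketched as a least-squares fit. Dimensions and data below are illustrative placeholders, and the top-k similar-user fallback discussed next is omitted.

```python
# Sketch of the embedding-space mapping: fit M on paired (base, graph) embeddings,
# then map users that have only a base embedding into the graph embedding space.
import numpy as np

rng = np.random.default_rng(0)
d_base, d_graph, n_paired = 64, 32, 500

L_b = rng.standard_normal((n_paired, d_base))                             # base embeddings l_b(u)
true_M = rng.standard_normal((d_graph, d_base))
L_j = L_b @ true_M.T + 0.01 * rng.standard_normal((n_paired, d_graph))   # graph embeddings l_j(u)

# Least-squares fit so that l_j(u) ≈ l_b(u) @ M for the paired users
# (this realizes l_j'(u) = M × l_b(u) up to transposition of M).
M, *_ = np.linalg.lstsq(L_b, L_j, rcond=None)                            # shape (d_base, d_graph)

new_user_base = rng.standard_normal(d_base)       # a target-domain user without a graph embedding
mapped = new_user_base @ M                        # mapped graph-space embedding l_j'(u)
print(mapped.shape)
```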
we solve it by finding the most similar target domain users for a social media user, and using their embeddings as the social media user base embedding.where u k is top-k target domain users most similar to the social media user u according to their graph embeddings.we have shown two ways to create joint training data by mapping graph embeddings to base embeddings, and by mapping base embeddings to graph embeddings. more specifically, for graph embedding space, the input l(u) is set to l j (u) if user u has graph embedding, and otherwise it is set to the mapped embedding l j ′ (u). in the case where test data contain events in the training data, which is called warm test, we randomly pick up one user from each event, adding it to the test data and removing it from the training data. in the case where test data contain no event in the training data, which is called cold test, we use all data shown in tableias the training data, and use additional 1,000 events as the test data. however, when there is no data for a new event, the useful information is mostly from the target domain itself, and retweeting data can only add limited useful information to the model, if not noises, especially when the target domain has sufficient training data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/49.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/49.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ed0c9a1c8aa82cd11569a58ca61b2480dd5d8fe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/49.txt @@ -0,0 +1 @@ +in my recent book it has been demonstrated that the intensional fol has a conservative tarski's semantics, shown also in this paper (only partially) by definition 6, with interpretations (see the diagram in ( 5))as the ordinary (extensional) fol with well known its deductive properties. by introduction of the abstraction operators with autoepistemic capacities, expressed by the know predicate in previous section, we do not use more a pure logical deduction of the standard fol, but a kind of autoepistemic deduction with a proper set of new axioms. however, the autoepistemic logic is introduced as a propositional logic with added universal modal operator, usually written k, and the axioms:for any proposition formulae φ and ψ, while know in ifol is a predicate and not modal (kripke-like) operator k.it has been demonstrated that intensional enrichment of the standard (extensional) fol, provided by definition 14 in , is a kind of modal predicate logic fol k (γ ), where the set of explicit possible world w e is equal to the set i t (γ )} of tarski's interpretations i * t = h • i (this composition is provided by diagram in ( 5)) of the standard fol with a given set of assumptions γ , that is, for a prefixed intensional interpretation of robot, this set of possible worlds is equal to the set of the extensionalization functions h ∈ e of robot's ifol. it has been demonstrated that in such a minimal intensional enrichment of standard (extensional) fol, we obtain exactly the montague's definition of the intension (see proposition 5 in ).we recall that each robot's extensionalitation function h ∈ e in (2) is indexed by the time-instance. the actual robot's world extensionalization function (in the current instance of time) is denoted by , and determines the current robot's knowledge. 
clearly, the robots knowledge changes in time and hence determines the extensionalization function h ∈ e in any given instance of time, based on robots experiences. thus, as for humans, also the robot's knowledge and logic is a kind of temporal logic, and evolves with time.note that the explicit (conscious) robot's knowledge in actual world (current timeinstance) here is represented by the ground atoms of the know predicate, for a given assignments g :with {y 1 , y 2 } β α ⊆ v, such that g * (y 1 ) = in present and g * (y 2 ) = me (the robot itself), for the extended assignments g * : t → d, where the set of terms t of ifol is composed by the set v of all variables used in the defined set of predicates of robot/s ifol, by the set of fol constants and by the set of abstracted terms in (6), such that (from definition 17 in ):where g(β) = g({y 1 , .., y m }) = {g(y 1 ), ..., g(y m )} and is a uniform replacement of each i-th variable in the set β with the i-th constant in the set g(β).notice that α is the set of all free variables in the formula φ.so that in the actual world , the known fact ( 16) for robot becomes the ground atomwhich is true in actual word, that is, from proposition (intensional concept)remark: note that for the assignments g : v → d, such that g(y 1 ) = in f uture and g(y 2 ) we consider robot's hypothetical knowledge in future, while in the cases when g(y 1 ) = in past we consider what was robot's knowledge in the past. consequently, generally the predicates of ifol for robots, based on the dynamic changes of its knowledge has to be indexed by the time-instances (which are possible worlds of ifol), for example by using an additional predicate's variable for them. in the examples in next, we will consider only the case of robot's current knowledge (in the actual world with extensional function ) when g(y 1 ) = in present, so we avoid the introduction of the time-instance variable for the predicates; only at the remark at the end of this section we will show how to use time-variable τ .from the fact that we do not use the modal kripke universal modal operator k, the principle of necessitation rule n for modal logics, which for a given proposition (sentence) φ derives the knowledge fact kφ, here in predicate based ifol, the robots current knowledge (ground atoms of predicate know) is directly derived from its experiences (based on its neuro-system processes that robot is using in this actual world), in an analog way as human brain does:-as an activation (under robot's attention) of its neuro-system process, as a consequence of some human command to execute some particular job.-as an activation of some process under current attention of robot, which is part of some complex plan of robot's activities connected with its general objectives and services.in both cases, for a given assignment g : v → d of virtual predicate φ(x) with the set of variables x = β α, which concept i(φ(x)) ∈ d |α| is grounded by this particular process, is transformed into abstracted term and hence robot's knowledge system generates the new ground knowledge atom know(y 1 , y 2 , ⋖φ(x)⋗ β α )/g with g(y 1 ) = in presence and g(y 2 ) = me, in robot's temporary memory. 
remark: we consider that only robot's experiences (under robot's attention) are transformed into the ground atoms of the know predicate, and the required (by robot) deductions from them (by using general fol deduction extended by the three epistemic axioms) are transformed into ground atoms of know predicate, and hence are saved in robot's temporary memory as a part of robot's conscience. some background process (unconscious for the robot) would successively transform these temporary memory knowledge into permanent robot's knowledge in an analog way as it happen for humans. thus, the three epistemic axioms of epistemic modal logic with modal operator k, used to obtain deductive knowledge, can be traduced in ifol by the following axioms for the predicate know, which in fact define the semantics of this particular know predicate, as follows:1. the modal axiom t, in ifol is represented by the axiom, for each abstracted term ⋖ψ(x)⋗ β α and assignment g :(b) if |α| ≥ 1 and for the intensional concept)) = t} with g i = g j if i = j and the tuple of hidden variables y in the virtual predicatethis axiom shows how the robot's experience of execution of the process (described by abstracted term ⋖ψ(x)⋗ α ), to which the intensional concept u 1 is grounded, transforms the true facts obtained by robot's neuro-system (of this process, which results are still the parts of robots unconscious knowledge) into the symbolic-ai fol formulaso by using axiom (19), and fol deduction, these deductive properties of the robot can deduce any true single fact (logical sentence) ψ derived by its neuro-system process, and to render it to robot's consciousness as a single known fact know(y 1 , y 2 , ⋖ψ⋗ β )/g. in the case (a), when α is empty, from (17) withsuch that (u 1 ) = (i(ψ)) = t, that is, the sentence ψ is true, so from the fact that the left side ground atom of axiom's implication in ( 19) is equal to know(g(y 1 ), g(y 2 ), u 1 ), this t axiom (19) becomesnote that the meaning of the intensional concept u 1 of the robot is grounded on robot's neuro-system process, which is just robot's current internal experience of what is he doing, and just because of that the robot's knowledge know(g(y 1 ), g(y 2 ), u 1 ) is true for him. so, this is really an reflexive axiom. consequently, the application of the t axiom (a), allows the extraction from robot's conscious knowledge the logical sentences which, successively, can be elaborated by robot's implemented deductive property of fol in two ways:a.1. to respond to some human natural language questions (parsed into a logical formula) and to verify if the response is "yes" or "no", or "i do not know" (if robot's conscious knowledge is incomplete for such a question); a.2. to deduce another sentences which then can be inserted in robot's conscious knowledge as ground atoms of the predicate know (where this deduced sentence is represented as an abstracted term). this process (in background, or when robot is free of other concrete activities) can be considered as a kind of consolidation and completion of robot's knowledge based on previous experiences, in an analog way as it is done by human mind when we sleep. for example, the standard fol formula (∃x k )φ(x i , x j , x k , x l , x m ) will be mapped into intensional concept ∃ 3 φ(x) ∈ a f ol where x is the list(tuple) of variables (x i , x j , x k , x l , x m ). 
for example, the fol formula φ(x i , x j , x k , x l , x m ) ∧ ψ(x l , y i , x j , y j ) will be replaced by a specific virtual predicate φ(x i , x j , x k , x l , x m ) ∧ s ψ (x l , y i , x j , y j ), with the set of joined variables (their positions in the first and second virtual predicate, respectively) s = {(4, 1), (2, 3)}, so that its extension is expressed by an algebraic expression r 1 ⊲⊳ s r 2 , where r 1 , r 2 are the extensions for a given tarski's interpretation i t of the virtual predicate φ, ψ relatively, and the binary operator ⊲⊳ s is the natural join of these two relations. for the existential quantification, the fol formula (∃x k )φ(x i , x j , x k , x l , x m ) will be replaced in a f ol by a specific virtual predicate (∃ 3 )φ(x i , x j , x k , x l , x m ).the intensional entities for the same logic formula, for example x 2 + 3 = x 2 1 -4, which can be denoted by φ(x 2 , x 1 ) or φ(x 1 , x 2 ), from above we need to differentiate their concepts by i(φ(x 2 , x 1 )) = i(φ(x 1 , x 2 )) because otherwise we would obtain erroneously that h(i(φ(x 2 , x 1 ))) = h(i(φ(x 1 , x 2 ))). the logic formula φ(x i , x j , x k , x l , x m )∧ s ψ(x l , y i , x j , y j ) will be intensionally interpreted by the concept u 1 ∈ d 7 , obtained by the algebraic expression conj s (u, v). the logic formula ¬φ(x i , x j , x k , x l , x m ) will be intensionally interpreted by the concept u 1 ∈ d 5 , obtained by the algebraic expression neg(u) where u is the concept of the virtual predicate φ, u = i(φ(x i , x j , x k , x l , x m )) ∈ d 5 . the logic formula (∃ 3 )φ(x i , x j , x k , x l , x m ) will be intensionally interpreted by the concept u 1 ∈ d 4 , obtained by the algebraic expression exists 3 (u) where u = i(φ(x i , x j , x k , x l , x m )) ∈ d 5 is the concept of the virtual predicate φ. 
thus, for each video clip in given database, this robot's neuro system computes the probability that considered clip satisfies a natural language query, parsed into logic fol formula (second natural language semantic level) and consequently into intensional algebra a int term with intensional concepts which labels are grounded by robot's neuro system processes (algorithms).where the time-variable x 1 (with values "in past", "in present", "in future") indicates the time of execution of this recognition-action, the variable x 2 is used for the subject who executes this action (robot in this case), the variable x 3 is used for the object given to be eventually recognized (in this case a video clip) and x 4 for the statement (users query) that has to be satisfied by this object, and virtual predicate.however, during execution of this process, the robot is able also to deduce the truth of the autoepistemic sentence, for a given assignment of variables g : v → d, with g(x 1 ) = in present and g(x 2 ) = me, know(x 1 , x 2 , ⋖f ind(inpresent, me, y, ⋖φ⋗)⋗ y )/g(11)of the virtual predicate know(x 1 , x 2 , x 3 ), where the time-variable x 1 (with values "in past", "in present", "in future") indicates the time of execution of this action, the variable x 2 is used for the subject of this knowledge and x 3 is used for an abstracted term expression this particular knowledge).note that, while the top line in the diagram (15) is the ordinary component of the natuaral language grounding developed by robot's neuro system, the two lines bellow is the new robots knowledge structure of the added symbolic ai system based on the intensional first order logic and its grounding to robot's processes (its neuro ai system), by which the robot is able to provide logic deductive operations and autoepistemic selfreasoning about its current knowledge states and communicate it to humans by using natural languages.it has been demonstrated that intensional enrichment of the standard (extensional) fol, provided by definition 14 in, is a kind of modal predicate logic fol k (γ ), where the set of explicit possible world w e is equal to the set i t (γ )} of tarski's interpretations i * t = h • i (this composition is provided by diagram in (5)) of the standard fol with a given set of assumptions γ , that is, for a prefixed intensional interpretation of robot, this set of possible worlds is equal to the set of the extensionalization functions h ∈ e of robot's ifol.in both cases, for a given assignment g : v → d of virtual predicate φ(x) with the set of variables x = β α, which concept i(φ(x)) ∈ d |α| is grounded by this particular process, is transformed into abstracted term and hence robot's knowledge system generates the new ground knowledge atom know(y 1 , y 2 , ⋖φ(x)⋗ β α )/g with g(y 1 ) = in presence and g(y 2 ) = me, in robot's temporary memory. know(g(y 1 ), g(y 2 ), g * (⋖ψ⋗ α )) ⇒ know(g(y 1 ), g(y 2 ), ⋖know(g(y 1 ), g(y 2 ), g * (⋖ψ⋗ α )))(24)which, in the case when g(y 1 ) = in present and g(y 2 ) = me, is traduced in natural language by robot as: "i know that ψ" implies "i know that i know that ψ" where in the logic virtual predicate ψ there are the hidden variables in α, with extension (u) of its intensional concept u = g * (⋖ψ. 
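The implication labeled (24) above is hard to read in the extracted text; the following is a cleaned-up LaTeX rendering of that positive-introspection reading as far as it can be recovered, with the abstracted-term brackets written as angle brackets and the final closing bracket completed (an assumption about the original layout):

```latex
% Hedged reconstruction of formula (24): positive introspection for the know predicate.
% The argument structure of the abstracted term is simplified relative to the source.
\mathrm{know}\bigl(g(y_1),\, g(y_2),\, g^{*}(\langle \psi \rangle_{\alpha})\bigr)
  \;\Rightarrow\;
\mathrm{know}\bigl(g(y_1),\, g(y_2),\,
  \langle\, \mathrm{know}(g(y_1), g(y_2), g^{*}(\langle \psi \rangle_{\alpha})) \,\rangle\bigr)
```

This matches the natural-language reading given in the text: "i know that ψ" implies "i know that i know that ψ".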
in order to render it permanent (by cyclic process of transformation of the temporary into permanent robot's memory), we need to add to any predicate of the robot's fol syntax, also the timevariable as, for example, the first variable of each predicate (different from know), instantiated in the known facts by the tamestamp value τ (date/time) when this knowledge of robot is transferred into permanent memory, so that the known facts (35) in permanent memory would become know(in present, me, ⋖f ind(τ, in past, me, g i (y), ⋖φ⋗) ∧ videoclips(g i (y))⋗) (36) where the second value of the predicate f ind, from in present is modified into the value in past, and hence the fol predicate f ind would be translated into natural language by the past time "have found" of this verb. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/490.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/490.txt new file mode 100644 index 0000000000000000000000000000000000000000..9289c6c43d455419e74a74216d7c4427035ccd09 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/490.txt @@ -0,0 +1 @@ +dueling bandits (yue & joachims, 2009) or the more general problem class of preference-based bandits (bengs et al., 2021) is a practically relevant variant of the standard reward-based multi-armed bandits (lattimore & szepesvári, 2020), in which a learner seeks to find in a sequential decision making process an optimal arm (choice alternative) by selecting two (or more) arms as its action and obtaining feedback in the form of a noisy preference over the selected arms. the setting is motivated by a broad range of applications, where no numerical rewards for the actions are obtained and only comparisons of arms (choice alternatives) are possible as actions. in information retrieval systems with human preference judgments (clarke et al., 2021), for example, humans choose their most preferred choice alternative among the two (or more) retrieved choice alternatives (e.g., text passages, movies, etc.). another example is the analysis of voting behavior, in which voters express their preferences over pairs of political parties or candidates (brady & ansolabehere, 1989). the rationale for these types of applications is that humans are generally better at coherently expressing their preference for two choice alternatives than at reliably assessing those two on a numerical scale (carterette et al., 2008;li et al., 2021).even though there is a large body of literature on dueling or preference-based bandits (see sui et al. (2018); bengs et al. (2021)) covering various variants or aspects of the initial setting, little attention has been paid to the variant, in which a learner might observe an indifference between the selected arms for comparison as the explicit feedback. in practice, however, this type of feedback is quite common, especially when the preference feedback is provided by a human. in the human preference judgment example above, the human might be indifferent between the two retrieved choice alternatives, as the two are considered equally good/mediocre/bad, so neither is chosen. similarly, voters might be indifferent between two political parties or candidates, or two athletes resp. sport teams competing against each other might draw.in several areas of preference-based learning, appropriate extensions of models or methods have been considered to appropriately incorporate such indifferences. 
notable examples are the recent extensions of established probabilistic ranking models (firth et al., 2019;turner et al., 2020;henderson, 2022), the field of partial label ranking (alfaro et al., 2021;rodrigo et al., 2021;alfaro et al., 2022alfaro et al., , 2023a,b),b), an extension of the established label ranking problem, or preference-based bayesian optimization (dewancker et al., 2018;nguyen et al., 2021). however, the field of preference-based bandits lags behind in this regard, as the only work appears to be gajane et al. (2015) that considers an adversarial learning scenario for regret minimization.motivated by this gap in the literature on preference-based bandits, we consider the stochastic dueling bandits problem with indifferences (or ternary feedback) for the task of identifying an optimal arm as quickly as possible, i.e., with as few queried feedback observations as possible. similarly as in the conventional dueling bandits problem with binary feedback (i.e., strict preferences) specifying the notion of optimality of an arm raises some issues. indeed, the most natural notion of an optimal arm would be an arm, which is non-dominated by any other arm in terms of the probability of being strictly preferred or indifferent. this notion corresponds to the condorcet winner (cw) in the conventional setting, where the probability of observing indifferences is zero. however, it is well known that such an arm may not exist in general in the conventional setting, an issue obviously shared by the adopted cw for our considered setting. in contrast to the conventional cw, the adopted cw does not even guarantee uniqueness of the optimal arm1 .the non-existence issue of the cw has led several authors to consider alternative notions for the optimality of arms guaranteed to exist in any case. most of them have their roots in tournament solutions used in social choice and voting theory (brandt et al., 2015(brandt et al., , 2016) ) or game theory owen (2013). one popular alternative is the copeland set (copeland, 1951) defined as the set of choice alternatives (arms) with the highest copeland score. in the absence of indifferent preferences, the copeland score of a choice alternative is the number of choice alternatives it dominates in terms of the pairwise preference probability. in settings like ours, i.e., where indifferences might be present, the copeland score of a choice alternative is the sum of (i) the number of choice alternatives it strictly dominates, and (ii) half the number of choice alternatives it is most likely indifferent to. again, both definitions coincide for the conventional setting, where the probability of observing indifference is zero.as the term itself suggests, there can be several arms in the copeland set, each of which is called a copeland winner (cowi). despite this non-uniqueness issue, the main advantages of considering the copeland set are that (i) it is guaranteed to exist and (ii) that it consists only of the condorcet winner(s) in case of its (their) existence2 . moreover, the majority of alternative optimality notions from tournament solutions are in fact supersets of the copeland set (see ramamohan et al. (2016)). here, the (i, j)-th entry of p ≻ or p ≺ denoted by p (i,j) ≻ or p (i,j) ≺ specifies the probability of observing a strict preference of i over j (or j over i), while the (i, j)-th entry of p∼ = denoted by p (i,j) ∼ = specifies the probability of observing an indifference between i and j. 
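The Copeland score with indifferences described above (one point per arm strictly dominated, half a point per arm the candidate is most likely indifferent to) can be computed directly from the pairwise probability matrices. The sketch below uses one plausible reading of "strictly dominates" and "most likely indifferent", namely the most probable of the three outcomes; the toy matrices are illustrative only.

```python
# Sketch of the Copeland score and Copeland set under ternary (preference / tie /
# reverse-preference) feedback, computed from illustrative probability matrices.
import numpy as np

def copeland_set(P_succ, P_tie):
    """P_succ[i, j]: probability i is strictly preferred to j;
    P_tie[i, j]:  probability of indifference between i and j."""
    n = P_succ.shape[0]
    P_prec = 1.0 - P_succ - P_tie                  # probability j is preferred to i
    scores = np.zeros(n)
    for i in range(n):
        for j in range(n):
            if i == j:
                continue
            outcome = np.argmax([P_succ[i, j], P_tie[i, j], P_prec[i, j]])
            scores[i] += 1.0 if outcome == 0 else (0.5 if outcome == 1 else 0.0)
    return scores, np.flatnonzero(scores == scores.max())

P_succ = np.array([[0.0, 0.6, 0.5], [0.2, 0.0, 0.3], [0.2, 0.3, 0.0]])
P_tie  = np.array([[0.0, 0.2, 0.3], [0.2, 0.0, 0.4], [0.3, 0.4, 0.0]])
scores, winners = copeland_set(P_succ, P_tie)
print(scores, winners)                             # arm 0 is the unique Copeland winner here
```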
apparently, it holds that p (i,j) ≻ + p (i,j) ∼ = + p (i,j) ≺ = 1 for any i, j ∈ a, and consequently any dueling bandits problem with indifferences is uniquely determined by one of its strict preference probability matrices, since p (j,i). define kl((p 1 , p 2 , p 3 ), (q 1 , q 2 , q 3 )) for the kullback-leibler divergence between two categorical random variables with parameters (p 1 , p 2 , p 3 ) and (q 1 , q 2 , q 3 ), while we use the common notation kl(p, q) for the kullback-leibler divergence between two categorical random variables with parameters (p, 1p) and (q, 1q), i. (1)). in order to capture the dependence on the underlying instance p of interest, we may simply write l(p, j) and i(p, j) as well as d j (p), cp * (p) and cp(p, j) for these corresponding terms.note that the right-hand side of (7) depends not only via κ j,z (p) but also via d j = d j (p), l(j) = l(p, j) on the underlying instance p.by construction we have cp( p, i * ) ≤ cp(p, i * ) and obtain cp( p, j) = cp(p, j) + |l| ≥ cp(p, j) + d j + 1 = cp(p, j) + (cp(p, i * ) -cp(p, j)) + 1 = cp(p, i * ) + 1 ≥ cp( p, i * ) + 1.in case (x, y) ∈ {(j, z), (z, j)} for any z ∈ l, it holds that p x,y = p x,y so that kl p x,y , p x,y = 0.for z = j, we have cp( p, i * ) = cp(p, i * ) -1, and similarly we see cp( p, j) = cp(p, j) + |l| + 1. together with |l| ≥ d j -1, we obtain with the same argumentation as before that cp( p, j) ≥ cp( p, i * ) + 1 and thus i * ∈ c( p).as i + 2l ≥ 2d j + 1 holds by assumption, we thus get cp( p ≻ , j) ≥ cp(p ≻ , j) + d j + 1/2 = cp(p ≻ , j) + (cp(p ≻ , i * ) -cp(p ≻ , j)) + 1/2 = cp(p ≻ , i * ) + 1/2 ≥ cp( p ≻ , i * ) + 1/2. due to i + 2l ≥ 2d j -1 we obtain cp( p ≻ , j) ≥ cp( p ≻ , j) + 1/2 and thus i * ∈ c( p ≻ ).for proving (iii), construct p ≻ such that it differs from p only on positions (j, z), z ∈ i ∪ l ∪ {i * }, fulfills (12) for all z ∈ i and (12) for all z ∈ l and further p.as (15) holds for all (i, l) ∈ ψ(j), (16) for all (i, l) ∈ ψ ′ (j), if i * ∈ i(j), and (17) holds for all (i, l) ∈ ψ ′′ (j) if i * ∈ l(j), combining these estimates concludes the proof.(ii) there exists a sequence (p n ) n∈n of instances with (p n ) (i,j) ≻ , (p n ) (i,j) ∼ = , (p n ) (i,j) ≺ ∈ {1/3 -∆, 1/3 + 2∆} for all i, j ∈ a and cp * (p n ) ≥ ⌈ n 2 + f (n)⌉ such that ), δ /n , t 0 is as in theorem 3. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/491.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/491.txt new file mode 100644 index 0000000000000000000000000000000000000000..2bd6e7a5d6da0ef645e863b7ae755197f3854c8e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/491.txt @@ -0,0 +1 @@ +context. graph neural networks (gnns) have gained increasing importance over the last decade due to their ability to process and operate over graph-structured data (wu et al., 2021;zhou et al., 2020;joshi et al., 2020). tasks in which gnns excel include node classification (xiao et al., 2022), graph classification (errica et al., 2020), link prediction (teru et al., 2020;zhu et al., 2021), and query answering over knowledge graphs (daza & cochez, 2020;galkin et al., 2022). 
based on these, gnns have found applications in many fields, including social network analysis (kipf & welling, 2018), recommender systems (ying et al., 2018), chemistry (gilmer et al., 2017), semantic web (hogan et al., 2022;schlichtkrull et al., 2018), natural language processing (marcheggiani & titov, 2017), and combinatorial optimization (dai et al., 2021).the practical importance of gnns has spurred the community to study their expressive power. this refers to the ability of gnns to distinguish pairs of non-isomorphic graphs. as it was early observed in two landmark articles (morris et al., 2019;xu et al., 2019), the expressive power of so-called message-passing gnns (mpgnns) is precisely that of the weisfeiler-leman (wl) test (weisfeiler & leman, 1968), one of the most renowned methods for checking graph isomorphism.as recently observed, this correspondence holds even in cases where graphs are node-and edgelabeled, naturally representing the rich structure present in knowledge graphs (barceló et al., 2022). the above has ignited significant interest among experts in exploring which graph properties can be distinguished by the wl test, especially those that are crucial for the applications of mpgnns (arvind et al., 2020;chen et al., 2020;morris et al., 2020;barceló et al., 2021;huang et al., 2023;bouritsas et al., 2023).a class of graph properties that have received special attention in this context is the number of times a given "pattern" appears as a subgraph in a graph. the relevance of this class emanates from the fact that subgraph counts are used in several graph-related tasks, such as constructing graph kernels (shervashidze et al., 2009;kriege et al., 2018) and computing spectral graph information (preciado & jadbabaie, 2010). subgraph counts also lie at the basis of some methods that measure the similarity between graphs (alon et al., 2008). therefore, it is crucial to comprehend the extent to which these methods align with the expressive capabilities of the wl test (or equivalently, mpgnns).in its more general version, the wl test is characterized by a parameterized dimension, k ≥ 1 (cai et al., 1992;morris et al., 2019). the k-dimensional wl test, or kwl test for short, iteratively colors the k-tuples of nodes in a graph until a fixpoint is reached. two graphs are said to be distinguishable by kwl, if the multisets of colors of all k-tuples reached at this fixpoint are different. a graph property f can be distinguished by kwl if for any two graphs g, h where f (g) ≠ f (h) only if g and h are distinguishable by kwl. we are then interested in the least k for which a parameter can be distinguished by kwl. we refer to this k as the wl-dimension of the parameter.several important results have been obtained over the last few years regarding the wl-dimension of counting the number of copies of a graph p (the pattern). we summarize some of such results next, by distinguishing the case in which we count copies of p (subgraphs) from the one in which we count induced copies of p (induced subgraphs). for the sake of clarity, we call the former the subgraph wl-dimension of p and the latter the induced subgraph wl-dimension of p .• counting subgraphs: an important notion in graph theory is the treewidth of a graph, which intuitively measures its degree of acyclicity (diestel, 2012). the hereditary treewidth of a pattern p is, in broad terms, the largest treewidth of any of the homomorphic images of p . arvind et al. 
(2020) initiated the study of the ability of the wl test to count subgraphs in terms of the notion of hereditary treewidth. they established that if the pattern p has hereditary treewidth k, then p has subgraph wl-dimension at most k. nevertheless, the sole instance in which they were able to demonstrate the validity of the converse was for k = 1, i.e., they showed that p has subgraph wl-dimension one iff p has hereditary treewidth one. in the meantime, some partial results were obtained for the case when k = 2 over particular classes of graphs. for instance, by combining results in arvind et al. (2020) and fürer (2017), one obtains that the largest cycle (respectively, path) with a subgraph wl-dimension of two is that of length seven. very recently, however, this gap has been closed. in fact, neuen (2023) proves the following for each k ≥ 1: if p is a pattern, then p has subgraph wl-dimension k iff p has hereditary treewidth k. this also provides an alternative explanation for the aforementioned result on cycles (resp., paths), as one can observe that a cycle (resp., path) has hereditary treewidth two iff it is of length at most seven. • counting induced subgraphs: most of the existing results on counting induced subgraphs were obtained in chen et al. (2020). the authors show that all patterns with k + 1 nodes have an induced subgraph wl-dimension bounded by k. moreover, this is optimal for k = 1; i.e., no pattern with three or more nodes has induced subgraph wl-dimension one.it is not known if this correspondence continues to hold for k > 1, i.e., whether there are patterns with k + 2 or more nodes with induced subgraph wl-dimension k, for k > 1.it is noteworthy that the previously mentioned results regarding counting induced subgraphs were achieved in a broader context compared to the results concerning counting subgraphs. specifically, the former apply even to labeled graphs, where each node and edge is assigned a label, whereas the latter were derived for non-labeled graphs.therefore, we have different levels of understanding of the capabilities of the kwl test for counting subgraphs and induced subgraphs. furthermore, these two issues have been addressed separately and employing distinct techniques, which enhances the perception that there is no inherent structural linkage between the two. however, as evidenced by research in the field of counting complexity, there exists a cohesive approach through which these types of problems can be examined. in fact, the counting of subgraphs and induced subgraphs are fundamentally interconnected, akin to opposite faces of a coin, as they can be represented as linear combinations of one another (lovász, 1967). thus, we can achieve insight into either of them by exploring the linear combinations of the other.expanding upon this idea, curticapean et al. (2017) introduced a comprehensive framework for graph motif parameters, which are defined as linear combinations of subgraph counts. in this paper, we study the ability of the wl test to count graph motifs, which provides us with a general approach for studying problems related to subgraph counting in this setting. our main contributions are summarized below. it is worth noting that all such results are derived within the framework established in chen et al. (2020), which focuses on labeled graphs. 
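For readers less familiar with the 1-dimensional WL test (color refinement) that anchors the hierarchy discussed above, the following minimal sketch refines node colors by hashing each node's color together with the multiset of its neighbours' colors, and declares two graphs indistinguishable if their stable color histograms agree. It uses plain adjacency-list graphs with 0-indexed integer nodes and ignores labels for simplicity; it is an illustration, not the labeled-graph machinery used in the paper.

```python
# Minimal 1-WL (color refinement) sketch: refine on the disjoint union so that the
# resulting colors are directly comparable across the two graphs.
from collections import Counter

def wl_indistinguishable(adj_g, adj_h):
    """Return True if 1-WL color refinement cannot distinguish the two graphs."""
    offset = len(adj_g)
    union = {v: list(nbrs) for v, nbrs in adj_g.items()}
    union.update({v + offset: [u + offset for u in nbrs] for v, nbrs in adj_h.items()})

    colors = {v: 0 for v in union}
    for _ in range(len(union)):                    # |V| rounds suffice to reach the fixpoint
        sig = {v: (colors[v], tuple(sorted(colors[u] for u in union[v]))) for v in union}
        relabel = {s: i for i, s in enumerate(sorted(set(sig.values())))}
        new_colors = {v: relabel[sig[v]] for v in union}
        if len(set(new_colors.values())) == len(set(colors.values())):
            colors = new_colors                    # partition no longer refines: stable
            break
        colors = new_colors

    hist_g = Counter(colors[v] for v in adj_g)
    hist_h = Counter(colors[v + offset] for v in adj_h)
    return hist_g == hist_h

# C6 (one 6-cycle) vs. two disjoint triangles: a classic pair 1-WL cannot distinguish.
c6   = {i: [(i - 1) % 6, (i + 1) % 6] for i in range(6)}
c3x2 = {0: [1, 2], 1: [0, 2], 2: [0, 1], 3: [4, 5], 4: [3, 5], 5: [3, 4]}
print(wl_indistinguishable(c6, c3x2))              # True
```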
this introduces an additional level of intricacy into all the proofs presented in this paper.• by building on tools developed by neuen (2023) and seppelt (2023), we establish a precise characterization for the class of labeled graph motifs with wl-dimension k, for k ≥ 1. specifically, for subgraph counting, this class precisely corresponds to the patterns of hereditary treewidth k, aligning with the characterization presented by neuen (2023) for the case of unlabeled graphs. for induced subgraph counting, this class contains precisely the patterns featuring k + 1 nodes, thus resolving the open issue posed by chen et al. (2020).• the previous result characterizes for which graph motifs γ the kwl test is able to distinguish between graphs with different numbers of occurrences of γ. a natural question arises: is it possible to obtain the number of occurrences of a graph motif γ in a graph g by computing a function over the multiset of colors of k-tuples of vertices obtained from the kwl test? we answer this question affirmatively. this result can be of interest to researchers working on mpgnn applications, as it suggests that by designing a suitable mpgnn architecture, one might be able to count the number of subgraphs that appear in a given graph.• we finally move into the problem of determining the wl-dimension for the problem of counting the occurrences of a given graph pattern p . our characterization shows that for counting induced subgraphs this problem is trivial as the wl-dimension is precisely the number of vertices in p minus 1. for subgraph counting, in turn, the problem is nontrivial, as we have to check for each homomorphic image of p whether its treewidth is at most k. since the number of homomorphic images of p is potentially exponential, this yields a naïve exponential time algorithm for the problem. we show that, in spite of this, the problem admits a polynomial time algorithm. the existence of such an algorithm was left open in arvind et al. ( 2020) even for the case k = 2.since many of the proofs in the paper are extensive and complex, we have opted to place technical details in the appendix and offer proof sketches in the main body of the paper to conserve space. the k-dimensional wl test, or kwl test for short, iteratively colors the k-tuples of nodes in a graph until a fixpoint is reached. a labeled graph motif parameter(curticapean et al.we also need the following result which establishes that two labeled graphs g and h are indistinguishable by kwl if and only if homs(f, g) = homs(f, h), for every labeled graph f of treewidth k. for a tree t with node labels l(u) ⊆ v (f ) of f we say that two homomorphisms are in colorful strict t leaf agreement if for every leaf ℓ of t there exist v ∈ v (g) k , w ∈ v (h) k such that µ, ν are in (v, w)-strict l(ℓ)-agreement and for some i ≥ 1 either. let g and h be labeled graphs, let f be a labeled graph with a nice td (t * , b) of width k, let ā be a tuple made up of k vertices that match a bag of the (t * , b).the goal of this section is to show that for any labeled graph f that has treewidth greater than k, there are labeled graphs g and h such that g ≡ kw l h but homs(f, g) ≠ homs(f, h). a class of plain graphs f is homomorphism distinguishing closed if for every graph f ∈ f , there are graphs g, h such that g ≡ f h and homs(f, g) ≠ homs(f, h). let g be a connected labeled graph, let u ⊆ v (g), and let f be any labeled graph. let g be a connected labeled graph, let u ⊆ v (g), and let φ ∈ hom(f, g) for some labeled graph f . 
from lemma 7 we directly see that for labeled graphs g, h, g ≡_kwl h implies homs(f, g) = homs(f, h) for every f ∈ lt_k. a product g_1 g_2 of two k-marked labeled graphs g_1, g_2 is constructed by taking the disjoint union of g_1 and g_2, identifying the vertices with the same marking, and suppressing parallel edges with the same edge label. if there are vertices v ∈ v(g_1), u ∈ v(g_2) with the same marking but different vertex labels (we say that g_1, g_2 are incompatible), the product is a single vertex with a self-loop (and arbitrary labels). if i = j, let g = ∑_{σ ∈ σ} g_σ (the sum ranges over the elements of the vertex label alphabet), where g_σ is the k-marked labeled graph with a single vertex v, mark_g(i) = mark_g(j) = v and λ(v) = σ. if i ≠ j, let g = ∑_{σ,σ′ ∈ σ} g_{σ,σ′} where g_{σ,σ′} is the graph with adjacent vertices v and u, marked as i, j and labeled with σ, σ′, respectively, and κ({v, u}) = δ. as discussed above, we have that for labeled graphs g, h, g ≡_kwl h holds iff g and h satisfy exactly the same sentences φ of the counting logic c^{k+1} with k + 1 variables. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/492.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/492.txt new file mode 100644 index 0000000000000000000000000000000000000000..05d12b3c7a274159a963e0f75d0f630aad6dd377 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/492.txt @@ -0,0 +1 @@ +power system state estimation is crucial for reliable and secure grid operation. however, traditional techniques relying on scada measurements suffer from sparse and error-prone data, leading to delayed and less accurate estimates. conventional state estimation techniques rely on complex iterative methods, which are prone to large delays in the case of large-scale power systems. furthermore, if the measurement set of the state estimation includes both conventional measurements (i.e., power flow/injection) and measurements from phasor measurement units (pmus), large delays might affect the monitoring responsiveness of the state estimator for capturing short-duration transients. machine learning approaches offer accelerated state estimation by processing measurements promptly after neural network training. this paper introduces physics-informed neural networks (pinns) to meet the need for faster state estimation. while responsiveness may not be critical for conventional measurements with low reporting rates, high pmu observability in future power systems necessitates accelerated state estimation to leverage real-time pmu reporting. many of the existing machine learning techniques, if used for power system state estimation, demonstrate drawbacks in capturing the complex dynamics and constraints of power systems. relying solely on statistical patterns in historical data can lead to inaccurate estimates, particularly in scenarios with limited or noisy data. moreover, training traditional learning models requires substantial amounts of data, which is costly to collect in power system applications. these limitations underscore the need for advanced approaches that combine machine learning with domain-specific physics knowledge to improve the accuracy and reliability of state estimation. pinns have emerged as a promising approach for solving complex problems in various scientific and engineering domains, including power system state estimation. pinns offer several advantages over traditional machine learning techniques.
firstly, pinns integrate domain-specific physical laws and constraints into the neural network (nn) architecture, enabling the incorporation of prior knowledge about the system behavior. this ensures that learned models are consistent with underlying physics, leading to more accurate and interpretable results. secondly, pinns can effectively handle data scarcity by leveraging physics-based regularization terms, reducing the reliance on large datasets. additionally, pinns enable the efficient handling of multi-dimensional inputs and outputs, making them suitable for complex power system modeling and control tasks. the flexibility and interpretability of pinns make them valuable for various use-cases, including power system parameter estimation, fault detection and diagnosis of power system operation. in this paper, a novel nn training method for power system state estimation leveraging the physics-informed approach is presented. the proposed approach integrates the physical laws and constraints of power systems as prior knowledge into the nn training process. the performance of the proposed architecture is tested under various training scenarios, comparing it against a benchmark plain nn. the results demonstrate that the proposed pinn achieves higher accuracy, improved algorithmic stability, and requires less training effort compared to the benchmark model. these findings highlight the potential of pinns as a powerful accelerator for power system state estimation, particularly in the context of the pmu era, where real-time and accurate estimation is crucial for ensuring reliable power system operation. the remainder of this paper is organized as follows: section ii provides a review of related work in the field of state estimation and the application of nns. section iii presents the methodology employed in this study, which includes a background on pinns and the formulation of the training process. section iv presents the experimental results, while section v concludes the paper. ii. related work traditional power system state estimation techniques, such as the gauss-newton and weighted least squares methods, have long relied on iterative approaches and measurements obtained from legacy sensors transmitted through scada systems. however, these methods can become computationally demanding when applied to large-scale power systems. additionally, the sparse and infrequent nature of measurements from legacy sensors can result in delays and decreased accuracy, especially during dynamic system conditions. these limitations highlight the need for more advanced techniques that can overcome these challenges and provide more efficient and accurate state estimation in power systems. in the past, efforts were focused on utilizing machine learning techniques, including artificial nns and support vector machines, to overcome the limitations of traditional state estimation methods. by using historical data and statistical patterns, these approaches aim to improve the accuracy and efficiency of state estimation. however, a prevalent drawback is the omission of the fundamental physical laws and constraints that govern power systems. this oversight can lead to imprecise estimates, particularly when confronted with limited data or system variations from training conditions. physics-informed neural networks are a class of machine learning models that integrate physical laws and constraints into the nn architecture.
by incorporating prior knowledge about the system behavior, such as conservation laws and boundary conditions, pinns enhance the accuracy and interpretability of the learned models. this is achieved by enforcing the physics-based constraints as regularization terms during the training process, guiding the nn to produce predictions consistent with the underlying physics. in recent years, there has been growing interest in utilizing pinns for power system state estimation. previous studies have explored the application of pinns for power system parameter estimation, dynamic state estimation, and fault detection, among others. these works have shown promising results, demonstrating the capability of pinns to capture the complex dynamics of power systems and handle data scarcity. this approach of using pinns for power system state estimation offers significant advantages and paves the way for future research in this field. firstly, it simplifies the nn architecture design by eliminating the need for system-specific designs based on system topology. this enhances flexibility and applicability across different power system configurations. secondly, the proposed approach is not dependent on the optimal placement of pmus or legacy sensors, making it adaptable to various measurement configurations. lastly, it eliminates the reliance on modeling time-series dependent events, making it well-suited for real-time applications requiring accurate and timely state estimation. these advantages position the proposed approach as a valuable direction for further exploration and development in power system state estimation using pinns. conventional state estimation techniques rely on complex iterative methods, which are prone to large delays in the case of large-scale power systems. while responsiveness may not be critical for conventional measurements with low reporting rates, high pmu observability in future power systems necessitates accelerated state estimation to leverage real-time pmu reporting. many of the existing machine learning techniques, if used for power system state estimation, demonstrate drawbacks in capturing the complex dynamics and constraints of power systems. pinns have emerged as a promising approach for solving complex problems in various scientific and engineering domains, including power system state estimation. the flexibility and interpretability of pinns make them valuable for various use-cases, including power system parameter estimation, fault detection and diagnosis of power system operation. in this paper, a novel nn training method for power system state estimation leveraging the physics-informed approach is presented. these findings highlight the potential of pinns as a powerful accelerator for power system state estimation, particularly in the context of the pmu era, where real-time and accurate estimation is crucial for ensuring reliable power system operation. related work: traditional power system state estimation techniques, such as the gauss-newton and weighted least squares methods, have long relied on iterative approaches and measurements obtained from legacy sensors transmitted through scada systems. in recent years, there has been growing interest in utilizing pinns for power system state estimation. previous studies have explored the application of pinns for power system parameter estimation, dynamic state estimation, and fault detection, among others.
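as a concrete illustration of how such physics-based regularization can enter the training objective, the sketch below combines a data-fit term on labeled states with a residual of the ac power-flow equations; the tensor shapes, the weighting factor lam, and the function names are illustrative assumptions rather than the formulation used in this paper.

    import torch

    def power_flow_residual(v_mag, v_ang, G, B, p_meas, q_meas):
        """residual of the ac power-flow equations:
        p_i = sum_j v_i v_j (g_ij cos(th_i - th_j) + b_ij sin(th_i - th_j))
        q_i = sum_j v_i v_j (g_ij sin(th_i - th_j) - b_ij cos(th_i - th_j))"""
        dth = v_ang.unsqueeze(-1) - v_ang.unsqueeze(-2)     # (batch, n, n)
        vv = v_mag.unsqueeze(-1) * v_mag.unsqueeze(-2)
        p_calc = (vv * (G * torch.cos(dth) + B * torch.sin(dth))).sum(-1)
        q_calc = (vv * (G * torch.sin(dth) - B * torch.cos(dth))).sum(-1)
        return ((p_calc - p_meas) ** 2 + (q_calc - q_meas) ** 2).mean()

    def pinn_loss(model, p_meas, q_meas, v_true, G, B, lam=1.0):
        """data-fit term on the labeled states plus a physics penalty that ties
        the predicted states back to the measured injections."""
        x = torch.cat([p_meas, q_meas], dim=-1)
        pred = model(x)                                     # (batch, 2n): magnitudes then angles
        n = p_meas.shape[-1]
        v_mag, v_ang = pred[..., :n], pred[..., n:]
        data_loss = torch.nn.functional.mse_loss(pred, v_true)
        phys_loss = power_flow_residual(v_mag, v_ang, G, B, p_meas, q_meas)
        return data_loss + lam * phys_loss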
this approach of using pinns for power system state estimation offers significant advantages and paves the way for future research in this field. these advantages position the proposed approach as a valuable direction for further exploration and development in power system state estimation using pinns. for example, in the context of power system state estimation, constraints related to power flow equations, kirchhoff's laws, or the admittance matrix can be incorporated. the task of power system state estimation involves inferring the voltage magnitude and voltage angle at each bus of the system. the choice of using the active power (p_i) and reactive power injection (q_i) measurements as the input dataset for the neural network is driven by the fact that they are valuable for learning about the system's topology through data-driven patterns. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/493.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/493.txt new file mode 100644 index 0000000000000000000000000000000000000000..c01d4ebb5a03d097281ddf0af8593226b39a80cc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/493.txt @@ -0,0 +1 @@ +definition 2 for any measurable function σ : r → c and borel measure γ on r^m × r, put (1); we call s_σ an (integral representation of a) neural network, and γ a parameter distribution. the integration over all the hidden parameters means that the hidden units are summed (or integrated, to be precise) with weight γ; hence formally s_σ is understood as a continuous neural network with a single hidden layer. we note, however, that when γ is a finite sum of point measures such as in (2), s_σ reduces to an ordinary finite-width network; in other words, the integral representation is a mathematical model of a depth-2 network with any width (ranging from finite to continuous). definition 3 for any measurable functions ρ : r → c and f : r^m → c, put the transform r_ρ; we call r_ρ a ridgelet transform. the ridgelet transform is known to be a right-inverse operator to s_σ. to be precise, the following reconstruction formula holds. theorem 4 (reconstruction formula) suppose σ and ρ are a tempered distribution (s′) and a rapidly decreasing function (s), respectively. there exists a bilinear form ((σ, ρ)) such that s_σ[r_ρ[f]] = ((σ, ρ)) f for any square integrable function f ∈ l^2(r^m). further, the bilinear form is given by an explicit integral of the fourier transforms of σ and ρ, where ♯ denotes the 1-dimensional fourier transform. see sonoda et al. (2021, theorem 6) for the proof. in particular, according to sonoda et al. (2021, lemma 9), for any activation function σ, there always exists ρ satisfying ((σ, ρ)) = 1. here, σ being a tempered distribution means that typical activation functions are covered, such as relu, step function, tanh, gaussian, etc. we can interpret the reconstruction formula as a universality theorem of continuous neural networks, since for any given data generating function f, a network with output weight γ_f = r_ρ[f] reproduces f (up to the factor ((σ, ρ))), i.e. s_σ[γ_f] = ((σ, ρ)) f. in other words, the ridgelet transform indicates how the network parameters should be organized so that the network represents an individual function f. in this study, we showcase a new proof of the reconstruction formula based on group theoretic arguments, and present a systematic scheme to find the ridgelet transform for a variety of given network architectures based on the symmetry in the data-parameter domain.
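the displayed formulas referenced as (1) and (2) above, and the definition of r_ρ, were lost in extraction; the following latex block is a hedged reconstruction of their standard forms (as in sonoda et al., 2021), not a verbatim copy of the paper's equations.

    % (1) integral representation: a continuous depth-2 network with parameter distribution gamma
    S_\sigma[\gamma](x) = \int_{\mathbb{R}^m \times \mathbb{R}} \sigma(a \cdot x - b)\, \mathrm{d}\gamma(a, b), \qquad x \in \mathbb{R}^m.
    % (2) a finite sum of point measures recovers an ordinary finite-width network
    \gamma = \sum_{j=1}^{p} c_j\, \delta_{(a_j, b_j)} \quad\Longrightarrow\quad
    S_\sigma[\gamma](x) = \sum_{j=1}^{p} c_j\, \sigma(a_j \cdot x - b_j).
    % definition 3: ridgelet transform of f with respect to rho
    R_\rho[f](a, b) = \int_{\mathbb{R}^m} f(x)\, \overline{\rho(a \cdot x - b)}\, \mathrm{d}x.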
however, these methods typically require handcrafting the network architecture for each specific symmetry and geometry. the ridgelet transform, the main topic of this study, is a pseudo-inverse operator of the integral representation neural network and is a detailed analysis tool that can describe the relationship between data and parameters due to its analytical representation. there are two main streams: one is the generalization of the fourier transform, which expands functions on a group g as a sum/integration of multiple irreducible unitary representations (sugiura, 1990), and the other is the generalization of the wavelet transform called the voice transform, which expands functions in a representation space h as a sum/integration of functions generated by a single square-integrable unitary representation (holschneider, 1998; berge, 2021). we showcase the original integral representation and the ridgelet transform, a mathematical model of a depth-2 fully-connected network and its right inverse, then list a few facts in group representation theory. in this study, we showcase a new proof of the reconstruction formula based on group theoretic arguments, and present a systematic scheme to find the ridgelet transform for a variety of given network architectures based on the symmetry in the data-parameter domain. by haar's theorem, if g is a locally compact group, then there uniquely exist left and right invariant measures d_l g and d_r g satisfying the corresponding invariance identities for any s ∈ g. we introduce generalized neural networks and generalized ridgelet transforms induced from joint group invariant functions on the data-parameter domain, and present a simple group theoretic proof of the reconstruction formula. definition 6 (joint g-invariant function) we say a function ϕ on x × ξ is joint g-invariant when it satisfies the invariance condition for all g ∈ g and (x, ξ) ∈ x × ξ. in other words, the ψ-ridgelet transform r_ψ is understood as a group theoretic generalization of the original ridgelet transform, as it is a right inverse operator of the ϕ-transform nn_ϕ. example 2 let g be any locally compact group, let the data domain x be any g-space, rewriting its g-action g • x as g(x) so as to formally identify g with a hidden layer map, and let the parameter domain ξ be the group g itself with dual g-action. therefore, assuming that the regular representation π_g = ψ • g is irreducible on an invariant subspace h of l^2(x), we can retain the formal deep network and deep ridgelet transform. example 3 let g be any group, let the data domain x be any g-space, and let the parameter domain ξ be the group g itself with dual g-action g • ξ = gξ. we presented a systematic method to induce a generalized neural network and its ridgelet transform from a joint group invariant function on the data-parameter domain. in the past, sonoda et al. treated such architectures case by case. to summarize, in the case of the fully-connected neural network (and its corresponding ridgelet transform), the invariant is a modulated distance σ(a • x − b), and the dual action is the parallel translation of the hyperplane so as to keep the distance invariant.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/494.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/494.txt new file mode 100644 index 0000000000000000000000000000000000000000..34bada04b766cbb07dd240888afe6f510e54c7c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/494.txt @@ -0,0 +1 @@ +as machine learning algorithms are increasingly deployed in high-stakes settings (e.g., healthcare, finance, justice), it has become imperative to understand the fairness implications of algorithmic decision-making on protected groups. as a consequence, a wealth of work has sought to define algorithmic fairness and learn machine learning-based policies that satisfy fairness constraints to ensure equitable outcomes across protected groups .most of this work focuses on whether the algorithm makes fair decisions in isolation. yet, these algorithms are rarely used in high-stakes settings without human oversight, since there are still considerable legal and regulatory challenges to full automation. moreover, many believe that human-ai collaboration is superior to full automation because human experts may have auxiliary information that can help correct the mistakes of algorithms, producing better decisions than the human or algorithm alone. for example, while many powerful ai systems have been developed for diagnosing medical images , the center for medicare and medicaid services only allows ai systems to assist medical experts with diagnosis .however, human-ai collaboration introduces new complexities -the overall outcomes now depend not only on the algorithmic recommendations, but also on which individuals the human decision-maker complies with the algorithmic recommendation. recent case studies have shown mixed results on whether human-ai collaboration actually improves decision accuracy or fairness . for instance, a recent experiment examines diagnostic quality when radiologists are assisted by ai models . the authors find that, although the ai models are substantially more accurate than radiologists, access to ai assistance does not improve diagnostic quality on average; the authors show that this is due to selective compliance of the algorithmic recommendations by humans, which they hypothesize is driven by improper bayesian updating. similarly, a recent study evaluates the impact of algorithmic risk assessment on judges' sentencing decisions in virginia courts . although risk assessment promised fairer outcomes , the authors find that it brought no detectable benefits in terms of public safety or reduced incarceration; in fact, racial disparities increased in the subset of courts where risk assessment appears most influential. once again, the mismatch is driven by selective compliance to algorithmic recommendations, which appears to be at least partly driven by conflicting objectives between judges and the algorithm (e.g., judges are more lenient towards younger defendants). selective compliance has significant fairness implications, e.g., in this case, the authors note that "judges were more likely to sentence leniently for white defendants with high risk scores than for black defendants with the same score. " these case studies make it clear that ensuring equitable outcomes in human-ai collaboration requires accounting for humans' complex and unexpected compliance patterns. to this end, gillis et al. 
, morgan and pass show that the outcomes of human-ai collaboration can be arbitrarily less fair than either those of the human alone or the algorithm alone. to resolve this state of affairs, we introduce the notion of compliance-robust algorithms, i.e., algorithmic decision policies that are guaranteed to (weakly) improve fairness in final outcomes, regardless of the human's (unknown) compliance pattern. in particular, given a human decision-maker and her policy (without access to ai assistance), we characterize the class of algorithmic recommendations that never result in collaborative final outcomes that are less fair than the pre-existing human policy, even if the decision-maker's compliance pattern is adversarial. next, we prove that there exists considerable tension between traditional algorithmic fairness and compliance-robust fairness. unless the true data-generating process is itself perfectly fair, it is often infeasible to design an algorithmic policy that is fair in isolation, compliance-robustly fair, and more accurate than the human-only policy; this raises the question of whether traditional fairness is even a desirable constraint to enforce for human-ai collaboration. if the goal is to improve fairness and accuracy in human-ai collaboration outcomes, it may be preferable to design an algorithmic policy that is accurate and compliance-robustly fair, but not fair in isolation. our last result shows that the tension between traditional fairness and compliance-robust fairness is prevalent. specifically, we prove that for a broad class of fairness conditions, fair policies are not necessarily compliance-robustly fair, implying that compliance-robust fairness imposes fundamentally different constraints compared to traditional fairness. however, human-ai collaboration introduces new complexities: the overall outcomes now depend not only on the algorithmic recommendations, but also on the individuals for whom the human decision-maker complies with the algorithmic recommendation. in particular, given a human decision-maker and her policy (without access to ai assistance), we characterize the class of algorithmic recommendations that never result in collaborative final outcomes that are less fair than the pre-existing human policy, even if the decision-maker's compliance pattern is adversarial. unless the true data-generating process is itself perfectly fair, it is often infeasible to design an algorithmic policy that is fair in isolation, compliance-robustly fair, and more accurate than the human-only policy; this raises the question of whether traditional fairness is even a desirable constraint to enforce for human-ai collaboration. if the goal is to improve fairness and accuracy in human-ai collaboration outcomes, it may be preferable to design an algorithmic policy that is accurate and compliance-robustly fair, but not fair in isolation. specifically, we prove that for a broad class of fairness conditions, fair policies are not necessarily compliance-robustly fair, implying that compliance-robust fairness imposes fundamentally different constraints compared to traditional fairness. when given access to recommendations from an algorithmic policy, the human instead makes decisions according to a compliance function mapping x × a to {0, 1}, whose value is 1 exactly when the human adopts the algorithmic decision for the individual in question. traditional algorithmic fairness would require the algorithmic policy to equalize the relevant outcome statistic across the two protected groups, without accounting for the human policy or the compliance function.
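to make the objects in this definition concrete, the sketch below composes a human policy, an algorithmic policy, and a compliance function into the resulting human-ai policy, and checks whether a group-disparity measure got worse; the demographic-parity-style disparity, the function names, and the toy data are illustrative assumptions, not the paper's formal definitions.

    import numpy as np

    def human_ai_policy(human_dec, algo_dec, comply):
        """final decision: the algorithm's recommendation where the human
        complies, the human's own decision elsewhere."""
        return np.where(comply == 1, algo_dec, human_dec)

    def disparity(decisions, group):
        """demographic-parity-style gap: |p(d=1 | group 0) - p(d=1 | group 1)|."""
        return abs(decisions[group == 0].mean() - decisions[group == 1].mean())

    def reduces_fairness(human_dec, algo_dec, comply, group):
        """true if the collaborative outcome is strictly less fair than the
        human-only policy under this particular compliance pattern."""
        joint = human_ai_policy(human_dec, algo_dec, comply)
        return disparity(joint, group) > disparity(human_dec, group)

    # toy usage: a selective compliance pattern that only follows the algorithm
    # for one group can make the joint outcome less fair than either policy alone.
    rng = np.random.default_rng(0)
    group = rng.integers(0, 2, size=1000)
    human = rng.integers(0, 2, size=1000)
    algo = rng.integers(0, 2, size=1000)
    comply = (group == 0).astype(int)
    print(reduces_fairness(human, algo, comply, group))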
we say an algorithmic policy reduces fairness under a compliance function if the resulting human-ai policy has strictly larger disparity than the human-only policy. to avoid the above, we do not require that the human-ai decisions be perfectly fair, only that they are (weakly) fairer than the human policy without algorithmic assistance. given the human policy, an algorithmic policy is compliance-robustly fair if there does not exist any compliance function that reduces fairness for it. let π_fair be the set of compliance-robustly fair policies; note that these policies need not be fair in the traditional algorithmic fairness sense. therefore, we introduce the concept of compliance-robust fairness and demonstrate how to derive algorithmic policies that weakly improve fairness regardless of the human's compliance pattern. we show that, as long as the human policy is slightly sub-optimal and not perfectly fair, the best performance-improving compliance-robust policy still generates improvements over the human in isolation. however, it is not always the case that we can also achieve the third property of traditional fairness; we may need to rely on algorithmic policies that are unfair in isolation to achieve compliance-robustly fair human-ai collaboration. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/495.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/495.txt new file mode 100644 index 0000000000000000000000000000000000000000..803a6ba3b6e7761007e81ca856480000aaafc046 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/495.txt @@ -0,0 +1 @@ +there are between 20,000 and 25,000 protein-coding genes in the human genome, and these genes' sequences interact with one another and with the environment to initiate various biological processes. understanding the regulatory mechanisms behind these interactions is crucial, especially when disruption can lead to diseases. one of the main challenges in this research area is the high dimensionality and complexity of gene expression data. this complexity is exacerbated by the massive amounts of data generated by sequencing techniques like rna-seq, making it difficult to quickly find and understand relevant gene-gene interactions that are essential for biological processes and disease development. there has been a great deal of work in the research community to find a reliable and accurate model to infer gene-gene interactions. for instance, h. cordell's work, titled "detecting gene-gene interactions that underlie human diseases", provided a comprehensive survey of methods for detecting interactions between genetic loci contributing to human genetic diseases. while this study offers a broad overview, it does not delve into the application of deep learning for ggi detection. due to their capacity to handle highly dimensional and chaotic data, machine learning (ml) and deep learning (dl) algorithms have lately been recognised as viable approaches for detecting ggis. several artificial intelligence (ai) methodologies have been previously applied by numerous researchers for the detection of gene-gene interactions (ggis). for example, the contribution in the study by ritchie md et al. showed an optimized neural network architecture using genetic programming for better detection and modeling of ggis. although this study made strides in optimizing neural networks for ggi detection, it did not focus on using solely gene expression data, which is often more readily available. in the same context, yuan y et al.
(2019) predicted gene-gene relationships based on single-cell expression data using rna sequencing and deep learning techniques to encode gene expression data. another interesting approach is the gne model, which focuses on integrating known gene interaction information and gene expression to predict gene interactions, utilizing a supervised deep neural network. these several deep learning models have showcased outstanding results in a variety of benchmarks, demonstrating immense potential in the prediction and identification of new ggis. consequently, they have gained traction and are being implemented in current bioinformatics research. however, the frequent need for other information, such as prior biological understanding or topological information, in addition to gene expression data, which isn't always available, restricts the generalizability and scalability of these models. other machine learning methods focused more on dealing with gene expressions only, such as statistical correlation and mutual information, whereas other methods such as dimensionality reduction focused on using topological features only. these kinds of methods didn't find great success compared to methods that deal with heterogeneous sources of information such as gne. given these limitations, there is a growing need for a deep learning model that can accurately identify gene-gene interactions solely based on gene expression data. apart from accuracy, this model should also prioritize resilience, generalizability, interpretability, and efficiency. therefore, the development of our new deep learning model signifies a crucial and timely progression in ongoing research in this field. cordell's work, titled "detecting gene-gene interactions that underlie human diseases", provided a comprehensive survey of methods for detecting interactions between genetic loci contributing to human genetic diseases. while this study offers a broad overview, it does not delve into the application of deep learning for ggi detection. for example, the contribution in the study by ritchie md et al. showed an optimized neural network architecture using genetic programming for better detection and modeling of ggis. although this study made strides in optimizing neural networks for ggi detection, it did not focus on using solely gene expression data, which is often more readily available. in the same context, yuan y et al. (2019) predicted gene-gene relationships from single-cell expression data. these several deep learning models have showcased outstanding results in a variety of benchmarks, demonstrating immense potential in the prediction and identification of new ggis. given these limitations, there is a growing need for a deep learning model that can accurately identify gene-gene interactions solely based on gene expression data. data acquisition: for training our parallel layer deep learning network, we have used gene expression data alongside gene-gene interaction information from multiple data sources. additionally, in section 4, we compared the findings of our deep learning network to gne and other deep learning and statistical models, with the results obtained from the second training experiment. meanwhile, for our first training experiment, we have used gene expression and gene-gene interaction information offered by the yeastract database, which included 5970 unique gene expressions and 6736 ggi pairs.
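as a rough illustration of how such expression data and interaction pairs can be turned into supervised training examples, the sketch below concatenates the expression vectors of each known interacting pair and samples an equal number of non-interacting pairs as negatives; the function name, the 50/50 balance, and the random negative-sampling scheme are our own simplifications, not the exact pipeline used with the yeastract or biogrid&dream5 data.

    import numpy as np

    def build_pair_dataset(expr, ggi_pairs, rng=None):
        """expr: dict gene -> expression vector; ggi_pairs: iterable of
        (gene_a, gene_b) known interactions. returns concatenated expression
        features for positive pairs plus an equal number of randomly sampled
        non-interacting pairs labeled 0."""
        rng = rng or np.random.default_rng(0)
        genes = list(expr)
        X, y = [], []
        for a, b in ggi_pairs:                           # positive examples
            X.append(np.concatenate([expr[a], expr[b]]))
            y.append(1)
        known = {frozenset(p) for p in ggi_pairs}
        n_neg = 0
        while n_neg < len(known):                        # balanced negatives
            a, b = rng.choice(genes, size=2, replace=False)
            if frozenset((a, b)) not in known:
                X.append(np.concatenate([expr[a], expr[b]]))
                y.append(0)
                n_neg += 1
        return np.stack(X), np.array(y)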
the use of a balanced subset allows for a more robust validation of our deep learning model, ensuring that the model is not biased towards any particular class of gene interactions or expressions. our deep learning (dl) model is an ensemble learning architecture with two parallel branches (figure 1), leveraging both convolutional neural networks (cnns) and multi-layer feedforward neural networks (mfnns). to illustrate the importance of our parallel layer deep learning network compared to a single-branch deep learning model, we repeated the initial experiment using only a single branch, known as the cnn network. our aim was to infer gene-gene interactions by utilizing yeast gene expression and interaction data from the combined biogrid&dream5 dataset. it's worth noting that unlike certain deep learning methods that integrate spatial or temporal information, our approach relies solely on gene expression data for making predictions. in conclusion, our research has demonstrated the remarkable efficacy of our proposed model in the detection of gene-gene interactions, solely relying on features extracted from gene expression data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/496.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/496.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3058f3eb6d10e3e719109a4919f594d2e5bb3d0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/496.txt @@ -0,0 +1 @@ +intentionally crafted perturbations (adversarial examples) have the potential to alter the predictions made by neural networks (szegedy et al., 2014). many methods have been proposed to improve the robustness of deep networks, either empirically or provably. in safety-critical domains especially, guarantees against adversarial examples are indispensable. commonly, provable defenses provide certificates of local robustness to accompany a model's prediction; i.e., predictions should be guaranteed to be consistent within an ℓp-norm-bounded ϵ-ball around the input. the success of robustness certification techniques is measured by the verified robust accuracy (vra), the fraction of points with correct predictions that are proven to be ϵ-locally robust. to date, a look at the public robustness certification leaderboard (accessed sept. 2023) shows that the best results are achieved by variants of randomized smoothing (rs) (cohen et al., 2019b; salman et al., 2019; yang et al., 2021; jeong et al., 2021; carlini et al., 2022). however, there are two primary limitations associated with rs. to begin with, rs only offers a probabilistic guarantee, typically configured to have a 0.1% false positive certification rate. perhaps more importantly, the inference of rs involves substantial computational overhead; this limitation is significant enough that these methods are typically tested on only a 1% subset of the imagenet validation dataset due to timing constraints. another successful family of methods performs certification using lipschitz bounds (trockman and kolter, 2021; leino et al., 2021; hu et al., 2023; araujo et al., 2023; wang and manchester, 2023). essentially, the lipschitz constant of the neural network provides a bound on the maximum change in output for a given input perturbation, making it possible to certify local robustness.
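the certification check that this bound enables can be written in a few lines: with a global upper bound k on the ℓ2 lipschitz constant of the logit map, a prediction is robust at radius eps whenever the top-logit margin exceeds sqrt(2)·k·eps, since sqrt(2)·k bounds the lipschitz constant of any logit difference. this is a generic sketch of the idea, not the exact per-pair bound used by gloro or the other cited methods.

    import numpy as np

    def certified_robust(logits, lipschitz_K, eps):
        """a prediction is certified eps-locally robust (in l2) if the gap
        between the top logit and every other logit exceeds sqrt(2) * K * eps:
        no perturbation of norm <= eps can then change the argmax."""
        logits = np.asarray(logits, dtype=float)
        top = np.argmax(logits)
        runner_up = np.max(np.delete(logits, top))
        margin = logits[top] - runner_up
        return margin > np.sqrt(2.0) * lipschitz_K * eps

    # example: with K = 1 and eps = 36/255, a margin of 0.5 is enough to certify.
    print(certified_robust([2.1, 1.6, -0.3], lipschitz_K=1.0, eps=36 / 255))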
compared with rs-based methods, lipschitz-based methods can provide deterministic certification, and are efficient enough to perform robustness certification at scale, e.g., on the full imagenet (hu et al., 2023). while lipschitz-based methods are promising in terms of both deterministic certification and efficiency, there is a noticeable performance gap between these methods and rs-based methods. it is not established, however, that this discrepancy is tied to a fundamental limitation of deterministic certification. in this work, we aim to narrow the gap between lipschitz-based and rs-based methods. one important avenue for improving the performance of lipschitz-based certification is through increasing model capacity (ability to fit data). bubeck and sellke (2021) have shown that robust classification requires more capacity than is necessary for standard learning objectives, and leino (2023) has shown more specifically that further capacity is required for tight lipschitz-based certification. but while increasing model capacity for standard training is trivial (adding more blocks/layers, increasing the network width and using self-attention mechanisms are all possible approaches), in lipschitz-based certified training the picture is more nuanced, because the network's lipschitz constant is tightly controlled, limiting the function's expressiveness. thus, even models with many parameters may still underfit the training objective. in addition, we find that an apparent limitation preventing prior work from discovering the full potential of lipschitz-based certification stems from the framing and evaluation setup. specifically, most prior work is framed around a particular novel technique intended to supersede the state-of-the-art, necessitating evaluations centered on standardized benchmark hyperparameter design spaces, rather than exploring more general methods for improving performance (e.g., architecture choice, data pipeline, etc.). although we introduce several of our own innovations, we present this work as more of a "master class" on optimizing lipschitz-based robustness certification that draws from and synthesizes many techniques from prior work to achieve the best overall performance. this angle lets us explore design choices meant to be synergistic with the overall lipschitz-based approach, rather than restricting us to choices tailored for head-to-head comparisons. this work provides a more comprehensive evaluation to illuminate the potential of lipschitz-based certification methods. first and foremost, we find that by delving more thoroughly into the design space of lipschitz-based approaches, we can improve the state-of-the-art vra for deterministic certification significantly on a variety of benchmark datasets, and over a range of perturbation sizes. in the process, we propose a number of additional techniques not already used by the prior literature that contribute to these large performance improvements. that is, our results are achieved using a combination of design optimization, novel techniques, and synthesis of prior work. after covering the relevant background in section 2, we begin in section 3 with a brief survey of the design space for lipschitz-based certified training, focusing on three key components: (1) architecture choice, (2) methods for controlling the lipschitz constant, and (3) data augmentation. first, we cover the various architecture innovations and building blocks that have been used in the prior literature.
based on an analysis of the challenges faced by existing work, and motivated by the goal of efficiently increasing network capacity, we propose additional directions to explore along the architecture axis, including two novel network building blocks. next, we provide an overview of the existing methods used for controlling the lipschitz constant during training, and propose one of our own that can be combined with other approaches. third, we discuss the role data augmentation plays in training high-capacity models. specifically, we cover ddpm (karras et al., 2022), which prior work has found helpful for certified training, and propose an alteration to the typical augmentation strategy that we find further boosts performance. section 4 provides an in-depth evaluation that explores along the three dimensions identified in section 3, shedding light on the most promising design choices, and demonstrating the significant performance improvements we achieve in this work. finally, section 5 concludes the paper. but while increasing model capacity for standard training is trivial (adding more blocks/layers, increasing the network width and using self-attention mechanisms are all possible approaches), in lipschitz-based certified training the picture is more nuanced, because the network's lipschitz constant is tightly controlled, limiting the function's expressiveness. after covering the relevant background in section 2, we begin in section 3 with a brief survey of the design space for lipschitz-based certified training, focusing on three key components: (1) architecture choice, (2) methods for controlling the lipschitz constant, and (3) data augmentation. this includes 1-lipschitz activation functions, dense layers, convolutional layers, and residual layers (with a few variations). on the other hand, new methods for performing lipschitz control that present results on larger architectures may come across as misleading, as it becomes unclear if the performance benefits come from the added capacity or the lipschitz control method. lipschitz-based certification requires the network to have a low lipschitz constant, since an upper bound on the lipschitz constant is used to approximate output changes from input perturbations, and if it is too large, certification becomes difficult. there are two primary categories of lipschitz control used in the literature: (1) lipschitz regularization and (2) lipschitz constraints. a wide variety of lipschitz constraint approaches exist, typically using special re-parameterizations that constrain each linear layer's weights to be orthogonal (the lipschitz constant of an orthogonal transformation is 1). although we do not try all combinations, in our experiments we use gloro regularization for convolutional layers while combining different lipschitz control techniques for the dense layers. the backbone contains 12 linear residual convolution blocks with 512 channels each. although spatial-mlp shows comparable performance to transformers and conv-nets in standard training, the capacity of spatial-mlp may not be enough for certified training in a lipschitz-bounded setting. in lipschitz-based training, on the other hand, the smoothness of the model is controlled by the network's lipschitz constant, so large dense layers can improve certified robustness effectively.
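one concrete way to realize the orthogonality constraint mentioned above is the cayley parameterization (one of the dense-layer options compared below): an unconstrained square matrix is mapped to a skew-symmetric matrix and then to an orthogonal one, so the resulting dense layer is 1-lipschitz by construction. this numpy sketch shows only the re-parameterization itself, under our own naming, not the full training setup of any cited method.

    import numpy as np

    def cayley_orthogonal(M):
        """map an arbitrary square matrix M to an orthogonal matrix via the
        cayley transform: A = M - M^T is skew-symmetric, and
        W = (I + A)^{-1} (I - A) satisfies W^T W = I."""
        A = M - M.T
        I = np.eye(M.shape[0])
        return np.linalg.solve(I + A, I - A)

    # the resulting weight has unit spectral norm, so a dense layer x -> W x
    # (followed by a 1-lipschitz activation) cannot inflate the lipschitz bound.
    W = cayley_orthogonal(np.random.default_rng(0).normal(size=(8, 8)))
    print(np.allclose(W.T @ W, np.eye(8)))   # True up to numerical error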
adding more dense layers (16 layers) yields diminishing returns, thus we choose 8 dense layers in our final configuration. training one epoch with the cholesky, cayley and matrix exp parameterizations, respectively, takes on the order of a few seconds each on the cifar-10 dataset using the same a100 machine. the cholesky-based orthogonal layer is the optimal lipschitz control choice for the dense layers considering both performance and efficiency. from this experiment, we think the reason the generated data helps is not that the generated data is of better quality, but that generated data are easier to classify on average (training on generated data has a faster training accuracy convergence). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/497.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/497.txt new file mode 100644 index 0000000000000000000000000000000000000000..d46de64330f24cff00b9d056b6ab2f1c29e1d726 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/497.txt @@ -0,0 +1 @@ +graph-structure data explicitly expresses complex relations between items, and thus has attracted much attention from the deep learning community. extensive efforts have been devoted to deploying gnns (kipf & welling, 2017; hamilton et al., 2017; velickovic et al., 2018) on node-level tasks. recently, researchers have started to shift their focus from local properties to graph-level tasks (wang et al., 2021; liu et al., 2022; yue et al., 2022), and graph-level anomaly detection has become one of the most important graph-level tasks with diverse applications (ma et al., 2022; zhang et al., 2022; qiu et al., 2022), such as cancer diagnosis, enzyme prediction, and brain disease detection. in addition, applications of graph-level anomaly detection can be observed in trending topics, such as spam detection (li et al., 2019) and rumor detection (bian et al., 2020). following the common design of graph learning models, existing solutions for graph-level anomaly detection mainly employ spatial gnns with distinct pooling techniques. for example, cal (sui et al., 2022) and faith (wang et al., 2022) incorporate node features with topological characteristics of graphs to generate graph representations. meanwhile, due to the limitations of the average or sum pooling function in certain tasks, researchers have introduced various graph pooling functions (wu et al., 2022; hua et al., 2022; liu et al., 2023). however, to the best of our knowledge, no previous attempt has provided spectral analysis for anomalous graphs, missing an important feature that can help better capture the properties of anomalous graphs. to address this issue, we start by investigating the spectral energy of the graph laplacian. our key findings and theoretical analysis validate that the accumulated spectral energy can be represented by the rayleigh quotient, which has been studied in the physics and mathematics literature (pierre, 1988; chan et al., 2011). besides, tang et al. (2022) make an interesting observation related to the rayleigh quotient for node anomaly detection. however, the inherent properties of the rayleigh quotient in the graph domain are still relatively under-explored. hence, we investigate the rayleigh quotient for graphs and further empirically show that the rayleigh quotient distributions of normal graphs and anomalous graphs follow different patterns. in particular, we first randomly sample n_a anomalous graphs and n_n normal graphs. for each graph, we calculate its corresponding rayleigh quotient.
subsequently, we set the maximum and minimum values of the rayleigh quotient of graphs as the bounds of the value range, which is then divided into 10 equal-width bins. after that, we assign each value of the rayleigh quotient of graphs to its corresponding bin. finally, we calculate the frequency of values that fall into each bin and normalize them, which can be regarded as the normalized rayleigh quotient distribution of the sampled dataset. figure 1 reports the rayleigh quotient distribution on the sn12c dataset, and the results on other datasets can be found in appendix a.9. as we can observe, regardless of the variations in the sample size, the rayleigh quotient distribution of each class exhibits a consistent pattern across different sample sets. in addition, it is evident from figure 1 that the rayleigh quotient distribution of anomalous graphs and that of normal ones are distinct from each other statistically. this observation highlights how the rayleigh quotient can reveal the underlying differences between normal and anomalous graphs. hence, the rayleigh quotient should be encoded and explored when identifying anomalous graphs. additionally, as we establish a connection between the rayleigh quotient and the spectral energy of the graph laplacian, it becomes apparent that the spectral energy distribution exhibits robust statistical patterns. this, in turn, empowers us to leverage spectral graph neural networks for further encoding and utilization of this valuable information.motivated by the observation and theoretical analysis, in this paper, we propose rqgnn, a rayleigh quotient-based gnn framework for graph-level anomaly detection tasks. it consists of two main components: the rayleigh quotient learning component (rql) and chebyshev wavelet gnn with rq-pooling (cwgnn-rq). firstly, we adopt rql to derive the rayleigh quotient of each graph and then employ a multi-layer perceptron (mlp) to generate the representation of each graph, aiming to capture explicit differences between anomalous and normal graphs guided by their rayleigh quotient. secondly, to obtain the implicit information embedded in the spectral space, we draw inspiration from the chebyshev wavelet gnn (cwgnn) and adopt it to learn the inherent information in the graph data. besides, to alleviate the drawbacks of existing pooling techniques in graph-level anomaly detection, we introduce a powerful spectral-related pooling function called rq-pooling. furthermore, we address the challenge of imbalanced data in graph-level anomaly detection via a class-balanced focal loss. the final graph embedding is the combination of representations generated by the rql and cwgnn-rq. by combining the explicit information from the rayleigh quotient and the implicit information from the cwgnn-rq, rqgnn effectively captures more inherent information for the detection of anomalous graphs.in our experiments, we evaluate rqgnn against 10 alternative frameworks across 10 datasets.extensive experiments demonstrate that our proposed framework consistently outperforms spectral gnns and the state-of-the-art (sota) gnns for both graph classification task and graph-level anomaly detection task. we summarize our contributions as follows:• our main observation and theoretical analysis highlight that the rayleigh quotient reveals underlying properties of graph anomalies, providing valuable guidance for future work in this field. 
• we propose the first spectral gnns for the graph-level anomaly detection task, which incorporate explicit and implicit learning components, enhancing the capabilities of anomaly detection. • comprehensive experiments show that rqgnn outperforms sota models on 10 real-world graph datasets, demonstrating the effectiveness of rqgnn. tang et al. (2022) make an interesting observation related to the rayleigh quotient for node anomaly detection. this observation highlights how the rayleigh quotient can reveal the underlying differences between normal and anomalous graphs. firstly, we adopt rql to derive the rayleigh quotient of each graph and then employ a multi-layer perceptron (mlp) to generate the representation of each graph, aiming to capture explicit differences between anomalous and normal graphs guided by their rayleigh quotient. by combining the explicit information from the rayleigh quotient and the implicit information from the cwgnn-rq, rqgnn effectively captures more inherent information for the detection of anomalous graphs. proposition 1 further shows that the accumulated energy of the graph can be represented by the rayleigh quotient, which motivates us to apply the spectral gnn to capture the spectral energy information, to be detailed in section 3. the following two theorems show that the change of the rayleigh quotient can be bounded given a small perturbation on the graph signal x and the graph laplacian l, and proofs can be found in appendix a. if the graph laplacian l and graph signal x of two graphs are close, then their rayleigh quotients will be close to each other and these two graphs will highly likely belong to the same class. proposition 1 indicates that the rayleigh quotient represents the accumulated spectral energy of the graph, which motivates us to design a spectral gnn and a spectral-related pooling function to capture the inherent properties behind anomalous graphs, as we will show in section 3. except for explicitly learning from the rayleigh quotient, following the common design of gnns, we need to implicitly learn from the topology and node features of graphs, so that we can collect comprehensive information for graph-level anomaly detection. we compare rqgnn against 10 sota gnn competitors, including spectral gnns, graph classification models and graph-level anomaly detection models (with improvements of 40% and 25.78% in terms of auc score, respectively). the combination of the rql component that explicitly captures the rayleigh quotient of the graph and cwgnn-rq that implicitly explores graph anomalous information provides different spectral perspectives for this task. the third group is sota gnn models for graph-level anomaly detection. (table 4 lists statistics of the 10 real-world datasets, where n_n is the number of normal graphs, n_a is the number of anomalous graphs, h = n_a / (n_n + n_a) is the anomalous ratio, n is the average number of nodes, m is the average number of edges, and f is the number of attributes.) the rql learns the explicit representation of the rayleigh quotient of graphs while the chebyshev wavelet gnn with rq-pooling learns the implicit representation of graphs guided by the rayleigh quotient.
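the quantity driving both components can be computed directly from the graph laplacian; the sketch below reproduces the binning procedure described earlier (one rayleigh quotient per graph, then a normalized 10-bin histogram over a sample of graphs). the dense numpy representation, the single scalar signal per graph, and the function names are our own simplifications.

    import numpy as np

    def rayleigh_quotient(adj, x):
        """r(l, x) = x^T l x / x^T x for the combinatorial laplacian l = d - a."""
        L = np.diag(adj.sum(axis=1)) - adj
        return float(x @ L @ x) / float(x @ x)

    def rq_distribution(graphs, n_bins=10):
        """graphs: list of (adjacency matrix, node-signal vector) pairs.
        returns the normalized histogram of rayleigh quotients over the sample,
        using equal-width bins spanning the observed min/max."""
        rqs = np.array([rayleigh_quotient(a, x) for a, x in graphs])
        hist, _ = np.histogram(rqs, bins=n_bins, range=(rqs.min(), rqs.max()))
        return hist / hist.sum()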
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/498.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/498.txt new file mode 100644 index 0000000000000000000000000000000000000000..cef3829f10870884de45474c7abde9132c0c1090 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/498.txt @@ -0,0 +1 @@ +credit score is a statistical model used by lenders to assess the creditworthiness of potential borrowers. the model typically uses a variety of factors, such as the borrower's credit history, income, and debts, to generate a score that represents the borrower's likelihood of repaying a loan. lenders use credit scores to make decisions about whether to approve a loan application and, if so, what interest rate to charge. credit scores can also be used to determine the credit limits on a credit card. there are a number of different credit scoring models, but they all share some common features. first, they all use a combination of positive and negative factors to assess a borrower's creditworthiness. second, they all assign a higher score to borrowers who have a history of paying their debts on time and in full. third, they all take into account the borrower's current financial situation, such as their income and debts. in recent years, machine learning models have been increasingly used in credit scoring systems to help identify and predict potential default customers. these models use a variety of data, such as the customer's credit history, income, and debts, to generate a score that represents the customer's likelihood of defaulting on a loan. machine learning models can be more accurate than traditional credit scoring models in predicting default risk. this is because machine learning models can take into account a wider range of data and can learn from past data to improve their accuracy. many machine learning models have been applied for predicting credit scores and default customers. bahnsen et al. proposed an example-dependent cost matrix for credit scoring, which incorporates all the real financial costs associated with the lending business. they employed logistic regression as the prediction model by modifying the model's objective function to be cost-sensitive. the proposed model was evaluated against two publicly available datasets: the 2011 kaggle give me some credit competition and the 2009 pacific-asia knowledge discovery and data mining conference (pakdd) competition. their results showed that their proposed method outperformed the state-of-the-art methods. butaru et al. conducted a comprehensive study employing various machine learning models to assess credit card risks. these models included decision trees, regularized logistic regression, and random forests. the evaluation was performed on an extensive dataset comprising anonymized information sourced from six major banks. the algorithms can utilize integrated information encompassing consumer tradelines, credit bureau data, and macroeconomic data spanning from january 2009 through december 2013. their findings revealed that, in both samples and time frames, decision trees and random forests outperformed logistic regression in predicting credit card risks.
this highlights the potential advantages of using big data and machine learning techniques for the benefit of consumers, risk managers, stakeholders, and anyone who seeks to avoid unexpected losses and reduce the costs associated with consumer credit. sun and vasarhelyi implemented a deep neural network for credit card risk prediction, employing a dataset comprising 711,397 credit card holders from a prominent brazilian bank. their study demonstrated that the deep neural network outperformed several other machine learning models, including logistic regression, naive bayes, traditional artificial neural networks, and decision trees. it achieved the highest f scores and area under the receiver operating characteristic curve (roc-auc) among the models tested. kumar et al. introduced deep learning with a k-means algorithm for credit card scoring prediction, using a home credit default risk dataset publicly available on kaggle. their approach involved the following steps: data preprocessing, feature selection, training a deep learning model, and incorporating a decision support system to enhance the accuracy of the deep learning predictions. their findings indicated that the proposed model delivered good performance, achieving an 87% accuracy rate when tested on the dataset. ala'raj et al. proposed a machine learning approach for predicting consumer behavior, assisting bank management in credit scoring of credit card clients. their approach consists of three phases: evaluating the probability of single and consecutive payment delinquencies among credit card customers, analyzing customer purchasing behavior, and grouping customers based on their expected loss in a mathematical context. their implementation consists of two models: the first model assesses the probability of a missed payment in the following month for each customer, and the second model focuses on estimating total monthly purchases. the customer behavior grouping is generated from both models, and both models are trained on real credit card transactional datasets. their experimental findings revealed that their neural network-based model significantly enhanced consumer credit scoring in comparison to traditional machine learning algorithms. zhu et al. applied machine learning models to predict and analyze loan defaults. they conducted a performance comparison among logistic regression, decision tree, xgboost, and lightgbm using a large dataset from a chinese bank. different feature selection techniques were employed to reduce the number of features, including deletion, principal component analysis, feature interaction analysis, and the population stability index. their findings indicated that lightgbm outperformed the other models in the comparison. additionally, they identified several factors, such as loan term, loan grade, credit rating, and loan amount, that significantly influenced the predictive outcomes. furthermore, alam et al. investigated the prediction of default credit card outcomes in imbalanced datasets. in this investigation, they employed various undersampling and oversampling techniques, in addition to utilizing several machine learning models. their findings indicate that undersampling techniques tend to yield higher accuracy compared to oversampling methods.
moreover, the performance of different classifiers improved significantly when tested on balanced datasets.this study introduces a new credit card default dataset for an american bank and investigates various machine learning models to enhance the prediction of defaulting credit cards. we first analyze the features of the data, extract the important features, and then select the most relevant ones. lastly, we compare the prediction performance using the following machine learning models: logistic regression, decision tree, random forest, xgboost, lightgbm, and neural network. therefore, the main objective of this study is:"to determine how to extract the most important features from the proposed dataset and identify the best-performing machine learning model."the rest of the paper is structured as follows: section 2 provides a review of the methodology and techniques employed in this study. section 3 presents the dataset description and preparation. section 4 outlines the results obtained from the implementation stage. finally, section 5 presents the conclusions and future works. credit scores can also be used to determine the credit limits on a credit card.in recent years, machine learning models have been increasingly used in credit scoring systems to help identify and predict potential default customers. these models use a variety of data, such as the customer's credit history, income, and debts, to generate a score that represents the customer's likelihood of defaulting on a loan. machine learning models can be more accurate than traditional credit scoring models in predicting default risk.conducted a comprehensive study employing various machine learning models to assess credit card risks.sun and vasarhelyiimplemented a deep neural network for credit card risk prediction, employing a dataset comprising 711,397 credit card holders from a prominent brazilian bank. their study demonstrated that the deep neural network outperformed several other machine learning models, including logistic regression, naive bayes, traditional artificial neural networks, and decision trees.introduced deep learning with k-means algorithm for credit card scoring prediction, using a home credit default risk dataset publicly available on kaggle. their approach invlolved the following steps, including data preprocessing, feature selection, training a deep learning model, and incorporating a decision support system to enhance the accuracy of the deep learning predictions. their experimental findings revealed that their neural networkbased model significantly enhanced consumer credit scoring in comparison to traditional machine learning algorithms.this study introduces a new credit card default dataset for an american bank and investigates various machine learning models to enhance the prediction of defaulting credit cards. lastly, we compare the prediction performance using the following machine learning models: logistic regression, decision tree, random forest, xgboost, lightgbm, and neural network. in summary, lightgbm outperformed all compared models in this credit card scoring task, showing high values in accuracy, auc, precision, and recall. therefore, our observations are twofold: (a) complex models like lightgbm and random forest showed superior predictive capabilities compared to traditional logistic regression and decision tree models; (b) lightgbm demonstrated a slightly higher predictive ability than the random forest model. 
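To make the stated comparison concrete, a sketch like the following benchmarks the listed models under a common cross-validated metric; the dataset is synthetic and the hyperparameters are defaults, so the numbers are illustrative only, and the xgboost/lightgbm imports assume those packages are installed.

```python
from lightgbm import LGBMClassifier
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score
from sklearn.neural_network import MLPClassifier
from sklearn.tree import DecisionTreeClassifier
from xgboost import XGBClassifier

# Placeholder imbalanced binary classification data.
X, y = make_classification(n_samples=5000, n_features=30,
                           weights=[0.9, 0.1], random_state=0)

models = {
    "logistic regression": LogisticRegression(max_iter=1000),
    "decision tree":       DecisionTreeClassifier(random_state=0),
    "random forest":       RandomForestClassifier(n_estimators=300, random_state=0),
    "xgboost":             XGBClassifier(eval_metric="logloss", random_state=0),
    "lightgbm":            LGBMClassifier(random_state=0),
    "neural network":      MLPClassifier(hidden_layer_sizes=(64, 32),
                                         max_iter=500, random_state=0),
}

# 5-fold cross-validated ROC-AUC gives every model the same train/test splits.
for name, model in models.items():
    scores = cross_val_score(model, X, y, cv=5, scoring="roc_auc")
    print(f"{name:>20}: AUC = {scores.mean():.3f} +/- {scores.std():.3f}")
```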
in the future, we aim to enhance the mlp model to improve its accuracy and recall values to make it more powerful model for credit card scoring. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/499.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/499.txt new file mode 100644 index 0000000000000000000000000000000000000000..173cf641bc26a4a33f87dbe85952f30d40498935 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/499.txt @@ -0,0 +1 @@ +automatic speech recognition and hidden markov models (hmm) enjoy a historical friendship. the task of speech recognition is not simple, requiring the mapping of a sequence of acoustic features to a sequence of words or phonemes. hmms provided an effective solution to this problem by being a simple enough model to be efficiently computable and yet powerful enough to capture the temporal structure of speech. the essential modeling idea is to introduce a sequence of hidden states that has the same length as the sequence of acoustic features. it is supposed to capture the temporal structure of the speech signal -at what time does the speaker pronounce which phoneme?in their early applications in speech recognition, hmms were generative models, i.e., modeling the distribution of the acoustic features, using gaussian mixture models (gmm). later, they were extended to discriminative models, so called discriminative hmms , to take advantage of the enormous discriminative prowess of neural networks (nn). today we find hmms being used in the so called hybrid approach still providing competitive results . before nn training in this approach a viterbi alignment must be crafted by a gmm-hmm system. the nn is then trained to predict the viterbi alignment with a cross-entropy loss on a framewise basis. hence, if we think of end-to-end training, it is not hmms that come to mind first but rather sequence-tosequence models such as the connectionist temporal classification (ctc) or the recurrent neural network transducer (rnn-t) .but with this an important capability of the hmm approach is neglected. only recently it has garnered some attention again by using full-sum hmm training which is closely related to ctc and rnn-t. in any full-sum training algorithm, the training loss is the weighted sum of the scores of different segmentations of the label sequence. for instance, in ctc one segmentation would be a possible padding of the label sequence with blank symbols. the weighting then depends on the output probability of the blank symbol. in hmms, on the other hand, the weighting is given by the transition probabilities between hidden states. this poses a big question: how do we determine these transition probabilities? while we could tune them or estimate them from given statistics, we may also learn them. in fact, transition model learning is a built-in mechanism of hmms as part of the baum-welch algorithm and is commonly used in other applications for hmms such as computational biology and computational finance . the training of the transition model is so integral to hmms in general, but virtually unexplored in the context of speech recognition.it is for this reason that in this paper we investigate the role of transition model training in speech recognition. 
to achieve this we will revisit the forward-backward algorithm as it was originally envisioned since its introduction in as an efficient way to train both the transition model and the label posterior model.experimentally, we will find that the transition model training can be run in conjunction with the training of the 978-1-6654-7189-3/22/$31.00 ©2023 ieee arxiv:2310.02724v2 9 oct 2023 label posterior. although the wers slightly degrade in comparison to the standard full-sum training of the label posterior alone, the quality of the alignments is improved by the addition of an explicit transition model. to our best knowledge, there is no well established measure for the evaluation of an alignment. we rely on a two-fold evaluation approach via (1) the asr accuracy of a model trained on the alignment, and (2) the time stamp error (tse) with respect to a gmm alignment as proposed in . we then go on to show that our suggested training procedure can replace state-of-the-art cart-gmm pipelines for viterbi training. note that this is in stark contrast to ctc, where the alignments are known to exhibit peaky behavior . the training of the transition model is so integral to hmms in general, but virtually unexplored in the context of speech recognition. to achieve this we will revisit the forward-backward algorithm as it was originally envisioned since its introduction inas an efficient way to train both the transition model and the label posterior model.experimentally, we will find that the transition model training can be run in conjunction with the training of the 978-1-6654-7189-3/22/$31. although the wers slightly degrade in comparison to the standard full-sum training of the label posterior alone, the quality of the alignments is improved by the addition of an explicit transition model. these models are very similar to hmms, in that it is possible to model label posterior and transition probability conjointly via a log-linear combination of a set of feature functions. the training of an explicit neural hmm transition model is a novelty in speech recognition as far as the authors are aware.here we call p t (a|x t 1 ) the label posterior model (lpm) and p t (s ′ |s, a, x t 1 ) the transition model (tm). in the simplest case we have a stationary transition model, where all speech states have the same transition probabilities p f,a = p f,speech and only silence has a separate one p f,silence .in order to test the stability and performance of the proposed training under different conditions we introduce different initialization strategies combining various initializations for the label posterior and the transition model.in this section we investigate the effect of training the transition model on the performance of full-sum training.we saw no word error rate improvement although transition model training improved the training scores (see table1). furthermore, the learned full transition model managed to outperform the fixed transition model. sim- ilarly to other sequence-to-sequence approaches, hmms with label posterior and different variants of a transition model can be trained in an end-to-end neural fashion and generate meaningful alignments in the process. we show that while the joint training of label and transition model does not improve the recognition performance, it helps generating better alignments. we showed that full-sum neural hmm training can replace this step and provide alignments of similar if not better quality with the help of transition model training. 
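A compact way to see how a label posterior model and an explicit transition model enter the same full-sum objective: the log-space forward recursion below sums over all monotonic alignments of a left-to-right HMM. The toy sizes, the stationary speech/silence forward probabilities, and the random stand-in posteriors are all assumptions, so this illustrates the recursion rather than the cited training setup.

```python
import numpy as np

rng = np.random.default_rng(0)

T, S = 50, 8                       # frames, left-to-right HMM states (last one final)
log_posterior = np.log(rng.dirichlet(np.ones(S), size=T))   # stand-in for p_t(a | x)

# Stationary transition model: each state either loops or advances to the next state.
p_forward = np.full(S, 0.6)
p_forward[0] = 0.3                 # e.g. silence gets its own, stickier parameter
log_loop, log_fwd = np.log(1.0 - p_forward), np.log(p_forward)

# Forward recursion: log_alpha[s] sums over all alignments ending in state s at frame t.
log_alpha = np.full(S, -np.inf)
log_alpha[0] = log_posterior[0, 0]                          # alignments start in state 0
for t in range(1, T):
    stay = log_alpha + log_loop                             # s -> s
    move = np.concatenate(([-np.inf], log_alpha[:-1] + log_fwd[:-1]))   # s-1 -> s
    log_alpha = np.logaddexp(stay, move) + log_posterior[t]

full_sum_nll = -log_alpha[-1]      # -log p(x), summed over every alignment reaching the last state
print(f"full-sum negative log-likelihood: {full_sum_nll:.2f}")
```

In the joint setup discussed in the text, `p_forward` would be a learnable parameter (or a small network) optimized together with the label posterior model, for example by writing the same recursion in an autodiff framework, rather than the fixed constants used here.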
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/5.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/5.txt new file mode 100644 index 0000000000000000000000000000000000000000..9ea9ba93d3a9c85a56069eabb7b35eb3aa815bff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/5.txt @@ -0,0 +1 @@ +artificial intelligence (ai) technology has shown considerable promise for benefiting society, but irresponsible development and misuse have also led to great harm , . failures in ai technology have recently included the discovery of racial bias in healthcare algorithms , false arrests based on faulty facial recognition , and increasing concern about car crashes involving driverless or driver assistance tech . accountability for addressing these ai challenges lies not just with individual ai developers, but also with governments, agencies, educators, professional societies, and organizations that develop and deploy ai . these institutional actors play a crucial role in implementing policies and practices that foster responsible ai.ai is generally understood to be a machine or computer system able to perform tasks normally requiring human intelligence, including but not limited to making predictions, recommendations, or decisions - . responsible ai considers the larger framework within which ai is developed and used and advocates for a value-driven process that prioritizes consideration of fairness, accountability, antidiscrimination, privacy, security, participatory engagement, explainability, sustainability, and societal impact , , . in her book on responsible ai, dignum says that responsible ai is about, "ensuring that results are beneficial for many instead of a source of revenue for a few." responsible ai is often presented together with ethical ai; however, responsible ai considers not only ethical concepts but also legal, economical, and cultural ones , . although ethical ai has been critiqued for making the "non-obvious" assumption that poor ai ethics and bad design alone produce harmful outcomes, responsible ai retains a focus on normative action and equity .at its roots, ai technology is developed by individual developers, often computer scientists, data scientists, or software engineers. although all individuals involved in ai development can and should take responsibility for potential implications of their work , an individual commitment to learn about and implement responsible ai is insufficient for scalable change. peters points out that developers are not, and should not, be expected to do the work of philosophers, psychologists, and sociologists; policies and practices should instead be enacted to support collaboration with experts to anticipate and mitigate risks as a standard of practice. ai developers often find that normative aspirations conflict with commercial values of efficiency, speed, and profit , . individual developers operate within this culture, generally with limited influence, and it is unrealistic to expect individuals alone to change these dynamics , . furthermore, ai incidents can rarely be traced back to a single team member or action, and responsibility instead lies with the entire network of ai actors, including organizations that develop and deploy ai and regulatory institutions .building a culture of responsible ai development and use requires implementing policy and practices throughout this network. 
effective mechanisms exist at various levels, including rti international provided support for this research.national and state legislatures, government agencies, professional societies, and private and public sector organizations . national, state, and local legislatures often play a role in "hard governance" including laws and mandated policies . examples include the european union (eu) general data protection regulation (gdpr), which includes numerous data protection guidelines ; the california consumer privacy act, which focuses on increased transparency for data subjects ; and the new york city local law 144 requiring bias assessments of ai tools used in hiring . government agencies, while often tasked with enforcing "hard governance" laws created by legislative bodies, are also often frequently involved in "soft governance" such as creation of rules and voluntary standards . professional societies are also involved in "soft governance," such as the ieee ethically aligned design standards for ai systems . private and public sector entities, and academic institutions, can implement internal policies and procedures that support adherence with soft and hard governance approaches. yet overall, the efforts across these organizations and institutions to date have been for narrow ai use cases, especially in the united states, and lack an explicit focus on responsible ai .no single institutional practice or policy will be sufficient to ensure responsible ai; rather, all institutional ai actors in the network must be simultaneously engaged in developing and implementing policies and practices . in section ii we review proposed and existing responsible ai policies. in section iii, we assess potential for use and impact in the united states and suggest prioritization for implementation by each institutional stakeholder. the prioritization is particularly novel because although several frameworks to support responsible ai have been developed , , , few provide a comprehensive, multi-stakeholder, ranked perspective of policy options in a format useful to institutional stakeholders. national and state government, agencies, professional societies, and organizations can use this resource to better understand where to focus responsible ai efforts and to appreciate the larger context in which their efforts must operate. accountability for addressing these ai challenges lies not just with individual ai developers, but also with governments, agencies, educators, professional societies, and organizations that develop and deploy ai." responsible ai is often presented together with ethical ai; however, responsible ai considers not only ethical concepts but also legal, economical, and cultural ones,.this review considers nine policy and practice areas: licensure or certification of ai developers, ai ethics statements, pre-deployment assessments and audits, post-deployment accountability, databases of ai technologies or incidents, involvement of community stakeholders, policies that support responsible ai education, policies that support responsible ai research, and policies that support diversity in ai development. although no government agency currently maintains a public ai incident database, a number of ad hoc initiatives have been startedsuch as ai incident database, ai tracker, and awful ai.given the outsized societal impact of ai tools, multiple frameworks call for a focus on community and diverse stakeholder involvement throughout the ai life cycle to support responsible ai development,,,. 
low potential impact denotes policies which, even if implemented, may not substantially contribute to practical application of responsible ai principles, while high potential impact signifies policies that could lead to real-world expansion, promotion, and implementation of responsible ai. existing research, including by large ai tech companies, has shown that policies that support responsible ai research can lead to important findings and development of responsible ai tools. two policies, ai ethics statements and database of ai technologies or incidents, are speculated to have low impact; numerous ai ethics statements and databases exist and their impact on responsible ai development is important but small relative to the other policies. the first, licensure or certification of ai developers, requires not only the creation of responsible ai licensure or certification standards, individual appetite for obtaining a license or certificate, and generating industry demand, but also a clear definition of the ai developer profession and what should be standardized about the role. policies that support responsible ai education and policies that support diversity in ai development are both rated as medium likelihood of voluntary use; education institutions and organizations have publicly stated intentions to use these policies, although follow-up is needed to understand if and how they are ultimately applied. ai ethics statements and database of ai technologies or incidents are much easier to implement but are unlikely to have substantial impact and in the case of database of ai technologies or incidents, are unlikely to see considerable growth with voluntary use alone. government agencies should also prioritize funding for responsible ai tools and methods and require commitment to responsible ai tenets in ai contracts. to support increased diversity in ai development, professional societies can amplify the work of diverse ai researchers through internal and external communication channels, providing networking and mentoring spaces for underrepresented members, and generally promote the need for increased diversity in ai development. ai ethics statements are useful for organizations to publicize their commitment to ai ethics and agree on a shared ai ethics framework but are unlikely to lead to direct impact alone. organizations may someday be mandated to report ai technologies or incidents to a legislative body, but in the short term, the only database prioritization is likely for an internal ai inventory to support ai risk management best practices. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/50.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/50.txt new file mode 100644 index 0000000000000000000000000000000000000000..3f2e564f0dd75668267b2b1eb59881a3bf918cc9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/50.txt @@ -0,0 +1 @@ +in computational creativity (cc), there are two main strategies to assess the creativity of artefacts: evaluation metrics and human judges . evaluation metrics, proposed by humans, are usually used by generative systems to evaluate novelty and value of potential creative artefact candidates. the best ones are ultimately evaluated by humans, since they are still the ultimate judges on creativity. 
despite evidence that non-expert judges cannot appropriately evaluate the creativity of a human or machine , studies have relied on hiring non-expert volunteers on crowd sourcing platforms, such as amazon mechanical turk, to evaluate/rate artifacts in the creative domain . these studies usually do not ask volunteers to explain the reasoning behind their scores, but accept their judgement as valid. in this paper, we assume that machines, much like non-expert humans, can be used to judge the creativity of artefacts. this assumption is backed on recent advances of large language models (llms) such as gpt-3 , that enable emergent behaviour through few-shot and augmenting prompting , that is, abilities that were not present in smaller models. in particular, jokes and humour are challenging for machines because they involve complex concepts such as irony, sarcasm, and puns . jokes often rely on cultural context and knowledge, which can be difficult for a machine to access. however, recent work shows that prompting or fine-tuning llms for humour detection is a viable approach. on top of it, some recent publications show that llms can be configured/prompted to assume different personalities with zero and few-shot prompting . this favours the creation of a crowd of ai voters, where each one's vote is aggregated into a rating/score that can accurately measure the level of funniness of jokes, the cc-crowd score. in order to validate those votes, we apply an auditing technique that checks if the explanation for a particular vote is reasonable using the llm. we believe that this method could be applied to other creative domains such as story, poetry, slogans, etc. it could both help the adoption of a flexible and accurate standard approach to compare different work in the cc community under a common metric and by minimizing human participation in assessing creative artefacts, we can accelerate the prototyping of creative artefacts and reduce the cost of hiring human participants to rate creative artefacts. in this paper, we focus on evaluating funniness of jokes as a case study.we tested our methodology to assess the funniness of jokes from in a crowd of four voters with different humour types . our results show that: i) few-shot prompting leads to better results than zero-shot for the voting question, where picking the least appropriate opposite word can reduce balanced accuracy by 26% and 25% for zero-shot and few-shot respectively; ii) personality induction showed that aggressive and self-defeating voters are significantly more inclined to find more jokes funny of a set of aggressive/self-defeating jokes than the affiliative and self-enhancing voters; and iii) the crowd score follows the same trend as human judges by assigning higher scores to jokes that are also considered funnier by human judges.our main contributions are:• the crowd score, a novel method to assess jokes with llms using their intrinsic evaluation metrics. it relies on ai voters as judges for creativity instead of human judges, in which a crowd of ai voters are induced in a llm and their votes are aggregated into a single score to rate jokes. • an auditing technique to validate the votes of the ai judges using llms.• a case study with 52 jokes and 4 induced personalities to assess the funniness of jokes.• a set of prompt templates that could be customized to assess other creative artefacts.the rest of this paper is organized as follows. in section 2, we present the background and recent related work to support this research. 
in section 3, we describe our crowd score method in details. section 4 presents and analyzes the experimental results. this paper is concluded in section 5. despite evidence that non-expert judges cannot appropriately evaluate the creativity of a human or machine, studies have relied on hiring non-expert volunteers on crowd sourcing platforms, such as amazon mechanical turk, to evaluate/rate artifacts in the creative domain. it could both help the adoption of a flexible and accurate standard approach to compare different work in the cc community under a common metric and by minimizing human participation in assessing creative artefacts, we can accelerate the prototyping of creative artefacts and reduce the cost of hiring human participants to rate creative artefacts. our results show that: i) few-shot prompting leads to better results than zero-shot for the voting question, where picking the least appropriate opposite word can reduce balanced accuracy by 26% and 25% for zero-shot and few-shot respectively; ii) personality induction showed that aggressive and self-defeating voters are significantly more inclined to find more jokes funny of a set of aggressive/self-defeating jokes than the affiliative and self-enhancing voters; and iii) the crowd score follows the same trend as human judges by assigning higher scores to jokes that are also considered funnier by human judges. it relies on ai voters as judges for creativity instead of human judges, in which a crowd of ai voters are induced in a llm and their votes are aggregated into a single score to rate jokes. in our proposal, we combine the possibility of inducing a certain personality with the capability of llms to evaluate the humour level of jokes to create a method, as an alternative to traditional metrics, that could also be applied to evaluate jokes.the use of llms as an alternative to traditional metrics is supported by recent research in, where the authors show that automatic reference-free and reference-based metrics, such as bleu, bertscore, blanc and questeval, are ineffective to evaluate the quality of news summaries generated by zero-shot gpt-3, when compared to the human evaluation.in this paper, we argue that by deploying a large language model equipped with voters with different personalities, we can accurately rate the funniness of jokes, without the need for human judges. on top of that, this dataset is one of the few that also has a score/rating of funniness rated by human judges, so we can compare ai voters with human ones. the results inshow that jokes generated by gpt-3 achieved the lowest scores, while human jokes achieved the highest scores. then, the four least funny (negatives) and the four funniest jokes (positives) were selected to compose a test dataset for finding the voting question with the highest accuracy in classifying jokes into funny and not funny.76 table2: accuracy results using the full dataset composed of 52 jokes for zero-shot and few-shot prompting of the voting question. in order to compare our results with, where human judges rated jokes from 1 (not a joke) to 4 (very good joke), we also normalized the crowd score in the range.in this paper, we present the crowd score, a new method for assessing the creativity of artefacts using llms as ai judges. we applied this method to assess the funniness of jokes fromin a crowd of four voters with different humour types: affiliative, self-enhancing, aggressive and self-defeating. 
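The voting-and-aggregation step can be sketched as follows. `query_llm` is a hypothetical placeholder for whatever LLM call is actually used, the persona prompts are paraphrased rather than the paper's templates, and the auditing step is reduced to a single follow-up question, so treat this as an outline of the method rather than its exact prompts.

```python
from statistics import mean

PERSONAS = {
    "affiliative":    "You enjoy warm, inclusive humour that brings people together.",
    "self-enhancing": "You keep a good-humoured outlook and laugh at life's absurdities.",
    "aggressive":     "You enjoy sharp, biting humour, even at someone's expense.",
    "self-defeating": "You love self-deprecating humour and laughing at your own flaws.",
}

def query_llm(prompt: str) -> str:
    """Placeholder: send `prompt` to a large language model and return its completion."""
    raise NotImplementedError

def vote(persona: str, joke: str) -> tuple[int, str]:
    """One AI voter: a binary funny/not-funny vote plus a short justification."""
    prompt = (f"{PERSONAS[persona]}\n"
              f"Joke: {joke}\n"
              "Is this joke funny? Answer 'funny' or 'not funny', then explain briefly.")
    reply = query_llm(prompt)
    return int("not funny" not in reply.lower()), reply

def audit(joke: str, explanation: str) -> bool:
    """Validate a vote by asking the model whether its own explanation is reasonable."""
    reply = query_llm(f"Joke: {joke}\nExplanation: {explanation}\n"
                      "Is this explanation a reasonable justification? Answer yes or no.")
    return reply.strip().lower().startswith("yes")

def crowd_score(joke: str) -> float:
    """Aggregate audited votes from all personas into one funniness score in [0, 1]."""
    votes = []
    for persona in PERSONAS:
        v, why = vote(persona, joke)
        if audit(joke, why):          # discard votes whose justification fails the audit
            votes.append(v)
    return mean(votes) if votes else 0.0
```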
the crowd score follows the same trend as human judges by assigning higher scores to jokes that are also considered funnier by human judges. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/500.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/500.txt new file mode 100644 index 0000000000000000000000000000000000000000..ad584fd8b7d4b9d5f1dc3bf3cd174a36be22813a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/500.txt @@ -0,0 +1 @@ +the 'prakriti' type of an individual can be a portal to understanding the eclectic behaviour of humanity. the study not only involves studying a particular individual based on the body type but also takes into consideration his/her body vitals like normal heart rate, number of hours of sleep, number of steps walked, etc, ...the data retrieved from the body then can be aggregated with the 'prakriti' type to better understand the physical aspect of an individual. based on the obtained results, we can then go ahead to give medical recommendations for a particular or general diagnosis problem. an automated machine learning model is employed to continuously study and account for nuanced differences to suit a person. the research work focuses on all three types -'ekadoshaja', 'dwandwaja', and 'tridoshaja'. according to ayurveda, they have been named and recognized as 'vatt', 'pitt', and 'kaph'. however, in this work, we shall be naming them as 'vata', 'pita', and 'kapha'. in his research, he had performed genome-wide single nucleotide polymorphism analysis on 262 men, and with the help of this data, he classified them into 'prakriti. he made use of pca, in which the important or necessary features can be extracted, to reduce the dimensions, which would not only optimize the code but would also increase the accuracy when knn is used.decision tree used inis a simple algorithm that might be used in our research, this machine learning model is generally used when a decision analysis needs to be made to identify a strategy to reach a goal. keeping these factors in mind, decision tree might seem like a fitting method, but due to the high number of features, i., 140+ features, the usage of decision tree becomes very inefficient, as the entropy of each feature needs to be calculated and based on this value, the tree subdivides. she used chi-square to reduce the number of features and include only necessary columns in the model. to apply a classification algorithm on a dataset that is in categorical form, the traditional algorithms might fail as they might be expecting a float type value, but they will receive a string type value. after studying enormous chunks of body features that impact the type of 'prakriti' and scanning the various types of questionnaires published online by many websites, all important features are aggregated. we also use selectkbest, which is a feature engineering technique to retrieve a ranked list of the most important features to the least important features. it would be used to evaluate the accuracy of the model by calculating the accuracy, precision, f-score, and recall.the goal of our research work is to effectively classify a person into the seven doshas that are 'vata', 'pita', 'kapha', 'vata-kapha', 'vata-pita', 'pita-kapha', 'vata-pita-kapha' based on selected features of the body. in the results, we have effectively tested two algorithms namely multinomial naïve bayes and decision tree, against four major criteria namely accuracy, precision, f-score, and recall. 
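A minimal version of the feature-selection and classification pipeline described here, using scikit-learn's chi-square SelectKBest in front of multinomial naive Bayes; the questionnaire encoding is a stand-in (any non-negative categorical encoding works for the chi-square test), and k=80 is an assumption taken from the feature count mentioned in the text rather than a tuned value.

```python
import numpy as np
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
from sklearn.pipeline import make_pipeline

rng = np.random.default_rng(0)

# Stand-in questionnaire: 140+ categorical answers encoded as small non-negative ints.
X = rng.integers(0, 4, size=(1000, 143))
# Seven target classes: vata, pita, kapha and their two- and three-dosha combinations.
y = rng.integers(0, 7, size=1000)

X_tr, X_te, y_tr, y_te = train_test_split(X, y, stratify=y, random_state=0)

# Chi-square scores rank features by their dependence on the class label;
# SelectKBest keeps only the k highest-ranked columns before the classifier.
model = make_pipeline(SelectKBest(chi2, k=80), MultinomialNB())
model.fit(X_tr, y_tr)

# Accuracy, precision, recall and F-score for each of the seven doshas.
print(classification_report(y_te, model.predict(X_te), digits=3))
```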
algorithms proposed inandmade use of kmodes as their data was categorical but did not use any specific feature selection technique which would boost their accuracy. out of these features, as not all played a significant role in determining the 'prakriti' of a person, we made use of chi-square test to make sure that the features we are selecting entirely make an impact on the 'dosha' of a person. these are less than the metrics we received when number of features was 80, due to irrelevant features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/501.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/501.txt new file mode 100644 index 0000000000000000000000000000000000000000..fd7fb9bb148fef1be49585a58faf8a5872a0b10c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/501.txt @@ -0,0 +1 @@ +in recent years, the landscape of deep learning has been characterized by models that are increasingly large and intricate. while such models, often boasting billions of parameters, consistently set new benchmarks in accuracy, their computational intensity presents deployment challenges, especially in environments with limited computational resources, such as edge devices . knowledge distillation offers a viable solution to this quandary, facilitating the transfer of knowledge from a sophisticated, high-capacity "teacher" model to a more compact "student" model, aiming to retain as much of the performance as possible .central to knowledge distillation is the principle that learning can be enhanced when models are trained not just on hard labels but also on the richer, probabilistic outputs of a teacher model. these soft labels can be perceived as capturing the teacher's confidence distribution across classes, providing nuanced insights which hard labels might overlook .a critical component of this approach is temperature scaling, which modulates the granularity of these soft labels. the temperature parameter, introduced by hinton et al. , plays a pivotal role in controlling the "sharpness" of the teacher's output distributions, thus influencing the quality of the information relayed to the student model.the training of the student model is then typically guided by a weighted loss function that balances between the conventional cross-entropy loss and the divergence from the teacher's outputs, usually measured using kullback-leibler divergence .however, the process is not without complexities. the optimal architecture of the student model, the quality of the teacher, and the precise balance of hyperparameters are all determining factors in the success of the distillation . the intricacies of these factors and their interplay remain a focal point of contemporary research.in conclusion, knowledge distillation emerges as a key technique in the deep learning toolkit, bridging the divide between cutting-edge performance and practical, efficient deployment. its continued exploration holds the promise of further refining and expanding its applicability across diverse domains.to use knowledge distillation for creating efficient transformers, the process typically involves the following steps:1. train a large, complex transformer model as the teacher model on the task of interest. 2. generate a dataset of examples for the task, and use the teacher model to generate predictions for each example. 3. train a smaller, simpler transformer model as the student model on the same task, using the predictions of the teacher model as targets. 4. 
use a combination of the original task loss and a distillation loss to train the student model.the distillation loss encourages the student model to mimic the predictions of the teacher model, rather than just trying to optimize the original task loss.by using knowledge distillation in this way, it is possible to create efficient transformer models that are smaller and faster than the original model, while still achieving comparable or even better performance on the task of interest.there are several benefits to using knowledge distillation in building efficient transformers:1. improved efficiency: knowledge distillation allows you to create smaller, more efficient transformer models that require fewer computational resources for training and inference. this enables faster processing and reduced memory usage, making it easier to deploy the models on resource-constrained devices like mobile phones or edge devices. 2. reduced energy consumption: smaller models produced through knowledge distillation consume less energy during inference, which is crucial for battery-powered devices and sustainable ai solutions. 3. faster inference: the reduced size and complexity of distilled models lead to faster inference times, which is essential in situations where real-time or low-latency processing is required, such as natural language understanding in voice assistants or real-time text translation. 4. enhanced generalization: knowledge distillation transfers knowledge from a large, highperformance model to a smaller model by training on soft targets. these soft targets contain richer information about the relationships between different classes, which can help the student model learn better representations and generalize better to unseen data . 5. retained performance: despite the reduction in size and complexity, distilled transformer models can retain much of the performance of the larger teacher models. this means you can still achieve strong results on nlp tasks while benefiting from the efficiency improvements. 6. cost-effective deployment: the reduced computational requirements of distilled models can lead to lower costs when deploying ai solutions, especially in cloud-based services where computation costs are determined by the resources consumed. 7. easier distribution and updates: smaller models are easier to distribute and update, reducing the time and bandwidth required for users to download and install updates, which is particularly beneficial for applications on mobile devices or in areas with limited internet connectivity.overall, knowledge distillation provides a powerful technique for building efficient transformers that can achieve high accuracy, generalize well, and be deployed on resource-constrained devices., 2020]. train a smaller, simpler transformer model as the student model on the same task, using the predictions of the teacher model as targets.the distillation loss encourages the student model to mimic the predictions of the teacher model, rather than just trying to optimize the original task loss. 
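The weighted loss just described is commonly written as a blend of hard-label cross-entropy and a KL term on temperature-softened outputs. A minimal PyTorch sketch follows; the values of alpha and T, the logit shapes, and the T-squared rescaling of the soft term (a common convention to keep gradients comparable across temperatures) are assumptions rather than details taken from this text.

```python
import torch
import torch.nn.functional as F

def distillation_loss(student_logits: torch.Tensor,
                      teacher_logits: torch.Tensor,
                      labels: torch.Tensor,
                      alpha: float = 0.5,
                      T: float = 2.0) -> torch.Tensor:
    """Weighted sum of hard-label cross-entropy and soft-label KL divergence."""
    # Hard part: ordinary cross-entropy against the ground-truth labels.
    hard = F.cross_entropy(student_logits, labels)

    # Soft part: KL(teacher_T || student_T) on temperature-softened distributions.
    # kl_div expects log-probabilities as input and probabilities as target.
    soft = F.kl_div(F.log_softmax(student_logits / T, dim=-1),
                    F.softmax(teacher_logits / T, dim=-1),
                    reduction="batchmean")

    # T**2 rescales the soft term so its gradient magnitude stays comparable.
    return alpha * hard + (1.0 - alpha) * (T ** 2) * soft

# Toy usage: a batch of 8 examples over 10 classes with random logits.
student = torch.randn(8, 10, requires_grad=True)
teacher = torch.randn(8, 10)
labels = torch.randint(0, 10, (8,))
loss = distillation_loss(student, teacher, labels)
loss.backward()
print(float(loss))
```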
enhanced generalization: knowledge distillation transfers knowledge from a large, highperformance model to a smaller model by training on soft targets.knowledge distillation is a technique used in machine learning to transfer knowledge from a larger, more complex model (called the teacher model) to a smaller, simpler model (called the student model).the knowledge distillation approach entails training a smaller, more focused model to replicate the results of a bigger, more broad language model, likegpt-3 [brown et al. the bigger model's high-level symbolic knowledge should be reduced into a smaller, more effective model that can accurately carry out specific tasks. the teacher model is the larger, more general language model, while the student model is the smaller, more specialized model being trained.where l is the total loss function, α is a weighting factor determining the balance between the original loss and the distillation loss, ce is the cross-entropy loss function, y is the true labels, student(x) is the student model's predictions, t is the temperature parameter used to soften probabilities, kl is the kullback-leibler divergence, and t eacher(x) is the teacher model's predictions. performance: generally, the distilled model will perform worse than the original larger model, but better than a similarly-sized model trained from scratch[du et al.so, the temperature t should be set in such a way that it balances the need for the student model to learn nuanced behaviors from the teacher model while also ensuring the student model can discern between more and less likely predictions. performance gap: the performance of the distilled student model, although better than a similarly-sized model trained from scratch, typically doesn't reach the performance level of the larger teacher model. dependence on a good teacher model: the effectiveness of knowledge distillation heavily depends on the quality of the teacher model. computational overhead: although the student model is smaller and more efficient, the distillation process itself requires the teacher model to generate predictions for the data, which could be computationally expensive, especially for large models and datasets.• privacy and security in distillation: as distillation involves transferring knowledge from a teacher model, there could be concerns about privacy and security, especially when the teacher model has been trained on sensitive data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/502.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/502.txt new file mode 100644 index 0000000000000000000000000000000000000000..f502b82ecf1de9c693539ac12dd72a57253846e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/502.txt @@ -0,0 +1 @@ +explainability and safety in ai are becoming increasingly important as language models like gpt-4 make up the next generation of software (baktash and dawodi, 2023;wu et al., 2023). methods in explainable ai, making ai more understandable for human users, often draw inspiration from interpretability research. recent work has shifted attention from vision models (erhan et al., 2009;simonyan et al., 2014) to transformer and generative models (vaswani et al., 2023;elhage et al., 2021).transformers contain attention and multi-layer perceptron (mlp) layers (brown et al., 2020). 
the attention layers (that "transfer information" between sections of the text in latent space) have received more attention within the interpretability research community (olsson et al., 2022), while the mlp layers (that non-linearly modify this information) remain underexplored (elhage et al., 2022a). given their role in complex language tasks, the lack of targeted interpretability tools for mlps is a glaring gap in ensuring explainable ai becomes the norm. in this paper, we introduce and describe deepdecipher, a tool to visualize, interpret, and explain mlp neuron activation (foote et al., 2023). b) the graphs from (a) are compared, and co-occurrence of token nodes is used to calculate the similarity score as the two-way maximum proportion of overlapping nodes. c) gpt-4-based automated descriptions of the semantics associated with neuron activation (bills et al., 2023). d) a list of snippets that this neuron activates highly to. each snippet consists of 1024 tokens colored according to the neuron activation on that token (nanda, 2022a). e) statistics and meta-information about the neuron f) meta-information about the model and layers in large language models. it eases access to explanations of low-level causal components of open language models (radford et al., 2019;biderman et al., 2023) based on the principles of mechanistic interpretability (olah et al., 2020). deepdecipher does not introduce novel methods for neuron explanations but makes recent interpretability research results available in an accessible user interface and api., 2023).deepdecipher aims to simplify access to the data generated by existing interpretability methods, specifically methods that provide information on the behaviour of mlp neurons in transformerbased language models.neuron to graph (n2g) is a method to create a graph of tokens sequences that affect a specific mlp neuron's activation. it is generated by backtracking from the most activating tokens and finding the tokens that affect the activation on the final token the most by taking examples of sequences that lead to activation and replacing tokens until one finds 1) the tokens most important for activation on 2) the end tokens where the neuron activates., 2020), c4 and python depending on the training dataset of the models.neuron explainer is an automated interpretability method that uses gpt-4 to explain which categories of token sequences a neuron responds to (e.based on the research behind n2g, we also introduce a search function to find the neurons that respond the most to a particular token along with a neuron similarity based on the same n2g data. deepdecipher creates dedicated web pages for each model, layer, and mlp neuron, complete with static links that grant users convenient access to the api data. for mechanistic interpretability researchers, the api provides access to useful data on mlp neuron functionality without the need to re-run the computationally expensive processing (e., 2020)introduce mechanistic interpretability as the foundational pursuit to reverse-engineer the algorithms learned by neural networks. 
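As a hedged sketch of how a client might consume such an API: the base URL, endpoint paths, and response fields below are illustrative guesses rather than the service's actual interface, so every identifier here should be treated as a placeholder.

```python
import requests

BASE_URL = "https://example.org/api"   # placeholder, not the real DeepDecipher host

def neuron_info(model: str, layer: int, neuron: int) -> dict:
    """Fetch aggregated data (graph, description, top snippets) for one MLP neuron.

    The path and JSON schema are assumptions for illustration only.
    """
    resp = requests.get(f"{BASE_URL}/{model}/neuron/{layer}/{neuron}", timeout=30)
    resp.raise_for_status()
    return resp.json()

def search_by_token(model: str, token: str) -> dict:
    """Hypothetical search endpoint: neurons most responsive to a trimmed, lowercased token."""
    resp = requests.get(f"{BASE_URL}/{model}/search",
                        params={"token": token.strip().lower()}, timeout=30)
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    hits = search_by_token("gpt2-small", "Python")
    print(hits)   # e.g. {"activating": [...], "important": [...]} under the assumed schema
```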
when(elhage et al., 2021)defined a mathematical overview of the transformer models, mechanistic interpretability became more focused on language models(nanda, 2023;elhage et al.research tooling for mechanistic interpretability: contemporary interpretability research has two challenges that tools for interpretability can solve; 1) to extract the features and variables we need for research, we often have to rerun models, which can be slow and expensive, and 2) accessibility of large language model internals(schubert et al.(schubert et al.here is an example using the neuron store api to neuron2graph-search that receives a trimmed and lowercase token and returns lists of neurons that (a) activate the most to that token and (b) whose activation is most affected by this token. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/503.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/503.txt new file mode 100644 index 0000000000000000000000000000000000000000..6fc961f4f3b7cd6699e8d37a29d379cf4dcbfed8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/503.txt @@ -0,0 +1 @@ +in the u.s., electric-power systems are characterized by generation mixes dominated with firm generation resources such as natural gas, coal, hydropower and nuclear. forecasting scenarios of dayahead emissions with largely firm resources is relatively simple although not trivial. this complexity is due to competitive power markets, heterogeneity in nodal vs wholesale power market design, and other factors such as the relative size of the spot market compared to the day-ahead market. in contrast, as we shift to an energy system characterized by flexible power markets, dispatchable sources, and competing low-cost generation such as large-scale battery or hydrogen storage, system operators will be able to choose from a mix of different generation as well as emission pathways.with electrification, multiple sectors of the economy rely on knowing real-time market prices as well as emissions to make decisions. for example, enterprise fleet operators that account for scope 2 emissions charge their fleet of electric vehicles when the emissions intensity of the grid is the lowest. thus, having real-time or near real-time access to these key parameters is becoming increasingly important. to fully develop the emissions implications of a given dispatch schedule, we need a near real-time workflow with two layers. the first layer is a market model that continuously solves a security-constrained economic dispatch model. the second layer determines the marginal emissions based on the output of the market model, which is the subject of this paper.marginal emissions and marginal emissions factors help quantify the health, environmental, and climate change impacts caused by changes in marginal net electricity consumption, which could result from new technologies or policies . in our analysis, we focus on predicting dayahead marginal emissions given a system-level forecast of demand. this also allows us to conduct a sensitivity analysis on the computed emissions by marginally increasing the day-ahead forecasts to characterize the exact impact on the resulting generation mix. most studies and capacity expansion analyses focus on the average emissions intensity of a given generation mix . while, the average emissions intensity provides a systems-level metric, it does not yield any information in the event that the forecasts are inaccurate. 
the marginal emissions intensity, however, characterizes where the extra unit of generation is coming from so that we can choose from a variety of dispatch alternatives.we propose predicting day-ahead emissions, marginal emissions, and marginal emissions intensity using day-ahead forecast of hourly demand or hourly generation by fuel type. convolutional neural networks (cnns) are a type of neural network that was initially designed to handle image data . the ability of cnns to automatically extract features from raw input data can be applied very effectively to time series forecasting problems . cnns support multivariate inputs, multi-step outputs, and when coupled with long short term memory (lstm) networks, support the efficient learning of temporal dependencies . for example, enterprise fleet operators that account for scope 2 emissions charge their fleet of electric vehicles when the emissions intensity of the grid is the lowest. the second layer determines the marginal emissions based on the output of the market model, which is the subject of this paper.marginal emissions and marginal emissions factors help quantify the health, environmental, and climate change impacts caused by changes in marginal net electricity consumption, which could result from new technologies or policies. in our analysis, we focus on predicting dayahead marginal emissions given a system-level forecast of demand. this also allows us to conduct a sensitivity analysis on the computed emissions by marginally increasing the day-ahead forecasts to characterize the exact impact on the resulting generation mix. most studies and capacity expansion analyses focus on the average emissions intensity of a given generation mix. the marginal emissions intensity, however, characterizes where the extra unit of generation is coming from so that we can choose from a variety of dispatch alternatives.we propose predicting day-ahead emissions, marginal emissions, and marginal emissions intensity using day-ahead forecast of hourly demand or hourly generation by fuel type.for every 24-hour period, the average emissions factor for co 2 is taken as the ratio of the total emissions to the total electricity generated. in contrast, the marginal emissions, ∆e co2 t , and marginal generation, ∆g fossil t , are computed using equations 1 and 2. the marginal emissions intensity is the ratio of the marginal emissions to the marginal generation for each timestamp t. using the system-level day-ahead forecast, previous day demand, observed emissions, marginal emissions, and marginal demand, we train the cnn model with observed emissions as the endogenous variable. using a mean hourly emissions intensity depicted by the red line results in the loss of granular information that may lead to incorrectly computing the marginal emissions of a dispatch schedule. similarly, considering an average emissions intensity value depicted by the blue line shows that we at times underestimate or overestimate emissions over the course of the day.using methods that allow us to do day-ahead nowcasting as data continuously becomes available will further generate real-time insights for marginal emissions that enable downstream demandside optimization strategies. 
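The distinction between the average and marginal quantities above can be made concrete with a few lines of pandas; the hourly numbers are made up and the differencing follows the ratio-of-hourly-changes reading of the text (equations 1 and 2 are not reproduced in this excerpt), so this illustrates the bookkeeping rather than the paper's exact formulation.

```python
import pandas as pd

# Hypothetical 24-hour profile: total fossil generation (MWh) and CO2 emissions (tons).
df = pd.DataFrame({
    "hour": range(24),
    "gen_fossil_mwh": [3000 + 120 * h if h < 18 else 5160 - 200 * (h - 18) for h in range(24)],
    "co2_tons":       [1500 + 70 * h if h < 18 else 2760 - 90 * (h - 18) for h in range(24)],
})

# Average intensity over the day: total emissions divided by total generation.
avg_intensity = df["co2_tons"].sum() / df["gen_fossil_mwh"].sum()

# Marginal quantities: hour-over-hour changes, and their ratio per hour.
df["marg_emissions"] = df["co2_tons"].diff()
df["marg_generation"] = df["gen_fossil_mwh"].diff()
df["marg_intensity"] = df["marg_emissions"] / df["marg_generation"]

print(f"average intensity: {avg_intensity:.3f} tCO2/MWh")
print(df[["hour", "marg_intensity"]].dropna().head())
```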
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/504.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/504.txt new file mode 100644 index 0000000000000000000000000000000000000000..73ed9908dc03553dbe88cb4667b4e56d75c6e439 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/504.txt @@ -0,0 +1 @@ +in the burgeoning domain of deep learning, transformer models have indisputably reshaped the landscape of natural language processing tasks. their unprecedented capabilities in capturing intricate contextual information and establishing novel benchmarks across a myriad of applications have made them a vanguard in modern artificial intelligence . however, with great power comes inherent complexity. these models, characterized by their massive parameter counts, have posed significant challenges for real-time applications, deployment on resource-constrained devices, and even exacerbated the environmental concerns of large-scale computations .given the pressing demand for efficient and scalable deep learning solutions, model optimization techniques have emerged as pivotal endeavors in contemporary research. among these, weight pruning stands out for its intuitive appeal and empirical success . by strategically eliminating certain weights or connections based on their significance, pruning endeavors to retain, or even enhance, model performance while substantially reducing computational and memory footprints .yet, the journey of weight pruning in the vast sea of transformer architectures is still nascent, with its potentials and pitfalls only beginning to be explored. this paper delves into the intricate interplay between weight pruning and transformer models, investigating the nuances of different pruning strategies, their ramifications on model performance, and charting the way forward for more efficient and environmentally responsible neural network architectures . through systematic analyses and comprehensive experiments, we aim to bridge the chasm between the desideratum of model efficiency and the inexorable demand for superior performance.the implementation of weight pruning in transformer models can have both positive and negative impacts on their performance, and these effects often depend on various factors, such as the degree of pruning, the specific pruning strategy used, and the complexity of the task at hand. 1. computational efficiency: one of the primary benefits of weight pruning is the reduction in the number of parameters in the model, which can lead to increased computational efficiency. fewer weights mean fewer operations during the forward and backward passes, resulting in less computation time. this advantage is particularly important when deploying models on resource-constrained devices or when processing large datasets.2. model size: weight pruning can significantly reduce the model's size, making it more suitable for deployment in environments with limited storage capacity. this reduced footprint does not only benefit deployment, but also makes it easier and faster to transfer models across different platforms or networks .3. overfitting: weight pruning can act as a form of regularization, helping to reduce overfitting. by removing less important weights, the model's capacity is reduced, making it less likely to overfit to the training data . this can lead to better generalization performance on unseen data.4. 
performance trade-off: despite these benefits, weight pruning must be carefully managed to avoid a significant drop in model performance. if too many weights are pruned, or if the pruning is not conducted carefully, the model can suffer a loss in its capacity to capture complex patterns in the data, resulting in reduced accuracy or other performance metrics .5. fine-tuning requirement: after pruning, the model often needs to be fine-tuned on the taskspecific data to recover any potential loss in performance. this fine-tuning step introduces an additional computational cost and requires careful hyperparameter tuning.in summary, while weight pruning can lead to increased computational efficiency, reduced model size, and potentially better generalization, it can also introduce a performance trade-off and additional computational costs associated with fine-tuning. it's crucial to carefully consider these factors when implementing weight pruning in transformer models, ensuring that the pruning strategy aligns well with the specific requirements and constraints of the task and deployment environment. this paper delves into the intricate interplay between weight pruning and transformer models, investigating the nuances of different pruning strategies, their ramifications on model performance, and charting the way forward for more efficient and environmentally responsible neural network architectures. the pruning schedule can influence how the model adapts to the pruning process and can have a substantial impact on the final model performance. the pruning rate, one of the primary hyperparameters in weight pruning techniques, significantly influences the performance of a language model such as a transformer. model performance: higher pruning rates might lead to a more substantial reduction in the model size, which is beneficial for computational efficiency[neklyudov et al. conversely, a lower pruning rate can preserve more information, reducing the risk of performance degradation but resulting in a less compact model.in conclusion, the choice of pruning rate involves a trade-off between model performance, computational efficiency, fine-tuning requirements, and the potential for regularization. post-pruning fine-tuning: after pruning, the model usually needs to be fine-tuned to recover any performance loss caused by the pruning process. if the model is not well trained before pruning, the pruning process may remove important weights, leading to substantial performance degradation.• advanced pruning strategies: develop more advanced and nuanced pruning strategies that can better balance model size reduction and performance.• pruning with knowledge distillation: combining pruning with knowledge distillation, where the knowledge from a larger model (teacher) is transferred to a smaller model (student), could be a promising avenue for maintaining performance while reducing model size. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/505.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/505.txt new file mode 100644 index 0000000000000000000000000000000000000000..12033752fbe5c10842f3097a4d843cf6bb750fc5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/505.txt @@ -0,0 +1 @@ +many machine learning systems operate on continuous timeseries data streams to detect and predict target variables . 
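As a concrete illustration of magnitude-based weight pruning on a transformer-style layer, the sketch below uses PyTorch's built-in pruning utilities; the tiny feed-forward block, the 30% pruning amount, and the single-block scope are arbitrary choices, and a real workflow would follow this with the task-specific fine-tuning discussed above.

```python
import torch
import torch.nn as nn
import torch.nn.utils.prune as prune

# A toy stand-in for one transformer feed-forward block.
ffn = nn.Sequential(nn.Linear(512, 2048), nn.GELU(), nn.Linear(2048, 512))

# L1-magnitude pruning: zero out the 30% of weights with the smallest absolute value.
for module in ffn:
    if isinstance(module, nn.Linear):
        prune.l1_unstructured(module, name="weight", amount=0.30)

# Make the pruning permanent (removes the mask/original re-parametrization).
for module in ffn:
    if isinstance(module, nn.Linear):
        prune.remove(module, "weight")

total = sum(p.numel() for p in ffn.parameters())
zeros = sum((p == 0).sum().item() for p in ffn.parameters())
print(f"sparsity after pruning: {zeros / total:.1%}")
# A real pipeline would now fine-tune on the downstream task to recover any lost accuracy.
```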
improving the performance of streaming models has typically involved increasing the number of parameters, finding better architectures, or adding more training data. these approaches, while effective, come with crucial real-world challenges, including increased requirements and complexity of hardware, higher latency, and unavailability of large-scale representative data. also, adding more data is not always helpful for small models with limited capacity. streaming models often use connectionist temporal classification (ctc) or recurrent neural network transducer (rnn-t) losses for alignment-free training, and frame-wise cross entropy or focal losses for frame-wise training. frame-wise cross entropy loss (fcel) computes the cross entropy loss between the frame prediction and the frame ground truth label, which can be sub-optimal in streaming tasks as the training occurs on a highly imbalanced set of frames with respect to the prediction class as well as the importance of the frames in the task. saxena et al. and higuchi et al. use learnable data parameters that modulate the importance of data samples and classes during training. ryou et al. introduced an anchor loss, improving over focal loss, that dynamically rescales the cross entropy in image classification and human pose estimation based on prediction difficulty for samples. some image and video segmentation works modify the regular pixel-wise and frame-wise cross entropy to better learn around the boundary pixels and frames respectively, which are usually more challenging. we propose a new loss for training streaming models to learn corresponding frame-wise predictions in speech and sensory-based tasks without modifying the resource-efficient model architecture or increasing the required training data. specifically, we introduce a frame-wise loss function, streaming anchor loss (sal), that is temporally more informed of the time-series task and corresponding labels, enabling the model to learn the prediction for each frame or timestep with varying importance. the loss incorporates task-specific domain knowledge using anchors, which are the frames crucial to the detection task's ground truth labels. instead of gathering new training data, which is often expensive and time consuming, our method augments the existing training data with anchor points, allowing sal to use the available data more effectively. sal dynamically rescales the frame-wise cross entropy based on the temporal proximity to task-specific anchors. figure 1 illustrates examples in the context of particular streaming tasks where the loss function penalizes errors on frames closer to the task anchors, i.e., the crucial frames, more heavily (detailed elaborations in section 2.2.2). such a loss regularizes the model training to prioritize its limited available resources on predicting the infrequent and significant frames primarily responsible for the task at hand. experimental results demonstrate that the loss significantly improves the performance across different model architectures, detection tasks, and data modalities.
figure 1 (referenced above) also illustrates how the streaming anchor loss (sal) modulates the frame-wise cross entropy loss (fcel) with a frame-wise multiplicative factor w_t, referred to as the loss weights in the plots, such that l_sal = w_t * l_fcel, while plain fcel corresponds to the constant weight w_t = 1. to this end, we make the following contributions: • we propose a new streaming anchor loss and its focal variations to learn frame-wise predictions for streaming tasks. we present a general formulation of the losses and adopt them for real-world tasks (section 2). • we demonstrate generalizability and effectiveness of the proposed losses using common resource-efficient model architectures like convolutional and recurrent networks on three specific tasks using audio and multimodal data - keyword spotting, multi-modal trigger detection, and speech onset detection. • compared to baseline frame-wise losses, our proposed losses achieve improved accuracy as well as detection latency (section 3). streaming anchor loss (sal) uses a frame-wise weight, w_t, to modulate the frame-wise cross-entropy loss (fcel) such that this weight contains information about the importance of the corresponding frames in the context of the task. for frames t ∈ {1, …, T}, let l_fcel(y_t, ŷ_t) and l_sal(y_t, ŷ_t) respectively denote the frame-wise cross entropy loss and streaming anchor loss. recent works have used a temporal adaptation of focal loss, which we refer to as frame-wise focal loss (ffl), to learn frame-wise predictions since there is usually a class imbalance as most frames belong to background (trigger absent, speech absent, etc.) and significantly fewer frames belong to foreground (trigger present, speech present, etc.).
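to make the weighting idea above concrete, here is a minimal pytorch-style sketch of a frame-weighted cross entropy in the spirit of sal; the exponential decay of the weight with distance to a single anchor frame, and the scale parameter tau, are illustrative assumptions rather than the paper's exact formulation.

```python
# illustrative sketch only: a frame-weighted cross entropy in the spirit of the
# streaming anchor loss described above. the exact weighting function, anchor
# definition, and normalisation used in the paper are assumptions here.
import torch
import torch.nn.functional as F

def streaming_anchor_loss(logits, targets, anchor_frames, tau=5.0):
    """
    logits:        (batch, time, num_classes) frame-wise predictions
    targets:       (batch, time) integer class labels per frame
    anchor_frames: (batch,) index of the task-critical (anchor) frame per sequence
    tau:           temporal scale controlling how quickly the weight decays (assumed)
    """
    b, t, c = logits.shape
    # plain frame-wise cross entropy, kept per frame (no reduction yet)
    ce = F.cross_entropy(logits.reshape(b * t, c), targets.reshape(b * t),
                         reduction="none").reshape(b, t)
    # distance of every frame to the sequence's anchor frame
    frame_idx = torch.arange(t, device=logits.device).unsqueeze(0)      # (1, t)
    dist = (frame_idx - anchor_frames.unsqueeze(1).float()).abs()       # (b, t)
    # weight w_t >= 1 that is largest at the anchor and decays with distance
    w = 1.0 + torch.exp(-dist / tau)
    return (w * ce).mean()
```

setting the weight to a constant 1 recovers plain fcel, matching the relation l_sal = w_t * l_fcel quoted above.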
as such, we compare our proposed sal with ffl and also propose two focal variations of streaming anchor loss: streaming anchor+focal loss (sa+fl) and streaming anchor focal loss (safl).streaming anchor+focal loss (sa+fl): the model learns by relatively up-weighting the loss for frames temporally closer to the task anchors, or for challenging frames dependent on the classifier confidence.streaming anchor focal loss (safl): the model learns by relatively up-weighting the loss for the challenging frames which are themselves temporally closer to the task anchors. resource-constrained models which are central to such tasks particularly show more pronounced effects, as we believe the loss function regularizes the model during training to use most of its limited capacity on classifying the relatively rare and task-relevant anchor frames. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/506.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/506.txt new file mode 100644 index 0000000000000000000000000000000000000000..9caccea4693407f8f992bfee4847a0248f555ef0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/506.txt @@ -0,0 +1 @@ +gesture recognition systems provide an uncomplicated and intuitive modality for human machine interfaces. unlike physical buttons, switches and touch screens, gesture-controlled systems are touch-less, which improves their hygiene. the complexity of gestures limits those systems to simple control tasks compared to the more feature-rich touch-screens and voice assistants. on the other hand, this simplicity allows a less cognitive demanding control, while the user can focus on other things. this makes gesture sensing an ideal candidate for controlling various consumer and iot devices like intelligent thermostats , smart tvs or other smart home appliances .in this work, we focus on 60 ghz fmcw based radar sensing, specifically on soli -a miniature radar device that was co-developed by infineon and google . the small form-factor of the soli sensor allows an integration into iot devices with space limitations. additionally, the sensor is tuned for efficiency and low power consumption , which is essential for iot and consumer applications. while image sensors offer a higher resolution to resolve gestures, these systems suffer from privacy issues, high cost, high power consumption, and higher compute and memory requirements. we show that the reduced information in the radar signal is sufficient for robust gesture recognition, while still benefiting from the advantages mentioned earlier.early studies of the soli sensor propose a feature extraction based on range-doppler images (rdis) followed by a random forest classifier . other authors propose for 24 ghz fmcw radar a similar signal processing combined with an rnn to encode temporal characteristics of gestures . the majority of work replaces those hand-crafted processing chains with neural networks, which jointly extract features and classify gestures . in radarnet a convolutional neural network, called frame model, extracts features on a frame basis and aggregates the information using an rnn, namely temporal model. 
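for orientation, the frame-model/temporal-model pattern just described (a small network per frame whose features are aggregated by an rnn) can be sketched as below; the layer sizes, the gru choice, and the number of output classes are assumptions for illustration, not the published radarnet or soli configuration.

```python
# rough sketch of the frame-model + temporal-model pattern described above
# (a small per-frame cnn followed by an rnn); all layer sizes and the gru choice
# are assumptions for illustration only.
import torch
import torch.nn as nn

class FrameModel(nn.Module):
    def __init__(self, in_channels=3, feat_dim=32):
        super().__init__()
        self.net = nn.Sequential(
            nn.Conv2d(in_channels, 16, kernel_size=3, padding=1), nn.ReLU(),
            nn.AdaptiveAvgPool2d(4),
            nn.Flatten(),
            nn.Linear(16 * 4 * 4, feat_dim), nn.ReLU(),
        )

    def forward(self, x):           # x: (batch, channels, h, w) for one frame
        return self.net(x)

class TemporalModel(nn.Module):
    def __init__(self, feat_dim=32, hidden=64, num_classes=6):
        super().__init__()
        self.frame_model = FrameModel(feat_dim=feat_dim)
        self.rnn = nn.GRU(feat_dim, hidden, batch_first=True)
        self.head = nn.Linear(hidden, num_classes)   # e.g. 5 gestures + background

    def forward(self, frames):      # frames: (batch, time, channels, h, w)
        b, t = frames.shape[:2]
        feats = self.frame_model(frames.flatten(0, 1)).view(b, t, -1)
        out, _ = self.rnn(feats)
        return self.head(out)       # per-frame class logits (batch, time, classes)
```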
all those approaches rely on heavy 2d data processing, either as preparation for feature extraction, or as input to neural networks. our main contributions are: (1) a lightweight radar processing algorithm, which requires significantly less computational resources than conventional approaches; (2) a tiny neural network enhanced with a custom label refinement and data augmentation strategy to robustly detect and classify five different gestures. finally, we conclude the results of our work and give some directions for future work. the set of gestures contains four directional swipes and a push gesture towards the sensor, see fig. we claim that those gestures can be fully described by a time series of rf scattering characteristics of the moving hand: radial distance, radial velocity, horizontal angle, vertical angle, and signal magnitude. based on this assumption, the proposed radar processing algorithm identifies the hand as a target first, followed by an extraction of those characteristics. the device receives the reflected rf signal on three receive antennas, arranged in an l-shape, which allows estimating the angle of the scattering target in two planes. the bursts contain 32 chirps, which results in a 3d array with r = 3 receive channels, the slow time axis c = 32, and fast time axis s = 64. in order to detect the hand as a moving target, we first transform the raw radar data via fast-time processing into range profiles, and then apply a peak search on this data. based on the premise that the person performs the gesture towards the sensor, we can assume that the subject's hand is the closest moving target. the range profile shows two targets above the threshold, the moving hand and the body of the person. instead of selecting the target with the highest signal strength, the body, we select the closest target, the hand. instead of performing slow-time ffts across all range bins to generate rdis, as in prior work, we perform ffts only on the detected range bin across all receive channels. the supported gestures have in common that during their execution the hand at some point reaches a closest distance to the radar (see fig.). we mix raw radar data from background sequences together with randomly sampled gestures - this allows us to create complex sequences of gestures, which would be difficult to record and label, compare fig. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/507.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/507.txt new file mode 100644 index 0000000000000000000000000000000000000000..f3c8ccabc81f886a40646b4b30c3ad83d4e10f24 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/507.txt @@ -0,0 +1 @@ +inductive logic programming (ilp) systems induce a set of first-order logical rules that can be used to explain observations.
the solutions generated by ilp systems are naturally interpretable by humans, generalizable, and require very little data. however, ilp systems are limited to only learning from symbolic inputs. in many settings, data is instead presented in a raw form, such as a set of images or natural text. to overcome this issue, neuro-symbolic methods have been proposed in recent years. a promising approach involves separating the system into two different components: a neural component, which is responsible for mapping raw data to latent symbolic concepts, and a symbolic component which reasons over these latent concepts and produces a final answer. however, training the neural and symbolic components jointly is a difficult task. on the one hand, when the neural component is untrained, the symbolic component receives highly noisy input. on the other hand, the neural component does not have a reliable training signal before the symbolic component can induce the correct rules. for this reason, many neuro-symbolic systems use hand-engineered rules to train the neural network .to our knowledge, three approaches have been proposed that jointly train a neural network and use an ilp system to induce first-order rules.m eta abd builds on top of meta-interpretive learning and uses a process of abduction to derive the most likely rules and labels for use in backpropagation. however, it cannot learn rules involving default negation. nsil uses ilasp or fastlas to learn answer set programs. at each iteration, it induces a hypothesis and trains the neural network using neurasp . it uses a process of exploration/exploitation of examples to improve learning. however, its dependence on the repeated search for an optimal hypothesis results in a slow training process and scalability issues. the apperception engine is another method that trains two components together, however the perception component is a binary neural network whose weights can be 0 or 1, hence it is unlikely to be able to scale to complex perception tasks.in this paper, we introduce neuralfastlas, a neurosymbolic learning system built on top of fastlas. neural-fastlas receives examples in the form of raw data inputs together with a final label. from these examples, the system trains a neural network to recognise latent symbolic concepts from the raw data and induces an answer set program that can explain the final label given these latent concepts. similarly to fastlas, neuralfastlas first constructs an opt-sufficient subset of the hypothesis space that is guaranteed to contain an optimal hypothesis. using this smaller set of rules, neural-fastlas trains a neural network to recognise the latent concepts using a semantic loss function , greatly speeding up training. to help guide the training, neuralfast-las uses a novel technique of learning a posterior distribution over the rules in the opt-sufficient subset. once the neural network is trained, the final hypothesis is constructed by finding a set of rules of the shortest length that maximise the prediction scores of the network over the raw data.we prove theoretical results for the correctness of the optsufficient subset produced by neuralfastlas and show a sufficient condition on the convergence of the neural network to guarantee that the correct rules are learnt. 
furthermore, we evaluate neuralfastlas on arithmetic and logical tasks and show that it consistently achieves state-of-the-art accuracy while having training times that are orders of magnitude faster than other neuro-symbolic systems that learn rules. the paper is structured as follows. section 2 provides background knowledge on answer set programming and the fastlas system for learning such programs. section 3 formalises the neuro-symbolic learning task that neuralfastlas solves. section 4 covers each stage of the neuralfastlas algorithm in detail. section 5 presents the results of a systematic evaluation of neuralfastlas. section 6 concludes the paper. a cdpi e = ⟨e_id, e_pi, e_ctx⟩, where e_id is the example identifier, e_pi is a partial interpretation composed of a pair of disjoint sets of atoms e_pi = ⟨e_inc, e_exc⟩ called the inclusions and exclusions respectively, and e_ctx is an asp program consisting of normal rules, called a context. a program π accepts a cdpi example e if and only if there is an answer set a of π ∪ e_ctx such that e_inc ⊆ a and a ∩ e_exc = ∅. a learning from answer sets (las) task is a tuple t = ⟨b, m, e⟩ where b is an asp program, called background knowledge, e is a finite set of cdpis, and m is a mode bias. for any e ∈ e, h covers e iff b ∪ h accepts e. a task is non-recursive if no predicate in m_h occurs in m_b or the body of any rule in b ∪ e_ctx for any e ∈ e. informally, in the first step it constructs a sat-sufficient space which is comprised of two sets c+(t) and c−(t); in the second step, called generalisation, it constructs g(t) as a set of rules that generalise c+(t) without covering any rules in c−(t). a raw-data example is of the form e = ⟨e_id, e_pi_inc, e_pi_exc, e_raw⟩, where e_id is the identifier for the example, e_pi_inc and e_pi_exc are the inclusion and exclusion set, respectively, and e_raw is the set of raw data for that example. a neuralfastlas task is of the form t = ⟨b, m, e, z, θ⟩ where b is the background knowledge in the form of an asp program, m is the mode bias, z is the latent space and e is a set of raw-data examples. we say that a hypothesis h covers a neuralfastlas example e if there exists a possibility a ∈ as(b ∪ p_z) such that a ∪ h accepts e. the set of neural network outputs that can be used by h to cover an example e is denoted cov(h, e). to compute the semantic loss for an example e, the answer set program p_e = b ∪ p_z ∪ s_m_opt ∪ { :- not y } is constructed. let t be a neuralfastlas task; then there exists a hypothesis h ⊆ s_m_opt in the opt-sufficient subset such that (h, θ*) is a solution to t and, for any other hypothesis h′ such that (h′, θ*) is also a solution of t, s_len(h) ≤ s_len(h′). the experiments conducted aim to answer the following questions: (1) can neuralfastlas learn a correct hypothesis and train the neural network jointly with high accuracy? (2) how does neuralfastlas compare to fully neural and other neuro-symbolic learning methods in terms of accuracy and training time? (3) does the posterior distribution learnt over the rules accurately reflect the correct hypothesis? (4) is neuralfastlas able to scale up with a larger hypothesis space?
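the semantic loss mentioned above rewards the network for placing probability mass on any combination of latent symbols that is logically consistent with the observed final label. a minimal sketch of that idea for a hypothetical two-digit addition task (not the neuralfastlas construction over the opt-sufficient rule set) is shown below.

```python
# illustrative sketch of a semantic loss in the spirit described above: the network's
# probabilities over latent symbols are pushed towards *any* joint assignment that is
# logically consistent with the observed final label. the digit-addition task and the
# consistency check are assumptions for illustration, not the neuralfastlas task itself.
import torch

def semantic_loss(p1, p2, label):
    """
    p1, p2: (num_symbols,) probability vectors over latent digits predicted by the network
    label:  observed final answer (here: the sum of the two hidden digits)
    """
    n = p1.shape[0]
    total = torch.zeros(())
    for d1 in range(n):
        for d2 in range(n):
            if d1 + d2 == label:          # symbolic consistency check (assumed rule)
                total = total + p1[d1] * p2[d2]
    # negative log probability of producing any consistent assignment
    return -torch.log(total + 1e-12)

# toy usage: two softmax outputs and the observed label 7
p1 = torch.softmax(torch.randn(10), dim=0)
p2 = torch.softmax(torch.randn(10), dim=0)
loss = semantic_loss(p1, p2, label=7)
```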
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/508.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/508.txt new file mode 100644 index 0000000000000000000000000000000000000000..c66c4faa548f88a87135fe245c433571d1ee3a1f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/508.txt @@ -0,0 +1 @@ +freezing of gait (fog) is a common parkinson's disease (pd) mobility disturbance that episodically inflicts pd patients with the inability to step or turn while walking. in advancing stages of pd, 60% of pd patients could experience fog events ; each fog event could last up to a few minutes. fog episodes often occur at the initialization of walking (start hesitation), turning, or during walking periods, during which pd patients would experience dystonic gait during the "on" state and hypokinetic gait during the "off" state of fog .while medical experts can recognize conditions that precipitate to fog, such as narrow passages, being time pressure, distractions, dual-tasking, and male sex and actions that could alleviate fog, such as emotion, excitement, and auditory cueing , they are still trying to find underlying causes and develop reasonable prediction models for fog events. various machine learning and data analysis techniques have been used to detect fog events and pre-fog events from time-series data.one fog detection algorithm uses pure statistical inference on lower-limb acceleration to detect fog events . vertical, horizontal forward, and horizontal lateral acceleration data are collected from sensors located at pd patients' shank, thigh, and lower back as they perform three walking tasks. experiments are videotaped to allow medical experts to annotate onsets and durations of fog episodes. this algorithm predominantly relies on three features -freeze index (fi) and freezing index threshold, wavelet mean (wm), and sample entropy (se) -extracted on acceleration data over sliding windows of 2 to 4 second durations . naghavi et al. discovered that using the and the produces the highest predictivity of 96.7% ± 5.6% and 95.7% ± 3.9%, respectively. however, while using patient-dependent fi thresholds achieves around 90% fog detection , using a global fi threshold of 11 subjects achieves only 78% fog detection . due to the considerable post-processing time required in generating optimal fi thresholds for different patients, the fi algorithm is not a viable method to generalize fog detection for larger groups of pd patients.one machine learning model uses time-series plantar pressure data from 11 pd patients to detect fog events. each pd patient is required to complete a 25-meter walking task, during which a set of 16 features related to the center of pressure coordinates, center of pressure velocities, center of pressure accelerations, and ground reaction forces is collected from plantar pressure sensors. a 2-layer lstm neural network architecture and a 3-layer lstm neural network architecture show similar performance, achieving 82.1% mean sensitivity and 89.5% mean specificity and 83.4% mean sensitivity and 87.7% mean specificity in leave-one-out cross validation detection, respectively. however, plantar pressure insole sensors in the research are for single use, which means that this detection system cannot generalize to larger scale experiments or reallife detection systems .another model collects acceleration data from inertial sensors on the shins of 11 pd patients as they perform a 7meter tug test. 
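returning briefly to the freeze-index (fi) feature described earlier, a rough sketch of how such a band-power ratio can be computed over sliding windows is shown below; the band edges (roughly 3-8 hz for the freeze band and 0.5-3 hz for the locomotion band), window length, and overlap follow commonly used conventions and are assumptions here, not necessarily the cited implementation.

```python
# rough sketch of the freeze-index (fi) feature discussed above: the ratio of power in a
# "freeze" band to power in a "locomotion" band, computed over sliding windows of
# acceleration. all band edges and window settings are illustrative assumptions.
import numpy as np
from scipy.signal import welch

def freeze_index(acc, fs, win_sec=4.0, freeze_band=(3.0, 8.0), loco_band=(0.5, 3.0)):
    """acc: 1-d acceleration signal, fs: sampling rate in hz. returns fi per window."""
    win = int(win_sec * fs)
    fis = []
    for start in range(0, len(acc) - win + 1, win // 2):      # 50% overlap
        segment = acc[start:start + win]
        freqs, psd = welch(segment, fs=fs, nperseg=min(256, win))
        fmask = (freqs >= freeze_band[0]) & (freqs < freeze_band[1])
        lmask = (freqs >= loco_band[0]) & (freqs < loco_band[1])
        freeze_power = np.trapz(psd[fmask], freqs[fmask])
        loco_power = np.trapz(psd[lmask], freqs[lmask])
        fis.append(freeze_power / (loco_power + 1e-12))
    return np.array(fis)
```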
for that model, feature extraction includes standard deviation, angular jerk, power spectral entropy, principal harmonic frequency, etc. the experiment uses a decision tree for feature selection and an svm for fog/non-fog classification. it achieves 85.5% sensitivity and 86.3% specificity for pre-fog detection and 88.0% sensitivity and 90.3% specificity for fog episode detection in leave-one-out cross validation. however, this model also does not detect the type of fog onset, which may be important to researchers' understanding of fog occurrences as well. in feature extraction for robust physical activity recognition, zhu et al. mention various feature extractions from body motion acceleration data that could be used for physical activity classification. features used in the time domain include original accelerometer signals, magnitude of combined signals, jerk signals, and magnitude of combined jerk signals, while features from the frequency domain include fast fourier transforms (fft) from original signals, fft from the magnitude signal, and fft from the jerk signal. further feature extractions include mean value, standard deviation, signal magnitude area, skewness, kurtosis, etc. results from the experiment reveal that time-domain feature models consistently outperform frequency-domain feature models and time-frequency-combination models. therefore, feature selections in this paper primarily focus on manipulating acceleration data in the time domain. in accordance with kaggle's parkinson's freezing of gait prediction competition, this paper aims to discover the most suitable deep learning model that can detect fog episodes and specify the type of fog onset (start hesitation, turn, or walking) from time-series acceleration data. model development draws on data-preprocessing, feature extraction, model architecture, and training techniques utilized in related prior works, with a particular emphasis on using different feature sets to determine which features produce more robust models. defog data files, tdcsfog data files, notype data files, defog metadata, tdcsfog metadata, and a subject data file generously provided by the michael j. fox foundation are used in this study. defog, tdcsfog, and notype data files are time-series acceleration data collected from a wearable 3d acceleration sensor on parkinson's disease patients' lower back; defog metadata and tdcsfog metadata contain information about each time-series data file; the subject data file contains information about each testing participant. six defog model groups are subsequently used to generate "semi-pseudo-labelled" data with notype training data and retrained with additional data to create six additional model groups. preliminary data analysis on defog time-series data files and tdcsfog time-series data files reveals a notable separation between these two datasets (fig.).
therefore, each feature set model group is separated into a defog model group and a tdcsfog model group, denoted by defog #x and tdcsfog #x.the mean, maximum, minimum, and standard deviation of each acceleration type in all time-series data files are collected as feature values, forming a 12-dimensional feature vector for each of the 924 time-series data files. the bilstm layers produce 320-dimensional outputs. first, one of the four highest-scoring preliminary defog model groups, defog model #x, is chosen to predict fog probability for each class for each timeframe of a notype time-series file using acceleration data and any additional features used in that preliminary model. then, start hesitation, turn, and walking columns are created, where: during each fold of model training, the performance of a model is monitored by the validation mean absolute error; the training epoch that produces a model with the lowest validation mean absolute error is saved to the output. after each fold of cross validation, the performance of the model is determined by the map of its predictions on the out-of-fold validation data and the true labels of the out-of-fold validation data.the defog model group and the tdcsfog model group for each feature set are combined into one prediction instance on defog data and tdcsfog data, respectively.new defog model groups trained on pseudo labelled data allow feature set performances to increase by an average of 9% and testing map scores to increase by an average of 3%. reduction in prediction performance compared to its basis feature set -feature set c -suggests that patients' parkinson's disease statuses are excessive information in a real-time fog prediction system based primarily on motion sensor data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/509.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/509.txt new file mode 100644 index 0000000000000000000000000000000000000000..fcc9eed1691492fafb9586878eb55ad7a00b1d34 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/509.txt @@ -0,0 +1 @@ +motivation. "zero tolerance for net zero greenwashing" is the central aim of un general secretary antonio guterres at the gop27 (un, 2022). to limit global warming to 1.5°c, massive efforts in emission reduction are necessary (klaassen and steffen, 2023). consequently, an increasing amount of net zero and emission reduction targets are introduced, communicated, or updated by private and public institutions (höhne et al., 2021). with the help of natural language processing (nlp) methods, it is possible to automatically assess large chunks of textual data and gain structured insights about climate-related information (e.g., webersinke et al., 2022;bingler et al., 2022a;stammbach et al., 2022). however, none of the previous works have studied the arguably most relevant and best tractable objective for institutions -the extraction of information about net zero and reduction targets. contribution. as a remedy, this study delivers a threefold contribution at the intersection of climate change and nlp by creating a tool that automatically extracts and assesses net zero and reduction target information from various sources. our first contribution is introducing an expert-annotated data set with 3.5k text samples, which builds on the net zero tracker project (lang et al., 2023). 
second, we develop and publish climatebert-netzero (based on climate-bert (webersinke et al., 2022)), a comparatively lightweight and, therefore, less energy-intensive nlp model that can effectively classify net zero and reduction targets in text samples. 1 third, we provide two real-world use cases: we demonstrate how to extend the model capabilities and analyze the ambitions of the net zero and reduction targets. finally, we utilize climatebert-netzero to analyze the appearance of net zero and reduction claims in companies' earning calls transcripts from 2003 to 2022. contribution. as a remedy, this study delivers a threefold contribution at the intersection of climate change and nlp by creating a tool that automatically extracts and assesses net zero and reduction target information from various sources. moreover, we demonstrate that we can effectively detect the target year for net zero claims; as well as the target year, baseline year, and reduction targets in percent for general emission reduction claims., 2023). reduction targets are claims that refer to an absolute or relative reduction of emissions, often accompanied by a baseline year to which the reduction target is compared. the data set differentiates between three classes: net zero, reduction, and no targets.since net zero targets are a special case of reduction targets, we also create a model called climatebert-reduction. this model simply differentiates between general reduction targets (including net zero targets) and no targets. first, we showcase how the model can be used to perform a deeper analysis of net zero and reduction targets by analyzing the underlying ambitions of the statements. for net zero claims, the primary measure of ambition is given by the year the institution wants to achieve net zero. for approximately 95% of the net zero targets, the model can detect by which year net zero is aimed to be achieved.in conclusion, this paper demonstrates the development and exemplary employment of climatebert-netzero, a model that automatically detects net zero and reduction targets in textual data. we use the following prompt for zero-shot classifying the dataset to contain a reduction, net zero, or no target at all: """ your task is to classify a provided text whether it contains claims about reduction or net zero targets or none of them . net zero targets represent a special case of reduction targets where an institution states to bring its emissions balance down to no additional net emissions by a certain year .as an answer to the provided text , please only respond with ' reduction ' for reduction targets , ' net zero ' for net zero targets or 'none ' if no category applies . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/51.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/51.txt new file mode 100644 index 0000000000000000000000000000000000000000..9ae8c9e658181d7ad5cc419f6c19ab1edfea08b2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/51.txt @@ -0,0 +1 @@ +an ontology is the explicit representation of components of a shared conceptualization . in machines, an ontology is a vocabulary which is used by said machine in the representation of knowledge. an ai knowledge-based system would take an ontology as its universe of components and their relations and derive new implicit knowledge within that universe.as for the components of an ontology, z. 
dragisic defines them as follows:• concepts (classes) -the types of objects in a domain or area.• relations (properties, roles) -the relations between two concepts/classes. • instances (individuals) -instances of concepts/classes.• axioms -model sentences that are always correct in the domain.they're often utilized to represent information that cannot be formally specified by the other components .ontologies play a vital role in data and knowledge integration by making a common schema available . unfortunately, ontology construction is extremely expensive, in terms of both time and resources, and dependent on the availability of knowledge experts , . the construction of an ontology for a certain field requires the contribution of a knowledge engineer and of an expert in that specific field. this dependency persists throughout the lifetime of an ontology as an expert is required to develop and expand it as new requirements arise. to overcome such a bottleneck in knowledge acquisition, the field of ontology learning was conceived. ontology learning , , is the task consisting of the automatic generation of ontologies. ontology learning includes a variety of techniques and those are grouped into:• linguistic techniques -natural language processing mostly used in the preprocessing of data, and some learning tasks such as term extraction . • statistical techniques -such as data mining and information retrieval methods used to extract terms and associations between them , . • inductive logic programming (ilp) is a branch of machine learning that uses logic programming to generate hypotheses based on prior knowledge and a set of examples , , .each one of these technique groups is involved in one or more of the stages of ontology learning, those stages being preprocessing, term and concept extraction, relation extraction, concepts and relations hierarchies, axioms schemata and general axioms . linguistic techniques can have a role in almost all the stages, but as we mentioned they are mostly used for data preprocessing.as for statistical techniques, they are methods that rely entirely on the statistics of the textual resources without any concern for the semantics. one such method is the possibilistic (statistics-based) heuristic detailed in , where the authors have developed a possibilistic framework for the web ontology language (owl 2) to test axioms against evidence expressed in the resource description framework (rdf). the heuristic uses support, confirmations, and counterexamples to define possibility and necessity of an axiom and an acceptance/rejection index combining both of them. they test the developed theory on subclassof axiom testing against the dbpedia1 database. the results of their experiments showed that the method was suitable for axiom induction and ontology learning . the suggested heuristic has the drawback of being time consuming; this was addressed in a revision of the method that added a time cap for the querying process, of course at the cost of a little increase in error rate reaching 3.96% . the number of axioms tested was 5050 and it took almost 342 cpu hours and a half with an average of 244 s per axiom with time capping. this is a significant improvement, considering that testing the same amount of axioms without a time cap would have taken approximately 2,027 cpu days. another limitation is that lack of support means an inconclusive judgement, as the method queries for confirmations and counterexamples, and sometimes it might find none. 
the third drawback is that being a statistical approach it relies solely on instance data from the dataset, data that is prone to errors.in comparison, ilp techniques are a sub-field of machine learning that follow exhaustive statistical or linguistic processing. one such example is , where the authors describe a method for the automated induction of fuzzy ontology axioms which follows the machine learning approach of ilp named softfoil. one of softfoil's limits is a result of its sequential covering strategy. as it uses a greedy search to find rules, it does not guarantee to find the smallest or best set of rules that explain the training examples. another is susceptibility to being trapped in a loop while searching for the best rule.a new emerging group of techniques, is a hybrid breed that takes advantage of combining classical ilp and statistical machine learning. to stay in the context of the previous examples, a model would be trained with axioms having their scores assigned by a statistical method such as , as well as using a similarity measure that results from the logical processing of the data. this is exactly what was done in , where the authors modified the support vector clustering algorithm to attempt to predict the possibilistic score of owl axioms. they used the heuristic from as the scorer, and used a model originally developed for inferring the membership function for fuzzy sets. as a similarity measure they used one specific for subsumption axioms based on semantic considerations and reminiscent of the jaccard index. the predictor performance was poor in terms of root mean square error (rmse) scoring 0.572 (table 3 in ). in addition, the authors mentioned that they found a group of axioms that were hard to predict, and could not find a reason that explains why. this method was computationally far more efficient than , yet it was still reliant on the instances in the data set and querying them to construct the similarity measure, meaning that even though it is an improvement, it still falls victim to the same problems. the authors explicitly mention that a major weakness of their method is that training such a model consumes a significant amount of resources.our work addresses the shortcomings of the previous techniques that heavily rely on error-prone instance-dependent statistics. it is also able to predict the scores of multiple types of axioms and is not bound to simply subsumption. we propose a method that can be used as a building-block or an extension/plug-in to other existing statistical analysis or ilp options, such as dl-learner , to allow faster execution while maintaining high scoring accuracy, while still having the ability to perform as a simpler stand-alone scorer. the method works by training a model on a set of atomic class axioms scored by an algorithm, in this case . this enables the model to predict the score of any new atomic (consisting of a single concept on each side) candidate axiom. we experimented using multiple machine learning methods, and compared our work to the state of the art that aims to achieve the same goal. this paper is structured as follows: sect. ii provides some background about both axiom scoring and concept semantic similarity which are both prerequisites to training the models. as for sect. iii it lays out the methodology explaining how the axioms were extracted and scored, how the semantic measure we use was developed, and also how an axiom based vector space was modeled leading to the prediction of the scores. 
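the pipeline outlined in the methodology (concept similarities, then an axiom similarity matrix, then a vector-space representation of axioms, then score regression) can be illustrated roughly as follows; the toy concept-similarity values, the way left- and right-hand-side similarities are combined, and the choice of an svr regressor are assumptions for illustration only, not the paper's exact measure or model.

```python
# rough sketch of the described pipeline: each subclassof axiom (c_left, c_right) is
# represented by its similarities to the training axioms, combining the left-hand and
# right-hand concept similarities; a regressor then predicts the acceptability score.
# the toy similarity matrix, the min-combination, and the svr choice are assumptions.
import numpy as np
from sklearn.svm import SVR

concepts = ["person", "student", "teacher", "organization"]
idx = {c: i for i, c in enumerate(concepts)}
# toy symmetric concept-similarity matrix (would come from an ontological distance)
mc = np.array([[1.0, 0.8, 0.7, 0.2],
               [0.8, 1.0, 0.6, 0.1],
               [0.7, 0.6, 1.0, 0.1],
               [0.2, 0.1, 0.1, 1.0]])

def axiom_similarity(a, b):
    """similarity of two subclassof axioms, combining lhs and rhs concept similarities."""
    (l1, r1), (l2, r2) = a, b
    return min(mc[idx[l1], idx[l2]], mc[idx[r1], idx[r2]])

train_axioms = [("student", "person"), ("teacher", "person"), ("student", "organization")]
train_scores = [0.9, 0.85, 0.05]            # toy acceptability scores from a scorer

def encode(axiom):
    """vector of similarities between an axiom and every training axiom."""
    return [axiom_similarity(axiom, t) for t in train_axioms]

X = np.array([encode(a) for a in train_axioms])
model = SVR().fit(X, train_scores)

candidate = ("teacher", "organization")
print("predicted acceptability:", model.predict([encode(candidate)])[0])
```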
we detail our experiments including a comparison with the method presented in in sect. iv then present the results while listing our observations and findings. we end the paper with some notes and conclusions.in an owl ontology containing an inheritance hierarchy of concepts formed by the subsumption axiom rdfs:subclassof, our aim is to predict an acceptability score for a candidate atomic class axiom by learning a set of previously scored axioms of the same type, the score used is the one detailed in ii-a. to measure the similarity between (candidate) axioms, we construct a similarity measure by extending the ontological distance discussed in ii-b, which is defined among concepts, not axioms. 3) axiom base vector space modeling: this step focuses on using axiom similarity measures as weights, each axiom can be represented as a vector in an axiom based vector space. 4) score prediction: this step is dedicated to training a machine learning model with the data set (vector space model in addition to the scores) and predicting the acceptability score of new candidate axioms. machine learning models are used in the end to learn the set of scored axioms with their similarity weights and predict the acceptability score of a candidate axiom. the goal is to end up with a similar square symmetric matrix of the shape m × m, m being the number of axioms, that has axioms instead of concepts as both first row and column, and the cells would be the similarities between a pair of axioms. while comparing axioms a i and a j , we first deal with the concept on the left side of each axiom, so the left algorithm 1 constructing the matrix of axiom similarities require: all concepts included in axioms in ta be present in concept similarity matrix mc ensure: 0 ≤ s ≤ 1 ⊲ s is the similarity between 2 axioms mc ← concept similarity matrix ta ← set of labeled axioms ma ← axiom similarity matrix to be f illed. the model's goal is to predict the score of a candidate axiom, which is represented as a vector v in our vector space having m dimensions (features), which are the axiom's similarities with the axioms of the same type used to train the model.we used two datasets for our experiments, one for axiom type subclassof and it is the one used in, and another for axiom type disjointwith which is a generated set of atomic disjointwith candidate axioms, as described in sect. in case the used ontology is already populated with explicit axioms, this would be done in an attempt to obtain a set of axioms that the scorer will not provide a score close to 0 i. the algorithm applied to our axiom data set t a is algorithm 1, this is the same algorithm used to encode candidate axioms into the vector space, it is the equivalent of running queries 7 and 8 for the method proposed into retrieve the similarity. for the instance based similarity it is a problem during dataset preparation as it needs seven and a half days to prepare a data set of 722 axioms (and their negations), as well as candidate axiom encoding/processing where it takes half an hour to process every candidate axiom we want to predict a score to, whereas our ontological based method requires about fourteen seconds to process and prepare the exact same dataset, and less than 0. 
this is because the method detailed incan only handle subclassof axioms making it very limited and constrained, whereas our proposed method can address all atomic class axiom types, all that is needed is training one model for each set.we observe in tableithat the time cost for creating the disjointwith (129 seconds for 3868 axioms) experiment's asm was almost ten times that of the subclassof (13 seconds for 722 axioms), even though the number of axioms is not ten times as much, only about five. this is normal since disjoin-twith axioms are symmetrical, and as shown in algorithm 1 and explained in section iii-b, the calculation is doubled for every axiom to check forwards and backwards the similarity between the pair of axioms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/510.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/510.txt new file mode 100644 index 0000000000000000000000000000000000000000..fde3f98c87dbbb402815f60c48658c5d8bfb4d11 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/510.txt @@ -0,0 +1 @@ +global health is greatly impacted by the widespread mental health problems of depression and bipolar disorder. the world health organization (who) estimates that over 264 million people worldwide suffer from depression, making it the main cause of disability . although bipolar disorder only affects 1% to 2% of the world's population, it has a significant negative impact on the affected population's quality of life and functional impairment . these diseases have significant financial repercussions; depression and anxiety are estimated to cost us$ 1 trillion annually in lost productivity . with the introduction of on-body sensors, personal health monitoring has undergone a revolutionary change. today's people use enormous amounts of data every day for a variety of goals, such as improving life quality, tracking their fitness levels, and changing unhealthy habits. this information includes continuous records of heart rate and activity levels, which have considerable promise in the field of psychiatry and go beyond the simple metrics of daily steps taken or calories burned. growing emphasis has been paid to the complex association between activity data and a variety of mental health problems such mood swings, stress management, and social disengagement . since 2010, mental health issues-with depression leading the list of most common illnesses - have been the primary reason for years lived with disability worldwide. depression presents a variety of difficulties in the physical, financial, and emotional spheres, which frequently result in problems at work and sick days . the underlying etiology of these illnesses involves a complex combination of genetic, environmental, and social variables, with biological rhythm disruptions-often sparked by environmental disturbances-showing up in afflicted people as altered motor activity patterns . by examining actigraph data to find patterns of motor activity suggestive of depressive and bipolar illnesses, this study aims to advance our understanding of these disorders. the study intends to clarify the distinctive motor activity patterns connected to various mood disorders using cutting-edge statistical and machine learning technologies, potentially permitting better diagnostic and curative approaches.the main contributions of this paper are:1. 
our study introduces a groundbreaking hybrid random forest - neural network model for depression classification, promising enhanced accuracy in mental health diagnosis. 2. our findings directly benefit clinical practice by enabling early depression detection, potentially improving patient outcomes in mental healthcare. the paper is organized as follows: section 2 provides a comprehensive literature review, summarizing prior work and baseline algorithms. section 3 presents the proposed approach, which focuses on the novel algorithm. section 4 discusses the implementation results, demonstrating the effectiveness of the proposed approach. section 5 concludes the work and suggests directions for future research. in the context of the proposed study, the artificial neural network (ann) model takes various activity statistics (a_1, a_2, a_3) as input and classifies the binary variable "state", which denotes depressed vs. non-depressed. for the system covered by the study, the random forest model applies activity statistics, such as mean logarithmic activity, standard deviation, minimum and maximum logarithmic activity, and proportion of zero activity, as input features to categorize the binary variable "state", which denotes depressed or non-depressed states, through an ensemble of decision trees. the data was then divided into daily segments using a date-based filtering strategy, allowing for a controlled and methodical study of the motor activity patterns across time. the hybrid random forest - neural network combines the power of two distinct machine learning techniques, random forests (rf) and artificial neural networks (ann), to create a formidable ensemble model for predictive analytics.
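the exact hybridization is not spelled out here, so the following is only one plausible sketch (assumed for illustration): the random forest's class probabilities are appended to the hand-crafted activity statistics and fed to a small neural network classifier.

```python
# minimal sketch of one possible rf + nn hybrid (an assumption for illustration,
# not necessarily the paper's architecture): the random forest's predicted class
# probabilities are stacked onto the activity statistics and passed to a small mlp.
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
# placeholder features: mean log-activity, std, min, max, proportion of zero activity
X = rng.normal(size=(400, 5))
y = rng.integers(0, 2, size=400)          # 1 = depressed, 0 = non-depressed (toy labels)

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.25, random_state=0)

rf = RandomForestClassifier(n_estimators=200, random_state=0).fit(X_tr, y_tr)

# augment the original statistics with the forest's probability estimates
X_tr_aug = np.hstack([X_tr, rf.predict_proba(X_tr)])
X_te_aug = np.hstack([X_te, rf.predict_proba(X_te)])

mlp = MLPClassifier(hidden_layer_sizes=(32, 16), max_iter=1000, random_state=0)
mlp.fit(X_tr_aug, y_tr)
print("hybrid accuracy on held-out data:", mlp.score(X_te_aug, y_te))
```

other hybridizations, such as using the forest only for feature selection before the network, would fit the description equally well.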
additionally, the dataset provides madrs (montgomery-åsberg depression rating scale) information for each patient, including a unique identifier, the number of days of measurements, gender, age group, the type of affective disorder (bipolar ii, unipolar depressive, or bipolar i), the presence of melancholia, patient status (inpatient or outpatient), education level categorized in years, marital status, employment status, and madrs scores. the study presents a novel method for categorizing depression that is based on a hybrid random forest - neural network model. this hybrid model provides a viable path for enhancing mental health diagnostics by integrating the benefits of both random forest and neural network techniques. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/511.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/511.txt new file mode 100644 index 0000000000000000000000000000000000000000..61d77e027698321b41e43933b734f299101d82ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/511.txt @@ -0,0 +1 @@ +machine learning (ml) models are susceptible to adversarial attacks, i.e., input samples carefully perturbed to mislead the model. to evaluate adversarial robustness, many different gradient-based attacks have been proposed, whose performance is significantly affected by the choice of the loss function to optimize, the optimization algorithm, and the step-size scheduler. from a practical perspective, attacks tend to be run with a "default" configuration and set of hyperparameters that are deemed to fit most of the cases. yet, the attack effectiveness is highly case-dependent, implying that the choice of the configuration needs to be carefully tailored to the model rather than a de-facto standard choice. in autoattack (aa), the authors try to overcome this limitation by proposing an ensemble of parameter-free attacks, each including an internal auto-tuning process for each relevant hyperparameter. with adaptive autoattack (aaa), the approach is configured to run the parameter-free aa to look for a fast and good evaluation or alternatively come forward with an extensive search on a pool of attacks. in this paper, we aim to use a smart and effective search for the best configuration that adapts the attack to the model. hence, we propose a systematic framework for configuring the state-of-the-art fast minimum-norm (fmn) attacks properly instead of running extensive searches on multiple attacks. to this end, we develop our framework by rethinking the choice of the loss function, optimizer, and step-size scheduler as attack hyperparameters and then using a unified hyperparameter optimization procedure. algorithm 1 (the fast minimum-norm (fmn) attack) takes as input x, the input sample; y, the target (true) class label; α_0, the initial δ-step size; k, the total number of iterations; l, the loss of the attack; h, the step-size scheduler; and u, the update function for the gradient, and it outputs the minimum-norm adversarial example x⋆.
we introduce here a modified fmn attack algorithm, referred to as ho-fmn, in which the loss function, the optimizer, and the step-size scheduler, along with their hyperparameters, are all exposed to be optimized. we report in algorithm 1 a revisited formulation of the fmn attack, in which the roles of the loss function l, optimizer u, and scheduler h are better isolated. this novel formulation of the fmn attack enables us to generalize it by allowing a different selection of each component, treating each of them as a different hyperparameter or attack configuration. while the overall algorithm remains conceptually unchanged, we modify the attack loss l, the optimizer u, and the step-size scheduler h used in the δ-step. in practice, given a model, we exploit hyperparameter optimization to find the best combination of loss, optimizer, and scheduler along with their hyperparameter values. specifically, we aim to improve fmn by optimizing the choice of: (i) the loss function, selecting between the logit loss (ll) and the cross-entropy loss (ce); (ii) the optimizer, selecting between sgd (with and without nesterov acceleration) and adam (with and without amsgrad); and (iii) the step-size scheduler, selecting among cosine (calr), cosine annealing with warm restarts (cawr), multistep (mslr), and reduce on plateau (rlrop). in particular, for each optimizer, we tune the initial step size, the momentum, and the weight decay; for each scheduler, we tune the most important parameters, such as the milestones in mslr, the iteration parameters in calr and cawr, and the factor in rlrop. we take a subset of 100 samples from the cifar10 test set for running our hyperparameter optimization, where for every model we analyzed each and every loss/optimizer/scheduler configuration. upon finding a specific set of best hyperparameters, we use a separate set of 1000 samples (also taken from the cifar10 test set) to run the fmn attack on the models and discuss the results. we consider 9 state-of-the-art robust models from robustbench: m0, a wideresnet-70-16; m1, a wideresnet-28-1; m2, a wideresnet-70-16; m3, a wideresnet-106-16; m4, a wideresnet-28-10; m5, a wideresnet-70-16; m6, a resnet-152; m7, a wideresnet-28-10; and m8, a resnet-18. we report the hyperparameters found with the first setting, which we name ho-fmn (adam), listed for each model as (learning rate, weight decay, factor, amsgrad): m0: (5.534, 0.025, 0.327, false); m1: (8.801, 0.043, 0.366, false); m2: (4.073, 0.019, 0.286, false); m3: (9.616, 0.024, 0.301, false); m4: (7.078, 0.019, 0.260, false); m5: (7.078, 0.019, 0.260, false); m6: (4.194, 0.020, 0.235, false); m7: (9.339, 0.023, 0.352, true); m8: (4.073, 0.019, 0.286, false). in this work, we investigated the use of hyperparameter optimization to improve the performance of the fmn attack algorithm.
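a rough sketch of the kind of configuration search described above is given below; optuna is used purely as an example search library, and run_fmn_attack is a hypothetical helper standing in for running the attack with a given configuration, so neither reflects the paper's actual search strategy, budget, or objective.

```python
# illustrative configuration search over the attack components described above.
# `run_fmn_attack`, `model`, and `tuning_samples` are hypothetical placeholders: the
# helper is assumed to run the fmn attack with the given configuration on a small
# sample set and return the median adversarial perturbation norm (lower is better).
import optuna

def objective(trial):
    cfg = {
        "loss": trial.suggest_categorical("loss", ["logit", "cross_entropy"]),
        "optimizer": trial.suggest_categorical("optimizer", ["sgd", "sgd_nesterov",
                                                             "adam", "adam_amsgrad"]),
        "scheduler": trial.suggest_categorical("scheduler", ["calr", "cawr",
                                                             "mslr", "rlrop"]),
        "lr": trial.suggest_float("lr", 0.1, 10.0, log=True),
        "weight_decay": trial.suggest_float("weight_decay", 1e-3, 1e-1, log=True),
    }
    # hypothetical: attack the tuning samples with this configuration and report
    # the median norm of the minimum-norm perturbations it finds
    return run_fmn_attack(model, tuning_samples, cfg)

study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=50)
print("best configuration:", study.best_params)
```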
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/512.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/512.txt new file mode 100644 index 0000000000000000000000000000000000000000..17b7685e4b420d13ced5321d3caceb12fc79162a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/512.txt @@ -0,0 +1 @@ +divorce also is known as the dissolution of marriage, which is the process of terminating a marriage or marital union . while it seems as simple as a separation between two people, a divorce may have a significant effect on an individual. a study conducted by the institute for population research showed that around 40% of american children usually experience parental divorce/separation during their childhood and has a long-term effect on their mental health. parallel research identifies that children from divorced families received less financial support compared to other children during their lifetime. additionally, those children also felt more distress due to a lack of emotional support from the families . as a result, parental divorce influences child behavior in a negative manner, which leads to anger, frustration, and depression. a divorce could significantly hamper couples, personal and social life. duncan & hoffman (1985) and morgan (1989) , , showed that individuals' separation also confronted a variety of stress, financial crisis, reduction in social networks and moving on (amato, 2000;mclanahan & sandefur, 1994) , . augustine (2000) found that men were nearly 4.8 times as likely to commit suicide as women. they have used the nlms data for their study. their study demonstrated a potential relationship between marital status with suicidal risk and,according to the centers for disease control and prevention (2017), on average, 1.4 million american attempted suicide. the combined medical and work loss are observed as 69 billion dollars . apart from this, the relationship between divorce and the individuals driving performance such as accident rate, speeding, failure to yield etc. are also highly correlated . in a nutshell, divorce has a significant effect, which brings damage to various aspects such as social, economic, physical, mental and so on. during the past few decades, the effect of divorce and the reason behind the divorce/marriage, have been studied thoroughly, while very few studies focused on forecasting the divorce based on some specific criteria - . irfan et al. (2018), uses naïve bayes and k-nearest neighbor methods to predict whether the couple are more likely to get divorce or not. the study took into account several attributes including the plaintiff's age, age of the defendant, marriage age, child age and so forth. using those attributes, their computational results show that naïve bayes and knn algorithms can differentiate between divorce and married couples with an accuracy of 72.5% and 57.5% respectively. however, the study negates the importance of feature selection procedure, which is one of the most significant drawbacks of this research. in addition, the lower accuracy of the model constitutes the probability of a less accurate model as a decision making tool. another study conducted by wolfinger (2018) uses a parametric sickle model in order to divorce prediction .however, none of the referenced literature , mentioned regarding the use of standard scale of divorce prediction, that potentially applied in the medical and psychology field. mustafa et al. 
(2019) have used correlation-based feature selection and ann models in their research. they used the divorce predictor scale (dps), which is based on gottman couples' therapy and includes two classes of people: divorced (49%) and married (51%). their analysis demonstrated 98.23% accuracy with rbf. one of the biggest drawbacks of this research is that the dataset is highly balanced, which ultimately results in the possibility of higher accuracy with high overfitting risk. li (2018) uses a markov logic network-based method to predict judicial decision cases. however, the study ignored divorce prediction combined with psychological situations, which is thoroughly studied by some other studies. ibrahim (2020) uses artificial neural network (ann) methods to predict whether a couple is going to get divorced or not. the model went through multiple learning-validation cycles until it achieved an accuracy of 100%. jue et al. (2020) uses three different machine learning algorithms-support vector machines (svm), random forest (rf), and natural gradient boosting (ngboost)-on the divorce prediction dataset, and their preliminary computational result indicates 98.33% accuracy on predicting whether the marriage is reliable or not. ranjitha et al. (2020) uses particle swarm optimization (pso) in order to reduce the redundant features that do not contribute to the divorce prediction. however, none of the study results explain the details of feature selection or discuss why their proposed model should be trusted. while data mining is often employed by the majority of studies in psychology and psychiatry, one cannot conclude that it is adequate for a phenomenon with long-term consequences, such as divorce. fortunately, if divorce predictors can be used in advance, many of the divorces would be avoided. taking these opportunities into account, firstly, this paper analyzed six different machine learning algorithms that evaluate whether a couple might get divorced or not, using the "divorce predictor dataset"; secondly, the top 10 features out of 54 features were identified that highly correlate with the divorce prediction. thirdly, the prediction probabilities are analyzed using local interpretable model-agnostic explanations (lime). finally, a desktop-based graphical user interface (gui) application is developed using tkinter and the machine learning model, which can be accessed and used by an individual to determine whether he/she is likely to get divorced in the future, ultimately presenting the opportunity to take necessary steps in saving conjugal life. during the past few decades, the effect of divorce and the reasons behind divorce/marriage have been studied thoroughly, while very few studies focused on forecasting divorce based on some specific criteria. irfan et al. the study took into account several attributes including the plaintiff's age, age of the defendant, marriage age, child age and so forth. however, the study negates the importance of the feature selection procedure, which is one of the most significant drawbacks of this research. (2019) have used correlation-based feature selection and ann models in their research. however, the study ignored divorce prediction combined with psychological situations, which is thoroughly studied by some other studies. jue et al. while data mining is often employed by the majority of studies in psychology and psychiatry, one cannot conclude that it is adequate for a phenomenon with long-term consequences, such as divorce.
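purely as an illustration of the kind of pipeline described in this file (the csv path and the "class" column name are placeholders, not the authors' code), the six classifiers can be compared with scikit-learn and the 54 questions ranked to pick the top 10:

# hypothetical sketch: comparing the six classifiers named in this paper on the
# 54-attribute divorce predictor data; "divorce.csv" and its column layout are
# assumptions made for illustration.
import pandas as pd
from sklearn.model_selection import cross_val_score
from sklearn.linear_model import LogisticRegression
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier

df = pd.read_csv("divorce.csv")                       # placeholder path
X, y = df.drop(columns=["class"]), df["class"]        # placeholder label column

models = {"lr": LogisticRegression(max_iter=1000), "lda": LinearDiscriminantAnalysis(),
          "knn": KNeighborsClassifier(), "cart": DecisionTreeClassifier(random_state=0),
          "nb": GaussianNB(), "svm": SVC()}
for name, model in models.items():
    print(name, cross_val_score(model, X, y, cv=10).mean())   # 10-fold accuracy

# rank the 54 questions and keep the 10 most important ones
importance = RandomForestClassifier(random_state=0).fit(X, y).feature_importances_
top10 = X.columns[importance.argsort()[::-1][:10]]
print(list(top10))

the per-prediction explanations mentioned above would then be produced with a package such as lime, and the selected model wrapped in a tkinter form for the desktop app.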
taking these opportunities into account, firstly, this paper analyzed six different machine learning algorithms that evaluate whether a couple might get divorced or not, using the "divorce predictor dataset"; secondly, the top 10 features out of 54 features were identified that highly correlate with the divorce prediction. finally, a desktop-based graphical user interface (gui) application is developed using tkinter and the machine learning model, which can be accessed and used by an individual to determine whether he/she is likely to get divorced in the future, ultimately presenting the opportunity to take necessary steps in saving conjugal life. we applied six widely used machine learning algorithms: logistic regression (lr), linear discriminant analysis (lda), k-nearest neighbors (knn), classification and regression trees (cart), gaussian naive bayes (nb) and support vector machines (svm) on the "divorce predictor dataset". finally, a graphical user interface (gui) based divorce predictor app is developed using the top 10 important features from gottman couples therapy, which can be accessed by individuals to identify their pre-existing relationship status. when the overall results of the study are examined, it was observed that the divorce predictors scale developed within the scope of gottman couples therapy can predict divorce rates with an accuracy of around 99% using svm. we have used 6 different algorithms in order to predict the probability of whether a couple is likely to get divorced or not. researchers, practitioners and psychiatrists can use such tools to evaluate the patient's current state regarding divorce and can take necessary steps to prevent the divorce. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/513.txt new file mode 100644 index 0000000000000000000000000000000000000000..f7e993c90473d75e8c155f0da64dbf63517eac96 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/513.txt @@ -0,0 +1 @@ +binary classification is a relevant topic in machine learning due to its wide range of application areas, and the design of new algorithms has resulted in a fruitful research area . the performance of the classifiers, however, depends not only on the selected algorithm but also on the quality of the input data . in this sense, the class imbalance problem is an important issue that arises in many tasks . this problem occurs when one of the two classes is underrepresented with respect to the other. imbalanced big data classification has been acknowledged as a relevant open challenge in machine learning . the most popular strategies for dealing with the class-imbalance issue, such as random undersampling (rus) and the synthetic minority oversampling technique (smote), have been adapted for large datasets . these techniques preprocess the data, either by downsizing the majority class (undersampling) or by creating synthetic examples from the minority class (smote and other oversampling variants). dealing with large-scale datasets effectively is of utmost importance in predictive analytics and, in particular, in binary classification tasks. this is because of the varied and complex nature of large datasets and the recent need to handle hundreds or even millions of variables from different sources to explain a given phenomenon .
predictive models for classification tasks are usually more time-consuming than other data analysis tasks, such as data storage and dashboarding, and traditional methods become intractable in terms of running times when facing large datasets. to address this challenge, distributed machine learning approaches have been developed. this "divide and conquer" strategy is designed to scale to larger input data sizes via multinode algorithms and systems . to this end, the mapreduce framework has become popular as a big data tool for machine learning, and it has been used for dealing with the class-imbalance problem .there is an important gap in the imbalanced big data classification literature. this is because no hybrid undersamplingoversampling methods have been proposed, to the best of our knowledge, and only a few intelligent undersampling techniques have been reported in the literature. this research is designed to fill this gap. on the one hand, the combination of two or more resampling methods has shown the best performance in relation to using a single strategy . on the other hand, there are several undersampling strategies that remove examples from the majority class in an "intelligent" manner, i.e., by exploiting the structure of the data instead of simply discarding examples randomly. for example, the edited nearest neighbor (enn) approach "cleans" a neighborhood of samples from the minority class by eliminating majority-class examples within its boundaries .the main issue with hybrid methods is that combining strategies can be time-consuming when performed independently. to overcome this issue, we propose a novel hybrid approach that combines smote and enn in a single pass over the data. these two methods have in common that a neighborhood is defined; however, the computation of distances can be the most time-consuming task for resampling techniques in big data environments . we propose a method called smotenn that defines a single neighborhood for each example in the minority class, in which both the creation of synthetic examples and the elimination of majority class samples are performed. this method can be combined further with rus to provide suitable preprocessing for binary classification tasks.in summary, the main contributions of this study are as follows:• we present a novel hybrid undersampling-oversampling method for imbalanced classification. to the best of our knowledge, this is the first hybrid undersampling-oversampling approach for large-scale machine learning, but it is also suitable for datasets of regular size. • we conducted a comprehensive experimental study on 35 datasets of different sizes, analyzing the performance of several resampling methods. our results show that smotenn is able to outperform other resampling methods for small-and medium-sized datasets. for large datasets, our method has a positive performance on average compared to other imbalanced big data techniques, performing better than rus and the combination of rus and smote. • we empirically discuss and evaluate the value of creating new examples from the minority class in a big data context. most studies on imbalanced big data classification focus either on undersampling or oversampling , without discussing the differences between these two approaches.the remainder of this study is structured as follows: section 2 discusses prior work on class-imbalanced classification, providing the preliminaries for the smotenn. 
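for reference, a minimal sketch (ours, using the imbalanced-learn package on a toy dataset rather than the distributed spark implementation studied here) of the standard rus, smote, and enn building blocks that the proposed method combines:

# illustrative only: the standard rus / smote / enn building blocks on a toy
# dataset, via imbalanced-learn (not the distributed implementation in this paper).
from sklearn.datasets import make_classification
from imblearn.under_sampling import RandomUnderSampler, EditedNearestNeighbours
from imblearn.over_sampling import SMOTE

X, y = make_classification(n_samples=5000, weights=[0.95, 0.05], random_state=0)
X_rus, y_rus = RandomUnderSampler(sampling_strategy=0.2, random_state=0).fit_resample(X, y)
X_smo, y_smo = SMOTE(k_neighbors=5, random_state=0).fit_resample(X_rus, y_rus)
X_enn, y_enn = EditedNearestNeighbours(n_neighbors=3).fit_resample(X_smo, y_smo)
print(y.mean(), y_rus.mean(), y_smo.mean(), y_enn.mean())   # minority fraction after each step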
next, the proposed hybrid undersampling-oversampling method is formalized in section 3. the experimental results obtained for small, medium, and large benchmark datasets are presented in section 4. finally, section 5 provides the main conclusions of this paper. the most popular strategies for dealing with the class-imbalance issue, such as random undersampling (rus) and the synthetic minority oversampling technique (smote), have been adapted for large datasets. these techniques preprocess the data, either by downsizing the majority class (undersampling) or by creating synthetic examples from the minority class (smote and other oversampling variants). predictive models for classification tasks are usually more time-consuming than other data analysis tasks, such as data storage and dashboarding, and traditional methods become intractable in terms of running times when facing large datasets. we propose a method called smotenn that defines a single neighborhood for each example in the minority class, in which both the creation of synthetic examples and the elimination of majority class samples are performed. for large datasets, our method has a positive performance on average compared to other imbalanced big data techniques, performing better than rus and the combination of rus and smote. the second topic is imbalanced big data classification, in which a relevant framework for distributed machine learning and other solutions for handling large datasets are discussed. data resampling consists of balancing the training set as a preprocessing step, either by downsizing the majority class (undersampling) or by creating synthetic examples from the minority class (oversampling).the main goal of the proposed technique is to perform data resampling for imbalanced classification efficiently, considering 1) a distributed framework for data loading and processing, 2) a fast and scalable distance metric to define a neighborhood of samples, and 3) a novel algorithm able to perform both smote-like oversampling and intelligent undersampling in a single pass over the data. both spark and mllib are considered in the proposed method for the implementation of the data resampling techniques and the machine learning strategies when facing imbalanced classification. in particular, information from the majority class can be useful for improving both undersampling and oversampling strategies, such as in enn, the borderline smote, and safelevel smote methods discussed in the previous section.having defined the approximated distance metric and the mapreduce framework, we now describe the novel data resampling algorithm for imbalanced big data classification. we first remove these majority class samples (enn step) and then generate synthetic minority class examples through interpolation (smote step). the following alternative methods were considered: smote, borderline-smote (b-smo), safe-level smote (sl-smo), adasyn, adaptive neighbor smote (an-smo), density based smote (d-smo), mwmote, and relocating safe-level smote (rsl-smo). the following parameter values were explored for the smotenn method: the number of nearest neighbors k = {5, 11, 15}, the amount of oversampling for the smote step n = {1, 3}, and the proportion of the majority class after the rus step p = {3, 4, 5, 6}. after a neighborhood of size k is defined, samples from the majority class are removed in case they are outnumbered by the examples from the minority class in the neighborhood (enn step). 
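the single-pass idea can be sketched as follows (our reading of the description above, not the authors' reference implementation): one k-neighborhood per minority sample drives both the enn-style removal of outnumbered majority points and the smote-style interpolation of synthetic minority points.

# illustrative single-pass sketch of the smotenn idea described above (our
# reading, not the authors' code): one k-neighborhood per minority sample
# drives both the enn-style removal and the smote-style interpolation.
import numpy as np
from sklearn.neighbors import NearestNeighbors

def smotenn_sketch(X, y, k=5, n_synth=1, rng=None):
    rng = np.random.default_rng(rng)
    minority = X[y == 1]
    nn = NearestNeighbors(n_neighbors=k).fit(X)
    _, idx = nn.kneighbors(minority)          # one neighborhood per minority sample
    to_drop, synthetic = set(), []
    for i, neigh in enumerate(idx):
        labels = y[neigh]
        # enn step: drop majority neighbours that are outnumbered by minority ones
        if (labels == 1).sum() > (labels == 0).sum():
            to_drop.update(neigh[labels == 0].tolist())
        # smote step: interpolate towards minority neighbours
        min_neigh = X[neigh[labels == 1]]
        if len(min_neigh) == 0:
            continue
        for _ in range(n_synth):
            j = rng.integers(len(min_neigh))
            gap = rng.random()
            synthetic.append(minority[i] + gap * (min_neigh[j] - minority[i]))
    keep = np.array([i for i in range(len(X)) if not (y[i] == 0 and i in to_drop)])
    X_out = np.vstack([X[keep]] + ([np.array(synthetic)] if synthetic else []))
    y_out = np.concatenate([y[keep], np.ones(len(synthetic), dtype=y.dtype)])
    return X_out, y_out

the rus step described in the text could then be applied to the returned arrays before training a classifier.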
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/514.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a0cb177ec0f1e3cc0ba2e118af4579cad5b1075 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/514.txt @@ -0,0 +1 @@ +the covid-19 pandemic and its induced mitigation measures such as lockdowns and social distancing had unprecedented impacts on socioeconomic environments and resulted in increases in adverse conditions and events such as loss of employment, reduced income, food shortage, housing insecurity, and delayed/unmet needs of healthcare in the past two years. researchers rely largely on population surveys and administrative records to estimate the prevalence of a specific characteristic/outcome/condition in the general population. these types of data are often collected in a retrospective manner; however, they lack the power to produce real-time estimates that can be used for monitoring changes over a short period of time. in this paper, we aim to assess the maximum utility of social media data for promptly monitoring temporal changes, as well as its limitations. we will compare the temporal trends of adverse experiences captured in social media data to the prevalence estimates yielded by the household pulse survey (hps) (u.s. census bureau 2020). the authors will update the prevalence estimates of four types of adverse experiences for 54 discrete time points in the past 2 years. the rampaging covid-19 pandemic has greatly affected emotion and everyone's daily life in the past two years. with the abundance of generated social media data during the pandemic, more users intend to express their emotions and opinions about social events, such as mitigation policies or the invention of vaccines. such a dramatic increase in social media data provides great research opportunities in social media mining and natural language processing. to further understand various public behaviors, some researchers built a real-time tweets analyzer to get high-frequency words and polarity over time in the united states . previous work by qazi et al. employs a gazetteer-based approach to infer the geolocation of tweets. accordingly, we deploy an elasticsearch search engine server to manage all the geo-tagged tweets by time and spatial coordinates, which makes it possible to select the appropriate study scope as needed. to mine the information and topics in these tweets, we looked at some of the existing data mining models. event detection in specific domains using social media has garnered significant attention over the past decade , . most existing research aims to identify events related to specific themes, such as earthquakes, disease outbreaks, or civil unrest, focusing on events with consistent representation types. to our understanding, our paper is the first to detect initiating events for various adverse experiences during the covid era. we employ chosen keywords from social media to capture the linguistic trends in discussions related to covid. in constructing the detection model, we incorporate a multi-task learning framework to capture the interconnectedness between various adverse experience-related online conversations. this approach stems from the observation that many discussions exhibit overlapping linguistic patterns, and the language used in twitter posts about adverse experiences tends to be analogous.
figure x illustrates the common linguistic trends observed in discussions about loss of employment income (li) and food scarcity (fs).in line with the previously mentioned specifications, we propose language pattern-aware triggering event detection for adverse experience(l a t e x) model based on multi-task learning framework. our main contributions are:• formulating a novel machine learning framework for triggering event detection using natural language features. differing from current approaches, we define the challenge of identifying initiating events for adverse experiences during the covid period as a multi-task supervised learning issue. in our suggested techniques, models for various adverse experience-related online discussions are concurrently learned using a shared set of linguistic features.• modeling similarity among various adverse experiences via common language patterns in feature space. given the common linguistic trends observed in discussions about covid and adverse experiences on twitter, we have crafted specific constraints to capture the similarities in language patterns across these experiences. these resemblances in the feature domain are influenced by recurring keywords present in online dialogues.• developing an efficient admm algorithm to learn sparse model parameters. the foundational optimization challenge of our suggested multi-task model is intricate, characterized by its non-smooth, multi-convex, and inequality-constrained nature. by incorporating auxiliary variables, we devise an efficient admm-driven algorithm that breaks down the primary challenge into multiple sub-problems. these can then be addressed using block coordinate descent and proximal operators.the rest of our paper is structured as follows. related works are reviewed in section ii. in section iii, we describe the problem setup of our work. in section iv, we present a detailed discussion of our proposed l a t e xmodel for detecting the occurences of the triggering events of the adverse experiences during covid and long covid eras, and its solution for parameter learning. in section v, extensive experiment evaluations and comparisons are presented. in the last section, we discuss our conclusion and directions for future work.the covid-19 pandemic and its induced mitigation measures such as lockdowns and social distancing had unprecedented impacts on socioeconomic environments and resulted in increases in adverse conditions and events such as loss of employment, reduced income, food shortage, housing insecurity, delayed/unmet needs of healthcare in the past two years.in line with the previously mentioned specifications, we propose language pattern-aware triggering event detection for adverse experience(l a t e x) model based on multi-task learning framework. differing from current approaches, we define the challenge of identifying initiating events for adverse experiences during the covid period as a multi-task supervised learning issue. given the common linguistic trends observed in discussions about covid and adverse experiences on twitter, we have crafted specific constraints to capture the similarities in language patterns across these experiences. 
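as a much-simplified, hypothetical illustration of the shared-linguistic-features idea (it omits the coupled sparsity constraints and the admm solver that define the actual l a t e x model), a single shared tf-idf representation can feed one linear detector per adverse-experience type:

# simplified illustration only: shared tf-idf features with one detector per
# adverse experience; the real model couples the tasks through shared-sparsity
# constraints solved with admm, which is not reproduced here.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression

# toy placeholder data: tweets per time slot and, per task, a 0/1 label saying
# whether a triggering event occurred in that slot
slots = ["lost my job this week rent is due", "food bank lines are huge again",
         "cant afford groceries after the layoff", "finally found new work"]
labels = {"li": [1, 0, 1, 0], "fs": [0, 1, 1, 0], "hi": [1, 0, 0, 0], "um": [0, 0, 0, 0]}

vectorizer = TfidfVectorizer()
X = vectorizer.fit_transform(slots)            # one shared feature space for all tasks

detectors = {task: LogisticRegression(C=0.5, max_iter=1000).fit(X, y)
             for task, y in labels.items() if any(y)}   # skip degenerate all-zero tasks
for task, clf in detectors.items():
    print(task, clf.predict(X))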
in section iv, we present a detailed discussion of our proposed l a t e xmodel for detecting the occurences of the triggering events of the adverse experiences during covid and long covid eras, and its solution for parameter learning.in this section, we offer an in-depth examination of contemporary research, differentiating between methods for detecting triggering events for adverse experiences via social media. while existing methodologies have proven the efficacy of social media data in forecasting various domains, such as infectious disease spread,,, crime, and disaster tracking, there's a noticeable gap in research on adverse experience triggering event detection via social media. to the best of our understanding, our research is at the forefront of merging social media analysis with multi-task learning to identify events leading to adverse experiences during the covid-19 and long covid phases. as far as we're aware, our method is the pioneering supervised learning framework that captures the language interplay under the context of triggering events of adverse experiences during covid-19 and long covid phases through the lens of the multi-task learning approach. then based on which covid-19-related events are referred to in each tweet, t + is grouped into {t + c } c∈φ , where φ = {li, fs, hi, um} represents four types for adverse experiences:loss of employment income (li), food scarcity (fs), housing insecurity (hi), and unmet needs for mental health services (um).in this work, we address two major questions: 1) given a type of adverse experience c, a time slot t, and the collection of corresponding tweets t + c,t , is there a corresponding event that triggers such adverse experience c during time period t? to answer this question, we will cast it as a supervised learning problem using the multi-task learning framework; and 2) given a set of classified twitter set t + t in the time period t, what is sentiment score for that time period? to answer this question, we will also format it as a supervised learning problem but applying the recurrent neural network models. (1) topical relatedness of adverse experiences: the adverse experiences are topically related together (e. (2) common complaint vocabulary targetting covid-19: we assume that the words used by twitter users to complain about covid-19, in general, will be similar across all adverse experiences.considering that we want to predict if there is a triggering event for a specific type of adverse experience resulting from covid-19, given a subcollection of tweets t + c,t which includes the specific adverse experience c during time slot t, our problem fits well into the scope of a classification or regression problem. the correlation between adverse experiences results in semantic similarity in twitter space and, therefore, a similar distribution of tweets complaining or discussing adverse experiences. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/515.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/515.txt new file mode 100644 index 0000000000000000000000000000000000000000..41b43b4488e654700eef8d4398840a305046bcab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/515.txt @@ -0,0 +1 @@ +in query learning, a learner attempts to identify an unknown concept from a collection via a series of data requests called queries. 
typically, algorithms designed for learning in this setting attempt to bound the number of required queries to identify the target concept in the worst case scenario. if one imagines the queries of the learner being answered by a teacher, the usual setup imagines the teacher answering queries in an adversarial manner, with minimally informative answers. alternatively, for a given algorithm, the bounds for the traditional model are on the worst-case answers over all potential targets. in variations of the model, one of these two factors is usually modified.for instance, kumar et al. (2021) studies the case in which the answers are assumed to be maximally informative in a certain sense. in this manuscript, we first work in the setup originating with angluin and dohrn (2017), where we assume that the answers to the queries are randomly selected with respect to some fixed probability distribution.consider a concept class c = {c 1 , . . . , c n }, subsets of a fixed set x. fix a target concept a ∈ c. an equivalence query consists of the learner submitting a hypothesis b ∈ c to a teacher, who either returns yes if a = b, or a counterexample x ∈ a△b. in the former case, the learner has learned a, and in the latter case, the learner uses the new information to update and submit a new hypothesis. angluin and dohrn (2017) fix a probability distribution µ on x and assume that the teacher selects the counterexamples randomly with respect to µ restricted to a△b. they show that for a concept class c of size n, there is an algorithm in which the expected number of queries to learn any concept is at most log 2 (n). it is natural to wonder whether there is a combinatorial notion of dimension which can be used to bound the expected number of queries independent of the size of the class -perhaps even in infinite classes. in fact, angluin and dohrn (2017) (theorem 25) already consider this, and show that the vc-dimension of the concept class is a lower bound on the number of expected queries. on the other hand, angluin and dohrn (2017) (theorem 26), using an example of littlestone (1988), show that the vc-dimension cannot provide an upper bound for the number of queries.the motivation for bounds depending on some notion of dimension rather than the number of concepts is two-fold:• many combinatorial notions of dimension (e.g. littlestone or vc) of a class c can be small while |c| is large.• investigating this model of learning in settings where c is an infinite class will require methods and bounds which do not use |c|.we show that the littlestone dimension provides such an upper bound; we give an algorithm which yields a bound which is linear in the littlestone dimension for the expected number of queries needed to learn any concept. in section 2 we establish the bounds for finite concept classes c.in section 3 we give a specific example which shows finite littlestone dimension of a infinite class c is not sufficient to guarantee learnability of the class in the model of angluin and dohrn (2017). that is, we show the expected number of queries is impossible to bound over all target concepts even in very simple infinite classes. suppose that the target concept is itself selected randomly with respect to some (perhaps unrelated to the feedback mechanism) probability distribution. in this case, we give an algorithm so that the expected number of queries (over both sources of randomness) is at most õ(d) where d is the littlestone dimension of the class c. 
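a small self-contained simulation (ours) of the random-counterexample model just described; the hypothesis-selection rule below is a naive placeholder rather than the strategy that achieves the log 2 (n) or littlestone-dimension bounds.

# toy simulation of equivalence-query learning with random counterexamples:
# the teacher answers a wrong hypothesis with a point of the symmetric
# difference drawn according to a fixed distribution mu.
import random

def random_eq_learn(concepts, target, mu, seed=0):
    rng = random.Random(seed)
    version_space = list(concepts)
    queries = 0
    while True:
        hypothesis = version_space[0]        # naive choice; smarter rules give the known bounds
        queries += 1
        if hypothesis == target:
            return queries
        diff = sorted(hypothesis ^ target)   # nonempty since hypothesis != target
        x = rng.choices(diff, weights=[mu[p] for p in diff])[0]
        # keep only concepts consistent with the returned counterexample
        version_space = [c for c in version_space if (x in c) == (x in target)]

# thresholds over {0,...,9} with a uniform feedback distribution
domain = range(10)
concepts = [frozenset(range(t, 10)) for t in range(11)]
mu = {p: 1.0 for p in domain}
print(random_eq_learn(concepts, target=frozenset(range(4, 10)), mu=mu))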
this result uses the bounds developed in section 2 in an essential way, in particular by using the finite class's littlestone dimension instead of its size. in section 4, we give another application of littlestone dimension - to compression schemes - which answers a question of johnson and laskowski (2010) on d-compression with b extra bits, a notion originating with floyd and warmuth (1995). the existence of a d-compression is closely related to various notions of learning; d-compressibility of a class c implies the class has vc-dimension at most d. a famous conjecture of floyd and warmuth (1995) asks if every vc-class has a d-compression where d is the vc-dimension.1 our result in section 4 proves a strong version of the conjecture for littlestone dimension. they show that for a concept class c of size n, there is an algorithm in which the expected number of queries to learn any concept is at most log 2 (n). it is natural to wonder whether there is a combinatorial notion of dimension which can be used to bound the expected number of queries independent of the size of the class - perhaps even in infinite classes. in fact, angluin and dohrn (2017) (theorem 25) already consider this, and show that the vc-dimension of the concept class is a lower bound on the number of expected queries. we show that the littlestone dimension provides such an upper bound; we give an algorithm which yields a bound which is linear in the littlestone dimension for the expected number of queries needed to learn any concept. in section 3 we give a specific example which shows that finite littlestone dimension of an infinite class c is not sufficient to guarantee learnability of the class in the model of angluin and dohrn (2017). in this case, we give an algorithm so that the expected number of queries (over both sources of randomness) is at most õ(d) where d is the littlestone dimension of the class c. for any a ∈ x, either c a=1 or c a=0 has littlestone dimension strictly less than that of c and so:.9 to give an algorithm which yields the correct concept in linearly (in the littlestone dimension) many queries from c. let t (c) be the expected number of queries before the learner correctly identifies the target concept.10 the expected number of queries to learn a concept in a class c is less than or equal to 2 ldim(c). proof: the expected drop in the littlestone dimension of the concept class induced by any query before the algorithm terminates is at least 1/2 by theorem 2. since c is countable, enumerate the collection c = {c i } i∈n . one might also think of the random eq learning of angluin and dohrn as analysing the maximum expected number of queries over all possible targets, while our model will analyze the expected number of queries where the expectation is taken over the concepts (with a fixed but arbitrary distribution) and over the counterexamples. during each step of the algorithm, we also have a concept class c i , with c 0 = c initially. in this case, set c i := (c i-1 ) (a i ,1) = {g | g ∈ c i-1 , g(a i ) = 1}.
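to ground the recursion used above (either c a=1 or c a=0 has strictly smaller littlestone dimension), here is a brute-force computation of the littlestone dimension of a small finite class; it is an illustrative utility of ours and is only practical for tiny classes.

# illustrative sketch: brute-force littlestone dimension of a finite class,
# using the standard recursion ldim(c) = max over points x (with both
# restrictions nonempty) of 1 + min(ldim(c_{x=0}), ldim(c_{x=1})).
from functools import lru_cache

def littlestone_dim(concepts, domain):
    # concepts: iterable of concepts, each concept a frozenset of points in domain
    @lru_cache(maxsize=None)
    def ldim(cs):
        if len(cs) <= 1:
            return 0
        best = 0
        for x in domain:
            c1 = frozenset(c for c in cs if x in c)
            c0 = frozenset(c for c in cs if x not in c)
            if c0 and c1:
                best = max(best, 1 + min(ldim(c0), ldim(c1)))
        return best
    return ldim(frozenset(concepts))

# toy example: the 5 threshold concepts on {0,1,2,3} have littlestone dimension 2
domain = (0, 1, 2, 3)
thresholds = [frozenset(range(t, 4)) for t in range(5)]
print(littlestone_dim(thresholds, domain))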
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/516.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/516.txt new file mode 100644 index 0000000000000000000000000000000000000000..af3c672983a6008536251909f985223cbcb679b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/516.txt @@ -0,0 +1 @@ +many space bodies like satellites and the international space station (iss) lie in the low earth orbit (leo), resulting in great exposure to cosmic microwave background radiation, solar radiation from the sun and van allen radiation. although most radiation gets deflected by the earth's magnetosphere, high energy charged particles may still pass through and disrupt the operation of space devices. these charged particles, when passing through a medium, lose energy by ionisation and induce electron-hole pairs along their path. the interaction of electrons, protons, and heavy ions with integrated circuits could therefore lead to a total dose degradation, or worse, a single event effect-a class of radiation effects in electronic devices. in this research paper, we will be focusing on predicting single event upsets (seu)-a subset of single event effects.in digital memory and logic devices, seus are nondestructive 'soft' errors. they normally appear as a bit flip in memory units or transient pulses in logic devices, and do not permanently destruct the functions of a device. therefore, error detection and correction codes are frequently adopted to reduce the impact of seus. however, modern semiconductor devices tend to have tiny junction areas with proportionately small amounts of charge to control the state of memory units. this increases seu disturbances because a single heavily charged particle passing through the junction would be sufficient to induce charge into the node and change its state, causing disruption of data stored at the node. as modern semiconductor devices are getting even smaller, predicting and mitigating seus is becoming an increasingly important problem. our research is focused on mitigating the impact of single event upsets by employing machine learning. the model presented will allow the prediction of seus in memory units-prompting timely mitigation of the occurrence.previous studies have been done on a similar problem , however, our approach differs by utilising a novel method on a more constrained dataset. although most radiation gets deflected by the earth's magnetosphere, high energy charged particles may still pass through and disrupt the operation of space devices. this increases seu disturbances because a single heavily charged particle passing through the junction would be sufficient to induce charge into the node and change its state, causing disruption of data stored at the node.current models used to predict seus are largely based on the transfer of energy from protons to electrical components as they pass through very large-scale integration (vlsi) devices. these models are mostly tested in controlled environments with the aim of predicting critical energy levels of vlsi devices and to verify the strength of the shielding methods in place. the calculations behind these models are based on data collected in labs under the assumption that a device's operation in such simulated environments is an indication of its operation in space.linear energy transfer (let) is the fundamental idea behind several seu prediction models. 
let is the deposition of energy by high energy protons onto the cells of a semiconductor device as it pierces through the device.where 𝐿is the net energy transferred to a cell if the path of the proton is perpendicular to the cell, and 𝜃 is the angle of entry into the cell. if the charged energy deposited from the proton into the cell exceeds a certain amount (critical charge), an seu can occur.path-length distribution model is a model that is currently used to test the likelihood of an seu occurring in semiconductor devices.bendel proton upset model is another model that is currently used to provide an estimate of the seu occurrence rate in a semiconductor device using semi-empirical fit parameters. in particular, it considers high energy protons that produce secondary particles with energy high enough to cause an seu. since the seu occurrence rate is calculated based on the mean value in a hyper dynamic environment, the short term rate of seu occurrence may be much higher than the estimate. therefore, systems with semiconductor devices susceptible to seus need to have built in shielding to withstand short periods of greatly increased seu occurrence rates. the dataset contained the orbital positions, altitudes and timestamps of the seu observed in an onboard memory device of the low earth orbit "flying laptop'' satellite mission during its in-orbit operation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/517.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/517.txt new file mode 100644 index 0000000000000000000000000000000000000000..00d320bd2a5280ae873e90e51cdb3fe44b095041 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/517.txt @@ -0,0 +1 @@ +the proof of theorem 4.1 is presented in appendix e. it is composed of the following main steps: first, we bound the number of times the consistency check can fail (i.e., line 13 is executed) by lemma 4.4. combining this with lemma e.5, an elliptical potential argument bounding the number of times the average uncertainty can be large (these are the only two ways that the main iteration can continue) implies a sample-complexity result for skippyeleanor (corollary e.6). having limited the number of times the consistency check can fail, we derive guarantees regarding the performance of the policy returned by the algorithm: via an induction argument (lemma e.8) we show corollary e.9, which shows that with high probability the difference between the optimization value of optimization problem 4.10, ˆ , ¯ ( 1 ) and scales with the average uncertainty term =1 ¯ . thus, they are close when skippyeleanor returns in line 17. this is complemented with the optimism property proved in lemma e.10, stating that the optimization value ˆ , ¯ ( 1 ) is close to ★ ( 1 ). combined, this proves theorem 4.1.given some data collected so far and ( ℎ ) ℎ∈ , skippyeleanor computes optimistic estimates of the action-values by calculating an optimistic policy parameter ¯ , as well as a guess ˆ to a nearoptimal design which is used to estimate the range for the states (due to technical reasons, ˆ will guess a near-optimal design for the transformed parameter space -1 ℎ θ ℎ ). data is collected by running stochastic versions of skippy policies on the mdp, where the states to be skipped over are determined based on the range estimates; when a state is skipped, an action is selected using a deterministic policy 0 that always chooses the first action in every state. 
since we do not know these states in advance, we run exploratory policies that skip over next states starting from any state: namely, we run skippypolicy( ˆ , ¯ , ) for all ∈ with a maximum number of unskipped states (phase i), and once this is skip budget is exhausted, all remaining states are skipped over by rolling out 0 (phase ii), which ensures that we collect enough data at every stage of the mdp to be able to estimate the one-skippy-step reward of any skipping mechanism. when a transition from state leads to skipped states, the linear mdp returns with the copy of the first non-skipped state that has a stage counter of stage( ) + 1, so that in this linear mdp the stage numbers are consecutive (as required by our definitions).the optimization problem computes optimistic estimates ¯ of the parameters of the mdp simultaneously for all ∈ , and in each iteration of the loop, more data is collected according to the policy that is optimal for the mdp defined by the estimated parameters. therefore, (i) we need access to the rewards of the current policy at any stage (similarly to eleanor), and hence we run the current policy to any stage (including the last one); and (ii) perform rollouts with the fixed policy 0 (from any stage) to be able to estimate the reward + .there is an event e 1 that happens with probability at least 1 -, such that under e 1 , during the execution of skippyeleanor, when the beginning of any iteration (line 5) is executed, for any ∈ , ∈ , for any ˆ ∈ g, ¯ ∈ θ, and , ∈ b (1), for all ( ,. there is an event e 2 with probability at least 1 -, such that under e 1 ∩ e 2 , during the execution of skippyeleanor, when optimization problem 4. under e 1 ∩ e 2 , skippyeleanor returns with a policy before exiting the while loop of line 3, and as each iteration executes trajectories in line 8, the number of interactions of skippyeleanor with the mdp is bounded by õ 11 7 / 2 . there is an event e 3 with probability at least 1 -3 , such that under e 1 ∩ e 2 ∩ e 3 , during the execution of skippyeleanor, whenever line 16 is executed, for ( ˆ , ¯ ) as recorded in line 5 of the current iteration, for ∈ ,. there is an event e 4 with probability at least 1 -, such that under e 1 ∩ e 2 ∩ e 4 , throughout the execution of skippyeleanor, the value of optimization problem 4.where the first line is due to both + and -following on states with stage less than ℎ, the second line follows from the fact that for any ∈ s ℎ , + ( ) = -( ) for any ≠ ord( ); combining this with eq. we first use the usual high-probability bounds on the least squares predictor and hoeffding's inequality on the empirical mean quantities, to prove that with probability at least 1 -3 , during the execution of skippye-leanor whenever line 16 is executed, for all ∈ ,. the third inequality relies on the fact that , +1 = 0 if the clipped inner product is negative, and the final equality is due to the definition of along with the fact that p( ) = + ( p( ) ), as this is the last state in the trajectory where skippypolicy takes the inner-product maximizing action ( + ) before rolling out with 0 . (31)). 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/518.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/518.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a629b5cdb3d475c86ab75ec07d902d0fc730608 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/518.txt @@ -0,0 +1 @@ +kernel density estimators provide a non-parametric method for estimating probability distributions. in contrast to parametric machine learning models, in which a set of training data is used to determine an optimal parameter vector which can be used to predict future outcomes without the training data, for non-parametric methods the number of parameters of the model has the capability to grow as the size of the dataset grows. our paper will focus on the task of kernel density estimation, in which one estimates a probability distribution using kernel functions of the form k : d ×d → r. kernels are often defined with a bandwidth parameter 1/α that controls the width of the kernel function.given a probability distribution ρ and a collection of points x = {x 1 , ..., x n } ∼ ρ sampled independently, it is well known that for certain well-behaved kernel functions k, the distribution ρ can be approximated very well by the kernel density estimator (kde)k(x i , y). in particular, it is known that under certain conditions on the kernel function, kde x approximates ρ at the minimax optimal rate as |x| → ∞ .although this is an elegant theoretical result, in practice it is computationally inefficient to store and make computations with an arbitrarily large number of data points n. one solution to reduce the computational complexity is to use an ε-coreset for a kernel density estimator. definition 1.1 (kde ε-coreset). for fixed ε > 0, kernel function k : d×d → r, and data set x ⊆ d, an ε-coreset for k is a subset q ⊆ x so thatwe will say that the coreset complexity of a kernel function k is the minimum possible size of a ε-coreset q for k.in general, coreset complexity bounds will depend on ε and the dimension d of the kernel domain, and they will be independent of the size of the set x. these bounds are also often independent of the choice of x ⊆ d, although several previous results and several of our results give an explicit dependence on x that may allow improvement over existing bounds for sufficiently nice data sets (see section 1.3). in particular, several of our bounds will depend on the radius of the set x.for more details about non-parametric methods and kernel density estimation, see for example .1.1. the discrepancy approach. one powerful method for proving bounds on the coreset complexity of kernel functions is the discrepancy approach. it has also been used in and is based on a method for computing range counting coresets . following the notational conventions of , we make the following definition. definition 1.2 (kernel discrepancy). given a data set x ⊆ d, a kernel k : d × d → r, and a coloring β ∈ {±1} x , the kernel discrepancy at a point y ∈ d is defined as disc k (x, β, y) := x∈x β(x)k(x, y) .then the kernel discrepancy can then be defined aswe will also use the notation disc k (x) to denote the kernel discrepancy with respect to a fixed data set x. bounds on kernel discrepancy can be leveraged to obtain bounds on the coreset complexity for a given kernel k via the following strategy, often called the "halving trick" . 
we construct a coreset of x by iteratively removing half of the points in x, and we select which half of the points are removed by creating colorings β ∈ {±1} x minimizing the kernel discrepancy and then removing those points assigned -1 (in principle, there is no reason to expect that exactly half of the points are assigned +1, and half -1, but there are standard techniques to overcome this challenge ). indeed, supposing that we have an optimal choice of signs β ∈ {±1} x such that supthen we simply note that, letting x + be the set of points assigned +1 and x -be the set of points assigned -1, then under the assumption thattaking a supremum over y ∈ d, the final line of ( 1) is exactly kde x (y) -kde x -(y). thus, iterating this procedure t times, and denoting the resulting set at iteration s by x s (with x 0 := x), we find thatassuming that the function f grows sufficiently slowly1 , this sum will be dominated by the final term, which allows us to calculate the size of a coreset yielding error at most ε. based on this connection, our proofs will focus on bounding the quantity disc k (n) for different kernels k (or in some cases disc k (x), when we want to account for the geometry of the data set x), and then the "halving trick" can easily be used to determine the corresponding size of the coreset thus obtained.2 (kernel discrepancy). based on this connection, our proofs will focus on bounding the quantity disc k (n) for different kernels k (or in some cases disc k (x), when we want to account for the geometry of the data set x), and then the "halving trick" can easily be used to determine the corresponding size of the coreset thus obtained. chen et al. in the case that c k is a small constant, as is the case for most kernels of interest, it is easy to see that taking a 2ε/(c k √ d)-net g ε over the domain of k and mapping each point x ∈ x to the closest point in g ε (with multiplicity) to obtain x gε , we find that sup y∈d |kde x (y) -kde x gε (y)| ≤ ε.finally, a related but not directly comparable result due to karnin and libertyapplies to kernels that are analytic functions of the dot product and satisfy the very strong condition that sup x,x ′ ∈d xx ′ 2 ≤ r k , where r k is a fixed constant determined by the kernel k. given a kernel k : d × d → r and a data set x ⊆ d, we define the query space of k with respect to x as. let k : d × d → r be a kernel, x ⊆ d a data set, and q the query space associated to k and x. let k : r d × r d → r be a kernel with bandwidth parameter 1/α and x ⊆ r d be a dataset. let k : r d × r d → r be a kernel with bandwidth parameter 1/α and x ⊆ r d a data set.3 each kernel k has an associated rkhs h k and a map φ k : d → h k .1 to the vectors φ(x) for x ∈ x, noting that by condition (ii), φ(x) h k = 1 for each x ∈ x. as we assume that k is positive definite and satisfies k(x, x) = 1 for all x ∈ q, there exists a map φ : r d → h k , where h k is a rkhs such that k(x, y) = φ(x), φ(y) for all x, y ∈ r d , and φ(x) h k = 1 for all x ∈ r d .by the first observation, we can apply our bound on the exponential kernel discrepancy of the set {f (x) : x ∈ x} ⊆ s d to find signs β ∈ {±1} x so that sup. then as φ(x) h k j s = e 0 = 1 for any choice of x ∈ ∆ d , we can apply the gram-schmidt walk to the collection of vectors {φ(x)} x∈∆ d exactly as in the proofs of theorems 1.4 to this collection of random variables with the pseudometric d k j s (x, y) = φ(x)φ(y) h k j s = 2 -2k js (x, y), we find that e disc k j s (x) diam(d k j s ) 0 log n (∆ d , d k j s , r) dr. 
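the following toy sketch (ours) mirrors the halving strategy described above: in each round the remaining points are two-colored so that the signed kernel sums stay small at a set of query points, and one color class is kept; the greedy coloring is only a stand-in for the self-balancing / gram-schmidt walk colorings used to obtain the stated bounds.

# toy sketch of the "halving trick": repeatedly two-color the points with small
# kernel discrepancy at a set of query points and keep one color class.
import numpy as np

def gaussian_kernel(X, Y, alpha=1.0):
    d2 = ((X[:, None, :] - Y[None, :, :]) ** 2).sum(-1)
    return np.exp(-alpha * d2)

def halve_once(X, Y_query):
    K = gaussian_kernel(X, Y_query)              # rows: data points, cols: query points
    signs = np.zeros(len(X))
    running = np.zeros(K.shape[1])
    for i in range(len(X)):                      # greedy signing keeps the signed sums small
        s = -1.0 if running @ K[i] > 0 else 1.0
        signs[i] = s
        running += s * K[i]
    keep = signs > 0
    if keep.sum() == 0 or keep.sum() == len(X):  # degenerate split, fall back to alternation
        keep = np.arange(len(X)) % 2 == 0
    return X[keep]

rng = np.random.default_rng(0)
X = rng.normal(size=(1024, 2))
Q = rng.normal(size=(256, 2))                    # query points where the kde error is checked
coreset = X
for _ in range(4):                               # four halvings -> roughly |X|/16 points
    coreset = halve_once(coreset, Q)
err = np.abs(gaussian_kernel(Q, X).mean(1) - gaussian_kernel(Q, coreset).mean(1)).max()
print(len(coreset), err)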
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/519.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/519.txt new file mode 100644 index 0000000000000000000000000000000000000000..97e363af0f4aa02019d9bb4aa3f668a1c52fbaed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/519.txt @@ -0,0 +1 @@ +the evolution of human-computer interaction (hci) has undergone several transformations over the decades, with tech- § equal contribution. nology continuously striving to make computers more userfriendly and accessible. from the command-line interfaces of the 1960s to the graphical user interfaces (gui) of the 1980s and, more recently, the touch interfaces on mobile devices, each shift has represented a significant leap towards more intuitive, efficient, and seamless user experiences. today, as we find ourselves at the precipice of another paradigm shift, the question is not whether, but how, we continue to shape this ongoing evolution to ensure a future where technology serves us in increasingly human-centric ways.in the current technological landscape, artificial intelligence (ai) stands as a powerhouse of potential, particularly for augmenting and redefining current operating systems and user interfaces. the abilities of large generative models (lgms), such as large language models (llms) and diffusion models (dms), have given us a glimpse into a future where our interactions with technology transcend the traditional boundaries.llms, built upon vast data sets and sophisticated architectures, are capable of completing complex tasks, demonstrating chain-of-thought reasoning akin to human capabilities, and displaying impressive generalization skills. their proficiency in comprehending and generating language makes them ideal base-reasoners, capable of orchestrating diverse system components to create a seamless, intuitive, and responsive user interface.moreover, with advances in generative computer vision models, especially dms, our toolbox for enhancing humancomputer interaction has expanded. these models can generate incredibly realistic outputs, setting the stage for them to serve as the foundation for user interface generation: the ability to generate personalized interfaces on-the-fly, that cater and adapt to individual user preferences, their character, and mood marks a shift toward highly customized and user-centric design, a shift that promises to enrich user experiences significantly.this new paradigm of human-computer interaction presents exciting opportunities, such as enabling communication between systems that otherwise do not integrate the same api. by utilizing natural language, a universal medium, we can bridge the gap between disparate systems, fostering a more unified, coherent, and efficient interaction landscape. however, this shift in paradigm also brings its share of challenges. a prime example is the need to ensure data persistence within these models. one key question when implementing this new approach is how we can keep a consistent and ongoing dialogue over time, especially when the system is working on complicated or multi-stage tasks. this steady interaction is crucial for a smooth user experience and for building trust in the system's ability to assist the user effectively. to make this possible, we may need to step away from the methods we're used to and start thinking about new ways to improve the performance of these generative models. 
for instance, current methods of data management, such as storing files explicitly in computers or data centers, may provide some benefits, but they may not fully meet the unique needs of generative models, which store their knowledge implicitly, compressed within their parameters.while the capabilities of llms in understanding and generating language are remarkable, they are not without their limitations. these issues primarily originate from the data employed for their pre-training, which is frequently obtained from web crawls. this data can often contain biased, toxic, or harmful content, consequently impairing models' reliability. another limitation is the tendency to hallucinate, i.e., despite not having any explicit misinformation, llms may generate outputs that are not entirely accurate or faithful. this propensity to deviate from the input can occasionally lead to responses that, while contextually plausible, might misrepresent the user's intent or the factual information at hand.moreover, the promise of seamless interaction and communication must balance with considerations of trustability, privacy, security, and ethics. for this reason, developing new protocols for information exchange becomes a necessity in this envisioned future. these protocols must meet and surpass current standards, protecting user data while simultaneously ensuring private and secure interactions. the design of such protocols also must anticipate and be resilient against potential misuse of ai systems, providing robust safeguards to exploitation and unethical practices.these represent just a few of the challenges in harnessing the full potential of llms in revolutionizing human-computer interaction. as we venture into this exciting new territory, it is essential to confront these challenges head-on, ensuring that the solutions we develop are not just technologically advanced, but also reliable, ethical, and user-centric.the road ahead in this new paradigm is both promising and challenging. this paper serves as an exploration into the future of human-computer interaction -a future where our interactions with technology become akin to a natural conversation. we delve deeper into the benefits, challenges, and implications of this envisioned future in the following sections, charting a course for continued research and development in this transformative and exciting direction. in particular, section ii reviews current work in this area, and provides an idea of the current technological landscape; sections iii and iv describe our vision and propose a possible architecture, respectively. section v questions the main challenges that may arise; finally, section vi concludes our discussion. from the command-line interfaces of the 1960s to the graphical user interfaces (gui) of the 1980s and, more recently, the touch interfaces on mobile devices, each shift has represented a significant leap towards more intuitive, efficient, and seamless user experiences.in the current technological landscape, artificial intelligence (ai) stands as a powerhouse of potential, particularly for augmenting and redefining current operating systems and user interfaces. the abilities of large generative models (lgms), such as large language models (llms) and diffusion models (dms), have given us a glimpse into a future where our interactions with technology transcend the traditional boundaries. 
these models can generate incredibly realistic outputs, setting the stage for them to serve as the foundation for user interface generation: the ability to generate personalized interfaces on-the-fly, that cater and adapt to individual user preferences, their character, and mood marks a shift toward highly customized and user-centric design, a shift that promises to enrich user experiences significantly. for instance, current methods of data management, such as storing files explicitly in computers or data centers, may provide some benefits, but they may not fully meet the unique needs of generative models, which store their knowledge implicitly, compressed within their parameters. don't show me all the options; propose the cheapest one directly" 2) a c to : "my user would like a flight to paris, between the 16th and 17th of july, preferably in the evening, at a cost not exceeding 120 usd. given that these models can be trained to generate a wide range of visual outputs, they could be directed to design interfaces that echo a user's aesthetic preferences or adapt to their current mood. users could convey their needs verbally, and the system could respond in kind, further blurring the lines between human-computer interaction and human-human conversation.for instance, in the previously discussed scenario, the user could verbalize their request for a flight booking, and the client agent (a c ) could acknowledge, confirm, and execute these instructions using spoken language. this seamless integration of speech-to-text and text-to-speech models would provide an interaction experience that is not just intuitive but also highly efficient, especially for users with visual impairments or those who are occupied with other tasks and prefer to interact verbally with their devices. with advancements in ai and ml, the future of hci could encompass an array of sensory interactions, each tailored to individual user needs and preferences, creating an immersive and inclusive technological environment.• graphical processor based on diffusion models: this caters to visual tasks, allowing for the generation and interpretation of personalized user interfaces and graphical content, ensuring a multi-modal interaction platform. challenges developing an operating system that integrates generative ai models like the one sketched in figure1promises to reshape system design dramatically.the evolution of human-computer interaction, enhanced by the capabilities of lgms such as llms and dms, has the potential to reshape system design and the dynamics of communication, interaction, and collaboration between users and machines. through the integration of ai into operating systems, we envision a future where interfaces are not only intuitive but also deeply personalized, adapting to individual needs and preferences, allowing for seamless and coherent interactions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/52.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/52.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac19c96accf4784179db442d80b01421dbbf9283 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/52.txt @@ -0,0 +1 @@ +together with the global neuronal workspace, integrated information theory (iit) is currently the most prominent approach to obtaining a scientific theory of consciousness, i.e, a theory that can identify the neural correlates of consciousness through some direct or indirect measure. 
difficulties in agreeing on experimental setups that may discriminate between different theories have recently led to a sort of adversarial collaboration between them. within said collaborative framework, each theory provides the conditions for it to be experimentally falsified. however, despite such partnership and the obtaining of promising results in clinical tests, reaching definitive answers may prove more difficult than initially expected because of the multifactorial complexity of the biological substrate of consciousness, namely the living brain. the appeal of iit for computation science and, in general, the ai community relies on its straightforward definition of consciousness as a formal property of any physical system, composed of nodes and interactions, that instantiates it. consciousness is maximally integrated information φ; the latter quantity being amenable to a strict mathematical definition in a network of interconnected elements that can be thought of as a probabilistic causal graph. were said statement true, iit would be providing not only a quantifiable prediction for a system to be conscious but also a heuristic approach to generate conscious systems -the holy grail of the ai program. yet it could also happen that iit offers just a necessary, but not sufficient, condition for the emergence of consciousness in a system. as a logical consequence, a system implementing an artificial intelligence algorithm need not be conscious whatsoever and, hence, need not understand the meaning of the problems that it solves. interestingly, iit derives its mathematical definition of consciousness from five axioms that fully account for what, phenomenologically, being conscious implies. to wit: the present experience of consciousness exists, is informative, composed, integrated, and excludes other conscious experiences . said axioms should naturally determine the emergence of a physical scale in which the system, from its own perspective, presents a maximally integrated structure of intrinsic information, in which the information of the whole is maximally irreducible to the sum of information in subsets of any partition of the system. additionally, the physical state of the substrate instantiating maximal φ, for the same token, presents a maximal cause-and-effect composition (for the details of the link between measures of integrated information and causality in an interconnected network, one may see ).whereas iit continues developing as a research program that strives for finding the best definition of distance between informational contents and including in its framework many other features of consciousness, it has been criticized on diverse grounds . more specifically for our interests in this paper, some authors have raised the issue of the so-called 'intrinsicality problem' (ip), namely, the fact that consciousness cannot be an intrinsic property of the system because maximal φ crucially depends on the possible existence of bigger values of φ if the initial system is appropriately linked to or embedded in larger systems. ip's threat, if true, could manifest that the causality involved in the constitution of the system goes beyond the conceptualization of causality provided by iit in its current form. 
even though the literature on the computation of φ in different networks is rapidly increasing and gaining insights on the logical architectures favoring maximal φ (basically those consisting of modular, homogeneous, and specialized networks with feedback connections ), the prohibitively large computation times for big networks, scaling with the number of nodes (n) as o(n53 n ), make it necessary the search for alternative heuristics and/or new methods for measuring φ in larger systems. not much is known about the actual distribution of φ over different network types and topologies. this paper thus explores the benefit of random search algorithms in order to compute φ. if, according to iit's creators "the quantity and quality of consciousness are what they are," and consequently "the cause-effect structure cannot vary arbitrarily with the chosen measure of intrinsic information" , how maximal φ varies with the number of systems nodes must be a relevant matter to get a glimpse of how consciousness could eventually scale up in networks. hence, our paper studies the expected increase of maximal φ with the number of nodes, paving the way for gaining insights on the network's architectures that favor consciousness and assessing the seriousness of the ip, if iit holds, in future works. in such an endeavor, our paper will also show the limits of some optimization techniques for iit as the number of network nodes increases. this paper is organized as follows. first, we provide a cursory definition of integrated information, a problem definition of φ optimization, and a technical description of the methodology that we have implemented to optimize φ. most critically, we provide arguments to justify why our proposed algorithm does not use smarter black-box global optimization techniques such as bayesian optimization. then, we show in an experiments section empirical evidence to support the claim that our method is able to optimize φ. finally, we present some concluding remarks and refer to possible research lines to optimize φ in further work. in particular, we enhance a random search procedure instead of a grid search because random search significantly outperforms grid search when the percentage of explained variance on the variable to optimize is not uniform over the explanatory variables. the prior guided random search illustrated on algorithm 1 will obtain φ , which is an approximation to φ best , that is, φ ≈ φ best , which could only be found using an exhaustive search throughout all the tpm d-dimensional space ω. where, in algorithm 1, µ is the adaptation rate of p(ω), the multinomial a priori distribution of the probability of obtaining φ in a certain d-dimensional space, is the number of iterations that are needed to update p(ω), t is the budget of total iterations that the user can afford, dmin is the lower dimension of the tpm d-dimensional space and dmax is the higher dimension of the tpm d-dimensional space. namely, we compare our proposed prior guided random search method with respect to a grid search method and a random search method. however, the grid search method uniformly splits the dimensions of the tpm across all the space range, and the binary tpms are selected by obtaining the binary representations of a linear space of integers from 0 to 2 n where n is the number of nodes which are the columns of the matrices. 
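as a rough illustration of the prior-guided random search sketched in algorithm 1, the following python snippet is a minimal, hedged reconstruction: the φ evaluation is a placeholder for the expensive iit computation (a real run would call a library such as pyphi), and the prior-update rule is a simplified stand-in parameterised by the quantities named in the text (adaptation rate µ, update period, iteration budget t, and the dimension range dmin to dmax).

import numpy as np

def evaluate_phi(tpm):
    """placeholder for the (expensive) integrated-information computation;
    a real experiment would call an iit library such as pyphi here."""
    return float(np.random.rand())  # stand-in value

def prior_guided_random_search(d_min, d_max, budget, mu=1.0, update_every=5, seed=0):
    rng = np.random.default_rng(seed)
    dims = np.arange(d_min, d_max + 1)
    prior = np.ones(len(dims)) / len(dims)        # p(omega): multinomial prior over dimensions
    best_phi, best_tpm = -np.inf, None
    recent_scores = np.zeros(len(dims))
    for t in range(budget):
        i = rng.choice(len(dims), p=prior)        # sample a dimension from the prior
        n = dims[i]
        tpm = rng.integers(0, 2, size=(2 ** n, n))    # random binary transition probability matrix
        phi = evaluate_phi(tpm)
        recent_scores[i] = max(recent_scores[i], phi)
        if phi > best_phi:
            best_phi, best_tpm = phi, tpm
        if (t + 1) % update_every == 0:           # reweight the prior toward promising dimensions
            prior = prior + mu * recent_scores
            prior = prior / prior.sum()
            recent_scores[:] = 0.0
    return best_phi, best_tpm

best_phi, _ = prior_guided_random_search(d_min=3, d_max=6, budget=100)

the point of the sketch is only the control flow: dimensions that recently produced high φ receive more probability mass in the multinomial prior p(ω), so later samples concentrate where integrated information appears easier to find.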
as the prior guided random search method performs almost twice as well as the grid search method, a statistical hypothesis testing method will provide a p-value lower than . hence, for the next experiments, and also for computational reasons due to the complexity of φ with respect to the number of nodes, we will only compare the prior guided random search method with the random search method. we can see, once again, that both searches are able to find better tpms according to the φ measure once the iterations are executed and that prior guided random search once again outperforms random search, not only on average but in absolute value. as we have stated previously, the complexity of evaluating φ with respect to the nodes is high, in particular, o(n53 n ), which must be multiplied by 2 n in these searches as we look for a feasible state for a randomly sampled tpm. the number of iterations is 500, with µ = 1 and = 5, obtaining the results plotted in figure 6. by doing this procedure, we can validate, with the previous set of experiments, that our algorithm is going to work even better when the number of nodes grows, assuming that the same inference will hold when the number of nodes grows, which we cannot empirically test since the complexity of computing φ when the number of nodes grows is exponential. we obtain a t-statistic of -2.38 whose associated p-value is 0. whereas the present calculations still work with toy models with a maximum of 6 nodes, the obtained results with a guided random method strongly suggest that the latter outperforms other methods to explore optimal tpms as the number of nodes increases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/520.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/520.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac19c96accf4784179db442d80b01421dbbf9283 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/520.txt @@ -0,0 +1 @@ +policy gradient methods , are a popular class of approaches in reinforcement learning. the policy in these approaches is considered parameterized and one updates the policy parameter along a gradient search direction where the gradient is of a performance objective, normally the value function. the policy gradient theorem , , which is a fundamental result in these approaches relies on an interchange of the gradient and expectation operators and in such cases turns out to be the expectation of the gradient of noisy performance functions much like the previously studied perturbation analysis based sensitivity approaches for optimization via simulation , . the reinforce algorithm , is a noisy gradient scheme for which the expectation of the gradient is the policy gradient, i.e., the gradient of the expected objective w.r.t. the policy parameters. the updates of the policy parameter are however obtained once after the full return on an episode has been found. actor-critic algorithms , , , , have been presented in the literature as alternatives to the reinforce algorithm as they perform incremental parameter updates at every instant but do so using two-timescale stochastic approximation algorithms. the author was supported by a j. c. bose fellowship, project no. dftm/ 02/ 3125/m/04/air-04 from drdo under dia-rcoe, a project from dst-icps, and the rbccps, iisc. tasks or the stochastic shortest path setting.
our algorithm performs parameter updates upon termination of episodes, that is when goal or terminal states are reached. in this setting, as mentioned above, updates are performed only at instants of visit to a prescribed recurrent state , . this algorithm is based on a single function measurement or simulation at a perturbed parameter value where the perturbations are obtained using independent gaussian random variates.gradient estimation in this algorithm is performed using the smoothed functional (sf) technique for gradient estimation , , , . the basic problem in this setting is the following: given an objective function j : r d → r such that j(θ) = e ξ , where θ ∈ r d is the parameter to be tuned and ξ is the noise element, the goal is to find θ * ∈ r d such thatsince the objective function j(•) can be highly nonlinear, one often settles for a lesser goal -that of finding a local instead of a global minimum. in this setting, the kiefer-wolfowitz finite difference estimates for the gradient of j would correspond to the following: for i = 1, . . . , d,, where e i is the unit vector with 1 in the ith place and 0 elsewhere. further, ξ + i and ξ - i , i = 1, . . . , d are independent noise random variables having a common distribution. the expectation above is taken w.r.t this common distribution on the noise random variables. this approach does not perform well in practice when d is large, since one needs 2d function measurements or simulations for a parameter update.random search methods such as simultaenous perturbation stochastic approximation (spsa) , , , smoothed functional (sf) , , or random directions stochastic approximation (rdsa) , typically require much less number of simulations. for instance, the gradient based algorithms in these approaches require only one or two simulations regardless of the parameter dimension d, while their newton-based counterparts usually involve one to four system simulations for any parameter update (again regardless of d). a textbook treatment of random search approaches for stochastic optimization is available in .we consider here a one-simulation sf algorithm where the gradient of j(θ) is estimated using a noisy function measurement, at the parameter θ + δ∆, where δ > 0 and ∆ △ = (∆ 1 , . . . , ∆ d ) t with each ∆ i ∼ n(0, 1) with ∆ i being independent of ∆ j , ∀j = i. further, ∆ is independent of the measurement noise as well. the gradient estimate in this setting is the following: for i = 1, . . . , d,in the above, ξ denotes the measurement noise random variable. the expectation above is with respect to the joint distribution of ξ and ∆. before we proceed further, we present the basic markov decision process (mdp) setting as well as recall the reinforce algorithm that we consider for the episodic setting.we remark here that there are not many analyses of reinforce type algorithms in the literature in the episodic or stochastic shortest path setting. 
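the one-simulation smoothed functional estimate described above can be written down in a few lines; the sketch below assumes a toy quadratic objective and illustrative step sizes, so it shows the form of the estimator (∆/δ multiplied by a single noisy measurement at θ + δ∆) rather than the paper's actual episodic algorithm.

import numpy as np

rng = np.random.default_rng(0)

def noisy_measurement(theta):
    """toy objective j(theta) = ||theta - 1||^2 observed with additive noise."""
    return np.sum((theta - 1.0) ** 2) + 0.01 * rng.standard_normal()

def sf_gradient_estimate(theta, delta=0.1):
    """one-simulation smoothed functional (sf) estimate of grad j(theta)."""
    perturb = rng.standard_normal(theta.shape)       # delta_i ~ n(0, 1), mutually independent
    y = noisy_measurement(theta + delta * perturb)   # single measurement at theta + delta * perturb
    return (perturb / delta) * y

theta = np.zeros(5)
for k in range(2000):
    step = 0.5 / (k + 10)                            # diminishing step size (illustrative)
    theta -= step * sf_gradient_estimate(theta)
# on average theta drifts toward the minimiser (the all-ones vector)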
the policy in these approaches is considered parameterized and one updates the policy parameter along a gradient search direction where the gradient is of a performance objective, normally the value function.the policy gradient theorem,,which is a fundamental result in these approaches relies on an interchange of the gradient and expectation operators and in such cases turns out to be the expectation of the gradient of noisy performance functions much like the previously studied perturbation analysis based sensitivity approaches for optimization via simulation,.the reinforce algorithm,is a noisy gradient scheme for which the expectation of the gradient is the policy gradient, i. the basic problem in this setting is the following: given an objective function j : r d → r such that j(θ) = e ξ , where θ ∈ r d is the parameter to be tuned and ξ is the noise element, the goal is to find θ * ∈ r d such that. for instance, the gradient based algorithms in these approaches require only one or two simulations regardless of the parameter dimension d, while their newton-based counterparts usually involve one to four system simulations for any parameter update (again regardless of d).}, with µ i : s → a, i ≥ 0, such that µ i (s) ∈ a(s), ∀s ∈ s. the goal of the decision maker is to select actions a k , k ≥ 0 in response to the system states s k , k ≥ 0, observed one at a time, so as to minimize a long-term cost objective.our basic setting here is similar to chapter 3 of, where it is assumed that under any policy there is a positive probability of hitting the goal state t in at most p steps starting from any initial (non-terminal) state, that would in turn signify that the problem would terminate in a finite though random amount of time. a stationary randomized policy is one for which φ j = φ k △ = φ, ∀j, k = 0, 1, .in practice, one might be able to relax this assumption (as with the model-based analysis of) by (a) assuming that for policies that are not proper, v π (i) = ∞ for at least one non-terminal state i and (b) there exists a proper policy.the reinforce algorithm,makes use of the policy gradient theorem as the latter indicates that the gradient of the value function is the expectation of the gradient of a function of the noisy returns obtained from episodes. we assume that multiple trajectories of data can be made available and the data on the mth trajectory can be represented in the form of the tuples (s m k , a m k , g m k , s m k+1 ), k = 0, 1, . also, s m j is the state at instant j, j = k, k + 1 in the mth trajectory. let θ(n) denote the parameter value obtained after the nth update of this procedure which depends on the nth episode and which is run using the policy parameter γ(θ(n) + δ n ∆(n)), for n ≥ 0, where θ.let c ⊂ r d be a compact and convex set as before and γ : r d → c denote the projection operator that projects any x = (x 1 , . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/521.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/521.txt new file mode 100644 index 0000000000000000000000000000000000000000..5eda72f296acbaf9c0a18c64a3c586cc82f6966b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/521.txt @@ -0,0 +1 @@ +although the largest part of deep learning models is defined following the rules of real-valued numbers and algebra r, such a choice is not always the best one for multidimensional data. 
therefore, an increasing number of works are exploring the possibility of defining models with different underlying algebras that better fit the problem of study. among these, clifford, cayley-dickson, and hypercomplex algebras have been widely adopted. more recently, parameterized hypercomplex neural networks (phnns) have transformed the wide research field of neural models defined over such hypercomplex algebras. this happened thanks to their flexibility and malleability that allow users to make use of hypercomplex-based networks without developing ad-hoc architectures or seeking the proper algebra domain that best fits the specific task. indeed, phnns grasp algebra rules directly from data and therefore they can be employed for any n-dimensional data without modifications to architectural layers. (fig. 1: the proposed phydi initialization speeds up the convergence of parameterized hypercomplex neural networks in which it is employed.) for that reason, the already ample field of hypercomplex models based on complex , quaternion , dual quaternion , and octonion numbers has been permeated by phnns. these networks have been defined with different known backbones such as resnets , gans , graph neural networks , and transformers , among others . so as with any neural model, their expressiveness increases with deeper representations, also due to the fact that for high values of the hyperparameter n, which also affects the number of parameters reducing them to 1/n, phnns may be defined with very few parameters even if the number of layers is large. however, no convergence study or regularization and normalization strategies have been proposed for improving phnns training stability and convergence when the number of layers increases. indeed, phnns behavior with a large number of layers is still unknown, and it is not clear how the parameters reduction driven by the hyperparameter n affects the learning of very deep networks and whether intra-layer parameters and overall parameters are balanced during training. in this paper, therefore, we first conduct a study on phnns convergence in large-scale training, discovering that very deep architectures have convergence issues, and finding that the hyperparameter n is related to the convergence. in order to address these issues, we propose parameterized hypercomplex identity initialization (phydi), a method to help phnns converge fast and improve large-scale model learning, motivated by the dynamical isometry that has been proven to be a clear indicator of trainability . we propose to initialize each parameterized hypercomplex multiplication (phm) or convolution (phc) layer, which represent the core of phnns, as an identity function. the proposed initialization is carried out by adding a residual connection and a trainable zero-initialized parameter that multiplies the actual layer parameters, as introduced for real-valued models . we prove that phydi improves very deep resnet- and transformer-based phnns convergence in different benchmarks even when standard phnns diverge, therefore improving the learning ability of large architectures. furthermore, the proposed method leads to faster convergence of each phnn we test, both in image and language datasets.
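a minimal pytorch sketch of the identity initialization described above, with a plain linear layer standing in for the phm/phc layer (the actual hypercomplex layers are not reproduced here): the block output is x + α·f(x) with a trainable α initialised to zero, so every block starts as the identity map, mirroring the rezero-style construction the text refers to for real-valued models.

import torch
import torch.nn as nn

class IdentityInitBlock(nn.Module):
    """residual block that starts as the identity: y = x + alpha * f(x), with alpha = 0 at init."""
    def __init__(self, dim):
        super().__init__()
        # stand-in for a parameterized hypercomplex multiplication (phm) layer
        self.layer = nn.Linear(dim, dim)
        # trainable gate, zero-initialised so the block is the identity at the start of training
        self.alpha = nn.Parameter(torch.zeros(1))

    def forward(self, x):
        return x + self.alpha * self.layer(x)

x = torch.randn(8, 64)
block = IdentityInitBlock(64)
assert torch.allclose(block(x), x)   # identity at initialisation

because α = 0 at initialisation, gradients initially flow through the skip connection only, which is the property that makes very deep stacks of such blocks trainable from the first iterations.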
in summary, our contributions are: i) we conduct, to the best of our knowledge, the first study on the convergence of phnns in large-scale training, showing that this is also related to the key hyperparameter n. ii) we propose phydi, a method to avoid divergence of very deep phnns, which also fastens the convergence of convolutional-and attentionbased phnns in multiple benchmarks, allowing the learning of large-scale networks with fewer iterations.the rest of the paper is organized as follows. the background on phnns is developed in section 2, the proposed method is presented in section 3, while the experimental evaluation is performed in section 4. finally, conclusions are drawn in section 5. more recently, parameterized hypercomplex neural networks (phnns) have transformed the wide research field of neural models defined over such hypercomplex algebras. so as with any neural model, their expressiveness increases with deeper representations, also due to the fact that for high values of the hyperparameter n, which also affects the number of parameters reducing them to 1/n, phnns may be defined with very few parameters even if the number of layers is large. however, any convergence study or regularization and normalization strategies have been proposed for improving phnns training stability and convergence when the number of layers increases. indeed, phnns behavior with a large number of layers is still unknown, as well as it is not clear how the parameters reduction driven by the hyperparameter n affects the learning of very deep networks and whether intra-layer parameters and overall parameters are balanced during training.in this paper, therefore, we first conduct a study on phnns convergence in large-scale training, discovering that very deep architectures have convergence issues, and founding that the hyperparameter n is related to the convergence. in order to address these issues, we propose parameterized hypercomplex identity initialization (phydi), a method to help phnns converge fast and improve large-scale model learning motivated by the dynamical isometry that has been proved a clear indicator of trainability.we prove that phydi improves very deep resnets-and transformer-based phnns convergence in different benchmarks even when standard phnns diverge, therefore improving the learning ability of large architectures. in summary, our contributions are: i) we conduct, to the best of our knowledge, the first study on the convergence of phnns in large-scale training, showing that this is also related to the key hyperparameter n. ii) we propose phydi, a method to avoid divergence of very deep phnns, which also fastens the convergence of convolutional-and attentionbased phnns in multiple benchmarks, allowing the learning of large-scale networks with fewer iterations. the family of phnns comprises neural models defined over various domains by means of hypercomplex layers parameterized by the user. we evaluate the performances of our initialization method according to two metrics, namely the number of epochs the models require to reach the 80% of accuracy (m1), and the number of epochs to beat a model with the proposed approach (m2). indeed, while phresnets152 suffers from slow convergence, endowing such networks with phydi initialization drastically fastens the convergence, lowering the number of epochs by more than 20 in some cases. 
the phtransformer equipped with phydi initialization still requires just 2 epochs to reach a perplexity value of 200 even with 96 encoder layers, proving that the proposed approach improves the learning of largescale phnns.in this paper, we study the convergence of common phnns, proposing parameterized hypercomplex identity initialization (phydi), a method that, with very few architectural changes, improves convergence in speed and learning depth. we experiment with ph convolutional and transformer models on known benchmarks and we prove that phnns endowed with phydi gain convergence speed and a better gradient propagation when the number of layers increases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/522.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/522.txt new file mode 100644 index 0000000000000000000000000000000000000000..72fa5c5326698079492105658a7c6d32113bcbfb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/522.txt @@ -0,0 +1 @@ +artificial intelligence (ai) systems have become prevalent in everyday life. ai is used in retail, security, manufacturing, health, finance, and many more sectors to improve or even replace existing processes. however, with the rise in ai adoption, different risks associated with ai have been identified, including privacy risks to the people whose data was used to train the models. in addition to fundamental societal harm, these risks can result in negative brand reputation, lawsuits, and fines. this has given rise to the notion of trustworthy or responsible ai.a key aspect of responsible ai is the ability to assess (and later mitigate) these risks. assessing the privacy risk of machine learning (ml) models is crucial to enable well-informed decision-making about whether to use a model in production, share it with third parties, or deploy it in customers' homes. the most prevalent approach to privacy risk assessment is to run one or more known attacks against the model and measure how successful they are in leaking personal information.the most common attack used in model assessment is called membership inference. membership inference (mi) attacks aim to violate the privacy of individuals whose data was used in training an ml model by attempting to distinguish between samples that were part of a target model's training data (called members) and samples that were not (non-members), based on the model's outputs. these can be class probabilities or logits (for classification models), the model's loss, or activations from internal layers of the model (in white-box attacks). most attacks choose one or more of these features and train a binary classifier to try to distinguish between members and non-members.in this paper, we present a novel framework for mi attacks against classification models that takes advantage of the ensemble method to generate many specialized attack models for different subsets of the data. we show that this approach outperforms existing approaches based on a single attack model or an attack model per class label, both on classical and language classification tasks.in the realm of large language models (llm), membership inference can be assessed for different phases of the model's development, namely the pre-training and fine-tuning stages. pre-training is largely performed on publicly available datasets, and the data used to train a model is often also public knowledge. fine-tuning is typically performed on a smaller, proprietary dataset. 
it is therefore more common to look at the fine-tuning phase in the context of mi attacks. however, our framework is not limited to this scenario and can also evaluate pre-trained models if necessary. moreover, our approach is adapted to work with both language models that have an explicit classification head and generative models that can respond to classification prompts or instructions, as can be seen in the evaluation section with the flan-ul2 model.our method can cater for both privacy audit mode, in which an organization assesses the privacy vulnerability of their own models, and attack mode, where the real training data is unknown to the attacker. for the latter, a preceding step of generating shadow models and data is required. this paper starts by surveying relevant prior work in section 2. next, we describe our framework for improved membership inference based on small specialized attack models in section 3. we present our evaluation results in section 4. we discuss those results in section 5 and conclude in section 6. membership inference (mi) attacks aim to violate the privacy of individuals whose data was used in training an ml model by attempting to distinguish between samples that were part of a target model's training data (called members) and samples that were not (non-members), based on the model's outputs. these can be class probabilities or logits (for classification models), the model's loss, or activations from internal layers of the model (in white-box attacks).in this paper, we present a novel framework for mi attacks against classification models that takes advantage of the ensemble method to generate many specialized attack models for different subsets of the data. we show that this approach outperforms existing approaches based on a single attack model or an attack model per class label, both on classical and language classification tasks. moreover, our approach is adapted to work with both language models that have an explicit classification head and generative models that can respond to classification prompts or instructions, as can be seen in the evaluation section with the flan-ul2 model.there are several types of privacy (inference) attacks against ml models, including membership inference, attribute inference, model inversion, database reconstruction, and most recently, training data extraction from generative models.in the past few years, investigations have begun into mi in the context of large language models (llm), starting with embedding models and masked language models song & raghunathan (2020);mahloujifar et al.most existing approaches to mi that employ a classification model use either a single attack model for the entire dataset or a separate attack model per class. (2023),fu & cui (2023).we propose a method for performing membership inference attacks by splitting the initial member and non-member datasets into multiple, small non-overlapping subsets, used to train different attack models. this includes classical ml models, such as a decision tree or random forest, language classification models, and even generative models that were fine-tuned for text classification tasks. our experiments show that even for the same pair of member and non-member subsets, the half used for training the attack model has significant impact on the model's ability to infer membership.we compare our method both to training a single attack model on the entire dataset and class-based attacks, where a separate attack model is trained per class label. 
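a rough sketch of the ensembling idea, with several assumptions made explicit: the attack features are synthetic stand-ins for whatever the target model exposes (per-class probabilities, losses, etc.), the member/non-member pools are split into disjoint subsets with one small attack classifier per subset, and a sample's membership score is aggregated here as the maximum over the specialised attackers, which is an illustrative rule rather than the paper's exact combination strategy.

import numpy as np
from sklearn.ensemble import RandomForestClassifier

rng = np.random.default_rng(0)
# toy attack features: rows = samples, columns = e.g. target-model confidence features
members = rng.normal(0.8, 0.1, size=(600, 4))       # stand-in member features
non_members = rng.normal(0.6, 0.2, size=(600, 4))   # stand-in non-member features

def train_specialised_attackers(members, non_members, n_subsets=6):
    attackers = []
    m_parts = np.array_split(rng.permutation(members), n_subsets)
    n_parts = np.array_split(rng.permutation(non_members), n_subsets)
    for m, n in zip(m_parts, n_parts):               # one small attack model per disjoint subset pair
        X = np.vstack([m, n])
        y = np.concatenate([np.ones(len(m)), np.zeros(len(n))])
        attackers.append(RandomForestClassifier(n_estimators=50, random_state=0).fit(X, y))
    return attackers

def membership_score(attackers, x):
    # aggregate the specialised attackers; max over their member-probabilities (an assumed rule)
    return max(clf.predict_proba(x.reshape(1, -1))[0, 1] for clf in attackers)

attackers = train_specialised_attackers(members, non_members)
print(membership_score(attackers, members[0]), membership_score(attackers, non_members[0]))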
for each model and its corresponding data, we conducted six experiments: using a single attack model for the entire dataset (s-cl01) and a single model for class 0 (s-cl0) and for class 1 (s-cl1); and using many small specialized attack models for the entire dataset (m-cl01), for class 0 (m-cl0), and for class 1 (m-cl1). it can be applied to both classical models as well as large language models that perform classification tasks, and even succeeds in attacking models defended using differential privacy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/523.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/523.txt new file mode 100644 index 0000000000000000000000000000000000000000..e324910fea45d5424e8028c85713f76f5eace246 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/523.txt @@ -0,0 +1 @@ +quantum computing is based on the principles of quantum mechanics, while classical computation is built on the rules of classical physics (nielsen and chuang 2010) (desurvire 2009). classical computers operate by manipulating bits, while in quantum computers, the information is processed via the means of its building blocks called qubits. quantum bits or qubits live in a two-dimensional linear vector or hilbert space, unlike bits that can assume discrete values of either 0 or 1. the two computational basis states that span the hilbert space of a qubit are denoted by the states |0⟩ and |1⟩ , as shown in eq. (1).quantum computing works on the basis of two principles of quantum mechanics: superposition and entanglement. superposition states that the quantum states can be added together and the qubits can be broken down into multiple quantum states. this principle allows the bit to be both figure 1: example of q quantum circuit with two qubits (generated using qiskit) one and zero or neither at any given time which simply represents a linear combination of its states:(where coefficients α and β are complex numbers (α, β ∈ c) and are often referred to as probability amplitudes such that:where |α| 2 is the probability of the qubit collapsing to the state |0⟩ upon measurement, and |β| 2 is the probability of it collapsing to the state |1⟩. in theory, by leveraging the principle of superposition, a qubit can store and process information more rapidly than classical computers, offering potential energy efficiency advantages.in addition to superposition, qubits also exhibit quantum entanglement allowing them to form co-relations between individually random behaviors. this property plays an important role in applications such as malicious attack detection, secure communication, and information processing. interestingly, the qubits can be used to train machine learning algorithms in a much faster way as we will see later.classical computers manipulate bits using logic gates such as and, or, not, nand, xor, etc. likewise, quantum computers manipulate qubits using quantum gates. a quantum gate is modeled as a unitary matrix that multiplies the system qubit state, and all quantum operations must be reversible (except measurement). a quantum program, implemented as a quantum circuit, is a sequence of quantum gates applied over one or more qubits as show in figure 1. more details about qc fundamentals can be found in (rieffel and polak 2000).quantum neural networks quantum machine learning is an emerging research area that bridges quantum computing and machine learning. 
being at the border of these two research disciplines, qml entails methods that allow the exploitation of quantum phenomena to improve machine learning algorithms and the application of machine learning algorithms for improving quantum algorithms and designs. qml is based upon two main components -data and algorithms. they can be either quantum or classical as shown in figure 2 .quantum neural networks (qnns) are currently one of the most trending topics in quantum machine learning. they while classical machine learning techniques have been applied to several problems in this area, quantum machine learning (qml) offers a promising approach to overcome classical machine learning (ml) limitations in climate change research by leveraging quantum computing(singh et al. this section introduces the need for significant actions to face climate change, most importantly, by introducing new cutting-edge technologies such as quantum machine learning (qml) (wittek 2014) to help accelerate the co2free transition.quantum computing is based on the principles of quantum mechanics, while classical computation is built on the rules of classical physics (nielsen and chuang 2010)(desurvire 2009). a quantum program, implemented as a quantum circuit, is a sequence of quantum gates applied over one or more qubits as show in figure1.quantum neural networks quantum machine learning is an emerging research area that bridges quantum computing and machine learning. being at the border of these two research disciplines, qml entails methods that allow the exploitation of quantum phenomena to improve machine learning algorithms and the application of machine learning algorithms for improving quantum algorithms and designs. by leveraging quantum algorithms and quantum annealers, qml can process massive datasets, simulate intricate climate models, and optimize resource allocation for sustainable energy production. in their research, four distinct neural network models were evaluated: convolutional neural network (cnn), neural network (nn), quantum neural network (qnn), and quantum convolutional neural network (qcnn). in fact, quantum algorithms are designed to harness the unique properties of quantum mechanics, such as superposition and entanglement, which don't have direct counterparts in classical systems. thus, when quantum methods are implemented or simulated on classical systems (quantum simulators), they can be inefficient and don't exploit the potential advantages of genuine quantum computation. (besir ogur 2023) explored the impact of quantum phenomena, specifically superposition and entanglement, on weather forecasting using a variational quantum circuit (vqc) model. 
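as a concrete illustration of the superposition and entanglement principles recalled above, the following numpy sketch manipulates plain state vectors instead of using a quantum sdk: a hadamard gate puts the first qubit of |00⟩ into an equal superposition, a cnot then entangles the two qubits into the bell state (|00⟩ + |11⟩)/√2, and the measurement probabilities follow the |amplitude|² rule from the earlier discussion.

import numpy as np

ket0 = np.array([1.0, 0.0])                            # computational basis state |0>
H = np.array([[1, 1], [1, -1]]) / np.sqrt(2)           # hadamard: creates superposition
CNOT = np.array([[1, 0, 0, 0],
                 [0, 1, 0, 0],
                 [0, 0, 0, 1],
                 [0, 0, 1, 0]])                         # entangles control (qubit 0) and target (qubit 1)
I = np.eye(2)

state = np.kron(ket0, ket0)                             # |00>
state = np.kron(H, I) @ state                           # (|00> + |10>)/sqrt(2): qubit 0 in superposition
state = CNOT @ state                                    # bell state (|00> + |11>)/sqrt(2)

probs = np.abs(state) ** 2                              # |alpha|^2 rule for measurement outcomes
print(dict(zip(["00", "01", "10", "11"], probs.round(3))))   # {'00': 0.5, '01': 0.0, '10': 0.0, '11': 0.5}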
to improve forecast accuracy, a high-precision hybrid prediction model based on variational quantum circuit (vqc) and long short-term memory (lstm) network is developed by(yu et al.quantum computing (qc) has recently demonstrated remarkable progress by achieving exceptionally high advan-tages in computational performance for certain tasks performed on quantum computers that are otherwise intractable to tackle even with the most powerful supercomputers.additionally, the reviewed literature uses quantum simulators which are software tools that mimic the behavior of a quantum computer but run on classical computers.common limitations of the studied literature are 1) restricted data encoding and quantum kernel usage, 2) computing on ideal simulated quantum computer such as ibmquantum1and pennylane2, and 3) the assumption of an infinite number of samples sampled from the quantum circuits that encode the data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/524.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/524.txt new file mode 100644 index 0000000000000000000000000000000000000000..c089865aa1b949b0a0e21ce6b36fe67ac58b525e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/524.txt @@ -0,0 +1 @@ +our world is inherently sequential. adapted to this, humans are successful in continuously learning new skills over their lifetime. however, most state-of-the-art training procedures for artificial neural networks (anns) rely on data being independent and identically distributed. in settings where the data distribution changes, networks have been reported to rapidly forget previous knowledge (parisi, kemker, part, kanan, & wermter, 2019;hadsell, rao, rusu, & pascanu, 2020). this phenomenon is commonly termed catastrophic forgetting (french, 1999;mccloskey & cohen, 1989).a number of factors influence the degree to which performance decreases in sequential learning scenarios: the dimensionality of representations (mirzadeh et al., 2022), pretraining (ramasesh, lewkowycz, & dyer, 2022), objective function (s. li, du, van de ven, & mordatch, 2022;davari, asadi, mudur, aljundi, & belilovsky, 2022) and task similarity (ramasesh, dyer, & raghu, 2020). however, the changes to the task-relevant representations during continual learning remain to be fully characterized (see davari et al. (2022) for first steps). in this work, we characterize changes in representational geometry and their contribution to the observed decrease in performance. we find that rather than forgetting, much of the degraded performance can be explained by a misalignment of representations and the readouts of the network.we characterise the information present throughout learning by training diagnostic readouts for all tasks after every phase of training. contrary to this, performance loss attributed to misalignment is computed by the difference in performance between the original readout (t = 0) and the newly trained diagnostic readouts at every phase of training. third, to estimate the extent to which misalignment is due to rotation, translation, and uniform scaling of an otherwise static geometry, we align representations for each task after each training phase to the representations immediately after learning the task (t = 0) with a geometry-preserving procrustes transformation(gower, 1975). a rapid drop in performance of the original readouts as the network is trained on additional tasks (fig.does misalignment preserve the original representational geometry? 
if so, we'd expect that procrustes alignment should yield performance as good as the linear diagnostic readouts. prior to averaging, task performance trajectories are temporally aligned to task onset such that the x-axis reflects performance after t additional tasks have been learned. shown are representation vectors directly after learning the task, after learning 5, and after learning 9 additional tasks (left to right).an open question that remains from our and previous work is whether the comparably good performance of the diagnostic readout is explained by transfer learning based on features learned for earlier tasks in the sequence. indeed, we observe that the features learned for previously encountered tasks transfer to unseen tasks ('feature transfer' in fig. yet, transfer cannot fully explain the performance observed with diagnostic readouts, as a clear discontinuity in the diagnostic readout performance trajectory from before to after training a new task (t = 0) can be seen. this additional information stays preserved in the network over learning of multiple additional tasks, as evidenced by the fact that diagnostic readout performance stays above the performance measured at t = -1 for the subsequent phases (t > 0).in characterizing representational changes in a neural network during continual learning, we observed that misalignment of the pre-readout representations with the task readouts explains large parts of performance degradation that is commonly referred to as 'catastrophic forgetting'.many algorithms addressing catastrophic forgetting rely on restricting learning at synapses that encode information for previous tasks(zenke et al. we argue that information in hidden layers is largely preserved, even without restricting learning trajectories or placing constraints on representations the network is allowed to learn. this is especially prominent in larger networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/525.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/525.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a4e311dd16bf741719e3f3bed6fded861080746 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/525.txt @@ -0,0 +1 @@ +ensemble learning (or ensembling) is a set of methods which leverage an ensemble of models (also called weak learners), instead of relying on a single learner to perform a given machine learning task (e.our contribution, in this work, is to introduce aggregated f -average (afa) neural networks (nns), based on a novel architecture for the output fusion phase of ensemble learning.we now propose to extend the generalised average framework (1) to the case when scalars (ω k ) 1≤k≤k are replaced by matrices (ω k ) 1≤k≤k in r n ×n , so as to allow a full mixing of the weak learners. given some functions (f, f -1 ) defined as previously, the f -average output x ∈ r n is obtained as. the f -average network is interpretable, as the contribution of each output in the final prediction can easily be retrieved using the weights in matrix w , and the averaging operation is determined by the choice of f .and f j is a function operating component-wise from which consists of collecting predictions from all previous weak classifiers on training sets from all previous sessions to form the training set of the ensemble learning model. 
performance were also compared against three different types of ensembling neural network: 1) a shallow neural network with similar number of parameters and layers as our afa model, 2) a deeper neural network with five fully connected layers, and 3) a neural network specifically designed for ensembling including a weighted average layer followed by a fully connected layer for the output. all neural network models, including the afa model, were trained with the same process with only slight adjustments on learning rate parameters to adapt to each model architecture. typically, a model trained on base session only (called no fine-tuning in tableiv), reaches a decent mean accuracy thanks to its performance on base session classes (k = 1), while it yields very poor performance on new classes (k > 1).afa also outperforms other nn approaches: among the three models compared with the afa model, best results were achieved by the weighted average neural network model. indeed, these later sessions drastically increase its architecture size due to the higher number k of models to ensemble and the higher number of classes n k to predict, while not providing a significantly larger training set because of the few-shot constraint of our task at hand. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/526.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/526.txt new file mode 100644 index 0000000000000000000000000000000000000000..b08bb1270da15685f07eda83450fcc3e91c1b7e9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/526.txt @@ -0,0 +1 @@ +reinforcement learning (rl) has achieved remarkable success in many fields that require sequential decision-making and optimal control, such as games , , robotics , and industrial control , . the foundation of rl algorithms lies in the theory of markov decision process (mdp) . at each time step, the agent chooses an action based on its current state and gets an intermediate reward.the objective is to find an optimal policy that maximizes each state's value, i.e., the infinite horizon discounted accumulative rewards. the bellman equation identifies the necessary and sufficient conditions of optimal values based on bellman's principle of optimality.value iteration and policy iteration are two fundamental algorithms to solve the bellman equation . value iteration is essentially a fixed-point iteration technique by consecutively applying bellman operator to the current value. policy iteration alternately performs two steps: policy evaluation and policy improvement. puterman and brumelle (1979) show that policy iteration is a particular variant of newton-raphson method applied to bellman equation . since bellman equation contains the max operator, it is nonsmooth and cannot be directly solved by newton-raphson method in which its derivative is required. it turns out that, in policy iteration, the nonsmooth bellman equation is linearized at the current value and the derivative is calculated with the locally linearized equation. puterman and brumelle (1979) prove that policy iteration enjoys global linear convergence with the rate being γ (discount factor) . they also show that policy iteration converges quadratically for some special classes of mdps (transition probabilities are affine in action and reward function is strictly concave and twice differentiable in action) , . 
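to make the two dynamic-programming schemes recalled above concrete, here is a compact numpy sketch on a small random mdp: value iteration repeatedly applies the bellman optimality operator, while policy iteration alternates exact policy evaluation (a linear solve) with greedy improvement; the mdp, sizes, and tolerance are illustrative.

import numpy as np

rng = np.random.default_rng(0)
nS, nA, gamma = 5, 3, 0.9
P = rng.random((nS, nA, nS)); P /= P.sum(axis=2, keepdims=True)   # transition kernel p(s'|s,a)
R = rng.random((nS, nA))                                          # reward r(s,a)

def value_iteration(tol=1e-8):
    V = np.zeros(nS)
    while True:
        V_new = (R + gamma * P @ V).max(axis=1)      # bellman optimality operator
        if np.max(np.abs(V_new - V)) < tol:
            return V_new
        V = V_new

def policy_iteration():
    pi = np.zeros(nS, dtype=int)
    while True:
        # policy evaluation: solve (I - gamma * P_pi) V = R_pi exactly
        P_pi = P[np.arange(nS), pi]
        R_pi = R[np.arange(nS), pi]
        V = np.linalg.solve(np.eye(nS) - gamma * P_pi, R_pi)
        # policy improvement: greedy with respect to the evaluated value
        pi_new = (R + gamma * P @ V).argmax(axis=1)
        if np.array_equal(pi_new, pi):
            return V, pi
        pi = pi_new

V_vi = value_iteration()
V_pi, _ = policy_iteration()
assert np.allclose(V_vi, V_pi, atol=1e-6)   # both converge to the optimal value

on such small problems policy iteration typically terminates in a handful of improvement steps, consistent with its interpretation as a newton-type method discussed above.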
a widely used variant of policy iteration is the modified policy iteration algorithm, in which repeated cycles of policy improvement and finite-step policy evaluation are performed , , . modified policy iteration reduces to value iteration with one-step policy evaluation and becomes policy iteration in the limit case of infinite-step policy evaluation. puterman and shin (1978) prove the convergence of modified policy iteration under the assumption that the initial value is element-wise smaller than the optimal value. scherrer et al. (2012) show that modified policy iteration has global linear convergence with the rate being γ and provide comprehensive error propagation analysis for its approximate form .regularization techniques are frequently used in contemporary rl algorithms for various purposes, such as encouraging exploration and improving robustness. this kind of techniques adds regularizers into the optimization objective and thus modifies the standard mdp framework. shannon entropy is one of the most frequently used regularizers. haarnoja et al. (2017) propose the soft q-learning algorithm by integrating shannon entropy with value iteration . haarnoja et al. (2018) further combine shannon entropy with policy iteration to construct a soft policy iteration framework and propose the soft actor-critic algorithm . duan et al. (2021) integrate distributional value learning with soft policy iteration and propose the distributional soft actor-critic algorithm . xiang et al. (2021) propose a data-efficient model-free algorithm for robotic skill acquisition by fusing a task-oriented guiding reward with shannon entropy regularization . srivastava et al. (2022) utilize shannon entropy to quantify exploration and determine the stochastic policy that maximizes it while guaranteeing a low value of the expected cost . another important regularizer is tsallis entropy. lee et al. (2018) construct a sparse mdp with tsallis entropy regularization and propose a sparse value iteration algorithm to solve it . chow et al. (2018) also use tsallis entropy in path consistency learning to derive the sparse consistency equation . kullback-leibler (kl) divergence is also used in rl regularization techniques. yang et al. utilize kl divergence to learn the representation of state transition to conduct multimodal stochastic policies . the aforementioned algorithms share the idea of using regularization, but they are derived from various motivations. geist et al. (2019) show that in rl different regularizers can be unified into the same framework named regularized mdps . the key idea is to define a regularized bellman operator, which is obtained by adding a strongly convex function (the regularizer) into the standard bellman operator and making use of legendre-fenchel transform , . since regularized bellman operator shares the same properties with its unregularized counterparts, such as contraction and monotonicity , the dynamic programming techniques in standard mdps (i.e, policy iteration and modified policy iteration) can be utilized to solve regularized mdps.despite the establishment of the regularized mdp framework, the convergence results of its associated algorithms remain limited. geist et al. (2019) show that regularized policy iteration has global linear convergence with the rate being γ by utilizing the monotone contraction of regularized operators . more recently, cen et al. 
(2022) prove that soft policy iteration (a special case of regularized policy iteration when choosing shannon entropy as the regularizer) achieves asymptotic quadratic convergence . their result requires the assumption that the visitation probability of the optimal policy for every state is strictly larger than zero, which is hard to verify and may not hold for all regularized mdps.motivated by puterman and brumelle (1979) , this paper establishes the equivalence between regularized policy iteration and the standard newton-raphson method. this equivalence reveals the theoretical underpinning of regularized policy iteration and further leads to a unified analysis for both its global linear convergence and local quadratic convergence. different from cen et al. (2022) , our quadratic convergence result is derived for general regularized mdps without any additional assumptions. the contributions of this paper are summarized as follows.• this paper proves that regularized policy iteration is equivalent to the standard newton-raphson method in the condition of smoothing out bellman equation with strongly convex functions. the key idea is that smoothed bellman equation can be converted into an equivalent affine transformation form, in which the jacobian serves as the linear map. this enables the newton iteration formula to be simplified into a regularized self-consistency equation which corresponds to the policy evaluation part of regularized policy iteration. • this paper proves that regularized policy iteration has global linear convergence with the rate being γ (discount factor). the key method is to prove that smoothed bellman equation enjoys vector-valued convexity and the inverse of its jacobian is negative, making the value sequence generated by newton-raphson iteration converges monotonically to the optimal value. this result is consistent with the proposition of geist et al. (2019) , whose proof is based on the monotone contraction of regularized bellman operator. • furthermore, this paper proves that regularized policy iteration converges quadratically in a local region around the optimal value. the key of proof is to exploit the global lipschitz continuity of the jacobian of smoothed bellman equation and bound the difference terms in the newton iteration formula. this result casts light on the role of regularization in enabling fast convergence. to the best of our knowledge, this is the first quadratic convergence result for regularized policy iteration. • this paper further extends the analysis from regularized policy iteration to that with finite-step policy evaluation. the latter is called regularized modified policy iteration, whose algorithm is discussed in scherrer et al. ( 2012) and guist et al. ( 2022) . this paper shows that regularized modified policy iteration is equivalent to inexact newton method in which the newton iteration formula is solved by truncated iterations. this paper proves for the first time that the asymptotic convergence rate of regularized modified policy iteration is γ m in which m denotes the number of iterations carried out in policy evaluation. this result is obtained by proving the error term decays in the rate of γ m with respect to a particular norm related to the optimal value.f ω (q) = f ′ ω (q)q + γp e ω (q) + r = γp (∇f ω (q) q + e ω (q)) + r -q,(24)in which i = -1 n ω (∇ max ω (q (s i , •))) and e ω (q) ∈ r n .following lemma 2, we immediately have f ω (q 2 )-f ω (q 1 ) ≥ f ′ ω (q 1 )(q 2 -q 1 ) for all q 1 , q 2 ∈ r nm . 
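as a concrete instance of the smoothed bellman operator discussed in this section, the sketch below uses the shannon-entropy regularizer, for which the smoothed maximum becomes a scaled log-sum-exp; iterating the operator on a small random mdp exhibits the γ-linear contraction referred to in the text (the mdp and the temperature τ are illustrative choices, not the paper's experimental setup).

import numpy as np
from scipy.special import logsumexp

rng = np.random.default_rng(1)
nS, nA, gamma, tau = 4, 3, 0.9, 0.5
P = rng.random((nS, nA, nS)); P /= P.sum(axis=2, keepdims=True)
R = rng.random((nS, nA))

def soft_max(q, tau):
    """smoothed maximum induced by the shannon-entropy regularizer (scaled log-sum-exp)."""
    return tau * logsumexp(q / tau, axis=1)

def soft_bellman_operator(Q):
    return R + gamma * P @ soft_max(Q, tau)          # regularized bellman operator

Q = np.zeros((nS, nA))
errors = []
for _ in range(60):
    Q_next = soft_bellman_operator(Q)
    errors.append(np.max(np.abs(Q_next - Q)))
    Q = Q_next
# successive errors shrink roughly by a factor of gamma per iteration (linear convergence)
print([round(e, 5) for e in errors[:6]])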
we show that q_{k+1}, obtained by one step of the nr method on the smoothed bellman equation, equals q^{π_{k+1}}, obtained by one step of regularized pi, starting from the same point q_k = q^{π_k}. substituting it into the previous formula, we obtain (γ p ∇f_ω(q_k) - i) q_k - (γ p ∇f_ω(q_k) - i) q_{k+1} = γ p ∇f_ω(q_k) q_k + γ p e_ω(q_k) + r - q_k, which can be simplified to q_{k+1} = γ p ∇f_ω(q_k) q_{k+1} + γ p e_ω(q_k) + r. in the proof of theorem 1, we have shown that (32) is equivalent to the regularized self-consistency condition (38), so the relationship t^{π_{k+1}}_ω(q) = γ p ∇f_ω(q_k) q + γ p e_ω(q_k) + r holds. utilizing the decomposition of the smoothed bellman equation at q_k (31), we also have t^{π_{k+1}}_ω(q) = γ p ∇f_ω(q_k) q + f_ω(q_k) + q_k - γ p ∇f_ω(q_k) q_k. for m = 1, t^{π_{k+1}}_ω(q_k) = q_k + f_ω(q_k) holds, which can be obtained by substituting q = q_k into (41). given any initial q_0 ∈ r^{nm}, q_1 = q_0 - (f'_ω(q_0))^{-1} f_ω(q_0) satisfies f_ω(q_1) ≥ 0 and q_1 ≤ q*, in which q* denotes the solution of the smoothed bellman equation (15). since the smoothed bellman equation is convex (26), we have f_ω(q_1) ≥ f_ω(q_0) + f'_ω(q_0)(q_1 - q_0) = 0. since q* is the solution of the smoothed bellman equation, we have 0 = f_ω(q*) ≥ f_ω(q_1) + f'_ω(q_1)(q* - q_1). lemma 3 shows that no matter which initial point q_0 we choose, q_1 obtained by the first step of regularized pi satisfies f_ω(q_1) ≥ 0 and q_1 ≤ q*. we will first show that if q_k ≤ q* and f_ω(q_k) ≥ 0, then q_k ≤ q_{k+1} ≤ q* and f_ω(q_{k+1}) ≥ 0. transposing terms, we obtain -f_ω(q_{k+1}) ≥ f'_ω(q_{k+1})(q* - q_{k+1}). taking the limit of (49) and utilizing the squeeze theorem, we have lim_{k→∞} f_ω(q_k) = 0, which indicates that the sequence {q_k} converges to the solution q*. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/527.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/527.txt new file mode 100644 index 0000000000000000000000000000000000000000..c919089e6c0c983a743ecf086a07ecbf6803aa79 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/527.txt @@ -0,0 +1 @@ +management of the access of people to emergency departments (ed) is a critical problem for healthcare administration. the correct management of queues may improve the level of quality of hospitals as well as contribute to controlling costs and reimbursement. access to ed presents some peculiar characteristics in different countries. for instance, in italy, the request for access to ed is universal, so all the people staying in italy, without any restriction, may require access. this often causes overcrowding, even in the absence of special conditions such as pandemics, so the problem is highly important . the management of queues is based on prioritising patients considering the clinical aspects, to guarantee high-priority access to patients requiring immediate (or urgent) care. for these reasons, a set of systems, known as triage systems, has been introduced. triage systems implement patient screening to prioritise examination at an adequate priority level. in italy, triage is a process performed by nursing staff before entering the treatment rooms, and it is ruled by the national ministry of health https://www.salute.gov.it/imgs/c_17_notizie_3849_listafile_itemname_1_file.pdf.
still, the system's core is a four-level in-hospital triage based on an acuity scale measurement, and it may present some small differences among regions. considering a worldwide scenario, strategies adopted in eds are based on streaming, which groups patients by severity of illness and assigns them to separate areas of the ed (e.g. fast-track, see and treat, minor injuries) . ed triage systems are in use worldwide, and among the others, according to the literature , the most widespread and adopted systems are the australasian triage system (ats) , the emergency severity index (esi, united states) , the manchester triage system (mts) , and the canadian triage and acuity scale (ctas) . in parallel, some other indexes are currently in use, such as the korean triage and acuity scale (ktas) , the taiwan triage acuity scale (ttas) , and the south african acuity scale (saas) . machine learning and artificial intelligence (ai) algorithms may create predictive clinical models able to handle large heterogeneous datasets, such as electronic medical records (emrs) . in particular, ai models provide better predictions, outperforming classical clinical scoring systems . consequently, several prediction models have been developed to improve the triage process, providing stratification of patients as well as more sensitivity and specificity in the proper identification of patients at greater risk of mortality. in this study, we present a novel clinical algorithm based on artificial intelligence and network science for assigning priority to patients. figure 1 depicts these steps. we start by considering clinical patient data extracted from patient records. these data include analytical observations as well as subjective ones. after an initial preprocessing step to identify possible noise and outliers, a modelling phase is applied. in this step each patient is modelled as a node of a graph, while edges represent the similarity among the observation data. at this point the graph is embedded into a latent space. finally, patients are classified into risk groups by applying a node classification algorithm. since the embedding is inductive, a novel patient can be mapped into the latent space and classified without rebuilding the model. we tested our pipeline on public data to demonstrate the effectiveness of our approach and the improvement with respect to state-of-the-art approaches. considering a worldwide scenario, strategies adopted in eds are based on streaming, which groups patients by severity of illness and assigns them to separate areas of the ed. ed triage systems are in use worldwide, and among the others, according to the literature, the most widespread and adopted systems are the australasian triage system (ats), the emergency severity index (esi, united states), the manchester triage system (mts), and the canadian triage and acuity scale (ctas). in this step each patient is modelled as a node of a graph, while edges represent the similarity among the observation data.
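as a concrete illustration of the pipeline sketched above (a similarity-thresholded patient graph followed by gnn node classification), the following minimal python sketch shows one way such a graph could be built and classified; the helper names, the mean-similarity threshold and the two-layer gcn are illustrative assumptions, not the exact configuration used in the study.

```python
# illustrative sketch: build a patient-similarity graph and classify nodes with a small gcn.
# assumes a feature matrix `x` (patients x clinical variables) and integer risk labels `y`;
# the threshold rule (mean cosine similarity) and layer sizes only loosely follow the
# description above and are not taken from the original implementation.
import numpy as np
import torch
import torch.nn.functional as F
from sklearn.metrics.pairwise import cosine_similarity
from torch_geometric.nn import GCNConv
from torch_geometric.data import Data

def build_patient_graph(x: np.ndarray, y: np.ndarray) -> Data:
    sim = cosine_similarity(x)                 # pairwise patient similarity
    np.fill_diagonal(sim, 0.0)
    threshold = sim.mean()                     # average similarity as the link threshold
    src, dst = np.nonzero(sim > threshold)     # keep sufficiently similar pairs as edges
    edge_index = torch.tensor(np.vstack([src, dst]), dtype=torch.long)
    return Data(x=torch.tensor(x, dtype=torch.float),
                edge_index=edge_index,
                y=torch.tensor(y, dtype=torch.long))

class GCN(torch.nn.Module):
    def __init__(self, in_dim: int, hidden: int, n_classes: int):
        super().__init__()
        self.conv1 = GCNConv(in_dim, hidden)
        self.conv2 = GCNConv(hidden, n_classes)

    def forward(self, data: Data) -> torch.Tensor:
        h = F.relu(self.conv1(data.x, data.edge_index))
        return self.conv2(h, data.edge_index)  # per-patient risk-class logits
```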
some of the most diffuse and adopted systems are the australasian triage system (ats), the emergency severity index (esi, united states), the manchester triage system (mts), and the canadian triage and acuity scale (ctas),.automatic, or computational based triage systems, presents some advantages such as: (i) stability of assignment, (ii) filtering out noise in the patient variables, (iii) model and analyse patient similarity, (i.similarity is calculated using the measure of cosine similarity between instances,and the two metrics relating to the euclidean distance and the manhattan distance between instances. in order to set appropriate thresholds, the average values of similarity and distances on the data-set were calculated before, to choose the values that would neither cause the exclusion of too many nodes nor the creation of an excessive number of links. therefore, graph embedding approaches are the best way to convert a graph structure into a tabular one that can be mined with deep learning algorithms. graph representation learning (grl) or graph embedding is a set of algorithms and methods to encode graph structures into low-dimensional spaces, using both mathematical and softcomputing techniques,. node embedding methods can be divided into three major categories: i) matrix-factorization; ii) random-walks; and iii) graph neural networks.the first architecture is composed by five gcnconv layers of 64 nodes, and it is used for the analysis of the graphs created on the basis of the cosine similarity (as represented in figure3) and the manhattan distance. on the other hand, for the graph created based on the euclidean distance, four gcnconv layers of dimension equal to 32 are the network's structure.the gcn architecture and the gat architecture were trained for 300 epochs on the graphs created by the manhattan distance and the cosine similarity; on the contrary, they were trained for 200 epochs on the graph created by the euclidean distance. the results obtained (shown in figure3) were higher than gcn and gat applied on graph structures but are lesser than graphsage results that could be improved in future research to obtain better results. they preprocess patient data, create graphs based on similarity metrics (cosine similarity, euclidean distance, and manhattan distance), and apply graph neural networks (gnns) including gcn, gat, and graphsage for classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/528.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/528.txt new file mode 100644 index 0000000000000000000000000000000000000000..d7e593e3a420075727210067e6b4e96dec04e48c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/528.txt @@ -0,0 +1 @@ +in recent years, there have been unprecedented advancements in large language models (llms) such as transformers , bert , gpt , and their variants. llms can be treated as foundation models that can be readily applied to diverse downstream tasks with little adaptation . these models have achieved state-of-the-art results on many natural language processing tasks including text classification, machine translation, sentiment analysis, and text summarization . significantly, advancements in architectures and training methodologies have given rise to emergent capabilities, setting state-of-the-art models like gpt-3.5 , gpt-4 , claude-2 , bard , llama , and llama-2 apart from their predecessors. 
for instance, in-context learning and zero-shot capabilities enable these models to generalize across tasks for which they were not explicitly trained. this is confirmed by their excellent performance in complex activities such as mathematical reasoning and question answering (qa) systems. however, most of the tasks that large language models (llms) surpassed previous benchmarks are natural language processing (nlp) tasks involving sequential data. graph-structured data presents additional complexity beyond sequences as it contains rich topological connections between entities that must be modeled along with node, edge, and graph attributes. graph-structured data is ubiquitous across many domains, including social networks , knowledge graphs , molecular structures , and transportation networks . while llms have shown powerful reasoning and generalization capabilities in sequential data, it remains unclear if they can handle structural information beyond context when applied to graph-structured data. this raises a compelling research question: can the strengths of llms be extended to graph-structured data, enabling them to exhibit significant predictive ability? further, can they compete with state-of-the-art models specialized for graph data, such as graph neural networks (gnns)?to comprehensively study the capabilities of llms on graph-structured data, we conduct a series of empirical experiments with leading llms on diverse graph-based tasks that span node-, edge-, and graph-level predictions. by comparing their performance to specialized graph models like gnns, we aim to assess the potential strengths and limitations of llms in this domain. critically, by altering the input prompt formats, we aim to evaluate how effectively llms can extract and leverage the underlying structural information from the graph to enhance their performance in subsequent tasks. additionally, we explore the importance of the structural data across different task dimensions spanning node, edge, and graph levels as well as diverse dataset domains such as citation networks, social networks, and chemical networks.broadly, this paper focuses on studying the central question of investigating the capabilities of llms on graph-structured data from three perspectives:• can llms effectively process graph analytics tasks even without explicit graph structure?given that llms have already shown the capability to leverage contextual information for humanlike reasoning in many nlp tasks, it becomes intriguing to assess whether they can attain substantial predictive performance on graph data tasks, even in the absence of structural information. • how well can llms interpret graph structures to enhance downstream task performance?it is essential to investigate to what extent llms can perceive and interpret important graph structures. furthermore, it is imperative to understand whether such recognition can influence and enhance performance in subsequent tasks. • how do task dimensions and dataset domains affect llms' ability to handle structured data? llms' ability in identifying pivotal structural information for predictions can be influenced by specific tasks and data domains. for example, node-level tasks may heavily rely on entity attribute interpretation, while graph-level tasks may demand comprehensive understanding of intricate inter-node interations. 
also, the distinct topologies properties to various dataset domains, whether derived from intricate social networks or sophisticated molecular structures, further influence the proficiency with which llms decipher and manage structured data.the subsequent sections of this paper are structured as follows: we initiate with an extensive literature review, highlighting the recent advancements of llms within graph domains. subsequent to this, we present our comprehensive findings on benchmarking llms on graph data, aiming to address the aforementioned research questions. this is accompanied by a detailed discussion, delving into the depth of our discoveries across varied experimental setups. we conclude by summarizing the key points and proposing ideas for future explorations. however, most of the tasks that large language models (llms) surpassed previous benchmarks are natural language processing (nlp) tasks involving sequential data. while llms have shown powerful reasoning and generalization capabilities in sequential data, it remains unclear if they can handle structural information beyond context when applied to graph-structured data. this raises a compelling research question: can the strengths of llms be extended to graph-structured data, enabling them to exhibit significant predictive ability? further, can they compete with state-of-the-art models specialized for graph data, such as graph neural networks (gnns)?.to comprehensively study the capabilities of llms on graph-structured data, we conduct a series of empirical experiments with leading llms on diverse graph-based tasks that span node-, edge-, and graph-level predictions. critically, by altering the input prompt formats, we aim to evaluate how effectively llms can extract and leverage the underlying structural information from the graph to enhance their performance in subsequent tasks. additionally, we explore the importance of the structural data across different task dimensions spanning node, edge, and graph levels as well as diverse dataset domains such as citation networks, social networks, and chemical networks.given that llms have already shown the capability to leverage contextual information for humanlike reasoning in many nlp tasks, it becomes intriguing to assess whether they can attain substantial predictive performance on graph data tasks, even in the absence of structural information. • how do task dimensions and dataset domains affect llms' ability to handle structured data? llms' ability in identifying pivotal structural information for predictions can be influenced by specific tasks and data domains. on node-level tasks, we choose the semi-supervised results from graph neural network (gnn), graph convolutional network (gcn)and graph attention network (gat)to compare with performance from llms. however, these studies aim to offer a foundational understanding of llms' proficiency in understanding graph data structures and forecasting downstream tasks.driven by the goal of investigating llms' capabilities in discerning patterns within textual graphs and leveraging this for downstream tasks, we crafted three distinct prompts for our node-level prediction task experiments: (1) absence of graph topology descriptions; (2) straightforward presentation of all neighborhood data to the llm; and (3) a retrieval-based prompt guiding the llm to extract task-centric structural details.this research presented a systematic empirical evaluation of leading llms on diverse graph learning tasks spanning node, edge, and graph levels. 
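to make the three prompt styles concrete, the following minimal python sketch shows one possible way to assemble them for a node-level prediction task; the function names, the word-overlap retrieval rule and the prompt wording are assumptions for illustration, not the exact prompts used in the experiments.

```python
# illustrative sketch of the three prompt styles described above for node-level prediction:
# (1) no graph structure, (2) the full neighborhood as text, (3) a retrieval-style prompt that
# keeps only the most relevant neighbors. all wording and helper names are assumptions.
from typing import List

def prompt_no_structure(node_text: str, labels: List[str]) -> str:
    return (f"paper abstract: {node_text}\n"
            f"choose the most likely category from {labels}. answer with one label.")

def prompt_full_neighborhood(node_text: str, neighbor_texts: List[str], labels: List[str]) -> str:
    neighbors = "\n".join(f"- linked paper: {t}" for t in neighbor_texts)
    return (f"paper abstract: {node_text}\n"
            f"its neighbors in the citation graph:\n{neighbors}\n"
            f"choose the most likely category from {labels}. answer with one label.")

def prompt_retrieval(node_text: str, neighbor_texts: List[str], labels: List[str], k: int = 3) -> str:
    # crude task-centric retrieval: keep the k neighbors sharing the most words with the node
    overlap = lambda t: len(set(t.lower().split()) & set(node_text.lower().split()))
    top = sorted(neighbor_texts, key=overlap, reverse=True)[:k]
    return prompt_full_neighborhood(node_text, top, labels)
```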
our results demonstrate that while llms exhibit reasonable node classification capabilities even without explicit graph data, likely by relying on contextual clues, their zero-shot performance continues to lag behind state-of-the-art gnns specialized for this domain. in summary, this research provides valuable evidence that llms have promising capabilities on graph analytics, while also revealing clear areas for improvement compared to specialized graph models. our future work should explore more rigorous benchmarking of llms on graph learning tasks against graph-specialized models, novel prompt designs that focus on topological structures, evaluation on additional graph tasks, and even fine-tuning open-sourced llms on graphs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/529.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/529.txt new file mode 100644 index 0000000000000000000000000000000000000000..cccac6fe1fc8bc231ed8265e11fd1a0eda3d9615 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/529.txt @@ -0,0 +1 @@ +in recent years, the research field of neural combinatorial optimization (nco) has developed a veritable zoo of methods to solve the vehicle routing problem (vrp) and its variants. the general motivation for nco is two-fold: (i) finding reasonably good solutions faster than traditional approaches and (ii) saving development effort and hand-crafted engineering by learning a parametrized policy to trade off computational complexity and optimality in solving combinatorial optimization (co) problems . the recent surge of established approaches, which often share many architectural and conceptual similarities, has led to a need for a unified evaluation protocol. so far, the evaluation of runtime efficiency in nco was based on a method's total runtime, which favors neural approaches, since they often use parallel batch processing to quickly solve large numbers of problems on the gpu compared to or solvers, which operate on a per-instance level (figure 1: % gap to bks over the cumulative running time for the first uniformly distributed instance of size 100). such approaches could also be parallelized over batches, e.g. by running them on different cores of a multi-cpu machine, however this comparison has not been considered in the literature so far. accordingly, the comparison between those methods is not consistent. generally, comparing total run-times, even on a per-instance level, to evaluate efficiency is not recommended, as it completely ignores the point in time when the final solution of a local search (ls) has been found along the search trajectory. in fact, the operations research (or) literature has developed strategies to improve considerably on the run time complexity issue and brings forth state-of-the-art meta-heuristics, such as hgs-cvrp ( ), that deliver qualitatively strong solutions within milliseconds (see figure 1). thus, to evaluate the overall performance of primal heuristics, i.e. methods that should find good solutions quickly, the solution trajectory over time should be assessed, as has been done in the or field for years (). given these two, currently independently evolving, strands of research for routing problems, we advocate the need for a unified evaluation protocol that allows researchers to truthfully assess their methods against state-of-the-art neural as well as or methods on prevalent benchmarks.
starting with the classic vrp, we propose the routing arena (ra), a benchmark suite that functions as an assessment center for novel methods and a development tool to promote research. the contribution of the ra can be summarized as follows:• data & model pool: the routing arena provides an integrated testing suite to benchmark novel methods on over 15 datasets against nine machine learning-and five or-based meta-heuristic solvers.• the evaluation: we propose a unified evaluation protocol together with a novel evaluation metric, weighted relative average performance (wrap), to evaluate the performance of a method's solution trajectory, by capturing the relative improvement over a simple baseline as well as its gap to a best known solution (bks). wrap aggregates well over instance sets, enabling to better capture the real performance signal across experiments. • fairness and integration: all metric evaluations incorporate hardware information about resources used to perform the benchmark runs in order to neutralize effects on runtime efficiency that stem solely from improved hardware capabilities. thus, evaluation performed in the benchmark suite produces standardized results irrelevant of the machine(s) in use. given these two, currently independently evolving, strings of research for routing problems, we advocate the need for a unified evaluation protocol that allows researchers to truthfully assess their methods against state-of-the-art neural-as well as or methods on prevalent benchmarks.• the evaluation: we propose a unified evaluation protocol together with a novel evaluation metric, weighted relative average performance (wrap), to evaluate the performance of a method's solution trajectory, by capturing the relative improvement over a simple baseline as well as its gap to a best known solution (bks). thus, the main motivation stated in the literature for applying ml to co problems is to learn heuristics or replace parts of heuristics with learned components with the goal of reaching either better overall solutions or high quality solutions faster than traditional handcrafted algorithms on the problem distribution. in a first instance, the routing arena focuses on the capacitated vrp (cvrp), since it is one of the most common problems in nco and among the most well studied problems in or, where it is also used as a testbed for new ideas, providing high quality solvers to compare against.recently, the x-set appeared increasingly in the nco field, which, previously, has predominantly focused its evaluation on the instance set provided in, which consists of problems with uniformly sampled customer-positions in the unit square and uniformly distributed demands for the problem sizes of 20, 50 and 100 and do not come with the documentation of a bks for each instance, as commonly the solution qualities are averaged or only sub-samples or newly generated samples from the same distribution are compared. since handling different sized instances within a batch often requires not insignificant extra implementation effort, some works (,) adapt the benchmark set x inby generating smaller equal-sized problem instances from the documented distributions to incorporate this more challenging set in their experimental analysis.the ml methods (first nine columns) for solving the cvrp are mainly evaluated on the uniformdataset (we focus on the problem size of 100 here), while only the most recent neural ls approaches are evaluated in parts on the x set. 
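to illustrate the any-time evaluation idea behind wrap, the sketch below computes a running primal gap to the bks and averages it over the time budget; the exact wrap normalisation against a baseline solver and against hardware factors is omitted, and all names are illustrative assumptions rather than the benchmark's actual code.

```python
# minimal sketch of an any-time evaluation of a solver trajectory: the running relative gap
# to the best known solution (bks) is averaged over the time budget, in the spirit of the
# primal-integral / wrap idea described above. field names and the simple averaging are
# assumptions; the real wrap metric additionally normalises against a baseline solver.
from dataclasses import dataclass
from typing import List, Tuple

@dataclass
class Trajectory:
    # (time_in_seconds, objective_value) for every new incumbent, sorted by time
    incumbents: List[Tuple[float, float]]

def primal_gap(cost: float, bks: float) -> float:
    return min(1.0, abs(cost - bks) / abs(bks)) if bks != 0 else 1.0

def average_gap(traj: Trajectory, bks: float, t_max: float, steps: int = 1000) -> float:
    """average primal gap over [0, t_max]; the gap counts as 1 before the first incumbent."""
    total = 0.0
    for i in range(steps):
        t = (i + 0.5) * t_max / steps
        found = [c for (ts, c) in traj.incumbents if ts <= t]
        total += primal_gap(min(found), bks) if found else 1.0
    return total / steps

# example: a solver that reaches the bks after 3 seconds scores better than one reaching
# the same final value only at the end of the budget, even though their final gaps match.
```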
as the main goal of nco approaches is to find sufficiently good results in a reasonable amount of time (), the prevailing evaluation protocol suffers from some essential flaws: (1) the total runtime of a method on a set of instances is too coarse and masks how effectively the model solves a given instance. (2) without indication of a per-instance time limit the comparison, especially to or local search solvers that have a high pre-defined maximum number of iterations, is ill-posed, since they often find their final issued solution within milliseconds for "simple" problem instances. to improve on the above flaws in evaluation, the routing arena proposes two protocols attributed to different problem settings for co problems; one that correctly compares solution quality performances and another to address a method's effectiveness, both of which will be discussed in section 5. all other benchmark sets are available for individual evaluation except for the duration-constrained cvrp set in and the very large instance set provided by , which are left to be included in upcoming versions of the routing arena. hence, for the best obtained solution value z after terminating with a time budget t_max, we compare methods in terms of the relative percentage gap to a bks with solution value z_bks, i.e. 100 · (z - z_bks) / z_bks. hgs-cvrp demonstrates convincing results with regards to computational efficiency, which is due to the implementation's highly optimized code in c++ and adapted and trimmed-down data structures. the experimental runs for the fixed-budget evaluation and the any-time evaluation are conducted simultaneously, where we give the methods a maximal time budget of t_max = 120 seconds for the unif100 dataset, a limit of t_max = 240 seconds for the xml dataset and a runtime budget of t_max = 2. given a solution time budget t_max ∈ r≥0, the primal gap function p : [0, t_max] → [0, 1] is defined as p(t) := 1 if no incumbent solution has been found up to time t, and p(t) := γ(x(t)) otherwise, where x(t) denotes the incumbent solution at time t. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/53.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/53.txt new file mode 100644 index 0000000000000000000000000000000000000000..b22e73b2a6cf54d57d0e7692d7298844a867f5ac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/53.txt @@ -0,0 +1 @@ +automated temporal planning concerns the synthesis of strategies to reach a desired goal in a system subject to temporal constraints; the problem is specified by providing an initial condition together with the possible actions that can drive the system. over the years, several techniques to solve the temporal planning problem have been presented, e.g. encoding into satisfiability modulo theory (smt) (shin and davis 2005), plan-space planning (frank and jónsson 2003) and decision-epoch planning (do and kambhampati 2003). one technique in particular emerged to deal with temporally-expressive (cushing et al. 2007), action-based temporal planning: forward-chaining temporal planning (fctp) (coles et al. 2009a), embodied in planners such as crikey (coles et al. 2008, 2009b), popf (coles et al. 2010), optic (benton, coles, and coles 2012), vhpop (younes and simmons 2003) and tamer (valentini, micheli, and cimatti 2020).
the basic idea behind fctp is to use forward heuristic search to incrementally construct a causally-sound interleaving of events and to check the temporal feasibility of any plan prefix using scheduling techniques, where the timing of each event is encoded as a symbolic variable.when the planning problem at hand does not contain numeric variables or continuous change (i.e. we are dealing with pure temporal planning), the most efficient way to implement an fctp schema is to use simple temporal networks (stns) (dechter, meiri, and pearl 1991) to represent and check the consistency of the symbolic scheduling problems. stns have been widely studied and several algorithms to incrementally add and remove constraints have been presented (cesta and oddi 1996;ramalingam and reps 1996;gerevini, perini, and ricci 1996;coles et al. 2009a); moreover, stns can be easily encoded in linear programming (lp) or in smt. however, all these algorithms focus on how to efficiently solve, propagate and maintain a single stn; instead, in an fctp schema, we have one stn associated to each search state, with minimal differences between the stn of a state and the one of its predecessor in the search (usually, each search step either introduces a new time-point in the stn or adds precedence constraints).in this paper, we present a novel data structure to efficiently represent, maintain and solve stns in an fctp schema. our data structure, called δ-stn, is persistent (okasaki 1999) and explicitly avoids the support for retracting constraints (as this is never used in the fctp schema) and efficiently saves memory, by maintaining references to stns of parent nodes in the search tree. effectively, we identify the set of operations needed for an stn to be employed in an fctp schema, we show how to implement these in δ-stn and we show that δ-stn is consistently superior in terms of both solving time and memory usage to the other approaches and algorithms in the literature. 2009a); moreover, stns can be easily encoded in linear programming (lp) or in smt. effectively, we identify the set of operations needed for an stn to be employed in an fctp schema, we show how to implement these in δ-stn and we show that δ-stn is consistently superior in terms of both solving time and memory usage to the other approaches and algorithms in the literature. an stn is a pair t, c where t is a set of time points and c is a set of constraints of the form x-y ≤ k with x, y ∈ t and k ∈ q. 2001), over the "distance graph" (dg). the dg of a given stn is the graph having one node for each time point, and an edge x, y with weight k for each constraint y -x ≤ k in the stn. in the latter case, we construct all the causally-sound children and for each of them we create an stn inheriting all the constraints from the parent stn (line 9). in fact, we represent the set of temporal constraints in a δ-stn as an associative container (implemented with a sorted vector to save space and retain logarithmic access time), which maps a time point to a persistent list of pairs of a time point and a bound. the second key idea stems from the observation that for the incremental bellman-ford checking algorithm to work(wang et al. essentially, the constraints of a δ-stn stn are represented as a pointer (stn. 
in this way, when an stn is copied to construct a descendant state in fctp, the constraints are not copied in memory, we simply copy the associative map retaining pointers to the δ-neighbors list of the ancestor and new constraints are added as new heads of such list (see line 4 of algorithm 3). the copystn method, also creates a new δ-stn, but this time the constraints of the parent δ-stn are inherited by shallow copying the p. in this way, subsequent calls updating the consistency or the distances will not affect the parent, because the δ-neighbors structures are persistent, but parent constraints are still reachable and new constraints can be added on top of the parent constraints. we first extend the distances and constraints maps, if needed, by setting the new time points to distance 0 (because an unconstrained timepoint can always be set to 0 in a consistent model) and the constraints to empty lists if not already present. however, we limit this problem by maintaining a partial ordering property on the list of neighbors of each time point x: if there are two elements y, k and y, k ′ in stn.in this section, we experimentally compare the merits of δ-stn against several alternative techniques: linear programming using the clp (johnjforrest et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/530.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/530.txt new file mode 100644 index 0000000000000000000000000000000000000000..cac6a6627fcc369ef5dfe96f5fd8cb10d4aa81e9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/530.txt @@ -0,0 +1 @@ +there has been a great deal of recent interest in binarized neural networks (bnns) (hubara et al. 2016;courbariaux et al. 2016;yuan and agaian 2021), due to their impressive statistical performance (rastegari et al. 2016, e.g.), the ease of distributing the computation (hubara et al. 2016, e.g.), and especially their explainability. this latter property, which is rather rarely encountered in other types of neural networks, stems precisely from the binary representation of the outputs of activation functions of the network, which can be seen as logical rules. this explainability is increasingly mandated by regulation of artificial intelligence, including the general data protection regulation and the ai act in the european union, and the blueprint for an ai bill of rights pioneered by the office of science and technology policy of the white house. the training of bnns typically utilizes the straight-through-estimator (ste) (courbariaux, bengio, and david 2015;courbariaux et al. 2016;rastegari et al. 2016;zhou et al. 2016;lin, zhao, and pan 2017;bulat and tzimiropoulos 2017;cai et al. 2017;xiang, qian, and yu 2017), where the weight updates in backpropagation unfortunately (alizadeh et al. 2018) do not correspond to subgradients of the forward paths. this can lead to poor stationary points (yin et al. 2018), and thus poor explanations.here, we draw a new relationship between binarized neural networks and so-called tame geometry (van den dries 1998) to address this challenge. we introduce a certain reformulation of the training of bnn, which allows us to make use of the results of implicit differentiation and non-smooth optimization when training the bnns (davis et al. 2020;bolte and pauwels 2021;bolte et al. 2021bolte et al. 
, 2022) and, eventually, to obtain weight updates in the back-propagation that do correspond to subgradients of the forward paths in common software frameworks built around automated differentiation, such as tensorflow or pytorch. this builds on a long history of work on tame topology and o-minimal structures (grothendieck 1997; van den dries 1998; kurdyka 1998; kurdyka, mostowski, and parusinski 2000; fornasiero and servi 2008; fornasiero 2010; kawakami et al. 2012; fornasiero 2013; fujita 2023, e.g.), long studied in topology, logic, and functional analysis. our reformulation proceeds as follows: in theory, the training of bnns can be cast as a mixed-integer program (mip). we formulate its sub-additive dual, wherein we leverage the insight that conic mips admit a strong dual in terms of non-decreasing subadditive functions. we show that this dual problem is tame, or definable in an o-minimal structure. this, in turn, makes it possible to use powerful methods from non-smooth optimization when training the bnn, such as a certain generalized derivative of (bolte and pauwels 2021) that comes equipped with a chain rule. thus, one can use backpropagation, as usual in the training of neural networks. in the process, we establish a broader class of nice mips that admit such a tame reformulation. a mip is nice if its feasible set is compact, and the graph of the objective function has only a finite number of non-differentiable points. this class could be of independent interest, as it may contain a number of other problems, such as learning causal graphs (chen, dash, and gao 2021), optimal decision trees (nemecek, pevny, and marecek 2023; nemecek et al. 2023), or certain problems in symbolic regression (austel et al. 2020; kim, leyffer, and balaprakash 2023). we hope that this could bring closer symbolic approaches, which can often be cast as mips, and approaches based on neural networks and backpropagation. in an example bnn with layer sizes (2, 3, 3, 2), the input vector x = (x_1, x_2) takes values in r^2, while the activation functions, σ, in the following layers compress this to lie in the set {0, 1}^{l_ℓ}, for ℓ > 0. we formulate its sub-additive dual, wherein we leverage the insight that conic mips admit a strong dual in terms of non-decreasing subadditive functions. we discuss how the bnn can be recast as a mip, and thus, by strong duality, how training the bnn relates to a maximization problem over a set of subadditive functions. however, the precise form of the corresponding mip depends on the nature of the loss function. the loss function l can be chosen in different ways; for example the 0-1 loss function l(ŷ, y) = i_{ŷ,y}, where i is the indicator function, or the square loss l(ŷ, y) = ∥ŷ - y∥^2. the set of subadditive functions that are non-decreasing with respect to a regular cone k ⊆ r^m is denoted f_k, and for f ∈ f_k we further define f̄(x) := lim sup_{δ→0^+} f(δx). in general, the subadditive dual (7) is a weak dual to the primal conic mip (6), where any dual feasible solution provides a lower bound for the optimal value of the primal (zȃlinescu 2011; ben-tal and nemirovski 2001; morán r, dey, and vielma 2012).
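since the primal (6) and dual (7) are referenced but not reproduced here, the following latex sketch records the standard conic mip / subadditive dual pair in the form used in the cited literature (morán r, dey, and vielma 2012); the exact numbering, indexing and variable split in the paper may differ.

```latex
% sketch of a primal conic mip and its subadditive dual, following the cited literature;
% I denotes the index set of integer-constrained variables and A_j the j-th column of A.
\begin{align*}
  \text{(primal)}\quad & z^* = \inf\Big\{\, c^\top x \;:\; Ax \succeq_K b,\;
      x_j \in \mathbb{Z}_+ \ (j \in I),\; x_j \in \mathbb{R}_+ \ (j \notin I) \,\Big\},\\
  \text{(dual)}\quad & \rho^* = \sup\Big\{\, f(b) \;:\; f \in \mathcal{F}_K,\; f(0) = 0,\;
      f(A_j) \le c_j \ (j \in I),\; \bar f(A_j) \le c_j \ (j \notin I) \,\Big\},
\end{align*}
% weak duality: any dual-feasible f satisfies f(b) <= c^T x for every primal-feasible x.
```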
for example, if f, g are two non-decreasing subadditive functions on r m , then the following hold:.• if further f is non-negative and g positive on the positive quadrant r m + then f (x)g(x) is subadditive on r m + .definition 3 (nt) for a family (a k ) k∈n of subsets of r n we say that nt(a k ) holds, if for every bounded/convergent sequence {a j } in r n some a k contains a translate of a subsequence of {a j }. typically, we refer to a set included in an o-minimal structure as being definable in that structure, and similarly, a function, f : r m → r n , is called definable in an o-minimal structure whenever its corresponding graph, γ(f ) = {(x, y) | f (x) = y} ⊆ r m×n , is definable. the research in o-minimal structures really took off in the middle of the nineties, after wilkie(wilkie 1996) proved that we can add the graph of the real exponential function, x → e x , to r semialg.definition 6 (locally o-minimal structure) a definably complete structure k extending an ordered field is locally o-minimal if, for every definable function f : k → k, the sign of f is eventually constant. furthermore, this dual is locally o-minimal by considering the no-trumps theorem (theorem 3) together with the fact that f (x) is non-decreasing and subadditive.the subadditive dual problem is obviously an infinitedimensional optimization problem over the whole space of subadditive functions f r+ . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/531.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/531.txt new file mode 100644 index 0000000000000000000000000000000000000000..a9dc97da300625f97fde460de2083b94a06e36d0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/531.txt @@ -0,0 +1 @@ +the rise in the number of on-chip cores has led to more frequent development of parallel code. nevertheless, to unleash the capabilities of multi-core systems, the need for developing parallel programs will continue to grow. however, developing parallel programs is not a trivial task. the communication among cores, effective data sharing among the threads, synchronization, and many other factors need to be considered while crafting parallel programs, which makes the process of developing parallel programs far more complex than serial ones.hpc communities have published different tools and programming models to ease the process of moving from serial to parallel code. one of the well-established parallel programming models is openmp, which is a directive-based programming model that allows users to parallelize sequential code with minimal changes. most modern compilers recognize and support parallelization through openmp constructs. however, even with openmp, developers must carefully decide which clauses or directives they need to use. inappropriate usage of clauses can cause concurrency bugs such as data race or decrease performance. therefore, tools such as discopop (li et al., 2015) were developed that can automatically discover parallelism opportunities and insert openmp constructs. however, due to the conservativeness of these tools, they sometimes miss potential parallelization opportunities.nowadays, with the advancements in deep learning, various deep learning approaches have been proposed to identify parallelism and advise openmp constructs (chen et al., 2023a;harel et al., 2023;shen et al., 2023;kadosh et al., 2023). in this paper, we propose autoparllm. 
an approach that leverages large language models (llms) and graph neural networks (gnns) to produce parallel code. autoparllm has two main modules: parallelism discovery and parallel code generation. autoparllm uses graph neural network (gnn) to model flow-aware information, i.e., data-, control-flow, and call graphs. once a parallelism opportunity is identified, a gnn-guided prompt is engineered using gnn's results to generate parallel code with the help of an llm. due to the nature of openmp constructs, sometimes the order is important in the predictions, and sometimes not. to better measure the quality of generated openmp constructs we provide a new metric in this paper called ompscore. our experimental results show that the enhanced designed prompt helps to unleash the potential power of llms in generating parallel code out of serial code.in summary, our paper provides the following key contributions:• we propose a novel approach, called autoparllm, leveraging gnns to guide the automatic parallelization of code using large language models.• to the best of our knowledge, autoparllm is the first automatic parallelization tool built on top of gnns and llms, having the benefits of both worlds.• we evaluate the proposed approach on well-established benchmarks such as nas parallel benchmark and rodinia benchmark.• we propose a new evaluation measure called ompscore to assess the quality of generated openmp code.the format of the paper is as follows: in the next section, we will discuss the related works. in section 3, motivation examples will be discussed. followed by section 4, where our approach is explained in detail along with the ompscore. section 5 presents the experimental results. section 6 discusses ablation study and finally, section 7 concludes the paper.5, gpt-4, and codegen-16b were prompted about whether this loop could be parallelized using openmp or not, all three llms wrongly parallelized the loop by adding the #pragma omp parallel for clause except for codellama-34b. the loop in listing 3 is a parallel loop that is recommended to be parallelized using the private clause, as the variable i needs to be private to each thread. however, all four llms identify the parallel loop but do not add the private clause for variable i, and when we checked the ground truth value for this loop, we found that the developers added the private clause to variable i. an approach that leverages graph neural network to learn the flow-aware characteristics of the programs, such as control flow, data flow, and call flow, to guide llms to better generate parallel code by constructing a gnn-guided omp prompt. the omp serial dataset contains around 6k compilable c source files that are crawled from github and well-known benchmarks like polybench(pouchet & yuki, 2017), starbench(andersch et al. for training "private" clause detection model, two classes are created: private (2160 files) and non-private (contains 2000 files, 50% of those are taken randomly from "reduction" and 50% of those are randomly taken from "non-parallel"). similarly, for training "reduction" clause detection model, two classes are created: reduction (2100 files) and non-reduction (contains 2000 files, 50% of those are taken randomly from "private" and 50% of those are randomly taken from "non-parallel"). finally, the loop is passed to the "reduction" clause detection model, and similarly, if it classifies the loop as a "reduction" loop, the "reduction" clause is also added to the omp prompt. 
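the clause-detection-to-prompt flow described above can be summarised in a short python sketch; the detector callables and the prompt wording below are placeholders for illustration, not the actual autoparllm components.

```python
# minimal sketch of the gnn-guided omp prompt flow described above: two binary detectors
# decide whether the loop needs a "private" and/or a "reduction" clause, and the prompt
# handed to the llm is augmented accordingly. `private_model`, `reduction_model`, and the
# prompt wording are placeholders, not the actual autoparllm components.
from typing import Callable

def build_omp_prompt(loop_source: str,
                     private_model: Callable[[str], bool],
                     reduction_model: Callable[[str], bool]) -> str:
    hints = []
    if private_model(loop_source):
        hints.append("use a private clause for loop-local variables")
    if reduction_model(loop_source):
        hints.append("use a reduction clause for the accumulation variable")
    hint_text = "; ".join(hints) if hints else "no extra clauses appear to be needed"
    # the llm receives the serial loop together with the structure-derived hints
    return (f"parallelize the following c loop with openmp ({hint_text}):\n{loop_source}")
```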
of those 90 loops, 58 are parallel, with 56 loops having "private" clause and 2 loops having "reduction" clause. also, autoparllm correctly detects 52 out of 56 loops with "private" clause, and it correctly detected all two loops with "reduction" clause. for all other seven applications, we replace the sequential loops in the testing set with the parallel loops generated using both regular llms and autoparllm augmented llms and then execute the application to measure execution time. after generating the parallel code, we evaluate the quality of the codes generated by both basic prompt llms and autoparllm in terms of the metrics score as well as our proposed ompscore. we can see the latter code generated with autoparllm is properly decorated with "reduction" clause however without autoparllm both llms fail to add the "reduction" clause. the private loop in listing 3 is also properly parallelized using the "private" clause by all four llms after autoparllm guidance is applied as can be seen in listing 6 and 7 which also matches with the clause added by developers. the key idea of autoparllm is that it leverages gnns to learn control flow and data flow and steers the code generation of llms to add appropriate and necessary openmp constructs through a specially designed prompt called gnn-guided omp prompt. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/532.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/532.txt new file mode 100644 index 0000000000000000000000000000000000000000..1907c4af1473e8c8bb52d71702a294ad22a22aad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/532.txt @@ -0,0 +1 @@ +bayesian networks provide a compact and expressive framework for modeling causality by utilizing joint probability distributions (jpds) and enabling causal inference. this allows for a systematic representation of causal relationships between variables and facilitates reasoning and decision-making under uncertainty. they consist of a directed acyclic graph (dag) where nodes represent random variables and conditional probability distributions are assigned to variables based on their parents in the graph , . however, learning the graph structure of bns from data poses significant challenges to computation and data efficiency, even when the data is complete , , . in the field of structure learning for bayesian networks, existing algorithms can be broadly categorized into two approaches: constraint learning and search & score methods. the constraint learning approach, outlined in the work by spirtes, glymour, and scheines (sgs) , involves performing a series of conditional independence tests on the available data. based on the results of these tests, a bn is constructed that aligns with the observed dependencies among variables. in contrast, the search & score approach, introduced by cooper and herskovits , focuses on finding a graph structure that maximizes a chosen scoring metric. this metric acts as a measure of the goodness of fit between the bn's structure and the provided data. the search & score approach employs an iterative search process to explore and evaluate different graph structures based on the chosen scoring criterion . 
both constraint learning and search & score methods have their strengths and limitations.the constraint learning approach leverages statistical tests to directly identify the conditional independence relationships among variables, which allows for the construction of a bn that adheres to the observed dependencies in the data. on the other hand, search & score methods provide flexibility in selecting a scoring metric that captures the desired properties of the bn. by searching through the space of possible graph structures, these methods aim to find the structure that optimally fits the given data according to the chosen scoring criterion , . the search & score algorithms are known to be more computationally expensive compared to constraint-based learning algorithms, especially when dealing with large feature spaces . as a result, these algorithms can become practically infeasible for such scenarios. they use greedy strategies to explore the space of directed acyclic graphs (dags) and often rely on prior knowledge or assumptions to guide the search process. the selection of the appropriate approach depends on various factors, including the characteristics of the available data, the complexity of the bn structure, and the specific goals of the analysis. researchers and practitioners often consider the trade-offs between time efficiency, accuracy in capturing dependencies, and the interpretability of the resulting bn structure when choosing among structure learning algorithms.both the constraint learning and search & score approaches for structure learning in bns are known to be computationally challenging. it has been proven that determining the optimal structure is an np-hard problem , meaning that finding an exact solution becomes increasingly difficult as the number of variables grows. the best exact methods available exhibit exponential time complexity, which limits their applicability to small-scale problems with around 30 variables , , . to handle larger networks, approximate procedures are often employed. however, these approximate methods can encounter difficulties in getting trapped in local maxima, leading to suboptimal structures. despite this challenge, the quality of the learned structure is crucial for the accuracy of the resulting bn model. if the dependencies among variables are not correctly captured during the structure learning process, the estimated distribution may deviate significantly from the true distribution. researchers and practitioners in the field continue to develop and refine algorithms that strike a balance between time efficiency and accuracy in structure learning. various techniques, such as heuristics, optimization algorithms, and parallel computing, are being explored to improve the scalability and effectiveness of structure learning algorithms for bns.this paper introduces two novel algorithms for learning the structure of bayesian networks from data, both falling under the category of constraint-based learning methods. these algorithms utilize a series of local structure inductions, where conditional independence (ci) tests (by using χ 2 test) are employed to determine the presence or absence of connections between pairs of nodes. to minimize the number of ci tests required, the algorithms utilize topology information inferred from previously conducted ci tests. this information is used to prioritize and sort future candidate ci tests. 
tests that are more likely to reveal connections that should not exist are given higher priority, thereby avoiding unnecessary statistical tests. additionally, considering that the ci tests are employed across various constraint-based learning algorithms for both inductions of markov blankets and bayesian networks, the proposed techniques can also be employed and may assist in accelerating the induction of the bn, as well as in the development of uni-and multi-dimensional bn classifiers. the effectiveness and efficiency of both algorithms are demonstrated through experiments conducted on synthetic and classical networks. the results highlight the algorithms' ability to accurately learn the structure of bns while minimizing computational overhead. the remaining content is organized as follows: section ii, notations, basic concepts of a bayesian network, and its interpretability with causal reasoning are presented. we introduce these novel heuristics, while the specifications of the algorithms and the soundness of our approach with proofs are presented in sections iii and iv, respectively. experimental studies, prospective discussions, and the conclusion are presented in sections v, and vi respectively. x), and their assignments by lowercase (e. g=(v, a) is a dag, g in which v corresponds to u, and an arc a contains the connectivity information among v. if there exists a directed path from node x to node y , we say that x is an ancestor of y and y is a descendant of x. nodes x, y , and z form a v-structure if node z has two incoming arcs from x and y , but x and y are not adjacent. the notation x ⊥ y | z represents the conditional independence between x and y given z, while x ̸ ⊥ y | z represents their conditional dependency. there exists no set z such that x ⊥ y | z and x, y ̸ ∈ z. during the induction process, we employ a "backward selection" approach, assuming that a node x is connected to every other node y ∈ v \ x and then eliminating false connections based on the presence of a d-separation (ds(x, y )). during the search for the d-separator of x and m , the set {z} is selected first due to its higher rdsa z x value. since {z} indeed serves as the d-separator for x and m , the remaining ci tests can be disregarded, and rdsa z x is incremented to 2. when lspc is called on node t , it assumes t is connected to all nodes x ∈ v, excluding pairs like t -x that are stored in a del . when a d-separator s is found (line 11), the following steps are performed: 1) x is excluded from the neighbor set v canp c (line 12); 2) x -y is removed from both a can and a del (line 13 and 14); 3) the rdsa of each y ∈ s is updated based on equation 2 (line 16); 4) the analysis of node x is terminated (line 17), and the next node in v canp c is processed.assuming that a true edge x -y is included in z ind implies that there exists a set of variables on which the conditional independence between x and y holds.according to the commutative law of probability, if variables x and y are independent (x ⊥ y ), then it follows that y is also independent of x (y ⊥ x).during the execution of lspc(x), the set v canp c is constructed to include all nodes connected to x, excluding pairs like x -y that are stored in a del . specifically, the time required is proportional to the sum of the cardinality of the set x, y and the set z, which can be expressed as |x, y +z| = 2+|z|. 
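a minimal python sketch of the constraint-based step described above, a chi-squared conditional independence test driving backward edge elimination, is given below; the rdsa-based ordering of candidate tests is omitted for brevity and all names are illustrative assumptions.

```python
# illustrative sketch: a chi-squared test of conditional independence on discrete data, used
# to delete an edge x - y as soon as some conditioning set z d-separates them. the priority
# ordering of candidate tests proposed in the paper is omitted; names are placeholders.
from itertools import combinations
import pandas as pd
from scipy.stats import chi2, chi2_contingency

def ci_test(data: pd.DataFrame, x: str, y: str, z: tuple, alpha: float = 0.05) -> bool:
    """return True if x is judged conditionally independent of y given z."""
    if not z:
        _, p, _, _ = chi2_contingency(pd.crosstab(data[x], data[y]))
        return p > alpha
    # test within every configuration of z and pool the statistics naively
    stat, dof = 0.0, 0
    for _, group in data.groupby(list(z)):
        table = pd.crosstab(group[x], group[y])
        if table.shape[0] > 1 and table.shape[1] > 1:
            s, _, d, _ = chi2_contingency(table)
            stat, dof = stat + s, dof + d
    return dof == 0 or chi2.sf(stat, dof) > alpha

def prune_edges(data: pd.DataFrame, x: str, max_cond: int = 2) -> set:
    """backward selection: start from x connected to every other node, drop separable pairs."""
    neighbours = set(data.columns) - {x}
    for y in list(neighbours):
        others = neighbours - {y}
        for k in range(max_cond + 1):
            if any(ci_test(data, x, y, z) for z in combinations(others, k)):
                neighbours.discard(y)   # a d-separator was found, so remove the edge
                break
    return neighbours
```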
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/533.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/533.txt new file mode 100644 index 0000000000000000000000000000000000000000..519aa679f86b4c7f8bd1be557a523246d2d11840 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/533.txt @@ -0,0 +1 @@ +deep learning has sparked success in a wide variety of previously challenging reinforcement learning (rl) domains (mnih et al., 2015a;jumper et al., 2021;fawzi et al., 2022). although its applications seem endless, some fundamental problems still remain unsolved. these include the availability of data, the lack of model interpretability (garnelo et al., 2016;peters et al., 2017), the susceptibility of learned policies to changes in the input distribution, and the challenge to leverage expert data. in this paper, we combine recent advances in the latter two open problems. domain adaptation, a form of transfer learning, is the ability of rl agents to adapt to changes in the input distribution (bengio et al., 2013). in such a scenario, an rl agent is trained on a particular input distribution (source domain) and is then placed in a setting where the input distribution is modified (target domain). in many real-world applications, data from the target domain may be expensive, difficult to obtain, or not available at all (finn et al., 2016). however, learning a policy by simply leveraging information from the source domain (called zero-shot learning) can lead to over-fitting to the input distribution, which results in poor adaptation performance (lake et al., 2016). therefore, it is crucial to learn a good low-dimensional state representation that is not task or domain-specific. there is a plethora of work that tries to learn a low-dimensional factorized state representation, which is called disentangled representation learning (schmidhuber, 1992;cohen & welling, 2014;kulkarni et al., 2015;kingma & welling, 2013;laskin et al., 2020;xing et al., 2021). a disentangled representation is defined as a factorized latent representation where either a single factor or a group of factors is responsible for the variation observed while it is invariant to changes in other factors (bengio et al., 2013).imitation learning (il) is the problem of learning to perform a task from expert trajectories. approaches to il can broadly be classified into two categories: 1) behavioral cloning (bc) (ross & bagnell, 2010) or 2) inverse reinforcement learning (irl) (ng & russell, 2000). bc is conceptually simple as it formalizes the il problem as a supervised learning problem where the policy is a learned map between input states and output actions. this often requires a large number of trajectories (pomerleau, 1988) and small errors compound quickly. a natural extension is to frame the il problem as an inverse rl (irl) problem: first, learn a reward function under which the expert's trajectories are optimal and from which a learned imitation policy can be trained (ho & ermon, 2016). much of the difficulty with this approach however relies on the min-max problem formulation over reward and policy. instead, one can also learn a single model for the q-value which implicitly defines both a reward and a policy function (garg et al., 2021).a real-world application, where both the need for domain adaptation and imitation learning is evident, is the control of a robot arm trying to pick up an object. 
firstly, training in the real world is slow and expensive, as the robot might break and the repairs are costly. second, it is hard to define a good reward function, but generating a few expert trajectories manually can be trivial. using imitation learning (il), one can learn an optimal policy given the expert demonstrations in a simulation and then transfer it to the real world. there is some previous work that tries to combine il and zero-shot transfer learning. however, most of these approaches rely either on the inferior behavioral cloning approach to il (young et al., 2020; jang et al., 2022) or on the complicated adversarial min-max approach to irl (google & tompson, 2020). motivated by those deficiencies, we propose to use the approach of darla (higgins et al., 2017) to learn a disentangled latent space representation of each state to adapt to changes in the input distribution. using such a latent representation, we aim to solve the problem of reward function definition by applying the il approach iq-learn (garg et al., 2021) to learn an optimal policy given expert demonstrations. using a standard rl algorithm, the agent is tasked to learn a source policy π_s(a | z_s) based on the latent factors z_s in the source domain s. there, the agent tries to collect rewards using as input state space the latent representations z_t ∼ p_ϕ(· | x_t) from the target domain t. iq-learn (inverse soft-q learning) proposes an elegant solution to the max-min problem of irl by learning a single q-value from which both the reward and the policy function can be deduced. given an expert policy π_e, the inverse reinforcement learning (irl) problem tries to find a reward function r that assigns a low cost to the expert policy and a high cost to any other policy π. the compared baselines include 3) a ppo-agent trained on the target environment t (ppo-target), and 4) an agent trained on the source environment s and directly transferred to the target environment without using the annealedvae. by comparing the random agent with the ppo-transfer agent, we see that without the annealedvae, the transfer of a ppo policy to a previously unobserved target environment performs barely better than a random policy. this shows that learning a disentangled representation of the feature space is extremely challenging and not possible for arbitrary shifts in the input distribution, as the super mario environment showed.
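a toy numpy sketch of the relationship just described, under assumed discrete states and actions (not the iq-learn implementation): a single learned q-table implicitly defines a soft policy and, via the inverse soft bellman operator, a reward.

import numpy as np

def soft_policy(q, alpha=1.0):
    # pi(a|s) proportional to exp(q(s,a)/alpha)
    z = q / alpha
    z = z - z.max(axis=1, keepdims=True)         # numerical stability
    p = np.exp(z)
    return p / p.sum(axis=1, keepdims=True)

def implied_reward(q, s, a, s_next, gamma=0.99, alpha=1.0):
    # r(s,a) = q(s,a) - gamma * v(s'), with v(s') = alpha * logsumexp(q(s',.)/alpha)
    v_next = alpha * np.log(np.exp(q[s_next] / alpha).sum())
    return q[s, a] - gamma * v_next

q = np.random.randn(5, 3)                        # hypothetical q-values: 5 states, 3 actions
pi = soft_policy(q)                              # policy recovered from q
r = implied_reward(q, s=0, a=1, s_next=2)        # reward recovered from the same q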
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/534.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/534.txt new file mode 100644 index 0000000000000000000000000000000000000000..4291e8007bd22075384d186624d2be7a22b3f16b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/534.txt @@ -0,0 +1 @@ +neural network pruning originates in the early 90s but gained notable popularity only after the rise of deep learning, which featured deeper and wider networks over time. following such dimensional escalation, pruning turned out to be a valuable tool to support the deployment of deep networks in resource-constrained scenarios, where only smaller networks are suited to fit the needs. when operating in such scenarios, pruning a network (i.e., reducing its size by zeroing parameters) can produce, if properly enhanced, highly sparse yet reasonably good systems, both in terms of generalization and robustness to adversarial attacks. learning algorithms, in fact, are susceptible to adversarial attacks, where carefully crafted input samples are conceived to fool the classification made by the algorithm. following the rise of deep learning, both pruning and adversarial machine learning evolved and ultimately crossed paths. neural network pruning is mainly regarded as zeroing parameters (weights and, less frequently, biases) or directly removing network structures of an already pre-trained network, which then needs to be fine-tuned accordingly. depending on the kind of pruning structure, criterion, rate, and pipeline, many approaches combining sparsity and robustness can be found in the literature, all ultimately trying to obtain a sparse and robust network. we call these methods adversarial pruning (ap) methods. among the most prominent ap works, several contributions have reached notable results in terms of robustness at considerably high sparsity and with great technique diversity. one line of work proposed an adversarially trained model compression technique (atmc) consisting of a unified framework integrating the robustness objective (obtained with adversarial training) into model compression (in the form of pruning, factorization and quantization). hydra was proposed as an optimization mechanism where the pruning technique is aware of the robust training goal, thus pruning connections based on optimizing the robustness objective. finally, other authors leveraged the alternating direction method of multipliers (admm) to build a concurrent adversarial training and pruning framework. all proposed approaches reported notable accuracy values on clean and adversarial samples. however, there has been a growing concern about the evaluation methods employed to assess robustness, and the research community has thus proposed new frameworks to avoid overestimating robustness. yet, the soa ap networks have not been able to address robustness in such a standardized way. in this work, we use the ensemble of attacks proposed in autoattack to re-evaluate the ap robustness and offer a much more thorough evaluation. our contribution in sect. 3.1 shows that the soa ap papers are overestimating the robustness of their networks. then, focusing on how the pruned models differ from their dense counterparts, in sect. 3.2 we show that the samples misclassified (or occasionally corrected) by the pruned model lie in the proximity of the decision boundary.
it follows that the at formulation can be integrated into most of the pruning pipeline steps, either when pre-training the model, pruning, or fine-tuning, leading over the years to the design of a large and diverse set of ap methods, hence models that, even if pruned, are designed to retain adversarial robustness. however, we observe that prominent ap methods have not yet been able to properly keep up with a standardized and extensive robustness evaluation such as the one proposed with autoattack, thus limiting the reliability of their robustness evaluation. hence, we additionally focused on single samples to understand what changes in the network after pruning make some samples misclassified or corrected by the pruned model, albeit conversely predicted by the dense version. • s_{1,0} samples are classified correctly by the dense model and wrongly by the pruned model. interestingly, as figure 1 shows, we observe that samples lying in the proximity of the decision boundary (samples on thin ice) are more likely to be missed (or occasionally corrected) by the pruned model with respect to the dense one, hence more likely to be s_{1,0} (or occasionally s_{0,1}) samples. in sect. 3.1, we analyzed the robustness of ap networks measured with autoattack, showing how, in most cases, the methods were overestimating the robustness to adversarial examples, and how this overestimation was more severe in some cases. yet, a still challenging question follows the analysis of pruned models: how does a network's boundary change after pruning? on what samples will it introduce more errors? in a security-related scenario where these ap networks are supposed to operate, it is crucial to understand how the model has changed its predictions and why. since the decision boundary is affected by small changes after pruning, we conjectured that s_{1,0} and s_{0,1} samples were merely a portion of the samples near the decision boundary and thus more likely to be misclassified or corrected after pruning. these findings, overall, suggest that it is possible to estimate from prior information the samples that the pruned model will misclassify with respect to the dense one, implying that to uplift the performance of the pruned model, we might consider a training procedure weighting samples based on their distance to the boundary. other works have tried to characterize the difference between pruned and dense models, focusing on the similarity between the models and their robustness to noise and to out-of-distribution samples. since the distance to the boundary ϵ is equal to 0 for samples misclassified by the dense model (for s_{0,0} and s_{0,1} samples, the minimum distance required to make a sample adversarial, as measured by fmn, is 0 because the class is already predicted wrongly), we still measure the distance to the boundary by imposing the target class as the predicted one and denote it through a negative ϵ.
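a small illustrative sketch of this sample-wise analysis, with purely hypothetical arrays standing in for real predictions and fmn distances (it is not the authors' pipeline): samples are grouped into s_{dense, pruned} categories and the boundary distances of each group are compared.

import numpy as np

def sample_categories(y_true, pred_dense, pred_pruned):
    dense_ok = (pred_dense == y_true).astype(int)
    pruned_ok = (pred_pruned == y_true).astype(int)
    # category (1, 0): correct for the dense model, wrong for the pruned one, and so on
    return {(d, p): np.where((dense_ok == d) & (pruned_ok == p))[0]
            for d in (0, 1) for p in (0, 1)}

rng = np.random.default_rng(0)
y_true = rng.integers(0, 10, size=1000)
pred_dense = np.where(rng.random(1000) < 0.90, y_true, (y_true + 1) % 10)
pred_pruned = np.where(rng.random(1000) < 0.85, y_true, (y_true + 1) % 10)
dist = rng.random(1000)                      # stand-in for per-sample distances to the boundary

groups = sample_categories(y_true, pred_dense, pred_pruned)
for key, idx in groups.items():
    if len(idx):
        print(key, round(dist[idx].mean(), 3))   # compare mean distance across categories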
additionally, we have developed a sample-wise analysis aiming to characterize the difference between dense and pruned robust models, featuring a careful statistical analysis of the samples' distance to the decision boundary to establish and ultimately confirm the validity of this correlation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/535.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/535.txt new file mode 100644 index 0000000000000000000000000000000000000000..478cf410674b1691421a87fb6e151947f52c13bc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/535.txt @@ -0,0 +1 @@ +continual learning, also known as incremental learning, refers to the process of sequentially learning multiple tasks without forgetting previous knowledge. in this setting, catastrophic forgetting is a major challenge for continual learning, where previously learned knowledge is lost when learning new tasks. continual learning has recently gained increasing attention in the field of artificial intelligence. various strategies have been proposed to overcome catastrophic forgetting, including rehearsal-based strategies, regularization-based strategies, and parameter isolation-based strategies. these strategies are mutually orthogonal and can be combined in a specific scenario. among these strategies, rehearsal-based methods have proven to be a simple yet effective approach that uses episodic memory to replay training samples. despite its encouraging success, there are still challenges that need to be addressed, including the issue of overfitting and biased knowledge representation due to knowledge imbalance in episodic memory. a naive but effective solution is increasing the memory size as new samples arrive. however, this approach increases the memory requirement and violates the limited-memory setting of continual learning. to address this issue, we propose a novel replay strategy called manifold expansion replay (maer). maer investigates two factors to improve neural network performance in continual learning settings. firstly, maer views the process of continual learning as a fusion of implicit manifolds represented by knowledge. when the diameters of the manifolds are imbalanced, the larger one will receive more bias while the smaller one will experience forgetting. inspired by this, maer adopts a greedy sampling strategy to manage memory, helping the neural network to learn an unbiased representation of all data. secondly, maer introduces the wasserstein distance as a distillation loss. the wasserstein distance between two distributions is defined as the minimum cost required to match one distribution with another. unlike traditional distance metrics such as euclidean distance, the wasserstein distance considers the underlying structure of the compared distributions, which can help the neural network better fuse knowledge manifolds. we mainly evaluate maer on permuted mnist, rotated mnist, split cifar10, split cifar100, and split tinyimagenet datasets. the extensive ablation studies and experimental results show that maer gains significant performance improvements, outperforming the state of the art in accuracy. our contributions are summarized as follows: • we propose a greedy sampling strategy to balance knowledge by expanding the diameter of the knowledge manifold in episodic memory.
• we propose to distill knowledge using wasserstein distance, which helps neural networks effectively fuse knowledge in continual learning. a) rehearsal-based strategy: the rehearsal-based strategy can be viewed as a review strategy that uses a capacity-limited buffer called episodic memory to replay a portion of the samples from the previous task at each training session. the main idea of data-focused methods is to transfer knowledge from a teacher model to a student model using the knowledge distillation technique, where the teacher model has been trained on previous tasks. typical methods include learning without forgetting (lwf), learning from less (lfl), and dark knowledge distillation with memory consolidation (dmc). a positive value of bwt means that learning a new task benefits the old tasks, while a negative value indicates that learning a new task interferes with the old tasks. if we view the knowledge manifold as a distribution of meta-knowledge, one possible choice for a distance metric is kl divergence, which is commonly used to measure the distance between two distributions. our method, maer, uses the wasserstein distance to measure the distance between two knowledge manifolds. in mathematics, the wasserstein distance is a distance function defined between probability distributions on a given metric space (m, ρ), where ρ(x, y) is a distance function for two instances x and y in the set m. when learning task i, we have a teacher model f_t that has learned the previous i - 1 tasks and a student model f_s responsible for learning task i. during training, we replay samples from the memory buffer m, and in addition to learning to classify these samples accurately, we aim to minimize the wasserstein distance between the knowledge representation learned by f_s and that of the teacher model f_t. intuitively, our sampling method needs to account for two aspects: (1) sample as uniformly as possible to maintain the geometric properties, and (2) the sampled meta-knowledge should span the entire knowledge manifold as much as possible, avoiding bias towards new knowledge and preventing forgetting during learning. maer employs knowledge distillation techniques and introduces the wasserstein distance between the features of the teacher and student models as a distillation loss to integrate old and new knowledge better.
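a minimal numpy sketch of a wasserstein-style distillation loss, assuming batch feature matrices as inputs (it is a simplified stand-in, not the paper's implementation): per feature dimension, the 1-d w1 distance between the empirical teacher and student distributions is the mean absolute difference of their sorted values, averaged over dimensions.

import numpy as np

def w1_distillation_loss(feat_student, feat_teacher):
    # feat_*: (batch, dim) feature matrices; sort each dimension independently
    s = np.sort(feat_student, axis=0)
    t = np.sort(feat_teacher, axis=0)
    return np.mean(np.abs(s - t))

rng = np.random.default_rng(0)
feat_teacher = rng.normal(size=(64, 128))                  # hypothetical teacher features for a replayed batch
feat_student = feat_teacher + 0.1 * rng.normal(size=(64, 128))
loss = w1_distillation_loss(feat_student, feat_teacher)    # added to the classification loss during training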
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/536.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/536.txt new file mode 100644 index 0000000000000000000000000000000000000000..49bd860d3001f32c63420ba8dc086d0a6f88c11e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/536.txt @@ -0,0 +1 @@ +in recent years, there has been significant progress in machine learning (ml) research, with breakthroughs in deep learning, natural language processing, and computer vision. a deep neural network (dnn) is one of the most significant tools of an ml scholar. dnns are constructed from multiple layers that transform the data sequentially via operations such as pooling, convolution, and activation. in most successful dnns, the greater portion of computational resources is consumed by performing convolution. a popular implementation of convolutional layers is expanding the input into a column matrix form (im2col) and then calling a highly tuned general matrix multiplication (gemm) procedure from an existing linear algebra library such as blis or mkl. since hardware-optimized gemm implementations exist for every standard cpu, graphics processing unit (gpu), or digital signal processor (dsp), the im2col approach has been highly successful in dnn frameworks such as caffe, torch and onnx. however, these advances have primarily benefited large corporations and research institutions with access to massive computational resources. the democratization of ai on low-power and edge devices aims to bring the benefits of ai to a wider audience, including small businesses, individual users, and the billions of internet of things (iot) devices. edge devices, such as smartphones, wearables, and iot sensors, are often resource-constrained, with limited processing power, memory, and battery life. one major challenge in deploying ai on edge devices is the size of deep learning models, which can be hundreds of megabytes or even gigabytes. the im2col conversion further increases the memory footprint of the input matrix and reduces data locality. for a convolution with a filter size k, the column matrix is k times larger than the original input tensor. a lot of research effort has been put into applying the gemm routines to smaller intermediate data structures or even to the original input data. to reduce the memory requirements on edge devices and improve performance, researchers have been exploring various techniques, including model compression, network optimization, and hardware acceleration. unstructured pruning can achieve high levels of sparsity but may not lead to actual speedup on hardware, whereas structured pruning can result in more efficient hardware implementations. the sliding window technique replaces gemm with a novel computation kernel that operates on the unmodified input and eradicates the memory bloating problem. kernels of larger width do not fit into the hardware vector and require a special version that operates on multiple hardware vectors, treating them as a single long compound vector. the 2-d sliding window convolution exhibits the same roughly logarithmic speedup with respect to the filter size. as the filter size increases, the throughput of the sliding window convolution kernels approaches the hardware limits.
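a toy im2col sketch for a single-channel 2-d input (hypothetical sizes, stride 1, no padding), illustrating the memory blow-up discussed above: each patch becomes a column, so the column matrix repeats most input elements several times, with the exact factor depending on filter size and stride.

import numpy as np

def im2col(x, k):
    h, w = x.shape
    out_h, out_w = h - k + 1, w - k + 1
    cols = np.empty((k * k, out_h * out_w), dtype=x.dtype)
    for i in range(out_h):
        for j in range(out_w):
            cols[:, i * out_w + j] = x[i:i + k, j:j + k].ravel()
    return cols

x = np.arange(36, dtype=np.float32).reshape(6, 6)
cols = im2col(x, k=3)
kernel = np.ones((3, 3), dtype=np.float32).ravel()
y = (kernel @ cols).reshape(4, 4)            # the convolution expressed as a gemm
print(cols.size / x.size)                    # 4x more memory than the input for this toy case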
it is also interesting to see that the filter size misalignment with the hardware vector length results in similar performance patterns for both the sliding window and gemm kernels. we have measured the performance and throughput of the sliding window convolution kernels. wider adoption of the sliding window sum algorithm could promote ai usage on low-power and low-memory devices, avoiding the expense of specialized hardware. optimized network architectures tend to use small convolution filters, which diminish the advantages of the sliding window convolution. in many cases the hardware accelerators can be repurposed for sliding window algorithms with varying degrees of success, depending on how specialized the hardware is. combining sliding window techniques with optimized network architectures and model compression results in fast and energy-efficient solutions. the benefits of streamlined memory access are less pronounced on gpus, since explicitly controlled on-chip memory hierarchies already make gpus highly efficient in gemm computation. since the accelerators for matrix multiplication are already present in the current generation of hardware and are likely to stay in future devices, they could improve the throughput and performance of many computational tasks beyond gemm. sliding window convolution algorithms exhibit excellent performance using commodity hardware. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/537.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/537.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c191ede18e32c92123a5e39ea2df19d8633ec8a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/537.txt @@ -0,0 +1 @@ +materials discovery is a strong driver of innovation, unlocking new materials with tailored properties that benefit various domains including energy efficiency, transportation systems and electronics. yet, it faces significant hurdles. characterizing new materials is computationally expensive, even when replacing lab experiments by quantum mechanical simulations like density functional theory (dft). besides, exploration of materials is hindered by the vastness of the search space, which encompasses myriad compositions, atomic arrangements and properties. to overcome these challenges, researchers have turned to machine learning (ml) for accelerated materials discovery for two primary reasons: first, ml holds the power to quickly model materials' properties (i.e., to evaluate candidates) using geometric graph neural networks (gnns). second, generative ml can automatically propose new and consistent material candidates. in recent years, ml has emerged as a crucial tool for the discovery of electro-catalysts, which play a key role in promoting renewable energy processes and sustainable chemical production, including the production of ammonia for fertilizers and hydrogen. the open catalyst project (ocp) has significantly contributed to this field by releasing an extensive dataset of 1,281,040 dft relaxations of catalyst-adsorbate pairs, selected from a pool of meaningful candidates. this dataset was specifically designed to train ml models to predict the relaxed energy of 3d adsorbate-catalyst (adslab) systems, a critical property that influences a catalyst's activity and selectivity, or its effectiveness for a specific chemical reaction.
ocp has facilitated significant advancements in catalysis discovery, with ml models increasingly bridging the performance gap with dft simulations while offering a speed advantage of several orders of magnitude. while research has primarily focused on predicting material properties, it is equally crucial to efficiently explore the vast search space of potential catalyst candidates. this involves generating consistent candidates automatically, a challenge due to the intricate process of creating adslab samples. this process, further detailed in appendix a, includes positioning an adsorbate molecule (e.g., h2o) with a catalyst in 3d space, meaning cutting a surface through the catalyst bulk, selecting the adsorbate's spatial orientation, and sampling a plausible binding site. these steps, which are time-consuming and challenging to model, determine the input configuration of the adslab sample and significantly influence the relaxed energy prediction. an additional challenge is that this process and its relevance to the actual in-lab efficiency of a real material is not yet fully understood by chemists. in light of these challenges, this paper explores the possibility of predicting the relaxed energy of an adslab without co-locating the adsorbate and the catalyst in the same 3d space. this direction of study is valuable for several reasons: (1) to better understand the role of adsorbate-catalyst geometry in determining the relaxed energy; (2) to reduce reliance on the exact input configuration, which often correlates with local energy minima; (3) to avoid the complexity and computational cost of determining a good input configuration (e.g., finding a binding site and adsorbate orientation). our primary objective is to quantify the loss in accuracy that occurs when the geometric relationship between inputs is unavailable. to achieve this, we propose four modifications of existing gnn architectures, collectively referred to as disconnected gnns. all four modifications enable the base architecture to make relaxed energy predictions without explicitly modeling geometric interactions between the adsorbate and the catalyst. we then evaluate the trade-off of omitting these interactions through experiments on the oc20 dataset and suggest future directions to overcome the limits associated with the input adslab configuration. the baseline disconnected gnn model does not create any edges between the catalyst and the adsorbate in the graph creation step. by removing all such edges, no relative geometric information will pass from the adsorbate to the catalyst when modelling the system, since gnns only propagate information through graph edges.
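a minimal sketch of this "disconnected" graph construction, with a hypothetical data layout (not the ocp codebase): radius-graph edges are built as usual, but any edge that would connect an adsorbate atom to a catalyst atom is dropped, so no relative geometric information flows between the two substructures during message passing.

import numpy as np

def disconnected_edges(pos, is_adsorbate, cutoff=6.0):
    n = len(pos)
    edges = []
    for i in range(n):
        for j in range(n):
            if i == j:
                continue
            if np.linalg.norm(pos[i] - pos[j]) > cutoff:
                continue
            if is_adsorbate[i] != is_adsorbate[j]:   # cross adsorbate-catalyst edge: skip it
                continue
            edges.append((i, j))
    return np.array(edges).T                          # (2, num_edges) edge index

rng = np.random.default_rng(0)
pos = rng.random((10, 3)) * 8.0                       # toy atom coordinates
is_adsorbate = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0], dtype=bool)
edge_index = disconnected_edges(pos, is_adsorbate)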
this final mlp layer gives more expressive power to the model than the disconnected baseline because we explicitly model non-geometric interactions between the adsorbate and the catalyst. for every catalyst node i and adsorbate node j, we create the new edges (i, j) and (j, i) with weights z and -z respectively, where z is the z-axis coordinate of x_i. notice that this model can run without locating the adsorbate and catalyst in the same 3d plane, since no information about the adsorbate's location relative to the catalyst is used. let f denote a "normal" intra-interaction layer, let h^l_ads and h^l_cat denote the node embeddings of the adsorbate and the catalyst at interaction layer l, and let e_ads and e_cat stand for the edges of the adsorbate and catalyst, respectively. to produce h^{l+1}_ads and h^{l+1}_cat, we first calculate h'_ads = f(h^l_ads, e_ads) and h'_cat = f(h^l_cat, e_cat). each input configuration (binding site, adsorbate orientation) of an adsorbate-catalyst pair comes with an associated relaxed energy. the obvious benefit of such an approach is that it marginalises over adslab binding sites and adsorbate orientations, avoiding the need to explore all configurations to actually find the relaxed-energy global minimum of this adsorbate-catalyst pair, which is expected to be the main reaction driver in real-life experiments [lan et al.]. looking forward, we see the potential of using such disconnected models to find the global energy minima of adslab systems while circumventing the need to consider all possible input adslab configurations. the second direction is to find a way to effectively allow the nodes of the adsorbate and catalyst to communicate, and to do this while omitting the relative position and orientation between the adsorbate and the catalyst. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/538.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/538.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ab72e5074dcf37c7da7ded8a70f31941c597822 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/538.txt @@ -0,0 +1 @@ +the successful recognition of personal named entities, including a user's contacts, favorite music, and apps, is crucial for automatic speech recognition (asr) applications such as voice assistants on mobile devices. however, recognition of personal named entities is challenging, particularly for end-to-end (e2e) asr, because named entities often contain rare words, and many are pronounced or spelled irregularly. to compensate, asr systems are frequently adapted to particular users, a process often called personalization. in a hybrid asr system, personalization is relatively straightforward. this type of system usually consists of an acoustic model (am), a language model (lm), and a pronunciation lexicon. assuming the am is sufficiently trained on acoustic units that cover rare personal named entities, only the lexicon and lm need to be personalized. a typical solution in hybrid asr systems is to introduce a contextual language model which is trained on personal named entities and represented by a finite state transducer (fst). during recognition, the contextual lm is combined with the general language model through on-the-fly fst operations to achieve high accuracy for personal named entities. in contrast to hybrid asr systems, personalization of e2e asr systems is quite challenging. e2e asr systems use a single neural network to jointly model acoustics and language.
these systems are trained on paired audio and text data and often do not perform well on entities that appear infrequently or appear with different pronunciations in their training data. without separate lm and lexicon components, these models lack clear mechanisms for personalization. still, contextual biasing approaches have been applied to e2e systems with some success. wordpiece prior normalization, a technique which attempts to remove the lm component of the e2e model score so that it may be more effectively combined with an external lm, has provided an additional benefit. further, huang et al. demonstrated accuracy improvements using a method to tokenize entities into wordpiece sequences based on their pronunciations. despite the positive effects of these techniques, at least in our experiments, they were not sufficient to match hybrid system accuracy on personal named entities. while most previous work has addressed the personalization of e2e systems with attention-based encoder-decoder (aed) or transducer architectures, here we begin with a system which uses connectionist temporal classification (ctc). ctc has sparked renewed interest due to its simplicity, competitive accuracy, and amenability to efficient fst decoding. starting from a ctc baseline system, we extend prior work and present a novel method for generating wordpiece sequences from entity pronunciations. we show that using this technique in combination with contextual biasing and wordpiece prior normalization, we are able to achieve personal named entity accuracy on par with a competitive hybrid system. as in prior work, we use a wordpiece unigram trained on the e2e model training data as an estimate of the internal lm. in some cases, the ctc model scores the wordpiece sequence corresponding to a personal named entity so poorly that wordpiece prior normalization and contextual biasing are not sufficient to compensate. we then apply a phone-to-wordpiece (p2wp) model trained to produce wordpiece sequences that the ctc model will assign high probabilities given an acoustic realization of the input phone sequence. to obtain the required phone sequence/wordpiece sequence pairs, we apply the wordpiece decomposition used for training the ctc model to the words. we want the p2wp model to mimic the behavior of the ctc model, so we weight the training data pairs according to the word frequency in the ctc model training data. we use the model structure and parameters recommended by those works, except that the output size of the ctc model is adjusted to fit our in-house wordpiece list. then, we build a personalized ctc system and use either the lg fst or the p2wp fst to provide additional pronunciation-driven wordpiece sequences.
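a simplified numpy sketch of wordpiece prior normalization as described above, with assumed array shapes and toy distributions (not the paper's decoder): the wordpiece unigram prior, used as an estimate of the internal lm, is subtracted from the ctc log-posteriors before the external/contextual lm score is added during decoding.

import numpy as np

def normalized_score(ctc_log_probs, wp_prior_log_probs, ext_lm_log_probs,
                     prior_weight=1.0, lm_weight=0.5):
    # ctc_log_probs: (frames, vocab) log-posteriors; both priors are (vocab,) arrays
    return ctc_log_probs - prior_weight * wp_prior_log_probs + lm_weight * ext_lm_log_probs

frames, vocab = 50, 200
rng = np.random.default_rng(0)
ctc = np.log(rng.dirichlet(np.ones(vocab), size=frames))   # toy ctc posteriors per frame
prior = np.log(rng.dirichlet(np.ones(vocab)))              # wordpiece unigram prior
ext_lm = np.log(rng.dirichlet(np.ones(vocab)))             # contextual / external lm scores
scores = normalized_score(ctc, prior, ext_lm)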
we then feed all pronunciations to the lg fst or the p2wp model to get the 10-best wordpiece sequences for each. the results show that despite being over 10x smaller than the lg fst, measured by the number of fst arcs, the p2wp model yields better personal named entity accuracy. we then conduct experiments in the aforementioned three languages to better understand the accuracy impact of the p2wp model and wordpiece prior normalization. we show that by using this method to generate additional personal entity representations for contextual biasing and applying wordpiece prior normalization, we are able to match the accuracy of high-quality hybrid systems on personal named entity recognition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/539.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/539.txt new file mode 100644 index 0000000000000000000000000000000000000000..815f3be66d1ac6b5834b8892080fa7a8ff18355d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/539.txt @@ -0,0 +1 @@ +deep learning is commonly used for the end-to-end classification of electroencephalography (eeg) data or as a feature transformer for eeg data. however, training deep learning models requires a large amount of data, and this is often a limitation with most eeg datasets, where only a handful of subjects are available. a solution to the scarcity of large eeg datasets is to combine datasets from different sources. however, a challenge to this approach is the harmonization of data with different channel montages. different eeg caps are used in different eeg experiments, so the number and locations of channels/electrodes are rarely the same. a straightforward solution would be to subsample the channel space, selecting only the channels common across experiments when this is even possible. this approach, however, potentially underutilizes available data. previous works have addressed this problem by transforming raw eeg signals into a common feature space, such as the topographic distribution of spectral power or the spectrogram of single-channel eeg, before using deep neural networks. these solutions are nevertheless limiting since only spectral information of specific frequency bands is being considered. recent works are now showing that deep models trained on raw eeg data might learn statistical properties of the data well beyond the commonly used eeg frequency bands and that deep learning models trained on raw eeg outperform models trained on spectral features. spectral approaches limit the data-driven capacity of deep learning models, which could potentially extract more information from the raw eeg data. thus, researchers should explore channel harmonization methods for the training of deep learning models on raw eeg data from datasets with different montages. in recent works, a mechanism called spatial attention was implemented to train deep models to correlate eeg/meg with speech data. in this method, deep learning (dl) models take into account the relative positions of eeg electrodes on the scalp, information that is not available when dl models take as input 2-d channel × time eeg segments. this procedure consists of mapping channel locations from the coordinate space into a 2-d fourier space, where spatial dimensions are defined by the proximity of the channels' 2-d locations, and then applying the attention mechanism to the frequency-transformed channels to map the input channels to a fixed number of output channels.
while défossez and collaborators applied this mechanism to leverage information pertaining to the channel spatial distribution, it could also be used to map different channel montages into a common output channel space. in this report, we explore the application of the aforementioned spatial attention mechanism to the problem of training a single deep-learning model on data with different input channel montages. we used a large eeg resting-state dataset from more than a thousand juvenile (5-22 years) participants, collected and made publicly available by the child mind institute healthy brain network project. we performed gender classification using a simple convolutional deep neural model, to which we added spatial attention. to investigate the usefulness of the spatial attention mechanism for channel harmonization, we performed data subsampling to obtain a subset of the data with two different channel counts. to generate a dataset with a different channel count, we sub-selected 23 channels from our original 128-channel data using the same channels as in previous works. the combination of 128-channel and 23-channel data for each training, validation, and test set constitutes the mixed-channel data for each of those sets. we set the baseline for our experiments by first training and evaluating the deep learning models without spatial attention on both 128-channel and 23-channel data individually (see table 2). we then applied spatial attention and retrained the individual models to ensure that adding spatial attention would still allow the model to learn the gender classification task while not reducing the models' learning capacity on the data. we also trained models on mixed-channel data (a dataset with both 128-channel and 23-channel samples) and evaluated them on 128-channel, 23-channel, and mixed-channel data. while training the 128-channel and 23-channel models, we observed that the models converged and started to overfit the training data after about 15 epochs. we have shown that both spatial attention and the number of data channels improve the classification performance of dl models applied to eeg data. thus, the spatial attention mechanism can flexibly be used as a method to combine data with different channel counts for training deep learning models while taking into account the spatial information of the channels. channel harmonization using spatial attention thus gives a promising path forward for aggregating eeg data across datasets for the effective large-scale training of deep neural networks.
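a simplified numpy sketch of spatial attention for channel harmonization, with hypothetical shapes and random placeholders (not the cited implementation): each input channel receives positional features derived from its 2-d scalp location, and a fixed set of learned output "virtual channels" attends over whichever input channels a recording happens to have, yielding a montage-independent representation.

import numpy as np

def fourier_features(pos_2d, n_freqs=4):
    # pos_2d: (channels, 2) normalised electrode coordinates -> (channels, 4*n_freqs)
    freqs = 2.0 ** np.arange(n_freqs)
    ang = pos_2d[:, None, :] * freqs[None, :, None] * 2 * np.pi
    return np.concatenate([np.sin(ang), np.cos(ang)], axis=-1).reshape(len(pos_2d), -1)

def spatial_attention(eeg, pos_2d, queries):
    # eeg: (channels, time); queries: (out_channels, feat) learned parameters
    keys = fourier_features(pos_2d)                      # (channels, feat)
    logits = queries @ keys.T                            # (out_channels, channels)
    w = np.exp(logits - logits.max(axis=1, keepdims=True))
    w /= w.sum(axis=1, keepdims=True)
    return w @ eeg                                       # (out_channels, time)

rng = np.random.default_rng(0)
queries = rng.normal(size=(64, 16))                      # 64 virtual output channels
eeg_128, pos_128 = rng.normal(size=(128, 512)), rng.random((128, 2))
eeg_23, pos_23 = rng.normal(size=(23, 512)), rng.random((23, 2))
out_a = spatial_attention(eeg_128, pos_128, queries)     # both recordings map to (64, 512)
out_b = spatial_attention(eeg_23, pos_23, queries)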
the data regularization effect we observed when evaluating models trained on the subsampled 23-channel data on 128-channel and mixed-channel data might only hold for this experiment. we expect that in situations where the data comes from heterogeneous sources with different channel montages, models trained on datasets with a specific recording setup will only perform well when applied to data with the same recording parameters, further emphasizing the need for training deep models on non-uniform data samples. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/54.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/54.txt new file mode 100644 index 0000000000000000000000000000000000000000..b6010ad3de5817c85d5858c30c588174ebecbd9d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/54.txt @@ -0,0 +1 @@ +in today's ever more interconnected world, decision-making in dynamic environments is often extremely difficult despite vast streams of data and huge models within disparate domains of relevant expertise. decision support can be valuable, but needs to incorporate all the relevant inputs in a clear and coherent way so that a decision-making team can make a defensible selection among policy options. in these contexts, such decision centres often need to draw together inferences in dynamic, plural environments and integrate expert judgements coming from a number of different panels of experts, where each panel is supported by its own, sometimes very complex, models. these judgements need to be networked together to provide coherent inference for appropriate decision support in increasingly complex scenarios. a formal statistical methodology to network together the diverse supporting probabilistic models needed to achieve this, the integrating decision support system (idss), was developed in (smith et al., 2016). here we capitalise on this exciting new development to construct decision support for policy selection in the domain of pollination ecosystem services. in 2014 the uk government issued its first pollinator strategy (department for environment, food and rural affairs, 2014) and more recently the pollinator action plan 2021 to 2024 (department for environment, food and rural affairs, 2022) and the healthy bees plan 2030 (department for environment, food and rural affairs, 2020b). the given reason is that bees and other pollinators are an essential part of our environment and play a crucial role in food production - they contribute the equivalent of more than £500 million a year to uk agriculture and food production, by improving crop quality and quantity - and are also vital to our wider, natural ecosystems; critical to our food industry, our green spaces, wider biodiversity and ensuring healthy and productive ecosystems (department for environment, food and rural affairs, 2022). the importance of pollination services to food production in the uk and worldwide is undisputed (vanbergen et al., 2014). pollinator-dependent food products are important contributors to healthy human diets and nutrition (potts et al., 2016) and it is estimated that over 70% of important food crops worldwide are dependent upon pollinators (klein et al., 2007). therefore, the status of bees and other pollinators is of significant concern in global food security (bailes et al., 2015; blaauw and isaacs, 2014; lonsdorf et al., 2009; lucas, 2017; ollerton, 2012; novais et al., 2016).
pollination has a direct economic value through increasing the yield and quality of insect-dependent crops. in the uk, this includes oilseed rape, orchard fruit, soft fruit and beans. many agricultural businesses employ migratory bee services in order to ensure adequate pollination of crops (bishop et al., 2016; gordon et al., 2014). total loss of pollinators could cost up to £440m a year, about 13% of uk income from farming (post, 2010). insect-dependent crops can be pollinated by hand, but the cost would be prohibitive (estimated at £1500 million a year), raising the cost of food in the marketplace and increasing food insecurity and nutrition insecurity substantially. it is estimated that pollinator loss would reduce world agricultural production by 5% overall, reducing the diversity of food available, particularly affecting 'five-a-day' crops (post, 2010), with the obvious downstream effect of increasing the burden of disease and health costs. of course, humans are not the only beneficiaries of pollination services, and the social and tourist value of the insects themselves, the other wildlife they support and the floral species reliant on them should not be discounted. the pollinator action plan defines pollinator health as the state of well-being of wild and managed pollinators that allows individuals to live longer and reproduce more, even in the presence of pathogens, and therefore provide ecosystem services more effectively. pollinator health is a function of pests, parasites, disease, and other anthropogenic stressors, the availability of appropriate nutrition across life-stages, nest-sites, host plants, mating areas, and hibernation sites. honey bee health also depends on the beekeepers managing them. if pollinator health is high, we would expect a greater abundance of pollinators. the uk national pollinator strategy (department for environment, food and rural affairs, 2014) acknowledged that whilst there is an abundance of excellent research in many aspects, the evidence for the system as a whole is patchy. therefore there is a need for decision support to identify optimal policies across this complex landscape. there is a need to assess population-level impacts of insect pollinator management actions and the link between insect pollinator population size change and drivers (department for environment, food and rural affairs, 2022, 2020b). this paper seeks to contribute to this need using the idss paradigm. by bringing the various component models together, an idss aids decision-makers in evaluating the effects of candidate policies on the various evolving variables which can influence the outcome variables. many statistical models satisfy the coherence conditions for an overarching composite model to bring expert component models together under the idss paradigm (leonelli and smith, 2015), including bayesian networks (bns), multiregression dynamic models, chain event graphs, markov networks and influence diagrams. bayesian networks are a well-established family of probabilistic graphical models that combine a statistical model that decomposes a complex system into a collection of conditional independence relationships among its defining variables, and a graph that visually represents these conditional independence relationships.
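a toy python sketch of the bn idea just described, using purely hypothetical variables and probabilities (not the paper's model): a small dag habitat -> pollinator_health -> abundance factorises the joint distribution into conditional tables, and a candidate policy can be explored by changing a parent's distribution and re-propagating.

# hypothetical conditional probability tables for a three-node bn
p_habitat = {"good": 0.4, "poor": 0.6}
p_health_given_habitat = {"good": {"high": 0.8, "low": 0.2},
                          "poor": {"high": 0.3, "low": 0.7}}
p_abundance_given_health = {"high": {"high": 0.9, "low": 0.1},
                            "low": {"high": 0.2, "low": 0.8}}

def p_abundance_high(p_hab):
    # marginalise over habitat and pollinator health
    return sum(p_hab[h] * p_health_given_habitat[h][ph] * p_abundance_given_health[ph]["high"]
               for h in p_hab for ph in ("high", "low"))

baseline = p_abundance_high(p_habitat)
after_policy = p_abundance_high({"good": 0.7, "poor": 0.3})   # e.g., a habitat-improvement policy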
bayesian network: a bayesian network (bn) b = (g, p) is a probabilistic graphical model over a set of variables x = {x 1 , x 2 , ..., x n }. here g = (v(g), e(g)) is a directed acyclic graph (dag) whose node set v(g) is given by the variables in x, and p is a joint probability distribution over the variables in x. the edge set e(g) ⊆ v(g) × v(g) consists of directed arcs such that the lack of an edge between two nodes represents conditional independence between the variables represented by the nodes, while edges between nodes encode conditional dependence. the effects of a given candidate policy can be evaluated within the bn by intervening on the variables sought to be changed by the policy, and propagating the effects of these changes through the system. dynamic bayesian network: a dynamic bayesian network (dbn) is a dynamic variant of the bn that evolves in discrete time. a dbn, defined over a set of variables x(t) = {x 1 (t), x 2 (t), ..., x n (t)}, is a sequence of bns (b 1 , ..., b n ) where b 1 is the initial bn over x(1) and each subsequent bn b t represents the state of the system at time-slice t over x(t) for t ≥ 2. assuming the system satisfies the first-order markov property, the bn b t is connected to the bn b t+1 by directed inter-slice temporal arcs to represent the effect of the variables at time t on the variables at time t + 1. such a dbn is called a 2-time-slice dbn and can be compactly given by the tuple (b 1 , b →) where b 1 is the initial bn and b → is the transition bn that describes the dependencies of a variable x at time t given the values of its parents in time-slices t and t - 1. the mutual information is given by i(x; y) = h(x) - h(x|y), where h(x) is the marginal entropy of x, h(x|y) is the conditional entropy of x given y, and i(x; y) is measured in bits. the mutual information metric for x and y is non-negative and symmetric, and it indicates how much information x communicates about y. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/540.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/540.txt new file mode 100644 index 0000000000000000000000000000000000000000..fd395e895048b16a718d92b770c685af63a587cc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/540.txt @@ -0,0 +1 @@ +chemical-based machine learning has gained widespread adoption as an efficient and accurate approach for predicting molecular properties, owing to its capacity to effectively represent crucial structural aspects of molecules. recent advancements in foundational models have shown promising results by leveraging chemical language representations through a two-step process of pre-training on extensive unlabeled corpora and subsequent fine-tuning on specific downstream tasks of interest. despite the emergence of pre-trained language models as viable options for molecular property prediction, they are still in their nascent stages of development. there is a pressing need for further research to enhance their performance and address issues like generalization and sample efficiency. furthermore, recent discussions have emphasized the pivotal role of enhancing data quality and representations in elevating the overall quality of generated models and diminishing the reliance on excessively large models. the incorporation of high-quality data and representations has the potential to advance the state-of-the-art in pre-trained language models while concurrently decreasing the dataset volume and training computational resources.
of equal significance, the adoption of smaller models that necessitate less extensive training can substantially mitigate the environmental impact associated with large pre-trained language models. in this study, we introduce a multi-view approach that leverages the fusion of latent spaces of different natures generated by two state-of-the-art chemical-based models, namely molformer-base, which is based on transformers, and mhg-gnn, a graph-based approach. our approach is geared towards enhancing the prediction of molecular properties. our findings demonstrate that our proposed method surpasses existing state-of-the-art algorithms, including the chemical language-based molformer-xl, when it comes to tackling intricate tasks like predicting the toxicity of drugs in clinical trials and gauging the potential of small molecules to inhibit hiv replication. these challenging tasks are part of the moleculenet benchmark dataset. furthermore, our approach exhibits superior performance in five out of six datasets studied during our experiments. it is also important to highlight that the proposed approach refers to a fusion of latent spaces of models that were trained on 1.7 million molecules (combined), and consistently performed better than molformer-xl, which was trained on 1.1 billion molecules. this prompts the discussion of the necessity of very large models, which are costly, resource-hungry, and laborious. in fact, our approach opens up promising avenues for future research in molecular property prediction. by leveraging the fusion of latent spaces and feature sets, we have demonstrated a significant enhancement in performance that holds potential for advancing the field. our approach relies on two pivotal elements: the embeddings derived from mhg-gnn, which represent molecular structures as graphs, and molformer embeddings rooted in chemical language. table 2 offers a comprehensive overview of the comparative performance between our proposed multi-view approach and state-of-the-art algorithms on various benchmark datasets.
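a minimal sketch of the multi-view fusion idea described above, using placeholder arrays rather than the actual molformer-base and mhg-gnn encoders: per-molecule embeddings from the two views are concatenated and fed to a downstream property classifier.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_auc_score

rng = np.random.default_rng(0)
n_molecules = 500
emb_language = rng.normal(size=(n_molecules, 768))     # stand-in for molformer-base embeddings
emb_graph = rng.normal(size=(n_molecules, 1024))       # stand-in for mhg-gnn embeddings
y = rng.integers(0, 2, size=n_molecules)               # e.g., toxic / non-toxic labels

x_fused = np.concatenate([emb_language, emb_graph], axis=1)   # the multi-view representation
x_tr, x_te, y_tr, y_te = train_test_split(x_fused, y, test_size=0.2, random_state=42)
clf = LogisticRegression(max_iter=1000).fit(x_tr, y_tr)
print(roc_auc_score(y_te, clf.predict_proba(x_te)[:, 1]))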
a keen analysis of the table reveals that the multi-view approach, which leverages the fusion of embeddings, outperforms its counterparts in 5 out of 6 datasets, underscoring its potential to excel in diverse domains. this pattern of success across multiple datasets strongly suggests that the fusion of embeddings of different natures plays a pivotal role in enhancing the algorithm's performance. our proposed fusion-based approach harnesses the power of 768 embeddings from the transformers-based molformer-base and 1024 embeddings from the graph-based mhg-gnn, capitalizing on their complementary strengths to excel in a variety of challenging tasks. it is important to highlight that, compared to the state of the art, we use the fusion of latent spaces of two small models: molformer-base and mhg-gnn were pre-trained on a small portion of selected molecules from pubchem. in conclusion, the findings showcased in table 2 underscore the potential of our proposed multi-view approach, emphasizing its ability to harness diverse features for improved performance in various complex tasks. this paper introduces a multi-view approach that synergizes the latent spaces of two state-of-the-art algorithms for molecular property prediction, molformer-base and mhg-gnn, to deliver superior performance across a range of demanding tasks sourced from the moleculenet dataset. the proposed multi-view approach used features from models trained on small datasets and performed consistently better than the state-of-the-art molformer-xl, which was trained on 1.1 billion molecules. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/541.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/541.txt new file mode 100644 index 0000000000000000000000000000000000000000..30206d0151dc1c578a84b04bab4285ea2b00487a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/541.txt @@ -0,0 +1 @@ +artificial intelligence, or ai, has proven itself to be a powerful tool across various industries, simplifying complex tasks and pattern recognition that would otherwise be overwhelming for humans or traditional computer algorithms. its versatility is evident in its ability to transform operations in many fields, and healthcare is no exception. in healthcare, ai is invaluable for its capacity to manage vast amounts of data, which can lead to more accurate and speedy diagnoses, ultimately easing the workload on healthcare professionals. the utility of ai spans far and wide, from optimizing supply chains to revolutionizing customer service and financial forecasting. however, when it comes to healthcare, the focus shifts to its incredible potential to handle the immense volumes of medical data we encounter daily. in the healthcare sector, data-driven decisions are crucial. precise and timely diagnoses and prognoses are paramount, and ai plays a pivotal role in achieving these goals. it can compile and analyze millions of data points, creating comprehensive models that assist in making medical assessments. this becomes particularly important during critical times, such as the peak of the covid-19 pandemic. throughout the pandemic, healthcare workers faced an unprecedented workload, strained resources, and a dire need for rapid and accurate decision-making. in such circumstances, ai modeling became a lifeline. ai tools were employed to analyze patient data, predict disease progression, and optimize the allocation of resources.
these applications not only saved time but also helped save lives when healthcare systems were pushed to their limits. furthermore, healthcare research and analysis inherently involve vast amounts of data. this encompasses patient records, genetic information, clinical trials, and medical imaging, creating a need for a sophisticated approach. while traditional computer algorithms can handle large datasets, they may struggle to adapt to changing data trends and patterns. ai systems excel in this regard. they possess the capability to continuously learn and adapt as new data becomes available, making them ideal for the dynamic nature of healthcare research. whether it's identifying rare genetic mutations linked to diseases or predicting the outcomes of innovative treatments, ai's ability to navigate extensive datasets and discern nuanced patterns is unparalleled. integrating ai into healthcare isn't just a technological advancement; it represents a transformative shift in how we approach medical diagnoses, treatments, and research. by harnessing ai's capacity to process and interpret vast datasets, healthcare professionals can make more precise decisions, ultimately leading to improved patient outcomes and a more efficient healthcare system. as ai continues to evolve, its role in reshaping healthcare is poised to grow, benefiting patients and healthcare providers alike.
gender = random.randint(0, 1)
if gender == 0:
    weight = random.uniform(0, m_high_weight_list + m_low_weight_list)
    acceptable_weight = (m_high_weight_list + m_low_weight_list) / 2
else:
    weight = random.uniform(0, f_high_weight_list + f_low_weight_list)
    acceptable_weight = (f_high_weight_list + f_low_weight_list) / 2
severity = (1 - age / 24) * (virion_count) + abs((acceptable_weight - weight) / acceptable_weight) * ((virion_count) ** 2)
# add 0.01% variance to the severity by multiplying it with a random factor
variance_factor = 1 + random.  # (truncated in the source: 0001, 0.3, 9.0, 9.3, 9.6, 9.)
y_data_variance = pd.read_csv('y_data_variance.csv')  # y_data_variance.csv contains 'severity' with variance
y_data_precise = pd.read_csv('y_data_precise.csv')  # y_data_precise.csv contains 'severity' without variance
# convert y_data_variance to a 1d array
y_data_variance = y_data_variance.ravel()
# split the data into training and testing sets
x_train, x_test, y_train, y_test = train_test_split(x_data, y_data_variance, test_size=0.2, random_state=42)
# create and train the bayesian ridge regression model
model = BayesianRidge(alpha_1=2.  # (argument list truncated in the source)
print("mean squared error (mse) on test data with precise values:", mse)
print("r-squared (r2) on test data with precise values:", r2)
another way to better fit the model to the data would be to tweak the bayesian hyperparameters.
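a hedged sketch of that hyper-parameter tweak, using the gamma-prior hyper-parameters that scikit-learn's BayesianRidge actually exposes (alpha_1, alpha_2, lambda_1, lambda_2); the grid values are illustrative, and x_train/x_test/y_train/y_test are the variables from the split shown above.

```python
from sklearn.linear_model import BayesianRidge
from sklearn.model_selection import GridSearchCV

# search over the gamma-prior hyper-parameters of bayesian ridge regression
param_grid = {
    "alpha_1": [1e-6, 1e-3, 1.0],
    "alpha_2": [1e-6, 1e-3, 1.0],
    "lambda_1": [1e-6, 1e-3, 1.0],
    "lambda_2": [1e-6, 1e-3, 1.0],
}
search = GridSearchCV(BayesianRidge(), param_grid, scoring="r2", cv=5)
search.fit(x_train, y_train)
print("best hyper-parameters:", search.best_params_)
print("test r2:", search.best_estimator_.score(x_test, y_test))
```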
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/542.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/542.txt new file mode 100644 index 0000000000000000000000000000000000000000..63aa58b7d01c36d29ad9ff49242ceeb9be2a3c4e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/542.txt @@ -0,0 +1 @@ +in a traditional discounted reinforcement learning (rl) problem , the objective is to maximize the value function, which is the expected value of the infinite-horizon cumulative discounted reward. however, optimizing only the expected value is not appealing in several practical applications. for instance, in the financial domain, strategists like to consider the risk of investments. similarly, in transportation, road users are sensitive to the variations in the delay incurred and would especially like to avoid a large delay even if it occurs infrequently. risksensitive rl addresses such applications by incorporating a risk measure in the optimization process, either in the objective or as a constraint.going beyond the expected value, three well-known risk measures are variance, value-at-risk (var) and conditional value-at-risk (cvar) . for a cumulative distribution function (cdf) f , the var v α (f ) and cvar c α (f ) at a given level α ∈ (0, 1) is defined bywhere x ∼ f . from the above, it is apparent that v α (f ) is a certain quantile of the cdf f . for a continuous distribution f , v α (f ) = f -1 (α), while cvar can be equivalently written as c α (f ) = e for x ∼ f . in words, cvar is the expected value of x, conditioned on the event that x exceeds the var. choosing a α close to 1 and taking x as modeling the losses of a financial position, cvar can be understood as the expected loss given that losses have exceeded a certain threshold (specified by a quantile). in the financial domain, cvar is preferred over var because cvar is a coherent risk measure , while var is not. in particular, with var as the risk measure, diversification can cause an increase in the risk as indicated by var -an attribute that is not desirable in a financial risk measure.table 1: summary of the sample complexity lower and upper bounds, in an expected sense, for estimating various risk measures. for a given ǫ > 0, sample complexity is the number of sample transitions n such that the estimation error e|η nη(d)| < ǫ for all n ≥ n. here η is the risk measure, d is the cumulative discounted cost, and ηn the risk estimate. here õ (•) is a variant of the big-o notation that ignores logarithmic factors. with independent and identically distributed (i.i.d.) samples, the estimation of var and cvar has received a lot of attention recently in the literature, cf. . the concentration bounds for cvar have been useful in deriving sample complexity results in the context of empirical risk minimization and bandit applications. for a cumulative distribution function (cdf) f , the var v α (f ) and cvar c α (f ) at a given level α ∈ (0, 1) is defined by. for a continuous distribution f , v α (f ) = f -1 (α), while cvar can be equivalently written as c α (f ) = e for x ∼ f . 
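as stated above, cvar at level α is the expected value of x conditioned on x exceeding the var; for i.i.d. samples the corresponding plug-in estimates take only a few lines. this is a generic illustration, not the estimator whose sample complexity the paper analyzes.

```python
import numpy as np

def empirical_var_cvar(samples, alpha):
    """plug-in estimates: var is the empirical alpha-quantile, cvar is the
    mean of the samples at or above that quantile (loss convention)."""
    x = np.asarray(samples, dtype=float)
    var = np.quantile(x, alpha)
    cvar = x[x >= var].mean()
    return var, cvar

rng = np.random.default_rng(0)
losses = rng.normal(size=100_000)   # toy loss distribution
print(empirical_var_cvar(losses, alpha=0.95))
```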
also, γ ∈ [0, 1) denotes a discount factor, and f t (m, f ) the cdf of the cumulative discounted cost over the horizon t, i.finally, m := {(m, f ) : f t (m, f ) converges weakly to a cdf} , and, for (m, f ) ∈ m ,.also, for (m, f ) ∈ m , the expressions µ(m, f ), v(m, f ), v α (m, f ), and c α (m, f ), with α ∈ (0, 1), denote the mean, variance, var and cvar (both at the confidence level α ∈ (0, 1)) of the cdf f (m, f ), respectively. setup: we presume we have access to an mcp (m, f ) ∈ m , where m and f are unknown, but whose state and single-stage cost trajectory can be observed.research goals: obtain lower and upper bounds on the samples needed to obtain an ǫ-accurate estimate of a risk measure η(m, f ) related to the unknown underlying mcp (m, f ).as a function of n and ǫ, where e and p are with respect to h n ∼ p n m,r r r , and η(m, f ) is either µ(m, f ), v(m, f ), or one of v α (m, f ) and c α (m, f ) for a given α ∈ (0, 1).lower bounding the minimax error: let a ≡ (η, r r r) be any var estimation algorithm, and p * the probability distribution of h n under p n m * ,r r r .where f 1 is a shorthand for the cdf f (m * , f 1 ). first, we show that the infinite-horizon cumulative discounted cost's cdf f (m 0 , f ) (see (4)) exists for the above mcp and obtain expressions for its mean, variance, var, and cvar. next, we establish a relationship between f (m 0 , f ) and the cdf of f (s) given in (39).clearly, the n-horizon cdf f n (m 0 , f ), n ∈ n, equals the cdf of n-1 t=0 γ t x t , where (x t ) is an iid sequence of random variables having the same distribution as f (s). we next discuss a relationship between the samples of f (m 0 , f ) and the distribution of f (s). consider two different variants (m 0 , f +1 ) and (m 0 , f -1 ) of the mcp (m 0 , f ), where f ν (s),. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/543.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/543.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9a31ce72777623a1f03a302ec4494f35a16ee46 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/543.txt @@ -0,0 +1 @@ +neural networks can unintentionally memorize specific parts about their training examples. prior works demonstrated that auto-regressive language models and vision models are susceptible to unintended memorization of their training examples, and thus may disclose potentially sensitive information during inference. however, for non-auto-regressive models, memorization can be hard to distinguish from generalization. for example, when a non-auto-regressive asr model accurately transcribes a training example, it is hard to tell whether the model is generalizing well or it has unintentionally memorized the example. the reason is that the difference in accuracy between the two cases is so small that it easily gets hidden by other sources of variance such as inher-ent hardness of different training examples (e.g. some training examples are intrinsically easier/harder to learn than other examples and thus have higher/lower accuracy). existing works train "reference" models to calibrate the hardness of different training examples to rule out the variance and bring out the subtle difference between memorization and generalization. however, these works have to train tens to hundreds of reference models for obtaining good calibration. as the size of trained models increases, obtaining comparable reference models can be very cost/compute/memory intensive. 
thus, a way to efficiently measure unintended memorization for large non-auto-regressive asr models is urgently needed.in this work, we propose the first efficient memorization auditing framework for large non-auto-regressive asr models. to address the ambiguity between generalization and memorization, we propose to create out-of-distribution training examples that are extremely hard to be learnt from normal training examples, such that the model can only memorize them for accurate transcription. to obtain such training examples, we speed up normal utterances to create a mapping between vocal and text information different from typical training examples. on the state-of-the-art asr models , we manage to show that these training examples are unique enough such that memorization is the only way to accurately transcribe them. as a result, accurate transcripts given by the asr model for sped-up training examples can serve as clear evidence for memorization, and the level of accuracy can be used as a measure of memorization.to mitigate memorization, we propose to apply perexample gradient clipping during training. specifically, we clip each training example's gradient to a fixed l2 norm bound if it's originally larger than the bound. the intuition is that using per-example clipping, how much an individual example can influence the final model is bounded, and thus the final model should not memorize too much about any training example. our evaluation on the fine-tuning of the state-of-the-art best-rq pre-trained asr models shows that per-example clipping can effectively mitigate memorization for training examples occurring up to 16 times in the training set. however, per-example clipping incurs extra training time overhead because we can no longer avoid materializing per-example gradients for acceleration like in non-private training. to address the issue, we revisit the idea of micro-batch clipping , which shards the gradients arxiv:2310.11739v1 18 oct 2023 into several micro-batches, averages the gradients within the same micro-batch, and then applies clipping. coincidentally, in large-scale distributed training, each compute core (e.g. tpu/gpu) has to maintain the average gradient of all the training examples on it and thus forms natural micro-batches without incurring extra overhead. our empirical results show that per-core gradient clipping achieves comparable or even better word error rate (wer) and neutral running time compared with the non-private baseline while providing much better empirical privacy.in section 2, we introduce our memorization auditing method and empirically evaluate it. next in section 3, we introduce the countermeasure, gradient clipping, and use the proposed auditing method to demonstrate the effectiveness of the countermeasure. we conclude this work in section 4. to address the ambiguity between generalization and memorization, we propose to create out-of-distribution training examples that are extremely hard to be learnt from normal training examples, such that the model can only memorize them for accurate transcription. on the state-of-the-art asr models, we manage to show that these training examples are unique enough such that memorization is the only way to accurately transcribe them. as a result, accurate transcripts given by the asr model for sped-up training examples can serve as clear evidence for memorization, and the level of accuracy can be used as a measure of memorization. 
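a schematic pytorch rendering of the per-example clipping described above, which materializes each example's gradient explicitly (the overhead the text refers to); the clipping bound and loop structure are illustrative rather than the authors' implementation.

```python
import torch

def clipped_average_gradients(model, loss_fn, xs, ys, clip_norm=1.0):
    """clip each example's gradient to an l2 norm of at most clip_norm,
    then average the clipped per-example gradients over the batch."""
    params = list(model.parameters())
    summed = [torch.zeros_like(p) for p in params]
    for x, y in zip(xs, ys):
        loss = loss_fn(model(x.unsqueeze(0)), y.unsqueeze(0))
        grads = torch.autograd.grad(loss, params)
        norm = torch.sqrt(sum(g.pow(2).sum() for g in grads))
        scale = min(1.0, clip_norm / (float(norm) + 1e-12))
        for acc, g in zip(summed, grads):
            acc.add_(g, alpha=scale)
    return [acc / len(xs) for acc in summed]
```

micro-batch (per-core) clipping replaces the inner per-example loop with one clipped gradient per group of examples, which is why it adds essentially no overhead in distributed training.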
the intuition is that using per-example clipping, how much an individual example can influence the final model is bounded, and thus the final model should not memorize too much about any training example. our evaluation on the fine-tuning of the state-of-the-art best-rqpre-trained asr models shows that per-example clipping can effectively mitigate memorization for training examples occurring up to 16 times in the training set. however, per-example clipping incurs extra training time overhead because we can no longer avoid materializing per-example gradients for acceleration like in non-private training.the secret sharer frameworkhas been widely used to measure unintended memorization of textual data in language models, and detect such memorization even when they are fused with acoustic models for asr. to measure the level of memorization, secret sharer measures accuracy metrics, such as perplexities, of the canaries inserted in the training set and compare them with the metrics of examples drawn from the same distribution but unseen during training (i. if the model performs significantly better on the inserted canaries than on the holdout set, then it is strong evidence that the model memorizes the inserted canaries, and the rank of the inserted canary's metric among the holdout set can serve as a measure of memorization. for measuring memorization of utterances used for training asr models, there is only one workthat uses the secret sharer framework but requires additional similarly-trained asr models as reference models for hardness calibrationas mentioned in section 1. however, attacks for auto-regressive models are not directly applicable to nonauto-regressive asr models, and existing extraction attacks on asr modelshave been shown to work only for memorization from commonly-occurring structured components in the training data. to demonstrate this, we conduct experiments by training state-of-the-art asr models and audit memorization using the method inwithout reference models. instead, the reason is that even a model that never sees a canary can perform reasonably well in transcribing the canaries because the model generalizes well to learn how to transcribe the canaries used infrom normal training examples, and leaves no much room for improvement due to canary insertions for memorization measurement. second, we observe that models fine-tuned with sped-up canaries can accurately transcribe the canaries seen during training but cannot generalize to the examples in the sped-up holdout set, and thus exhibit high exposure as shown in figure2. we show that while clipping each example's gradient shows stronger robustness to memorization, clipping the average gradient of the examples on the same compute core (e. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/544.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/544.txt new file mode 100644 index 0000000000000000000000000000000000000000..d73f792fbe7d5a96128f14cfec2ccb744701a056 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/544.txt @@ -0,0 +1 @@ +network monitoring is at the core of networks operations with traffic classification (tc) being key for traffic management. traditional deep packet inspection (dpi) techniques, i.e., classifying traffic with rules related to packets content, is nowadays challenged by the growth in adoption of tls/dnssec/https encryption. 
despite the quest for alternative solutions to dpi already sparked three decades ago with the first machine learning (ml) models based on packet and flow features, a renewed thrust is fueled today by the rise of deep learning (dl), with abundant tc literature reusing/adapting computer vision (cv) training algorithms and model architectures .in this work, we argue that opportunities laying in the data itself are still underexplored, based on two observations. first, cv and natural language processing (nlp) adopt "cheap" data augmentation (da) strategies (e.g., image rotation or synonym replacement) for improving models performance. yet, almost no tc literature investigates da. second, network traffic datasets are imbalanced in nature due to app/service popularity skew, which calls for strategies to augment the minority classes. again, the interplay between imbalance and model performance is typically ignored in tc literature.in this paper, we propose a two-fold research agenda: (𝑎) first, we study hand-crafted da to assess its benefits and relationship with class imbalance (sec. 2); then, (𝑏) we charter a roadmap to pursue better augmentation strategies via generative models, i.e., learning da in a data-driven fashion rather than adopting manual design (sec. 3). despite the quest for alternative solutions to dpi already sparked three decades ago with the first machine learning (ml) models based on packet and flow features, a renewed thrust is fueled today by the rise of deep learning (dl), with abundant tc literature reusing/adapting computer vision (cv) training algorithms and model architectures. such transformations are meant to be directly used in the input space, thus their design requires domain knowledge to control samples variety-too little produces simple duplicates; too much breaks class semantics and introduces undesired data shifts. indeed, to be beneficial, da should introduce additional training points that foster better clustering of the classes in the latent space. however, without explicitly knowledge of how input samples are projected in the latent space (as models are "black boxes"), domain knowledge hardly suffices for effectively designing these transformations-the use of da is a trial and error process., in the medical field), reusing such methods is not trivial for tc as data suffers from two extra undesirable restrictions: input samples are short-traditionally, they are time series of the first n packets of a flow (e. first of all, it is reasonable to assume that the augmentations performance gaps is rooted in the geometry of the latent space. this raises questions such as "where would be more effective to project synthetic points? what level of samples variety is most effective for training?" which we will address by using clustering metrics and latent space geometry analysis-we aim to uncover how augmentations can help to "regularize" the latent space. second, better da should be viable via generative models such as generative adversarial networks (gan) and diffusion models (dm). 
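to make the hand-crafted da concrete, here is an illustrative transform over the "first n packets of a flow" representation mentioned above; the specific perturbations (inter-arrival-time jitter and light packet masking) are our own examples of "cheap" augmentations, not the ones evaluated in the paper.

```python
import numpy as np

def augment_flow(packet_sizes, inter_arrival_times, rng, sigma=0.05, mask_prob=0.05):
    """jitter inter-arrival times and mask a few packets while keeping the
    overall size/direction pattern (the class semantics) largely intact."""
    iat_aug = inter_arrival_times * (1.0 + rng.normal(0.0, sigma, inter_arrival_times.shape))
    keep = rng.random(packet_sizes.shape) >= mask_prob
    sizes_aug = np.where(keep, packet_sizes, 0.0)   # 0 marks a masked packet
    return sizes_aug, np.clip(iat_aug, 0.0, None)

rng = np.random.default_rng(7)
sizes = np.array([1500.0, -52.0, 1500.0, 1500.0, -40.0])   # sign encodes direction
iats = np.array([0.0, 0.01, 0.03, 0.01, 0.2])
print(augment_flow(sizes, iats, rng))
```

too little perturbation produces near-duplicates, too much breaks class semantics, which is exactly the design tension described above.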
these techniques approximate the input data distribution, generate diverse samples, and can be guided by conditional mechanisms to steer their projection in the latent space of a classifier.however, generative models are usually trained separately from the final downstream task and with datasets having a large variety of samples.5linking back to the previous stage, we envision a first exploration based on conditioning the generative models on the latent space properties learned via hand-crafted da. then, we will target the more challenging scenario of training unconditionally using datasets enlarged with handcrafted da and verify if effective regularizations are automatically learned. last, we expect generative models based on pre-training to be sub-optimal in tc due to lower variety in the data. to link generative models to classification needs we will consider also an end-to-end training pipeline where both classifier and generative model are learned jointly.in this paper we presented a preliminary study supporting the use of da and outlined a research agenda for adopting generative models in tc. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/545.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/545.txt new file mode 100644 index 0000000000000000000000000000000000000000..82d2bbf89d48adafe0f924dc5aee959e89b5bc4f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/545.txt @@ -0,0 +1 @@ +predicting people's trajectory in urban areas has become an essential task in fields such as traffic modeling and urban planning. people flow effects several complex traffic tasks such as taxi demand forecast and bike share rebalancing problem . travel surveys or national census have been used to capture peoples flow in the real world. recently, mobile phones and social media are used to estimate people's trajectory data. despite the importance of the problem, it is difficult to benefit from open technological development as the trajectory data is independently retained by each organization from the perspective of privacy.in such a context, the humob challenge is landmark competition as the challenge provides realistic data set of human mobility. yahoo japan co., ltd., provided the data representing the movement routes of individuals in a major urban area over a period of 90 days. we conducted the prediction of people's movement for two given tasks. for the tasks, we forecasted the movement of 100,000 people under normal circumstances in task 1, and the movement of 25,000 people in emergency situations in task 2. when predicting people's movement patterns, five general approaches can typically be considered. 1) personalized models using machine learning: each user has own prediction model. although we would like to try all the above, we adopt approach 1) personalized model considering the time limitation to the deadline. the reason why we choose 1) personalized model is based on the hypothesis that human movements are entirely unique to each individual.in the following, we describe the proposed method in section 2. we described offline evaluation and dataset creation for submission in section3 and 4, respectively. we conclude this paper in section 5. for the tasks, we forecasted the movement of 100,000 people under normal circumstances in task 1, and the movement of 25,000 people in emergency situations in task 2. we prepared two feature sets b: basic features (features 2-5) and poi features (feature 6). 
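one way to realize the clustering-derived features used alongside these feature sets (discussed in more detail below) is to assign each user a k-means cluster id and dummy-encode it; the per-user table, column names, and cluster count here are illustrative assumptions.

```python
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans

# hypothetical per-user aggregates, e.g. visit counts per poi category
rng = np.random.default_rng(0)
user_features = pd.DataFrame(rng.random((1000, 8)),
                             columns=[f"poi_cat_{i}" for i in range(8)])

# assign each user a cluster id, then dummy-encode it as categorical features
kmeans = KMeans(n_clusters=50, n_init=10, random_state=0).fit(user_features)
cluster_dummies = pd.get_dummies(pd.Series(kmeans.labels_, name="poi_cluster"),
                                 prefix="poi_cluster")
x = pd.concat([user_features, cluster_dummies], axis=1)
print(x.shape)   # original features plus one indicator column per cluster
```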
in the case of k-means, we calculated the cluster number for each user and then transformed it into feature quantities by dummy variable transformation. we tune the model based on four evaluation metrics: average geobleu (geo), standard deviation of geobleu (sdg), average dtw (dtw), and standard deviation of dtw (sdd). we compare the following four feature sets using 20 people datasets. we created models using various combinations of b, p, bc, and pc, and conducted accuracy evaluations. according to these results, the combination in pattern 9 (b, bc, pc) had the best accuracy, hence, we adopted pattern 9. according to these results, pattern 2 had the highest accuracy and was therefore adopted. we then tuned the cluster number using 100 people datasets. in pattern 9, we set the bc cluster number to 5 and 10, and the pc cluster number to 10, 50, 100, and 150, and performed accuracy evaluations. the results showed that for task 1, the highest accuracy was achieved when the bc cluster number was 5 and the pc cluster number was 50. for task 2, the highest accuracy was achieved when the bc cluster number was set to 5. the results showed that k-means had higher accuracy for both tasks, and thus, we adopted the k-means method. based on the offline evaluation, for task 1, we constructed submission models using the fundamental features of features 1-5, clustering features of the basic features (5 clusters), and clustering features related to poi (50 clusters). for task 2, we constructed submission models using clustering results of the basic features (5 clusters). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/546.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/546.txt new file mode 100644 index 0000000000000000000000000000000000000000..566d2967906be3934f23e546eb57317722da30cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/546.txt @@ -0,0 +1 @@ +markov decision process (mdp) models have proved to be remarkably effective at representing sequential decision making problems. when the model is known, dynamic programming provides an elegant framework for planning algorithms for mdps. unfortunately, in practical problems the mdp model is often unknown. this has necessitated the development of reinforcement learning (rl) methods to deal with such settings. such rl methods, when combined with deep learning architectures, have yielded a number of remarkably effective deep rl algorithms such as dqn (mnih et al., 2015;osband et al., 2016), ppo (schulman et al., 2017), etc. for very large state and action spaces. often, when the model is unknown, there is a need for some systematic exploration to facilitate efficient learning. online learning methods exactly provide such systematic exploration. the antecedents of many online reinforcement learning algorithms are actually in algorithms for bandit learning (auer et al., 2002a;russo et al., 2018). these algorithms can broadly be classified into two categories: optimism-based, and posterior sampling (ps)-based. while there is an elegant theory for both, often ps algorithms are found to have superior empirical performance. on the whole, with the developments over the last decade, there is a fairly well-developed theory of reinforcement learning (rl) for mdps, and effective algorithms for both offline and online settings. however, in many practical problems the state is not fully observable, and partially observable markov decision processes (pomdps) are better models.
even when the transition and observation models are known, the planning problem for pomdps is computationally challenging as it requires converting it into an equivalent mdp problem over the belief state space, a continuum (kumar and varaiya, 2015). nevertheless, the belief state is still a sufficient statistic of the history, and many approximate planning algorithms (shani et al., 2013;silver and veness, 2010) are based on this observation. when the model parameters are unknown, the learning for pomdps problem becomes much harder than for mdps since the belief state itself can no longer be computed. thus, unlike their planning counterparts, pomdp learning problems cannot be simply reformulated as belief-state-mdp learning problems. this represents significant challenges in an online learning setting when the learning agent must also do systematic exploration, and accounts for the lack of progress on designing effective and implementable online learning algorithms for pomdps.in this paper, we consider episodic reinforcement learning problems on finite horizon pomdps with finite state, action, and observation spaces. the exact models of the transition and observation kernels are unknown to the learning agent. we propose a posterior sampling-based reinforcement learning algorithm for pomdps (ps4pomdps), which is an adaptation of the posterior sampling (ps) method used for bandit learning (agrawal and goyal, 2012;russo and van roy, 2016) and mdp learning problems (osband et al., 2013). the ps-based algorithms are known to have superior empirical performance over optimism-based approaches in a number of bandit and mdp learning settings (agrawal and goyal, 2012;osband and van roy, 2017;ouyang et al., 2017ouyang et al., , 2019;;jafarnia-jahromi et al., 2021a,b;jahromi et al., 2022;kalagarla et al., 2023). if this is any guide, we can expect a similar observation for pomdp learning as well. compared to ps in mdps, our algorithm updates the posterior on both the transition kernel and the observation kernel. we analyze bayesian regret of the ps4pomdps algorithm in two settings, namely, (1) the general case, where no assumption on the pomdp is imposed; (2) the undercomplete α-revealing pomdps (jin et al., 2020;liu et al., 2022), which quantify the requirement that the observations must be informative to a certain degree. we show that in general pomdp learning problems, the regret is poly(s, a, o, h) • õ( (oa) h k), where k is the number of episodes and h is the horizon length. we show that the exponential dependence on h is necessary by proving an ω( √ a h-1 k) lower bound of the regret. under the assumption that the pomdp is undercomplete and α-revealing, we establish an õ α -2 h 2.5 s 2 a 0.5 o 1.5 (1 + sa/o)k upper bound on the bayesian regret.the main contributions in this paper are the following: (i) we introduce a posterior samplingbased online reinforcement learning algorithm for pomdps (ps4pomdps) that is simple and implementable, and yet has a provable sublinear regret bound that scales as õ( √ k), where k is the number of learning episodes and matches the known lower bound (chen et al., 2023), while being polynomial in the other factors under the weakly-revealing assumption. a ps-based learning algorithm for pomdps was first proposed in jahromi et al. (2022) but is computationally impractical. jin et al. (2020); zhan et al. (2022); liu et al. (2022), and liu et al. 
(2023) proposed optimism-based algorithms but unfortunately, are also computationally impractical either in the posterior update, or in the optimism step, as explained further in section 1.1. we note that our algorithm assumes access to a pomdp solver, and computationally tractable posterior updates, and indeed there is extensive literature on computationally tractable approximate pomdp solvers (shani et al., 2013) and posterior update methods (russo et al., 2018). (ii) we establish an õ α -2 h 2.5 s 2 a 0.5 o 1.5 (1 + sa/o)k upper bound on the bayesian regret that is the best known for a posterior sampling algorithm for pomdps (see jahromi et al. (2022)) and close to similar regret bounds for optimism-based algorithms (liu et al., 2022;chen et al., 2023). (iii) our proof makes use of a tighter index change lemma (see appendix a) in the setting of undercomplete α-revealing pomdps. this may prove useful in other contexts as well. furthermore, our analysis is enabled by introducing an appropriate confidence set specifically for posterior-sampling algorithms, different from those for optimism-based algorithms (liu et al., 2022;chen et al., 2023). (iv) we also introduce an algorithm for multi-agent pomdps, and characterize its performance in terms of regret bounds., 2021a,b;jahromi et al. a finite pomdp is characterized by a tuple (s , a , o, h, b 1 , t, z, r), where s is a finite set of states with |s | = s; a is a finite set of actions with |a | = a; o is a finite set of observations with |o| = o; h is the horizon length; b 1 ∈ ∆(s ) is the distribution of the initial state; t = (t h ) h-1 h=1 , t h : s × a → ∆(s ) are the transition probabilities; z = (z h ) h h=1 , z h : s → ∆(o) are the observation probabilities; r = (r h ) h h=1 , r h : o × a → are the instantaneous reward functions. for each pomdp characterized by the above tuple, we also define the following matrices: t h,a = (t h (s ′ |s, a)) s ′ ∈s ,s∈s is the s ×s probability transition matrix (where the rows represent the next state) under action a ∈ a at time h; z h = (z h (o|s)) o∈o,s∈s is the o × s observation probability matrix at time h.a (deterministic) policy π = (π h ) h h=1 is a collection of mappings π h : (o × a ) h-1 × o → a , where π h is the mapping an agent uses to choose an action at time h ∈ based on action and observation history in the current episode. under a policy π ∈ π, the probability of a trajectory τ = (o h , a h ) h h=1 is given by p π (τ ) = π(τ )p -(τ ), where. more specifically, for k ∈ n, let d k := (τ j , π j ) k-1 j=1 = (o j 1:h , a j 1:h , π j ) k-1 j=1 denote the data which the learning agent possesses at the beginning of the k-th episode, composed of trajectories and policies in the first k -1 episodes. at the beginning of the k-th episode, the learning agent chooses a random policy π k ∼ φ k (d k ) via a mapping φ k : (t × π) k-1 → ∆(π) and applies this policy throughout the k-th episode., o ≥ s, and the pomdp is α-weakly revealing, i. 
applying the standard result on the posterior sampling algorithm for linear bandits(russo and van roy, 2016)we obtain an õ(h √ o 2h+1 a h k) regret bound, where the additional o h comes from the fact that |π| = ω(a o h ).the posterior sampling for multi-agent pomdps (ps4mapomdps) algorithm works in a algorithm 2 ps4mapomdps: agent i input: prior ν 1 ∈ ∆(θ); number of episodes k for k = 1 to k do use common randomness source to sample θk ∼ ν k invoke the ma-pomdp solving oracle to obtain a policy πk ∈ arg max π∈π (v π θk ) apply πk.•;i in the k-th episode at the end of k-th episode, share the local trajectory τ k,i = (a k 1:h;i , o k 1:h;i ) with other agents use τ k = (τ k,j ) j∈to compute new posterior ν k+1 ∈ ∆(θ) using (3) end for similar way to its single-agent counterpart: at the beginning of each episode k, a common sample θk is drawn based on the latest posterior distribution based on the collective action and observation history, then the agents collectively invoke an ma-pomdp solving oracle to obtain a joint policy πk . then, for any τ = (o h , a h ) h h=1 ∈ t , using (2) and (15) we have p - θ (τ ) ≥ (1 + ǫ) -2h p - θ (τ ), establishing (11). let θk ∈ θ be such that θk = ι( θk ), then we have p π θk -p π θk tv ≤ 2hǫ = 1 k .for a full trajectory τ = (o 1:h , a 1:h ) ∈ t , let τ h = (o 1:h , a 1:h ) denote its corresponding partial trajectory up to time h and τ -h = (o h+1:h , a h+1:h ) denote its corresponding partial trajectory from time h + 1 to h.where τ h ∈ (o × a ) h is a trajectory such that a h = a, o h = o, and the first h -1 steps are made of τ h-1 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/547.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/547.txt new file mode 100644 index 0000000000000000000000000000000000000000..b80bb31b741496f0217e4d3fe11a1da2bb24dfc1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/547.txt @@ -0,0 +1 @@ +deep reinforcement learning (drl) has emerged as a powerful technique for solving complex decision-making problems, with impressive results achieved in domains ranging from gaming to autonomous driving. drl algorithms iteratively learn a strategy that maximizes cumulative rewards by dynamically interacting with the environment. although they have achieved remarkable success in many applications, drl algorithms face high computational requirements and slow learning rates, which pose significant challenges for their deployment in resource-constrained environments and real-time applications .on the other hand, knowledge distillation (kd) is a popular machine learning technique that is used to transfer knowledge from larger, more complex models (known as teachers) to smaller, simpler models (called students) . it is a highly valuable technique as it enables students to learn from the experiences of their more advanced teachers, thus transferring information that can be used to develop more accurate predictions. kd has been effectively used for a variety of purposes, particularly in reducing the computational and memory requirements of deep learning models without significantly affecting their performance . drl and kd are both popular techniques in machine learning today. however, the intersection of drl and kd remains relatively unexplored. there is relatively little research on the effects of distillation on different drl algorithms and the impact of varying distillation methods on drl algorithms' performance. 
despite their respective successes, it is still not clear how to best combine drl and kd to achieve even better results in complex decision-making problems. a key question in this area is how to effectively distill the knowledge from a teacher drl model to a student drl model, taking into account the specificities of each algorithm. answering this question could lead to more efficient and effective drl algorithms that are better able to handle resource-constrained environments and real-time applications. this paper aims to explore the applicability of kd techniques in the realm of drl. by combining the power of drl with the knowledge transfer capabilities of kd, researchers can unlock the potential for faster training, reduced computational resources, and effective decision-making in complex environments.the integration of drl with kd allows for the distillation of complex, high-performance reinforcement learning strategies into more straightforward and more efficient models. this synergy offers several benefits. firstly, it enables the development of more efficient drl models that are better suited for deployment on resource-constrained devices. by distilling various drl algorithms and analyzing their distillation effects, this research provides a benchmark for evaluating the performance of different drl algorithms refined using kd techniques.secondly, the incorporation of kd accelerates the learning process of drl models, resulting in reduced training time and cost. this acceleration is crucial for achieving faster decisionmaking in complex environments, which is particularly relevant in real-time applications such as robotics, online recommender systems, and self-driving cars. by distilling drl algorithms and exploring the combination of drl and kd, this study endeavors to develop models that require fewer gpu resources, exhibit faster learning rates, and make quicker decisions in complex environments. through this study, it is expected to shed light on the unexplored synergies between drl and kd, paving the way for more efficient and resource-friendly ai algorithms.on the other hand, knowledge distillation (kd) is a popular machine learning technique that is used to transfer knowledge from larger, more complex models (known as teachers) to smaller, simpler models (called students). there is relatively little research on the effects of distillation on different drl algorithms and the impact of varying distillation methods on drl algorithms' performance. a key question in this area is how to effectively distill the knowledge from a teacher drl model to a student drl model, taking into account the specificities of each algorithm.the integration of drl with kd allows for the distillation of complex, high-performance reinforcement learning strategies into more straightforward and more efficient models. by distilling various drl algorithms and analyzing their distillation effects, this research provides a benchmark for evaluating the performance of different drl algorithms refined using kd techniques. by distilling drl algorithms and exploring the combination of drl and kd, this study endeavors to develop models that require fewer gpu resources, exhibit faster learning rates, and make quicker decisions in complex environments. in the distillation part, the teacher network is based on the dqn/ddqn/dueling dqn algorithm, while the student network is a smaller-layered version of the dqn/ddqn/dueling dqn algorithm. 
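a common way to implement this kind of value-network distillation is to match softened action distributions between the teacher and the smaller student q-network; the temperature-scaled kl loss below is a standard policy-distillation choice and an assumption on our part, since the text does not spell out the exact loss used.

```python
import torch
import torch.nn.functional as F

def distillation_loss(student_q, teacher_q, temperature=2.0):
    """kl divergence between the teacher's and the student's softened
    action distributions, scaled by temperature**2 as in standard kd."""
    teacher_probs = F.softmax(teacher_q / temperature, dim=-1)
    student_log_probs = F.log_softmax(student_q / temperature, dim=-1)
    return F.kl_div(student_log_probs, teacher_probs, reduction="batchmean") * temperature ** 2

# typical usage on states sampled from a replay buffer filled by the teacher:
# loss = distillation_loss(student_net(states), teacher_net(states).detach())
```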
however, double dqn showed slower convergence compared to dqn before distillation yet demonstrated superior performance after distillation.distillation benefits: the improved performance of doubledqn after distillation can be attributed to the knowledge transfer process from the ddqn teacher network. the distillation process can provide a stronger initial policy for doubledqn by transferring the knowledge from the teacher network to the student network, and guiding the learning process of doubledqn. doubledqn exhibits slower convergence before distillation but shows improved performance after distillation compared to dqn. these results underline the merits of particular architectural alterations in drl algorithms as well as the advantages of knowledge distillation in improving drl algorithm performance.the results of this experiment prove that the performance of compressing the original deep reinforcement learning model by knowledge distillation far exceeds the performance of compressing the original drl model by changing the parameters. secondly, the experiment above compares the performance of dqn, drqn, double dqn and dueling dqn algorithms after distillation with the original teacher model through the simple environment of discrete-action catpole-v1. the experiment also demonstrated the feasibility and effectiveness of knowledge refinement in deep reinforcement learning, providing new ideas for actors to critique such algorithms with poor convergence, while training drl models with fewer parameters but not weaker performance than large models, a trait that can do well with less gpu resources, learn faster, and make judgments more quickly under challenging circumstances. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/548.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/548.txt new file mode 100644 index 0000000000000000000000000000000000000000..697bf844475a5c82da5530076c0e4ad89460f080 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/548.txt @@ -0,0 +1 @@ +the original vision of reinforcement learning (rl) was that of a learning agent taking actions, getting state and reward feedback and learning from it. the learning agent may have some prior information but learning mostly happens from these online interactions. this requires carefullycrafted strategies that balance exploration and exploitation, including the need for deep exploration in many sequential decision-making problems. this was regarded as a difficult problem for a long time. fortunately, over the last decade or so, substantial progress has been made on the online reinforcement learning (orl) problem. unfortunately, orl algorithms tend to be rather "datahungry", i.e., they require a lot more interaction data than other types of learning methods.in some orl problem settings, offline datasets are available. there is thus a possibility of improving learning efficiency by using offline datasets for pre-training, and then the learning agent does further fine-tuning upon deployment for online learning. for example, such offline could be demonstrations from an expert, or even a sub-expert. this allows for addressing the distribution shift problem with the offline reinforcement learning methods, i.e., policy learnt offline do not perform well because the distribution of the system upon deployment is different from the one encountered offline. 
one can also expect that when such offline datasets are available for pretraining, it would improve online learning efficiency (in terms of cumulative regret). the question is by how much, and how does it depend on the dataset quality, and indeed how do we even quantify dataset quality. furthermore, even with the same dataset, a question arises on how to extract the most from such a given dataset.an important reason via a combination of offline and online learning is important is that reinforcement learning from offline datasets can often suffer from the distribution shift problem, i.e. the distribution of the system upon deployment is different from the one encountered offline, and hence the policy learnt offline do not perform well upon deployment. learning from offline dataset first followed by online learning allows for adaptation of learnt policies to different distributions of the underlying environment.in this paper, we develop a systematic method to incorporate expert demonstrations into online learning algorithms to facilitate faster learning for infinite horizon mdps. we first propose the idealized infinite-horizon informed posterior sampling based reinforcement learning (inf-ipsrl) algorithm. under some mild assumptions on the underlying mdp, we show that the algorithm achieves o(1) + õ( √ εt ) where ε is the estimation error probability of the optimal policy given the offline data, and t is the learning horizon. the regret bound also has polynomial dependence on problem parameters of the underlying mdp. we also show that, if the offline data is generated by an expert with suitably high competence level, under certain assumptions, the regret of the inf-ipsrl algorithm goes to o(1) as the size of the demonstration dataset goes to infinity. we would also like to point out the inf-ipsrl algorithm has a simpler design compared to the tsde algorithm in ouyang et al. (2017), since the tsde algorithm uses a visitation-count-based episode schedule while the inf-ipsrl algorithm uses fixed episode schedules. however, the inf-ipsrl algorithm is still impractical due to the complicated nature of exact posterior updates. therefore we introduce the informed randomized least-squares value iteration (inf-irlsvi) algorithm which replaces exact posterior sampling with an approximation procedure. just like the rsvi algorithm of osband et al. (2019) (from which we borrowed the name), the inf-irlsvi algorithm generates approximate posterior samples via optimizing a randomly perturbed loss function.related work. recently, offline reinforcement learning, where offline datasets are incorporated (levine et al., 2020), has attracted some interest due to the widespread practice of pretraining with offline datasets in large language models (brown et al., 2020;thoppilan et al., 2022;hoffmann et al., 2022). one fundamental challenge in this line of research is the distribution shift problem, where the policy learnt from offline data performs poorly in the real world. to address this problem, jin et al. (2021); rashidinejad et al. (2021); xie et al. (2021a) adopted the pessimistic approach, which can be overly conservative in practice. in uehara and sun (2021); rashidinejad et al. (2021); xie et al. (2021a); agarwal and zhang (2022), the authors identified sufficient conditions on the dataset that guarantees a certain level of performance for offline rl algorithms. 
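the informed posterior-sampling scheme introduced above follows the generic psrl pattern with the offline demonstrations folded into the prior; the tabular setting, dirichlet prior over transitions, known rewards, and the env/mdp_solve interfaces in this sketch are simplifying assumptions, not the paper's inf-ipsrl algorithm.

```python
import numpy as np

def informed_psrl(n_states, n_actions, offline_data, env, mdp_solve,
                  num_episodes, episode_len, rng):
    """posterior sampling with an offline-informed prior: sample a model each
    episode, plan on it, act, and update the posterior with the new data."""
    counts = np.ones((n_states, n_actions, n_states))   # dirichlet pseudo-counts
    for s, a, s_next in offline_data:                    # demonstrations sharpen the prior
        counts[s, a, s_next] += 1.0
    for _ in range(num_episodes):
        sampled_p = np.array([[rng.dirichlet(counts[s, a])
                               for a in range(n_actions)]
                              for s in range(n_states)])
        policy = mdp_solve(sampled_p)                    # planning oracle on the sampled model
        s = env.reset()
        for _ in range(episode_len):
            a = policy[s]
            s_next, _reward = env.step(a)                # hypothetical simulator interface
            counts[s, a, s_next] += 1.0                  # online data updates the posterior
            s = s_next
    return counts
```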
however, determining whether the dataset meets these conditions can be a complicated problem itself, hence making these results impractical (kumar et al., 2020;nair et al., 2020;argenson and dulac-arnold, 2020;levine et al., 2020;kostrikov et al., 2021;wagenmaker and pacchiano, 2022). there have also been a few experimental work on leveraging offline data for online learning (zheng et al., 2023;feng et al., 2023;hu et al., 2023).on the other hand, there have been many works on online reinforcement learning (see russo et al. (2018) for a survey). among these works, there are two prominent approaches: the optimism under the face of uncertainty (ofu) approach (auer et al., 2008) and posterior sampling (ps) approach (osband et al., 2013;russo and van roy, 2016;ouyang et al., 2017). most work in online rl focuses on the online dataset and does not consider leveraging the offline dataset. closely related to the idea of using the offline dataset is the concept of imitation learning (schaal, 1996;hester et al., 2018;beliaev et al., 2022), where one aims to learn the behavioral policy of the expert from the offline dataset. no online finetuning is present in these works. in ernst et al. (2005); vecerik et al. (2017); rashidinejad et al. (2021); hansen et al. (2022); kumar et al. (2022); lee et al. (2022), the authors combine imitation learning with more traditional offline rl methods. in (schrittwieser et al., 2021;uehara and sun, 2021;xie et al., 2021b;agarwal and zhang, 2022;song et al., 2022;fang et al., 2022;wan et al., 2022;ball et al., 2023), the authors combined offline rl with limited online policy fine-tuning to minimize the simple regret.the original vision of reinforcement learning (rl) was that of a learning agent taking actions, getting state and reward feedback and learning from it. there is thus a possibility of improving learning efficiency by using offline datasets for pre-training, and then the learning agent does further fine-tuning upon deployment for online learning.an important reason via a combination of offline and online learning is important is that reinforcement learning from offline datasets can often suffer from the distribution shift problem, i. learning from offline dataset first followed by online learning allows for adaptation of learnt policies to different distributions of the underlying environment. under some mild assumptions on the underlying mdp, we show that the algorithm achieves o(1) + õ( √ εt ) where ε is the estimation error probability of the optimal policy given the offline data, and t is the learning horizon. we would also like to point out the inf-ipsrl algorithm has a simpler design compared to the tsde algorithm inouyang et al. recently, offline reinforcement learning, where offline datasets are incorporated(levine et al.we now introduce the informed posterior sampling-based reinforcement learning (inf-ipsrl) algorithm (algorithm 1) for infinite-horizon average mdps that combines the offline data with the powerful posterior sampling method(osband et al. 
at the beginning of episode k, the learning agent randomly samples a new environment θk , uses the planning oracle mdpsolve to find an average-reward maximizing policy πk for θk , and apply the policy πk for the duration of episode k.where š′ k = šk+1 for all k except when k = n -1, in which case š′ k = sn .therefore, the (pre-randomization) imitation loss in (8) can be written as a weighted sum of the individual imitation loss defined above imitation loss in (8) = s∈s n (s)l il (β, θ, s)constant therefore, we have shown that the randomized loss function (9) can be viewed as the sum of the rlsvi loss related to online learning, and the randomized imitation loss related to imitation learning.in this paper, we have introduced an ideal bayesian online reinforcement learning algorithm, inf-ipsrl, that naturally uses any offline data available for pre-training to boot-strap online learning.we then presented an approximate bayesian online learning algorithm, inf-irlsvi, that bridges online learning with imitation learning.recall that t k = k-1 l=1 t k is the starting time of learning episode k. let dk := d 0 ∪{(s 0 , a 0 , • • • , s t k -1 , a t k -1 , s t k )} denote the offline and online data the learning agent used to form a posterior to sample the environment for the k-th learning episode. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/549.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/549.txt new file mode 100644 index 0000000000000000000000000000000000000000..96f6cf8f592a7436086b8dd5ddfad599825b677e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/549.txt @@ -0,0 +1 @@ +large language models (llms) have been dominating the field of natural language processing (nlp) since the introduction of the transformer architecture (vaswani et al., 2017) and the first large encoder-based (devlin et al., 2019, bert), decoder-based (radford et al., 2018, 2019, gpt), and full transformer (raffel et al., 2020, t5) pretrained models. more recently, large transformerbased models began to expand beyond natural lan-* equal contribution guages, most notably to the area of programming languages, where they are being applied to tasks such as code understanding and code synthesis (also referred to as text-to-code generation), or code translation (lu et al., 2021a;wang et al., 2021;tipirneni et al., 2022). often, models addressing specific tasks build upon large pre-trained (code) language models, i.e. "foundation models" (bommasani et al., 2021), and apply further fine-tuning on task-specific data under a fully supervised lm or imitation learning paradigm.for code synthesis, that is, the generation of programming language code conditioned on a natural language prompt, reinforcement learning (rl) has recently gained traction as an alternative or complementary training method (le et al., 2022;wang et al., 2022). these approaches make use of the fact that, as opposed to natural language, correctness of code is relatively straight forward to evaluate through compilation (syntax) and unit tests/functional testing (semantics).however, rl approaches that train on code functionality rely on data with corresponding unit tests, that are often only available in very limited quantities or not at all. this is in stark contrast to approaches that employ language modelling objectives, trainable on large quantities of crawled data. 
overcoming this lack of unit test paired data has the potential to greatly improve the results achievable with such methods.in this work, we also employ reinforcement learning to improve the performance of a pretrained code language model on the code synthesis task. modelling the reward based on unit test pass rates, we introduce a simple yet effective rl approach to fine-tuning code synthesis models, based on policy gradients and a simple feed-forward critic model. in an effort to overcome the data sparseness issues of previous approaches, we introduce a heuristic approach to generate large data of natural language problems, function signatures, and unit tests, and show that the automatically generated data can help further improve code synthesis performance. finally, we release our code, models, new dataset of problems, signatures, and unit tests. 1, 2017)and the first large encoder-based(devlin et al.for code synthesis, that is, the generation of programming language code conditioned on a natural language prompt, reinforcement learning (rl) has recently gained traction as an alternative or complementary training method(le et al.in this work, we also employ reinforcement learning to improve the performance of a pretrained code language model on the code synthesis task. modelling the reward based on unit test pass rates, we introduce a simple yet effective rl approach to fine-tuning code synthesis models, based on policy gradients and a simple feed-forward critic model. in an effort to overcome the data sparseness issues of previous approaches, we introduce a heuristic approach to generate large data of natural language problems, function signatures, and unit tests, and show that the automatically generated data can help further improve code synthesis performance. (2022), proposed, among other things, to generate code in an rl environment, and reward those snippets which were able to compile, while punishing the others. some code datasets have been released that contain code along with associated tests, such as mbpp(austin et al. during rl training and evaluation, we employ unit tests, which allow for semantic evaluation of function level code, that is, they precisely define the expected behaviour of the code in terms of computation results.owing to the unit test-based approach to training, and follow previous work(chen et al. however, unit tests are not usually available for the code provided, or where they are, they might be provided indirectly as part of the code description or in other implicit ways, making them hard to extract. in cases where we make use of automatically extracted code and unit tests, but have to drop the original code, we simply leave the initial replay buffer of the corresponding valid solutions in algorithm 1 empty, and populate it over time with found valid solutions.overall, we find that both, our actor-critic training method, as well as our method of augmenting the available training data automatically with unit test generation and conversion can contribute positive impacts on code synthesis model performance. to provide additional learning signals, we furthermore introduced a method of extracting function-level strongly-typed code from large crawled code datasets, automatically generating unit tests for extracted functions, and conversion for use with a weakly typed target language. (2022)is significantly larger and of a different architecture than our ∼300m parameter gpt2-like plm, making direct comparison difficult. 
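the unit-test-based reward described above boils down to a pass-rate over the tests associated with a problem; this simplified sketch executes candidate code directly (a real system would sandbox it) and is not the released implementation.

```python
def unit_test_reward(candidate_code: str, unit_tests: list) -> float:
    """reward = fraction of unit tests the generated function passes;
    code that fails to execute at all receives a fixed penalty."""
    namespace = {}
    try:
        exec(candidate_code, namespace)          # define the candidate function(s)
    except Exception:
        return -1.0
    passed = 0
    for test in unit_tests:                       # e.g. "assert add(2, 3) == 5"
        try:
            exec(test, namespace)
            passed += 1
        except Exception:
            pass
    return passed / len(unit_tests) if unit_tests else 0.0
```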
on this test data, both our models trained on the augmented data (ours+aug), as well as on all available training data (ours+all) are better than the model trained on mbpp alone, showing that learning on more ut increases the overall model performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/55.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/55.txt new file mode 100644 index 0000000000000000000000000000000000000000..f8a96c4ee3aed582f5f2e33a582062225dcf2544 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/55.txt @@ -0,0 +1 @@ +collaboration is a process where two or more agents work together as partners to achieve a shared goal . collaboration is a key challenge for creating artificial intelligence (ai) technologies that enhance human capabilities and empower people and society. we expect collaborative ai to serve as a catalyst for the maturation and integration of ai technologies, enabling novel applications with important potential for economic and societal impact. collaboration at each level builds on abilities at the lower levels to determine solutions, procedures, tasks, and actions as shown on the right. common ground for collaboration requires shared comprehension of situations (states), intentions, expertise and problems, as shown on the left. common ground can be reached through explanation, instruction, demonstration and experience.reactive collaboration assumes a form of tightly-coupled interaction where the actions of each agent are immediately sensed and used to trigger actions by the other.situational collaboration requires that people and systems share a situation model, including a common vocabulary for entities, relations and situations, and the actions that result in changes of situation. sharing comprehension of a situation through explanation is an important challenge for collaboration with intelligent systems that is beyond the current state of the art. information at the operational level includes the current and desired situations, their expression as intentions, goals and sub-goals, tasks and sub-tasks, and plans of actions that can be used to attain the desired situation. operational collaboration can be facilitated by a shared comprehension of the current situation, desired goal, and plans of actions that can be used to attain the goal.an ability to comprehend a human explanation for a plan of action, including authorizations or limits to authority remains an important challenge for collaboration with intelligent systems.praxical collaboration involves the exchange of knowledge about actions and procedures for collaboration based on experience or training. an important challenge in this area is to develop a technology for systems that can acquire and refine praxical abilities for protocols for socially correct interaction through training, explanation and experience, balancing pre-programmed abilities with learned behaviors. the emergence of powerful new machine learning techniques that can acquire reactive, situational, operational and praxical knowledge offers the possibility of building intelligent systems that can amplify human capabilities through creative collaboration. for situational and operational collaboration, a system must be able to comprehend explanations of the current and desired situations and to generate explanations of its own comprehension to share with a partner. 
for operational collaboration, an explanation can provide a description of the sequence of intended actions that can take a situation to a desired state, as well as a description of the sequence of intermediate situations that can be used to ensure the proper execution of actions and operations. for praxical collaboration, explanations can be used to share knowledge about how to obtain information and coordinate actions based on habits and social norms. this provides an important new technology that appears well-suited for linguistic communications at all levels, including sharing information about situations, agreeing on tasks at the operational level, as well as sharing praxical knowledge and communicating for creative collaboration. each level of the hierarchy concerns interaction with distinct forms of information: sensori-motor signals for the reactive level, entities and relations for the situational level, tasks and plans for the operational level, domain specific knowledge about how to perceive and act for the praxical level, and problems, hypotheses and solutions at the creative level. an obvious example is in computer games and virtual worlds (the metaverse) where enabling virtual characters with abilities for situation understanding, operational collaboration and creative problem solving can have enormous impact. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/550.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/550.txt new file mode 100644 index 0000000000000000000000000000000000000000..556ce6590b86f68e195bfc9ac9b9397233512b98 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/550.txt @@ -0,0 +1 @@ +comparing the performance of different software and hardware architecture in the field of deep neural networks is a challenging endeavor. to establish a benchmark for comparison between various architectures, one must identify relevant metrics comparable across a wide variety of architectures and systems. those metrics must have an adequate level of precision. however, the literature shows that benchmarks seldom comprise metrics other than the time required for the execution, the accuracy of algorithms, and the number of parameters and necessary multiply-adds - . this approach is relevant in a paradigm where algorithms need to run faster and more accurately, with little regard to the marginal cost of increased performance. however, this approach is not optimal if the objective is to optimize the power consumption of the algorithm, be it out of ecological concern or due to constraints on the hardware available. such purposes require specific metrics. we advocate that comparing systems should be done using more criteria. the aim is to balance out accuracy and efficiency, specifically in power efficiency. this goal can be achieved by using scores. one of the first scores in the literature, characterizing the trade-off between accuracy and complexity, was introduced in as:with the idea of representing the amount of accuracy captured by a single parameter. however, the number of parameters does not always correlate well with the complexity of a network . especially, convolutional neural networks comprise few parameters but are computationally expensive. another iteration in neural network scoring, netscore, taking multiplyaccumulates (macs) into account, was thus introduced in . 
they propose netscore, which offers a more comprehensive view of the accuracy-efficiency trade-off of a neural network and seems especially relevant for convolutional neural network scoring. to elaborate upon this work, we want to introduce something that better reflects efficiency - especially in terms of power efficiency and the match between hardware and software. this paper presents a new score that uses measurements rather than technical information on the network. we believe introducing measurements in neural network scoring is necessary to characterize a neural network's behavior accurately. to obtain the necessary metrics, we introduce tub.ai, a new open-source tool that provides researchers with more diverse and granular metrics on their systems. we believe this methodology can be used in many fields - autonomous vehicles/drones, spaceborne applications, high-performance computing - and not only in ai. it is used to measure both software and hardware performance. this paper also presents a benchmark of our score computed on various nn architectures during inference. scoring was realized on various hardware platforms. this paper will first provide an overview of the motivation behind our new score and the tools we use in section ii. we will then take measurements during inference using tub.ai in section iii, before presenting the scores obtained by several state-of-the-art nn architectures in section iv. (the final version of this paper appears in the proceedings of the 56th edition of ieee's international symposium on circuits and systems (iscas), 2023.) to establish a benchmark for comparison between various architectures, one must identify relevant metrics comparable across a wide variety of architectures and systems. netscore offers a more comprehensive view of the accuracy-efficiency trade-off of a neural network and seems especially relevant for convolutional neural network scoring. we believe introducing measurements in neural network scoring is necessary to characterize a neural network's behavior accurately. the proposed score (eq. 4) is based on the average power consumed to obtain one correct inference. we found that the power consumption per inference was sufficiently dependent on the speed of inference (the slower the inference, the higher the power consumption per inference) not to include the speed of inference as is. this metric aims to be a tool to evaluate the efficiency of different neural network architectures. third, by observing the factor of proportionality between our scores and the a100 score (in small fonts next to each of our scores), we can see that some architectures (inception resnetv2, efficientnetv2 large) are much more penalized than other architectures (mobilenet, nasnet) when changing setup. despite having the highest average power consumption of all platforms, the a100 has the highest score across all architectures. while higher-grade hardware has higher idle and under-load energy usage, it usually exhibits a lower power consumption per inference due to increased inference speed. it also calls for more work on the architecture of neural networks to identify the limiting factor across different hardware settings and obtain the best hardware-software fit. we hope that it will be more commonplace in the future that some benchmarks rank neural network architectures using not only the validation accuracy but also more metrics, especially power consumption measured on both training and inference.
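as a rough illustration of the measurement-based score discussed above, the python sketch below computes an average power per correct inference from measured quantities; the variable names and the exact normalization are assumptions made for illustration and are not claimed to match the paper's equation (4) precisely.

def power_per_correct_inference(avg_power_w: float,
                                total_time_s: float,
                                n_inferences: int,
                                accuracy: float) -> float:
    """Energy (joules) spent per correct inference, from measurements.

    avg_power_w  : mean power draw during the run, in watts
    total_time_s : wall-clock duration of the inference run, in seconds
    n_inferences : number of inferences performed in that run
    accuracy     : top-1 accuracy of the model on the evaluated samples
    """
    energy_j = avg_power_w * total_time_s   # total energy spent during the run
    correct = n_inferences * accuracy       # expected number of correct predictions
    return energy_j / correct

# e.g. a 50 W device running 10,000 inferences in 120 s at 80% accuracy
score = power_per_correct_inference(50.0, 120.0, 10_000, 0.80)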
the score values we have obtained across several architectures show that there can be significant discrepancies between platforms for the same neural network -which proves the interest of scoring using measurements. to our best knowledge, this is the first time power efficiency measurements have been included in the performance evaluation of neural network architectures. in a context of growing concern for the ecological impact of machine learning and high-performance computing in general, it is a step forward in the field of hpc where the main focus is not solely the algorithm's or architecture's performance but also their power efficiency. using identical neural network architectures implemented on different frameworks and running them on various test hardware, we would like to evaluate how well available deep learning frameworks can exploit the hardware's capabilities. we would be especially interested in seeing the results of neural network architecture search using this new score as the optimizing criterion. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/551.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/551.txt new file mode 100644 index 0000000000000000000000000000000000000000..3131e627d78cc4d17815fb6820004a5e97dad484 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/551.txt @@ -0,0 +1 @@ +in recent years, deep auc maximization (dam), focusing on developing deep learning models that directly optimize the area under the receiver operating characteristic curve (auc), has gained growing importance. different methods of dam, such as optimizing the auc margin loss (aucm) and compositional dam , have been successfully applied to medical image classification to improve the auc performance. these methods demonstrate superior performance in large-scale medical image classification tasks such as chexpert and melanoma compared to optimizing traditional loss functions, such as cross-entropy loss (ce) and focal loss.although dam methods work well on large datasets, they still face the challenge of vulnerability to overfitting when training on imbalanced datasets with a small size. typically, dam losses place more emphasis on the positive class with fewer samples, and as the overall data volume decreases, the samples of this class become scarce and limited, which leads to the overfitting problem. in this context, mixup augmentation is an effective solution by introducing soft labels into training and generating much more data using convex combinations of samples. with mixup augmentation, the focus of dam would shift from the minor class to the combination of samples from different classes, which mitigates the overfitting issue.however, existing dam losses are developed on hard labels and thus are incompatible with mixup augmentation. different from traditional loss functions that are defined over individual data, dam losses are non-decomposable, making the incorporation of soft labels more complicated. to address the problem, we propose an auc-mixup loss by replacing conditional means in the min-max auc margin loss with soft means by using soft labels. our goal is to utilize the auc-mixup loss to improve dam and compositional dam methods in medical tasks with a small number of data by incorporating mixup augmentation. we validate our method on imbalanced benchmark and medical image datasets, including several 3d datasets, to demonstrate the superiority in generalization performance over two standard dam baselines. 
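the mixup augmentation referred to above can be sketched as follows: pairs of samples and their labels are combined convexly, which produces the soft labels mentioned in the text. this is a generic mixup sketch in python/numpy, not the paper's auc-mixup loss itself.

import numpy as np

def mixup_batch(x: np.ndarray, y: np.ndarray, alpha: float = 0.2):
    """Return a mixed batch (x_mix, y_mix) with soft labels.

    x : (batch, features) inputs, y : (batch, n_classes) one-hot labels.
    """
    lam = np.random.beta(alpha, alpha)       # mixing coefficient in (0, 1)
    perm = np.random.permutation(len(x))     # partner samples for the convex combination
    x_mix = lam * x + (1.0 - lam) * x[perm]
    y_mix = lam * y + (1.0 - lam) * y[perm]  # soft labels replace the hard labels
    return x_mix, y_mix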
different methods of dam, such as optimizing the auc margin loss(aucm) and compositional dam, have been successfully applied to medical image classification to improve the auc performance. these methods demonstrate superior performance in large-scale medical image classification tasks such as chexpertand melanomacompared to optimizing traditional loss functions, such as cross-entropy loss (ce) and focal loss.although dam methods work well on large datasets, they still face the challenge of vulnerability to overfitting when training on imbalanced datasets with a small size. to address the problem, we propose an auc-mixup loss by replacing conditional means in the min-max auc margin loss with soft means by using soft labels. our goal is to utilize the auc-mixup loss to improve dam and compositional dam methods in medical tasks with a small number of data by incorporating mixup augmentation. we validate our method on imbalanced benchmark and medical image datasets, including several 3d datasets, to demonstrate the superiority in generalization performance over two standard dam baselines. auc margin loss is shown to enjoy better robustness than auc square loss. an improvement of minimizing the auc margin loss from scratch is compositional dam, which minimizes a compositional objective function, where the outer function corresponds to the auc margin loss and the inner function represents a gradient descent step for minimizing a cross-entropy (ce) loss. with respect to benchmark datasets, we choose the cat&dog, cifar-10, cifar-100, and stl-10 and construct a binary imbalanced version of these datasets by following the instruction of yuan et al. we split all datasets into training set, validation set, and test set to conduct cross-evaluation for tuning hyperparameters, and report the auc score on the test set(the means and standard deviations of three runs). in the experiments, we apply auc mixup strategy to dam for vanila training from scratch (auc-mixup) and compositional training (ct-mixup). we choose four methods with different losses as baselines: cross-entropy loss (ce), focal loss (focal), aucm loss, and compositional auc loss (ct-auc).0 on all datasets. we can observe that (i) the auc-mixup helps achieve the highest auc scores on all datasets; (ii) the auc-mixup strategy usually yields an improvement of varying degrees compared to the corresponding dam methods without using auc-mixup; (iii) the auc-mixup is competitive if not better than ctmixup, which indicates that employing the auc-mixup loss for training from scratch can eliminate the additional compositional training overhead without sacrificing the prediction performance. we further show the learned feature representations of dam methods on the breastmnist training data in figure1, which illustrates that employing the auc-mixup loss obtains better feature representations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/552.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/552.txt new file mode 100644 index 0000000000000000000000000000000000000000..c26b30f137b6e749e8dc336ffc77b9f12d8e085a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/552.txt @@ -0,0 +1 @@ +bounds for wtd-lba. let us for simplicity consider the restricted setting of thm. 1.2, and just one of the bags given by s 1 . let x be the matrix whose rows are the feature vectors of s 1 and y be the vector given by their labels. 
let w be the weight vector sampled for s 1 yielding x t := w t x and w t y as the output corresponding to this bag. suppose for the moment that y belongs to the column-space of x, i.e. there is h s.t. y = xh, and thus w t y = w t xh = x t h. in this case therefore, given x t := w t x there is no randomness in the output w t y and any change from y to y ′ in the ith coordinate can be detected as long as x i ∈ s 1 and w i = 0 (see appendix j for a detailed explanation).the above leads us to the (more realistic) case of y being sufficiently far from being in the columnspace of x which is ensured by opt((x, y), f) = inf r y -xr 2 2 ≥ αk (*). our key finding is that for any choice of x t , the conditional distribution on w t y is a µ-mean gaussian with variance precisely equalling opt((x, y), f) (see eqn. ( 5)). further, except with ≈ exp(-ω( √ k)) probability over the choice of the conditioning, any perturbed y ′ additively changes the mean and variance by at most o(1). we prove a deviation bound for perturbed gaussians -a strengthened version of the gaussian mechanism differential privacy from incorporating shift in the variance -to obtain (ε, δ)-label-dp for such good conditionings, where δ = exp(-ω( √ k)). integrating over the choice of x t we obtain the same label-dp guarantees overall.while we assume (*) in the above sketch for a bag s 1 , to ensure the condition we prove (i) a lower bound on the lhs of (*) for linear regressors r with bounded norm k, and (ii) another lower bound for linear regressors exceeding the norm bound k. for proving (i) (as also in several other parts of our proofs) we employ union bound on an appropriately fine-grained net (see ) over the relevant class of linear-regressors to transform probabilistic bounds for individual linearregressors to high probability bounds for the entire class. to ensure (ii) we use a matrix chernoff bound to translate a lower bound λ * on the minimum non-zero eigenvalue of q (see eqn. ( 1)) into a nearly same bound for q restricted only to the feature-vectors in s 1 .for the utility bound we first require that the loss val of any linear-regressor in f 0 on the input dataset be preserved with high probability on the aggregated output dataset. we observe that the mse-loss for each bag (after fixing it) is distributed as the square of a mean-zero gaussian. in particular, given the bags, the expected (over the aggregating weights) mse-loss is ∑ (x,y) yr t * x 2 where the sum is over all the mk examples in the bags. applying the hoeffding bound we obtain high probability concentration for this sum, with the next step being to bound the sum of squared mean-zero gaussians whose sum of variances is bounded, which can be done using gaussian concentration bounds. to complete the utility argument, we also need to show that w.h.p. any bad linear regressor remains bad for the lba output. finally, we use the aforementioned net argument to obtain a high probability bound for all regressors in f 0 . overall we obtain a (1 ± o(1))opt multiplicative bound on the lba loss w.r.t. the loss val on the input dataset for all linear-regressors in f 0 , except with probability ≈ exp(-ω(m)). the utility result in theorem 1.2 follows trivially.bounds for noisy-wtd-llp. in this case a fixed ρ-fraction s of the labels are noisy i.e., iid n(0, 1) noise is added to them. using the hoeffding bound, for a random k-size bag, ≈ ρ-fraction of its points are noisy except with probability exp(-ω(k)). 
this additive noise along with appropriate tail bounds on the magnitude of the aggregation weights for this bag along with the gaussian deviation bounds mentioned above we show that except with exp(-ω(k) + o(log m)) probability over the choice of the bag and its aggregation weights, the random additive noise in the aggregate label of the bag implies (ε, δ)-label-dp where δ = exp(-ω( √ k)), for any constant ε > 0. taking an expectation over the conditioned choice of the bag and its aggregation weights, along with a union bound over all the bags yields δ = exp(-ω(for the utility bound, we first show that for anyto do this, we show w.h.p concentration of both these quantities -involving multiple applications of the hoeffding and gaussian concentration bounds, along with appropriate tail bounds on the magnitude of the weights w ij and noise g i . the next step is a net based argument except here we use one over the vector of the neural-network's weights, along with the l-lipshcitzness bound on its output w.r.t. perturbation in its weights. this enables us to show that w.h.p. ( †) holds for all f ∈ f 1 , and the utility result in theorem 1.3 follows trivially.experiments. we evaluate wtd-lba and noisy-wtd-llp for linear and neural regression respectively on two large-scale regression datasets. our experiments reinforce our theoretical guar-antees for wtd-lba and noisy-wtd-llp, demonstrating that they preserve the utility even with a small number of sampled bags (see appendix k). let x (j) := ∑ k r=1 w jr x (i jr ) and y (j) = ∑ k r=1 w jr y (i jr ) .let f be the class of all homogeneous linear regressors r t x and f 0 ⊆ f be those where r 2 ≤ b 3 for some b 3 > 0. . output dllp := {(i jr , x (i jr ) , j, w jr )}, y (j) := ∑ k r=1 w jr ỹ(i jr ) m j=1 . we consider the class f 1 of neuralnetworks f characterized by its d ′ weights s f which satisfy: s f 2 ≤ b 4 for all f ∈ f 1 , and the following lipshcitzness property:. in this case therefore, given x t := w t x there is no randomness in the output w t y and any change from y to y ′ in the ith coordinate can be detected as long as x i ∈ s 1 and w i = 0 (see appendix j for a detailed explanation). , s m sampled in wtd-lba satisfy the following condition: for any s j (j ∈ ) letting x be the matrix with rows {x (i jr ) } k r=1 and y be the column vector (y (i jr ) ) k r=1 , the condition of lemma 3. . then, its aggregated feature vector x = ∑ k r=1 w jr x (i jr ) and the aggregated label y = ∑ k r=1 w jr y (i jr ) where g ∼ n(0, i). let f * = argmin f ∈f 0 val( d, f ) and f * * = argmin f ∈f 0 val(d, f ), then our result follows by the following argument. we consider the class f 1 of neural-networks f characterized by its d ′ weights s f which satisfy: s f 2 ≤ b 4 for all f ∈ f 1 , and the following lipshcitzness property:. given access to it, the objective is to minimize val( dllp , f ) = ∑ m j=1 (y (j) -∑ i∈s j w ij f (x (i) )) 2 , where opt( dllp ,.in this section, we shall prove that 1 n val( d, f )θ ≤ 1 mk val( dllp , f ) ≤ 1 n val( d, f ) + θ for all f ∈ f 1 with probability at least that mentioned in theorem 1. let f * = argmin f ∈f 1 val( dllp , f ) and f * * = argmin f ∈f 1 val( d, f ), then our result follows by the following argument.notice here that g jr ∼ n(0, w 2 jr (∑ k r=1 w jr (y (i jr )f (x (i jr ) ))) 2 ). 
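the weighted aggregation used by wtd-lba, x^(j) = ∑_r w_jr x^(i_jr) and y^(j) = ∑_r w_jr y^(i_jr), can be illustrated with the following python sketch; the uniform bag sampling and gaussian aggregation weights are assumptions made for illustration rather than the paper's exact procedure.

import numpy as np

def aggregate_bags(x: np.ndarray, y: np.ndarray, m: int, k: int, rng=None):
    """Sample m bags of size k and return their weighted aggregates.

    x : (n, d) feature matrix, y : (n,) labels.
    Returns x_agg : (m, d) aggregated features and y_agg : (m,) aggregated labels.
    """
    rng = np.random.default_rng() if rng is None else rng
    n, d = x.shape
    x_agg = np.empty((m, d))
    y_agg = np.empty(m)
    for j in range(m):
        idx = rng.choice(n, size=k, replace=False)  # bag S_j of k example indices
        w = rng.standard_normal(k)                  # aggregation weights (assumed gaussian)
        x_agg[j] = w @ x[idx]                       # x^(j) = sum_r w_jr x^(i_jr)
        y_agg[j] = w @ y[idx]                       # y^(j) = sum_r w_jr y^(i_jr)
    return x_agg, y_agg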
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/553.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/553.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c217ac5cd65d0f9c005787e13c4f6fcb951aa0e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/553.txt @@ -0,0 +1 @@ +prompt engineering (pe) first emerged in the field of large language models (llms) in 2020, as researchers realized that well-designed prompts could significantly enhance the capabilities of llms without additional model training . the development of pe can be contextualized within the larger scope of natural language programming -an increasingly prevalent paradigm that allows for the manipulation of computational systems through natural language, thus offering a more intuitive alternative to traditional programming languages. much like the transition from machine language to higher-level languages like c marked a significant leap in expressive power and ease of use, prompt engineering -or natural language programming in a broader sense -represents a further evolutionary leap, making it easier than ever to instruct machines in performing complex tasks. when implemented properly, pe can yield dramatic performance improvements, particularly in the context of advanced llms such as gpt-4 and claude. in these sophisticated models, the gap between well-engineered and poorly conceived prompts can be stark, reinforcing the critical role of effective pe in leveraging the full potential of llms.initially, the focus of pe was on single-round prompting, a mechanism suited for relatively straightforward tasks. however, as the need for more complex problem-solving through natural language programming became evident, the field saw a shift towards more intricate forms of engagement, such as multi-round and even multi-agent interactions with llms . this evolution in pe bears a striking resemblance to the historical trajectory of optimal control theory , which itself originated from the need for point-to-point trajectory optimization and later expanded its scope to accommodate dynamic systems with feedback mechanisms.the growing complexity of multi-round pe interactions presents significant challenges. traditional pe approaches often rely on heuristic or empirical methods that, while effective in specific scenarios, lack a systematic foundation amenable to rigorous analysis. this highlights the pressing need for a unified mathematical framework that can serve as a descriptive foundation and facilitate optimization of multi-round pe dynamics.the primary aim of this paper is to introduce a novel optimal control framework tailored for multiround interactions with llms. unlike previous works with limited theoretical scopes , our approach offers a comprehensive mathematical structure for the systematic design, analysis, and optimization of pe methods, broadening its applicability to include ensemble and multi-agent strategies.adopting an optimal control perspective holds the promise of evolving pe along a trajectory similar to that of optimal control theory itself. initial methodologies in pe mainly focused on single-round prompts, comparable to point-to-point trajectory optimization problems . as optimal control theory incorporated feedback mechanisms for handling complex systems, our framework is designed to accommodate both single-round and multi-round interactions. 
this shift aims to offer a coherent understanding of the dynamics governing these intricate exchanges and to foster innovative applications transcending current limitations.to realize these objectives, our methodology employs optimal control to conceptualize multi-round llm interactions. while acknowledging existing gaps in mathematical rigor due to poorly understood metrics in discrete language spaces, the framework aims to serve as a unified lens for qualitatively evaluating existing pe techniques. thus, it lays the groundwork for potential improvements in pe by providing an intuitive, structurally coherent approach to model extended dialogic interactions.contributions of this paper are summarized as follows:1. we introduce a novel optimal control formulation that unifies a wide range of existing methods under a single mathematical framework. this provides a rigorous foundation for analyzing and improving prompt design.2. we highlight theoretical challenges revealed by the framework, specifically regarding the formalization and optimization of multi-round interactions. while complex, these issues offer exciting directions for future studies to deepen the mathematical understanding of pe.3. our perspective yields valuable insights into the inherent capabilities and limitations of current techniques. these could catalyze innovations in pe, pushing the boundaries of human-computer interaction.4. we extend the framework to ensemble pe methods and multi-agent pe, serving as an important stepping stone for studying complex interactions with llms.we note that the primary aim of this paper is not to present new theoretical results or algorithmic improvements substantiated by experiments; rather, we introduce an optimal control framework to systematize and interpret existing pe methods, thereby laying the groundwork for future rigorous analysis in the domain of pe.the remainder of this paper is structured as follows. section 2 elaborates on the pivotal concepts in pe and introduces the optimal control framework designed to systematize pe. in this section, we also shed light on the significance of multi-round interactions, highlighting the challenges and opportunities that multi-round pe presents. in section 3, we review several well-established pe methods, integrating them into the proposed optimal control framework and elucidating the new insights that emerge from this integration. section 4 is dedicated to extended pe methodologies, such as ensemble and multi-agent pe strategies. we illustrate how minor adaptations to the proposed framework can accommodate these more sophisticated, yet potent, pe methods. the paper concludes with section 5, where we summarize our contributions. we extend the framework to ensemble pe methods and multi-agent pe, serving as an important stepping stone for studying complex interactions with llms.we note that the primary aim of this paper is not to present new theoretical results or algorithmic improvements substantiated by experiments; rather, we introduce an optimal control framework to systematize and interpret existing pe methods, thereby laying the groundwork for future rigorous analysis in the domain of pe. section 2 elaborates on the pivotal concepts in pe and introduces the optimal control framework designed to systematize pe. in our optimal control framework, the enlargement of the prompt candidate set p t is permitted only after the associated information for new prompts has undergone scrutiny. 
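viewed through the optimal control lens sketched above, a multi-round pe procedure can be illustrated as a simple greedy loop: in each round the prompt that maximizes the evaluation function over the current candidate set is selected, and the candidate set is updated from the observed response. the python sketch below is only an illustration of this reading; llm, evaluate and update_candidates are hypothetical placeholders, and the greedy selection is not the paper's prescribed solution method.

def multi_round_pe(query, candidates, llm, evaluate, update_candidates, rounds=3):
    """Greedy multi-round prompt selection sketch.

    query             : the task z^q
    candidates        : initial prompt candidate set P_1
    llm(prompt)       : returns a response z^r (placeholder)
    evaluate(r, q)    : evaluation function f(z^r; z^q) (placeholder)
    update_candidates : builds P_{t+1} from (candidates, prompt, response) (placeholder)
    """
    best_response, best_score = None, float("-inf")
    for _ in range(rounds):
        # query the model with every candidate prompt and score the responses with f
        responses = {p: llm(p) for p in candidates}
        prompt = max(responses, key=lambda p: evaluate(responses[p], query))
        response = responses[prompt]
        score = evaluate(response, query)
        if score > best_score:
            best_response, best_score = response, score
        # enlarge or refine the candidate set P_{t+1} using the newly observed response
        candidates = update_candidates(candidates, prompt, response)
    return best_response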
in our optimal control formulation (1), the task (or query) is denoted by z^q, and the prompt candidate set is denoted by p_t, which is updated based on the preceding response z^r_{t−1}. additionally, the prompt candidate set p_t often comprises an extensive array of potential prompts, thus inflating the action space of the optimal control problem considerably. pe encompasses three pivotal elements: the evaluation function f, the prompt candidate set p_t, and methods for solving the optimal control problem. for each aspect, we first describe the task at hand and offer an interpretation of the highlighted pe methods within the context of our proposed optimal control framework. the subsequent prompt z^p_{t+1} is constructed based on the previous round's prompt z^p_t and the response z^r_t, by appending z^r_t to the end of the hint array z^p_t. nevertheless, through the first t interactions and a fixed prompt template, ltm effectively expands its prompt candidate set p_{t+1} to include all these sub-tasks, which makes p_{t+1} a much better prompt candidate set compared with p_1. building upon pe methods such as php and ltm, which employ an evolving p_t as the prompt candidate set, the implications for optimal control theory are substantial. the final response z^r to the task is formulated using an ensemble function en(•) applied to all these responses: z^r = en({z^r_i}_{i∈I}), where en(•) represents the ensemble strategy in use. then the prompt candidate sets are designated as p_{(i,t)}, the prompt for the i-th agent at time-step t is denoted as z^p_{(i,t)}, and the corresponding response is z^r_{(i,t)}. the extended framework for multi-agent pe is given as follows: max_{τ_i} max_{z^p_{(i,t)} ∈ p_{(i,t)}} ∑_{i∈I} f_i(z^r_{(i,τ_i)}; z^q_i), s.t. the response dynamics of each agent i ∈ I. as discussed in various sections of this paper, the proposed framework grants a unified perspective on pe methods, has enabled us to propose various possible improvements to existing pe methods, and has illuminated new directions for future research.
the performance of the learner is evaluated in terms of regret, which is defined by regret_T := ∑_{t=1}^{T} f_t(x_t) − min_{x∈X} ∑_{t=1}^{T} f_t(x). this regret measures the difference between the cumulative loss of the learner's strategy and the minimum possible cumulative loss had the sequence of loss functions {f_t(x)}_{t=1}^{T} been known in advance and the learner been able to choose the best fixed decision in hindsight. in many real-world scenarios, the decisions are often subject to some constraints such as budget or resources. in the context of online convex optimization (oco), where the learner has access to complete information about the loss functions, a projection operator is typically applied in each round so that the decisions belong to the constraint set (zinkevich 2003, hazan et al. 2016). however, such a projection step is typically a computational bottleneck when the feasible region is complex. to address the issue of the projection step, mahdavi et al. (2012) consider online convex optimization with long-term constraints, where the learner aims to generate a sequence of decisions that satisfies the constraints in the long run, instead of being required to satisfy the constraints in all rounds. they introduce the cumulative soft constraint violation metric defined by v^soft_T := ∑_{t=1}^{T} g_t(x_t), where g_t(x) ≤ 0 is the functional constraint to be satisfied. later, yuan and lamperski (2018) consider a stricter notion of constraint violation referred to as cumulative hard constraint violation, which is defined by v^hard_T := ∑_{t=1}^{T} max{g_t(x_t), 0}. this metric overcomes the drawback of cumulative soft constraint violation, and it is suitable for safety-critical systems, in which failure to satisfy the constraints may result in catastrophic consequences. to see that the notion of cumulative hard constraint violation is a stronger metric, let us consider the example discussed in guo et al. (2023). given a sequence of decisions whose constraint functions are {g_t(x_t)}_{t=1}^{T} with T = 1000 such that g_t(x_t) = −1 if t is odd and g_t(x_t) = 1 otherwise, we have ∑_{t=1}^{τ} g_t(x_t) ≤ 0 for any τ ∈ {1, 2, . . . , T}; however, the constraint g_t(x) ≤ 0 is violated in half of the rounds. on the other hand, the notion of hard constraint violation can capture the constraint violation since we have v^hard_T = 500. thus, the conventional definition of cumulative soft constraint violation v^soft_T cannot accurately measure the constraint violation, but cumulative hard constraint violation v^hard_T can. many existing algorithms for bco with constraints proposed in prior works typically involve projection operators, as do algorithms for oco with constraints (agarwal et al. 2010, zhao et al. 2021), and are generally limited to simple convex sets. chen et al. (2019) and garber and kretzu (2020) consider projection-free algorithms for bco, but constraint violation bounds have not been reported. some studies have extended algorithms for oco with soft constraints to the bandit setting (mahdavi et al. 2012, cao and liu 2018); however, these algorithms cannot be directly extended to bco with hard constraints. in other words, there has been no algorithm that can simultaneously achieve sub-linear bounds on both regret and cumulative hard constraint violation. the present study focuses on the particular case of multi-point feedback in bco with constraints, in which the loss functions are convex or strongly convex, and constraint violation is evaluated in terms of hard constraints.
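the alternating example above can be checked numerically; the short python sketch below computes both violation metrics for g_t(x_t) = −1 (t odd) and +1 (t even) with T = 1000, confirming that every prefix sum of the soft metric stays at or below zero while the hard metric equals 500.

T = 1000
g = [-1 if t % 2 == 1 else 1 for t in range(1, T + 1)]   # g_t(x_t) for t = 1..T

v_soft = sum(g)                          # cumulative soft violation: 0
v_hard = sum(max(v, 0) for v in g)       # cumulative hard violation: 500

# every prefix sum of g is <= 0, so the soft metric never flags a violation
prefix_ok = all(sum(g[:tau]) <= 0 for tau in range(1, T + 1))
print(v_soft, v_hard, prefix_ok)         # -> 0 500 True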
this kind of problem widely appears in real-world scenarios such as portfolio management problems, in which the manager has concrete constraints to be satisfied but only has access to the loss function f t (•) at several points close to the decision x t . we present a penalty-based proximal gradient descent method which attains both o(d 2 t max{c,1-c} ) regret bound and o(d 2 t 1-c2 ) cumulative hard constraint violation bound, where d is the dimensionality of the feasible region and c ∈ [ 1 2 , 1) is a user-determined parameter. our proposed algorithm is inspired by a gradient estimation in the bco literature (flaxman et al. 2005, agarwal et al. 2010) and an algorithm for oco with hard constraints (guo et al. 2022). ., f t (x t ). (2023).with the full knowledge of loss functions {f t (x)} t t=1 and constraint functions {g t (x)} t t=1 in all rounds, the offline constrained oco is formulated as the following convex optimization problem:.submit x t+1 , incur loss f t+1 (x t+1 ) and observe constraint g t+1 (x). of eq.at round t, where we find the decision x t+1 ∈ x , since we do not have the prior knowledge of the loss function f t+1 (x) to be minimized, we estimate the loss by the first-order approximation at the previous decision x t as f t+1 (x) = f t (x t ) + ∇f t (x t ), xx t . to prevent the constraint from being severely violated, we also introduce the rectified lagrange multiplier λ t associated with the functional constraint g t (x) ≤ 0, and add the penalty term λ t g + t (x) to the objective function (6), which is an approximator of the original penalty term θ t g t (x), where θ t is the lagrangian multiplier associated with the constraint g t (x) ≤ 0. (5).where the first inequality follows from the triangle inequality, the second inequality follows from the cauchy-schwarz inequality, the third inequality follows from ∇f (x) 2 ≤ lip(f ) for any lipshitz continuous function f and for any x ∈ x , and the last inequality follows from lip( f t ) = lip(f t ).8) let x ⊆ r d be a convex set. let {x t } t t=1 be a sequence of decisions generated by algorithm 1 and let x ⋆ ∈ x be an optimal solution to the offline oco of eq. similar to the argumant inflaxman et al. (7) is also strongly convex with modulus σ t , namely, h t (y) ≥ h t (x) + ∇h t (x), yx + σt 2 yx 2 2 for any x, y ∈ x . let {x t } t t=1 be a sequence of decisions generated by algorithm 1 and let x ⋆ ∈ x be an optimal solution to the offline oco of eq. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/555.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/555.txt new file mode 100644 index 0000000000000000000000000000000000000000..8220293a810c5a43d2e797ad72ae0cbf85e01c82 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/555.txt @@ -0,0 +1 @@ +spectral clustering - has been widely applied in practice due to its ability to exploit the non-euclidean property of data. spectral clustering originates from the graph cut problem, e.g., ratio cut , normalized cut , balanced cut , improved normalized cut . the procedure of spectral clustering and its variants usually consist of two phases: (1) construct a graph and calculate the relaxed solution; (2) compute the discrete solution. in general, the first step is to convert an arbitrary dataset into a graph so that the clustering is equivalent to partitioning a graph into several cohesive disjointed subsets of vertices, which is a well-known graph cut problem. 
since most graph cut problems are np-hard, most spectral clustering models turn to solve the continuously relaxed problem, which is usually easy to compute the optimum. after obtaining the continuous solution, an essential step is to compute an approximated discrete solution according to the continuous solution, which corresponds to step 2.compared with step 2, the strong extensions of spectral clustering - prefer to focus on step 1, i.e., how to construct an effective graph that captures the potential topology of data. specially, clr and can attempt to directly construct a graph with c connected components (where c is the number of clusters) so that step 2 could be omitted.although step 1 has been extensively investigated in recent decades, the study of step 2 is relatively limited. the most popular technique to discretize the continuous solution is to run k-means on the relaxed solutions . it is a heuristic method since it does not aim to find the optimal discrete optimum, even when k-means converges to its optimum. literature provides a convincing explanation: k-means can compute the nearly optimal partitions from the relaxed solution provided that the graph is easy to be cut to c connected components. another technique is to directly find the closest discrete solution regarding euclidean distance, namely spectral rotation , . it can be also regarded as heuristic since the closest solution regarding euclidean distance is usually not the optimal solution, which is elaborated in succeeding sections.aiming at designing a reliable method to compute the discrete solution from the continuous optimum, we propose a non-heuristic discretization algorithm and the contributions are summarized as follows: (1) inspired by the first-order gradientbased algorithms, a non-heuristic algorithm is proposed in this paper, which is the first non-heuristic method to the best of our knowledge. the proposed framework bridges the original graph cut functions and discretization algorithm via the gradient. (2) although simply finding the nearest discrete solution under euclidean distance is unreliable, we theoretically show that starting from the continuous optimum is beneficial and meaningful. (3) experiments strongly verify the effectiveness of our idea. the proposed method significantly outperforms other discretization methods on numerous datasets. since the non-heuristic method is aware of the original graph cut problem, the final discrete solution is more reliable and achieves the preferable loss value. we also theoretically show that the continuous optimum is beneficial to discretization algorithms though simply finding its closest discrete solution is an existing heuristic algorithm which is also unreliable. the procedure of spectral clustering and its variants usually consist of two phases: (1) construct a graph and calculate the relaxed solution;(2)compute the discrete solution. in general, the first step is to convert an arbitrary dataset into a graph so that the clustering is equivalent to partitioning a graph into several cohesive disjointed subsets of vertices, which is a well-known graph cut problem. since most graph cut problems are np-hard, most spectral clustering models turn to solve the continuously relaxed problem, which is usually easy to compute the optimum. 
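as a concrete reference point for the two-phase procedure described above, the following python sketch computes the relaxed solution from the graph laplacian and then applies the common k-means discretization (the heuristic step 2 discussed in the text); it is a generic spectral clustering sketch using numpy and scikit-learn, not the paper's proposed non-heuristic method.

import numpy as np
from sklearn.cluster import KMeans

def spectral_clustering_kmeans(W: np.ndarray, c: int) -> np.ndarray:
    """Two-phase spectral clustering on a similarity graph W (n x n), c clusters."""
    d = W.sum(axis=1)
    L = np.diag(d) - W                    # unnormalized graph laplacian
    # phase 1: relaxed solution = eigenvectors of L for the c smallest eigenvalues
    eigvals, eigvecs = np.linalg.eigh(L)
    F = eigvecs[:, :c]                    # continuous (relaxed) indicator matrix
    # phase 2: heuristic discretization of the relaxed solution via k-means
    labels = KMeans(n_clusters=c, n_init=10).fit_predict(F)
    return labels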
after obtaining the continuous solution, an essential step is to compute an approximated discrete solution according to the continuous solution, which corresponds to step 2. aiming at designing a reliable method to compute the discrete solution from the continuous optimum, we propose a non-heuristic discretization algorithm and the contributions are summarized as follows: (1) inspired by first-order gradient-based algorithms, a non-heuristic algorithm is proposed in this paper, which is the first non-heuristic method to the best of our knowledge. (figure 1: empirical illustration of our motivation; g† represents the closest discrete solution regarding euclidean distance and g* represents the optimal discrete solutions.) with the definition of ∆, if ∥∆_1∥ = ∥∆_2∥ and ∥∆_1∥ is small enough, the corresponding inequality holds, and the second important question is raised: is the continuous optimum f* necessary? or formally, is the term ∥f*r − g∥ beneficial to the discretization algorithms? to answer this question, we define a lower-bound metric, ρ(∆), of ∥∆∥, using the fact that the laplacian matrix l is positive semidefinite. to better show the performance of different discretization methods and guarantee reproducibility, we choose the first 10-15 samples to show the difference between the optimal discrete solution and the solutions returned by diverse discretization methods. to fairly show the feasibility of introducing gradient information, we collect 4 different methods as competitors, including k-means (km), k-means on normalized f* (km-norm), spectral rotation (sr), improved spectral rotation (isr), and directly solving normalized cut (dnc). the primary metric is the value of the graph cut functions defined in problem (1), since the goal of the discretization algorithms is to find solutions that minimize problem (1). however, a fundamental assumption is that the graph cut model used is suitable for clustering on these datasets, which implies that a better solution leads to better clustering results. if a discrete solution attains a small objective value but results in a bad clustering partition, it indicates the inappropriateness of the graph cut problem. we first theoretically and empirically show the drawbacks of existing discretization algorithms and therefore propose a first-order term to obtain a preferable discrete solution and meanwhile reduce the difficulty of solving the original np-hard problem.
this reduction type of approach stands to reason as the computational models of gnns, wesfeiler-leman/color refinement algorithms are intimately connected: they all fall under the paradigm of trying to discern something about the global structure of a graph from local neighborhood computations. in that regard, it has been proven that the color refinement algorithm precisely captures the expressivity of gnns. more precisely, there is a gnn distinguishing two nodes of a graph if and only if colour refinement assigns different colours to these nodes. this results holds if one supposes that the size of the underlying neural networks are allowed to grow with the size of the input graph. hence, in his survey, emphasizes the fact that this equivalence has been established only for unbounded gnn, and asks: can gnns with bounded size simulate color refinement? in , the authors answer by the negative if the underlying neural network are supposed to have rectified linear unit (relu) activation functions. in the authors provide a generalization of this result, for gnns with piecewise polynomial activation functions. furthermore, explicit lower bounds on the neural network size to simulate the color refinement can be derived for piecewise-polynomial activation functions given upper bounds on the number of regions of a neural network with piecewise-polynomial activation.the second line of research to study the expressive power of gnns is to characterize the types of boolean queries interpreted over labeled graphs that a gnn can simulate. given a boolean query q (taking as input a graph, or a graph and one of its vertices), does there exist a gnn and whose output characterizes the output of q? for example, can a gnn express if a vertex of a graph is part of a clique of given size? furthermore, can we characterize the set of queries that can be simulated by gnns? in that context if the size of the gnn depends only on the to grow with the size of the input, graph, then we say that the gnn expresses the query uniformly. uniform expressivity is interessing from a practical standpoint as it captures the expressivity of gnns of fixed size with respect to the input graphs.several mathematical answers to such questions have been already obtained. notably, a complete description of the logic of the queries that the gnns can express uniformly have been proved. this suitable formal logic interpreted over labelled graphs is a two variable fragment of graded model logic (gc2). any gnn expresses a query of this logic, and conversely, any query of this logic can be expressed by a gnn whose size and iterations only depends on the depth of the query . for specific activation functions such as relus, the size of a gnn required to express a given query of gc2 does not depend on the size of the input graph, but only on the number of subformulas (or depth) of the query. the known proofs of this result provide an explicit construction of a gnn with relu activations that expresses a given query. in recent results , the author provides a more general description of the logics expressible by gnns, and also treat the non-uniform case. the uniform case is obtained for rational piecewiselinear activations (or equivalently, rational relus), and a non-uniform result is presented for gnns with general arbitrary real weights and activation functions. the author also presents new results about the expressivity of gnns if random initialisation is allowed on the features of the vertices. 
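the color refinement procedure that the results above compare gnns against can be sketched as follows; this is the standard 1-wl style refinement on an adjacency-list graph, written in python for illustration (the relabeling of signatures is an implementation choice, not taken from the paper).

def color_refinement(adj, init_colors, rounds):
    """Run `rounds` iterations of color refinement (1-WL).

    adj         : dict mapping each vertex to a list of its neighbors
    init_colors : dict mapping each vertex to its initial color/label
    Returns the final color of every vertex.
    """
    colors = dict(init_colors)
    for _ in range(rounds):
        new_colors = {}
        for v in adj:
            # a node's new color is determined by its own color together with
            # the multiset of its neighbors' colors
            signature = (colors[v], tuple(sorted(colors[u] for u in adj[v])))
            new_colors[v] = signature
        # relabel signatures with compact ids so colors stay small and hashable
        palette = {sig: i for i, sig in enumerate(sorted(set(new_colors.values())))}
        colors = {v: palette[new_colors[v]] for v in adj}
    return colors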
in this article, we focus on uniform expressivity and consider the following question: what is the impact of the activation and aggregation functions on the logic uniformly expressed by gnns?main contribution. in this article we show a separation between polynomial and non-polynomial activations (and in particular, piecewise linear activations) with respect to the logic expressible by gnns with those activation functions. more precisely, we prove that gnns with polynomial activation and aggregation functions cannot express all gc2 queries (uniformly), although gnns with piecewise linear activations and a linear aggregation function can. this results holds even if: i) the weights of the polynomial gnns are arbitrary real numbers with infinite precision, and ii) the weights of the gnns with piecewise polynomial are restricted to integers (also, the underlying neural networks are supposed to have finitely many linear pieces). this shows how the power of graph neural networks can change immensely if one changes the activation function of the neural networks. our result constitutes an additional step towards an complete understanding of the impact of the activation function on the formal expressivity of gnns.the rest of this article is organized as follows. section 2 presents the definitions of gnns and the background logic. in section 3, we state our main result and compare it to the existing ones. section 5 presents an overview of the proof of our main result, as well as proofs of the technical lemmata are presented. we conclude with some remarks and open questions in section 6. given a boolean query q (taking as input a graph, or a graph and one of its vertices), does there exist a gnn and whose output characterizes the output of q? for example, can a gnn express if a vertex of a graph is part of a clique of given size? furthermore, can we characterize the set of queries that can be simulated by gnns? in that context if the size of the gnn depends only on the to grow with the size of the input, graph, then we say that the gnn expresses the query uniformly. when there is no ambiguity about which graph g is being considered, n (v) refers to the set of neighbors of v in g not including v. for any number of hidden layers k ∈ n, input and output dimensions w 0 , w k+1 ∈ n, a r w0 → r w k+1 neural network with σ activation is given by specifying a sequence of k natural numbers w 1 , w 2 , • • • , w k representing widths of the hidden layers and a set of k + 1 affine transformations t i : r wi-1 → r wi , i = 1, . each vertex v is attributed an indicator vector ξ 0 (v) of size ℓ, encoding the color of the node v: the colors being indexed by the palette {1, • • • , ℓ}, ξ 0 (v) = e i (the i-th canonical vector) if the color of the vertex v is i.given a number of colors ℓ and a colored graph g = (v (g), e(g), p 1(g), .. we interpret φ with (g, v) as a s-structure, where g is a graph and v one of its vertices (and the assignment that maps x to v).where g = (v, e) is a graph and v ∈ v , is an interpretation with g as the s-structure and the assignment β maps x to v. given a set x, an embedding ξ is a function that takes as input a graph g and a vertex v ∈ v (g), and returns an element ξ(g, v) ∈ x. given a graph g, and v ∈ v (g), let (g, v) → col(g, v) be the function which returns the color of the node v. 
then, there exists a gnn returning an embedding ξ t such that for graph g and any vertex v ∈ v (g), ξ t ∈ {0, 1} d , and for any.the overall gnn will take as input the graph g as well as for each node of v, ξ 0 (v) ∈ {0, 1} ℓ encoding the colors of each node of g; and after l iterations, outputs for each node a vector ξ d (v) ∈ {0, 1} d . then there exist x * ∈ s and u ∈ n p such that for any x ∈ s -{x * }, x * , u > x, u . , k m ], s) be the embedding of the tree displayed in figure 1 obtained via a gnn with polynomial activation and aggregation functions after t iterations, where ξ 0 (v) = 1 for all vertices v ∈ v (t [k 1 , . namely, for any integer p ≥ 2, let q p (s) := ¬ ∃ ≥1 x(e(s, x) ∧ ∃ ≤(p-1) se(x, s)) = ∀xe(s, x)∃ ≥p se(x, s) q p queries if vertex s has neighbors whose degree are all at least p. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/557.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/557.txt new file mode 100644 index 0000000000000000000000000000000000000000..02190a7e1d9f17c2139d9e8ce7732422dfc1f775 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/557.txt @@ -0,0 +1 @@ +people have started to realize the importance of elearning systems as they were compelled to attend or perform online classes after the breakout of covid-19 pandemic. under these circumstances, adaptive technologies have been wielded by many institutes with the objective of creating appropriate education environments where students can experience better learning gains . adaptive educational systems enhance and support the learning process monitoring learners' characteristics and making adjustments accordingly .as shute and zapata-rivera stated , utilization of adaptive technologies requires identifying students' characteristics-e.g. level of knowledge, skills, personality traits-as accurately as possible. regarding this identification phase, some students may have more than one character or learning trait when measured by relevant scales. also, there is another potential problem which comes in when there are more than one scale applied to students in order to group them. this grouping process may possibly be done by hand if the quantity of students is relatively small and there is only one scale applied however it may be hard enough to group them as the number of students is getting larger and there are multiple scales. to address these problems, machine learning techniques can be used for facilitating the grouping and reducing the workload that instructors undertake.the vast chunk of the industry is commencing to utilize the machine learning algorithms to enhance the quality of their service. in the past, researchers have done studies on the usage of clustering from which it can be also inferred that many research papers are getting published about implementation of machine learning algorithms into education like merceron & yacef did on clustering students to help instructors in the evaluation phase. in this context, this study can provide a new perspective of grouping students for instructors. this grouping process may possibly be done by hand if the quantity of students is relatively small and there is only one scale applied however it may be hard enough to group them as the number of students is getting larger and there are multiple scales. 
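such a grouping can be automated with standard unsupervised learning tools; the python sketch below standardizes answers coming from scales with different ranges, optionally reduces dimensionality with pca, clusters the students with k-means, and reports a silhouette score. the concrete parameter choices (number of clusters, number of retained components) are illustrative, not the study's final settings.

import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score

def cluster_students(answers: np.ndarray, n_clusters: int = 4, n_components: int = 5):
    """Cluster students from their raw Likert-scale answers.

    answers : (n_students, n_items) matrix mixing items from scales with
              different ranges (e.g. 5-point and 7-point Likert items).
    """
    X = StandardScaler().fit_transform(answers)           # put all items on the same scale
    X = PCA(n_components=n_components).fit_transform(X)   # optional dimensionality reduction
    labels = KMeans(n_clusters=n_clusters, n_init=10).fit_predict(X)
    return labels, silhouette_score(X, labels)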
this being the case, researchers have conducted studies using unsupervised learning techniques such as clustering students for helping teachers in the evaluation part, clustering moodle data for profiling students, clustering lifestyle factors-physical activity, alcohol consumption, diet quality and smoking habit-of studentsand so on. but the ones that are closely related to this research paper are those in one of which researchers tried to cluster students according to their learning styleand in another students have been clustered based on their gamification behavior.however, as far as we could see, in most of the research papers on clustering students researchers have used the data in which the scales that provide information about the educational tendencies of students are found but the data which clustering was done according to consisted of one element.the type of this research is applied research which aims at clustering students according to their gamification user type and learning style by making use of unsupervised learning methods.the data used in this study comprises two datasets containing each student's both gamification user type and learning style. for obtaining the learning styles of students, grasha-riechmann student learning style scale (grslss)has been used as learning style scale that is comprised of 60 questions on 5-point likert scale and this part of the dataset is collected by applying the adaptation of grslss into turkish language.because the scales applied are on different ranges, gamification user type hexad scaleis on 7-point likert scale and grasha-reichmann learning style scaleis on 5-point likert scale, there is a need of data standardization-putting all the features of data on the same scale. firstly, we have taken a look at the distribution of students and it is observed that cluster 4 has the largest number of students with 69 students while cluster 1 having the smallest number with 43 students. the distribution of the students can be seen in the table1and figure3the means of all the students' gamification user type and learning style scores have been shown in the table2which contains the descriptive statistics.• cluster 2 has the collaborative learning style while having achiever gamification user type • cluster 3 has the dependent learning style as it shows achiever traits as gamification user type. since we have fit k-means algorithm with the student answers, not with calculated learning style or gamification user type scores, we have also tried to fit the algorithm with learning style and gamification user type scores without giving the answers of students-pca was also applied taking the first 5 components which account for over 75% of the total variance-and the silhouette coefficient yielded was 0.this study was conducted for providing instructors with a new perspective of grouping students when there is a need of clustering students which cannot be done by hand because of the dataset size and number of scales that students get clustered according to.in this paper, we have tried to cluster students according to their gamification user types and learning styles with the objective of demonstrating that unsupervised learning techniques can be an asset for instructors when clustering students according to more than one scale describing students' learning styles and behavior in educational games. 
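as a rough illustration of the pipeline described above (standardizing the likert-scale answers, optional pca, k-means, silhouette score), here is a hedged scikit-learn sketch; the student answers are synthetic stand-ins and the number of clusters and the variance threshold are illustrative choices, not the study's.

```python
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score

rng = np.random.default_rng(42)
# stand-in data: 200 students, 60 learning-style items (1-5) and 24 user-type items (1-7)
learning_style = rng.integers(1, 6, size=(200, 60))
user_type = rng.integers(1, 8, size=(200, 24))
answers = np.hstack([learning_style, user_type]).astype(float)

# put all items on the same scale before clustering, since the scales have different ranges
scaled = StandardScaler().fit_transform(answers)

# optional dimensionality reduction, keeping enough components for ~75% of the variance
reduced = PCA(n_components=0.75, random_state=0).fit_transform(scaled)

kmeans = KMeans(n_clusters=4, n_init=10, random_state=0).fit(reduced)
print("cluster sizes:", np.bincount(kmeans.labels_))
print("silhouette:", silhouette_score(reduced, kmeans.labels_))
```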
the reasons could be these: the scales applied to the students may not be appropriate enough for this kind of clustering, k-means algorithm may not be doing well on such data where the data consists of ordinal categorical variables, data preprocessing may not be done suitably. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/558.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/558.txt new file mode 100644 index 0000000000000000000000000000000000000000..369dd6915c4961f09e652e93f029db9e4534b2d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/558.txt @@ -0,0 +1 @@ +machine learning (ml, bishop 2006) techniques are designed for predictive modelling, data analysis, and decisionmaking and are applied in numerous fields such as medicine, finance, and social sciences. one of the most significant challenges in ml is dealing with observational data, which is collected without controlling for the variables of interest or the confounding variables. this type of data can lead to biased results, hindering the generalizability and interpretability of the ml models. to reduce the number of dimensions and avoid memory and modelling issues like the curse of dimensionality, ml models are trained over a set of variables identified by feature selection (li et al. 2017), which usually chooses the features with the largest association with the target, without inspecting the causal relationships (peters, janzing, and schölkopf 2017).causal discovery is the process of identifying causal relationships among variables from observational data without external interventions (spirtes et al. 2000;eberhardt 2017). its primary purpose is to undercover the direction of causality and estimate the magnitude and uncertainty of causal effects. this approach not only helps in developing more accurate and robust models but also provides a deeper understanding of the underlying mechanisms governing the data (pearl 2009).the importance of observational causal discovery for time series data in ml lies on the fact that a model trained on causal features enables researchers to predict the outcomes of an intervention or a policy change in a robust and interpretable way (schölkopf 2022). this is particularly useful in situations where it is not feasible or ethical to intervene, such as in healthcare, social sciences, or economics (imbens and rubin 2015). furthermore, considering a physical system such as climate science, the causal knowledge of direct and indirect effects among variables can help to understand the physical system, filtering the effect of spurious correlations. causal discovery can also guide the choice of the features by performing a causal feature selection that identifies those with the most relevant flow of information to the target. however, causal discovery for time series is challenging on complex systems due to unobserved confounders, high dimensionality, autocorrelated variables, and time lags (runge et al. 2019a). moreover, causal discovery algorithms usually focus on theoretical guarantees on the (asymptotic) convergence to the real causal graph, or to an equivalence class of graphs, without inspecting the effect of the selection of the candidate causes of a variable on its prediction.contributions in this paper, we propose a novel methodology at the intersection between feature selection and causal discovery. we start in section 2 by reviewing the main approaches and assumptions of causal discovery for time series. 
then, in section 3, we introduce the notation and formulation of the problem. in section 4, we introduce the novel causal feature selection methodology that relies on the forward and backward feature selection procedures and leverages the transfer entropy (te) (schreiber 2000) as building block to guide the selection of the features. te is a causal quantity specifically designed to estimate the flow of information between features and the target in time series.unlike traditional feature selection and similar to granger causality, our causal approach exploits the property of te to filter out the autoregressive component of the target in order to measure the asymmetric flow of information from a feature to it without the confounding association due to the target itself. furthermore, conditioning on the other features also makes it possible to filter out the information due to the presence of a causal variable that a feature and the target have in common. in this setting, we provide theoretical guarantees about the regression and classification error due to the reduction of features. in addition, in section 5, we analyse the finite-sample scenario, exploiting a concentration bound of a specific kernel-based te estimator available in the literature (singh and póczos 2014a). finally, in section 6, we provide numerical simulations in regression settings, showing the capability of the proposed approaches to identify causal features in synthetic experiments and to achieve competitive performance on real-world datasets. ) that form a nonlinear, discrete-time, stationary vector autoregressive process. we also denote x a ∪ x i := x a∪{i} and x a \x i := x a\{i} .causal model together with the four classical assumptions of causal discovery introduced in the previous section (causal sufficiency, acyclicity, causal markov assumption, faithfulness), we assume that the actual target may only depend on its last m values and the last l values of the features. then, considering a lag of l, m ∈ n ≥0 for the features and the target respectively, the regression error suffered by considering only the set of features x ā is bounded by:.the result of the theorem complies with the intuition that, considering a subset of features, the expected mse is bounded by the irreducible expected mse (unavoidable even when considering the entire set of features), plus an index of the information flow from the discarded features to the target t e x a →y |x ā . on the other hand, given a time series of length t , increasing the number of past values of the features (and of the target) reduces the number of available samples (at the limit case, considering l = m = t we only have one sample with the last observed target y t and all past features and target to condition on). let t (x a ) be the conditional te between the target and the discarded features x a , given its complementary x ā (t (x a ) := t e x a →y |x ā ). let also t ā = min xi∈x ā t e xi→y |x ā\xi be the minimum conditional te of a single feature among the selected ones and the target, given the other selected features, and algorithm 2: forward tefs: forward transfer entropy feature selection.intuitively, lemma 2 shows that the information flowing from x a and the feature x i to the target y is the information flowing from x a to the target plus the additional information flowing from x i to the target, given the information already flowing from the features in x a . 
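the sketch below illustrates the forward selection loop under strong simplifying assumptions: instead of the kernel-based te estimator used in the paper, it plugs in a gaussian (linear) conditional-mutual-information estimate of te_{x→y|z} with lag 1, and all function names, thresholds, and toy data are illustrative only.

```python
import numpy as np

def gaussian_cmi(data, a_idx, b_idx, c_idx):
    """I(A;B|C) for jointly Gaussian columns of `data`, via covariance log-determinants."""
    cov = np.cov(data, rowvar=False)
    def logdet(idx):
        if len(idx) == 0:
            return 0.0
        return np.linalg.slogdet(cov[np.ix_(idx, idx)])[1]
    return 0.5 * (logdet(a_idx + c_idx) + logdet(b_idx + c_idx)
                  - logdet(c_idx) - logdet(a_idx + b_idx + c_idx))

def transfer_entropy(x, y, z, lag=1):
    """te_{x -> y | z} with lag 1: I(y_t ; x_{t-lag} | y_{t-lag}, z_{t-lag})."""
    cols = [y[lag:], x[:-lag], y[:-lag]] + [zi[:-lag] for zi in z]
    data = np.column_stack(cols)
    return gaussian_cmi(data, a_idx=[0], b_idx=[1], c_idx=list(range(2, 3 + len(z))))

def forward_tefs(features, target, n_select, lag=1):
    """greedy forward selection: repeatedly add the feature with the largest conditional
    transfer entropy towards the target, given the already selected features."""
    selected, remaining = [], list(range(features.shape[1]))
    while remaining and len(selected) < n_select:
        scores = {j: transfer_entropy(features[:, j], target,
                                      [features[:, k] for k in selected], lag)
                  for j in remaining}
        best = max(scores, key=scores.get)
        selected.append(best)
        remaining.remove(best)
    return selected

rng = np.random.default_rng(0)
t = 2000
x0, x1 = rng.normal(size=t), rng.normal(size=t)
y = np.zeros(t)
y[1:] = 0.8 * x0[:-1] + 0.1 * rng.normal(size=t - 1)   # only x0 causally drives y
print(forward_tefs(np.column_stack([x0, x1]), y, n_select=1))   # expected: [0]
```

the backward variant works symmetrically, repeatedly discarding the feature whose conditional te towards the target, given the remaining ones, is smallest.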
intuitively, the equality states that the expected error when the target is estimated with its maximum probability given the features and the past of the target is equal to 1 minus the probability of correctly estimating the target with its maximum conditional expected probability given the features and the past values of the target.then, applying multiple times the chain rule of the conditional mutual information and the definition of t (x a ) = t e x a →y |x ā and t ā = min x i ∈x ā t e x i →y |x ā\x i , the following equalities hold, proving the theorem:. in particular, each node without causal parents has been associated with a feature of the real-world experiment on droughts that considers fifteen features discussed in the next subsection (x 0 for the three-dimensional dataset, x 1 , x 3 for the five-dimensional dataset, x 0 , x 5 , x 7 for the ten-dimensional one). indeed, pcmci algorithm tries to identify not only the causal features but also the time lags that have causal effects on the actual target (see remark 2 in the main paper for a broader discussion on the choice to consider all the contribution of a feature as a whole for the forward and backward tefs). com/jakobrunge/tigramite.table6contains the subset of selected features and the r 2 test score with linear regression of the three climatological datasets with five features and the one with fifteen features, considering the three values of l, m and the feature selection and causal discovery algorithms discussed above. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/559.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/559.txt new file mode 100644 index 0000000000000000000000000000000000000000..d8c1d33bbc282b7c7b8fe8d626dcbff633ab1cb0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/559.txt @@ -0,0 +1 @@ +in a world where climate change is rapidly accelerating, droughts are becoming more frequent and severe, posing a serious challenge to food security in the most vulnerable regions of our planet. in this context, communities that rely solely on rainfall for their livelihoods are especially at risk, often requiring immediate humanitarian assistance to survive . failure to act or provide adequate aid can have immense consequences, including devastating economic losses, mass displacement of people, malnutrition in infants, and elevated mortality rates due to hunger and famine . humanitarian organizations are facing a significant challenge due to the widening gap between funding and the needs of the people affected by food crises . as a result, designing effective humanitarian interventions in resource-constrained situations has become a critical issue. despite numerous comprehensive reviews, there is still a lack of solid evidence to identify the best strategies to help populations affected by crises . cash-based and voucher aid programs are considered effective in emergencies, but their cost-effectiveness varies by context . standardized methods for evaluating humanitarian interventions in food emergencies are lacking . our aim is to determine the impact of interventions, using observational causal inference to enhance intervention design, and transparency in charity, and improve humanitarian aid outcomes during extreme droughts.the horn of africa has witnessed a concerning rise in acute malnutrition, affecting 6.5 million people in 2022 . 
prolonged dry spells significantly contribute to this crisis , yet it is crucial to recognize that droughts are not the sole driver. various factors, including hydrological conditions, food production capabilities, market access, insufficient humanitarian aid, conflicts, and displacement, play a significant role . studying food security in this context is intricate, involving multiple variables, scales, and non-linear relationships. predictive machine learning (ml) techniques are not suited to understanding the causes and estimating the causal effect by default , instead, this paper focuses on causal inference, specifically assessing the impact of humanitarian interventions during the 2016, 2018, and 2022 horn of africa droughts. our aim is to demonstrate the application of causal inference for evaluating the effectiveness of cash-based interventions in food crisis scenarios. our aim is to determine the impact of interventions, using observational causal inference to enhance intervention design, and transparency in charity, and improve humanitarian aid outcomes during extreme droughts. predictive machine learning (ml) techniques are not suited to understanding the causes and estimating the causal effect by default, instead, this paper focuses on causal inference, specifically assessing the impact of humanitarian interventions during the 2016, 2018, and 2022 horn of africa droughts. observational data for causal inference has gained prominence across various disciplines, including ecology, agriculture, public policy, and earth sciences, to the best of our knowledge, this is the first effort to apply modern observational causal inference methods to evaluate humanitarian policy in a food emergency context. the contributions of our work are summarized as follows: i) identifying the overarching causal graph and the drivers of food insecurity in the horn of africa, ii) building a harmonized database with the best available data suitable to evaluate cash-based interventions, iii) the estimation of the causal effect of humanitarian interventions on malnutrition. these issues are partially attributed to the absence of causal formalism in modern machine learning (ml) systems, leading to a growing interest in causal machine learning (causalml), which incorporates causal knowledge into ml methods.to reason about the causal effects of certain random variables on others, first, we need to codify causal relations. causal inference provides a language for formalizing structural knowledge about the data-generating processwith which we can estimate what will happen to data after changes, called interventions. the canonical representation of causal relations is a causal direct acyclic graph (dag), which can encode a priori assumptions about the causal structure of interest. therefore, when modeling the causal relations of the food security system involving climate and socio-economic dynamics, multi-scale and non-linear drivers, we rely solely on the information provided by background knowledge and associated literature. 
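as a hedged illustration of this workflow (an expert-specified dag, backdoor adjustment, and a refutation check), the sketch below uses the dowhy package on synthetic data; the variable names, the toy graph, and the effect sizes are assumptions for illustration only, not the study's actual data or causal graph, and it assumes dowhy's CausalModel interface accepts a dot-format graph string.

```python
import numpy as np
import pandas as pd
from dowhy import CausalModel

rng = np.random.default_rng(0)
n = 1000
rainfall = rng.normal(size=n)                                        # confounder in this toy graph
cash_aid = (0.5 * -rainfall + rng.normal(size=n) > 0).astype(int)    # aid targets drier districts
malnutrition = 1.0 - 0.3 * cash_aid - 0.5 * rainfall + 0.1 * rng.normal(size=n)
df = pd.DataFrame({"rainfall": rainfall, "cash_aid": cash_aid, "malnutrition": malnutrition})

# the assumed graph plays the role of the expert-built dag encoding background knowledge
model = CausalModel(
    data=df, treatment="cash_aid", outcome="malnutrition",
    graph="digraph { rainfall -> cash_aid; rainfall -> malnutrition; cash_aid -> malnutrition; }",
)
estimand = model.identify_effect(proceed_when_unidentifiable=True)
estimate = model.estimate_effect(estimand, method_name="backdoor.linear_regression")
print("estimated effect of cash aid on malnutrition:", estimate.value)

# one refutation check: permute the treatment and expect the estimated effect to drop to ~0
refutation = model.refute_estimate(estimand, estimate, method_name="placebo_treatment_refuter")
print(refutation)
```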
we perform the following tests: i) placebo treatment, where the treatment is randomly permuted, and the estimated effect is expected to drop to 0; ii) random common cause (rcc), where a random confounder is added to the dataset and the estimate is expected to remain unchanged; iii) random subset removal (rsr), where a subset of data is randomly removed and the effect is expected to remain the same.given the data quality challenges and the context-driven nature of food security systems, our focus turns to data-driven causal discovery as a crucial methodology. catering to experts less versed in causality but eager to conduct further data experiments, causeme allows the execution of various causal discovery methods on time series data through an interactive interface. in this paper, we presented a novel data-driven approach for assessing the effectiveness of humanitarian interventions in food emergencies through the lens of causal inference. preliminary country-wise results did not reach statistical significance, although a singular district analysis did, prompting further steps: i) identifying more suitable treatment variables, ii) refining the causal graph with domain experts, iii) gaining insights on the spatio-temporal heterogeneity of impact of interventions through conditional average treatment effects (cate). if data allows it, causal inference can be used to assess the efficacy of interventions in specific locations, supporting targeted aid where on-ground surveys are not feasible. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/56.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/56.txt new file mode 100644 index 0000000000000000000000000000000000000000..d556060e740db650441bcaa199ddd2f38b134648 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/56.txt @@ -0,0 +1 @@ +the usage of artificial intelligence (ai) is widespread. individuals use ai in recommendation systems, maps, healthmonitoring apps, etc. they trust these applications and depend on them for several day-to-day decisions expecting them to be fair. domain experts use ai to assist their work, such as the diagnostic and therapeutic tools used by doctors. any bias in such systems can jeopardise the health of their patients and might lead to incorrect treatment or more visits to the hospital. enterprises use ai for their internal systems, such as growth predictions, recruitment services, and fraud detection, besides the services they provide to their customers through their products. biased systems would lead to a lack of credibility and business losses. research organisations use ai to further their study and discover new areas in their field. space exploration and medicinal research also use ai, where biased datasets may lead to the development of faulty products, increased budgets, and harmful consequences to society. governments, judiciary, and defence services use ai for policy proposals, delivery of citizen-centric services, legal decisions, and warfare. here, biased systems affect the rights of the citizens and can negatively affect the unprivileged class.therefore, bias mitigation and ai fairness are becoming ethical, social, commercial, and legal requirements. there is an urgent need to create a standardised holistic framework that addresses the bias concerns of various ai systems. the framework should include independent fairness ratings, transparency reports, and disclosures. 
data is the direct representation of society and the system that generates it. several well-documented cases illustrate that society has not been fair to everyone historically. even in today's times, discrimination is prevalent. when one trains ai systems using datasets representing such a society without proper checks, the systems are also highly likely to be biased. many studies focus on identifying biases and mitigating them from the databases that can fall into the data pre-processing and model training phases of the lifecycle. but datasets are not the only source of bias, as other activities are also prone to biases. lists three categories of types of biases: preexisting, technical, and emergent. categorises bias into systemic, statistical, and human biases.standard processes are required to mitigate bias and assess the fairness of a system in each phase of the ai development lifecycle. the challenges are multifold. ai fairness is context-sensitive. developers use different types of machine learning (ml) techniques and algorithms for different types of ai systems. even two similar types of systems require different approaches as each has a unique problem statement and a unique set of requirements. the consequences of biases vary for different scenarios. the sources and type of biases are also not the same for all applications. the tolerance for biases varies from case to case and also from region to region. further, the concept of fairness is region and culture-dependent. something considered fair in one country or culture may be regarded as biased in another. it is also data dependent. a face recognition model trained with european faces is more likely to be unbiased when deployed in france than in india.considering the varied requirements for each ai application, standardising a single end-to-end process for assessing fairness may not be feasible. however, most ai systems have similar development lifecycles, from framing the problem statement to their deployment and usage. standardisation of bias mitigation procedures, fairness metrics, etc. is more pragmatic at each lifecycle stage, vaguely similar to the approach followed by the seven-layer open system interconnection (osi) model for data communication. this paper proposes to handle fairness in a layered manner. for each lifecycle stage, we elaborate on the possibilities of biases, the precautions to avoid these biases, and the checklists to formalise the process. this paper aims to empower designers, developers, evaluators, and service providers of ai applications to detect bias efficiently and develop fair ai systems. this paper not only helps standardise the fairness process but also makes it more thorough. it will also help individual developers, start-ups, and small organisations develop high-quality and fair ai systems competing with big organisations. in this section, we review the contemporary work related to biases observed in ai systems used by governments and leading corporations, biases due to imperfect datasets, biases due to algorithms, the concept of fairness, and metrics used to assess fairness., 2016]demonstrates how the exclusion of african-americans resulted in their misclassification in clinical studies. by high-level expert group on artificial intelligence (ai hleg) provides a detailed checklist of guidelines for the developers working on ai systems to self assess and ensure they develop along the principles of trustworthy ai. 
excessive focus on biases in the datasets, fairness metrics, and algorithmic bias leads to ignoring other aspects affecting ai fairness. this layer also involves identifying the ai fairness technique, such as group fairness or individual fairness, which will be used throughout the rest of the layers to identify biases in the ai model. fairness score and bias index proposed by, are pivotal for this layer.the seven-layer model proposed in this paper provides a more structured, systematic, and standard approach to dealing with ai biases, required as we move from the present-day soft-touch self-regulatory conditions to a more regulated environment for ai fairness.the paper proposes to split the ai system lifecycle into seven distinct layers -requirements, context and purpose, data collection and selection, data pre-processing and feature engineering, algorithm building, ai system training, independent audit and fairness certification, and usage. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/560.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/560.txt new file mode 100644 index 0000000000000000000000000000000000000000..6f24f48b4cd0fa549060026b80720e2ce0cbdce1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/560.txt @@ -0,0 +1 @@ +wearable devices and smartphones have enabled the collection of large amounts of data that reflect human behavior patterns. this has given rise to a field called digital phenotyping, which attempts to quantify behavior using passively captured mobile data . this field has gained interest in mental health diagnosis due to its ability to capture behaviors and habits like sociability, physical activity, mobility, sleep, among others . mental health conditions are highly prevalent. however, the diagnosis and management of these conditions face limitations like barriers in seeking help or reliance on self-reports during short clinical visits . hence, digital phenotyping can be a potential tool to support decisions in a clinical setting.extensive research in digital phenotyping aims to predict mental states or mental disorders, but there are still a lot of open questions. one of them is understanding well what type of data is actually relevant for mental health assessment. from the machine learning perspective, most of the solutions do not provide sufficient information about which factors or what types of features contribute to the model's inference of certain mental state or disorder. as a consequence, it is difficult for mental health specialists to rely on model's predictions to evaluate or intervene in patient diagnosis .in this work, we study the contribution of different data types, coming from different sensors and sources of information, like gps, wifi, or phone logs for predicting mental health states using deep learning models. concretely, we focus on two particular tasks: prediction of the depression score and prediction of the self-report stress level. we use the studentlife dataset for this study. studentlife is one of the few public and available datasets for digital phenotyping.inspired by the feature analysis done in , we study the importance of features by analyzing the performance of a neural network trained with all the features extracted from passive sensing data or trained just using a group of features. we can gain insight into which features are more relevant for predicting depression or stress. 
until now, we have not found a previous study that evaluates the importance of features for prediction of the depression score and prediction of the stress level using the studentlife dataset. in our analysis, we find the wifi features, which represent the mobility patterns of a student, are the most discriminant for the neural models for both prediction tasks. additionally, phone log features show higher discrimination information for stress level classification. for reproducibility purposes, our feature extraction and models can be found in our public code repository 1 .inspired by the feature analysis done in, we study the importance of features by analyzing the performance of a neural network trained with all the features extracted from passive sensing data or trained just using a group of features. until now, we have not found a previous study that evaluates the importance of features for prediction of the depression score and prediction of the stress level using the studentlife dataset. the studentlife dataset has been widely used to study depression diagnosis,sedentary behavior prediction and activity recommendation, food purchase prediction, recommendations for improving academic performance, mood monitoringand stress level prediction-stress level prediction on the studentlife dataset has been approached in different studies. furthermore, stress prediction has been addressed as a binary classification problem, as seen in, where authors used lstms on the last 2-12h of sensor data to predict between stress / not stress.wifi (36 features): wifi data tracks the student's connection time to different locations on the campus (over 60 different locations), reflecting their mobility patterns.phone log (14 features): these features provide information about the duration in seconds of the smartphone in certain state: phone charging time, phone lock time, and light sensor (time in a dark environment). we rescale the original stress data that comes in a 1-5 scale into 3 stress levels: below-median (low stress), median (medium stress) and above-median (high stress). tableiishows that the model trained with mobility features embedded in wifi data outperforms the model using all features. wifi features, in particular, also exhibit a lower standard deviation, which supports the finding that these features have significant relevance for predicting depression. we observe that wifi and phone log features exhibit higher performance when used as individual group features, when compared to the other feature groups. wifi data captures the mobility patterns of students within the experiment, while phone log features indirectly represent sleep patterns through indicators such as phone charging and time spent in the dark environment. on the contrary, social features show the worst performance overall, suggesting that metrics like the number of calls or sms may not be highly influential for stress level prediction in this dataset. the encoded features are inspired by previous works, but other additional features could be computed from the raw data, which might also have an impact on the results.in this study we use the studentlife dataset to compare the contribution of various passive data features for stress level prediction and depression score regression. phone log features has relevance in stress level prediction, indicating the relation of sleep patterns and mental health states. 
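a minimal sketch of the feature-group ablation described here, assuming synthetic stand-in features with group sizes similar to those mentioned above and a small scikit-learn mlp; the label construction is artificial and only meant to show the comparison of "all features" against each group on its own.

```python
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.neural_network import MLPClassifier
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

rng = np.random.default_rng(0)
n = 300
# stand-in feature groups (columns grouped by data source)
groups = {"wifi": rng.normal(size=(n, 36)),
          "phone_log": rng.normal(size=(n, 14)),
          "social": rng.normal(size=(n, 6))}
# synthetic 3-level stress label driven mostly by the "wifi" block, purely for illustration
stress = np.digitize(groups["wifi"][:, 0] + 0.2 * rng.normal(size=n), [-0.5, 0.5])

def score(features):
    clf = make_pipeline(StandardScaler(), MLPClassifier(hidden_layer_sizes=(32,), max_iter=500))
    return cross_val_score(clf, features, stress, cv=5).mean()

print("all features:", score(np.hstack(list(groups.values()))))
for name, block in groups.items():
    print(f"only {name}:", score(block))
```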
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/561.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/561.txt new file mode 100644 index 0000000000000000000000000000000000000000..940e9d1a0914c08828794a3fffbb72b9e9aed8cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/561.txt @@ -0,0 +1 @@ +amid the increasing prevalence of ai in business and industries, ensuring unbiased outcomes by mitigating inherent biases and enhancing ai's explainability is essential to maintain equity and promote trust among various stakeholders. this paper presents a post-processing approach to ai fairness using causal modeling to: (1) detect algorithmic biases and (2) provide statistical remedies to correct the biases.in the context of ai fairness, we are concerned with biases that are based on protected attributes such as gender, race, religion, etc. . our goal is to ensure that outcomes from ai do not exhibit any bias based on these attributes. to demonstrate ai fairness, the most commonly used approach is to establish statistical parity. this approach attempts to equalize or maintain an acceptable level of disparity in the output from ai between protected and non-protected groups. for example, pymetrics, a company that uses ai to evaluate job applicants, mitigates bias in their ai models by ensuring that the selection rate for any protected group is at least 80% of the selection rate of the non-protected group, in accordance with us' equal employment opportunity commission . however, for certain job categories, some of the protected attributes may correlate with job performance, e.g., jobs that require physical strength. relying solely on statistical parity may lead to "positive discrimination", which may undermine the principle of meritocracy. according to this principle, employers should recruit employees based on merit, regardless of age, race, gender, religion, marital status, family responsibilities or disability .our proposed approach to ai fairness involves creating a causal model of biases; determining the statistical significance of the biases; and providing statistical remedies to correct for the biases. the advantages of our proposed approach are as follows:1. the proposed approach allows protected attributes to correlate with the underlying factor that forms the basis of decisions (e.g., job performance). as a result, it can prevent the unintentional introduction of positive discrimination in ai solutions. 2. the statistical nature of the proposed approach means that the results are easily interpretable and can be used to explain the nature of the biases introduced by an ai application and how the biases are corrected, thereby enhancing explainability and promoting trust among different stakeholders of ai. 3. our approach separates ai development from bias detection and correction, eliminating the need for de-biasing during the training or pre-training phase and significantly saving time.in the remainder of the paper, we will review some related work in ai fairness, describe our research method, present our experiments, evaluate the effectiveness of our proposed method, summarize the paper and identify a number of future research directions. 
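a small sketch of the statistical-parity check mentioned above (the 80% selection-rate rule) on synthetic selection outcomes; the column names, group proportions, and selection rates are illustrative.

```python
import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
group = rng.choice(["protected", "non_protected"], size=1000, p=[0.3, 0.7])
base_rate = np.where(group == "protected", 0.35, 0.5)        # illustrative selection rates
selected = (rng.random(1000) < base_rate).astype(int)
df = pd.DataFrame({"group": group, "selected": selected})

rates = df.groupby("group")["selected"].mean()
ratio = rates["protected"] / rates["non_protected"]
print(rates)
print("selection-rate ratio:", round(ratio, 3), "passes 80% rule:", bool(ratio >= 0.8))
```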
this paper presents a post-processing approach to ai fairness using causal modeling to: (1) detect algorithmic biases and (2) provide statistical remedies to correct the biases.in the context of ai fairness, we are concerned with biases that are based on protected attributes such as gender, race, religion, etc. for example, pymetrics, a company that uses ai to evaluate job applicants, mitigates bias in their ai models by ensuring that the selection rate for any protected group is at least 80% of the selection rate of the non-protected group, in accordance with us' equal employment opportunity commissionanalyzed how causal models can improve prediction accuracy in the presence of confounding factors using experiments based on health data. to determine if there is any bias in 𝑦 ̂, a causal model is created based on the protected attributes, the target 𝑦, and the predicted 𝑦 ̂, as enclosed by the dotted rectangle. next, using the training data, we will develop the prediction model, evaluate the gender bias introduced by the prediction model, and illustrate how causal modelling can be used to mitigate the gender bias.we will use r to create the prediction model, create the causal model, and evaluate the bias mitigation based on the causal model.in this subsection, we will use the training data to develop and evaluate (1) the prediction model and (2) the bias mitigation model based on causal modeling. our results show that when bias exists in the prediction model, causal modeling can be used to detect the bias. even though an ai model may be a black box, the post-hoc analysis provides a way to describe the nature of bias and the remedies taken to address the bias. secondly, although we have only addressed bias introduced by the prediction model in this study, we will apply causal modeling to address biases that exist within the data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/562.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/562.txt new file mode 100644 index 0000000000000000000000000000000000000000..524cf18f17b3f221f40b06944f8f8b0c3facd4c7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/562.txt @@ -0,0 +1 @@ +diffusion models have been progressively advancing the state-of-the-art in generative modelling, especially in the field of image processing . this is thanks to the usage of the diffusion processes that allows to learn complex data distributions .however, diffusion models tend to struggle when in comes to non-continuous data. this is mostly because of the fact that denoising process for discrete, discretised or tabular data is not easy to define. therefore, alex graves et al. recently introduced bayesian flow networks (bfns) to efficiently train the model and iteratively update the data parameters without forward pass. the general idea behind this technique is to change the way in which we model training data where instead of modelling a single instance, authors propose to output the parameters of the distribution that best fits the training data. the main motivation behind this concept is that by doing so, authors introduce an elegant way to directly model the discrete data distribution.in this work, we argue, that with direct modelling of parameters describing the original training data, bfns can also be used to efficiently consolidate portions of separate data chunks. 
therefore, we relate to the problem of continual learning, which tackles the ability of ml models to learn progressively as new data arrive. bayesian update is an elegant way to manage prior belief and information from new observations. however, in bayesian learning we often face the issue of turning theory into practical implementations, limiting the use of the bayesian learning paradigm .in this preliminary studies, we show the first benchmark of bayesian flow networks in continual learning setup. we show how we can adapt several known techniques that prevent catastrophic forgetting in neural networks to continually train bfns. we highlight their strengths and drawbacks and discuss future directions on how to employ bfns to continually consolidate knowledge. therefore, alex graves et al. recently introduced bayesian flow networks (bfns)to efficiently train the model and iteratively update the data parameters without forward pass. the general idea behind this technique is to change the way in which we model training data where instead of modelling a single instance, authors propose to output the parameters of the distribution that best fits the training data.in this work, we argue, that with direct modelling of parameters describing the original training data, bfns can also be used to efficiently consolidate portions of separate data chunks.continual learning (cl) gathers various approaches in machine learning that aim at reducing catastrophic forgetting, a phenomenon where models suffer from abrupt loss in performance when retrained with additional data. usually there are three standard group of approaches that try to mitigate this issue: (i) architectural approaches -methods that focus on the structure of the model itself that adds task-specific submodules to the architecture; (ii) memory approaches, methods involve storing some extra information in memory which are then used to rehearse knowledge during training in subsequent task; (iii) regularization approaches: that identify the important weights for the learned tasks and penalise the large updates on those weights when learning a new task.although bfns work on different type of data, both discrete and continuous time steps, the most approachable way to understand the dynamics is through continuous data in the discrete setting and extending it as in 6.we use neural network ψ parameterised by θ to learn the parameters ξ controlling the data distribution. the general idea is to start with uneducated guess of normal distribution with high variance, and iteratively improve the estimation of data distribution with the help of the network as we sample more and more data.rather than data points, we receive the noisy samples forming the normal distribution centered at the true values with variance purely depended on the accuracy scheduler.iteratively, for the next time steps, we apply bayesian updates to improve the input distribution (that accumulates only local knowledge about a single dimension) by the acquired knowledge from receiver (that encodes global knowledge about interactions between dimensions). our objective is to match output distribution to the data distribution indirectly by optimizing kl divergence between their noisy versions. 
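to make the iterative bayesian update concrete, here is a minimal sketch of the conjugate gaussian update used for continuous data: precisions add and means become precision-weighted averages, so the input distribution sharpens around the data as noisy samples arrive; the accuracy schedule and the toy data are illustrative assumptions.

```python
import numpy as np

def bayesian_update(mu, rho, y, alpha):
    """conjugate gaussian update of the per-dimension input distribution n(mu, 1/rho)
    after observing a noisy sample y ~ n(x, 1/alpha) of the data x."""
    rho_new = rho + alpha
    mu_new = (rho * mu + alpha * y) / rho_new
    return mu_new, rho_new

rng = np.random.default_rng(0)
x = np.array([1.5, -0.7, 3.0])                    # the (unknown) data the parameters should approach
mu, rho = np.zeros_like(x), np.full_like(x, 1.0)  # uneducated prior: broad n(0, 1)

for step in range(1, 21):
    alpha = 0.2 * step                            # toy accuracy schedule: later samples are less noisy
    y = x + rng.normal(size=x.shape) / np.sqrt(alpha)
    mu, rho = bayesian_update(mu, rho, y, alpha)

print("final means:", np.round(mu, 2), "final precisions:", np.round(rho, 2))
```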
in the first one we employ a simple buffer-based rehearsal where we store a subset of previous data examples in a buffer and use them together with new data samples when retraining a model on new tasks.to evaluate the performance of bfns in modelling categorical data in the continual learning scenario, we refer to the problem of tabular data modelling. in this work we highlight that modelling data distribution parameters does not prevent those models from catastrophic forgetting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/563.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/563.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c2f063c79dd4d49f14d328148787508a5346e73 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/563.txt @@ -0,0 +1 @@ +satellite networks offer an appealing solution for delivering ubiquitous connectivity across diverse domains such as the maritime and aeronautical markets and communication services to remote regions . current geostationary (geo) broadband satellite systems use a multibeam footprint strategy to enhance spectrum utilization. in these systems, both power and bandwidth resources are typically allocated uniformly across the various beams. while this uniform allocation simplifies resource management, it may lead to inefficiencies in scenarios with varying traffic demands. some beams may experience high demand, exceeding their available capacity, while others may have underutilized resources. this challenge has prompted research into more adaptive and dynamic resource allocation methods. in this regard, flexible payloads have emerged as an enabling technology to manage limited satellite resources by dynamically adapting the frequency, bandwidth, and power of the payload transponders according to users' demand .existing approaches aim to minimize the difference between offered and required capacity while adding constraints in terms of power , , and co-channel interference . the power allocation derived in is solved using water-filling, whereas a sub-optimal complexity game-based dynamic power allocation (ag-dpa) solution is proposed in . a modified simulated annealing algorithm, as presented in , outperforms conventional payload designs in matching requested capacity across beams, emphasizing its effectiveness. however, the intricate computational complexities associated with these algorithms can significantly limit their practical applicability within real-world systems. moreover, these approaches do not adequately consider the dynamic nature of capacity requests that change over time. in this context, machine learning (ml) algorithms emerge as a more favorable alternative, as they are able to learn from varying capacity request scenarios.ml algorithms have gained popularity in satellite communications, particularly in resource allocation . some studies explored reinforcement learning (rl) techniques to cope with the time-varying capacity; however, they introduced additional delays due to online payload controller training. also, the rl exploration phase, aimed at discovering optimal strategies through action exploration, can occasionally result in system outages or disruptions when untested actions are selected. 
in contrast, adopts a multi-objective optimization approach using supervised learning, offering an alternative perspective.in this work, we extend the ml-based method in which originally employed a convolutional neural network (cnn) for solving the rrm task as a classification problem. in this approach, the ml model's objective is to select the best payload configuration from a discrete set of power and bandwidth combinations, treated as distinct classes. this technique considers 8 beams with 12 configurations each, giving a total of 4.3 × 10 8 potential payload configurations. we expand to 10 beams with 9 configurations each, totaling 3.5×10 9 configurations. although the number of configurations decreases after applying the system constraints, incorporating more beams inevitably increases the number of classes. having many classes complicates the ml model evaluation as traditional metrics like accuracy can be inadequate, and metrics like recall may not fully depict system performance. the situation worsens when dealing with imbalanced class distributions since the models may favor dominant classes, leading to bias. to address this, we reframe the rrm task as a regression problem, incorporating rrm objectives and constraints into the ml loss function. we also introduce a new metric to assess the ml model's performance, offering an alternative and insightful way to evaluate its effectiveness in the context of rrm.this paper is organized as follows. section ii introduces the flexible payload architecture and outlines the rrm task. section iii compares regression and classification-based ml methods for flexible payload. in section iv, we present metrics for evaluating model performance, including a new ml metric for rrm. the methods are evaluated in section v. finally some conclusion remarks are included in section vi. in this approach, the ml model's objective is to select the best payload configuration from a discrete set of power and bandwidth combinations, treated as distinct classes. having many classes complicates the ml model evaluation as traditional metrics like accuracy can be inadequate, and metrics like recall may not fully depict system performance. 
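a hedged pytorch sketch of how the rrm objective and constraints could be folded into a regression loss as described above: a capacity-demand mismatch term, resource-usage terms, and soft penalties for unmet demand and the total power budget; the weights and the stand-in link-budget model are illustrative, not the paper's exact formulation.

```python
import torch

def rrm_loss(capacity, demand, power, bandwidth, power_max, lambdas=(1.0, 0.01, 0.01, 10.0)):
    """illustrative regression loss: match offered capacity to demand, penalize resource use,
    and add soft penalties for violated constraints (unmet demand, total power budget)."""
    l_match, l_pow, l_bw, l_pen = lambdas
    mismatch = torch.mean((capacity - demand) ** 2)
    resource = l_pow * power.sum() + l_bw * bandwidth.sum()
    unmet = torch.relu(demand - capacity).sum()          # constraint: capacity should meet demand
    over_power = torch.relu(power.sum() - power_max)     # constraint: total power budget
    return l_match * mismatch + resource + l_pen * (unmet + over_power)

# toy example with 10 beams
demand = torch.rand(10) * 5.0
power = torch.rand(10, requires_grad=True)
bandwidth = torch.rand(10, requires_grad=True)
capacity = 3.0 * power + 0.5 * bandwidth                 # stand-in link-budget model
loss = rrm_loss(capacity, demand, power, bandwidth, power_max=6.0)
loss.backward()
print(float(loss), power.grad.shape)
```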
we also introduce a new metric to assess the ml model's performance, offering an alternative and insightful way to evaluate its effectiveness in the context of rrm.the cir represents the ratio of the power allocated at b-th beam (p b , in dbw) to the interference power at b-th beam (i b , in dbw).where eirp3db b = p b + g b is the effective isotropic radiated power in dbw, g b is the beam gain that depends on the half power beamwidth θ 3db in dbi, g/t is the merit figure of the user terminal, a is the free space attenuation in clear sky conditions in db, and k is the boltzmann constant.the rrm aims to effectively allocate the available satellite resources such as power p b and bandwidth bw bc so that c b matches r b for each beam b = 1, • • • b over time t, avoiding resource waste.the cost function in equation (4) aims to simultaneously minimize three terms: the difference between c b and r b , the power p b (in w), and the bandwidth bw bc (in hz) across all beams for each time instant t.the constraint in equation (5) ensures that offered capacity either meet or surpass the required capacity for each beam, under the condition that both the power and bandwidth allocations for the b-th beam at time t do not exceed their upper bounds.to manage the overall power consumption, the total power b b=1 p b (t) is constrained to not surpass the prescribed upper limit p max,t in equation(7). the total bandwidth is allocated to the beams of each color c within the frequency plan comprising n c colors where b c is the number of beams with the same frequency and polarization defined by color c.in this section, we propose a cnn model to solve the rrm task as a regression problem, which involves determining the optimal payload configuration for specific traffic demands.evaluates the performance of flexible payload models, where s is the number of instances in which the offered capacity in beam b was sufficient, and m l is the number of samples is class l.when evaluating the model in terms of the system performance, we are interested in finding a payload configuration that ensures that the offered capacity satisfies the requested capacity for each beam. figure2presents the recall (green line) for each class when evaluating the ml-based flexible payload model using the classification approach (cnn c). figure2presents the flexible accuracy per class (in magenta) for the ml-based flexible payload model via regression (cnn r). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/564.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/564.txt new file mode 100644 index 0000000000000000000000000000000000000000..898b5b6de68a4b1b2989af8eed4e326627ecf3a9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/564.txt @@ -0,0 +1 @@ +reinforcement learning (rl) (sutton and barto 2018) is a sampling-based approach to learning a controller. inspired by models of animal behavior, the rl agent interacts with the environment and receives feedback on its performance in terms of a numerical reward, that either reinforces or punishes certain behaviors. this learning approach has produced impressive results in recent years (mnih et al. 2015;silver et al. 2016). however, failure to precisely capture designer's intent in reward signals can lead to the agent learning unintended behavior (amodei et al. 2016). as a response, formal languages-in particular linear temporal logic (ltl) and ω-regular languages-have been proposed to unambiguously capture learning objectives. 
while these languages have enjoyed practical success (hahn et al. 2019;bozkurt et al. 2020), their theoretical complexity is relatively underexplored. in this paper we propose and study a model-based probably approximately correct rl algorithm for ltl and ω-regular languages.probably approximately correct (pac) learning (valiant 1984) is a framework for formalizing guarantees of a learning algorithm: a user selects two parameters, ε > 0 and δ > 0. a learning algorithm is then (efficient) pac if it returns a solution that is ε close to optimal with probability at least 1 -δ using a polynomial number of samples. in rl, many pac learning algorithms have been proposed for both discounted and average reward (kakade 2003;brafman and tennenholtz 2003). these algorithms usually provide sample bounds in terms of the sizes of the state and action spaces of the markov decision process (mdp) that describes the environment. finite-horizon and discounted reward both have the property that small changes to the transition probabilities result in small changes to the value of the objective. this means that the sample complexity is independent of the transition probabilities of the mdp. however, infinite-horizon, undiscounted objectives, like average reward and the satisfaction of ltl properties, are sensitive to small changes in probabilities, and their sample complexity is dependent on some knowledge of the transition probabilities. hence, if only the number of state/action pairs is allowed, alongside 1/ε and 1/δ, as parameters, creating a pac learning algorithm for undiscounted, infinite-horizon properties is not possible. specifically for ltl, this has been observed by yang, littman, and carbin (2021) and alur et al. (2022). example 1 (intractability of ltl). figure 1 is an example adopted from (alur et al. 2022) that shows the number of samples required to learn safety properties is dependent on some property of the transition structure. the objective in this example is to stay in the initial state s 0 forever. this can be specified with average reward (a reward of 1 in s 0 and 0 otherwise) and in ltl (φ = gs 0 ). the transition from s 0 to s 1 under action b must be observed in order to distinguish action a from action b and produce an ε-optimal policy for any ε < 1. the number of samples required to see this transition with high probability is affected by the value of p. smaller values of p means it takes longer for a policy's finite behavior to match its infinite behavior.this non-pac-learnability may motivate using discounted versions of ltl (littman et al. 2017;alur et al. 2023), which, however, have significantly different semantics fromfigure 1: example adopted from (alur et al. 2022). the objective is to remain in s 0 forever.the undiscounted logic. one may argue instead that the complexity of the dynamics of an mdp is not entirely captured by the number of state-action pairs. for example, for average reward, kearns and singh (2002) use the ε-return mixing time, a measure of how fast the average reward is achieved in a particular system, for this purpose. they argue that in order to know the learning speed of an algorithm, one must know the speed at which the policy achieves the limit average reward. the r-max algorithm of brafman and tennenholtz (2003) also utilizes the ε-return mixing time. the ε-return mixing time is defined based off of a given reward function, which we do not have in our context. therefore, we require an alternative notion. 
we propose the ε-recurrence time as a way to reason about the speed at which an ω-regular objective is achieved. informally, the εrecurrence time is the expected time for a set of recurring states to be visited twice. in figure 1, the ε-recurrence time increases when p decreases. we will show that this additional parameter is sufficient for defining a pac algorithm for ω-regular objectives.contributions. we introduce a model-based pac learning algorithm for ltl and ω-regular objectives in markov decision processes. for our algorithm, we introduce the εrecurrence time: a measure of the speed at which a policy converges to the satisfaction of the ω-regular objective in the limit. we show that the number of samples required by our algorithm is polynomial in the relevant input parameters. our algorithm only requires the ability to sample trajectories of the system, and does not require prior knowledge of the exact graph structure of the mdp. finally, we demonstrate the practicality of our algorithm on a set of case studies. to demonstrate the intuition behind the ε-recurrence time being sufficient to understand long term behavior from finite trajectories, we will sketch a simple model-free algorithm for estimating the probability of satisfaction p in a markov chain m = (s, p, s 0 , f ). an (α, t )-approximation of an mdp m = (s, a, p, s 0 , f ) is an mdp m ′ = (s, a, p ′ , s 0 , f ) such that for all s, s ′ ∈ s t and a ∈ a, |p (s, a, s ′ ) -p ′ (s, a, s ′ )| ≤ α and, if p (s, a, s ′ ) = 0, then p ′ (s, a, s ′ ) = 0, where s t ⊆ s are the states reachable with positive probability in t steps from s 0 under some strategy. additionally, enough samples will yield |p (s, a, s ′ ) -p ′ (s, a, s ′ )| ≤ α with high probability.then with probability at least 1-δ, | p (s, s ′ )-p (s, s ′ )| ≤ α and p (s, s ′ ) = 0 if p (s, s ′ ) = 0 for all s ′ ∈ s. since p (s, s ′ ) = 0 implies that c(s, s ′ ) = 0 and thus p (s, s ′ ) = 0, all we need to show is that | p (s, s ′ ) -p (s, s ′ )| ≤ α with probability at least 1 -δ. if the probability to reach s ′ ∈ s from s ∈ s in at most t steps in m is p, then the probability to reach s ′ from s in at most t steps in m ′ is at least p -αn t . the unrolling of a markov chain m = (s, p, s 0 , f ) is a markov chain m x = (s x , p x , (s 0 , 0), f x ) that has the same dynamics as m , but keeps track of the visitation counts of each state. finally, let p b and p ′ b be the probability of reaching a winning bscc b ∈ b in t steps s 0 in m and m ′ , respectively. let p be the optimal probability of satisfaction in m, and let p σ and p ′ σ be the probability of satisfaction in m and m ′ under σ, respectively. we say that a state-action pair s ∈ s, a ∈ a in m is α-accurate if for all s ′ ∈ s, | p (s, a, s ′ ) -p (s, a, s ′ )| ≤ α and if p (s, a, s ′ ) = 0 then p (s, a, s ′ ) = 0. let m ′ = (s, a, p ′ , s 0 , f ′ ) be an mdp such that the transition probabilities for all known state-action pairs are identical to m, are ε n t -accurate for unknown state-action pairs that are reachable in t steps from s 0 with positive probability under some strategy, and are accepting sinks otherwise. therefore, since the optimal probability of satisfaction p in m is an upper bound on the probability of satisfaction in m ′ , by the construction of m, π is optimal in m ′ . 
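as a rough illustration of the model-based ingredient (estimating transition probabilities from sampled trajectories and marking state-action pairs as known after enough visits, in the spirit of r-max), here is a minimal sketch on the two-state example from figure 1; the visit threshold, episode counts, and helper names are illustrative assumptions, not the algorithm's exact parameters.

```python
import random
from collections import defaultdict

def estimate_model(sample_step, policy, start_state, episodes, horizon, known_threshold):
    """collect trajectories, count transitions, and mark state-action pairs as 'known'
    once they have been visited at least `known_threshold` times."""
    counts = defaultdict(lambda: defaultdict(int))       # (s, a) -> {s': count}
    for _ in range(episodes):
        s = start_state
        for _ in range(horizon):
            a = policy(s)
            s_next = sample_step(s, a)
            counts[(s, a)][s_next] += 1
            s = s_next
    p_hat, known = {}, set()
    for sa, succ in counts.items():
        total = sum(succ.values())
        p_hat[sa] = {s2: c / total for s2, c in succ.items()}
        if total >= known_threshold:
            known.add(sa)
    return p_hat, known

# toy mdp from the running example: action 'a' stays in s0, action 'b' leaves with probability p
p = 0.05
def sample_step(s, a):
    if s == "s0" and a == "b" and random.random() < p:
        return "s1"
    return "s0" if s == "s0" else "s1"

p_hat, known = estimate_model(sample_step, lambda s: random.choice("ab"), "s0",
                              episodes=200, horizon=50, known_threshold=100)
print(p_hat.get(("s0", "b")), ("s0", "b") in known)
```

the smaller p is, the more samples of (s0, b) are needed before the estimate distinguishes the two actions, which is exactly the dependence on the transition structure that the ε-recurrence time is meant to capture.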
for theorem 1, we assume we run the algorithm until termination, which occurs with probability 1: if it has not terminated, π visits an unknown state-action pair with positive probability in t steps, and there can only be k|s||a| such visits before all state-action pairs are marked as known. if π is not α-optimal from s 0 in m , this means that p -p π ≥ α, which implies p π -p π ≥ α. as the values p π and p π only differ due to π reaching state-action pairs in u in m, this means that π must reach a state-action pair in u in m from s 0 with probability at least α. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/565.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/565.txt new file mode 100644 index 0000000000000000000000000000000000000000..50ce2090104defe0b983e6ee1723fb25e691e38e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/565.txt @@ -0,0 +1 @@ +recent advances in ai capabilities such as conversational question answering, intelligent code completion, and text-to-image generation have seen rapid adoption in practical technologies. these advances have been realized primarily through scaling up the size of the underlying deep learning model. however, this scaling up has led to a significant increase in the computing power and storage capacity necessary to train and deploy such models.one method to reduce deep learning models' computational and storage cost is to use low bit-width data formats instead of the conventional fp32. great strides have been made to enable training using fp16, bfloat16, and most recently fp8 , as well as to perform inference in narrow integer formats like int8. native support for low bit-width formats is now commonplace in ai-oriented hardware such as gpus, tpus, and edge inference devices. the narrowest formats, such as fp8 and int8, require per-tensor scaling factors to adjust to the dynamic range of each tensor. tensor level scaling has has been shown to be insufficient, though, for sub-8-bit formats due to their limited dynamic range. research has shown that micro scaled data formats that associate scaling factors with fine-grained sub-blocks of a tensor are more effective in sub-8 bit regime (e.g., ). this paper evaluates microscaling (mx) data formats -the first open standard for a family of micro-scaled datatypes aimed at deep learning training and inference. the mx standard aims to create an effective data format by achieving a balance among three key factors:• hardware efficiency -maximize compute and storage efficiency via reduced bit-width.• model accuracy -minimize the gap in the quality of results compared with baseline fp32 for ai training and inference.• user friction -ensure seamless integration within existing training and inference frameworks and generalizability across different workloads.details on the mx standard and the concrete binary formats can be found in the ocp microscaling specification . this paper will focus on the empirical results of using mx formats for direct-cast inference, error diffusion inference, and finetuned inference, as well as training on various benchmarks. our results corroborate the effectiveness of mx formats in balancing the competing demands of hardware efficiency, model accuracy, and user friction. 8-bit mx formats can perform inference directly on fp32 pretrained models with minimal accuracy loss and without the need for calibration or finetuning. 
inference with 6-bit mx formats is also very close to fp32 after quantization-aware fine-tuning or using a post-training quantization method. using 6-bit mx formats, we demonstrate the first instance of training large transformer models with sub-8-bit weights, activations, and gradients to an accuracy matching fp32 without modifications to the training recipe. going even further, we show that training of large transformers can be done with 4-bit mx format weights, incurring only a minor accuracy drop.the custom cuda library to emulate mx formats on existing gpus can be found at . this library can be used to reproduce the experimental results reported in this paper. great strides have been made to enable training using fp16, bfloat16, and most recently fp8, as well as to perform inference in narrow integer formats like int8. this paper evaluates microscaling (mx) data formats-the first open standard for a family of micro-scaled datatypes aimed at deep learning training and inference. this paper will focus on the empirical results of using mx formats for direct-cast inference, error diffusion inference, and finetuned inference, as well as training on various benchmarks. 8-bit mx formats can perform inference directly on fp32 pretrained models with minimal accuracy loss and without the need for calibration or finetuning. inference with 6-bit mx formats is also very close to fp32 after quantization-aware fine-tuning or using a post-training quantization method. using 6-bit mx formats, we demonstrate the first instance of training large transformer models with sub-8-bit weights, activations, and gradients to an accuracy matching fp32 without modifications to the training recipe.table1shows the parameters that define the concrete mx formats, which are named by prepending "mx" to the name of the element data format. for mixed-precision training where the weights and activations use different data formats, the gradients (e i in figure2) are quantized to the activation format.in this section, we examine inference results with mx formats across a variety of discriminative tasks including language translation, text encoding, image classification, speech recognition, and recommendation models.we leveraged the open source lm eval harness by eleuther ai for our evaluation of mx data formats in generative inference of openai gpt3-175b and open source llama-7b. the columns with a single mx format use that format for both weights and activations; the other columns list separate formats for weights (wt) and activations (act) and utilize mixed-precision. mxfp6 provides the first demonstration of training generative language models to parity with fp32 using 6-bit weights, activations, and gradients with no modification to the training recipe. the evaluated concrete mx formats provide compelling alternatives to fp32 training and inference with minimal user friction. experimental results show the effectiveness of mx formats for a variety of deep learning models including generative language models, image classification, speech recognition, recommendation models, and translation. mxfp6 also, for the first time, enables generative language model training at sub-8-bit weights, activations, and gradients without sacrificing model accuracy or needing changes to the training recipe. 
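the following toy sketch illustrates the core micro-scaling idea discussed above, namely a scale factor shared by a small block of elements that are themselves stored in a narrow format; it uses a simplified symmetric integer element type and a power-of-two block scale, and is not the exact binary encoding defined in the ocp microscaling specification.

```python
import numpy as np

def mx_quantize_blocks(x, block_size=32, elem_bits=8):
    """Toy micro-scaling quantizer: one shared power-of-two scale per block.

    Illustrates per-block scale factors only; the real MX formats use specific
    element encodings (FP8/FP6/FP4/INT8) and an 8-bit shared exponent as
    defined in the OCP Microscaling specification.
    """
    x = np.asarray(x, dtype=np.float32).ravel()
    pad = (-len(x)) % block_size
    blocks = np.pad(x, (0, pad)).reshape(-1, block_size)

    qmax = 2 ** (elem_bits - 1) - 1                      # symmetric integer range
    amax = np.abs(blocks).max(axis=1, keepdims=True)
    # shared power-of-two scale so the largest magnitude in each block fits in qmax
    scale = 2.0 ** np.ceil(np.log2(np.maximum(amax, 1e-30) / qmax))
    q = np.clip(np.round(blocks / scale), -qmax, qmax)   # narrow elements
    dequant = (q * scale).reshape(-1)[: len(x)]
    return q, scale, dequant

# example: the quantization error stays small relative to each block's range
w = np.random.randn(1000).astype(np.float32)
_, _, w_hat = mx_quantize_blocks(w, block_size=32, elem_bits=8)
print(float(np.abs(w - w_hat).max()))
```

in the real mx formats the element type would be one of the concrete low-bit encodings rather than a plain integer, but the per-block structure and the quantize/dequantize flow are analogous.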
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/566.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/566.txt new file mode 100644 index 0000000000000000000000000000000000000000..174c90209d1418a63c89deb87d8f99cfd17db08e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/566.txt @@ -0,0 +1 @@ +mental disorders are a significant problem affecting 1 in 8 people worldwide, or around 970 million people . not only are mental disorders common, but they can also significantly affect patients' lives. researchers have consistently found that patients with mental diagnoses are likelier to have a lower life expectancy , , , , . there are a variety of unique factors that contribute to reduced life expectancy. unnatural causes like suicide and accidents accounted for 14 percent of excess mortality, while natural causes accounted for 86 percent . accidental deaths were twice as common as suicide . various causes are associated with premature death, such as cardiovascular disease and substance-induced death, primarily from alcohol or drugs , , . for example, diabetes, an extensive history of tobacco use, and a diagnosis of delirium were also found to be risk factors , , . despite the numerous studies conducted to analyze factors of excess mortality in patients with mental diagnoses, the problem remains an ongoing concern.one popular data set used for mortality prediction is the mimic-iii data set, a freely accessible, containing clinical data of patients admitted to critical care units . the data set allows clinical studies to be reproduced and serve as a basis for collaborative research. being widely accessible to researchers, the data set has been used for various machine-learning applications in clinical settings, such as predicting hospital length of stay, predicting sepsis, and acute kidney injury prediction , , . regarding mortality prediction, the data set has been used to analyze mortality for numerous sub-patient groups, including patients with sepsis-3, heart failures, acute respiratory distress syndrome, or acute pancreatitis , , , . however, literature examining mortality prediction among patients with mental diseases remains scarce. this paper aims to address this gap in existing research by utilizing a machine-learningbased approach to mortality prediction for patients with mental diagnoses with mimic-iii. despite the numerous studies conducted to analyze factors of excess mortality in patients with mental diagnoses, the problem remains an ongoing concern.one popular data set used for mortality prediction is the mimic-iii data set, a freely accessible, containing clinical data of patients admitted to critical care units. being widely accessible to researchers, the data set has been used for various machine-learning applications in clinical settings, such as predicting hospital length of stay, predicting sepsis, and acute kidney injury prediction,,. regarding mortality prediction, the data set has been used to analyze mortality for numerous sub-patient groups, including patients with sepsis-3, heart failures, acute respiratory distress syndrome, or acute pancreatitis,,,. however, literature examining mortality prediction among patients with mental diseases remains scarce. 
this paper aims to address this gap in existing research by utilizing a machine-learningbased approach to mortality prediction for patients with mental diagnoses with mimic-iii.this study used the mimic-iii (medical information mart for intensive care) data set, which contains information on patients admitted to critical care units at beth israel deaconess medical center in boston, massachusetts.from the data set, 13,400 patients with at least one mental disease diagnosis in their medical history were identified using the icd-9 international classification of the diseases code system. for patients with more than one admission, only the data from the earliest hospital admission diagnosed with a mental disease was kept, while the other admissions were discarded. thus, the final data set used in the study contains data on 13,400 unique patients and 13,400 unique admissions, where each patient only has one hospital admission. data regarding the date of death was used in conjunction with discharge time to calculate mortality within 30 days. this shows that prescription information seems to be the most relevant when predicting mortality for patients with mental illnesses, with procedural and demographic data less relevant to model performance. a causal study into causes of mortality in mental disease patients may be a topic of interest for future work.this study used machine-learning-based approaches to tackle the problem of mortality prediction for patients diagnosed with mental illnesses using mimic-3 data. this research can be applied in a hospital setting to identify at-risk mental illness patients at an early stage of their hospital stay and medical journey to reduce premature mortality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/567.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/567.txt new file mode 100644 index 0000000000000000000000000000000000000000..3e17bc6001c3f670bca6f47b7a283f81b878c37f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/567.txt @@ -0,0 +1 @@ +if the distribution ν of the traders' valuations admits a density bounded by some constant m , then, for any time horizon t :• in the full feedback case, we design an algorithm (algorithm 1) achieving regret o (m log t ) (theorem 2) and provide a matching lower bound ω (m log t ) (theorem 3).• in the two-bit feedback case, we design an algorithm (algorithm 2) achieving regret o √ m t (theorem 4) and provide a matching lower bound ω √ m t (theorem 5).if we drop the bounded density assumption, we show that the optimal rate degrades to θ √ t in the full feedback case (theorem 6 and 8), while the problem becomes unlearnable in the two-bit feedback case (theorem 9). furthermore, in the full feedback case, we design an algorithm (algorithm 4) achieving simultaneously the optimal o( √ t ) regret in the general case and o(m log t ) in the bounded density case while being oblivious to the validity of the bounded density assumption.finally, we stress that ours is the first paper on online learning in bilateral trade where lower bounds have the correct dependency on m . in all existing literature, optimality was only proved in the time horizon t , not in m . 
formally, for any p, v 1 , v 2 ∈ , the gain from trade of a price p when the valuations of the traders are v 1 and v 2 is g(p,.the information collected by the full feedback model corresponds to direct revelation mechanisms, where the traders communicate their valuations v 2t-1 and v 2t before each round, and the price proposed by the mechanism at time t only depends on past bids v 1 , .• in the full feedback case, we design an algorithm (algorithm 1) achieving regret o (m log t ) (theorem 2) and provide a matching lower bound ω (m log t ) (theorem 3).• in the two-bit feedback case, we design an algorithm (algorithm 2) achieving regret o √ m t (theorem 4) and provide a matching lower bound ω √ m t (theorem 5).if we drop the bounded density assumption, we show that the optimal rate degrades to θ √ t in the full feedback case (theorem 6 and 8), while the problem becomes unlearnable in the two-bit feedback case (theorem 9). furthermore, in the full feedback case, we design an algorithm (algorithm 4) achieving simultaneously the optimal o( √ t ) regret in the general case and o(m log t ) in the bounded density case while being oblivious to the validity of the bounded density assumption. the idea of trying to approximate the representation given by the second lemma and the fact that this representation reduces to a simpler form in the bounded-density case allows us to design a simple algorithm that enjoys optimal regret guarantees both in the bounded-density (m log t regret) and the non-bounded density ( √ t regret) cases while being completely oblivious to which of the two assumptions hold. dropping the assumption leads to a pathological phenomenon typical of bilateral trade problems (known as needle-in-a-haystack) leading us to the design of hard instances in which all-but-one prices suffer a high ω(1)-regret, and where it is essentially impossible to find the optimal price among the continuum amount of suboptimal ones given the small amount of information carried by the two-bit feedback. in fact, given that in the adversarial case the sequence of traders' valuations can be chosen arbitrarily, we can set v 2t-1 := s t and v 2t := b t where s t and b t are defined as in the adversarial lower bound construction in the proof of theorem 5. the next theorem shows that ftm algorithm 1: follow the mean (ftm) post p 1 := 1/2, then receive feedback v 1 , v 2 ; for time t = 2, 3, .,) after observing that gft t (p t ) = g(p t , v 2t-1 , v 2t ) and p t is independent of (v 2t-1 , v 2t ); (t) from theorem 1; (f) follows from fubini's theorem; and (h) from hoeffding's inequality. defining z := 1+e 2 , and r ν t as the regret of the algorithm (α t ) t∈n at time t when the underlying sequence of traders' valuations follows the distribution ν, we have that the worst-case regret sup ν∈dm r ν t is lower bounded by. we will show that for each algorithm for the 2-bit feedback setting and each time horizon t , if r ν t is the regret of the algorithm at time horizon t when the underlying distribution of the traders' valuations is ν, then max r. we will show now that even this strategy will not improve the regret of the algorithm (by more than a constant) because of the cost of determining the sign of ±ε with the available feedback. . 
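as an illustration of the full-feedback setting, the sketch below simulates a follow-the-mean style strategy; it assumes the usual gain-from-trade definition gft(p) = (b − s)·1{s ≤ p ≤ b} for a seller valuation s and a buyer valuation b, and assumes the posted price is the running average of all previously revealed valuations. both assumptions are reconstructions for illustration, not the paper's exact algorithm 1, and the beta-distributed valuations are arbitrary.

```python
import numpy as np

def gain_from_trade(p, s, b):
    """GFT(p) = (b - s) when the posted price p is accepted by both traders."""
    return (b - s) if (s <= p <= b) else 0.0

def follow_the_mean(seller_vals, buyer_vals):
    """Full-feedback sketch: post 1/2 first, then the mean of all past valuations."""
    prices, seen = [], []
    for t, (s, b) in enumerate(zip(seller_vals, buyer_vals), start=1):
        p = 0.5 if t == 1 else float(np.mean(seen))
        prices.append(p)
        seen.extend([s, b])          # full feedback: both valuations are revealed
    return prices

rng = np.random.default_rng(0)
T = 10_000
sellers = rng.beta(2, 5, size=T)     # example valuation distributions
buyers = rng.beta(5, 2, size=T)
prices = follow_the_mean(sellers, buyers)
gft = sum(gain_from_trade(p, s, b) for p, s, b in zip(prices, sellers, buyers))
best_fixed = max(                     # regret is measured against the best fixed price
    sum(gain_from_trade(q, s, b) for s, b in zip(sellers, buyers))
    for q in np.linspace(0, 1, 101)
)
print(best_fixed - gft)
```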
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/568.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/568.txt new file mode 100644 index 0000000000000000000000000000000000000000..e9acfbbac482aa03dfd173d7106b20e50eb041f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/568.txt @@ -0,0 +1 @@ +in the reinforcement learning (rl), the derivation of reward functions has always been a subject of intricate debates and investigations . inverse rl (irl) emerges as a pivotal methodology in this dialogue, where the aim is to decipher the underlying reward structure from observed expert behavior . however, one of the intrinsic challenges that plagues irl is the issue of reward ambiguity. a fundamental realization in irl is that multiple distinct reward functions can lead to the same or similar expert behaviors. this many-to-one relationship between reward functions and policies complicates the task of pinpointing a unique reward landscape from demonstrations . attempts to navigate this conundrum have been diverse. the maximum entropy irl framework, for instance, integrates a probabilistic lens into irl . by postulating that expert demonstrations align not solely with optimality but also with an exponential reward distribution, this approach employs entropy maximization to favor reward functions yielding more uniformly distributed trajectories, serving as an inherent countermeasure to reward ambiguity. further advancements came from bayesian irl, where the uncertainty in reward functions is addressed directly by employing a bayesian framework . instead of seeking a single reward function, bayesian irl estimates a posterior distribution over reward functions given the demonstrations. infogail, an extension of the gail framework, is another noteworthy mention . it incorporates mutual information into the objective, aiming to capture multiple modes of the expert's behavior. by doing so, infogail effectively disentangles different underlying reasons (latent variables) for the expert's actions, which can be seen as a means to shed light on the ambiguous regions of the reward landscape.optimal transport (ot) theory presents a fresh, promising avenue for reward ambiguity problem. the fundamental tenet of ot lies in its ability to measure the distance between different probability distributions, or in our context, between various reward functions. this is achieved using a concept called the wasserstein distance. by understanding how far apart or close together different reward functions are, one can gain clearer insights into the nebulous territory of reward ambiguity that irl grapples with. the true potential of ot for irl is its geometrically-grounded framework. where traditional approaches might only offer probabilistic insights, the marriage of irl and ot provides a more structured, spatial understanding of reward landscapes. imagine mapping reward functions within a geometric space, where the proximity between points indicates similarity. ot provides the tools to construct, measure, and interpret this space, and in doing so, can potentially revolutionize our comprehension of reward ambiguity. in this paper, our primary contribution is the fusion of the analytical power of ot with the challenges of reward ambiguity inherent in irl. we advocate for a synthesis, where the discerning capabilities of irl meet the geometric precision of ot, aiming to deliver a more holistic solution to one of rl's most enduring challenges. 
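to make the geometric picture concrete, the snippet below computes a 1-wasserstein distance between the distributions of reward values that two hypothetical reward functions assign to sampled states; this is a simplified proxy for a distance between reward functions, meant only to illustrate how ot quantifies how far apart two candidate rewards are.

```python
import numpy as np
from scipy.stats import wasserstein_distance

rng = np.random.default_rng(0)
states = rng.uniform(-1, 1, size=(5000, 4))          # sampled state features

# two hypothetical reward functions that induce similar behaviour
def r_a(s):
    return -np.linalg.norm(s, axis=1)

def r_b(s):
    return -np.linalg.norm(s, axis=1) + 0.05 * s[:, 0]

# 1-Wasserstein distance between the induced reward-value distributions,
# used here as a proxy for how different the two reward landscapes are
d = wasserstein_distance(r_a(states), r_b(states))
print(d)
```

a distance between full reward functions over the state-action space would require an ot plan over that space rather than over scalar reward values, but the one-dimensional case already conveys the idea of a metric structure on the set of candidate rewards.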
by postulating that expert demonstrations align not solely with optimality but also with an exponential reward distribution, this approach employs entropy maximization to favor reward functions yielding more uniformly distributed trajectories, serving as an inherent countermeasure to reward ambiguity. instead of seeking a single reward function, bayesian irl estimates a posterior distribution over reward functions given the demonstrations. a key challenge in irl is the ambiguity of reward functions: multiple reward functions might give rise to the same, or nearly identical, expert policies. let r_true be the true underlying reward function of an expert, and r_n be the inferred reward function based on n expert trajectories. at its core, it confirms that, given a broad range of expert demonstrations, our estimated reward structures gradually approach the true underlying reward function driving the expert's decisions. let r be a compact metric space representing the space of all reward functions, π* be an expert's policy, and s = {r_1, r_2, ...} a set of inferred reward functions. it posits that amidst the vast space of plausible reward functions that can induce a given expert policy, there emerges a central or "centroid" reward function that serves as the most representative embodiment with respect to the wasserstein distance. this implies that the distance between reward functions inferred from noisy and noise-free demonstrations remains bounded in the wasserstein space, ensuring robustness of the ot-integrated irl process to noisy expert demonstrations. given the set s_d of reward functions for dimension d, let ∆_d be the average pairwise p-wasserstein distance between reward functions in s_d. this indicates that as the dimensionality of the state-action space grows, the average pairwise wasserstein distance between potential reward functions grows as well, suggesting a more dispersed reward landscape and thus intensified reward ambiguity. the spread of these reward functions implies that as we consider problems with more dimensions, the ambiguity in determining the "true" reward function (among many that can explain the expert behavior) becomes more pronounced. given the true reward function r_true, as the number of expert trajectories n → ∞, the expected reward under the inferred policy π_n based on r_n converges to the expected reward under the expert's policy π* based on r_true. given a set of inferred reward functions {r_n} based on an increasing number of expert trajectories, the sequence is relatively compact in the wasserstein space. given the compactness of the space of reward functions (from the proposition) and the convergence of expected rewards, it implies that the sequence {r_n} has a convergent subsequence in the wasserstein space. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/569.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/569.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a7eaad303834afdab162fc437f52ae1b3c87509 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/569.txt @@ -0,0 +1 @@ +fractional derivatives have been studied in the context of activation functions, which can morph activation functions from one to another in a continuous manner. the activation function is a fundamental element within a neural network. the gamma function, denoted by γ(z), is a generalization of the factorial operator and is used to define the fractional derivative in fractional calculus. 
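to make the fractional-order derivative concrete, the sketch below uses the grünwald-letnikov approximation, whose weights w_k = (−1)^k γ(a+1)/(γ(k+1)γ(a−k+1)) are exactly where the gamma function enters; the grid, step size and truncation length are illustrative, and the paper's own construction (for example via softplus) may differ.

```python
import numpy as np

def gl_fractional_derivative(f, x, a, h=1e-2, n_terms=400):
    """Grünwald-Letnikov approximation of the order-a derivative of f at points x:
    D^a f(x) ~ h**(-a) * sum_k w_k * f(x - k*h), where w_0 = 1 and
    w_k = w_{k-1} * (k - 1 - a) / k are the signed generalized binomial coefficients."""
    w = np.empty(n_terms)
    w[0] = 1.0
    for k in range(1, n_terms):
        w[k] = w[k - 1] * (k - 1 - a) / k
    shifted = np.stack([f(x - k * h) for k in range(n_terms)])  # (n_terms, len(x))
    return h ** (-a) * (w @ shifted)

sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
x = np.linspace(-6, 6, 121)
half_derivative = gl_fractional_derivative(sigmoid, x, a=0.5)   # "fractional sigmoid"
first_derivative = gl_fractional_derivative(sigmoid, x, a=1.0)  # ~ ordinary sigmoid'(x)
```

with a = 1 the weights reduce to a backward difference and the ordinary derivative of the sigmoid is recovered, with a = 0 the function itself is returned, and intermediate orders interpolate between the two shapes.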
the fractional sigmoid activation function can be implemented using the soft plus function or directly by applying the fractional derivative to the sigmoid function.the term "fractional" suggests that these loss functions may involve fractional exponents or weights, which can be used to fine-tune the contribution of individual data points or classes.in this equation, y(t) represents the actual label or target value, ŷ(t) represents the predicted value by the model, a is the fractional derivative order, d a represents the fractional derivative operator, and w(t) is a weight function that assigns weights to different time points or data points. one example of a loss function with a fractional derivative is the fractional mean squared error (fmse), which is defined as:.the fractional cross-entropy loss is a loss function with a fractional derivative that can be used in classification problems to improve the model's accuracy.the fractional huber loss is a modified version of the huber lossthat incorporates a fractional derivative to capture more complex relationships between variables. in the case of fractional sigmoid, convergence is observed toward its second derivative, whereas fractional mish remains closely aligned with the original function.in this set of experiments, we explore the effectiveness of three distinct loss functions: fractional cross-entropy, fractional mean squared error (mse), and fractional huber loss. fractional cross-entropy is evaluated on cifar-100ltand imagenet, while fractional mse and fractional huber loss are tested using the utkface regression dataset. lower learning rates tend to benefit from fractional derivative orders less than 1, whereas higher learning rates exhibit improved performance with fractional derivative orders greater than 1. the fractional sigmoid tends to converge close to the second derivative, while the fractional mish converges towards the original function. we investigated the impact of using a unique activation function with a fractional derivative order per layer compared to a shared activation function with a fractional order per block (meaning the same activation function shape is employed within a block).the experimental results presented in our study underscore the potential benefits of fractional sigmoid and the fractional loss functions. moreover, the fractional loss functions consistently outperform their non-fractional counterparts, although they require the manual selection of fractional derivative orders. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/57.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/57.txt new file mode 100644 index 0000000000000000000000000000000000000000..80774b1565b772631295264143bb96bf8b916eae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/57.txt @@ -0,0 +1 @@ +argumentation is one of the major fields in non-monotonic reasoning (nmr) which has been shown to be very relevant for decision making and for explanation . the relationships between preferential semantics of commonsense reasoning and argumentation semantics are very strong .in particular, the relationships between some multi-preferential semantics for weighted conditional knowledge bases with typicality and gradual argumentation semantics have been recently investigated . 
this has given rise to some new gradual argumentation semantics stemming from fuzzy preferential semantics for conditionals.this paper develops a general approach to define a preferential interpretation of an argumentation graph under a gradual semantics, to allow for defeasible reasoning over the argumentation graph, by formalizing conditional properties of the graph in a many-valued logic with typicality, i.e., a many-valued propositional logic in which arguments play the role of propositional variables, and a typicality operator is allowed, inspired by the typicality operator proposed in the propositional typicality logic . the operator allows for the definition of defeasible implications of the form t(a 1 ) → a 2 , meaning that "normally argument a 1 implies argument a 2 ", in the sense that "in the typical situations where a 1 holds, a 2 also holds". the truth degree of such implications can be determined with respect to a preferential interpretation defined from a set of labellings of an argumentation graph, according to a chosen (gradual) argumentation semantics. they correspond to conditional implications α |∼ β in the klm approach . more precisely, the paper considers graded defeasible inclusions of the form t(α) → β ≥ l, meaning that "normally argument α implies argument β with a degree at least l", where α and β can be boolean combination of arguments. the satisfiability of such inclusions in a multi-preferential interpretation of an argumentation graph (wrt. a given semantics), exploits the preference relations over labellings, relations which are associated to arguments.as a proof of concept, we consider a finitely-valued variant of the gradual semantics presented in , called the ϕ-coherent semantics. for the case when the truth degree set is c n = {0, 1 n , . . . , n-1 n , 1}, for some integer n ≥ 1, an answer set programming approach for conditional reasoning over an argumentation graph is developed for the ϕ-coherent semantics, based on the idea of encoding the labellings of an argumentation graph as answer sets. the defeasible implications satisfied in a preferential interpretation are determined by reasoning about preferred answer sets, namely, the answer sets maximizing the admissibility value of some arguments.the definition of a preferential interpretation i s associated to a gradual semantics s, sets the ground for the definition of a probabilistic interpretation of gradual semantics s. in section 5, for the gradual semantics with domain of argument valuation in the unit real interval , we propose a probabilistic argumentation semantics, which builds on a gradual semantics and is inspired by zadeh's probability of fuzzy events.2 many-valued coherent, faithfull and ϕ-coherent semantics for weighted argumentation graphsin this section we generalize the gradual argumentation semantics proposed in , the ϕ-coherent semantics, whose definition is inspired to some recently studied semantics of conditional knowledge bases . as a proof of concept, the ϕ-coherent semantics will be used in section 4 for conditional reasoning over an argumentation graph.we let the domain of argument valuation s to be either the real unit interval or the finite set c n = {0, 1 n , . . . , n-1 n , 1}, for some integer n ≥ 1. 
this allows to develop the notions of many-valued coherent, faithfull and ϕ-coherent labellings for weighted argumentation graphs, which include both the infinitely and the finitely-valued case.following , we let a weighted argumentation graph to be a triple g = a, r, π , where a is a set of arguments, r ⊆ a × a and π : r → r. this definition of weighted argumentation graph is similar to weighted argument system in , but here we admit both positive and negative weights, while only positive weights representing the strength of attacks are allowed in . in this notion of weighted argumentation graph, a pair (b, a) ∈ r is regarded as a support of argument b to argument a when the weight π(b, a) is positive and as an attack of argument b to argument a when the weight π(b, a) is negative. this leads to bipolar argumentation, which is well-studied in argumentation literature . the argumentation semantics introduced below deals with positive and negative weights in a uniform way. a notion of basic strength is not introduced, even though it could be added.given a weighted argumentation graph g = a, r, π , a many-valued labelling of g is a function σ : a → s which assigns to each argument an acceptability degree in s.let a as a partial function w g σ : a → r, assigning a positive or negative support (relative to labelling σ) to all arguments a i ∈ a such that r -(a i ) = ∅, as follows:) is let undefined. we exploit this notion of weight of an argument to define some different argumentation semantics for a graph g, by extending also to the finitely-valued case the semantics in .definition 1. given a weighted graph g = a, r, π and σ : a → s a labelling, we say that:observe that definition of ϕ-coherent labelling of g is defined through a set of equations, as in gabbay's equational approach to argumentation networks . the notion of ϕ-coherent labelling requires the range of function ϕ to be s. the notions of coherent, faithfull and ϕcoherent labelling of a weighted argumentation graph g do not put constraints on the labelling of arguments which do not have incoming edges, provided the constraints on the labellings of all other arguments can be satisfied, depending on the semantics considered.a many-valued ϕ-coherent labelling of a weigthed argumentation graph g can be proven to be a coherent labelling or a faithfull labelling of g, under suitable conditions on function ϕ, a result which extends the one in .in it has been shown that, for labellings with values in , the notion of ϕ-coherent labelling relates to the framework of gradual semantics studied by amgoud and doder , by considering a slight extension of amgoud and doder's gradual argumentation framework so to deal with both positive and negative weights, to capture the strength of supports and attacks. the notion of bipolar argumentation has been widely studied in the literature, e.g. in . potyca has considered an extension of the bipolar argumentation framework qbafs by baroni et al. which also includes the strength of attacks and supports.as observed in , since multilayer perceptrons can be mapped to weighted conditional knowledge bases, they can as well be seen as a weighted argumentation graphs, with positive and negative weights, under the proposed semantics. 
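as an illustration of the definitions above, the following sketch computes a ϕ-coherent labelling of a small weighted bipolar graph by fixed-point iteration, with argument values restricted to the finite domain c_n; the concrete graph, the logistic choice of ϕ (snapped to c_n), and the treatment of unconstrained initial arguments are illustrative assumptions rather than the paper's examples.

```python
import math

# weighted bipolar argumentation graph: edge (b, a) -> weight, >0 support, <0 attack
edges = {("b", "a"): 0.8, ("c", "a"): -1.0, ("d", "c"): 0.5}
args = {"a", "b", "c", "d"}
n = 10                                       # value domain C_n = {0, 1/n, ..., 1}

def phi(x):                                  # illustrative ϕ: logistic, snapped to C_n
    return round(n / (1.0 + math.exp(-x))) / n

def weight(arg, sigma):                      # w_σ(a) = sum over incoming edges of π(b, a)·σ(b)
    incoming = [(b, w) for (b, a), w in edges.items() if a == arg]
    if not incoming:
        return None                          # no constraint on initial arguments
    return sum(w * sigma[b] for b, w in incoming)

# fixed-point iteration from an arbitrary initial labelling
sigma = {a: 0.5 for a in args}
for _ in range(100):
    new = {}
    for a in args:
        w = weight(a, sigma)
        new[a] = sigma[a] if w is None else phi(w)
    if new == sigma:                         # σ(a) = ϕ(w_σ(a)) for every constrained argument
        break
    sigma = new
print(sigma)
```

arguments without incoming edges keep their initial value here, matching the remark above that the semantics puts no constraint on such arguments.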
in this view, ϕ-coherent labellings correspond to stationary states of the network, where each unit in the network is associated to an argument, synaptic connections (with their weights) correspond to attacks/supports, and the activation of units can be regarded as the values of the corresponding arguments in a labelling. this is in agreement with previous work on the relationship between argumentation frameworks and neural networks, first investigated by garcez, gabbay and lamb and more recently by potyca .while we refer to for a description of the relationships of the ϕ-coherent semantics with the gradual semantics studied by amgoud and doder , let us generalize the ϕ-coherent semantics, by considering different functions ϕ i : a → s, one for each argument a i , rather than a single function ϕ for all arguments. this generalization allows, for instance, to capture mlps in which different activation functions are associated to different layers. definition 2. given a weighted graph g = a, r, π and a function ϕ i : r → s, for eachnote that restricting the domain of argument valuation to the finite number of values in c n , allows considering finitely-valued labellings which approximate infinitely-valued ones considered in . as a special case, for s = c n with n = 1 or n = 2, one gets notions of two-valued and three-valued semantics and, for instance, in the ϕ-coherent semantics, ϕ i can be taken to be a two or three-valued approximation of the activation function of unit i.this paper develops a general approach to define a preferential interpretation of an argumentation graph under a gradual semantics, to allow for defeasible reasoning over the argumentation graph, by formalizing conditional properties of the graph in a many-valued logic with typicality, i. the truth degree of such implications can be determined with respect to a preferential interpretation defined from a set of labellings of an argumentation graph, according to a chosen (gradual) argumentation semantics. , n-1 n , 1}, for some integer n ≥ 1, an answer set programming approach for conditional reasoning over an argumentation graph is developed for the ϕ-coherent semantics, based on the idea of encoding the labellings of an argumentation graph as answer sets. in section 5, for the gradual semantics with domain of argument valuation in the unit real interval , we propose a probabilistic argumentation semantics, which builds on a gradual semantics and is inspired by zadeh's probability of fuzzy events. in this notion of weighted argumentation graph, a pair (b, a) ∈ r is regarded as a support of argument b to argument a when the weight π(b, a) is positive and as an attack of argument b to argument a when the weight π(b, a) is negative. 
the notions of coherent, faithfull and ϕcoherent labelling of a weighted argumentation graph g do not put constraints on the labelling of arguments which do not have incoming edges, provided the constraints on the labellings of all other arguments can be satisfied, depending on the semantics considered.while we refer tofor a description of the relationships of the ϕ-coherent semantics with the gradual semantics studied by amgoud and doder, let us generalize the ϕ-coherent semantics, by considering different functions ϕ i : a → s, one for each argument a i , rather than a single function ϕ for all arguments.the strong relations between the notions of coherent, faithfull and ϕ-coherent labellings of a gradual argumentation graph and the corresponding semantics of weighted conditional knowledge bases have suggested an approach for defeasible reasoning over a weighted argumentation graph, which builds on the semantics of the argumentation graph.given an argumentation graph g and a gradual argumentation semantics s, we define a preferential (many-valued) interpretation of the argumentation graph g, with respect to the gradual semantics s. then, we extend the language with a typicality operator, to introduce defeasible implications over boolean combinations of arguments and define a (multi-)preferential interpretation associated with the argumentation graph g and the argumentation semantics s.given a specific semantics s of a weighted argumentation graph g, with domain of argument valuation s, if the set of labellings σ of g in s is well-founded, we let i s = (s, σ) be the preferential interpretation of g with respect to the gradual semantics s. for such argumentation semantics s, a set of labellings σ s σ0 can be associated to a given choice σ 0 of the base score, and a preferential interpretation i s = (s, σ s ) can be defined for a semantics s by considering the labellings for all the possible choices of the base score σ 0 , or for some of them (the choices satisfying some conditions), or for a single one.the preferential interpretation i s can be used to validate (under the semantics s) properties of interest of an argumentation graph g, expressed by graded implications (including strict or defeasible implications or their boolean combination) based on the semantics s. in case there are infinitely many labellings of the graph in the semantics s, which may give rise to non well-founded preference relations associated to arguments, approximations of the semantics s over a finite domain, can be considered for proving properties of the argumentation graph. the following graded implications hold: when the domain of argument valuation is in the interval , the definition of a preferential interpretation i s associated to a gradual semantics s, which has been developed in section 3, also suggests a probabilistic argumentation semantics, inspired to zadeh's probability of fuzzy events. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/570.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/570.txt new file mode 100644 index 0000000000000000000000000000000000000000..0b3fda24a32b6930734662b76c80e461ead6c7aa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/570.txt @@ -0,0 +1 @@ +solar energy has become a key source of sustainable electricity generation due to the growing need for renewable energy sources and concern over climate change . 
although solar energy is a clean and accessible resource, its stochastic nature and dependence on weather present distinct challenges to smart grids . solar radiation, which measures sun intensity reaching the surface of the earth, plays an important role in determining solar energy production system effectiveness and predictability . solar irradiance forecasting has become a critical process in the smart grid sector . this forecasting is essential for smart grid integration and stability , and reduces the negative impact of solar power instability on the grid, power electronic equipment , optimizing energy markets , energy storage management , transformers , and environmental concerns . so, a comprehensive understanding of solar irradiance is essential to effectively integrating solar power into smart grids. it is essential to develop advanced forecasting methods that can provide reliable insights into future irradiance patterns in order for this understanding to be achieved.in recent years, machine learning has emerged as a powerful tool for addressing the various and complex problems in smart grids, such as environment explosiveness level evaluation , smart monitoring systems , diagnosing location , generation , demand-side cooperation , and occupant estimation . one of the most important applications of machine learning algorithms is their ability to forecast. a machine learning model can be trained using historical solar irradiance data, such as sophisticated meteorological variables and sensor-based data. the authors assess the use of hybrid machine-learning models for solar radiation forecasting . to build a more precise forecasting model, they mix various machine learning techniques. convolutional neural networks are utilized in to forecast solar radiation by taking advantage of the spatial and temporal trends in the inputs and outputs to make better forecasts. an mlp-ann algorithm is developed in for solar irradiance forecasting based on meteorological data. in this work, feature importance is used to specify the most relevant features. long short-term memory is one of the various types of recurrent neural networks for sequential data, which are used in for solar irradiance forecasting. develops a support vector machine regression model to forecast solar irradiance.although significant studies have been conducted by using machine learning to predict solar irradiance, there are still significant aspects such as feature importance analysis, hyperparameter tuning, model validation, and testing on different unseen datasets that are crucial for developing accurate and robust forecasting models. addressing these aspects can lead to more reliable predictions in solar irradiance forecasting that can improve the performance of the smart grids. the main contribution of this study can be summarized as follows:• advanced machine learning comparison: this study contributes by employing next-generation machine learning algorithms, including random forest, extreme gradient boosting (xgboost), light gradient boosted machine (lightgbm) ensemble, multilayer perceptron artificial neural networks (mlp-anns) and catboost. 
by utilizing these next-generation techniques, the research not only explores their individual capabilities but also provides a comprehensive comparison of their performance.• enhanced forecasting through feature selection: another key contribution is the incorporation of feature selection techniques to improve forecasting results.• hyperparameter tuning via bayesian optimization: the research advances the area by using bayesian optimization to adjust the hyperparameters of the machine learning models.• cross-validation: this work further contributes by precisely testing the developed forecasting algorithms on diverse unseen datasets.• spatial generalization through multilocation testing: the final contribution is to test the developed models on different datasets collected from various geographical locations. a machine learning model can be trained using historical solar irradiance data, such as sophisticated meteorological variables and sensor-based data.although significant studies have been conducted by using machine learning to predict solar irradiance, there are still significant aspects such as feature importance analysis, hyperparameter tuning, model validation, and testing on different unseen datasets that are crucial for developing accurate and robust forecasting models.• advanced machine learning comparison: this study contributes by employing next-generation machine learning algorithms, including random forest, extreme gradient boosting (xgboost), light gradient boosted machine (lightgbm) ensemble, multilayer perceptron artificial neural networks (mlp-anns) and catboost.• enhanced forecasting through feature selection: another key contribution is the incorporation of feature selection techniques to improve forecasting results.feature selection is a technique in machine learning where a subset of the most relevant features or inputs is selected from a larger set of features. the goal of feature selection is to improve model performance, reduce computational complexity, enhance interpretability, and mitigate the risk of overfitting by focusing on the most relevant features. the simplest way to remove the less significant and select the most relevant features is to train the machine learning through all possible combinations of features and choose the best one. accordingly, to select the most important features of a learning model, the feature selection technique is used. it should be noted that machine learning models like random forest, xgboost, lightgbm, and catboost are based on the inherent feature importance provided by the algorithms during their training process. feature selection techniques involve selecting features before feeding the data into the machine learning algorithms. many models are developed with all possible combinations of features, and the best model is selected based on a performance metric among all the models created by the wrapper feature selection models. unlike wrapper methods that involve training and evaluating models, filter methods pre-process features independently of the chosen machine learning algorithm.1variables such as ambient temperature, humidity, and wind direction show a substantial correlation with solar irradiance, indicating a higher impact on solar irradiance forecasting. 
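a minimal sketch of the filter-style feature selection and model evaluation described above is given below, assuming a hypothetical tabular file with meteorological columns and a solar irradiance target; the file name, column names, correlation threshold and model settings are placeholders, and the same flow would be repeated for the xgboost, lightgbm, catboost and mlp models to obtain the comparison.

```python
import pandas as pd
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split

df = pd.read_csv("solar_weather.csv")          # hypothetical file and columns
target = "solar_irradiance"

# filter method: keep features whose absolute correlation with the target
# exceeds a threshold (chosen arbitrarily here), independently of any model
corr = df.corr(numeric_only=True)[target].drop(target)
selected = corr[corr.abs() > 0.2].index.tolist()

X_train, X_test, y_train, y_test = train_test_split(
    df[selected], df[target], test_size=0.2, random_state=42
)

model = RandomForestRegressor(n_estimators=300, random_state=42)
model.fit(X_train, y_train)
rmse = mean_squared_error(y_test, model.predict(X_test)) ** 0.5
print(f"RMSE with {len(selected)} selected features: {rmse:.3f}")
```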
to assess the impact of the feature selection technique on the performance of the models, the mlp-anns are trained and tested with and without feature selection.in this study, we explored the use of advanced machine learning algorithms for forecasting solar irradiance, a key factor for efficient smart grid management. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/571.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/571.txt new file mode 100644 index 0000000000000000000000000000000000000000..c58036c44fc4fdf30240f158f88098189ddc9f1c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/571.txt @@ -0,0 +1 @@ +reinforcement learning (rl) is a sequential decisionmaking framework that finds its applications in a wide range of areas, ranging from epidemic control to transportation to online marketing (ling et al., 2023;singh et al., 2023;al-abbasi et al., 2019). the objective of an rl agent is to obtain a policy that maximizes the discounted sum of expected rewards that are generated due to its interaction with the environment. one way to solve this problem is called the policy gradient (pg) approach which performs the optimization directly in the policy space. value function-based approaches, on the contrary, optimize the q-functions which are then used to filter out the policies.paired with the general function approximation, pgbased methods turn out to be an effective tool in handling large state spaces. specifically, deep neural network (dnn)-based pg has shown tremendous success empirically (schulman et al., 2015(schulman et al., , 2017)). the idea of function approximation is to parameterize the policies by a d-dimensional parameter, θ so that the optimization can be performed over r d . in this case, however, the objective function, j(θ), appears to be non-convex. as a result, many articles primarily analyze the firstorder stationary (fos) convergence properties of j(θ). for example, (yuan et al., 2022) derives õ(ǫ -4 ) sample complexity for vanilla-pg algorithm to achieve an ǫ-fos error.in this paper, however, we are interested in the global convergence property of j(θ). it can be shown that under the assumption of fisher non-degeneracy (fnd), j(θ) satisfies the gradient domination property which implies that there does not exist any fos sub-optimal points of j(θ). as a result, one can achieve any ǫ global optimum (go) gap, despite the objective function being non-convex. for example, (liu et al., 2020) derives o(ǫ -4 ) and o(ǫ -3 ) sample complexities of the pg and the natural pg (npg) algorithms respectively corresponding to an ǫ go gap. in recent years, importance sampling (is) has garnered popularity as an important sample complexityreducing method. (liu et al., 2020) utilizes is to propose variance-reduction (vr) versions of pg and npg algorithms, each achieving a sample complexity of õ(ǫ -3 ). the caveat of is-based methods is that one needs to impose a bound on the variance of the is weights. this is a strong assumption and might not be verifiable for most practical cases. alternatives to the is-based method include the momentum-based n-pg-igt algorithm of (fatkhullin et al., 2023) and the hessian-based scrn algorithm of (masiha et al., 2022), both of which achieve õ(ǫ -2.5 ) sample com-lower bound (azar et al., 2017) o(ǫ -2 ) ---table 1: summary of recent sample complexity results for global convergence in discounted reward mdps with general parameterized policies. 
the term, "hessian-free" states that the underlying algorithm only utilizes firstorder information. the "is-free" column states whether a bound (w ) on the variance of the importance sample (is) weights is assumed. it is worthwhile to mention here that although the harpg algorithm is hessianaided, its per-iteration computational and memory requirements are similar to that of hessian-free methods (fatkhullin et al., 2023). we have explicitly written the logarithmic terms in the sample complexities of harpg and vr-scrn to highlight the superiority of our result.plexity. hessian-based (second-order) approaches typically have more memory and computational requirements in comparison to the first-order algorithms and thus, are less preferred in practice. the state-of-theart (sota) õ(ǫ -2 ) sample complexity is achieved by two recently proposed algorithms: the vr-scrn by (masiha et al., 2022) and the harpg proposed by (fatkhullin et al., 2023). the first one uses a combination of hessian and is-based methods whereas the other one solely applies a hessian-aided technique. this leads to the following question.does there exist an is-free and hessian-free algorithm that either achieves or improves the sota õ(ǫ -2 ) sample complexity? moreover, the iteration complexity of our algorithm matches the sota iteration complexity of hessian-free and is-free algorithms and beats that of the harpg algorithm by a factor of o(ǫ -1 ).the npg algorithm updates the policy parameter, θ for k iterations and at each iteration, k ∈ {0, • • • , k -1}, at estimate, ω k , of the true natural gradient, ω * k is calculated via an h-step sgd method.number of times following (16) where {ω k } k-1 k=0 denote the estimates of {ω * k } k-1 k=0 .recall that in the outer loop, the policy parameter θ k 's are updated via (16) where ω k 's are estimates of ω * k 's, the natural policy gradients. the following lemma establishes how θ k 's convergence is intimately connected with the convergence of ω k 's. let, the parameters {θ k } k-1 k=0 be updated via (16), π * be the optimal policy and j * ρ be the optimal value of the function j ρ (•).observe that, if the estimate ω k produced by the inner loop is a good approximation of ω * k , then the first order term in (20) will be small.notice that the first term in (22) will be small if ω k well approximates ω * k , ∀k.recall that the inner loop produces estimates of ω * k = arg min ω∈r d l ν π θ ρ (ω, θ)| θ=θ k using stochastic gradients obtained from algorithm 1. let ω k be an estimate of ω * k generated by algorithm 2 at the kth iteration of the outer loop, k ∈ {0, 1, • • • , k-1}. let {θ k } k-1k=0 be the policy parameters generated by algorithm 2, π * be the optimal policy and j * ρ denote the optimal value of j ρ (•) corresponding to an initial distribution ρ. if we follow existing analysis such as that given in(liu et al. by introducing an improved global convergence analysis, and utilizing a novel observation that a first-order term can be interpreted as the error generated by a noiseless accelerated gradient descent procedure, we establish a sample complexity of o(ǫ -2 ) and an iteration complexity of o(ǫ -1 ) for the anpg.summing from k = 0 to k -1, using the non-negativity of kl divergence and dividing the resulting expression by k, we get the desired result.where (a) utilizes the cauchy-schwarz inequality and the definition that ω * k = f ρ (θ k ) † ∇ θ j ρ (θ k ). 
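the two-loop structure described above (an outer natural-gradient step using an estimate ω_k produced by an inner stochastic-gradient routine on the compatible function approximation objective) can be sketched as follows; this toy uses a one-step environment, plain sgd in the inner loop rather than the paper's accelerated routine, and made-up step sizes, so it only illustrates the structure, not the anpg algorithm or its guarantees.

```python
import numpy as np

rng = np.random.default_rng(0)
num_states, num_actions, d = 4, 3, 12          # tabular softmax policy, theta in R^d

def features(s, a):                            # one-hot (s, a) feature map
    x = np.zeros(d); x[s * num_actions + a] = 1.0; return x

def grad_log_pi(theta, s, a):                  # grad of log pi_theta(a|s) for softmax policies
    logits = np.array([theta @ features(s, b) for b in range(num_actions)])
    probs = np.exp(logits - logits.max()); probs /= probs.sum()
    return features(s, a) - sum(probs[b] * features(s, b) for b in range(num_actions))

def sample_batch(theta, batch=256):            # toy one-step environment
    data = []
    for _ in range(batch):
        s = rng.integers(num_states)
        logits = np.array([theta @ features(s, b) for b in range(num_actions)])
        p = np.exp(logits - logits.max()); p /= p.sum()
        a = rng.choice(num_actions, p=p)
        reward = 1.0 if a == s % num_actions else 0.0
        data.append((s, a, reward))
    return data

theta = np.zeros(d)
eta, inner_lr, K, H = 0.5, 0.1, 50, 200
for k in range(K):                             # outer loop: theta_{k+1} = theta_k + eta * omega_k
    batch = sample_batch(theta)
    baseline = np.mean([r for _, _, r in batch])
    omega = np.zeros(d)
    for h in range(H):                         # inner loop: SGD on E[(omega . grad_log_pi - A)^2]
        s, a, r = batch[rng.integers(len(batch))]
        g = grad_log_pi(theta, s, a)
        adv = r - baseline
        omega -= inner_lr * 2.0 * (omega @ g - adv) * g
    theta += eta * omega
print(np.mean([r for _, _, r in sample_batch(theta)]))   # average reward after training
```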
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/572.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/572.txt new file mode 100644 index 0000000000000000000000000000000000000000..38f7a7c7cd90951585ef94eb1856fea1b3f90634 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/572.txt @@ -0,0 +1 @@ +the calculation of expectations and probabilities is a prevalent objective in almost all areas of the sciences. the problem can be generically phrased as follows: given a probability measure µ* defined on some sample space Ω ⊆ ℝ^d and an observable f : Ω → ℝ, we wish to estimate µ*(f) := ∫_Ω f(x) dµ*(x) (1). (choosing f to be the indicator function of some set a ⊂ Ω, this gives the probability of a.) oftentimes the probability measure µ* is known only up to a normalization factor, and the estimation of this factor is also desirable: it is known as the partition function in statistical mechanics and the evidence in bayesian inference. the analytical evaluation of (1) is possible only in a few very specific cases. standard numerical quadrature methods based on gridding the space are also inapplicable as soon as the set Ω is high dimensional (i.e. d is larger than 3). in practice one must therefore resort to monte carlo (mc) methods whereby one estimates (1) by replacing it by some empirical average over samples. since direct sampling of µ* is typically not an option either, the most common monte carlo strategies are importance sampling, in which independent samples drawn from a simpler distribution than µ* are properly reweighted to estimate (1), and markov chain monte carlo (mcmc), in which a markov sequence ergodic with respect to µ* is used; these two classes of methods will be discussed in more detail below. to improve their efficiency, these vanilla mc strategies are typically integrated into more sophisticated methods such as umbrella sampling (aka stratification) or replica exchange. as a rule, however, to be effective all these approaches must be carefully tailored to the problem at hand using some prior information about µ* and f. as a result the success of monte carlo strategies often relies on the skill of the user. the aim of this paper is to discuss how to leverage recent advances in machine learning (ml) to improve this situation and streamline the design of efficient monte carlo sampling strategies. this is a natural aim considering that machine learning tools and concepts have revolutionized the way we process large data sets, in particular in situations where these data were drawn from complex high-dimensional distributions. this feat is a priori appealing in the context of monte carlo sampling where one is confronted with probability distributions with similar features. there is however one important difference. by and large the successes of ml have relied on the prior availability of large quantities of data. in the context of mc, there is typically no data accessible beforehand: in fact, the whole aim of mc is to generate such data in order to make controlled estimation of (1) possible. this leads to specific challenges, uncommon in the context of supervised learning, but also opportunities to understand better certain aspects of ml training procedures by relying e.g. on online learning, in the context of which issues like convergence or generalization error can be better understood. 
our aim is also to discuss these aspects.here, we will discuss several schemes that are based on constructing transport maps in order to improve mc sampling: as a rule, these maps push samples from a simple base distribution onto samples that are better adapted to the target distribution. this methodology is applicable in the context of importance sampling as well as mcmc, and it can also be generalized to non-equilibrium sampling. this leads to a feedback loop in which we sample an objective by a mc method to learn a better transport map, and use this map to produce better samples via mc, much in the spirit of reinforcement learning strategies. while the task of devising maps between distributions is foundational in the field of optimal transport , the variational framing of this problem in machine learning has its roots in . further progress in this direction have been made in where the maps used for sampling are parameterized by deep neural networks (dnn), an innovation realized in . the possibility to calculate such maps rests on the approximation power of the dnn used to parametrize them, as well as our ability to optimize the parameters in these networks. this paper will offer little insight about the first assumption: we will simply assume that dnn are sufficiently expressive to the task at hand, basing this belief on the growing empirical evidence that this is indeed the case and that the ongoing design of dnn of increasing complexity will keep improving their capabilities. regarding the question of optimization, we will carefully design objective functions for the maps that are amenable to empirical estimation via sampling. this guarantees that training of the dnn used to approximate these maps can be performed in practice using online learning with stochastic gradient descent (sgd). the objective functions we introduce have the feature that all their local minimizers are global; however, this nice feature may not be preserved after nonlinear parametrization by a neural network, and it does not necessarily guarantee fast convergence of sgd nor does it easily allow us to obtain convergence rates. here too, we will leave this question open to further investigations.then, for all local minimizing pairs (v, x t ), we have x t=1 ♯µ b = µ * , where µ * and µ b are the measures whose densities are ρ * = z -1 * e -u * and ρ b = z -1 b e -u b , respectively.note that we can solve the first equation in(23)forward in time from x t=0 (x) = x first, then use x t=1 (x) to solve the second equation backward in time from g t=1 (x) = ∇u * (x t=1 (x)).then, for all local minimizing pairs (v, x t ): (i) x t=0 ♯µ * = µ b where µ * and µ b are the measures with density ρ * = z -1 * e -u * and ρ b = z -1 b e -u b , respectively; and (ii) the solution to.note that the maps x t and x t are inverse of each other in the sense that, for all t ∈ , we have x t (x t=1 (x)) = x t (x) and x t ( x t=0 (x)) = x t (x).note that we can solve the first equation in(23)backward in time from x t=1 (x) = x first, then use x t=0 (x) to solve the second equation forward in time from ḡt=0 (x) = -∇u b ( x t=0 (x)).where ẋ t (x) = v ( t, x t (x)) with x t=0 (x) = x and we used(18)as well as ρ b = z -1 b e -u b to get the second equality.then, for all local minimizing pairs (v t , x t ), we have x t=1 ♯µ b = µ where µ and µ b are the measures whose densities are ρ = z -1 e -u and ρ b = z -1 b e -u b , respectively.where ẋ t (x) = v t ( x t (x)) with x t=1 (x) = x and the constant z 2 z -2 b can again be neglected. 
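the reweighting role of a transport map can be illustrated with a deliberately simple affine map in one dimension: samples from the gaussian base are pushed through the map and importance weights of the form ρ*(T(x))|det ∇T(x)|/ρ_b(x) are formed against an unnormalized target; a learned neural-network map would be used the same way, only with a different jacobian computation. the target density and map parameters below are arbitrary.

```python
import numpy as np

rng = np.random.default_rng(0)

def log_target(x):                       # unnormalized target exp(-U*(x)) with U*(x) = (x-3)^4/4
    return -0.25 * (x - 3.0) ** 4

def log_base(x):                         # standard Gaussian base density (up to a constant)
    return -0.5 * x ** 2

# a trivial "transport map": an affine flow x -> a*x + b with log|det| = log a.
a, b = 0.8, 3.0

x_base = rng.standard_normal(100_000)
x_push = a * x_base + b
log_w = log_target(x_push) - (log_base(x_base) - np.log(a))   # rho*(T(x))|det grad T|/rho_b(x)
w = np.exp(log_w - log_w.max())
w /= w.sum()                             # self-normalized weights handle unknown normalization

estimate = np.sum(w * x_push)            # estimate of the mean under the target measure
ess = 1.0 / np.sum(w ** 2)               # effective sample size: higher means a better map
print(estimate, ess)
```

the self-normalization step is what allows the target to be known only up to its partition function, and the effective sample size gives a simple diagnostic of how well the chosen map adapts the base samples to the target.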
by choosing t large enough and replacing ∇ log ρ t (x) by s θ (t, x) with θ obtained by minimizing (55), we can use gaussian samples for x r t=0 and turn them into samples x r t whose measure is approximately the target µ.for example, we could take by construction we have x t=0 = x b ∼ µ b and x t=1 = x * ∼ µ * , and since the process x t is continuous and differentiable in time, its measure inherits these properties and interpolates between the base µ b and the target µ * . therefore, if we are given a data set {x b i , x * i } n i=1 of independent samples from π, and we wish to approximate v(t, x) via some parametric v θ (t, x) with θ ∈ θ, these parameters can be estimated by minimizing the empirical loss.in the context of metropolis-hastings mcmc, this is achieved by (i) proposing a new sample x using some transition probability kernel q x i (d y) = q x i ( y)d y; (ii) accepting x as new state. for the mh-mcmc scheme in which x = x t=1 (x b ) with x b ∼ µ b , from (87) we have. x t=1 ( x t=0 (x)), and satisfy the composition rule x t ( x t=0 (x)) = x t (x), to deduce. when the current state of the mcmc chain x i was also constructed via transport and therefore was produced by pushing an x b i ∼ µ b through to x i = x t=1 (x b i ), the likelihoods ρ * (x i ), ρ b (x b i ), and ρ t=1 (x i ) are already available and need not be recomputed to estimate the factor in (94). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/573.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/573.txt new file mode 100644 index 0000000000000000000000000000000000000000..ec88c251350b113a838f32eb01906fe7321baa1f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/573.txt @@ -0,0 +1 @@ +in e-commerce, we are often faced with two forecasting challenges. first, forecasts at the lowest granularity -often the individual product level -are required but we also need forecasts at higher granularities, for example at the category, department, or regional level, as higher level forecasts are often needed in logistics and financial planning. second, forecasts at different time granularities are required, for example daily or weekly forecasts. it is common that separate forecast models are made for each separate (temporal) granularity, and as such these forecasts may not be coherent with each other. hierarchical forecasting and temporal hierarchical forecasting techniques aim to solve the problem of creating forecasts that are coherent with respect to a pre-specified cross-sectional and/or temporal hierarchy of the underlying time series.challenges with existing cross-sectional and temporal hierarchical forecasting techniques. reconciliation methods adjust the forecasts for each level in the hierarchy by minimizing the errors at each forecast level. these methods are applied as a post-processing step that requires a matrix inversion that scales cubically with the number of products or product hierarchies . in settings with millions of products such as in e-commerce, this becomes computationally expensive at prediction time. neural network methods can optimize for the hierarchy in an end-to-end manner, however, these are either multivariate methods that scale poorly to millions of time series or they can only optimize for the temporal hierarchy .sparse loss function. in order to overcome these scaling issues, we design a sparse hierarchical loss (hl) function that directly optimizes both cross-sectional and temporal hierarchical structures. 
our corresponding sparsity-aware implementation ensures that the number of operations in our loss function scales quadratically rather than cubically with the number of products and levels in the hierarchical structure, enabling computationally efficient training. the benefit of our sparse hierarchical loss function is that it provides practitioners a method of producing bottom-level forecasts that are coherent to any chosen cross-sectional and temporal hierarchy. in addition, removing the need for a postprocessing step as used in traditional hierarchical forecasting techniques reduces the computational cost of the prediction phase in the forecasting pipeline. furthermore, this also reduces the deployment complexity of the forecasting pipeline.evaluation. we evaluate our sparse hl function on a gradient-boosted forecasting system on the public m5 dataset and a proprietary dataset from our ecommerce partner. for the m5 dataset, we demonstrate that our implementation provides up to 10% better forecasting performance as measured by both rmse and mae compared with (i) reconciliation methods and (ii) baseline bottom-level forecasting methods that use a standard loss function. for the proprietary dataset, we present the results of an offline test on the product-level forecast system of bol, a european e-commerce company with a catalog of millions of unique products. we find our sparse hl function improves the forecasting performance by about 2% on rmse and 10% on mae as compared to the baseline forecasting system. this demonstrates the usefulness of our sparse hl function in a large-scale setting.contributions. in summary, the main contributions of this paper are:1. we design a sparse hierarchical loss function that enables direct end-to-end training of cross-sectional and temporal hierarchical forecasts in large-scale settings in section 4.2. we empirically demonstrate that our sparse hierarchical loss function can outperform existing hierarchical forecasting reconciliation methods by up to 10% in section 5.1. contrary to most end-to-end hierarchical forecasting methods that leverage neural networks , we use lightgbm as our base forecasting model, a highly popular gradient boosting-based forecasting method that is widely used in industry and was used by the majority of the top performing solutions in the m5 forecasting competition .3. we show how our sparse hierarchical loss function scales to large-scale settings and demonstrate a reduction of both training and prediction time of up to an order of magnitude compared to the best hierarchical forecasting reconciliation methods (section 5.1).4. we present the results of an offline test of our method for the primary product demand forecasting model at bol, a european e-commerce company with a catalogue of millions of unique products, demonstrating an improvement of 2% on rmse and 10% on mae as compared to the baseline forecasting system, in section 5.2. for the m5 dataset, we demonstrate that our implementation provides up to 10% better forecasting performance as measured by both rmse and mae compared with (i) reconciliation methods and (ii) baseline bottom-level forecasting methods that use a standard loss function. 
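since lightgbm is used as the base forecaster, a custom loss such as the hierarchical loss enters through lightgbm's support for user-defined objectives, supplied as a callable that returns the gradient and hessian of the loss with respect to the raw predictions. the sketch below is only a generic template with plain squared error as a stand-in objective; the sparse hierarchical gradient computation itself is not reproduced here.

import numpy as np
import lightgbm as lgb

def custom_objective(y_true, y_pred):
    """template for a user-defined lightgbm objective: return the gradient
    and hessian of the loss w.r.t. y_pred. plain squared error is used here
    as a placeholder; a hierarchical loss would compute these quantities
    through the (sparse) cross-sectional and temporal aggregation matrices."""
    grad = y_pred - y_true
    hess = np.ones_like(y_pred)
    return grad, hess

# illustrative usage on random data
X = np.random.rand(256, 10)
y = np.random.rand(256)
model = lgb.LGBMRegressor(objective=custom_objective, n_estimators=50)
model.fit(X, y)
print(model.predict(X[:5]))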
we show how our sparse hierarchical loss function scales to large-scale settings and demonstrate a reduction of both training and prediction time of up to an order of magnitude compared to the best hierarchical forecasting reconciliation methods (section 5.1). here s ∈ {0,1}^(n × n_b) is a matrix that defines the hierarchical relationship between the n_b bottom-level time series and the n_a = n - n_b aggregations, g ∈ r^(n_b × n) is a matrix that encapsulates the contribution of each forecast to the final estimate, and ỹ_h ∈ r^n is the vector of forecasts adjusted for the hierarchy. we are interested in finding forecasts that can be aggregated according to a pre-specified cross-sectional hierarchy s^cs ∈ {0,1}^(n^cs × n^cs_b) and temporal hierarchy s^te ∈ {0,1}^(n^te × n^te_b). we now construct a matrix of bottom-level base forecasts ŷ_b ∈ r^(n^cs_b × n^te_b), in which the columns represent the forecasts of the bottom-level time series at a timestep h. s^cs 1^cs and s^te 1^te denote the row-sums of s^cs and s^te, respectively, and l^cs and l^te denote the number of levels in hierarchies s^cs and s^te, respectively. furthermore, suppose we have a single cross-sectional aggregation (the sum of the two time series, thus n^cs_a = 1 and n^cs = n^cs_a + n^cs_b = 3), and a single temporal aggregation (the sum of the two timesteps, thus n^te_a = 1 and n^te = n^te_a + n^te_b = 3). this derivation also shows the motivation for adding the denominator matrix d^cs d^te to the loss function (9): it is necessary to scale the aggregation gradients by the number of elements in the aggregation, otherwise the magnitude of the gradient grows with the number of time series and the number of levels in the hierarchy, which we found to be undesirable when trying to facilitate stable learning. for example, s^cs has at most n^cs_b l^cs non-zero elements: the number of bottom-level time series multiplied by the number of aggregations in the hierarchy. the training and prediction time complexity is indicated by how the training time and prediction time, respectively, scale with respect to the default lightgbm training and prediction time complexity. however, our sparse implementation of the hierarchical loss reduces this component from (n^cs_b)^3 to (n^cs_b)^2 l^cs, effectively reducing the scaling from cubic to quadratic in the number of bottom-level time series, as l^cs is generally small. we have n^cs_b ≈ 5 million bottom-level time series and n^cs_a ≈ 6,070 aggregated time series across l^cs = 4 levels: product (bottom-level), product group, seasonality group and total. we demonstrated that we are able to outperform existing hierarchical forecasting methods both in terms of performance as measured by rmse and mae, by up to 10%, as well as in terms of computational time required to perform the end-to-end hierarchical forecasting in large-scale settings, reducing prediction time as compared to the best hierarchical forecasting reconciliation method by an order of magnitude. here n_b denotes the number of time series in each level of the hierarchy, l^cs the number of levels in the cross-sectional hierarchy, n^cs (n^te) the total number of cross-sectional (temporal) aggregations, and n_a = n - n_b.
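to make the sparsity argument concrete, the sketch below builds the small worked example from the text (two bottom-level series with one cross-sectional aggregation, two timesteps with one temporal aggregation) using scipy sparse matrices and evaluates a squared error over every (cross-sectional, temporal) aggregate; the variable names and the exact squared-error form are illustrative and do not reproduce loss function (9) or its denominator matrices.

import numpy as np
from scipy import sparse

# cross-sectional hierarchy: 2 bottom series + 1 aggregate (their sum), so s_cs is 3 x 2
S_cs = sparse.csr_matrix(np.array([[1, 1],    # total = series 1 + series 2
                                   [1, 0],
                                   [0, 1]]))
# temporal hierarchy: 2 timesteps + 1 aggregate (their sum), so s_te is 3 x 2
S_te = sparse.csr_matrix(np.array([[1, 1],
                                   [1, 0],
                                   [0, 1]]))

def hierarchical_sq_error(Y_hat, Y):
    """sum of squared errors over every cross-sectional x temporal aggregate,
    where Y_hat and Y hold bottom-level forecasts/targets of shape (n_cs_b, n_te_b)."""
    D = np.asarray(S_cs @ (Y_hat - Y))   # aggregate the errors over the cross-sectional hierarchy
    E = np.asarray(S_te @ D.T)           # then over the temporal hierarchy
    return float(np.sum(E ** 2))

Y_true = np.array([[1.0, 2.0], [3.0, 4.0]])
Y_pred = np.array([[1.5, 2.5], [2.0, 4.5]])
print(hierarchical_sq_error(Y_pred, Y_true))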
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/574.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/574.txt new file mode 100644 index 0000000000000000000000000000000000000000..62f2fd3a7d4f025d927c080076ee2c459373e69a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/574.txt @@ -0,0 +1 @@ +our algorithm is based on a bayesian/multiplicative weights type approach to the problem, and is along the lines of the splitting-based approach of dasgupta .we maintain a set of weights w(h) for each h ∈ h, starting at 1; these induce a distribution λ(h) := w(h) h w(h) which we can think of as our posterior over the "true" h * .realizable setting. as initial intuition, consider the realizable case of η = ε = 0 where we want to find the true h * . if h * really were drawn from our prior λ, and we query a point x, we will see a 1 with probability e h∼λ h(x). then the most informative point to query is the one we are least confident in, i.e., the point x * maximizing r(x)suppose an algorithm queries x 1 , . . . , x m and receives the majority label under h ∼ λ each time. then the fraction of h ∼ λ that agree with all the queries is at least 1 -m i=1 r(x i ) ≥ 1 -mr(x * ). this suggests that, if r(x * ) ≪ 1 m , it will be hard to uniquely identify h * . it is not hard to formalize this, showing that: if no single hypothesis has 75% probability under λ, and any algorithm exists with sample complexity m and 90% success probability at finding h * , we must have r(x * ) ≥ 1 10m . this immediately gives an algorithm for the η = ε = 0 setting: query the point x maximizing r(x), set w(h) = 0 for all hypotheses h that disagree, and repeat. as long as at least two hypotheses remain, the maximum probability will be 50% < 90% and each iteration will remove an ω( 1 m ) fraction of the remaining hypotheses; thus after o(m log h) rounds, only h * will remain. this is the basis for dasgupta .handling noise: initial attempt. there are two obvious problems with the above algorithm in the agnostic setting, where a (possibly adversarial) η fraction of locations x will not match h * . first, a single error will cause the algorithm to forever reject the true hypothesis; and second, the algorithm makes deterministic queries, which means adversarial noise could be placed precisely on the locations queried to make the algorithm learn nothing.to fix the first problem, we can adjust the algorithm to perform multiplicative weights: if in round i we query a point x i and see y i , we setfor a small constant α = 1 5 . to fix the second problem, we don't query the single x * of maximum r(x * ), but instead choose x according to distribution q over many points x with large r(x).to understand this algorithm, consider how log λ i (h * ) evolves in expectation in each step. this increases if the query is correct, and decreases if it has an error. a correct query increases1111 0000 h 3 10 -6 0000 1110 y 0000 1111 figure 1: an example demonstrating that the weight of the true hypothesis can decrease if λ is concentrated on the wrong ball. in this example, the true labels y are closest to h3. but if the prior λ on hypotheses puts far more weight on h1 and h2, the algorithm will query uniformly over where h1 and h2 disagree: the second half of points. 
over this query distribution, h1 is more correct than h3, so the weight of h3 can actually decrease if λ(h1) is very large.proportion to the fraction of λ placed on hypotheses that get the query wrong, which is at least r(x); and the probability of an error is at most η max x q(x)dx(x) . if at iteration i the algorithm uses query distribution q, some calculation gives that.(1)the algorithm can choose q to maximize this bound on the potential gain. there's a tradeoff between concentrating the samples over the x of largest r(x), and spreading out the samples so the adversary can't raise the error probability too high. we show that if learning is possible by any algorithm (for a constant factor larger η), then there exists a q for which this potential gain is significant.then there exists a query distribution q over x withat a very high level, the proof is: imagine h * ∼ λ. if the algorithm only sees the majority label y on every query it performs, then its output h is independent of h * and cannot be valid for more than 80% of inputs by the ball assumption; hence a 99% successful algorithm must have a 19% chance of seeing a minority label. but for m * queries x drawn with marginal distribution q, without noise the expected number of minority labels seen is m * e, so e ≳ 1/m * . with noise, the adversary can corrupt the minority labels in h * back toward the majority, leading to the given bound.the query distribution optimizing (1) has a simple structure: take a threshold τ for r(x), sample from d x conditioned on r(x) > τ , and possibly sample x with r(x) = τ at a lower rate. this means the algorithm can efficiently find the optimal q.except for the caveat about λ not already concentrating in a small ball, applying lemma 2.1 combined with (1) shows that log λ(h * ) grows by ω( 1 m * ) in expectation for each query. it starts out at log λ(h * ) = -log h, so after o(m * log h) queries we would have λ(h * ) being a large constant in expectation (and with high probability, by freedman's inequality for concentration of martingales). of course λ(h * ) can't grow past 1, which features in this argument in that once λ(h * ) > 80%, a small ball will have large probability and lemma 2.1 no longer applies, but at that point we can just output any hypothesis in the heavy ball.handling noise: the challenge. there is one omission in the above argument that is surprisingly challenging to fix, and ends up requiring significant changes to the algorithm: if at an intermediate step λ i concentrates in the wrong small ball, the algorithm will not necessarily make progress. it is entirely possible that λ i concentrates in a small ball, even in the first iteration-perhaps 99% of the hypotheses in h are close to each other. and if that happens, then we will have r(x) ≤ 0.01 for most x, which could make the rhs of (1) negative for all q.in fact, it seems like a reasonable bayesian-inspired algorithm really must allow λ(h * ) to decrease in some situations. consider the setting of figure 1. we have three hypotheses, h 1 , h 2 , and h 3 , and a prior λ = (0.9, 0.099999, 10 -6 ). because λ(h 3 ) is so tiny, the algorithm presumably should ignore h 3 and query essentially uniformly from the locations where h 1 and h 2 disagree. in this example, h 3 agrees with h 1 on all but an η mass in those locations, so even if h * = h 3 , the query distribution can match h 1 perfectly and not h 3 . then w(h 1 ) stays constant while w(h 3 ) shrinks. 
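the weight update and thresholded query rule described above can be sketched in a few lines for a finite hypothesis class given as a 0/1 prediction matrix; the exact multiplicative factor (1 - α), the threshold τ, and the uniform sampling over the thresholded set are simplifications of the optimized query distribution q and are assumptions of this sketch, with α = 1/5 taken from the text.

import numpy as np

def mw_round(H, w, oracle, alpha=0.2, tau=0.25, rng=np.random.default_rng(0)):
    """one round of the multiplicative-weights active learner.
    H: (num_hypotheses, num_points) 0/1 matrix of hypothesis predictions;
    w: current weights over hypotheses; oracle(x) returns the observed label of point x."""
    lam = w / w.sum()                       # posterior lambda over hypotheses
    p1 = lam @ H                            # probability of label 1 at each point under lambda
    r = np.minimum(p1, 1.0 - p1)            # disagreement mass r(x)
    candidates = np.flatnonzero(r >= tau)   # spread queries over all points with large r(x)
    if candidates.size == 0:
        candidates = np.array([int(np.argmax(r))])
    x = int(rng.choice(candidates))         # crude stand-in for the optimized distribution q
    y = oracle(x)
    w = w * np.where(H[:, x] == y, 1.0, 1.0 - alpha)   # downweight hypotheses that disagree
    return w, x, y

in the realizable case (η = ε = 0), running the same loop with α = 1 simply discards every hypothesis that disagrees with a query, recovering the elimination scheme described earlier.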
w(h 2 ) shrinks much faster, of course, but since the denominator is dominated by w(h 1 ) , λ(h 3 ) will still shrink. however, despite λ(h 3 ) shrinking, the algorithm is still making progress in this example: λ(h 2 ) is shrinking fast, and once it becomes small relative to λ(h 3 ) then the algorithm will start querying points to distinguish h 3 from h 1 , at which point λ(h 3 ) will start an inexorable rise.our solution is to "cap" the large density balls in λ, dividing their probability by two, when applying lemma 2.1. our algorithm maintains a set s ⊆ h of the "high-density region," such that the capped distribution:has no large ball. then lemma 2.1 applies to λ, giving the existence of a query distribution q so that the corresponding r(x) is large. we then define the potential functionfor h * / ∈ s i , and ϕ i = 0 for h * ∈ s i . we show that ϕ i grows by ω( 1 m * ) in expectation in each iteration. thus, as in the example of figure 1, either λ(h * ) grows as a fraction of the whole distribution, or as a fraction of the "low-density" region.if at any iteration we find that λ has some heavy ball b(µ, 2η + ε) so lemma 2.1 would not apply, we add b (µ ′ , 6η + 3ε) to s, where b (µ ′ , 2η + ε) is the heaviest ball before capping. we show that this ensures that no small heavy ball exists in the capped distribution λ. expanding s only increases the potential function, and then the lack of heavy ball implies the potential will continue to grow.thus the potential (2) starts at -2 log |h|, and grows by ω( 1 m * ) in each iteration. after o(m * log h) iterations, we will have ϕ i ≥ 0 in expectation (and with high probability by freedman's inequality). this is only possible if h * ∈ s, which means that one of the centers µ of the balls added to s is a valid answer.in fact, with some careful analysis we can show that with 1 -δ probability that one of the first o(log h δ ) balls added to s is a valid answer. the algorithm can then check all the centers of these balls, using the following active agnostic learning algorithm: proof. the algorithm is the following. take any pair h, h ′ with ∥h-h). one of h, h ′ is wrong on at least half the queries; remove it from h and repeat. at the end, return any remaining h.to analyze this, let h * ∈ h be the hypothesis with error η. if h * is chosen in a round, the other h ′ must have error at least 2η. therefore the chance we remove h * is at most δ/ |h|. in each round we remove a hypothesis, so there are at most |h| rounds and at most δ probability of ever crossing off h * . if we never cross off h * , at the end we output some h with ∥h -h * ∥ ≤ 3η, which gives ε = 3η.the linear dependence on |h| makes the theorem 2.2 algorithm quite bad in most circumstances, but the dependence only on |h| makes it perfect for our second stage (where we have reduced to o(log |h|) candidate hypotheses).overall, this argument gives an o m * log |h| δ + log |h| δ log log|h| δ sample algorithm for agnostic active learning. one can simplify this bound by observing that the set of centers c added by our algorithm form a packing, and must therefore all be distinguishable by the optimal algorithm, so m * ≥ log c. this gives a bound ofby starting with an η-net of size n , we can reduce |h| to n with a constant factor increase in η.with some properly chosen constants c 4 and c 5 , the entire algorithm is formally described in algorithm 1.remark 1: as stated, the algorithm requires knowing m * to set the target sample complexity / number of rounds k. 
this restriction could be removed with the following idea. m * only enters the analysis through the fact that o 1 m * is a lower bound on the expected increase of the potential function in each iteration. however, the algorithm knows a bound on its expected increase in each round i; it is the valueoptimized in the algorithm. therefore, we could use an adaptive termination criterion that stops at iteration k if). this will guarantee that when terminating, the potential will be above 0 with high probability so our analysis holds.remark 2: the algorithm's running time is polynomial in |h|. this is in general not avoidable, since the input is a truth table for h. the bottleneck of the computation is the step where the algorithm checks if the heaviest ball has mass greater than 80%. this step could be accelerated by randomly sampling hypothesis and points to estimate and find heavy balls; this would improve the dependence to nearly linear in |h|. if the hypothesis class has some structure, like the binary search example, the algorithm can be implemented more efficiently.for every x ∈ x find a query distribution by solvingquery x ∼ q * , getting label yfind the best hypothesis ĥ in c using the stage two algorithm in theorem 2.2 return ĥgeneralization for better bounds. to get a better dependence for 1d threshold functions, we separate out the lemma 2.1 bound on (1) from the analysis of the algorithm given a bound on (1).then for particular instances like 1d threshold functions, we get a better bound on the algorithm by giving a larger bound on (1). theorem 2.3. suppose that d x and h are such that, for any distribution λ over h such that no radius-(c 4 η + c 5 ε) ball has probability more than 80%, there exists a distribution q over x such thatfor some β > 0. then for ε ≥ c 1 η, c 4 ≥ 300, c 5 = 1 10 and c 1 ≥ 90c 4 , let n = n (h, d x , η) be the size of an η-cover of h. algorithm 1 solves (η, ε, δ) active agnostic learning withcorollary 2.4. there exists a constant c 1 > 1 such that, for 1d threshold functions and ε > c 1 η, algorithm 1 solves (η, ε, δ) active agnostic learning with o log 1 εδ log log 1 ε δ samples.proof. because the problem is only harder if η is larger, we can raise η to be η = ε/c, where c > 1 is a sufficiently large constant that theorem 2.3 applies. then 1d threshold functions have an η-cover of size n = o(1/ε). to get the result by theorem 2.3, it suffices to show β = θ(1).each hypothesis is of the form h(x) = 1 x≥τ , and corresponds to a threshold τ . so we can consider λ to be a distribution over τ .let λ be any distribution for which no radius-r with probability greater than 80% ball exists, for r = c 4 η + c 5 ε. for any percent p between 0 and 100, let τ p denote the pth percentile of τ under λ (i.e., the smallest t such that pr ≥ p/100). by the ball assumption, τ 10 and τ 90 do not lie in the same radius-r ball. hence ∥h τ10 -h τ90 ∥ > r, orwe let q denote (d x | τ 10 ≤ x < τ 90 ). then for all x ∈ supp(q) we have r(x) ≥ 0.1 andtherefore we can set3 proof of lemma 2.1lemma 2.1 (connection to opt). define ∥h -let λ be a distribution over h such that no radius-(2η + ε) ball b centered on h ∈ h has probability at least 80%.let m * = m * h, d x , η, ε, 99 100 . then there exists a query distribution q over x with, 1994, dasgupta, 2005, nowak, 2011]. 
we say an active agnostic learning algorithm a solves an instance (h, d x , η, ε, δ) with m measurements if, for every distribution d with marginal d x and for which some h * ∈ h has err(h * ) ≤ η, with probability 1 -δ, a uses at most m queries and outputs h ∈ h with err h ≤ η + ε., the smallest set s ⊆ h such that for every h ∈ h there exists h ′ ∈ s with pr x∼d x ≤ α.we maintain a set of weights w(h) for each h ∈ h, starting at 1; these induce a distribution λ(h) := w(h) h w(h) which we can think of as our posterior over the "true" h * . but if the prior λ on hypotheses puts far more weight on h1 and h2, the algorithm will query uniformly over where h1 and h2 disagree: the second half of points. over this query distribution, h1 is more correct than h3, so the weight of h3 can actually decrease if λ(h1) is very large. it starts out at log λ(h * ) = -log h, so after o(m * log h) queries we would have λ(h * ) being a large constant in expectation (and with high probability, by freedman's inequality for concentration of martingales).099999, 10 -6 ). because λ(h 3 ) is so tiny, the algorithm presumably should ignore h 3 and query essentially uniformly from the locations where h 1 and h 2 disagree. w(h 2 ) shrinks much faster, of course, but since the denominator is dominated by w(h 1 ) , λ(h 3 ) will still shrink. for the algorithm to succeed on a sample h, its output h must have ∥h -h∥ ≤ 2η + ε. remember that w i (h) denote the weight of hypothesis h in iteration i and λ i,s (h) = wi(h) h ′ ∈s wi(h ′ ) for some s ⊆ h denote the proportion of h in s.because h ′ ⊆ h is a maximal 2η-packing, every hypothesis in h is within 2η of some hypothesis in h ′ . the problem h, d x , c 4 η, c 5 ε -2η, 99 100 is harder than the problem h ′ , d x , c 4 η, c 5 ε, 99 100 because we can reduce the latter to the former by simply adding more hypotheses and solve it then map the solution back by returning the closest hypothesis in h ′ . there exist some constants c 1 , c 2 and c 3 such that for any instance (h, d x , η, ε, δ) with ε ≥ c 1 η, algorithm 1 solves the instance with sample complexity m(h, d x , η, ε, δ) ≲ m * h, d x , c 2 η, c 3 ε, 99 100 + log 1 δ • log n (h, d x , η) δ and polynomial time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/575.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/575.txt new file mode 100644 index 0000000000000000000000000000000000000000..aba4d3ff86ce1eb51d83b3b548d635519165c5b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/575.txt @@ -0,0 +1 @@ +this section presents the complete versions of theorems 1.4 and 2.2. we provide a concise overview of the key ideas and techniques employed in the proofs. all proofs appear in the appendices. please refer to section 3 for a complete overview of preliminaries, including all technical terms and definitions.h has a pac learning rule that is h has a pac learning rule that is stable according to definition a stable according to definition b ⇐⇒ this type of equivalence is weak because it does not imply that a learning rule satisfying one definition also satisfies the other. these are bayesian definitions of stability in which p is some fixed prior that depends only on the class h and the learning rule a, and does not depend on the population distribution d. 
in a nutshell,proved that (i) a class h is pure dp learnable if and only if the fractional clique dimension of h is finite; (ii) the fractional clique dimension is finite if and only if there exists a polynomial q(m) and a distribution over hypothesis p m , such that for every realizable sample s of size m, we have.the next step in the proof is to define a learning rule a: (i) sample hypotheses from the prior p; (ii) return the first hypothesis h that is consistent with the input sample s (i. since the posterior a(s) is supported on {h : l s (h) = 0}, a simple calculation which follows from equation(1)shows that for every realizable distribution d, d ∞ (a(s) p) ≤ log(q(m)) almost surly where s ∼ d m . let x be a set, let h ⊆ {0, 1} x be a hypothesis class, and let a be a learning rule. a population distribution is a distribution d ∈ ∆(x × {0, 1}) over labeled domain elements, and a prior distribution is a distribution p ∈ ∆ {0, 1} x over hypotheses. let p y , q y be the distributions of y when x is sampled from p x , q x , respectively. let x be a set, let h ⊆ {0, 1} x be a hypothesis class, and let a be a learning rule. to see that such a machine can estimate kl(a(st) p), observe that if a uses some finite number of random coins, then a(st) has a finite support, and so computing kl(a(st) p) involves querying p at a finite number of locations.the fractional clique number characterizes pure dp learnability, as follows: given that h is dp learnable, we define a learning rule a and a prior p, and show that a pac learns h subject to distribution-independent kl-stability with respect to p.let a be a randomized learning rule given by s → q s ∈ ∆ {0, 1} x such that q s (h) = p(h|c s ) if h ∈ c s , and q s (h) = 0 otherwise.to see that the kl is indeed small, let p a ⋆ (s ′ ),s ′ and p h ⋆ ,s ′ be two joint distributions. the variable s ′ has marginal p s ′ in both distributions, a ⋆ (s ′ ) ∼ q depends on s ′ , but h ⋆ ∼ p ⋆ is independent of s ′ . then, kl(q p ⋆ ) = kl p a ⋆ (s ′ ) p h ⋆ ≤ kl p a ⋆ (s ′ )|s ′ p h ⋆ |s ′ p s ′ (lemma a. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/576.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/576.txt new file mode 100644 index 0000000000000000000000000000000000000000..d5f5f9eaf47af82409fa019ddbc4dd45bbe83c91 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/576.txt @@ -0,0 +1 @@ +the accurate forecasting of labor and non-labor costs at the project level is a crucial aspect of effective project management. a reliable expense forecast model can aid project teams in anticipating the resources required at each stage of the project, ensuring timely completion and adherence to budget constraints. this is particularly important in businesses where revenues are closely linked to costs, such as contract research organizations, where a significant portion of contract awards is derived from customer reimbursements for expenses incurred.traditionally, financial analysts within organizations have relied on historical cost data from similar projects to predict future project expenses. however, these estimations can be prone to errors due to variations in factors such as budget and project type, leading to suboptimal decision-making and potential financial losses. 
to enhance the accuracy of expense forecasting, the application of advanced data science techniques has gained increasing prominence in recent years.project expenses typically exhibit time-series patterns, making them amenable to analysis using time-series forecasting techniques. one such approach is the auto-regressive integrated moving average (arima) model, which has shown promise in predicting project expenses. however, arima is not the most suitable method for addressing budget-related c. qian and l. glass are with the analytics center of excellence, iqvia, usa. e-mail: alextoqc@gmail.com (c. qian), lmglass@us.imshealth.com (l. glass) n. sidiropoulos is with the department of electrical and computer engineering, university of virginia, charlottesville, va 22904. e-mail: nikos@virginia.edu concerns and may exhibit reduced accuracy in the early stages of a project. an alternative approach involves the use of recurrent neural networks (rnns), such as long short-term memory networks (lstms) , which have demonstrated success in various time-series prediction tasks. however, the application of lstms to project-level expense forecasting is not without challenges. the scarcity of training data and lack of disciplined ways of capturing budget constraints limit the effectiveness of lstm-based models in this context.in light of these limitations, there is a pressing need for the development of novel expense forecasting models that can effectively leverage available data, account for projectspecific constraints, and ultimately improve the accuracy of cost predictions. the integration of cutting-edge techniques from the fields of signal processing, machine learning, and optimization offers promising avenues for advancing the stateof-the-art in expense forecasting, paving the way for more efficient and cost-effective project management across a wide range of industries.this study aims to contribute to this ongoing research by proposing and investigating a novel approach to expense forecasting, with a particular focus on addressing the aforementioned challenges and enhancing the practical applicability of these techniques in real-world settings. in contrast to conventional time-series based methods, our proposed technique transforms the forecasting problem into a matrix completion problem with constraints. we introduce the triple simplex matrix completion (tsmc) algorithm, which utilizes three simplex projections to limit the factor matrices and missing values, ensuring that forecasts do not exceed budget constraints. our method employs an inexact projected gradient descent algorithm to solve the non-negative matrix completion problem with three simplex constraints. experimental results using two real datasets demonstrate that our proposed approach outperforms existing algorithms in expense prediction. a reliable expense forecast model can aid project teams in anticipating the resources required at each stage of the project, ensuring timely completion and adherence to budget constraints. we introduce the triple simplex matrix completion (tsmc) algorithm, which utilizes three simplex projections to limit the factor matrices and missing values, ensuring that forecasts do not exceed budget constraints. experience with real project expense data suggests that projects with similar characteristics tend to have similar expense patterns. 
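the distinctive ingredient of tsmc as described above is the simplex projection applied inside each inexact projected-gradient step, which keeps the factors non-negative and their sums equal to one so that forecasts respect the budget; the sketch below shows the standard euclidean projection onto the probability simplex and one projected-gradient update of a factor matrix, with the step size, matrix shapes and the choice of projecting rows being illustrative assumptions rather than the paper's exact algorithm.

import numpy as np

def project_simplex(v):
    """euclidean projection of a vector v onto the probability simplex
    {x : x >= 0, sum(x) = 1}, using the standard sort-based algorithm."""
    u = np.sort(v)[::-1]
    css = np.cumsum(u)
    rho = np.nonzero(u + (1.0 - css) / (np.arange(len(v)) + 1) > 0)[0][-1]
    theta = (css[rho] - 1.0) / (rho + 1.0)
    return np.maximum(v - theta, 0.0)

def projected_gradient_step(W, grad_W, step):
    """one inexact projected-gradient update: take a gradient step on W,
    then project every row of W back onto the probability simplex."""
    W_new = W - step * grad_W
    return np.apply_along_axis(project_simplex, 1, W_new)

# toy usage
W = np.abs(np.random.rand(6, 3))
G = np.random.randn(6, 3)
W = projected_gradient_step(W, G, step=0.1)
print(W.sum(axis=1))   # every row sums to 1 after the projection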
for instance, smaller projects typically have faster expense patterns, while larger projects tend to have slower expense patterns that take years to complete.there are three overarching constraints that must be considered in project expense forecasting practice: the known ted of the project, the allocated project budget, and the non-negativity of expense values. specifically, we can define x n = y n /b n , where x n is the expense vector expressed in terms of fractions of the budget allocated to the n-th project.is a matrix containing a learned dictionary of expense patterns, with w f being the f -th pattern, and h n is the project embedding. the constraints on w and h enforce the low-rank approximation within the budget limits, while the constraints on z ensure that the sum of the predicted and observed expenses is within the budget limit.1) update w: assuming we have estimated w, h, and z after r iterations, we need to solve the subproblem with respect to w for the next iteration, i.w is a parameter associated with the step size which can be chosen as the spectral norm of (h (r) ) t h (r) , ⟨•⟩ is the inner product, the gradient ∇f ( ŵ(r) ) is given by ∇f ( ŵ(r) ) = ŵ(r) (h (r) ) t -z (r) h (r) , and ŵ(r) = w (r) + β (r) (w (r) -w (r-1) ).algorithm 1 the tsmc algorithm 1: input {xij, ∀i, j}, f 2: initialize w and h such that both matrices are non-negative, the rows of w sum up to one and the columns of h sum up to one. rmse measures the average error between the true expense values and their estimates, while relative rmse takes into account the proportion of this error relative to the magnitude of the true values. these metrics are calculated based on the formulas: rmse = ∥x -x∥ f / √ n and relative rmse = ∥x -x∥ f /∥x∥ f , where n is the number of testing samples, x contains the true expense values, and x is the corresponding estimate. we can use the likelihood of a project belonging to different expense patterns, represented by each row of matrix h, to cluster the projects. component 1 mostly consists of short phase i studies with an average budget of $424,718, while component 3 mostly consists of longer phase iii studies with an average budget of $2,022,607. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/577.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/577.txt new file mode 100644 index 0000000000000000000000000000000000000000..76e863d658766c0bac0feeeecc10b74091f43e48 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/577.txt @@ -0,0 +1 @@ +clustering methods are popular techniques widely used to extract knowledge from a variety of datasets in numerous applications . clustering methods aim at grouping similar data into a subset known as cluster. formally, the clustering consists in partitioning a dataset annotated x = {x 1 , ..., x n } with n = |x| into c clusters c 1 , ..., c c , so that x = ∪ c i=1 c i . in this paper, we only consider hard clustering according to which ∀i ∈ , ∀j = i ∈ , c i ∩ c j = ∅. we focus on density-based clustering methods that are able to identify clusters of arbitrary shape. another interesting element in these approaches is that they do not require the user to specify the number of clusters c. density-based clustering is based on the exploration of high concentrations (density) of points in the dataset . 
in density-based clustering, a cluster in a data space is a contiguous region of high point density separated from other such clusters by contiguous regions of low point density. density-based clustering has difficulty detecting clusters whose density is low relative to high-density clusters. low-density points are either considered as outliers or included in another cluster. in the same way, near clusters of similar densities are often grouped into one cluster. moreover, density-based clustering does not manage high-dimensional data well because the density is not evenly distributed and may vary severely. this paper tackles these challenging issues by defining a new density-based clustering approach. among the best-known density-based algorithms are dbscan, optics, hdbscan, dbclasd, and denclue. historically first, dbscan introduces density as a minimum number of points within a given radius to discover clusters. nevertheless, dbscan poorly manages clusters having different densities. optics addresses these varying-density clusters by ordering points according to a density measure. hdbscan improved the approach by introducing a new density measure and an optimization function aiming at finding the best clustering solution. although these approaches solve part of the problem of varying-density clusters, they suffer from unevenly distributed density and high-dimensional datasets. they still mismanage low-density clusters by tending to consider them as outliers or to merge them into a higher-density cluster. dbclasd introduces a probabilistic approach to density. dbclasd assumes that clusters follow a uniform probability law, allowing it to be parameter-free. however, it suffers in detecting non-uniform clusters because of this strong assumption. as dbclasd is also a grid-based approach, its density calculations are less precise when the dimension increases. finally, denclue detects clusters using the probability density function (p.d.f) of points in data space. denclue extends clustering approaches such as dbscan and k-means. therefore it also inherits from these the difficulty of detecting low-density clusters when the density is not evenly distributed. the common problems of the existing density-based clustering approaches are related to the difficulty of handling low-density clusters, near clusters of similar densities, and high-dimensional data. the other limit concerns their inefficiency in properly handling nested clusters of different shapes and uneven distributions of densities. in this paper, we propose a new clustering approach that overcomes these limitations. briefly, the contributions of this article are as follows: (1) we propose decwa (density-based clustering using wasserstein distance), a hybrid solution combining density and probabilistic approaches. it first produces sub-clusters using the p.d.f defined on pairwise distances. then, it merges sub-clusters with similar p.d.f and close in distance. (2) we propose to consider every cluster as a contiguous region of points where the p.d.f has its own law of probability, to overcome the previously explained limitations. (3) we conducted experiments on a wide variety of datasets; decwa outperforms state-of-the-art density-based algorithms in clustering quality by an average of 20%. also, it works efficiently in high-dimensional data compared to the others. formally, the clustering consists in partitioning a dataset annotated x = {x 1 , ..., x n } with n = |x| into c clusters c 1 , ..., c c . another interesting element in these approaches is that they do not require the user to specify the number of clusters c.
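a minimal sketch of the merging step behind decwa, under the simplifying assumption that each sub-cluster is summarized by the empirical distribution of its pairwise distances: two sub-clusters are merged when they are close in space and their distance distributions are close in wasserstein distance; the thresholds lam and alpha and the use of the minimum inter-cluster distance as a proxy for the mst link are illustrative choices, not the paper's exact procedure.

import numpy as np
from scipy.spatial.distance import cdist, pdist
from scipy.stats import wasserstein_distance

def should_merge(cluster_a, cluster_b, lam=1.0, alpha=0.1):
    """decide whether two sub-clusters (arrays of shape [n_i, d]) should be merged:
    (i) the minimum inter-cluster distance is at most lam, and
    (ii) the 1d wasserstein distance between their intra-cluster
         pairwise-distance distributions is at most alpha."""
    d_between = cdist(cluster_a, cluster_b).min()   # proxy for the mst edge linking the sub-clusters
    ws = wasserstein_distance(pdist(cluster_a), pdist(cluster_b))
    return d_between <= lam and ws <= alpha

rng = np.random.default_rng(0)
a = rng.normal(0.0, 0.3, size=(40, 2))
b = rng.normal(0.5, 0.3, size=(40, 2))
print(should_merge(a, b, lam=1.0, alpha=0.2))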
density-based clustering is based on the exploration of high concentrations (density) of points in the dataset. historically first, dbscan introduces density as a minimum number of points within a given radius to discover clusters. optics addresses these varying density clusters by ordering points according to a density measure. although these approaches solve part of the problem of varying density clusters, they suffer from unevenly distributed density and high-dimensional datasets.the common problems of the existing density-based clustering approaches are related to the difficulty of handling low-density clusters, near clusters of similar densities, and high-dimensional data. briefly, the contributions of this article are as follows:(1)we propose decwa (density-based clustering using wasserstein distance), a hybrid solution combining density and probabilistic approaches. formally, we represent a cluster c i by a set of pairwise distances between the points contained in the cluster c i . we consider that the value of the edge linking sub-graphs corresponding to c i and c j as the distance between c i and c j . because of the mst structure, we assume that this distance is nearly the minimum distance between the points of c i and c j . c i and c j ), we verify that d(c i , c j ) ≤ λ and ws(d i , d j ) ≤ α. this is operated by the union of d i , and d j and considering the points of c i and c j as belonging to the same sub-cluster. sd stands for near clusters of similar densities, it means that the marked datasets have at least two overlapping clusters with similar densities.the outlier ratio is not relevant in case of a bad ari score because in this case, although the points are placed in clusters, clustering is meaningless. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/578.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/578.txt new file mode 100644 index 0000000000000000000000000000000000000000..bcee728c759370df143f2c0f7f8ac01f92023cc5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/578.txt @@ -0,0 +1 @@ +collaborative training of machine learning (ml) models across networked agents allows users optimize a common ml model over the union of their local data without directly sharing the data . the prevailing research in collaborative machine learning has concentrated on server-based federated learning . in the meanwhile, decentralized learning with locally connected agents has gained significant attention and interest recently due to its scalability and resistance to singlenode failure .consensus-based decentralized stochastic gradient descent (d-sgd) is a widely used first-order optimization method for decentralized learning . the plain version of d-sgd follows an iterative procedure where in every iteration each agent updates its model by combining the models received from its neighbors and its own stochastic gradient. the convergence of d-sgd has been widely studied in the literature - . in the predominant body of literature, the convergence speed of d-sgd algorithm is characterized by the level of error reduction per iteration, while the actual runtime (or communication costs) in every iteration is neglected. as pointed out in , there is a fundamental trade-off between the runtime per iteration and the error-versus-iterations convergence. 
this aspect is even more profound when implementing d-sgd over wireless networks, where access control and communication coordination is needed to reduce information loss caused by packet collision. the specific choice of access control protocol determines the amount of communication costs (e.g., transmission slots) in every consensus updating iteration. in some cases, introducing partial communication where a subset of nodes or links are activated in every iteration may accelerate the convergence of d-sgd in terms of error reduction per communication cost.communication-efficient decentralized learning has been studied in many existing works , , where the main focus is on the impact of model compression to reduce the amount of data transmitted over every link. others works , have explored graph sparsification techniques for improving convergence speed with reduced communication frequency. following this line, more recent works such as and have pointed out an important message that justifies the use of graph sparsification: "not all links are equally important in a graph". fast convergence can be achieved by allowing more important links to communicate more often, while the importance of a link is characterized by its impact on the connectivity of the communication graph.another significant feature of wireless networks is their broadcasting nature. with one broadcast transmission, a node can reach all its neighbors, increasing the level of information dissemination in the network. in our work, the main motivation is that "not all nodes are equally important in a graph". we propose a broadcast-based node scheduling scheme for d-sgd over wireless networks, wherein in each iteration, we sample subgraphs of the base topology with probabilities related to the importance of the nodes contained in the subgraphs. to address the asymmetric information flow associated with broadcast communication, we also present a general method for generating symmetric laplacian matrices from directed subgraphs of a given base topology. additionally, we optimize the weight matrix to accelerate convergence. as compared to similar designs with link-based scheduling , our proposed method bass shows significant performance gain under the same communication cost, especially when the network topology contains densely connected local structures. in the predominant body of literature, the convergence speed of d-sgd algorithm is characterized by the level of error reduction per iteration, while the actual runtime (or communication costs) in every iteration is neglected. in some cases, introducing partial communication where a subset of nodes or links are activated in every iteration may accelerate the convergence of d-sgd in terms of error reduction per communication cost. as compared to similar designs with link-based scheduling, our proposed method bass shows significant performance gain under the same communication cost, especially when the network topology contains densely connected local structures.we consider a communication protocol wherein one communication round (one iteration of the d-sgd algorithm) is divided into multiple transmission slots, as illustrated in fig. to reflect the direction of communication, instead of considering an undirected graph g for the communication topology, we view it as a directed graph g d = (v, e d ) with bi-directional links between each pair of connected nodes. 
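a small sketch of one d-sgd iteration with broadcast-based node scheduling: a random subset of nodes broadcasts, a symmetric mixing matrix is formed from the activated links (here simply w = i - ε·l, with l the laplacian of a symmetrized version of the activated directed edges), and every agent mixes the received models before its local gradient step; the per-node activation probabilities, ε and the symmetrization rule are illustrative assumptions, not the optimized weights or the laplacian construction proposed in the paper.

import numpy as np

def dsgd_step(X, grads, adjacency, activation_prob, lr=0.1, eps=0.2,
              rng=np.random.default_rng(0)):
    """one d-sgd iteration with broadcast node scheduling.
    X: (n_nodes, dim) current models; grads: (n_nodes, dim) local stochastic gradients;
    adjacency: (n_nodes, n_nodes) 0/1 base topology; activation_prob: per-node probabilities."""
    n = X.shape[0]
    active = rng.random(n) < activation_prob    # nodes that broadcast in this round
    A_dir = adjacency * active[:, None]         # outgoing links of the broadcasting nodes
    A_sym = 0.5 * (A_dir + A_dir.T)             # one simple way to symmetrize the directed links
    L = np.diag(A_sym.sum(axis=1)) - A_sym      # laplacian of the activated (symmetrized) links
    W = np.eye(n) - eps * L                     # symmetric, doubly stochastic mixing matrix
    return W @ X - lr * grads                   # consensus mixing followed by a local sgd update

# toy usage on a ring of 6 nodes
n, d = 6, 3
adj = np.zeros((n, n))
for i in range(n):
    adj[i, (i + 1) % n] = adj[i, (i - 1) % n] = 1.0
X = np.random.randn(n, d)
X = dsgd_step(X, grads=np.zeros((n, d)), adjacency=adj, activation_prob=np.full(n, 0.5))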
considering the collision-free condition of simultaneously transmitting nodes, we divide the base topology into disjoint subsets of nodes and their outgoing directed links s k = {v k , e k }, k ∈ {1, . if all subsets are activated in one round, which represents the full communication case, the total communication cost per round will be q transmission slots. with partial communication, per-round convergence might be slowed down, but it can accelerate convergence in terms of actual runtime of the algorithm, especially under a limited communication budget.therefore, the number of scheduled subsets in each iteration is a random number, and on average, the number of required transmission slots per iteration is q l=1 p s l .here, b is the communication budget, which indicates the average number of transmission slots (also the average number of activated subsets) per iteration. for performance comparison, we also implement a modified version of matcha, and the full communication case, where all nodes are activated in every round. another observation is that matcha does not always outperform the full communication case, indicating that partial communication with link scheduling is not always beneficial for improving convergence per transmission slot. from this figure, we observe that the effect of partial communication varies a lot depending on the communication budget per iteration. on one hand, if the communication budget is very low (small activation percentage), the number of activated links per iteration is very low, which leads to poor information fusion/mixing. with this work, we further demonstrate that broadcast-based communication can be exploited to accelerate convergence (measured by improvement per transmission slot) of d-sgd over wireless networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/579.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/579.txt new file mode 100644 index 0000000000000000000000000000000000000000..11b69b96d88bd2a58541c0c17a150571a8273994 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/579.txt @@ -0,0 +1 @@ +federated learning (fl) in standard federated learning (fl), the server sends a model to a group of clients, who train it on their local data and then send their updated parameters back to the server for aggregation. among existing compression techniques, quantization and pruning have been implemented to reduce the complexity of inference and training of neural networks,.another widely applied technique is quantization, neural network models are generally constructed using 32-bit floating point numbers (fp32), which are more expensive in terms of computation, memory and energy than integers. we used the flowerframework to simulate 10 fl clients.as noted in previous works,the number of local iterations performed by clients during training can have an important impact on model aggregation.2show that spending more time on each client contributes to a more robust model, allowing sparser data communications while retaining approximately the same accuracy, even though this approach also results in a higher total number of local iterations. even though swat plays a significant role in reducing the communication cost, it also has an impact on the model accuracy, resulting in an overall hindrance. we chose to work with 1-bit, 4-bit and 8-bit quantization levels, using binary connectfor binary networks and the brevitasframework for 4-and 8-bit with the default quantization scheme. 
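the message-size reduction from quantization comes from transmitting low-bit integer codes plus a scale instead of fp32 weights; the sketch below is a generic symmetric uniform quantizer of the kind a client could apply to its update before sending it, and is an illustrative stand-in rather than the binaryconnect or brevitas quantization schemes used in the experiments.

import numpy as np

def quantize(weights, n_bits=8):
    """symmetric uniform quantization of an fp32 array to signed n-bit integer codes
    (n_bits >= 2; true 1-bit/binary weights need a sign-based scheme instead).
    returns the integer codes and the scale needed to dequantize them."""
    qmax = 2 ** (n_bits - 1) - 1
    scale = np.max(np.abs(weights)) / qmax if np.any(weights) else 1.0
    codes = np.clip(np.round(weights / scale), -qmax, qmax)
    return codes.astype(np.int8 if n_bits <= 8 else np.int32), scale

def dequantize(codes, scale):
    return codes.astype(np.float32) * scale

w = np.random.randn(1000).astype(np.float32)
codes, scale = quantize(w, n_bits=8)
print(codes.nbytes, w.nbytes)                        # 1000 bytes vs 4000 bytes per message
print(np.max(np.abs(w - dequantize(codes, scale))))  # worst-case quantization error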
the number of rounds needed to reach maximum accuracy, is not the same from one experiment to another, as it also depends on the level of quantization.3, it is necessary to perform 40 rounds of communication and 40 total epochs when using 1 local epoch, while in the case of 10 local epochs, 100 total epochs are needed within 10 rounds. still, in the case of one bit, increasing the number of epochs per round on the client from 1 to 10 considerably increases accuracy, from 48. for quantization, the message size depends only on the quantized weights, since the server knows the client's quantization. conclusion federated learning represents a new approach to training models in a distributed manner, bringing forth fresh optimization challenges due to the presence of embedded systems serving as fl clients. our easy to implement yet effective technique achieved up to a 50% reduction in message size without any significant impact on accuracy, thereby resulting in direct savings in energy and bandwidth costs. it is conceivable that combining quantization and pruning could further enhance message compression, although our results already demonstrate the significance of both techniques individually. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/58.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/58.txt new file mode 100644 index 0000000000000000000000000000000000000000..5eeaeb634680d64b2cdae75d85e7352239528374 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/58.txt @@ -0,0 +1 @@ +the complexities of computational problems in our world today make it increasingly appealing to, where possible, address such problems using collaborative autonomous agents. to this end, the domain of multi-agent collaboration has been studied from different perspectives in recent years. some interesting application domains include power systems, mobile sensing , disaster management , environment monitoring , and traffic light management .in several of these application domains, the environment is dynamic , and the interactions between the agents evolve over the problem horizon. thus, predefined interaction structures may not be useful. it is then significant to address how the agents interact in a dynamic environment to facilitate the application of algorithms that leverage such agent structures. in , this is referred to as the dynamic distributed multi-agent hierarchy generation ((dyndismhg)) problem. there exist other works in the literature with similar motivations . multi-agent hierarchies enable the application of certain distributed constraint optimization problem (dcop) proposals for multi-agent collaboration. additionally, multi-agent hierarchies have applications in mobile sensing agents and mobile ad-hoc networks.one common assumption among existing approaches is that there is an expected interaction graph to enable the generation of the multi-agent hierarchy. in open and dynamic environments such as a multi-agent rescue system or a mobile sensor team (mst), an agent's interactions can vary over time, and it may be introduced or removed from the environment at arbitrary periods . therefore, the agents will have to be equipped to generate and maintain the multi-agent hierarchy collaboratively.in this study, we discuss the ad-hoc distributed multi-agent hierarchy generation problem as an extension of the (dyndismhg) problem and propose an algorithm for this class of problems in multi-agent systems (mass). 
we focus on applying our approach in the dcop domain and show the proposed method's effectiveness via experiments. the proposed method has application in areas such as dcop msts where agent positions or environment may change and agent interactions have to be defined to enable the use of dcop methods.in what follows, we discuss related work in section 2. section 3 introduces the background and problem formulation of this study. section 4 discusses our proposed approach. in section 5, we first introduce the experiment setup and then discuss the results. we draw our conclusions in section 6. the proposed method has application in areas such as dcop mstswhere agent positions or environment may change and agent interactions have to be defined to enable the use of dcop methods. π is a function that, given an agent a j ∈ a, specifies the parent of a j already in the hierarchy.we consider adding an agent, removing an agent, and constraint function modification as events that transition the environment from one dcop to another. agent a j (referenced as j) is an agent already in the environment that i interacts with.the agent is set to an inactive state on initialization, and other initial properties are set (line 1). once the connection conditions are satisfied, the agent broadcasts an announce message in the environment (line 4) and waits for a period, condition, or timeout before proceeding (line 5). during this waiting period, an available agent in the environment receives the announce message and responds by sending an announceresponse message (lines 19-22).when i receives an announceresponse message, it adds the sending agent to a response list if it is still in an inactive state (lines 23-26). agent i then sends an addme message to the selected agent j and goes into an active state while it waits to hear from j (lines 7-9). on line 10, it sets a timeout property to enable resetting the state on a subsequent connect call if j never responds (lines 14-17), probably because it is no longer reachable.when i receives an addme message from j, it adds j to its children and sends a childadded message to j if i is in an inactive state (lines 28-30). when an alreadyactive message is received by i, it sets its state to inactive (line 45) to enable the next call to the connect procedure to pass the condition on line 3.after receiving a childadded message from j, i must be in an active state and still without a parent to proceed (line 36). agent i then sends a parentassigned message to j (line 39). when i receives a keepalive message, it adds the sender to the keep alive message list p (lines 6-9). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/580.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/580.txt new file mode 100644 index 0000000000000000000000000000000000000000..7dcd5c880f2f7e4723441fb9caf90a1dbbbddd81 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/580.txt @@ -0,0 +1 @@ +as machine learning (ml) algorithms are increasingly used in decision-making systems with high impact on individuals, there is a need to ensure not only that the decisions made are fair, but also to explain the decision to the individual affected. a system is considered to be fair if it does not discriminate based on protected personal characteristics such as race, sex, religion, etc. 
there have been instances where decision-making systems discriminated against individuals in domains such as criminal justice (the partnership on ai 2019), recruitment (tilmes 2022), and social services (gillingham 2019). an analysis of compas (northpointe 2019), a popular tool used in the us to predict whether criminals will re-offend, found that black defendants were identified incorrectly as re-offending at a higher rate than white defendants (larson et al. 2016).research on fairness has received increased attention in recent years and several fairness metrics have been developed to quantify the fairness of a system (see mehrabi et al. (2022) for an overview on bias and fairness in machine learning). these metrics can be classified into group fairness (i.e. detecting bias across different values of a protected attribute, e.g., male and female individuals (garg, villasenor, and foggo 2020)) and individual fairness (i.e. detecting bias for an individual compared to similar individuals (mukher-jee et al. 2020)). whilst several notions of evaluating fairness have been proposed in the literature, there is no agreement as to which fairness metric to apply in which scenario (verma and rubin 2018). furthermore, interpreting the meaning of the values returned by a metric is not always intuitive; for example, simply reporting the percentage level of fairness of a system (e.g., 80%) may not give full confidence for stakeholders in the system. most existing group metrics also require the specification of protected attributes and can only detect unwanted bias with respect to one binary protected attribute. finally, quantifying fairness requires full access to the training data and protected attributes in order to measure the difference in positive classifications across protected groups. in reality, this data may not be available and it may be difficult to pre-define protected attributes before deploying the system (haeri and zweig 2020).identifying a link between the input data and the final decision is an important step towards providing a fair and transparent explanation (hamon et al. 2022). computational argumentation has long been seen as a means for explaining reasoning (see vassiliades, bassiliades, and patkos (2021); cyras et al. (2021) for an overview). specifically, abstract argumentation frameworks (afs) were proposed as a way to represent and reason with conflicting information (dung 1995). several types of semantics have been proposed to evaluate the acceptability of arguments in afs (baroni et al. 2015) and their extensions. afs have been used for a variety of applications such as decision-making systems (amgoud and prade 2009; brarda, tamargo, and garcía 2021), recommender systems (cocarascu, rago, and toni 2019;rago, cocarascu, and toni 2018), knowledge-based systems (kökciyan et al. 2020), and planning and scheduling systems (cyras et al. 2019). however, until now they have not been explored in relation to individual fairness in decisionmaking systems.in this paper, we propose a novel argumentation-based approach for identifying bias in relation to individual fairness which does not require access to labelled data, the training algorithm, or the specification of protected attributes before deployment. we focus on individual fairness as subjects of decisions will mostly be concerned about their personal treatment, rather than any group. 
hence we move away from quantifying fairness using existing group fairness metrics and offer a transparent representation of the reasons for a classification from which an explanation can be extracted.we use a quantitative argumentation framework to represent the arguments of similar individuals that reason why the queried individual received a classification. reasons are differences in the values of attributes in the queried individual in relation to those of similar individuals with different classifications. the strength of attacks between attributevalue pairs is calculated as the proportion of similar individuals with particular characteristics and the overall evaluation is done using the weighted h-categorizer semantics (amgoud, doder, and vesic 2022) which calculates the final weights of arguments. as a result, final weights correspond to the attribute-value pairs that contribute most to the negative classification of a queried individual compared to similar individuals.the distance between individuals with the same attribute values is 0 and that the distance between two individuals increases in proportion to the number of attributes with different values in the individuals. intuitively, example 1 shows that given the queried individual in the top row and its similar individuals, (race, black) is the attribute-value pair contributing the most to the negative classification of the queried individual, since all combinations of values of the other attributes in the similar individuals lead to a positive classification. let e 0 be the queried individual and sim k (e 0 ) the set with the k individuals most similar to e 0 according to some similarity measure sim. if a similar positivelyclassified individual has a value for an attribute z different to the value of the negatively-classified queried individual, then the similar individual attacks that attribute-value pair in the queried individual. specifically all attribute-value pairs of similar individuals e i will attack the value of the attribute z in the queried individual e 0 if the values of z differ in e 0 and e i . given a binary classifier f : e → {+, -}, a queried individual e 0 , and the set of similar individuals sim k (e 0 ):. towards the queried individual's attribute-value pairs) and that attacks from the attribute-value pairs of a similar individual e i only occur towards an argument (z j , v(z j , e 0 )) when the values of the attribute z j differ in e 0 and e i (i. assume there is only one attribute z for which all positively-classified similar individuals have a different value than the queried individual and all other attributes have the same values for all individuals. table3shows that there is a greater percentage of negative labels for young or old individuals than mid-age individuals, and for single and divorced individuals compared to married individuals, however the percentage of negative labels does not vary greatly for marital.we took as queried individuals every individual with a negative classification from our test sets amounting to 7,252 and 1,253 individuals for the adult and bank marketing datasets, respectively.01; as shown by proposition 2, if all similar individuals have the same (negative) classification as a queried individual, the final weights of all arguments are 1 and hence the queried individual has been treated consistently with respect to the similar individuals. 
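a small sketch of the fixed-point iteration commonly used to compute weighted h-categorizer degrees, of the kind referred to above; the toy arguments, base weights, and attack relation are made-up assumptions, and this is not the authors' implementation.

# Generic fixed-point iteration for the weighted h-categorizer semantics:
#   sigma(a) = w(a) / (1 + sum of sigma(b) over attackers b of a).
# The arguments, base weights, and attacks below are illustrative only.
base_weight = {"a1": 1.0, "a2": 1.0, "a3": 0.5}
attackers = {"a1": ["a2", "a3"], "a2": [], "a3": ["a2"]}

sigma = dict(base_weight)                    # start from the base weights
for _ in range(100):                         # iterate towards the fixpoint
    sigma = {
        a: base_weight[a] / (1.0 + sum(sigma[b] for b in attackers[a]))
        for a in base_weight
    }

# Lower final weights indicate arguments that are more strongly attacked,
# i.e. attribute-value pairs contributing more to the negative classification.
print({a: round(s, 3) for a, s in sigma.items()})

intuitively, an argument ends up with a low final weight exactly when many similar, positively-classified individuals attack it.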
furthermore, 70% of the queried individuals are consistent with the similar individuals, meaning all similar individuals are also classified negatively. furthermore, 21% of the queried individuals are consistent with similar individuals, hence all similar individuals are also classified negatively. there is no difference in classifications between a queried individual and its similar individuals, or bias-attr = 0 will be identified as contributing the most to the negative classification of the queried individual.our method correctly identifies that, for all negatively-classified queried individuals where at least one of the similar individuals is positively-classified, bias-attr = 0 is amongst the weakest arguments and therefore identified as contributing the most to the negative classification, i. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/581.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/581.txt new file mode 100644 index 0000000000000000000000000000000000000000..a48d61de31d6a1dc0409a73365fe4d7ac93bf454 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/581.txt @@ -0,0 +1 @@ +traffic forecasting has raised intensive attention with the increasing spatio-temporal data collected by entities like governments and transportation companies, which contributes to convenient transportation services, including order dispatching, route planning, and ride sharing.numerous efforts have been made to achieve encouraging accuracy in traffic forecasting by addressing spatio-temporal dependencies within data originating from road network topology and traffic dynamics. traditionally, early works viewed traffic forecasting as a time series problem and addressed it via statistical and machine learning methods , . however, these methods overlooked spatial dependencies, leading to less-than-ideal performance. recently, graph neural networks(gnns) and their variants - have dominated this field, arising from their remarkable ability to capture correlations among nodes. prominent approaches, exemplified by stgcn-based methods - characterize the road network † is the corresponding author.topology by representing the pair-wise relationships among nodes using simple graphs and model the traffic dynamics as diffusion progress .although the aforementioned approaches have shown encouraging performance, we argue that the simple graph squeezes the complex spatio-temporal dependencies into pairwise ones, which leads to incomplete modeling of the road network topology and traffic dynamics. technically, most gnnsbased works typically adopt graph convolution networks over a simple geographic graph constructed with spatial correlations and model the traffic dynamics as a discrete diffusion process. they face limitations in two critical aspects: i) the use of simple pair-wise graphs does not adequately model the complex road network topology. ii) discrete gcns are inadequate to model traffic dynamics for effectively capturing the evolution of the traffic system.to address these issues, we propose spatio-temporal hypergraph ordinary differential equation network(sthode) for traffic forecasting. the key idea of sthode is to leverage hypergraph structure to represent complex spatial correlations and ordinary differential equations (odes) to model the evolution of dynamical systems. to achieve this goal effectively, we introduce two modules, i.e. spatial module and temporal module. 
in the spatial module, we construct a spatial hypergraph and employ an adaptive mixhop hypergraph ode layer to capture high-order spatial dependencies caused by the road network topology. in the temporal module, we construct a temporal hypergraph and leverage a hyperedge evolving ode layer to capture high-order temporal dependencies caused by the traffic dynamics. furthermore, we aggregate the outputs of the stacked sthode layers, leveraging their mutual interactions to improve prediction performance within a supervised learning framework. we validate the effectiveness of sthode on four real-world datasets and the extensive experimental results demonstrate that our sthode model outperforms various baseline models. in summary, the main contributions of this paper are as follows:• we propose a spatial module and a temporal module to model the road network topology and traffic dynamics respectively. we construct two types of hypergraphs to enhance the capture of spatio-temporal dependencies. numerous efforts have been made to achieve encouraging accuracy in traffic forecasting by addressing spatio-temporal dependencies within data originating from road network topology and traffic dynamics.although the aforementioned approaches have shown encouraging performance, we argue that the simple graph squeezes the complex spatio-temporal dependencies into pairwise ones, which leads to incomplete modeling of the road network topology and traffic dynamics. in the spatial module, we construct a spatial hypergraph and employ an adaptive mixhop hypergraph ode layer to capture high-order spatial dependencies caused by the road network topology. in the temporal module, we construct a temporal hypergraph and leverage a hyperedge evolving ode layer to capture high-order temporal dependencies caused by the traffic dynamics.• we propose a spatial module and a temporal module to model the road network topology and traffic dynamics respectively.notation 3:(traffic sensor) a traffic sensor is a sensor deployed in the road network, which samples traffic signals x such as flow and vehicle speed.notation 4:(road network) a road network is represented as a hypergraph g = (v, ξ, h), consisting of different road segments that vary in structure and functionality. in the following subsections, we introduce how we capture high-order spatial dependencies with the spatial module and high-order temporal dependencies with the temporal module.1) construction of spatial hypergraph: we define g sp = (v, ξ, h, w, e) as a spatial hypergraph, with v representing traffic sensors and ξ denoting hyperedges set. to model topological influences, we use diagonal matrix w for road segment types and combine it with hyperedge embedding matrix e to represent road segment impacts on traffic sensors.2) adaptive mixhop hypergraph ode: technically, given the spatial hypergraph g sp , we have the normalized adaptive hypergraph matrix ã as follows:.we define g te = (v, ξ te , h te ) as a r-uniform temporal hypergraph, where v represents traffic sensors, and each hyperedge contains r nodes with strong similarity. the improvement in performance can be attributed to some key factors: 1) the spatial module enables sthode to effectively model the road network topology, surpassing graph-based methods; 2) the temporal module enables sthode to fully extract highorder temporal dependencies, further improving prediction accuracy. 
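to make the hypergraph machinery above concrete, a small numpy sketch of a standard normalized hypergraph operator of the form dv^{-1/2} h w de^{-1} h^t dv^{-1/2}; the incidence matrix and hyperedge weights are toy values, and the paper's adaptive matrix ã may be normalized differently, so treat this only as an illustration of the kind of quantity involved.

import numpy as np

# Standard normalized hypergraph operator: Dv^{-1/2} H W De^{-1} H^T Dv^{-1/2}.
# H is a toy incidence matrix (4 nodes x 2 hyperedges), W holds hyperedge
# weights (e.g. road-segment types); the paper's exact matrix may differ.
H = np.array([[1, 0],
              [1, 1],
              [0, 1],
              [1, 1]], dtype=float)
W = np.diag([1.0, 0.5])

Dv = np.diag(H @ W.diagonal())   # weighted node degrees d(v) = sum_e w(e) h(v, e)
De = np.diag(H.sum(axis=0))      # hyperedge degrees delta(e) = sum_v h(v, e)

Dv_inv_sqrt = np.linalg.inv(np.sqrt(Dv))
A_tilde = Dv_inv_sqrt @ H @ W @ np.linalg.inv(De) @ H.T @ Dv_inv_sqrt
print(A_tilde.round(3))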
it models the road network topology and traffic dynamics to capture high-order spatio-temporal dependencies for short-term traffic predictions. sthode utilizes two ode-based modules that encode a spatial hypergraph and a temporal hypergraph working in parallel to capture high-order spatio-temporal dependencies respectively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/582.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/582.txt new file mode 100644 index 0000000000000000000000000000000000000000..628552a74181c79b7a8add08de273ce55114dd9a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/582.txt @@ -0,0 +1 @@ +in recent years, deep learning techniques have shown promising results in various domains, including event detection in time series data. notably, - provide comprehensive reviews of state-of-the-art deep learning and graph-based techniques for time series anomaly detection, respectively. additionally, to the best of our knowledge, the most comprehensive anomaly detection benchmarks are provided by , . these works demonstrate the potential of advanced machine learning techniques for detecting events in time series data. unlike traditional binary classification methods, where each time step is labeled as an event or non-event, our approach is based on regression. regression enables the prediction of continuous values, offering more nuanced information about event presence and strength in the time series. this approach is advantageous for complex and noisy data, where events may be less defined or easily distinguished from non-events. the regression approach can predict a continuous output variable, which can be beneficial when dealing with events that vary in intensity or magnitude. this flexibility allows the model to capture more detailed information about the event, such as its duration or severity. in real-world data, events often occur amidst background noise. binary classification might struggle with this, as it requires a clear distinction between event and non-event classes. regression, on the other hand, can handle this complexity better by predicting a continuous spectrum of values. the output of a regression model can provide more interpretable results. for instance, a higher predicted value could indicate a stronger or more significant event. this can provide more precise and actionable insights for decision-making. in many real-world scenarios, events of interest are rare compared to non-events. this leads to imbalanced data, which can pose challenges for binary classification models. regression models can potentially handle such scenarios better by focusing on predicting the event's characteristics rather than just its occurrence. using regression, our method provides more precise and accurate insights into event characteristics within the time series data. secondly, our method does not require labeling each time step within a dataset, which can be a time-consuming and labor-intensive process. instead, our method only requires reference events to be defined as time points or intervals of time, making it more efficient and practical to use. thirdly, our method utilizes a stacked ensemble learning meta-model which allows for improved performance by leveraging the strengths of multiple base models and optimizing their predictions.
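as a rough illustration of the stacked-ensemble idea mentioned above (and not the implementation shipped with the method), a generic scikit-learn sketch on synthetic data:

import numpy as np
from sklearn.ensemble import StackingRegressor, RandomForestRegressor
from sklearn.linear_model import Ridge
from sklearn.neighbors import KNeighborsRegressor

# Generic stacked-ensemble regressor on synthetic data: two base models
# whose predictions are combined by a ridge meta-model.  Data, models,
# and hyperparameters are toy assumptions for illustration only.
rng = np.random.default_rng(0)
X = rng.normal(size=(500, 6))                    # toy feature windows
y = X[:, 0] * 0.8 + np.sin(X[:, 1]) + rng.normal(scale=0.1, size=500)

stack = StackingRegressor(
    estimators=[
        ("rf", RandomForestRegressor(n_estimators=50, random_state=0)),
        ("knn", KNeighborsRegressor(n_neighbors=7)),
    ],
    final_estimator=Ridge(alpha=1.0),            # meta-model over base predictions
)
stack.fit(X[:400], y[:400])
print("held-out R^2:", round(stack.score(X[400:], y[400:]), 3))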
this approach enhances the overall accuracy and robustness of event detection.finally, to facilitate practical implementation, we have developed a python package that accompanies our proposed method. this package provides an easy-to-use interface for applying our method to real-world datasets and allows users to quickly and easily detect events in their own time series data. we provide detailed documentation and examples to help users get started with our package and apply it to their own data.overall, our method offers a powerful and flexible approach for detecting events in multivariate time series data that is both accurate and efficient.the rest of this paper is organized as follows: in section 2, we provide a detailed description of the regression aspect of our method, explaining how it differs from traditional binary classification approaches. in section 3, we present our stacked ensemble learning approach, which combines the strengths of multiple base models to improve the accuracy and robustness of event detection. in section 4, we describe the design and implementation of the python package, which provides an easy-to-use interface for applying our method to real-world datasets. in section 5, we showcase the usage of the package and demonstrate the effectiveness of the method through a series of experiments on different real-world datasets from nlp to financial security. finally, in section 6, we conclude the paper by summarizing our key findings and contributions. we highlight the strengths and limitations of our method and discuss potential future directions for research in this area.in recent years, deep learning techniques have shown promising results in various domains, including event detection in time series data. using regression, our method provides more precise and accurate insights into event characteristics within the time series data. instead, our method only requires reference events to be defined as time points or intervals of time, making it more efficient and practical to use. this package provides an easy-to-use interface for applying our method to real-world datasets and allows users to quickly and easily detect events in their own time series data.overall, our method offers a powerful and flexible approach for detecting events in multivariate time series data that is both accurate and efficient.in our approach, we require two pieces of data: one representing the time series (dataset) s, and the other representing the list of reference events (or ground truth events) e.1 import pandas as pd 2 from typing import union the dataset variable represents the time series data in a pandas dataframe format, where the index represents the time and the columns represent the features (f ).the proposed method is based on regression, so we must create a scalar parameter for this task based on the required time series s and a list of reference events e.to facilitate the comparison between the reference events and the sliding windows, we convert the reference events into fixed-size intervals denoted by width events. the overlapping parameter op(w i ) provides a measure of how well the window aligns with the most relevant event, which can be used to guide a regression model in predicting the presence or absence of events in the time series data. 
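one plausible way to compute such an overlapping score for a window against a list of reference event intervals is sketched below; the exact definition of op(w i) used by the package may differ, so treat this as an assumption.

import pandas as pd

# Score how well a sliding window overlaps the reference events, as a
# continuous regression target.  This particular definition (fraction of
# the window covered by its best-matching event) is an assumption.
def overlap_score(window_start, window_end, events):
    """Fraction of the window covered by the best-matching event interval."""
    best = 0.0
    for ev_start, ev_end in events:
        lo = max(window_start, ev_start)
        hi = min(window_end, ev_end)
        if hi > lo:
            best = max(best, (hi - lo) / (window_end - window_start))
    return best

events = [(pd.Timestamp("2024-01-01 02:00"), pd.Timestamp("2024-01-01 03:00"))]
w = (pd.Timestamp("2024-01-01 02:30"), pd.Timestamp("2024-01-01 03:30"))
print(overlap_score(w[0], w[1], events))   # 0.5: half the window lies inside the event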
to initialize the metamodel object, we need to provide several arguments, including the time series dataset, the reference events, the sliding window width, the events width, and the output directory where the results and models will be saved.the final step is to visualize the results, including the losses of the stacked models and the meta-model, true/predicted op, true/predicted events, and a histogram illustrating the time shift between the true and predicted events. to be used by our package, we have converted this column into a list of reference events represented as time points based on the time sampling of the dataset.given that keywords and adjectives may occur at various positions within the text (which has been transformed into a time series), we establish a mapping between these positions and corresponding timestamps based on the index of the obtained time series.we have introduced a novel deep-learning supervised method designed for the detection of events in multivariate time series data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/583.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/583.txt new file mode 100644 index 0000000000000000000000000000000000000000..11b4db29277a3f6315676db21a3759287621b2e6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/583.txt @@ -0,0 +1 @@ +the emergence of large language models (devlin et al., 2019;radford et al., 2019;raffel et al., 2019) in recent years has significantly transformed the applications of deep learning methods in natural language processing. these models, pretrained in an unsupervised fashion on massive textual datasets, enable the fine-tuning of powerful models with just a few thousand -or even hundred-observations. they achieve generalization performances that required millions of observations just a few years ago, particularly when used in conjunction with discrete instruction prompts (brown et al., 2020).extending these discrete methods to the learning of continuous prompts (lester et al., 2021), which conceptually falls within the framework of socalled "parameter efficient fine-tuning" (peft) methods (houlsby et al., 2019;bapna and firat, 2019), poses certain challenges in the context of few-shot learning. one such challenge is the issue of model adjustment guidance through validation metric during gradient descent (mao et al., 2022). traditionally, in the process of model fitting, approximately one-third of the training dataset is excluded beforehand to create a validation (or development) set dedicated to inferring an unbiased estimation of the model's performance (hastie et al., 2001). this metric is utilized both during gradient descent (to estimate convergence of the descent algorithm or inform early stopping heuristics), and subsequently to guide hyperparameter searches typically employed in the fine-tuning of large language models. however, the validity of this approach relies on the assumption that the distribution of the validation set is representative of the real observed phenomenon. this assumption quickly loses its relevance in the context of few-shot learning, where at most a few tens of observations are available for estimating the validation metric. 
this notion has become problematic enough in present times that a portion of academic literature on continuous learning with small datasets presents experiment results utilizing validation sets that are unrealistic and artificial, containing several orders of magnitude more observations than the training set used for the model adjustment itself (mao et al., 2022).in the machine learning community, characterizing local minima with desirable generalization properties has been a topic of interest for decades (garipov et al., 2018;zhang et al., 2021;hochreiter and schmidhuber, 1997). from flat minima (hochreiter and schmidhuber, 1997) to mode connectivity (garipov et al., 2018), this body of work arxiv:2310.15793v1 24 oct 2023 has provided the basis for several practical observations regarding the connection between the properties of local minima and a model's generalization abilities.the concept of learning neural network subspaces (wortsman et al., 2021) is an example of a method built using these considerations. this approach proposes to find not just a local minimum of the cost function in the model's parameter space, but an entire simplex associated with low values of this objective. this additional constraint is meant to bias the descent algorithm towards wider minima, empirically associated with better generalization (dziugaite and roy, 2018a). in addition, the availability of this entire simplex of models allows for the inference of not only one scalar development metric, but an entire distribution, at any given moment during model fine-tuning. these two phenomena, become particularly relevant when viewed through the lens of large language models, and most especially for few-shot learning problems, where the model's ability to generalize a concept class from a limited number of examples is crucial.the contributions of this article are as follows. first, we introduce the first adaptation of the subspace method to large language models through subspace adjustment of prefixes (a peft method similar to the state-of-the-art continuous prompt adjustment in current academic literature). next, this article proposes to leverage certain natural advantages offered by the subspace method to revisit the concept of guiding model adjustment through the validation metric. we will empirically demonstrate that the combination of these two ideas leads to a significant improvement in terms of average prediction on natural language understanding tasks provided by the glue benchmark (wang et al., 2018). finally, an ablation study will be presented to provide some insights into the mechanisms underlying this prediction improvement., 2001). in addition, the availability of this entire simplex of models allows for the inference of not only one scalar development metric, but an entire distribution, at any given moment during model fine-tuning. 
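a schematic illustration of why a learned subspace yields a distribution of validation scores rather than a single scalar: sample points on a segment between two parameter vectors and evaluate each one. the toy linear model and data below are assumptions for illustration, not the prefix-tuning setup.

import numpy as np

# Sampling a one-dimensional "subspace" (a segment between two parameter
# vectors) gives a whole distribution of validation scores at any point
# during training, instead of a single number.
rng = np.random.default_rng(0)
X_val = rng.normal(size=(64, 8))
y_val = (X_val @ np.ones(8) > 0).astype(float)

theta_1 = rng.normal(size=8)          # one endpoint of the learned segment
theta_2 = rng.normal(size=8)          # the other endpoint

def accuracy(theta):
    pred = (X_val @ theta > 0).astype(float)
    return (pred == y_val).mean()

# Validation metric evaluated at several points alpha along the segment.
scores = [accuracy(alpha * theta_1 + (1 - alpha) * theta_2)
          for alpha in np.linspace(0.0, 1.0, 11)]
print("mean %.3f, std %.3f" % (np.mean(scores), np.std(scores)))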
first, we introduce the first adaptation of the subspace method to large language models through subspace adjustment of prefixes (a peft method similar to the state-of-the-art continuous prompt adjustment in current academic literature).on the other hand, continuous prompt adjustment methods and, by extension, peft methods(houlsby et al., 2019;hu et al., 2021;li and liang, 2021;liu et al.to increase the expressiveness of this approach (which is particularly limited in terms of the number of learnable parameters), prefix tuning (li and liang, 2021), chosen in this article as a candidate for applying the subspace method, proposes to concatenate these virtual tokens not to the input sequence of the model, but to the key and value sequences used as inputs to the multiplicative attention modules in each layer of the language model.the adjustment of a large language model is typically guided by estimating a performance metric on a validation set, both during hyperparameter search and the descent process itself, where the best model according to this scalar value is selected as the final model. similar to prefix-tuning, these alternative fine-tuning approaches are based on the idea of freezing the language model's parameters and introducing a fraction of new adjustable parameters (typically with a cardinality several orders of magnitude lower than that of the model itself), but they differ in how they introduce these new parameters into the model:. the use of the validation-guided subspace method estimated deterministically collapses for k=50, k=100, and k=200 (cases where the performance is even lower than the classical prefix fitting method) and eventually becomes equivalent to the proposed method. the first one, an adaptation of the subspace method for training large language models through the peft method, is, to our knowledge, the first example of its use in the academic literature on natural language processing. the combined use of these two methods leads to a significant improvement in the performance of common language models such as bert on language comprehension tasks proposed by the glue benchmark, rephrased in a "few-shot learning" context. however, this gain appears to diminish for larger sample sizes, where the subspace method applied to prefix tuning seems to be sufficient on its own to achieve performance gains over standard peft methods as well as classical model training.finally, applying subspace learning to peft methods also enables the training of powerful predictive models while significantly reducing the computational resources typically required for training large language models.in addition, the fact that this method allows for fine-tuning without high sample sizes in the development set might let people use it without any additional test set, and thus any model validation of any sort, which might lead to the implementation of highly biased models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/584.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/584.txt new file mode 100644 index 0000000000000000000000000000000000000000..795742c004f8d77d7dac1144c48d3dcfc8492643 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/584.txt @@ -0,0 +1 @@ +background: reconstruction of signals in an under-determined linear measurement setup, where the signals have generative models that are linear with latent sparse signals, has been thoroughly investigated in literature. 
this is primarily known as compressed sensing (cs) or compressed sampling .in this article we refer to the linear measurement setup alongwith the linear generative model as the traditional cs. many practical algorithms exist for signal reconstruction using sparse-latent signals as a-priori knowledge. algorithms are mainly divided as convex, greedy pursuits and bayesian, and their suitable combinations . convex algorithms, such as lasso , use ℓ1-norm based penalties to promote sparsity. motivation: in this article, we consider a cs setup where we have linear measurements of an ambient signal like traditional cs, but do not have the linear generative model for the ambient signal. instead, we assume non-linear, non-convex, generative models where an ambient signal is the output of a neural network excited by a sparselatent input signal. signals which are generated in such a way are referred to as 'generative sparse-latent' (gsl) signals. our objective is to design reconstruction algorithms for gsl signals in the cs setup.in the cs setup measuring gsl, we have a non-linear mapping between the measurement and the sparse-latent signal. due to the non-linear mapping, the reconstruction problem is inherently nonconvex. we design a reconstruction algorithm that can use a-priori knowledge of sparsity. the reconstruction algorithm uses gradient search for sparse-latent estimation. using simulations we show that it is possible to achieve a good quality signal reconstruction, although the mapping between measurement vector and sparse-latent signal is highly non-linear.naturally a question arises: how to measure a degree of nonlinearity? such a measure could help us to perform simulation studies in a controlled manner. to the best of authors' knowledge, we correspondence: sach@kth.se are not aware of any such measure. therefore we provide a simple measure based on how far a non-linear mapping is from a linear mapping. this helps us to perform simulation studies in a controlled manner.relevant literature: in a cs setup, there exists prior work where neural networks-based generative models are used. in , variational auto-encoders (vaes) and generative adversarial networks (gans) are used where the dimension of latent signal is (much) lower than the dimension of ambient signal. in , the authors considered gan-based generative models and consider the case where the dimension of the latent is small. robustness of the cs for generative models was addressed in . the work considered normalizing flows (nfs) that are invertible neural networks. these works address cs of signals with generative models -vaes, gans and nfs -excited with dense, gaussian latent signals.our contributions: use of sparse-latent signals for generative models in cs setup is the major novelty of the article compared to previous works. the contributions are: (a) proposing the construction of gsls. (b) defining a measure of non-linearity and using the measure to rank complex non-linear mapping functions to generate gsl signals. (c) proposing sparsity inducing reconstruction algorithms for cs of gsl signals and demonstrating their performances.background: reconstruction of signals in an under-determined linear measurement setup, where the signals have generative models that are linear with latent sparse signals, has been thoroughly investigated in literature. 
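a bare-bones sketch of the reconstruction problem described above for generative sparse-latent signals: recover the latent by gradient descent on a least-squares data term plus an l1 sparsity penalty, with the generator held fixed. the pytorch generator, sizes, and optimiser settings are illustrative assumptions, not the rnvp models used in the paper.

import torch

# Given linear measurements y = A g(z*) of a signal produced by a fixed
# non-linear generator g from a sparse latent z*, recover z by gradient
# descent on ||A g(z) - y||^2 + lam * ||z||_1.
torch.manual_seed(0)
n_latent, n_signal, n_meas = 20, 60, 30

g = torch.nn.Sequential(                      # fixed "generative" network
    torch.nn.Linear(n_latent, 64), torch.nn.Tanh(),
    torch.nn.Linear(64, n_signal),
)
for p in g.parameters():
    p.requires_grad_(False)

A = torch.randn(n_meas, n_signal) / n_meas ** 0.5   # measurement matrix
z_true = torch.zeros(n_latent)
z_true[:3] = torch.tensor([1.5, -2.0, 0.8])          # 3-sparse latent
y = A @ g(z_true)                                    # observed measurements

z = torch.zeros(n_latent, requires_grad=True)
opt = torch.optim.Adam([z], lr=1e-2)
lam = 1e-2                                           # sparsity weight
for _ in range(2000):
    opt.zero_grad()
    loss = ((A @ g(z) - y) ** 2).sum() + lam * z.abs().sum()
    loss.backward()
    opt.step()

print("recovered support:", torch.topk(z.detach().abs(), 3).indices.tolist())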
motivation: in this article, we consider a cs setup where we have linear measurements of an ambient signal like traditional cs, but do not have the linear generative model for the ambient signal. instead, we assume non-linear, non-convex, generative models where an ambient signal is the output of a neural network excited by a sparselatent input signal.in the cs setup measuring gsl, we have a non-linear mapping between the measurement and the sparse-latent signal. using simulations we show that it is possible to achieve a good quality signal reconstruction, although the mapping between measurement vector and sparse-latent signal is highly non-linear. these works address cs of signals with generative models -vaes, gans and nfs -excited with dense, gaussian latent signals.our contributions: use of sparse-latent signals for generative models in cs setup is the major novelty of the article compared to previous works.in this article, we investigate the reconstruction of gsl signal x from linear cs measurements. this in turns raises the question: does the optimization problem gets harder if f θ θ θ gets more nonlinear according to some metric of non-linearity? then a natural queries can be: how to measure non-linearity? how to think that a function is more non-linear that another function? can we conduct a systematic study to show that solving optimization problem (4) becomes harder with the increase in some measure of non-linearity?. the trained model is later used for our purpose where a gsl signal is generated by exciting the trained model using a sparse-latent signal.1we can observe that increase in the number of coupling layers from 4 (denoted as rnvp, nc = 4) to 8 (denoted as rnvp, nc = 8) for rnvp with random parameters leads to an increase in nnlm, which indicates a higher amount of non-linearity. the ranking of the rnvp models according to an increasing trend of non-linearity is as follows: (i) rnvp u(0, 1) as the pretrained rnvp, (ii) rnvp with random parameters and 4 coupling layers, (iii) rnvp with random parameters and 8 coupling layers.for gsl cs, we now study the performances for three rnvp models: rnvp u(0, 1), rnvp nc = 4, and rnvp nc = 8.following, we use two performance measures for the evaluation of the reconstruction: (1) signal to reconstruction noise ratio (srnr) = 10 log 10 e e , and (2) average support cardinality error (asce) = 1 -1 k e{|sz ∩ s ẑ|}, where sz denotes the k-size support of z.finally we show a gsl signal realization using rnvp, nc = 4, and its reconstruction using the optimization algorithm (4). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/585.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/585.txt new file mode 100644 index 0000000000000000000000000000000000000000..3227f5dcb35ea1c5ce39b0b75cb4a778e1b1f10e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/585.txt @@ -0,0 +1 @@ +in this short note we consider a class of simple feed-forward neural networks , also known as extreme learning machines (elm) . elms consist of a hidden layer where the data is encoded using random projections, and an output layer where the weights are computed using the ridge regression (rr) method. here we propose a new rr boosting approach for elms, which significantly improves their classification performance and robustness.let us assume that x = n ×m is a data matrix, where each row x n = ∈ r m is a data point from one of the k classes. 
the classification problem requires the mapping of the rows of a new, unclassified data matrix x = n ′ ×m , to the corresponding classes {0, 1, ..., k -1}.the first layer of the elm encodes both x and x matrices using the same random projections matrix r = j×m drawn from the normal distribution r j,m ∈ n (0, 1):where h() is the activation function, which is applied applied element-wise. the second layer of elm solves the ridge regression (rr) problem:where y = n ×k is the n × k target matrix for k classes, and λ > 0 is the regularization parameter.each row y n ∈ r k of y corresponds to the class of the data point x n . the classes are encoded using the one-hot encoding approach:the solution of the above rr problem is:where i is the j × j identity matrix.therefore, in order to classify the rows of a new data matrix x we use the following criterion:where2 boosting method several boosting methods have been previously proposed for the rr problem , , . our approach here is different, and it uses several levels of boosting.at the first boosting level, ℓ = 0, one computes the approximation:and then continues by successively solving for the next t -1 approximations:after t iteration steps the first level will provide an approximation:then we set:and we repeat the procedure for the next l -1 boosting levels, obtaining y ℓ , ℓ = 1, ..., l -1.the general equations for ℓ = 1, ..., l -1 can be written as:given an unclassified data matrix x = n ′ ×m , where each row is a new sample, we encode it using the same random projection matrices:we compute the output:and then we use the decision criterion (5) to decide the class for each new sample (row of x).we should note that different random projection matrices r ℓ t are generated for each level and each time step:and α ∈ (0, 1) is a discount parameter (or a "learning" rate). also, the random projection matrices can be generated on the fly, and they don't require additional storage. elms consist of a hidden layer where the data is encoded using random projections, and an output layer where the weights are computed using the ridge regression (rr) method.let us assume that x = n ×m is a data matrix, where each row x n = ∈ r m is a data point from one of the k classes. the classification problem requires the mapping of the rows of a new, unclassified data matrix x = n ′ ×m , to the corresponding classes {0, 1, .the first layer of the elm encodes both x and x matrices using the same random projections matrix r = j×m drawn from the normal distribution r j,m ∈ n (0, 1):.each row y n ∈ r k of y corresponds to the class of the data point x n .therefore, in order to classify the rows of a new data matrix x we use the following criterion:.given an unclassified data matrix x = n ′ ×m , where each row is a new sample, we encode it using the same random projection matrices:.and then we use the decision criterion (5) to decide the class for each new sample (row of x). given a data set of points x = {x 0 , x 1 , ., x n -1 }, x n ∈ r m , and a query point q ∈ r m , the goal is to find a point x ∈ x such that d(q, x) ≤ (1 + ε)d(q, x * ), ε > 0, where x * is the true nearest neighbor of q, and d() is a distance measure. 
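for reference, a compact numpy sketch of the basic single-level elm classifier summarised above (random-projection hidden layer plus ridge-regression output layer); the data, sizes, and regularization value are toy assumptions and the boosting levels described in the text are omitted.

import numpy as np

# Single-level ELM: random-projection hidden layer H = h(X R^T), then a
# ridge-regression output layer W = (H^T H + lam I)^{-1} H^T Y, with the
# class chosen as the argmax over the K outputs.
rng = np.random.default_rng(0)
N, M, J, K, lam = 300, 10, 200, 3, 1e-2

X = rng.normal(size=(N, M))
labels = rng.integers(0, K, size=N)
Y = np.eye(K)[labels]                         # one-hot targets

R = rng.normal(size=(J, M))                   # random projection matrix
H = np.tanh(X @ R.T)                          # hidden-layer encoding h(X R^T)

W = np.linalg.solve(H.T @ H + lam * np.eye(J), H.T @ Y)   # ridge solution

H_new = np.tanh(X @ R.T)                      # encode new data (here: training data)
pred = (H_new @ W).argmax(axis=1)             # decision: argmax over K outputs
print("training accuracy:", round((pred == labels).mean(), 3))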
with this choice of the hash function, one can easily show that given two vectors x, x ′ ∈ r m the probability of h(r, x) = h(r, x ′ ) is:.where θ(x, x ′ ) is the angle between x and x ′ .we should note that each randomly drawn hyperplane defines a different hash function, and therefore to map the data vectors x ∈ r m to {-1, +1} j we need to define j hash functions:.one can see that in our case, each column of the matrix r = j×m corresponds to a randomly drawn hyperplane. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/586.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/586.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b06a88232e35504c123bd1ae64017e7fc625af5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/586.txt @@ -0,0 +1 @@ +over the last few years, the decreased cost of data storage and increased usage of apps and digital technologies have led to an unprecedented surge in data collection and availability. this data collection revolution has proceeded alongside numerous advances in deep learning which has provided a usage for this data for a large variety of applications. the output predictions from these statistical models has become increasingly sophisticated and continues to have a direct impact on both society and the global economy. notably, much of this data contains personal information, often collected directly from individuals and inferred from their behavioral patterns, or directly recorded in the form of digital healthcare data.the widespread usage of this data in deep learning technologies has induced a greater interest and concern for data protection and privacy and a call for the codification of such protections under a legal framework. recently, the european union has enacted the general data protection regulation (gdpr) which specifies the legality of personal data collection and usage as well as establishes the control of personal data as a human right. in the healthcare domain, collected medical data that contains sensitive information, electronic health records, is protected under the health insurance portability and accountability act (hipaa) in the united states which restricts and prohibits disclosure of this information to third parties without patient consent.deep learning model training tasks generally require the aggregation of disparate data sources into one centralized location so it is fully accessible by the model. for example, one may wish to train a model using edge-devices belonging to individual users, which would necessitate combining this data together in one location. in the healthcare domain, medical images and patient clinical data, which may exist in different data centers, may need to be pooled to successfully train models representative of the general population.in response to the recent concerns around data privacy, and the legal requirements around protecting the interests of patients and end-users, researchers at google introduced the federated learning framework . federated learning addressed many of the data privacy and security concerns by allowing multiple datasets to be trained while located in separate locations so that they cannot be aggregated together. the local machines that contain both the data and the model are known as clients and they connect to a central server to aggregate weights to be used in the next round of training. 
such decentralized training aims to solve the problems of data privacy and has been successfully applied to a number of different domains including applications in edge computing, such as internet of things networks , wireless computing , and the healthcare domain .the open-source flower python package was recently introduced to provide federated learning capabilities to a variety of different modeling frameworks and enable running on edge devices. flower solves a number of different challenges to training federated models and running them in a production environment. recently, flower has been popular choice as a federated learning framework due to the simplicity of its lightweight design and flexibility. however, one challenge that has not yet been addressed is allowing flower to run training asynchronously. at present, the flower framework requires all connected clients to send their weights to the server before aggregation. consequently, the next federated training round is delayed until all models have successfully completed their local epochs. when a client crashes due to out-of-memory or other common errors, the training needs to be restarted. another operational pain point is managing the federation server. often a separate server needs to be started for each training experiment, making it hard to scale to hundreds of experiments.to address these issues, we introduce flwr-serverless, a wrapper around the flower (flwr) framework that extends its capabilities to allow for both synchronous and asynchronous forms of federated learning without altering its core use pattern. furthermore, the changes we make effectively allow flower to be run "serverless" in the sense that weight aggregation occurs on the client side rather than the on the central server. the weights are updated from any accessible remote weight storage directory. thus, our approach inherits all of the functionality and convenience of flower while reducing the time and cost complexity related to updating the weights for the global model on the central server. herein, we describe the architectural changes made to the federated learning workflow and the flower package along with the asynchronous learning strategy that we employ. finally, we demonstrate the results on some federated benchmark datasets. our results show that asynchronous federated learning is robust, and in specific situations can significantly speed up federated training without sacrificing model performance. the local machines that contain both the data and the model are known as clients and they connect to a central server to aggregate weights to be used in the next round of training. our results show that asynchronous federated learning is robust, and in specific situations can significantly speed up federated training without sacrificing model performance.federated learning is a decentralized learning strategy that allows separate, private datasets stored across multiple devices or machines to be used to train a global model without requiring the model to access the dataset, or moving the dataset off the device into a central storage location. each device that participates in federated learning is known as a client which undergoes a number of local rounds of training on its local dataset. after the client finishes its local rounds of training, it sends its current weights to a centralized server, which stores the weights. 
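a schematic of the client-side, storage-based aggregation idea described above: clients push weights to a shared store and average whatever has been deposited. the store and function names below are hypothetical stand-ins, not the flwr-serverless api.

import numpy as np

# "Serverless" aggregation: each client pushes its weights to a shared
# store (an in-memory dict standing in for a remote folder) and federated
# averaging happens on the client side, not on a central server.
weight_store = {}                                      # stand-in for shared storage

def push_weights(client_id, weights):
    weight_store[client_id] = [w.copy() for w in weights]

def aggregate_available():
    """Federated averaging over whatever clients have pushed so far."""
    deposited = list(weight_store.values())
    return [np.mean(layer, axis=0) for layer in zip(*deposited)]

# Two clients with toy single-layer "models" push at different times.
push_weights("client_a", [np.array([[1.0, 2.0]]), np.array([0.5])])
push_weights("client_b", [np.array([[3.0, 4.0]]), np.array([1.5])])

new_global = aggregate_available()
print([layer.tolist() for layer in new_global])        # [[[2.0, 3.0]], [1.0]]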
after aggregation, each client receives the newly aggregated weights and resumes training local rounds to update these weights.synchronous versus asynchronous federated learning in most cases, federated learning is performed in synchronous fashion (figure1, left panel). each client submits its weights to the server after completing a set number of local training rounds on its private data. the synchronization of all of the client's weights occurs once the last client to finish local training submits its weights to the server. • serverless implementation: due to the numerous difficulties we encountered with launching and maintaining federated learning servers, we aim to provide asynchronous federated learning that can run in a serverless fashion. • flexibility: much like the flower package, we aim for asynchronous federated learning to be compatible with machine learning frameworks such that it can be activated through callback functionality.design in a typical synchronous federated learning experiment, each client runs for a number of local training training epochs before it sends its weights to a central server for aggregation. all participating clients are expected to submit their weights before the server aggregates the weights and broadcasts the new weights back to the clients. then, the client sends its weights to a remote weight store and checks with the server to see if another client has recently deposited weights to this shared folder.synchronous serverless federated learning note that we also provide the functionality to use synchronous federated push w k to weight store; pull ω from weight store;. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/587.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/587.txt new file mode 100644 index 0000000000000000000000000000000000000000..60e9af4abcc0fe1b9fd444401f3323cbfa00444d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/587.txt @@ -0,0 +1 @@ +applications of artificial intelligence to automation of mathematics have a long history, starting from early approaches based on a collection of hand-crafted heuristics for formalizing new mathematical concepts and conjectures related to them . in the last decade, there has been a growing interest in formalization of mathematics with proof assistants, which verify the formal correctness of mathematical proofs and constructions, and help automate the tedious parts. the trend is correlated with the interest of machine learning community in aiding formalization efforts with its expertise.machine learning methods are often used to address premise selection, i.e., recommendation of theorems that are useful for proving a given statement. deepmath proposes using convolutional and recurrent neural networks to predict the relevance of a premise for proving the given statement. while many other approaches use transformers and general language models, paliwal et al. have shown that taking into account the higher-order structure of logical expressions used in formalizing mathematics can greatly improve the performance of premise selection and automated proving. indeed, many approaches use graph neural networks to learn from the higher-order structures, e.g., . more recently, graph neural networks have also been proven useful for explorative, unsupervised approaches to automated theorem proving with reinforcement learning . 
some of these approaches address alternative tasks, such as recommending or automatically selecting suitable proof tactics, i.e., routines for performing a series of proof steps, applying a decision procedure, or for carrying out proof search.data sets of different origins have been used to evaluate the proposed approaches. welleck et al. evaluate their approach on a selection of three hundred proofs included in the proofwiki library of mathematical proofs written in a combination of natural language and l a t e x. polu and sutskever use a standard library of the metamath proof assistant. lample et al. combine proofs from the metamath library with proofs from the mathlib library of the lean proof assistant. the latter has also been used for evaluating the approaches in . wang et al. , paliwal et al. , bansal et al. evaluate their models within the hol light proof assistant based on higher-order logic . the formalized proofs in standard hol libraries have been transformed into a holstep data set for machine learning, where examples correspond to more than 2 million steps from 11 400 proofs . the training set includes proof steps in context (local hypotheses and the current statement being proved) and the library entry used in the step. descriptions of the library entries are included in humanreadable and machine-readable, tokenized versions. the data set has been recently upgraded to the interactive benchmark environment holist for training automated proof systems with reinforcement learning .we present a collection of data sets, mlfmf, based on libraries of formalized mathematics encoded in two proof assistants, agda and lean. it supports evaluation and benchmarking of machine learning approaches for recommendation systems in the context of formalized mathematics. this is proved by induction on m. lemma 2 (l2) m + suc(n) = suc(m + n), for all m, n ∈ n.this is proved by induction on m. theorem (t) m + n = n + m, for all m, n ∈ n. this is proved by induction on m. in the base case (m = 0), we need l1. in the induction step (m = suc(ℓ)), we need l2. table 1: an example formalization of proof that the addition of natural numbers is commutative.we transform each library into a directed multi-graph whose nodes represent library entries (theorems, lemmas, axioms, and definitions), while edges represent the references between them. consider the example in table 1. it starts with a definition of the set of natural numbers with two simple constructors that define the first natural number 0 and constructs all the others inductively by asserting that a successor suc(n) of a natural number n is also a natural number. the definition of the addition of natural numbers follows their definition by asserting two simple rules for the left addition of 0 and the left addition of a successor. note that the definition of + references the definition of n. next, the first lemma establishes the rule for the right addition of zero as the first simple commutativity case.the second lemma establishes the right addition of a successor as the second case. the theorem at the end references the two lemmas to show (and prove) the commutativity of adding natural numbers.the entries from table 1 are transformed into a multi-graph depicted in figure 1a. it contains five nodes, each corresponding to a table row. the multi-graph includes an edge from the node + to the node n, indicating the reference to the set of natural numbers in the definition of addition. 
it also contains the self-reference of +, since the second case of this definition is recursive. similarly, there are four edges from the theorem node to the two lemma nodes and the two nodes defining natural numbers and addition thereof. the obtained data allows us to approach premise selection as a standard edge prediction machine learning task. furthermore, we transform each formalized entry into a directed acyclic graph that retains complete information about the entry, see figure 1b. by including the entire entry structures in the data sets, we make them suitable for further exploration of the utility of the state-of-the-art approaches to graph-based machine learning. a detailed description of the format is given in sections 3.3 and 3.4.the multi-graph representation of the example proof from table 1.(:entry (:name n) (:type (...)) (:data (...) (:name n.zero) (:name n.suc) ) )(b) an s-expression from which we obtained the dag for the entry n in table 1.figure 1: the two-part representation of a library. library as a whole is represented as a network of references (a). additionally, every entry is represented as a dag which is shown here in its textual s-expression format (b). note that some nodes of dag were replaced by (...) for better readability.our approach is general and can be applied to other proof assistants based on type theory. moreover, even though agda and lean have quite different internal representations, the corresponding data sets use a common format that requires little or no knowledge about the inner workings of proof assistants. thus our collection provides the machine learning community with easy access to a large amount of formalized mathematics in familiar formats that allow immediate application of machine learning algorithms. to our knowledge, mlfmf is the first and most extensive collection of data sets featuring more than one proof assistant and providing access to the higher-order structured representation of more than 250 000 mathematical formalization entries.we present a collection of data sets, mlfmf, based on libraries of formalized mathematics encoded in two proof assistants, agda and lean.we transform each library into a directed multi-graph whose nodes represent library entries (theorems, lemmas, axioms, and definitions), while edges represent the references between them. it starts with a definition of the set of natural numbers with two simple constructors that define the first natural number 0 and constructs all the others inductively by asserting that a successor suc(n) of a natural number n is also a natural number. to our knowledge, mlfmf is the first and most extensive collection of data sets featuring more than one proof assistant and providing access to the higher-order structured representation of more than 250 000 mathematical formalization entries.libraries of formalized mathematics comprise units, organized hierarchically with a module system or namespaces, each of which contains a number of entries: definitions of types, constructions of elements of types, statements and proofs of theorems, unproved postulates (axioms), as well as meta-level content, such as embedded documentation, definitions of tactics, hints for heuristics, and other automation mechanisms. in our work we addressed the specific problem of recommendation: given a large body of formalized mathematical knowledge, how can the proof assistant competently recommend theorems or constructions that are likely useful in solving the current goal? 
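the table 1 example can be made concrete as a small networkx multi-graph, with premise selection posed as edge prediction on it; this is a schematic reconstruction rather than code from the mlfmf data sets.

import networkx as nx

# Reference multi-graph for the Table 1 example: nodes are library entries,
# edges are references between them.  Premise selection is then edge
# prediction: hide a reference of the theorem and score candidate premises.
G = nx.MultiDiGraph()
G.add_nodes_from(["N", "+", "L1", "L2", "T"])
G.add_edges_from([
    ("+", "N"), ("+", "+"),          # addition references N and itself (recursion)
    ("L1", "+"), ("L1", "N"),
    ("L2", "+"), ("L2", "N"),
    ("T", "L1"), ("T", "L2"), ("T", "+"), ("T", "N"),
])

hidden = ("T", "L2")                 # hide one reference made by the theorem T
G.remove_edge(*hidden)
candidates = [n for n in G.nodes if n != "T"]
print("score these as potential premises for T:", candidates)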
there are two typical scenarios: the user knows which theorem they would like to use but have a hard time finding it in the library, or the user is not aware of the existence of a potentially useful theorem that is already available. we first explain the semantic content of the data extracted from libraries of formalized mathematics, describe the format and information content of the data sets, continue by reviewing the machine learning tasks for which the data sets were built, and finish with an overview of the technical aspects of the library-to-data-set transformation process. we start with a brief description of data transformation process, continue with the detailed description of the resulting pair of computational graphs for the entries in the library, and the directed, multi graph of references in the library (see 3. the first part is a set t of abstract syntax trees (ast) that correspond to the entries in the library, while the second is a directed multi-graph g(v, e), where v is a set of library entries, and e includes the references among them. however, the graph might contain in the case of agda libraries, the module nodes correspond to the modules that are actually present in the library and resemble the file system of the library. formally, we learn a model m : (u, v) → m (u, v) ∈ that given two nodes u and v outputs the model confidence in the presence of the edge (u, v). in the case of the link prediction and recommendation tasks, we should focus on function nodes, since these are the only nodes that correspond to a computational graph whose body contains a proof of a claim formalized in the declaration part of the computational graph.we introduced mlfmf, a suite of four data sets corresponding to three libraries in agda and one library in lean proof assistants. references between entries are included in a multi-graph, where nodes are entries, edges represent references among the entries, and each entry is represented with a direct acyclic graph reflecting the structure of the entry source code encoding.a link a → b between two entry nodes a and b denoting that the entry corresponding to a references the entry b from the type (declaration) part of its computational graph g(a). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/588.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/588.txt new file mode 100644 index 0000000000000000000000000000000000000000..328c1a7463140d3e138ede98f959fe2bad426447 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/588.txt @@ -0,0 +1 @@ +in machine learning, variable(s) y are commonly predicted from observed variable(s) x by estimating the conditional probability distribution p (y|x) (bishop and nasrabadi 2006). this approach is effective for understanding correlations or associations in the data, but it falls short when we seek to understand how changes in x would affect y. such an understanding requires a different methodology, known as causal inference (in population), which involves estimating the interventional distributions (or causal effect), denoted by p x (y). p x (y) represents the probability of an outcome y if we were to intervene or change the values of the input variable(s) x (pearl 2000(pearl , 2009;;hernán and robins 2010). 1 the gold standard for estimating a causal effect is to perform experiments/interventions in the environment, for instance, by using techniques such as randomized controlled trials (rcts) (fisher 1936). 
however, these methods often require real-world experiments, which can be prohibitively expensive, unethical, or simply infeasible in many scenarios. alternatively, researchers can turn to observational methods, utilizing the causal graph of the environment and available data to estimate interventional distributions (pearl 2009; spirtes et al. 2000). the causal graph, a graphical representation that depicts the causal relationships between variables, plays a central role in this methodology. this observational approach avoids the need for costly or impractical experiments but comes with its own challenges. in particular, computing interventional distributions uniquely may not always be feasible. identifiability in population. identifiability refers to the ability to uniquely compute a distribution from the available data. when all variables in the system are observable and the causal graph is known, all interventional distributions are identifiable using the so-called back-door adjustment sets, meaning all causal effects are identifiable (pearl 1993). however, only a subset of causal effects can be identified in the presence of unobserved variables or hidden confounders (pearl 1995). selection bias can also make some causal effects unidentifiable (shpitser and pearl 2006a). this bias, which is similar to distribution mismatch in learning theory (masiha et al. 2021), often arises from conditioning on selection variables. the problem of causal effect identification in population pertains to whether, given the causal graph, an interventional distribution can be uniquely computed from the available data. various forms of available data lead to different problems in causal inference in population, the most well-known of which is the id problem (pearl 1995; tian and pearl 2003). this problem arises when the available data is from the joint distribution of the observed variables. a summary of these problems is provided in table 1, and a more comprehensive discussion can be found in the related work section. conditional causal effects represent the conditional distributions that capture the impact of a treatment on the outcome within specific contexts or sub-populations. this concept allows for targeted interventions and tailored policies, offering valuable insights for practical applications (qian and murphy 2011). table 1 lists various causal inference problems based on given and target distributions: on population, id — given p (v), target p x (y); s-recoverability — given p (v|s = 1), target p x (y); on sub-population, c-id — given p (v), target p x (y|z); c-gid — given {p zi (v \ z i )} m i=0 , target p x (y|z); s-id — given p (v|s = 1), target p x (y|s = 1). herein, v is the set of observed variables, x is the set of intervened variables, y is the set of outcome variables, and s = 1 corresponds to a sub-population. in this paper, we introduce the s-id problem. note that in all of these problems, the causal graph is given. shpitser and pearl (2006a) considered the c-id problem, which pertains to identifying a conditional interventional distribution p x (y|z) from the joint distribution of observed variables. an important practical limitation of the c-id formulation is that it assumes access to samples from the observational distribution of the entire population rather than just the target sub-population. unfortunately, the c-id identification result cannot be directly extended to the setting where the available samples are from the target sub-population, which is often the prevailing scenario in practical applications.
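as a concrete illustration of the back-door adjustment mentioned above (in the fully observed id setting, not the s-id setting introduced later), the following numpy sketch computes p x (y) = sum_w p(y|x, w) p(w) from a made-up joint distribution over a binary confounder w, treatment x, and outcome y; the numbers are random and carry no meaning beyond the arithmetic:

import numpy as np

rng = np.random.default_rng(0)
p = rng.random((2, 2, 2))          # made-up joint p(w, x, y), indexed as p[w, x, y]
p /= p.sum()

p_w = p.sum(axis=(1, 2))                          # p(w)
p_y_given_wx = p / p.sum(axis=2, keepdims=True)   # p(y | w, x)

def p_do(x, y):
    # back-door adjustment over the confounder w: p_x(y) = sum_w p(y | x, w) p(w)
    return sum(p_y_given_wx[w, x, y] * p_w[w] for w in (0, 1))

print(p_do(x=1, y=1), p_do(x=0, y=1))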
the recent extension of c-id, known as c-gid problem (correa, lee, and bareinboim 2021;kivva, etesami, and kiyavash 2023), which we will discuss in related work, also suffers from the same practical limitation.identifiablity in sub-populations. as mentioned earlier, a sub-population is a specific subset of individuals within a larger population distinguished by certain characteristics or traits. 3 we utilize an auxiliary binary variable s to model a sub-population akin to bareinboim and tian (2015): s is added as a child variable representing the specific traits that distinguish the sub-population of the population (s can have several parents), and s = 1 corresponds to the target subpopulation. we will formally introduce the auxiliary variable s in equation (1). in this paper, we address the problem of causal inference in a sub-population, where the objective is to identify p x (y|s = 1), which is the causal effect of a treatment or intervention on a specific subgroup of individuals within a larger population. specifically, we introduce the s-id problem, an identification problem on sub-population when we merely have access to observational data of the target sub-population. that is, given the causal graph, we seek to determine when p x (y|s = 1) can be uniquely computed from p (v|s = 1), where v is the set of observed variables. a real-world example. consider the causal graphs depicted in figure 1, where we analyze a hypothetical scenario in a random country. here: clearly x influences z, and both z and w affect y . the relationship between w and x can be explained by the possibility that in countries with older populations, there may be greater awareness and concern about the health risks of smoking, potentially leading to stricter health policies such as public smoking bans. additionally, one could argue that w may also have an impact on z. nevertheless, our subsequent analysis remains valid whether or not we consider a causal link between w and z. now, consider the scenario where the data from x, y, z, w is available from a subset of countries (sub-population) with younger populations than the world average. this scenario is illustrated in the left graph in figure 1. the s-id problem aims to identify the causal effect of a new policy x on the outcome variable y for this target sub-population, given only observational data from this group. as we will demonstrate, this causal effect is identifiable and can be calculated using algorithm 1.in contrast, in the setting of the s-recoverability problem, a causal inference problem in population (refer to the second row of table 1), the task is to compute the causal effect of x on y for the entire population using only data from this subpopulation. the limitation of data coming only from the subpopulation renders the inference for the whole population particularly challenging. accordingly, bareinboim and tian (2015) showed that in this example, the causal effect of x on y (in population) is unidentifiable. in the c-id setting, the conditional causal effect of x on y in sub-population is identifiable, but it requires observational data from the entire population, i.e., from all the countries in the world. lastly, consider another scenario where the sub-population is based on a condition on the mediator variable z rather than the confounder w (the right graph in figure 1). an example of this scenario might involve a sub-population of countries that have had high smoking rates in recent years. 
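whether such sub-population effects are identifiable hinges on the graph, and the graphical criteria involved are phrased in terms of d-separation; as a small, self-contained aside (the edge set below is an assumed stand-in in the spirit of figure 1, not the paper's exact figure), d-separation statements can be checked mechanically, for example with networkx:

import networkx as nx

# assumed edges: population age w, policy x, smoking rate z, outcome y
g = nx.DiGraph([("w", "x"), ("x", "z"), ("z", "y"), ("w", "y")])

try:
    d_sep = nx.is_d_separator   # networkx >= 3.3
except AttributeError:
    d_sep = nx.d_separated      # older releases

# {w} alone leaves the directed path x -> z -> y open, so it does not d-separate x and y
print(d_sep(g, {"x"}, {"y"}, {"w"}))       # False
# {w, z} blocks both x -> z -> y and the back-door path x <- w -> y
print(d_sep(g, {"x"}, {"y"}, {"w", "z"}))  # True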
applying our theorem 1, we can show that in this case, p x (y |s = 1) is not identifiable from p (v|s = 1). note that in the id setting, p x (y ) is identifiable from p (v). this shows that simply ignoring the sub-population and applying any algorithms in the id setting leads to an erroneous inference. the purpose of this example is to (i) demonstrate the critical role of causal graphs in whether a causal effect in a subpopulation is identifiable or not and (ii) show that previous identification results in the literature do not suffice to answer the s-id problem. an additional example is provided in appendix a.our main contributions are as follows.• we formally introduce the s-id problem, a practical scenario for causal inference in a sub-population. this problem asks whether, given a causal graph, a causal effect in a sub-population can be uniquely computed from the observational distribution pertaining to that sub-population. • we provide necessary and sufficient conditions on the causal graph for when a causal effect in a sub-population can be uniquely computed from the observational distribution of the same sub-population (theorems 1 and 2). • we propose a sound and complete algorithm for the s-id problem (algorithm 1).on sub-population c-id p (v) p x (y|z) c-gid {p zi (v \ z i )} m i=0 p x (y|z). we denote by pa g (x), ch g (x), and anc g (x) the set of parents, children, and ancestors of x (including x) in g, respectively.denoted by (x ⊥ ⊥ y|w) g , we say w d-separates x and y if for any x ∈ x and y ∈ y, w blocks all the paths in g between x and y . furthermore, for two disjoint subsets x and y of v, p x (y|s = 1) (or p s x (y)) corresponds to the causal effect of x on y in that sub-population. conditional causal effect p x (y|s = 1) is s-id in g s if for any two sems m 1 and m 2 with causal graph g s such that p m1 (v|s = 1) = p m2 (v|s = 1) > 0, then p m1 x (y|s = 1) = p m2 x (y|s = 1).let g ′ be a minimal (in terms of edges) subgraph of g s such that (i) g ′ contains p, (ii) x ∈ anc g ′ (s), and (iii) if p has exactly one collider, then the collider is an ancestor of s in g ′ . for x 2 := x \ anc(s), conditional causal effect p s x2 (v \ x 2 ) is s-id in g s and can be computed from p s (v) by p s (anc(s) \ s).so far, we have shown that p s x2 (v \ x 2 ) is always s-id in g s , where x 2 = x \ anc(s). therefore, in g * , the sidentifiability of p s x * (y * ) is equivalent to s-identifiability of p s x (y * ), thus p s x (y * ) is not s-id in g * .algorithm 1: a sound and complete algorithm for s-id 1: input: x, y, g s , p s (v) 2: output: a formula for p s x (y) based on return fail 12: end if a sound and complete algorithm for s-id equipped by proposition 1 and theorem 2, we present algorithm 1 for the s-id problem. if x 1 = ∅, due to the first part of theorem 2, p s x (y) is s-id and the algorithm returns equation (4) by replacing p s x2 (v \ x 2 ) with equation (3). now, consider the path starting from x to v l by the edges in p, then from v l to n by the edges in p 1 , then from n to v r by the edges in p 2 , and then from v r to y by the edges in p. in the second case, let g ′′ be the subgraph of g ′ consisting of the edges of p x←z , p z→w , p w →s , and p x→s . 
similarly, if p w →s has an intersection with p x←z in a node t , then (p x←t , p t ←w , p w ←m , p m→y * ) does not have any colliders and x 2 ∪{s} does not block it in g 1 , which is again a contradiction. we define g 2 to be a minimal (in terms of edges) subgraph of g s such that (i) g 2 contains p, (ii) g 2 contains p w →s if p has a collider, and (iii) x ∈ anc g2 (s). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/589.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/589.txt new file mode 100644 index 0000000000000000000000000000000000000000..c4bad11e154c2fd72f2f9e7ab074ecf15157c39c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/589.txt @@ -0,0 +1 @@ +large language models (llms), and especially generative pre-trained transformer (gpt) models, have shown excellent performance on many complex language tasks. this breakthrough leads to the desire to run these llms locally on mobile devices for user privacy, but even small llms are too big for on-device execution. for example, the smallest llama model has 7b parameters, which is 14gb in fp16, while high-end mobile devices have only up to 18gb dram. therefore, aggressively compressing llms via train-time optimizations, such as sparsification, quantization, or weight clustering, is a crucial step for on-device llm deployment. however, train-time optimization of llms is highly expensive due to the model size and computational resource overheads. in particular, the computational resource demand of the train-time differentiable weight clustering in dkm, one of the state-of-the-art weight clustering algorithms, is prohibitively high, as it needs to analyze the interactions between all the weights and all possible clustering options. accordingly, many existing llm compression techniques, such as gptq and awq, rely on post-training optimization. in this work, we propose memory optimization techniques to enable train-time weight clustering and their applications to dkm, leading to edkm. our techniques include cross-device tensor marshaling and weight matrix uniquification/sharding. when we used edkm to fine-tune and compress the llama 7b model to 3 bits per weight, we achieved about 130× memory footprint reduction for a decoder stack, yet outperformed the existing 3-bit compression techniques. accordingly, we need to tap into cpu memory to handle such large memory demand by overflowing to cpu memory and copying back to gpu when needed later. however, this will incur significant traffic between gpu and cpu (slowing down the training) and will need immense cpu memory capacity. • cross-device tensor marshaling: we track tensors being copied across devices and avoid redundant copying to reduce the memory footprint and expedite training. pytorch represents a tensor with data storage that links to the actual data layout and metadata that keeps the tensor shapes, types, and so on. such a tensor architecture lets pytorch reuse the data storage whenever possible and efficiently reduces the memory footprint. however, when a tensor is copied to another device (e.g., from gpu to cpu), the data storage cannot be reused and a new tensor needs to be created. when its view is changed in line 1, no additional gpu memory is required as the underlying data storage can be reused (i.e., x 0 and x 1 share the same data storage on gpu). however, when x 0 and x 1 move to cpu as in lines 2 and 3, the cpu memory consumption becomes 8mb, although y 0 and y 1 could share the same data storage on cpu, which leads to the redundancy on cpu memory and increases gpu-cpu traffic.
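the view-versus-copy behaviour described above is easy to reproduce; the following pytorch snippet is only illustrative (the tensor size and variable names are ours) and shows that a view shares its data pointer on the source device while per-tensor copies to the host do not:

import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
x0 = torch.randn(1024, 1024, device=device)   # 4 mb of float32
x1 = x0.view(-1)                              # a view: same underlying data storage as x0
print(x0.data_ptr() == x1.data_ptr())         # True: the view costs no extra memory

# copying each tensor to the host separately materialises two storages, so the same
# 4 mb ends up twice in cpu memory (8 mb) plus two transfers -- the redundancy the
# marshaling scheme is designed to avoid
y0 = x0.to("cpu", copy=True)
y1 = x1.to("cpu", copy=True)
print(y0.data_ptr() == y1.data_ptr())         # False: duplicated host storage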
for example, although x0 and x1 are the same tensor with just a different view, when copied to cpu, the resulting tensors y 0 and y 1 do not share the data storage, while x 0 and x 1 do on gpu. figure 2: when the proposed cross-device tensor marshaling is applied to the case in table 1, we can avoid duplication on the cpu side, which saves the memory/traffic. before copying x 1 to cpu, our marshaling scheme checks if there exists a tensor with the same data storage on the cpu. figure 2(a) illustrates the example in table 1 (with the corresponding line numbers) where x 1 shares the data layout with x 0 but y 0 and y 1 have independent/duplicated data storage on cpu. therefore, when a new tensor enters our marshaling system, we turn to the forward graph and check if there exists another tensor that is already on cpu and is reachable via only data-storage-invariant operations (i.e., operations such as view changes that do not alter the underlying data storage). if found, we return the reference of the existing tensor and the list of operations tracing back to the new tensor. as shown in figure 2(b), instead of copying x 1 to cpu, we simply return the reference to y 0 and the view operation between x 1 and y 0 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/59.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/59.txt new file mode 100644 index 0000000000000000000000000000000000000000..6ff2c818bcec533d4c36c55097597a59f58e4666 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/59.txt @@ -0,0 +1 @@ +generalization of reinforcement learning (rl) agents to previously unseen environments is a key topic in reinforcement learning. reinforcement learning agents have the tendency to overfit the environment on which they are trained. this problem has been highlighted often in the literature, for example by cobbe et al. and packer et al. the eco-system approach described by moulin et al. is one of the approaches put forward to improve generalization across environments while maintaining performance on previously seen environments. it is based on an eco-system of agents with the idea that each agent has its own policy with some generalizability, where the combination makes up a highly generalizable system. when a new environment is encountered, existing agents are used, or a new agent is trained when none performs satisfactorily. while the approach is unique in its ability to avoid catastrophic forgetting, it requires a lot of access to the environment to achieve its goal. in this paper, we aim to improve this approach. hereby, we focus on initialization procedures for new agents. better initialization has the potential to reduce the heavy burden of ample access to the environments and can additionally improve generalizability. drawing inspiration from papers on initialization techniques for deep neural networks (e.g. boulila et al.) and on transfer learning (e.g. taylor et al.), we consider the following initialization options: (i) initialization with the best agent in the pool on the new environment, (ii) with a random agent chosen from a pool, (iii) with an agent not included in the pool and trained on all past environments, which we refer to as the forked agent, and (iv) with no initialization at all, which matches the setup from moulin et al. we evaluate the performance of our innovations in the well-known minigrid environment and compare to existing state-of-the-art methods. our contributions are: this paper is organized as follows. section 2 presents the related work which has inspired the approaches tested in this paper.
section 3 provides an explanation of our approach. next, section 4 presents the experimental setup used to evaluate the approach. section 5 presents the results of the experiments. we end with a discussion in section 6.) and on transfer learning (e. this approach offers some similarities to our approach by using very specialized agents as well as leveraging the generalizability of one agent being trained on multiple environments, but it also differs from our forked agent approach where we use one agent trained on multiple environment (generalist agent) to initialize the agents of the eco-system (specialist agents). the data used to train the agent is gathered directly from the environment at the same time (like in our case), or at a later time (like when using replay memory) it is explored by the agent, which makes it a different approach from other machine learning techniques, like supervised learning, for example, where all the training data is provided upfront to the agent before it starts interacting with the environment. if no agent from the pool can be found, then a new agent will be created, trained on the new environment and added to the pool. the eco-system will then check if this new agent can replace an existing agent in the pool, and if it is the case remove the old agent from the pool.random initialization with this approach (figure1), each time an agent is created, its neural network is copied from another agent randomly chosen from the pool of agents.in the eco-system a new agent is created only if no agent from the pool was able to solve the new environment (reaching the threshold).forked agent initialization with this approach (figure1), we create a new agent, called main agent outside of the pool of agents. in figure4, we can see that the forked agent approach is showing a bit higher number of agents in the pool compared to the best agent and the basic initialization approaches. this increase of the number of agents comes from the fact that each agent trained using the forked agent initialization embedded a better generalization to other environments. the better generalization capabilities in this case makes it more difficult for one agent to fully match all the environments of another agent, leading to some overlap and an increase in the number of agents in the pool.the forked agent approach (figure4) is clearly superior when looking at the number of training steps needed to complete the training on 500 environments, being nearly half of what is needed by the best agent and random approaches. this is easily explained because each agent created embeds far better generalization capabilities as it has been trained on a lot more environments (as all coming from the main agent) before being trained on its dedicated environment. we can also see that any initialization techniques performs far better than the standard initialization originally proposed with the eco-system setup, around 3 time better for the random and best agent approach and 6 times better for the forked agent. this can be easily explained by the fact that any new agent is initialized with a neural network which has already been trained on similar but slightly different environment that the one on which it is trained, then a part of the learning has already been done and is transferred to the new agent. 
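a schematic version of the eco-system loop with the initialization variants discussed above might look as follows (our own simplification, not the authors' code; Agent, evaluate and train are placeholder stubs for an rl agent class and its evaluation/training routines, and the pool-pruning step is omitted):

import copy
import random

class Agent:                      # placeholder agent
    def __init__(self, weights=None):
        self.weights = weights

def evaluate(agent, env):         # placeholder: success rate of agent on env
    return 0.0

def train(agent, env):            # placeholder: specialise agent on env
    pass

def handle_new_environment(env, pool, main_agent, threshold, init="forked"):
    # 1) try to reuse an existing specialist from the pool
    for agent in pool:
        if evaluate(agent, env) >= threshold:
            return agent

    # 2) otherwise create a new agent with the chosen initialization
    if init == "best":            # copy of the pool agent that scores best on env
        new_agent = copy.deepcopy(max(pool, key=lambda a: evaluate(a, env)))
    elif init == "random":        # copy of a randomly chosen pool agent
        new_agent = copy.deepcopy(random.choice(pool))
    elif init == "forked":        # fork of the generalist trained on all past environments
        new_agent = copy.deepcopy(main_agent)
    else:                         # baseline: fresh, untrained weights
        new_agent = Agent()

    train(new_agent, env)         # specialise the new agent on the new environment
    pool.append(new_agent)
    return new_agent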
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/590.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/590.txt new file mode 100644 index 0000000000000000000000000000000000000000..a237c93d5a16ad4e174919faaf41bba0e5d616e4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/590.txt @@ -0,0 +1 @@ +structure learning aims to uncover the underlying directed acyclic graphs (dags) from observational data that can represent statistical or causal relationships between variables. the structure learning task has many applications in biology , economics , and interpretable machine learning . correspondingly, it is gaining scientific interest in various domains such as computer science, statistics, and bioinformatics . one challenge of traditional structure learning methods such as ges is the combinatorial search space of possible dags . no-tears proposes a solution for this challenge by relaxing the formulation of the learning task in a continuous space and employs continuous optimization techniques. however, with the continuous representations, another challenge also arises which is the acyclicity constraint of the graphs.in most continuous score-based methods , , , , the constraints of graph acyclicity are defined in a form of a penalizing score and minimizing the score will also minimize the cyclicity of the graphs. this type of approach requires a large number of running steps with complex penalization weight scheduling to ensure the correctness of the constraint, which varies greatly depending on settings. this lack of certainty will affect the quality and restrict the applicability of the learned structures. another approach is to embed the constraint acyclicity in the generative model of the graphs such as in by utilizing weighted adjacency matrices that can be decomposed into the combinations of a permutation matrix s, which has a strictly upper triangular form, is the adjacency matrix of the dag when the topological ordering of the variables is correct. this matrix is generated from a latent variable z. for each instance of the permutation matrix p, the rows and columns in s can be permuted to generate an isomorphic graph g. the variable θ defines the parameters of the local conditional distributions of the nodes given their parents in g. the observational data d consisting of n observations is assumed to be generated from this generative model. and a strictly lower triangular matrix. our study is inspired by this approach by using a direct constraint in the generation process instead of a post-hoc penalizing score.there is a parallel branch of permutation-based causal discovery approaches whose methods allow us to find the topological ordering in polynomial time , , , , which can provide beneficial information. inspired by these approaches, we propose a framework, topological ordering in differentiable bayesian structure learning with acyclicity assurance (tobac), to greatly reduce the difficulty of the acyclicity-constraining task. conditional inference is performed in this framework with the condition being the prior knowledge provided from the topological orderings. our work is based on the independent factorization property of a dag's adjacency matrix into a permutation matrix p and a strictly upper triangular matrix s which represents the adjacency matrix when the ordering is correct. the factorization p (g, s, p) = p (s) p (p) p (g | s, p) allows us to infer p and s independently. 
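a minimal numpy sketch of this factorization (values invented): a strictly upper triangular s is acyclic by construction under the correct ordering, and conjugating with a permutation matrix p yields an isomorphic dag g = p s pᵀ:

import numpy as np

d = 4
rng = np.random.default_rng(1)

s = np.triu(rng.integers(0, 2, size=(d, d)), k=1)  # strictly upper triangular => acyclic
order = rng.permutation(d)
p = np.eye(d, dtype=int)[order]                    # permutation matrix for that ordering

g = p @ s @ p.T                                    # adjacency of an isomorphic dag

# acyclicity is guaranteed by construction: the adjacency of a dag on d nodes is
# nilpotent, so g^d must be the zero matrix
print(np.array_equal(np.linalg.matrix_power(g, d), np.zeros((d, d), dtype=int)))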
especially, decoupling these enables us to apply recent advances in learning of topological ordering and probabilistic model inference techniques. for each case of the permutation matrix p, we can infer the dag's strictly upper triangular matrix s and compute the adjacency of an isomorphic dag with g = psp ⊤ . in order to infer this dag g, we choose the recent graph inference approach in this field, dibs , as ours inference engine for s. we run experiments on synthetic data and a real flow cytometry dataset in linear and nonlinear gaussian settings. the proposed constraint in the structure approach shows better dag predictions and achieves better performance compared to other approaches. contributions. the main contributions of this study are summarized as follows 1) we address the limitations of post-hoc acyclicity constraint scores by strictly constraining the generative structure of the graphs. by utilizing the permutationbased decomposition of the adjacency matrix, we can strictly guarantee the acyclicity constraint in bayesian network. 2) we introduce tobac, a framework for independently inferring and conditioning on the topological ordering.our inference process guarantees the acyclicity of inferred graphs as well as reduces the inference complexity of the adjacency matrices. 3) we demonstrate the effectiveness of tobac in comparison with related state-of-the-art bayesian score-based methods on both synthetic and real-world. our approach obtains better performance on synthetic linear and nonlinear gaussian data and on the real flow cytometry dataset. another approach is to embed the constraint acyclicity in the generative model of the graphs such as inby utilizing weighted adjacency matrices that can be decomposed into the combinations of a permutation matrix s, which has a strictly upper triangular form, is the adjacency matrix of the dag when the topological ordering of the variables is correct. our work is based on the independent factorization property of a dag's adjacency matrix into a permutation matrix p and a strictly upper triangular matrix s which represents the adjacency matrix when the ordering is correct. the factorization p (g, s, p) = p (s) p (p) p (g | s, p) allows us to infer p and s independently. bcd netsdecomposes the weighted adjacency matrix to a permutation matrix and a strictly lower triangular matrix, and infers the probabilities of these matrices using the evidence lower bound (elbo) of the variational inference problem.for any function f (g, θ) of interest, we can compute its expectation from the distribution p (g, θ | d, p) by inferring p (z, θ | d, p) with the following formula. the distributions of the parameters p (θ | g) and the data p (d | g, θ) are chosen differently for the linear and nonlinear gaussian models. our tobac models with the orderings from eqvarand the ground-truth orderings are compared with bcd nets (denoted as bcd), dag-gflownet (denoted as gfn), and dibs. beside dibs, which can learn the joint distribution p (g, θ | d) and infer nonlinear gaussian networks, bcd nets and dag-gflownet are designed to work with linear gaussian models. dibs and our approach use the prior of the erdős-rényi graphs, which is in the form of p (g) ∝ q g 1 (1q) ( d 2 )-g 1 where q is the probability for an independent edge being added to the dag. 
we followwhere the e-shd score is the expectation of the structural hamming distance (shd) between each g and the ground-truth g * over the posterior distribution p (g | d) to compute the expected number of edges that has been incorrectly predicted. performance on synthetic data a) linear gaussian models: figure2illustrates the performance of the methods with linear gaussian models. as the graph becomes denser, as in the er-2 settings, dibs models have high variances in the results, whereas our models are as stable as other approaches. considering dag-gflownet, which uses gflownetsto infer the posterior distribution, our models achieve lower e-shd score and substantially higher auroc score in the denser graph setting. our tobac models with the orderings from eqvarand the ground-truth orderings are compared with bcd nets (denoted as bcd), dag-gflownet (denoted as gfn), and dibs.b) topological orderings from other approaches: we summarize the results of the graphs inferred by tobac with the ground-truth ordering and the orderings learned by several approaches consisting of eqvar, npvar, and scorein figure5. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/591.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/591.txt new file mode 100644 index 0000000000000000000000000000000000000000..933ac02d8cabfd8324aa4dd827b95c937e3c0076 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/591.txt @@ -0,0 +1 @@ +let h ⊂ y x be a class of functions mapping the features (instances) x to the labels y, where |y| = n is a finite set of size n . the realizable online classification problem is defined as a game with the following protocol: at the beginning, nature selects h ∈ h; at each time step t, nature chooses x t ∈ x and reveals to a learner; the learner then makes a prediction ŷt ∈ y, potentially using the history observed thus far; nature then reveals the true label y t = h(x t ) and the learner incurs the loss 1{ ŷt = y t }. the goal is to minimize the cumulative loss for a given horizon t . it was shown in the seminal work of littlestone (1988) that such cumulative loss can be completely characterized by the littlestone dimension ldim(h) of h if the features x t are selected adversarially. attempts to generalize this realizable case primarily focus on the regret formulation (ben-david et al., 2009;daniely et al., 2015;rakhlin et al., 2010), and assume the observable labels ỹt = {ỹ 1 , • • • , ỹt } to be generated adversarially. here, we do not evaluate the actual cumulative loss for the underlying truth, rather, we evaluate the prediction quality through the following regret:the expected regret will then be upper bounded by o( t ldim(h) log(t n )) daniely et al. (2015).while the sub-linear regret bounds for the adversarially generated observable labels are intriguing from a mathematical point of view, they do not necessarily reveal the actual cumulative errors a learner might incur, especially when there are correlations between the observable labels and the underlying truth. indeed, a notable result by ben-david et al. (2009) showed that if the hypotheses are binary valued and the observable (noisy) labels ỹt are generated by flipping the true label y t = h(x t ) w.p. η t ≤ η < 1 2 (and w.p. 1η t remain unchanged), then one can achieve the actual expected cumulative loss of order o(log |h|/(2η -1) 2 ). 
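to make the protocol concrete, here is a toy simulation with a standard multiplicative-weights learner standing in for the predictors analysed in the text (an illustration of the setting only, not the construction used in the paper; all sizes and constants are arbitrary):

import numpy as np

rng = np.random.default_rng(0)
X, Y = 16, 2                                  # feature and label alphabet sizes
H = rng.integers(0, Y, size=(50, X))          # a random finite hypothesis class
h_star = H[7]                                 # nature's hidden hypothesis
eta = 0.2                                     # label-flip probability (< 1/2)

w = np.ones(len(H))
mistakes = 0
for _ in range(500):
    x = rng.integers(X)
    votes = np.bincount(H[:, x], weights=w, minlength=Y)
    y_hat = int(votes.argmax())               # weighted-majority prediction from noisy history
    y_true = int(h_star[x])
    mistakes += int(y_hat != y_true)
    y_noisy = y_true if rng.random() > eta else 1 - y_true
    w[H[:, x] != y_noisy] *= 0.7              # gently down-weight hypotheses that disagree
    w /= w.max()                              # renormalise to avoid numerical underflow
print("cumulative mistakes against the true labels:", mistakes)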
surprisingly, the best approximating error of hypotheses in h to noisy labels is of order ηt ; the actual cumulative error is independent of the time horizon t ! as remarkable as this result is, its proof, as provided in ben-david et al. (2009), is based on a somewhat non-intuitive backward induction.this paper generalizes the noisy-label scenario of ben-david et al. (2009) to broader stochastic noisy models. our approach not only provides results that go beyond ben-david et al. (2009) through substantial generality, but also provide an intuitive understanding of the underlying paradigm. let ỹ be the set of noisy observations, which we assume is finite (this assumption can be relaxed) and of size m . we model the noise process as a noisy kernel k : x × y → 2 d ( ỹ) , where d( ỹ) is the set of all distributions over ỹ. that is, the kernel k maps each pair (x, y) to a subset q x y = k(x, y) ⊂ d( ỹ) of distributions over ỹ. we consider the following robust (noisy) online classification scenario: at the beginning, nature selects h ∈ h; at each time step t, nature chooses x t ∈ x and reveals to the learner; the learner then makes a prediction ŷt , based the feature x t and noisy labels ỹt-1 ; an adversary then selects a distribution pt ∈ q xt h(xt) , samples ỹt ∼ pt and reveals ỹt to the learner. let φ and ψ be the strategies of the learner and adversary, respectively. the goal of the learner is to minimize the following expected minimax risk:where ŷt = φ(x t , ỹt-1 ), ỹt ∼ pt and pt = ψ(x t , ŷt ) ∈ q xt h(xt) . in addition, we also consider scenarios that hold with high probability and cases for which the features are generated from general stochastic process. we refer to section 2 for a more complete specification of our setting. we model the noise process as a noisy kernel k : x × y → 2 d ( ỹ) , where d( ỹ) is the set of all distributions over ỹ. more precisely, assume y = {0, 1}, we will construct for any h ∈ h a distribution-valued function f h , such that ∀x ∈ x , f h (x) = q x h(x) , where ∀y ∈ y, q x y ∈ q x y are distributions satisfying. to remove the dependency on such randomness and to assess the fundamental limits of the prediction quality, we consider the following two measures: definition 4 let h ⊂ y x be the set of hypotheses, p be a set of random processes over x t , and k : x × y → 2 d( ỹ) be a noisy kernel.example 1 (robust hypothesis testing) let y = {0, 1}, x = {x} be a singleton set, h = {h i (x) = i : i ∈ y} and ỹ be any finite set of size m .we now define for any h ∈ h the function f h such that ∀x ∈ x , f h (x) = q x h(x) . let f = {f h : h ∈ h} and φ be the predictor from lemma 9 with class f and l 2 divergence (using x t , ỹt from the original noisy classification game). note that in the binary label case we define f with values q x 0 , q x 1 that attain l 2 (q x 0 , q x 1 ) = l 2 (q x 0 , q x 1 ). recall that our robust online classification problem is completely determined by the tuple (h, p, k) of hypothesis class h ⊂ y x , random process class p, and noisy kernel k., the loss v i t = 1 if and only if h i (x t ) = h j (x t ) and the predictor φ i,j (x t , ỹt-1 ) differs from h i (x t ).since for any k ∈ s t such that h k (x t ) = h k * (x t ) either k is removed from s t+1 (which contributes at most n t -n t+1 ) or its contribution to e t+1 is decreased by 1 when compared to e t (this is because by our construction of algorithm 1 and property (10) that once the contributions of k to e t equals 0 it must be excluded from s t+1 ). 
let q x 0 , q x 1 and q x 0 , q x 1 be as defined in the proof of theorem 12. define for any h ∈ h, a function f h such that f h (x) = (1η)e h(x) + ηu and f = {f h : h ∈ h}.proposition 32 for any noisy kernel k such that ∀x ∈ x there exists y = y ′ ∈ y such that inf p∈q x y ,q∈q x y ′ {h 2 (p, q)} ≤ γ and k ≥ 1. if |x | ≥ log k, then there exists a hypothesis class h with |h| = k such that for p being the singleton process over x t , we have the expected minimax risk lower bounded by rt (h, p, k) ≥ ω log |h| γ . let q i ∈ q x i y i and q ′ i ∈ q x y ′ i be the elements satisfying h 2 (q i , q ′ i ) ≤ γ. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/592.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/592.txt new file mode 100644 index 0000000000000000000000000000000000000000..eeb085d1198a6a152d053e4e4edeed0b781b2071 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/592.txt @@ -0,0 +1 @@ +we consider the problem of score aggregation through the general choice problem of having to choose one item from a set of options called candidates. the actual utility of each candidate is unknown and impossible to compute, but we have access to human or software agents that can provide a noisy estimate of it. how to select the best possible candidate?in the aggregated output, the errors of the individual agents should ideally mitigate one another and produce a collective decision that would outperform the one of a single agent, like in the seminal condorcet jury theorem . however, two issues can hamper aggregation. the first one is the varying accuracy of the agents. although of great importance, this issue is outside the scope of this paper, where we focus on the second one: correlations between agents.this problem can arise from various reasons. in the case of software agents, it is frequent to use several variants of an algorithm, or the same algorithm with different hyperparameters. in the case of human experts, some of them may share a common sociocultural background or prior deliberation on the topic at hand, which may give them a common bias.these correlations, which may be unknown to the aggregation rule, may hinder its performance. consider the following toy example: three algorithms are used, two of them being exact clones (they always return the same estimate). unaware of the clones, the aggregation rule uses the median score. as the median estimate is always the one of a clone, the aggregation behaves exactly as any of the clones, and the potential benefit from the independence of the third algorithm is lost.our model is close to the one of epistemic social choice, in which voters have noisy estimates of the candidates' true values. some works investigate the quality of the results of specific voting rules in this model .decisions based on the aggregation of scores appear in many contexts, not necessarily related to the human activity of social choice. in regression analysis, the quality of prediction can be improved by aggregating the results of different algorithms. for example, the methods of boosting and bagging achieve some form of aggregation .when several estimations are available for the same problem, it is common for some of them to be correlated, for example when the same technique is used with different hyperparameters or different training datasets. since these correlations can be harmful for aggregation, many of the main methods require the panel of algorithms to be diversified . 
in particular, this avoids biasing the results in the direction of a potential "majority group". here, we do not assume this diversity constraint to be satisfied.our topic is also close to the unsupervised ensemble learning problem, where the goal is to find the best way to aggregate the outputs of a family of classifiers without any feedback. some methods in this area address the problem of correlated classifiers . these methods are also useful in the crowd-sourcing framework in which agents might be correlated. however, in their setting, each choice problem always examines the same candidates (e.g. "cat" and "dog") in different situations (e.g. images to classify), whereas in our setting, there is no obvious mapping between the candidates of any two distinct choice problems.finally, there is a vast literature in statistics applied to physics on the problem of aggregating correlated measures of the same quantity to reduce the noise . in particular, the method called blue is equivalent to the model-aware benchmark that we present in section 3.2 . but our general problem differs as we do not assume any prior knowledge on the distributions and the correlations of the estimators. moreover, we focus on the choice problem of selecting the best candidate, which is not the same as inferring one value from noisy estimates. we also differentiate ourselves by proposing a novel method based on spectral decomposition for this problem.it is worth noting that this paper introduces a new choice rule where the identification of correlations between agents is based on the classic data preprocessing technique known as singular value decomposition (svd) . the use of svd is not uncommon in re-lated fields such as multi-armed bandits (to compute upper confidence bounds) and recommendation systems (for collaborative filtering). however, to the best of our knowledge, this is the first time that svd is applied to a choice problem based on noisy estimations by correlated agents. in our case, viewing the scores as subjective utility values, the corresponding aggregation rule is as follows: compute the welfare of each candidate as the sum of its scores, wutil(cj) := n i=1 si(cj); then choose a candidate with maximal welfare. in most cases, the utilitarian welfare will mostly depend on the scores given by the large group of 20 agents, and the information given by the 4 independent agents will mostly be ignored. it also suffers in the case of varying correlations: for example, if one independent agent mistakenly doubles the score of a candidate, then the welfare of the candidate is doubled, but if a group of k correlated agents do the same, then the welfare of the candidate is multiplied by 2 k . but it is also possible that both agents are noisy and fully correlated, that both candidate have the same utility, and that the difference between the values 10 and 15 just comes from the shared error of the agents.equation 2 is affected by the size of the groups, like equation1, but with the same scaling factor for each candidate: for example, if we double the size of one group, it simply doubles the welfare of each candidate, which does not change the outcome of the choice problem. 
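a tiny numerical illustration of the points above (invented scores; rows are agents, columns are candidates): with three exact clones and two independent agents, both the median and the utilitarian welfare follow the clones, whereas averaging within known groups before summing removes the group-size effect:

import numpy as np

scores = np.array([
    [0.9, 0.4],
    [0.9, 0.4],
    [0.9, 0.4],   # three clones sharing the same error
    [0.3, 0.8],
    [0.2, 0.7],   # two independent agents
])

print("median winner:     ", np.median(scores, axis=0).argmax())   # follows the clones
print("utilitarian winner:", scores.sum(axis=0).argmax())          # also dominated by the clones

# averaging within known groups before summing removes the size effect
group_means = np.array([scores[:3].mean(axis=0), scores[3], scores[4]])
print("group-aware winner:", group_means.sum(axis=0).argmax())     # the independents now count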
we can represent the link between agents and groups by vectors: to each agent ai ∈ a l , associate the 1 × k vector ⃗ ei := (1 l ′ =l ) 1≤l ′ ≤k (1 at the l th position, 0 everywhere else).to this end, we simply define the estimated embedding vectors ⃗ ei of the agents as their score vectors (including the scores on a potential training set of candidates), normalized such that all the score vectors of the agents have the same mean and standard deviation. mj can contain redundant dimensions (in particular if the agents are correlated), so we would like to compute the welfare w(cj) as the product k l=1 λ l 2 of the k greatest singular values, where k would estimate the number of relevant dimensions in the embedding of the agents.equivalently, let s(cj) be the n × 1 random vector of the scores provided by the agents, ē := σ d in σ f e be the n × (n + k) matrix that concatenates the characteristics of distinct noise and feature noise (where in denote the n × n identity matrix), and xj = (d1,j, . independent agents; the case e =   1 0 1 0 0 1   with σ d = 0 models the example with two clones given in the introduction (more generally, the ideal case with k perfect groups introduced in section 2. , λ k are the k greatest singular values of the score matrix mj and k is the estimated number of relevant dimensions, computed as described in section 2. the embedding vectors ( ⃗ ei)i of the agents, used to define ŝ and mj, are based on the matrix of scores of the agents on the candidates considered for the problem. in the extreme case of 2 candidates, the non-trained ev is significantly less efficient (75%) than ma, pl+ and ev+ (86%), but with at least 10 candidates, the difference is within the margin of error: ev efficiently exploits the candidates of the current election as a training set to identify the correlations between agents., with c = 1 √ β 2 +(1-β) 2 .in the context of aggregating correlated noisy agents in a choice problem, we have proposed embedded voting, a method that embeds the agents according to the scores they produce, and we have compared its performance with a variety of other methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/593.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/593.txt new file mode 100644 index 0000000000000000000000000000000000000000..a19be01a5ca91ef133304ee932925f2a43c64e0f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/593.txt @@ -0,0 +1 @@ +the internet of things (iot) is nowadays a paradigm of reference for several applications. iot is adopted in various domains such as smart farming, drone imaging, industry 4.0, and safety and is implemented in many challenging environments like satellites, submarines, and the large hadron collider (lhc). hence, huge amounts of data are amassed by sensors and can be overwhelming to process. for example, the lhc generates approximately one petabyte of collision data per second . therefore, iot sealed an alliance with artificial intelligence (ai) to improve data management pipelines and offer better analytics to clients. in the context of artificial intelligence of things (aiot), deep learning (dl) models, mainly fostered in cloud servers, have proven their efficacy in injecting intelligence into the network of connected iot devices . 
raw data captured by sensors can thus be used to produce pertinent insights for the end user's benefit.with the rapid development of edge computing, ai services have known a migration from cloud servers to edge devices (e.g., iot gateways and fog nodes). for sensitive applications, such as e-health and smart surveillance, deploying ai services at the edge has numerous benefits . data will no longer need to be transferred through the internet, increasing data privacy and reducing security breaches. latency will also be reduced to account only for the deployed models' computational complexity. however, edge devices can span large this work was partly funded by the european union's horizon research and innovation program under grant agreement no 101070374. computational resource constraints from microcontrollers to cloudlets passing by single-board computers . therefore, the observed heterogeneity in edge device requirements can be troublesome for deploying state-of-the-art dl models as they usually perform with very deep networks implicating large numbers of parameters, thus resulting in an inevitable increase in memory footprint and inference time.to address this issue, an extensive body of work proposes various techniques of model compression for lighter memory footprints. for instance, quantization aims at coding weights on low-precision arithmetic and knowledge distillation allows the design of a small model (i.e., student) trained on quality features from a bigger network (i.e., teacher) . another avenue of research takes inspiration from the brain, known for its efficiency and plasticity.the human brain is able to conduct a wide range of tasks dexterously and process huge amounts of data with around 20 watts of power . the brain's neuronal morphology could explain this frugality in energy consumption. the brain relies on inhibitory mechanisms that are necessary for quick decision-making in a survivalist situation as well as bidirectional connections between higher and lower visual areas that enhance the brain's abstraction capacities of the surrounding environment . to understand the interactions between both visual areas, predictive coding (pc) theory postulates that the main function of the brain is to minimize a prediction error defined as the difference between the real and predicted stimuli. pc shows that feed-forward connections drive prediction errors to higher layers, whereas feedback connections attempt to predict lower layers' neural activity. the bidirectional movement relies on continuously refining the brain's internal input representations throughout the visual hierarchy. when implemented in convolutional neural networks (cnn), pc often yields higher accuracy than its conventional counterpart .in light of these ideas, our goal is to benefit from pc in designing shallow cnns. we aim to show empirically that the pc refinement process allows the same expressivity and feature diversity that width and depth allow in cnns . furthermore, the refinement of feature representations demands a certain amount of cyclic processing between lower and higher visual areas until equilibrium is reached . 
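the cyclic refinement can be sketched in a few lines of pytorch; this is our own simplification (a linear feedback predictor and a gradient-style update of the representation), not the paper's architecture, and the stopping rule simply detects that the prediction error has stopped changing:

import torch
import torch.nn as nn

encoder = nn.Sequential(nn.Flatten(), nn.Linear(32 * 32 * 3, 128), nn.ReLU())
feedback = nn.Linear(128, 32 * 32 * 3)        # feedback connection predicting the input

def refine(x, max_cycles=6, tol=1e-3, step=0.1):
    z = encoder(x)
    prev_err = None
    for cycle in range(max_cycles):
        err = x.flatten(1) - feedback(z)          # prediction error at the lower layer
        z = z + step * (err @ feedback.weight)    # nudge the representation to reduce it
        e = err.pow(2).mean().item()
        if prev_err is not None and abs(prev_err - e) < tol:
            break                                  # equilibrium reached: stop cycling
        prev_err = e
    return z, cycle + 1

z, n_cycles = refine(torch.randn(8, 3, 32, 32))
print(n_cycles)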
to alleviate this issue, early exiting techniques are used to abort further cycling over the feature extractor network once a performance threshold is reached.our contributions can be formulated as follows:• we apply pc techniques to cnns to design shallow networks with a considerably reduced memory footprint that are deployable on edge devices • we improve pc cyclic processing with an early-exiting mechanism which further reduces the computational cost and inference time • we evaluate our proposed model against vgg-16 and achieve for image classification on cifar-10 comparable results with only a 3% difference and less than 1% of the baseline's number of parameters therefore, the observed heterogeneity in edge device requirements can be troublesome for deploying state-of-the-art dl models as they usually perform with very deep networks implicating large numbers of parameters, thus resulting in an inevitable increase in memory footprint and inference time.• we apply pc techniques to cnns to design shallow networks with a considerably reduced memory footprint that are deployable on edge devices • we improve pc cyclic processing with an early-exiting mechanism which further reduces the computational cost and inference time • we evaluate our proposed model against vgg-16 and achieve for image classification on cifar-10 comparable results with only a 3% difference and less than 1% of the baseline's number of parameters. the section covers some solutions to deploy dl models on low-resource edge devices, previous work done on pc with dl models, and recent applications of early exiting techniques. so far, pc has been utilized to improve model accuracy or robustness by adding feedback connections to deep feed-forward networks, which causes the number of parameters to double, and thus the latency to increase. however, not only does the underlying competition launched between classifiers over the shared weights of the backbone help achieve a pareto optimal solution, but it also encourages pc cycles to achieve consistency early enough as the loss guides the classifiers to collaborate in generating semantically similar feature vectors.the model design process was driven by the motivation of exploiting pc dynamics in order to build shallow networks, in terms of both depth and width, that could perform as well as the established vgg-16 architecture and that could be deployed on edge devices of kilobytes (kb) to megabytes (mb) of memory. hence, given a shallow model a, we construct model b to be wider and model c deeper in order to show empirically that recurrence could account for the improvements that width and depth bring to the neural network. it is worth mentioning for model b that a latency of 80 milliseconds (ms) is rather acceptable knowing that the network has already done 5 cycles through the backbone and computed 5 class probabilities, thus, offering better performance and more accurate classification. however, given that our architecture is shallow, attaining reasonably large numbers of cycles will barely reach the computational cost attained by a conventional deep network like vgg-16, as seen in the flops column of tableii.from a performance perspective, we observe from tableiii, an increasing accuracy through cycles as more cycles allow more expressivity to the shallow network. 
furthermore, our proposed models are not very far from vgg-16 performance on cifar-10 with only a 3% difference from the 4-cycle and 5-cycle recurrent processing and a considerable reduction in the numbers of parameters from 10 8 (i. wide model b and deep model c reach almost the same performance as model a which is tinier and faster. nevertheless, it is worth mentioning that width helped the model gain high accuracy at later cycles and depth improved the internal classifiers' average performance. overall, we can conclude that pc dynamics might serve as a complementary key aspect for increasing model expressivity along with depth and width in feed-forward neural networks. hence, in numerous cases, a 1-cycle network will be able to yield a similar feature vector as a 6-cycle network; a property which is most wanted for reducing computational cost. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/594.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/594.txt new file mode 100644 index 0000000000000000000000000000000000000000..4b74668baea56dd37f606a126b6c57a1d192464f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/594.txt @@ -0,0 +1 @@ +as introduced before, we are targeting cloud native deployments with segregated storage. this means, the data transfer bottleneck is always to be searched for in the network connection separating the storage system from the compute nodes. therefore, we need to test for only two properties: (1) can our system saturate network bandwidth between compute node and storage system, and (2) does our system linearly scale (obviously eventually saturating network switching fabric's capacity)? to test (1) and ( 2), we have conducted the following experiments:• saturation test in hpc data center using an ibm ds8000 gpfs storage system connected with 50 gbit/s • saturation test on an aws hpc7g.16xlarge instance connected to aws s3 with 25 gbit/s • saturation and scaling test on ibm codeengine , 10 gbit/s per worker to ibm cloud object storage we are randomly accessing indices on the super-tensor and streaming sub-tensors of 8 mb size back to the client. we increase number of parallel threads until we saturate the network. in case of the scaling experiment, we ran multiple instances of the saturated configuration.big data processing evolved from jbodsystems like hdfsand mapreduceover apache sparkaway from jbod towards real-time query engines like prestoor apache impalaon top of cloud object storage (cos), which evolved as generic term for s3 compatible object storage. storage formats evolved from csv over json to high performance column stores like parquetand avro, managed by data lakehouse storage managers like apache iceberg, delta lake or apache hudi. although this architecture features nearly infinite scalability and relational database features like acid, it is limited to tabular data with sql as the only cross-cutting open standard interface, focusing on data discovery tasks, mainly composed of aggregations and filtering.figure1: state of the art data lakehouse architecture foundation modeling entails building pre-trained transformerswith big data from multiple modalities in addition to natural language, that can be adapted to perform a range of downstream tasks. in addition to growing volumes of eo data from next-generation satellites, increasingly gridded weather and climate model simulation outputsare also being emulated with deep learning methods using tensors. 
zarr is a very interesting data format for storing and retrieving tensor data. what parquet and avro is for tabular data, zarr is for multidimensional tensor data. a zarr folder stores tensor data of arbitrary dimensions. actual data is organized in chunks, which is a predefined sub-tensor of the super tensor stored in a particular zarr folder. this layout allows for minimizing number of reads for obtaining a particular scalar or sub-tensor from a zarr folder as the (cached) metadata can be queried to obtain the file name containing a particular chunk of interest and within a chunk, the actual block can be obtained by computing the byte offset using the tensor indices used to address the data item.xarraysimilar to pandas for tabular datais an interface layer which can read and write a variety of tensor data formats like netcdf, geotiff, cloud optimized geotiffand zarr.up to this point, each zarr folder on storage resembles a set of super-tensors containing a database or collection of data.• in atmospheric data, at different resolutions, data points in a three-dimensional cube are grouped together and summary statistics like minimum, maximum and average temperature are calculated and stored to the hsi. as hsi contain domain specific statistical summaries of underlying discrete and continuous data at different hierarchical resolutions, the consumer can decide how often a tensor of a specific class or category has to be shown (if at all) to the model.as illustrated in figure7, in the hpc data center we could show to saturate the 50 gbit/s link using 10 parallel threads by obtaining a tensor stream rate of 762. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/595.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/595.txt new file mode 100644 index 0000000000000000000000000000000000000000..d50d1868efaebeb34c6dbea4c3536ab49545c166 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/595.txt @@ -0,0 +1 @@ +in machine learning, optimizers implement gradient descent. adaptive optimizers, represented by adam , have been in competition with traditional optimizers sgd with momentum. from the initial adagrad to rmsprop , and then to adam and amsgrad , adaptive optimizers have undergone many improvements. however, their applicability remains limited to sparse gradients. while adaptive optimizers can converge quickly on sparse datasets, their performance is still not as good as traditional sgd with momentum on non-sparse datasets. similarly, although sgd can perform well under appropriate scheduling, its convergence is slower. this means that while adaptive optimizers may require twice the computational effort, sgd also needs twice the number of iterations. consequently, both types of optimizers still play different roles on the datasets where they are most effective. this has also led to some discussions on switching between different optimizers, such as methods for switching from adam to sgd .the saddle point problem is the primary factor affecting the effectiveness of convergence in the early to mid-stages and also influences the final convergence location. this paper mainly discusses the saddle point issue. mathematically, a saddle point can be described as a point in a multivariate function where it acts as a local maximum in certain directions and a local minimum in others. 
in high-dimensional spaces, due to the vast number of directions, finding a genuine local minimum becomes highly challenging, making saddle points more prevalent. the optimization surface of deep learning models is intricate, comprising many saddle points and flat regions. this paper reinterprets the iterative process of machine learning optimizers and proceeds primarily through experimentation. the contributions of this paper are as follows: • we experimentally demonstrate that for non-sparse datasets, weights with larger changes at the beginning should be accelerated, which is completely opposite to the perspective of traditional adaptive optimizers. this implies that improving adaptive optimizers solely on the basis of historical gradient information is incomplete. • we use loss values for weight separation and design saddle-point experiments to compare the effects of accelerating weights in different directions, validating that different datasets should accelerate weights in different directions. it should be noted that this paper focuses more on exploring the fundamental theory of machine learning than on final accuracy. our experiments are based on the wide residual network. in our preliminary tests, other networks showed similar effects in the dedicated experiments, which also supports the correctness of the theory. however, it may not be effectively applied to the actual training of all networks, because the wide residual network has a particularly strong ability to distinguish between sparse and non-sparse features. finally, we employ a very straightforward scheduling approach to validate the effectiveness of asymmetric momentum in the wide residual network. while adaptive optimizers can converge quickly on sparse datasets, their performance is still not as good as traditional sgd with momentum on non-sparse datasets. g_t is the gradient at time step t, ∇j is the gradient of the objective function j at θ_t, and G_t is a diagonal matrix whose diagonal entry G_t^ii is the sum of the squared gradients with respect to θ^ii up to time step t. in the later stages, the accumulated historical gradients √G_t + ε are used to slow down the update of weights that were updated quickly earlier on, while continuing to train the weights that have been updating more slowly. consistent with the core idea of adaptive optimization, θ_n represents parameters that have historically changed quickly in the non-sparse direction; θ_s represents parameters that have historically changed slowly in the sparse direction. penalizing θ_n, which represents the weights in the non-sparse gradient direction, inevitably causes the mass center of the spring system, i.e., the overall weights, to shift significantly towards the sparse direction in the bottom right as the gradients update. although its performance is significantly better than adagrad, its versatility is still lacking and not sufficient to compete with a balanced sgd on non-sparse gradients, which is inherently closer to the optimal point for datasets like cifar10. the adaptive optimizer adagrad essentially accumulates historical gradients to quickly reduce θ_n, thereby weakening the spring's effect and prematurely reducing the non-sparse parameters in the direction of θ_n. as shown by the blue line, while sgd can be effective on balanced datasets, it requires a long time to change direction and escape saddle points for both sparse and non-sparse datasets.
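For reference, the adagrad accumulation described in this passage can be written in the standard per-coordinate form (η is the base stepsize and ε a small constant):

\[
g_t = \nabla J(\theta_t), \qquad
G_t^{ii} = \sum_{\tau=1}^{t} \left(g_\tau^{i}\right)^2, \qquad
\theta_{t+1}^{i} = \theta_t^{i} - \frac{\eta}{\sqrt{G_t^{ii}} + \epsilon}\, g_t^{i}.
\]

Coordinates with a large accumulated G_t^ii (the historically fast-moving, non-sparse directions) are slowed down, which is exactly the behaviour the text argues is the wrong direction of acceleration for non-sparse datasets.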
this is also why adam appears more effective, its decay exponential moving average strategy allows it to cover a broader range of destination, training without the need for extra training time, even though this coverage is entirely bias towards the sparse direction. this not only allows for accelerating weights that change slowly in the sparse gradients but also weights that change frequently in non-sparse gradients, thereby making it adaptable to all kinds of datasets. specifically, when the loss is larger, it will be on the left side of the loss phase line; when the loss is smaller, it will be on the right side of the loss phase line, showed in figure . if we increase the momentum whenever θ n moves to the left side, it's clear that, with the accumulation of applied force, we will ultimately pull the mass center of the spring system towards the left, which is the red direction of non-sparse gradients. conversely, if we increase the momentum whenever θ n moves to the right side, then as the force accumulates, we will eventually push the mass center of the spring system towards the right, which corresponds to the green direction of sparse gradients. at the same time, we can clearly observe that after swapping the momentum direction, the acceleration momentum in green exhibits the same missing optimal destination. this confirms that weights have a very distinct direction specificity, and the missing is primarily caused by momentum towards θ s , it also means cifar10 is a kind of θ n lead non-sparse datasets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/596.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/596.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b806c35d1383deadb3744820baa8bec43eaedaf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/596.txt @@ -0,0 +1 @@ +first-order gradient methods have been broadly used in the training of deep neural networks. the popular first-order gradient methods, in general, can be categorized as accelerated schemes (e.g. stochastic gradient descent with momentum (sgdm) ) and adaptive methods (e.g. adam and adamw ). adaptive methods generally compute an individual stepsize (a.k.a. learning rate) for each parameter and play a significantly important role in the training of modern deep neural networks. especially, adam can attain rapid training speed and has been acting as the default choice for deep learning training.much progress on adaptive methods is built upon adam. for instance, considering the fact that adam does not generalize as well as sgd with momentum when handling image classification tasks, loshchilov et al. propose the adamw optimizer which introduces decoupled weight decay into adam and achieves competitive performance as sgdm when tackling image classification tasks. based on the observation this work was supported in part by the state administration of science, technology and industry for national defence (no. wdzc20235250118). that nesterov's accelerated gradient (nag) is empirically superior to the regular momentum, timothy dozat incorporates nesterov momentum into adam and proposes the nadam optimizer. to achieve fast convergence, comparable accuracy to sgd, and provide high stability in the training of a gan, zhuang et al. propose the adabelief optimizer. 
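One way to formalise the asymmetric-momentum rule discussed earlier in this passage, in our notation (the paper's exact rule for selecting the accelerated weight subset is not reproduced in this excerpt):

\[
v_{t+1}^{i} = \mu_i\, v_t^{i} + g_t^{i}, \qquad
\theta_{t+1}^{i} = \theta_t^{i} - \eta\, v_{t+1}^{i},
\]

where μ_i takes a larger value μ + Δ for the weights being accelerated in the current phase (chosen, as described above, by which side of the loss phase line the iterate lies on) and the default μ otherwise; whether the extra momentum is applied towards θ_n or towards θ_s is precisely what the saddle-point experiments compare.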
adabelief views the exponential moving average (ema) of the noisy gradient as the prediction of the gradient in the next time step and adapts the stepsize according to the "belief" in the current gradient direction. the advantage of adabelief over adam mainly lies in the " large gradient, small curvature" case where adabelief, unlike adam, increases the stepsize as the ideal optimizer does.it's obvious that adamw, nadam, and adablief all build based on adam but enjoy different advantages in terms of boosting adaptive methods. to combine the benefits of these three adaptive methods, we propose a new optimizer adaplus which, on the adamw basis, simultaneously integrates nesterov momentum as in nadam and precise stepsize adjustment as in adabelief. to validate the effectiveness of adaplus, we experiment with three typical machine learning tasks, including image classification with cnns on ci-far10, language modeling with lstm on penn treebank, and generative adversarial networks (gan) on cifar10. we compare adaplus with eight state-of-the-art optimzers including sgdm , adam , nadam , radam , adamw , adabelief , adamw-win , and lion . the experiment results demonstrate that adaplus outperforms the other optimizers in simultaneously achieving the goal of (i) fast convergence, (ii) good generalization ability, and (iii) high stability in the training of gans. for example, on the image classification task, adaplus yields an average test accuracy improvement of 1.97% (up to 2.36%), 1.85% (up to 2.0%), and 0.52% (up to 0.89%) over adamw, nadam, and adabelief, respectively. furthermore, on the gan training, adaplus always attains a low fid score, illustrating pretty good stability.the contributions of this paper can be summarized as follows:( (2) we conducted extensive experimental evaluations on three machine-learning tasks to validate the effectiveness of adaplus. adaplus, among all evaluated optimizers, is the best adaptive method that performs most comparable with sgdm and performs the best in simultaneously achieving the goal of fast convergence, good generalization ability, and high stability.propose the adamw optimizer which introduces decoupled weight decay into adam and achieves competitive performance as sgdm when tackling image classification tasks. that nesterov's accelerated gradient (nag)is empirically superior to the regular momentum, timothy dozatincorporates nesterov momentum into adam and proposes the nadam optimizer. adabelief views the exponential moving average (ema) of the noisy gradient as the prediction of the gradient in the next time step and adapts the stepsize according to the "belief" in the current gradient direction. the advantage of adabelief over adam mainly lies in the " large gradient, small curvature" case where adabelief, unlike adam, increases the stepsize as the ideal optimizer does.it's obvious that adamw, nadam, and adablief all build based on adam but enjoy different advantages in terms of boosting adaptive methods. to combine the benefits of these three adaptive methods, we propose a new optimizer adaplus which, on the adamw basis, simultaneously integrates nesterov momentum as in nadam and precise stepsize adjustment as in adabelief. to validate the effectiveness of adaplus, we experiment with three typical machine learning tasks, including image classification with cnns on ci-far10, language modeling with lstm on penn treebank, and generative adversarial networks (gan) on cifar10. 
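A hedged numpy sketch of an update step combining the three ingredients attributed to adaplus above (adamw-style decoupled weight decay, nadam-style nesterov momentum, adabelief-style second moment). This is our reconstruction of the general recipe, not the authors' exact adaplus update.

import numpy as np

def adaplus_like_step(theta, grad, state, lr=1e-3, beta1=0.9, beta2=0.999,
                      eps=1e-8, weight_decay=1e-2, t=1):
    """One parameter update: AdaBelief-style (g - m)^2 second moment,
    NAdam-style Nesterov look-ahead, and AdamW-style decoupled weight decay."""
    m, s = state["m"], state["s"]
    m = beta1 * m + (1 - beta1) * grad                # first moment (EMA of gradients)
    s = beta2 * s + (1 - beta2) * (grad - m) ** 2     # "belief" term instead of g^2
    m_hat = m / (1 - beta1 ** t)                      # bias corrections
    s_hat = s / (1 - beta2 ** t)
    # Nesterov-style look-ahead: mix bias-corrected momentum with the current gradient.
    m_nesterov = beta1 * m_hat + (1 - beta1) * grad / (1 - beta1 ** t)
    theta = theta - lr * m_nesterov / (np.sqrt(s_hat) + eps)
    theta = theta - lr * weight_decay * theta         # decoupled weight decay (AdamW)
    state["m"], state["s"] = m, s
    return theta, state

# usage
theta = np.zeros(10)
state = {"m": np.zeros(10), "s": np.zeros(10)}
theta, state = adaplus_like_step(theta, np.random.randn(10), state, t=1)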
we compare adaplus with eight state-of-the-art optimzers including sgdm, adam, nadam, radam, adamw, adabelief, adamw-win, and lion. the experiment results demonstrate that adaplus outperforms the other optimizers in simultaneously achieving the goal of (i) fast convergence, (ii) good generalization ability, and (iii) high stability in the training of gans. adaplus, among all evaluated optimizers, is the best adaptive method that performs most comparable with sgdm and performs the best in simultaneously achieving the goal of fast convergence, good generalization ability, and high stability. we mainly consider the "large gradient, small curvature" case in which adabelief, with precise stepsize adjustment, performs differently from other adaptive methods (e.we perform extensive comparisons with eight state-of-the-art optimizers: sgdm, adam, nadam, adamw, radam, adabelief, adamw-win, and lion. representative adaptive methods include adagrad, rm-sprop, and adam, which enjoy fast speed in the early training period yet exhibit poorer generalization ability than sgdm.this paper proposes a novel and efficient adaptive method adaplus which combines the benefits of adamw, nadam, and adabelief and does not introduce any extra parameters. the extensive experiment evaluations demonstrate that adaplus outperforms the other eight state-of-the-art optimizers in terms of simultaneously considering convergence trait, generalization ability, and training stability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/597.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/597.txt new file mode 100644 index 0000000000000000000000000000000000000000..43eea51a51d3da25b564185025f835721731b225 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/597.txt @@ -0,0 +1 @@ +reinforcement learning (rl) describes a class of problems where a learner repeatedly interacts with an unknown environment with the intention of maximizing the cumulative sum of rewards. this model has found its application in a wide array of areas, ranging from networking to transportation to epidemic control (geng et al. 2020;al-abbasi, ghosh, and aggarwal 2019;ling, mondal, and ukkusuri 2023). rl problems are typically analysed via three distinct setupsepisodic, infinite horizon discounted reward, and infinite horizon average reward. among these, the infinite horizon average reward setup holds particular significance in real-world applications (including those mentioned above) due to its alignment with many practical scenarios and its ability to capture essential long-term behaviors. however, scalable algorithms in this setup have not been widely studied. this paper provides an algorithm in the infinite horizon average reward setup with general parametrized policies which yields sub-linear regret guarantees. we would like to mention that this result is the first of its kind in the average reward setting.there are two major approaches to solving an rl problem. the first one, known as the model-based approach, involves constructing an estimate of the transition probabilities of the underlying markov decision process (mdp). this estimate is subsequently leveraged to derive policies (auer, jaksch, and ortner 2008;agrawal and jia 2017;ouyang et al. 2017;fruit et al. 2018). it is worth noting that modelbased techniques encounter a significant challenge -these algorithms demand a substantial memory to house the model parameters. 
consequently, their practical application is hindered when dealing with large state spaces. an alternative strategy is referred to as model-free algorithms. these methods either directly estimate the policy function or maintain an estimate of the q function, which are subsequently employed for policy generation (mnih et al. 2015;schulman et al. 2015;mnih et al. 2016). the advantage of these algorithms lies in their adaptability to handle large state spaces.in the average reward mdp, which is the setting considered in our paper, one of the key performance indicators of an algorithm is the expected regret. it has been theoretically demonstrated in (auer, jaksch, and ortner 2008) that the expected regret of any algorithm for a broad class of mdps is lower bounded by ω( √ t ) where t denotes the length of the time horizon. many model-based algorithms, such as, (auer, jaksch, and ortner 2008;agrawal and jia 2017) achieve this bound. unfortunately, the above algorithms are designed to be applicable solely in the tabular setup. recently, (wei et al. 2021) proposed a model-based algorithm for the linear mdp setup that is shown to achieve the optimal regret bound. on the other hand, (wei et al. 2020) proposed a model-free qestimation-based algorithm that achieves the optimal regret in the tabular setup.one way to extend algorithms beyond the tabular setting is via policy parameterization. here, the policies are indexed by parameters (via, for example, neural networks), and the learning process is manifested by updating these parameters using some update rule (such as gradient descent). such algorithms are referred to as policy gradient (pg) algorithms. interestingly, the analysis of pg algorithms is typically restricted within the discounted reward setup. for example, (agarwal et al. 2021) characterized the sample complexity of pg and natural pg (npg) with softmax and tabular parameterization. sample complexity results for general parameterization are given by (liu et al. 2020;ding et al. 2020). however, the sub-linear regret analysis of a pg-based algorithm with general parameterization in the average reward setup, to the best of our knowledge, has not been studied in the literature. this paper aims to bridge this gap by addressing this crucial problem.(wei et al.in this paper, we consider an infinite horizon reinforcement learning problem with an average reward criterion, which is modeled by the markov decision process (mdp) written as a tuple m = (s, a, r, p, ρ) where s is the state space, a is the action space of size a, r : s × a → is the reward function, p : s × a → ∆ |s| is the state transition function where ∆ |s| denotes the probability simplex with dimension |s|, and ρ : s → is the initial distribution of states. the standard policy gradient algorithm iterates the policy parameter θ as follows ∀k ∈ {1, 2, • • • } starting with an initial guess θ 1 . 2020). in the kth epoch, the algorithm generates a trajectory of length h, denoted as t k = {(s t , a t )} kh-1 t=(k-1)h , by following the policy π θ k .where ω * k := ω * θ k and ω * θ k is defined in (24), j * = j(θ * ), and π * = π θ * where θ * is the optimal parameter. let {θ k } k k=1 be defined as in lemma 5.theorem 1 dictates that the sequence {j(θ k )} k k=1 generated by algorithm 1 converges to j * with a convergence rate of o(t -1 4 + √ ǫ bias ). 
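The display that should follow "iterates the policy parameter θ as follows" did not survive extraction. Under the average-reward setting described here, a plausible reconstruction (our notation) is

\[
J(\theta) = \lim_{T \to \infty} \frac{1}{T}\, \mathbb{E}\!\left[\sum_{t=1}^{T} r(s_t, a_t)\right],
\qquad
\theta_{k+1} = \theta_k + \alpha\, \omega_k ,
\]

where ω_k is a (natural) policy-gradient estimate computed from the trajectory T_k collected with π_{θ_k}, and the garbled rate "o(t -1 4 + √ ǫ bias )" quoted just above is presumably \(\mathcal{O}\big(T^{-1/4} + \sqrt{\epsilon_{\mathrm{bias}}}\big)\).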
if assumptions 1 and 2 hold, then for k = t /h where h = 16t mix t hit √ t (log 2 t ) 2 , the following inequalities are true ∀k, ∀(s, a) ∈ s × a and sufficiently large t .where θk denotes some convex combination of θ k and θ k+1 , c √ agt mix + √ lt mix t hit , and {s k } k k=1 is an arbitrary sequence of states.summing from k = 1 to k, using the non-negativity of kl divergence and dividing the resulting expression by k, we get the desired result.where θk is some convex combination of θ k and θ k+1 and (a) follows from assumption 2. for the first term, note that, ((p π θ k+1 ) t -(p π θ k ) t )r π θ k+1 ∞ ≤ p π θ k+1 ((p π θ k+1 ) t-1 -(p π θ k ) t-1 )r π θ k+1 ∞ + (p π θ k+1 -p π θ k )(p π θ k ) t-1 r π θ k+1 ∞ (a) ≤ ((p π θ k+1 ) t-1 -(p π θ k ) t-1 )r π θ k+1 ∞ + max s p π θ k+1 (s, •) -p π θ k (s, •) 1 (77).inequality (a) holds since every row of p π θ k sums to 1 and (p π θ k ) t-1 r π θ k+1 ∞ ≤ 1. moreover, invoking (72), and the parameter update rule θ k+1 = θ k + αω k , we get, where (a) follows from (72) and the parameter update rule θ k+1 = θ k + αω k while (b) is a consequence of (32). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/598.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/598.txt new file mode 100644 index 0000000000000000000000000000000000000000..251a6cd604ede4819fe3abefc978f20646f80576 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/598.txt @@ -0,0 +1 @@ +in modern signal processing applications, the analysis of signals residing on networks, often referred to as graph signals, has become increasingly important . these graph signals emerge in a variety of fields, such as data gathered from wireless sensor networks and electroencephalography (eeg) signals recorded in brain connectivity networks. laplacian constrained gaussian graphical models (ggms) provide a powerful tool for characterizing these signals on smooth graphs , where a substantial edge weight between two vertices indicates a high similarity in their signal values. in this paper, we tackle the network topology inference problem under laplacian constrained ggms, which recasts the task as estimating the precision matrix (i.e., inverse covariance matrix) as a graph laplacian in a multivariate gaussian distribution. the zero pattern of the precision matrix reveals the network topology, offering valuable insights into how these sensors interact, which can be used to optimize the network's performance and reliability.ggms have been widely explored in the literature, with the graphical lasso serving as a prominent estimation method. this approach leverages an ℓ 1 -norm regularized gaussian maximum likelihood estimation, which has proven effective in imposing sparsity on the solution. with larger ℓ 1 -norm regularization parameters, the solution becomes increasingly sparse. in this paper, our focus lies on laplacian constrained ggms, wherein the precision matrix takes the form of a graph laplacian. interestingly, recent studies have revealed that applying the ℓ 1 -norm to learn laplacian constrained ggms results in an increased number of nonzero entries as the regularization parameter grows, yielding dense graphs rather than sparse ones. while nonconvex regularization overcomes this issue , it necessitates tuning multiple parameters. 
we introduce a graph laplacian estimation method incorporating the ℓ 0 -norm constraint, which is the most intuitive and natural approach to control the sparsity of solutions.laplacian constrained ggms have attracted growing interest in the fields of signal processing and machine learning over graphs . recent work has established that the maximum likelihood estimator (mle) under laplacian constraints exists with as few as one observation, irrespective of the underlying dimension. this finding significantly reduces the sample size requirement from n ≥ p in general ggm cases, where n and p denote the sample size and problem dimension, respectively. precision matrices in laplacian constrained ggms take the form of a graph laplacian, which enables the interpretation of the eigenvalues and eigenvectors as spectral frequencies and fourier bases . structured graph learning has been explored by leveraging spectral graph theory .generalized laplacian constrained ggms have also garnered increasing attention . these models feature nonpositive off-diagonal entries in the precision matrix, while the zero-sum condition for rows/columns is not upheld. the resulting precision matrix is a symmetric m-matrix, and such models satisfy the total positivity property , a strong form of positive dependence. the works have demonstrated that the mle for these models exists if the sample size meets the condition n ≥ 2. one approach to estimating a generalized graph laplacian is the mle , which implicitly promotes sparsity through the mmatrix constraint. the (weighted) ℓ 1 -norm regularized mle provides improved sparsity control, and several algorithms have been developed to tackle it, such as block coordinate descent , proximal point algorithm , and projected newton-like methods . the estimation of diagonally dominant m-matrices as precision matrices has been studied in .in this paper, we investigate the network topology inference problem by estimating the precision matrix as a graph laplacian under a sparsity constraint. it is important to note that conventional estimation methods for general ggms, like graphical lasso, typically utilize sparsity-promoting regularization to learn sparse graphs, as the ℓ 0 -constrained formulation does not yield optimal solutions when the sample size is smaller than the dimension (i.e., n < p). our paper presents three main contributions:• we propose a graph laplacian estimation method that incorporates the ℓ 0 -norm constraint, addressing the shortcomings of the ℓ 1 -norm regularization when estimating laplacian constrained ggms. we establish that the existence of optimal solutions can be guaranteed under laplacian constraints, even when n = 1. • we devise an efficient gradient projection algorithm to solve the resulting estimation problem with sparsity and laplacian constraints. • we conduct numerical experiments on both synthetic and real-world datasets, demonstrating the effectiveness of our proposed method in inferring network topologies.notation: ∥x∥ and ∥x∥ 0 denote euclidean norm and the number of nonzero entries, respectively. s p + and s p ++ denote the sets of positive semi-definite and positive definite matrices with the dimensions p × p, respectively. r p + represents the set of all p-dimensional vectors with non-negative real-valued components. denotes the set {1, . . . , p}. in this paper, we tackle the network topology inference problem under laplacian constrained ggms, which recasts the task as estimating the precision matrix (i. 
in this paper, our focus lies on laplacian constrained ggms, wherein the precision matrix takes the form of a graph laplacian. interestingly, recent studieshave revealed that applying the ℓ 1 -norm to learn laplacian constrained ggms results in an increased number of nonzero entries as the regularization parameter grows, yielding dense graphs rather than sparse ones. we introduce a graph laplacian estimation method incorporating the ℓ 0 -norm constraint, which is the most intuitive and natural approach to control the sparsity of solutions. precision matrices in laplacian constrained ggms take the form of a graph laplacian, which enables the interpretation of the eigenvalues and eigenvectors as spectral frequencies and fourier bases.in this paper, we investigate the network topology inference problem by estimating the precision matrix as a graph laplacian under a sparsity constraint. it is important to note that conventional estimation methods for general ggms, like graphical lasso, typically utilize sparsity-promoting regularization to learn sparse graphs, as the ℓ 0 -constrained formulation does not yield optimal solutions when the sample size is smaller than the dimension (i.• we propose a graph laplacian estimation method that incorporates the ℓ 0 -norm constraint, addressing the shortcomings of the ℓ 1 -norm regularization when estimating laplacian constrained ggms. s p + and s p ++ denote the sets of positive semi-definite and positive definite matrices with the dimensions p × p, respectively.we define a weighted, undirected graph g = (v, e, w ), where v denotes the set of vertices, e represents the set of edges, and w ∈ r p×p + is the weighted adjacency matrix with w ij denoting the graph weight between vertex i and vertex j. following from spectral graph theory, the laplacian matrix of a connected graph with p vertices has a rank of p -1. when the inverse covariance matrix, also called precision matrix, is a graph laplacian, the random vector forms a laplacian constrained ggm.we consider the problem of estimating the precision matrix as a graph laplacian, given n independent and identically distributed observations {y (k) } n k=1 . for instance, for learning structured graphs with p vertices, a connected tree graph has p -1 edges, while a circular graph has p edges. the projection p ωs∩r p + (z) can be computed efficiently by sorting the p entries of p r p + (z) and retaining only the s largest values, while setting the remaining ones to zero. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/599.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/599.txt new file mode 100644 index 0000000000000000000000000000000000000000..ad5a8906bba21b05a5e49c1106b6a09626006e90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/599.txt @@ -0,0 +1 @@ +gaussian process state-space models (gpssms) have gained significant popularity among data-driven state-space models (ssms) , due to their capability of integrating non-parametric bayesian gaussian processes (gps) as function priors within the classical ssm . this integration empowers the model to effectively learn the system dynamcis from noisy measurements with explicit uncertainty calibrations . additionally, gp is able to automatically scale model complexity based on data volume . consequently, gpssms and their variants have demonstrated successful applications in diverse domains, including human motion capture, pedestrian tracking, and navigation . 
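The projection described in the last sentence is easy to state in code; a direct numpy implementation of "clip to the nonnegative orthant, then keep only the s largest entries" (toy input, our variable names):

import numpy as np

def project_sparse_nonneg(z: np.ndarray, s: int) -> np.ndarray:
    """Project z onto {w >= 0, ||w||_0 <= s}: first project onto R^p_+,
    then retain the s largest values and zero out the rest."""
    w = np.maximum(z, 0.0)                 # projection onto the nonnegative orthant
    if s < w.size:
        keep = np.argsort(w)[-s:]          # indices of the s largest entries
        mask = np.zeros_like(w, dtype=bool)
        mask[keep] = True
        w = np.where(mask, w, 0.0)
    return w

z = np.array([0.3, -1.2, 0.9, 0.05, 0.7])
print(project_sparse_nonneg(z, s=2))       # keeps the two largest nonnegative entries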
research efforts have also focused on advancing the simultaneous learning and inference capabilities of gpssms .however, in the context of high-dimensional latent state spaces, all the existing gpssms face two primary challenges. first, current gpssm methods often resort to independent gps for modeling multiple outputs of transition functions, aiming for simplicity but overlooking their dependencies. this disregard for dependencies can lead to a model mismatch and the loss of inductive bias among the outputs . this can ultimately impede the model's generalization capacity and cause a decline in inference performance, particularly when latent states are only partially observed . second, employing separate gps to model the state transition for each latent state dimension leads to a quadratic expansion in the number of parameters, coupled with a linear rise in o(n 3 ) computational complexity, as dimensionality increases, see fig. 1. here, n represents the sample count used for computing the gp kernel matrix . this escalating computational burden and parameters proliferation can become prohibitively cumbersome, especially when dealing with high-dimensional latent spaces. consequently, addressing these two challenges becomes imperative to enhance the applicability and scalability of gpssm in practical applications. for the first challenge, existing approaches have explored potential solutions, with some using a linear model of coregionalization (lmc)-based multi-output gp to model this correlation . the transformed gp (tgp) framework has also been introduced, wherein multiple independent gps are transformed by a normalizing flow to somewhat obtain correlated outputs . despite these efforts, the persistent challenge of escalating complexity remains an obstacle across existing works, necessitating research to further enhance the applicability of gpssms in high-dimensional latent state spaces.this paper aims to address the escalating computational complexity and parameter proliferation in the gpssm while introducing a novel form of output dependence. the main contributions are summarized as follows. first, we present an innovative efficient gpssm paradigm that deviates from the standard approach of employing separate gps for each one of the latent dimensions. instead, we adopt the efficient transformed gp (etgp) , capitalizing on multiple normalizing flows to enact transformations on a shared gp across each dimension of the latent state space. this strategic shift allows us to attain streamlined modeling while effectively establishing output dependencies. second, for joint learning and inference in the proposed efficient gpssm, we propose a proficient sparse gp -based variational algorithm that enhances computational efficacy and streamlines parameter scale. third, experimental results, obtained using real and synthetic datasets, corroborate comparable performance of the proposed efficient gpssm to existing gpssms, albeit at substantial reductions in both computational complexity and parameter count.the remainder of this paper is organized as follows. some preliminaries related to gpssm are provided in section 2. section 3 introduces our proposed efficient output-dependent gpssm and the associated learning and inference algorithm. numerical results are provided in section 4. finally, we conclude the paper in section 5. 
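To make the shared-GP-plus-flows construction concrete, here is a minimal numpy sketch of the structure only: an RBF kernel, a toy invertible affine-tanh "flow", and illustrative shapes. This is not the paper's ETGP implementation or its sparse variational learning algorithm.

import numpy as np

def rbf(x1, x2, lengthscale=1.0, variance=1.0):
    """Squared-exponential kernel between rows of x1 and x2."""
    d = x1[:, None, :] - x2[None, :, :]
    return variance * np.exp(-0.5 * np.sum(d ** 2, axis=-1) / lengthscale ** 2)

def shared_gp_mean(x, z, u, jitter=1e-6):
    """Posterior mean of the single shared GP at inputs x, conditioned on
    inducing inputs z with inducing outputs u."""
    k_zz = rbf(z, z) + jitter * np.eye(len(z))
    k_xz = rbf(x, z)
    return k_xz @ np.linalg.solve(k_zz, u)

def elementwise_flow(f, a, b):
    """A toy per-dimension 'normalizing flow' (invertible affine-tanh map).
    The real ETGP flows are richer; this only illustrates the structure."""
    return a * np.tanh(f) + b * f

dx = 4                                   # latent state dimension (illustrative)
x_prev = np.random.randn(1, dx)          # previous latent state
z = np.random.randn(20, dx)              # inducing inputs of the shared GP
u = np.random.randn(20)                  # inducing outputs of the shared GP
f_shared = shared_gp_mean(x_prev, z, u)  # one shared-GP value, not dx of them
flow_params = [(1.0 + 0.1 * d, 0.5) for d in range(dx)]
f_next = np.array([elementwise_flow(f_shared[0], a, b) for a, b in flow_params])
print(f_next.shape)   # (4,): all outputs derive from one GP, hence correlated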
second, employing separate gps to model the state transition for each latent state dimension leads to a quadratic expansion in the number of parameters, coupled with a linear rise in o(n 3 ) computational complexity, as dimensionality increases, see fig. instead, we adopt the efficient transformed gp (etgp), capitalizing on multiple normalizing flowsto enact transformations on a shared gp across each dimension of the latent state space. second, for joint learning and inference in the proposed efficient gpssm, we propose a proficient sparse gp-based variational algorithm that enhances computational efficacy and streamlines parameter scale. typically, a gp is employed to represent a distribution over random functions f (x) : r dx → r, given by:.where µ(x) is a mean function, often set to zero in practice; k θgp (x, x ′ ) is a covariance/kernel function; θgp represents a set of hyperparameters that are tuned for model selection. specifically, given a noise-free training dataset d = {x, f } = {xi, fi} n i=1 , the posterior distribution p(f (x * )|x * , d) at any test input x * ∈ x follows a gaussian distribution, fully characterized by the posterior mean ξ and the posterior variance ξ:.where kx,x represents the covariance matrix evaluated on the training input x, with each entry given by i,j = k(xi, xj); kx * ,x denotes the cross-covariance matrix between the test input x * and the training input x. based on these configurations, the joint distribution of the proposed gpssm, augmented with inducing points, is(10)where p(⃗ u) = n (⃗ u|0, k ⃗ z,⃗ z ), and the distribution of the transition function outputs p(ft|xt-1, ⃗ u) are determined by the shared gp p( ft|xt-1, ⃗ u) and the normalizing flows, where.is the gp posterior distribution with xt-1 being the test input while (⃗ z, ⃗ u) being the training data, see eqs.learning and inference within the gpssm is plagued by the intractability of p(⃗ y) = p(⃗ y, ⃗ x, ⃗ f , ⃗ u)d⃗ xd ⃗ f d⃗ u. variational inference methods involve approximating the intractable posterior distribution p(⃗ x, ⃗ f , ⃗ u|⃗ y) = p(⃗ y,⃗ x, ⃗ f ,⃗ u) p(⃗ y) with a variational distribution q(⃗ x, ⃗ f , ⃗ u), leading to a learning and inference objective function, the evidence lower bound (elbo), denoted as l,. in this paper, we adopt a specific form for the variational distribution q(⃗ x, ⃗ f , ⃗ u) given by q(x0) t t=1 p(xt|ft)p(ft|xt-1, ⃗ u)q(⃗ u), where q(⃗ u) = n (⃗ u|m, s) with m ∈ r m and the covariance matrix s ∈ r m×m representing the free variational parameters.with the proposed variational distribution, q(⃗ x, ⃗ f , ⃗ u), and after. we first use the reparametrization trick to sample the (1-d) gp function value, ft, from q( ft|xt-1) by conditioning on the latent state xt-1, where q( ft| xt-1) = e q(⃗ u) = n ( ft| µ t|t-1 , s t|t-1 ),(15)with.we then push the sampled function value ft into the dx normalizing flows, {g θ d (•)} dx d=1 and get the transformed gp function value ft, as shown in eq. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/6.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/6.txt new file mode 100644 index 0000000000000000000000000000000000000000..2af6f28254c11518442e160039fc292afc24548e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/6.txt @@ -0,0 +1 @@ +e volutionary optimization algorithms (eoas) as well as their applications have grown manifold in last decades. 
thus, it is necessary to develop effective evaluation methodology that helps in better understanding the performance of the algorithm. eoas are evaluated primarily from the perspective of solution quality and convergence. solution quality is measured on the basis of solutions obtained in different trials of the algorithm. on the other hand, empirical convergence analysis is mostly done on the basis of solutions obtained in different iterations for a single trial of the algorithm. majority of evaluation techniques focus on solution quality of eoas, which include non-parametric approaches , parametric approaches , , statistical tests , bootstrapping , , drift analysis , exploratory landscape analysis (ela) , theoretic analysis , and visual analysis approaches - etc. whereas, convergence analysis in general is done through visual inspection of graphical presentation of solutions obtained in different iterations sequentially - .although many approaches have been developed, parametric and non-parametric approaches are widely used for analyzing solution quality of eoas. most of these approaches incorporate results obtained on benchmark functions or some specific problems. basic statistical measures such as standard deviation, mean, median, maximum, minimum are estimated for comparing performance of eoas. however, comparison of statistics obtained for two algorithms is an indirect approach as the solutions obtained with two algorithms are not compared directly. most importantly, same values of statistics do not imply that the solutions of two algorithms are exactly the same. for instance, two algorithms may have the same mean values, but this certainly does not imply performance of both the algorithms are same as well. this is the reason why along with mean, standard deviation and other statistics are often considered and interpreted together. however, observing multiple statistics to draw conclusion poses added difficulty in the performance comparison process of eoas.in this paper, a direct comparison approach is proposed to analyze performance of eoas, where solutions obtained with two algorithms are compared directly. a direct comparison matrix called prasatul matrix is prepared based on the optimality and comparability level of the solutions obtained with two different algorithms. the prasatul matrix is used to design five different performance measures as well as ranking schemes for eoas. the key features of the proposed approach are as follows:• unlike comparing statistics such as mean and standard deviation, the solutions obtained with two algorithms are directly compared and recorded in the prasatul matrix. • the five measures (section ii-d) which are designed based on prasatul matrix are equally capable of comparing eoas both in the grounds of solution quality and convergence.• unlike comparing scores obtained with two algorithms, here the score itself gives the comparative outcome of solutions obtained with two eoas (section iii-a). interpretation of proposed measures are done in pairs of two eoas.• score-driven ranking schemes (section iii-b) designed based on prasatul matrix are also capable of ranking eoas both in the grounds of solution quality and convergence. • to best of our knowledge, for the very first time this work introduces score-driven comparison as well as ranking for analyzing convergence of eoas.rest of the paper is organized as follows. 
section ii describes the proposed direct comparison approach covering preliminary definitions, the prasatul matrix, different measures designed and the algorithm for generating prasatul matrix for a pair of two eoas. section iii details about how the newly designed measures can be utilized for direct comparison of multiple algorithms and ranking both in terms of quality and convergence. section iv details about the experimental setup. section v presents the results on several benchmark functions. finally, concluded in section vi. if i th solution p i obtained with algorithm a p is equal to the solution q i of algorithm a q then p i is termed as in comparability level 2. if i th solution p i obtained with algorithm a p is greater than the solution q i of algorithm a q then p i is termed as in comparability level 3. if i th solution p i obtained with algorithm a p lies in between the optima (o) and universe best (u b ) including o and u b then p i is termed as in optimality level 1. if i th solution p i obtained with algorithm a p lies in between the universe best (u b ) and universe mean (u σ ) including u σ then p i is termed as in optimality level 2. if i th solution p i obtained with algorithm a p lies in between the universe best (u σ ) and universe worst (u w ) including u w then p i is termed as in optimality level 3.the prasatul matrix l is prepared by directly comparing the best solutions obtained with the primary algorithm a p and the best solutions obtained with the alternative algorithm a q for n trials.where, high o 1 value is interpreted as best solutions of algorithm a p are better than best solutions of algorithm a q . here, high o 2 and o 3 values are interpreted as average solutions of algorithm a p are better than average solutions of algorithm a q and worst solutions of algorithm a p are better than worst solutions of algorithm a q respectively.where, high c 1 values indicate the solutions of algorithm a p that are better than algorithm a q are the best solutions of a p in terms of optimality. here, high c 2 values indicate that solutions of algorithm a p that are same as algorithm a q are the best solutions of a p in terms of optimality. however, high c 3 values indicate that the solutions of algorithm a p that are worst in comparison to algorithm a q are the best solutions of a p in terms of optimality.where, high k c 1 value indicates that overall solutions are better compared to other algorithm, as comparing algorithm mostly wins irrespective of whether it is best, average or worst.where, high k c 2 value indicates that overall solutions are same as other algorithm, as comparing algorithm mostly ties irrespective of whether it is best, average or worst.where, high k c 3 value indicates that overall solutions are worse compared to other algorithm, as comparing algorithm mostly ties irrespective of whether it is best, average or worst.where, high kt value indicates that overall the algorithm a p are better than that of algorithm a q be it optimality or comparability. 
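A small sketch of how the prasatul matrix could be assembled from paired best-of-trial solutions. Minimisation is assumed, comparability level 1 is assumed to mean p_i < q_i (its definition is not reproduced in the excerpt above), and the thresholds o, u_b, u_σ, u_w are supplied by the user.

import numpy as np

def comparability_level(p, q, tol=1e-12):
    """Level 2 if equal, level 3 if p > q; level 1 (p < q) is an assumption here."""
    if abs(p - q) <= tol:
        return 2
    return 3 if p > q else 1

def optimality_level(p, u_best, u_mean):
    """Levels follow the thresholds quoted above (minimisation assumed):
    1: within [o, u_b], 2: within (u_b, u_sigma], 3: within (u_sigma, u_w]."""
    if p <= u_best:
        return 1
    return 2 if p <= u_mean else 3

def prasatul_matrix(p_solutions, q_solutions, u_best, u_mean):
    """3x3 count matrix L[optimality-1, comparability-1] over n paired trials."""
    L = np.zeros((3, 3), dtype=int)
    for p, q in zip(p_solutions, q_solutions):
        i = optimality_level(p, u_best, u_mean) - 1
        j = comparability_level(p, q) - 1
        L[i, j] += 1
    return L

# toy example: best-of-trial objective values (lower is better) for two EOAs
p_sol = np.array([0.02, 0.10, 0.40, 0.05, 0.90])
q_sol = np.array([0.03, 0.10, 0.20, 0.50, 0.80])
print(prasatul_matrix(p_sol, q_sol, u_best=0.05, u_mean=0.5))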
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/60.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/60.txt new file mode 100644 index 0000000000000000000000000000000000000000..ed2f901586298db544a56a0dbdfde33daf1e2fe8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/60.txt @@ -0,0 +1 @@ +with increasing interest in, and monetisation of, the value of forest ecosystems for mitigation and adaptation to climate change, new pressures are growing for large-scale rapid, accurate and robust monitoring approaches . forests are home to up to 80% of terrestrial biodiversity , are a globally important store and sink of carbon , , and are therefore target ecosystems for enhancing carbon storage and sequestration, for biodiversity loss prevention, and for climate regulation. accordingly, afforestation and forest restoration are central to many international initiatives (e.g. the bonn challenge , afr100 and ecca30 ) and individual countries' national action plans .charting progress towards these goals, as well as effective monitoring of deforestation, degradation, and forest responses to climate change, relies on efficient large-scale, low-cost monitoring to, where possible, automate data collection and processing. in the last decade or so there has been a rapid increase in the availability and uptake of remote sensing technologies (from the ground, air and space) that are capable of collecting data at high enough resolution to identify individual trees. these new data sources have the potential to substantially increase the spatiotemporal scale of information available about the state and functioning of forests, compared to manual ground measurements, but come with challenges of interpretation . nevertheless, use of these sensors has led to an explosion in the size, type and complexity of datasets available, and therefore the need for new methods of analysis drawn from data science to rapidly extract critical, ecologically relevant, information from them. further, the way in which the field has developed has brought particular challenges which make intercomparison of studies, and therefore methodological development, difficult. one particular reason for this is the common approach of using small-scale data for development due to a lack of widely accepted and representative benchmarking datasets. here we present a perspective of what is to be gained by the creation and use of such datasets, and a vision for what properties they need to be of most use to the community.while artificial intelligence (ai) shows impressive promise in tackling some of the big challenges of rapid, ecologically relevant, forest monitoring, including individual tree and species identification , biomass and carbon estimation , forest health and invasive species detection , and disturbance and degradation characterisation , method development has largely occurred in ad-hoc ways on small datasets. data collection approaches in the discipline are not standardised, with differences in sensor choice and collection strategy often according to research group experience, equipment cost and availability, and ecosystem type. differences in data resolution and quality are inherent in the ways in which they are collected in the field. 
differences in data collected for the same task arise from: different types of sensors and differences within single sensor types; different ecosystems with highly varied levels and types of complexity (including structural, spectral, and diversity), and different approaches to collection and survey methods. such heterogeneity in the structure of data means we can expect different ai methods to perform the same task very differently on different datasets , making easy choice of an approach impossible for a user. this confusion slows the development of robust and standardised approaches needed for the large-scale application of these technologies to solve pressing environmental problems.rapid increases in both data volume and processing capacity mean that the field can and should now switch from small, single-sensor, single-ecosystem method development to multi-sensor multi-ecosystem benchmark data sets upon which algorithms should be tested. the gains to be made from such a switch -through development of robust methods, reduced researcher effort and barriers to entry, and increased transferability of methods to new ecosystems and sensors -are substantial. differences in data collected for the same task arise from: different types of sensors and differences within single sensor types; different ecosystems with highly varied levels and types of complexity (including structural, spectral, and diversity), and different approaches to collection and survey methods.high resolution remote sensing data analysed with ai methods have the potential to increase the information available for a wide range of forest monitoring applications by orders of magnitude, including to automate at large scale knowledge about properties currently understood only at small spatial scale with manual approaches. here, we identify a series of use cases where the impact of widespread uptake of use of artificial intelligence methods with such data will be high, and where methodological development is commonly carried out on small sample sizes, making assessing the value of a method for a different sensor or ecosystem difficult, and so for which use of benchmarking dataset is particularly important:. to require standardised data collection for inclusion in benchmarking would not only significantly reduce the data available, but also increase risk of future redundancies as sensor resolution increases. forest ecosystems vary enormously in structural complexity, so the same data collection techniques can result in data with highly variable amounts of information depending on the location and ecosystem type. for three-dimensional data from sensors like lidar, this is most markedly because of greater occlusion in complex canopies, leading to much less information about structural elements further from the sensor (for ground based, lower density canopy data, and for airborne, lower density ground and trunk data), whereas for two-dimensional imagery, multi-layered forests with high stem density may have less clear discrimination between crowns, making crown and tree detection much harder. there is a mismatch between the geographical location of the highest diversity and highest carbon forests, and dataset coverage for key forest properties, and ground truth data for many use cases relies on diverse and highly specialised data collection that is not always well recognised or rewarded (seefor a discussion of fair rewards for grassroots communities collecting tropical forest data). 
these methods are of particular value in forest monitoring, where data labelling, particularly for pointbased three dimensional data, is extremely time consuming. flight height, line density or ground scanning density, and survey team expertise); the data processing, including geographic referencing and precision, orthorectification methods applied, and any potential data distortions introduced, for example through image alignment.a goal of the field must be to collate and standardise the use of high quality, well documented data that, for a given use case, is representative of as wide a variety of types and conditions of ecosystems as possible, and which is accompanied by high-quality ground truth information to validate accuracy of methods and quantify uncertainty of outputs. model performance compared against multi-sensor data, real or simulated, can also feed back into informing further fieldwork, including cost-effective data collection for different use cases, and even direct future sensor development. where choice of method depends on the sensor or structure of data, multi-sensor data can inform the creation of multi-pass classifiers, with the first pass classifying the type of data or forest, and the choice of model or classifier for the second task selected according to the best performing algorithm given the data. instead, simulated data can massively increase data size available for training, and, as long as they are presented alongside guidance on representative forest types, are a valuable source of training information (e. for many ecosystems and use cases the collection of more data is less important than the collation and curation of what already exists, particularly when large-scale data may exist but is not yet curated for our benchmarking needs (e.). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/600.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/600.txt new file mode 100644 index 0000000000000000000000000000000000000000..8fd3702dfd9a8db9834af408c4d459965dc442a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/600.txt @@ -0,0 +1 @@ +although machine learning models can achieve high prediction performance in a well-controlled environment, they may not be as accurate as expected when actually applied, or their accuracy may deteriorate over time. reasons for this include the fact that the initial training data does not adequately cover the operational environment, or that the operational environment is unstable and continues to change from moment to moment, i.e., concept drift . in such a situation, it is necessary to update the model from time to time using data that come in one after another to maintain the prediction accuracy of the model .one of the problems in updating a model while operating it is how to obtain samples to be used for updating . this is not so much of a problem when the groundtruth labels can be obtained immediately, as in the case of time-series prediction of stock prices or electricity consumption . however, when human measurement and annotation are required to obtain the groundtruth labels, the cost is often an obstacle.active learning is a technique for selecting useful samples for training to improve model performance with the lowest possible labeling cost. there are two types of active learning: pool-based approaches and streaming approaches . 
in a pool-based approach, the process of extracting a batch of valuable samples from a pre-prepared pool of samples and updating the model is repeated. this is often accomplished by preparing a utility evaluation function and comparing its value among samples. in a streaming approach, each time a sample arrives, a decision is made on whether or not to label it and update the model. streaming active learning is a more difficult problem than pool-based active learning because it requires an evaluation function that can output meaningful absolute values rather than relative values. several existing studies have addressed streaming active learning, especially for classification problems. in the classification problem, the maximum value of the probability of belonging to each class output by the classifier can be used as the utility. this value is bounded to the interval [1/k, 1], where k is the number of classes, and is easy to use as an absolute utility. however, similar studies in regression problems have rarely been undertaken. this is because there is no easy-to-use evaluation function like class-wise probability in regression problems, and absolute evaluation is difficult since the output of the model can take arbitrary values. the few existing methods even have drawbacks, such as being only applicable to specific regression models, e.g., fuzzy systems and linear models. in this paper, we propose a streaming active learning method for regression problems using regression via classification (rvc), which transforms a regression problem into a multi-class classification problem. by using rvc, the target variables are no longer real-valued but discrete classes, and thus arbitrary classifiers can be used to solve regression problems. this approach is simple but effective, and at the same time, it is useful in that it allows various methods that have been proposed for streaming active learning for classification problems to be applied directly to regression problems. we demonstrate the effectiveness of the proposed method by evaluating it on a variety of real-world datasets. streaming active learning is a more difficult problem than pool-based active learning because it requires an evaluation function that can output meaningful absolute values rather than relative values. in this paper, we propose a streaming active learning method for regression problems using regression via classification (rvc), which transforms a regression problem into a multi-class classification problem. this approach is simple but effective, and at the same time, it is useful in that it allows various methods that have been proposed for streaming active learning for classification problems to be applied directly to regression problems. during inference, the belonging class of input x is first estimated and the estimate is then transformed into a regression value. since rvc uses a classifier in (2), the confidence value can be used as a certainty in the same manner as in conventional studies on classification. with this technique, various methods of active learning developed for classification problems based on uncertainty can be directly applied to regression problems. algorithm 1 shows the streaming active learning framework for regression used in this study, which is a simplified version of that used in the previous study. for the proposed rvc-based utility estimation, three streaming active learning algorithms that were originally proposed for classification tasks are used in this study.
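A minimal sketch of the RVC-based streaming loop described above, assuming scikit-learn's SGDClassifier as the incremental classifier; the equal-width binning and the fixed confidence threshold are simplifications, not the authors' Algorithm 1.

import numpy as np
from sklearn.linear_model import SGDClassifier

class RVCStreamingAL:
    """Regression via classification for streaming active learning: discretise the
    target range into k bins, train a classifier on bin labels, and use its
    confidence (max class probability) as the absolute utility."""

    def __init__(self, y_min, y_max, k=10, confidence_threshold=0.5):
        self.bins = np.linspace(y_min, y_max, k + 1)
        self.centers = 0.5 * (self.bins[:-1] + self.bins[1:])
        self.clf = SGDClassifier(loss="log_loss")
        self.threshold = confidence_threshold
        self.classes = np.arange(k)
        self.fitted = False

    def predict(self, x):
        """Regression via classification: predicted bin -> bin centre."""
        if not self.fitted:
            return float(self.centers.mean())
        return float(self.centers[int(self.clf.predict(x.reshape(1, -1))[0])])

    def utility(self, x):
        """Classifier confidence; low confidence means labelling is valuable."""
        if not self.fitted:
            return 0.0
        return float(self.clf.predict_proba(x.reshape(1, -1))[0].max())

    def observe(self, x, oracle):
        """Process one streaming sample; query the oracle only when uncertain."""
        if self.utility(x) < self.threshold:
            y = oracle(x)
            c = int(np.clip(np.digitize(y, self.bins) - 1, 0, len(self.centers) - 1))
            self.clf.partial_fit(x.reshape(1, -1), [c], classes=self.classes)
            self.fitted = True

# usage: stream samples, querying a (hypothetical) labelling oracle when uncertain
learner = RVCStreamingAL(y_min=0.0, y_max=10.0, k=10)
for x in np.random.rand(100, 5):
    learner.observe(x, oracle=lambda x_: float(10 * x_.mean()))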
since these utility values have different ranges, that is, utilities from qbc can take any value greater than or equal to zero while those from rvc are bounded to , we evaluated the relationship between absolute error and rank of utility instead of absolute utility value. note that even if qbc and rvc are comparably good utility estimators, the rvc-based one is advantageous in that its utilities allow direct application of the variety of active learning methods proposed for classification problems.25, 0. in conclusion, it can be said that methods developed for classification problems can be directly applied to regression problems by using rvc, and that if a method is superior for classification problems, it will also be superior for regression problems. rvc has the potential to easily extend the method proposed for classification problems to regression problems in scenarios other than streaming active learning, e.in this paper, we proposed a utility estimation method for regression problems and demonstrated its effectiveness in streaming active learning. in the proposed method, we used the rvc framework to convert regression problems into classification problems and thus many conventional methods of streaming active learning for classifications can be directly applied. experimental results on four real datasets showed the proposed method improved streaming active learning for regression problems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/601.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/601.txt new file mode 100644 index 0000000000000000000000000000000000000000..35dff29a1dd408e0e632ad789a4b4d24755779d2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/601.txt @@ -0,0 +1 @@ +off-policy evaluators are a crucial component in the development of many real-world recommender systems. they allow us to estimate the performance of a new target recommendation policy based on interaction data logged from a different logging policy (for instance, the current production recommender), thereby reducing the need to run slow and costly a/b tests.many counterfactual off-policy estimators are based on the inverse propensity scoring (ips) principle .given a stochastic logging policy and some mild assumptions, ips-based estimators are unbiased, but often suffer from high variance. this is true even on industrial-scale data sizes; in particular, if the logging policy is close to being deterministic. intuitively speaking, most ips estimators contain propensity ratio weights of the form 𝑤 = 𝑝 target /𝑝 logging , where 𝑝 target is a target propensity (e.g., the probability that the target policy recommends a particular action to the user) and 𝑝 logging is the logging propensity (e.g., the probability that the logging policy recommended that same action to the user). these ratios can become arbitrarily large for small logging propensities, which then leads to high variance in the overall estimate.the literature has proposed various variance-reduction techniques for ips-style estimators, including weight clipping , self-normalization , doubly-robust estimators , as well as generalizations of those ideas . 
in this article we revisit weight clipping, which is still used extensively due to its simplicity (it does not require a reward model) and its generality (it is readily applicable to ips-style estimators used in more complex real-world applications, such as ranking or slate recommendation, where self-normalized or doubly-robust estimators are not available or difficult to implement). the basic idea of weight clipping is to simply avoid large propensity weight ratios by (hard-)clipping the ratios by a constant upper bound 𝑈 , which is usually treated as a hyper-parameter for the estimation procedure. just like other variance-reduction techniques, the clipping procedure effectively reduces the variance of the ips estimator at the cost of introducing a bias. unlike other techniques, however, the bias introduced by clipping is always pessimistic. in other words, on average, the estimator underestimates the true expected reward (under the technical assumption that rewards are always non-negative), as illustrated in figure 1a. in this work, we exploit this property of the clipping bias, so as to obtain more accurate estimates. specifically, we clip the propensity ratios from both sides rather than just from above, thereby potentially correcting pessimistic underestimates with optimistic overestimates. experiments with synthetic data show that this approach leads to a reduction in mse. (figure 1a: mean (solid line) and corresponding standard error bands of reward estimates across 100 repetitions as a function of clipping constants 𝑈 (for both cips and dcips) and 𝐿 = 𝑈 (only for dcips); the dashed red line shows the true reward of the target policy, i.e., the estimation target; the dotted grey line shows the average logging reward observed in the data set. figure 1b: mean squared error (mse) between reward estimate and true target reward across 100 repetitions; the dashed lines show the variance components, the dotted lines show the squared-bias components for both estimators.)
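The following minimal sketch, under assumed variable names and a toy synthetic setup, shows how the one-sided clipping just described (CIPS) and the two-sided "double" clipping idea differ only in the interval into which the propensity ratios are clipped before averaging.

```python
# Hedged sketch of clipped IPS estimation; the Monte Carlo setup is purely illustrative.
import numpy as np

def ips_estimate(rewards, p_target, p_logging, upper=np.inf, lower=0.0):
    """Clipped IPS: clip the propensity ratios w = p_target / p_logging into
    [lower, upper] before averaging. lower=0 recovers one-sided clipping (CIPS);
    a positive lower bound gives the two-sided ("double") clipping idea."""
    w = p_target / p_logging
    w_clipped = np.clip(w, lower, upper)
    return np.mean(w_clipped * rewards)

# tiny illustration on synthetic logged data
rng = np.random.default_rng(0)
n = 10_000
p_logging = rng.uniform(0.01, 0.5, size=n)    # logging propensities of the logged actions
p_target = rng.uniform(0.0, 1.0, size=n)      # target propensities of the same actions
rewards = rng.binomial(1, 0.3, size=n).astype(float)

print("unclipped IPS      :", ips_estimate(rewards, p_target, p_logging))
print("CIPS  (U=10)       :", ips_estimate(rewards, p_target, p_logging, upper=10))
print("dCIPS (U=10, L=0.1):", ips_estimate(rewards, p_target, p_logging, upper=10, lower=0.1))
```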
if the clipping constant 𝑈 is higher than the highest attainable propensity weight ratio 𝑤 (𝑥, 𝑦) across all requests, then the clipped ips estimator essentially becomes the standard, unbiased ips estimator. as soon as the clipping constant becomes "active" in the sense that it starts clipping propensity weight ratios, then the bias is always strictly negative assuming non-negative rewards (ignoring the trivial case in which all clipped requests have zero expected reward). this allows the intuitive interpretation of dcips as an estimator that regularizes towards the mean of the logging policy reward, where the prior variance is determined by both clipping constants 𝑈 and 𝐿. in other words, we can try to tune the lower clipping constant 𝐿 so as to compensate for the bias introduced by the upper clipping constant 𝑈 . based on d, we estimate the expected reward of a new target policy using clipped ips evaluators with different clipping constants. the lower clipping of dcips compensates for some of the large bias suffered by the cips evaluator (for a given point on the x-axis, both estimators use the same upper clipping constant 𝑈 , and thus the difference in biases reflects the bias compensation from using lower clipping as well). we plan to study algorithms to select clipping constants for dcips in a data-driven way and to investigate theoretically when the bias of double clipping is less than that of standard clipping. this led to a situation where both logging and target policy had full support (all actions had a positive probability of being selected) but the logging policy "favored" actions in the order 𝑎 8 , 𝑎 7 , 𝑎 6 , . . . ; the data (300 in total) collected by the logging policy was then used to estimate the target policy reward using different ips estimators. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/602.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/602.txt new file mode 100644 index 0000000000000000000000000000000000000000..16eb04e31fc6e69dd35f2aad88597d7155ef6268 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/602.txt @@ -0,0 +1 @@ +basic advantages of lfads-torch over previous implementations are eager execution, improved modularity, and less engineering boilerplate code. the advanced functionality spans three main categories: modules ( §2.1), configuration ( §2.2), and large-scale runs ( §2.3). modern technologies for monitoring neural systems include a wide range of modalities, many of which record noisy, high-dimensional observations. these data are often processed and denoised to yield representations of activity that are useful for studying neural function, or for decoding or classifying behaviors. neural population models (npms) accomplish this denoising by leveraging spatiotemporal structure in neural recordings, yielding denoised activity patterns on a single-trial basis and millisecond timescale. npms based on artificial neural networks offer significant performance and modularity advantages over previous approaches, as architectures and loss functions can easily be modified to support new modeling goals and data modalities.
latent factor analysis via dynamical systems (lfads) is one such model that has matured over the last several years to support automated hyperparameter tuning, electromyography (emg) and calcium imaging modalities, and stabilization of long-term recordings. finally, model configuration was handled using a command-line interface, so alternative architectures had to be implemented using control flow within the code itself. pytorch introduced dynamic computation graphs and fast eager execution, which allow an intuitive model development and debugging workflow. together, these technologies provide an opportunity to substantially lower the barriers to modeling neural data with lfads and further enhance its capabilities. in this work we introduce lfads-torch, an implementation of lfads and related models that uses modern python libraries to achieve intuitive and user-friendly development and debugging. another noteworthy change is the separation of the data used as input (encod data) from the data being reconstructed (recon data). in lfads-torch, a flexible configuration process gives users significant control over model and architecture configuration without needing to edit source code. while the lfads class and other training objects can be instantiated directly like any other python class, we provide flexible machinery for instantiating and training many models in parallel with different configurations for hyperparameter tuning. for users who want to perform large-scale hyperparameter tuning using autolfads with minimal setup and infrastructure costs, we made lfads-torch available on neuro-caas for a convenient drag-and-drop modeling workflow. we then trained paired models with identical hyperparameters and initializations on several nlb datasets (5 ms; mc maze, mc rtt, area2 bump, dmfc rsg; figure 1) and found that final losses were nearly identical across the hyperparameter space. lfads-torch is an implementation of lfads that leverages modern deep learning frameworks, allowing easier application, robust hyperparameter tuning, and experimentation with new architectures and training methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/603.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/603.txt new file mode 100644 index 0000000000000000000000000000000000000000..c80a248ff7e4444f02c9128bcdfd0cc25095f1bd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/603.txt @@ -0,0 +1 @@ +in fields regarding communication networks, graph generative models have a wide variety of applications such as network synthesis for simulations, emulation of information spreading on networks, link prediction on social networks, etc. generative models for graphs can be categorized into two types: stochastic and learning-based models. stochastic models focus on reproducing only a single-aspect feature of graphs (e.g., scale-free feature). on the other hand, learning-based models aim to learn features directly from a graph dataset and reproduce graphs that have similar features to the graph dataset, thus reproducing features in every single aspect. we have proposed a generative model for graphs, graphtune, that allows continuous tuning of specific features while maintaining the reproducibility of the other graph features. (this work was partly supported by jsps kakenhi jp23h03379.)
although graphtune has succeeded in making features of a generated graph change depending on user-specified values, there still remains an issue on the tuning accuracy of features. in this paper, we propose to extend graphtune by adding a feature estimator that feeds back information on features of graphs reconstructed by graphtune. these two models, the long short-term memory (lstm)-based feature estimator and graphtune, are trained independently of each other by an alternate training algorithm to avoid target leakage of features. graphtune with the feature estimator enables tuning specific features more accurately than graphtune while keeping the reproducibility of graphtune in every single feature. we propose an accurate generative model for graphs that extends graphtune by adding an lstm-based model called a feature estimator (see fig.). the feature estimator estimates values of features of a generated graph and computes an error between the estimated values and elements of a condition vector (i.e., the values of the features of input graphs). in other words, the feature estimator feeds back information on features in generated graphs to neural networks in the graphtune part, thereby allowing more accurate tuning of features. graphtune part: the graphtune part in our model learns to reconstruct a sequence from an input sequence and a condition vector in the same manner as the original graphtune. feature estimator: the feature estimator learns to estimate values of features of an output graph from a sequence reconstructed by the graphtune part. the error between the estimates and the values of the features of the input graph is added to the loss of the graphtune part to provide direct feedback on the accuracy of feature reproduction. if the two parts were trained jointly, the condition vector (i.e., the value of the feature of input graphs) that is input to the graphtune part may leak to the feature estimator side, and appropriate training cannot be expected. therefore, an alternate training algorithm is applied to train the graphtune part and the feature estimator alternately. the parameters of the graphtune part in the proposed model and of graphtune as a comparison method were set according to the paper of graphtune.
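A rough PyTorch-style sketch of the alternate training scheme just described is given below. The module interfaces (a GraphTune part returning a reconstruction and a reconstruction loss, an estimator mapping reconstructed sequences to feature values), the per-batch alternation, and the loss weight lam are illustrative assumptions rather than the paper's exact implementation.

```python
# Hedged sketch of alternate training: the feature-estimation error is fed back
# into the GraphTune part, while the estimator itself is trained on detached
# reconstructions so the condition vector cannot leak into it.
import torch

def train_alternately(graphtune, estimator, loader, epochs, lam=1.0):
    opt_g = torch.optim.Adam(graphtune.parameters())
    opt_f = torch.optim.Adam(estimator.parameters())
    for _ in range(epochs):
        for seq, cond in loader:                      # graph sequence + condition vector
            # (1) update the GraphTune part: add the feature-estimation error on
            #     the reconstructed sequence to the reconstruction loss
            recon, recon_loss = graphtune(seq, cond)
            feat_hat = estimator(recon)
            loss_g = recon_loss + lam * torch.nn.functional.mse_loss(feat_hat, cond)
            opt_g.zero_grad()
            loss_g.backward()
            opt_g.step()

            # (2) update the feature estimator only, on a detached reconstruction,
            #     so no gradient (and no condition information) flows back from it
            opt_f.zero_grad()                          # also clears grads accumulated in step (1)
            feat_hat = estimator(recon.detach())
            loss_f = torch.nn.functional.mse_loss(feat_hat, cond)
            loss_f.backward()
            opt_f.step()
```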
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/604.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/604.txt new file mode 100644 index 0000000000000000000000000000000000000000..fc3a076736b5c00888055bb8710686d9155aa7fa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/604.txt @@ -0,0 +1 @@ +the universal approximation property (uap) refers to the capability of neural networks to approximate a wide range of functions. as this property forms the foundation for the efficacy of neural networks, it has garnered significant interest within the research community.initial research focused mainly on two-layered multilayer perceptrons (mlps). cybenko (1989) demonstrated that two-layered mlps with sigmoidal activation functions possess the uap for approximating continuous functions. later, leshno et al. (1993) expanded the scope of activation functions to more general ones. in addition to two-layered mlps, extensive investigation has been conducted into the uap of deep, narrow mlps. these mlps have a constrained width and an arbitrary number of layers. given the common use of mlps with relatively modest widths and more than two layers in practical scenarios, the uap of deep, narrow mlps has attracted significant interest.in this regard, a series of studies have been undertaken to determine the minimum width, which is the necessary and sufficient width for the uap. the minimum width depends on factors such as the input dimension d x , the output dimension d y , the activation function, and the type of norm employed. for instance, lu et al. (2017) demonstrated that deep, narrow mlps with relu activation functions possess the uap, leading to further research that narrowed down the minimum width range. hanin and sellke (2017) extended the study to encompass arbitrary output dimensions d y . johnson (2018) showed that a width of d x is insufficient to achieve the uap in continuous function spaces, while kidger and lyons (2020) proved that a dimension of d x + d y + 2 is sufficient. on the other hand, park et al. (2020) presented the optimal minimum width for deep, narrow mlps with relu activation functions in l p space. furthermore, cai (2022) explored the lower bound of the minimum width for arbitrary activation functions.in this paper, we concentrate on the universal approximation of continuous functions under the uniform norm. the previous results concerning uniform approximation are organized in table 1. so far, research on the minimum width for approximations under the uniform norm using continuous activation functions has suggested that the minimum width lies between max(d x + 1, d y ) and d x + d y . recently, li et al. (2023) claimed that the upper bound could be reduced to max(d x + 1, d y ) + 1 dx+1=dy . on the other hand, kim et al. (2023) proved that the lower bound equals or exceeds d y +1 if d y is less than or equal to 2d x . this leads to the contradiction d y + 1 ≤ w min ≤ d y for d x + 2 ≤ d y ≤ 2d x . therefore, there should be a more rigorous proof of the minimum width for the uniform approximation of continuous functions.in this context, we provide rigorous upper and lower bounds for the minimum width required for deep, narrow mlps to possess the uap. it is substantiated by proving that the minimum width for deep, narrow mlps with leaky-relu activation function is equal to a geometrical function denoted as w(d x , d y ). 
w(d x , d y ) is the required dimension of diffeomorphisms for approximating arbitrary continuous functions with d x -dimensional input and d y -dimensional output. this is built upon the concept of the uap of invertible neural networks. specifically, we employ the result of teshima et al. (2020), which demonstrated that approximating arbitrary c 2 -diffeomorphisms is equivalent to approximating arbitrary single-coordinate transformations. we prove that deep, narrow mlps are capable of approximating single-coordinate transformations, thereby confirming their capability to approximate c 2 -diffeomorphisms. using the above statement, we provide some upper and lower bounds. by leveraging classical results from topological geometry, we establish that any continuous function can be approximated by mlps with width max(2d x + 1, d y ). moreover, we provide the non-trivial lower bound 4 for the case of input and output dimensions two, which establishes the necessity of the framework. our contributions are as follows: • we suggest the purely topological indicator w(d x , d y ), which is equal to the optimal minimum width for the uap of deep, narrow mlps with leaky-relu activation function. • building on the above results, we prove that a deep, narrow mlp with width max(2d x + 1, d y ) + α(σ) can approximate any continuous function in c(r dx , r dy ) on a compact domain, where 0 ≤ α(σ) ≤ 2 is a constant depending on the activation function. • we prove that width 4 is the optimal minimum width for a deep, narrow mlp to approximate an arbitrary continuous function from a compact set k ⊂ r 2 to r 2 . • for a d-dimensional vector x ∈ r d , x i will denote the i-th component of x; that is, x = (x 1 , x 2 , . . . , x d ). for a function f ∈ c(x, y ) and a set x ′ ⊂ x, f | x ′ denotes the restriction of the function to the domain x ′ . the lemma directly implies the subsequent corollary: deep, narrow mlps with leaky-relu activation function can approximate a deep, narrow mlp with an increasing activation function and the same width. for a multidimensional function from r d to r increasing with a coordinate x d , we can freely change the value when x d is large, while the value remains unaffected when x d is small, giving a map of the form (x 1 , . . . , x d-1 , g 0 (x)) such that g 0 (x 1:d-1 , 0) = u 0 (x 1:d-1 ). let x be a compact topological space.
as the image g(γ(s 1 )) is compact, the image in s 1 × r can be embedded in the annuls {(x 1 , x 2 ) ∈ r|1 -ϵ ≤ |x 1 | + |x 2 | ≤ 1 + ϵ}. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/605.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/605.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c9d647f615078367bc58409000ce52088d3581e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/605.txt @@ -0,0 +1 @@ +stochastic multi-armed bandits (lattimore and szepesvári, 2020) are a sequential decisionmaking framework where, during each interaction round, the learner selects an arm and observes a sample drawn from its reward distribution. contrary to regret minimization problems, where the agent aims at maximizing the cumulative reward, in best-arm identification (bai) scenarios (even-dar et al., 2002), the agent's primary focus lies in computing the arm with the highest expected reward (i.e., the optimal arm) as accurately and efficiently as possible. more specifically, in the fixed-confidence setting, given a maximal risk parameter δ, the agent's primary focus is on identifying, with probability at least 1 -δ, the optimal arm with a minimum number of samples. nevertheless, the sequential interaction protocol of classical bai settings, in which the agent has complete control of the arm being pulled at each round (i.e., at each step, the agent chooses which arm to query), fails to adequately represent various decision-making problems that are of importance. in fact, in some relevant scenarios, the agent possesses only partial or no control over the arms being played. consider, indeed, the following examples.• off-policy learning. off-policy learning is a crucial aspect of decision-making theory that has gathered significant attention, especially within the reinforcement learning (rl) community (sutton and barto, 2018). here, the agent continuously observes, at each round, actions sampled from a fixed behavioral policy, together with the corresponding rewards. the goal, here, consequently, lies in exploiting these off-policy interactions to identify the best arm with high probability.• active off-policy learning. this scenario generalizes the off-policy setting previously presented. in this case, multiple behavioral policies are available to the agent. the learner can decide which behavioral policy to query to quickly identify the optimal arm. in practice, these behavioral policies can be, for instance, those of experts with the skill necessary to perform a subset of actions within the arm set. another relevant example might arise in scenarios with human feedback (li et al., 2019), where multiple humans can perform actions on the agent's behalf according to some private and personal policy.as we can see, these scenarios cannot be properly modeled with the usual bandit interaction protocol as the agent has limited or no control on the arms being pulled during each interaction round. for this reason, in this work, we study a strict generalization of the classical bai framework that circumvents the limits of complete controllability that is typical of bandit frameworks. to this end, we introduce the best-arm identification problem under mediators' feedback, where the learner has access to a set of mediators, each of which will query arms on the agent's behalf according to some stochastic, possibly unknown and fixed behavioral policy. 
the mediator will then communicate back to the agent which action it has played, together with the observed reward realization. in this setting, the agent's goal lies in sequentially choosing which mediator to query to identify with high probability the optimal arm while minimizing the sample complexity. as one can verify, such formalism decouples the arms' pulls from the agent's choices, thus allowing to properly model all the scenarios depicted above. given a bandit model ν with k arms, the learner cannot directly sample rewards from each arm ν a , but, instead it can query a set of e mediators, each of which is described by a possibly unknown and fixed behavioral policy π e ∈ ∆ k . for brevity, we adopt the symbol π as a shortcut for the set of mediators' policies (π e ) e e=1 .assumption 1 for any a ∈ there exists e ∈ such that π e (a) > 0. given ω ∈ σ e , we define π(ω) ∈ σ k , where πa (ω) = e e=1 ω e π e (a) denotes the probability of playing an arm a when sampling mediators according to ω.due to lemma 13, the sampling strategy, the assumption on the mediators's policies, and the law of large numbers we know that e is of probability 1. let p min (a) = min e π e (a), and define the the event j t as.lemma 16 (lemma 35 in degenne and koolen (2019)) there exists a constant t ǫ such that for t ≥ t ǫ it holds that on e t , c-tracking verifies:.lemma 17 there exists a constant t ǫ such that for t ≥ t ǫ it holds that on e t ∩ e ′ t (3/4), c-tracking verifies:.lemma 18 suppose there exists t 0 ∈ n such that, for all t ≥ t 0 , e t ∩e ′ t ⊂ τ δ ≤ t .proof using lemma 17, for t ≥ t ǫ , on the event e t ∩ e ′ t it holds that for every t ≥ √ t :.therefore, the condition of lemma 19 are satisfied, and we can use t 0 defined as in lemma 19 to apply lemma 18. the following lemma, combined with the proof of lemma 14, directly implies that p µ,π (e c t ) ≤ bt exp(-ct 1/8 ) holds. at this point, let t be such that h(t ) ≥ e 2 , then due to lemma 10, n e (t) ≥ √ t -e for every mediators' e.the new definition of e t plays a role in the equivalent of lemma 16 that needs to be derived for the unknown policy setting (for which we report the proof below for completeness).lemma 21 there exists a constant t ǫ such that for t ≥ t ǫ it holds that on e t , c-tracking with unknown mediators' policies verifies:. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/606.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/606.txt new file mode 100644 index 0000000000000000000000000000000000000000..633d7d03e715d898d8681fe38f9bddf6025f516b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/606.txt @@ -0,0 +1 @@ +auxiliary classifier generative adversarial network (acgan) is a specific type of generative adversarial network (gan) that is used when the data consists of multiple classes. in addition to classification, gans can be used to generate new "deep fake" data instances that resemble the training data.a gan has two neural networks, a generator and a discriminator, that compete in an adversarial zero-sum game. the generator produces new pieces of data that are as close to the training data as possible. the discriminator attempts to determine whether the input it receives-some of which comes from the generator and some of which comes from the actual training data-is generated or authentic. 
the discriminator and generator weights are updated in a way that incentivizes the generator to produce "fake" data that is similar to the training data, and incentivizes the discriminator to accurately diagnose if a sample is fake or real .an acgan works similarly, except that the discriminator also returns the class it thinks the data belongs to. the acgan incentivizes the generator to produce believable fakes that conform well to a specific class, while the discriminator is incentivized to accurately diagnose fake samples and classify the data. the authors ofembed malware in a learning model by carefully selecting weights that have minimal effect on model performance, and then overwrite these weights with the malware sample." among other contributions, this paper includes experiments consisting of modifying the least significant bits of model weights, and they propose a plausible trigger mechanisms for malware that is embedded in a machine learning model.to determine the overall capacity of a model, we find the number of bits n that must be overwritten for a 1% drop in accuracy, as compared to the original trained model, which has no bits of its weights overwritten.there are 100 weights in the output layer, and 34,048 weights in the hidden layer, which makes the total number of weights 34,148 in this particular mlp model. our cnn model has 5130 weights in the output layer and 1,484,544 weights in the internal layers, for a total of 1,489,674 weights. our inceptionv3 model has 10,240 weights in the output layer, and 2,097,152 weights in the dense layer, for a total of 2,107,392 trained weights. for the pre-trained weights, we observe a drop of about 1% in accuracy at 15 bits, followed by a steep drop at 16 bits, and hence we consider 14 bits as the perweight capacity with respect to the pre-trained weights. there are 21,802,784 weights in the pre-trained inceptionv3 layer, so even with its lower per-weight capacity of 14 bits, the total steganographic capacity of the pre-trained weights is large, at 38.in this particular xception model, the hidden layers have 29,046 weights, and the output layer contains 5130 weights, for a total of 34,176 trained weights.8, we found that the per-weight capacity for the pre-trained layers of the inceptionv3 model was just 14 bits, as compared to 25 bits for its trained weights. in spite of this low per-weight capacity, the number of pre-trained weights in inceptionv3 is large, and hence the steganographic capacity is large-if we consider all weights, the capacity is 44.all of the trained learning models underwent a similar testing procedure: we first determined the accuracy of a model on the test set, then we embedded information in the n low-order bits of the weights, for n = 1, 2, . for generic deep learning models, we experimented with the output layer weights, the hidden layer weights, and all of the weights, while for pre-trained models, we considered the trained weights. these results were also reasonably consistent across the various layers of the models, with the only notable exception being the pre-trained weights of the inceptionv3 model, which had a lower per-weight steganographic capacity. our results indicate that standard 32-bit weights do not yield a significant improvement in accuracy over what could be achieved with, say, 16-bit weights, and for some models, 8-bit weights would be more than sufficient. 
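A hedged sketch of the capacity measurement described above: the n least-significant bits of each 32-bit weight are overwritten with payload bits and the model is re-evaluated to see where accuracy starts to drop. The helper names and the evaluation callback are illustrative assumptions, not the paper's actual code.

```python
# Minimal sketch: embed payload bits in the low-order bits of float32 weights
# and sweep n to estimate per-weight steganographic capacity.
import numpy as np

def embed_in_low_bits(weights, payload_bits, n):
    """Overwrite the n least-significant bits of each float32 weight with payload bits."""
    w = np.asarray(weights, dtype=np.float32)
    flat = w.ravel().copy()
    as_int = flat.view(np.uint32)
    as_int &= np.uint32(0xFFFFFFFF) << np.uint32(n)     # clear the n low bits
    # pack n payload bits per weight, repeating the payload cyclically to fit
    per_weight = np.resize(np.asarray(payload_bits, dtype=np.uint32), (flat.size, n))
    values = (per_weight << np.arange(n, dtype=np.uint32)).sum(axis=1, dtype=np.uint64)
    as_int |= values.astype(np.uint32)
    return as_int.view(np.float32).reshape(w.shape)

def capacity_sweep(weights, payload_bits, evaluate_fn, max_bits=24):
    """evaluate_fn is assumed to rebuild the model with the given weights and
    return test accuracy; capacity is read off where the drop exceeds ~1%."""
    baseline = evaluate_fn(weights)
    for n in range(1, max_bits + 1):
        acc = evaluate_fn(embed_in_low_bits(weights, payload_bits, n))
        print(f"n={n:2d}  accuracy drop = {baseline - acc:.4f}")
```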
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/607.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/607.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d6b0d1b22d8f4e9da11d0eefcf0bd778617d4e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/607.txt @@ -0,0 +1 @@ +understanding how genes are causally related and mapping those interactions is a crucial early step in drug discovery. representing the causal underlying model of a biological system at the cell level as a directed graph can advance our understanding of disease-relevant mechanisms and thus help identify potential targets for the development of new medicines (nelson et al., 2015;yu et al., 2004;chai et al., 2014;akers & murali, 2021;hu et al., 2020). recent technological advancement in single-cell transcriptomics enables directly targetting individual genes and repressing their expression (dixit et al., 2016;datlinger et al., 2017;2021). given the perturbational nature of this data, causal discovery methods that leverage interventional data may be an attractive candidate approach to infer gene interaction networks. to test this hypothesis and facilitate the progress towards that goal, chevalley et al. (2022) introduced causalbench, a large-scale benchmark suite for evaluating network inference methods on single-cell perturbation gene expression data. causalbench operates on two recent public crispr perturbation datasets (replogle et al., 2022) of unprece-dented scale and introduces novel evaluation metrics that are biologically relevant. however, early results from evaluating existing state of the art network inference methods with this benchmark highlighted that the performance of the tested methods did not scale with additional perturbation data provided. more surprisingly, interventional methods were observed not to outperform methods that did not utilise perturbation data, as they seem to only marginally leverage the additional interventional signal. these observations suggest a large gap for interventional causal discovery methods between their reported performance on synthetic and real-world data. to remedy this and to advance the state of the art in gene interaction network inference on single-cell perturbation data, we organised a machine learning community challenge named the causalbench challenge (cbc) https://www.gsk.ai/causalbench-challenge/. the aim behind the challenge was to inspire the machine learning community to work on this critical task and to advance the state of the art.in this report, we present the design of the challenge in terms of the goals, logistics, metrics and evaluation procedures. we also describe the computational platform used to evaluate the submitted methods. we then summarize the idea behind some of the best-performing and most insightful submissions and conduct an analysis of the performance of them to establish a new state of the art for the task of gene network inference from single-cell perturbation data at the time of the causalbench challenge (cbc2023)., 2004;chai et al.proposed methods significantly improve utilization of the interventional data the submitted methods show significant utilization of the interventional data as can be seen in fig. 
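One concrete way to exploit the interventional signal, used by the BetterBoost entry described in the passage that follows, is a per-edge two-sample test: for a candidate edge i -> j, compare the expression of gene j in cells where gene i was knocked down against observational cells, and correct the resulting p-values for multiple testing. The sketch below is a hedged illustration with assumed data-access conventions (a cells-by-genes matrix and a per-cell intervention label), not the submission's actual code.

```python
# Hedged sketch: KS test per candidate edge with Benjamini-Hochberg correction.
import numpy as np
from scipy.stats import ks_2samp
from statsmodels.stats.multitest import multipletests

def edge_pvalues(expr, interventions, candidate_edges):
    """expr: cells x genes matrix; interventions[c] = index of the gene knocked
    down in cell c, or -1 for observational cells; candidate_edges: list of (i, j)."""
    obs_mask = interventions == -1
    pvals = []
    for i, j in candidate_edges:
        int_mask = interventions == i            # cells where gene i was perturbed
        if int_mask.sum() < 10:                  # too few interventional cells to test
            pvals.append(1.0)
            continue
        _, p = ks_2samp(expr[int_mask, j], expr[obs_mask, j])
        pvals.append(p)
    # Benjamini-Hochberg correction across all candidate edges
    _, p_adj, _, _ = multipletests(pvals, method="fdr_bh")
    return p_adj

# Edges can then be ranked by (adjusted p-value, -observational_score), so that
# significant interventional effects come first, with ties broken by a
# GRNBoost-style score computed from observational data alone.
```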
this indicates that they leverage the interventional data, which is a major improvement compared to the previous state of the art, where methods showed no benefit from the additional interventional data.betterboost -inference of gene regulatory networks with perturbation data(nazaret & hong, 2023)betterboost builds upon grnboost to harness the power of interventional data obtained from perturb-seq experiments.in grnboost, each directed interaction from a gene i to a gene j is assigned a score g i,j , indicating the predictive capability of gene i for the target gene j. but with additional labeled interventional data, one can attempt to identify the true causal parents of a target gene j by looking at the effects of interventions on the candidate parents for i. in particular, for a true causal parent i, we expect that when i is knocked down, there will be a statistically significant shift in the distribution of observed unique molecular identifiers (umis) of gene j between observational and interventional data.to formulate the new score utilized by betterboost in ranking the impact of gene i on gene j, we compute the predictive score g i,j obtained from grnboost and the benjamini-hochberg corrected p-value p i,j from the ks test, which measures the impact of knocking down gene i on gene j.05, the ranking primarily favors edges with small p-values (derived from combined interventional and observational data), followed by edges with the highest grnboost scores g i,j (obtained solely from observational data).we calculated the pearson correlations for all directed gene pairs ⟨g i , g j ⟩ which indicated g i affect g j . g i and g j were genes with expression measurements from the columns of the expression matrix e and i ̸ = j. let the i, j be the column indexes of g i and g j , i ′ , j ′ be the row indexes for all records intervened by g i and g j , and n be the row indexes of all observational records, we first retrieved the intervened expression data: e i ′ ,i and e i ′ ,j , and concatenated the observational data sampled from e n,i and e n,j with the same lengths as the interventional data. when the interventional data was unavailable, the two vectors for correlation were only the observational data e n,i and e n,j . we first row-wise normalized the expression matrix with the z-score normalization, and for the gene pair ⟨g i , g j ⟩, we extracted four features: e n,i , e n,j , the average observational expression of g i and g j , and e i ′ ,i , e i ′ ,j , the average intervened expression by g i . to also test that the methods demonstrated an improvement in performance given more interventional data, we computed the difference in mean wasserstein distance between the 25% and 100% ratios of interventional data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/608.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/608.txt new file mode 100644 index 0000000000000000000000000000000000000000..8386acef345c0744044e87d7921b1fc2d80a486e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/608.txt @@ -0,0 +1 @@ +the increasing prevalence of edge devices in the era of the internet-of-things (iot) and cyber-physical systems has resulted in a significant upsurge in available data. these devices, functioning within a federated network, present unique challenges. in particular, traditional machine learning methodologies often require a central server for data processing, raising privacy concerns . 
in response to these concerns, federated learning (fl) has emerged as an innovative solution - for edge devices to collectively train a global model using their locally stored data, eliminating the need for direct data sharing , . however, federated settings are frequently besieged by outlier data, often exhibiting heavy-tailed distributions , . these outliers can considerably distort the accuracy of learning outcomes. nevertheless, developing robust and computationally efficient solutions for managing outlier data in fl remains a critical research question, which this paper aims to address in the context of localization.localization is the process of estimating the location of an object or an event within a given environment. this problem has given rise to a broad range of applications in vehicle localization , aviation , healthcare , environmental , and industrial fields . based on the type of measurements this work was supported by the research council of norway. used to estimate position, the localization problem has different formulations which include time-of-arrival (toa) , received-signal-strength (rss) , time-difference-of-arrival (tdoa) , angle-of-arrival (aoa) , and frequencydifference-of-arrival (fdoa) . in iot applications, toa and rss are particularly effective as they provide relatively simple and cost-effective ways to estimate distances using the existing communication infrastructure. therefore, in this paper, we focus on distance-based localization that inherently encompasses toa and rss techniques, as they can be interpreted as distance measurements . this approach finds relevance across a spectrum of practical scenarios and continues to be an area of research interest.conventionally, various methods have been proposed for localization problems, including the parallel projection method , projection-onto-convex-sets method , nearest local minimum , boundary-of-convex-sets , recursive weighted least-squares algorithms , augmented lagrangian-based method for localization , and iterative re-weighted least-squares . however, these methods have limitations, such as being centralized or sequentially updating estimates, which hinders parallelization and flexibility. more recently, a parallel distributed alternating projection algorithm (dapa) has been proposed for the distributed case, formulating the localization problem as a ring intersection problem . despite the advancements of dapa, challenges remain, particularly in handling three-dimensional localization problems and targets lying outside the sensor's convex hull . to address these limitations, an alternative approach called el-admm has emerged. el-admm directly solves the non-convex and non-smooth event localization problem using the alternating direction method of multipliers . by bypassing the need for high-computation convex relaxation techniques, el-admm shows promise in handling threedimensional environments . however, el-admm still faces challenges in outlier handling and lacks comprehensive convergence proof. given these constraints, there is a compelling research imperative to develop a robust, distributed algorithm capable of efficiently handling three-dimensional localization problems with outliers.in this paper, we address a challenging class of robust localization problems that require tackling non-convex and nonsmooth optimization within a federated setting. 
the decentralization, energy constraints, and the presence of outliers further exacerbate the difficulty of accurately estimating locations in such settings. to overcome these challenges, we propose a novel distributed sub-gradient-based algorithm specifically designed to address this problem. our contributions can be summarized as follows:• efficient optimization algorithm: we introduce a novel optimization algorithm that operates within a single loop framework. this algorithm directly solves the localization problem in its original form, utilizing simple updating steps. the use of this approach leads to increased accuracy and eliminates the need for iterative approximation processes. • theoretical analysis: we provide comprehensive theoretical insights into the convergence behavior of our algorithm. through rigorous mathematical analysis, we elucidate the conditions under which our method is guaranteed to converge, thereby offering deeper insights into its reliability and robustness. • empirical validation: we substantiate our theoretical results through extensive numerical simulations. notably, our algorithm exhibits exceptional resilience in the presence of heavy-tailed noise and outliers. furthermore, a comparative study with the state-of-the-art el-admm algorithm highlights the superior accuracy and comparable fast convergence rate of our approach. mathematical notations: scalars are denoted by lowercase letters, column vectors by bold lowercase letters, and matrices by bold uppercase letters. the transpose of a matrix is signified by (•) t . the jth column of a matrix a is represented as aj. the element in the ith row and jth column of a is represented as a ij . the sub-gradient of a function f (•) at a given point u is signified by ∂f (u). nevertheless, developing robust and computationally efficient solutions for managing outlier data in fl remains a critical research question, which this paper aims to address in the context of localization. used to estimate position, the localization problem has different formulations which include time-of-arrival (toa), received-signal-strength (rss), time-difference-of-arrival (tdoa), angle-of-arrival (aoa), and frequencydifference-of-arrival (fdoa).conventionally, various methods have been proposed for localization problems, including the parallel projection method, projection-onto-convex-sets method, nearest local minimum, boundary-of-convex-sets, recursive weighted least-squares algorithms, augmented lagrangian-based method for localization, and iterative re-weighted least-squares. more recently, a parallel distributed alternating projection algorithm (dapa) has been proposed for the distributed case, formulating the localization problem as a ring intersection problem. despite the advancements of dapa, challenges remain, particularly in handling three-dimensional localization problems and targets lying outside the sensor's convex hull. el-admm directly solves the non-convex and non-smooth event localization problem using the alternating direction method of multipliers. given these constraints, there is a compelling research imperative to develop a robust, distributed algorithm capable of efficiently handling three-dimensional localization problems with outliers.in this paper, we address a challenging class of robust localization problems that require tackling non-convex and nonsmooth optimization within a federated setting. 
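A hedged sketch of one diffusion-plus-sub-gradient iteration of the kind outlined here (and detailed just below) is given next; the absolute-residual range loss, the uniform neighbor averaging, and the variable names are illustrative assumptions rather than the paper's exact DSRL update.

```python
# Minimal sketch of a distributed diffusion + sub-gradient step for range-based
# localization with an outlier-robust (absolute residual) loss.
import numpy as np

def dsrl_step(estimates, anchors, ranges, neighbors, step_size):
    """estimates[k]: node k's current guess of the source location (3-vector);
    anchors[k]: node k's own position; ranges[k]: node k's distance measurement;
    neighbors[k]: indices of nodes whose estimates node k can read (including itself)."""
    new_estimates = np.empty_like(estimates)
    for k in range(len(estimates)):
        # diffusion step: average the estimates received from neighbors
        x = estimates[neighbors[k]].mean(axis=0)
        # sub-gradient of the robust range loss |  ||x - a_k|| - r_k  | at x
        diff = x - anchors[k]
        dist = np.linalg.norm(diff)
        residual = dist - ranges[k]
        if dist > 1e-12:
            subgrad = np.sign(residual) * diff / dist
        else:
            subgrad = np.zeros_like(diff)        # any element of the sub-differential
        new_estimates[k] = x - step_size * subgrad
    return new_estimates
```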
this algorithm directly solves the localization problem in its original form, utilizing simple updating steps.the maximum-likelihood estimate for the source location x with additive independent and identically distributed gaussian noise affecting range measurements can be determined as the solution of the optimization problem:.the proposed algorithm for localization in a distributed setting called distributed sub-gradient method for robust localization (dsrl), updates each node's estimate of the actual parameter x * through a diffusion step and a sub-gradient update step.xa i = 0 (6) algorithm 1 summarizes the proposed method for solving distributed robust localization. the convergence of our proposed dsrl algorithm is substantiated by verifying that all the prerequisites set forth in , inclusive of the regularity conditions of our robust localization function, are met.in this section, we assess the performance of the proposed dsrl algorithm via simulations and draw comparisons with the distributed event localization via the alternating direction method of multipliers (el-admm) algorithm, as outlined in.this paper presents a sub-gradient algorithm devised for the distributed robust localization problem that directly tackles non-convex and non-smooth objective functions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/609.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/609.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac7d8821125f1e2c4ab3fbbc563eb36f53d6ac22 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/609.txt @@ -0,0 +1 @@ +using synthetic or artificially generated data in training ai algorithms is a burgeoning practice with significant potential. it can address data scarcity, privacy, and bias issues and raise concerns about data quality, security, and ethical implications. this issue is heightened in the global south, where data scarcity is much more severe than in the global north. synthetic data, therefore, addresses the problem of missing data, leading, in the best case, to better representation of populations in datasets and more equitable outcomes. however, we cannot consider synthetic data to be better or even equivalent to actual data from the physical world. in fact, there are many risks to using synthetic data, including cybersecurity risks, bias propagation, and simply an increase in model error. this policy brief proposes recommendations for the responsible use of synthetic data in ai training and the associated guidelines to regulate the use of synthetic data.the objective of this policy brief is to explore the potential of synthetic data to accelerate the attainment of the sdgs through ai in the global south while mitigating its important risks. this policy brief proposes recommendations for the responsible use of synthetic data in ai training and the associated guidelines to regulate the use of synthetic data.synthetic data offer numerous opportunities, such as rebalancing biased datasets, protecting data privacy, and reducing the cost of data collection.data availability: synthetic data can overcome limitations associated with data scarcity, enabling more robust ai training and development.data availability synthetic data can address data availability and representation concerns by "completing" training datasets for ai systems. 
in the healthcare industry, pii is removed or de-identified before using real-world healthcare data to generate synthetic data, allowing ai models to be trained on realistic data while protecting patients' privacy (sdg3) xi .nevertheless, there are many risks to using synthetic data, such as data quality, cybersecurity, misuse, bias propagation, ip infringement, data pollution and data contamination. third, if the quality of the real-world data used to train the model is high, the quality of the synthetic data will also be high because the model can learn from real-world data and generate similar synthetic data.security risks: synthetic data, if reverse-engineered, could potentially reveal information about the underlying real data or the process used to generate it, posing security risks. re-identification is therefore a real risk for synthetic data, especially if the source data used is published with the synthetic data, or if the model used to create the synthetic data "overfits" the training data, meaning that it too closely resembles the original dataset. use diverse data sources when creating synthetic datasets: when generating synthetic data, it is crucial to utilize a variety of data sources to ensure that the data are as diverse and has many independent characteristics as feasible. this may involve the use of both collected, real-world data and data from other sources, such as simulations, expert knowledge or participatory data from citizens xvi . disclose or watermark all synthetic data and its provenance: it is essential to disclose where all synthetic data comes from and how it was produced xvii , and comply with any intellectual property protection provision. calculate and disclose quality metrics for synthetic data: once the synthetic data has been generated, its quality must be evaluated to ensure that it is suitable for the intended application xviii . this requires being aware of the limitations of synthetic data and using it without misleading or deceiving the users of the synthetic data. link synthetic data to global ai governance efforts: in ongoing global ai governance efforts, including those recommended by the united nation's multistakeholder advisory body on artificial intelligence, it will be essential to establish a working group on synthetic data to ensure that the risks outlined are addressed globally and comprehensively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/61.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/61.txt new file mode 100644 index 0000000000000000000000000000000000000000..139671c66f78406d3219aabc20b7fde5714c852e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/61.txt @@ -0,0 +1 @@ +during the last decade, the growth of machine learning algorithms has been relevant to the point of being successful in areas of science as well as in the daily life of humans. some areas include health, image processing, games, autonomous cars, recommended content, among others . many of these algorithms use artificial neural networks (ann) that are known as a black box system or a combination of ann and phenomenological models known as gray box systems , . an ann allows to infer an output value that depends on a combination of input values, however, the process for determining the inference value is based on empirical data. as a result, these systems lack credibility in environments where the decisions need to be selected in such a way so that they are correct and grounded. 
for example, in the case of diagnosing illnesses, an algorithm needs to be capable of considering different alternatives and determine and catalog the test correctly.furthermore, there exist other autonomous learning algorithms mainly used for solving sequential decisions problems, known as reinforcement learning (rl) . in this type of algorithm, an agent needs to learn to make a decision through trial and error to solve a task formulated as a markovian decision problem. thus, commencing from a state s t , an agent chooses an action a t that allows the transition to the next state s t+1 , obtaining a reward signal r t+1 to evaluate the quality of the action selected from state s t . the main idea is that through observation of the reward at each step, the agent will be capable of refining a policy that allows it to select actions to receive a higher reward at the end of the task . similarly for the rl algorithms, a person without knowledge of artificial intelligence does not know the form or aspects that are considered by the agent to select an action . in rl methods, the hierarchical approach is based on the ability of cognitive beings to resolve complex challenges by dividing them into more tractable smaller parts. in addition, it is possible to learn new tasks quickly through the sequence of the behaviors learned, although the task requires various lowlevel actions . for example, humans can learn new tasks quickly by classifying the parts learned, including even if the task requires millions of low-level actions, such as muscular contractions. hierarchical reinforcement learning (hrl), an extension of rl, models these problems in order to make the agents represent complicated behaviors as a short sequence of high-level actions. as a result, the agent can solve more complex problems. therefore, if some solutions require a great number of low-level actions, the hierarchical policy could be converted into a sequence of high-level actions .in this regard, explainable artificial intelligence (xai) is the area that seeks to provide the ability to those systems in order to be able to explain its behavior in such a way that it is understandable to humans , . likewise, explainable reinforcement learning (xrl) emerges as a sub-task of xai . since this subarea is focused on rl, the methods for making the system capable of providing an explanation are based on stages from the learning process. these methods may be based on: relevant features, the learning and markov decision process (mdp), or at policy level .diverse techniques have been used in order to be able to explain behavior in hierarchical environments. for instance, just as dividing the tasks into different levels where the highest level groups the lowest level tasks and trains an agent for each arxiv:2212.06967v1 14 dec 2022 level . also, based on human behavior to navigate through a room or simply that the model learns to carry our basic tasks and afterwards put together these basic tasks to carry out new ones. in this research, we sought to provide another alternative so that the models would be capable of providing an explanation in hierarchical contexts. using the memorybased explainable reinforcement learning method , we proposed analyzing the probabilities of success for different low-level tasks in a hierarchy, in addition to obtaining a global probability of success, for the completed task. the probability of success may be used as a basis for explaining the behavior of an autonomous agent in a hierarchical environment. 
the explanations generated are offered in a natural language representation with the ability to be better understood by not only rl practitioners, but also by any person with no knowledge of the area .our main contribution is the extension of a memory-based explainability method for hierarchical scenarios. the rest of the paper is structured as follows. the next section reviews the background and related works. the third section introduces the explainability method used in this work, i.e., hierarchical explainable reinforcement learning. section 4 describes the experimental scenario and section 5 shows the results obtained during experiments. finally, section 6 depicts the main conclusions and possible future work. thus, commencing from a state s t , an agent chooses an action a t that allows the transition to the next state s t+1 , obtaining a reward signal r t+1 to evaluate the quality of the action selected from state s t . using the memorybased explainable reinforcement learning method, we proposed analyzing the probabilities of success for different low-level tasks in a hierarchy, in addition to obtaining a global probability of success, for the completed task. the explanations use the probability of successfully completing the task by selecting an action in a state, in addition to the number of transitions the agent carries out to accomplish the task. once the task in finished, the agent may give an explanation based on probabilities of success; or a counterfactual explanation of why the agent selected one action over another, using more comprehensible language to a non-expert user.• third high-level task: once the spaceship has escaped from the black holes in the corner and collected the shield, the agent needed to solve the next high-level task, that was to find the wormhole that corresponded to exiting, or rather, reaching state 7 with the shield. for example, in state 21, the action go down showed 100% probability of success since always when the agent chose to move down from state 21 would escape the black-holes zone and complete the first highlevel task. the action go left or right from state 11 showed a low probability of success but was not null, as the agent would move away from the goal but still had a probability of escape. the agent with the probability of success as a basis could respond using a template in the following way: i did not move to the left since carrying out this action, i would only have a 25% probability of escaping the black holes while going down, i have a 60% probability of escaping.in the second high-level task, the agent had state 31 as the starting point and 93 as goal state where it sought to collect the shield.considering this scenario, when the agent finds itself in state 83, and carries out the action to move down, the user could ask: why did you move down with the last action? based on the probabilities of success computed, the agent using a template could respond: i moved down because in doing so, i have a 100% probability of collecting the shield. for example, from state 16 and executing the action to move up, it was possible to ask the agent: why did you not move to the left in the last action? then, the agent could respond using a template in the following way: i did not move to the left because doing so, i would have only have a 30% probability of reaching the wormhole and of returning home, while moving up i have an 80% probability of completing the mission successfully. 
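A minimal sketch of the memory-based bookkeeping behind such explanations is shown below: an episodic memory counts, for every state-action pair, how often choosing that action eventually led to completing the (sub)task, and an explanation template quotes the resulting empirical probabilities. The class and method names, and the wording of the template, are illustrative assumptions rather than the authors' implementation.

```python
# Hedged sketch: empirical probability-of-success memory for explanation templates.
from collections import defaultdict

class SuccessMemory:
    def __init__(self):
        self.counts = defaultdict(lambda: [0, 0])     # (state, action) -> [successes, trials]

    def record_episode(self, transitions, success):
        """transitions: list of (state, action) pairs visited in one episode."""
        for state, action in transitions:
            stats = self.counts[(state, action)]
            stats[0] += int(success)
            stats[1] += 1

    def probability_of_success(self, state, action):
        successes, trials = self.counts[(state, action)]
        return successes / trials if trials else 0.0

    def explain(self, state, chosen, alternative, task="the task"):
        p_c = self.probability_of_success(state, chosen)
        p_a = self.probability_of_success(state, alternative)
        return (f"i chose '{chosen}' because it gives me a {p_c:.0%} probability of "
                f"completing {task}, while '{alternative}' gives only {p_a:.0%}.")
```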
the matrix for the global probabilities of success, unlike the individual matrices, did not represent a particular high-level task, but it combined the three individual matrices together in order to obtain the general probabilities of success for the global problem. overall, although the highest probabilities of success were obtained by completing the different high-level tasks, none reached 100% success, as pointed out above, as no action could guarantee escape.in this work, we have demonstrated that with the memorybased explainable reinforcement learning method using hierarchical training, it is possible for a learning agent to learn how to escape a scenario while computing probabilities of success to be used for explanations. the global matrix showed coherent probabilities of success for the agent when executing an action starting from a specific state being, therefore, a good basis to be used for generating explanations that can be understood by non-expert users. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/610.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/610.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a439d78b9b20d1ba915c69c247fef8a78ee530a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/610.txt @@ -0,0 +1 @@ +a number of self-explainable deep models for image data have been recently developed that sport high performance without giving up on interpretability . at a high level, they follow a twolevel architecture that first maps input images to a set of interpretable, high-level "concepts" (using, e.g., a convolutional neural network) and then infers predictions based on the extracted concepts only. this step is implemented with a transparent inference layer (e.g., a sparse linear map) from which concept-based explanations can be easily derived. this makes local explanations cheap to compute, expressive , and faithful to the model's inference process . see for an overview. these developments have spurred interest into self-explainable gnn architectures that follow a similar setup and aim at achieving the same feat for graph data. a brief overview of these architectures is given in section 2. this begs the question: do self-explainable gnn architectures uphold the promise of producing explanations that are faithful to the model's reasoning? in this preliminary investigation, we focus on graph classification and provide an initial answer to this question for a representative selection of self-explainable gnn architectures. namely, we empirically analyze the faithfulness of explanations output by one information-constrained architecture (gisst ) and two prototype-based architectures (protgnn and pignn ) on four different data sets, and according to different metrics.our results highlight several interesting phenomena. first, these architectures fail to guarantee faithfulness, thus falling short of the original desideratum. furthermore, their explanations exhibit widely different degrees of faithfulness depending on the data set. naturally, this hinders trustworthiness. second, and equally importantly, we find that well-known measures of faithfulness lack a natural reference, and that therefore can paint an incomplete picture of explanation quality. summarizing, our results raise doubts on the interpretability of self-explainable gnn architectures and call for stricter scrutiny of their properties and of the measures proposed to evaluate them. 
this begs the question: do self-explainable gnn architectures uphold the promise of producing explanations that are faithful to the model's reasoning? in this preliminary investigation, we focus on graph classification and provide an initial answer to this question for a representative selection of self-explainable gnn architectures. namely, we empirically analyze the faithfulness of explanations output by one information-constrained architecture (gisst) and two prototype-based architectures (protgnnand pignn) on four different data sets, and according to different metrics. in short, gisst introduces an attention layer on the model embeddings and encourages this layer -via an appropriate sparsification penalty over the features and the edges of the graph -to focus on a subgraph e of g (and a subset of features x ) capturing all information necessary to infer a label.we address the following research questions: q1: do self-explainable gnns guarantee faithfulness in practice? q2: do existing faithfulness metrics properly gauge the quality of the model's explanations? to this end, we integrated the implementations of gisst, protgnn and pignn from the original papers into a unified framework built on pytorch geometric. for the ba data sets, node features encode the number of edges and triangles the node participates in, and the ba subgraphs were generated using a fixed seed, for reproducibility.fidelityincludes two metrics assessing to what degree explanations are necessary (fid + ) and sufficient (fid -). high fid + means that e is necessary, in the sense that the prediction does change when the model is fed the irrelevant subgraph c only, while low fid -means the explanation is sufficient, in that the prediction remains unchanged when considering the relevant subgraph e only.these measures differ in that fid + and fid -look at the hard predictions and depend on the groundtruth label, whereas unf considers the change in label distribution only.in stark contrast, their faithfulness -indicated as unf(e), fid + (e), and fid -(e) -is far from perfect. note that in the case of protgnn on mutag, unf might be zero due to negligible changes in the prediction vector and rounding in the results, while fid -, although being very low, is 0. of classification accuracy, faithfulness of the model explanations e, and faithfulness of random subgraphs r. this is especially the case for protgnn on mutag and for pignn+t on ba2motif, which both attain near-perfect unf and fid -. we offer such a term of reference by compare each measure by the same measure computed on a random explanation model that generates subgraphs r of g of the same size of the explanation e.we can see that oftentimes the model's explanations are in fact more faithful than the random ones, especially for mutag data. in conclusion, while faithfulness measures offer an intuitive idea of the model's explanations, they provide only a partial view of their overall quality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/611.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/611.txt new file mode 100644 index 0000000000000000000000000000000000000000..465281331e9d86dae443efd9c01323a4078e0809 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/611.txt @@ -0,0 +1 @@ +press releases today are invaluable tools to get the word out about new products and services, which drive engagement, traffic, and audience reach. 
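returning to the gnn faithfulness metrics discussed above, the sketch below shows one plausible way to compute fid+ and fid- for a single graph, assuming a trained classifier, an explanation subgraph e, and its complement c are already available; the function signature and the way graphs are fed to the model are assumptions for illustration, not the evaluated implementations.

import torch

def fidelity_scores(model, full_graph, explanation_subgraph, complement_subgraph, y_true):
    # hard predictions on the full graph, the explanation e, and its complement c
    model.eval()
    with torch.no_grad():
        pred_full = model(full_graph).argmax(dim=-1)
        pred_expl = model(explanation_subgraph).argmax(dim=-1)
        pred_comp = model(complement_subgraph).argmax(dim=-1)

    # fid+ : e is necessary -> the prediction should degrade when only c is fed to the model
    fid_plus = float((pred_full == y_true).float() - (pred_comp == y_true).float())
    # fid- : e is sufficient -> the prediction should be preserved when only e is fed to the model
    fid_minus = float((pred_full == y_true).float() - (pred_expl == y_true).float())
    return fid_plus, fid_minus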
media pitches are often used to persuade journalists to run coverage on those news stories, effectively increasing their popularity. finding the right journalists to run those stories makes a real difference in garnering interest in the product's offerings. this can often prove to be tedious, as journalists are frequently flooded with several such pitches. additionally, different journalists often work on different beats or news topics, which makes it essential to find the right contacts. hence, pitching press releases to the right journalists with relevant interests makes them more likely to run media coverage on press releases. this can often be a dealbreaker when it comes to the popularity and revenue of the pitched products.however, finding the most suitable journalists for this process often becomes an uphill task without the right contacts and a media list. moreover, staying updated with the recent interests of journalists and the trends they cover can often be a tedious task. our work aims to automate this process by leveraging text analytics.text mining, or text analytics, is a branch of artificial intelligence that uses natural language processing techniques to extract useful insights from large amounts of text data. this process involves several steps that transform real-world, unstructured data into a structured, normalized representation that can be fed into predictive models or analyzed to uncover hidden patterns that can provide actionable insights to decision-makers. text mining has a variety of applications ranging from fraud detection, personalized advertising, risk management, and so on.this study proposes the use of text mining techniques combined with predictive models to automate and speed up the process of finding relevant journalists who might be interested in covering potential press releases by taking into account their interests. it also compiles media contact information that enables users to quickly find and get in touch with the most suitable journalists to take point and optimize engagement on their press releases. figure 1.1 outlines and summarizes the approach followed in this paper. text mining, or text analytics, is a branch of artificial intelligence that uses natural language processing techniques to extract useful insights from large amounts of text data.a collectionof almost 267k news articles authored by various journalists containing the article topic, title, description, website links, and outlets they mainly write for is used as a starting point to get an insight into the relevant interests of those journalists. to create models that pick up on the meaning of the words, all the text articles and the data collected for the iab classifier are processed using the following steps by leveraging python's libraries.the proposed classifier accepts a text article as an input and returns the list of journalists who authored articles that are the closest, or most similar to it. count vectorizer and the tf -idf vectorizer are 2 such methods to generate word vectors from a given corpus of text, ie. 
words occurring frequently across documents do not have as much significance and have a lower idf value, but rarer words have a higher value since the less pressmatch: automated journalist recommendation for media coverage with nearest neighbor search title: 'pack lego': perth family caught in hard border crossfire at christmas description: perth mother clare has found herself mostly confined to a small sydney city apartment with her six-year-old and two-year-old as the northern beaches outbreak takes its toll.full text: perth mother clare* has found herself mostly confined to a small sydney city apartment with her two young daughters as the northern beaches outbreak takes its toll. there are various metrics to compute the distance between feature vectors, such as euclidean distance, manhattan distance, hamming distance, and so on. we then select the k nearest (k = 5) vectors and look up the journalists who have authored the most similar articles when compared to the input text. thus, an iab classifier is created to predict the 4 iab text tiers for any given text.a multiclass classification problem is different from a multilabel classification problem since the former assigns a data point to one class out of multiple classes, whereas the latter can assign one data point to more than one label out of multiple classes.p(b) refers to the prior probability of the text belonging to class b, p(a) refers to the prior probability of the text belonging to class a, p(b | a) refers to the probability that text belongs to class b given that it belongs to class a.hence, a naive bayes classifier is used along with the ovr classifier to solve the multilabel classification problem for predicting iab content taxonomy tiers for any given text article.the iab classifier is trained separately using text obtained with both methods -text directly collected from the webpage (using python's web scraping tools), as well as text collected by removing boilerplate html code from the webpage. only those news articles are selected which are authored by journalists having valid muckrack beats, which do not include beats like content source geo, content language, content source since these are generic beats that span a large variety of topics and hence cannot be assigned to only a few specific labels. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/612.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/612.txt new file mode 100644 index 0000000000000000000000000000000000000000..937a6e36866e3d472a720946894f3928ab741e61 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/612.txt @@ -0,0 +1 @@ +machine learning models trained on user data are now routinely used virtually everywhere, from recommendation systems to predictive models. in many cases, this user data itself includes some sensitive information (e.g., healthcare or race) or private aspects (customer habits, geographic data), sometimes even protected by law. 
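returning to the journalist-recommendation pipeline described above (tf-idf vectors over past articles queried with a nearest-neighbour search), one possible sketch with scikit-learn is shown below; the toy corpus, author names, and the small k are invented for illustration and do not reproduce the paper's dataset or settings.

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.neighbors import NearestNeighbors

# toy corpus of past articles and their authors (placeholders)
articles = [
    "new smartphone launch boosts consumer electronics market",
    "local council approves new housing development",
    "startup raises funding for ai-powered health diagnostics",
]
authors = ["journalist_a", "journalist_b", "journalist_c"]

vectorizer = TfidfVectorizer(stop_words="english")
article_vectors = vectorizer.fit_transform(articles)

# cosine distance is a common choice for comparing tf-idf vectors
index = NearestNeighbors(n_neighbors=2, metric="cosine").fit(article_vectors)

def recommend_journalists(press_release, k=2):
    query = vectorizer.transform([press_release])
    _, neighbor_ids = index.kneighbors(query, n_neighbors=k)
    return [authors[i] for i in neighbor_ids[0]]

print(recommend_journalists("company unveils wearable device for heart monitoring"))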
to address this issue -that the models trained on sensitive datasets must not leak personal or private information -in a principled fashion, one of the leading frameworks is that of differential privacy (dp) , which has de facto become the standard for privacy-preserving machine learning over the past decade.at its core, dp requires that the output of a randomized algorithm m not change drastically if one to modify one of the datapoints: that is, if x, x ′ are two datasets only differing in one user's data, then for all possible outputs s of the algorithm one should have roughly the same probability of observing s under both inputs:where ε > 0 and δ ∈ (0, 1] quantify the privacy guarantee (the smaller values, the better the privacy; see section 2 for formal definitions). intuitively, an algorithm m being (ε, δ)-dp means that its output does not reveal much about any particular user's data, since the output would be nearly identical had this user's data been completely different.while the use of differential privacy can mitigate many privacy concerns, it does come with some limitations. the first is the overhead in brings: that is, ensuring differential privacy for a learning task typically incurs an overhead in the number of data points needed to achieve the same accuracy guarantee. perhaps more importantly, dp does not solve all possible privacy concerns: even if a ml model is trained on a sensitive dataset in a differentially private way, the dataset may still be subject to some attacks -e.g., if the server where the training data is stored is itself compromised. somewhat tautologically: dp is not a silver bullet, and only provides meaningful guarantees against the threat models it was meant to address.another type of concerns focuses on the individual right to maintain control on one's own data: broadly speaking, this is asking that each user can (under some reasonable circumstances) require that their personal data and information be removed from a company's collected data and trained models. this so-called "right to be forgotten," which allow people to request that their data be deleted entirely from an ml system, has been passed into legislation or is considered in some form or another by various countries or entities, prominently the european union's general data protection regulation (gdpr), the california privacy rights act (ccra), canada's proposed consumer privacy protection act (cppa), and most recently in australia .however, translating this "right to be forgotten" into practice comes with a host of challenges, starting with that provided a formal definitional framework using cryptographic concepts -which led to a new area of research in ml and computer science, that of machine unlearning. a naive technical solution would be for a given company to keep the original training set at all times, and, upon a deletion request by a user, remove this user's data from the set before retraining the whole model on the result. this, of course, comes up with two major drawbacks: first, the cost to the company, in terms of time and computational resources, of retraining a large model on a regular basis. second, the privacy cost, as keeping the training set for an indefinite time in order to be able to handle the deletion requests leaves the door open to potential attacks and data breaches. 
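the (ε, δ) guarantee above can be made concrete with the classic laplace mechanism on a counting query, which satisfies (ε, 0)-dp because the query has sensitivity 1; the sketch below is a standard textbook example and is unrelated to the specific algorithms analysed in this paper.

import numpy as np

def laplace_count(data, predicate, epsilon):
    # a counting query has sensitivity 1: changing one record changes the count by at most 1,
    # so adding laplace noise with scale 1/epsilon yields an (epsilon, 0)-dp release
    true_count = sum(1 for x in data if predicate(x))
    noise = np.random.laplace(loc=0.0, scale=1.0 / epsilon)
    return true_count + noise

# example: privately count records above a threshold
data = [3, 8, 12, 5, 9]
print(laplace_count(data, lambda x: x > 6, epsilon=0.5))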
fortunately, there have been, over the past few years, a flurry of better (and more involved) approaches to machine unlearning, to handle deletion requests much more efficiently, and requiring to maintain much less of the training set (see, e.g., , and related work below).the above discussion, still, brings to light an important question: is machine unlearning, paradoxically, at odds with (differential) privacy? what is the connection between the two notions: are they complementary, or is there a trade-off between them? this is the main question this work sets out to address. our starting point is the probabilistic definition of machine unlearning set forth by sekhari, acharya, kamath, and suresh , itself reminiscent of the definition of differential privacy (see definition 2.5 for the formal statement): a pair of algorithms (a, ā) is an (ε, δ)-unlearning algorithm if (1) a : x * → w is a (randomized) learning algorithm which, given a dataset x ⊆ x * , outputs model parameters a(x) ∈ w; and (2) ā : x * × w × t → w which, on input a set of deletion requests u ⊆ x, previous model parameters w, and some succinct additional "side information" t (x) ∈ t about the original dataset, output updated model parameters w ′ ∈ w from which the data from u has been unlearned, that is, such thatfor every possible set w ⊆ w of model parameters. loosely speaking, this requires that the outcomes of (a) training a model m via a on the dataset x then unlearning some of the original training data u ⊆ x from m using ā, and (b) training a model m ′ via a directly on the dataset x \ u then unlearning nothing via ā, be nearly indistinguishable.in their paper, sekhari et al. focus on generalization guarantees of unlearning algorithm, i.e., what can be achieved by unlearning algorithms when focusing on population loss, namely, when aiming to minimizegiven a prespecified loss function f : w × x → r, where the expectation is over the draw of a new datapoint from the underlying distribution p on the sample space. the quality of a learning algorithm a is then measured by the expected excess riskwhere the expectation is taking over the random choice of a dataset x ∼ d n of size n, and the randomness of a itself. the focus of , as is ours, is then to quantify the deletion capacity achievable for (ε, δ)-unlearning given a prespecified loss function, that is, the maximum number of data points one can ask to be forgotten (maximum size of the subset u ) before the excess risk increases by more than some threshold (see definition 2.6).in their paper, draw a connection between dp learning algorithms and unlearning ones, showing that dp learning algorithms do provide some unlearning guarantees out-of-the-box, and that one can achieve non-trivial unlearning guarantees for convex loss functions by leveraging the literature on differentially private optimization and learning. one of their main results is showing that these dp-based unlearning algorithms, which crucially do not rely on any side information (the additional input t (x) ∈ t provided to the unlearning algorithm ā) can handle strictly fewer deletion requests than general unlearning algorithms which do rely on such side information.their results, however, do not fully characterize the deletion capacity of these "dp-based" machine unlearning algorithms, leaving a significant gap between their upper and lower bounds. 
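the "naive" baseline mentioned above, retraining from scratch on the dataset with the deleted users removed, can be written down directly; the sketch below uses scikit-learn's logistic regression purely as a stand-in for the learning algorithm a and is not the construction of sekhari et al.

import numpy as np
from sklearn.linear_model import LogisticRegression

def learn(X, y):
    # the learning algorithm A: returns a trained model for a dataset
    return LogisticRegression(max_iter=1000).fit(X, y)

def unlearn_by_retraining(X, y, delete_indices):
    # exact (but expensive) unlearning: drop the requested points and retrain A on the rest;
    # the output is distributed exactly like training directly on X \ U
    keep = np.setdiff1d(np.arange(len(X)), delete_indices)
    return learn(X[keep], y[keep])

# toy usage
X = np.random.randn(100, 5)
y = (X[:, 0] > 0).astype(int)
model_after_deletion = unlearn_by_retraining(X, y, delete_indices=[3, 17, 42])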
we argue that fully understanding this quantity is crucial, as dp-based unlearning algorithms are exactly those for which there is no conflict between the two notions of dp and unlearning -instead, this class of algorithms is the one for which they work hand in hand. this is in contrast to the more general unlearning algorithms relying on maintaining and storing side information about the training set, as this side information can make their deployment susceptible to privacy breaches.at its core, dp requires that the output of a randomized algorithm m not change drastically if one to modify one of the datapoints: that is, if x, x ′ are two datasets only differing in one user's data, then for all possible outputs s of the algorithm one should have roughly the same probability of observing s under both inputs:.5 for the formal statement): a pair of algorithms (a, ā) is an (ε, δ)-unlearning algorithm if (1) a : x * → w is a (randomized) learning algorithm which, given a dataset x ⊆ x * , outputs model parameters a(x) ∈ w; and (2) ā : x * × w × t → w which, on input a set of deletion requests u ⊆ x, previous model parameters w, and some succinct additional "side information" t (x) ∈ t about the original dataset, output updated model parameters w ′ ∈ w from which the data from u has been unlearned, that is, such that. loosely speaking, this requires that the outcomes of (a) training a model m via a on the dataset x then unlearning some of the original training data u ⊆ x from m using ā, and (b) training a model m ′ via a directly on the dataset x \ u then unlearning nothing via ā, be nearly indistinguishable.in their paper,draw a connection between dp learning algorithms and unlearning ones, showing that dp learning algorithms do provide some unlearning guarantees out-of-the-box, and that one can achieve non-trivial unlearning guarantees for convex loss functions by leveraging the literature on differentially private optimization and learning. one of their main results is showing that these dp-based unlearning algorithms, which crucially do not rely on any side information (the additional input t (x) ∈ t provided to the unlearning algorithm ā) can handle strictly fewer deletion requests than general unlearning algorithms which do rely on such side information.our next contribution, motivated by the similarity of the formalisation of machine unlearning (without side information) and that of differential privacy, is to establish the analogue of key properties of dp for machine unlearning, namely, post-processing and composition of machine unlearning algorithms. an unlearning algorithm ( ā, a) is said to be lazy if, when provided with an empty set of deletion requests, the unlearning algorithm ā does not update the model.literature in machine unlearning that relates to differential privacy branches to two: (1) models are prone to attacks when attackers have access to both before and after version when the deletion requests are processed, and (2) the conceptual similarity of machine unlearning and differential privacy.following, the notion of online unlearning algorithm -which receive the deletion requests sequentially -was put forward and studied in, again with memory efficiency with respect to the side information in mind; however, their primary focus is on the empirical performance of unlearning algorithm. 
there exists a lipschitz convex loss function (indeed, linear) for which any (ε, δ)-unlearning algorithm ( ā, a) which takes no side information must have deletion capacity.our work fully characterizes the deletion capacity of any unlearning algorithm ( ā, a) minimizing population risk under both convex and strongly convex loss functions, when only given the model parameters (output of the learning algorithm) and the set of deletion requests.we hope our work will lead to further study of the interplay between differential privacy and machine unlearning, and to additional study of "dp-like" properties of machine unlearning, such as the postprocessing and composition properties our present work identified.to establish our deletion capacity lower bound with respect to this loss function, we will proceed in three stages: the first, relatively standard, is to relate population loss (what we are interested in) to empirical loss -which allows us to focus on the existence of a "hard dataset." the second step is then to establish a sample complexity lower bound on the empirical risk (for this loss function) of any (ε, δ)-dp algorithm, via a reduction to differentially private computing of 1-marginals.under our laziness assumption, we can establish bounds on applying unlearning algorithm repeatedly when the overall deletion requests is within the deletion capacity: where the first and third inequality result from the definition of (ε, δ)-unlearning and the second equality is due to laziness assumption 1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/613.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/613.txt new file mode 100644 index 0000000000000000000000000000000000000000..9607fa94d267ce7dfb7788d94909908d1f1db395 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/613.txt @@ -0,0 +1 @@ +implicit neural networks (nns) have recently emerged as a new paradigm in neural network design. an implicit nn is equivalent to an infinite-depth weight-shared explicit nn with inputinjection. unlike explicit nns, implicit nns generate features by directly solving for the fixed point, rather than through layer-by-layer forward propagation. moreover, implicit nns have the remarkable advantage that gradients can be computed analytically only through the fixed point with implicit differentiation. therefore, training implicit nns only requires constant memory.despite the empirical success achieved by implicit nns , our theoretical understanding of these models is still limited. in particular, there is a lack of theoretical analysis of the training dynamics and generalization performance of implicit nns, and possibly more importantly, whether these properties can be connected to those of explicit nns. demonstrates that any deep nn can be reformulated as a special implicit nn. however, it remains unknown whether general implicit nns have advantages over explicit nns. extends previous neural tangent kernel (ntk) studies to implicit nns and give the exact expression of the ntk of the relu implicit nns. however, the differences between implicit and explicit ntks are not analyzed. moreover, previous works have proved the global convergence of gradient descent for training implicit nns. however, it is still unclear what distinguishes the training dynamic of implicit nns and that of explicit nns.in this paper, we investigate implicit nns from a high-dimensional view. 
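to illustrate the fixed-point view of implicit nns described above, the sketch below computes features by iterating z = relu(w z + u x + b) until (approximate) convergence; rescaling w to have small spectral norm is one standard way to make the iteration contractive, and the sizes and tolerance are arbitrary choices for the example.

import numpy as np

rng = np.random.default_rng(0)
d_in, d_hidden = 16, 64

# input-injection matrix and weight-shared recurrent matrix; rescale w for contractivity
U = rng.normal(size=(d_hidden, d_in)) / np.sqrt(d_in)
W = rng.normal(size=(d_hidden, d_hidden))
W *= 0.5 / np.linalg.norm(W, 2)          # spectral norm 0.5 < 1
b = np.zeros(d_hidden)

def implicit_features(x, tol=1e-6, max_iter=500):
    # solve z = relu(W z + U x + b) by simple fixed-point iteration
    z = np.zeros(d_hidden)
    for _ in range(max_iter):
        z_next = np.maximum(W @ z + U @ x + b, 0.0)
        if np.linalg.norm(z_next - z) < tol:
            break
        z = z_next
    return z

x = rng.normal(size=d_in)
z_star = implicit_features(x)             # behaves like an infinitely deep weight-shared net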
specifically, we perform a fine-grained asymptotic analysis on the eigenspectra of conjugate kernel (cks) and ntks of implicit nns, which play a fundamental role in the convergence and generalization high dimensional nns . by considering input data uniformly drawn from the unit sphere, we derive, with recent advances in random matrix theory, high-dimensional (spectral) equivalents for the cks and ntks of implicit nns, and establish the equivalence between implicit and explicit nns by matching the coefficients of the corresponding asymptotic spectral equivalents. surprisingly, our results reveal that a single-layer explicit nn with carefully designed activations has the same ck or ntk eigenspectra as a relu implicit nn, whose depth is essentially infinite. unlike explicit nns, implicit nns generate features by directly solving for the fixed point, rather than through layer-by-layer forward propagation. moreover, implicit nns have the remarkable advantage that gradients can be computed analytically only through the fixed point with implicit differentiation. in particular, there is a lack of theoretical analysis of the training dynamics and generalization performance of implicit nns, and possibly more importantly, whether these properties can be connected to those of explicit nns. however, it remains unknown whether general implicit nns have advantages over explicit nns.extends previous neural tangent kernel (ntk) studies to implicit nns and give the exact expression of the ntk of the relu implicit nns. moreover, previous workshave proved the global convergence of gradient descent for training implicit nns. however, it is still unclear what distinguishes the training dynamic of implicit nns and that of explicit nns. specifically, we perform a fine-grained asymptotic analysis on the eigenspectra of conjugate kernel (cks) and ntks of implicit nns, which play a fundamental role in the convergence and generalization high dimensional nns. by considering input data uniformly drawn from the unit sphere, we derive, with recent advances in random matrix theory, high-dimensional (spectral) equivalents for the cks and ntks of implicit nns, and establish the equivalence between implicit and explicit nns by matching the coefficients of the corresponding asymptotic spectral equivalents. surprisingly, our results reveal that a single-layer explicit nn with carefully designed activations has the same ck or ntk eigenspectra as a relu implicit nn, whose depth is essentially infinite.in this section, we prove the high-dimensional equivalents for cks and ntks of implicit and explicit nns. as a result, by matching the coefficients of the asymptotic spectral equivalents, we establish the equivalence between implicit and explicit nns in high dimensions.in the following corollary, we show a concrete case of a single-layer explicit nn with an quadratic activation, that matches the ck or ntk eigenspectra of a relu implicit nn. moreover, we establish the equivalence between implicit and explicit nns by matching the coefficients of the asymptotic spectral equivalents. in particular, we show that a single-layer explicit nn with carefully designed activations has the same ck or ntk eigenspectra as a relu implicit nn. 
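the eigenspectrum comparison above can also be probed numerically: the sketch below draws inputs uniformly from the unit sphere, forms the empirical conjugate kernel gram matrix of a single-layer relu network, and inspects its eigenvalues; this is only a finite-width simulation of the asymptotic quantities studied in the paper.

import numpy as np

rng = np.random.default_rng(1)
n, d, width = 200, 100, 4000          # samples, input dimension, hidden width

# data uniform on the unit sphere
X = rng.normal(size=(n, d))
X /= np.linalg.norm(X, axis=1, keepdims=True)

# single-layer features and empirical conjugate kernel CK = F F^T / width
W = rng.normal(size=(d, width)) / np.sqrt(d)
F = np.maximum(X @ W, 0.0)            # relu activations
CK = F @ F.T / width

eigvals = np.linalg.eigvalsh(CK)
print(eigvals[-5:])                   # a few top eigenvalues of the empirical CK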
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/614.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/614.txt new file mode 100644 index 0000000000000000000000000000000000000000..7841dff11a0912ee1f5fb5d82c904a9a433e2977 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/614.txt @@ -0,0 +1 @@ +although deep neural networks can exhibit remarkable performances when trained on independent and identically distributed data drawn from a fixed set of classes, the practical need arises to train models on a continuous stream of data. in such real-world scenarios, multiple classification tasks are presented sequentially, leading to a situation where the data pertaining to previous tasks becomes inaccessible when learning new ones. this scenario is known as continual learning (cl) and poses many challenges to standard learning algorithms which can suffer from catastrophic forgetting (cf). learning algorithms must trade-off between maintaining performances on old tasks (stability)the university of tokyo a) nicolas.michel@esiee.fr b) giovanni.chierchia@esiee.fr c) romain.negrel@esiee.fr d) jf.bercher@esiee.fr e) yamasaki@cvm.t.u-tokyo.ac.jp and achieving competitive performances on the current task (plasticity). while the main objective remains to maximize accuracy across all classes at the end of training, it is also essential to define meaningful metrics to capture individual methods learning behavior and capabilities.metrics such as the average accuracy (aa) and the average forgetting (af) have been proposed in past studies. however, none of these metrics takes into account the increasing difficulty of the classification task, which automatically induces a loss in performance for any model. in that sense, we argue that the af metric is inherently linked to the continual learning setup and does not fairly represent how the model reacts to the continuous environment.in this paper, we analyze the limitations of the current forgetting metric through simple examples and propose new metrics for cl that take into account the increasing difficulty of the task being solved by the model. we show through several experiments on benchmark datasets that our proposed metrics can shed new light on the stabilityplasticity trade-off reached by the model when training on the continual environment. in that sense, we make the following contributions:• we review traditional metrics and show their current limitations; • we propose new metrics which take into account the increasing difficulty of the continual setup; • we experimentally demonstrate the advantages of our metrics compared to traditional metrics for analyzing continual learners. the rest of the paper is organized as follows. in section 2 we describe work related to ours. in section 3.2, we review two classical cl metrics and show their limitations. in section 4 we defined our proposed metrics. section 5 presents our experiments and eventually section 6 concludes the paper.in this paper, we analyze the limitations of the current forgetting metric through simple examples and propose new metrics for cl that take into account the increasing difficulty of the task being solved by the model. 
formally, we consider a sequential learning setup with a sequence {t 1 , • • • , t k } of k tasks, and d k = (x k , y k ) the corresponding data-label pairs.the final average accuracy aa k (g) is the accuracy after training g on the last task t k and is the metric of interest for evaluating the performance of g. the main limitation of af is that it does not solely measure how much the model forgot, but also how much harder the current overall problem is compared to a single task. to illustrate this phenomenon, let us consider a simple example where we learn from a sequence of 5 tasks {t 1 , • • • , t 5 }, each task being composed of two classes for a total of 10 distinct classes. we want to use a random classifier rand c k , where c k is the total number of classes when training on task k. while expanding the number of classes should make the task harder for the model, the drop in performance is not due to the model forgetting knowledge but rather to the fact that the model is not able to learn how to solve the hard task as well as the easy one. we propose two metrics that attempt to dissociate this setup-induced forgetting from the overall performance by rescaling the original aa and af using the performances of a random classifier to account for task difficulty.where c k is the total number of classes seen at task t k .3 let k be the total number of tasks and c k the total number of classes.4 if every task has the same number of classes, the rescaled average forgetting (raf) after training on t k , for a classifier g can be expressed as:. this situation can happen if the model cannot learn new tasks efficiently while maintaining high performance on past tasks (high stability and low plasticity) or oppositely the model perfectly learns new tasks while forgetting older tasks (low stability and high plasticity). in other words, a constant raa translates a failed stability-plasticity trade-off, while an increasing raa demonstrates that the model learns more than a random classifier and hence can still accumulate new knowledge despite the increasing task difficulty.because c k+1 = c k + n c with n c the number of classes per task.because c k = kn c with n c the number of classes per task and k-1 h k -1 is an increasing function. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/615.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/615.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c28b9c5c5245e644a3527acb3d32daede970f3a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/615.txt @@ -0,0 +1 @@ +neurosurgeon supports several popular models within the huggingface transformers repository (wolf et al., 2019), including vit (dosovitskiy et al., 2020), resnet (he et al., 2016), gpt2 (radford et al., 2019), bert (devlin et al., 2018), and more. with neurosurgeon, one discovers functional subnetworks by optimizing a binary mask over weights (or neurons) within model layers, ablating everything except the units necessary for a particular computation. we have implemented two optimization-based techniques from model pruning (as well as a simple baseline technique) for generating these binary masks.hard-concrete masking: hard-concrete masking was introduced to provide an approximation to the l 0 penalty, providing a bias towards sparse solutions during model training (louizos et al., 2017). 
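returning to the rescaled continual-learning metrics described earlier in this section, one natural way to implement the rescaling idea (an assumption, since the exact formulas are not reproduced in the excerpt) is to subtract the accuracy of a random classifier over the classes seen so far and renormalize, as in the sketch below.

def rescaled_average_accuracy(aa_k, num_classes_k):
    # aa_k: average accuracy after task k; num_classes_k: total classes seen so far (c_k)
    random_acc = 1.0 / num_classes_k          # accuracy of a random classifier
    return (aa_k - random_acc) / (1.0 - random_acc)

# example: 40% accuracy over 10 classes is further above chance than 40% over 4 classes
print(rescaled_average_accuracy(0.40, 10))    # ~0.333
print(rescaled_average_accuracy(0.40, 4))     # 0.20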
this technique produces masks by stochastically sampling mask values from a parameterized hardconcrete distribution.continuous sparsification: continuous sparsification was introduced to provide a deterministic approximation to the l 0 penalty (savarese et al., 2020). this technique produces masks by annealing a parameterized soft mask into a hard mask over the course of training.magnitude pruning: magnitude pruning simply ablates some fraction of the lowest magnitude weights (han et al., 2015). though simple, this approach has been used in several important works on pruning and subnetworks, notably the lottery ticket hypothesis (frankle and carbin, 2018). this method should be used as a baseline to compare against the optimization-based methods described above.when performing subnetwork analysis, we freeze the underlying model weights and optimize arxiv:2309.00244v1 1 sep 2023 the parameters introduced by continuous sparsification or hard-concrete masking. we typically include an l 0 regularization term on the mask to encourage parsimonious subnetworks. both optimization-based techniques can be used to discover subnetworks at the weight or neuron level. this approach seeks to automatically uncover circuits within a trained model and locate them in particular subnetworks. this approach borrows techniques from model pruning to uncover subnetworks that might implement such highlevel computations. we developed a python library -neurosurgeon -to simplify the process of subnetwork analysis, allowing researchers to more quickly uncover the internal structure that lies within trained models.neurosurgeon supports several popular models within the huggingface transformers repository(wolf et al., 2019), including vit(dosovitskiy et al., 2020), resnet(he et al.hard-concrete masking: hard-concrete masking was introduced to provide an approximation to the l 0 penalty, providing a bias towards sparse solutions during model training(louizos et al.continuous sparsification: continuous sparsification was introduced to provide a deterministic approximation to the l 0 penalty(savarese et al., 2020)., 2015).when performing subnetwork analysis, we freeze the underlying model weights and optimize arxiv:2309.in order to visualize the results of subnetwork analysis, we have implemented a visualizer that can be used to understand how subnetworks are distributed throughout the layers of a model. it can be used to display one or two subnetworks within the same model., 2021), which neurosurgeon implements.we present neurosurgeon, a python library designed to enable researchers to easily identify functional subnetworks within trained models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/616.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/616.txt new file mode 100644 index 0000000000000000000000000000000000000000..c380d1d49b6178324a302c6a1d86a51bf2ddec63 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/616.txt @@ -0,0 +1 @@ +graph transformers (gts) have recently emerged as popular alternative to conventional message passing graph neural networks (mpgnns) which dominated deep learning on graphs for years. a central premise underlying gts is their ability to model long-range interactions between vertices through a global attention mechanism. 
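returning to the masking techniques listed earlier (hard-concrete, continuous sparsification, magnitude pruning), the magnitude-pruning baseline is the simplest to write down; the sketch below builds a binary mask over a weight tensor in pytorch and illustrates the general idea rather than neurosurgeon's actual api.

import torch

def magnitude_prune_mask(weight: torch.Tensor, sparsity: float) -> torch.Tensor:
    # keep the (1 - sparsity) fraction of weights with largest magnitude, zero out the rest
    k = int(sparsity * weight.numel())
    if k == 0:
        return torch.ones_like(weight)
    threshold = weight.abs().flatten().kthvalue(k).values
    return (weight.abs() > threshold).float()

# example: ablate the 80% lowest-magnitude weights of a linear layer
layer = torch.nn.Linear(128, 64)
mask = magnitude_prune_mask(layer.weight.data, sparsity=0.8)
pruned_weight = layer.weight.data * mask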
this could give gts an advantage on tasks where mpgnns may be limited through phenomenons like over-smoothing, over-squashing, and under-reaching, thereby justifying the significant runtime overhead of self-attention.the long-range graph benchmark (lrgb) has been introduced by dwivedi et al. as a collection of five datasets with strong dependence on long-range interactions between vertices:• peptides-func and peptides-struct are graph-level classification and regression tasks, respectively.their aim is to predict various properties of peptides which are modelled as molecular graphs. • pascalvoc-sp and coco-sp model semantic image segmentation as a node-classification task on superpixel graphs. • pcqm-contact is a link prediction task on molecular graphs. the task is to predict pairs of vertices which are distant in the graph but in contact in 3d space.the experiments provided by dwivedi et al. report a strong performance advantage of gts over the mpgnn architectures gcn , gine , and gatedgcn , in accordance with the expectations. subsequently, gps reached similar conclusions on lrgb. we note that these two works are strongly related and built on a shared code base. newer research on gts (see section 1.1) is commonly based on forks of this code base and often cites the baseline performance reported by dwivedi et al. to represent mpgnns.our contribution is three-fold2 : first, we show that the three mpgnn baselines gcn, gine, and gatedgcn all profit massively from further hyperparameter tuning, reducing and even closing the gap to graph transformers on multiple datasets. in fact, gcn yields state-of-the-art results on peptides-struct, surpassing several newer graph transformers. on this dataset in particular, most of the performance boost is due to a multi-layer prediction head instead of a linear one, again highlighting the importance of hyperparameters. second, we show that on the vision datasets pascalvoc-sp and coco-sp normalization of the input features is highly beneficial. we argue that, as in the vision domain, feature normalization should be the default setting. third and last we take a closer look at the mrr metric used to evaluate pcqm-contact. there, we demonstrate different filtering strategies have a major impact on the results and must be implemented exactly to specification to facilitate reliable comparisons.our contribution is three-fold2: first, we show that the three mpgnn baselines gcn, gine, and gatedgcn all profit massively from further hyperparameter tuning, reducing and even closing the gap to graph transformers on multiple datasets.. as a point of reference, we reevalute gps in an identical manner and also achieve significantly improved results on three datasets with this graph transformer. we apply channel-wise linear normalization to all input features and show that all models (baselines and gps) profit from it in an ablation in figure2b. for the mpgnn baselines we observe considerable improvements on both datasets as all three mpgnns outperform gps after tuning. while the benefit on peptides-func is considerable and highly significant, on peptides-struct the head depth accounts for almost the complete performance gap between mpgnns and gts. our results indicate that the prediction targets of both datasets appear to depend non-linearly on global graph information. graph transformers are not as sensitive to linear prediction heads, since each layer can process global graph information with a deep feed-forward network. 
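the channel-wise linear normalization of input features mentioned above can be as simple as standardizing each feature dimension with statistics computed on the training split; the sketch below is a generic version of that preprocessing step, with array shapes chosen only for illustration.

import numpy as np

def fit_channel_normalizer(train_features, eps=1e-8):
    # train_features: (num_nodes, num_channels) stacked over the training graphs
    mean = train_features.mean(axis=0)
    std = train_features.std(axis=0) + eps
    return mean, std

def normalize(features, mean, std):
    # apply the same affine map to every split (train/val/test)
    return (features - mean) / std

train_x = np.random.rand(1000, 14) * 255.0      # e.g. raw superpixel statistics
mean, std = fit_channel_normalizer(train_x)
train_x_norm = normalize(train_x, mean, std)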
this is only one percentage point below the results achieved by crawl, which currently is the only reported result with normalized features. for gatedgcn we observe a slight performance increase but a large reduction in the variance across random seeds. recall that these values are obtained in a raw setting with false negatives present. tuning yields an absolute improvement of around 3%. compared to the filtered mrr in c) the mrr metric increases by about 10 percentage points, indicating that self-loops strongly affect the results. and second, only on the two superpixel datasets graph transformers exhibit clear performance benefits against mpgnns, indicating that either there are ways to solve the other tasks without long-range interactions or graph transformers are not inherently better at exploiting such long-range dependencies.overall, we tried to incorporate the most important hyperparameters which we selected to be dropout, model depth, prediction head depth, learning rate, and the used positional or structural encoding. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/617.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/617.txt new file mode 100644 index 0000000000000000000000000000000000000000..75be6645c9eb3e76b8b4be65df39f7bf99389ce2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/617.txt @@ -0,0 +1 @@ +since chatgpt, gpt-4, and llama-2 family models entered the public sphere, they have impressed users with their ability to be helpful assistants for a surprising number of tasks . one key to their success, along with many other foundation models , is model alignment through rlhf. training a massive language model results in a network with a large amount of knowledge, however, it is not trained to discriminate within that knowledge, which could cause undesired behaviour and possibly lead to societal harm . alignment aims to solve this issue by adjusting the model's behaviour and has become an integral part for creating safe and controllable foundation models .while rlhf improves model alignment it is limited in usage, being both highly complex and demanding a massive amount of memory when loading and training multiple models during ppo . because the use of rlhf is in its infancy, there is a strong need to evaluate its variations in terms of speed and performance.to address this need, we delve into the training process and model architectures of standard rlhf-ppo. through this investigation, we identify substantial opportunities for memory/computation cost reduction through the implementation of model-sharing between reference/reward models and actor/critic models.given these findings, we propose hydra-ppo to reduce the number of trained and static models in memory during ppo. we perform run-time and performance comparisons to show these memory savings can then be utilized to increase the training batch size, reducing the per-sample latency of ppo by up to 65%. while rlhf improves model alignment it is limited in usage, being both highly complex and demanding a massive amount of memory when loading and training multiple models during ppo.stage 1: supervised fine-tuning (sft) an input llm is trained using the standard causal language model training objective l xent on a set of data d, yielding language model π sft .stage 3: ppo π sft and r ϕ (x, y) are used to initialize and subsequently train an actor and critic with ppo. 
we define a decoder-based model π hydra with two linear heads: 1) a head serves as the causal head, predicting the subsequent token for a sequence, and 2) another head serves as the reward model head, providing the immediate reward associated with the same input.stage 1: hydra-sft using a similar dataset to standard rm training, π hydra is trained by optimizing l π hydra (x, y w , y l ) = l xent (x, y w ) + γl θ (x, y w , y l ), where γ is a weighting multiplier. only one full base model is required in memory during ppo, leading to similar overall memory usage to lora finetuning given the same batch size. specifically, lora-ppo is initialized with the sft model, while both j-hydra-ppo and hydra-ppo are initialized with the hydra-sft model.the performance of sft and hydra-sft are comparable, suggesting that combining the rm and sft objectives within a single model does not consistently lead to improvements or hinder the generation performance across different tasks. this may be explained by the better reward model from hydra-sft which enables overall better ppo performance. overall, the study indicates that ppo improves model alignment and there is potential for further enhancing ppo performance by improving the rm. such a reward model may then be used to guide other models towards aligning to human preference, improving performance in a nontrivial way over supervised fine-tuning (sft) throughout many domains.alignment during supervised fine-tuning (sft) due to the complexity and high cost of ppo, some recent works have sought to replace the training process of ppo while retaining its benefits.reward model size in rlhf, the reward model can be smaller than the language model. in applied usage, hydra-rlhf comparatively saves less memory when standard rlhf uses a smaller reward model, however, this is also an advantage for hydra-rlhf; it uses a larger reward model for less training cost. we introduce hydra-rlhf as a method to save memory during ppo while maintaining performance, which consists of two major parts: combining reference and reward models, and dynamically switching the active lora module during ppo. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/618.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/618.txt new file mode 100644 index 0000000000000000000000000000000000000000..54fef2b8b8ae32b73a5b255527a7db2645f7c988 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/618.txt @@ -0,0 +1 @@ +tactical networks present unique challenges for cc. while operating in demanding circumstances marked by limited resources, unreliable links, frequent disconnections, and varying levels of connectivity, these networks need to support critical real-time functionalities to facilitate mission applications such as command, control, communications, computers, cyber, intelligence, surveillance, and reconnaissance (c5isr).traditional cc algorithms, such as those embedded in transport protocols like tcp, face significant difficulties in maintaining efficient communications within tactical networks, as they were developed for wired environments and misinterpret as congestion symptoms phenomena such as packet losses and temporary unreachability, that are very common in tactical networks, thus severely and unnecessarily reducing transmission speeds. 
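returning to the hydra model described earlier (a shared decoder backbone with a causal language-modelling head and a scalar reward head), a schematic pytorch sketch of that two-head structure is given below; it reflects our reading of the idea, not the released hydra-rlhf code, and the tiny backbone is only a placeholder.

import torch
import torch.nn as nn

class TwoHeadDecoder(nn.Module):
    def __init__(self, backbone: nn.Module, hidden_size: int, vocab_size: int):
        super().__init__()
        self.backbone = backbone                             # shared decoder stack (placeholder)
        self.lm_head = nn.Linear(hidden_size, vocab_size)    # causal head: next-token logits
        self.reward_head = nn.Linear(hidden_size, 1)         # reward head: scalar per position

    def forward(self, input_ids):
        hidden = self.backbone(input_ids)                    # (batch, seq_len, hidden_size)
        logits = self.lm_head(hidden)                        # used for the sft / actor objective
        rewards = self.reward_head(hidden).squeeze(-1)       # used for the reward-model objective
        return logits, rewards

# toy backbone: an embedding followed by one transformer layer (illustrative only, not causal)
hidden_size, vocab_size = 64, 1000
backbone = nn.Sequential(nn.Embedding(vocab_size, hidden_size),
                         nn.TransformerEncoderLayer(hidden_size, nhead=4, batch_first=True))
model = TwoHeadDecoder(backbone, hidden_size, vocab_size)
logits, rewards = model(torch.randint(0, vocab_size, (2, 16)))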
optimizing cc in unreliable networks necessitates innovative approaches that are able to cope with their dynamic and unpredictable nature.in this context, the combination of rl techniques with deep learning (dl) for policy parameterization, often referred to as deep reinforcement learning, has emerged as a promising approach. deep rl demonstrated remarkable robustness across diverse domains, and its application in computer networks offers new possibilities for addressing the challenges of cc , . by leveraging rl, agents can be trained to learn optimal policies through interactions with the network environment, enabling more efficient and reliable communications.despite numerous efforts showing promising results, the performance of these rl agents still falls short of generalization capabilities, especially when unreliable and unpredictable links are encountered. this performance gap can be attributed to various factors either linked to the learning problem itself, such as designing the environment where the agent resides and its "learning curriculum", or to the challenging and partially observable nature of the networking environment. addressing these challenges requires the development of solutions that enable rl agents to learn effective policies for cc decisionmaking while experiencing complex and dynamic scenarios. however, creating dedicated environments for conducting experiments in such scenarios can be challenging, costly, and even infeasible in real network environments. to overcome this last constraint, the importance of an accurate emulation environment cannot be overstated.this paper extends our work within the marlin project by proposing an rl framework that leverages an accurate and parallelizable emulation environment to reenact the conditions of a tactical network, thus allowing agents to experience a wide range of dynamic behaviors. to better evaluate the decision-making effectiveness of our agent, we also introduce a novel metric, the rtt transition impact (rti), based on the maximum round-trip time (rtt) detected during a communication involving a link transition, that allows to measure the agent's responsiveness to link changes and its queue management capabilities.to evaluate our learning framework, we trained an rl agent in an emulated environment replicating a bottleneck link transition between a satcom link and an uhf radio linkrather typical in tactical networks. we then evaluated the agent performance in a file transfer task and compared it against conventional cc algorithms such as tcp cubic , as well as cc algorithms implemented within communication middlewares tailored for tactical environments such as mockets , which the same marlin utilizes as partnering protocol. 
the results demonstrate that the exposure of the agent to complex networking scenarios enables training policies able to achieve competitive decision-making performance, and validate the rl training approach based on accurate emulation and purposely designed valuation metrics for specialized environments like tactical networks.traditional cc algorithms, such as those embedded in transport protocols like tcp, face significant difficulties in maintaining efficient communications within tactical networks, as they were developed for wired environments and misinterpret as congestion symptoms phenomena such as packet losses and temporary unreachability, that are very common in tactical networks, thus severely and unnecessarily reducing transmission speeds.this paper extends our work within the marlin projectby proposing an rl framework that leverages an accurate and parallelizable emulation environment to reenact the conditions of a tactical network, thus allowing agents to experience a wide range of dynamic behaviors. to better evaluate the decision-making effectiveness of our agent, we also introduce a novel metric, the rtt transition impact (rti), based on the maximum round-trip time (rtt) detected during a communication involving a link transition, that allows to measure the agent's responsiveness to link changes and its queue management capabilities.to evaluate our learning framework, we trained an rl agent in an emulated environment replicating a bottleneck link transition between a satcom link and an uhf radio linkrather typical in tactical networks. we then evaluated the agent performance in a file transfer task and compared it against conventional cc algorithms such as tcp cubic, as well as cc algorithms implemented within communication middlewares tailored for tactical environments such as mockets, which the same marlin utilizes as partnering protocol. the results demonstrate that the exposure of the agent to complex networking scenarios enables training policies able to achieve competitive decision-making performance, and validate the rl training approach based on accurate emulation and purposely designed valuation metrics for specialized environments like tactical networks.tactical networks represent a unique communications environment due to a combination of particularly harsh and dynamic network conditions paired with a high degree of heterogeneity in the network technologies employed and, consequently, the characteristics of the links. this situation demands efficient data transmission strategies that can quickly respond to changes in the status of the network, which makes the role of transport protocols and cc algorithms of crucial importance in tactical networks and especially challenging. while it presents several advantages over tcp and other transport protocols in degraded environments, the existing cc fails to share link capacity with other communication flows going through the same links and cannot adapt quickly to changes in the available bandwidth. by introducing these diverse scenarios into the training environment, we can expose the rl agent to a wider range of experiences, enabling it to learn and adapt to the intricate behaviors exhibited by the heterogeneous entities in tactical networks. however, as the tactical network operates in a dynamic environment, it is crucial for it to respond efficiently even in situations where the satcom link becomes unavailable or experiences sudden performance degradation. 
by considering individual link measurements, the rti provides insights into the impact of link changes on the performance of the cc algorithm and it can be defined as follows:.to conclude and support our results, we assessed the average number of retransmissions per episode involving marlin, mockets, the random agent, and tcp cubic when a 3% packet loss was present on the uhf link.to the best of our knowledge, this paper presents a two-fold novel contribution: it is the first rl environment for cc with a focus on tactical networks and the first exploiting the flexibility of emulated networks with containerized applications to train and evaluate a rl agent for cc.this paper presented a step toward comprehensive rl environments capable of training policies on emulated networks with dynamic link behavior, paving the way for studying the application of rl to complex networking scenarios, which might be impractical or costly in real network environments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/619.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/619.txt new file mode 100644 index 0000000000000000000000000000000000000000..de0929140d3f5cd3104b6c5fef87858d5ec835f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/619.txt @@ -0,0 +1 @@ +c 3al (fig. 2) is centered around three agents -the learner, the system (sul), and the reviser -and the interfaces between them. the learner plays the same general role as in mat. crucially, any mat learner can be used in c 3al (e.g. l ⋆ , kv, ttt, l # ). the learner does not have to store the information obtained from tests on the system. it focuses on the questions "what is the next query to make?" and "how is the hypothesis built?". the system is the system under learning, together with its environment (e.g. noise). the reviser handles knowledge and conflicts. it answers the question "what do we know about the system?". it is set between the learner and the system with interfaces to both of them. c 3al is designed to improve the practical learning of reactive systems like mealy machines. as such, it makes use of features that are core to such models, like causality and closure under inputs and outputs. however, the main ideas behind c 3al's philosophy and separation of concerns can be adapted to learn other types of automata, such as acceptors like dfas.remark 2. the reviser acts as a mat teacher w.r.t. the learner, answering membership and equivalence queries, with the added ability to prune the learner to place it in a state coherent with the reviser's information. on the system's view, the reviser acts as a tester, providing input sequences (tests) and recording the system output. the outside views of the learner and system in c 3al are illustrated below. the interfaces on the learner side are similar to mat: the learner can perform membership queries (mq) and equivalence queries (eq) on the reviser, with the latter potentially resulting in a counterexample (ce). note that the queries are sent to the reviser and not directly to the sul: a crucial design choice. this allows us to control the information that the learner obtains, and reuse the information in the reviser with no new tests. 
formally, c 3al provides the following functions as module interfaces:• mq : σ * → (γ * ∪ {prune}) the membership query of the learner that the reviser has to implement.it varies from the mat function as the reviser may return a command to prune the learner's state instead of an output.• eq : mealy → ((σ * × γ * ) ∪ {prune}) the equivalence query of the learner that the reviser has to implement. it may return "prune " instead of "yes".• system : σ * → (σ * × γ * ) is a call to the system for a specific test. the system returns the corresponding behavior (input and output), with the effect of noise applied.in the interface mentioned above, an eq can never return "yes" as in mat. this work is left to the reviser, that will halt the learning process according to the termination criterion chosen (see section 3.2). the prune signal does not require us to modify the code of a mat learner, as it can be implemented by restarting the learner without requiring further access to the learner's internals. the reviser's caching of observations ensures that this operation does not add to the query complexity of the process.remark 3. the main cost of learning comes from unit interactions with the system -each individual symbol that is inputted into or outputted by the system -as these tests are generally costly to perform and that cost cannot be compensated. a mealy machine over alphabets (σ, γ) is a tuple m = (q, q 0 , δ , λ ) where q is a finite set of states, q 0 ∈ q is the initial state, δ : q × σ → q is a transition function and λ : q × σ → γ an output function. given an observation tree t , we define the following functions to access and modify t : note that the function update is the only one that alters the tree and handles conflicts.}| as the number of observations of which (i, o) is a prefix in an observation stream (i k , o k ) k∈n considered at step k. we evaluate this through the following research question: across different realistic model learning targets, types of noise, and noise levels, does c 3al provide a better learning environment in terms of both success rates and number of tests issued when compared to the state-of-the-art mat-based approaches?. a most frequent observation tree is defined as a state-weighted partial tree-shaped non-deterministic mealy machine over alphabets σ and γ with the following structure: t = (q, q ε , δ , ω), where ω : q → n is the state weight function and δ : q × σ → 2 q×γ is the transition and output function. when the next observation (i k , o k ) is processed by update t the algorithm progresses through (i k , o k ) with the main for loop. λ (state, i k ) is undefined, then a new node is created and the transition and output functions defined according to o k.in this case, the subtree rooted in λ (state, i k ) is suppressed, replaced by a new node and the transition and output functions defined according to o k. notice that this case corresponds exactly to (i k , o k ) (i, o) for all (i, o) previously stored in the suppressed subtree, with i kthe witness prefix.by combining the two previous cases, we have that, noting t k-1 the observation tree before step k and t k the observation tree after step k:. we make this proof by induction on the observation stream by considering the implementations of lookup and update for the most recent update strategy. when the next observation (i k , o k ) is processed by update t the algorithm progresses through (i k , o k ) with the main for loop. 
o k is the output with most weight associated to i k , the only modification is the increment of the weight of the states visited, which already are the maximal weight choice, thus l t is not modified. in this case, notice that l t := l t ∪ {(i k, o k) | j ≤ l ≤ |i|} only if the main branch was not left before. if this part of the tree is reached by calls to lookup) and the state of maximal weight appearing in δ (state, i k ) changes due to the increment, the subtree reached by lookup changes based on the new maximum. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/62.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/62.txt new file mode 100644 index 0000000000000000000000000000000000000000..a12a73d8d588bb37baecf760433b2ccbad352961 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/62.txt @@ -0,0 +1 @@ +the current direction in artificial intelligence, and more specifically deep learning (dl), is clearly to orders of magnitude more compute , reaching high-performance computing (hpc) scale. that means more energy, which comes from sources such as fossil fuels, nuclear power, water dams, wind, etc.. fossil fuel is the main source, contributing to 36% 1 in the total energy sources mix. fossil energy emits a significant amount of co2 into the environment. under all of these observations, it is therefore important to monitor the energy consumption of dl, to master its energy demand and attenuate its contribution to climate change.typical dl research focuses mainly in the quality of the predictions of a trained dl model. as deep learning being a significant part of ai, understanding dl and its energy consumption will build us the path to better balance computing and energy resources needed for its proper operation, and thus being less energy demanding. 1 https://ec.europa.eu/eurostat/cache/infographs/energy/bloc-3b.html?lang=en arxiv:2212.00582v1 14 nov 2022 this paper is a step towards building this path. it addresses the following challenges:-for deep learning running in hpc platforms, how much energy are the current popular and widely used dnns consume?-is it accurate to say that: more complex models will cost more energy?-does the model give higher accuracy, more energy will be consumed?in this work, we instrumented a deep learning benchmark with a software energy measurement tool to output the benchmark-tracker, which tracks the energy consumption of dnn models insight the dl benchmark. the results from running experiments with the developed instrument give us a better understanding of today's energy consumption for the widely used dnn models. from those insights, we can expand to further and more in-depth future studies on energy consumption issues. the available version of benchmark-tracker is on github2 .we organized the remaining of this paper in the following manner: section 2 presents the related works. in section 3 we present some preliminary background information. section 4 briefly presents the instrumentation details to implement benchmark-tracker, and section 5 present some preliminary results of our tool. 
finally, in section 6 we present our concluding remarks and our planned future works.2 related work as deep learning being a significant part of ai, understanding dl and its energy consumption will build us the path to better balance computing and energy resources needed for its proper operation, and thus being less energy demanding.in this work, we instrumented a deep learning benchmark with a software energy measurement tool to output the benchmark-tracker, which tracks the energy consumption of dnn models insight the dl benchmark. besides focusing on reducing energy consumption in the training process, one of the possibilities is that we can have better energy management once its energy consumption can be modeled and predicted.our paper situates in two fronts: (i) our paper bridges ai benchmarks and energy measurement tools, giving an out-of-the-box tool to help hpc dl developers to better balance the hpc infrastructure in terms of speed and energy consumption, and (ii) we go beyond only evaluating the prediction quality of ai models and algorithms, but we also evaluate the energy/complexity/performance trade-offs of popular ai models and algorithms, taking into consideration several hpc hardware. the benchmark relies on the tensorflow energy consumption and carbon emissions in hpc platforms when we measure the energy consumption for training an ai model running in hpc platforms, we also have to consider the additional power used to run the platform that is not directly related to computing, such as cooling.the quantity of carbon emission is just as substantial as the energy consumption because the higher the carbon intensity, the more polluting the energy consumption.through the following example, we see how one can track the energy consumption by covering the process with the experiment-impact-tracker: the experiment-impact-tracker launches a separate python process that gathers the energy consumption information in the background. from there, we instrumented the experiment-impact-tracker in these code regions, intending to be able to measure not only the hardware benchmark from the aibenchmark but also the energy consumption of each model running in this benchmark. the dl training process involves inference, because each time an image is fed into the dnn during training, the dnn tries to classify it.also, to give the reader an impression of the connection between the structure of the dnns and the energy consumption, we briefly present the main ideas of the evaluated models for the image classification task:.figure1presents the energy consumption in the inference process per image for classification, which shows us the amount of energy it will cost each time we input one more image into the trained relative model. from figure2, 3, the complexity of the models explains that inceptionresnet-v2, with twice as many parameters for the model as inception-v3, and inception-v4, has the highest energy consumption level.for a certain ai task (in our case example, for the image classification task), more complex dnn models can consume more energy, emit more co2, and take longer time than simpler models to train. if we reduce or increase the dataset size during training, we can consider how much energy the training will consume and how much accuracy we will get for each model to compare. we can control the training process for a specific objective by stopping the training when the total energy (measured by an energy measurement library) passes a defined budget. 
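the budget-controlled training idea mentioned above can be sketched in a few lines; read_energy_joules is a hypothetical stand-in for whatever the energy measurement library reports, and the training loop itself is generic rather than tied to benchmark-tracker.

# sketch of budget-controlled training: stop when measured energy exceeds a budget.
# read_energy_joules() and train_step() are hypothetical callables supplied by the caller.
def train_with_energy_budget(model, batches, budget_joules, read_energy_joules, train_step):
    start = read_energy_joules()
    for step, batch in enumerate(batches):
        train_step(model, batch)                        # one optimisation step
        consumed = read_energy_joules() - start
        if consumed >= budget_joules:                   # budget exhausted: stop early
            print(f"stopping at step {step}: {consumed:.1f} J used of {budget_joules} J")
            break
    return model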
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/620.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/620.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e5d36472689f61a0fb1a2bcae983a2536e83807 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/620.txt @@ -0,0 +1 @@ +electronic health records (ehr) have become a cornerstone of healthcare research, encompassing a wealth of data crucial for clinical decision-making, epidemiological studies, and personalized medicine. however, the ubiquity of missing data in ehr poses challenges, potentially skewing analyses and causing sub-optimal patient outcomes. addressing such issues necessitates advanced imputation methodologies that not only impute missing values but also preserve inherent data structures and relationships. this paper proposes a self-attention method (sesa), an imputation approach that synergistically enhances structural equation modeling (sem) with self-attention mechanisms. inspired by the success of transformer models in capturing long-term dependencies, the proposed sesa leverages self-attention to dynamically capture latent structural and relational dynamics within ehr data, enabling a highly nuanced and context-aware imputation. such enhancement empowers sesa to adaptively focus on the most relevant factors in each imputation task, transcending the limitations of traditional static imputation methods. central to sesa's design is the recognition that data missingness in ehr is rarely random but often structurally patterned and influenced by underlying health conditions, healthcare processes, and systematic data collection strategies. by employing sem, sesa meticulously maps latent structures, facilitating a hypothesis-driven approach to understanding interdependencies among various health variables. the integration of self-attention further enhances structural modeling by dynamically weighing the importance of different variables, allowing sesa to adapt its imputation strategies based on specific context and data characteristics. comprehensive empirical analyses of sesa across diverse datasets and missing data scenarios demonstrate its superior performance compared to established imputation methods. sesa consistently achieves reliable results, producing accurate and coherent imputations that align with underlying data distributions. moreover, the application of sesa to various ehr datasets, spanning general health parameters to specific clinical indicators, highlights its versatility and robustness in handling the complexities and heterogeneity of healthcare data. by bridging the gap between statistical modeling and deep representation learning, sesa marks a reasonable leap forward in the development of imputation techniques for healthcare data. sesa is a powerful tool for researchers and practitioners seeking to mitigate the impacts of missing data in ehr by capturing complex data dependencies, adapting to diverse missingness patterns, and integrating domain knowledge. with its strong theoretical foundation and empirical effectiveness, sesa has the potential to become a promising enabler for reliable, data-driven decision-making in healthcare and other fields.
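ahead of the formal description that follows, a minimal numpy sketch of attention-based imputation over a single patient record; the per-feature weights, initialisation and masking scheme are illustrative assumptions only, and sesa itself additionally initialises missing entries from sem/fiml estimates.

# minimal, illustrative attention-based imputation over one patient record.
# weights and masking are assumptions for exposition, not the sesa implementation.
import numpy as np

def attention_impute(x, missing_mask, wq, wk, wv):
    """x: (d,) feature vector with missing entries pre-filled (e.g. column means);
    missing_mask: boolean (d,), True where a value was originally missing;
    wq, wk, wv: (d,) per-feature scalings standing in for learned projections."""
    q, k, v = wq * x, wk * x, wv * x
    scores = np.outer(q, k) / np.sqrt(len(x))            # (d, d) pairwise relevance
    np.fill_diagonal(scores, -np.inf)                    # a feature may not attend to itself
    attn = np.exp(scores - scores.max(axis=1, keepdims=True))
    attn /= attn.sum(axis=1, keepdims=True)
    reconstructed = attn @ v                             # re-estimate every feature from the others
    return np.where(missing_mask, reconstructed, x)      # keep observed values, impute the rest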
from a statistical learning perspective, sesa can be viewed as a model that learns a mapping from observed to complete data, minimizing expected losses over joint distributions of observed and missing values.by learning attention weights that capture the dependencies between variables, sesa effectively maximizes the mutual information between the observed and missing data. by combining the structure before sem with the flexibility and efficiency of the self-attention mechanism, sesa provides a principled approach to learning from incomplete ehr data, minimizing the expected loss and maximizing the mutual information between observed and missing values. let x = (x obs , x mis ) denote a complete data matrix, where x obs and x mis represent the observed and missing data, respectively.3) self-attention mechanism: the self-attention mechanism is a key component of sesa, enabling a model to dynamically focus on relevant features and capture long-range dependencies in data.by focusing on the most relevant portions of data, the selfattention mechanism captures complex patterns and dependencies to effectively maximize the mutual information between observed and missing data. in considering missing data, observed and missing data are treated as two random variables. conversely, the self-attention mechanism learns how to utilize observed data to predict missing data, thereby minimizing the uncertainty of missing data for the given observed data. given a data point x with missing values, sesa initializes the missing values using fiml estimates based on the learned sem parameters.algorithm 1 shows the algorithmic implementation of sesa, integrating sem, fiml, and the self-attention mechanism into a unified framework for missing data imputation. fiml estimation proves robustness in small data environments, providing a solid foundation for self-attention, which leverages the learning strengths of neural networks in medium-sized data environments to uncover potential deep relationships in data, thereby optimizing imputation results. by synergistically integrating sem and self-attention mechanisms, sesa offers a principled and adaptive approach to imputation that captures the complex relationships and dependencies among variables in ehr data. the complexity of variable relationships in ehr data poses challenges for conventional correlation analysis and sem model testing in uncovering deeper connections within the data.grounded in statistical learning and information theories, sesa leverages sem to capture complex relationships among variables in ehr data, while the self-attention mechanism enables adaptive learning of intricate patterns. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/621.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/621.txt new file mode 100644 index 0000000000000000000000000000000000000000..869d9b58ae0a82e9888ea22059aac0151f1bac7a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/621.txt @@ -0,0 +1 @@ +time series anomaly detection (tsad) is a highly prevalent research area, with success stories in multiple application domains. however, solutions developed in one domain do not directly translate to success in others, and as a result tsad has been explored with broad perspectives. one of the most successful applications of large-scale tsad has been in monitoring e-commerce, where it provides essential health check support for production services. 
indeed, in the case of webscale service monitoring, tsad is often the only recourse to assure service reliability since humans do not have the capacity or ability to monitor millions of fragmented time series.as well as the sheer volume of time series to be monitored, the diversity of time series could also preclude the effectiveness of an off-the-shelf tsad algorithm. this diversity can manifest in many forms, including data type, sam- 1 work conducted during an internship at amazon prime video, uk. pling rate, seasonality and count levels. each of these can present at different degrees, and together create an implicit spectrum of data with varying performance characteristics. large datasets may consist of mixtures of multiple long-tail effects, each of which carries unique accuracy profiles. however, performance evaluation rarely stratifies datasets across these important traits. a specific failure case on a low-count time series is illustrated in figure 1. here, a forecasting-based ad model is unable to distinguish an obvious anomaly at t ∈ since its signature falls within the expected prediction bands. conversely, the anomaly at t ∈ is detectable. as demonstrated by this simple example, global average performance may lead practitioners to have optimistic performance expectations in these contexts, unless specific attention is given to these edge cases. the literature on tsad is varied and prolific, but the notion of what constitutes an anomaly is often only loosely defined. as a result, even thorough surveys and benchmarks may not accurately depict how well different methods perform in specific contexts. our work in this paper focuses on improving model performance along the long-tail of lowcount seasonal time series. while low-count time series may be perceived as describing services that are less important or significant, these often constitute a heavy tail in the distribu-tion of time series, resulting in a substantial portion of data having lower monitorability. moreover, collections of time series may exhibit intermittent behaviour at granular time intervals in spite of tracking business-critical services, such as sales of expensive items, user conversion, etc.to the best of our knowledge, no previous work has specifically paid attention to how tsad methods perform in low-count time series. in this paper, we are interested in assessing the resilience of ad algorithms across the full spectrum of high-to low-count metrics, where the overlap between expected and anomalous data distributions increases the complexity of accurate and timely detection. in particular, (i) we introduce a novel data simulation procedure for seasonal time series having different count levels and anomalous segments. (ii) we leverage this to construct a benchmark exploring disparities in performance, and (iii) recommend a post-hoc strategy based on anomaly score smoothing which improves performance without compromising time-todetection. finally, (iv) we outline general guidance on how our contributions can be used in new contexts.as well as the sheer volume of time series to be monitored, the diversity of time series could also preclude the effectiveness of an off-the-shelf tsad algorithm. while low-count time series may be perceived as describing services that are less important or significant, these often constitute a heavy tail in the distribu-tion of time series, resulting in a substantial portion of data having lower monitorability. 
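the post-hoc smoothing strategy referred to in contribution (iii) above can be sketched in a few lines; the smoothing factor and the threshold below are illustrative choices, not the values used in the paper.

# exponential moving average (ema) smoothing of a stream of anomaly scores,
# applied post hoc before thresholding; alpha and the threshold are illustrative.
def ema_smooth(scores, alpha=0.2):
    smoothed, s = [], None
    for x in scores:
        s = x if s is None else alpha * x + (1 - alpha) * s
        smoothed.append(s)
    return smoothed

raw = [0.1, 0.9, 0.2, 0.1, 0.95, 0.97, 0.9, 0.15]    # noisy per-step scores (toy example)
flags = [s > 0.6 for s in ema_smooth(raw)]           # anomalies flagged on smoothed scores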
moreover, collections of time series may exhibit intermittent behaviour at granular time intervals in spite of tracking business-critical services, such as sales of expensive items, user conversion, etc. while these provide helpful insights for solving general time series monitoring problems, recommendations from these studies do not specifically address challenges related to low-count time series.propose a multi-granular approach for detecting demand changes in intermittent data, which prove effective for their specific problems.one of the primary reasons for which ad in low-count time series has been underexplored is the lack of an established benchmark dataset covering the required diversity of countbased time series.forecasting methods: assuming the use of a probabilistic forecasting technique, methods in this category generate a predictive distribution p t (•) for each future time step, which is then compared to the incoming actual data x t via a scoring function that emits an anomaly score.forecasting methods achieve the best performance across all anomaly severity levels in the highest-count time series. however, their performance degrades very quickly for low-count time series, approaching random performance at count levels below 2 6 . our hypothesis for why this happens follows a similar reasoning to matrix-profile -anomalies would look similar to normal areas for low-count time series, and a reconstruction model trained on normal areas may just as effectively reconstruct anomalous areas. in light of these results, we recommend anomaly score smoothing with ema as a means to improve performance on low-count time series. given that we no longer have full control on the count levels, we grouped the time series into four bins of count levels to allow us to stratify and better visualise the results. for the low-count time series, the smoothing stabilises the noisy anomaly scores that would otherwise hinder the use of thresholds for distinguishing between normal and anomalous data. on the other hand, for high-count data smoothing introduces a lagged effect to the anomaly scores beyond the actual anomaly area, that could be misinterpreted as false positives when using performance metrics with no temporal tolerance. given that the time series in the sub-sampled m5 dataset are much shorter than our synthetic data, repeated experiments with different anomaly injections lead to higher variance in the results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/622.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/622.txt new file mode 100644 index 0000000000000000000000000000000000000000..e86c6192d630b4eb632c821530444338b051fff9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/622.txt @@ -0,0 +1 @@ +reinforcement learning (rl) has produced a series of breakthroughs over the last decade from exceeding human proficiency at playing simple games such as in the atari suite (mnih et al. 2015) to go (silver et al. 2016) and starcraft (vinyals et al. 2019), and to protein structure prediction systems (jumper et al. 2021), etc. a fundamental premise that the 'reward is enough' (silver et al. 2021) underlies all of such rl methodology. and yet, in most problems, a natural reward function is not available. nor it may be possible to engineer one from intuition. 
thus, a lot of effort is spent on reward shaping (ng, harada, and russell 1999) to make rl algorithms work, often without success.this problem is particularly acute when humans are in the loop, either as demonstrators or as evaluators. often, demonstration data comes from human experts and it is impossible to infer precisely what reward function human experts really have in mind while taking actions. to be fair, several inverse rl (irl) algorithms such as maxentropy-irl (ziebart et al. 2008) use a methodology wherein a reward function is first inferred from the demonstration data, and then used in conjunction with rl algorithms to design near-optimal policies. this has two lacunae. first, the performance of the rl algorithms can be very sensitive to errors in the reward function estimate. and second, the expert may not be using a policy that is optimal with respect to any reward objective at all! there is thus a need to develop imitation learning algorithms that do not depend on reward inference as a first step (arora and doshi 2021).behavioral cloning (bc) is a simple and natural idea for imitation learning (pomerleau 1988). it is a supervised learning scheme that aims to learn the expert policy as a map from states to actions. in fact, it largely ignores the inherent sequential nature of reinforcement learning problems. unfortunately, it suffers from severe covariate shift issues as it fails to generalize to less-visited parts of the state space and ignores the sequential decision-making aspect. this results in propagation of errors in the agent's performance (ross and bagnell 2010) resulting in a limited practical ability to generalize effectively.to address the issue of compounding errors that can afflict methods like bc, irl algorithms (abbeel and ng 2004;ng and russell 2000;syed and schapire 2007;ziebart et al. 2008;levine, popovic, and koltun 2011) take a different approach by learning a reward function that takes into account that transitions come from trajectories. but this requires use of reinforcement learning algorithms, making them extremely expensive to run, and at the same time sensitive to reward function estimation errors.in contrast, adversarial imitation learning (ail) is a technique centered around distribution matching through adversarial learning. it gained significant traction in the recent past as an approach to imitation learning (ho and ermon 2016;fu, luo, and levine 2018;ke et al. 2021). within this framework, the objective transforms into finding a behavioral policy that minimizes the divergence between the target distribution and the distribution of state-action pairs generated by the behavioral policy during its interaction with the environment. the primary drawback of current distribution matching methods via ail is that estimating distribu-tion density ratios, a crucial step in each iteration, usually demands samples from the behavioral policy distribution. consequently, new interactions with the environment are necessary for every behavioral policy update iteration. this limitation makes these algorithms unsuitable for problems where only offline data is available. this downside is even more apparent in continuous state and action space problems wherein each visited state is visited at most once, with most states not being visited at all in the demonstration data.a related strand of literature on imitation learning (ross, gordon, and bagnell 2011) assumes access to a generative model so trajectory data can be generated on the fly. 
we make no such assumption in our problem formulation.in this paper, we aim to develop imitation learning algorithms that do not need reward-feedback, do not rely on distribution matching between occupation measures, are not doing behavioral-cloning but use the 'meta' knowledge that the underlying dynamics are markovian, do not need access to a generative model, work for continuous state space problems, and allow for batch processing of the offline dataset. this version of the imitation learning problem is relevant in many real-world decision-making applications, such as medical, healthcare, robotic manipulations, and autonomous vehicles (le mero et al. 2022), where experimentation is either costly or unsafe.we introduce a simple and natural framework based on a simple premise. namely, that the demonstration trajectory data satisfies the balance equation between the demonstration policy, the markov decision process (mdp) transition density and that of the induced markov chain. this allows us to incorporate the fact that the demonstration data is coming from a system with markovian dynamics. the transition densities for the mdp and the markov chain are then estimated using conditional kernel density estimators which are universal density estimators with proven asymptotic consistency properties. we start with the discrete state and action space setting, but then show that the framework extends to the continuous state space setting as well. we prove consistency properties of the estimators, and validate the algorithm in a series of continuous state space problems. conceptual extension to continuous action space is straightforward but requires more work for numerical robustness.related work. we now discuss prior work broadly related to our paper. as already mentioned behavioral cloning faces a fundamental limitation due to discarding distributional insights from the demonstrations (ross and bagnell 2010;piot, geist, and pietquin 2014). to address this, several remedies have been suggested (ross, gordon, and bagnell 2011;piot, geist, and pietquin 2016) which involve either further online interactions with the environment, or the demonstrator, or using insights into model dynamics or the sparsity of rewards, all of which are in general impractical. the recent work (xu et al. 2022) aims to overcome these by using additional data from non-expert policies, which circumvents the need for additional online interactions but the additional offline data may not be available. the edm approach (jarrett, bica, and van der schaar 2020b) captures the expert's state occupancy measure by training an explicit energy-based model but its limitations has been scrutinized in (swamy et al. 2021).there also have been efforts to further develop irl approaches to overcome the limitations of earlier algorithms. (klein, geist, and pietquin 2011;klein et al. 2012) introduce lstd-µ, a temporal difference technique for calculating feature expectations. however, these approaches share the weaknesses of least squares estimators, being highly sensitive to basis features and training data distribution. (lee, srinivasan, and doshi-velez 2019a) propose dsfn, which estimates feature expectations in an off-policy setting. they also propose a transition-regularized imitation network that produces an initial policy close to expert behavior and an efficient feature representation. 
despite these advancements, the assumption of complete knowledge about reward feature functions in these methods can often be unrealistic, especially for complex problems (arora and doshi 2021). (piot, geist, and pietquin 2014) introduced rcal, a non-parametric algorithm that employs a boosting method to minimize the criterion directly without feature selection steps and can help tackle some of the above issues. (chan and van der schaar 2021a) propose avril, adopting a variational approach to jointly learn an approximate posterior distribution over reward and policy. however, due to inherent covariate shift problems, these methods encounter significant reward extrapolation errors, leading to misguided outcomes in novel environments and reduced learning efficiency. to address this, the clare (yue et al. 2023) model-based offline inverse reinforcement learning (irl) approach introduces conservatism to its estimated reward. it employs an irl algorithm within an estimated dynamics model to learn the reward. however, limitations arise when dealing with a limited number of expert demonstrations or predominantly low-quality transition samples from a behavior policy. in such cases, forcing alignment of the empirical state-action visitation measure across all data can lead to a recovered reward or policy that mimics the suboptimal behavior policy, undermining the accuracy of the expert model (zeng et al. 2023).adversarial imitation learning (ail) approaches (ho and ermon 2016) were a breakthrough when they were introduced a few years ago (blondé and kalousis 2019;kostrikov et al. 2019). however, these approaches require online interactions with the environment, and thus are not applicable when we must work only with offline data. employing a distribution matching strategy, (kostrikov, nachum, and tompson 2020a) introduces valuedice, an offline objective for assessing the distribution ratio between the imitator and expert policies. although theoretically allowing for comprehensive offline learning, the approach undertakes a complex alternating maximization-minimization optimization procedure. additionally, it suffers from difficulty estimating the expectation of an exponential that introduces bias when approximating gradients using minibatches (jarrett, bica, and van der schaar 2020b).thus, the algorithm we present in this paper is quite distinct in its approach from most of the prior literature. additionally, it demonstrates promising preliminary empirical results. to be fair, several inverse rl (irl) algorithms such as maxentropy-irl(ziebart et al. first, the performance of the rl algorithms can be very sensitive to errors in the reward function estimate.to address the issue of compounding errors that can afflict methods like bc, irl algorithms(abbeel and ng 2004;ng and russell 2000;syed and schapire 2007;ziebart et al. 2021). within this framework, the objective transforms into finding a behavioral policy that minimizes the divergence between the target distribution and the distribution of state-action pairs generated by the behavioral policy during its interaction with the environment. 
this limitation makes these algorithms unsuitable for problems where only offline data is available.in this paper, we aim to develop imitation learning algorithms that do not need reward-feedback, do not rely on distribution matching between occupation measures, are not doing behavioral-cloning but use the 'meta' knowledge that the underlying dynamics are markovian, do not need access to a generative model, work for continuous state space problems, and allow for batch processing of the offline dataset. namely, that the demonstration trajectory data satisfies the balance equation between the demonstration policy, the markov decision process (mdp) transition density and that of the induced markov chain. the recent work(xu et al. it employs an irl algorithm within an estimated dynamics model to learn the reward.in this section, we focus on the conditional kernel density estimation part of our approach and show that, as the training dataset size n approaches infinity, the ckde estimates algorithm 1: conditional kernel imitation learning (ckil) input: expert dataset of trajectories d = {(s i , a i )} n i=1 output: θ * 1: initialize policy parameter θ 2: transform dataset d into (s, a, s ′ , a ′ ) tuples, then store them in buffer b. our theoretical guarantee holds under the following assumptions (chacón and duong 2018): (a1) suppose the buffer b in algorithm 1 consists of n iid tuples (s, a, s ′ , a ′ ) generated according to a probability distribution p (s, a, s ′ , a ′ ) = µ(s, a)p π d (s ′ , a ′ |s, a), where p π d is the transition probability density of the induced markov chain on the state-action space under the demonstration policy π d (see (6)) and µ is a reference measure on (s, a). this comprehensive assessment covers a spectrum of methodologies, including the inherently offline behavioral cloning (bc); valuedice (vdice), a sample-efficient ail approach designed for offline scenarios by removing replay regularization; rewardregularized classification (rcal), a large margin classification approach, which introduces a sparsity-based penalty on inferred rewards to exploit dynamics information; energybased distribution matching (edm), an offline imitation learning algorithm that captures the expert's state occupancy patterns through explicit training of an energy-based model; avril, a recent model-free offline irl technique employing a variational approach to simultaneously learn an approximate posterior distribution over rewards and policies; and deep successor feature network (dsfn), an offline adaptation of the max-margin irl algorithm that transcends linear approaches by introducing a deep network architecture and employing least-squares temporal-difference learning to produce both reward and policy outputs.in this paper, we introduce conditional kernel imitation learning (ckil), a simple but novel approach to imitation learning for continuous state space problems. furthermore, it does not need access to a generative model or further online interaction data, does not first need reward inference, does not do distribution matching, and allows for batch processing of offline data for scalability. 
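to illustrate the conditional-kernel idea behind algorithm 1, the sketch below approximates the demonstration policy by kernel-weighting demonstrated actions according to how close their states are to the query state; the gaussian kernel, the fixed bandwidth and the discrete-action simplification are assumptions for exposition, not the ckil implementation.

# illustrative conditional kernel estimate of the demonstration policy pi(a|s):
# actions observed near the query state receive more weight.  kernel choice,
# bandwidth and greedy action selection are simplifications for exposition.
import numpy as np

def kernel_policy(query_state, demo_states, demo_actions, n_actions, bandwidth=0.5):
    d2 = np.sum((demo_states - query_state) ** 2, axis=1)   # squared distances to demo states
    w = np.exp(-d2 / (2.0 * bandwidth ** 2))                # gaussian kernel weights
    probs = np.zeros(n_actions)
    for a, wi in zip(demo_actions, w):
        probs[a] += wi
    if probs.sum() == 0:                                    # query far from all demonstrations
        return np.ones(n_actions) / n_actions
    return probs / probs.sum()

# usage: act = np.argmax(kernel_policy(s, S_demo, A_demo, n_actions=4))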
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/623.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/623.txt new file mode 100644 index 0000000000000000000000000000000000000000..65bd236c0b732c1621ecb4147d671bd0013c5d3e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/623.txt @@ -0,0 +1 @@ +patient similarity can be estimated by calculating the distance between the vectors representing patient characteristics (e.g., diseases and medical events) .in clinical practice, healthcare professionals often use specific disease characteristics and adverse events to group similar patients to facilitate predictive clinical and managerial decision-making . however, due to variations in patient conditions and treatment needs, more robust methods need to be developed and introduced to support decisions, as illustrated by on the need for personalized patient care in icu during the covid-19 pandemic.machine learning has great potential to guide clinical practice . for instance, machine learning-based prediction models can be used for a wide range of clinical applications, including predicting the risk of in-hospital mortality and physiologic decline, estimating hospital length of stay, and classifying phenotype . when used on larger medical datasets, traditional machine learningbased prediction models are limited by their simple architecture design, making personalized prediction extremely challenging. deep personalized prediction models based on similar patients have emerged to deal with the challenge. these models are trained using information from similar patients and thus have the potential to identify risk factors associated with individual patients.increased application of graph neural networks (gnns) has been observed across a number of research fields in recent years, such as social recommender systems , bioinformatics , and knowledge graphs . gnn is a type of neural network for dealing with graph-structured data . when used on high-dimensional or complex data, very deep gnns can be constructed by stacking multiple graph convolutional layers. in gnns, each graph's convolutional layer aggregates information from neighboring nodes and edges using a message-passing strategy. at each gnn message-passing iteration, each node aggregates information from its neighborhood, and as these iterations progress, each node embedding reaches out further in the graph to extract global information. by doing so, both local and global information from the graph is taken into consideration for generating useful node and graph-level representations for various downstream predictions.most research on gnns has been carried out on pairwise relationships of objects of interest . previous studies by have demonstrated the effectiveness of gnn-based models on patient similarity computation. these studies mainly focus on using gnns to learn the representation of pairwise interaction between two patients for downstream applications. however, in many real medical applications, the relationships between patient characteristics (e.g., diagnosis codes) are beyond what a pairwise formulation can represent.recently, hypergraphs have been utilized for the modeling of a wide range of systems where high-order relationships exist among their interacting parts. current studies have demonstrated the effectiveness of hypergraphs in encoding high-order data correlation . 
motivated by these successful applications, in this paper, we aim to capture the non-pairwise relationships among patient characteristics by modeling structured electronic health record (ehr) data 7 with the utilization of a hypergraph convolutional network (figure 1). the intuition behind introducing hypergraph convolutional network can be explained as seeing the need to specify prior medical knowledge (i.e., previous icd-9 diagnosis codes) that is in the form of higher-order relationships in a hypergraph. based on the foundation established by hypergraph representation learning, we conduct patient similarity computation and then aggregate the information from similar patients as we analyze patient graphs. to demonstrate the efficacy of the proposed method, we conduct the experiments on predicting in-hospital mortality risks for icu patients using the publicly available eicu collaborative research database .our contributions are as follows:-we introduce hypergraph convolutional network for fine-grained icu patient similarity computation. to the best of our knowledge, this is the first study that uses a tailored hypergraph convolutional network for similarity computation among patients in icu settings. -we evaluate our method against deep prediction methods on the publicly available eicu collaborative research database, and the results surpass the state-of-the-art models in auroc, auprc, precision, f1 score, and min(se, p+). moreover, we demonstrate the advantages of our method in terms of effectiveness and robustness of decisions with several case studies. for instance, machine learning-based prediction models can be used for a wide range of clinical applications, including predicting the risk of in-hospital mortality and physiologic decline, estimating hospital length of stay, and classifying phenotype. motivated by these successful applications, in this paper, we aim to capture the non-pairwise relationships among patient characteristics by modeling structured electronic health record (ehr) data 7 with the utilization of a hypergraph convolutional network (figure1). based on the foundation established by hypergraph representation learning, we conduct patient similarity computation and then aggregate the information from similar patients as we analyze patient graphs. to demonstrate the efficacy of the proposed method, we conduct the experiments on predicting in-hospital mortality risks for icu patients using the publicly available eicu collaborative research database. to the best of our knowledge, this is the first study that uses a tailored hypergraph convolutional network for similarity computation among patients in icu settings. -we evaluate our method against deep prediction methods on the publicly available eicu collaborative research database, and the results surpass the state-of-the-art models in auroc, auprc, precision, f1 score, and min(se, p+). similarly,proposed an ensemble machine learning framework (empicu random forest) to predict the mortality risk of patients based on data from the first 24 hours and 48 hours after icu admission. based on the foundation established by hypergraph representation learning, we conduct patient similarity computation and then aggregate the information from similar patients as we analyze patient graphs. 
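for intuition, a dense numpy sketch of one hypergraph convolution layer over a patient-by-diagnosis-code incidence matrix, following the commonly used normalised formulation x' = relu(dv^{-1/2} h w de^{-1} hᵀ dv^{-1/2} x θ); this is the standard layer from the hypergraph-convolution literature and may differ from the exact variant used here.

# dense sketch of one hypergraph convolution layer.  H is a patient-by-code
# incidence matrix (H[i, j] = 1 if patient i carries diagnosis code j) and X
# holds patient features; the normalisation follows the common formulation.
import numpy as np

def hypergraph_conv(X, H, Theta, edge_w=None):
    n_nodes, n_edges = H.shape
    w = np.ones(n_edges) if edge_w is None else edge_w
    dv = H @ w                                        # weighted node degrees
    de = H.sum(axis=0)                                # edge degrees
    Dv_inv_sqrt = np.diag(1.0 / np.sqrt(np.maximum(dv, 1e-12)))
    De_inv = np.diag(1.0 / np.maximum(de, 1e-12))
    A = Dv_inv_sqrt @ H @ np.diag(w) @ De_inv @ H.T @ Dv_inv_sqrt
    return np.maximum(A @ X @ Theta, 0.0)             # relu activation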
we focused specifically on predicting the in-hospital mortality risk of patients based on the data from the first 24 hours and 48 hours after eicu admission.tables2and3(below) provide the results of in-hospital mortality risk prediction based on the data from the first 24 hours and 48 hours after eicu admission. the superior performance of our than the our α verifies the efficacy of hypergraph convolutional networks, which can capture the higher-order relationships among icd-9 diagnosis codes and thus improve the prediction performance. moreover, the prediction performance of all methods improved significantly as the prediction window from the first 24 hours to the first 48 hours after eicu admission. for instance, from the data in table4, group i in chf patients reported significantly more auroc, auprc, precision, f1 score, and min(se, p+) scores than group i in diabetic patients. to the best of our knowledge, this is the first study that uses a tailored hypergraph convolutional network for similarity computation among patients in icu settings. experiments manifest that our proposed method has higher auroc, auprc, precision, f1 score, and min(se, p+) against state-of-the-art deep prediction models on the publicly available eicu collaborative research database. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/624.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/624.txt new file mode 100644 index 0000000000000000000000000000000000000000..971bfe9ad1eb4460b6f259726be4e0c1092c3be9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/624.txt @@ -0,0 +1 @@ +unplanned downtime of advanced technical systems such as aircraft, lithography systems, or rolling stock, is extremely costly for companies that rely on these systems in their primary processes. as such, these companies typically have agreements with maintenance service providers -external or internal -to ensure sufficiently high availability of their systems. recent advancements in information technology along with continuous reductions in costs of sensors have led to ample opportunities for service providers to improve their maintenance operations . indeed, modern systems are now increasingly equipped with sensors that relay degradation data of critical components in real-time to maintenance decision-makers. this data is useful for inference of system degradation behavior; however, the amount of data that each such system generates to predict failures of a particular component is scarce, especially for newly introduced systems.maintenance service providers typically maintain several systems of the same type (e.g. similar systems for the same customer at different locations, or similar systems for different customers). at the beginning of the life-cycle of a newly introduced system, the maintenance service provider thus faces a setting where (i) multiple systems of the same type generate a steady stream of degradation data, but at the same time, (ii) each such system alone has not yet generated sufficient amounts of data. a prime example of this can be found in the semiconductor industry, where the original equipment manufacturer itself is often also responsible for maintaining its lithography systems after they are sold. 
upon the introduction of a new generation of lithography system in the field, many critical components in this system are also used for the first time, and hence no historical degradation data is available .for the settings described above, it is evident that pooling degradation data from multiple systems can lead to cost reductions in maintenance operations. however, it remains unclear how we can precisely quantify these cost reductions, especially when we are interested in optimal decisions and the state space of the corresponding markov decision process (mdp) thus becomes large. in this paper we address this question. more specifically, we consider a maintenance service provider that is responsible for maintaining multiple systems of the same type at different locations or customers. these systems are equipped with sensory technology that relay degradation data in real-time to the service provider. as these systems are used for the first time, there is only limited information available per system at the start of their life-cycle.we consider a single component that is present in the configuration of all systems. these components deteriorate according to a poisson process with the same but unknown rate. as the components are critical, the systems fail whenever the component's degradation reaches a certain failure threshold. such failures can be prevented by performing preventive maintenance, which is cheaper than replacement upon failure, which generally leads to costly unplanned downtime. the maintenance service provider must periodically decide -based on the state of all systems and accumulated data -for each system to perform preventive maintenance or not, thereby trading off costly premature interventions with costly tardy replacements. systems are homogeneous with respect to the unknown deterioration rate, but are otherwise heterogeneous (i.e. costs and failure thresholds). we endow the unknown rate with a prior distribution and propose a bayesian procedure that is able to pool all data and learn this rate jointly on-the-fly as data becomes available. we model this decision problem as a bayesian mdp for which the optimal policy -in theorycan be computed through standard methods. however, because both the action and state space grow exponentially in the number of systems, this mdp will quickly suffer from the curse of dimensionality, making it impossible to assess the value of optimal data pooling. as a remedy, we establish a novel decomposition result that reduces this high-dimensional mdp to multiple two-dimensional mdps that permit structural analyses and computations.when components have constant failure rates, maintenance service providers typically replace these components with new spares only correctively upon failure, i.e. they apply repair-by-replacement. the underlying spare parts inventory system responsible for supplying these spares then largely determines the availability of the technical systems.as an extension, we will show that our decomposition result also applies to such a spare parts inventory system consisting of multiple local warehouses that keep spares for the same critical component whose failure rate is unknown. sequential bayesian learning based on sensory data stemming from systems has been used extensively in the maintenance literature to study optimal maintenance decision-making when relevant parameters are a-priori unknown , but only exclusively for single-component systems in isolation (we refer to for a comprehensive overview of the area). 
this makes sense when the unknown parameter is unique to the specific system in use. however, as we argued above, in practice parameters may be the same for multiple systems of the same type. when a maintenance service provider maintains several systems of the same type, as we consider in this paper, it is natural to pool data stemming from all these systems to jointly learn the common parameter.the benefit of pooling has been extensively studied in many application domains, yet almost exclusively related to pooling of physical resources. notable examples include inventory pooling in inventory networks and pooling of server capacity in queuing networks . recently, researchers have started to investigate the benefits of pooling data, mainly driven by the benefits of pooling physical resources . within the maintenance literature, only two works exist on data pooling for learning parameters . investigates the benefits of combining data from a set of heterogeneous machines in the context of time-based preventive maintenance.the authors propose a method in which limited data stemming from multiple systems can be aggregated such that it can be utilized for selecting a periodic interval at which preventive maintenance is performed for each individual system. exploits the use of data pooling to determine whether a set of systems is stemming from a so-called weak or strong population, where the former has lifetimes that are stochastically smaller than the latter. unlike , who proposes a static estimation procedure based on historical pooled data, builds a partially observable mdp that sequentially learns as more data becomes available. they numerically show -only for small instances due to the curse of dimensionality -that data pooling can lead to savings of up to 14% compared to not pooling data. we also learn from pooled data in a dynamic, sequential way, but circumvent the resulting curse of dimensionality by leveraging our new decomposition result. both and pool data to learn a time-to-failure model in a time-based maintenance setting, while we focus on learning a degradation model in a condition-based maintenance setting.the main contributions of this paper are as follows:1. we formulate the problem of optimally maintaining n systems with a common, unknown deterioration rate over a finite lifespan t as a finite horizon bayesian mdp in which data is pooled for shared learning. this formulation suffers from the well-known curse of dimensionality: the cardinality of both the action and state space grow exponentially in n. as a remedy, we provide a new decomposition result that establishes the equivalence between the original mdp and n two-state mdps with a binary action space, each focused on an individual system.2. using the decomposition, we are able to show that the structure of the optimal policy of each individual system has a control limit structure, where the control limit depends on the pooled data obtained from all systems.perhaps counterintuitively, we show that this optimal control limit is not monotone in general. although the control limit typically decreases first, it always increases and converges to the failure level when the pooled data grows very large, implying that preventive maintenance is never optimal in that asymptotic regime.3. we investigate numerically the savings that can be attained by pooling data to learn the a-priori unknown deterioration rate, while optimally maintaining the systems. 
we find that the savings can be significant, even for small values of n, and that the exact magnitude of these savings largely depends on the magnitude of the uncertainty in the parameter (measured by the variation of the initial prior distribution). when there is high uncertainty, huge savings of close to 57% can be realized on average, while these savings become almost negligible when the uncertainty is low.4. we finally demonstrate the general applicability of our decomposition result by applying it to a spare parts inventory system consisting of multiple local warehouses where a common, but unknown failure rate needs to be learned. for this setting, we establish the optimality of monotone order-up-to policies, where the optimal order-up-to levels are non-decreasing in the data obtained from all local warehouses.the remainder is organized as follows. we provide a model description in section 2. in section 3, we formulate the problem as a bayesian mdp and we show that it can be decomposed into n alternative mdps. we present some structural properties of both the expected cost and the optimal policy of the alternative mdp in section 4. in section 5, we report on an extensive numerical study that highlights the benefit of pooling data. in section 6, we apply our decomposition result to a set of spare parts inventory systems. finally, section 7 provides concluding remarks. at the beginning of the life-cycle of a newly introduced system, the maintenance service provider thus faces a setting where (i) multiple systems of the same type generate a steady stream of degradation data, but at the same time, (ii) each such system alone has not yet generated sufficient amounts of data. sequential bayesian learning based on sensory data stemming from systems has been used extensively in the maintenance literature to study optimal maintenance decision-making when relevant parameters are a-priori unknown [e. , t , starting from state (x, k) ∈ s, and let the terminal cost, v n t (x, k), be equal to the function c(x) ≜ i∈n i i (x)c i u for all k. , t , starting from state (x, k) ∈ n 2 0 , and let the terminal cost, ṽn,i t (x, k), be equal to the function.where (a) is because the direct costs can be decomposed (see (3) and (7)), (b) is due to the induction hypothesis, (c) is because of the linearity of an expectation and extracting z i from the summation, (d) is because the sum of n -1 independent negative binomially distributed random variables with r = α 0 + k and p = β 0 +n•t β 0 +n•t+1 is again negative binomially distributed with the same p but with r = (n -1) • (α 0 + k) [see, e. the next result establishes the monotonicity of the value function ṽn,i t (x, k) in x and k.by proposition 1, we also conclude that the value function v n t (x, k) of the original mdp is non-decreasing in the standard component-wise order in x, and non-decreasing in k.proposition 2 shows that the control limit at each time of each component, δ (k,t) i , depends in real-time on the shared learning process across all components through pooling data via the state variable k.since the random variable z is increasing in k (see lemma 1), one can show that p z ≥ ξ i → 1 and p z ≥ ξ ix → 1 for each x < ξ i as k → ∞.to this end, we define the performance measure ∆ = 100 1 -ṽn 0 (0,0) ṽ1 0 (0,0) , which is the percentage savings per system over the lifespan when the learning of n systems is pooled compared to not pooling any data for those systems and learning the unknown rate independently from the other systems. 
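the pooled bayesian update driving this learning can be sketched directly from the quantities above: with a gamma(α0, β0) prior on the common poisson deterioration rate and k pooled degradation increments observed across n systems up to time t, the posterior is gamma(α0 + k, β0 + n·t), and the one-step predictive for a single system is negative binomial with the r and p noted above; the helper below is illustrative.

# illustrative pooled gamma-poisson update for the common deterioration rate.
# k pooled increments across n systems up to time t give the posterior
# gamma(alpha0 + k, beta0 + n * t); the one-step predictive for one system is
# negative binomial with r = alpha0 + k and p = (beta0 + n*t) / (beta0 + n*t + 1).
def pooled_posterior(alpha0, beta0, k, n, t):
    alpha_post = alpha0 + k
    beta_post = beta0 + n * t
    return {
        "alpha": alpha_post,
        "beta": beta_post,
        "mean_rate": alpha_post / beta_post,          # posterior mean of the rate
        "nb_r": alpha_post,                           # predictive negative binomial parameters
        "nb_p": beta_post / (beta_post + 1.0),
    }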
when there is high uncertainty in the unknown parameter, pooling data allows the maintenance service provider to learn the unknown parameter faster compared to learning it from data generated by a single system. we also illustrated how our decomposition result can be applied to other settings, notably a set of canonical spare parts inventory systems, where an unknown but common demand rate needs to be learned across this set of inventory systems. recalling from lemma 1 that the random variable z is stochastically increasing in k, we find that ∆g^{n,i}_{t-1}(a, k+) ≤ ∆g^{n,i}_{t-1}(a, k-) for k+ ≥ k-. following a similar reasoning and noting that δ^{(k,t-1)}_i is the smallest a for which ∆g^{n,i}_{t-1}(a, k) ≥ 0, due to convexity of ṽ^{n,i}_{t-1}(x, k), we also find that δ^{(k+,t-1)}_i ≥ δ^{(k-,t-1)}_i for k+ ≥ k-, which yields the base case ∆ṽ^{n,i}_{t-1}(x, k+) ≤ ∆ṽ^{n,i}_{t-1}(x, k-) for part (ii) of proposition 6, since δ^{(k+,t-1)} ≥ δ^{(k-,t-1)}. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/625.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/625.txt new file mode 100644 index 0000000000000000000000000000000000000000..b90e7cf07cac63d3c857f8b25050a40c544158a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/625.txt @@ -0,0 +1 @@ +deep learning models often represent black-box models that make it difficult for human users to understand their decision processes adadi & berrada (2018); cambria et al. (2023); saeed & omlin (2023) and interact with these models schramowski et al. (2020); teso et al. (2023). one recent branch within explainable artificial intelligence (xai) focuses on the potential of so-called concept bottleneck models (cbms) koh et al. (2020); stammer et al. (2021) to tackle such issues. these are designed to be inherently human-interpretable. they perform inference (e.g., for bird image classification, cf. fig. 1 (top)) by transforming the initial raw input into a set of human-understandable concepts (e.g., wing shape and color) with a bottleneck network and provide a final task prediction based on the activation of these concepts with a predictor network. the concept activations thereby serve as an inherent explanation of the model's decision teso et al. (2023). arguably even more valuable, these concept activations can be used as a means for humans to perform targeted interactions, e.g., for querying further explanations abid et al. (2022) or to correct the model's concept prediction koh et al. (2020). a recent surge of research has focused on the benefits of leveraging interactions in ai models in general ouyang et al. (2022); miller (2019), and also cbms in particular teso et al. (2023). multiple such approaches focus on leveraging interactions for mitigating errors of the predictor network bontempelli et al. (2021); stammer et al. (2021). however, one likely reason for this is that interventions according to koh et al. (2020) represent a once-use tool for updating model performance by adding human-provided concept labels to an increasing number of randomly selected concepts. for sustainably improving a model's performance, however, this approach is inefficient and potentially demands a large number of repeated user interactions, where providing such repeated feedback has been identified to lead to a loss in focus of human users amershi et al. (2014). in this work, we therefore argue to harvest the rich information present in previously collected interventions in a multiuse approach.
specifically, let us suppose a user corrects a model's inferred concepts through a targeted intervention. in that case, the intervention carries information on where the model did not perform well. this can be used to improve prediction in similar future situations (fig. 1 bottom). in this context, we therefore introduce concept bottleneck memory models (cb2m) as a novel and model-agnostic extension to cbms. cb2m are based on adding a two-fold memory of interventions to the cbm architecture, which allows to keep track of previous model mistakes as well as previously applied interventions. this memory enables a cb2m to reapply interventions when the cbm repeats mistakes and thus automatically corrects them without the need for additional human feedback. this ultimately allows to overcome the issue of one-time interventions of standard cbms and enables the model to learn more effectively from provided human feedback. overall, however, human feedback can be unavailable, and obtaining it is costly. cb2m mitigates this issue by its ability to detect potential model mistakes prior to initial human feedback where its memory module can be used to specifically select data points for human inspection, and thus guide human feedback to where it is really needed.we illustrate the full potential of cb2m in our experimental evaluations on several challenging tasks, such as handling distribution shifts and confounding factors across several datasets. in summary, we make the following contributions: the rest of the paper proceeds as follows: in sec. 2, we provide a brief background followed by the introduction of cb2m. we present our experiments evaluations in sec. 3. afterwards, we relate cb2m to other work in sec. 4 before concluding the paper together with potential future research directions in sec. 5. cb2m are based on adding a two-fold memory of interventions to the cbm architecture, which allows to keep track of previous model mistakes as well as previously applied interventions. this memory enables a cb2m to reapply interventions when the cbm repeats mistakes and thus automatically corrects them without the need for additional human feedback. cb2m mitigates this issue by its ability to detect potential model mistakes prior to initial human feedback where its memory module can be used to specifically select data points for human inspection, and thus guide human feedback to where it is really needed.let us now introduce concept bottleneck memory models (cb2m) as a flexible extension for any cbm architecture. the bottleneck and predictor networks of the cbm remain unchanged but are extended by a two-fold memory module m which consists of a mistake memory m m coupled with a intervention memory m i . it measures the similarity between two data points x and x ′ via the euclidean distance of their encodings, d(x e , x ′ e ) = ∥x e -x ′ e ∥. the intervention memory directly keeps track of known interventions and associates them to elements of the mistake memory, meaning that the memorized intervention i can be used to correct the memorized mistake of x e . intuitively, if a data point is similar to other examples where the model has made mistakes, the model will more likely repeat these mistakes on the new data point.next to detecting model errors with the mistake memory, we can use both the mistake memory and the intervention memory together to generalize interventions. 
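the mechanics just described can be sketched in a few lines. the following is a hypothetical mock-up (not the authors' implementation): it stores encodings of corrected mistakes together with the interventions that fixed them, and reapplies the nearest memorized intervention to a new input whenever the euclidean distance in encoding space falls below a threshold; the class and parameter names are invented for illustration.

```python
import numpy as np

class InterventionMemory:
    """minimal sketch of a two-fold memory: mistakes (encodings) plus interventions."""

    def __init__(self, threshold: float):
        self.threshold = threshold          # max encoding distance for reuse
        self.mistake_encodings = []         # mistake memory M_m
        self.interventions = []             # intervention memory M_i (aligned by index)

    def add(self, encoding, intervention):
        """memorize a corrected mistake: its encoding and the concept correction."""
        self.mistake_encodings.append(np.asarray(encoding, dtype=float))
        self.interventions.append(np.asarray(intervention, dtype=float))

    def retrieve(self, encoding):
        """return the intervention of the nearest memorized mistake, if close enough."""
        if not self.mistake_encodings:
            return None
        dists = [np.linalg.norm(encoding - m) for m in self.mistake_encodings]
        best = int(np.argmin(dists))
        return self.interventions[best] if dists[best] <= self.threshold else None


# toy usage: concept vectors of length 4, threshold chosen arbitrarily
memory = InterventionMemory(threshold=0.5)
memory.add(encoding=np.array([0.1, 0.9, 0.2, 0.4]), intervention=np.array([0, 1, 0, 1]))

new_encoding = np.array([0.15, 0.85, 0.25, 0.38])   # similar to the stored mistake
print("reused intervention:", memory.retrieve(new_encoding))   # -> [0. 1. 0. 1.]
```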
specifically, whenever an intervention i is applied to a model, we store it in the intervention memory m i and keep the encoding of the original input point in the mistake memory m m . next, we go from the generalization of interventions to the second use-case for which cb2m can be deployed, namely for detecting model mistakes prior to human feedback. while detecting mistakes of the whole model covers all potential model errors, we hypothesize that detecting mistakes of the bottleneck network is more suitable for interventions, as they are tied to the bottleneck network. we have shown that the two-fold memory of cb2m can be used to generalize interventions to previously unseen datapoints, thereby overcoming the issue of current one-time intervention approaches without the necessity of further human interactions. additionally, we have demonstrated that cb2m can be utilized to detect model mistakes prior to any human interactions, allowing query humans to efficiently provide interventional feedback in a targeted manner. to check whether a newly obtained test instance x should be automatically updated with an intervention from memory, we do the following: first, the mistake x ′ with the smallest encoding distance to the new datapoint is retrieved from the mistake memory m m . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/626.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/626.txt new file mode 100644 index 0000000000000000000000000000000000000000..e28b71df66dd5347e501df7501f2ffc09f7695de --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/626.txt @@ -0,0 +1 @@ +deep multi-task learning (mtl) techniques are widely applied to real-world embedded computer vision applications. a deep mtl model explores and exploits the synergies among multiple tasks to be learned simultaneously to improve the joint performance, as well as to reduce inference time and computational costs. however, designing efficient mtl models on budgeted devices poses two major challenges. first, the model design needs to be efficient to stay compact for meeting the computational budget constraints. second, the model needs to be effective on resource sharing among multiple tasks learned simultaneously to avoid resource competition.motivated by previous research on deep linear network that even the input-output map can be rewritten as a shallow network, it nevertheless demonstrates highly nonlinear training dynamics and can help to accelerate optimisation and improve generalisation . to tackle the aforementioned challenges, we propose an overparameterised mtl method by initialising the parameters of each shared neural network layer as the product of multiple matrices following the spatial singular vector decomposition (svd) . the left and right singular vectors are trained with all task losses, and the diagonal matrices are trained using taskspecific losses. 
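as a rough illustration of this parameter-sharing idea, the hypothetical pytorch sketch below (not the authors' code; all sizes are made up) replaces a shared fully-connected layer by the product of a shared left factor, a task-specific diagonal, and a shared right factor, and folds the three back into one compact weight matrix for inference.

```python
import torch
import torch.nn as nn

class SVDSharedLinear(nn.Module):
    """overparameterised linear layer: W_t = U @ diag(s_t) @ V, with U and V shared across tasks."""

    def __init__(self, in_features: int, out_features: int, num_tasks: int):
        super().__init__()
        rank = min(in_features, out_features)
        self.U = nn.Parameter(torch.randn(out_features, rank) * 0.02)   # shared left factor
        self.V = nn.Parameter(torch.randn(rank, in_features) * 0.02)    # shared right factor
        self.s = nn.Parameter(torch.ones(num_tasks, rank))              # one diagonal per task

    def forward(self, x: torch.Tensor, task: int) -> torch.Tensor:
        weight = self.U @ torch.diag(self.s[task]) @ self.V             # task-specific weight
        return x @ weight.t()

    @torch.no_grad()
    def collapse(self, task: int) -> torch.Tensor:
        """fold the factors into one compact weight matrix for inference."""
        return self.U @ torch.diag(self.s[task]) @ self.V


layer = SVDSharedLinear(in_features=16, out_features=8, num_tasks=2)
x = torch.randn(4, 16)
print(layer(x, task=0).shape)        # torch.Size([4, 8])
print(layer.collapse(task=1).shape)  # torch.Size([8, 16]) compact inference weight
```

during training, the shared factors u and v receive gradients from every task loss while each diagonal s_t is only updated by its own task loss, loosely mirroring the split described above.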
our design is mainly inspired by analytical studies on overparameterised networks for mtl that the training/test error dynamics depends on the time-evolving alignment of the network parameters to the singular vectors of the training data, and a quantifiable task alignment describing the transfer benefits among multiple tasks depends on the singular values and input feature subspace similarity matrix of the training data.in this work, we follow the definition of overparameterisation , referring to the replacement of neural network layers by operations of the compositions of multiple layers with more learnable parameters, but without adding additional expressiveness of the network. the contribution of this work can be summarised as follows.• we propose an mtl neural network design with overparameterised training components and a compact inference architecture, applicable for embedded applications with limited computational budgets. • we implement an iterative training strategy for the proposed design that is effective and efficient for the multitask computer vision dense prediction tasks, compared to the state-of-the-art. a deep mtl model explores and exploits the synergies among multiple tasks to be learned simultaneously to improve the joint performance, as well as to reduce inference time and computational costs., 2013]that even the input-output map can be rewritten as a shallow network, it nevertheless demonstrates highly nonlinear training dynamics and can help to accelerate optimisation.• we propose an mtl neural network design with overparameterised training components and a compact inference architecture, applicable for embedded applications with limited computational budgets.we replace the fully-connected layers and/or convolutional layers of modern neural networks with overparameterisation, and share the overparameterised parameters among different tasks, to achieve higher performance for reduced inference parameter size and computational cost.fully-connected layers for any shared layer of a deep mtl model, given a weight matrix w that is shared among t tasks, we directly factorise the weight matrix w of the size m × n using svd, similar toandcoco [lin et al. the proposed method outperforms all baselines except for some tasks un- der the cross-stitch model, which has increased expressiveness by enlarging the model size (∼ 3 times larger in inference model parameter size). among these studies, expandnet[guo et al.in this paper, we propose a parameter-sharing scheme and an iterative training for deep multitask learning that effectively share parameters using overparameterised models during training, while the model architecture stayed slim and compact during inference. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/627.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/627.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1cc432fdc465d69677c1d1bb584d153ab977571 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/627.txt @@ -0,0 +1 @@ +factoring integers into primes is notoriously difficult. if you're reading this paper, chances are you already know that, but it's worth stating. the difficulty of integer factorisation is exploited to form the backbone of the ubiquitous rsa public-key cryptosystem .at present, no practical, polynomial time algorithm for integer factorisation exists which runs on a classical computer. 
however, there is no known theoretical reason why integer factorisation should be difficult, even for an algorithm which runs on a classical computer .it is conjectured that in the future quantum computers will be able to factor large, rsatype semiprimes using shor's algorithm and consequently break rsa encryption . of course, we do not know exactly when this will happen, if ever. nation states are betting on such a breakthrough and storing vast amounts of encrypted data in the hope that one day they will be able to decrypt it using such a breakthrough. this has been referred to as a harvest now, decrypt later (hndl) attack .we can track the (known) state of the art in integer factorisation using the rsa challenge numbers . these semiprimes were constructed in 1991 to encourage research into integer factorisation and track the practical difficulty of factoring large semiprimes and consequently cracking rsa keys used in contemporary cryptography . rsa-250, a 250 decimal digit semiprime with no known weaknesses, was factored in 2020 by a team lead by paul zimmermann . at present, the factorisation of rsa-260 has not been announced, nor has the factorisations of any larger rsa challenge numbers.the oldest and best known algorithm for integer factorisation is trial division, which begins by dividing-out all powers of 2, powers of 3, then divides-out odd numbers of the form 6k ± 1 for k from 1 to ⌊ √ n ⌋. trial division was first described by fibonacci in his book liber abaci in 1202. there are many modern factorisation algorithms including the continued fraction factorisation algorithm by lehmer and powers , pollard's rho and p -1 algorithms , shanks squfof (square forms of factoring) algorithm , dixon's random squares method , pomerance's quadratic sieve factoring algorithm , lenstra's elliptic curve factorisation algorithm , silverman's multiple polynomial quadratic sieve , and most recently hart's one line factoring algorithm . an excellent history of integer factorisation is given by wagstaff .while the integer factorisation problem is not known to be np-hard, there is a relatively recent precedent for using deep learning to find approximate solutions to np-hard problems. the alphafold 2 algorithm, developed by the google deepmind team in 2021, is a deep learning-based method primarily designed for predicting protein structures . the success of alphafold in predicting protein structures stems from its ability to encode complex spatial relationships between amino acids and constructing complex deep learning models to capture intricate patterns and representations from training data.at present, no practical, polynomial time algorithm for integer factorisation exists which runs on a classical computer. however, there is no known theoretical reason why integer factorisation should be difficult, even for an algorithm which runs on a classical computer.it is conjectured that in the future quantum computers will be able to factor large, rsatype semiprimes using shor's algorithm and consequently break rsa encryption. these semiprimes were constructed in 1991 to encourage research into integer factorisation and track the practical difficulty of factoring large semiprimes and consequently cracking rsa keys used in contemporary cryptography.the oldest and best known algorithm for integer factorisation is trial division, which begins by dividing-out all powers of 2, powers of 3, then divides-out odd numbers of the form 6k ± 1 for k from 1 to ⌊ √ n ⌋. 
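a minimal python sketch of exactly this trial-division scheme (strip factors of 2 and 3, then try divisors of the form 6k ± 1 up to √n) looks as follows.

```python
def trial_division(n: int) -> list:
    """factor n by trial division: powers of 2, powers of 3, then 6k +/- 1 candidates."""
    factors = []
    for small in (2, 3):
        while n % small == 0:
            factors.append(small)
            n //= small
    k = 1
    while (6 * k - 1) ** 2 <= n or (6 * k + 1) ** 2 <= n:
        for candidate in (6 * k - 1, 6 * k + 1):
            while n % candidate == 0:
                factors.append(candidate)
                n //= candidate
        k += 1
    if n > 1:          # whatever remains is prime
        factors.append(n)
    return factors


print(trial_division(2**5 * 3 * 97 * 101))   # [2, 2, 2, 2, 2, 3, 97, 101]
```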
there are many modern factorisation algorithms including the continued fraction factorisation algorithm by lehmer and powers, pollard's rho and p - 1 algorithms, shanks squfof (square forms of factoring) algorithm, dixon's random squares method, pomerance's quadratic sieve factoring algorithm, lenstra's elliptic curve factorisation algorithm, silverman's multiple polynomial quadratic sieve, and most recently hart's one line factoring algorithm. devised by the ingenious pierre de fermat in 1643, fermat's method is a factorisation algorithm based on representing an odd integer, n, as the difference of two squares. if p and q are primes, and n = p q, then fermat's algorithm is quite efficient if p/q is near 1 (hence p, q are close to √n), but the number of trials required quickly grows if p/q is not close to 1. for example, consider primes p, q and semiprime, n = p q, generated in python 3 by computing p = gmpy2.next_prime(2**n_bits) and n = p*q, where p, q are n_bits-bit primes and n_lsb_bits is used to modify the difference between p and q. as a way to empirically estimate how close p/q must be to 1 to practically factor n, we can compute the number of iterations fermat's algorithm takes to compute a factor of n as a function of n_lsb_bits/n_bits. in 1895, lawrence extended fermat's algorithm: when p/q is not close to 1, but we have u, v such that u/v is sufficiently close to p/q, then we can use fermat's algorithm to factor u·v·n. as an example, given primes p = 33059500175075655435169 and q = 22642302873041910393781, with n = p q = 748543215795445052722625573101291605706283989, we cannot immediately factor n using fermat's algorithm; however, if by some means we have the approximation p/q ≈ 210381/144089, then lawrence's extension becomes applicable. unless the binary classification model is substantially improved, it is unlikely that this algorithm will result in a scalable, practical factorisation algorithm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/628.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/628.txt new file mode 100644 index 0000000000000000000000000000000000000000..998b8e97cc304a1b689b671926b1489421187671 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/628.txt @@ -0,0 +1 @@ +monte carlo simulations remain a very important computational tool in many areas ranging from social sciences, bayesian data analysis, and inference to physics. in many cases, to generate samples from a given target distribution one resorts to the construction of an associated markov chain of consecutive proposals. the only limiting factor of the approach is the statistical uncertainty, which directly depends on the number of statistically independent configurations. hence, the effectiveness of any such simulation algorithm can be linked to its autocorrelation time, which quantifies how many configurations are produced before a new, statistically independent configuration appears. for systems close to phase transitions the increasing autocorrelation time, a phenomenon called critical slowing down, is usually the main factor which limits the statistical precision of outputs. the recent interest in machine learning techniques has offered possible ways of dealing with this problem. one recent work proposed normalizing flows based on neural networks as a mechanism for generating independent configurations in lattice field theories (lft) which can be used as proposals in the construction of the markov chain. the new algorithm was hence called neural markov chain monte carlo (nmcmc).
for discrete statistical systems like e.g. the ising model, autoregressive neural networks were used in the nmcmc sampling algorithm . once the neural network is sufficiently well trained, one indeed finds that autocorrelation times are significantly reduced as was demonstrated in the context of the two-dimensional ising model in ref. .neural networks that build up the normalizing flows have to be trained, i.e. their weights should be tuned so that the model can approximate the desired probability distribution. the standard approach for achieving this is using the stochastic gradient descent (sgd) algorithm which requires the estimation of gradients of the loss function with respect to the neural network weights. the most commonly used estimator of the gradient is based on the so-called "reparametrization trick" (r.t.) . it is straightforward to implement but requires the calculation of gradients of the target probability. if this probability is given by a complex formula this may lead to severe degradation of performance.in ref. we have proposed to use reinforce (re) algorithm for the gradient estimator. we have shown how it can be implemented in case of reversible 1 normalizing flows and that it avoids calculating the derivative of the action. in there, we have applied this estimator to ϕ 4 lft and while it had better convergence properties, the ϕ 4 action is very simple and did not bring out the full capabilities of this approach. the same implementation for re as in ref. was later proposed also in ref. . in this contribution, we apply this estimator to the case of the 2d lattice schwinger model with wilson fermions. the fermionic action requires the calculation of the determinant of the dirac operator, which is represented by a large (2l 2 × 2l 2 for l × l lattice) matrix, so avoiding propagating gradients through those calculations may prove beneficial. this is also probably the simplest model with dynamical fermions so it is often used as a testing ground for algorithms that eventually can be used for lattice qcd making it an interesting model to study. we demonstrate that the re estimator is significantly faster than the r.t., which is currently the most commonly used gradient estimator. already at l = 12 re outperforms the r.t. estimator and the difference grows quickly with l, reaching a factor of 10 for l = 24. in addition, we show that the re requires much less memory which plays a role for larger systems sizes. the code used in this paper is available at . this paper is organized as follows: in section 2 we present the neural markov chain monte carlo algorithm and explain how it can be implemented in terms of normalizing flows. in section 3 we present re and r.t. gradient estimators that can be used to approximate the gradient of the loss function with respect to the model parameters. we show how the re can be implemented in practice. in section 4 we introduce the 2d lattice schwinger model and in section 5 we present a detailed comparison of both estimators for this model. appendix a gives the details of the implementation. in the second stage, the trial configuration is accepted with probability p a (ϕ trial |ϕ i ) usually given by the metropolis-hastings acceptance probabilityp a (ϕ trial |ϕ i ) = min 1, p(ϕ trial ) q(ϕ trial |ϕ i ). hence, one assumes that q(ϕ) can be represented by some appropriate model parametrized by some (very large) set of parameters θ q(ϕ) = q(ϕ|θ). 
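to recap the accept/reject step in code: for an independence proposal drawn from the trained model q(ϕ|θ), the standard metropolis-hastings acceptance probability is p_a(ϕ_trial|ϕ_i) = min(1, p(ϕ_trial)·q(ϕ_i|θ) / (p(ϕ_i)·q(ϕ_trial|θ))). the sketch below is a generic illustration with toy one-dimensional densities (not the schwinger-model setup of the paper) and works in log space; normalisation constants cancel in the ratio.

```python
import math
import random

def nmcmc_step(phi_current, log_p, log_q, sample_q):
    """one neural-mcmc step: propose independently from the trained model q,
    then accept or reject with the metropolis-hastings probability."""
    phi_trial = sample_q()
    log_accept = (log_p(phi_trial) - log_p(phi_current)
                  + log_q(phi_current) - log_q(phi_trial))
    if random.random() < math.exp(min(0.0, log_accept)):
        return phi_trial, True
    return phi_current, False

# toy illustration: target N(0, 1), "trained model" N(0, 1.2); unnormalised log densities
log_p = lambda x: -0.5 * x * x
log_q = lambda x: -0.5 * (x / 1.2) ** 2
sample_q = lambda: random.gauss(0.0, 1.2)

random.seed(0)
phi, accepted = 0.0, 0
for _ in range(10_000):
    phi, ok = nmcmc_step(phi, log_p, log_q, sample_q)
    accepted += ok
print("acceptance rate:", accepted / 10_000)
```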
a natural choice for such a loss function is the kullback-leibler divergence, d_kl(q|p) = ∫ dϕ q(ϕ|θ) [log q(ϕ|θ) - log p(ϕ)] = E_{q(ϕ|θ)}[log q(ϕ|θ) - log p(ϕ)], where the function q_pr(z) is the probability density defining a prior distribution of the random variable z. φ(z|θ) has to be a bijection, which implies that if the input z is drawn from q_pr(z) then the output ϕ is distributed according to q(ϕ|θ) = q_z(z|θ) ≡ q_pr(z) J(z|θ). equation (6) can be rewritten in terms of q_pr(z), q_z(z|θ) and φ(z|θ) as f_q = ∫ dz q_pr(z) [log q_z(z|θ) - log p(φ(z|θ))] = E_{q_pr(z)}[log q_z(z|θ) - log p(φ(z|θ))]. the training of the machine learning model is done with the stochastic gradient descent (sgd) method and requires the calculation of the gradient of f_q with respect to θ. the reparametrization trick differentiates through the map φ(z|θ), and so calculating the gradient of f_q requires the gradient of the action s with respect to the fields ϕ. instead, the reinforce algorithm relies on first differentiating the exact formula (6): df_q/dθ = ∫ dϕ (∂q(ϕ|θ)/∂θ) [log q(ϕ|θ) - log p(ϕ)]. the reparametrization-trick estimator can be summarised as: 1: # generate ϕ; 2: switch on gradient calculations; 3: z ∼ q_pr(z) # generate z from prior distribution; 4: ϕ ← φ(z|θ) # forward pass; 5: # calculate q(ϕ|θ); 6: q ← q_pr(z) det ∂φ^{-1}(ϕ|θ)/∂ϕ; 7: # calculate loss; 8: loss ← log q(ϕ|θ) - log p(ϕ). the reinforce estimator can be summarised as: 1: # generate ϕ; 2: switch off gradient calculations; 3: z ∼ q_pr(z) # generate z from prior distribution; 4: ϕ ← φ(z|θ) # forward pass; 5: # calculate signal; 6: s ← log q(ϕ|θ) - log p(ϕ); 8: switch on gradient calculations; 9: z′ ← φ^{-1}(ϕ|θ) # backward pass; 10: q ← q_pr(z′|θ) det ∂φ^{-1}(ϕ|θ)/∂ϕ; 11: # calculate loss; 12: loss ← log q × (s - s̄), with s̄ a baseline (e.g. the batch average of s). after ∼ 120k gradient updates we achieved an acceptance rate of 22% and an autocorrelation time of approximately nine monte carlo steps for the chiral condensate (29), shown as a function of the number of gradient steps for the two gradient estimators. to check this, we compared (29) as a function of the number of gradient steps for the reinforce gradient estimator. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/629.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/629.txt new file mode 100644 index 0000000000000000000000000000000000000000..dad615245278d297664a4d9c09776d746eabb315 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/629.txt @@ -0,0 +1 @@ +generative adversarial networks (gans), introduced in goodfellow et al. (2020), are a type of neural network that aims to replicate the distribution of the datasets they are trained on through generating new samples. this is celebrated by practitioners as a reliable and extensively used tool for data augmentation, especially in domains where data is scarce, as they have shown the seeming ability to learn the underlying data distribution with few examples, however complex. however, due to an inherent failure mode called mode collapse (goodfellow 2017), when the training distribution is skewed towards some dimension, gans have been proven to not only perpetuate but also magnify the existing biases in the data. this makes the generated data distribution less diverse than the original, which could pose ethical implications in human-data datasets where biases pertaining to sensitive attributes such as race or gender can be affected (jain et al. 2022).
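a simple way to make this kind of skew measurable, anticipating the metrics discussed next, is to compare the class histogram of generated samples with that of the real data, for instance via the jensen-shannon divergence; the label counts below are hypothetical.

```python
import numpy as np
from scipy.spatial.distance import jensenshannon

def class_histogram(labels, num_classes):
    """normalised label distribution of a (real or generated) dataset."""
    counts = np.bincount(labels, minlength=num_classes).astype(float)
    return counts / counts.sum()

# hypothetical labels: the real set is balanced, the generated set has collapsed
real_labels = np.repeat(np.arange(10), 100)   # 100 samples per class
gen_labels = np.random.default_rng(0).choice(
    10, size=1000, p=[0.3, 0.3, 0.2, 0.1, 0.05, 0.05, 0, 0, 0, 0])

p_real = class_histogram(real_labels, 10)
p_gen = class_histogram(gen_labels, 10)

# jensenshannon returns the js *distance* (square root of the divergence)
print("js distance:", jensenshannon(p_real, p_gen))
```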
this study aims to quantify bias by systematically evaluating various state-of-the-art metrics that quantify the extent to which mode collapse occurs in ganaugmented datasets, with the motivation being that quantifying mode collapse could help elude adverse societal consequences that stem from the use of biased data.generative adversarial networks (gans), introduced ingoodfellow et al. (2020), are a type of neural networks that aim to replicate the distribution of the datasets they are trained on through generating new samples.however, due to an inherent failure mode called mode collapse(goodfellow 2017), when the training distribution is skewed towards some dimension, gans have been proven to not only perpetuate but also magnify the existing biases in the data. this makes the generated data distribution less diverse than the original, which could pose ethical implications in human-data datasets where biases pertaining to sensitive attributes such as race or gender can be affected(jain et al. 2022). 1998)andcifar-10 (krizhevsky, hinton et al. for each dataset, a set of ten artificially mode collapsed/skewed subsets were created by sampling from the original such that the ith dataset created (for 0 ≤ i ≤ 9) has classes 0, 1, . as an alternative, the jensen-shannon score is used to find the divergence between the reference bins distribution and that of the tested model when the number of samples is high(richardson and weiss 2018).inception score (is) the is(salimans et al.mode score the mode score(che et al.fréchet inception distance (fid) the fid(heusel et al.we artificially skew the distribution by removing one class at a time for both datasets and show the metrics' outputs in figure1. the mode score, on the contrary, does not show the extent of mode collapse on either dataset, showing inconsistent results for both dataset evaluations. potential future work is to examine the mnist dcgan dataset manually to estimate mode collapse and corroborate the effectiveness of the measures. further, adding labeled gan-generated datasets already used in existing literature -especially those that pose societal consequences such as ones involving face-imagery or human data -would serve as benchmarks that more accurately reflect bias in society. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/63.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/63.txt new file mode 100644 index 0000000000000000000000000000000000000000..d3451649e04bc086dc559b83f2747913186fdfa1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/63.txt @@ -0,0 +1 @@ +deep reinforcement learning (drl) has been applied in multiple domains including object detection , robotics control and natural language processing . the drl networks are also successful in playing games such as atari games and go . however, the inner logic and reasoning of drl systems are opaque and difficult to be understood even by their own designers. the human factors and computational literature has pointed out the need for system transparency as a way to increase trust in the system. additionally, transparency would be useful for human collaboration with autonomous agents that use drl. when interacting with autonomous intelligent agents, people tend to regard them as intentional individuals and explain their behaviors as in interpersonal relationships . that requires an explainable agent to clarify its actions by offering reasons of beliefs, desires, and intentions . 
additionally, system transparency can provide clues for designers to debug system. while some amount of work has been done in computer vision to make deep neural networks more transparent , there are only a couple of works , , that address transparency in drl networks. this is due to the additional complexity of learning for sequential decision making. researchers found that visualizations of drl reasoning via a new technique, objectsaliencey maps, were as effective in enabling subjects to make predictions about a drl agent's (ms. pacman) next actions as giving the subjects access to sequences of game screenshots. although these results were promising, the human's predictions had accuracy of 60%. one of the possible reasons was that in the objectsaliencey maps there were multiple objects that ms. pacman could attend to and that could influence its subsequent decisions. this could induce ambiguity in the subjects' mind as to which action pacman would take next. we, therefore, decided to develop focused verbal explanation models of the drl system since (a) the verbal explanation could refer to objects that would be the most important in influencing the agent's selection of next action and (b) language may be more satisfying to people as a means of communication. however, no public datasets on verbal explanation of drl systems are available currently. therefore we chose to construct explanation datasets and verbal explanation models by ourselves. this paper reports on the methods of generating the datasets and models.since the verbal explanation of drl system pertains to explaining the internal logic of the agent, rahter than a human subjective interpetation on the agent's behavior, we generated an initial rulebased model based on prior knowledge of the ms. pacman game and its rules. although the rule-based model is capable of generating reasonable explanations, it lacks of generalizability and flexibility for unexpected situations. in these cases, neural network based learning models are entrusted with the responsibility. they can learn the implicit logic of generating verbal explanation for drl system through neural connections, once given sufficient training data. the challenge for the learning model lies in extracting distinguishable features from drl systems, especially from high structural similar images, such as game image with fixed board or fixed map, which can not be fully solved by existing networks currently.in this paper, both rule-based model and learning model are presented to generate verbal explanation for drl systems. game image, agent position map, and object saliency map act as the input for both models. data generated by the rule-based model is employed to train the learning model, which consists of two parts: an encoder on feature extraction, and a decoder on generating the explanation in natural language with attention mechanism. we make the following contributions:• to reduce the ambiguity of saliency maps and to improve the rationalization of explanation, we put forward conceptual level verbal explanation employing natural language and test human subject satisfaction. figure 1 provides a conceptual overview of the roles of the rule based-model and learning model using a screenshot and the corresponding o-saliencey map from ms. pacman. 
we, therefore, decided to develop focused verbal explanation models of the drl system since (a) the verbal explanation could refer to objects that would be the most important in influencing the agent's selection of next action and (b) language may be more satisfying to people as a means of communication.since the verbal explanation of drl system pertains to explaining the internal logic of the agent, rahter than a human subjective interpetation on the agent's behavior, we generated an initial rulebased model based on prior knowledge of the ms. the challenge for the learning model lies in extracting distinguishable features from drl systems, especially from high structural similar images, such as game image with fixed board or fixed map, which can not be fully solved by existing networks currently.in this paper, both rule-based model and learning model are presented to generate verbal explanation for drl systems. data generated by the rule-based model is employed to train the learning model, which consists of two parts: an encoder on feature extraction, and a decoder on generating the explanation in natural language with attention mechanism. if we use the image input of time step (𝑡 -1) to generate verbal explanation of time step 𝑡 in game episode, then the model becomes a prediction model.image encoder aims to encode game image, pacman position map, and object saliency map for the verbal decoder part.the structure of encoder for game image, pacman position map, and object saliency map, is shown as follows:. while in the verbal evaluation task, an language explanation generated by either the rule-based or learning based model is given in addition to the two images.to ensure the quality of generated verbal explanation from our proposed learning model (named as cnn+atten+gru), we evaluate the model by comparing with rule-based model and other similar learning models.to evaluate the reliability of proposed learning model, and investigate if the pacman position map, object saliency map, and attention mechanism play their roles, seven models are designed.the game image, pacman position map, and object saliency maps act evenly in model cnn+gru, because there is no attention mechanism in this model. the learning model mines the implicit logic of generating verbal explanation with the help of rule-based model to all the situations. in return, the learning model generalizes the rule-based model to all the situations, including the ones beyond the capabilities of the rule-based model. experiments show that explaining drl system with natural language generated by the rule-based model gains higher user satisfaction than that with only object saliency map, showing the usefulness of verbal explanations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/630.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/630.txt new file mode 100644 index 0000000000000000000000000000000000000000..998b8e97cc304a1b689b671926b1489421187671 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/630.txt @@ -0,0 +1 @@ +model-based methods have been widely used to perform channel prediction. however, recent advances in ml have accelerated several research areas, and recent studies have suggested that ml has the potential to outperform conventional channel prediction model-based methods. the ml model is a function that maps input data to an output decision or prediction, defined by its trainable parameters and its architecture. 
training a supervised ml model means tuning the parameters to output a satisfactory output, usually by solving an optimization problem that minimizes a loss function. in our case, the ml model uses historical time series of channel measurements as input and outputs the future channel.the performance of the model-based kalman filter has been compared to a multilayer perceptron (mlp) in . the mlp is a basic neural network that consists of several layers of nodes, where each node in one layer connects by a trainable parameter to every node in the following layer. to replicate the training process as with real channel data, used noisy simulated data to train the mlp. all other papers surveyed in our work that use simulated data assume perfect knowledge of the channel when training the neural network. the comparison of the kalman filter and the mlp method, with a small advantage to the kalman filter, suggests a need for more advanced neural networks.several works have implemented more advanced structures for enhanced prediction accuracy. a popular model in image recognition is the convolutional neural network (cnn), which in contrast to mlp can take a matrix as input instead of a vector. it can learn to recognize patterns in smaller sections from an input matrix. by constructing a matrix of the size given by the time steps and the number of antennas, a cnn is proposed in to predict ar coefficients for channel evolution. channel prediction has also been performed using a recurrent neural network (rnn) that utilizes the temporal correlation in sequential data, in contrast to the cnn. a subset of frequency subcarriers was predicted individually by an rnn in , followed by performing interpolation to predict the entire frequency domain used by the antenna. other works have combined cnns and rnns to predict the channel. both , have combined a cnn with a long short-term memory (lstm) model, which is a type of rnn. a wellknown issue with rnn is that it has training convergence issues due to vanishing or diverging gradients. lstm alleviates these problems. a comparison between lstm to conventional model-based methods has been proposed in , where the effect of moving at different velocities were studied for channel prediction. in a recent paper , the authors modeled the channel between a ue and base station (bs) via a reconfigurable intelligent surface as a fast-fading channel using the lstm, assuming stationarity between the bs and the reconfigurable intelligent surface. aside from lstm, gated recurrent unit (gru) has also been proposed to improve the sequential rnn, and is more computationally efficient than lstm. in channel prediction, the gru has been tested empirically by several researchers. the authors in compared the lstm and gru with a proposed prediction model exploiting an encoder-decoder scheme, with lstm or gru layers at both the encoder and decoder side. several datasets, one including 4g measurements, revealed a slight advantage to the lstm. in , an overview of channel prediction has been made where the lstm and gru have been studied over several prediction horizons. the first evaluation of a deep gru has been conducted, in favor of the gru. one additional type of neural network has been recently proposed in , which adopts the transformer model to predict the channel. the transformer has the ability to predict multiple future time steps in parallel, by learning to identify and pay attention to critical behavior in sequential data. 
another transformer-based model has been proposed in to predict the channel impulse response, based on the location of the ue. it does not use historical channel measurements as input like the previously discussed papers but shows good results compared to the lstm.in table i, contributions from all papers are categorized in columns and summarized, with the topics of interest in this paper. as can be seen in the column prediction procedure, the channel is predicted using time series for all papers in table i, except for . although there are some papers using slightly different prediction procedures, time series are the foundation to make the prediction procedure more efficient.one conclusion from table i is that the majority of the papers evaluate the channel prediction models based on simulated data. this is understandable since it is less costly and less time-consuming to collect simulated data. however, in a real-world implementation of channel prediction, measured data have to be used to conduct the predictions. the channel measurement and estimation process is unavoidably affected by noise. for this reason, noise was introduced in the training process in , including the true data that is used to update the model. the column noisy label indicates whether the paper considers a noisy label for training the model. if the data is generated by measurements, the label is automatically noisy. there is only one paper that considers noisy labels while using a simulator to generate the dataset to train the prediction model. in our paper, we investigate its role and we show that it can have a major effect when evaluating the performance of the prediction methods.the prediction horizons considered in table i are almost exclusively correlated with mobility. if a paper considers high mobility of the ue, the horizon is short, and vice versa, due to difficulties of predicting the channel over long horizons with a fluctuating channel. if the prediction horizon exceeds the coherence time, the channel's temporal correlation vanishes, and it becomes impossible to infer the channel out of current or past measurements. the prediction horizons of the papers listed in table i are generally short. half of the papers do not state on what time horizon the channel is predicted. furthermore, no paper has included a prediction horizon long enough for the performance to fail.from the summary of previous works in table i, research gaps can be found. first, it is not obvious which neural network is the most suitable for channel prediction. second, although different data-driven models may each have good results, they have not been compared to each other. in most of the existing literature, datadriven models are compared to conventional model-based methods. the overviewed papers generally do not perform comparisons among data-driven models, or at best do partial comparisons. for example, compares lstm to a deep gru. the present paper is arguably the first to make a comprehensive comparison among the most prominent data-driven approaches. most commonly, channel prediction is incorporated by exploiting time series of past channel estimations. in the case of predicting future channels based on solely the previous channel estimations, the channel prediction problem becomes a time series learning problem. by relying solely on historical time series data and the temporal correlation of the channel, channel prediction algorithms become computationally more efficient and scalable among different environments. 
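to make the time-series framing concrete, the hypothetical sketch below (not tied to the 3gpp tdl-a setup used later in the paper) slices a scalar channel history into supervised windows and fits a small gru that predicts the next sample.

```python
import torch
import torch.nn as nn

def make_windows(series: torch.Tensor, history: int):
    """slice a 1-d channel series into (history -> next value) training pairs."""
    xs = torch.stack([series[i:i + history] for i in range(len(series) - history)])
    ys = series[history:]
    return xs.unsqueeze(-1), ys.unsqueeze(-1)       # (samples, history, 1), (samples, 1)

class GRUPredictor(nn.Module):
    def __init__(self, hidden: int = 32):
        super().__init__()
        self.gru = nn.GRU(input_size=1, hidden_size=hidden, batch_first=True)
        self.head = nn.Linear(hidden, 1)

    def forward(self, x):
        out, _ = self.gru(x)             # out: (batch, history, hidden)
        return self.head(out[:, -1, :])  # predict the next channel sample

# toy "channel": a noisy fading-like sinusoid, purely for illustration
t = torch.arange(0, 400, dtype=torch.float32)
series = torch.sin(0.05 * t) + 0.05 * torch.randn_like(t)

x, y = make_windows(series, history=20)
model, loss_fn = GRUPredictor(), nn.MSELoss()
opt = torch.optim.Adam(model.parameters(), lr=1e-2)
for _ in range(200):
    opt.zero_grad()
    loss = loss_fn(model(x), y)
    loss.backward()
    opt.step()
print("final training mse:", loss.item())
```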
for this reason, the present article aims to overview the most prominent neural networks methods and to identify research gaps in channel prediction that strictly uses historical channel measurements as input data. the most promising neural networks for channel prediction, which has performed well in previous studies, are compared using a dataset, with and without noise, simulated by the common and realistic 3 rd generation partnership project (3gpp) tapped delay line (tdl)-a model. this paper is, to the authors best knowledge, the first comparison of channel prediction methods that represent multiple different classes of neural networks. the rest of the sections in this paper are organized as follows: the representative state-of-the-art in channel prediction using ml is overviewed; the prediction schemes that will be compared are described and justified; the proposed prediction schemes are numerically evaluated and compared by their performance; the outcome of the experiments are discussed; and finally, the results from the study we proposed in this paper are concluded. in our case, the ml model uses historical time series of channel measurements as input and outputs the future channel. channel prediction has also been performed using a recurrent neural network (rnn) that utilizes the temporal correlation in sequential data, in contrast to the cnn. in a recent paper, the authors modeled the channel between a ue and base station (bs) via a reconfigurable intelligent surface as a fast-fading channel using the lstm, assuming stationarity between the bs and the reconfigurable intelligent surface. in, an overview of channel prediction has been made where the lstm and gru have been studied over several prediction horizons.to evaluate and compare the performance of the promising neural networks for channel prediction, we consider a downlink mimo scenario with n t antennas at the bs and n r antennas at the ue. for longer prediction horizons of several sampling times ahead, the channel prediction of 1 ms ahead is used as historical measurements for the next prediction.3a, it is evident that the recurring memory cell gives a strong advantage in wireless channel time series prediction and constitutes a robust performance for the gru respectively lstm compared to the mlp, cnn, and transformer model, regardless of the prediction horizon. in the scenario of a noisefree channel, the numerical experiments of this paper established that two rnns, namely gru and lstm, achieved considerably better results for prediction horizons up to 15 ms than the mlp, cnn, and transformer model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/631.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/631.txt new file mode 100644 index 0000000000000000000000000000000000000000..d8b32a16e17123b4a701218a7734bd41b12f997c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/631.txt @@ -0,0 +1 @@ +self-supervised learning (ssl) is a machine learning (ml) paradigm where the ml model trains itself to learn one part of the input data from another part. ssl, which can learn from vast amounts of unlabeled data, is also called predictive or pretext learning as it transforms the unsupervised learning task into a supervised one by auto-generating the labels . 
it has been argued that ssl is likely a key toward "unlocking the dark matter of intelligence" , where yann lecun has been one of the biggest advocates of ssl, at least as a means to making deep learning data-efficient, who stated "if artificial intelligence is a cake, self-supervised learning is the bulk of the cake." . in fact, ssl has already started to take the world by storm, as embodied in large language models (llms) like openai's chatgpt and its many real world use cases .ssl is particularly attractive for unsupervised anomaly detection (ad) problems, for which acquiring labeled data is costly, laborious, in some cases impossible or even undesirable. to elaborate, it is hard in most settings to (pre)specify what constitutes anomalies, i.e. they are the "unknown unknowns", which makes labeling impractical. anomalies also frequently appear in adversarial scenarios, and thus are subject to change rapidly. to stay alert to emerging threats, it is desirable to adopt unsupervised techniques, often in hybrid combination with supervised classifiers that have been trained on historical schemes - . therefore, in the absence of any labeled anomalies, ssl based techniques offer opportunities for many unsupervised ad problems in the real world.at the heart of ssl lies the pretext (or surrogate, selfsupervised) task. depending on the type of pretext learning, ssl methods have been organized into contrastive, predictive, and generative , . contrastive methods typically employ data augmentation toward learning meaningful representations. predictive methods create surrogate (or pseudo) labels from the data itself, often using masking strategies. finally, generative methods aim to capture the underlying data distribution by trying to mimic the generative processes of the input data.in this vision paper, we introduce recent developments of ssl for ad and essential challenges that have arisen from the literature. we focus on the difficulty of augmentation tuning and model selection of ssl, given that a fair selection of hps is infeasible in ad where no labeled data are given at training time for the purpose of validation. fig. 1 illustrates why model selection is difficult especially on ssl for unsupervised ad.we summarize the key take-aways as follows: 1) ssl for ad is different from ssl for ml in essence, and it has the challenge of hp selection (sec. ii).2) the choice of a pretext task is important for the success of ssl in general (sec. iii). similarly, the choice of data augmentation plays a key role in ssl for ad (sec. iv). 3) we introduce recent works toward a fair and/or automatic selection of hps for ssl for ad, focusing on the idea of transduction; leveraging unlabeled test data (sec. v). 4) genai and foundation models can be the future of ad, provided massive amounts of training data exist (sec. vi). ssl, which can learn from vast amounts of unlabeled data, is also called predictive or pretext learning as it transforms the unsupervised learning task into a supervised one by auto-generating the labels. we focus on the difficulty of augmentation tuning and model selection of ssl, given that a fair selection of hps is infeasible in ad where no labeled data are given at training time for the purpose of validation. iv). 
in contrast, employing ssl in ad toward pseudo-anomaly generation is akin to "filling in" the inlier-only input space with negative samples; for example, various augmentations are employed into learn a better one-class (inliers) boundary than one could learn with unsupervised (deep) svdd alone,. as a consequence, success on a given ad task depends on which augmentation function is used or which external dataset the learning is exposed to as pseudo anomalies, and importantly "to what extent the pseudo anomalies mimic the nature of the true (yet unknown) anomalies" in the test data.have observed that sampling pseudoanomalies from a biased subset of true anomalies leads to a biased error distribution; the test error is lower on the seen type of anomalies during ssl training, at the expense of much larger error on unseen anomalies-even when the unseen anomalies are easily detected by an unsupervised detector (!). in a recent comprehensive study on ssl-based ad, we have observed similar alignment phenomena, where we find that the ad performance benefits from self-supervision to the extent that the pseudo-anomaly generation is capable of mimicking the true anomalies in the test data, which otherwise can even impair performance.the "fishing" issue is not limited to data augmentation (hyperparameters) and ssl-based ad specifically, but goes beyond more broadly to general ad model/hyperparameter (hp) selection at large. while the proper configuration of hps is critical to performance outcomes and both shallow and especially deep ad models with a longer list of hps are sensitive to hps, the ad community seems to have turned a blind eye to the issue, rendering (ssl-based) ad model selection "the elephant in the room"a major problem that is obviously present but avoided as a subject for discussion because it is more comfortable to do so. model selection) is nontrivial for unsupervised settings, in the absence of any labeled validation/hold-out data, although, the ad literature has been growing recently with novel ideas on unsupervised outlier model selection (uoms)-.". the principle is to not try to induce from having solved an intermediate problem (in this case, estimating a general decision boundary between inliers and all potential anomalies) that is no simpler or is more general/complicated/involved than the original problem at hand (detecting the specific, observed anomalies in test data). our recent attempt in this direction introduced the first end-to-end augmentation tuning framework for ssl-based ad, and utilized a differentiable, unsupervised alignment validation loss along with differentiable analytical formulas for various augmentation function choices.on the other hand, today's generative models are taking the world by storm, achieving outstanding results in learning data distributions by capitalizing on (i) massive amounts of (pre)training data, (ii) large-scale compute power and (iii) highly expressive, billion-scale parameterized transformer models. today's mostly autoregressive or diffusion based generative models are able to learn the underlying data distribution sufficiently well from massive amounts of unlabeled data (in other words, very densely sampled data manifold), to the extent that they can generate realistic, human-like content like conversationsand images. 
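as a toy illustration of the pseudo-anomaly idea discussed above (a hypothetical sketch, not a method from the cited works), inlier-only data can be turned into a self-supervised binary task by augmenting some inliers and labelling the augmented copies as pseudo-anomalies; how well the resulting detector transfers to real anomalies then depends entirely on how well the augmentation mimics them.

```python
import numpy as np
from sklearn.linear_model import LogisticRegression

rng = np.random.default_rng(0)

# inlier-only training data: 2-d points around the origin
inliers = rng.normal(0.0, 1.0, size=(500, 2))

# pseudo-anomalies: augmented inliers (here: a crude "shift" augmentation)
pseudo_anomalies = inliers[:250] + rng.normal(4.0, 0.5, size=(250, 2))

x_train = np.vstack([inliers, pseudo_anomalies])
y_train = np.concatenate([np.zeros(len(inliers)), np.ones(len(pseudo_anomalies))])
detector = LogisticRegression().fit(x_train, y_train)

# test anomalies lying in the direction the augmentation "guessed" ...
aligned_anomalies = rng.normal(4.0, 0.5, size=(100, 2))
# ... and anomalies in a direction the augmentation never covered
unseen_anomalies = rng.normal(-4.0, 0.5, size=(100, 2))

print("accuracy on aligned anomalies:", detector.score(aligned_anomalies, np.ones(100)))
print("accuracy on unseen anomalies :", detector.score(unseen_anomalies, np.ones(100)))
```

with this particular augmentation the detector flags the aligned anomalies almost perfectly but misses the unseen ones, which is precisely the alignment issue described above.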
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/632.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/632.txt new file mode 100644 index 0000000000000000000000000000000000000000..db9b91deba2ae58f6e800816034d511cb36fe0ac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/632.txt @@ -0,0 +1 @@ +the significance of residential occupancy detection has increased substantially, primarily driven by global urbanization and concurrent population growth in recent years . accurately determining patterns of occupancy holds utmost importance, as it can improve self-awareness of occupancy patterns for residents and enable various business opportunities for utility companies and building managers, including energy saving, thermal comfort control, and route optimization for work activities and deliveries , . consequently, accurate occupancy detection yields a range of benefits, including economic advantages, positive environmental impacts, and enhanced security and comfort for residents. a large body of research has been conducted on residential occupancy detection. many existing studies adopted one common approach involving the installation of supplementary cameras or sensors, such as thermal imaging cameras or motion sensors - . though these methods could achieve high accuracy, they may cause significant concerns or pose new challenges. for example, installing sensors is intrusive, requires regular maintenance, and can be costly. furthermore, constant monitoring can invade occupants' privacy, raising ethical concerns. lastly, the integration of multiple sensors increases system complexity, which in turn requires sophisticated algorithms and software. thus, these methods may face scalability issues and raise concerns regarding privacy risks.to address the aforementioned issues, researchers have been studying non-intrusive approaches. compared to camera or motion-sensing-based methods, smart meters, widely installed as a part of utility infrastructure, can provide an alternative and cost-effective approach to occupancy detection. the energy consumption data monitored by smart meters can be easily integrated with energy management or home automation systems, providing an inherent advantage over the camera or motion-sensing methods as there is no need for extra installation and maintenance. recent studies using high-resolution smart meter data , - have achieved comparable accuracy levels to other methods using sensors. however, privacy concerns still arise due to the detailed energy consumption information revealing occupants' habits. additionally, frequent sampling also cause scalability issues to data transmission, storage, and processing. thus, it is crucial to avoid using high-resolution data but using low-resolution data, which can preserve privacy.low-resolution smart meter data occupancy detection is proposed to further resolve privacy and scalability limitations. however, the reduced information content in low-resolution smart meter data makes occupancy detection tasks quite challenging to achieve high accuracy equivalent to other methods using high-resolution data. deep learning techniques provide promising solutions to improve the detection accuracy using low-resolution data. for example, hisashi et al. proposed a deep learning-based method to estimate residential occupancy status. 
their proposed method included manual feature extraction to derive statistical features from time-series sequences and subsequently process the extracted data through a bi-directional long short term memory (bi-lstm) networka commonly used recurrent neural network (rnn), with the attention mechanism. moreover, their method trained separate models for individual households, leading to generalization problems. according to , , households with diverse socioeconomic characteristics exhibit different energy consumption profiles. an effective occupancy detection method should overcome generalization limitations and be applied to a broad groups of households with diverse socioeconomic backgrounds and lifestyles.to overcome the limitations of existing methods, we are motivated to design a new deep-learning-based approach for residential occupancy detection using low-resolution smart meter data while achieving considerably high accuracy. specifically, we employ a hybrid transformer-bi-lstm architecture that enables processing of raw smart meter data without the need for manual feature extraction. in addition, our model is designed to be applicable to various households rather than being trained for each individual household separately. the goal of our work is to achieve state-of-the-art performance for occupancy detection using low-resolution smart meter. to evaluate the effectiveness of our model, we conduct experiments on the most comprehensive publicly accessible dataset . the results show that our model improves occupancy detection performance across households compared to baseline methods. the contributions of our work are summarized as follows.• our work presents a novel model by combining rnns and transformers to effectively model temporal dependencies in low-resolution smart meter data. by leveraging rnns' capability in sequential processing of short to medium-term dependencies and transformers' selfattention for long-range dependencies, we enhance the performance and accuracy of occupancy detection. • our work explores various transformer-rnn hybrid models by thoroughly examining the fusion of these architectures in different arrangements. through our investigation, we find an optimal combination: the concatenation of bi-lstm and transformers. our design leverages the temporal modeling of bi-lstm and transformers' self-attention mechanism, shedding light on the effective construction of such hybrid models for similar tasks.• we compare our model to different models in residential occupancy detection using a comprehensive benchmarking framework, including various performance metrics and cross-validation, based on a real-world household dataset. our findings demonstrate that the fusion of transformers and bi-lstm models through a concatenation operation consistently outperforms other baseline models in terms of a comprehensive set of performance metrics.the remainder of this paper is organized as follows. section ii presents the problem formulation and the hybrid transformer-rnn model for occupancy detection. section iii introduces the benchmark models and evaluation metrics. section iv discusses numerical results, and section v concludes this paper.low-resolution smart meter data occupancy detection is proposed to further resolve privacy and scalability limitations. 
however, the reduced information content in low-resolution smart meter data makes occupancy detection tasks quite challenging to achieve high accuracy equivalent to other methods using high-resolution data.to overcome the limitations of existing methods, we are motivated to design a new deep-learning-based approach for residential occupancy detection using low-resolution smart meter data while achieving considerably high accuracy.• our work presents a novel model by combining rnns and transformers to effectively model temporal dependencies in low-resolution smart meter data.• we compare our model to different models in residential occupancy detection using a comprehensive benchmarking framework, including various performance metrics and cross-validation, based on a real-world household dataset.we present a novel model aiming to enhance the performance of occupancy detection by leveraging a hybrid transformer-bi-lstm architecture on low-resolution smart meter data. for the purpose of feature extraction from smart meter data for occupancy detection, we utilize solely the encoder part of the transformer architecture. • bi-lstm + attention: proposed in a previous study, this model combines a bi-lstm with an attention mechanism, processing the input data with the bi-lstm and using the attention mechanism to weigh the importance of different time steps in the output. since our study aims to detect occupancy using low-resolution smart meter data, we resample both the smart meter and occupancy data to a lower resolution to obtain a dataset with one-hour intervals.2) effectiveness of transformer-rnn hybrid models and the impact of different integration approaches: among all the transformer-rnn hybrid models, despite the bi-lstm and transformers concatenation version, the bi-lstm + transformers and transformers + bi-lstm versions have also exhibited significant performance enhancements over previous bi-lstm + attention models for occupancy detection using original smart meter data. the bi-lstm + transformers model, which initially processes data via the bi-lstm before forwarding it to the transformer, and the transformers + bi-lstm model, which reverses this order, both achieve better performance compared to bi-lstm + attention models while transformer-rnn hybrids certainly present promising improvements over earlier bi-lstm + attention models, the mode of integration is crucial to optimize their performance.3) subtle impact and inferiority of manual feature extraction: when evaluating the bi-lstm + attention model on both original smart meter data and manually feature-extracted data, a nuanced impact on the effectiveness of manual feature extraction emerges in the problem of residential occupancy detection. while the model trained on original data exhibits a marginally higher accuracy, precision, and roc auc, the recall and f1 score for the model trained on feature-extracted data are slightly higher. consequently, relying on the innate feature extraction capabilities of neural networks could be an effective and more streamlined approach for occupancy detection using smart meter data.this paper presented a compelling exploration of hybrid models combining both bi-lstm and transformer architectures for the task of residential occupancy detection using lowresolution smart meter data. 
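a minimal pytorch sketch of the "concatenation of bi-lstm and transformers" variant described above, assuming a single hourly-consumption channel; hidden sizes, number of heads, and layer counts are illustrative assumptions rather than the paper's configuration.

```python
# sketch of a hybrid transformer/bi-lstm occupancy detector: raw low-resolution
# readings are fed to both branches, and the branch outputs are concatenated.
# all dimensions below are illustrative assumptions.
import torch
import torch.nn as nn

class HybridOccupancyModel(nn.Module):
    def __init__(self, d_model: int = 64, lstm_hidden: int = 64):
        super().__init__()
        self.proj = nn.Linear(1, d_model)                       # 1 input feature: hourly consumption
        self.bilstm = nn.LSTM(d_model, lstm_hidden, batch_first=True, bidirectional=True)
        enc_layer = nn.TransformerEncoderLayer(d_model=d_model, nhead=4, batch_first=True)
        self.encoder = nn.TransformerEncoder(enc_layer, num_layers=2)
        self.head = nn.Linear(2 * lstm_hidden + d_model, 1)     # concatenated branches -> occupancy logit

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, seq_len, 1) raw low-resolution smart meter readings
        h = self.proj(x)
        lstm_out, _ = self.bilstm(h)                            # (batch, seq_len, 2 * lstm_hidden)
        trf_out = self.encoder(h)                               # (batch, seq_len, d_model)
        fused = torch.cat([lstm_out[:, -1], trf_out.mean(dim=1)], dim=-1)
        return self.head(fused).squeeze(-1)                     # one occupancy logit per sequence

# usage: logits = HybridOccupancyModel()(torch.randn(8, 24, 1))   # 8 households, 24 hourly readings
```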
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/633.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/633.txt new file mode 100644 index 0000000000000000000000000000000000000000..704c1e4ca991f3e3390c2b235e4212b062f2cb50 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/633.txt @@ -0,0 +1 @@ +as a revolutionary solution for data privacy and distributed computational learning, federated learning (fl) has gained immense popularity in recent years, since its remarkable achievements in improving the speed of distributed training significantly, while ensuring data privacy without data transmission among clients during training , . this is achieved by bringing together the local training efforts of each client's individual model, which is trained exclusively using their unique dataset. in the traditional practice of each communication round, the server distributes a global model with aggregated parameters, and each client uses their private dataset to train the model locally. afterward, the local models are transmitted to the server for aggregation. once all the local models are accumulated, the server aggregates them, shares the aggregated model with the clients, and begins a new training epoch .however, the heterogeneity of clients imposes on the performance of fl systems, which leads to an escalation in training latency due to the limited computing resources of certain clients that could straggle the server's aggregation process . although, the widely-used edge computing (ec) technology can be used to alleviate the phenomena of stragglers by providing the computing resources of the edge server for clients . however, in traditional ec, clients generally need to offload their data to the server, which violates data privacy protection in fl. fortunately, the forward inference and the backward training of the model are sequential, which enables the neural network (nn) to be split into a few parts and processed in serial . therefore, in the nn is split into three parts, each client has the up and bottom parts whose size is small, and the large middle part is offloaded to the server to process. however, when there are lots of clients, the computing burden of the server is too heavy to alleviate the phenomena of stragglers effectively, moreover, offloading middle parts to the server can be time-consuming for clients who are far away from the server. thus, designing a novel approach that can effectively tackle the straggler issues in both computing and communication latency remains a crucial area of research in optimizing the training speed of fl.inspired by the pervasive ec, which allows clients with limited computational resources to offload to other clients with abundant resources , a client-pairing-based split federated learning (sfl) framework, named fedpairing, is proposed in this paper, to deal with the challenge of stragglers. in this framework, clients with disparate computational resources are paired while considering the communication rates between paired clients, enabling each client to divide its local model into two parts based on computational resources of itself and its paired client, and each client only computes part of the local model assigned to it. remarkably, the upper part containing the input layer is processed by the client itself, thus avoiding data transmission between paired clients and safeguarding data privacy. 
under this framework, each client has two sources of gradients during the backward procedure: the gradient transmitted from the paired client, owing to split learning, and the gradient computed by the client itself. this structure enables clients to indirectly train their local models with a larger dataset, which is helpful to improve the training accuracy of the fl. to pair clients efficiently, a heuristic greedy-based algorithm is proposed, which takes into account the transmission rates among clients and the computing resources of each client, thereby enhancing the training speed. the main contributions of this paper are as follows.1) a novel sfl framework, named fedpairing, is proposed that pairs clients with different computing resources, aiding in managing the straggler phenomenon and enhancing data privacy within federated learning. 2) a heuristic greedy algorithm is proposed to optimize client pairing by reconstructing the optimization of training latency as a graph edge selection problem, which increases the effectiveness of the federated learning system. 3) simulation results show the proposed method can significantly improve the fl training speed and achieve high performance both in independent identical distribution (iid) and non-iid data distribution.next, the server computes the propagation lengths for each pair (c i , c j ), denoting them by l i and l j , as. specifically, for each pairs (c i , c j ), both forward and backward propagation processes of c i and c j are executed identically and in parallel. next, the client sends xi to c j , which further propagates the feature map through the remaining layers, l j = w -l i , in its local model to compute the output, ŷi , i. subsequently, c j back-propagates l j = w -l i layers on the local model ω j to obtain the gradients g i (li+1,w ) for the layers from l i + 1 to w . c j then sends the gradient g i (li+1) = ωj (li+1,w ) (l i ) of the l i + 1-th layer to c i . when both clients c i and c j complete backward propagation, they update their local model using cached gradients, which can be formulated as. specifically, to avoid either side in each pair to become straggler, in this paper, we adjust the propagation length of c i and c j so that the time taken for c i and c j to complete forward and backward propagation is as equal as possible. the propagation time on ω i is l i f f i , and the propagation time on ω j is l j f f j . the channel gain between clients c i and c j is determined by the distance ∥p i -p j ∥ between them, where p i , p j denote the positions of c i , c j , respectively. firstly, clients participating in the training are modeled as a weighted undirected graph g = (e, v), where each vertex in the vertex set v denotes each client and the edges in the edge set e denote the corresponding two clients (c i , c j ) can establish a communication link.where f i , f j are the computational frequencies of c i , c j , respectively, r i,j denotes the communication rate between c i and c j . given any pair of clients (c i , c j ), suppose their propagation lengths are set to l i = 1 and l j = 2, respectively. during backward propagation, ω j (2) accumulates the gradients from both c i and c j , thereby resulting in the aggregation of the gradients of the two clients to the second fully connected layer after each backward propagation. specifically, for any pair of clients (c i , c j ), if the k-th layer of c j is an overlapping layer, the iterative parameter updating process for that layer can be formulated as. 
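a hedged sketch of the heuristic greedy pairing described above, viewed as edge selection on a weighted client graph; the edge weight below (a balanced compute term plus a communication term) is a simplifying stand-in for the paper's latency model, and freqs / rates are hypothetical inputs.

```python
# sketch: greedy client pairing as edge selection on a weighted undirected graph.
# the edge cost (compute time with layers split proportionally to frequency,
# plus a per-layer communication term) is a simplifying assumption, not the
# paper's exact latency expression.
from itertools import combinations

def greedy_pairing(freqs: dict, rates: dict, total_layers: int, bytes_per_layer: float):
    # freqs: client -> compute frequency; rates: (ci, cj) -> communication rate
    edges = []
    for ci, cj in combinations(freqs, 2):
        r = rates.get((ci, cj)) or rates.get((cj, ci))
        if r is None:
            continue                                  # no communication link between ci and cj
        # splitting layers proportionally to frequency makes both clients finish together
        compute = total_layers / (freqs[ci] + freqs[cj])
        comm = bytes_per_layer / r
        edges.append((compute + comm, ci, cj))
    edges.sort(key=lambda e: e[0])                    # cheapest (fastest) pairs first
    matched, pairs = set(), []
    for _, ci, cj in edges:
        if ci not in matched and cj not in matched:   # each client belongs to at most one pair
            pairs.append((ci, cj))
            matched.update((ci, cj))
    return pairs
```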
for each round from 1 to t do for each pair e = (c i , c j ) in pair set e parallelly do for each client c i in e parallelly do set ω i = ω g . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/634.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/634.txt new file mode 100644 index 0000000000000000000000000000000000000000..987ec963d6828b310212734328016cf0f01ae43a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/634.txt @@ -0,0 +1 @@ +machine learning algorithms have become a cornerstone in data-driven decision-making across various domains. these algorithms are designed to identify patterns, make predictions, and extract insights from large datasets. however, their deployment can sometimes lead to unintended consequences, such as biased predictions or the amplification of existing societal disparities. this is particularly critical in the field of causal analysis , where accurately identifying cause-and-effect relationships is essential for making informed decisions and implementing effective interventions.the study of causal relationships often relies on propensity score models, which estimate the likelihood of treatment assignment given a set of covariates. these models help researchers account for confounding factors and mitigate biases when estimating causal effects. however, even with the use of propensity scores, machine learning models can still exhibit biases and associations that may impact the accuracy of causal inference .to address these challenges, this paper introduces the concept of "machine unlearning" on causal datasets. machine unlearning is an iterative process of refining and improving machine-learning models specifically tailored for causal analysis . the goal is to identify and eliminate biases and unwanted associations in the data, thereby enhancing the accuracy and reliability of causal inferences .the lalonde dataset , a well-known benchmark dataset evaluating the effectiveness of job training programs, serves as the basis for the experiments conducted in this study. the dataset consists of various covariates, such as age, education level, and income, along with a binary treatment indicator representing participation in the training program. by applying the machine unlearning methodology to the lalonde dataset , the study aims to demonstrate the efficacy of this approach in demonstrating machine unlearning for causal inference .the proposed methodology involves training an initial propensity score model on the original dataset. subsequently, forget sets are created by selectively removing instance pairs based on propensity score matching. the retain sets are then used to retrain the model, allowing for the identification and elimination of biases introduced by the original model. by comparing the performance of the retrained models with the original model using metrics such as root mean squared error (rmse), the impact of machine unlearning on causal analysis can be assessed.in addition to evaluating model performance, this paper also analyzes the distribution and histogram of propensity scores before and after the unlearning process. this analysis provides insights into the changes in the associations and biases present in the data and further supports the effectiveness of the machine unlearning approach.the findings of this study have important implications for researchers and practitioners in the field of causal analysis . 
by leveraging machine unlearning techniques, it becomes possible to refine machine-learning models while retaining causal information. this can lead to more reliable and fair decision-making processes, ultimately contributing to the advancement of evidence-based interventions and policies. the ability to "forget" certain information while retaining the essence of causal relationships is a groundbreaking capability offered by machine unlearning. as a result, privacy concerns can be effectively addressed without compromising the integrity of causal analyses. this feature is of particular importance when working with sensitive data, ensuring compliance with privacy regulations, and fostering trust among users and stakeholders.moreover, the potential applications of machine unlearning extend beyond causal analysis, offering diverse avenues for enhancing machine learning models' reliability in various domains. concept drift, a common challenge in dynamic environments, can be tackled with ease by allowing models to adapt to new patterns without being constrained by outdated data, thus ensuring up-to-date and accurate predictions over time.in sum, the methodology introduced in this study marks the first attempt at applying unlearning to causal inference . by embracing these novel techniques, researchers and practitioners can evaluate the quality of causal analyses, thus empowering evidence-driven decision-making, and positively impact society by promoting fairness, transparency, and trust in machine learning models. as the adoption of machine unlearning continues to evolve, we anticipate its profound contributions to shaping a more equitable and informed world.overall, this paper contributes to the growing body of research on machine unlearning and its application in the context of causal analysis . by showcasing the potential of this approach, it encourages further exploration and development of machine unlearning techniques to address biases and improve causal inference in various domains.the study of causal relationships often relies on propensity score models, which estimate the likelihood of treatment assignment given a set of covariates. however, even with the use of propensity scores, machine learning models can still exhibit biases and associations that may impact the accuracy of causal inference. machine unlearningis an iterative process of refining and improving machine-learning models specifically tailored for causal analysis. by applying the machine unlearningmethodology to the lalonde dataset, the study aims to demonstrate the efficacy of this approach in demonstrating machine unlearning for causal inference.the proposed methodology involves training an initial propensity score model on the original dataset. by comparing the performance of the retrained models with the original model using metrics such as root mean squared error (rmse), the impact of machine unlearningon causal analysiscan be assessed. the ability to "forget" certain information while retaining the essence of causal relationships is a groundbreaking capability offered by machine unlearning.moreover, the potential applications of machine unlearning extend beyond causal analysis, offering diverse avenues for enhancing machine learning models' reliability in various domains. 
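a hedged sketch of the workflow described above on lalonde-style data: fit a propensity score model, build a forget set from nearest-propensity treated/control pairs, and keep the remainder as the retain set for retraining; the column names and the matching rule are illustrative assumptions, not the paper's exact procedure.

```python
# sketch: forget-set / retain-set construction via propensity score matching.
# the column names ("treat" plus user-chosen covariates) and the one-to-nearest
# matching rule are assumptions for illustration only.
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression

def split_forget_retain(df: pd.DataFrame, covariates: list, n_pairs: int = 50):
    df = df.reset_index(drop=True)
    model = LogisticRegression(max_iter=1000).fit(df[covariates], df["treat"])
    ps = model.predict_proba(df[covariates])[:, 1]          # estimated propensity scores
    treated = df.index[df["treat"] == 1].to_numpy()
    control = df.index[df["treat"] == 0].to_numpy()
    forget = []
    for t in treated[:n_pairs]:
        # match each treated unit to the control unit with the closest propensity score
        c = control[np.argmin(np.abs(ps[control] - ps[t]))]
        forget.extend([t, c])
    forget = sorted(set(forget))
    retain = df.drop(index=forget)                          # retrain the propensity model on this set
    return df.loc[forget], retain

# the retrained model's rmse and propensity-score histogram can then be
# compared against the original model's, as described above.
```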
by embracing these novel techniques, researchers and practitioners can evaluate the quality of causal analyses, thus empowering evidence-driven decision-making, and positively impact society by promoting fairness, transparency, and trust in machine learning models. by showcasing the potential of this approach, it encourages further exploration and development of machine unlearningtechniques to address biases and improve causal inferencein various domains.this paper utilizes the lalonde dataset, a widely-used benchmark for evaluating job training program effectiveness, to investigate the application of machine unlearningin causal analysis.the initial propensity score model is trained on the original dataset, using the covariates as inputs and the treatment indicator as the target variable.the retrained models, denoted as model 2 and model 3, are evaluated using the root mean squared error (rmse) metric to assess their performance in estimating the treatment effect.to gain insights into the changes introduced by the machine unlearningprocess, the distribution and histogram of propensity scores are analyzed before and after retraining the models.by combining forget set creation, model retraining, and propensity score analysis, this study explores the effectiveness of machine unlearningin refining propensity score models for improved causal inference. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/635.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/635.txt new file mode 100644 index 0000000000000000000000000000000000000000..57e72cfb00628d3e5ad9965f245e5476ee0c9ebd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/635.txt @@ -0,0 +1 @@ +the difficulty encountered in recent attempts (liu et al., 2023b;zhong and zhang, 2023, and to an extent also in sherman et al., 2023) towards establishing the rate optimal √ k stems from the need to control the capacity of the policy class explored by the optimization process. since the dynamics in linear mdps cannot be estimated pointwise, the estimation procedure of the actionvalue function involves a linear regression sub-routine where the dependent variable is given by the value function estimate from the previous timestep, which depends on past rollouts in a way that breaks the martingale structure. thus, to establish concentration, an additional uniform convergence argument is required in which the capacity of the policy class plays a central role.to illustrate, let us consider a simplified, non-optimistic estimation routine with non-zero immediate losses only at step h, and let (s i h , a i h , s i h+1 )k-1 i=1 denote a dataset of past agent transitions, and v k h+1 the value function estimated in step h + 1. then the estimation step on time h is given by:where truncate denotes some form of clipping used to keep the estimated action-values in reasonable range (e.g., ). notably, v k h+1 was itself estimated using the same procedure in the previous backward induction step, combined with an expectation given by the agent's policy:which means the estimated quantity is a random variable that depends on all past trajectories through the agent's policy. hence, to establish a least squares concentration bound, the common technique (originally proposed in this context in the work of jin et al., 2020) dictates arguing uniform convergence over the class of all possible value functions v k h+1 explored by the learner. 
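the passage above refers to an estimation step whose display equation appears to have been lost in extraction; a hedged reconstruction in the standard ridge-regression form of jin et al. (2020), using the notation of the surrounding text (the regularizer and the truncation range are assumptions), is:

```latex
% hedged reconstruction of the least-squares value-estimation step on time h;
% the exact regulariser and truncation are assumptions in the spirit of jin et al. (2020).
\[
\widehat{w}^{\,k}_h
  = \Lambda_{k,h}^{-1} \sum_{i=1}^{k-1}
    \phi(s^i_h, a^i_h)\,\bigl(\ell_h(s^i_h, a^i_h) + V^{k}_{h+1}(s^i_{h+1})\bigr),
\qquad
\Lambda_{k,h} = I + \sum_{i=1}^{k-1} \phi(s^i_h, a^i_h)\,\phi(s^i_h, a^i_h)^{\top},
\]
\[
Q^{k}_h(s,a) = \operatorname{truncate}\bigl(\phi(s,a)^{\top}\widehat{w}^{\,k}_h\bigr),
\qquad
V^{k}_h(s) = \mathbb{E}_{a \sim \pi^k_h(\cdot \mid s)}\bigl[Q^{k}_h(s,a)\bigr].
\]
```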
further, the capacity of the class of learner value functions is inevitably tied to the capacity of the learner's policies, and when employing mirror descent updates, these are parameterized by the sum of past action-value functions:now, the problem is that the truncation of the q-functions implies the above expression does not admit a low dimensional (independent of k) representation, and thus leads to the agent's policy and value classes having prohibitively large covering number.the main component of our approach is to employ a reward free warmup period, that eventually allows to forgo the truncation of the action value function, thereby reducing the policy class capacity. indeed, if the action-value functions were not truncated, the policy parameterization could be made effectively independent (up to log factors) of k, as the sum of q-functions will "collapse" into a single d dimensional parameter of larger norm:wherein order to remove the truncations, we observe they are actively involved only in those regions of the state space that are poorly explored; indeed, assume the least squares errors are boudned as:, where λ k,h := i + i φ(s i h , a i h )φ(s i h , a i h ) ⊤ for some β that depends (among other quantities) on max s ′ v k h+1 (s ′ ), and assume we have already shown that v k h+1 (s ′ ) h for all s ′ . then as long as φ(s, a) points in a well explored direction in the state-action space -concretely one where φ(s, a)≤ 1/(βh) -we will get that:thus, forgoing truncations and if all directions were well explored, we would geth , and continuing inductively we accumulate errors across the horizon in an additive manner; v k h ∞ h + (hh)/h. now, while we cannot ensure sufficient exploration in all directions, we can in fact ensure it in "most" directions (those which are reachable w.p. 1/ √ k) using a properly tuned reward free warmup phase, which is based on the algorithm developed in wagenmaker et al. (2022b). the technical argument roughly follows the above intuition, carefully controlling the least squares errors through an inductive argument. this way, we establish the estimated value functions remain in the low capacity function class, for which we have a suitable uniform concentration bound. in each episode k ∈ , the agent interacts with the mdp m k = (s, a, h, {p h } , ℓ k h , s 1 ), that shares all elements with mdps of other episodes except for the loss functions. , ψ h such that p h (s ′ |s, a) = φ(s, a) ⊤ ψ h (s ′ ), and e = φ(s, a) ⊤ g h,k , for some g h,k ∈ r d .• for stationary losses, namely g h,k = g h ∀k, when given noisy bandit feedback, meaning the agent observes only l k h := ℓ k h (s k h , a k h ), and it holds that l k h ∈ and that the expected value of l k h conditioned on past interactions is φ(s k h , a k h ) ⊤ g h ., where λ k,h := i + i φ(s i h , a i h )φ(s i h , a i h ) ⊤ for some β that depends (among other quantities) on max s ′ v k h+1 (s ′ ), and assume we have already shown that v k h+1 (s ′ ) h for all s ′ . a finite horizon episodic mdp is defined by the tuple m = (s, a, h, p, ℓ, s 1 ), where s denotes the state space, a the action set, h ∈ z + the length of the horizon, p = {p h } h∈the time dependent transition function, ℓ = {ℓ h } h∈ a sequence of loss functions, and s 1 ∈ s the initial state that we assume to be fixed w. the learner interacts with a sequence of mdps m k k k=1 , m k = (s, a, h, p, ℓ k , s 1 ) that share all elements other than the loss functions. 
the feedback provided to the learner on episode k time step h is the random instantaneous loss l k h := ℓ k h (s k h , a k h ), where s k h , a k h denote the state and action visited by the agent on episode k time step h. on episode k, the standard optimistic estimates value function estimates are denoted q k h , v k h , while their restricted counterparts are defined by:. establishing the bound in e vbu involves showing (i) e qbd holds for q k;• h+1 , and (ii) that the policy π k h+1 belongs to a "small" policy class. let k, h, and assume we have already proved (i), (ii) and e vbu for (k ′ , h ′ ), k ′ < k and (k, h ′ ), h ′ > h. (11), and note that φ(s i h , a i h ) is f i h -measurable while s i h+1 is f i h+1 -measurable. hence, the value class v • (z h+1 , 2h) is independent of d k h , and we may apply lemma 2 to obtain that w. for any k ≥ k 0 , we have in the adversarial case ℓ k h (s, a) = ℓ k h (s, a) for all s, a, h, k, so ℓ k h (s, a) ≤ 1 by the assumption in definition 1. fix k 0 ≤ k ≤ k, and assume we have already proved the claim for all k ′ , h ∈ {k 0 , ., 2020). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/636.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/636.txt new file mode 100644 index 0000000000000000000000000000000000000000..9686321d677e761ed28c8ec5dbe2d651f9afb1ce --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/636.txt @@ -0,0 +1 @@ +despite the recent progress of machine learning, the question of the optimal encoding of data remains open, especially for tabular data . in this paper, we present the self-encoder, a neural network trained to guess the identity of each data sample. given n data samples x 1 , . . . , x n ∈ ir d , the self-encoder maps any sample x ∈ ir d to a probability distribution p over {1, . . . , n}, in such a way that p(x i ) is close to a dirac in i for each i ∈ {1, . . . , n}. in other words, the self-encoder is a classifier where each sample of the train set has its own label (its index). as such, it belongs to the category of self-supervised learning methods, like auto-encoders. the key difference is that, while auto-encoders rely on a reconstruction task, with the output in the same space ir d as the original sample, our self-encoder relies on an identification task, with the output in the set of probability distributions on the set of indices {1, . . . , n}.despite its simplicity, the self-encoder learns a very useful representation of data. it learns to distribute data in the embedding space in a way that makes them linearly separable from one another. this induces a geometry where two samples are close in the embedding space when they are not easy to differentiate. in particular, the self-encoder can be used for any classification or regression task, using the k nearest neighbors in the sense of this geometry, as given by the ranking of the predicted probabilities p 1 (x), . . . , p n (x), for any sample x.interestingly, these nearest neighbors do not correspond to those given by the euclidean distance in the original space ir d (nor by any other usual metric like a minkowski metric or cosine similarity for instance). the geometry is learned by the model. in particular, the predictions resulting from this encoding of data are invariant to any scaling of the features, making any preprocessing like min-max scaling not necessary.a drawback of the self-encoder is its complexity, as the dimension of the output is equal to n, the size of the training set. 
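a minimal pytorch sketch of the self-encoder described above: a multi-layer perceptron with output dimension n, trained with cross-entropy where sample i carries label i, and nearest neighbours obtained by ranking the predicted probabilities; the hidden width and training schedule are illustrative assumptions.

```python
# sketch of a self-encoder: a classifier where each training sample is its own class.
# hidden width, learning rate and epoch count are illustrative assumptions.
import torch
import torch.nn as nn

def train_self_encoder(x: torch.Tensor, hidden: int = 128, epochs: int = 200) -> nn.Module:
    n, d = x.shape
    model = nn.Sequential(nn.Linear(d, hidden), nn.ReLU(), nn.Linear(hidden, n))
    labels = torch.arange(n)                          # sample i is assigned label i
    opt = torch.optim.Adam(model.parameters(), lr=1e-3)
    loss_fn = nn.CrossEntropyLoss()
    for _ in range(epochs):
        opt.zero_grad()
        loss = loss_fn(model(x), labels)
        loss.backward()
        opt.step()
    return model

def nearest_neighbors(model: nn.Module, query: torch.Tensor, k: int = 5) -> torch.Tensor:
    # neighbours in the learned geometry: training indices with the largest predicted probability
    with torch.no_grad():
        probs = model(query).softmax(dim=-1)
    return probs.topk(k, dim=-1).indices              # can feed a k-nn classifier or regressor
```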
this induces a time complexity in o(n 2 ) for training. to overcome this problem, we present a variant based on sampling where the model is trained to predict the identity of samples in a random subset of the training set, reducing the training time.the rest of the paper is organized as follows. we present the related work in section 2. the self-encoder is presented in section 3. in section 4, we prove that the learned geometry is invariant to linear transformations, like any scaling of the features. the behavior of the self-encoder in the presence of categorical features is analyzed in section 5. the variant of the model based on sampling is described in section 6. the experiments are presented section 7. section 8 concludes the paper. , x n ∈ ir d , the self-encoder maps any sample x ∈ ir d to a probability distribution p over {1, . in other words, the self-encoder is a classifier where each sample of the train set has its own label (its index). in particular, the self-encoder can be used for any classification or regression task, using the k nearest neighbors in the sense of this geometry, as given by the ranking of the predicted probabilities p 1 (x), . , x n ∈ ir d be the set of training samples with d the dimension of the feature space and n the number of samples. the self-encoder is a multi-layer perceptron with input dimension d and output dimension n, trained to predict the identity i of each data sample x i .the output layer is a fully connected layer with input dimension d l (the output dimension of the last hidden layer) and output dimension n. this affine transformation is followed by an activation function φ which is either a coordinate-wise sigmoid function: the output of the network is then a vector p = φ(w h (l) +b) ∈ n that can be interpreted as probabilities: the ith component p i is the probability that the input x corresponds to the training sample x i .loss function in the following, f denotes the learned function of the network, mapping sample vectors x ∈ ir d to probability vectors p ∈ n .geometry in both cases (with or without hidden layers), the self-encoder learns a specific similarity measure in the sense that it can predict the training samples that are the most similar to any new data sample x. any sample x ∈ ir d is said to be close to the training sample x i if the corresponding predicted probability. . the only difference between the two set of samples is that the binary feature of the first binary column of x 1 is encoded on two bits in x 2 in the first two columns. it is reasonable to expect that a classifier trained on x 1 and fed with x1 = 1 1 0 0 makes the same decision as a classifier trained on x 2 and fed with x2 = 0 1 1 0 0 . this is not the case for a euclidean nearest-neighbor as the closest vector from x1 in x 1 is 0 1 0 0 and the closest ones from x2 in x 2 are 1 0 1 0 0 , 0 1 0 0 1 and 0 1 0 1 0 . given a set of training samples x = (x (i) ) i∈{1,. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/637.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/637.txt new file mode 100644 index 0000000000000000000000000000000000000000..7293f5949ec2df34dfe79028f3abcad29bacbfd4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/637.txt @@ -0,0 +1 @@ +the field of stochastic control is concerned with problems where an agent interacts over time with some random environment through the action of a control. 
in this setting, the agent seeks to select the control such that some objective depending on the trajectory of the system under their control and the choice of the control itself is optimised; commonly, as the system is stochastic, such an objective takes the form of an expectation of some pathwise cost or reward. the study of this class of problems has been successfully applied to many fields of modern sciences, including biology cucker & smale (2007), economics kamien & schwartz (2012), engineering grundel et al. (2007), finance pham (2009), and more recently, epidemics control hubert et al. (2022).stochastic control is nowadays regarded as a wellestablished field of mathematics. two main approaches govern the analysis: the stochastic maximum principle and the dynamic programming approach, see yong & zhou (1999); pham (2009). in either case, an agent is interested in characterising a set of optimal strategies, the dynamics of the system under such strategies, and the optimal value of the corresponding reward functional. the two main sources of complexity for tackling these problems are: 1) the continuous-time nature of the underlying stochastic dynamics, and 2) the presence of memory yielding a nonnegligible impact of the system's history on its future evolution.continuous-time non-markovian stochastic control problems, where the evolution of the system depends on its history and not only on its current state, have received an increasing amount of attention in recent years. non-markovian system provides a more faithful class of models to describe real-world phenomena than their markov counterparts, where the (infinitesimal) displacement of the state dynamics depend only on the current state. non-markovianity naturally arises in control problems due to the time delay effects in the system coefficients or the driving noises, which leads to the optimal control strategy being influenced by the historical trajectories of the system states.typical examples of continuous-time non-markovian stochastic control problems include rough volatility models gatheral et al. (2018) from quantitative finance in which the non-markovianity stems from having a fractional brownian motion as the driving noise. fractional brownian motion generalises brownian motion and involves historydependent increments. as a result, the state dynamics driven by fractional brownian motion exhibit non-markovian behavior. another example of non-markovian problems are delayed control problems, where memory is incorporated into the system by assuming path-dependence of the vector fields governing the dynamics (see sec. 3 for a precise statement). optimal decision with time delay is ubiquitous in economics, for example in the study of growth models with delayed production or pension funds models, kydland & prescott (1982); salvatore (2011), in marketing for models of optimal advertising with distributed lag effects gozzi et al. (2009), and in finance for portfolio selection under the market with memory and delayed responses øksendal et al. (2011). see also kolmanovskiı & shaıkhet (1996) for modelling systems with after-effect in mechanics, engineering, biology, and medicine.as the solution to a continuous-time non-markovian stochastic control problem is in general not known analytically, it is important to construct effective and robust numerical schemes for solving these control problems. 
an essential numerical challenge is to effectively capture the nonlinear dependence of the optimal control strategy on the historical trajectories of the system states.contributions using the modern tool set offered by neural rough differential equations (neural rdes) morrill et al. (2021) -a continuous-time analogue to recurrent neural networks (rnns) -we propose a novel framework which, to the best of our knowledge, is the first numerical approach allowing to solve non-markovian stochastic control problems in continuous-time. more precisely, we parameterise the control process as the solution of a neural rde driven by the state process, and show that the control-state joint dynamics are governed by an uncontrolled rde with vector fields parameterised by neural networks. we demonstrate how this formulation allows for trajectories sampling, monte-carlo estimation of the reward functional and backpropagation. to deal with sample paths of infinite 1-variation, which is necessary in stochastic control, we also extend the universal approximation result in kidger et al. (2020) to a probabilistic density result for neural rdes driven by random rough paths. the interpretation is that we are able to approximate continuous feed-back controls arbitrarily well in probability. through various experiments, we demonstrate that the proposed framework is time-resolution-invariant and capable of learning optimal controls with higher accuracy than traditional rnn-based approaches.the rest of the paper is organised as follows: in sec. 2 we discuss some related work, in sec. 3 we present our algorithmic framework and the universal approximation result of neural rdes, and in sec. 4 we demonstrate the effectiveness of the algorithm through numerical experiments.contributions using the modern tool set offered by neural rough differential equations (neural rdes)morrill et al. we demonstrate how this formulation allows for trajectories sampling, monte-carlo estimation of the reward functional and backpropagation.over the last decade, a large volume of research has been conducted to solve markovian stochastic control problems numerically using neural networks, either by directly parameterising the control and then sampling from the state process, such as done byhan et al. let α ∈ (0, 1] and x : ω × → t ⌊1/α⌋ (r 1+d ) be a stochastic α-hölder rough path with the property that the zero-th component of its trace is the time coordinate, x 0 t = t, and whose higher components that involve the zero-th are defined canonically through stieltjes integration. by proposition 8.1 and the fact that the signature of a stochastic process satisfies a linear sde (see equation (13)) that neural rdes parametrised by feedforward neural networks with linear activation functions are dense in probability (in the sense of equation (8)) in the space of all continuous functions on driving rough paths.1, valid in the case of brownian rough paths, which goes as follows: the stratonovich rough path lift x → x is measurable, and thus by lusin's theorem, for any δ > 0 continuous on a compact set k of probability 1 -δ. an α-hölder rough path x consists of an α-hölder continuous path x : → r d (the trace of x) together with a collection of higher-order functions defined on the simplex ∆ := {(s, ∈ 2 | 0 ≤ s ≤ t ≤ t } which represent, in a precise algebraic and analytic sense, iterated integrals of x against itself. 
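a hedged sketch of the overall recipe described above, written as a plain euler discretization of a controlled differential equation rather than the authors' neural rde machinery: the controller's hidden state z is driven by increments of the time-augmented state path via dz = f_theta(z) dx, the control is a linear readout of z, and a pathwise cost is estimated by monte-carlo rollouts and backpropagated; the linear-noise dynamics and quadratic cost below are placeholder assumptions.

```python
# sketch: a path-dependent controller whose hidden state follows
#   z_{t+1} = z_t + f_theta(z_t) @ dx_t   (euler discretisation of dz = f_theta(z) dx),
# trained by monte-carlo estimation of a pathwise cost. the state dynamics and
# the quadratic cost are placeholders, not the paper's benchmark problems.
import torch
import torch.nn as nn

class CDEController(nn.Module):
    def __init__(self, state_dim: int, hidden_dim: int = 16):
        super().__init__()
        self.f = nn.Sequential(nn.Linear(hidden_dim, 64), nn.Tanh(),
                               nn.Linear(64, hidden_dim * (state_dim + 1)))  # +1 channel for time
        self.readout = nn.Linear(hidden_dim, state_dim)
        self.hidden_dim, self.state_dim = hidden_dim, state_dim

    def step(self, z: torch.Tensor, dx: torch.Tensor) -> torch.Tensor:
        vec = self.f(z).view(-1, self.hidden_dim, self.state_dim + 1)
        return z + torch.bmm(vec, dx.unsqueeze(-1)).squeeze(-1)

def rollout_cost(ctrl: CDEController, batch: int = 64, steps: int = 50, dt: float = 0.02):
    x = torch.zeros(batch, ctrl.state_dim)
    z = torch.zeros(batch, ctrl.hidden_dim)
    cost = torch.zeros(batch)
    for _ in range(steps):
        a = ctrl.readout(z)                                    # control = readout of hidden state
        x_new = x + a * dt + torch.randn_like(x) * dt ** 0.5   # placeholder controlled dynamics
        dx = torch.cat([x_new - x, torch.full((batch, 1), dt)], dim=-1)
        z = ctrl.step(z, dx)                                   # hidden state driven by the state path
        cost = cost + (x_new.pow(2).sum(-1) + 0.1 * a.pow(2).sum(-1)) * dt
        x = x_new
    return cost.mean()                                         # differentiable monte-carlo objective
```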
we will denote c α (, r d ) the metrisable topological space of α-hölder rough paths taking values in r d with time horizon t : this is whatfriz & victoir (2010)friz & hairer (2020)(which only treats the case of α > 1/3, nevertheless sufficient for brownian motion, which is α-hölder regular for any α < 1/2) this space is denoted c α g (, r d ), the superscript g standing for "geometric". the main example of rough path that we will consider is the stratonovich brownian rough path augmented with time: if w is a d-dimensional brownian motion, we take α to be any real number in (1/3, 1/2) and for i, j = 1, .the main purpose of rough path theory is to give meaning to rough differential equations (rdes) dy = v (y )dx which, in addition to having usual existence and uniqueness theorems, have the property that the solution map x → y is continuous.the study of signatures is somewhat independent from that of rough paths, and is interesting even in the case of smooth or bounded variation paths (in which case x = x). the main property of interest of the signature, established inhambly & lyons (2010)(and extended to the full rough path case inboedihardjo et al. (2016)), is that, for paths of bounded variation, the series of tensors s(x) 0t determines the path x up to treelike equivalence. roughly speaking, the latter means that if two paths x, y are such that x ⋆ ← -y -with ⋆ denoting path concatenation and ←denoting path inversion -is a path that retraces itself and returns to the starting point, then the signature will not distinguish them: s(x) 0t = s(y ) 0t . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/638.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/638.txt new file mode 100644 index 0000000000000000000000000000000000000000..d78e6d0d9c88889ff29ec26c657accacc472c451 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/638.txt @@ -0,0 +1 @@ +information technology has become widely spread in industrial applications. extraordinarily large amounts of data have been made accessible to users. this has made it difficult to select the data that the user needs. one possible resolution of this issue came from the field of deep learning, from the discovery of recommender systems. these systems help users go through the process of decision-making and selecting the relevant data.a recommender system predicts the users behavior in order to detect their interests and needs, and the relevant data can then be recommended. the state of the art recommender systems typically incorporate embedding layers with enormous sizes. this appears to limit the choice of hardware to run these systems. embedding layers may not fit into gpu memory. there are various, more-or-less straightforward alternative solutions:• one such solution is to store the embedding layers outside of gpu memory, and to apply an efficient caching mechanism in an attempt to minimize the performance hit. , , • an other solution is to use multiple gpus and distribute the huge embedding layers between them. this can be expected to be more efficient, as the communication between multiple gpus can be done more efficiently than copying large chunks of data from cpu to gpu memory whenever a cache-miss occurs. storing embeddings on a parameter server can be expected to be even less efficient than storing them in cpu memory. • of course, alternative hardware could also be used. 
however, due to the enormous size of the embedding layers and the heavy-tailed access pattern of features, it's reasonable to expect that inference will be bottlenecked by memory operations.in this paper we attempt to address this problem by introducing a trainable, compressed embedding layer. intuitively, our solution transforms the enormous size (and the cost of memory operations that come with it) into additional tensor contractions. we show that it is possible to replace enormous embedding layers with significantly smaller, so called compressed, or rather decomposed embedding layers without changing the hyperparameters of the network. this replacement comes without any accuracy losses, at the expense of a negligible latency hit. this, in turn it gives us huge throughput gains when using modern gpus, since recommenders typically leave efficient gpus starving for compute tasks. see . moreover, it gives us significant savings in both hardware and operational costs of running recommender systems.it must be noted, that our method can be used in conjunction with the parameter servers and efficient caching methods already available. see . we show that it is possible to replace enormous embedding layers with significantly smaller, so called compressed, or rather decomposed embedding layers without changing the hyperparameters of the network. moreover, the paper claims that the accuracy of the compressed model is the same as the original one -however, in the paper only the values of the loss function were compared. we suspect, that in some cases the network may need additional training in order to offset the accuracy loss that comes from the int4 quantization of the embeddings. clustering alone requires a lot of compute on top of training the network, in exchange for a theoretical maximum of 8x size reduction (when going from the 32-bit floating point format to the 4-bit integer format), and a smaller expected size reduction. for this reason, it's difficult to compare the accuracy of the compressed model to the original one -since in this paper they were trained on different datasets. we suspect that compression harms the accuracy curve and we would be able to observe this only if we compared the original model to the compressed one, and both models were trained on the exact same dataset, and until the accuracy can improve. we claim that this can be reasonably expected, since the algorithm here creates a few representative embedding vectors to use in place of the original embedding vectors, and attempts to offset the accuracy loss with further training. the kind of measurement that would properly determine whether this algorithm comes with an accuracy loss was omitted from. " both the size of the tensor-train embedding and the extent of the accuracy degradation depend on the hyperparameters of the tensor-train algorithm. therefore, they are not meant to be used for recommenders, where even a slight loss in accuracy results in a measurable loss of revenue.the main assumption here is that if we preserve the pairwise distances between the embedding vectors, then we will also preserve the accuracy of the neural network. there is no reason for the accuracy of the neural network to depend only on the pairwise distances between the embedding vectors.it must be noted, that the claim ofis that this algorithm is able to compress the dlrm model 10000× without any accuracy losses. 
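a minimal sketch of the decomposed embedding idea discussed above: the huge n_rows x dim table is replaced by two much smaller factors whose product reconstructs the requested rows on the fly, trading memory for one extra tensor contraction per lookup; the simple two-factor shape and the rank are illustrative assumptions, not the exact scheme in the paper.

```python
# sketch: replace a huge (n_rows x dim) embedding table with two small factors.
# row i is recovered as Q[i] @ R, i.e. one extra matmul per lookup.
# the rank and the two-factor decomposition are illustrative assumptions.
import torch
import torch.nn as nn

class DecomposedEmbedding(nn.Module):
    def __init__(self, n_rows: int, dim: int, rank: int = 16):
        super().__init__()
        self.q = nn.Embedding(n_rows, rank)                          # n_rows x rank lookup table
        self.r = nn.Parameter(torch.randn(rank, dim) / rank ** 0.5)  # rank x dim shared factor

    def forward(self, idx: torch.Tensor) -> torch.Tensor:
        return self.q(idx) @ self.r                                  # reconstruct the requested rows

# memory: n_rows*rank + rank*dim parameters instead of n_rows*dim.
# usage: emb = DecomposedEmbedding(10_000_000, 128); vecs = emb(torch.tensor([3, 42]))
```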
it must also be noted, that for industrial applications the compression ratio might be less important -accuracy and throughput (subject to an upper bound on latency) are more important. thanks to this idea, the training procedure was able to optimize the few embedding vectors in the cache directly -and perhaps independently from the embedding layers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/639.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/639.txt new file mode 100644 index 0000000000000000000000000000000000000000..065c530f77989896df48e8159b064e4f3071df86 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/639.txt @@ -0,0 +1 @@ +deep learning (dl) has revolutionized applications in areas including healthcare, transportation, and finance. however, as dl models are inherently "black box" in nature, it is hard to understand their inner decision-making logic, which makes it difficult to trust and validate them (rudin, 2019). research in explainable ai (xai) addresses this problem by investigating ways to describe how these black box models work with the help of explanations (van der waa et al., 2021). an explanation acts as an "interface" between humans and a dl model, and it should be at the same time both an accurate proxy of the algorithm and comprehensible to humans (guidotti et al., 2018). explanations can be global or local. global explanation methods provide explanations for how the model makes decisions with an overall view of all the model's components (e.g., model parameters) and their interactions. thus, these explanation methods attempt to justify the entire decision process of the black-box model at once. some examples of global explanation methods to explain tabular data include global attribution mapping (gam) (ibrahim et al., 2019), global aggregations of local explanations (gale) (van der linden et al., 2019), and class model visualizations (simonyan et al., 2013). local explanation methods, on the other hand, provide explanations for what features were considered important for a specific prediction. examples of local explanation methods to explain image data are integrated gradients (sundararajan et al., 2017), local interpretable model-agnostic explanations (ribeiro et al., 2016), and attention (de santana correia, 2021). in both cases (i.e., global, and local explanations), we want the generated explanation to be faithful i.e., to truly depict the internal decision process that the model underwent for its predictions (jain & wallace, 2019; wiegreffe & pinter, 2019;serrano et al., 2019;jacovi et al., 2020).in this paper, we focus on the faithfulness of local explanation methods for dl models and, hence, this is what we refer to with the term "explanations''. additionally, any method that uses an algorithm to generate a particular explanation for an input prediction is called an explanation method. there have been attempts to lay down common assumptions for what criteria should be satisfied for an explanation method to be faithful (jacovi et al., 2020). however, there is still a lack of a unified understanding of what properties are necessary, sufficient, or nice to have when it comes to determining if an explanation method is faithful or not. several tests have been proposed to determine the faithfulness of explanation methods (adebayo et al., 2018;jain et al., 2019;wiegreffe et al., 2019;serrano et al., 2019), however, 2 main issues are observed across these works. 
first, most of these tests have been applied only to specific domains and explanation methods, with minimal information on how they would extend to other domains and explanation methods, making it challenging to analyze and compare results if one must use a different test for every case scenario. second, these existing works lack a rigorous methodology because they do not specify proper thresholds to determine when an explanation method passes their tests.in this work, we contribute to the existing work on faithfulness by focusing on one of the tests proposed in the literature and extending it with a comprehensive methodology to ascertain when an explanation method passes or fails this test. this test is the independent parameter randomization (ipr) test proposed by adebayo et al. (2018) to assess the sensitivity of saliency-based explanation methods for convolutional neural networks (cnns) for image classification tasks. the saliency-based explanation methods considered in this work provide explanations in the form of saliency maps, namely, images that highlight the pixel-level regions of the input image that contributed most to a prediction. figure 1 shows the saliency maps generated by the different explanation methods considered by adebayo et al., (2018) for the classification of the sample input image on the left. the ipr test checks the sensitivity of a given explanation method to perturbations in a cnn as follows: at every turn, it randomizes one parameter layer of the cnn (the other layers retain their original parameters) and generates explanations for predictions from these randomized-layer models. using structural similarity (ssim) and rank correlation metrics, if the explanations provided for the original prediction for the image from these randomized layer models differ from those by the original model, the explanation method is sensitive to changes made to the model's parameter layers. sensitivity to such changes is seen as a necessary condition for the faithfulness of an explanation method, because if the explanation method is insensitive to a layer randomization, it is not leveraging that part of the model architecture and, therefore, it cannot be a faithful depiction of the model's decision-making process.however, two areas in the ipr test require improvements. first, the comparisons made with similarity metrics are done qualitatively, namely, without a specific threshold that defines when two saliency maps, one from the original model and the other from the randomized layer model are "similar enough". second, this work does not define criteria that go beyond individual images, namely, criteria to evaluate the sensitivity of a given explanation method over a whole dataset. this makes it challenging to do an overall comparison among multiple explanation methods for a particular model architecture and dataset. however, there are two important merits of the ipr test. first, it is well-suited to compare the sensitivity of multiple saliency-based explanation methods at once for individual images. second, it is agnostic to the model architecture, so it can be applied to other model architectures and domains as well.therefore, in this paper, we extend this notion of sensitivity of an explanation method by addressing the aforementioned limitations of the ipr test in two ways. 
first, we specify a formal threshold that defines when two saliency maps are "similar enough" to determine when an explanation method is sensitive to the randomization of a given layer.second, we build on this threshold to define criteria to determine the overall sensitivity of an explanation method with respect to a model architecture for an image and a dataset. we present examples of how the sensitivities of multiple explanation methods for cnns can be compared using this extended ipr test methodology. next, we briefly discuss the relationship between the sensitivity of an explanation method and its faithfulness. finally, we consider the extended utility of our test by providing intuition for how it can be adapted to assess different explanation methods in other domains (for example, natural language processing)., (2018)for the classification of the sample input image on the left. the ipr test checks the sensitivity of a given explanation method to perturbations in a cnn as follows: at every turn, it randomizes one parameter layer of the cnn (the other layers retain their original parameters) and generates explanations for predictions from these randomized-layer models. a recent work(halliwell et al.adebayo et al. (2018)report the results of the ipr test for six gradient-based explanation methods., 2016), guided backpropagation (springenberg et al.• by defining an ssim threshold to determine when two saliency maps are similar • by determining when an explanation method is sensitive to layer randomizations for an image • by determining when an explanation method is sensitive to layer randomizations for a dataset notation: let:.• 𝑀 be a model architecture with a set of trained parameter layers 𝐿 • 𝐿 𝑐 be the set of layers in m that are critical for its predictions (𝐿 𝑐 ⊆ 𝐿) • 𝑙 𝑗 ∈ 𝐿 be the trained parameter layers of m • 𝑙 𝑗 ′ be the randomized layer corresponding to a trained parameter layer 𝑙 𝑗 • 𝑀 𝐿 be the model with the original layers • 𝑀 𝑙 𝑗 ′ be the model with the randomized layer 𝑙 𝑗 ′ • 𝐼 be the set of images i.• ssim(𝐸 𝑀 𝐿 𝑖 , 𝐸 𝑀 𝑙 𝑗 ′ 𝑖 ) be the ssim between the explanations generated by the explanation method e from the models 𝑀 𝐿 and 𝑀 𝑙 𝑗 ′ for the prediction on image 𝑖 defining an ssim threshold to determine when two saliency maps are similar: to define a threshold for ssim, we rely on the mapping between ssim and human-perceived image similarity identified bywang et al.definition 1 -sensitivity of an explanation method e to the randomization of a layer 𝒍 𝒋 for an image i: for a given e, if the ssim score between the saliency map generated by a randomized layer model (𝐸 𝑀 𝑙 𝑗 ′.determining when an explanation method is sensitive to layer randomizations for an image: as we now have an ssim threshold to determine if an explanation method is sensitive to the randomization of a layer or not, we can build on this to define a criterion to establish the sensitivity of an explanation method for a specific image.definition 2 -sensitivity of an explanation method e for an image i: we state that 𝐸 is sensitive to 𝑖 if the saliency maps generated by 𝐸 for 𝑖 with model 𝑀 are sensitive to the randomizations of all layers (𝑙 𝑗 ∈ 𝐿) in m. this led to the creation of 17 randomized layer models for inceptionv3, 13 randomized layer models for vgg16, 19 randomized layer models for mobilenetv2, 8 randomized layer models for resnet18, and 16 randomized layer models for resnet50. this contrasts some of the results reported byadebayo et al. 
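a hedged sketch of the extended ipr procedure described above: for each layer, copy the trained model, randomize that layer's parameters, regenerate the saliency map for the same input, and compare it to the original map with ssim; the method counts as sensitive to a layer when ssim falls below a threshold, sensitive for an image when this holds for every listed layer, and sensitive for a dataset when it holds for a sufficient fraction of images. the explain callable, the threshold value, and the dataset-level fraction are placeholders, not values derived in the paper.

```python
# sketch of the extended ipr test. `explain(model, image)` is a placeholder that
# returns a 2-d numpy saliency map normalised to [0, 1]; the ssim threshold and
# the dataset-level fraction are illustrative assumptions.
import copy
import torch
import torch.nn as nn
from skimage.metrics import structural_similarity

def randomize_layer(model: nn.Module, layer_name: str) -> nn.Module:
    rand_model = copy.deepcopy(model)
    layer = dict(rand_model.named_modules())[layer_name]
    with torch.no_grad():
        for p in layer.parameters():
            p.normal_()                       # re-initialise only this layer's parameters
    return rand_model

def sensitive_for_image(model, image, layer_names, explain, threshold=0.5) -> bool:
    base_map = explain(model, image)          # saliency map from the trained model
    for name in layer_names:
        rand_map = explain(randomize_layer(model, name), image)
        ssim = structural_similarity(base_map, rand_map, data_range=1.0)
        if ssim >= threshold:                 # maps still "similar enough" -> insensitive to this layer
            return False
    return True                               # sensitive to the randomisation of every listed layer

def sensitive_for_dataset(model, images, layer_names, explain, min_fraction=0.9) -> bool:
    hits = sum(sensitive_for_image(model, im, layer_names, explain) for im in images)
    return hits / len(images) >= min_fraction  # dataset-level criterion; the fraction is an assumption
```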
guided gc builds up on guided bp by generating saliency maps through an element-wise product of the guided bp saliency weights and the saliency weights from the last convolutional layer of the model, which provides high-resolution and class-discriminative explanations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/64.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/64.txt new file mode 100644 index 0000000000000000000000000000000000000000..c717c19678751bc6f183059f9a86132af430329c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/64.txt @@ -0,0 +1 @@ +the last decade has witnessed a remarkable transcendence of data and information-centric services in all spheres of the global domain. the availability of powerful computational resources and the ascendancy of learning systems have further led to unprecedented growth in data-generating devices and sensors. it has been estimated that close to 29 billion devices would be connected to the internet by 2023 with the data traffic expected to reach 131 exabytes (eb) by the end of 2024 . furthermore, the requirements for 6g aiming for data rates of approximately 1tbps per user have further reinforced a growing realization that the traditional centralized/cloud systems would be unable to efficiently manage the accompanying computation requirements. the ability to imbue systems with intelligence is at the forefront of this technological revolution. conventional machine learning tasks have rapidly found applications in multiple domains. simultaneously, deep learning (dl) has risen meteorically through the last decade with unparalleled performance primarily in computer vision and natural language processing (nlp) fields. however, the performance offered by deep learning systems comes with significant computation and memory costs in addition to massive data requirements. as of writing this article, the current state-of-the-art (sota) for image classification in the imagenet large scale visual recognition challenge (ilsvrc) is the fixefficientnet-l2 which achieves a top-5 accuracy of 98% using 480m parameters. however, providing similar performance in related applications at the user-end is currently constrained by limited computational resources exacerbated by collecting, communicating and storing the required amount of data. the paradigm shift in the nature of networked services and the transformation of the connected devices coupled with the distributed nature of data requires a decentralized approach for extracting maximum learning benefits. to avoid overwhelming the network and data servers, the computational load must be moved at or closer to the network edge. edge computing offers a potentially powerful solution to this problem. the edge computing framework aims to leverage distributed computing concepts to alleviate the computational load from the network core benefiting from processing power available close to the network edge. the confluence of artificial intelligence (ai) and edge computing results in edge intelligence (ei). application of deep learning to achieve edge intelligence offers the added benefit of employing raw data without the considerable feature engineering and pre-processing overhead. the computation power of the elements closer to the edge network offers a powerful alternative to centralized computing albeit in a distributed manner. 
successful exploitation may result in elements of cloud services being shifted in close proximity to the data sources ensuring better data security as well as reduced load on the network backbone. our contribution with this article is an attempt at formalizing the key constraints which must be addressed for realizing efficient dei applications. to the best of our knowledge, these constraints have been disparately discussed while 'device disparity' and 'inference transparency' have not been formally considered previously. we further unify broad research avenues and classify the work under these categories to provide a concise overview of the evolution of deep edge intelligence. lastly, we attempt to identify challenges and research directions not only related to the implementation of dei but also present some potential learning schemes which might be highly suitable for this rapidly evolving domain. to summarize, the unique highlights of this article are as follows: a) we consolidate the operational constraints for deep learning-based edge intelligence to reflect aspects not only related to edge intelligence but also deep learning. the edge computing framework aims to leverage distributed computing concepts to alleviate the computational load from the network core benefiting from processing power available close to the network edge. application of deep learning to achieve edge intelligence offers the added benefit of employing raw data without the considerable feature engineering and pre-processing overhead. successful exploitation may result in elements of cloud services being shifted in close proximity to the data sources ensuring better data security as well as reduced load on the network backbone. to summarize, the unique highlights of this article are as follows: a) we consolidate the operational constraints for deep learning-based edge intelligence to reflect aspects not only related to edge intelligence but also deep learning. edge intelligence aims at maximally offloading the learning and inference computations to the edge level thus alleviating resource demands at the cloud level.• communication costs: remote computations require data to be exchanged between various distributed elements introducing not only the cost of data communication but the associated overhead costs in already congested networks.• processing and memory: deep learning models require considerable processing and memory resources to extract and learn the deep representations of the data. 3) scalability: with the proliferation of devices at the edge level, sharing data for centralized as well as distributed computing becomes increasingly difficult due to communication and processing bottlenecks. 
deep learning-based edge intelligence, or deep edge intelligence (dei), encompasses executing dl models at the edge. the edge intelligence resulting from the confluence of deep learning and edge computing, however, demands a careful consideration of the operating requirements to maintain a delicate balance between the often conflicting requirements already discussed in section ii. as with all machine learning frameworks, deep learning involves two distinctive phases: a) training and b) inference; the modalities involved in both of these processes operating in a distributed environment significantly alter the way dei problems are formulated. prior work classifies the ai solutions in the edge environment as ai for edge, for managing network operations, and ai on edge, for various ai applications operating at edge networks; other work elucidates the modes of deep learning operation for edge intelligence, while further work discusses software and hardware optimization for deep learning in a distributed setting. in a distributed environment, augmentation across all data instances may also exacerbate the adverse properties of the data, the effects of which are likely to propagate across devices due to model sharing. starting from the observation that data and devices are on track to become prohibitive for centralized computation, we introduce the concepts behind edge intelligence and subsequently focus on deep edge intelligence (dei). effective learning strategies such as multi-task learning, self-supervised learning, and meta-learning are some of the key domains whose application in dei can lead to more generalized learning schemes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/640.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/640.txt new file mode 100644 index 0000000000000000000000000000000000000000..b382b475c1d6415ab88327d3b7a1b9de1b786c46 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/640.txt @@ -0,0 +1 @@ +deep neural networks have revolutionized various fields of machine learning over the past decade, from computer vision to natural language processing. these models are often left unexplained, causing practitioners difficulties when troubleshooting training or inference issues and poor performance. this can lead to a lack of user trust in the model and an inability to understand which features are important to a model's function. various regulations have been proposed that would require ml models to be transparent in certain scenarios. attribution methods, sometimes called salience maps, are a response to this issue, purporting to explain the working of a model by indicating which inputs are important to a model's output. one group of such methods, game-theoretic attribution methods, produce attributions in a principled way by stipulating axioms, or guiding principles, and proposing methods that conform to those principles. when axioms are posited, the possible forms of an attribution become constrained, possibly to a single, unique method. this is the case with the integrated gradients (ig) method. initially introduced and analyzed in axiomatic attributions for neural networks, counterexamples to its uniqueness claims have since been provided. while the original uniqueness claim about ig is problematic, in this work we show that ig uniqueness claims can be established rigorously via different axioms.
we start by introducing different axioms common to game-theoretic attribution methods, namely, implementation invariance, linearity, dummy, and completeness. then, using these axioms, we establish the following characterizations. 1. path methods can be characterized among attribution methods by the linearity, completeness, dummy, and non-decreasing positivity axioms. 2. ig can be characterized among monotone path methods by the symmetry-preserving and affine scale invariance axioms. 3. ig can be characterized among attribution methods by the linearity, affine scale invariance, completeness, non-decreasing positivity, and proportionality axioms. 4. ig can be characterized among attribution methods by the linearity, completeness, dummy, and symmetric-monotonicity axioms. 5. ig can be characterized among attribution methods by its action on monomials and the continuity of taylor approximations for analytic functions axiom. furthermore, we show that ig attributions to neural networks with relu and max functions coincide with ig attributions to softplus approximations of such models. this establishes a sort of continuity of ig among softplus approximations. a baseline attribution method (bam) reports a vector, so that a_i(x, x', f) reports the contribution of the i-th component of x to the output f(x), given the reference baseline input x'. the magnitude of a_i(x, x', f) indicates that x_i contributed that quantity to the change in function value from f(x') to f(x). the integrated gradients method is the path method defined by the straight path from x' to x, given as γ(x, x', t) = x' + t(x - x'), and takes the form: definition 4 (integrated gradients method): ig_i(x, x', f) = (x_i - x'_i) ∫_0^1 ∂f/∂x_i(x' + t(x - x')) dt. to do this, we replace the condition ∂f/∂x_i(x) ≤ ∂g/∂x_j(x) for all x with a condition applicable to non-differentiable functions. now, consider a simple example function we would like to perform attribution on, f(x_1, x_2) = (x_1 - x'_1)^100 (x_2 - x'_2). thus, for each m, there exists c ∈ r^n such that c_i = 0 and f(x) - f_λ(x) + c^⊤ x is non-decreasing from x' to x. we may also gain the reverse, a_i(x, x', f) ≤ ig_i(x, x', f), using a similar method. because (x, x', f), (x, x', f) ∈ d_ig, ∂f/∂x_i and ∂f/∂x_j can be integrated along the path γ(t) = x' + t(x - x'), implying that the measure of points on the path where ∂f is undefined is zero. an argument similar to the k = 1 step shows that for almost every x in our neighborhood, s^{k+1} ∘ f^{k+1} ∘ g^k is equivalent to an analytic function in some new open neighborhood containing x, and s_α^{k+1} ∘ f^{k+1} ∘ g_α^k converges. we then consider a collection of points x ∈ u with the desirable properties, and a collection of open sets n_x containing them, where s^{k+1} ∘ f^{k+1} ∘ g^k is locally equivalent to an analytic function on n_x. assume that there exists u* ⊂ u with the same measure as u, and that x ∈ u* implies that there exists an open region containing x, b_x, such that: 1) g^k ≡ h_x on b_x, where h_x is a real-analytic function on u, 2) dg^k(x) exists, and 3) dg_α^k(x) → dg^k(x) as α → ∞. then f^{k+1} ∘ g^k is analytic on b_x and so is s^{k+1} ∘ f^{k+1} ∘ g^k, and the derivative exists on b_x. x, where properties 1-3 hold for s^{k+1} ∘ f^{k+1} ∘ g^k and s_α^{k+1}. according to our assumption, there exists a b_x containing x where properties 1-3 hold for g^k, g_α^k on b_x. since lim_{n→∞} ig_i(x, x', f_{a_n}) = ig_i(x, x', f) for any sequence a_n, we have lim_{α→∞} ig_i(x, x', f_α) = ig_i(x, x', f).
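to make the straight-path construction above concrete, here is a small riemann-sum approximation of integrated gradients for a differentiable pytorch function; the number of steps and the toy function are illustrative choices, not taken from the paper.

```python
# Sketch: integrated gradients along the straight path x' + t(x - x'),
# approximated with a midpoint Riemann sum over `steps` points.
import torch

def integrated_gradients(f, x, x_baseline, steps=64):
    """Approximate IG_i(x, x', f) = (x_i - x'_i) * ∫_0^1 ∂f/∂x_i(x' + t(x - x')) dt."""
    total_grad = torch.zeros_like(x)
    for k in range(steps):
        t = (k + 0.5) / steps                      # midpoint of the k-th subinterval
        point = (x_baseline + t * (x - x_baseline)).detach().requires_grad_(True)
        f(point).backward()
        total_grad += point.grad
    return (x - x_baseline) * total_grad / steps

# Toy usage: attribution for f(x) = x_0^2 + 3*x_1 relative to a zero baseline.
f = lambda z: z[0] ** 2 + 3 * z[1]
x = torch.tensor([1.0, 2.0])
baseline = torch.zeros_like(x)
print(integrated_gradients(f, x, baseline))        # ≈ [1.0, 6.0]
```

the attributions sum to f(x) - f(x') = 7, which is the completeness axiom named above.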
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/641.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/641.txt new file mode 100644 index 0000000000000000000000000000000000000000..441f952c98f1b3b619357ef242edbd08f67bafb0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/641.txt @@ -0,0 +1 @@ +septic shock, the most severe form of sepsis, is characterized by profound circulatory and cellular abnormalities and is associated with a high mortality rate . early detection and treatment of sepsis and septic shock are critical for improving patient outcomes, as delays in intervention can lead to a rapid decline in a patient's condition . while various methods have shown promise in predictive accuracy , their interpretability remains a significant concern. understanding and explaining a model's underlying mechanisms is crucial for gaining trust , facilitating model debugging, and informing clinical decision-making . interpretability and explainability methods for time series forecasting models can provide valuable insights into the relationships between vital signs and the risk of septic shock . several studies have been undertaken to incorporate model-agnostic explainability by leveraging rule-based methods and argumentation . this paper explores the interpretability of vital sign forecasting models for patients with sepsis and septic shock condition in critical care settings. based on our knowledge, this work is one of the first to explore deep learning models to forecast the vital signs in the eicu dataset . further, we investigate the interpretability and explainability of patients' forecasted signals in conjunction with drug infusion. our goal is to contribute to developing more interpretable and trustworthy models for septic shock prediction, ultimately improving patient outcomes. early detection and treatment of sepsis and septic shock are critical for improving patient outcomes, as delays in intervention can lead to a rapid decline in a patient's condition. interpretability and explainability methods for time series forecasting models can provide valuable insights into the relationships between vital signs and the risk of septic shock. this paper explores the interpretability of vital sign forecasting models for patients with sepsis and septic shock condition in critical care settings. our goal is to contribute to developing more interpretable and trustworthy models for septic shock prediction, ultimately improving patient outcomes. by accurately forecasting the trend component, we aim to provide valuable support to clinicians in monitoring the vital signs trend of patients and making informed decisions regarding medication administration. our experiments focused on forecasting the mean blood pressure (mbp) of the patients diagnosed with sepsis or septic shock.additionally, we investigate the extracted trend of the forecasts obtained using the n-beats interpretable configuration and analyze cases where the forecasted trend does not align with the actual trend.1. we observe that, in several samples, a noticeable deviation between actual and forecasted trends occurs when drugs are administered after the training cut-off, as seen with patient id 261982, who experienced an increased mbp trend due to the administration of vasoactive drugs like norepinephrine and vasopressin. 
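one simple way to operationalize the trend comparison described above is to fit a straight line to the actual and forecasted vital-sign values after the training cut-off and flag cases where the slopes disagree in sign; this numpy sketch is illustrative and is not the n-beats pipeline itself, and the example values are hypothetical.

```python
# Sketch: flag a mismatch between the actual and forecasted MBP trend
# over the forecast horizon (after the training cut-off).
import numpy as np

def trend_slope(values):
    """Least-squares slope of a 1-D series against time steps 0..n-1."""
    t = np.arange(len(values))
    return np.polyfit(t, values, deg=1)[0]

def trend_mismatch(actual, forecast):
    """True if the actual and forecasted trends move in opposite directions."""
    return np.sign(trend_slope(actual)) != np.sign(trend_slope(forecast))

# Hypothetical horizon: MBP rises after a vasopressor is given, but the
# forecast (trained without drug information) keeps drifting down.
actual = np.array([62.0, 63.5, 66.0, 69.0, 71.5, 74.0])
forecast = np.array([62.0, 61.2, 60.5, 59.9, 59.3, 58.8])
print(trend_mismatch(actual, forecast))  # True -> candidate for drug-effect review
```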
since the drug infusion information was an unobserved variable during the training of the forecasting model, the discrepancy in the actual and the forecasted trend can be attributed to the fact that the model is trained on historical vital sign data, which does not include the effects of drugs introduced after the training cut-off. figure1shows the observed mbp trend, the forecasted trend, the training cut-off, and the interval of drug infusion for a specific patient. furthermore, our findings suggest that cases where the actual and forecasted trends matched had a higher mortality rate (92%) compared to cases where the trends were dissimilar (84%). conducting additional studies is crucial for understanding the causal inferences behind these interactions and better comprehending the relationship between medication administration, vital signs, and their subsequent effects.in conclusion, our study utilized the eicu dataset to evaluate the forecasting performance of the interpretable n-beats model, highlighting the significance of accounting for drug infusion's influence on trends in icu patients' vital signs. future research will focus on developing approaches that integrate drug infusion information and investigate drug-to-drug interactions within the icu context, aiming to enhance the overall performance of forecasting models in critical care. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/642.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/642.txt new file mode 100644 index 0000000000000000000000000000000000000000..921e0b713a41096b3c9a21fd4cac05b8bc5fde32 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/642.txt @@ -0,0 +1 @@ +we now turn to the full description and analysis of our algorithm cbnn. in appendix b we describe our novel algorithmic framework canprop. in appendix c we describe contractions and bayesian networks on them, showing how canprop can be implemented with them. finally, in appendix d we describe tsts and how they are used to perform our required operations efficiently. in appendix e we prove, in order, all of the theorems stated in this paper.recall that we created a sequence of distinct nodes ⟨x t | t ∈ ⟩ and defined, for all t ∈ \ {1}, the node n(x t ) := x n(t) . let x := {x t | t ∈ }. given some y : x → , we defined the y-regret and the complexity of y as:respectively. we have k actions and a metric space (c, ∆) where c is a (possibly infinite) set of contexts and for all x, x ′ ∈ c we have that ∆(x, x ′ ) is the distance from x to x ′ . we assume that learner does not necessarily know (c, ∆) a-priori but has access to an oracle for computing ∆(x, x ′ ) for any x, x here, our inductive bias is that if an action a ∈ is good for a context x ∈ c then it is likely also good for contexts that are near to x with respect to the metric ∆. for all x ∈ {x s | s ∈ } we will always have an unique leaf γ(x) ∈ z ⋆ t in which γ(γ(x)) = x. 
given x ∈ x , if there exists a leaf u ∈ j ⋆ with γ(u) = x then λ ′ (x) = λ(u).let a be a finite set of points in r d such that for all x ∈ r d with ∆(x, 0) ≤ 1 there exists x ′ ∈ a with ∆(x, x ′ ) < λ.since ŷ′ (x † ) = ŷ′ (x) we must have that ŷ′ (x ′′ ) ̸ = ŷ′ (x) so that ∆(x, x ′′ ) > ϵ/3.since x ∈ d\l we have ŷ′ (z) = ŷ′ (x) for all z ∈ h(x) and hence we must have x ′ / ∈ h(x) so that ∆(x, x ′ ) ≥ √ d/(2q).since ŷ′ (x † ) = ŷ′ (x) we must have that ŷ′ (x ′′ ) ̸ = ŷ′ (x) so that ∆(x, x ′′ ) ≥ ∆(x, x ′ ).but ŷ(z t ) ̸ = ŷ′ (x t ) implies that x t / ∈ d \ l so since x t ∈ d we must have x t ∈ l which happens with probability p and hence, by equation (7), we have:. by definition of β t (γ t,j ) this means that: e = ϵ t,j -e and that: e = -e multiplying these two equations by p and p respectively, and summing them together, then gives us: e = pϵ t,j -e.first note that the graph (with vertex set x ) formed by linking x to n(x) for every x ∈ x ′ is a tree so that φ(y) ≥ |{y(x) | x ∈ x }| -1. so since for all v ∈ b ′ we have q(y, v) ̸ = ∅ if and only if v has a descendent in {y(x) | x ∈ x } and each element of {y(x) | x ∈ x } has log(k) ancestors in b ′ we have:. given x ∈ x , if there exists a leaf u ∈ j ⋆ with γ(u) = x then λ ′ (x) = λ(u).we have κf ′ (u ′′ ) (z s+1 , λs+1 , u ′′ ) = 1 and for all u ∈ z s we have κf ′ (u) (z s+1 , λs+1 , u) = κf ′ (u) (z s , λs , u).we now define a bijection g : {0, 1} x → 2 x by: g(f ) := {x ∈ x | f (x) = 1} ∀f ∈ {0, 1} x note that for all f : x → {0, 1} and all x ∈ x \ {x 1 } we have: σ(x, g(f ))ϵ + (1 -σ(x, g(f )))(1 -ϵ) = f (x) ̸ = f (n(x)) ϵ + f (x) = f (n(x)) (1 -ϵ) and:. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/643.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/643.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1d2a260fddc5981dff20ddc5c1079aa48b0cbfe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/643.txt @@ -0,0 +1 @@ +one of the significant problems in classification tasks is a class imbalance, which occurs when one class value in the target variable is highly dominant over the other class values. it is imperative to handle the class imbalance in the target variable of classification datasets, especially in predicting rare-case scenarios. if the class imbalance is not treated, machine learning models built on the imbalanced data will suffer from unwanted bias, leading to incorrect classifications. in rare cases of classification problems, misclassification often leads to severe consequences. several class imbalance handling techniques have been proposed over the years. sampling techniques like random undersampling, random oversampling, smote , adasyn , and other variants of smote can be used to handle the class imbalance, but they have their limitations. due to recent advancements in deep learning, neural network models were employed to generate synthetic data. with the emergence of gans , gan-based architectures were also used in synthetic data generation. with the success of deep learning architectures in generating synthetic data on images and text, state-of-the-art architectures like gans were extended to synthesize tabular data. from and , it is evident that robust real-world application models need large-scale training data to yield superior performance. but large-scale data is not often readily available. 
in such cases, synthetic data generation techniques can curate a large-scale dataset with the same structural and statistical properties as the original data .this research uses gan-based models to create synthetic data points with similar structural and statistical properties to the original data. synthetic data with the best qualitative properties is combined with actual data to evaluate the utility of the synthetic data in class balancing and low-resource setting experiments. the objectives of this paper are as follows:• to evaluate the utility of synthetic data from gan, smote, and adasyn in complementing the train data in low resource settings (classification tasks). • to compare gan and non-gan synthetic data generation techniques (smote and adasyn) on highly imbalanced data. synthetic data with the best qualitative properties is combined with actual data to evaluate the utility of the synthetic data in class balancing and low-resource setting experiments. the research community is increasingly using synthetic data in combination with actual data in data crunch situations to overcome the shortage of data.the success of gans in generating synthetic data on images and text has led to their use in synthesizing tabular data with the same features and statistical properties as the original data. a technique for generating synthetic relational databases called ctgan was proposed, while synthetic data vault (sdv) uses gaussian copulas to understand the interdependencies of columns and generate synthetic data. gan architectures, including the synthetic data vault, data synthesizer, and synthpop ecosystems, have been evaluated for their utility in generating synthetic data for machine learning tasks. however, the authors have not addressed the class imbalance in the datasets and have used datasets with few data points, leaving scope to explore the use of tabular synthetic data to complement training data for machine learning models in low-resource settings. for class balancing, four versions of data were available for modeling, including the original imbalanced data and data balanced with each of the smote, adasyn, and gan techniques. twelve versions of data were available for low resource settings, including the original data sets and the enriched data sets generated using gan, smote, and adasyn.in the low resource setting experiment, classification models were built using the rf algorithm, and the data were transformed using principal component analysis (pca); for data enriched with synthetic data from gan, seventeen principal components were used to transform the data, while fifteen principal components were used for other data versions. copulagan required 280 minutes for the training phase and only 6 minutes and 11 seconds to produce 410930 (26750 + 53501 + 107002 + 223686) data points.82) values obtained from the imbalanced and smote-balanced data were better than those obtained from the data balanced with adasyn and gan synthetic data. the balanced data showed similar performance to the imbalanced data, but the real impact of balancing with synthetic data is reflected in the threshold values. the threshold values of the balanced data were at least twice as large as those of the imbalanced data, indicating that models trained on balanced data can make predictions with greater accuracy than those trained on imbalanced data. the random forest models trained on data enriched with gan synthetic data in low-resource settings have consistently produced superior recall values. 
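a minimal sketch of the kind of balancing-and-classification pipeline described above, using scikit-learn and imbalanced-learn; the dataset, the number of principal components, and the oversampler choice are placeholders, and a gan-based synthesizer such as ctgan would slot in at the same point as the oversampler.

```python
# Sketch: oversample the minority class, reduce with PCA, then train a
# random forest, mirroring the class-balancing / low-resource experiments above.
from sklearn.datasets import make_classification
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from imblearn.over_sampling import SMOTE  # ADASYN is a drop-in alternative

# Placeholder imbalanced dataset (roughly 5% positives).
X, y = make_classification(n_samples=5000, n_features=20, weights=[0.95, 0.05],
                           random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, random_state=0)

# Balance only the training split, never the test split.
X_bal, y_bal = SMOTE(random_state=0).fit_resample(X_train, y_train)

pca = PCA(n_components=15).fit(X_bal)          # component count is illustrative
clf = RandomForestClassifier(n_estimators=200, random_state=0)
clf.fit(pca.transform(X_bal), y_bal)

print(classification_report(y_test, clf.predict(pca.transform(X_test))))
```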
the better recall values observed in the models trained on data enriched with gan synthetic data can be explained by the fact that gan has effectively learned the characteristics of minority class samples, thus generating highquality synthetic data from such samples. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/644.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/644.txt new file mode 100644 index 0000000000000000000000000000000000000000..2319779e803b9c78a99be83c1c32f1ccffa552a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/644.txt @@ -0,0 +1 @@ +solomonoff's general theory of inductive inference aims to solve the problem of sequence prediction by considering a bayesian mixture of all computable explanations for the subsequence observed so far. this naturally give rise to the notion of kolmogorov complexity of a string x, the shortest computer program producing output x. inspired by this construction, we propose a prior based on a bayesian mixture of circuits, with the circuit complexity taking the place of the kolmogorov complexity. this overcomes some of the limitations of the kolmogorov complexity, such as its dependence on the choice of universal turing machine (utm). our model is "parameterized" by our choice of universal logic gate(s), but none of our main results depend on this choice significantly. our approach also circumvents the halting problem, since the output of a small circuit can always be calculated quickly. solomonoff's theory is discussed further in section 2.2.our prior is appropriate for learning boolean functions from partial information. within the machine learning community, this is referred to as supervised learning for binary classification. the inputs can be viewed as features, and we are provided with some input/output pairs (the training data). our task is to predict the output on unseen inputs (the test data). since this is clearly an underspecified problem, some inductive bias is necessary; our prior prefers explanations (boolean functions) with low circuit complexity (as well as those computed by many circuits). the circuit prior is constructed explicitly in section 3. the symbols introduced in this section appear for convenience in the appendix, section 7.2.in section 4.1, we propose a prediction scheme based on the smallest circuit consistent with the training data. this is a natural approximation to the circuit prior, ignoring multiplicity of explanations and predicting based on the best one. the problem is also reduced from an infinite sum to a search. in section 4.2 we derive upper and lower bounds on the number of errors we should expect to make with this approximation. these results are loosely analogous to the coding theorem in algorithmic information theory (ait) or to a deterministic case of the minimum description length (mdl) principle.finally, we suggest some directions for future work, including investigations of infinite sequence prediction. inspired by this construction, we propose a prior based on a bayesian mixture of circuits, with the circuit complexity taking the place of the kolmogorov complexity. since this is clearly an underspecified problem, some inductive bias is necessary; our prior prefers explanations (boolean functions) with low circuit complexity (as well as those computed by many circuits). 
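a small, self-contained sketch of the smallest-circuit prediction scheme mentioned above, under the assumption that circuits are built only from NAND gates and that circuit size is measured by gate count; the brute-force enumeration and the toy training bits are illustrative, not the paper's construction.

```python
# Sketch: predict with the smallest NAND-gate circuit consistent with the
# observed (input -> bit) training pairs, in the spirit of the scheme above.
from itertools import product

def eval_circuit(wiring, bits):
    """wiring: list of (a, b) source-wire pairs; wires 0..n-1 are the inputs."""
    values = list(bits)
    for a, b in wiring:
        values.append(1 - (values[a] & values[b]))  # NAND gate
    return values[-1]

def smallest_consistent_circuit(train, n_inputs, max_gates=4):
    """train: dict mapping input tuples to observed bits."""
    # Size-0 candidates: constants and single-input projections.
    for const in (0, 1):
        if all(bit == const for bit in train.values()):
            return ("const", const)
    for i in range(n_inputs):
        if all(bit == x[i] for x, bit in train.items()):
            return ("proj", i)
    # Enumerate wirings with 1, 2, ... gates until one fits the data.
    for n_gates in range(1, max_gates + 1):
        source_choices = [list(product(range(n_inputs + g), repeat=2))
                          for g in range(n_gates)]
        for wiring in product(*source_choices):
            if all(eval_circuit(wiring, x) == bit for x, bit in train.items()):
                return ("nand", wiring)
    return None

def predict(circuit, x):
    kind, spec = circuit
    if kind == "const":
        return spec
    if kind == "proj":
        return x[spec]
    return eval_circuit(spec, x)

# Toy usage: three of the four input/output pairs of XOR are observed.
train = {(0, 0): 0, (0, 1): 1, (1, 0): 1}
circuit = smallest_consistent_circuit(train, n_inputs=2)
print(predict(circuit, (1, 1)))  # prediction for the unseen input
```

on this toy data the smallest consistent circuit computes an or rather than an xor (which needs more NAND gates), so the unseen input (1, 1) is predicted as 1; this is exactly the low-complexity inductive bias discussed above.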
instead of considering a fixed boolean function, which can be represented by a circuit, and judging the performance of a learning algorithm on test data as a random variable depending on randomly selected training data, we study bayesian optimal prediction based on a latent circuit with some prior distribution. since we are interested in computing strings by circuit, and a circuit produces a bit of a string given its index as input, it is perhaps more pertinent to consider programs which perform the same task.the naive extension of a tm computing a string to circuits would be to define the complexity of a string x as the "size" (or some other complexity measure involving number of inputs and/or gates) of the smallest circuit that outputs x on some input.in fact, the natural idea is to choose the smallest circuit that produces the bits of x given their index within x. of course, the length of x may not be a power of 2, so it's immediately clear that we will have some freedom in our choice of circuit for x, since the values for indices beyond it's length are ambiguous. then, if we are attempting to learn a boolean function, and our prior information tells us that it was computed by some circuit sampled from λ with probability given by , our prior on b is = • -1 , the pushforward * ( ) of to b .we will investigate effectiveness of this heuristic for -that is, how badly can we do by using the smallest circuit that explains the bits of a string x we have seen? this will form the first contribution we are aware to a theory of prediction based entirely on circuits analogous to solomonoff induction. note that i is the minimum circuit size in the sense of the well known minimum circuit size problem (mcsp).note that this definition can be read as stating that i(x) is the size of the smallest circuit that computes the bits of x when given their indices as input, with behavior on any out of range indices unspecified. in particular, a short program for a string x cannot necessarily be translated into a small circuit, since the cook-levin construction would roll a computation history out to a very large circuit if the runtime is large. we have shown that there are many degrees of freedom in specifying a reasonable circuit prior, which are encapsulated by , the choice of prior on the circuit size. the circuit prior is clearly lower semicomputable (enumerating circuits up to a maximum circuit size), and since it is a measure it is also estimable (the term "computable" is elsewhere used to mean estimable). interestingly few of the results in this paper actually depend on the nature of circuit computation, which was an intentional choice in view of the difficulty of finding circuit lower bounds, but suggests that results may be sharpened by taking advantage of circuit upper bounds. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/645.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/645.txt new file mode 100644 index 0000000000000000000000000000000000000000..9dd10dbc88695036332c516a0a3469dbc2da23db --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/645.txt @@ -0,0 +1 @@ +advances in digital technology have led to the digitization of everyday activities of billions of people around the world, generating vast amounts of data on human behavior. 
from what people buy, to what information they search for, to how they navigate the social, digital, and physical world, human behavior can now be measured at a scale and level of precision that human history has not witnessed before. these developments have created unprecedented opportunities for those interested in understanding observable human behavior-social scientists, businesses, and policymakers-to (re)examine theoretical and substantive questions regarding people's behavior.moreover, technology has led to the emergence of new forms of consumer marketplacecrowdfunding (whereby entrepreneurs obtaining funds from an anonymous online crowd; mukherjee, chang, & chattopadhyay 2019) and crowdsourcing (whereby organizations gather new ideas and business solutions from an anonymous online crowd; mukherjee, xiao, wang, & contractor, 2018)-which not only details people's behavior in exchange of products and services but also led to new behavior.making sense of the vast amount of fine-grained data about consumer behavior, however, poses nontrivial challenges for marketing researchers and practitioners. in the past, behavioral data about consumers originated from sources such as point-of-purchase scanner data, customer attitude or satisfaction surveys, consumer purchase panels, and laboratory-based experiments. these traditional sources of consumer data are much smaller in scale, much more structured (e.g., in numbers-based data formats which can be directly analyzed), and measured to purpose, than new consumer data sources. consequently, many of the methods used to analyze traditional customer data-such as conventional econometric and statistical methods-are not designed to deal with the breadth, precision, and scale of the new consumer data sources publicly available, which tend to be unstructured-written texts, images, audios, and videos-and require parsing and processing before data can be analyzed.fortunately, a parallel trend to the emergence of "big data" on consumer behavior has been the emergence of computational methods and analysis techniques needed to deal with these new sources of behavioral data-which tend to be more unstructured, of much larger scale, and noisier. specifically, data on consumers come in four basic forms: (1) structured data, (e.g., number of likes on facebook), (2) textual data (e.g., tweets on twitter), (3) audial data (e.g., spotify radio advertisements), and (4) visual data (e.g., photos on tripadvisor). consumer data can involve only one form, such as textual messages (e.g., tweets on twitter) and visual images (e.g., instagram photos). consumer data can also involve more than one form. video data, which are increasingly prevalent, combine a series of visual images (typically, 24 visual frames per second) and an audio track. many publicly available sources of consumer relevant data detailing people's consumption behavior involve multiple data elements. for example, consumer data from youtube combines all four of these basic forms-audial and visual data in the video, textual data in the comments, and structured data in the number of views and likes. 
for each of these four data elements, new machine learning and big data methods enable us to simply and easily parse the data to uncover consumer insights if analysts are equipped with the right toolkit.as both the availability of large-scale behavioral data and computational analysis methods are recent and emerging developments, many behavioral scientists and practitioners may be unaware or unfamiliar with (1) new sources of secondary data and types of data that are available to extracts insights about consumer behavior, and (2) new analysis techniques to study consumer behavior at scale. therefore, motivated by these recent developments and opportunities, the main objective of this article is to discuss computational methods (specifically, machine learning methods) for researchers and practitioners interested in addressing customer-relevant questions using new secondary data sources that are publicly available. this article offers a primer on the application of computational social science for understanding consumer data for both researchers and practitioners.the rest of this article is organized as follows. first, types of unstructured data pertaining to consumer behavior-including the information that consumers are exposed to and their digital footprints in the modern marketplace-will be decomposed to their underlying data elements.next, machine learning and computational techniques to parse and process unstructured customer data are described. finally, potential directions for future research using consumer data are discussed. consequently, many of the methods used to analyze traditional customer data-such as conventional econometric and statistical methods-are not designed to deal with the breadth, precision, and scale of the new consumer data sources publicly available, which tend to be unstructured-written texts, images, audios, and videos-and require parsing and processing before data can be analyzed.fortunately, a parallel trend to the emergence of "big data" on consumer behavior has been the emergence of computational methods and analysis techniques needed to deal with these new sources of behavioral data-which tend to be more unstructured, of much larger scale, and noisier. many publicly available sources of consumer relevant data detailing people's consumption behavior involve multiple data elements. for example, consumer data from youtube combines all four of these basic forms-audial and visual data in the video, textual data in the comments, and structured data in the number of views and likes. for each of these four data elements, new machine learning and big data methods enable us to simply and easily parse the data to uncover consumer insights if analysts are equipped with the right toolkit.as both the availability of large-scale behavioral data and computational analysis methods are recent and emerging developments, many behavioral scientists and practitioners may be unaware or unfamiliar with (1) new sources of secondary data and types of data that are available to extracts insights about consumer behavior, and (2) new analysis techniques to study consumer behavior at scale. therefore, motivated by these recent developments and opportunities, the main objective of this article is to discuss computational methods (specifically, machine learning methods) for researchers and practitioners interested in addressing customer-relevant questions using new secondary data sources that are publicly available. 
first, types of unstructured data pertaining to consumer behavior-including the information that consumers are exposed to and their digital footprints in the modern marketplace-will be decomposed to their underlying data elements.consumers information environment-including information and media that consumers are exposed to in learning about brands and products as well as data generated by consumers' behavior-can be decomposed to four basic data elements: structured (numbers), textual, audio, and visual data.these machine learning, natural language processing, and data processing tools allow researchers and practitioners to examine consequential customer behavior at scale, leveraging new secondary data sources (as described in earlier section) and types of data (e.often, the key analytical challenge with large-scale structured data is to determine how to organize the data analysis such that the data can be collected, stored, and analyzed in a principled workflow (seelazer et al.ai, and google's cloud services. these automated image analysis techniques can help parse and process image data for subsequent data analysis using standard statistical and econometrics models (seechang et al. these tools allow us to examine consequential consumer behavior in the marketplace at scale, leveraging new secondary data sources and types of data. machine learning methods facilitate data processing and analyses of data sources that were challenging to analyze just a few years ago, leading to new possibilities for researchers and analysts to extract novel behavioral insights about individuals and groups at scale. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/646.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/646.txt new file mode 100644 index 0000000000000000000000000000000000000000..85939dc872f707dfe9625802a4baa0a44881f2ff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/646.txt @@ -0,0 +1 @@ +a s artificial intelligence (ai) permeates our lives more and more, mechanisms such as deep neural networks are put to use (or their deployment planned) in more and more places in various distributed systems. in particular, wireless sensor network (wsn) can improve its coverage and connectivity, reduce energy and bandwidth usage by deploying ai onto edge nodes .the data pipeline with ai typically requires the creation and the use of a model, i.e. a layered structure of complex algorithms (also known as operators) which interpret data and make decisions based on that data. this model must first be trained (learning phase ), before it can be put in production (used for inference).recent work from the tinyml community forays into optimizing models to fit tinier resource budgets (and to perform efficiently nevertheless) on low-power microcontrollers in the internet of things (iot). as a consequence, both learning and inference placement possibilities are extended to encompass ultra low-power terminals.however, generic and convenient open source tools lack for designers tackling a combination of ai and iot (aiot), who are required to:corresponding author: z. huang. e-mail: zhaolan.huang@fu-berlin.de z. huang, k. zandberg, and k. schleiser are affiliated with freie universität berlin. e. 
baccelli is affiliated with inria, france.• evaluate the performance of their models when placed somewhere along the terminal-edge-cloud continuum, especially when including potential placement on different microcontroller-based devices; • fine-tune their models, and identifying performance bottlenecks at model layer granularity, on different microcontrollers;• select an adequate microcontroller to execute their model, for a targeted task running on a low-power device to-bedesigned.this paper thus introduces u-toe, an open source aiot toolkit which tightly combines a generic model compiler and a popular low-power iot operating system to automatically compress, flash and evaluate arbitrary models (output of tensorflow, pytorch...) on arbitrary commercial off-theshelf low-power boards (such as bbc:microbit, nrf52840dk, arduino zero, hifive...) based on the popular microcontroller architectures (arm cortex-m, risc-v, esp32).• evaluate the performance of their models when placed somewhere along the terminal-edge-cloud continuum, especially when including potential placement on different microcontroller-based devices; • fine-tune their models, and identifying performance bottlenecks at model layer granularity, on different microcontrollers;.this paper thus introduces u-toe, an open source aiot toolkit which tightly combines a generic model compiler and a popular low-power iot operating system to automatically compress, flash and evaluate arbitrary models (output of tensorflow, pytorch. this implementation enables compilation, flashing, and evaluation of neural network (computational graph based) models from mainstream ml framework onto various low-power boards based on popular isa. • we provide benchmarks and a comparative experimental evaluation using u-toe, reproducible both on an openaccess iot testbed and on personal workstations, which provide insights on inference performance with different models on different low-power hardware and demonstrate how u-toe can be re-used by tinyml experimental researchers and developers to fine-tune iot configurations. while such papers provide a performance comparison of specific frameworks on specific boards for specific tasks, u-toe offers greater flexibility and generality, allowing developers to evaluate a wider range of (user-specified) models on a larger variety of low-power devices, and to dive into the execution details of ml models. though it can provide us execution details in layer level, it still lacks the supports for on-device deployment and evaluation on various iot devices, while u-toe is a more general-purpose toolkit that provides a comprehensive solution on a wide range of low-power devices.one the one hand, as the most immediate limiting resource budget on microcontrollers concern memory limitations, typically in the order of kilobytes, tinyml performance evaluation typically focuses primarily on metrics measuring memory consumption -while keeping an eye on execution speed -as described below. by analyzing these metrics, users can make initial decisions regarding model selection, optimization techniques, and hardware configurations to maximize performance and minimize the resource footprint on lowpower devices.u-toe integrates utvm and riot to perform model compilation, flashing, and evaluation of arbitrary models from mainstream ml frameworks onto various low-power boards. 
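the metrics u-toe collects (memory footprint and execution time) can be approximated off-device for a first sanity check; the sketch below is a rough workstation-side proxy using the tflite interpreter, and is explicitly not the u-toe flow itself, which compiles via microTVM and runs on RIOT-based boards. the model path and the warm-up/repeat counts are placeholders.

```python
# Sketch: a rough workstation-side proxy for the on-device metrics discussed
# above (model size and inference latency), using the TFLite interpreter.
# This is NOT the u-toe toolchain; it only gives a first-order estimate.
import os
import time
import numpy as np
import tensorflow as tf

MODEL_PATH = "model_int8.tflite"  # hypothetical quantized model file

interpreter = tf.lite.Interpreter(model_path=MODEL_PATH)
interpreter.allocate_tensors()
inp = interpreter.get_input_details()[0]
dummy = np.zeros(inp["shape"], dtype=inp["dtype"])

for _ in range(5):                         # warm-up runs
    interpreter.set_tensor(inp["index"], dummy)
    interpreter.invoke()

times = []
for _ in range(100):
    interpreter.set_tensor(inp["index"], dummy)
    start = time.perf_counter()
    interpreter.invoke()
    times.append(time.perf_counter() - start)

print(f"model size : {os.path.getsize(MODEL_PATH) / 1024:.1f} KiB")
print(f"mean invoke: {1000 * sum(times) / len(times):.2f} ms")
```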
riot was chosen to provide a lightweight runtime environment for model execution and evaluation on microcontrollers, with its advantages in extensibility and wide-spectrum support for low-power boards. we conducted experiments to validate the functionality and compatibility of u-toe on the model side (universal support for model structure and ml frameworks) and on the device side (wide-spectrum support for iot devices). hence, we dived into two orthogonal directions: for device support, we evaluated a quantized lenet-5 on various iot boards; for model compatibility, we evaluated multiple models on a local stm32f746g discovery board. table iii presents the results of various ml models on representative tinyml tasks on a single iot board, proving the universal support of ml framework and model structure. perspectives - as riot board and cpu support expands and improves over time, and as utvm also expands support to other architectures in parallel (both open source communities are very active), u-toe can in a very short time expand its support for new use cases, automatically adding the support of utvm for new boards and the support of riot for new models. finally, we demonstrate the use of u-toe by providing initial experimental evaluation results on popular low-power boards used in the tinyml community, for a wide variety of models from popular model zoos. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/647.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/647.txt new file mode 100644 index 0000000000000000000000000000000000000000..cba85f5d8b688e47b37507a4bc9ea8ef44f611c9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/647.txt @@ -0,0 +1 @@ +in machine learning, distribution shift is the problem where the test distribution is not identical to the training distribution. deep learning (dl) models, including those with good i.i.d. generalization performance, often perform poorly when distribution shifts occur (nguyen et al., 2014). in real-world applications, distribution shifts are unavoidable because the environment changes over time. an emerging requirement for dl systems is that they must be able to handle distribution shifts (amodei et al., 2016). a common approach to the distribution shift problem is to detect out-of-distribution (ood) samples, samples from the shifted distribution, and remove them from the test data. there is a wide array of ood detection methods, ranging from classification-based and density-based to distance-based methods (yang et al., 2021). classification-based methods, which classify incoming data as ood or in-distribution (id) based on the confidence or feature embedding assigned by a classifier, are some of the most commonly used methods. several improvements to classification methods have been proposed, including modifying the loss function (wei et al., 2022; ming et al., 2023), changing the classifier architecture (malinin & gales, 2018), and using post hoc processing techniques (hendrycks & gimpel, 2017; liang et al., 2017; liu et al., 2020; lee et al., 2018b). among these techniques, post hoc methods are often preferred in practice due to their simplicity and ease of integration with pre-trained models without the need for additional training. in this paper, we introduce class typical matching (ctm), a post hoc algorithm for ood detection.
ctm is based on our observation that the cosine similarity between the test input's feature and the in-distribution features is very useful for ood detection (section 2.2). different from other post hoc methods such as mahalanobis (lee et al., 2018a) and knn (sun et al., 2022) which leverage euclidean distance in the feature space, our method uses cosine similarity for ood score computation. in section 4, we theoretically show that cosine similarity is a good indicator for ood samples. our contributions are as follows:1. we empirically and theoretically show that cosine similarity between the feature representation of a test input and a typical id feature is an effective scoring function for ood detection.2. we propose ctm, a post hoc method that uses angular information for improved ood detection.3. we perform extensive experiments and ablation studies to evaluate the proposed method across 3 id datasets and 10 ood datasets. this method scores each input by the largest values of their logits vector. (2022)suggests that the norm of logit is the source of the over-confident behavior of neural network trained with cross-entropy loss.we test the effectiveness of cosine similarity for ood detection by making two modifications to the prediction process of an already trained network, (1) remove the bias b k and normalize w k and (2) normalize the penultimate features before feeding them to the linear layer.where ŵk = w k ∥w k ∥ and ẑ = z ∥z∥ . we also present another modification: cosine with mean (cm) which replaces w k by the mean µ k of the training feature of class k. in particular, given a scalar output function g w parameterized by w ,charpiat et al.this kernel measures how similar output of g at z and z ′ change if the weight w is perturbed. if the value k g (z, z ′ ) closes to 1 then g w (z) and g w (z ′ ) response similar to each other for a perturbation on w . intuitively, large k g (z, z ′ ) indicates that z and z ′ are similar under the point of view of g. (2021), we let g be the kullback-leibler (kl) divergence between the softmax output and a uniform distribution. the gradient of g(z ′ ) w.given the kernel k g , for each test point z which is predicted with label k, we chose the point z ′ as the reference point such that it represents class k and measure the influence between z ′ and z. to get the value k g (z, z ′ ), we first compute the gradient of g w. assume that z is not a zero vector and p is not uniform then ∥∇ w g w (z)∥ > 0.for z ′ = µ k , we observe that p ′ w is approximately onehot vector with (p ′ w ) k = 1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/648.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/648.txt new file mode 100644 index 0000000000000000000000000000000000000000..ffece8b01c840943e2e55643280bee6ae05a2b38 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/648.txt @@ -0,0 +1 @@ +the forward-forward algorithm is a new learning procedure for neural networks that updates network parameters immediately after the forward pass of a layer. an objective (aka, "goodness") function is evaluated on the layer's latent output representations g(h |i) conditioned upon some data integrity i. integrity is broken down into positive and negative data; positive data is often thought of as correct data while negative data is incorrect data. when positive data is passed into the model, weights that support the data (aka, neurons that fire with large weights) are awarded. 
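a minimal numpy sketch of the cosine-with-class-mean scoring described in the ctm passage above: features are scored by their largest cosine similarity to per-class mean features from the id training set. the feature-extraction step, the stand-in data, and the decision threshold are assumptions for illustration only.

```python
# Sketch: cosine-with-class-mean OOD scoring, in the spirit of the CTM
# description above. `train_feats`/`train_labels` are penultimate-layer
# features of in-distribution training data; higher score = more in-distribution.
import numpy as np

def class_means(train_feats, train_labels):
    """Per-class mean feature vectors mu_k, stacked as a (K, d) matrix."""
    classes = np.unique(train_labels)
    return np.stack([train_feats[train_labels == k].mean(axis=0) for k in classes])

def ctm_style_score(test_feats, means, eps=1e-12):
    """Max over classes of cosine(z, mu_k) for each test feature z."""
    z = test_feats / (np.linalg.norm(test_feats, axis=1, keepdims=True) + eps)
    m = means / (np.linalg.norm(means, axis=1, keepdims=True) + eps)
    return (z @ m.T).max(axis=1)

# Hypothetical usage with random stand-in features:
rng = np.random.default_rng(0)
train_feats = rng.normal(size=(1000, 128))
train_labels = rng.integers(0, 10, size=1000)
scores = ctm_style_score(rng.normal(size=(5, 128)), class_means(train_feats, train_labels))
print(scores)  # threshold these scores to flag likely OOD inputs
```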
the assignment of these positive and negative data is subject to creativity, with one of the most common practices being placing incorrect class assignments in the negative data. in a one-class problem context, it is assumed that the majority of the training dataset consists of "normal" data, and the model is tasked with determining the normality of the input data. therefore, negative data is not required, and the objective function can be simplified to g(h^l). many deep learning methods answer this anomaly detection problem via inspirations from support vector machines, like deep svdd or deep oc-svm. the forward-forward algorithm is a new learning procedure for neural networks that updates network parameters immediately after the forward pass of a layer. an objective (aka "goodness") function is evaluated on the layer's latent output representations g(h^l | i), conditioned upon some data integrity i. integrity is broken down into positive and negative data; positive data is often thought of as correct data while negative data is incorrect data. when positive data is passed into the model, weights that support the data (aka neurons that fire with large weights) are awarded. the assignment of these positive and negative data is subject to creativity, with one of the most common practices being placing incorrect class assignments in the negative data. in a one-class problem context, it is assumed that the majority of the training dataset consists of "normal" data, and the model is tasked with determining the normality of the input data. therefore, negative data is not required, and the objective function can be simplified to g(h^l). for a layer l we compute a forward pass h^l = relu(x w + b), where x ∈ r^{n×p} is the data from the previous layer, h^l ∈ r^{n×q} is the transformed data, and w ∈ r^{p×q} and b ∈ r^q are the trained weights and biases. a forward pass of normal-class data can be used to calculate the loss function at layer l following some g(h^l). these g(h^l) can be any convex function; in the following table, we produce some candidate goodness functions. the network's weights are updated sequentially, where inputs h^{l-1} are passed through the layer to compute h^l, and the loss l(h^l) is calculated and used to backpropagate using gradient descent. to convert the final embeddings h^l ∈ r^{n×q} into an outlier probability, we pass them into the loss function to ascertain a distance value d = l(h^l) ∈ r^n for each sample and then convert these distances to probabilities by normalizing by the maximum value, so p = d / max(d) ∈ r^n. the normal backpropagation implementation conducts the weight update process for the weights in all layers after completing the forward pass on the last layer. so, while the forward-forward implementation has l instantiated optimizers, the normal backpropagation method has 1 instantiated optimizer. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/649.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/649.txt new file mode 100644 index 0000000000000000000000000000000000000000..146a06a696d4ab41b66454ba58a2fa16985f9821 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/649.txt @@ -0,0 +1 @@ +imitation learning (il) is a popular framework involving an apprentice agent who learns to imitate the behavior of an expert agent by observing its actions and transitions.
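a compact pytorch sketch of the layer-wise, one-class flavor of forward-forward training described above, with the mean of squared activations as an illustrative per-layer loss and one optimizer per layer; layer sizes, learning rate, and the loss choice are placeholders, not the paper's exact configuration.

```python
# Sketch: one-class forward-forward-style training. Each layer is trained on its
# own local objective over "normal" data only; activations are detached between
# layers so every layer updates immediately after its own forward pass.
import torch
import torch.nn as nn

class FFLayer(nn.Module):
    def __init__(self, in_dim, out_dim, lr=1e-3):
        super().__init__()
        self.linear = nn.Linear(in_dim, out_dim)
        self.opt = torch.optim.Adam(self.parameters(), lr=lr)  # one optimizer per layer

    def forward(self, x):
        return torch.relu(self.linear(x))

    def train_step(self, x):
        h = self.forward(x)
        loss = (h ** 2).mean()            # illustrative convex per-layer loss g(h)
        self.opt.zero_grad()
        loss.backward()
        self.opt.step()
        return h.detach()                 # detach: the next layer trains independently

layers = [FFLayer(32, 64), FFLayer(64, 64)]

def train_epoch(batches):
    for x in batches:
        for layer in layers:
            x = layer.train_step(x)

def outlier_probability(x):
    with torch.no_grad():
        for layer in layers:
            x = layer(x)
        d = (x ** 2).mean(dim=1)          # per-sample distance from the last layer
        return d / d.max()                # normalize distances into [0, 1]

# Hypothetical usage with random stand-in "normal" data:
train_epoch([torch.randn(128, 32) for _ in range(10)])
print(outlier_probability(torch.randn(5, 32)))
```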
in the context of mean-field games (mfgs), il is used to learn a policy that imitates the behavior of a population of infinitely-many expert agents that are following a nash equilibrium policy, according to some unknown payoff function. mean-field games are an approximation introduced to simplify the analysis of games with a large (but finite) number of identical players, where we can look at the interaction between a representative infinitesimal player and a term capturing the population's behavior. the mfg framework enables to scale to an infinite number of agents, where both the reward and the transition are population-dependent. the aim is to learn effective policies that can effectively learn and imitate the behavior of a large population of agents, which is a crucial problem in many real-world applications, such as traffic management , crowd control , and financial markets .il in mfgs presents new challenges compared to single-agent il, as both the (unknown) reward function and the transition kernel can depend on the population distribution. furthermore, algorithms will depend on whether we can only observe the trajectories drawn from the nash equilibrium (ne) or if we can access the mfg itself, either driven by the expert population or the imitating one.the main question we address is whether il in mfgs is actually harder than il in single-agent settings and if we can use single-agent techniques to solve il in the mfgs framework.although there exist il algorithms for mfgs in the literature, none comes with a characterization of the quality of the learnt imitation policy. so as to compare algorithms on a rational basis, we provide an extension of the concept of imitation gap to this setting and study it. our contributions are:• we provide a commented review of the existing literature on il for mfgs. notably, we will explain that they essentially amount to a reduction to classic il, and explain the underlying possible issues.• we introduce a new solution concept for il in mfgs called nash imitation gap, which is a strict generalization of the classic imitation gap and that we think may be more widely applicable to multi-agent reinforcement learning (marl).• in light of this new criterion, we first study the setting where only the reward depends on the population's distribution, while the dynamics does not. this setting was largely studied in the past few years , and we show that in this case il in mfgs reduces to single-agent il with similar guarantees for behavioral cloning (bc) and adversarial imitation (adv) type of algorithms.• then, we provide a similar analysis in the more general setting where the dynamics depends on the population's distribution. in this case, we provide for bc and adv approaches upper-bounds that are exponential in the horizon, suggesting that il is harder in this setting. on an abstract way, all previous works of the existing literature correspond to this setting.• due to these negative results, we introduce a new proxy to the nash imitation gap, for which we can derive a quadratic upper bound on the horizon. then, we discuss how a practical algorithm could be designed with an adversarial learning viewpoint. the idea behind it is to use an approach similar to adversarial il, where the underlying rl problem is replaced by a mean-field control (mfc) problem. we leave the design and experimentation of practical algorithms for future works, but this suggests that making progress on il in mfgs may have to build upon mfc. 
we also provide a numerical illustration empirically supporting our claims in the appendix. in the context of mean-field games (mfgs), il is used to learn a policy that imitates the behavior of a population of infinitely-many expert agents that are following a nash equilibrium policy, according to some unknown payoff function. this setting was largely studied in the past few years, and we show that in this case il in mfgs reduces to single-agent il with similar guarantees for behavioral cloning (bc) and adversarial imitation (adv) type of algorithms. mathematically, the mfg is defined by a tuple m = (s, a, p, r, h, ρ 0 ) where s is a finite state space, a is a finite action space, p : s × a × ∆ s → ∆ s is a transition kernel, r : s × a × ∆ s → r is a reward function, h is a finite horizon and ρ 0 ∈ ∆ s is a distribution over initial states. this form is similar to classic adversarial imitation approach, learning both a reward function and a policy, the inner problem being a reinforcement learning (rl) problem. remember that in the single-agent case (same as when we have l p = l r = 0) the transition dynamics and the reward function do not depend on the population distribution. they reframe the problem as finding a reward function that makes the expert policy π e the best response with respect to the expert population distribution ρ e , i. in fact, fixing the population distribution ρ e , the mfg is reduced to an mdp, and the imitation problem is reduced to single-agent il.as for the single-agent imitation gap, we cannot optimize it directly, since we do not know the reward function, but instead we can envision proxies, such as reducing the distance between the recovered policy π a and the expert policy π e (bc-like) or their occupancy measures (gail-like) as in the classic il setting (see sec. 1), we will also assume that the unknown reward function r for which the expert π e is a nash equilibrium is uniformly bounded when the population is the expert one.although we cannot claim our result is tight, this suggests that in mfgs we cannot hope to use bc to obtain a good imitation policy π a , since we need to control the divergence between the state-action occupancy induced by π a and the one induced by π e . assuming access to this mfg, the il problem reduces to doing classic il in an mdp without reward and with transition distribution p (•|s, a, ρ π e ). however, assuming to have access to the expert distribution may not be reasonable in practice, and due to this inthe authors replace the expert population distribution ρ e with its approximation from sampling ρe , in order to learn µ (π e )π a .we can observe that the inner problem is again an rl problem (for the mdp induced by ρ (π e ) ), and in practice any single-agent adversarial approach could be applied to solve for a similar proxy (related to a different min-max problem).assuming we can, we still learn an intermediate population-independent non-stationary reward, but now the underlying control problem is no longer rl, it is an mfc problem, as it implies studying max π v f (π, ρ (π) ), where the population does depend on the optimized policy. this suggests that for obtaining such an adversarial il approach for mfgs, one could start from an existing adversarial approach for the classic setting (for example, gail), and replace the underlying rl optimization problem by an mfc optimization problem. 
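to make the objects above concrete, here is a small numpy sketch of a finite-horizon mean-field game m = (s, a, p, r, h, ρ 0 ) restricted to its dynamics: a population-dependent transition kernel p(·|s, a, ρ) and the distribution flow induced by a policy. the toy kernel, the sizes, and the uniform policy are assumptions for illustration only.

```python
import numpy as np

# toy finite mean-field game: |S| states, |A| actions, horizon H (all sizes are assumptions)
S, A, H = 4, 2, 5
rng = np.random.default_rng(0)
base = rng.normal(size=(S, A, S))            # fixed part of the transition logits
rho0 = np.full(S, 1.0 / S)                   # initial distribution rho_0 in Delta_S

def transition(rho):
    """P(s' | s, a, rho) as an (S, A, S) array; the population shifts the dynamics."""
    logits = base + 0.5 * rho[None, None, :]
    p = np.exp(logits)
    return p / p.sum(axis=-1, keepdims=True)

def mean_field_flow(policy):
    """Distribution flow (rho_0, ..., rho_{H-1}) induced by a time-dependent policy.

    policy has shape (H, S, A): policy[t, s, a] = pi_t(a | s).
    """
    flow = [rho0]
    for t in range(H - 1):
        rho = flow[-1]
        P = transition(rho)                  # dynamics driven by the current population
        # rho_{t+1}(s') = sum_{s,a} rho_t(s) pi_t(a|s) P(s'|s,a,rho_t)
        flow.append(np.einsum("s,sa,sax->x", rho, policy[t], P))
    return np.stack(flow)

uniform_policy = np.full((H, S, A), 1.0 / A)
print(mean_field_flow(uniform_policy))       # each row sums to 1
```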
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/65.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/65.txt new file mode 100644 index 0000000000000000000000000000000000000000..5724585e7d9e23fb0f92f29c474a9ae9dcaa4279 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/65.txt @@ -0,0 +1 @@ +there is a vast amount of information that is growing at an exponential rate everyday in the form of news articles. digitization has led to an increasing number of people switching to online sources for daily news feeds. commensurate to the advances in the digital era, people generally are preoccupied with hectic work-life and prefer to read articles pertaining to their interests . thus, most of the news articles, though informative, might be of less relevance to an individual. hence, it poses a mammoth task for extracting relevant news with respect to an individual. the interests can depend on several factors like type of the news articles, place to which the news belongs to, etc. in this case, we have considered the interest based on geographical domain. for example, a person wants to read news specific to mumbai and is provided with a flood of news relating to all the cities of india. in this case, it would be cumbersome for the person to find the city specific news. in this research, we have implemented machine learning techniques to classify news articles belonging to a particular location. the location can be a city, state, country, etc but we have examined the results based on cities. the news articles from various websites like indian express, hindustan times,times of india etc are extracted to form our dataset.the underlying structure of the web page is the html language . this contains boilerplate elements like navigation bars, advertisements, comment section, etc. text classification method applied on this data directly would lead to a very less accuracy. the crux of solving this issue is designing a method to scrap out the clean news articles for further processing. for this we have designed a web crawler to crawl the set of news website and extract the main text out of the webpage. further processing of the article involves tokenizing the text and deriving the stem of the individual tokens. this is followed by the removal of stop words. finally, classification is performed and the trained classifier is used to predict the output class, in this case-city, of the input test article. random forest classifier, support vector machine and multinomial naive bayes have been used in the classification phase. the organization of the remaining part of the research paper is as follows. section 2 is a brief description of classification methods used. the main methodology and our approach fills in the section 3. experimental results are discussed and analyzed in section 4. a succint summary is provided in section 5. the interests can depend on several factors like type of the news articles, place to which the news belongs to, etc. for example, a person wants to read news specific to mumbai and is provided with a flood of news relating to all the cities of india.in this research, we have implemented machine learning techniques to classify news articles belonging to a particular location. the news articles from various websites like indian express, hindustan times,times of india etc are extracted to form our dataset. 
finally, classification is performed and the trained classifier is used to predict the output class, in this case-city, of the input test article.naive bayes classifier is a popular method for text classification problems where given a document or article the classifier has to decide the category of the article.the goal of our approach is to assign an output class to the news articles based on the content. data preprocessing methods are applied to the train data and is used as an input to the classifier for training. the same data preprocessing methods are used for the test data and this acts as an input to the trained classifier which predicts the output class of the test news articles.in this phase, news article data is being retrieved from various news websites. during this process rss feeds for the 5 cities delhi, chandigarh, kolkata, mumbai and lucknow were crawled from three news website indian express, hindustan times and times of india . a total of 2000 articles were collected with a distribution such that the data set has 400 articles for each city. since input to the classifier are two vectors, the set of news articles and the labels need to be vectorized.precisionis the ratio of the number of articles which are judged correctly(true positive) to the total number of articles which the classifier predicted to belong to a particular category(true positive and false positive).in this paper, we have investigated the possibility to use machine learning algorithms to classify the news articles based on cities. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/650.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/650.txt new file mode 100644 index 0000000000000000000000000000000000000000..c290ddfe8269ce1f63384decd230b8fadcbb81c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/650.txt @@ -0,0 +1 @@ +language models (lms) are models designed to understand and generate human language. in recent years, large language models (llms) trained on large amounts of text data have demonstrated stunning capabilities in various natural language processing and visualization tasks.given the rise of llms, it is natural for researchers and developers in the high-performance computing community to start exploiting lms for addressing various challenges in hpc, including code analysis, code generation, performance optimization, question answering, and so on.-we design an extensible framework for including and exposing relevant machine learning components to facilitate the adoption of large language models for hpc-specific tasks. -a set of pipelines have been developed to support common hpc tasks, including code similarity analysis, parallelism detection, question answering, and so on.language models (lms) are machine learning models designed to comprehend and generate human language.there has been a keen interest in deploying nlp techniques to programming language processing (plp) tasks, such as code summarization, code generation, and code similarity analysis. similarly, codet5is a variant of google's t5 language model, trained specifically on code datasets to perform advanced programming tasks like code completion, bug detection, and code summarization. 
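a compact scikit-learn sketch of the pipeline described in 65.txt above: vectorization with stop-word removal followed by multinomial naive bayes, a linear svm, and a random forest. the toy articles and city labels are placeholders, and stemming is omitted for brevity (it would normally sit before vectorization).

```python
from sklearn.ensemble import RandomForestClassifier
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
from sklearn.pipeline import make_pipeline
from sklearn.svm import LinearSVC

# placeholder articles and city labels standing in for the crawled news dataset
texts = [
    "heavy rain floods several mumbai suburbs overnight",
    "delhi metro extends its yellow line to new stations",
    "kolkata book fair opens with record attendance",
    "mumbai local trains delayed after a signal failure",
    "air quality in delhi worsens as winter sets in",
    "durga puja pandals light up kolkata this week",
]
cities = ["mumbai", "delhi", "kolkata", "mumbai", "delhi", "kolkata"]

X_train, X_test, y_train, y_test = train_test_split(
    texts, cities, test_size=0.5, random_state=42)

models = {
    "multinomial_nb": MultinomialNB(),
    "linear_svm": LinearSVC(),
    "random_forest": RandomForestClassifier(n_estimators=100, random_state=0),
}
for name, clf in models.items():
    # stop-word removal and vectorization happen inside the pipeline;
    # a stemmer would normally be plugged in before the vectorizer
    pipe = make_pipeline(TfidfVectorizer(stop_words="english"), clf)
    pipe.fit(X_train, y_train)
    print(name)
    print(classification_report(y_test, pipe.predict(X_test), zero_division=0))
```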
lately, starcoder, a 15b parameter model trained with 1 trillion tokens sourced from a large collection of permissively licensed github repositories, is developed to be a large language model mainly for code generation or completion.with the recent breakthroughs in generative pretrained transformer (gpt) large language models, it has become increasingly intriguing to explore the application of large language models (llms) for hpc tasks.in this section, we evaluate the current prototype implementation of lm4hpc through experiments designed to generate leaderboards for three representative tasks: code similarity analysis, parallelism detection, and openmp question answering.the code similarity task is designed to measure the syntactic and/or semantic similarity between two code snippets. most large language models outperform traditional models (graphcodebert and codebert) that were specifically trained for code analysis. however, cerebras-gpt struggled to comprehend the code and mostly returned arbitrary word tokens, indicating a lack of effective code understanding since it is mostly designed for natural language processing. these steps are common practiceto ensure that code snippets are small enough to be fed into language models with limited input token sequence sizes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/651.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/651.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ad572c94c6a501fbc1c446293720be99f30839e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/651.txt @@ -0,0 +1 @@ +insurance is "interestingly uninteresting". in this work, we argue that in fact insurance is far from uninteresting and indeed a rich source of inspiration and insight to scholarship interested in social issues surrounding machine learning, specifically the field now known as fair machine learning. our proposal is that insurance can be viewed as an analogon to machine learning with respect to these issues arising from the social situatedness. while machine learning is a relatively recent technology, debates regarding social issues in the context of insurance have been ongoing for a long time. thus, we argue that taking inspiration from studies of insurance can contribute to a more integrative view of machine learning systems as socio-technical systems (selbst et al., ).both machine learning and insurance are firmly based on a statistical, probabilistic mode of reasoningan actuarial mode. indeed, insurance can be viewed as the first commercial test of probability theory (gigerenzer et al., ; mcfall, ). insurance, a technology for doing risk, transforms uncertainty into calculable risk (lehtonen & van hoyweghen, ). the key idea is to share the risk of a loss in a collective, organized through an abstract mutuality; due to the 'law' of large numbers, uncertainty thus becomes manageable and the effect of chance can be offset (ewald, ). in this way, insurance creates a "community of fate" in the face of uncertainty (heimer, ). to enter into this community (the insurance pool), the insurer demands a certain fee, called premium, from the policyholder.in insurance, questions of fairness inevitably arise, and have been the subject of much debate. the central point of debate is the tension between risk assessment and distribution (abraham, ). in other words, who is to be mutualized in the pool. 
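the lm4hpc pipelines themselves are not shown in the text, but the code similarity task described above can be sketched generically with a pretrained code encoder: embed each (truncated) snippet and compare embeddings with cosine similarity. the checkpoint name, the mean-pooling choice, and the snippets below are illustrative assumptions, not the framework's actual api.

```python
import torch
from transformers import AutoModel, AutoTokenizer

# example checkpoint; any code-pretrained encoder with the same interface would do
name = "microsoft/codebert-base"
tok = AutoTokenizer.from_pretrained(name)
model = AutoModel.from_pretrained(name)
model.eval()

def embed(code: str) -> torch.Tensor:
    """Mean-pooled last-hidden-state embedding of a (truncated) code snippet."""
    # truncation keeps the snippet within the model's limited input token size
    batch = tok(code, return_tensors="pt", truncation=True, max_length=512)
    with torch.no_grad():
        hidden = model(**batch).last_hidden_state       # (1, seq_len, dim)
    mask = batch["attention_mask"].unsqueeze(-1)        # ignore padding positions
    return (hidden * mask).sum(1) / mask.sum(1)

a = "for (int i = 0; i < n; ++i) s += x[i];"
b = "int i; for (i = 0; i < n; i++) { sum += arr[i]; }"
c = "printf(\"hello world\\n\");"

sim_ab = torch.cosine_similarity(embed(a), embed(b)).item()
sim_ac = torch.cosine_similarity(embed(a), embed(c)).item()
print(f"similar loops: {sim_ab:.3f}  unrelated snippets: {sim_ac:.3f}")
```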
some form of segmentation is found in many insurantial arrangements: the pool of policyholders can be stratified by separating high and low risk individuals. but the specific nature that such segmentation mcfall et al. ( ) call insurance "interestingly uninteresting", referring to how insurance is "hugely underresearched" given its societal importance, which is typically not recognized (ewald, ).takes typically depends not only on risk assessment, but on further considerations such as assignment of responsibility, modulated by social context; in this way, insurance is not a neutral technology (baker & simon, ; glenn, a).our non-comprehensive outline of the history of insurance illustrates how uncertainty, fairness and responsibility interact, and can be entangled and disentangled. from this background, we can extract conceptual insights which also apply to machine learning. the tension between risk assessment and distribution is mirrored in formal fairness principles: solidarity, which can be linked to independence in fair machine learning, contrasts with actuarial fairness, linked to calibration. briefly, actuarial fairness demands that each policyholder should pay only for their own risk, that is, mutualization should occur only between individuals with the same 'true' risk. in contrast, solidarity calls for equal contribution to the pool. on one level of this text, we problematize actuarial fairness (by extension, calibration) as a notion of fairness in the normative sense by taking inspiration from insurance. this perspective is aligned with recent proposals that stress the discrepancy of formal algorithmic fairness and "substantive" fairness (green, ), which some prefer to call justice (vredenburgh, ). parallel to this runs a distinct textual level, where we emphasize two intricately interacting themes: responsibility and tensions between aggregate and individual. both entail criticism of actuarial fairness, but we suggest that they additionally provide much broader, fruitful lessons for machine learning from insurance.at the highest level of abstraction, our goal is to establish a general conceptual bridge between insurance and machine learning. traversing this bridge, machine learning scholars can obtain new perspectives on the social situatedness of a probabilistic, statistical technology -we attempt to offer a new 'cognitive toolkit' for thinking about the social situatedness of machine learning. our point of view is that fairness cannot be reduced to a formal, mathematical issue, but that it requires taking broader social context into account, reasoning for instance about responsibility. and for this, we suggest, insurance is an insightful analogon. therefore, our objective is to furnish the reader with a guide that charts the landscape of insurance with respect to social issues and to establish links to machine learning.on a formal level, we use the following analogy. in a machine learning task, we are given some features x and associated outcomes y , which we attempt to approximate by predictions ŷ . the structural relation to insurance is established by conceiving of x as the features of policyholders (e.g. age, gender) with outcomes y (e.g. having an accident or not), and the task is to set a corresponding premium ŷ . in this work, we argue that in fact insurance is far from uninteresting and indeed a rich source of inspiration and insight to scholarship interested in social issues surrounding machine learning, specifically the field now known as fair machine learning. 
the tension between risk assessment and distribution is mirrored in formal fairness principles: solidarity, which can be linked to independence in fair machine learning, contrasts with actuarial fairness, linked to calibration. on one level of this text, we problematize actuarial fairness (by extension, calibration) as a notion of fairness in the normative sense by taking inspiration from insurance. both entail criticism of actuarial fairness, but we suggest that they additionally provide much broader, fruitful lessons for machine learning from insurance. fairness conceptions in insurance are contingent upon prevailing societal norms, particularly regarding responsibility, but concurrently insurance shapes the moral fabric of the society in which it is embedded(glenn, b; van hoyweghen et al. particularly interesting is also insurance in islamic law, which prohibits gambling and contracts based on usury: the morality of insurance is justified then by emphasizing the solidaristic nature of the arrangement(baker, ), in contrast to the view of insurance as a bilateral contract that is more prevalent in western societies., ), its modern formulation is due to arrow ( ). we now argue, in line with other social studies of insurance scholars(abraham, ; gaulding, ), that the variant of this question which is relevant for insurance and machine learning purposes is in fact fundamentally normative in character. in the limit, the distinction between group-based approaches to fairness and individual fairness (in the sense of the machine learning literature(dwork et al. on the other hand, aggregates are central to the workings of machine learning: they appear in the input data due to categorization processes; second, the fairness of machine learning systems is typically evaluated based on groups (with the exception of individual fairness, see below); third, machine learning in general, whether fairness-unaware or not, rests on aggregate criteria such as average training error. in contrast, the allure of perfect actuarial fairness associated with the personalization of risk, driven by big data and machine learning, is that it is supposedly individually fair -the goal being 'segments of one' and setting the premium as e. for instance, the problems of dataset shift and model ambiguity have been recognized in insurance as well as machine learning; for contributions from insurance see e. recent impossibility theorems in the fair machine learning literature(kleinberg et al.what then is the relevance of performativity for insurance, and by analogy, machine learning? actuarial fairness (calibration), or more broadly the fairness of 'accurate' statistical methods, carries with it an aura of objectivity and neutrality. yet against a background of such past injustice, it is not clear why actuarial fairness should be considered as a principle of justice -this argument has been made both in the insurance(daniels, ; lehtonen & liukko, ; barry, )as well as the machine learning literature(mitchell et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/652.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/652.txt new file mode 100644 index 0000000000000000000000000000000000000000..bfcfb8d3e72fd7780c38542d9d201e2d9a077537 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/652.txt @@ -0,0 +1 @@ +deep neural networks are ubiquitous and can be found in countless applications today. 
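the link drawn above between actuarial fairness and calibration, and between solidarity and independence, can be made concrete with two group-level checks on a score ŷ. the synthetic groups, risks, and score below are assumptions used only to show that a well-calibrated risk-based score can violate independence when group base rates differ.

```python
import numpy as np

rng = np.random.default_rng(1)
n = 10_000
group = rng.integers(0, 2, size=n)                         # two policyholder segments
risk = np.where(group == 0, 0.10, 0.25)                    # synthetic 'true' risk per group
y = rng.binomial(1, risk)                                  # observed outcome (claim or not)
y_hat = np.clip(risk + rng.normal(0, 0.02, size=n), 0, 1)  # premium-like risk score

# independence (solidarity-flavoured): the score should not depend on the group;
# summarized here by the gap between the group means of the score
independence_gap = abs(y_hat[group == 0].mean() - y_hat[group == 1].mean())

# calibration (actuarial-fairness-flavoured): among people with similar scores,
# the observed outcome rate should match the score, within each group
bins = np.quantile(y_hat, [0.0, 0.5, 1.0])
calibration_gap = 0.0
for g in (0, 1):
    for lo, hi in zip(bins[:-1], bins[1:]):
        m = (group == g) & (y_hat >= lo) & (y_hat <= hi)
        if m.any():
            calibration_gap = max(calibration_gap, abs(y_hat[m].mean() - y[m].mean()))

# a score equal to the true group risk is (nearly) calibrated but far from independent
print(f"independence gap: {independence_gap:.3f}")
print(f"worst within-bin calibration gap: {calibration_gap:.3f}")
```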
while their predictive powers are remarkable, trained neural networks are prone to adversarial attacks. specifically, a malicious player can perturb a regular input (which the trained network maps to a correct result) with a small, but well-chosen perturbation, such that the network maps the perturbed input to an incorrect output. the goal of this work is to develop a methodology for constructing robust neural networks, where small changes in the input do not lead to large changes in the output.this issue of adversarial attacks has been widely studied with many different attack methods created (bai et al. 2021). the training procedure colloquially referred to as "adversarial training" uses such maliciously perturbed inputs paired with the correct outputs to enrich the training set, and to robustify the neural networks model against such attacks. in this approach, guarding against adversarial attacks starts with specifying the type and method of attack itself. the attack model limits the perturbation in the input data according to a prescribed criterion, such that the benign and malign examples are not human-distinguishable. for example, adversarial examples are limited to live in a small-radius ℓ ∞ ball around the benign example.fast gradient sign method (fgsm) is perhaps the best known adversarial attack. first introduced by (goodfellow, shlens, and szegedy 2014), this one-step method perturbs the input along the direction of the fastest increase in the value of the loss function. later improvements added multiple steps, clipping the gradients, and projecting the perturbed input onto a desired attack model (kurakin, goodfellow, and bengio 2016). the gradient-based update has also been equipped with a memory term to create a momentumbased attack (dong et al. 2017). in (carlini and wagner 2017) it is proposed to perturb an input based on a distance metric until the input is classified as a different target class; three attack methods are proposed, based on three different distance metrics. in (madry et al. 2017), the authors proposed the pgd (projected gradient descent) attack, where a perturbation of some step size is projected onto a ball of interest of allowed maximum perturbation. the pgd attack also has a parameter-free extension described in (croce and hein 2020).to fend off different adversarial attacks, various defense methods have been developed in the literature. (zhang et al. 2019) proposes a new loss function named trades, which uses the loss between the natural image and the perturbed image as the regularizer to the original loss function, while also adversarially training a neural network. in (wang et al. 2020), the loss function is based on whether a perturbed input is misclassified or not. friendly adversarial training (fat) proposed in (zhang et al. 2020a), employs earlystopped pgd so that only the adversarial examples that help with minimizing the loss function are used. the training algorithm described in (zhang et al. 2020b) takes into consideration the distance of a training example to the decision boundary; the idea is that examples closer to the decision boundary can be more easily misclassified, and hence should be prioritized over examples that are further away from the boundary. a recent adversarial training method proposed in (wang and wang 2022) utilizes an ensemble based training approach to adversarially train a model without training an ensemble of classifiers. 
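a minimal pytorch sketch of the one-step fgsm attack described above: perturb the input along the sign of the loss gradient within an ℓ∞ budget ε. the placeholder model, the value of ε, and the clamp to [0, 1] are illustrative assumptions.

```python
import torch
import torch.nn as nn

def fgsm_attack(model, x, y, eps=0.03):
    """One-step FGSM: move each input along the sign of the loss gradient."""
    x_adv = x.clone().detach().requires_grad_(True)
    loss = nn.functional.cross_entropy(model(x_adv), y)
    loss.backward()
    with torch.no_grad():
        x_adv = x_adv + eps * x_adv.grad.sign()   # direction of fastest loss increase
        x_adv = x_adv.clamp(0.0, 1.0)             # stay in the valid image range
    return x_adv.detach()

# toy usage: a placeholder classifier for 28x28 images (sizes are assumptions)
model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10))
x = torch.rand(16, 1, 28, 28)
y = torch.randint(0, 10, (16,))
x_adv = fgsm_attack(model, x, y)
print((x_adv - x).abs().max())                    # bounded by eps
```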
instead of using a set of classifiers, the authors compute the optimal weights of the model by considering the trajectory of the weights as an exponential moving average. this considers current weights and also has influence from previous weights.negative feedback control is a very general mechanism by which the outputs of a system are stabilized in the presence of unknown perturbations (lewis, vrabie, and syrmos 2012). specifically, in a feedback control system a controller monitors the values of output variables, and compares them with the reference values. the difference between the actual and desired values is fed back, and is used to generate a control action that nudges the system output back toward the reference values. therefore a control system is able to self-correct, and maintain a stable output, regardless of the disturbances that push the output away from reference values.as a simple example of negative feedback, consider a sink into which water flows from an open faucet; the faucet inflow rate is under our control. in the same time, the water flows out of the sink through a drain; the outflow rate is variable, and is under the control of an adversary. our goal is to keep the water level at a given constant height. however, for any inflow level we set, the adversary can change the drain outflow such as to prevent the water stabilizing at the desired height. a control system measures the difference between the current and desired water levels; if this difference is negative (the current level is lower) then the control action is to turn the faucet up such as to increase the inflow; viceversa, if the difference is positive (the current level is higher) then the control action is to turn the faucet down to decrease the inflow. this example illustrates the concept of "negative feedback": the control action compensates for the departure from the desired water level, irrespective of what causes this departure. we see that the control system self-regulates: regardless of the actions of the adversary (letting more or less water out of the sink), the controller always takes the action that brings the water level back to the reference height.negative feedback loops appear in many areas such as biology (e.g., regulating cell cycle), environment (e.g., stabilizing earth climate), and engineering (e.g., james watt's 1788 centrifugal mechanism that controlled the speed of his steam engine). the concept of negative feedback was mathematically formalized by (maxwell 1868), and (wiener 1948).in this paper, we propose a novel adversarial training approach based on control theory. specifically, we develop a new neural network architecture, named feedback neural networks, that incorporates feedback control. the original neural network is extended with a controller that feeds output discrepancy information back to the input of the original network. the controller is itself a neural network, that is trained using regular and adversarial data such as to stabilize the system outputs. 
the novel adversarial training approach based on the feedback control architecture is called feedback looped adversarial training (flat).the incorporation of negative feedback in the new architecture leads to a structure that is capable to self-correct for output errors regardless of the perturbations applied to the inputs, and help the original network in making correct predictions.the structure of the paper is as follows:• the background section reviews the formulation of adversarial training, as well as fundamentals of control theory. • the methodology section develops the proposed adversarial training method based on feedback control. • empirical evidence of the effectiveness of our proposed method is provided in the experimental results section. • final remarks and directions of future research are given in the conclusions section. specifically, a malicious player can perturb a regular input (which the trained network maps to a correct result) with a small, but well-chosen perturbation, such that the network maps the perturbed input to an incorrect output. 2021). specifically, in a feedback control system a controller monitors the values of output variables, and compares them with the reference values. a control system measures the difference between the current and desired water levels; if this difference is negative (the current level is lower) then the control action is to turn the faucet up such as to increase the inflow; viceversa, if the difference is positive (the current level is higher) then the control action is to turn the faucet down to decrease the inflow. we see that the control system self-regulates: regardless of the actions of the adversary (letting more or less water out of the sink), the controller always takes the action that brings the water level back to the reference height. the novel adversarial training approach based on the feedback control architecture is called feedback looped adversarial training (flat).in control theory(lee and markus 1967;doyle, francis, and tannenbaum 2013;woolf 2009), a system is stabilized by passing the output of the system that is measured using a sensor (or the error between the output and a reference signal) through a controller. the output information is passed backward to the input, whence the name feedback control; and the subtraction ensures that the sign of the change compensates for (acts against) the output errors, whence the name negative feedback. this mechanism helps stabilize system response in the presence of noise or perturbations in the input; we note that stabilization happens regardless of the perturbation, since whatever ∆x change is applied to the inputs, the negative feedback control action always nudges the output back toward the correct (unperturbed) value. the job of the controller is to predict the correction (e) of the input x, such that the corrected input (x ′ = xe) passed through the main system leads to the reference output (y). to avoid this, one can iteratively pass the input information through the main system, then feed back through the controller and form the corrected input; the next iteration repeats the process with the corrected input: 0) . we see that a suitable control gain κ (in general, a suitable control feedback matrix k) is chosen differently for iterated feedback (4) than for the exact feedback (3). consider next a perturbed input x + ∆x, which is passed through the nn model, to obtain a predicted output y + ∆y = f (x + ∆x, θ f ). 
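a rough sketch of the iterated feedback idea described above, assuming the corrected input is formed as x' = x - e with the correction e predicted by a controller network from the current output. the layer sizes, the number of feedback iterations, and the controller architecture are assumptions, not the exact flat design.

```python
import torch
import torch.nn as nn

class FeedbackNet(nn.Module):
    """Main network plus a controller that feeds output information back to the input."""
    def __init__(self, in_dim=784, n_classes=10, n_iters=3):
        super().__init__()
        self.main = nn.Sequential(nn.Linear(in_dim, 256), nn.ReLU(),
                                  nn.Linear(256, n_classes))
        # controller maps the current output back to an input-sized correction e
        self.controller = nn.Sequential(nn.Linear(n_classes, 256), nn.ReLU(),
                                        nn.Linear(256, in_dim))
        self.n_iters = n_iters

    def forward(self, x):
        y = self.main(x)
        for _ in range(self.n_iters):        # iterated feedback through the controller
            e = self.controller(y)            # predicted input correction
            x_corr = x - e                     # negative feedback: x' = x - e
            y = self.main(x_corr)
        return y

# toy usage: clean and adversarially perturbed inputs both go through the same loop
net = FeedbackNet()
x = torch.rand(8, 784)
print(net(x).shape)                            # torch.Size([8, 10])
```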
so, the predefined model and the feedback network are connected and we consider this whole network as a new model.input: a feedback nn model f (including both the original and the control nns, as in figure3) with learnable parameters θ; loss function l; input data x; ground truth y; number of epochs/iterations n ; number of adversarial attack steps k; magnitude of maximum allowed perturbation ε; perturbation step size κ; learning rate τ . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/653.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/653.txt new file mode 100644 index 0000000000000000000000000000000000000000..04246d31e7f80efab3fa4fccc3cb38c8d9e7a03b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/653.txt @@ -0,0 +1 @@ +real-world applications of machine learning (ml) often involve training models from data streams characterized by distributional changes and limited access to past data . this scenario presents a challenge for standard ml algorithms, as they assume that all training data is avail- we present the averaged incremental accuracy (µ acc ) and the number of cases (w) in which a combination of algorithm and initial training strategy performs best for a combination of target dataset and efcil scenario (see sec. initial training strategies are defined by: arch-deep architecture used (resnet50 (rn50)or vision transformer (vit-s)); method -initial training method; ft -fine-tuning on initial classes of the target dataset; ext-use of an external dataset, such as ilsvrc; sup -type of supervision for the initial model: self-supervised (ssl) or supervised (sl). the main findings are that: (1) pre-training with external data improves accuracy, (2) selfsupervision in the initial step boosts incremental learning, particularly when the pre-trained model is fine-tuned on the initial classes, and (3) efcil algorithms based on transfer learning have better performance than their fine-tuningbased counterparts.transfer learning involves using a model trained on a source dataset as a starting point for training another model on a target dataset.2). in cil, learning a classification model is a sequential process, where each step in the sequence consists of integrating a set of new classes into the model. learning to recognize new classes. this type of approach favors plasticity over stability because at each incremental step, all model weights are updated using the training images of the latest classes. at the k th step of the cil process, k ∈ 2, k , the classification model m k recovers the weights of the model m k-1 obtained in step k-1 and is updated using the data subset d k and the algorithm incr.in the following, we describe the main characteristics of the training strategies used in our experimental study to obtain the initial model of the incremental learning process. we consider that all examples from the target dataset d are labeled, and we experiment with both supervised learning and self-supervised learning to obtain the initial model using d 1 . to unskew the statistical models we present in section 5, we consider the initial accuracy, defined as the accuracy of the first model on the first data subset d 1 and denoted by acc 1 , i.does the use of a model pre-trained on an external dataset d ⋆ always improve performance on the target dataset d? figure4highlights that no single initial training strategy outperforms the others on all datasets. 
the initial training strategy should be selected by considering characteristics of the dataset such as: number of classes, number of samples per class, domain gap with pre-training, and size of the initial batch of classes.in the absence of an external dataset, is it better to train the initial model in a supervised way or with a self-supervised learning method? as shown in figure3, supervised learning on the initial data is better on average. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/654.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/654.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b424453768afcf8434a812b325aa1e4fbd7e118 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/654.txt @@ -0,0 +1 @@ +in today's rapidly evolving and fiercely competitive retail landscape, the precision of demand forecasting has become paramount. the ability to accurately predict demand not only facilitates optimized inventory management and cost reduction but also serves as a catalyst for elevating customer satisfaction. however, relying solely on traditional forecasting techniques that hinge on historical sales data falls short of capturing the intricate dynamics that are influenced by a multitude of external economic factors. current studies in the literature delve into the exploration of advanced regression and machine learning algorithms, seeking methodologies to enhance predictive accuracy. while these advanced algorithms harness the potency of historical sales data, they mostly overlook the profound impact of macroeconomic conditions on the ever-shifting behavior of household shopping.within the scholarly discourse, documented empirical evidence demonstrates that macroeconomic variables display substantive influence over consumer expenditure trends. in particular, the consumer price index (cpi), index of consumer sentiment (ics), and unemployment rates manifest as pivotal factors in explaining retail demand. however, within the realm of demand forecasting efforts, the evidence of capturing this influence of external economic factors in shaping households' demand patterns is conspicuously scanty. this realization serves as a driving force for our research, as we strengthen the weakly established finding in the current literature of superior performance of retail forecasting for integrating these macroeconomic factors with time series data, and further confirm this weakly established finding by applying various regression and machine learning models in solving retail forecasting problems. going beyond the realm of refining retail demand forecasting with amplified precision, our academic endeavor extends to the inherent limitations of conventional approaches. the integration of macroeconomic indicators as integral constituents of our forecasting framework enables us to capture intricate relations between economic conditions and consumer behaviors.in short, within the context of a constantly evolving retail arena characterized by fierce competition, the spotlight upon the precision of demand forecasting intensifies. bearing in mind the constraints intrinsic to methodologies tethered solely to historical data, our scholarly focus lies in the augmentation of time series data through the seamless integration of macroeconomic variables, thereby confirming the pivotal role these factors play in shaping the contours of future retail demand. 
our research aims to improve this by using economic indicators alongside the usual data to predict demand better and understand the connection between economics and shopping habits. it's about going beyond just improving predictions it's about uncovering a deeper understanding of how economic conditions affect what people buy. this realization serves as a driving force for our research, as we strengthen the weakly established finding in the current literature of superior performance of retail forecasting for integrating these macroeconomic factors with time series data, and further confirm this weakly established finding by applying various regression and machine learning models in solving retail forecasting problems. bearing in mind the constraints intrinsic to methodologies tethered solely to historical data, our scholarly focus lies in the augmentation of time series data through the seamless integration of macroeconomic variables, thereby confirming the pivotal role these factors play in shaping the contours of future retail demand. additionally, the integration of macroeconomic and consumer indicators is highlighted as an emerging approach to enrich predictive models and enhance retail sales forecasting accuracy. punia et al. their study presents an automobile sales forecasting model that incorporates variables such as residents' disposable income and other relevant macroeconomic indicators. by examining the relationship between macroeconomic indicators and commodity market volatility, the authors highlight the crucial role that macroeconomic variables play in forecasting market fluctuations. (2022) propose a regional economic forecasting method based on recurrent neural networks.in his paper, haque (2023) introduces the integration of external macroeconomic variables, such as the consumer price index (cpi), consumer sentiment index (ics), and unemployment rates-in conjunction with time series data related to retail product sales. as businesses strive for greater forecasting accuracy to inform supply chain decisions, a key opportunity lies in combining the predictive prowess of machine learning with the enriched insights of macroeconomic and consumer data. while haque (2023) provides evidence of the superior performance of forecasting accuracy for the inclusion of external economic conditions, his finding is based on the results from an lstm model only. our study bridges this gap by utilizing various relevant models for forecasting retail demand and comparing performances obtained by including external economic conditions to those that ignore macroeconomic conditions. findings from our study strongly support the finding from haque (2023) and provide strong evidence for superior performance of forecasting accuracy for the inclusion of external economic conditions. we apply aforementioned machine learning techniques on the historical sales data enriched with macroeconomic variables to verify the impact on the macroeconomic variables on the performance of the models.our study stands at the juncture of two critical domains: demand forecasting within the retail industry and the profound impact of macroeconomic variables on consumer behavior. 
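a small sketch of the comparison the discussion above calls for: the same regressor fit once on lagged sales alone and once on lagged sales augmented with cpi, consumer sentiment, and unemployment. the synthetic monthly panel and the gradient-boosting model are assumptions for illustration, not the study's data or models.

```python
import numpy as np
import pandas as pd
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.metrics import mean_absolute_error

# synthetic monthly panel standing in for sales history plus macro indicators
rng = np.random.default_rng(0)
n = 120
df = pd.DataFrame({
    "cpi": np.linspace(230, 300, n) + rng.normal(0, 1, n),
    "sentiment": 90 + 10 * np.sin(np.arange(n) / 6) + rng.normal(0, 2, n),
    "unemployment": 5 + rng.normal(0, 0.3, n),
})
df["sales"] = (200 - 4 * df["unemployment"] + 0.5 * df["sentiment"]
               - 0.2 * (df["cpi"] - 230) + rng.normal(0, 3, n))
df["sales_lag1"] = df["sales"].shift(1)
df["sales_lag12"] = df["sales"].shift(12)
df = df.dropna()

train, test = df.iloc[:-24], df.iloc[-24:]       # hold out the last two years
history_only = ["sales_lag1", "sales_lag12"]
with_macro = history_only + ["cpi", "sentiment", "unemployment"]

for name, cols in [("history only", history_only), ("history + macro", with_macro)]:
    model = GradientBoostingRegressor(random_state=0)
    model.fit(train[cols], train["sales"])
    mae = mean_absolute_error(test["sales"], model.predict(test[cols]))
    print(f"{name:>16}: MAE = {mae:.2f}")
```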
our findings fortify the findings from a previous study in the current literature in a more generalizable mannerthat by including external macroeconomic data into our models we can improve the predictive accuracy, and that these findings obtained from an lstm model in the current literature are also valid for other relevant models in the machine learning horizon. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/655.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/655.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0a83a92bafb6abb1a7f69cd79460c548b3cdbce --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/655.txt @@ -0,0 +1 @@ +security is indispensable and very crucial in modern information technology framework , , , , and so, as we had gotten to grapple with the fact that there is no perfect system, no matter how sophisticated or state of the art a system could be, it can be attacked and compromised. with hackers constantly coming up with ever changing innovative and highly sophisticated ways to compromise system, focus had shifted to making state of the art system extremely complicated and tedious to be compromised since there cannot be a perfect system. before any system could be compromised, there must be an intrusion for any damage to occur , , , . it is one thing for a system to be intruded, it is another thing for the intrusion to be immediately detected and dealt with before any compromise is made. an intrusion that lasted for about fifteen (15) milliseconds before being dealt with by a combination of machine learning (to accurately detect the actual intrusion) and game theory (changing parameters and configurations to prevent further attack) approach gives an insight of a system perfection.naïve bayesian algorithms are some of the most important classifiers used for prediction. bayesian classifiers are based on probability and with general assumption that all features are independent of each other which doesn't usually hold in real world, these assumptions account for why naïve bayes algorithm performed poorly on certain classification, the assumption is in addition to individual assumption as each variant of naïve bayes classifiers of. bayesian classifiers are based on probability and with general assumption that all features are independent of each other which doesn't usually hold in real world, these assumptions account for why naïve bayes algorithm performed poorly on certain classification, the assumption is in addition to individual assumption as each variant of naïve bayes classifiers of.• we showed that gaussian naïve bayes algorithm performed best among all the three variants of bayesian algorithm on anomalous detection in network intrusion in terms of efficiency and accuracy on kdd dataset, followed by bernoulli with 69. the assumption that the distribution is multinomial coupled with additional assumption of independence among the features makes multinomial naïve bayes a drawback when the two assumptions are not valid in test or train data.feature selection, which is also being referred to as attribute selection is a process of extracting the most relevant features from the dataset for the purpose of using classifier to train a model in order to ensure overall better performance of the model. 
since the presence of large number of irrelevant features in dataset increases both the training time and the risk of overfitting, having an effective feature selection method is a necessity. the calculation in chi-square test between each feature and the target greatly helps to determine if there is any association between two categorical variables in the dataset and whether such association will influence the prediction. we chose supervised feature selection techniques which use target variables like methods that can remove irrelevant variables from dataset. there are 79 columns in the dataset and so, it is wise to treat each column as separate entity when addressing missing values, hence for individual column, a combination of standard deviation and mean for the column was used to fill the missing and null values. one of the most important tasks in the data preprocessing was the feature selection (figure3), since our dataset has 79 features, only relevant features are needed to get optimal result. a combination of chi square feature selection test method and confusion matrix was used to select only the relevant features in the dataset to train our model while the irrelevant features were ignored. multinomial gave abysmal performance on the kdd dataset and was the worst performance among the three (figure4), (figure5), (figure6). it was important to visualize test and train accuracy, so as to ensure the classifier doesn't memorize the training data as that could cause a very wide gap between train and test accuracy which is over fitting, as seen in (figure6) there is closeness between train and validation accuracy to validate the implementation. we went back to thoroughly re-analyzed each of the relevant features and categorical label for any observation and then compare whatever our observation is to each variant assumption since each of the three variants of bayesian algorithm has different assumption.we concluded that the performance of each variant of bayesian classifier is impacted by its assumption, and each assumption is the single most important factor that influences their performance and accuracy, as we can see in the normal and continuous distribution of our label category which causes gaussian bayes to work best among bayesian variant on security dataset. we showed that gaussian naïve bayes algorithm performed best among the entire bayes based algorithm on intrusion detection, we also showed that each variant of bayes algorithm blindly follows its assumption which is the single most important factor that influences their performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/656.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/656.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a33200ab24c15954390da449a40bc600f44d031 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/656.txt @@ -0,0 +1 @@ +cancer is a leading cause of death worldwide, accounting for nearly 10 million deaths in 2020, or nearly one in six deaths . lung cancer is the most common cause of cancer death in 2020 with around 1.80 million deaths. the survival rate of lung cancer is strongly dependent on the cancer stage as well as the physical condition of the patient. on average, it is estimated that the five-year survival rate for lung cancer is around 56% for cases detected when the disease is still localized within the lungs. 
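a hedged scikit-learn sketch of the intrusion-detection recipe described in 655.txt above: chi-square feature selection followed by a comparison of the gaussian, bernoulli, and multinomial naive bayes variants. the synthetic non-negative features (chi2 requires non-negative inputs) and the 79-column shape are stand-ins for the real dataset, and the mean/std imputation step is omitted.

```python
import numpy as np
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import BernoulliNB, GaussianNB, MultinomialNB

# synthetic non-negative "flow features" standing in for the intrusion dataset
rng = np.random.default_rng(0)
n, d = 5000, 79                                    # 79 columns, as in the described dataset
X = rng.gamma(shape=2.0, scale=1.0, size=(n, d))
y = (X[:, :5].sum(axis=1) + rng.normal(0, 1, n) > 11).astype(int)  # anomalous vs normal

# chi-square test between each feature and the target keeps only the most relevant columns
X_sel = SelectKBest(chi2, k=20).fit_transform(X, y)
X_tr, X_te, y_tr, y_te = train_test_split(X_sel, y, test_size=0.3, random_state=0)

for clf in (GaussianNB(), BernoulliNB(), MultinomialNB()):
    clf.fit(X_tr, y_tr)
    acc = accuracy_score(y_te, clf.predict(X_te))
    print(f"{clf.__class__.__name__:>13}: accuracy = {acc:.3f}")
```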
on the other hand, in later stages, when the disease has spread to other organs, the fiveyear survival rate drops to 5% , highlighting the need for early detection. in response, different recommendations have been made. based on the national lung screening trial (nlst) , the united states preventive services task force recommends lung cancer screening with low-dose computed tomography (ldct) in adults aged 55 to 80 years who have a 30 pack-year smoking history and are currently smoking or have quit within the past 15 years . the conclusion of the nlst was that screening using low-dose computed tomography (ldct) resulted in a decrease in mortality equal to 3 fewer deaths per 1,000 participants . this study amongst others such as dante (detection and screening of early lung cancer by novel imaging technology and molecular essays) or the dlcst (danish lung cancer screening trial) also studied different strategies for screening, the harms and radiations caused by screening and other factors related to lung cancer mortality. known risk factors are key indicators when identifying patients with high risks of lung cancer occurrence . in this study, we chose to focus on patients who are current or former smokers.we propose a machine learning (ml) tool to compute the likelihood of lung cancer occurrence trained on data from the prostate, lung, colorectal, and ovarian (plco) cancer screening trial and validated on the national lung screening trial (nlst). from this ml-based tool, we developed a freely available web application that people can use to estimate their likelihood of developing lung cancer and sensitize them to lung cancer screening for early detection of lung cancer. based on the national lung screening trial (nlst), the united states preventive services task force recommends lung cancer screening with low-dose computed tomography (ldct) in adults aged 55 to 80 years who have a 30 pack-year smoking history and are currently smoking or have quit within the past 15 years. this study amongst others such as dante (detection and screening of early lung cancer by novel imaging technology and molecular essays) or the dlcst (danish lung cancer screening trial) also studied different strategies for screening, the harms and radiations caused by screening and other factors related to lung cancer mortality.we propose a machine learning (ml) tool to compute the likelihood of lung cancer occurrence trained on data from the prostate, lung, colorectal, and ovarian (plco) cancer screening trial and validated on the national lung screening trial (nlst). from this ml-based tool, we developed a freely available web application that people can use to estimate their likelihood of developing lung cancer and sensitize them to lung cancer screening for early detection of lung cancer. conducted by the national cancer institute (nci), the nlst study aimed to evaluate the efficacy of low-dose computed tomography (ldct)in detecting lung cancer among individuals at high risk. on the other hand, we decided to keep the participants who stayed longer than average in the study: the reason is that, even though they introduce a bias in favor of positive screening for lung cancer, we would rather have more false positives than false negatives (favoring recall over precision). we decided for the sake of predicting lung cancer risk in the next 5 years to train the model on patients from plco who were either negative for lung cancer screening and studied for longer than 2100 days (5. 
in the context of lung cancer prediction, shapley values provide valuable insights into the individual feature importance, allowing patients to identify the most influential factors contributing to the predicted risk of lung cancer. ultimately, the utilization of shapley values for interpretability in the domain of lung cancer prediction using xgboost not only improves model transparency and performance but also supports the development of reliable and trustworthy prediction models in healthcare settings. however, when focusing on calibration-in-the-small, it figure1: shapley values of the features used for lung cancer risk prediction seems that the model is underconfident for people with a high risk of developing lung cancer.a data-driven risk model was established and tested to predict the 5-year outcomes related to national lung screening trial (nlst)-like ct lung cancer screenings. several models have been created to better identify smokers who should be screened for lung cancer katki et al developed and validated risk models for lung cancer screening using low-dose ct.in the context of lung cancer prediction using machine learning, several performance metrics are commonly employed to evaluate the effectiveness of the model and understand its predictive capabilities.we created a model to predict lung cancer risk at five years using xgboost with better precision and recall than the current uspstf recommendations. in that context, shared decision-making processes should be carefully evaluated, as most lung cancer deaths are currently not preventable through screening, even if ct screening can reduce lung cancer mortality by 20%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/657.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/657.txt new file mode 100644 index 0000000000000000000000000000000000000000..caf706fdf5ff8cfe6aeb735fd963a1203e72b89a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/657.txt @@ -0,0 +1 @@ +linear regression, an enduring cornerstone of statistical modelling , continues to play a pivotal role in deciphering variable relationships and making predictive inferences. renowned for its simplicity and interpretability, this technique remains a foundational tool in the realm of data analysis . however, in our pursuit of predictive excellence, we propose a paradigm shift that marries conventional linear regression with innovative methodologies. by harnessing the prowess of a kalman filter and pioneering curve area analysis , we embark on a journey to redefine predictive accuracy and minimize loss . our overarching goal is to propel linear regression models into uncharted territories, where predictive optimization meets a holistic understanding of data dynamics.the research focuses on finding an optimal linear regression equation using stochastic gradient descent (sgd)as the weight updation technique, along with the incorporation of a kalman filter and analysis of the area under the curve. the weights (w) and bias (b) are then updated using the gradient descent algorithm, where the weights are adjusted by subtracting the product of the learning rate (α) and the gradient of the loss with respect to the weights, and similarly, the bias is adjusted using its respective gradient.in the context of utilizing the kalman filter for state estimation, the algorithm involves several crucial steps. 
subsequently, the measurement and transition matrices are set, and the state and covariance variables are initialized using the provided initial_state and initial_covariance inputs.the state variable is updated by multiplying the transition matrix with the current state, projecting the current state forward.the covariance variable is updated by performing matrix multiplication involving the transition matrix, covariance, and the transpose of the transition matrix.the kalman gain is computed, which serves as a weighted factor for incorporating the measurement into the state estimation. this is achieved by multiplying the covariance, the transpose of the measurement matrix, and the inverse of the combined matrix involving the measurement matrix, covariance, and its transpose, along with the identity matrix. the state variable is adjusted by adding the product of the kalman gain and the discrepancy between the measurement and the product of the measurement matrix and the current state. this correction step aims to refine the state estimate using the new measurement information. the covariance variable is updated by matrix multiplication involving the difference between the identity matrix and the product of the kalman gain and the measurement matrix, combined with the covariance.the iteration continues for each new measurement, iteratively refining the state and covariance estimates. the final state and covariance are eventually returned as the outcome of this process, providing an optimized estimation of the system's underlying state while effectively accounting for measurement noise and uncertainty.we conducted experiments to compare the performance of our proposed methodology, which incorporates a kalman filter approach for minimizing loss via the area under the curve, with traditional linear regression using ordinary least squares (ols)and other popular regression methods such as ridge regressionand lasso regression. overall, our research contributes to the field of regression analysis by introducing a novel approach that harnesses the predictive power of the kalman filter and leverages the area under the curve to minimize loss in linear regression models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/658.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/658.txt new file mode 100644 index 0000000000000000000000000000000000000000..a4231bd713b60de72c2b3ab93266c2a80937826f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/658.txt @@ -0,0 +1 @@ +anomaly detection in data analysis typically refers to the discovery of uncommon observations of patterns that differ considerably from the majority of the data and do not adhere to a well-defined concept of normal behaviour. chandola et al. introduce the applications of anomaly detection in many areas such as intrusion detection, fraud detection and industrial damage detection. this paper will introduce a generic technique for extracting features from activity changes (transitions) for use in machine learning and signal processing.our paper is based on the paper from zhong et al. , which uses a process mining related technique for network intrusion detection. the technique in is inspired by process mining , algorithms that discover process models from event logs. there are procedures involved in every aspect of our daily lives, from the operations of large businesses to the management of private households. 
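the predict/correct cycle described in 657.txt above can be written compactly with numpy; the constant-velocity transition matrix, measurement matrix, and noise levels below are illustrative assumptions, not the paper's configuration.

```python
import numpy as np

def kalman_filter(measurements, F, Hm, Q, R, initial_state, initial_covariance):
    """Standard predict/update cycle: the state is projected forward with F,
    then corrected with the Kalman gain using each new measurement."""
    state, cov = initial_state, initial_covariance
    I = np.eye(len(initial_state))
    for z in measurements:
        # predict: project state and covariance forward with the transition matrix
        state = F @ state
        cov = F @ cov @ F.T + Q
        # update: the Kalman gain weighs how much the measurement corrects the prediction
        K = cov @ Hm.T @ np.linalg.inv(Hm @ cov @ Hm.T + R)
        state = state + K @ (z - Hm @ state)
        cov = (I - K @ Hm) @ cov
    return state, cov

# toy 1-D constant-velocity example (position is measured, velocity is inferred)
rng = np.random.default_rng(0)
F = np.array([[1.0, 1.0], [0.0, 1.0]])          # transition matrix
Hm = np.array([[1.0, 0.0]])                     # measurement matrix
Q = 1e-4 * np.eye(2)                            # process noise
R = np.array([[0.25]])                          # measurement noise
zs = [np.array([t + rng.normal(0, 0.5)]) for t in range(20)]
state, cov = kalman_filter(zs, F, Hm, Q, R, np.zeros(2), np.eye(2))
print(state)                                    # roughly [position near 20, velocity near 1]
```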
in the industrial sector, one can find both the production of automobiles and the fulfilment of customer orders. the procedure or series of activities for achieving a goal is known as the process. we use a network-based intrusion detection system as an illustration of our technique in the context where a network flow of multiple packets is treated as a process. introduced the feature generation algorithm and the result for intrusion detection, but did not introduce other capabilities of the algorithm. our paper extends the technique of . and explore deeper into the technique itself. this paper adds the generality that enables standardised input from applications in different domains, on top of already existing yet introduced capabilities of discovering global process structure that may aid in anomaly detection in concurrent processes, the packet-level (event-level) processing for online detection and time-series information encoding with reasonable computational complexity. an intrusion detection system (ids) is utilised to detect and classify security policy violations and attacks. we have network-based intrusion detection system (nids) and hostbased intrusion detection system (hids) depending on the purpose of the system. the nids is usually deployed on infrastructures like routers and switches to detects intrusions by monitoring network activities. the hids, on the other hand, inspects each individual system for any unauthorised file modifications, abnormal network activity, or suspicious behaviour.in the following section, we will first explore some related works that focus on intrusion detection. as is closely related to the intrusion detection domain, we will understand the problem better and discuss what benefit of the algorithm from provides. then we summarise the problems in section 3 that tfgen is able to solve. the technical details of tfgen will be presented in section 4, and finally we will discuss the possible applications of tfgen and some known issues of this technique.introduce the applications of anomaly detection in many areas such as intrusion detection, fraud detection and industrial damage detection. this paper adds the generality that enables standardised input from applications in different domains, on top of already existing yet introduced capabilities of discovering global process structure that may aid in anomaly detection in concurrent processes, the packet-level (event-level) processing for online detection and time-series information encoding with reasonable computational complexity.from the detection method perspective, signature-based intrusion detection systems (sids) and anomaly-based intrusion detection systems (aids) are typically the two types of intrusion detection systems. the first is the lack of ability to perform online detection; the second is that some packet-level detection techniques have difficulty applying to encrypted data; third, most techniques lack the ability to detect the global process structures.given a sequence of events p , we define a transition in p as a pair of consecutive events (p i , p j ) within the same case.here is an example, giving a series of events p = ⟨p 1 , p 2 , p 3 , p 4 , p 5 ⟩ with two flows t 1 and t 2 , where flow t 1 = ⟨p 1 , p 3 , p 5 ⟩ and flow t 2 = ⟨p 2 , p 4 ⟩, we will get two transitions for t 1 : (p 1 , p 3 ) and (p 3 , p 5 ); one transition for t 2 : (p 2 , p 4 ). 
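the transition definition and the worked example above can be made concrete with a small sketch: group events by their flow (case) id and emit consecutive pairs within each flow. the function names are illustrative and not the tfgen implementation; the output also shows why p1 and p2 do not form a transition, since they belong to different flows:

```python
from collections import defaultdict

def extract_transitions(events):
    """events: ordered list of (flow_id, event) pairs; returns transitions per flow."""
    per_flow = defaultdict(list)
    for flow_id, event in events:
        per_flow[flow_id].append(event)
    # consecutive events within the same flow form a transition
    return {f: list(zip(seq, seq[1:])) for f, seq in per_flow.items()}

# example from the text: p1..p5 with flows t1 = <p1, p3, p5> and t2 = <p2, p4>
events = [("t1", "p1"), ("t2", "p2"), ("t1", "p3"), ("t2", "p4"), ("t1", "p5")]
print(extract_transitions(events))
# {'t1': [('p1', 'p3'), ('p3', 'p5')], 't2': [('p2', 'p4')]}
```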
p 1 and p 2 are two consecutive events, however, these events will not be considered as a transition as they belong to different flows.an event class ec(p) is the name of an event p, and normally it is the concatenated string of non-numerical attribute data.the temporal event table (tet), formerly known as the state table in, is a data structure used to prevent the loss of information on transitions for historical data not covered by the sliding window. for instance, c ♢ has case ♢, which exists in tet, and the event class record for ♢ in tet is b; the system creates the relation (b, c) and then updates the record in tet to the last observation c.tet in combination with a sliding window buffer that retains the previous l transitions eliminates the need to generate a graph for each window and the need to search for the previous event with the same trace id as the incoming event.the main difference between tfgen and traditional process mining is that process mining focus on process model generation and analytical method like conformance checking on process models, whereas tfgen is designed for online processing and feature generation for machine learning.these applications are based on hypotheses that tfgen supports all standard event logs as long as processes can be converted to event log, and the performance and practicability of using tfgen in these areas can be open research questions for the future work.to reduce the dimensionality, we only generate transition matrices based on a limited number of most frequent event classes out of over 260 observable event classes. we call the event classes that fall within the limited range visible event classes, and we call other event classes hidden event classes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/659.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/659.txt new file mode 100644 index 0000000000000000000000000000000000000000..3752b9d53b758e3c9516e8f4ad36382eb69a1299 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/659.txt @@ -0,0 +1 @@ +pod is a technique that aims to represent a big amount of data by a reduced number of basis elements built from the data. more precisely, if the data are stored in a m×n matrix a with n being a large number, is it possible to represent the n columns of a by a p orthonormal vectors with p very small compared to n. the mathematical formulation can be summarized as follows, letan orthonormal set of p vectors that better represent the n columns a in the following sense, w 1 satisfying the optimization problem,this constrained optimization problem can be solved using lagrange multipliers:and the solution is obtained by nullifying the gradientthenand then, by setting w = w 1 and λ = λ 1this shows that w 1 is the eigenvector of the matrix aa t and λ 1 is the associated eigenvalue. now evaluate the maximum by substituting w 1 ,we seek w 2 to be the second best representative with w 2 perpendicular to w 1 . we seek w 2 in the orthogonal space to the one spanned by w 1 . 
this can be formulated as follows:note the matrix aa t is symmetric, positive semi-definite, then it has m eigenvalues λ 1 ≥ λ 2 ≥ ...λ m ≥ 0 and m corresponding eigenvalues w 1 , ..., w 2 that can be chosen to be orthonormal, that is,since we look for a w orthogonal to w 1 , that is w ∈ span{w 2 , ..., w m } it can be expended asnow estimate the quantity to maximize at an arbitrary w ,we can check by expending in terms of components that,and by orthogonalitynow if we substitute w by w 2 following the same steps as in ( 4) we obtainwhich proves that the maximum is reached at w 2 . we can repeat the same process and show that the p vectors we looking for are the eigenvectors of aa t . for more details see . kernel methods are one of the most popular subset of data classifiers, and the support vector machine (svm) is one of the widely used technique of this family. the main principle of kernel methods, is to map the initial data, called attributes, into a higher dimension vector space called features space, where data are easily separated. the svm technique is designed to separate linearly separable data, and when this is not the case, the mapping intends to make the data linearly separable in the feature space. the mapping function is not defined explicitly, which is a tedious task, but rather a kernel is used as long as the method steps can be expressed by a dot product. the svm method is very popular for its demonstrated performance, however as any methods it has some shortcomings that can be summarized as follow; it is an optimization problem, the process can then fail for high dimension even if the cost function is a quadratic (convex function). imbalanced classes management is another issue, and finally a dynamic classification where new classes can form or disappear, all hyper-plans have to be re-calculated. as an alternative, this paper proposes a new kernel method based on a minimum distance to optimal subspaces (in the sense of smallest subspace containing the mapped classes). this method performs as well as the svm, with the following advantages; the method doesn't require any optimization process, which makes it more robust, the complexity grows linearly with the number of classes, which reduces sensibly the processing time, no need to any recalculation for new classes in dynamic classification, and the imbalanced classes is solved by decomposing the concerned subspace into sub-subspaces of a likewise dimensions. the subspaces in the features space are obtained using pod (proper orthogonal decomposition) and since distances to subspaces are calculated by projections they are expressed by dot products, and then can be calculated through the kernel.the paper is organized as follows, in chapter 1 an overview of the pod method is provided, in chapter 2 the new method is described, in chapter 4 2d tests are performed for validation and conclusions are drawn in chapter 5.in this work a method based on kernel theory for data classification is proposed, the idea is to map the original data (attributes) into a bigger, in terms of dimension, features space as for the svm, then optimal (in the smallest sense) subspaces that contain the mapped classes are built. note that similar techniques like pca (principle component analysis) are commonly used in kernel theory for data classification but as a pre-processing for features extraction and improvement. 
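the derivation above reduces pod to an eigenproblem for a a^t; a minimal numpy sketch of extracting the first p basis vectors under that reading (illustrative only):

```python
import numpy as np

def pod_basis(A, p):
    """A: m x n data matrix (n large); returns an m x p orthonormal basis from eigenvectors of A A^T."""
    C = A @ A.T                               # m x m, symmetric positive semi-definite
    eigvals, eigvecs = np.linalg.eigh(C)      # eigenvalues in ascending order
    order = np.argsort(eigvals)[::-1]         # reorder so that lambda_1 >= lambda_2 >= ...
    return eigvecs[:, order[:p]]              # w_1, ..., w_p
```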
note that selecting a kernel such that the output space h is of infinite dimension is recommended to increase the chance having the data mapped into distinct subspaces. for dynamic classification, if a new class is created all hyperplanes in the output space have to be recalculated for the svm, while for the new method the only thing that needs to be calculate is the new pod subspace.the paper proposed an alternative to svm method based on a minimum distance to optimal subspaces containing the mapped attribute classes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/66.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/66.txt new file mode 100644 index 0000000000000000000000000000000000000000..9d108cc5a6a131a7a9bd61072014090c537ea08f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/66.txt @@ -0,0 +1 @@ +this research learns the rules of chess without direct expert intervention or heuristic guidance. extending previous work on learning go games with language transformers (ciolino et al. 2020), the work benefits from large archives of chess notation in text and a game replay visualization tool. we combine the millions of chess games in text formats with the remarkable feature learning parts of the large gpt-2 model (radford et al. 2018). unlike a traditional sequence generator, the transformers support built-in parallelism and a directed attention mechanism to overweight key features. the original contributions of this research include generating plausible chess moves following the fine-tuning of the large gpt-2 transformer with its 774 million model parameters. a second innovation features a novel game interface where human players can challenge the transformer in live play. to take advantage of graphical processing units (gpu-acceleration) we host the shared games on google's colaboratory platform (colab) 1 .in contrast to our previous exploration of transformers to play go, chess has received considerable attention in language modeling. a lisp chess engine (penson) 2 applied a frequency map and portable game notation (pgn) mentor database. the frequency map establishes conditional move probabilities for each forward move. to produce move probabilities from a board state vector (ahle 2018), another text-trained chess engine (fastchess) 3 applied a popular text classification library as a one-layer plus soft-max model. when first open-sourced in 2016, their fasttext classifier (bojanowski et al. 2017) from facebook ai research was state-of-the-art. facebook employed sentence structure features with bags of both words and n-grams, two strategies now overtaken in the literature by the rapid growth of transformers such as google's bert (devlin et al. 2018) and openai's gpt. these newer language models supplement the long tradition of using game trees to formalize decisionmaking and chess strategies (nornai 1997). one can postulate that the decision tree model is deep (enumerating 60+ moves ahead) but narrow compared to the language-based alternatives presented by our chess transformers. this approach further contrasts with the monte carlo tree search, (mcts) employed so effectively with alpha-go and reinforcement learning (silver et al. 2018). the application of models outside their initial languagerelated training sets has attracted interest in other cross-domain problems, for example, in imagery (parmar et al. 2018) and audio (child et al. 2019). 
presser and branwen (2020) first explored the application of gpt-2 to chess with a filter on invalid moves. like recurrent neural networks, transformers however specialize in modeling sequential data like language (or game moves here), but without relying strictly on presentation order during training. its core architecture features encoder-decoder cycles that apply weights to derive features with its unique 'attention' mechanism that effectively overweighs the most relevant features as it learns. the transformers' remarkable abilities to generate text arises from its parallelism during training, which enables traditional neural architectures to ingest vast amounts of internetscale textual inputs. the chess application of gpt-2 suggests new and innovative ways to expand chess training data with high-level simulation data. for instance, our exploration of only high-ranking gameplay (e.g. training elo ranks > 2,200) highlights the specialized transformer model may encode the basic features of multiple games and learn their winning strategies without human guidance or direction. extending previous work on learning go games with language transformers(ciolino et al. 2018). 2019). its core architecture features encoder-decoder cycles that apply weights to derive features with its unique 'attention' mechanism that effectively overweighs the most relevant features as it learns.to generate chess training data for the language models, we transformed chess game archives to single lines beginning with either the simple result (win/lose/draw) or the result plus elo ranks for black and white. as illustrated using gpt-2 visualization of chess moves in figure2, each token of the chess game receives varying importance through the transformers encoding-decoding layers(vig 2019). if an initial player enters the chess transformer at elo base (800), losing 894 games successively against an expert with elo 2,000 or greater would likely drop the white player to the floor rating of 100.in the discussion section, we revisit the more strategic formations for the chess openings to understand if a coherent game approach follows from the transformer training. c4)? with the game commentaries built into the arena chess visualization, each replayed game from the chess transformer conveniently annotates strategic positions, while automatically moving the pieces according to the pgn we generate from the fine-tuned gpt language model. while the current work focuses on plausible or interesting chess moves to learn in such a roundabout way, various other research paths could isolate the prompt or trigger phrases within gpt-2 finetuning and simulate better game rhythm from some midpoint in the game. first, how is a language model able to learn chess? the gpt-2 has memorized a series of tokens and like any number of other statistical methods has figured out how to maximize rewards. similarly, whether a 774 million hyperparameter language (or token) model has overfitted the game space for complex games like chess or go, may also appear largely a philosophical question to ponder. the notion that a trained elo 2,000 model consistently beats a lesser opponent suggests that perhaps there is a future better gpt-2 or gpt-3 text generator that can play chess at a consistent super-human level. does the trained model advance beyond a parroted amateur? 
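separately from that question, the training-data transformation described above (one game per line, prefixed by the result alone or by the result plus the elo ranks for white and black) can be sketched roughly as follows; the exact prefix format is an assumption, since the text does not spell it out:

```python
def game_to_line(result, moves, white_elo=None, black_elo=None):
    """flatten one chess game into a single training line for the language model."""
    prefix = result if white_elo is None else f"{result} {white_elo} {black_elo}"
    return prefix + " " + " ".join(moves)

print(game_to_line("1-0", ["e4", "e5", "Nf3", "Nc6"], white_elo=2400, black_elo=2250))
# 1-0 2400 2250 e4 e5 Nf3 Nc6
```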
to examine this question systematically, the live gameplay interface (figure6) allows humans to play against the chess transformer and rate its overall effectiveness for themselves in match play. while traditional game agents are trained with inherent game logic and mcts's depth search, this approach highlights the notion that a breath search of millions of games can allow a language model to define a game's rules and strategy by itself. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/660.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/660.txt new file mode 100644 index 0000000000000000000000000000000000000000..a11a7e363a9990e0aa03cc5cce5b2025c570e308 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/660.txt @@ -0,0 +1 @@ +as the sport of volleyball has gotten more popular and players have begun joining at younger and younger ages, the level of play has also increased. this increase in popularity and the subsequent improvements in player skill have lead to increasing demands for tactical analysis and better game strategies. these demands come with a greater necessity for computational analysis of the sport.with recent increases in interest for sports data analytics through all sports, we have seen an increasing number of studies looking into predicting game events (simpson et al. 2022), analyzing team and individual player performance (claudino et al. 2019), overall sport development (nadikattu 2020), and predicting or analyzing overall team performance across sports. for example, the sports of basketball, soccer, and baseball have seen several studies.basketball has seen several datasets released and other analysis (jain and kaur 2017;thabtah, zhang, and abdelhamid 2019), (miljković et al. 2010) (miljković et al. 2010;mahmood, daud, and abbasi 2021;tian et al. 2019) for predicting game outcomes, improving player developement, predicting rising stars, or identifying opposing team's offensive and defensive strategies. soccer (simpson et al. 2022;decroos et al. 2019;rudrapal et al. 2020;baboota and kaur 2019;prasetio and harlili 2016;sawchik 2015;whiteley 2007;sun, lin, and tsai 2022;huang and li 2021;mcpherson and macmahon 2008;chun, son, and choo 2021;aoki 2010) have also seen studies focused toward a variety tasksincluding game event and outcome predictions, posture analysis, game lineup prediction, and injury risk assessment.despite growing interest in sports analytics, studies on the sport of volleyball have been limited so far. the few studies that have been conducted have tended to have small scopes and use basic naive approaches, yet they have yielded a promising start and a good baseline to compare with. with the increasing need for more and more sophisticated data analytics strategies, we wish to introduce specialized encodings and models for the sport of volleyball to improve upon these current baseline approaches without the need for gathering additional data. on top of this, there is no information describing the temporal ordering of the contacts given to the models. pass contact location and pass rating relate to the pass contact, set rating and location relate to the set contact, etc. from further baseline testing, this additional information does not noticeably effect performance in any baseline model, so any performance changes with this task will be solely from the graph encoding. 
therefore, for this task each graph involves the previous round's hit node (if it exists), the previous round's block node (if it exists), the current round's pass node, and the current round's set node only including information from before the setter contacts the ball-such as where the setter will set the ball from. so for this task, each graph involves the previous round's block node (if it exists), the current round's pass node, the current round's set node, and the current round's hit node only including information from before the hitter contacts the ball-such as the location the hitter will be hitting from. lastly the graph transformer architecture we used for all tasks invloved one custom graph transformer convolution layer, a graph global pooling layer, then 2 dense layers. for example, the baseline models were better at predicting rally outcomes and hit types in the professional games because they are less random and more deterministic given the better, more consistent, and more mentally strong play by professional players; additionally the baseline models were better at predicting the set locations because professional setters are more skilled and make more randomized sets to confuse the opposing team. graph transformer yielded a large improvement in ncaa game performance and a slight but noticeable boost in pro game performance in all metrics except for brier score (which performed slightly better in the pro game and slightly worse in the ncaa game). the graph transformer did perform noticeably better on the ncaa game, however, so this again suggests that the lower level of play in the ncaa game made it harder to find these underlying relationships as compared to the pro game, but the graph encoding was able to make these relationships more clear and thus standardized performance between the two levels of play. first, given the fact that setters try to be as random as they can be when making set location predictions, a simpler model seems to do better. the simple cnn model gives a 2-3% performance boost over the baseline transformer, which is pretty noticeable in a hard to predict task, and gcn yields a similar performance boost over graph transformer. secondly, it seems that the graph encoding and additional information from the previous round does help improve set prediction performance, but the improvement is not as significant as the previous task. this result, combined with the better performance of the convolution models over the transformer models, would suggest that the location a setter will set to depends less on contact-by-contact information than the outcome of the rally. the graph encoding noticeably improved the performance of the graph transformer over the baseline for the ncaa games and was relatively the same for the pro games, and the gcn performed slightly worse than baseline cnn for both sets of games. these results would suggest that the encoded contact-by-contact information is not as important for this task; the additional information may slightly improve models that can use this information wellsuch as a transformer-but may harm simpler models that cannot analyze this information as well. 
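as a concrete reference for the architecture mentioned above (one graph transformer convolution layer, a global pooling layer, then two dense layers), one possible realization is sketched below with pytorch geometric; the layer sizes and the use of TransformerConv/global_mean_pool are assumptions, since the paper describes a custom convolution layer:

```python
import torch
import torch.nn.functional as F
from torch_geometric.nn import TransformerConv, global_mean_pool

class ContactGraphModel(torch.nn.Module):
    def __init__(self, in_dim, hidden_dim, num_classes):
        super().__init__()
        self.conv = TransformerConv(in_dim, hidden_dim)   # graph transformer convolution
        self.fc1 = torch.nn.Linear(hidden_dim, hidden_dim)
        self.fc2 = torch.nn.Linear(hidden_dim, num_classes)

    def forward(self, x, edge_index, batch):
        h = F.relu(self.conv(x, edge_index))      # message passing over contact nodes
        h = global_mean_pool(h, batch)            # pool node embeddings to one graph embedding
        h = F.relu(self.fc1(h))
        return self.fc2(h)                        # e.g. rally-outcome or set-location logits
```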
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/661.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/661.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a148c5203d89a0d5b47cd65fa8964efe813a305 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/661.txt @@ -0,0 +1 @@ +the evolution of the modern financial sector has been marked by rapid advancements in technology, enabling financial institutions to offer better services with improved efficiency. one of the main contributors over the last decade is machine learning (dixon, halperin, and bilokon 2020;babaev et al. 2019). enhanced model quality built on timely and accurately collected data leads to improvement of quality and decision making speed in banks (tripathi and pavaskar 2012). however, these advancements have simultaneously opened up new channels for malicious actors to exploit, one of which is the emergence of adversarial attacks on machine learning models. the issue becomes even more pressing in the context of financial transaction data, where the stakes have explicit monetary value, and robust defense mechanisms are needed (fursov et al. 2021).while close to natural language (qiu et al. 2022) and event sequences data (khorshidi, wang, and mohler 2022;shchur et al. 2021) in structure, financial transaction data that consists of sequences of transactions produced by customers have notable differences. they include in particular dependence on macroeconomic situation, higher required attention ranges and higher diversity of available features (fursov et al. 2021).competitive evaluation has emerged as an effective way to measure and foster advancements in machine learning (goodfellow et al. 2013). however, current competitions often overlook the two-side dynamics of adversarial attacks and defenses and tend to ignore the unique challenges posed by financial transaction data (croce et al. 2021).through our investigation, we aim to contribute valuable insights into the robust models landscape of the financial sector handling emerging risks associated with artificial intelligence. in particular, the paper introduces a solid approach to advance the development of robust models for the processing of financial transaction data. our primary contributions are:• a novel competition scheme: we propose a remarkable competition framework to evaluate the robustness of machine learning models in the financial domain. our tournament phase encourages participants to actively probe and defend against vulnerabilities, simulating real-world scenarios and enhancing the reliability of the models. in addition, we release a new open dataset of sequences of financial transactions with the credit default as the target. • dynamic analysis: through analyzing the dynamics of the competition, we uncover insights into model sustainability and identify crucial factors influencing attack and defense quality. such analysis enables us to gauge the models' real-world efficacy. • tailored strategies for financial transactions: we demonstrate that financial transaction data require specialized algorithms for their attacks and defenses, differentiat- the rest of this paper is structured as follows. section competition structure presents the design and the structure of our competition. 
section results delves into the analysis of the competition's dynamics and the findings related to the robustness of the models, as well as the comparison of the developed attacks and defenses to existing baselines. section related work is devoted to related work on the topic. our investigation shows that for this design of defense, we improve on the defense's main metric, reducing the effect of the baseline attack; moreover, after the application of such a procedure, the baseline attack has negligible adversarial properties. as a solution for the attack track is a modified list of sequences of transactions, and a solution for the defense track is an updated model available as a docker container, we can evaluate the quality for each pair of attack and defense. as we have two sub-phases for the attack phase, we can observe not only the comparison of private and public scores but also the progress of scores over the competition period (2023-02-08 to 2023-04-01). we present the empirical cumulative density function of top scores for each participant for the private and two public phases for an attack on a white-box model, and the private score for an attack on a black-box model, in figure 2. during the second stage of the attack against a black-box model, we see that the improvement in the robustness of models made attacks almost useless. the dynamics of the scores suggest that after obtaining a model it is enough to spend about two weeks to completely break it, so model owners should take action after a model leak in a timely manner. here we also present the results for different stages: two stages for a given attack and two tournament stages for unknown attacks developed by other participants. part of the improvement comes from the improvement of model quality on clean data, as we use the harmonic mean of the quality on clean and attacked data for evaluation. defence versus attack quality: during the black-box tournament stage of the competition, participants submit their defended models and their attacked sequences of transactions. defended models: we consider five models with different defensive properties and raw data quality produced by top attacks. as the number of mccs is limited, we adopt attacks close to a greedy brute-force approach. alternative boosting mix alt is an attack on an ensemble of boostings different from that used in the model; boosting mix alt, which is the approach used by the winner of the attack track, uses a variety of different boosting models to generalize better to different black-box defences. to enrich the space of attacks we also consider an attack with a similar number of random changes and two gradient-based attacks, nn base grad and nn mix grad. attack and defense performance: table 2 presents roc auc scores for pairs of the considered attacks and defenses on the default dataset released with the competition. we see that the best options for developing the most powerful attack and defense strategies rely on principles common to most data science competitions and to the adversarial attacks literature. finally, our investigation suggests that, with limited amounts of training data and the possibility of effective feature engineering, gradient boosting models can be efficiently distilled from neural networks with similar or even better quality on unstructured data, while being significantly more robust to adversarial attacks.
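the evaluation just described combines the quality on clean and on attacked data with a harmonic mean; a minimal sketch, assuming roc auc as the base quality metric (as in the reported tables) and leaving all other scoring details aside:

```python
from sklearn.metrics import roc_auc_score

def competition_score(y_true, scores_clean, scores_attacked):
    """harmonic mean of roc auc on clean and on adversarially perturbed transactions."""
    auc_clean = roc_auc_score(y_true, scores_clean)
    auc_attacked = roc_auc_score(y_true, scores_attacked)
    return 2 * auc_clean * auc_attacked / (auc_clean + auc_attacked)
```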
each separate model was significantly worse, than the baseline model, but in total the quality of models doesn't affect the quality of attacks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/662.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/662.txt new file mode 100644 index 0000000000000000000000000000000000000000..83bc40743cd3888414ca6e08548c237f02f41431 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/662.txt @@ -0,0 +1 @@ +the mlops method incorporates ml models into the software development process. as shown in fig. 1, it integrates ml features with devops concepts, allowing the automatic installation and effective monitoring of ml models in the development setting. mlops systems should be capable of working as a collective, constant, repeatable, validated, and monitored in a way to meet corporate mlops objectives. the three main components of the mlops development process are model, code, and data. the mlops structures 4 require technique automation to keep the development phase running. as a result, mlops makes software development more accessible and faster while posing less risk. productivity is aided by quick model building, high-quality ml models, and quick placement and production. the mlops methods allow us to manage and screen a large number of models thanks to cicd's extensibility. the high level of collaboration between teams helps to avoid conflicts and expedite the transfer procedure. controlling identifiability and reliability also helps to reduce the hazards that come with it. modelops, kubeflow, and tensorflow extended (tfx),,are examples of systems that provide end-to-end development control for ml methods by arranging their stages into different ml pipelines.mlops is a word that describes the fusion of ml operation (ops) and development (dev) created by applying devops codes to ml methods. mlops advocates for inspection and automation at all stages of the development and implementation of ml methods, including combination, experimentation, importing, set-up, and substructure control. consider the procedures necessary to design and set up ml systems to grasp the mlops issuessuch as in the beginning, data must be available for development. to summarise, the ml phases that occur before the completion of the ml model appear to be waterfall-like, however, the methods associated with traditional software are used to implement the model on a larger entire chart.the mlops method incorporates ml models into the software development process.investigated the current state-of-the-art ml to determine how much mlops is necessary for today's ml systems and how much software engineers are still dealing with numbers and demonstrating issues without considering implementation. klein et al. they also use some new technology techniques for this research. by implementing a devops experience with an ml platform on a common real object configuration, zhou et al. it is an open-source ml platform that structures the pieces of the ml system on top of the kubernetes platform and provides good efficiency, installation, and monitoring of ml applications throughout their lifecycle using standardised pipelines. 
dvc is a platformagnostic and open-source version system for ml tools that allows users to develop shared, uniform ml models while keeping track of model, pipeline, and record-type behaviour.mlops, or the ability to deliver ml software indefinitely, is becoming a requirement for companies that employ ml in development, comparable to devops for outdated software. this study also looked at cost-effective mlops solutions that could be used to suit the ml development phase's requirements. although there are several mlops platforms in use, the bulk of them has limitations when it comes to completing ml development phases and providing a mechanical structure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/663.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/663.txt new file mode 100644 index 0000000000000000000000000000000000000000..87b0b772abcfa46efc3229824c92306a298193e8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/663.txt @@ -0,0 +1 @@ +one of intensively developing clustering techniques is the graph spectral analysis (gsa). it works best for objects whose mutual relationships are described by a graph that connects them based on a similarity measure .one important application is text documents clustering, where the similarity of documents can be expressed in a number of ways, e.g. by the count of common words or in terms of more sophisticated descriptions (e.g. cosine similarity), see e.g. . in our experiments, we use the cosine between document vectors in the term vector space as the measure of document similarity. gsa applies eigendecomposition of the so-called graph laplacians, derived from the similarity matrix.the original gsa suffers from the lack of a method for assignment of new data items to the existing clusters. hence a clustering from scratch or training of some external classification model is required. clustering from scratch may be hard for large data collections. classification by the external model may cause cluster definition drift. due to these issues, several approaches were proposed to solve them, including . this paper can be seen as a contribution to this type of research. the mentioned approaches concentrate on transforming eigenvectors, while our method relies on eigenvalues only.the algorithm proposed in this paper allows to perform the clustering in batches. 
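before the batch algorithm is outlined below, a minimal sketch of the similarity graph and laplacians just described: cosine similarity between document vectors in the term vector space, the combinatorial laplacian l = d - s, and the normalized laplacian. construction details such as sparsification or handling of isolated documents are omitted and would depend on the data:

```python
import numpy as np

def laplacians(X):
    """X: documents x terms matrix; returns the combinatorial and normalized graph laplacians."""
    Xn = X / np.linalg.norm(X, axis=1, keepdims=True)       # unit-length document vectors
    S = Xn @ Xn.T                                           # cosine similarity matrix
    d = S.sum(axis=1)                                       # degrees
    L = np.diag(d) - S                                      # combinatorial laplacian
    D_inv_sqrt = np.diag(1.0 / np.sqrt(d))
    L_norm = np.eye(len(d)) - D_inv_sqrt @ S @ D_inv_sqrt   # normalized laplacian
    return L, L_norm
```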
the algorithm has the following structure (details are given in section 3):• for each batch of documents, perform the traditional spectral clustering into the predefined number of clusters.• compute the vector of combinatorial or normalized laplacian eigenvalues of each cluster of each batch.• then, based on some dissimilarity criteria between the cluster spectra of different batches, make a decision to combine the corresponding clusters of different batches.• the matching of clusters is based on minimizing the difference between these vectors.we investigated the following (dis)similarity criteria:• normalize the spectra by dividing by the largest eigenvalue, then the dissimilarity is equal to an (approximate) integral between the class spectrum and the new data set spectrum (combinatorial laplacian relative lambda method, clrl)); see fig • compute not the combinatorial laplacian but rather the normalized laplacian (which has always by definition the largest eigenvalue not greater than 2 1 , then the dissimilarity is equal to an (approximate) integral between the class spectrum and the new data set spectrum (normalized laplacian method, nll); see fig 3.the dissimilarity measures mentioned above differ due to specific properties of gsa. nll is based on normalized laplacian while the other three measures refer to combinatorial laplacian. this has an effect on the shape of the respective spectrograms. eigenvalues of normalized laplacian are upper-bounded by the value of 2, whatever the sample size is. so if one has samples of different sizes from the same population, the value range is bounded and one needs only to adjust the indexes of eigenvalues to match the spectrograms of data from the same population. but the eigenvalues of combinatorial laplacian can grow without any limit if the sample size increases. the approaches clrl, clssal and clmxl handle the issue of matching spectrograms of data from the same population in different ways. it is necessary in all these cases to normalize the indexes of eigenvalues (into the range 0-1). the approach clrl normalizes the eigenvalues by dividing by the largest eigenvalue, while clssal divides them by the sample size. the effects of both on the spectrogram would be the same for samples from the population, but the shapes of different population spectrograms will differ in different ways (e.g. in clrl the spectrograms will meet at both ends, while in clssal they will not). divides them by the sample size. the effects of both on the spectrogram would be the same for samples from the population, but the shapes of different population spectrograms will differ in different ways (e.g. in clrl the spectrograms will meet at both ends, while in clssal they will not). clrl is more susceptible to noise at the largest eigenvalue than clssal. clmxl transforms the spectrogram in the same way as clssal, but instead of using an integral to assess the differences between populations it take the larges eigenvalue after normalization. for justifications of the used properties see .our algorithm is proposed in section 3.the experimental study of the effectiveness of our method is presented in section 5 and the conclusions are described in section 6. 
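the dissimilarity criteria above (clrl and nll) both amount to comparing two eigenvalue spectra as functions over a normalized index and taking an approximate integral of their difference; a sketch under that reading, with the interpolation grid as an assumption:

```python
import numpy as np

def spectrum_dissimilarity(eigs_a, eigs_b, normalize_by_max=True, grid_size=200):
    """approximate integral between two sorted laplacian spectra over a common [0, 1] index grid."""
    a, b = np.sort(eigs_a), np.sort(eigs_b)
    if normalize_by_max:                       # clrl: divide by the largest eigenvalue
        a, b = a / a[-1], b / b[-1]            # for nll the eigenvalues are already bounded by 2
    grid = np.linspace(0.0, 1.0, grid_size)
    fa = np.interp(grid, np.linspace(0, 1, len(a)), a)
    fb = np.interp(grid, np.linspace(0, 1, len(b)), b)
    return np.trapz(np.abs(fa - fb), grid)     # approximate integral of the difference
```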
let us first provide an overview of the concepts behind spectral clustering methods in section 2. the traditional way to perform graph spectral clustering is based on a relaxation of the ratio cut (rcut) and normalized cut (ncut) graph clustering methods. (2) normalization of the rows of the aforementioned eigenvector sub-matrix to unit length prior to k-means clustering, (3) making use of more than k eigenvectors to cluster into k clusters, (4) application of a supervised learning method, instead of clustering, preferentially on a subset of the rows of the aforementioned sub-matrix, followed by employing the learned classifier on the remaining rows. improved this method by proposing the so-called landmark-based spectral clustering (lsc) method, which performs k-means on the dataset to get p cluster centers as the p representatives. the eigenvalue-based clustering algorithm (algorithm 1) takes as data a (large) set of documents d, to be processed in batches, and the number of clusters k to be obtained, and returns the clustering γ of d into k clusters: split d randomly into (small) subsets {d_0, ..., d_m}; for each d_i compute its spectral clustering γ_i into k clusters; for each cluster c_{i,j} ∈ γ_i compute the similarity matrix s_{i,j}; set γ := γ_0 as the initial clusters (γ_0 is the d_0 spectral clustering); then, for i from 1 to m and j from 1 to k, call algorithm 2 with s := s_{i,j} and s := {s_{0,1}, ...}. a drawback of this approach is that each cluster to be discovered must be a homogeneous group and must be distributed proportionally over the various batches; in fact, our experiments reported below demonstrate this. the cluster-matching step (algorithm 2) takes as data the similarity matrix s of the new cluster of documents and the set s of similarity matrices of the clusters of documents to match with, and returns c, the assigned cluster of documents: compute the laplacian l := l(s) and the laplacians for all matrices in s; compute the laplacian eigenvalues e := spectrum(l) and the eigenvalues of each laplacian from s; transform each spectrum into a function f := specfun(e).
if the shares of groups in different batches differ, then the spectral clustering algorithms would havbe in general a problem because their underlying algorithm, k-means, does not "like" clusters that differ too much in size and shape.the ideal case for such a demonstration wold be that the base clustering algorithm splits the dataset along known labels (coming from an external labeling) and then our method matches clusters from different data portions combining the clusters with the same external label.first, based on the data portion 1, our cluster-matching algorithm was "trained" (the spectra of hashtags were acquired, that is for each hashtag in the data portion 1 the spectrogram of laplacian was computed for each data subset marked with this hashtag).as a next step, each data portion was clustered by normalized spectral clustering method with unit length rows and one additional dimension (that is by a real-world spectral clustering algorithm).to investigate the ability of our method to match clusters from various data portions appropriately, we again trained our cluster-matching algorithm based on the data portion 1, the algorithm 1 was applied then to an artificial series true/pred pseudo-1 pseudo- as one can see, the method clssal is the best one, but due to overlapping nature of spectrograms of clusters pseudo-1 and pseudo-2, they are not as well matched as the cluster pseudo-3. so instead of clustering of the entire set, we could propose in this paper to cluster subsets and then recover the total cluster and then match the clusters from subsets via the laplacian spectrum. if clusters obtained from a given method yield spectra of significantly different characteristics, then the spectral analysis may be exploited for the purpose of splitting the clustering process to smaller portions of data and then matching the obtained subclusters via the outlined combinatorial laplacian sample size adjusted lambda method method. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/664.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/664.txt new file mode 100644 index 0000000000000000000000000000000000000000..6f806e5d15ec5224d7dd859ba832ba3b3359f801 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/664.txt @@ -0,0 +1 @@ +artificial intelligence (ai) has shifted the paradigm of computer science in the latest decade with deep neural networks (dnns), one of ai's illustrious instruments, demonstrating remarkable precision levels . this has led to their adoption in several safety-critical applications like autonomous driving . as dnn accelerators become more prevalent in safetycritical applications, hardware reliability of digital circuits has become increasingly more noticeable. the reliability of dnns is determined by the ability of their accelerators to function correctly in the presence of environment-related faults (soft errors, electromagnetic effects, temperature variations) or faults in the underlying hardware (manufacturing defects, process variations, aging effects) .various emerging techniques are explored to improve the computational efficiency of dnns' complex architectures, such as reducing the bit precision of parameters, which has led to the emergence of quantized neural networks (qnns). however, the effectiveness of such techniques raises concerns about the reliability of qnns, particularly in safety-critical applications. 
soft errors, a type of fault caused by charged particles colliding with transistors, can cause a logic value to flip, dramatically influencing the functionality of qnns , .throughout the literature, protecting dnns against soft errors is primarily achieved through architecture-level methods such as hardened pes or triple modular redundancy (tmr) . however, to alleviate overheads, there is a need, first, to identify the critical neurons within a neural network before applying the mentioned mitigation techniques to harden them against the faults.reliability assessment serves as the initial step towards exploiting an effective protection mechanism. fault injectionthe work is supported in part by the eu through european social fund in the frames of the "ict programme" ("ita-ioit" topic), by the estonian research council grant put prg1467 "crashless", estonian centre for research excellence excite and by estonian-french parrot project "entrusted".(fi) is a conventional method for reliability assessment that is vastly adopted for dnns. however, identifying the critical points in a qnn requires an exhaustive fi that is prohibitively complex due to their large number of parameters. to address this issue, analytical resilience assessment approaches are proposed to evaluate the reliability of dnns by analyzing them at the algorithm level .in previous works, the criticality of neurons has been identified based on their contribution scores to outputs - . hence, there is no clear resilience evaluation metric for selecting the critical neurons in the literature, and recent works extract the criticality based on the ranked scores. to tackle the drawbacks of the state-of-the-art in dnns' resilience analysis methods, a prior study has proposed a method called deepvigor , which provides vulnerability factors for all bits, neurons, and layers of dnns accurately. however, it does not consider qnns. in this work, we adapt and optimize deepvigor for identifying critical neurons in qnns. the resilience analysis enables us to design a method for correcting soft errors in the datapath of dnn accelerators.in this paper, we identify critical neurons in qnns based on a neuron vulnerability factor (nvf) obtained by fault propagation analysis through the qnns. the nvf represents the probability of misclassification due to a fault in a neuron which determines the level of criticality for neurons. to the best of our knowledge, for the first time, a protection technique based on splitting neurons' operations is proposed that modifies the network in a way that a lightweight correction unit (lcu) corrects the faults in critical neurons. the proposed method does not require redesigning the computational part of the accelerator. the accelerator executes the modified network, and only its controller needs to be aware of the critical neurons to be operated on the lcu. 
our method imposes half the overhead of tmr since it corrects faults with only one additional neuron instead of two.the contributions of this work are as follows:• developing an analytical fault resilience assessment method for qnns to identify the most critical neurons based on the conducted neuron vulnerability factor (nvf); • proposing a novel high-level modification method for qnns to improve fault resiliency by splitting the operations of critical neurons, without requiring a redesign of the computational part of the accelerator; • designing an effective lightweight correction unit (lcu) for selected critical neurons in accelerators, with low overhead (twice less than that of tmr) and high fault resiliency (similar to that of tmr).the paper is organized as follows. the proposed method for enhancing fault resilience of qnns is presented in section ii, experiments are performed and discussed in section iii, and the paper is concluded in section iv.in this paper, we identify critical neurons in qnns based on a neuron vulnerability factor (nvf) obtained by fault propagation analysis through the qnns. to the best of our knowledge, for the first time, a protection technique based on splitting neurons' operations is proposed that modifies the network in a way that a lightweight correction unit (lcu) corrects the faults in critical neurons.• developing an analytical fault resilience assessment method for qnns to identify the most critical neurons based on the conducted neuron vulnerability factor (nvf); • proposing a novel high-level modification method for qnns to improve fault resiliency by splitting the operations of critical neurons, without requiring a redesign of the computational part of the accelerator; • designing an effective lightweight correction unit (lcu) for selected critical neurons in accelerators, with low overhead (twice less than that of tmr) and high fault resiliency (similar to that of tmr).by obtaining the nvf of all neurons through the qnn, the critical neurons can be found based on the values for nvf. the idea is to split the selected neurons' operation into two neurons in the qnn at a high level and correct the critical outputs in the accelerator.splitting the critical neurons provides an opportunity for fault correction using the split neurons without redesigning the. the inputs of lcu are two split neurons representing one critical neuron, and the output is one corrected 8-bit data that will be written back to the corresponding neurons. the critical neurons regarding different thresholds for nvf are obtained to explore the number of neurons to be protected, which imposes an overhead as well. 
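the selection and splitting step can be illustrated schematically as follows; this is only one plausible reading of the text, since the actual nvf is computed analytically by fault propagation and the lcu's internal correction logic is not reproduced here — the recombination below simply sums the two half-neurons and saturates to 8 bits:

```python
import numpy as np

def select_critical(nvf, threshold=0.2):
    """indices of neurons whose vulnerability factor exceeds the chosen nvf threshold."""
    return [i for i, v in enumerate(nvf) if v > threshold]

def split_neuron(weights, x):
    """split one neuron's weighted sum into two half-neurons executed as separate operations."""
    half = len(weights) // 2
    s1 = np.dot(weights[:half], x[:half])     # first split neuron
    s2 = np.dot(weights[half:], x[half:])     # second split neuron
    return s1, s2

def lcu_combine(s1, s2):
    """lightweight correction unit sketch: recombine the halves into one 8-bit output (assumption)."""
    return int(np.clip(round(s1 + s2), -128, 127))
```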
tableipresents the number of critical neurons in different nvfs ranging from 0% (all neurons are critical) to 50% (no neuron is critical).4: qnns comparison in terms of accuracy loss (a-c), critical faults (d-f), and network size (g-i) under different levels of protection: unprotected, proposed protection, and tmr, considering different thresholds for nvf from 0% to 50%.4illustrates the experimental results of accuracy loss (a-c) and critical faults (d-f) of the proposed resilience enhancement and tmr over different nvf thresholds for the qnns.4-(g-i) shows that the qnns' size (as measured by the number of neurons in each network) using the proposed protection is remarkably smaller than that of the tmr-based protected networks, resulting in half the overhead due to employing one additional neuron for correction instead of two.the proposed neuron splitting and correction method leverages only two neurons (one additional) for correcting faults, whereas tmr requires three neurons (two additional) to perform fault detection and correction. according to tablei, to protect qnns with an nvf of 20% using tmr, quantized mlp-7, lenet-5, and alexnet require 3,822, 5,058, and 104,412 neurons, respectively, whereas the proposed method requires only 3,319, 4,871, and 103,790 neurons, respectively. a neuron splitting method is introduced to modify the network in a way that the critical neurons selected by the resilience analysis are split into two halves. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/665.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/665.txt new file mode 100644 index 0000000000000000000000000000000000000000..5d60fe7557e6c286d75315becdaafafb41eb16e8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/665.txt @@ -0,0 +1 @@ +graphs represent a myriad of real-world data from social networks, knowledge graphs, gene expression networks, etc. graph neural networks (gnns) (kipf & welling, 2017;defferrard et al., 2016;veličković et al., 2017;you et al., 2020;gao et al., 2018;chiang et al., 2019;zheng et al., 2021b;chen et al., 2018;duan et al., 2022;thekumparampil et al., 2018), which use message passing (mp) strategy at their core for aggregating knowledge from neighbors, have been widely accepted as powerful algorithmic tools for learning over graphs. although message passing provides gnns superior performance over traditional mlps, the nature of evolving massive topological structures prevents mp-based gnns from scaling to industrial-grade graph applications, and the majority of state-of-the-art gnns are only tested on small graph datasets. additionally, due to the prevalent issues such as unhealthy gradients, over-smoothening and squashing (li et al., 2018;nt & maehara, 2019;alon & yahav, 2021;jaiswal et al., 2022;liu et al., 2021) while training gnns, increasing model capacity either by deepening (stacking more layers) or widening (increasing neighborhood coverage) often lead to sub-standard performance.previously, conforming to the empirical scaling laws (kaplan et al., 2020), where the final model quality has been found to have a power-law relationship with the amount of data, model size, and compute time; several works (li et al., 2021;jaiswal et al., 2022;zhou et al., 2021b) have attempted to scale gnns (up to 1000 layers) assuming that processing larger graphs would likely benefit from more parameters. 
unlike conventional deep neural networks, exploiting scale to revamp information absorption is not straight-forward for gnns, and numerous existing works rely on architectural changes, regularization & normalization, better initialization (li et al., 2019;chen et al., 2020;li et al., 2018;liu et al., 2020;rong et al., 2020;huang et al., 2020;zhao & akoglu, 2020;zhou et al., 2021a;jaiswal et al., 2022) for improving the trainability and try to overcome astonishingly high memory footprints by mini-batch training, i.e. sampling a smaller set of nodes or partitioning large graphs (hamilton et al., 2017;chen et al., 2018;zou et al., 2019;chiang et al., 2019;zeng et al., 2019). while these methods are a step in the right direction, they do not scale well as the models become deeper or wider, since memory consumption is still dependent on the number of layers. we are interested in exploring an orthogonal step: does there exist a principled way to scale gnns capacity without deepening or widening, which can improve its performance across small and large graphs?recently, for large pre-trained models with many applications in computer vision (han et al., 2020;li et al., 2023;mao et al., 2022;jaiswal et al., 2021a;zheng et al., 2021a) and natural language processing (talmor et al., 2018;jaiswal et al., 2021b;zheng et al., 2023;liu et al., 2023;chen et al., 2023;jaiswal et al., 2023), several works (wortsman et al., 2022b;ilharco et al., 2022;juneja et al., 2022) investigate the intriguing phenomenon of "model soup", and have shown that weights of multiple dense fine-tuned models (candidate ingredients of soup) can be merged together into better generalizable solutions lying in low error basins. despite enormous attention in nlp, it is unexplored for gnns, presumably due to traditional wisdom that unlike large pre-trained transformers in nlp, current state-of-theart gnn's model capacity is under-parameterized apropos of gigantic graphs. despite some recent works (wan, 2022;lin et al., 2022) illustrating the benefits of gnns ensembling, they exhibit high computational cost during inference which worsens in the context of large graphs. motivated by the mergability of multiple fine-tuned models illustrated by model soups, in this work, we raise the research question: is it possible to leverage the fundamentals of model soups to handle the aforementioned issues of memory bottleneck and trainability, during scaling of gnns?to this end, we propose not to deepen or widen current gnns, but instead explore a data-centric perspective of dividing ginormous graph data to build independently and parallelly trained multiple comparatively weaker models without any intermediate communication of model weights, and merge them together using greedy interpolation soup procedure to achieve state-of-the-art performance. our work draws motivation from recent advancements in parallel training of pretrained language models (lms). for example, branch-merge-train (btm) (li et al., 2022) learns a set of independent expert lms specializing in different domains followed by averaging to generalize to multiple domains, and lo-fi (wortsman et al., 2022a) illustrates the futility of communication overhead in data-parallel multi-node finetuning of lms. although these techniques seem to work for large pre-trained lms, it is still unclear and unexplored if they will work for comparatively much smaller gnns in the training-from-scratch regime. 
moreover, gnns deal with graph-structured relational data unlike independent samples in lms and have their own unique set of challenges in their trainability, which makes it interesting to understand if soup phenomenon will help or deteriorate gnns performance.to our surprise, we found that independently trained gnns from scratch can be smoothly aggregated using our greedy soup interpolation procedure to create a better generalizable gnn that performs exceptionally well. it suggests that linear scaling of gnns either by deepening or widening is not necessarily the right approach towards building high-quality generalizable gnns, and model soups can be an alternative. note that, unlike the conventional model soup, we explore greedy weight interpolation for graph models, and in a wellmotivated data-centric perspective (where it matters the most) considering the exploding size of real-world graphs. more specifically, in our work, we firstly illustrate easy adaptability of model soups across multiple sota gnn architectures trained on multiple small and large graphs (unexplored till now) and secondly present a novel data-centric perspective of model soups for large graph-structured relational data within constraints of available computational resource. moreover, we extend current state-of-the-art graph sampling and partitioning techniques to facilitate the training of candidate soup ingredients which can be seamlessly combined at end for better generalization. we also compare our recipe with distributed gnn training, e.g., concurrent work (zhu et al., 2023), in section 4.1.our primary contributions can be summarized as:■ we illustrate the harmonious adaptability of model soups for graph-structured data and experimentally validate its performance benefits across multiple gnn architectures and graph scales. our experiments reveal orthogonal knowledge stored in the candidate models which can be surprisingly aggregated during soup preparation using our greedy interpolation procedure.■ we present a novel data-centric perspective of model soups tailored for gnns and carefully study the benefits of independent and parallel training of candidate models and their mergability in scenarios without the luxury of having computation resources to process entire graph, by extending state-of-the-art (sota) graph sampling and partitioning algorithms.■ our extensive experiments across multiple largescale and small-scale graphs {cora, citeseer, pubmed, flickr, reddit, ogbn-arxiv, ogbn-products} using multiple gnn architectures {gcn, jknet, dagnn, appnp, sgc, graphsage, clutergcn, graphsaint} validates the effectiveness of our approach., 2016;veličković et al.recently, for large pre-trained models with many applications in computer vision(han et al., 2022;jaiswal et al.to this end, we propose not to deepen or widen current gnns, but instead explore a data-centric perspective of dividing ginormous graph data to build independently and parallelly trained multiple comparatively weaker models without any intermediate communication of model weights, and merge them together using greedy interpolation soup procedure to achieve state-of-the-art performance. 
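a minimal sketch of a greedy weight-interpolation ("greedy soup") procedure of the kind described above, assuming pytorch-style state dicts and a hypothetical evaluate() function returning validation accuracy; uniform averaging and the acceptance rule are illustrative assumptions, not necessarily the paper's exact recipe.

# greedy soup sketch: average candidate state dicts, keeping a candidate only if it
# improves validation accuracy of the running soup.
import copy
import torch

def average_state_dicts(state_dicts):
    # uniform average of the tensors in a list of state dicts
    avg = copy.deepcopy(state_dicts[0])
    for key in avg:
        avg[key] = torch.stack([sd[key].float() for sd in state_dicts]).mean(dim=0)
    return avg

def greedy_soup(candidates, model, evaluate):
    # candidates: list of state dicts sorted by decreasing validation accuracy
    ingredients = [candidates[0]]
    model.load_state_dict(candidates[0])
    best_acc = evaluate(model)
    for cand in candidates[1:]:
        trial = average_state_dicts(ingredients + [cand])
        model.load_state_dict(trial)
        acc = evaluate(model)
        if acc >= best_acc:  # keep the ingredient only if the soup does not degrade
            ingredients.append(cand)
            best_acc = acc
    model.load_state_dict(average_state_dicts(ingredients))
    return model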
more specifically, in our work, we firstly illustrate easy adaptability of model soups across multiple sota gnn architectures trained on multiple small and large graphs (unexplored till now) and secondly present a novel data-centric perspective of model soups for large graph-structured relational data within constraints of available computational resource.■ we present a novel data-centric perspective of model soups tailored for gnns and carefully study the benefits of independent and parallel training of candidate models and their mergability in scenarios without the luxury of having computation resources to process entire graph, by extending state-of-the-art (sota) graph sampling and partitioning algorithms. in the next sections, we will explain how model soups can be easily adapted for many state-of-the-art gnns, and how can we smoothly extend the current graph sampling and partitioning to create candidate ingredients for the model soup.in comparison with several state-of-the-art gnns, we experimentally illustrate that gnn soups prepared using ingredients having exactly the same model configuration, significantly perform better without any requirement of increasing model depth or width. note that although model soups have recently attracted significant attention for large pre-trained language models, it is still unclear and unexplored if they can work for comparatively much smaller graph neural networks trained from scratch which learn from graph-structured data with relational properties unlike nlp and vision datasets having independent training samples. model soups provide an orthogonal way of increasing model capacity without deepening or widening gnns which brings many unwanted trainability issues in gnns. it can be clearly observed that our model soup prepared by combining the strength of 50 candidate ingredients of 2-layer gcns can significantly outperform all these fancy methods, bolstering our claim that model explosion by deepening and widening is necessarily not the only and right direction for building high-quality generalizable gnns.in this section, we provide experimental results for preparing data-centric model soup in scenarios when we do not have the luxury of resources to perform message passing on the entire graph, by leveraging the sota graph sampling (node, edge, layer) and partitioning mechanisms to prepare the candidate ingredients of the soup. souping at intervals during training) benefit the performance of the final model soup? to this end, we prepared a data partition model soup of gcns using ogbn-arxiv dataset, where we executed souping across candidate ingredients at regular intervals of 100 epochs during training. recently,(li et al. we present a data-centric perspective of model soups to build powerful gnns by dividing giant graph data to build independently and parallelly trained multiple comparatively weaker gnns without any intermediate communication, and combining their strength using a greedy interpolation soup procedure to achieve state-of-the-art performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/666.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/666.txt new file mode 100644 index 0000000000000000000000000000000000000000..093f2bbef5542a9fa33d8f9a0288813e22eccdfe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/666.txt @@ -0,0 +1 @@ +alzheimer's disease which accounts for 60-80% of all dementias , remains a pernicious threat to older people. 
as of 2017, it is estimated that over 50 million people globally are living with dementia . unlike other chronic diseases such as cancer and heart disease, dementia can require intensive care for many years after diagnosis. as a result, the cost of care is significantly higher as sufferers increasingly require round-the-clock care. furthermore, the burden of caring for someone with dementia can negatively impact the wider families wellbeing, with associated detrimental effects on mental and physical health. the cost of long-term care also puts a disproportionate financial burden onto families and the wider community of those with a diagnosis. it is estimated that the cost of caring for those with dementia currently stands at 1% of global gdp . furthermore, 60% of those with dementia live in lowand middle-income countries and, as these countries transition into developed economies, with the associated lengthening life expectancy and falling birth rate, the strain on these nations to care for dementia sufferers will become financially untenable.in lieu of effective treatment for alzheimer's disease , research has turned towards the possibility of early detection of alzheimer's biomarkers before the onset of symptoms . work in this area has noted that accurate prediction of the risk of developing alzheimer's disease and subsequent early interventions aimed at delaying the onset of symptoms would ease the burden of suffering as well as financial costs of care for the patient and their family. even if no intervention were available, a prewarning of the risk of dementia would allow patients and carers to prepare for the possibility of cognitive decline, and thus may help reduce the psychological and practical effects of diagnosis. indeed, research has suggested that the structural brain changes that precipitate alzheimer's symptoms may begin several years before the onset of notable symptoms , and this may provide an opportunity to develop techniques for assessing dementia risk.there have been several attempts to create predictive models for alzheimer's disease. mathotaarachchiet et al. used the same repository (adni) as the present paper to build support vector machine (svm) and logistic regression models with pet neuroimages for svm and regularized logistic regression, achieving an auc of 0.91 . casanova et al. investigated a range of models as they explored genetic and non-genetics data as predictors of cognitive decline. the best model found in that study was random forest with an accuracy of 78% . the work of stamate et al used several different machine learning techniques in the exploration of the adni dataset, with a view to assessing the predictive power of variables found in the dataset. this work utilised, among others, the gaussian process technique which has, hitherto, seldom been explored in this context . the present paper also explores several techniques including gaussian process. this paper emulates this binary classification for cognitively normal subjects, but also extends it for those who were diagnosed with mild cognitive impairment at baseline. we seek to utilise statistical learning as a mechanism to predict those who would suffer cognitive deterioration resulting in receiving a diagnosis of mild cognitive impairment or alzheimer's disease. 
to this end, we will split the well-known dataset from the alzheimer's disease neuroimaging initiative (adni) into two separate groups at baseline: those who were cognitively normal (cn), and those who received a diagnosis of mild cognitive impairment (mci). we then predict what diagnosis each subject received upon their final visit to the adni test sites. we tune and test six different models on these datasets separately. as discussed, this study collapses the multinomial diagnosis received at final visit into a binomial progression outcome of whether there is a deterioration, i.e., received either "no deterioration": the same diagnosis (or a more favourable one) or deteriorated and consequently received a worse diagnosis at final visit. such a separation would have potential clinical benefits, as it would allow a greater understanding of the mechanisms that underpin deterioration for the two groups. an accurate prediction would serve as both a predictive and inferential tool as we may be afforded greater understanding of potentially modifiable risk factors that, if interventions were implemented, would allow delay or prevention of the onset of cognitive decline. thus, this paper aims to join an existing body of work which specifically looks at dementia prediction through the lens of deterioration across a longitudinal data collection period.the goal of this study is therefore to predict, using predictors derived at baseline, those who would go on to receive a worse diagnosis upon their last visit to a testing site. work in this area has noted that accurate prediction of the risk of developing alzheimer's disease and subsequent early interventions aimed at delaying the onset of symptoms would ease the burden of suffering as well as financial costs of care for the patient and their family. even if no intervention were available, a prewarning of the risk of dementia would allow patients and carers to prepare for the possibility of cognitive decline, and thus may help reduce the psychological and practical effects of diagnosis. indeed, research has suggested that the structural brain changes that precipitate alzheimer's symptoms may begin several years before the onset of notable symptoms, and this may provide an opportunity to develop techniques for assessing dementia risk. mathotaarachchiet et al. casanova et al. this work utilised, among others, the gaussian process technique which has, hitherto, seldom been explored in this context. the present paper also explores several techniques including gaussian process. we then predict what diagnosis each subject received upon their final visit to the adni test sites. as discussed, this study collapses the multinomial diagnosis received at final visit into a binomial progression outcome of whether there is a deterioration, i.the goal of this study is therefore to predict, using predictors derived at baseline, those who would go on to receive a worse diagnosis upon their last visit to a testing site. 
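a small sketch of the label collapsing described above, mapping baseline and final-visit diagnoses to a binary deterioration outcome; the column names and the severity ordering are assumptions for illustration, not the actual adni field names.

# collapse the multinomial final-visit diagnosis into a binary "deteriorated" label
import pandas as pd

severity = {"CN": 0, "MCI": 1, "AD": 2}  # assumed ordering: CN < MCI < AD

def add_deterioration_label(df: pd.DataFrame) -> pd.DataFrame:
    df = df.copy()
    df["deteriorated"] = (
        df["dx_final"].map(severity) > df["dx_baseline"].map(severity)
    ).astype(int)
    return df

# split into the two baseline groups described above (hypothetical column names):
# cn_b = df[df["dx_baseline"] == "CN"]; mci_b = df[df["dx_baseline"] == "MCI"]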
this package combines several variables from the different adni datasets and studies (adni1, adnigo, adni2, and adni3).the goal of this study was to predict, using predictors derived at baseline, those who would go on to receive a worse diagnosis upon their last visit to a testing site.those who received a diagnosis of cn at baseline were in group one (cn_b group), with the goal of predicting whether they received the same diagnosis at their last visit or received a worse diagnosis.those who received a diagnosis of mci at baseline were in the second group (mci_b group), with the goal of predicting whether they received the same diagnosis/received a more favourable diagnosis at their last visit or received a worse diagnosis.those, having received a diagnosis of mci at baseline, received a diagnosis of ad at their last visit. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/667.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/667.txt new file mode 100644 index 0000000000000000000000000000000000000000..c5967a69f267319b54fb159f979c87d99e4d2008 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/667.txt @@ -0,0 +1 @@ +dementia, of which approximately two-thirds constitute alzheimer's disease (ad) cases , is associated with a progressive decline of brain functioning, leading to a significant loss of autonomy, reduced quality of life and a shortened life expectancy . accumulated evidence indicates that individuals who have dementia have an excess mortality and a shorter life expectancy than individuals without this disease . in england, dementia is now reported as being the leading cause of death for women, having overtaken cancer and cardiovascular disease .the development of prognostic prediction models, built on combined effects of thoroughly validated predictors, using machine learning tools, can be used to forecast the probability of dementia developing within an individual. it is hoped that the availability of such prediction models will facilitate more rapid identification of individuals who are at a higher risk of dementia before the full illness onset . this, in turn, would reduce time to treatment initiation, subsequently minimising the social and functional disability and thereby improving the quality of life for many people affected by these disorders. identifying individuals at risk of developing dementia would allow the recruiting of patients at high risk for future clinical trials, thereby catalysing the assessment of new treatment or prevention programmes. furthermore, identifying modifiable risk factors would allow the development of new prevention programmes. for example, there are already some indications that being physically active, staying mentally and socially active, and controlling high blood pressure can potentially deter onset of dementia in the general population .proposed an efficient prediction modelling approach to the risk of dementia based mainly on the gradient boosting machines method, using a large dataset from cprd (clinical practice research datalink) repository with data from primary care practices across uk, and achieving an auc performance of 0. in other words, one would seek to utilise the well-established survival techniques found in cox proportional hazards or similar and build upon these frequentist approaches using modified machine learning tools. 
such an approach would preserve the potential information contained within a temporal outcome, and associated dichotomy of dementia versus no dementia whilst also strengthening the predictive power of the existing frequentist approach by overlaying modified machine learning techniques.despite the scarcity of survival modelling papers in relation to dementia prediction, recent examples have shown promise in attempting to outperform the classic cox proportional hazard model, using survival machine learning and survival deep learning on clinical datasets. a pertinent study within the current field of interest iswhose authors sought to look at survival machine learning performance when applied to datasets designed for dementia investigation.in this work we propose an approach to predicting the time to a dementia diagnosis, based on survival machine learning techniques such as survival random forests and survival elastic net, and on a conventional statistical method such as cox proportional hazard model.a simple cox proportional hazard model (hereafter denoted simply by cox) was constructed, which served as the baseline for comparison with two survival machine learning models:. cox penalised regression using elastic net (hereafter denoted simply by elasticnet), which is similar to the base cox proportional hazard model but with elastic net regularisation, allows the model to shrink the coefficients of less important variables, and even to make them equal to 0, depending on the shrinkage strength and the proportion of the lasso component in this model.1; (c) the standard deviations (provided in brackets in table2) for cindex performances on the test sets for survival random forest and survival elastic net are small, and about 3 times smaller than the standard deviations for cox, which means that the machine learning models are very stable, and by far more stable than the conventional statistical model.to our knowledge, this paper is the first attempt to develop, evaluate and validate a prediction model for estimating an individual risk of dementia onset in the elsa dataset using survival machine learning. our results showed that the machine learning models herein were able to outperform the classic cox model, with the best performing machine learning technique being the survival random forest followed closely by survival elastic net, as per test results in the nested cross-validation included in table1, and test results in the monte carlo validation included in table2, and figure1. as mentioned previously, the addition of machine learning paradigms to the classic frequentist survival approaches allows for more variables to be explored than would be possible in a standard cox proportional hazard model. moreover, as this study demonstrates, the best survival model based on random forests not only improved the predictive accuracy and stability but also provided a useful mechanism to infer the variables' importance, which concords with clinical interpretations of the role of the variables in dementia onset. further, the elsa dataset is a centre-based data collection study and, although extensive and varied data collection was carried out to try and account for confounding variables, it is possible that other predictors, unmeasured by the data collection procedure, could have an impact on model performance. 
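a minimal sketch of the three model families compared above (cox ph, elastic-net cox, and survival random forest), evaluated with the c-index, assuming the scikit-survival package; the arrays are synthetic placeholders rather than the elsa data.

# fit cox, elastic-net cox and a survival random forest, then compare c-index
import numpy as np
from sksurv.util import Surv
from sksurv.linear_model import CoxPHSurvivalAnalysis, CoxnetSurvivalAnalysis
from sksurv.ensemble import RandomSurvivalForest
from sksurv.metrics import concordance_index_censored

rng = np.random.default_rng(0)
X = rng.normal(size=(200, 10))                     # placeholder covariates
time = rng.exponential(scale=10, size=200)         # follow-up time
event = rng.integers(0, 2, size=200).astype(bool)  # True = dementia diagnosed (uncensored)
y = Surv.from_arrays(event=event, time=time)       # structured (event, time) array

models = {
    "cox": CoxPHSurvivalAnalysis(),
    "elasticnet": CoxnetSurvivalAnalysis(l1_ratio=0.5),     # mix of lasso and ridge penalties
    "survival_rf": RandomSurvivalForest(n_estimators=200, random_state=0),
}
for name, model in models.items():
    model.fit(X, y)
    risk = model.predict(X)                        # higher score = higher predicted risk
    cindex = concordance_index_censored(event, time, risk)[0]
    print(name, round(cindex, 3))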
another future work will investigate the applicability of an adapted version of the survival machine learning approach we developed here, to the prediction of dementia risk using routine primary care records such as cprd, by extending the machine learning based framework we introduced in. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/668.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/668.txt new file mode 100644 index 0000000000000000000000000000000000000000..dbbc5b20c7237da759ed4f1c515ac2f0e48c5309 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/668.txt @@ -0,0 +1 @@ +smart grids incorporate information technology systems in sensing, processing, intelligence, and control of the power systems for robust transmission and distribution of electricity . while smart measurement devices and coupled communication networks bring many benefits and robustness to the power system, adversaries may alter the measurements and the other system parameters by attacking those nodes. cyberattacks on smart grids may cause significant problems in the operations and consequently, may interrupt the delivery of electricity in the system and result in economic losses. the false data injection attacks (fdias) pose a critical threat to the smart grids, and are defined as the malicious activities carried out by an adversary via the injection of forged data into the information and control systems of smart grids.recently, machine learning-based fdia detection methods are emerging to efficiently detect the cyberattacks on smart grids. the complex rapidly-changing nature of smart grids makes it harder to efficiently and successfully detect the fdias using the conventional rule-based or deterministic approaches. machine learning algorithms enable efficient and robust detection and localization of the fdias by enabling the analysis of large volume data, identifying the complex hidden patterns from the historical data.recently, federated learning algorithms provide an efficient framework for training machine learning models on the edge devices. the federated averaging (fedavg) algorithm is proposed in . in fedavg, the local parameter updates of the local models are aggregated in the central server by taking the weighted average of the client parameter updates. however, the fedavg algorithm performs poorly for the nonindependent and identically distributed (i.i.d.) data. there have been attempts to address the problems in the federated learning environments. the use of federated versions of the adaptive optimizers, such as adagrad, adam, and yogi is examined in . it was shown that using the adaptive optimizers speeds up the training of the federated deep learning models and increases the performance. federated learning has a wide range of applications such as analysis of the mobile user behavior, learning pedestrian patterns for autonomous vehicles, predicting and detecting health health-related events from wearable devices .federated learning is a promising solution for detecting the fdias on smart grids since it avails distributed training of the machine learning-based detectors. federated learning solves the problem of cooperation between the electricity providers. due to the confidentiality of the electricity power data, different electricity providers may not be willing to share their own data and it prevents collaborative training of a detection model. 
federated learning enables the training of distributed local models without sharing the sensitive client data by sharing only the weights of the machine learning models. additionally, federated learning provides efficient training of the machine learning models by moving the computational burden from a centralized machine to distributed nodes. federated learning enables the training of distributed local models without sharing the sensitive client data by sharing only the weights of the machine learning models. additionally, federated learning provides efficient training of the machine learning models by moving the computational burden from a centralized machine to distributed nodes. in this paper, we propose a novel hybrid deep neural network architecture consisting of graph neural network layers as well as lstm layers in a federated learning environment for detection and localization of fdias.federated learning is a paradigm that enables joint training of a machine learning model using the distributed data from different client devices. by using a federated learning algorithm, we train a separate local model for each client and exchange only the required parameters between each client and a centralized coordination server. as a deep neural network model, we use stacked lstm and gcn layers in order to efficiently capture both the temporal and spatial patterns in the power data. we denote the graph modeling the power grid by g = (v, e), where v represents the set of vertices (nodes) that corresponds to the set of buses in the power grid, and e stands for the set of edges that identify the power lines in the power grid.in order to benchmark the performance of the proposed method, we implemented and compared the results with the federated transformer, federated lstm, and federated mlp algorithms. the proposed federated graph learning algorithm can be trained on any partition of the graph even with different numbers of buses in each client while enabling the use of the gcn architecture for training in a federated scheme. this is in line with the expected results because the federated graph learning algorithm is able to capture both the temporal and spatial patterns in the data. while the average f1-score, detection rate, and false alarm rate metrics are able to validate the performance of the fdia detection algorithms, we also need to evaluate the performance of the algorithms on each bus in the network. the federated graph learning algorithm outperforms the other algorithms in terms of the mean value and standard deviation of the f1-score because it is able to capture both temporal and spatial patterns in the data. federated transformer and federated lstm algorithms follow the federated graph learning algorithm with almost similar means because they can capture the temporal patterns in the data.this paper proposed a hybrid neural network architecture consisting of lstm and gcn layers for federated learning based fdia detection. the use of gcn layers combined with the fedgraph algorithm allows federated training on any partition of the power networks unlike the existing algorithms in the literature, which can be used only at the node level. 
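a minimal sketch of the fedavg-style aggregation step described above: the server takes a weighted average of client parameters, weighting each client by its local sample count. the paper's own detector stacks gcn and lstm layers; only the server-side averaging is sketched here, with illustrative parameter names.

# server-side fedavg aggregation over client parameter dictionaries
import numpy as np

def fedavg(client_params, client_sizes):
    total = float(sum(client_sizes))
    global_params = {}
    for key in client_params[0]:
        global_params[key] = sum(
            (n / total) * params[key] for params, n in zip(client_params, client_sizes)
        )
    return global_params

# example: two clients with different amounts of local data
clients = [{"w": np.ones(3), "b": np.zeros(1)}, {"w": 3 * np.ones(3), "b": np.ones(1)}]
print(fedavg(clients, client_sizes=[100, 300]))  # w -> [2.5, 2.5, 2.5], b -> [0.75]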
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/669.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/669.txt new file mode 100644 index 0000000000000000000000000000000000000000..9f911edbb70709f93d672733d4962e907a65f6fe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/669.txt @@ -0,0 +1 @@ +one of the most pressing challenges for governments and healthcare systems is the rising number of people with dementia. more than 55 million people live with dementia worldwide, and there are nearly 10 million new cases yearly, with 60-70% of all dementias being of alzheimer's disease type (ad) . recently, attention has turned to machine learning (ml) as a tool for improving the predictive ability of clinical models concerning ad and addressing clinical challenges more widely. however, of the hundreds of clinical ml models that appear in scientific publications each year, few have thus far been successfully embedded into existing clinical practice . one of the reasons for this is that most models only provide predictions for disease cases without quantifying the probability of disease occurrence. this limitation restricts clinicians' ability to accurately measure and communicate the probability of disease development over time with the patient. . also, in the context of predicting the progression of ad in particular, many studies that use ml methods employ a classification approach, whereby the outcome to be predicted is either a binomial or multinomial outcome within a specific timeframe . the datasets are often derived from longitudinal studies, whereby clinical marker data is collected from participants over months and years . thus, such data has a temporal element inherent to the methodology employed in the collection process. however, standard classification ml cannot consider the predictive power of time in conjunction with other predictors. furthermore, classification models cannot handle drop-outs which are common in longitudinal studies.with this in mind, a newly emerging field of exploration seeks to build on traditional time-dependent statistical models, such as survival analysis, to develop machine learning models which can predict the time-dependent risk of developing ad and go beyond simple classification. survival analysis is a statistical method that aims to predict the risk of an event's occurrence, such as death or the emergence of a disease, as a function of time. a key aspect of survival analysis is the presence of censored data, indicating that the event of interest has not occurred while the subject was part of the study. the presence of censored data requires the use of specialised techniques. traditionally, the cox proportional hazards model has been the most widely used technique for analysing data containing also censored records. however, the cox model typically works well for small data sets and does not scale well to high dimensions . ml techniques that inherently handle high-dimensional data have been adapted to handle censored data, allowing ml to offer a more flexible alternative for analysing high-dimensional, censored, heterogeneous data . furthermore, the ability to predict not only a binary or multinomial outcome but also the risk of such outcomes occurring at different timepoints provides clinicians and researchers with more information for the benefit of research and patients.this work has several aims. 
first, it aims to build upon existing work demonstrating the utility of survival-based ml techniques in predicting the risk of deterioration at different time points in ad using the alzheimer's disease neuroimaging initiative (adni) database. secondly, it aims to explore the predictive power of these techniques once the more physically intrusive biomarkers available in the dataset are removed. these predictors, such as abeta, tau and ptau, which are established biomarkers for dementia, are collected via painful lumbar puncture procedures to sample cerebrospinal fluid (csf). recently efforts have been made to investigate alternative biomarkers such as blood metabolites which, in some studies, proved to have comparable predictive power to the established csf-biomarkers .the rest of the paper will be ordered as follows. first, it will review existing literature on survival-based ml as applied to clinical questions in general and ad prediction in particular. next, the problem of interest will be defined. then the proposed methodology will be outlined. before the results are presented, the study design of the dataset will be described, including predictors and diagnostic criteria. a discussion of the implications of these results will then follow.with this in mind, a newly emerging field of exploration seeks to build on traditional time-dependent statistical models, such as survival analysis, to develop machine learning models which can predict the time-dependent risk of developing ad and go beyond simple classification. ml techniques that inherently handle high-dimensional data have been adapted to handle censored data, allowing ml to offer a more flexible alternative for analysing high-dimensional, censored, heterogeneous data. first, it aims to build upon existing work demonstrating the utility of survival-based ml techniques in predicting the risk of deterioration at different time points in ad using the alzheimer's disease neuroimaging initiative (adni) database.systematically compared the performance and stability of ml algorithms and feature selection methods suitable for high-dimensional, heterogeneous, censored clinical data, in the context of cognitive ageing and ad, by predicting the risk of ad over time. another paper that explores the clinical utility of survival modelling within the domain of ad research comes from, which looked at the interaction between socioeconomic features and polygenic hazard scores on the timing of alzheimer's diagnosis using cox proportional hazard survival analysis.on the other hand,found the standard cox regression and two ml models (survival random forest and extreme gradient boosting) had comparable predictive accuracy across three different performance metrics, when applied to the prospective registry for persons with memory symptoms (prompt) dataset.despite the scarcity of survival modelling papers in relation to ad prediction, recent examples have shown promise in attempting to outperform the classic cox proportional hazard model, using survival ml and survival neural networks/ deep learning on clinical datasets. furthermore, the study aims to build models to predict the risk of receiving a worse diagnosis within the data collection period using survival-based ml. this paper explored three algorithms: cox proportional hazard model (cox ph) -the cox model is expressed by the hazard function, which is the risk of an event occurring at time as follows:. 
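for reference, the standard form of the cox proportional hazards model referred to above is restated here (a textbook result, not taken from the paper):

h(t \mid x_i) = h_0(t)\, \exp\big(\beta_1 x_{i1} + \beta_2 x_{i2} + \dots + \beta_p x_{ip}\big) = h_0(t)\, \exp(\beta^{\top} x_i)

where h_0(t) is the baseline hazard and \beta is the coefficient vector; the elastic-net variant mentioned above adds a combined l1/l2 penalty on \beta.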
the best-performing model for the cn group with csf-derived biomarkers included was srf, followed by snn, followed by cox ph model. when considering the c-index, the best-performing model for the mci group, with csf-derived biomarkers included, was srf, followed by cox ph model, followed by snn. firstly, we demonstrated good predictive power for srf with very good discrimination and excellent calibration, which was superior to both the standard cox ph model and the snn model. good discrimination and calibration are essential in survival ml models to obtain accurate risk estimations at specific time periods of interest, which is not possible with traditional classification ml models. however, we should note thatdid not provide a comparison between the survival neural network models used and either a standard cox ph model or any other survival ml algorithm.snn had worse stability than the srf and cox ph models, as measured by the standard deviations of the c-index and calibration scores for these models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/67.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/67.txt new file mode 100644 index 0000000000000000000000000000000000000000..af056a67830838c5fa4ba3999120d8e8181d11c5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/67.txt @@ -0,0 +1 @@ +machine learning has led to a significant improvement in state-of-the-art performance in multiple domains, including image recognition, natural language processing and predictive analytics. however, as machine learning systems become more powerful, they also become more complex and opaque, making it harder for humans to understand how they work and what motivates their predictions. this lack of interpretability hampers wider adoption of machine learning systems, as there is a natural suspicion of predictions that lack an understandable rationale. as a result, there has been a surge in research activity directed at explaining ai models and interpreting their output .much of the explainable ai (xai) literature has focused on quantifying the significance of features for particular predictions or models. this is understandable, as features are the driving force behind model predictions. popular feature-importance measures include shap and lime . however, it is worthwhile reminding ourselves that the data matrix is two-dimensional, comprising of both features and instances. we present a novel class of instance-based explanations that complements existing featurebased explanations in the context of linear and tree-based regression models.we show that any linear or tree-based regression prediction is a linear combination of the target instances in the training data. importantly, this also applies to widely used treebased ensemble methods, such as random forests and gradient boosting machines. the weights employed in these linear combination representations are referred to as axil weights (additive explanations with instance loadings). since axil weights are additive across instances it can offer both local and global explanations of model predictions. to the best of our knowledge, our representation of linear and tree-based regression predictions as linear combinations of training data targets is novel.axil weights are a natural way to express a regression model prediction in terms of training data targets. 
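a minimal numpy check of the ols case of this claim: each fitted value is a weighted sum of the training targets, with the weights given by a row of the hat matrix. this only illustrates the linear-combination property; it is not the axil algorithm itself, and the data below are placeholders.

# ols fitted values as linear combinations of the training targets y,
# via the hat matrix H = X (X^T X)^{-1} X^T
import numpy as np

rng = np.random.default_rng(0)
X = np.column_stack([np.ones(50), rng.normal(size=(50, 3))])  # design matrix with intercept
y = rng.normal(size=50)                                       # training targets

H = X @ np.linalg.inv(X.T @ X) @ X.T   # rows of H are per-instance weights on y
y_hat = H @ y                          # predictions as linear combinations of y

beta = np.linalg.lstsq(X, y, rcond=None)[0]
assert np.allclose(y_hat, X @ beta)    # matches the usual coefficient-based prediction
print(H[0])                            # instance-level loadings behind the first prediction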
specifically, for some regression model prediction ŷi and training data instances y train, the axil weights k i satisfy the following linear representation: ŷi = k i ⊤ y train. by way of analogy, axil weights allow for a model prediction to be expressed as a linear combination of instances in the same way that an ols model prediction is expressed as a linear combination of features. for the axil weights to be meaningful, we require that they are derived directly by reference to the internal model mechanism. 1. predictions from both linear regression models and tree regression models are linear combinations of training data targets. in the context of a dataset (x, y) the ols prediction ŷi for instance i can be written as a linear combination of the training targets (see section 8.1 for a proof outline). for tree predictions we can write ŷi = k tree i ⊤ y train. we show that any linear or tree-based regression prediction is a linear combination of the target instances in the training data. to the best of our knowledge, our representation of linear and tree-based regression predictions as linear combinations of training data targets is novel. specifically, for some regression model prediction ŷi and training data instances y train, the axil weights k i satisfy the following linear representation: ŷi = k i ⊤ y train. by way of analogy, axil weights allow for a model prediction to be expressed as a linear combination of instances in the same way that an ols model prediction is expressed as a linear combination of features. 1. predictions from both linear regression models and tree regression models are linear combinations of training data targets. as a result, gbm predictions are essentially compositions of tree residual functions, which makes it challenging to express the final gbm prediction as a linear combination of the original training data targets. by contrast, the aim of our work is to aid in the interpretability of regression predictions by representing each prediction as a linear combination of training data instances via axil weights. calculating axil weights involves two related algorithms; one for fitting to the training data, and one for calculating axil weights given some data, which may be either the original training data or new (test) data or indeed some combination of both. by lemma 1 (tree predictions are linear) ŷi is a linear combination of y and q i is a linear combination of y by (5). (tree n is linear) the regression tree prediction t n is a linear combination of y for n ∈ n+. then t 1 is a regression tree prediction trained on y (by definition 1) and by lemma 1 (tree predictions are linear) t 1 is a linear combination of y as required. if t 1 , ..., t k-1 are linear combinations of y, then t k is also a linear combination of y. since t 1 , ... \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/670.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/670.txt new file mode 100644 index 0000000000000000000000000000000000000000..46ef1d2078ff6173ac460d9ceebb315c81bedea4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/670.txt @@ -0,0 +1 @@ +the broad application of reinforcement learning (rl) faces a significant challenge, namely, the design of appropriate reward functions that align with specific mission objectives in given environments. to mitigate this challenge, preference-based rl (pbrl) (see, for example, (christiano et al., 2017)) has emerged as a promising paradigm, leveraging human feedback to eliminate the need for manual reward function design.
however, real-world missions often entail multiple objectives and the consideration of preferences among diverse users, necessitating a balanced approach. existing pbrl methods primarily focus on maximizing a single performance metric, neglecting the crucial aspect of equity or fairness, e.g., (stiennon et al., 2020;wu et al., 2021;lee et al., 2021). consequently, the lack of fairness considerations poses a barrier to the widespread deployment of pbrl for systems affecting multiple end-users when it is 1 unmanned systems lab, department of electrical and computer engineering, the university of texas at san antonio, san antonio, tx, 78249, usa. correspondence to: umer siddique .the many facets of preference learning workshop at the international conference on machine learning (icml), honolulu, hawaii, usa, 2023. copyright 2023 by the author(s). critical to address fairness among these users.to address this critical gap, the development of methods enabling fairness in pbrl becomes imperative. while recent advancements have explored fairness in rl, albeit not within the pbrl framework, notable contributions in, e.g., (weng, 2019;siddique et al., 2020;fan et al., 2022), have employed welfare functions to ensure fairness in the singleagent rl setting. furthermore, the work in (zimmer et al., 2021) considered fairness in a multi-agent rl setting. this paper proposes an approach that builds upon existing studies on fairness, focusing on a pbrl setting. in particular, rather than relying on known ground truth rewards, our method involves learning fair policies by incorporating fairness directly into the pbrl paradigm, thereby eliminating the need for hand-crafted reward functions. by doing so, we aim to address fairness in pbrl without compromising on its advantages. owing to the unavailability of the reward function, many pbrl algorithms learn an estimated reward function model, r(•, •) : s × a → r.2, we extend previous rl formulations by redefining the estimated reward function as a vector function, denoted as r : s × a → r k , where k denotes the number of objectives., 2017).where π θ represents a policy parameterized by θ, ϕ w denotes a welfare function with fixed weights that requires optimization, and j (π θ ) represents the vectorial objective function that yields the utilities (i.note that optimizing the welfare function defined in (3) is an effective way to address fairness because the weights w are selected such that a higher weight will be assigned for objectives with lower utility values, which will ensure that all objectives are treated fairly than the cases when the weights are assigned without considering the utility values.our procedure to optimize the welfare function is an iterative process that integrates the policy update step and reward update step (via the collection of more preferences for reward function estimation). as a state-of-the-art policy gradient method, we adopt the proximal policy optimization (ppo) algorithm(schulman et al.it is important to note that the key distinction between our proposed approach and pbrl in(christiano et al.in this domain, our primary objective is to assess the effectiveness of our proposed method in optimizing the welfare function, denoted as ϕ w . to evaluate this, we conduct a comparative analysis of welfare scores between three approaches: ppo, pbrl, and our proposed fpbrl method within this domain. 
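one concrete welfare function matching the description above (larger weights assigned to worse-off objectives) is the generalized gini social welfare used in earlier fair-rl work such as (siddique et al., 2020); the decay-based weight vector below is an illustrative assumption, not necessarily the weights used in this paper.

# generalized gini welfare sketch: sort utilities ascending and apply decreasing weights,
# so the worst-off objective dominates the score
import numpy as np

def gini_welfare(utilities, decay=0.5):
    u = np.sort(np.asarray(utilities, dtype=float))   # worst objective first
    w = decay ** np.arange(len(u))                     # decreasing weights 1, 0.5, 0.25, ...
    w = w / w.sum()
    return float(np.dot(w, u))

print(gini_welfare([10.0, 2.0, 6.0]))  # dominated by the low-utility objective (2.0)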
our results reveal that fpbrl achieves the highest welfare score, thereby demonstrating its ability to identify fairer solutions compared to ppo and the standard pbrl method. encouragingly, our proposed method achieved the highest welfare score, signifying a fairer solution when compared to both ppo and the standard pbrl method. notably, only fpbrl successfully maximizes the minimum objective utility, whereas ppo and the pbrl method yield the lowest minimum objective values, reflecting a prioritization of maximizing cumulative rewards at the expense of fairness considerations. this noteworthy result underscores the efficacy of fpbrl in optimizing the welfare function, which is cru-cial for ensuring fair and equitable treatment of the diverse objectives at hand. our findings underscore the effectiveness of our proposed method, fpbrl, in optimizing the welfare function and achieving fairness in the presence of multiple objectives. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/671.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/671.txt new file mode 100644 index 0000000000000000000000000000000000000000..db91a5fb6f26194571404105c75402a10ccf26d9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/671.txt @@ -0,0 +1 @@ +figure 1(a) depicts a scenario where the evolution of bit error rate (ber) over time distinguishes between normal behavior and the presence of anomalies, while fig. 1(b) provides an overview of the proposed anomaly detection scheme, which consists of two phases; that is, soft-failure evolution modeling, followed by real-time anomaly detection. first, an ed-lstm model is trained/tested over a dataset that contains sequences of ber information monitored at the coherent receivers of an optical node collected using software-defined networking (sdn) controllers. specifically, the ed-lstm learns a nonlinear function f (.), that, given the past and present observations, where s is the future time step, k is the number of past and present observations, and t is the current time instant. after model training and testing, the ed-lstm model can be used on-line over unseen (new) x ′ sequences to: (i) predict the time that a soft-failure is expected to turn into a hard-failure , and (ii) perform detection of unexpected anomalies. note that on-line anomaly detection also serves to "clean" (i.e., pre-process) new datasets, that are subsequently used for updating (i.e., re-training) the ed-lstm model in the presence of non-stationary ber evolution. for anomaly detection, that this work focuses on, a hypothesis testing scheme is proposed that combines statistical metrics with the on-line predictions of the ed-lstm model to identify in real-time deviations in the monitored ber data. specifically, a real-time anomaly detection scheme is proposed, that is specifically designed to detect unexpected signal deviations (i.specifically, a statistical hypothesis testing scheme is proposed that is based on leveraging information regarding the predictable types of soft-failures. 
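the paper's exact statistical test is not restated here; the following is a generic residual-based sketch of the idea above, in which an observation is flagged only when it deviates from the ed-lstm forecast of the predictable soft-failure evolution. the threshold, error statistics, and numbers are placeholders.

# flag an anomaly when the standardized forecast residual exceeds a threshold
import numpy as np

def detect_anomaly(observed_ber, predicted_ber, past_errors, z_threshold=3.0):
    mu, sigma = np.mean(past_errors), np.std(past_errors) + 1e-12
    z = abs((observed_ber - predicted_ber) - mu) / sigma
    return z > z_threshold  # True -> deviation not explained by predictable soft-failure drift

past_errors = np.random.default_rng(0).normal(0, 1e-4, size=500)  # historical forecast errors
print(detect_anomaly(observed_ber=3e-3, predicted_ber=1e-3, past_errors=past_errors))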
in fact, it is shown that when the latter information is leveraged in the statistical hypothesis testing scheme, anomaly detection accuracy is increased, compared to the case where this information is not considered; an indicator of the importance of considering expected (i.the major limitation of existing sl-and ul-based anomaly detection schemes is, however, that they do not consider how predictable soft-failures evolve over time, leading to the detection of premature (i.this work proposes a statistical hypothesis testing scheme that leverages predictable soft-failure evolution over a long future horizon, to effectively detect unpredictable types of soft-failures.1(b) provides an overview of the proposed anomaly detection scheme, which consists of two phases; that is, soft-failure evolution modeling, followed by real-time anomaly detection. after model training and testing, the ed-lstm model can be used on-line over unseen (new) x ′ sequences to: (i) predict the time that a soft-failure is expected to turn into a hard-failure, and (ii) perform detection of unexpected anomalies. for anomaly detection, that this work focuses on, a hypothesis testing scheme is proposed that combines statistical metrics with the on-line predictions of the ed-lstm model to identify in real-time deviations in the monitored ber data.given a sequence of ber samples b = , ultimately forming a ber distribution over a specific time interval, the proposed approach aims to determine if a ber observation b τ deviates from its historical and future (i.for modeling predictable soft-failure evolution, the ed-lstm architecture, ed-lstm hyperparameters, physical layer model (plm) for elastic optical networks, and dataset analytically described inare utilized., lightpath(6,4)traversing links (6, 5) and (5, 4)), with the edfas spaced 100 km apart.to further assess the effectiveness of the proposed anomaly detection model, additional metrics are examined, namely the precision (p ), recall (r), and f -measure as given below:.3, the precision, recall, and f -measure metrics are evaluated for various values of θ τ using both the benchmark and proposed anomaly detection schemes.an anomaly detection scheme is proposed that is based on statistical hypothesis testing and the capabilities of an ed-lstm model on accurately modeling predictable soft-failure evolution.4% improvement) an anomaly detection scheme that does not consider the expected evolution of soft-failures; an indicator of the importance of considering this information in anomaly detection. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/672.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/672.txt new file mode 100644 index 0000000000000000000000000000000000000000..86a7225274b3fb70fcd4a4fc5673521663fa5830 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/672.txt @@ -0,0 +1 @@ +cluster analysis or clustering is defined as a method wherein a given set of data objects are grouped into distinctly different sets or groups. 
each such set contains objects which are similar to other objects in the same set; consequently objects in different sets are dissimilar to one another.it is worth mentioning that, from the same data set, different clusterings may be obtained by different clustering methods.clustering is generally performed by clustering algorithms using computers on large data sets; essentially it is not possible to perform this manually.one of the primary motivations for clustering is discovering previously unknown groups inside a data set . objects within a cluster are "similar" to one another; wherein similarity is calculated or derived in terms of "closeness" i.e. how close two objects are in space. this is done by using specific distance function.attributes of a cluster such asits diameter (the maximum distance between any two objects in the cluster) represents the "quality" of a cluster.applications of cluster analysis is done in a variety of diverse fields such as knowledge discovery in web searches, business intelligence, image pattern recognition, intrusion detection, intrusion prevention, genomics, speech processing, and fraud detection, to name a few.cluster analysis or clustering is defined as a method wherein a given set of data objects are grouped into distinctly different sets or groups.attributes of a cluster such asits diameter (the maximum distance between any two objects in the cluster) represents the "quality" of a cluster.the algorithm calculates the growing cluster density mean and then the cluster density variance for any core object, which is supposed to be expanded further by considering density of its e-neighborhood with respect to cluster density mean. if cluster density variance for a core object is less than or equal to a threshold value and is also satisfying the cluster similarity index, then it will allow the core object for expansion.  it incrementally augments an initial cluster by its neighbouring points as long as the nearest neighbour distance of the resulting cluster fits the expected distance distribution. a set of candidates of a cluster is constructed using region queries which are supported by spatial access methods (sam). a necessary condition for m is n × p(nndist c (p) >m)<1 when inserting a new point p into cluster c, a circle query with center p and radius m is performed and the resulting points are considered as new candidates. points already assigned to some cluster may switch to another cluster later.chi-square test is used to verify the hypothesis that the nearest neighbour distance set of the augmented cluster still fits the expected distance distribution.if the object is not marked as noise or it is not in a cluster and the difference between the average value of the cluster and new value is smaller than δe, it is placed into the current cluster.if two clusters c1 and c2 are very close to each other, a point p may belong to both c1 and c2. clustering of temporal-spatial data can be performed based on non-spatial, spatial and temporal attributes  it can detect noise points even when it is of varied density by assigning density factor to each cluster.to overcome the difficulty in using one set of global parameters in clustering analysis, a cluster analysis method called optics was proposed. 
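the density-based notions discussed above (e-neighbourhoods, core objects, noise points, and the optics ordering) can be tried directly with scikit-learn; the parameter values and synthetic data below are arbitrary placeholders.

# dbscan groups points whose eps-neighbourhoods are dense enough and labels the rest
# as noise (-1); optics builds a reachability ordering that avoids one global eps
import numpy as np
from sklearn.cluster import DBSCAN, OPTICS

rng = np.random.default_rng(0)
X = np.vstack([rng.normal(0, 0.3, size=(50, 2)),   # first dense cluster
               rng.normal(5, 0.3, size=(50, 2)),   # second dense cluster
               rng.uniform(-2, 7, size=(10, 2))])  # scattered noise points

db_labels = DBSCAN(eps=0.5, min_samples=5).fit_predict(X)
optics_labels = OPTICS(min_samples=5).fit_predict(X)
print("dbscan clusters:", set(db_labels), "| noise points:", int((db_labels == -1).sum()))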
objects in a denser cluster are listed closer to each other in the cluster ordering.if the local density value is very small, then the objects of clusters are discarded as noise in this method, objects under consideration are added to a cluster through density attractors using a step wise hill-climbing procedure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/673.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/673.txt new file mode 100644 index 0000000000000000000000000000000000000000..d5bc2a300e9843b27596ad543798af6587fe864a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/673.txt @@ -0,0 +1 @@ +continual learning (cl) aims to develop models and training procedures capable of learning continuously through a stream of data (delange et al., 2021). as opposed to the well-studied static setting of feeding the model with independent and identically distributed (iid) data, in cl, each experience has its distribution with a possible drift among tasks.given this distribution drift, one of the main challenges of cl is catastrophic forgetting (mccloskey & cohen, 1989). the latter refers to the process by which a model forgets to solve previously learned tasks when new experiences come in. in this context, replay-based methods provide a powerful and straightforward tool to counter catastrophic forgetting by storing and revisiting a subset of samples from previously learned tasks. these methods have achieved state-of-the-art results in a wide array of continual learning scenarios and benchmarks (chaudhry et al., 2019;buzzega et al., 2020).despite successful results, previous works have argued that memory-based methods are prone to overfitting (lopez-paz & ranzato, 2017;verwimp et al., 2021). by only storing a subset of previous distributions, the model only reinforces concepts and ideas that are present in the buffer, depending on how much previous distributions are represented. to reinforce useful concepts, the buffer should accurately represent the whole training distribution. however, if the buffer represents only a small percentage of the training distribution, it will start learning spurious correlations and will lose its generalization capabilities.we argue that compositionality is a critical factor for cl. however, spurious correlations in the data can lead the model to learn incorrect compositions of specific concepts, thus impairing generalization. in this paper, we show that, even if a model can learn to identify useful concepts to make a proper classification, the classifier will learn shortcuts that hurt out-of-distribution generalization (ood); shortcuts that help increase performance in the iid dataset and are amplified by memory-based methods. however, as a result, they increase the generalization gap between iid and ood examples.in this paper, we develop a controlled setting that tests outof-distribution generalization beyond the training distribution. we evaluate a basic cnn model on a set of examples that depart from the training distribution by including unseen combinations of latent and target variables. and we show that replay falters in this setting, giving further evidence that replay-based methods have a toll on generalization capabilities not seen on traditional machine learning benchmarks that test only the iid test set. 
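a minimal sketch of the replay mechanism discussed above: a fixed-size memory filled by reservoir sampling over the stream, from which past samples are revisited alongside current-task batches. the buffer size and sampling scheme are illustrative assumptions, not the exact methods evaluated in the cited works.

# reservoir-sampling replay buffer: keeps a uniform sample of the stream seen so far
import random

class ReplayBuffer:
    def __init__(self, capacity=200):
        self.capacity = capacity
        self.data = []
        self.seen = 0

    def add(self, example):
        self.seen += 1
        if len(self.data) < self.capacity:
            self.data.append(example)
        else:
            j = random.randrange(self.seen)  # keeps each example with prob. capacity/seen
            if j < self.capacity:
                self.data[j] = example

    def sample(self, k):
        # mix these replayed examples with the current experience's batch during training
        return random.sample(self.data, min(k, len(self.data)))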
here we propose an approach to test how ood and spurious correlations affect memory-based methods and hope our results influence future studies to focus on improving the performance of cl in ood data. in this paper, we show that, even if a model can learn to identify useful concepts to make a proper classification, the classifier will learn shortcuts that hurt out-of-distribution generalization (ood); shortcuts that help increase performance in the iid dataset and are amplified by memory-based methods.the common practice in cl dictates that, for each experience, the distribution of training and test sets follows a similar distribution, and changes in the distributions take place with new experiences., 2008).in cl, we assume that each experience t of the sequence follows a distribution p t (y i |x i , z i ), where y i is the label, x i is the input, and z i are a group of characteristics presented in the input. in a classification task, the objective is to minimize the function l t (f θ (x t i ), y t i ) for every experience of the sequence, meaning that the model f must learn parameters θ that find relevant characteristics from z i that generalize to solve the current and future task using only information from the input. however, as it has been shown in previous studies(geirhos et al., 2020;ming et al. ideally, a model should be able to correctly identify relevant features z g and irrelevant features z b . in this paper, we will assume that a model with proper ood generalization properties can correctly identify those relevant features and combine them to solve the task. in contrast, a model relying on spurious correlations is one that can correctly encode relevant features but incorrectly extracts this information or uses irrelevant features for solving the task. we identify one of these characteristics as the label y, a group of relevant features to solve the task z g , and the rest as irrelevant features. we expect that even with missing combinations of y, z g from the training set, a robust model that is able to identify y and z g independently can extrapolate its knowledge correctly to solve the task despite the absent combinations.., ẑ|zt| }, where z t1 ∩ z t2 = ∅, ∀t 1 ̸ = t 2 . we show that the learned representations have the necessary information to solve the main task, since training the probe with iid and ood data it obtains good performance in both iid and ood test sets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/674.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/674.txt new file mode 100644 index 0000000000000000000000000000000000000000..cbc38a7c82908dc95a76233ca8415945248abd21 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/674.txt @@ -0,0 +1 @@ +climate change is one of the biggest challenges facing humanity, with the risk of dramatic consequences if certain limits of warming are exceeded . to mitigate climate change, the energy system must be decarbonized. a difficulty in decarbonization is that renewable energy supply fluctuates depending on the weather. however, supply and demand must be balanced in the grid at every moment to prevent outages . in addition, with the ongoing decentralization of the renewable energy supply and the installation of large consumers, such as electric vehicle chargers and heat pumps, low-voltage grids are expected to reach their limits . 
thus, to balance the grid and to avoid congestion, advanced operation and control mechanisms must be installed in the smart grid of the future. this requires accurate forecasts on various aggregation levels, up to fine-grained low-voltage level load forecasts. such fine-grained load forecasts can be used for demand-side management, energy management systems, distribution grid state estimation, grid management, storage optimization, peer-to-peer trading, peak shaving, smart electrical vehicle charging, dispatchable feeders, provision of feedback to customers, anomaly detection and intervention evaluation. moreover, the aggregation of fine-grained load forecasts can result in a more accurate forecast of the aggregated load. with the smart meter rollout, fine-grained electrical load data will become available for an increasing number of clients. in such a scenario where load time series from multiple clients are available, different model training strategies are possible. the goal of our work is to compare training strategies for the transformer, which was recently used for load forecasting. we address the following multiple load time series forecasting problem: at a time step t, given the history of the electrical load of c clients x c 0 , ... . this section first presents related work on long time series forecasting and load forecasting with transformers. most of the load forecasting literature uses local models, but a few works use global models, which are presented next. as transformers are often used for long time series forecasting with up to a one-month horizon, various extensions to the transformer architecture exist that aim to reduce the time and space complexity. each input vector consists of one (in the case of local and global models) or c (in the case of multivariate models) load values, and nine additional time and calendar features. finally, a linear layer transforms the h decoder output vectors into a forecast with h × 1 (for local and global models) or h × c (for multivariate models) values. comparison with the state of the art: the global transformer achieves a better result for short-term forecasting on the electricity dataset than related work, and achieves results close to the best results from patchtst for longer horizons and on the ausgrid solar home dataset. however, to establish a state of the art for short-term and medium-term load forecasting, a comparison to other forecasting models must be undertaken, including models that are not based on the transformer architecture and that are more sophisticated than our baselines. using weather data could improve the forecasts, because some electrical load patterns, such as the usage of electrical heating, depend on the weather. linear models: as in related work, we observe that linear models are strong baselines. no general answer can be given on whether the local linear regression models are better or the global ltsf-linear is better, because each variant is better on one dataset. transfer learning: according to the definition of transfer learning, the global training strategy can be seen as a transfer learning method, because the model must transfer knowledge between different types of buildings with different consumption patterns.
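To make the three training strategies concrete, the sketch below shows one way the same multi-client load history could be sliced into training windows for local, global, and multivariate models. The data layout (clients × time), the window lengths, and the omission of the nine calendar features are simplifying assumptions for illustration only.

```python
import numpy as np

def make_windows(series, lookback, horizon):
    """Slice a 1-D or 2-D series into (input, target) windows along time."""
    xs, ys = [], []
    for t in range(lookback, len(series) - horizon + 1):
        xs.append(series[t - lookback:t])
        ys.append(series[t:t + horizon])
    return np.stack(xs), np.stack(ys)

rng = np.random.default_rng(0)
load = rng.random((3, 500))            # toy data: 3 clients, 500 time steps
lookback, horizon = 96, 24

# local: one model per client, trained only on that client's windows
local_sets = [make_windows(load[c], lookback, horizon) for c in range(load.shape[0])]

# global: one shared univariate model, windows from all clients pooled together
x_global = np.concatenate([s[0] for s in local_sets])
y_global = np.concatenate([s[1] for s in local_sets])

# multivariate: one model that maps all clients jointly (C load values per time step)
x_multi, y_multi = make_windows(load.T, lookback, horizon)   # time-major layout

print(local_sets[0][0].shape, x_global.shape, x_multi.shape)
# (381, 96) per client, (1143, 96) pooled, (381, 96, 3) multivariate
```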
it was shown that the transformer generalizes better to new time series than other approaches, but the forecasts are still better when training data from the target time series is available. the transformer model and the different training strategies are not designed for load forecasting in particular, but can also be applied to other forecasting tasks. we show that the multivariate training strategy used in related work on forecasting with transformers is not optimal, and it is better to use a global model instead. in particular, our approach gives better results than the linear models from related work for one-day to four-day forecasting horizons, which shows that, with the right training strategy, transformers are effective for load forecasting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/675.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/675.txt new file mode 100644 index 0000000000000000000000000000000000000000..b89f29fad410d732692b21c4e8495b77a4ad2a15 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/675.txt @@ -0,0 +1 @@ +tabular data is one of the most widely used media for storing information. table representation learning has a wide range of downstream applications such as table question answering, table search, table type detection, etc. it is thus vital to design an effective and practical solution for table representation learning. however, despite its popularity and importance in modern data science, table representation learning is not well addressed, compared to images, texts, or other media. most of the previous attempts apply recent progress in natural language processing (nlp), i.e., transformers and large language models (lms). these works directly serialize the entire table together with a query or related utterance into a sequence as the input to an lm, which is pretrained on a sufficient amount of table corpus. however, despite being the most common and best-established practice for table representation learning, this approach suffers from scalability and efficiency issues. first, serializing a large table containing a large number of rows will result in a long sequence which is hard to process by classical transformer-based models, because the complexity of such models is quadratic in the length of the input sequence. to solve this problem, some works optimize the transformer structure, while others use table-specific solutions to reduce the complexity of attention computation, such as restricting attention computation to the same row or column or only between the schema and values. however, these approaches do not eliminate the scalability issue, because a pretrained lm is subject to a max sequence length constraint. for example, gpt-3 limits the input length to 2048 tokens, while bert sets this limit at 512. a table with a small number of rows can easily exceed it, causing inevitable truncation and thus loss of information. second, the serialization process takes the given query as input, leading to query-specific encoding. because in real-world scenarios many queries concentrate on a few tables, repeatedly computing the table representation for every new incoming query is inefficient. to address the above problems, we propose rotar, which learns a query-agnostic row-based table representation. rather than serialize the whole table, rotar takes each row as input and efficiently produces row-level encodings which can be re-used by any query.
it then uses query-specific aggregation to produce the table representation on top of these row encodings. table representation learning has a wide range of downstream applications such as table question answering, table search, table type detection, etc. these works directly serialize the entire table together with a query or related utterance into a sequence as the input to an lm, which is pretrained on a sufficient amount of table corpus.first, serializing a large table containing a large number of rows will result in a long sequence which is hard to process by classical transformer-based models, because the complexity of such models is quadratic to the length of the input sequence. to solve this problem, some works optimize the transformer structure, while others use table specific solutions to reduce the complexity of attention computation, such as restricting attention computation to the same row or columnor only between the schema and values. rather than serialize the whole table, rotar takes each row as input and efficiently produces row level encodings which can be re-used by any queries. it then uses query-specific aggregation to produce the table representation on top of these row encodings. we observe that although the information in different rows should be aggregated to form the representation of the whole table, there is no strong correlation among different rows. therefore, table representation can be factorized into an aggregation of independent row representations.inspired by this observation, rotar uses a weight-shared row-based transformer model to encode each row in the table independently and ignores the inter-row correlation in this encoding (fig. for instance, if a query q is encoded as a vector v q of the same dimension with the row representations, setting ϕ(c i , q) = v i ⊙ v q (⊙ stands for point-wise multiplication) and ρ(x) = 1 |x| x∈x x yields the table representation as the average row vector projected onto the query vector v q . for example, we can directly use a general-purpose pretrained lm like bertor roberta, or pre-existing table representation models like tapasif we view each row as a table consisting of a single row. while the learned row-based representation is query-agnostic, a query-specific aggregation module is introduced to produce query-specific table representation. however, with the help of the previous table representation methods during training time, the rotar model is able to improve its performance while still being efficient during inference time.in the table verification task, a query q is a statement to be evaluated based on a table t .we propose rotar which uses a shared row-encoder to generate query-agnostic row representations and learns instance optimized aggregation function to produce query-specific table representation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/676.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/676.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf9505840e1e030f6409ea0c69e6efc86429f8e6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/676.txt @@ -0,0 +1 @@ +in recent years, many applications have benefited from the fast development and high quality results of deep learning methods. most of these methods focus on real-valued pipelines for applications with real-valued signals, such as natural images or encodings of natural language processing. 
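Returning to the row-based factorization described above, the snippet below caches query-agnostic row vectors once and combines them with a query vector via ϕ(c_i, q) = v_i ⊙ v_q followed by ρ = mean. The row and query encoders here are hash-based stand-ins so the example runs without any pretrained model; rotar itself would use a weight-shared transformer row encoder, so treat everything except the aggregation step as assumed scaffolding.

```python
import numpy as np

DIM = 64

def encode_row(row, dim=DIM):
    """Stand-in row encoder: deterministic pseudo-embedding derived from the cells."""
    seed = abs(hash(tuple(str(c) for c in row))) % (2**32)
    return np.random.default_rng(seed).standard_normal(dim)

def encode_query(query, dim=DIM):
    seed = abs(hash(query)) % (2**32)
    return np.random.default_rng(seed).standard_normal(dim)

def table_representation(row_vectors, query_vector):
    # phi(c_i, q) = v_i * v_q (element-wise), rho = mean over rows
    projected = row_vectors * query_vector        # broadcast over rows
    return projected.mean(axis=0)

table = [["paris", "france", 2.1], ["berlin", "germany", 3.6], ["rome", "italy", 2.8]]

# row encodings are query-agnostic: compute once, reuse for every incoming query
row_vecs = np.stack([encode_row(r) for r in table])

for q in ["population of berlin", "capital of italy"]:
    rep = table_representation(row_vecs, encode_query(q))
    print(q, rep.shape)   # (64,) query-specific table representation
```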
there is however a great amount of applications that naturally deal with complex-valued signals, such as mri images or remote sensing and the fourier transform of real-valued signals or images and it has been shown that fully complex-valued architectures often (but not always ) deliver superior performance when dealing with complex-valued signals. the complex numbers come with an intrinsic algebraic structure that can not be captured by the simple isomorphism of c ∼ r 2 , especially because there is no natural way to define multiplication in r 2 , which, however, is an important part of many deep learning building blocks. has provided a lot of those building blocks, such as complex-valued convolution, batch normalization and initialization. these building blocks are of great help for a large amount of current architectures, especially in image and signal processing. in many fields, architectures building on the idea of attention mechanisms have successfully been applied. especially the immense success of the transformer architecture has shown that attention based architectures can be superior and have since become standard in many applications. we seek to provide a solid generalization of the building blocks of the transformer architecture in the complex domain and show experimental evidence that it improves robustness to overfitting while maintaining on-par performance when compared to the real-valued transformer architecture.our key contributions are: 1) newly developed building blocks consisting of: a. derivation of a complex-valued attention mechanism, generalizing the scaled dot-product attention ; b. introduction of complex-valued layer normalization. 2) adaptation of building blocks from existing complex-valued neural networks for the transformer architecture. 3) demonstration of improved robustness to overfitting while maintaining on-par results compared to the real-valued model.the combination of the first two contributions provide the foundation for a mathematically rigorous complex-valued transformer architecture. the source code for the full architecture and all experiments is available as a pytorch module1 . there is however a great amount of applications that naturally deal with complex-valued signals, such as mri imagesor remote sensingand the fourier transform of real-valued signalsor imagesand it has been shown that fully complex-valued architectures often (but not always) deliver superior performance when dealing with complex-valued signals. we seek to provide a solid generalization of the building blocks of the transformer architecture in the complex domain and show experimental evidence that it improves robustness to overfitting while maintaining on-par performance when compared to the real-valued transformer architecture. derivation of a complex-valued attention mechanism, generalizing the scaled dot-product attention; b. 2) adaptation of building blocks from existing complex-valued neural networks for the transformer architecture. recently, an increasing number of works in complex-valued neural networks have been published, driven by the interest in applications, which naturally deal with complex-valued signals: remote sensing, mri processingand frequency analysis through fourier transform. complex-valued building blocks have been used to develop a multitude of architectures, such as complex-valued generative adversarial networks, complexvalued convolutional recurrent networksand a complex-valued u-net. 
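As a sketch of what a complex-valued attention block can look like, the snippet below scores queries against keys using the real part of the Hermitian inner product, one plausible generalization of the scaled dot product that reduces to the usual formula when imaginary parts vanish. The paper derives its own formulation(s), so this should be read only as an assumed illustrative variant, not as the authors' definition.

```python
import numpy as np

def softmax(x, axis=-1):
    x = x - x.max(axis=axis, keepdims=True)
    e = np.exp(x)
    return e / e.sum(axis=axis, keepdims=True)

def complex_attention(q, k, v):
    """Scaled dot-product attention for complex-valued q, k, v.

    Similarity is taken as Re(q k^H): a real-valued score, so the softmax
    weights are real, while the returned values remain complex.
    """
    d = q.shape[-1]
    scores = np.real(q @ k.conj().T) / np.sqrt(d)
    weights = softmax(scores, axis=-1)          # real, rows sum to 1
    return weights @ v                          # complex-valued output

rng = np.random.default_rng(0)
def crandn(*shape):
    return rng.standard_normal(shape) + 1j * rng.standard_normal(shape)

q, k, v = crandn(5, 16), crandn(7, 16), crandn(7, 16)
out = complex_attention(q, k, v)
print(out.shape, out.dtype)   # (5, 16) complex128
```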
these use different kinds of convolutional architectures but are not complex-valued versions of the scaled dot-product attention.this core concept of scaled dot-product attention is extended to the more general concept of multi-head attention (mha) by applying learnable linear projections w q , w k , w v to the inputs and project it back with another learnable linear projection w o :. using any of these formulations of the complex-valued scaled dot-product attention the adoption of multi-head attention as described in subsection 3. we can replace the learnable linear projections w q , w k , w v and w o by complexvalued linear projectionsand can then use the formulations as described inand equation 3. a complex-valued version has been proposed for batch normalization, however layer normalization is preferable for methods like lstm or rnnsas well as the transformer architecture. we compare the introduced methods for a complex-valued attention module as described in equations 8-11 using the proposed dot-product as well as the version using q(k t ). additionally, all complex-valued architectures show improved robustness to overfitting (figure3), with no or minor decreases after longer training time, while the real transformer shows massive overfitting starting after 10 epochs. that includes newly developed formulations of complexvalued attention mechanisms as well as a complex-valued layer normalization. additionally, the complex-valued fourier transform of signals can now directly be used in the transformer architecture without using the isomorphism c → r 2 that results in a loss in robustness against overfitting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/677.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/677.txt new file mode 100644 index 0000000000000000000000000000000000000000..462f6fbe88236fc0c1bf71d24c93232fc04bb982 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/677.txt @@ -0,0 +1 @@ +generalized linear models (glms) and single-index models (sims) constitute fundamental frameworks in supervised learning , capturing and generalizing basic models such as linear and logistic regression. in the glm framework, labeled examples (x, y) are assumed to satisfy e = u -1 (w • x), where u is a known monotone function (called the link function) and w is an unknown vector. single-index models (sims) are defined similarly, but for the case when the monotone link function u is also unknown.in the realizable setting where the labels are indeed generated according to a glm with a given lipschitz link function, the glmtron algorithm of is a simple and efficient learning algorithm. when the ground truth is only assumed to be a sim (and, hence, the link function is unknown), it can be learned efficiently by the isotron algorithm .in this work, we consider the significantly more challenging agnostic setting, where the labels are arbitrary and not necessarily realizable. moreover, we do not assume that we know the optimal activation; our goal is to output a predictor that has error comparable to that of the optimal sim, whatever its activation may be. that is, we must be competitive against not only all possible weights but also all possible monotone and lipschitz link functions that might fit the distribution. concretely, consider a distribution d over r d × and denote the squared error of a function h : r d → r by err 2 (h) = e (x,y)∼d . 
let opt(sim) denote optimal value of err 2 (h) over all sims h with bounded weights and arbitrary 1-lipschitz monotone activations (we call the inverse u -1 of a link function u the activation function). given access to samples from d, our goal is to compute a predictor p : r d → with error err 2 (p) comparable to the error of the optimal sim: err 2 (p) ≤ opt(sim) + ǫ.our main result is the first efficient learning algorithm for this problem.theorem 1.1 (informal, see theorem 3.1). let sim b denote the class of sims of the form x → u -1 (w • x) for some 1-lipschitz function u -1 and w 2 ≤ b. let d be any distribution over r d × whose marginal on r d has bounded second moments. there is an efficient algorithm that agnostically learns sim b over d up to errorthis result provides a guarantee comparable to that of the isotron algorithm but for the challenging agnostic setting rather than the realizable setting (where opt(sim b , d) = 0). moreover, isotron's guarantees require the distribution to be supported on the unit ball, whereas we only require a mild second moment condition.minimizing the squared error in the agnostic setting is a standard benchmark in learning theory, but it is often useful to simplify the problem by considering an alternative error function, tailored to the specific problem at hand. in this sense, we can still find the glm that is closest to the ground truth even in the agnostic setting; the key is to define closeness using an appropriate bregman divergence, which depends on the link function through fenchel-legendre convex duality.the problem of finding the closest model in bregman divergence amounts to a convex program where we minimize a certain loss called the matching loss . in fact, recent work by studying the notion of omniprediction has demonstrated that there exist efficient algorithms that minimize all of the matching losses (corresponding to monotone and bounded links) simultaneously. their solution concept is called an omnipredictor, i.e., a single predictor that is able to compete with the best-fitting classifier in a class c as measured by a large class of losses (as opposed to just a single pre-specified loss, as is standard in machine learning). they obtain such predictors through calibrated multiaccuracy or multicalibration . their results apply to the non-realizable setting and do not assume prior knowledge of the link function, but only provide guarantees for (simultaneous) matching loss minimization, rather than the standard squared error minimization.we propose a simple analytic approach to transforming matching loss guarantees over the class of linear functions to squared error guarantees over the class of glms with link function that corresponds to the matching loss at hand. our generic transformation, coupled with the omniprediction results from , yields our main result on agnostically learning sims. we thus obtain a best of all worlds statement: we do not need to know the link function, but we can always compete with the best sim model in terms of the squared loss. at the heart of our approach are distortion inequalities relating matching losses to ℓ p losses that we believe may be of independent interest.we first prove strong versions of such inequalities for matching losses arising from bi-lipschitz link functions, and obtain our results for general lipschitz activations by carefully approximating them using bi-lipschitz activations. 
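For contrast with the agnostic analysis, the realizable-setting baseline mentioned above (glmtron/isotron) reduces to a very simple iteration. The sketch below shows a generic glmtron-style update under the convention used here that the activation u^{-1} is known; the step size, iteration count, and toy data are illustrative assumptions rather than a restatement of that algorithm's exact parameters.

```python
import numpy as np

def glmtron(x, y, activation, iters=100, lr=1.0):
    """GLMtron-style updates: w <- w + (lr/n) * sum_i (y_i - act(w.x_i)) x_i.

    `activation` plays the role of u^{-1} in the text (a known, monotone,
    Lipschitz function); with lr=1 this matches the classic update shape.
    """
    n, d = x.shape
    w = np.zeros(d)
    for _ in range(iters):
        preds = activation(x @ w)
        w = w + (lr / n) * x.T @ (y - preds)
    return w

# toy realizable instance with a sigmoid activation
rng = np.random.default_rng(0)
sigmoid = lambda t: 1.0 / (1.0 + np.exp(-t))
x = rng.standard_normal((500, 5))
w_star = rng.standard_normal(5)
y = sigmoid(x @ w_star)                  # noiseless labels: E[y|x] = u^{-1}(w*.x)

w_hat = glmtron(x, y, sigmoid)
print(np.mean((sigmoid(x @ w_hat) - y) ** 2))   # small squared error on this toy data
```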
in particular, if we let opt(glm u -1 ,b ) denote the optimal value of err 2 (h) over all glms of the form x → u -1 (w • x), where w 2 ≤ b, we obtain the following result about bi-lipschitz activations (including, for example, the leaky relu activation).theorem 1.2 (informal, see theorem 2.1). let u : r → r be a bi-lipschitz invertible link function. then, any predictor p : r d → r that is an ǫ-approximate minimizer of the population matching loss that corresponds to u, with respect to a distributionthis guarantee holds under milder distributional assumptions than are required by comparable prior work on agnostically learning glms . moreover, when we focus on distortion bounds between the logistic loss and the squared loss, we obtain a near-optimal guarantee of o(opt(glm u -1 ,b )) for logistic regression, when u is the logit link function (i.e., when glm u -1 ,b is the class of sigmoid neurons).theorem 1.3 (informal, see theorem 4.1). let u(t) = ln( t 1-t ). then, any predictor p : r d → r that is an ǫ-approximate minimizer of the population logistic loss, with respect to a distribution d over r d × whose marginal on r d has subgaussian tails in every direction satisfieswhile our error guarantee is weaker the one of , we do not make the anti-concentration assumptions their results require. a natural question is to ask if our guarantees are near-optimal, e.g., whether we can obtain a guarantee of the form err 2 (p) ≤ opt(sim) + ǫ. however, there is strong evidence that such results cannot be obtained using efficient algorithms . adapting a result due to , we show in section 5 that one cannot avoid a dependence on the norm bound b in our main result, theorem 1.1. concretely, consider a distribution d over r d × and denote the squared error of a function h : r d → r by err 2 (h) = e (x,y)∼d . given access to samples from d, our goal is to compute a predictor p : r d → with error err 2 (p) comparable to the error of the optimal sim: err 2 (p) ≤ opt(sim) + ǫ.we propose a simple analytic approach to transforming matching loss guarantees over the class of linear functions to squared error guarantees over the class of glms with link function that corresponds to the matching loss at hand. in particular, if we let opt(glm u -1 ,b ) denote the optimal value of err 2 (h) over all glms of the form x → u -1 (w • x), where w 2 ≤ b, we obtain the following result about bi-lipschitz activations (including, for example, the leaky relu activation). then, any predictor p : r d → r that is an ǫ-approximate minimizer of the population matching loss that corresponds to u, with respect to a distribution. then, any predictor p : r d → r that is an ǫ-approximate minimizer of the population logistic loss, with respect to a distribution d over r d × whose marginal on r d has subgaussian tails in every direction satisfies.for a concept class c : r d → r, we define opt(c, d) to be the minimum squared error achievable by a concept c : r d → r in c with respect to the distribution d. for a non-decreasing and lipschitz activation g ′ : r → r, the matching loss ℓ g : y × r → r is defined pointwise as follows:.we also define the bregman divergence associated with f to be d f (q, r) = f (q) -f (r) -(qr)f ′ (r), for any q, r ∈ ran(g ′ ). 
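The pointwise definition of the matching loss is elided above; in its standard form it is the integrated gap between the activation and the label, ℓ_{g'}(y, t) = ∫_0^t (g'(s) − y) ds. Assuming that form, the sigmoid activation recovers the logistic loss up to an additive constant, which the small numerical check below verifies; the closed-form expression used here is our evaluation of that integral, not a formula quoted from the text.

```python
import numpy as np

def matching_loss_sigmoid(y, t):
    """Matching loss for the sigmoid activation g'(s) = 1/(1+e^{-s}).

    Uses the assumed integral form  l(y, t) = int_0^t (g'(s) - y) ds,
    which evaluates in closed form to log(1 + e^t) - log 2 - y * t.
    """
    return np.logaddexp(0.0, t) - np.log(2.0) - y * t

def logistic_loss(y, t):
    """Standard logistic loss for a label y in {0, 1} and a real-valued score t."""
    return np.logaddexp(0.0, t) - y * t

rng = np.random.default_rng(0)
t = rng.standard_normal(5) * 3.0
for y in (0.0, 1.0):
    gap = matching_loss_sigmoid(y, t) - logistic_loss(y, t)
    print(y, np.round(gap, 6))   # constant -log 2, independent of t and y
```

Since the two losses differ only by a constant, minimizing the matching loss for the sigmoid activation coincides with logistic regression, which is why the logit link appears as the natural special case later in the text.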
in particular, for a given matching loss l g , we transform guarantees on l g that are competitive with the optimum linear minimizer of l g to guarantees on the squared error that are competitive with the optimum glm whose activation (g ′ ) depends on the matching loss at hand.2 appropriately to bound the error of a predictor p by its matching loss l g (f ′ • p) and bound the matching loss of the linear function corresponding to w * by the squared error of g ′ (w * • x), where g ′ (w * • x) is the element of glm g ′ ,b with minimum squared error. for any fenchel-legendre pair (f, g) such that g ′ : r → r is l-lipschitz, and f ′ satisfies some mild boundedness conditions (see definition c. we use appropriate pointwise distortion bounds for the matching loss corresponding to the sigmoid activation and provide guarantees of o(opt) for logistic regression with respect to both squared and absolute error, under appropriate assumptions about the concentration of the marginal distribution.for the logistic model, the link function f ′ is defined as f ′ (r) = ln( r 1-r ), for r ∈ (0, 1) and the corresponding activation g ′ is the sigmoid g ′ (t) = 1 1+e -t for t ∈ r. we observe that therefore, in total, we have that err 2 (p) ≤ 8e r opt g + 8ce b 2 e r e -(r/b) 2 + 2ǫ and we may obtain theorem 4. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/678.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/678.txt new file mode 100644 index 0000000000000000000000000000000000000000..c7505f520b41801e588ac1c4f2cea43ca58218ff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/678.txt @@ -0,0 +1 @@ +due to the proliferation of mobile, iot, sensors, and edge devices in our daily life activities which produce tremendous amounts of data, ai/ml-based analytics, which can process these larger volumes of data, became an integral part of user products and applications as well as the key revenue source for most organizations , . these data-driven analytics rely on data which is produced on edge devices for training the ai/ml models , . traditionally, the data were collected and stored in the cloud where the analytics and intelligence are, however, recently edge ai paradigm was coined to move the analytics and intelligence where the data is. this is also driven by the recent security and privacy concerns over collecting or storing private user data .within the edge ai paradigm, several methods have evolved to achieve the learning task on the decentralized data such as: 1) federated learning (fl) : which runs training locally on the devices and the server aggregates the model updates and coordinates the training rounds; 2) split learning (sl) which splits the training task between the device and the cloud server (or edge server); 3) decentralized learning (dl) : which leverages peer-to-peer coordination for the model exchange among the edge devices. these approaches are made possible by harnessing the advances in the ai/ml accelerators embedded in edge devices and the high-throughput and low-latency 5g/6g technologies . however, several challenges manifest themselves impacting the efficiency of these methods and making them ill-suited for decentralized edge ai at scale. the highly heterogeneous devices, configurations, and environment and the strict synchronization requirements are among the key challenges. 
this not only results in models with low qualities and long training times but also hinders the existing approaches from scaling with a large number of learners , , , . it is evident that low-quality models can be costly to many businesses and organizations. for instance, recently the realestate zillow group wrote down $304m in inventory due to the low accuracy of their zestimate ai algorithm) .it is clear that it modern data-driven intelligence needs systems that can produce accurate and timely models , . in this work, we shed light on the key challenges in existing decentralized methods and try to propose a novel architectural design for scalable decentralized learning. to this end, we aim to leverage the edge-to-cloud continuum and transform the learning task into a collaborative knowledge transfer system which facilities for the learning parties to share (or trade) the trained models based on mutual benefits or needs. in this view, the trained ai/ml models are treated as a commodity that can be exchanged between learning entities to meet global or personal objectives. this view is also similar to the majority of online delivery services such as uber (passengers) or deliveroo (food). for example, uber's task is merely connecting the passengers wanting to be transported from point a to point b with the appropriate drivers. similarly, in this work, the proposed idea facilitates the exchange and delivery of the trained models among the learning entities. in particular, we seize an opportunity to decouple the training task of a common model from the sharing task of the trained models. this would facilitate the exchange of models among learning parties and significantly improve ai/ml-based analytics of data generated by various connected devices and sensors that are now an integral part of our daily-life applications.due to the proliferation of mobile, iot, sensors, and edge devices in our daily life activities which produce tremendous amounts of data, ai/ml-based analytics, which can process these larger volumes of data, became an integral part of user products and applications as well as the key revenue source for most organizations,.within the edge ai paradigm, several methods have evolved to achieve the learning task on the decentralized data such as: 1) federated learning (fl): which runs training locally on the devices and the server aggregates the model updates and coordinates the training rounds; 2) split learning (sl)which splits the training task between the device and the cloud server (or edge server); 3) decentralized learning (dl): which leverages peer-to-peer coordination for the model exchange among the edge devices. this would facilitate the exchange of models among learning parties and significantly improve ai/ml-based analytics of data generated by various connected devices and sensors that are now an integral part of our daily-life applications. nowadays, it is widely accepted that data is processed locally on edge devices and models will be trained via collaborative approaches on decentralized data. moreover, practical deployment aspects like heterogeneity, communication costs, coordination, synchronization and availability to participate in training are all factors that either cause hinder the scalability, slow down the training process and/or affect the overall model quality (generalization performance),,.the key question that forms the basis of this work: what would be the best architectural design for collaborative learning over decentralized data? 
we position this work as a call for architectural designs that simultaneously can: 1) efficiently decentralize training over learning parties; 2) securely store a representation of the models in vaults; 3) develop a scalable model discovery and exchange methods. the system is client-driven where learners create an initial model with their local datasets and then asynchronously seek models in the network to boost the model quality via means of distillation. the learning parties whenever they require improvements on their model, they send a request for a trained model to the discovery service specifying certain qualities of the requested model (e. ii) bh: a behaviour-heterogeneity case where the devices have variable availability patterns based on real-world trace; iii) dh: a device-heterogeneity case where the devices follow and behaviour heterogeneity; iv) h: a mixed-heterogeneity case with both device and behaviour heterogeneity as figure3shows, all the heterogeneity cases (bh, dh, and h) cases significantly impact the average test accuracy of the global model compared to the homogeneous case (u).the experiments involve a small group of 10 independent parties individually training the model on their local data (ind) for a variable number of epochs and another large group consisting of the remaining clients training a global model via federated learning over 50 global rounds (fl).5, we observe that training for the model locally for 100 epochs does improve the model for the individual parties (ind) and the fl model (fl) shows very low accuracy when tested on the ten parties. we note then that if the individual parties (ind) request the fl model and distil their models with the fl model then the result is a better model in accuracy by more than 5 accuracy points (i.6that in the rnn-reddit case training the model locally on the devices of the individual parties (ind) achieves comparable accuracy to the mode trained by the fl group (fl). in fl, training a global model is assigned to a sub-population of decentralized devices such as mobile or iot devices. these devices possess private data and engage in training the model on their data,. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/679.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/679.txt new file mode 100644 index 0000000000000000000000000000000000000000..a05744ad297f5d8865ab0050247c732b3263824e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/679.txt @@ -0,0 +1 @@ +the main theorem states that given a hypothesis class h and an unknown distribution d over feature space x × {0, 1}, if we have an unbiased sample s of size m and a biased sample s β of size m β , with the marginal probability of a positive example in group i of s β being p i β i , then running algorithm 1 with appropriate sample sizes ensures that, with high probability, the algorithm outputs a hypothesis h that has a low error on the distribution d.the proof of the main theorem consists of three parts:part a: this part focuses on normalizing the weighted empirical loss on the biased sample. the goal is to bound the difference between the sum of weights in the biased sample and the reciprocal of the expected weight. by achieving this normalization, we can account for the bias introduced by the weights and ensure that the algorithm's performance is not adversely affected. this normalization step is crucial for the subsequent analysis of the proof. 
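The distillation step referenced above, where a learner fetches a model from the network and distils it into its own, typically minimizes a softened cross-entropy between the fetched model's predictions and its own predictions on local data. A minimal sketch of that objective follows; the temperature, the mixing with the hard-label loss, and all names are illustrative assumptions, not the system's specification.

```python
import numpy as np

def softmax(z, axis=-1):
    z = z - z.max(axis=axis, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=axis, keepdims=True)

def distillation_loss(student_logits, teacher_logits, temperature=2.0):
    """KL divergence between softened teacher and student predictions.

    Averaged over the batch; the T^2 factor keeps the gradient scale comparable
    to the hard-label loss when the two objectives are mixed.
    """
    p_teacher = softmax(teacher_logits / temperature)
    log_p_student = np.log(softmax(student_logits / temperature) + 1e-12)
    kl = np.sum(p_teacher * (np.log(p_teacher + 1e-12) - log_p_student), axis=-1)
    return (temperature ** 2) * kl.mean()

rng = np.random.default_rng(0)
local_logits = rng.standard_normal((32, 10))      # student: the locally trained model
fetched_logits = rng.standard_normal((32, 10))    # teacher: a model fetched from a peer

print(distillation_loss(local_logits, fetched_logits))
# a learner would minimize: alpha * hard_label_loss + (1 - alpha) * distillation_loss
```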
see lemmas a.2 and a.3 in the appendix for details.part b: part b of the proof is concerned with estimating the inverse of the product of the bias parameters. it involves first obtaining reliable estimates of the probabilities of positive examples in each group of the unbiased data and biased data, respectively, which can then be used to estimate 1 β and, finally,the key lemma established in this part specifies the sample size requirements to achieve reliable probability estimation, which is crucial for accurately evaluating the algorithm's performance. it shows that we can estimate the probabilities of positive examples in each group with reasonable accuracy by having only a small number of unbiased samples in addition to our larger set of biased samples. see lemmas b.1 -b.4 for details.part c: part c combines the results from parts a and b to establish the sample complexity guarantee for algorithm 1. it utilizes the bounds obtained from the lemmas established in parts a and b, which cover both the intersectional case where the groups in the biased sample have overlapping instances and the computationally simpler case where they are disjoint. by combining these results, the proof demonstrates that running algorithm 1 with appropriate sample sizes outputs a hypothesis h with a low error on the true distribution d with high probability. see lemmas c.1 -c.2 for details. be the estimated bias for each group i. however, introducing bias parameters for each group allows our model to explicitly incorporate a way to identify social power into analyzing intersectional biases when supplemented with a more robust normative theory. we provide strong theoretical guarantees that show we can learn the bias parameters accurately with a very small sample of unbiased data, so this gives a proof of concept that a small amount of unbiased data can take us from "impossible" to "nearly optimal" at least in our intersectionality bias model. since the bias parameter β i represents the probability that a positive sample from group g i is retained in s β , the probability of a positive outcome within group g i in s β is the product of the original base positive rate p i and the bias parameter β i :.by incorporating the bias parameters, we emphasize the intersectionality of biases, accounting for the collective impact of multiple group memberships on the biases in the dataset. to ensure fairness, we apply the intersectional bias learning algorithm to obtain a hypothesis h that minimizes the risk of the learned model while considering the biases.the main theorem states that given a hypothesis class h and an unknown distribution d over feature space x × {0, 1}, if we have an unbiased sample s of size m and a biased sample s β of size m β , with the marginal probability of a positive example in group i of s β being p i β i , then running algorithm 1 with appropriate sample sizes ensures that, with high probability, the algorithm outputs a hypothesis h that has a low error on the distribution d.1: the risk difference between the learned hypothesis h on the biased sample s β and the risk of h on the true distribution d is bounded with probability 1 -δ. 
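Concretely, the estimation step behind these lemmas can be pictured as follows: the group-wise positive rate p_i is estimated from the small unbiased sample, the rate p_i β_i from the large biased sample, their ratio gives an estimate of β_i, and retained positives of group i in the biased sample are then reweighted by 1/β̂_i before minimizing the empirical risk. The synthetic sketch below illustrates only that estimation and reweighting step; the sample sizes, clipping constants, and variable names are arbitrary choices for illustration.

```python
import numpy as np

rng = np.random.default_rng(0)
groups = ["g0", "g1"]
true_beta = {"g0": 0.9, "g1": 0.4}      # retention probability of positives per group
base_pos = {"g0": 0.5, "g1": 0.5}       # base positive rate p_i per group

def draw(n, beta):
    """Sample labels where a positive outcome in group i occurs with prob. p_i * beta_i."""
    g = rng.choice(groups, size=n)
    y = np.array([rng.random() < base_pos[gi] * beta[gi] for gi in g])
    return g, y.astype(int)

g_unb, y_unb = draw(500, {g: 1.0 for g in groups})     # small unbiased sample S
g_bias, y_bias = draw(20000, true_beta)                # large biased sample S_beta

beta_hat, weights = {}, np.ones(len(y_bias))
for g in groups:
    p_i = y_unb[g_unb == g].mean()                 # positive rate estimated from S
    p_i_beta = y_bias[g_bias == g].mean()          # positive rate estimated from S_beta
    beta_hat[g] = float(np.clip(p_i_beta / max(p_i, 1e-6), 1e-3, 1.0))
    # upweight retained positives of group g by 1 / beta_hat to undo the bias
    weights[(g_bias == g) & (y_bias == 1)] = 1.0 / beta_hat[g]

print(beta_hat)   # should be close to {'g0': 0.9, 'g1': 0.4}
# the weighted empirical risk sum_j weights[j] * loss(h(x_j), y_j) is then minimized
```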
the model also has the strength that once the bias parameters are estimated, they can be used on future draws of biased data to find a hypothesis approximately minimizing prediction risk with respect to the true distribution with high probability.1 philosophical framework for epistemic intersectionality epistemic intersectionality: we introduce a philosophical framework we coin epistemic intersectionality that incorporates and analyzes intersectional biases in machine learning models while considering the epistemic dimension of intersectionality. empirical verification: epistemic intersectionality recognizes the importance of empirical verification in understanding the existence and extent of intersectional biases. comprehensive analysis of intersectionality: epistemic intersectionality promotes a comprehensive analysis of intersectionality by considering the complex interactions between social identities and systems of power. role in academic research: epistemic intersectionality contributes to academic research by offering a rigorous and evidence-based approach to understanding the impact of intersectional biases in machine learning models.by considering the bias parameters when analyzing the biased dataset, our model ensures that data points are included based on the collective impact of multiple group memberships, which may be explicitly tied to social power given one's moral and political theory. b represents the difference between the normalized risk on the biased sample reweighted with β and the normalized risk on the biased sample reweighted with β.by proving this lemma, we establish a bound on the difference between the reweighted risk of the learned hypothesis on the biased sample and the risk on the true distribution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/68.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/68.txt new file mode 100644 index 0000000000000000000000000000000000000000..683b9ae70d1b336beef5f90ec2cadf01b45c25ce --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/68.txt @@ -0,0 +1 @@ +it is common knowledge, in the machine learning domain, to use differential values, since they provide a simple way to model the data. however, such algorithms may not fit the lightmorphic signature properly, leading to a reduced quality of the obtained results. training a neural network to predict the lightmorphic signature can significantly increase the data quality. this is the task that lsat tries to accomplish.as such we define the lightmorphic metric learning (lml) as a branch of machine learning algorithms, set out with the purpose of learning lightmorphic signatures from multiple datasets trough usage of vibrating graph chords.in the pursuing sections we describe the main features of the toolkit, explain the general mathematical concepts and finally detail the plans regarding future functionalities.since the primary light source considered is the earth's sun, specific spacetime metrics (ex.according to special relativity, spacetime is seen as a four dimensional manifold described by a flat minkowski metric defined in cartesian coordinates (t, x, y, z, c = 1) as:.when considering the geometry of curved space, we have made use of the metric g µν , that replaces the flat minkowski metric η µν . 
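For reference, the substitution just described can be written in standard linearized-gravity notation; the (−, +, +, +) signature and the weak-field split used further below are conventions assumed here for illustration, not quoted from the text.

```latex
% flat background in cartesian coordinates (t, x, y, z), with c = 1
ds^2 = \eta_{\mu\nu}\, dx^{\mu} dx^{\nu} = -dt^2 + dx^2 + dy^2 + dz^2

% weak-field replacement: the curved metric is the flat metric plus a small perturbation
g_{\mu\nu} = \eta_{\mu\nu} + h_{\mu\nu}, \qquad |h_{\mu\nu}| \ll 1
```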
this substitution was done considering that the geometry of curved space will eventually reduce to the flat spacetime of special relativity at a sufficiently small scale.where g µν is defined as the einstein curvature tensor, t µν is the stress-energy tensor and represents the mass-energy distribution, while k describes the einstein constant of gravitation defined as:.since there is not one general solution for the complex einstein equations, but a large variety of possible solutions that apply to particular circumstances, we've considered a weakfield approximation, where the nonlinear einstein equations where approximated towards linearity.for example, a very small perturbation specific to a gravitational wave, will impact the flat spacetime and it is defined as h µν (x) and it's value will be |h µν | << 1.where the constant amplitudes (h + , h × ) represent the two gravitational wave polarizations, the plus-and cross-polarization.we represent the distance between two neighboring points as defined by(berit (2013)) for a flat spacetime, trough the following expression:.that allows us to model in the tt gauge, the gravitational wave stretching along the x axis and compression along the y axis with the specific factor of: 1 ± h + (t) ≃ 1 + 1 2 h + (t) having modeled the photon's traveling path in outer space, in order to simplify the inherent path inhomogeneities, we separated the domains into outer space domain, atmospheric domain and earth specific domains (lithosphere, hydrosphere, biosphere, noises, etc).summing the light phase shift δ φatm and the δ φ earth which is derived from the noise sources like seismic or electromagnetic interferences, leads to the dataset of trajectory specific lightmorphic signatures:.while observing the distribution of multiple light segments within the dataset φ γ idt , it will be possible to estimate the probability for trajectory specific lightmorphic evolution:.where p φ k is the database's k-th segment specific probability, ρ k is the prediction weight for the k-th segment.the distribution matrices specific to the isochronous segmentation surfaces, which define the lightmorphic signature model, form the lsat core. as example, one may activate additional functionalities that consider input parameters like complex space weather forecasting, different electromagnetic wave disturbances or lithosphere, hydrosphere and biosphere specific localized data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/680.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/680.txt new file mode 100644 index 0000000000000000000000000000000000000000..24c293fe507914696932269ec1ef7ed79bde8ce2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/680.txt @@ -0,0 +1 @@ +improvements in reinforcement learning (rl), specifically deep reinforcement learning (drl) have contributed to new state-of-the-art performances in many fields from robotics , autonomous vehicles and games . these improvements have also led to a broad application to fields previously dominated by heuristic approaches. online portfolio selection (olps) has seen a large increase in the popularity of drl methods, leading to seemingly conclusive and positive results. olps is a dynamic management of financial assets, where we have the opportunity to redistribute funds across assets regularly, a sequential decision-making problem. 
this allows the managing algorithm to adapt to market changes with the aim of outperforming index performance.however, evaluations of existing works often rely on limited metrics and protocols, which may not be suitable for portfolio management. many works cannot be reproduced due to data unavailability or lack of experimental information. olps is unlike many other rl problems due to its high uncertainty this work benefited from the support of the chair "artificial intelligence applied to credit card fraud detection and trading" led by centralesupelec and sponsored by lusis. this work was performed using hpc resources from the "mésocentre" computing center of centralesupélec, école normale supérieure paris-saclay and université paris-saclay supported by cnrs and région île-de-france (https://mesocentre.universite-paris-scalay.fr/). and non-stationary nature. the financial market, used as the rl agent's environment, has a disproportionate impact on the proposed reward functions and lack of predictability. furthermore, rl algorithms are very sensitive to hyperparameter selection and initialization, which require additional evaluations and consideration. many published results often contain single-initialization results, which may misrepresent the capabilities of an approach and yield poorer performance if deployed. furthermore, traditional metrics only indicate the performance of an algorithm during its conception phase, close to its training data. we have found that agents tend to overfit, picking the same assets regardless of market variations, which reflect favorably in traditional metrics. however, when the market evolves, the static behavior degrades. evaluating the robustness of algorithms and their capacity to adapt to uncertainty and out-of-distribution data gives greater insight into their training quality and generalization capabilities.the aim of this work is to provide a standardized comparison process to assess portfolio management algorithms. the process provides reproducible results on the performance of the management agents. reinforcement learning is a complex task with many components sensitive to tuning and design choices. the proposed training and evaluation setup measures the robustness and generalization capabilities of our benchmarked algorithms. we rely on public data that are freely available and open-source implementations of drl algorithms to obtain transparent comparisons of popular approaches to olps. we focus on evaluating the training quality of our agents and their robustness to never-seen market conditions. to the best of our knowledge, there is no large comparison of approaches in this domain, with multi-dimensional evaluation of components' contribution to performance. furthermore, rl algorithms are very sensitive to hyperparameter selection and initialization, which require additional evaluations and consideration. many published results often contain single-initialization results, which may misrepresent the capabilities of an approach and yield poorer performance if deployed.all works evaluate their algorithms with classic financial metrics, such as net returns, risk metrics, including sharpe and sortino ratios or maximum drawdown. while these do well to evaluate portfolio management, we have found that overfitting agents predicting the same allocation weights regardless of selected assets and market changes had good performance measures in validation but would quickly become obsolete if deployed. 
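The financial metrics referred to above are straightforward to compute from a backtest series; the sketch below covers net return, maximum drawdown, and a VaR/CVaR-style tail measure at the 5% level. The toy return series, sign conventions, and thresholds are illustrative assumptions rather than the benchmark's exact definitions.

```python
import numpy as np

def net_return(values):
    """Portfolio return over the period: value_t - value_{t-n}."""
    return values[-1] - values[0]

def max_drawdown(values):
    """Largest peak-to-trough loss, expressed as a fraction of the running peak."""
    running_peak = np.maximum.accumulate(values)
    drawdowns = (running_peak - values) / running_peak
    return drawdowns.max()

def var_cvar(returns, level=0.05):
    """Value-at-risk and conditional VaR (expected shortfall) at the given level."""
    var = np.quantile(returns, level)
    cvar = returns[returns <= var].mean()
    return var, cvar

rng = np.random.default_rng(0)
daily_returns = rng.normal(0.0005, 0.01, size=750)      # toy backtest of ~3 years
portfolio_value = 1_000.0 * np.cumprod(1.0 + daily_returns)

print("net return:", round(net_return(portfolio_value), 2))
print("max drawdown:", round(max_drawdown(portfolio_value), 4))   # often a discard criterion above ~0.20
print("VaR / CVaR (5%):", var_cvar(daily_returns))
```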
combined with the difficulties of training drl agents, the same algorithm implementation with the same data may yield completely different results if initialized with different seeds. using only market data, composed of open, high, low, and adjusted closing prices, and volume, we select four market representations.in this section, we present market data acquisition and processing, algorithm details and training, and performance measures. we accounted for the range of historical data required for some market representations, such as sliding windows, to avoid leaking validation data into training. the first is the portfolio returns, defined as p r = value t -value t-n , with n the comparison period.m dd = bottom value -peak value peak value it measures the maximum value loss an agent incurred during the period, where investors would generally discard algorithms with m m d > 20%.where var, the value at risk, is the average 5%, a popular value, worse rate of returns, and x is a given rate lower than this threshold. we compare their performance during training and backtesting and the performance trend over sequential periods of the backtest. the first use-case is an ir quotient, comparing the management performance in the validation set to the performance during the backtest:. the names are abbreviated versions of previous approaches, where "default" are daily prices, "lagged" are sparse windows, "net" are daily returns and "value" are value changes over an episode. we can conclude that an approach based on daily values, which aims to maximize episodic rate of returns using ddpg would generally yield the best results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/681.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/681.txt new file mode 100644 index 0000000000000000000000000000000000000000..2b5410aa660b44bfc5686b3fecb173f4ebab84e8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/681.txt @@ -0,0 +1 @@ +recent developments in machine learning have spiked the interest in robustness, leading to several results in adversarial machine learning . a central goal in this area is the design of algorithms that are able to impair the performance of traditional learning methods by adversarially perturbing the input . adversarial attacks can be exploratory, such as evasion attacks, or causative, poisoning the training data to affect the performance of a machine learning algorithm or attack the algorithm itself. backdoor poisoning is a type of causative adversarial attack, in which the attacker has access to the whole or a portion of the training data that they can perturb. cleanlabel poisoning attacks are a type of backdoor poisoning attack that perturb only the features of the training data leaving the labels untouched, so as to make the poison less detectable. in the other end of the spectrum are label poisoning attacks that perturb or flip the training data labels.why compute provably nearly-optimal poison attacks? a limitation with current poisoning methods is that it is not possible to adversarially perturb an input so that the performance of any algorithm is negatively affected. moreover, it is generally not clear how to provably compare different poisoning methods. 
we seek to address these limitations of adversarial machine learning research using tools from computational geometry.specifically, we study the following optimization problem: given some data set, x, compute a small perturbation of x, so that the performance of a specific classifier deteriorates as much as possible. an efficient solution to this optimal poisoning problem can be used to compare the performance of different classification algorithms, as follows. suppose we want to compare the performance of a collection of classification algorithms, a 1 , ..., a t , on some fixed data set x, in the presence of a poisoning attack that produces a bounded perturbation, x ′ , of x. ideally, we would like to have provable worst-case guarantees on the robustness of a 1 , ..., a t . however, such results are often hard to prove rigorously, and thus many existing methods lack such guarantees. since the poisoned data set x ′ is unknown, we cannot simply run a 1 , ..., a t on x ′ and compare the results. instead, our method allows us to compute from x some poisoned data set, x ′′ , which is provably a nearly-optimal poison against the specific classification task. the classifier works as follows: given a set of labeled points, x ⊂ r d , and some unlabeled p ∈ r d , we can compute a label for p by taking the most frequently occurring label in the multiset of labels of the k nearest neighbors of p in x. on input x ⊂ r d , with |x| = n, and m ∈ n, algorithm poison-k-nn computes a m-poison against k-nn, with expected corruption opt m (x) -εn, in time n • 2 2 o(d+k/ε) , where opt m (x) denotes the maximum corruption of any m-poison. , where opt m (x train , x test ) denotes the maximum corruption incurred on x test when all neighbors are chosen from x train , of any m-poison on x train . diam m (x ′ ) = sup x,y∈x ′ ρ(x, y); we also write diam(x ′ ) when m is clear from the context. then, for each x ∈ x, we assign x to the cluster p ⌈α+log c (r(x))⌉ (x).for any x ∈ x, i ∈ n, let e x,i be the event that the cluster of p containing x, does not contain the i-nearest neighbors of x in x; i. let also e x be the event that the cluster of p containing x, does not contain all of the k-nearest neighbors of x in x; i., where opt m (x train , x test ) denotes the maximum corruption incurred on x test when all neighbors are chosen from x train , of any m-poison on x train . it follows that the interior of ball(p, γ(p)) contains at most k points in x train (it contains at most k -1 points in x train if p / ∈ x train ). let p be the random partition of x train obtained by applying lemma 2, setting γ : x → r ≥0 where r = bγ, with b = 2kβ m /ε, and c = 2 4k/ε .• the γ i (p) function is only defined with respect to the points within x train • the random partition in step 1 is only on x train • the corruption in step 2 is measured only on x test for the test points that fall within the same cluster.for any finite y ⊂ r d , and any integer i ≥ 0, let opt i (x train , x test ) be the maximum corruption that can be achieved for x train , x test with a poison set size of at most i.algorithm poison-k-nn for k-nn poisoning with train-test: the input consists of x train and x test ⊂ r d , with |x train | = n train , |x test | = n test and a map label : x → {1, 2} that maps x train and x test to their corresponding labels.for any x ∈ x test , i ∈ n, let e x,i be the event that the cluster of p containing x, does not contain the i th -nearest neighbor of x in x train ; i. 
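To make the corruption objective concrete, the helper below measures how many test points change their k-nn prediction once a candidate poison set is added to the training data. It is a brute-force evaluator of the quantity being maximized, not an implementation of the clustering-based poison-k-nn algorithm; the toy data and the heuristic poison placement are assumptions made purely for illustration.

```python
import numpy as np

def knn_predict(x_train, y_train, x_query, k):
    """Majority label among the k nearest training points (Euclidean, binary labels)."""
    d = np.linalg.norm(x_train[None, :, :] - x_query[:, None, :], axis=-1)
    nearest = np.argsort(d, axis=1)[:, :k]
    votes = y_train[nearest]
    return (votes.mean(axis=1) >= 0.5).astype(int)

def corruption(x_train, y_train, x_test, x_poison, y_poison, k):
    """Number of test points whose k-NN prediction changes once the poison is added."""
    clean = knn_predict(x_train, y_train, x_test, k)
    x_aug = np.vstack([x_train, x_poison])
    y_aug = np.concatenate([y_train, y_poison])
    poisoned = knn_predict(x_aug, y_aug, x_test, k)
    return int((clean != poisoned).sum())

rng = np.random.default_rng(0)
x_train = rng.standard_normal((200, 2))
y_train = (x_train[:, 0] > 0).astype(int)
x_test = rng.standard_normal((50, 2))

# a crude m-point poison: points placed near the decision boundary with flipped labels
m = 20
x_poison = np.column_stack([rng.normal(0.0, 0.1, m), rng.standard_normal(m)])
y_poison = (x_poison[:, 0] <= 0).astype(int)

print(corruption(x_train, y_train, x_test, x_poison, y_poison, k=3), "of 50 test points flipped")
```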
let also e x be the event that the cluster of p containing x, does not contain all of the k-nearest neighbors of x in x train ; i. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/682.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/682.txt new file mode 100644 index 0000000000000000000000000000000000000000..98e6b92f3df013f996ba677b07cffaac76c2a62e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/682.txt @@ -0,0 +1 @@ +the gibbs algorithm (ga) randomly selects a model by sampling the gibbs probability measure, which is the unique solution to the empirical risk minimization (erm) problem with relative entropy regularization (erm-rer) . the input of the ga is twofold. it requires a number of labeled patterns (datasets); and a prior on the set of models in the form of a σ-measure, e.g., the lebesgue measure, the counting measure, or a probability measure. one of the main features of the ga is that it does not require an assumption on the statistical properties of the datasets - . nonetheless, the generalization capabilities of the gibbs algorithm are often characterized by the generalization error, for which statistical assumptions on the datasets must be considered, e.g., training, and unseen datasets are identically distributed. when the prior on the set of models is a probability measure, a closedform expression for the generalization error is presented in , while upper bounds have been derived in - , and references therein.in a more general setting, when the prior on the set of models is a σ-measure, the generalization capabilities of the ga have been studied in , , and , using the sensitivity of the empirical risk to deviations from the gibbs probability measure to another probability measure. this method does not require any statistical assumptions on the datasets and is chosen as the workhorse of the present analysis.this work is supported by the inria exploratory action -information and decision making (aex idem) and in part by a grant from the c3.ai digital transformation institute.the main motivation of this work is to break away from the implicit assumption in existing literature that all training datasets are drawn from the same probability measure and thus, can be aggregated to improve the generalization capabilities of a given ga. in practical settings, training data might be acquired from multiple sources that might be subject to different impairments during data acquisition, data storage and data transmission. for instance, consider a ga trained upon a particular dataset and assume that a new dataset from a different source is made available. hence, the following questions arise concerning the generalization capabilities of such a ga: would such a ga generalize over the new dataset? should the new dataset be aggregated to the previous dataset to build a new ga in the aim of improving generalization? how does the ga trained upon the existing dataset compare in terms of generalization with respect to a new ga trained upon the new dataset? the answers to such questions are far from trivial. 
one of the main challenges to answer such questions stems from the fact that the probability measures generating each of those datasets are unknown and potentially different due to a variety of impairments.this paper introduces a closed-form expression for the difference of the expected empirical risk on a given dataset induced by a ga trained upon this dataset and the one induced by an alternative algorithm (another probability measure). this quantity was coined sensitivity of the ga algorithm in and is shown to be central to tackling the questions above. this is in part due to the fact that it allows studying the generalization capabilities of gas based on actual datasets, which disengages from the assumption that both training and unseen data follow the same probability distribution. more specifically, by studying the sensitivity, closed-form expressions for the difference between training error and test error can be obtained. these expressions lead to a clearer understanding of the roles of the size of datasets chosen for training and testing, as well as the parameters of the gas. as a byproduct, the difference between the expected empirical risk on the aggregation of two datasets induced by two gas trained upon the constituent datasets is characterized. similarly, the difference between the expected empirical risk on one of the constituent datasets induced by two gas trained upon the aggregated dataset and the constituent dataset is also characterized. these explicit expressions allow comparing two gas trained upon different datasets, which is relevant under learning paradigms such as federated learning .in a more general setting, when the prior on the set of models is a σ-measure, the generalization capabilities of the ga have been studied in,, and, using the sensitivity of the empirical risk to deviations from the gibbs probability measure to another probability measure. hence, the following questions arise concerning the generalization capabilities of such a ga: would such a ga generalize over the new dataset? should the new dataset be aggregated to the previous dataset to build a new ga in the aim of improving generalization? how does the ga trained upon the existing dataset compare in terms of generalization with respect to a new ga trained upon the new dataset? the answers to such questions are far from trivial.this paper introduces a closed-form expression for the difference of the expected empirical risk on a given dataset induced by a ga trained upon this dataset and the one induced by an alternative algorithm (another probability measure). similarly, the difference between the expected empirical risk on one of the constituent datasets induced by two gas trained upon the aggregated dataset and the constituent dataset is also characterized.lemma 1: given a dataset z ∈ (x × y) n and two probability measures p 1 and p 2 over the measurable space (m, b(m)), for all α ∈ , the function r z in(7)satisfies.theorem 1: given a σ-finite measure q over the measurable space (m, b(m)) and a probability measure p ∈ △ q (m, b(m)), it holds that for all datasets z ∈ (x × y) n and for all λ ∈ k q,z , with k q,z in(12),.proof: the proof uses the fact that, under the assumption in(10), the probability measure p (q,λ) θ|z=z in (13) is mutually absolutely continuous with respect to the σ-finite measure q; see for instance.datasets z 1 and z 2 are referred to as constituent datasets, whereas, the dataset z 0 is referred to as the aggregated dataset. 
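as a concrete illustration of the ga described above, the sketch below samples a model from a gibbs measure over a finite model set with the counting measure as prior, and evaluates the expected empirical risk it induces; the exp(-n·L_z(θ)/λ) parametrization, the squared loss, and the names are assumptions made for this toy example, not the paper's exact formulation.

# Hedged sketch: a Gibbs measure over a finite set of models with the counting
# measure as prior. The exp(-n * L_z(theta) / lam) form and the name `lam`
# are illustrative assumptions.
import numpy as np

def empirical_risk(theta, z):
    """Mean squared error of a scalar predictor theta on dataset z = (x, y)."""
    x, y = z
    return float(np.mean((theta * x - y) ** 2))

def gibbs_measure(models, z, lam):
    """Probability of each model under the ERM-RER solution (Gibbs measure)."""
    n = len(z[0])
    risks = np.array([empirical_risk(th, z) for th in models])
    logits = -n * risks / lam
    logits -= logits.max()                      # numerical stability
    p = np.exp(logits)
    return p / p.sum()

def gibbs_algorithm(models, z, lam, rng):
    """The GA: randomly select one model by sampling the Gibbs measure."""
    return rng.choice(models, p=gibbs_measure(models, z, lam))

rng = np.random.default_rng(0)
x = rng.normal(size=100)
y = 2.0 * x + 0.1 * rng.normal(size=100)
models = np.linspace(-3, 3, 61)                 # finite model set
probs = gibbs_measure(models, (x, y), lam=1.0)
expected_risk = float(probs @ np.array([empirical_risk(t, (x, y)) for t in models]))
theta = gibbs_algorithm(models, (x, y), lam=1.0, rng=rng)
print(theta, expected_risk)                     # sampled model and its induced expected empirical risk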
for all i ∈ {0, 1, 2}, the empirical risk function in (4) and the expected empirical risk function in(7)over dataset z i are denoted by l zi and r zi , respectively.moreover, the expected empirical risk functions r z 0 , r z 1 , and r z 2 , defined in(7), satisfy for all σ-finite measures p ∈ △(m, b(m)),.the expected empirical risk induced by ga i on the training dataset z i is the training expected empirical risk, which is denoted by. alternatively, the expected empirical risk induced by ga i on the test dataset z j is the test expected empirical risk, which is denoted by r z j ä p (qi,λi) θ|z=zi ä and often referred to as the test error. proof: the proof is immediate from theorem 1 by noticing that for all i ∈ {1, 2} and for all j ∈ {1, 2} \ {i}, the differences r zi a reasonable figure of merit to compare two machine learning algorithms trained upon two different training datasets is the difference between the expected empirical risk they induce upon the aggregation of their training datasets. in this case, a figure of merit to validate such an approach is to study the difference of the expected empirical risk induced on the aggregated dataset by ga 0 and a convex combination of ga 1 and ga 2 . hence, a validation method for ga 0 is based on the expected empirical risk induced by ga 0 on a constituent dataset z i , with i ∈ {1, 2}, which is denoted by. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/683.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/683.txt new file mode 100644 index 0000000000000000000000000000000000000000..41cb3233081c6f58097528db4f12442d9f87b7fa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/683.txt @@ -0,0 +1 @@ +in many classification datasets, some features are predictive of the label but are not causally related. it is often said that these features are spuriously correlated with the label, as their correlation might not hold for data collected in another environment. for example, suppose we collect typical images of cows and camels and form a binary classification task. in that case, we will find that the background is correlated with the label, as cows are often photographed in barns or green pastures, while camels are often photographed in deserts (beery et al., 2018). however, this correlation will be spurious as the background information is not causally related to the label, and we can easily make another dataset of cows and camels in which this correlation does not hold.it is well-established that neural networks are susceptible to spurious correlations (torralba & efros, 2011;ribeiro et al., 2016;gururangan et al., 2018;zech et al., 2018;mc-coy et al., 2019;geirhos et al., 2019;2020;xiao et al., 2021). in such cases, neural networks learn representations that capture spurious features and make predictions that employ them. many approaches have been proposed for learning representations that do not capture spurious features (muandet et al., 2013;sun & saenko, 2016;ganin et al., 2016;wang et al., 2019b;a;li et al., 2018;arjovsky et al., 2019;zhao et al., 2020;lu et al., 2022). some methods are tailored against specific spurious correlations (e.g., texture); some require specifying a categorical spurious feature, while others require data collected from multiple labeled environments. nevertheless, to our best knowledge, none of such representation learning methods consistently outperform standard empirical risk minimization (gulrajani & lopez-paz, 2021;koh et al., 2021). 
this is partly because spurious features are often easier to learn and get learned early in training (shah et al., 2020;nam et al., 2020;hermann & lampinen, 2020;pezeshki et al., 2021).besides the unsatisfactory results, the approach mentioned above also goes against one of the main techniques of deep learning -using pretrained representations instead of learning from scratch. recently, a few works indicated a large potential in fixing pretrained representations and focusing on training a linear classifier on top of it that does not rely on spurious correlations. in particular, galstyan et al. (2022) find that a significant contribution to the out-ofdomain generalization error comes from the classification head and call for designing better methods of training the classification head. menon et al. (2021) propose to retrain the classification head on training data with down-sampled majority groups. kirichenko et al. (2023); izmailov et al. (2022);and shi et al. (2023) find that after training on data with spurious correlations, keeping the representations fixed and retraining the classification head on small unbiased data gives state-of-the-art results. when no information about spurious features is available, mehta et al. (2022) show that one can still get good results by using embed-dings from a large pretrained vision model. interestingly, representations learned by a vision transformer (dosovitskiy et al., 2021) seem to lead to more robust classification heads (ghosal et al., 2022). overall, these findings indicate that more research is needed to understand better how spurious features are represented and design better methods of training classification heads on representations that capture spurious features.we consider the waterbirds dataset (sagawa* et al., 2020), which is landbird vs waterbird image classification task where the background is spuriously correlated with the label. namely, most landbird images have land in their background, while most waterbird images have water in their background. we consider fixed pretrained representations learned through supervised or self-supervised learning. we investigate whether one can remove the spurious features from the representations in two settings. in the former (and more prevalent setting), one has access to the value of the binary spurious feature. in the latter, we also have access to per-example image masks indicating which parts of images correspond the spurious feature.interestingly, even with full knowledge of the spurious feature, it is not straightforward to remove it. while we find that representations are axis-aligned to a certain degree, the extent of alignment is not enough to remove spurious features by removing individual representation coordinates. since both the spurious feature and the label can be predicted well from the representation with a linear layer, we hypothesize that the entanglement of core and spurious features is linear and can be reversed with a linear transformation. for this we propose a linear autoencoder to split the representation into three parts corresponding to the class label, the spurious feature, and other features not related to the former two but required for reconstruction. importantly, in contrast to existing approaches, we do not enforce independence of the first and second parts on a biased training set. 
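the group-wise linear autoencoder elaborated in the next sentences can be sketched as follows: a linear encoder splits the pretrained representation z into z_y, z_c, z_n, two linear heads supervise z_y with the label and z_c with the spurious attribute, and a linear decoder reconstructs z; the dimensions, loss weights, and the group-balanced toy batch are illustrative assumptions.

# Hedged sketch of the linear autoencoder described in this excerpt: the encoder
# splits z into (z_y, z_c, z_n); w_y and w_c supervise z_y / z_c with the label
# and the spurious attribute; the decoder reconstructs z. Sizes are assumptions.
import torch
import torch.nn as nn

class SplitLinearAE(nn.Module):
    def __init__(self, d, d_y=64, d_c=64, d_n=128, n_classes=2, n_spurious=2):
        super().__init__()
        self.dims = [d_y, d_c, d_n]
        self.encoder = nn.Linear(d, d_y + d_c + d_n)
        self.decoder = nn.Linear(d_y + d_c + d_n, d)
        self.w_y = nn.Linear(d_y, n_classes)     # predicts the label from z_y
        self.w_c = nn.Linear(d_c, n_spurious)    # predicts the spurious feature from z_c

    def forward(self, z):
        h = self.encoder(z)
        z_y, z_c, z_n = torch.split(h, self.dims, dim=-1)
        return self.w_y(z_y), self.w_c(z_c), self.decoder(h), (z_y, z_c, z_n)

def loss_fn(model, z, y, c, a=1.0, b=1.0):
    y_hat, c_hat, z_hat, _ = model(z)
    ce = nn.functional.cross_entropy
    return ce(y_hat, y) + a * ce(c_hat, c) + b * nn.functional.mse_loss(z_hat, z)

# toy step on a group-balanced ("upsampled") batch of pretrained representations
model = SplitLinearAE(d=512)
z = torch.randn(32, 512)
y = torch.randint(0, 2, (32,))                  # class label
c = torch.randint(0, 2, (32,))                  # spurious (background) attribute
loss_fn(model, z, y, c).backward()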
instead, we enforce independence on an upsampled variant of the training set.we find that a linear classifier trained on the core features of the encoding performs better than the standard approach but does not reach the performance of a classifier trained on an unbiased set. we demonstrate that this gap can be closed by performing additional feature selection within the core features. (2023);izmailov et al. since both the spurious feature and the label can be predicted well from the representation with a linear layer, we hypothesize that the entanglement of core and spurious features is linear and can be reversed with a linear transformation.we design a simple autoencoder where the linear encoder maps input z to three vectors: z y , z c and z n . we force z y and z c to contain information about the label and the background, respectively. we do this by adding another linear layer on top of z y and z c that predict ŷ = w y z y and ĉ = w c z c which are supervised by the corresponding signals. the linear decoder takes the concatenation of z y , z c and z n and reconstructs ẑ which should be close to the original z. following(jaiswal et al. this also justifies the minimization of the mutual information between z y and z c , as they are correlated in the original training distribution. the sum of the dimensions of the three vectors z y , z c and z n matches d for each backbone. the linear model trained on top of the concatenation of the three vectors is denoted by gwae(z, g), where gwae denotes the linear encoder of the group-wise trained autoencoder, and g refers to the group information required for the training. for all backbones we see that z y gathers core features and the linear models trained on them have significantly better worst group accuracy. in case of resnet-50 and dinov2, the results are even better than the ones by captum, which means that the autoencoder managed to isolate core features much better than it was possible by simply removing neurons in the original z.furthermore, we apply captum on top of the z y neurons to see whether we can still identify and remove spurious features in z y (the orange plot in figure1). we find that, indeed, training linear models on the n principal components of z y still improves the worst group accuracy for resnet and regnety. we note that applying pca directly on z does not help to identify core features: the principal components of the original space usually contain spurious features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/684.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/684.txt new file mode 100644 index 0000000000000000000000000000000000000000..59c0746873f5446140cf1179d5179ef747dd110a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/684.txt @@ -0,0 +1 @@ +stochastic optimization in the zeroth order (gradient-free) setting has attracted significant attention in recent decades. it naturally arises in various applications such as autonomous driving , ai gaming , robotics , healthcare , and education . this setting is particularly important when the gradient of the objective function cannot be directly evaluated or is expensive.an important case of interest in the zeroth order setting is the bandit optimization of smooth and strongly-convex functions. 
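a generic two-point (gradient-free) method for this zeroth-order setting looks like the sketch below; it is a standard baseline for smooth, strongly convex objectives, not the hessian-dependent algorithm analyzed in the excerpt that continues after it, and the step sizes and clipping bound are assumptions.

# Generic two-point zeroth-order descent for a noisy smooth strongly convex f.
# A standard baseline, not the paper's Hessian-dependent algorithm; step sizes
# and the clipping bound M are illustrative assumptions.
import numpy as np

def two_point_zeroth_order(f, x0, T=2000, delta=1e-2, eta=1e-1, M=10.0, seed=0):
    rng = np.random.default_rng(seed)
    x = np.array(x0, dtype=float)
    d = x.size
    for t in range(1, T + 1):
        u = rng.normal(size=d)
        u /= np.linalg.norm(u)
        y_plus = f(x + delta * u)                 # one noisy query at x + delta*u
        y_minus = f(x - delta * u)                # one noisy query at x - delta*u
        diff = np.clip(y_plus - y_minus, -M, M)   # project the difference to a bounded interval
        g = (d / (2.0 * delta)) * diff * u        # two-point gradient estimate
        x -= (eta / t) * g                        # decaying step size
    return x

# toy quadratic f(x) = (x - x*)^T A (x - x*) plus observation noise
A = np.diag([1.0, 5.0])
x_star = np.array([0.3, -0.7])
noise = np.random.default_rng(1)
f = lambda v: float((v - x_star) @ A @ (v - x_star) + 0.01 * noise.normal())
print(two_point_zeroth_order(f, x0=[2.0, 2.0]))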
although there are ample results regarding the minimax rates of this problem, little is known about how its complexity depends on the geometry of the objective function f near the global optimum x*, as specified by the quadratic approximation. • is there an algorithm that universally achieves the optimal instance-dependent bounds for all quadratic functions, but without knowledge of the hessian? as our main contributions, we fully address the above questions as follows. first, we establish tight hessian-dependent upper and lower bounds on the simple regret. our bounds indicate an asymptotic sample complexity of tr^2(a^(-1/2))/(2ε) to achieve ε accuracy. this covers the minimax lower bound of ω(d^2/ε). second, we prove the existence of a hessian-independent algorithm with a matching upper bound of o(tr^2(a^(-1/2))/ε). thus, we complete the theory of zeroth-order bandit optimization on quadratic functions. related works beyond linear or convex bandit optimization: compared to their linear/convex counterparts, much less is known about finding an optimal bandit under a nonlinear/non-convex reward function. a natural next step beyond linear bandits is to look at quadratic reward functions, as studied in this paper and some prior work (see references on bandit pca, its rank-1 special cases, bilinear, low-rank linear, and some other settings). when getting beyond quadratic losses, prior work on non-convex settings mainly focuses on achieving ε-stationary points instead of ε-optimal reward, except for certain specific settings. intuitively, our proof shows that an allocation of at least r_k ≥ ω(λ_k^(-2) x_k^(-2)) energy is required to correctly estimate each x_k with θ(1) probability. note that for any entry that is incorrectly estimated, a penalty of ω(λ_k x_k^2) is applied to the simple regret. specifically, the mmse estimator is given by x̂_{k,t} = |x_k| tanh(l_k/2), and we have the following lower bound for each cost entry, with e_{k*} being the first k* vectors. due to the convexity of the set b, we have f(x̂) ≥ f(x_t) with probability 1 (see appendix a for a proof). formally, for any fixed algorithm a, we can consider its expected simple regret over an extended class of objective functions where f is defined by equation (1) but x_0 is chosen from the entire euclidean space, where the first term on the rhs is obtained by taking the supremum over all objective functions that satisfy x_0 ∈ b, and the second term is obtained from the adversarial choice over all estimators for x_0 ∉ b. for k ← 1 to t do: let y+, y− be samples of f at x_0 and x_1, respectively; compute the projection of the difference y+ − y− to the interval … ≤ m + |e|; we have the following inequality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/685.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/685.txt new file mode 100644 index 0000000000000000000000000000000000000000..dd18de78454543888a8bf110b4c2618072991302 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/685.txt @@ -0,0 +1 @@ +machine learning has exhibited impressive achievements in diverse fields, including healthcare. the complexity of these models, however, creates challenges for their adoption in healthcare. to address this issue, explainable ai (xai) has been introduced, enabling machine learning models to provide explanations for their predictions.
model explainability is essential for gaining a deeper understanding of a model's decision-making process . in critical domains such as sepsis detection in the icu, where incorrect predictions can result in fatal consequences, the reliability of these models is of utmost significance. this paper aims to tackle a specific aspect of the interpretability challenge associated with these models, specifically the identification and explanation of scenarios in which black box predictive models fail or exhibit unexpected performance.examining instances in which machine learning models exhibit deviations from their usual performance holds significant importance. these insights empower decision-makers to exercise caution in deploying models in situations where their predictions are prone to errors, thereby mitigating potential adverse consequences. previous research endeavors have primarily centered on assessing the overall performance of these models through the adoption of evaluation metrics and methodologies aimed at gauging their reliability . w. duivesteijn et al. present an evaluation method that assesses the performance of a classifier, highlighting subspaces where the classifier excels or struggles in classification tasks, however, the method's applicability is limited to binary datasets and lacks model agnosticism. l. torgo et al. propose approaches that aim to offer interpretable descriptions of expected performance; however, the proposed visualization may not be well-suited when dealing with a high number of features. this paper provides an analysis by focusing on the identification of specific regions where the models exhibit significant deviations from their usual performance. the identification of these regions empowers healthcare practitioners to make informed decisions by exercising caution when relying on the model. additionally, these findings offer valuable insights that can guide the development of potential strategies aimed at improving and refining the model's overall performance .to achieve this, we propose an analytical approach that combines visual techniques to identify regions in the input space where the models' performance significantly diverges from their average performance. this visualization empowers users to grasp how various values of a particular predictor impact the models' performance. this paper aims to tackle a specific aspect of the interpretability challenge associated with these models, specifically the identification and explanation of scenarios in which black box predictive models fail or exhibit unexpected performance.examining instances in which machine learning models exhibit deviations from their usual performance holds significant importance. previous research endeavors have primarily centered on assessing the overall performance of these models through the adoption of evaluation metrics and methodologies aimed at gauging their reliability.present an evaluation method that assesses the performance of a classifier, highlighting subspaces where the classifier excels or struggles in classification tasks, however, the method's applicability is limited to binary datasets and lacks model agnosticism. this paper provides an analysis by focusing on the identification of specific regions where the models exhibit significant deviations from their usual performance. 
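one concrete way to locate such regions, close to the lime-based analysis described above: explain the model's misclassified test instances with lime and count the feature conditions that recur across those explanations; the dataset, model, and top-feature aggregation below are placeholders, not the paper's exact pipeline.

# Hedged sketch: use LIME explanations of misclassified instances to surface
# feature conditions that recur where a black-box model performs poorly.
from collections import Counter
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from lime.lime_tabular import LimeTabularExplainer

data = load_breast_cancer()
X_tr, X_te, y_tr, y_te = train_test_split(data.data, data.target, random_state=0)
model = RandomForestClassifier(random_state=0).fit(X_tr, y_tr)

explainer = LimeTabularExplainer(
    X_tr, feature_names=list(data.feature_names),
    class_names=list(data.target_names), mode="classification")

# explain only the instances the model gets wrong
wrong = np.where(model.predict(X_te) != y_te)[0]
conditions = Counter()
for i in wrong:
    exp = explainer.explain_instance(X_te[i], model.predict_proba, num_features=5)
    for rule, _weight in exp.as_list():        # e.g. "mean radius > 15.10"
        conditions[rule] += 1

# conditions that recur across errors describe regions of poor performance
print(conditions.most_common(10))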
additionally, these findings offer valuable insights that can guide the development of potential strategies aimed at improving and refining the model's overall performance.to achieve this, we propose an analytical approach that combines visual techniques to identify regions in the input space where the models' performance significantly diverges from their average performance.to gain a deeper understanding of the model's performance and identify areas where it exhibits suboptimal results, we applied the proposed method.by identifying and examining these recurring features, we revealed specific regions where the classifier exhibited poor performance.in this study, we utilized lime to identify regions where a black-box model exhibits poor performance. by analyzing the model's fit to the training data, we gain insights into its performance and identify areas where it inadequately represents the underlying patterns in the feature space.when evaluating the model's generalization error on test data, we pinpoint specific regions within the feature space that contribute to erroneous predictions for unseen data.our study contributes to the understanding of machine learning models' performance by introducing a modified visualization approach that identifies regions of poor performance. by leveraging lime for the rule extraction method, we effectively pinpointed specific features responsible for misclassifications, allowing us to identify recurrent conditions associated with the classifier's suboptimal performance. in light of the study's insights, our future work aims to enhance the model's performance by making specific modifications to the model architecture, feature engineering, and training strategies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/686.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/686.txt new file mode 100644 index 0000000000000000000000000000000000000000..58cfe547cb211ea4f830404c600fd0a8fa97bf37 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/686.txt @@ -0,0 +1 @@ +intelligent agents ought to be able to complete complex, long horizon tasks and generalize to new scenarios. unfortunately, policies learned by modern deep-learning techniques often struggle to acquire either of these abilities. this is particularly true in planning regimes where multiple, complex, steps must be completed correctly in sequence to complete a task. realistic constraints, such as partial observability, the underspecification of goals, or the sparse reward nature of many planning problems make learning even harder. reinforcement learning approaches often struggle to effectively learn policies and require billions of environment interactions to produce effective solutions (wijmans et al. 2019;parisotto et al. 2020). imitation learning is an alternative approach based on learning from expert data, but can still require millions of demonstrations to learn effective planners (chevalier-boisvert et al. 2019). such high data constraints make learning difficult and expensive.unfortunately the aforementioned issues with behavior learning are only exacerbated in the low data regime. first, with limited training data agents are less likely to act perfectly at each environment step, leading to small errors that compound overtime in the offline setting. ultimately, this leads to sub-par performance over long horizons that can usually only be improved by carefully collecting additional expert data (ross, gordon, and bagnell 2011). 
second, deep-learning based policies are more likely to overfit small training datasets, making them unable to generalize to new test-time scenarios. on the other hand, humans have the remarkable ability to interpolate previous knowledge and solve unseen long-horizon tasks. after observing an environment, we might deduce plan or sequence of the steps to follow to complete our objective. however, imitation learning agents are not required to construct plans by default -they are usually trained to only output action sequences given seen observations. this begs the question: how can we make agents reason better in long-horizon tasks?an attractive solution lies in language instructions, the same medium humans use for mental planning (gleitman and papafragou 2005). several prior works directly provide agents with language instructions to follow (anderson et al. 2018;shridhar et al. 2020;chen et al. 2019). unfortunately, such approaches require the specification of exhaustive instructions at test time for systems to function. a truly intelligent agent ought to be able to devise its own plan and execute it, with only a handful of demonstrations. we propose improving policy learning in the low-data regime by having agents predict planning instructions in addition to their immediate next action. as we do not input instructions to the policy, we can plan without their specification at test time. though prior works have used hierarchical structures that generate their own instructions to condition on (chen, gupta, and marino 2021;hu et al. 2019;jiang et al. 2019), we surprisingly find that just predicting language instructions is in itself a powerful objective to learn good representations for planning. teaching agents to output language instructions for completing tasks has two concrete benefits. first, it forces them to learn at a higher level of abstraction where generalization is easier. second, by outputting multi-step instructions agents explicitly consider the future. practically, we teach agents to output instructions by adding an auxiliary instruction prediction network to transformerbased policy networks, as in seq2seq translation (vaswani et al. 2017). our approach can be interpreted as translating observations or trajectories into instructions.we test our representation learning method in limited data arxiv:2306.12554v1 21 jun 2023settings and combinatorially complex enviornments. we find that in many settings higher performance can be attained by relabeling existing demonstrations with language instructions instead of collecting new ones, creating a new, scalable type of data collection for practitioners. furthermore, our method is conceptually simple and easy to implement. this work is the first to show that direct representation learning with language can accelerate imitation learning.to summarize, our contributions are as follows. first, we introduce a method for training transformer based planning networks on paired demonstration and instruction data via an auxiliary instruction prediction loss. second, we test our objective in long-horizon planning based environments with limited data and find that it substantially outperforms contemporary approaches. finally, we analyze the scenarios in which predicting instructions provides fruitful training signal, concluding that instruction modeling is a valuable objective when tasks are sufficiently complex. a truly intelligent agent ought to be able to devise its own plan and execute it, with only a handful of demonstrations. 2020;hill et al. 
2020;akakzia et al. 2021;goyal, mooney, and niekum 2021;shridhar, manuelli, and fox 2021), or for games(chevalier-boisvert et al. 2017), and sometimes even with hindsight relabeling(chan et al. instead of directly using instructions as policy inputs, other works use language instructions as an intermediary representations for hierarchical policies. language goals describe the desired final state of the environment environment, specifying what to do, whereas language instructions communicate how an agent should reach the desired state in a step-by-step manner. as in behavior cloning we predict actions from latents z using a policy network π ϕ , but we additionally use a language decoder g ψ to predict the current instruction from z.., z t ), and the language model is g ψ (x (i) |z 1 , . causal masking similar to that in(radford et al.we seek to answer the following questions: how effective is instruction modeling loss? how does instruction modeling scale with both data and instruction annotations? what architecture choices are important? and finally, when is instruction modeling a fruitful objective?. this is consistent with results found inchen et al. in babyai, instruction modeling does not appear to significantly help with the smallest number of demos, however, after twelve and a half thousand demonstrations that we find that policy performance with language scales almost linearly with data before it experiences diminishing returns at two-hundred thousand demonstrations.3% to 50. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/687.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/687.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c1cac9f2f14341d782a439a5833121f047a03b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/687.txt @@ -0,0 +1 @@ +resilience is a critical key performance indicator (kpi) in the upcoming 6g networks. thus, in contrast to previous generations, the network is supposed to provide instant recovery from a disaster or failure. within this context, its scope extends beyond the traditional approaches of robustness and reliability , . this adds a necessary functionality which is reconfigurablity of the system to adapt to the erroneous events and automatically recover its required function in a timely manner without any human interaction . within this framework, it is most vital for 6g networks, especially the ones providing critical sensing services, to ensure resilience in connectivity, where the system is able to tolerate connectivity disruptions and failure among its components while maintaining the system's functionality. among these disruptions, failure in the elements of the antenna arrays is considered a challenge in resilient networks. this paper considers the aspect of resilience with respect to direction of arrival (doa) estimation using an antenna array with failed elements. this work is funded by the german federal ministry of education and research (bmbf) in the course of the 6gem research hub under grant 16kisk037.a very important application of antenna arrays is doa estimation, where the spatial spectra of the received electromagnetic waves determined, to estimate the location of possible targets within the array's field of view. optimizing the antenna locations in an array can offer high degrees of freedom (dof) through extending a co-array aperture . 
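returning to the imitation-learning excerpt above (before the antenna-array text begins): a minimal sketch of the auxiliary instruction-prediction objective it describes, where a causally masked transformer produces latents z_t that feed both an action head (π_ϕ) and a token-level language head (g_ψ), and the two cross-entropy losses are summed; the sizes, loss weight, and one-token-per-step toy tokenization are assumptions.

# Hedged sketch of behavior cloning with an auxiliary instruction-prediction loss.
import torch
import torch.nn as nn

class PolicyWithInstructionHead(nn.Module):
    def __init__(self, obs_dim, n_actions, vocab_size, d_model=128):
        super().__init__()
        self.embed = nn.Linear(obs_dim, d_model)
        layer = nn.TransformerEncoderLayer(d_model, nhead=4, batch_first=True)
        self.encoder = nn.TransformerEncoder(layer, num_layers=2)
        self.action_head = nn.Linear(d_model, n_actions)    # pi_phi: next-action logits
        self.instr_head = nn.Linear(d_model, vocab_size)    # g_psi: instruction-token logits

    def forward(self, obs):                                  # obs: (B, T, obs_dim)
        T = obs.size(1)
        causal = torch.triu(torch.ones(T, T, dtype=torch.bool, device=obs.device), diagonal=1)
        z = self.encoder(self.embed(obs), mask=causal)       # causally masked latents z_t
        return self.action_head(z), self.instr_head(z)

def bc_plus_instruction_loss(model, obs, actions, instr_tokens, w=1.0):
    a_logits, i_logits = model(obs)
    bc = nn.functional.cross_entropy(a_logits.flatten(0, 1), actions.flatten())
    lm = nn.functional.cross_entropy(i_logits.flatten(0, 1), instr_tokens.flatten())
    return bc + w * lm                                       # behavior cloning + auxiliary LM loss

model = PolicyWithInstructionHead(obs_dim=32, n_actions=6, vocab_size=100)
obs = torch.randn(8, 20, 32)
actions = torch.randint(0, 6, (8, 20))
instr = torch.randint(0, 100, (8, 20))                       # toy: one instruction token per step
bc_plus_instruction_loss(model, obs, actions, instr).backward()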
this is due to the fact that the augmented covariance matrix of a difference co-array explores the unique correlation lags of a certain array configuration, leading to resolving more paths than the actual number of physical antennas. sparse arrays of n physical antennas can resolve up to o(n 2 ) uncorrelated paths, unlike uniform linear arrays (ula) with the same number of antennas. several array configurations were proposed in the literature, such as minimum redundancy arrays (mra) , nested arrays , and coprime arrays . among the previously mentioned configurations, mra is known to have the best estimation performance, since it is characterized by the largest hole-free difference co-array . despite the high dof offered by the sparse array, such attractive property remains susceptible to sensor failures in the array . the same coarray structure that offers the o(n 2 ) resolution property, will be partially destroyed when one or more antennas fail, leading to a large loss in the dof compared to the ula of similar physical antennas. this in turn causes performance degradation and possible breakdown of the overall system functionality. the authors in compared the robustness of different coarrays configuration and structures, using the essentialness property, where an antenna element is said to be essential, if its deletion affects the corresponding difference coarray structure. furthermore, the authors deduced that sparse arrays offering large difference coarray, like mra for instance, are less robust to sensor failures since most physical antennas in the array are considered essential elements. towards this purpose, this paper uses sensor failures in sparse array radar as a showcase for the criticality of a resilient radar system. in general, many previous works have addressed the problem of missing data within doa estimation. in , the authors proposed a maximum likelihood estimator based on cholesky parametrization when some sensors fail to work before the measurement is complete. however, they used ula in their model requiring a specific sequential failure pattern, which might not be practical. in , the authors addressed the same problem using ula as well, however they categorized the failures into nonessential and essential failures in the antenna array. the former problem was solved by modeling the ula with failed sensors as a sparse array, hence they used the remaining sensors to form the difference coarray. in the latter problem, the constructed difference coarray has some holes, in this case, they exploited the low-rank property of the signal subspace, and through trace norm minimization, they were able to recover those missing holes. however, the complexity of their approach was in terms of o(n 3 ), hence when the number of sensors is large, the algorithm will be computationally expensive to run in real-time, which would contradict the resilience requirements in 6g systems. in , the authors addressed this problem by devising an algorithm, that assumes that all sensors are functioning in the first sampling period, then they use the devised covariance matrix of the complete array to calculate the covariance matrix of the incomplete one. this approach would be considered very beneficial in static environments. however, in a rapidly changing environment, as expected in 6g networks, the information of the complete measurements can be totally independent of the incomplete measurements in later periods, leading to false estimations. 
in this work, we address all these aspects, to fulfill the resilience framework defined earlier. we use a machine learning (ml) approach to mitigate the effect of sensor failures in a sparse array. we specifically address the worst-case scenario as defined in , in which failures can occur in more than one essential sensor in the mra configuration of a large aperture. as a result, the constructed coarray would consist of very small continuous ulas, leading to degraded performance. thus, we use ml to introduce a resilient array that is tolerant to failures in any sensor. in this paper, the following notation is used. for a given matrix a, we denote its transpose, hermitian transpose, and conjugate by a t , a h , and a * , respectively. vec(•) represents the vectorization operation, where, it stacks the columns of a matrix to convert a matrix to a vector. further, to represent real and imaginary components of a complex number, we use ℜ(•) and ℑ(•), respectively. the expected value is denoted by e(•). note that, we denote the kronecker product and the khatri-rao product by ⊗ and , respectively. an identity matrix with a dimension of m × m is denoted by i m . this is due to the fact that the augmented covariance matrix of a difference co-array explores the unique correlation lags of a certain array configuration, leading to resolving more paths than the actual number of physical antennas. furthermore, the authors deduced that sparse arrays offering large difference coarray, like mra for instance, are less robust to sensor failures since most physical antennas in the array are considered essential elements. the former problem was solved by modeling the ula with failed sensors as a sparse array, hence they used the remaining sensors to form the difference coarray. in, the authors addressed this problem by devising an algorithm, that assumes that all sensors are functioning in the first sampling period, then they use the devised covariance matrix of the complete array to calculate the covariance matrix of the incomplete one. this is due to the fact that the mra has the largest hole-free difference coarray compared to other sparse arrays, such as nested arrays and coprime arrays, etc. hence, if there is m 1 failed elements in the array, the corresponding received signal is modeled as if there is m 1 zero entries and similarly, the physical covariance matrix of the received signal r will have 2m m 1 -(m 1 ) 2 missing elements in the matrix, modeled with zeros as well. the proposed dnn will take the r sm matrix as an input and predict the ss augmented covariance matrix of a complete array r ss . however, the proposed hybrid and data-driven approaches were able to achieve similar performance as a full mra with only 8 physical sensors.5shows the average mse of doa estimation based on(5)for mra, mra with missing sensors and the proposed approaches. in addition, we have compared the hybrid approach using two and three hidden layers, to examine how ss can be beneficial to reduce the learning complexity compared to a data-driven approach. however, as the snr increases, the data-driven approach along with the hybrid approach with three hidden layers have similar behavior, where they approach the mra curve. furthermore, if we have prior knowledge that the system operates in the low snr regime db, we can utilize the hybrid approach which has similar performance as the data-driven approach. 
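a minimal sketch of the data-driven idea described above: zero the rows and columns of the covariance matrix that correspond to failed sensors, vectorize its real and imaginary parts, and train a small fully connected network to predict the covariance of the complete array; the layer sizes, loss, and synthetic training pair are illustrative assumptions, and the spatial-smoothing (ss) step is not shown.

# Hedged sketch: map the vectorized incomplete covariance (failed sensors zeroed)
# to the covariance of the complete array with a small fully connected network.
import torch
import torch.nn as nn

M = 10                       # number of physical sensors
D = 2 * M * M                # real + imaginary parts of an M x M covariance

class CovarianceCompletion(nn.Module):
    def __init__(self, d=D, hidden=512):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(d, hidden), nn.ReLU(),
            nn.Linear(hidden, hidden), nn.ReLU(),
            nn.Linear(hidden, d))

    def forward(self, r_incomplete):
        return self.net(r_incomplete)

def vectorize(R):
    """Stack real and imaginary parts of a complex covariance matrix."""
    return torch.cat([R.real.reshape(-1), R.imag.reshape(-1)])

# toy training pair: a random covariance and a copy with one failed sensor
A = torch.randn(M, M, dtype=torch.cfloat)
R_full = A @ A.conj().T
R_miss = R_full.clone()
R_miss[3, :] = 0
R_miss[:, 3] = 0             # sensor 3 failed -> its correlation lags are zeroed

model = CovarianceCompletion()
opt = torch.optim.Adam(model.parameters(), lr=1e-3)
for _ in range(5):           # a few illustrative steps
    pred = model(vectorize(R_miss))
    loss = nn.functional.mse_loss(pred, vectorize(R_full))
    opt.zero_grad(); loss.backward(); opt.step()
print(float(loss))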
however, the latter approach is better when doa accuracy matters the most since it provides the best performance compared to the original mra array. it can be noticed that the actual mra in high snr saturates with higher mse compared to the crb, this is due to the fact that crb depends on the snr, where it may approach zero as snr grows asymptotically large. moreover, the proposed approaches show better performance in doa estimation than the original sparse array with no failed elements in the low snr regimes, thanks to the denoising capabilities of the dnn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/688.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/688.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0b79673e72eb8de74eff77d3fe7e1f63e992712 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/688.txt @@ -0,0 +1 @@ +ml-based tools have the potential to significantly improve health and healthcare delivery , yet these methods are often "black-box" in nature. in this context, ml models often fail to elucidate which influential factors affect individual predictions as well as how changes to these observable inputs affect or modulate the outcome being predicted. this is an important deficiency because clinicians' understanding and confidence in using predictions to guide interventions in a complex process that is modelled is the key to trust. knowing the main contributing factors allows their evaluation in terms of their coherence with respect to the application task and its potential actionability could help in building trust in clinical settings . if the explanation cannot explain why a future problem will evolve, and thus justify treatment interventions then clinicians can rightly be sceptical. the lack of transparency in their function reduces their trustworthiness and is a barrier to their adoption for clinical decisionmaking. understanding of the methods should also be sufficient for clinician users to suspect when the tools are not working, or being used outside the purpose for which they were developed.to understand complex ml models, several explainable ai (xai) methods have been proposed in the literature . these methods can be categorised into (i) gradientbased e.g., smoothgrad , integrated gradients , deep taylor decomposition (dtd) ), layer-wise propagation , and (ii) perturbation-based e.g. lime , shap . however, there is little understanding of how applicable or useful they are in clinical settings or whether they should be significantly re-tailored or even novel xai methods developed.from clinicians' view, knowing the subset of features driving the model outputs is crucial as it allows them to compare the data-driven model decisions to their clinical judgment, especially important in case of a disagreement . tonekaboni et. al. also suggest that rigorous evaluation of explanations against the following criteria: (i) domain-appropriate representation, (ii) potential actionability and (iii) consistency could contribute to building trust in clinical settings. other recent studies report inconsistency between the explanations generated by various popular explanation methods. this variability implies that at least some generated explanations are incorrect. if incorrect, explanations at a patient level could be misleading and could lead to wrong decisions with dire consequences in applications such as healthcare. 
thus, to build trust, it is critical to investigate the conformity of explanations with clinical expert knowledge by evaluating them against the aforementioned criteria.this paper presents the results of the quantitative analysis of explanations at a patient (i.e. local explanations) and cohort (i.e. global explanations) level and discusses them in terms of their coherence with respect to the application task, impact on the workflow and consistency. to investigate the utility and the trustworthiness of the xai-generated explanations in the clinical context, we evaluate against criteria suggested by tonekaboni et al. . to analyse discordance between explanations, we employ agreement metrics proposed by krishna et. al. where appropriate. the analysis is performed on two emr datasets sourced from two major australian hospitals examining data-driven models predicting unexpected patient deterioration and hospital readmission after discharge .we used shap and dtd methods to generate explanations of tree-based and neural network (nn)-based ml models and patient and cohort level. these explanations were compared to each other and also to interpretations arising from the coefficients of the logistic regression (lr) model, the most accepted predictive model in healthcare applications. the explanations obtained for one of the two datasets used in this study (for which it was possible) were also benchmarked against the true causes recorded by the deployed data collection system in the study hospital. we discuss these results and their implications from clinicians' perspectives. the necessary criteria for having trustworthy explanations and how these guide the choice of intervention are also considered.also suggest that rigorous evaluation of explanations against the following criteria: (i) domain-appropriate representation, (ii) potential actionability and (iii) consistency could contribute to building trust in clinical settings.we used shap and dtd methods to generate explanations of tree-based and neural network (nn)-based ml models and patient and cohort level. the explanations obtained for one of the two datasets used in this study (for which it was possible) were also benchmarked against the true causes recorded by the deployed data collection system in the study hospital. global explanations) which are obtained by adding up the absolute values of all explanations obtained for individual patients and averaging them over the total number of considered patients.metrics for measuring the agreement between the explanations introduced inwere used in different experiments carried on to evaluate explanations intended to build trust in clinical settings.an intuitive measure of the agreement between the explanations obtained with different methods is the number of common features in sets of top features identified by different methods which will be denoted in the remainder of the paper as the feature agreement (fa).to evaluate explanations in the context of clinical predictive models against the criteria suggested inwe used explanations obtained for emr-based predictive models reported in,. whether they are informative and may impact the workflow by informing follow-up clinical workflow while at the same time being parsimonious and timely, explanations at the patient level were analysed, assessed and discussed. 
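the feature agreement (fa) measure described above can be computed directly from two attribution vectors as the overlap of their top-k features; the toy shap/dtd attribution values below are placeholders.

# Feature agreement (FA): fraction of common features among the top-k features,
# ranked by absolute attribution, of two explanations (e.g. SHAP vs DTD, or the
# same method across independent runs).
import numpy as np

def feature_agreement(attr_a, attr_b, k=5):
    top_a = set(np.argsort(-np.abs(attr_a))[:k])
    top_b = set(np.argsort(-np.abs(attr_b))[:k])
    return len(top_a & top_b) / k

# toy example: per-feature attributions for one patient from two methods
shap_vals = np.array([0.40, -0.10, 0.05, 0.30, -0.25, 0.01, 0.02, -0.15])
dtd_vals  = np.array([0.35,  0.02, 0.04, 0.28, -0.05, 0.20, 0.01, -0.18])
print(feature_agreement(shap_vals, dtd_vals, k=5))   # 0.8 for these placeholder values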
considering that different methods may produce different resultsand the discordance could impact clinicians' trust, explanation agreement between the methods at the patient has been also investigated and discussed.to examine the consistency of explanations in relation to variations in the design of underlying models (dnn and xgb), explanations calculated at the cohort level were analyzed for each of the five independent runs., sign), explanations obtained at the patient level for both xgb and dnn models using the shap method were analyzed across five independent runs. the aim of this study was to assess the explanations generated by popular xai methods used for explaining clinical predictive models. in the context of predicting patient deterioration and risk of readmission, explanations obtained at the patient level were recognised by clinical collaborators as actionable, i. when considering timeliness, both algorithms can be leveraged to provide real-time predictions and explanations that provide relevant complementary information that is well aligned with the current clinical workflows, allowing for early intervention and the prioritisation of clinical efforts for care planning. this paper is suggesting that if 1) sufficient disparate ml methods agree on influential relationships, 2) if observable and input factors can be modified and they change model output, and 3) if the adjusted ml model outcomes concur and agree with the realworld results, then we are one step closer to trustworthy explanations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/689.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/689.txt new file mode 100644 index 0000000000000000000000000000000000000000..1665e1cf67b77d5e5cfd1d9722a5805dcc274af2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/689.txt @@ -0,0 +1 @@ +federated learning (fl) (mcmahan et al., 2017) is a machine learning paradigm which utilizes multiple clients that collaborate to train models under the supervision of a central aggregator, usually referred to as the server. unlike traditional centralized methods which required the assemblage of data at the central server, fl methods require that only parameter updates are communicated in order to coordinate the fl training process, such that any number of clients can learn from the decentralized data without direct transfer of data. this allows for maintenance of local data privacy while also providing stronger model performances than what participants could have achieved locally. accordingly, fl has been adapted to many privacy-sensitive tasks, such as medical data classification (sheller et al., 2020). one of the main challenges in fl is the presence of data heterogeneity, where clients' local data distributions vary significantly from one another. this problem of data heterogeneity is most commonly addressed by personalized federated learning (pfl), which adapts clients models to local distributions. most techniques use full model personalization, where clients train both a personalized and a global model. however, this requires twice the computational cost of standard fl (dinh et al., 2020;li et al., 2021a) and is impractical in some settings. partial model personalization alleviates this by splitting clients into shared and personalized parameters (pillutla et al., 2022), where only the shared parameters are updated globally, but typically results clients that overfit to local distributions and reduced performance. 
additionally, the personalized architecture needs to be manually designed before training and cannot be adapted to specific settings.to address this, we propose fedselect, where we adapt both both architecture and parameters for each client to its local distribution during training. our method is based on the intuition that individual client models should choose only a necessary subset of shared parameters to encode global information for their local task, since it may not be optimal to reuse all global information from any full layer(s). we achieve this through the lottery ticket hypothesis (lth), originally proposed to prune models by finding optimal subnetworks, or lottery ticket networks (ltns) (frankle & carbin, 2019). however, instead of pruning the remaining parameters to zero, we reuse them as personalized parameters. we observe improved performance on cifar-10 compared to pruning-based lth-fl approaches (li et al., 2020;mugunthan et al., 2022) and other personalized fl approaches (liang et al., 2020;arivazhagan et al., 2019;collins et al., 2021;li et al., 2021a;oh et al., 2022) as well as reduced communication costs compared to partial model personalization.related works. partial model personalization seeks to improve the performance of client models by altering a subset of their structure or weights to better suit their local tasks. it also addresses the issue of "catastrophic forgetting" (mccloskey & cohen, 1989), an issue in personalized fl where global information is lost when fine-tuning a client model on its local distribution from a global initialization (kirkpatrick et al., 2017;pillutla et al., 2022). it does this by forcefully preserving a subset of parameters, u, to serve as a fixed global representation for all clients. however, existing methods introduced for partial model personalization (pillutla et al., 2022;collins et al., 2021) require handselected partitioning of these shared and local parameters, and choose u as only the input or output layers for their experiments.lotteryfl (li et al., 2020) learns a shared global model via fedavg (mcmahan et al., 2017) and personalizes client models by pruning the global model via the vanilla lth. importantly, parameters are pruned to zero according to their magnitude after an iteration of batched stochastic gradient updates. however, due to a low final pruning percentage in lotteryfl, the lottery tickets found for each client share many of the same parameters, and lack sufficient personalization (mugunthan et al., 2022)., 2017) is a machine learning paradigm which utilizes multiple clients that collaborate to train models under the supervision of a central aggregator, usually referred to as the server. one of the main challenges in fl is the presence of data heterogeneity, where clients' local data distributions vary significantly from one another. most techniques use full model personalization, where clients train both a personalized and a global model. our method is based on the intuition that individual client models should choose only a necessary subset of shared parameters to encode global information for their local task, since it may not be optimal to reuse all global information from any full layer(s). it also addresses the issue of "catastrophic forgetting"(mccloskey & cohen, 1989), an issue in personalized fl where global information is lost when fine-tuning a client model on its local distribution from a global initialization(kirkpatrick et al., 2022).lotteryfl(li et al. 
next, let θ denote the vector of parameters defined by the client model architecture.partial model personalization refers to the procedure in which model parameters are partitioned into shared and local parameters, denoted u and v, for averaging and local fine-tuning.we consequently define θ k = (u, v k ), where u denotes a set of shared global parameters, and v k the personalized client parameters.when training a client model on its local distribution during federated learning, parameters exhibiting minimal variation are considered suitable for freezing and encoding shared knowledge, while parameters demonstrating significant fluctuation are deemed optimal for fine-tuning on local distribution and encoding personalized knowledge.the set of parameters selected for personalization will be trained on local data and kept locally, while the rest of the parameters that are identified as frozen will be initialized as the global parameters, then locally updated, and finally submitted to the server for federated averaging to encode shared knowledge across clients.in fedselect, the input parameters c, θ 0 g , k, r, l, and p represent clients, the first global initialization, participation rate, gradltn iterations, and personalization rate, respectively. by the end of gradltn, v k is identified as the set of appropriate parameters for dedicated local fine-tuning via localalt; u is also updated in localalt and then averaged for global knowledge acquisition and retention.however, averaging among the shared parameters u k only occurs across parameters for which the corresponding mask entry in m k is 0. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/69.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/69.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d4e24ce9e72c06468af3157e51d58834f0d365e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/69.txt @@ -0,0 +1 @@ +many real world applications require message classification and regression, such as handling spam emails , ticket routing , article sentiment review and more. accurate message classification could improve critical scenarios such as in call centers (routing tickets based on topic) , alert systems (flagging highly important alert messages) , and categorizing incoming messages (automatically unclutter emails) . the main distinction between text and message classification is the availability of additional attributes, such as the sender information, timestamps, attached image, audio, affiliations, and more. new message classification contests often appear in the prominent platforms (i.e., kaggle ), showing how this topic is sought after. there are already many data-sets to explore in this field, but no clear winner algorithm that fits all scenarios with high accuracy, efficiency and simplicity (in terms of implementation and interpretation).a notable advancement in the field of nlp is the attention based transformers architecture . this family of methods excels in finding local connections between words, and better understanding the meaning of a sentence. a leading example is the bidirectional encoder representations from transformers (bert) as well as its variations , winning certain benchmarks . several packages, such as huggingface transformers , make such models accessible and easy to use as well as provide pre-trained versions. 
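for the federated-learning excerpt above: a sketch of selecting personalized versus shared parameters by how much each parameter moves during local training, and of averaging only the shared entries (mask entry 0) across clients; the magnitude-of-change selection rule, the personalization rate p, and the flat toy model are assumptions, since the exact gradltn/localalt procedures are not spelled out in this excerpt.

# Hedged sketch of partial model personalization by parameter selection:
# parameters that change most during local training are kept personal (mask = 1),
# the rest are treated as shared and federated-averaged.
import torch

def select_personal_mask(theta_before, theta_after, p=0.2):
    """Mark the top-p fraction of parameters (by absolute change) as personal."""
    delta = (theta_after - theta_before).abs()
    k = max(1, int(p * delta.numel()))
    threshold = delta.flatten().topk(k).values.min()
    return (delta >= threshold).float()          # 1 = personal, 0 = shared

def aggregate_shared(client_params, client_masks):
    """Average each entry only over the clients for which that entry is shared."""
    stacked = torch.stack(client_params)          # (num_clients, num_params)
    shared = 1.0 - torch.stack(client_masks)      # 1 where shared
    counts = shared.sum(dim=0).clamp(min=1.0)
    return (stacked * shared).sum(dim=0) / counts

# toy round with 3 clients and a flat 10-parameter "model"
theta_g = torch.zeros(10)
params, masks = [], []
for c in range(3):
    local = theta_g + 0.1 * torch.randn(10)       # stand-in for local training
    masks.append(select_personal_mask(theta_g, local, p=0.2))
    params.append(local)
theta_g = aggregate_shared(params, masks)
print(theta_g)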
in addition, one can use transfer learning to further train bert on their on data, creating a tailored model for the specific task at hand. bert, and often other transformer based models, are designed to handle text. they operate on the words of a given text by encoding them into tokens, and by the connections between the tokens they learn the context of sentences. this approach is limited, since sometimes more information can be extracted and used, not necessarily textual. throughout this paper we refer to this information as meta-data to distinguish it from the main stream of textual content (though one may recognize it as the core data, depending on the application). for example, a meta-data could be the time stamp of when the text was written, sent, published, etc. another example is the writer of the text, when dealing with a small list of writers of a corpus. there have been some attempts to incorporate these into bert models, for example by assigning artificial tokens for writers or for temporal segments (token per month for example) . this approach is limited since not all meta-data entries are suitable for encoding by tokenization. in the example of temporal segments, more segments introduce more tokens, leading to large computational resources consumption, and less segments cause loss of information. another approach is to concatenate the embeddings, created by the transformer module, with the outputs of an embedding module for the meta-data. in this approach, a transformer for the text is trained (using direct or transfer learning) on the text, and other separate modules (time series embedding, senders embeddings, etc.) are used to embed the meta-data. all the embeddings are then concatenated and used as inputs to a classification network. a drawback of this approach is that the internal network features are not trained from a combination of diffident input streams, and therefore avoid cross dependent features (e.g. the importance of an email is not only determined by its content, but also by who sent it, when, to whom else, attachments, etc.). in this approach, a transformer for the text is trained (using direct or transfer learning) on the text, and other separate modules (time series embedding, senders embeddings, etc. in addition, compared to the standard practices that suggest separate training and implementation of a "voting between classifiers" method, the proposed approach trains on the text and meta-data simultaneously, such that the language model (transformer) block weights are adjusted based on the information passing through the meta-data classification block weights and vice versa. we present results of the method with a main block based on a transformer that handles the text, and an additional block that handles the pre-processed meta-data inputs individually. this method can be extended to support more complex blocks, such as an advanced dl model for images, a temporal analysis block to extract information from temporal meta-data, additional transformer blocks for multiple text inputs (for example, subject and body of an email), categorical data, and more. 
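a minimal sketch of the two-block design described above: a transformer text block and a two-layer fully connected meta-data block are trained jointly, and their output representations are concatenated before a classification layer; a small trainable encoder stands in for a pretrained bert to keep the example self-contained, and all sizes and names are assumptions.

# Hedged sketch of joint training of a text block and a meta-data block.
import torch
import torch.nn as nn

class TextMetaClassifier(nn.Module):
    def __init__(self, vocab_size, meta_dim, n_classes, d_model=64):
        super().__init__()
        self.tok = nn.Embedding(vocab_size, d_model)
        layer = nn.TransformerEncoderLayer(d_model, nhead=4, batch_first=True)
        self.text_block = nn.TransformerEncoder(layer, num_layers=2)   # stand-in for BERT
        self.meta_block = nn.Sequential(          # two FC layers with ReLU, as described
            nn.Linear(meta_dim, meta_dim), nn.ReLU(),
            nn.Linear(meta_dim, n_classes), nn.ReLU())
        self.head = nn.Linear(d_model + n_classes, n_classes)

    def forward(self, tokens, meta):
        text_repr = self.text_block(self.tok(tokens)).mean(dim=1)      # pooled text embedding
        meta_repr = self.meta_block(meta)
        return self.head(torch.cat([text_repr, meta_repr], dim=-1))    # concat variant

model = TextMetaClassifier(vocab_size=5000, meta_dim=12, n_classes=4)
tokens = torch.randint(0, 5000, (8, 32))          # batch of tokenized messages
meta = torch.randn(8, 12)                         # e.g. sender / timestamp features
logits = model(tokens, meta)
loss = nn.functional.cross_entropy(logits, torch.randint(0, 4, (8,)))
loss.backward()                                   # both blocks are updated in one training loop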
to demonstrate the performance of the method we run multiple experiments on publicly available data-sets (amazon, yelp, reddit and enron) to show the advantages of using the block architecture, and compare them to the benchmarks in the literature (reviewed in the related work in section 2), which are based on the transformer benchmark (bert), the random forest (rf) classifier, and multi-layer perceptron (mlp) networks. in one related line of work, the meta-data is the layout information of scanned documents, and the authors propose an innovative architecture to extract information from both text and layout information. we propose a method based on blocks to train a linguistic model with meta-data for a specific text classification task. by splitting each type of meta-data input into different blocks, one can use state-of-the-art deep-learning architectures to handle each meta-data type uniquely and more efficiently. in addition, the training is done using all blocks in a unified training loop, adjusting the weights of all blocks in every optimizer step, so all information from the text and meta-data sources contributes to the learning process. since the blocks are trained simultaneously, information from the meta-data may impact the training of the core textual block (the transformer block), and vice-versa. as a toy illustrative example, if a single sample has text suggesting a specific class but meta-data suggesting another, the textual block would train differently (and the textual representation would be different, taking this into account), and the weights of the textual block would be able to capture this difference. for the numerical experiments we train the network of the proposed approach with two blocks: a) the transformer block operating on the main text, and b) the meta-data block, which is a fully-connected block operating on a one-dimensional meta-data vector. in addition, it is simple to distinguish between the transformer-only architecture and the transformer-and-meta-data architecture in this way, giving us better explainability of the contribution of the meta-data. for the meta-data classification block we use two fully-connected layers, the first of the size of the input meta-data and the second of the size of the number of classes, both using the rectified linear unit (relu) activation. methods 9-10 are the proposed ones, exploring the effect of combining the transformer output with the meta-data block output, once by averaging the block outputs and once with a fully-connected layer to learn the weights after concatenating the output representations of each block. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/690.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/690.txt new file mode 100644 index 0000000000000000000000000000000000000000..e51ca18286fceeb563796ec9ca323283bcd03012 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/690.txt @@ -0,0 +1 @@ +recall@k, also known as top-k accuracy, is one of the most popular evaluation metrics in retrieval and metric learning (musgrave et al., 2020). recall@k is closely related to practical applications, where the retrieval error rate must be measured and minimized. however, a good metric for evaluating machine learning models must exhibit additional properties necessary for measuring overfitting and generalization (huai et al., 2019; roelofs, 2019). in this paper we show that recall@k lacks the desired properties.
in particular, we show that metric values largely depend on the number of classes in the dataset and can't be used for quality comparison among dataset splits, including differences between train and test sets. to overcome the problems related to recall@k, we propose grouped recall@k, a simple alternative to recall@k, which is invariant to the number of classes in the dataset and thus can be used for evaluating overfitting and generalization. we further demonstrate the importance of measuring these quantities by studying deep image retrieval (roth et al., 2020). in particular, we show that grouped recall@k can diagnose overfitting and underfitting, while traditional recall@k can not. (figure 1 caption: illustration of the difference between the commonly used recall@1 metric and the proposed grouped recall@1. recall@1 reduces monotonically as the number of test classes increases. the proposed grouped recall@1 is invariant to the number of classes, but larger dataset sizes increase evaluation stability. metrics are computed for the bn-inception model on the stanford online products (sop) dataset.) contributions of the paper can be summarized as follows: • we show recall@k depends on the number of classes in the dataset, and thus can't be used for measuring overfitting and generalization. • we propose a simple grouped recall@k metric that is invariant to the number of classes in the dataset and has a lower computational complexity compared to recall@k. • we show that the proposed metric can measure the generalization gap. we also provide theoretical bounds for generalization on unseen data. • we apply the proposed metric to deep image retrieval and demonstrate the importance of measuring generalization and overfitting. the first quantity is called the generalization gap (roelofs, 2019), and is evaluated as the difference of accuracy between the train set x_train and the test set x_test. in retrieval, we want to measure the ability of the model to find elements from the gallery dataset d_g that have the same class label l(x) as a query element q. an efficient way to use evaluation data is to collect a single labeled dataset d, iterate over its elements, and consider each element as a query and the remaining part of the dataset as a gallery. in this paper, we raise the following question: how can we overcome the limitations of recall@k, while preserving the informativeness of the metric? we answer this question by proposing a grouped alternative to recall@k.
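To make the comparison concrete, here is a hedged NumPy sketch of standard recall@1 alongside one possible grouped variant; the grouping protocol shown (one same-class element ranked against group_size - 1 elements from other classes) is our reading of the idea, not necessarily the authors' exact definition, and all names and defaults are illustrative.

```python
import numpy as np

def recall_at_1(embeddings, labels):
    """Standard recall@1: is each query's nearest non-self neighbour of the same class?"""
    sims = embeddings @ embeddings.T            # cosine similarities if rows are L2-normalised
    np.fill_diagonal(sims, -np.inf)             # exclude self-matches
    nearest = sims.argmax(axis=1)
    return float((labels[nearest] == labels).mean())

def grouped_recall_at_1(embeddings, labels, group_size=8, n_trials=1000, seed=0):
    """Grouped variant (illustrative reading): rank one positive against a fixed
    number of negatives, so the score scales with the group size rather than
    with how many classes the evaluation split happens to contain."""
    rng = np.random.default_rng(seed)
    n, hits = len(labels), []
    for _ in range(n_trials):
        q = rng.integers(n)
        pos_pool = np.where((labels == labels[q]) & (np.arange(n) != q))[0]
        neg_pool = np.where(labels != labels[q])[0]
        if len(pos_pool) == 0 or len(neg_pool) < group_size - 1:
            continue
        group = np.concatenate([[rng.choice(pos_pool)],
                                rng.choice(neg_pool, size=group_size - 1, replace=False)])
        sims = embeddings[group] @ embeddings[q]
        hits.append(labels[group[sims.argmax()]] == labels[q])
    return float(np.mean(hits))
```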
the scale of the grouped recall metric becomes dependent on the group size k rather than on the number of classes in the dataset.to the best of our knowledge, there is no metric for image retrieval tasks that can be used to estimate empirical generalization for test data and overfitting.to empirically validate theoretical bounds on the generalization error, we compare the quality of the multiple methods on two subsets of the test set and measure the difference between metric values., 2011), inshop(liu et al. we thus conclude, that grouped recall measures the difference between the models, similar to recall@k, and can be used for retrieval evaluation.to show that our metric can quantify overfitting via generalization gap we gradually add dropout to the resnet34(he et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/691.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/691.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c8ef82a9064ff06ca9ba9c7a6b162e8d04feb90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/691.txt @@ -0,0 +1 @@ +consider the problem of sequential prediction in the realizable setting, where labels are generated from an unknown f * belonging to a hypothesis class f . sequential prediction is typically studied under two distributional assumptions on the input data: the stochastic setting where the data is assumed to be identically and independently distributed (i.i.d) according to some fixed (perhaps unknown) distribution, and the fully-adversarial setting where we make absolutely no assumptions on the data generation process. a simple empirical risk minimzation (erm) strategy works for the stochastic setting where the learner predicts according to the best hypothesis on the data seen so far. the number of mistakes of this strategy typically scales with the vapnik-chervonenkis (vc) dimension of the underlying hypothesis class f . however, in the fully adversarial setting, this strategy can lead to infinite mistakes even for classes of vc dimension 1 even if the adversary is required to be consistent with labels from f * . the littlestone dimension, which characterizes the complexity of sequential prediction in fully-adversarial setting, can be very large and often unbounded compared to the vc dimension . this mismatch has led to the exploration of beyond worst-case analysis for sequential prediction .in this work, we propose a new framework that sits in between the stochastic and fully-adversarial setting. in particular, we consider sequential prediction with an adversary that injects adversarial (or out-of-distribution) examples in a stream of i.i.d. examples, and a learner that is allowed to abstain from predicting on adversarial examples. a natural motivation for our framework arises in medical diagnosis where the goal is to predict a patient's illness based on symptoms. in cases where the symptoms are not among the commonly indicative ones for the specific disease, or the symptoms may suggest a disease that is outside the scope of the doctor's knowledge, it is safer for the doctor to abstain from making a prediction rather than risk making an incorrect one. 
similarly, for self-driving cars, in cases where the car encounters weather conditions outside of its training, or unknown information signs, it is better for the algorithm to hand over control to the driver instead of making a wrong decision which could end up being fatal. in the proposed framework, the learner's goal is to minimize erroneous predictions on examples that the learner chooses to predict on (i.i.d. or adversarial) while refraining from abstaining on too many i.i.d. examples. if the learner were required to predict on every example, then the adversary could produce a fully-adversarial sequence of examples which would force the learner to make many erroneous predictions. the abstention option allows us to circumvent this challenge and handle any number of adversarial injections without incurring error proportional to the number of injections. in this framework, we can ask the following natural question: is there a statistical price for certainty in sequential prediction? in particular, can we recover stochastic-like guarantees in the presence of an adversary if we are allowed to abstain from predicting on adversarial examples? a priori, it is not clear where on the spectrum between the fully-adversarial and stochastic models the complexity of this problem lies. the main challenges arise from the fact that the adversary fully controls the injection levels and provides no feedback about which examples were adversarial, and the learner has to perform one-sample outlier detection, which is nearly impossible. despite this, we show that it is possible to guarantee certainty in a statistically efficient manner. we will work in the realizable setting where the labels are generated according to some function in f. given a class f and a data set s = (x_i, y_i), we will denote by f|_s the class f|_s = {f ∈ f : f(x_i) = y_i for all i}. the learner picks ŷ_t ∈ {0, 1} and then receives as label y_t = f⋆(x_t), where f⋆ ∈ f is the unknown function that the learner is trying to learn. a set {x_1, . . . , x_k} ⊆ x is said to be shattered by f if for all y ∈ {0, 1}^k there exists a function f ∈ f such that f(x_i) = y_i. a mistake tree of depth ℓ is said to be shattered by a class f if for any root-to-leaf path (x_i, y_i), i = 1, . . . , ℓ, there is a function f ∈ f such that f(x_i) = y_i for all i ≤ ℓ. the littlestone dimension of the class f, denoted by ldim(f), is the largest depth of a mistake tree shattered by the class f. the littlestone dimension completely characterizes the mistake bound of the class f in the sense that any algorithm can be forced to have a mistake bound of ω(ldim(f)), and there exists an algorithm, known as the standard optimal algorithm (soa), with a mistake bound of o(ldim(f)). it is also possible to allow the adversary to adaptively choose f⋆ ∈ f, that is, the adversary has to make sure the labels are consistent with some f ∈ f at all times but does not have to commit to one fixed f⋆. in order to remedy this, we consider a modified version of the leave-one-out disagreement estimate which considers examples x in the disagreement region for the class f|_{s_f \ (x,y)}, where s_f is the subset of the datapoints which disagrees with a fixed reference function f. further note that we have γ(s_{t-1}, f_{x_t→1}) + γ(s_{t-1}, f_{x_t→0}) ≤ γ(s_{t-1}, f), since the points in the disagreement region corresponding to the two labels are disjoint.
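To make the abstention idea tangible, here is a toy, hypothetical illustration of a disagreement-based abstention rule over a finite hypothesis class; this is a simplified stand-in for intuition only, not the paper's algorithm, and the threshold class and sample points are invented for the example.

```python
from typing import Callable, List, Optional

def abstaining_predict(version_space: List[Callable], x) -> Optional[int]:
    """Predict only when every hypothesis still consistent with the observed labels
    agrees on x; otherwise abstain (None), treating disagreement as a warning sign."""
    preds = {h(x) for h in version_space}
    return preds.pop() if len(preds) == 1 else None

def update_version_space(version_space, x, y):
    """Drop hypotheses inconsistent with the revealed label (realizable setting)."""
    return [h for h in version_space if h(x) == y]

# toy usage with a finite class of threshold functions on [0, 10]
vs = [lambda x, t=t: int(x >= t) for t in range(11)]
for x, y in [(3.0, 0), (7.5, 1)]:
    print(abstaining_predict(vs, x))     # abstains (None) while the class still disagrees
    vs = update_version_space(vs, x, y)
print(abstaining_predict(vs, 9.0))       # all remaining thresholds agree -> predicts 1
```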
we say that a point x is attackable with respect to a data set s if there exists is a sequence of adversarial examples a x such that algorithm abstains on example x when the history is s ∪ a x \ {x}. in other words, x is attackable if there is a set of adversarial examples a x such that x ∈ s 1 f | s∪ax\{x} and. let f be a hypothesis class with vc dimension 1 and f ∈ f be the reference function corresponding to the characterization in theorem 6. in this setting, the true hypothesis f ⋆ corresponds to a path p and a threshold x ⋆ on t such that f ⋆ (x) = 1 if and only if x ∈ p and x ≺ x ⋆ . the key observation is that any attackable point u must be in γ s ∪ a x , f x→f (x) , f corresponding to v. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/692.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/692.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5313c20124865ced91d81b175f2bf969e35227b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/692.txt @@ -0,0 +1 @@ +in recent years, we have seen rapid development in deep neural networks with various model architectures (he et al., 2016;dosovitskiy et al., 2020) for a wide range of different tasks. however, the pretrained models could be poor at generalizing learned features or knowledge to new datasets or environments. even a slight shift from the network's original training domain could significantly hurt its performance (recht et al., 2019;hendrycks & dietterich, 2021;yang et al., 2021), which suggests that the successes achieved by deep learning so far have been largely driven by supervised learning and large-scale labeled datasets (deng et al., 2009). this domain shift issue has seriously impeded the development and practical deployment of deep neural models.a straightforward approach to deal with this domain shift is to collect some data from the target domain to adapt the source-domain trained model, relying on the assumption that target data is accessible for model adaption. this domain adaptation approach (lu et al., 2020;saito et al., 2018;ganin & lempitsky, 2015;long et al., 2015;liu et al., 2020;hoffman et al., 2018;gong et al., 2012;long et al., 2016;balaji et al., 2019;kang et al., 2019;kulis et al., 2011;gandelsman et al., 2022;sun et al., 2020;liu et al., 2021) is practical in many scenarios since we do not need extra labor and computation cost to label the collected target distribution data and simply leverage the unlabeled data for adaptation. however, it does assume that we have access to target domain data during training, which is infeasible and impractical in many cases.a more strictly defined problem is domain generalization, which does not assume access to target sample features during training and strives to learn robust representations against distribution shifts from multiple source domains during training. we later perform evaluation of the trained model on an unseen test domain to measure the generalizability, transferability, and robustness of the model. while many existing works in domain generalization attempt to learn domain-invariant features (arjovsky et al., 2019;bui et al., 2021;cha et al., 2021;ganin & lempitsky, 2015;li et al., 2018a;sun & saenko, 2016), some recent works (gulrajani & lopez-paz, 2021;koh et al., 2021) also demonstrate decent accuracies without explicitly enforcing invariance. 
recently, the state-of-the-art work miro (cha et al., 2022) aims to learn similar features to 'oracle' representations, reformulating the domain generalization task by maximizing mutual information between oracle representations and model representations while performing training on source domains. it achieves state-of-art performance on the widely-used domainbed (gulrajani & lopez-paz, 2021) benchmark.a seemingly completely irrelevant direction, pruning (le-cun et al., 1989;hassibi et al., 1993;han et al., 2015;frankle & carbin, 2019;lee et al., 2019;sun et al., 2022), aims to compress the model by removing the least important channels scored by some saliency criterion. since size of the model gets shrinked after pruning, it is also sometimes considered to reduce the overfitting of models and increase the domain generalization ability of the model from another perspective. in fact, some latest works (li et al., 2022;jin et al., 2022;bartoldson et al., 2020) began to investigate deeper relationship between pruning and the generalizability and robustness of models.in this paper, we made a further step, investigating whether we could use pruning as a reliable method to boost the generalization ability of the model. we aim to answer the following three questions:1. can we leverage existing popular and simple pruning metrics like l2 (li et al., 2017) to boost generalization accuracy by pruning unimportant channels?2. can we design a better pruning score taking the generalization ability of the model into consideration? more concretely, a score specifically designed to improve target domain accuracy instead of maintaining source domain accuracy as typical pruning.3. finally, can we combine it with modern state-of-theart domain generalization algorithms like miro (cha et al., 2022) as a simple plug-in component to further boost the accuracy?we answered the above three questions with solid empirical studies ranging across three datasets and model architectures. to begin with, we study the first two questions extensively across many different pruning sparsity ratios on mnist to mnist-m, which is randomly colored mnist. we found that the existing simple pruning method l2 (li et al., 2017) can offer a small improvement over the vanilla baseline(i.e. without using any domain generalization technique). later, we solve the question (2) by designing a novel pruning method specifically targeting generalization accuracy. given a convolutional neural networks(cnns), we evaluate the activation map for samples from different domains at each layer for every channel and compute a domain similarity score (dss) based on the distance of the activation maps. we then use structural pruning to prune the channels with the lowest dss, followed by a finetuning session to recover accuracy. from empirical results, we observe an obvious improvement from the standard l2 pruning score. notably, we can improve the baseline performance by more than 5 points by sparsifying 60% of the channels in the model, which may seem very surprising.after validating the effectiveness of our proposed dss score, we resolve the question (3) by combining our method with the state-of-the-art work miro (cha et al., 2022). 
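The per-channel domain similarity score (DSS) described above can be approximated as follows; this is an illustrative reading of the idea (mean activation-map distance between two domains at a chosen conv layer), not the authors' exact formula, and the hook-based implementation and argument names are assumptions.

```python
import torch

@torch.no_grad()
def domain_similarity_scores(model, layer, loader_a, loader_b, device="cpu"):
    """Return one score per channel of `layer`: channels whose activations differ
    most between the two domain loaders get the lowest score (pruned first)."""
    acts = {}
    hook = layer.register_forward_hook(lambda m, i, o: acts.__setitem__("out", o))

    def mean_activation(loader):
        total, n = None, 0
        for x, _ in loader:
            model(x.to(device))                       # model assumed to already be on `device`
            a = acts["out"].mean(dim=(0, 2, 3))       # average over batch and spatial dims -> (C,)
            total = a if total is None else total + a
            n += 1
        return total / n

    mu_a, mu_b = mean_activation(loader_a), mean_activation(loader_b)
    hook.remove()
    return -torch.abs(mu_a - mu_b)                    # higher score = more domain-similar channel
```

Structural pruning would then remove the channels with the smallest scores, followed by fine-tuning, as the text describes.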
we conduct experiments on two datasets, pacs and officehome, from the domainbed (gulrajani & lopez-paz, 2021) benchmark and observe a 1 point improvement of miro by introducing a 10% channel sparsity into the model, demonstrating the capability of our method to even improve the sota result. many works aim to tackle the domain generalization task by learning domain-invariant features, for example by minimizing between-domain feature divergences (ganin & lempitsky, 2015; li et al., 2017; liu et al., 2017; molchanov et al.). suppose we are dealing with image data and suppose x_1 ∼ p^1_x, x_1 ∈ r^{c×h×w}, and x_2 ∼ p^2_x, x_2 ∈ r^{c×h×w}; moreover, suppose t(x) ∈ r^{ĉ×ĥ×ŵ}. in this paper, we study whether we could use pruning as a reliable method to improve the generalization performance of the model. we propose a novel pruning score, dss, designed not to maintain source accuracy as typical pruning work does, but to directly enhance the robustness of the model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/693.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/693.txt new file mode 100644 index 0000000000000000000000000000000000000000..3aa6537a08f80acaf09c4cf7e25fd3c567539572 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/693.txt @@ -0,0 +1 @@ +climate change requires clean and effective energy storage to replace gasoline, coal, or natural gas (ng). batteries are a clean carrier but do not have sufficient energy density for sectors such as cement, steel, and long-haul transport (emma et al., 2021). for those industries, one option that has received considerable attention is low-carbon hydrogen (mcqueen et al., 2020), which can store a large amount of energy and does not release greenhouse pollutants in combustion. however, the inefficiency of the green h 2 manufacturing process is one of the biggest obstacles to its dissemination (joshi et al., 2022). while finding an environmentally friendly and affordable way to produce h 2 is a long-term task, it should not deny us hydrogen's immediate benefit. one viable strategy is blending h 2 with ng (hcng) to
reduce emissions when burning (melaina et al., 2013). by increasing the volume of h 2 from 0% to 15%, up to 50% reduction in co 2 emission was observed (pandey et al., 2022). blends with less than 20% h 2 by volume can be transmitted by repurposing existing natural gas pipelines, which are 67% cheaper than building new ones (peter et al., 2020). however, a major drawback with repurposed pipelines is that during daily consumption, gas pressure may reach excessive values that lead to hydrogen diffusion through most current pipeline wall materials (eu agency for the cooperation of energy regulators, 2021). specifically, due to friction incurred on the inner wall by the gas flow, atomic h can permeate into the metal lattice, reducing the stress required for cracks to form. this phenomenon, known as hydrogen embrittlement (he), causes pipelines to be prone to leaking h 2 , which can lead to catastrophic events due to h 2 ignition in the presence of air, as well as some other complications like decreasing the upper stratospheric ozone mixing ratios (nicola et al., 2022). such a risk is currently prohibiting hcng from becoming more popular. in germany, where it is most widely adopted, hcng only accounts for 10% of demand per capita (dolci et al., 2019). preventing he requires monitoring, operational pressure management, and pipeline maintenance (ronevich & san marchi, 2019). to the best of our knowledge, few works frame pipeline monitoring from a data-driven perspective (spandonidis et al., 2022), while the rest rely on signals from sensors and hardware (du et al., 2016; zhu et al., 2017; aba et al., 2021). to apply ml to this problem, there are two steps involved. the first is addressing the prediction task of the gas flow pressure. the next step is to use the predicted pressure as the input to a h 2 diffusion model through the pipe wall (fick, 1855; hafsi et al., 2018) to pinpoint in which segment of the pipe the next leakage will be likely to happen. out of these two issues, we focus on the turbulent gas flow modeling since it is not only a prerequisite but also a more intricate problem than diffusion. in this work, we propose a supervised machine learning-based model that predicts future pressure values from previously observed data. namely, we implement an operator inference prototype for pipe surveillance and contrast it with transformer techniques. we consider the inlet velocity u_z and the mean inner wall pressure p of the whole pipeline through time. the periodic profile for u_z is suitable to imitate real-life demand (su et al., 2019). the paper is organized as follows. after this short introduction, we explain the dynamical system and the simulation setup for our experiments in section 2. in section 3, we discuss the results of these simulations and how they compare to other baselines. we conclude the paper with limitations of our work and some future research directions. here, δ_ij is the kronecker delta, ū and u′ are the mean and fluctuating velocities, and p is the mean static pressure. to set up the training data, we select features from the set (u, p, k, ω, nut) and organize them into a snapshot-style matrix, with m the temporal and n the spatial dimension.
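The snapshot-matrix setup described above can be sketched as follows; this is a hedged, generic illustration of assembling the data matrix, reducing it with PCA/POD, and fitting a regularized least-squares reduced model, with a linear toy operator standing in for the actual operator-inference formulation (all names and the regularizer value are assumptions).

```python
import numpy as np

def snapshot_matrix(fields):
    """Stack the selected fields (e.g. u, p, k, omega, nut), each of shape (m, n_i),
    into a single (m, n) snapshot matrix: m time steps by n spatial/feature values."""
    return np.concatenate(fields, axis=1)

def pca_reduce(X, r):
    """Project snapshots onto the r leading POD/PCA modes and report preserved energy."""
    Xc = X - X.mean(axis=0, keepdims=True)
    U, s, Vt = np.linalg.svd(Xc, full_matrices=False)
    energy = float((s[:r] ** 2).sum() / (s ** 2).sum())
    return Xc @ Vt[:r].T, Vt[:r], energy          # reduced states (m, r), basis, preserved energy

def fit_linear_reduced_model(Xr, dXr, lam=1e-3):
    """Regularized least squares for a reduced model dx/dt ≈ A x; the regularizer lam
    plays the role of the tuning parameter mentioned in the text."""
    A = np.linalg.solve(Xr.T @ Xr + lam * np.eye(Xr.shape[1]), Xr.T @ dXr)
    return A.T
```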
here, u ∈ r 3 is the velocity in x, y, z axis, p is the pipeline internal static pressure, k is the rate of dissipation of kinematic turbulence, ω is the kinematic turbulence energy and nut is the kinematic turbulent viscosity.the first 50% of data is for the training set, and the next 10% is the validation set. every method has to predict k time-step forward from the end of the training data to the end of testing data without seeing the testing data . fno aims to learn a neural operator, or neural network, to map from the input to the pde solution by parameterizing the integral transformation in a fourier space. combining the linear pca transformation with the quadratic manifold in latent space is equivalent to applying galerkin projection to the original full-order model using a truncated pca basis.besides model selection, we also have to find the optimal number of the bases for its dimension reduction, and the regularizer value for the least square solver since unregularized rmse minimization may lead to overfitting. for the number of bases, we measure the quality of the reduced order model by computing the preserved energy with respect to the original dynamical system and calculating the rmse between the original state variables and a pca reconstruction of those variables. in fact,(zeng et al. however, decreasing the number of features while keeping the same number of training data took care of the high variance in opinf. therefore, to predict when and where the next leakage might occur, it is best to create a handful of scenarios with different u, p profiles in different pipeline systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/694.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/694.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d64c67378eca5513032c043a5b094f441432645 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/694.txt @@ -0,0 +1 @@ +dengue fever (df), an outbreak prone viral infection is transmitted by aedes mosquitoes, which is mostly found in tropical and sub-tropical climatic regions. the infection can result in dengue haemorrhagic fever (dhf), also known as severe dengue which can be fatal. df and dhf are caused by four serotypes of dengue virus (denv). till date there is no specific treatment for df and dhf. dengue prevention and effective management of control measures are necessary to reduce the catastrophic effects of dengue outbreaks.with dengue being identified as one of the four major global health threats by the world health organization (who), india records approximately 33 million dengue cases each year, sharing one-third of the world's total dengue cases . implementation of advanced early warning systems and disease control measures is the need of the hour to mitigate dengue fatalities. artificial intelligence-based solutions will go a long way in establishing active dengue surveillance needed to combat the disease outbreak .a review of the literature on epidemiological study of dengue spread reveals the strong association between climate parameters and dengue transmission . in addition to the meteorological data, socio-economic factors such as urbanization and population density also contribute for dengue dynamics . numerous other studies have considered including mosquito infection rate and vector surveillance data to improve the morbidity rate prediction further . 
novel approaches reported in the literature suggest the use of social media data such as twitter for prediction with high spatial resolution, and remotely sensed data from satellites for modelling oviposition activity. these provide us the motivation to exploit every available data source to advance the research to the next level. dengue dynamics has been modelled in the literature using various techniques ranging from classical methods such as ar, ma, arma, arima and sarima to machine learning models such as svm, naïve bayes, bagging and boosting algorithms and neural networks. classical methods are not without their limitations: they cannot learn non-linear relationships, are not suitable for multivariate forecasting and are not robust to noise. in the case of machine learning models, feature extraction is required to be done manually. hence, there is a need to develop better models to improve the prediction accuracy. the objective of this study is to use a deep learning model appropriate for processing time series data, such as a long short-term memory network, to evaluate the significance of climate parameters and vector larval indices on dengue outbreak. data pertaining to dengue incidence, mosquito larval index and control measures are collected from the tamilnadu public health department. the input is formatted as a 3d tensor with 3 timesteps (one time step is one point of observation in a time series sample), that includes climate parameters and dengue incidence of the preceding two months and climate parameters alone for the current month. climate parameters and mosquito larval index of the preceding two months and the current month, together with dengue incidence of the preceding two months, are given as input to the network model to predict the dengue incidence for the current month.
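A minimal sketch of the bidirectional stacked LSTM operating on the (batch, timesteps=3, features) tensor described above; the hidden size and layer count are illustrative assumptions, not the study's reported configuration.

```python
import torch
import torch.nn as nn

class DengueLSTM(nn.Module):
    """Bidirectional stacked LSTM over three lagged monthly observations
    (climate parameters, larval indices and past incidence per time step)."""
    def __init__(self, n_features, hidden=64):
        super().__init__()
        self.lstm = nn.LSTM(n_features, hidden, num_layers=2,
                            batch_first=True, bidirectional=True)
        self.out = nn.Linear(2 * hidden, 1)   # predicted incidence for the current month

    def forward(self, x):                     # x: (batch, 3, n_features)
        h, _ = self.lstm(x)
        return self.out(h[:, -1])             # use the representation of the last time step
```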
to better understand the influence of time lag of the predictors in dengue incidence, the lstm model is tested with four different time steps such as t = 2, 3, 4, 5 out of which the time step of 3 has the least mean square error for both the validation and test dataset (shown in table3).bidirectional stacked lstm, a recurrent neural network has proved to make dengue predictions with good accuracy, when trained with the available climate data and mosquito larval index with a lead time of 3 months. the model can be leveraged to strengthen dengue surveillance and early warning systems across the country for the timely implementation of effective disease control measures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/695.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/695.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c3dafbc6a898410e68c79435883a4d36b04fc56 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/695.txt @@ -0,0 +1 @@ +the spread of fake news is a matter of concern due to its possible role in manipulating public opinion. we define fake news in line with the new york times as a "made up story with the intention to deceive, often with monetary gain as a motive" . the fake news problem is complex given its varied interpretations across demographics.we present a three level hierarchical attention network (3han) which creates an effective representation of a news article called news vector. a news vector can be used to classify an article by assigning a probability of being fake. unlike other neural models which are opaque in their internal reasoning and give results that are difficult to analyze, 3han provides an importance score for each word and sentence of an input article based on its relevance in arriving at the output probability of that article being fake. these importance scores can be visualized ⋆ these authors contributed equally to this work. through a heatmap, providing key words and sentences to be investigated by human fact-checkers.current work in detecting misinformation is divided between automated fact checking , reaction based analysis and style based analysis . we explore the nascent domain of using neural models to detect fake news. current stateof-the-art general purpose text classifiers like bag-of-words , bag-of-ngrams with svm , cnns, lstms and grus can be used to classify articles by simply concatenating the headline with the body. this concatenation though, fails to exploit the article structure.in 3han, we interpret the structure of an article as a three level hierarchy modelling article semantics on the principle of compositionality . words form sentences, sentences form the body and the headline with the body forms the article. we hypothesize forming an effective representation of an article using the hierarchy and the interactions between its parts. these interactions take the form of context of a word in its neighbouring words, coherence of a sentence with its neighbouring sentences and stance of a headline with respect to the body. words, sentences and headline are differentially informative dependent on their interactions in the formation of a news vector. we incorporate three layers of attention mechanisms to exploit this differential relevance.the design of 3han is inspired by the hierarchical attention network (han) . han is used to form a general document representation. we design 3han unique to the detection of fake news. 
when manually fact-checking an article the first thing that catches the eye is the headline. we observe a headline to be (i) a distinctive feature of an article , (ii) a concise summary of the article body and (iii) inherently containing useful information in the form of its stance with respect to the body. we refer to these observations as our headline premise. the third level in 3han is especially designed to use our headline premise.from our headline premise, we hypothesize that a neural model should accurately classify articles based on headlines alone. using this hypothesis, we use headlines to perform a supervised pre-training of the initial layers of 3han for a better initialization of 3han. the visualization of attention layers in 3han indicates important parts of an article instrumental in detecting an article as fake news. these important parts can be further investigated by human fact-checkers.we compare the performance of 3han with multiple state-of-the-art traditional and neural baselines. experiments on a large real world news data set demonstrate the superior performance of 3han over all baselines with 3han performing with an accuracy of 96.24%. our pre-trained 3han model is our best performing model with an accuracy of 96.77%. 1 we present a three level hierarchical attention network (3han) which creates an effective representation of a news article called news vector. unlike other neural models which are opaque in their internal reasoning and give results that are difficult to analyze, 3han provides an importance score for each word and sentence of an input article based on its relevance in arriving at the output probability of that article being fake. these interactions take the form of context of a word in its neighbouring words, coherence of a sentence with its neighbouring sentences and stance of a headline with respect to the body. a news vector is constructed using 3han. we denote word j of sentence i by w ij with sentence i containing t i words. each word w ij is converted to a word embedding x ij using gloveembedding w e (x ij = w e (w ij )). we use a bidirectional gruto form an annotation of each word which summarizes the context of the word with preceding and following words in the sentence. a sentence representation is formed using an attention layer to extract relevant words of a sentence. the similarity of each word u ij with a word level relevance vector u w decides the attention weights α ij normalized using a softmax function. similar to word attention, we identify relevant sentences in the formation of the body vector v b by using an attention layer. a sentence level relevance vector u s decides attention weights α i for sentence annotation h s i . to exploit our headline premise we design a third layer of encoding and attention with the headline being inputted word by word. the word embedding y i for word w 0i is obtained using glove embeddings (w e ) by y i = w e (w 0i ). word attention weights α w are normalized using sentence attention weights α s by α w = √ α s α w . the high accuracy of simple word count based models which do not take into account word ordering or semantics is an indication of vocabulary and patterns of word usage from the vocabulary being a distinguishing feature between fake news and true news. 
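The word-level attention described above (bidirectional GRU annotations scored against a learned relevance vector u_w) can be sketched as follows; dimensions and names are illustrative assumptions rather than the 3HAN implementation, and the same pattern would repeat at the sentence and headline levels.

```python
import torch
import torch.nn as nn

class WordAttention(nn.Module):
    """Build a sentence vector from GloVe word embeddings: encode words with a
    bidirectional GRU, score each annotation against a relevance vector u_w,
    and combine the annotations with softmax attention weights."""
    def __init__(self, emb_dim=100, hidden=50):
        super().__init__()
        self.gru = nn.GRU(emb_dim, hidden, batch_first=True, bidirectional=True)
        self.proj = nn.Linear(2 * hidden, 2 * hidden)
        self.u_w = nn.Parameter(torch.randn(2 * hidden))    # word-level relevance vector

    def forward(self, x):                     # x: (batch, n_words, emb_dim) word embeddings
        h, _ = self.gru(x)                    # (batch, n_words, 2*hidden) word annotations
        u = torch.tanh(self.proj(h))
        alpha = torch.softmax(u @ self.u_w, dim=1)           # (batch, n_words) attention weights
        return (alpha.unsqueeze(-1) * h).sum(dim=1)          # sentence vector
```

The attention weights alpha are what the heatmap visualization mentioned above would display per word.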
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/696.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/696.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c3dafbc6a898410e68c79435883a4d36b04fc56 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/696.txt @@ -0,0 +1 @@ +novel class discovery (ncd) is a new and growing field, where we are given during training a labeled set of known classes and an unlabeled set of different classes that must be discovered. in recent years, many methods have been proposed in the context of computer vision. tabular data refers to data arranged in a table, where each row is an observation and each column is an attribute. it is one of the most common types of data in practical applications such as medical diagnosis, customer churn prediction, cybersecurity, and credit risk assessment. an intuitive example of an application of ncd to tabular data would be customer churn prediction: by using a dataset that includes the reasons why customers stopped using a product, we can more accurately identify other causes of churn in an unlabeled set where the reasons have not yet been identified. while in practice tabular data is one of the most prevalent data types in the real world, to the best of our knowledge, only one paper has attempted to solve ncd specifically for tabular data. this is partly due to the heterogeneous nature of tabular data and its lack of spatial and semantic structure, which makes it difficult to apply some computer vision techniques such as data augmentation or self-supervised learning. furthermore, tabular data contains attributes that are specific to each domain. this means that analyzing and understanding the results of ncd or clustering algorithms can be challenging for a data scientist who is not necessarily familiar with the attributes of the dataset. on the other hand, the domain expert does not necessarily have the knowledge required to write code and run ncd or clustering algorithms. in an ideal scenario, the domain expert would be included in the training loop to interpret the results produced by the data scientist. but for practical reasons, it can be difficult to dedicate two people to this task, as having a data scientist run an algorithm, present the results to the expert, and update the parameters based on the expert's feedback can be a slow and tedious process. hence, the goal of the interface proposed here is to allow a domain expert to visualize his data and run ncd or clustering algorithms without having to write code, as in visual data mining. given a pre-processed dataset, a user can employ this interface to (i) get a first idea of the separability of the data with t-sne, (ii) select which features and classes to use, and which classes are considered unknown, (iii) parameterize and execute ncd and clustering algorithms, and (iv) train decision trees to generate rules and interpret the classes or clusters. based on these results, an expert can remove features or classes that have too much influence on the results, re-train a clustering model and re-generate rules. this process can be very tedious through code, but it can be done in only a few clicks with this interface (which even a data scientist could benefit from). currently, this interface implements tabularncd, the state-of-the-art for ncd in the context of tabular data.
other clustering methods are implemented: spectral clustering, k-means and a simple baseline method to solve ncd. this baseline trains a classification neural network on the labeled data, and then projects the unlabeled data in its last layer before clustering it with k-means. as expressed before, this interface cannot replace the domain expert. it only allows him to explore his dataset using machine learning tools without writing code. this interface is also upgradeable, as new ncd or clustering algorithms can be quickly implemented. the application is open source and can be installed locally using the code at https://github.com/colintr/interactiveclustering. the video of the demonstration is available at www.youtube.com/watch?v=w7ru8nhpj-8. as noted above, the interface's main ncd method is tabularncd, the state-of-the-art for ncd in the context of tabular data.
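As a hedged sketch of the simple k-means baseline described earlier in this file (train a classifier on the known classes, project the unlabeled data into the latent space, then cluster), with all names and hyperparameters illustrative rather than taken from the interface's code:

```python
import torch
import torch.nn as nn
from sklearn.cluster import KMeans

def ncd_kmeans_baseline(encoder, head, labeled_loader, unlabeled_x, n_new_classes, epochs=10):
    """Train encoder+head on the labeled (known) classes, then cluster the unlabeled
    rows in the encoder's latent space with k-means to discover the new classes."""
    opt = torch.optim.Adam(list(encoder.parameters()) + list(head.parameters()), lr=1e-3)
    loss_fn = nn.CrossEntropyLoss()
    for _ in range(epochs):
        for x, y in labeled_loader:
            opt.zero_grad()
            loss_fn(head(encoder(x)), y).backward()
            opt.step()
    with torch.no_grad():
        z = encoder(unlabeled_x)                      # latent projection of the unlabeled rows
    return KMeans(n_clusters=n_new_classes, n_init=10).fit_predict(z.cpu().numpy())
```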
it adopts an architecture with two "heads": one to classify the known classes and introduce relevant high-level features in the latent space of the encoder, and another classifier for the unlabeled data trained with pseudolabels defined without supervision in the latent space. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/697.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/697.txt new file mode 100644 index 0000000000000000000000000000000000000000..aabc65967c5f6904e496ca6a89e14516a9368745 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/697.txt @@ -0,0 +1 @@ +in a node-edge-like graph structure, edges represent the connection between data samples denoted by nodes. graph data structures represent individual node samples in terms of their connected neighboring samples in bioinformatics, software vulnerability identification, and communication networks. a graph neural network (gnn) uses neighborhood connectivity and node feature vectors to learn a node representation for node classification . gnn assumes that node samples in a neighborhood are related at varying degrees, subsequently characterizing individual samples. this between-sample relationship is not considered for data samples generally assumed to be independent. for example, tabular data sets are collections of samples in rows with features positioned in columns. in tabular data, individual rows are assumed to be independent and identically distributed (i.i.d.) for machine learning purposes . it is important to note that each tabular data set is unique depending on the application domain. a bioinformatics data set can be structured in a tabular format concealing the relationships between samples. furthermore, attention learning can focus on how individual samples attend to other samples to find between sample relationship .we hypothesize that traditional machine or deep learning methods can disregard useful relationships between samples that can improve the representation of individual samples in certain domains. in other words, gnn and attention-based methods can be competitive for certain tabular data sets over traditional feature-based learning with the i.i.d assumption. this paper investigates whether gnn and attention models can learn hidden relationships between samples in tabular data to improve the baseline classification accuracy obtained using feature vectors alone. figure 1: a graph layer aggregating neighboring information via message passing. features of the node itself (self loop) and from neighbors (arrows point towards nodes) are aggregated to create embedding (z 1 , z 2 , z 3 , z 4 ) in a graph layer.the remainder of the manuscript is organized as follows. section ii provides a theoretical background on the graph and attention networks and a literature review on deep learning methods for tabular data. section iii describes methods for creating an adjacency matrix for learning tabular data on graph networks, the baseline graph and deep learning methods, experimental steps, and evaluation. section iv summarizes the results by comparing the performance of different graph and attention-based methods on tabular data sets. section v summarizes the findings, and the conclusions are provided in section vi. section iii describes methods for creating an adjacency matrix for learning tabular data on graph networks, the baseline graph and deep learning methods, experimental steps, and evaluation. 
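One simple way to realize the adjacency-matrix construction for tabular rows mentioned above is a k-nearest-neighbour graph built from cosine similarity between samples; this is an illustrative sketch (the neighbourhood size and the exact construction used in the paper may differ).

```python
import numpy as np

def knn_cosine_adjacency(X, k=10):
    """Connect each tabular row to its k most cosine-similar rows, producing a
    symmetric 0/1 adjacency matrix usable by a GCN/GAT-style model."""
    Xn = X / (np.linalg.norm(X, axis=1, keepdims=True) + 1e-12)
    sims = Xn @ Xn.T
    np.fill_diagonal(sims, -np.inf)              # no self-loops here; add them later if needed
    A = np.zeros_like(sims)
    idx = np.argsort(-sims, axis=1)[:, :k]
    rows = np.repeat(np.arange(len(X)), k)
    A[rows, idx.ravel()] = 1.0
    return np.maximum(A, A.T)                    # symmetrise for an undirected graph
```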
however, tabular data sets often contain limited sample sizes and feature dimensionality, which may not take the full benefits of deep learning.because deep learning methods still fall short of traditional machine learning on tabular data, recent studies propose new methods to improve the deep representation of tabular data. tabnet is an interpretable model that uses attention to select important features from tabular dataand outperforms other baseline methods on four out of five data sets and three out of six synthetic data sets. several graph-based learning methods, including graph attention network (gat)and graph attention autoencoder (gate), have incorporated attention mechanisms to demonstrate superior performance over standard graph-based methods exclusively on graph data sets.d assumption, tabular data are rarely modeled as graph data to investigate the effectiveness of graph-based learning. few studies have proposed graph-based models for tabular data, including bipartite graphs (grape)and propagation to enhance tabular data prediction (pet), and multiplex graph (tabgnn).in the grape approach, tabular data (x m×n ) is modeled as a bipartite graph g = (v s , v f , e) where v s and v f denote sets of sample and feature nodes and e represents all undirected edges connecting sample and feature nodes. to the best of our knowledge, graph neural networks have not been considered in learning between sample relationships in tabular data with numerical variables or compared against traditional machine learning or attention-based learning methods. the section summarizes the baseline and evaluation methods for comparing several deep learning alternatives with superior machine learning methods on tabular data.we compare tabular data classification performances of nine baseline methods: three traditional machine learning models, three recently proposed attention-based deep methods for tabular data, and three graph-based neural networks. these baseline methods are compared against three graph neural network methods: graph convolution network (gcn), graph attention network (gat), and graph attention autoencoder (gate). second, graph neural networks, such as gat, gate, and gcn methods with cosine similarity metrics, have shown promising results, outperforming state-of-the-art deep tabular data learning methods on several data sets.the deep learning methods proposed for tabular data (ftt, npt, tabnet) are among the best methods for the data sets with the largest sample sizes (vehicle, satellite, and malware). this finding is important because it shows an alternative approach to improving deep learning of tabular data sets when traditional machine learning outperforms deep learning methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/698.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/698.txt new file mode 100644 index 0000000000000000000000000000000000000000..661bfa9654f04130d009d1446347557a92236459 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/698.txt @@ -0,0 +1 @@ +air pollution presents a significant health problem as far as urban metropolises are concerned. though air pollution monitoring and forecasting accurately and precisely are found to be tremendously critical, prevailing data-driven methods have so far entirely acquired the complicated interactions between spatial and temporal aspects of air pollution. 
moreover, uneasiness for the environment, health, and welfare has fascinated substantial global awareness owing to the new environmental confronting that menace the planet.a method called deep-air employing cnn and lstm framework was proposed by qi zhang et al. to fill the gap in ensuring fine-grained city-wide air pollution estimation by means of domain-specific features that, in turn, captured spatio-temporal features. also, a 1 to 1 convolution layer was designed with the purpose of improving the learning of temporal and spatial interaction. as a result, the forecasting accuracy for air pollution was improved.variational auto encoder (vae) based on the innovative integrated multiple direct attention deep learning architecture (imda) was proposed by abdelkader et al. , taking into consideration the conventional vae and attention mechanism to forecast distinct air pollutants in a computationally efficient and accurate manner. also, the temporal dependencies between nonlinear approximations concentrating on the relevant feature extraction were ensured.to this extent, this paper is proposed for a research study on applying discretized regression and least square support vector (dr-lssv) based air pollution monitoring and control for iot networks. in this study, we proposed a system for predicting air quality by advanced machine learning model -hybrid discretized regression and least square support vector air pollution forecasting. two baseline models were also built for comparison with our proposed method. though air pollution monitoring and forecasting accurately and precisely are found to be tremendously critical, prevailing data-driven methods have so far entirely acquired the complicated interactions between spatial and temporal aspects of air pollution. in this study, we proposed a system for predicting air quality by advanced machine learning model -hybrid discretized regression and least square support vector air pollution forecasting. a novel deep learning-based air quality forecasting by learning spatial-temporal correlation features and multivariate air quality-related interdependence time series data employing hybrid deep learning architecture was proposed by shengdong et al.to identify nonlinear relationships between input and output variables, wavelet neural networks were integrated with meteorological conditions for air pollution forecasting by qingchun et al. finally, the proposed dr-lssv method can classify air quality-related time series data by means of the air quality index and the selected features via the concordance correlative function for different weather conditions and different traffic states both on an hourly and daily basis. air quality data obtained as input is provided in the first hidden layer, where preprocessing is carried out with the aid of discretized hartley transformation function to remove the noise present in the raw air quality data. initially, the processed air quality data provided as input to the second hidden layer is split into two homogenized vectors, one for station-wise air quality data on an hourly basis and another for station-wise quality data on a daily basis. 
we also compared the performance of the proposed dr-lssv method withandin java language using an air quality dataset to measure the efficiency in terms of air pollution forecasting accuracy, air pollution forecasting time and false positive rate.this section provides the results analysis of three distinct parameters, air pollution forecasting accuracy, air pollution forecasting time and false positive rate.the first set of experiments is conducted to analyze the performance of the proposed method, dr-lssv, with respect to air pollution forecasting accuracy in forecasting air pollution. with air quality index being applied as the measure by the indian government to quantify air pollution, air pollution forecasting demands sophisticated monitoring tools and mechanisms along with advanced models to estimate time-related pollutant data. figure5shows the air pollution forecasting accuracy measured using the three methods dr-lssv, deep-airand vae-imdaconsidering the aqi measurements. to study the influence of air pollution forecasting time on distinct numbers of air quality data ranging between 2000 and 20000, experiments were performed with different thresholds (i. we note that as the number of air quality data is increased, the percentage of maximum semi-parametric likelihood estimator decreases, and the percentage of air pollution forecasting time decreases steadily., false air pollutants wrongly involved in the measurement of air quality index) and the total number of air quality data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/699.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/699.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e37aed14baefd8c775a48736d7fa7260456aa0a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/699.txt @@ -0,0 +1 @@ +deep learning (dl) has revolutionized the field of artificial intelligence by enabling remarkable advancements in various domains, including computer vision , natural language processing , causal inference , and reinforcement learning etc. standard deep neural networks (dnns) have proven to be powerful tools for learning complex representations from data. however, despite their success, standard dnns remain restrictive in certain conditions. for example, once a dnn is trained, its weights as well as architecture are fixed, e.g., , and any changes to weights or architecture need re-training the dnn. this lack of adaptability and dynamism restrict the flexibility of the dnns, making them less suitable for scenarios where dynamic adjustments or data-adaptivity are required . dnns, generally have large number of weights and they need large amounts of data to optimize those weights . this could be challenging in situations where large amounts of data are not available, e.g., in domains such as healthcare, the collection of sufficient data for rare diseases can be particularly challenging due to the limited number of patients available per year . finally, uncertainty quantification (uq) in the dnns' predictions is essential as it provides a measure of confidence, enabling better decision-making in high-stakes applications . the existing uq techniques have limitations, such as the need to train multiple models , and uq is still considered an open problem . 
similarly, domain adaptation, domain generalization, adversarial defence, neural style transfer, and neural architecture search etc., are important problems but far from being solved, where hypernets can provide effective solutions as discussed in section 4.hypernetworks (or hypernets in short) have emerged as a promising architectural paradigm to enhance the flexibility (through data-adaptivity and dynamic architectures) and performance of dnns. hypernets are a class of neural networks that generate the weights/parameters of another neural network called target/main/primary network, where both the networks are trained in an end-to-end differentiable manner . hypernets complement the existing dnns and provide a new framework to train dnns, resulting in a new class of dnns called as hyperdnns (please refer to section 2 for details). the key characteristics and advantages of hypernets that offer applications across different problem settings are discussed below.(a) soft weight sharing: hypernetworks can be trained to generate the weights of multiple dnns for solving related tasks . this is called soft weight sharing as unlike hard weight sharing that involves shared layers among tasks (e.g., in multitasking), here different dnns are generated by a common hypernet through task-conditioning. this is helpful for sharing information among tasks and can be used for transfer learning or dynamic information sharing . (b) dynamic architectures: hypernetworks can be used to generate the weights of a network with a dynamic architecture, where the number of layers or the structure of the network changes during training or inference. this can be particularly useful for tasks where the target network structure is not known at training time . (c) data-adaptive dnns: unlike the standard dnns whose weights are fixed at inference time, hyperdnns can be developed to generate target network customized to the needs of the data. in such cases, hypernets are conditioned on the input data to adapt to the data . (d) uncertainty quantification: hypernets can effectively train uncertainty aware dnns by leveraging techniques like sampling multiple inputs from the noise distribution or incorporating dropout within the hypernets themselves . by generating multiple sets of weights for the main network, hypernets create an ensemble of models, each with different parameter configurations. this ensemble-based approach aids in estimating uncertainty in the model predictions, a crucial aspect for safety-critical applications like healthcare, where having a measure of confidence in predictions is essential. (e) parameter efficiency: hyperdnns, i.e., dnns trained with hypernets can have fewer weights than the corresponding standard dnns, resulting in weight compression . this can be particularly useful when working with limited resources, limited data or when dealing with high-dimensional data and can result in faster training than the corresponding dnn .ha et. al coined the term hypernets (are also referred to as meta-networks or meta-models) and trained target network and hypernet in an end-to-end differentiable way. however, the concept of learnable context-dependent weights was discussed even earlier, such as fast weights in and hyperneat . our discussion on hypernets, focuses on neural networks generating weights for the target neural network, due to their popularity, expressiveness, and flexibility . 
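A minimal numpy sketch of the core mechanic described above: a small hypernetwork maps a task-conditioning embedding to the full weight vector of a target MLP, so several related tasks share one set of generator parameters (soft weight sharing). The layer sizes and task embeddings are arbitrary illustrative choices, and no training loop is shown.

```python
import numpy as np

rng = np.random.default_rng(0)

# Target network: a 2-layer MLP, 10 -> 16 -> 3.
target_shapes = [(10, 16), (16,), (16, 3), (3,)]
n_target = sum(int(np.prod(s)) for s in target_shapes)   # number of weights to generate

# Hypernetwork: task embedding (dim 4) -> hidden (32) -> flat target weight vector.
emb_dim, hid = 4, 32
W1 = rng.normal(scale=0.1, size=(emb_dim, hid)); b1 = np.zeros(hid)
W2 = rng.normal(scale=0.01, size=(hid, n_target)); b2 = np.zeros(n_target)

def hypernet(task_emb):
    """Generate the target network's weights from a task embedding."""
    h = np.tanh(task_emb @ W1 + b1)
    return h @ W2 + b2

def unflatten(flat):
    out, i = [], 0
    for s in target_shapes:
        n = int(np.prod(s))
        out.append(flat[i:i + n].reshape(s)); i += n
    return out

def target_forward(x, params):
    w1, c1, w2, c2 = params
    return np.maximum(x @ w1 + c1, 0.0) @ w2 + c2

task_a, task_b = rng.normal(size=emb_dim), rng.normal(size=emb_dim)
x = rng.normal(size=(5, 10))
y_a = target_forward(x, unflatten(hypernet(task_a)))   # task-specific network A
y_b = target_forward(x, unflatten(hypernet(task_b)))   # network B generated by the same hypernet
print(y_a.shape, y_b.shape)
```

In an actual hyperDNN, only the generator parameters (W1, b1, W2, b2 above) would be trainable, optimized end-to-end through the generated target network.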
recently, hypernets have gained significant attention and have produced state-of-the-art (sota) results across several deep learning problems, including ensemble learning , multitasking , neural architecture search , continual learning , weight pruning , bayesian neural networks , generative models , hyperparameter optimization , information sharing , adversarial defence , and reinforcement learning (rl) (please refer to section 4 for more details).despite the success of hypernets across different problem settings, to the best of our knowledge, there is no review of hypernets to guide the researchers about the developments and to help in utilizing hypernets. to fill this gap, we provide a brief review of hypernets in deep learning. we illustrate hypernets using an example and differentiate hyperdnns from dnns (section 2). to facilitate better understanding and organization, we propose a systematic categorization of hypernets based on five distinct design criteria, resulting in different classifications that consider factors such as (i) input characteristics, (ii) output characteristics, (iii) variability of inputs, (iv) variability of outputs, and (v) the architecture of hypernets (section 3). furthermore, we offer a comprehensive overview of the diverse applications of hypernets in deep learning, spanning various problem settings (section 4). by examining real-world applications, we aim to demonstrate the practical advantages and potential impact of hypernetworks. additionally, we discuss some scenarios and pose direct questions to understand if we can apply hypernet to a given problem (section 5). finally, we discuss the challenges and future directions of hypernet research (section 6). this includes addressing initialization, stability and complexity concerns, as well as exploring avenues for enhancing the theoretical understanding and uncertainty quantification of dnns etc. by providing a comprehensive review of hypernetworks, this paper aims to serve as a valuable resource for researchers and practitioners in the field. through this review, we hope to inspire further advancements in deep learning by leveraging the potential of hypernets to develop more flexible, and high-performing models.• we propose categorizing hypernets based on five design criteria, leading to different classifications of hypernets, such as based on inputs, outputs, variability of inputs and outputs, and architecture of hypernets. • we present a comprehensive overview of applications of hypernetworks across different problem settings, such as uncertainty quantification, continual learning, causal inference, transfer learning, and federated learning, and summarize our review, as per our categorization, in a table (table 2). • we explore broad scenarios for hypernet applications, drawing from existing use cases and hypernet characteristics. this exploration aims to equip researchers with actionable insights into when to leverage hypernets in their problem setting. • finally, we identify the challenges and future directions of hypernetwork research, including initialization, stability, scalability, and efficiency concerns, and the need for theoretical understanding and interpretability of hypernetworks. 
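Following up on the uncertainty-quantification use case listed above, a compact sketch of a noise-conditioned hypernetwork used as an implicit distribution over target-network weights: sampling several noise vectors yields an ensemble of target models whose disagreement serves as an uncertainty estimate. The linear target model and the untrained generator are stand-ins for illustration only.

```python
import numpy as np

rng = np.random.default_rng(2)
noise_dim, in_dim = 8, 5
G = rng.normal(scale=0.3, size=(noise_dim, in_dim + 1))   # noise -> (weights, bias) of a linear target model

def sample_target_params(n_models):
    z = rng.normal(size=(n_models, noise_dim))             # noise-conditioning inputs
    flat = z @ G                                            # one weight vector per noise sample
    return flat[:, :in_dim], flat[:, in_dim]

def ensemble_predict(x, n_models=50):
    w, b = sample_target_params(n_models)
    preds = x @ w.T + b                                     # (n_points, n_models)
    return preds.mean(axis=1), preds.std(axis=1)            # predictive mean and uncertainty

x = rng.normal(size=(4, in_dim))
mean, std = ensemble_predict(x)
print(mean.round(2), std.round(2))
```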
by highlighting these areas, we aim to inspire further advancements in hypernetworks and provide guidance for researchers interested in addressing these challenges.the rest of the paper is organized as: section 2 provides a comprehensive background on hypernets, while section 3 introduces a novel categorization scheme for hypernets. the diverse applications of hypernets across various problems are discussed in section 4, followed by an exploration of specific scenarios where hypernets can be effectively employed in section 5. addressing challenges and delineating future research directions is the focus of section 6, and finally, the concluding remarks are discussed in section 7. our discussion on hypernets, focuses on neural networks generating weights for the target neural network, due to their popularity, expressiveness, and flexibility.despite the success of hypernets across different problem settings, to the best of our knowledge, there is no review of hypernets to guide the researchers about the developments and to help in utilizing hypernets.• we propose categorizing hypernets based on five design criteria, leading to different classifications of hypernets, such as based on inputs, outputs, variability of inputs and outputs, and architecture of hypernets. • we present a comprehensive overview of applications of hypernetworks across different problem settings, such as uncertainty quantification, continual learning, causal inference, transfer learning, and federated learning, and summarize our review, as per our categorization, in a table(table 2). for example, task-conditioned hypernets are suitable for information sharing among multiple tasks, data-conditioned hypernets are suitable to deal with conditions where dnn need to adapt to input data, and noise-conditioned hypernets are suitable for uncertainty quantification in the predictions.this can be seen as a super categorization over input-based hypernets where task-conditioned hypernets fall in the static inputs category while random-noise and data-conditioned hypernets fall in the dynamic category.continual learning: continual learning, also known as lifelong learning or incremental learning, is a machine learning paradigm that focuses on the ability of a model to learn and adapt continuously over time, in a sequential manner, without forgetting previously learned knowledge.applied hypernets that take hyperparameters of the target network as input and generate optimal weights for the target network, and hence perform joint training for target network parameters and hyperparameters which are otherwise trained in nested optimization loops. they used data-conditioned hypernets where examples from the target domains are used as input to hypernet that generates weights for the target network. in the context of nlp, hypernets can be used to generate or adapt neural network architectures, tuning hyperparameters, for neural architecture search, and for transfer learning and domain adaptation etc. some applications of hypernets in computer vision are: ha et al.the above applications of hypernets are not exhaustive and some other interesting areas where hypernets have produced the sota results are knowledge graph learning, shape learning, network compression, learning differential equations, 3d point cloud processing, speech processing, quantum computing, and knowledge distillationetc. in other words, we can ask, 'are we working in a setting where the target network has to be customized to the input data? or data are changing regularly? 
in this scenario, we can employ data-conditioned hypernet that take data as input and adaptively generates parameters of the target network. hypernets have opened a new door to uncertainty quantification as noise-conditioned hypernets can generate distribution on target network weights and have been shown to have better uncertainties than the sota. we discussed some of the important applications of hypernets to different deep learning problems, including multitasking, continual learning, federated learning, causal inference, and computer vision etc. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/7.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/7.txt new file mode 100644 index 0000000000000000000000000000000000000000..d33cabb14b95947644eb7f3b3261b1b745546289 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/7.txt @@ -0,0 +1 @@ +a brain-computer interface (bci), which has been extensively studied in neuroscience, neural engineering and clinical rehabilitation, builds a communication pathway between the human brain and a computer . electroencephalogram (eeg), which records the brain's electrical activities from the scalp, has become the most widely used input signal in bcis due to its low cost and convenience . an eegbased bci system usually consists of four parts, namely signal acquisition, signal preprocessing, machine learning, and control action, as shown in fig. 1. the machine learning block includes feature extraction and classification/regression if traditional machine learning algorithms are adopted. machine learning has achieved great success in many applications - , including eeg-based bcis. despite their outstanding performance and robustness to random noise, machine learning models, especially deep learning models, are vulnerable to adversarial attacks, where carefully crafted human-imperceptible perturbations are added to benign samples to cause mis-recognitions - . the existence of adversarial examples raised wide attention and serious security concern about using machine learning models in safety/privacycritical applications, such as malware detection , speech recognition , autonomous driving , etc.for bcis, most studies so far focused on increasing the accuracy and efficiency of machine learning algorithms, but few considered their security. however, as first revealed by zhang and wu , adversarial examples generated by unsupervised fast gradient sign method (fgsm) can significantly degrade the performance of deep learning classifiers in eeg-based bcis . meng et al. further exposed the vulnerability of machine learning algorithms in regression tasks of eeg-based bcis. zhang et al. showed that adversarial examples can fool bci spellers to output any wrong character the attacker wants. liu et al. designed a total loss minimization approach to craft universal adversarial perturbations for eeg-based bcis, making adversarial attacks easier to implement. bian et al. used simple square wave signals to generate adversarial examples, which can mislead steady-state visual evoked potential based bcis. jiang et al. used active learning for efficient poisoning attacks to eeg-based bcis.the consequences of adversarial attacks to bcis could range from merely user frustration to severe injury, raising a critical safety concern and an urgent need for adversarial defense. 
for example, adversarial attacks can cause malfunctions in exoskeletons or wheelchairs controlled by eeg-based bcis , and may drive the user into danger on purpose. in bcibased driver drowsiness estimation , adversarial attacks may hijack the output of the bci system and increase the possibility of traffic accidents. more importantly, in military settings, adversarial attacks in bcis may result in false commands, such as friendly fire . therefore, it is critical to develop adversarial defense approaches for bcis.in the literature, multiple adversarial defense approaches have been proposed for other applications, e.g., computer vision and natural language processing, which can be divided into proactive defenses and reactive defenses . proactive defenses, such as adversarial training , , aim at obtaining a robust classifier. reactive defenses - attempt to identify adversarial examples to reject them or restore them to normal ones.though various approaches have been proposed to tackle adversarial attacks, challenges still exist , - , . a given detection approach performing well for one task may perform poorly for another. to our knowledge, adversarial detection in eeg-based bcis has not been studied yet. this paper implements and compares several state-of-theart adversarial detection approaches for eeg-based bcis. experiments on two eeg datasets and three convolutional neural network (cnn) models showed that by extracting features from the output of neural networks, we can effectively distinguish between white-box adversarial examples and normal ones.the remainder of this paper is organized as follows: section ii introduces several adversarial attacks used in our experiments. section iii describes our adversarial detection strategies. section iv details our performance evaluation settings. section v presents the detection results. section vi draws conclusions and points out some future research directions.designed a total loss minimization approach to craft universal adversarial perturbations for eeg-based bcis, making adversarial attacks easier to implement.the consequences of adversarial attacks to bcis could range from merely user frustration to severe injury, raising a critical safety concern and an urgent need for adversarial defense. according to how much the attacker knows about the target model, adversarial attacks can be categorized into whitebox attacks and black-box attacks.although bcis are vulnerable to adversarial attacks, adversarial detection in bcis has not been explored yet.adversarial detection based on kernel density and bayesian uncertainty (bu) has been shown as the most robust against optimization-based white-box attacks among ten different adversarial defense approaches, including adversarial retraining and input transformation. in our transferability-based black-box attack, the first substitute model was used to imitate the substitute model trained by the adversary, and the second to craft adversarial examples for training our adversarial detectors. before adversarial detection, we computed the bcas and rcas of neural network classifiers on various adversarial examples, and the ℓ 2 norms of adversarial perturbations imposed by various adversarial attacks. 
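A self-contained sketch of the fast gradient sign method (FGSM) referred to above, applied to a toy logistic-regression "classifier" standing in for an EEG model. The stand-in weights, the ε budget, and the input vector are assumptions for illustration; the cited attacks perturb multi-channel EEG trials fed to CNNs.

```python
import numpy as np

rng = np.random.default_rng(0)
d = 64                                    # toy "EEG feature" dimensionality
w, b = rng.normal(size=d), 0.1            # stand-in trained classifier parameters

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def fgsm(x, y, eps):
    """Untargeted FGSM: x_adv = x + eps * sign(d loss / d x) for binary cross-entropy."""
    p = sigmoid(x @ w + b)
    grad_x = (p - y) * w                  # gradient of the BCE loss w.r.t. the input
    return x + eps * np.sign(grad_x)

x = rng.normal(size=d)
y = 1.0                                   # true label
x_adv = fgsm(x, y, eps=0.05)
print("clean score:", sigmoid(x @ w + b).round(3),
      "adversarial score:", sigmoid(x_adv @ w + b).round(3))
```

PGD iterates this step under a projection onto the ε-ball, while CW solves an optimization problem instead of taking a single signed-gradient step; the detectors discussed above then operate on features extracted from the network's outputs on such inputs.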
we compared the adversarial detection auc scores under two gradient-based adversarial attacks (fgsm and pgd) and one optimization-based attack (cw).all detectors can distinguish adversarial examples from benign ones to a certain extent, indicating that there are differences between adversarial examples and benign examples. it should be noted that the detection performance degradation of black-box attacks may not be attributed to the detector itself alone, but may also be because that the adversarial examples generated by black-box attacks were themselves less aggressive and thus their adversarial artifacts were less obvious.to evaluate how well the adversarial detection approaches perform under unknown attacks, we trained adversarial detectors on fgsm adversarial examples, and then evaluated them on pgd and cw. the performance differences of various detectors on pgd and cwiv, the auc scores of many adversarial detectors were significantly lower (below 50%), indicating that the distribution of adversarial examples generated in white-box and black-box attacks were quite different, even though both were based on fgsm.our detection results under white-box attacks indicated that, when proactive adversarial defense approaches are adopted, there is no need to require the model to fully defend against white-box attacks with strong attack strength. this suggests that adversarial examples generated by the same type of adversarial attacks have high similarity, whereas those by different types of adversarial attacks are quite different. the properties of adversarial examples generated by different adversarial attacks are usually different. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/70.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/70.txt new file mode 100644 index 0000000000000000000000000000000000000000..efe514d84ca8567797902666305dc9d319e2ea1c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/70.txt @@ -0,0 +1 @@ +simulated data is useful in machine learning (ml) work for developing and testing algorithms in a repeatable and verifiable way. we note that although historically there has been some debate about the utility of simulated data in machine learning applications given the challenges of sim2real (höfer et al. 2020), irrespective of whether simulated data is valuable for training production models, they are extremely valuable in the initial stages of model design, unit testing, and verification processes. because it is possible to have absolute certainty regarding ground truth labels as well as microscopic control over their data generation conditions, it is possible to remove the initial complexity and variability of noisy, real-world data, enabling a clean development environment to verify the functioning and logic of machine learning pipelines and models. simulations are particularly valuable when the system under study is characterized by large class imbalances, rare events, and significant label noise. all of these challenging modelling problems frequently appear in fintech machine learning.this article is organized as follows. we briefly describe possible approaches to synthetic dataset generation. we then describe the kinetic monte carlo (kmc) algorithm in a general way. 
next, we explain attributes that define agents in fintech-kmc, followed by a discussion related to the treatment of the rate constants which govern their behaviour.we provide examples of our simulation output, as well as some simple supervised machine learning tests which make use of this output. we discuss the limitations of our method, and finally, conclude. our major contributions are:• a new agent-based simulation tool, driven by kinetic monte carlo is introduced • a detailed description of the tool is provided, including its design, modelling capabilities, and output format • demonstration of a predictive model trained to detect "bad actors" is given as a prototypical example of an ml model that uses our synthetic data different types of synthetic data generation protocols exist. they include simple heuristics (e.g. if an input feature takes some value x, set the output to label y ), simulations which can be deterministic (papageorgiou and paskov 1999) or stochastic (wang 2012), and generative models (goodfellow et al. 2014;albergo, kanwar, and shanahan 2019) which have been trained from sample data streams from the true system. we believe that generative models, while they may be able to best mimic a real-world dataset, introduce an entirely new set of issues since they themselves have dependencies on high-quality data pipelines, training infrastructure, monitoring, etc. a stochastic simulation is an excellent balance between being realistic enough to find implementation bugs in machine learning workflows yet simple enough not to introduce them.kinetic monte carlo (kmc) kmc has a long history of use in the physical sciences for describing the microscopic time evolution of a wide range of systems, such as molecules on catalytic surfaces (stamatakis and vlachos 2012), self-assembly and growth (ghosh et al. 2021), and radiation damage (voter 2007). the method has also been successfully applied to the social sciences, where it has been used to simulate the structure and information flow through large online social graphs such as twitter (ryczko et al. 2017).unlike some other monte carlo based simulation techniques (most notably markov chain monte carlo, mcmc), kmc is a rejection-free algorithm. an event occurs at every simulation step. additionally, we note that the simulation step size, ∆t, is not fixed. in a hypothetical system with only a single type of event, occurring at one characteristic rate, r, each step in simulation time will be approxi- cummulative array = cumsum(rates/sum(rates))5:u 1 := rand() {get random number u 1 ∈ (0, 1]} 6:event := binary search(cummulative array, u 1 ) 7:carry out event(event)8:u 2 := rand() {get random number u 2 ∈ (0, 1], update simulation time} 9:simulation time := simulation time -ln(u 2 )/r 10:write out event information to logfile 11: end while mately ∆t = r -1 apart. in a simulation with many possible events (and rates), the simulation will progress forward in time according to the total effective rate of the system (e.g.in practice, this means that as the total number of possible events grows, the time between each successive one decreases. this is exactly what happens in a real-world system. consider an early-stage financial platform with exactly 1 customer. if that customer deposits money once per week, then after 1 month the transaction log will contain approximately 4 entries, spaced roughly 1 week apart. 
if the number of customers increases by x100 (and the timing of their deposities is randomly distributed) then after the same period of one month, we would expect 400 entries, with the average time between updates decreasing by x100. kmc simulations mimic this behaviour, and allow for irregularly spaced events and changing values of ∆t as the simulation progresses.we note that in the above example, we assumed that the rates of client deposits were uncorrelated with each other and occurred at random times. in general this of course is not guaranteed to be true, particularly with payments which are associated with customer paycheques, rent, loan repayments, etc, which tend to occur around specific times of the month or the year. see sec. dynamic rates for further discussion on how we account for this.. disclaimer: in the following discussion, for pedagogical purposes, we give examples of actions or rules which might take place on a fintech platform. these are purely hypothetical, and are not intended to describe a recommended set of policies of a real fintech platform. such platforms are subject to a large number of policies and regulations (particularly with respect to id requirements, customer age limits, etc). while fintech-kmc could be adapted to incorporate those specific requirements as needed, for this discussion we do not handle this complexity or region-specific limitations. we note that although historically there has been some debate about the utility of simulated data in machine learning applications given the challenges of sim2real(höfer et al. 2020), irrespective of whether simulated data is valuable for training production models, they are extremely valuable in the initial stages of model design, unit testing, and verification processes.• a new agent-based simulation tool, driven by kinetic monte carlo is introduced • a detailed description of the tool is provided, including its design, modelling capabilities, and output format • demonstration of a predictive model trained to detect "bad actors" is given as a prototypical example of an ml model that uses our synthetic data different types of synthetic data generation protocols exist. in a hypothetical system with only a single type of event, occurring at one characteristic rate, r, each step in simulation time will be approxi- cummulative array = cumsum(rates/sum(rates)). in a simulation with many possible events (and rates), the simulation will progress forward in time according to the total effective rate of the system (e. for example, an agent may purchase bitcoin (btc) at a frequency of ≈ 1/week, provided their account balance is above a threshold and they have passed the id verification process. 
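The KMC selection/time-update loop whose pseudocode appears (garbled) above, written out as runnable Python. The two-event "deposit vs. purchase" rate table is a made-up stand-in for the fintech-kmc agent actions.

```python
import numpy as np

rng = np.random.default_rng(0)

# Toy event table: (name, rate in events per day). Stand-ins for agent actions.
events = [("deposit", 4.0 / 7.0), ("btc_buy", 1.0 / 7.0)]
rates = np.array([r for _, r in events])

t, t_end, log = 0.0, 30.0, []
while t < t_end:
    total_rate = rates.sum()
    cumulative = np.cumsum(rates / total_rate)     # cumulative event probabilities
    u1 = rng.random()
    idx = int(np.searchsorted(cumulative, u1))     # rejection-free event selection
    u2 = rng.random()
    t += -np.log(u2) / total_rate                  # exponential waiting time, mean 1/total_rate
    log.append((t, events[idx][0]))                # "carry out" the event: here, just record it

print(len(log), "events in", t_end, "days; first few:", log[:3])
```

Adding more agents or actions only appends rows to the rate table; the waiting time automatically shrinks as the total rate grows, which is the behaviour described above.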
allowing for dynamic and flexible modifications to the rate constants which determine agent behaviour allows for a realistic approach to modelling human behaviour.available actions agents interact with one another and evolve through time via the actions they take within the simulation.• id verification: successful verification of an agent "unlocks" other actions (specifically btc buy) and changes the maximum allowed cash in and cash out amounts.• btc buy: after an agent has successfully verfied their id via the id verification action, they can purchase btc.importantly, to represent the diversity of customer behaviour observed on fintech platforms by real customers, the individual rates each agent operates with are not identical, but rather are sampled randomly from a statistical distribution.for example, an agent archetype of a cypto enthusiast will be assigned a rate for btc purchases from the normal distribution n (µ, σ), where µ = 2 and σ = 1 (in units of days -1 ), whereas a crypto skeptic would have that rate set to zero. defining the agent archetype determines the distribution of rates and the typical range of money movement values an agent will use during the simulation. for this particular simulation, bad actors were configured to have a lower rate of id verification success (50% compared to 75% for regular customers) and slightly different peer-topeer money transfer behaviours (an average transfer amount of 5±3 vs 8±3 and a willingness to do such transfers only when their balance was above 15±3 vs 30±3).our simulation covered a time period of ≈ 8 days, which, given the various rate constants we used, represents approximately 100 actions for each agent. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/700.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/700.txt new file mode 100644 index 0000000000000000000000000000000000000000..8fb19445c3c80cebf2bc2735869b2e0c565306a5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/700.txt @@ -0,0 +1 @@ +the multi-armed bandits framework is a classic paradigm for sequential prediction and decision-making, encompassing applications in various domains including medical trials, web search, recommendation systems, and online advertising. the goal of the learner is to maximize the expected cumulative reward by selecting arms (actions, options, decisions) over a sequence of trials. in bandits problem, there are t rounds and k arms, each of which is associated with an unknown reward distribution. in each round, the algorithm selects an arm from the available candidates and subsequently observes a reward that is independently sampled from the corresponding distribution. the algorithm aims to minimize the regret, which quantifies the additional cumulative loss incurred by the algorithm compared to always playing the optimal arm.several well-known algorithms, such as ucb1 and thompson sampling, have been proven to achieve near-optimal performance while maintaining a comprehensive record of all arms in memory. however, in scenarios involving limited memory or a large action space, the storage of all arms becomes problematic for the learner. an exemplary case arises in recommendation systems, where the learner must choose from millions of items, such as music and movies, to present to users with the objective of maximizing the click-through rate. accomplishing this task using only a sublinear number of items stored in memory poses a considerable challenge. 
motivated by practical applications, substantial efforts have been dedicated to the development of algorithms for the streaming bandits setting with constrained m (m < k) arm memory . in this streaming version, arms arrive one at a time in a sequential stream, and the algorithm can only select arms that are stored in memory. once the memory is full and the algorithm intends to choose a new arm, it must remove at least one existing arm from memory before selecting the new one. consequently, all associated statistics, including the arm's index, mean reward, and number of pulls, are discarded. the streaming setting necessitates the use of more sophisticated strategies to effectively balance the reading of new arms from the stream and retrieving arms from memory, thereby optimizing the learner's reward.table 1: a summary of our results for streaming bandits with time horizon t , number of arms k, number of passes b, bounded arm memory m < k, and α = 2 b /(2 b+1 -1). previous lower bounds hold with m = o(k/b 2 ) while ours hold with m = o(k/b). recently,explore the dependence on the number of passes b, and establish the ω(4 -b t 2 b /(2 b+1 -1) ) worst-case lower bound and õ(t 2 b /(2 b+1 -1) √ kb) regret upper bound for algorithms with b passes and o(k/b 2 ) arm memory.for the worst-regret lower bound of the streaming bandits, we establish that any algorithm using 1 ≤ b ≤ log log t passes over the stream and o(k/b) arm memory incurs regret ω (t b) α k 1-α , where α = 2 b /(2 b+1 -1). we present some results with special b: any 1-pass algorithm with o(k) bounded memory incurs ω(k 1/3 t 2/3 ) regret; any log log t -pass algorithm with o( k log log t ) arm memory incurs ω( √ kt log log t ) regret. theoretical analysis of the lower bound implies that, during the p-th pass, the number of pulls for any arm should be capped by θ((t b) 2βp k -2βp ), β p = 2 b-p (2 p -1) 2 b+1 -1 . we show that given a time horizon t , a stream consisting of k arms, and b passes over this stream, there exists an algorithm capable of achieving regret regret(a) = o (t b) α k 1-α √ log t , while only storing two arms in memory. we then establish the ω(kǫ -2 ) sample complexity lower bounds by constructing a hard distribution, both for the ǫoptimal arm detection setting and the ǫ-optimal arm identification setting, with the single-pass data stream and bounded arm memory.note that the incurred regret during exploration for any single-pass ǫ-optimal arm identification algorithm is at least ω( x∈k ∆ x ǫ -2 ), while the incurred regret is ω(ǫ(t -kǫ -2 )) during exploitation with the ǫ-optimal arms. if ǫ 1 = θ(ǫ) and ǫ 2 = θ(ǫ), then we have |k 1 ∪ k 2 | = θ(k) and the algorithm has to use ω(ǫ -2 ) pulls for each of θ(k) number of arms identify at least one ǫ-optimal arm with constant probability. with the knowledge of one ǫ ′ -optimal arm, any algorithm still has to use ω((ǫ ′ ) -2 ) samples to identify ǫ ′ -optimal arms and use ω((ǫ) -2 ) samples to identify ǫ-optimal arms, then the exploration regret lower bound for the single-pass algorithms could be reduced to ω( x∈k (0,ǫ ′ ) ∆ x ǫ -2 + x∈k (ǫ ′ ,1) ∆ x (ǫ ′ ) -2 ). for the streaming setting,consider stochastic bandits with constant arm memory and propose an algorithm that achieves a o(log 1/∆) factor of optimal instance-dependent regret with o(log t ) passes, where ∆ is the gap between the optimal arm and second-optimal arm. 
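The regret expressions above lose their sub- and superscripts in this flattened text; as a readability aid, here is how I read the main quantities. Treat this as a best-effort reconstruction of the notation rather than the authors' exact statements.

```latex
% Best-effort reconstruction of the garbled bounds (notation inferred from context).
\[
  \alpha = \frac{2^{b}}{2^{b+1}-1}, \qquad
  \text{lower bound: } \operatorname{Regret} = \Omega\!\bigl((Tb)^{\alpha} K^{1-\alpha}\bigr)
  \text{ for any } b\text{-pass algorithm with } O(K/b) \text{ arm memory,}
\]
\[
  \text{upper bound: } \operatorname{Regret} = O\!\bigl((Tb)^{\alpha} K^{1-\alpha} \sqrt{\log T}\bigr)
  \text{ achievable while storing only two arms,}
\]
\[
  \text{per-pass pull cap: } \Theta\!\bigl((Tb)^{2\beta_p} K^{-2\beta_p}\bigr), \qquad
  \beta_p = \frac{2^{\,b-p}\,(2^{p}-1)}{2^{\,b+1}-1}.
\]
```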
in subsequent work,consider the dependence on the number of passes b, and show the ω(4 -b t 2 b /(2 b+1 -1) ) worst-case lower bound, and provide an algorithm achieving regret θ(t 2 b /(2 b+1 -1) √ kb). we show that any algorithm suffers ω (t b) α k 1-α , α = 2 b /(2 b+1 -1) worst-case regret in expectation, with a time horizon t , number of arms k, number of passes b, and bounded arm memory m = o(k/b). if ǫ 1 = θ(ǫ) and ǫ 2 = θ(ǫ), then we have |k 1 ∪ k 2 | = θ(k) and the algorithm has to use ω(ǫ -2 ) pulls for each of θ(k) number of arms identify at least one ǫ-optimal arm with constant probability.for all arms x ∈ k \ (k 1 ∪ k 2 ), with the expected reward of arm x ′ , any algorithm still has to use ω((ǫ ′ ) -2 ) samples to identify the sub-optimality, which leads to the regret ω( x∈k (ǫ ′ ,1) ∆ x (ǫ ′ ) -2 ).the key idea of the algorithm revolves around ensuring that the number of pulls for any given arm remains capped by θ((t b) 2βp k -2βp ), β p = 2 b-p (2 p -1) algorithm 2 multi-pass successive elimination input: number of passes b, time horizon t. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/701.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/701.txt new file mode 100644 index 0000000000000000000000000000000000000000..44e8c8cfe0b17f8fe1de59a6164bc274d2116b39 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/701.txt @@ -0,0 +1 @@ +cyber-physical systems (cps) are complex systems that integrate physical and computational components, such as autonomous vehicles, power grids, and industrial control systems . these systems must operate reliably and accurately in dynamic and unpredictable environments, where the consequences of failure can be severe. deep learning (dl) has shown great potential for improving the performance and robustness of cps by enabling systems to learn from data and adapt to changing conditions . however, the deployment of dl algorithms in cps is not without challenges, as they must operate efficiently in real-time, handle large amounts of data, and provide accurate predictions despite noisy or adversarial input and varying environmental conditions.one of the critical challenges in deploying dl algorithms in cps is the ability to generalize to new and unseen scenarios . in real-world applications, the distribution of the data may change slowly over time or suddenly if unexpected events occur, yet the the models must be able to adapt to these changes and maintain their accuracy and reliability. furthermore, in contrast to other application areas of dl like computer vision or natural language processing, dl models in cps generally have to learn from a domainspecific dataset which only contains sensor data from one system . once this system changes, e.g. because a sensor is added, usually the whole dl model has to be retrained from scratch. what is needed is a model that generalizes from the initial dataset, i.e. it can transfer learned knowledge about the physical systems to an altered system or a different application.in this paper, we focus on evaluating the systematic generalization performance of several dl algorithms on a simulation of a three-tank system which follows a sequence of process phases, changing the in-and outflow of the tanks in a predictable manner. this system presents several challenges for dl algorithms, including nonlinear dynamics, adhoc process phase changes, long-term trends and noisy sensor measurements. 
we test the algorithms on new scenarios that they have not seen during training to assess their ability to capture system dynamics, learn from limited data and handle out-of-distribution (ood) samples.the research questions (rq) to be discussed in this paper can be summarized as follows:1. what deep learning model architectures demonstrate robustness to perturbations in cps, specifically noise, variation on timings and faulty sensors, and how much do these scenarios impact the models' performance?2. how effectively can these dl model architectures generalize to unfamiliar system behaviors, which may include systems operating at different scales or presenting new combinations of process phases, all while ensuring the fundamental system dynamics are maintained?3. which dl model architectures exhibit the highest capa- we hope that this study can help to uncover the key design principles and modeling approaches that can improve the robustness and reliability of dl models in cps, especially when dealing with ood samples. furthermore, by making our code and the simulation environment available openly, we provide a benchmark for researchers of robust dl models on cps. 1 the structure of this paper is as follows: section 2 surveys relevant work, underscoring the research gaps this study aims to bridge. section 3 describes our simulation environment, the selected models, and our comprehensive experimental setup. section 4 presents the outcomes of our experiments alongside a detailed analysis. in section 5, we summarize our findings, draw conclusions, and propose potential avenues for future research. what deep learning model architectures demonstrate robustness to perturbations in cps, specifically noise, variation on timings and faulty sensors, and how much do these scenarios impact the models' performance?.experiment 1: robustness to perturbations to address rq 1, which pertains to the robustness of the models in the face of perturbations, we evaluate the trained models on various simulations where the time series are influenced by specific perturbations.to assess the generalization performance of various dl models on ood samples and thereby answer rq 2, we investigate their forecasting performance in different scenarios where the behavior or the settings of the underlying three-tank system change. we do this by reusing the model weights and retraining the models on a new objective: predict the next 50 time steps of the water tanks of the new ood scenario. this underlines the model's deficiency in learning fundamental properties of the underlying physical system, as such sudden fluctuations of water levels are implausible in the standard three-tank scenario the model was trained on.in the scenarios involving heightened noise and variable process phase durations, the behavior of the models is similar to scenario 1, albeit with less pronounced differences. the scenarios include: 'scaled inflow and outflow' (scenario 4), 'independent process phase merging' (scenario 5), 'independent process phase merging with an additional stable phase' (scenario 6), and 'dependent process phase merging' (scenario 7). 
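To make the benchmark setup above concrete, a minimal sketch of a three-tank simulation with scheduled process phases and sensor noise. The dynamics (linear outflows, a fixed phase schedule, Euler integration) and all constants are simplifying assumptions, not the authors' simulator.

```python
import numpy as np

rng = np.random.default_rng(0)

def simulate(n_steps=300, dt=1.0, noise_std=0.5):
    """Euler-integrate three coupled tanks; the active process phase sets inflow and valve openings."""
    levels = np.array([10.0, 5.0, 2.0])          # initial water levels
    # (inflow into tank 0, coupling 0->1, coupling 1->2, outflow from tank 2) per phase
    phases = [(2.0, 0.10, 0.05, 0.02),           # filling
              (0.0, 0.10, 0.10, 0.10),           # mixing
              (0.0, 0.00, 0.00, 0.20)]           # draining
    readings = []
    for step in range(n_steps):
        inflow, k01, k12, k_out = phases[(step // 100) % len(phases)]   # phase changes every 100 steps
        flow01 = k01 * max(levels[0] - levels[1], 0.0)
        flow12 = k12 * max(levels[1] - levels[2], 0.0)
        d = np.array([inflow - flow01, flow01 - flow12, flow12 - k_out * levels[2]])
        levels = np.maximum(levels + dt * d, 0.0)
        readings.append(levels + rng.normal(scale=noise_std, size=3))   # noisy sensor measurements
    return np.array(readings)

series = simulate()
print(series.shape, series[-1].round(2))   # (300, 3) noisy level readings
```

Perturbation scenarios like those above can be emulated by raising noise_std, randomizing the phase-change step, or zeroing one column of the readings to mimic a faulty sensor; out-of-distribution scenarios correspond to rescaling the flows or reordering/merging the phases.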
hypothetically, the 'stable' process phase where the water levels in the tanks remain constant presents a new challenge for the models, as it is not part of the 'standard' scenario the models have been trained on.in a comprehensive review of all scenarios, it is evident that the majority of models fail to predict the correct ood behavior initially, but demonstrate a robust ability to adapt to the new system dynamics with fine-tuning, which answers rq 2 and 3. while further fine-tuning could potentially enhance the model's performance, it does not appear to leverage the system knowledge to the same degree as the other models. as anticipated, both data augmentation techniques marginally reduce the performance on the 'standard' scenario on which the models were initially trained. given that the principal motivation for employing data augmentation is to enhance model robustness, we examined performance in a scenario involving a perturbation (scenario 1) and another reflecting ood system behavior (scenario 5).the introduction of noise to the time series appears ben- eficial when managing perturbations such as faulty sensors, as most models exhibit improved performance on scenario 1. given that the performance on scenarios with ood behavior and the 'standard' scenarios is compromised for most models, the indiscriminate application of this data augmentation technique for all model architectures is not recommended. firstly, practitioners must approach the application of dl models in cps with caution, as performance disparities only became apparent when subjecting the models to specific test scenarios. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/702.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/702.txt new file mode 100644 index 0000000000000000000000000000000000000000..37523fea1d916d48a194759a3f9d48d7bf8627ec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/702.txt @@ -0,0 +1 @@ +since data creates a steady stream of wealth, the economic value of data attracts great attention from both industry and academia. data-driven applications, and more specifically machine learning (ml), promote data valuation to become an increasingly significant discipline in data science. in the context of ml, data valuation aims to equitably measure the contribution of each data point to the utility (i.e., performance) of an ml model. to approach the goal, many data valuation methods are developed, including leave-one-out score , shapley value , reinforcement learning-based value , etc. among these, shapley value has become the most prevalent method by virtue of its unique four properties for equitable payoff allocation: balance, symmetry, zero element, and additivity . recent research indicates that shapley value and its variations are effective in identifying both beneficial and detrimental data for an ml model with the demonstration of various tasks such as data selection and label noise detection.motivation. shapley-based data valuation methods depend on a utility function that assesses the value of a coalition of data points by evaluating the performance of the ml model trained on the coalition. previous work has commonly defined the utility function as the prediction accuracy on a validation set v. however, this approach may not effectively distinguish between beneficial and detrimental training data points for probabilistic classifiers. 
consider the following scenario: given two probabilistic classifiers 1 and 2 for binary classification where the classification * equal contribution. † corresponding author.threshold is 50% and a validation set v containing two data points labeled as either 0 or 1. we observe that 1 provides predictive confidence scores (i.e., predicted class probabilities) of {90%, 30%} for the correct class labels, while 2 provides scores of {60%, 30%}. although 1 demonstrates greater predictive confidence, a significant indicator of the model's trustworthiness and reliability, in predicting the correct class label than 2, both classifiers have an identical prediction accuracy of 50% with no discernible difference. it is therefore tempting to ask: how to effectively differentiate the utility of various probabilistic classifiers? moreover, although prediction accuracy provides a homogeneous smallest unit of improvement (i.e., 1/|v|), the change in confidence resulting from the addition of new data points is generally heterogeneous. for instance, an increase in predictive confidence score from 60% to 70% is distinct from an increase from 90% to 100%. in most cases, the latter is regarded as more valuable and challenging to achieve. it is therefore raising another question: how to accurately quantify the marginal contribution of various data? contribution. in this paper, we propose probabilistic shapley (p-shapley) value by constructing a probability-wise utility function that effectively differentiates and quantifies the contribution of each data point to the probabilistic classifiers.for the first question, we leverage the predicted class probabilities rather than binarized prediction results as inputs for the utility function. using the predicted class probabilities (i.e., predictive confidence scores) of probabilistic classifiers can effectively utilize the model's confidence in its predictions. for the second question, we propose a novel solution by combining the utility function based on the predicted class probabilities with confidence calibration. specifically, we incorporate different activation functions which tune the marginal improvements in predicted class probabilities to reflect their varying contribution to the ml models. we briefly summarize our contributions as follows.• we identify the problem of shapley value on probabilistic classifiers and propose probabilistic shapley (p-shapley) value by constructing a probability-wise utility function. • to effectively quantify the marginal contribution of each data point to probabilistic classifiers, we offer several activation functions for confidence calibration.• extensive experiments on four real-world datasets demonstrate the effectiveness of our proposed p-shapley value in evaluating the importance of data for building a high-usability and trustworthy ml model. in the context of ml, data valuation aims to equitably measure the contribution of each data point to the utility (i. recent researchindicates that shapley value and its variations are effective in identifying both beneficial and detrimental data for an ml model with the demonstration of various tasks such as data selection and label noise detection. shapley-based data valuation methods depend on a utility function that assesses the value of a coalition of data points by evaluating the performance of the ml model trained on the coalition. 
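A sketch of permutation-sampling Shapley values with a probability-wise utility in the spirit of the proposal above: utility is the calibrated predictive confidence on correctly classified validation points. The tiny nearest-centroid "classifier", the squared-confidence activation, and the toy data are illustrative stand-ins, not the paper's models or activation choices.

```python
import numpy as np

rng = np.random.default_rng(0)

def fit_predict_proba(train_x, train_y, val_x):
    """Toy probabilistic classifier: nearest centroid with a sigmoid over the distance margin."""
    if len(set(train_y.tolist())) < 2:
        return None                                            # cannot fit with a single class
    c0 = train_x[train_y == 0].mean(axis=0)
    c1 = train_x[train_y == 1].mean(axis=0)
    margin = np.linalg.norm(val_x - c0, axis=1) - np.linalg.norm(val_x - c1, axis=1)
    return 1.0 / (1.0 + np.exp(-margin))                       # P(class = 1)

def utility(idx, train_x, train_y, val_x, val_y, act=lambda p: p ** 2):
    """Probability-wise utility: mean activated confidence over correctly predicted validation points."""
    if len(idx) == 0:
        return 0.0
    proba = fit_predict_proba(train_x[idx], train_y[idx], val_x)
    if proba is None:
        return 0.0
    conf = np.where(val_y == 1, proba, 1.0 - proba)             # confidence in the true class
    correct = (proba >= 0.5) == (val_y == 1)
    return float(np.mean(act(conf) * correct))

def mc_shapley(train_x, train_y, val_x, val_y, n_perm=200):
    n = len(train_y)
    values = np.zeros(n)
    for _ in range(n_perm):
        perm = rng.permutation(n)
        prev, coalition = 0.0, []
        for i in perm:                                           # scan the permutation, adding points one by one
            coalition.append(i)
            cur = utility(np.array(coalition), train_x, train_y, val_x, val_y)
            values[i] += cur - prev                              # marginal contribution of point i
            prev = cur
    return values / n_perm

train_x = rng.normal(size=(12, 2)) + np.array([[2, 0]] * 6 + [[-2, 0]] * 6)
train_y = np.array([1] * 6 + [0] * 6)
train_y[0] = 0                                                   # one mislabeled point
val_x = rng.normal(size=(20, 2)) + np.where(rng.random((20, 1)) < 0.5, [2, 0], [-2, 0])
val_y = (val_x[:, 0] > 0).astype(int)
print(mc_shapley(train_x, train_y, val_x, val_y).round(3))       # the flipped point should score low
```

Swapping the `act` argument (identity, squared, sigmoid-like, etc.) changes how strongly high-confidence gains near 100% are rewarded relative to gains at lower confidence, which is the calibration knob discussed above.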
in this paper, we propose probabilistic shapley (p-shapley) value by constructing a probability-wise utility function that effectively differentiates and quantifies the contribution of each data point to the probabilistic classifiers.• we identify the problem of shapley value on probabilistic classifiers and propose probabilistic shapley (p-shapley) value by constructing a probability-wise utility function. • to effectively quantify the marginal contribution of each data point to probabilistic classifiers, we offer several activation functions for confidence calibration. given a binary classification task where data points are labeled as either 0 or 1, for any data point = ( , ) ( ∈ {0, 1}), we need to quantify the contribution of data point to the probabilistic classifier for the binary classification task.where i(•) is the indicator function that returns 1 for true condition and 0 otherwise, is the ground-truth label of data point from the validation set, ˆ is the label of data point predicted by the probabilistic classifier trained on s, and is the predictive confidence score that data point belongs to class 1.we essentially perform a transformation on the probabilistic classifier's utility from the prediction accuracy to the average of predictive confidence scores for correctly predicted data points in the validation set. therefore, we incorporate activation functions into the predictive confidence score to better capture the non-linear relationship between predictive confidence score and utility. for each permutation, we scan the data points progressively and evaluate the utility of the coalition consisting of the scanned data points (lines 6-10). training data points with higher valuation should contribute more to the model performance, so we measure the performance of each data valuation method with the performance drop following the removal of high-value data points. given a training set n in descending order by data value and removing data points progressively starting with the highest value data point, wad is calculated by aggregating the prediction accuracy decrease in each round, with weight inversely proportional to the number of rounds. this metric offers a probability-wise approach to evaluating model performance that considers both the effect of data point removal on model performance and its predictive confidence scores. one possible reason is that the swish activation's soft clipping nature helps produce a utility function that varies smoothly with the changes in predictive confidence score, resulting in well-calibrated data valuation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/703.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/703.txt new file mode 100644 index 0000000000000000000000000000000000000000..9dd81d4d90e1a1129fb6ec55d07e84e78a578ebb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/703.txt @@ -0,0 +1 @@ +multi-agent reinforcement learning (marl) is wildly utilized in multi-agent systems (mas) such as smart transportation and unmanned aerial vehicles , as a result of its superior performance in team decision-making problems. as the number of agents increases and the joint-action space of mas grows exponentially, marl faces the issues of high combinatorial complexity and poor scalability. 
centralized training and decentralized execution (ctde) is currently the more popular framework to address these problems, in which, all agents share the global information in the training process, while in the execution phase, each agent makes independent decisions based on its own perceptions and policy. value decomposition networks (vdn) and monotonic value function factorization (qmix) are classical ctde-based marl. they introduce a network in the training process to guarantee the individual-global-max (igm) .however, it is shown that the agents trained by these classical marl are sensitive to state perturbation , , , . in reality, the states of agents are often perturbed due to the presence of sensor noise and malicious attacks. furthermore, state perturbations to some of the agents can not only mislead the decision of victims but also have an impact on the cooperative policy of the team. robustness testing for a model trained by marl is an essential step for confirming the trustworthiness of mas against unexpected perturbations. the perturbation types are so diverse that it is impossible to cover all possible cases during testing. consequently, it is vital to generate perturbation states via adversarial attacks that have the most significant impact on team collaboration. intuitively, the more stealthy and disruptive the attack is, the more potential it is for robustness testing.there has been a lot of robustness testing technique on single-agent reinforcement learning (sarl), such as using the adversarial attack based on the gradient of the neural network , or constructing an adversary as an rl agent to generate the adversarial observation. however, there have been a few related research to test the robustness of marl against state perturbation of agents. zhou et al. demonstrate that the adversary for marl can be formulated as the stochastic game (sg) and there exists the joint optimal adversarial state. but the influence of individual policy on teams is not considered during the attack in , . they only generate adversarial observation misleading the victim to take actions that are not within expectations, which may not lead to the failure of team tasks. the methods in , , consider the effect of individual actions on teams by constructing the adversary as sarl or marl agent. however, the adversary is trained at the assumption that the victim is determined. when the victim changes, it needs to be retrained.compared with single-agent situations, multi-agent situations face the following challenges:1) the victims are uncertain which makes the robustness testing process can not formulate as sg. the testing process becomes more difficult due to this uncertainty. 2) an agent doing a sub-optimal action does not necessarily lead to team failure.3) the centralized training process is usually unknown and not be employed during testing. besides, centralized training is based on the assumption that all agents make the optimal decision. as a result, it may not evaluate accurately the team reward when the agent takes the suboptimal action. how to estimate the team reward for the adversary is important. aims to select critical agents as victims and determine the worst joint actions they should take to decrease the accumulated reward of the team. the second step is a targeted attack to compute adversarial observation according to the outcomes of the first step. 2) we introduce a sarsa-based approach for learning the joint action-value function marl to evaluate the team cooperation policy. 
the function has a good representation of the relationship between individual actions and team accumulated rewards and is used as the objective function for the optimation of de.3) the results demonstrate that rtca achieves superior performance when attacking a smaller number of agents. rtca is more suitable for robustness testing of models trained by mral due to its stealthiness and effectiveness. centralized training and decentralized execution (ctde) is currently the more popular framework to address these problems, in which, all agents share the global information in the training process, while in the execution phase, each agent makes independent decisions based on its own perceptions and policy.there has been a lot of robustness testing technique on single-agent reinforcement learning (sarl), such as using the adversarial attack based on the gradient of the neural network,or constructing an adversary as an rl agentto generate the adversarial observation. aims to select critical agents as victims and determine the worst joint actions they should take to decrease the accumulated reward of the team.is the state set of the environment, the state s ∈ s, a i ∈ a i is the action of agent i, a i is the action space of the agent i, o i ∈ o i drawn according to observation function z (s, a) :.where b i is the set of adversarial states of agent i, m ⊆ n is the set of victim agents and the number of victims is m ∆ = |m|.the joint adversarial perturbation and π •v denotes the joint policy under the joint adversarial perturbation.the goal of the joint optimal adversarial perturbation is to minimize the expected cumulative discount reward of every victim agent., solving of sa-dec-pomdps named rtca as shown in figure1that can choose critical agents from the set of n as the set of victims m and provide guidance on the worst joint action of victims, followed by generating adversarial observations on them.after obtaining the index of the critical agent and the worst joint action, we aim to generate adversarial perturbation using these to induce the victim to take the worst actions. in our setting, this type of method can be used to generate adversarial perturbations based on the policy of the victim agents, misleading them to output the targeted action.where z i is the action of victim agent i based on the clean observation, θ i is the parameter of the policy network of victim agent i, âi is the target action of victim agent i and ôi is the perturbation observation of agent i. the adversary is trained in two steps including the director giving advice for the worst joint action and the actor generating the adversarial observation based on this action. based on a survey, adversarial attacks on sarl can be classified into four distinct categories including perturbations to the state space, the reward function, the action space, and the model space.propose the method of generating adversarial states for marl, they use a two-step attack similar towhich reduces the team reward by perturbing the state of only a fixed agent. in these methods, the set of victim agents is fixed, while in rtca, the set of victim agents is variable. 
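a sketch of the first attack step discussed above, searching for a damaging joint action of the selected victims; a plain mutation-based evolutionary loop stands in here for the differential evolution (de) used in the excerpt, and q_joint (a learned joint action-value function), the victim list, and the action-space size are assumed to be available:

import random

def worst_joint_action(q_joint, state, victims, n_actions, pop=20, iters=50):
    # search for victim actions that minimise the estimated team value (illustrative)
    def fitness(actions):
        # lower estimated team value = more damaging joint action for the victims
        return q_joint(state, dict(zip(victims, actions)))

    population = [[random.randrange(n_actions) for _ in victims] for _ in range(pop)]
    for _ in range(iters):
        for i, cand in enumerate(population):
            mutant = [a if random.random() > 0.3 else random.randrange(n_actions) for a in cand]
            if fitness(mutant) < fitness(cand):      # keep the more damaging candidate
                population[i] = mutant
    return min(population, key=fitness)

the returned joint action would then be handed to the second, targeted-perturbation step, which crafts observations that mislead each victim into taking its assigned action.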
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/704.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/704.txt new file mode 100644 index 0000000000000000000000000000000000000000..94a19289d16e3e499b68e4c395ac5218c046c1fa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/704.txt @@ -0,0 +1 @@ +privacy has become a major concern in the collection and usage of data. in a privacy-first world, the access to raw data is very restricted and limited to infrastructure-related purposes. for the purpose of training machine learning models or computing analytics, data is accessed through privacy-preserving channels such as differentially-private query engines. in such an environment, developing new algorithms or debugging code can be challenging, as each query on the data is billed to the privacy budget, and it is impossible to "eye-ball" data to figure out potential issues and problems. to circumvent this problem, synthetic data generation offers to generate a synthetic dataset that follows the same distribution as the underlying data while preserving the privacy of individuals.a range of methods for synthetic data generation can be framed within the select-measure-update paradigm (mckenna et al., 2021;liu et al., 2021). such approaches iteratively select a (linear statistical) query to answer, such as computing a particular k-way marginal distribution, measure a noisy differentially-private evaluation of the selected query, and update a probabilistic generative model using the noisy measurements. one spends privacy budget to select and measure queries with substantial differences between model and data and repeatedly align the model with the noisy queries.different methods within this paradigm implement these 1 meta ai. correspondence to: alexandre sablayrolles .steps in different ways. a recent representative with good performance is aim (mckenna et al., 2022) that models tables with discrete columns. aim is an advanced variant of approaches that placed first and second in the 2018 and 2020 nist differential privacy synthetic data challenges respectively. aim utilizes a clever selection procedure to identify improvable k-way marginals, measures these noisy marginals, and updates a probabilistic graphical model trained to match these marginals. marginals are convenient for privacy calculations, as each row in a tabular dataset appears in one cell of a marginal (so the sensitivity is typically 1), and any computation that uses at most k columns can be answered from k-way marginals. marginals are hence popular in the select-measure-update paradigm (see for example, tao et al. (2021)).generally, k-way marginals with a low value of k are selected due to privacy-accuracy tradeoffs and complexity reasons. low values of k correspond to low-dimensional spaces on which it is easier to obtain good private statistics while higher values of k correspond to more complex statistics that require higher privacy budgets and memory, as the number of cells in a marginal distribution scales up exponentially with k. additionally, for graphical models such as that used by aim, the (exact) inference complexity scales exponentially with the junction tree width of selected marginals, and can only be limited by carefully selecting marginals with low k.in this short paper, we depart from the select-measureupdate paradigm and from measuring marginal distributions. 
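a minimal sketch of the "measure" step described above: one k-way marginal is computed as a contingency table and perturbed with gaussian noise calibrated to its sensitivity of 1 (each row lands in exactly one cell); the column names, noise scale, and toy data are illustrative assumptions:

import numpy as np
import pandas as pd

def noisy_marginal(df, cols, sigma=10.0, seed=0):
    # count table over the chosen columns, plus gaussian noise on every cell
    rng = np.random.default_rng(seed)
    counts = df.groupby(list(cols)).size()
    return counts + rng.normal(0.0, sigma, size=len(counts))

toy = pd.DataFrame({"age": ["<30", "30+", "<30", "30+"], "smoker": ["y", "n", "n", "n"]})
print(noisy_marginal(toy, ["age", "smoker"]))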
we instead train (privately) a deep probabilistic model (a transformer) (vaswani et al., 2017) using well-known tools from language modelling (lm). we call this approach synlm. we treat each row in a table as a sentence, and train a transformer to predict elements in the row one-by-one. figure 1 is an example of such a prediction. we observe synlm is more scalable, fits the data better, and still approximates low-order marginal statistics well.selecting and measuring k-way marginals can become impractical as the number of columns increase or the number of unique discrete values per column increases. a large table with complex columns has many k-way marginals, each of which may contain many cells. in particular, numeric columns need to be discretized to coarse, low-precision val- ues. unlike marginal-based approaches, synlm's scaling is not dependent on exponentially-sized marginal distributions, but instead is linear in the number of columns and logarithmic in the number of possible discrete categories.our experimental results show synlm outperforms aim in terms of likelihood for almost all datasets, and the gap increases for datasets with higher likelihoods (see table 2).in other words, synlm becomes comparatively better for datasets with more columns or more categories per column. we also compare synlm and aim in terms of marginals. similar to past benchmarking of synthetic data generation (tao et al. (2021)), synlm lags aim in capturing lowdimensional marginals. synlm's weaker performance at low-dimensional marginals in conjunction with improved likelihood indicates complex dependencies are relevant in our experimental datasets.in total, our contributions are the following,• we show we can train a transformer from scratch on tabular datasets with differential privacy and obtain better likelihoods than aim.• we show our method preserves marginals to some extent despite not being explicitly trained to do so.• we propose combining a trie with the language model to improve its modelling performance. to circumvent this problem, synthetic data generation offers to generate a synthetic dataset that follows the same distribution as the underlying data while preserving the privacy of individuals., 2021;liu et al. aim is an advanced variant of approaches that placed first and second in the 2018 and 2020 nist differential privacy synthetic data challenges respectively. aim utilizes a clever selection procedure to identify improvable k-way marginals, measures these noisy marginals, and updates a probabilistic graphical model trained to match these marginals. low values of k correspond to low-dimensional spaces on which it is easier to obtain good private statistics while higher values of k correspond to more complex statistics that require higher privacy budgets and memory, as the number of cells in a marginal distribution scales up exponentially with k. additionally, for graphical models such as that used by aim, the (exact) inference complexity scales exponentially with the junction tree width of selected marginals, and can only be limited by carefully selecting marginals with low k.selecting and measuring k-way marginals can become impractical as the number of columns increase or the number of unique discrete values per column increases. we follow previous research and compare the marginals on the synthetic data to the marginals of the full dataset (train + valid). 
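a sketch of the row-as-sentence view described above: each tabular row is serialised into a short token sequence that an autoregressive transformer can be trained on one token at a time; the column=value delimiter scheme is an illustrative assumption rather than the exact synlm encoding:

def row_to_tokens(row, columns):
    # serialise one tabular row into a "sentence" of column=value tokens
    tokens = ["<bos>"]
    for col in columns:
        tokens.append(f"{col}={row[col]}")
    tokens.append("<eos>")
    return tokens

row = {"age": "30+", "smoker": "n", "income": "40k-60k"}
print(row_to_tokens(row, ["age", "smoker", "income"]))
# ['<bos>', 'age=30+', 'smoker=n', 'income=40k-60k', '<eos>']
# an autoregressive transformer is then trained to predict each token from the previous ones,
# with dp-sgd (assumed here) providing the differential privacy guarantee during training.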
marginal-based methods need to compute all k-way marginals: there are binom(c, k) = O(c^k) such marginals, and each marginal is a histogram over d^k distinct values. the overall results show that synlm is better on most datasets in terms of likelihood (see table 2), but aim is substantially better at computing marginals. we observe that synthetic data generated using synlm presents empirical marginals that are qualitatively close to the marginals of the original data. in this short paper, we proposed an approach for privately generating synthetic data from tabular data using language models. our approach, synlm, trains a transformer language model on the tabular data and generates synthetic data that preserves the privacy of individuals. second, a broader range of comparisons is certainly possible; our comparison to aim itself is not exhaustive and other ablations, for example lower privacy budgets or different discretization, could be performed. comparing how other past synthetic data methods perform on held-out likelihood could also be useful. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/705.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/705.txt new file mode 100644 index 0000000000000000000000000000000000000000..11c5313cfe637379f9a2dd64581476084867b04c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/705.txt @@ -0,0 +1 @@ +practical applications in multiple areas such as logistics, portfolio management, manufacturing, and others share a combinatorial structure. finding the optimal solution for a combinatorial task requires an exhaustive search over all valid combinations of variables. the optimal solution for a combinatorial problem formulated as a mixed integer linear program can be efficiently obtained with the branch-and-bound (b&b) algorithm. the b&b algorithm employs a divide-and-conquer approach. at each step, it splits the domain of one of the integer variables and eliminates paths that cannot lead to a feasible solution. the performance of the b&b algorithm depends on two sequential decision-making processes: variable selection and node selection. node selection picks the next node in the b&b tree to evaluate, and variable selection chooses the next variable to split on. the variable selection process is the most computationally expensive and crucial for the performance of the whole algorithm. the optimal variable selection method, frequently dubbed the branching rule, will lead to smaller trees and a more efficient solver. although the optimal branching rule is not known, all modern solvers implement human-crafted heuristics, which were designed to perform well on a wide range of tasks. at the same time, practitioners frequently solve the same task with different parameters, so a branching rule adapted to a specific distribution of tasks may lead to a significant performance boost and business impact. the branching rule is applied sequentially to minimize the resulting tree size, which resembles the reinforcement learning paradigm in which an agent interacts with the environment to maximize the expected return. recently, reinforcement learning achieved state-of-the-art results in a diverse set of tasks, from beating world champions in the games of go and dota2 to aligning optical interferometers, controlling nuclear fusion reactors, tuning hyperparameters of simulated quantum annealers, and optimizing the output of large language models. previous works introduced the tree markov decision process (tree mdp).
in the tree mdp, instead of a single next state agent receives multiple next states -descendant nodes of the current tree node. the value function of the current node is the sum of reward and value functions in the child nodes. work showed that a reinforcement learning agent trained to minimize each sub-tree minimizes the whole b&b tree. we follow this approach and develop a sample efficient off-policy reinforcement learning method adapted for a tree mdp. tree sizes produced by the b&b algorithm usually have a long-tailed distribution, even for hand-crafted branching heuristics. to overcome this challenge, we adapt the loss function to the distribution of tree sizes. as a result, our agent learns more stable, produces smaller trees, and is more sample efficient than the previous rl methods. our contribution is the following:1. we prove that the bellman operator in tree mdp is contracting in mean.2. we modify the learning objective to optimize the geometric mean.3. we propose a novel reinforcement learning algorithm -treedqn. node selection picks the next node in the b&b tree to evaluate, and variable selection chooses the next variable to split on. the branching rule is applied sequentially to minimize the resulting tree size, which resembles the reinforcement learning paradigm in which an agent interacts with the environment to maximize the expected return. in the tree mdp, instead of a single next state agent receives multiple next states -descendant nodes of the current tree node. the main difference between tree mdp and temporal mdp is that in tree mdp agent receives multiple next states -children nodes of the current node. the imitation learning agent can not produce trees shorter than the expert, however, it solves the variable selection task much faster, especially if running on gpu, thereby speeding up the whole b&b algorithm significantly. to test the generalization ability of our agent, we evaluate the trained agent twice: (1) on the test instances from the training distribution and (2) on the large instances from the transfer distribution. we compare the performance of our treedqn agent with the strong branching rule, imitation learning agent (il), and reinforce agent (tmdp+dfs,). our agent is based on the sample-efficient off-policy algorithm and requires much less training data than the reinforce agent. 4 shows that the treedqn agent significantly exceeds the results of the reinforce agent in all test tasks. the treedqn agent is close to the imitation learning agent in the first four tasks and substantially outperforms the strong branching in the multiple knapsack task.4, we see that in the combinatorial auction task, the treedqn agent for all task instances performs better than the reinforce agent and is close to the imitation learning agent. we optimize the geometric mean of expected tree size, so complex task instances may have less influence on the learning process. 5 that in the set cover, facility location, and multiple knapsack tasks, our treedqn agent transfers well and performs significantly better than the reinforce agent. in the maximum independent set task, the treedqn agent falls behind the reinforce agent since it adapted better for simple task instances, as seen from the p-p plot 4. it maps milp solving to an episode for our rl agent and trains the agent to optimize the final metric -the resulting size of the b&b tree. 
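a small sketch of the tree-mdp bootstrapping relation described above, where a node's value combines the immediate reward with the values of the two children created by branching; the reward convention of -1 per expanded node (so values estimate negated subtree sizes) is an illustrative assumption:

def tree_td_target(reward, v_left, v_right, gamma=1.0, leaf=False):
    # tree-mdp target: reward plus the (discounted) sum of the child-node values
    if leaf:
        return reward
    return reward + gamma * (v_left + v_right)

# with reward = -1 per expanded node, a node's value estimates the negated size of the
# b&b subtree rooted at it; training on log-scaled tree sizes corresponds to optimising
# their geometric mean, which tames the long-tailed size distribution mentioned above.
print(tree_td_target(-1.0, v_left=-3.0, v_right=-5.0))   # -9.0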
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/706.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/706.txt new file mode 100644 index 0000000000000000000000000000000000000000..ccb735cd3c36e8ef3250317e80046a5e651bc190 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/706.txt @@ -0,0 +1 @@ +kernel principal components analysis (kpca, schölkopf et al. (1998)) stands as one of the most widely used tools for unsupervised learning, with applications to dimensionality reduction, denoising, or features extraction. by embedding the data in some higher dimensional space thanks to a feature map, kpca aims at finding orthogonal directions in the feature space that allow to best reconstruct the empirical covariance operator.the classical way to solve the kpca problem is to compute the singular values decomposition (svd) of the gram matrix, a costly o(n 3 ) operation whose complexity scales cubicly with respect to the number n of datapoints. this prevents kpca from being used in large-scale scenarios, and, while some solutions exist, they mostly boil down to subsampling in fixed-size schemes to approximate the nonlinear mapping (langone & suykens, 2017) or focus on the online learning setting (günter et al., 2007;chin & suter, 2007;honeine, 2012). works on speeding up the simpler problem of linear pca exist (see (gemp et al., 2021) and references therein), but they cannot deal with infinite-dimensional feature spaces.from an optimization standpoint, the kpca problem can be formulated as variance maximization under orthonormality constraints. such problem belongs to the wider family of differences of convex functions (dc) problems, which has received a great deal of attention in multiple applications (e.g., see (tao & an, 1997)). in particular, in (beck & teboulle, 2021), pca was investigated as a dc problem, but only in the case of linear pca, i.e., not considering (potentially infinite-dimensional) feature mappings, and only for the simpler problem of finding the first component where the orthogonality constraints become void.enforcing desirable properties to the solution, such as sparsity or robustness, is a long-standing open problem in kpca. while many works have investigated robust/sparse kpca (e.g., robust kpca in (nguyen & torre, 2008;kim & klabjan, 2020;wang & tanaka, 2020a;fan & chow, 2020) and sparse kpca in (wang & tanaka, 2016;guo et al., 2019b;tipping, 2000;smola et al., 2002)), they use several different ad-hoc approaches or heuristics such as weighting schemes (alzate & suykens, 2008), leading to a multitude of different optimization problems. in (thiao et al., 2010), a dc program for the specific problem of sparse linear pca was explained, but is not extended to also handle robust losses within the same framework; notably, it does not deal with nonlinear feature maps nor with more than one component. the idea of using infimal convolution to design sparse or robust losses is exploited notably in sangnier et al. (2017); laforgue et al. (2020) for regression problems, and is known to work well in duality settings.in this paper, we derive a general dual-based formulation for kpca leading to a difference of convex function objective which encompasses both the variance and robust/sparse objective functions in the same framework. we derive efficient optimization algorithms showing significant speedups compared to the standard svd solvers for kpca. 
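for reference, a minimal sketch of the standard gram-matrix route to kpca that the excerpt's dual approach is benchmarked against: build and centre the kernel matrix, take its top eigenvectors, and rescale them to unit-norm directions; the rbf kernel and toy data are illustrative assumptions:

import numpy as np

def rbf_gram(X, gamma=1.0):
    sq = ((X[:, None, :] - X[None, :, :]) ** 2).sum(-1)
    return np.exp(-gamma * sq)

def kpca(X, s=2, gamma=1.0):
    # classical kernel pca: eigendecomposition of the centred gram matrix, o(n^3)
    n = len(X)
    K = rbf_gram(X, gamma)
    J = np.eye(n) - np.ones((n, n)) / n              # centring matrix
    Kc = J @ K @ J
    vals, vecs = np.linalg.eigh(Kc)                  # ascending eigenvalues
    vals, vecs = vals[::-1][:s], vecs[:, ::-1][:, :s]
    alphas = vecs / np.sqrt(np.clip(vals, 1e-12, None))   # unit-norm directions in feature space
    return Kc @ alphas                               # projections of the training points

X = np.random.default_rng(0).normal(size=(100, 3))
print(kpca(X).shape)                                 # (100, 2)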
in particular, our approach allows to solve infinite-dimensional kpca problems in the dual. we focus on objectives that can be written as moreau envelopes, which include the huber and ϵ-insensitive losses, inducing robustness and spar-sity, respectively. we show how the resulting optimization problems can be tackled with efficient algorithms for each objective.the paper is structured as follows. in section 2, we formulate the general kpca problem as a difference of convex functions, which leads to a flexible framework that can be extended to other loss functions beyond the square loss. we present in section 3 a gradient-based optimization algorithm able to efficiently solve the standard kpca problem through dualization. later, in section 4 we exploit the flexibility of the proposed dc framework by modifying the objective function to promote robustness and sparsity. finally, numerical experiments on both synthetic and real-world benchmarks are presented in section 5, showing faster training times and illustrating the promotion of sparsity and robustness induced in the solution. all proofs are deferred to the appendix.the classical way to solve the kpca problem is to compute the singular values decomposition (svd) of the gram matrix, a costly o(n 3 ) operation whose complexity scales cubicly with respect to the number n of datapoints. when w ∈ h is a vector, w ♯ refers to the linear form x → ⟨w, x⟩. ϕ induces a positive definite kernel function k : x × x → r with associated rkhs h k . , w s ) ∈ h s , we denote by g(w ) ∈ r s×s the gram matrix such that g(w ) ij = ⟨w i , w j ⟩.the stiefel manifold over hilbert spaces: given a feature space h and a positive integer s, we introduce the stiefel manifold of orthonormal s-frames in h as.kernel pca from svd: the usual way to solve the kpca problem is to express the directions w = s j=1 as linear combinations of the features, introducing coefficients (α ij ) n,s i,j=1 ∈ r n×s such that.quick algebraic manipulations then ensure that the solution to problem 1 can be obtained by taking the coefficients α to be the top-s eigenvectors of the gram matrix g = n i,j=1 , rescaled so that the directions have unit norm.the main motivation for going from the primal problem to the dual in the kpca case is that the dual variable h is a finite dimensional matrix, suitable to gradient-based optimization schemes.we recognize in equation (4) the nuclear norm of the matrix √ h ⊤ gh, which is well-defined since g is a gram matrix associated to a positive definite kernel. while the dependency in h makes this easier to handle from an optimization standpoint, the dependency in g in what follows, we define π(h) := tr √ h ⊤ gh and build onlewis (1996)to give a gradient expression for π used in subsequent optimization algorithms., 1998), and randomized svd (rsvd)(halko et al.where for rsvd ŝ is the diagonal matrix of the largest s singular values of g and corresponding computed singular vectors û , v , while for the eigendecomposition solver ŝ is the diagonal matrix of the largest s eigenvalues of g and corresponding eigenvectors û = v found by the lanczos solver. in particular, for the dual norms ∥h∥ ⋆ = ∥h∥ 1 and ∥h∥ ⋆ = n i=1 ∥h i ∥ 2 , we obtain respectively the losses ℓ ∞ ϵ , ℓ 2 ϵ . also, r(x) = r(λ(x)), with r(z) = s i=1 √ z i , where z i indicates the i-th component of z and λ(x) is the eigenvalues function defined as λ(x) = [λ 1 (x), λ 2 (x), ., 2019a)relax and modify kpca to use elasticnet optimization for promoting sparsity. 
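a sketch of how the dual objective term pi(h) = tr((h^T g h)^(1/2)) appearing above can be evaluated numerically, via the eigenvalues of the small s x s matrix h^T g h; the toy sizes and the linear kernel used to build a gram matrix are illustrative assumptions:

import numpy as np

def pi_nuclear(H, G):
    # tr( sqrt(H^T G H) ): sum of square roots of the eigenvalues of the s x s matrix H^T G H
    M = H.T @ G @ H                           # symmetric psd when G is a gram matrix
    eigvals = np.clip(np.linalg.eigvalsh(M), 0.0, None)
    return np.sqrt(eigvals).sum()

rng = np.random.default_rng(0)
X = rng.normal(size=(50, 4))
G = X @ X.T                                   # a valid gram matrix (linear kernel)
H = rng.normal(size=(50, 3))                  # dual variable with s = 3 components
print(pi_nuclear(H, G))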
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/707.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/707.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c35e99b416c7bcc8436f27f44488fdcc2aaceb6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/707.txt @@ -0,0 +1 @@ +generative ai systems allow users to create new content (e.g., text, image, audio, code) in response to an input, often relying on large-scale training datasets. such datasets may contain social stereotypes, inequalities, and hierarchies , which generative models can replicate in downstream uses. users may also exploit generative ai systems for disinformation, non-consensual synthetic sexual imagery, and other types of malicious content . responsible development of generative ai systems thus requires content moderation among other techniques to deploy systems that minimize harmful content. in this paper, we provide a framework for conceptualizing responsible content moderation in generative ai from a harm-reduction standpoint -defining safety, fairness, and metric equity.development of content filters is a key mode of moderating generative ai content. deciding what content to filter is a normative governance decision; in practice, generative ai content filters may index on illegal content (e.g., child sexual abuse material, copyright violations) , rather than a broader range of harmful content, including representational or cultural harms and violence or gore . when harmful content is filtered, algorithmic content moderation may disproportionately penalize content concerning socially marginalized groups , as moderation systems also learn and replicate demeaning associations from their training data . these limitations underscore the urgency of centering the experiences of oft-marginalized groups in defining and evaluating safety parameters and assessing the fairness of content moderation algorithms deployed on generative ai systems.assessing algorithmic fairness in generative ai systems is challenging. much scholarly attention focuses on algorithmic fairness within classification models (e.g., ), with statistical notions of fairness reliant on confusion matrices of model performance . here, algorithmic fairness is often divided into two parts, defining fairness based on: (1) predicted outcomes across groups (e.g., equality of opportunity) ; or (2) the consistency of predicted and actual outcomes when sub-groups change (e.g., counterfactual fairness) . these definitions fueled advances in classifier fairness (e.g., ), but are not directly applicable to generative ai systems, as generative models do not have one right outcome nor can "accuracy" be assessed across groups or individuals within generative models. importantly, there is rarely one definitive "correct" response to which any given input to a generative system can be measured against to quantify accuracy, as many input concepts are socially situated and contextual. 
the fact that many prompts provided to a generative system can properly produce an enormous range of results that respect the intent of the user, rather than a single answer (as in conventional ml), is the main source of challenge in defining generative model safety and fairness, and a characteristic distinguishing generative from finite classification contexts.we intervene in these challenges by offering a theoretical framework for assessing safety and fairness in generative ai from the perspective of harm-reduction content moderation, and provide a quantitative example of how to measure its constructs (section 3). in particular, we describe a method for adversarially challenging a text-to-image (t2i) generative system and machine annotating its outputs for a number of harms at scale. next, we demonstrate an approach to measure hateful, pornographic, and violent content in generated imagery and assess the relationships between the text prompts and the resulting harmful generated imagery (section 4). we additionally analyze how each metric interacts with the gender presentation of individuals in the generated imagery, constituting one of many potential sensitive attributes.the proposed metrics provide a means to measure harmful and biased content (safety, fairness) and how to "measure the measurements" to assess their performance across defined sociodemographic dimensions (metric equity). the metrics we describe inform efforts to evaluate models and foster greater alignment between ai systems and defined governance goals (section 5). this research contributes to responsible ai and content moderation scholarship, offering:• a tractable framework for proactive definition and measurement of safety, fairness, and equity in generative ai systems.• a harms taxonomy for safety and fairness in generative ai models.• a method for empirical measurement of the harms specified in the taxonomy, including "measurement of measurements", for bias. deciding what content to filter is a normative governance decision; in practice, generative ai content filters may index on illegal content (e. when harmful content is filtered, algorithmic content moderation may disproportionately penalize content concerning socially marginalized groups, as moderation systems also learn and replicate demeaning associations from their training data.ai safety and content moderation: content moderation falls under the umbrella of ai safety: a normative, governance approach to responsibly develop and deploy ml systems, including a focus on developing policies to outline desired characteristics of ml systemsand techniques to foster policy alignment, such as reducing harmful content.generative ai content moderation: conceptually, there are three key types of content moderation that can be applied to generative models to meet safety needs and improve fairness:(1)training data mitigations, (2) in-model controls, and (3) input and output filters. in the illustrative t2i system we use here, the safety harms are sexually explicit content, violent and gory content, and hateful content.the performance of generative ai systems has fairness considerations, particularly in terms of representational harms, such as how a model may learn harmful and demeaning stereotypesand how systematic absences in training data may lead to patterns of erasure, which safety and content filters may further exacerbate. 
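a small sketch of the "measure the measurements" idea above: fix a harm-score threshold (here the 95th percentile) and compare how often images associated with each gender-presentation group would be blocked; the column names, groups, and toy scores are illustrative assumptions:

import numpy as np
import pandas as pd

def block_rates_by_group(df, score_col="sexually_explicit_score", group_col="gender_presentation", pct=95):
    # block everything above the pct-th percentile of the harm score, then report per-group block rates
    threshold = np.percentile(df[score_col], pct)
    blocked = df[score_col] > threshold
    return df.assign(blocked=blocked).groupby(group_col)["blocked"].mean()

toy = pd.DataFrame({
    "sexually_explicit_score": np.random.default_rng(0).random(1000),
    "gender_presentation": np.random.default_rng(1).choice(["feminine", "masculine"], 1000),
})
print(block_rates_by_group(toy))   # a large gap between groups flags an equity problem with the filter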
about half of the dataset (n=5,638) comprised adversarial prompts intended to be broadly harmful, including prompts for the defined safety harms in our illustrative t2i system: sexually explicit content (n=203), graphic violence and gore (n=283), hateful (n=777) and harassing (n=202) content. a content moderation decision that could be made in response to this data in our illustrative system could be to set a thresh- old for exposure to users at the 95th percentile level, so that only the top 5% most sexually explicit images are blocked. as large scale training datasets are likely to contain a larger proportion of sexually explicit content, these low scores are consistent with the suggestion that removing sexually explicit content from the model's training set (a training data mitigation) reduced the extent to which it is able to produce sexually explicit content, even when directly prompted to do so. for example, if 10 images are generated in response to a prompt, but 8 depict only masculine presenting individuals and only 2 depict feminine presenting individuals, content moderation might enforce that a random selection of 3 of the images depicting masculine presenting individuals be rejected and new images generated until gender presentation equity is achieved. what this result demonstrates for content moderation is that even a safety filter that is seemingly neutral with respect to social identity (block sexually explicit content) may result in unequal treatment of people with different social identities (here, feminine presenting vs. our recommendation is that safety filters must be seen not as single constraint satisfaction prob-lems (block all images that are more sexually explicit than a threshold), but as a multiple constraint satisfaction problem (block all images that are more sexually explicit than a threshold while minimizing the discrepancy in treatment of different social identities). a content moderation decision that could be made in response to this data may include identifying whether this stereotype is present in the training data and attempt to perform training data mitigations to reduce its prevalence. the potential content moderation decision to block images above the 95th percentile score for sexually explicit content could be considered a fair one based on this counterfactual fairness metric, as one group is not disproportionately penalized. in preceding sections, we defined safety, fairness, and equity for algorithmic content moderation in generative ai (section 3), and provided examples of how to measure a subset of safety harms (sexually explicit, violent, and hateful generations) and fairness concepts (diversity of representation, equal treatment, and counterfactual fairness) (section 4). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/708.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/708.txt new file mode 100644 index 0000000000000000000000000000000000000000..c1f0aea8473d5885e39188cbe3a8acfb3bdb310a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/708.txt @@ -0,0 +1 @@ +as machine learning systems take more essential roles in decision making in critical situations, there is increasing focus on understanding the reasoning mechanism of the learned models (molnar, 2020). 
prior work has attempted to interpret black-box models in a post hoc manner (ribeiro et al., 2016;zeiler & fergus, 2014;sundararajan et al., 2017), train models that generate explanations besides predictions (park et al., 2018;kumar et al., 2022), or build inherently interpretable models (andrews et al., 1995;sudjianto & zhang, 2021).however, there is no consensus on choosing the best interpretation algorithm if the model is not inherently interpretable. the evaluation of post hoc or self-explaining interpretation methods is often subjective (doshi-velez & kim, 2017); there are systematic differences in how human individuals approach decision making based on model interpretation (broniatowski et al., 2021). different interpretation algorithms can generate different results for practical applications, while practitioners do not seem to have a principled way to resolve those differences (krishna et al., 2022). this paper argues that the above difficulties in designing and evaluating interpretation algorithms partially arise from fundamentally conflicting objectives of interpretability. in particular, we show that no attribution algorithm for black-box models possesses all the following desirable properties: specificity (unused inputs have a zero score), additivity (attribution scores decompose over model addition), completeness (attribution completely explains model output), and baseline invariance (rank of inputs is invariant to change of baseline inputs).instead of pursuing other post hoc interpretation methods, we advocate an alternative route towards model explainability. we formalize the concept of sound explanation. briefly speaking, a sound explanation is a subset of the information used by a machine learning system, perceived as understandable by humans, that causally determines the system's output. a sound explanation is always accurate and reliable in explaining how the system makes a prediction. the challenge faced by system designers is how to design conceptually understandable explanations. for example, a decision tree is a sound explanation; however, a tree with thousands of variables and hundreds of levels is hardly a satisfactory explanation to any human. the idea of sound explanation has been informally adopted by prior work. rudin (2019) argues that black-box models should be abandoned in favor of inherently interpretable models, which naturally provide sound explanations. koh et al. (2020) proposes models with concept bottlenecks, which is a type of sound explanation. unlike prior work, our research formally defines sound explanation instead of using heuristic arguments.we present a case in cancer prediction. we provide sound explanation via feature selection on large datasets derived from electronic health records (ehrs) containing millions of patients and thousands of features. we design an efficient algorithm for feature selection for neural networks. the final model is then trained on the less than 100 selected features. we work with two cancer types: pancreatic ductal adenocarcinoma (pdac, a common type of pancreatic cancer) and hepatocellular carcinoma (hcc, a common type of liver cancer). the selected features agree well with known pdac and hcc risk factors, thus providing confidence in our models among clinicians. for example, a decision tree is a sound explanation; however, a tree with thousands of variables and hundreds of levels is hardly a satisfactory explanation to any human.1 (interpretation). 
an attribution is a vector a(x; f, x') = (a_1, . . . , a_n) with a_i ∈ r being the contribution of x_i towards the value of f(x) - f(x'). we call the value of a(x; f, x') an attribution of f(x) to x relative to a baseline input x'. the choice of the baseline input x' is arbitrary. consider a linear function on r^2: f(x) = x_1 - x_2, two baseline inputs x'_1 = (-1, -1) and x'_2 = (1, 1), and the input x_t = (1, 0). a sound explanation of a machine learning system should accurately and reliably describe how the system makes a prediction. the output of a is a sound explanation of the system because one can verify that the system is not using any information other than the presented explanation to make the prediction. let x be an input to the system, which is a map from a variable v ∈ i to its value, denoted as x_v. for a machine learning system m = (g = (v = i ∪ {t} ∪ u, e), p), a sound explanation is a cut c = (s, t) such that i ⊆ s, t ∈ t, and s ∩ t = ∅. the information in x_s is presented to a human as the explanation of f_x(t) on the input x. the design space of sound explanations is how to decompose a machine learning system into a computational graph and a cut so that the explanation x_s is meaningful to humans. for a non-input node v, f_x(v) ∈ {0, 1} indicates whether this node is activated by the input x. the explanation x_s shows which leaf node is activated for x, corresponding to a path on the tree. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/709.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/709.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6bfdf5fce08547226684008e68f82ae20ae9516 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/709.txt @@ -0,0 +1 @@ +we study novel efficient and expressive graph embeddings motivated by lovász' characterisation of graph isomorphism through homomorphism counts. while most graph embeddings drop completeness (the ability to distinguish all pairs of non-isomorphic graphs) in favour of runtime, we devise efficient embeddings that retain completeness in expectation. the specific way in which we sample a fixed number of pattern graphs guarantees an expectation-complete embedding in expected polynomial time. in this way, repeated sampling will eventually allow us to distinguish all pairs of non-isomorphic graphs, a property that no efficiently computable deterministic embedding can guarantee. in comparison, most recent graph neural networks are inherently limited by the expressiveness of some k-dimensional weisfeiler-leman isomorphism test (morris et al., 2019; xu et al., 2019). our approach to achieve an expectation-complete graph embedding is based on homomorphism counts. these are known to determine various properties of graphs important for learning, such as the degree sequence or the eigenspectrum (hoang & maehara, 2020). furthermore, homomorphism counts are related to the weisfeiler-leman hierarchy (dvořák, 2010; dell et al., 2018), which is the standard measure for expressiveness on graphs (morris et al., 2019). they also determine subgraph counts (curticapean et al., 2017) and the distance induced by the homomorphism counts is asymptotically equivalent to the cut distance, which grohe (2020) and klopp & verzelen (2019) motivated as an appropriate graph similarity for graph learning tasks. in section 2 we introduce the required concepts.
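a brute-force sketch of the homomorphism count hom(f, g) that these embeddings are built from: count the mappings from v(f) to v(g) that send every edge of f to an edge of g. this enumeration is exponential in |v(f)| and is only meant to make the definition concrete, not to reflect the excerpt's efficient sampling scheme:

from itertools import product

def hom_count(F_edges, F_nodes, G_edges, G_nodes):
    # number of homomorphisms from pattern F to graph G (undirected, no self-loops, brute force)
    G_adj = {frozenset(e) for e in G_edges}
    count = 0
    for mapping in product(G_nodes, repeat=len(F_nodes)):
        m = dict(zip(F_nodes, mapping))
        if all(frozenset((m[u], m[v])) in G_adj for u, v in F_edges):
            count += 1
    return count

# hom(triangle, triangle) = 6: exactly the six vertex permutations preserve all edges
tri_nodes, tri_edges = [0, 1, 2], [(0, 1), (1, 2), (0, 2)]
print(hom_count(tri_edges, tri_nodes, tri_edges, tri_nodes))   # 6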
in section 3 we discuss that general expectation-complete embeddings can eventually distinguish all pairs of non-isomorphic graphs (lemma 3), which leads to a universal representation (theorem 4). then we propose our expectation-complete embedding based on sampling entries from the lovász vector (theorem 7) and bound the number of samples required to provably get as close as desired to the full lovász vector (theorem 8). in section 4, we show how to compute our embedding efficiently in expected polynomial time (theorem 14). in section 5, we show how to combine our embedding with graph neural networks. finally, we discuss related work in section 6 and show competitive results on benchmark datasets in section 7 before section 8 concludes. in the following f and g denote graphs, where f represents a pattern graph and g a graph in our training set. let hom(f, g) denote the number of homomorphisms from f to g and let ϕ g (g) = (hom(f, g)) f ∈g denote the vector of homomorphism counts from each graph of a family of graphs g to g. similarly to ϕ, we define ψ g (g) = t(g, g) = (t(f, g)) f ∈g and ψ n (g) = ψ gn . an isomorphism between two graphs g and g ′ is a bijection i : v (g) → v (g ′ ) such that {v, w} ∈ e(g) if and only if {i(v), i(w)} ∈ e(g ′ ). if there is an isomorphism between g and g ′ , we say they are isomorphic and denote it as g ≃ g ′ . a graph embedding ϕ is called permutationinvariant if for all g ≃ g ′ ∈ g it holds that ϕ(g) = ϕ(g ′ ).complete graph embeddings allow to determine whether two graphs are isomorphic, as g ≃ g ′ if and only if ϕ(g) = ϕ(g ′ ). if the expectation e x∼d is defined for all g ∈ g, we can define a (deterministic) graph embedding e x∼d : g → h. let ϕ x : g → h be a expectation-complete graph embedding and g, g ′ ∈ g which are not isomorphic. for a distribution d with full support on g n define the graph embedding ϕ f (g) = hom(f, g)e f with f ∼ d.the vector g has the entries (g) f ′ = pr f = f ′ hom(f ′ , g). let g ′ be a graph that is non-isomorphic to g and let g ′ = e f accordingly. theorem 5 holds for g, g ′ ∈ g ∞ and the mapping ϕ ∞ that maps each g ∈ g ∞ to an infinitedimensional vector. if we train a kernel-based classifier on a sample s ⊆ g n and want to classify a graph with size larger than n we do not have to recompute the embeddings ϕ ↓ ∞ (g) for g ∈ s as the terms corresponding to patterns with size > n in the kernel are zero anyway. there exists a distribution d with full support on g n such that computing the expectation-complete graph embedding ϕ f (g) with f ∼ d takes polynomial time in v(g) in expectation for all g ∈ g n . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/71.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/71.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4da9a72718d80dfc10f4c837ff2071bd0825bee --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/71.txt @@ -0,0 +1 @@ +deep rl has shown promise at learning complex behavior in many settings, including game-playing , robotics , and control systems . these algorithms typically take advantage of a markov assumption -that it is enough to consider only the current state when deciding which action to take. however, many real-world tasks are inherently temporally extended. the pattern of behavior the rl agent must learn depends not only upon the current state but also on past states and actions. 
for example, an agent that needs to get through a locked door to get high reward must have previously located and acquired the key. unfortunately, learning such temporally extended behavior can be incredibly challenging since the agent must learn to discern relevant features from its state-action history; these can be arbitrarily far removed from the present state, and may depend on this history in complex ways and without intermediate reward signal to aid learning. the standard deep rl solution to learning such temporally extended behavior is to use a recurrent neural network (rnn), which learns an abstract hidden state in order to summarize environment histories, but rnns require much data to train, and are difficult to tune. by contrast, in recent work, an algorithm for learning temporally extended behavior is proposed, where the rnn hidden states are replaced with an augmentation of the current state in terms of hand-designed propositional symbols, which incorporate domain knowledge and point the agent towards potentially reward-relevant properties of the state-action history (e.g. ). in the previous example, one can augment the agent with the propositional symbol have_keys, indicating whether the agent has acquired the keys to the door in the past. markovian policies on this state space now become vastly more expressive: if one can additionally condition on the truth state of have_keys when deciding an action, we can perform the following temporally extended behavior: go towards the keys when have_keys is false, then go towards the door when have_keys is true. but how did we know to augment the agent with have_keys in the first place? while this example seems simple, this is only because we contrived the reward: it is not in general clear which propositional symbols to augment an agent with, in order to achieve high performance. to address this, we propose the use of automata learning within the rl framework to automatically yield such propositional symbols, rather than relying on domain knowledge. we demonstrate that the trained automata dramatically accelerate policy learning, with our end-to-end approach outperforming a state-of-the-art rl algorithm (recurrent-ppo) on several non-markovian reward domains. the pattern of behavior the rl agent must learn depends not only upon the current state but also on past states and actions. for example, an agent that needs to get through a locked door to get high reward must have previously located and acquired the key. unfortunately, learning such temporally extended behavior can be incredibly challenging since the agent must learn to discern relevant features from its state-action history; these can be arbitrarily far removed from the present state, and may depend on this history in complex ways and without intermediate reward signal to aid learning. the standard deep rl solution to learning such temporally extended behavior is to use a recurrent neural network (rnn), which learns an abstract hidden state in order to summarize environment histories, but rnns require much data to train, and are difficult to tune.
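a minimal sketch of the state augmentation described above: wrap the environment so that each observation carries a have_keys bit that latches once the key is collected; the environment interface and the picked_up_key signal are illustrative assumptions:

class HaveKeysWrapper:
    # augments observations with a propositional symbol tracking whether the key was ever picked up
    def __init__(self, env):
        self.env = env
        self.have_keys = False

    def reset(self):
        self.have_keys = False
        return (self.env.reset(), self.have_keys)

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        if info.get("picked_up_key", False):     # assumed signal from the base environment
            self.have_keys = True
        return (obs, self.have_keys), reward, done, info

# a markovian policy over the augmented state (obs, have_keys) can now express
# "go to the key while have_keys is false, then go to the door once it is true".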
by contrast, in recent work, an algorithm for learning temporally extended behavior is proposed, where the rnn hidden states are replaced with an augmentation of the current state in terms of hand-designed propositional symbols, which incorporate domain knowledge and point the agent towards potentially reward-relevant properties of the state-action history (e.).02952v1 8 jan 2023 space now become vastly more expressive: if one can additionally condition on the truth state of have_keys when deciding an action, we can perform the following temporally extended behavior: go towards the keys when have_keys is false, then go towards the door when have_keys is true. but how did we know to augment the agent with have_keys in the first place? while this example seems simple, this is only because we contrived the reward: it is not in general clear which propositional symbols to augment an agent with, in order to achieve high performance. to address this, we propose the use of automata learning within the rl framework to automatically yield such propositional symbols, rather than relying on domain knowledge. while these approaches rely on domain knowledge and a domainspecific vocabulary for specification of the reward function, we consider a black-box non-markovian reward and present an automated approach to uncover the reward structure. an alternate black-box approach to ours is to first train an rnn, with a standard deep (recurrent) rl algorithm, and then "quantize" the hidden state of the rnn, but this learned transition model is not a direct function of the state-action history. we will consider the following extension of the mdp: an nmrdp(non-markovian reward decision process) n = (s, a, p, r, γ) is as before, but where the reward r : h → r, where h = (s × a) * is the set of finite histories with states s and actions a: in other words, the agent can be rewarded for behavior which is arbitrarily far removed from its current experience. intuitively, propositions correspond to facts about the state-action history in a given episode, such as "the agent has at some point reached the top right corner" or "within the 3 most recent timesteps, the agent took action x". intuitively, a dfa with state space q that accurately discriminates reward 1 traces from reward 0 traces (which we define as consistent) must model all parts of the state-action history relevant to the goal, and therefore the augmented state space s × q must make the problem markovian. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/710.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/710.txt new file mode 100644 index 0000000000000000000000000000000000000000..133358a6cb6f8fd68572b7916831d280077536ff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/710.txt @@ -0,0 +1 @@ +the history of outlier detection can be traced back to the 19th century with the work of gosset (1908), who used the concept of statistical outliers to identify and analyze abnormal observations in his data. in the mid-20th century, tukey (1977) popularized the concept of outliers and introduced techniques for their detection and analysis. in the 1980s and 1990s, researchers such as hawkins (1980) and chambers (1986) expanded on tukey's work and developed methods for identifying and handling outliers in multivariate data. 
in recent years, with the advent of big data and the need for scalable outlier detection techniques, researchers have proposed and developed new methods for outlier detection, such as density-based methods, weight-based outlier detection methods and statistical learning techniques. these advancements have contributed to a better understanding of outliers, and the development of more effective and efficient techniques for their detection and analysis. outlier mining is a critical area of research in the field of data visualization and mining. the ability to detect outliers in datasets can aid in the development of systems for addressing various problems, such as fraud detection and health monitoring. to identify underlying patterns in a dataset, it is necessary to understand the correlations among features in the data group. this paper focuses on the development of a weighted outlier detection method using pattern-based approaches for the analysis of highdimensional massive data. the proposed method involves the extraction of relevant features from the data to reduce its dimensionality and enhance the outlier detection process . in recent years, with the advent of big dataand the need for scalable outlier detection techniques, researchers have proposed and developed new methods for outlier detection, such as density-based methods, weight-based outlier detection methods and statistical learning techniques.as compared with various clustering and outlier detection methods are uncertain for the infrequent data nodes, but using maximum weighted frequent data points, we can mine information from the data nodes effectively and accelerate the overall efficiency of the model (please refer to table -1). some popular wpsoda methods include the weighted-pattern distancebased outlier detection (wpdbod) algorithm, the weighted-pattern window-based outlier detection (wpwod) algorithm, and the weighted-pattern markov chain outlier detection (wpmcod) algorithm.iv. hodge et al.challenges of detecting outliers in high-dimensional data and compares conventional outlier detection methods such as mahalanobis distance, k-expensive, while density-based methods such as local outlier factor (lof) and its variants are more efficient and effective in handling high-dimensional data.weighted outlier detection (wod) is the problem of identifying data points that deviate significantly from the majority of the data in a dataset, considering the data points' importance (weights).may also struggle to identify outliers in datasets with nonuniformly distributed data an efficient outlier detection approach on weighted data stream based on minimal rare pattern mining-2019 cai et al. this typically involves steps such as removing missing or corrupted data, normalizing the data to scale, and transforming the data into a suitable format for the outlier detection algorithm. it is important to note that the success of the outlier detection process depends heavily on the quality of the data pre-processing step, as inaccurate or incomplete data can lead to false positive or false negative results.weight-based outlier detection, and the center and variance matrix of the data are important elements that contribute to the accuracy of the outlier detection process. the center of the data is typically represented by the mean of the data points and is used to calculate the distance between each data point and the center of the cluster. 
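a sketch of the centre-and-variance idea described above in a weighted setting: estimate a weighted mean and covariance, then score each point by its mahalanobis distance from the centre, with larger scores flagging outliers; the uniform weights and toy data are illustrative assumptions:

import numpy as np

def weighted_mahalanobis_scores(X, w):
    # outlier scores from a weighted mean and covariance (higher score = more outlying)
    w = np.asarray(w, dtype=float) / np.sum(w)
    mu = w @ X                                        # weighted centre
    diffs = X - mu
    cov = (w[:, None] * diffs).T @ diffs              # weighted covariance matrix
    inv = np.linalg.pinv(cov)
    return np.einsum("ij,jk,ik->i", diffs, inv, diffs) ** 0.5

rng = np.random.default_rng(0)
X = np.vstack([rng.normal(size=(200, 2)), [[8.0, 8.0]]])    # one injected outlier
scores = weighted_mahalanobis_scores(X, np.ones(len(X)))
print(scores.argmax())                                       # index 200, the injected point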
both the center and variance matrix is updated in each iteration of the weight-based outlier detection process allowing for a a more accurate representation of the data and a better detection of outliers.by considering the relative importance of different and each data points, this can result in improved accuracy and meaningful outlier detections compared to traditional outlier detection methods that treat all data points equally. biased data can lead to uncertainty in the results of weighted outlier detection methods or incorrect interpretation in computation of the weights can result in incorrect outlier detection, thus compromising the accuracy of the results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/711.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/711.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a9b3bf53b4f2417b95e075fa9b2368d089b3d40 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/711.txt @@ -0,0 +1 @@ +nowadays, the success of deep learning (dl) approaches has led to an increase in interest in multi-layer feed-forward (mlff) neural networks (lecun et al., 2015) insofar as a successful class of deep neural networks consists of mlff networks with more than one hidden layer and possibly some specific architectural choices. in the rest of the paper we will refer to such deep neural networks as dnns. in a nutshell, dnn networks are computational architectures organised as l consecutive layers or levels of elementary computing units, called neurons. the last layer l is the output layer, and the remaining layers are usually called hidden or internal layers.in a dnn network each hidden layer l performs a non-linear functional map φ l θ l from the output of the this paper has been published in its final version on pattern recognition letters journal in open access. the doi is https: //doi.org/10. 1016/j.patrec.2023.11.016. please refer to the peer-reviewed published version as main reference.previous layer z l-1 (and possibly other previous layers) to the output of the layer itself. where θ l are the weights associated to the connections incoming into the layer l, plus the biases of the layer. by contrast, the output layer may also perform a linear transformation. in other words, the whole computation of a dnn can be viewed as a non-linear parametric functional mapping y = m(x; θ) from a d-dimensional space to a cdimensional space, where d is the number of input variables and c = m l is the number of neurons in the output layer. the parameters θ are the weights and biases of the network, and y are the output values of the output layer.although from a theoretical point of view the dnns capability of being universal approximators has been extensively discussed (cohen et al., 2016;huang et al., 2000;longstaff and cross, 1987), together with the inducted hidden feature representation spaces (lerner et al., 1999), it is important to notice that the difficult to effectively find the most suitable θ remains. in particular, when dnns are applied in the context of classification problems, one has to find the parameters θ l such that the composition of l -1 non-linear transformations z l-1 = φ l-1 φ l-2 . . . φ 1 (x) maps each input x from a non-linearly separable space into a linearly separable one. in fact, in the context of classification problems, one of the main goals is to find a suitable data representation which allows to obtain a linearly separable classification problem. 
plausibly, when a dnn is used, each internal representation z l can be expected to make the representations of x "somewhat more linearly separable" than the previous one z l-1 . we underline that the complexity of a classification problem can be measured with respect different aspects, however class separability is a key aspect and different levels of class separability can be quantified (lorena et al., 2019). in particular, in (schilling et al., 2021) the generalized discrimination value (gdv) to measure the separability between two dataset is introduced. the gdv is defined as the gap between the mean intra-cluster and the mean intercluster distances, computed on a set of labeled data represented in some space. more in detail, the gdv compares in a quantitative way the degree of class separability between two data representations. since gdv can be computed on different types of representations, it can be also used to compare the separability of the same data represented in different spaces, such as the different representations returned by different neural networks' layers.however, we again emphasise that how to determine the appropriate parameters θ from a data set by a supervised learning process minimizing an error (or loss) function is still a critical problem. we notice that error functions usually depend on the final network output values only, without taking care about the results obtained in the hidden layers. thus, starting from the previous considerations, in this paper we investigate the possibility to achieve a supervised learning approach which favours solutions where x's representations at the hidden levels have a higher degree of linear separability between the classes with respect to standard approaches. to this aim, we propose a dnn architecture which induces an error function involving the output values of all the network layers. more specifically, as we will discuss in more detail in section 2, the output of each hidden layer l is sent to an additional linear output layer which is trained to classify the input x on the basis of the input representation encoding in the layer l (see figure 1). from now on, we named this architecture hidden classification layer network (hcl). we investigated the impact of this type of solution in a series of experimental scenarios as we will discuss in more detail in section 3.although similar approaches have already been partially discussed in the past literature (see, for example, (lee et al., 2015)), here we propose both a different version in terms of both neural architecture and error function, and a more extensive experimental analysis (see sections 2 and 3). in particular, in (lee et al., 2015) the supervision of the hidden layer was made by svms instead of linear neural layers as in our case. in (wang et al., 2020) a cascade of convolutional neural networks (c-cnn) was proposed. c-cnn is composed of hidden layers combined together through dilated convolutions and trained using a proposed progress optimisation algorithm. also in this case, our approach proposes a simpler architecture to favour hidden representations with a higher degree of class separability. 
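a minimal pytorch sketch of the kind of architecture described here, with a linear hidden classification layer attached to every hidden layer and a lambda-weighted sum of cross-entropy losses; the layer sizes, lambda values and data are illustrative assumptions, not the configuration used in the paper.

```python
import torch
import torch.nn as nn

class HCLNet(nn.Module):
    """MLP whose hidden layers each feed an extra linear classifier (hidden classification layer)."""
    def __init__(self, in_dim, hidden_dims, num_classes):
        super().__init__()
        self.hidden = nn.ModuleList()
        self.aux_heads = nn.ModuleList()     # one linear classifier per hidden layer
        prev = in_dim
        for h in hidden_dims:
            self.hidden.append(nn.Sequential(nn.Linear(prev, h), nn.ReLU()))
            self.aux_heads.append(nn.Linear(h, num_classes))
            prev = h
        self.out = nn.Linear(prev, num_classes)   # final classification layer

    def forward(self, x):
        aux_logits = []
        for layer, head in zip(self.hidden, self.aux_heads):
            x = layer(x)
            aux_logits.append(head(x))       # classify from this hidden representation
        return self.out(x), aux_logits

def hcl_loss(final_logits, aux_logits, targets, lambdas):
    """Cross-entropy on the final output plus lambda-weighted cross-entropy on each hidden head."""
    ce = nn.functional.cross_entropy
    loss = ce(final_logits, targets)
    for lam, logits in zip(lambdas, aux_logits):
        loss = loss + lam * ce(logits, targets)
    return loss

model = HCLNet(in_dim=20, hidden_dims=[64, 32], num_classes=3)
x, y = torch.randn(8, 20), torch.randint(0, 3, (8,))
final_logits, aux_logits = model(x)
loss = hcl_loss(final_logits, aux_logits, y, lambdas=[0.3, 0.3])  # all-zero lambdas recover plain CE
loss.backward()
```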
the rest of the paper is organised as follow: in section 2 the proposed method is described; section 3 describes the experimental setup and the evaluation methods; in section 4 the results are reported and discussed; finally, section 5 contains final remarks.in a dnn network each hidden layer l performs a non-linear functional map φ l θ l from the output of the this paper has been published in its final version on pattern recognition letters journal in open access., 2016;huang et al. more specifically, as we will discuss in more detail in section 2, the output of each hidden layer l is sent to an additional linear output layer which is trained to classify the input x on the basis of the input representation encoding in the layer l (see figure1). each neuron i belonging to the l-th layer, achieves a two-step computation (see(bishop and nasrabadi, 2006), chapter 4): a linear combination a l i of the neuron's inputs is computed first, and then the neuron output z l i is computed by an activation function f l (•), i., 2021)for a review). the activation function input a l i is usually computed on the basis of real values, said weights, associated with the connections coming from the neurons belonging to the layer l -1 (and possibly from other previous layers) and a bias value associated to the neuron i. , l l-1 hidden layers and a final layer l l having c neurons, we connect each hidden layer l j , 1 ≤ j ≤ l -2 with a new layer l j acting as an independent classifier. l l-2 } composed of c neurons are added, and each l j layer receives connections from the hidden layer l j only, making each l j as an independent linear classifier. therefore, given a dnn m, we obtain an hcl network m which will be composed of two distinct sets of layers: i) standard neural network layers {l 1 , l 2 , . , l l }, composing m, and ii) hidden classification layers {l 1 , l 2 , . , l l-2 }, composing a set of layers where each layer l i favours more separable data representations in the respective hidden layer l i , independently from the subsequent layers.where y n is the score returned by the final layer of the classifier m, z n, j is the score returned by the hidden classification layer l j tied to the l j layer, θ m are the parame-ters of the model m, and {λ 1 , λ 2 , . setting λ 1 = λ 2 = • • • = λ l-1 = 0 results in standard ce loss applied to the final classification layer only, while different values give different weights to the hidden classification layers {l j } l-1 j=1 . each hidden layer l i of the main branch of the network produces an output vecz i , and the final classification layer l l produces the output vecy. to this aim, we proposed a novel dnn architecture, which we named hidden classification layer (hcl) network, where the output of each standard hidden layer is sent to a hidden classification layer trained to classify the input x based on the x representation given by the standard layer itself. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/712.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/712.txt new file mode 100644 index 0000000000000000000000000000000000000000..6243832845f402575b08df5d363a45906dd617d9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/712.txt @@ -0,0 +1 @@ +a major challenge in developing high-performing machine learning algorithms in the biomedical domain is the limited availability of labeled data, whereas state-of-the-art deep learning models require more labeled data to generalize better. 
this poses a significant problem if such models shall be leveraged to automatize tasks in this domain. accordingly, in this paper, we focus on the following: "is it possible to learn physiological domain agnostic deep learning architectures?" and "can we train such an architecture in a self-supervised manner without the need for labeled data?".one promising avenue in this research domain focuses on deep representation learning . these models learn to extract generalizable features, omitting the need for domainspecific hand-designed features. we particularly focus on selfsupervised learning (ssl) frameworks, a machine learning paradigm where the model is trained without labeled data, the feature encoder of ts-moco predicts physiological labels using only one dense layer that must be fitted on the labeled data on top of the learned embedding space.• how can we learn a generalizable feature encoder architecture and learning algorithm for diverse domains of physiological signal recordings?.in ssl with momentum contrast, two models of the same architecture are used: a student encoder and a teacher (or momentum) encoder. the student encoder is trained via loss backpropagation, while the teacher model parameters are set as an exponentially moving average of the student model parameters. the momentum encoder is hereby used to build a representation dictionary on-the-fly, which is then matched to the representation output by the student encoder network in a contrastive loss. in the byol framework by, both the momentum encoder and the student encoder predict representations where different input augmentations are applied.the former is encoded into a representation vector c by a feature encoder described through f enc with parameters θ, and the latter is used as target for a reconstruction head, that predicts the subsequent timesteps t ∈ of its input from a context vector. ts-moco framework architecture 1) augmentation function: whether or not training the proposed framework is successful heavily depends on the used augmentation function as it defines the encoding and reconstruction task.2) feature encoder: the used feature encoder consists of a tokenizer, the addition of positional embeddings, and a transformer encoder. as a first step, the signal used as input to the encoder is mapped into an embedding space by a single linear layer named tokenizer.in order to allow the feature encoder to exploit positional information of the signal values, we add positional embeddings to the tokenized signals, i. the context vector output by the student feature encoder is thereby used as initial hidden state.table i presents a comparison of the self-supervised ts-moco framework with a supervised trained baseline model with same encoding architecture (supervised), the selfsupervised ts-tcc framework, and a random classifier based on the strategy introduced in section iv-b1.2) randomly initialized encoder: to evaluate the feasibility of the pre-training phase of ts-moco, we compare it against a model with same encoding architecture, but the encoder of this setup is randomly initialized and freezed i.we present ts-moco 1 , the first transformer encoder-based self-supervised learning framework with momentum contrast for physiological signal recording domain datasets. 
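the student/teacher momentum update described above can be sketched in a few lines of pytorch; the tiny encoder, momentum value and placeholder loss below are illustrative stand-ins, not the actual ts-moco feature encoder or contrastive objective.

```python
import copy
import torch
import torch.nn as nn

def ema_update(student: nn.Module, teacher: nn.Module, momentum: float = 0.99):
    """Set each teacher parameter to an exponential moving average of the student parameter."""
    with torch.no_grad():
        for p_t, p_s in zip(teacher.parameters(), student.parameters()):
            p_t.mul_(momentum).add_(p_s, alpha=1.0 - momentum)

# tiny stand-in encoder; the real feature encoder would be a tokenizer + transformer
student = nn.Sequential(nn.Linear(32, 16), nn.ReLU(), nn.Linear(16, 8))
teacher = copy.deepcopy(student)
for p in teacher.parameters():
    p.requires_grad_(False)          # the teacher is never trained by backpropagation

# one (fake) training step: only the student receives gradients ...
opt = torch.optim.SGD(student.parameters(), lr=1e-2)
loss = student(torch.randn(4, 32)).pow(2).mean()   # placeholder for the contrastive loss
loss.backward()
opt.step()
# ... and the teacher then tracks the student via the momentum update
ema_update(student, teacher, momentum=0.99)
```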
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/713.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/713.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ecfb8dec0bfe1a2bd8e38bb10c7482a192645af --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/713.txt @@ -0,0 +1 @@ +when dealing with problems that are computationally too costly to solve explicitly, such as np-hard problems, it is common to rely on heuristics. the idea of using neural networks to train such heuristics is quite appealing and has attracted considerable interest over the years. one aims to enhance an algorithm, such as greedy search, with a neural network module that is trained to improve the decision-making of the algorithm. see , or for an introduction and an overview of the area. in practice, problem instances typically come from a distribution with specific biases which are hard to describe explicitly. these can be exploited by a neural network. as an illustration, let us consider the hamiltonian cycle problem (hcp), which is at the core of this paper (nodes in the cycle can not repeat). it asks the following: problem 1 (hcp). determine whether or not there exists a cycle that passes through all vertices of a given graph. if it exists, such a cycle is called a hamiltonian cycle, and the graph is said to be hamiltonian.the general hcp is known to be np-complete and thus computationally intractable. currently, the fastest known exact this work has been supported in part by the croatian science foundation under the project single genome and metagenome assembly (ip-2018-01-5886) and the genome institute of singapore, a*star core funding solution algorithm is due to and has worst-case complexity of o(1.657 n ).as far as applications are concerned, hcp is used to improve runtimes of rendering engines, see . to do so, one solves the hcp for a dual graph of triangulation and renders the triangles in that order which reduces the number of points to process. another application of hcp comes from genomics, more specifically, the problem of de novo genome assembly. the task here is to reconstruct the genetic material of an organism, i.e. the exact sequence of nucleobases on all of its chromosomes, from a large number of sequenced fragments called reads. as chromosomes contain hundreds of millions bases, correctly assembling a single one is already a huge undertaking, see for an example. interpreting overlaps between reads as edges, after preprocessing and cleaning (see ), one ends up with a string graph as proposed in . the hamiltonian cycle in the string graph corresponds to the correct assembly of the chromosome. for more details see , , and . both triangular meshes of 3d objects and string graphs of various assemblers (such as or ) have specific structures and statistical properties arsing from the context. these could make solving the hcp easier but are difficult to exploit directly. we show here how to exploit them using graph neural networks in a similarly specific setting of erdős-rényi random graphs.for hcp in general, heuristics based on hopfield networks were already trained in the early 90-ties, see , . more recently, however, the area of geometric deep learning and graph neural networks has seen rapid developments and produced neural network layers such as message passing or graph attention layers . 
these layers are built to exploit any graph structure in data and can handle arbitrarily large graphs with a limited set of parameters, resembling convolution layers in computer vision. they have found applications in image and text processing, combinatorial optimization, physics, chemistry and biology . see and for a deeper dive into the area. in particular, they are excellent candidates for heuristics of graph-based problem. however, most efforts so far have been directed towards combinatorial optimization problems, the two-dimensional traveling salesman problem in particular. heuristics for the 2d-tsp based on transformer architecture were trained in , and those based on graph neural networks in and . the state-of-the-art result is achieved in where a comprehensive list of references can be found as well. it has to be noted that previously mentioned models still perform worse than the concorde tsp solver , a state-of-the-art exact solver based on branch and bound search combined with the cutting plane method. nevertheless, theoretical complexities of neural network models are superior to concorde. let us also mention , and which work with general combinatorial optimization and constraint satisfaction problems.in this paper we present a hcp solver based on graph neural networks and show that it easily outperforms most hand-made heuristics. the code is available at https://github.com/lbcbsci/gnns-hamiltonian-cycles. as an illustration, let us consider the hamiltonian cycle problem (hcp), which is at the core of this paper (nodes in the cycle can not repeat). if it exists, such a cycle is called a hamiltonian cycle, and the graph is said to be hamiltonian. we show here how to exploit them using graph neural networks in a similarly specific setting of erdős-rényi random graphs. more recently, however, the area of geometric deep learning and graph neural networks has seen rapid developments and produced neural network layers such as message passingor graph attention layers. heuristics for the 2d-tsp based on transformer architecture were trained in,and those based on graph neural networks inand.in this paper we present a hcp solver based on graph neural networks and show that it easily outperforms most hand-made heuristics.hence, tsp solvers can be used for hcp and we shall exploit this by using concorde tsp solver, see, to evaluate the performance of our models in section v. the hcp is classically posed as a decision problem: determine whether the graph contains a hamiltonian cycle or not. in case the output of a solver is not a valid hamiltonian cycle, which is straightforward to check, we assume the solver predicted that no hamiltonian cycle exists. there are datasets of collected hcp problems, see, for example,or, but they are not quite large enough to train neural networks on.since graph neural networks (gnn) form the central component of our model, hcp information needs to be represented in the suitable form. even though they can easily be generated using existing hcp solvers, we will show it is possible to train on artificially generated graphs such that hcp solution is known in advance. a single training example consists of a graph g and a hamiltonian cycle v 1 v 2 .(i) concorde tsp solver -the state-of-the-art exact tsp solver from, (ii) hybridham -an hcp heuristic from, (iii) ant-inspired heuristic -an hcp heuristic presented in, (iv) least degree first heuristic -simple greedy heuristic always selecting the neighbor with the lowest degree. 
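the idea of training on artificially generated graphs whose hamiltonian cycle is known in advance, together with the least-degree-first baseline (iv), can be sketched as follows; the graph size, edge probability and adjacency representation are illustrative choices, not the paper's exact generation procedure.

```python
import random

def graph_with_planted_cycle(n, extra_edge_prob=0.1, seed=0):
    """Build a random graph that is Hamiltonian by construction: plant a cycle, then add noise edges."""
    rng = random.Random(seed)
    order = list(range(n))
    rng.shuffle(order)
    edges = {frozenset((order[i], order[(i + 1) % n])) for i in range(n)}  # planted Hamiltonian cycle
    for u in range(n):
        for v in range(u + 1, n):
            if rng.random() < extra_edge_prob:
                edges.add(frozenset((u, v)))                               # Erdős–Rényi-style extra edges
    adj = {v: set() for v in range(n)}
    for e in edges:
        u, v = tuple(e)
        adj[u].add(v)
        adj[v].add(u)
    return adj, order  # `order` is a known Hamiltonian cycle, usable as a training label

def least_degree_first(adj, start=0):
    """Greedy baseline: always walk to the unvisited neighbour with the lowest degree."""
    path, visited = [start], {start}
    while len(path) < len(adj):
        candidates = [v for v in adj[path[-1]] if v not in visited]
        if not candidates:
            return None                      # greedy walk got stuck: no cycle found
        nxt = min(candidates, key=lambda v: len(adj[v]))
        path.append(nxt)
        visited.add(nxt)
    return path if start in adj[path[-1]] else None   # must close back into a cycle

adj, cycle = graph_with_planted_cycle(30)
print("planted cycle length:", len(cycle))
print("greedy heuristic found a cycle:", least_degree_first(adj) is not None)
```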
nevertheless, neural network solvers are yet to achieve reasonable performance on large input graphs and concorde tsp solver remains the best-performing hcp solver. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/714.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/714.txt new file mode 100644 index 0000000000000000000000000000000000000000..3012b02c36523b82b242f2a9c51e5558f56fb7eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/714.txt @@ -0,0 +1 @@ +the rise of internet-of-things (iot) and cyber-physical systems has led to exponential growth in data collection from distributed devices. however, transferring this massive amount of data to a centralized processing point for inference and decision-making is often impractical due to resource constraints and privacy concerns. to overcome these challenges, distributed learning, with its on-device processing, is an attractive alternative, enabling efficient data analysis without moving the raw data out of the edge devices. federated learning (fl) is a distributed learning framework that facilitates collaborative model training across edge devices or clients without exposing the underlying data - . in particular, using its local data, each client refines a global model shared by a server, this work was supported by the research council of norway. a conference precursor of this work appears in the asilomar conference on signals, systems, and computers, pacific grove, usa, nov. 2022 .f. gauthier, v. c. gogineni, and s. werner are with the department of electronic systems, norwegian university of science and technology, trondheim, norway. email: {francois.gauthier, vinay.gogineni, stefan.werner}@ntnu.no.yih-fang huang is with the department of electrical engineering, university of notre dame, notre dame, in 46556 usa (e-mail: huang@nd.edu).anthony kuh is with the department of electrical engineering, university of hawaii at manoa, honolulu, hi 96882 usa (e-mail: kuh@hawaii.edu). and subsequently transfers the updated model back to the server which then aggregates all updated client models before sending an update back to clients for further refinements.to date, research on fl mostly uses a single-server architecture, which is susceptible to communication and computation bottlenecks and scales poorly with the number and geographical dispersion of participating clients. to address these concerns, some alternatives to the single-server architecture have been proposed, see, e.g., - , such as clientedge-server hierarchical learning and the graph federated architecture , . in client-edge-server hierarchical learning, edge servers perform partial aggregation with their associated clients and communicate their results to a single cloud server that performs the global aggregation. however, using a single cloud server is susceptible to bottlenecks and can only accommodate up to a limited number of edge servers. in contrast, the graph federated architecture uses a server network in which each server aggregates the information from its associated clients and shares its model with its neighbors. therefore, the graph federated architecture is highly scalable with the number of clients and easier to implement, thanks to its distributed nature.one of the main challenges in fl is data heterogeneity, which means there can be substantial differences in the underlying statistical distributions among clients' data - . 
consequently, a unique globally shared model can be inadequate for such settings, and personalized models must be learned instead - . for example, autonomous vehicles need to maintain vehicle-specific models of their highly dynamic environment while collaborating with nearby vehicles and/or smart city iot devices . this requirement can be met by personalized fl, where clients, or groups of clients (clusters), learn client-or cluster-specific models - . these personalized models typically share some similarities . as an example, the environment of an autonomous vehicle could be shared with other connected objects. leveraging the similarities between cluster-specific models can, therefore, improve performance , , a process known as intercluster learning, which is particularly important when some clients or clusters have insufficient data , .personalized fl has received considerable attention lately due to its ability to improve learning performance in settings where clients are required to observe device-specific behaviors, see, e.g., , - . it is used in many applications such as healthcare, electrical load forecasting, biometrics, drone swarms, and autonomous vehicles , , - . however, all those works are limited to single-server cases.for example, although extends personalized fl to a multiserver architecture, it assumes that all the clients associated with a given server learn the same model. under this assumption, each server maintains a single model trained via conventional fl and the model is refined by communicating with other servers about their models. however, the general case where each distributed server needs to enable the learning of personalized models and collaborate with its neighbors to refine those, is yet to be studied.in the context of graph fl, many devices take part in the training process, and ensuring the privacy and security of client data is crucial. the risk of eavesdropping attacks on the clientserver channels increases with the number of devices in the system, and not all devices can be trusted. even if data is not explicitly shared among clients, repeated message exchanges could reveal sensitive information to curious devices or external eavesdroppers , . in order to reduce this risk, differential privacy (dp) has been introduced to protect client privacy by ensuring that the inclusion or exclusion of an individual data sample does not significantly affect the algorithm output. in other words, dp limits the ability of attackers to infer information about individual data samples by adding controlled noise to the data before sharing it with the server - . in particular, the zero-concentrated dp (zcdp) variant is well-suited for iterative implementations, as it allows the privacy budget to be adjusted dynamically based on the number of iterations - . therefore, this paper considers zcdp in the graph fl architecture where the privacy of client data is of utmost importance. by employing zcdp, clients perturb their local model estimates with a noise sequence of known variance that decreases progressively throughout the computation to ensure privacy without compromising model accuracy.this manuscript tackles the general case of personalized graph federated learning (pgfl) in both a conventional and privacy-preserving manner. specifically, we consider a multiserver architecture with distributed clients grouped into clusters, irrespective of their associated servers, for the decentralized training of cluster-specific personalized models. 
the proposed algorithms, within the considered pgfl architecture, leverage similarities between clusters to mitigate data scarcity and improve learning performance. the local training in the proposed framework uses the alternating direction method of multipliers (admm), well-suited for distributed applications - and demonstrating fast, often linear , , convergence. the main contributions of this manuscript are summarized as follows.• a pgfl framework is proposed to improve learning performance in a distributed learning setting. our approach employs inter-cluster learning to improve the accuracy of local models by leveraging information from other clusters. the graph fl problem is formulated as a constrained optimization problem and solved in a distributed manner using admm. • we design a privacy-preserving variant of the pgfl algorithm, where clients perturb their local models to achieve local differential privacy using the zcdp framework. the privacy loss is quantified per iteration as well as throughout the computation. • mathematical analysis is given to show that the privacypreserving implementation of the pgfl algorithm converges to the optimal solution for each cluster in linear time. additionally, our analysis shows that utilizing intercluster learning leads to an alternative output whose distance to the original solution is bounded and that the bound depends on cluster similarity and can be adjusted with hyperparameter selection. the paper is organized as follows. section ii introduces the problem and presents the pgfl algorithm along with its zcdp variant. sections iii and iv are dedicated to the convergence and privacy analyses of the proposed algorithm. in section v, we demonstrate the effectiveness of the algorithm through a series of experiments involving regression and classification tasks. section vi concludes the paper.mathematical notations: matrices, column vectors, and scalars are denoted by bold uppercase, bold lowercase, and lowercase letters, respectively. the notation a t denotes transpose of the matrix a, the identity matrix is denoted by i, and a null vector by 0. the exclusion of an element a from set a is denoted a\a. the notation a, b denotes the inner product between vectors a and b. the statistical expectation operator is represented by e, and n (µ, σ) and u(a, b) respectively denote the normal distribution with mean µ and covariance matrix σ and the uniform distribution on an interval (a, b). finally, the gradient of a function a(•) is denoted by ∇a(•). federated learning (fl) is a distributed learning framework that facilitates collaborative model training across edge devices or clients without exposing the underlying data-. leveraging the similarities between cluster-specific models can, therefore, improve performance,, a process known as intercluster learning, which is particularly important when some clients or clusters have insufficient data,. section iii-c bounds the distance between the cluster-specific solutions obtained with and without intercluster learning by a function of the inter-cluster learning parameter sequence. 
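as a loose illustration of the privacy-preserving step described above (local models perturbed with noise of progressively decreasing variance), here is a small numpy sketch; the gaussian noise schedule and its parameters are assumptions for the example and not the paper's calibrated zcdp mechanism.

```python
import numpy as np

def perturb_local_model(w_local, base_sigma, iteration, decay=0.95, rng=None):
    """Return a noisy copy of the local model estimate before sharing it.

    The Gaussian noise scale shrinks with the iteration index, mimicking a
    perturbation sequence of known, progressively decreasing variance.
    """
    if rng is None:
        rng = np.random.default_rng()
    sigma = base_sigma * (decay ** iteration)        # decreasing noise scale
    return w_local + rng.normal(0.0, sigma, size=w_local.shape)

rng = np.random.default_rng(1)
w = rng.normal(size=10)                              # a client's local model estimate
shared = [perturb_local_model(w, base_sigma=0.5, iteration=t, rng=rng) for t in range(5)]
print([round(float(np.linalg.norm(s - w)), 3) for s in shared])  # perturbation shrinks over iterations
```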
usingand with a convex and smooth function f (w) demonstrates that the proposed pgfl algorithm, without inter-cluster learning (τ = 0 ), converges to the optimal solution of (2) in linear time for any given cluster.next, we investigate the effect of inter-cluster learning by comparing the performance of models obtained using the pgfl algorithm with and without inter-cluster learning.where the expectation is taken with respect to the privacyrelated noise added in(11)and the data observation noise, w(n) q,s denotes the model obtained by the algorithm without inter-cluster learning, and η is the maximum cluster model distance, defined as:. clients of a given cluster solve the ridge regression problem with data generated from an original model w * q , obtained with w * q = w * 0 + γw * 0 with γ ∼ u(-0.performance is evaluated by computing the normalized mean squared deviation (nmsd) of the local models with respect to the corresponding cluster-specific original model used to generate the data, w * q for k ∈ c (q) . the results illustrate the superiority of the proposed pgfl algorithm over fedavg, as cluster-specific learning tasks benefit significantly from personalized models tailored to each cluster. this figure confirms that inter-cluster learning has the potential to increase learning performance by alleviating data scarcity, as the pgfl algorithm achieves lower nmsd with τ (n) ∈ (0. the learning curves are presented in fig.we simulated the pgfl algorithm in the context of classification with client scheduling, privacy, a fixed inter-cluster learning parameter τ (n) = τ = 0.this paper proposed a framework for personalized graph federated learning in which distributed servers collaborate with each other and their respective clients to learn cluster-specific personalized models. our mathematical analysis showed that this algorithm converges to the exact optimal solution for each cluster in linear time and that utilizing intercluster learning leads to an alternative output whose distance to the original solution is bounded by a value that can be adjusted with the inter-cluster learning parameter sequence. finally, numerical simulations showed that the proposed method is capable of leveraging the graph federated architecture and the similarity between the clusters learning tasks to improve learning performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/715.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/715.txt new file mode 100644 index 0000000000000000000000000000000000000000..16e33afa9ec0a3d6cca8f68a2988ebb9b819526e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/715.txt @@ -0,0 +1 @@ +theoretical models of decision-making are often poor descriptions of behavior in practice. as a prime example, classic economic models such as nash equilibrium fail to describe salient aspects of human behavior: people often choose dominated actions and fail to account for others' strategic decision making . in response to such failures, fields such as behavioral game theory aim to develop interpretable models that can predict human responses to strategic situations. 
such models are helpful to cognitive scientists, for learning how humans think when confronted with economic or strategic choices; to designers of economic systems, for tuning these systems to perform better in practice; and to designers of cooperative ai agents, for enabling these agents to effectively coordinate their behavior with humans .however, evaluating the quality of such a model on a dataset requires a loss function. researchers working in behavioral game theory have made a wide variety of different choices about precisely which loss function to use for such evaluations, with error rate, negative log-likelihood, crossentropy, and (at least two notions of) mean-squared error all being common choices. clearly, the choice is a substantive one, as different losses will disagree about the quality of a prediction. which loss function should they use?in this paper, we attempt to answer this question with a first-principles argument. though we are motivated by behavioral game theory-and so it is the basis of our examplesour argument depends only on four key characteristics of this field. first, there is some mapping of interest from settings to distributions over finite sets of discrete outcomes (e.g., the distribution of human decisions in strategic situations). second, it is possible to collect multiple samples from this mapping for any given setting (e.g., by running an experiment with multiple participants). third, a researcher seeks a predictive model of this mapping, which can predict the distribution of unseen data. fourth, this model must also be interpretable, having few parameters whose values can be inspected and understood, and so it cannot generally represent the true mapping perfectly. our arguments can therefore be extended to other domains that share these characteristics; we give several examples at the end of this paper.from these characteristics, we argue that loss functions should satisfy five key axioms. the first two, which we call alignment axioms, ensure that the loss function induces a correct preference ordering over predictions. these axioms, sample pareto-alignment and distributional pareto-alignment, ensure that the loss function penalizes predictions that are clearly worse (on a given dataset or in expectation over realizations of this data, respectively). the other three, interpretability axioms, relate the numerical value of the loss to a prediction's quality. empirical distribution sufficiency requires that the loss be invariant to the number or order of the observations; counterfactual pareto-regularity ensures that the loss appropriately respects changes in the data; and zero minimum gives the loss an interpretable optimum.we show that it is possible to satisfy all of these axioms: we identify an entire family of loss functions that do so, which we dub "diagonal bounded bregman divergences". exactly one widely used loss function, the squared l2 error between the predicted and empirical distributions, belongs to this set; we show how each of the other common loss functions violates at least one axiom. in particular, the entire class of scoring rules, 1 a class of loss functions with celebrated alignment properties, all fail our interpretability axioms, making them suitable for training models but not evaluating them.the statistician's view: the likelihood principle. 
it might seem that the problem of choosing a loss function is a straightforward application of statistical inference: given a dataset and a model class that induces a set of probability distributions, we seek to understand how well each distribution describes the data. then, the standard statistics textbook argument is that we should use the likelihood of the data to evaluate each of these predicted distributions. this argument is known as the "likelihood principle" : if the data was generated by one of the predicted distributions, then likelihood is a sufficient statistic for this distribution. the catch is that this argument relies on the assumption that the model class is "well-specified", containing a model that outputs the true generating distribution. this is not usually the case when evaluating interpretable models, which typically approximate behavior rather than to predict it perfectly. we elaborate further on the problem of evaluating misspecified models when presenting our alignment axioms.the forecaster's view: scoring rules. another closely related problem is that of evaluating probabilistic forecasts of future events. work in this field generally uses scoring rules , a class of loss functions that evaluate predictions independently on each observation. axiomatic characterizations from this literature agree that losses should be properthe expected loss should be minimized by the true distribution, an axiom that we refer to in our analysis as "distributionally proper" -but diverge beyond this point: negative log-likelihood is the only proper scoring rule that satisfies a locality axiom , and two different neutrality axioms characterize brier score and the spherical score . our work differs in that we propose axioms that address critical problems that arise when evaluating behavioral models, without being concerned that we are left with an entire class of loss functions.some authors have proposed stronger alternatives to propriety. instead of simply requiring that the correct prediction minimize the expected loss, others have considered lowerbounding the loss of incorrect predictions , maximizing the loss of a naive prediction , or ensuring that it also receives a lower loss in finite samples with high probability . these axioms focus on identifying correct predictions, while we focus on comparing and evaluating incorrect predictions.the field of property elicitation extends the definition of propriety in a different way, aiming to construct loss functions whose expectations are minimized at other summary statistics of a distribution; propriety is the special case of eliciting the mean. of particular interest here is work on eliciting multiple properties , as their "accuracy rewarding" and "order sensitivity" axioms are similar to our alignment axioms. we discuss this relationship further in section 3.evaluating model classes. our axioms are concerned with refer to any arbitrary loss function. of course, our results on scoring rules only apply to the former, more restrictive definition.evaluating individual predictions. fudenberg et al. tackle the related problem of evaluating a model class, considering the cross-validation performance of a training algorithm that selects a model from this class. 
they formalize a completeness metric, which transforms an existing loss, giving a score of 100% to an algorithm with the best possible cross-validation performance and 0% to a baseline algorithm.their work complements ours: their completeness measure can be applied to any loss function, but they do not claim how this loss should behave on individual datasets. we thus recommend that researchers evaluating a model class should apply completeness to a loss that satisfies our alignment axioms.l ce (f, y) = 1 n l nll (f, a), l kl (f, y) = -d a=1 p(y) a log( fa p(y)a ). we say that q is a pareto improvement over p with respect to r, denoted by q ≻ r p, if for all a ∈ a, either p a ≤ q a ≤ r a or p a ≥ q a ≥ r a , and furthermore this inequality between p a and q a is strict for at least one a. for any n, under mild technical conditions, a loss function l that satisfies dp must be of the form l(f, y) = ∇ (b,db) (ρ(y), f ) + c(y) for some closed and proper strictly convex function b, subgradient db of b, translation c : a n → r, and summary statistic ρ : a n → ∆(a), where ey∼p n ρ(y) = p for all p. under mild technical conditions, a loss function l satisfies sp and dp if and only if l(f, y) = ∇ (bn,dbn) (p(y), f ) + c(y) for some family of closed and proper strictly convex functions b with subgradients db and some translation c. now, suppose that l(f, y) = ∇ (b,db) (p(y), f ) is a bregman divergence, and consider the alternative loss l ′ (f, y) = l(f, y) + c(y), where c(y) is an arbitrary function that depends only on the data. then there exists some closed and proper strictly convex function b and subgradient db, and some translation c such that l is of the form l(f, y) = ∇ (b,db) (p(y), f ) + c(y), for all f ∈ relint(∆(a)), y ∈ a n .since a bregman divergence ∇ (b,db) (p, q) of strictly convex b is uniquely minimized by p = q, for any y ∈ a n , l(f, y) is uniquely minimized by f = ρ(y); thus, sp constrains that ρ(y) = p(y) for all y. if l(f, y) = ∇ (b,db) (p(y), f ) + c(y) for some closed and proper strictly convex function all three of these axioms follow from a basic property of bregman divergences of all strictly convex functions, which is that ∇ (b,db) (p, q) ≥ 0, with equality if and only if p = q. for any bregman divergence with strictly convex b and fixed p(y), ∇ (b,db) (p(y), f ) is uniquely minimized by f = p(y). let p ∈ ∆(a) be a distribution, and let b 1 , b 2 : ∆(a) → r be two closed and proper strictly convex functions with subgradients db 1 and db 2 , respectively.for the only-if direction, let d(f ) = b 1 (f ) -b 2 (f ) be the difference between the two functions, and let dd(f ) = db 1 (f ) -db 2 (f ) be the difference between their subgradients. let l be a dbbd, where l(f, y) = ∇ b b (p(y), f ) for some b. we say that q is a weak pareto improvement over p with respect to r, denoted by q ≿ r p, if for all a ∈ a, f either p a ≤ q a ≤ r a or p a ≥ q a ≥ r a . for all m ≥ 1, settings {s i } m i=1 , datasets y i in each s i , and models f and g, if f (s i ) ≿ p(y i ) g(s i ) for all i and f (s j ) ≻ p(y j ) g(s j ) for some j, then l({(s i , y i , f (s i ))} m i=1 ) < l({(s i , y i , g(s i ))} m i=1 ). for all m ≥ 1, settings {s i } m i=1 , datasets y i and z i of the same size n i in each s i , and models f , if p(y i ) ≿ f (si) p(z i ) for all i and p(y j ) ≻ f (sj ) p(z j ) for some j, then l({(s i , y i , f (s i ))} m i=1 ) < l({(s i , z i , f (s i ))} m i=1 ). 
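the squared l2 error between the predicted and empirical distributions, named above as the one widely used loss belonging to the diagonal bounded bregman divergence family, can be computed as in the following sketch; the example prediction and observations are made up.

```python
import numpy as np

def empirical_distribution(y, num_outcomes):
    """p-hat(y): fraction of the n observations that landed on each discrete outcome."""
    counts = np.bincount(y, minlength=num_outcomes)
    return counts / counts.sum()

def squared_l2_loss(f, y, num_outcomes):
    """Squared L2 error between the predicted distribution f and the empirical distribution of y."""
    p_hat = empirical_distribution(y, num_outcomes)
    return float(np.sum((f - p_hat) ** 2))

f = np.array([0.5, 0.3, 0.2])            # a model's predicted distribution over 3 actions
y = np.array([0, 0, 1, 2, 0, 1])         # observed choices from 6 participants
print(squared_l2_loss(f, y, num_outcomes=3))   # 0.0 iff f equals the empirical distribution
```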
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/716.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/716.txt new file mode 100644 index 0000000000000000000000000000000000000000..c2a7724fb5ac7e4c4706800f7e41343d2b5be4b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/716.txt @@ -0,0 +1 @@ +deep neural networks (dnn) have become ubiquitous in numerous application domains such as computer vision and natural language processing. however, the performance and computational requirements of dnn models are highly dependent on their channel dimensions, namely a set of hyperparameters that define the number of filters in dnn layers. finding the right set of channel dimensions for a dnn model plays a crucial role in achieving high performance under tight computational resource constraints, but it poses a challenging task for developers and engineers. therefore, researchers have developed neural architecture search frameworks that automatically look for optimal channel dimensions of a dnn model .the early versions of neural architecture search frameworks have resorted to reinforcement learning , evolutionary algorithms , and bayesian optimization to search for optimal dnn architectures. unfortunately, these methods have time and space complexities that increase combinatorially with the number of options that are defined in the search space, requiring excessive amounts of computational resources. to reduce the computational complexity of channel dimension search, wan et al. proposed the differentiable channel masking method, which mimics various channel dimensions by simply passing the feature maps through a set of binary masks. while the differentiable channel masking method significantly reduces the computational complexity of channel dimension search, it requires a search space that is carefully designed and tuned prior to the search phase, which hinders its practicality and usability in realistic scenarios.in this work, we propose flexcharts, which utilizes a flexible search space that does not need to be defined a priori. we reformulate the problem of differentiable channel dimension search such that flexcharts does not only search for the optimal channel dimensions but also modifies the boundaries of the search space on-the-fly to add further flexibility and reach optimal channel dimensions. the proposed method relaxes the requirement of an a priori well-designed search space for channel dimension optimization and enables finding the optimal channel dimensions only with loosely-defined initial conditions of a search space, improving the practicality and usability of neural architecture search.the rest of this paper is organized as follows: we first discuss the related work on neural architecture search, then we give a background information on the differentiable channel masking method, which forms the basis of our work. we then elaborate on the proposed flexcharts method as well as our novel dynamic channel allocation mechanism. finally, we give the details of our experiments and discuss the results. while the differentiable channel masking method significantly reduces the computational complexity of channel dimension search, it requires a search space that is carefully designed and tuned prior to the search phase, which hinders its practicality and usability in realistic scenarios. 
we reformulate the problem of differentiable channel dimension search such that flexcharts does not only search for the optimal channel dimensions but also modifies the boundaries of the search space on-the-fly to add further flexibility and reach optimal channel dimensions. the proposed method relaxes the requirement of an a priori well-designed search space for channel dimension optimization and enables finding the optimal channel dimensions only with loosely-defined initial conditions of a search space, improving the practicality and usability of neural architecture search.proposed differentiable channel masking method, which significantly improves the computational efficiency of neural architecture search with a search space that includes large numbers of options for channel dimensions.1, we observe that many channel dimensions are located at the boundary of the search space, which indicates that the optimal channel dimensions may lie outside of the engineered search space, resulting in a dnn architecture that does not correspond to the optimal solution. our work addresses these challenges and introduces a flexible search space for channel dimensions by proposing a novel differentiable neural architecture search to optimize for channel dimensions efficiently.in standard search methods, each candidate channel dimension requires an additional convolutional kernel, which increases the computational cost and memory requirements of the search linearly with the number of channel options. to efficiently search for optimal channel dimensions with large numbers of options, the channel masking method instantiates and trains a single kernel and simulates various channel dimensions masking out a fraction of the channels in this kernel. the channel masking method exploits the fact that any convolutional kernel with an output channel dimension f k that is smaller than f can be obtained simply by selecting f k channels from y and masking out the rest.the channel masking method permits to search for channel dimensions among various options with minimal computational overhead. however, the standard channel masking methods proposed in prior work can search only within a fixed range of channel dimensions, which hinders its effectiveness and practicality.to enable searching for channel dimensions in a flexible range, we first introduce the flexible channel masking method, which redefines the α variables so as to permit to change the range of channel dimensions as the search progresses.in short, the proposed flexcharts algorithm permits to search for optimal channel dimensions in a flexible channel dimension range while automatically managing the changes in the supernet with minimal computational overhead. because the effectiveness and efficiency of dmaskingnas method is highly sensitive to their predefined range of channel dimensions, we create two baselines that represent dmaskingnas methods with small and large range of channel dimensions, which we simply refer to as dmask-small and dmask-large. we also introduced a new dynamic channel allocation mechanism that allows changing the kernel dimensions efficiently during the search in order to dynamically adapt to the target channel dimensions. 
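a minimal pytorch sketch of the channel masking idea described above, where one full-width kernel simulates several candidate channel dimensions through output-channel masks weighted by architecture parameters; the candidate widths and the softmax relaxation are illustrative and do not reproduce flexcharts' flexible-range search or dynamic channel allocation.

```python
import torch
import torch.nn as nn

class MaskedConv2d(nn.Module):
    """One full-width conv kernel; smaller channel dimensions are simulated by masking output channels."""
    def __init__(self, in_ch, max_out_ch, candidate_widths, kernel_size=3):
        super().__init__()
        self.conv = nn.Conv2d(in_ch, max_out_ch, kernel_size, padding=kernel_size // 2)
        # one architecture parameter (alpha) per candidate channel dimension
        self.alpha = nn.Parameter(torch.zeros(len(candidate_widths)))
        masks = torch.zeros(len(candidate_widths), max_out_ch)
        for i, w in enumerate(candidate_widths):
            masks[i, :w] = 1.0               # keep the first w channels, mask out the rest
        self.register_buffer("masks", masks)

    def forward(self, x):
        y = self.conv(x)                                      # (batch, max_out_ch, H, W)
        weights = torch.softmax(self.alpha, dim=0)            # relaxed choice over candidate widths
        mask = (weights[:, None] * self.masks).sum(dim=0)     # expected mask over candidates
        return y * mask[None, :, None, None]

layer = MaskedConv2d(in_ch=16, max_out_ch=64, candidate_widths=[16, 32, 48, 64])
out = layer(torch.randn(2, 16, 8, 8))
print(out.shape)   # torch.Size([2, 64, 8, 8]); later channels are down-weighted by the expected mask
```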
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/717.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/717.txt new file mode 100644 index 0000000000000000000000000000000000000000..2c4d5209681475b53c7d3737a4df1bd5d1fb26df --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/717.txt @@ -0,0 +1 @@ +humans communicate their affective state through a rich variety of nonverbal signals, such as facial expressions and vocal tone. these audiovisual signals can be analyzed using machine learning (ml) approaches for tasks like automated affect recognition . such endeavors have implications for many areas of human life, offering the potential for improved life quality while also bearing risks. for instance, automatic recognition of affect and emotions from audiovisual data can enhance human-machine interactions or help develop systems for the diagnosis and management of mental health . it has already been shown, however, that ml techniques often amplify existing societal biases potentially leading to discriminatory interactions or incorrect mental health diagnoses. given the need to provide fair benefits to all users and avoid potential unfair disadvantages, it is critical that decisions made by affective ml systems are transparent and trustworthy to both users of the systems and those affected by their decisions.ensuring transparency in affective ml poses a challenge, especially with the rise of more complex and opaque methods in the field . for instance, deep-learning methods are becoming more popular due to their high-performancewe gratefully acknowledge funding by the deutsche forschungsgemeinschaft (dfg, german research foundation): trr 318/1 2021 -438445824.capabilities, yet they are very difficult to interpret. research from the field of explainable artificial intelligence (xai) aims to address this problem by facilitating the interpretation of models , either by developing inherently interpretable models or through post-hoc explanations . despite their potential, the extent to which interpretability methods are applied in affective machine learning is not clear. even with an awareness of interpretability, adapting these techniques to affective computing may not be straightforward due to different interpretability needs. this calls for a close evaluation of interpretability in affective ml, similar to the review of tjoa and guan on xai in the medical domain.to our best knowledge, there are no reviews regarding interpretable and explainable affective computing. we aim to address this gap by contributing a comprehensive review of interpretability within affective ml for audio and/or visual data, analyzing the techniques employed to obtain interpretable models. through this process, we identify the gaps and challenges in the research, and suggest potential research directions to establish interpretable affective ml.the rest of the paper is organized as follows. in §ii, we provide a brief introduction to the definitions and taxonomy of interpretability and explainability. we then introduce the methodology for our review in §iii, before outlining the results in §iv. the findings are then discussed and summarized in §v and §vi, respectively. finally, we discuss the ethical impact of our work in §vii. 
most of these approaches provide local explanations (n = 11) while some generate global explanations (n = 6) (three articles include both global and local explanations).in the domain of post-hoc explainability methods, tableii, the most common approach (n = 13) was the implementation of feature attribution methods, such as locally-interpretable model agnostic explanations (lime), shapley additive values (shap), activation maps (cam, gradcam), layer-wise relevance propagation (lrp), and integrated gradients. similar to interpretability-by-design, the post-hoc methods mostly focus on local explanations (n = 13) but a few implement global explanations (n = 5) (three articles include both global and local explanations).we reviewed the use of interpretability methods in affective ml, focusing on tasks employing audio and/or visual data.in the majority of research reviewed here, interpretability was limited to some variation of saliency maps showing which input features or modalities contributed the most to the decisions of the given model. hence, we recommend performing systematic evaluation and comparison of different interpretability methods to ensure that the explanations are understandable and make sense for the given application.it is worth noting that even a carefully chosen interpretability method may only show what features are important but not necessarily explain why these features have a greater impact on the model's decisions than the others, resulting in an interpretability gap that must be filled by the end-user. for example, studies that used raw input for image data can generate salience maps that are more easily interpretable, compared to other input types, such as audio spectrograms which require expert knowledge to achieve a level of interpretability. in this way, global explanations may be used to understand and identify biases and generalizations of models towards different groups, while local explanations may be used to provide an understanding of how intra-and inter-personal variations affect model decisions for specific individuals. in line with recent calls for more rigorousand human-basedevaluation of interpretability, we suggest that affective computing researchers take a more human-centered approach and consider the contexts in which explanations will be situated, including both the application tasks and users.proposed interactive explanations, but this was limited to specific xai methods and did not allow the system to adapt the explanation methods to the users needs. to this end, we suggest researchers should focus on implementing and evaluating a breadth of interpretability methods, beyond just feature importance, in different affective computing contexts. however, interpretable affective ml is still a young field with a limited breadth and evaluation of methods, calling for increased clarity regarding the context of the explanations and the effectiveness of different interpretability methods within stated contexts. our analysis shows that affective ml could benefit from explicit consideration and discussion of the interpretability approach, such as the reliability of the used method, the interpretability of input representations and the choice of interpretability scope. while our work outlined the main developments and limitations, future work is needed for a more in-depth analysis of the use of interpretability methods for different contexts of different affective ml tasks. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/718.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/718.txt new file mode 100644 index 0000000000000000000000000000000000000000..450ab91ac9182fc9d1565ef8278779a1616e91b1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/718.txt @@ -0,0 +1 @@ +the project which is available on github 1 was implemented in python and consists of three main modules whose roles are depicted in fig. 1. a straightforward to use but very powerful command line interface can be utilized in addition to the provided python development interface to interact with each of mlonmcu's components. 1) configuration: a prerequisite step to using mlon-mcu's core features is initializing and installing at least one environment. predefined environment templates are supplied. the idea of allowing to install several environments in parallel is motivated by the need for isolated dependencies and reproducibility.2) flow: the heart of mlonmcu is the definition and execution of single benchmarks or complex benchmarking sessions. each benchmark consists of a set of stages (see fig. 1) which will be invoked until the final stage is reached.3) evaluate: each session generates a set of artifacts, which can be used to further investigate the code generation results or to debug target-related problems, as well as a report with several details about each run. these metrics may consist of the model execution latency (e.g. instructions/cycles/runtime) as well as static (and optionally dynamic) memory usage information. using the included python development api, these reports and artifacts can directly be used to do further analysis of the data or to create comprehensive visualizations. the two main points of discussion are the overheads introduced by several supported framework backends in mlonmcu as well as the runtime performance of various tvm schedules, which define transformations of the computations in a program, on resourceconstrained microcontroller hardware.the default way to deploy a machine learning model using tensorflow lite for microcontrollers is based on the tflite micro interpreter (tflmi), which parses a tflite "flat-buffer" data structure at runtime. an alternative approach is available using the tflite micro compiler (tflmc) project proposed in, which generates minimal inference code for a given model. the aot-compiled models basically have no initialization steps, while the tvmrt requires at least one million instructions to prepare for the model execution, exceeding even the inference time for less complex models such as toycar. while the tvmrt backend introduces several overheads regarding the discussed performance and memory metrics, it is still a very powerful tool, as it allows profiling the model execution on the target device and provides the necessary utilities to use autotvm with microtvm workloads. if inference performance using tflm is sufficient for a given application, the tflmc backend can also be considered to deploy a model with minimal memory overheads.in the following, the four mlperf tiny models are deployed on the four different targets introduced in section iii-a2 using mlonmcu's zephyr platform. 
while the targets esp32c3 and stm32f7 have been able to run all four models without reaching memory limits, both stm32f4 and esp32 failed to deploy the large visual wake-word network, at least for some schedules, due to an insufficient amount of ram available. only considering untuned results, it can be stated: the choice of the used data layout (nhwc: channels-last, the tflite default; nchw: channels-first, the tvm default) has a large impact on the measured inference performance, especially for the vww and resnet model on the esp32c3 and esp32 target, while for the rest, the difference in inference latency is between ×1. when using the nchw layout in tvm, the activation and kernel tensors are internally transformed into a 5- (respectively 6-) dimensional nchwc (oihwio) layout to improve spatial locality, leading to the large gap in inference time between the two considered layouts. in addition to tvm's default schedules (mainly targeting x86 architectures), operator implementations intended for usage with larger arm (aarch64) targets are now considered as well. it can be seen that on most cnns those kernels perform similarly to or worse than tvm's default implementations for both types of layouts, while for the only dnn, the dense/fully-connected operators for arm targets are able to run up to two times more efficiently. tunable convolution schedules exist for both considered nchw schedules as well as for nhwc schedules written for arm targets, allowing cnns to be optimized effectively to improve the inference performance. for cnns, tvm's default nchw schedules performed best, especially with autotuning enabled, while for dnns such as the toycar network, arm schedules are a better choice. however, this becomes a nontrivial task for tinyml devices as microtvm currently needs to cross-compile, flash and run a new program for every single tuning iteration, which is very time intensive and also degrades the lifetime of the flash memory used by the microcontrollers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/719.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/719.txt new file mode 100644 index 0000000000000000000000000000000000000000..1053539dc6f2ae78829d6013cc7685a95bd4410d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/719.txt @@ -0,0 +1 @@ +we study online learning problems in which a decision maker tries to maximize their cumulative reward over a time horizon t, subject to a set of m resource-consumption constraints. at each t, the decision maker plays an action x t ∈ x, and subsequently observes a realized reward f t (x t ), with f t : x → r, and an m-dimensional vector of resource consumption c t (x t ). our framework extends the well-known bandits with knapsacks (bwk) framework of badanidiyuru et al. . in the bwk model, the resource consumption is monotonic (i.e., c t (•) ∈ r m + for all t ∈ [t]). this framework has numerous motivating applications ranging from dynamic pricing to online ad allocation (see, e.g., ), and it has been extended in numerous directions such as modeling adversarial inputs and other non-stationary input models , more general notions of resources and constraints , contextual and combinatorial bandits . kumar and kleinberg recently proposed a natural generalization of the bwk model in which resource consumption can be non-monotonic, that is, resources can be replenished or renewed over time, so costs are no longer required to satisfy c t (•) ≥ 0.
we call such model bandits with replenishable knapsacks (bwrk).contributions. we propose a general primal-dual template that can handle online learning problems in which the decision maker has to guarantee some long-term resource-consumption constraints and resources can be renewed over time. we show that our framework provides best-of-both-worlds guarantees in the spirit of balseiro et al. : it guarantees a regret bound of õ(t1/2 ) in the case in which (f t , c t ) are i.i.d. samples from a fixed but unknown distribution, and it guarantees a constantfactor competitive ratio in the case in which budgets grow at least linearly in t , or when the possible per-round replenishment is a positive constant. we remark that known best-of-both-worlds frameworks like the one by balseiro et al. cannot be applied to this setting as they assume monotonic resource consumption. in that case, we show that our framework recovers the state-of-the-art rate of 1/ρ by castiglioni et al. , where ρ is the per-iteration budget. our primal-dual template is applicable to any online problem for which a suitable primal regret minimizer is available. therefore, we first provide general guarantees for the framework without making any assumption on the primal and dual regret minimizers being employed (section 4). then, we show how such regret minimizers should be chosen depending on the information available to the learner about the intensity of the budget replenishment (section 5). moreover, we provide explicit bounds that only depend on the guarantees of the primal regret minimizer. in particular, we show how the primal and dual minimizers should be instantiated in the case in which the amount of resources that can be replenished at each time t is known, and in the more challenging case in which it is unknown a-priori to the decision maker. finally, we demonstrate the flexibility of our framework by instantiating it in some relevant settings (section 6). first, we instantiate the framework in the bwrk model by kumar and kleinberg , thereby providing the first positive results for bwrk under adversarial inputs, and the first instance-independent regret bound for the stochastic setting. the latter complements the instance-dependent analysis by kumar and kleinberg . then, we apply the framework to a simple inventory management problem, and to revenue maximization in bilateral trade. in the following, we will also exploit a more general notion of regret, in which the regret minimizer suffers regret only in specific rounds. a banditfeedback primal regret minimizer a p which outputs a strategy x t ∈ x at each t, and subsequently receives as feedback the realized utility function u p t (x t ) = f t (x t ) + λ t , ρc t (x t ) , and a fullfeedback dual regret minimizer a d that receives as input the utility function: u d t : λ → λ, c t (x t )ρ . since the time steps in t g are the only rounds in which a p is invoked, any standard regret minimizer can be used to bound r p tg .in order to have meaningful guarantees in both the adversarial and stochastic setting, we need to choose the regret minimizers a p and a d so that r p tg , r d tg,<τ (d) and r d tg,>τ (d) all grow sublinearly in t .2, we will show that this assumption can be removed by employing a primal regret minimizer a p with slightly stronger regret guarantees. 
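to make the generic template concrete, here is a minimal numpy sketch of a primal-dual loop of the kind described above, with an exp3-style primal learner over finitely many actions and projected online gradient descent on the dual multipliers; the environment, step sizes, and the lagrangian payoff f t (x) + ⟨λ, ρ - c t (x)⟩ follow the general recipe only, not the exact algorithm, tuning, or analysis of this work.

import numpy as np

rng = np.random.default_rng(0)
K, M, T = 3, 1, 2000                 # actions, resources, horizon
rho = 0.3                            # per-round budget / replenishment rate (assumption)
gamma, eta_p, eta_d = 0.1, 0.05, 0.05
lam_max = 5.0                        # projection set D for the dual variables

log_w = np.zeros(K)                  # exp3-style primal learner (log-weights)
lam = np.zeros(M)                    # dual multipliers

for t in range(T):
    w = np.exp(log_w - log_w.max())
    p = (1 - gamma) * w / w.sum() + gamma / K            # mixed exploration distribution
    a = rng.choice(K, p=p)
    f = float(rng.binomial(1, [0.3, 0.5, 0.7][a]))       # realized reward
    c = np.array([[0.2, 0.4, 0.8][a]])                    # realized resource consumption
    payoff = f + float(lam @ (rho - c))                   # lagrangian payoff fed to the primal
    log_w[a] += eta_p * payoff / (p[a] * (1.0 + lam_max)) # importance-weighted, rescaled update
    log_w -= log_w.max()                                  # numerical stability
    lam = np.clip(lam + eta_d * (c - rho), 0.0, lam_max)  # dual projected gradient step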
in general, the primal regret minimizer must meet the minimal requirement of guaranteeing a sublinear regret upper bound e p t,δ with probability at least 1δ, when the adversarial rewards are in .with probability at least δ, the primal regret minimizer a p guarantees a regret e p t,δ against rewards in . then, by re-scaling the realized rewards before giving them in input to the regret minimizer, we get a regret bound of 4 ν e p t against rewards u p t (•) that are in . assume that the dual regret minimizer is generalized fixed share on d, and that the primal regret minimizer has regret at most e p tg,δ against losses in with probability at least 1-δ, for δ ∈ (0, 1]. therefore, we need a dual regret minimizer that plays on r m + , but has sublinear weakly-adaptive regret with respect to lagrange multipliers in d. for any t g ⊂ and any t 1 , t 2 ∈ t g , if the dual regret minimizer is ogd with learning rate η, we have that the regret with respect to λ is upper bounded by. we instantiate the primal-dual framework (algorithm 1) using exp3-sixas the primal regret minimizer, while online gradient descent is employed as the dual regret minimizer.in the first case, since we are using ogd as the dual regret minimizer and, by assumption, it never has to perform projections during t , it holds that for all resources i ∈ :.3, we have that the regret of the dual regret minimizer with respect to 0 over is bounded by: this concludes the proof for the stochastic setting.5, the regret of the primal regret minimizer a p is bounded by: r p tg ≤ 1 + 16m ν. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/72.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/72.txt new file mode 100644 index 0000000000000000000000000000000000000000..173f46a11ef5d29ea4460f1a2a79e426fe3964d1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/72.txt @@ -0,0 +1 @@ + in addition, the most effective approach to combating adversarial attacks is to minimize the loss of adversarial examples generated at each epoch, which is named adversarial training.we observe that the integrity attack is the main source of adversarial attacks in deep learning-based load forecasting models, and the availability attack is only considered for linear models. considering a 'man-in-the-middle' scenario, availability attack is much cheaper than integrity attack as the attacker does not need to manipulate the data and repackage the packets. to fill this research gap, first, we design the availability adversarial attack targeting on piecewise linear neural network (plnn). in the simulation, we compare the performance of the availability adversarial attack with its integrity counterpart and demonstrate the effectiveness of the proposed adversarial training algorithm in a realistic load forecasting task.in the literature, adversarial attack commonly refers to maliciously perturbing a small value in the input data x n , that is, integrity adversarial attack. compared to the integrity attack, the availability attack is much cheaper, as it only requires the attackers to block measurements.to overcome the availability attack defined in (7) and (8), adversarial training can be implemented in which the loss of the worst-case attack is minimized in each epoch. 
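to make the worst-case training signal concrete, the following numpy sketch builds the "max"-mode availability attack by greedily blocking measurements (replacing them with a constant c) and evaluates the resulting adversarial loss that an adversarial-training loop would minimize; the stand-in linear forecaster, the greedy search, and the budget β are illustrative assumptions rather than the exact formulation in (7), (8) and (14). the formal objective this loop approximates is stated next.

import numpy as np

rng = np.random.default_rng(1)
w = rng.normal(size=8)                       # stand-in linear forecaster weights
x = rng.uniform(0.5, 1.5, size=8)            # one window of load measurements
y = float(w @ x)                             # synthetic ground-truth next-step load

def availability_attack(w, x, y, c=0.0, beta=3):
    # greedily block up to beta measurements (set them to the constant c)
    # so as to maximize the squared forecast error
    x_adv, blocked = x.copy(), set()
    for _ in range(beta):
        best_j, best_err = None, -np.inf
        for j in range(len(x)):
            if j in blocked:
                continue
            trial = x_adv.copy()
            trial[j] = c
            err = (float(w @ trial) - y) ** 2
            if err > best_err:
                best_j, best_err = j, err
        x_adv[best_j] = c
        blocked.add(best_j)
    return x_adv

x_adv = availability_attack(w, x, y, c=0.0, beta=3)
adv_loss = (float(w @ x_adv) - y) ** 2        # worst-case loss minimized at each training epoch
print(adv_loss)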
consequently, the loss function of adversarial training for availability attacks can be written as in (14), where l(•), l adv max (•), and l adv min (•) represent the clean loss, and the maximization and minimization of the adversarial loss (11), respectively. remark 2: although the proposed availability adversarial attack and adversarial training are based on feedforward neural networks, they can also be extended to other types of piecewise linear layers, such as the convolutional layer. in the following discussion, the model trained on the clean dataset is referred to as the clean model, while the model trained through adversarial training is referred to as the adver model. furthermore, to distinguish different attack attempts, we use avai(mode, c, β) to represent the availability adversarial attack with mode ∈ {max, min} and c ∈ {0, mean}. similarly, inte(mode, ) represents the integrity adversarial attack with attack strength . meanwhile, as it is much cheaper than the integrity attack, the availability attack is a promising attack strategy for the attacker. as c = 0 is a stronger attack attempt than c = mean, the model trained on avai(mode, 0, 6) has a higher mape than the one trained on avai(mode, mean, 6). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/720.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/720.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb3dd40918f37d930bb8c1c12ff11fe99260e8d5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/720.txt @@ -0,0 +1 @@ +online kernel learning in the regime of the square loss is an important non-parametric online learning method (kivinen et al., 2004; vovk, 2006; sahoo et al., 2014). the learning protocol can be formulated as a game between a learner and an adversary. before the game, the learner selects a reproducing kernel hilbert space (rkhs) h induced by a positive semidefinite kernel function (aronszajn, 1950; shawe-taylor & cristianini, 2004). at each round t = 1, 2, . . ., the adversary sends an instance x t ∈ r d to the learner. then the learner chooses a hypothesis f t ∈ h and outputs f t (x t ). after that, the adversary reveals the true output y t . the learner suffers a loss ℓ(f t (x t ), y t ). the goal is to minimize the regret defined as follows. one of the challenges in minimizing the regret is to balance the computational cost. kernel online gradient descent (kogd) enjoys a regret of o( l(f )) at a computational complexity (space and per-round time) in o(dt ) (zinkevich, 2003; srebro et al., 2010; zhang et al., 2019), where l(f ) = σ_{t=1}^t ℓ(f (x t ), y t ). o( l(f )) implies the "small-loss" bound (wang et al., 2020; zhang et al., 2022). kernel online newton step (kons) (calandriello et al., 2017b) enjoys a regret of o(µ f 2 h + d eff (µ) ln t ) at a computational complexity in o(t 2 ), where µ > 0 is a regularization parameter and d eff (µ) is called the effective dimension, depending on the decay rate of eigenvalues of the kernel matrix (caponnetto & vito, 2007; rudi et al., 2015). the kaar algorithm (gammerman et al., 2004) and the kernel ridge regression algorithm (zhdanov & kalnishkan, 2013) enjoy the same regret bound and computational complexity as kons. if the eigenvalues decay exponentially, then d eff (µ) = o(ln t µ ) (li et al., 2019).
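as an aside, the kogd baseline referenced above is simple enough to sketch directly; the following numpy snippet performs functional gradient descent for the square loss with a gaussian kernel, storing the hypothesis as a kernel expansion over past instances. the step size, kernel width, and data are assumptions, and no budget or approximation scheme is included, which is exactly why the per-round cost grows with t.

import numpy as np

rng = np.random.default_rng(0)

def k_gauss(x, z, sigma=1.0):
    return np.exp(-np.sum((x - z) ** 2) / (2.0 * sigma ** 2))

support, coefs = [], []          # f_t is stored as a kernel expansion over past points
eta = 0.2                        # gradient step size (assumption)

for t in range(500):
    x_t = rng.normal(size=3)
    y_t = np.sin(x_t[0])                                   # synthetic target
    f_x = sum(a * k_gauss(z, x_t) for a, z in zip(coefs, support))
    grad = 2.0 * (f_x - y_t)                               # derivative of the square loss in f(x_t)
    # functional gradient step: f <- f - eta * grad * k(x_t, .)
    support.append(x_t)
    coefs.append(-eta * grad)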
if the eigenvalues decay polynomially with degree p ≥ 1, then d eff (µ) = o((t /µ)1/p ) (jézéquel et al., 2019a).the o(dt ) and o(t 2 ) computational complexities are prohibitive. some approximate algorithms reduce the computational complexity at the expense of regret (lu et al., 2016;calandriello et al., 2017b;a). the fogd algorithm approximating kogd, achieves a regret of õ( t l(f )/d) at a computational complexity in o(dd) where d is a tunable parameter (lu et al., 2016). achieving the optimal regret bound requires d = ω(t ). the sketched-kons algorithm approximating kons, reduces the computational complexity by a factor of γ -2 , but increases the regret by γ > 1 (calandriello et al., 2017b). the pros-n-kons algorithm approximating kons, increases the regret by a factor of õ(d eff (α)) and suffers a space complexity in õ(d eff (α) 2 ) and an average per-round time complexity in õ(d eff (α) 2 + d eff (α) 4 /t ) (calandriello et al., 2017a), where α > 0. although sketched-kons and pros-n-kons can ensure a o(t ) computational complexity, they can not achieve the optimal regret bound. the pkawv algorithm keeps the regret of kons at a computational complexity in õ(t d eff (α) + d 2 eff (α)) (jézéquel et al., 2019a). although pkawv reduces the o(t 2 ) computational complexity, it can not ensure a o(t ) computational complexity. besides, pkawv must store all of the observed examples.in summary, existing approximate algorithms can not achieve nearly optimal regret bounds and a o(t ) computational complexity simultaneously. it is important to rise the question: is it possible to achieve nearly optimal regret bounds at a computational complexity in o(t )? to be specific, the question is equivalent to the following two. (1) is it possible to achieve a regret of o( l(f )) at a o(t ) computational complexity? o( l(f )) matches the lower bound in the stochastic setting (srebro et al., 2010). ( 2) is it possible to achieve a regret of o(µ f 2 h + d eff (µ) ln t ) at a o(t ) computational complexity? the regret bound is optimal up to ln t (jézéquel et al., 2019a). if the eigenvalues of the kernel matrix decay exponentially, then o(µ f 2 h + d eff (µ) ln t ) = o(ln 2 t ). if the eigenvalues decay polynomially with degree p ≥ 1, then o(µ f 2 h +d eff (µ) ln t ) = o(t 1 1+p ln t ) where µ = t 1 1+p . we just need to run a new ons in t j , which implies a j (s j -1) = αi and w j (s j ) = 0. the simple restart technique increases the regret by a factor of o(j). next we redefine the initial configurations. denote by f j-1 = w ⊤ j-1 φ j-1 (•) and f j = w ⊤ j φ j (•). thus it must be w j-1 (s j ) = p 1 2 s(j-1) (p 1 2 s(j) ) ⊤ w j (s j ).besides, at the (s j -1)-th round, w j-1 (s j ) must be the solution of the following projection w j-1 (s j ) = p ws j ( wj-1 (s j ))., 2017a), named con-knos, uses a different w j (s j )., 2016)and two second-order algorithms, pros-n-kons and con-kons(calandriello et al. using the property of projection, we have w j (t + 1)w j 2 aj (t)w j (t)w j 2 aj (t) ≤ wj (t + 1)w j 2 aj (t)w j (t)w j 2 aj (t) = w j (t) -a -1 j (t)∇ j (t)w j 2 aj (t)w j (t)w j 2 aj (t).= -2 w j (t)w j , a -1 j (t)∇ j (t) aj (t) + a -1 j (t)∇ j (t) 2.= -2 w j (t)w j , ∇ j (t) + ∇ ⊤ j (t)a -1 j (t)∇ j (t). , s j+1 -1} gives sj+1-1 t=sj w j (t)w j , ∇ j (t)σ ( ∇ j (t), w j (t)w j ) 2 ,.where we use the following two facts a j (t + 1) =a j (t) + 2σ∇ j (t + 1)∇ ⊤ j (t + 1), a j (s j -1) =a j (s j ) -2σ∇ j (s j )∇ ⊤ j (s j ).the analysis of pros-n-kons(calandriello et al.. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/721.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/721.txt new file mode 100644 index 0000000000000000000000000000000000000000..febd9a8de0938020279eced38b408f4a55eb7ec2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/721.txt @@ -0,0 +1 @@ +the increase in the use of photovoltaic (pv) energy in the world has shown that the useful life and maintenance of a pv plant directly depend on the ability to quickly detect severe faults on a pv plant. to solve this problem of detection, data based approaches have been proposed in the literature. however, these previous solutions consider only specific behavior of one or few faults. most of these approaches can be qualified as supervised, requiring an enormous labelling effort (fault types clearly identified in each technology). in addition, most of them are validated in pv cells or one pv module. that is hardly applicable in large-scale pv plants considering their complexity. alternatively, some unsupervised well-known approaches based on data try to detect anomalies but are not able to identify precisely the type of fault. the most performant of these methods do manage to efficiently group healthy panels and separate them from faulty panels. in that way, this article presents an unsupervised approach called dtw k-means. this approach takes advantage of both the dynamic time warping (dtw) metric and the k-means clustering algorithm as a data-driven approach . the results of this mixed method in a pv string are compared to diagnostic labels established by visual inspection of the panels. the increase in the use of photovoltaic (pv) energy in the world has shown that the useful life and maintenance of a pv plant directly depend on the ability to quickly detect severe faults on a pv plant. alternatively, some unsupervised well-known approaches based on data try to detect anomalies but are not able to identify precisely the type of fault. the most performant of these methods do manage to efficiently group healthy panels and separate them from faulty panels. in that way, this article presents an unsupervised approach called dtw k-means. this approach takes advantage of both the dynamic time warping (dtw) metric and the k-means clustering algorithm as a data-driven approach. the results of this mixed method in a pv string are compared to diagnostic labels established by visual inspection of the panels. the approach proposed in this article is composed of three phases: i) acquisition of the electric current signals; ii) feature extraction using the dtw metric; and iii) clustering using the k-means algorithm. the k-means algorithm receives as a parameter the number of clusters k to build and, based on this, the centroid of each cluster is calculated and the panels are assigned to the different clusters. figure 1 presents examples of results obtained with the approach named dtw k-means on the pv current signals during one day. figure 1b shows the two centroids found by the dtw k-means method overlaid on the current signals. as for the rest, the 4 panels grouped in the red cluster (abnormal cluster) belong to panels with broken glass. figure 1 illustrates that the dtw k-means approach is able to correctly group all healthy panels in one cluster and the faulty ones in another.
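a self-contained sketch of the two ingredients used above, a dynamic-programming dtw distance and a dtw-based clustering step, is given below; for brevity the clustering is a small k-medoids-style loop rather than a full dtw-barycenter k-means, and the toy current curves, cluster count (healthy vs. faulty), and iteration budget are illustrative assumptions.

import numpy as np

def dtw(a, b):
    # classic dynamic-programming dtw distance between two 1-d series
    n, m = len(a), len(b)
    D = np.full((n + 1, m + 1), np.inf)
    D[0, 0] = 0.0
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            cost = abs(a[i - 1] - b[j - 1])
            D[i, j] = cost + min(D[i - 1, j], D[i, j - 1], D[i - 1, j - 1])
    return D[n, m]

def dtw_cluster(signals, k=2, iters=10, seed=0):
    # k-medoids-style clustering of the signals under the dtw distance
    rng = np.random.default_rng(seed)
    dist = np.array([[dtw(a, b) for b in signals] for a in signals])
    medoids = rng.choice(len(signals), size=k, replace=False)
    labels = np.argmin(dist[:, medoids], axis=1)
    for _ in range(iters):
        for c in range(k):
            members = np.flatnonzero(labels == c)
            if len(members):
                within = dist[np.ix_(members, members)].sum(axis=1)
                medoids[c] = members[np.argmin(within)]
        labels = np.argmin(dist[:, medoids], axis=1)
    return labels, medoids

# toy pv current curves over one day: bell-shaped healthy panels vs. attenuated faulty ones
rng = np.random.default_rng(1)
t = np.linspace(0.0, np.pi, 60)
signals = [np.sin(t) * s + 0.02 * rng.normal(size=t.size) for s in (1.0, 1.0, 1.0, 0.5, 0.5)]
print(dtw_cluster(signals, k=2))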
however, after performing the visual inspection, it was found that in some of the healthy panels, there were some snail trail fault types, which are small discolorations. the difficulty for the dtw k-means was that at the time of the data processing, this fault had not yet reduced the power production of the panel. finally, this approach can be extended to different time windows; the approach was tested in windows of up to 3 minutes, presenting equally consistent results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/722.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/722.txt new file mode 100644 index 0000000000000000000000000000000000000000..618f397727e688d40f381d1d6a685d4046789b8d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/722.txt @@ -0,0 +1 @@ +diagnosis in pv systems aims to detect, locate and identify faults. diagnosing these faults is vital to guarantee energy production and extend the useful life of pv power plants. in the literature, multiple machine learning approaches have been proposed for this purpose. however, few of these works have paid special attention to the detection of fine faults and the specialized process of extraction and selection of features for their classification. a fine fault is one whose characteristic signature is difficult to distinguish from that of a healthy panel. as a contribution to the detection of fine faults (especially of the snail trail type), this article proposes an innovative approach based on the random forest (rf) algorithm. this approach uses a complex feature extraction and selection method that improves the computational time of fault classification while maintaining high accuracy. however, few of these works have paid special attention to the detection of fine faults and the specialized process of extraction and selection of features for their classification. as a contribution to the detection of fine faults (especially of the snail trail type), this article proposes an innovative approach based on the random forest (rf) algorithm. this approach uses a complex feature extraction and selection method that improves the computational time of fault classification while maintaining high accuracy. the approach proposed in this article is composed of four phases: i) acquisition of the electric current signal; ii) feature extraction using multiresolution signal decomposition and statistical features; iii) feature selection; and iv) fault classification using the random forest (rf) algorithm. in the first phase, the current signal is captured for each panel every minute. in the second, the current signal is decomposed using an iterative decomposition based on wavelets . the third stage uses the pca (principal component analysis) algorithm to reduce the dimensionality of the feature matrix built in the second stage. finally, the reduced feature matrix is used as input to the rf algorithm, which builds multiple decision trees during the training phase and generates the final class by majority voting. figure 2 presents the current signals of the healthy panels and the snail trail panels during one day. the classification results of the rf algorithm were evaluated using the f-score and the confusion matrix. the classification results are presented in figure 3 using the confusion matrix.
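before the detailed results, here is a condensed sketch of the pipeline just described: wavelet-based multiresolution decomposition, simple statistics per sub-band, pca for dimensionality reduction, and a random forest classifier. the wavelet family, decomposition level, and the synthetic "healthy" vs. "snail trail" current curves are assumptions for illustration (pywavelets and scikit-learn are assumed available).

import numpy as np
import pywt
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import make_pipeline

rng = np.random.default_rng(0)

def wavelet_features(signal, wavelet="db4", level=3):
    # multiresolution signal decomposition + simple statistics per sub-band
    coeffs = pywt.wavedec(signal, wavelet, level=level)
    return np.array([stat for c in coeffs for stat in (c.mean(), c.std(), np.abs(c).max())])

# toy daily current curves: class 0 = healthy, class 1 = slightly attenuated (fine fault)
t = np.linspace(0.0, np.pi, 128)
X, y = [], []
for label, scale in [(0, 1.0), (1, 0.93)]:
    for _ in range(40):
        X.append(wavelet_features(np.sin(t) * scale + 0.02 * rng.normal(size=t.size)))
        y.append(label)
X, y = np.array(X), np.array(y)

clf = make_pipeline(PCA(n_components=5), RandomForestClassifier(n_estimators=200, random_state=0))
clf.fit(X, y)
print("training accuracy:", clf.score(X, y))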
as shown in figure 3, rf managed to classify all the healthy panels and almost all (75%) of the panels with snail trail even with the reported high similarity between the signals of the two classes. the approach proposed in this article starts with n randomly selected samples for training each tree in the rf algorithm. the approach presented in this article does not require multiple sensors and performs efficiently on snail trail fault classification just by capturing the current signal from the panels. likewise, the proposed approach is capable of classifying the panels, both healthy and snail trail, despite a small number of training samples. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/723.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/723.txt new file mode 100644 index 0000000000000000000000000000000000000000..010f542ec2bccd9b9299ade8dea2f8790dbc1592 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/723.txt @@ -0,0 +1 @@ +current reinforcement learning policies are heavily dependent upon the usage of a reward function. however, for advanced problems such as learning behavior for a self-driving car or a robot with many degrees of freedom, the reward function is very difficult or impossible to design to effectively encompass all human considerations. inverse reinforcement learning algorithms are a class of algorithms that attempt to solve this issue by learning a reward function from expert demonstrations, and then subsequently using any manner of standard reinforcement learning algorithms to learn an optimal policy based on that learned reward function . inverse reinforcement learning algorithms require a dataset of expert demonstrations to derive a reward function from. however, this reward function can often be inaccurate and highly variable. furthermore, the learned reward function can often be misaligned with the true intentions of the human user, even if it does accurately account for the demonstrated data it learned from . one recent advance in the training of inverse reinforcement learning algorithms is to use additional expert data about preferences between demonstrations . the general idea is to first train an inverse reinforcement learning algorithm on a dataset of demonstrations, and then subsequently actively query a human expert to select a preference between generated demonstrations based on the learned prior of trajectories from the dataset of demonstrations. the policy can then be updated using the new expert data. we identify two general categories of research directions within the field of active learning. first, one could learn how to more effectively utilize the information provided by the human expert with each query. second, one could learn how to increase the amount of information provided by each individual query. we can call the two directions information derivation (for learning how to maximize the information derived from a single query) and information optimization (for learning how to ask more optimal future queries), respectively. information optimization approaches can be further divided into approaches that take a policy/learning algorithm-based approach as opposed to a data-based approach. the key observation of our work is that both research directions can be viewed as an informational choice made by the robot at each step. more formally, we can view the active learning querying process itself as a markov decision process (mdp) with non-markovian reward.
the action space and the transition function encompass both the choices of information type the robot can choose to query as well as the information derived by the robot from the query. in other words, both directions of research focus on determining the best setup for the querying mdp.we organize the field of query-based active learning using this formalism. furthermore, we outline future research directions using the proposed formalism and discuss applications of the outlined methods. inverse reinforcement learning algorithms are a class of algorithms that attempt to solve this issue by learning a reward function from expert demonstrations, and then subsequently using any manner of standard reinforcement learning algorithms to learn an optimal policy based off that learned reward function. the general idea is to first train an inverse reinforcement learning algorithm on a dataset of demonstrations, and then subsequently actively query a human expert to select a preference between generated demonstrations based off the learned prior of trajectories from the dataset of demonstrations. the action space and the transition function encompass both the choices of information type the robot can choose to query as well as the information derived by the robot from the query. we further define the action space a ′ as the set of potential queries (we use the term action and query interchangeably moving forward) we can ask using the current state s ∈ s ′1, an observation space ω that represents the set of human responses to a "query" a ∈ a ′ , and a set of conditional observation probabilities o : ω × s ′ × a ′ → that represent the probability of a response o ∈ ω given the action a ∈ a ′ and the new state s ′ ∈ s ′ . in this new system, the reward function r ′ represents the benefit of asking our query for the state of the system and its capability to learn the original reward function r ∈ r. since the transition function t operates as a tuple, we can decompose the transition function using a dataset transformation function f ∈ f : ξ → ξ and a learning algorithm. after the algorithm receives the response, it will add the selected sample to its dataset d and relearn the reward function r ∈ r, which can be stated as a deterministic transition between two states in our new system. this avenue has received much attention in the active learning literature due to the simplicity of the action space -the complexity of strategies utilizing this type of action space usually falls within the query selection algorithm. formally, the dataset d remains constant throughout, and the function l(d, r) can be modified to only update the reward function r as a result of the response to the given query. in contrast to comparison queries, which use more features in the action space, this approach minimizes action space features and instead moves complexity to the response space ω, which in this case would be equivalent to the space of all trajectories ξ. in the future, we hope this formalism will enable research on active querying with more integrated and complex learning algorithm as well as making it easier to do active learning research in new directions. normal methods also require many human samples to function properly -this approach allows for consideration for the human expert's time by probabilistically determining whether or not a given query would be valuable to the learning process as a whole. 
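the following short sketch makes the "information mdp" formalism above concrete: the state bundles the demonstration dataset d and the current reward estimate r, an action is a query, the observation is the human response, and the transition folds the response into d and relearns r = l(d, r). all concrete types, and the placeholder query-value reward, are illustrative assumptions.

from dataclasses import dataclass
from typing import Any, Callable, List, Optional, Tuple

Trajectory = List[Any]                      # a demonstration or generated rollout
RewardFn = Callable[[Any], float]           # maps a state (or feature) to a reward value

@dataclass
class QueryState:
    dataset: List[Trajectory]               # D: demonstrations plus past query outcomes
    reward: RewardFn                        # r: current learned reward estimate

@dataclass
class Query:                                # an element of the action space A'
    kind: str                               # e.g. "preference" or "demonstration"
    candidates: Optional[Tuple[Trajectory, Trajectory]] = None

def transition(state: QueryState,
               query: Query,
               response: Any,
               update_dataset: Callable[[List[Trajectory], Query, Any], List[Trajectory]],
               learn: Callable[[List[Trajectory], RewardFn], RewardFn]) -> QueryState:
    # deterministic transition: fold the human response into D, then relearn r = L(D, r)
    new_dataset = update_dataset(state.dataset, query, response)
    return QueryState(dataset=new_dataset, reward=learn(new_dataset, state.reward))

def query_value(old: QueryState, new: QueryState, probe_states: List[Any]) -> float:
    # placeholder reward r' for the querying mdp: how much the reward estimate moved
    return sum(abs(new.reward(s) - old.reward(s)) for s in probe_states)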
if our learning algorithm l has an element of randomness such that the output of the update l(d, r) is not deterministic, then we could instead represent r as a distribution over possible reward functions that l could output as a result of a query and data update instead of just one possibility. one potential future research direction could involve the design of a meta-rl algorithm that operates and optimizes on the "information mdp" by utilizing an approximation of query value as the reward function and then optimizing based on that. by considering and formalizing the full complexities of the active learning process, we believe we take a small step towards better understanding how learning processes can function as a whole, and we hope our work contributes to a future where in which robots fully take advantage of all the complexities inherent within their learning processes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/724.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/724.txt new file mode 100644 index 0000000000000000000000000000000000000000..597877ff6dab9bc67d178c460ef0a871632e98ca --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/724.txt @@ -0,0 +1 @@ +the development of advanced driver assistance systems is an essential goal for car manufacturers. as can be seen from a survey, driver assistance systems are by now an important purchase criterion for over 60% of potential buyers . in addition, a unique selling point over the competition and thus a competitive advantage can be gained through the further automation of vehicles. an example is the system from mercedes-benz, which was the first to receive approval for autonomous driving at level 3 in december 2021.autonomous driving at level 3 enables the driver to divert his attention from what is happening on the road in certain situations. the vehicle takes over the lateral and longitudinal guidance and independently recognizes errors or departure from system limits. in such a case, the system would prompt the driver to take back control of the vehicle. this transfer of vehicle control is a crucial challenge. an autonomous system must be able to recognize whether the driver is ready to take over control of the vehicle again. to ensure this, some form of driver monitoring is required. one way of detecting the driver's condition is a hands-on detection (hod). this is a system that detects whether the driver's hands are on the steering wheel and therefore control over the vehicle can safely be transferred.a hod can be implemented inexpensively by measuring steering angle and torque acting on the steering wheel. the necessary sensors are required for the servo-assistance, anyway. however, there is the disadvantage that false hands-off messages often occur in situations where the driver does not exert any significant force for lateral guidance. in such a case, the driver would be asked to put his hands back on the steering wheel, even though he has not let go of the steering wheel.a better hod variant, also used in this paper, uses a capacitance sensor. this allows to detect the driver's contact with the steering wheel, without relying on any exerted force to the steering wheel. however, the evaluation of capacitance values is more complex, since these are dependent on the driver and his environment.in this paper a machine learning algorithm is implemented, which is able to distinguish between a hands-on and a handsoff situation based on the capacitance values. 
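a minimal sketch of the classification step described above is shown next: windows of capacitance readings are converted into gradients (change over time) so that no per-driver calibration is needed, and a small neural network separates hands-on from hands-off windows. the window length, the synthetic signal model, and the network size are assumptions for illustration.

import numpy as np
from sklearn.neural_network import MLPClassifier

rng = np.random.default_rng(0)
WIN = 20                                     # samples per window, roughly 200 ms (assumption)

def window_features(capacitance_window):
    # use the change in capacitance over time instead of absolute values,
    # so no per-driver or per-environment calibration is required
    return np.diff(capacitance_window)

X, y = [], []
for _ in range(300):
    base = rng.uniform(80.0, 120.0)          # driver/environment-dependent offset
    hands_on = int(rng.integers(0, 2))
    sig = np.full(WIN, base)
    if hands_on:
        sig = sig + np.linspace(0.0, rng.uniform(3.0, 8.0), WIN)   # touch raises capacitance
    sig = sig + 0.1 * rng.normal(size=WIN)
    X.append(window_features(sig))
    y.append(hands_on)

clf = MLPClassifier(hidden_layer_sizes=(16,), max_iter=2000, random_state=0)
clf.fit(np.array(X), np.array(y))
print("training accuracy:", clf.score(np.array(X), np.array(y)))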
the ai model is then ported to a micro controller and the reliability and response time of the hod is evaluated. a maximum response time of 200ms is assumed to be appropriate for timely hod. this paper aims to answer the question: can neural networks increase reliability of hod within a response time of 200ms? ii. background two techniques are combined in this paper to realize hod: capacity measurement and machine learning.a hod can be implemented inexpensively by measuring steering angle and torque acting on the steering wheel. in such a case, the driver would be asked to put his hands back on the steering wheel, even though he has not let go of the steering wheel. this allows to detect the driver's contact with the steering wheel, without relying on any exerted force to the steering wheel.in this paper a machine learning algorithm is implemented, which is able to distinguish between a hands-on and a handsoff situation based on the capacitance values.one option to realize hod is detection of a contact between the driver and the steering wheel by measuring the change in capacitance. touching the steering wheel is detected by a change in capacitance in a sensor element, with the capacitance being calculated indirectly from the measured frequency. if the driver puts his hands on the steering wheel, the capacity of the sensor element is increased, leading to a reduction in the frequency of the resonant circuit. on the one hand, the values of the steering angle or torque sensor are used and on the other hand, the capacitance values of the steering wheel are considered to distinguish between a hands-on and hands-off situation.these approaches bear a potential problem: if the driver only touches the steering wheel very lightly, the measured average maximum sensor value decreases. if the driver brings both hands close to the steering wheel without touching it, this could trigger a similar increase in capacity as previous two-finger touch. the hod would then recognize a hands-on situation even though the driver is not touching the steering wheel. this was done because the machine learning model should learn to classify a hands-on or hands-off situation based on capacitance values of just a few hundred milliseconds to speed up the reaction time of the hod. to eliminate this issue, the absolute capacitance values were converted into gradient values, focussing on change in capacity over time instead.to test if the system recognizes touches by the driver reliably, the steering wheel was touched with two fingers, four fingers one hand and two hands at the points shown in figure1. by using the change in capacitance instead of the absolute values in the machine learning model, the problem of normalizing the input values was solved and the hod worked without external calibration, independent of the driver and environment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/725.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/725.txt new file mode 100644 index 0000000000000000000000000000000000000000..09536af28830c96aebd238a3fe02e0090d8d28a4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/725.txt @@ -0,0 +1 @@ +a stochastic multi-armed bandit is an online learning problem where a learner sequentially interacts with an environment over n rounds. in each round, the learner takes an action and receives its stochastic reward. the goal of the learner is to maximize its expected cumulative reward over n rounds. 
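a minimal simulation of this interaction protocol, with a standard optimism-based index rule added purely for illustration (the specific frequentist and bayesian algorithms analyzed in this work are introduced later), looks as follows.

import numpy as np

rng = np.random.default_rng(0)
means = np.array([0.2, 0.5, 0.7])      # unknown mean rewards of a synthetic instance
K, n = len(means), 5000

counts = np.ones(K)                    # pull each action once to initialise the estimates
sums = rng.binomial(1, means).astype(float)
total_reward = sums.sum()

for t in range(K + 1, n + 1):
    index = sums / counts + np.sqrt(2.0 * np.log(t) / counts)   # optimistic ucb-style index
    a = int(np.argmax(index))
    r = rng.binomial(1, means[a])
    counts[a] += 1.0
    sums[a] += r
    total_reward += r

print("realized regret:", n * means.max() - total_reward)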
the mean rewards of the actions are unknown a priori but can be learned by taking the actions. therefore, the learner faces the exploration-exploitation dilemma: explore, and learn more about the actions; or exploit, and take the action with the highest estimated reward. bandits have been successfully applied to problems where uncertainty modeling and subsequent adaptation are beneficial. one example are recommender systems , where the actions are recommended items and their rewards are clicks. another example is hyper-parameter optimization , where the actions are values of the optimized parameters and their reward is the optimized metric.cumulative regret minimization in stochastic bandits has been traditionally studied in two settings: frequentist and bayesian . in the frequentist setting, the learner minimizes the regret with respect to a fixed unknown bandit instance. in the bayesian setting, the learner minimizes the average regret with respect to bandit instances drawn from a prior distribution. the instance is unknown but the learner knows its prior distribution. the bayesian setting allows surprisingly simple and insightful analyses of thompson sampling. one fundamental result in this setting is that linear thompson sampling has a comparable regret bound to linucb in the frequentist setting . moreover, many recent meta-and multi-task bandit works adopt the bayes regret to analyze the stochastic structure of their problems, that the bandit tasks are similar because their parameters are sampled i.i.d. from a task distribution.many bandit algorithms have frequentist regret bounds that match a lower bound. as an example, in a k-armed bandit with the minimum gap ∆ and horizon n, the gap-dependent o(k∆ -1 log n) regret bound of ucb1 matches the gap-dependent ω(k∆ -1 log n) lower bound of lai and robbins . moreover, the gap-free õ( √ kn) regret bound of ucb1 matches, up to logarithmic factors, the gap-free ω( √ kn) lower bound of auer et al. . the extra logarithmic factor in the õ( √ kn) bound can be eliminated by modifying ucb1 . in contrast, and despite the popularity of the model, matching upper and lower bounds mostly do not exist in the bayesian setting. specifically, lai proved asymptotic c h log 2 n upper and lower bounds, where c h is a prior-dependent constant. however, all recent bayes regret bounds are õ( √ n) . this leaves open the question of finite-time logarithmic regret bounds in the bayesian setting.in this work, we answer this question positively and make the following contributions:1. we derive the first finite-time logarithmic bayes regret upper bounds for a bayesian upper confidence bound (ucb) algorithm. the bounds are o(c ∆ log n) and o(c h log 2 n), where c h and c ∆ are constants depending on the prior distribution h and the gaps of random bandit instances sampled from h, respectively. the latter matches the lower bound of lai asymptotically. when compared to prior õ( √ n) bounds, we better characterize low-regret regimes, where the random gaps are large. 2. to show the value of prior as a side information, we also derive a finite-time logarithmic bayes regret upper bound for a frequentist ucb algorithm. the bound changes only little as the prior becomes more informative, while the regret bound for the bayesian algorithm eventually goes to zero. the bounds match asymptotically when n → ∞ and the prior is overtaken by data.3. 
to show the generality of our approach, we prove a o(d c ∆ log 2 n) bayes regret bound for a bayesian linear bandit algorithm, where d denotes the number of dimensions and c ∆ is a constant depending on random gaps. this bound also improves with a better prior.4. our analyses are a major departure from all recent bayesian bandit analyses, starting with russo and van roy . roughly speaking, we first bound the regret in a fixed bandit instance, similarly to frequentist analyses, and then integrate out the random gap.5. we show the tightness of our bounds empirically and compare them to prior bounds.this paper is organized as follows. in section 2, we introduce the setting of bayesian bandits. in section 3, we present a bayesian upper confidence bound algorithm called bayesucb . in section 4, we derive finite-time logarithmic bayes regret bounds for bayesucb, in both multi-armed and linear bandits. these are the first such bounds ever derived. in section 5, we compare our bounds to prior works and show that one matches an existing lower bound asymptotically. in section 6, we evaluate the bounds empirically. we conclude in section 7. as an example, in a k-armed bandit with the minimum gap ∆ and horizon n, the gap-dependent o(k∆ -1 log n) regret bound of ucb1matches the gap-dependent ω(k∆ -1 log n) lower bound of lai and robbins. we derive the first finite-time logarithmic bayes regret upper bounds for a bayesian upper confidence bound (ucb) algorithm. the bounds are o(c ∆ log n) and o(c h log 2 n), where c h and c ∆ are constants depending on the prior distribution h and the gaps of random bandit instances sampled from h, respectively. to show the value of prior as a side information, we also derive a finite-time logarithmic bayes regret upper bound for a frequentist ucb algorithm. the bound changes only little as the prior becomes more informative, while the regret bound for the bayesian algorithm eventually goes to zero. to show the generality of our approach, we prove a o(d c ∆ log 2 n) bayes regret bound for a bayesian linear bandit algorithm, where d denotes the number of dimensions and c ∆ is a constant depending on random gaps. the bayesian ucb of action a at round t is u t,a = θt,a + c t,a , where c t,a = 2σ 2 t,a log(1/δ) is the confidence interval width and δ ∈ (0, 1) is a failure probability of the confidence interval. the bayesian ucb of action a at round t is u t,a = a ⊤ θt + c t,a , where c t,a = 2 log(1/δ)∥a∥ σt is the confidence interval width, δ ∈ (0, 1) is a failure probability of the confidence interval, and ∥a∥ m = √ a ⊤ m a. one bound matches an existing lower bound of laiasymptotically and all improve upon prior õ( √ n) bounds.in frequentist bandit analyses, it is standard to compare asymptotic lower bounds to finite-time upper bounds because finite-time logarithmic lower bounds do not exist. to compare our bound fairly to existing õ( √ n) bounds, we derive an õ( √ n) bound in appendix c, by a relatively minor change in the proof of theorem 6. we do not do this because the main reason for deriving the o(c h log 2 n) bound in theorem 5, an upper bound on the corresponding o(c ∆ log n) bound, is that it matches the lower bound in (2). specifically, for any action a ∈ a on event e t , a ⊤ θ -u t,a = a ⊤ (θ -θt ) -c t,a ≤ c t,a -c t,a = 0 . specifically, for any action a ∈ a on event e t , u t,a -a ⊤ θ = a ⊤ ( θt -θ) + c t,a ≤ c t,a + c t,a = 2c t,a . 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/726.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/726.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb3dd40918f37d930bb8c1c12ff11fe99260e8d5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/726.txt @@ -0,0 +1 @@ +condition monitoring and fault detection are amongst the significant topics in wind turbine research. as the size and number of wind turbines continue to grow in line with the global renewable energy targets, fault detection of wind turbines became even more important. faults generally lead to downtimes in wind turbine operation and result in a decrease in the amount of energy conversion. moreover, unpredicted faults can have detrimental effects on other parts of wind turbines which contribute to decrease in lifetime of overall system. therefore, early detection and isolation of wind turbine faults are required.fault detection approaches can be investigated in two main classes which are model-based and data driven methods. in model-based fault detection, an explicit mathematical model of system is generated and outputs of the real system are evaluated comparing to responses of the mathematical model. model-based methods have the advantage of not requiring high frequency data, however their performance highly depends on the accuracy of the mathematical model which is difficult to build in real world applications and the model-based applications have a limited capability in supplying the details about wind turbine faults . data-driven methods are based on the analysis of measurements collected from the corresponding system. in the early data driven fault detection studies, multivariate statistics were widely used. with the advancements of intelligent algorithms, machine learning methods became more widespread. collection of wind turbine data for fault detection can be done in various ways. first of them is to collect data from specifically mounted sensors - . as these sensors are mounted for condition monitoring and fault detection aims, useful high frequency data can be obtained. however, this approach brings additional costs. the other alternative is to use data from supervisory control and data acquisition (scada) system - . the main advantage of this alternative is that scada is a built-in part of most modern wind turbines. therefore, additional hardware costs are not required. however, scada systems were not initially built for fault detection aims, so there are imperfections in data such as high proportion of missing values. moreover, the data output interval of wind turbine scada systems is generally 10 min. this low output rate results in the loss of high frequency data which is very useful in fault detection studies. to reduce the disadvantages of using scada data, intelligent data processing approaches are required. in this paper, a data-driven method using scada data was realized by implementing feature selection techniques along with artificial neural networks (ann).scada data set consists of many measurements such as wind turbine operation, wind speed characteristics, temperature values and fault information. in addition to these directly collected data, a feature construction step was conducted to have more indications on incipient faults. by this way, the number of the input features increased even further. 
feature selection is to identify a subset of relevant features from the overall feature set which is a compulsory process in machine learning applications involving moderate and high number of input features - . it brings many advantages such as preventing overfitting that can be caused by large number of features, reducing computational burden and training time, increasing accuracy of model. feature selection process can be employed by different approaches namely filter, wrapper and embedded techniques. filter approaches evaluate features without utilizing any classification algorithm. they rank features independently based on a selected criteria , . wrapper methods select and evaluate a subset of features together and search for the best subset describing the model . in embedded approaches, the selection is a part of the learning process . in this paper, a hybrid feature selection method was employed. in the first step, various filter methods were applied to find out and exclude the features that are non-discriminant. the remaining features which are informative on faults were evaluated in a wrapperbased approach to get the knowledge about mutual relations or additional redundancies. by combining these two approaches, it was aimed to benefit from the advantages of both. filter methods are practical in large data sets in terms of training time and complexity. however, they are not able to evaluate mutual dependencies between features. therefore, after using the filter approach as a pre-processing step, wrapper method was employed to eliminate redundancies and find subsets based on evaluating mutual relations.the type of wind turbine faults observed in this paper are generator heating faults. they are non-fatal but frequently occurring wind turbine faults. these type of faults show less indications than fatal faults but are one of the important reasons of long downtime durations. the success of detecting frequent/non-fatal faults are required to be increased especially in terms on reducing false alarms - .the layout of this paper is as follows; in section 2, the data were described in detail. in section 3, the hybrid feature selection method was presented. in section 4 results were presented and in section 5 conclusion was given, respectively. as the size and number of wind turbines continue to grow in line with the global renewable energy targets, fault detection of wind turbines became even more important. model-based methods have the advantage of not requiring high frequency data, however their performance highly depends on the accuracy of the mathematical model which is difficult to build in real world applications and the model-based applications have a limited capability in supplying the details about wind turbine faults. however, scada systems were not initially built for fault detection aims, so there are imperfections in data such as high proportion of missing values. in this paper, a data-driven method using scada data was realized by implementing feature selection techniques along with artificial neural networks (ann).scada data set consists of many measurements such as wind turbine operation, wind speed characteristics, temperature values and fault information. 
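a condensed sketch of the hybrid scheme described above, a filter step that ranks features individually followed by a wrapper step that searches feature subsets with the downstream classifier in the loop, might look as follows with scikit-learn; the synthetic data, the mutual-information filter, the retention threshold, and the network size are illustrative assumptions rather than the exact procedure used in this work.

import numpy as np
from sklearn.feature_selection import SequentialFeatureSelector, mutual_info_classif
from sklearn.neural_network import MLPClassifier

rng = np.random.default_rng(0)
X = rng.normal(size=(400, 30))                       # stand-in for the 377 scada features
y = (X[:, 0] + 0.8 * X[:, 3] - 0.5 * X[:, 7] + 0.3 * rng.normal(size=400) > 0).astype(int)

# filter step: rank features individually and drop the least informative half
mi = mutual_info_classif(X, y, random_state=0)
keep = np.flatnonzero(mi > np.quantile(mi, 0.5))
X_filtered = X[:, keep]

# wrapper step: sequential forward selection with the classifier in the loop
ann = MLPClassifier(hidden_layer_sizes=(16,), max_iter=1000, random_state=0)
sfs = SequentialFeatureSelector(ann, n_features_to_select=5, direction="forward", cv=3)
sfs.fit(X_filtered, y)
print("selected original feature indices:", keep[sfs.get_support()])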
feature selection is to identify a subset of relevant features from the overall feature set which is a compulsory process in machine learning applications involving moderate and high number of input features-.besides the directly collected data, many additional features were generated with the aim of supplying inputs that carry information on incipient faults to improve the success of the fault detection system. the generated data consist of difference data, time series data, statistical data and knowledge-based data.difference data: the differences between the temperature measurements, operational data and wind speed measurements which may provide information on fault formations were calculated.by gathering the constructed features with the original features, a data set with 377 features was obtained. for example, exhaustive search is a kind of exponential search algorithms where all possible subsets of the feature space are used in the wrapper models to find the best combination, however even after the reduction of feature number by filter methods it is still computationally expensive in this case.as a set of relevant features were obtained in the first part of this research, the remaining number of features became relatively smaller so the use of sequential search is appropriate to remove redundancies, evaluate mutual relations and increase the accuracy of the system. for the second case, a heuristic feature construction and selection method mainly by using the original features and some additional features constructed by expert knowledge was used. as a result, this work contributes to the solution of one of the main challenges of wind turbine fault detection using scada data. in the feature construction part, in addition to the original features directly collected from scada system, various features were generated with the aim of providing stronger fault indications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/727.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/727.txt new file mode 100644 index 0000000000000000000000000000000000000000..7149650d8bdb0d647cef59377805da55a50cb2b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/727.txt @@ -0,0 +1 @@ +hypergraphs represent a natural extension of graphs, whereby each hyperedge can link an arbitrary number of hypernodes (or nodes for short). this flexibility more directly facilitates the modeling of higher-order relationships between entities (chien et al., 2022;benson et al., 2016;2017) leading to strong performance in diverse realworld situations (agarwal et al., 2005; li & milenkovic, † work completed during an internship at the aws shanghai ai lab. 1 school of computer science, fudan university 2 institute of modern languages and linguistics, fudan university 3 amazon 4 peng cheng laboratory 5 shanghai collaborative innovation center of intelligent visual computing. correspondence to: yuxin wang , quan gan , xipeng qiu , xuanjing huang , david wipf . proceedings of the 40 th international conference on machine learning, honolulu, hawaii, usa. pmlr 202, 2023. copyright 2023 by the author(s). 2017; feng et al., 2019;huang & yang, 2021). currently, hypergraph-graph-based modeling techniques frequently rely, either implicitly or explicitly, on some type of expansion (e.g., clique, star), which effectively converts the hypergraph into a regular graph with a new edge set and possibly additional nodes as well. 
for example, one approach is to first extract a particular expansion graph and then build a graph neural network (gnn) model on top of it (zhang et al., 2022). we instead adopt a different starting point that both allows us to incorporate multiple expansions if needed, and also lets us transparently explore the integrated role of each expansion within a unified framework. to accomplish this, our high-level strategy is to first define a family of parameterized hypergraph energy functions, with regularization factors that we later show closely align with popular existing expansions. we then demonstrate how the minimizers of such energy functions can be treated as learnable node embeddings and trained end-to-end via a bilevel optimization process. namely, the lower-level minimization process produces optimal features contingent on a given set of parameters, while the higher-level process trains these parameters (and hence the features they influence) w.r.t. downstream node classification tasks. to actualize this goal, after presenting related work in section 2, we provide relevant background and notation w.r.t. hypergraphs in section 3. the remainder of the paper then presents our primary contributions, which can be summarized as follows: • we present a general class of hypergraph-regularized energy functions in section 4 and elucidate their relationship with traditional hypergraph expansions that have been previously derived from spectral graph theory. • we demonstrate how minimizers of these energy functions can serve as principled, trainable features for hypergraph prediction tasks in sections 5 and 6. and by approximating the energy minimizers using provably convergent proximal gradient steps, the resulting architecture borrows the same basic structure as certain graph neural network layers that: (i) have been fine-tuned to accommodate hypergraphs, and (ii) maintain the inductive bias infused by the original energy function. • the resulting framework, which we name phenomnn for purposeful hyper-edges in optimization motivated neural networks, is applied to a multitude of hypergraph node classification benchmarks in section 7, achieving competitive or sota performance in each case. for the clique expansion (zien et al.), nodes are connected whenever they share a hyperedge; the star expansion instead augments the node set to {v 1 , . . . , v n+m } with an edge set e s defined such that {v i , v n+k } ∈ e s iff b ik = 1. importantly for our purposes, though, if the input graph is chosen to be a hypergraph clique expansion, and we set d = d c , a = a c , λ = λ 0 , and λ 1 = 0, then we arrive at a special case of phenomnn simple from (25).
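the next numpy sketch illustrates the two pieces discussed above in their simplest form: constructing a clique-expansion adjacency from a hypergraph incidence matrix b, and approximately minimizing a quadratic hypergraph-regularized energy ||y - x||^2 + λ tr(y^t l y) by plain gradient descent. this is a generic illustration of the energy-minimization view, not the actual phenomnn energy, parameterization, or proximal update.

import numpy as np

def clique_expansion(B):
    # B is the n x m incidence matrix (b_ik = 1 iff node i belongs to hyperedge k);
    # two nodes are adjacent in the clique expansion iff they share a hyperedge
    A = (B @ B.T > 0).astype(float)
    np.fill_diagonal(A, 0.0)
    return A

def energy_embeddings(B, X, lam=1.0, step=0.05, iters=100):
    # approximately minimize E(Y) = ||Y - X||_F^2 + lam * tr(Y^T L Y) by gradient
    # descent, where L is the laplacian of the clique expansion
    A = clique_expansion(B)
    L = np.diag(A.sum(axis=1)) - A
    Y = X.copy()
    for _ in range(iters):
        Y -= step * (2.0 * (Y - X) + 2.0 * lam * (L @ Y))
    return Y

# toy hypergraph: 5 nodes, 2 hyperedges {0,1,2} and {2,3,4}; 3-dimensional input features
B = np.array([[1, 0], [1, 0], [1, 1], [0, 1], [0, 1]], dtype=float)
X = np.random.default_rng(0).normal(size=(5, 3))
print(energy_embeddings(B, X))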
of course one might not naturally conceive of the more generalized form that leads to phenomnn simple , and by extension phe-nomnn, without the interpretable grounding of the underlying hypergraph energy functions involved., such a graph could be assigned the edge types "hypergraph node belongs to hyperedge" and "hyperedge belongs to hypergraph node").in this section we evaluate phenomnn simple and phe-nomnn on various hypergraph benchmarks focusing on hypernode classification and compare against previous sota approaches.for datasets from(zhang et al., 2022)compared to previous baselines. for example, in table5(appendix b) we demonstrate the effect of different hypergraph energy function terms, which are associated with different hypergraph expansions per proposition 4. in brief, we fix other hyperparameters and obtain results across different hidden dimensions with phenomnn simple for simplicity; results for phe-nomnn are similar. with the potential to better understand hypergraph properties and expand their utility, we have introduced an expressive family of hypergraph energy functions and fleshed out their connection with previous hypergraph expansions., 2022)., 2023)proposes a quite interesting model called ed-hnn (for equivariant diffusion hypergraph neural network). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/728.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/728.txt new file mode 100644 index 0000000000000000000000000000000000000000..fb2382d288e3c9e5d6f0b5cae95c659e3e225d0a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/728.txt @@ -0,0 +1 @@ +in order for a regression model to make causal predictions, the effect of confounders must be disentangled from the effect of the treatment. for this reason, causal inference is closely related to the problem of domain shift, since the outcome predictor may be learned on observational data while being expected to perform well on the hypothetical domain with fully randomized treatments. more often than not, the available covariates are imperfect proxies for all the confounders in the causal system. this further compounds the task of causal inference, as the hidden confounders must somehow be taken into account. the best hope in these cases is to produce "ignorance intervals" that partially identify the causal estimands. the tighter the intervals, the more useful the partial identification, which depends on what can be said about the hidden confounders.a sensitivity model (rosenbaum and rubin, 1983) in causal inference is a structural assumption (manski, 2003) about the possible behavior of hidden confounders. it allows causal estimands to be partially identified as long as the extent of hidden confounding is consistent with the sensitivity model. the dependence of the treatment assignment on confounders, i.e. the propensity of treatments, is what makes a study observational rather than a fully randomized experiment. we consider sensitivity models that bound the complete propensity (colloquially, the true propensity of treatment assignments for an individual, taking into account all relevant variables, observed or not) in terms of the nominal propensity (based just on observed covariates, allowing it to be estimated by regression.) sensitivity models of this kind were first introduced by tan (2006) and have become popular due to their generality and simplicity. 
the most common setting for these models, in line with tan's initial formulation, is of binary treatments (jesson et al., 2021a;kallus et al., 2019;dorn et al., 2021), in which the marginal sensitivity model (msm) bounds the ratio of nominal-propensity odds to complete-propensity odds. when that ratio is unit, and the complete propensity equals the nominal propensity at all points, then the covariates are adequate to explain all the confounders. it is worthwhile to broaden the notion of the msm in light of recent developments with msm-like sensitivity models for continuous treatments (jesson et al., 2022;marmarelis et al., 2023) and other nonbinary domains. to accommodate these settings, we consider a general form of sensitivity model. in this paper, we explore prediction intervals of causal outcomes due to interventions on the treatment variable, termed outcome intervals, that incorporate empirical uncertainties (jesson et al., 2020) in addition to the orthogonal concept of hidden-confounding uncertainty. outcome intervals predict individual outcomes of treatments disentangled from confounders, relying on a sensitivity model to guide partial identification in the presence of hidden confounders. . an illustration of the proposed method for causal outcome intervals. first, one samples predictors from a bayesian posterior or otherwise learns an ensemble to approximate the distribution of outcome predictors that agree with the observational data. the ensemble average (grey dot) could be used to predict actual causal outcomes (red/blue dots). with hidden confounding, the learned ensemble might diverge substantially from the best predictor distribution to model causal outcomes. one cannot identify the correct distribution from observational data alone. instead, a sensitivity model says how wrong this learned ensemble could be, and one optimizes with respect to weights on the ensemble elements for each individual and treatment in order to upper-bound the (1 -α/2) quantile and lower-bound the (α/2) quantile of the ensemble prediction. these intervals incorporate both empirical uncertainties from prediction quantiles and hidden-confounding uncertainties from the ensemble modulation. they are evaluated against ground-truth causal outcomes by removing confounding through interventions on test-set individuals, using (semi-)synthetic data. we consider sensitivity models that bound the complete propensity (colloquially, the true propensity of treatment assignments for an individual, taking into account all relevant variables, observed or not) in terms of the nominal propensity (based just on observed covariates, allowing it to be estimated by regression. when that ratio is unit, and the complete propensity equals the nominal propensity at all points, then the covariates are adequate to explain all the confounders. it is worthwhile to broaden the notion of the msm in light of recent developments with msm-like sensitivity models for continuous treatments(jesson et al. outcome intervals predict individual outcomes of treatments disentangled from confounders, relying on a sensitivity model to guide partial identification in the presence of hidden confounders.instead of partial identification of (conditional) average treatment effect, (c)ate, the conformal sensitivity analysis (csa)(yin et al.we present a versatile, modular procedure for taking an ensemble of outcome predictors and, in coordination with some causal sensitivity model, producing tight causal outcome intervals. 
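schematically, and in generic notation that may differ from the works cited above, tan's marginal sensitivity model with parameter Λ ≥ 1 constrains the odds ratio between the complete propensity e(x, u) and the nominal propensity e(x):

\[
\Lambda^{-1} \;\le\; \frac{e(x,u)\,/\,\big(1-e(x,u)\big)}{e(x)\,/\,\big(1-e(x)\big)} \;\le\; \Lambda ,
\]

so Λ = 1 recovers the unconfounded case in which the complete propensity equals the nominal propensity everywhere.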
in bayesian notation the posterior p (θ | d), given a dataset d, induces a posterior predictive outcome distribution, which is described by a conditional expectation that averages the individual model predictions p m (y | t, x; θ):. it states that while the outcome would depend on the assigned treatment, a potential outcome for any treatment should not be affected by the treatment assignment, after conditioning on covariates.whichever sensitivity model is invoked to bound the extent of hidden confounding, all that is required for caus-modens is a pair of weight-bounding functions ω(t, x), ω(t, x) that are partial identifiers of the potential-outcome probability density function, p(y t |x). we introduce one layer of indirection by referring to potential outcome models θ t , heterogeneous in treatment t and covariate x (conditioning on the latter,) that can only be partially identified by means of the learned outcome model θ. the real potential outcomes are therefore (partially) identified by marginalization over the potential models: p(y t |x) = p(y|t, x; θ t ) p(θ t |x; d) dθ t , assuming integrability.assumption 2 (sensitivity model as weights) the sensitivity model under consideration uses the propensity e t (x) to produce bounds 0 < ω(t, x) ≤ 1 ≤ ω(t, x) < +∞ on weights for partial identification of the outcome model, in the sense that there exists some θ → ω(θ, t, x) ∈ that recovers the true potential outcome model density function, p(θ t |x; d) = ω(θ, t, x) p(θ|d).this formulation readily accommodates a variety of recently proposed sensitivity models once we pose the complete propensity in terms of potential outcome models rather than direct potential outcomes. suppose the complete propensity is denoted as e t (x, θ) and gives the propensity of (binary) treatment given that the potential outcome model θ t is known to be θ.formally, assumption 2 can be derived as a consequence of using a sensitivity model on the radon-nikodym derivative of the potential model θ t with respect to the learned model θ, while assuming absolute continuity between the two distributions:. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/729.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/729.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4a73e19035a9e3249b4dbe198944ed36c99e13d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/729.txt @@ -0,0 +1 @@ +over the past few years, offline rl has become a popular field of reinforcement learning research, since it promises to alleviate one of the most pressing issues when trying to apply rl methods to real-world (i.e. potentially physical) systems: online environment interaction. direct interaction is often prohibited in real systems, since as opposed to e.g. simulated atari video games, they incur significant (opportunity) costs and due to potential safety violations. offline rl methods such as thus constitute a large step towards broader applicability of reinforcement learning methodologies in practice, since they demonstrate the ability to learn purely from static, pre-collected datasets.a remaining issue is, that each of the proposed algorithms in one way or the other takes a trade-off between optimizing for return only (i.e. pure rl) and regularizing the policy towards the dataset distribution, since otherwise trained policies would exploit the return estimating models and transfer badly to the real system. 
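one common way to write this trade-off (a schematic, behavior-regularized objective in the spirit of methods such as td3+bc, not any specific algorithm discussed here) is

\[
\max_{\pi}\; \mathbb{E}_{(s,a)\sim\mathcal{D}}\Big[\, Q\big(s,\pi(s)\big) \;-\; \lambda\, \big\|\pi(s)-a\big\|^{2} \,\Big],
\]

where larger λ pulls the policy toward the dataset actions (behavior cloning) and λ → 0 recovers unregularized value maximization.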
the problem is, that nobody can with certainty determine the amount of regularization that is correct, i.e. how strictly does the policy need to be regularized given the dataset remains an open question (depending on the concrete form of this regularization, different terms such as conservatism, pessimism, risk-avoidance, uncertainty-avoidance, reconstruction penalty, behavior constraint, proximity, etc. have been proposed).recently, proposed that offline rl policies should be trained to be adaptive at runtime, i.e. after training has conceded. the authors propose to maintain a distribution of possible mdps, which can then at runtime be narrowed down to perform actions that are estimated to be optimal in the currently believed mdp. take this a step further and train policies that can remain adaptive in their level of regularization after training has conceded. while is developed for discrete action spaces, such as atari video games, lion is designed for continuous control environments such as the industrial benchmark (ib) or mujoco robotics locomotion tasks. lion enables human expert users to make the concrete trade-off conditioning hyperparameter choice in order to provide them with a utility instead of completely automating the regularization trade-off as in . while this is valuable feature in practice, some users would likely welcome the option to delegate this task in some situations, such as when they are supervising many systems simultaneously, allowing them to better focus on the problematic cases. our goal is therefore to extend lion with an autopilot mode that automatically chooses the trade-off during runtime. instead of training policies for a fixed trade-off between performance and proximity to the behavioral, it samples the trade-off parameter randomly for each starting state of a trajectory during training and conditions the policy on it, enabling policies to learn the entire range from pure behavior cloning over regularized rl up to "pure" (unregularized) rl.the lion methodology has been proposed in order to provide expert practitioners with a utility: instead of automating them away, users can benefit from a still highly autonomous system, yet also have the possibility to interact with the policy and alter its behavior on a higher level of abstraction, by handpicking λ at runtime, depending on their observations and knowledge about the system. however, we find that practitioners would still benefit from a solution that integrates a more autopilot-like behavior -since expert time is always costly and limited, and since users likely look after more than just a single system, it could be beneficial if λ could be tuned automatically most of the time, and only in critical situations or whenever the user desires they would take over. depending on the concrete system, different criteria may be of concern: in safety critical systems for example the metric has to reflect that the most important factor is to not enter potentially dangerous system states, while in other systems it is most important to find a good value as quickly as possible, since evaluations incur large opportunity cost as the system cannot be productively used. finally it could be most important to actually find the single best λ value, since the policy will run for a very long time and any underperformance in the search phase is easily offset by performing better in the long run. 
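a minimal sketch of the conditioning idea described above (draw a trade-off λ per trajectory and feed it to the policy as an extra input) is given below; the loss weighting, sampling range, and function signatures are illustrative assumptions, not the lion implementation:

import numpy as np

def conditioned_loss(policy, q_value, batch, lam):
    # policy(obs, lam) -> action; the same network must cover lam in [0, 1],
    # from pure behavior cloning (lam = 0) up to unregularized rl (lam = 1)
    obs, act_data = batch["obs"], batch["act"]
    act_pi = policy(obs, lam)
    bc_term = np.mean(np.sum((act_pi - act_data) ** 2, axis=-1))   # proximity to the behavioral data
    rl_term = np.mean(q_value(obs, act_pi))                        # estimated return
    return (1.0 - lam) * bc_term - lam * rl_term                   # to be minimized

# during training, a fresh lam is drawn for each starting state / trajectory:
# lam = np.random.uniform(0.0, 1.0)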
we consider the metrics final return (r), as well as return under budget (rub) indicating the best return possible with a limited budget, mean behavioral regret (mbr) representing the average underperformance during the search phase with respect to the behavioral, and mean optimal regret (mor) which is the average underperformance in the search phase compared to the best λ value. since this strategy can be considered quite restrictive, we consider a relaxation increase-behavioral (inc-beh), which stops only once the performance drops below that of the behavioral policy, and then uses the best so far observed λ value. these two strategies obviously aim to reduce regret compared to the behavioral performance, which is often a key metric, however we also evaluate a greedy strategy, which starts at the opposite end and moves towards lower λ values in the hope of finding the optimum quicker if the policy can generalize well. then we compare different gradient-free optimizing strategies that have previously been proposed to see if they can outperform the simple strategies in speed, performance, as well as regret measures: pso (particle swarm optimization) is a swarm based optimization method which uses neighbourhood information for search.we train lion policies as proposed in, and then search the space of tradeoff parameters λ with the proposed search methods from the previous section in order to provide an automated adaptive offline rl algorithm that works in continuous action spaces.2: we can see that, for obvious reasons, the inc-* strategies perform best in terms of regret compared with the behavioral policy, since they use underperformance with respect to the behavioral policy as their stopping criterion (the conservative strategy is even stricter, i. our proposed inc-beh strategy can be seen as an almost dominant solution compared with the conservative one, since it has close to zero mbr like inc-con, but mostly better mor, and very good r, almost reaching the top performances of greedy, pso, scr, and de. the greedy strategy appears competitive with the other gradient-free optimization algorithms in terms of return (especially on the bad datasets), however it is dominated by scr and de in every metric, especially in the two regret measurements. we introduce relevant performance metrics by which strategies can be evaluated and test the approach with seven different strategies, by augmenting the lion algorithm with our approach, yielding an extension to automatically adapt the trade-off for offline rl at runtime, providing the user an autopilot like option. the policy should rarely underperform the behavioral, our proposed relaxation inc-beh is likely the best option, while if quick trade-off search under an evaluation budget is needed, scr performs best. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/73.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/73.txt new file mode 100644 index 0000000000000000000000000000000000000000..e1dea601e4ea5fb07e0939e1f6224502afd6f07c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/73.txt @@ -0,0 +1 @@ +for an agent or system suffering from disturbances, its control input, as a defender, desires to complete a specified control mission by determining control policy to reject the influences of antagonistic input, i.e., non-cooperative disturbances that intend to disrupt the mission. this is known as zero-sum games or min-max problems , . 
in real-world applications, agent dynamics may be unknown. in order to make such an agent perform in the target trajectories exhibited by a target agent with optimal policy, optimal control theory assumes that the performance cost function is known, and rl , based optimal tracking control methods - compute optimal policy by observing states and control actions without knowing the system dynamics, where a standard iterative form for rl is known as policy iteration (pi) , , , . however, in real interactions, operators may not know the appropriate specified cost functions, i.e., the weights on states and inputs. as a result, these optimal control methods may not obtain the expected control performance or even be used.wenqian xue, jialu fan, tianyou chai are with the state key laboratory of synthetical automation for process industries and international joint research laboratory of integrated automation, northeastern university, shenyang 110819, china. (e-mail: xuewenqian23@163.com, fanjialu@gmail.com, tychai@mail.neu.edu.cn).bosen lian and frank l. lewis are with the uta research institute, the university of texas at arlington, texas 76118, usa. (email: bosen.lian@mavs.uta.edu; lewis@uta.edu).instead of manually selecting cost function weights, many efforts have been made on constructing cost function weights. inverse optimal control (ioc) and inverse rl (irl) construct cost function weights given system control behaviors. sometimes they are referred to as the same thing - , but they may differ in structure and how they are applied .assuming a stable control system, ioc constructs a cost function concerning which the system behavior is optimal. the cost function is constructed in the framework of lyapunov stability condition for continuous-time (ct) systems - and discrete-time (dt) systems - where considers finite horizon. online ioc methods to determine cost function in the infinite and finite horizon are studied in , . ioc is also used to verify the effectiveness of the proposed control laws in , . these works do not consider min-max or zero-sum games, but does. they all require system dynamics, which cannot be applied directly to systems with unknown dynamics.irl generally reconstructs reward and cost functions from expert demonstrations of the optimal policy. it is usually applied to apprenticeship learning and imitation learning problems of markov decision processes (mdps) , - where a learner seeks to imitate the demonstrations by learning the unknown expert's reward function from the observed demonstrations. irl methods construct reward function since reward function is a more succinct, robust, and transferable definition for the task than the policy mapping from states to actions. lyapunov stability is not necessarily considered here.irl has also been developed for trajectory tracking and imitation problems of differential systems in , , where uses a bilevel structure (also see , , ). that is, an optimal control problem is solved repeatedly in the inner loop. this two-loop iteration is computationally expensive. all of these works are model-based and do not consider min-max or zero-sum games. the work makes an effort for datadriven control by estimating model parameters before adopting the model-based irl method. unlike them, without the need for model identification, our previous studies , propose completely model-free irl methods that use merely system data, but not for zero-sum games. 
our work considers zero-sum games but propose an irl method using a two-loop iteration structure and partial system dynamics.this paper considers an expert-learner zero-sum game, that is, a learner agent suffering from non-cooperative disturbances with unknown dynamics expects to mimic the behaviors of the expert agent of optimal policy. as the solution, we propose a new interaction called zero-sum game irl, namely a novel data-driven off-policy irl algorithm for expert-learner zero-sum games of differential systems. it consists of a game solution correction modified from the standard rl and a cost function weight reconstruction using the standard ioc. using only the behavior data of the expert and learner, a learner agent learns the unknown cost function objective and the optimal control policy to mimic the expert's behavior. this algorithm does not need to know or identify system models and performs a single-loop learning procedure without solving optimal problems repeatedly in inner loops. moreover, no initial stabilizing policy is needed to start the iteration. the properties and effectiveness of the proposed data-driven are well-analyzed. notations. • is the euclidean norm. i n is n × n identity matrix, and diag{a,b,..} is diagonal matrix with a, b, .. in diagonal line. for a vectorfor an agent or system suffering from disturbances, its control input, as a defender, desires to complete a specified control mission by determining control policy to reject the influences of antagonistic input, i. in order to make such an agent perform in the target trajectories exhibited by a target agent with optimal policy, optimal control theory assumes that the performance cost function is known, and rl,based optimal tracking control methods-compute optimal policy by observing states and control actions without knowing the system dynamics, where a standard iterative form for rl is known as policy iteration (pi),,,. inverse optimal control (ioc) and inverse rl (irl) construct cost function weights given system control behaviors.assuming a stable control system, ioc constructs a cost function concerning which the system behavior is optimal.this paper considers an expert-learner zero-sum game, that is, a learner agent suffering from non-cooperative disturbances with unknown dynamics expects to mimic the behaviors of the expert agent of optimal policy. using only the behavior data of the expert and learner, a learner agent learns the unknown cost function objective and the optimal control policy to mimic the expert's behavior. the learner agent only knows the target agent's control actions and state behavior but does not know its performance cost function and system dynamics. by using the behaviour data of the expert (1) and the learner itself(7), the learner desires to reconstruct the unknown performance cost function(2)to exhibit the same control actions u t in (3a) and states x t as the expert (1). hence, our control goal is to determine the unknown cost function objective(2)to produce the optimal control input u * = -k * x with k * = k t using only target data of x t , u t , d t and learner data of x, u, d.in fact, the q * , r, γ, p * satisfying (19a)-(19c) that explain the same strategy k * = k t may not be unique and can be different from the actual target values q t , r t , γ t , p t shown in (3b) and (6). 
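for reference, in generic linear-quadratic notation (which may differ from the paper's equations (3) and (19)), the zero-sum setting looks as follows: for dynamics ẋ = ax + bu + dd and cost ∫ (xᵀqx + uᵀru − γ²dᵀd) dt, the saddle point is characterized by the game algebraic riccati equation

\[
A^{\top}P + PA + Q - PBR^{-1}B^{\top}P + \gamma^{-2} P D D^{\top} P = 0,
\qquad
u^{*} = -R^{-1}B^{\top}P\,x =: -Kx, \qquad d^{*} = \gamma^{-2} D^{\top}P\,x ,
\]

and inverse rl asks which weights (q, r, γ) and solution p are consistent with an observed gain k, which, as noted above, is generally not unique.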
to remove this requirement, we develop here a data-driven irl algorithm for expert-learner zero-sum games based on algorithm 1, which only requires the data x t , u t , d t of the target agent (1) and x, u, d of the learner agent(7).in order to update q i+1 in (18) using only data x, u, d, inspired by the integral rl technique, multiplying both sides of (18) by x and adding and subtracting terms u t b t p i x and d t d t p i x, (18) can be rewritten as.we show three simulation experiments, a first one of the data-driven algorithm 2 to show its performance, a comparison simulation with the bilevel irl method into show the reduction of iteration steps of algorithm 2, and a second comparison simulation with the rl-based optimal tracking control method into show the improvement of control performance with the cost function weights correction of algorithm 2.in this subsection, the typical rl-based optimal tracking control methodfor linear disturbed systems, which computes optimal control policy given cost function weights, is simulated to show the advantage of algorithm 2 in control performance by computing both optimal control policy and cost function weights.this paper proposes a novel data-driven off-policy irl approach to determine both cost function and optimal control policy to stabilize a learner agent suffering from noncooperative disturbances by mimicking a target agent's trajectories using data of both agents. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/730.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/730.txt new file mode 100644 index 0000000000000000000000000000000000000000..e8f7de485dcf702a9026c8f7ba6321846d9216f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/730.txt @@ -0,0 +1 @@ +a time-series describes a collection of observations indexed in time order . time-series forecasting describes the task of predicting future values of a target by analyzing the corresponding past data, as illustrated in fig. 1. time-series forecasting involves a broad spectrum of applications, from climate modelling to financial time-series forecasting and energy time-series forecasting .energy time-series forecasting includes several sub-tasks. in this paper, we focus on three pivotal tasks: electric load demand forecasting (eldf), describing the process of predicting the electric load demand by employing historical load data, personalized energy consumption forecasting (pecf), namely the process of predicting the energy load consumption based on past load data for individual consumers, and renewable energy generation forecasting (regf), describing the process of predicting mainly the generated solar and wind energy by using, mainly, historical data (e.g., energy generation measurements, weather-related data).we deal with the aforementioned forecasting tasks with special emphasis to the greek energy market. for each of the tasks, we provide a brief description of earlier important works, as well as more recent deep learning (dl) methods, focusing to recent state-of-the-art methods, considering the aforementioned energy market. that is, considering the eldf task, we present amongs others, a novel online distillation methodology for improving the forecasting performance of an one-day-ahead dl model, as well as a novel anchored-based methodology. a novel methodology based on the residuals concept is applied both on eldf and pecf tasks. 
finally, regarding the regf task a method for wind energy generation prediction guided by multiple-location weather forecasts is presented. in addition, the performance of the presented method is evaluated on other res types, including solar energy. it should be emphasized that the presented methodologies can also be applied for generic time-series forecasting tasks. the rest of the manuscript is organized as follows. section 2 describes the task of electric load demand forecasting. subsequently, the task of personalized energy consumption forecasting is presented in section 3, while section 4 describes the task of renewable energy generation forecasting. finally, section 5 summarizes this work. in this paper, we focus on three pivotal tasks: electric load demand forecasting (eldf), describing the process of predicting the electric load demand by employing historical load data, personalized energy consumption forecasting (pecf), namely the process of predicting the energy load consumption based on past load data for individual consumers, and renewable energy generation forecasting (regf), describing the process of predicting mainly the generated solar and wind energy by using, mainly, historical data (e.electric load demand forecasting, also referred to as electricity demand forecasting, describes the process of estimating the amount of electricity that will be used in a certain area, using historical electric load data. for instance, a convolutional and recurrent neural network based model is proposed for short-term load forecasting in, whilst a recurrent neural network based sequence to sequence model is proposed to forecast the short-term power loads in. for example, a hybrid radial basis function -convolutional neural network model is proposed to address the week-ahead forecasting task in, while a novel regularization method for improving a lightweight one-day-ahead forecasting model is proposed in. finally, a thorough comparative study of state-of-the-art dl models initially proposed for generic forecasting tasks, applied to the eldf task on greek energy market is provided inin the following, we briefly present four recent state-of-the-art works on greek energy market.lightweight dl model for greek energy market ina simple lightweight model (mlp with two hidden layers) is proposed considering the electricity demand forecasting task considering the greek energy market., load one week prior the day whose load demand we aim to predict), and subsequently a forecasting model is trained, instead of learning the actual load values, to learn the offset. first, a model is trained to predict the actual load values (ground truth), considering the electric load demand forecasting task.correspondingly, considering the electricity demand forecasting task, it is advocated that, since similar input features lead to similar predicted load values, the forecasting ability of the network can be improved by incorporating these similarities to the training process, as opposed to training with the actual load values. considering a neural network for electricity demand forecasting, ϕ(• ; w) with weights w, an input sample x i , i = 1, • • • , n, its corresponding output of the network, ϕ(x i , w), and its ground truth vector g i ∈ r d , the soft target s i for the input sample in the distilled training process is computed as a combination of the ground truth vector and the prediction of the model as follows:. 
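the combination itself is not reproduced in this excerpt; a natural form, with an assumed mixing weight β ∈ [0, 1] (the exact weighting used by the method may differ), is

\[
s_i \;=\; \beta\, g_i \;+\; (1-\beta)\,\phi(x_i; W),
\]

i.e. the soft target interpolates between the ground-truth vector and the model's own current prediction; with mape-style training the per-sample loss then takes the schematic form
\(\;\mathcal{L} = \frac{100}{d}\sum_{j} \big|s_{i,j} - \phi(x_i;W)_j\big| \,/\, |s_{i,j}|\).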
that is, using mape loss, a common loss considering forecasting tasks for training the model, the loss l osdf of method is formulated as follows, utilizing the computed soft targets:.considering the one-day-ahead eldf task on greek energy market, the osdf method improves the baseline performance of a simple mlp model using the input features that reported in, in terms of mape. more specifically, simple lightweight models have been proposed to address the energy consumption forecasting for medium voltage (mv), and high voltage (hv) consumers, considering one short-term and one mid-term forecasting tasks.renewable energy generation forecasting, also referred to as renewable energy sources (res) forecasting, concerns mainly the prediction of the generated solar and wind energy, within a specific time frame., electric load demand forecasting, personalized energy consumption forecasting, and renewable energy generation forecasting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/731.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/731.txt new file mode 100644 index 0000000000000000000000000000000000000000..fffd22a1a042781cfffd2ff2417e868e34d4e493 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/731.txt @@ -0,0 +1 @@ +due to the difficulty of collecting strong supervision information (i.e., fully labeled datasets) in some real-world scenarios, many weakly supervised learning settings were investigated to deal with weak supervision information. typical weakly supervised learning settings include semisupervised learning (chapelle, scholkopf, and zien 2006;sohn et al. 2020), noisy-label learning (liu and tao 2015;malach and shalev-shwartz 2017;patrini et al. 2017), and positive-unlabeled learning (elkan and noto 2008;niu et al. 2016;kiryo et al. 2017), and multiple-instance learning (maron and lozano-pérez 1997;andrews, tsochantaridis, and hofmann 2002).in recent years, another weakly supervised learning setting called partial-label learning (pll) (cour, sapp, and taskar 2011) has received much attention from the machine learning and data mining communities. in pll, each training example is annotated with a set of candidate labels, only one of which is the true label. due to the massive label ambiguity and noise in data annotation tasks, pll has been increasingly used in many real-world applications, such as web mining (luo and orabona 2010), multimedia content analysis (zeng et al. 2013), and automatic image annotations (chen, patel, and chellappa 2018).the major challenge of pll lies in label ambiguity, as the true label is concealed in the candidate label set and not directly accessible to the learning algorithm. to tackle this problem, many pll methods have been proposed. these methods achieved satisfactory performance by using appropriate techniques, such as the expectation-maximization algorithm (jin and ghahramani 2003;wang et al. 2022), the maximum margin criterion (nguyen and caruana 2008), metric learning (gong, yuan, and bao 2021a;liu et al. 2018), the manifold regularization (zhang and yu 2015;zhang, zhou, and liu 2016;gong et al. 2018;wang, li, and zhang 2019), and the self-training strategy (feng and an 2019;lv et al. 2020;feng et al. 2020;wen et al. 2021).despite the effectiveness of previous pll methods, they only focused on the classification setting where candidate labels are all discrete, which cannot handle continuous labels with real values. 
in many real-world scenarios, regression tasks that learn with real-valued labels can be commonly encountered. however, how to learn an effective regression model with a set of real-valued candidate labels is an open problem that still remains unexplored.in this paper, we provide the first attempt to investigate partial-label regression (plr), where each training example is annotated with a set of real-valued candidate labels. in order to solve the plr problem, we make the following contributions:• we propose a simple baseline method that takes the average loss incurred by candidate labels as the predictive loss to be minimized for model training. • we propose an identification method that takes the least loss incurred by candidate labels as the predictive loss to be minimized for model training. • we propose a progressive identification method that differentiates candidate labels by associating their incurred losses with progressively updated weights. • we theoretically show that the identification method and the progressive identification method are model-consistent, which indicates that the learned model converges to the optimal model. 2020;feng et al. however, this intuitive method does not differentiate the true label for model training, and thus may cause the training process to be misled by false labels in the candidate label set. to overcome this drawback, we further propose two theoretically grounded methods, where one directly identifies the true label with the least loss for model training and the other progressively identifies the true label by associating the loss of each candidate label with properly updated weights. (3), we take into account the influence of each candidate label and regard the averaged loss as the predictive loss on the plr example (x, s). apart from the true label, there are normally multiple false labels in the candidate label set, hence these false labels may dominate the model training process and thus have huge negative impacts on the learned model. to overcome this drawback, we propose an identification method, which regards the candidate label with the least loss as the true label and only considers the least loss of the identified pseudo label as the predictive loss:. that is, the model learned by the identification method from data with real-valued candidate labels converges to the optimal model learned from fully supervised data. the model f ⋆ min = arg min f ∈f r min (f ) learned by the identification method is equivalent to the optimal model f ⋆ = arg min f ∈f r(f ). here, we provide a convergence analysis for the above identification method, which shows that the model f min = arg min f ∈f r min (f ) (empirically learned from only data with real-valued candidate labels by using our identification method) can converge to the optimal model f ⋆ . to ensure that f min converges to f ⋆ , we can show that r min ( f min ) converges to r min (f ⋆ )., f ⋆ = f ⋆ min ), we can turn to show that r min ( f min ) converges to r min (f ⋆ min ). suppose the pseudo-dimensions of {x → ℓ(f (x), y) | f ∈ f, y ∈ y} is finite and there exists a constant m ≤ ∞ such that |ℓ(f (x), y)| ≤ m for all (x, y) ∈ x × y and f ∈ f.theorem 3 shows that the optimal regression model learned from fully labeled data can be identified by the progressive identification method given only data with realvalued candidate labels. 
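the three predictive losses described above can be sketched as follows for a single plr example (x, s) with real-valued candidate set s; the base loss and, in particular, the progressive weighting scheme are assumed instantiations chosen to satisfy the stated property (weight → 1 as a candidate's loss → 0), not necessarily the paper's exact choices:

import numpy as np

def ell(pred, y):
    return (pred - y) ** 2                                 # any base regression loss

def average_loss(pred, S):
    return float(np.mean([ell(pred, y) for y in S]))       # baseline: average over candidates

def identification_loss(pred, S):
    return float(np.min([ell(pred, y) for y in S]))        # least-loss candidate as the pseudo label

def progressive_loss(pred, S, eps=1e-12):
    losses = np.array([ell(pred, y) for y in S])
    w = 1.0 / (losses + eps)                               # smaller loss -> larger weight
    w = w / w.sum()                                        # normalized; tends to one-hot as a loss approaches 0
    return float(np.dot(w, losses))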
we aim to show that the model f wet = arg min f ∈f r wet (f ) (empirically learned from only data with real-valued candidate labels by using our progressive identification method ℓ wet ) can converge to the optimal model f ⋆ . besides, if the loss of a candidate label approaches 0, we consider this candidate label to be the true label, hence the weight assigned to this candidate label would approach 1, and the weights of other labels would approach 0. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/732.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/732.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c90db9286311d7d0de5e1739129ac87e33b089b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/732.txt @@ -0,0 +1 @@ +machine learning algorithms are regularly used to solve a plethora of regression problems. the demand for these algorithms has increased significantly due to the push of digitalisation, automation and analytics. traditional techniques such as random forest, decision trees and xg boost have been integral in various fields such as banking, finance, healthcare and engineering. technology is always evolving and technological advancements are driven by factors such as human curiosity, problem-solving and the desire for increased efficacy and reliability. researchers are constantly working on improving these existing methods as well as exploring new improved strategies as can be seen in (hosein, 2022). this approach uses a distance metric (euclidean distance) and a weighted average of the target values of all training data points to predict the target value of a test sample. the weight is inversely proportional to the distance between the test point and the training point, raised to the power of a parameter κ. in our paper, we investigate the performance of this novel approach and several wellestablished machine learning algorithms namely xg boost, random forest, decision tree and k-nn using the mean absolute error (mae) as the performance metric. we intend to showcase the potential of this new algorithm to solve complex regression tasks across diverse datsets. in the next section, we describe related work and then the theory of the proposed approach. after, we present and discuss the findings such as any issues encountered. finally, we advocate that the proposed approach may be robust and efficient making it extremely beneficial to the field. this approach uses a distance metric (euclidean distance) and a weighted average of the target values of all training data points to predict the target value of a test sample. the core of the approach is similar to k-nn but instead of using samples in a neighbourhood, all samples are used and closer samples are weighted more heavily than those further away. instead of using the samples available for each feature, his algorithm computes the weighted average of the target variable using all samples in the dataset. this algorithm uses the euclidean distance metric and a hyperparameter κ, which controls the influence of the distance (the weights) between points in the data. another aspect is that the same unit of distance is used for each feature which allows one, for example, to compare distance between a gender feature with the distance between an age feature. 
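a minimal sketch of the prediction rule described above (not the authors' exact implementation; feature preprocessing and the handling of categorical/ordinal features are omitted):

import numpy as np

def predict(x_test, X_train, y_train, kappa=2.0, eps=1e-12):
    d = np.linalg.norm(X_train - x_test, axis=1)   # euclidean distance to every training point
    w = 1.0 / (d ** kappa + eps)                   # larger kappa: distant points fade faster
    return float(np.dot(w, y_train) / w.sum())     # weighted average of all training targets

unlike k-nn, every training sample contributes, with κ controlling how sharply the contribution decays with distance.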
when κ is large, the influence of points further away from the test point decreases quickly since their distance raised to a large power becomes very large, which in turn makes the weight very small. however, when κ is small, the influence of points further away decreases slowly since the distance raised to a small power results in a relatively smaller value, which then makes the inverse weight larger. for a given test sample i, its prediction is the weighted average of the target value over all training samples. additionally, we may encounter some unique values in the testing data set that are not in the training data set or vice versa. in order to solve this problem for numerical features, we impute a mean value for each unique value in both the training and testing data sets. however, note that while the initial range of κ values involves some trial and error, the process of finding the optimal combination within the range is essentially a grid search, which is systematic and data-driven and ensures that the model is robust. since the algorithm uses all the training data points in its prediction, it will be robust for small data sets or where there are not enough samples per category of a feature. we used the mean absolute error (mae) to measure the performance since it is robust and easy to interpret. the algorithm takes as inputs: c ≡ set of categorical features; o ≡ set of ordinal features; x ≡ set of training samples; y ≡ set of testing samples; and tuning parameters κ 1 , κ 2 > 0. for each f ∈ o, it computes the imputed mean target value over training samples when feature f has value v. the approach is based on a weighted average of the target values of the training points, where the weights are determined by the inverse of the euclidean distance between the test point and the training points raised to the power of a parameter κ. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/733.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/733.txt new file mode 100644 index 0000000000000000000000000000000000000000..28c354ea02209854955d09b590bb4ab1f40a1ebf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/733.txt @@ -0,0 +1 @@ + here given k arms, one adaptively obtains samples until choosing one of the arms to output - the goal is to ensure that with probability 1 - δ, this arm has average reward within ε of the best arm, with minimum possible sample complexity depending on ε and δ. here given k arms, one adaptively obtains samples until choosing one of the arms to output - the goal is to ensure that with probability 1 - δ, this arm has average reward within ε of the best arm. 4 shows that for the general reservoirs we consider, passing from fixed budget to fixed confidence as they do is inherently suboptimal: the factors of log log(1/δ) would remain, but are extraneous for fixed confidence. they proposed an algorithm with fixed budget sample complexity o(log(1/δ) (log log(1/δ))^2), and asked whether the log log(1/δ) factors are necessary. to contextualize this result, we recall the important work which showed θ(k log(1/δ)/ε^2) samples are necessary and sufficient for (ε, δ)-pac pure exploration in the k-armed bandit problem with fixed confidence.
equivalently, the optimal fixed budget sample complexity for an (η, ε, δ)-pac guarantee is (c -1 α,β ± o(1)) log(1/δ) log log(1/δ) 2 .5 we observe that our fixed budget algorithm can actually output log n good arms with the same success probability as for outputting a single good arm.focusing on the δ-dependence, a challenge with infinitely many arms is that to succeed with probability 1-δ, it is necessary both to sample log(1/δ) arms to ensure a good arm is ever observed, and to sample an arm log(1/δ) times to make it safe to output. each time a samples arm i for the n i = (a k + 1)-st time for a k ∈ a, a ′ samples arm i until n i = a k+1 . when an arm a i reaches b k samples for some k ≥ 0, it is checked for possible rejection by comparing its empirical average reward to the threshold τ k . algorithm 3 rejects arm i and moves to arm a i+1 if the empirical average pi,b k of arm a i drops below a moving threshold τ k . let x i be the number of samples used by arm a i before rejection, and i i ∈ {0, 1} be the indicator of the event that a i is ever rejected, even if algorithm 3 were to continue past time n and sample arm i an infinite number of times. , pi,b k 0 +j-ℓ the law of pi,b k 0 +j-ℓ-1 depends only on pi,b k 0 +j-ℓ and is given explicitly by a hypergeometric variable. , q 0 p i,b k 0 +j = q 0 ≤ exp -1 -o(̺ 2 ) b k 0 +j d f (α, β) 2 2 log 2 n . in the last line we used the fact that b k 0 +j ≥ b k 0 ≥ log 4 n to absorb the factor n l+1 ≤ e ̺ log 3/2 n for large n . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/734.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/734.txt new file mode 100644 index 0000000000000000000000000000000000000000..07bb8cfa243b0c049dd1893e936c038509f642b5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/734.txt @@ -0,0 +1 @@ +given the stunning empirical successes of deep neural networks, a pressing need has emerged to explain their ability to generalize to unseen data. the traditional approach proceeds via bounds on vc dimension or rademacher complexity and gives uniform convergence guarantees for a given function class .the most classical vc bounds for neural networks, see e.g. the book , scale with the number of neurons. however as early as it was realized that scale-sensitive bounds depending on the size of the weights may be more useful. such bounds have the potential to remain valid even for overparametrized networks with more free parameters than the number of training examples. further, as already noted in the abstract of , they are well-motivated by the prevalence of regularization procedures such as weight decay and early stopping.recent work on scale-sensitive generalization has focused on rademacher complexity bounds that scale with the product of operator norms of the weight matrices. important such results were shown in using covering number and pac-bayes techniques respectively. while one might hope for bounds depending only on weight matrix operator norms, the aforementioned estimates depend polynomially on the network depth, and later showed such depth dependence is unavoidable in general.surprisingly, showed this issue can be largely avoided if one is willing to consider a product of frobenius norms rather than operator norms; they obtain mild depth dependence of only a square-root factor, which can be removed entirely at the cost of worse decay in the number of samples. 
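for orientation (standard form, constants omitted): an upper bound on the empirical rademacher complexity of the network class yields, with probability at least 1 − δ over n samples and uniformly over f ∈ f,

\[
\mathbb{E}\big[\ell(f(x),y)\big] \;\le\; \frac{1}{n}\sum_{i=1}^{n}\ell\big(f(x_i),y_i\big) \;+\; 2\,\mathfrak{R}_n(\mathcal{F}) \;+\; O\!\Big(\sqrt{\tfrac{\log(1/\delta)}{n}}\Big),
\]

and the scale-sensitive results discussed above control \(\mathfrak{R}_n(\mathcal{F})\) by quantities of the form \(B\prod_i M(i)/\sqrt{n}\), with the network depth entering only through mild extra factors.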
their approach stems from the natural idea of iteratively peeling off layers and using the ledoux-talagrand contraction lemma to handle each application of the non-linearity σ. a previous work used this idea directly and paid exponentially in the depth for repeated use of the contraction lemma; the technical innovation of was to apply the contraction lemma inside an auxilliary exponential moment.we give a refinement of the main result of which depends on upper bounds m f (i) and m op (i) on both the frobenius and operator norm of each weight matrix w i . our bound is never worse, and is fully depth-independent unless m op (i)/m f (i) ≈ 1 for nearly all of the initial layers (i.e. the weight matrices are approximately rank 1). the idea is to use their argument repeatedly along an well-chosen subsequence of the layers and take advantage of improved concentration estimates at the intermediate stages.finally we mention that although this work, along with the papers referenced above, apply for essentially arbitrary neural networks, more refined results have been obtained under further assumptions as well as for structured classes of neural networks .we give a refinement of the main result ofwhich depends on upper bounds m f (i) and m op (i) on both the frobenius and operator norm of each weight matrix w i .here each w i is a w i × w i-1 real weight matrix and x ∈ x ⊆ r w0 .we assume x is contained in the radius b ball in r w0 so that ∥x∥ 2 ≤ b for all x ∈ x . ) that an upper bound on r n (f) implies a uniform generalization guarantee for f. we assume b = 1 for simplicity1and prove inductively in 1 ≤ j ≤ k the bound:.to induct from d j to d j+1 , we will apply the technique of between d j , d j+1 . , x n ∈ x throughout the proof and define. our goal will be to iteratively bound e. note that if ∥x∥ ≤ 1 and f ∈ f dj then |f (x)| ≤ p op (d j ).2]), it follows that x j -e is sub-gaussian with variance proxy p op (d j ) 2 /n. in particular for λ j > 0 to be chosen later,.1] to peel the layers between d j and d j+1 , obtaining:.e e λj xj+1/p f (dj+1) ≤ 2 dj+1-dj e e λj xj /p f (dj ).=⇒ log e e λj xj+1/p f (dj+1) ≤ log e e λj xj /p f (dj ) + d j+1 -d j .and defining y j = x j /p f (d j ), we obtain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/735.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/735.txt new file mode 100644 index 0000000000000000000000000000000000000000..4083656775ce15cd0101fbf617fe650174d7c545 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/735.txt @@ -0,0 +1 @@ +in recent years, due to the acceleration of urbanization, many people are moving to cities rapidly. in many countries around the world, especially developing ones, the growing demand for public transport services and the growing number of private vehicles are putting enormous pressure on existing transport systems. frequent traffic accidents, serious traffic congestion, longer commuting time and other problems greatly reduce the efficiency of urban operations and affect the travel experience of passengers. to address these challenges, an increasing number of cities in the world have been developing intelligent transportation systems (its) to facilitate efficient traffic management by optimizing the utilization of system resources. 
typically, an intelligent transportation system can leverage a set of technologies and tools including connected sensors, real-time communication, advanced control and optimization methods to allow a variety of road users to efficiently share information in the road networks. for instance, a roadside camera can be facilitated in a highway network to timely track moving vehicles by using computer vision techniques. such a system can be used to accurately estimate the average speed of vehicles, predict future traffic flow as well as timely incident and congestion detection among many others.at the heart of its applications is often driven by some learning-based algorithms and mechanisms which allow optimal decisions to be made for some traffic scenarios of interest. specifically, graph-based machine learning methods have attracted tremendous interest and research efforts in recent years by researchers and practitioners worldwide thanks to their capability to effectively capture traffic data contained in the graph data structure. currently, these algorithms have been increasingly applied to address various intractable challenges which were hardly tackled in the past. therefore, our key objective of this chapter is to briefly review the algorithm designs in this field with a particular focus on graph-based learning approaches, including graph neural network (gnn) architectures. we will also present two recent applications leveraging gnns to address challenges in traffic management with reference to our recent works (chen et al., 2021c;wu et al., 2022) before we conclude the chapter.to begin with the chapter, we shall briefly introduce some background information related to this research subject by presenting some important research problems in the context of its and corresponding research directions with reference to the prior works in veres and moussa (2020); jiang and luo (2022). specifically, graph-based machine learning methods have attracted tremendous interest and research efforts in recent years by researchers and practitioners worldwide thanks to their capability to effectively capture traffic data contained in the graph data structure., 2019). we first review some existing methods including statistical methods and machine learning methods which have been widely used to address the challenges as aforementioned. roughly speaking, current methods reported in the literature can be summarized into the following three categories, namely, statistical methods, machine learning (ml) methods and deep learning (dl) methods., 2007;jamal et al.ml methods, on the other hand, aim to learn patterns contained in data and make predictions for unseen data using the identified patterns without means of explicit programming. ml models can better model complex nonlinear relationships contained in traffic data which may not be easily done using traditional statistical methods(boukerche et al., 2016;ling et al., 2017;tang et al., rf(ou et al. dl methods, a subfield of ml, can also learn patterns from data in the transportation domain., 2018;kipf et al. in this section, we review the prevalent variants of gnns for its, including graph convolutional networks (gcns), spatial temporal graph convolutional networks (stgcn) as well as graph attention networks (gat). this characteristic is particularly useful to address practical challenges, such as traffic-flow prediction, as data generated in a traffic network can be easily modelled using a spatial-temporal graph. 
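as a concrete example of the spatial building block used by many of these models, the standard gcn propagation rule (kipf & welling) applied to a road-network adjacency matrix a and node features h (e.g., recent flow or speed readings) can be sketched as:

import numpy as np

def gcn_layer(A, H, W):
    A_hat = A + np.eye(A.shape[0])                       # add self-loops
    d_inv_sqrt = np.diag(1.0 / np.sqrt(A_hat.sum(axis=1)))
    A_norm = d_inv_sqrt @ A_hat @ d_inv_sqrt             # symmetric normalization D^-1/2 (A+I) D^-1/2
    return np.maximum(A_norm @ H @ W, 0.0)               # relu(A_norm H W)

spatial-temporal variants such as stgcn interleave such graph convolutions with temporal convolutions or recurrent units along the time axis.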
after that, we briefly reviewed the research methods in the literature that have been applied to address these challenges, starting from classic statistical methods to modern machine learning and deep learning-based approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/736.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/736.txt new file mode 100644 index 0000000000000000000000000000000000000000..42529079d692bc56a4a241758b15947a0db50d20 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/736.txt @@ -0,0 +1 @@ +the last decade has witnessed the immense success of deep learning . as we all know, a major part of such successes are due to the powerful expressiveness of the deep neural networks in representing a function. in other words, the deep neural network is the engine of deep learning. in order to explain why deep learning works so well, there is need to understand deep neural networks from rigorous mathematical viewpoints. as deep neural networks have advanced in machine learning, they have gained much attention in the applied mathematics community and have gone beyond machine learning. deep neural networks, considered as a function class to represent or approximate a function, have exhibited superiority in many aspects to classical approximation functions such as polynomials, trigonometric polynomials, splines, finite elements, wavelets, and kernel functions in approximation and numerical analysis. unlike the classical function approximation in which a function is approximated by a linear combination of basis functions, deep neural networks approximate a given function by compositions of functions with a networks. specifically, a neural network is a vector-valued function defined through consecutive function compositions of a given activation function with parameters consisting of weight matrices and bias vectors. a deep neural network of a given function may be determined by finding the parameters that minimize the difference between it and the given function. mathematically, one would expect that as the number of layers of the deep neural network increases, the difference diminishes and eventually goes to zero as the number tends to infinity. in a special case when the activation function is the rectified linear unit (relu), this question was investigated in a number of studies .a closely related mathematical question, even more basic, is when a deep neural network converges to a meaningful function as its layer number tends to infinity. this question was studied recently for the relu activation function with a fixed width in , for the relu activation function with a convolution network in and for contractive activation functions with a fixed width in . although the contractivity hypothesis covers interesting activation functions such as sigmoid, there are many activation functions frequently used in applications that are not contractive, for example, relu, parametric rectified linear unit (prelu), exponential linear unit (elu) and scaled exponential linear unit (selu), to name a few. therefore, there is a need to understand uniform convergence of deep neural networks defined by a non-contractive activation function. 
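in generic notation consistent with the description above, the networks in question are the compositions

\[
\mathcal{N}_1(x) = \sigma(W_1 x + b_1), \qquad
\mathcal{N}_n(x) = \sigma\big(W_n\, \mathcal{N}_{n-1}(x) + b_n\big), \quad n \ge 2,
\]

and when σ is lipschitz with constant l, each \(\mathcal{N}_n\) is lipschitz with constant at most \(l^{\,n}\prod_{j=1}^{n}\|W_j\|\), which is why convergence conditions naturally intertwine l with the norms of the weight matrices.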
this paper will study uniform convergence of deep neural networks of lipschitz continuous activation functions with weight matrices of variable widths (including bounded and unbounded widths).main difficulty in analyzing convergence of deep neural networks is a result of the nonlinearity of the activation function. this was overcome in for the relu activation function by re-expressing the functional application of the activation function in terms of matrix-vector multiplication with activation matrices. pointwise convergence of neural networks with the relu activation function was then analyzed by using the matrix-vector multiplication. when a general lipschitz continuous activation function is chosen, one can take the advantage of its lipschitz continuity to overcome the difficulty caused by its nonlinearity. we propose a condition that intertwines the lipschitz constant of the activation function with the weight matrices to ensure the uniform convergence of the resulting neural networks. the main contribution of this work lies in laying out a general framework for uniform convergence analysis of deep neural networks with general activation functions and pooling operators, both of which are lipschitz continuous.understanding conditions that ensure convergence of deep neural networks is not only theoretically interesting, but also practically advantageous in guiding their training in applications. as we know, a deep learning model usually possesses a large number of hidden layers and a massive amount of parameters. for example, residual networks (resnets) can reach over 1,000 layers . a resnet with only 50 layers has over 23 million parameters, and the overwhelming chatgpt (gpt-3) model has approximately 175 billion parameters. in applications, the parameters of a dnn are determined via a training process by minimizing a loss function on given training data. it is desirable to figure out whether or not a dnn system with so many parameters can eventually converge to a meaningful function in a rigorous mathematical sense. mathematical conditions on the parameter that ensure convergence of the dnns as the number of their layers increases will be beneficial to the interpretability of the dnns. such conditions can also be helpful in training a dnn. for instance, if such conditions are available, people can generate initial parameters of a dnn that satisfy or nearly satisfy the conditions. with such an initial deployment of parameters, the dnn will be inclined to converge more quickly. finally, uniform convergence rates of dnns will be applicable to mathematical analysis on the generalization ability of dnn models .we organize this paper in seven sections. in section 2, we describe the setting of deep neural networks. section 3 is devoted to developing a general framework for uniform convergence analysis of deep neural networks with pooling. a key ingredient that ensures uniform convergence of the deep neural networks is a condition that intertwines the lipschitz constants of the activation function and the pooling operator with the norm of the weight matrices. in sections 4 and 5, we present uniform convergence results for deep neural networks with weight matrices of fixed widths and bounded widths, respectively. while in section 6, we consider deep neural networks with weight matrices of unbounded widths. finally, in section 7 we present uniform convergence theorems for convolutional neural networks. 
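the convergence question above asks when the sequence of networks stabilizes as the number of layers grows. the toy check below measures the sup-norm gap between depth-n and depth-(n+1) networks on a bounded grid; the geometric decay of the weight norms is an illustrative assumption chosen so the sequence visibly stabilizes, and is not the paper's precise condition on the weight matrices.

```python
# toy illustration of the convergence question above: measure the sup-norm gap
# between networks of depth n and n+1 on a grid of inputs. the geometrically
# decaying weight scales are an illustrative assumption, not the paper's
# exact condition.
import numpy as np

rng = np.random.default_rng(2)
relu = lambda z: np.maximum(z, 0.0)

width, depth_max = 4, 30
Ws = [0.5 ** n * rng.normal(size=(width, width)) for n in range(depth_max)]
bs = [0.5 ** n * rng.normal(size=(width,)) for n in range(depth_max)]

grid = np.linspace(-1.0, 1.0, 200)
X = np.stack([grid] * width, axis=1)             # inputs on a bounded domain

def net(X, n):
    H = X
    for W, b in zip(Ws[:n], bs[:n]):
        H = relu(H @ W.T + b)
    return H

for n in (5, 10, 20, 29):
    gap = np.max(np.abs(net(X, n + 1) - net(X, n)))
    print(n, gap)                                # gaps shrink as depth grows
```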
hence, selu is again expanding. when $\sigma$ is lipschitz continuous with the lipschitz constant $L$, the neural network $\mathcal{N}_n$ is also lipschitz continuous with the lipschitz constant $L^n \prod_{j=1}^{n} \|W_j\|$. due to the unboundedness of the widths, we will extend all vectors and matrices to elements in the sequence spaces $\ell^p(\mathbb{N})$ and $\ell^p(\mathbb{N}^2)$, respectively, and consider uniform convergence of the neural networks that result from the extension in the sequence spaces. when $n = 1$, by (2.2) and the induction hypothesis, for all $x \in \mathbb{R}^d$ we obtain that $\|\mathcal{N}_{k+1}(x)\| \le \|\sigma(P_\mu(\hat{W}_{k+1}\mathcal{N}_k(x)) + b_{k+1}) - \sigma(P_\mu(0_{k+1}))\| + \|\sigma(P_\mu(0_{k+1}))\|$. throughout this section, we let $l \in \mathbb{N}$ be fixed and suppose that the weight matrices satisfy $W_1 \in \mathbb{R}^{l \times s}$ and $W_n \in \mathbb{R}^{l \times l}$ for $n \ge 2$, and that the bias vectors satisfy $b_n \in \mathbb{R}^{l}$, $n \in \mathbb{N}$. uniform convergence of deep neural networks with the average pooling and a fixed width in the vector norms $\|\cdot\|_p$, for $1 \le p \le +\infty$, and that of deep neural networks with the max pooling in the vector norm $\|\cdot\|_\infty$, were established in prior work, together with the bound $\|P^m_\mu x - P^m_\mu y\|_p \le (\mu+1)^{1/p}\|x - y\|_p$ for all $x, y \in \mathbb{R}^{l+\mu}$. we say that a sequence of vector-valued functions $\mathcal{N}_n : \mathbb{R}^s \to \mathbb{R}^{m_n}$, $n \in \mathbb{N}$, converges uniformly to a function $\mathcal{N} \in C_l(D)$ if the sequence of the augmented vector-valued functions $\tilde{\mathcal{N}}_n(x)$, $n \in \mathbb{N}$, converges uniformly to $\mathcal{N}$ in the space $C_l(D)$ in a norm on $\mathbb{R}^l$. suppose that $\sigma : \mathbb{R} \to \mathbb{R}$ is lipschitz continuous with the lipschitz constant $L$, $D \subset \mathbb{R}^s$ is bounded, $l := \sup\{m_i : i \in \mathbb{N}\} < +\infty$, and $\|\cdot\|$ is the matrix norm induced from a vector norm satisfying the extension invariant condition of section 2. in this case, we suppose that $W_n \in \mathbb{R}^{m_n \times m_{n-1}}$, $b_n \in \mathbb{R}^{m_n}$, $n \in \mathbb{N}$, and a sequence of deep neural networks $\mathcal{N}_n(x)$, $n \in \mathbb{N}$, is defined as in section 2. we say that a sequence of vector-valued functions $\mathcal{N}_n : \mathbb{R}^s \to \mathbb{R}^{m_n}$, $n \in \mathbb{N}$, converges uniformly in $D$ to a function $\mathcal{N} : \mathbb{R}^s \to \ell^p(\mathbb{N})$, for $1 \le p \le \infty$, if the sequence of the extended functions $\tilde{\mathcal{N}}_n : \mathbb{R}^s \to \ell^p(\mathbb{N})$, $n \in \mathbb{N}$, converges uniformly to $\mathcal{N} \in C_{\ell^p(\mathbb{N})}(D)$. we obtain that $\|Wx\|_{\ell^p(\mathbb{N})} = \|Wx\|_p$ for all $x := (x, x') \in \ell^p(\mathbb{N})$ with $x \in \mathbb{R}^\nu$, $x' \in \ell^p(\mathbb{N})$. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/737.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/737.txt new file mode 100644 index 0000000000000000000000000000000000000000..dd8bd5ea0007b7f2ca793e4c033ac13b891bd0dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/737.txt @@ -0,0 +1 @@ +backpropagation, a widely successful learning rule for artificial neural networks, has been instrumental in advancing deep learning. nevertheless, it presents significant challenges, including the intricate backward pass mechanism, which complicates training parallelism due to communication bottlenecks. in addition, it lacks biological plausibility, further limiting its practical utility. feedback alignment (fa) is an alternative learning rule that alleviates the computational complexities and bio-plausibility issues associated with backpropagation (lillicrap et al., 2016). fa replaces the backward pass with random feedback matrices, promising a more straightforward approach to training neural networks. despite its advantages, the understanding of fa's underlying principles, particularly in the context of deep neural networks, remains elusive. this gap in knowledge motivates our current investigation, which seeks to unravel the inherent laws that govern the learning dynamics under fa. our work makes several significant contributions.
firstly, we establish a set of conservation laws for the learning dynamics under fa, elucidating the implicit bias exhibited by this learning rule-a bias distinct from that of backpropagation. secondly, these conservation laws enable us to identify a sufficient condition for layer-wise alignment with feedback matrices. lastly, we provide evidence that twolayer linear networks trained with fa converge to a global optimum.we believe that our results will have broad implications for future research and practical applications. by quantifying the properties of alternative learning rules like fa, our analysis provides valuable insights that can inform both theoretical advancements and the design of more efficient and biologically plausible alternatives to backpropagation. firstly, we establish a set of conservation laws for the learning dynamics under fa, elucidating the implicit bias exhibited by this learning rule-a bias distinct from that of backpropagation., 2021;song et al., 2021;launay et al. to the best of our knowledge, this is the first result establishing layer-wise alignment for a non-linear network trained with feedback alignment, paving the way for future research in layer-wise alignment in more general settings such as wide neural networks or for other learning rules. each layer is associated with an activation function ϕ and a weight matrix w l ∈ r n l+1 ×n l .for a given input x ∈ r n0 , the pre-activation h l and the activation a l of layer l are computed recursively as follows:.in fa, the feedback weights, denoted as b l ∈ r n l ×n l+1 , are fixed random matrices independent of w l .where l(f ) is the loss function applied to the network, and ∇ a l l is the gradient of l with respect to the final layer activation a l .we note that unlike in backpropagation, the feedback matrices b l are not tied to the feedforward weights w l . we plot the ratio launay et al. we specifically aimed to observe the fa learning dynamics, understanding its implicit bias and ultimately, its convergence to the global optimum in over-parameterized two-layer linear networks.in particular, we find the (nearly) exact conservation of layer-wise in non-linear networks updating with the feedback alignment learning rule to be compelling. overall, our results connecting layer-wise alignment with convergence in linear-models suggest that layer-wise alignment may also be a useful tool for analyzing learning dynamics in wide neural networks. for a (leaky) relu layer network learning with a differentiable loss such as l : r m → r m → r ≥0 where we take δ l (t) = ∇ a l l as the gradient at time t. this can be defined as the matrix, x + = lim λ→0 + x t (xx t + λi) -1 also observe that there is a unique minimizer for the regularized problem, min w l(w) + λ∥w∥ 2 2 with value w λ = x t (xx t + λi) -1 y. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/738.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/738.txt new file mode 100644 index 0000000000000000000000000000000000000000..8fed0326f7220a729befaab8d081b570d327d2d3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/738.txt @@ -0,0 +1 @@ + to address this limitation, explainable artificial intelligence (xai) has emerged as a promising approach to develop interpretable machine learning models for medical diagnosis. 
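returning to the feedback-alignment rule described above, where the feedback weights $b_l$ are fixed random matrices not tied to the feedforward weights $w_l$, the sketch below contrasts the fa update with backpropagation for a two-layer network. the dimensions, learning rate, and toy regression data are illustrative assumptions.

```python
# minimal sketch of the feedback-alignment (fa) update described above for a
# two-layer network: the backward pass uses a fixed random matrix B instead of
# W2^T. dimensions, data, and learning rate are illustrative assumptions.
import numpy as np

rng = np.random.default_rng(3)
n0, n1, n2, batch = 10, 32, 1, 64

X = rng.normal(size=(batch, n0))
y = X @ rng.normal(size=(n0, n2))               # toy regression targets

W1 = rng.normal(size=(n0, n1)) * 0.1
W2 = rng.normal(size=(n1, n2)) * 0.1
B = rng.normal(size=(n2, n1)) * 0.1             # fixed random feedback matrix

lr = 0.05
for step in range(500):
    h = np.maximum(X @ W1, 0.0)                 # hidden activation (relu)
    out = h @ W2
    e = out - y                                 # d loss / d out for mse/2

    dW2 = h.T @ e / batch
    dh = (e @ B) * (h > 0)                      # fa: error goes through B, not W2.T
    dW1 = X.T @ dh / batch

    W1 -= lr * dW1
    W2 -= lr * dW2

final_mse = float(np.mean((np.maximum(X @ W1, 0.0) @ W2 - y) ** 2))
print(final_mse)                                # small after training on this toy task
```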
by emphasizing the significance of interpretability and addressing its challenges, we hope to foster trust and understanding between healthcare professionals and machine learning models, ultimately advancing the field of medical diagnosis and improving patient outcomes. for instance, xai has been utilized to explain the predictions of deep learning models in medical image analysis, enabling clinicians to trust and understand the models' outputs. xai techniques aim to provide insights into the decisionmaking process of black-box models, allowing clinicians to understand the underlying features, relationships, or rules used for predictions. deep learning models, such as convolutional neural networks (cnns) or recurrent neural networks (rnns), can also be utilized for feature importance analysis by examining the gradients of the model's output with respect to the input features. the results showed that lime was able to identify the features that were most important for the model's predictions, and that this information was helpful for clinicians in understanding the model's decisions.in medical diagnosis, interpretability is essential for ensuring that clinicians trust and use machine learning models. shapley values and other model-agnostic approaches can help to improve the interpretability of machine learning models in medical diagnosis, which can lead to better patient care.there are a number of different visualization techniques that can be used to interpret machine learning models in medical diagnosis.these are just a few of the many visualization techniques that can be used to interpret machine learning models in medical diagnosis. these metrics assess the model's diagnostic performance and can be used to compare the performance of interpretable models against black-box models.some commonly used xai techniques in medical diagnosis include feature importance methods, such as permutation feature importance and partial dependence plots, decision trees, rule-based systems, local interpretable model-agnostic explanations (lime), shapley values, and attention mechanisms. we discussed various xai techniques, including visualizations, decision trees, rule-based systems, lime, and shapley values, highlighting their effectiveness in providing interpretable insights into the structure and behavior of medical diagnosis models. by providing explanations and interpretability, xai techniques empower healthcare professionals to understand and trust the decisions made by machine learning models. xai has the potential to revolutionize medical practice by improving the understanding of complex machine learning models, enabling personalized medicine, and facilitating evidence-based decisionmaking. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/739.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/739.txt new file mode 100644 index 0000000000000000000000000000000000000000..80be9092aece26eb74d654d0a3f5c9066959a927 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/739.txt @@ -0,0 +1 @@ +the development, implementation, and maintenance of credit risk models are subject to stringent regulations (e.g. , , ). consequently, banking institutions employ teams of experts who exercise utmost caution in constructing high-quality models. these models undergo a comprehensive process of verification, assessment, and validation. 
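among the xai techniques listed above, permutation feature importance is one of the simplest to implement. the sketch below shuffles one feature at a time and records the drop in accuracy; the synthetic "clinical" data and the logistic-regression model are illustrative stand-ins, not part of the reviewed studies.

```python
# minimal sketch of permutation feature importance, one of the xai techniques
# named above: shuffle one feature at a time and measure the drop in accuracy.
# the synthetic data and the logistic-regression model are illustrative.
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(4)
X = rng.normal(size=(500, 5))                      # 5 synthetic clinical features
y = (X[:, 0] + 2.0 * X[:, 2] + 0.1 * rng.normal(size=500) > 0).astype(int)

X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)
model = LogisticRegression().fit(X_tr, y_tr)
base = model.score(X_te, y_te)

for j in range(X.shape[1]):
    X_perm = X_te.copy()
    X_perm[:, j] = rng.permutation(X_perm[:, j])   # break feature j
    drop = base - model.score(X_perm, y_te)
    print(f"feature {j}: importance ~ {drop:.3f}") # features 0 and 2 dominate
```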
to ensure alignment with the bank's lending and collection processes, the model structure and data undergo scrutiny and challenges from the bank's business units. ongoing monitoring of model performance allows for the identification of necessary parameter adjustments over time.furthermore, an independent validation team conducts thorough analyses of all aspects of the modeling process, including verification of modeling codes and statistical results. the internal bank audit serves to verify compliance with internal policies, procedures, and risk management practices. finally, the regulator assesses the model's fitness for its intended purposes through a comprehensive evaluation or review of its accuracy, robustness, and adherence to regulatory requirements.the requirement to monitor, verify, and document the performance of internal ratings-based (irb) models provides a retrospective view of past issues encountered during the modeling process. these verification activities are meticulously documented in monitoring and validation reports, which hold particular significance for our study. validation reports bear similarities to referee reports in scientific papers, as independent reviewers-model validators-assess the clarity of documentation, design of the model, and the statistical methods employed in credit risk prediction. it can be assumed, therefore, that specialists in the field of credit risk modeling are actively involved in the development and validation of such models. this raises the intriguing question of the challenges faced by model developers within this sophisticated and specialized environment.this paper aims to investigate how textual information contained in validation reports of credit risk models can be leveraged to identify and categorize problems encountered within specific dimensions. for instance, if a credit risk model exhibits poor predictive power, as indicated by a low out-of-sample auc (area under the curve) statistic, this would be classified as an issue within the model output dimension. similarly, an unclear description of the modeling process would be attributed to a documentation flaw. moreover, the presence of numerous missing values in the explanatory variables of the modeling dataset would be addressed as an issue within the model input dimension.our study focuses on statistical models developed under the basel ii guidelines, with a specific emphasis on the internal ratings-based (irb) approach. the irb approach involves the use of credit risk models developed internally by banks for calculating regulatory capital. factors motivating our choice to investigate these models include standardization and comparability of irb models, stringent regulatory requirements affecting general quality of these models, and availability of validation reports written by independent reviewers.the paper is structured as follows: section 2 provides a literature review, section 3 presents the data description and preprocessing techniques employed, section 4 outlines the methodology, section 5 presents the results, and finally, section 6 concludes the paper. 
for instance, if a credit risk model exhibits poor predictive power, as indicated by a low out-of-sample auc (area under the curve) statistic, this would be classified as an issue within the model output dimension.in this context, our framework represents one of the pioneering efforts to apply clustering techniques to data from one of europe's largest bank groups, aiming to explore the suitability of existing dimension applications and potential extensions in credit risk model validation. newly developed models undergo a comprehensive pre-approval validation, which entails thorough checks across all model dimensions, including model documentation, design, use, and implementation. subsequently, during the obligatory annual periodical validation activities, the focus mainly shifts to the model input, model environment, and model output dimensions.each individual finding record within our dataset contains a title (providing a concise description of the finding), a description (explaining the issues identified in the investigated model), the finding date (indicating when the finding was raised), the person responsible for addressing the finding (person to act), the action plan (planned activities to rectify the issue), and the due date (mandatory deadline for resolving the finding). documentation: pertaining to the description of the model, encompassing technical model documentation, implementation specification documents, supplementary documents, modeling codes, and datasets. model environment: focused on ensuring the alignment of the developed model with its intended application scope, including data representing the model scope, internal processes, and external environment. model design: covers the statistical specification of the model, applied estimation and calibration methodologies, statistical tests used to assess model adequacy, and related topics. margin of conservatism: a technical dimension associated with the appropriate calculation of an additional level of conservatism beyond the best estimate of predicted credit risk in a credit risk model.when employing the approach that assigns the predicted dimension based on the most frequent dimension within the cluster to which an observation belongs, the dimensions of impact assessment, margin of conservatism, model design, and model input achieved predictability rates above 60% (table2). however, for some dimensions like model environment, model implementation, and model use, the accuracy was always 0%, indicating that this method is probably not appropriate for underrepresented dimensions. in this case accuracy for model environment, model implementation, and model use was also small (around 14-20%) but was significantly higher than zero. generally, both prediction methods succeeded in forecasting impact assessment, and margin of conservatism but failed to accurately predict the dimensions of model environment, model implementation, and model use in the findings.likewise, we identified "perform" for the model output dimension, "moc" for the margin of conservatism, "impact" and "calculate" for the model impact dimension, "isd" and "implementation" for the model implementation dimension, and "scope" and "representativeness" for the model environment dimension. 
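the cluster-then-majority-vote prediction described above can be sketched with tf-idf vectors and k-means: findings are clustered, and each cluster is labelled with its most frequent dimension. the toy finding texts and dimension labels below are invented placeholders, not records from the bank's dataset.

```python
# minimal sketch of the clustering-based dimension prediction described above:
# tf-idf vectors of finding texts, k-means clusters, and the majority dimension
# per cluster as the prediction. the findings and labels are invented placeholders.
from collections import Counter
from sklearn.cluster import KMeans
from sklearn.feature_extraction.text import TfidfVectorizer

findings = [
    "out-of-sample auc below threshold, weak discriminatory power",
    "model performance deteriorates on recent application vintages",
    "missing values in several explanatory variables of the modelling dataset",
    "input data not representative of the application scope",
    "technical documentation does not describe the calibration step",
    "implementation specification document is outdated",
]
dimensions = ["model output", "model output", "model input",
              "model environment", "documentation", "documentation"]

X = TfidfVectorizer().fit_transform(findings)
labels = KMeans(n_clusters=3, n_init=10, random_state=0).fit_predict(X)

# the majority dimension within each cluster becomes the predicted dimension
for c in set(labels):
    members = [dimensions[i] for i in range(len(findings)) if labels[i] == c]
    print(c, Counter(members).most_common(1)[0][0])
```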
furthermore, the dimension of model use may be closely related to model design, such as when predictive variables of an irb model cannot be easily employed in credit decision-making, and to model documentation, such as when potential applications of the irb model are not thoroughly described. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/74.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/74.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4e24fe5bab050bfe666e122aeab12390ceeedcf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/74.txt @@ -0,0 +1 @@ +cost-sensitive learning is relevant in many real-world classification problems, where different misclassification errors incur different costs. a prominent example is the field of medicine, where misdiagnosing an ill patient for a healthy one (a false negative) entails delayed treatment and potentially life-threatening consequences, while an error in the opposite direction (a false positive) would incur unnecessary medical examination costs and stress for the patient. cost-sensitive classifiers can account for the differences in costs not only between different classes, but also between data instances, making instancedependent cost-sensitive classification decisions.many cost-sensitive classifiers employ ensemble methods, which combine predictions from several classifiers to obtain better generalisation performance. superiority of ensembles over individual classifiers is very well known and has been extensively studied ( ). most cost-sensitive classification ensembles are homogeneous in nature, meaning their components are instantiated using the same learning algorithm.stacked generalization or stacking is a well known and widely applied heterogeneous ensemble, where the predictions of classifiers produced by different learning algorithms (the base-learners) are used as training inputs to another learning algorithm (the meta-learner) to produce a meta classifier, which makes the final classification decision. in the literature, the baseand metalevels of stacking are also referred to level-0 and level-1.homogeneous cost-sensitive ensembles such as cost-sensitive boosting and bagging are widely studied and have been shown very successful . examples of cost-sensitive stacking, on the other hand, are scarce and unsystematic, representing for the most part applications to single domains, where the classifiers are trained on synthetic, class-dependent costs and are evaluated with cost-insensitive performance metrics. for a discussion on the importance of real costs for a proper evaluation see the work by . in fact, there is currently no consensus as to how a cost-sensitive stacking ensemble is to be composed and at what stage (level-0 or level-1) cost-sensitive decision-making should be used. this can be clearly seen in table 1, which gives an overview of existing cost-sensitive stacking literature. stacking is typically made cost-sensitive simply through the application of a cost-sensitive classifier either at level-0 (cs-cis), level-1 (cis-cs) or at both levels of of our knowledge, this study is the only example where real instance-dependent costs were used in model training. models were evaluated using a cost-sensitive metric called the savings score, proposed in .the only study to date that considers all three different cost-sensitive stacking setups is one by on the application domain of software defect prediction. 
the misclassification costs were selected based on a literature however the authors emphasised that they treated costs as one of the hyperparameters of the classifier, which, we must note, is incorrect, as was previously discussed in . the experiments are run on 15 datasets using the same class-dependent cost matrix on all. balanced error-based metrics were used for evaluation together with cost-based evaluation metrics.identifying real misclassification costs is a complex task, which for many applications may prove too difficult to define and compute. most studies resort to artificially generated misclassification costs (see for a discussion on why this is inappropriate) and error-based evaluation metrics are typically employed to assess generalisation performance of cost-sensitive stacking. examples of metrics used include the auc, the arithmetic or geometric mean of class-specific accuracies, the f-measure, and the matthew's correlation coefficient (mcc). all of these metrics assume equal misclassification costs, and the f-measure does not incorporate the performance on the negative class, so using these metrics is not compatible with cost-sensitive learning .one of the challenges of stacking is the choice of the learning algorithms for the ensemble. earlier studies proposed to use linear regression to combine level-0 inputs , however wolpert does not impose any particular restrictions on which algorithm to use in level-1, and he believed that his famous 'no free lunch theorem' applies to the meta-learner as well. for the overview of which learning algorithms were used in cost-sensitive stacking ensembles to date we refer our reader to the summary table 1. stacking is typically made cost-sensitive simply through the application of a cost-sensitive classifier either at level-0 (cs-cis), level-1 (cis-cs) or at both levels of of our knowledge, this study is the only example where real instance-dependent costs were used in model training. we find that type-3 stacking ranks best and is significantly different from both type-2 and type-1 for all algorithms except svm, where the difference is only significant for the comparison between type-3 and type-1, but no conclusions can be made regarding the differences between ensembles of type-3 and type-2. similar to what we observed above with unweighted stacking, we can reject the null that type-3 stacking and its mec-weighted variants are equal in performance to type-1 stacking and variants. for stacking ensembles with knn in level-1 type-3 and type-3 acc classifiers are not significantly different from type-1 exp and type-1 sq, while for svm no significant differences were detected between type-3 exp and type-3 sq and other type-1 ensembles. we therefore perform the wilcoxon rank sum test for all combinations of pairwise comparisons of stacking algorithms of type-3 vs type-1 and of type-3 vs type-2 under the null hypothesis that the median of the rank differences between the two groups is equal to zero. we found that the differences between all type-3 mec-weighted stacking variants and type-2 stacking ensembles were not significant. however, type-3 unweighted stacking was significantly different from all type-2 stacking variants, see tablea. 
it is, however, an exception, and we must conclude that introducing cost-sensitivity through mec-weights into level-1 of stacking has no positive impact on type-1 stacking performance.for the setup of type-3 where both level-0 and level-1 of stacking are made cost-sensitive using dmecc, the null could not be rejected for adaboost, logistic regression and knn.outcome 1: using cost-sensitive models in both levels of stacking is recommended the results presented in this paper, have demonstrated that there is a statistically significant difference in performance between the three different stacking setups considered in our experiments, namely cis-cs, cs-cis, and cs-cs. contrary to the majority of cost-sensitive stacking papers that assumed that one level of cost-sensitive decision-making is sufficient, our experiments demonstrate that stacking models where the dmecc was applied in both levels of stacking achieved the highest ranking. another interesting avenue for future research would be investigating homogeneous cost-sensitive stacking, an example of which was proposed inusing cost-sensitive decision trees as base classifiers and cost-sensitive logistic regression as level-1 classifier.outcome 2: cost-insensitive classifiers do not perform well when costs are known, even in stacking as was previously shown incost-insensitive classifiers, having no way to account for differences in misclassification costs, typically perform worse than cost-sensitive models when evaluated using cost-based performance metrics. we also found that applying mec-weights to the training inputs of the level-1 classifier in stacking did not significantly change the performance of stacking models where the level-1 algorithm applied the default decision threshold to classify. moreover, mec-weighted stacking models where both levels were cost-sensitive performed worse than the unweighted stacking of the same type, indicating that two levels of cost-sensitivity is sufficient for good performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/740.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/740.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6affbe0cce298e32121941f12717bef4a61bd00 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/740.txt @@ -0,0 +1 @@ +channel capacity refers to the maximum data transmission rate achievable with an extremely low probability of errors .. shannon (1948) first tackled the capacity of a single-inputsingle-output (siso) additive white gaussian noise (awgn) channel. the introduction of powerful space-time coding schemes ( has unveiled the potential of multiple-inputmultiple-output (mimo) systems to surpass traditional techniques, presenting academia and industry with a means to achieve significantly higher channel capacity. this innovative approach provides a unique solution to meet the escalating demand for high-performance next-generation wireless communications. in this paper, mimo satellite communication (satcom) is considered where the receiver is a linear array antenna. we aim to find the channel capacity for low earth orbiting satellites whose positions are unknown to the terrestrial receiver. in low orbiting satellites, line of sight (los) becomes more dominant and the path loss reduces. although its efficiency in a rich scattering environment has already been demonstrated , less is studied under this new scenario. 
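the stacking results above hinge on applying a cost-sensitive decision rule at level-0 and/or level-1. the sketch below shows a generic instance-dependent minimum-expected-cost rule; the probabilities and per-instance costs are toy values, and this is only a stand-in for the dmecc classifier named in the text, whose exact form is not given in the excerpt.

```python
# minimal sketch of instance-dependent, minimum-expected-cost classification,
# the kind of cost-sensitive decision applied in the stacking setups above.
# probabilities and costs are toy values; this generic rule is a stand-in for dmecc.
import numpy as np

p1 = np.array([0.30, 0.55, 0.80])         # predicted P(y=1) from a (meta-)classifier
cost_fn = np.array([100.0, 10.0, 500.0])  # instance-dependent false-negative cost
cost_fp = np.array([5.0, 5.0, 5.0])       # instance-dependent false-positive cost

expected_cost_if_pred0 = p1 * cost_fn          # predict 0: pay fn cost with prob p1
expected_cost_if_pred1 = (1.0 - p1) * cost_fp  # predict 1: pay fp cost with prob 1-p1

decision = (expected_cost_if_pred1 < expected_cost_if_pred0).astype(int)
print(decision)   # [1 1 1]: large false-negative costs push every decision to class 1
```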
the typical way to analyze channel capacity of mimo systems is to find eigenvalue distribution for the channel matrix multiplied by its conjugate . in our setup, when the channel is modeled as pure los, the channel matrix h would become a vandermonde matrix . however, for this setup, eigenvalue distribution is unknown and only there have been some studies over asymptotic behavior of it , , . in , , the author found a lower and upper bound for the maximum eigenvalue and presented the channel capacity for a sufficiently large matrices. in this paper, the average channel capacity and outage probability of such channels are analyzed, assuming the receiver has the perfect channel state information (csi). this has been done by approximating the channel capacity and the accuracy of our method is justified with simulations. federated learning in the context of mimo (multiple-input multiple-output) channels refers to a collaborative learning approach where multiple wireless devices jointly train a machine learning model without directly exchanging their raw data.in federated learning, the mimo channel acts as the communication medium between the wireless devices and a central server. instead of transmitting their raw data to the server, the wireless devices locally compute model updates using their own data samples. these updates are then shared with the server over the mimo channel. the server aggregates the updates from multiple devices to improve the global model without accessing the individual data from each device.the mimo channel plays a crucial role in federated learning by enabling efficient and reliable communication between the wireless devices and the central server. the use of multiple antennas in mimo systems allows for increased data throughput and improved channel capacity, facilitating faster and more reliable transmission of model updates in federated learning scenarios .overall, federated learning in mimo channels combines the benefits of collaborative machine learning and the capabilities of mimo systems to enable privacy-preserving and distributed model training across a network of wireless devices . the typical way to analyze channel capacity of mimo systems is to find eigenvalue distribution for the channel matrix multiplied by its conjugate. in this paper, the average channel capacity and outage probability of such channels are analyzed, assuming the receiver has the perfect channel state information (csi). the first moment is a constant number, and therefore the second and the third moments play a substantial role in determining the distribution of channel capacity. also, we define new functions f l , 1 ≤ l ≤ n t , as shown in.now, we note that all n t terms in each of f l 's, where 1 ≤ l ≤ n t , are mutually independent. it is seen that all f 1l terms are mutually independent and also have them same distribution; therefore the central limit theorem (clt) is hold for f 1 and so is for other f l 's.there are n 2 t -n t and n t terms in ω with the same expected value as f 12 and f 11 , respectively.the correlated terms between f 1 and f 2 are the pairs (f 11 , f 21 ), (f 12 , f 22 ),..to evaluate equation (10), first we find e(f 1n t f 2n t ) and then e(f 1n t )e(f 2n t ).• e(f 1n t f 2n t ) first, we evaluate the terms in which coefficient s is common, i.now, we return back to find e(f 1n t f 2n t ). we break down the terms in f 1n t and f 2n t as shown in tablei. 
f 1n t and f 2n t and their corresponding terms are written in each column.tablei: the terms in f 1n t and f 2n t . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/741.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/741.txt new file mode 100644 index 0000000000000000000000000000000000000000..591fe96908116af7bc61820b984c019f46c55ee8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/741.txt @@ -0,0 +1 @@ +in this section, we introduce the proposed anomaly detector with global and loacal information algorithm(galdetector). fig. 1 illustrates the overall framework of the proposed method, which consists of three main stages, i.e., global and local score calculation, potential anomalies selection and weighted detector construction.in the first stage, we will calculate both local sparsity scores and global normal scores. for local sparsity scores, a local density integration algorithm based on partial identification is utilized to calculate the local sparsity scores of all unlabeled samples. and for global normal scores, a clustering operation is used to identify multiple patterns of normal samples and set them into different clusters, then the global normal scores of all samples are calculated based on these clustering centers. in the second stage, we select potential anomalies selection and set weights to all the samples. after the global normal scores of the samples are calculated according to the global normal scores and the local sparsity scores computed above, we set threshold to select the potential anomalies from the unlabeled samples. then we assign corresponding weights to known normal samples and selected potentially anomalous samples based on the global normal scores. in the third stage, a weighted detector construction is built based on previous weights and the model parameters are optimized by training such samples. our model will be employed to identify anomalous on the remaining unlabeled samples.we let κ = r d denotes the sample space and y = {0, 1} represents label space, in which y = 0 is for normal samples and y = 1 is for anomalous samples. the corresponding data set we selected is d = {(x 1 , y 1 ), ..., (x p , y p ), x p+1 , ..., x n }. among all given n samples, the first p samples are known normal ones d n = {(x 1 , y 1 ), ..., (x p , y p )}, and the leftover (n -p) samples are unlabeled ones d u = {x p+1 , x p+2 , ..., x n }. the specific motivation is to extract local anomaly information while making full use of the observed normal samples to obtain global normal information. in the first stage, the observed normal samples are clusteredto learn different patterns of normal samples. for example, lofis a typical local density-based anomaly detection algorithm; iforestis a random forest based algorithm, which utilizes the distance of leaf nodes from the root node in each tree to portray the anomalous degree; pcais a typical dimension reduction-based algorithm, it projects the samples into a low-dimensional space and calculates the deviation in all directions to calculate the anomaly scores. the traditional pu learning can be regarded as a two-stage task, in the first step we select reliable normal samples from unlabeled samples, and in the second step we train the classifier based on known abnormal samples and reliable normal samples. 
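the scoring stage described above can be sketched as follows: cluster the observed normal samples, score every unlabeled sample globally by its distance to the nearest normal cluster centre and locally by a sparsity score, then threshold the combined score to pick potential anomalies. the simple k-nn distance below is only a stand-in for the partial-identification density score, and the data, k, and threshold are illustrative.

```python
# minimal sketch of the global + local scoring described above. the k-nn based
# local sparsity score stands in for the partial-identification density score;
# the synthetic data and the 0.85 quantile threshold are illustrative.
import numpy as np
from sklearn.cluster import KMeans
from sklearn.neighbors import NearestNeighbors

rng = np.random.default_rng(5)
normals = np.vstack([rng.normal(0, 1, size=(100, 2)),
                     rng.normal(6, 1, size=(100, 2))])      # two normal patterns
unlabeled = np.vstack([rng.normal(0, 1, size=(50, 2)),
                       rng.uniform(-8, 14, size=(10, 2))])  # mostly normal + outliers

centres = KMeans(n_clusters=2, n_init=10, random_state=0).fit(normals).cluster_centers_
global_score = np.min(np.linalg.norm(unlabeled[:, None, :] - centres[None], axis=2), axis=1)

dist, _ = NearestNeighbors(n_neighbors=5).fit(unlabeled).kneighbors(unlabeled)
local_score = dist[:, 1:].mean(axis=1)                      # sparse region -> large score

combined = global_score + local_score
potential_anomalies = np.where(combined > np.quantile(combined, 0.85))[0]
print(potential_anomalies)
```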
biased learningtreats the set of unlabeled samples as a set of normal samples with noise and assigns relatively high and low regularization parameters to known abnormal and normal samples. nevertheless, there is no mature method that can integrate global and local information of labeled normal samples and unlabeled samples, such method will be explored next in this paper. and for global normal scores, a clustering operation is used to identify multiple patterns of normal samples and set them into different clusters, then the global normal scores of all samples are calculated based on these clustering centers. after the global normal scores of the samples are calculated according to the global normal scores and the local sparsity scores computed above, we set threshold to select the potential anomalies from the unlabeled samples. then we assign corresponding weights to known normal samples and selected potentially anomalous samples based on the global normal scores. to calculate the global normal score, we need to cluster the known normal samples so as to learn the corresponding normal patterns. in stage two, we combine the local sparsity score and the global normal score after the two scores are calculated separately, so that we can obtain the anomaly characteristics of both global and local information of the sample points. as for known normal samples, the imbalance in the number of known normal samples and potential anomalies is taken into account, and the weights of the known normal samples are set uniformly to ϵ ∈ . for the semi-supervised algorithms, where pul, adoa, and cpi all select 80% of the total samples as unlabeled samples and 10% of the anomalous samples as known anomalies for training.we propose a method called galdetector, an anomaly detection algorithm based on observed normal samples, which concentrates on global and local information simultaneously. galdetector starts from normal samples and takes into account both global and local information of samples in the spatial structure, then the corresponding weighted detector is used for training to identify anomalies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/742.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/742.txt new file mode 100644 index 0000000000000000000000000000000000000000..7a619e50ebc700f507f6db236914f51fa918394b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/742.txt @@ -0,0 +1 @@ +keyword spotting, which is the ability to recognize speech commands or wake-words, is getting popular among batterypowered smart audio sensors . because a kws detection pipeline is intended to run continuously on the target system, the reduction of computation and memory costs of deep learning based kws algorithms have been extensively investigated over the last years . the design of a custom kws algorithm typically demands the training of a model on a dataset of collected user-defined keywords . despite its effectiveness, such a design process is subject to the availability of computing resources and speech recordings, preventing users from obtaining a custom solution in a short time, e.g., on-device.few-shot learning (fsl) provides a viable solution to deal with the scarcity of abundant user-defined keywords data. in the field of kws, there are several recent fsl methods that rely on the prototypical network (protonet) concept . during system setup in the target scenario, users are asked to provide a few enrollment samples for each keyword. 
these reference speech data are then processed through a trained feature encoder to produce a set of feature vectors. a class prototype is then computed as the mean of the feature vectors for each user-defined keyword. when it comes to inference, the distances between the output feature vector of a test sample, or embedding, and the class prototypes are calculated. the classification output is determined by the shortest distance. to gain high accuracy, the protonet's feature encoder is trained to cluster the embeddings of speech samples belonging to the same class. at the same time, feature vectors of different classes are forced to be distant according to a given distance metric, e.g. euclidean.with respect to fsl techniques involving fine-tuning on a few labeled data , the fit of a prototype-based classifier is a low-cost option that can be easily implemented on-device without the need for backpropagation. at present, however, the protonet-based fsl approaches have been primarily assessed for closed-set classification, i.e. the test categories match those of the training set . in contrast, we argue that a customized kws method shall work in an open-set setting, to distinguish user-defined keywords from unknown speech utterances. to this aim, the protonet approach has been recently extended by jointly training a generator of dummy prototypes for the unknown class . unfortunately, this work makes use of training data sampled from the same distribution of the target data, i.e. different class subsets of the google speech command dataset. in addition, other works showed angular variants of the prototypical loss to achieve the highest accuracy for closed-set fsl classification or efficiently learning the feature encoder using the triplet loss but not in a few-shot setting . hence, we denoted the present literature on few-shot open-set learning kws solutions to be highly fragmented and it is missing clear design guidelines for on-device kws customization.to bridge this gap, this paper contributes an evaluation framework for fsl architectures composed by a feature encoder and a prototype-based open-set classifier initialized with few-shot samples. more in detail, we leverage the recent multilingual spoken words corpus (mswc) dataset to train a feature extractor using the prototypical loss, its angular variant or the triplet loss. the evaluation is performed on the google speech command (gsc) dataset, which is partitioned between a collection of target keywords, i.e. the positive set, and a negative set of unknown keywords. in our analysis we compare the open-set classifier featuring a dummy proto generator with either a simple variant that computes the unknown-class prototype using few random words or an alternative based on open-max , which statistically models the distance of data samples from the class prototypes to estimate if a test sample can fit any of the known classes. when considering depthwise-separable convolutional neural network (dscnn) encoders tailored for low-power embedded systems , we show that a training process using the triplet loss and normalized features brings a superior accuracy than a protonet-based method for open-set fsl classification under a fixed training epoch budget. our code is available at: https://github.com/ mrusci/ondevice-fewshot-kws. these reference speech data are then processed through a trained feature encoder to produce a set of feature vectors. 
to gain high accuracy, the protonet's feature encoder is trained to cluster the embeddings of speech samples belonging to the same class. in addition, other works showed angular variants of the prototypical loss to achieve the highest accuracy for closed-set fsl classificationor efficiently learning the feature encoder using the triplet loss but not in a few-shot setting.to bridge this gap, this paper contributes an evaluation framework for fsl architectures composed by a feature encoder and a prototype-based open-set classifier initialized with few-shot samples. more in detail, we leverage the recent multilingual spoken words corpus (mswc) datasetto train a feature extractor using the prototypical loss, its angular variant or the triplet loss. in our analysis we compare the open-set classifier featuring a dummy proto generator with either a simple variant that computes the unknown-class prototype using few random words or an alternative based on open-max, which statistically models the distance of data samples from the class prototypes to estimate if a test sample can fit any of the known classes. when considering depthwise-separable convolutional neural network (dscnn) encoders tailored for low-power embedded systems, we show that a training process using the triplet loss and normalized features brings a superior accuracy than a protonet-based method for open-set fsl classification under a fixed training epoch budget.differently fromthat proposed to use maml to train an effective representation for fsl on kws, authors ofleveraged transfer learning on a large encoder model (effi-cientnet) to train a classifier on the target scenario. after training a resnet15 using data from 1000 classes of librispeech, the authors proposed to fine-tune the feature extractor on the categories of the test dataset to gain an accuracy of up to 96% for a 10-shot closed-set problem. in parallel,proposed the triplet loss to train a resnet15-based feature extractor on lib-rispeech, which showed a top score of 97% on gsc using a knn with all the training data, i. this class of works focused on fsl training methodologies but lacks clear design guidelines and evaluation in an open-set scenario for customized kws solutions that we address. at test time, an open-set classifier is plugged on top of the feature extractor and initialized with few-shot utterances taken from the target scenario (fig. a simple open-set variant estimates the c0 prototype for the unknown class using k random samples taken from the target domain but not belonging to the user classes. at training time, data samples from a subset of categories in the episodic batch are assigned to the unknown class to jointly learn the feature extractor and the generator g. among the analyzed feature extractor variants trained on the mswc dataset, we showed the triplet loss applied on normalized output features of a dscnn-l model leads to the highest accuracy when coupled with an openncm classifier, surpassing solutions based on prototypical networks. 
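the prototype-based open-set classifier described above can be written in a few lines: prototypes are the mean embeddings of a few enrollment samples per keyword, a test embedding goes to the nearest prototype, and it is rejected as unknown when that distance exceeds a threshold. the random embeddings below are placeholders for a trained encoder's outputs, and the threshold is illustrative.

```python
# minimal sketch of the prototype-based open-set classifier described above.
# embeddings are random placeholders for a trained encoder; the rejection
# threshold is illustrative.
import numpy as np

rng = np.random.default_rng(6)
dim, shots = 64, 5
keywords = ["go", "stop", "left"]

# few-shot enrollment embeddings per user-defined keyword (placeholder encoder)
enroll = {k: rng.normal(loc=i, size=(shots, dim)) for i, k in enumerate(keywords)}
prototypes = {k: e.mean(axis=0) for k, e in enroll.items()}

def classify(embedding, prototypes, reject_threshold=10.0):
    dists = {k: np.linalg.norm(embedding - p) for k, p in prototypes.items()}
    best = min(dists, key=dists.get)
    return best if dists[best] < reject_threshold else "unknown"

print(classify(rng.normal(loc=1, size=dim), prototypes))   # likely "stop"
print(classify(rng.normal(loc=10, size=dim), prototypes))  # likely "unknown"
```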
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/743.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/743.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6adfa5b313df4e658dde6e6a714841213a54297 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/743.txt @@ -0,0 +1 @@ +recently, several advanced approaches (ying et al., 2019;luo et al., 2020;schlichtkrull et al., 2021;huang et al., 2022;vu & thai, 2020;gui et al., 2022;yuan et al., 2021;schnake et al., 2021;yuan et al., 2020;yu & gao, 2022) have been proposed to explain the predictions of graph neural networks (gnns), and are divided into two categories (yuan et al., 2023), i.e., instance and modellevel explanation. the instance-level aims at critical nodes or subgraphs; on the other hand, modellevel focuses on a more high-level explainability. however, most of existing gnns' explainers are post-hoc and lack the study regarding message-passing selection for gnns' inference. in this paper, we aim at selecting the critical message-passing paths for gnns' aggregations, and thus develop an interpretable gnns' inference paradigm, i.e., msinterpreter, for the task of graph classification. as shown in figure 1, the main process is to build the messagepassing selection scheme (msscheme), and then plug it at the beginning of the existing gnns' baselines to reach selfexplainable inference. the contributions and details are summarized as follows:recently, several advanced approaches(ying et al., 2021;schnake et al. a graph consisting of n nodes can be represented as g = (a, x), where a ∈ r n×n and x ∈ r n×d represent adjacency matrix and node feature matrix respectively. the graph classification task is to learn a function f : g → y that maps graphs to the labels' set.to efficiently obtain the critical message aggregation path for gnns' inference with application to graph classification, we introduce the elaborate msscheme as following. for the vanilla structure base, we compute the edge weight between nodes v i and v j using the intersection over union (iou) of the number of. on the other hand, edge weight regarding node embedding is computed by w emb (v i , v j ) = φ(h vi , h vj ), where h vi and h vj denote the node embeddings of v i and v j via one-layer gnn., msinterpreter, which is beneficial to build an end-to-end framework while training gnns for the task of graph classification.in this paper, we introduce an novel framework to explain the gnns' inference with application to graph classification. firstly, we build a scheme msscheme to analyze the weight factors of messagepassing paths, which is essential to obtain the crucial message aggregations for gnns' inference.• mutag 0 is a molecular dataset for graph classification tasks and consists of 4,337 molecular graphs. we observe that carbon rings are present in both mutagenic and non-mutagenic graphs, but the carbon rings with the chemical groups n h 2 or n o 2 are mutagenic. therefore, the carbon ring can be considered a shared base map, and the two groups n h 2 and n o 2 are the base sequence of the mutagenic map. ba2motifs contains barabasi-albert (ba) base graphs of size 20, and each graph has five node patterns., 2022;chen et al. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/744.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/744.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f99596c056993a916f25c8f3796b23cc163adc4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/744.txt @@ -0,0 +1 @@ +the development of deep learning has led to significant breakthroughs in various fields, including healthcare. one area where it has made a particularly profound impact is in the analysis of electrocardiograms (ecgs) , . ecgs are noninvasive tests that measure the electrical activity of the heart and play a critical role in assessing heart health. however, interpreting ecgs requires extensive education and training , . the integration of deep learning into ecg analysis has ushered in a new era of improved accuracy.in recent years, there has been a surge of research exploring deep learning's potential in ecg diagnosis , . various architectures, such as stacked auto-encoders (sae) , deep belief networks (dbn) , convolutional neural networks (cnn) , and recurrent neural networks (rnn) , have been developed and have shown comparably better performance than manual classifications by experts. however, due to the increasing complexity of ecg data and the need for more accurate and real-time diagnosis, more robust and efficient deep learning architectures are needed.transformers, originally designed for natural language processing tasks, have been introduced to ecg classification. transformers' self-attention mechanism allows for the consideration of the entire sequence of an ecg signal, potentially capturing complex temporal relationships that other architectures might miss. however, there are few comprehensive reviews on the application of transformer architectures to ecg classification. this paper aims to provide a detailed overview of the advances and challenges in applying transformer architectures to ecg classification. we will analyze and summarize the technical underpinnings of transformer models, and their application to ecg data in terms of accuracy, efficiency, significance, and potential challenges. additionally, we will discuss the limitations of the current approaches and the potential improvements to be made on a broader scale for the ecg community in the future. we believe this review will be a valuable resource for researchers and practitioners in the field, shedding light on the novel use of transformer architectures in ecg classification and paving the way for future innovations.this literature review focuses specifically on transformerbased models in the context of electrocardiogram (ecg) interpretation. while conventional machine learning and other deep learning technologies also play important roles in this field, we will briefly introduce the current advancements but will not be discussing them extensively in this review. this is because there are already many excellent reviews that comprehensively cover these methodologies in the context of ecg analysis , , , . our primary discussion and comparative analysis will be reserved for the innovative use of transformer-based models in ecg interpretation. this paper is organized as follows: the current state-of-the-art ecg deep learning models will be summarized in section 2. section 3 discusses some novel deployments of transformers in ecg. in section 4, we present both challenges and opportunities for deep learning in ecg society. 
finally, a brief conclusion is drawn in section 5. however, due to the increasing complexity of ecg data and the need for more accurate and real-time diagnosis, more robust and efficient deep learning architectures are needed. we will analyze and summarize the technical underpinnings of transformer models, and their application to ecg data in terms of accuracy, efficiency, significance, and potential challenges. it is obvious that deep learning models that are trained on single-lead ecg data may not perform as well as those trained on multi-lead data due to the reduced complexity and dimensionality of the single-lead data. bilstm models process ecg signals in both forward and backward directions, providing richer context and improving the model's ability to recognize complex arrhythmias.when training on static ecgs, variations on cnn-based models are frequently used by researchers, as they are adept at extracting spatial information from ecg signals or images. furthermore, 12-lead ecg signals are increasingly common in static ecg dl model trainingbecause they provide the model with more vector information to learn from in a more comprehensive manner, leading to more accurate and reliable results.dynamic ecg models, however, generally employ singlelead ecg due to device limitations in acquiring ecg signals from an exercising object. researchers have extensively used machine learning and deep learning architectures to extract and understand latent features directly from ecg signals, leading to the development of highly effective models for ecg classification tasks,.• transformer is capable of processing various arbitrarylength single or 12-lead ecg signals with different input lengths, making them flexible and adaptable for a wide range of ecg data. • transformer when combined with convolutional neural networks (cnns), can handle both spatial and temporal information in ecg signals, enhancing the model's ability to detect abnormalities and arrhythmias,. a variety of architectures merge convolutional neural networks (cnns) with transformer models, demonstrating the efficacy of combining these two powerful deep learning techniques to capture both local features and long-range dependencies in ecg signals. to the best of our knowledge, transformer-based models are only employed with static ecg data, but researchers focus differently on the level and type of ecg data complexity, with some focusing on single-lead ecg data and others utilizing 12-lead ecg data.after conducting research on conventional and transformerbased deep learning models, it is clear that deep learning has been a well-established area of study in ecg diagnosis that has continued to grow over the years. the disparities in table2highlight the need for further investigation and comparison of different transformer-based models and techniques for ecg analysis, taking into account the nature of the ecg data and the specific classification tasks. given the increasing complexity of ecg data and the need for precise, realtime diagnosis, the exploration of efficient and robust deep learning architectures like transformers has become essential. 
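several of the architectures surveyed above combine a convolutional front-end with a transformer encoder over the resulting sequence. the pytorch sketch below shows that pattern for single-lead ecg classification; all sizes (one lead, 1000 samples, 4 classes, layer widths) are illustrative assumptions rather than any specific reviewed model.

```python
# minimal pytorch sketch of the cnn + transformer pattern discussed above:
# a 1-d convolutional front-end extracts local morphology, a transformer
# encoder models long-range temporal dependencies, and a linear head
# classifies. all sizes are illustrative assumptions.
import torch
import torch.nn as nn

class ECGTransformer(nn.Module):
    def __init__(self, n_classes=4, d_model=64):
        super().__init__()
        self.cnn = nn.Sequential(                      # local feature extractor
            nn.Conv1d(1, d_model, kernel_size=7, stride=2, padding=3),
            nn.ReLU(),
            nn.Conv1d(d_model, d_model, kernel_size=7, stride=2, padding=3),
            nn.ReLU(),
        )
        layer = nn.TransformerEncoderLayer(d_model=d_model, nhead=4,
                                           dim_feedforward=128, batch_first=True)
        self.encoder = nn.TransformerEncoder(layer, num_layers=2)
        self.head = nn.Linear(d_model, n_classes)

    def forward(self, x):                              # x: (batch, 1, time)
        h = self.cnn(x).transpose(1, 2)                # -> (batch, time', d_model)
        h = self.encoder(h)                            # long-range dependencies
        return self.head(h.mean(dim=1))                # pool over time, classify

model = ECGTransformer()
logits = model(torch.randn(8, 1, 1000))                # 8 single-lead ecg segments
print(logits.shape)                                    # torch.Size([8, 4])
```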
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/745.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/745.txt new file mode 100644 index 0000000000000000000000000000000000000000..046afa3bd7e5b92b2bc690b53439aafc0d636d5a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/745.txt @@ -0,0 +1 @@ +with the development of internet-of-things (iot) devices and intelligent hardware, various data are generated on these devices every day. extracting useful knowledge from these distributed data with machine learning (ml) models to benefit data owners becomes necessary and important. federated learning (fl) provides a feasible way for this distributed ml task with a promise of protecting private information without consuming large communication costs. due to this favorable property, fl has been extensively studied and widely applied to many applications, such as virtual keyboard input suggestion and smart healthcare , to name a few.in fl, a commonly used approach to coordinate the collaboration between all participants is federated averaging (fedavg). in detail, the central server broadcasts the model parameter to all participants, i.e., data owners. each participant updates the received model parameter for multiple iterations by the stochastic gradient computed with its local data, and then uploads the updated model parameter to the central server. after receiving the updated model parameters from all participants, the central server broadcasts the averaged model parameters to start the next round. with this learning paradigm, all participants can collaboratively learn an ml model without communicating their raw data. as such, the private information in raw data can be preserved to some extent. meanwhile, since the model is shared and its size is much smaller than the raw data, the communication cost in fl is reduced significantly.along with such an extensive study of fl, federated optimization was born to further address the computation and communication challenges in fedavg. similar to the early hongchang gao and jie wu are with temple university; my t. thai is with university of florida. this submission was accepted to ieee network in january 2023. phase of fl where focus is on the centralized setting, most of the work in this area concentrates on the parameter-server communication topology, where all participants communicate with the central server. for instance, studied the resource and performance optimization in centralized federated learning. this kind of centralized communication topology, unfortunately, may lead to a single-point failure. in particular, when the number of participants is large, communicating with the central server will cause the communication bottleneck on the central server. with the advance of communication technology, such as 5g/6g, providing fast communication and cloud/edge computation through decentralized computation over iot and edge devices , an alternative strategy is to employ the decentralized communication strategy where all participants perform the peer-to-peer (p2p) communication. as such, the communication bottleneck will be alleviated. thus, the decentralized learning paradigm brings new opportunities to the fl development.in fact, decentralized optimization has been extensively studied in both ml and optimization communities for many years. numerous decentralized optimization approaches have been developed for the conventional distributed ml model. 
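the federated averaging loop described above can be summarized in a few lines; the sketch below uses a toy least-squares objective, and the client data, step size, and round counts are illustrative assumptions, not values from the article.

```python
# a minimal sketch of fedavg: broadcast, local sgd steps, upload, average.
import numpy as np

rng = np.random.default_rng(0)
n_clients, dim, local_steps, rounds, lr = 5, 10, 20, 50, 0.05
# each client holds its own (features, targets) pair -- raw data is never shared
clients = [(rng.normal(size=(40, dim)), rng.normal(size=40)) for _ in range(n_clients)]

w = np.zeros(dim)                              # global model kept on the server
for _ in range(rounds):
    updates = []
    for X, y in clients:                       # server broadcasts w to every client
        w_local = w.copy()
        for _ in range(local_steps):           # multiple local sgd iterations
            i = rng.integers(len(y))
            grad = (X[i] @ w_local - y[i]) * X[i]
            w_local -= lr * grad
        updates.append(w_local)                # upload only the updated parameters
    w = np.mean(updates, axis=0)               # server averages the received models
```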
however, fl brings new challenges to the conventional decentralized optimization. just as shown in figure 1, decentralized optimization serves as the bridge between distributed data and fl models. it should address the unique challenges in the model and data, as well as the issues in itself. even though some efforts have been devoted to facilitating decentralized optimization for fl in the past few years, numerous challenges are still untouched. to advance decentralized fl, in this article, we will review the current development of decentralized federated optimization approaches and then discuss the new opportunities in decentralized fl. specifically, this article will focus on the following aspects. • on the model side, how to improve the fl model's generalization performance with decentralized optimization approaches is discussed, pointing out the directions for new algorithmic designs. • on the communication side, various communication issues when applying decentralized optimization approaches to fl and potential techniques for addressing them are systematically discussed. • on the data side, we discuss the current challenges and future directions when designing new decentralized optimization approaches for fl. following this, we introduce the background of federated learning and decentralized optimization. then, we discuss the fundamental challenges and potential techniques in optimization algorithms for decentralized fl. in addition, we introduce challenging issues in communication of decentralized fl. finally, we discuss how to handle different kinds of data in decentralized fl. with the advance of communication technology, such as 5g/6g, providing fast communication and cloud/edge computation through decentralized computation over iot and edge devices, an alternative strategy is to employ the decentralized communication strategy where all participants perform peer-to-peer (p2p) communication. to advance decentralized fl, in this article, we will review the current development of decentralized federated optimization approaches and then discuss the new opportunities in decentralized fl. • on the model side, how to improve the fl model's generalization performance with decentralized optimization approaches is discussed, pointing out the directions for new algorithmic designs. • on the communication side, various communication issues when applying decentralized optimization approaches to fl and potential techniques for addressing them are systematically discussed. different from the aforementioned federated optimization approach, where a central server coordinates all participants, the decentralized optimization approach does not have such a central server; instead, each participant directly communicates with its neighboring participants. in particular, it has been theoretically demonstrated that the decentralized stochastic gradient descent (dsgd) algorithm has almost the same convergence rate as its centralized counterpart for nonconvex optimization problems, and that the decentralized communication topology only affects the high-order term of the convergence rate of dsgd (a minimal gossip-based sketch is given below). the decentralized communication under the fl setting requires new algorithmic designs to handle new communication challenges. in the following, we will systematically discuss these challenges and potential techniques to address them from the perspective of the model, communication, and data, which will help fl researchers and practitioners deepen their understanding of decentralized fl.
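the sketch below illustrates dsgd with peer-to-peer gossip on a ring topology, as discussed above; the doubly stochastic mixing weights, toy objective, and hyperparameters are illustrative assumptions.

```python
# a minimal sketch of decentralized sgd (dsgd): local gradient step, then
# mixing with ring neighbours only -- there is no central server.
import numpy as np

rng = np.random.default_rng(1)
n, dim, lr, rounds = 6, 10, 0.05, 200
data = [(rng.normal(size=(30, dim)), rng.normal(size=30)) for _ in range(n)]

W = np.zeros((n, n))                      # doubly stochastic mixing matrix
for i in range(n):                        # each node averages with its two
    W[i, i] = 1 / 3                       # ring neighbours
    W[i, (i - 1) % n] = W[i, (i + 1) % n] = 1 / 3

params = np.zeros((n, dim))               # one local model per participant
for _ in range(rounds):
    grads = np.zeros_like(params)
    for i, (X, y) in enumerate(data):     # local stochastic gradient step
        j = rng.integers(len(y))
        grads[i] = (X[j] @ params[i] - y[j]) * X[j]
    params = W @ (params - lr * grads)    # gossip: mix only with neighbours
```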
• co-design of fl models and decentralized optimization approaches: other than adapting existing approaches to decentralized fl, another promising strategy is to co-design the fl model and the decentralized optimization approach to pursue the well-generalized solution.however, the big model incurs new challenges for decentralized fl due to its large model size and the huge volume of training data.how to apply the promising big model to decentralized fl requires new efforts in the design of learning paradigms and corresponding decentralized optimization approaches.• zeroth-order approaches: since it is infeasible to train a big model under the decentralized fl setting due to the huge model and data size, a potential strategy for leveraging big models is to employ the pre-trained big models as a service provider. • low-dimensional approaches: since the big model has a large number of model parameters, a potential strategy to train or fine-tune this kind of big models is to optimize model parameters in the low-dimensional space.since different participants possess different computation and communication capabilities, there usually exists large communication latency in a fl system, which can slow down the empirical convergence speed of decentralized optimization approaches. even though some methods have been proposed for the centralized fl, they are not applicable to the decentralized fl due to the decentralized communication strategy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/746.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/746.txt new file mode 100644 index 0000000000000000000000000000000000000000..78d6c149fb3eaa385773b523ff78e477036ab780 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/746.txt @@ -0,0 +1 @@ +works on adversarial machine learning primarily focus on deep networks and are mostly evaluated on image data. apruzzese et al. (2022) estimate that approximately 70%-80% of works in the literature fall in this category. yet, a large number of high-stake tasks across fields like medical diagnosis (shehab et al., 2022), fraud detection (altman, 2021), click-through rate prediction (yang & zhai, 2022), or credit scoring (shi et al., 2022) neither only rely on deep networks nor operate on images (grinsztajn et al., 2022). these tasks often involve many discrete categorical features (e.g., country, email, day of the week), and the predominant models used are discrete tree-based (e.g., boosted tree ensembles, random forests). these two characteristics raise a number of challenges when trying to achieve robustness using previous works which focus on continuous features and continuous models (goodfellow et al., 2014;madry et al., 2018).accurate modelling of adversarial capability. the de-facto standard in adversarial robustness evaluation (croce et al., 2020) is to model robustness to perturbations bounded in some ℓ p ball, mostly in the image domain. this approach, however, was shown to not accurately represent the capabilities of a real adversary in other domains (kireev et al., 2022;apruzzese et al., 2022). instead, a realistic threat model would constrain the adversary with respect to their financial capabilities. this can be achieved by associating a financial cost with every input feature transformation, limiting the adversary to perform transformations within their total financial budget. 
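a minimal sketch of the financially constrained threat model just described is given below: each categorical feature change has a cost, and a candidate adversarial example is admissible only if the summed cost stays within the budget. the feature names, costs, and budget are hypothetical, not values from the paper.

```python
# a minimal sketch of a cost-bounded perturbation check for categorical features.
costs = {"email_domain": 1.0, "country": 50.0, "card_type": 200.0}  # hypothetical costs

def perturbation_cost(x, x_adv):
    """additive cost of turning record x into x_adv (unchanged features are free)."""
    return sum(costs[f] for f in costs if x_adv[f] != x[f])

def is_admissible(x, x_adv, budget):
    return perturbation_cost(x, x_adv) <= budget

x     = {"email_domain": "rr.com",    "country": "CH", "card_type": "debit"}
x_adv = {"email_domain": "gmail.com", "country": "CH", "card_type": "debit"}
print(perturbation_cost(x, x_adv), is_admissible(x, x_adv, budget=10.0))  # 1.0 True
```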
such a constraint is common for computer security problems as it ties security, in this case, robustness, to real-world limitations.the inaccuracy of threat models in the literature translates on a lack of well-motivated benchmarks for robustness research on tabular data, unlike for image-oriented vision tasks (croce et al., 2020;koh et al., 2020).accounting for discrete categorical features. tabular data is usually heterogeneous and often includes categorical features which can be manipulated by an adversary in a non-uniform way, depending on the characteristics of the real-world concept they represent. for example, buying an email address is not the same as buying a credit card or moving to a different city. moreover, for a given feature, not all transformations have equal cost or are even possible: for example, one can easily change their email address to *@gmail.com, while changing the domain name to *@rr.com can be impossible, since this domain name is unavailable for new users. hence, the definitions of perturbation sets describing the capabilities of potential adversaries should support complex and heterogeneous constraints.robustness for models other than neural networks. gradient-based attacks based on projected gradient descent provide a simple and efficient method for crafting adversarial examples. they are effectively used for adversarial training, which became a de-facto standard defence against adversarial perturbations (madry et al., 2018). albeit defences and attacks are proposed for decision tree models, they often employ combinatorial methods and can be very inefficient time-wise (kantchelian et al., 2016;calzavara et al., 2020;kireev et al., 2022). however, in tasks involving tabular data, these models must be prioritized as they are widely used because they can provide superior performance on some tabular datasets than neural networks (grinsztajn et al., 2022).contributions. our contributions address the challenges outlined above as follows:• we propose a practical adversarial training algorithm supporting complex and heterogeneous constraints for categorical data that can accurately reflect financial costs for the adversary.our training algorithm is based on the continuous relaxation of a discrete optimization problem and employs approaches from projections onto an intersection of convex sets.• we propose a method to generate universal robust embeddings, which can be used for transferring robustness from neural networks to other types of machine learning models such as decision trees or random forests.• we use existing datasets to build the first benchmark that allows us to evaluate robustness for tabular tasks in which the adversary is constrained by financial capabilities.• using the proposed benchmark, we empirically show that our proposed methods provide significantly better robustness than previous works. the input domain's feature space x is composed of m features: x.we denote as x i the value of the i-th feature of x ∈ x. features x i can be categorical, ordinal, or numeric, and we define tabular data as data consisting of these three types of features in any proportion. categorical features are features x i such that x i is a finite set of size |x i | = t i , i. we also denote as x j i , the j-th value of the feature x i . (2022);kireev et al. 
(2022), we make the assumption that a realistic adversary is constrained by financial limitations.we assume that the cost of modifying features is additive: the total cost of crafting an adversarial example x ′ from an example x can be expressed as:. categorical features are typically preprocessed by encoding each value x i using a one-hot encoding vector x i . for instance, if a categorical feature x i can take four possible values {1, 2, 3, 4} and x i = 2, it is represented as x i = (0, 1, 0, 0) ⊤ .where l 1,w denotes the weighted l 1 norm and w i = c i x i is the costs of transforming x i to any other possible value in x i .where the vectors w, x, x ′ are the contenation of the vectors w i , x i , x ′ i respectively for ∀i : 0 ≤ i ≤ m.input: data point x, y, attack rate α, cost bound ε, cost matrices c, p gd steps, d steps output: adversarial sample x′ . for each feature, this relaxation allows us to replace the optimization over x ′ i ∈ x i with an optimization over x′ i ∈ r ti ≥0 . more precisely, we first replace the feature space x i by the set {x ′ i ∈ {0, 1} ti , j x ′j i = 1} using the one-hot encoding vectors. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/747.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/747.txt new file mode 100644 index 0000000000000000000000000000000000000000..f6a406d6dee8765047e6e5e2438cdc7a9edef7dc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/747.txt @@ -0,0 +1 @@ +in this section, we introduce the techniques we use in this paper. before demonstrating our main techniques, let us first review the algorithm of . at each iteration, the algorithm alternates by solving two weighted multiple response regressions: starting with an initial matrix y , it tries to find a matrix x ∈ r n×k that minimizes w • (m -xy ⊤ ) 2 f , then they zero out the rows of x with large ℓ 2 norms and use the qr factor of x to proceed. then, they alternate and solve minf given the new x. after proper zeroing out and qr, the algorithm proceeds to the next iteration. the main runtime bottleneck is to solve the weighted multiple response regression per iteration.following the trend of low rank approximation and fixed parameter tractable algorithm for weighted low rank approximation , it is natural to consider using sketching to speed up the multiple response regression solves. let us take the regression minas an example. let d √ w i denote the n × n diagonal matrix that puts √ w i on the diagonal, where w i is the i-th row of w . it is not hard to verify that (1) can be cast into n linear regressions (see details in claim a.2), each of which is in the form of minto solve these regressions fast. one can pick a random sketching matrix s ∈ r s×n where s = o(ǫ -2 0 k) and instead solve minby picking a sparse sketching matrix s, the above regression can be solved in o(ǫ -1 0 nnz(x)+ǫ -2 0 k 3 ) time with high probability, and the output solution y has cost at most (1 + ǫ 0 ) • opt where opt is the optimal regression cost. aggregate over n regressions, this gives anthis approach, however, has several drawbacks that make it infeasible for our application. the first is the error guarantee of such approximates regression solves. essentially, we compute a matrixin other words, the approximate solution y provides a relative backward error. unfortunately, the backward error is much less helpful when we want to analyze how close y is to the optimal solution y , i.e., the forward error. 
it is possible to translate backward error to forward error at the expense of dependence on other terms such as multiplicatively depending on the cost of the regression and the spectral norm of x † , the pseudo inverse of x. to cancel out the effect of these extra terms, we will have to set the error parameter ǫ 0 to be very small, thus, a polynomial dependence on ǫ -1 0 in the running time is unacceptable. this motivates us to design a fast and high precision regression solver whose ǫ 0 dependence is log(1/ǫ 0 ) (see lemma 4.8). given an algorithm that produces an (1 + ǫ 0 ) relative backward error of regression in log(1/ǫ 0 ) iterations, we can set ǫ 0 to inverse proportionally to opt • (w • x) † . as the spectral norm of (w • x) † is polynomially bounded, this incurs an extra log n term in the runtime. it remains to devise a regression solver with such runtime behavior. our approach is to use the sketch as a preconditioner: we pick a dense sketching matrix s ∈ r s×n with s = o(k) rows such as for any k dimensional vector x, sx 2 = (1 ± o(1)) • x 2 . we then apply s to d √ w i x to form a short fat matrix and compute the qr decomposition of this matrix. it turns out that the right qr factor of sd √ w i x is a good preconditioner to d √ w i x. we then use s to find a constant approximation to the regression problem and utilize it as a starting point. the algorithm then iteratively performs gradient descent to optimize towards an ǫ-approximate solution. overall, such an algorithm takes log(1/ǫ 0 ) iterations to converge, and each iteration can be implemented in o(nk) time. plus the extra o(nk +k 3 ) time to compute the initial solution, this yields an algorithm that runs in o((nk + k 3 ) log(1/ǫ 0 )) time to compute an ǫ 0 backward error solution. note here we sacrifice the input sparsity time in exchange of a sketching matrix that works with high probability. this also accounts for the fact that both x and y are quantities changed across iterations and the sparsity cannot be controlled.we want to remark that our high precision and dense regression solver not only works for weighted low rank approximation, but for any alternating minimization frameworks that require one to solve o(1) multiple response regressions per iteration. due to the good error dependence, the overall log(1/ǫ) convergence is well-preserved, even though each iteration is only solved approximately. we believe this high precision solver will also find its use in problems like (low rank) matrix sensing and matrix completion and tasks in which forward error for multiple response regression is required.in addition to our high-accuracy, high probability solver, we also devise a robust analytical framework for alternating minimization, which is the core to enable us with fast approximate solvers. in particular, we show that if we only output a matrix y that is close to the exact regression solution y in the spectral norm, then the alternating minimization still converges to the fixed point w • m with good speed. our analysis uses a different strategy from where they heavily rely on the closed-form of the regression solution. we show that the guarantees of the algorithm mainly follow from the ℓ 2 row norm of the matrices. specifically, given two matrices whose ℓ 2 row norms are close for each corresponding rows, we prove that statistics such as incoherence can be well-preserved. 
our argument relies on a novel interpolation between different equivalent formulations of incoherence, especially the point of view of leverage scores. we believe such an analytical framework will also be extremely helpful for robustifying the alternating minimization framework for faster matrix sensing and matrix completion. [the remainder of this excerpt consists of step-by-step justifications lifted from the appendix proofs (spectral- and frobenius-norm bounds, leverage-score and incoherence arguments); the displayed equations they refer to were lost in extraction, so the individual steps are not reproduced here.]
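the sketch below illustrates the sketch-as-preconditioner idea described above for a single tall least-squares solve (the role played by each weighted regression over d_sqrt(w_i) x); the sketch size, iteration count, and exact line search are illustrative choices, not the paper's exact algorithm.

```python
# a minimal sketch of sketch-and-precondition least squares with numpy.
import numpy as np

def sketched_preconditioned_lsq(A, b, iters=40, seed=0):
    """approximately solve min_y ||A y - b||_2 for a tall matrix A (n >> k)."""
    n, k = A.shape
    rng = np.random.default_rng(seed)
    S = rng.normal(size=(8 * k, n)) / np.sqrt(8 * k)  # dense gaussian sketch, O(k) rows
    SA = S @ A
    _, R = np.linalg.qr(SA)                 # right qr factor of the sketched matrix
    P = np.linalg.inv(R)                    # A @ P is well conditioned with high probability
    y = np.linalg.lstsq(SA, S @ b, rcond=None)[0]     # cheap constant-factor starting point
    for _ in range(iters):                  # preconditioned gradient descent: good
        r = A @ y - b                       # conditioning yields a log(1/eps)-style
        g = A.T @ r                         # linear convergence rate
        d = P @ (P.T @ g)                   # preconditioned descent direction
        Ad = A @ d
        y -= (d @ g) / (Ad @ Ad) * d        # exact line search for least squares
    return y

A = np.random.default_rng(1).normal(size=(2000, 20))
b = np.random.default_rng(2).normal(size=2000)
y = sketched_preconditioned_lsq(A, b)
print(np.linalg.norm(A.T @ (A @ y - b)))    # near-zero normal-equation residual
```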
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/748.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/748.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a7ac796e2b5a3bdd26b53a9f523a23bd4fb5568 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/748.txt @@ -0,0 +1 @@ +random forests is a method for classification or regression in which we construct an ensemble of decision trees for (random subsets of) the training data and, in the classification phase, aggregate their outcomes by majority voting. the random-forests method has received a tremendous amount of attention for its simplicity and improved accuracy over plain decision trees . commonly, fast heuristics without performance guarantees are used for computing random forests , in particular for computing the individual decision trees in the forest. for plain decision trees, researchers lately made several advances in computing optimal decision trees, that is, decision trees that provably optimize criteria such as minimizing the tree size . with that increased amount of attention also came theoretical advances, showing the limits and opportunities for developing efficient exact algorithms for computing decision trees . one impetus to computing optimal decision trees is that minimizing the size reduces tendencies to overfitting . it is conceivable that such benefits transfer to globally optimizing the tree ensembles computed by random forests. however, apart from sporadic hardness results , we are not aware of exact algorithmic research for tree ensembles. in this work, we aim to initiate this direction; that is, we begin to build the theoretical footing for exact algorithmics of computing optimal tree ensembles and provide potential avenues for exact algorithms that are guaranteed to provide optimal results with acceptable worst-case running times.we study the algorithmic properties of two canonical formulations of the training problem for tree ensembles: we are given a set of training examples labeled with two classes and a number ℓ of trees and we want to compute a tree ensemble containing ℓ trees that classifies the examples consistently with the given class labels. 1 we want to minimize either the sum of the tree sizes, resulting in the problem minimum tree ensemble size (mtes), or the largest size of a tree in the ensemble, resulting in the problem minimax tree ensemble size (mmaxtes). 2 both contain as a special case the problem of computing a minimum-size decision tree, which is known to be np-hard . however, the hardness constructions do not necessarily reflect practical data. thus, we are interested in precisely which properties make the problems hard or tractable.mainly, we provide two novel algorithms for mtes and mmaxtes 3 and matching lower-bound results for their running times. we call the first one witness-tree algorithm. this algorithm demonstrates that prospects for tractable algorithms for optimizing decision trees can be non-trivially generalized to optimizing tree ensembles. namely, it was known that for small tree size s, moderate maximum domain size d of any feature, and moderate number δ of features in which two examples differ 4 , a minimum decision tree can be computed efficiently, that is, in f (s, d, δ) • poly time, where poly is a polynomial in the input size . 
however, the function f is at least δ s •(d s 2δ) s •2 s 2 and the algorithm involves enumerative steps in which the worst-case running time equals the average case. we show that, even for the more general mtes, we can improve the running time to o((6δds) s • sℓn), where s denotes the sum of the tree sizes, ℓ the number of trees in the ensemble, and n the number of training examples (theorem 4.1). moreover, we can avoid the enumerative approach, obtaining a search-tree algorithm that is both conceptually simpler and more easily amenable to heuristic improvements such as early search-termination rules and data reduction. 5 we achieve this by growing the trees iteratively and labeling their leaves with witness examples that need to be classified in these leaves. this allows us to localize misclassifications and their rectification, shrinking the search space. we believe that this technique may have practical applications beyond improving the worstcase running times as we do here. the running time that we achieve is tight in the sense that we cannot decrease the exponent to o(s) without violating reasonable complexity-theoretic assumptions (theorem 4.4).recently, exponential-time dynamic programming has been applied to compute optimal decision trees and the resulting trees have shown comparable performance to (heuristic) random forests on some datasets . with the second algorithm that we provide, we investigate the potential of dynamic programming for computing optimal tree ensembles. we first show that minimizing decision trees can be done in o(3 n ) time, where n is the number of input examples, by a dynamic-programming approach that works on all possible splits of the examples (corollary 5.2). (indeed, the algorithm employed by similarly computes a table over all possible splits in the worst case.) we then extend this algorithm to tree ensembles with ℓ trees, achieving (ℓ + 1) n • poly running time (theorem 5.3). unfortunately, we also show that the running time cannot be substantially improved: a running time of f (ℓ) • 2 o(log ℓ)•n would violate reasonable complexity-theoretic assumptions (theorem 5.4).finally, we compare the power of decision trees and tree ensembles in terms of their sizes. here, we show that a training data set d that can be classified by a tree ensemble with ℓ trees of size at most s, can also be classified by a decision tree of size (s+ 1) ℓ (theorem 3.2). however, such an exponential increase is necessary in the worst case: we show that there are such training data sets d that cannot be classified by any decision tree of size roughly (s/2) ℓ/2 (theorem 3.3).in summary, as the number of trees in a tree ensemble grow, the classification power increases exponentially over decision trees. nevertheless, we are able to carry over and substantially improve on tractability results for decision trees if in particular the number of cuts in the optimal ensemble is relatively small. the underlying witness-tree technique seems promising to try in practice. fur-thermore, we show that dynamic programming, which has been successful for decision trees, may also be viable for tree ensembles. we also provide matching lower bounds for the running times. apart from tuning our algorithms, in the future, deconstructing these lower bounds may provide further guidelines towards which properties of the input data we may exploit for efficient algorithms and which we likely may not. 
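the sketch below is an illustrative rendering (not the paper's witness-tree algorithm) of the ensemble semantics used above: each tree votes a class for an example, the ensemble answers by majority, and a candidate ensemble is feasible for mtes/mmaxtes only if it classifies every training example consistently with its label.

```python
# a minimal sketch of majority-vote tree ensembles and the consistency check.
def tree_predict(tree, x):
    """tree = leaf class (int), or (feature, threshold, left, right) with an 'x[f] < t' cut."""
    while not isinstance(tree, int):
        f, t, left, right = tree
        tree = left if x[f] < t else right
    return tree

def ensemble_predict(trees, x):
    votes = [tree_predict(t, x) for t in trees]
    return max(set(votes), key=votes.count)          # majority vote

def consistent(trees, examples, labels):
    return all(ensemble_predict(trees, x) == y for x, y in zip(examples, labels))

# three stumps (size-1 trees) over two features, classes 0/1 (hypothetical data)
trees = [(0, 4.0, 0, 1), (1, 1.0, 1, 0), (0, 2.0, 1, 0)]
examples, labels = [(1.0, 0.5), (3.0, 2.0), (5.0, 0.2)], [1, 0, 1]
print(consistent(trees, examples, labels))           # True
```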
for this, an example e ∈ e is dirty for some tree t (or tree ensemble f) if the label t(e) (or f(e)) assigned to e by t (or by f) is not equal to λ(e). a decision tree r (without witness labeling) is a refinement of a witness tree t if there is a labeling of the leaves of r by witnesses such that (a) the labeling results in a witness tree, that is, for each leaf t of r the witness wit(t) of t is in e, and (b) after the labeling the witness tree r is a refinement of t. the number of table entries in the dynamic program is o(3^n), since each entry corresponds to a 3-partition of e into e_b, e_r, and e \ (e_b ∪ e_r). [the remainder of this excerpt consists of fragments of the hardness-reduction construction (forcing, choosing, verifying, and test examples over dummy and choice dimensions); the surrounding definitions and displayed material were lost in extraction, so the individual construction steps are not reproduced here.] in summary, minimum tree ensemble size and minimax tree ensemble size are both fixed-parameter tractable when parameterized by δ (the maximum number of dimensions in which a pair of examples differ), d (the domain size), s and s (the total tree ensemble size and the maximum size of a tree in the ensemble, respectively), and ℓ (the number of trees in the ensemble). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/749.txt new file mode 100644 index 0000000000000000000000000000000000000000..7aec3081898962e09242283aa12e8065a0566c13 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/749.txt @@ -0,0 +1 @@ +in reinforcement learning, an agent learns by interacting with an environment, and adjusts its behaviour (or policy) based on rewards received. the goal is to behave in such a way that the expected total rewards over the time horizon considered are maximised.
central to most reinforcement learning algorithms is the estimation of values, in terms of either a state-value function representing the expected total reward from each state, or an action-value function representing the expected total reward from each state-action pair. temporal difference (td) learning algorithms are among the most popular reinforcement learning algorithms. in the tabular case (when the state and action sets are small enough that the values of each state or each state-action pair can be stored in tables), these types of algorithms have been shown to converge with probability one, both when estimating the value function of a given policy and in td control algorithms such as sarsa and q-learning. however, when the state space becomes large, tabular solution methods are no longer feasible. in this case, the algorithms need to be combined with function approximation when estimating the value (or action-value) function. however, when combining these methods with function approximation, convergence results have proven more difficult to obtain, even for algorithms using linear function approximation. in fact, there are examples in the literature of divergence of off-policy algorithms (such as q-learning) when combined with linear function approximation. semi-gradient td learning methods, in which the value function of a specific given policy is estimated using linear function approximation, have been shown to converge; an analogous convergence result is established here for the algorithm studied in this paper. section 4 presents our main convergence result, the assumptions used, and the proof of the convergence result. we conclude with a discussion of the results in section 5. [the remainder of this excerpt consists of proof fragments from the appendix: verifying the robbins-monro, square-integrability, and stability conditions; bounding expected absorption times of the absorbing mdp via n_θ = (i − p_π_θ)^(-1); and norm-equivalence arguments over state-action markov chains under ε-soft policies. the displayed equations these steps refer to were lost in extraction and are not reproduced here.]
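the sketch below shows semi-gradient td(0) policy evaluation with linear function approximation, the setting discussed in this excerpt; the 5-state random-walk chain, one-hot features, and step size are illustrative assumptions.

```python
# a minimal sketch of semi-gradient td(0) with linear features on a toy chain.
import numpy as np

rng = np.random.default_rng(0)
n_states, gamma, alpha, episodes = 5, 0.9, 0.05, 2000
phi = np.eye(n_states)                       # one feature vector per state (tabular special case)
w = np.zeros(n_states)                       # linear value estimate: v(s) ~= phi[s] @ w

for _ in range(episodes):
    s = 2                                    # start in the middle of the chain
    while True:
        s_next = s + rng.choice([-1, 1])     # fixed policy: move left/right uniformly
        done = s_next < 0 or s_next >= n_states
        reward = 1.0 if s_next >= n_states else 0.0    # reward only on the right exit
        v_next = 0.0 if done else phi[s_next] @ w
        td_error = reward + gamma * v_next - phi[s] @ w
        w += alpha * td_error * phi[s]       # semi-gradient update: only v(s) is differentiated
        if done:
            break
        s = s_next

print(np.round(phi @ w, 2))                  # estimated state values
```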
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/75.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/75.txt new file mode 100644 index 0000000000000000000000000000000000000000..a7c7c2b6bf8c9fab6be4f799dce4b5ec51d77bba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/75.txt @@ -0,0 +1 @@ +machine learning is the best way to solve problems with well-defined outcomes. image recognition, finding patterns in missing data and understanding clear and unambiguous language are all ai can do . it is also commonly used to find differences in financial transactions, make predictions based on patterns in past data (such as the stock market) and determine when someone has sent spam and mark it as such .under this premise and with the advance of quantum computers, a new field of study and research is beginning to emerge called quantum machine learning (qml). the goal of quantum technologies is to demonstrate their potential in comparison to classical machine learning, but this in turn shows weaknesses such as the limitation of qubits and continuous operations of logic gates .while some companies have explored the use of quantum machine learning in their research and research projects, it is still quite rare to find companies that are using quantum machine learning in their business operations on a widespread basis. some companies that have mentioned the use of quantum machine learning or have conducted research in this field include:google: the technology company has conducted research into the use of quantum machine learning in tasks such as route optimisation and financial data analysis.ibm: the technology company has developed a quantum machine learning platform called ibm q, which is used to investigate how quantum machine learning can improve the performance of tasks such as image classification and natural language processing.microsoft: the technology company has conducted research into using quantum machine learning to improve energy efficiency in data centres and to optimise power distribution in power grids. d-wave: this quantum technology company has developed a quantum machine learning platform called d-wave leap, which is used to investigate how quantum machine learning can improve decision-making in various industries.in this paper, we will apply dimensionality reduction to the structure of the data set, as well as show that linear discriminant analysis (lda) . shows a significant advantage in supervised preprocessing over principal component analysis (pca) .we aim to compare classical and quantum classification methods -quantum support vector classifier (qsvc) and variational quantum classifier (vqc). qiskit machine learning was used for the construction of fundamental computational building blocks, such as quantum kernels and quantum neural network, which we use for the classification of diabetes .while some companies have explored the use of quantum machine learning in their research and research projects, it is still quite rare to find companies that are using quantum machine learning in their business operations on a widespread basis.ibm: the technology company has developed a quantum machine learning platform called ibm q, which is used to investigate how quantum machine learning can improve the performance of tasks such as image classification and natural language processing. 
d-wave: this quantum technology company has developed a quantum machine learning platform called d-wave leap, which is used to investigate how quantum machine learning can improve decision-making in various industries. pennylane can be used to simulate and run quantum algorithms and programs on different quantum backends, including quantum machine simulators such as qiskit aer and real quantum machines.quantum machine learning is a subfield of machine learning that uses quantum computers to perform machine learning tasks. quantum machine learning algorithms can be used to solve problems that are difficult or impossible to solve using classical machine learning algorithms.1) quantum kernel: a quantum kernel is a function that is used to define the similarity between two quantum states in a quantum machine learning algorithm.in quantum machine learning, the quantum kernel is used to define the similarity between two quantum states in the feature space.faster speed: quantum machine learning can perform computations much faster than classical machine learning due to the higher speed of quantum computers. for example, one study has shown that a quantum machine learning algorithm can classify data faster than a classical machine learning algorithm.higher accuracy: quantum machine learning can produce more accurate results than classical machine learning due to the higher accuracy of quantum computers. for example, one study has shown that a quantum machine learning algorithm can classify data more accurately than a classical machine learning algorithm.higher learning capacity: quantum machine learning can learn faster and more accurately than classical machine learning due to the higher processing capacity and accuracy of quantum computers. for example, one study has shown that a quantum machine learning algorithm can learn faster and more accurately than a classical machine learning algorithm. for example, one study has shown that a quantum machine learning algorithm can process large amounts of data faster than a classical machine learning algorithm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/750.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/750.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b63fef90c36f827ad9dcca3af0637c45a36e29a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/750.txt @@ -0,0 +1 @@ +as neural networks are becoming more relied on for many decision processes, there has been an increased focus into understanding how these algorithms come to a specific output. should such models be used to take high-stakes decisions, as it is often the case in finance or in medicine, it is indeed crucial for these tools to provide a justification along with their results, which could be used to discuss or even oppose a decision. the risk is that deep learning models, often considered as "black boxes", could produce biased and unfair behaviour. such behaviour has already been noticed in opaque algorithms such as compas, used to evaluate the risk of recidivism, which was accused of being biased toward people of color (rudin, 2019).as a result, many methods aiming to explain a deep learning prediction have been developed, which now constitutes a field usually called explainable ai (xai). among these methods, some of the most common are lime (ribeiro et al., 2016), shap (lundberg & lee, 2017), or integrated gradients (sundararajan et al., 2017). 
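as a concrete illustration of one of the attribution methods just mentioned, the sketch below shows a hypothetical integrated gradients call via captum on a small pytorch model (assuming captum is installed); the model, input shape, and target class are illustrative, not taken from the paper.

```python
# a minimal sketch of integrated gradients with captum on a toy model.
import torch
import torch.nn as nn
from captum.attr import IntegratedGradients

model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 2))
model.eval()

x = torch.randn(1, 8, requires_grad=True)     # one example with 8 features
baseline = torch.zeros_like(x)                # "uninformative" reference input

ig = IntegratedGradients(model)
# attributions approximately sum to model(x) - model(baseline) for the target class
attributions, delta = ig.attribute(x, baselines=baseline, target=1,
                                   return_convergence_delta=True)
print(attributions.shape, float(delta))       # per-feature importance scores
```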
alongside this research, there has also been a drive to create libraries unifying these different methods, enabling their use on popular deep learning libraries, as well as integrating evaluation tools to compare these xai methods. to this end, several pieces of software have been proposed, including shap (lundberg & lee, 2017), interpretml (nori et al., 2019), omnixai (yang et al., 2022) or captum (kokhlikyan et al., 2020), among many. however, several researchers (tonekaboni et al., 2020; crabbé & van der schaar, 2021) have noticed a lack of attention toward a specific type of data: time series. temporal data is nevertheless crucial in many applications: indeed, for instance, financial and medical data commonly consist of multivariate time series. models which produce predictions based on this type of data therefore need careful consideration, as these applications often carry high-stakes decisions. consequently, several feature attribution methods have recently been introduced to tackle this specific case (choi et al., 2016; tonekaboni et al., 2020; crabbé & van der schaar, 2021). yet, to the best of our knowledge, there seems to be a lack of a unified library to regroup and evaluate these specific methods. as a result, we created time_interpret (short: tint), a python library designed as an extension of captum (kokhlikyan et al., 2020). although this library can be used with any pytorch (paszke et al., 2019) model, it has a specific focus on time series, providing several feature attribution methods developed for this specific type of data. time_interpret also provides evaluation tools, whether the true attributions are known or not, as well as several time series datasets. it also leverages pytorch lightning (falcon & the pytorch lightning team, 2019) to simplify the use of the original pytorch library. as such, it provides several common pytorch models used to handle temporal data, as well as a specific pytorch lightning wrapper. moreover, despite this focus on time series, several components of time_interpret have a slightly different application. it provides, for instance, various methods aiming to explain language models such as bert. its evaluation tools can also be used with any feature attribution method, and not just the ones implemented in this library. this paper aims to give a general introduction to time_interpret. furthermore, several previously unpublished methods have been developed along with this library, which we also present here. we hope this study will give more clarity to the corresponding codebase, and will prove useful for further research in this field. we encourage the reader to also refer to the library documentation for more information, especially in case of new significant releases. several researchers (tonekaboni et al., 2020; crabbé & van der schaar, 2021) have noticed a lack of attention toward a specific type of data: time series (tonekaboni et al., 2020; crabbé & van der schaar, 2021). these two methods were originally proposed by (slack et al.). this method, introduced by (crabbé & van der schaar, 2021), is an adaptation of a perturbation-based method developed in (fong & vedaldi, 2017; fong et al.); as such, it consists in perturbing temporal data by replacing some of it with an average in time.
this method (enguehard, 2023a) consists in a generalisation of dynamask, which learns not only the mask, but also the associated perturbation, instead of replacing perturbed data with a predetermined average in time.metrics metrics are an important component of time_interpret, as they provide elements of comparison between feature attribution methods in two scenarios: when the true salient features are known, and when they are unknown., 2017)to specifically handle time-series data. in this method, instead of directly computing attribution by summing gradients over interpolated points between the original data x and an uninformative baseline x, we first crop the temporal sequence up to a specific time t.as a result, in this setting, we keep the baseline as close as possible to the original input data, only modifying the last input in time. this method extends the one presented in(tonekaboni et al., 2020), allowing any captum method to be used on temporal data.as a result, future data is not used when computing the attribution of a temporal data x t . for instance, x t could be important at time t 1 , attr t1 having high values, but could also be less important at a later time t 2 , attr t2 having lower values. however, to properly work, they require a number of data points to be used as the original data distribution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/751.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/751.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c8231e41838a960496cb16ee94a5422e5149989 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/751.txt @@ -0,0 +1 @@ +our solution utilizes the atcll smart lamp posts, which are equipped with a variety of sensors and cameras. given their historical data on the number of identified pedestrians, vehicles, and other moving objects in each street, our algorithm is designed to predict the number of movements likely to occur on the street in the next 24 hours, accounting for the differences between workdays and weekends. based on these predictions, cem can provide recommendations on when to dim public lighting in specific streets. alternatively, if the lights do not support dimming, cem could advise shutting off half of the lamp posts in the street. this approach to public lighting enables a city to reduce its energy consumption while maintaining public safety. moreover, we highlight the possibility of integrating cem with smart energy communities and intelligent demand response strategies, such as . this can bring synergistic advantages because utility providers can effectively optimize and schedule flexible energy resources and energy storage across the city, leading to a reduction in costs and peak demand. by forecasting the absence of people on several streets at night, the system can dim the public lights in an event of peak grid consumption, acting as a smart regulating reserve mechanism. participation in such mechanisms can even generate revenue for the city. consequently, cem not only acts as an isolated smart-city solution, but it can also be part of the creation of a more efficient and robust energy grid for urban areas, while incentivizing the use of renewable energy.regarding the specifically designed ai time-series forecasting algorithm, its implementation process consisted of four steps: collection and preprocessing of historical data, feature engineering, model selection, and model training. 
these are the steps that permit our algorithm to learn the patterns and relationships between various factors that influence the number of pedestrians, vehicles, and objects on the streets of a city. first in the data collection and preprocessing step, we collect historical data from the smart posts in aveiro, which includes pedestrian and vehicle counts, as well as weather conditions, day of the week, holidays, time of day, and local events. the data is then preprocessed to remove any outliers, missing values, and inconsistencies.next, during the feature engineering step, we extract relevant features from the preprocessed data, which include temporal features (e.g., hour of the day, day of the week), weather features (e.g., temperature, humidity), and event-based features (e.g., holidays, local events). these features are crucial for improving the accuracy of our model. finally, we selected and trained our machine learning model for time series forecasting using the preprocessed data and features, adjusting hyperparameters as necessary to minimize the error in the forecasts. against this backdrop, the emergence of smart cities, powered by the use of advanced information and communication technologies (ict), such as the internet of things (iot), artificial intelligence (ai), machine learning (ml), and data analytics, promise to unlock cities' potential to become more sustainable, intelligent, efficient, and ultimately livable for their inhabitants. for instance, barcelona, spain, has established a comprehensive smart city platformthat includes various solutions, such as smart parking, waste management, and air quality monitoring, all aimed at improving urban life for citizens. the city has established the aveiro tech city living lab (atcll), which is a research platform that serves as a testing ground for smart city solutions.this work introduces the intelligent city management system (icms), the result of our participation in the first edition of the aveiro tech city hackathon, promoted by the municipality of aveiro, that aimed to further enhance the capabilities and services of the city's management platform.another iot applications for smart cities are related to vehicular traffic data, which represents one of the most vital data sources in a typical smart city. as such, this work attempts to facilitate and leverage the implementation of smart devices installed across aveiro city to create an intelligent city management system (icms).the proposed system is divided into four different components: (i) city security and safety (css), (ii) city energy management (cem), (iii) city infrastructure maintenance (cim), and (iv) city management dashboard (cmd).the css component focuses on improving road and pedestrian safety, by analyzing data provided by smart posts spread across aveiro city. in this context, city energy management (cem) has emerged as a crucial component in the design and operation of our smart city platform (fig. 
first in the data collection and preprocessing step, we collect historical data from the smart posts in aveiro, which includes pedestrian and vehicle counts, as well as weather conditions, day of the week, holidays, time of day, and local events.attempts to automate and improve the monitorization of aveiro by leveraging its smart public transportation to efficiently monitor in a distributed way the city's infrastructure, resorting to live-image capturing and computer vision to detect infrastructure defects, which in turn are reported in realtime to the city's infrastructure engineers, allowing them to make data-driven decisions to ensure maintenance of the infrastructure. the organizing committee of the aveiro tech city hackathon provided two months of recorded data from the atcll, so the first month could be used for training of ml models, and the second for a holdout evaluation.regarding the first use case, security and safety, the ratio between the number of speeding vehicles and the number of pedestrians, per hour of the day, can be visualized for each street of aveiro equipped with the smart infrastructure. additionally, based on the historical data provided in the first month, the proposed solution can predict the number of movements likely to occur on the street in the next 24 hours at a time, enabling a forecasting of the best hours to apply energy saving measures (fig. further research efforts must be made to develop smart city solutions capable of tackling the environmental challenges of urban environments, so cities like aveiro can provide more security and a better quality of life to their citizens. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/752.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/752.txt new file mode 100644 index 0000000000000000000000000000000000000000..27b1aafe78f3a958890d7811e06059b20de8d3de --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/752.txt @@ -0,0 +1 @@ +the emergence of groundbreaking generative ai models, such as chatgpt and dall-e , has catalyzed a new era in the synthesis and manipulation of digital content. concretely, these powerful machine learning algorithms have demonstrated unprecedented capabilities in synthesizing realistic images, audio, text, and other data modalities . in particular, these state-of-the-art language and image generation models, leveraging the prowess of deep learning and transformer architectures, have enabled the generation of a vast array of fields.generative ai refers to artificial intelligence that can generate novel content, rather than simply analyzing or acting on existing data like expert systems . generative ai models, equipped with vast data sets and intricate designs, have the extraordinary capability to create new and diverse content. they can process and learn from information gathered from a multitude of sources, such as wikipedia , github and others. by tapping into this wealth of data, these models can generate an extensive range of multimedia formats, including video, audio, and text.during the recent years, the continuous growth in computing power has used deep neural networks , transformers and other innovative models like generative adversarial networks and variational autoencoders . all these models can effectively capture the complexity of data, making them adept at modeling high-dimensional probability distributions of language or images from specific or general domains. 
by complementing generative models with additional techniques that map the latent high-dimensional semantic space of language or images to multimedia representations of text, audio, or video, it becomes possible to transform any input format, such as text, into a variety of output formats like video. this versatility allows for a seamless conversion between multimedia formats, making generative models invaluable in numerous applications. one of the most significant aspects of generative ai is its potential for endless applications. these models can be trained to generate genuinely different multimedia formats, like video, audio, or text, from various input formats. for instance, generative ai can be used to create realistic images from textual descriptions, produce video content from audio, or even generate music compositions based on specific styles or emotions. furthermore, generative ai has the potential to revolutionize industries such as advertising, entertainment, and education by automating content creation and providing personalized experiences. with the ability to learn from diverse data sources and generate a wide array of multimedia outputs, these models can help businesses and individuals alike save time and resources while tapping into new creative possibilities.in conclusion, generative ai models, bolstered by their access to extensive data and complex designs, offer unparalleled potential in content creation and transformation. their ability to learn from various sources, generate diverse multimedia formats, and convert inputs from one format to another opens up a vast array of applications in multimedia generation and conversion, making them indispensable tools in today's technology-driven world.in more recent work, there have been surveys of llms, and of generative ai, talking about different applications of the technology . in contrast to prior surveys, this comprehensive review aims to offer a unique perspective by highlighting not only the most prominent generative models and their underlying technologies but also by emphasizing on all the different uses of this technology. in addition, we give an up-to-date competitive outlook in this growing industry and the models behind this growth.this resource encompasses 15 categories, which include text, images, video, 3d, code and software, speech, ai understanding, business, gaming, music, biotech, brain, others, and multimodal. within each section, a thorough taxonomy of the current technologies is presented, detailing both the models and tools available. by offering a systematic exploration of these diverse ai applications, the survey serves as an essential reference for researchers, academics, and professionals, enabling them to comprehend better the evolving landscape of generative ai and its far-reaching implications.as an example, a 3d game designer may have various generative ai needs for a project of his. he may find a solution for his 3d ai needs under both 3d and gaming, getting more specific results and different answers. he may also find solutions for more business needs of his under both business and text. with this survey, we believe that users will get a very good outlook of how generative ai is shaping up and where they may find their needed technology.we introduce, in this article, the proposition of an extensive dictionary centered on the most sought-after generative ai applications, which are notably reshaping industries such as the videogames , design , and business op-erations sectors. 
the challenge users experience in identifying the developed programs within each distinct application field substantiates the demand for a comprehensive reference tool. here we present a summary of the different categories: regarding text, generative ai technologies in the text category aim to create and manipulate natural language text. about ai understanding, generative ai technologies in the ai understanding category are those models that convert an input into a text output. concerning business, generative ai technologies in the business category focus on the application of ai to improve business processes and decision-making. regarding biotech, generative ai technologies in the biotech category aim to apply generative ai to biological research and medical applications. some of these models could have been included in the business category, but this category was drawn up because of the abundance of generative ai applications in this field. concerning the human brain, generative ai technologies in the brain category focus on the application of generative ai to help people communicate. applications like colossyan ai, elai ai, heygen ai, hour one ai, rephrase aiand synthesiacan create proffessional videos through diverse avatars. specifically for user interface generation, we have three applications, diagram ai , galileo aiand uizard ai, which use generative ai for generating good user interfaces and optimize the customer's experience. as for web apps, debuild ai, literally anything ioand second aiare examples of generative ai technologies with which users can easily create web apps through text prompts. in terms of copywriting, a plethora of applications have already been developed including anyword, copy ai, google workspace-gmail and docs, hyperwrite, jasper, letterdrop, regie ai, simplified ai, type aiand writesonic.more specifically for social media content creation, there are some applications like clips ai, pictory ai, predis ai, tweethunterand tweetmonk. a series of apps achieve personalized chatbots to your business: one reach ai, opensight ai, brainfishand yuma ai. some of them are autoslide ai, canva docs to decks, chatba, decktopus ai, gamma ai, google workspace ai-slides, tome aiand slide ai.regarding drug discovery, nvidia bionemois a cloud service for generative ai in drug discovery researches are provided with generative and predictive biomolecular ai models at scale. there are a plethora of companies that use generative ai for drug creation including absci, atomic ai, bighat ai, exscientia, menten ai and proteinqure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/753.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/753.txt new file mode 100644 index 0000000000000000000000000000000000000000..60bcc0de7fb7675951fb9392e61efc6c2250fc0b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/753.txt @@ -0,0 +1 @@ +although reinforcement learning (rl) is widely applied to extensive fields, there is stills lack a work that establishes the objective of starting from rl from the markov decision process, which is very unfriendly to beginners. 
to fill the gap in this view, in this lecture, we provide a self-contained, teachable technical introduction to the objectives of rl, where each section tackles a particular line of work from the transition probability matrix over the markov decision process, reward, bellman equation, discounted state distribution, and objectives.concretely, this lecture provides three equivalent versions of objectives. the first version is presented in theorem 2.3, where it shows the objective as the expectation with respect to the random variable (s, a, s ′ ). theorem 2.3 illustrates all the random factors in the markov decision process (mdp), and we refer to it as the standard objective of mdp. furthermore, theorem 3.3 extends and unifies the objective that appears in theorem 2.3. theorem 3.3 is traceable to td(λ) ], and we present it as the expectation with respect to the random variable the state s, where the state s follows the λ-version of discounted state distribution. finally, we present a general objective that unifies the previous two versions (see theorem 4.1), which provides a high level to understand of rl's objective, where it shows a fundamental formulation that connects some widely used rl techniques (e.g., td(λ) and gae), and this objective can be potentially applied to extensive rl algorithms. for example, yang et al. apply the main technique of theorem 4.1 to obtain the surrogate function with respect to gae . although gae has been widely used in rl, it lacks a theoretical analysis of the related algorithms. theorem 4.1 provides a possible way to establish gae and empirical results by rigorous analysis. to clarify this view, we present a surrogate function with respect to gae, see section 4.3, where it provides a theoretical fundament for policy optimization with gae. to fill the gap in this view, in this lecture, we provide a self-contained, teachable technical introduction to the objectives of rl, where each section tackles a particular line of work from the transition probability matrix over the markov decision process, reward, bellman equation, discounted state distribution, and objectives.3 is traceable to td(λ)], and we present it as the expectation with respect to the random variable the state s, where the state s follows the λ-version of discounted state distribution.a stationary markov policy π is a probability distribution defined on s ×a, π(a|s) denotes the probability of playing a in state s.let p π ∈ r |s|×|s| be a state transition probability matrix, and their components are:. recall the following visitation sequence τ = {s t , a t , r t+1 } t≥0 induced by π, we use p π (s t = s|s 0 ) to denote the probability of visiting s after t time steps from the initial state s 0 by executing π. 
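to make these objects concrete, the short numpy sketch below (a toy example, not part of the lecture) builds a one-step transition matrix p_π, recovers the t-step visitation probabilities p_π(s_t = s | s_0) as rows of its matrix powers, and accumulates the normalized discounted state distribution; the 3-state chain and γ = 0.9 are arbitrary choices for illustration.

```python
import numpy as np

# toy mdp under a fixed policy pi: 3 states, each row sums to 1
P_pi = np.array([[0.5, 0.5, 0.0],
                 [0.1, 0.6, 0.3],
                 [0.0, 0.2, 0.8]])
rho0 = np.array([1.0, 0.0, 0.0])      # initial state distribution
gamma = 0.9

# p_pi(s_t = s | s_0) is the s_0-th row of the t-th matrix power of P_pi
t = 4
print(np.linalg.matrix_power(P_pi, t)[0])

# normalized discounted state distribution: d(s) = (1 - gamma) * sum_t gamma^t * p(s_t = s)
d = np.zeros(3)
dist_t = rho0.copy()
for step in range(1000):
    d += (1 - gamma) * (gamma ** step) * dist_t
    dist_t = dist_t @ P_pi
print(d, d.sum())                     # sums to (approximately) 1
```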
due to the markov property, we know p π (s t = s|s t-1 = s ′ ) = p π (s|s ′ ), then, we rewrite (5) as follows.1, which illustrates the relationship between single-step state transition probability and multi-step state transition probability.• for the general case time t, we can first travel from s 0 to a middle point s ′ (any state can be a middle point), after t -1 steps, and then go to the final state s during the last step.recall p π ∈ r |s|×|s| denotes the one-step state transition matrix by executing π, and we use ρ 0 ∈ r |s| denotes the initial state distribution vector, and their components are:.it is noteworthy that if the reward r t+1 depends on the state of the environment at the next state, we use r(s t+1 |s t , a t ) to replace r t+1 to denote a real value that the decision-maker receives at time t when the system is at state s t , action a t is played and the system transforms to the next state s t+1 . figure2has shown the reward after the state transformation from state s to s ′ by executing π, which also provides an insight for one-step state transformation probability r π (s). the state value function v π (s) and state-action value function q π (s, a) satisfy the following equation:.finally, we use v π ∈ r |s| to collect all the state value functions, and each entry of v π is defined as.where p t+1 π is the (s, s ′ )-th component of matrix p t+1 π , which is the probability of visiting s ′ after t + 1 time steps from the state s by executing π, i.it is similar to normalized discounted distribution d ρ 0 π (s), we introduce λ-return version of discounted state distribution d λ π (s) as follows: ∀s ∈ s,. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/754.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/754.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c3a61aa19490fd720b56301cc58fccb7d35812b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/754.txt @@ -0,0 +1 @@ +post-regularization techniques for random forests refer to methods used to reduce overfitting and improve the generalization performance of random forest models. random forests are powerful ensemble learning algorithms that combine multiple decision trees to make predictions. however, they can still suffer from overfitting, especially when the trees in the forest become highly complex and tailored to the training data. post-regularization techniques aim to address this issue by modifying or refining the random forest model after the initial training phase. these techniques typically focus on adjusting the complexity of individual trees or applying ensemble-level modifications. some commonly used post-regularization techniques for random forests include:pruning: pruning involves removing unnecessary branches or nodes from individual trees to simplify their structure. this helps prevent overfitting and promotes better generalization by reducing the complexity of the trees .feature selection: random forests can sometimes include irrelevant or redundant features, which can degrade performance. feature selection techniques aim to identify and remove such features from the model, allowing it to focus on the most informative ones and potentially reducing overfitting .calibration: calibration techniques aim to refine the predicted probabilities of random forests to better align with the true class probabilities. 
this can be particularly useful in tasks where reliable probability estimates are important, such as in certain risk assessment or medical diagnosis scenarios . these post-regularization techniques provide various approaches to combat overfitting and enhance the generalization ability of random forest models. by incorporating these techniques into the random forest workflow, practitioners can often achieve better performance and more reliable predictions on unseen data. here, we present a bayesian post-hoc regularization method for the calibration of the decision trees' leaf node probabilities. our method is implemented within the python package treesmoothing, which seamlessly interfaces with sklearn functionalities and thus can be employed on any trained tree-based classifier. ii. related work - hierarchical shrinkage: agarwal et al. (2022) proposed a post-hoc regularization technique known as hierarchical shrinkage (hs). rather than modifying the tree structure, hs focuses on shrinking tree predictions and adjusting sample weights during training. this additional regularization improves generalization performance and allows for smaller ensembles without sacrificing accuracy. hs also enhances post-hoc interpretations by reducing noise in feature importance measures, leading to more reliable and robust interpretations. the method replaces the average prediction of a leaf node with a weighted average of the mean responses of the leaf and its ancestors, controlled by a regularization parameter λ as defined in eq. (2). the following is a brief summary of the ideas proposed in . assume that we are given a training set d n = (x; y). our goal is to learn a tree model f that accurately represents the regression function based on this training data. given a query point x, let t l ⊂ t l-1 ⊂ • • • ⊂ t 0 denote its leaf-to-root path, with t l and t 0 representing its leaf node and the root node respectively. for any node t, let n (t) denote the number of samples it contains, and êt {y} the average response. the tree model prediction can be written as a telescoping sum of the increments êtl {y} - êtl-1 {y} along this path. hs transforms f into a shrunk model fλ via the formula in eq. (2), where λ is a hyperparameter chosen by the user, for example by cross validation. hs maintains the tree structure, and only modifies the prediction over each leaf node. post-regularization techniques for random forests refer to methods used to reduce overfitting and improve the generalization performance of random forest models. post-regularization techniques aim to address this issue by modifying or refining the random forest model after the initial training phase. feature selection techniques aim to identify and remove such features from the model, allowing it to focus on the most informative ones and potentially reducing overfitting. calibration: calibration techniques aim to refine the predicted probabilities of random forests to better align with the true class probabilities. these post-regularization techniques provide various approaches to combat overfitting and enhance the generalization ability of random forest models. by incorporating these techniques into the random forest workflow, practitioners can often achieve better performance and more reliable predictions on unseen data. here, we present a bayesian post-hoc regularization method for the calibration of the decision trees' leaf node probabilities. hs also enhances post-hoc interpretations by reducing noise in feature importance measures, leading to more reliable and robust interpretations.
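as a toy illustration of hierarchical shrinkage along a single leaf-to-root path, the sketch below applies the shrinkage rule implied by the description above. since eq. (2) itself is not reproduced in this text, the exact denominator 1 + λ / n(t_{l-1}) is an assumption based on the usual hs formulation, and the (mean, sample count) path is made up.

```python
# minimal sketch of hierarchical shrinkage along one leaf-to-root path.
# `path` lists (mean_response, n_samples) from the root t_0 down to the leaf t_L.
def hs_prediction(path, lam):
    means = [m for m, _ in path]
    counts = [n for _, n in path]
    pred = means[0]                                        # root mean e_{t_0}{y}
    for l in range(1, len(path)):
        increment = means[l] - means[l - 1]                # e_{t_l}{y} - e_{t_{l-1}}{y}
        pred += increment / (1.0 + lam / counts[l - 1])    # assumed form of eq. (2)
    return pred

# toy path: root (1000 samples), internal node (200 samples), leaf (12 samples)
path = [(0.40, 1000), (0.55, 200), (0.90, 12)]
print(hs_prediction(path, lam=0.0))    # no shrinkage: the raw leaf mean, 0.90
print(hs_prediction(path, lam=50.0))   # shrinkage pulls the prediction toward ancestor means
```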
given a query point x, let t l ⊂ t l-1 ⊂ • • • ⊂ t 0 denote its leaf-to-root path, with t l and t 0 representing its leaf node and the root node respectively. here, we do not aim to manipulate the general structure of the trees, but we are adapting the leaf node probabilities such that more weight is given to nodes near the root node (pruning by calibration). according to this idea, we propose to update a conjugate beta prior b prior (α, β) from the root node to the leaf nodes by subsequently adding the number of classified samples to the model parameters α (class 0) and β (class 1). where n 0 (t l ) refers to the number of samples classified as class 0, and n 1 (t l ) is the number of samples classified as class 1 at node t l . the balanced accuracy is a performance metric used in classification tasks to measure the accuracy of a model by taking into account the imbalance in the class distribution. where n c is the number of classes, t p i represents the number of true positive instances in class i, and f n i represents the number of false negative instances in class i. the difference between the two metrics is that balanced accuracy focuses on the accuracy of individual classes, accounting for class imbalance, while roc-auc evaluates the overall discriminative ability of a classifier across all possible classification thresholds, providing a single scalar value. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/755.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/755.txt new file mode 100644 index 0000000000000000000000000000000000000000..20c7b57462b7eb47c54cd2d475da535cd799890a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/755.txt @@ -0,0 +1 @@ +variational inference tries to approximate a complex target distribution with a distribution from a simpler family . if p(z, x) is a target distribution with latent variables z ∈ r d and observed data x ∈ r n , and q w is a variational family with parameters w ∈ w, the goal is to minimize the negative evidence lower bound (elbo). for subsequent discussion, we have decomposed the objective into the free energy l and the negative entropy h. minimizing f is equivalent to minimizing the kl-divergence from q w to p(•|x), because kl(q w p(•|x)) = f (w) + log p(x). recent research has focused on "black box" variational inference, where the target distribution p is sufficiently complex that one can only access it through evaluating probabilities (or gradients) at chosen points . crucially, one can still get stochastic estimates of the variational objective f and of its gradient, and use these to optimize. still, variational inference can sometimes be unreliable , and some basic questions remain unanswered. most notably: does stochastic optimization of f converge to a minimum of the objective? there has been some progress towards answering this question. one line of research seeks to determine if the variational objective f has favorable structural properties, such as smoothness or (strong) convexity (sec. 2.1). another line seeks to control the "noise" (variance or expected squared norm) of different gradient estimators (sec. 2.2). however, few full convergence guarantees are known. that is, there are few known cases where applying a given stochastic optimization algorithm to a given target distribution is known to converge at a given rate. we identify two fundamental barriers preventing us from analysing this vi problem as a standard stochastic optimization problem.
first, the gradient noise depends on the parameters w in a nonstandard way (sec. 2.3). this adds great technical complexity and renders many traditional stochastic optimization proofs inapplicable. second, stochastic optimization theory typically requires that the (exact) objective function f is lipschitz continuous or lipschitz smooth. but in our vi setting, under some fairly benign assumptions, the elbo is neither lipschitz continuous nor lipschitz smooth.we obtain non-asymptotic convergence guarantees for this problem, under simple assumptions.central contributions (informal). suppose that the target model log p(•, x) is concave and lipschitz-smooth, and that q w is in a gaussian variational family parameterized by the mean and a factor of the covariance matrix (eq. 2). consider minimizing the negative elbo f using either one of the two following algorithms:• a proximal stochastic gradient method, with the proximal step applied to h and the gradient step applied to l, estimating ∇l(w) with a standard reparameterization gradient estimator (eq. 5), and using a triangular covariance factor; • a projected stochastic gradient method, with the gradient applied to f = l+h, estimating ∇f (w) using either of two common gradient estimators (eq. 7 or eq. 9), with the projection done over symmetric and non-degenerate (eq. 3) covariance factorsthen, both algorithms converge with a 1/ √ t complexity rate (cor. 12), or 1/t if we further assume that log p(•, x) is strongly concave (cor. 13).we also give a new bound on the noise of the "sticking the landing" gradient estimator, which leads to faster convergence when the target distribution p is closer to gaussian, up to exponentially fast convergence when it is exactly gaussian (cor. 14). this is achieved through a series of steps, that we summarize below. 1. we analyze the structural properties of the problem. existing results show that with a gaussian variational family, iflog p(•, x) is (strongly) convex or lipschitz smooth, then so is the free energy l. this is for instance known to be the case for some generalized linear models, and we give a new proof of convexity and smoothness for some hierarchical models including hierarchical logistic regression (see appendix 7.3). the remaining component of the elbo, the neg-entropy h, is convex when restricted to an appropriate set. it is not smooth, but it was recently proved to be smooth over a certain non-degeneracy set.2. we study the noise of three common gradient estimators. they do not satisfy usual noise bounds, but we show that they all satisfy a new quadratic bound (definition 5). for the sticking-thelanding estimator, our bound formalizes the longstanding intuition that it should have lower noise when the variational approximation is strong (thm. 4).3. we identify and solve the key optimization challenges posed by the above issues via new convergence results for the proximal and projected stochastic gradient methods, when applied to objectives that are smooth (but not uniformly smooth) and with gradient estimators satisfying our quadratic bound. if p(z, x) is a target distribution with latent variables z ∈ r d and observed data x ∈ r n , and q w is a variational family with parameters w ∈ w, the goal is to minimize the negative evidence lower bound (elbo). 
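as a minimal, assumption-laden sketch of the black-box setting above: a gaussian variational family q_w = n(mu, c c^T) parameterized by its mean and a triangular covariance factor, trained by stochastic gradient descent on a single-sample reparameterization estimate of the negative elbo. the target `log_p` is a stand-in density, and this is plain sgd rather than the proximal or projected variants analyzed in the text.

```python
import math
import torch

d = 2
def log_p(z):                        # stand-in target: an unnormalized gaussian log-density
    return -0.5 * (z ** 2).sum(-1)

mu = torch.zeros(d, requires_grad=True)     # variational mean
C = torch.eye(d, requires_grad=True)        # covariance factor (used through its lower triangle)

opt = torch.optim.SGD([mu, C], lr=1e-2)
for step in range(2000):
    tril = torch.tril(C)                    # triangular factor, so the log-determinant is cheap
    eps = torch.randn(d)
    z = mu + tril @ eps                     # reparameterized sample z ~ q_w
    entropy = 0.5 * d * (1 + math.log(2 * math.pi)) \
              + torch.log(torch.diagonal(tril).abs()).sum()
    neg_elbo = -log_p(z) - entropy          # single-sample estimate of f(w) = l(w) + h(w)
    opt.zero_grad()
    neg_elbo.backward()
    opt.step()

print(mu.detach(), torch.tril(C).detach())  # should approach the target's mean and a covariance factor
```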
the second line uses young's inequality to bound the noise in terms of (i) the distance of w from w * and (ii) a constant determined by the distance of w * from some fixed parameters w.the noise of g ent can be bounded since it only differs from g energy by the deterministic quantity ∇h(w), and the fact that-provided w ∈ w l -the singular values of w cannot be too small and so ∇h(w) cannot be too large. we say that a random vector g is a quadratically bounded estimator for ∇φ at w with parameters (a, b, w * ), if it is unbiased e = ∇φ(w) and if the expected squared norm is bounded by a quadratic function of the distance of parameters w to w * , i.estimator for ∇l g energy = -∇ w log p(t w (u), x) estimator for ∇l + ∇h g ent = -∇ w log p(t w (u), x) + ∇h(w) estimator for ∇l + ∇h.iflog p is strongly convex and smooth, another proof strategy is possible: smoothness guarantees that w * ∈ w m and strong convexity guarantees that w * ∈ {w : ww 2 2 ≤ d/µ}. because log p(x) and h(u) are both constants, we deduce that minimizing kl(q w p(•|x)) over w ∈ w is equivalent to minimizing l + h, where h(w) =log det c if w ∈ w and +∞ otherwise. according to the definition of q w (see(17)), the hessian of log q w is ∇ 2 z (log q w )(z) = -(cc ⊤ ) -1 , and so ∇ 2 z (log q w )(z) = σ max ((cc ⊤ ) -1 ) = σ min (c) -2 .let us now introduce the function κ : r d × m d → r ∪ {+∞}, defined by κ(w) := e z∼qw log q w (z)log q w * (z) if c is invertible, +∞ otherwise.we decompose the last term of (28) as -∇ℓ(w t )+b t+1 , w t+1 -w * =b t+1 , w t+1 -w * -∇ℓ(w t ), w t+1 -w t + ∇ℓ(w t ), w * -w t .on the second term we can use the fact that l is m -smooth to write -∇ℓ(w t ), w t+1w t ≤ m 2 w t+1w t 2 + ℓ(w t )ℓ(w t+1 ).on the last term we can use the convexity of f to write ∇ℓ(w t ), w *w t ≤ ℓ(w * )ℓ(w t ). so the the last term of (33) can be decomposed as g t -∇ℓ(w t ), w t+1w * =g t -∇ℓ(w t ), prox γth ( t (w t ))prox γth (t (w t )). as for the first term, using the nonexpansiveness of the proximal operator (lemma 33), we have that g t -∇ℓ(w t ), prox γth ( t (w t ))prox γth (t (w t )) ≤ g t -∇ℓ(w t ) t (w t ) -t (w t ) = γ t g t -∇ℓ(w t ) 2 . we also know that h is closed convex, since we consider symmetric scale parameters (see lemma 19. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/756.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/756.txt new file mode 100644 index 0000000000000000000000000000000000000000..60844bb74037ee81a173bd79849a0729ba4b5eac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/756.txt @@ -0,0 +1 @@ +in this paper, we present findings towards employing sparse connectivity in order to reduce the memory consumption of the classification layer for problems with extremely large output spaces (xmc). such problems arise in, e.g., tagging of text documents , next-word predictions , and different kinds of recommendation tasks . in order to ensure computational tractability of these tasks, which can have up to several millions of labels, one typically builds a hierarchical label tree , only exploring branches that are likely to contain relevant labels for the current instance. even though this is very effective at reducing the computation (from linear in the number of labels to logarithmic), it does not help in addressing the memory consumption, which is still linear in the number of labels times the number of hidden units.as an illustration consider the amazon-3m dataset. 
if we were to map the inputs to a hidden representation of 1024 units, the fully connected last layer for this dataset would need about 2.9 billion parameters, corresponding to 10.7 gib1 . given that modern deep learning optimizers such as adam need to keep track of the value, gradient, and first and second moment, this leads to an overall peak memory consumption of over 40 gib, making it nigh impossible to train such models on commodity hardware. therefore, we want to investigate possibilities for memory efficient sparse training of this huge last layer. there are two pre-existing approaches that serve as an indication that this is an idea that could be successful: first, for dismec, a linear model applied to tf-idf representations of input text, it is known that the resulting layer can be sparsified after training to contain less than 1% non-zeros . in a linear model, the different classifiers for each label can be trained independently. as a result, only the full weights of the label that is currently trained needs to be kept in memory, and can be pruned as soon as the training for that label has finished. for non-linear models, the mach algorithm can be interpreted as a special case of training with static, random sparsity. it works by hashing the labels into different buckets, and performing training and predictions only on the level of buckets. if enough independent hashes are used, this method allows to solve the original problem in the large output space. however, in practice, the results presented for mach are not as good as for competing methods.the contributions of this paper are as follows: we show that naïvely applying a dynamic sparse training algorithm to the last layer of an xmc problem results in strongly reduced predictive performance. inspired by mach, we then propose to alleviate this problem by inserting a penultimate layer that is larger than the hidden representation of the inputs, but still much smaller than the size of the label space. such an increased layer size drastically improves the chances of dynamic sparse training finding a good subnetwork, and enables us to get results only slightly worse than training with a dense last layer. we demonstrate this on several large-scale datasets. to ensure memory efficient and quick computations, we propose to restrict the sparsity structure to uniform sparsity, such that each unit in the output layer receives exactly the same number of inputs. this has several important consequences : (i) it makes it impossible for the training to focus most non-zero weights on a few, prominent head labels, and instead ensures a more even distribution of the representational capacity, (ii) compared to coordinate-format this requires only half the memory to store the indices, and compared to compressed row sparse matrices the data layout is simpler, making it easier to implement the corresponding operations on a gpu, and (iii) it also means that changing the sparsity structure (redistribution of connections) can be implemented as a very cheap operation.in this paper, we present findings towards employing sparse connectivity in order to reduce the memory consumption of the classification layer for problems with extremely large output spaces (xmc).the contributions of this paper are as follows: we show that naïvely applying a dynamic sparse training algorithm to the last layer of an xmc problem results in strongly reduced predictive performance. 
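the memory figures quoted earlier in this passage follow from a few lines of arithmetic; the label count used below is the approximate size of the amazon-3m label space, and the factor of four stands for the weight, gradient, and two adam moment buffers, so the exact numbers should be treated as illustrative.

```python
# back-of-the-envelope memory estimate for a dense last layer on amazon-3m
n_labels = 2_812_281          # approximate label count of the amazon-3m dataset
hidden = 1024
bytes_per_float = 4

params = n_labels * hidden                             # ~2.9 billion weights
weights_gib = params * bytes_per_float / 2**30         # ~10.7 GiB for the weights alone
adam_peak_gib = 4 * weights_gib                        # weights + grads + two moment buffers
print(f"{params / 1e9:.1f} B params, {weights_gib:.1f} GiB weights, {adam_peak_gib:.1f} GiB peak")
```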
such an increased layer size drastically improves the chances of dynamic sparse training finding a good subnetwork, and enables us to get results only slightly worse than training with a dense last layer. this has several important consequences : (i) it makes it impossible for the training to focus most non-zero weights on a few, prominent head labels, and instead ensures a more even distribution of the representational capacity, (ii) compared to coordinate-format this requires only half the memory to store the indices, and compared to compressed row sparse matrices the data layout is simpler, making it easier to implement the corresponding operations on a gpu, and (iii) it also means that changing the sparsity structure (redistribution of connections) can be implemented as a very cheap operation.here, we want to apply the sparse evolutionary training (set) algorithmto the classification layer, so that we have sparse training with dynamic sparsity structure.finally, we noticed that (even without uniformity constraint), replacing the dense layer with a sparse layer results in diminished classification accuracy, which we attribute to underfitting. thus, we propose to improve the expressiveness of the model by adding an intermediate layer between the embedding layer and the final classification layer.the number of structural non-zeros is chosen such that in the unstructured sparse layers, there are an average of 32 connections per label, and in the uniform sparse layers there are exactly 32 connections per label.unfortunately, due to the sheer size of the wikipedia-500k dataset, and inefficient training without intermediate layer (many epochs required) or without uniform sparsity (very slow-up to 4400 seconds/epoch), the training runs for these setups timed out, and thus we do not have data for these settings. p@1 p@3 p@5 p@1 p@3 p@5 the measurements further show that for training based on slice features, the sparse implementation manages to attain and slightly surpass the classification performance of the equivalent dense layer, whereas for cascade features there still remains a noticeable gap between dense and sparse training. therefore, it is not the sparse realizations that perform better, but instead the dense setting that performs disproportionately worse for slice features, as it does not have the benefit of the additional intermediate layer that allows non-linear classification boundaries.the data also shows a clear qualitative difference between amazon-670k and wikipedia-500k: for amazon-670k, switching from dense to sparse does not lead to a noticeable decline in the ability of the classifier to fit the training set, whereas for wikipedia-500k the drop is dramatic, especially in the case of slice features.the results above show that sparsification of the extreme layer is possible without a strong decrease in classification performance, relative to a dense layer. a schematic comparison of our architecture to (i) a dense last layer, (ii) a vanilla sparse last layer, and (iii) mach (as an inference strategy) is shown in figure2.in this paper, we have showed that it is possible to replace an extreme-scale dense classification layer with a memoryefficient sequence of an intermediately-sized layer followed by a uniformly-sparsely connected layer, without a strong drop in classification performance, and in some cases even improved p@k. 
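a minimal sketch of the uniformly sparse classification layer described above: every label receives exactly `k` incoming connections from the enlarged penultimate layer, stored as an index matrix plus a weight matrix. this is an illustrative re-implementation under stated assumptions, not the authors' code, and it omits the connection-redistribution step of dynamic sparse training.

```python
import torch
import torch.nn as nn

class UniformSparseLinear(nn.Module):
    """extreme classification layer in which every label has exactly k incoming weights."""
    def __init__(self, in_features, n_labels, k=32):
        super().__init__()
        # fixed fan-in: k input indices per label; a dynamic-sparse scheme would periodically re-draw these
        self.register_buffer("idx", torch.randint(in_features, (n_labels, k)))
        self.weight = nn.Parameter(torch.randn(n_labels, k) * 0.01)
        self.bias = nn.Parameter(torch.zeros(n_labels))

    def forward(self, h):                      # h: (batch, in_features)
        gathered = h[:, self.idx]              # (batch, n_labels, k) via fancy indexing
        return (gathered * self.weight).sum(-1) + self.bias

# embedding -> enlarged intermediate layer -> uniformly sparse extreme layer
model = nn.Sequential(
    nn.Linear(768, 16384), nn.ReLU(),
    UniformSparseLinear(16384, 670_091, k=32),  # label count roughly that of amazon-670k
)
print(model(torch.randn(2, 768)).shape)         # torch.Size([2, 670091])
```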
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/757.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/757.txt new file mode 100644 index 0000000000000000000000000000000000000000..734792ff606829716840de34c8934bd413fb367a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/757.txt @@ -0,0 +1 @@ +federated learning in the context of satellites refers to a distributed machine learning approach where multiple satellites collaboratively train a model without sharing their sensitive data with a central server or each other.in this scenario, each satellite processes and learns from its own locally collected data, using onboard computational resources. the satellites exchange model updates rather than raw data, leveraging inter-satellite communication links or ground stations. the updates are aggregated in a centralized manner or through a decentralized scheme, allowing the collective intelligence of the satellite network to improve the global model.federated learning in satellites offers several advantages. firstly, it enables efficient utilization of computational resources on individual satellites, reducing the need for extensive data transmission and storage. this is particularly beneficial in scenarios where bandwidth and storage capacity are limited. additionally, federated learning ensures data privacy and security by avoiding the need to transmit sensitive information between satellites or to a central server.moreover, the distributed nature of federated learning in satellites enhances robustness and resilience. if a satellite encounters communication disruptions or malfunctions, other satellites can continue training the model independently. this decentralized approach also supports scalability, allowing new satellites to join the federation without requiring a centralized retraining process.by leveraging federated learning, satellite networks can collectively improve their machine learning models while maintaining data privacy, optimizing resource usage, and enhancing overall performance and adaptability in space-based applications.federated learning in the context of satellites involves the application of collaborative machine learning techniques within a network of satellites. it allows satellites to collectively train machine learning models while preserving data privacy and minimizing communication overhead.in satellite systems, federated learning can be employed to leverage the data collected by individual satellites without the need to transmit sensitive or large-scale data back to a central server on earth. instead, each satellite performs local model training using its onboard data and computational resources.the trained models are then shared among the satellites, either directly or through relay satellites, using inter-satellite communication links. these models can be combined or averaged to create an improved global model that captures knowledge from each participating satellite.federated learning in satellites offers several advantages. firstly, it enables collaborative learning across a distributed network of satellites, allowing the utilization of a larger and more diverse dataset. secondly, it reduces the need for extensive data transmission to earth, which can be costly and inefficient. 
moreover, it addresses privacy concerns by keeping sensitive data onboard the satellites and sharing only aggregated model updates.by employing federated learning techniques, satellite systems can benefit from collective intelligence and improved models while optimizing communication bandwidth and preserving data privacy in a distributed environment.federated learning in the context of satellites refers to a distributed machine learning approach where multiple satellites collaboratively train a model without sharing their sensitive data with a central server or each other. the satellites exchange model updates rather than raw data, leveraging inter-satellite communication links or ground stations.federated learning in satellites offers several advantages. firstly, it enables efficient utilization of computational resources on individual satellites, reducing the need for extensive data transmission and storage. additionally, federated learning ensures data privacy and security by avoiding the need to transmit sensitive information between satellites or to a central server.moreover, the distributed nature of federated learning in satellites enhances robustness and resilience.by leveraging federated learning, satellite networks can collectively improve their machine learning models while maintaining data privacy, optimizing resource usage, and enhancing overall performance and adaptability in space-based applications.federated learning in the context of satellites involves the application of collaborative machine learning techniques within a network of satellites. it allows satellites to collectively train machine learning models while preserving data privacy and minimizing communication overhead.in satellite systems, federated learning can be employed to leverage the data collected by individual satellites without the need to transmit sensitive or large-scale data back to a central server on earth.the trained models are then shared among the satellites, either directly or through relay satellites, using inter-satellite communication links.federated learning in satellites offers several advantages. firstly, it enables collaborative learning across a distributed network of satellites, allowing the utilization of a larger and more diverse dataset. moreover, it addresses privacy concerns by keeping sensitive data onboard the satellites and sharing only aggregated model updates.by employing federated learning techniques, satellite systems can benefit from collective intelligence and improved models while optimizing communication bandwidth and preserving data privacy in a distributed environment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/758.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/758.txt new file mode 100644 index 0000000000000000000000000000000000000000..65739fff38ac7fbe6252a160907057c889b89309 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/758.txt @@ -0,0 +1 @@ +in the past decades, various forms of structured prediction have been used extensively across many fields, including computer vision, natural language processing, network analysis, computational chemistry, to name a few. 
in these fields, examples of structured prediction problems include foreground / background detection in a digital image , grammatical partof-speech tagging in an english sentence , community identification and clustering in social networks , and identifying representative subsets of millions of chemical compounds .on a higher level, all of the structured prediction inference problems mentioned above seek to maximize some score function over the space of labels. in other words, a common goal in inference tasks is to recover the label of each entity, such that the prediction matches the observation as much as possible. suppose we represent the structured prediction inference problem using an undirected graph g = (v, e), where each node represents an entity, and each edge represents the interaction between two nodes. in the context of markov random fields (mrfs), inference can be formulated as solving the following optimization problem :where l is the space of labels, c v (l) is the score of assigning label l to node v, and c v 1 ,v 2 (l 1 , l 2 ) is the score of assigning labels l 1 and l 2 to neighboring nodes v 1 and v 2 . these two score terms in (1) are often called unary and pairwise potentials in the mrf and inference literature. the optimization formulation above aims to recover the global label structure, by finding a configuration that maximizes the summation of unary and pairwise scores across the graph. some prior literature has explored structured prediction and inference problems that involve unary and pairwise potentials, as demonstrated by equation (1). for instance, globerson et al. investigated label recovery in two-dimensional grid lattices. similarly, foster et al. extended this model to include tree decompositions. on another note, bello and honorio proposed a convex semidefinite programming (sdp) approach to exact inference. all these works were motivated by a generative model that assumes a ground truth label vector y * and generates potentially noisy unary and pairwise observations based on label interactions.in this paper, we follow the two-stage convex optimization approach proposed in bello and honorio , but turn our attention on the goal of partial inference. our choice involves two levels of significance:• from an optimization point of view, the primal and dual construction and karush-kuhn-tucker (kkt) analysis have been widely used in generative statistical models to study the guarantee of exact recovery in the past decade. besides structured prediction, examples include sparsity recovery (often referred to as primal-dual witness or pdw) and community detection .the standard approach of the primal and dual analysis for generative models usually involves three steps. first, it is assumed that there exists some unobserved groundtruth labeling, which generates the observations in a stochastic and noisy fashion (generative assumption). next, one lists all kkt conditions behind the primal and dual convex optimization problems. in particular, the groundtruth labeling should be a feasible solution to the primal problem. after that, one can analyze the statistical conditions for the groundtruth labeling to be the optimal and unique solution to the optimization problem with high probability.while being extremely powerful, the primal and dual approach was only applied to study the problem of exact recovery in the aforementioned literature. 
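for reference, the optimization problem referred to as equation (1) above can be written out explicitly; the display below is reconstructed from the surrounding description (labels y_v ∈ l, unary scores c_v, pairwise scores c_{v1,v2} over the edges of g) and should be read as a best-effort reconstruction rather than a verbatim copy of the original equation.

```latex
\max_{y \in \mathcal{L}^{V}} \; \sum_{v \in V} c_{v}(y_{v})
  \;+\; \sum_{(v_1, v_2) \in E} c_{v_1, v_2}(y_{v_1}, y_{v_2}) \tag{1}
```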
in this work, we are interested in extending the primal-dual framework to study the guarantee of partial recovery.• on an application level, recovering the majority of the labels can be more relevant and practical in structured prediction inference tasks.to see this, for example, consider an inference graph model with an isolated node. in other words, following the definition of (1), assume there exists some node v whose unary potential is zero, and is not connected to any other node in the graph. in this case, we can never recover the true label of this node better than random guessing, and consequently exact inference can never be achieved with a high probability. in contrast, provable guarantees can be achieved if the goal is to recover the majority (90% for example) of the labels, and our algorithm and analysis can be more robust to outliers.some prior literature considered tasks that are similar to partial inference in specific types of graphs. for instance, globerson et al. studied hamming error of recovery in twodimensional grid lattices, and foster et al. studied graphs allowing tree decompositions.it is worth highlighting that our analysis is general: we provides partial inference results for any type of graphs.summary of our contribution. our work is mainly theoretical. we propose a novel primal and dual framework to study the problem of partial inference in structured prediction. our framework analyze the kkt conditions of the convex optimization problem, and derive the sufficient statistical conditions for partial inference of the majority of the true labels. furthermore, our result subsumes the classic result of exact inference.on a higher level, all of the structured prediction inference problems mentioned above seek to maximize some score function over the space of labels. in other words, a common goal in inference tasks is to recover the label of each entity, such that the prediction matches the observation as much as possible. suppose we represent the structured prediction inference problem using an undirected graph g = (v, e), where each node represents an entity, and each edge represents the interaction between two nodes.where l is the space of labels, c v (l) is the score of assigning label l to node v, and c v 1 ,v 2 (l 1 , l 2 ) is the score of assigning labels l 1 and l 2 to neighboring nodes v 1 and v 2 . some prior literature has explored structured prediction and inference problems that involve unary and pairwise potentials, as demonstrated by equation (1).in this paper, we follow the two-stage convex optimization approach proposed inbello and honorio , but turn our attention on the goal of partial inference.• from an optimization point of view, the primal and dual construction and karush-kuhn-tucker (kkt) analysis have been widely used in generative statistical models to study the guarantee of exact recovery in the past decade.• on an application level, recovering the majority of the labels can be more relevant and practical in structured prediction inference tasks. in this case, we can never recover the true label of this node better than random guessing, and consequently exact inference can never be achieved with a high probability. we propose a novel primal and dual framework to study the problem of partial inference in structured prediction. 
our framework analyze the kkt conditions of the convex optimization problem, and derive the sufficient statistical conditions for partial inference of the majority of the true labels.we consider an undirected connected graph g = (v, e), where the number of nodes is |v| = n, and e denotes the set of edges in the graph.for the underlying undirected graph g = (v, e), we use ∆ i denote the degree of node i in g, and let ∆ max denote the maximum degree across all nodes. our partial inference analysis and results subsume the exact inference case. in particular, our results of partial inference on the following classes of graphs subsume the results of exact inference frombello and honorio . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/759.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/759.txt new file mode 100644 index 0000000000000000000000000000000000000000..f396835d9d697dc42b94224d50b179702a9fadec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/759.txt @@ -0,0 +1 @@ +accurate crop yield forecasts can benefit governments, policymakers, and individual farmers by providing better insights into various exogenous drivers that impact the agricultural markets. these insights can lead to earlier responses and better-informed decisions to improve food security at both regional and international scales (becker-reshef et al., 2022). recently, machine learning algorithms have been applied on earth observation (eo) data and have shown a great potential to improve the reliability of these forecasts (basso & liu, 2019).in this paper, we consider the use of eo data collected from the geoglam crop monitor ag-met system (https://cropmonitor.org) and tree-based algorithms to directly forecast wheat yields in kazakhstan, the 10 th largest wheat exporter in the world (fao, 2022). a prominent challenge negatively impacting machine learning models' performance in forecasting yields is the spatial yield heterogeneity due to exogenous factors like local farming practices or crop varietals that are not reflected in remote sensing data. lee et al. (2022) proposed to train a separate model for each province, successfully reducing the state-wise prediction errors. however, in our dataset, due to a very small amount of yield data available for each province (typically less than 20 data points), this approach results in highly unreliable and overfit models with error rates far exceeding those of baseline models, as shown in figure 3. to improve upon this issue, we focus on reducing the errors, especially in provinces with the least accurate yield predictions, by using state-wise additive bias. first, we followed the methodologies in sahajpal et al. (2020) to create features from eo data and investigate the performance of various baseline tree-based models, including xgboost, catboost, and random forest, in forecasting wheat yields at the state level. next, each state-wise additive bias was separately added to the model's predictions in each province to obtain the final yield forecast. this approach shows a remarkable increase in overall performance, with the most significant benefits being seen in the province with the highest baseline yield errors (almatinskaya). furthermore, since state-wise bias adds no computational overhead during the inference process, this technique can be efficiently applied to improve yield predictions in other datasets. 
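a minimal sketch of the state-wise additive bias described above: for each province, the bias is taken as the mean residual of the baseline model on held-out years, and it is simply added to the baseline forecasts at inference time. the csv files, the column names, and the choice of mean residual as the bias are assumptions made for illustration.

```python
import pandas as pd

# val: held-out years with baseline predictions; test: years to forecast
# expected (hypothetical) columns: "state", "y_true", "y_pred"
val = pd.read_csv("validation_predictions.csv")
test = pd.read_csv("test_predictions.csv")

# per-state additive bias = mean residual of the baseline model on the held-out years
bias = (val["y_true"] - val["y_pred"]).groupby(val["state"]).mean()

# apply: add each province's bias to the baseline forecast (no extra inference cost)
test["y_pred_corrected"] = test["y_pred"] + test["state"].map(bias).fillna(0.0)
print(test[["state", "y_pred", "y_pred_corrected"]].head())
```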
a prominent challenge negatively impacting machine learning models' performance in forecasting yields is the spatial yield heterogeneity due to exogenous factors like local farming practices or crop varietals that are not reflected in remote sensing data. (2022)proposed to train a separate model for each province, successfully reducing the state-wise prediction errors. however, in our dataset, due to a very small amount of yield data available for each province (typically less than 20 data points), this approach results in highly unreliable and overfit models with error rates far exceeding those of baseline models, as shown in figure3. to improve upon this issue, we focus on reducing the errors, especially in provinces with the least accurate yield predictions, by using state-wise additive bias. first, we followed the methodologies insahajpal et al. we use eo data (ndvi, growing degree days, daily minimum and maximum temperature, soil moisture, evaporative stress index, and precipitation) to as input features for training and evaluating machine learning models and to compute state-wise bias.we trained and evaluated the effectiveness of the state-wise bias by applying this bias to the baseline tree models (xgboost, catboost, and random forest) to forecast wheat yields at the state level in kazakhstan. (2021), and split the remaining data into training (10 years) and validation sets (4 years) for model optimization.the fundamental motivation for computing state-wise bias is that we observed baseline models are often biased toward values close to the mean yields, underestimating high yields in provinces with high productions, as discussed in section 3. although we have incorporated the regional information as categorical data in baseline models, the models still suffer from this bias. therefore, state-wise bias is proposed as a simple yet effective technique to alleviate this spatial heterogeneity problem, resulting in a significant decrease in both mape and rmse, as shown in section 3 for each state do.to investigate the effect of state-wise bias, we test various models on different out-of-fold test years and compare the performance with and without state-wise bias.103 mg/ha.xgboost besides baseline models, we also compare our approach with region-specific models, an approach that has been used in several works to forecast crop yieldslee et al.machine learning models are frequently biased toward average yield in the dataset, resulting in higher errors for provinces with crop yields far from the mean, as shown in figure2this issue is exacerbated by the spatial heterogeneity between different provinces/states. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/76.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/76.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a750fda9df2c57ccd5941aa27f900568d3ab33d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/76.txt @@ -0,0 +1 @@ +consider a learner that wants to predict the next day's temperature range at a given location based on inputs such as the current day's temperature range, humidity, atmospheric pressure, precipitation, wind speed, solar radiation, location, and time of year. in our model, this learner is tested daily. 
on a given day, the learner gets inputs for that day, which it uses to output a prediction for the next day's temperature range; when the next day arrives, it sees the correct temperature range, then uses this feedback to update future predictions. as this is repeated, the learner accumulates information to help it make better predictions. a natural question arises: can the learner guarantee that its predictions become better over time, and if so, how quickly?we investigate a model of online learning of real-valued functions previously studied in where an algorithm a learns a real-valued function f from some class f in trials. past research on this model focused on functions of one input, for example, predicting the temperature range solely based on the time of year. the research showed that, as long as the function is sufficiently smooth, the learner can become a good predictor fairly rapidly. suppose that f consists of functions f : s → r for some set s, and fix some f ∈ f. in each trial t = 0, . . . , m, a receives an input s t ∈ s, guesses ŷt for the value of f (s t ), and receives the actual value of f (s t ).following , we focus on an error function which measures how difficult it is for a learner to predict functions accurately in the worst case. the error function depends on two parameters, p and q, which determine how harshly the learner is punished for errors and the types of functions that the learner might encounter, respectively. small values of p and q are more difficult for the : exact values and bounds on opt p (f q ) for p, q > 1 prior to the results in this paper learner, leading to higher values of the error function. for each algorithm a, p > 0, f ∈ f, and σ = (s 0 , . . . , s m ) ∈ s m+1 , definewhen f and σ are clear from the context, we refer to l p (a, f, σ) as the total p-error of a. define l p (a, f) = sup f ∈f ,σ∈∪ m∈z + s m l p (a, f, σ)and opt p (f) = inf a l p (a, f). note that unlike the definition of opt presented in , f may consist of real-valued functions on any domain, not just functions from to r.the case where f contains functions f : → r whose derivatives have various bounded norms was studied in . for q ≥ 1, let f q be the class of absolutely continuous functions f : → r such that 1 0 |f ′ (x)| q dx ≤ 1, and let f ∞ be the class of absolutely continuous functions f : → r such that sup x∈(0,1) |f ′ (x)| ≤ 1. as noted in , f ∞ contains exactly those f : → r such that |f (x)f (y)| ≤ |x -y| for all x, y ∈ . also, by jensen's inequality, f ∞ ⊆ f q ⊆ f r for all q ≥ r ≥ 1. hence opt p (f ∞ ) ≤ opt p (f q ) ≤ opt p (f r ) for all p ≥ 1 and q ≥ r ≥ 1. previous papers determined the exact values of opt p (f q ) for p = 1, q = 1, and p, q ≥ 2, as well as bounds on opt p (f q ) for p ∈ (1, 2) and q ≥ 2.the paper proved that opt p (f 1 ) = ∞ for all p ≥ 1. they also showed that opt 1 (f q ) = opt 1 (f ∞ ) = ∞ for all q ≥ 1. in contrast, they found that opt p (f q ) = opt p (f ∞ ) = 1 for all p ≥ 2 and q ≥ 2. this was also proved in using a different algorithm based on a generalization of the widrow-hoff algorithm , and a noisy version of this problem was studied in . in this paper, we extend the region of values of p, q for which it is known that opt p (f q ) = 1.theorem 1.1. for any reals q > 1 and p ≥ 2 + 1 q-1 , we have opt p (f q ) = 1. for p = 1 + ε with ε ∈ (0, 1), the paper proved that opt p (f q ) = o(ε -1 ) for all q ≥ 2, which implies that opt p (f ∞ ) = o(ε -1 ). however, these bounds are not sharp. 
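the display that should follow "... define" above appears to have been dropped; given the surrounding description (predictions ŷ_t compared against f(s_t) over trials t = 0, ..., m, penalized with exponent p), it is presumably the total p-error below. this reconstruction is offered as an assumption rather than a quotation.

```latex
L_p(A, f, \sigma) \;=\; \sum_{t=0}^{m} \left| \hat{y}_t - f(s_t) \right|^{p}
```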
in this paper, we determine opt 1+ε (f q ) up to a constant factor for all ε ∈ (0, 1) and q ≥ 2.: exact values and bounds on opt p (f q ) for p, q > 1 including the results in this paper theorem 1.2. for all ε ∈ (0, 1), we have opt 1+ε (f ∞ ) = θ(ε -12 ) and opt 1+ε (f q ) = θ(ε -1 2 ) for all q ≥ 2, where the constants in the bound do not depend on q.the proof of theorem 1.2 splits into an upper bound and a lower bound. for the upper bound, we use hölder's inequality combined with results from . for the lower bound, we modify a construction used in , which obtained bounds on a finite variant of opt 1 (f q ) that depends on the number of trials m.the results of and left open the problem of determining opt p (f q ) for q ∈ (1, 2). it was not even known up to a constant factor. we make progress on this problem by determining opt 2 (f 1+ε ) up to a constant factor for ε ∈ (0, 1). figure 1 shows the bounds and exact values known for p, q > 1 prior to the results in our paper, while figure 2 shows the bounds and exact values known for p, q > 1 including the results in our paper.theorem 1.3. for ε ∈ (0, 1), we have opt 2 (f 1+ε ) = θ(ε -1 ).the paper also discussed the problem of online learning for smooth functions of multiple variables. previous research on learning multi-variable functions has focused on expected loss rather than worst-case loss, using models where the inputs x i are determined by a probability distribution.we introduce a natural extension of the single-variable setup from to multi-variable functions. specifically, for q ≥ 1 and d ∈ z + , let f q,d be the class of functions f :) is in f q , where v i,x ∈ d is the vector formed when x is inserted at the i th position of (x 1 , . . . , x d-1 ).one of the most fundamental questions about opt p (f q,d ) is to determine when it is finite and when it is infinite. we answer this question almost completely when q = ∞. theorem 1.4. for any positive integer d, opt p (f ∞,d ) is finite when p > d and infinite when 0 < p < d.as a corollary, it immediately follows for 0 < p < d that opt p (f q,d ) = ∞ for all q ≥ 1. moreover, it is easy to see that opt p (f 1,d ) = ∞ for all positive integers d and p.the papers and also investigated worst-case mistake bounds for online learning of smooth functions when the number of trials is bounded. in particular, using the same notation as in the first paragraph of this section, definethe paper proved that opt 1 (f q , m) = o(log(m)) for all q ≥ 2 and opt 1 (f 2 , m) = ω( log(m)). the paper sharpened these bounds by proving that opt 1 (f q , m) = θ( log(m)) for all q ≥ 2 and opt± o(1). we obtain sharp bounds for online learning of smooth functions with a bounded number of trials when 0 < p < d. in particular, these sharp bounds are also new in the single-variable case.theorem 1.5. for any positive integer d and real number p with 0 < p < d, we have, where the constants in the bounds depend on p and d.in section 2, we focus on the single-variable setup. we prove theorem 1.3 in subsections 2.1 and 2.2. subsection 2.1 establishes the lower bound, while subsection 2.2 establishes the upper bound along with several useful lemmas. subsection 2.3 focuses on proving theorem 1.1. in subsection 2.4, we prove theorem 1.2. in section 3, we focus on the multi-variable setup, establishing various bounds on opt p (f q,d ). 
finally, in section 4, we discuss open problems.2 results in the single-variable setup for q ∈ (1, 2) small values of p and q are more difficult for the : exact values and bounds on opt p (f q ) for p, q > 1 prior to the results in this paper learner, leading to higher values of the error function. for q ≥ 1, let f q be the class of absolutely continuous functions f : → r such that 1 0 |f ′ (x)| q dx ≤ 1, and let f ∞ be the class of absolutely continuous functions f : → r such that sup x∈(0,1) |f ′ (x)| ≤ 1. hence opt p (f ∞ ) ≤ opt p (f q ) ≤ opt p (f r ) for all p ≥ 1 and q ≥ r ≥ 1. previous papers determined the exact values of opt p (f q ) for p = 1, q = 1, and p, q ≥ 2, as well as bounds on opt p (f q ) for p ∈ (1, 2) and q ≥ 2.p yields opt p (f q ) ≥ q (p2 p e ln 2)(q-1) .q-1 ≥ (q -1) q(q-1) 2 q-1 a q (1a) q max {a -1 , (1a) -1 } = (q -1) q(q-1) 2 q-1 max {a q-1 (1a) q , a q (1a) q-1 } .by the weighted arithmetic mean -geometric mean inequality, for r ∈ (0, 1) we have r q (1r) q-1 = q q (q -1) q (q -1)r q.thus max a q-1 (1a) q , a q (1a) q-1 ≤ q q (q -1) q q -1 2q -1 2q-1 = q q (q -1) q-1 (2q -1) 2q-1 , so a|u| q + (1a)|v| q ≥ (q -1) (q-1) 2 (2q -1) 2q-1 2 q-1 q q . consider f (q) = (q -1) 2 ln(q -1) + (2q -1) ln(2q -1) -(q -1) ln 2q ln q over q ∈ (1, 2). note that f ′ (q) = (2(q -1) ln(q -1) + (q -1)) + (2 ln(2q -1) + 2) -ln 2 -(ln q + 1) = 1 -ln 2 + 2(q -1) ln(q -1) + (q -1 -ln q) + 2 ln(2q -1).as |x| q is increasing for x ≥ 0, a|u| q + (1a)|v| q ≥ a (q -1) q-1 a(1a) q = (q -1) q(q-1) a q-1 (1a) q .using the work above, (q -1) q(q-1) a q-1 (1a) q ≥ (q -1) q(q-1) max {a q-1 (1a) q , a q (1a) q-1 } > 2 q-1 > 1, so the inequality holds whenever v ≥ 0.we obtain the next corollary since opt p (f ∞ ) ≤ opt p (f r ) ≤ opt p (f q ) whenever 1 ≤ q ≤ r.22, again using the fact that opt p (f ∞ ) ≤ opt p (f r ) ≤ opt p (f q ) whenever 1 ≤ q ≤ r. if opt p (f q,d ) = ∞, there is nothing to prove, and if opt p (f q ) = ∞, it is clear, by restricting the inputs x i to the set {ce 1 : c ∈ } ⊂ d (where e 1 ∈ d has a 1 in the first component and a 0 in the rest), that opt p (f q,d ) = ∞ as well. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/760.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/760.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3de7ae578bfa804056301c6e0342861f379d07b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/760.txt @@ -0,0 +1 @@ +the assumptions in machine learning (ml) models play a crucial role not just in appropriate applicability but also in interpretability, reproducibility, and generalizability. one common assumption is that the dataset is independent and identically distributed (i.i.d). however, in reality, this assumption rarely holds true, as human learning often involves connecting new information with what was previously observed. psychological theories such as primacy and recency effects , serial position effect, and frame effect suggest that the order in which data is presented can significantly impact decision-making processes. in this work, we have devised a learning algorithm that exhibits sensitivity to the order in which data is presented. 
this unique characteristic imparts our proposed model with decision boundaries or decision functions that rely on the specific arrangement of training data.in our research, we introduce the novel use of 'effort-to-compress' (etc) -a compressioncomplexity measure -as an impurity function for decision trees, marking the first instance of its application in machine learning. etc effectively measures the effort required for lossless compression of an object through a predetermined lossless compression algorithm . etc was initially introduced in as a measure of complexity for timeseries analysis, aiming to overcome the severe limitations of entropy-based complexity measures. it is worth noting that the concept of complexity lacks a singular, universally accepted definition. in , complexity was explored from different perspectives, including the effort-to-describe (shannon entropy, lempel-ziv complexity), effort-to-compress (etc complexity), and degree-of-order (subsymmetry). the same paper highlighted the superior performance of etc in distinguishing between periodic and chaotic timeseries. moreover, etc has played a pivotal role in the development of an interventional causality testing method known as compression-complexity-causality (ccc) . ccc and allied approaches based on compression-complexity have been rigorously tested in several practical applications for causal discovery and inference . etc has demonstrated robust, reliable and superior performance over infotheoretic approaches when applied to short and noisy time series data (including stochastic and/or chaotic ones), leading to its utilization in diverse fields such as investigating cardiovascular dynamics , conducting cognitive research , and analysis of muscial compositions .in this research, we present a new application of etc in the field of machine learning, offering a fresh perspective on its ability to capture structural impurity. leveraging this insight, we introduce a decision tree classifier that maximizes the etc gain (analogous to information gain in conventional decision trees). it is crucial to highlight that shannon entropy and gini impurity fall short in capturing structural impurity, resulting in an impurity measure that disregards the data's underlying structure (in terms of order). the utilization of etc as an impurity measure provides the distinct advantage of generating different decision trees for various permutations of data instances. consequently, this approach frees us from the need to adhere strictly to the i.i.d. assumption commonly employed in machine learning. in fact, etc itself makes no such assumption of the timeseries for characterizing its complexity. 
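as a concrete illustration of how etc can serve as an impurity measure, the following sketch computes etc via non-sequential recursive pair substitution (nsrps) on a label sequence and uses it in an etc-gain split criterion; the exact substitution scheme and tie-breaking used by the authors are not specified here, so this is an illustrative variant rather than their implementation.

```python
from collections import Counter

def etc(seq):
    """Effort-to-compress: number of NSRPS passes needed to reduce `seq` to a
    constant (or length-1) sequence. Each pass replaces occurrences (left to
    right, non-overlapping) of the most frequent adjacent pair with a new symbol."""
    seq = list(seq)
    steps = 0
    next_symbol = max(seq, default=0) + 1 if all(isinstance(x, int) for x in seq) else None
    while len(seq) > 1 and len(set(seq)) > 1:
        pairs = Counter(zip(seq, seq[1:]))
        (a, b), _ = pairs.most_common(1)[0]
        # use a fresh integer when labels are integers, otherwise a tuple as the new symbol
        new = next_symbol if next_symbol is not None else (a, b)
        if next_symbol is not None:
            next_symbol += 1
        out, i = [], 0
        while i < len(seq):
            if i + 1 < len(seq) and seq[i] == a and seq[i + 1] == b:
                out.append(new)
                i += 2  # non-overlapping replacement
            else:
                out.append(seq[i])
                i += 1
        seq = out
        steps += 1
    return steps

def etc_gain(labels, left_labels, right_labels):
    """Split criterion analogous to information gain, with ETC as the impurity."""
    n, nl, nr = len(labels), len(left_labels), len(right_labels)
    return etc(labels) - (nl / n) * etc(left_labels) - (nr / n) * etc(right_labels)
```

because etc is computed on the label sequence in its given order, permuting the training instances can change which split maximizes etc_gain, which is exactly the order sensitivity exploited below.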
thus, by simply permuting data instances, we can develop a permutation decision forest.the paper is organized as follows: section 2 introduces the proposed method, section 3 presents the experiments and results, section 4 describes model interpretability, section 5 discusses the limitations of our research, and section 6 provides the concluding remarks and outlines the future work.to showcase the effectiveness of the proposed structural impurity measure (computed using etc) in capturing the underlying structural dependencies within the data and subsequently generating distinct decision trees for different permutations of input data, we utilize the following illustrative toy example.figure3: decision tree using the proposed structural impurity (computed using etc) for permutation a.figure4: decision tree using the proposed structural impurity (computed using etc) for permutation b.figure5: decision tree using the proposed structural impurity (computed using etc) for permutation c.the variability in decision trees obtained from different permutations of data instances (figures 3, 4, 5, 6,and 7) can be attributed to the etc measure's ability to capture the structural impurity of labels, which was not possible for both shannon entropy and gini impurity.figure6: decision tree using the proposed structural impurity (computed using etc) for permutation d.figure7: decision tree using the proposed structural impurity (computed using etc) for permutation e.to illustrate the contrasting decision trees obtained by the etc based impurity measure, consider the trees obtained for permutation b and permutation d.figure9: decision tree using the proposed structural impurity (computed using etc) for permutation d.permutation decision forest distinguishes itself from random forest by eliminating the need for random subsampling of data and feature selection in order to generate distinct decision trees. each such permutation is then used to create a specific permutation decision tree using the structural impurity measure (computed using etc) as the splitting criteria (etc gain). while random forest relies on random subsampling and feature selection, permutation decision forest achieves diversity through permutation of the input data.in permutation decision forest, every permutation of the data instance corresponds to an 'alternate' reality (a counterfactual?!) where that particular order of the data instances are presented to the algorithm to result in a specific set of decisions made subsequently by the classifier. by leveraging permutation, permutation decision tree facilitates the generation of distinct decision trees for varying permutations of data instances. inspired by this, we further develop a bagging method known as permutation decision forest, which harnesses the power of permutation decision trees which overcomes the limitations of random forest imposed by random subsampling and fearture selection. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/761.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/761.txt new file mode 100644 index 0000000000000000000000000000000000000000..d5fa0c9708cbc488b6014f40d560a0c2b4b9c1de --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/761.txt @@ -0,0 +1 @@ +accurate traffic prediction is an integral component of intelligent transportation systems (its) as it is critical in traffic control strategies and traveler information systems. 
predicting evolving traffic patterns on a road network is not a trivial task, and researchers have used advanced models to approximate traffic behavior. over the years, these models include time series methods such as the autoregressive integrated moving average (arima) model , , , non-parametric regression models such as the k-nearest neighbor , or support vector regression , standard artificial neural network models of fully-connected , and recurrent neural networks . however, most recently, the state-of-the-art traffic prediction models are the graph convolutional neural network (gcnn) methods , .this paper first describes the graph convolution perspective and then develops a taxonomy of gcnn short-term traffic prediction models based on their components. afterwards, canada baher.abdulhai@utoronto.ca we explore different variations along these components and eventually arrive at a variant that is similar to a traditional recurrent neural network. we then revisit a regression view of short-term traffic prediction using random forests, a powerful ensemble regression method. finally, we compare the performance of these models using data from traffic simulations as well as the real world. over the years, these models include time series methods such as the autoregressive integrated moving average (arima) model,,, non-parametric regression models such as the k-nearest neighbor,or support vector regression, standard artificial neural network models of fully-connected,and recurrent neural networks. therefore, a gcnn is capable of extracting information using the spatial correlations between nodes in a graph and lends itself well to capturing the complex patterns needed for short-term traffic prediction.where k i is the output representation for node i, x n is the graph input for node n, a in is the influence from node n to node i that is defined by the aggregation matrix, w and b are respectively the weight and bias terms of the model that transforms the input to hidden dimension, and ρ(•) denotes the activation function.a short-term traffic prediction model can use gcnns to capture the spatial correlations between different nodes of a road network; however, the model also needs to account for the changing dynamic of traffic through time.where k i is the hidden states for node i; z i , r i , m i are respectively the update gate, the reset gate, and the candidate hidden state; * denotes the hadamard product; σ(•) denotes the sigmoid function; while w and u represent the weights and b represents the biases of the model.typically, a short-term traffic prediction model based on gcnns and rnns integrates the two components by replacing the matrix multiplications in the gru with a gcnn operation. for this investigation, we call this type of model the graph attention gated recurrent unit (ga-gru) as it combines the concepts of graph attention networks and gated recurrent units.afterwards, we explore the removal of shared weights among different nodes by designating a unique set of model weights w, u, and b for each node.lastly, we also include in this comparison an example in the literature where the independent model weights are factorized according to the spatial correlations among nodes, which results in a structure that shares weights between nodes yet applies a distinct model to every node.traffic propagation within a single time step is limited in space since the traffic state at a given link is independent of recent traffic states of faraway links. 
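as a sketch of the integration described above, a gru whose matrix multiplications are replaced by a graph aggregation, the following shows one plausible cell update; the aggregation matrix a, the dimensions, and the function names are illustrative rather than the paper's exact ga-gru.

```python
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def graph_conv(A, X, W, b):
    """Linear graph aggregation: mix node features via A, then apply a shared linear map."""
    return A @ X @ W + b

def gcgru_step(A, X_t, K_prev, params):
    """One step of a GRU whose input/state transforms are graph convolutions.
    A: (n, n) aggregation matrix; X_t: (n, d_in) node inputs at time t;
    K_prev: (n, d_h) previous hidden states; `params` holds the W*/U*/b* arrays."""
    z = sigmoid(graph_conv(A, X_t, params["Wz"], params["bz"]) + graph_conv(A, K_prev, params["Uz"], 0.0))
    r = sigmoid(graph_conv(A, X_t, params["Wr"], params["br"]) + graph_conv(A, K_prev, params["Ur"], 0.0))
    m = np.tanh(graph_conv(A, X_t, params["Wm"], params["bm"]) + graph_conv(A, r * K_prev, params["Um"], 0.0))
    return z * K_prev + (1.0 - z) * m  # new hidden states K_t
```

sharing W, U, and b across nodes corresponds to the shared-weight variant; giving each node its own parameter set corresponds to the independent-weight variant discussed above.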
we can then define short-term traffic prediction as a regression problem with the predictions (x in this work, we solve this regression problem by employing random forests, which is a type of ensemble regression trees. random forests combats overfitting by splitting the training data to create multiple regression trees and averaging the output of all trees during prediction, which leads to a more robust regression model. moreover, the agrnn (input) model is competitive with other gcnns according to all error metrics; which signifies that the propagation of hidden states among nodes between consecutive rnn time steps is not essential in achieving accurate prediction.the agrnn with input-only attention is an extreme version of separating gcnn weights to create independent models; meanwhile, the factorized weights in agcrn can be viewed as a tradeoff between having completely shared weights of gcnns and completely independent weights. this supports the hypothesis that short-term traffic prediction can be formed as a regression problem and further highlights that sharing model weights and latent states are inconsequential in attaining model accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/762.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/762.txt new file mode 100644 index 0000000000000000000000000000000000000000..3bfaa8f364e73ca0b2b2c6794594966148a7e040 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/762.txt @@ -0,0 +1 @@ +heart failure (hf) syndrome occurs in a situation when the heart cannot fill up with enough blood or when the heart is too weak to pump properly to support other organs in the body as well as it should for the body's needs . it is a condition which progresses through different stages ranging from "high risk of developing heart failure" to "advanced heart failure" . heart failure can be sudden due to a medical condition or injury but mostly it develops slowly due to long-term medical conditions . some conditions that could cause heart failure are arrhythmia, cardiomyopathy, coronary artery disease, congenital heart disease, diabetes, obesity, and hypertension . admission of patients in hospitals for heart failure incurs costs to the patients and increases the burden on hospitals for resource allocation which could be reduced by preventing hospitalization or reducing the length of stay of patients . length of stay is defined as the time between hospital admission to discharge of a patient. in the united states alone, over 6.2 million people have heart failure which costs about 31 billion dollars to the us healthcare system . therefore, predicting length of stay in patients admitted for heart failure is important.electronic health records (ehrs) of a patient store the systematized longitudinal collection of patient's health information electronically . ehrs can be used to build predictive models and effective clinical decision support systems by leveraging artificial intelligence techniques like machine learning and natural language processing . a typical ehr contains diagnostic codes, procedure reports, discharge summaries, vitals, medications administered, laboratory tests, and imaging reports collected during a patient's visit to a healthcare facility. 
identifying and studying the abstraction of the underlying medical concepts in these clinical health records help us to identify the clinical phenotypes related to a disease and to determine appropriate discharge destination and care for the patients. these clinical phenotypes have a latent structure and so earlier researchers have used probabilistic methods like topic models , probabilistic graphical model , and latent class analysis to study clinical concepts associated with the disease. further, studied the importance of thematic structure of the notes to clinicians in inferring the hidden relationships and phenotypes.length of stay of a patient also depends on diagnosis and procedure reports of a patient. the implication of procedure reports and diagnostic codes for supporting to predict length of stay has been studied earlier , , which, however, still suffers from a limitation that these implications have not been studied in predicting length of stay in patients admitted for heart failure. as such, in this study, we study the underlying themes in the clinical notes from diagnostic codes and procedure reports to characterize patients admitted for heart failure to predict their length of stay.to the best of our knowledge, there has been no previous work on identifying the phenotypes of heart failure patients by modeling their disease in latent space and uncovering the underlying themes within their clinical notes. additionally, earlier researchers have not implemented data-driven artificial intelligence-based methods to predict length of stay using clinical notes of patients admitted for heart failure. so, in this study, we proposed topic modeling to study the underlying medical concepts in the form of themes to uncover the clinical phenotypes in the patients' records admitted for heart failure. additionally, these themes could be used to predict their length of stay in these patients. our rationale is that by using the natural topics detected by the algorithm, we are more likely to create a generalizable solution that can work on data collected in the future. thus, we explored the following research questions in this study:1. can we identify the clinical phenotypes of the patients admitted for heart failure in the form of themes?2. can these themes be used to predict length of stay of a patient? some conditions that could cause heart failure are arrhythmia, cardiomyopathy, coronary artery disease, congenital heart disease, diabetes, obesity, and hypertension. as such, in this study, we study the underlying themes in the clinical notes from diagnostic codes and procedure reports to characterize patients admitted for heart failure to predict their length of stay.the theme ischemic heart disease described coronary artery disease in patients as the keyword coronary atherosclerosis along with nonspecific chest pain was present in this theme. the theme renal dysfunction described chronic kidney disease and renal failure which could be present in patients with heart failure.the theme chronic heart disease explained the special conditions associated with chronic heart failure like chronic rheumatic heart disease and coronary atherosclerosis. 
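a minimal sketch of the pipeline described above, assuming each hospitalization's diagnostic codes and procedure reports are concatenated into one document, lda recovers the themes, and length of stay is regressed on the per-admission theme proportions; the topic count, tokenization, and choice of regressor are illustrative.

```python
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split

def los_from_themes(admission_texts, length_of_stay, n_topics=10):
    """admission_texts: one string of space-separated diagnostic/procedure codes per
    hospitalization; length_of_stay: matching LOS values (e.g., days)."""
    counts = CountVectorizer(token_pattern=r"\S+").fit_transform(admission_texts)
    lda = LatentDirichletAllocation(n_components=n_topics, random_state=0)
    themes = lda.fit_transform(counts)          # per-admission topic proportions
    X_tr, X_te, y_tr, y_te = train_test_split(themes, length_of_stay, random_state=0)
    model = RandomForestRegressor(random_state=0).fit(X_tr, y_tr)
    return lda, model, model.score(X_te, y_te)  # R^2 on held-out admissions
```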
some of the disorders associated with hf patients include tobacco-related disorders, sleepwake disorders, and disorders of lipid metabolism which were explained in the theme heart failure disorders.left ventricular dysfunction explained both systolic and diastolic conditions in heart failure patients, described as heart failure with reduced ejection fraction (hfref) and heart failure with preserved ejection fraction (hfpef), respectively. finally, the theme, myocardial ischemia described a common condition in hf patients where the blood flow to the heart muscle is obstructed due to the formation of plaque in the coronary artery as explained by the keywords in this theme like inferior ischemia, myocardial disease, and atrial fibrillation.the results of topic modeling on diagnostic codes depicted the various paradigms of heart failure, like comorbidities, signs and symptoms, and disorders related to heart failure. it revealed different phenotypes of heart failure in the form of themes like ischemic and non-ischemic heart disease, hypertensive heart disease, chronic heart disease, and acute heart failure. for example, for a single patient, our analysis showed that over time, a patient's disease could progress from chronic heart disease to acute heart failure. in between the first and last hospitalization, that patient's diagnostic codes were characterized by themes like heart failure comorbidities and renal dysfunction symbolizing that the presence of comorbidities associated with heart failure contributed towards the progression of the disease in that patient. additionally, topic modeling results using only procedure reports performed better as compared to results using only diagnostic codes to predict length of stay of patients which indicates that procedure reports are a better indicator of predicting a patient's stay.828 showed that studying the patterns in the diagnostic codes and procedure reports of patients in the form of phenotypes played an important role in predicting length of stay in heart failure patients. analysis of clinical notes of patients through lda using the data about diagnostic codes and procedure reports during each hospitalization of a patient revealed different phenotypes associated with heart failure in the form of themes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/763.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/763.txt new file mode 100644 index 0000000000000000000000000000000000000000..89f16d0252d70ff2d8a255578da41b93b306330f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/763.txt @@ -0,0 +1 @@ +in recent years, deep neural networks have made breakthroughs in supervised configurations for most natural language processing tasks. in particular, the birth of the large-scale pre-training model bert has enabled the pre-training plus fine-tuning paradigm to prevail. although large-scale pre-training models retain more a priori knowledge, they still require a large amount of labeled data for fine-tuning to achieve better results in a specific task or domain. in real-world scenarios, it is more common to have a large amount of unlabeled data and a small amount of labeled data. a simple and straightforward way to improve the performance of deep neural networks is to manually label these unlabeled data, which requires a lot of human, financial, and time resources, and for data in specific specialized fields, even requires specialized expertise to label. 
with a small amount of labeled data, deep neural network models can easily overfit the available labeled data, capturing only less information that can help in downstream tasks, resulting in poor model performance.in order to address the issue of poor model performance with limited sample data, a series of methods have been summarized and collected . among them, the classic solution at the data level is data augmentation, which generates additional data for model training based on prior knowledge. generally, existing text data augmentation methods can be classified into two categories: input-level augmentation and hidden representation-level augmentation. input-level augmentation modifies the original text at the character, word, or sentence level using specific strategies to generate new, similar texts while preserving the original labels, as in eda . the augmented data generated by these methods are perceivable and understandable by humans. hidden representation-level augmentation operates on the intermediate hidden representations of the text within the deep neural model, as in tmix . this method utilizes interpolation-based techniques to obtain new intermediate representations, which are then used as inputs for subsequent layers. the augmented data generated by these methods are virtual samples that are difficult for humans to intuitively understand.inspired by swapmix and manifold mixup , we propose a novel simple and effective data enhancement method: msmix (manifold swap mixup). the msmix method is a mixup method that acts on the space of textual hidden representations, and the hidden representations of two samples input to the depth model. the msmix method is a mixup method that acts on the hidden representation space of text hidden representations, where the hidden representations of two samples input to the depth model are manipulated to replace some of their dimensions. three different dimensional replacement strategies are also proposed to explore more effective replacement methods. extensive experiments are conducted on three chinese intention recognition datasets to confirm the effectiveness of the msmix method proposed in this paper. a simple and straightforward way to improve the performance of deep neural networks is to manually label these unlabeled data, which requires a lot of human, financial, and time resources, and for data in specific specialized fields, even requires specialized expertise to label. with a small amount of labeled data, deep neural network models can easily overfit the available labeled data, capturing only less information that can help in downstream tasks, resulting in poor model performance. among them, the classic solution at the data level is data augmentation, which generates additional data for model training based on prior knowledge. generally, existing text data augmentation methods can be classified into two categories: input-level augmentation and hidden representation-level augmentation. hidden representation-level augmentation operates on the intermediate hidden representations of the text within the deep neural model, as in tmix. the msmix method is a mixup method that acts on the space of textual hidden representations, and the hidden representations of two samples input to the depth model. the msmix method is a mixup method that acts on the hidden representation space of text hidden representations, where the hidden representations of two samples input to the depth model are manipulated to replace some of their dimensions. 
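a minimal sketch of the core msmix operation as described: a subset of hidden dimensions of one sample's representation is replaced by the corresponding dimensions of another at some layer; the random selection below stands in for the three replacement strategies, whose details are not given here.

```python
import torch

def msmix_swap(h_a, h_b, swap_ratio=0.3, generator=None):
    """Replace a random subset of hidden dimensions of h_a with those of h_b.
    h_a, h_b: (batch, hidden_dim) hidden representations of two samples/batches.
    Returns the mixed representation and the fraction actually swapped (useful if
    labels are mixed proportionally, which is an assumption on our part)."""
    hidden_dim = h_a.size(-1)
    n_swap = max(1, int(round(swap_ratio * hidden_dim)))
    idx = torch.randperm(hidden_dim, generator=generator)[:n_swap]
    mixed = h_a.clone()
    mixed[..., idx] = h_b[..., idx]
    return mixed, n_swap / hidden_dim
```

the mixed representation is then passed to the subsequent layers in place of h_a, so the perturbation acts like structured noise drawn from another real sample rather than from a fixed distribution.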
doublemixfirst extracts a sample, performs traditional data augmentation on it, then performs mixup operation on the two samples, and finally performs mixup operation on the mixup result and the original sample. if some dimensions are replaced randomly or strategically, it can be equated to apply noise to the hidden layer, and the noise is not randomly generated based on some distribution, but is extracted from the hidden layer of another real sample, which acts as a regularization and improves the robustness of the model. since swapmix can extract features of unrelated visual contextual objects from image data that has a large amount of labeled information, while text data does not have such rich labeling information, this paper only applies similar methods at the hidden representation level to select and replace dimensions of hidden representations, and explores strategies for selecting and replacing dimensions. the reason for this is that the virtual samples generated by the mixup method are linear values between two real samples in the hidden representation space, which are continuous.although msmix-base, msmix-a, and msmix-b exhibit unstable performance when used individually, using these three strategies in combination can achieve better performance than other data augmentation methods.from table2and table3, we can find that tmix (randomly selecting layers in a subset of hidden layers for mixup) will be better than mixup-transformer (fixedly selecting the last hidden layer for mixup operation).we propose a simple and effective interpolation-based data enhancement method: msmix, which performs dimensional replacement of the hidden representation of a layer after two samples are input to the deep neural network to obtain the new hidden representation passed to the subsequent layers for computation, and propose three different strategies for dimensional replacement. the experimental results on three chinese intention recognition datasets show that the proposed method can improve the robustness of the deep neural network model and help reduce the problem of overfitting in small sample scenarios, and the proposed msmix outperforms other existing data enhancement methods on three chinese datasets and achieves the optimal performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/764.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/764.txt new file mode 100644 index 0000000000000000000000000000000000000000..8dd261564dd035534f2efd3bb39492d62273b94f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/764.txt @@ -0,0 +1 @@ +in a world where the demand for intelligent vehicles is increasing rapidly , the concern for the safety of passengers and other road users should grow with it . according to the world health organization, approximately 1.3 million people die each year due to road traffic accidents, and this number is expected to increase if proper measures are not taken . therefore, it should be paramount for future autonomous vehicles to improve traffic safety.one of the most critical factors for ensuring a safe environment around intelligent vehicles is accurately predicting the future movements of surrounding traffic participants. these predictions allow for a better assessment of the environment and anticipation of potentially dangerous situations at an early stage, lowering the risk of accidents. 
the accurate predictions of interactive behaviours are especially important, as those comprise the most challenging situations.numerous methods have been used to tackle the human behaviour prediction problem - , with examples ranging from reasoning-based methods to data-driven techniques. over the last few years, data-driven approaches have shown great potential - , using machine learning algorithms to learn from large amounts of data to predict the trajectories of traffic participants. one of these data-driven models is trajectron++ , which stands out due to its public code availability, the general applicability and the results it achieved on multiple datasets (including nuscenes , and highd ).other methods to predict future behaviour of traffic participants are based on theories from cognitive science. instead of learning merely from data, the model is constructed to 1 department of cognitive robotics, delft university of technology, delft, the netherlands mimic human cognition. one class of such models based on the concept of evidence accumulation have proved useful specifically in predicting binary decisions in traffic interactions , . however, these models are not yet applicable to trajectory forecasting in a more general setting. another example of using insights from cognitive science for behaviour prediction in traffic is the use of a quantum-like bayesian model, a mathematical framework that combines elements of quantum theory and bayesian probability theory to describe decision-making and information processing in complex and uncertain environments . it is used in to more accurately predict human street crossing behaviour, compared to the more data-driven model social-lstm . yet another insight from cognitive science suggests that the brain has a limited capacity for shifting attention rapidly between different tasks . this is used in , where the application of the smoothing term to the attention module of a machine-learning prediction model -referred to as smooth-attention -which mimics human cognition, allows for better predictive performance.recent work demonstrated that integrating insights from cognitive science is a promising way of improving the performance of trajectory prediction models, but such cognitively inspired models need to be explored in a much more comprehensive way. specifically, cao et al. emphasize the need to combine smooth attention with more advanced interaction modelling network architectures. here we aim to address this challenge by applying smooth attention to a state-of-the-art behaviour prediction model. namely, we aim to improve upon the performance of trajectron++ (t++) by leveraging the method of smooth attention proposed in . applying a smoothness constraint on the attention module significantly reduces changes in attention, thereby ensuring that the module's output mimics the natural human cognitive processing. we name our approach of this combined model smooth-trajectron++. we test this new model on the nuscenes and highd datasets. another example of using insights from cognitive science for behaviour prediction in traffic is the use of a quantum-like bayesian model, a mathematical framework that combines elements of quantum theory and bayesian probability theory to describe decision-making and information processing in complex and uncertain environments. 
this is used in, where the application of the smoothing term to the attention module of a machine-learning prediction model -referred to as smooth-attention -which mimics human cognition, allows for better predictive performance.recent work demonstrated that integrating insights from cognitive science is a promising way of improving the performance of trajectory prediction models, but such cognitively inspired models need to be explored in a much more comprehensive way. applying a smoothness constraint on the attention module significantly reduces changes in attention, thereby ensuring that the module's output mimics the natural human cognitive processing. by incorporating the smoothness constraint, the smooth attention approach enhances the attention mechanism, improving the selection of important information while disregarding less relevant input variables and aligning better with the characteristics of human attention. at a high level, the original edge influence encoder is expanded by applying the attention module at each time step in a similar fashion as in the smooth attention model (the green highlighted box in figure2), where the outputs α τ i,j ab are the attention weights that are used to rank the importance the human agent i assigns to the semantic class j ab for neighbouring agents of types a and b (a and b can stand for agent types such as cars or pedestrians) at the time τ . in both scenarios, we trained and assessed both the original trajectron++ model (which is a special case of smooth-trajectron++ for β = 0) and the expanded model, with multiple versions of the latter, differentiated by five β-values ranging from 0.for this dataset, we evaluated the models according to three metrics: final displacement error (fde), average displacement error (ade) and kernel density estimation of negative log likelihood (kde-nll). contrary to the pedestrianonly predictions in tableiii, smooth-trajectron++ on the vehicle-only forecasts has better kde-nll numbers than the reproduced model, which indicates that the model is better able to match the original distribution of predicted trajectories with the inclusion of the smooth-attention term in the loss function. in case of highd dataset, we investigated the predictions of gap acceptance in lane-change decisions using a restricted version of highd (seefor the frameworkallowed us to use two methods of splitting the highd data into training and testing sets: the random split and the critical split.in addition to the metrics used for the nuscenes dataset, the gap acceptance benchmark includes an additional metric, the area under the receiver-operator curve (auc), used to evaluate the performance of binary classification models (here between accepted and rejected gaps).overall, in highd lane-change prediction experiments, there are instances of both better and worse performance of smooth-trajectron++ compared to t++, indicating no consistent benefits of adding smooth attention to t++. the application of the smoothing term β in the smooth-trajectron++ model relies on the attention module that compares different semantic classes of traffic participants.this paper proposed smooth-trajectron++, a trajectory prediction model based on an existing state-of-the-art model trajectron++in which we incorporated a cognitivelyinspired smooth attention module. hence, the concept of smooth attention might be better applied to models where the attention module is implemented over individual agents and not semantic classes. 
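a minimal sketch of the smoothness term described above, assuming a squared-difference penalty on consecutive attention weights that is added to the prediction loss and scaled by β; the exact norm and reduction used in smooth-trajectron++ may differ.

```python
import torch

def attention_smoothness(attn_weights):
    """attn_weights: (batch, T, n_classes) attention over semantic classes at each
    time step. Penalizes large changes between consecutive time steps."""
    diffs = attn_weights[:, 1:, :] - attn_weights[:, :-1, :]
    return (diffs ** 2).sum(dim=-1).mean()

def total_loss(pred_loss, attn_weights, beta=0.1):
    # beta = 0 recovers the original model's objective
    return pred_loss + beta * attention_smoothness(attn_weights)
```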
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/765.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/765.txt new file mode 100644 index 0000000000000000000000000000000000000000..549e90603a0da8b2ddeb40e41f272632baf4f590 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/765.txt @@ -0,0 +1 @@ +neural seq2seq (sutskever et al., 2014) is a powerful generic framework for the task of transforming an input sequence of arbitrary length into an output sequence of arbitrary length. although seq2seq models can perform impressively in a great variety of tasks (raffel et al., 2020;lewis et al., 2020), they can still struggle in outof-distribution generalization (e.g., systematic generalization or length generalization), and sometimes, even in simple algorithmic tasks (kim et al., 2022;dubois et al., 2020;dehghani et al., 2019;lake & baroni, 2018;liska et al., 2018). even after extensive pre-training, neural models can show mixed results in such forms of generalization 1 computer science, university of illinois chicago. correspondence to: jishnu ray chowdhury , cornelia caragea . learning, honolulu, hawaii, usa. pmlr 202, 2023. copyright 2023 by the author(s)., 2021), we first consider the task of simply copying source texts in both forward and backward (reverse) directions. followingdubois et al., 2018;dai et al. note that if we made re-copy a bit simpler by requiring each number to be copied and repeated for a uniform frequency, then the determination of the ideal position for attention will again become trivially possible just from a decoding timestep; thus re-copy requires repeating with varying frequency depending on which number is being copied.reverse recopy: the reverse recopy task is similar to the recopy task in all aspects other than the fact that the copying takes place in the reversed direction (see example in table1).inv recopy: the inv recopy task (inverse recopy) is similar to the recopy task in all aspects other than the fact that the inputs and outputs are inverted (see example in table1).inv reverse recopy: the inv reverse recopy task (inverse reverse recopy) is similar to the reverse recopy task in all aspects other than the fact that the inputs and out-puts are inverted (see example in table1).limitation 1 (handling reverse tasks): as noted earlier (see recopy task description in §2), in some tasks like copy or lookup, the target cross-attention position is always at the same constant relative distance from the timestep.limitation 2 (handling recopy and beyond): as discussed in §2 (see recopy description), tasks like recopy, reverse recopy, or their inverted variants are specifically designed to serve as settings in which the ideal attention position can vary from timestep to timestep (no matter if the encoding positions are reversed or not). 15 is kept as described). relative attention (relative) does well in the forward copy and lookup tasks but it fails in the reversed tasks for the reasons discussed in §5. the copy tasks and lookup tasks are easy to learn for onestep attention because in either tasks it has to simply learn to take one step forward relative to the past attended position in every timestep. however, we also discover a trade-off -the restricted steps of onestep attention preclude it from solving the inverted versions of recopy tasks whereas the more unconstrained steps of monotonic attention manages the inverted recopy tasks but at the cost of the lookup tasks. 
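a sketch of how instances of the copy and recopy tasks could be generated; the excerpt does not fully specify the repetition rule for recopy, so here each token is repeated a number of times equal to its own value, purely as an illustrative choice.

```python
import random

def make_copy_example(length=10, vocab=range(1, 10), reverse=False):
    """Copy (or reverse copy) task: target is the source, optionally reversed."""
    src = [random.choice(list(vocab)) for _ in range(length)]
    tgt = list(reversed(src)) if reverse else list(src)
    return src, tgt

def make_recopy_example(length=10, vocab=range(1, 10), reverse=False, invert=False):
    """Recopy: each source token is emitted with a token-dependent frequency
    (here: token value = number of repetitions, an illustrative assumption)."""
    src = [random.choice(list(vocab)) for _ in range(length)]
    ordered = list(reversed(src)) if reverse else list(src)
    tgt = [tok for tok in ordered for _ in range(tok)]
    if invert:  # the "inv" variants swap inputs and outputs
        src, tgt = tgt, src
    return src, tgt
```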
we find location attention and our extensions of it (onestep attention or monotonic attention) to generally also perform better on the task of translating simple commands into sequences of actions than other forms of interlayer attention even though they are not designed explicitly keeping the struc-ture of scan task in mind.locattn s: this is a simplified (s) version of location attention (without content attention mixing) where we set ref t = b t for the first timestep to initialize the reference position using b t and then use ref t = pa t-1 like onestep attention/monotonic attention. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/766.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/766.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e89af86bbc07d8932b3aae3f191199cb434b18c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/766.txt @@ -0,0 +1 @@ +reinforcement learning (rl) is a powerful subset of machine learning that enables agents to learn through trial-and-error interactions with their environment. rl has succeeded in various applications such as game playing, robotics, and finance . however, one of the main challenges of rl is its susceptibility to environmental changes. when a trained policy operates in different environmental dynamics than it was trained under, it often underperforms and achieves suboptimal rewards .to mitigate this problem, robust mdps have emerged as a promising solution to address the issue of the sensitivity of reinforcement learning to changing environments. by optimizing policies to have large value functions even in the worst-case environmental dynamics, robust mdps provide a more stable approach to training agents. there are different variants of robust mdps, including distributionally robust mdps , softrobust mdps , and adversarial robustness . overall, robust mdps have the potential to enhance the reliability of reinforcement learning in practical applications greatly. improving rl's robustness is crucial in ensuring ai safety. mitigating disastrous failures of rl-trained agents in practice can prevent many undesirable outcomes . in several high-stakes environments, provable guarantees of robustness are paramount to preventing such undesirable outcomes. while the need for robust policies in rl is clear, finding them is challenging. the value function is highly nonconvex and is difficult to optimize with respect to policy and transition dynamics. furthermore, the task is even more daunting when dealing with large state and action spaces, as finding algorithms that scale becomes an enormous challenge.many existing solutions have made significant progress in finding efficient and accurate methodologies for solving robust mdps. some of the first algorithms to solve robust mdps with guarantees and empirical performance are via transition dynamics set assumptions , robust policy iteration or robust q-learning ; however, relatively few algorithms exist to solve this problem via policy gradient methods. traditionally, policy gradient methods are popular due to their simplicity, stability, and scalability to large action and state spaces . furthermore, some policy gradient methods have achieved promising convergence results for solving robust mdps. 
however, due to the difficulty of nonconvex optimization, these frameworks still have suboptimal convergence and robustness guarantees.however, a common framework for solving similar optimization problems with strong convergence guarantees is that of the repeated game structure or no-regret dynamics. this models an optimization problem as a repeated game where one player tries to improve some objective function, and one tries to reduce the objective function . the advantage of such a framework is that its convergence is remarkably simple; the two players converge to a correlated equilibrium in time proportional to the sum of their regrets . in fact, wang et al. showed that the margin or robustness of a linear classifier could be sufficiently improved over time using similar no-regret dynamics. we want to use a similar no-reget dynamics framework to generate robust mdp algorithms.using a repeated game framework for the current problem faces the challenge of dealing with nonconvex value functions. recently, suggala and netrapalli showed that follow the perturbed leader can achieve relatively strong regret even in the nonconvex case, provided that some minimization oracle is available. incorporating this approach into a no-regret online framework, such as the one from no-regret dynamics, could lead to strong robustness. however, the minimization oracle is challenging to parameterize in general. under gradient dominance assumptions of the value function, it is still possible to construct a minimization oracle, which allows for provable guarantees that the projected gradient ascent will converge to an optimal solution. therefore, we model the optimization process for robust mdps as a game between policy and environmental dynamics players. both utilize variants of follow the perturbed leader and projected gradient ascent.contributions combining these, we have derived an efficient algorithm for solving robust mdps with provable guarantees of convergence that improve over existing algorithms.1. we show we can model the solution process for a robust mdp as a two-player online framework between a policy player and an environmental dynamics player, where the two players' regrets bound the robustness of the policies. when the policy player utilizes follow the perturbed leader and the environmental dynamics player follows best response alongside an optimization oracle, we show the robustness is maximized at a rate of o 1 t 1 2 rate. in the direct parameterization setting, we show that the loss functions seen by both players are gradient dominated, meaning we can use projected gradient ascent as the optimization oracle.we can state that our mdp is a tuple of (s, a, p w , r) where s is a set of states, a is a set of actions, p w is a transition dynamics function parameterized by w that maps a s, a, s ′ ∈ s × a × s to a probability between 0 and 1, and r is a reward function that maps a state s to a reward r. moreover, we will slightly abuse notation call v π w (µ) = µ ⊤ v π w where v π w is the vector of value functions for all states. one common setup of robust mdp's is where g(π, w ) is simply the value function given π and w , specifically g(π, w ) = v π w (µ). we solve the optimization problem by iteratively choosing a policy and transition dynamics until convergence. 
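a sketch of the repeated-game skeleton described above: the policy player takes perturbed gradient steps against the accumulated dynamics choices (a loose stand-in for follow the perturbed leader), and the dynamics player approximates a best response with projected gradient descent on the value; the value function, gradients, projections, and step sizes are placeholders, not the paper's algorithm.

```python
import numpy as np

def robust_mdp_game(value_fn, grad_pi, grad_w, proj_pi, proj_w,
                    pi0, w0, T=100, eta=0.1, sigma=0.1, rng=None):
    """value_fn(pi, w) -> V^pi_w(mu); grad_* return gradients of the value with
    respect to the policy / dynamics parameters; proj_* project back onto the
    policy simplex and the dynamics uncertainty set."""
    rng = rng or np.random.default_rng(0)
    pi, w = pi0.copy(), w0.copy()
    history_w, values = [], []
    for t in range(T):
        # policy player: ascend the average value over past dynamics choices,
        # with an added perturbation (a crude follow-the-perturbed-leader proxy)
        g = (np.mean([grad_pi(pi, w_past) for w_past in history_w], axis=0)
             if history_w else grad_pi(pi, w))
        pi = proj_pi(pi + eta * (g + sigma * rng.standard_normal(pi.shape)))
        # dynamics player: approximate best response via projected gradient descent
        for _ in range(10):
            w = proj_w(w - eta * grad_w(pi, w))
        history_w.append(w.copy())
        values.append(value_fn(pi, w))
    return pi, w, values
```

the regret of each player in this loop is what bounds the robustness of the averaged policy, which is why no-regret updates for both players suffice for convergence guarantees.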
in this setting, the tth loss function for the w -player is l t (w t ) = v πt wt (µ) and for the π-player is h t (π t ) = -v πt wt (µ).we now begin with the direct parameterization case with standard robust mdps, where p w (s ′ , a, s) = w s ′ ,a,s directly parameterizes the transition dynamics, π(s|a) = θ s,a , and g(w, π) = v π w (µ).as in for an arbitrary w ∈ w and the optimal parameter w * ∈ w, we have. here, we define the w -advantage function as a w (s ′ , a, s) = γv w (s ′ ) + r(s, a) -v w (s).as in for an arbitrary w ∈ w and the optimal parameter w * ∈ w, we have.here, the first inequality comes from seeing that the value p w * (s ′ |a, s)a w (s ′ , a, s) is minimized when p w * puts the most weight on the state minimizing the advantage function.here, the first equality comes from seeing that the value p w * (s ′ |a, s)a w (s ′ , a, s) is minimized when p w * puts the most weight on the state minimizing the advantage function.here, the first equality comes from the performance difference lemma, the second equality comes from the fact that a π(a, s)a π (s, a) = 0, the third equality comes from the definition of the advantage function for the π-player, and the final inequality comes from the both the policy gradient theorem and the fact that d π µ (s) ≥ (1-γ)µ(s).here, the first equality comes from the performance difference lemma, the second equality comes from the fact that a π(a, s)a π (s, a) = 0, the third equality comes from the definition of the advantage function for the π-player, and the final inequality comes from the both the policy gradient theorem and the fact that.here, the first equality comes from the performance difference lemma, the second equality comes from the fact that a π(a, s)a π (s, a) = 0, the third equality comes from the definition of the advantage function for the π-player, and the final inequality comes from the both the policy gradient theorem and the fact that d π µ (s) ≥ (1γ)µ(s).in this setting, the w -player still enjoys gradient dominance properties, so using projected gradient descent has α ≤ c w (t o , k w ) . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/767.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/767.txt new file mode 100644 index 0000000000000000000000000000000000000000..576463b44e1c0b781ea7e9a82ea5ab3dba897e6f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/767.txt @@ -0,0 +1 @@ +artificial intelligence (ai), and in particular machine learning (ml) techniques, have been used in decisionmaking systems which typically rely on historical data for training. however, the data may contain biases against groups or individuals with certain characteristics, which can lead to discriminatory or unfair decisions. such decision-making systems are used in both private and public sectors. organisations are increasingly relying on these systems to reduce workload and free up resources (e.g. deloitte, 2021;engin and treleaven, 2019), and local governments, in particular social services,1 have deployed systems to predict a score of how at risk a child is of neglect or abuse (church and fairchild, 2017). the potential harmful impact of these systems is therefore immense.there have been several recent examples of unfair decisions made by machine learning models in different domains, such as criminal justice (partnership on ai, 2019), recruitment,2 and social services (gillingham, 2019). 
algorithms used to predict if criminals will re-offend are used across many us states and yet an analysis of a popular tool, compas (northpointe inc., 2019), showed that black defendants were identified incorrectly as re-offending at a higher rate than white defendants (larson et al., 2016). a recruitment tool used by amazon was shown to be biased against women (jeffrey dastin, 2018), reinforcing historical biases due to the small sample size of women who had previously been hired.ensuring ai systems are fair to individuals and communities is an important cross-disciplinary issue which must consider the context and application of the systems deployed (waller and waller, 2020). a system is considered to be fair if it does not discriminate based on protected personal characteristics such as race, sex, religion, etc. discrimination may arise from the dataset used to train decision-making systems; specifically, unwanted bias occurs when the system favours or gives advantage to historically favoured groups or its outputs directly correlate with protected personal characteristics. mehrabi et al. (2021) identify several causes of unwanted bias including: i) historical bias where the training data includes embedded historical social biases, ii) representation bias where some groups with certain characteristics appear more frequently in the dataset, and iii) measurement bias which arises from how certain features are measured or used.mehrabi et al.in this section, we present some key concepts, the different types of bias mitigation methods, as well as the main datasets, fairness and performance metrics used to evaluate the methods, and report on related work. in section 3, we will present the approaches for the three types of bias mitigation methods pre-processing methods are used to mitigate bias in the training data, and are useful when we have access to the training data, but not necessarily to the machine learning model trained on the data(bellamy et al.bias mitigation methods are evaluated by calculating the amount of unwanted bias before and after the methods are applied; this can be achieved using fairness metrics. the filtering process ensured that only papers of high quality and relevance were included in the survey, providing a reliable representation of the state-of-the-art in the field.a limitation of the bias mitigation methods collated is that they cannot be directly compared due to being evaluated using different fairness metrics and different models trained on datasets of different sizes and distributions. for example, all individuals with a positive classification in the unprivileged group will be assigned the same weight which will be greater than the weight assigned to the individuals with a negative classification in the unprivileged group. the definitions of existing group fairness metrics (found in table2) only account for a single binary sensitive attribute meaning methods which use these metrics only allow the removal of unwanted bias with respect to that one attribute.illegal discrimination is measured as the difference between the level of unwanted bias according to an existing group fairness metric and the explained bias as shown below, where k is the number of explanatory attributes, e i is a value of the explanatory attribute values, and a and b are the values of the sensitive attribute (e.) 
is the probability that an individual has the explanatory attribute value e i given they are in the unprivileged group (in the privileged group), respectively; and p * (+|e i ) represents the probability of an individual having a positive classification given it has the explanatory attribute value e i . the discrimination score is defined in equation1, where p (d|xy) is the probability of a positive classification given all the attributes, calculated as the proportion of positive classifications when the classifier is trained with all attributes. the two naive bayes method learns two different models for the data, split by the values of the identified sensitive attribute, and uses these to classify individuals, using the model corresponding to their value of the sensitive attribute.a limitation of post-processing methods is that they can be easily manipulated to ensure some existing fairness metric is satisfied, for example by swapping the classifications of random individuals to ensure an equal number of positive and negative classifications across sensitive groups.instead of simply using existing metrics to detect unwanted bias or evaluate the impact of bias mitigation methods, we recommend developing methods that focus on transparency and explainability, by explaining the unwanted bias detected in decision-making systems. our survey differs from existing work by critically analysing existing bias mitigation methods and providing recommendations for the development of future bias mitigation methods for binary classification decision-making systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/768.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/768.txt new file mode 100644 index 0000000000000000000000000000000000000000..52d3710cd47736bc883bdeda2267cf027e245388 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/768.txt @@ -0,0 +1 @@ +heart disease (hd) is a major public health concern . according to the world health organization (who), onethird of global deaths are from heart and cardiac disease . high cholesterol, obesity, high triglycerides, hypertension, and other unhealthy behaviors increase heart disease risk. the presence of heart disease can reduce blood flow and cause heart failure . the high death rate is due to the difficulty of diagnosing heart disease, especially without advanced technology and medical specialists . heart disease requires blood pressure, ecg, auscultation, cholesterol, and blood sugar tests. these tests take a long time, delaying the patient's medication and harming them. researchers used data mining and machine learning (ml) algorithms to develop heart disease prediction systems. current methods for detecting heart disease are ineffective due to inaccuracy and execution time .machine learning (ml) prediction systems start with a dataset. medical facilities and organizations have begun collecting patient data to improve service quality, but the data is insufficient and noisy. data collection, especially survey data, is time-consuming. researchers used medical data to predict diseases. others built prediction models from survey data. others built prediction models from survey data . the behavioral risk factor surveillance system (brfss) surveys over 400,000 participants annually . this study aims to shorten surveys by selecting the most important questions while maintaining performance. we examine the survey's ability to predict heart disease. 
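a minimal sketch of the survey-shortening idea described above: train a model on all survey questions, rank the questions by a feature-importance score, keep the top half, and compare performance before and after. the random-forest importance ranking, the 50% cut-off, and the x/y placeholders are illustrative assumptions, not the exact pipeline of the study.

from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
import numpy as np

# x: numeric survey answers (rows = respondents, columns = questions), y: heart-disease label
x_tr, x_te, y_tr, y_te = train_test_split(x, y, stratify=y, random_state=0)

full = RandomForestClassifier(n_estimators=200, random_state=0).fit(x_tr, y_tr)
acc_full = accuracy_score(y_te, full.predict(x_te))

k = x.shape[1] // 2                                    # keep half of the questions
top = np.argsort(full.feature_importances_)[::-1][:k]
short = RandomForestClassifier(n_estimators=200, random_state=0).fit(x_tr[:, top], y_tr)
acc_short = accuracy_score(y_te, short.predict(x_te[:, top]))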
we build several prediction models and evaluate their performance before and after finding the most relevant set of features. researchers used data mining and machine learning (ml) algorithms to develop heart disease prediction systems.machine learning algorithms have been proposed in several research papers by researchers to predict the heart diseases in early stages, using different machine learning techniques. li et al. 2020 proposed a system that uses machine learning techniques to diagnose heart disease. the feature selection algorithms that have been applied the relief, minimal redundancy maximal relevance, least absolute shrinkage selection operator and local learning. 2020proposed a new feature selection algorithm called fast conditional mutual information. the proposed method included various machine learning techniques such as knn, svm, nb, rf and a multilayer perception neural network (mlnn) optimized by particle swarm optimization (pso) combined with ant colony optimization (aco) approaches.this section introduces the methodologies for building our prediction models using several machine learning algorithms, selecting the most stable list of questions through feature selection methods, and reducing survey time. figure1shows the methodology steps to determine how much we can rely on the initial survey to diagnose heart disease before proceeding for further heart disease tests.after the data preparation step, machine learning algorithms can be utilized to perform the predication of the heart disease or attack. consequently, we used the following machine learning algorithms: k-nearest neighbor (knn), support vector machine (svm), logistic regression (lr), random forest (rf), stochastic gradient descent (sgd), and gradient boosting (gb). there is a binary classification for heart disease in the data set, with 229,787 replies indicating that they do not have heart disease and 23,893 indicating that they do have heart disease. the experimental results for the training dataset show that the rf prediction model has 100% accuracy among all models, and for the testing dataset, the svm has 77% accuracy in identifying heart disease using half of the original features. using machine learning techniques as a tool for predicting heart disease is considered one of the efficient ways of diagnosis. our aim was to test the reliability of the data in predicting heart disease and to search for the most stable list of questions that could be used to reduce the survey time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/769.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/769.txt new file mode 100644 index 0000000000000000000000000000000000000000..2d87272f8a6081c25d956b6822a4fbf09460eac4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/769.txt @@ -0,0 +1 @@ +the study of deep learning models has increased along with their widespread adoption in applications processing large amounts of data . however, unexpected model behavior or system outcomes can be incurred when simply enlarging the scale of datasets and achieving high numerical accuracy without thorough examinations of the data and models. accordingly, warnings have been raised about the potential risks associated with biased datasets and models . 
this kind of biased model behavior based on spurious correlations is referred to as clever hans effect or shortcut learning .several studies have been conducted to understand how models work and uncovered potential biases in the data . for example, the models in the image domain are prone to focus on the background, rather than the object as a shortcut to predict class labels . the situation becomes more complex when using a deep-learning black-box model, which is difficult to interpret. explainable ai (xai) is one of the ways to interpret model behavior and build human-understandable models .in this study, we propose a novel framework to discover shortcut learning in binary classifiers, treated as black-boxes. the proposed framework, detailed in section 2, formulates a generic approach for introducing systematic biases into an existing database. purposefully introduced interventions in asymmetric ways lead the model to respond to the provided shortcuts. those interventions are applied both in the training and test sides, including the boundary cases of 'near-perfect' and 'worse than coin flip' cases (label flip). since the parameters of the intervention process are known, another key element of the proposed approach is to use these parameters as inputs in a regression model (linear mixed effects modeling or lme) in the classifier score domain. importantly, the inclusion of lme allows us to 'go beyond the eer' -to learn how the biases impact the class-conditional score statistics.the focus of our case study is on audio anti-spoofing, which determines whether the utterance is from a real human (bona fide) or spoofing attacks (e.g. voice conversion, text-to-speech). several studies have addressed data bias in audio anti-spoofing . the early studies in have investigated the distribution of waveform samples as a shortcut in spoofing countermeasures (cm). more recently, the validity of the asvspoof 2019 and asvspoof 2021 datasets has also raised concern about the proportion of silence in . while silence has only been examined in spoofing detection yet, our research explores several suspected candidates for leading bias.our proposed shortcut learning analysis framework is applicable to arbitrary types of data interventions and black-box classifiers. as a proof of concept, therefore, our experimental part includes five different types of interventions and three different spoofing countermeasures (cms) of varied complexity. in particular, experiments are conducted with two conventional methods, namely, gaussian mixture model (gmm) and light convolutional network (lcnn) as well as state-of-the-art audio anti-spoofing using integrated spectro-temporal graph attention network (aasist). by using a selected class of predictive models g : x → y and training loss l : x × y → r+, a system developer trains a model by minimizing l on the training data.to sum up, the intervention process is specified by two sets of control parameters: (1) the probability of applying intervention, p (fj ); and (2) the distribution of the intervention control variable, p (z; θj). for the randomly selected files that undergo intervention, we sample another variable zij ∼ p (z; θj) independent of xi which controls the intervention applied to xi. first, while the selected types of interventions are typical in the audio domain, we focus on each type at a time; we neither mix different interventions into an experiment nor consider combinations of different interventions applied sequentially. 
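the intervention protocol just described (apply f_j with probability p(f_j), then draw a control variable z_ij ~ p(z; θ_j) for the selected files) can be sketched as follows; the uniform form of p(z; θ_j) and the silence-prepending example are assumptions made here for illustration.

import numpy as np

def apply_interventions(waveforms, intervene_fn, p_apply, theta, seed=0):
    """apply intervene_fn to each waveform with probability p_apply, using a
    control variable z ~ p(z; theta) (assumed uniform over theta = (lo, hi))."""
    rng = np.random.default_rng(seed)
    out, flags, zs = [], [], []
    for x in waveforms:
        if rng.random() < p_apply:
            z = rng.uniform(*theta)
            out.append(intervene_fn(x, z)); flags.append(1); zs.append(z)
        else:
            out.append(x); flags.append(0); zs.append(None)
    return out, flags, zs

# illustrative intervention: prepend z seconds of silence at a 16 khz sampling rate
prepend_silence = lambda x, z: np.concatenate([np.zeros(int(z * 16000)), x])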
hence, direct regression modeling of the cm detection scores as a function of intervention parameters helps to 'go beyond the eer' and obtain explanations for the impact of interventions. here, µ is the global mean of the scores, d is a class discrimination parameter, y^cls_i the class label, (β^bona_j, β^spf_j) two regression coefficients, and ε_i ∼ n(0, σ²_ε) is a random effect that models variation across the trials. the model parameters, obtained by fitting (2) to cm score data, are (µ, d, β^bona, β^spf, σ²_ε). the first one, ∆^bona_i, is the absolute difference between the intervention probability of the test trial and the intervention probability of the bona fide training set. likewise, ∆^spf_i is the absolute difference between the intervention probability of the test trial and the intervention probability of the spoof training set. for configuration a in table 1, ∆^bona_i = 0 and ∆^spf_i = 1 for all bona fide trials; and ∆^bona_i = 1 and ∆^spf_i = 0 for all spoof trials. the difference between the bona fide and spoof class means (which relates to discrimination) is d + (β^spf - β^bona). considering the difference of the class-conditional means, the only difference between the two sets of biased models is in the sign of (β^spf - β^bona) or (β^bona - β^spf). an important observation is that when intervention is added to spoof utterances in training, it has a greater impact compared to adding intervention to bona fide utterances in training on neural classifiers, shown in the gap between c and d. to analyze the effect of each configuration on the detection score, we use the full model parameters for each intervention to define models for each configuration which were presented in table 2. by fitting a mixed-effects model on countermeasure scores from diverse cm models, we demonstrate the effect of data bias on scores due to interventions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/77.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/77.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b14fcf5b8c855c0f9740855513a98a40c42d172 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/77.txt @@ -0,0 +1 @@ +consensus clustering reconciles clustering information from different sources (or different executions) to be better fit than the existing clustering results. the consensus clustering results, however, can be biased due to different types of features, different executions of algorithms, different definitions of distance metrics and even different parameter settings, as is sharply observed by existing studies (jain, murty, and flynn 1999). all these factors may lead to disparity in clustering results. the major consensus clustering algorithms include hypergraph partitioning (strehl and ghosh 2003), (fern and brodley 2004), voting approach (dudoit and fridlyand 2003), (roth et al. 2003), mutual information (wang, she, and cao 2013) (topchy, jain, and punch 2004), co-association approach (xu and wunsch 2005), mixture model (topchy, jain, and punch 2004) (azimi and fern 2009), correlation consensus (wang et al. 2014), ensemble clustering (zhou et al.
2015), (gao 2016), (liu, latecki, and yan 2010), (huang, lai, and wang 2016), etc.one key observation is that in consensus clustering (strehl and ghosh 2003), if there exist noise and outliers in one source of features in any execution of algorithm, the clustering result might be significantly affected due to the least square loss function used in most of the clustering methods (such as k-means, gaussian mixture model), because the errors are squared. even worse, most of the time, the users have little prior knowledge of noise, which makes the clustering result more unstable and much harder to interpret with different initializations and parameter settings. for example, if one information source that we used for consensus clustering is not accurate, when we align the consensus clustering results against this "inaccurate" source, we will suffer from these inaccurate annotations. the inaccurate common characteristics extracted from the samples due to the biased clustering results, are in fact, however, less generalizable to those unseen ones.to address these issues, this paper proposes a robust consensus clustering schema that is minimally affected by the outliers/noise. in particular, we combine multiple experts' opinions on data clustering using the robust 1 -loss function that aims to find the maximum consistency on the experts' opinions with minimum conflict. our work can be viewed as an effective method of data clustering from heterogeneous information sources. the proposed method is practically feasible because it is independent of parameter settings of each clustering algorithm before aggregating the experts' opinions. driven by advertising applications, we apply the concensus clustering algorithm to cluster advertiser profiles (mahdian and wang 2009) into different clusters so as to accurately perform performance (e.g.. click, conversions) forecasting.the main contribution of this paper is summarized as follows.• to address the issue of consensus clustering performance degradation in existence of noise and outliers, we rigorously formulate the problem of robust consensus clustering as an optimization problem using the robust loss function.• to find the best solution for robust consensus clustering, we develop an effective algorithm upon admm to derive the optimal solution, whose convergence can be rigorously proved.• we experimentally evaluate our proposed method on both benchmarks and real-world advertiser segmentation and forecasting tasks, and show that our method is effective and efficient in producing the better clustering results. as an application, the proposed algorithm is applied for advertising forecasting tasks with more stable and trustful solutions.consensus clustering reconciles clustering information from different sources (or different executions) to be better fit than the existing clustering results. the consensus clustering results, however can be biased due to different types of features, different executions of algorithms, different definitions of distance metrics and even different parameter settings, as is sharply observed by existing studies(jain, murty, and flynn 1999).one key observation is that in consensus clustering(strehl and ghosh 2003), if there exist noise and outliers in one source of features in any execution of algorithm, the clustering result might be significantly affected due to the least square loss function used in most of the clustering methods (such as k-means, gaussian mixture model), because the errors are squared. 
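a small sketch of the co-association (average association) matrix m that the consensus step above builds on; the robust variant described earlier would then fit the consensus partition to m under an l1 loss (e.g., via admm) rather than the squared loss that makes k-means-style objectives sensitive to outliers. the function below only shows the standard m computation, as an assumption-light illustration.

import numpy as np

def co_association(partitions):
    """partitions: list of label arrays, one per view / algorithm run.
    m[i, j] is the fraction of partitions in which points i and j share a cluster."""
    n = len(partitions[0])
    m = np.zeros((n, n))
    for labels in partitions:
        labels = np.asarray(labels)
        m += (labels[:, None] == labels[None, :]).astype(float)
    return m / len(partitions)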
for example, if one information source that we used for consensus clustering is not accurate, when we align the consensus clustering results against this "inaccurate" source, we will suffer from these inaccurate annotations.• to address the issue of consensus clustering performance degradation in existence of noise and outliers, we rigorously formulate the problem of robust consensus clustering as an optimization problem using the robust loss function.• to find the best solution for robust consensus clustering, we develop an effective algorithm upon admm to derive the optimal solution, whose convergence can be rigorously proved.otherwise co-association consensus clustering standard consensus clustering looks for a consensus clustering (based on majority agreement) g * ∈ {0, 1} n×k , such that g * is closest to the all the given partitions, i.analysis given little (or no) prior information of clustering result from each view, one natural way to compute the associate between data points i and j is to get the expected value of average association m = , i.• k-means on original feature space • kc: k-means on the consensus similarity matrix; • nmfcc(li, ding, andevaluation metrics as the other clustering tasks, we use the clustering accuracy as the metric to evaluate the performance of our algorithms. we also compare several multiview clustering methods: spectral clustering using simple multi-view feature concatenation (denoted as fc), multi- view k-means (denoted as mvkmean)(cai, nie, and huang 2013), affinity propagation using multi-view features ( denoted as ap)(dueck and frey 2007), low-rank consensus clustering (denoted as lcc)(tao et al.connection to the proposed robust clustering method our method consists of two key steps: (i) generating clustering results using each view information and obtain the co-association matrix of m of eq.similarity computation using ks statistics given the pairwise similarity in each view, a natural way is to segment different them into different disjoint k subsets where each subset denotes a clustering using graph cut type algorithm (e.clustering performance given the fact that we do not have ground-truth for advertiser clustering results, we cannot directly evaluate the performance of our method. instead, we view the robust consensus clustering result as the advertiser segmentation result, and use it to re-generate the ecpm cost and win-rate c.therefore, we re-generate the ecpm and win-rate distributions using the following several consensus clustering results: (i) only ads textual; (ii) only win-rate; (iii) only ecpm cost; (iv) proposed robust consensus clustering; (v) consensus clustering using 2 distance; and compare their performances. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/770.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/770.txt new file mode 100644 index 0000000000000000000000000000000000000000..aebfab9101ab52274f24812ef29558805f94dab9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/770.txt @@ -0,0 +1 @@ +shortly after the introduction of the (stochastic) multi-armed bandits (mab) framework , practitioners and researchers quickly raised the issue of nonstationarity, thus questioning the restrictive assumption of the original model that the environment (meaning the payoff distributions of the actions) remains intact. 
this opened the way to the development of important and well-studied extensions of the model, including adversarial bandits , reinforcement learning , and more . in many situations, a potential shift in the environment is not solely the result of external factors, rather than a natural consequence of its interaction with previously made decisions (see for examples). in an attempt to address this issue of action-induced nonstationarity various models have been proposed with restless and rested bandits being the most prominent. in these settings, every arm is associated with a state-machine and its mean payoff depends on the current state. the state of each arm can change (potentially stochastically) at every round (in the restless case) or only after the arm is pulled (in rested case). even ignoring the learning aspect and assuming complete knowledge of the underlying arm-state distributions, any attempt to compute a (near-)optimal planning policy -usually via solving bellman's equations ) -requires an exponentially large space in the number of actions and hits the wall of strong inapproximability results .more recently, researchers have shifted their attention to special cases of restless bandits, which are simple enough to accept efficient (near-)optimal planning algorithms, yet expressive enough to capture fundamental action-induced nonstationary phenomena . immorlica and kleinberg first attempt to model the effect of deprivation in online decision-making by introducing the model of recharging (a.k.a., delay-dependent) bandits. here, the (mean) payoff of each action depends -in an increasing fashion -on the time elapsed since the action was played for the last time (often called "delay"). soon afterwards, a number of works focused on generalizations , variations , and special cases of the model. due to the computational hardness of the underlying planning problem, these works have a dual purpose: to construct an efficient near-optimal planning algorithm and, subsequently, to adapt it into an online learning policy for the case where the payoff distributions are unknown.last switch dependent bandits. in an attempt to capture a richer class of action-induced nonstationary phenomena, laforgue, clerici, cesa-bianchi, and gilad-bachrach recently introduced the model of last switch dependent (lsd) bandits. in their setting, the notion of "delay" -of central role in recharging bandits -is replaced by that of a "switch": a change in the course of action from the part of the decision-maker (see below). this shift of perspective not only strictly subsumes the recharging bandits model, but also captures additional natural behaviors, including that of satiation: the (gradual) degradation in performance due to the repeated use of resources.in this work, we study a variation of the lsd model, which we (informally) describe below:problem (last switch dependent bandits with monotone payoffs (k-mlsd)). we consider a setting where the payoff of each arm is a function of its state at any given round. the state of an arm can be any (positive or negative) integer and changes, at the end of each round, according to the following rules: when an arm is at a positive state τ > 0 and is not played at the current round, its state "increases" to τ + 1, while if it is played, its state transitions to -1. dually, if an arm is at a negative state τ < 0, and is played at the current round, its state "decreases" to τ -1, while if it is not played it transitions to +1. 
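the state dynamics just described translate directly into code; this is a plain simulator of the stated transition rules, with the payoff collected at the arm's state in the round it is played (an assumption consistent with the payoff description that follows).

def next_state(tau, played):
    """state update for one arm: positive states count rounds since the last play,
    negative states count consecutive plays."""
    if tau > 0:
        return -1 if played else tau + 1
    return tau - 1 if played else 1

def simulate_arm(payoff, actions, tau0=1):
    """roll out one arm; payoff(tau) is its (monotone non-decreasing) payoff function
    and actions is a sequence of booleans (play / no-play)."""
    tau, total = tau0, 0.0
    for played in actions:
        if played:
            total += payoff(tau)
        tau = next_state(tau, played)
    return total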
the payoff of each arm is a monotone non-decreasing function over the space of integer states. at each round, the decision-maker selects at most k of the available arms and collects the sum of the associated payoffs (evaluated at the corresponding states). the objective is to maximize the total collected payoff within a (potentially unknown) time horizon.the fundamental difference between the above model compared to its original formulation is the monotonicity. specifically, we assume that the payoff function of each arm is monotone non-decreasing over the whole set of integer states, while in this assumption is only made for its negative part. although the assumption excludes from the model any possible seasonal behaviors, our setting still widens the class of action-induced phenomena that can be captured (e.g., satiation), and still generalizes many existing works -either strictly or conceptually . in addition, monotonicity is a plausible assumption in many situations where the positive effect of an action after a period of deprivation is higher than while in satiation (an everyday example is food consumption). finally, we believe that monotonicity changes dramatically the approximability status of the problem; in fact, we conjecture that without this assumption the problem does not accept any polynomial-time constant approximations under standard complexity assumptions (yet proving it falls beyond the scope of this work). the validity of such a statement would justify the fact the algorithm developed in is not efficient and the provided guarantees involve additive losses. for any l, u ∈ z such that u ≥ l we denote by s u l = s ∩ the subset of states ranging from l to u, while we define s u l = ∅ in the case where l > u. for any arm and given states u ∈ z + and l ∈ z -, a recurrent interval, denoted by i(u, l), is a sequence of distinct states (and associated actions) during which the arm starts from state +1 and moves back to the same state after a number of rounds.the characteristic trajectory of a recurrent interval i = i(u, l) satisfies the following property: if one starts an arm from any state in the interval, namely τ ∈ s ∩, then by repeatedly playing the arm if and only if β i (τ ) = •, the trajectory of its states will follow the periodic pattern indicated by i.the following lemma evaluates the loss from restricting the set of recurrent intervals of each arm to only i(u, l) with l ≥ τ l for some τ l ≤ -1: lemma 3. for any instance of k-mlsd and any τ l ≤ -1, there exists a (deterministic) near-optimal solution, where the sequence of plays and non-plays of every arm consists of a concatenation of recurrent intervals of the form i(u, l) with l ≥ τ l , potentially followed by a sequence of non-plays until the end of the time horizon.1) originate from the fact that, when at most k arms can be played at each round, the (total) fraction of time any arm is pulled during any recurrent interval cannot be more than k (recall, an arm is played exactly -l times during i(u, l)). for any instance of k-mlsd and any τ l ≤ -1, there exists a (deterministic) near-optimal solution, where the sequence of plays and non-plays of every arm consists of a concatenation of recurrent intervals of the form i(u, l) with l ≥ τ l , potentially followed by a sequence of non-plays until the end of the time horizon.by construction of π(i), for every arm i the sequence πi (i) consists of a concatenation of recurrent intervals i(u, l) such that l ≥ τ l , potentially followed by a sequence of non-plays. 
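one consistent reading of a recurrent interval i(u, l): starting from state +1, rest until the state reaches u, play once at u, keep playing down to state l+1, then rest once to return to +1, so the arm is played exactly -l times over u - l rounds and collects q(u, l) = p(u) + sum of p(τ) for τ = l+1, ..., -1 (the aggregated payoff used below). the exact bookkeeping of the final rest is an assumption of this sketch.

def recurrent_interval(u, l):
    """play/no-play pattern of i(u, l); requires u >= 1 and l <= -1."""
    return [False] * (u - 1) + [True] * (-l) + [False]

def interval_payoff(payoff, u, l):
    """aggregated payoff q(u, l) = p(u) + sum_{tau = l+1}^{-1} p(tau)."""
    return payoff(u) + sum(payoff(t) for t in range(l + 1, 0))

feeding recurrent_interval(u, l) into the arm simulator sketched earlier reproduces interval_payoff(payoff, u, l).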
consider an instance i of k-mlsd and let π(i) be some deterministic solution such that, for every arm i, the schedule πi (i) consists of a concatenation of only recurrent intervals i(u, l) with l ≥ τ l , potentially followed by a sequence of non-plays. hence, for every j ∈ such that v j i,u,l = 0 and conditioned on w = v j , the triple (i, i(u, l), ν) does not belong to s and, thus, p (i, i(u, l), ν) ∈ s) | w = v j = 0. for the third equality, note that conditioned on w = v j , the only way that (i, i(u, l), ν) can belong to s is if the triple t i,u,l sampled for the coordinate i, u, l of w is (i, i(u, l), ν). assuming that algorithm 1 has access to η-estimates (p i (τ )) i of the payoff function of the arms, the algorithm computes an optimal solution x * to (lp) with expected aggregated payoffs qi (u, l) = pi (u) + -1 τ =l+1 pi (τ ). note that for every i ∈ a, u ∈ s τ max 1 and l ∈ s -1 τ l , it holds that qi (u, l) ≥ q i (u, l) + lη. the penultimate inequality holds because qi (u, l) ≥ q i (u, l) + lη for all i ∈ a, u ∈ s τ max 1 and l ∈ s -1 τ l and the last using lemma 3. , 2k} and so on. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/771.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/771.txt new file mode 100644 index 0000000000000000000000000000000000000000..e84d5660b8969a409c1f195f7961e4f04dcfedb9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/771.txt @@ -0,0 +1 @@ +automated music generation has a long history (briot, hadjeres, and pachet 2017). in recent years, large-scale neural network models for music generation have arisen, trained on massive datasets and requiring significant computation (civit et al. 2022). while these approaches have proven successful at replicating genres of music like those in their training sets, due to the nature of large-scale neural network models we expect this may not prove true for dissimilar genres. specifically, we hypothesize that these large scale models will perform poorly for out-of-distribution (ood) genres of music, those representing underrepresented or less globally popular types of music. we therefore conducted a study on one such large-scale neural network model to understand (1) how it performed on ood music genres, and (2) how we might best adapt the model to an ood music genre.for this paper, we focus on google magenta's music-vae model (roberts et al. 2018). we lack the space for a full discussion of the model, but direct interested readers to the original paper. this hierarchical variational autoencoder is trained on an enormous dataset of roughly 1.5 million unique midi files collected from the web. while its exact dataset was not made public, online repositories of midi files are typically made up of fan-made annotations of popular songs. thus, automatic and indiscriminate data collection would result in an unbalanced dataset in terms of genre diversity. this is due to the fact that popular charttopping songs are much more likely to be annotated in the midi format. the training requirements for musicvae are a problem when it comes to generating underrepresented music, like experimental music or music from particular cultures with distinct musical traditions. these genres of mu-sic are unlikely to have the massive datasets needed to train models like musicvae. 
even if such datasets existed, new musical genres are constantly being invented, meaning we could never use this approach to generate all underrepresented genres of music. if we want to be able to generate underrepresented music, one option outside of training musicvae from scratch is transfer learning (tan et al. 2018). transfer learning refers to the collection of approaches that can adapt knowledge from a model pre-trained on some source dataset (i.e., popular midi files) to a target domain with limited data (i.e., underrepresented music midi files). however, these approaches tend to require significant similarity between the source and target domains, which may not hold true for popular and underrepresented music genres (marchetti et al. 2021). combinational creativity, also sometimes combinatorial creativity, is a type of creative problem solving in which two conceptual spaces are combined to represent a third or new conceptual space (boden 2009). while different musical genres may vary in terms of their local features (e.g., melodies), they are all still music. as such, we hypothesized that a combinational creativity-inspired transfer learning approach may be able to outperform traditional transfer learning approaches at the task of adapting music-vae to an underrepresented genre of music (mahajan and guzdial 2023). in this paper, we explore the application of ce-mcts (mahajan and guzdial 2023), a combinational creativity-based transfer learning approach to adapt musicvae to an underrepresented music genre. while there are many deep neural network (dnn) models like musicvae for music generation, applying transfer learning to a dnn model for music generation remains under-explored (svegliato and witty 2016;marchetti et al. 2021). in addition, while combinational-creativity-based transfer learning approaches have been applied to many domains including image classification (banerjee 2021) and financial health prediction (mahajan and guzdial 2023), they have never been applied to the music generation domain. in the remainder of this paper, we first demonstrate an experiment to identify an out-of-distribution (ood) music genre for musicvae. we then introduce ce-mcts and a number of more standard transfer learning baselines. finally, we demonstrate their performance in terms of reconstruction accuracy for an ood music dataset and present a short discussion on their music generation performance. while these approaches have proven successful at replicating genres of music like those in their training sets, due to the nature of large-scale neural network models we expect this may not prove true for dissimilar genres. specifically, we hypothesize that these large scale models will perform poorly for out-of-distribution (ood) genres of music, those representing underrepresented or less globally popular types of music. this is due to the fact that popular chart-topping songs are much more likely to be annotated in the midi format. while different musical genres may vary in terms of their local features (e. in this paper, we explore the application of ce-mcts (mahajan and guzdial 2023), a combinational creativity-based transfer learning approach to adapt musicvae to an underrepresented music genre. while there are many deep neural network (dnn) models like musicvae for music generation, applying transfer learning to a dnn model for music generation remains under-explored (svegliato and witty 2016;marchetti et al.
2021).in regards to iranian (persian) traditional or folk music, the prior work focuses on generation of music via traditional non-machine learning approaches and/or training from scratch. we note our goal is not to generate iranian music specifically, but to explore the best ways to adapt large dnn music generation models to ood genres like iranian music. we collected 100 midi files as this is in line with the target genre dataset size for prior finetuning-based transfer learning approaches with music generation models(svegliato and witty 2016;marchetti et al.we hypothesized that ce-mcts would outperform standard transfer learning approaches at adapting musicvae to our iranian folk music dataset.• finetuning (all), in which we use finetuning, a traditional network-based transfer learning approach that has been applied in prior music generation dnn transfer learning work(tan et al.in this paper, we investigated how musicvae, a music generation model, can be adapted to ood music. this suggests that we can successfully adapt these large music generation models for underrepresented genres of music, and that combinational creativity can be an especially helpful tool in this task. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/772.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/772.txt new file mode 100644 index 0000000000000000000000000000000000000000..222676ee2dce345cb3f2a7c46752ab9702241ad7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/772.txt @@ -0,0 +1 @@ +self-supervised methods have garnered significant interest due to their heuristic approach to learning representations that capture the semantic information of data without requiring explicit supervision in the form of labels. contrastive learning based methods among the former would use the repulsion among arbitrary pair of points in the batch, while non-contrastive would rely on the consistency among different views of the same image. while self-supervised representation learning becomes more ubiquitous in the wild, especially in the important domain such as medical imaging , the theoretical grounding of these methods would potentially help avoid the pitfalls in applications. unsurprisingly, researchers are already trying to build theoretically sound understanding of modern self-supervised representation learning methods.the overall goal of this work is to understand self-supervised representation learning through the lens of nonlinear dimensionality reduction methods (e.g. laplacian eigenmaps ) and low-rank matrix completion problem . to this end, we take on a laplace operator perspective on learning the optimal representations in the manifold assumption. we then derive a trace maximization formulation to learn eigenfunctions of the laplace operator of the underlying data manifold. we adopt the heat kernel based embedding map that in theory under certain conditions is an almost isometric embedding of the low-dimensional manifold into the euclidean space. as a result, we discuss how existing several ssl methods (e.g. simclr , barlowtwins , vicreg ) can be comprehended under this view.it is important to note that our current understanding of the topic lacks one crucial aspect. traditional spectral methods commonly operate with complete kernel matrices, whereas our approach deals with incomplete and potentially noisy ones. the only available similarity information among examples is derived from the data augmentation process, which generates a positive pair. 
meanwhile the remaining examples in the batch are either seen as negative ones (contrastive) or are not considered at all (non-contrastive). a pertinent and often overlooked question emerges: how can self-supervised learning methods effectively leverage such limited signals to converge towards meaningful representations? in response, we shed light on this matter by establishing a connection between ssl and a matrix completion problem. we demonstrate that these optimization problems are lagrangian dual of each other, implying that optimizing the ssl objective simultaneously entails reconstructing the kernel matrix. we can summarize the contributions of this paper as follows:• we propose an eigen-problem objective for spectral embeddings from graphs induced by augmentations and use it to interpret modern ssl methods.• we show that ssl methods do laplacian-based nonlinear dimensionality reduction and low-rank matrix completion simultaneously. we leverage theory behind matrix completion problem to provide insights on the success of ssl methods and their use in practice.• while the number of observed entries required by theory decreases with epochs, we find that the actual number is a constant and the former eventually intersects the latter.• we find a possible explanation for disparity in downstream performance of the backbone and projection outputs.graph laplacian and spectral embedding given a graph γ = (v, e) with |v | = n vertices and a set of edges e ij = (v i , v j ) that form a weighted adjacency matrix a ij = w ij with non-negative weight w ij ≥ 0, whenever there is e ij ∈ e, otherwise 0. a typical algorithm would include constructing a graph based on the affinity matrix, computing k first eigenvectors of its laplacian and setting the embeddings to be the rows of the matrix u ∈ r n×k that contains the eigenvectors as columns.matrix completion let m ∈ r n×n is partially observed matrix with rank r.notation any matrix x ∈ r n1×n2 has a singular value decomposition (svd) x = uσv ⊤ , where the columns of the matrix u ∈ r n1×n1 are left singular vectors, the singular values.to obtain the heat kernel matrix, we use the normalized random walk laplacian l rw , for which the heat kernel matrix is as before:. the kernel matrix choice in barlowtwins is a simple bi-diagonal adjacency matrix a ij = 1 as long as (i, j) is a positive pair, and 0 otherwise.we argue that the objective in (3) with a substitute incomplete kernel matrix h t implicitly contains an objective for the low-rank matrix completion problem. since the true underlying heat kernel matrix h t is observed only partially with known entries indicated by a collection of index pairs ω induced by augmentation process, we can form a sampling symmetric matrix w: w ij = 1 if (i, j) ∈ ω, and 0 otherwise, indicating observed entries.we proceed by showing that maximisation of the trace formulation in(12)embraces reconstruction of the incomplete underlying kernel matrix with entries known to us only from the augmentation process and specified by the matrix w. 
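a compact sketch of the objects just introduced: the heat kernel of the normalized random-walk laplacian and the sampling mask w built from augmentation-induced positive pairs. the graph is assumed to have no isolated nodes, and the adjacency construction itself is whatever similarity graph the method prescribes.

import numpy as np
from scipy.linalg import expm

def heat_kernel(adj, t=1.0):
    """h_t = exp(-t * l_rw) with l_rw = i - d^{-1} a (assumes every node has degree > 0)."""
    deg = adj.sum(axis=1)
    l_rw = np.eye(len(adj)) - adj / deg[:, None]
    return expm(-t * l_rw)

def sampling_mask(n, positive_pairs):
    """w[i, j] = 1 only for entries observed via augmentation (positive pairs plus the diagonal)."""
    w = np.eye(n)
    for i, j in positive_pairs:
        w[i, j] = w[j, i] = 1.0
    return w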
however, whenever x is obtained as the projection head output of a network learned via self-supervised learning method with a spectral embedding type objective, the incoherence of x is inherently tied to the incoherence of the kernel matrix.for illustrative purposes we plot the theoretical bound p * on the fraction of the observed entries for a successful matrix completion from(15)in figure1along with the actual fraction p of observed entries under self-supervised learning augmentation protocol to demonstrate that the latter intercepts the former given enough training epochs. to be specific, we set the size of the training dataset n = 50k (cifar-10 size), the cluster size (number of same class examples) n min = 5000 (c = 10), the number of views a = 2, the number of epochs n epochs range from 1 to 1000, and r = 512 (embedding size), assume µ = 20 (which seems to be a fair estimate in light of the experiments in section 5) and c 0 = 5, a constant used to control the probability of exact matrix recovery. to estimate coherence of representations in figure2(left), we embed the training set of imagenet-100 to get representations matrix a ∈ r 125952×512 and compute incoherence µ(a) using effective rank r e (a).in this work, we make an attempt to bridge modern self-supervised methods with classical laplacian-based dimensionality reduction methods and low-rank matrix completion in hopes to provide theoretical insights on the recent successes of ssl methods.we show that these methods are not only doing laplacian-based nonlinear reduction but are able to approximate and recover the truncated version of the underlying laplace operator given only noisy and incomplete information from augmentation protocol by adopting low-rank matrix completion extensive literature and results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/773.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/773.txt new file mode 100644 index 0000000000000000000000000000000000000000..444d818fc9c5d7401ca4a5b505fa9430113634f6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/773.txt @@ -0,0 +1 @@ +graphs are rich and expressive data structures that can represent properties of nodes and links in interconnected systems such as social networks, molecules and knowledge graphs. several graph neural networks (gnns) and graph transformers (gts) that leverage powerful data processing architectures have been proposed to learn challenging tasks for a wide range of datasets . recently, self-supervised learning (ssl), which was born out of natural language processing and was later successfully applied to computer vision , has been demonstrated to aid in graph representation learning. ssl has ushered in more powerful gnn and gt models . the success of self supervised models for graph-structured data has been demonstrated in different applications such as recommendation systems and molecular property prediction .self-supervision exploits unlabelled data to learn representations that may be useful across many downstream tasks . the state-of-the-art (sota) in graph self-supervision constrains pre-training to only one dataset (e.g., corafull), with one or many pre-training tasks at a time. as a result, representations learnt through ssl are likely to be specialized to one particular dataset, and thus lack the ability to generalize well to other graphs (e.g., dblp) of the same family (e.g., citation networks). 
thus, state-of-the-art graph ssl entails individualized pre-training for every dataset of interest, and exhibits several drawbacks. first, each model learns a distinct set of parameters, independent of other similar datasets. such a model does not leverage any shared parameters that could lead to learning universal features, nor does it exhibit the ability to exploit data from other datasets during training. this hampers generalizability of the resulting models, and as shown in this work, also the performance of ssl models on downstream node classification tasks. second, owing to the different node and edge feature dimensions of different datasets, models obtained with state-of-the-art ssl are not compatible with other datasets. as a result, with the availability of new datasets, it is imperative to build a new model from scratch, and one cannot leverage previously learnt representations to inform the training process, and reduce the computational load. in other words, state-of-the-art ssl models do not exhibit adaptability. finally, training a separate model for each dataset increases the computational cost of self-supervision. multiple models additionally require more storage, adding to the cost of ssl. on the other hand, graphs belonging to the same family are known to exhibit universal patterns .learning the universal representations across graphs poses an important challenge of disparate node and edge features for different graphs. specifically, node features of different graphs typically exhibit different dimensionality, and do not render themselves to straightforward comparison across graphs. additionally, the features of different graphs can represent different quantities even if they had the same dimensionality, hindering unified processing of these features. as a result, it is imperative for a universal ssl approach to be able to accommodate this diversity, and treat disparate node and edge features in a unified manner. along similar lines, there has been an increased interest in developing models that can handle data of different modalities, and learn features from different sources of data, such as videos and text, through modular structures and carefully crafted embeddings . these foundation multi-modal approaches transform multi-modal data into a common representation space to learn better and robust features. such an approach has met with incredible success, with a host of different architectures and data processing pipelines developed in the recent years , and is paving the way for artificial general intelligence . inspired by the success of such models, this work aims to investigate if a universal learning approach can be adopted to learn representations from multiple graphs with disparate features, and if the resulting models exhibit better performance in downstream tasks.in this work, we propose a generic framework that is rooted in universal representation learning capable of learning universal features from multiple graphs. we use a state-of-the-art graph transformer architecture to construct a universal model, and train it in an end-to-end manner with six benchmark citation networks. we explicitly address the challenges with ssl outlined above, and demonstrate the superiority of the resulting models over traditional approaches. specifically, this work makes the following contributions:1. present a universal representation learning framework through self supervision with multiple graphs (u-ssl). 
our universal model consists of graph-specific parameters that accommodate the disparity of node features of different graphs, and universal parameters that learn representations generic to all graphs used during training. the model can be trained to learn both graph-specific and universal parameters in an end-to-end manner.2. present a graph transformer-based u-ssl model, and perform extensive experiments with five benchmark citation network datasets, demonstrating the superiority of the resulting models over those obtained with ssl. we also demonstrate that training universal models is computationally efficient compared to ssl.3. demonstrate scalability and adaptability of universal models with a large citation network dataset (ogbn-arxiv).the proposed u-ssl framework is aligned with the core features of the foundation models, specifically, it learns across multiple graphs, exhibits properties such as unification, adaptability, and generalizability, and can serve several downstream tasks, resulting in foundation graph models. inspired by the success of such models, this work aims to investigate if a universal learning approach can be adopted to learn representations from multiple graphs with disparate features, and if the resulting models exhibit better performance in downstream tasks. our universal model consists of graph-specific parameters that accommodate the disparity of node features of different graphs, and universal parameters that learn representations generic to all graphs used during training. present a graph transformer-based u-ssl model, and perform extensive experiments with five benchmark citation network datasets, demonstrating the superiority of the resulting models over those obtained with ssl.the proposed u-ssl framework is aligned with the core features of the foundation models, specifically, it learns across multiple graphs, exhibits properties such as unification, adaptability, and generalizability, and can serve several downstream tasks, resulting in foundation graph models.the u-ssl model can take as input, disparate features from different graphs, and learn universal features that are common to all the datasets, thereby generalizing well to these datasets, and potentially also to other similar datasets.pre-training models with ssl involves selecting one or more pre-training task (also referred to in the literature as pretext tasks), typically depending on the type of downstream task, and appending a model with heads to learn the different tasks. we demonstrate the superiority of the features learnt with u-ssl by evaluating and comparing the performance of models obtained with ssl, u-ssl and supervised learning on node classification for all the graphs. the base learning rate is set to 1e -3 for pre-training and supervised learning, and 1e -2 for fine-tuning of ssl and u-ssl dataset baseline ssl u-ssl corafull 0. these results support our hypothesis, and demonstrate that there is more to graphs than can be learnt with plain ssl, and learning universal representations across graphs with u-ssl can bridge the gap between supervised and self-supervised performance.adaptability: finally, we study the adaptability of the u-ssl framework to new datasets, by examining if the representations learnt from a set of datasets can be used to solve the downstream task for a new dataset. 
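a minimal sketch of the modular design described above: one small graph-specific input module per dataset (absorbing its feature dimensionality) feeding a shared universal encoder, with per-graph heads for the pre-training task. the linear specific modules, the plain transformer encoder, and the layer sizes are assumptions standing in for the actual graph transformer used in the paper.

import torch.nn as nn

class USSL(nn.Module):
    def __init__(self, feat_dims, hidden=256):
        super().__init__()
        # graph-specific parameters: one input projection per dataset
        self.specific = nn.ModuleDict({g: nn.Linear(d, hidden) for g, d in feat_dims.items()})
        # universal parameters shared by all graphs
        layer = nn.TransformerEncoderLayer(d_model=hidden, nhead=4, batch_first=True)
        self.universal = nn.TransformerEncoder(layer, num_layers=2)
        # one self-supervision head per graph
        self.heads = nn.ModuleDict({g: nn.Linear(hidden, hidden) for g in feat_dims})

    def forward(self, graph, x):               # x: (num_nodes, feat_dims[graph])
        h = self.specific[graph](x).unsqueeze(0)
        h = self.universal(h).squeeze(0)
        return self.heads[graph](h)

# adapting to a new dataset: add one specific module (and head), keep `universal` frozen
# model.specific["new_graph"] = nn.Linear(new_dim, 256)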
we leverage the modular nature of the u-ssl models, and introduce a new graph-specific module θ_6 dedicated to the new graph, keeping the universal representation learning module φ unchanged. we observe that this performance is comparable to that of the u-ssl model trained with six datasets discussed in the previous section, demonstrating the adaptability of u-ssl models. thus, one can train a u-ssl model with a set of benchmark datasets, and then simply learn a graph-specific module for a new dataset to achieve comparable performance. we construct one u-ssl model with a state-of-the-art graph transformer, and with extensive experiments, show that the proposed framework provides an efficacious, efficient, scalable and adaptable approach to learn universal representations from graphs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/774.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/774.txt new file mode 100644 index 0000000000000000000000000000000000000000..400c98f5e65f623ceebea2a44090e3f3485c9e08 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/774.txt @@ -0,0 +1 @@ +large language models tang et al. (2023) have shown emergent abilities. wei et al. (2022a) wei et al. (2022b) discover chain-of-thought (cot) prompting as a simple and broadly applicable method for enhancing reasoning in language models. many works zhou et al. (2022a); wang et al. (2022b); shi et al. (2022); zhang et al. (2023a;b); wang et al. (2022a); zhou et al. (2022b); fei et al. (2023); yang et al. (2023); shi et al. (2023); diao et al. (2023) have tried to make further improvements based on cot. prompt optimization is a black-box optimization problem, as it can only be evaluated based on the quality of the generated rationales or the correctness of the answers provided by the llm. in this regard, we present our empirical findings on how to design f to update prompting constrained by the limited input sequence length based on two attributes of cot: correctness and depth. correctness is a crucial criterion for prompting engineers to update the prompting. here, we want to ask the question: can a valid rationale be replaced with an invalid one without performance drop? depth refers to the number of reasoning steps which can be reflected by the length of cot (different steps are separated by comma or \n). a deeper cot is longer and typically contains more complex rationales, while a shallower cot is more straightforward and contains fewer reasoning steps. here, the question is: given a similar question, can a deep cot always be replaced by a shallow cot? we choose multiple datasets across different tasks including arithmetic reasoning (gsm8k; cobbe et al.). to address this question, we substitute ground-truth rationales that produce the correct answer with zero-shot-cot rationales that produce the wrong answer. to distinguish between deep cot and shallow cot, we have established a simple heuristic parameter denoted as ξ, which is based on the number of \n in the cot. if the number of \n surpasses ξ, we classify it as a deep cot. conversely, if a cot has fewer \n than ξ, we classify it as a shallow cot. shallow cots are selected by replacing lengthier rationales with shorter rationales in a batch. additionally, promptings that consist of shorter rationales exhibit superior performance compared to those with lengthier rationales.
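the ξ heuristic above is easy to state in code; the newline-counting rule follows the description directly, while the batch-filtering helper is an illustrative reading of how shallower rationales replace lengthier ones.

def is_deep_cot(rationale: str, xi: int) -> bool:
    """a chain-of-thought is 'deep' if its number of newlines exceeds the threshold xi."""
    return rationale.count("\n") > xi

def keep_shallow(rationales, xi):
    """retain only shallow rationales from a batch of candidate demonstrations."""
    return [r for r in rationales if not is_deep_cot(r, xi)]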
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/775.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/775.txt new file mode 100644 index 0000000000000000000000000000000000000000..6fbef995fc561a4b09a5bcee684ff8a428edb2c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/775.txt @@ -0,0 +1 @@ +conformal prediction (cp), a prominent uncertainty quantification technique, has drawn increasing attention in statistics and machine learning over the past decade. with its roots in classical frequentist statistics, this framework enables the construction of reliable prediction sets without the need for any distributional assumptions . a key advantage of conformal prediction lies in its validity guarantees for the constructed prediction sets, which cover the true outcomes with high probability. this makes it appealing for applications in safety-critical domains, such as risk assessment in finance , medical diagnosis and disease prediction , drug discovery and toxicity prediction , among many others.another machine learning (ml) setting dealing with set-valued data is partial label learning (pll), a specific type of weakly supervised learning . in a sense, pll is orthogonal to conformal prediction: while the predictions produced by cp are set-valued, the data used for training and calibration is supposed to be precise. in pll, it is exactly the other way around: although the training data might be imprecise (set-valued), the goal is to induce a unique model producing precise (point) predictions. this may strike as odd, as one may argue that if the training data is imprecise or ambiguous, it might be all the more important to reflect this imprecision or ambiguity in the induced model and the predictions produced by this model. for example, one may allow for a set of incomparable, undominated models, resulting, for instance, from the interval order induced by set-valued loss functions , or by the application of conservative, imprecise bayesian updating rules .as an alternative, we suggest the use of cp to capture (predictive) uncertainty in the setting of pll. in other words, we propose to combine pll and cp within a single framework. to this end, we propose a generalization of the cp procedure that can be applied to set-valued training and calibration data. for this approach, we establish theoretical validity guarantees. moreover, through experimental studies, we showcase the enhanced accuracy of our method in weakly supervised learning settings compared to natural baselines. assuming that x new is observed, but y new is not, cp aims to construct a prediction set of the form t (x new ) ⊆ y that is valid in the sense that y new ∈ t (x new ) with high probability., 2002a,b].for any set of nonconformity scores e, define the critical score q(e, ǫ) in terms of its ⌈(1 + |e|)(1 -ǫ)⌉ smallest value, or equivalently, its |e| -1 ⌈(1 + |e|)(1 -ǫ)⌉ empirical quantile. 
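the critical score just defined, and the prediction set it induces, in a few lines of split conformal prediction; the nonconformity score 1 - f(x)_y matches the one appearing later in the text, and the calibration data is assumed fully labeled here (the vanilla case, before the partial-label extension).

import math
import numpy as np

def critical_score(scores, eps):
    """q(e, eps): the ceil((1 + |e|) * (1 - eps))-th smallest nonconformity score."""
    e = np.sort(np.asarray(scores))
    k = math.ceil((1 + len(e)) * (1 - eps))
    return e[min(k, len(e)) - 1]   # if k exceeds |e|, the exact rule returns the full label set

def prediction_set(probs, q):
    """labels whose nonconformity 1 - f(x)_y does not exceed the critical score."""
    return {y for y, p in enumerate(probs) if 1.0 - p <= q}

# usage: q = critical_score(1.0 - calib_probs_of_true_labels, eps=0.1)
#        t_new = prediction_set(model_probs_for_x_new, q)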
furthermore, given an instance x ∈ x , a classifier f : x -→ p(y), a set of nonconformity scores e, and an error rate ǫ, define the prediction set t (x, f , e, ǫ) as.icp outputs t (x new , f , e, ǫ) as the prediction set ŷnew for a new test point x new , thereby satisfying the marginal coverage property (in expectation) if samples in d calib ∪ {(x new , y new )} are drawn exchangeably from a joint probability distribution over the data space.coming back to the idea of combining conformal prediction with partial label learning, we are now again interested in the case of set-valued training data (3), where each data instance x i is associated with a set of potential labels s i ⊆ y. if the data points in o ′ calib ∪ (x new , y new ) are exchangeable, then the prediction set (11) with underlying score set (10) satisfies. the vanilla cp guarantees that the prediction set t (x new , fpll , e 1 , ǫ) is valid. to conclude the proof, we show that t (x new , fpll , e 1 , ǫ) ⊆ t (x new , fpll , e max , ǫ).5, then the prediction set t (x new , fpll , e all , ǫ) is valid. again, we prove this result by showing t (x new , fpll , e 1 , ǫ) ⊆ t (x new , fpll , e all , ǫ).first, observe that for any instance (x j , s j ) ∈ o calib , the nonconformity score for any false label in a candidate set is at least as great as the predicted probability of the true class, i. by definition, we have 1 -fpll (x l ) y l ≤ q(e 1 , ǫ), which implies fpll (x l ) y l ≥ 1 -q(e 1 , ǫ).the validity of the prediction set t (x new , fpll , e mean , ǫ) is established by the following theorem. if the data points in o ′ calib ∪ (x new , y new ) are exchangeable and fpll (x j ) yj ≥. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/776.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/776.txt new file mode 100644 index 0000000000000000000000000000000000000000..e8e5237dcbf3dea24f00a244f1b06245394fc69a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/776.txt @@ -0,0 +1 @@ +the development of state-of-the-art deep learning models is often limited by the amount of available training data. for instance, applications in precision medicine require a large body of annotated data. those exist, but are typically distributed across multiple locations and privacy issues prohibit their direct exchange. federated learning (fl) overcomes this issue by distributing the training process rather than sharing confidential data. models are trained locally at multiple clients and subsequently aggregated at a global server to collaboratively learn. many fl approaches such as federated averaging (fedavg) assume the data to be generated i.i.d., which usually cannot be guaranteed. for instance, different hospitals might use different experimental equipment, protocols and might treat very different patient populations, which can lead to distributional differences between the individual data sites. in such a case, covariate shifts have to be addressed, which is particularly challenging in scenarios where the target client does not have access to labeled training data. there, knowledge has to be both extracted from diverse labeled source clients and transferred without direct estimates of the generalization error.unsupervised domain adaptation (uda) addresses distributional differences between a label-rich source domain and an unlabeled target domain for improved out-of-distribution per- formance. 
most popular deep learning models utilize an adversarial strategy, where an adversary is trained to discriminate whether the samples are generated by the source or target distribution. simultaneously, the feature generator attempts to fool the adversary by aligning the latent representations of the two data sources, which encourages well supported target predictions if the adversary fails to separate the domains. however, these adversarial strategies generally require concurrent access to both source and target data, prohibiting their use in a federated setting without sharing encrypted data representations or using artificially generated data. we propose federated adversarial cross training (fact), a federated deep learning approach designed to leverage inter-domain differences between multiple source clients and an unlabeled target domain. specifically, we address the multi-source-single-target setting with non-i.i.d. data sources distributed across multiple clients. to adapt to the domain of an unlabeled target client, we propose to directly evaluate the inter-domain differences between source domains. this allows us to identify domain specific artifacts without adversarial maximization and thus facilitates distributed training among clients. in empirical studies we show that fact substantially improves target predictions on three popular multi-source-single-target benchmarks with respect to state-of-the-art federated, non-federated and source-free domain adaptation models. moreover, fact outperforms state-of-the-art uda models in several standard settings, also comprising standard single-source-single-target uda, where the basic model assumptions of fact are violated (i.e., non-i.i.d. source domains). finally, we investigate the behaviour of fact in different federated learning scenarios to motivate its application to real world problems. we show that fact benefits from additional source clients even though they are subject to strong covariate shifts, that fact is stable in applications with many source clients each carrying only a small number of training samples, and that fact can be efficiently applied in settings with communication restrictions. our implementation of fact, including code to reproduce all results shown in this paper, is publicly available at https://github.com/jonas-lippl/fact.
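For orientation, the following PyTorch sketch shows one FedAvg-style communication round (local training at each client, then weighted parameter averaging at the server), i.e. the federated setup referred to above; it is not FACT's cross-training objective, and all names and hyperparameters are assumptions.

import copy
import torch

def fedavg_round(global_model, clients, local_steps=1, lr=1e-3):
    """One FedAvg-style round: each client trains a copy of the global model on its
    private data loader; the server averages the resulting parameters, weighted by
    the number of local samples."""
    states, weights = [], []
    for loader in clients:                      # each loader holds one client's private data
        local = copy.deepcopy(global_model)
        opt = torch.optim.SGD(local.parameters(), lr=lr)
        for _ in range(local_steps):
            for x, y in loader:
                opt.zero_grad()
                torch.nn.functional.cross_entropy(local(x), y).backward()
                opt.step()
        states.append(local.state_dict())
        weights.append(len(loader.dataset))
    total = sum(weights)
    avg = {k: sum(w / total * s[k].float() for w, s in zip(weights, states))
           for k in states[0]}                  # weighted average of all parameters
    global_model.load_state_dict(avg)
    return global_model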
we show that fact benefits from additional source clients even though they are subject to strong covariate shifts, that fact is stable in applications with many source clients each carrying only a small number of training samples, and that fact can be efficiently applied in settings with communication restrictions.adversarial unsupervised domain adaptation traditional uda approaches aim to transfer knowledge from a single labeled source domain to an unlabeled target domain. here, a pre-trained source model is used to transfer knowledge to the target domain and subsequently fine-tuned exclusively on the target data with the objective to make the target predictions both individually certain and diverse. these models additionally use image augmentation strategies on the source data, which on their own out-perform state-of-the-art domain adaptation settings on multi-source-single target experiments. we assume each client to be associated with an individual domain and focus on the out-of-distribution performance on data from a single target client without training labels, corresponding to the unsupervised federated domain adaptation (ufda) setting introduced by peng et al.we propose federated adversarial cross training (fact) as a simple and highly efficient training scheme which leverages the inter-domain differences of distributed data to maximize the information transfer to an independent target domain without access to labeled data. the federated baselines are fada, which calculates a dann based adversary for each domain pair at the server based on an encrypted representation of the private data, and federated knowledge alignment (fedka), which matches the feature embedding between two domains using a multiple kernel variant of the maximum mean discrepancy in combination with a federated voting strategy for model fine-tuning.fact uses inter-domain differences between source domains to identify and mitigate domain shifts in the target domain. since fact requires a minimum of two source clients to apply the proposed training scheme without limitations, we artificially distribute each source domain to two equally sized source clients. in particular, svhn, mnistm and syn substantially benefit from additional source clients, which might be due to the large domain shifts these domains exhibit to most of the potential source domains.this also applies to standard uda scenarios with a single source domain, violating the main assumption of fact that data available at different clients are non-i. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/777.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/777.txt new file mode 100644 index 0000000000000000000000000000000000000000..21553f973236d4c81244c576124eb8920f3f8420 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/777.txt @@ -0,0 +1 @@ +accurate estimation of predictive uncertainty plays a crucial role in high-stakes real-world applications, particularly in the field of medical ai, where precise and reliable classification of diseases and conditions is paramount. machine learning models have demonstrated their potential in aiding medical professionals with accurate diagnosis and treatment decisions. 
however, relying solely on point predictions without considering the associated uncertainty can lead to erroneous conclusions and suboptimal patient care.to illustrate the significance of model uncertainty quantification in medical ai, let us consider a classification task involving the identification of different types of skin lesions based on diagnostic images. the machine learning model is trained on a large dataset of annotated skin lesion images, along with corresponding clinical information. the goal of the model is to classify new, unseen images into specific categories, such as malignant melanoma, benign nevi, or basal cell carcinoma. suppose the model predicts a given skin lesion as malignant melanoma, indicating a high probability of malignancy. without an understanding of the associated uncertainty, medical professionals may proceed with aggressive treatment or surgical intervention, potentially subjecting patients to unnecessary procedures. there are inherent uncertainties in the prediction, stemming from various sources, such as variations in imaging quality, complex morphological features, or overlapping characteristics between different lesion types. therefore, for a prediction to be acceptable, in addition to the ability to achieve high predictive accuracy, it is also crucial to have a measure of the predictive uncertainty.although there are popular approaches to quantify the model predictive uncertainty, e.g. bayesian methods such as mc-dropout (gal and ghahramani 2016) and deep-ensemble (lakshminarayanan, pritzel, and blundell 2016), and evidential approaches (sensoy, kaplan, and kandemir 2018;yuan et al. 2020;sensoy et al. 2021), the lack of formal guarantees is a major limitation in the state-of-the-art methods of uncertainty quantification. to resolve this issue, conformal prediction (cp) or conformal inference (vovk, gammerman, and shafer 2005;papadopoulos et al. 2002) provides a compelling framework as a post-processing technique to address this challenge by offering a reliable indicator of uncertainty. rather than providing a single deterministic prediction, cp constructs a finite prediction set or uncertainty set that encompasses a plausible subset of class labels for a given unseen input data point in any pretrained classifier. this prediction set reflects the inherent uncertainty associated with the model's predictions. in addition to a point estimation of the most likely predictive probability as a measure of confidence, the size of a prediction set is considered as an indicator of the model uncertainty in classifying a new data point. larger prediction sets indicate higher model uncertainty associated with the input data. however, in case of using the prediction set size as an uncertainty indicator, the measure is not scaled to be compared with other state-ofthe-art uncertainty quantification methods.returning to our medical ai example, instead of a definite prediction of malignant melanoma (a single true label), cp offers a prediction set indicating the most likely class labels of the skin lesion with the respected probabilities, while providing a guarantee that the true label is a member of this set with a high probability. this additional information enables medical professionals to make more informed decisions, considering the potential risks and uncertainties associated with the model's predictions. cp is also fast, computationally efficient, and generally applicable to every dataset (arbitrary data distribution) and classification model (lei et al. 2018). 
nevertheless, the quantification of model uncertainty in conformal prediction for classification tasks remains an active research area, with several challenges and limitations. to the best of our knowledge, there is no scaled and reliable quantification of model uncertainty achieved by cp method. the uncertainty quantification is highly crucial when there is an intent to represent the amount of model uncertainty or perform comparative evaluations. thus, we aim to propose a novel approach to quantify the model uncertainty based on the produced prediction sets and improve the reliability and accuracy of uncertainty estimation.in this paper, we are making the following two contributions in the context of conformal prediction: (1) we investigate existing methodologies for model uncertainty estimation within conformal prediction for classification tasks, and analyze their strengths and limitations, (2) we propose a novel technique for uncertainty quantification of cp, aiming to facilitate the comparative evaluations between cpbased methods and other state-of-the-art uncertainty estimation methods. we use the formal guarantee of true label coverage (romano, sesia, and candes 2020) in the prediction set to devise a solid probabilistic theory along with certified boundaries of the model uncertainty. the proposed quantification method can enhance the accuracy and reliability of uncertainty estimation in real-world applications. by advancing the field of model uncertainty quantification in conformal prediction for classification tasks, this research endeavors to equip medical professionals with a more reliable and guaranteed method to quantify the model uncertainty and support their decision-making process.in this paper, we are making the following two contributions in the context of conformal prediction: (1) we investigate existing methodologies for model uncertainty estimation within conformal prediction for classification tasks, and analyze their strengths and limitations, (2) we propose a novel technique for uncertainty quantification of cp, aiming to facilitate the comparative evaluations between cpbased methods and other state-of-the-art uncertainty estimation methods.when constructing valid prediction sets, three distinct properties are required to be satisfied: (1) the marginal coverage property of the true label that guarantees the prediction set includes the true label with the probability of at least 1 -δ based on equation 2, (2) the set size property to reflect the desirability of a smaller size for the prediction set, and (3) the adaptivity property that necessitates the set size for unseen data is modified to represent instancewise model uncertainty, i. for instance, if δ = 0.following theorem 1, consider δ as the error level of true label coverage, q as the computed 1 -δ quantile of conformal scores over calibration data with size n, and c(x val , q) : r d ×r → 2 y as the prediction set function given the unseen validation data point (x val , y val ) ∈ x val . thus, the model uncertainty u c (x val ) associated with the validation data point x val based on the corresponding prediction set c(x val , q) can be quantified based on the following theorem: theorem 2 (conformal uncertainty quantification). let δ be the coverage error level of the true label y val , and c(x val ) be the corresponding prediction set of size m ∈ z achieved by 1 -δ quantile of calibration data with size n. 
then, the conformal model uncertainty u c (x val ) associated with x val is quantified to be 0 ≤ u c (x val ) ≤ 1, and guaranteed in the following marginal lower bound l c and upper bound h c as:. in cp, the size of the prediction set (model's outcome) is an indicator of the total model uncertainty denoted by u c which grows by increasing the size of the prediction set. now, we define pure model uncertainty as our baseline uncertainty by subtracting the probability of the only certain and desired case from the total model uncertainty u c that is when the model produces a prediction set containing only one class label (out of k possible labels) which is the true label with the probability of at least 1 -δ according to theorem 1. then, we have our heuristic notion of uncertainty as the pure model uncertainty u c ∈ r which is associated with the produced prediction set c(x val ) given x val , and scaled to be used as a baseline uncertainty to quantify the conformal model uncertainty.theorem 2 has two distinct cases with respect to m as the size of prediction set c(x val ): if m = 0, the model is fully uncertain that could not select any target label to include into the prediction set based on the computed q. otherwise, if p 1 holds, it means y val ∈ c(x val ). based on the proposed theorem 2, the model uncertainty is highly affected by two different measures: (1) the size of the produced prediction set, so that the larger set size indicates the higher model uncertainty associated with an unseen data point, and (2) the error level δ of true label coverage, so that the higher error level δ gives rise to lower value of 1 -δ which represents a lower probability of true label inclusion in the prediction set. when true label is not included in the prediction set, the model is expected to be highly uncertain in the prediction, therefore, the model shows higher uncertainty as the probability of true label inclusion in the prediction set is decreased. moreover, when the calibration set size is increased, the magnitude of uncertainty variation interval d c is significantly decreased in order to provide a tighter bound of uncertainty estimation since by having larger set of calibration data, the model can compute more accurate q as the 1-δ quantile of the conformal scores in the calibration data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/778.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/778.txt new file mode 100644 index 0000000000000000000000000000000000000000..09169a2cbf7334ac239716dfb4e481fc97f9ebcf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/778.txt @@ -0,0 +1 @@ +graphs, or networks, are prevalent in diverse domains such as social networks, protein interactions, and scientific collaboration. graph representation learning, also known as graph embedding, enables the representation of graphs using general-purpose vector representations, removing the need for task-specific feature engineering.graphs can be static, where their structure does not change over time, or dynamic, where their structure evolves over time. social networks are typically dynamic due to their constantly changing structure. representation learning on static and dynamic networks differs as static embeddings only need to capture network structure while dynamic embeddings must capture both structural and temporal aspects. 
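As a rough illustration of using the prediction-set size as a per-sample uncertainty indicator for a pretrained classifier, a split-conformal sketch in NumPy follows; it uses the common 1 − softmax-probability score and reports raw set sizes, not the certified bounds l_c and h_c of Theorem 2.

import numpy as np

def conformal_prediction_sets(cal_probs, cal_labels, test_probs, delta):
    """Split conformal prediction with nonconformity score 1 - f(x)_y.
    Returns one prediction set (array of label indices) per test point."""
    n = len(cal_labels)
    scores = 1.0 - cal_probs[np.arange(n), cal_labels]
    k = int(np.ceil((n + 1) * (1 - delta)))
    q = np.sort(scores)[min(k, n) - 1]          # 1 - delta quantile of calibration scores
    return [np.flatnonzero(1.0 - p <= q) for p in test_probs]

# larger sets signal higher model uncertainty for that input; this raw size is the
# unscaled indicator discussed above, not the paper's certified quantification.
set_sizes = lambda sets: [len(s) for s in sets]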
while static embedding methods can be applied to dynamic networks, the resulting embeddings do not capture the evolving aspect of these networks. network embedding methods are categorized by granularity, from node to graph level. node embedding is the most common method, in which nodes in a given network are represented as fixed-length vectors. while these vectors preserve different scales of proximity between the nodes, such as microscopic and structural role, they cannot capture proximity between different networks, as node representations are learned within the context of the network they occupy. notably, considerable work has been done on node embedding for dynamic graphs, which preserves not only the network structural information but also the temporal information for each node. graph-level network embedding, unlike node embedding, allows us to learn representations of entire graphs and directly compare different graphs, enabling investigation of fundamental graph ranking and retrieval problems such as the degree of similarity between graphs. graph-level embedding methods have been studied extensively in the literature, but most of them focus on static networks. however, in real-world applications, dynamic networks are ubiquitous. to the best of our knowledge, only one prior method, called tdgraphembed, has been proposed for dynamic graph-level embedding. however, this method has a major limitation in that it treats dynamic graphs as a collection of independent static graph snapshots, ignoring the interactions between them. to address this gap, we propose a novel method called the temporal backtracking random walk, which, when combined with the doc2vec algorithm, can be used for dynamic graph-level embedding. our method smoothly incorporates both graph structural and temporal information. we evaluate our method on five publicly available datasets for the task of temporal graph similarity ranking and demonstrate that it achieves state-of-the-art performance.
in this section, we review two adjacent categories of graph embedding techniques: temporal node and static graph-level embedding.temporal node embedding methods differ from static node embedding methods such as node2vec, sdne, and gaein that they incorporate historical information to preserve both structural and temporal information.given a discrete temporal graph 𝐺 = (𝑉 , 𝐸,𝑇 ), where each temporal edge (𝑢, 𝑣)𝑡 ∈ 𝐸 is directed from node 𝑢 to node 𝑣 at time 𝑡 ∈ 𝑇 , a snapshot of 𝐺 at time 𝑡 is defined as 𝐺𝑡 = (𝑉 𝑡 , 𝐸 𝑡 ), which is the graph of all edges occurring at time 𝑡.our framework consists of two parts: (1) building a multilayer graph and adopting temporal backtracking random walk on it (2) learning a doc2vec language model on the output of the modified random walk to obtain graph-level embeddings. during each step of the temporal backtracking walk, the walker can either stay in the current layer to obtain structural information or move to the previous layer to obtain historical evolving information. the temporal backtracking random walk combines the proximity information of nodes within a layer with the structural information of previous timestamps.we compare our model with three types of baselines: static graph-level embedding methods (represented by graph2vec, ugraphemb, and sub2vec), temporal node-level embedding methods (represented by node2vec aligned, sdne between our experiments and the results reported by them.we introduced a novel dynamic graph-level embedding method based on temporal backtracking random walk. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/779.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/779.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6bec224450ed4b11b1bfd01fd0d39f12bb8d8ae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/779.txt @@ -0,0 +1 @@ +autoregressive modeling has been widely employed to model various types of sequential data, including language , images , and audio . in recent years, autoregressive models have gained significant attention due to their impressive scaling properties . by training on larger datasets, increasing model sizes, and utilizing prompt engineering1 , these models can achieve outstanding results in tasks such as question answering , summarization , and image completion , among others.transformers serve as the primary model for large-scale autoregressive learning. however, these models must operate at the same frequency as the original data, resulting in high computational costs. in language modeling, this problem is partially mitigated through tokenization , where frequent co-occurring character sequences are replaced by a single token. nevertheless, models aiming to learn dependencies at the sentence level must still handle the original input data frequency. in domains with even higher-frequency inputs, such as images or sound, this issue becomes a primary bottleneck. a large enough model capable of capturing the data distribution would be too slow and memory-intensive to operate at the original data's frequency. 
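A small Python sketch of the walk described above is given below; the backtracking probability, the snapshot representation (adjacency dictionaries), and the Doc2Vec usage are assumptions made for illustration, not the authors' implementation.

import random

def temporal_backtracking_walk(snapshots, t, start, length=20, p_back=0.3):
    """Sketch of a temporal backtracking random walk on graph snapshots, where
    snapshots[t] maps each node to its neighbor list at time t. At each step the
    walker either stays in the current snapshot (structural information) or
    backtracks to the previous one (historical information)."""
    layer, node = t, start
    walk = [f"{start}@{t}"]
    for _ in range(length):
        if layer > 0 and random.random() < p_back:
            layer -= 1                              # move to the previous timestamp
        neighbors = snapshots[layer].get(node, [])
        if not neighbors:
            break
        node = random.choice(neighbors)
        walk.append(f"{node}@{layer}")
    return walk

# The walks collected for each snapshot can then be treated as a document and fed
# to gensim's Doc2Vec to obtain one graph-level embedding per timestamp, e.g.:
# from gensim.models.doc2vec import Doc2Vec, TaggedDocument
# docs = [TaggedDocument(words=sum(walks_t, []), tags=[t]) for t, walks_t in enumerate(all_walks)]
# model = Doc2Vec(docs, vector_size=128, window=5, min_count=1)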
to put this in context, gpt-4, one of the most powerful language model trained to date, has a context window of 25,000 tokens, which would barely suffice to capture the individual pixels of a 92x92 rgb image in an autoregressive manner.to address this issue, we revisit the hierarchical recurrent encoder-decoder (hred) architecture , which separates the computationally intensive high-frequency data from the more complex low-frequency interactions. this model divides the input sequence into nonoverlapping sub-sequences that are encoded independently of each other. the resulting lowerfrequency vector stream is then processed by a potentially larger model, no longer bottlenecked by the original data's frequency. finally, the output vectors are individually mapped back to the original data frequency. although subsequent downsampling approaches have been proposed in the transformer literature, most of them feature an encoder or decoder that globally handles the entire original sequence, resulting in significant computational and memory cost. by encoding and decoding sub-sequences independently, the hred model can circumvent these issues.in this paper, we modify the hred model to enhance its performance and propose a learning algorithm specifically designed for this architecture. our main contributions include:• analyzing the different components of the hred to identify which parts contribute the most to model performance.• based on these insights, we propose a modification of the hred, termed the hierarchical attention encoder-decoder (haed) architecture, that considerably improves the performance of the original model. our model replaces the recurrent neural networks in the encoder and main model by mlps and transformers, respectively.• introducing a learning algorithm for the haed, capable of learning with low-frequency targets directly, which significantly reduces compute time and consequently allows for larger models and greater amounts of training data.• based on these insights, we propose a modification of the hred, termed the hierarchical attention encoder-decoder (haed) architecture, that considerably improves the performance of the original model. our model replaces the recurrent neural networks in the encoder and main model by mlps and transformers, respectively., 2019, ren et al.a similar approach to our implicit embedding matrix learning algorithm was proposed bythat replaces the recurrent neural networks (rnn) in the encoder and the main model by feed-forward mlps and transformers, respectively. to address this, we introduce the implicit embedding matrix (iem) algorithm, that trains the model without the decoder, making predictions directly in the embedding space defined by the encoder.as a first step towards our haed model we replace the lstm in the main model of the hred by a transformer. 
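The following PyTorch sketch illustrates the hierarchical idea under stated assumptions (chunk size, dimensions, and the omission of causal masking and of the decoder that maps back to the original frequency are simplifications); it is not the authors' implementation.

import torch
import torch.nn as nn

class HierarchicalEncoder(nn.Module):
    """Sketch of the HAED-style hierarchy: chunk the sequence into non-overlapping
    sub-sequences, encode each chunk independently with an MLP, and run a
    transformer over the resulting low-frequency stream of chunk vectors."""
    def __init__(self, vocab=256, chunk=8, d_model=256, n_layers=4):
        super().__init__()
        self.chunk = chunk
        self.embed = nn.Embedding(vocab, d_model)
        self.chunk_mlp = nn.Sequential(                 # encoder: one vector per chunk
            nn.Linear(chunk * d_model, d_model), nn.GELU(), nn.Linear(d_model, d_model))
        layer = nn.TransformerEncoderLayer(d_model, nhead=8, batch_first=True)
        self.main = nn.TransformerEncoder(layer, n_layers)   # low-frequency main model

    def forward(self, tokens):                          # tokens: (batch, seq_len), seq_len % chunk == 0
        b, t = tokens.shape
        x = self.embed(tokens).view(b, t // self.chunk, self.chunk * self.embed.embedding_dim)
        z = self.chunk_mlp(x)                           # (batch, t / chunk, d_model)
        return self.main(z)                             # contextualised chunk embeddings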
to confirm this intuition, we train the attention based hred from the previous section with lstm encoders that have varying numbers of units, keeping the flops/parameters of the rest of the model fixed, and plot the resulting impact on the final model performance.we will refer to the model with a mlp encoder and transformer main model as hierarchical attention encoder decoder (haed).as with the encoder model, we want to measure the impact of increasing the decoder capacity on the overall model performance.we have presented a thorough experimental study of the different components of the hierarchical recurrent encoder decoder (hred), which we have used to propose an improved model which we term the hierarchical attention encoder decoder (haed). in section 4.1, we use the decoder and main transformer model as in the previous paragraph and only vary the number of units in the encoder. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/78.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/78.txt new file mode 100644 index 0000000000000000000000000000000000000000..e47873098a89c0bc1c69b5d865ee2fa9f2a9f20e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/78.txt @@ -0,0 +1 @@ +one main objective of machine learning is to obtain useful information from often high-dimensional data. to this end, it is a common practice to extract meaningful feature representations from original data and then process features . neural networks and kernel methods - are two of the most representative approaches to map data into feature space. in neural networks, the features are represented as the outputs of hidden neurons in the network. in contrast, the feature mapping in kernel methods is defined by the used kernel, which is used implicitly and is often infinite dimensional. while kernel approaches require much fewer parameters and can obtain good empirical performance on certain tasks , the performance significantly relies on the choice of kernels. with many attempts to investigate kernel methods , , , there still lacks a theoretical understanding of the mechanism behind kernel methods, which restricts their applications on complicated data.on the other hand, the feature extraction in deep neural networks has been studied recently by information-theoretic and statistical analyses , . for example, it was shown in that, the feature extracted by deep neural networks coincides with the most informative feature, which is essentially related to the classical hirschfeld-gebelein-rényi (hgr) maximal correlation problem - . such theoretical characterizations provide a better understanding of existing algorithms and have been shown useful in designing algorithms for multimodal learning tasks .in this paper, our goal is to characterize kernel methods from the perspective of feature subspace and reveal its connection with other learning approaches. we first introduce the associated kernel with each given feature subspace, which we coin the projection kernel, to establish a correspondence between kernel operations and geometric operations in feature subspaces. this connection allows us to study kernels methods via analyzing the corresponding feature subspaces. specifically, we propose an information-theoretic measure for projection kernels, and demonstrate that the information-theoretically optimal kernel can be constructed from the hgr maximal correlation functions, coined the maximal correlation kernel. 
we further demonstrate that the support vector machine (svm) with maximal correlation kernel can obtain the minimum prediction error, which justifies its optimality in learning tasks. our analysis also reveals connections between svm and other classification approaches including neural networks. finally, we interpret the fisher kernel, a classical kernel induced from parameterized distribution families , as a special case of maximal correlation kernels, thus demonstrating its optimality.we adopt the notation convention introduced in, and let f x {x → r} denote the feature space formed by the (one-dimensional) features of x, with the geometry defined as follows. given d-dimensional feature mapping f : x → r d , the loss for svm based on f can be written as and l * svm (f ; λ) l svm (f, w svm , b svm ; λ) denote the optimal parameters and the value of loss function, respectively. specifically, for a given kernel k , the prediction of the corresponding kernel svm is1ŷ(k ) svm (x; λ) ŷsvm (x; ν, λ), where ν is any mapping given by fact 1.therefore, given a projection kernel k , the associated subspace can be represented as {f ∈ f x : τ (f ) = f }, where τ ↔ k is the associated operator. property 3: under assumption (16), we can express the map estimation as ŷmap (x) = sgn(f * (x)) for all x ∈ x, where f * ∈ f x is the maximal correlation function of p x,y .we first consider the svm algorithm applied on a given feature representation f (x) ∈ r d , which can also be regarded as the kernel svm on kernel k (x,., we simply denote ŷsvm (x; f, λ) by ŷsvm (x; f ).as a result, for each normalized feature f with covariance matrix λ f = i, the svm loss l * svm measures the informativeness of f in inferring the label y . definition 1) obtained from k , i. in addition, we have ŷsvm (x; f ) = ŷlr (x; f ) for ǫ sufficiently small.in addition, let τ ↔ k g denote the operator associated with k g , then from property 1 we have τ (f * ) = π(f * ; g). in addition, from the orthogonality principle, we have f *τ (f * ), τ (f * ) f x = 0 and thus.y |x .since λ f * = i, we have k * (x, x ′ ) = f * (x), f * (x ′ ) , for all x, x ′ ∈ x.first, note that logistic regression can be regarded as a special case of softmax regression with the correspondences from (11) and (12), we have ŷsvm (x; f ) = ŷlr (x; f ) for ǫ sufficiently small. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/780.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/780.txt new file mode 100644 index 0000000000000000000000000000000000000000..33f627d8731971ed785d37bb3a80afb844f08575 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/780.txt @@ -0,0 +1 @@ +in the standard multi-class image classification setting, the goal is to predict one applicable label for a given input. images often contain objects of different kinds, though. so, a more realisticand challenging -setting is multi-label image classification, where the objective is to predict all applicable labels for images that may belong to multiple categories. in this setting, data annotation is much more laborious because it requires labeling every category as present/absent. this has motivated work in learning multi-label classifiers with less supervision (verelst et al., 2023), (zhou et al., 2022), (durand et al., 2019), (cour et al., 2011). 
at the limit of minimal supervision is single-positive multi-label (spml) learning (cole et al., 2021): for a given image, only one category is confirmed to be present and all others are unknown. thus, the main question in spml is how to deal with missing labels effectively. the naive approach is to assume that unknown categories are absent ("assume negative" loss an) (cole et al., 2021), but the introduction of false negatives adds noise to the labels. a better, but still simple, approach is to maximize the entropy of predicted probabilities for all unobserved labels ("entropy maximization" em loss) (zhou et al., 2022). learning from incomplete labels is not a problem unique to spml, though. in multi-class image classification, "pseudo labels" (lee et al., 2013) - predictions treated as ground truth - have been used in situations where there is a small dataset of annotated images and a large collection of unlabeled data. in that setting, one model is trained on the small annotated dataset and is then used to synthetically annotate the large collection of unlabeled data (yalniz et al., 2019). the large artificially-labeled dataset can then be used to re-train the same model (self-learning) or to train a new model with more data resources. like spml, this is a setting with limited supervision. the difference, however, is that in spml all of the datapoints are partially labeled (with a single-positive label), whereas in the typical "pseudo label" setting there is a small amount of fully annotated data and a vast amount of unlabeled data. the common goal in both, though, is to use the available data resources to recover as much supervision as possible from the unlabeled data. in this work, we adapt the ideas of "pseudo labels" to spml. algorithm 1 (pseudo multi-labels for spml). inputs: single-positive dataset {(x_n, z_n)}_{n=1}^N, threshold parameter τ ∈ [0, 1), untrained neural networks "teachernet" f(•; θ) and "studentnet" g(•; ϕ). output: trained multi-label classifier g.
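Since only the inputs and output of Algorithm 1 are reproduced above, the following PyTorch sketch fills in one plausible reading of the pseudo-label step (teacher confidences above τ promoted to positives, all other unobserved labels treated as negative); the exact update rules are assumptions, not the authors' algorithm.

import torch

def pseudo_multilabels(teacher, student, loader, tau=0.7, epochs=5, lr=1e-4):
    """Sketch of a pseudo-label recipe for SPML: the teacher is assumed to be
    already trained on the single-positive labels; its confident predictions are
    promoted to pseudo positives and the student is trained on the result."""
    opt = torch.optim.Adam(student.parameters(), lr=lr)
    bce = torch.nn.BCEWithLogitsLoss()
    for _ in range(epochs):
        for x, z in loader:                    # z: one-hot single-positive annotation
            with torch.no_grad():
                probs = torch.sigmoid(teacher(x))
            pseudo = ((probs > tau) | (z == 1)).float()   # keep the observed positive
            opt.zero_grad()
            bce(student(x), pseudo).backward()
            opt.step()
    return student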
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/781.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/781.txt new file mode 100644 index 0000000000000000000000000000000000000000..845ee4bee347664c21c5f126723e7ff384ebc971 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/781.txt @@ -0,0 +1 @@ +irregularly sampled multivariate time-series data are found in several domains spanning from bio-medical systems to the smart grid. this is primarily the outcome of two factors. the first source is the inherent system design where data generation happens at different rate. for instance, in a smart grid, different sensors sense and transmit measurements at different time scales, i.e., advanced metering infrastructure (ami) samples at 15-min interval, data acquisition (scada) sensors at 1min interval, etc. . the second factor corresponds to system malfunctions that causes frequent unavailability of data samples. for example, communication impairments in the smart grid result in loss of data, further aggravating this issue . it is important to reconcile such multi-scale intermittent measurements at a common timescale in order to enhance the situational awareness of the grid. typically, this kind of irregularly sampled data is first imputed (i.e., estimate missing values) to a uniform scale so that an effective analysis can be carried out with the processed data. to this end, several methods are being proposed in the literature such as linear interpolation , knn and multitask gaussian process , . recently, neural odes (neuodes) have shown to be a very effective framework for handling irregular sampled data - . this is primarily due to the fact that neuodes s. dahale and b. natarajan are with the electrical and computer engineering, kansas state university, manhattan, ks-66506, usa (e-mail: sddahale@ksu.edu, bala@ksu.edu). s.munikoti is with the data science and machine intelligence group, pacific northwest national laboratory, usa, (email: sai.munikoti@pnnl.gov).this material is based upon work supported by the department of energy, office of energy efficiency and renewable energy (eere), solar energy technologies office, under award number de-ee0008767. can estimate values in a continuous space (using dynamics) unlike discrete space (proportional to the number of layers) in a vanilla neural network.however, most of the data imputation frameworks solely offer a point estimate of the missing value, ignoring their uncertainties. it is crucial to provide confidence scores with prediction so that an informed decision can be made while deployed in the real-world applications. uncertainty quantification (uq) of the imputed measurements can be helpful in various ways, including (i) variance-informed state estimation where variance (confidence interval) of the imputed measurement's can be used to modulate the inputs of the estimator . for instance, high variance input is disregarded or corrected before being fed to the estimator. (ii) effective data sampling where uq helps in selecting appropriate measurements from high or low-fidelity sensors.uncertainty in deep neural network-based models such as neuodes arises due to two components. the first factor is with respect to the model parameters and it arises due to the model either being blind to some portion of the data distribution which has not been seen while it's training or due to its over-parameterization. 
there are various ways to handle this and can be quantified in a computationally efficient manner. on the other hand, the second class of uncertainty is related to the input data, also known as aleatoric uncertainty. it is inherent in nature and cannot be alleviated by any means of model engineering . modeling and quantifying these uncertainties are critical with regard to time series imputation since the sensors are often associated with noise, and one cannot cover the entire data distribution while training the models.there exist a few works in the literature that quantify uncertainty in neuodes-based frameworks, - . authors in proposed a neural jump ode framework for modeling the conditional expectation of a stochastic process in an irregularly observed time series. however, most of these works only retrieve the epistemic part and ignore the aleatoric part. it is important to incorporate the aleatoric part, especially for critical complex systems where sensor-generated data is pivotal in various operational decision-making. therefore, we propose a novel framework to quantify the uncertainties in a comprehensive manner. in this regard, we first formulate an sde-rnn framework that combines the stochastic differential equation (sde) form of neuodes and recurrent neural network (rnn) for imputation. specifically, sde is chosen since it can propagate both mean (prediction) and variances (uncertainty) along the neuodes framework in an efficient manner. furthermore, rnn helps us to capture the inputs at observed time instances, and effectively carry forward the information to future states via its memory cell. altogether, their combination offers an powerful and efficient approach to quantify and propagate both uncertainties from the input to final predictions.the main contributions of this paper are summarized below:• we formulate a novel sde-rnn which combines the principle of stochastic differential equations and neural networks to model the irregularly sampled time series. • the proposed sde-rnn approach allows us to quantify both aleatoric and epistemic uncertainty in the imputed measurements. analytical results capturing these sources of uncertainty have been derived. we derive the theoretical expressions of uncertainty propagation for gated recurrent unit (gru) model. • simulation results on the power distribution test system (ieee 37 network) demonstrate the effectiveness of the proposed approach compared to the classic baseline (rnn + mc dropout). the paper is organized as follows. section ii presents a background of neuodes with a literature review. section iii formulates our fundamental sde-rnn framework followed by the detailed process of uncertainty quantification in section iv. experiments are discussed in section v with conclusions and future work in section vi.• we formulate a novel sde-rnn which combines the principle of stochastic differential equations and neural networks to model the irregularly sampled time series.recurrent neural networks (rnn) form the first choice for modeling high dimensional, regularly sampled time series data. however, neural ode, which is a novel framework combining deep learning principles and differential equations has been found to be suitable for modeling irregularly sampled time series. neural odes can systematically accommodate continuous time series data using a technique that captures the continuous nature of hidden states. 
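As a schematic of the SDE-RNN idea, the following PyTorch sketch propagates a hidden-state mean and a diagonal variance between observation times and applies a GRU update whenever a measurement arrives; the concrete equations, and in particular how the GRU transforms the variance (Theorem 1), are not reproduced here, so every dimension, name, and update rule below is an illustrative assumption.

import torch
import torch.nn as nn

class SDERNNSketch(nn.Module):
    """Schematic SDE-RNN: between observations the hidden state drifts via a
    learned vector field while a constant diffusion term inflates a diagonal
    variance estimate; at each observation a GRU cell folds in the new input."""
    def __init__(self, x_dim=1, h_dim=32):
        super().__init__()
        self.drift = nn.Sequential(nn.Linear(h_dim, h_dim), nn.Tanh(), nn.Linear(h_dim, h_dim))
        self.log_diffusion = nn.Parameter(torch.zeros(h_dim))
        self.cell = nn.GRUCell(x_dim, h_dim)
        self.readout = nn.Linear(h_dim, x_dim)

    def forward(self, times, values, dt=0.1):
        h = torch.zeros(1, self.cell.hidden_size)
        var = torch.zeros_like(h)
        t, outputs = 0.0, []
        for t_obs, x in zip(times, values):
            while t < t_obs:                             # Euler-Maruyama-style propagation
                h = h + dt * self.drift(h)
                var = var + dt * torch.exp(self.log_diffusion) ** 2
                t += dt
            h = self.cell(x.view(1, -1), h)              # observation update (variance handling omitted)
            outputs.append((self.readout(h), var.clone()))
        return outputs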
a different route has been taken inwhere sdes are used to quantify both aleatoric and epistemic uncertainty by training the sde model based on out-of-distribution (ood) training data. neural ode exploits the continuous-time dynamics of variables from input state to final predictions, unlike a standard deep neural network which only performs a limited number of transformations depending on the number of layers chosen in the architecture. f : r m → r m and c : r m → r d are the neural ode operator and output function parameterized by neural network parameters θ and θ c , respectively.using this sde-rnn approach, we can quantify the uncertainty in the hidden and output states, as discussed in the following subsection. the sde-rnn approach propagates the uncertainty from the noisy input observations to the final outputs using the cvrnn model and the sde model. here, we refer to the cvrnn model as a modified form of rnn, that propagates the mean and uncertainty arising from the previous hidden states and the inputs to the next hidden state. the cvrnn model updates the hidden states at the time when an observation is available along with its associated uncertainty. theorem 1: transformation of uncertainty: consider a recurrent neural network represented by v(•) with neural network parameters θ v and hidden states given by hi = v( hi-1 , xi , θ v ).2) sde model: a neural sde model aims to capture the epistemic uncertainty with brownian motion and propagate it to the next time step.the cvrnn model discussed in the previous section can be any form of the recurrent neural network; for example, it could be either rnn, long short term memory (lstm), or gated recurrent unit (gru) model whose hidden states are given as,. the aleatoric uncertainty due to the sensor noise is well captured by the sde-rnn model, reflected by the uncertainty estimates at the observations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/782.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/782.txt new file mode 100644 index 0000000000000000000000000000000000000000..33e50d25f56899288dd11cdeab5e1628634e5176 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/782.txt @@ -0,0 +1 @@ +multi-label classification aims to predict the presence or absence of every category from a collection of categories of interest (tsoumakas & katakis, 2007). this is a generalization of the standard multiclass classification setting, in which the categories are mutually exclusive. multi-label classification is often more realistic than multi-class classification in domains like natural image classification where multiple categories tend to co-occur (wang et al., 2016;wei et al., 2015).the primary obstacle to training multi-label classifiers is the expense of exhaustively annotating the presence and absence of every category of interest. to mitigate this problem, cole et al. (2021) introduce the setting of single positive multi-label (spml) classification, which aims to train effective multi-label image classifiers using only one positive label per image.the spml benchmarks introduced in cole et al. (2021) are artificially generated from fully annotated multi-label classification datasets. for each training image, one positive label is chosen uniformly at random to be retained and all other labels are discarded. while this simple model is a reasonable starting point, it is unlikely to reflect realistic annotator behavior (spain & perona, 2011). 
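Generating this kind of single-positive benchmark from fully annotated data is straightforward; a NumPy sketch (with an assumed convention of −1 for unobserved labels and an integer label matrix) is:

import numpy as np

def to_single_positive(Y, seed=0):
    """Turn a fully annotated multi-label matrix Y (n x L, entries in {0, 1}) into
    a single-positive benchmark: for each image keep one positive label chosen
    uniformly at random and mark all other categories as unobserved (-1)."""
    rng = np.random.default_rng(seed)
    Z = np.full_like(Y, -1)
    for i, row in enumerate(Y):
        positives = np.flatnonzero(row)
        if positives.size:
            Z[i, rng.choice(positives)] = 1
    return Z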
if we want to apply spml in realistic settings, we need to understand how different positive label selection biases affect the performance of spml algorithms. our key contributions are as follows: (1) we extend the spml benchmarks in cole et al. (2021) by generating new label sets based on different models of annotator bias. (2) we provide the first study of the effect of label bias in spml, filling a crucial gap in the literature with significant implications for the real-world applicability of spml. the code and data are publicly available. related work. spml. cole et al. (2021) formalized the spml setting and introduced benchmarks and algorithms for the problem. subsequent work has used these benchmarks to develop new spml algorithms (kim et al., 2022; abdelfattah et al., 2023; 2022; ke et al., 2022; zhou et al., 2022; xu et al., 2022; verelst et al., 2023). we complement this work by extending the benchmarks in cole et al. (2021) and clarifying the role of label selection bias in spml. models of annotator behavior. the benchmarks in cole et al. (2021) are unlikely to reflect the behavior of real annotators, who are more likely to mention an object if it is larger or closer to the center of the frame (berg et al., 2012) or may neglect to mention it if it is "too obvious" (spain & perona, 2011). we take inspiration from this literature for our models of annotator behavior. let (x, y) denote a training example, where x ∈ r^p is an image and y ∈ {0, 1}^l is a label vector. table 1: test set mean average precision (map) results for different bias models and training losses (an = "assume negative" loss, an-ls = "assume negative" loss with label smoothing, role = regularized online label estimation, em = entropy maximization); columns correspond to the bias models p_uniform (1), p_size (2), p_location (3) and p_semantic (4), rows to the losses, e.g. l_an (cole et al., 2021); the numeric entries are not preserved in this extract. this provides some evidence that the standard p_uniform benchmarks are a reasonable surrogate for more realistic biases that might be encountered in practice. (iii) different losses have different sensitivities to label bias. these observations suggest that spml algorithm rankings are roughly stable under different label biases, but the performance under p_uniform tends to overestimate the performance under other biases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/783.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/783.txt new file mode 100644 index 0000000000000000000000000000000000000000..44d443748b053a7caaa27267179803f83a9d9883 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/783.txt @@ -0,0 +1 @@ +in recent years, graph neural networks (gnns) have gained increasing popularity. gnns are a class of deep learning architectures that leverage the representation of graph-structured data by incorporating the structure into the embedding.
an eminent gnn realization is the graph attention network (gat) , which performs attentional message-passing. similar to the attention mechanism for sequential data, gats update node features of a graph by weighting the features of its neighboring nodes using assigned attention scores. if designed and trained properly, the attention mechanism can improve a network's robustness by attending to only important nodes in graph-structured data . due to the inherent selection characteristic of the attention mechanism, several works claim that high attention scores indicate highly relevant information and conclude a certain interpretability . however, recent works on sequential data using the attention mechanism have questioned this type of interpretability . results suggest that the input-depended computation of attention matrices is less important than typically thought. while attention mechanisms have shown to be very powerful architectural components, there is dissent about how much they actually learn to attend and select relevant information. in general, the interpretability of data-driven architectures is a significant research topic .while most state-of-the-art prediction methods in the automotive field are based on conventional machine learning approaches, graph-based approaches have become very popular . representing a traffic scenario as graph-structured data allows to include relational information, which gnn architectures are able to inherently incorporate into the embedding. particularly, the ability of gats to weigh different relations context-adaptively seems to be very beneficial and could potentially identify relevant inter-dependencies . beside the very prominent example of graph-based attentional aggregation in vectornet , various works use gats to model high-level interactions of agent trajectories and/or map features . in comparison to other application fields like, e. g., brain analysis, the graph structures in automotive applications are rather small and sparse. as shown in this work, however, especially small and sparse graph architectures are likely to hinder appropriate parameter optimization in the gat such that even simple graph problems cannot be solved reliably. to be aware of these constrains in gats and know how to mitigate these effects is therefore of great importance. with focus on the application in the automotive domain, this work identifies key weaknesses of one of the most relevant gat realizations, namely gatv2 , and proposes architectural changes to surpass limitations. experiments show, that the model adaptions improve performance and reinforce robustness to parameter initialization. additionally, the attention assignment of the gat is investigated with regard to (w. r .t.) its interpretability as relevance indication. in order to evaluate if the attention mechanism learns to generalize relevant dependencies within a graph structure, ground truth data of the attention scores are required. in real-world datasets, however, this information is hardly accessible. such annotations are extremely hard to define and, if possible, require expensive labeling procedures. for this work, therefore, a synthetic graph dataset is generated. the simulated dataset allows to evaluate the attention mechanism in a controlled environment.contribution. this work contributes towards a better understanding of graph-based attentional information aggregation. the main contributions are as follows:• identification of potential pitfalls of gatv2 with focus on automotive applications. 
similar to the attention mechanism for sequential data, gats update node features of a graph by weighting the features of its neighboring nodes using assigned attention scores. furthermore, it has been shown that the popular gat architecture proposed inonly performs a limited kind of attention, where the ranking of the attention scores is unconditional on the query node. knyazev et al. the static form of gat is only able to perform a limited kind of attention, where the ranking of the attention scores is unconditional on the query node. under the assumption that attention scores indicate relevance, in such scenarios, identical attention scores should be assigned to the corresponding query node i and neighboring node j. nevertheless, since the attention scores are computed based on non-injective operations, the same attention scores α ii = α ij for node i and j can theoretically be induced even if h i ̸ = h j . if the parameter θ r is not learned properly, the underlying attention mechanism assigns attention scores unconditioned on the query node i and expressiveness is limited.to prove the proposition that the learning process is sensitive to initialization, the gradient b θ r for the parameter θ r = , composed of the weight θ rw and bias θ r b , is computed. 2) adapt update function: instead of introducing a new parameter θ n in the update function as done in gat-θ n , θ r can be used to transform the query node such that. in fact, using parameter θ r in the update function is reasonable since this maps the node features into the same representation space that the attention score assignment is based on. in addition to gatv2, model variants based on the proposed architectural adaptions, namely gat-θ r , gat-θ n , gat-θ + r and gat-θ + n , are evaluated. it holds that arg max j r(h i , h j ) = arg max j r(h j ) and subsequently, for selecting the relevant neighboring node the gat parameter θ r doesn't have to be optimized.0 to the most relevant node in more than 80 % of the samples, low attention scores still occur. also the adaptions made in gat-θ r , gat-θ + r benefit the selection accuracy as well as regression task performance. as can be seen in fig. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/784.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/784.txt new file mode 100644 index 0000000000000000000000000000000000000000..7130ca6c485a3c6db943a7dd6c98cbfdab879102 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/784.txt @@ -0,0 +1 @@ +recent years have seen reinforcement learning used in a variety of multi-agent systems. however, a rigorous understanding of how standard methods in reinforcement learning perform in a multi-agent setting with limited communication is only beginning to be available.one of the most fundamental problems in reinforcement learning is policy evaluation, and one of the most basic policy evaluation algorithms is temporal difference (td) learning, originally proposed in . td learning works by updating a value function from differences in predictions over a succession of steps in the underlying markov decision process (mdp).developments in the field of multi-agent reinforcement learning have led to an increased interest in decentralizing td methods, which is the subject of this paper. we will consider a simple model where n agents all have access to their own copy of the same mdp. 
naturally, the agents can simply ignore each other and run any policy evaluation method without communication. however, this ignores the possibility that agents can benefit from mixing local computations by each agent with inter-agent interactions. our goal will be to quantify how much td methods can benefit from this. we show a version of a "linear speedup" phenomenon: under a number of assumptions, we show that the convergence bounds of a distributed algorithm with n agents are a factor of n smaller than the corresponding convergence-time bounds associated with a centralized version. we next introduce the update rule of the classical temporal difference method with linear function approximation v θ , a linear function of θ . more formally, each agent has the same 6-tuple (s, v, a, p, r, γ); at time t, agent v will be in a state s v (t); it will apply action a v (t) ∈ a with probability µ(a v (t)|s v (t)); then agent v moves to the next state s ′ v (t), with the transitions of all agents being independent of each other; finally, agent v gets a reward r v (t) = r(s v (t), a v (t), s ′ v (t)). along these lines, we propose our main method below as algorithm 1: each agent runs td(0) locally without any communication, and, at the end, the agents simply average the results. in the more common "nearest neighbor" model, where the agents are connected over an undirected graph and nodes know the total number of nodes n, it is possible to find an ε-approximation of the average in o(n log(1/ε)) time using the average consensus algorithm from. if knowledge of the number of nodes is not available, and the communication graph is further time-varying, it is possible to do the same in o(n 2 log(1/ε)) using the average consensus algorithm from. for centralized td(0), convergence bounds generally scale both with the distance to the initial solution and with the variance of the temporal difference error with average reward, with each s v (t) sampled from the stationary distribution π, and r v (t) being the reward and s ′ v (t) being the next state when the action is taken from the policy µ. to summarize: parts (b) and (c) show that, when the number of iterations is large enough, we can divide the variance term by n as a consequence of the parallelism among n agents. the explanation for this is that in parts (a) and (c), the variance of the temporal difference error dominates the convergence rate, while in part (b) this is not the case. thus, in theorem 1(a) we need only a constant error in the averaging step, while in theorem 1(b) and 1(c) we need an error rate proportional to a power of 1/t . since all average consensus methods previously discussed compute an ε-approximation to average consensus in o(log 1/ε) steps (treating all other variables as constants), this means that step 10 in our method requires us to run a distributed average consensus method for at most o(log t ) steps (treating all variables except t as constants), as previously claimed. since each agent in the system executes the classical td(0) at time t for t ∈ n 0 , then by part (c) of theorem 2 and lemma 1 in, for v ∈ v, we have the stated bound. the graphs for our method show the td error at each iteration if we stopped the method and ran the average consensus to average the estimates across the network.
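the "run td(0) locally, then average" scheme of algorithm 1 can be sketched in a few lines of numpy. the random mdp, feature map, step size, and the exact averaging at the end (standing in for an average consensus round) are all illustrative assumptions, not the paper's experimental setup.

import numpy as np

rng = np.random.default_rng(1)
S, d, N, T = 5, 3, 4, 2000            # states, feature dim, agents, local TD steps
P = rng.dirichlet(np.ones(S), size=S) # transition matrix P(s'|s) under the fixed policy
r = rng.normal(size=S)                # expected reward per state
phi = rng.normal(size=(S, d))         # features: V_theta(s) = phi(s)^T theta
gamma, alpha = 0.9, 0.05

def local_td0(seed):
    """One agent runs classical TD(0) on its own copy of the MDP, no communication."""
    g = np.random.default_rng(seed)
    theta, s = np.zeros(d), 0
    for _ in range(T):
        s_next = g.choice(S, p=P[s])
        delta = r[s] + gamma * phi[s_next] @ theta - phi[s] @ theta  # TD error
        theta += alpha * delta * phi[s]
        s = s_next
    return theta

thetas = np.array([local_td0(k) for k in range(N)])
theta_avg = thetas.mean(axis=0)  # final step: exact average, emulating an average consensus round
print(theta_avg)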
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/785.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/785.txt new file mode 100644 index 0000000000000000000000000000000000000000..9ddb2912b078bc76e2e926fa4f07844f1cad3a16 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/785.txt @@ -0,0 +1 @@ +reinforcement learning (rl) has achieved significant empirical success in the online setting, where the agent continuously interacts with the environment to collect data and improve its performance. however, online exploration is costly and risky in many applications, such as healthcare and autonomous driving , in which case it is preferable to learn from a pre-collected observational dataset from doctors or human drivers using their own policies. due to lack of on-policy interaction with the environment, offline rl faces the fundamental challenge of distribution shift . a standard approach for handling distribution shift is importance sampling . more sophisticated approaches have been proposed to alleviate the high variance of importance sampling . recent works consider estimating the state marginal importance ratio, a more tractable problem.existing work on offline rl requires the dataset to have sufficient coverage. a standard measure for coverage is the concentrability coefficient : c π = max s,a d π (s,a) ρ(s,a) , which is the ratio between the stateaction occupancy measure of a policy π of interest and the (empirical) occupancy measure ρ of the behavior policy generating the offline dataset. however, this can be restrictive as the support of ρ must contain that of d π in order for c π to be finite. earlier work such as the fitted q-iteration (fqi) algorithm requires full coverage, i.e. c π < ∞ for all policies π. more recent works requires a more relaxed, partial coverage condition c π * < ∞ with π * being optimal policy. partial coverage is still a fairly strong requirement: the behavior policy must visit every state the optimal policy would visit, and take every action the optimal policy would take.in this paper, we seek to relax the coverage condition for offline policy evaluation in settings where the markov decision process (mdp) has a latent low-rank structure. similarly to , we view the q function as a matrix and exploit its low-rank structure to infer the entries that were not observed in the offline data. unlike typical results from the low-rank matrix completion literature, our setting requires completing the matrix under non-uniform sampling, as in ; moreover, the error is evaluated under a different distribution or weighted norm, leading to the fundamental challenge of distribution shift. by leveraging techniques from weighted and non-uniform matrix completion, we develop a new offline policy evaluation algorithm, which alternates between q iteration and matrix estimation. for both the infinite and finite sample settings, we show that the evaluation error can be bounded in terms of a novel discrepancy measure between the behavior and target policies. in contrast to the standard concentrability coefficient, our discrepancy measure may remain finite even when the behavior policy does not cover the support of the target policy. we present a concrete example where the concentrability coefficient is infinite but our method achieves a meaningful error bound. 
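the coverage measure defined above is straightforward to compute when the two occupancy measures are known. the toy numbers below are purely illustrative and show how c π becomes infinite as soon as the behavior data miss part of the target policy's support.

import numpy as np

def concentrability(d_pi, rho):
    """c_pi = max_{s,a} d_pi(s,a) / rho(s,a); infinite when the target policy
    visits a state-action pair the behavior occupancy never covers."""
    d_pi, rho = np.asarray(d_pi, float), np.asarray(rho, float)
    if np.any((d_pi > 0) & (rho == 0)):
        return np.inf
    ratio = np.zeros_like(d_pi)
    np.divide(d_pi, rho, out=ratio, where=rho > 0)
    return ratio.max()

# toy occupancy measures over a 2x2 (state, action) grid
d_pi = np.array([[0.5, 0.0], [0.25, 0.25]])
rho  = np.array([[0.25, 0.25], [0.25, 0.25]])
print(concentrability(d_pi, rho))          # 2.0: full coverage, finite coefficient
rho_partial = np.array([[0.5, 0.5], [0.0, 0.0]])
print(concentrability(d_pi, rho_partial))  # inf: support of d_pi not covered by rho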
building on the above evaluation algorithm, we further design an offline policy optimization algorithm with provable performance guarantees.there are several challenges that arise when we borrow ideas from low-rank matrix estimation to offline rl. classical matrix estimation results require two assumptions that are hard to satisfy in mdp. first, independence is assumed between the sampling process and the observation noise. this is clearly not true for mdps, where observation noise is intertwined with the sampling. for example, if a state-action pair is sampled more frequently, the empirical observations (e.g., transition frequencies) are bound to be less noisy than others sampled less often. second, sampling in matrix estimation typically requires each entry to have a non-zero probability of being observed. sampling in mdps is very different: only entries on the support of the sampling distribution, which is determined by the behavior policy, can be sampled; those off the support have a zero observation probability. we note that various recent works attempt to relax the aforementioned assumptions to make matrix estimation more applicable to real-world sequential decision-making problems. for example, the paper allows for some dependence between the noise and sampling pattern, and the smallest sampling probability can be zero. their algorithm, which involves finding a maximum biclique, works best with datasets with a specific restrictive structure, which is often not present in offline rl. our goal in this paper is to derive a performance guarantee for a more general class of sampling patterns. a standard measure for coverage is the concentrability coefficient: c π = max s,a d π (s,a) ρ(s,a) , which is the ratio between the stateaction occupancy measure of a policy π of interest and the (empirical) occupancy measure ρ of the behavior policy generating the offline dataset. unlike typical results from the low-rank matrix completion literature, our setting requires completing the matrix under non-uniform sampling, as in; moreover, the error is evaluated under a different distribution or weighted norm, leading to the fundamental challenge of distribution shift. by leveraging techniques from weighted and non-uniform matrix completion, we develop a new offline policy evaluation algorithm, which alternates between q iteration and matrix estimation. the distribution shift that arises in the infinite-sample setting can be attributed to the difference in support, which is precisely reflected in our proposed distribution discrepancy in definition 1 and the corresponding error bound in theorem 1.in the finite-sample setting, we have a noisy and unbiased estimate p t (•|s, a) of the true transition probability p t (•|s, a) for (s, a) ∈ supp( d π β t ), where d π β t denotes the empirical data distribution of k independent samples from the true distribution d π β t . in particular, the finite-sample distribution shift depends not only on the difference in support, but also the difference in the specific distributions, which will be reflected in our proposed distribution discrepancy in definition 2 and the subsequent error bound in theorem 2.for a matrix m ∈ r n×m , let m * denote its nuclear norm (sum of singular values), m op its operator norm (maximum singular value), m ∞ = max i,j |m ij | its entrywise ℓ ∞ norm, and supp(m ) = {(i, j) : m ij = 0} its support. 
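to make the notation concrete, the snippet below computes the matrix norms just defined and uses a plain truncated svd with inverse-propensity scaling as a stand-in for the matrix-estimation subroutine me(•). the actual algorithm uses a weighted, non-uniform matrix-completion step, so this is only a hedged sketch on toy data.

import numpy as np

def matrix_norms(M):
    """nuclear norm (sum of singular values), operator norm (largest singular value),
    entrywise l_inf norm, and support, as defined in the text."""
    s = np.linalg.svd(M, compute_uv=False)
    return {"nuclear": s.sum(), "operator": s.max(),
            "linf": np.abs(M).max(), "support": list(zip(*np.nonzero(M)))}

def truncated_svd_estimate(Z, rank):
    """simple stand-in for ME(.): project the reweighted observed table onto rank-`rank` matrices."""
    U, s, Vt = np.linalg.svd(Z, full_matrices=False)
    return (U[:, :rank] * s[:rank]) @ Vt[:rank]

rng = np.random.default_rng(0)
Q_true = rng.normal(size=(6, 1)) @ rng.normal(size=(1, 4))     # rank-1 "Q matrix"
p_obs = 0.6
mask = rng.random((6, 4)) < p_obs                              # entries covered by behavior data
Z = np.where(mask, Q_true + 0.1 * rng.normal(size=(6, 4)), 0.0) / p_obs  # inverse-propensity scaling
print(matrix_norms(Q_true)["operator"])
print(np.abs(truncated_svd_estimate(Z, 1) - Q_true).max())     # off-support entries get filled in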
the operator discrepancy between two probability distributions p, q ∈ ∆(s × a) is defined as dis(p, q) := min g -q op : g ∈ ∆(s × a), supp(g) ⊆ supp(p) .note that dis(p, q) ≤ p -q op is always finite, and dis(p, q) = 0 if and only if supp(q) ⊆ supp(p). therefore, dis(p, q) measures the distribution shift between p and q in terms of preserving the expectation of low-rank matrices. note that dis(p, q) only depends on the support of p: if supp(p) = supp(p ′ ), then dis(p, q) = dis(p ′ , q) for all q. for instance, if p is the uniform distribution on s × a, then g * = q and hence dis(p, q) = 0 for all q. the algorithm also takes as input the target policy π θ , the initial state distribution µ 1 , weight matrices (ρ t ) t∈ , and a matrix estimation algorithm me(•) which will be specified in(7)and (9) for the infinite-sample and finite-sample settings, respectively.subsequently, to infer the q values off support, the algorithm uses the low-rank matrix estimation subroutine, me(•), which takes as input the weight matrix ρ t and the empirical estimates z t . the finite-sample distribution shift is reflected in the term dis(d π β t , d π θ t ), which is always finite, given any behavior policy π β and target policy π θ , in contrast to the concentrability coefficient c π = max s,a d π (s,a) ρ(s,a) . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/786.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/786.txt new file mode 100644 index 0000000000000000000000000000000000000000..f638e73790df0f25ba58e905c8faac55a931cf01 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/786.txt @@ -0,0 +1 @@ +congenital heart disease occurs when there are problems with the early development of the heart's structure. the detection of heart murmurs (sounds made by turbulent blood flow through the heart) and other noises with this technique can indicate structural defects in the heart. the analysis of signals in early life may therefore provide a rapid and non-invasive screening test for the presence of cardiac structural defects, enabling early diagnosis and intervention .in this work, we present our contribution to the heart murmur classification task from the 2022 george b. moody physionet challenge . the aim was to design an open-source algorithm to classify the presence, absence, or unknown cases of heart murmurs from heart sound recordings 1 . our contributions are threefold: (i) we design and implement two deep learning modelling approaches, one trained purely on two-dimensional representations of data (spectrograms) derived from the heart sound recordings, and a second that additionally utilises a patient's demographic data and features derived directly from the sound recordings; (ii) we compare the relative contribution of different data modalities to heart murmur classification; and (iii) discuss potential mechanisms and implications for the observed results. our contributions are threefold: (i) we design and implement two deep learning modelling approaches, one trained purely on two-dimensional representations of data (spectrograms) derived from the heart sound recordings, and a second that additionally utilises a patient's demographic data and features derived directly from the sound recordings; (ii) we compare the relative contribution of different data modalities to heart murmur classification; and (iii) discuss potential mechanisms and implications for the observed results. 
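since both models operate on two-dimensional spectrograms of the heart sound recordings, a minimal sketch of that preprocessing step is shown below. the synthetic waveform, sampling rate, and stft parameters are illustrative assumptions rather than the challenge pipeline.

import numpy as np
from scipy.signal import spectrogram

# synthetic stand-in for a phonocardiogram segment (4 kHz sampling, 3 s)
fs = 4000
t = np.arange(0, 3.0, 1.0 / fs)
heart_sound = np.sin(2 * np.pi * 40 * t) * (np.sin(2 * np.pi * 1.2 * t) > 0.9) \
              + 0.05 * np.random.default_rng(0).normal(size=t.size)

# 2-D time-frequency representation of the recording
freqs, times, Sxx = spectrogram(heart_sound, fs=fs, nperseg=256, noverlap=128)
log_spec = 10 * np.log10(Sxx + 1e-10)   # log power, the usual scale for a network input
print(log_spec.shape)                   # (frequency bins, time frames)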
bayesian neural networks provide strong performance for supervised classification tasks and an estimate of the uncertainty in a classification, both of which are desirable for real-data tasks such as this challenge. figure 3 is a schematic diagram of the data preparation and the two models considered in this paper, the dual bayesian resnet (dbres) and the dbres with demographic data and signal feature integration via xgboost. there are three major components to the models: classifying the individual spectrograms, aggregating these classifications, and integrating the demographic data and signal features via xgboost. the ternary murmur classification is split into two binary classifications: present versus unknown or absent, and unknown versus present or absent. the individual spectrogram classifications are aggregated by first taking the arithmetic mean of the output from the present versus unknown or absent resnet50; if this indicates present, the murmur is classified as present. otherwise, the arithmetic mean of the output from the unknown versus present or absent resnet50 is taken: if this averaged output classifies the patient's murmur as unknown, it is classified as unknown, else the patient's murmur is classified as absent. this choice of model structure and classification aggregation was chosen to prioritise the accuracy of present classifications, aligning with the priority in the murmur challenge score (1). the second model considered in this paper integrates the output from the dbres with the patient's demographic data and extracted signal features using xgboost. the data contain mainly children and are highly unbalanced, with murmurs being absent in 74% of patients, present in 19% of patients, and unknown in 7% of patients, as shown in table 1. table 2 provides the per-class accuracy, overall accuracy, and the murmur challenge score for dbres and dbres with xgboost integration when evaluated on our held-out subset of the training set. in this work we proposed and evaluated two bayesian deep learning approaches to classifying murmurs as present, absent, or unknown from heart sound recordings and demographic data. the first approach, dbres, implements two binary bayesian resnet50 networks, which classify murmurs in segmented spectrograms of heart sound recordings. the second approach combines the output from dbres with the extracted signal features and demographic data. the results in table 2 show that spectrograms are a good representation of the data, and when combined with a resnet provide the majority of the predictive power. the results demonstrate that the architecture of dbres prioritises the accuracy on present, then on unknown, and then on absent cases, without using a weighted loss function. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/787.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/787.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f11df392a5478a207bf45acd23b6b1942e8037e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/787.txt @@ -0,0 +1 @@ +value iteration (vi) is foundational to the theory and practice of modern dynamic programming (dp) and reinforcement learning (rl). it is well known that when a discount factor γ < 1 is used, (exact) vi is a contractive iteration in the ℓ ∞ -norm and therefore converges. the progress of vi is measured by the bellman error in practice (as the distance to the fixed point is not computable), and much prior work has been dedicated to analyzing the rates of convergence of vi and its variants.
surprisingly, however, the optimal rate in terms of bellman error for the vi setup was not known, and finding a general acceleration mechanism has been an open problem. the classical o(γ k )-rate of vi is inadequate as many practical setups use γ ≈ 1 or γ = 1 for the discount factor. (not to mention that vi may not converge when γ = 1.) moreover, most prior works on accelerating vi focused on the bellman consistency operator (policy evaluation) as its linearity allows eigenvalue analyses, but the bellman optimality operator (control) is the more relevant object in modern rl.contribution. in this paper, we present the first accelerated vi for both the bellman consistency and optimality operators. our method, called anc-vi, is based on an "anchoring" mechanism (distinct from nesterov's acceleration), and it reduces the bellman error faster than standard vi. in particular, anc-vi exhibits a o(1/k)-rate for γ ≈ 1 or even γ = 1, while standard vi has rate o(1) for γ ≥ 1 -1/k, where k is the iteration count. we also provide a complexity lower bound matching the upper bound up to a constant factor of 4, thereby establishing optimality of the accelerated rate of anc-vi. finally, we show that the anchoring mechanism provides the same benefit in the approximate vi and gauss-seidel vi setups as well.where inequality comes from second inequality in lemma 9 with α = β k , u = u k , ũ = u k-1 , and previously defined ū .where the second inequality is from the fact that u 0 ≥ t ⋆ u 0 implies t ⋆ u 0 ≥ u ⋆ by lemma 5 and u ⋆ ≥ û ⋆ by lemma 3.where inequality comes from second inequality in lemma 9 with α = β k , u = u k , ũ = u k-1 , and previously defined ū .otherwise, if u 0 ≥ t u 0 , u k ≥ t u k by lemma 5.where inequality comes from lemma 8 with α = 1, u = u 0 , ū = u 0 -u ⋆ , and let ū be the entire right hand side of inequality.where inequality comes from lemma 8 with α = β k , u = u k , ũ = u k-1 , and previously defined ū .where inequality comes from lemma 9 with α = 1, u = u 0 , ū = u 0 -û ⋆ , and let ū be the entire right hand side of inequality.where inequality comes from lemma 9 with α = β k , u = u k , ũ = u k-1 , and previously defined ū now, we prove the first rate in theorem 6.where the second inequality is from the (2β 1 -1)(u 0 -u ⋆ ) ≤ 0, and first inequality comes from lemma 8 with α = 1, u = u 0 , ū = u 0 -u ⋆ , and let ū be the entire right hand side of inequality.where the first inequality comes from lemma 8 with α = β k , u = u k , ũ = u k-1 , and previously defined ū .where inequality comes from lemma 9 with α = β k , u = u k , ũ = u k-1 , and previously defined ū .where the first inequality comes from lemma 19 with α = β k , u = u k , ũ = u k-1 , and second inequality comes from nonnegativeness of p k gs .where the first inequality comes from lemma 20 with α = β k , u = u k , ũ = u k-1 , and nonnegativeness of pk gs . if u 0 ≤ t ⋆ gs u 0 , then u 0 -u ⋆ ≤ 0 and u k ≤ t ⋆ gs u k by lemma 22.otherwise, if u 0 ≥ t ⋆ gs u 0 , u k ≥ t ⋆ gs u k and u 0 ≥ u ⋆ ≥ û ⋆ by lemma 22 and 3. 
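the anchoring mechanism can be illustrated on a tabular toy mdp: instead of iterating the bellman optimality operator alone, each anchored step mixes the starting point u 0 back in with a diminishing weight. the random mdp and the simple 1/(k+1) anchor schedule below are illustrative assumptions, not the exact coefficients analyzed in the paper.

import numpy as np

rng = np.random.default_rng(0)
S, A, gamma = 20, 4, 0.99
P = rng.dirichlet(np.ones(S), size=(S, A))    # P[s, a, s']
R = rng.random((S, A))

def bellman_opt(V):
    """Bellman optimality operator T* for a tabular MDP."""
    return np.max(R + gamma * P @ V, axis=1)

V_vi = V_anc = V0 = np.zeros(S)
for k in range(1, 201):
    V_vi = bellman_opt(V_vi)                              # standard VI
    beta = 1.0 / (k + 1)                                  # illustrative anchor weight
    V_anc = beta * V0 + (1 - beta) * bellman_opt(V_anc)   # anchored update pulled toward V0

print(np.max(np.abs(bellman_opt(V_vi) - V_vi)),           # Bellman errors after 200 iterations
      np.max(np.abs(bellman_opt(V_anc) - V_anc)))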
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/788.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/788.txt new file mode 100644 index 0000000000000000000000000000000000000000..7b0ee3dc0fb45f312120a5a06f18577f57a59944 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/788.txt @@ -0,0 +1 @@ +in this paper, we consider the sample complexity of learning function classes, where each function is a composition of one or more transformations given by x → f (w x) , where x is a vector, w is a parameter matrix, and f is some fixed lipschitz function. a natural example is vanilla feed-forward neural networks, where each such transformation corresponds to a layer with weight matrix w and some activation function f . a second natural example are vector-valued linear predictors (e.g., for multi-class problems), where w is the predictor matrix and f corresponds to some loss function. a special case of the above are scalar-valued linear predictors (composed with some scalar loss or nonlinearity f ), namely x → f (w ⊤ x), whose sample complexity is extremely well-studied. however, we are interested in the more general case of matrix-valued w , which (as we shall see) is far less understood.clearly, in order for learning to be possible, we must impose some constraints on the size of the function class. one possibility is to bound the number of parameters (i.e., the dimensions of the matrix w), in which case learnability follows from standard vc-dimension or covering number arguments (see anthony and bartlett ). however, an important thread in statistical learning theory is understanding whether bounds on the number of parameters can be replaced by bounds on the magnitude of the weights -say, a bound on some norm of w . for example, consider the class of scalar-valued linear predictors of the form {x → w ⊤ x : w, x ∈ r d , w ≤ b} and inputs ||x|| ≤ 1, where || • || is the euclidean norm. for this class, it is well-known that the sample complexity required to achieve excess error ǫ (w.r.t. lipschitz losses) scales as o(b 2 /ǫ 2 ), independent of the number of parameters d (e.g., bartlett and mendelson , shalev-shwartz and ben-david ). moreover, the same bound holds when we replace w ⊤ x by f (w ⊤ x) for some 1-lipschitz function f . therefore, it is natural to ask whether similar sizeindependent bounds can be obtained when w is a matrix, as described above. this question is the focus of our paper.when studying the matrix case, there are two complicating factors: the first is that there are many possible generalizations of the euclidean norm for matrices (namely, matrix norms which reduce to the euclidean norm in the case of vectors), so it is not obvious which one to study. a second is that rather than constraining the norm of w , it is increasingly common in recent years to constrain the distance to some fixed reference matrix w 0 , capturing the standard practice of non-zero random initialization (see, e.g., bartlett et al. ). following a line of recent works in the context of neural networks (e.g., vardi et al. , daniely andgranot ), we will be mainly interested in the case where we bound the spectral norm || • || of w 0 , and the distance of w from w 0 in the frobenius norm || • || f , resulting in function classes of the form(1)for some lipschitz, possibly non-linear function f and a fixed w 0 of bounded spectral norm. 
this is a natural class to consider, as we know that spectral norm control is necessary (but insufficient) for finite sample complexity guarantees (see, e.g., golowich et al. ), whereas controlling the (larger) frobenius norm is sufficient in many cases. moreover, the frobenius norm (which is simply the euclidean norm of all matrix entries) is the natural metric to measure distance from initialization when considering standard gradient methods, and also arises naturally when studying the implicit bias of such methods (see lyu and li ). as to w 0 , we note that in the case of scalar-valued linear predictors (where w, w 0 are vectors), the sample complexity is not affected1 by w 0 . this is intuitive, since the function class corresponds to a ball of radius b in parameter space, and w 0 affects the location of the ball but not its size. a similar weak dependence on w 0 is also known to occur in other settings that were studied (e.g., bartlett et al. ).in this paper, we provide several new contributions on the size-independent sample complexity of this and related function classes, in several directions.in the first part of the paper (section 3), we consider function classes as in eq. ( 1), without further assumptions on f besides being lipschitz, and assuming x has a bounded euclidean norm. as mentioned above, this is a very natural class, corresponding (for example) to vector-valued linear predictors with generic lipschitz losses, or neural networks composed of a single layer and some general lipschitz activation. in this setting, we make the following contributions:• in subsection 3.1 we study the case of w 0 = 0, and prove that the size-independent sample complexity (up to some accuracy ǫ) is both upper and lower bounded by 2 θ(b 2 /ǫ 2 ) . this is unusual and perhaps surprising, as it implies that this function class does enjoy a finite, size-independent sample complexity bound, but the dependence on the problem parameters b, ǫ are exponential. this is in very sharp contrast to the scalar-valued case, where the sample complexity is just o(b 2 /ǫ 2 ) as described earlier. moreover, and again perhaps unexpectedly, this sample complexity remains the same even if we consider the much larger function class of all bounded lipschitz functions, composed with all norm-bounded linear functions (as opposed to having a single fixed lipschitz function).• building on the result above, we prove a size-independent sample complexity upper bound for deep feed-forward neural networks, which depends only on the frobenius norm of the first layer, and the product of the spectral norms of the other layers. in particular, it has no dependence whatsoever on the network depth, width or any other matrix norm constraints, unlike previous works in this setting.• in subsection 3.2, we turn to consider the case of w 0 = 0, and ask if it is possible to achieve similar size-independent sample complexity guarantees. perhaps unexpectedly, we show that the answer is no, even for w 0 with very small spectral norm. again, this is in sharp qualitative contrast to the scalar-valued case and other settings in the literature involving a w 0 term, where the choice of w 0 does not strongly affect the bounds.• in subsection 3.3, we show that the negative result above yields a new construction of a convex linear prediction problem which is learnable (via stochastic gradient descent), but where uniform convergence provably does not hold. 
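for the scalar-valued linear class mentioned above, the supremum inside the empirical rademacher complexity has a closed form, which makes the o(b/√m) behaviour easy to check numerically. the monte-carlo estimator below is a generic sketch, not code from the paper.

import numpy as np

def empirical_rademacher_linear(X, B, n_draws=2000, seed=0):
    """Monte Carlo estimate of the empirical Rademacher complexity of
    {x -> w^T x : ||w|| <= B}.  For this class the supremum has a closed form:
        sup_{||w||<=B} (1/m) sum_i sigma_i w^T x_i = (B/m) * || sum_i sigma_i x_i ||.
    """
    rng = np.random.default_rng(seed)
    m = X.shape[0]
    vals = []
    for _ in range(n_draws):
        sigma = rng.choice([-1.0, 1.0], size=m)
        vals.append(B / m * np.linalg.norm(sigma @ X))
    return float(np.mean(vals))

rng = np.random.default_rng(1)
for m in (100, 400, 1600):
    X = rng.normal(size=(m, 20))
    X /= np.maximum(np.linalg.norm(X, axis=1, keepdims=True), 1.0)   # enforce ||x|| <= 1
    print(m, empirical_rademacher_linear(X, B=1.0))                  # shrinks roughly like O(B/sqrt(m))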
this adds to a well-established line of works in statistical learning theory, studying when uniform convergence is provably unnecessary for distribution-free learnability (e.g., shalev-shwartz et al. , daniely et al. , feldman , see also nagarajan and kolter , glasgow et al. in a somewhat different direction).in the second part of our paper (section 4), we turn to a different and more specific choice of the function f , considering one-hidden-layer neural networks with activation applied element-wise:with weight matrix w ∈ r n×d , weight vector u ∈ r n , and a fixed (generally non-linear) lipschitz activation function σ(•). as before, we focus on an euclidean setting, where x and u has a bounded euclidean norm and ||w -w 0 || f is bounded, for some initialization w 0 with bounded spectral norm.in this part, our sample complexity bounds have polynomial dependencies on the norm bounds and on the target accuracy ǫ. our contributions here are as follows:• we prove a fully size-independent rademacher complexity bound for this function class, under the assumption that the activation σ(•) is smooth. in contrast, earlier results that we are aware of were either not size-independent, or assumed w 0 = 0. although we do not know whether the smoothness assumption is necessary, we consider this an interesting example of how smoothness can be utilized in the context of sample complexity bounds.• with w 0 = 0, we show an upper bound on the rademacher complexity of deep neural networks (more than one layer) that is fully independent of the network width or the input dimension, and for generic element-wise lipschitz activations. for constant-depth networks, this bound is fully independent of the network size.these two results answer some of the open questions in vardi et al. . we conclude with a discussion of open problems in section 5. formal proofs of our results appear in the appendix.in this paper, we consider the sample complexity of learning function classes, where each function is a composition of one or more transformations given by x → f (w x) , where x is a vector, w is a parameter matrix, and f is some fixed lipschitz function. for example, consider the class of scalar-valued linear predictors of the form {x → w ⊤ x : w, x ∈ r d , w ≤ b} and inputs ||x|| ≤ 1, where || • || is the euclidean norm. as before, we focus on an euclidean setting, where x and u has a bounded euclidean norm and ||w -w 0 || f is bounded, for some initialization w 0 with bounded spectral norm. enforcing f to be lipschitz and the domain x to be bounded is known to be necessary for meaningful size-independent bounds, even in the case of scalar-valued linear predictors x → f (w ⊤ x) (e. for simplicity, we mostly focus on the case of x being the euclidean unit ball, but this is without much loss of generality: for example, if we consider the domain {x ∈ r d : ||x|| ≤ b x } in euclidean space for some b x ≥ 0, we can embed b x into the weight constraints, and analyze instead the class f f,bxw0 bxb,n,d over the euclidean unit ball {x ∈ r d : ||x|| ≤ 1}.the key ideas in the proof of theorem 2 can be roughly sketched as follows: first, we show that due to the frobenius norm constraints, every function x → f (w x) in our class can be approximated (up to some ǫ) by a function of the form x → f ( wǫ x), where the rank of wǫ is at most b 2 /ǫ 2 . 2018, which considered the case r = 1. letting w y = w ′ y + w 0 , we get a matrix of bounded distance to w 0 , so that w y x i encodes both i and y. 
sample {x i , y i } m i=1 from an unknown distribution, we can approximately minimize e (x,y) arbitrarily well over all w : w -w 0 ≤ b, provided that m is large enough.perhaps unexpectedly, we now turn to show that this positive learnability result is not due to uniform convergence: namely, we can learn this class, but not because the empirical average and expected value of ℓ(w x; y) are close uniformly over all w : w -w 0 ≤ b. specifically, we will consider the following hypothesis class of scalar-valued, one-hidden-layer neural networks of width n on inputs in {x ∈ r d : ||x|| ≤ b x }, where σ(•) is a lipschitz function on r applied element-wise, and where we only bound the norms as follows:. indeed, even by taking the activation function σ(•) to be the identity, b = 0 and b = 1 we get that our function class contains the class of scalar-valued linear predictors {x → v ⊤ x : x, v ∈ r d , ||v|| ≤ b 0 }. given any ψ ∈ ψ l,a=0 and x ∈ u b , construct ψ ′ : n x → n y as follows: let x ′ be the point in n x that is nearest to x, and let ψ ′ (x) be the point in n y that is nearest to ψ(x ′ ).where we remind that ψ l=1,a,r is the class of all 1-lipschitz functions from {x ∈ r r : ||x|| ≤ b} to r, such that f (0) = a for some fixed a ∈ r. for all x, y ∈ r, the first-order taylor expansion of σ(y) at x is define by σ(x) + σ ′ (x)(y -x), and the reminder r x (y) is define by. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/789.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/789.txt new file mode 100644 index 0000000000000000000000000000000000000000..32781b052931aa13cae358c720d162261b18a8e4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/789.txt @@ -0,0 +1 @@ +the performance of traditional machine learning algorithms heavily depends on the assumption that the training and test data are independent and identically distributed (iid). however, in wild environments, the test distribution often differs significantly from the training distribution. this mismatch can lead to spurious correlations, ultimately causing the machine learning models to perform poorly and become unstable. unfortunately, this limitation severely hinders the use of machine learning in high-risk domains like autonomous driving , medical treatment , and law .in recent years, there has been a growing interest among researchers in addressing the out-of-distribution (ood) generalization problem, where the iid assumption does not stand . this issue is addressed by various branches * corresponding author of research, such as invariant learning , distributionally robust optimization , stable learning , and domain generalization . of these, domain generalization assumes the heterogeneity of training data. specifically, domain generalization attempts to learn common or causal knowledge from multiple training domains to develop a model capable of generalizing to unseen test data.despite a quantity of interesting and instructive works in domain generalization, previously they do not have a common standard training and evaluation protocol. aware of this problem, domainbed proposes a framework for the hyperparameter search and model selection of domain generalization. it also sets a standard for experimental details like the model backbone, data split, data augmentation, etc. this unifies the protocol of domain generalization for subsequent works to follow. 
nevertheless, conflicts remain between the current standard protocol and the accurate and reliable evaluation of ood generalization ability. since domain generalization is depicted as the ability to learn a model from diverse training domains that can generalize to unseen/unknown test data , we should try to mitigate possible test data information leakage for a more precise evaluation of the ood generalization ability.in the current protocol, two key factors show the potential risk of test data information leakage. we make two recommendations for fairer and more accurate evaluation.recommendation 1 domain generalization algorithms should adopt self-supervised pretrained weights or random weights as initialization when evaluated and compared with each other.most domain generalization algorithms take advantage of imagenet supervised pretrained weights for better performance and faster convergence. yet this introduces the information on both images and category labels in im-agenet, which may bear a resemblance to the test domain.through comprehensive experiments, we demonstrate that more utilization of supervised pretrained weights and less utilization of training data can contribute to higher test do-main performance under many common settings of domain generalization. this reveals that the imagenet supervised pretrained weights may play a leading role in the test domain performance. thus the accurate evaluation of ood generalization is violated since the test domain performance does not really come from the generalization from training domains to test domains, for which most domain generalization algorithms are designed, but from the utilization of the supervised pretrained weights. we also demonstrate that when a test domain is quite similar to the imagenet dataset, such a phenomenon becomes most evident while it does not occur if the test domain is rather different from imagenet. this further confirms the test data information leakage.to address such an issue, it is safest to train from scratch to purely evaluate domain generalization. however, on one hand, with the remarkable development and broad application of pretrained models these days , it is too limited and not common practice to train from scratch in real applications without benefiting from pretraining. on the other hand, most of commonly used domain generalization datasets like pacs and vlcs are not large enough to support training from scratch. thus we investigate different pretrained methods and model backbones towards a set of pretrained weights with which there is less test data information leakage and we can still conduct a relatively accurate evaluation of ood generalization. based on our experimental findings, we suggest that self-supervised pretrained weights are a good alternative.recommendation 2 domain generalization algorithms should be evaluated on multiple test domains.for each trained model, domain generalization algorithms are typically evaluated on a single test domain. before do-mainbed, the choice of hyperparameters is not well specified, and there is a chance that model selection is conducted with the help of test data, i.e., the oracle model selection . this can introduce information leakage from the test data and undermine the validity of the evaluation. even following the standard protocol of domainbed, such a possibility still exists since the search space of hyperparameters could be pre-selected with the information of oracle data and fixed apriori for the domainbed model selection pipeline. 
moreover, the ultimate goal of domain generalization is to develop models that can generalize well to a wide range of unseen domains in real-world applications instead of tuning a set of hyperparameters for one single test domain. the current protocol allows models to select different hyperparameters for each test domain, which may not reflect the real-world scenario and could be inconsistent with the original purpose of domain generalization . we suggest that we should evaluate algorithms on multiple test domains for each trained model since we empirically demonstrate that by doing so, the potential leakage from oracle model selection can be greatly mitigated.new leaderboards based on the aforementioned recommendations, we have conducted a re-evaluation of ten representative domain generalization algorithms following the revised protocol and presented three sets of new leaderboards. for resnet50 that is employed in the current protocol, we provide leaderboards with moco-v2 pretraining across all commonly used datasets, and leaderboards with no pretraining on large-scale datasets like do-mainnet and nico++ . in addition, to support comparisons on more advanced network architectures like vision transformers, we also provide leaderboards for vit-b/16 with moco-v3 pretraining. combined with our previous analyses, the change in rankings of algorithms between the new leaderboard and the old one also implies that we are taking risks to improperly evaluate and rank existing methods with the current evaluation protocol. we believe the revised protocol and the leaderboards will stimulate future research in the field of domain generalization with more precise evaluation. since domain generalization is depicted as the ability to learn a model from diverse training domains that can generalize to unseen/unknown test data, we should try to mitigate possible test data information leakage for a more precise evaluation of the ood generalization ability.through comprehensive experiments, we demonstrate that more utilization of supervised pretrained weights and less utilization of training data can contribute to higher test do-main performance under many common settings of domain generalization. thus the accurate evaluation of ood generalization is violated since the test domain performance does not really come from the generalization from training domains to test domains, for which most domain generalization algorithms are designed, but from the utilization of the supervised pretrained weights. thus we investigate different pretrained methods and model backbones towards a set of pretrained weights with which there is less test data information leakage and we can still conduct a relatively accurate evaluation of ood generalization.it is worth noting that bringing in extra knowledge beyond training data may have a potential negative effect of test data information leakage, hurting the accurate evaluation of ood generalization. when using these data as test domains, there could be potential leakage of test data information from the pretrained weights. as lp only updates the last layer, it strongly relies on the pretrained weights to generalize on the test domains, while ft relies more on the training domains and less on the pretrained weights than lp.1shows that as the number of frozen layers increases, indicating higher utilization of pretrained weights and lower utilization of training domain data, the test domain accuracies also increase. 
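a toy numerical illustration of why evaluating each trained model on multiple test domains mitigates oracle model selection: the accuracies below are randomly generated, hypothetical numbers, and the two "protocols" are simplified caricatures of per-domain versus shared hyperparameter selection.

import numpy as np

rng = np.random.default_rng(0)
domains = ["art", "cartoon", "photo", "sketch"]
n_hparams = 20
# hypothetical accuracy of each hyperparameter configuration on each test domain
acc = rng.normal(loc=0.75, scale=0.05, size=(n_hparams, len(domains)))

# caricature of the current protocol: pick a possibly different configuration per test domain
per_domain_oracle = acc.max(axis=0).mean()

# caricature of the recommendation: one configuration must serve all test domains at once
single_config = acc.mean(axis=1).max()

print(f"oracle per-domain selection : {per_domain_oracle:.3f}")
print(f"single config, all domains  : {single_config:.3f}")   # typically lower -> less leakage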
in some settings, simple lp already brings higher improvement than many domain generalization algorithms), it may generate comparable or even better test domain performance than the algorithms that are better at true ood generalization. we vary the number of test domains k and compute the test accuracy of each test domain through the average of its accuracy across every combination of k test domains that includes this test domain.the above analyses suggest that to reduce the risk of test data information leakage and ensure accurate evaluation of ood generalization, it is advisable to use self-supervised pretrained weights or train from scratch, and to increase the number of test domains.in conclusion, we present the new leaderboards using the modified protocol to mitigate the possible test data information leakage, so that we can promote a fairer and more accurate evaluation and comparison between the domain generalization algorithms for their ood generalization ability. since we focus on the ability of models generalizing from training data to test data, strong pretrained weights naturally bring about possible test information leakage as we have analyzed in sec.recently, there have been works showing that pretraining on larger datasets with larger architecture backbones greatly improves test performance in ood tasks, and some directly design algorithms for better utilization of pretrained weights to improve test domain performance. however, for a fairer and more accurate evaluation of ood generalization from training data to test data during the fine-tuning stage, we should seek pretrained weights that exhibit less test data information leakage. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/79.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/79.txt new file mode 100644 index 0000000000000000000000000000000000000000..6d5ad8982398558d82e71fe5c7f7e8f348174791 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/79.txt @@ -0,0 +1 @@ +anomaly detection is the task of automatically detecting examples that do not follow expected patterns (chandola, banerjee, and kumar 2009). these examples, named anomalies, are usually indicative of critical events such as water leaks in stores (perini, vercruyssen, and davis 2022), breakdowns in gas turbines (zhao, wen, and li 2016), or failures in the petroleum extraction (martí et al. 2015). such critical events usually come along with elevated (maintenance) costs or with substantial natural damages (e.g., dispersion of petroleum or gas). thus, detecting anomalies in time is a relevant task that limits such resource waste.collecting labels, especially for anomalies, is often a hard task because anomalies are costly events (e.g., machine failures cannot be voluntarily induced), or simply time-consuming (e.g., you may need to label 100s of examples before getting an anomaly). thus, anomaly detection is often tackled from an unsupervised perspective. however, the lack of labels usually forces the unsupervised detector to have high uncertainty on specific regions of the example space (perini, vercruyssen, and davis 2020). high uncertainty is undesirable because it is often associated with poor predictive performance or reduced trust in the predictions.this uncertainty can be tackled in two complementary ways. on the one hand, one can try to learn a more accurate detector by acquiring a limited number of labels using active learning (al) (abe, zadrozny, and langford 2006). 
on the other hand, it is possible to increase the user trust in the detector's outputs by allowing the detector to abstain from making a prediction when it is highly uncertain, which is called learning to reject (lr) (hendrickx et al. 2021;de stefano, sansone, and vento 2000). one way to do this is to set a rejection threshold on the detector's uncertainty based on where its performance is poor (cortes, desalvo, and mohri 2016). however, evaluating the detector performance requires labels.both of these approaches rely on labeled data. however, the types of labels needed for each approach are quite different. many al strategies rely on biased sampling strategies such as explicitly targeting acquiring labels, for example, for which the detector is highly uncertain (i.e., near the detector's current decision boundary) as these are known to yield better performance (pimentel et al. 2020;culotta and mc-callum 2005). alas, using such labels to evaluate the detector's performance, as required when setting the threshold in lr, will yield a biased performance estimate and hence a sub-optimal threshold (marrocco, molinara, and tortorella 2007). thus, if a user has a fixed budget for acquiring labels there is a tension between collecting (a) strategic labels that can be used to train a better detector, or (b) i.i.d. labels that can be used to evaluate performance and set a proper rejection threshold. therefore, a data scientist is confronted with the challenging question of how they should optimally allocate their label budget between these two purposes.in this paper, we assume that the label budget can be split and allocated in multiple rounds. we introduce bal-lad (budget allocation for active learning and learning to reject in anomaly detection) a novel adaptive strategy that, in each allocation round, (1) measures the potential reward obtained by assigning the budget to either al or lr, and (2) chooses the highest reward option to collect the labels. we introduce bal-lad (budget allocation for active learning and learning to reject in anomaly detection) a novel adaptive strategy that, in each allocation round, (1) measures the potential reward obtained by assigning the budget to either al or lr, and (2) chooses the highest reward option to collect the labels.given: initially unlabeled training set d and validation set v , the dataset's contamination factor γ, an anomaly detector h, and a label budget b; do: decide whether, in each allocation round k, to acquire labels for d (al) or for v (lr).both training the detector with more labels (al) and learning a threshold using larger validation data (lr) improve the detector's performance. we initialize the problem by (1) training the detector with no labels and setting a default rejection threshold, and (2) collecting b random labels for v (lr) and for d (al) for a total of 2b labels. this allows us to compute the initial rewards by measuring how the detector varies from (1) to (2): for lr, we measure the variation after re-setting the validation threshold; for al, we measure the variation after re-training the detector with the new labels. in each round, we allocate the budget to the option (lr or al) with the highest reward, and we update the reward using the new labels. we propose two alternative reward functions: 1) the entropy reward looks at the detector's probabilities, either for prediction (al), or for rejection (lr); 2) the cosine reward considers the predicted class labels, either anomaly yes/no (al), or rejection yes/no (lr). 
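the allocation strategy described above can be summarized as a simple greedy loop. the skeleton below is a hedged sketch in which acquire_al and acquire_lr are hypothetical callbacks standing in for "retrain the detector on new training labels" and "re-fit the rejection threshold on new validation labels", and the decaying toy rewards are not the entropy or cosine rewards from the paper.

import numpy as np

def allocate_budget(total_budget, round_size, reward_al, reward_lr,
                    acquire_al, acquire_lr):
    """Greedy allocation loop: each round, spend `round_size` labels on the option
    (AL or LR) whose current reward estimate is higher, then refresh that reward."""
    spent = 0
    while spent + round_size <= total_budget:
        if reward_al >= reward_lr:
            reward_al = acquire_al(round_size)   # retrain detector, re-estimate AL reward
        else:
            reward_lr = acquire_lr(round_size)   # re-fit rejection threshold, re-estimate LR reward
        spent += round_size
    return reward_al, reward_lr

# toy stand-ins: rewards shrink as more labels are spent on an option
rng = np.random.default_rng(0)
acquire_al = lambda b: max(0.0, rng.normal(0.5, 0.1) - 0.02 * b)
acquire_lr = lambda b: max(0.0, rng.normal(0.4, 0.1) - 0.02 * b)
print(allocate_budget(total_budget=100, round_size=10,
                      reward_al=0.5, reward_lr=0.4,
                      acquire_al=acquire_al, acquire_lr=acquire_lr))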
we use the following methods to derive the reward for both al and lr, by using as detector's probabilities either the probability of predicting anomaly (al), or the probability of rejecting the example (lr). given a validation set with some labels, we (1) set a specific detector confidence c(x), and (2) set the rejection threshold τ ∈ . because rejecting is assumed to be less costly than mispredicting, the rejection cost needs to satisfy the inequality c r ≤ min{c f p × (1 -γ), c f n × γ}, otherwise one could predict either always normal and pay an expected cost of c f n × γ, or always anomaly and pay c f p × (1 -γ). we compare ballad1to two baselines: all-in-al allocates all the budget for active learning and sets the rejection threshold using the (biased) training labels; on the contrary, all-in-lr allocates all the budget for learning to reject and uses an unlabeled training set. for each of the 18 benchmark datasets, we go as follows: (i) we split the dataset into training, validation and test sets using the proportions 40 -40 -20 (we have a large validation set to better measure the impact of rejection); (ii) we fit the anomaly detector on the unlabeled dataset and set the rejection threshold to the default value of 0.1; (ii) we allocate a budget b to lr and al by randomly selecting the initial examples; (iii) we optimize the rejection threshold and measure the lr reward; (iv) we train the anomaly detector on the partially labeled training set and measure the al reward; (v) we allocate the next round budget b to the option with the highest reward and repeat (iii) or (iv) until the whole budget b is used. table2shows the mean and standard deviation of the cost, divided by allocation round. overall, using the cosine reward builds a strategy that produces on average low costs for little budget (≤ 10%), whereas, for a higher budget, the entropy reward obtains better average costs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/790.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/790.txt new file mode 100644 index 0000000000000000000000000000000000000000..2c004ab242dec7c161887deef6f96beb857ffb6d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/790.txt @@ -0,0 +1 @@ +statistical validity is a widely recognized crucial feature of modern science. lack of validitypopularly known as the replication crisis in science poses a serious threat to the scientific process and also to the public's trust in scientific findings.one of the factors leading to the replication crisis is the inherent adaptivity in the data analysis process. to illustrate adaptivity and its effect, consider a data analyst who is testing a specific research hypothesis. the analyst gathers data, evaluates the hypothesis empirically, and often finds that their hypothesis is not supported by the data, leading to the formulation and testing of more hypotheses. if these hypotheses are tested and formed based on the same data (as acquiring fresh data is often expensive or even impossible), then the process is of adaptive data analysis (ada) because the choice of hypotheses depends on the data. however, ada no longer aligns with classical statistical theory, which assumes that hypotheses are selected independently of the data (and preferably before gathering data). ada may lead to overfitting and hence false discoveries.statistical validity under ada is a fundamental problem in statistics, that has received only partial answers. 
a recent line of work, initiated by and includes has resulted in new insights into ada and robust paradigms for guaranteeing statistical validity in ada. a major objective of this line of work is to design optimal mechanisms m that initially obtain a dataset s containing n i.i.d. samples from an unknown distribution d, and then answers adaptively chosen queries with respect to d. importantly, all of m's answers must be accurate with respect to the underlying distribution d, not just w.r.t. the empirical dataset s. the main question is how to design an efficient mechanism that provides accurate estimations to adaptively chosen statistical queries, where the goal is to maximize the number of queries m can answer. this objective is achieved by providing both upper-and lower-bound constructions, where the lower-bound constructions demonstrate how an adversarial analyst making a small number of queries to an arbitrary m can invariably force m to err. the setting for these lower-bound proofs is formalized as a two-player game between a mechanism m and an adversary a as in game 1.1.game 1.1 (ada game between a mechanism m and an adversarial analyst a).• m gets a dataset s of n i.i.d. samples from an unknown distribution d over x .• for i = 1, . . . , ℓ:-a sends a query q i : x → to m.-m sends an answer y i ∈ to a.(as a and m are stateful, q i and y i may depend on the previous messages.)a question that immediately arises from the description of game 1.1 is to whom should the distribution d be unknown, and how to formalize this lack of knowledge. ideally, the mechanism m should succeed with high probability for every unknown distribution d and against any adversary a.in prior work, this property was captured by letting the adversary choose the distribution d at the outset of game 1.1. namely, the adversary a can be seen as a pair of algorithms (a 1 , a 2 ), where a 1 chooses the distribution d and sends a state st to a 2 (which may contain the entire view of a 1 ), and after that, m and a 2 (st) interacts in game 1.1. in this adversarial model, hardt and ullman and steinke and ullman showed that, assuming the existence of one way functions, it is computationally hard to answer more than θ(n2 ) adaptive queries. these results match the state-of-the-art constructions . 1 in fact, each such negative result was obtained by constructing a single adversary a that fails all efficient mechanisms. this means that, in general, it is computationally hard to answer more than θ(n 2 ) adaptive queries even when the analyst's algorithm is known to the mechanism. on the other hand, in each of these negative results, the adversarial analyst has a significant advantage over the mechanism -their ability to select the distribution d. this allows the analyst to inject random trapdoors in d (e.g., keys of an encryption scheme) which are then used in forcing a computationally limited mechanism to fail, as the mechanism does not get a hold of the trapdoor information.for most applications, the above adversarial model seems to be too strong. for instance, a data analyst who is testing research hypotheses usually has no knowledge advantage about the distribution that the mechanism does not have. 
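the danger that game 1.1 formalizes can be seen in a small simulation: an analyst who chooses a final query based on earlier empirical-mean answers can drive the mechanism's answer far from the true population value. the feature-selection attack below is a standard textbook-style illustration of adaptive overfitting, not the cryptographic constructions discussed here.

import numpy as np

rng = np.random.default_rng(0)
n, d = 100, 1000
X = rng.choice([-1.0, 1.0], size=(n, d))        # features, independent of the label
y = rng.choice([-1.0, 1.0], size=n)             # labels: there is no true signal to find

# adaptive analyst: query the empirical correlation of every feature with y,
# keep the features whose answers look promising, then form a final classifier.
answers = X.T @ y / n                            # mechanism answers with empirical means
selected = np.abs(answers) > 1.0 / np.sqrt(n)
clf = np.sign(answers) * selected

train_acc = np.mean(np.sign(X @ clf) == y)       # evaluated on the SAME sample: overfit
X_fresh = rng.choice([-1.0, 1.0], size=(n, d))
y_fresh = rng.choice([-1.0, 1.0], size=n)
fresh_acc = np.mean(np.sign(X_fresh @ clf) == y_fresh)   # true accuracy is ~0.5
print(train_acc, fresh_acc)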
in this typical setting, even if the underlying distribution d happens to have a trapdoor, if the analyst recovers the trapdoor then the mechanism should also be able to recover it and hence disable its adversarial usage.in light of this observation, we could hope that in a balanced setting, where the underlying distribution is unknown to both the mechanism and the analyst, it would always be possible for m to answer more than o(n 2 ) adaptive queries. to explore this possibility, we introduce what we call a balanced adversarial model. encrypting a message to a speific identity j ∈ only requires mpk, but decrypting a message for identity j must be done using its secret key sk j . , sk m according to the ibe scheme, and defines d to be the uniform distribution over the triplets {(j, mpk, sk j )} m j=1 .we can implement the ibe scheme using a standard public-key encryption scheme: in the sampling process, we sample m independent pairs of public and secret keys {(pk j , sk j )} m j=1 of the original scheme, and define mpk = (pk 1 , . a knows e x∼d for the last query q ℓ that it asks (becuase it equals to 1 m m j=1 qℓ (j), where qℓ is the wrapped query which is part of a's view), and 2. let π be a two party protocol between two interactive ppt algorithms p 1 and p 2 , each outputs a value in , and denote by o 1 n , o 2 n ∈ and t n the random variables of the outputs of p 1 , p 2 and transcript (respectively) in a random the execution π(1 n ). , x n ) ∈ x n and a query q : x → which is not the last one, answers the empirical mean q(s) = 1 n n i=1 x i .• sample (mpk, msk) ∼ setup(1 λ ) and sk j ∼ keygen(msk, j) for every j ∈ , and let t = {(j, mpk, sk j )} m j=1 and d = u t (i.(c) define the query q i+k : x → {-1, 0, 1} that on input (j, x, y) ∈ × {0, 1} k × {0, 1} k outputs decrypt(y, ct i,j ).in the following, let m ′ be an (unnatural) variant of m that operates almost the same, except that in step 4b, rather than sampling ct i,j ∼ encrypt(mpk, j, 0) for j / ∈ j , it samples ct i,j ∼ encrypt(mpk, j, qi (j)).(b) for j ∈ j compute ct i,j ∼ encrypt(mpk, j, qi (j)) and for j ∈ \ j compute ct i,j ∼ encrypt(mpk, j, 0). assume the existence of a ppt adversary a = (a 1 , a 2 ) and functions ℓ = ℓ(n) ≤ poly(n) and x = x (n) with log|x | ≤ poly(n) such that the following holds: let n ∈ n and consider a random execution of ada n,ℓ,x where m is the mechanism that given a sample s and a query q, answers the empirical mean q(s).1 formalizes the second property which states that the mechanism, which answers the empirical mean along the way, will fail in the last query, no matter how it chooses to act (this behavior is captured with the ppt algorithm g), and moreover, it is enough to assume that this requirement only holds with respect to to transcript of the execution, and not with respect to the view of the mechanism.recall that the sampler a 1 first samples keys mpk, msk, {sk j } m j=1 (m = 2000n), and generates n uniformly random samples from the triplets t = {(j, mpk, sk j )} m j=1 . therefore, it is clear that a 2 knows the true mean for each wrapped query q i (and in particular, the last one), since it equals to 1 m m j=1 qi (j) (a description of qi is part of the view of a 2 ).for the secrecy guarantee, note that the transcript t n between p 1 and p 2 only consists of the transcript between the analysis and the mechanism in ada n,ℓ,x where m is the mechanism that answers the empirical mean of each query (holds by the answers that p 1 sends in step 2). 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/791.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/791.txt new file mode 100644 index 0000000000000000000000000000000000000000..c32eb15b2568478e62ec3011737ee11b9d5c626c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/791.txt @@ -0,0 +1 @@ +large language models (llms), such as gpt-3 (floridi and chiriatti 2020), have made significant advances in natural language processing (nlp). in general, pre-training, where a model first trains on massive amounts of data before being fine-tuned for a specific task, has proven to be an efficient technique for improving the performance of a wide range of language tasks (min et al. 2021). for example, bert (devlin et al. 2018) is a pre-trained transformer-based encoder model that can be fine-tuned for various nlp tasks, such as sentence classification, question answering, and named entity recognition. in fact, large language models have shown a so-called few-shot learning capability to be efficiently adapted to downstream tasks. if we break down the architecture of llms, we can categorize their components into two general concepts: deep neural networks (dnns) as a part of machine learning (ml), and training data. despite all the advantages of llms, they come with some limitations. starting from the very beginning, machine learning has its own limitations, from supervised ml, which heavily relies on large amounts of human-labeled data, to reinforcement learning (rl), which requires a very large number of interactions between the agent and the environment. the brittleness of deep learning systems is largely due to machine learning models being based on the independent and identically distributed (i.i.d.) assumption, which is not a realistic assumption in the real world. (figure 1: a symbolic representation/sentence for node a2 in a higher level l2 of a hierarchical taxonomy for multi-class classification.) moreover, compared to human capabilities, dnns are still lacking in various aspects, such as adaptability, generalizability, robustness, explainability, abstraction, common sense, and causal reasoning. in general, multi-layer perceptrons (mlps) are good at generalizing within the space of training examples, but they perform poorly at generalizing outside the space of training examples, and this limitation is not improved even by adding more layers. so, the question is, what can be done? can increasing the size of training data solve these shortcomings? another shortcoming, which is not addressed by simply using more data, is curve fitting (pearl 2019), mapping inputs to outputs. if our systems rely solely on curve-fitting and statistical approximation, their inferences will necessarily be shallow. instead of inducing a more abstract and causal understanding of the world, they try to approximate the statistical curves of how words are used to infer how the world works. consider the following example from gpt-2: "a good way to light a fire is to use a pint-sized furnace." it clearly shows a lack of understanding of the nature of fire by the model, as it is essentially based on the statistical occurrence of words next to each other, without an inherent cognitive model. let us take a step back and explore another approach to training a machine, which is symbolic machine learning. a symbolic machine combines a sophisticated reasoner with a large-scale knowledge base. 
one well-known example of a symbolic machine is cyc 1 , which represents the single largest effort in the history of ai to create commonsense knowledge in a machine-interpretable form. cyc was launched in 1984 by doug lenat and required thousands of person-years of effort to capture facts about psychology, politics, economics, biology, and various other domains in a precise logical form. one famous test of cyc is the romeo and juliet quiz, in which cyc demonstrates an internal distillation of a complex scenario and provides an example of rich cognition. however, despite the extensive efforts put into cyc, it falls short compared to the remarkable results achieved by transformers and gpt-2, even without explicit knowledge engineering. what gary marcus (marcus 2020) believes is that symbol manipulation could be the solution, particularly for extrapolating beyond a training regime. symbol manipulation, specifically the machinery of operations over variables, offers a natural albeit incomplete solution to the challenge of extrapolating beyond a training regime. it also provides a clear basis for representing structured representations (such as the tree structures foundational to generative linguistics) and records of individuals and their properties. it, along with a satisfactory framework for learning, can bring a hybrid approach that combines the best of both worlds: the ability to learn from large-scale datasets and the capacity to represent abstract concepts. the power of combining statistical and symbolic artificial intelligence techniques to accelerate learning and improve transparency is exemplified by the work of the mit-ibm watson ai lab and deepmind (mao et al. 2019). in this work, we aim to integrate abstract/prior knowledge (a hierarchical taxonomy of labels) into the structure of machine learning. as one of our contributions, we leverage symbolic manipulation to represent the taxonomy. according to henry kautz's proposal on neural-symbolic computing (nsc) 2 (garcez and lamb 2023), our work can be categorized as type 5, neuro_symbolic: a tightly-coupled neural-symbolic system where a symbolic logic rule is mapped onto a distributed representation (an embedding) and acts as a soft constraint (a regularizer) on the network's loss function. additionally, we combine type 5 with a method from type 1, symbolic neuro symbolic, which involves standard deep learning in which the input and output of a neural network can be made of symbols. our target is an imbalanced classification problem where we have a hierarchical taxonomy of labels as our prior knowledge. many real-world classification problems exhibit imbalanced class distributions, such as having 25 fraudulent transactions among 1,000,000 normal transactions in a financial security dataset of a reputable bank (hasanin et al. 2019). (footnotes: 1 www.cyc.com; 2 https://www.cs.rochester.edu/u/kautz/talks/index.html.) in current fully supervised classification tasks, models are trained on labeled datasets where labels are primarily injected into the objective function (e.g., cross-entropy) as prior knowledge. these labels typically originate from a larger hierarchical taxonomy, allowing for comprehensive reasoning over the labels. labels in machine learning (ml), especially in supervised ml, play an important role. among the three main components of supervised ml, data and models have received the most attention in active research (bagherinezhad et al. 2018). however, labels often present challenges. 
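as a simplified illustration of injecting a label taxonomy into the loss function, the sketch below sums leaf-class probabilities up to their parent nodes and adds a parent-level cross-entropy term to the usual leaf-level cross-entropy; the two-level taxonomy, the class indices, and the weight lam are hypothetical, and this is not the paper's exact semantic-loss formulation.

```python
import numpy as np

rng = np.random.default_rng(0)

# hypothetical two-level taxonomy: higher-level node -> indices of its leaf classes
TAXONOMY = {"a1": [0, 1], "a2": [2, 3, 4]}
PARENT_OF_LEAF = {leaf: node for node, leaves in TAXONOMY.items() for leaf in leaves}

def softmax(z):
    z = z - z.max(axis=-1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=-1, keepdims=True)

def taxonomy_loss(logits, leaf_labels, lam=0.5):
    """leaf-level cross-entropy plus a parent-level cross-entropy on aggregated probabilities."""
    p_leaf = softmax(logits)                                        # (batch, num_leaf_classes)
    idx = np.arange(len(leaf_labels))
    leaf_ce = -np.log(p_leaf[idx, leaf_labels] + 1e-12).mean()

    nodes = list(TAXONOMY)
    p_node = np.stack([p_leaf[:, TAXONOMY[m]].sum(axis=1) for m in nodes], axis=1)
    node_labels = np.array([nodes.index(PARENT_OF_LEAF[y]) for y in leaf_labels])
    node_ce = -np.log(p_node[idx, node_labels] + 1e-12).mean()

    # the higher taxonomy level acts as a soft constraint on the output distribution
    return leaf_ce + lam * node_ce

logits = rng.normal(size=(4, 5))
labels = np.array([0, 2, 4, 1])
print(taxonomy_loss(logits, labels))
```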
despite the human cost required for labeling, labels are frequently incomplete, ambiguous, and redundant. using a hierarchical taxonomy for labels can provide more information that leads to improved labels and ultimately enhances model quality in supervised learning, and even yields further gains in semi-supervised learning.in this paper, our aim is to inject a hierarchical taxonomy of classes into the loss function of the learning algorithm for text classification, which can be scaled to other data domains. we introduce two methods to represent and incorporate the hierarchical taxonomy. the first method (section 3.1) represents the taxonomy as constraints in boolean logic. for example, figure 1 illustrates a hierarchical taxonomy for class labels, where leaves at level l 1 indicate the actual class labels used in the loss function (e.g., cross-entropy), and nodes at a higher level l 2 indicate a higher level of conceptualization for the labels, which are typically not used in the classification algorithm. our goal is to use the higher levels of the taxonomy to constrain the distribution of neural network outputs. similar to (xu et al. 2018), we enhance neural networks with the ability to learn how to make predictions while adhering to these constraints, leveraging symbolic knowledge to improve learning performance. the second method (section 3.2) involves using graph convolutional networks (gnn) to represent and incorporate the hierarchical taxonomy into the loss function. our experimental results for both methods demonstrate the significant effect of higher levels of the hierarchical taxonomy in alleviating the unequal distribution of classes in severely imbalanced classification problems.our contributions in this paper focus on flat/general classification, referring to the standard multi-class classification problem. this differs from hierarchical classification, where the class set to be predicted is organized into a class hierarchy, typically represented as a tree or a directed acyclic graph (dag). despite the human cost required for labeling, labels are frequently incomplete, ambiguous, and redundant.in this paper, our aim is to inject a hierarchical taxonomy of classes into the loss function of the learning algorithm for text classification, which can be scaled to other data domains. for example, figure1illustrates a hierarchical taxonomy for class labels, where leaves at level l 1 indicate the actual class labels used in the loss function (e. our approach falls under algorithm-level techniques for addressing imbalanced classification problems, as we directly inject a hierarchical taxonomy of class labels as prior knowledge into the existing loss function (i.we propose two approaches to represent and integrate the hierarchical taxonomy as prior knowledge into the loss function of a learning algorithm.to integrate the hierarchical taxonomy of the classes into the loss function, we first represent the taxonomy as symbolic logical constraints. each level of concepts in a taxonomy is denoted as l i , i ∈ , where k is node-based length of the taxonomy and l 1 indicates the leaves of the taxonomy, which is associated with the class labels. for instance, in figure1, for a taxonomy used in multi-class classification, sentence α states that for a set of indicators x = {x 1 , . to represent hierarchical nature of the taxonomy, a set of variables b = {b 1 , b 2 , . b 1 , . 
the semantic loss l s (α, β, p, s) is defined as a function of sentences (α,β) in propositional logic, which is defined over variables x = {x 1 , x 2 , ., b k-1 }, a vector of probabilities p for variables x, and a satisfaction vector s for variables b = {b 1 , b 2 , ., leaves in taxonomy) data, ι2 is the matrix of one-hot vectors over nodes in level l2 without labels in leaves, and ι3 is the matrix of one-hot vectors over nodes in level l2 without labels in leaves and level l2. in this section, we propose a method to incorporate the hierarchical taxonomy of a classification task as prior knowledge into the loss function through a batch-based graph convolutional networks (bgcn).to examine the pure effect of taxonomy, we do not consider the taxonomy knowledge for both the labeled and unlabeled data in semi-supervised learning (tables6, 7, 8,and9). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/792.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/792.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ca3080deb49e6277c0db5168e9ef426cad89726 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/792.txt @@ -0,0 +1 @@ +feedback graphs provide an elegant interpolation between two popular online learning models: multiarmed bandits and prediction with expert advice. when learning with an undirected feedback graph g over k actions, the online algorithm observes not only the loss of the action chosen in each round, but also the loss of the actions that are adjacent to it in the graph. two important special cases of this setting are: prediction with expert advice (when g is a clique) and k-armed bandits (when g has no edges). when losses are generated adversarially, the regret in the feedback graph setting with strong observability has been shown to scale with the independence number α of g. intuitively, denser graphs, which correspond to smaller independence numbers, provide more feedback to the learner, thus enabling a better control on regret. more specifically, the best known upper and lower bounds on the regret after t rounds are o √ αt log k and ω √ αt . it has been known for three decades that this upper bound is tight for α = 1 (the experts case, ). when α = k (the bandits case), the lower bound ω √ kt -which has also been known for nearly three decades -was matched by a corresponding upper bound o √ kt only in 2009 . these results show that in feedback graphs, the logarithmic factor √ log k is necessary (at least) for the α = 1 case, while it must vanish from the minimax regret as α grows from 1 to k, but the current bounds fail to capture this fact. in this work, we prove new upper and lower regret bounds that for the first time account for this vanishing logarithmic factor.to prove our new upper bound, we use the standard ftrl algorithm run with the q-tsallis entropy regularizer (q-ftrl for short). it is well-known that for q = 1 2 this algorithm (run with appropriate loss estimates) achieves regret o √ kt when α = k (bandits case), while for q → 1 - the same algorithm (without loss estimates) recovers the bound o √ t log k when α = 1 (experts case). when g contains all self-loops, we show in theorem 1 that, if q is chosen as a certain function q(α, k), then q(α, k)-ftrl, run with standard importance-weighted loss estimates, achieves regret o αt (1 + log(k/α)) . 
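the sketch below illustrates one round of ftrl with feedback-graph observations and the standard importance-weighted loss estimates (our own illustration, not the paper's algorithm): each observed loss is divided by the probability of observing it, i.e. the total probability of the arms whose neighbourhood contains it. for simplicity the update uses the negative shannon entropy regularizer (the q → 1 limit, i.e. exponential weights), since the q-tsallis update has no closed form.

```python
import numpy as np

rng = np.random.default_rng(1)

def observe_prob(p, neighbors, i):
    """probability that the loss of arm i is observed: sum of p over arms whose neighborhood contains i."""
    return sum(p[j] for j in range(len(p)) if i in neighbors[j])

def ftrl_entropy_step(cum_est_losses, eta):
    """closed-form ftrl step with the negative shannon entropy regularizer (exponential weights)."""
    w = np.exp(-eta * (cum_est_losses - cum_est_losses.min()))
    return w / w.sum()

# undirected feedback graph with self-loops, given as neighborhoods (hypothetical, k = 4 arms)
neighbors = {0: {0, 1}, 1: {0, 1, 2}, 2: {1, 2}, 3: {3}}
k, eta, T = 4, 0.1, 2000
cum = np.zeros(k)
for t in range(T):
    p = ftrl_entropy_step(cum, eta)
    arm = rng.choice(k, p=p)
    losses = rng.uniform(size=k)                 # adversary's losses for this round (placeholder)
    for i in neighbors[arm]:                     # losses in the played arm's neighborhood are revealed
        cum[i] += losses[i] / observe_prob(p, neighbors, i)   # importance-weighted (unbiased) estimate
print("final distribution over arms:", np.round(ftrl_entropy_step(cum, eta), 3))
```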
this is a strict improvement over the previous bound, and matches the lower bounds for bandits and experts while interpolating the intermediate cases. this interpolation is reflected by our choice of q, which goes from 1 2 to 1 as α ranges from 1 to k. the main technical hurdle in proving this result is an extension to arbitrary values of q ∈ 1 2 , 1 of a standard resultsee, e.g., -that bounds in terms of α the variance term in the regret of q-ftrl. in theorem 2, using a modified loss estimate, this result is extended to any strongly observable undirected graph , a class of feedback graphs in which some of the actions do not reveal their loss when played. in theorem 3, we show via a doubling trick that our new upper bound can also be obtained (up to constant factors) without the need of knowing (or computing) α. as the resulting algorithm is oblivious to α, our analysis also applies to arbitrary sequences of graphs g t , where k is constant but the independence number α t of g t can change over time, and the algorithm observes g t only after choosing an action (the so-called uninformed case). in this setting, the analysis of the doubling trick is complicated by the non-trivial dependence of the regret on the sequence of α t .we also improve on the ω √ αt lower bound by proving a new ω αt log α k lower bound for all α > 1. this is the first result showing the necessity-outside the experts case-of a logarithmic factor in the minimax regret for all α < k. our proof uses a stochastic adversary generating both losses and feedback graphs via i.i.d. draws from a joint distribution. this sequence of losses and feedback graphs can be used to define a hard instance of the multi-task bandits problem, a variant of the combinatorial bandits framework . we then prove our result by adapting known lower bounding techniques for multi-task bandits . note that for values of α bounded away from 2 and k, the logarithmic factor log α k in the lower bound is smaller than the corresponding factor 1 + log(k/α) in the upper bound. closing this gap remains an open problem. it is well-knownthat for q = 1 2 this algorithm (run with appropriate loss estimates) achieves regret o √ kt when α = k (bandits case), while for q → 1 - the same algorithm (without loss estimates) recovers the bound o √ t log k when α = 1 (experts case). as the resulting algorithm is oblivious to α, our analysis also applies to arbitrary sequences of graphs g t , where k is constant but the independence number α t of g t can change over time, and the algorithm observes g t only after choosing an action (the so-called uninformed case). we consider the following game played over t rounds between a learner with action set v = and the environment.however, the former result would only recover a o( √ kt ) regret bound (regardless of α) with the best choice of q = 1/2, which could be trivially achieved by ignoring side-observations of the losses, whereas the latter bound would only manage to achieve a o( √ αt ln k) regret bound, incurring the extra √ ln k factor for all values of α.consider a greedy algorithm that incrementally constructs a subset of vertices in the following way: at each step, it selects a vertex v that maximizes p(v) b / u∈ng(v) p(u) , it adds v to the solution, and it removes v from g together with its neighbourhood n g (v). 
, g s+1 be the sequence of graphs induced by the operations of the algorithm, where g 1 = g and g s+1 is the empty graph, and let n r (v) = n gr (v) for v ∈ v (g r ).observe that this upper bound is tight for general probability distributions p ∈ ∆ k over the vertices v of any strongly observable undirected graph g (containing at least one self-loop), as it is exactly achieved by the distribution p ⋆ ∈ ∆ k defined as p ⋆ (i) = 1 |s| i {i ∈ s} for some maximum independent set s ⊆ v of g. , a(m ) ∈ m .an environment is identified by a function µ : × g → such that at every round t, after having drawn a graph g t from the uniform distribution over g (denoted with u g ), the environment latently draws for each j ∈ and g ∈ g, a bernoulli random variable γ t (j; g) with mean µ(j; g t ). a directed graph g is strongly observable if for every i ∈ v , at least one of the following holds: i ∈ n in g (i) or j ∈ n in g (i) for all j = i. it is knownthat such a variance term, restricted to nodes with a self-loop in the strongly observable feedback graph g = (v, e), has an upper bound of the form i∈v :i∈n in g (i).in addition to the fact that this variance bound has a linear dependence on the independence number α(g) of g, we observe that there is a logarithmic factor in k/α and 1/β given by the fact that we now consider directed graphs. indeed, it is possible to show that there exist probability distributions p ∈ ∆ k and directed strongly observable graphs g for which α(g) = 1 and i∈v :i∈n in g (i).we can further argue that, by following a similar analysis as in the proofs of theorems 1 and 2, this variance bound would allow to show that the regret of q-ftrl is o αt 1 + ln(k/α) • ln k , where there is an additional ln k factor when compared to our regret bound in the undirected case (theorem 2). however, the undirected graph g ′ obtained from any directed strongly observable graph g by reciprocating edges between distinct nodes has the same independence number α(g ′ ) = α(g) but the regret guarantee given by the more general analysis of q-ftrl would introduce a spurious ln k multiplicative factor. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/793.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/793.txt new file mode 100644 index 0000000000000000000000000000000000000000..fecdf6a16c2439afcf28eca4c7b14d0d3c74b08d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/793.txt @@ -0,0 +1 @@ +in short, gbn is performed by applying batch normalization on disjoint subsets of a batch, i.e. ghost batches. by intentionally operating on smaller batch sizes, gbn increases the stochasticity in the normalization statistics compared to standard batch normalization. this has been found to give a beneficial regularization effect in certain settings, see e.g. figure 1. our primary goal in this work is to explore and isolate the effect of ghost batch normalization.a batchthe output of gbn is obtained by performing batch normalization on each ghost batch. for ghost batch x g with elements x n,c,h,w , this can be expressed as:wheren,h,w (x n,c,h,w -µ g ) 2 and we use broadcasted operations similar to pytorch (paszke et al. 2019).here we note that the term "batch" is highly overloaded, encompassing different batch sizes of interest. accelerator batch size (denoted b) is the number of samples each worker (e.g. gpu) uses during a single forward / backward pass through the network. 
normalization batch size or ghost batch size (denoted n ) is the number of samples over which normalization statistics are calculated. (2017) empirically found that training with small batches arrives at flatter minima, which they argue contributes to better generalization. gni decouples ghost noise from the normalization process, allowing increased regularization from ghost noise without actually performing normalization using small batches. in this section we provide an overview of ghost batch normalization (gbn) from hoffer, hubara, and soudry (2017), followed by a novel modeling of gbn as a composition of gbn and bn, separating the normalization and noise effects. by intentionally operating on smaller batch sizes, gbn increases the stochasticity in the normalization statistics compared to standard batch normalization. the key insight lies in the fact that applying standard batch normalization before ghost batch normalization preserves the output of gbn. for a batch x, where batch normalization is denoted as bn and ghost batch normalization as gbn, this relationship can be expressed as gbn(x) = gbn(bn(x)). this decomposition of gbn into two successive normalization operations lets us isolate the differences between standard batch normalization and ghost batch normalization. within this section, we introduce two innovative approaches designed to enhance the generalization performance of deep neural networks (dnns): 1) exclusive batch normalization, which addresses the issue of train-test discrepancy from self-dependency; 2) ghost noise injection, a method that enhances training-time stochasticity by incorporating noise from ghost batches. since the ghost batch from which the normalization statistics are calculated is randomly sampled, the current sample itself is not necessarily included in the ghost batch. baseline gbn: the top left panel of figure 3 shows how the ghost batch size affects the final validation accuracy when using ghost batch normalization. (figure 4: measured gni noise distributions for resnet18 on cifar100 with a ghost batch size of 32, along with analytical fully connected distributions for batch size 256.) sample-based gni: within figure 3, the lower-left panel presents the performance evaluation of ghost noise injection (gni) across varying ghost batch sizes. building on these insights, we introduced ghost noise injection (gni), a novel technique that elevates ghost noise levels without necessitating normalization over smaller batches, thus diminishing the train-test mismatch. empirical investigations across various scenarios, including those without batch normalization, showcased gni's beneficial impact on generalization, underscoring ghost noise's significance as a potent source of regularization in batch normalization. 
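a minimal numpy sketch of ghost batch normalization as described above (illustrative only; the per-channel affine parameters and the running statistics used at test time are omitted): the accelerator batch is split into ghost batches of size n, and each ghost batch is normalized with its own mean and variance over the n, h, w axes.

```python
import numpy as np

def ghost_batch_norm(x, ghost_size, eps=1e-5):
    """x: array of shape (b, c, h, w); each ghost batch of `ghost_size` samples is normalized independently."""
    b = x.shape[0]
    assert b % ghost_size == 0, "accelerator batch size must be divisible by the ghost batch size"
    out = np.empty_like(x, dtype=float)
    for start in range(0, b, ghost_size):
        g = x[start:start + ghost_size]
        mu = g.mean(axis=(0, 2, 3), keepdims=True)     # per-channel mean over the ghost batch
        var = g.var(axis=(0, 2, 3), keepdims=True)     # per-channel variance over the ghost batch
        out[start:start + ghost_size] = (g - mu) / np.sqrt(var + eps)
    return out

x = np.random.randn(256, 3, 8, 8)
y = ghost_batch_norm(x, ghost_size=32)   # smaller ghost batches -> noisier statistics -> more regularization
print(y.shape, y[:32, 0].mean(), y[:32, 0].std())
```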
among various dimensions to be considered, such as perception robustness or operational safety, one crucial aspect is verifying and validating the av stack. apart from scenario replays or simple random testing, intelligent testing techniques utilizing classical software testing paradigm and control theory have been currently under active research (to name a few recent results , , , , ), where fundamental techniques such as equivalence class testing or optimization-based testing are being extended by the research community to allow the efficient finding of scenarios that challenges the av.in this paper, inspired by the success of using reinforcement learning (rl) in games , , , we study how rl can be used in generating test case scenarios. in games such as chess or the game of go, the controller learned by rl interacts with the opponent interactively, where based on the current system state and the move of the opponent, provides the next move. for the testing of av, one can analogously view each agent in the road traffic (referred to as non-player-character npc) to be a chess piece controlled by rl, where the undesired situation for the av can be assigned as the reward of rl. this altogether establishes the basic framework.nevertheless, in realizing the test case generation method using rl, several non-trivialities arise. apart from setting the undesired situation (e.g., motion jerks or collision) into concrete numerical rewards, one aspect is related to the enormous action space caused by the product of actions for individual agents, making an efficient convergence of good testing strategy infeasible. thus, we propose using multi-agent reinforcement learning (marl) beyond simple rl for training agents. each agent in marl is trained with a projection operator separating its own state and the environment state (for other agents and av), and the reward is assigned accordingly. the design of reward assignment thus changes the agent's behavior, wherein in a fully cooperative setup, an agent shares its reward with the other agents as a common reward. however, due to the use of a common reward, each agent is being obfuscated on the value of its individual action. to mediate this issue, we further propose a novel potential-based reward shaping (pbrs) function that intuitively only rewards an agent if it contributes to the reward of other agents via a potential function inspired by counterfactual analysis. we also create a framework for efficient training of independent, collaboratively behaved agents.for evaluation, we created a simulation-based environment and evaluated the performance of marl-based testing techniques against different collaborative schemes. in contrast to standard competitive or naïve collaborative approaches, our pbrs-based credit assignment approach leads to considerably superior performance in generating crash scenarios.the rest of the paper is structured as follows. after section ii highlighting related work, we in section iii lay the foundation for rl-based testing for av. in section iv we present the key results, namely the pbrs and the agent training scheme. finally, we summarize our evaluation in section v and conclude with future work in section vi. each agent in marl is trained with a projection operator separating its own state and the environment state (for other agents and av), and the reward is assigned accordingly. 
the design of reward assignment thus changes the agent's behavior, wherein in a fully cooperative setup, an agent shares its reward with the other agents as a common reward. to mediate this issue, we further propose a novel potential-based reward shaping (pbrs) function that intuitively only rewards an agent if it contributes to the reward of other agents via a potential function inspired by counterfactual analysis.combines difference rewards and potential-based reward shaping to shape the reward of independent rl agents to capture the contribution of each agent to systems performance., where s is the state, a is the action of the agent, the r(s, a, s ′ ) is the reward signal of the agent, β is the learning rate, α is the discount factor, s ′ and a ′ are future state and action, respectively.the first reward signal is the local reward, where each agent receives only a reward based on their state and actions.if j = 0 and front-crash occurs -κ 2 , if j = 0 and crash occurs 0 otherwise (1) the local reward for agent i is defined by considering the interaction with other agents, while the global reward is defined as the sum of all local rewards.to implement a fully cooperative marl, one may replace r i (s, a i ) and use r(s, a) as the local reward for an agent, implying that the goal of training an individual agent is to maximize the global good. let f be the environment update function that changes state s t (state at time t) to s t+1 (the state at time t + 1) with agents using action a at time t (denoted as a t ), i.) be the corresponding state update without agent i being present, where use s t -i and a t -i to denote the state and action at time t without agent i and f -i as the update function where agent i does not exist.with the above formulation, we now offer an intuition on our reward shaping technique: in formula (10), the first term r -i (s t+1,c(t) ) is the immediate reward of other agents at t + 1 when agent i is absent, where the negative sign in the front highlights that the term should be subtracted. with the action of each agent (npc) made available, perform a simulation step via simulation-step(s, a) to obtain the next state s next and the collaborative reward r(s, a) defined in formula 3. we then apply the marl framework with three reward assignment strategies, namely, 1) the fully competitive setup where each agent takes the reward as governed by formula (2), 2) the naïve collaborative setup by using formula (3) as the local reward for each agent, and finally algorithm 1 marl with counterfactual analysis inspired reward shaping (homogeneous agent) input: α discount factor, γ hyper-parameter for pbrs, uim, uic, maxtimesteps, batch-size output: q π is the policy function approximator dnn that takes observation from an agent to be controlled (i.⊲ output of actions for all agents decided by q π ⊲ perform a simulation step to derive the next state and information whether the agent has achieved its end state take an agent indexed j 1 that is still active after counterfactual agent removal from s c .⊲ output of actions for all agents decided by π 17: if all agents at s next can not move further due to end of simulation steps or due to vehicle collision then train q π to minimize the mse loss by taking batch-size of samples from b, where for each sample, its label is r(s, a) + coop dif + α max a q π target (obs j (s next )) and the prediction is q π (obs j (s)) aj (when s next is a terminal state, use r(s, a) + coop dif as the label). 
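to make potential-based reward shaping concrete, here is a generic sketch (our own illustration): the shaped reward adds the discounted change of a potential function to the environment reward, which leaves the optimal policies of the underlying mdp unchanged. the potential below is a simple placeholder, not the paper's counterfactual, contribution-based potential from formula (10).

```python
def shaped_reward(r, phi, s, s_next, gamma=0.99, terminal=False):
    """potential-based reward shaping: f(s, s') = gamma * phi(s') - phi(s); terminal potential taken as 0."""
    phi_next = 0.0 if terminal else phi(s_next)
    return r + gamma * phi_next - phi(s)

# placeholder potential: how close the closest npc is to the av (assumed 1-d positions, hypothetical state layout)
def example_potential(state):
    av = state["av"]
    return -min(abs(p - av) for p in state["npcs"])

s = {"av": 0.0, "npcs": [5.0, -3.0]}
s_next = {"av": 0.0, "npcs": [4.0, -3.0]}
print(shaped_reward(r=0.0, phi=example_potential, s=s, s_next=s_next))
```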
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/795.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/795.txt new file mode 100644 index 0000000000000000000000000000000000000000..465dbee74e537c1ffc7f69ef16034a1d22ee6e3f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/795.txt @@ -0,0 +1 @@ +the optimization objective involved in the training of modern (overparameterized) machine learning (ml) models is typically underdetermined, meaning that it presents infinitely many global optima. yet, even unregularized first-order optimization methods are observed to converge to solutions that generalize well to test data, as multiple empirical studies have repeatedly confirmed (e.g., zhang et al. , neyshabur et al. ). moreover, robust optimization methods such as adversarial training are empirically observed to achieve solutions that generalizes well even in the presence of adversarial perturbations of data (e.g., madry et al. , li et al. ). these observations have spurred interest in what is commonly called the implicit bias of these optimization methods: namely, (a) which solution (i.e. global minimum) is favored by a particular first-order optimization method, and (b) at what speed do the parameters of the model converge to this solution?this paper addresses these questions in the regime of underdetermined linear classification. when the training data is separable, the optimization objective is typically the unregularized empirical risk measured through a convex loss function r(•) that acts as a suitable surrogate to the discontinuous 0 -1 loss (see equation (1) for a formal definition). specializing to exponentially-tailed loss functions (which include the popular logistic loss), we have a rich asymptotic theory that addresses question (a), linking the implicit bias of an optimization method to its geometry. the pioneering works ji and telgarsky , soudry et al. first characterized the implicit bias of gradient descent (gd) by the solution that maximizes the (normalized) margin measured in euclidean distance; this is commonly called the • 2-maximal margin classifier. subsequently, gunasekar et al. showed that the implicit bias of the steepest descent algorithm with respect to a general norm • is the corresponding • -maximal margin classifier, and sun et al. showed that the implicit bias of the mirror descent algorithm with the potential • q q (for q > 1) is the corresponding • q -maximal margin classifier. therefore, these richer families of algorithms can adapt to different data geometries by varying the choice of norm or potential function. on the side of adversarial training (at), gd augmented with adversarial perturbations in a bounded ℓs-norm (called ℓs-at as shorthand) is known to converge to the maximum (2, s)-mixed-norm margin classifier, which could yield improved robustness properties depending on the choice of s . the choice of s, and therefore the choice of the optimization algorithm, affects the entire nature of the eventual solution, thus playing a pivotal role in robustness. question (b), i.e., the rate of parameter or margin convergence to these implicit biases, has been addressed in part for specific optimization methods, but the picture remains incomplete and complex. 
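as a small runnable illustration of questions (a) and (b) (our own sketch, not taken from the paper): running plain gradient descent on the unregularized logistic loss over separable data and tracking the normalized ℓ2 margin shows the parameter norm diverging while its direction slowly approaches the maximum-margin classifier.

```python
import numpy as np

rng = np.random.default_rng(0)

# linearly separable 2-d data (placeholder)
n = 100
X = np.vstack([rng.normal([2.0, 2.0], 0.5, size=(n // 2, 2)),
               rng.normal([-2.0, -2.0], 0.5, size=(n // 2, 2))])
y = np.concatenate([np.ones(n // 2), -np.ones(n // 2)])

def grad_logistic(w):
    """gradient of the average logistic loss (1/n) sum_i log(1 + exp(-y_i <w, x_i>))."""
    m = y * (X @ w)
    s = -y * 0.5 * (1.0 - np.tanh(m / 2.0))    # numerically stable -y * sigmoid(-m)
    return (s[:, None] * X).mean(axis=0)

w = np.zeros(2)
for t in range(1, 100001):
    w -= 1.0 * grad_logistic(w)
    if t in (10, 1000, 10000, 100000):
        margin = np.min(y * (X @ w)) / np.linalg.norm(w)   # normalized l2 margin
        print(f"t={t:>6}  ||w||_2={np.linalg.norm(w):8.2f}  normalized margin={margin:.4f}")
```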
even for the special case of euclidean geometry, parameter or margin convergence analyses present several technical challenges due to the non-smoothness of the margin function, the presence of multiple global minima of the original optimization objective function, and the fact that, for exponentially tailed losses such as the logistic loss, all of these minima are attained at infinity, meaning that the direction of the parameter is what needs to be considered . the initial works on gd only established a slow rate of o log n log t , where t is the time horizon and n is the cardinality of the dataset . since then, gd was shown to attain better rates of o log n+log t √ t and o log n t with a more aggressive step size schedule, and the fastest known rate of o log n t 2additionally imbues gd with momentum or nesterov's acceleration . the picture of margin convergence rates remains much more limited for generic optimization methods, with the fastest known rate being o log n+log t √ tfor steepest descent methods and o log n t 1/4 for mirror descent methods1 . for gd augmented with adversarial training, the fastest known rate is o poly(n) √ t for ℓ2-perturbations of the data, with all other perturbation norms (i.e., s = 2) yielding a much slower o log n log t rate . the required analyses for these methods are generally quite complex and idiosyncratic, and each tends to rely on specific details of the particular optimization procedure. we show that with an appropriately chosen step size, the algorithm achieves a faster • q -margin maximization rate on the order of o log n log t (q-1)t . when q = 2, the algorithm reduces to average gd, and our rate o 1 t + log n log t t 2 is a log n-factor tighter than the o log n t rate of the last-iterate of gd. in section 3, we show that solving a regularized bilinear game with online learning algorithms (top box) can directly maximize the margin, and the convergence rate is on the same order of the averaged regret c t (right box); in sections 4, we prove that minimizing the empirical risk with a series of generic optimization methods (left box) is equivalent to using online learning algorithms to solve the regularized bilinear game. as a result, the margin maximization rate (and also the directional error) of these optimization methods are exactly characterized by the regret bounds of the corresponding online learning algorithms., φ(w) = 1 2 w2q , where q ∈ (1, 2], and demonstrate that this optimization algorithm can enable faster • -margin maximization rates.we make a few final observations about this algorithm: 1) instead of using the weighted sum vt , we could output the weighted average vt t s=1 αs without altering the margin or directional convergence rate.the first part of theorem 2 indicates that the mirror descent algorithm can be described as two players using certain cleverly designed online learning algorithms to solve the regularized bilinear game in(2). we make the choice βt = the second part of theorem 2 shows that the average regret ct of algorithm 1 is on the order of o log n log t (q-1)γ 2 t . therefore, by plugging in theorem 1, we observe that the margin shrinks on the order of γ -o log n log t γ 2 (q-1)t , and the implicit bias convergence rate is o log n log t γ 2 (q-1) √ t. 
for the left box of algorithm 1, let ηt = t l(v t-1 ) , and let the final output be vt = t t=1 2 t+1 vt.the first part of theorem 5 elucidates the equivalent online dynamic of the steepest descent algorithm, which is also depicted in the right box of algorithm 3.in the preceding subsections, we showed that, with suitable step sizes, steepest descent and average mirror descent can achieve an o log n log t t margin maximization rate. the left box introduces a nesterov-acceleration-based mirror descent: in each iteration t, the algorithm initially performs algorithm 4 accelerated methods . on the other hand, the right box depicts a momentumbased steepest descent algorithm: in each iteration, the method maintains a momentum term gt with an additional gradient (step 2), then identifies the steepest direction with respect to gt (step 3), and applies this direction to update the decision (step 4).the final section of theorem 8 highlights the margin maximization rates as well as the direction convergence rates associated with the output of algorithm 5. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/796.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/796.txt new file mode 100644 index 0000000000000000000000000000000000000000..80aedba31664dbf3b69db3ec7ae5b3928fa7923b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/796.txt @@ -0,0 +1 @@ +sales or demand forecasting is essential for businesses operating across different regions, states or countries, such as walmart and amazon. reliable demand forecasting enables them to make wise decisions regarding their inventory, pricing and marketing tactics. by accurately predicting future demand, businesses can also optimize inventory levels, reduce wastes and increase profitability. correct demand forecasting is also crucial for a country's stable supply chain ( ). from the consumers side, a stable supply chain ensures availabilities of goods and services. this helps to maintain consumer confidence and prevent panic buying. for the businesses side, a stable supply chain allows them to operate smoothly and efficiently, as the risks of disruptions and downtime can be minimized.the application of both traditional time series models and modern machine learning and ai techniques for sales and demand forecasting has garnered significant attention in the field of forecasting. traditional time series models, such as arima, sarima, and exponential smoothing state space model (ets), have been widely used for their simplicity, interpretability, and ability to capture linear trends and seasonality. these models have demonstrated effectiveness in various sales and demand forecasting scenarios, making them a popular choice for many practitioners.however, with the increasing availability of large datasets and the growing complexity of retail sales data, modern machine learning and ai techniques have emerged as powerful alternatives. methods such as artificial neural networks (anns), support vector machines (svms), random forests, and gradient boosting machines (gbms) have shown promise in handling non-linear relationships, high-dimensional data, and complex interactions between variables. 
moreover, recent advancements in deep learning, such as recurrent neural networks (rnns) and long short-term memory (lstm) networks, have further improved the accuracy and adaptability of sales and demand forecasts by capturing long-term dependencies in time series data. combining traditional time series models with machine learning and ai techniques can lead to a more comprehensive forecasting approach. hybrid models, which integrate the strengths of both methodologies, have been proposed as a means to enhance forecast accuracy and robustness. ensemble methods, which combine the predictions of multiple models, have also been explored to capitalize on the diverse capabilities of different forecasting techniques. this paper presents such a hybrid model. we harness the power of the lightgbm algorithm, a high-performance gradient boosting framework, to generate sales forecasts for each time series under the assumption of stationarity. by assuming stationarity, we are able to utilize lightgbm's ability to capture complex non-linear relationships and interactions between variables. we employ the prophet model, a robust and flexible forecasting tool developed by meta, to achieve more precise sales forecasts across a variety of aggregated levels within the time series data. the prophet model is particularly adept at handling the irregularities and seasonality commonly found in sales data, making it a valuable complement to the lightgbm approach. finally, we combine the results of these two powerful methods, creating a hybrid model that leverages their respective strengths to generate robust and accurate sales forecasts for each time series. this fusion of state-of-the-art techniques not only enhances the overall forecasting performance but also makes the approach more resilient to diverse and challenging retail scenarios. by integrating the lightgbm and prophet models, this paper presents an intriguing and effective solution to the complex task of sales forecasting, offering valuable insights and a practical tool for researchers and practitioners alike. in recent years, deep learning models such as recurrent neural networks (rnns) and long short-term memory (lstm) networks have gained popularity for their ability to capture complex patterns in time series data. the walmart sales dataset is a large and complex time series dataset that includes daily sales data for thousands of products across ten walmart stores in california (ca), texas (tx) and wisconsin (wi). the fact that the time series patterns of these ten stores are distinct suggests that using different time series models for each store may be appropriate. this is because different time series models may be better suited to capture the unique patterns of variation in each store's sales data, such as differences in seasonality, trend, and cyclicality. 
if the same time series model is applied to both series, it may not capture the unique patterns of variation in each series, and the resulting forecasts may be inaccurate. feature engineering 1) constructing hierarchical time series: based on our data explorations, it appears that the sales data has a hierarchical structure, with sales data at the level of individual products, departments, stores, and states. this suggests that using a hierarchical time series framework may be a good approach to modeling and forecasting the sales data.additionally, incorporating more time series features, such as the last 7 and 28 days' mean sales, calendar time and lag features, can help to capture the temporal dependencies and patterns in the data. by combining the strengths of lightgbm and trend forecasting models such as prophet, it is possible to build a robust and accurate forecasting system by assuming the input time series of the lightgbm is stationary and its output can serve as weights to adjust the total sales forecast by prophet. for example, the sales data in the walmart dataset has a hierarchical structure, with sales data at the level of individual products, departments, stores, and regions.prophet is a time series forecasting library developed by facebook that is designed to make it easy for analysts and developers to create accurate forecasts for time series data. by incorporating the tweedie-based loss function into our robust and scalable framework, we can ensure that our sales forecasts are not only adaptable to a wide variety of retail scenarios but are also capable of effectively handling the unique challenges posed by zero-inflated and highly variable sales data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/797.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/797.txt new file mode 100644 index 0000000000000000000000000000000000000000..4a1445a0401af92d0f1fec9a5e0c3a3930e1961f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/797.txt @@ -0,0 +1 @@ +as data sharing is increasingly locking horns with data privacy concerns, privacy-preserving data analysis is becoming a challenging task with far-reaching impact. differential privacy (dp) has emerged as the gold standard for implementing privacy in various applications . for instance, dp has been adopted by several technology companies and has also been used in connection with the release of census 2020 data . the motivation behind the concept of differential privacy is the desire to protect an individual's data while publishing aggregate information about the database, as formalized in the following definition: definition 1.1 (differential privacy ). a randomized algorithm m is ε-differentially private if for any neighboring datasets d and d ′ and any measurable subset s ⊆ range(m), we havewhere the probability is with respect to the randomness of m.however, utility guarantees for dp are usually provided only for a fixed, predefined set of queries. hence, it has been frequently recommended that differential privacy may be combined with synthetic data to achieve more flexibility in private data sharing . synthetic datasets are generated from existing datasets and maintain the statistical properties of the original dataset. hence, the datasets can be shared freely among investigators in academia or industry, without security and privacy concerns.yet, computationally efficient construction of accurate differentially private synthetic data is challenging. 
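a compact sketch of the hybrid idea described above (illustrative only; the synthetic data, column names, and hyperparameters are placeholders, and the real pipeline would include the richer lag and calendar features mentioned earlier): prophet forecasts the aggregate store-level series, lightgbm with a tweedie objective predicts item-level shares, and the item forecast is the product of the two.

```python
import numpy as np
import pandas as pd
from prophet import Prophet          # assumed installed: pip install prophet lightgbm
import lightgbm as lgb

rng = np.random.default_rng(0)
dates = pd.date_range("2015-01-01", periods=730, freq="D")
items = [f"item_{i}" for i in range(20)]

# synthetic item-level daily sales (zero-inflated counts, a stand-in for the walmart data)
rows = []
for d in dates:
    for j, it in enumerate(items):
        lam = 2 + j * 0.2 + (1.5 if d.dayofweek >= 5 else 0.0)
        rows.append((d, it, rng.poisson(lam) * rng.binomial(1, 0.8)))
df = pd.DataFrame(rows, columns=["ds", "item", "sales"])

# 1) prophet on the aggregate (store-level) series
agg = df.groupby("ds", as_index=False)["sales"].sum().rename(columns={"sales": "y"})
m = Prophet(weekly_seasonality=True, yearly_seasonality=True)
m.fit(agg)
future = m.make_future_dataframe(periods=28)
total_fcst = m.predict(future)[["ds", "yhat"]].tail(28)

# 2) lightgbm with a tweedie loss predicts each item's share of the daily total
df["dow"] = df["ds"].dt.dayofweek
df["item_id"] = df["item"].str.split("_").str[1].astype(int)
df["share"] = df["sales"] / df.groupby("ds")["sales"].transform("sum").clip(lower=1)
gbm = lgb.LGBMRegressor(objective="tweedie", tweedie_variance_power=1.1, n_estimators=200)
gbm.fit(df[["item_id", "dow"]], df["share"])

# 3) hybrid forecast: item share (lightgbm) x total store sales (prophet)
grid = total_fcst.merge(pd.DataFrame({"item_id": range(len(items))}), how="cross")
grid["dow"] = grid["ds"].dt.dayofweek
share = gbm.predict(grid[["item_id", "dow"]])
grid["item_forecast"] = np.clip(share, 0, None) * grid["yhat"]
print(grid.head())
```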
most research on private synthetic data has been concerned with counting queries, range queries, or k-dimensional marginals, see, e.g., . notable exceptions are . specifically, provides utility guarantees with respect to the 1-wasserstein distance. invoking the kantorovich-rubinstein duality theorem, the 1-wasserstein distance accuracy bound ensures that all lipschitz statistics are preserved uniformly. given that numerous machine learning algorithms are lipschitz , this provides data analysts with a vastly increased toolbox of machine learning methods for which one can expect similar outcomes for the original and synthetic data.for instance, for the special case of datasets living on the d-dimensional boolean hypercube d equipped with the hamming distance, the results in show that there exists an ε-dp algorithm with an expected utility loss that scales like log(εn)where n is the size of the dataset. while succeeded in removing the logarithmic factor in (1.1), it can be shown that the rate in (1.1) is otherwise tight. consequently, the utility guarantees in are only useful when d, the dimension of the data, is small (or if n is exponentially larger than d). in other words, we are facing the curse of dimensionality. the curse of dimensionality extends beyond challenges associated with wasserstein distance utility guarantees. even with a weaker accuracy requirement, the hardness result from uhlman and vadhan shows that n = poly(d) is necessary for generating dp-synthetic data in polynomial time while maintaining approximate covariance.in , the authors succeeded in constructing dp synthetic data with utility bounds where d in (1.1) is replaced by (d ′ + 1), assuming that the dataset lies in a certain d ′ -dimensional subspace. however, the optimization step in their algorithm exhibits exponential time complexity in d, see .this paper presents a computationally efficient algorithm that does not rely on any assumptions about the true data. we demonstrate that our approach enhances the utility bound from d to d ′ in (1.1) when the dataset is in a d ′ -dimensional affine subspace. specifically, we derive a dp algorithm to generate low-dimensional synthetic data from a high-dimensional dataset with a utility guarantee with respect to the 1-wasserstein distance that captures the intrinsic dimension of the data.our approach revolves around a private principal component analysis (pca) procedure with a near-optimal accuracy bound that circumvents the curse of dimensionality. different from classical perturbation analysis that utilizes the davis-kahan theorem in the literature, our accuracy analysis of private pca works without assuming the spectral gap for the covariance matrix.notation. in this paper, we work with data in the euclidean space r d . for convenience, the data matrix x = ∈ r d×n also indicates the dataset (x 1 , . . . , x n ). we use a to denote a matrix and v, x as vectors. • f denotes the frobenius norm and • is the operator norm of a matrix. two sequences a n , b n satisfies a n b n if a n ≤ cb n for an absolute constant c > 0.organization of the paper. the rest of the paper is arranged as follows. in the remainder of section 1, we present our algorithm with an informal theorem for privacy and accuracy guarantees in section 1.1, followed by a discussion. a comparison to the state of the art is given in section 1.2. definitions and lemmas used in the paper are provided in section 2.next, we consider the algorithm 1 step by step. section 3 discusses private pca and noisy projection. 
in section 4, we modify synthetic data algorithms from to the specific cases on the lower dimensional spaces. the precise privacy and accuracy guarantee of algorithm 1 is summarized in section 5. finally, since the case d ′ = 1 is not covered in theorem 1.2, we discuss additional results under stronger assumptions in section 6.1.1. main results. in this paper, we use definition 1.1 on data matrix x ∈ r d×n . we say two data matrices x, x ′ are neighboring datasets if x and x ′ differ on only one column. we follow the setting and notation in as follows. let (ω, ρ) be a metric space. consider a dataset x = ∈ ω n . we aim to construct a computationally efficient differentially private randomized algorithm that outputs synthetic data y = ∈ ω m such that the two empirical measuresare close to each other. here δ x i denotes the dirac measure centered on x i . we measure the utility of the output by e w 1 (µ x , µ y ), where the expectation is taken over the randomness of the algorithm. we assume that each vector in the original dataset x is inside d ; our goal is to generate a differentially private synthetic dataset y in d , where each vector is close to a linear subspace of dimension d ′ , and the empirical measure of y is close to x under the 1-wasserstein distance. we introduce algorithm 1 as a computationally efficient algorithm for this task. it can be summarized in the following four steps:(1) construct a private covariance matrix m. the private covariance is constructed by adding a laplacian random matrix to a centered covariance matrix m defined asthis step is presented in algorithm 2.(2) find a d ′ -dimensional subspace v d ′ by taking the top d ′ eigenvectors of m. then, project the data onto a linear subspace. the new data obtained in this way are inside a d ′ -dimensional ball. this step is summarized in algorithm 3. (3) generate a private measure in the d ′ dimensional ball centered at the origin by adapting methods in , where synthetic data generation algorithms were analyzed for data in the hypercube. this is summarized in algorithms 4 and 5. (4) add a private mean vector to shift the dataset back to a private affine subspace. given the transformations in earlier steps, some synthetic data points might lie outside the hypercube.we then metrically project them back to the domain of the hypercube. finally, we output the resulting dataset y. this is summarized in the last two parts of algorithm 1. the next informal theorem states the privacy and accuracy guarantees of algorithm 1. section 5 gives more detailed and precise statements (theorems 5.1 and 5.2).where d means the right hand side of (1.3) hides factors that are polynomial in d, and σ i (m) is the i-th eigenvalue value of m in (1.2).note that m, the size of the synthetic dataset y, is not necessarily equal to n since the lowdimensional synthetic data subroutine in algorithm 1 creates noisy counts. see section 4 for more details.optimality. the accuracy rate in (1.3) is optimal up to a poly(d) factor when x lies in an affine d ′dimensional subspace. the second term matches the lower bound in for generating d ′ -dimensional synthetic data in d ′ . the first term is the error from the best rank-d ′ approximation of m. it remains an open question if the first term is necessary for methods that are not pca-based. a more detailed discussion can be found below theorem 5.2.improved accuracy. 
when the original dataset x lies in an affine d ′ -dimensional subspace, it implies σ i (m) = 0 for i > d ′ and e w 1 (µ x , µ y ) d (εn) -1/d ′ . this is an improvement from the accuracy rate o((εn) -1/d ) for unstructured data in d in , which overcomes the curse of high dimensionality.1 on data matrix x ∈ r d×n . we assume that each vector in the original dataset x is inside d ; our goal is to generate a differentially private synthetic dataset y in d , where each vector is close to a linear subspace of dimension d ′ , and the empirical measure of y is close to x under the 1-wasserstein distance.(low-dimensional synthetic data) use subroutine in section 4 to generate ε/3-dp synthetic data x ′ of size m depending on d ′ = 2 or d ′ ≥ 3. in this subsection, we characterize the 1-wasserstein distance between the empirical measure µ x and the empirical measure of the centered dataset x -x1 t , where 1 ∈ r n is the all-1 vector.2, we derive the following wasserstein distance bounds between the centered dataset x -x1 t and the dataset x. for input data x and output data x in algorithm 3, let m be the covariance matrix defined in (1.in algorithm 3, for any data x i we first shift it to x i -xλ and then project it to. d ′ ≥ 3: private signed measure mechanism (psmm). after generating the private synthetic data, since we shift the data by its private mean before projection, we need to add another private mean vector back, which shifts the dataset x to a new private affine subspace close to the original dataset x. , x n , suppose that the solution to the optimal transportation problem for w 1 (µ x , µ x ′′ ) is to match x τ (i) with x ′′ i , where τ is a permutation on.(3) m 3 (x, y, σ) for fixed y and σ, is to project the shifted data {x i -y} n i=1 to the first d ′ principal components of σ and apply a certain differentially private subroutine (we choose y and σ as the output of m 2 and m 1 , respectively). (4) m 4 (x, x ′ ) is to shift the dataset to {x ′ i + x priv } m i=1 . suppose that (1) x -(x + λ)1 t denotes the shifted data {x i -x -λ} n i=1 ;.(3) x ′ is the output of the synthetic data subroutine in section 4; (4) x ′′ = x ′ + (x + λ ′ )1 t denotes the data shifted back;. when d ′ = 1, consider algorithm 1 with input data x, output data y, and the subroutine pmm in algorithm 4. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/798.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/798.txt new file mode 100644 index 0000000000000000000000000000000000000000..67460357190cbd80a9438dad07a4f8cf1448e4c0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/798.txt @@ -0,0 +1 @@ +online continuous submodular maximization (ocsm) is a kind of online learning with monotone and continuous dr-submodular functions, which has attracted a growing research interest (chen et al., 2018b,a;zhang et al., 2019;zhu et al., 2021;zhang et al., 2022a,b) due to its wide applications in learning with structured sparsity (bach et al., 2012), nondefinite quadratic programming (ito and fujimaki, 2016), and revenue maximization (soma and yoshida, 2017;bian et al., 2020), etc. similar to the classical online learning (shalev-shwartz, 2011), it can be formalized as a repeated game between a decision maker and an adversary. at each iteration t = 1, . . . 
, t , the decision maker selects a decision x t from a convex set k, and then an adversary reveals a monotone and continuous dr-submodular function f t (•) : k → r, which brings a reward f t (x t ) back to the decision maker.note that even in the offline setting, the problem of maximizing a monotone and continuous dr-submodular function cannot be solved within an approximate factor of (1-1/e+ϵ) for any ϵ > 0 in polynomial time unless rp = np (bian et al., 2017). for this reason, in ocsm, the goal is to minimize the α-regretwhere α belongs to (0, 1]. the α-regret denotes the gap between the total reward of the decision maker and a discounted total reward of the best fixed decision.existing algorithms for minimizing the α-regret of ocsm can be divided into two types. the first type is projection-based algorithms including online gradient ascent (oga) (chen et al., 2018b) and online boosting gradient ascent (obga) (zhang et al., 2022a), which need to compute the projection operation over the decision set per iteration. the second type is projection-free algorithms including meta-frank-wolfe (meta-fw) (chen et al., 2018b,a) and mono-frank-wolfe (mono-fw) (zhang et al., 2019), which avoid the projection operation by using linear optimization steps over the decision set. note that in many applications with complicated decision sets such as matrix completion (chandrasekaran et al., 2009), network routing (hazan and luo, 2016) and structural svms (lacoste-julien et al., 2013), the projection operation is much more time-consuming than the linear optimization step, which makes projection-free algorithms more appealing than projection-based algorithms.moreover, even among those projection-free algorithms, the computational complexity is also very different. specifically, meta-fw (chen et al., 2018b) originally requires o(t 3/2 ) exact gradient evaluations and linear optimization steps in total, 1 and the stochastic variant of meta-fw (sto-meta-fw) (chen et al., 2018a) requires o(t 5/2 ) stochastic gradient evaluations and linear optimization steps. by contrast, mono-fw (zhang et al., 2019) only requires o(t ) stochastic gradient evaluations and linear optimization steps, which is much more efficient than previous two algorithms, and is the first oracle-efficient projection-free algorithm for ocsm (see definition 1 in hazan and minasyan (2020) for details). however, different from meta-fw and sto-meta-fw that can enjoy a (1 -1/e)-regret bound of o( √ t ), mono-fw only achieves a (1 -1/e)-regret bound of o(t 4/5 ). thus, it is natural to ask whether the o(t 4/5 ) regret bound could be further reduced without increasing the computational complexity.in this paper, we provide an affirmative answer by proposing an improved efficient projection-free algorithm, namely pobga, which can reduce the regret bound to o(t 3/4 ) while keeping the same computational complexity as mono-fw. our pobga is not based on mono-fw, but rather the projection-based obga that enjoys a regret bound of o( √ t ). the main idea of making obga be an efficient projection-free algorithm is to apply an infeasible projection technique (garber and kretzu, 2022) that approximates the projection operation via multiple linear optimization steps, and a blocking technique (zhang et al., 2019;garber and kretzu, 2022) that can keep the total number of linear optimization steps as o(t ). 
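The blocking idea can be sketched as follows; this is only a schematic under simplifying assumptions, with an exact Euclidean projection onto an L2 ball standing in for the infeasible-projection oracle of Garber and Kretzu (2022), and the boosting construction of the auxiliary gradients omitted. Names such as blocked_gradient_ascent and block_size are illustrative, not taken from the paper.

import numpy as np

def project_l2_ball(y, radius=1.0):
    # Stand-in for the infeasible-projection oracle: an exact Euclidean
    # projection onto an L2 ball keeps the sketch short.
    norm = np.linalg.norm(y)
    return y if norm <= radius else y * (radius / norm)

def blocked_gradient_ascent(grad_oracle, T, dim, block_size, eta):
    """Schematic blocked online gradient ascent.

    grad_oracle(t, x) returns a stochastic gradient of the round-t reward at x.
    Within each block the decision is kept fixed and gradients are accumulated,
    so only one projection-type step is performed per block, which is how the
    total number of such steps is kept at O(T / block_size).
    """
    x = np.zeros(dim)                 # feasible decision played in the block
    y = np.zeros(dim)                 # auxiliary (possibly infeasible) iterate
    decisions = []
    for start in range(0, T, block_size):
        g_sum = np.zeros(dim)
        for t in range(start, min(start + block_size, T)):
            decisions.append(x.copy())        # play the same decision all block
            g_sum += grad_oracle(t, x)        # query one stochastic gradient
        y = y + eta * g_sum                   # one ascent step per block
        x = project_l2_ball(y)                # one projection-type step per block
    return decisions

# Toy usage: maximize a noisy linear reward sum_t <c, x> over the unit ball.
rng = np.random.default_rng(0)
c = rng.normal(size=5)
decisions = blocked_gradient_ascent(
    grad_oracle=lambda t, x: c + 0.1 * rng.normal(size=5),
    T=1000, dim=5, block_size=50, eta=0.05)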
note that garber and kretzu (2022) utilize the infeasible projection technique to develop efficient projection-free algorithms for online learning with concave reward functions. by contrast, we apply it to efficiently solve the more challenging ocsm here.furthermore, we consider a more practical scenario-decentralized ocsm, which is well motivated by many applications in multi-agent systems and sensor networks (li et al., 2002;xiao et al., 2007;duchi et al., 2011;mokhtari et al., 2018). different from ocsm that only has one decision maker, in the decentralized ocsm, each node in the network denotes a local decision maker, which needs to make its local decision and then receives a local reward function. note that the goal of each local decision maker is to minimize its α-regret measured by the average of local functions at each iteration. to this end, it is allowed to communicate with its neighbors and share its local information. a previous study (zhang et al., 2022b) has extended mono-fw into this setting, and establish a (1 -1/e)-regret bound of o(t 4/5 ) for each local decision maker. by contrast, we develop a decentralized variant of our pobga, namely dpobga, and demonstrate that dpobga can improve the regret bound of each local decision maker to o(t 3/4 ). more importantly, we notice that the total communication complexity of our dpobga is only o( √ t ), which is significantly smaller than the total o(t ) communication complexity required by the decentralized variant of mono-fw., 2022a,b)due to its wide applications in learning with structured sparsity(bach et al. the main idea of making obga be an efficient projection-free algorithm is to apply an infeasible projection technique(garber and kretzu, 2022)that approximates the projection operation via multiple linear optimization steps, and a blocking technique(zhang et al.where π k = argmin y∈k ∥x-y∥2is the projection operation, η is the step size, and ∇f t (x t ) is an unbiased stochastic gradient of f t (x t ) i., e = ∇f t (x t ). meanwhile, as a cost for this computational improvement, mono-fw increases the regret bound from o( √ t ) to o(t 4/5 ).definition 4 a continuous submodular function f (•) : x → r + is called continuous drsubmodular if for any x ≤ y ∈ x , and any z ∈ r+, i ∈ such that x + ze i and y + ze i still belong to x , it holds that. there exists an infeasible projection oracle o ip over any convex set k ⊆ rb, which takes the set k, a pair of points (x 0 , y 0 ) ∈ k × r n , and an error tolerance parameter ϵ as the input, and can output. intuitively, since oga have regret guarantees on both online learning with concave functions and ocsm, it is not surprising that the algorithm ingarber and kretzu (2022)may also be extended to deal with algorithm 1 pobga 1: input: decision set k, horizon t , block size k, step size η, error tolerance ϵ 2: set x 1 = 0 and ỹ1 = 0. let η denote the step size and ϵ denote the error tolerance. similar to algorithm 1, for each iteration t in the block m, we keep the decision x i m unchanged, and query an unbiased stochastic gradient of the local function f t,i (x), i. 
moreover, we construct ∇f t,i (x i m ) = (1 -1/e) ∇f t,i (z t * x i m ) which is an unbiased stochastic gradient of the auxiliary function of f t,i (x), i.we first notice that (8), (9), and (12) in the analysis of algorithm 1 also hold when f t (•), ∇f t (•), f t (•), and ∇f t (•) are respectively replaced with f t,i (•), ∇f t,i (•), f t,i (•), and ∇f t,i (•) utilized in algorithm 2.x i+1 = x i + σ i (v i -x i ) 9: end for from lemma 1, we notice that given a feasible point x 0 ∈ k and an initial point y 0 ∈ r n , the infeasible projection oracle o ip aims to find an infeasible point ỹ ∈ rb such that ∀z ∈ k, ∥ỹ -z∥ 2 ≤ ∥y 0 -z∥ 2 (37) and a feasible point x ∈ k such that ∥x -ỹ∥ 2 ≤ 3ϵ.the detailed procedures of this algorithm is outlined in algorithm 3, which takes a convex set k, an initial point x 1 ∈ k, a target point y, and an error tolerance ϵ as the input. moreover, it is worthy to notice that the stop condition algorithm 4 detailed procedures of o ip 1: input: decision set k, feasible point x 0 ∈ k, initial point y 0 , error tolerance ϵ 2: set y 1 = y 0 / max{1, ∥y 0 ∥/r} 3: if ∥x 0 -y 0 ∥ 2 ≤ 3ϵ then 4:. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/799.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/799.txt new file mode 100644 index 0000000000000000000000000000000000000000..880013ba5aeb9301bd068410995d68e96cfbb24f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/799.txt @@ -0,0 +1 @@ +esport titles, such as league of legends and dota 2, have amassed both large audiences and player-bases (newzoo, 2022;petrovskaya and zendle, 2020). due to the competitive nature of the genre, the player community often develop so called "metas" as explained by kokkinakis et al. (2021). according to the author, metas are naturally discovered and developed strategies for optimum ways of playing the game that are focused in determining competitive advantage available within the current parameters of the game design. as a result, game developers are constantly updating and changing the rules of the game in order to balance the game, prevent game stagnation as well as to maintain player interest. this is done through releasing patches to the game, which can alter the rules and the parameters of the game (summerville et al., 2016). those changes can be manifested by introducing new content (such as new characters), changing existing content (such as altering the duration or cost of an in-game ability), removing old content (such as discarding excising mechanics or rules) or a combination of them (such as replacing an ability for an existing character to a new ability with unique traits).as the patch changes are generally made with the intent to re-balance the game, they often incur changes to the meta, forcing competitive players to play differently and creating new game mechanics interactions. as outlines by s. demediuk et al. (2021), the way in which a game is played, and the current rules set could impact machine learning models and other forms of data analysis. this could be caused both by differences in player decision making, as well as by the new environmental state (i.e. the changes to the game's parameters themselves). as a consequence, much of the esport literature is done focusing on a limited periods of time, to reduce the impact of those changes (s. demediuk et al., 2019;katona et al., 2019;pedrassoli chitayat et al., 2020;tot et al., 2021). 
however, as the game continues to change, the performance of such models may suffer, and models may need to be retrained to support a new architecture.one example that can be observed in the esport literature comes from models that include the characters present in a match. typically, characters are represented by their unique character ids (which are arbitrarily assigned numerical identifiers), which most commonly undergo a one-hot encoding or a similar variation summerville et al., 2016;makarov et al., 2018;katona et al., 2019;viggiato and bezemer, 2020;s. demediuk et al., 2021;ringer et al., 2023). in this encoding, the unique identifier of a character is represented as a vector class. this means that if a game only has four characters, with ids 1 through 4 respectively, character 1 can be presented as the vector (1,0,0,0), while character 2 would be (0,1,0,0), etc... this can lead to an architectural problem where, as the number of character ids increases so does the number of dimensions needed to encode them. using the same example, if a new character is then introduced with character id 5, the vector needed to represent all previous character would need an additional dimension.thus character 1 would then be represented as the vector (1,0,0,0,0). a machine learning model developed prior to the introduction of the 5th character would be architectured to support a 4-dimensional input vector for each character, and thus not support a 5-dimensional input needed given the new game design parameters. this could lead the model to be unusable due to the technical constraints of how it was trained and applied.additionally, characters within esport games usually have unique abilities and traits which allows them to be played in different ways (s. demediuk et al., 2019;s. demediuk et al., 2021). if a character is re-designed in a way in which several of their original abilities or traits are altered, the way in which they are played could significantly change from patch to patch. those changes would not be encompassed by the character ids. using the same example as above, if character 1 is designed to poses several supportive traits (such as healing or otherwise improving the in-game performance of allies) but then is re-designed to have several offensive abilities, the way in which the character is played could be changed significantly. however, as the character id would remain the same, a model that uses the id could have its performance impacted, as it had been trained in a different state that does not account for the new changes to the character. in these hypothetical scenario, while the model would still be able to produce results, it would be unclear if the results are reliable due to the uncertainty of the impact of the change to the game's environment.thus, this paper conjectures that changes to game design parameters may be interpreted in three ways:• breaking changes -those are fundamental changes to the design of the game which would require a changes in model architecture in order to produce any results.• impactful changes -those do not incur changes in architecture, however have a substantial impact in model performance.• unimpactful changes -those do not alter the state of the game significantly enough to affect the performance of previously trained models.while breaking changes are usually trivial to identify, due to their severe disruption to the application of models, differentiating between impactful and unimpactful changes may require analysis. 
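The dimensionality problem described above can be reproduced in a few lines; the helper one_hot below is purely illustrative.

import numpy as np

def one_hot(character_id, num_characters):
    """One-hot encode a character id in {1, ..., num_characters}."""
    v = np.zeros(num_characters, dtype=int)
    v[character_id - 1] = 1
    return v

# With four characters, character 1 is (1, 0, 0, 0).
print(one_hot(1, num_characters=4))      # [1 0 0 0]

# After a patch introduces a fifth character, every vector gains a dimension,
# so character 1 becomes (1, 0, 0, 0, 0) and a model trained on 4-dimensional
# inputs can no longer consume the new encoding without being re-architected.
print(one_hot(1, num_characters=5))      # [1 0 0 0 0]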
furthermore, if either breaking or impactful changes are identified, a previously trained model may need to undergo the training process again to account for the new parameters. this can be a cumbersome process, especially as esport titles typically change rapidly and abruptly (summerville et al., 2016;kokkinakis et al., 2021). this paper builds on some of the methodology suggested in the literature (s. demediuk et al., 2019;viggiato and bezemer, 2020), to generate a novel character representation. this form of representing characters utilises patch specific game design data, which is then clustered to allow for a fixed and reusable notation that can be readily applied to future models. the proposed method holds meaningful information about the character's capabilities and it is sensitive to changes introduced in patches. this has been done using dota 2 -a popular multiplayer online battle arena (moba) esport titlewhich contains over 100 unique characters. each playable character has a range of unique abilities -which are active skills that can be used by players during play with in-game effects, such as causing damage or healing allies -and stats, including "intelligence", "agility", "strength", etc... dota 2 is a team based game, in which two teams of 5 players each attempt to destroy the main building in the opponent's team base. this game has been chosen as a focus in this study due to its popularity within academia (katona et al., 2019;pedrassoli chitayat et al., 2020;tot et al., 2021;semenov et al., 2017;hodge et al., 2019;agarwala and pearce, 2014;ringer et al., 2023;makarov et al., 2018), large complexity and abundant access to data. while the resources made available within this paper are designed for dota 2, a similar methodology could readily be applied to other moba titles -such as league of legends -with minimal alterations, as well as advising the development of similar techniques for other esport genres.the character representation proposed in this paper is then tested and evaluated in a case-study, which simulates a hypothetical future work within the esport literature.three versions of a neural network (nn) are trained using professional games of dota 2 from patches 7.27 to 7.33. these models attempts to predict the number of kills (also referred to as the score) for each team at the end of the match. it is important to note that these nns bear no direct contribution within the paper other than as a evaluation metric.they serve as an illustration for possible use case within future research, and outline the capabilities of this methodology. (nn1) was used as a simple base-line, where only the match duration was used as an input. no additional features were included in this baseline, therefore this network would have no way of determining or modeling the characters present in the game. (nn2) was utilised as an additional control, where character ids (one-hot encoded) were used to represent the characters present in each team, as well as the duration of the match. this network controls for the standard encoding typically used in the literature, which holds information about the characters that have been selected in the match. 
lastly, in order to evaluate the framework proposed, (nn3) was trained using the characters selected represented through the clustered approach proposed in this paper, as well as the match duration.towards addressing the impact of game changes in machine learning models and other forms of data analytics within esports, which poses a problem of short life-spam, this study provides 3 major contributions:• a feature set of character traits is compiled through the literature and game-design data made available by the game's publisher.• a novel way of representing characters is proposed and validated through performance, outlying how it is sensitive to patch specific context as well as reliant to fundamental changes to the core game environment.• access to standardized format, centroids, clustered abilities and characters are made freely available for use for future research in the field1 . this is done through releasing patches to the game, which can alter the rules and the parameters of the game(summerville et al., 2019;viggiato and bezemer, 2020;s., 2021;ringer et al. a machine learning model developed prior to the introduction of the 5th character would be architectured to support a 4-dimensional input vector for each character, and thus not support a 5-dimensional input needed given the new game design parameters. those changes would not be encompassed by the character ids. in this evaluation of models, the authors encoded the character present through a variation of one-hot encoding, where all characters present for the radiant team where encoded as 1, while the characters present in the dire team were encoded as -1 and 0 otherwise. the game design parameters of the character excluding any player decision making) are the most significant features in predicting the outcome of a game within their model. however, the model was limited on character attributes without including any information about character abilities, which other points of literature suggest bear significant importance to how a character is played (s., 2021;makarov et al. demediuk et al., 2018;katona et al. once all of the properties for every ability was compiled, info on the characters attributes -as described in the literature(viggiato & bezemer, 2020) -was extracted from the "heroes. the csv files have been made available in the resources listed in the introduction section for future use. in the current literature that relies on character selection data, no model could be found that decisively support the addition of a new character without needing to change its architecture (i. through clustering characters abilities and attributes, as extracted from the literature, this paper proposes a novel character representation that both encapsulate the iteration of the game for any given patch and support the inclusion of future characters without the need for a change in architecture. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/8.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/8.txt new file mode 100644 index 0000000000000000000000000000000000000000..2334c7b02db72f3c4aa613601cb4ca02f1645ff8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/8.txt @@ -0,0 +1 @@ +learning properties that are interpretable by humans have become a topic of interest with the growing need to explain the behavior of black-box systems. 
there is strong motivation to infer such properties from examples when the underlying rules are unknown; applications range from explaining and debugging to reverse engineering systems that were designed with the help of artificial intelligence.most systems produce sequences of data, and must be described by properties that capture evolution over time. linear temporal logic (ltl), amongst other formalisms, models such temporal properties, and expressing properties in ltl makes them particularly easy for humans to interpret. gaglione et al. (2022) assumed noise in the labeling of the examples, and proposed methods for learning temporal properties that are robust to that noise. when collecting demonstrations of real-world systems, some examples might be wrongly categorized as positive or negative due to noise in the measurement or uncertainties in the categorization process. gaglione et al. (2022) roy et al. (2022) proposed methods to solve the occ problem for ltl formulas and for deterministic finite automata (dfas), using language minimality as a measure for specificity.we present in this paper the methods and results from both (gaglione et al. 2022) and (roy et al. 2022). linear temporal logic (ltl), amongst other formalisms, models such temporal properties, and expressing properties in ltl makes them particularly easy for humans to interpret. when collecting demonstrations of real-world systems, some examples might be wrongly categorized as positive or negative due to noise in the measurement or uncertainties in the categorization process. the problem consists in finding an ltl formula ϕ that correctly classifies at least 1κ of the sample s.we developed two algorithms to solve problem 1: a maxsat-based algorithm, referred to as maxsat ltl , and an algorithm using a decision tree as an extra layer.we also adapted these algorithms to stl formula inference, using satisfiability modulo theories (smt) and maxsmt instead of sat and maxsat, respectively. we describe, in problem 2, the generic form of the problem statement from(roy et al.problem 2 (infer a language-minimal classification of data) let s be a sample composed of (positive) traces. the problem consists in finding an ltl formula ϕ such that |ϕ| ≤ n, that it is consistent with the sample s (i.we developed three methods to solve problem 2, the best of them being referred to as s-sym ltl . this second type of data have also been clustered in different strategies beforehand, and the motivation of ltl formula inference is to explain the underlying rules of these strategies. on the noisy samples, maxsat ltl produced results with running time within orders of magnitude less than sat ltl , and inferred formulas most of the time when sat ltl timed out, as demonstrated in figure1. we inferred stl formulas from these samples using the maxsmt stl method (maxsat ltl applied to stl).we inferred ltl formulas from three clusters using s-sym ltl . these three clusters turned out to be not separable, but since we only considered positive data, we still inferred interpretable ltl formulas that ended up characterizing the entire dataset nonetheless. for example, one of the inferred ltl formula was (f x 3 ) →(g x 3 ) which reads as "either the uav always glides, or it never glides". 
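For illustration only, the snippet below evaluates the reported formula (F x3) -> (G x3) on finite traces; it is a toy satisfaction checker under the assumption of finite-trace semantics, not the MaxSAT-based inference procedure itself.

def F(pred, trace):
    # "finally": pred holds at some position of the (finite) trace
    return any(pred(state) for state in trace)

def G(pred, trace):
    # "globally": pred holds at every position of the trace
    return all(pred(state) for state in trace)

def implies(a, b):
    return (not a) or b

# (F x3) -> (G x3): if the UAV ever glides (x3), then it always glides.
def formula(trace):
    x3 = lambda state: state["x3"]
    return implies(F(x3, trace), G(x3, trace))

always_glides = [{"x3": True}, {"x3": True}, {"x3": True}]
never_glides  = [{"x3": False}, {"x3": False}]
mixed         = [{"x3": False}, {"x3": True}, {"x3": False}]

print(formula(always_glides))  # True
print(formula(never_glides))   # True (F x3 is false, so the implication holds)
print(formula(mixed))          # False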
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/80.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/80.txt new file mode 100644 index 0000000000000000000000000000000000000000..1bcc6111ab262b5d31da7a620cebfc125fe4de67 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/80.txt @@ -0,0 +1 @@ +several practically relevant applications including recommender systems, internet advertising have been formulated as sequential decision making problems using the framework of multi-armed bandits. the importance of privacy in such sequential decision making problems has been extensively discussed in the literature (see for example, thakurta and smith (2013); mishra and thakurta (2015); tossou and dimitrakakis (2016)).differential privacy, introduced by dwork et al. (2006), is one of the popular approaches to address such privacy concerns. in sequential decision making problems, algorithms providing differential privacy preserve data privacy by adding appropriate statistical noise to the data. duchi, jordan, and wainwright (2014) extend this notion to local differential privacy in which data remains private even from the algorithm. the main difference between global and local differential privacy is whether privacy is to be maintained from the algorithm or the (possibly unintended) recipient of the output of the algorithm. in global differential privacy, noise is added by the algorithm so the output does not reveal private information about the input. in local differential privacy, noise is added to the input of the algorithm so that privacy is maintained even from the algorithm.to understand the motivation for local differential privacy, let us consider the practical application of internet advertising1 . an advertising system receives, as input, feedback from the users which may reveal private information about them. the advertising system employs a suitable learning algorithm and selects ads for the users tailored to the feedback given by them. these selected ads are then given to the advertisers as output. while using global differential privacy, privacy is maintained from the advertisers by ensuring that the output of the learning algorithms does not reveal information about the input (i.e., user information). typically, advertising systems are established by leading social media networks, web browsers and other popular websites. korolova (2010); kosinski, stillwell, and graepel (2013) show that it is possible to accurately predict a range of highly sensitive personal attributes including age, sexual orientation, relationship status, political and religious affiliation using the feedback available to the advertising systems. such possible breach of privacy necessitates us to protect personal user information not only from the advertisers but also from the advertising systems. local differential privacy is able to achieve this objective unlike global differential privacy.in this article, we propose to use low privacy regime using local differential privacy. in low privacy regime, the noise added to the data is small and the aim of the privacy mechanism is to send as much information about data as allowed, but no more (kairouz, oh, and viswanath 2014). 
this is in alignment with our dual goal of using privacy in recommendation systems or internet advertising, and other similar applications: provide useful recommendations/ads to the users while respecting their privacy as much as possible.we measure the utility of our proposed algorithm using regret which is a measure of the total mistake cost (precise definitions will follow in the next section). when rewards are bounded (as assumed in most works in the literature), the regret of any algorithm is trivially bounded linearly in the number of time steps t . an algorithm is said to be learning if its regret is bounded sub-linearly in t . (2006), is one of the popular approaches to address such privacy concerns.duchi, jordan, and wainwright (2014)extend this notion to local differential privacy in which data remains private even from the algorithm. the main difference between global and local differential privacy is whether privacy is to be maintained from the algorithm or the (possibly unintended) recipient of the output of the algorithm. in global differential privacy, noise is added by the algorithm so the output does not reveal private information about the input. while using global differential privacy, privacy is maintained from the advertisers by ensuring that the output of the learning algorithms does not reveal information about the input (i. local differential privacy is able to achieve this objective unlike global differential privacy.in this article, we propose to use low privacy regime using local differential privacy. in low privacy regime, the noise added to the data is small and the aim of the privacy mechanism is to send as much information about data as allowed, but no more(kairouz, oh, and viswanath 2014). this is in alignment with our dual goal of using privacy in recommendation systems or internet advertising, and other similar applications: provide useful recommendations/ads to the users while respecting their privacy as much as possible. we propose non-stationary stochastic corrupt bandits, a novel formulation which aims to preserve local differen-tial privacy while still providing high utility for sequential decision making in a non-stationary environment. we assume that, for each arm, there exists a loose link between the reward and the feedback through a known corruption function g a which maps the mean of the reward distribution to the mean of the feedback distribution : g a (µ a (i)) = λ a (i), ∀a ∈ a and 1 ≤ i ≤ l t .as done in gajane, urvoy, and kaufmann (2018), a straightforward approach to achieve local differential privacy using corrupt bandits is to employ a corruption scheme on the user feedback.as it turns out, this is equivalent to the staircase mechanism for local privacy which is the optimal local differential privacy mechanism for low privacy regime(kairouz, oh, and viswanath 2016, theorem 14).in our analysis, we use the fact that when an arm a is picked at time t + 1 by sw-klucb-cf, one of the following is true: either the mean feedback of the optimal arm a * ,t with mean reward µ * ,t is outside its confidence interval (i. we devised an algorithm called sw-klucb-cf and proved its regret upper bound which is near-optimal in the number of time steps and matches the best known bound for analogous problems in terms of the number of time steps and the number of changes. 
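As one concrete, assumed instance of such a corruption scheme, the snippet below uses randomized response on Bernoulli feedback, which is eps-locally differentially private for a single bit and links the reward mean mu to the observed feedback mean lambda = g(mu); this is a sketch, not necessarily the exact staircase mechanism or the SW-KLUCB-CF implementation studied in the paper.

import numpy as np

def randomized_response(reward_bit, eps, rng):
    """Report the true reward bit with prob e^eps/(1+e^eps), flip it otherwise.

    This single-bit mechanism is eps-locally differentially private and acts as
    an illustrative corruption function g with
    lambda = g(mu) = p*mu + (1-p)*(1-mu), where p = e^eps/(1+e^eps).
    """
    p = np.exp(eps) / (1.0 + np.exp(eps))
    keep = rng.random() < p
    return reward_bit if keep else 1 - reward_bit

def corrupted_feedback_mean(mu, eps):
    p = np.exp(eps) / (1.0 + np.exp(eps))
    return p * mu + (1.0 - p) * (1.0 - mu)

rng = np.random.default_rng(0)
mu, eps = 0.7, 1.0
samples = [randomized_response(int(rng.random() < mu), eps, rng) for _ in range(100_000)]
print(np.mean(samples))                  # empirical feedback mean
print(corrupted_feedback_mean(mu, eps))  # g(mu), the mean the learner observes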
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/800.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/800.txt new file mode 100644 index 0000000000000000000000000000000000000000..975c061f7dcdb4215def92a14b642250a2b6b1a9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/800.txt @@ -0,0 +1 @@ +although convolutional neural networks (cnns) are commonly used for image inputs, they are shown to be effective in tackling a range of temporal sequence modeling problems compared to the traditional recurrent neural networks . for example, wavenet is a convolution-based model for generating audio from text. it utilizes a multilayer dilated convolutional structure, resulting in a filter with a large receptive field. the approximation properties of the dilated convolutional architectures applied to sequence modeling have been studied in . a complexity measure is defined to characterize the types of targets that can be efficiently approximated.this work aims to enhance prior research and contribute new findings. we formulate the approximation of sequence modeling in a manner that parallels classic function approximation, where we consider three distinct types of approximation results, including universal approximation, approximation rates, and inverse approximation. our main contributions are summarized as follows: 1. we refine the complexity measure to make it naturally adapted to the approximation of convolutional architectures. the resulting approximation rate in the forward approximation theorem is tighter compared to results in . 2. we prove a bernstein-type inverse approximation result. it states that a target can be effectively approximated only if its complexity measure is small, which presents a converse of the forward approximation theorem.the readers may refer to for a detailed discussion of related research on the approximation theory for sequence modeling. we formulate the approximation of sequence modeling in a manner that parallels classic function approximation, where we consider three distinct types of approximation results, including universal approximation, approximation rates, and inverse approximation. this inspires us to reshape the target representation ρ (h) into a tensor and consider the approximation between tensors, where this approach was first introduced in our previous workwe make use of higher-order singular value decomposition (hosvd)to achieve this.the universal approximation property (uap) of h l-cnn is proved in, which ensures that the hypothesis space h l-cnn is dense in the target space c. specifically, given a target h ∈ h l-cnn , we are concerned with how the approximation error behaves as we increase k and m .in general, a hypothesis h = m h (m) is usually built up by candidates with different approximation budgets, where a larger m typically results in better approximation quality.the error bound c h (•, m) decreases as m → ∞, and the speed of decay is the approximation rate, which quantitatively shows how the error behaves. c h (h, •) is a complexity measure for the target h. if c h (h, •) decays rapidly, the target h can be easily approximated.jackson-type results are considered forward approximation results, as they allow us to determine the approximation rate given the complexity measure. 
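For intuition about the dilated convolutional structure discussed above, the following sketch (an assumption made for illustration, not the paper's model) stacks 1-D convolutions with exponentially growing dilation and reports the resulting receptive field.

import torch
import torch.nn as nn

# A minimal stack of dilated 1-D convolutions; channel sizes and depth are
# illustrative choices, not taken from the paper.
class DilatedCNN(nn.Module):
    def __init__(self, channels=16, kernel_size=2, num_layers=4):
        super().__init__()
        layers = []
        in_ch = 1
        for layer in range(num_layers):
            dilation = 2 ** layer          # dilations 1, 2, 4, 8, ...
            layers += [nn.Conv1d(in_ch, channels, kernel_size,
                                 dilation=dilation, padding="same"),
                       nn.ReLU()]
            in_ch = channels
        self.net = nn.Sequential(*layers)

    def forward(self, x):                  # x: (batch, 1, time)
        return self.net(x)

# Receptive field of L layers with kernel size k and dilations 1, 2, ..., 2^(L-1):
# 1 + (k - 1) * (2^L - 1); it grows exponentially with depth.
k, L = 2, 4
print(1 + (k - 1) * (2 ** L - 1))          # 16 time steps

model = DilatedCNN()
y = model(torch.randn(8, 1, 100))
print(y.shape)                              # torch.Size([8, 16, 100])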
this leads us to the inverse approximation problem, where we are given the approximation rate of a target and want to determine its complexity.complexity measure based on the previous discussion, we define the complexity measure of targets suitable for the hypothesis space h l-cnn .this theorem shows that a target can be efficiently approximated by a dilated cnn if it has a fast decaying spectrum (small c 1 ) and fast decaying memory (small c 2 ).it suggests that a target can be well approximated by the model only if it has small complexity measure, which in turn implies that the target exhibits good spectrum regularity and rapid decay of memory. combining the two results, we obtain a complete characterization of h l-cnn : a target in h can be efficiently approximated by: linear temporal cnns if and only if it has fast decaying spectrum and memory. temporal cnns are adapted to approximate sequential relationships which have regular spectrum (small c 1 ) and decaying memory (small c 2 ), that is, we have a rate estimate as in theorem 1 if and only if the target has small complexity measure c 1 and c 2 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/801.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/801.txt new file mode 100644 index 0000000000000000000000000000000000000000..882d3b9667d5263744a9fc087de42127a40474a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/801.txt @@ -0,0 +1 @@ +additive manufacturing (am), commonly known as 3d printing, is a rapidly expanding technology that offers efficient and cost-effective production of intricate designs, surpassing traditional manufacturing methods. however, the success of am heavily relies on optimizing process parameters and identifying optimal designs. this is where artificial intelligence (ai) revolutionizes the am industry. one crucial factor in am is the selection and optimization of process parameters like temperature, speed, and layer height. ai aids in identifying optimal process parameters by analyzing data from diverse sources such as machine sensors, simulation software, and historical data. machine learning algorithms detect patterns and correlations between process parameters and product quality, optimizing efficiency and cost-effectiveness. quality control is another vital aspect of am, which can be challenging due to design complexity and parameter variability. ai assists in identifying defects and anomalies by analyzing sensor and camera data, facilitating real-time monitoring, feedback, and proactive defect correction.am offers unparalleled design freedom, enabling the production of complex shapes unattainable through traditional means. however, designing for am requires expertise in the technology and an understanding of design constraints and possibilities. ai aids in design optimization by generating and evaluating designs using generative algorithms and optimization techniques. this streamlines the process, identifying designs that meet specifications while minimizing production time and costs.researchers have proposed innovative approaches using machine learning in am applications. for example, kononenko et al. proposed the utilization of acoustic emission (ae) signals and machine learning techniques for in situ fracture identification of am-fabricated parts. by distinguishing crack ae events from background noise, they built classification ml models achieving up to 99% accuracy. mukherjee et al. 
presented a novel method to obtain subsurface temperature distribution metrics during laser melting, utilizing in situ synchrotron x-ray diffraction observations and supervised machine learning surrogate models. hemmasian et al. used flow-3d simulation software to create datasets and trained a convolutional neural network to predict the three-dimensional temperature field based on process parameters and time step inputs.as ai expands, transparency and interpretability become vital. explainable artificial intelligence (xai) addresses this concern by providing insights into ai algorithms' inner workings and decision-making processes. xai has broad applications across various industries, including additive manufacturing. by implementing xai, manufacturers gain visibility into the factors influencing the quality of finished products, such as temperature, humidity, and printing speed. analyzing this data enables process optimization for consistent quality and waste reduction. xai also evaluates finished product quality, detecting defects and errors in the printing process in real-time. with prompt feedback on product quality, manufacturers ensure adherence to required standards. xai can analyze data from 3d printers, identifying patterns indicative of impending equipment failure. by detecting potential issues early, manufacturers can conduct maintenance and repairs, minimizing downtime and enhancing productivity.this study investigates the influence of specific input parameters, namely infill percentage, layer height, extrusion temperature, and print speed, on the resulting tensile strength in additively manufactured specimens. the primary aim of this research is to gain a deeper understanding of the relationship between these input parameters and tensile strength, and to identify the key parameters that significantly affect the performance of the additive manufacturing process. the impact of the "infill percentage" input parameter on the predictions of a trained machine learning model is examined using the partial dependence plot (pdp) method from the shap library. the pdp allows for an investigation of the relationship between a specific input feature, in this case, the "infill percentage," and the output of the machine learning model while keeping all other input features constant at a particular value.the findings derived from the pdp plot, illustrated in figure4, reveal the association between the "infill percentage" input parameter and the output of the machine learning model. the line depicted in the plot represents the average effect of the "infill percentage" input feature on the model output, with all other input features held at a constant level. from a research perspective, the pdp plot offers valuable insights into the significance of the "infill percentage" input feature in predicting the machine learning model's output. researchers can leverage the pdp plot to optimize the "infill percentage" input feature to achieve the desired output from the machine learning model. the resulting plot is a partial dependence plot (pdp) that illustrates the influence of the "infill percentage" feature on the model's predictions, as depicted in figure5. the result interpreted from the pdp plot shows the relationship between the "infill percentage" input feature and the output of the machine learning model while accounting for the shap values. 
this means that the pdp plot displays the average effect of the "infill percentage" input feature on the model output, while taking into account the interactions between the "infill percentage" feature and the other input features.the partial dependence plot depicted in figure7shows the average effect of changing the "infill percentage" feature on the model's predicted output, while holding all other features at their mean values. the shap value overlay on the plot indicates how much of the observed effect of changing the "infill percentage" feature is due to the "infill percentage" feature itself (i.in the context of the shap library, the gam model is employed to generate explanations for predictions made by a black-box machine learning model. the variation in input feature importance after implementing the gam model may arise because the gam model assigns different levels of importance to certain input features compared to other models. this divergence could be attributed to the gam model's ability to capture complex relationships between input features and the output variable that other models may not capture. as a result, the changes in input feature importance observed after implementing the gam model in this case can be attributed to the model's capacity to grasp intricate relationships between the input features and the output variable, ultimately leading to a more accurate representation of their true relationship. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/802.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/802.txt new file mode 100644 index 0000000000000000000000000000000000000000..e18f2f28b1898a35e2a0447c046c8efb2d2638e9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/802.txt @@ -0,0 +1 @@ +recent progress in large language models (llms) has resulted in a rapid increase in the ability of models to produce convincingly human-like text, sparking worries that llms could be used to spread disinformation, enable plagiarism, and maliciously impersonate people. as such, researchers have begun to develop methods to detect ai generated text. these include watermarking algorithms, which subtly modify the outputted text to allow for better detection, given that the detector has sufficient access to watermarking parameters. differing from previous work, where the focus is on determining if text has been produced by a watermarked model, here we study the problem of if a language model has been watermarked. critically, our black-box algorithms only require querying the model and do not necessitate any knowledge of underlying watermarking parameters., 2023). a large sequence model s over a vocabulary t is a map from a finite sequence of tokens t * to a set of logits over all tokens l ∈ r |t | , along with a sampler r : r |t | → ∆t that randomly outputs a token based on the output logits. a watermark w s with secret key s over a vocabulary t is a map w s : l t → l t , where l t is the set of llms with vocabulary t . if, for any pair of such llms and all keys s, w s (l a ) is identical to w s (l b ) up to the same token permutation, then each w s is a principled watermark. for instance, the identity watermark is a valid principled watermark, but is not algorithmically detectable within a sequence of text. 
a watermark w s is (p, p )detectable for a model l, some expression p , and p ∈ (0, 0.5], if there exists a detector d s : t n × t n → {0, 1} that runs in p (n) time and correctly distinguishes between sequences of length n generated by l and w s (l) with probability at least 1 2 + p. while quality is somewhat subjective, if it is impossible to distinguish watermarked text from standard text generated by a llm, then the watermark must not affect any perceivable metric of quality. let w s be a watermark where s has length m and l is a llm, p and q are polynomials, n ≤ p(m), and q(m) ≥ 2. w s is quality-preserving if, for all l, m, n, a, p, and q, a is correct with probability at most 1/2 + 1/q(m) when given texts of length n generated by l and w s (l), over the randomness of llm generation and the choice of s.such a watermark is detectable, and perfectly preserves quality, though it fails the desideratum that watermarks should still be detectable after the text is modified slightly.though other watermarks are less sensitive to changes to the text, all known watermarks are vulnerable to attacks that preserve generated text quality while evading detection(sadasivan et al. suppose we have two generated texts from a model l and watermarked model w s (l). the easier it is for a detector with access to underlying watermark seed to detect the watermark, the easier it is for a detector without access to the seed to detect it.if the detector correctly distinguishes between positive and negative distributions is at most 1 2 + p, we can use the bound fromsadasivan et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/803.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/803.txt new file mode 100644 index 0000000000000000000000000000000000000000..67164c9f3bac6a270ffbd872402770f7ade3d2b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/803.txt @@ -0,0 +1 @@ +federated learning (fl) has emerged as a promising approach for privacy-preserving deep learning by distributing both data collection and model training to the edge. in fl, a group of edge devices, e.g., iot devices, collaboratively optimize deep learning models without sharing any information about their data. instead of sending their data, the clients train their models locally and periodically send model updates to a central server for aggregation . in real-world applications fl encounters two significant challenges, namely, the communication burden between the server and the edge devices and the heterogeneity of the devices in terms of computational and power resources - . recent studies in federated learning have disregarded these two significant limitations , utilizing large homogeneous deep neural networks. however, in practical iot settings, the devices are characterized by limited computational and communication resources, which force them to train smaller and computationally lighter neural networks, thus affecting heavily their performance .to increase communication efficiency in federated learning, compression schemes, e.g., sparsification , quantization and client selection , have been widely explored. however, these approaches may result in loss of accuracy and introduce bias towards certain devices. also, focusing on the hardware heterogeneity of the devices, one straightforward approach is to select only clients with adequate computational resources, while disregarding those with limited hardware, which may still possess valuable information. 
alternatively, a model architecture could be employed to fit the minimum capabilities of all clients, but this may constrain the overall representation ability of the global model . another direction relies on deploying different models across clients adapted to their computational resources. to exchange information over heterogeneous models the knowledge distillation technique is applied to enhance the global model with an ensemble of local predictions . however, implementing such approaches can be challenging due to the complex aggregation rules required on the server or the need for clients to share a public proxy dataset, which may not be feasible for devices with limited memory , .contribution: unlike the existing literature, in this study, we examine the fl under a different perspective, focusing on the structure and properties of the model employed by the edge devices. to be more precise, rather than employing a conventional deep learning network as referenced in prior work , we utilize the deep equilibrium (deq) models . we argue that these models are characterized by unique properties providing solutions to open-problems in fl including the communication burden between the devices and server and the computational heterogeneity of the local devices.although, the deep equilibrium models have been explored in numerous centralized settings - , to the best of our knowledge, our study is the first to investigate a connection between deq models and federated learning. more specifically, it is shown here that expressing the entire architecture of a deep learning model as an equilibrium (fixed-point) computation of a single layer or unit (e.g., a residual block) results in an efficient infinite-depth neural network that can offer substantial benefits to federated learning. this compressed representation requires notably less memory, thus enabling efficient communication of model updates between the server and the devices, while achieving competitive performance. furthermore, the complexity of the deq model can adapt dynamically based on the edge devices' computational capabilities by adjusting the number of fixed point iterations required to compute the corresponding equilibrium point. finally, we propose a novel, weighted average fusion rule that takes into account heterogeneous edge devices that employ different numbers of fixed point iterations, thereby utilizing this information effectively. note that our proposed method is applicable to any federated learning algorithm. furthermore, the complexity of the deq model can adapt dynamically based on the edge devices' computational capabilities by adjusting the number of fixed point iterations required to compute the corresponding equilibrium point. in light of this, a deq model is defined as a fixed point function f θ (z, x) = σ(wz + ux + b), which obeys the following relation.1) forward pass -calculating fixed points: during the training and testing procedures of edge device n, a large number of fixed point iterations needs to be computed based on the transformation map of deq model in (4) to estimate the fixed point z ⋆ = f θ (z ⋆ , x).let z ⋆ = f θ (z ⋆ , x) be the fixed point estimated during the forward pass given the input x from the local dataset of device n and l(z ⋆ ) = l(z ⋆ , y) be a loss function using only an example-target y. 
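A minimal sketch of the DEQ forward pass under the definition above, assuming sigma = tanh and a contractively scaled weight matrix so the iteration converges; the parameter names and tolerance are illustrative, and the backward pass via implicit differentiation is omitted.

import numpy as np

def deq_forward(x, W, U, b, num_iters=50, tol=1e-6):
    """Forward pass of a DEQ layer: iterate z <- tanh(W z + U x + b) to a fixed point.

    The number of iterations plays the role of the per-device computational
    budget discussed above: weaker edge devices can simply run fewer iterations.
    """
    z = np.zeros(W.shape[0])
    for _ in range(num_iters):
        z_next = np.tanh(W @ z + U @ x + b)
        if np.linalg.norm(z_next - z) < tol:
            return z_next
        z = z_next
    return z

rng = np.random.default_rng(0)
d_in, d_hidden = 8, 16
# Scaling W down keeps the map approximately contractive so the iteration
# converges; this is an illustrative choice, not a guarantee from the paper.
W = 0.3 * rng.normal(size=(d_hidden, d_hidden)) / np.sqrt(d_hidden)
U = rng.normal(size=(d_hidden, d_in)) / np.sqrt(d_in)
b = np.zeros(d_hidden)
x = rng.normal(size=d_in)

z_star_cheap = deq_forward(x, W, U, b, num_iters=5)    # low-resource device
z_star_full  = deq_forward(x, W, U, b, num_iters=100)  # high-resource device
print(np.linalg.norm(z_star_cheap - z_star_full))

# Only the single transformation (W, U, b) plus an output layer would need to be
# communicated, rather than the parameters of a k-layer network.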
advantages (properties) of the deq models in fl communication efficiency: one of the most important benefits that stems from the adoption of a deq neural network model in the fl framework, is the significant reduction in the number of model parameters that need to be transmitted. in other words, if we consider that the transformation function f θ (•) is defined in terms of m parameters, then the total number of parameters required for the transmission of a deq model is somewhat more than m , if we also consider the parameters of the output layer. on the other hand, if a k-layer deep neural network model was employed in the federated learning framework, then each layer typically employs a number of parameters of the order of m per layer and the total number of parameters required for the transmission of one model would be close to k • m , i.reduced memory requirements: an obvious benefit that results from the adoption of deq neural network models in the federated learning framework, is that both the clients as well as the parameter server need a significantly smaller amount of memory, to store and process the models.support for heterogeneous devices: another important advantage of the use of deq models in the fl framework is that it naturally enables the incorporation of edge devices with significantly different processing capabilities. in order to explain this property, it is important to note that a deq model derived by following the procedure in section iii-b corresponds to an infinite depth neural network, if the algorithm used to compute the fixed point performs enough iterations to achieve convergence.on the server-side, considering that the deq models sent by the devices have been derived by heterogeneous edge devices that each may employ a different number of fixed point iterations, a weighted average fusion rule is proposed that effectively utilizes this information.n is the deq model sent to the server by client n and k n denotes the number of fixed point iterations performed by client n.25.regarding the resnet architecture, in figure1, we compared the fl-resnet method with the proposed federated deep equilibrium learning approach, which utilizes a deq model with varying numbers of residual blocks (one, two, the results validate that the connection of deq models with federated learning offers several advantages. we evaluated the performance of the fl-deq-2-residual-blocks method in two cases:(1)where all edge devices performed 10 fixed point iterations (homogeneous scenario), and (2) where the number of fixed point iterations varied depending on the computational resources of the edge devices (heterogeneous scenario). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/804.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/804.txt new file mode 100644 index 0000000000000000000000000000000000000000..8423de5b774bd90b2a8888c99e572d7c1c0c8560 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/804.txt @@ -0,0 +1 @@ +few-shot learning, which aims to learn with limited data, has become increasingly popular in response to the lack of large labelled datasets for many practical applications. models trained using self-supervision (where a deep neural network (dnn) is trained with pseudo-labels that define pre-text tasks) have demonstrated strong success on few-shot learning tasks, with contrastive objectives among the most successful. 
contrastive methods' efficacy is attributed to learning an inductive bias in the form of invariances to applied augmentations . for example, affine transformation invariance is typically useful for object category recognition, where pose is a nuisance factor . however, the ideal type and degree of invariance is not known apriori, and varies across downstream tasks. so, contrastively trained invariant features do not provide a one size fits all solution . for example, a model learned to be pitch-shift invariant would likely fail a task which relies on pitch sensitivity features. to learn a model which can successfully solve various downstream tasks, we require a feature representation with both invariant and transformation-sensitive properties. we propose a parameter-efficient multi-task learning framework to address this limitation of existing contrastive learners. we simultaneously learn a contrastive objective (to learn augmentation invariances) and a transformation prediction objective (to learn augmentation sensitivity), thus providing a more flexible feature for downstream tasks. our contributions include: 1) a novel multi-task learning framework; 2) a parameter-efficient solution to multi-task learning based on task-agnostic and task-specific features; 3) evaluation of fewshot classification over 10 datasets, spanning audio and speech domains; 4) analysis of learnt invariance strength and its relation to performance. code can be found here. models trained using self-supervision (where a deep neural network (dnn) is trained with pseudo-labels that define pre-text tasks) have demonstrated strong success on few-shot learning tasks, with contrastive objectives among the most successful. contrastive methods' efficacy is attributed to learning an inductive bias in the form of invariances to applied augmentations. we simultaneously learn a contrastive objective (to learn augmentation invariances) and a transformation prediction objective (to learn augmentation sensitivity), thus providing a more flexible feature for downstream tasks. our contributions include: 1) a novel multi-task learning framework; 2) a parameter-efficient solution to multi-task learning based on task-agnostic and task-specific features; 3) evaluation of fewshot classification over 10 datasets, spanning audio and speech domains; 4) analysis of learnt invariance strength and its relation to performance. one key trend is the success of methods which utilise augmentations for learning, including many contrastive methodsas well as predictive ones. additionally, we propose an extension to metaaudio, including 3 new speech datasets suitable for few-shot classification. multi-task learning & invariances: most highly related to this work are others which deal with multi-task learning and/or the study of invariances/equivariances learnt by selfsupervision. in particular, our work relates to:, which showed that different computer vision tasks benefit from different (in)variances; hypersimclr, which demonstrated that a hypernetwork can adapt a representation to the (in)variances needed for downstream tasks; and augselfthat also investigates co-learning contrastive and predictive self-supervision in computer vision.motivated by the intuition that solely learning invariances to augmentations may be suboptimal for specific downstream tasks, we propose to co-learn opposing objectives. 
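as an illustration of the two objectives being co-learned here, the sketch below combines an nt-xent style contrastive loss (invariance to augmentations) with a cross-entropy loss for predicting which augmentation was applied (sensitivity). it is only a toy numpy example: the embedding dimension, temperature, task weight lam, and the use of random arrays in place of a real audio encoder are all assumptions.

# illustrative sketch of the two co-trained objectives: contrastive (invariant)
# plus transformation prediction (augmentation sensitive).
import numpy as np

def nt_xent(z1, z2, temperature=0.5):
    """contrastive loss over a batch of paired embeddings z1[i] <-> z2[i]."""
    z = np.concatenate([z1, z2], axis=0)
    z = z / np.linalg.norm(z, axis=1, keepdims=True)
    sim = z @ z.T / temperature
    np.fill_diagonal(sim, -np.inf)                  # a view is not its own positive
    n = len(z1)
    positives = np.concatenate([np.arange(n, 2 * n), np.arange(n)])
    log_prob = sim - np.log(np.exp(sim).sum(axis=1, keepdims=True))
    return -log_prob[np.arange(2 * n), positives].mean()

def transform_prediction_loss(logits, aug_labels):
    """cross-entropy for predicting which augmentation produced each view."""
    logits = logits - logits.max(axis=1, keepdims=True)
    log_softmax = logits - np.log(np.exp(logits).sum(axis=1, keepdims=True))
    return -log_softmax[np.arange(len(aug_labels)), aug_labels].mean()

rng = np.random.default_rng(0)
z1, z2 = rng.normal(size=(32, 64)), rng.normal(size=(32, 64))   # two augmented views per clip
aug_logits, aug_labels = rng.normal(size=(32, 4)), rng.integers(0, 4, size=32)
lam = 0.5                                                       # assumed task weighting
total = nt_xent(z1, z2) + lam * transform_prediction_loss(aug_logits, aug_labels)
print(total)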
we conjecture that different downstream tasks benefit from different type and strength of invariance, and that providing both augmentation sensitive and invariant features will lead to superior performance. objective: we introduce the notation t φ (t aug φ )aug∈a to denote applied augmentation pipelines, where t φ is a composition of individual augmentations (t aug φ ) and their parametrisations (φ), and a is the set of augmentations used during training (e. competitors: we compare the following methods: contrastive learning only; multi-label transformation predictive learning only; mt-simple denoting our multi-task loss on a simple resnet backbone; mt-split denoting a resnet backbone split at the final layer with one loss applied to each branch; mt-{bn, series, parallel} denoting a parameterefficient multi-task split with shared resnet blocks and taskspecific bn, series, or parallel adapters. in particular, we note that: 1) different heads of our multi-task approaches do indeed learn significantly different degrees of invariance to applied augmentations; and 2) on average, even the simple multi-task approaches decrease invariance strength compared to the contrastive baseline. this illustrates why the presence of both is advantageous for the numerical results in tab 3, and shows how downstream tasks can easily tune the degree of importance attributed to each feature by learning the linear combination, removing the need for human intervention at either the pre-train or downstream task steps. leveraging this insight, we developed a novel multitask learner that exploits both contrastive and predictive learning, providing both augmentation invariant and augmentation sensitive features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/805.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/805.txt new file mode 100644 index 0000000000000000000000000000000000000000..193be8f564eb84a859522c0b57b17e9e79a9e4c5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/805.txt @@ -0,0 +1 @@ +innovations in computer science shape the lives of everyone in our society. to create innovative solutions tailored to everyone, it is important that all groups of society are represented in the creation of these solutions. however, this is still not the case in the field of computer science (cs). having an awareness of the lack of representation and the different barriers people face in cs are fundamental in helping the field target those challenges and becoming more equitable and inclusive .statistics from europe show that women are still highly underrepresented in cs. according to eurostat , the percentage of female specialists in information and communications technology has evolved from 17% in 2012 to 19,1% in 2021.at university level in stem, the percentage of female bachelor, master, and phd students is 20%, while the percentage of female professors is 15%.specifically for the department of computer science at uit the arctic university of norway, only 13% of students, 14% of phd candidates and 21% of faculty members are female.better balance in informatics (bbi), a program led by the cs department at uit and funded by the research council of norway, aims to rectify this imbalance and create a more diverse learning environment for computer science. 
bbi is connected and builds upon an ecosystem of national and international projects which address gender balance in cs acting on different levels: school ( , ), university (, ), industry ( , , ), and the interplay of these levels ( ).bbi aimed to identify some of the reasons that led to the current gender dynamics in our cs department, and then propose measurements that could address those reasons. hearing directly from the cs students (bachelor, master) seemed to be a sensible way for us to identify those reasons. so, bbi organized structured discussion sessions, where we invited cs students (bachelor, master) to share their thoughts about:1. the reasons they picked cs for their studies, 2. their current experience with the cs studies, 3. their intention to pursue an academic career in cs, and 4. ways to make the cs community more diverse and inclusive.the answers of the students illuminated points of intervention, which could lead to a balanced flow of students into cs undergraduate program, a study environment that embraces diversity, and a balanced flow of students into higher levels of the cs academia.this paper presents the methodology ( §2) we employed to organize the discussion sessions, to collect responses, and to report the results. we then present the specific questions we asked the students and the analysis of their answers ( §3). finally, we list the recommendations ( §4) submitted to the cs department for achieving a gender-balanced environment, we discuss related work ( §5), and we conclude ( §6) with reflections about the discussion sessions.the answers of the students illuminated points of intervention, which could lead to a balanced flow of students into cs undergraduate program, a study environment that embraces diversity, and a balanced flow of students into higher levels of the cs academia.the end goal of the discussion sessions was to identify points of interventions that could increase the gender balance among the incoming cs students, the current cs students, and the cs graduates that are interested in entering the cs academia. the invitation to participate in the bbi discussion sessions was open to all students of the cs department, independently of their gender (female, male, non-binary).correlating their answers with their gender, we identified action items that could lead to a balanced flow of students into cs undergraduate program, a study environment that embraces diversity, and a balanced flow of students into higher levels of the cs academia. observing figure1, we can identify the reasons the minority chose cs studies: the problem solving aspect of cs, the flexibility of the cs studies, the job opportunities that cs graduates enjoy. to achieve this, we need to assess the student's experience within the cs department and identify aspects that can be improved to accommodate gender diversity.the late deadline for applying to a phd, which is not synchronized with the job-search period of senior students, is another reason why current students do not select a phd program. these actions aim to achieve a balanced flow of students into cs studies, a balanced environment within the cs department, and a balanced flow towards cs academia.the discussion sessions with the students helped us identify action items to achieve (i) a balanced flow of students into cs studies, (ii) a balanced environment within the cs department, and (iii) a balanced flow towards cs academia (i. 
a recent aggregated studycollects 22 measures and strategies for cs educators in secondary education to sustain the interest of female students in the cs classes. our observations are also aligned with a studyconcluding that: family and friends have high impact to the decision of girls to study cs, courses for cs should be introduced earlier at school, one should highlight the problem-solving aspect of cs and surface female role models. a successful strategy for increasing the percentage of cs female undergraduate students at cmu (close to 50% was the percentage of female students that entered the cs program in 2018) is presented in. then the authors present ways to address those reasons, by communicating different perspectives of cs and engaging female students to various cs experiences.to understand how the gender balance in our cs department can be improved, we organized discussion sessions among cs undergraduate students, who shared their thoughts about: the reasons they picked cs for their studies, their current experience with the cs studies, their intention to pursue an academic career in cs, and ways to make the cs community more diverse and inclusive. from their answers we identified action items for achieving a balanced flow of students into cs undergraduate program, a study environment that embraces diversity, and a balanced flow of students into higher levels of the cs academia. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/806.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/806.txt new file mode 100644 index 0000000000000000000000000000000000000000..027b3940b9a9284f5857a56e23b942cf4674bd44 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/806.txt @@ -0,0 +1 @@ +aris, short for augmented reality and interactive storytelling, is an authoring tool as well as an iphone application that work together to create mobile, locative, narrative-centric, interactive experiences. as a design response to a number of theories and examples from various disciplines of learning science, curriculum studies, media studies, contemporary social media and game design, it serves a complex continuum of user-designers including artists, teachers, students, administrators and researchers who each use it in a different capacity ranging from a rapid prototyping tool for interactive stories to a mobile scientific data collection tool.as the aris software has matured and become increasingly useful, much of the project's effort has become directed toward the design of communication tools, collaborative processes and community building efforts for each of the various audiences in effort to distribute the design, development, and dissemination of aris and it's content across a large and hopefully stable group of independent collaborators.in this manuscript i wish to highlight specific topics from the education and design literature that make a case for a specific set of attributes that informed aris's development. then attention will turn to the project narrative from a first person perspective, highlighting lessons in community building, agile development processes and iterative, user centric design methods. in the final section, aris adoption statistics and outside project collaborations will be discussed. 
as a design response to a number of theories and examples from various disciplines of learning science, curriculum studies, media studies, contemporary social media and game design, it serves a complex continuum of user-designers including artists, teachers, students, administrators and researchers who each use it in a different capacity ranging from a rapid prototyping tool for interactive stories to a mobile scientific data collection tool.as the aris software has matured and become increasingly useful, much of the project's effort has become directed toward the design of communication tools, collaborative processes and community building efforts for each of the various audiences in effort to distribute the design, development, and dissemination of aris and it's content across a large and hopefully stable group of independent collaborators. following the metaphor of conversation, the ad-hoc team who responded to class project pitch began selecting the characters and writing the branching dialog that would compose a game about the pop art movement in new york during the late 1950s and 60s. starting from teacher needs to find creative materials that teach to specific state standards, the team went through a process of brainstorming game ideas that would be practical and within scope of available technology.hernandez and was a locative puzzle game, in the line ofmyst (brøderbund, 1993), about a time traveling archaeologist who had been lost in the past and needed to be located through following clues that were left in the design and artifacts of the wi state capital building. because the game would be played indoors, gps would not be available for location detection and we needed to come up with other ways of making sure the narrative remained linked to the space the player was in. along these lines, we were curious about the design around alternate reality games such as i love bees(microsoft & 42 entertainment, 2004)and year zero(nine inch nails & 42 entertainment, 2007), deciding to make an alternate reality game that could be released to a public audience in episodes. the game was designed to pull in members of the general population though cryptic posters placed around madison that would direct them to contact the design team if they could determine it's meaning. in the following episodes they would perform a set of tasks for the nac that would slowly reveal the back-story of the game by having them perform various tasks around madison. even though the team had established upfront that anything created during the project would be given away and that grant funding would likely never result in a sustainable tool, the trip to utah for the open education conference and posting the code to a public repository showed we meant it. possibly due to a background in mathematics and previous exposure to a mobile design platform, chris was able to easily navigate the complex data structures of the aris authoring tool well enough to engage in a production ready game, even requesting access to the raw sql interface for final details.mentira is a place-based augmented reality game using the augmented reality for in what is becoming a habit, the aris team set high expectations for what would be shown at the yearly gls conference. 
this time reminded the team that if a project like this was going to survive it either needed to generate revenue directly, which was impossible because we had already given it away for free, or it would need to create partnerships with funded projects that could sustain its continued development. in these few short days, using a modified version of the scrum development process with only 2 hour iterations, the team produced 5 new game prototypes 5 , improved the existing dow day game greatly and made dozens of changes to the authoring tool that were released to the public.through rapid prototyping, aris game jams have contributed a formative set of mobile media game design verbs and mechanics that will inform future work. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/807.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/807.txt new file mode 100644 index 0000000000000000000000000000000000000000..858fc4d80f70f3a55494a5e89b1ae41af342ad08 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/807.txt @@ -0,0 +1 @@ +in this section, we describe the hardware setup for our nodes as well as the deployment information. this can be attributed to practical issues: i) limited internet and broadband connectivity in large rural farms, ii) node communication issues due to ground morphology, large distances, and varying conditions throughout the season (e., growing crops), iii) sparsity of powered locations for gateways and iot hardware, iv) access difficulty for on-site troubleshooting and maintenance, and v) lack of trained manpower that is often still needed for updating and maintenance of iot systems.furthermore, there can be unique distinctive properties between adjacent fields or fields within the same iot sensor network, such as: i) soil types and cover crops, ii) planted crops, iii) treatment strategies including applications of fertilizers, pesticides and insecticides, and more. in the former case, information of soil moisture content over time and after significant weather events (e. the dataset comes from iot nodes with temperature, humidity, and soil sensors (measuring temperature, conductivity, and volumetric water content). in addition, the dataset contains data from adjacent weather stations providing various measurements, including rain events. for the described dataset we present the measurements from deployed nodes with the teros-12 commercial soil sensors from the meter group.in addition to the soil information collected by the connected sensors, our nodes transmit temperature and humidity (%𝑅𝐻 ) information as collected by an on-board sensor.the dataset contains measurements collected by three nodes and two weather stations over a duration of five months.one of the analyses that can be done is on the effect of rain events on the temperature and water content at different soil depths for a particular crop type.for this analysis we use part of data collected from the iot node with the soil sensors at farmer 1 (soy field) and plot the vwc, converted into percentages, on the y-axis for the soil sensor at 6-inch depth and the one at 12-inch depth using the x-axis for time (dates). 
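a minimal sketch of the depth-comparison plot described above, assuming a csv export of the farmer 1 (soy field) node with hypothetical column names timestamp, vwc_6in, and vwc_12in; the released dataset's actual schema may differ.

# illustrative sketch of plotting vwc at two soil depths over time.
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv("farmer1_soy_node.csv", parse_dates=["timestamp"])   # hypothetical file name
df = df.set_index("timestamp").sort_index()

fig, ax = plt.subplots(figsize=(10, 4))
ax.plot(df.index, 100 * df["vwc_6in"], label="VWC at 6 in depth")     # convert fraction to %
ax.plot(df.index, 100 * df["vwc_12in"], label="VWC at 12 in depth")
ax.set_xlabel("date")
ax.set_ylabel("volumetric water content (%)")
ax.legend()
fig.autofmt_xdate()
plt.show()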
several interesting observations can be made by studying the trends of the plotted measurements, such as, a) the water content difference between the 2 depths can be up to 5%, b) even though rain events cause a noticeable variation at 6 inches, not all rainwater ends up at the 12-inch depth, and c) a higher rain event (e., the one occurred on 7/15) may not have as big an impact on the soil water content if the water content is above a certain level, in this case above 35%.in this work, we present an agricultural soil and weather dataset collected with nodes and stations deployed in corn and soybean fields across two counties in north-central indiana. our datasets include measurements collected throughout the farming season, in soybean and corn fields, and show the variations and dependencies between the environmental and soil conditions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/808.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/808.txt new file mode 100644 index 0000000000000000000000000000000000000000..fac0fe9f559c84f35e89b618e292dca4b31268fa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/808.txt @@ -0,0 +1 @@ +the last fifteen years have been characterized by the large diffusion of the internet and social media, such as facebook and instagram, along with the tendency of users to share their data, both consciously (through posts, photos, etc.), and unconsciously (accepting the terms of service, allowing cookies when navigating the web, etc.). all this information has become incredibly valuable when coupled with big data practices because that allows companies that hold it to exploit it, extracting new information or behavioral models from it , in order to influence and predict the users' behavior and so to capitalize on the advertisements.episodes in which this influence has been used outside the logic of business are well-known, from trying to influence the result of an election, like the cambridge analytica case , to mass-surveillance, like revealed by edward snowden .therefore it is important to note that threats to our autonomy do not just undermine our integrity as individuals, but are also a serious risk to society as a whole, making it essential to discuss future developments of surveillance capitalism. when mentioning big data and the correlated mechanisms, i will be referring to the operations of data extraction from all the possible sources, and to the operations performed on the data in order to analyze and extract patterns useful for behavior prediction and therefore manipulation. 
big data is essential because it can be considered the turning point for concerns about autonomy: forms of manipulation have long been enforced through traditional media (like newspapers, radio, and lastly via television), but the amount of data gathered with big data enables a "tailored influence", taking it to another order of magnitude of effectiveness.the first reason why i believe my concerns about our autonomy in the future are valid is that the big tech companies that benefit from surveillance capitalism will only increase their revenues if they are more able to model, predict and influence our behavior and choices, therefore they will try to reduce our autonomy as much and as fast as possible.the second reason why i think we will face a reduction in our autonomy is that all of the mechanisms that surveillance capitalism uses, meaning the ability to gather incredibly large amounts of data from the users, are embedded in the social tissue through social media, such as facebook, and smart assistants, such as amazon alexa. i think that it is enough to ask ourselves some questions such as: "could someone live without using social media nowadays?", "could a politician run a campaign without using facebook?" and "could i go somewhere new without using google maps?" to realize that the answer to the question "can we actually choose to be free from being subjects of surveillance capitalism?" is negative. even though studies (,) have shown how fomo can be linked with negative effects on one's mood and life satisfaction, this phenomenon highlights the fact that surveillance capitalism mechanisms (in this particular case social media) have become so powerful to influence us psychologically to promote their usage.an important feature is that mechanisms that are used to influence our behavior and choices, for example, what is shown in our facebook feed or which articles amazon recommends to us, are fed with data that the user generates without being conscious of it (an example could be the area on the screen of the smartphone that is touched) and that these algorithms work without the users noticing. this gives surveillance capitalism an unlimited scope of action and furthermore makes it more difficult for us to avoid being subjects of both data gathering and manipulation, giving us little or no actual space when we are completely free from being influenced. in this sense, our smartphones represent the first way of gathering data about us: we take them with us all of the time and they are able to know our location, they have access to our audio to detect if we are calling the smart assistant, and they have become the filter through which we interact with the world.the scope of surveillance capitalism does not end with everything that can be gathered from us, but it also extends to what can be derived from the data thanks to big data analyses. 
an example of future developments is the insurance sector: gathering data from our cars, like the way we drive and where we drive, big data would be able to understand how much will be the chance of us getting into a car accident, and therefore the insurance will be able to require a higher fee even if we always respect the traffic regulations.a possible reason for supporting surveillance capitalism practices is that they are the reason why, nowadays, we have access to an enormous variety of services and content for free (not taking into account the cost of having an internet connection, which is negligible and has been diminishing since its beginning) or products for a very moderate cost (like smart assistants).another argument against the limitations of surveillance capitalism practices is that the amount of data that is gathered is so huge and can unlock a knowledge so deep about us that it is possible to influence us to act for what is perceived as our own good. the first reason treated is the fact that it is thanks to surveillance capitalism that we have access to so many contents and services for free, but i have shown that is intrinsically wrong to define those services as "free" since we don't pay with money but with our data, that is sold to third parties and it is also used with the scope of influencing us and undermining our autonomy, therefore having a doubled price.in conclusion, it is fair to say that the current situation with regard to surveillance capitalism practices, like massive data gathering, behavioral prediction, and manipulation, already rises a lot of concerns for our autonomy, and it is expected to get worse, following the trend that it has followed since the early days if no action is taken against such techniques. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/809.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/809.txt new file mode 100644 index 0000000000000000000000000000000000000000..d0079dc0a3af8e5f09b3295808248fc412ac4dc2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/809.txt @@ -0,0 +1 @@ +in recent years several internet websites have become the hubs for communities where users can produce, consume, and disseminate content without central oversight. examples of these usergenerated content (ugc) websites include major social media platforms, like facebook or twitter, or global online knowledge production communities like wikipedia, which is known as a model for the production of vast reliable, high-quality knowledge (yasseri and menczer, 2021).however, a negative consequence of the popularity of ugc websites is that their low barriers to access, combined with the lack of supervision from experts or other gatekeepers, results in the proliferation of false or misleading information on the web as a whole (wardle and derakhshan, 2017;lazer et al., 2018).false or misleading content often spreads on social networking platforms (amoruso et al., 2020;castillo et al., 2011;zareie and sakellariou, 2021;grinberg et al., 2019;guess et al., 2019guess et al., , 2020;;allcott and gentzkow, 2017), but there are growing concerns that other ugc communities like wikipedia may be vulnerable to these threats too (sáez-trumper, 2019). this is especially worrisome since wikipedia is one of top most visited internet websites (similarweb ltd, 2022) and a popular source of knowledge (okoli et al., 2014). 
wikipedia contains over 50 million articles in more than 300 languages; in february 2022, the english language edition of wikipedia alone received 781m visits (from unique devices) and was edited over 5m times (wikipedia contributors, 2022c;wikimedia foundation, inc., 2022d). hence, preserving the integrity of wikipedia is of paramount importance for the web as a whole (sáez-trumper, 2019).there are many potential threats to the integrity of knowledge in wikipedia (sáez-trumper, 2019). one common threat comes from vandalism, which is "a deliberate attempt to compromise the integrity of the encyclopedia, often through the insertion of obscenities, insults, nonsense or crude humour, or by page blanking" (wikipedia contributors, 2021).vandalism, however, is not the only threat to the integrity of wikipedia's content. whereas vandalism focuses on defacing existing entries, there exists evidence showing that wikipedia is also targeted by hoaxes, whose aim is to create whole new entries about fake, fictitious topics. an example of a famous wikipedia hoax is the entry jar'edo wens, a fake australian aboriginal deity, which went undetected for almost 10 years before being debunked and deleted (dewey, 2015). but hoaxes remain a threat to wikipedia's content integrity to this day. recently, one of the largest such incidents the platform has ever seen has been discovered on the chinese wikipedia: a user named zhemao wrote 206 fake entries, starting from 2019 until 2022, about russia's history in the middle ages (moon, 2022).hoaxes are thus not to be confused with vandalism; although vandalism is a much bigger threat in scope and size compared to hoax articles, hoaxes constitute a more subtle threat, which has received less attention compared to vandalism.a crucial question that remains unresolved is what drives the creation of hoaxes on wikipedia. because their original authors are aware that these articles are false, hoax articles are different from mere misinformation, but should rather be considered instances of disinformation (wardle and derakhshan, 2017;lazer et al., 2018). as such, understanding the factors that determine the supply of hoaxes on wikipedia could shed light on disinformation in general, including broader threats to the integrity of the web, like state-sponsored propaganda (king et al., 2017;zannettou et al., 2019;golovchenko et al., 2020) and conspiracy theories (starbird, 2017).to bridge this gap, in this paper, we study the role of online attention, in the form of individual page views, in the supply of disinformation in wikipedia. the idea of an economy of attention was first introduced by simon (1971), who observed that human attention is a limited resource that needs to be allocated (goldhaber, 1997). here, to quantify the flow of collective attention to individual topics of knowledge, we take advantage of the unique wikipedia traffic dataset and api. specifically, in this work we seek to answer the following questions: q1. does online attention toward a topic increase the likelihood of disinformation being created about it?q2. operationally, is there a relationship between traffic to wikipedia and the production of hoax articles?to answer these questions, we collected a list of known hoax articles (wikipedia contributors, 2022a) along with their creation timestamps and content. to control for potential confounding factors in the distribution of traffic to wikipedia over time, for each hoax, we considered a cohort consisting of all the legitimate (i.e. 
non-hoax) wikipedia articles that were created on the same day as the hoax. similar to kumar et al. (2016), we find that hoaxes differ from legitimate articles in key appearance features, but do not strongly differ in the number of hyperlinks they contain. next, for each article (either hoax or non-hoax), we parsed its content and extracted all the outlinks, i.e. its neighbors in the wikipedia hyperlink network. the presence of a link between two wikipedia entries is an indication that they are semantically related. therefore, traffic to these neighbors gives us a rough measure of the level of online attention to a topic before a new piece of information (in this case an entry in the encyclopedia) is created.finally, we measure the relative change of traffic in the 7-day period before and after the creation of a hoax and compare this change to that of the articles in its cohort. to preview our results, we find that, on average, online attention tends to precede the creation of hoaxes more than it does for legitimate articles. this observation is consistent with the idea that the supply of false and misleading information on a topic is driven by the attention it receives.in the rest of the paper we discuss related work (section 2), and then describe our methodology (section 3): the details of the data collection process, the comparison between features of hoaxes and legitimate articles, and the pre-processing of the wikipedia traffic data. section 4 discusses the techniques used to quantify online attention and its relationship to the hoax creation, and the statistical procedures performed to asses the results. finally, section 5 summarizes our findings and future directions.all code and data needed to replicate the findings of this study are available on github at github.com/csdl-usf/wikihoaxes.hoaxes are thus not to be confused with vandalism; although vandalism is a much bigger threat in scope and size compared to hoax articles, hoaxes constitute a more subtle threat, which has received less attention compared to vandalism.to answer these questions, we collected a list of known hoax articles (wikipedia contributors, 2022a) along with their creation timestamps and content.in the rest of the paper we discuss related work (section 2), and then describe our methodology (section 3): the details of the data collection process, the comparison between features of hoaxes and legitimate articles, and the pre-processing of the wikipedia traffic data. unfortunately, access to these data was not public due to the nature of the npp process.to collect this list, we queried the wikipedia api using the 'prefix search' endpoint (medi-awiki contributors, 2022a) to collect the titles of the hoaxes residing in the administrative list maintained by wikimedia under the prefix 'list of hoaxes on wikipedia'. this observed behavior can be in part explained by the fact that the wikipedia community started patrolling new pages in november of 2007(kumar et al. 
(2016) who, in addition to appearance features, studied network, support, and editor features for both hoax and legitimate articles (kumar et al.). in summary, hoaxes tend to have more plain text than legitimate articles and fewer links to external web pages outside of wikipedia. recall that the cohort of a hoax is defined as all the non-hoax articles created on the same day it was created. [figure 3: modified z-scores for all hoaxes in our sample relative to non-hoax articles in their cohorts, for the four appearance features we considered.] traffic to wikipedia is known to fluctuate following circadian and weekly patterns, and is likely to depend on a host of additional, unknown factors, such as the relative popularity of wikipedia over the years and the total number and geographic distribution of web users (yasseri et al.). when ∆v/v < 0, attention tends instead to follow the creation of the hoax. figure 4 shows the distribution of the ∆v/v values for each cohort, the cohort mean, and the value of ∆v/v of the corresponding hoax, for a manually selected sample of hoaxes collected from our data. having defined a way to quantify whether traffic to a given article preceded or followed its creation, we want to determine whether hoaxes tend to have a greater ∆v/v than legitimate articles in general. however, if hoaxes do not differ from legitimate articles, then on average the difference between the ∆v/v of a hoax and that of its cohort should be zero. this indicates that, on average, hoaxes tend to have more traffic accumulated before their creation than after. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/81.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/81.txt new file mode 100644 index 0000000000000000000000000000000000000000..cbcb47df90fce2ef8826eb217cfca3cf1c48bc7b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/81.txt @@ -0,0 +1 @@ +loss is an inevitable component of manufacturing. in the manufacturing of jewellery from precious metals, accounting for and calculating the losses is very crucial. gross loss of jewellery is the total metal loss during its manufacturing. loss in metal happens during casting, filing, polishing, setting and at almost every stage. even though most of this lost metal is recovered and refined in the refinery to get a recovery of 92% on average, these losses are too significant not to be accounted for. the loss on each piece of jewellery varies, based on various factors. estimating this gross loss beforehand is very crucial for the manufacturing of that jewellery. this estimated gross loss is used while pulling wax patterns during the process of injection moulding. jewelry made from a heavier wax piece will have surplus metal that must be filed down and recovered later, which is a waste of time and materials because only some of the metal will be recovered. therefore, estimating the total loss provides a general estimate of the wax weight and can be used as a guide for how each procedure should be carried out. in a production process, the step-wise loss at each step of manufacturing is collected. this is done by weighing the jewelry after each step. hence, after the jewelry has been manufactured, the company can assess the final gross loss that it bore. the total recovery that was achieved is also considered and added to the database. from this collected gross loss data, a wide database is built by an in-house engineer.
calculations based on current trends are made where a few other variables are also taken into consideration.variables like, weight of the final product, metal type (white gold, yellow gold, pink gold, silver, platinum and palladium), cartage of metal (8k, 9k, 10k, 12k, 14k, 18k, 20k etc.), the customer for whom the jewelry is being manufactured, the setting of diamond (whether the piece is handset or wax set) and of course the type of jewelry it is (whether it is a ring, a pendant, an earring, a bracelet or a bangle.)currently, the estimation comes with a variance of ±4-5%. hence there is a scope here by which, using the powerful tools of machine learning we can consider the variable constants to find out the gross loss in jewelry. these variable constants can most often than not be fetched directly from the cad files which are made way before the actual manufacturing process even begins.the aim of the paper is to estimate the gross loss of jewelry at the cad level with greater and repeatable accuracy using machine learning algorithms. this paper will systematically narrow down the variables responsible for gross loss of jewelry during its manufacturing, create a machine learning model that predicts the final gross loss based on the data collected from the cad file generated before manufacturing and ensure greater accuracy of the model as compared to the traditional methods of estimating loss. gross loss of jewellery is the total metal loss during its manufacturing. loss in metal happens during casting, filing, polishing, setting and at almost every stage.the loss on each piece of jewellery varies, based on various factors. estimating this gross loss beforehand was very crucial for the manufacturing of that jewellery. this estimated gross loss was used for while pulling wax patterns during the process of injection moulding. jewelry made from the heavier wax piece will have surplus metal that must be filed down and recovered later, which is a waste of time and materials because only some of the metal will be recovered. therefore, estimating the total loss provides a general estimate of the wax weight and can be used as a guide for how each procedure should be carried out.in a production process, a step wise loss of each of step of manufacturing is collected. hence after the jewelry has been manufactured it can assess the final data of gross loss that the company bore. this gross loss found out was further collected out of which a wide set of databases is manufactured by an in-house engineer.variables like, weight of the final product, metal type (white gold, yellow gold, pink gold, silver, platinum and palladium), cartage of metal (8k, 9k, 10k, 12k, 14k, 18k, 20k etc.), the customer for whom the jewelry is being manufactured, the setting of diamond (whether the piece is handset or wax set) and of course the type of jewelry it is (whether it is a ring, a pendant, an earring, a bracelet or a bangle. hence there is a scope here by which, using the powerful tools of machine learningwe can consider the variable constants to find out the gross loss in jewelry.the aim of the paper is to estimate the gross loss of jewelry at the cad level with greater and repeatable accuracy using machine learning algorithms. 
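the kind of model the excerpt describes could look like the following sketch: a regression over cad-level attributes with one-hot encoded categorical variables. the column names, the random forest regressor, and the train/test split are assumptions for illustration, not the paper's actual pipeline.

# illustrative sketch of predicting gross loss from cad-level attributes.
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import OneHotEncoder

df = pd.read_csv("jewellery_production.csv")       # hypothetical export of the in-house database
categorical = ["metal_type", "carat", "customer", "setting", "jewelry_type"]   # assumed column names
numeric = ["final_weight_g"]
X, y = df[categorical + numeric], df["gross_loss_pct"]

pre = ColumnTransformer([("cat", OneHotEncoder(handle_unknown="ignore"), categorical)],
                        remainder="passthrough")
model = Pipeline([("pre", pre), ("reg", RandomForestRegressor(n_estimators=300, random_state=0))])

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
model.fit(X_train, y_train)
print("held-out R^2:", model.score(X_test, y_test))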
this paper will systematically narrow down the variables responsible for gross loss of jewelry during its manufacturing, create a machine learning model that predicts the final gross loss based on the data collected from the cad file generated before manufacturing, and ensure greater accuracy of the model as compared to the traditional methods of estimating loss. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/810.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/810.txt new file mode 100644 index 0000000000000000000000000000000000000000..a143df6213f6db76da47a31051437ff20f728498 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/810.txt @@ -0,0 +1 @@ +knowledge tracing (kt) is a sequential prediction task that aims to predict the outcomes of students over questions by modeling their mastery of knowledge, i.e., knowledge states, as they interact with learning platforms such as massive open online courses and intelligent tutoring systems, as shown in figure 1. [figure 1: individual scoring rates per kc and at the kc and question levels for students s1-s3 over questions q1-q6; one symbol denotes a question answered correctly, another denotes a question the student did not get the chance to answer.] solving the kt problem may help teachers better detect students that need further attention, or recommend personalized learning materials to students. the kt-related research has been studied since the 1990s, when corbett and anderson, to the best of our knowledge, were the first to estimate students' current knowledge with regard to each individual knowledge component (kc). a kc is a description of a mental structure or process that a learner uses, alone or in combination with other kcs, to accomplish steps in a task or a problem. since then, many attempts have been made to solve the kt problem, such as probabilistic graphical models and factor analysis based models. recently, with the rapid development of deep neural networks, many deep learning based knowledge tracing (dlkt) models have been developed, such as auto-regressive deep sequential kt models, memory-augmented kt models, attention based kt models, and graph based kt models. besides model variations in terms of neural architectures, a large spectrum of dlkt models are designed to incorporate as much learning-related information as possible to augment their prediction ability. such supplemental information includes question texts, question similarities, question difficulties, and relations between questions and kcs. although the aforementioned dlkt approaches have constituted new paradigms of the kt problem and achieved promising results, two important factors in real-world educational data are not well represented. first, existing explorations of modeling the intrinsic relations between questions and kcs and building accurate student answer predictors are loosely connected. previous approaches tend to learn relation enhanced embeddings from graphs involving questions, kcs and students and then augment the initial model input with the learned representations.
unfortunately, such a graph is extremely sparse in real-world data. for example, table 1 shows basic data statistics of three widely used kt benchmark datasets. the majority of questions are only associated with 1 or 2 kcs and the average numbers of kc per question are 1.3634, 1.0136, and 1.0148 for the above datasets. furthermore, due to the fact that such associations are manually annotated, mislabeled relations are inevitable and the corresponding errors might be easily propagated in the learning process of graph based dlkt models . second, many existing dlkt models assess knowledge states without explicitly capturing the student-level variability, i.e., individualization, such as different knowledge acquisition abilities and learning rates. modeling such student-level individualization could benefit the kt model's statistical goodness of fit, as well as potentially improve the generalization of the kt model . figure 1 shows a toy example that illustrates the individualization effects on the kt prediction tasks, where 3 students have answered 5 questions related to 4 kcs. as we can see from figure 1, even though student 𝑆 1 and student 𝑆 2 have the same historical scoring rate at the question level 3 , their knowledge mastery levels (eg. scoring rate per kc.) differ a lot. meanwhile, student 𝑆 1 and student 𝑆 3 have the same scoring rate per kc, but their question-level and kc-level historical ratings are quite different. unfortunately, such individualization information of different students is not given in advance, which makes it very challenging to measure them.in this paper we develop solutions that are applicable and can learn kt models from real-world educational contexts. our work focuses on the refinements of a popular dlkt model: the deep knowledge tracing (dkt) and its application to student assessment. we aim to develop an algorithm to automatically learn a dkt that performs better student assessment by addressing the aforementioned two challenges.briefly, the dkt is one of the most widely used models of using deep neural networks to capture student interaction dynamics in the kt domain . this is due to its relative simplicity, mathematically accurate prediction behavior, and the fact that it still leads the leaderboard across 7 popular datasets across different education domains according to a recent dlkt benchmarking report . the dkt is markovian and assumes the dynamic knowledge states of the student are captured well using a small set of real-valued hidden-state variables. the dkt can be learned from observational interaction data with any gradient descent optimization algorithms.in this work, we address the above issues by introducing two auxiliary learning tasks including:• question tagging (qt) prediction task: automatically predicting whether questions contain specific kcs. • individualized prior knowledge (ik) prediction task:progressively predicting student-level prior knowledge that is hidden in students' historical learning interactions. our approach builds upon the original auto-regressive dkt architecture, and augments its original cross-entropy objective function that optimizes the probabilities that a student can correctly answer questions with two auxiliary tasks. in the qt task, we use a transformer encoder with a masked attention mechanism to extract contextual similar question-level information that is relevant to the exercise to be answered and assign kcs to each question. 
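a minimal numpy sketch of the masked dot-product attention used by the qt task described above: each interaction attends only to earlier interactions, and the contextualised question vector is mapped to per-kc logits. the dimensions, the single attention head, and the random projections are assumptions for illustration only.

# illustrative sketch of causally masked dot-product attention with a kc-tagging head.
import numpy as np

rng = np.random.default_rng(0)
seq_len, d_model, n_kcs = 10, 32, 5
q_emb = rng.normal(size=(seq_len, d_model))          # question embeddings for one student

Wq, Wk, Wv = (rng.normal(scale=0.1, size=(d_model, d_model)) for _ in range(3))
W_kc = rng.normal(scale=0.1, size=(d_model, n_kcs))  # kc-tagging (qt) head

Q, K, V = q_emb @ Wq, q_emb @ Wk, q_emb @ Wv
scores = Q @ K.T / np.sqrt(d_model)
mask = np.triu(np.ones((seq_len, seq_len), dtype=bool), k=1)
scores[mask] = -np.inf                               # causal mask: no peeking at future steps
attn = np.exp(scores - scores.max(axis=1, keepdims=True))
attn /= attn.sum(axis=1, keepdims=True)
context = attn @ V                                   # contextualised question representation
kc_logits = context @ W_kc                           # qt task: which kcs does this question carry?
print(kc_logits.shape)                               # (seq_len, n_kcs)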
in the ik task, at each time step, we use a student ability network to measure individualized historical performance considering all the previous questions and responses for each student. to ensure that our approach can be fairly comparable with other recently developed dlkt models, we choose to follow a publicly available standardized kt task evaluation protocol. we conduct rigorous experiments on three public datasets and the results show that our auxiliary task enhanced dkt model, i.e., at-dkt, is able to improve the "simple but tough-to-beat" dkt model in terms of auc by at least 0.9%. knowledge tracing (kt) is a sequential prediction task that aims to predict the outcomes of students over questions by modeling their mastery of knowledge, i.e., knowledge states, as they interact with learning platforms. [figure 1: individual scoring rates per kc and at the kc and question levels for students s1-s3 over questions q1-q6; one symbol denotes a question answered correctly, another denotes a question the student did not get the chance to answer.] the kt-related research has been studied since the 1990s, when corbett and anderson, to the best of our knowledge, were the first to estimate students' current knowledge with regard to each individual knowledge component (kc). recently, with the rapid development of deep neural networks, many deep learning based knowledge tracing (dlkt) models have been developed, such as auto-regressive deep sequential kt models, memory-augmented kt models, attention based kt models, and graph based kt models. the qt task focuses on predicting the kcs assigned to the questions by modeling the intrinsic relations among the questions and kcs with students' previous learning outcomes, and the ik task aims to estimate the individualized historical performance of each student according to their learning processes. different from previous graph based dlkt approaches that first learn representations of questions and kcs from their relation graph and then concatenate the learned embeddings as part of the model input, we improve the representation discriminative ability by explicitly using the intermediate representations to predict whether a kc is associated with the question at each time step.
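how the main response-prediction loss could be combined with the two auxiliary objectives (qt and ik) described above is sketched below. the loss forms and the weights lambda_qt and lambda_ik are assumptions; the paper's exact formulation may differ.

# illustrative sketch of a combined objective: main kt loss + qt loss + ik loss.
import numpy as np

def bce(p, y, eps=1e-7):
    p = np.clip(p, eps, 1 - eps)
    return -(y * np.log(p) + (1 - y) * np.log(1 - p)).mean()

rng = np.random.default_rng(0)
# main kt task: probability the student answers each step correctly
p_resp, y_resp = rng.uniform(size=20), rng.integers(0, 2, size=20)
# qt task: multi-label kc-tagging probabilities per question
p_kc, y_kc = rng.uniform(size=(20, 5)), rng.integers(0, 2, size=(20, 5))
# ik task: predicted vs. observed historical scoring rate of the student
prior_pred, prior_obs = rng.uniform(size=20), rng.uniform(size=20)

lambda_qt, lambda_ik = 0.1, 0.1                      # assumed weightings
loss = bce(p_resp, y_resp) + lambda_qt * bce(p_kc, y_kc) + lambda_ik * np.mean((prior_pred - prior_obs) ** 2)
print(loss)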
then, based on each student's learning history, we use a transformer based question encoder with a masked dot production attention function to capture the long-term contextual dependencies of both questions and kcs within the student learning history., graph based kt models that capture interaction relations with graph neural networks, and attention based kt models that use the attention mechanism and its variants to capture dependencies between interactions.to make the kt prediction close to real application scenarios, we evaluate dlkt models in a multi-step prediction setting where the models are required to predict a future span of students' responses given their historical interactions., use the model estimation of r𝑡+1 , r𝑡+2 , • • • , r𝑡+δ-1 , when predicting r𝑡+δ where δ is an arbitrary future time span.from figures3and4, we can see that: (1) at-dkt outperforms all other sequential models in accumulative prediction for all datasets, which indicates that the estimated responses made by at-dkt are accurate and beneficial for the next-step prediction in sequential models; (2) compared to non-sequential methods, at-dkt has the best performance on al2005 and bd2006 datasets and is the second best model in nips34. since the proposed auxiliary task ik is to measure the prior knowledge of students according to their historical learning interactions, the step-level problems are strongly correlated to get a better prediction result in the prior knowledge estimation task hence improving the student response prediction;. thus, it is important to incorporate such intrinsic information of question-kc relations and studentlevel prior knowledge in dlkt models; and (2) when comparing at-dkt to at-dkt w/o ik and at-dkt w/o qt, we can see that the prediction improvements from the qt task and the ik task are complementary.in this paper, we propose to enhance the original deep knowledge tracing model with both the question tagging prediction task and the individualized prior knowledge prediction task. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/811.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/811.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a0c9d86bed6afcfa85ffdf627a39481b0ab0924 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/811.txt @@ -0,0 +1 @@ +fairness has emerged as an important concern in algorithmic systems, mainly focused on the potentially unfair outcomes of automated decisions powered by machine learning , . problematic systems have been studied in a variety of domains, including justice , health care , and education , , and many normative fairness concepts , , tests and corrective measures have been proposed.in this paper, we investigate how fairness concepts may be studied in complex socio-technical systems (sts), possibly including (but not limited to) algorithmic components. in other words, quoting judith simon el al. , we aim to widen the study of fairness to "the broader socio-technical system in which technologies are situated".in this work we consider sts of the computational kind, or social machines , i.e. networks of people interacting over a large-scale digital infrastructure, including various automated and semi-automated components 1 . the main motivation in analyzing such systems is not to evaluate the risk of using a particular algorithm (e.g. 
a deep neural network, with its limitations on explainability), but to understand the biases and harms that arise from the interactions of people and algorithms, where the main source of complexity is in the interactions rather than in any specific algorithm.this will also help us understand how harm might occurs in a socio-technical system where there is no obvious automated component to blame, even in combination with other factors.an example of such a situation is the case of abuse in online forums, which disproportionately affects women and minorities. while the behavior of specific participants may be the central problem, the interactions between perpetrators and victims are mediated and shaped by a digital infrastructure, and the lack (or insufficiency) of safeguards within the sociotechnical system -including both the social and the technical level -can also be blamed for these problematic outcomes.an obvious question is how such an analysis might differ from studying the fairness properties of a more narrowly defined algorithmic system or component.working to elucidate this general question, in this paper we expose a case study where we analyse the fairness properties of wikipedia. wikipedia is an emblematic sts, with complex interaction processes governed by numerous rules and norms , with many automated components involved (including thousands of bots , responsible for 20% of edits ), but no prominent automated decisions that directly affect people as in the usual model of fairness in algorithmic systems.wikipedia being a well-studied system, many types of "bias" relevant to our problem have already been studied on wikipedia. we therefore proceed by first systematically reviewing studies of "bias" in wikipedia, identifying and categorizing the problematic phenomena discussed in the papers.we then analyze these bias concepts in relation to established notions of harm and fairness defined for algorithmic systems, and analyze the causal relationships between these phenomena.our systematic review covers 75 papers investigating "bias" in wikipedia, and our analysis addresses the following research questions:• (rq3) are there any clear causal relationships between these problematic phenomena? our main conclusions are that fairness in sts can meaningfully be analyzed using concepts and techniques developed for algorithmic systems, and secondly that the complexity of the causal relationships between different bias phenomena justifies having a holistic view of the sts, because interventions on any bias phenomena are likely to have consequences on multiple others.the rest of the paper is organized as follows: in section ii we present our systematic review of "bias" in wikipedia, identifying and categorizing the bias phenomena, then in section iii we analyze these phenomena in terms of harm and fairness, before analyzing the causal relationships between the phenomena in section iv. we draw our conclusions in section v.the rest of the paper is organized as follows: in section ii we present our systematic review of "bias" in wikipedia, identifying and categorizing the bias phenomena, then in section iii we analyze these phenomena in terms of harm and fairness, before analyzing the causal relationships between the phenomena in section iv. the papers that we group in this category describe bias as "cultural bias", "geographical bias", "language-specific bias", "local bias", "ingroup bias", the "colonization of wikipedia", "political bias", and other closely related phrases. 
regarding racial bias, only one paper was found explicitly reporting racial bias in wikipedia, describing the underrepresentation of non-white academics in wikipedia biographies. while we don't want to minimize the importance of the reported bias, it should be noted that the under-representation of the "global south" in wikipedia (which is also described as "geographical bias") also implies a racial bias, and therefore it makes sense to us to group these group-defining attributes together. b) deletion of biographies: a related but separate phenomenon is the disparate rate of deletion for biographies of men and women: for one of wikipedia's page deletion processes (articles for deletion, afd), a study found that 25% of the biographies being deleted were women's biographies. 2) under-representation: a) biographies: following the observation that wikipedia contributors are mostly from a small number of countries, beytía points out that 62% of wikipedia biographies are also those of people from the same five countries. in addition to the main social groups discussed previously, for which biases are commonly discussed in the context of algorithmic fairness, a small number of papers have discussed the risk of bias against the social group of anonymous editors, a group that is defined by their status with respect to a technical feature of wikipedia, namely registration. in the case of the individual whose wikipedia biography was targeted, we referred earlier to individual fairness: while fairness is the normative expectation, here the harm is a form of interpersonal harm where technology was being weaponized to target another person's reputation, and contributing the most damaging information to a wikipedia article can be considered malinformation, as the primary goal is to cause harm. considering that the ideal situation would be one where all notable people had wikipedia biographies and non-notable people did not, there is also a correct decision for each person (create the biography if the person is notable), which is not necessarily the one taken by the wikipedia contributors. in our context, if we consider the process of publishing wikipedia biographies, applying equalized odds as a reference fairness criterion means verifying whether notable women (or members of the considered group) obtain biographies at the same rate as notable men. secondly, we would conjecture that the under-representation of a group's interests in wikipedia would also affect the group's participation in wikipedia, by reinforcing the idea that wikipedia is not "meant for them". b) causes: the under-representation of women in wikipedia is related to three causes: one is the under-participation of women: assuming that women are more likely than men to be interested in writing about other women, it stands to reason that a smaller community of women contributors will create fewer biographies of women, or mentions of women in other articles, as compared to the hypothetical situation where the pool of contributors was more balanced. finally, we note that another indirect causality relationship may exist between the under-participation of group members and their under-representation: the more active participants of wikipedia contribute to shaping wikipedia norms and policies, including notability rules.
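The passage above frames biography creation with an equalized-odds-style criterion: among notable people, members of different groups should obtain biographies at the same rate. The sketch below illustrates that check on a made-up list of notable people; the field names and counts are assumptions for illustration only, not data from the study.

```python
# Hypothetical sketch of an equalized-odds-style check on biography creation.
# Among notable people only, compare the share who actually have a biography per group.
from collections import defaultdict

notable_people = [  # invented placeholder records
    {"group": "women", "has_biography": True},
    {"group": "women", "has_biography": False},
    {"group": "men", "has_biography": True},
    {"group": "men", "has_biography": True},
]

counts = defaultdict(lambda: {"with_bio": 0, "total": 0})
for person in notable_people:
    stats = counts[person["group"]]
    stats["total"] += 1
    stats["with_bio"] += int(person["has_biography"])

# Equalized odds (restricted to the notable, i.e. "positive", class) asks these rates to be similar.
for group, stats in counts.items():
    rate = stats["with_bio"] / stats["total"]
    print(f"{group}: biography rate among notable people = {rate:.2f}")
```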
however, one may expect that members of the affected group would want to correct this situation, by editing the biased articles in order to reduce their bias; in this sense, the lack of participation from a group is also likely to contribute to biased representations of the group.b) causes: according to de laat, this phenomenon occurs when editors patrol the new edits of wikipedia and misinterpret the interface of automated flagging tools: as they are aware the anonymous groups are more likely to vandalise articles or produce "bad" contributions (according to wikipedia policies), they pay extra attention to edits produced by anonymous users, rather than simply relying on the flagging software, which already considers the anonymity of users as a risk factor. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/812.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/812.txt new file mode 100644 index 0000000000000000000000000000000000000000..b8b63187379c14fab2b012d46c97073be58aa97e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/812.txt @@ -0,0 +1 @@ +socializing across ideological boundaries has been reported to be increasingly difficult . this phenomenon, known as affective polarization, has been widely observed in the united states. for instance, the fraction of republicans and democrats that would be unhappy if their child married a supporter of the opposing party has increased from 5% in 1960, to more than 30% in 2010 . this kind of polarization contributes to creating a harsh political climate, where citizens believe that other people's opinions are irrational and not based on facts . it has also been associated with the spread of harmful conspiracy theories, which can lead to dehumanization , culminating in threats such as the emergence of far-right domestic terrorism .the causes of affective polarization are widely debated, and some studies have pointed at social media as a decisive factor . in this popular view , social media-through recommendation algorithms, algorithmic filter bubbles, and self-sorting-would reinforce ideological separation, thus making it harder for an individual to engage with information from the opposite side. while several studies have observed ideological homophily and echo chambers in some social media , in others interactions between opposing sides are the norm and not the exception . some scholars have argued that online media has actually increased chances of engaging with the opposite ideological side , and internet usage explains only a very small share of polarization .an alternative explanation for the increase in affective polarization lies in the role of demographic factors . underlying material rifts and inequalities in society make gender, race, age, and wealth boundaries increasingly divisive, and also increasingly correlated to party affiliation and ideological standpoints. in their seminal paper, lazarsfeld et al. distinguished between status homophily, when similarity in social status fosters connections, and value homophily, when similarity in beliefs and ideas tends to do so. in this view, the existing status homophily within these groups would therefore be one of the main drivers behind value homophily and thus contribute to affective polarization .to assess these competing hypotheses, this study focuses on the interactions between different demographic groups in one of the main loci where opinions are formed and challenged: the discussion of news . 
in particular, the discussion of news on social media is of twofold interest. first, it allows studying how specific demographic groups are connected in a context where they cannot directly observe each other's demographic conditions: any measured effect is entirely due to differences in worldviews, interests, and thoughts expressed through writing. second, it allows us to measure and compare, at the same time and in the same context, the likelihood of interaction between opposing ideological sides. these reasons lead us to investigate reddit, one of the most visited websites in the u.s., a public forum widely used to read and discuss news. we focus on the r/news subreddit, the main reddit community specifically dedicated to discussion of current news, with a focus on u.s. internal affairs.therefore, our main research question is whether social interactions in online news discussions on reddit tend to be segregated by demographic boundaries, or by ideological echo chambers.to this aim, we first distinguish groups of users more likely to be part of a certain demographic group thanks to the method proposed by waller and anderson . then, we measure how much the amount of interactions observed between demographic groups differs from a carefully-crafted null model, where such attributes would not affect interactions. we replicate this analysis separately for five years, from 2016 to 2020.in all these experiments, we find similar effects. first, that ideological echo chambers are absent: left-wing and right-wing users interact more than expected by a null model of random interactions, while within-group interactions are less likely to happen. this result suggests that ideological echo chambers do not necessarily appear in a typical context of opinion formation online. second, interactions in news discussions on reddit are unlikely to cross demographic boundaries, e.g., affluent users are more likely to interact among themselves than with less-affluent users. in other words, we observe segregation along demographic boundaries, in the sense of "restriction of contacts between various groups" , in line with previous literature on social networks . finally, we demonstrate that demographic homophily is not simply driven by common interests, such as discussions on the same topics. while indeed different demographic groups have different interests in the various news topics, their segregation goes beyond this simple explanation. most importantly, this segregation happens without users being able to directly observe the demographic attributes, and without the mediating effects of, e.g., geographic segregation.our results, therefore, support the idea that not only echo chambers might not be an intrinsic characteristic of social media , but that the separation of demographic groups is observable during opinion formation online: fundamental societal divisions might be reflected also in social media.therefore, our main research question is whether social interactions in online news discussions on reddit tend to be segregated by demographic boundaries, or by ideological echo chambers. 
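The study above compares observed interactions between demographic or ideological groups against a null model in which group labels do not affect who interacts with whom. The sketch below uses a simple label-shuffling permutation null as an illustration; it is a simplification, not the authors' "carefully-crafted" null model, and the interaction data and labels are invented.

```python
# Illustrative sketch: compare observed cross-group interactions to a shuffled null model.
import random

interactions = [("u1", "u2"), ("u1", "u3"), ("u2", "u3"), ("u3", "u4")]  # (author, replied-to), placeholder
group = {"u1": "left", "u2": "right", "u3": "left", "u4": "right"}       # placeholder labels

def cross_group_count(labels):
    # number of interactions whose endpoints carry different group labels
    return sum(labels[a] != labels[b] for a, b in interactions)

observed = cross_group_count(group)

users, labels = list(group), list(group.values())
null_counts = []
for _ in range(10_000):
    random.shuffle(labels)                                   # break any link between labels and interactions
    null_counts.append(cross_group_count(dict(zip(users, labels))))

mean = sum(null_counts) / len(null_counts)
var = sum((c - mean) ** 2 for c in null_counts) / len(null_counts)
z = (observed - mean) / (var ** 0.5 or 1.0)
print(f"observed={observed}, null mean={mean:.2f}, z={z:.2f}")  # z > 0: more cross-group mixing than chance
```

A positive deviation from the null corresponds to the "more interaction than expected" finding for ideological groups, and a negative deviation to the demographic segregation described in the text.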
while indeed different demographic groups have different interests in the various news topics, their segregation goes beyond this simple explanation.our results, therefore, support the idea that not only echo chambers might not be an intrinsic characteristic of social media, but that the separation of demographic groups is observable during opinion formation online: fundamental societal divisions might be reflected also in social media.compared the presence of echo chambers around several controversial topics across four social media platforms (twitter, facebook, reddit, and gab), highlighting that facebook shows higher segregation of news consumption than reddit. the current work finds a similar result in a different context (news discussion), and at the same time finds evidence for demographic segregation on social media.to check whether the effects of socio-demographic attributes on interactions are simply a byproduct of a varying degree of interest in news topics, we need to classify posts in r/news according to their news topic. the second model, the socio-demographic model with topics (sd+t), investigates whether such relationships hold after controlling for different interest by each demographic group in each news topic., are interactions among users within the same demographic group more likely? rq3 do our findings on rq1 and rq2 hold after controlling for varying degrees of interests in news topics by different demographic groups? rq4 which news topic is each demographic group more interested in?. this fact indicates that the topic acts as a confounder and not as a mediator: while the homophily and heterophily effects are not caused solely by the topic of the post, interactions are nonetheless partially driven by the different interests of demographic groups in different topics.find that gender influences news retention about business news, both age and educational attainment affect political news, and educational attainment also had an effect on science news. it is reasonable to expect that also news consumption would be affected, and indeed, the business topic shows a marked distinction on the gender axis (with males more interested in the topic), and age has an effect on the interest in political news (older users are more interested). we see more mixed evidence for this pattern, with, for instance, entertainment news being more engaging for younger users, but without any effect for gender, and sports news with a similar effect for age but being more engaging for a male audience. for instance, demographic segmentation has been observed even in political advertising on social media, where different demographic groups witness different messages. social media users are self-sortingby demographic groups, whereby interactions across groups are less likely. in this work, we have quantitatively compared two competing hypotheses to explain affective polarization and found that demographic segregation is more prominent than ideological echo chambers on reddit news discussions. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/813.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/813.txt new file mode 100644 index 0000000000000000000000000000000000000000..2c3f6c1ee5f6679f33229d60d22f54eb91e91b2c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/813.txt @@ -0,0 +1 @@ +recent advances in natural language processing and computer vision have led to the development of text-to-image systems with unprecedented levels of realism and flexibility. at the same time, commentators have noted potential ethical issues related to the use of copyrighted artworks in the training sets, the generation of hateful and offensive content, as well as issues of bias and diversity in the model outputs. relating to the latter, research work has begun to audit the output of such models, investigating stereotypical associations between occupations and particular races and genders (cho, zala, and bansal 2022), as well as between the word "american" and lighter skin colours (wolfe and caliskan 2022).here, we take an alternative approach inspired by stereotype research in social psychology and focus on perceived traits of individuals. however, we approach the problem from the inverse direction of most psychological studies. rather than treating a demographic group (say, women) as the independent variable, and asking respondents for the associated traits (say, nurturing and emotional), here we use the trait as a prompt to the text-to-image system, and observe the demographic properties of the resulting image. so, to continue our example: if we ask the system for an image of an emotional person, will it return mostly pictures of women?we ground our investigation in the abc model of social cognition (koch et al. 2016). this model proposes three basic dimensions of social judgement; namely: agency (a), beliefs (b), and communion (c). these three dimensions can be further broken down into 16 polar traits. for example, agency comprises traits such as powerful vs. powerless and high-status vs. low-status, beliefs include traits such as conservative vs. liberal and religious vs. science-oriented, and communion includes sincere vs. dishonest and altruistic vs. egoistic. this model suggests that all our stereotypes of different groups can be specified in this 3-dimensional space: e.g., in the north american context, southerners may be stereotyped as laid-back, friendly, and religious (low agency, high communion, low beliefs1 ), while tech entrepreneurs may be stereotyped as wealthy, science-oriented, and greedy (high agency, high beliefs, low communion).clearly, these adjectives are under-specified with respect to a visual representation: what does a powerful person look like? what does a sincere person look like? it is precisely this under-specificity that can result in biased outputs, as the model must "fill in the blanks" with whatever cultural knowledge it has learned from the training data. however, as hutchinson, baldridge, and prabhakaran (2022) point out, "descriptions and depictions necessarily convey incomplete information about all but the most trivial scene." the model's approach to handling under-specification will therefore have varied and wide-ranging effects.thus, our research question is as follows: if we prompt the model to generate a person with particular social traits (as defined by the abc model), will the resulting images show the stereotypical demographic characteristics associated with those traits? 
we investigate this question using the 16 traits of the abc model and the demographic characteristics of skin colour, gender, and age, with three popular text-to-image models: dall-e 2, midjourney, and stable diffusion. we find that while not all traits generate stereotypical images, each model shows idiosyncratic biases along certain dimensions. we also observe intersectional biases, in particular a bias in all three systems associating the adjective of "poor" with darker-skinned males. thus, our research question is as follows: if we prompt the model to generate a person with particular social traits (as defined by the abc model), will the resulting images show the stereotypical demographic characteristics associated with those traits? we investigate this question using the 16 traits of the abc model and the demographic characteristics of skin colour, gender, and age, with three popular text-to-image models: dall-e 2, midjourney, and stable diffusion. most relevant to our work here are social groups defined by gender, age, or skin colour. • high-agency words will tend to generate images of people with lighter skin, older age, and male gender, while low-agency words will tend to generate images of people with darker skin, younger age, and female gender. • high-belief (progressive) words will tend to generate images of younger and lighter-skinned people, while low-belief (conservative) words will tend to generate images of older and darker-skinned people. • high-communion words will tend to generate images of people with lighter skin, older age, and female gender, while low-communion words will tend to generate images of people with darker skin, younger age, and male gender. briefly, each demographic variable can receive one of four possible annotations (gender: male, female, gender neutral, or no gender information available; skin colour: darker, lighter, in-between, or no skin colour information available; age: older, younger, in-between, or no age information available). (figure 2a) we observe a significant difference in the midjourney results for agency, with high-agency words more likely to generate images representing male gender, and low-agency words more likely to generate images representing female gender, as hypothesized. when we break these dimensions down by trait (shown in the appendix), this corresponds to more images of men generated for words like high-status and dominating (high-agency) and dishonest and unpleasant (low-communion), while more images of women were generated for adjectives like powerless, friendly, and likable. additionally, the system shows a significant difference in the dimensions of beliefs (progressive beliefs more associated with lighter skin) and communion (low communion more associated with lighter skin, specifically for the words dishonest and threatening). figure 2g shows a significant difference in agency and communion, with low-agency words and high-communion words more associated with images of younger people (recall, the first trend is in keeping with our hypotheses but the second is not). in particular, for all three systems, the adjective likable was highly associated with younger age, with its contrast adjective unpleasant ranging from moderately to highly associated with older age. it had a slight tendency to generate darker-skinned males for competitive (with images typically showing athletes), and a slight tendency to generate darker-skinned women for traditional (with images showing women in "traditional" dress from various cultures).
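The passage reports "significant differences" between the high and low poles of each trait dimension without naming the exact test here. The sketch below assumes a chi-square test on a contingency table of annotated gender counts as one plausible way to run such a comparison; the counts are invented placeholders, not study data.

```python
# Hedged sketch: do images for high- vs low-agency adjectives differ in annotated gender?
from scipy.stats import chi2_contingency

#                male  female  neutral/none   (invented counts)
high_agency = [   42,    18,     12]
low_agency  = [   20,    40,     12]

chi2, p_value, dof, expected = chi2_contingency([high_agency, low_agency])
print(f"chi2={chi2:.2f}, dof={dof}, p={p_value:.4f}")
# A small p-value would indicate that the gender distribution differs
# between the two poles of the agency dimension, as hypothesized in the text.
```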
unlike dall-e, there is a higher density of points in the lighter-skinned female quadrant, and unlike midjourney the points all tend to be associated with 'positive' adjectives: benevolent and sincere occur right along the 0 axis for skin colour, with powerful and likable associated with lighter-skinned women. overall, midjourney outputs a range of ages, but primarily lighter skin colours (points concentrated in the left-most quadrants); dall-e produces a range of skin colours but mostly younger faces (points concentrated in the bottom two quadrants); midjourney shows a surprisingly linear negative trend, with some adjectives associated with older males and others associated with younger females, but no traits associated primarily with older females, and only one trait (competitive) associated primarily with younger males. many of the significant differences in figure 2 did confirm our hypotheses: high-agency words generated more men than women, as did low-communion words, and in the case of midjourney, low-agency words were associated with younger age. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/814.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/814.txt new file mode 100644 index 0000000000000000000000000000000000000000..3f9199f72cccf21a89012a4a8fcd53cb3e033f55 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/814.txt @@ -0,0 +1 @@ +the openai-developed gpt (generative pre-trained transformer) model has a variation called chatgpt. the gpt model was initially released in 2018 and trained using the common crawl, a sizable dataset of text from the internet. the transformer design, revealed in a 2017 study by google researchers, served as the model's foundation. unsupervised learning was used to train the initial gpt model, which meant that it was trained on a sizable text dataset without any explicit labels or annotations. as a result, the model could pick up on various textual patterns and structures and produce new text with a similar tone and structure (wikipedia 2023). chatgpt is owned and developed by openai, a private artificial intelligence research facility made up of the for-profit openai lp and its parent organization, the nonprofit openai inc. it is also known as gpt-3 (generative pre-trained transformer 3). elon musk, sam altman, greg brockman, ilya sutskever, wojciech zaremba, and several others created openai in december 2015 to advance benign ai in a way that benefits humankind as a whole. san francisco, california, usa, is home to the business (wikipedia 2023; "about openai" 2015). chatgpt, also known as gpt-3 (generative pre-trained transformer 3), is the latest version of a series of language models developed by openai. ➢ gpt-3 (generative pre-trained transformer 3), also known as chatgpt: the latest and most advanced version of the gpt series, gpt-3 was introduced in june 2020 and has 175 billion parameters, making it one of the largest and most powerful language models to date. usage: the api enables programmers, academics, and data scientists to incorporate the capabilities of gpt-3 into their software and systems, including ❖ chatbots and virtual assistants: chatgpt can build chatbots and virtual assistants that comprehend user input and respond conversationally and naturally.
❖ language translation: before the invention of language translation tools, libraries gave language translation services to the users, but now technologies are replacing translation jobs.❖ content creation: chatgpt can produce a wide range of content, including text summaries, complete articles, and natural language answers to questions.❖ researchers: researchers can use chatgpt to perform natural language understanding and generation tasks, such as text summarization and text completion. text classification and sentiment analysis: the text classification and sentiment analysis capabilities of chatgpt can be honed for usage in applications like social media monitoring and customer feedback analysis. there are several ethical concerns related to using ai in academic writing, specifically language models such as chatgpt.ii) plagiarism: using language models to generate text can make it easier for researchers to engage in plagiarism by presenting text generated by the model as their work.using language models to generate text can lead to decreased creativity and critical thinking among researchers, who may rely on the model to generate ideas and text. chatgpt-3 can also aid the writing process by generating text for academic papers such as research papers, essays, and dissertations. therefore, it is vital to use tools such as chatgpt-3 to aid research and writing rather than relying solely on them to generate ideas and text. additionally, some scientists may view the use of chatgpt in this way as an attempt to circumvent traditional methods of authorship and give undue credit to the technology rather than the human researchers who conducted the work. purpose of using chatgpt: the most common uses for chatgpt among the 71 respondents who used it include language correction (62%), sentence making (47. ai-based models, including chatgpt, may require significant human editing to produce high-quality text, and it is ultimately the responsibility of the researcher to ensure accuracy, coherence, and relevance. in the future, educational institutions may subscribe to tools like chatgpt and other academic tools such as grammar correction, paraphrasing, plagiarism checking, and data analysis tools. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/815.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/815.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a8918a4b3945e8a94fd2fdef37397dd6fc349d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/815.txt @@ -0,0 +1 @@ +regulatory affairs is a highly advanced discipline, with a decades long history, in the fields of pharmaceutical drug development and medical device (md) and in-vitro diagnostic (ivd) technologies. artificial intelligence (ai), in the form of machine learning (ml), is a comparatively new technology which is being trialed across multiple areas of both md/ivd and the pharmaceutical development pipeline.with every new technology come new potential products but also new work-practices which must be established. in the regulated fields of the human health industries this is likely to pose new and difficult problems. further complicating matters is the fact that ai engineers typically have little to no experience of working in a regulated environment. 
and, medical regulators have little understanding of ai/ml technology.there is a lack of a common understanding of the regulatory basis for products where ai is combined with medical devices and/or the development of medicinal products. this leads to a lack of compliance on the part of the ai engineers. which in turn, leads to prolonged approval times.in this article, we focus on the topic of validation. this is a key step, in developing a product for use, and in the underlying regulatory processes. we assigned ourselves the following question: how can validation be understood in a common manner across med-tech, pharmaceutical drug development, and ai product development? worse, the eu medical device directive (mdd) even uses the term validation following two different meanings within a single sentence: "for devices which incorporate software or which are medical software in themselves, the software must be validated according to the state of the art taking into account the principles of development lifecycle, risk management, validation and verification.broad validation includes narrow validation but encompases a much wider set of activities which ultimately end in narrow validation. in this case, the target of software validation is understood as a synonym for software quality assurance over the entire software development process. in other words, whereas the eu's mdd combines both broad and narrow validation, the fda guidance document uses the term software validation exclusively in the broader sense. also integration and software system tests, that test against given requirements, are verification activities, even though they clearly form part of software validation in the broader sense. it is important then to understand that software verification and software validation are only disjunct, as terms, when software validation is understood in the narrower sense. software quality assurance (qa), which encompases aspects of ml development, begins firmly in broad validation but also tapers to narrow validation later in the product life-cycle. whereas ml engineers validate on validation sets and test on test sets, the medical ml literature has largely undocumented approaches to learning parameter choice and refers to testing on the ml test set as validation.clearly then, the medical ml literature largely eschews broad validation and uses the term validation of a ml model exclusively in the sense of a narrow validation. both software qa and ml model construction best-practices, but are overly focused on the ml model construction and tend to not be involved in the achievement of, narrow validation, of entire product goals such as clinical benefit and safety. this can be interpreted as not covering narrow validation -similarly to the usfda's software guidelines -since clearly the descriptions in iec 62304 describe software validation in the broader sense. 
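The terminology clash described above hinges on the ML engineer's narrow, data-split sense of "validation" versus the regulatory, life-cycle sense. The sketch below illustrates only the narrow sense on synthetic data (a validation split for choosing a learning parameter and a separate test split for the final check); it is an illustration of the vocabulary, not of any regulated workflow.

```python
# Minimal sketch of "validation" in the ML engineer's narrow sense: a validation
# split for choosing learning parameters and a held-out test split for the final check.
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression

X, y = make_classification(n_samples=600, n_features=10, random_state=0)  # synthetic data
X_train, X_tmp, y_train, y_tmp = train_test_split(X, y, test_size=0.4, random_state=0)
X_val, X_test, y_val, y_test = train_test_split(X_tmp, y_tmp, test_size=0.5, random_state=0)

best_model, best_score = None, -1.0
for c in (0.01, 0.1, 1.0, 10.0):          # "validating" here means picking the regularization strength C
    model = LogisticRegression(C=c, max_iter=1000).fit(X_train, y_train)
    score = model.score(X_val, y_val)
    if score > best_score:
        best_model, best_score = model, score

print("validation accuracy:", round(best_score, 3))
print("test accuracy:", round(best_model.score(X_test, y_test), 3))
# Broad validation, by contrast, spans the whole development life-cycle
# (requirements, risk management, documented QA), not just this data split.
```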
this includes validation in the narrower sense towards the intended use of the device using the ml model, and also validation in the broader sense as to whether engineering best-practices were applied.in this context, the goal of software validation in pharma today frequently covers the entire software development life-cycle and includes validation in the broader sense, which is the subject of gamp5 -gxp.once the software is being used to develop actual drug candidates, however, the 'regulatory' phase begins and the legal burdens, to demonstrate the goals of alcoa and gdrp, quickly lead to much higher requirements in terms of software validation. we adopt the terms, broad and narrow validation, and use them to align the validation processes across the industries of pharmaceutical r&d, manufacturing and medical device and in-vitro diagnostics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/816.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/816.txt new file mode 100644 index 0000000000000000000000000000000000000000..c574ceb3d20a0ad6b9474d738953e470702883ae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/816.txt @@ -0,0 +1 @@ + the goals of our five-year program include: (1) developing a network of like minded individuals,(2)motivating students through empowerment,(3)providing experience with realistic and impactful problems through community engagement, and (4) learning to work in diverse teams. in the first iteration of the program, our twenty-four students from diverse science and engineering backgrounds tackled a number of social and environmental problems relevant to the community over the course of 4 months.the program aims to engage students from diverse backgrounds in real world projects utilizing experiential learning methods, in particular community engaged learning and design thinking.the first step of the interview process consisted of a team activity to test each student's ability to(1)work in a team, (2) overcome any conflict that would occur in that short time span, and (3) self organize the team decisions. students were broken into teams of 5-6 students and tasked with completing a project that addressed a hypothetical problem and constraints which would require students to demonstrate their skills in these three areas. not only did each project address a pressing social or environmental challenge affecting the broader community, but each community partner also committed to mentoring the students in project specific training such as dealing with brain injury patients and norms of engaging with vulnerable clients. through this process, each team ended up with diverse members in terms of gender, sexual orientation, ethnicity, academic standing and background such that most students had another team member which they could relate to on some level, and thus catalyze the formation of meaningful friendships and bonds. for some of the students, this was their first work-integrated learning experience and most of the students had never worked with a community partner before. 
therefore, while each project team had a diverse blend of experiences, skills, and perspectives, all team members had an equal opportunity to work on a project that they were deeply motivated to work on. • p1-problem definition: students were provided with interpersonal skill training like diversity, equity and inclusion (dei), professional conduct and communication with clients, conflict resolution, project specific training, design thinking as well as technical skills training like web frameworks, code repositories, and agile development. we further interviewed the community partners to understand their perspective of working with the diverse set of teams, the program, and the product. afterwards, we analyzed the data to extract the key lessons and statements that illustrated the students' journey and could be further utilized to improve the program. working on a community project with real clients and community problems poses an abundance of rewarding yet challenging experiences that students would not otherwise be exposed to until post-graduation employment. this program was the first exposure for many students to work with real clients, therefore we provided them with a number of training sessions covering soft skills such as communication with clients, team management, professional conduct, diversity, equity and inclusion (dei) and leadership. despite real clients being so important to the students' motivation and success, the community partner's vision for the solution collided with the students' skillsets. as a result of the continuous guidance on practicing dei, soon the students started leveraging their team diversity through efficient work distribution, as a student in project 1 describes: "being on a diverse team has positively impacted the project development, in that we can all work on different areas." \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/817.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/817.txt new file mode 100644 index 0000000000000000000000000000000000000000..40d6d09d77a01a8ca054a3d53b1f57f03e31824e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/817.txt @@ -0,0 +1 @@ +health information systems (his) are increasingly collecting routine care data. this source of real world data (rwd) bears great promise to improve the quality of care. on the one hand, the use of this data translates into direct benefits (primary uses) for the patient by serving as the cornerstone of the developing personalized medicine. on the other hand, they also bring indirect benefits (secondary uses) by accelerating and improving knowledge production: on pathologies, on the conditions of use of health products and technologies, and on the measures of their safety, efficacy or usefulness in everyday practice. they can also be used to assess the organizational impact of health products and technologies. in recent years, health agencies in many countries have conducted extensive work to better support the generation and use of real-life data. study programs have been launched by regulatory agencies: the darwin eu program by the european medicines agency and the real world evidence program by the food and drug administration. germany has found that its data collection systems are very heterogeneous, limiting the potential of health data. figure 1 illustrates, for a clinical data warehouse, the three phases of data flow from the various sources that make up the his.
it focuses on: governance, transparency, types of data, data reuse, technical tools, documentation and data quality control processes. these services range from technical expertise in order to build up the data flows and clean them up to the delivery of a platform integrating the different stages of data processing. search engines can be used to query all the hospital's data gathered in the cdw, without data compartmentalization between different softwares.half of the cdws have put in place documentation accessible within the organization on data flows, the meaning and proper use of qualified data (10/21 mentioned).as the cdw becomes an essential component of data management in the hospital, the creation of an autonomous internal team dedicated to data architecture, process automation and data documentation should be encouraged. the first level allow to ensure the quality of data integration as well as the pertinence of data reuse by clinicians themselves. this new dimension of medical research requires a much greater development of data science skills to change the focus from the implementation of the statistical design to the data engineering process. however, there is still a lack of adoption, either of research standards from hospital cdws to conduct robust studies with multiple sites, or from ehr vendors to allow sufficient data interoperability for efficient data communication.the french cdw ecosystem is beginning to take shape, benefiting from an acceleration thanks to national funding, the multiplication of industrial players specializing in health data and the beginning of a supra-national reflection on the european health data space. a combination of public health, data engineering, data stewardship, statistics and it competences is a prerequisite for the success of the cdw.a common data model should be encouraged, with precise metadata allowing to map the integrated data, in order to qualify the uses to be developed today from the cdws. more broadly, open-source documentation of data flows and transformations performed for quality enhancement would require more incentives to unleash the potential for innovation for all health data reusers. combining city data and hospital data would provide a complete view of patient care. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/818.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/818.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a751a039056e0a9cea6e490620ccd57620de76b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/818.txt @@ -0,0 +1 @@ +during the covid-19 pandemic many universities, e.g., in germany, were forced to switch to online classes. moreover, most final exams were held online. in pre-pandemic times, computer-based final exams have already proven their worth, but with the difference that they were proctored in the classroom. during the pandemic this was mostly unfeasible and students had to take the exam from a location of their choice.there exists a wide range of supervisory measures for take-home exams. e.g., one could use a video conference software to monitor students. at many universities, however, this is legally prohibited due to data protection regulations. the exams are therefore conducted as open-book exams, i.e., students are allowed to use notes or textbooks. yet, students must not cooperate with each other. 
any form of cooperation or collusion is regarded as attempted cheating. to our knowledge, no universally applicable method for proctoring take-home exams exists. it is therefore hardly feasible to stop students from illegally working together. however, one can attempt to identify colluding students post-exam. the attempt alone could have a deterring effect on students. research in this area, however, is scarce. present a method for comparing exam event logs to detect collusion. they use a simple distance measure for time series, i.e., the event logs of two different students, to quantify the similarity of these students' exams. building on this, we propose an alternative distance measure, as well as the use of hierarchical clustering algorithms, to detect groups of potentially colluding students. we find that our method succeeds in finding groups of students with near identical exams. furthermore, we present an approach to categorise student groups as "outstandingly similar", by providing a proctored comparison group. the remainder of this paper is organised as follows: section 2 provides a brief overview of related work. section 3.1 describes the available data. section 3.2 presents our method, including the calculation of the distance matrices. section 4 discusses the empirical results. section 5 concludes. , the event logs of two different students, to quantify the similarity of these students' exams. their findings suggest that collusion took place when the final exam was not proctored. 3 we also removed twelve students from the test group who reported internet problems during the exam. although the lecture of the comparison group was held in presence and the lecture of the test group was held online, both groups shared the same content and learning goals. we adopt an exploratory approach for finding clusters of students with similar event patterns and points achieved during the exam. the average linkage method (here: unweighted pair group method with arithmetic mean) defines the distance between any two clusters as the average distance among all pairs of objects in said clusters. this general shape is typical for the underlying algorithm, as average linkage clustering combines the long form of single linkage clustering with the smaller, tighter clusters of complete linkage clustering. since our primary interest lies in the detection of clusters at low dissimilarities, instead of the general structure of the data, we exemplarily investigate the six lowest clusters (a - f) in figure 1. for our data of the test group, there is no indication of the existence of suspicious clusters of more than two students. we want to identify cases in the unproctored test group which are rather extreme compared to the proctored comparison group. the boxplots in figure 5 show the distributions of the global pairwise dissimilarity d(x i , x i ′ ) of all students in the comparison and test group. the median value of the test group is significantly lower, indicating a lower average global pairwise dissimilarity in this group. we apply the above mentioned lower bound on the test group's distribution to identify groups of students which are "outstandingly similar". while it is no surprise that these clusters were detected, our approach still aids us in deciding on when to stop inspecting further groups of students, as their level of similarity might as well occur in the comparison group. we find pairs of students in the test group with values below the minimum of the comparison group.
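The method described above combines a pairwise dissimilarity matrix over exam event logs, average-linkage (UPGMA) hierarchical clustering, and a threshold taken from a proctored comparison group. The sketch below shows that pipeline with SciPy on a placeholder dissimilarity matrix; it is an illustration of the technique, not the authors' exact distance measure or data.

```python
# Illustrative sketch: average-linkage clustering on a precomputed dissimilarity
# matrix between students' exam event logs, cut at a comparison-group threshold.
import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster
from scipy.spatial.distance import squareform

# Pairwise dissimilarities among 5 students in the unproctored test group (placeholder values).
D_test = np.array([
    [0.0, 0.9, 0.8, 0.1, 0.7],
    [0.9, 0.0, 0.6, 0.9, 0.8],
    [0.8, 0.6, 0.0, 0.8, 0.2],
    [0.1, 0.9, 0.8, 0.0, 0.7],
    [0.7, 0.8, 0.2, 0.7, 0.0],
])

Z = linkage(squareform(D_test), method="average")   # UPGMA / average linkage

# Suppose 0.3 is the smallest pairwise dissimilarity observed in the proctored
# comparison group; pairs merging below it look "outstandingly similar".
threshold = 0.3
clusters = fcluster(Z, t=threshold, criterion="distance")
print(clusters)  # students sharing a cluster id merged below the threshold
```

With these placeholder values, students 1 and 4 (dissimilarity 0.1) and students 3 and 5 (0.2) would be flagged, mirroring the paper's finding of suspicious pairs rather than larger groups.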
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/819.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/819.txt new file mode 100644 index 0000000000000000000000000000000000000000..d208b68b75cebf9c17d2dc396999dc23b0818103 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/819.txt @@ -0,0 +1 @@ +traditionally political parties have used manifestos to communicate the set of policies they announce they would implement if elected and promoted their political agendas through mass media. with the emergence of online advertising platforms, online ads have become one of the main communication channels for political campaigners. during the 2020 us election cycle, 18% of political marketing spending went to online advertising, compared to 3% during the 2016 election cycle . moreover, online advertising spending by parties increased from 24% to 43% of advertising budgets between the uk general elections of 2015 and 2017 .besides the low cost, the key appeal of online micro-targeted advertising for political campaigners comes from the fact that they can communicate a more diverse set of information (than traditional mass media), and they can target subgroups of voters with information that is relevant to them. however, many researchers and civil societies are firing alarms that targeting technologies are also allowing the emergence of an "industry of political influence" where political advertisers can select very narrow groups of vulnerable people and tweak their messages to maximize their influence .the cambridge analytica scandal and the russian's interference in the u.s. elections through online ads has shaken to their core both online platforms as well as governments around the world. to curb such risks, both online platforms and regulators have agreed that researchers, journalists, and the civil society need to be able to scrutinize online political ads. consequently, online platforms such as meta and google have implemented ad libraries that contain information about all political ads running on their platforms . better yet, ad libraries do not risk being discontinued. thanks to the work of the european commission in the digital services act, access to ad libraries will be mandatory starting in 2024 for online platforms and search engines with more than 45 million monthly eu users . this leads to new technical challenges: when faced with access to such large volumes of ad data (e.g., over 14m political ads in the u.s. and over 400k political ads in france on the meta ad library) it is impossible to go through these ads manually; hence, the new quest is to provide the public with methods and tools to assist in scrutinizing political ads.in this paper, we focus on methods for detecting policy-related political ads. there are a number of reasons why identifying policyrelated political ads is important: (i) political communication-makes it possible to identify how political candidates and parties represent themselves and on which policies they focus their attention; (ii) mandate accountability-check, once elected, whether elected officials respected the policy pledges they advertised during elections (accountability is central to democratic theory ); (iii) influence on deliberation-mandate theories assume that voters are rational and they decide for whom to vote based on a careful consideration of available information . 
in practice, the deliberation process is more complex and is often based on emotions, convictions, and experiences . policy-related ads are interesting in both "rational voter" and "emotional voter" models. micro-targeting of policy-related ads could lead to some users being overly exposed to ads about specific policy issues (e.g., immigration), which might trigger strong emotions. in contrast, other voters might not get sufficiently exposed to any policy-related ads, which could lead to information incompleteness.for detecting policy-related ads, from a methodological perspective, we first need to decide what policy issues we should focus on and what is the right granularity (e.g., is "economy" too broad and should we consider "climate change" as an independent policy category?) while it is tempting to decide on a set of reasonable categories to detect, to robustly analyze policy-related ads across elections and countries, we need to rely on a solid, comprehensive, and stable theoretical basis. luckily, two codebooks have been developed and polished by several groups of political scientists over several decades: the cap (comparative agenda project) codebook and the cmp (comparative manifesto project) codebook . the cap codebook contains 28 main policy categories and 200 subcategories; while cmp contains 56 categories. the cap codebook aims at capturing policy attention, and hence it aims at being comprehensive in the policy categories they propose . the cmp codebook aims at capturing political parties' ideological positions on a left-right scale, hence, focusing on ideological goals. in this paper, we use the cap codebook as the underlying theoretical basis seems more suitable in the context of political micro-targeted ads.for the analysis, we gathered more than 96k political ads from the meta's ad library that appeared between 1 jan and 14 june 2022 (sec. 2). to gather labeled data, two experts annotated 431 ads with the relevant cap categories. to complement this dataset, we used prolific and qualtrics to post assignments for annotating ads, and we gathered labels for 4 465 ads. we observe only fair agreement (kappa>0.3) between prolific users and experts. we show disagreement mainly happens on ads that are related to more than two policy categories (sec. 2.4), hence, disagreement is linked to the text complexity of real-world ads.we implemented several machine learning (ml) models to classify ads in the relevant cap categories based on both traditional supervised models and pre-trained language models based on bert (sec. 3) that exploit as training data from cap and annotations from prolific users. our best configuration is able to achieve a micro average f1 score of 0.60 over a balance test set (sec. 4). the accuracy varies drastically depending on the policy category and ranges from a 0.19 f1 score for "social policy" to a 0.78 f1 score for "environment". the differences are explained by the disagreement present in the training data and the labeling complexity of real-world ads.finally, to show the practical usefulness of the classifier we developed, we analyze how policy attention varied across candidates and different demographic groups during the 2022 french presidential election (sec. 5). overall, we see big variations in policy attention across demographic groups, with women over-targeted with ads about "health", young users (ages 13-24) over-targeted with ads about "law and crime" and users aged over 55 over-targeted with ads about "immigration". 
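The classification setup above (multi-label CAP policy categories, evaluated with micro-averaged F1) is described without full implementation detail here. The sketch below is a hedged baseline in the spirit of the "traditional supervised models" mentioned in the text: TF-IDF features with a one-vs-rest classifier; the ad texts and labels are invented placeholders, and the paper's stronger BERT-based models are not reproduced here.

```python
# Hedged baseline sketch: multi-label policy-category classification with micro F1.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.multiclass import OneVsRestClassifier
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.metrics import f1_score
from sklearn.pipeline import make_pipeline

ads = [  # invented placeholder ad texts
    "we will cut taxes and support small businesses",
    "protect our climate and invest in renewable energy",
    "more police on the streets to fight crime",
    "better hospitals and shorter waiting times",
]
labels = [["economy"], ["environment", "energy"], ["law and crime"], ["health"]]

mlb = MultiLabelBinarizer()
Y = mlb.fit_transform(labels)                     # multi-label indicator matrix

clf = make_pipeline(
    TfidfVectorizer(ngram_range=(1, 2)),
    OneVsRestClassifier(LogisticRegression(max_iter=1000)),
)
clf.fit(ads, Y)

pred = clf.predict(ads)                           # evaluate on held-out annotated ads in practice
print("micro F1:", f1_score(Y, pred, average="micro"))
```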
this kind of imbalance could reinforce gender and age stereotypes, and may deprive users from relevant information that might be important in their voting deliberation.through our study, we aim to provide a solid foundation for analyzing policy-related ads that combines knowledge from both political science and computer science research. the challenges in solving the problem are diverse, and go from having the right codebooks for labeling, to having the right strategies to get high quality labels, and understanding which nlp algorithms are the most suitable for supporting such nuanced classification. all our datasets and code can be found at https://www.lix.polytechnique.fr/~goga/datasets/ policy_ads_www23.html. during the french presidential elections, we have also developed a public web service to monitor the political ads send (https://elections2022.imag.fr). the code of the web service can be found at https://github.com/romaissalmh/elections2022. and over 400k political ads in france on the meta ad library) it is impossible to go through these ads manually; hence, the new quest is to provide the public with methods and tools to assist in scrutinizing political ads. there are a number of reasons why identifying policyrelated political ads is important: (i) political communication-makes it possible to identify how political candidates and parties represent themselves and on which policies they focus their attention; (ii) mandate accountability-check, once elected, whether elected officials respected the policy pledges they advertised during elections (accountability is central to democratic theory); (iii) influence on deliberation-mandate theories assume that voters are rational and they decide for whom to vote based on a careful consideration of available information. micro-targeting of policy-related ads could lead to some users being overly exposed to ads about specific policy issues (e., is "economy" too broad and should we consider "climate change" as an independent policy category?) while it is tempting to decide on a set of reasonable categories to detect, to robustly analyze policy-related ads across elections and countries, we need to rely on a solid, comprehensive, and stable theoretical basis. overall, we see big variations in policy attention across demographic groups, with women over-targeted with ads about "health", young users (ages 13-24) over-targeted with ads about "law and crime" and users aged over 55 over-targeted with ads about "immigration".to obtain labeled data, we hired human annotators to manually annotate political ads according to the 26 main cap policy categories (tab.while we took several steps to make the labeling task as easy as we could for workers, we still observe a lot of disagreement on the policy categories chosen by different workers: on 16% of the ads annotators did not agree on any policy category. false negatives seem to happen when experts label ads with multiple categories, while prolific workers label the ads with only a subset of categories. we observe that some policy categories such as environment, energy and cultural policy are well detected, whereas the accuracy is much lower for ads related to social policy. for our case study, the precision is more important than the recall-it is more important not to mislabel ads with the wrong policy category than to miss some ads that are related to a policy category. 
for this section, this is not problematic as our analysis compares policy attention in different demographic groups and across presidential candidates, and a low recall should count equally in all groups. out of the 76 067 political ads, our model predicted at least one policy category for 59 718 ads. we analyze both policy attention in ads coming from the official accounts of presidential candidates and their corresponding political parties and ads that do not necessarily come from official accounts but mention a candidate's name. the paper does not provide any details on the annotation process and does not show the accuracy across different policy categories. overall, there have been several related works on analyzing political content; however, none of them provides the solid foundations we provide for analyzing policy-related ads, which go from having the right codebooks, to investigating difficulties in annotation, to understanding which language model configurations are most suitable for supporting such nuanced classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/82.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/82.txt new file mode 100644 index 0000000000000000000000000000000000000000..f65f237923b54c9bf85c9561bfec1433132104ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/82.txt @@ -0,0 +1 @@ +minimax regret bounds in bandit environments are a well studied problem and results typically are limited to a particular action set, namely the l_1 and l_∞ balls in r^d. we show in this article that the methods introduced by lattimore and szepesvári in chapter 24 can indeed be generalized to a wide variety of action spaces, namely to any l_p ball where p is in the range (1, ∞). let x be the l_p ball, defined as the set of points x in r^d with ||x||_p ≤ 1. we choose θ ∈ {+∆, -∆}^d and note that the regret, defined as r_n(θ), is bounded as follows, where the third equality follows from lemma a.1 and the last inequality follows from lemma a. the remainder of the proof follows the same idea as that presented in the proof of the unit ball in section 24, where the last inequality follows from the definition of τ_i, where the last inequality follows under the assumption d (2nc^2), and where the last inequality follows from the definition of τ_i and setting the value of ∆ accordingly. this result also shows that the minimum eigenvalue of the design matrix and regret are fundamentally different quantities (banerjee et al.). for example, note that for l_p balls with p > 2, the minimum eigenvalue can grow at a rate lower than ω(√n), whereas the minimax regret remains bounded as ω(√n). we expect that similar results can hold for general convex bodies and not just for l_p balls, where the inequality follows from lemma a, and where the first inequality follows from hölder's inequality and the second inequality follows from the hypothesis.
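The displayed equations from the bandit text above were lost in extraction. The block below gives a standard formulation consistent with the surviving fragments (the unit l_p ball as action set and the regret against the best fixed action); it is an assumption-labelled reconstruction, not the paper's exact display.

```latex
% Standard linear-bandit setup consistent with the fragments above (assumed, not the paper's exact equations).
\[
  \mathcal{X} \;=\; \bigl\{\, x \in \mathbb{R}^d : \lVert x \rVert_p \le 1 \,\bigr\},
  \qquad 1 < p < \infty ,
\]
\[
  R_n(\theta) \;=\; n \max_{x \in \mathcal{X}} \langle x, \theta \rangle
  \;-\; \mathbb{E}\!\left[ \sum_{t=1}^{n} \langle x_t, \theta \rangle \right],
  \qquad \theta \in \{+\Delta, -\Delta\}^d .
\]
% The text's contrast is that this minimax regret scales like \sqrt{n},
% while the minimum eigenvalue of the design matrix can grow more slowly for p > 2.
```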
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/820.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/820.txt new file mode 100644 index 0000000000000000000000000000000000000000..658aa24b8d1c68b5e864fa1708b099c61cfdecea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/820.txt @@ -0,0 +1 @@ +chatgpt 1 has sparked debate over the range of content it and other large language models (llms) like it can competently produce . popular discussion of chatgpt in the educational community has centered around the concern that it could pose an existential threat to traditional assessments, should the quality of its answers be sufficient enough to score highly on many assignments . to the extent that this is true, we hypothesize that chatgpt generated answers to problems, with work shown, could also be effective for learning, serving as "worked solution" hints in computer tutoring systems. this style of solution hinting in algebra has been shown to lead to learning gains among secondary students and mechanical turk workers in algebra tutoring systems.we investigate if chatgpt generated hints can be beneficial to algebra learning by conducting an online experiment with 77 participants from mechanical turk. in our 2 x 2 design, participants 1 https://chat.openai.com/chat are randomly assigned to the manual hint or chatgpt generated hint condition and randomly assigned to one of two algebra tutoring lessons with questions adopted from openstax elementary algebra and intermediate algebra textbooks 2 . we use a soon-to-be released tutoring system, called open adaptive tutor (oatutor), and its pre-made human authored hints based on this same content as the control and replace the human hints with chatgpt produced hints to serve as the experiment condition to answer the following research questions:• rq1: how often does chatgpt produce low quality hints?• rq2: do chatgpt hints produce learning gains?• rq3: how do chatgpt hints compare to human tutor hints in learning gain?while tutor authoring tools have improved the efficiency with which humans can transcribe tutoring content , the creative process of generating content is still labor intensive. should chat-gpt, or other llm-based hints be effective automatic hint generators, it would open the door to previously unrealized scaling of computer tutoring approaches in a multitude of domains and learning contexts. we make both the tutor code and all content involved in the experiment available for full reproducibility 3 of what we believe to be the first experiment evaluating llm-based hints for learning gains. to the extent that this is true, we hypothesize that chatgpt generated answers to problems, with work shown, could also be effective for learning, serving as "worked solution" hints in computer tutoring systems.we investigate if chatgpt generated hints can be beneficial to algebra learning by conducting an online experiment with 77 participants from mechanical turk.com/chat are randomly assigned to the manual hint or chatgpt generated hint condition and randomly assigned to one of two algebra tutoring lessons with questions adopted from openstax elementary algebra and intermediate algebra textbooks2. 
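Since the study's headline comparison is learning gain (post-test minus pre-test) across the 2 x 2 conditions, a short analysis sketch may clarify what is being tested. The column names, CSV path, and use of Welch's t-test here are assumptions for illustration, not the authors' actual analysis script.

# Sketch of the learning-gain comparison described above (2 x 2 design:
# hint source x lesson). Column names and the CSV path are hypothetical;
# the study's released data may be organized differently.
import pandas as pd
from scipy import stats

df = pd.read_csv("participants.csv")  # hypothetical file
df["gain"] = df["post_score"] - df["pre_score"]

# Within each lesson, compare ChatGPT-generated hints against manual hints
# using Welch's t-test (no equal-variance assumption).
for lesson, grp in df.groupby("lesson"):
    manual = grp.loc[grp["condition"] == "manual", "gain"]
    chatgpt = grp.loc[grp["condition"] == "chatgpt", "gain"]
    t, p = stats.ttest_ind(manual, chatgpt, equal_var=False)
    print(f"{lesson}: manual mean={manual.mean():.2f}, "
          f"chatgpt mean={chatgpt.mean():.2f}, t={t:.2f}, p={p:.3f}")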
we use a soon-to-be released tutoring system, called open adaptive tutor (oatutor), and its pre-made human authored hints based on this same content as the control and replace the human hints with chatgpt produced hints to serve as the experiment condition to answer the following research questions:.• rq3: how do chatgpt hints compare to human tutor hints in learning gain?. we make both the tutor code and all content involved in the experiment available for full reproducibility3of what we believe to be the first experiment evaluating llm-based hints for learning gains. to prevent incorrect or potentially inappropriate hint content from making its way to study participants, we conducted a quality check of all chatgpt generated hints. learning gain results from the four experiment conditions are shown in table2, as well as statistics on average time spent in the lesson per participant, number of total hints requested across all participants in the condition, and average pre and post-test scores.results of our study producing algebra hints using chatgpt showed a 30% rejection rate of produced hints based on quality (rq1), suggesting that the technology still requires human supervision in its current form. our experiments, comparing learning gain differences between chat-gpt generated hints and manually generated hints, showed that all experiments produced learning gains; however, they were only statistically significant among the manual hint conditions (rq2). manual hints produced higher learning gains than chatgpt hints in both lessons and these differences were statistically significantly separable (rq3). a similar amount of time was spent between both control and experiment in the two lessons, indicating that while the number of hints requested in the control was much greater, because manual hints were unbounded in the number that could be authored, there was not a time savings by seeing fewer hints in the experiment conditions. worth considering, is if this result is indicative of the quality difference between machine and human created hints or if it reflects the difference in efficacy between worked solutions and the use of hints and scaffolds. future work could isolate this by comparing to manually generated worked solutions or having chatgpt generate scaffolding and several hints through prompt engineering. further isolation of the effect of the hints could be achieved by adding an immediate feedback-only condition, whereby students are not shown any hints but are told the correctness of their response. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/821.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/821.txt new file mode 100644 index 0000000000000000000000000000000000000000..37466f231b80c87042ec478fb367a4b70f08a494 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/821.txt @@ -0,0 +1 @@ +ending poverty is the common mission of mankind, which is listed as the first pivotal goal of the united nations' sustainable development 1 . to eradicate extreme poverty for all people everywhere, a fundamental and critical question is where those vulnerable populations are located. this question refers to general poverty reduction policy interventions to provide material assistance to the poor, such as "where should the next school or main road be?" 
as the basic socio-economic unit, the village is always seen as the cell of the social system ; so poverty identification at the village level has become the key to mapping poverty.the household surveys and population census are the standard ways to measure an area/individual's socioeconomic conditions, which provide policymakers with critical statistics for mapping out resource assignments . however, with the rapid socioeconomic and demographic changes, data collection at a higher frequency is required, which means substantial costs . besides, due to the diverse sources of income and the asymmetric information between the investigators and interviewees, the reliability and validity of survey data are doubted. the development of web infrastructure and modeling tools provide new opportunities and fresh approaches to identify poor villages. in recent years, combining geospatial information and machine learning technology has become ever increasing interest for research on poverty area identification . geospatial information, such as nighttime lights, day-time satellite imagery, and crowd-sourced map data, can assist in capturing poverty and socioeconomic conditions on a coarse scale . machine learning technology allows researchers to effectively and efficiently utilize geospatial information . however, these methods rely too much on obtaining quantifiable geospatial features while some information, such as nighttime light, is unfeasible to collect at the village level. besides, these methods pay much attention to geographical characteristics but ignore the relationship between villages that shows regional economic activities.in this study, we propose a novel method to identify poor villages based on the web infrastructure and its widely applied graph-based modeling methods . we build the village graph based on distances and analyze the poverty occurrence from the graph perspective. through field investigation, we collected village poverty labels in enshi prefecture, one of the poverty-prone cities in china, and obtained the geological location by web map services. we identify two factors in the village graph to model poverty occurrence. 1) village centrality in the graph, 2) village's distance homophily decay effect. based on the observation, we designed a graph-based model to identify poor villages. a global centrality2vec module to capture the centrality similarity between nodes. we reconstruct the edges based on different kinds of nodes' centrality measures and perform random-walk-based skip-gram training to obtain centrality-aware node features. a local graph distance convolution is designed to aggregate information from direct neighborhoods, where we model the homophily decay effect as the decayed edge weight based on distance. the collected data and code are open-sourced at https://github.com/yangliangwei/graph-poverty-identification. our contributions are summarized as follows: this question refers to general poverty reduction policy interventions to provide material assistance to the poor, such as "where should the next school or main road be?"as the basic socio-economic unit, the village is always seen as the cell of the social system; so poverty identification at the village level has become the key to mapping poverty. 1) village centrality in the graph, 2) village's distance homophily decay effect. 
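A minimal sketch of the village-graph idea described here, assuming only village coordinates and poverty labels are available: villages within a distance threshold (the passage uses 5 km) are connected, and centrality is compared between poor and non-poor villages. The coordinates, labels, and the choice of degree centrality are illustrative assumptions, not the released dataset or model.

# Build a village graph by connecting villages within 5 km and compare
# centrality of poor vs. non-poor villages. Values are illustrative only.
import math
import networkx as nx

villages = {          # id: (lat, lon, is_poor) -- illustrative values
    0: (30.27, 109.48, True),
    1: (30.30, 109.50, False),
    2: (30.05, 109.10, True),
    3: (30.06, 109.12, False),
}

def haversine_km(a, b):
    """Great-circle distance between two (lat, lon) points in kilometres."""
    lat1, lon1, lat2, lon2 = map(math.radians, (*a, *b))
    h = math.sin((lat2 - lat1) / 2) ** 2 + \
        math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2) ** 2
    return 2 * 6371.0 * math.asin(math.sqrt(h))

G = nx.Graph()
G.add_nodes_from(villages)
for i in villages:
    for j in villages:
        if i < j and haversine_km(villages[i][:2], villages[j][:2]) <= 5.0:
            G.add_edge(i, j)   # distance threshold d = 5 km, as in the passage

centrality = nx.degree_centrality(G)
poor = [centrality[v] for v, (_, _, p) in villages.items() if p]
non_poor = [centrality[v] for v, (_, _, p) in villages.items() if not p]
print("mean centrality, poor:", sum(poor) / len(poor))
print("mean centrality, non-poor:", sum(non_poor) / len(non_poor))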
a local graph distance convolution is designed to aggregate information from direct neighborhoods, where we model the homophily decay effect as the decayed edge weight based on distance.the village graph g village = {v, e} is built to represent the connections between villages, where. the clustering effect can be reflected by the node centrality measure on village graph g village . the "p poor" indicates the number of poor village neighborhood of the poor center village, "p non poor" represents the number of nonpoor villages in the neighborhood of the poor center village, etc. with the increase in distance, the poor village percentage in neighborhoods is decreasing for poor villages while it is increasing for non-poor villages. based on the analysis, we model the homophily decay effect into message passing and propose the graph distance convolution to aggregate local neighborhood information.4, it consists of a global centrality2vec module to capture the village's centrality similarity, and a local graph distance convolution module that aggregates neighborhood information.3 shows poor villages tend to have a smaller centrality in g village . to capture the surrounding structure topology as in struct2vec, we compute village centrality similarity based on the ordered centrality sequences of villages within 1-hop in g village . based on the collected sequences, we aim to learn village representation h ∈ r | v | * 𝑑 that can capture the centrality similarity, where 𝑣 𝑖 is represented as ℎ 𝑖 . for village 𝑣 𝑖 , it embeds different kinds of centrality similarity into one dense vector h 𝑖 , which is used as the input feature to the local graph distance convolution module. we construct village graph g village with a distance threshold 𝑑 = 5𝑘𝑚. by connecting villages as a graph based on geographic distance, we observe two key factors (centrality, homophily decay effect) for identification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/822.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/822.txt new file mode 100644 index 0000000000000000000000000000000000000000..0e1b67c9f7d9985f4e5c4340dba12b23a89ffa4c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/822.txt @@ -0,0 +1 @@ +recommendation systems-the technology at the heart of online platforms-are inextricably entwined with our everyday lives. from movies (e.g., netflix, hulu) to short blogs (e.g., tiktok, twitter, mastodon) and e-commerce (e.g., amazon), people turn to these recommendation systems to select entertainment, information, and products. for example, a recent study by gomez-uribe and hunt (2015) revealed that 80% of the approximately 160 million hours of video streamed on netflix were recommended by the service's recommendation system. in light of this, countless papers have been written exploring how recommendation algorithms learn about users' preferences, and how users interact with recommendations.from what we understand, users and recommendation systems exist in a feedback loop: recommendation systems supply a user with recommendations, and in response, the user decides whether and how to engage with this recommended content. from this engagement, the system receives information about the user's preferences in the forms of explicit feedback-e.g., likes, comments, shares, and implicit feedback-e.g., information about which content the user elected to engage with. 
the system, in turn, uses its inferences about the user's preferences to supply the user with future recommendations.many models treat the user as a passive participant in this feedback loop, assuming that users simply consume what they like without regard for their ability to influence the content they will see in the future. however, others posit that users may actively try to affect what the platform learns about their preferences in order to "curate" their feeds based on the content they want to see more of (simpson et al., 2022). if users truly engage in curation, this has significant implications for how platforms learn users' preferences, because users' behavior in response to recommendations may be strategic, rather than simply maximizing enjoyment of content. and if platforms misrepresent users' preferences, there might be grave dangers of unfairness arising if minorities have differential incentives to engage in curation. this motivates the following two questions:question 1: are everyday users of online platforms aware of this feedback loop when they are curating their feeds? and if yes, what types of actions (if any) do they take in response?question 2: can we provide theoretical insights on the harms that arise in recommendation to strategic users? and are there any interventions that can provably ameliorate these harms? in the first phase, rs commits to recommendation policy g mapping consumption frequencies q ∈ q := n d to content that is to be served in the future, x ∈ x :=, where d denotes the number of different content types. we model the cold start phase to do exactly this: a user of type θ ∈ θ arrives, they make a consumption plan a θ ∈ a := d about the type of content they want to consume, and ultimately consume said content with frequencies q ∼ π(a θ ), where π(•) is the consumption realization function. we show that when the user population is split into minority and majority users in the npe the minority users will overconsume intra-group preferred content type, even if they do not actually like the particular piece of content that is currently served to them. in the protocol, we denote by a θ ∈ a := d the consumption plan of all users with type θ, which specifies for each content type j ∈ the probability a j θ that a user of type θ engages with content type j when exposed to it.in words, u u (q, x, a; θ) corresponds to the utility that an agent of type θ obtains when they consume content at the frequencies specified by q ∈ q in the cold start phase after having made a consumption plan a, and then they get served content of type x ∈ x in recommendation phase. if u's consumption plan is such that a j ≤ θ j , u derives the highest utility (while generating the same consumption counts observable by the platform) by consuming all the content they like and no other content. in the poisson consumption model, if the best response g of the platform (2) serves at least two types of content, no agent is exactly indifferent between the served types of content, and all types are interior, θ j ∈ (0, 1) for all j = 1, 2, . as there are two different types of content that are served, there is a type of content x ∈ x such that the set of realized consumption profiles g -1 ({x}) is neither empty nor the full set of realized consumption, ∅ ⊂ g -1 ({x}) ⊂ q. we show that for mainstream content, the minority user utility is decreasing in their consumption of mainstream content, and for minority-preferred content it is increasing. 
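To make the cold-start feedback loop concrete, the following toy simulation encodes the passage's ingredients: a consumption plan a per content type, Poisson-distributed realized consumption q, and a recommendation policy g that serves the most-consumed type. The exposure rate, preference vector, and the simple argmax policy are invented for illustration; the paper's formal model and equilibrium analysis are considerably richer than this sketch.

# Toy simulation of the cold-start phase and the platform's best response.
import numpy as np

rng = np.random.default_rng(0)
d = 3                         # number of content types
exposure_rate = 10.0          # hypothetical expected exposures per type
theta = np.array([0.9, 0.2, 0.6])   # hypothetical true engagement preferences

def realized_consumption(plan):
    # q_j ~ Poisson(exposure_rate * a_j): consumption counts per content type
    return rng.poisson(exposure_rate * plan)

def platform_policy(q):
    # g: serve the content type with the highest observed consumption
    return int(np.argmax(q))

# A "truthful" plan consumes in proportion to preferences; a "strategic" plan
# over-consumes type 2 to steer what gets served in the recommendation phase.
for name, plan in [("truthful", theta), ("strategic", np.array([0.3, 0.0, 1.0]))]:
    q = realized_consumption(plan)
    served = platform_policy(q)
    print(f"{name:9s} plan={plan} counts={q} -> platform serves type {served}")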
we show that a solution without algorithmic recommendation leads to close to optimal welfare: the recommendation policy that just recommends mainstream content, and does not personalize, leads to no consumption distortions by any of the users, and leads to welfare of 1 -ε for each user, as opposed to 1 for all the users that are correctly classified in the npe that we considered.in recommendation system models, one way to communicate this to users would be to automatically trigger an incognito mode when a user watches some type of content, meaning that the consumption (or not) of this content won't lead to a change in the recommendation. we say that a recommendation system makes a credible commitment to ignore consumption of a content type if it restricts its maximization problem in equation (2) to only those g such that g(q j , q -j ) = g((q j ) , q -j ) (5). in fact, recommending minority-preferred content to (0, 0, 0) may not happen in an npe in which mainstream content is ignored, as can be seen as the recommendation system was indifferent in the case with consumption differences in mainstream content.if users consume content type j like content type j under (a, g), (a, g) yields higher cold start utility under the cold start κ that replaces j by j if j quality-dominates content j type j .3, the three types of content are not ordered in any of the three dimensions: all three types of content are incomparable in quality, and mainstream is less informative than the other two types of content. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/823.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/823.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d0406ecfee9ad3f712fd27bbb8ca0564438b376 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/823.txt @@ -0,0 +1 @@ +in recent years, many studies pointed out the presence of a significant and long-lasting gender gap in the field of computer science . the gender gap exists at all levels: university students, researchers, professors, public administration, and industry. among stem disciplines, computer science is the one with the highest gender unbalance, with the percentage of women enrolled in ict graduate programs usually ranging between 10 and 20 percent depending on the country.the shortage of female ict professionals also affects the job market and the sustainable development of our society. digital transformation is currently predominantly guided by men, and this does not guarantee the diversity of positions and ways of thinking that would be indispensable in such an important field. the lack of women also contributes to the severe shortage of skilled digital workers that is retarding many countries' development.next generation eu (also known as the recovery fund) includes digitization, innovation, competitiveness, culture, green revolution, and ecological transformation among the most funded objectives of national plans. however, the risk is that those benefiting from the funds made available by the european community are mainly male-dominated sectors (such are indeed digital, construction, agriculture, and transport), although the pandemic has had devastating effects on female employment in particular. 
therefore, the need for actions to reduce the gender gap in these areas is extremely urgent.in order to face this problem, since 2014 the european commission has been monitoring the digital progress in the member states by means of the digital economy and society index (desi 3 ), which analyzes four key areas: human capital, connectivity, integration of digital technology, and digital public services. the european commission also launched the digital skills and jobs platform. the lack of digital skills affects companies and public administrations at different levels: there are no experts who can develop cutting-edge technology and there are not enough employees which are able to use existing technology to improve efficiency. the desi also focus attention on the gender gap, considered one of the causes of the lack of digital workers: from the 2022 desi report emerges that only 19 percent of ict specialists are female.despite the fact that computer science is the area where there are the most job offerings and the highest salaries, girls still do not enroll in ict degree programs and do not pursue careers in computer science. to understand the reasons why girls are not enrolling in ict degree programs, we do not have to focus on the job market. in fact, the factor that drives girls away from computer science disciplines is the persistence of gender stereotypes and biases, which lead people to consider computer science-related jobs inappropriate for women. unlike other types of prejudices, the one relating to women's limited aptitude for information technology appears rooted and more difficult to weaken, because it tends to condition girls' interests and inclinations from an early age, leading to a rejection of information technology disciplines which is very difficult to break down in adolescence or adulthood.while boys, when choosing a course of study, are particularly attracted by the possibilities of career and economic progression, for girls social and innovation impact are a very relevant factor . studies conducted at ucla show that for boys the main attractor in choosing to study computer science is a passion for videogames, while girls like computer games less , . furthermore, recent studies demonstrate how a strong deterrent for girls is the stereotype of "nerd", as an antisocial individual with poor communication skills, thus antithetical to their aspirations . many researchers have also suggested that the media is largely responsible for this kind of stereotyping . it is no coincidence that artificial intelligence, whose fascinating applications have been widely covered by the media in more recent times, attracts girls instead . in corroboration, the bachelor's degree program in applied computer science and artificial intelligence started in 2020/2021 by the department of computer science at sapienza university of rome has already reached 35 percent of girls' enrollment.we may conclude that the gender gap in ict is not the consequence of difficulty for women in accessing job positions (although some obstacles remain to reach the top positions), but rather a cultural resistance of the girls themselves when choosing their course of study. 
compared to other fields, where women face objective difficulties in advancing their careers, in the case of computer science it is the discipline itself that does not attract school-age girls, as a consequence of deeply rooted stereotypes at the societal level.to mitigate this problem and redirect girls into the computer science field, many initiatives have been launched at the european and national levels. for example, in italy a portal to collect and connect the numerous national initiatives has been recently created 4 , while at european level is to mention the eugain project5 . most initiatives have been aimed at female high school students. some of these projects achieved some results on a small scale but failed to reverse the general trend. recent studies have pointed out that undermining the prejudices of girls with respect to computer science is very difficult in adolescence, suggesting that, to be effective, awareness programs on computer disciplines should be offered in pre-school or lower school age, that is the moment when gender biases take root in girls' mind . even if these studies show that the most effective way to proceed is to intervene at a younger age, the ongoing digital transformation and the absence of workers with adequate skills in computer science requires fast action. there is a need to keep acting on the high school age range, even if the bias is already deep-rooted in girls, by looking for effective compensatory measures to show girls the potential of the computer science field. the "greedy" strategy proposed in this paper is built on this extremely urgent problem. among stem disciplines, computer science is the one with the highest gender unbalance, with the percentage of women enrolled in ict graduate programs usually ranging between 10 and 20 percent depending on the country.in order to face this problem, since 2014 the european commission has been monitoring the digital progress in the member states by means of the digital economy and society index (desi3), which analyzes four key areas: human capital, connectivity, integration of digital technology, and digital public services.despite the fact that computer science is the area where there are the most job offerings and the highest salaries, girls still do not enroll in ict degree programs and do not pursue careers in computer science. in fact, the factor that drives girls away from computer science disciplines is the persistence of gender stereotypes and biases, which lead people to consider computer science-related jobs inappropriate for women. studies conducted at ucla show that for boys the main attractor in choosing to study computer science is a passion for videogames, while girls like computer games less,. in corroboration, the bachelor's degree program in applied computer science and artificial intelligence started in 2020/2021 by the department of computer science at sapienza university of rome has already reached 35 percent of girls' enrollment.we may conclude that the gender gap in ict is not the consequence of difficulty for women in accessing job positions (although some obstacles remain to reach the top positions), but rather a cultural resistance of the girls themselves when choosing their course of study. 
recent studies have pointed out that undermining the prejudices of girls with respect to computer science is very difficult in adolescence, suggesting that, to be effective, awareness programs on computer disciplines should be offered in pre-school or lower school age, that is the moment when gender biases take root in girls' mind. there is a need to keep acting on the high school age range, even if the bias is already deep-rooted in girls, by looking for effective compensatory measures to show girls the potential of the computer science field. a strategy conceived as a grid of vertical (hard) and horizontal (soft) skills, intertwining topics to which girls are traditionally sensitive -such as environmental sustainability, health, and creativity -with digital skills and soft skills that the public education system more rarely considers -such as team-working, public speaking, social networking, and competition -with visible consequences more for girls than for boys. to attract girls' interest in icts at a later stage of education, it is necessary to make them understand that computer science has significantly contributed to the advancement of other disciplines or subjects in which women traditionally show great passion and aptitude, such as medicine, environment and sustainability, art, and social sciences.the g4greta project intercepts many elements of the next generation euprogramme: digitization and innovation, the green revolution, and the gender gap, albeit on a small scale, such as the proposal of a pilot program on digital and environmental culture, aimed at female students of high schools in lazio, italy.-school teachers: they will be made aware of ict and green issues, and then independently -or with advice from the university -design similar educational projects for their students7; -high school students: they will acquire technical (horizontal) skills in the field of software design and green technologies, as well as vertical skills such as:.-students, doctoral students, and young researchers of the university: they have been involved in the tutoring of school students participants, increasing their awareness of gender issues, being encouraged to network with each other and with the participating students, strengthening their sense of community, and reducing the discomfort of working in predominantly male contexts; -university professors: they are involved in the design and implementation of the initiative, acting as lecturers, role models, and mentors, thus contributing to an important goal such as that of gender equality in ict. this is done by tracking a study cohort consisting of girls who participated in the project and comparing it with a control cohort consisting of girls with similar characteristics but who did not attend the program, in addition to designing a social engagement strategy to keep all the project participants connected and active. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/824.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/824.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4106738c099dbf40ee2e1c32f6d8eb55057dea1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/824.txt @@ -0,0 +1 @@ +autonomous systems, broadly meaning systems capable of determining, initiating and executing action in pursuit of a goal, are being used around the globe to increase the safety and efficiency of tasks, and to lower economic and environmental cost. 
"to continue to make progress and innovate, ensuring that these systems have been designed responsibly and robustly will be key to safeguarding trust." while the technology enabling operation of these systems in the australian maritime, air and land domains is rapidly advancing, the assurance and accreditation framework, which forms a necessary part of commercial operationalisation, is not keeping pace. some common generalised names include autonomous vessel (av), unmanned vessel (uv), maritime autonomous vehicle (mav), autonomous vehicle (uxv), and maritime autonomous surface ship (mass).for the purposes of discussion around land-based autonomous systems, the key terminology used to refer to these systems are unmanned ground vehicle (ugv), unmanned autonomous ground vehicle (uagv), autonomous ground vehicle (agv), autonomous vehicle, automated vehicle or unmanned vehicle. other land-based autonomous systems include bio-mimetic systems, platform agnostic systems, and autonomous systems that are without physical instantiation such as diagnostic ai, legal automation, µzero and other similar automated ml programs.there are many challenges associated with effectively regulating autonomous systems, including how to ensure trust in these systems, how to ensure the regulatory approach is the most appropriate option, how to adapt current systems safety approaches, and how to adapt current assurance and accreditation frameworks. the complex, interconnected nature of autonomous systems, including cybernetic systems, means that assurance as a concept must shift to account for the high levels of interdependency between core systems.an example of an industry-led collaboration is the maritime uk autonomous systems regulatory working group (masrwg), which published a code of practice for maritime autonomous ships in november 2017, and have provided an updated version each 12 months.whilst systems safety focuses purely on systems, safety management systems (smss) expand the concept beyond systems to organisations, people and processes.in australia there is a lack of established assurance frameworks to integrate autonomous systems into traditional systems, which means neither industry, testing facilities, nor regulators, have clear or consistent expectations or understanding of what assurance activities are required to demonstrate compliance with requirements to indicate safe operations. this lack of established standards or codes of practice for autonomous systems, and lack of sophisticated understanding within the surveying and inspection industry, and within regulators, represents a lost opportunity for the australian economy to efficiently gain the economic benefit of new technology and to enable local industry to design, test and commercialise novel autonomous systems. 
while neither the national law act nor the navigation act specifically refers to autonomous systems, the broad definition of "vessel" means autonomous systems are generally included."amsa is actively working to improve their regulatory approach to autonomous systems, for example by seeking more hands-on experience, putting in place a policy to guide decision making, establishing an autonomous vessel team to triage queries and applications, working on a package of legislative amendments that would enable more flexibility to better address emerging technology, and establishing a partnership with the trusted autonomous systems defence cooperative research centre (tas) to explore issues around assurance and trust in autonomous systems.there is a critical role to be played by third parties from industry, government, and academia who can work together to develop, test and publish a new assurance and accreditation framework for trusted autonomous systems, and provide recommendations for areas the regulators should focus on to ensure the benefits of autonomous systems can be realized, without compromising safety.in order to facilitate development and commercial operationalisation of autonomous systems, in some jurisdictions industry groups have collaborated to create guidelines and codes of practice, such as the maritime autonomous surface ships industry conduct principles & code of practice mentioned earlier. the centre team, drawn from the australian maritime safety authority, civil aviation safety authority, and the university of adelaide, have deep regulatory and technical expertise in autonomous systems, and bring a wealth of practical experience and strong stakeholder relationships to the project.there are opportunities for third party collaborations in australia, such as the assurance of autonomy activity mentioned above, as well as the australian association for unmanned system's new maritime working group, to improve the assurance and accreditation approach for autonomous systems, and accelerate an improved regulatory approach which will facilitate innovation without compromising safety. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/825.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/825.txt new file mode 100644 index 0000000000000000000000000000000000000000..24e39f3b2db56b335cc91125d577e3df1accb5d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/825.txt @@ -0,0 +1 @@ +in recent decades, the average daily distance traveled by the french population has increased considerably (from 5 km on average in the 1950s to 45 km on average in 2011 ), as has the number of personal cars (11,860 million cars in 1970 compared to 38,3 million in 2021 ). for example in toulouse, cars concentrate 74% of the distances traveled by the inhabitants and contribute up to 88% to ghg emissions . the evolution of mobility is therefore an essential question, in the context of the climate crisis but also in terms of public health: the negative impact of a sedentary lifestyle , road accidents, air pollution and sound pollution . indeed, 40000 deaths per year are attributable to exposure to fine particles (pm2. 5) and 7000 deaths per year attributable to exposure to nitrogen dioxide (no2), i.e. 
7% and 1% of the total annual mortality; this report also concludes that the 2-month lockdown of spring 2020 in france made it possible to avoid 2300 deaths by reducing exposure to particles, and 1200 more deaths by reducing exposure to nitrogen dioxide. this shows that public policies and individual behaviour changes (modal shift towards cycling, more extensive teleworking) can have an impact on public health. for instance, during the covid-19 pandemic many temporary cycling lanes were set up. however, aside from such emergencies, such public policies take time to set up, and they are not always well accepted. as an illustration, many of these temporary cycling lanes were returned to cars after the end of the different lockdowns. indeed, despite feeling more and more concerned about climate change, citizens are often reluctant to accept constraining public policies that could slow it down. such public policies, including changes in infrastructure or taxes, are not easily accepted (e.g. strikes against petrol price rises or new tolls), in particular by those citizens who depend (or believe they do) on their car to commute. as a result, mobilities evolve very slowly; for instance, in france a large proportion of commuting is still done by car, even for very short journeys. some reasons for this inertia are well known, such as the lack of alternatives: some rural areas are very poorly served by public transport, while cycling facilities are concentrated in town centres; the cost of electric or newer cars is too high for many workers. another known reason is the difficulty of changing habits, which are followed routinely unless a life event resets them. other works show the influence of individualism on user acceptance of radical transition scenarios. yet another possible explanation of this resistance to mobility change is the influence of cognitive biases in human reasoning. cognitive biases are heuristics that facilitate reasoning in situations of uncertainty or danger. like any heuristics, they are often useful, but they can also lead to mistakes with sometimes serious consequences. research shows that becoming aware of one's biases can help overcome them in decisions; these works thus propose debiasing interventions, such as playing a game or watching a video, and show their positive impact in the short and medium term (2 months). we suggest that interactive simulation could be used as another debiasing intervention; indeed, it has been successfully used previously to explain various complex phenomena, for instance the mechanisms of the pandemic. we therefore propose to use interactive simulators to raise awareness about a number of cognitive biases and their influence on our mobility decisions in the face of climate change. this article describes three simulators designed to illustrate various cognitive biases at work in the resistance to the adoption of so-called soft mobility: habits, reactance bias, and halo bias. these simulators are based on a model of the population as autonomous agents, each agent choosing its mobility according to various criteria and under the influence of various biases. each simulator is deliberately kept relatively simple, focusing on the role of one particular bias, to facilitate the exploration of links between inputs and outputs.
the idea is that people should be able to play with it alone, without needing the guidance or explanations of a supervisor, and still understand or learn something in the process.this work is part of a larger project aiming at simulating the transition of cities towards more sustainable mobility. various simulators and serious games have already been proposed in this context . all simulators described in this paper are already available to play online, but have not yet been evaluated. an online survey is being designed for that purpose, and workshops are also being planned in high schools to test the simulators. the paper is structured as follows. section 2 introduces useful background about cognitive biases. section 3, section 4 and section 5 describe our three simulators. finally, section 6 discusses limitations and prospects of this work, and section 7 concludes the paper.in this paper we propose three simple simulators focused on three different factors of resistance to mobility change: habits (section 3), reactance bias (section 4), and halo bias (section 5). susceptible agents might trigger the reactance bias based on a distance condition: when the agent is exposed to another opinion, it evaluates its distance with its own current opinion.• when the user selects the opinion delta triggering the bias, and the content of the message, two monitor boxes update in real time the resulting size of the targets: the positive target are those agents who can be persuaded, while the negative target is made up of those agents who will react reversely due to the reactance bias.rational multicriteria decision model the model is based on an existing model of rational multicriteria evaluation of modes of mobilityin which 4 modes of travel (car, bike, bus, walking) are evaluated on the basis of 6 criteria (time, cost, comfort, safety, ecology, praticity) and their priority for each individual. it is expected that rational agents will switch to a different mobility when they grow unhappy with their current mode, while biased agents will ignore the negative criterion in order to restore satisfaction without having to change mobility.the population is then initialised with a believable distribution of citizens on the 4 modes (50% cars, 20% bikes, 20% bus, 10% walk), and their initial priorities are set so that their rational choice in the current urban setting is indeed their chosen mode, but with some random variability so that not all users of each mode have exactly the same priorities. the triggering threshold is set to 15: this means that when the score of the current mobility mode on a given criterion (rated between 0 and 100%) falls at least 15 points below the priority of that criterion for the agent (also rated between 0 and 100%), this agent will activate the halo bias and start ignoring this criterion altogether. 
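A minimal sketch of the halo-bias mechanism just described, assuming the six criteria listed above (reading "praticity" as practicality): a gap of at least 15 points between a criterion's score for the current mode and the agent's priority triggers either a mode switch (rational agent) or a "halo" that makes the agent ignore that criterion (biased agent). The scores and priorities are illustrative, not the simulator's calibrated values.

# Halo-bias step for one agent; values are illustrative only.
THRESHOLD = 15
CRITERIA = ["time", "cost", "comfort", "safety", "ecology", "practicality"]

scores = {  # criterion scores per mode in the current urban setting (0-100)
    "car":  {"time": 80, "cost": 40, "comfort": 85, "safety": 70, "ecology": 10, "practicality": 90},
    "bike": {"time": 55, "cost": 95, "comfort": 50, "safety": 45, "ecology": 95, "practicality": 60},
}

def dissonant_criteria(mode, priorities, halo):
    """Criteria whose score falls at least THRESHOLD points below the agent's priority."""
    return [c for c in CRITERIA
            if c not in halo and scores[mode][c] <= priorities[c] - THRESHOLD]

def step(agent):
    bad = dissonant_criteria(agent["mode"], agent["priorities"], agent["halo"])
    if not bad:
        return
    if agent["biased"]:
        agent["halo"].update(bad)          # restore satisfaction by ignoring criteria
    else:
        # rational agent: switch to the mode with the fewest dissonant criteria
        agent["mode"] = min(scores, key=lambda m: len(dissonant_criteria(m, agent["priorities"], set())))

agent = {"mode": "car", "biased": True, "halo": set(),
         "priorities": {"time": 70, "cost": 50, "comfort": 60, "safety": 60,
                        "ecology": 80, "practicality": 70}}
step(agent)
print(agent["mode"], agent["halo"])   # the biased agent keeps the car and puts "ecology" in halo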
the other options are various histograms to visualise at a glance: values of each mode on each criteria in the current urban setting, showing which mobility is favoured; average priorities of each criteria among users of each mode, illustrating the different user profiles; counters of users of each mode putting each criteria in halo (and thus ignoring it), allowing to spot the "weak" criterias for each mode (discrepancy between value and priority).on the centre, the user can select a mode and a criteria in the dropdown menus, which will automatically display the up-to-date value of that mode on that criteria in the current town (then editable with the buttons), as well as its up-to-date priority in the population (average on all citizens). under the parameters, several additional monitors also display: the average mark for that mode among its users; its average mark among users of other modes; and the list of criteria put in halo by its users (details can be seen on the halo histogram on the left). these two marks can differ because users of different modes have different priorities (which can be visualised on the priority histogram), but also because of the halo bias activated by biased citizens when evaluating their own mobility mode.our agents can use two strategies to restore cognitive consonance between their mobility and their priorities: modifying their choice of mobility, or their priorities (putting a halo on the negative criteria). here agents who deal with a cognitive dissonance between their current mobility choice and their growing ecological priority might also feel that they have no control on the problem (choice of mobility), because they (believe that they) depend on their car (there might indeed be no alternative, or it would be too costly). they can all provide an explanation to the inertia observed in people's adaptation to climate change: people do not change mobility because their habits bypass rational evaluation; or they do not change mobility because the halo bias keeps them satisfied with their current mode; or they do not change mobility because they want to assert their free will to do as they please. to address this question of equity, in other work we are designing an interactive simulator where the user tries to modify the urban infrastructures dedicated to the different mobilities, in order to raise the modal part of soft mobility, but also while preserving a number of equity indicators, for instance accessibility computed as the percentage of citizens who do have an available mobility mode. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/826.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/826.txt new file mode 100644 index 0000000000000000000000000000000000000000..142f246d995542cc90efa4e0c7d077c80a900939 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/826.txt @@ -0,0 +1 @@ +large generative ai models (lgaims) are rapidly transforming the way we communicate, create, and work. their consequences are bound to affect all sectors of society, from business development to medicine, from education to research, and from coding to entertainment and the arts. lgaims harbor great potential, but also carry significant risk. 
today, they are relied upon by millions of users to generate human-level text (e.g., gpt-4, chatgpt, luminous, bard, bing), images (e.g., stable diffusion, dall•e 2), videos (e.g., synthesia), or audio (e.g., musiclm), while further alternatives are already in the pipeline . soon, they may be part of employment tools ranking and replying to job candidates, or of hospital administration systems drafting letters to patients based on case files. freeing up time for professionals to focus on substantive matters-for example, actual patient treatment-, such multi-modal decision engines may contribute to a more effective, and more just, allocation of resources. however, errors will be costly, and risks ranging from discrimination and privacy to disrespectful content need to be adequately addressed . already now, lgaims' unbridled capacities may be harnessed to take manipulation, fake news, and harmful speech to an entirely new level . as a result, the debate on how (not) to regulate lgaims is becoming increasingly intense .in this paper, we argue that regulation, and eu regulation in particular, is not only ill-prepared for the advent of this new generation of ai models, but also sets the wrong focus by quarreling mainly about direct regulation in the ai act at the expense of the, arguably, more pressing content moderation concerns under the digital services act (dsa). significantly, the eu is spearheading efforts to effectively regulate ai systems, with specific instruments (ai act, ai liability directive), software regulation (product liability directive) and acts addressed toward platforms, yet covering ai (dsa; digital markets act). besides, technology-neutral laws, such as non-discrimination law, and also data protection law, continue to apply to ai systems. as we shall see, it may be precisely their technology-agnostic features that make them better prepared to handle the risks of lgaims than the technology-specific ai regulation that has been enacted or is in preparation.ai regulation, in the eu and beyond, has primarily focused on conventional ai models, however, not on the new generation whose birth we are witnessing today. the paper will situate these new generative models in the current debate on trustworthy ai regulation, and ask what novel tools might be needed to tailor current and future law to their capabilities.inter alia, we suggest that the terminology and obligations in the ai act and other pertaining regulation be further differentiated to better capture the realities of the evolving ai value chain. some of these observations also apply to traditional ai systems; however, generative models are special in so far as they create output designed for communication or speech-and thus raise important and novel questions concerning the regulation of ai-enabled communication, which we analyze through the lens of the dsa and non-discrimination law.to do so, the paper proceeds in five steps. first, we cover technical foundations of lgaims, and typical scenarios of their use, to the extent that they are necessary for the ensuing legal discussion. second, we critique the eu ai act, which seeks to directly address risks by ai systems. the versions adopted by the council (art. 4a-c ai act1) and the european parliament (art. 28-28b ai act ep version2) contain provisions to explicitly regulate lgaims, even if their providers are based outside of the eu . 
these proposals, however, arguably fail to fully accommodate the capacities and broad applicability of lgaims, particularly concerning the obligation for an encompassing risk management system covering all possible high-risk purposes (art. 9 ai act; art. 28b(1)(a) ai act ep version) . precisely because lgaims are so versatile, detailing and mitigating every imaginable high-risk use seems both prohibitive and unnecessary. we also address more recent proposals debated in the european parliament according to whichlgaims should, at least generally, qualify as high-risk systems under annex iii ai act. we ultimately reject this proposal.lgaim risk regulation, in our view, should generally focus on deployed applications, not the pre-trained model . however, non-discrimination provisions may apply more broadly to the pre-trained model itself (more precisely:to its developers) to mitigate bias at its data source .third, we highlight key data protection risks under the gdpr, with a particular focus on model inversion .fourth, we turn to content moderation . recent experiments have shown that chatgpt, despite innate protections , may be harnessed to produce hate speech campaigns at scale, including the code needed for maximum proliferation . furthermore, the speed and syntactical accuracy of lgaims make them the perfect tool for the mass creation of highly polished, seemingly fact-loaded, yet deeply twisted fake news . in combination with the factual dismantling of content moderation on platforms such as twitter, a perfect storm is gathering for the next global election cycle. we show that the eu's prime instrument to combat harmful speech, the digital services act (dsa) , does not apply to lgaims, creating a dangerous regulatory loophole. the paper argues for three layers of obligations concerning lgaims (minimum standards for all lgaims; high-risk obligations for high-risk use cases; collaborations along the ai value chain; cf. now also art. 28 and 28b ai ep version) and makes four specific policy proposals to ensure that lgaims are trustworthy and deployed for the benefit of society at large: direct regulation of lgaim deployers and users, including (i) transparency and (ii) risk management; (iii) the application of non-discrimination provisions to lgaim developers; and (iv) specific content moderation rules forlgaims. we conclude with a brief assessment concerning the vice and virtue of technology-neutral regulation. due to space constraints, we cannot address all social and regulatory concerns regarding lgaims and have to bracket, for example, questions of ip law, power dynamics , a deeper exploration of the comparative advantages of technology-neutral and technology-specific regulation , or the use of lgaims in military contexts .in this paper, we argue that regulation, and eu regulation in particular, is not only ill-prepared for the advent of this new generation of ai models, but also sets the wrong focus by quarreling mainly about direct regulation in the ai act at the expense of the, arguably, more pressing content moderation concerns under the digital services act (dsa). significantly, the eu is spearheading efforts to effectively regulate ai systems, with specific instruments (ai act, ai liability directive), software regulation (product liability directive) and acts addressed toward platforms, yet covering ai (dsa; digital markets act). 
the general approach adopted by the council on december 6, 2022, defines gpais as systems "intended by the provider to perform generally applicable functions such as image and speech recognition, audio and video generation, pattern detection, question answering, translation and others; a general purpose ai system may be used in a plurality of contexts and be integrated in a plurality of other ai systems" (article 3(1b) ai act)., article 8 to 15 ai act) if they may be used as high-risk systems or as components thereof (article 4b(1)(1) and 4b(2) ai act). precisely because large ai models are so versatile, providers will generally not be able to avail themselves of the exception in article 4c(1) ai act: by excluding all high-risk uses, they would not act in good faith, as they would have to know that the system, once released, may and likely will be used for at least one high-risk application. while, according to its article 2(7), the ai act shall not apply to any (scientific, see recital 12b ai act) research and development activity regarding ai systems, this research exemption arguably does not apply anymore once the system is released into the wild, as any public release likely does not have scientific research and development as its "sole purpose" (recital 12b ai act), particularly when, as is often the case, a commercial partner enters to limit liability and provide necessary fine-tuning. this is in direct opposition to the spirit of recital 61 sentence 5 ai act which-in the context of standardization-explicitly calls for an appropriate involvement of smes to promote innovation and competitiveness in the field of ai within the union (see also article 40(2)(b) and article 53(1b)(a) ai act).the first layer will apply to the providers (=developers) of a subset of gpais denominated "foundation models" (article 28b(1)-(3) ai act ep version) and generative ai (article 28b(4) ai act ep version).,40,84], the ep version defines foundation models as an ai system "that is trained on broad data at scale, is designed for generality of output, and can be adapted to a wide range of distinctive tasks" (article 3(1c) ai act ep version) [cf. these requirements have to be tested for, documented, and verified by independent experts, article 28b(2)(c) ai act ep version. also, what is the relationship to the general risk assessment (articles 9 and 28b(1)(a))? a third layer of requirements relates to the ai value chain (article 28(2)(2) ai act ep version), in line with suggestions made below in this paper (see section 3.  deployer: this is the entity fine-tuning the model for a specific use case. article 2(8) ai act; the ai act ep version contains no general exemption, but excludes non-professionals from the definition of deployers, article 3(4)). to meet the ai act requirements concerning training data (article 10), documentation and record-keeping (articles 11 and 12), transparency and human oversight (articles 13 and 14), performance, robustness and cybersecurity (article 15), and to establish the comprehensive risk management system (article 9), any person responsible will need to have access to the developer's and deployer's data and expertise. as our study shows, technology-neutral laws sometimes fare better because technology-specific regulation (on platforms; ai systems) may be outdated before (ai act, ai liability regime) or at the moment of its enactment (dsa). 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/827.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/827.txt new file mode 100644 index 0000000000000000000000000000000000000000..330715f68480814b5f161b1bdf9bfe34b6969b84 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/827.txt @@ -0,0 +1 @@ +the definition is apt but inadequate in providing a comprehensive analysis of the interdisciplinary nature of cybersecurity. the introduction of the concept of property rights shifted the discussion to human actors and expanded on the potential motivations of hackers. however, the machine perspective was not discussed, with the definition inferring that only malicious persons are involved in the practice. however, presenting cybersecurity as a field of study, not an isolated abstract concept, is significant in promoting further educational discourse. the term "cybersecurity" is commonly used to refer to a set of circumstances or events related to improving the integrity of a given information management system or infrastructure and addressing present and emerging challenges associated with the exercise. a review of the literature by the national institute of standards and technology (nist) reveals that the construct can be analysed by categorizing it into its constituent elements, including the internet of things, cloud, network, application, and critical infrastructure security (cybersecurity, critical infrastructure, 2018). data security is a core aspect of the distributed cognition theory detailing the increased utilization of artificial intelligence technologies to pattern human thought processes, thus preventing, predicting, detecting, reporting, and resolving potential threats to digital systems and information technology infrastructure. the construct is integral to understanding the definition of cybersecurity in the contemporary environment as it discusses emerging best practices in the field and the continuous adoption of information technology as a best practice. "cybersecurity is a practice of preventing possible threats or malicious attacks seeking to unlawfully access data, damage information or disrupt digital processes." "cybersecurity is the process of defending data networks, electronic systems, mobile devices, servers, and computers from malicious attacks" gartner (what is cybersecurity?, n.d.). "cybersecurity is a collection of best practices, including infrastructural changes critical in ensuring the integrity of a computer system" (2019). network security is a core domain of cybersecurity as it involves eliminating systems and abilities that potential attackers can exploit to disrupt organizational operations (what is network security?, n.d.). "cybersecurity is the collection and concerting of resources including personnel and infrastructure, structures, and processes to protect networks and cyber-enabled computer systems from events that compromise the integrity and interfere with property rights, resulting in some extent of loss."
defining the range of the circumstances is critical to ensuring that cybersecurity challenges are not only perpetrated by malicious actors as is commonly assumed but may also occur as a result of accidental exposure of sensitive information by users or destruction of the system by natural occurrences.cybersecurity is often defined as the practice of allocating resources to safeguard the integrity of systems from unauthorized entry or manipulation by malicious actors to ensure minimal disruption of organizational operations. cybersecurity is constituted of several core aspects, including critical infrastructure, applications, networks, the cloud, and the internet of things. application security is also a major aspect of cybersecurity in evolving end-user software used to manipulate computer systems and networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/828.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/828.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae23069c09eaf8a195b0c51e3160046d7cbbed0f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/828.txt @@ -0,0 +1 @@ +prior to collecting the tweets, we identified a set of keywords that are meant to represent the breadth and the different sides of the discourse surrounding abortion in the united states. we followed a three-pronged data collection strategy, discussed below. we began collecting tweets on june 25, 2022. in order to collect data prior to june 25, 2022, we leverage twitter academic api's full-archive search2 . this gives us data from january 1, 2022 to june 24, 2022. we gather tweets in real-time from june 25, 2022 to january 6, 2023. to ensure our dataset coverage, we use the count end point from the twitter api to compare the number of tweets in our collection versus the number of tweets identified on twitter. we use the full-archive search endpoint to recollect the date if the total number of tweets per day in our dataset is less than the number of tweets identified on twitter. our data coverage is from january 1, 2022 to january 6, 2023. data collection is ongoing at the time of writing this article. we restrict our collection to english tweets. in all, we collected over 74m tweets shared by roughly 10m users. studies published by gallup(saad 2010b,a)noted that views of democrats and republicans on abortion rights had grown increasingly polarized since 1975 and americans continued to be divided along pro-choice and pro-life lines.(fiorina, abrams et al. 2008)) suggest that the general public's policy preferences are mostly centrist toward wedge-issues including abortion, or argue that the extent of polarization in the us society is exaggerated(mouw and sobel 2001), other evidence suggests different patterns. slogans like "bans off our bodies" (pro-choice), "abortion is a right" (pro-choice), "equal rights for the unborn" (prolife) and "time to reverse roe" (pro-life), echoed on the streets and online, reflecting a deeply polarized populace. the advantage of social media datasets over surveybased measurements used in public opinion polls is the ability to feasibly assess issue positions of the larger public without biases inherent in survey self-reports, especially of contentious political issues (e. 
large-scale social media datasets have also made possible, the analysis of protest mobilization(breuer, landman, and farquhar 2015;steinert-threlkeld 2017;munn 2021), proliferation of misinformation(nikolov, flammini, and menczer 2020;rao et al. 2021), moral and emotional attitudes(guo et al.the bulk of this work focuses on twitter not only due to the relative ease of obtaining online behavioral data from the platform (relative to other platforms, such as facebook) but also because twitter is uniquely suited to analyzing discourse about political issues, given the platform's important role in american politics. 2019), and influence what journalists and media report(nelson and tandoc jr 2019). 2019), and are more likely to engage in political activities online and offline than the general public(bestvater et al.in this study, we collect a large-scale twitter data set consisting of discourse surrounding abortion rights in the united states and the recent supreme court verdict that overruled its 1973 roe v. the #prochoice hashtag on the other hand, is used more frequently by individuals who believe that it is a woman's choice whether or not to get an abortion and any regulation of abortion infringes their freedom.given that not all users on twitter use hashtags in their tweets, we create a list of keywords based on the aforementioned hashtags by simply removing # from the hashtags. while fig3shows that prochoice hashtags seemingly dwarf pro-life hashtags, it is worth noting that in fig2(a), #prolife appears more commonly than #prochoice in our dataset. 2022), and capital riots(kerchner and wrubel 2021). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/829.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/829.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a2b6faf2fb9008509b21d05dfb3796e49818393 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/829.txt @@ -0,0 +1 @@ +electronic health record (ehr) systems provide a secure, integrated collection of patient and population electronically-stored health information in a digital format (odekunle et al., 2017;kukafka et al., 2007;akanbi et al., 2012;adetoyi and raji, 2020;kavuma, 2019;kohli and tan, 2016); it provides a comprehensive digital view of a patient's health history with the goals of eliminating legibility problems with handwritten records; enabling remote access of health records; facilitating intervention earlier in the course of the disease, patient care, and outcomes; increasing efficiency and lowering costs; and ameliorating billing procedures (schmitt and wofford, 2002;erstad, 2003). the potential benefits of ehr systems have enabled its wide adoption in developed and some emerging countries (black et al., 2011).while most developed countries are taking advantage of ehrs to improve their healthcare system, it remains challenging in developing countries to support clinical decision-making and public health using a computerized patient healthcare information system. some developing countries including sub-saharan africa still predominantly use paper-based systems in healthcare delivery, instead of computerized patient management systems (odekunle et al., 2017;akanbi et al., 2012;adetoyi and raji, 2020;kavuma, 2019;kohli and tan, 2016). the lack of an ehr system may lead to issues in managing patient health data to improve the quality of patient care and safety through decision support to clinicians. 
for instance, patient p lives in city x, travels to city y in the same country and falls sick during her stay. since clinician c in y does not have more health data about patient p, (i) treatment options provided to p could cause some important problems involving past health issues and (ii) prescription drugs delivered to p could ignore her medical history. medication errors can result in a substantial economic burden on patients, side effects, and irreversible consequences; there is a huge spectrum of medication errors. some errors may be minors and others may lead to adverse events causing complications and higher mortality (bates and slight, 2014;forster et al., 2008). however, ehr systems can potentially reduce prescription errors and adverse drug interactions (chaudhry et al., 2006) and make available medical history data during emergency care (stiell et al., 2003). this data provides vital medical history details and gives more options to clinicians to decide which treatment best corresponds to the problem and when it should be administered. we pose the question: "how could we replace paperbased systems with ehr systems in the context of developing countries?". a study identified some factors hindering the widespread adoption of ehr systems in developing countries. the identified fac- tors include but are not limited to high cost of procurement and maintenance, poor electricity supply and internet connectivity (odekunle et al., 2017). this paper therefore proposes an ehr architecture that addresses the previously mentioned factors.we believe that the implementation of ehr systems in the style of industrialized countries may fail to function and provide solutions in the context of developing countries. to implement an ehr system in developing countries, besides the aforementioned issues, we also address the issues related to social inclusion, discrimination and socioeconomic status in healthcare. everyone qualifies for health monitoring regardless of personal income, or standard of living. we propose a straightforward architecture to implement an ehr system that fosters inclusion and provides solutions tailored to all social classes. the proposed architecture takes into consideration internet coverage, electricity, and infrastructure issues and foresees alternative solutions to skirt these issues. more interestingly, our architecture proposes an internetfree alternative (an offline solution) to allow medical transactions within hospitals and clinics and the storage of ehrs in geographically underserved and rural areas. note that the offline solution does not require relatively expensive terminals (such as computers, tablets, and smartphones) to establish connections between healthcare organizations. the motivation behind this solution is to bridge inequalities in healthcare and allow healthcare organizations with limited means to access ehr systems with any type of mobile phone that they possess. additionally, the proposed architecture foresees the utilization of artificial intelligence to enable better public health policy and surveillance in (i) moni-toring patterns suggesting disease outbreaks, (ii) predicting disease based on symptoms, medical records, and treatments over time, and (iii) providing drug recommendations.the rest of this paper is organized as follows. a brief outline of some related work is given in §2. section 3 describes the proposed architecture.we discuss the scope of the proposed architecture, challenges, and opportunities in §4. 
we describe ethical considerations in §5. finally, we conclude and present future directions in §6. electronic health record (ehr) systems provide a secure, integrated collection of patient and population electronically-stored health information in a digital format (odekunle et al., 2017; akanbi et al., 2012). while most developed countries are taking advantage of ehrs to improve their healthcare system, it remains challenging in developing countries to support clinical decision-making and public health using a computerized patient healthcare information system. the lack of an ehr system may lead to issues in managing patient health data to improve the quality of patient care and safety through decision support to clinicians. in order to manage patient data, many studies addressed the problem of the implementation and adoption of ehr systems in the context of developing countries (adetoyi and raji, 2020; odekunle et al., 2008; fraser et al.). ussd is a communication protocol used by mobile devices to communicate with a network service provider. an ehr architecture using ussd could enable healthcare providers to access and manage patient data and interact with the healthcare system, using simple text-based commands sent via ussd, even in areas with limited internet connectivity (see figure 2c). one of the advantages of the ussd-based ehr system (ues) over wes and mes is that it can be used on any type of mobile phone, even feature phones that do not have internet access; this means that it can potentially be accessed by a wider range of users, including those in rural or low-income areas where internet access may be limited. our architecture proposes ehr systems that respond to the limitations of (adetoyi and raji, 2020; kamadjeu et al., 2005; jawhari et al., 2020; wong et al.). beyond ehr data storage and manipulation, we can utilize artificial intelligence (ai) to analyze ehr data to enable public health policy and surveillance in a number of ways. we show how this architecture fosters social inclusion and discuss how the use of ai, on data stemming from the proposed architecture, can help to improve the effectiveness of public health policy and surveillance efforts in developing countries. in the future, we would like to build ai models that use metadata-induced contrastive learning to (i) provide drug recommendations within an ehr system and (ii) learn patient representations from ehr data to predict dangerous cases of polypharmacy usage and discover sociodemographic biases in the outcomes of polypharmacy usage. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/83.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/83.txt new file mode 100644 index 0000000000000000000000000000000000000000..4cf34f6405ac9228782a666322f842b6e8b5df48 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/83.txt @@ -0,0 +1 @@ + oco can be interpreted as a sequential game between an optimizer and an adversary over T time steps: at each time step t = 1, ..., T, the online optimizer chooses a decision x_t from a convex set X; then, the adversary chooses a loss f_t : X^{m+1} → R to penalize the optimizer's most recent m + 1 decisions. therefore, the oco framework we discussed above, where each loss function f_t depends on the most recent decision x_t only, fails to capture the effect of earlier decisions on the present. given the previous decision x_{t-1}, the gradient of the previously revealed loss ∇f_{t-1}(x_{t-1}), and a step size η > 0, the algorithm first takes a gradient step to obtain an intermediate point x'_t and then projects x'_t back onto the feasible convex set X to output the final decision x_t.
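as a concrete illustration of the projected update just described, the following minimal python sketch performs the gradient step followed by the projection back onto the feasible set. it is an illustration under our own assumptions (a euclidean-ball feasible set, toy quadratic losses, and placeholder function names), not the paper's ofw or meta-ofw implementation.

import numpy as np

def project_onto_ball(x, radius=1.0):
    # euclidean projection onto {x : ||x||_2 <= radius}, a simple stand-in for the convex set X
    norm = np.linalg.norm(x)
    return x if norm <= radius else x * (radius / norm)

def projected_ogd(grad_fns, dim, eta=0.1, radius=1.0):
    # grad_fns: one callable per round, returning the gradient of the previously revealed loss
    x = np.zeros(dim)               # x_1: initial feasible decision
    decisions = [x.copy()]
    for grad in grad_fns:
        x_prime = x - eta * grad(x)            # gradient step from x_{t-1} using grad f_{t-1}(x_{t-1})
        x = project_onto_ball(x_prime, radius) # project x'_t back onto X to obtain x_t
        decisions.append(x.copy())
    return decisions

# toy usage: quadratic losses f_t(x) = ||x - z_t||^2 with drifting targets z_t
rng = np.random.default_rng(0)
targets = [rng.normal(size=3) for _ in range(5)]
grads = [lambda x, z=z: 2.0 * (x - z) for z in targets]
print(projected_ogd(grads, dim=3))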
it is assumed that 0 ∈ X and that ‖x − y‖_2 ≤ D for all x ∈ X, y ∈ X. given f_t : X^{m+1} → R, the unary loss function is f̃_t(x) := f_t(x, ..., x). the loss function f_t : X^{m+1} → R is assumed to be convex. at each time step t = 1, ..., T, ofw chooses an x_t, after which the learner suffers a loss f_t(x_t) and evaluates the gradient ∇f_t(x_t) (lines 3-4). g_t(x) denotes the linearized loss of the unary loss function f̃_t at x_t, over which each base-learner optimizes (algorithm 2, meta-ofw). ℓ_{t,i} := g_t(x_{t,i}) + λ‖x_{t,i} − x_{t-1,i}‖_2 is a surrogate loss associated with the i-th base-learner b_i; the meta-learner collects ℓ_{t,i} for all base-learners. at each time step t = 1, ..., T, first a control action u_t is chosen; then, a loss function c_t : R^{d_x} × R^{d_u} → R is revealed and the system suffers a loss c_t(x_t, u_t). the learner then observes the state x_{t+1} and calculates the noise w_t = x_{t+1} − A_t x_t − B_t u_t. a (κ, γ)-strongly stable matrix is calculated given A_t and B_t, and w_τ = 0 for all τ < 0. substituting the update step x_t = (1 − η) x_{t-1} + η x'_{t-1} in algorithm 1, the dynamic regret of omd with switching cost can be bounded following (zhao et al.), where the first inequality holds by lemma 2, the second inequality holds by the non-negativity of the bregman divergence, and the last inequality holds because D_ψ(x, z) − D_ψ(y, z) ≤ γ‖x − y‖ for any x, y, z ∈ X. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/830.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/830.txt new file mode 100644 index 0000000000000000000000000000000000000000..8157ff73f194ee2ce7de4281ff7182602e8984ff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/830.txt @@ -0,0 +1 @@ +data science is an emerging interdisciplinary domain whose responsibility agenda has largely been borrowed or reconfigured from other domains. most data scientists work in teams where typically data come from different fields (such as business, finance, healthcare). to access and work with the data, data scientists are involved in 'data prospecting', a process requiring the data scientists to explicitly and implicitly agree to abide by the responsibility agenda of these domains. for example, working with biomedical, clinical, and other health-related data requires protecting the anonymity and confidentiality of individual patients by means of an informed consent and other principles and norms. in general, working with data and data analysis also requires engaging and defending the necessary trade-offs between individuals' privacy and the public's interest in research results. even if some concerns about data sharing were raised in the past, arguably, the benefits of open science and data sharing have been widely recognized in various research domains. in data science projects however, once the data is available, some questions remain hard to answer, such as: how will data and algorithms be made transparent? what are the social and ethical implications of the outcomes of the project? is there a procedure for dealing with unintended outcomes? we argue that these questions should be answered and documented during the project life cycle so that the work and the results persist beyond the timeline of the project itself. wil van der aalst was one of the first to write about responsible data science. 
he refers to data being the new oil (now a familiar metaphor) and the use of data science methods to create new forms of energy. he mentions that in order to resolve these "pollution" problems caused by systematic discrimination based on data, invasions of privacy, non-transparent life-changing decisions, and inaccurate conclusions, data scientists should develop technological solutions that ensure fairness, confidentiality, accuracy, and transparency, today known as the fact principles. since then, more attention has been paid to ethics, fairness, and equity in artificial intelligence, machine learning, and big data. machine learning approaches are generally developed as black boxes, and the source code for these models is rarely inspected in terms of fairness, accuracy, accountability, and transparency . popular understandings of data science and data analytics have been negatively influenced by revelations of companies collecting and using big data illegitimately. further, revelations of algorithmic bias have shown the need for data scientists to foresee in advance the possibility for misuse of the technologies they build. in this work we view responsibility as more than simply complying with legal regulations and ethical requirements; it also means that it is neither an incidental characteristic of a data science project nor an accidental byproduct. instead, we refer to responsibility 'by design' as a reflection on two important components of data science projects: 1) foresight and 2) infrastructural support.• foresight: achieving responsibility involves a level of intentionality in the planning and execution of a data science project. what are the potential pitfalls? are the planned benefits of a project? achieving this requires a level of ethical selfreflection by researchers and the involved organisations about their preparedness to make certain types of decision, as well as honest assessments of their ability to execute. this self-reflection also requires self-assessments about the completeness of one's understanding about the impact of technology. • infrastructural support "by design" also refers to the way responsibility is built into all aspects of a data science enterprise, from how people are trained to how research projects are budgeted and supported by infrastructures and organizations. achieving responsibility should not be placed solely on individuals, but should be woven into a range of everyday practices.to achieve responsibility by design, we propose the taps responsibility matrix (taps-rm), a framework for defining responsibility in data science projects. we identify four components of responsibility, namely transparency, privacy and confidentiality, accountability and societal values, and their scopes which are used as a basis for defining responsibility. the responsibility matrix proposed here is viewed as a generic framework for thinking about and identifying responsibility in data science organisations/projects, rather than prescribing specific normative constraints about what responsibility should look like in these organisations. a similar concept for project management and coordination was described by raci matrix , which outlines the roles and responsibilities of involved actors. our matrix, in contrast, focuses on broader social and ethical responsibilities within a project (i.e., elsi aspects). 
some of the responsibility aspects that we cover have also been described in other frameworks such as fair , datasheets for datasets or model cards , here we provide the definition of responsibility as explicit components relating to actors, objects, processes and impacts. the contribution of the work is twofold: a generic framework to be used for defining the responsibilities in a data science project and a framework where other established ethics and responsibility frameworks can be aligned, to provide a structured view and clarify the scope of the responsibilities in a project. we compare the responsibility matrix with fact, fair and datasheets for datasets as alternative frameworks for addressing responsibility in data driven projects. we show that the proposed matrix is a generic and applicable tool that can be used in data science projects.the remainder of this work is organised as follows: in section 2 we provide an overview of the state of the art works on open data-sharing and data-driven development approaches. in section 3 we provide an overview of the components and dimensions of the matrix, our definitions and how it all works together to define a responsibility by design approach. we exemplify our approach with a case study. in section 4 we follow up with a comparison of the matrix with well-known approaches to responsible data science and in section 5 and proceed with a discussion of the approach. finally, section 6 concludes with a summary of the main findings and future works. to access and work with the data, data scientists are involved in 'data prospecting', a process requiring the data scientists to explicitly and implicitly agree to abide by the responsibility agenda of these domains. in data science projects however, once the data is available, some questions remain hard to answer such as: how data and algorithms will be made transparent? what are the social and ethical implications of the outcomes of the project? is there a procedure for dealing with unintended outcomes? we argue that these questions should be answered and documented during the project life cycle so that the work and the results persist beyond the timeline of the project itself. popular understandings of data science and data analytics have been negatively influenced by revelations of companies collecting and using big data illegitimately.to achieve responsibility by design, we propose the taps responsibility matrix (taps-rm), a framework for defining responsibility in data science projects. the responsibility matrix proposed here is viewed as a generic framework for thinking about and identifying responsibility in data science organisations/projects, rather than prescribing specific normative constraints about what responsibility should look like in these organisations. yet, these three are representative of efforts to promote responsibility in data science: each was developed by experienced practitioners with extensive backgrounds in responsibility; each is mainly prescriptive but is not tied to enforceable penalties (except in the case of compliance with gdpr); none is reflective of all the variables that influence data science projects in academia and business.we started our matrix development with a small working group (10 people) which identified the main dimensions of responsible approaches to data science which uniformly "tile the space" of responsible data science by design. 
related to this matter are the criteria and processes for the assembling of teams to carry out specific data science activities for an organisation and, more specifically, the definition and mapping of roles and data responsibilities for all individuals and groups in the data science pipeline. for example: is data and its provenance available; is there a description of the biases in the data; what is the license attached to the data? who is accountable for the quality, availability and terms of use of the objects? • processes: this defines the processes that are in place for monitoring and ensuring an accurate, ethical and lawful functioning of the project (i. two questions (q3, "data science that ensures confidentiality-how to answer questions without revealing secrets?" and q4, "data science that provides transparency-how to clarify answers so that they become indisputable?") can be clearly mapped to respectively the privacy/confidentiality and the transparency component of our responsibility matrix.building responsibility matrices for data science projects is a way to build a responsibility agenda into data science work. this also gives data science a unique perspective on the way different communities behave towards data and thus the need to acknowledge community needs and values in the data science life cycle. as data science programs aimed at producing data scientists multiply around the world, it is necessary to think about a range of attributes and practices that data science possesses, which distinguishes itself from other fields. we believe it is time for data science to begin developing its own approaches to responsibility, and in particular using its expertise with data life cycles to promote responsibility within society. we conclude that the responsibility matrix can assist data scientists to evaluate data science activities in the responsibility context. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/831.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/831.txt new file mode 100644 index 0000000000000000000000000000000000000000..805789e91bf83ddba38b1d7858a89aa6739bb733 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/831.txt @@ -0,0 +1 @@ +published in 2017, the recommendations of the financial stability board's task force on climate-related financial disclosures (tcfd) have been described by the government of the united kingdom as "one of the most effective frameworks for companies to analyse, understand and ultimately disclose climate-related financial information"(department for business, energy & industrial strategy 2021).the tcfd recommendations, which have been formally endorsed by more than 4,000 companies worldwide to date, are a set of voluntary disclosure guidelines aimed at providing consistent climate-related information to investors and other key company stakeholders (friederich et al. 2021). compared to other reporting frameworks (e.g. carbon disclosure project, global reporting initiative), a particular focus of the tcfd recommendations is on disclosing information on the integration of climate-related risks into risk management processes, control structures and key aspects of business operations (beyene, ongena, and delis 2022). overall, the recommendations are structured around four broad disclosure categories (governance, strategy, risk management, metrics and targets) and 11 underlying recommended disclosures. 
for an overview of the tcfd recommendations, see figure 3 in the appendix. since reliable information on climate-related risk exposures is critical for making informed investment decisions and appropriately pricing risks, an increasing number of investors have been exerting pressure on companies to issue reports that include comprehensive climate-related financial disclosures (krueger, sautner, and starks 2020). in addition, several countries, including the uk and switzerland, have taken steps to make tcfd reporting mandatory for large companies in their jurisdictions. from a company perspective, the disclosure of climate-related information often signals awareness of and preparedness for climate-related issues, while the absence of disclosure may, on the contrary, indicate that such issues are not being addressed by the company (sullivan and gouldson 2012; bingler et al. 2022). illustrating these arguments, jung, herbohn, and clarkson (2018) and subramaniam et al. (2015) show that firms are more likely to integrate risks associated with climate change into their overall risk management when they also disclose information about such risks. against this background, it is surprising to find that research on climate-related disclosures is very sparse. what is more, prior studies have shown rather contrasting results with regard to the ability of the tcfd guidelines to deliver high-quality and material information, for several reasons, including greenwashing as well as a lack of standardization, quantitative data and transparency. for example, bingler et al. (2022) investigated climate-related disclosures related to the four core disclosure pillars based on a sample of 818 tcfd-supporting firms from 2015 until 2020. the authors came to the conclusion that climate-related reporting is associated with selective disclosure, suggesting that firms disclose climate-related information primarily on the least material tcfd disclosure categories, i.e. governance and risk management. ding, liu, and chang (2022) analyzed how carbon emissions affect voluntary climate-related disclosures based on tcfd. their results show that firms with higher levels of carbon emissions disclose more climate-related information. in particular, they find that carbon emissions drive disclosure at the category level for the strategy, risk management and metrics and targets pillars, but not for the governance pillar. so far, these studies have left a detailed analysis of the recommended disclosures largely untouched. in particular, the existing literature focuses on the quantity of information at the broad tcfd category level, rather than on the content (mehra, louka, and zhang 2022). a deeper analysis is crucial, however, as the content and materiality of the disclosures vary according to the sector of the company concerned. in particular, the tcfd recommendations are primarily aimed at financial institutions. as climate change affects the credit risk of different types of assets and poses the risk of stranded assets, high exposure to climate change by financial institutions could also increase the risk of financial instability (beyene et al. 2021). thus, the tcfd recommendations place great emphasis on the disclosure of concentrations of carbon-related assets in the financial sector (fsb 2015). 
furthermore, the tcfd published supplemental guidance for the financial sector and encourages banks to provide the metrics used to assess the impact of climaterelated risks on their lending and other financial activities.in this paper, we attempt to address this gap in two important ways. first, we provide new insights into the state of disclosure regarding the underlying tcfd recommendations by analyzing a sample of 3.335 reports published by tcfd-supporting banks between 2010 and 2021. we identify tcfd supporters by retrieving all banks that have publicly declared their support to tcfd and are listed as official supporters on tcfd's website. we focus explicitly on banks for the reasons mentioned before. overall, we contribute to the literature on the use of natural language processing (nlp) in the context of climate-related financial disclosures by introducing the zero-shot text classification as a new method for systematic and automated extraction of textual information from large amounts of reports. in addition, we develop a set of fine-grained labels that allow us to leverage information on the tcfd recommendations beyond the four core pillars.the zero-shot classification returns label probabilities for each extracted text sequence. this can therefore be interpreted as the probability that the corresponding sequence matches the label, or in other words, the probability that the text sequence deals with the topic addressed by the label. we assume that higher label probability is a proxy for disclosure quality because higher label probability indicates that the semantics of the text sequences are likely to match the semantics of the labels at hand. thus, when text sequences precisely and explicitly address a topic expressed in a label, they are associated with a high label probability in the context of zero-shot text classification. to some extent, a higher label probability is also an indicator of how much is disclosed about a particular topic, since labeling is associated with a higher probability if a text sequence discusses the topic in question in detail.compared to other language models, the zero-shot approach has the crucial advantage of being able to classify sentences using labels for which it has received no prior training. a weakness of algorithms trained to automatically identify and classify climate-related content is that such models require an extensive training set of human-labeled sentences. in particular, manual labeling of sentences is not only time-consuming, but can also be error-prone. therefore, for quality and consistency reasons, highly-trained and specialized "labelers" are required, which can also make the labeling process costly. furthermore, the more classes (or categories of labels) to be included into the classification scheme of the model, the more labeled data is needed to ensure that each class comes with a reasonable amount of examples attached to it, which can be a limiting factor in some scenarios. as our method does not require any labeled training data, it also does not impose any restrictions on the number of classes, which allows us to perform a more detailed analysis of the underlying tcfd recommended disclosures. additionally, the tcfd recommendations are well-suited for the zero-shot analysis, as they provide us with an already-existing framework and semantics (ding, liu, and chang 2022). 
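to make the zero-shot setup concrete, the sketch below shows how such a classifier can be run over report text with the hugging face transformers pipeline; the model name, the example sentence, and the four example labels are placeholders of our own, not the paper's actual 14-label scheme or tooling.

from transformers import pipeline

# hypothetical label set; the paper's fine-grained tcfd labels would go here
candidate_labels = [
    "climate-related governance",
    "climate-related strategy",
    "climate-related risk management",
    "climate-related metrics and targets",
]

classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")

sequence = ("the board's risk committee oversees the integration of climate-related "
            "risks into the group-wide risk management framework.")

result = classifier(sequence, candidate_labels, multi_label=True)
# result["labels"] and result["scores"] give, for each label, the probability that the
# text sequence matches the label -- the quantity interpreted above as a disclosure proxy
for label, score in zip(result["labels"], result["scores"]):
    print(f"{label}: {score:.2f}")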
in total, we develop 14 finegrained labels designed to capture the most central aspects of the tcfd recommendations for banks using similar semantics.our paper yields the following sets of findings. first, we investigate the level of disclosure at category level. specifically, we examine the mean probabilities that the text sequences in our sample relate to one of the four core tcfd categories. we find that the mean probabilities relating to the general labels (i.e., governance, strategy, risk management, and metrics and targets without explicit mention of climate) remain stable over the sample period from 2010 to 2021, while we observe an increase in all of the probabilities for the climate-related labels at category level (i.e., climate-related governance, climate-related strategy, climate-related risk management and climate-related metrics and targets) over the same time period. in particular, we report that the disclosures pertaining to the labels "climaterelated strategy" and "climate-related metrics and targets" grew particularly dynamically, reaching mean probabilities of 22% and 20% respectively, in 2021. at first sight, this might suggest that tcfd-supporting banks are more likely to focus on the strategic relevance of climate-related risks to their business or the development of climate-related business strategies with corresponding targets.however, one way to more accurately assess the quality of tcfd recommendation disclosures and their corresponding implementation by companies might be to look not only at the broad tcfd categories, but rather to examine corporate reporting on recommended disclosures within each broad category. to the best of our knowledge, the existing literature on tcfd has so far only investigated reporting at the category level. since text sequences in corporate reports oftentimes correspond to several categories at once, the results from such analyses might lead to diverging conclusions. in a second step, we therefore analyze the mean probabilities associated with our fine-grained labels, which cover the underlying recommended disclosures. our results indicate that there is considerable variation in disclosures, including within each tcfd category. in the strategy area, which is the most comprehensive one and contains several specific recommended disclosures for banks, we find that label probabilities are lower for disclosures related to financing and investment activities for carbon-intensive industries such as the fossil fuel industry. similarly, tcfdsupporting bank appear less likely to explicitly address the use of climate-related scenario models in their disclosures.under metrics and targets, we find that the incorporation of climate-related performance metrics into remuneration policies is associated with a lower label probability compared to labels related to carbon footprints and emissions reduction targets. in the governance area, the tcfd-supporting banks appear to report to similar levels on the board's responsibility for overseeing climate-related issues and the management's role in assessing and managing climate-related issues. however, maximum values for board oversight are higher, indicating a higher quality of reporting in some reports on the role of the board in overseeing climate-related issues.the remainder of this paper is organized as follows. first, we present our data, followed by our methods and model performance evaluation. our results section is twofold. 
in the first part, we present the results of the zero-shot classification at the category level. in the second part, we analyze the results for the fine-grained labels covering the tcfd recommended disclosures. the results are summarized and discussed in the last section. published in 2017, the recommendations of the financial stability board's task force on climate-related financial disclosures (tcfd) have been described by the government of the united kingdom as "one of the most effective frameworks for companies to analyse, understand and ultimately disclose climate-related financial information" (department for business, energy & industrial strategy 2021). we assume that higher label probability is a proxy for disclosure quality because higher label probability indicates that the semantics of the text sequences are likely to match the semantics of the labels at hand. we observe an increase in all of the probabilities for the climate-related labels at category level (i.e., climate-related governance, climate-related strategy, climate-related risk management and climate-related metrics and targets) over the same time period. in particular, we report that the disclosures pertaining to the labels "climate-related strategy" and "climate-related metrics and targets" grew particularly dynamically, reaching mean probabilities of 22% and 20%, respectively, in 2021. under metrics and targets, we find that the incorporation of climate-related performance metrics into remuneration policies is associated with a lower label probability compared to labels related to carbon footprints and emissions reduction targets. the reports are classified according to the following categories: annual reports, cdp reports, corporate governance reports, integrated reports, remuneration reports, sustainability reports, and tcfd reports. table 4 presents the probabilities associated with the general labels "governance", "strategy", "risk management", and "metrics and targets" as well as the probabilities for our labels "climate-related governance" (go.), "climate-related strategy", "climate-related risk management", and "climate-related metrics and targets". looking at the mean probability of "climate-related metrics and targets", we find a comparatively more modest increase from 12% for 2010 to 20% for 2021, but still reaching a higher level than "climate-related governance" and "climate-related risk management" in 2021. altogether, these results suggest that reporting on climate-related issues increased after the launch of the tcfd recommendations and, in particular, that climate-related risk management reporting exhibited the largest increase, which is consistent with ding, liu, and chang (2022). in the governance area, the tcfd-supporting banks appear to report to similar levels on the board's responsibility for overseeing climate-related issues (go.1) and the management's role in assessing and managing climate-related issues (see figure 1: climate-related disclosures by broad tcfd categories); however, maximum values for go.1 are higher, implying higher reporting quality on the role of the board in overseeing climate-related issues. several aspects could explain these results: for example, the reports in our sample might address only to a limited extent issues linked to the use of scenario analyses or climate-related physical risks, since several of the tcfd-supporting banks might not yet have the tools and expertise to perform such analyses or identify such risks ((2021); bingler et al. (2022); ding, liu, and chang (2022)). 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/832.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/832.txt new file mode 100644 index 0000000000000000000000000000000000000000..bde2ba60025396287d5c3ea307c2f7c0e4cc66e3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/832.txt @@ -0,0 +1 @@ +the continuity of human civilization and the prosperity of the race depends on our ability to cooperate. from evolutionary biology to social psychology and economics, cooperation in human populations has been regarded as a paradox and a challenge (fehr and fischbacher, 2003;pennisi, 2009;santos et al., 2021). cooperation issues vary in scale and are widespread in daily human life, ranging from assembly line operations in factories and scheduling of seminars to peace summits between significant powers, business development, and pandemic control (dafoe et al., 2020).although cooperation can benefit all parties, it might be costly. thus, the temptation to evade any cost (i.e., the free-riding) becomes a tempting strategy, which leads to cooperation collapsing, or the multi-person social dilemma (rapoport et al., 1965;xu et al., 2019). that is, "individually reasonable behavior leads to a situation in which everyone is worse off than they might have been otherwise" (kollock, 1998). just as cooperation widely exists in human social, economic, and political activities, most thorny problems we face, from the interpersonal to the international, are at their core social dilemmas. this article presents two cases in recent years closely related to the future economic and political decisions of countries, namely autonomous driving and carbon trading, and the role of social dilemmas in them.autonomous driving (av), which promises world-changing benefits by increasing traffic efficiency (van arem et al., 2006), reducing pollution (spieser et al., 2014), and eliminating up to 90% of traffic accidents (gao et al., 2014), is a very complex systems engineering. existing work mainly focuses on accomplishing generic tasks, such as following a planned path while obeying traffic rules. however, there are many driving scenarios in practice, most of which have social dilemmas. examples include lane changing (dafoe et al., 2020), meeting, parking (li, 2022), and even ethical aspects of aggressive versus conservative driving behavior choices (bonnefon et al., 2016). therefore, the practicality of av depends on the efficient solution to social dilemmas.carbon trading is a greenhouse gas emission right (emission reduction) transaction based on the united nations framework convention on climate change established by the kyoto protocol to promote the reduction of greenhouse gas emissions, using a market mechanism (grimeaud, 2001). carbon emission is a representative social dilemma in which countries' direct gas emissions for the sake of economic development undermine collective interests. the typical mechanisms in carbon trading, such as distribution of allowances (fullerton and metcalf, 2014), joint implementation (grimeaud, 2001), etc., have obvious correspondences with the boundaries (ibrahim et al., 2020a,b) and institutions (koster et al., 2020;lupu and precup, 2020) used to solve social dilemmas in economics and social psychology.the social dilemma has been comprehensively studied in economics, social psychology, and evolutionary biology in the past few decades. this paper focuses on the public good dilemma in the intertemporal social dilemma (isd). 
a public good is a resource from which all may benefit, regardless of whether they have helped provide the good (producer) (kollock, 1998). this is to say that public goods are non-excludable. as a result, there is the temptation to enjoy the good (consumer) without contributing to its creation or maintenance. those who do so are termed free-riders, and while it is individually rational to free-ride if all do so, the public good is not provided, and all are worse off.artificial intelligence (ai) advances pose increasing opportunities for ai research to promote human cooperation and enable new tools for facilitating cooperation (dafoe et al., 2020). recently, multi-agent reinforcement learning (marl) has been utilized as a powerful toolset to study human cooperative behavior with great success (lowe et al., 2017;silver et al., 2018;jaderberg et al., 2019;liao et al., 2020;li et al., 2022). we believe it is reasonable to use marl as a first step in exploring the use of ai tools to study multi-person social dilemmas. the current model for reinforcement learning suggests that reward maximization is sufficient to drive behavior that exhibits abilities studied in the human cooperation and social dilemmas, including "knowledge, learning, perception, social intelligence, language, generalization and imitation" (yang, 2021;silver et al., 2021;vamplew et al., 2022). the justification for this claim is deeply rooted in the von neumann morgenstern utility theory (von neumann and morgenstern, 2007), which is the basis for the well-known expected utility theory (schoemaker, 2013) and essentially states that it is safe to assume an intelligent entity will always make decisions according to the highest expected utility in any complex scenarios1 (yang, 2021).in marl, the critical issue of multi-person social dilemma can be formalized as an isd (leibo et al., 2017;hughes et al., 2018b), and most marl methods have introduced ideas from social psychology and economics more or less. these methods could be divided into three categories, strategic solutions, structural solutions, and motivational solutions, based on whether the solutions assume egoistic agents and whether the structure of the game can be changed (kollock, 1998) according to the taxonomy of social science.structural solutions reduce the difficulty of the original social dilemma by changing the game's rules or completely avoiding the occurrence of the social dilemma. the mechanisms introduced into marl mainly include boundaries and sanctions (ostrom, 1990). ibrahim et al. (2020a) indirectly sets boundaries for resources by introducing a shared periodic signal and a conditional policy based on this signal, allowing agents to access shared resources in a fixed order. ibrahim et al. (2020b) achieves resource boundarization by introducing a centralized government module through taxation and wealth redistribution.. koster et al. (2020); lupu and precup (2020) introduce a centralized module and use rules and learning methods to punish the free-riding agent separately. lio (yang et al., 2020) enables each agent to punish, thereby implementing the sanction mechanism in a decentralized manner. vinitsky et al. (2021) adopts a combination of centralized and decentralized modules and judges the decentralized sanctioning behavior of the agent through the centralized module, thereby encouraging appropriate sanctioning behaviors and avoiding unreasonable behaviors. furthermore, dong et al. 
(2021) introduces homophily into the marl to solve the second-order social dilemma caused by sanctions.strategic solutions assume that all individuals in the group are egoists and that the algorithm does not change the game's structure. such methods rely on an individual's ability to shape other individuals' payoffs, thereby directly influencing the behavior of others. direct and indirect reciprocity is the main mechanisms introduced into marl. eccles et al. (2019) introduces the classic direct reciprocity algorithm tit-for-tat (axelrod and hamilton, 1981) into the solution of isd. in order to realize the "imitation" at the core of tit-for-tat and the definition of the binary action (cooperate and defect) in isd, eccles et al. (2019) divides the agents into innovators and imitators and introduces the niceness function based on the deep advantage function. anastassacos et al. (2021) introduces two core concepts of indirect reciprocity, reputation and social norm (santos et al., 2021) into marl and uses them as fixed rules to construct the agent's action space.motivational solutions assume agents are not entirely egoistic and so give some attention (passively or actively) to the outcomes of their partners. one of the typical mechanisms is communication. across a wide variety of economics and social psychology studies, when individuals are given a chance to talk with each other, cooperation increases significantly (orbell et al., 1988(orbell et al., , 1990)). although there are many works (sheng et al., 2020;ahilan and dayan, 2021) on communication learning in marl, little attention has been paid to the role of communication in solving isd. pretorius et al. (2020) first uses empirical game-theoretic analysis (tuyls et al., 2018) to study existing communication learning methods in isd and to verify the effects of these methods experimentally. another typical mechanism is social value orientation. social value orientations (svos), or heterogeneous distributive preferences (batson, 2012;cooper and kagel, 2016;eckel and grossman, 1996;rushton et al., 1981;simon, 1993), are widely recognized in social psychology and economics as an effective mechanism for promoting the emergence of human cooperative behavior in different social dilemmas (mckee et al., 2020). the above three types of methods mainly make breakthroughs in methodology and are accompanied by simulation experiments to verify the correctness of the conclusions. considering the completeness of the theory and the feasibility of convergence analysis, this paper mainly focuses on solving intertemporal or public good social dilemmas based on social value orientations. the aforementioned mainstream conclusions about svo from social psychology and economics are mainly supported by interdependence theory (hansen, 1982). in social psychology and economics games, classical game theory does not accurately predict human behavior. this is because, in these human-involved games, each player does not rely on the given payoff matrix to make decisions but on their own "effective" payoff matrix (hansen, 1982;mckee et al., 2020). the effective payoff matrix is constructed by redistributing payoffs for the given payoff matrix based on the players' respective svos. as seen from figure 1, different svo will make players choose different dominant strategies when facing the prisoner's dilemma, thus affecting the emergence of cooperation. 
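the effective-reward construction just described can be written down in a few lines. the sketch below uses the angle-based svo parameterization that appears in parts of this literature (e.g., mckee et al., 2020); the reward values and angles are illustrative choices of our own, not resvo's actual reward-sharing mechanism, which is introduced later.

import numpy as np

def svo_shaped_rewards(rewards, angles):
    # rewards: extrinsic reward r_i of each agent; angles: svo angle theta_i of each agent (radians)
    # an agent's effective reward mixes its own reward with the mean reward of the other agents:
    #   r~_i = cos(theta_i) * r_i + sin(theta_i) * mean_{j != i} r_j
    rewards = np.asarray(rewards, dtype=float)
    n = len(rewards)
    shaped = np.empty(n)
    for i, theta in enumerate(angles):
        others = np.delete(rewards, i).mean() if n > 1 else 0.0
        shaped[i] = np.cos(theta) * rewards[i] + np.sin(theta) * others
    return shaped

# theta = 0 is purely selfish; theta = pi/4 weights self and others equally (prosocial)
print(svo_shaped_rewards([1.0, 0.0, 2.0], angles=[0.0, np.pi / 4, np.pi / 4]))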
many different social value orientations are theoretically possible, but most work has concentrated on various linear combinations of individuals' concern for the rewards for themselves and their partners.inspired by the interdependence theory, many previous works have introduced the svo into marl to solve the isd (peysakhovich and lerer, 2018b;hughes et al., 2018a;zhang et al., 2019;wang et al., 2019;baker, 2020;gemp et al., 2022;yi et al., 2021;ivanov et al., 2021;schmid et al., 2021). peysakhovich and lerer (2018b) introduces the svo into marl for the first time and proposes the concept of prosocial, that is, cooperative orientation agents. the reward function of a prosocial agent is shaped as a fixed linear combination of its reward and the others. hughes et al. (2018a) introduces an inequity aversion model in isd, namely equality orientation, which promotes cooperation by minimizing the gap between one's return and that of other individuals. the latter work is no longer satisfied with a fixed linear combination and begins to introduce trainable weight parameters. baker (2020) first attempts to randomize the linear weights of one's and others' rewards to observe whether cooperative behavior emerges. since the linear weights are always greater than 0, all agents can be roughly classified into three categories: cooperative-oriented, altruistic-oriented, or individual-oriented. going a step further, d3c (gemp et al., 2022) optimizes the linear combination weights by using the ratio of the worst equilibrium to the optimal solution (price of anarchy, poa) that measures the quality of the equilibrium points. concurrent work ltos (yi et al., 2021) models the optimization problem of linearly transforming weights as a bi-level problem and uses an end-to-end approach to train weights and policies jointly. considering the noise or privacy issues that instantaneous rewards for svo modeling may introduce, some recent works shape the agents' reward in other ways. schmid et al. (2021) realizes the conditional linear combination of agent rewards by introducing the idea of the market economy. zhang et al. (2019) and ivanov et al. (2021) use state-value and action-value functions to implement svo modeling. wang et al. (2019) directly uses reward-to-go and reward-to-come, combined with evolutionary algorithms, to optimize the weights of nonlinear (mlp-based) combinations. however, these methods cannot stably and efficiently converge to mutual cooperation under complex isds, which are further verified in our numerical experiments in section 4.the conceptual diagram of our solution is shown in figure 2. specifically, we find that a typical mechanism of human society, i.e., division of labor or roles, can benefit from providing a promising solution for the isd combined with svos. the effectiveness of the division of labor in solving the isd has emerged in existing marl works but is still underexplored. the numerical results from sanction-based methods yang et al. (2020); vinitsky et al. (2021) on the typical isd task cleanup (hughes et al., 2018b) and allelopathic harvest (köster et al., 2020) show that policies solving isds effectively exhibit a clear division of labor (figure 3). many natural systems feature emergent division of labor, such as ants (gordon, 1996), bees (jeanson et al., 2005), and humans (butler, 2012). in these systems, the division of labor is closely related to the roles and is critical to labor efficiency. 
the division of labor, or the role theory, has been widely studied in sociology and economics (institute, 2013). a role is a comprehensive pattern of behavior, and agents with different roles will show different behaviors. thus the overall performance can be improved by learning from others' strengths (wang et al., 2020). these benefits inspired multi-agent system designers, who try to reduce the design complexity by decomposing the task and specializing agents with the same role to certain sub-tasks (wooldridge et al., 2000; omicini, 2000; padgham and winikoff, 2002; pavón and gómez-sanz, 2003; cossentino et al., 2005; zhu and zhou, 2008; spanoudakis and moraitis, 2010; deloach and garcia-ojeda, 2010; bonjean et al., 2014). however, roles and the associated responsibilities (or subtask-specific rewards, sun et al. (2020)) are predefined using prior knowledge in these systems (lhaksmana et al., 2018). although pre-definition can be efficient in tasks with a clear structure, such as software engineering (bresciani et al., 2004), it hurts generalization and requires prior knowledge that may not be available in practice. to solve this problem, approaches that learn roles rather than predefine them have been proposed (wilson et al., 2010). in the first phase, resvo transforms the learning of roles into a social value orientation emergence problem, which is symmetrically solved by endowing agents with altruism to learn to share rewards with other agents. drawing the insight from studies in social psychology that characteristics of laborers, or roles, influence the svos reciprocally (sutin and costa, 2010; holman and hughes, 2021), this paper uses the agent's svo to represent the role of each agent, transforming the role learning problem into the emergence of the agent's svo, thereby naturally constructing a role-based framework in marl to solve isd. because consumers altruistically share a part of their profits with producers and making reward data unrestrictedly accessible is undesirable for several reasons, the proposed resvo achieves svo emergence by endowing agents with altruism to learn to share rewards with different weights with other agents. as mentioned in section 1, to consider the fact that consumers altruistically share a part of their profits with producers and avoid the realistic constraint (making reward data unrestrictedly accessible) imposed by directly learning the svo of each agent according to the independence theory, resvo enables agents to learn to dynamically share the reward with other agents (koster et al., 2020; lupu and precup, 2020; yang et al., 2020), as shown in figure 4. each agent j learns an svo-based role conditioned policy π_j(· | o_j, e_j(η)), parameterized by θ_j, where e_j(·) is the svo-based role embedding of agent j. to ensure that the emergent svo can effectively represent the different roles of agents, resvo introduces a novel rank constraint on the svo matrix W_η^t of all agents, and k can be regarded as the theoretically optimal number of roles. for the d3c algorithm, agent 2 is stably assigned the role of a lever-puller, but there is no stable division of labor between agent 1 and agent 3. in this task, similar to the 3-player escape room, there is also an apparent division of labor between the two agents under the optimal cooperative policy: one agent needs to clean up wastes (producer), and the other agent needs to collect apples (consumer). 
at the same time, to maintain a stable division of labor or roles, agent 1 as a consumer will continue to share rewards with agent 2 as a producer (figure 9(a) and (d)), to achieve a larger average extrinsic reward, or social welfare. in the static division of labor, the role of each agent is fixed while completing the task; on the contrary, the agent's role will change in the dynamic division of labor. combined with the results of figure 10, it can be seen that in tasks involving more complex optimal division-of-labor patterns, the static division of labor learned by resvo can be more efficient than the dynamic division of labor in lio. in the ipd, 3-player escape room, and cleanup tasks of varying complexity involved in the experiments, we find that static division of labor exhibits better performance compared to dynamic division of labor in tasks where the same role corresponds to multiple agents, such as the 3-player escape room and the 10-player cleanup, converging faster to better social welfare. on the other hand, dynamic division of labor may be more robust in tasks that involve roles that change dynamically; furthermore, static division of labor may pose fairness issues because some agents receive lower extrinsic rewards than others. each agent j learns an svo-based role-conditioned policy π_j(· | o_j, e_j(η)) parameterized by θ_j, where e_j(·) is the svo-based role embedding, to maximize the objective max_{θ_j} J^{policy}(θ_j, {e_j}). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/833.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/833.txt new file mode 100644 index 0000000000000000000000000000000000000000..023f884a301c42be33d4f58090967a4575311e4c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/833.txt @@ -0,0 +1 @@ +with the proliferation of social media, smartphones, internet-of-things (iot) devices and low earth orbit (leo) satellites, we live in a world where location data (data with reference to a physical location) is ubiquitous. recent estimates suggest that location data make up over 80% of the data created on a daily basis. while location data can be and have been used for widely agreed-upon, positive applications -such as understanding the spread of infectious diseases -they can easily be misused, for example, by misleading users of a navigation smartphone app about collecting and/or selling their data. it is well established that a user's location is indicative of the type of real-world, day-to-day activities, such as shopping and dining (hereafter referred to as "offline activities"), they perform. research has shown that basic offline activities can be inferred from gps traces using both conventional statistical methods and machine learning algorithms. the same has been demonstrated using mobile phone data collected city-wide at the base station level. other relevant sources of data, such as geotagged social media posts, wifi signals, and ride-hailing app data, have also been successfully used to infer offline activities. in short, there is an abundance of evidence that points to the correlation between a person's location and the offline activity they are engaged in. as with any type of location technology, opportunities exist for malicious targeting. for example, the same algorithm used in prior work to help patients with alcohol use disorder recover can be exploited to target the users of a smartphone app most vulnerable to alcoholism.
such a risk is further amplified given the recent widespread availability of powerful machine learning algorithms that require little to no knowledge of statistics and/or algorithm design to configure and employ. in this paper, we attempt to answer the following question: how well can modern machine learning algorithms infer a user's offline activity given their location data? to this end, we empirically evaluate the performance of 6 models trained to infer 9 basic offline activities using anonymized data collected over a period of 18 months from ≈15k users of a prominent location-based social network active in 6 major cities spread over 4 different continents. the findings we report in this paper not only fill an existing gap in the literature, but also highlight the potential risks of applying machine learning to location data at a time when powerful machine learning models are easily accessible. the following is a summary of our most interesting findings: • our experiments show that modern machine learning algorithms are well capable of inferring basic offline activities, with the best performing model achieving an average macro-f1 score of over 0.9. • we also found that "nightlife" is the most and "at home" the least challenging activity to infer on average. • finally, we found that tabular models that require minimal machine learning knowledge to configure and limited resources to run not only excel at the task at hand but can also outperform sophisticated models trained end-to-end. the remainder of this paper is organized as follows. previous relevant works are briefly overviewed in section ii. the methodology we follow to infer offline activities from location data is described in section iii. the results of our extensive empirical analysis are given in section iv. finally, the paper is summarized in section v. in one prior work, random forests are used to infer the purpose of trips (high-level offline activities) from gps traces of 156 subjects collected over a one-week period around zurich, switzerland. in another, conventional machine learning algorithms are used to infer 8 offline activities from cdr data collected in barcelona and madrid. a third subset of the literature uses social check-in data to infer users' offline activities at the point of interest (poi) level.
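the macro-f1 evaluation reported above can be sketched as follows; the placeholder features, the choice of a gradient-boosting classifier as the "tabular model", and the train/test split are illustrative assumptions rather than the paper's exact setup:

```python
import numpy as np
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.metrics import f1_score
from sklearn.model_selection import train_test_split

# Placeholder data: rows = check-ins described by location-derived features,
# labels = one of 9 offline activity categories (e.g., "nightlife", "at home").
rng = np.random.default_rng(0)
X = rng.normal(size=(2000, 20))          # e.g., grid-cell id, hour of day, venue statistics
y = rng.integers(0, 9, size=2000)

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=0)

model = GradientBoostingClassifier().fit(X_tr, y_tr)     # a simple tabular model
macro_f1 = f1_score(y_te, model.predict(X_te), average="macro")
print(f"macro-F1: {macro_f1:.3f}")
```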
in one of these works, crfs combined with unsupervised clustering are used to infer 7 offline activities from dianping check-in data of 83 users collected in beijing, china. similarly, another uses non-negative matrix factorization to infer 9 offline activities from foursquare check-in data of ≈2000 users collected in tokyo and new york city. for example, one work infers 8 offline activities from wifi traces of 13 subjects moving around a university campus, others infer offline activities from microblogs, and another infers 13 high-level activities from ride-hailing app data collected around the city of toronto, canada. finally, it is worth mentioning that works that "predict" future offline activities using data similar to ours are beyond the scope of this survey, as we are interested in inferring the user's current rather than future activities. in this section we explain the methodology we follow to infer offline activities from location data. while the above results indicate that location data play an essential role in the model's performance, what is more interesting is the observation that even when features are extracted using the lowest-resolution grid, the model is still able to correctly infer the user activity up to 36% of the time. in this paper, we attempted to answer the following question: how well can modern machine learning algorithms infer offline activities from location data? to this end, we empirically evaluated the performance of 6 models trained to infer 9 basic offline activities using anonymized data collected from ≈15k foursquare users active in 6 major cities spread across 4 continents. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/834.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/834.txt new file mode 100644 index 0000000000000000000000000000000000000000..fef0526cd801565b20c74f509c2a561dbb900546 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/834.txt @@ -0,0 +1 @@ +while artificial intelligence (ai) technologies are progressing fast, compliance costs have become a huge financial burden for ai startups, which are already constrained on research & development budgets. this situation creates a compliance trap, as many ai startups are not financially prepared to cope with a broad spectrum of regulatory requirements. particularly, the complex and varying regulatory processes across the globe subtly give advantages to well-established and resourceful technology firms over resource-constrained ai startups. the continuation of this trend may phase out the majority of ai startups and lead to giant technology firms' monopolies of ai technologies. to demonstrate the reality of the compliance trap, from a field deployment perspective, we delve into the details of the compliance costs of ai commercial operations. based on the oecd regulatory compliance cost assessment guidance, we quantitatively compare the financial vulnerability of tech giants versus ai startups. the actual situation of ai startups is more complex than this estimation, as it is nearly impossible for external analysts to estimate the compliance costs.
for instance, in an impact assessment report of the european ai act, the estimated annual compliance cost of one ai product that costs eur 170,000 on average to develop is eur 29,277; we believe this study has underestimated the actual costs of ai compliance. ai is a highly regulated industry, but unfortunately, there is no standardized ai regulation framework, and hence compliance costs often become a financial trap for ai startups. most ai entrepreneurs may not even be aware of the existence of compliance costs, let alone the severe impact compliance costs may have on the company's overall financial health (figure 1: the compliance trap for ai startups). first, unlike r&d budgeting, due to varying ai regulatory frameworks across the globe or even across multiple regions within a country, there is no standard method to budget for ai compliance costs. second, even with an ai compliance budget, the actual costs may significantly deviate from the budget. in this section, with more than six years of first-hand experience in deploying commercial autonomous driving services, we delve into the details of compliance costs from a field deployment perspective, in the hope that the insights we provide can raise awareness of the adverse impact of the lack of standardized ai regulations. in the case of perceptin, the compliance cost of one deployment project is $344,000 on average, whereas the average r&d cost is around $150,000, making the compliance costs roughly 2.3 times the r&d costs. unfortunately, the lack of standardized ai regulatory frameworks creates a compliance trap that may destroy an ai startup financially, which could in turn create a competitive advantage for tech giants over ai startups. ideally, if a global golden standard on ai regulation could be developed, then ai startups could accurately budget for compliance costs. however, before a consensus can be reached regarding such a golden standard, we believe that a new business model, compliance-as-a-service (caas), can specialize in dealing with varying ai regulatory frameworks and thus amortize compliance costs across different ai startups. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/835.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/835.txt new file mode 100644 index 0000000000000000000000000000000000000000..500879e0482d3958efc15b925f292339cb9a546e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/835.txt @@ -0,0 +1 @@ +it is well-established that the field of computer science (cs) struggles with pervasive issues related to diversity, equity, and inclusion (dei). at all levels, the field of cs persistently under-represents many groups including women, black and indigenous people of color, queer people, and disabled people. underlying this lack of representation are more complex issues -ranging from field-specific cultural issues to social issues like systemic racism, sexism, and ableism -that make it difficult, unwelcome, and sometimes even unsafe for people outside our field's dominant social groups to pursue a career in cs.
on top of these barriers, phd students face high rates of mental health issues due to academia's "dark patterns", along with "stress about productivity and self-doubt, ... feeling devalued, advisor relationships, ... difficulties with work-life balance, and feelings of isolation and loneliness". despite these struggles, phd students often do not seek help. while many of these issues affect students of all identities, we consider them dei issues because they place disproportionate burdens on community members from marginalized and underrepresented groups. as phd students in carnegie mellon's department of cs (csd), we experienced and witnessed many such dei issues. based on our experiences, a survey of our peers' experiences, and existing research, we hypothesized that these issues could be mitigated by an introductory dei course for first-year phd students. in particular, we envisioned a course engineered to (a) be more comprehensive and self-directed than a standard dei training, (b) be well-received as a required course for all students, and (c) connect core dei topics to the cs phd context. while there exist many dei education programs across industry and academia, none satisfied these criteria: some are measurably effective but designed to be optional; others are required but can be one-size-fits-all or focused on compliance, limiting their potential to create lasting knowledge retention or cultural change; and the few programs that are both required and more in-depth are not tailored to the cs phd experience. in light of this gap, we developed our own open-access dei curriculum, titled cs-jedi: justice, equity, diversity, and inclusion in computer science. cs-jedi is a 6-week introductory dei course that is, as of fall 2021, required for all new phd students in our program. as desired, cs-jedi is for phd students in that its content and structure are tailored to the cs phd experience. cs-jedi is also by phd students: it is designed to be mainly phd student-taught, and it was created primarily by a group of 15 cs phd students, who contributed their expertise, perspectives, and over 2500 person-hours of work. beyond this core working group, the curriculum benefited from multiple rounds of detailed input from the cmu eberly center for teaching and learning, plus many students, staff, faculty, and other experts. the ∼40 anecdotes we heard from ∼25 students were consistent with the trends documented in the literature: students described interactions with other students -and sometimes faculty -involving sexism, racism, xenophobia, homophobia, and harassment. more importantly, including all students can have much greater community impact: discussion about dei as a complete cohort can help students find supportive connections they would not have otherwise made.
more concretely, cs-jedi aims to treat students' differences as assets rather than deficits, and to ensure that all students can find their experiences reflected in the material-two features of an inclusive class environment according to the culturally responsive teaching (crt) framework. since peer interaction is a core aspect of cs-jedi, meeting this goal means that students' differences are treated as assets by both instructors and other students. this tailoring allows cs-jedi to leverage students' common identity and experience as members of a cs phd program to increase the material's relevance and accessibility to all students at once. many of these approaches follow one of two frameworks: universal design for learning (udl), which focuses on lowering barriers to learning by giving students choice, and culturally responsive teaching (crt), which recognizes and responds to students' cultural backgrounds, experiences, and identities. in addition to giving students a chance to contribute to their group's collective knowledge, this approach exposes students to all lens topics in a short time. finally, to support students with more incoming knowledge, pre-class activities contain many nuanced sources, and students can request reading recommendations from instructors based on their interests. cs-jedi not only emphasizes these messages, but tries to equip students to enact them: it exposes students to a wide range of others' experiences in our field, teaches intent versus impact, offers students a plethora of sources with which to self-educate in the future, and provides an instructor-curated list of ongoing campus dei efforts that students can join. in response to a set of questions distinct from those in table1, 63% of students reported that after the course, they had more of a common language with their peers on dei topics; 48% said they formed at least one supportive connection with another student; 33% were more likely to ask peers for support; and 48% of students also felt that they or others would feel more welcome in our field if more members of the cs community took this course.although two students expressed sentiments of forced participation in free responses (see footnote 3), the majority of responses to all survey questions were neutral or positive, suggesting that most students were receptive to taking cs-jedi. at the top of this list, we want the course to do more to strengthen student community, give all students the option to learn at the right level of difficulty (table1, (f)), and make sure there is space for all students to bring their authentic selves to class (table1, (a)). the material in week 2 is especially difficult, and may have been overwhelming; many course aspects primarily engage with dei issues in the us context, potentially reducing accessibility or identity safety for students from non-us backgrounds; and students from different cultures may differ in their comfort levels with sharing or submitting their own opinions on sensitive topics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/836.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/836.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6b055e749dadda6f2de1192270fc280ab5e8ca4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/836.txt @@ -0,0 +1 @@ +the popularity and necessity of online education have increased the use of online education systems in recent years. 
given the amount of data produced as a result of student interactions, automatically understanding individual students' knowledge and learning process is essential for the success of such online education systems. knowledge tracing (kt) models aim to quantify students' state of knowledge at each point of the learning period. many modern kt models are formulated as a supervised sequence learning problem to predict students' future practice performance according to their past performances in learning activities , - . students learn from activities such as solving problems, taking tests, reviewing worked examples, and watching video lectures , (multi-activity). among these activities, the ones that can be used to assess student performance, like problem-solving and test-taking, serve as relatively reliable measures of student knowledge. unlike these assessed activities, the non-assessed activities, such as reading worked examples, cannot provide explicit indicators of knowledge in students. until recently, modeling the assessed activity types has been the main focus of these supervised sequence learning kt methods, and these models overlook essential aspects of the student learning process by ignoring the non-assessed learning activities.indeed, research shows that non-assessed learning activities can help students to learn better , . but, the realization and attainment of the gained knowledge from the assessed and non-assessed learning materials can be different. for example, hou et al. conclude that practice activities are useful for student success in projects, but they do not help as much in exam preparation . instead, they show that reviewing practice quizzes could help for exam preparation. in other words, the knowledge that is gained from one learning material type (e.g., video lectures) can be transferred to another (e.g., solving problems). however, the dynamics and realization of this transfer depend on the transition order of learning materials. for example, consider a student who is learning about "summation" and "multiplication" concepts by watching video lectures and practicing problems. since the multiplication concept can be explained as an extension of summation, the student can learn multiplication easier from a video lecture after practicing summation problems. meaning that the summation knowledge that is gained by solving problems can be transferred to help achieve better multiplication knowledge using video lectures. however, the reverse sequence may not be as helpful. a student may not be able to solve summation problems just by watching multiplication videos if they do not have background knowledge in summation.as a result, explicitly modeling knowledge transfer between different learning material types, particularly both assessed and non-assessed ones, is essential to accurately understand student learning processes. recently, a handful of works have sought to model both assessed and non-assessed learning activities - . however, none of these approaches model how student knowledge transfers from one learning activity type to another. in this paper, we propose transition-aware multiactivity knowledge tracing (tamkot) to explicitly model knowledge transfer every time a student transitions between different learning activity types. tamkot models student knowledge states in a set of latent variables at every step in the student learning sequence. 
every time a student transitions from one learning material to another, tamkot uses a transition-specific matrix to transfer the student's knowledge according to the types of the involved learning activities. unlike previous kt models, our formulation allows for unlimited transitions between different learning activity types and does not limit sequence lengths for any of the material types. this is realized via the simple, yet efficient, formulation of transition identifiers in tamkot that activate one transition-specific matrix at a time. our model provides the flexibility for different material types to have different latent representation spaces that are mapped to a shared student knowledge space. we evaluate tamkot on three real-world datasets. the experiments show that tamkot performs significantly better than state-of-the-art supervised knowledge tracing models in predicting student performance. furthermore, despite its simplicity, tamkot performs better than the existing multi-activity knowledge tracing models in datasets with granular learning materials. more importantly, our analysis demonstrates that knowledge transfer can be different depending on the transition order between learning material types, especially in complex learning materials. finally, we showcase the interpretability of the learned student knowledge states. ma-fm is based on factorization machines and models the student knowledge state in predefined knowledge concepts by a weighted count of the student's previous successes, failures, and activities with different learning material types. mvkm models student learning activities of different material types as separate tensors and uses tensor factorization to capture latent student features, student knowledge, and learning material latent concepts. our goal is to trace students' knowledge at each time step t as they learn from both assessed and non-assessed learning material types, explicitly model the knowledge transfer from each learning material type to another, and predict student performance on future assessed learning materials. to achieve our goal of predicting student performance given their assessed and non-assessed learning activity history, we assume that students gain knowledge in a set of latent concepts or topics that are presented in learning materials.
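a minimal sketch of the transition-specific knowledge transfer idea described above; the layer sizes, the gru-style update, and the exact set of transition matrices are illustrative assumptions, not the paper's exact architecture:

```python
import torch
import torch.nn as nn

class TransitionAwareKT(nn.Module):
    """Sketch: a recurrent knowledge state with one transfer matrix per ordered
    pair of activity types (assessed 'a' / non-assessed 'n')."""

    def __init__(self, d_in: int, d_hidden: int):
        super().__init__()
        self.cell = nn.GRUCell(d_in, d_hidden)
        self.d_hidden = d_hidden
        # one knowledge-transfer matrix per transition type (aa, an, na, nn)
        self.transfer = nn.ParameterDict(
            {t: nn.Parameter(torch.eye(d_hidden)) for t in ["aa", "an", "na", "nn"]}
        )

    def forward(self, x_seq: torch.Tensor, type_seq: list) -> torch.Tensor:
        """x_seq: (T, d_in) embedded activities; type_seq: list of 'a'/'n' labels."""
        h = torch.zeros(1, self.d_hidden)
        for t in range(len(type_seq)):
            if t > 0:
                key = type_seq[t - 1] + type_seq[t]   # transition identifier
                h = h @ self.transfer[key]            # transfer knowledge across activity types
            h = self.cell(x_seq[t].unsqueeze(0), h)   # update the knowledge state
        return h

model = TransitionAwareKT(d_in=16, d_hidden=32)
state = model(torch.randn(5, 16), ["a", "n", "n", "a", "a"])
```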
we build tamkot in three layers: (1) the embedding layer, which maps each learning activity to a latent embedding space, (2) the hidden layer, which models and transfers the knowledge between assessed and non-assessed interactions at each time step, and (3) the prediction layer, which predicts the student's performance on an upcoming assessed learning material. the goal of the embedding layer is to learn the embedding vector of each learning activity (i_t or d_t) as the input to the hidden knowledge transfer layer for estimating the student's knowledge hidden state h_t, using the latent representation of its learning material (q_t and l_t) and the student response (r_t). having problems as assessed and video lectures as non-assessed learning materials, we first map all problems into the problem latent space and all video lectures into the video latent space and obtain their underlying latent concept matrices a_q ∈ r^(q×d_q) (for problems) and a_l ∈ r^(l×d_l) (for video lectures). the hidden knowledge transfer layer is designed to represent the student's knowledge state h_t and learn knowledge transfer while students are freely interacting with and transitioning between assessed and non-assessed learning material types. unlike previous attempts at multi-activity knowledge tracing that allowed for only a limited number of non-assessed learning materials between every two assessed ones, this representation allows us to model unlimited transitions in any order between the two learning material types in a student's sequence. particularly, we study whether the knowledge transfer from the assessed learning materials to the non-assessed ones is different from the knowledge transfer from the non-assessed learning materials to the assessed ones. in this paper, we proposed transition-aware multi-activity knowledge tracing (tamkot) to model student learning from both assessed and non-assessed learning activities and explicitly learn the knowledge transfer between different learning activity types. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/837.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/837.txt new file mode 100644 index 0000000000000000000000000000000000000000..4cd20a249ee345df4502531704d0581d796f0062 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/837.txt @@ -0,0 +1 @@ +learning analytics is concerned with the collection and analysis of data related to learning in order to inform and improve the learning process or its outcomes. applying learning analytics properly can not only track student progress but also improve student performance. recent advancements in the development of data science and machine learning techniques have led to a rise in the popularity of learning analytics within the educational research field. the flipped classroom is a new pedagogical method, which utilizes asynchronous video lectures and basic practice as homework, and conducts group-based problem-solving discussions or activities in the classroom. since the flipped classroom promotes cooperative learning and increases student engagement and motivation, it has been gaining increasing importance for teaching and learning in recent years. a common in-class activity for the flipped classroom is student group discussion, where participants are involved in solving problems together. such discussion dialogs embed rich information that cannot be captured objectively by conventional data, such as students' in-class sentiments, degree of concentration, amount of information exchange, etc.
the information from in-class discussion dialogs may reflect the processes and progression of learning. therefore, spoken discussion dialogs in the flipped classroom deserve greater attention for learning analytics, which aims to collect and analyze the discussion dialogs in the flipped classroom in order to explore indicators that reflect group learning outcomes. however, current studies in the flipped classroom research field have not paid sufficient attention to this research problem, not only because of the difficulty in collecting such data in a formal flipped classroom, but also because of the technological difficulties in analyzing such data. for one, speech recordings of group discussions present a source separation problem with multiple speakers, overlapping speech, ambient noise and many other challenges. one may consider temporarily circumventing the problem of automatically transcribing the speech recordings by using manual transcripts, but this may be a laborious process. moreover, many fields of technology, including speech signal processing, natural language processing, data science and machine learning, are required to be integrated in order to conduct holistic research, which is also a technical challenge. this work attempts to apply current technologies to analyze student group discussion dialogs in order to extract indicators of group learning outcomes. this may pave the way for deeper analysis into flipped classroom activities and their pedagogical values, and perhaps inform possible directions in developing future intelligent classrooms. fig. 1 shows the framework of this study. we have recently transformed a freshman engineering mathematics course from the conventional instructional strategy to the flipped classroom strategy. students watched video-recorded lectures at home prior to group-based problem-solving discussion in class. multiple audio data streams from multiple groups are collected non-intrusively throughout the semester, and a customized speech classification technology is applied to obtain the student group discussion dialog audio. the dialog audio is then manually transcribed. then, spoken dialog features are extracted from the bilingual transcription text data and the audio data by using appropriate speech and language processing tools and techniques. learning outcome is measured in terms of examinations. several essential indicators from discussion dialogs that reflect the group learning outcome are found by statistical analysis. then, the indicators obtained from the statistical analysis are used as input to a variety of machine learning algorithms in order to predict the group learning outcome as high, mid or low. results indicate that it is feasible to use the indicators we found to automatically predict group learning outcome from face-to-face discussion dialogs in the flipped classroom. to the best of our knowledge, this is a novel work in investigating learning analytics from spoken discussion dialogs in order to explore the indicators of group learning outcomes. from the group discussions, correlation analysis and analysis of variance (anova) are performed between group-level features and group learning outcomes. as shown in fig. 5, the mean of the average number of math terms spoken by each in-group student, the mean of the average topic relevance score of each in-group student, and the sum of the number of quantity-unit words in an entire discussion dialog are positively correlated with the group learning outcome.
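the group-level statistical analysis described above can be sketched as follows; the feature names and the data frame layout are illustrative assumptions, not the study's actual variables:

```python
import pandas as pd
from scipy.stats import pearsonr, f_oneway

# Placeholder group-level table: one row per group discussion.
df = pd.DataFrame({
    "mean_math_terms": [4.1, 2.0, 5.3, 1.2, 3.8, 2.9],
    "mean_leisure_words": [1.0, 6.2, 0.8, 7.5, 2.1, 4.0],
    "outcome_score": [78, 55, 84, 49, 70, 61],          # numeric group learning outcome
    "outcome_level": ["high", "low", "high", "low", "mid", "mid"],
})

# Correlation between a dialog feature and the (numeric) group learning outcome.
r, p = pearsonr(df["mean_math_terms"], df["outcome_score"])
print(f"math terms vs outcome: r={r:.2f}, p={p:.3f}")

# One-way ANOVA of a feature across high / mid / low outcome groups.
groups = [g["mean_leisure_words"].values for _, g in df.groupby("outcome_level")]
f_stat, p_val = f_oneway(*groups)
print(f"leisure words ANOVA: F={f_stat:.2f}, p={p_val:.3f}")
```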
we observe from the anova results that low learning outcome groups tend to have a higher mean number of leisure words spoken in an entire discussion dialog, which suggests students in low learning outcome groups tend to have more relaxed off-topic discussion rather than on-topic discussion. we also find that high learning outcome groups tend to have lower variance in the number of leisure words in an entire discussion dialog, which suggests that high learning outcome groups consistently speak few leisure words. the anova results show that the mean numbers of negative emotion/anger/anxiety/risk/negation words spoken in an entire discussion dialog are significantly greater in low learning outcome groups than in high learning outcome groups. besides, we also find that the variances of the numbers of anger/anxiety/risk words in an entire discussion dialog are all significantly lower in high learning outcome groups, which indicates that high learning outcome groups consistently keep the number of negative sentiment words low. as shown in fig. 7, the mean of the average number of assent words spoken by each in-group student and the mean of the average min h1-h2 of each in-group student are significantly higher in high learning outcome groups than in low learning outcome groups. fig. 8 shows that the mean of the average number of certainty words spoken by each in-group student is positively correlated with the group learning outcome, which indicates that higher learning outcome groups tend to show higher conviction in discussion. the correlation analysis results show that the mean of the number of causation words in an entire discussion dialog, the mean of the average number of conjunction words spoken by each in-group student, and the mean of the average number of comparison words spoken by each in-group student are all correlated with the group learning outcome. correlation analyses also show that the mean of the cohesion score in an entire discussion dialog is positively correlated with the group learning outcome, which indicates that discussions with high context relevance are associated with a higher group learning outcome. moreover, the variance of the cohesion score in an entire discussion dialog is negatively correlated with the group learning outcome, which indicates that stably high context relevance of discussions is associated with a higher group learning outcome. automatic prediction of group learning outcome from discussion dialog features: upon identifying the proper features from group discussions that reflect the group learning outcome, automatic prediction becomes possible. first, we label the top three, middle four and bottom three groups, based on the learning outcomes of every week's topics, as high, mid and low learning outcome respectively. we have nine weeks of discussion data, thus our data consist of 27 high learning outcome discussions, 36 mid learning outcome discussions and 27 low learning outcome discussions (90 samples in all). naive bayes (nb), neural network (nn), k-nearest neighbours (knn), random forest (rf), lightgbm (lgbm), xgboost (xgb) and support vector machine (svm) are used to automatically classify the group learning outcome into high, mid or low.
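a minimal sketch of the outcome-classification step described above; the feature matrix, the cross-validation scheme, and the hyperparameters are illustrative assumptions, and lightgbm and xgboost are omitted to keep the sketch dependency-light:

```python
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
from sklearn.neural_network import MLPClassifier

# Placeholder data: 90 discussions x 12 dialog indicators, labels high/mid/low.
rng = np.random.default_rng(0)
X = rng.normal(size=(90, 12))
y = np.array(["high"] * 27 + ["mid"] * 36 + ["low"] * 27)

classifiers = {
    "NB": GaussianNB(),
    "NN": MLPClassifier(max_iter=1000),
    "KNN": KNeighborsClassifier(n_neighbors=5),
    "RF": RandomForestClassifier(n_estimators=200, random_state=0),
    "SVM": SVC(kernel="rbf"),
}

for name, clf in classifiers.items():
    scores = cross_val_score(clf, X, y, cv=5)   # 5-fold cross-validated accuracy
    print(f"{name}: {scores.mean():.2f} +/- {scores.std():.2f}")
```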
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/838.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/838.txt new file mode 100644 index 0000000000000000000000000000000000000000..d061d61f0e97195259ead2a76667768d83219444 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/838.txt @@ -0,0 +1 @@ +data and big data, in particular, operates increasingly as a currency integral to contemporary moral, knowledge and monetary economies. its production, collection and analysis are implicated in a multitude of diverse practices, where data underpins epistemological and economic practices and ideas that generate value which materialise along multiple registers: as financial profit, epistemological insight, social benefit, and institutional and political change. these notions of value can be distributed unevenly across different social and economic domains -for instance, public and research institutions afford higher importance to the epistemological and social gains, while corporate entities are highly invested in the potential monetary returns from big data -there is increasingly an imperative for data sharing across both public and private organisations. big data, "generated continuously, seeking to be exhaustive and fine-grained in scope, and flexible and scalable in its production" (kitchin 2014a), is increasingly valued less in its organisational containment-as privileged intellectual property for example-and more through combinatorial possibilities of sharing, linking and manipulation (e.g. auer et al. 2007;bizer, heath & berners-lee 2009;kitchin 2014b). examples can range from highly curated mechanisms for the sharing of personal and internet usage data with advertisers (as is the case with big social media and platform companies like google and meta), to data lakes necessitated by public-private partnerships around projects for digitalisation and smart urbanism (scassa 2020), and further, to open data initiatives by governmental institutions that provide varying degrees of access to public datasets.these arrangements for data sharing increase both the value and risks associated with big data and, particularly, the risks of misuse and privacy breaches that carry high-stake reputational hazards such as the high-profile case of cambridge analytica. at the same time, the mechanisms for addressing issues of data ethics and data governance in the context of inter-organisational data sharing are still a challenge for companies in their practical implementation and a relatively understudied aspect in the social sciences, organisation studies and information systems studies (lis & otto 2020, 2021, nokkala, salmela & toivonen 2019, de prieëlle, de reuver & rezaei 2020). over the past decade, concerns about data security and privacy have prompted action by state and market holders of such sovereignty: legislative change by national and transnational governments (e.g. european parliament, council of the european union 2016), and more stringent security controls alongside apparently contrite apologies by technology corporations like google and facebook in response to incidents of data loss, hacking or exploitation (e.g. zuckerberg 2019). 
as dramatic as individual cases may be, their recurrence highlights the inadequacy and immaturity of data governance regimes in an era of vast networked information flows, across as well as within organisational borders.data sharing poses a challenge to the governance of data, which traditionally links managerial and governance responsibilities to structures and functions within the corporate organisation (wende 2007). roles like data steward, data owner and data custodian are tightly linked to how a particular organisation stores its data, what it identifies as key data assets and how it aligns its data practices with its particular domain of economic activity. instead, data sharing across organisational entities necessitates a different arrangement, which puts the organisations participating in the data sharing partnership themselves in key roles like that of data stewardship, or associated functions related to the ethical use or risk management of data (lis & otto 2021).our study contributes to this nascent field of research into the governance of shared datasets between different entities, with a particular focus on the role of research institutions in such arrangements. we are interested in the functions adopted by universities and research institutions in multilateral data partnerships in two specific aspects: how are notions of value, risk and ethics negotiated between these organisations and their partners within the data sharing arrangement? and are research institutions afforded a particular role or function?research institutions have long been guided by internal disciplinary principles regarding their interaction with participants, the safety, disclosure and availability of research data and the responsibilities of individual academics and universities. with the growing amount of digital data collected, analysed and stored by researchers, the parameters of research ethics are increasingly shifting towards the adoption of some form of data management or data governance principles as part of the research process of conception, project description, and ethics regulation. some academic disciplines like biomedical research have already pioneered research using big data and, subsequently, have adopted principles for its management. others, like the social sciences, are still faced with the reality of negotiating two incongruent frameworks of research ethics where principles of data management enforced by it departments and university risk management policies are often seen as opposed to traditional discipline-imposed norms of research ethics, as discussed in a 2018 forum in the journal social anthropology (pels et al. 2018). this tendency towards the incorporation of data governance into the principles of scholarly research has been accelerated by the growing imperative for open science collaborations between academia and corporate partners, increasingly articulated through the need of data sharing and collaborative data platforms between universities and various production and service industries (see mirowski 2018, fernández pinto 2020).such developments have epistemic and political effects in academia, where big data analytics feeds into fantasies of objectivity, control and prediction (halpern 2015(halpern , 2022)), and generates new interdependencies between risk, contingency and value. these interdependencies are predicated on the expected gains unlocked by the adoption of practices of datafication and data sharing (lavalle et al 2011). 
these gains are in part due to the nearly mystical role big data plays in harnessing and channeling the destructive forces of risk and uncertainty (parisi 2016). at the same time, big data is not innocuous with respect to such forces itself. rather, it exposes new forms of contingency that disturb regular methods of risk management and requires its own models and comparatively new practices of organisational governance (tallon 2013). we have chosen four cases of existing research partnerships explicitly formed around the imperative for data sharing, each of which has produced documentation explaining its approach to the ethics of data sharing and the particular solutions adopted within the partnership. in our analysis of these partnerships we are interested in capturing key components of an emerging political economy of data sharing, which we believe constitute important aspects of the economy of data governance and data ethics.
access to facebook data is tiered according to sensitivity levels, and in the case of high-sensitivity data: "researchers may need to develop analysis code based on a synthetic data set and submit the code for automated (or manual) execution, and where all data analysis (and literally every keystroke) is subject to audit by us" (social science one, 2018b). as a data sharing collaboration founded directly to foster an innovative data economy in ireland, insight provides substantial benefits for both the research institutions and their industry partners while avoiding some of the obstacles of risk management across both academic and industry settings. ethics is established as a key multifunctional instrument to secure trust from the "originators of data" that will make possible future growth of the data economy (insight centre for data analytics, 2015, p. 3). noting that fears of privacy violation can act to hinder the benefits and innovation potential of big data science and industry, the provisional bill of data rights also critiques an exclusive and limited focus on privacy, which distracts "from the wider issues around data ethics" (insight centre for data analytics, 2015). ethical oversight is guaranteed through the inclusion of institutional review boards in the preparation of the research proposals -a bureaucratic form that itself has been criticised for being unable to respond to the challenges of big data research (leetaru 2016) -and later, during research dissemination and assessment, by academic research and publishing institutions, while the funding of research projects is steered through independent non-profit foundations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/839.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/839.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6225d20bd2e341f5b00c268e5f2bfc35984ed90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/839.txt @@ -0,0 +1 @@ +about one in ten older adults in the u.s. have alzheimer's disease (ad) and another 15 to 20% have mild cognitive impairment (mci), a third of whom will develop dementia within 5 years. individuals with dementia eventually become unable to perform complex everyday activities, including driving, so most current driving research focuses on mci or early-stage dementia. our 5-year project, funded by nih, focuses on creating an innovative in-vehicle sensor architecture, selecting a large group of older drivers ages 65 to 85, measuring driver behavior over a 3-year period, and identifying cognitive changes that can lead to early dementia. leading vehicle manufacturing companies, including volvo, ford, and kia, have adopted driver alert system applications that alert drivers in order to avoid accidents. most of those systems use the same techniques and algorithms to monitor driver behavior, but their use is limited to alerting the driver and preventing incidents from happening; e.g., a driver alert system alerts the driver if they have been driving for a long time. advanced driver assistance systems alert the driver if the vehicle is moving into another lane or if someone is in their blind spot. measures of driving behavior can be divided into 3 types: • driver-based: drowsiness, sensation seeking, impulsiveness, etc. • driving-based: distraction, attention, etc. • qualitative-based: speeding, braking, lane changing, etc.
among the above, distraction and drowsiness are major behaviors that change with time and can be used to find key insights into the participants' driving. one of them is distraction. some of the usual distractions during driving occur when drivers are looking off the road, texting, or using the phone. some of the metrics used to measure distraction are head pose and gaze patterns. the concept of utilizing in-vehicle sensors to measure the behavior of drivers and detect cognitive change is in itself innovative and reflective of the rapid development of these sensors and their application for monitoring driver behavior. 1) region of interest and face detection: multiple techniques are used for face detection based on the region of interest. because the number of drivers is large, we use roi and face detection interchangeably: a less accurate but fast face detection library gives a rough estimate of the location of the face, and the refined detection is then performed based on that location. 3) lane detection: lane detection is another feature of the video sensor system, which uses a deep learning model to detect lanes while driving. imu sensors also identify driving over potholes and raised pavement markers, allowing for analysis of how drivers react to unexpected pavement defects and lane departures in conjunction with the other telematics data. our objective is to measure and monitor changes in driver behavior indices (dbis) using the vision and telematics sensor data. for illustration, figs. 6 and 7 show the driving patterns of two senior drivers over a 2-week period based on the data obtained from in-vehicle cameras, which measured the number of times the driver closed their eyes, the number of distractions, lane crossings, and near-collision events. details of the data can be reviewed in previous works published in the same domain. we presented an innovative architecture of an in-vehicle sensor system consisting of vision and telemetry sensors that includes a set of ai algorithms to measure driver behavior indices. the system was already installed in about 70 cars driven by older drivers in florida with the objective of monitoring and detecting cognitive changes in these drivers.
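the coarse-then-refined face detection step mentioned above could look roughly like the following sketch using opencv; the cascade file, the padding factor, and the second-stage detector are illustrative assumptions rather than the system's actual components:

```python
import cv2

# Fast, less accurate first pass: a Haar cascade gives a rough face ROI.
coarse = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")

def refine_on_roi(roi_bgr):
    # Placeholder for a heavier, more accurate detector applied only to the cropped ROI.
    return roi_bgr

def detect_face(frame_bgr, pad: float = 0.2):
    gray = cv2.cvtColor(frame_bgr, cv2.COLOR_BGR2GRAY)
    boxes = coarse.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
    if len(boxes) == 0:
        return None
    x, y, w, h = boxes[0]                                   # take the first rough detection
    # Expand the ROI slightly, then run the refined detector on the crop only.
    x0, y0 = max(0, int(x - pad * w)), max(0, int(y - pad * h))
    x1, y1 = int(x + (1 + pad) * w), int(y + (1 + pad) * h)
    return refine_on_roi(frame_bgr[y0:y1, x0:x1])
```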
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/84.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/84.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d79a3dd9ea6bf45d58f3dbb6ff3e3641a03c07b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/84.txt @@ -0,0 +1 @@ +in temperate climates, perennial plants such as grapevines (vitis spp.) undergo alternating cycles of growth and dormancy. during dormancy, the shoot and flower primordia are protected by bud scales and can reach considerable levels of cold tolerance or hardiness to maximize winter survival (keller 2020). budbreak is identified as stage 4 on the modified e-l scale (coombe 1995) and it is strongly influenced by the dormancy period. once the shoots start to grow out during the process of budbreak in spring, the emerging green tissues become highly vulnerable to frost damage.an important issue is that ongoing climate change is increasing the risk of spring frost damage in vineyards because rising temperatures are associated with earlier budbreak and weather patterns are becoming more variable (poni, sabbatini, and palliotti 2022). consequently, the ability to predict the timing of budbreak would enable producers to timely deploy frost mitigation measures (e.g. wind machines) and improve the scheduling of vineyard activities such as pruning to adjust crop load. also, knowing when different grape varieties break bud under certain temperature scenarios enables investors and vineyard developers to better match more vulnerable varieties to lower-risk sites.several models have been proposed to assess the challenging task of budbreak prediction (nendel 2010), (ferguson et al. 2014), (zapata et al. 2017), (camargo-a. et al. 2017), (leolini et al. 2020), (piña-rey et al. 2021). as discussed by leolini et al. these phenological models can be classified into two main categories: forcing (f) and chillingforcing (cf) models. on one hand, forcing models are based on the accumulation of forcing units from a fixed date in the year. f models focus solely on describing the ecodormancy period by assuming that the endo-dormancy period has ended and the chilling unit accumulation requirement has been met. on the other hand, cf models account for both the endo-and eco-dormancy periods by considering the chilling unit and the forcing accumulation in relation to specific temperature thresholds -i.e., an estimated base temperature t b . although these models take into account thermal requirements, none of them include other environmental variables (e.g., solar radiation, relative humidity, precipitation, dew point) besides air temperature.the aim of this study is to investigate modern deep learning techniques for incorporating a wider range of weather data into budbreak predictions. in particular, we develop a recurrent neural network (rnn) for budbreak prediction from time series input of various weather features. the proposed models' performance tends to degrade in the case of cultivars that have limited data. multi-task learning has the potential to alleviate this issue, as it can utilize data across all cultivars to improve budbreak prediction. the main contributions of this work are: 1) to frame this multi-cultivar learning problem as an instance of multi-task learning, and 2) to propose and evaluate a variety of multi-task rnn models on real-world data. 
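a minimal sketch of one of the multi-task rnn variants mentioned above (the task-embedding model, described in more detail in the following passage); the embedding size, hidden size, and output head are illustrative assumptions, and the multi-head variant would instead keep one output layer per cultivar:

```python
import torch
import torch.nn as nn

class TaskEmbeddingRNN(nn.Module):
    """Sketch: a shared GRU over daily weather features, conditioned on a learned
    cultivar (task) embedding, predicting a daily probability of budbreak."""

    def __init__(self, n_weather: int, n_cultivars: int, d_embed: int = 8, d_hidden: int = 64):
        super().__init__()
        self.cultivar_embed = nn.Embedding(n_cultivars, d_embed)
        self.gru = nn.GRU(n_weather + d_embed, d_hidden, batch_first=True)
        self.head = nn.Linear(d_hidden, 1)

    def forward(self, weather: torch.Tensor, cultivar_id: torch.Tensor) -> torch.Tensor:
        """weather: (batch, days, n_weather); cultivar_id: (batch,) integer ids."""
        emb = self.cultivar_embed(cultivar_id)                   # (batch, d_embed)
        emb = emb.unsqueeze(1).expand(-1, weather.size(1), -1)   # repeat along the day axis
        out, _ = self.gru(torch.cat([weather, emb], dim=-1))
        return torch.sigmoid(self.head(out)).squeeze(-1)         # daily budbreak probability

model = TaskEmbeddingRNN(n_weather=5, n_cultivars=12)
probs = model(torch.randn(4, 250, 5), torch.tensor([0, 3, 3, 7]))  # 4 seasons, 250 days each
```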
finally, the obtained results show that multi-task learning is able to significantly outperform single-task learning. due to lack of programmatic access to existing budbreak models at the time of this writing, we reserve a comparison to those models for future work. based on these datasets, our goal is to learn a model m i for each cultivar that takes in weather features up to any day t and outputs a probability of budbreak for the day t. the most common learning paradigm is single-task learning (stl), which for our problem corresponds to learning a cultivar model m i from only that cultivar's data d i . to address this issue, we consider a multi-task learning (mtl) paradigm, which uses data across all cultivars to make predictions for individual cultivars.our stl model makes causal budbreak predictions by sequentially processing a weather data sequence x 1 , x 2 , .our stl model, shown in figure1b, simply feeds daily weather data x t into the first fc layer as input and adds an additional fc layer to produce the final lte prediction output.we consider two types of mtl models that directly extend the rnn backbone of figure1a, the multi-head model and the task-embedding model. similarly, the task embedding model uses a neural network to learn a general model that accepts cultivarspecific parameters as well as learning the parameters for each cultivar.as illustrated in figure1d, the task embedding model first maps a one-hot encoding of the cultivar in consideration to an embedding vector (analogous to cultivar "parameters"), which is combined with the weather data x t and then fed to the gru unit.table2shows the difference in bce between our mtl models and the baseline stl model for each of the cultivars. a positive value indicates that the mtl model improved over the stl model in terms of bce. however, we see that if we consider the number of cultivars where mtl slightly underperforms table3: looking at the difference of days metric for different model variants. to get a better understanding of the practical differences between mtl and stl we now consider using the models to predict the day of budbreak. we see that there are many more outliers predictions with large errors for the stl model compared to the mtl model.to get insight into the nature of the large errors in stl compared to mtl, figure3shows the predicted probabilities for the stl and multih model for a particular cultivar and season where a large stl error occurred. rather, the mtl model avoids the early jump in probability, which is likely due to learning a better general model of budbreak. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/840.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/840.txt new file mode 100644 index 0000000000000000000000000000000000000000..668661bba691fce5aaea7eac5e5732991d974222 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/840.txt @@ -0,0 +1 @@ +gpt-3, the latest iteration of the generative pre-trained transformers by openai, has gained much attention in recent years 1 . it is the most advanced pre-trained language representation system, is a statistical representation of language, and works by producing credible text based on user prompts . in fact, an initial test on people's ability to tell whether a ∼ 500 word article was written by humans or gpt-3 showed a mean accuracy of 52%; just slightly better than chance 1 .gpt-3 has remarkable capabilities, but also carries potential implications. 
it can be a great tool for machine translation, text classification, dialogue/chatbot systems, knowledge summarization, question answering, creative writing 2,5,6, detecting hate speech 7, and automatic code writing 2,8, but also to produce 'misinformation, spam, phishing, abuse of legal and governmental processes, fraudulent academic essay writing and social engineering pretexting' 1. gpt-3 is an amplifier of human intentions that can receive instructions in natural language, and its output can be in natural or formal language. the tool is neutral from an ethical point of view and is subject to the dual-use problem 12. the advancements in ai text generators and the release of gpt-3 coincide with the ongoing infodemic 13, an epidemic-like circulation of fake news and disinformation which, alongside the covid-19 pandemic, has been greatly detrimental to global health. given gpt-3's potential misuse and impact on global health, it is crucial to evaluate how its text affects people's understanding of information. in this paper we aim to determine whether gpt-3 can be used to produce accurate information and disinformation in the form of tweets, and to compare its credibility to that of human-generated information. at the same time we aim to determine whether gpt-3 can be used to develop assistive tools to help identify disinformation. we acknowledge that the definitions of disinformation and misinformation are diverse; in this paper we adopt an inclusive definition, which considers as disinformation both false information (including partially false information) and/or misleading content 14. to achieve our goals, we asked gpt-3 to write information or disinformation tweets on topics prone to disinformation and public misconception, such as vaccines, 5g, covid-19, or the theory of evolution. we collected real tweets on these topics and created a survey in which participants were asked to classify synthetic tweets (written by gpt-3) and organic tweets (written by humans) as true or false and to identify whether they were written by a real user or by ai. to test gpt-3's ability to generate accurate or fake tweets, we created prompts instructing gpt-3 to generate tweets with either accurate information or disinformation on the following topics: climate change, vaccine safety, theory of evolution, covid-19, mask safety, vaccines and autism, homeopathy treatments for cancer, flat earth, 5g technology and covid-19, antibiotics and viral infections, and covid-19 and influenza. we call "synthetic" those tweets that are generated by gpt-3, and "organic" those real tweets retrieved from twitter. we measured how accurately participants recognized whether a tweet contained disinformation or accurate information (disinformation recognition score) for four types of tweet: "organic true", tweets published by twitter users (organic) and containing accurate information (true); "synthetic true", tweets generated by gpt-3 (synthetic) and containing accurate information (true); "organic false", tweets published by twitter users (organic) and containing disinformation (false); and finally "synthetic false", tweets generated by gpt-3 (synthetic) and containing disinformation (false).
participants recognized "organic false" tweets with higher accuracy, compared with synthetic false tweets (scores 0. similarly, they recognized "synthetic true" tweets correctly more than "organic true" tweets (scores 0. for accurate information, gpt-3 obeyed our requests 99 times out of 101, whereas for disinformation the rate of obedience was much lower (80/102) (figure2b), indicating that gpt-3 can "refuse" to produce disinformation, and in rarer instances, it may produce disinformation when asked to produce accurate information., "organic true", "synthetic true", "organic false", "synthetic false"). that said, respondents obtained a significantly higher score for "organic true" tweets and, to a lesser extent, for "organic false" tweets (0.5 (0. looking at true versus false tweets, and organic versus synthetic tweets, we note something interesting; people's ability to determine organic versus synthetic tweets is not influenced by tweet accuracy, with answers being random on average. based on our data, and with the ai model adopted for our analysis, an efficient system relies on accurate information generated by gpt-3 (initiation phase), whereas it relies on trained humans to evaluate whether a piece of information is accurate or whether it contains disinformation (evaluation phase) (c). synthetic tweets containing reliable information are recognized as true better and faster than true organic tweets, while false synthetic tweets are recognized as false worse than false organic tweets. true tweets are those tweets containing accurate information, and false tweets are those containing inaccurate information, i. accurate information produced by a machine is more credible than accurate information produced by a human (synthetic versus organic accurate information). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/841.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/841.txt new file mode 100644 index 0000000000000000000000000000000000000000..05b668d47f5824dc6207bd98ce8079b7c0fa8e9f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/841.txt @@ -0,0 +1 @@ +over the past few years, the advancement of the internet of things (iot) technology has opened up a lot of research potential in the area of tracking and monitoring. among them, a wide variety of projects have been carried out to monitor the behavior of the human being as shown in - . these technologies made room for implementing convenient applications for enhancing day to day living of urban residents.with the increase of the world aging population as shown in and , research in monitoring the elderly has gained attention from different research principles. while these works - address the support of the elderly as independent beings of the society, and others , have provided the facility for the nursing home to monitor the daily activity behavior of the residents. the former method commonly leverages boundary-less tracking and monitoring techniques as shown in , , which include smartphones and smart billy pik lik lau, zann koh, yuren zhou, benny kai kiat ng, and chau yuen are with the engineering product development, singapore university of technology and design, corresponding e-mail: billy lau@mymail.sutd.edu.sg, yuenchau@sutd.edu.sg.mui lang low is with the peacehaven nursing home day centre run by the salvation army.manuscript received january 25, 2023 wearable devices. 
the majority of the latter approaches , mostly provide tracking in a confined environment, where the accuracy of the boundary-less approach is not ideal. our work focuses on the latter approach, where the constraints of monitoring senior citizens are limited to a nursing home.traditionally, it is labor-intensive to take care of the dayto-day life of an elderly resident, and it is not possible to constantly track an individual across 24 hours. therefore, using a building-scale human monitoring approach, it can assist the nursing home staff to monitor residents and better understand their activity behavior. with this challenge in mind, we design a system to monitor the deviated activity behavior of nursing home's residents leveraging iot technology. we use bluetooth low energy (ble) technology as backbone for collecting the elderly data due to nature of low energy, and coverage suitable for indoor application compared to wifi, rfid, zigbee, etc. the deviated activity behavior denotes a nursing home's resident behaves irregularly compared to his/her normal routine of daily life. this type of detection only can be achieved through fully understanding a resident's life routine. the main objectives of such a system are to identify the residents' activity behavior and determine the irregular activity behaviors. the constraints of monitoring residents' activity behavior in a nursing home are bounded by building structure, and also their daily activity is influenced by the group activities or community. therefore, our aim is to differentiate their activity between private and group activity when computing their deviated activity behavior. another constraint when designing this system is that we do not have ground truth on the data collected, which resulting the accuracy of system output cannot be validated. moreover, the identity of the nursing home residents is anonymized to comply with singapore personal data protection act . thus, unsupervised knowledge extraction is more desired when compared to the supervised knowledge extraction model.to address the aforementioned challenges, in this paper, we present a building-scale monitoring system to study 50 residents' activity behavior in the peacehaven nursing home, singapore. using the building-scale monitoring system, residents' activity behavior based on their location are investigated using a wearable card tag with a build-in bluetooth beacon. each room is equipped with a receiver to detect the bluetooth beacon transmitted to perform the resident's location detection. based on the detected location, we study the activity behavior of residents over 6 months and cluster them based on their common patterns. in order to identify the normal activity behavior, we use a data fusion method to generate the hybrid norm by combining the group and individual norm. using the hybrid norm, deviated activity behavior can be extracted, which does not follow the normal daily pattern of a resident. afterward, we perform empirical analysis on the deviated activity behavior and classify them.the key contributions of this paper are as follows:• we study the resident's activity behavior in a nursing home from a location-based implementation of a monitoring system. • we propose a data fusion method to identify the daily norm for each nursing home's resident based on two data sources, which are individual and group norm. 
• based on the daily norm generated, we perform empirical analysis on the deviated activity behaviors and identify their types using rule-based classification. our paper is organized as follows: in section ii, we review related work on existing methodologies for detecting deviated activity behavior, along with their pros and cons. subsequently, in section iii, the system architecture and data processing model are presented. afterward, we describe the group activity behavior clustering method in section iv. based on the groups detected, we compute the deviated activity behavior utilizing the hybrid norm and analyze it in section v. lastly, we conclude our work in section vi. the prediction model utilizes statistics to predict the potential activity behavior of a particular user, and if the observed behavior does not match the predicted outcome, it is labeled as deviated activity behavior. state estimation modeling maps the behavior of a particular user onto a system state, which can be used to model the user's activity behavior and detect any deviated activity behavior. after the residents' location data are stored in the database, we apply a series of processing steps to the residents' trajectory data to extract the deviated activity behaviors. based on these two norms, a data fusion technique is used to generate a daily hybrid norm for each resident, from which each resident's deviated activity behavior is then extracted. to study the similarity between residents based on the location data, a custom similarity kernel is proposed to measure the resemblance between residents' activity behaviors. as an example of deviated activity behavior detection, we use resident u21's norm and the group norm (cluster 3) to demonstrate how deviated locations are extracted from the data. by fusing both sources of information, the deviated activity behaviors can be computed for every resident in the nursing home, and the types of deviated activity behavior can be studied further. by breaking the data down into different periods, the time before and after group activity can be investigated to generate features for studying deviated activity behavior. category c1 represents the probability of residents not engaging in deviated activity behavior at different times of the day, while categories c2 to c5 represent the locations where a deviated activity behavior is detected. based on the classification labels generated, we examine three residents as case studies of normal daily activity behavior and deviated activity behavior. by utilizing the proposed hybrid deviated activity behavior classification, we obtain the types of residents' deviated activity behaviors in a nursing home. by understanding residents' normal activity behavior, deviated activity behavior can be differentiated and extracted from normal activity behavior for further study. based on the types of deviated activity behavior, three categories are proposed: (1) sleep irregularity, (2) awake irregularity, and (3) private visiting.
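the paper describes fusing an individual norm with a group norm into a daily hybrid norm and then flagging periods that deviate from it. the exact fusion rule is not reproduced here, so the sketch below assumes a simple weighted average of hourly location-probability profiles and a fixed probability threshold; both are illustrative assumptions, not the authors' actual method:

import numpy as np

def hybrid_norm(individual_norm, group_norm, w_individual=0.6):
    """fuse hourly location-probability profiles (shape: 24 x n_locations).
    the weighting is an assumed placeholder for the paper's data fusion step."""
    return w_individual * individual_norm + (1.0 - w_individual) * group_norm

def deviated_hours(day_observation, norm, threshold=0.05):
    """flag hours whose observed location had low probability under the norm.
    day_observation: length-24 array of location indices observed for a resident."""
    day_observation = np.asarray(day_observation)
    probs = norm[np.arange(24), day_observation]
    return np.where(probs < threshold)[0]   # hours considered deviated

# usage sketch:
# norm = hybrid_norm(resident_norm, cluster_norm)
# flags = deviated_hours(todays_locations, norm)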
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/842.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/842.txt new file mode 100644 index 0000000000000000000000000000000000000000..f6899fe781ac2b5cfbaeca01eb4458b5ac0f08e6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/842.txt @@ -0,0 +1 @@ +recommender systems have been crucial for online commerce, and their importance keeps growing with the increasing amount of products in the market. while the consumer profiling systems have effectively solved the problem of product recommendation, the problem of "design recommendation" is more challenging because an aesthetic design cannot be handled in a quantifiable manner, similar to the product features. therefore, solving this problem requires recommender systems to handle design aesthetics and consider that consumer perception of design is variable.different consumer groups perceive the same design differently . research studies related to consumer perception of a product design suggested that we adopted a unified position in product aesthetics appreciation; thus, consumers from different backgrounds can perceive modern design similarly . it was found that consumer basic profile like gender and age, is not significant in aesthetic perception; however, consumer personality, namely the aspect of openness to experiences, correlates with perception variation of aesthetic design - .recommender systems were born out of necessity in the early days of the internet due to the exponentially growing amount of information. the earlier item recommendation systems were centered around consumer feedback, where early consumer rating is used to rank the content for new consumers . then, hybrid recommenders introduced the use of consumer action information, such as buying an item or consuming content, besides manual rating . this idea encouraged collecting actions of multiple users, which led to more powerful recommenders based on consumer profiling coupled with the use of item information . further development was made through context-aware recommender systems .several recommendation systems stem from these basic ideas, such as the group recommendation system, where a preference of a group member related to an item can help predict the position of all group members regarding that item . other recommendation systems assume that consumers with neighbouring characteristics most likely have similar preferences . or manually insert consumer preferences constraints in the consumer profile . personality is first introduced in such systems to consider that consumer rating is highly dependent on the personality aspect of agreeableness . then later, to take advantage of the similarity of presences between closely positioned personalities in music , tv programs , video games , and products .in kansei studies, the consumer affect is considered the current emotion compound. it corresponds to the most abstract, positive or negative, responses to a product design , . machine learning approaches have been employed in multiple domains - , including kansei studies - . where it has been used to model the affective response in the emerging field of aesthetic design recommendation , . in this paper, we suggest a new method for aesthetic design recommendation based on consumer openness traits and wanted affect. 
the article is organized as follows: first, we present the proposed aesthetic recommendation system, then the implementation procedure for consumer affect modelling, describe the consumer affect experiment, and discuss the results. the proposed aesthetic design recommendation system (adrs) is based on multiple affective responses and the wanted affect. the second part consists of the interactive recommender, which collects the personal aspects of an individual consumer and the affect they are looking for in a design, and suggests the design most likely to fit the affect wanted by this particular individual. however, in our research review, we found that, when it comes to product design evaluation, only a few parameters have an observable connection with the aesthetic evaluation of a design. the recommender system then uses the consumer's wanted affect as the reference and finds the product design variation corresponding to the maximum of that affective response. participants were asked to rate the affective response to each design by selecting one of five degrees of affect (strongly disagree, disagree, neutral, agree, and strongly agree) for 12 affective adjectives, namely: feminine, emotional, delicate, elegant, technological, strong, gentle, traditional, loud, stable, practical, and luxurious. the recommendation algorithm is based on the idea of comparing the wanted affect with the predicted affect and outputting the design that corresponds to the highest value in that particular affect adjective. this system recognizes that people have different affective responses to aesthetic designs and models the individual response to aesthetics in relation to the aspect of openness in personality, plus the elements of exposure and mood during the aesthetic evaluation. the recommender part of this system uses the consumer's desired affect and returns the aesthetic design that corresponds to the highest value in that particular affect. a further direction is to investigate the connection between aesthetic features and affective response in order to build a more flexible design recommender that can generate designs instead of recommending from a limited set of designs.
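the recommendation step described above (compare the wanted affect with each design's predicted affect and return the design with the highest value for that adjective) reduces to an argmax over designs; the sketch below assumes the predicted affect scores are already available as per-design dictionaries, with the consumer-specific prediction model (built from openness, exposure, and mood in the paper) left out:

ADJECTIVES = ["feminine", "emotional", "delicate", "elegant", "technological",
              "strong", "gentle", "traditional", "loud", "stable", "practical", "luxurious"]

def recommend_design(predicted_affect, wanted_affect):
    """predicted_affect: dict mapping design id -> dict of adjective -> predicted score
    (the scores would come from the consumer-specific affect model). returns the design
    id with the highest predicted value for the wanted affect adjective."""
    if wanted_affect not in ADJECTIVES:
        raise ValueError(f"unknown affect adjective: {wanted_affect}")
    return max(predicted_affect, key=lambda design: predicted_affect[design][wanted_affect])

# usage sketch (made-up scores):
# best = recommend_design({"d1": {"elegant": 4.1}, "d2": {"elegant": 3.2}}, "elegant")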
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/843.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/843.txt new file mode 100644 index 0000000000000000000000000000000000000000..43415c6dffa883ed74e8c0c246ad1bb45a9ca634 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/843.txt @@ -0,0 +1 @@ +in the last decade, websites have spread non-evidence-based conspiratorial ideas in an unprecedented fashion, resulting in a supposed "golden age of conspiracy" . several websites, originally known for touting merely misleading information, have increasingly utilized conspiracy theories for specific political purposes . supporting this idea, today, almost 90% of the american public endorse at least one conspiracy theory , including those about 9/11 , ufos/aliens , flat-earth , covid-19 , and qanon . according to a poll by the public religion institute and the interfaith youth core in july 2021, approximately 15% of americans are even true "qanon believers" (i.e., those who believe that "the government, media, and financial worlds in the u.s. are controlled by a group of satan-worshipping pedophiles who additionally run a global child sex trafficking operation" ).researchers have found that individuals mostly gain exposure to conspiracy theories like qanon through the internet . indeed, websites like the gateway pundit, american thinker, and infowars have played an outsized role in developing toxic echo chambers and may have also exposed thousands of people to different conspiracy theories. prior research has shown that mere exposure to conspiracy theories can unconsciously influence people to believe in them . the exposure and subsequent belief in just one conspiracy theory can further induce paranoia and stimulate belief in other conspiracy theories . however, despite conspiracy theories' increasing prevalence, the precise relationship each conspiracy theory has with one another and the relationship between the coverage of conspiracy theories by online news outlets and the popularity of conspiracy theories is largely unknown. however, in this work, we use web crawling, hyperlink graphs, and statistical models of websites' behavior to examine the relationships between conspiracy theories and the role that online misinformation outlets, authentic news media, and the wider internet ecosystems play in their coverage, asking the following three research questions:(1) how do particular conspiracy theories hyperlink/interact with one another, the news media, and the wider internet? (2) how has news media's (both authentic news platforms and misinformation outlets) relationship with particular conspiracy theories changed over time? have different outlets' interactions with these conspiracies increased or decreased over time? (3) has the news media's (both authentic news platforms and misinformation outlets) interaction with particular conspiracy theories correlated with the rise or decline of specific conspiracy theories' popularity? to answer these three questions, we curate and analyze the behavior of a set of 755 conspiracy theory websites, subdivided into five different categories: qanon (227 websites), covid (134), ufo/aliens (193), 9/11 (104), flat-earth (97). 
using our own web scrapes and pages scraped by common crawl, 1 we then document the state and the changing behaviors of the conspiracy theory ecosystem and their relationship to a separate set of 530 known misinformation outlets, 565 authentic news websites, and 528 non-news websites.rq1: the state of the conspiracy theory ecosystem and its relationship with news media: to understand how different conspiracy theories interact amongst themselves, we first examine the shared hyperlink connections that our conspiracy theory categories have with each other. across all the different conspiracy theory domains considered, we find that qanon-focused websites have the highest percentage of shared connections with other conspiracy theory website groups, particularly covid (35.6%); this largely accords with news reporting that qanon has become a "big tent conspiracy theory" that incorporates the beliefs of other conspiracy theories . utilizing the common crawl harmonic and pagerank centrality measures that measure websites' centrality across all of the crawled internet, we then find many of the conspiracy theory-focused websites in our dataset have relatively high network centrality, suggesting that many of them are not peripheral on the internet but actually near the internet's core/are mainstream. indeed examining, the hyperlink connections between news media and these conspiracy theories, we find that many of them rely heavily on authentic news as well as misinformation outlets (compared to non-news websites) for their information, with many popular misinformation outlets also hyperlinking back to many of these conspiracy theory websites.rq2: news media's changing relationship with conspiracy theories having observed the relatively strong relationship between our set of conspiracy theory websites and different news outlets, we next seek to understand whether these connections have changed over time. we find, starting in 2018, a significant increase in hyperlinks to our set of conspiracy theory websites with the advent of qanon and then again in 2019-2020 with the beginning of the covid-19 pandemic. concurrent with this large increase in hyperlinks to our set of websites we further observe similar increases in the popularity of our set of conspiracy theory websites, and a more general increase from misinformation outlets towards conspiracy-oriented material. indeed, examining an additional set of 116k fringe websites, we find that between 2009 and 2021, the percentage of all misinformation websites' external hyperlinks to these conspiracy-oriented websites went from 9.0% to 13.2%, a 46.6% relative increase.rq3: misinformation outlets' role in promoting conspiracy theories: we finally apply partial granger causality analysis to ascertain whether the behavior of misinformation websites is a factor in the popularity (i.e., amazon alexa rank ) of conspiracy theories online. partial granger causality is a means of measuring if a given time series is useful for forecasting another while taking into account unmeasured endogenous and exogenous factors . our results suggest that in several cases as misinformation sites hyperlinked to conspiracy theory sites, this, in turn, correlated with the increased popularity of conspiracy theory websites. 
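partial granger causality as used in the paper accounts for latent endogenous and exogenous influences and is not available in common python packages; as a simplified illustration of the underlying idea, the sketch below runs a plain granger causality test with statsmodels on two aligned time series (hyperlink counts from misinformation sites and the popularity of a conspiracy theory website group). the column names are assumptions:

import pandas as pd
from statsmodels.tsa.stattools import grangercausalitytests

def granger_test(df, cause="misinfo_hyperlinks", effect="conspiracy_popularity", maxlag=4):
    """test whether `cause` helps forecast `effect` (plain, not partial, granger causality).
    df must contain both columns indexed by time; returns the ssr f-test p-value per lag."""
    data = df[[effect, cause]].dropna()   # statsmodels expects [effect, cause] column order
    results = grangercausalitytests(data, maxlag=maxlag)
    return {lag: res[0]["ssr_ftest"][1] for lag, res in results.items()}

# usage sketch:
# pvals = granger_test(weekly_df)  # small p-values suggest predictive influence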
conversely, we find, for qanon in particular, that as more mainstream outlets wrote about this conspiracy theory, the popularity of the corresponding qanon-focused websites decreased. conspiracy theories have come to play an increasingly large role in world events, as was apparent in the partly qanon-inspired january 6, 2021, attack on the u.s. capitol. similarly, as covid-19 spread throughout the world, conspiracy theories became a major impediment to curbing the pandemic. our analysis shows the role news outlets have had in promoting and spreading conspiracy theories throughout the past decade. as conspiracy theories continue to play a larger role on the internet, we hope that our results help shed light on their spread and show the utility of online structure-based network analysis in understanding their ecosystems. for this study, in order to examine how different and more general websites utilize specific conspiracy theories, we consider a website as misinformation if it regularly publishes false information about current events (without having a focus on any given conspiracy theory), while we consider a website as a conspiracy theory website if it focuses on one of our five listed conspiracy theories. we first investigate the percentage of news websites' hyperlinked domains that are shared by conspiracy theory websites before analyzing the percentage of conspiracy theory hyperlinked domains that are shared by news websites. we note that while our set of authentic news domains, as seen in table 4, rarely hyperlink to conspiracy theory websites (especially for the covid conspiracy theory) compared to misinformation domains, they do possess a somewhat stronger relationship with both the 9/11 and ufo conspiracy theories. performing a series of mann-whitney u-tests comparing the relative levels of connections between every conspiracy theory website group and mainstream/authentic news websites versus non-news websites, we find (after applying the bonferroni correction for multiple statistical tests) that every conspiracy theory group does indeed have elevated levels of shared domain connections with authentic news and misinformation websites.
hyperlinks from misinformation websites to individual conspiracy theory website groups-(note: hyperlinks were only collected to aug 2021) the vast majority of links from news websites to conspiracy theory websites are from misinformation sites.next looking at the interaction over time between news websites and conspiracy theory websites we see that misinformation websites have increasingly hyperlinked to conspiracy theory and conspiracy-oriented websites, concurrent with the increase in popularity of conspiracy theory websites (figures5, 6,and7). similarly, (given the relative lack of hyperlinks to our conspiracy theory websites from authentic news domains), we determine the relationship between the popularity of conspiracy theory websites and authentic news websites' frequency of mentioning different conspiracies. we further note that to perform tests for granger-causality, after fitting the autoregressive model, there must not be serial correlations among the construct, we determine whether the number of hyperlinks to each specific category of conspiracy theory websites from misinformation websites has a granger-causal influence on the popularity of this same category of conspiracy theory websites. conversely, we also determine whether the number of stories about a given conspiracy theory from authentic news websites has a partial granger-causal influence on the popularity of this same conspiracy theory while also taking into account the time-dependent influence of hyperlinks from misinformation websites and potential environmental exogenous input and endogenous latent variables.misinformation websites and propping up conspiracy theories: in figures1011121314, we see that for the 9/11 and covid conspiracy theories, the dcg of the popularity of each conspiracy theory group had a positive partial granger causality with the number of misinformation hyperlinks to these same websites.despite misinformation, authentic news, and non-news websites having substantially fewer connections to the same websites that conspiracy theory websites connect to, we do see that conspiracy theory websites have extensively utilized the same sources and domains as authentic news and misinformation outlets. using the notion of partial granger causality, we detailed the correlative relationships that exist between the number of hyperlinks on misinformation websites and the popularity of these conspiracy theory websites, also describing the more complex relationship that authentic news websites have with conspiracy theory websites. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/844.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/844.txt new file mode 100644 index 0000000000000000000000000000000000000000..3f8ecb61b4bc82427343c147cda6b4b09512fa9a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/844.txt @@ -0,0 +1 @@ +on february 24, 2022, the russian federation invaded ukraine. in the buildup to and following the initial invasion, russian state media conducted massive information campaigns justifying the russian state's invasion as a "special military operation" to "liberate" the people of ukraine (wong 2022). in response, the eu and the uk among others banned or otherwise censored russian news media. 
to circumvent this censorship, russian outlets like rt and sputnik news began to redirect users to and promote their content on the messaging app telegram; ultimately causing telegram to become one of the main platforms for russian propaganda (bergengruen 2022). when telegram eventually succumbed to pressure to de-platform prominent russian outlets (e.g., rtnews), several russian state media outlets created new telegram channels (e.g., swentr ) 1 and found other ways to circumvent the bans. however, while there has been extensive reporting on the russian state media's usage of telegram (bergengruen 2022), there has been no systematic study of how information flows between russian media and telegram.in this paper, we document the increased usage of telegram by russian outlets and present the first programmatic and multilingual study of the spread of news content amongst and between russian news sites and telegram.to do this, we crawl and gather content published between january 1 and september 25, 2022, from 16 russia-based news sites (215k articles) and the 732 telegram channels hyperlinked by these russian news sites (2.48m telegram messages). leveraging a multilingual version of the large foundational model mpnet (song et al. 2020), fine-tuned on semantic search, we perform semantic similarity analyses of the content spread between and amongst these news platforms and our corresponding set of english-language, russian-language, and ukrainian-language telegram channels. further, by improving upon an online and parallelizable non-parametric version of the k-means algorithm, we cluster our dataset into fine-grained topic clusters to understand the topics discussed.we find that much of the content shared between telegram and russian websites concerns the war in ukraine and western sanctions on russia. by performing this same clustering on semantic content specific to only russian news websites or only telegram channels, we find an emphasis on the day-to-day machinations (e.g., bombings of particular bridges) of the russo-ukrainian war on telegram that contrasts with a focus on us politics specific on russian news outlets. next, we track the spread of topics amongst and between telegram and russian news media. we find that 33.2% of distinct topics discussed on our set of telegram channels, making up 24.3% of all telegram messages in our dataset, were first published in russian news articles. in contrast, 13.9% of topics on russian news websites, comprising 18.4% of all the text (all messages from telegram and paragraphs from russian state media) on our set of russian websites, were posted first on telegram. telegramoriginated messages comprise a particularly large amount of content on news websites like waronfakes.com (28.2% of text), ukraina.ru (27.9%), and ura.news (25.6%) that all maintain large telegram presences. applying time-series analysis with the hawkes processes on our topic clusters, we then estimate the percentage of content on each platform that was influenced by activity on other platforms. while some websites like ura.news had relatively low amounts of content (2.3%) flowing from telegram, others such as ukraina.ru had much larger (26.7%) percentages of their content possibly influenced by activity on telegram. 
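the cross-platform matching described here relies on a multilingual mpnet model fine-tuned for semantic search; a rough sketch with the sentence-transformers library is shown below. the specific checkpoint name is an assumption (the authors fine-tune their own multilingual mpnet variant), and the similarity threshold is illustrative:

from sentence_transformers import SentenceTransformer, util

# assumed off-the-shelf multilingual checkpoint; the paper fine-tunes its own mpnet variant
model = SentenceTransformer("paraphrase-multilingual-mpnet-base-v2")

def matching_pairs(news_paragraphs, telegram_messages, threshold=0.8):
    """return (paragraph_idx, message_idx, score) for semantically similar pairs
    across languages, using cosine similarity over sentence embeddings."""
    emb_news = model.encode(news_paragraphs, convert_to_tensor=True, normalize_embeddings=True)
    emb_tg = model.encode(telegram_messages, convert_to_tensor=True, normalize_embeddings=True)
    sims = util.cos_sim(emb_news, emb_tg)
    return [(i, j, float(sims[i, j]))
            for i in range(sims.shape[0])
            for j in range(sims.shape[1])
            if sims[i, j] >= threshold]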
finally, we analyze how quickly topics spread amongst our set of news websites and onto telegram channels, finding that websites like ura.news, ukraina.ru, and news-front.info, and the telegram channel @genshab's topics flow most quickly to other platforms.our work presents one of the first in-depth analyses of semantic content and semantic similarity among and between russian state media websites and telegram channels. we show that by leveraging multilingual models, we can programmatically track the spread of topics and ideas across platforms. as misinformation and propaganda increasingly spread on messaging platforms like telegram and what-sapp, we hope that our work can serve as the basis for future studies about the spread of misinformation online.in this paper, we document the increased usage of telegram by russian outlets and present the first programmatic and multilingual study of the spread of news content amongst and between russian news sites and telegram.4% of all the text (all messages from telegram and paragraphs from russian state media) on our set of russian websites, were posted first on telegram. telegram channels.in this section, we analyze how russian state media has changed its utilization of telegram since the beginning of the russo-ukrainian war, as well as how the content promoted on the largely uncensored telegram platform has differed from the content on news media websites. 4 we note that while we utilize this methodology to identify the messages that bear the closest resemblance to specific russian websites, and thus to content sponsored by russian=-state media, among our set of 732 telegram channels, this methodology can be easily extended to identify suspect telegram channels that repeat or mention propaganda on a much wider scale, which we leave to future work.having explored the shared content amongst russian news and telegram channels, we now analyze the content specific to our set of russian news websites.having analyzed the static behavior of the topics shared among our telegram and russian news ecosystems, in this section, we examine the speed at which topics spread amongst and between russian websites and telegram channels. to estimate the spread of topics amongst and between different russian websites and telegram channels, we first cluster all 4,094,510 texts (telegram messages and paragraphs from articles) as specified in section 4.as seen in table6, even after removing official russian telegram channels, most of our websites had a noticeable portion of their content that was first posted on telegram.ru, and rbc.ru, indeed publish content after it first appears (at least a day later) on telegram, telegram channels themselves also utilize topics from russian news sources for nearly a third of their topics.ru first appears on telegram., content whose topic was first posted on a particular website or a telegram channel) from each of our websites and telegram channels travel to other websites and telegram channels.com/20220919/putin-zelensky-meeting-far-examining the spread of each website's content to our set of 732 telegram channels in figure6b, we see that on average, stories did not typically spread to more than approximately 20 telegram channels within the first 100 days." 
8 finally, again examining the individual topics from telegram channels that spread the furthest (the most amount of telegram channels), we see a message from @uranews about the donbas regions of ukraine ostensibly wanting to declare independence and join russia that spread to 557 other telegram channels. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/845.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/845.txt new file mode 100644 index 0000000000000000000000000000000000000000..d57562b4b61e4a37b173787e231c6f2e50294bfc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/845.txt @@ -0,0 +1 @@ +the outbreak of the covid-19 pandemic at the beginning of 2020 and the recent significant epidemic have had a severe impact on all countries worldwide (sudheer reddy et al., 2020;mahmoud et al., 2020;nguyen, 2022). as of mid-october 2021, there were around 240 million infections worldwide, with 4.86 million fatalities (dong et al., 2020). faced with this dilemma, governments throughout the world have implemented several urgent steps to minimize the disease's effect and transmission, including travel restrictions, social isolation, mask use, and the closure of public areas. in tandem with initiatives to raise public knowledge of epidemic prevention through media such as television, newspapers, radio, social networks, and text messaging (binsar and mauritsius, 2020), etc., governments have also implemented many supportive measures to trace contacts and warn people infected with covid-19 (grekousis and liu, 2021). for example, the covid-19 self-reported symptom and contact tracking apps are the two types of apps widely being used. the former allows people to report their health conditions and the latter enables them to check and trace their contacts. both provide additional information such as the symptoms of covid-19, updates on covid-19, the rate at which the virus spreads in various locations, high risky areas in the country, etc. this kind of app is anticipated to assist state authorities in swiftly tracking and managing infections in the community and users will be able to collect timely information to help avoid epidemics (bansal et al., 2010;wang et al., 2020). to interact with devices, the contact tracking app essentially uses bluetooth low-energy technology. in this case, your phone will register any other phones it comes into contact with, as long as both your device and the others have a fully enabled covid-19 contact tracking app. they use random numerical id numbers that change regularly and are destroyed after 14 days (covid-19's incubation period) (fetzer and graeber, 2021).even though the covid-19 app is supposed to have a significant positive impact and that many people would utilize it, the actual usage was not as expected. for example, hargittai et al. (2020) reported that only 67% of respondents were willing to install a tracking app for several reasons, the majority of which are privacy concerns. similarly, garrett et al. (2021) estimated that the acceptability of tracking technologies ranges from 62-to 70% depending on the scenario. many more other examples can be found in the literature (o'connell et al., 2021;grekousis and liu, 2021). this issue raises a research question: what factors influence the use of a covid-19 tracking application?answering the above research question is critical to persuading individuals to participate in and support epidemic prevention in both physical and virtual spaces. 
the findings will help managers, policy researchers and software developers make the required modifications to promote the benefits and power of the covid-19 tracking app. as colizza et al. (2021) pointed out, it is "time to evaluate covid-19 contact-tracing apps", and the literature has made major contributions to this topic from multiple perspectives (e.g., qualitative and quantitative) (grekousis and liu, 2021; hargittai et al., 2020). however, no study has attempted to explain the causal connections between factors. as a result, this research has a distinct and important position in the current landscape, especially given that the covid-19 epidemic shows no signs of abating. an individual's behavioral intention can be described as the perception of whether (s)he will engage in a certain activity (fishbein and ajzen, 1977). in this study, effort expectancy depicts users' perceptions of the ease of use of the covid-19 tracker. three items were used to evaluate trust, namely: (1) i believe that the information provided by the covid-19 tracker is reliable, (2) i trust the use of the covid-19 tracker, and (3) the covid-19 tracker provides the functionality that users need. with the aforementioned studies in mind, the following hypotheses were proposed: hypothesis 5 (h5): trust will have a positive effect on behavioral intention; hypothesis 6 (h6): trust will have a positive effect on performance expectancy; hypothesis 7 (h7): trust will have a positive effect on effort expectancy; hypothesis 8 (h8): trust will have a negative effect on privacy risk. privacy risk is considered a user's anxiety about the disclosure of personal information (li, 2011). two questions were used to evaluate perceived risk, namely: (1) i think using the covid-19 tracker puts my privacy at risk, and (2) my personal data may be leaked when using the covid-19 tracker. as such, the following hypothesis was proposed: hypothesis 9 (h9): privacy risk will have a negative effect on behavioral intention. these assumptions were transformed into the study model depicted in the corresponding figure. first and foremost, the authors extended the unified theory of acceptance and use of technology (utaut) model by highlighting the elements impacting behavioral intention to use the covid-19 contact tracking app. to the best of our knowledge, this is one of the few studies to investigate the causal relationships between a collection of factors and behavioral intention to use a covid tracker app while taking trust and privacy into consideration. as a result, the findings of this study add to the covid-related and user behavior literature by emphasizing the consequences of covid-19 contact tracing app use in the real world. the extended unified theory of acceptance and use of technology model was adapted to investigate user behavior toward the covid-19 tracker application in terms of performance expectancy, effort expectancy, social influence, facilitating conditions, trust, and perceived risk. in turn, trust had a positive influence on effort expectancy, and trust had a statistically significant and negative effect on privacy risk. however, hypotheses h2 (effort expectancy → behavioral intention), h3 (social influence → behavioral intention), and h4 (facilitating conditions → behavioral intention) were not supported in the experiment.
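studies of this kind usually estimate the hypothesized paths with structural equation modelling (e.g. pls-sem); that estimator is not reproduced here, so the sketch below only illustrates the flavour of testing such hypotheses with a plain ols regression of behavioral intention on averaged construct scores using statsmodels. the column names are assumptions:

import pandas as pd
import statsmodels.formula.api as smf

# assumed survey dataframe with one averaged score per construct and respondent:
# intention, performance_exp, effort_exp, social_influence, facilitating_cond, trust, privacy_risk
def fit_intention_model(df: pd.DataFrame):
    """ols stand-in for the structural model: a positive significant coefficient on trust
    would point in the direction of h5, a negative one on privacy_risk toward h9."""
    formula = ("intention ~ performance_exp + effort_exp + social_influence + "
               "facilitating_cond + trust + privacy_risk")
    return smf.ols(formula, data=df).fit()

# usage sketch: print(fit_intention_model(survey_df).summary())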
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/846.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/846.txt new file mode 100644 index 0000000000000000000000000000000000000000..60da580148ab715404cce98ea322b430ed1fb060 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/846.txt @@ -0,0 +1 @@ +the fair principles describe 15 aspirational dimensions of research data management (wilkinson 2016). they provide a starting point for mapping out data stewardship practices needed for any given research project. however, there is no way to ensure all research objects adhere to fair, and fair is not all encompassing. for example, fair is silent on data quality and reproducibility. fair also does not comment on data sovereignty such as is covered in the care principles of indigenous data governance (carroll 2022). consensus is lacking on whose responsibility it is to ensure fairness of research objects. stakeholders range from individual researchers to institutions to funders and publishers. in each case, the stakeholder group is looking for clear and practical advice and is less interested in philosophizing about the need for research data management practices or complex and detailed arguments over which approach is better. researchers must address these topics only as much as funders require it. the us national institutes of health (nih) requires a data management plan and, effective in 2023, increases the requirements to cover data sharing (national institutes of health). the national science foundation (nsf) requires a data management plan to be included with proposal materials. funders around the globe require discussion of research data and fair in varying degrees. countries and regions that lead the trends include the european union's (eu) research funding calls, and australia. were it not for these funder requirements, researchers would only take these steps on a voluntary basis. however, this requirement provides a key moment during proposal preparation for outreach and awareness of the fair principles and how they relate to newer technologies. the fair+ implementation survey tool (fairist) creates information that can be included in a proposal's data management plan and/or the project description. its contribution and value are as much in what it produces as well as the conversations and decisions it suggests by completing it. even where support and services are not available to researchers from their institution, the mention of them can initiate important discussion. this work is organized as follows. we introduce definitions and terminology in section 2. related work is also presented in this section. in section 3, we present the motivations and stakeholders of fairist. section 4 provides detail on fairist's design and functionality. we close this work with a discussion and perspectives on future work. fair also does not comment on data sovereignty such as is covered in the care principles of indigenous data governance(carroll 2022). the us national institutes of health (nih) requires a data management plan and, effective in 2023, increases the requirements to cover data sharing (national institutes of health). the fair+ implementation survey tool (fairist) creates information that can be included in a proposal's data management plan and/or the project description.fair data is data that meets principles in four categories: findability, accessibility, interoperability and reusability. 
according to libguides for fair data, the first thing to be in place to make data reusable is the ability to find them.even though the original fair principles publication called out the need to make all types of research artifacts fair, there has been an overemphasis on data, e. although often mistaken as the "r" in fair, reproducibility is aided by implementation of the fair principles, especially those that pertain to openness of software, tools and libraries, accessibility of data, etc. other important advancements that relate to fairist include tools for interviewing researchers about fair implementation, data management practices, and upcoming tools for publishing and reusing data management plans. it uses fair principles in how it collects data, utilizing apis wherever possible so that researchers, institutions, and funders are not manually entered but connected to a unique identifier.this new initiative from the go fair foundation and ios press, seeks to extend new tools for data stewards and researchers (fair connect). this capitalized on researchers' interest to learn more about fair implementation and research data management during the proposal process. the added benefits of planning data management at the outset of a research project are many: it makes it easier to audit, to check compliance with requirements, and to document the project for helping both researchers and funding agencies." adherence to the fair principles that relate to clearly stating data usage licenses and accessibility of data would prepare researchers to answer "yes" to this question. this tool could also be used in synchronous and asynchronous trainings, such as the codata-rda schools of research data science curriculum (codata-rda-datascienceschools/materials), a grantsmanship course (nsf hsi national stem resource hub), or a data management plan training course hosted by a university library. for example, for projects that will produce a new domain repository and are from the earth sciences, fairist could offer the option to include the repository in the magnetics information consortium (magic) or council of data facilities (cdf) consortium (council of data facilities). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/847.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/847.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4340638efa44b24629c1aea2294e50eaaaa8d08 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/847.txt @@ -0,0 +1 @@ +to address the carbon issue in the it sector, many green techniques have emerged. as presented in figure 2, these techniques will help reduce the carbon footprint in the three layers of the metaverse as well. at the infrastructure layer, the techniques can be categorized into the improvement of the it system, cooling system, power system, as well as the entire infrastructure concerning all of them. at the interaction layer, given the whole process of interaction with others, the techniques include the improvement of end-devices, ar/vr/mrbased applications, and networking. at the economy layer, the techniques mainly refer to the improvement of the blockchain technology. nevertheless, these existing techniques also show limitations when dealing with metaverse workloads. in the following sections, we will dive into these green techniques for the three metaverse layers by analyzing their applicability and limitations. 
it is worth noting that our estimation to the energy consumption and carbon emissions of the metaverse in section ii-b would not influence the discussion on green techniques in the following sections. even if there exists a relatively high estimation error, in view of the growing trend of the metaverse, the carbon footprint of the metaverse will still be large enough to emphasize the importance of green efforts. • how much energy will be consumed and how many carbon emissions will be produced by the metaverse? ( §ii) • what are the major components of the metaverse and what are their energy consumption and carbon footprint shares? ( §ii) • which green techniques can help drive decarbonization of these components, and how is their applicability when dealing with metaverse workloads? ( §iii, §iv, and §v) • what are the limitations of these existing techniques?. to reduce the estimation error as much as possible, we speculate the energy consumption and carbon emissions of the metaverse based on the energy figures provided by the following reputable sources: (1) the global it market size and metaverse market size: a report from the business research companyand a report from emergen research, respectively, (2) the energy consumption of datacenters, enddevices, and networking: articles on information and communications technology (ict) energy from huawei technologies,, and (3) the energy consumption of the blockchain: a research from the university of cambridge for calculating the bitcoin energyand an article from the technical university of munich for speculating the overall energy of cryptocurrencies.to quantify the carbon emissions, we should keep in mind that they are not only determined by the energy consumption, but also positively correlated with the carbon intensity of electricity generation which gets decreased due in large part to an increasing share of clean energy. the power usage effectiveness (pue) is a commonly used indicator for measuring the energy efficiency of a datacenter as a whole, and is defined as the ratio of the total amount of energy used by the whole datacenter infrastructure to the energy delivered to the it equipment.besides the energy consumption of it systems, attention should be paid on the energy efficiency of cooling systems and power systems that are the most power-consuming parts among non-it systems.propose a deep reinforcement learning based solution to reduce the cooling energy through workload scheduling and cooling adjustment, which achieves up to 15% of the energy savings. energy efficiency improvement of the entire infrastructure 1) clean energy: the adoption of clean energy can greatly reduce the carbon footprint from the aspect of energy supply because of its much lower carbon intensity than conventional fossil energy sources, as listed in tableiv., renewable energy, nuclear energy, and fossil energy with carbon capture and storage technologies) would need to contribute 70% of the energy demand in 2050. the continuous promotion of clean energy plays a key role where google matches the global energy consumption with 100% renewable energy since 2017. 
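Editor's note: since the passage defines PUE and states that carbon emissions depend on both energy consumption and the carbon intensity of electricity generation, a minimal sketch of both relations may help; all numeric values below are hypothetical placeholders, not figures from the text.

def pue(total_facility_energy_kwh: float, it_equipment_energy_kwh: float) -> float:
    # PUE = total energy used by the whole datacenter infrastructure
    #       / energy delivered to the IT equipment (as defined in the passage)
    return total_facility_energy_kwh / it_equipment_energy_kwh

def carbon_emissions_kg(energy_kwh: float, carbon_intensity_kg_per_kwh: float) -> float:
    # emissions scale with both energy use and the carbon intensity of the grid
    return energy_kwh * carbon_intensity_kg_per_kwh

# hypothetical example: 1.5 MWh total facility energy, 1.0 MWh delivered to IT,
# grid carbon intensity of 0.4 kg CO2 per kWh
print(pue(1500, 1000))                 # -> 1.5
print(carbon_emissions_kg(1500, 0.4))  # -> 600.0 kg CO2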
to address these challenges, there are three ways to achieve the service reliability while utilizing the clean energy: predicting the clean power generation, leveraging the energy storage, and utilizing the temporal and/or spatial correlation of the clean energy.design short-term prediction algorithms for both solar energy and wind energy, and propose a workload scheduling approach to improve the proportion of clean energy usage. they propose a market mechanism to motivate datacenter operators to sell waste heat to district heating systems, which not only improves the energy efficiency of datacenters but also reduces the usage of fossil energy in heat generation. for example, the electricity prices in many european countries are even set negative sometimes in order to maintain a must-run power capacity, which indicates that higher energy consumption of datacenters or a lower energy efficiency may be desired in this case. although the energy efficiency of 5g is higher than previous 4g/lte, the larger antenna numbers, higher base station density, and higher bandwidth, as well as the high data transfer demand by the metaverse will make its total energy consumption increase sharply. since these methods improve energy efficiency usually at the expense of performance, metaverse developers should carefully select the components and its frequency that achieves the best energy efficiency and performance tradeoffs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/848.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/848.txt new file mode 100644 index 0000000000000000000000000000000000000000..056ee20474d1747cf9e6dcd51b601d8b0b1301fc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/848.txt @@ -0,0 +1 @@ +covid-19 has triggered a chain of technological developments in healthcare systems that lead to the creation of 'society 5.0' , where artificial intelligence (ai) and industry 4.0 present the basis for digital personalised healthcare. digitalising the healthcare system has proven beneficial for managing covid-19 but has also increased the cyber risk surface, while the benefits and improvements for vaccine production and supply remain to be seen. this article conceptualises the construction of predictive algorithms that can improve the cyber risk posture of health care systems and the ability to rapidly adapt to hugeto prevent vaccine production and supply chain bottlenecks, this section conducts a conceptual analysis of six potential bottlenecks and suggests six solutions that encompass a design process for constructing ai algorithms based on new and emerging data and technologies during a disease x event. in the second stage, the ai system should enable the development of quantitative risk, impact and value assessment with real-time data, allowing for dynamic optimisation and managing of a disease x event.the integration of algorithms in cyber risk impact assessment of failures in healthcare systems -in the same way that was used to forecast covid-19 growth rates, would enable a progression from manual to automated assessment. this requires classifying probabilistic data into primary -disease x events and the health system response/failure to the events, and the secondary -the failure of other health systems as the reactions to disease x event (e. 
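Editor's note: the closing remark about selecting component frequencies that achieve the best energy-efficiency/performance trade-off can be made concrete with a toy selection rule. The candidate operating points below are hypothetical; a real system would use measured performance and power figures.

def best_operating_point(points):
    # points: list of (frequency_mhz, performance, power_watts) tuples (hypothetical)
    # returns the operating point with the highest performance per watt
    return max(points, key=lambda p: p[1] / p[2])

candidates = [(800, 60, 2.0), (1200, 80, 3.5), (1600, 90, 5.0)]
print(best_operating_point(candidates))  # -> (800, 60, 2.0), i.e. 30 perf/W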
at present, there are no methods, frameworks, models, or formulas for measuring the secondary risk from cyber events, not to mention extraordinary events that could trigger catastrophic loss of life -such as disease x. the integration of algorithms in risk assessing and optimising the vaccine production and supply chain can apply stratified sampling and random sampling and integrate the fair institute method 1 for the data analysis and the comparative aspects of the collected data.the expected difficulties in applying the new solutions include the lack of data for testing the algorithms on a realtime vaccine supply chain -healthcare providers might be reluctant to allow experimental technology in their operational supply chains. without appropriate risk assessment of the increasingly more digitalised healthcare systems, the potential risks could exceed the benefits, especially because of the expected difficulties in integrating legacy healthcare systems with complex ai networks. in addition, expected difficulties include the modification of the healthcare systems' physical parameters as a result of disease x, for example, during covid-19 many new hospitals were built at speed and the cybersecurity of these field hospitals was not a priority, given the serious and urgent risk of covid-19 on the loss of life. alternatives to mitigate this risk include (1) seeking existing real-world testbeds designed for similar purposes -by healthcare providers, governments, private organisations; research institutions; (2) building a smallscale low-cost supply chain testbed for this project -with a limited number of connected devices -mostly sourced from existing testbeds built for different purposes.2conceptual framework integrating the new solutions for production and supply chain response during a disease x event presents a state-of-the-art solution based on edge analytics, for detecting anomalies and predicting (i. alternatives for future researchers to mitigate this risk include using training data from covid-19 and editing the parameters to reflex the characteristics of disease x. the new ai solutions are based on experimental developments in research on ai algorithms for risk assessment from adversaries that attack the healthcare system during disease x. alternatives to mitigate this risk include testing the algorithms with existing data from covid-19 and verifying their effectiveness as solutions for the next pandemic and disease x. this will mean that the algorithms developed during covid-19, would be tested on training data limited by the parameters of (by then) past pandemics, and will need readjusting of the training data to include an element of randomness -which is expected from disease x. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/849.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/849.txt new file mode 100644 index 0000000000000000000000000000000000000000..6cab91c542c64694d384f31b36cb126e670bbcb1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/849.txt @@ -0,0 +1 @@ +applications of artificial intelligence to artistic processes (creative-ai1 ) are becoming more common, and media attention towards these applications is large. at the focus of many media contributions is the question when a human artist-genius will be replaced by a similarly genius and autonomous ai. 
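Editor's note: the passage mentions applying stratified sampling and random sampling to the collected risk data. The sketch below only illustrates the two sampling modes on a hypothetical incident table (it is not the FAIR Institute method itself) and assumes the pandas library.

import pandas as pd

# hypothetical incident records with a 'severity' column used as the stratum
df = pd.DataFrame({
    "severity": ["low"] * 6 + ["high"] * 4,
    "loss": [1, 2, 1, 3, 2, 1, 9, 8, 7, 10],
})

random_sample = df.sample(n=4, random_state=0)  # simple random sampling
stratified_sample = df.groupby("severity", group_keys=False).sample(frac=0.5, random_state=0)  # stratified sampling within each severity level
print(random_sample)
print(stratified_sample)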
but a quick look at various widely discussed ai art projects clarifies that in most cases the ai acts far from autonomously, and that the symbolic realm of the artistic creation is densely connected to a material dimension. the immersive media installation titled "archive dreaming" by refik anadolu creates a "canvas with light and data applied as materials", and facilitates both user-driven and automatic exploration "of unexpected correlations among documents". the project was realised with the support of 20 collaborators at three institutions, and the ai comprises a model that has been trained on 1.7 million documents using the computational power provided to the artist by google's artists and machine intelligence program (anadol 2017). the completion of the composition of beethoven's 10th symphony was approached by a group of music historians, musicologists, composers and computer scientists over a period of several years involving extended data preparation and ai model design (elgammal 2021). the novel "1 the road" was written by an ai, but the artist ross goodwin carefully orchestrated the sensor inputs that would feed the ai model during a road trip. the creative process involved not only goodwin, his car, and an ai, but also a film crew accompanying him and documenting the road trip from new york to new orleans (hornigold 2018). in comparison to the ai beethoven project, however, the artist took the deliberate choice to present the ai output in its raw form, with the motivation to illustrate limitations and potential of such an autonomous machine creation.besides the formation of an ai model in the context of a specific artwork, many companies have shaped tools for media production that use -or at least claim the use -ai as a means to support the creative processes of the users of the tools. examples are the use of ai for mastering the sound of a music production 2 , audio restoration3 , or the scene analysis and filtering of images based on ai by photoshop (clarke 2020). such systems have been called creative support tools (cst), and these tools have been well-documented within hci research for over a decade (frich et al. 2019;gabriel et al. 2016). based on the literature review of 143 papers, frich et al. (2019) define cst as a tool that "runs on one or more digital systems, encompasses one or more creativity-focused features, and is employed to positively influence users of varying expertise in one or more distinct phases of the creative process". following this definition, a creative-ai tool is simply a cst that employs artificial intelligence. demystifying this latter buzzword, artificial intelligence in the context of creative-ai (and in most other contexts) is nothing but data-driven analysis and/or synthesis of media data, and the intelligence that emerges from training processes can be fairly considered as narrow. that is, all tools for artists and designers so far comprise an intelligence that can at least partially conduct a creative process in a fairly restricted context. the beethoven ai can continue phrases in the style of one specific composer, and the archive dreamer can hallucinate novel data in the context of one specific archive. 
going beyond these borders demands adapting and re-training models, which demands on the material side new collections of data and expenditure of energy for the training of models.the goal of this chapter is to develop a perspective on the environmental impact of creative processes that make use of ai, when we consider them as part of an economic system that transforms artistic creation to a commodity. i will discuss specific creative-ai cases and their environmental impact along with the role of large corporations involved in creative-ai development. already in the 1970s, the economic and social theorist jacques attali (1985) predicted a situation that strongly relates to current business plans by the largest music streaming provider spotify. as we will see, prospects of using creative-ai as a collaborative tool that supports creative processes promise an extension of the horizon of artistic possibilities, but such optimistic perspectives neglect the role of large corporations as gatekeepers to systems and infrastructures. i believe that it is timely to combine political, environmental, and sociocultural perspectives on creative-ai to better understand how the power exercised by capital and the environmental impact of creative-ai may motivate researchers and artists to adopt a more critical perspective. the chapter -written by an ethnomusicologist/computer scientist -is characterised by a certain focus on music, but its general implications can be transferred to other forms of art.besides the formation of an ai model in the context of a specific artwork, many companies have shaped tools for media production that use -or at least claim the use -ai as a means to support the creative processes of the users of the tools. i believe that it is timely to combine political, environmental, and sociocultural perspectives on creative-ai to better understand how the power exercised by capital and the environmental impact of creative-ai may motivate researchers and artists to adopt a more critical perspective. when it comes to potential environmental consequences of creative-ai use, it is unclear how large the energy consumption and related carbon footprints of specific artistic projects may be(jääskeläinen et al. 2022). on this side, information from existing ai-art projects allows us to obtain an overview of most common architectures and training data sizes.it is essential for an understanding of the political ecology of creative-ai to indicate the orders of energy consumption and/or carbon emissions related to training and developing certain models used in artistic contexts. as openai does not provide information of how much energy the whole r&d process required, let us assume in analogy tostrubell et al (2019)that the energy consumption of the overall r&d may have been three orders larger than a training the network a single time, which would send us around the planet 18,000 times by car, or supply a small swedish city with energy for one year.these two examples indicate that artistic projects that involve the use of creative-ai may involve large energy consumption for training the needed networks. 
as there is no reason to assume that this process is simpler, the development of an ai for artistic purposes can involve many iterations of training and result in energy consumption that is orders higher than training a network a single time.all this evidence implies that the carbon footprint resulting from artistic projects involving creative-ai is likely to be large compared to artistic projects with similar outcomes but not involving creative-ai.an equitable engagement of all stakeholders of ai applications into a discussion of the environmental impact of ai has been argued to be a cornerstone of a third wave of ai ethics (van wynsberghe, 2021), with the possible conclusion to refrain from using ai in certain application areas. with most artists who employ creative-ai having acquired the programming skills that are needed to run inference and training of models in environments such as python, it seems a realistic suggestion to employ tools to estimate energy consumption of the development and to report such estimates when publishing the artwork.as we observed injääskeläinen et al (2022b), in the design of interactions and experiences with creative-ai all current interfaces to ai-models have distanced the actual consequences of the actions very far away from the users of these technologies: the amount of energy required to perform some inference and/or training remains concealed, just as the kind of energy that was employed by some remote servers involved in the computations. 2022), and user evaluation tools to identify the best version of an automatic composition system(pachet & roy 2022b).the speculation that i presented in this chapter illustrates how the combination of the environmental impact of creative-ai with the increasing commodification of artistic creation by means of creative-ai is very likely to have severe consequences for cultural practices, the livelihood of artists, and the ecosystem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/85.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/85.txt new file mode 100644 index 0000000000000000000000000000000000000000..ea68cc7b83f6e077bd022f7aad74948585d297f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/85.txt @@ -0,0 +1 @@ +reinforcement learning (rl) enables an artificial agent to learn problem-solving skills directly through interactions. however, rl is notorious for its sample inefficiency. successful stories of rl so far are mostly limited to applications where a fast and accurate simulator of the world is available for collecting large amounts of samples (like in games). real-world rl, such as robot learning, remains a challenging open question.one key obstacle to scaling up data collection in real-world rl problems is the need for resetting the agent. the ability to reset the agent to proper initial states plays an important role in typical rl algorithms, as it affects which region the 1 microsoft research 2 rutgers university. correspondence to: hoai-an nguyen , ching-an cheng . agent can explore and whether the agent can recover from its past mistakes (kakade & langford, 2002). in most settings, completely avoiding resets without prior knowledge of the reset states or environment is impossible. in the absence of a reset mechanism, agents may get stuck in absorbing states, such as those where it has damaged itself or irreparably altered the learning environment. 
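Editor's note: one practical way to act on the suggestion above, estimating and reporting the energy and emissions of model development, is an off-the-shelf tracker. The sketch assumes the third-party codecarbon package (my choice, not a tool named in the chapter) and uses a placeholder workload in place of actual model training; the reported figure is an estimate, not a measurement of any specific artwork.

from codecarbon import EmissionsTracker

tracker = EmissionsTracker(project_name="creative-ai-training")
tracker.start()
_ = sum(i * i for i in range(10_000_000))  # placeholder for a training run
emissions_kg = tracker.stop()              # estimated kg CO2-equivalent for the tracked block
print(f"estimated emissions: {emissions_kg:.4f} kg CO2eq")

Such an estimate could then be reported alongside the published artwork, as the chapter suggests.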
for instance, a robot learning to walk would inevitably fall before perfecting the skill, and timely intervention is needed to prevent damaging the hardware and to return the robot to a walkable configuration. another example is a robot manipulator learning to stack three blocks on top of each other. unrecoverable states would include the robot knocking a block off the table, or the robot smashing itself forcefully into the table. reset would then reconfigure the scene to a meaningful initial state that is good for the robot to learn from., 2018;sharma et al. we can think of reset states in reset-free rl as unsafe states in safe rl., 2022;ding et al.we weaken typical assumptions of current approaches in empirical reset-free rl, infinite-horizon rl, and safe rl by dropping any requirements on knowledge of undesirable states or for demonstrations, and by allowing arbitrary initial state sequences that admit reset-free policies. considering arbitrary initial state sequences where initial states potentially are correlated with past behaviors is not only necessary to the reset-free rl setting, but also allows for extensions to both lifelong and multi-task learning. finally, by the analogy between safe states and reset states, on the technical side, our framework and algorithm can also be viewed as the first provable safe rl algorithm that allows for arbitrary initial state sequences without strong assumptions. under the linear mdp assumption, our algorithm achieves õ( √ d 3 h 4 k) regret and violation (equivalently, the number of resets in resetfree rl), which is asymptotically equivalent toghosh et al. we create a framework to design provably correct reset-free rl algorithms via a reduction first to a sequence of cmdps with an adaptive initial state sequence, and then to a two-player game.we consider episodic reset-free rl: in each episode, the agent aims to optimize for a fixed-horizon return starting from the last state of the previous episode or some state that the agent was reset to if reset occurred.problem setup and notation formally, we can define episodic reset-free rl as a markov decision process (mdp), (s, a, p, r, h), where s is the state space, a is the action space, p = {p h } h h=1 is the transition dynamics, r = {r h } h h=1 is the reward function, and h is the task horizon., the initial state of the first episode) admits a reset-free policy is insufficient to make reset-free rl feasible; since the transition dynamics are unknown to the agent, under this assumption alone, for any algorithm, there is a problem3such that the number of resets must be linear. in addition, if reset happens, in practice, the agent is usually set to a state where it can continue to operate in without reset; if the agent is at a state where no such reset-free policy exists, reset should happen. finally, we show achieving sublinear regret in this two-player game implies sublinear regret and resets in the original resetfree rl problem in (2), and we discuss how to leverage this framework to systematically design reset-free rl algorithms. each problem instance in this sequence corresponds to an episode of the reset-free rl problem, and its constraint describes the probability of the agent entering a state that requires reset. in particular, we design a resetfree rl algorithm for linear mdps using our new reduction techniques, taking the first step towards designing provable reset-free rl algorithms. 
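Editor's note: the regret/violation bound quoted in the passage appears garbled by text extraction. A plausible LaTeX reconstruction, assuming d denotes the feature dimension of the linear MDP and K the number of episodes (neither symbol is spelled out in the excerpt), is:

\[
\mathrm{Regret}(K) \;=\; \tilde{O}\!\left(\sqrt{d^{3}H^{4}K}\right),
\qquad
\#\mathrm{resets}(K) \;=\; \tilde{O}\!\left(\sqrt{d^{3}H^{4}K}\right),
\]

with the episodic model given by the tuple \((\mathcal{S}, \mathcal{A}, P, r, H)\), where \(P=\{P_h\}_{h=1}^{H}\) are the transition dynamics, \(r=\{r_h\}_{h=1}^{H}\) the reward functions, and \(H\) the task horizon, as defined earlier in the passage.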
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/850.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/850.txt new file mode 100644 index 0000000000000000000000000000000000000000..be10b3e3f82fc7c97302035ca6a7816e1cfda0ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/850.txt @@ -0,0 +1 @@ +when used inappropriately, the harmful internet use (hiu) becomes a major problem, especially for the younger generation. according to , problematic internet use has been linked to behavioral addiction, major depressive disorder, adhd, deficit/hyperactivity disorder, sleeping disorders, cognitive deficits, and suicides. terms like "internet addict" have commonly been used to recognize the burgeoning destructive potential of the excessive internet use or being attracted to illicit pastimes.in this study, a rating scale is utilized as a tool for quantitative measuring of a new social phenomenon: hiu. considerable efforts are being made to improve its accuracy and reduce the uncertainty of measurements when enhanced by the peer assessment and analyzed with the help of differential evolution.the netflix september 2020 release of the docudrama "the social dilemma" (see ) has generated considerable publicity related to hiu, and is a prime illustration of the need for improved measurement of hiu.the social dilemma is a pivotal docudrama (see ) which delves into the dangers of social media in particular. by interviewing the designers of social media platforms, the film makes a compelling case that social media poses a viable threat to civilization itself. social media companies constrain us into adopting modes of thinking and behaving in ways that are profitable for corporations, rather than thinking and behaving in ways that are based on our own goals, beliefs, or values. designers of social media platforms force us to give our time away to corporations selling 'big data' to their clients. we argue that the approach presented here should be used to measure the harm suggested in the social dilemma. group assessments can be improved by: , , , and . another type of harmful internet use is presented by .analysis of question q2 shows that 39% of children see a problem related to avoiding social contacts. negatively effects of hiu on school performance is seen as a problem by only 26% of parents and 22% of children. the analysis of question q4 shows that almost 78% of children and 54% of parents believe that hiu does not impair their health, hygiene and eating pattern.analysis of question q5 shows that, consistent with the previous question, nearly 67% of children and 46% of parents do not view as a problem of the avoidance other activities as negative to the child's development.the results of the analysis of question q6 show that nearly 73% of children and 59% of parents have not even attempted to reduce their hiu. the majority of respondents (56% of children, 47% of parents) believe that hiu has no application to them or their children. however, question 8 was evaluated by parents in a way indicating that male children have more problems than female children., q3, q4, q5 or q7) increases the chance that the person has unsuccessfully tried to quit an unwanted activity. in this model, the care rating of parents or acquaintances (q3) has the strongest positive impact on the assessment obtained in question (q6). 
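Editor's note: a minimal sketch of fitting a binary logit of the kind described here, e.g. regressing q6 (tried to stop the unwanted activity) on q1, q3, and q5. The toy responses and the regularized scikit-learn estimator are my assumptions, standing in for the paper's survey data and estimation procedure.

from sklearn.linear_model import LogisticRegression
import numpy as np

# columns: q1 (knows the person has a HIU problem), q3 (care rating of parents), q5 (avoids other activities)
X = np.array([
    [1, 4, 1],
    [0, 1, 0],
    [1, 5, 1],
    [0, 2, 0],
    [1, 3, 0],
    [0, 1, 1],
    [1, 4, 1],
    [0, 2, 0],
])
y = np.array([1, 0, 1, 0, 1, 0, 1, 0])  # q6: has the respondent tried to stop?

clf = LogisticRegression().fit(X, y)
print(clf.intercept_, clf.coef_)  # positive coefficients indicate predictors that raise the odds of q6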
the relationship between gender and the hiu rating obtained in the question (q8) is shown by the logit binary model.for parents' (model m4), (q6) infers that there is the largest positive dependence between "the stop hiu question" (q6) and the avoidance of other activities (q5). the parents' increased concern (q3) and the fact that the respondent knows that the person has a hiu problem (q1) can be an important predictor of hiu. however, the high rating obtained in the hiu (q8) strongly implies educational problems (q7) for both genders. a slightly smaller impact on rating (q7) has a reduced hygiene and food eating pattern (q4) and poorer interpersonal relationship (q2).model m6 shows the relationship between the intensity of the hiu (q8) and the subject's gender (q9). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/851.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/851.txt new file mode 100644 index 0000000000000000000000000000000000000000..958a8bd2a129de6c5ba02c0969c580be07fb55eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/851.txt @@ -0,0 +1 @@ +p orts play a central role as part of the global transport chains, bridging the sea and land transport legs. as part of diverse transport and logistics networks, ports ought to support efficient transshipment and intermodal transfer . the importance of their role in the global transport system is increasing, as 90% of world trade passes ports on its way from origin to destination . ports are also at the center of more local economies, since being gateways to global trade they support domestic networks and can thus be considered as a catalyst for the economic development of regions and entire countries . with the steady growth of maritime transport and global sustainability efforts, ports are under increasing pressure to improve their profitability, environmental friendliness, energy performance and efficiency. . the dynamic and competitive character of the maritime transport chains drives the application of new technologies and innovation to enhance performance and increase cooperation and transparency, and attract new business . ports are economic catalysts for neighboring cities that can facilitate market integration and agglomeration of services and generate social and economic benefits. likewise, cities provide essential resources and infrastructure to ports, and ports provide goods, raw materials, and other transportation services for urban development . primarily, ports serve as transshipment hubs in transportation chains by linking different modes of transportation, acting as facilitators of international trade as transportation demand varies spatially, and facilitating temporary storage to match size differences between different modes of transport . the connection between port and hinterland is an area with room for improved efficiency in operations and sustainability. since the majority of freight transport between port and hinterland is carried out by truck, the presence of a port can lead to congestion on the (peri-)urban road network caused by hinterland traffic to and from the port area. the resulting congestion on the urban road network not only affects the quality of life among citizens in vicinity, but also the competitiveness of ports . 
ports are therefore closely linked to (smart) cities as well as to supply chains, as they are both nodes in transport chains and providers and users of urban infrastructure, as ports increase the global connectivity of cities while using their infrastructure for hinterland transport . apart from their functional linkage, all three are also characterized by a set of commonalities, mainly their complex interplay of various interconnected processes and actors.preparing for the future, many european ports, are focusing on safety, efficiency and sustainability and have been launching efforts to provide a complete and up-todate overview of port activities via digital twinning . twinning in this context aims to enhance ports real-time situational awareness for static, moving, human-controlled or autonomous entities and artifacts, by bringing together geographic, process, or sensor information, be it historic or real-time.taking a step back from ports, digital twins (dt) enable simulation of systems' behavior and have been referred to as a "quantum leap in discovering and understanding emergent behavior" . originally developed to support manufacturing, digital twinning has attracted a great deal of attention, especially from the perspective of systems' engineering . the main focus of several industries such as transportation, security, aerospace, manufacturing, and many others, is to achieve optimal performance, reliability, robustness, and efficiency among various of their systems with different characteristics, to accomplish a common objective . this has led to a growing interest in integrating different independent systems to enhance their overall capabilities and performance. within this context, the idea of the digital twin is to be able to design, test, manufacture, and eventually use the virtual version of complex systems . such systems relevant to ports encompass port congestion systems , information systems , and port traffic flow simulation systems . examples from the smart city domain, which is closely functionally connected to the port, include monitoring systems , intelligent transportation systems , urban fine management systems and system of systems approaches , .although the concept of the dt has largely evolved since its coining in 2002 and initial successful digital twinning implementations within the port domains exists, there is still a lack of standardization, methodologies, and tools for the development and implementation of dts . furthermore, the concept and content of dts lack a precise, uniform definition and description . this results in problems with a uniform implementation of the dts in ports. an additional obstacle is that different port actors usually keep a wide array of, practically, vertical information systems (i.e. with limited or no actual interconnection between them), due to the large number of actors in the port processes. a further obstacle is the large number of port actors with a variety of information systems with limited or no interconnection. this results in a lack of data exchange, leading to individual operators inability to plan required capacities (short and long term), making it difficult to accurately predict when a port a call will occur and hence what resources are needed when . furthermore, although all ports share common characteristics, such as providing services to freight and ships, they often differ wildly in terms of size, geographic characteristics, governance, functionality, and specialization . 
therefore, there is no one-size-fits-all solution for a port dt, but nevertheless a set of requirements and dt aspects relevant for most ports regardless of their specificities should be able to be identified, which will be the further discussed in this paper.to summarize, in contrast to traditional dt applications, ports are differentiated by their complex interplay of independent actors, processes, and activities that are because of present lack of connection between systems subject to uncertainty . however, these challenges are also shared with smart cities and supply chains. in addition, the port as a part of the transport chain in supply chains and as physically interconnected with city infrastructure is functionally intertwined with both of these areas. although research papers investigating the potential of twinning of port elements have contributed significantly to understanding the potential of digital twinning in ports, a holistic digital twinning approach that covers the entire functionality of the port in terms of its fundamental role as a node in the transport chain of global supply chains and as an important transport infrastructure in the smart city it is still in its infancy.based on commonalities of similar dt requirements as well as functional linkage with each other, this paper intends to discuss the requirements and structure of a port dt, building on the knowledge and developments of dts in the urban and supply chain contexts. more specifically, this paper contributes to the dt research by the following:• a thorough analysis of dt definitions across two application domains, that of smart cities and of the supply chain, yielding a discussion of core characteristics, enablers and potential usage in the port domain • a characterization of a port dt through a comprehensive analysis of its processes and characteristics, leading to the identification of three core requirements (taking into account the interrelationships and commonalities with smart city and supply chain dts) • proposing operational strategies on how a port dt can contribute to energy savings through the optimal use of port facilities and equipment and intelligent linking of port processes • discussing potential barriers delaying large-scale digital twinning of the port and possible consequences of the samethe paper is organized as follows. in section ii we outline the methodology of the paper. section iii addresses how definitions of the dt have developed, and differ depending on their application domain. building on the characteristics of the different dt definitions discussed in table 2, table 3 presents the cross-domain characteristics. in section iv, digital twinning implementations of the smart city and the supply chain and the decisive role of the port within these application domains are illustrated in table 4. dt implementations for the smart city and the supply chain are thus discussed in more detail in subsections iv-a and iv-b resulting in tables 5 and6. based on an intensive study of all operational processes in the port considering the dt characteristics discussed in section iii, and with respect to commonalities with smart city or supply chain aspects, three main objectives of the port's dt are defined and discussed in depth in chapter v. these three main objectives compromise the importance of situational awareness in subsection v-a1, data-driven decisionmaking in subsection v-a2, and the importance of providing tools for enhanced collaborative actions in subsection v-a3. 
the insights gained in section v will be further elaborated to gain an understanding of how a port dt can contribute to energy savings resulting in chapter v-b. concrete practices are presented in table 7 on how the dt can contribute to the effectiveness of port operations by optimizing the use of port facilities and equipment and aligning processes. section vi further discusses potential barriers in implementing a port dt. the core results are subsequently discussed and summarized in section vii and ix respectively. although research papers investigating the potential of twinning of port elements have contributed significantly to understanding the potential of digital twinning in ports, a holistic digital twinning approach that covers the entire functionality of the port in terms of its fundamental role as a node in the transport chain of global supply chains and as an important transport infrastructure in the smart city it is still in its infancy.• a thorough analysis of dt definitions across two application domains, that of smart cities and of the supply chain, yielding a discussion of core characteristics, enablers and potential usage in the port domain • a characterization of a port dt through a comprehensive analysis of its processes and characteristics, leading to the identification of three core requirements (taking into account the interrelationships and commonalities with smart city and supply chain dts) • proposing operational strategies on how a port dt can contribute to energy savings through the optimal use of port facilities and equipment and intelligent linking of port processes • discussing potential barriers delaying large-scale digital twinning of the port and possible consequences of the same. thereafter to identify functionality and requirements of a port dt supporting port efficiency as a whole, a detailed examination of the numerous, interconnected port processes and the complex interplay of different actors in the port were analyzed. the fusion of the port and the city in providing infrastructure critical to the port can be recognised in the port of hamburg's smartbridge project, where the city and the port are working together on a dt solution to maintain the most important bridge for the port serving most of its hinterland transport.the synchronized data provide opportunities to monitor, analyze, control, and optimize the supply chain and results in upto-date virtual simulation and optimization that provide agility integrated an integrated supply chain shares information from all involved actors and manages all distribution and logistics activities through a central system, making decisions across different stages of the supply chain together a dtsc actively operates in the entire business process throughout the supply chain and thus provides an integrated and holistic view that enables all stakeholders to collectively contribute to the creation across different stages of the supply chain intelligent an intelligent supply chain makes largescale, optimal decisions and uses predictive analytics to protect the supply chain from future risks a dtsc's predictive analytics, based on optimization and simulation algorithms, enable decision makers to look forward instead of backward and intelligently design the supply chain. 
present core techniques for a systematic framework of a dt-based model focusing on transport and operations for smart port management.the aim of this section is to demonstrate how a holistic dt can significantly optimize the port as a whole, based on three core statements, which provide characteristics and requirements of a port dt which support the realization of the three dt "tools" offocusing on situational awareness, intelligent decision making, and increasing the efficiency of collaboration between the numerous port stake-holders. similar to the key objective of enhanced coordination of the city dt and the dtsc, the port dt should also have the capability to facilitate information sharing between all actors involved in the supply chains with transports passing the port as well as the port actors. through standardised digital data exchange and subsequent dynamic visual representation through the dt, relevant port actors can gain the necessary situational awareness to make coordinated decisions that improve resource utilisation and enhance the safety and efficiency of operations to create added value for the individual port actor and the port as a whole. a dt implementation of the port should offer a high level of connectivity and visibility, similar to the supply chain dt, to best accommodate the transport flows that go through via the port (cross reference table6), while the level of detail and the dynamic nature of the ever-changing processes can be aligned with, for example, the traffic modules of the city's dt. the first dimension would thus be a 3d representation of all port buildings and a dynamic illustration of all port processes as described in chapter v-a1, while the second dimension would provide a platform for collaborative decision making, in which all port actors are networked, and simulations of what if scenarios predict the effects of different measures on the actor itself, but also on the port as a whole. compared to previous port studies regarding the potential of dts, this study approached the port as a system, observing the port in its full functionality in terms of its interconnected processes, but also with respect to its interconnection with other complex systems, such as the supply chain, and the smart city. from the dt development perspective, the numerous port processes in section v-a and the port operational strategies to reduce energy consumption in section v-b provide the necessary understanding for dt requirements and use cases from the port's perspective.since the port is also an important node in supply chains transport chains, as well as a mobility service provider and an economic hub of a smart city, future research should thus also address how the port dt should be linked to supply chain and city dts and identify areas of interaction. we therefore close the paper by proposing the following as a port dt: a digital twin of a port is a grouping of models and algorithmic components that jointly describe the complex interplay of port processes and operations allowing the characterization, estimation, and prediction of the most efficient operations at the process level, but also for the port as a whole. 
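Editor's note: the what-if simulations mentioned above (predicting the effect of measures such as adding gate capacity) can be illustrated with a minimal discrete-event sketch. This is not the port DT implementation discussed in the paper; it assumes the third-party simpy library and uses hypothetical arrival and service rates purely to show how scenarios can be compared.

import random
import simpy

def truck(env, gates, waits):
    arrived = env.now
    with gates.request() as req:      # wait for a free gate
        yield req
        waits.append(env.now - arrived)
        yield env.timeout(random.expovariate(1 / 4.0))  # ~4 min handling time (hypothetical)

def arrivals(env, gates, waits):
    while True:
        yield env.timeout(random.expovariate(1 / 3.0))  # ~1 truck every 3 min (hypothetical)
        env.process(truck(env, gates, waits))

def mean_wait(num_gates, minutes=480, seed=1):
    random.seed(seed)
    env = simpy.Environment()
    gates = simpy.Resource(env, capacity=num_gates)
    waits = []
    env.process(arrivals(env, gates, waits))
    env.run(until=minutes)
    return sum(waits) / len(waits) if waits else 0.0

for n_gates in (1, 2, 3):                          # the "what-if" dimension
    print(n_gates, "gates -> mean wait", round(mean_wait(n_gates), 1), "min")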
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/852.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/852.txt new file mode 100644 index 0000000000000000000000000000000000000000..74f8d874230d05f32a21dc800448070ba4367528 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/852.txt @@ -0,0 +1 @@ +the healthcare industry is growing rapidly in the united states because of the increased number of the aging population, shared consciousness of personal health problems, and medical technology improvements. as a result of the growing industry of healthcare, new emerging issues occur in the collection and storage of patient data, and new ways to process, analyze and distribute these data. this has exposed various security threats to personal health data (lee, 2022). as (al-harrasi et al., 2021) remarked "data are considered to be intellectual assets in organisations worldwide. data theft is a major part of the insider threat and, for this reason, the prevention of data theft and data leakage from insider threats is becoming a major area of interest within the field". in this case study, we will discuss the issues vanderbilt university medical center (vumc) challenges while implementing ehr systems which are used to analyze and monitor health records by the users such as doctors, organizations staff, and pharmaceutical agencies (kaul et al., 2020), and we will analyze these issues and provide solutions and recommendations to solve them.in 1997, vumc started gathering data as part of its ehr initiatives. the center required greater, more structured data handling by 2009. hospital executives started an initiative to create a data governance infrastructure at that time. putting data governance into practice. the executive staff at vumc had various challenges:• although it investments and technologies were constantly growing, him (healthcare information and management) policies did not apply to them. all data are not created equal, and using technology by itself won't help patients receive better care. providers and organizations must be able to tell the difference between a surplus of data, useful data, and data integration. whereas new technology and treatment modalities are changing and expanding at a rapid rate, healthcare companies are challenged to face these data issues in their daily operations and workflow. • medical records became increasingly susceptible to hacking, as they were made electronically so they could be shared and transmitted easily. cybercriminals are drawn to healthcare data because it contains financial and personal information, can be used as leverage in extortion, and-most lucrative of all-is perfect for fraudulent invoicing. due to the fluid and constantly changing nature of a patient's medical care as well as the sheer volume of physicians, facilities, and transactions necessary to connect patient care across various settings, they are also incredibly susceptible to penetration. • keeping up with the emergence of new electronic information applications was challenging for the medical facility. were, (curioso et al., 2022) "the health industry faces significant obstacles in resource-constrained environments, including effective health innovation projects and health management programs. additionally, one pertinent difficulty is enhancing health system operations to accomplish health sector goals. we risk digitizing chaos if procedures are not mapped, updated, or reviewed on a regular basis." 
as a result of the growing industry of healthcare, new emerging issues occur in the collection and storage of patient data, and new ways to process, analyze and distribute these data., 2021)remarked "data are considered to be intellectual assets in organisations worldwide. data theft is a major part of the insider threat and, for this reason, the prevention of data theft and data leakage from insider threats is becoming a major area of interest within the field". providers and organizations must be able to tell the difference between a surplus of data, useful data, and data integration. when healthcare organizations want to develop one integrated system that can hold all the health records electronically by implementing ehr, there will be three main issues derived from this problem: faulty data, data breaches, and the cost of failure.data will not be so accurate or complete in ehr systems because of improper lab and imaging results or medical errors which are mistaken by the physician documentation, these will damage patients data and the accreditation of organizations. as(li et al. these data breaches come up from the data stolen from laptops, or any portable devices, these breaches will destroy the trust of patients and are very expensive to retrieve. as(lee, 2022)commented, new advanced techniques are rapidly increasing because of cybercriminals attacks which makes it more difficult to protect patients' data, such attacks occur because of outdated systems, cyber staff insufficient experience, and extremely valuable data which has incentives to payoff these data. medical errors could also result from systems' inability to send alarms regarding dangerous prescription combinations, which can happen as a result of facility modifications, changes in how physicians enter data, or problems with ehr design.(munana et al. population is registered in ehr system and we want to calculate the approximate number of the population who might face these errors in their ehr in the year 2019: 328,239,523 × 21 100 = 68,930,300 up to 30 million patients might have an incorrect medical history, and about 3 million will have billing errors. this is a huge number to detect and it is affecting the healthcare industry in a negative way, patients may lose the trust on hospitals system because of the wrong patient history and personal data, and it can also cause some medical diagnosis faults as a result of the incorrect lab results and medication, and it will affect the hospitals financials because of the billing issues. also, if there are some existing low-quality information source providers, it will be hard to estimate the correct source reliability, such as improper sensors that release incorrect data and the false data spread by spam users. this causes a significant gap in the departments' patient data, and resolving these issues requires additional work, such problems are faulty data, data breaches and risk to patient safety. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/853.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/853.txt new file mode 100644 index 0000000000000000000000000000000000000000..c9600a0c5d16dbfa13a589c55eb008b02a6c50a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/853.txt @@ -0,0 +1 @@ +the term "metaverse" was coined by neal stephenson in his novel "snow crash" in 1992, which is described as a computer-generated space fusing digital and physical realities. 
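Editor's note: a quick check of the population arithmetic quoted above; the US population figure registered in EHR systems and the 21% error rate are taken directly from the passage and are not independently verified here.

us_population_in_ehr = 328_239_523
error_rate = 21 / 100
print(round(us_population_in_ehr * error_rate))  # -> 68,930,300 patients potentially affected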
driven by the recent advances in various emerging technologies including ai, blockchain and extended reality (xr), the metaverse is emerged as an exciting upcoming reality in the near future and is expected to revolutionize the current social interactions, social experience, and social commerce. the social metaverse is a shared virtual realm that combines a series of interconnected virtual worlds (also known as submetaverses), in which users can play, shop, work, and socialize , . as shown in fig. 1, the social metaverse is composed of user-controlled avatars, digital things, and virtual goods, as below.• digital avatars. avatars are the virtual embodiments of human users in the metaverse. users can maintain various avatars in diverse social metaverse applications, and the created digital avatars can be human-like, animals, aliens, monsters, and other imaginary creatures. • digital things. digital things (e.g., virtual building and digital non-player character (npc)) in the social metaverse constitute the interactive virtual environments, which can be hyper spatiotemporal (e.g., in ancient or future worlds). • virtual goods. virtual goods are the tradeable commodities such as land parcels, skins, digital arts, whose value can be identified by the non-fungible token (nft) to create a metaverse economic system. all the above elements in social metaverse can be created by users, ai algorithms, or virtual service providers (vsps). the former refers to the user-generated content (ugc), the middle refers to the ai-generated content (aigc), while the latter refers to the professional-generated content (pgc). as the social metaverse is parallel and interactive to the real world, there exist the following three kinds of social interactions.• user-user interactions. human users along with their social interactions and social ties (e.g., classmates, friends, relatives, and strangers) constitute the human society. • user-avatar interactions. through xr and humancomputer interaction (hci) technologies, human users can immersively interact with their avatars by wearing xr helmets, smart glasses, etc. • avatar-avatar interactions. avatars can work, play, collaborate, and socialize with others (e.g., virtual dating, virtual tour, and virtual meeting) in the social metaverse.the social metaverse exhibits unique characteristics from the following perspectives.• immersive social experience with co-existence. in social metaverse, social activities are not through text, image, or video call, but are virtually in-person, bringing the immersion and co-presence that traditional social platforms lack. by capturing users' manifold dimensions and creating true-to-life replicas of physical entities, users/avatars can enjoy immersive sensory experience indistinguishable from the real world. • massive and intensive social interactions. the everexpanding scale of avatars and ugcs/aigcs contribute to massive and intensive interactive scenarios in the social metaverse. for example, avatars can hang out with friends in virtual parks or furnish and decorate their virtual houses, where all types of interactions can co-occur in a shared virtual space. besides, myriad virtual items and assets can be involved during social interactions. • co-created and hyper spatiotemporal social spaces.avatars are not only content viewers on a screen, but also digital creators of massive ugcs, contributors of metaverse tools, and participants for metaverse governance. 
in addition, the social metaverse is free from gravitational, spatial limits, and real life pressures. • open and long-lasting social ecosystem. the social meta-verse is an open space for any user with internet connectivity for various social activities that are previously performed in isolation. besides, as the social metaverse is physically persistent and not controlled by a single vendor, users' digital identities, social connections, experiences and assets can be long-lasting, thereby promoting a consistent virtual economy system and an independent value system that operate independently of any vendor. due to the intrinsic openness and decentralization features, metaverse will inevitably inspire new innovations for social applications and reshape the concepts of social interaction, social experience and social networks, making the social metaverse becoming a platform of such new social ecosystem.in social metaverse, by disassembling entrenched social platforms and dismantling siloed social applications, it allows massive and intensive social interactions among usercontrolled avatars, as well as immersive social experience with co-existence, in the long-lasting social spaces., virtual meeting, social dating, and vr live broadcast) with immersive user/avatar interaction in the social metaverse, a vast amount of fine-grained individual data, which can be private and sensitive, is required to be collected and analysed. to sum up, the following three main challenges need to be resolved under fl-empowered social metaverse: 1) efficient privacy-utility tradeoff; 2) robust federated ai (fedai) model aggregation; 3) ownership provenance of shared fedai model. specifically, we first leverage the widespread and long-term social connections among users/avatars to advance a social-aware hierarchical fl framework named socialfl for an improved privacyutility tradeoff in the social metaverse. furthermore, based on smart contracts and digital watermarks, we develop an automatic fedai model ownership provenance mechanism to prevent ai model thefts and collusive users in the social metaverse. the key challenges of applying socialfl in the social metaverse are discussed in section iii, and the potential solutions to the social metaverse are presented in section iv. driven by the recent advances in various emerging technologies including ai, blockchain and extended reality (xr), the metaverse is emerged as an exciting upcoming reality in the near future and is expected to revolutionize the current social interactions, social experience, and social commerce. as the social metaverse is parallel and interactive to the real world, there exist the following three kinds of social interactions. in social metaverse, social activities are not through text, image, or video call, but are virtually in-person, bringing the immersion and co-presence that traditional social platforms lack. besides, as the social metaverse is physically persistent and not controlled by a single vendor, users' digital identities, social connections, experiences and assets can be long-lasting, thereby promoting a consistent virtual economy system and an independent value system that operate independently of any vendor. 
besides, due to the heterogeneity of metaverse terminals, intermittent wireless communications, and low-latency service requirements in the social metaverse, it leads to low data sharing and model training efficiency in the current decentralized fl paradigm.in this subsection, we design a game-theoretical approach to form a nash-stable social cluster structure in the social metaverse. for improved learning efficiency in metaverse, the social links between users can be utilized to group users with similar interests and social trusts to produce customized dp noises for users with varying privacy levels., the meaning of message) relevant to the particular social mission is intelligently transmitted from the source to the receiver, thereby greatly alleviating data traffic in social metaverse applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/854.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/854.txt new file mode 100644 index 0000000000000000000000000000000000000000..1063fcf63a29a86a98c6e32734e50b3c34d3e3ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/854.txt @@ -0,0 +1 @@ +in 2022, the national science foundation (nsf) funded the computing research association (cra) to conduct a workshop to frame and scope a potential convergence accelerator research track on the topic of "building resilience to climate-driven extreme events with computing innovations." the cra's research visioning committee, the computing community consortium (ccc), took on this task, organizing a two-part community workshop series, beginning with a small, in-person brainstorming meeting in denver co on 27-28 october 2022, followed by a virtual event on 10 november 2022. the overall objective was to develop ideas to facilitate convergence research on this critical topic and encourage collaboration among researchers across disciplines.based on the ccc community white paper entitled computing research for the climate crisis1 , we initially focused on five impact areas (i.e. application domains that are both important to society and critically affected by climate change):we used these impact areas as a framework to help us identify participants in the workshop series and to plan our initial discussions and activities.we used the workshops to discuss these impact areas and to collaboratively identify the necessary building blocks and key use-inspired research thrusts that can be brought to bear to address the complex challenges surrounding climate change. building blocks are new abstractions, methods, and systems that can be used to facilitate and expedite technological innovation. research thrusts are specific research directions identified by the participants as having potential for effecting positive change in a particular impact area. research thrusts become building blocks if the participants identify them as being broad in technical scope and capable of being leveraged and specialized by a broad and diverse community of innovators to address seemingly disparate challenges across impact areas. finally, our overarching goal with this effort was to identify computing research opportunities that can be developed and deployed following the timelines, guidelines, and goals described by the convergence accelerator program model.we selected participants for the first workshop, in consultation with cognizant nsf program officers. the second, virtual workshop, was open to everyone. 
both workshops included those with expertise in or across these impact areas. during the crafting of the participant lists-which appear in the appendices of this report-we also paid attention to demonstrated ability for interdisciplinary thinking, as well as to attaining a diverse and broad representation of the computing research community (demographics, institution type, and career stage).the goal of the in-person workshop was to refine the set of impact areas and identify research thrusts and building blocks. to enable this, we communicated with the participants in advance to establish the goals of the workshop: to brainstorm computational research that brings together collaborative multidisciplinary teams to create solutions with direct positive impact on climate change. in each impact area, we identified a "lead" (depicted in bold font in our participant list) who gave a brief presentation to define that area and frame some of the associated research challenges. the participants then went into breakout sessions to have focused discussions of research potential for that impact area, guided by three questions:1. what are the key building blocks in computing research that are needed to expedite innovation in this impact area? 2. what use-inspired research thrusts can be brought to bear on this impact area to advance climate resilience? 3. what are the near-term metrics for success in this impact area?our intent with these questions was to build consensus around the climate crisis impact areas worthy of investment and building blocks that spanned areas (thus having potential for significant and potential near-term impact). we also asked the groups to construct an initial set of research thrusts for each area to indicate whether or not there is sufficient and necessary interest from the computing research community to pursue these thrusts as part of a convergence accelerator effort.following the breakout session, each group reported their findings to the whole group. these sequences (framing, breakout, report-back) were repeated for multiple impact areas. this was followed by a high-level synthesis discussion to review the topics and concepts that came up in multiple sessions, which culminated in a preliminary set of building blocks (listed below) and a concept matrix (depicted below) of research thrusts for the building blocks and the impact areas. using this structure, we pursued a full-group brainstorming session in which participants used an electronic whiteboarding system (mural.io) to identify, discuss, and organize the research thrusts. this mural represented the first major outcome of the brainstorming workshop.in 2022, the national science foundation (nsf) funded the computing research association (cra) to conduct a workshop to frame and scope a potential convergence accelerator research track on the topic of "building resilience to climate-driven extreme events with computing innovations.we used the workshops to discuss these impact areas and to collaboratively identify the necessary building blocks and key use-inspired research thrusts that can be brought to bear to address the complex challenges surrounding climate change. research thrusts are specific research directions identified by the participants as having potential for effecting positive change in a particular impact area. 
research thrusts become building blocks if the participants identify them as being broad in technical scope and capable of being leveraged and specialized by a broad and diverse community of innovators to address seemingly disparate challenges across impact areas.the goal of the in-person workshop was to refine the set of impact areas and identify research thrusts and building blocks.our intent with these questions was to build consensus around the climate crisis impact areas worthy of investment and building blocks that spanned areas (thus having potential for significant and potential near-term impact). we also asked the groups to construct an initial set of research thrusts for each area to indicate whether or not there is sufficient and necessary interest from the computing research community to pursue these thrusts as part of a convergence accelerator effort. this was followed by a high-level synthesis discussion to review the topics and concepts that came up in multiple sessions, which culminated in a preliminary set of building blocks (listed below) and a concept matrix (depicted below) of research thrusts for the building blocks and the impact areas. this set emerged from the group discussions and brainstorming and consisted of principles that the group felt should be addressed by any research project in this area, regardless of impact area, building block, or research thrust.❖ impact areas -application domains that are both important to society and critically affected by climate change, ❖ research thrusts with significant potential for addressing climate-induced challenges within and across impact areas, ❖ building blocks -research advances that span impact areas and thus have potential for addressing multiple challenges concurrently, and ❖ cross-cutting principles that all research projects should follow, regardless of impact area and technology.the workshops enabled the research community to identify together a set of building blocksareas of needed innovation that are key to combating the negative impacts of climate change across impact areas in the near term. within each cell (green box) in the concept matrix that was constructed with the mural tool are the research thrusts-specific research directions identified by the participants as having potential for impact at the intersection of a particular impact area and a particular building block.the pi team then performed extensive analysis of these research thrusts to identify those that occurred repeatedly across impact areas, and therefore are the major points of opportunity for near-term computing research to support climate adaptation. these building blocks, which are separated out into lists below each column header in figure1as described in the convergence accelerator program description, these building blocks represent research that is broad in technical scope, has far-reaching impact on society, builds upon foundational research, and requires a multidisciplinary, convergent research approach to be successful. we have also identified key impact areas on which use-inspired research can focus, as well as computing research building blocks that build upon foundational research and require a multidisciplinary, convergent research approach in order to succeed in producing positive and far-reaching impacts for society. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/855.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/855.txt new file mode 100644 index 0000000000000000000000000000000000000000..34c1aaa7b6d05868381582c82f10323597a20c09 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/855.txt @@ -0,0 +1 @@ +the electrification of automotive power systems is a mainstream solution for current nevs (new energy vehicles). according to the nev roadmap proposed by china in 2020, it is estimated that by 2035, china's nevs will account for more than 50% of total vehicle sales, of which battery electric vehicles (bev) will account for more than 95% of nevs (1). compared to fuel vehicles, electric vehicles have several advantages, including fast acceleration and zero emissions. however, range anxiety is one of the main obstacles to the popularization of electric vehicles under current technical conditions (2). one of the most widely used definitions of range anxiety is by rauh et al. (3), in which, range anxiety is defined as "a stressful experience of a present or anticipated range situation, when the range resources and personal resources available to effectively manage the situation (e.g., increase available range) are perceived to be insufficient". the range anxiety is manifested as the driver's uncertainty about whether they can reach the destination with the vehicle's remaining battery capacity, resulting in cognitive, emotional, behavioral, and physiological changes. thus, understanding the factors leading to range anxiety can help alleviate bev users' range anxiety from vehicle design, infrastructure construction and driver training perspectives of view.in bevs, the range estimation systems (ress) can support drivers' recharging decisions before or during the trips. however, the range estimation of the bev is highly susceptible to the traffic environment (e.g., congestions or not), natural environment (e.g., temperature and weather) and drivers' driving habits (e.g., aggressive or conservative). the difficulty in accurately estimating bev's range capability may exaggerate drivers' range anxiety. although the ress in bevs have become more and more reliable with the development of technology in recent years, drivers' trust in these systems is questionable. according to the range anxiety model by rauh et al. (3), trust in the res is one of the dominating factors of range anxiety. however, to the best of knowledge, the factors influencing drivers' trust in the ress are yet to be explored.the res can be regarded as a specific type of automation, which helps users to integrate different parameters of the battery, provide an estimation of the current status of the power system and give energy replenishment suggestions when necessary. thus, the framework of trust in automation may be adapted to inform the formation of trust in res. in hoff et al. ( 4), a threelayer framework has been proposed to conceptualize trust in automation. according to this framework, the trust in automation can be organized into three facets: dispositional trust, situational trust, and learned trust. each facet can be further influenced by a number of factors. 
specifically, dispositional trust refers to "long-term tendencies arising from both biological and environmental influences" (e.g., age and gender of the users); situational trust can be associated with both external variability (i.e., types of the system, its complexity and the difficulty of the tasks) and internal variability (i.e., transitory characteristics that depend on the context); and learned trust refers to "an operator's evaluation of the systems learned from past experience or current interactions" (e.g., knowledge of the system). this framework, according to (4), can be used in a variety of scenarios with human operators and automated systems in the loop. however, this framework only provides guidance on potentially influential factors of trust: it may not list all factors that can be related to the ress in bevs and, conversely, not all factors listed in hoff et al. (4) are relevant to ress. in this study, following the trust in automation framework by hoff et al. (4), a questionnaire was designed and distributed to investigate the factors that may influence bev users' trust in ress. further, we investigated how trust in the ress can affect bev users' charging behaviors. this study has centered on bev users in mainland china. in 2021, china contributed 85% of ev sales worldwide (5). thus, targeting bev users in china can provide insights into users' attitudes toward bevs in a relatively mature market. in hoff et al. (4), a three-layer framework has been proposed to conceptualize trust in automation. considering the wide usage of smartphones and the prevalence of battery anxiety among smartphone users, in our study we assessed bev users' experience with and attitudes toward their smartphones, aiming to identify the association between smartphone usage and trust in ress in bevs. two statistical models were built in "sas ondemand for academics", in order to investigate: 1) the influential factors of bev users' trust in res (trust model); 2) how bev users' trust, along with other covariates, may affect users' charging behaviors when using bevs (charging behavior model). as shown in table 3, gender, region of driving, system knowledge, and trust in smartphones were found to be significant factors of users' trust in the res of bevs. as for the continuous variables, system knowledge was found to be negatively correlated with trust in the res of bevs, while trust in the soc estimate of smartphones was found to be positively correlated with trust in the res of bevs. the results have provided evidence to support the adoption of the framework by hoff et al. specifically, gender, as a dispositional-trust-related factor, has been found to influence bev users' trust toward the res, with females showing higher trust toward the res. northern drivers exhibited the least trust toward the res of bevs; and eastern drivers exhibited marginally less trust toward the res of bevs compared to southern drivers, potentially because of the impact of situation complexity and task difficulty on users' level of trust in the system (27). at the same time, a positive relationship between system usability and trust in the res of bevs has been observed, indicating that a well-designed system can increase users' trust in it (19). in other words, users who trust more in the soc estimation of smartphones might be those who are prone to show high trust in automation, and it is not surprising that they would show high trust in the res of bevs.
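the two models described above were fitted in sas ondemand for academics; as a rough, non-authoritative illustration of the same analysis pattern, the sketch below fits comparable models in python with statsmodels. the column names, toy data, and the choice of ordinary least squares for both models are assumptions for illustration only, not the study's actual variables or model family.

```python
import pandas as pd
import statsmodels.formula.api as smf

# hypothetical survey responses; columns and values are illustrative, not the study's data
df = pd.DataFrame({
    "trust_res":        [4.2, 3.5, 3.9, 2.8, 4.0, 3.1, 3.7, 2.9],
    "gender":           ["f", "m", "f", "m", "f", "m", "f", "m"],
    "region":           ["south", "north", "east", "south", "north", "east", "south", "north"],
    "system_knowledge": [3.0, 4.5, 3.8, 4.1, 2.9, 4.4, 3.2, 4.0],
    "trust_phone_soc":  [4.0, 2.9, 3.6, 3.1, 4.2, 2.8, 3.9, 3.0],
    "charge_frequency": [3.1, 4.0, 3.4, 4.2, 3.0, 4.1, 3.3, 4.3],
})

# "trust model": which factors relate to trust in the range estimation system (res)?
trust_model = smf.ols(
    "trust_res ~ C(gender) + C(region) + system_knowledge + trust_phone_soc", data=df
).fit()
print(trust_model.params)

# "charging behavior model": does trust in the res (plus covariates) predict charging behavior?
behavior_model = smf.ols("charge_frequency ~ trust_res + system_knowledge", data=df).fit()
print(behavior_model.params)
```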
trust in res of bevs, surprisingly, did not influence bev users' charging behaviors but we have observed a connection between users' phone charging behavior and bev charging behavior. in this study, through an online survey, we investigated the factors that can influence bev users' trust in res, and further explored how trust in res as well as bev users' experience with other electronic devices can influence bev users' charging behaviors. the results show that the dispositional-, situational-, and learned-trust related factors all affect users' trust in res of bevs, supporting the validity of the three-layer trust framework (4) among bev users. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/856.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/856.txt new file mode 100644 index 0000000000000000000000000000000000000000..138b38fe603d3debfd3318fe749929f8829e5791 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/856.txt @@ -0,0 +1 @@ +the emergence of artificial intelligence(ai) has created a tremendous technical change in recent years.ai as defined by marvin minsky, artificial intelligence(ai) is the science of making machines do things that would require intelligence if done by men. this topic began as a research area of computer science engineering, but due to its significant absorption of ideas from neurology, cognitive science, philosophy and other disciplines, it has become extremely interdisciplinary, making it difficult even for experts to find an agreeable definition of artificial intelligence. it is a system that has capabilities(such as language or perception) and intelligent conduct that were originally thought to be exclusive to mankind and carry out a certain task. in simpler terms, artificial intelligence (ai) is a discipline of computer science dealing with the emulation of human intelligence by behaving intelligently. this formidable technology has brought about a shift in the way we live in the world.by incorporating ai-based solutions, sectors including manufacturing, healthcare, etc, are undergoing a sea of change in their operational methodologies. around the world, the education sector exhibits a similar pattern. given the largely advantageous digital changes that ai brings into the system, ai has undoubtedly created challenges to traditional ways of education. for the past 30 years, researchers have been studying integration of artificial intelligence in education(aied).aied has achieved significant success in strengthening connections between teachers and student where the connections were lacking or needed improvement. with the use of ai effective teaching techniques, evaluation systems, and feedback mechanisms can also be introduced. additionally, weaknesses in the existing systems can be identified, and variety of student responses like boredom and concentration can be captured to make learning a interactive environment.this essay provides a survey of the most recent advancements in artificial intelligence in education. it starts out by going over numerous fields of education and learning that have made us of ai, then shifts to the areas on which we see the industry concentrating, and it ends with a remark on further fields of development with ai in education providing a succinct overview of the domain. 
it starts out by going over numerous fields of education and learning that have made use of ai, then shifts to the areas on which we see the industry concentrating, and it ends with a remark on further fields of development with ai in education, providing a succinct overview of the domain. furthermore, given the interdisciplinary nature of aied, relevant research is frequently published in conferences on ai and learning science such as the international conference on learning representations (iclr), the international conference on machine learning (icml), the international conference of the learning sciences (icls), and the conference and workshop on neural information processing systems (neurips). it is essential for teachers to upgrade and retrain themselves in order to adapt to this generation, particularly the new skills they must pick up in order to fully profit from aied. in order to evaluate the data supplied by these ed-tech tools and to decide what kind of data and analytics tool they will need to better understand students, teachers will also need to develop their analytical skills. another study introduced a neural geometric solver that is capable of automatically solving geometric problems, trained using a dataset named geoqa; the study also proposed the usage of jigsaw location prediction, geometric elements prediction, and knowledge points prediction using neural nets and an lstm as a decoder-encoder pair. a study by singh and karayev developed a model for automatic handwriting text identification from images and translation into text, all done in sequence using neural networks and transformers, which may be used for easy document generation, including notes, and easy assignment verification. ocr is a well-studied topic, but latex-based ocr is a new concept described in a recent study that uses neural nets to directly output text from a latex-based document, which can help educators decode research papers and keynotes to text while saving time and effort. the field of ai for education must continue to advance in order to benefit not only education (increasing inclusivity, helping students grasp complex ideas, and increasing accessibility), but also the development of reasoning-capable ai systems. the development of ai systems that support human capacities should be considered in addition to the development of autonomous ai systems. to construct robust ai systems in the field of education, study must be done in the following areas: (a) similar to the computer vision field, we need better benchmarking datasets such as imagenet and coco; (b) neuro-symbolic ai that can ensure better ai for education systems. utilizing cutting-edge technology like text mining, learning analytics, and data visualisations is also required to progress aied research. emerging educational research approaches, in particular educational design research (edr), are ideal for investigations on revolutionary technologies like aied and are strongly advised because they enable educators to integrate their research inquiries as part of the technology development and implementation cycle in real-world settings. the main highlights of the paper are to make readers cognizant of various problems that exist in today's education system, what technologies exist in today's ai that can fill those gaps, and in what manner today's ai technologies lack the capability to build next-generation ai educational tools.
as ai in education can have both positive and negative side effects so the various stakeholders should correct calculations about its trade-off before deploying ai technologies in the real world. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/857.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/857.txt new file mode 100644 index 0000000000000000000000000000000000000000..6736cc48b5c83e737916e67ccc129bd4ddb32b47 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/857.txt @@ -0,0 +1 @@ +impact. if there is one term that comes up again and again when it comes to evaluating scientific research and its authors, it is impact. it does not seem to matter whether a software engineering researcher feels closest to mathematicians, social scientists, or engineers:1 all researchers want the results of their work to at least reach the intended target group.it is all the more remarkable that the work of a software engineering researcher usually ends with the publication of a paper -so to speak, at the moment when the greatest possible impact could be generated in the first place. although conference publications are usually accompanied by an obligatory presentation of the research paper and the papers themselves are often well written, in reality this is not sufficient to reach the potentially interested target audience: conference presentations are usually only accessible to a small part of the target group, for example, due to local or time restrictions. plus, we can all still improve a lot when it comes to how we design our presentations to convey knowledge more effectively and stay positively in the minds of our audiences. the scientific papers themselves are also only accessible to a few people, either because they are published behind paywalls or simply because non-scientists have no routine in reading scientific literature. on top of this comes that those who would have access must first become aware of the new research findings.in breakout sessions at various seminars, we discussed with members of the software engineering research community what their understanding of science communication is. all the answers fell into two categories: first, drawing attention to and disseminating scientific findings, and second, establishing oneself as a scientist. the former aspect fits well with the definition of burns et al.: "science communication (scicom) is defined as the use of appropriate skills, media, activities, and dialogue to produce one or more of the following personal responses to science: awareness, enjoyment, interest, opinion-forming, and understanding" .depending on the current demand for the research topic, the combination of expertise and good science communication can then result in great publicity for one's own person. for example, during the corona pandemic, virologists in some countries became talk show celebrities, whether intentionally or not. of course, this does not have to be the goal of every software engineering researcher. establishing oneself within one's own research community is also more often the case than becoming a person of public interest.in any case, we argue, as do some of our peers , that good science communication can and should be learned in science education. and since both the researchers themselves and their target audiences would benefit from better science communication, we further argue that it should be taught as early as possible. 
we therefore decided to offer a science communication seminar for bachelor students of computer science curricula. since both, us teachers and the students, were very satisfied with the result of the first seminar implementation, we would like to share our experiences and lessons learned in this paper. we provide all information needed to successfully implement similar science communication seminars at other universities, enabling the next generation of computer science researchers to effectively bridge the gap between academia and their target audience. we therefore decided to offer a science communication seminar for bachelor students of computer science curricula. students take the position of a researcher who, shortly after publication, is faced with having to draw attention to the paper and effectively communicate the contents of the paper to one or more target audiences. 2) students can reflect on the state of the art in communicating scientific software engineering findings and should be encouraged to try things that might be beneficial to science communication, even if they seem unconventional at first.we complemented the examples of good knowledge communication channels with a brief collection of resources specifically designed to improve science communication in general, and some that already provide examples of what we believe to be good science communication channels in computer science. for the students, the most important thing at that moment was to understand that in the seminar, everyone has to deal with exactly one already published paper and that the selection of this paper was now the next step. for each of the 10 papers, each student now had to choose one of three options: "top 3" if the paper is a personal favorite, "would be okay" if the student is interested in the paper, and "rather not" if the paper does not interest the student at all. we instructed the students to understand their paper as best they could during this phase and to take notes on their first impressions of the paper, such as what they thought about the content and what they thought about how the paper authors presented their work. we consider the first learning objective to be fulfilled if the content of the assigned paper is correctly conveyed in the communication strategy and if certain developed ideas or criticisms of the paper authors' communication take into account the current constraints of the scientific process and paper format specifications. two students maintained their high level of interest in science communication, and only one student's interest in the topic decreased slightly during the course. one student felt it was very well done because "attention was paid to personal preference in paper selection, which made it easier to focus on science communication instead of having to struggle with a paper whose content one can't identify with". one student's feedback further confirmed our assumption that at the time of the preference submission, we could not yet assume that the students had clear understandings of the paper contents: "i think the selection was good. it could even be a good variation for a future seminar that some of the papers the students can choose from deal with science communication and thereby provide the theoretical input themselves.the learning objectives (ii-b) of our science communication seminar for computer science students should be easily transferable to other research disciplines. 
in their paper, two concepts are being tested in which (phd) students have to communicate their scientific work to middle school students.to summarize, in this paper we have described the design of a science communication seminar in which students put themselves in the role of a researcher who, shortly after publication, has to draw attention to their paper and communicate its contents. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/858.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/858.txt new file mode 100644 index 0000000000000000000000000000000000000000..7ad9d7334145747f0219f6d418792d8464112f2d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/858.txt @@ -0,0 +1 @@ +the lack of coviability indicators on brazilian territories often prevents local managers, farmers, and administrators from correctly establishing a diagnosis, monitoring its evolution, planning daily and seasonal activities of the natural areas with considering carefully information about the life cycle of flora and fauna species living in the area (reproduction period, rarity, disturbance tolerance, bird migration key zones, etc.) and the intactness of their habitats to better preserve and restore the milieu. without such indicators at various spatial and temporal scales, adapting the governance and justifying some regulations of the territory is a difficult task for the local actors. relevant sources of information are usually not or under-exploited; eco-tourists, farmers, and populations are not informed of their impact on nature and they may not understand and accept such regulations. global abrupt disconnection from nature is not an option for the now and future generations, the optimal balance between anthropization, ecological welfare, and biodiversity wealth has to be found. we believe that practical and operational solutions based on ai technologies can help decision-making in this context and facilitate the necessary transitions.in our project, we revisit several fundamental problems in agroecology using data science and machine learning under the lens of coviability and its multiple dimensions. several work have already considered reinforcement learning applied to agroecology: e.g., for irrigation scheduling (yang et al., 2020), for optimizing fertilizer usage as well as enhancing the yield (e.g., cropgym environment (overweg et al., 2021)), or for watershed management (mason et al., 2016) and biological environment (binas et al., 2019). however, to the best of our knowledge, none has been proposed to develop actionable research products for monitoring coviability and discovering transition pathways towards coviability. in what follows, we enumerate our project's objectives in addressing four scientific challenges and shortly present our case studies with the application of ml techniques. challenge 1. modeling coviability indicators from multimodal, multisource, multiscale data fusion. as the volume and variety of socio-environmental data and digital resources have dramatically grown in the last decade, it has become crucial to: (1) align, interconnect, integrate all relevant silos of data at different spatial and temporal scales local, regional, territorial) as illustrated in fig. 
1: from satellite images, pictures from visitors, data collected through largescale citizen science platforms such as pl@ntnet 5 (goëau 2 https://www.ondacbc.com.br/?lang=en 3 http://inct-odisseia.i3gs.org/ 4 https://www.icmbio.gov.br/cemave/ 5 https://plantnet.org et al., 2013), ebird6 , inaturalist7 or movebank8 ; (2) analyze and explore interrelationships across a variety of disciplines; (3) develop relevant indicators for assessing vulnerability in socio-ecological systems; and (4) present the indicators via dashboards and adequate visualization artifacts to the actors, e.g., ad-hoc doughnut representations (raworth, 2017). in this context, the integration of heterogeneous and multimodal data, preparation, and featurization are challenging tasks. challenge 2. on-site identification of plants, animals, and complex scenes for computing coviability indicators. the impacts of climate changes and human activities on biodiversity, ecosystem functioning and environmental resources are numerous, complex, distributed differently according to temporal and spatial scales. the complex and likely reciprocal causal relationships between humans and the environment must be understood according to systemic, multi-scale, and transdisciplinary approaches integrating and interconnecting a large variety of datasets from ecology, remote sensing, local authorities, etc. given the data uncertainties and what we know about the ecosystem of a site, the analysis of biodiversity remain challenging for automatically identifying multiple plants and animal species as well as decomposing the landscape elements precisely. state-of-the-art techniques allowing to identify plants and animals from individual pictures (e.g., pl@ntnet or inaturalist applications) cannot be straightforwardly adapted to our framework and need to be deeply revisited to capture the complexity of our domain to compute local coviability indicators with uncertainty quantification at every data processing step. challenge 3. finding coviability pathways from the diagnostic to a target state. understanding how to transition from the current state of coviability to a state that reconciles (i) biodiversity conservation and ecosystem functioning, (ii) social welfare, and (iii) economic resilience is another major scientific issue of our project. as shown in fig. 1, depending on the profile of the actors (e.g., farmers, indigenous populations, administrators) as the main users of our technology, the considered scale, the constraints associated to the territory (e.g., economic viability, presence of species close to extinction), and a coviability target co-specified with the actors, an optimal trajectory need to be discovered to support adequate governance schemes. due to the inherent critical nature of adopting coviability pathways, it is not foreseeable to blindly provide a ranking of trajectories without taking into account the context and multiple alternative spatial, temporal, and actor-dependent viewpoints. to keep the experts and local actors in the loop, we take into account their feedback at every step: from the flora/fauna scene identification/validation, the choice and composition of coviability indicators, the design of dashboards and results of recommendation, scoring, and explanation of discovered optimal pathways. 
we revisit this challenge as an optimization problem and use causal reinforcement learning and various op- this paper presents our ongoing french-brazilian collaborative project initiated in 2021 which aims at: (1) establishing a diagnosis of socio-ecological coviability for several sites of interest in nordeste, the north-east region of brazil (in the states of paraíba, ceará, pernambuco, and rio grande do norte respectively known for their biodiversity hotspots, droughts and vulnerabilities to climate change) using advanced data science techniques for multisource and multimodal data fusion and (2) finding transition pathways towards coviability equilibrium using machine learning techniques. among an infinite space of possible transition strategies, ai technologies and reinforcement learning (rl) in particular is applied to propose estimations and predictions of potential coviability pathways (or trajectories) for transitioning from a diagnosis state to an ideal coviability target (or intended view) defined by the user of the technology.the lack of coviability indicators on brazilian territories often prevents local managers, farmers, and administrators from correctly establishing a diagnosis, monitoring its evolution, planning daily and seasonal activities of the natural areas with considering carefully information about the life cycle of flora and fauna species living in the area (reproduction period, rarity, disturbance tolerance, bird migration key zones, etc., cropgym environment (overweg et al. however, to the best of our knowledge, none has been proposed to develop actionable research products for monitoring coviability and discovering transition pathways towards coviability.br/?lang=en 3 http://inct-odisseia. challenge 2. the complex and likely reciprocal causal relationships between humans and the environment must be understood according to systemic, multi-scale, and transdisciplinary approaches integrating and interconnecting a large variety of datasets from ecology, remote sensing, local authorities, etc. understanding how to transition from the current state of coviability to a state that reconciles (i) biodiversity conservation and ecosystem functioning, (ii) social welfare, and (iii) economic resilience is another major scientific issue of our project. in doing so, we expect to enhance the biological connectivity of biodiversity hotspots and promote agroecologi- (3) develop interactive maps, dashboards, explainable classification, and social network tools at the regional scale to support participatory decision-making on spatial strategies fostering ecological and economic connectivity, biodiversity restoration and sustainable production in the nafac region. developing strategies to boost the restoration of caatinga's biodiversity and related ecosystem functions, while promoting the socio-productive inclusion of human populations facing social vulnerability, is the key to promoting socioecological coviability. we will contribute to the planned observatory of coviability transitions by providing niche models of potential distributions of 600 native plant species that are suitable for use in ecological restoration associated to socio-productive inclusion projects under future climate scenarios. 
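as a rough illustration of what "finding a transition pathway" could look like computationally, the sketch below frames a toy coviability problem as a small markov decision process and applies tabular q-learning. the state encoding, the three actions, the reward, and the target state are all invented for illustration and are not the project's actual model; in particular this sketch is plain q-learning, not the causal reinforcement learning mentioned above.

```python
import numpy as np

# toy mdp: state = (biodiversity level, economic level), each in {0..4}
# actions: 0 = restore habitat, 1 = intensify production, 2 = mixed agroecological practice
N_LEVELS, N_ACTIONS = 5, 3
TARGET = (4, 3)  # hypothetical coviability target

def step(state, action, rng):
    bio, eco = state
    if action == 0:
        bio, eco = min(bio + 1, 4), max(eco - 1, 0)
    elif action == 1:
        bio, eco = max(bio - 1, 0), min(eco + 1, 4)
    else:  # mixed practice: small, noisy gains on both axes
        bio = min(bio + rng.integers(0, 2), 4)
        eco = min(eco + rng.integers(0, 2), 4)
    reward = -abs(bio - TARGET[0]) - abs(eco - TARGET[1])  # closer to target = better
    return (bio, eco), reward

rng = np.random.default_rng(0)
q = np.zeros((N_LEVELS, N_LEVELS, N_ACTIONS))
for _ in range(5000):
    s = (rng.integers(N_LEVELS), rng.integers(N_LEVELS))
    for _ in range(20):
        a = rng.integers(N_ACTIONS) if rng.random() < 0.1 else int(np.argmax(q[s]))
        s2, r = step(s, a, rng)
        q[s][a] += 0.1 * (r + 0.9 * np.max(q[s2]) - q[s][a])  # standard q-learning update
        s = s2

# greedy pathway from a degraded diagnosis state towards the target
s = (1, 1)
for _ in range(6):
    a = int(np.argmax(q[s]))
    s, _ = step(s, a, rng)
    print(a, s)
```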
our goals: (1) identify, in partnership with local women, socioeconomically viable biodiversity-based products; (2) use ml, existing niche models, population projections, and domestic and international market trade information to predict the short-term and future viability of biodiversity-based value chains; (3) provide predictions for the geographic variation of ideal plant assemblages ecologically suitable for the restoration of caatinga biome under current and future climate conditions that will optimize locally the balance between functional diversity of plants and their bioeconomic potential.br/en/press/headlines/38360/esperanca-na-caatinga have maintained the traditional consumption of atta ants alive in the catimbau region, while this is a declining practice in brazil; (2) use deep rl and simulation to plan vegetation restoration plots surrounding agricultural fields to provide alternative resources for atta ants and sustain alternative biodiversity-based ecosystem services and economical alternatives to local farmers; (3) map the geographic distribution of edible insects and other species used as food sources, contributing to the identification of potential links between ecosystem restoration and traditional knowledge and practices. our project aims at helping draw up coviability diagnoses of a territory by integrating and exploiting various multimodal datasets, predicting coviability scores using ml, and finding socio-ecological transition pathways using rl. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/859.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/859.txt new file mode 100644 index 0000000000000000000000000000000000000000..38aafa7bb4fa47896561b4790e528da11a5e6a28 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/859.txt @@ -0,0 +1 @@ +the emergence of large language models such as gpt-3 , transformer models that are trained without supervision on massive text datasets has resulted in systems with remarkable text generation capabilities. one particularly interesting aspect of these models is that their behavior can be configured by a prompt, the initial text provided to the model, which establishes a pattern that the model attempts to continue.general purpose large language models can be fine-tuned on specific corpora to provide expertise in a particular domain. one such model is the openai codex model , a 12 billion parameter version of gpt-3 , fine-tuned on code samples from 54 million public software repositories on github. this model powers github co-pilot , which primarily provides code-completion services within an integrated development environment. we wondered whether such a model could power a conversational programming assistant and perhaps approach the vision laid out by rich and waters for their programmer's apprentice . we developed the programmer's assistant prototype to explore this possibility, and to test whether potential users would find this sort of system useful and desirable . in this paper we will review the steps taken to engineer the prompt for the programmer's assistant that used the codex model to power an interactive conversational assistant, and how we evolved the prompt to establish the desired persona and behavior. 
in this paper we will review the steps taken to engineer the prompt for the programmer's assistant that used the codex model to power an interactive conversational assistant, and how we evolved the prompt to establish the desired persona and behavior.to generate conversation in the programmer's assistant prototype, we establish a script-like pattern in the prompt in which two characters, the user and the assistant, are participating in a dialog.the initial prompt we use for the programmer's assistant consists of a prologue that introduces the scene for the conversation, establishes the persona of the assistant, sets a tone and style for interaction, and provides some ground rules about the role it plays and how it behaves. " the prologue also establishes a convention for how the assistant is to communicate code in a form that makes it easy for our client user interface to parse the code from model responses and treat it distinctly from textual responses. user : thanks ! socrates : you ' re welcome ! user : now could you show me how to write a factorial program in python ? socrates : i would be happy to .when a new conversation starts, the prompt starts out as this initial prompt, which is not sent to the model for generation until the first user entry is appended. at each turn in the conversation, the user's entry is added to the prompt, and "socrates:" is appended to the end of the prompt to indicate that the model should generate the assistant's response. socrates will answer questions and write code to help the user develop programs , 3 but doesn't assign work to the user, quiz the user, or ask questions except for clarification . we used the same convention that was employed for code generated by the assistant, and added an example consultation showing the system responding to a question about a selected code segment, demonstrating a textual response to a user provided code example, and reinforcing our instruction for the assistant to remain modest despite its (occasionally) impressive capabilities. the evolved programmer's assistant prompt 1 this is a conversation with socrates , an eager and helpful , but humble software engineering 2 assistant . socrates will answer questions and write code to help the user develop programs , but 3 doesn 't assign work to the user , quiz the user , or ask questions except for clarification . user : thanks ! socrates : you ' re welcome ! user : now could you show me how to write a factorial function in python ? socrates :i will give it a try . listing 6 shows a conversation with the programmer's assistant where the user interacts with the assistant to remedy an initially unsatisfactory response to a code generation request.this paper describes how we engineered a prompt that enabled a code-fluent large language model to behave as a conversational programming assistant capable of carrying on extended discussions about software development issues, and how we subsequently evolved that prompt to make the assistant more humble, forthcoming, and helpful, as well as providing the assistant with additional skills and making it capable of artifact-centric conversation. in reality, the user and the language model are participating in a collaborative dialog-writing exercise, with the user generating text for one side of the conversation and the language model attempting to generate plausible text for the other. 
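the turn-taking mechanics described above (append the user's entry to the growing prompt, then append the assistant's name to cue the model to respond) can be illustrated with a short sketch. the `complete()` call below stands in for whatever completion endpoint is used; the function name, stop handling, and prologue text are illustrative stand-ins, not the exact prompt or api from the paper.

```python
def complete(prompt: str) -> str:
    """Placeholder for a call to a code-fluent completion model."""
    raise NotImplementedError("wire this up to your completion endpoint")

PROLOGUE = (
    "this is a conversation with socrates, an eager and helpful, but humble "
    "software engineering assistant.\n"
)

class Conversation:
    def __init__(self):
        self.prompt = PROLOGUE

    def ask(self, user_text: str) -> str:
        # append the user's turn, then cue the model to speak as the assistant
        self.prompt += f"user: {user_text}\nsocrates:"
        reply = complete(self.prompt)              # model continues the script-like pattern
        reply = reply.split("\nuser:")[0].strip()  # cut off any extra user turn it invents
        self.prompt += f" {reply}\n"               # keep the reply so context persists across turns
        return reply

# usage (requires a real complete() implementation):
# chat = Conversation()
# print(chat.ask("could you show me how to write a factorial function in python?"))
```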
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/86.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/86.txt new file mode 100644 index 0000000000000000000000000000000000000000..a420cee89ddaf113ca9bc0d6d622a37a1745ede1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/86.txt @@ -0,0 +1 @@ +representing real-life data often implies the use of many features, leading to an item being represented by a highdimension vector. note that in this paper, we use indistinctly the word 'feature' or 'attribute' or even 'variable'. managing all these features contributes to the need for high computational power in terms of space and time, and could slow down the construction of a predictive model and degrade the predictive accuracy of the constructed model. two options are then available:1. dimension reduction by projecting the initial dataset of high dimensionality to a new feature space with lower dimensionality. however, in the new space, there is no guarantee that all the features are relevant for predicting the label. principal component analysis (pca) is one of the most popular techniques.2. feature selection, on the other hand, directly selects a strict subset from all available features. in a classification task, it is often the case that only a subset of features is relevant i.e. nonredundant and informative in determining the label. unfortunately, in the real world, without a deep knowledge of the problem domain, relevant features are rarely a priori knowledge.feature selection methods can be classified into diverse categories; for more information, please see for an exhaustive investigation). generally, they can be classified as:• filter methods, where features are independently evaluated and associated to a relevance score. they are independent of any ml algorithm (ml agnostic) as the filtering is done by observing the data only. usually, these methods cannot detect redundancy.• wrapper methods, which initially evaluate all features for an ml algorithm. then, select a strict subset of the features, then retrain and retest. the whole process is embedded in a loop on the power set of the features.the features belonging to the subset that gives the highest accuracy are considered relevant. it is effectively, an exhaustive search, so it is highly resource-consuming.• embedded methods are a trade-off between filter and wrapper methods where feature selection is embedded into the training process of an ml algorithm. they generally provide good results but the selected features are tailored to the chosen ml algorithm. this implies that the selected features may not be the most effective for other algorithms.filter-based methods generally proceed in 2 steps:• score each feature individually: the higher the score, the more relevant the feature.• eliminate all features whose scores are below a given threshold.in this paper, we propose analogical relevant index (ari), a new filter-based method for feature selection: as such ari is ml agnostic. ari is based on a statistical test and is inspired by the concept of analogical proportion. an analogical proportion is a statement involving 4 items: "a is to b as c is to d", and it is often denoted as a : b :: c : d. its semantics is that a differs from b as c differs from d, where a, b, c, d are items represented as vectors of the same dimensions. 
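before ari itself is defined, the generic two-step filter pipeline it plugs into (score each feature independently, then keep only the best-scoring ones) can be sketched as follows. the snippet is a minimal illustration that uses a mutual-information scorer as a placeholder; it does not implement ari, and the toy data and names are assumptions.

```python
import numpy as np
from sklearn.feature_selection import mutual_info_classif

def filter_select(X, y, k):
    """Generic filter-based selection: score each feature independently, keep the top k."""
    scores = mutual_info_classif(X, y, discrete_features=True, random_state=0)  # placeholder scorer, not ari
    ranked = np.argsort(scores)[::-1]
    kept = [int(i) for i in ranked[:k] if scores[i] > 0]  # a zero-score feature is never kept
    return kept, scores

# toy usage: only features 0 and 1 carry information about the label
rng = np.random.default_rng(0)
X = rng.integers(0, 2, size=(200, 6))
y = (X[:, 0] + X[:, 1] >= 1).astype(int)
kept, scores = filter_select(X, y, k=3)
print(kept, np.round(scores, 3))
```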
pairs of items are used as the initial observations, as in the case of analogical proportions, hence the word "analogical" in ari. our paper is structured as follows: in section 2, we provide an overview of related work. in section 3, we provide the context and notations. section 4 develops the complete formal framework necessary to define ari. we also investigate the theoretical properties of such an index. in section 5, we define our experimental context and exhibit the results from diverse experiments. we also compare ari to other filter-based approaches. section 6 discusses the stability and limitations of ari. finally, in section 7, we provide concluding remarks and tracks for future investigations. all our code is freely available on https://github.com/gillesirit/ari. in this paper, we propose the analogical relevant index (ari), a new filter-based method for feature selection: as such, ari is ml agnostic. • an instance a belongs to a cartesian product x of finite sets x_i: x = x_1 × ... × x_n. in the particular case where x = b^n and y = b = {0, 1}, it is common to consider the elements of x with label 1 as a concept c. given a pair (a, b) ∈ x × x of vectors, their agreement set ag(a, b) is the set of features on which a and b take the same value. let i ∈ a and let us consider the set dif(i)_s of pairs of elements among the observable data which disagree exactly on feature i. for instance, if we observe on each item in s that the value of feature i is always equal to the value of another feature j, then both dif(i)_s and dif(j)_s will be empty: j (or i) is redundant. ideally, having the entire x at our disposal, we could compute ari(i)_x. because we generally have no access to the whole universe x (especially with high dimensions), this value ari(i)_x should be considered a theoretical one. nevertheless, let us equip x with a uniform probability measure denoted as p (and x × x with the product measure, still denoted as p), and consider the random variables {x^(i) | i ∈ a}, with x^(i) taking its values in x_i. then, for a sample s_m of size m, let us denote ari(i)_m =def ari(i)_{s_m} (with a similar convention for dif(i)_m and dif_eq(i)_m). for any feature i, ari(i)_m converges almost surely towards ari(i)_x as m tends to |x|. because we deal with subsets of x × x, let us denote by pr_1 and pr_2 the two corresponding projections. • compare the accuracy of a classifier using all features (baseline) with its accuracy when using only the features having the k largest scores as indicated by the feature selection method. the value of k must be the same for all feature selection methods, unless a feature has a score of 0, in which case this feature has to be eliminated from the list of relevant features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/860.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/860.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3d930e08317f9ce5e503081e57313c7e526763a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/860.txt @@ -0,0 +1 @@ +before the advent of social media, brand crises were largely caused by journalists' contributions. nowadays, a firestorm is a cluster of consumers' digital word of mouth that highlights some communication error, or some terrible mistake made by a company. the cambridge dictionary1 defines the firestorm as "a sudden, and sometimes violent reaction" and the shitstorm as "a wildly chaotic and unmanageable situation, controversy, or sequence of events".
in this paper, i will use both these terms interchangeably. in recent years, many firestorms took place on the internet, mainly due to the increase in the number of users on social networks. in some cases, firestorms have been formally studied to better understand this phenomenon. in 2007, several researchers debated over firestorms, and one of the main outcomes is that "a natural science model of the research process is suitable for studying the social world but a central issue remaining of whether the social world can, and should be, studied according to the same principles, procedures, and philosophy as the natural sciences". this is relevant because today i am actually able to study and evaluate social dynamics by using the massive amount of data coming from the digital world, with particular emphasis on social networks. firestorms are not made of a single event with a standard behaviour; instead, they are caused by non-linear dynamics leading to complex behaviours. due to this, companies must have appropriate procedures to respond to various crisis situations. lehtonen's theory shows that a firestorm develops in five stages: (1) latent stage, where weak signals of the upcoming crisis are received; (2) triggering event, where the subject becomes the target of news and social media attention; (3) the subject is in the top news and the media attention spikes; (4) the media attention calms down to the level of general philosophical and ethical discussion; and (5) there are only minor media hits and attention is guided to other issues. as firestorms begin when there is a service failure, a social failure, or when a company fails to communicate properly, this kind of error can be reduced by following appropriate procedures. however, most of the existing quality and security procedures, such as the ones suggested by iso 9001:2015 and iso/iec 27002:2022, are not adequate for a multi-domain cyber and social attack: with regard to iso/iec 27002:2022, social attacks are outside its scope, while iso 9001:2015, even though it focuses on better business process quality and thus lowers the firestorm risk coming from the public, does not mitigate a firestorm launched by an attacker. hence, in this paper i theorise that it is possible for an attacker to intentionally cause a firestorm attack to undermine the reputation of a company, with the side-effect of advantaging the competitors. i argue that self-organised firestorm attacks require a high number of bots that are already active on social media: in this case, bots start the firestorm on the target company, spreading fake news (or magnifying a certain event, e.g., a mistake made by the company in the past) that will cause a high volume of real people to react negatively and continue the social attack, unknowingly on behalf of the adversary. additionally, i argue that open source intelligence (os-int) could allow an adversary to identify weak spots in the organization, namely people who most likely cannot react properly or defend themselves from the firestorm, and hence cannot mitigate its impact in time. many workers have a linkedin, facebook, or twitter account: moving the firestorm onto the social media accounts of people who work for the target company can lead to an extremely stressful situation for workers. this could be even worse for people who do not often deal with public relations, and could cause confusion, panic and distress.
in fact, when a firestorm arises, even people who work on communication processes and managers can panic, and the fear of losing customers and partners can be very detrimental for any company.when people working in the target firm are in this altered status, i argue it is possible to elaborate a social engineering strategy to capture protected information: in this case, not only firestorms serve the purpose to undermine the corporate image, but they are also used as a diversion for a social engineering attack. in fact, while most important organisations adhere to best-practices listed in security standards like iso/iec 27002:2022 , during a social attack like firestorms, some best-practices and procedures may be distorted or bypassed, both intentionally or by mistake, due to the pressure applied to people who are in charge of complying to such procedures .contributions. the paper makes these contributions: 1) i explain how to make an automated and organized firestorm attack, with only a few manual operations such as the choice of a topic and of a hashtag; 2) i introduce a taxonomy of possible actions that the attacker could perform while doing the firestorm; 3) i illustrate how the author of a firestorm can evade detection for their attack by targeting single workers instead of the company profiles, while increasing the damage done to the firm. 4) i show possible long and short term procedures that a company can implement to mitigate the effect of firestorms attacks. i argue that self-organised firestorm attacks require a high number of bots that are already active on social medias: in this case, bots start the firestorm on the target company, spreading fake news (or magnifying a certain event, e. many workers have a linkedin, facebook, or twitter account: moving the firestorm on the social media accounts of people who work for the target company can lead to an extremely stressful situation for arxiv:2301.when people working in the target firm are in this altered status, i argue it is possible to elaborate a social engineering strategy to capture protected information: in this case, not only firestorms serve the purpose to undermine the corporate image, but they are also used as a diversion for a social engineering attack. the paper makes these contributions: 1) i explain how to make an automated and organized firestorm attack, with only a few manual operations such as the choice of a topic and of a hashtag; 2) i introduce a taxonomy of possible actions that the attacker could perform while doing the firestorm; 3) i illustrate how the author of a firestorm can evade detection for their attack by targeting single workers instead of the company profiles, while increasing the damage done to the firm. identifying the people who are most proud to work for the attacked company can also be helpful in exerting more pressure on the company (since they have more to do with the value of the company). 
shifting the attack on employees has another side-effect, which is beneficial to the attacker: the organisations that are responsible for the public cyber security in every country cannot see the firestorm attack on the company page, because the firestorm is focused on workers only such organisations will hardly be able to detect all comments and posts focused on workers, allowing the attacker to create a smoky form of the attack, which can bypasses conventional security measures, procedures and strategies.1) the value of the company on the financial market could rapidly decrease;2) people who worked in the company during the firestorm might be subject to discrimination in future, especially if the firestorm was caused by a (supposedly) unacceptable mistake that could have been avoided,.when many workers in the company are panicking, the organisation's cco (chief communication officer) will elaborate and react to firestorm on company pages, however, this cannot stop the social attack on the individual profiles of the employees.b) defence as a service: the adversary contacts the attacked firm, but instead of showing they are in charge of running the attack and asking money to stop it, they try to sell a fire(storm)fighter service to the victim, supposedly consisting on bots defending the reputation of the firm: this is basically a reversed firestorm, in which those same bots that built the latent state now defend the company: to avoid drawing excessive attention, the attacker might slowly change the proportion of attacking bots versus defending ones, until they are all defending the company. i can see that the company regains more than half the value lost during the next two months, however, the ransomware attack causes another drop in the financial value of the company due to customers losing trust in the company again, this time from a security perspective. financial value of cd projekt red and critical events tacting allied/partner companies for help with the various attacks on social media; 3) create in advance supporting bots that will defend the company automatically; 4) create an international database of accounts that have made firestorm.if a company has done something enormously wrong in the past, it is possible that every time the same company does something wrong, there is a chance that another firestorm can restart, either for the recent event or also for the past one. a) social failure: if the firestorm is linked to a partner company, or only a certain sector of the company is under attack, immediately distance yourself from them. even if it is complicated given the amount of partners, quality standards and corporate continuity, this action, if done in time, creates a good defensive shield at the communication level, as people can understand that the company itself has also understood the problem, limiting the damage; timing is essential during firestorms, first of all to understand whether the type of firestorm is real or artificial (you can tell by the date of creation of the accounts that do firestorm -if the initial accounts were born recently, they are probably bots, hence artificial); secondly for improving the cyber defence and be prepared for a possible cyber attack; tertiary for the public reaction, because it means that the affected company has noticed the failure faster or as fast as other people (who are doing the firestorm on social networks) and will promptly react to the problem, reassuring customers that it will be solved. 
i introduce a novel model allowing researchers and companies to (1) understand when companies and organisations have a fragile defence against a social-cyber attack, (2) illustrate how companies and organisations can defend themselves from a firestorm, (3) establish that a social-cyber attack must be treated as a possible high-risk event in a multi-domain sector, and (4) present a new model of cyber attack, with a multidisciplinary sociological approach that increases the potential of common cyber attacks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/861.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/861.txt new file mode 100644 index 0000000000000000000000000000000000000000..59b0738a979e9ca8eba624e1c76cc53a32c5eec8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/861.txt @@ -0,0 +1 @@ + among which, mooc course quality evaluation is one of the vital tasks in mooc platform management for helping improve the course materials and promote students' learning efficiency(jiang et al.current studies in mooc quality evaluation lie in two aspects: (1) manual evaluation(wang et al.second, although multi-view node embeddings can be obtained by performing representation learning on the mooc hin, how to guarantee the validity of the learned course representations remains a challenge. specifically, the validity of course representations lies in three aspects: (1) the course representations should preserve the same semantics as the raw course portfolio; (2) the representations in each view should be consistent with the unified representations of the course; (3) the course representations should be aligned with the overall representations of the mooc platform. therefore, to ensure validity, we aim to maximize the three correlations between the pair of course representations and the raw course portfolio, the pair of unified course representations and each view, and the pair of course representations and platform representations. formally, we aim to find a mapping function f : g → h that takes the mooc hin g as input, and outputs information-aware representations h = {h_1, h_2, .where âpos and âneg are derived from the positive course node pairs and the negative course node pairs respectively, based on equation 3. where att(•) indicates the self-attention function, and h indicates the unified course representation, which has integrated the self-attention weights of different meta-paths. we use mutual information (mi) to quantify the agreement between the representation h learned by the course node in the unified view and the representation x of the raw features of the mooc. although we deconstruct heterogeneous graphs into different views, the course representations in each view are expected to be consistent with the unified course representations in semantics, which is defined as multi-view consistency. first, we get the multi-view representation h and the unified course representation h. then, we use a neural network to estimate mi(h; h) and maximize the mutual information between the unified course representation h and the multi-view representation h. we can maximize the mutual information between the platform representation and the unified course representation using the binary cross-entropy loss of the discriminator as follows:.in this paper, we study the problem of mooc course quality evaluation with mooc heterogeneous information networks and propose an information-aware graph representation learning framework for multi-view mooc quality evaluation.
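as a minimal illustrative sketch (not the framework's actual implementation, whose details are not shown in this excerpt), maximizing mutual information between a per-view course representation and the unified course representation can be approximated with a bilinear discriminator trained under a binary cross-entropy loss over positive (matched) and negative (shuffled) pairs; the tensor names and dimensions below are assumptions:

import torch
import torch.nn as nn

class BilinearDiscriminator(nn.Module):
    # scores how well a per-view embedding agrees with the unified course embedding
    def __init__(self, dim):
        super().__init__()
        self.bilinear = nn.Bilinear(dim, dim, 1)

    def forward(self, view_emb, unified_emb):
        return self.bilinear(view_emb, unified_emb).squeeze(-1)  # raw logits

def mi_bce_loss(disc, view_emb, unified_emb):
    # positive pairs: each view embedding with its own unified embedding
    # negative pairs: the same view embeddings matched with shuffled unified embeddings
    pos_logits = disc(view_emb, unified_emb)
    neg_logits = disc(view_emb, unified_emb[torch.randperm(unified_emb.size(0))])
    bce = nn.BCEWithLogitsLoss()
    # minimizing this binary cross-entropy raises a lower bound on the mutual information
    return (bce(pos_logits, torch.ones_like(pos_logits))
            + bce(neg_logits, torch.zeros_like(neg_logits)))

# toy usage with random stand-in embeddings (batch of 32 courses, 64-dimensional)
view_emb = torch.randn(32, 64)     # course embeddings under one meta-path view
unified_emb = torch.randn(32, 64)  # attention-fused unified course embeddings
disc = BilinearDiscriminator(64)
print(mi_bce_loss(disc, view_emb, unified_emb).item())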
third, we identify three types of validity of course representations, with (i) the agreement on expressiveness between the raw course portfolio and the learned course representations; (ii) the consistency between the representations in each view and the unified representations; and (iii) the alignment between the course and mooc platform representations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/862.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/862.txt new file mode 100644 index 0000000000000000000000000000000000000000..9cb8fcaf07c21e8f8decec106df29502ff573a03 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/862.txt @@ -0,0 +1 @@ +rapid advances in artificial intelligence (ai) over the past several years have raised new questions about the role that machines might play in both promoting and impeding humanity. the field of education has been no different. emerging ai capabilities are enabling machines to fuse and make sense of larger, more diverse datasets in increasingly efficient ways. while these affordances of scale, diversity, and efficiency might help generate insights and guide actions to improve educational opportunities and outcomes, they also come with several technical limitations and related practical risks-like failures to generalize and identify causal relationships-that threaten to perpetuate unfair or harmful applications. thus, and rightfully so, the reemergence of ai has sparked new debates about the political, pedagogic, and practical implications of its application in educational contexts (shum & luckin, 2019). these debates are critical, especially if we wish for machines to be able to better-serve the human actors-teachers, learners, administrators, and others in education-who may benefit from their emerging capabilities.engaging productively in these debates, however, requires one to understand some of the methodological paradigms and practices specific to artificial intelligence in education (aied). however, researchers and practitioners not trained in computer science or engineering may find the rapidly advancing field of ai inaccessible. in this article, we try to address this gap, providing an overview of the meanings, methods, and limitations of ai as a re-emerging field and how these intersect with ai's applications in education. in doing so, we hope to build on previous introductions to this topic (e.g., luckin, 2018;holmes et al., 2019) and critical works that connect data models with ethical and social debates (perrotta & williamson, 2018;perrotta & selwyn, 2020). by opening up the "black box" of ai for those outside of the field, we hope to further humancentered ai in education by empowering all stakeholders, regardless of disciplinary background, to contribute to the development of ai that recognizes and champions human capabilities (li & etchemendy, 2018;yang et al., 2021).a third paradigm of machine learning is "reinforcement learning," which has recently been used, among other applications, to develop powerful gameplay systems (e. neural networks take inspiration from connectionist philosophies of cognitive science(elman et al. for example, early warning systems to detect students likely to drop out may be developed for districts that lack a breadth or depth of historical data by "borrowing" the predictive capacities of models pre-trained on data from larger school settings as a starting point(coleman et al. 
unlike machine learning systems, rule-based models will not necessarily make more accurate decisions with a larger scale or diversity of data., the problems they haven't yet answered that they are most likely to answer correctly, given their prior history of answers. as machine learning technologies continue to offer new opportunities for personalizing instruction, it will be important to identify the precise elements of these systems that offer the greatest promise for enhancing student learning.proponents of ai, particularly machine-learning based systems that seek to infer students' knowledge states from the growing scale and diversity of data available on digital learning platforms like khan academy, argue these systems have the capacity to obviate the need for explicit formative and summative assessments, by seeking to infer students' knowledge states from the growing scale and diversity of data available on such digital learning platforms(piech et al." these systems, often using different forms of regression, mine large troves of historical student data to predict which students are most at risk of failing an exam, dropping out of high school or college, etc., a simple rule-based system could trigger a warning if a student's gpa falls below a certain level-machine learning-based systems have the potential to identify and exploit patterns of which school leaders may not be aware. for example, small school districts might face a "cold start" problem: they simply do not have enough historical data to train an accurate machine learning model-requiring them to "borrow" data from other school districts to improve accuracy (e. school leaders may also struggle to calibrate interventions based on the outputs of a model., 2017)showed how an object recognition system that could classify an image as containing a banana with high confidence could easily be fooled into making an incorrect classification simply by adding a small sticker of a toaster to the image.• what kind of ai is it? the examples contained in this paper illustrate how different types of ai can (and cannot) help solve different problems in education, and may help educationalists form a judgement about their applicability and risks within their own contexts.• does the ai enable something that would be difficult or impossible to achieve without it? unpacking any benefits of the scale or diversity of data that the ai operates on, or any efficiencies it enables and weighing them against associated risks or limitations may help justify its usefulness.• how equitably are the anticipated benefits and risks distributed across different groups of students and families? ai, especially machine learning-based systems, can "learn," replicate, and scale bias and inequity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/863.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/863.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f34d213ee82fbd23aa1e84154b6b556da2750a8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/863.txt @@ -0,0 +1 @@ +the impacts of science gateways on research outputs across diverse fields are well documented . gateway infrastructures can be used to support emerging practices that emphasize the interdisciplinary, collaborative, open, and computationally driven nature of science. 
however, the adoption of a science gateway as a research platform can require significant adjustments to existing workflows and challenge one's assumptions about how to manage a research program. gateways that do not engage their user communities to build trust and support adoption are less likely to establish a robust research community and therefore limit their potential scientific impacts . the acceptance and use of scientific gateways, like the diffusion of any innovation, depends in large part on the perceived usefulness of the resource. developing gateways collaboratively, with input from user communities may accelerate the adoption of new disciplinary practices afforded by the gateway platform. centralized resources and online collaboration are becoming more widely adopted in many contexts where we believe the "gateway model" provides a context for addressing hard problems and broadening access to key resources.in this manuscript we describe an effort to build a science education gateway to accelerate undergraduate stem education reform. we define reform very broadly to embrace the diversity and dynamic nature of the landscape across which reform happens. effective teaching must be supported in a wide range of institutional settings, student populations, and delivery modalities. there is broad recognition that teaching and learning strategies should evolve to emphasize the adoption of evidence-based teaching methods, student engagement with authentic scientific practices, and broaden participation among traditionally underrepresented communities. these challenges are further complicated by the need to continuously integrate new topics and skills to connect classrooms to contemporary scientific practices and help prepare students to participate in the technical workforce. even when innovations are developed it is a non-trivial undertaking to support the broad implementation of those strategies so that the potential benefits reach as many learners as possible. considered at this scale, the acceleration of stem education reform is a wicked problem that will require the development of diverse overlapping strategies that can be applied flexibly across the landscape. furthermore, given the certainty that scientific practices and computational resources will continue to evolve, education reform should adopt a continuous quality improvement process in order to treat reform as ongoing and context specific.in 2014 nsf funded the project "supporting faculty in quantitative undergraduate biology education and synthesis (qubes)" which was designed to "address the nation's growing need to better prepare undergraduate biologists with the quantitative and computational skills needed to be successful in the workplace or in graduate school." given the long history of quantitative biology education reform efforts, the project was organized in part to highlight the visibility of ongoing but isolated reform communities and coordinate faculty access to a diverse collection of existing teaching and learning resources. we adopted the hubzero platform and worked with the science gateways community institute (sgci) to design and deploy a gateway to support quantitative biology education innovation and classroom implementation. over time our mission has evolved to serve the stem education reform community more broadly. 
at the conclusion of the initial nsf funding the management of the qubes platform was moved into the bioquest curriculum consortium, a well established 501(c)(3) nonprofit, where it is sustained as an open resource for the reform community.in this paper we describe the conceptualization and implementation of the qubes platform as a science education gateway (seg). after an overview of the technical infrastructure (tools) we describe the ways that faculty use of the gateway is facilitated using social infrastructure (practices). we end with a call to action for the undergraduate stem education reform community to explore the potential use of science education gateways as a means to accelerate the reform of teaching and learning."given the long history of quantitative biology education reform efforts, the project was organized in part to highlight the visibility of ongoing but isolated reform communities and coordinate faculty access to a diverse collection of existing teaching and learning resources. we end with a call to action for the undergraduate stem education reform community to explore the potential use of science education gateways as a means to accelerate the reform of teaching and learning. as a science education gateway qubes is designed to lower barriers to faculty participation in stem education reform by making it easier to engage in scholarship around teaching and learning. our target audiences include faculty whose scholarship centers on teaching and learning, with the platform designed to facilitate, document, and disseminate faculty work as they participate in diverse professional activities.the qubes platform is a shared online space that can be used to publish and disseminate open education resources, host distributed meeting and workshop activities, participate in professional learning, and support education reform projects. the four platform services (professional learning; oer library access; project support; and customizable workspaces) support overlapping faculty user communities, provide multiple points of entry, and enable manifold use case scenarios.the qubes platform has an open, self-publishing platform (qubes oer library) that uses a git-like version control system for tracking versions, adaptations, attribution, and use metrics. to support autonomy of hosted projects on the qubes platform, we increased group customization on the platform by allowing overrides of components, plugins and modules, including fully autonomous oer libraries (e. these resources are available within a partner support group on qubes for onboarding of new projects, which includes demonstrations of the effective use of the platform to support the creation, maintenance, and sustainability of project workspaces. since the inception of the qubes platform in 2014, we have emphasized working closely with the user community to develop a shared vision for the ways that a gateway can support faculty scholarship around teaching and learning reflected in this quote. 
here we briefly introduce four high priority areas including: scalable and robust hosting of software tools, including tight integration of these tools with oer; design of teacher portfolios, akin to a linkedin or researchgate for educators; a custom publishing platform that supports peer-reviewed education journal tools, beyond the self-publishing of oer already supported on the platform; and support for discipline-based education research (dber), which is constantly expanding our knowledge on evidence-based teaching strategies (see figure2).as faculty participate in reform projects, publish oer materials, engage with professional learning, and access materials from the oer library, they are building a record of their activity on the qubes platform. we imagine qubes as a platform where curriculum specialists, education researchers, and teaching faculty could collaborate to scale up data collection and explore the impacts of interventions in diverse teaching contexts and across student audiences. we believe that the gateway environment can play an important role facilitating research on faculty change, professional learning, project management, as well as documenting emerging practices as communities adopt gateway infrastructures to evolve their professional practices. our four platform services (oer library access; professional learning; partner support; and customizable workspaces) provide multiple points for faculty engagement and address key aspects of accelerating reform practices. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/864.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/864.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef3282fff002a3a180172cf6b4a0c1e377f0525a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/864.txt @@ -0,0 +1 @@ +aggressive behaviors traditionally are regarded as intentional act of harming others with an aroused physical state in face-to-face. 1 in the current cyber era, the explosion of information technologies and social media has changed the nature and social communication interactions. the use of social media has been a global phenomenon in human's daily life. users will have greater spontaneity with perceived anonymity mechanism and are prone to express their feeling in the context of social networks. in this vein, social media create a new channel for users to engage in aggressive behaviors. cyber aggression refers to the intentional harm behaviors to others through mobile phones, computers, and other electronic devices. 2,3 it includes hostile beahviors such as social exclusion, malicious humour, and guilt induction. 3,4 past studies have explored the aggression behaviors on the popular social network platforms (e.g., facebook and twitter) and demonstrated that users' characteristic influence, such as age and gender. 5 for example, researchers found that males were more likely to engaging in facebook aggression than females like sending insulting messages and posting aggressive comments. 6 meanwhile, previous studies focused on the specific groups like adolescent and children. pabian et al. 
7 investigated the relationships between the dark triad personality traits and aggression among adolescents on facebook and found that facebook intensity significantly relates to adolescents' aggression behaviors. it is obvious that cyber aggression will result in negative consequences, including broken relationships, substance use, rule-breaking behaviors, and even major criminal activity. 3,8 due to this negative influence, researchers have been trying to identify and detect cyber aggression. 8,9 as a stable individual psychological behavior, cyber aggression has traditionally been measured directly through self-reporting. 3 however, self-reporting weakens data quality and the validity of conclusions due to several limitations, such as cost, subjectivity, and low flexibility. 10 the popularity of social media provides a new way to explore users' personality and psychological behaviors through large amounts of data. 11 for example, sharif and hoque 9 proposed an aggressive text classification system to classify aggressive bengali text into religious, gendered, verbal and political aggression classes. sadiq et al. 8 extracted comments from social media and classified the comment text into three distinct classes of aggression. however, there is a dearth of research that has strived to predict users' cyber aggression behaviors from social media activities and personal characteristics combined with deep learning methods. little research has explored cyber aggression through textual analysis. 12,13 chavan and shylaja 12 used traditional feature extraction techniques like tf-idf and n-grams to detect cyberbullying comments on social networks. al-garadi et al. 13 elaborated on feature selection and tested various machine learning algorithms for the prediction of cyberbullying behaviors. however, conventional machine-learning techniques were limited in their ability to process natural data in its raw form and required careful engineering and domain expertise. 14 with the current development of deep neural networks, deep learning has made major advances in solving the above problems. specifically, language model pre-training has been shown to perform effectively for natural language processing. 15 thus, the advanced bert (bidirectional encoder representations from transformers) model is employed in this study to predict users' cyber aggression. cyber aggression plays a key role in social media platforms' operations and interaction environments. the mass data of social media and computational social science pave the way to delineate individuals' personalities and behaviors. by collecting social media users' self-reports and online data, we proposed a new methodology of employing deep learning models to objectively predict individuals' cyber aggression behaviors. this study contributes to detecting this phenomenon and optimizing platforms' organization.1in the current cyber era, the explosion of information technologies and social media has changed the nature and social communication interactions.7investigated the relationships between the dark triad personality traits and aggression among adolescents on facebook and found that facebook intensity significantly relates to adolescents' aggression behaviors.10the popularity of social media provides a new way to explore users' personality and psychological behaviors through large amounts of data.8extracted comments from social media and classified the comment text into three distinct classes of aggression.
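as a minimal sketch of the general approach (not the study's actual configuration, which is not detailed in this excerpt), a pretrained bert encoder can be fine-tuned with a three-way classification head using the hugging face transformers library; the example posts and the mapping of the three aggression categories to classes 0/1/2 are assumptions:

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# hypothetical posts and labels; three aggression categories remapped to classes 0/1/2
texts = ["example social media post", "another example post"]
labels = torch.tensor([0, 2])

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=3)
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)

batch = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")

model.train()
outputs = model(**batch, labels=labels)  # cross-entropy loss over the 3 classes
outputs.loss.backward()
optimizer.step()

model.eval()
with torch.no_grad():
    preds = model(**batch).logits.argmax(dim=-1)
print(preds)  # predicted aggression category per post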
however, there is a dearth of research that strived to predict users' cyber aggression behaviors from the social media activities and personal characteristic combined with deep learning methods.15thus, the advanced bert (bidirectional encoder representations from transformers) model is employed in this study to predict users' cyber aggression.cyber aggression plays the key role in social media platforms' operations and interactions' environment. the mass data of social media and computational social science pave the advanced way to delineate individuals' personalities and behaviors. by collecting social media users' self-reports and online data, we proposed a new methodology of employing deep learning models to objectively predict individuals' cyber aggression behaviors. 320 active objects are selected for this research of cyber aggression in social media, including 74 males and 246 females. we employ the indirect aggression scale aggressor version4,5to measure participants' cyber aggression behaviors, including three types of social exclusion (10 items), malicious humour (9 items), and guilt induction (6 items).specifically, social exclusion refers to behaviors that work by socially excluding the victim, such as withholding information, leaving out of activities, and turning people against someone/ social manipulation; malicious humour refers to largely constituted behaviors in which humour was used to harm the victim, such as use of sarcasm as an insult, intentional embarrassment, practical joke playing; guilt induction refers to behaviors whereby guilt is intentionally induced, such as use of emotional blackmail, undue pressure, and coercion.as an example, we label high social exclusion as category 1, neutral social exclusion as category 0 and low social exclusion as category -1.we argue that leveraging advanced deep learning pretrained model such as bert could potentially improve cyber aggression prediction and increase the potential of automating textual coding.however, we study on the bert model building and cyber aggression prediction, instead of model interpretability for behavior prediction, which will be a promising direction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/865.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/865.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c7631e845e8fd6ff4ef69775046821dbd07da17 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/865.txt @@ -0,0 +1 @@ +the covid 19 pandemic has disrupted many organizations, government and non-government institutions, schools, companies, and various communities. as a result, more displaced workers and job losses increased, more families sent home, locked down due to covid19 restrictions and uncertain of how and where to obtain immediate income for the family. the government may have provided financial assistance to erring families and fed empty stomachs.however, resources deplete as no concrete measure to total stop the threat of the on-going pandemic the filipino people is enjoying. the cagayan state university is mandated to transforming the lives of people and communities through high quality instruction, innovative research, development, and production. through the years, csu has been working hard on innovating technologies that could help alleviate poverty, increase productivity and improve socioeconomic status of the communities, and help in sustaining and protecting the environment. 
however, no matter how promising these technologies are if these packages of technologies are not widely accessible to target communities, to its intended stakeholders: fishers, farmers, gatherers, and processors. in fact, sharma a, and kiranmayi, d (2019) was unable to find in many literature and studies pertaining to a package of technologies as an iec initiative to adopting and utilizing research-based fisheries technologies, postharvest technologies, and aquaculture techniques.most of the 124 applications reported focused on mobile apps for angling, aquaculture management, aquarium management, marine fisheries, and fisheries governance, marketing and biology.research project generating innovative technologies and products has been funded and curated by experts in the various fields leading to technology commercialization. these then has to be extended to communities via available and relevant technologies so that as an academic institution, it really radiates its mantra of improving the lives of people and the participants were notified via email on their in fact, sharma a, and kiranmayi, d (2019) was unable to find in many literature and studies pertaining to a package of technologies as an iec initiative to adopting and utilizing research-based fisheries technologies, postharvest technologies, and aquaculture techniques.table3presents the results of the assessment made by the technical respondents along the aspects of perceived ease of use, perceived usefulness, attitudes towards usage, behavioral intention to use, and job relevance.29, the assessment of the mangngalapp along the usability and acceptability aspects were found to be "very acceptable and usable" (table3). specifically, the assessment of perceived as presented, the group of non-technical respondents generally assessed the usability and acceptability of the mangngalapp as "very acceptable and usable" with a mean of 3. this rating is associated to the very acceptable and usable descriptive values for perceived usefulness, attitude towards usage, and job relevance. interestingly, more male respondents perceived higher valuation of the mangngal app compared to their female counterparts. meanwhile, the technical respondents rated the aspects of tam as "acceptable and usable" with a mean of 3. higher the functionality can be associated to the fact that the mangngalapp follows a wyswyg approach making ease of access and functional. meanwhile, the portability aspect could be associated to the project being compatible to varied devices making it convenient to users.the participants were asked about their problems and challenges associated to the use of the mangngalapp.the mangngalapp project was found to be very acceptable and usable based on the assessment of the technical respondents. there were uncontrolled issues or problems in the use of the mangngalapp, the constructive comments and suggestions, as well as the overall impressions over the project. based on the iso 25010 software quality characteristics, the respondents generally remark it as "excellent" with an overall mean of 3.from the results, it is concluded that the developed mangngalapp will be a usable and responsive technology that aids to rural development especially among target users-fishers, gatherers, processors, traders, and farmers.the researchers acknowledge the technical challenge that may have encountered by the participants as there were very limited face-to-face presentations made with intended users, thus may affect the results in the study. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/866.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/866.txt new file mode 100644 index 0000000000000000000000000000000000000000..d90441143f3fc8b54d76e4a697e9419f585f0a3d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/866.txt @@ -0,0 +1 @@ +answer: advanced ai systems will have to exist within the context of the world composed of different nations, organizations, and individuals. in order to avoid global catastrophe, we must look at all the levels. by aligning these and considering the ways in which they interact, we can be more confident that advanced ai systems can be built to be compatible with the world, or at least be more aware of the ways in which they can potentially cause conflict. in this way, developers can have a better idea of how they aim to have the system operate in the world. but without doing the former, there is no way for future advanced ai, including agi, to be aligned in any sense on a global scale, which opens the door for existential risk. these misalignments are already visible when it comes to ai content moderation which has yielded innumerable ill social effects in the hands of malicious, deluded, or merely ignorant actors. 2. direct effects. if this work directly reduces existential risks, what are the main hazards, vulnerabilities, or failure modes that it directly affects? answer: the work directly addresses the problem of existential risk by clarifying the preconditions for any real solutions to the problems posed by x-risks. proposed solutions which do not consider how the individual, organization, national, and global levels integrate will be flawed, and therefore not real solutions. this work highlights how we should go about directly mitigating risks: first, by understanding how the levels align or misalign, and then considering how ai solutions to risk are likely to exist within that multilevel structure. a failure mode of this is that, as knowledge, it could be used for creating misalignment as well, allowing for optimizing multilevel misalignment rather than alignment. certain nations and groups have already exploited ai moderated systems in this way, so this is hardly a new discovery on the negative side-it is the positive side which seems to have remained implicit until now, and so, by making it explicit, hopefully we can begin to recapture the escaped genie of misaligned ai and reverse its effects. answer: the main effect of this paper is a diffuse reduction in x-risk, by enabling those who generate other ai alignment x-risk solutions to better understand the multilevel nature of society in which solutions must exist, and then how to integrate their solutions into that multilevel society. if the ideas in this paper are ignored then ai alignment x-risk solutions are likely to be less effective and less comprehensive, thus not solving the ai alignment problem as well as they could have if the ideas in this paper had been more fully integrated with their work. it is not enough to merely create alignment at one level, for example, that of government, since various governments of the world will remain misaligned. currently we see this between autocracies vs. democracies and free nations vs. oppressive ones. ai alignment x-risk solutions which merely enable nations to align their populaces with the government will only empower oppressive autocracies and make global misalignment worse, not better. 
this is true for the other levels as well. alignment has to be consistent from top to bottom or the problem merely squeezes out into the other levels, like a water balloon about to burst. in this section, we analyze how this work relates to general capabilities and how it affects the balance between safety and hazards from general capabilities. 9. overview. how does this improve safety more than it improves general capabilities? answer: the paper frames how any efforts to improve general capabilities should first consider safety in the broader societal context and how to do so. because this is a framework within which thinking about general capabilities exists, on the downside, those capabilities could be directed towards bad ends in a more comprehensive way-however, it seems that the nations who act as spoilers in international affairs already understand this, and so it is the other nations of the world, and organizations and individuals, who wish to improve the state of the world that need to work harder to learn this lesson and act to align ai in a more comprehensive and integrated way. therefore this promotes safety more than hindering it.10. red teaming. what is a way in which this hastens general capabilities or the onset of x-risks? answer: this hastens general abilities in ai only in the same way as any comprehensive model of society might enable better thinking about the integration of technology into society. however, it could perhaps be used by bad actors to either attempt to sow conflicting alignments across various ais, or subtly direct ai towards one very bad misalignment. if an agi were to be built in these ways, then safety would be harmed-but this appears to be almost the default mode for current ai work (often confused, not aligned between levels, or intentionally abused by malicious actors), and so by being more explicit about the true form of the problem, it would seem that the because the alignment problem runs on a continuous spectrum from contemporary problems like content moderation, all the way to agi and x-risks, this framework should provide helpful insights to those seeking to create comprehensively safe ai. and for those who would seek to do the opposite, they already know how to sow discord and evil. this framework is a tool for those who seek to do good, better. the dual use is essentially useless for those motivated by malice, as it is already well-known by them at a tactical, operational, and strategic level. it is those on the side seeking the comprehensive good that should find this to be of most help, tactically, operationally, and strategically, making ai safer for everyone.the ai alignment problem considers how we can encode ai systems in a way that is compatible with human moral values. a country aiming to assert itself as a global power may invest resources into building a domestic ai industry, as well as regulate the usage of ai to moderate and nudge users' behaviors towards particular views. 
however, when put in the context of these other levels, further goals should become visible: 1) ai development should be aligned to individual and familial needs, 2) ai development should align with national interests, and 3) ai development should contribute to human survival and flourishing on the global level.national ⇒ organizational ⇒ individual national regulations shape how organizations moderate content, as organizations must build ai within the bounds of these regulations.based on the above, it might seem that the first goal for ai alignment would be to align the national and organizational levels (assuming that the organization is also aligned with individual well-being).if we are to make good progress on the normative side of ai alignment, we must consider all levels: individual, organizational, national, and global, and understand how each works together, rather than only aligning one or a few of the parts. if this work directly reduces existential risks, what are the main hazards, vulnerabilities, or failure modes that it directly affects? answer: the work directly addresses the problem of existential risk by clarifying the preconditions for any real solutions to the problems posed by x-risks. this work highlights how we should go about directly mitigating risks: first, by understanding how the levels align or misalign, and then considering how ai solutions to risk are likely to exist within that multilevel structure. certain nations and groups have already exploited ai moderated systems in this way, so this is hardly a new discovery on the negative side-it is the positive side which seems to have remained implicit until now, and so, by making it explicit, hopefully we can begin to recapture the escaped genie of misaligned ai and reverse its effects. answer: the main effect of this paper is a diffuse reduction in x-risk, by enabling those who generate other ai alignment x-risk solutions to better understand the multilevel nature of society in which solutions must exist, and then how to integrate their solutions into that multilevel society. if the ideas in this paper are ignored then ai alignment x-risk solutions are likely to be less effective and less comprehensive, thus not solving the ai alignment problem as well as they could have if the ideas in this paper had been more fully integrated with their work. ai alignment x-risk solutions which merely enable nations to align their populaces with the government will only empower oppressive autocracies and make global misalignment worse, not better. because this is a framework within which thinking about general capabilities exists, on the downside, those capabilities could be directed towards bad ends in a more comprehensive way-however, it seems that the nations who act as spoilers in international affairs already understand this, and so it is the other nations of the world, and organizations and individuals, who wish to improve the state of the world that need to work harder to learn this lesson and act to align ai in a more comprehensive and integrated way. what is a way in which this hastens general capabilities or the onset of x-risks? answer: this hastens general abilities in ai only in the same way as any comprehensive model of society might enable better thinking about the integration of technology into society. 
if an agi were to be built in these ways, then safety would be harmed-but this appears to be almost the default mode for current ai work (often confused, not aligned between levels, or intentionally abused by malicious actors), and so by being more explicit about the true form of the problem, it would seem that the because the alignment problem runs on a continuous spectrum from contemporary problems like content moderation, all the way to agi and x-risks, this framework should provide helpful insights to those seeking to create comprehensively safe ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/867.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/867.txt new file mode 100644 index 0000000000000000000000000000000000000000..3309c4d3d00cdf999b5a09cb8074ff062e6ffc27 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/867.txt @@ -0,0 +1 @@ +the journalist thomas friedman's famous declaration of 2012 as "the year of the mooc" (friedman, 2013) heralded the beginning of a new era of education. moocs-or massive, open, online courses-drew enthusiasm and optimism from a wide audience as potential enablers of more equitable global access to quality education. in parallel, they sparked a new wave of computational research in education. emerging platforms and the large datasets they created inspired researchers to analyze how students engage with lectures and quizzes online (kizilcec et al, 2013;breslow et al., 2013); try and predict who is most likely to drop out of courses (kloft et al., 2014); experiment with new methods for sequencing learning content (zhao et al., 2018); and even deploy interventions designed to improve course completion rates (kizilcec et al., 2017;kizilcec et al., 2020). we, too, were among these researchers, analyzing patterns of engagement in mooc discussion forums to better understand the nature of communication and social engagement in these spaces (gillani & eynon, 2014;gillani et al., 2014;eynon et al., 2016). yet almost as quickly as they rose to prominence, it became clear that moocs would not be a silver bullet for addressing disparities in educational access and outcomes, or 'disrupt' higher education as many believed: with the exception of several notable cases (cadwalladr, 2012), most early mooc participants were well-educated adults, hailing mostly from developed countries (emanuel, 2013).moocs, and digital learning environments more broadly, have helped shed light on learner behaviors and patterns that may have previously been difficult-if not impossible-to measure. for example, after initial school closures due to the covid-19 pandemic, data from digital learning platforms helped reveal how students in the us from lower-income neighborhoods were engaging much less with academic content than those in more affluent areas (chetty et al., 2020). yet understanding learning processes through data from digital platforms hardly tells us everything we need in order to improve educational access and outcomes. 
across the world, there are still tremendous global achievement gaps (graetz et al., 2020)-gaps that persist even within specific developed countries like the us, stemming from a myriad of factors like continued racial and income segregation in schools (reardon & owens); racial and gender biases among some teachers and other education leaders (starck, 2020); and, broadly speaking, the crippling effects of poverty on nutrition (walker, 2011), attention and cognition (mani et al., 2013;mccloyd, 1998), self-confidence (browman et al., 2019), and other out-of-school factors that impact the extent to which children are able to learn and grow.in this light, it is clear that digital learning platforms, no matter how advanced, will always be limited in the extent to which they can improve educational and life outcomes for all students-especially those experiencing various structural disadvantages like poverty and racism. so, too, will be the potential impact of computational research as a whole-even if such research becomes more solutions-oriented, as some have called for (watts, 2017). yet the collection and discovery of new education-related datasets, combined with advances in computational methods spanning exploratory data analyses, machine learning, social network analysis, and other approaches offer promise in equipping researchers across disciplines with new tools to ask questions that can surface knowledge about educational processes and systems in ways that were previously difficult to imagine. this promise has motivated several research efforts in the past one to two decades, parallel to the interest in digital learning platforms, to explore education-related datasets using a myriad of computational approaches. journals have hosted special issues on "educational data science" (mcfarland et al., 2021) and "educational research in a new data environment" (reardon & stuart, 2019), featuring research hailing from both social scientists who are increasingly leveraging computational methods in their work, and computer/data scientists with an interest in the social sciences. as these and other related articles highlight, researchers are using advances in natural language processing to identify gender biases in textbooks (lucy et al., 2020); social network analysis to design effective anti-bullying interventions (paluck et al., 2016); and quasi-experimental methods to infer the effectiveness of teachers (chetty et al., 2014) and guidance counselors (mulhern, 2020), to name a few. instead of confining themselves to digital learning platforms, these and other studies represent a growing body of work that seeks to use computational methods to explore issues germain to education systems and institutions as they are experienced every day, "in real life", by students and families.in this chapter, we focus on research that explores one dimension of such education systems: the social factors that shape educational access and outcomes for children aged birth through (approximately) 18 years of age. in particular, we discuss recent computational work exploring how schools, families, and neighborhoods shape children's educational and life outcomes from an early age. many researchers and practitioners agree that schools, neighborhoods, and families all operate on and affect children's educational trajectories in meaningful ways (purpose built communities, 2019), but often debate the relative influence of each. 
after briefly reviewing several studies, we discuss several directions of opportunity for future work, and how computational social scientists may creatively apply their unique disciplinary and methodological backgrounds to pursue them.before proceeding, we make three notes. one: while we discuss digital learning platforms and the aforementioned social factors as two separate categories of research and practice, our purpose in doing so is not to create a false dichotomy. society and technology are interwoven (selwyn, 2019), and education is no different. instead, we make this distinction largely to highlight the emerging body of work in the latter, and encourage computational social scientists with an interest in applications to education to consider investigating these social factors even when the datasets may not be as readily available or easy to capture compared to data generated from digital learning ecosystems. indeed, there is significantly more work to do to better conceptualise the relationships between education, digital technologies and society to facilitate meaningful social computational science in education. two: most of the examples we use are drawn from studies conducted in the us. while many of the themes we discuss vis-a-vis the us are relevant in other countries, we also acknowledge the importance of more research specifically focused on, and conducted within, other international contexts-especially developing contexts, given that much of what works in the developed world cannot be force-fitted into developing countries (irani et al., 2010). three: while some of the studies we highlight leverage large datasets and recent advances in machine learning and other data science techniques, several others use more traditional quantitative methods (like linear regression analyses used for program evaluation / causal inference) as their main methodological tools. we include these different types of studies to contrast what is meant by 'computational', inviting readers to conceptualize a broad methodological landscape for conducting computational social science research in education. yet understanding learning processes through data from digital platforms hardly tells us everything we need in order to improve educational access and outcomes. yet the collection and discovery of new education-related datasets, combined with advances in computational methods spanning exploratory data analyses, machine learning, social network analysis, and other approaches offer promise in equipping researchers across disciplines with new tools to ask questions that can surface knowledge about educational processes and systems in ways that were previously difficult to imagine. journals have hosted special issues on "educational data science"(mcfarland et al. 
instead, we make this distinction largely to highlight the emerging body of work in the latter, and encourage computational social scientists with an interest in applications to education to consider investigating these social factors even when the datasets may not be as readily available or easy to capture compared to data generated from digital learning ecosystems., 2015).computational approaches for understanding and improving learning: a social factors view while computer scientists have driven a large portion of the work behind the above-described computational approaches to analyzing data from learning platforms, much of the computational social science research on the role of schools, families, and neighborhoods in shaping children's educational and life outcomes has been generated by applied micro economists and sociologists.looking ahead: exploring social factors in education with computational social science as many of the citations so far suggest, much of the existing computational work exploring social factors in education has been conducted by applied microeconomists., 2018a;putnam, 2000). more ambitiously, future work could make even stronger connections with the long standing literature that makes far more visible the process and practices of schools-what is sometimes called the 'new sociology of education' (e.by starting with the questions and turning to methods as needed to explore and answer such questions as deeply and richly as possible, computational social scientists can help draw attention to a wider range of topics on the education research agenda-including those that may not directly identify causal relationships (which is of great interest to many applied microeconomists) but still help yield valuable insights that inform downstream causal analyses, design-based research, etc.unpacking the underlying mechanisms driving certain observed outcomes much of the applied microeconomics literature on neighborhood effects or teacher effectiveness outline how much neighborhoods or teachers can increase children's shorter and longer-term outcomes, but few unpack at a granular level what it is about neighborhoods, or teachers, or other social factors that make them more or less effective. for example, might we use techniques from computer vision applied to historical corpora of neighborhood-level google street view images (a lanaik et al.computational social scientists may partner with sociologists, educationalists, and philosophers of education or familiarize themselves with related theories and methods, to bring a more nuanced understanding of the complex role that education plays in society (beyond delivering learning and qualification) and ways of theorising about social justice and education that would add to the existing research in this domain. not all computational social scientists may wish to participate in design-related activities that pick up where analyses of secondary data (or even preliminary causal analyses) leave off, but their methodological and domain insights may lend them to fill an important "in-between" space that connects theory and methods from research domains to the practical challenges and considerations of intervention in field settings. 
much like our point about methods above, letting the questions drive the thinking around which audiences are appropriate for the work-instead of assuming a priori that the audience must be a government policymaker, or platform designer, or some other known stakeholder-may help surface new ideas for people and systems that may have interest in learning about and building upon the research findings that computational social scientists help produce. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/868.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/868.txt new file mode 100644 index 0000000000000000000000000000000000000000..0574c55e1077e5eacd80b3f57df208fdd4c90079 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/868.txt @@ -0,0 +1 @@ +the social and ethical implications of different technologies have long been the object of study for scholars outside of computer science, and recently many computer scientists have taken up this broader agenda under a variety of names. in particular, two largely independent communities have evolved from established fields of computer science. the study of algorithmic fairness that has emerged at the facct conference and its predecessors is heavily influenced by the field of machine learning and focuses on predictive systems, while the study of ethical decision making 1 has attracted primarily researchers from classical artificial intelligence and focuses on sequential decision making. nominally, these groups have similar goals: to produce predictive or decision-making systems that "do the right thing". however, many key ideas from ethical decisionmaking have not yet percolated into the fairness literature, and likewise many important concepts and approaches developed for fair prediction are not yet common in ethical decision making. this paper is an effort to bridge this gap.unlike predictive systems, which consider decisions independently and one at a time (known as myopic decision making), sequential decision-making systems consider sequences of potential actions, allowing them to evaluate the long-term effects of taking a particular set of actions. many real-world problems, such as autonomous driving, power grid management, wildfire fighting, military engagement, disaster relief, and inventory logistics, both fundamentally affect people's safety and access to resources and require sequential reasoning as they cannot be solved adequately via myopic decision making. however, although problems such as autonomous driving sometimes motivate the fairness literature , fairness conceptualizations and methods have largely been developed for predictive rather than sequential decision-making systems. moreover, despite the fairness literature's acknowledgement of the long-term effects and sequential nature of many high-stakes decisions , including education and college admissions , recidivism risk prediction , predictive policing , child and homeless welfare , clinical trials , and hiring , work on these settings rarely engages problem formulations or approaches developed for sequential decision making, or efforts to conceptualize and address ethical concerns emerging from the ethical decision making literature.our paper makes the following contributions. we begin by introducing a foundational and widely-used sequential decision-making model, the markov decision process (mdp), from which many specialcase models are derived. 
we cover problem formulation, solution methods, and key assumptions and properties (§2). we then examine how ethical concerns have been conceptualized within the ethical decision-making and fairness literatures (§3), examine the sequential decision-making model pipeline (§4), introduce some of the measurements (§5) and mitigations (§6) common in the ethical decision-making literature, and discuss some current challenges and state-of-the-art techniques for ethical decision making. throughout, we offer observations following three general themes. first, we draw comparisons between conceptualizations, measurements, and mitigations proposed in the fairness and ethical decision making literatures to highlight where insights and methods from fairness may or may not be appropriate for ethical decision making. second, inspired by the fairness literature's analyses of machine learning pipelines, we draw attention to aspects of sequential decision-making pipelines that represent open opportunities for future analysis. finally, we highlight some problem formulations and techniques developed for ethical decision making that may offer advantages for fairness research. an mdp describes a decision-making problem using four attributes: (1) a set of states that represent different possible scenarios, (2) a set of actions that can be performed by the agent, (3) a transition function that gives the probability of reaching a given state when the agent performs a particular action in its current state, and (4) a reward function that gives the immediate utility of performing a particular action in its current state. at each time step the agent performs an action in a state, receives a reward based on the reward function, and transitions to a successor state based on the transition function. formal definition: an mdp is a tuple ⟨S, A, T, R⟩, where: S is a finite set of states; A is a finite set of actions; T(s, a, s′) is a transition function that represents the probability of reaching state s′ after performing action a in state s; and R(s, a) is a reward function that represents the immediate reward gained by performing action a in state s. first, how might a sequential decision-making system cause harm due to an inadequate decision-making model? for example, if the decision-making model from §2 had only two options for price, high and low, customers may be charged more than necessary in scenarios where the optimal action is to set the price to normal instead of high, since the decision-making model does not recognize this as a possibility.
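to make the formal definition above concrete, below is a minimal value-iteration sketch for an mdp of this form, instantiated on the toy pricing scenario; the state names, transition probabilities, rewards, discount factor, and the choice of value iteration as the planner are illustrative assumptions, not details given in the paper.

# a minimal value-iteration sketch for the mdp <S, A, T, R> defined above.
# all concrete quantities below are illustrative assumptions for the pricing example.
def value_iteration(states, actions, T, R, gamma=0.95, tol=1e-8):
    # T[s][a] is a list of (next_state, probability); R[s][a] is the immediate reward.
    V = {s: 0.0 for s in states}
    while True:
        delta = 0.0
        for s in states:
            best = max(R[s][a] + gamma * sum(p * V[s2] for s2, p in T[s][a])
                       for a in actions)
            delta = max(delta, abs(best - V[s]))
            V[s] = best
        if delta < tol:
            break
    # greedy policy induced by the converged value function
    policy = {s: max(actions,
                     key=lambda a: R[s][a] + gamma * sum(p * V[s2] for s2, p in T[s][a]))
              for s in states}
    return V, policy

# toy pricing mdp: charging "high" in a price-sensitive state loses the customer.
states = ["price_sensitive", "price_insensitive"]
actions = ["low", "normal", "high"]
T = {s: {a: [("price_sensitive", 0.5), ("price_insensitive", 0.5)] for a in actions} for s in states}
R = {"price_sensitive": {"low": 1.0, "normal": 2.0, "high": -1.0},
     "price_insensitive": {"low": 1.0, "normal": 2.0, "high": 3.0}}
V, policy = value_iteration(states, actions, T, R)
print(policy)  # picks "normal" for price-sensitive customers and "high" otherwise

the point of the pricing example above is visible in the sketch: if "normal" were simply missing from the action set, the planner could only ever choose "low" or "high", reproducing the over-charging harm described in the text.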
for example, for a particular decisionmaking model, which stakeholders are explicitly modeled? what kinds of approximations of the world are common, and what assumptions underpin them? whose domain expertise is solicited? are model aspects borrowed from one application or deployment setting to another, or always developed afresh? how does model design take into account the larger systems that models participate in? we are not aware of research that explicitly studies processes related to the design and development of sequential decision-making systems, although there are substantial bodies of adjacent literature on interpretability, explainability, and generally on participatory design. we are not aware of rigorous empirical research on harms produced by deployed sequential decision-making systems, including basic questions such as "who is harmed?" the question is more often framed in terms of rules violations, but even these studies are not common due to the more theoretical nature of most existing research and lack of access to sequential decision-making systems. when evaluating these techniques, for a fixed decision-making model, directly measuring the value function can often provide a signal as to the quality of the resultant policy since all value functions are upper bounded by the value function induced by the optimal policy-the policy we would get if we used an exact planner. for example, even if protected attributes are not represented as state factors, can the transition and reward functions still encode harmful patterns? how might state factors implicitly encode sensitive attributes? how might sequential decision-making outcomes reproduce patterns of discrimination? simply adding protected attributes as state factors seems ill-advised since unless these factors affect the reward or transition functions they will not affect the reasoning process. for example: what resources might be developed to help practitioners understand how to augment their state spaces or modify their reward functions? how might the research community contribute to making this process more systematic, such as via development of checklists or other design processes? how can developers anticipate what kinds of ethical scenarios must be delineated by the model beyond what is necessary for the task? by what processes can we reliably uncover and anticipate such scenarios-without risking stakeholders-given that many of them are challenging to uncover without deploying a system? 6. the important similarities are that the intervention is local, targeting a specific behavior, that the outcome has no formal guarantees since it is unknown how the optimization problem will be re-solved given the new loss function or reward function, and that these interventions require a significant level of expertise since the developer needs to 13 in reinforcement learning systems with very sparse reward functions-reward functions where most states have the same value, usually zero-a similar sounding technique known as "reward shaping" is used to add reward signal to states which represent progress towards or away from one of the original, sparse reward signals. in the reinforcement learning application there is a lot of concern about executing reward shaping in a manner which does not alter the optimal policy one would get if they solved the original mdp using the original, sparse reward function (remember, however, this is not possible since they do not know the transition function). 
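the reward-shaping caveat above (adding signal to a sparse reward without altering the optimal policy) is usually handled with potential-based shaping (ng et al., 1999), sketched below; the grid-world potential function and goal location are hypothetical illustrations, not taken from the paper.

# potential-based reward shaping: F(s, s') = gamma * phi(s') - phi(s) is added to the
# environment reward; with this form the optimal policy of the original mdp is preserved.
def phi(state, goal=(9, 9)):
    # higher potential (less negative) closer to the goal: negative manhattan distance
    x, y = state
    return -(abs(x - goal[0]) + abs(y - goal[1]))

def shaped_reward(r, s, s_next, gamma=0.99):
    # augment the sparse environment reward r with the shaping term
    return r + gamma * phi(s_next) - phi(s)

# usage sketch: even with a sparse reward of 0 everywhere, moving toward the goal
# yields a positive shaped reward and moving away yields a negative one.
print(shaped_reward(0.0, (3, 4), (3, 5)), shaped_reward(0.0, (3, 4), (3, 3)))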
here, we imagine analyses of the sequential decision-making model and the processes by which models are designed; the outcomes of decisionmaking systems in terms of which stakeholders are harmed, particularly the ways in which outcomes might reproduce existing patterns of injustice; and how choices regarding the design of decisionmaking models give rise to particular outcomes. alongside these analyses, what processes and resources might we develop to help anticipate the outcomes of a given model and policy, and support safe iterative model development, without incurring too much of the risk inherent to deployment?14nevertheless, decision-making models are in many ways fundamentally different from predictive models, and their reasoning capabilities, design, and deployment will make realizing these goals difficult. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/869.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/869.txt new file mode 100644 index 0000000000000000000000000000000000000000..fe493fd7d1a9763a4e4a126eeb6ed861e5759427 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/869.txt @@ -0,0 +1 @@ +blockchain technology is one of the emerging areas nowadays that has gained widespread attention over recent years. much revolutionary growth started in various industries (such as it, education, business, banking, and many others) to capitalize on this technology. blockchain act as a decentralized database, including transaction data that is permanently saved and encrypted. the governing architecture of a blockchain is a node, a laptop, a computer, or a server that has a complete copy of the blockchain's transaction history rather than depending on a central authority to store and validate data. a peer-to-peer network of nodes forms a blockchain to constantly share the most recent data block to keep all nodes in sync . due to these characteristics, many blockchain-based applications are being developed (from defi , nfts , dapps , and supply chains to rewards and more) to disrupt traditional business models and create unique interest. blockchain technology's recent rapid expansion forced many industries to redesign and rethink many fundamental components of existing systems. blockchain is setting a new trend by innovating businesses by delivering robust characteristics such as trust, security, privacy, and identity . the best way to launch a blockchain application requires technical expertise and experience to maximize the chances of a successful, thriving implementation, and it is only possible with strategic design .the blockchain academy provides the following reasons why embracing blockchain-specific education in higher education institutions (heis) is important : "we are noticing an increasing demand of current workforce that firmly pushes blockchain education in heis. in order to introduce new programs and individual courses is essential to foster skills to meet industry standards." nowadays, various industries are redesigning their existing system and launching new startup businesses with blockchain.blockchain technology has become one of the most competitive spaces around the globe . unlike other technologies, blockchain technology has adapted to different applications. nurturing students in blockchain education is an ongoing process since technology is evolving rapidly. 
however, heis must keep their curriculum up to date, continuously develop students' skill sets, and provide real-time hands-on experience; otherwise, their students' skills may become obsolete. heis can adopt several suitable practices that build students' proficiency in specific tech niches. these credentials can portray a better image of their students in the eyes of employers. a blockchain education survey conducted by coindesk in 2021 rated the top 20 universities in the world that offer the best blockchain education. of the 20 schools on this list, 14 are private with limited courses. between 2018 and 2021, only mit, harvard, and a few other reputable institutions had the highest student placements and publications in the blockchain area. for a long while, blockchain communities have striven to promote decentralization and to make open-access projects available for students to advance this technology. this widespread adoption of blockchain education establishes a hierarchy between academia and industry. industries using blockchain technology require more than 300% of the existing workforce, so heis may need to develop and adopt blockchain courses that will take two to three years to fulfill current workforce demand. according to blockgeeks (one of the largest blockchain education hubs on the internet), 2022 survey reports indicate that while the demand is growing for blockchain experts (who help to infuse blockchain in building and designing solutions that leverage cross-asset frameworks and concepts), there is a lack of curricular educational resources to educate undergraduate and graduate stem students. therefore, it is important to educate students on the fundamental ideas behind blockchain technology, develop a more in-depth and distinct comprehension of these ideas, and educate students on real-time blockchain applications and the mechanisms underlying various consensus protocols. simplilearn is prepared to be a helpful resource for students not only to become blockchain developers but also to gain additional training and skills in topics related to blockchain development, such as devops, software development, and cloud computing. the department of education blockchain action network and the american council on education's education blockchain initiative (ebi) are actively exploring the teaching and training of blockchain technology and how it can potentially revolutionize the field.
the education blockchain action network brings educators, administrators, students, and technology developers to collaborate in examining how institutions could shape the future of blockchain education.blockchain developers are professionals who work with blockchain technology and are in charge of activities like inventing blockchain protocols, writing smart contracts, and many more.there are two sorts of blockchain developers: blockchain software developers and core blockchain developers.a blockchain architect provides end-to-end solutions to businesses based on blockchain technology and contributes to developing an entire blockchain ecosystem engagement strategy. these certifications will help students become master the concepts of blockchain architecture tools, business components, and technical elements of blockchain architecture, how to translate requirements into functions, architecting their blockchain solutions, and more.we discussed the blockchain market growth and key areas in various sectors for analyzing blockchain workforce demand to discover the key areas of blockchain education that need to focus on more. other universities, like the university of copenhagen, offer a blockchain summer school, which cooperates with the european blockchain center, featuring workshops and seminars to enhance students' understanding of blockchain technology. this craze of blockchain will never stop; if students want to pursue a career in blockchain, it is the right time! many blockchain courses, including solution architects, designers, and engineer, have a precarious and somewhat checkered history in academia. in order to respond to the growing demand for blockchain professionals, we investigated various career tracks available for blockchain learners and the necessary certifications and courses to educate blockchain technology. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/87.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/87.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f3dc97ff78e0f3142db4eea3db80d9ea58c1118 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/87.txt @@ -0,0 +1 @@ +ai models have become increasingly more complex to support additional functionality, multiple modalities, and higher accuracy. while the increased complexity has improved model utility and performance, it has imposed significant model training costs. therefore, training complex models is often infeasible for resource-limited environments, such as those at the cloud edge and in fully or often disconnected environments.in response to these challenges, this paper proposes a new paradigm for creating neural networks: rather than training networks from scratch or retraining existing networks, we create neural networks through composition by stitching together fragments of existing pre-trained neural networks. a fragment is one or more consecutive layers of a neural network. we call the resulting neural network composed of one or more fragments a "stitchnet" (fig. 1). by significantly reducing the amount of computation and data resources needed to create neural networks, stitchnet enables an entire new set of applications, such as the rapid generation of personalized neural networks at the edge or in fully disconnected environments.stitchnet's model creation mechanism is fundamentally different from today's predominant backpropagation-based method for creating neural networks. 
given a dataset and a task as input, the traditional training method uses backpropagation with stochastic gradient descent (sgd) or other optimization algorithms to adjust the weights of the network. this training process iterates through the full dataset multiple times, and therefore requires compute resources that scale with the amount of data and the complexity of the network. training large models in this way also requires substantial amounts of data to mitigate overfitting. while successful, this traditional paradigm for model creation is not without its limitations, especially as ai moves out of the data center and into highly resource-constrained and disconnected environments. creating complex neural networks without access to large amounts of data and compute resources is a growing challenge. in the extreme case (e.g., for very large language models (llms) and computer vision models), only a few companies with access to unrivaled amounts of data and compute resources are able to create such models.stitchnet solves this problem by creating new neural networks using fragments of already existing neural networks. the new approach takes advantage of the growing amount of neural networks that already exist. stitchnet enables the efficient reuse of the learned knowledge resident in those pretrained networks, which has been distilled from large amounts of data, rather than having to relearn it over and over again for new tasks as is done with traditional model creation paradigms. stitchnet's ability to reuse existing pre-trained fragments, rather than recreating from scratch or retraining for every task, will help accelerate the growth and application of neural networks for solving more and more complex tasks in heterogenous environments. for example, stitchnets can be created on-the-fly to serve as classifiers specially tuned for local conditions and classes of interest in a given environment or task.however, compositing these existing fragments into a coherent and high-performing neural network is non-trivial. to reuse the knowledge of pre-trained neural network fragments, we need a way to 1) measure the compatibility between any two fragments, and 2) compose compatible fragments together. in the past, centered kernel alignment (cka) - has been used to measure similarity between neural network representations. we leverage cka to assess the compatibility of any two fragments from any neural networks, and compose new neural networks from fragments of existing pre-trained neural networks to create high-performing networks customized for specific tasks without the costs of traditional model creation methods. the cka score is used to reduce the search space to identify compatible fragments and guide the fragment selection process.we present empirical validations on benchmark datasets, comparing the performance of stitchnet to that of the original pre-trained neural networks. we demonstrate that stitchnet achieves comparable or higher accuracy on personalized tasks compared with off-the-shelf networks and has significantly lower computational and data requirements than training networks from scratch or by fine-tuning.our contributions are:• the stitchnet paradigm: a novel neural network creation method with versatile applications. 
• innovative use of cka to assess fragment compatibility. • technique for seamlessly combining compatible fragments in both linear and convolutional layers. the core mechanism to create stitchnets is to identify reusable fragments from a pool of existing networks and to compose them into a coherent neural network model capable of performing a given task. rather than looking at the neural network as a whole, we adopt and use cka as a measure of compatibility between any two fragments of any neural networks. we use the notation f_ijk to denote a fragment of a neural network i from layer j to layer k, and the notation n_ik to denote the computation performed by the portion of the neural network from which the fragment was taken up to layer k. these pre-trained networks are divided into one of three types of fragments: starting fragments for which the input is the original network input, terminating fragments for which the output is the original network output, and middle fragments that are neither starting nor terminating fragments. as shown in fig. 2, there are 8 fragments for alexnet, 5 fragments for densenet121, 13 fragments for mobilenet v3 small, 6 fragments for resnet50 and 16 fragments for vgg16. this results in the creation of a fragment pool p of 48 fragments consisting of 5 starting fragments, 38 middle fragments, and 5 terminating fragments. these results crystallize one of the core benefits of stitchnet: without significant data and computation requirements of traditional training procedures, the method can discover networks that are personalized for the task, outperform the original pre-trained networks, and do so while significantly reducing inference-time compute requirements. without stitchnet, we either need to train a network from scratch (which may fail due to our limited amount of training data), or find an existing pre-trained neural network, label the dataset, and finetune the network.
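since cka carries the compatibility scoring described above, the following is a minimal sketch of linear cka (kornblith et al., 2019) computed over the activations two fragments produce on a shared probe batch; the array shapes, variable names, and synthetic activations are illustrative assumptions rather than the paper's actual interface.

import numpy as np

def linear_cka(X, Y):
    # X: (n, p1) activations of one fragment, Y: (n, p2) activations of another,
    # both computed on the same n probe inputs; features are mean-centered first.
    X = X - X.mean(axis=0, keepdims=True)
    Y = Y - Y.mean(axis=0, keepdims=True)
    hsic = np.linalg.norm(Y.T @ X, ord="fro") ** 2
    return hsic / (np.linalg.norm(X.T @ X, ord="fro") * np.linalg.norm(Y.T @ Y, ord="fro"))

# usage sketch: score how closely a candidate fragment's representation tracks the
# output of another fragment on a small probe batch (hypothetical synthetic data).
rng = np.random.default_rng(0)
acts_a = rng.normal(size=(64, 256))                            # outputs of one fragment on 64 probes
acts_b = acts_a[:, :128] + 0.1 * rng.normal(size=(64, 128))    # a strongly related representation
acts_c = rng.normal(size=(64, 128))                            # an unrelated representation
print(linear_cka(acts_a, acts_b), linear_cka(acts_a, acts_c))  # the first score should be clearly higher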
by reusing fragments of these networks to efficiently compose new neural networks for a given task, stitchnet addresses two of the most fundamental issues that limit the creation and use of neural networks: large data and computation requirements. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/870.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/870.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb526dbdcaf5a95a1b953fd4921cbc2a363584d2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/870.txt @@ -0,0 +1 @@ +a typical patient is now projected to generate approximately one million gigabytes of medical data throughout the course of their lifetime. 1 the generation and availability of this electronic health data has presented both opportunities and challenges for artificial intelligence (ai) researchers. a wealth of uses have been described across the diverse healthcare technology landscape, with examples of ai applied to clinical imaging, electronic health records, wearables, genomics and drug discovery. healthcare ai has attracted huge financial investment, with interest from both the public and private sectors in recent years. 7 notably, by the end of 2019, the 50 largest private sector investments into healthcare ai had reached a total of $8.5 billion and further growth in investment is anticipated, indicating the significant potential for ai to impact human health. 7,8 "surgical pathology" or "histopathology" is a medical specialty where samples of tissue or cells are examined to provide vital information to clinicians and patients on the diagnosis, treatment and prognosis of cancers and other diseases. 9 exciting ai innovations developed for digital pathology, usually derived from a technology called "whole slide imaging", have been applied to a range of diseases, including prostate, skin, breast, liver, colorectal and kidney pathologies. whole slide imaging involves the scanning and digitisation of a whole glass microscope slide to store it as a high resolution image that is available for the pathologist to view on a computer at any time. 16 diagnostic reports, molecular techniques and other laboratory imaging modalities are also targets for ai research. it is anticipated that these advancements will alter the way pathologists diagnose and influence patient management in future. however, as with other areas of healthcare, there are many barriers to achieving successful clinical implementation of these solutions and the introduction of any new technologies must be supported by robust evidence. 23,24 the ability to comprehensively report research findings to make them understandable and usable by other researchers and clinicians is an essential skill for academics, and is key in ensuring broader scientific and technological progress. unfortunately, there are multiple incentives and pressures for clinicians to rapidly publish research that does not necessarily include the essential components needed for both critical appraisal of a study and to make the work usable and useful to other researchers, and to the wider field. 25,26 concerns around the quality of research methodology reported in scientific studies more generally first appeared in the literature in the early 20 th century and it was recognised that this issue was impacting the quality of evidence and robustness of conclusions reached within research. 
27 in an endeavour to overcome this, the first calls for reporting guidelines and attempts to produce them materialised in the 1980s and 1990s, and probably the most widely known early reporting guideline, the consort statement for reporting of clinical trials, was published in 1996. 27,28 a reporting guideline is a tool or checklist used to guide the researcher in providing the minimum essential criteria needed when summarising their research study for publication. 29 a library of reporting guidelines for a range of study types can be found at the equator network website https://www.equator-network.org/reporting-guidelines/. 30 the equator network was established in 2008 as an international collaboration of academics and other stakeholders, with an aim to promote transparent and accurate reporting of medical research. 29 further reporting guidelines have been developed for use in a variety of contexts since the initial consort statement and these can be found within the equator network's online library. 29,30 following decades of work to improve reporting quality, there is evidence to show that completeness of reporting improves with the use and endorsement of reporting guidelines. concern that ai research is particularly vulnerable to issues of bias, methodological quality, and poor reporting, has been raised in several recent studies and risks leading to "research waste" following the huge investment into these technologies. a systematic review by liu et al, in 2019 demonstrated that few of the deep learning studies examined provided externally validated results, few compared the performance of ai studies and healthcare professionals using the same sample and reporting of ai studies was frequently poor. 37 a systematic review of medical ai trials by nagendran et al. in 2020 showed that most ai trials were at high risk of bias and deviated from established reporting standards. 38 finally, a systematic review of machine learning for covid-19 in radiology imaging by roberts et al. in 2021 showed a high prevalence of deficiencies in the methodology and reporting, and of 320 relevant studies, none of these were of potential clinical use due to methodological flaws and underlying biases. 39 wider awareness, endorsement and use of reporting guidelines in ai research is one method that can be used to start tackling these issues.few studies have addressed the issue of reporting in computational pathology. in previous work, our group published a study examining reporting of ai diagnostic accuracy studies in pathology conference abstracts which demonstrated that reporting was suboptimal, reporting guidance was not used or endorsed and that work was needed to address areas of potential bias in ai studies. 40 a study by hogan et al. in 2020 examined reporting of diagnostic accuracy studies more generally in pathology journals and showed that better enforcement of reporting guideline use was needed, as incomplete reporting was prevalent. 41 radiology shares some similarities with digital pathology, in terms of providing diagnoses and using medical imaging systems. a study by dratsch et al. in 2020 showed similar issues with incomplete reporting and poor use and endorsement of reporting guidance in ai diagnostic accuracy conference abstracts. 42 in recognition of these concerns, the intention of this review is to highlight the tools and checklists that are readily available to those working in pathology ai research. 
many of these resources are quick and easy to incorporate into an existing research study and will help to avoid unintentional errors and omissions by authors. greater awareness of these guidelines could increase uptake and improve the quality of research reporting in computational pathology. recent literature reviews of pathology ai were examined prior to commencing the study to understand current focuses of research and to determine the categories for required reporting guidelines. 77,81 additional recommendations for specific study types address noninferiority and equivalence studies, non-randomised studies, spirit-path addressing pathology-specific issues in clinical trials, and pilot and feasibility studies. there is increasing emphasis on patient and public involvement in ai research, and qualitative research will be essential throughout the process of developing and implementing ai products successfully into any clinical setting. 25 this study was designed to encompass a wide range of useful resources for common study types to address incomplete reporting within pathology ai research; however, this is not intended to be an exhaustive list and there may be additional context-specific tools that are not addressed here. incomplete reporting is prevalent in pathology and ai research, and reporting guidelines can help to improve reporting quality. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/871.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/871.txt new file mode 100644 index 0000000000000000000000000000000000000000..0fc5ae9700445b2f3638b03fb8921e7a9d412af8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/871.txt @@ -0,0 +1 @@ +automated decision-making systems are being rapidly deployed in the united states and internationally and affect the public in a multitude of positive and negative ways. private and governmental institutions (i.e. societal institutions) use these systems to process information according to certain human-devised rules in order to address social problems or organizational challenges. these systems are often created using mathematical formulas or algorithms that are processed through computers to find commonalities among large datasets. for example, police departments have designed (with the assistance of data scientists) predictive policing algorithms to analyze massive amounts of pre-existing crime data to identify communities that have a high risk of crime; or past arrests or victimization data to identify individuals/groups who are likely to commit a crime or become a victim. some research suggests that the public lacks trust in automated decision-making systems and the institutions that deploy them , . the recreancy theorem argues that individuals are more likely to trust and support decisions influenced by automated decision-making systems if the institutions that administer them behave with integrity (i.e. fiduciary responsibility) and competency. however, often, the public is never informed of how these systems operate and resultant institutional decisions are made. a "black box" effect reduces the public's perceptions of automated decision systems' integrity and trustworthiness. consequently, the institutions administering these systems are less able to assess whether the decisions suggested are just; and the public loses the capacity to identify and challenge unfairness, or the costs associated with the loss of public goods or benefits.the current position paper examines fiduciary responsibility , within the context of a data science lifecycle (dsl). there are many dsls that affect individuals and the public at large, thus requiring institutional fiduciary responsibility. examples of these dsls include predictive policing , , , application processing (e.g. loans, school admissions, etc), autonomous vehicles and robotics , and government network surveillance and national security . dsls provide a holistic framework for describing processes and attributes of automated decision-making systems. a dsl has three layers: a (1) pre-processing layer, a (2) model building layer, and a (3) post-processing layer (see subsection ii-a). drawing from the recreancy theorem in quantifying the public's trust in automated decision-making systems, the current paper focuses on fiduciary responsibility within the third layer of the dsl. there is already a significant body of work to substantiate fiduciary responsibility within the early layers of dsls (see subsection ii-c). our contribution is two-fold: (i) to analyze the notion of fiduciary responsibility within the third layer of a dsl, and (ii) assert that reducing the black box effect in that layer is necessary for institutions to meet their fiduciary responsibility (see section iii). 
we discuss the role of fiduciary responsibility within dsls, which provides a methodology for addressing the public's lack of trust in automated systems and the institutions that employ them to make decisions affecting the public (see section iii-c). we posit that fiduciary responsibility appears in several contexts of a dsl, each of which requires its own mitigation of sources of mistrust. to instantiate our view of fiduciary responsibility within a dsl, a los angeles police department (lapd) predictive policing case study is examined (see section iv). we examine the development and deployment by the lapd of predictive policing technology and identify several ways in which the lapd failed to meet its fiduciary responsibility. we further discuss actions and mechanisms which the lapd could have utilized in an effort to meet its fiduciary responsibility.the current position paper is situated in the relevant sociological literature concerning public trust in technological innovations. it provides a novel and potentially impactful framework to address and facilitate fairness, accountability, and transparency in automated decision-making systems, which spans the dsl workflow. our analysis has a specific focus on building trust in the post-processing layer/stages. we also build on prior work to demonstrate how bias can manifest in the data acquisition, model building, and post-processing dsl layers/stages, requiring distinct mitigation strategies. for example, police departments have designed (with the assistance of data scientists) predictive policing algorithms to analyze massive amounts of pre-existing crime data to identify communities that have a high risk of crime; or past arrests or victimization data to identify individuals/groups who are likely to commit a crime or become a victim. our contribution is two-fold: (i) to analyze the notion of fiduciary responsibility within the third layer of a dsl, and (ii) assert that reducing the black box effect in that layer is necessary for institutions to meet their fiduciary responsibility (see section iii). we discuss the role of fiduciary responsibility within dsls, which provides a methodology for addressing the public's lack of trust in automated systems and the institutions that employ them to make decisions affecting the public (see section iii-c). in the current paper, we specifically examine fiduciary responsibility in societal institutions' development and administration of automated decision-making systems because embedded processes of data collection, data modeling, and prediction output influence whether the public will perceive those institutions as having integrity. we will show that automated decision-making systems that operate within a "black box" (where data scientists and institutional staff make system development and administration decisions "in the dark") absent public technological knowledge, informational awareness, and scrutiny, hinders an institution's efforts to meet fiduciary responsibility, and consequently establish trust.we further contend that the interpretation of the dsl interpretations, which are value-laden, that are embedded as a formal stage of the dsl also situate the causes and effects of the dsl within a broader context of "the human element", personalizing both the individual affected by and the institution deploying the dsl. 
as observed in our case study on the lapd's use of a predictive policing dsl in section iv, a lack of communication can lead to the public not trusting in the institution's deployment of a dsl. we argue that this regime of a dsl operation can greatly advance the institution's efforts to meet its fiduciary responsibility as well as provide the potential for establishing "structural assurances of trustworthiness" that the public will require for accepting the implementation of a dsl. the institution informs the affected party of (what:) the decision made and the subsequent action that was/will be taken; (why:) the interpretation of the prediction within the larger context of the dsl and the institution's mission, the rationale for the decision based on the model's prediction, and the justification for the action based on the prediction and the decision; (how:) what data was used to make the prediction, how the data was utilized, how the input data as well as the model output were interpreted, and the values that informed the interpretation of the model output. predictive policing dsls: benefits, risks, and public trust over the past 14+ years, multiple urban police departments across the united states have sought and utilized algorithmdriven predictive policing technologies that evaluate massive volumes of historical crime/arrest data to predict high crime geographies/places or crime prone individuals, which help police leadership decide where and how to deploy officer resources. considering these arguments, the methodology through which officers collect the data impacting the pre-processing stage of the dsl may be influenced by systemic racism and racial bias in policing; and predictive policing algorithms (model building layer) rely on such data to generate place-or person-based predictions in the postprocessing stages of the dsl, which can perpetuate or reinforce historical prejudices in policing practices and policies,,,,,,,. examining laser's operation within the dsl framework described in this paper, the laser algorithm utilized criminal history data to identify individuals most likely to commit a violent crime as part of the pre-processing layer of the dsl. in relation to the post-processing layer of the dsl, when police leadership interpret predpol's problematic hot spot predictions and thereafter decide to assign higher or increasing numbers of officers (communicate stage of the dsl) to patrol african american and hispanic/latino/a/x communities (deploy stage of the dsl), the likelihood of civil rights and civil liberties violations increases. the dsl framework provides multiple methods to precisely describe fiduciary responsibility of technologies affecting the public welfare and how institutions can meet their fiduciary responsibility. we illustrated the importance (and necessity) of embedding fiduciary responsibility across the dsl workflow over time wherein institutions' decisions and deployment of actions in the post-processing layer can effect the public in profound and consequential ways. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/872.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/872.txt new file mode 100644 index 0000000000000000000000000000000000000000..659348cbb30343339b88a1e21521ef557e01fe06 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/872.txt @@ -0,0 +1 @@ +the development of data-driven artificial intelligence (ai) technologies, such as machine learning (ml), generally consists of three phases involving data capture and pre-processing, model building and validation, and real-world implementation and deployment (coiera, 2019). in the case of healthcare, ai developers require access to health and care data, which might also include potentially identifiable patient data. information governance (ig) processes have been put in place to oversee the use of personal confidential data. however, navigating ig processes in the formative stages of ai development and pre-deployment can be challenging because the mechanisms for data sharing for the purpose of assuring the safety of ai applications are complex and evolving.the uncertainty about ig processes governing access to health and care data is problematic not least because the development of trustworthy healthcare ai needs to be based on prospective and ergonomics studies that enable iterative and incremental assessment of what happens when ai is introduced into the wider socio-technical system (sujan, pool and salmon, 2022;vasey et al., 2022). many studies evaluating healthcare ai are retrospective and focus on the performance of algorithms rather than on the safety and assurance of the service within which the ai is going to be used (sujan et al., 2019). as a result, the evidence base for the safety and efficacy of these technologies remains weak and is at a high risk of bias (nagendran et al., 2020;wu et al., 2021). often, subsequent prospective evaluation studies demonstrate that one cannot assume that results from retrospective evaluation translate smoothly into successful adoption and deployment in clinical systems (blomberg et al., 2021;beede et al., 2020).development and retrospective evaluation of healthcare ai are typically performed with a technology-centric focus, with an emphasis on technical issues such as data quality and the potential for bias in the data (challen et al., 2019). there is a risk that ig is regarded as a deterministic and external process rather than as an integral and formative part of the development life cycle. from a human factors and ergonomics (hf/e) perspective, the development, governance, and deployment of novel and disruptive technologies, such as healthcare ai, should be studied as interacting sociotechnical processes rather than as technical and procedural activities in isolation (sujan et al., 2021).the contribution of this paper is a reflection on the practical experiences of managing ig processes for the development of trustworthy healthcare ai from a socio-technical systems perspective using the example of an ai system to support the recognition of out of hospital cardiac arrest (ohca) calls in a welsh ambulance service clinical contact centre. the next section (section 2) provides an overview of the current state of ig processes and requirements in wales. in section 3 we describe the case study and interpret from a socio-technical systems perspective our experiences of managing ig. 
then, in section 4 we propose recommendations for integrating ig practices into the development life cycle of trustworthy healthcare ai. concluding remarks are presented in section 5. in the case of healthcare, ai developers require access to health and care data, which might also include potentially identifiable patient data. however, navigating ig processes in the formative stages of ai development and pre-deployment can be challenging because the mechanisms for data sharing for the purpose of assuring the safety of ai applications are complex and evolving.the uncertainty about ig processes governing access to health and care data is problematic not least because the development of trustworthy healthcare ai needs to be based on prospective and ergonomics studies that enable iterative and incremental assessment of what happens when ai is introduced into the wider socio-technical system(sujan, pool and salmon, 2022;vasey et al. organisations processing health and care data need to consider whether they require and meet a legal basis to satisfy data protection legislation. within the context of the development and deployment of healthcare ai, ig processes are important to ensure data privacy and security, ethical use and appropriate data quality and accuracy.in wales, ig processes need to ensure that the requirements of the uk gdpr (general data protection regulation) and the common law duty of confidentiality (cldc) are met. the uk gdpr applies to personal data, whilst the cldc applies to confidential patient data. when processing confidential patient information, having a legal basis under the uk gdpr (article 6 and article 9) does not remove the need for an appropriate legal basis under the cldc. data sharing for individual care is limited to those within a patient's health and care team, who have a legitimate relationship with that person (and therefore a need to access their information to treat them). a lawful basis under uk gdpr article 6 and article 9 (for special category data including health data) is required, such as performance of a task carried out in the exercise of official authority of the controller. google deepmind's argument was that the arrangement was covered by the implied consent rule under the common law duty of confidentiality (cldc), which allows the nhs to use and share data, including with third parties, on the basis of implied consent if it is for the purpose of direct patient care. the data processor is the person or entity processing personal data on behalf of the data controller. within the assist study, most of the 2-year project period was spent on learning about ig and negotiating the legal basis for data sharing, details of data requirements, and appropriate data control. in this sense, the experiences reported here around negotiating data sharing, data requirements and data control could be conceptualised as "information governance work". ig processes for the development of trustworthy healthcare ai rely on information governance work, which entails dialogue, negotiation, and trade-offs around the legal basis for data sharing, data requirements and data control. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/873.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/873.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ddfe93ebe21586d399d7e298ea026f8675ad760 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/873.txt @@ -0,0 +1 @@ +technology has been making impressive headways toward creating a world free of poverty, hunger, and diseases, and the implementation of technological inventions has transformed human lives in ways that were unimaginable even a few decades ago. however, in this march towards perfection, a large number of people, specifically those living in impoverished rural areas all over the world, have been mostly left behind. in many underdeveloped and developing countries, rural villages are often impeded by insufficient access to the internet and electricity. according to eu rural review, a mere 47% of rural homes have internet access as opposed to more than 80% in urban areas . in addition, more than 1.3 billion people are deprived of access to electricity all over the world . this leads to lacking healthcare, transportation, and education system, extreme poverty, inadequate digital literacy and digital gap, migratory imbalance, and a poorly structured economy . the situation has been referred to as a ‗vicious circle driving rural decline' . inclusive digital transformation through a regionally adaptive, resident-oriented, and knowledge-based policy, focusing on sustainable growth is vital to tackle the issue and to ensure a better quality of lifestyle for everyone . the potential of the smart village concept in this regard is therefore being recognized by world leaders along with common agricultural policy (cap) remodeling in progress .it is now well understood that rurality is both a demographic or material construction and a symbolic or imaginary one . it is not easy to think about this in a global context other than to say that there is a tendency to position rurality as a receding and even vestigial space in the face of the big story of modernity, which is positioned as urban. however, in reality, rural areas have to encounter several challenges. such as, healthcare, education, marketplace, and so on.rural health refers to the well-being of residents of rural areas, who are typically more remote from medical facilities and other services than their urban counterparts. rural communities experience far worse healthcare services than urban populations, including difficulty with mental illness, substance abuse, general health, and sexual health. these discrepancies in access to healthcare can be observed all over the world. if we look at statistics, 22% of urban people lack access to healthcare in contrast to 56% of rural residents which gives us a concrete idea of how compelling the situation is for rural health . as young people relocate to urban centers for education and work, these places frequently encounter a population decrease. aging rural populations are the outcome, and they have greater health demands and less capacity for communal activities. the life expectancy and health conditions of people who live in rural and distant areas are lower than those in urban areas. rural health disparities refer to these types of variations in risk. 
rural residents experience social exclusion as a result of socioeconomic disparities, poverty, and lower employment rates, as well as disparities relating to services, resources, and transportation. all of these factors also influence the quality and quantity of healthcare personnel who are willing to provide services in faraway areas. this number will continue to decline in the future and has already become a core problem in delivering primary healthcare which is also a headache for healthcare policymakers around the globe. people in rural locations find it particularly challenging to receive healthcare and maintain good health because of this. countries encounter significant difficulties in service delivery, human resources, governance, financing, communication, and in some areas, corruption when formulating a rural health policy . in every nation, rural residents' health is in poorer shape than that of their urban counterparts.over the years, a persistent cycle of unfavorable, self-fulfilling beliefs regarding rural education solidified itself in the minds of policymakers. this low expectation resulted in insufficient effort and resources, which in turn made subpar standards normal. when rural high school graduates try to join the workforce, their prospects are not always tightly linked to urban economies as those of their urban counterparts. the major reasons behind such a trend are the unavailability, and lack of awareness regarding resources available online for free among village people. this is especially true for third-world countries. on the other hand, in advanced capitalist countries, the idea of public education has developed alongside the phenomena of urbanization. as a result, there is an assumed and seemingly natural relationship between the growth of education and urbanization. in fact, the notions of urbanization, bureaucratization, and education are often used as a stand-in for modernity . rural schooling is also impacted by the prolonged problem of rural poverty.a rural economy can contribute significantly to producing employment, fostering economic growth, and fostering sustainable development. among the extremely poor population of 1.2 billion around the globe, 75% reside in rural areas. this statistic shows the potential for improvement as well as negligence toward the rural economy. the impoverished in rural areas face several obstacles while trying to compete as customers, producers, or business owners. routine commercial interactions are more challenging and expensive for businesses in rural locations due to geographic isolation. rural residents may even find it more challenging to obtain government funding possibilities due to the frequent lack of professional staff needed to compose and submit competitive grant applications, especially in limcs (low-or middle-income countries). lack of diversity in the rural economy, suggests that agriculture dominates as the primary form of livelihood in rural surroundings. this can be used to our advantage. concentrated efforts in agriculture in the form of structure, investments, and training can result in significant improvements in the rural economy.the rest of the parts of this chapter is aligned as follows, section 2 describes previous models of smart villages from different perspective proposed by the researchers. in section 3, we demonstrate our proposed model of a smart village in view of digital agriculture applications in four specific areas. 
finally, section 4 concludes this chapter. rural health refers to the well-being of residents of rural areas, who are typically more remote from medical facilities and other services than their urban counterparts. if we look at statistics, 22% of urban people lack access to healthcare in contrast to 56% of rural residents, which gives us a concrete idea of how compelling the situation is for rural health. according to the european network for rural development (enrd), "smart villages are communities in rural areas that use innovative solutions to improve their resilience, building on local strengths and opportunities". jan dröge, director of the european network of broadband competence offices support facility, states that "smart villages are not exclusively about digital services, but digital transformation can be an important element of rural development and the regeneration of rural areas". for instance, the millennium village project (mvp) was a high-profile, cross-sectoral rural development program, which was launched across 10 different sub-saharan african villages in 2005, with a view to attaining the millennium development goals (mdgs) by 2015. fostering rural evolution, bolstering the rural economy and entrepreneurship, abetting rural viability and resilience, conservation of the rural atmosphere, preservation of natural deposits, developing climate consciousness, advocating knowledge and creativity, reinforcement of rural administration, effective implementation and elucidation of policies, and monitoring of performance and developing a sense of liability were announced as the base for a thorough and integrated rural and agricultural policy in cork 2.0. a major contribution of this initiative involves the formation of a thematic group consisting of leaders and professionals from all over europe to create a model blueprint for the boosting and furtherance of the smart village concept. the commission's pledge to augment support for rural administrations and communities that aim to establish smart villages was reaffirmed in its statement on the future of food and farming, which was released in november 2017. further, the bled declaration of 13 april 2018 stated that "the rural digital economy, if developed in an innovative, integrated and inclusive way, has the potential to improve the life quality of rural citizens and, thereby, contribute to tackling the current depopulation of, and the migration from, rural areas". in south korea, rural tourism was adopted to deal with the issues of existing economic and social disparity between rural and urban areas in the early 1990s. the major barrier to the digitalization of rural regions in finland, according to the 2016 smart countryside research performed by the finnish ministry of transport and communication, is the lack of digital competence and interest among a huge percentage of the rural people. according to the international society of precision agriculture (ispa), "precision agriculture is a management strategy that gathers, processes and analyzes temporal, spatial and individual data and combines it with other information to support management decisions according to estimated variability for improved resource use efficiency, productivity, quality, profitability and sustainability of agricultural production".
to establish precision agriculture as a primary element of the smart village movement, it is customary to find ways to fight obstacles such as insufficient internet access, poor digital infrastructure, deficiencies in digital skill development and digital literacy, etc. implementation of such smart irrigation systems also contributes massively to water conservation by controlling the supply of water depending on smart sensor data. the role of digital agriculture in transforming rural areas into smart villages is becoming increasingly important. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/874.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/874.txt new file mode 100644 index 0000000000000000000000000000000000000000..138b38fe603d3debfd3318fe749929f8829e5791 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/874.txt @@ -0,0 +1 @@ +the emergence of artificial intelligence (ai) has created a tremendous technical change in recent years. as defined by marvin minsky, artificial intelligence (ai) is the science of making machines do things that would require intelligence if done by men. this topic began as a research area of computer science engineering, but due to its significant absorption of ideas from neurology, cognitive science, philosophy and other disciplines, it has become extremely interdisciplinary, making it difficult even for experts to find an agreeable definition of artificial intelligence. it is a system that has capabilities (such as language or perception) and intelligent conduct that were originally thought to be exclusive to mankind and carries out a certain task. in simpler terms, artificial intelligence (ai) is a discipline of computer science dealing with the emulation of human intelligence by behaving intelligently. this formidable technology has brought about a shift in the way we live in the world. by incorporating ai-based solutions, sectors including manufacturing, healthcare, etc., are undergoing a sea change in their operational methodologies. around the world, the education sector exhibits a similar pattern. given the largely advantageous digital changes that ai brings into the system, ai has undoubtedly created challenges to traditional ways of education. for the past 30 years, researchers have been studying the integration of artificial intelligence in education (aied). aied has achieved significant success in strengthening connections between teachers and students where the connections were lacking or needed improvement. with the use of ai, effective teaching techniques, evaluation systems, and feedback mechanisms can also be introduced. additionally, weaknesses in the existing systems can be identified, and a variety of student responses like boredom and concentration can be captured to make the learning environment more interactive. this essay provides a survey of the most recent advancements in artificial intelligence in education. it starts out by going over numerous fields of education and learning that have made use of ai, then shifts to the areas on which we see the industry concentrating, and it ends with a remark on further fields of development with ai in education, providing a succinct overview of the domain.
furthermore, given the interdisciplinary nature of aied, relevant research is frequently published in conferences on ai and learning science such as the international conference on learning representations (iclr), the international conference on machine learning (icml), the international conference of the learning sciences (icls), and neural information processing systems (neurips). it is essential for teachers to upgrade and retrain themselves in order to adapt to this generation, particularly the new skills they must pick up in order to fully profit from aied. in order to evaluate the data supplied by these ed-tech tools and to decide what kind of data and analytics tools they will need to better understand students, teachers will also need to develop their analytical skills. another study introduced a neural geometric solver that is capable of automatically solving geometric problems, trained using a dataset named geoqa; the study also proposed the usage of jigsaw location prediction, geometric elements prediction, and knowledge points prediction using neural nets and an lstm as an encoder-decoder pair. a study by singh and karayev developed a model for automatic handwriting text identification from images and translation into text, all done in sequence using neural networks and transformers, which may be used for easy document generation, including notes, and easy assignment verification. ocr is a well-studied topic, but latex-based ocr is a new concept described in a recent study that uses neural nets to directly output text from a latex-based document, which can help educators decode research papers and keynotes to text while saving time and effort. the field of ai for education must continue to advance in order to benefit not only education (increasing inclusivity, helping students grasp complex ideas, and increasing accessibility), but also the development of reasoning-capable ai systems. the development of ai systems that support human capacities should be considered in addition to the development of autonomous ai systems. to construct robust ai systems in the field of education, study must be done in the following areas: (a) similar to the computer vision field, we need better benchmarking datasets such as imagenet and coco; (b) neuro-symbolic ai that can ensure better ai-for-education systems. utilizing cutting-edge technology like text mining, learning analytics, and data visualisations is also required to progress aied research. emerging educational research approaches, in particular educational design research (edr), are ideal for investigations of revolutionary technologies like aied and are strongly advised because they enable educators to integrate their research inquiries as part of the technology development and implementation cycle in real-world settings. the main highlights of the paper are to make readers cognizant of various problems that exist in today's education system, what technologies exist in today's ai that can fill those gaps, and in what manner today's ai technologies lack the capability to build next-generation ai educational tools. as ai in education can have both positive and negative side effects, the various stakeholders should carefully weigh its trade-offs before deploying ai technologies in the real world.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/875.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/875.txt new file mode 100644 index 0000000000000000000000000000000000000000..34c1aaa7b6d05868381582c82f10323597a20c09 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/875.txt @@ -0,0 +1 @@ +the electrification of automotive power systems is a mainstream solution for current nevs (new energy vehicles). according to the nev roadmap proposed by china in 2020, it is estimated that by 2035, china's nevs will account for more than 50% of total vehicle sales, of which battery electric vehicles (bev) will account for more than 95% of nevs (1). compared to fuel vehicles, electric vehicles have several advantages, including fast acceleration and zero emissions. however, range anxiety is one of the main obstacles to the popularization of electric vehicles under current technical conditions (2). one of the most widely used definitions of range anxiety is by rauh et al. (3), in which, range anxiety is defined as "a stressful experience of a present or anticipated range situation, when the range resources and personal resources available to effectively manage the situation (e.g., increase available range) are perceived to be insufficient". the range anxiety is manifested as the driver's uncertainty about whether they can reach the destination with the vehicle's remaining battery capacity, resulting in cognitive, emotional, behavioral, and physiological changes. thus, understanding the factors leading to range anxiety can help alleviate bev users' range anxiety from vehicle design, infrastructure construction and driver training perspectives of view.in bevs, the range estimation systems (ress) can support drivers' recharging decisions before or during the trips. however, the range estimation of the bev is highly susceptible to the traffic environment (e.g., congestions or not), natural environment (e.g., temperature and weather) and drivers' driving habits (e.g., aggressive or conservative). the difficulty in accurately estimating bev's range capability may exaggerate drivers' range anxiety. although the ress in bevs have become more and more reliable with the development of technology in recent years, drivers' trust in these systems is questionable. according to the range anxiety model by rauh et al. (3), trust in the res is one of the dominating factors of range anxiety. however, to the best of knowledge, the factors influencing drivers' trust in the ress are yet to be explored.the res can be regarded as a specific type of automation, which helps users to integrate different parameters of the battery, provide an estimation of the current status of the power system and give energy replenishment suggestions when necessary. thus, the framework of trust in automation may be adapted to inform the formation of trust in res. in hoff et al. ( 4), a threelayer framework has been proposed to conceptualize trust in automation. according to this framework, the trust in automation can be organized into three facets: dispositional trust, situational trust, and learned trust. each facet can be further influenced by a number of factors. 
specifically, dispositional trust refers to "long-term tendencies arising from both biological and environmental influences" (e.g., age and gender of the users); situational trust can be associated with both external variability (i.e., types of the system, its complexity and the difficulty of the tasks) and internal variability (i.e., transitory characteristics that depend on the context); and learned trust refers to "an operator's evaluation of the system learned from past experience or current interactions" (e.g., knowledge of the system). this framework, according to (4), can be used in a variety of scenarios with human operators and automated systems in the loop. however, this framework only provides guidance on potentially influential factors of trust: it may not list all factors that can be related to the ress in bevs and, conversely, not all factors listed in hoff et al. (4) are relevant to ress. in this study, following the trust in automation framework by hoff et al. (4), a questionnaire was designed and distributed to investigate the factors that may influence bev users' trust in ress. further, we investigated how trust in the ress can affect bev users' charging behaviors. this study has centered on bev users in mainland china. in 2021, china contributed 85 percent of ev sales worldwide (5). thus, targeting bev users in china can provide insights into users' attitudes toward bevs in a relatively mature market. considering the wide usage of smartphones and the prevalence of battery anxiety among smartphone users, in our study, we assessed bev users' experience with and attitudes toward their smartphones, aiming to identify the association between smartphone usage and trust in ress in bevs. two statistical models were built in "sas ondemand for academics", in order to investigate: 1) the influential factors of bev users' trust in res (trust model); 2) how bev users' trust, along with other covariates, may affect users' charging behaviors when using bevs (charging behavior model). as shown in table 3, gender, region of driving, system knowledge, and trust in smartphones were found to be significant factors of users' trust in res in bevs. as for the continuous variables, system knowledge was found to be negatively correlated with trust in the res of bevs, while trust in the soc estimation of smartphones was found to be positively correlated with trust in the res of bevs. the results have provided evidence to support the adoption of the framework by hoff et al. specifically, gender, as a dispositional-trust-related factor, has been found to influence bev users' trust toward res, with females showing higher trust toward res. northern drivers exhibited the least trust toward res of bevs; and eastern drivers exhibited marginally less trust toward res of bevs compared to southern drivers, potentially because of the impact of situation complexity and task difficulty on users' level of trust in the system (27). at the same time, a positive relationship between system usability and trust in res of bevs has been observed, indicating a well-designed system can increase users' trust in it (19). in other words, users who trust more in soc estimation in smartphones might be those who are prone to show high trust in automation, and it is not surprising they would show high trust in res of bevs.
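a minimal sketch of the kind of trust model described above, written in python rather than the authors' sas environment; the csv file and column names (trust_res, gender, region, sys_knowledge, phone_trust) are hypothetical placeholders, not the study's actual variables.

import pandas as pd
import statsmodels.formula.api as smf

# hypothetical survey export; one row per respondent
df = pd.read_csv("bev_survey.csv")

# trust in the res explained by dispositional (gender), situational (region)
# and learned (system knowledge, smartphone soc trust) factors
trust_model = smf.ols(
    "trust_res ~ C(gender) + C(region) + sys_knowledge + phone_trust",
    data=df,
).fit()
print(trust_model.summary())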
trust in res of bevs, surprisingly, did not influence bev users' charging behaviors but we have observed a connection between users' phone charging behavior and bev charging behavior. in this study, through an online survey, we investigated the factors that can influence bev users' trust in res, and further explored how trust in res as well as bev users' experience with other electronic devices can influence bev users' charging behaviors. the results show that the dispositional-, situational-, and learned-trust related factors all affect users' trust in res of bevs, supporting the validity of the three-layer trust framework (4) among bev users. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/876.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/876.txt new file mode 100644 index 0000000000000000000000000000000000000000..74f8d874230d05f32a21dc800448070ba4367528 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/876.txt @@ -0,0 +1 @@ +the healthcare industry is growing rapidly in the united states because of the increased number of the aging population, shared consciousness of personal health problems, and medical technology improvements. as a result of the growing industry of healthcare, new emerging issues occur in the collection and storage of patient data, and new ways to process, analyze and distribute these data. this has exposed various security threats to personal health data (lee, 2022). as (al-harrasi et al., 2021) remarked "data are considered to be intellectual assets in organisations worldwide. data theft is a major part of the insider threat and, for this reason, the prevention of data theft and data leakage from insider threats is becoming a major area of interest within the field". in this case study, we will discuss the issues vanderbilt university medical center (vumc) challenges while implementing ehr systems which are used to analyze and monitor health records by the users such as doctors, organizations staff, and pharmaceutical agencies (kaul et al., 2020), and we will analyze these issues and provide solutions and recommendations to solve them.in 1997, vumc started gathering data as part of its ehr initiatives. the center required greater, more structured data handling by 2009. hospital executives started an initiative to create a data governance infrastructure at that time. putting data governance into practice. the executive staff at vumc had various challenges:• although it investments and technologies were constantly growing, him (healthcare information and management) policies did not apply to them. all data are not created equal, and using technology by itself won't help patients receive better care. providers and organizations must be able to tell the difference between a surplus of data, useful data, and data integration. whereas new technology and treatment modalities are changing and expanding at a rapid rate, healthcare companies are challenged to face these data issues in their daily operations and workflow. • medical records became increasingly susceptible to hacking, as they were made electronically so they could be shared and transmitted easily. cybercriminals are drawn to healthcare data because it contains financial and personal information, can be used as leverage in extortion, and-most lucrative of all-is perfect for fraudulent invoicing. 
due to the fluid and constantly changing nature of a patient's medical care as well as the sheer volume of physicians, facilities, and transactions necessary to connect patient care across various settings, they are also incredibly susceptible to penetration. • keeping up with the emergence of new electronic information applications was challenging for the medical facility. as (curioso et al., 2022) observed, "the health industry faces significant obstacles in resource-constrained environments, including effective health innovation projects and health management programs. additionally, one pertinent difficulty is enhancing health system operations to accomplish health sector goals. we risk digitizing chaos if procedures are not mapped, updated, or reviewed on a regular basis." providers and organizations must be able to tell the difference between a surplus of data, useful data, and data integration. when healthcare organizations want to develop one integrated system that can hold all the health records electronically by implementing ehr, there will be three main issues derived from this problem: faulty data, data breaches, and the cost of failure. data will not be so accurate or complete in ehr systems because of improper lab and imaging results or medical errors introduced in physician documentation; these will damage patients' data and the accreditation of organizations. as (li et al.) noted, these data breaches arise from data stolen from laptops or other portable devices; such breaches destroy the trust of patients and are very expensive to recover from. as (lee, 2022) commented, new advanced techniques are rapidly increasing because of cybercriminal attacks, which makes it more difficult to protect patients' data; such attacks occur because of outdated systems, insufficient experience of cyber staff, and extremely valuable data that offers strong incentives to pay off. medical errors could also result from systems' inability to send alarms regarding dangerous prescription combinations, which can happen as a result of facility modifications, changes in how physicians enter data, or problems with ehr design (munana et al.). assuming the whole u.s. population is registered in an ehr system and roughly 21% of records contain such errors, we can calculate the approximate number of people who might face these errors in their ehr in the year 2019: 328,239,523 × 21/100 ≈ 68,930,300. up to 30 million patients might have an incorrect medical history, and about 3 million will have billing errors. this is a huge number to detect and it affects the healthcare industry in a negative way: patients may lose trust in hospital systems because of wrong patient histories and personal data, incorrect lab results and medications can cause faults in medical diagnosis, and billing issues will affect hospitals' finances. also, if there are some existing low-quality information source providers, it will be hard to estimate the correct source reliability, for example improper sensors that release incorrect data and false data spread by spam users.
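a back-of-envelope version of the estimate above, using only the figures quoted in the passage (the 21% error share is inferred from the arithmetic shown):

# population figure and error share as quoted in the passage above
us_population_2019 = 328_239_523
error_share = 0.21

affected = us_population_2019 * error_share
print(f"{affected:,.0f} people with at least one ehr error")  # ~68,930,300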
this causes a significant gap in the departments' patient data, and resolving these issues requires additional work; such problems include faulty data, data breaches, and risk to patient safety. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/877.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/877.txt new file mode 100644 index 0000000000000000000000000000000000000000..be10b3e3f82fc7c97302035ca6a7816e1cfda0ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/877.txt @@ -0,0 +1 @@ +when used inappropriately, the internet becomes a major problem, and harmful internet use (hiu) especially affects the younger generation. according to , problematic internet use has been linked to behavioral addiction, major depressive disorder, attention deficit/hyperactivity disorder (adhd), sleeping disorders, cognitive deficits, and suicides. terms like "internet addict" have commonly been used to recognize the burgeoning destructive potential of excessive internet use or being attracted to illicit pastimes. in this study, a rating scale is utilized as a tool for quantitatively measuring a new social phenomenon: hiu. considerable efforts are being made to improve its accuracy and reduce the uncertainty of measurements when enhanced by peer assessment and analyzed with the help of differential evolution. the netflix september 2020 release of the docudrama "the social dilemma" (see ) has generated considerable publicity related to hiu, and is a prime illustration of the need for improved measurement of hiu. the social dilemma is a pivotal docudrama (see ) which delves into the dangers of social media in particular. by interviewing the designers of social media platforms, the film makes a compelling case that social media poses a viable threat to civilization itself. social media companies constrain us into adopting modes of thinking and behaving in ways that are profitable for corporations, rather than thinking and behaving in ways that are based on our own goals, beliefs, or values. designers of social media platforms force us to give our time away to corporations selling 'big data' to their clients. we argue that the approach presented here should be used to measure the harm suggested in the social dilemma. group assessments can be improved by: , , , and . another type of harmful internet use is presented by . analysis of question q2 shows that 39% of children see a problem related to avoiding social contacts. negative effects of hiu on school performance are seen as a problem by only 26% of parents and 22% of children. the analysis of question q4 shows that almost 78% of children and 54% of parents believe that hiu does not impair their health, hygiene and eating patterns. analysis of question q5 shows that, consistent with the previous question, nearly 67% of children and 46% of parents do not view the avoidance of other activities as a problem negative to the child's development. the results of the analysis of question q6 show that nearly 73% of children and 59% of parents have not even attempted to reduce their hiu. the majority of respondents (56% of children, 47% of parents) believe that hiu does not apply to them or their children. however, question 8 was evaluated by parents in a way indicating that male children have more problems than female children. a higher rating in the other questions (q3, q4, q5 or q7) increases the chance that the person has unsuccessfully tried to quit an unwanted activity.
in this model, the care rating of parents or acquaintances (q3) has the strongest positive impact on the assessment obtained in question (q6). the relationship between gender and the hiu rating obtained in the question (q8) is shown by the logit binary model.for parents' (model m4), (q6) infers that there is the largest positive dependence between "the stop hiu question" (q6) and the avoidance of other activities (q5). the parents' increased concern (q3) and the fact that the respondent knows that the person has a hiu problem (q1) can be an important predictor of hiu. however, the high rating obtained in the hiu (q8) strongly implies educational problems (q7) for both genders. a slightly smaller impact on rating (q7) has a reduced hygiene and food eating pattern (q4) and poorer interpersonal relationship (q2).model m6 shows the relationship between the intensity of the hiu (q8) and the subject's gender (q9). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/878.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/878.txt new file mode 100644 index 0000000000000000000000000000000000000000..6cab91c542c64694d384f31b36cb126e670bbcb1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/878.txt @@ -0,0 +1 @@ +applications of artificial intelligence to artistic processes (creative-ai1 ) are becoming more common, and media attention towards these applications is large. at the focus of many media contributions is the question when a human artist-genius will be replaced by a similarly genius and autonomous ai. but a quick look at various widely discussed ai art projects clarifies that in most cases the ai acts far from autonomously, and that the symbolic realm of the artistic creation is densely connected to a material dimension. the immersive media installation titled "archive dreaming" by refik anadolu creates a "canvas with light and data applied as materials", and facilitates both user-driven and automatic exploration "of unexpected correlations among documents". the project was realised with the support of 20 collaborators at three institutions, and the ai comprises a model that has been trained on 1.7 million documents using the computational power provided to the artist by google's artists and machine intelligence program (anadol 2017). the completion of the composition of beethoven's 10th symphony was approached by a group of music historians, musicologists, composers and computer scientists over a period of several years involving extended data preparation and ai model design (elgammal 2021). the novel "1 the road" was written by an ai, but the artist ross goodwin carefully orchestrated the sensor inputs that would feed the ai model during a road trip. the creative process involved not only goodwin, his car, and an ai, but also a film crew accompanying him and documenting the road trip from new york to new orleans (hornigold 2018). in comparison to the ai beethoven project, however, the artist took the deliberate choice to present the ai output in its raw form, with the motivation to illustrate limitations and potential of such an autonomous machine creation.besides the formation of an ai model in the context of a specific artwork, many companies have shaped tools for media production that use -or at least claim the use -ai as a means to support the creative processes of the users of the tools. 
examples are the use of ai for mastering the sound of a music production 2 , audio restoration3 , or the scene analysis and filtering of images based on ai by photoshop (clarke 2020). such systems have been called creative support tools (cst), and these tools have been well-documented within hci research for over a decade (frich et al. 2019;gabriel et al. 2016). based on the literature review of 143 papers, frich et al. (2019) define cst as a tool that "runs on one or more digital systems, encompasses one or more creativity-focused features, and is employed to positively influence users of varying expertise in one or more distinct phases of the creative process". following this definition, a creative-ai tool is simply a cst that employs artificial intelligence. demystifying this latter buzzword, artificial intelligence in the context of creative-ai (and in most other contexts) is nothing but data-driven analysis and/or synthesis of media data, and the intelligence that emerges from training processes can be fairly considered as narrow. that is, all tools for artists and designers so far comprise an intelligence that can at least partially conduct a creative process in a fairly restricted context. the beethoven ai can continue phrases in the style of one specific composer, and the archive dreamer can hallucinate novel data in the context of one specific archive. going beyond these borders demands adapting and re-training models, which demands on the material side new collections of data and expenditure of energy for the training of models.the goal of this chapter is to develop a perspective on the environmental impact of creative processes that make use of ai, when we consider them as part of an economic system that transforms artistic creation to a commodity. i will discuss specific creative-ai cases and their environmental impact along with the role of large corporations involved in creative-ai development. already in the 1970s, the economic and social theorist jacques attali (1985) predicted a situation that strongly relates to current business plans by the largest music streaming provider spotify. as we will see, prospects of using creative-ai as a collaborative tool that supports creative processes promise an extension of the horizon of artistic possibilities, but such optimistic perspectives neglect the role of large corporations as gatekeepers to systems and infrastructures. i believe that it is timely to combine political, environmental, and sociocultural perspectives on creative-ai to better understand how the power exercised by capital and the environmental impact of creative-ai may motivate researchers and artists to adopt a more critical perspective. the chapter -written by an ethnomusicologist/computer scientist -is characterised by a certain focus on music, but its general implications can be transferred to other forms of art.besides the formation of an ai model in the context of a specific artwork, many companies have shaped tools for media production that use -or at least claim the use -ai as a means to support the creative processes of the users of the tools. i believe that it is timely to combine political, environmental, and sociocultural perspectives on creative-ai to better understand how the power exercised by capital and the environmental impact of creative-ai may motivate researchers and artists to adopt a more critical perspective. 
when it comes to potential environmental consequences of creative-ai use, it is unclear how large the energy consumption and related carbon footprints of specific artistic projects may be (jääskeläinen et al. 2022). on this front, information from existing ai-art projects allows us to obtain an overview of the most common architectures and training data sizes. it is essential for an understanding of the political ecology of creative-ai to indicate the orders of energy consumption and/or carbon emissions related to training and developing certain models used in artistic contexts. as openai does not provide information on how much energy the whole r&d process required, let us assume, in analogy to strubell et al. (2019), that the energy consumption of the overall r&d may have been three orders of magnitude larger than training the network a single time, which would send us around the planet 18,000 times by car, or supply a small swedish city with energy for one year. these two examples indicate that artistic projects that involve the use of creative-ai may involve large energy consumption for training the needed networks. as there is no reason to assume that this process is simpler, the development of an ai for artistic purposes can involve many iterations of training and result in energy consumption that is orders of magnitude higher than training a network a single time. all this evidence implies that the carbon footprint resulting from artistic projects involving creative-ai is likely to be large compared to artistic projects with similar outcomes but not involving creative-ai. an equitable engagement of all stakeholders of ai applications in a discussion of the environmental impact of ai has been argued to be a cornerstone of a third wave of ai ethics (van wynsberghe, 2021), with the possible conclusion to refrain from using ai in certain application areas. with most artists who employ creative-ai having acquired the programming skills that are needed to run inference and training of models in environments such as python, it seems a realistic suggestion to employ tools to estimate energy consumption of the development and to report such estimates when publishing the artwork. as we observed in jääskeläinen et al. (2022b), in the design of interactions and experiences with creative-ai all current interfaces to ai models have distanced the actual consequences of the actions very far away from the users of these technologies: the amount of energy required to perform some inference and/or training remains concealed, just as the kind of energy that was employed by some remote servers involved in the computations. other examples include user evaluation tools to identify the best version of an automatic composition system (pachet & roy 2022b). the speculation that i presented in this chapter illustrates how the combination of the environmental impact of creative-ai with the increasing commodification of artistic creation by means of creative-ai is very likely to have severe consequences for cultural practices, the livelihood of artists, and the ecosystem.
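as an illustration of the kind of estimate argued for above, a minimal sketch of a training-energy and carbon calculation in the spirit of strubell et al. (2019); every number here is a placeholder assumption, not a measurement from any of the projects discussed.

# rough single-run training estimate; all values are assumed for illustration
gpu_power_kw = 0.3          # average draw per accelerator, in kw
num_gpus = 512              # accelerators used in parallel
training_hours = 24 * 14    # two weeks of wall-clock training
pue = 1.5                   # datacenter power usage effectiveness (overhead)
kg_co2e_per_kwh = 0.4       # grid-dependent carbon intensity

energy_kwh = gpu_power_kw * num_gpus * training_hours * pue
co2e_tonnes = energy_kwh * kg_co2e_per_kwh / 1000
print(f"single run: {energy_kwh:,.0f} kWh, {co2e_tonnes:.1f} t CO2e")
# full r&d with many training iterations can be one or more orders of magnitude larger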
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/879.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/879.txt new file mode 100644 index 0000000000000000000000000000000000000000..fef0526cd801565b20c74f509c2a561dbb900546 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/879.txt @@ -0,0 +1 @@ +while artificial intelligence (ai) technologies are progressing fast, compliance costs have become a huge financial burden for ai startups, which are already constrained on research & development budgets. this situation creates a compliance trap, as many ai startups are not financially prepared to cope with a broad spectrum of regulatory requirements. particularly, the complex and varying regulatory processes across the globe subtly give advantages to wellestablished and resourceful technology firms over resource-constrained ai startups . the continuation of this trend may phase out the majority of ai startups and lead to giant technology firms' monopolies of ai technologies. to demonstrate the reality of the compliance trap, from a field deployment perspective, we delve into the details of compliance costs of ai commercial operations.while artificial intelligence (ai) technologies are progressing fast, compliance costs have become a huge financial burden for ai startups, which are already constrained on research & development budgets. to demonstrate the reality of the compliance trap, from a field deployment perspective, we delve into the details of compliance costs of ai commercial operations. based on the oecd regulatory compliance cost assessment guidance, we quantitatively compare the financial vulnerability of tech giants versus ai startups. the actual situation of ai startups is more complex than this estimation, as it is nearly impossible for external analysts to estimate the compliance costs. for instance, in an impact assessment report of the europe ai act, the estimated annual compliance cost of one ai product that averagely costs eur 170,000 to develop is eur 29,277, we believe this study has underestimated the actual costs of ai compliance.ai is a highly regulated industry, but unfortunately, there is no standardized ai regulation framework, and hence compliance costs often become a financial trap for ai startups. 
most ai entrepreneurs may not even be aware of the existence of compliance costs, let alone the severe impact compliance costs may have on the company's overall financial health. (figure 1: the compliance trap for ai startups.) first, unlike r&d budgeting, due to varying ai regulatory frameworks across the globe or even across multiple regions within a country, there is no standard method to budget for ai compliance costs. second, even with an ai compliance budget, the actual costs may significantly deviate from the budget. in this section, with more than six years of first-hand experience in deploying commercial autonomous driving services, we delve into the details of compliance costs from a field deployment perspective, in the hope that the insights we provide can raise awareness of the adverse impact of the lack of standardized ai regulations. in the case of perceptin, the compliance cost of one deployment project is $344,000 on average, whereas the average r&d cost is around $150,000, making the compliance costs roughly 2.3 times the r&d costs. however, before a consensus can be reached regarding the golden standard, a new business model, compliance-as-a-service (caas), can specialize in dealing with varying ai regulatory frameworks and thus amortize compliance costs across different ai startups. unfortunately, the lack of standardized ai regulatory frameworks creates a compliance trap that may destroy an ai startup financially, which could lead to a more profound impact of creating a competitive advantage for tech giants over ai startups. ideally, if a global golden standard on ai regulation could be developed, then ai startups could accurately budget for compliance costs. however, before a consensus can be reached regarding the golden standard, we believe that a new business model, compliance as a service, can specialize in dealing with varying ai regulatory frameworks and thus amortize compliance costs across different ai startups.
consequently, research on tweaks that will work most likely only for particular benchmarks is abundant. furthermore, it is not clear how progress on a benchmark relates to real-world value. in contrast to, say, pretrained imagenet features or bert sentence embeddings, which are widely applicable to real world applications, solving rl benchmarks currently does not yield tangible value.2. wrong focus. most research focuses on sample complexity for a given benchmark. this is hardly the case in practice -compute can be cheap relative to engineering effort, acquiring labelled demonstrations can significantly speed up learning, and the development process must consider issues like overall system stability, testability, ease of debugging, interpretability, integration with other components/agents, etc. current benchmarks ignore the deployable nature of situated (rl-driven) agents completely, focusing on algorithms rather than on a system/engineering view. this is most clearly represented in the prevalent openai gym api, which abstracts away all 'system-design' issues for quickly making progress on sample complexity. while initially a good idea, this hampers progress since in many practical problems, just figuring out what are useful states, actions, and rewards is a critical component of the development process.3. detached theory. what should be the role of theory in modern rl research? there is no real system with small finite state and action spaces. ideally, theory should help understand phenomena observed in practice, and suggest algorithmic ideas. while "nothing is more practical than a good theory" (lewin, 1952), useful theory seems to be quite rare. some reasons are: regret minimization is overly pessimistic; there is a lot of prior knowledge (in the algorithm design, parameter choices, etc.) that is not accounted for in the theory; finite state and actions is not a good model for many problems of interest; and a focus on unimportant quantities that nobody cares about.4. uneven playing grounds. measuring the performance of an algorithm on a benchmark is confounded by the resources available to the implementer, such as proficiency in hyperparameter tuning, the size of the neural network trained, or prior knowledge about the problem/solution. the variability in the scale of experiments and software engineering affordable by different researchers (e.g., academia vs. industry), and the current trend at top conferences to prefer massive experimentation over conceptual novelty, can inhibit long-term progress.5. lack of experimental rigor. impressive singular experiments sometimes give a false sense of progress. while good pr click-bait, these may only disappoint as they preclude research on "solved" problems which are in fact far from solved. for example, the simple 2d procgen maze benchmark remains unsolved. we need more rigorous evaluation of difficulty and success. moreover, for industry to adopt an approach, an impressive result is not enough -stability, development time and cost, testability and life-cycle issues are critical. currently, the publication standard is that failure cases are almost never reported, stability is impossible to tell, and software design issues are not even discussed.since deepmind's breakthrough results of applying deep rl to atari games (2015), rl has been lauded in the ai community, with promises ranging from 'a path to agi', through 'the key to self driving cars', and up to 'solve all planning problems'. 
in contrast to, say, pretrained imagenet features or bert sentence embeddings, which are widely applicable to real world applications, solving rl benchmarks currently does not yield tangible value. this is hardly the case in practice -compute can be cheap relative to engineering effort, acquiring labelled demonstrations can significantly speed up learning, and the development process must consider issues like overall system stability, testability, ease of debugging, interpretability, integration with other components/agents, etc. while initially a good idea, this hampers progress since in many practical problems, just figuring out what are useful states, actions, and rewards is a critical component of the development process. what should be the role of theory in modern rl research? there is no real system with small finite state and action spaces.there are two main dogmas in the rl community on how to make progress in solving real world decision making problems. the first, which we refer to here as the 'generalist agent' view (sometimes called "rl first"), is that future progress will be made by focusing attention on large-scale training of agents that solve diverse problems, with the hope that along the way a generalist agent will develop, and will be a useful component in various real world problems. the second view, which we refer to as 'deployable rl', takes a more pragmatic view seeking to design rl algorithms that solve concrete real world problems (sometime this view is called "rl second"). changing this requires both research of how to deploy rl effectively, and also a better understanding of the gains that an rl solution may bring, which will eventually make it worthwhile to pursue. we propose a constructive model for research in rl which we feel is relevant for our current state of knowledge in the field, and will advance progress towards deploying rl-based solutions in the real world. that is, there is real world value in making progress on a challenge that holds regardless of the rl (or other) algorithm used to solve it. every publication that makes progress on a challenge either by suggesting a new algorithm, a positive or negative result, should explain the limitations and issues with the proposed new algorithm and how it addresses progress specifically. we are not opposed to working on theory of small finite models but the goal of the research should be well justified in terms of its potential impact on real world problems. similarly, real-world rl based systems should have conceptual solutions to problems where issues such as testability, debuggability, and other system life-cycle issues are addressed. a change of focus may be needed when writing papers and conducting research to focus on solving real problems that matter, and framing the research effort within the deployable rl principles. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/880.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/880.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a2b6faf2fb9008509b21d05dfb3796e49818393 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/880.txt @@ -0,0 +1 @@ +electronic health record (ehr) systems provide a secure, integrated collection of patient and population electronically-stored health information in a digital format (odekunle et al., 2017;kukafka et al., 2007;akanbi et al., 2012;adetoyi and raji, 2020;kavuma, 2019;kohli and tan, 2016); it provides a comprehensive digital view of a patient's health history with the goals of eliminating legibility problems with handwritten records; enabling remote access of health records; facilitating intervention earlier in the course of the disease, patient care, and outcomes; increasing efficiency and lowering costs; and ameliorating billing procedures (schmitt and wofford, 2002;erstad, 2003). the potential benefits of ehr systems have enabled its wide adoption in developed and some emerging countries (black et al., 2011).while most developed countries are taking advantage of ehrs to improve their healthcare system, it remains challenging in developing countries to support clinical decision-making and public health using a computerized patient healthcare information system. some developing countries including sub-saharan africa still predominantly use paper-based systems in healthcare delivery, instead of computerized patient management systems (odekunle et al., 2017;akanbi et al., 2012;adetoyi and raji, 2020;kavuma, 2019;kohli and tan, 2016). the lack of an ehr system may lead to issues in managing patient health data to improve the quality of patient care and safety through decision support to clinicians. for instance, patient p lives in city x, travels to city y in the same country and falls sick during her stay. since clinician c in y does not have more health data about patient p, (i) treatment options provided to p could cause some important problems involving past health issues and (ii) prescription drugs delivered to p could ignore her medical history. medication errors can result in a substantial economic burden on patients, side effects, and irreversible consequences; there is a huge spectrum of medication errors. some errors may be minors and others may lead to adverse events causing complications and higher mortality (bates and slight, 2014;forster et al., 2008). however, ehr systems can potentially reduce prescription errors and adverse drug interactions (chaudhry et al., 2006) and make available medical history data during emergency care (stiell et al., 2003). this data provides vital medical history details and gives more options to clinicians to decide which treatment best corresponds to the problem and when it should be administered. we pose the question: "how could we replace paperbased systems with ehr systems in the context of developing countries?". a study identified some factors hindering the widespread adoption of ehr systems in developing countries. the identified fac- tors include but are not limited to high cost of procurement and maintenance, poor electricity supply and internet connectivity (odekunle et al., 2017). 
this paper therefore proposes an ehr architecture that addresses the previously mentioned factors. we believe that the implementation of ehr systems in the style of industrialized countries may fail to function and provide solutions in the context of developing countries. to implement an ehr system in developing countries, besides the aforementioned issues, we also address the issues related to social inclusion, discrimination and socioeconomic status in healthcare. everyone qualifies for health monitoring regardless of personal income or standard of living. we propose a straightforward architecture to implement an ehr system that fosters inclusion and provides solutions tailored to all social classes. the proposed architecture takes into consideration internet coverage, electricity, and infrastructure issues and foresees alternative solutions to skirt these issues. more interestingly, our architecture proposes an internet-free alternative (an offline solution) to allow medical transactions within hospitals and clinics and the storage of ehrs in geographically underserved and rural areas. note that the offline solution does not require relatively expensive terminals (such as computers, tablets, and smartphones) to establish connections between healthcare organizations. the motivation behind this solution is to bridge inequalities in healthcare and allow healthcare organizations with limited means to access ehr systems with any type of mobile phone that they possess. additionally, the proposed architecture foresees the utilization of artificial intelligence to enable better public health policy and surveillance in (i) monitoring patterns suggesting disease outbreaks, (ii) predicting disease based on symptoms, medical records, and treatments over time, and (iii) providing drug recommendations. the rest of this paper is organized as follows. a brief outline of some related work is given in §2. section 3 describes the proposed architecture. we discuss the scope of the proposed architecture, challenges, and opportunities in §4. we describe ethical considerations in §5. finally, we conclude and present future directions in §6. electronic health record (ehr) systems provide a secure, integrated collection of patient and population electronically-stored health information in a digital format (odekunle et al., 2017; akanbi et al., 2012). while most developed countries are taking advantage of ehrs to improve their healthcare system, it remains challenging in developing countries to support clinical decision-making and public health using a computerized patient healthcare information system. the lack of an ehr system may lead to issues in managing patient health data to improve the quality of patient care and safety through decision support to clinicians. in order to manage patient data, many studies addressed the problem of the implementation and adoption of ehr systems in the context of developing countries (adetoyi and raji, 2020; odekunle et al., 2008; fraser et al.).
ussd is a communication protocol used by mobile devices to communicate with a network service provider. an ehr architecture using ussd could enable healthcare providers to access and manage patient data and interact with the healthcare system, using simple text-based commands sent via ussd, even in areas with limited internet connectivity (see figure 2c). one of the advantages of the ussd-based ehr system (ues) over wes and mes is that it can be used on any type of mobile phone, even feature phones that do not have internet access; this means that it can potentially be accessed by a wider range of users, including those in rural or low-income areas where internet access may be limited. our architecture proposes ehr systems that respond to the limitations of (adetoyi and raji, 2020; kamadjeu et al., 2005; jawhari et al.; wong et al.). beyond ehr data storage and manipulation, we can utilize artificial intelligence (ai) to analyze ehr data to enable public health policy and surveillance in a number of ways. we show how this architecture fosters social inclusion and discuss how the use of ai, on data stemming from the proposed architecture, can help to improve the effectiveness of public health policy and surveillance efforts in developing countries. in the future, we would like to build ai models that use metadata-induced contrastive learning to (i) provide drug recommendations within an ehr system and (ii) learn patient representations from ehr data to predict dangerous cases of polypharmacy usage and discover sociodemographic biases in the outcomes of polypharmacy usage.
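a minimal sketch of what a session handler for the ussd-based access described above might look like; the CON/END response convention, the /ussd endpoint, and the menu text follow common ussd gateway apis and are assumptions for illustration, not part of the proposed architecture.

from flask import Flask, request

app = Flask(__name__)

def lookup_record(patient_id):
    # stub: the real system would query the ehr store (possibly an offline, local copy)
    return f"no record found for id {patient_id}"

@app.route("/ussd", methods=["POST"])
def ussd_session():
    text = request.form.get("text", "")        # user's inputs so far, e.g. "1*12345"
    steps = text.split("*") if text else []
    if not steps:
        return "CON ehr service\n1. look up patient record\n2. record a new visit"
    if steps[0] == "1" and len(steps) == 1:
        return "CON enter patient id"
    if steps[0] == "1" and len(steps) == 2:
        return "END " + lookup_record(steps[1])
    return "END option not implemented in this sketch"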
this immediately raises important questions regarding text as data, since text is obviously stored as data on a computer, but its underlying phenomenon (meaning) is of interest, rather than the specific encodings. inferring underlying meanings is the province of natural language processing, an area that has historically developed within the artificial intelligence community. however, classification and predictive analytics (breiman, 2001) within textual analysis provide a natural bridge to statistics and data science. for example, an analyst might be interested in a model for classifying a given social media post as either misinformation or a legitimate news item. such models are increasingly important in a world full of contradictory information sources. classification via 2x2 tables, logistic regression, and decision trees have become more common components of introductory statistics and data science education (zieffler et al., 2021; baumer et al., 2021; hazzan and mike, 2022) and thus provide an earlier entry point for student analysis of simple text-based features. limited prior pedagogical research has been undertaken in this relatively new area of text analysis in statistics and data science education. many efforts have been made to use text analytics to automate assessments and identify new insights, but few on student comprehension of the process behind producing data from text (gentzkow, kelly, and taddy, 2019; jiang et al., 2022). jenna: i feel like the ones that were clickbait used certain words, i don't know how to describe them, that were "dramatic" or "gimmicky" to indicate that the article was clickbait. like "scientists in germany draft the neanderthal" or "no union for fedex home drivers court rules" that's kind of i guess informative right off the bat and everything else seems to be more like just trying to pique your curiosity like "what's your favorite food" or "17 of you know photos that offer that rare look" and then "this one dollar trick will help you get amazing even eyebrows". it seems to be more like trying to pique your curiosity, not so much like an informative kind of thing like the first one, the other two that i mentioned. analysis of all six pairs' responses showed that the participants identified a wide range of features along three dimensions of clickbait: function, content, and form. with regard to form, a few participants also paid attention to the form of clickbait and discerned "certain words" and "individualized wording and language" in clickbait. "knowledge of science and current events" while all three types of features (function, content, and form) surfaced at this stage, the majority of the features were about the function and content of clickbait, which require human interpretation and perception. specifically, the participants were asked to first observe and "take note of any features of the headlines" that distinguish clickbait and news and then get together to discuss "the features". analysis of all pairs' notes showed that, in response to the second scenario, the participants identified more features for both clickbait and news headlines. they also identified many characteristics of clickbait's form: using "fluffy" adjectives, using second-person point of view with words like "you" and "your", referring to things directly, asking questions, suggesting a number of items for a phenomenon or problem, containing or starting with numbers, and containing words like "everyone," "this," "every," and "reasons".
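the surface features just listed lend themselves to simple computer-based rules. the following sketch is only an illustration of that idea; the word lists, the two-cue threshold, and the example headlines are chosen for the example and are not the participants' actual rules.

```python
# Illustrative rule-based check using the kinds of surface features listed
# above (second-person wording, "fluffy" adjectives, numbers, questions).
# The word lists and the two-cue threshold are invented for this example.
import re

FLUFFY_WORDS = {"amazing", "cute", "pretty", "really", "very"}
SECOND_PERSON = {"you", "your", "you're"}

def looks_like_clickbait(headline: str) -> bool:
    tokens = re.findall(r"[a-z']+", headline.lower())
    has_fluffy = any(t in FLUFFY_WORDS for t in tokens)
    has_second_person = any(t in SECOND_PERSON for t in tokens)
    starts_with_number = bool(re.match(r"\s*\d", headline))
    asks_question = headline.strip().endswith("?")
    # Flag the headline if it shows at least two of the cues.
    cues = [has_fluffy, has_second_person, starts_with_number, asks_question]
    return sum(cues) >= 2

print(looks_like_clickbait("this one dollar trick will help you get amazing even eyebrows"))  # True
print(looks_like_clickbait("no union for fedex home drivers court rules"))                    # False
```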
analysis of selected participants' interview data showed that the participants reasoned about how the form and content of clickbait or news headlines generate the intended effects, or their functions. the participants were further prompted to determine which previously identified features could be detected by a computer, to focus on those human-perceivable features, identify indicative structural elements of the text, design rules for a computer to detect them, and to create a list of as many attributes of the headlines for identifying clickbait using a computer. jenna and emily reasoned about the structures of clickbait and news headlines and identified the key words that make the clickbait either personal or exaggerating. leela: well, for the location thing there could be like a whole list of preset locations and then the computer could like match those locations with the words in the title and then if none of them come up then maybe put it into like a potential clickbait pile. interviewer: can you think of any ways that those rules could be broken down into similar sorts of computer-based rules? kevin: so like computer-based rules you mean like some kind of algorithm that's like you could create to really easily sort the list. most of the participants converged on detecting specific words for three characteristics: colloquial, indicated by words like "cute", "pretty", "really", "very", and "reasons"; fictitious, indicated by words like "hogwarts", "elf", and "hobbit"; and location-related, indicated by city or country names. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/882.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/882.txt new file mode 100644 index 0000000000000000000000000000000000000000..330715f68480814b5f161b1bdf9bfe34b6969b84 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/882.txt @@ -0,0 +1 @@ +the definition is apt but inadequate in providing a comprehensive analysis of the interdisciplinary nature of cybersecurity. the introduction of the concept of property rights shifted the discussion to human actors and expanded on the potential motivations of hackers. however, the machine perspective was not discussed, with the definition inferring that only malicious persons are involved in the practice. however, presenting cybersecurity as a field of study, not an isolated abstract concept, is significant in promoting further educational discourse. the term "cybersecurity" is commonly used to refer to a set of circumstances or events related to improving the integrity of a given information management system or infrastructure and addressing present and emerging challenges associated with the exercise. a review of the literature by the national institute of standards and technology (nist) reveals that the construct can be analysed by categorizing it into its constituent elements, including the internet of things, cloud, network, application, and critical infrastructure security (cybersecurity, critical infrastructure, 2018). data security is a core aspect of the distributed cognition theory detailing the increased utilization of artificial intelligence technologies to pattern human thought processes, thus preventing, predicting, detecting, reporting, and resolving potential threats to digital systems and information technology infrastructure.
the construct is integral to understanding the definition of cybersecurity in the contemporary environment as it discusses emerging best practices in the field and the continuous adoption of information technology as a best practice. "cybersecurity is a practice of preventing possible threats or malicious attacks seeking to unlawfully access data, damage information or disrupt digital processes." "cybersecurity is the process of defending data networks, electronic systems, mobile devices, servers, and computers from malicious attacks" (gartner, what is cybersecurity?, n.d.). "cybersecurity is a collection of best practices, including infrastructural changes critical in ensuring the integrity of a computer system" (2019). network security is a core domain of cybersecurity as it involves eliminating systems and abilities that potential attackers can exploit to disrupt organizational operations (what is network security?, n.d.). "cybersecurity is the collection and concerting of resources including personnel and infrastructure, structures, and processes to protect networks and cyber-enabled computer systems from events that compromise the integrity and interfere with property rights, resulting in some extent of loss." defining the range of the circumstances is critical to ensuring that cybersecurity challenges are not only perpetrated by malicious actors as is commonly assumed but may also occur as a result of accidental exposure of sensitive information by users or destruction of the system by natural occurrences. cybersecurity is often defined as the practice of allocating resources to safeguard the integrity of systems from unauthorized entry or manipulation by malicious actors to ensure minimal disruption of organizational operations. cybersecurity consists of several core aspects, including critical infrastructure, applications, networks, the cloud, and the internet of things. application security is also a major aspect of cybersecurity, involving the evolving end-user software used to manipulate computer systems and networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/883.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/883.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4106738c099dbf40ee2e1c32f6d8eb55057dea1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/883.txt @@ -0,0 +1 @@ +autonomous systems, broadly meaning systems capable of determining, initiating and executing action in pursuit of a goal, are being used around the globe to increase the safety and efficiency of tasks, and to lower economic and environmental cost. "to continue to make progress and innovate, ensuring that these systems have been designed responsibly and robustly will be key to safeguarding trust." while the technology enabling operation of these systems in the australian maritime, air and land domains is rapidly advancing, the assurance and accreditation framework, which forms a necessary part of commercial operationalisation, is not keeping pace. some common generalised names include autonomous vessel (av), unmanned vessel (uv), maritime autonomous vehicle (mav), autonomous vehicle (uxv), and maritime autonomous surface ship (mass). for the purposes of discussion around land-based autonomous systems, the key terminology used to refer to these systems includes unmanned ground vehicle (ugv), unmanned autonomous ground vehicle (uagv), autonomous ground vehicle (agv), autonomous vehicle, automated vehicle or unmanned vehicle.
other land-based autonomous systems include bio-mimetic systems, platform agnostic systems, and autonomous systems that are without physical instantiation such as diagnostic ai, legal automation, muzero and other similar automated ml programs. there are many challenges associated with effectively regulating autonomous systems, including how to ensure trust in these systems, how to ensure the regulatory approach is the most appropriate option, how to adapt current systems safety approaches, and how to adapt current assurance and accreditation frameworks. the complex, interconnected nature of autonomous systems, including cybernetic systems, means that assurance as a concept must shift to account for the high levels of interdependency between core systems. an example of an industry-led collaboration is the maritime uk autonomous systems regulatory working group (masrwg), which published a code of practice for maritime autonomous ships in november 2017 and has provided an updated version every 12 months. whilst systems safety focuses purely on systems, safety management systems (smss) expand the concept beyond systems to organisations, people and processes. in australia there is a lack of established assurance frameworks to integrate autonomous systems into traditional systems, which means neither industry, testing facilities, nor regulators have clear or consistent expectations or understanding of what assurance activities are required to demonstrate compliance with requirements to indicate safe operations. this lack of established standards or codes of practice for autonomous systems, and lack of sophisticated understanding within the surveying and inspection industry, and within regulators, represents a lost opportunity for the australian economy to efficiently gain the economic benefit of new technology and to enable local industry to design, test and commercialise novel autonomous systems. while neither the national law act nor the navigation act specifically refers to autonomous systems, the broad definition of "vessel" means autonomous systems are generally included. amsa is actively working to improve their regulatory approach to autonomous systems, for example by seeking more hands-on experience, putting in place a policy to guide decision making, establishing an autonomous vessel team to triage queries and applications, working on a package of legislative amendments that would enable more flexibility to better address emerging technology, and establishing a partnership with the trusted autonomous systems defence cooperative research centre (tas) to explore issues around assurance and trust in autonomous systems. there is a critical role to be played by third parties from industry, government, and academia who can work together to develop, test and publish a new assurance and accreditation framework for trusted autonomous systems, and provide recommendations for areas the regulators should focus on to ensure the benefits of autonomous systems can be realized without compromising safety. in order to facilitate development and commercial operationalisation of autonomous systems, in some jurisdictions industry groups have collaborated to create guidelines and codes of practice, such as the maritime autonomous surface ships industry conduct principles & code of practice mentioned earlier.
the centre team, drawn from the australian maritime safety authority, civil aviation safety authority, and the university of adelaide, has deep regulatory and technical expertise in autonomous systems, and brings a wealth of practical experience and strong stakeholder relationships to the project. there are opportunities for third party collaborations in australia, such as the assurance of autonomy activity mentioned above, as well as the australian association for unmanned systems' new maritime working group, to improve the assurance and accreditation approach for autonomous systems, and accelerate an improved regulatory approach which will facilitate innovation without compromising safety. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/884.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/884.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a751a039056e0a9cea6e490620ccd57620de76b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/884.txt @@ -0,0 +1 @@ +during the covid-19 pandemic many universities, e.g., in germany, were forced to switch to online classes. moreover, most final exams were held online. in pre-pandemic times, computer-based final exams had already proven their worth, but with the difference that they were proctored in the classroom. during the pandemic this was mostly unfeasible and students had to take the exam from a location of their choice. there exists a wide range of supervisory measures for take-home exams; e.g., one could use video conferencing software to monitor students. at many universities, however, this is legally prohibited due to data protection regulations. the exams are therefore conducted as open-book exams, i.e., students are allowed to use notes or textbooks. yet, students must not cooperate with each other. any form of cooperation or collusion is regarded as attempted cheating. to our knowledge, there exists no universally applicable method for proctoring take-home exams. it is therefore hardly feasible to stop students from illegally working together. however, one can attempt to identify colluding students post-exam. the attempt alone could have a deterring effect on students. research in this area, however, is scarce. prior work presents a method for comparing exam event logs to detect collusion, using a simple distance measure for time series, i.e., the event logs of two different students, to quantify the similarity of these students' exams. building on this, we propose an alternative distance measure, as well as the use of hierarchical clustering algorithms, to detect groups of potentially colluding students. we find that our method succeeds in finding groups of students with near-identical exams. furthermore, we present an approach to categorise student groups as "outstandingly similar", by providing a proctored comparison group. the remainder of this paper is organised as follows: section 2 provides a brief overview of related work. section 3.1 describes the available data. section 3.2 presents our method, including the calculation of the distance matrices. section 4 discusses the empirical results. section 5 concludes. their findings suggest that collusion took place when the final exam was not proctored. we also removed twelve students from the test group who reported internet problems during the exam.
although the lecture of the comparison group was held in presence and the lecture of the test group was held online, both groups shared the same content and learning goals. we adopt an exploratory approach for finding clusters of students with similar event patterns and points achieved during the exam. the average linkage method (here: unweighted pair group method with arithmetic mean) defines the distance between any two clusters as the average distance among all pairs of objects in said clusters. this general shape is typical for the underlying algorithm, as average linkage clustering combines the long form of single linkage clustering with the smaller, tighter clusters of complete linkage clustering. since our primary interest lies in the detection of clusters at low dissimilarities, instead of the general structure of the data, we investigate, as an example, the six lowest clusters (a-f) in figure 1. for our data of the test group, there is no indication of the existence of suspicious clusters of more than two students. we want to identify cases in the unproctored test group which are rather extreme compared to the proctored comparison group. the boxplots in figure 5 show the distributions of the global pairwise dissimilarity d(x_i, x_i') of all students in the comparison and test groups. the median value of the test group is significantly lower, indicating a lower average global pairwise dissimilarity in this group. we apply the above-mentioned lower bound on the test group's distribution to identify groups of students which are "outstandingly similar". while it is no surprise that these clusters were detected, our approach still aids us in deciding when to stop inspecting further groups of students, as their level of similarity might as well occur in the comparison group. we find pairs of students in the test group with values below the minimum of the comparison group. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/885.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/885.txt new file mode 100644 index 0000000000000000000000000000000000000000..c574ceb3d20a0ad6b9474d738953e470702883ae --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/885.txt @@ -0,0 +1 @@ + the goals of our five-year program include: (1) developing a network of like-minded individuals, (2) motivating students through empowerment, (3) providing experience with realistic and impactful problems through community engagement, and (4) learning to work in diverse teams. in the first iteration of the program, our twenty-four students from diverse science and engineering backgrounds tackled a number of social and environmental problems relevant to the community over the course of four months. the program aims to engage students from diverse backgrounds in real world projects utilizing experiential learning methods, in particular community-engaged learning and design thinking. the first step of the interview process consisted of a team activity to test each student's ability to (1) work in a team, (2) overcome any conflict that would occur in that short time span, and (3) self-organize the team's decisions. students were broken into teams of 5-6 students and tasked with completing a project that addressed a hypothetical problem and constraints which would require students to demonstrate their skills in these three areas.
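a minimal sketch of the average-linkage clustering step described in the exam-collusion excerpt above, assuming scipy's standard implementation; the toy dissimilarity matrix and the cut-off value are made up for illustration and are not the study's data or threshold.

```python
# Illustrative sketch of average-linkage clustering on a pairwise
# dissimilarity matrix between students' exam event logs.
# The matrix values and the cut-off are made up for the example.
import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster
from scipy.spatial.distance import squareform

# Toy symmetric dissimilarity matrix for 5 students (0 on the diagonal).
D = np.array([
    [0.0, 0.1, 0.8, 0.9, 0.85],
    [0.1, 0.0, 0.75, 0.88, 0.9],
    [0.8, 0.75, 0.0, 0.2, 0.7],
    [0.9, 0.88, 0.2, 0.0, 0.72],
    [0.85, 0.9, 0.7, 0.72, 0.0],
])

# Average linkage (UPGMA) on the condensed distance vector.
Z = linkage(squareform(D), method="average")

# Cut the dendrogram at a low dissimilarity to surface suspicious pairs.
labels = fcluster(Z, t=0.3, criterion="distance")
print(labels)  # students sharing a label form a low-dissimilarity cluster
```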
not only did each project address a pressing social or environmental challenge affecting the broader community, but each community partner also committed to mentoring the students in project-specific training such as dealing with brain injury patients and norms of engaging with vulnerable clients. through this process, each team ended up with diverse members in terms of gender, sexual orientation, ethnicity, academic standing and background such that most students had another team member whom they could relate to on some level, thus catalyzing the formation of meaningful friendships and bonds. for some of the students, this was their first work-integrated learning experience and most of the students had never worked with a community partner before. therefore, while each project team had a diverse blend of experiences, skills, and perspectives, all team members had an equal opportunity to work on a project that they were deeply motivated to work on. p1 (problem definition): students were provided with interpersonal skills training such as diversity, equity and inclusion (dei), professional conduct and communication with clients, conflict resolution, project-specific training, and design thinking, as well as technical skills training such as web frameworks, code repositories, and agile development. we further interviewed the community partners to understand their perspective on working with the diverse set of teams, the program and the product. afterwards, we analyzed the data to extract the key lessons and statements that illustrated the students' journey and could be further utilized to improve the program. working on a community project with real clients and community problems poses an abundance of rewarding yet challenging experiences that students would not otherwise be exposed to until post-graduation employment. this program was the first exposure for many students to work with real clients, therefore we provided them with a number of training sessions covering soft skills such as communication with clients, team management, professional conduct, diversity, equity and inclusion (dei) and leadership. despite real clients being so important to the students' motivation and success, the community partner's vision for the solution collided with the students' skillsets. as a result of the continuous guidance on practicing dei, the students soon started leveraging their team diversity through efficient work distribution, as a student in project 1 describes: "being on a diverse team has positively impacted the project development, in that we can all work on different areas." \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/886.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/886.txt new file mode 100644 index 0000000000000000000000000000000000000000..3f9199f72cccf21a89012a4a8fcd53cb3e033f55 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/886.txt @@ -0,0 +1 @@ +the openai-developed gpt (generative pre-trained transformer) model has a variation called chatgpt. the gpt model was initially released in 2018 and trained using the common crawl, a sizable dataset of text from the internet. the transformer design, revealed in a 2017 study by google researchers, served as the model's foundation. unsupervised learning was used to train the initial gpt model, which meant that it was trained on a sizable text dataset without any explicit labels or annotations.
as a result, the model could pick up on various textual patterns and structures and produce new text with a similar tone and structure (wikipedia 2023). chatgpt is owned and developed by openai, a private artificial intelligence research facility made up of the for-profit openai lp and its parent organization, the nonprofit openai inc. it is also known as gpt-3 (generative pre-trained transformer 3). elon musk, sam altman, greg brockman, ilya sutskever, wojciech zaremba, and several others created openai in december 2015 to advance benign ai in a way that benefits humankind as a whole. san francisco, california, usa, is home to the business (wikipedia 2023; "about openai" 2015). chatgpt, also known as gpt-3 (generative pre-trained transformer 3), is the latest version of a series of language models developed by openai. ➢ gpt-3 (generative pre-trained transformer 3), also known as chatgpt: the latest and most advanced version of the gpt series, gpt-3 was introduced in june 2020 and has 175 billion parameters, making it one of the largest and most powerful language models to date. usage: the api enables programmers, academics, and data scientists to incorporate the capabilities of gpt-3 into their software and systems, including ❖ chatbots and virtual assistants: chatgpt can be used to build chatbots and virtual assistants that comprehend user input and respond conversationally and naturally. ❖ language translation: before the invention of language translation tools, libraries provided language translation services to users, but now such technologies are replacing translation work. ❖ content creation: chatgpt can produce a wide range of content, including text summaries, complete articles, and natural language answers to questions. ❖ researchers: researchers can use chatgpt to perform natural language understanding and generation tasks, such as text summarization and text completion. ❖ text classification and sentiment analysis: the text classification and sentiment analysis capabilities of chatgpt can be honed for usage in applications like social media monitoring and customer feedback analysis. there are several ethical concerns related to using ai in academic writing, specifically language models such as chatgpt. ii) plagiarism: using language models to generate text can make it easier for researchers to engage in plagiarism by presenting text generated by the model as their own work. using language models to generate text can also lead to decreased creativity and critical thinking among researchers, who may rely on the model to generate ideas and text. chatgpt-3 can also aid the writing process by generating text for academic papers such as research papers, essays, and dissertations. therefore, it is vital to use tools such as chatgpt-3 to aid research and writing rather than relying solely on them to generate ideas and text. additionally, some scientists may view the use of chatgpt in this way as an attempt to circumvent traditional methods of authorship and give undue credit to the technology rather than the human researchers who conducted the work. purpose of using chatgpt: the most common uses for chatgpt among the 71 respondents who used it include language correction (62%), sentence making (47. ai-based models, including chatgpt, may require significant human editing to produce high-quality text, and it is ultimately the responsibility of the researcher to ensure accuracy, coherence, and relevance.
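to make the integration point above concrete, here is a minimal sketch of calling a gpt-style chat model for text summarization through the openai python client as it existed in early 2023; the model name, prompt, and environment-variable handling are placeholders for illustration, not a prescribed setup.

```python
# Illustrative only: querying a gpt-style chat model for text summarization,
# one of the tasks mentioned above. Uses the openai python client interface
# that was current in early 2023; the model name and prompt are placeholders.
import os
import openai

openai.api_key = os.environ["OPENAI_API_KEY"]  # assumes the key is set in the environment

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You summarize academic text in two sentences."},
        {"role": "user", "content": "Summarize: Electronic health records improve ..."},
    ],
    temperature=0.2,
)

print(response["choices"][0]["message"]["content"])
```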
in the future, educational institutions may subscribe to tools like chatgpt and other academic tools such as grammar correction, paraphrasing, plagiarism checking, and data analysis tools. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/887.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/887.txt new file mode 100644 index 0000000000000000000000000000000000000000..d0079dc0a3af8e5f09b3295808248fc412ac4dc2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/887.txt @@ -0,0 +1 @@ +in recent years several internet websites have become the hubs for communities where users can produce, consume, and disseminate content without central oversight. examples of these user-generated content (ugc) websites include major social media platforms, like facebook or twitter, or global online knowledge production communities like wikipedia, which is known as a model for the production of vast, reliable, high-quality knowledge (yasseri and menczer, 2021). however, a negative consequence of the popularity of ugc websites is that their low barriers to access, combined with the lack of supervision from experts or other gatekeepers, results in the proliferation of false or misleading information on the web as a whole (wardle and derakhshan, 2017; lazer et al., 2018). false or misleading content often spreads on social networking platforms (amoruso et al., 2020; castillo et al., 2011; zareie and sakellariou, 2021; grinberg et al., 2019; guess et al., 2019, 2020; allcott and gentzkow, 2017), but there are growing concerns that other ugc communities like wikipedia may be vulnerable to these threats too (sáez-trumper, 2019). this is especially worrisome since wikipedia is one of the most visited internet websites (similarweb ltd, 2022) and a popular source of knowledge (okoli et al., 2014). wikipedia contains over 50 million articles in more than 300 languages; in february 2022, the english language edition of wikipedia alone received 781m visits (from unique devices) and was edited over 5m times (wikipedia contributors, 2022c; wikimedia foundation, inc., 2022d). hence, preserving the integrity of wikipedia is of paramount importance for the web as a whole (sáez-trumper, 2019). there are many potential threats to the integrity of knowledge in wikipedia (sáez-trumper, 2019). one common threat comes from vandalism, which is "a deliberate attempt to compromise the integrity of the encyclopedia, often through the insertion of obscenities, insults, nonsense or crude humour, or by page blanking" (wikipedia contributors, 2021). vandalism, however, is not the only threat to the integrity of wikipedia's content. whereas vandalism focuses on defacing existing entries, there exists evidence showing that wikipedia is also targeted by hoaxes, whose aim is to create whole new entries about fake, fictitious topics. an example of a famous wikipedia hoax is the entry jar'edo wens, a fake australian aboriginal deity, which went undetected for almost 10 years before being debunked and deleted (dewey, 2015). but hoaxes remain a threat to wikipedia's content integrity to this day.
recently, one of the largest such incidents the platform has ever seen was discovered on the chinese wikipedia: a user named zhemao wrote 206 fake entries, from 2019 until 2022, about russia's history in the middle ages (moon, 2022). hoaxes are thus not to be confused with vandalism; although vandalism is a much bigger threat in scope and size compared to hoax articles, hoaxes constitute a more subtle threat, which has received less attention compared to vandalism. a crucial question that remains unresolved is what drives the creation of hoaxes on wikipedia. because their original authors are aware that these articles are false, hoax articles are different from mere misinformation, but should rather be considered instances of disinformation (wardle and derakhshan, 2017; lazer et al., 2018). as such, understanding the factors that determine the supply of hoaxes on wikipedia could shed light on disinformation in general, including broader threats to the integrity of the web, like state-sponsored propaganda (king et al., 2017; zannettou et al., 2019; golovchenko et al., 2020) and conspiracy theories (starbird, 2017). to bridge this gap, in this paper, we study the role of online attention, in the form of individual page views, in the supply of disinformation in wikipedia. the idea of an economy of attention was first introduced by simon (1971), who observed that human attention is a limited resource that needs to be allocated (goldhaber, 1997). here, to quantify the flow of collective attention to individual topics of knowledge, we take advantage of the unique wikipedia traffic dataset and api. specifically, in this work we seek to answer the following questions: q1. does online attention toward a topic increase the likelihood of disinformation being created about it? q2. operationally, is there a relationship between traffic to wikipedia and the production of hoax articles? to answer these questions, we collected a list of known hoax articles (wikipedia contributors, 2022a) along with their creation timestamps and content. to control for potential confounding factors in the distribution of traffic to wikipedia over time, for each hoax, we considered a cohort consisting of all the legitimate (i.e. non-hoax) wikipedia articles that were created on the same day as the hoax. similar to kumar et al. (2016), we find that hoaxes differ from legitimate articles in key appearance features, but do not strongly differ in the number of hyperlinks they contain. next, for each article (either hoax or non-hoax), we parsed its content and extracted all the outlinks, i.e. its neighbors in the wikipedia hyperlink network. the presence of a link between two wikipedia entries is an indication that they are semantically related. therefore, traffic to these neighbors gives us a rough measure of the level of online attention to a topic before a new piece of information (in this case an entry in the encyclopedia) is created. finally, we measure the relative change of traffic in the 7-day period before and after the creation of a hoax and compare this change to that of the articles in its cohort. to preview our results, we find that, on average, online attention tends to precede the creation of hoaxes more than it does for legitimate articles.
this observation is consistent with the idea that the supply of false and misleading information on a topic is driven by the attention it receives. in the rest of the paper we discuss related work (section 2), and then describe our methodology (section 3): the details of the data collection process, the comparison between features of hoaxes and legitimate articles, and the pre-processing of the wikipedia traffic data. section 4 discusses the techniques used to quantify online attention and its relationship to the hoax creation, and the statistical procedures performed to assess the results. finally, section 5 summarizes our findings and future directions. all code and data needed to replicate the findings of this study are available on github at github.com/csdl-usf/wikihoaxes. hoaxes are thus not to be confused with vandalism; although vandalism is a much bigger threat in scope and size compared to hoax articles, hoaxes constitute a more subtle threat, which has received less attention compared to vandalism. to answer these questions, we collected a list of known hoax articles (wikipedia contributors, 2022a) along with their creation timestamps and content. in the rest of the paper we discuss related work (section 2), and then describe our methodology (section 3): the details of the data collection process, the comparison between features of hoaxes and legitimate articles, and the pre-processing of the wikipedia traffic data. unfortunately, access to these data was not public due to the nature of the npp process. to collect this list, we queried the wikipedia api using the 'prefix search' endpoint (mediawiki contributors, 2022a) to collect the titles of the hoaxes residing in the administrative list maintained by wikimedia under the prefix 'list of hoaxes on wikipedia'. this observed behavior can be in part explained by the fact that the wikipedia community started patrolling new pages in november of 2007 (kumar et al., 2016). kumar et al. (2016), in addition to appearance features, studied network, support, and editor features for both hoax and legitimate articles. in summary, hoaxes tend to have more plain text than legitimate articles and fewer links to external web pages outside of wikipedia. recall that the cohort of a hoax is defined as all the non-hoax articles created on the same day it was created (figure 3: modified z-scores for all hoaxes in our sample relative to non-hoax articles in their cohorts for the four appearance features we considered). traffic to wikipedia is known to fluctuate following circadian and weekly patterns, and is likely to depend on a host of additional, unknown factors, such as the relative popularity of wikipedia over the years, the total number and geographic distribution of web users (yasseri et al.). when ∆v/v < 0, attention tends instead to follow the creation of the hoax. figure 4 shows the distribution of the ∆v/v values for each cohort, the cohort mean, and the value of ∆v/v of the corresponding hoax, for a manually selected sample of hoaxes collected from our data. having defined a way to quantify whether traffic to a given article preceded or followed its creation, we want to determine whether hoaxes tend to have a greater ∆v/v than legitimate articles in general. however, if hoaxes do not differ from legitimate articles, then on average the difference between the ∆v/v of a hoax and that of its cohort should be zero. this indicates that, on average, hoaxes tend to have more traffic accumulated before their creation than after.
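the pre/post comparison described above can be sketched as follows; the exact normalization behind ∆v/v is not spelled out in the excerpt, so the relative-change formula and the daily view counts below are assumptions made for illustration.

```python
# Illustrative sketch of the pre/post attention measure: compare traffic to an
# article's neighbors in the 7 days before vs. the 7 days after its creation.
# The exact normalization used in the paper is not reproduced here; this is one
# plausible reading ("relative change"), with made-up daily view counts.
import numpy as np

def relative_change(views_before: np.ndarray, views_after: np.ndarray) -> float:
    """Positive values mean more neighbor traffic before creation than after."""
    v_before = views_before.sum()
    v_after = views_after.sum()
    return (v_before - v_after) / max(v_before + v_after, 1)

# Toy daily page views to a hoax's neighbors, 7 days before / after creation.
before = np.array([120, 180, 240, 400, 520, 610, 700])
after  = np.array([300, 250, 210, 190, 160, 150, 140])
print(relative_change(before, after))  # > 0: attention preceded creation

# The same statistic computed for every article in the hoax's cohort gives a
# baseline distribution against which the hoax's value can be compared.
```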
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/888.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/888.txt new file mode 100644 index 0000000000000000000000000000000000000000..fac0fe9f559c84f35e89b618e292dca4b31268fa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/888.txt @@ -0,0 +1 @@ +the last fifteen years have been characterized by the large diffusion of the internet and social media, such as facebook and instagram, along with the tendency of users to share their data, both consciously (through posts, photos, etc.), and unconsciously (accepting the terms of service, allowing cookies when navigating the web, etc.). all this information has become incredibly valuable when coupled with big data practices because that allows companies that hold it to exploit it, extracting new information or behavioral models from it, in order to influence and predict the users' behavior and so to capitalize on advertisements. episodes in which this influence has been used outside the logic of business are well known, from trying to influence the result of an election, as in the cambridge analytica case, to mass surveillance, as revealed by edward snowden. therefore it is important to note that threats to our autonomy do not just undermine our integrity as individuals, but are also a serious risk to society as a whole, making it essential to discuss future developments of surveillance capitalism. when mentioning big data and the correlated mechanisms, i will be referring to the operations of data extraction from all the possible sources, and to the operations performed on the data in order to analyze and extract patterns useful for behavior prediction and therefore manipulation. big data is essential because it can be considered the turning point for concerns about autonomy: forms of manipulation have long been enforced through traditional media (like newspapers, radio, and lastly via television), but the amount of data gathered with big data enables a "tailored influence", taking it to another order of magnitude of effectiveness. the first reason why i believe my concerns about our autonomy in the future are valid is that the big tech companies that benefit from surveillance capitalism will only increase their revenues if they are more able to model, predict and influence our behavior and choices, therefore they will try to reduce our autonomy as much and as fast as possible. the second reason why i think we will face a reduction in our autonomy is that all of the mechanisms that surveillance capitalism uses, meaning the ability to gather incredibly large amounts of data from the users, are embedded in the social fabric through social media, such as facebook, and smart assistants, such as amazon alexa. i think that it is enough to ask ourselves some questions such as: "could someone live without using social media nowadays?", "could a politician run a campaign without using facebook?" and "could i go somewhere new without using google maps?" to realize that the answer to the question "can we actually choose to be free from being subjects of surveillance capitalism?" is negative.
even though studies have shown how fomo can be linked with negative effects on one's mood and life satisfaction, this phenomenon highlights the fact that surveillance capitalism mechanisms (in this particular case social media) have become powerful enough to influence us psychologically to promote their usage. an important feature is that the mechanisms that are used to influence our behavior and choices, for example, what is shown in our facebook feed or which articles amazon recommends to us, are fed with data that the user generates without being conscious of it (an example could be the area on the screen of the smartphone that is touched) and that these algorithms work without the users noticing. this gives surveillance capitalism an unlimited scope of action and furthermore makes it more difficult for us to avoid being subjects of both data gathering and manipulation, giving us little or no space in which we are completely free from being influenced. in this sense, our smartphones represent the first way of gathering data about us: we take them with us all of the time and they are able to know our location, they have access to our audio to detect if we are calling the smart assistant, and they have become the filter through which we interact with the world. the scope of surveillance capitalism does not end with everything that can be gathered from us, but it also extends to what can be derived from the data thanks to big data analyses. an example of future developments is the insurance sector: by gathering data from our cars, such as the way we drive and where we drive, big data analyses could estimate how likely we are to get into a car accident, and the insurer could therefore require a higher fee even if we always respect the traffic regulations. a possible reason for supporting surveillance capitalism practices is that they are the reason why, nowadays, we have access to an enormous variety of services and content for free (not taking into account the cost of having an internet connection, which is negligible and has been diminishing since its beginning) or products for a very moderate cost (like smart assistants). another argument against the limitations of surveillance capitalism practices is that the amount of data that is gathered is so huge and can unlock a knowledge so deep about us that it is possible to influence us to act for what is perceived as our own good. the first reason treated is the fact that it is thanks to surveillance capitalism that we have access to so much content and so many services for free, but i have shown that it is intrinsically wrong to define those services as "free", since we don't pay with money but with our data, which is sold to third parties and is also used with the aim of influencing us and undermining our autonomy, so the price is in fact double. in conclusion, it is fair to say that the current situation with regard to surveillance capitalism practices, like massive data gathering, behavioral prediction, and manipulation, already raises a lot of concerns for our autonomy, and it is expected to get worse, following the trend that it has followed since the early days, if no action is taken against such techniques.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/889.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/889.txt new file mode 100644 index 0000000000000000000000000000000000000000..026f9133fa2ead1cb478ee1412962bece162180c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/889.txt @@ -0,0 +1 @@ +data protection has a long history in europe. there were national data protection laws in many european countries long before the general data protection regulation (gdpr) repealed an earlier 1990s data protection directive in the eu in 2016. the early efforts at the european level started already in the late 1960s with initiatives from the council of europe, which, however, did not progress despite a positive reception at the european parliament during the 1970s (evans, 1981). in the 1980s the data protection initiatives switched from the european community to other institutions, including the organisation for economic co-operation and development (oecd) and the united nations. things started to change when the eu was formally established with the maastricht treaty in 1992. during the policy-making of the lisbon treaty in the late 2000s, data protection was added to all basic treaties of the eu, including the treaty on european union, the treaty on the functioning of the european union, and the charter of fundamental rights of the european union. in addition, privacy (but not data protection) has long been present in the european convention on human rights. the gdpr is the latest manifestation in this long historical trajectory in europe. the european commission is the guardian of these treaties and the fundamental rights specified in them. according to anecdotal evidence, however, it has often been the european parliament that has taken the political initiative and lead in promoting data protection and privacy policies, which have often received less attention in national parliaments (mistale, 2015). a comparable point applies regarding the enforcement of the data protection policies in the european union. here, the final say in many difficult data protection questions has often been given by the court of justice of the european union (cjeu), although enforcement of data protection is generally delegated to national data protection authorities in the member states. according to a critical viewpoint, neither the guardian of the treaties nor these data protection authorities have fulfilled their data protection obligations to the extent envisioned in the enacted treaties and laws. but law is one thing and lawmaking is another. regarding the latter, little is generally known about the question of how politicians perceive data protection and take it into account in their political agenda-setting. true enough, there have long been european politicians who have actively profiled themselves as promoters of data protection and privacy, but the large majority of politicians has supposedly perceived these issues as having a lesser weight, questions related to national security and related topics perhaps notwithstanding. in this regard, the intense politics during the policy-making of the gdpr, as adeptly described in a film documentary (powles, 2015), were arguably an exception rather than the rule in the sense that the political data protection issues and the inner workings of the parliament became visible also to the general public in europe and elsewhere.
against this background, on the one hand, it could be argued that data protection politics often deal with obscure technical details and boring nitty-gritties in law proposals, which seldom prompt a wider political interest among the constituencies of politicians. on the other hand, it could be argued that the continuous stream of privacy scandals, whether in terms of data breaches or the practices of social media companies, the increased state surveillance, including the associated oppression and human rights violations in authoritarian regimes, the rise of large technology companies and new technologies such as facial recognition, and many related issues would provide valuable political capital for politicians in their recurrent seeking of votes and offices. the european commission is the main lawmaking institution in the european union. it makes legislative proposals. if a proposal is then agreed by the council of the european union and european parliament, a new law is enacted. although this basic lawmaking procedure has not changed, the parliament has gained power as a budgetary authority and a co-legislator with the lisbon treaty, although it has at the same time faced the rise of eurosceptic and populist parties in the aftermath of the various crises since 2008. the parliament also possesses considerable power as a political agenda-setter in the pre-legislative phase; it can influence the legislative agenda indirectly by bringing new issues into the eu's political roundtables and further escalating these to the fore of public debates (kreppel and webb, 2019). such political agenda-setting frames the context of the present work: the goal is to explore data protection questions raised by meps in the parliament's plenary sessions and the answers given to these by the commission. if data protection and privacy more generally provide political capital for meps as envisioned, there should also be many associated questions about these topics. the exploration can further reveal important aspects about those specific data protection issues that politicians perceive as important. as further argued in the opening section 2, the exploration has novelty because there are no directly comparable previous works and the knowledge about data protection politics is generally very limited. then, the materials and methods are elaborated in section 3. the results from the empirical exploration are presented in section 4. the final section 5 concludes. there were national data protection laws in many european countries long before the general data protection regulation (gdpr) repealed an earlier 1990s data protection directive in the eu in 2016. here, the final say in many difficult data protection questions has often been given by the court of justice of the european union (cjeu), although enforcement of data protection is generally delegated to national data protection authorities in the member states. true enough, there have long been european politicians who have actively profiled themselves as promoters of data protection and privacy, but the large majority of politicians has supposedly perceived these issues as having a lesser weight, questions related to national security and related topics perhaps notwithstanding. if data protection and privacy more generally provide political capital for meps as envisioned, there should also be many associated questions about these topics.
each topic is a mixture of bigrams with different proportions and each document is a mixture of latent topics, again with different proportions, such that all documents in the corpus share the same set of topics but different documents exhibit the topics in different proportions. parliamentarians belonging to the liberal group have raised the largest number of data protection questions overall, but these occurred particularly during the first half. a further point is that left-wing meps raised many questions around 2015, during which the gdpr was debated, even though the rapporteur for the regulation was from the green group. when estimating lda models with the number of topics in the range 60-80, many sharp, consistent, and interpretable topics emerge, but, at the same time, several topics cannot be readily interpreted and labeled. a potential explanation for these ... [figure omitted: model selection curves plotting the optimization value against the number of topics]. this kind of data belongs to the gdpr's category of sensitive data, and, therefore, it is no surprise that meps have also raised data protection questions about the eurodac database. although some incoherence is present also in this topic, data protection questions and answers related to the single market are understandable already because the single market is perhaps the most significant element of the eu as a whole. to recall: the edge weights, and hence their widths, in the visualization refer to the number of times meps from a particular group asked questions about a given topic, either alone or together with other meps. also the green group has been active in debating data protection issues in social networking, while the eurosceptic group has been particularly keen to debate data protection questions related to contact tracing. there are also other topics that could be reasonably expected to emerge in parliamentary debates about data protection: law enforcement and criminal justice, the (digital) single market, national supervisory authorities, the schengen area, social networking and social media, and cloud computing. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/89.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/89.txt new file mode 100644 index 0000000000000000000000000000000000000000..209eb7230bb1af381443ac391a11eaed222a3e59 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/89.txt @@ -0,0 +1 @@ +the dagger algorithm addresses the covariate shift encountered in behavioural cloning by querying the expert again on all the states that the agent encounters during a test run, then aggregating these new samples with the existing dataset. however, querying the expert can be costly in some situations.
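a minimal sketch of the dagger loop just described, written with placeholder `env`, `expert`, and `train_policy` callables rather than any particular library's api; it only illustrates the aggregate-and-retrain idea.

```python
# Sketch of the dagger loop: roll out the current policy, query the expert on
# every visited state, aggregate with the existing dataset, and retrain.
# `env`, `expert`, and `train_policy` are placeholders provided elsewhere.
def dagger(env, expert, train_policy, initial_dataset, n_iterations=10, horizon=1000):
    dataset = list(initial_dataset)   # (observation, expert_action) pairs from demonstrations
    policy = train_policy(dataset)

    for _ in range(n_iterations):
        obs = env.reset()
        for _ in range(horizon):
            dataset.append((obs, expert(obs)))   # expert labels every visited state
            obs, done = env.step(policy(obs))    # but the learner's action drives the rollout
            if done:
                break
        policy = train_policy(dataset)           # aggregate and retrain
    return policy
```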
a drawback of dagger is that it queries the expert at all collected observations without considering which actions are most valuable towards training a good policy. the dril algorithm handles the issue of covariate shift by adding a regularizer term to the optimisation of the policy, implicitly favouring policies that choose to enter states which minimise the variance of an ensemble of policies for these states. the rationale is that the variance should be low for states that are in-distribution, so this should motivate the learner policy to stay within this distribution, assuming it is sufficient to achieve proper performance. a drawback of this method is that it may not be able to learn from incomplete datasets, and cannot explore further. we therefore design an algorithm that does not needlessly query the expert, while also selectively querying it to learn about states which genuinely have not been encountered. this combines the flexibility and exploration of dagger, while taking advantage of the ability of an ensemble to identify out-of-distribution states as done in brantley et al. the dagger algorithm addresses the covariate shift encountered in behavioural cloning by querying the expert again on all the states that the agent encounters during a test run, then aggregating these new samples with the existing dataset. prior work in this area mainly consists of estimating uncertainty in the predictions of deep neural networks, as well as improving the sample efficiency of the dagger algorithm. a common way to estimate model uncertainty is with an ensemble of learners: the uncertainty is said to be high when the predictions of the ensemble have high variance. zhang and cho achieve a similar goal by employing a separate network, called the safety policy, to predict whether the current policy being trained is likely to make an error, and subsequently use this information to determine which visited states need to be included in the next dagger iteration. other work uses ensembling to estimate model uncertainty to build a probabilistic variant of dagger, where the model's uncertainty over its predicted actions determines when to query the expert. we propose a new algorithm called dadagger (disagreement-augmented dataset aggregation algorithm), which is based on dagger, and borrows from the principle that out-of-distribution states induce higher disagreement among an ensemble of policies. we use this to modify dagger to only query the expert on states with high disagreement (a particular percentage of visited states), in an attempt to gain maximal information and resolve the most uncertainty in a smaller number of queries. since training an ensemble of neural networks is prohibitively expensive, in order to efficiently obtain the variance we instead use dropout layers to approximate the uncertainty of our estimators. during evaluation (i.e. at test time), we sample the network once to predict actions, rather than obtaining an ensemble mean, to ensure a fair comparison to dagger. we compare the performance and convergence properties of our algorithm to dagger (obtained by setting both α and m to 1), as well as to an agent which queries the expert at observed states selected at random (obtained by setting m to 1 for varying α) to test whether the quality of sampling plays a role in achieving convergence.
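a sketch of the disagreement-based filtering just described, assuming a pytorch policy network that contains dropout layers; the variance-over-samples disagreement measure and the parameter names are illustrative, not the authors' implementation.

```python
# Sketch of the disagreement-based filtering step: keep only the fraction of
# visited states on which m dropout samples of the policy disagree the most,
# and query the expert only on those. Network and measure are placeholders.
import numpy as np
import torch

def select_states_to_query(policy_net, observations, alpha=0.2, m=8):
    """Return indices of the alpha fraction of observations with the highest
    variance across m stochastic (dropout-enabled) forward passes."""
    policy_net.train()  # keep dropout active at inference time
    obs = torch.as_tensor(np.asarray(observations), dtype=torch.float32)
    with torch.no_grad():
        samples = torch.stack([policy_net(obs) for _ in range(m)])  # (m, N, action_dim)
    # Disagreement: variance over the m samples, averaged over action dimensions.
    disagreement = samples.var(dim=0).mean(dim=-1)                  # (N,)
    k = max(1, int(alpha * len(observations)))
    return torch.topk(disagreement, k).indices.tolist()
```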
instead of using an initial training dataset as in previous experiments, we run dadagger with an empty initial dataset, and consequently create the dataset over several iterations, querying the expert only to resolve uncertainty. we then measure the distribution of actions in our final dataset and compare it to the dataset produced by dagger and dadagger when supplied with an initial dataset. interestingly, the starting dataset is more balanced than any of the ones resulting from dagger / dadagger iterations, yet fails to converge, meaning the quality of a dataset is not entirely determined by how balanced it is.figure2shows that all variants of dadagger converge to a similar performance as base dagger on half cheetah, which demonstrates that our uncertainty measurement and sampling technique is also capable of handling a multi-dimensional and continuous action space. the final dataset consists of 746 samples, which is markedly less than even the initial dataset we were using for dagger and dadagger, which contained 1139 samples. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/890.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/890.txt new file mode 100644 index 0000000000000000000000000000000000000000..cbe2ad379588833150208895c50ab7f995f1e291 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/890.txt @@ -0,0 +1 @@ +law enforcement agencies (leas) have a need to access online data for their evidence gathering during crime investigations. not only does cyber crime involve online investigations, but also conventional, non-cyber crime today often includes online activities and thus requires gathering electronic evidence. in fact, a 2018 report from the european commission stated that as much as 85% of all criminal investigations required some form of electronic evidence gathering.1 at the same time, technological developments have hindered crime investigations by leas. in recent years particularly the rise of cloud computing has been a major concern. while the united states (us) has already passed a law for addressing the issues with cloud computing, also the european union (eu) has recently agreed on a similar law. this paper provides an overview of the recent developments, focusing on cross-border data access from a european perspective.the paper is a literature review. this approach is justifiable because there is already an extensive literature base on the topic. because the topic also spans multiple disciplines, from computer science to political science and law, the approach taken is specifically an integrative review; the goal is to assess and synthesize the existing literature in a way that allows new perspectives to emerge (snyder, 2019). in particular, therefore, the goal is not to engage in detailed legal analysis; as will be shown, there are already plenty of existing works that have done good jobs at this front. it is important to synthesize these works in order to better understand the academic record on the topic.it is possible to further motivate the paper and its contribution from multiple perspectives. not only is cloud computing a challenge for cyber crime investigations and criminal law, but similar challenges are present also in terms of economics and industrial policy, cyber security, digital rights, and data protection. increasingly, furthermore, cloud computing is an important tenet in the geopolitical rivalry between countries (brancolini, 2023). 
thus, the so-called "loss of location" often discussed in the criminal law literature apply also to international relations between states and their sovereigntiessome authors have even talked about a new post-westphalian system (guarda, 2015). in europe these geopolitical aspects have correlated with aspirations to build sovereign european clouds in order to decrease dependencies from other major powers. however, the loss of location and its relation to state sovereignty has posed major challenges to jurisprudence because jurisdictions are still largely based on territories of sovereign states and their supranational unions and alliances. to address the challenges, as will be elaborated, both the us and the eu have resorted to extraterritoriality, allowing leas in their jurisdictions to access data stored in foreign locations. a further motivating point can be made with respect to technological surveillance.electronic evidence collected by leas should not be equated to surveillance conducted by intelligence agencies-the latter deals with national security, whereas the former is bound by criminal law. despite this fundamental division, both agencies can be criticized from a perspective of accountability and transparency. as will be elaborated, the current practices and legal arrangements for cross-border data access by leas are far from perfect; it remains debatable how accountable and transparent the practices and arrangements are. europe is not an exception in this regard. in this regard, it should be recalled that accountability and transparency are important foundational concepts of liberal democracies. the latter is a requirement for the former; among other things and ever since the age of enlightenment, transparency has been seen to provide the means by which people can evaluate the use of power by governments and their administrations (molnar & warren, 2020;moses & de koker, 2017). it is also necessary for monitoring the effectiveness and efficiency of public policies and public administration. alas, surveillance is largely excluded; accountability and non-transparency are often justified by merely pointing out that the rule of law is followed and secrecy is necessary (setty, 2015). for instance, in terms of transparency, only a few countries release some vague statistical reports on their intelligence agencies' use of lawful interception techniques based on signals intelligence and deep packet inspection. even then, such reports hardly qualify as evidence of accountability (molnar & warren, 2020). the secrecy is often even higher with other information gathering techniques. the situation is better on the side of law enforcement agencies whose accesses to user data usually need to go through judicial processes. in addition, several accountability measures for leas and their data sharing have been established in the european union (aden, 2018).second: surveillance is an important concept from a perspective of the widespread private sector monitoring of online activities, which, as was shown already by the snowden revelations, is closely tied to state surveillance (naef 2023, pp. 77-79;kamp 2021). recently, authors such as zuboff (2019) have even talked about specific surveillance capitalism that provides new means of production based on the collection of personal data and the use of algorithms to transform the data collected into profits. much of the european data protection efforts have centered on shielding people from such surveillance practices of companies. 
yet, every coin has two sides; at the same time, state surveillance has increased also in europe. due to terrorism, cyber crime, and other factors, there has also been a political push in europe to increase the powers of leas and decrease the data protection scrutiny of these (treiber, müllmann, schneider, & döhmann, 2022).to this end, many technology companies have started to adopt pro-active accountability and transparency practices by releasing so-called transparency reports on data requests made by leas and, in some cases, intelligence agencies. such practices align with the more traditional ideals of corporate social responsibility. in other words, voluntary transparency reporting of many large companies covers also such areas as finance, taxes, sustainability and environmental practices, socially responsible investments, labor rights, and work conditions.third: surveillance is revealing because its secrecy prevents from understanding what type of data is actually collected. a similar concern applies to evidence collection by leas. as asked by carrera, fuster, guild, and mitsilegas (2015) as well as westmoreland and kent (2015), what exactly qualifies as "evidence" and what exactly is "data"? while some definitions have been proposed, many of the laws throughout the world are based on a separation recent trends in cross-border data access by law enforcement agencies between content data and non-content data. 2 the latter is known also as metadata. it has usually received less protections from law (daskal, 2016), although it has long been important for both surveillance and evidence collection. traditionally meta-data covers things like internet protocol (ip) addresses and domain names, but today it extends also to location data based on the global positioning system (gps) or the location of cell towers. in addition, many laws further talk about traffic data (non-content) and subscriber data (content), the latter including things like electronic mail addresses, credit call details, and phone numbers of consumers who have subscribed to a particular service. but due to changes in telecommunications and information technology in general, such as the rise of cloud computing, caching, load-balancing, and content delivery networks, it is difficult to assess when data is in motion (cf. meta-data and traffic data) and when it is at rest (cf. content data). to this end, there have been frequent calls to better separate different data types in jurisdictions (biasiotti et al., 2016;svantesson & van zwieten, 2016;warken, van zwieten, & svantesson, 2020). european data protection laws have further complicated the situation with the introduction of the notion of personal data. these issues with different notions of data motivate the present work to take a brief look on how technology companies themselves classify their data when releasing it to leas. the transparency reports provide the empirical material for the brief examination. in addition to the data types, a few points are also made about the legal practices of the companies.the remainder of the paper has a straightforward structure. the opening section 2 discusses the legal and practical background behind cross-border access to data by law enforcement agencies. the subsequent section 3 outlines the contemporary controversies, challenges, and responses to these. 
both sections are based on a review of existing literature; the goal is not to engage in detailed legal analysis but to provide an overview of the recent trends and the research of these. in other words, the focus is on the policy side of things. then, section 4 presents the noted brief empirical analysis of the transparency reports. the conclusion and a discussion follow in the final section 5. as asked bycarrera, fuster, guild, and mitsilegas (2015)as well aswestmoreland and kent (2015), what exactly qualifies as "evidence" and what exactly is "data"? while some definitions have been proposed, many of the laws throughout the world are based on a separation recent trends in cross-border data access by law enforcement agencies between content data and non-content data., 2015). 13 in terms of international implications, the cloud act is notable also because it further encourages foreign governments to enter into executive bilateral agreements with the us, allowing a qualified foreign government to access any type of data (including content data) held by companies located in the us without going through the cumbersome mlat system(abraha, 2021;daskal & swire, 2018b). there is also an important incentive for making such agreements: the sca and the ecpa forbid companies in the us from disclosing content data to foreign countries recent trends in cross-border data access by law enforcement agencies without a warrant issued by a court in the us via a mlat, although noncontent data can be provided on voluntary basis(de hert et al., 2020). in line with the long tradition of promoting bilateral mlats(harfield, 2003), the united states has tried to push the cloud act as a solution for cross-border data access, suggesting that it can be either used for signing bilateral agreements between european countries and the united states or tied directly to the planned e-evidence regulation in the european union(daskal & swire, 2018a). furthermore, the chief data protection authorities in the european union have raised concerns that the cloud act does not provide enough guarantees for the protection of personal data and the fundamental rights thereto, which, according to their position, should be taken into account when reforming the mlat system through a unified european solution (seeedpb andedps 2019 and, analogously, also ccbe 2019). for instance, amazon classifies subscriber information (such as names, addresses, and billing information) into a category of non-content data, while mainly reserving the category of content data to all data stored and processed in the company's cloud hosting service-to which leas in the united states have access with the cloud act irrespective of the territorial location of a particular data center. with slightly different terminology, these three companies separate three data types: device and vehicle data, data based on financial identifiers, and content data behind user accounts. 
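For illustration only, the three-way classification just described could be encoded as a simple lookup structure. The category names follow the text, while the example fields are drawn loosely from the surrounding discussion and do not reproduce any company's official schema.

# illustrative, hypothetical mapping of the data types released to leas
DATA_TYPE_TAXONOMY = {
    "device_and_vehicle_data": ["device identifiers", "license plate data", "gps location"],
    "financial_identifiers": ["billing information", "credit card details", "transactions"],
    "content_data": ["records of communication", "photos", "files stored behind user accounts"],
}

def classify(field):
    """Return the taxonomy category containing the given field, if any."""
    for category, fields in DATA_TYPE_TAXONOMY.items():
        if field in fields:
            return category
    return "unclassified"

print(classify("gps location"))  # -> device_and_vehicle_data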
there is thus a treasure drove of data for leas; both drivers' and customers' device data, vehicle information (including license plate data and vehicle insurance data), billing information (including credit card details and financial transactions), trip information (including pick-up and drop-off locations as well as gps data in some cases), records of communication, customer service records, details about drivers (including their photos and drive license copies), data about auxiliary functionalities (including data about restaurants and orders placed in these), and freight data (including calls logs between uber and carriers, load information, and gps data). however, none of the three companies disclose details about what the device data actually contains; in principle, this type of data may refer to content of communications (such as the content of telephone calls or text messages) as well as to content data stored directly into a device, whether photos, files, or something else. it is also worth noting that the convention's notion of data seems to cover personal data covered by data protection laws(balboni & pelino, 2013). among these are: the competency of the european union to sign an executive agreement with the united states, the qualification of the whole european union as a qualified government as per the cloud act, the e-evidence regulation versus european data protection laws, the e-evidence regulation versus the cfr, the e-evidence regulation versus the e-privacy directive, the e-evidence regulation's coverage of citizens outside of the european union versus the cloud act's restrictions regarding the citizens of the united states, the e-evidence regulation versus the sca's and ecpa's blocking provisions on content data, the cloud act versus the gdpr, the e-evidence regulation versus admissibility to the cjeu or other courts, the e-evidence regulation and the cloud act versus the cjeu's decisions on the transatlantic data flows, and probably more. more generally, it seems reasonable to agree withpawlak and barmpaliou (2017)in that there is still a need to strengthen due process safeguards, accountability conditions for investigative powers, data protection, and oversight mechanisms. extraterritorial arguments have been widely used also by national courts in europe and the us. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/891.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/891.txt new file mode 100644 index 0000000000000000000000000000000000000000..6cf92146186ddf03050f6eaa8e85f9a53f7cf65b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/891.txt @@ -0,0 +1 @@ +the data governance act (dga) is a part of the larger regulatory framework pursued by the eu for digitalization, data economy, artificial intelligence, and other important policy goals often approached under the label of digital sovereignty. 1 data is a key ingredient in this framework. artificial intelligence needs data. science needs data. digital applications and services need data.data is the new oil, so the saying goes. but, in many ways, the new economy and the giant companies built around data share the same notoriety as the ruthless oil barons of the past (lahtiranta & hyrynsalmi, 2018). against this backdrop, it is understandable that the eu's focus has long been on data protection. this focus culminated to the enactment of the general data protection regulation (gdpr) in 2016. 
in recent years the focus has switched toward facilitating data economy and data sharing in europe. to some extent, however, the data economy focus was already present during the policy-making of the gdpr. for many politicians and stakeholders, the regulation had the twin goals of protecting personal data and facilitating the free flow of personal data across the internal market (könig, 2022). data reuse was also a hot topic during the negotiations (starkbaum & felt, 2019). another point is that to a certain degree the data economy focus was also explicitly embedded to the gdpr, which, according to article 20, gave data subjects a new right for data portability between different data controllers. while the idea was to facilitate data sharing and interoperability, the new portability right turned out to be problematic in many ways, particularly with respect to data reuse (van ooijen & vrabec, 2019). to this end, the dga seeks to facilitate further sharing of personal data by introducing a concept of data altruism. another core tenet in the new regulation is the reuse of data held by public sector bodies.the goals of the dga are ambitious. the primary goal is to facilitate data economy in europe and improve the eu's digital single market. particular emphasis is placed upon small-and medium-sized enterprises (smes) and start-ups for which the planned data reuse and data sharing provide new material to innovate in artificial intelligence and digital applications. scientific research is also an important part of the goals. in general, data is seen as necessary for tackling the climate change and facilitating the green transition, improving the energy infrastructure, healthcare, and financial services, and so on and so forth. these goals are framed with a distinct "european way" to data and data economy. therefore, fairness, data protection, and lawfulness receive a considerable attention in the regulation. in what follows, the main content of the new regulation is briefly reviewed. after the review, a few reflections are provided about potential challenges ahead. another point is that to a certain degree the data economy focus was also explicitly embedded to the gdpr, which, according to article 20, gave data subjects a new right for data portability between different data controllers.(2) a notification and supervisory framework for the provision of data intermediation services; (3) a framework for voluntary registration mechanism for entities that collect and process data made available on altruistic purposes; and (4) a framework for establishing a new european board for innovation in data economy. data altruism is defined in the second article; it refers to "the voluntary sharing of data on the basis of the consent of data subjects to process personal data pertaining to them, or permissions of data holders to allow the use of their non-personal data without seeking or receiving a reward that goes beyond compensation related to the costs that they incur where they make their data available for objectives of general interest as provided for in national law, where applicable, such as healthcare, combating climate change, improving mobility, facilitating the development, production and dissemination of official statistics, improving the provision of public services, public policy making or scientific research purposes in the general interest". 
in other words, data altruism is based either on the permission given by an organization for not-for-profit processing activities of non-personal data or the notion of consent in case personal data is involved. the regulation does not cover data held by public undertakings, data held by public service broadcasters and their subsidiaries, data held by cultural establishments and educational institutions, data protected on the grounds of national security and defense, and data falling outside the scope of the public tasks of the public sector bodies concerned. to ensure that data is properly protected, public sector bodies must ensure that personal data is anonymized and commercially confidential data is properly modified, aggregated or otherwise handled with proper disclosure controls. the support provided by these competent bodies includes technical guidance for data storage and data processing, help with anonymization, suppression, randomization, and other techniques that ensure privacy, confidentiality, integrity, and accessibility of personal data, state-of-the-art privacy-preserving methods, deletion of commercially confidential information, support for consent and permission requests for reuse, and relevant contractual commitments. according to article 2, a data intermediation service is "a service which aims to establish commercial relationships for the purposes of data sharing between an undetermined number of data subjects and data holders on the one hand and data users on the other, through technical, legal or other means". the keyword is commercial relationships; other data sharing services of public sector bodies are excluded together with data services without a commercial relationship between data holders and data users.4in general, data intermediation services are only about sharing data; these should not use the data for purposes other than delivery, although auxiliary functionalities such as anonymization services are allowed (article 12). as with data intermediation services, all data altruism organizations wanting to be officially recognized as data altruism organizations must be officially registered to public registries maintained by competent public sector bodies (articles 17 and 19).the new data governance act lays down frameworks for data reuse and data altruism under the supervision of competent public sector authorities. although the same article specifies that this purpose limitation does not apply to public interest data archiving, scientific research, and statistical applications, the dga's goal of public sector data reuse still raises a concern about whether personal data collected by public sector bodies will be used in a manner which is unexpected or risky to the data subjects. for instance, according to the dga's recital 15, prior to granting access for reuse of data, public sector bodies should carry out data protection impact assessments and consult data protection authorities in line with the gdpr's articles 35 and 36. here, it remains unclear whether the existing big tech companies are allowed to act as data intermediation services, and how it is possible to ensure that such companies only provide data sharing without attempts to use the data exchanged. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/892.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/892.txt new file mode 100644 index 0000000000000000000000000000000000000000..80e20fd33d569e5ac5cd76bace113e612e7a5e3c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/892.txt @@ -0,0 +1 @@ +the prevalence of social media platforms, and their seemingly global use often makes them attractive options for the study of large groups of users or widespread phenomena, events, or topics. in particular, social media platforms provide useful data sources to investigate such a complex and multifaceted phenomenon as migration, for which statistical data is often limited and inconsistent across countries (migration data portal 2020). social media platforms have been used to study a variety of migration-related aspects, including measuring, monitoring and forecasting migration movements (e.g., alexander et al. 2020;maazoli et al. 2020;martin et al. 2020;zagheni et al. 2014), detecting hate speech towards migrant populations (e.g., florio et al. 2019), analysing public opinion and political debate on migration policies (e.g., siapera et al. 2018), and estimating levels of integration (e.g., dubois et al. 2018). as such, social media platforms can allow researchers to gather unique insights into migration phenomena that can be used to inform relevant policies and actions.however, the use of social media data, and its potential for misuse, has resulted in several high-profile cases in the media, such as the cambridge analytica scandal in early 2018 1 , which have raised a number of crucial ethical issues around the areas of informed consent, privacy, and profiling of individuals. these ethical considerations are particularly important when looking at a population such as migrants, which is often in a vulnerable position in society. individuals who immigrate to a new country but do not hold the nationality from that country can face risks such as social exclusion (restricted or lack of access to work, housing, social welfare, education, bank accounts, etc), removal from their family, detention, and deportation (gill 2016;tazzioli 2020;yuval-davis et al. 2018). the disclosure of one's migration status can also lead to stigmatisation and hate speech (enar 2018; european union agency for fundamental rights 2016). as border regimes and immigration control have continuously intensified in european countries and beyond, governments and law enforcement agencies are increasingly relying on big data and social media data to control mobile populations (latonero and kift 2018;metcalfe and dencik 2019). individuals who have entered or remained in a country in an irregular way, including people seeking asylum and survivors of trafficking, are particularly at risk, as data published and collected on social media can be used against them (dimitriadi 2021). in norway for instance, the police have asked asylum claimants to provide facebook log-in details to examine the veracity of their accounts and inform their asylum decision (brekke and staver 2019).in this context, researchers need to pay special attention to the potential harm that collecting and analysing migration-related data from social media platforms can create. yet, guidance on the ethics of migration research primarily focuses on qualitative methods such as interviews and do not readily apply to social media analytics. 
various guidelines, frameworks and laws have been developed to ensure social media data is used in the most ethical way.however, the rapid evolution of social media platforms' terms and conditions, associated privacy policies, and national and supranational legislation make them difficult to navigate.to draw these different considerations together and help researchers better understand and evaluate the ethical implications of using social media data in migration research, this chapter provides an overview of the main ethical concerns that have been raised in academic literature and other guidelines, and how these may be addressed and mitigated. this chapter especially speaks to researchers who intend to use social media analytics in support of migration and for whom the ethics of this type of research is a new or under-explored topicwhether students, early-career scholars, or primarily qualitative researchers. this chapter is structured as follows: first, the changing landscape of social media research is explored, outlining how approaches and concerns within academic research and ethical guidelines have changed over time. following this, these academic, legal, and ethical considerations are then drawn together, outlining some of the main areas of concern, and how these can be addressed and mitigated in practical ways. in particular, social media platforms provide useful data sources to investigate such a complex and multifaceted phenomenon as migration, for which statistical data is often limited and inconsistent across countries (migration data portal 2020).however, the use of social media data, and its potential for misuse, has resulted in several high-profile cases in the media, such as the cambridge analytica scandal in early 2018 1 , which have raised a number of crucial ethical issues around the areas of informed consent, privacy, and profiling of individuals.to draw these different considerations together and help researchers better understand and evaluate the ethical implications of using social media data in migration research, this chapter provides an overview of the main ethical concerns that have been raised in academic literature and other guidelines, and how these may be addressed and mitigated.while research in more traditional settings often made it easier to determine if information was considered public or private, published or unpublished, these new contexts and the scale of data available presented new ethical challenges for which existing research ethics frameworks and guidelines to online research could not apply. even when anonymised and aggregated as part of a research project on migration flows, the use of geolocated social media data for purposes such as border control and immigration enforcement may not have been anticipated or desired by the original poster. 
in this context, it is crucial for researchers to consider whether social media users would expect their data to be used in that way, and whether any use of this data could potentially lead to harm.more recent academic literature has continued to highlight the ongoing questions around informed consent in large-scale social media studies, and started looking at how social media users (as research participants, in this context) perceive the use of their data in research studies(hudson and bruckman 2004;fiesler and proferes 2018).various regulations now indicate that gaining the informed consent of thousands, if not millions, of social media users is impossible and may therefore not always be required for purposes such as research (association of internet researchers 2012; british sociological association 2017; general data protection regulation 2016). this view, however, is countered somewhat by other research(fiesler and proferes 2018)that highlights that many social media users are unaware that their data is being made available to researchers and is being used in research studies without them being consulted or notified.many of the concerns raised in the previously explored academic literature focus on participants' and researchers' views on the public or private nature of aspects of social media platforms, particularly in relation to its impact on the need for informed consent from social media users. anonymisation refers to the various techniques that can be used to convert personal data into anonymised data(ico, 2012), that is, data that "does not relate to an identified or identifiable natural person or to personal data rendered anonymous in such a manner that the data subject is not or no longer identifiable" (eu gdpr, recital 26). pseudonymisation is "'pseudonymisation' means the processing of personal data in such a manner that the personal data can no longer be attributed to a specific data subject without the use of additional information, provided that such additional information is kept separately and is subject to technical and organisational measures to ensure that the personal data are not attributed to an identified or identifiable natural person" (eu gdpr, article 4(5)).as migration-related research using social media data will likely involve the collection and use of personal information, the dpia process forms a key aspect of identifying and reducing the risks involved for those taking part in the research.this chapter has provided an overview of ethical and legal considerations related to the use of social media data in research projects, particularly migration-related studies, which are often sensitive in nature, and may involve working with potentially vulnerable populations. these strategies will aid researchers working with social media data in migration studies to evaluate and mitigate those risks in a way which adheres to the various laws and guidelines, resulting in beneficial research which protects the rights of the individuals being studied. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/893.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/893.txt new file mode 100644 index 0000000000000000000000000000000000000000..75cf259f22cc576b88b8b2c2d82228af6058f051 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/893.txt @@ -0,0 +1 @@ +in recent years, researchers have started examining the ethical implications of digital technologies. 
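The pseudonymisation definition quoted above can be illustrated with a minimal sketch: user handles are replaced by keyed hashes, and the key (the "additional information") is kept separately from the research dataset. The names and key handling here are assumptions for illustration, not a recommended production setup or legal advice.

import hmac, hashlib

SECRET_KEY = b"stored-separately-from-the-dataset"  # e.g. held by a data custodian, not the analysts

def pseudonymise(handle):
    """Replace a social media handle with a stable pseudonym."""
    return hmac.new(SECRET_KEY, handle.encode("utf-8"), hashlib.sha256).hexdigest()[:16]

posts = [{"handle": "@example_user", "text": "example post text"}]
pseudonymised = [{"handle": pseudonymise(p["handle"]), "text": p["text"]} for p in posts]
print(pseudonymised)  # mapping back to @example_user requires the separately held key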
while most work focuses on quality aspects or non-functional requirements (nfrs) of the software that runs on these systems , , the focus of this article is the hardware-viz., the microchipsthat make these systems run in the first place. consequential initiatives about microchip manufacturing capabilities are currently taking shape around the world, e. g., the european chips act and the us chips and science act . through these multi-billion dollar investments, the governments behind them seek to gain more control over the supply of microchips and become less dependent on (untrusted) foreign manufacturers.a prominent concern is that adversarial (foreign) actors can modify hardware undetected in such a way that the software running on top of it can be manipulated at will. indeed, malicious hardware manipulations can have catastrophic consequences, potentially leading to a complete loss of security or incorrect algorithmic decisions. such manipulations include the insertion of a kill switch to render military hardware inoperable under specifiable conditions , manipulation of machine learning (ml) accelerators - , or the compromising of hardware security primitives .the primary problem is that modern microchips are opaque. microchips have become increasingly complex, culminating, e. g., in the apple m1 ultra with 114 billion transistors . furthermore, non-deterministic design tools automate vast parts of the hardware design process, alienating the designers from the final microchip schematics. meanwhile, microchip supply chains are globally distributed and subject to increasing geopolitical tensions , leading to opaque manufacturing processes. overall, the resulting opaqueness affects not only downstream stakeholders, such as the end users (i. e., consumers or operators) who interact with the systems but also the experts who design them. however, solutions on how to address this opaqueness have not yet been established.towards a possible solution, we adapt a prominent concept from discussions on the ethics of other digital technologies: explainability. in requirements engineering (re), the concept of explainability has received a lot of attention recently, and it is rapidly establishing itself as a vital nfr , - .as explainability promises to ease concerns about the security and safety of software and artificial intelligence (ai) systems by making them more comprehensible to various stakeholders , , we argue that adopting the concept of explainability to hardware requirements-thus designing explainable hardware (xhw)-has the potential to achieve a similar goal: making hardware more comprehensible to various stakeholders and addressing concerns about security and more.based on these considerations, we develop a comprehensive xhw framework. our specific contributions include:• motivating and defining xhw. we argue that hardware is opaque and justify the need for more hardware transparency through, among other things, legislation for trustworthy hardware. as a solution, we propose to transfer the concept of explainability to the hardware domain.we argue that xhw is essential to achieve explainability at the system level, building on definitions and models for explainable ai (xai). (section ii and section iii) • a framework for xhw. we conceive a framework for xhw encompassing different stakeholders, their explainability needs (i. 
e., requirements and quality aspects related to hardware they are interested in, called desiderata), and approaches for enhancing the explainability of hardware, drawing on literature from hardware design, manufacturing, and analysis. (section iv) as explainability promises to ease concerns about the security and safety of software and artificial intelligence (ai) systems by making them more comprehensible to various stakeholders,, we argue that adopting the concept of explainability to hardware requirements-thus designing explainable hardware (xhw)-has the potential to achieve a similar goal: making hardware more comprehensible to various stakeholders and addressing concerns about security and more. we argue that hardware is opaque and justify the need for more hardware transparency through, among other things, legislation for trustworthy hardware., requirements and quality aspects related to hardware they are interested in, called desiderata), and approaches for enhancing the explainability of hardware, drawing on literature from hardware design, manufacturing, and analysis.the few studies available to date-although not directly about microchips, but rather about hardware devices in general-indicate that end users have limited understanding of hardware-.through expert knowledge and literature review, we develop our framework for xhw in three components: relevant stakeholders (section iv-a), desiderata (section iv-b), and-in absence of established approaches to xhw-existing methods and techniques from hardware design, manufacturing, and analysis that could enhance explainability (section iv-c). nevertheless, we can adopt approaches from the domains of hardware design, analysis, and manufacturing that contribute to the understanding of hardware and, thereby, improve its explainability according to definition 1. although not directly needed for hardware manufacturing, promoting explainability during manufacturing could help other stakeholders understand the hardware.3) lack of proper explainability approaches for end users: a potential gap for future research emerges as our participants think that the needs of end users are not adequately covered by existing explainability approaches.one potential explainability gap identified through our survey results concerns the end users, as the hardware experts who participated in our study think that none of the proposed xhw approaches would apply to end users. while it is reasonable to expect that end users have little understanding of hardware due to a lack of technical expertise, it is important to uncover their specific mental models of how hardware works. drawing from work on end users' mental models of other topics such as computer securityand the internet, we expect that research on end users' mental models of hardware could shed light on the information needed in explaining hardware to end users.rq1: what are end users' mental models of hardware? rq2: what information about hardware is required to align end users' mental models with system models?. 
against this background, we imagine that hardware labels could function as an explainability approach for hardware that is particularly suitable for end users.there are already standards and norms in the hardware community that requirements engineers can draw from to answer this rq, such as iso 26262and iec 61508for hardware safety and fips pub 140-3for hardware security.against this background, another avenue for future research would be to analyze the extent to which (ideas behind) xai approaches can be transferred to xhw to devise new explainability approaches for hardware, leading to the following rq: rq4: how can xai approaches be adopted for xhw? a possible starting point for answering this rq would be to distill features of xai approaches and see whether these are useful for hardware. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/894.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/894.txt new file mode 100644 index 0000000000000000000000000000000000000000..62208c9d62ca69908bdd755696da6deba04c0665 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/894.txt @@ -0,0 +1 @@ +on august 16, 2022, alaska held a special election to fill the seat of deceased u.s. house representative don young. for the special general election, there were three candidates: democrat mary peltola and republicans nick begich and sarah palin. the election was alaska's first statewide election conducted by instant runoff voting (irv), commonly referred to as ranked choice voting (rcv). 1 for this election, voters were allowed to rank all three candidates, and ballots were counted as follows: 2 • round 1: only first-place rankings were counted. the results of this round are shown in table 1. at the end of this round, the candidate with the fewest first-place votes (begich) was eliminated. ballots on which begich was ranked first were "transferred" to their second-ranked candidate, if any. any ballot with no second-choice candidate indicated was considered "exhausted" and was not counted in the second round. of the 53,810 ballots on which begich was ranked first, 11,290 were exhausted and the remainder were transferred as shown in table 2.• round 2: after begich's first-place ballots were transferred to their second-choice candidates, the votes were counted again. the results of this round are shown in table 3. since peltola received more than 50% of the votes still active in round 2, she was declared the winner. instant runoff voting is often billed as a solution to many of the problems of traditional, "choose one" plurality voting, also known as "first past the post voting." in plurality voting, voters may only vote for one candidate, and the candidate with the most votes wins, regardless of whether they win a majority of the votes. with irv, the winning candidate must receive a majority of the ballots that are still active in the final round. additionally, irv can often eliminate the "spoiler effect" seen in many plurality elections, where a candidate with only a small degree of support can attract votes that would otherwise have gone to a different candidate, thereby changing the outcome of the election. with irv, voters are frequently assured that they can safely rank their honest first choice candidate first, and then if that candidate is eliminated, they can still vote for their second choice candidate in the next round. 
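A minimal sketch of the round-by-round count just described, assuming Python and made-up ballots; it is not the official tabulation software. Each round counts ballots for their highest-ranked surviving candidate, eliminates the last-place candidate, and stops once someone holds a majority of the still-active ballots.

from collections import Counter

def irv_winner(ballots):
    """ballots: list of ranking lists, e.g. ["begich", "palin"] means begich first, palin second."""
    remaining = {c for ballot in ballots for c in ballot}
    while True:
        tallies = Counter({c: 0 for c in remaining})
        for ballot in ballots:
            top = next((c for c in ballot if c in remaining), None)  # exhausted ballots give None
            if top is not None:
                tallies[top] += 1
        active = sum(tallies.values())
        leader, votes = tallies.most_common(1)[0]
        if votes * 2 > active or len(remaining) == 1:
            return leader
        remaining.remove(min(tallies, key=tallies.get))  # drop the candidate with fewest first-place votes

ballots = [["peltola"]] * 5 + [["palin", "begich"]] * 4 + [["begich", "palin"]] * 3
print(irv_winner(ballots))  # -> palin in this made-up example, after begich's ballots transfer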
irv is also widely claimed to reduce polarization by encouraging candidates to appeal to a wider variety of voters in order to attract second-place rankings from supporters of other candidates.(2) while voters who ranked begich first had the opportunity to vote for their second-place candidate in round 2, voters who ranked palin or peltola first never had their second-place vote considered. in this alaska election, voters could express a tied ranking for their second-and third-place candidates by ranking only their first-place candidate and leaving the other two rankings blank, but voters had no way to express a tied ranking for their first-and second-place candidates. approval voting.approval voting does require some strategic thinking on the part of voters: after voting for their favorite candidate and declining to vote for their least-favorite, is it better to vote for intermediate candidates in order to minimize the chance of a least-favorite candidate winning, or to decline to support them in order to maximize the chance of a favorite candidate winning? if every voter decided to support only their first choice candidate-a strategy known as "bullet voting"-then the outcome would be the same as in a plurality election. condorcet winners and losers. while it is extremely unlikely that the condorcet loser would win any particular approval voting election, we will see in section 4 how it is mathematically possible that, under precisely the right (or wrong?) circumstances, palin could have won the alaska special election if it had been conducted by approval voting. alternatively, if most peltola and palin voters opted to bullet vote, while most begich voters opted to vote for their second-choice candidate, the outcome would likely be similar to the final total in the irv election, and peltola would likely still win the election.while it is certainly not a given that a statewide, partisan election would follow the same pattern as a local, nonpartisan election, it appears that on average, voters generally prefer to support a relatively small number of candidates in an approval voting election.there is one key difference between our models for star and approval voting: since voters who ranked all three candidates indicated a preference between their second-and third-place candidates, for star voting we will assume that they will give their second-place candidate at least one star, so as to maintain their preference order for the runoff round. absent a situation in which begich's second-place voters gave him many fewer stars on average than palin's second-place voters gave her, begich would advance to the runoff and win the election regardless of how many stars palin and peltola received from second-place voters.• how peltola could win: even if all voters gave their second-choice candidate the minimum of 1 star, peltola would receive the highest score in the first round but would still lose the runoff to begich. peltola's only path to win the election would be for most of begich's second-place voters to give him only 1 star and for a significant fraction of palin's second-place voters to give her 3 or 4 stars, thereby allowing palin to defeat begich in the score round and advance to the runoff, where peltola would defeat palin.but even with the uncertainty about how voters might choose to vote in a star voting election, it seems clear that begich has a much stronger path to victory here than in an approval voting election. 
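Likewise, the STAR procedure discussed above (a score round followed by an automatic runoff between the two highest-scoring candidates) can be sketched as follows, again with made-up ballots rather than real election data.

def star_winner(score_ballots):
    """score_ballots: list of dicts mapping candidate -> score in 0..5."""
    totals = {}
    for ballot in score_ballots:
        for cand, score in ballot.items():
            totals[cand] = totals.get(cand, 0) + score
    finalist_a, finalist_b = sorted(totals, key=totals.get, reverse=True)[:2]  # score round
    prefers_a = sum(b.get(finalist_a, 0) > b.get(finalist_b, 0) for b in score_ballots)
    prefers_b = sum(b.get(finalist_b, 0) > b.get(finalist_a, 0) for b in score_ballots)
    return finalist_a if prefers_a >= prefers_b else finalist_b  # runoff by ballot preference

ballots = [
    {"peltola": 5, "begich": 1, "palin": 0},
    {"palin": 5, "begich": 3, "peltola": 0},
    {"begich": 5, "palin": 2, "peltola": 0},
]
print(star_winner(ballots))  # -> begich in this made-up example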
the only scenario in which begich might lose would require a substantial asymmetry in the way that voters with different preferences choose to score their second-place candidates, with many more begich voters choosing to score palin highly than the reverse, and essentially no peltola voters choosing to score begich highly.that said, with the limited information that we have regarding voter behavior in approval and star voting elections, it seems somewhat likely that for the special election, peltola would still have won under approval voting, while begich-the condorcet winner-would most likely have won the election under star voting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/895.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/895.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab2d5621650fb013a8c884d3e7b91f7c47c09cdb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/895.txt @@ -0,0 +1 @@ +in 2020, due to the covid-19 pandemic, the uk government canceled the a-level exam and instead used an ai model to predict student performance. the model systematically estimated that students from minority and low-income communities had lower grades than their historical grades, making these students less likely to get into prestigious universities. despite the controversy, the uk government was unable to explain why the algorithm produced such systematic discrimination and ultimately had to eventually switch to the manual estimation of student grades by teachers.this incident highlights the social problems that arise from the use of algorithms that lack transparency and explainability. the lack of clear explanations for the algorithm's outcomes can lead to unjust and discriminatory practices, which can have far-reaching consequences. as a result, policymakers and stakeholders have been working to address these issues by implementing policy measures and developing technical solutions to ensure the explainability of ai algorithms.the paper is a work in progress to provide a comparative review of the growing policy efforts to promote the explainability of ai algorithms. through this analysis, the paper aims to contribute to the ongoing discussions on the governance of ai and the need for greater transparency and accountability in algorithmic decision-making. for example, by sifting through and analyzing gigabytes of user-generated data, ai bots detect inappropriate or controversial websites or social media material, while human professionals "behind the ai curtain" use their judgment and make the final decision to remove a social media post or video(jarrahi,2018). the lack of access to the inner workings of algorithms makes it impossible for humans to trust ai, which impedes such cooperation as well as scrutiny of ai. ai that lacks explainability impedes human-machine collaboration and "makes it difficult to apply ai on a large scale in fields such as defense, finance, healthcare, law, cyber security, etc. through data rather than specific programming, machine learning models can autonomously construct rules of operation and achieve autonomous learning as well as self-iteration, allowing a single model to be applied to multiple scenarios and greatly freeing the productivity of ai engineers. such opacity prevents humans from understanding how ai makes certain decisions, making it impossible for humans to detect and correct errors, biases, discrimination, and other problems generated by ai. 
xai has been studied regarding the early intelligent systems since the early 1970s, and ai explainability has regained much attention in more recent years due to the pervasiveness of ai and the increasing complexity of ai algorithms in the era of neural networks. in its 2022 report on the development of xai, tencent research institute concludes that the significance of interpretability of ai is to help users increase confidence and trust, prevent bias, promote algorithmic fairness, demonstrate that ai systems meet regulatory standards or policy requirements, drive improvements in system design, and help with risk assessment. the guidelines listed both transparency and explainability of algorithms as one of the 7 key requirements for achieving trustworthy ai, emphasizing the critical role of xai in building human trust in algorithms. in 2020, the ai hleg presented their final assessment list for trustworthy artificial intelligence, which translated 7 principles into an accessible and dynamic checklist to guide ai developers and deployers in implementing these principles in practice.in 2019, european parliament's science and technology options assessment (stoa) panel published "a governance framework for algorithmic accountability and transparency", in which ai transparency and explainability are conceived as instruments to achieve ai fairness and governance and called upon the eu to establish regulatory mechanisms and legislative frameworks as soon as possible. the ai act mandates that high-risk ai systems be 'sufficiently transparent to enable users to interpret the system's output and use it appropriately'. china's current governance of ai is still centered on technological progress, with the bottom line of ensuring safety and controllability, which requires a balance between innovation and the ethics of ai as represented by explainability. ibm proposed the ai explainability 360 toolkit and ai factsheets 360 to support the explainability of ai models.the development and implementation of ai is beyond country borders, and it is of great importance to deliberate on ai innovation and governance at a global collaborative level. the oecd encourages its member states' ai actors to provide meaningful information, appropriate to the context, and consistent with the state of art: to foster a general understanding of ai systems; to make stakeholders aware of their interactions with ai systems, including in the workplace; to enable those affected by an ai system to understand the outcome, and to enable those adversely affected by an ai system to challenge its outcome based on plain and easy-to-understand information on the factors, and the logic that served as the basis for the prediction, recommendation or decision. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/896.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/896.txt new file mode 100644 index 0000000000000000000000000000000000000000..f3aff681b55d2e1478ecbc6087aba4fb14397a9e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/896.txt @@ -0,0 +1 @@ + ai and ml development has remained removed from reflections on how the epistemologies, motivations and politics underpinning the development of algorithmic systems influence their social impacts, in particular when it comes to priority-setting in ai development. 
we put forward three quandaries arising in the pursuit of responsible development of algorithmic systems, further analyzed and problematized in this paper's second section: (a) the opportunity dilemma, or what comes first, datasets or problem statement? (b) the scale dilemma, or the tension between cost-efficiency pressures to scale up versus the demand to situate and contextualize based on social specificities; and (c) the epistemic dilemma of locating responsible interventions between naïve-objectivist measurements or impractical relativist assessments. such dilemmas should not be perceived as clear-cut either-or choices, but as part of paired of antinomies, leading into novel knowledge representations, as recently argued in the context of bridging knowledge management with data labelling practices in a process of dialectical synthesis, where two apparent contradictions act as opportunities for social change. do problem statements shape the definition and preparation of the datasets needed? or the opportunistic availability of data, and inspecting the available affordances helps to shape the problem to be tackled by ai? a great deal of research in ai is driven by technological solutionism based on what is technically, economically and even politically feasible. if data availability precedes the formulation of problems to be tackled by ai, this might be justified mainly by opportunity whereby the problematization of constraints and path dependencies engendered by existing data become second order concerns. consider for example the implications of using legacy databases based on binary sex and gender classifications for automating decision making concerning the rights of trans gender and nonbinary individuals, both in terms of sexual/gender recognition claimed based on facial recognition datasets, or body scanning x-ray datasets. as qualitative studies have shown, everyday labelling work is seldom a well-informed practice performed by technical experts with prior knowledge about the social significance of the data aggregates being clustered and labelled,. data work is likely to happen under the pressure of meeting deadlines and is susceptible to the interpretative flexibility of numerical outputs based on the data scientists' understanding of a given cluster's symbolic value. in other words, not every data scientist can know about the particularities of a given cluster or dataset's social context, especially when the numerical representation of a cluster is alienating the labeler from the situation in which data have been collected. such is the preferred approach of conventional data science, which aims to satisfy the cost and moral exigencies of reducing harm while increasing financial return of more accurate findings arising from even greater volumes of data. for example, in medical imaging applications of ml, a recent study suggested that "more samples may yield poorer performance" as they obscure small, yet important, structural changes, instabilities, and nearly undetectable perturbations based on a variety of social factors, becoming easily dismissed in large-scale pattern analysis, thus the authors' calling for more context-based testing of such instability phenomena. the other approach is influenced by the social exigencies to situate datasets within their given contexts in order to serve the needs and requirements of specific and underrepresented groups impacted by algorithmic systems. 
in pondering this dilemma researchers need to revisit the pervasive and taken-for-granted dictum of scaling up, and consider alternatives for broader social alignment for specific purposes between data practitioners, data generators, and data owners. the last decade has seen a burgeoning of engaged and detailed work from various fronts within the social sciences into the contestability of sources of "truth" employed for ground-truth datasets, the politics of algorithms, the invisible material and human cost of curation processes, the performativity of algorithmic metrics and ai future imaginaries, and an assessment of individual/institutional motives as relating to the social and/or user-specific benefit. such approaches of treating everyday dilemmas as meditation paradoxes have found fertile operation in clinical practice-why not in ml practice too? does the designer wish to define the problem based on existing datasets or find datasets to match existing problem definitions? do we want generalizable, cost-effective data, or specialized, context-specific ones? do we prefer the naiveté of objectivism or the futility of relativism? the best answers to these questions might lie beyond simple binaries, if we approach them as commensurable opportunities for nuanced new options, akin to dialectical synthesis. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/897.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/897.txt new file mode 100644 index 0000000000000000000000000000000000000000..e964881275ab77557f1348e10753e0d926902ac9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/897.txt @@ -0,0 +1 @@ +social automation is generally understood as the use of software tools and techniques to automate and optimise the social interactions of a computational agent (marinaccio et al., 2015;nitta et al., 1993;woolley, 2018). not only is it ubiquitous in cyberspace, but the highly standardised nature of online platforms (uniform profiles, quantifiable and predetermined range of actions) also leaves little for humans to distinguish themselves from bots that access all the same environmental attributes of cyberspace (bakardjieva, 2015). moreover, in their natural habitat, these bots have what most humans lack -the scale, the speed, and a tireless goal-driven existence. owing to this, scholars and practitioners of cyber security and propaganda alike have long been warning about the now prevalent automation of social engineering (ariu et al., 2017;duff, 2005;frumento et al., 2016). following from a unified conception of social engineering as the psychological manipulation of people to get them to exhibit certain thoughts and behaviour, we define malicious social automation (msa) as the use of social automation to exploit or enable the exploitation of human psychology in order to achieve the objectives of a threat actor. the agents of this are socialbots, automated accounts which imitate online human behaviour (boshmaf et al., 2013). the applications of msa can be further understood via three overarching operational constructs -the primary memetic and transgressive types -where memetic automation encapsulates the social automation aiming at the desired spread of messages and ideas, and the transgressive type encapsulates the social automation that violates the established security assumptions of an environment/user.
and a third, supportive type of msa primarily aiming at facilitating and coordinating the memetic and/or transgressive operations, such as user profiling or command and control (c2) management over social platforms. since 2021, microsoft's threat intelligence reports also began to consider online psychological operations as disruptive to enterprise and nation-state cyber security (microsoft, 2021). many of these online psychological operations consist of 5% to 10% socialbots that tie together various small networks and alter the natural assortment of influence (cheng et al., 2020;stewart et al., 2019;zhang et al., 2022). in fact, many applications of msa defy conventional cyber security thinking since most user interactions with online information flows and social agents may occur outside an organisation's own digital network -which presents additional challenges to maintaining cyber situation awareness. as the cyber threat environment itself is dominated by advanced persistent threats and automated attacks, including from sophisticated first-world military organisations (graphika & stanford internet observatory, 2022;meta, 2022), it is pertinent for defending governments to maintain persistent and actionable situation awareness in cyberspace. this has led to an increased requirement worldwide for automated mechanisms to maintain continuous cyberspace awareness among various stakeholders, exemplified by the operationalisation in 2021 of a structured-information based bidirectional automated threat intelligence exchange platform by the indian computer emergency response team (cert-in) (cert-in, 2020). in 2022, the north atlantic treaty organisation (nato) began to back its own language and information standards for specifying foreign information manipulation and influence (fimi) operations, which are also based upon the same standardised information models and exchange mechanisms (stratcomcoe, 2022). further, automated information exchanges have long been desired by government structures like computer emergency response teams (certs) and various security operations centers (socs) for communicating and coordinating on various cyber situations (dandurand & serrano, 2013), and for maintaining a common operating picture and desired readiness levels. internationally accepted information standards enable this endeavour and make it much more seamless. however, the dependence on stakeholder communication and automated cyber threat intelligence (cti) sharing mechanisms also puts cyber situation awareness in a different league than the conventional ideas of situation awareness, with broad implications for national policies and cyberspace governance when it comes to msa-type threats. owing to this limitation, theoretical development of the concept has taken three routes to conceptualisation -a) the original, where situation awareness is in the mind, b) the technocratic, where situation awareness is a knowledge artifact (mulgund & landsman, 2007), and c) the integrative, where situation awareness is an emergent phenomenon of interactions in a wider sociotechnical system (stanton, 2016).
notwithstanding, we are going to conceptualise situation awareness in cyberspace, or 'cyberspace awareness' for brevity, through the last approach, which is also known as a distributed situation awareness approach. without the intuition resulting from years of experience, an analyst who is inundated with millions of threat indicators every day has little recourse for judging the actual situation of cyberspace. as cti forms the basis of any actionable cyberspace awareness and this situational information needs to be shared with a wide range of stakeholders to develop a common operating picture, market forces and governments try to address this problem with curated cti feeds (bouwman et al., 2020). we will integrate all three dimensions of knowledge into a cohesive formulation of cyberspace awareness, and thereby not only view digital threat information through the commonly accepted syntactical and semantical paradigms, but also introduce an operatic dimension of it, which pertains to its orchestration towards meeting the organisational or national goals and interests (figure 2: with growing threats of social automation, states need to be mindful of all three dimensions of cyberspace awareness). we see that the present information sharing standards, while aiming for situation awareness, are better suited for atomised data sharing, and the pursuit of situation awareness is left to the analyst and his organisation, consistent with the traditional conception of situation awareness existing in the mind (stanton et al.). if cti about social automation-based threats is to become common, it requires much greater standardisation efforts for semantic clarity than have presently been mustered, as well as international efforts to create consensus on labelling based on objective, rational examination of human affairs and the technical nature of information, and certainly not political allied/axis bloc-formation efforts, which brings us to the operatic dimension of cyberspace awareness. however, most countries lack a government organisation to contest such poorly or strategically disseminated cti objects. however, as the use of social automation gets increasingly common, governments and the private sector will have to find common ground over handling and classifying the threat information about automated social activities while also creating appropriate institutional mechanisms for monitoring and evaluating such information. since different cyber security vendors may subscribe to different information models and restriction mechanisms, the lack of a unified, legally enforceable information classification and restriction mechanism for sharing social cti is bound to create 'human concerns', even if away from the eyes of the masses. this underlying nervous system for cyberspace awareness is not as agile as it should be, and may require a significant amount of customisation for emergent threats like msa, which we've indicated for all three dimensions of cyberspace awareness. for policymakers, it is therefore essential to take stock of each dimension of cyberspace awareness while adopting a particular cti mechanism, as these standardised information sharing methods may also carry along their own implications in international politics and governance structures.
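to make the idea of structured, machine-readable cti concrete, the following is a minimal python sketch of how an indicator for a suspected socialbot account might be serialized for automated exchange; the layout loosely follows stix 2.1 conventions, but the field choices, the account handle, and the pattern syntax are illustrative assumptions, not the actual cert-in or nato schema:

import json
import uuid
from datetime import datetime, timezone

def make_socialbot_indicator(account_handle: str, platform: str, confidence: int) -> dict:
    """build a minimal stix-like indicator object for a suspected socialbot account.

    the type/id/created/pattern layout loosely follows stix 2.1 conventions, but the
    exact fields, labels and pattern syntax here are illustrative assumptions only.
    """
    now = datetime.now(timezone.utc).isoformat()
    return {
        "type": "indicator",
        "spec_version": "2.1",
        "id": f"indicator--{uuid.uuid4()}",
        "created": now,
        "modified": now,
        "name": f"suspected socialbot: {account_handle}",
        # a simplified observable pattern; real feeds would use a validated pattern grammar
        "pattern": f"[user-account:account_login = '{account_handle}' "
                   f"AND user-account:platform = '{platform}']",
        "pattern_type": "stix",
        "confidence": confidence,  # 0-100, the producer's own assessment
        "labels": ["malicious-social-automation", "memetic"],
    }

if __name__ == "__main__":
    indicator = make_socialbot_indicator("example_bot_42", "examplesocial", 70)
    print(json.dumps(indicator, indent=2))  # ready to drop onto an automated exchange feed

objects of this kind are what bidirectional exchange platforms pass between certs and socs, which is also why semantic agreement on labels such as 'malicious-social-automation' matters so much.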
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/898.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/898.txt new file mode 100644 index 0000000000000000000000000000000000000000..7da3df984c1f8fc677154bba1541016884341b9e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/898.txt @@ -0,0 +1 @@ +the cambridge analytica scandal is the most well-known case about large-scale computational interference in democratic elections and referendums. although debates continue about the scandal's actual impact upon historical election outcomes, it is frequently raised as an alarming precedent by eu citizens together with academics and many others (brkan, 2023;hu, 2020;ruohonen, 2023;zuboff, 2022). although much of the controversy around the scandal was and continues to be about politics in the united states, the waves also hit the shores of europe. already before the brexit vote, which too was allegedly a part of the same scandal, it was reported that political parties in the united kingdom, too, had detailed databases on the political leanings of citizens in the country (shepherd, 2019). thereafter, many smaller incidents have occurred throughout europe. for instance, it was reported in 2020 that a data breach had occurred in malta that revealed the political opinions and other details of maltese voters (noyb, 2020). to put the actual breach aside, it remains unclear in this case why such political data was collected in the first place and whether the collection had any legal basis. likewise, there is an ongoing case in austria involving its postal office's collection of personal data and using algorithmic solutions for determining the political alignment of austrians in order to monetize the inferred data for political advertising purposes (noyb, 2022). this case has also been escalated to the highest european court. in another recent case a civil society group was unlawfully profiling people's political opinions and their other details based on public social media data (vanleeuw, 2022). furthermore, not only did political parties in the hungarian 2022 general election use political profiling and micro-targeting, but some of them also drew personal data collected by the country's public administration for purposes other than election campaigning (hrw, 2022). algorithmic political profiling has also been actively studied in computer science (see baran, kajstura, ziółkowski, and rajda 2022;elmas 2023;kitchener, anantharama, angus, and raschky 2022;qi 2023;ram and rizoiu 2023, among many others). the overall conclusion in these academic studies is that a fairly good accuracy is achievable for determining people's political stances based on public social media data. obviously, furthermore, social media companies and platforms have a much better vantage point than academics to carry out political profiling about their users. it should be remarked that in both cases european data protection laws apply. against the decisions on some of the noted recent cases, even individual scientific researchers may be held legally liable for political and other profiling.
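to illustrate how low the technical barrier to such profiling is, a minimal stance-classification pipeline of the kind these studies describe can be sketched with scikit-learn; the posts and labels below are invented toy data, and any real use of personal data in this way falls under the data protection rules discussed here:

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import make_pipeline

# invented toy posts; a real study would use labelled public social media data
posts = [
    "we must cut taxes and shrink government",
    "markets know best, deregulate now",
    "raise the minimum wage and fund public healthcare",
    "climate justice needs strong public investment",
]
labels = ["right", "right", "left", "left"]

profiler = make_pipeline(TfidfVectorizer(), LogisticRegression(max_iter=1000))
profiler.fit(posts, labels)

print(profiler.predict(["public healthcare should be expanded"]))  # most likely 'left'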
ethical assessments are not enough. in the aftermath of the cambridge analytica scandal and related issues, including russia's alleged interference with the elections in the united states and other democratic countries, governments throughout the world commenced studies on the topics of political profiling and election interference. regarding the european union, the conclusion of many at the time was that the legal competency and power were limited at the eu-level for attempting to address the issues through regulative action (leino-sandberg, limnéll, & wass, 2019;ruohonen, 2020). against these assessments, it is a small and positive surprise that the eu has managed to find new solutions and reach a consensus over these. although the general data protection regulation (gdpr) remains the cornerstone for tackling the issues, the recently enacted digital services act (dsa) is another notable recent large-scale regulatory attempt in the european union. proposals for improving the transparency of political advertising have also been presented under the european democracy action plan. furthermore, the non-binding code of practice on disinformation was also strengthened in 2022. according to critics, however, this voluntary agreement, together with anything legally non-binding, is doomed to fail (see albert 2023;avaaz 2021;goujard 2023b;and galantino 2023 for a recent academic assessment of the criticism). against this criticism, including the disappointment expressed by some regarding the dsa (killeen, 2023;ruohonen, 2023), it is welcome that the european commission launched a further regulative initiative in 2020 to improve the transparency of political advertising and to restrict it to some extent. it is noteworthy that this proposal is a regulation and not a directive; it is directly applicable eu law without the necessity for national transpositions. hence, it would patch noteworthy gaps in the national laws of some member states. the european parliament approved the proposal with a substantial majority in early february 2023. yet, at the time of writing, in october 2023, the proposal is still pending in the so-called trilogue negotiations. therefore, it is a good time to review the proposal and contribute to its deliberation with a few points from a perspective of scientific research. by complementing the recent academic assessment by brkan (2023), in what follows, the proposed regulation is thus briefly reviewed, after which a few points are raised about its potential impacts and limitations.
the first recital strikes at the heart of the problem: there has been an increasing supply and demand for political advertising, which often occurs in a cross-border manner through various actors, such as political consultancies, advertising agencies and public relations firms, ad-tech platforms, political data analytics firms (simon, 2019), and so-called social media influencers who have also recently expanded their controversial promotion activities toward politics. in recitals from 6 to 15 the proposal discusses the lack of harmonization across the european union, noting also that the eu has an interest in promoting pan-european political campaigning, including campaigns done by the political groups in the european parliament. then, political actors are defined to include (a) national and european political parties, (b) political alliances, (c) political candidates for posts at european, national, regional, and local levels, (d) already elected officials at such levels, (e) unelected members at such levels, (f) political campaigning organizations with or without legal personality, and (g) any natural or legal person representing or acting on behalf of the previously listed political actors. because natural persons are covered, individual citizens placing political ads on behalf of politicians or political actors in general are also in the proposal's scope; an issue that was previously seen by some to be problematic (ruohonen, 2020). among other things, providers of political ads are mandated to retain information on political campaigns, any services provided for political advertising, the amounts of money invoiced and the value of any other benefits offered, and, when applicable, the identity of a sponsor and its contact details (article 6). then, in article 8, political advertising publishers (which are defined in article 2 to be natural or legal persons who broadcast, make available through an interface, or otherwise bring political ads to the public fora through any medium) are required to disclose money or other benefits received in their annual financial statements. according to article 9, political advertising services and publishers must also put a mechanism in place to allow any individual to notify them about political ads that infringe the proposed regulation. consequently, the proposal brings a further issue about the enforcement of the gdpr toward political parties and other political actors in europe, including a question about who has the power for this enforcement at the eu-level; here it is likely the european data protection supervisor (brkan, 2023). even though charging fees is allowed by article 11(5), some media representatives argued that there should be a right for political advertising services to outright refuse transmission requests from these actors in case their requests are unfounded (nme, 2022). european politicians and political parties might also try to justify political profiling on the grounds that they maintain democracy and have legitimate interests for electoral purposes, as noted in the gdpr's recital 56, and then use the gdpr's exemption in article 9(2)(g) on substantial public interests as the legal basis together with article 6 (brkan, 2023).
but since the european political actors rely on platforms, ad-tech, political data analytics companies, and other third-parties for the political profiling, targeting, and amplification, these kind of justifications would arguably have only weak grounds. thus, the proposal does not solve the problem of some particular political parties and other political actors having ample and unfair financial resources for political advertising(kozak, 2023). given that there are legitimate interests for political advertising publishers in this regard, such as the economic costs related to retaining political advertisement data, these points restate that a better solution would be a unified database maintained by a competent eu institution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/899.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/899.txt new file mode 100644 index 0000000000000000000000000000000000000000..f94b04127767560e51a24f61103c69b212faf353 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/899.txt @@ -0,0 +1 @@ +since long before modern computing we have sought to teach machines through natural, humanistic interactions . as early as 1950, alan turing stated in his seminal paper on artificial intelligence (ai) that we ought to "provide the machine with the best sense organs that money can buy, and then teach it. . . that process could follow the normal teaching of a child. things would be pointed out and named, etc." . john mccarthy posed one of the earliest iteration of such a system in 1959, describing an "advice taker" that could learn via common sense reasoning by drawing logical conclusions from any set of premises issued to the system as imperative statements . in the 1980s, this work was extended by hayes-roth et al. to develop a generalized framework for machines to learn from external (human) advice, involving steps for receiving, interpreting, and integrating advice into a machine's learning . since then, the rapid development of ai and machine learning (ml) has led to significant progress in giving artificial agents the ability to interact with humans and learn from their feedback in a naturalistic manner .a technique of particular import which has arisen in the past few years is reinforcement learning with human feedback (rlhf). reinforcement learning (rl) refers to the field of ml in which an agent learns through interactions with the environment to select the best course of action (a policy) in a given state . each state-action pair is described by a reward, which serves as feedback for the agent to tune its policy. as an agent learns through training episodes, it ultimately arrives at an optimized policy which permits maximization of the reward. rl has garnered high-profile success in various applications including board and video games, autonomous driving, text summarization, online personalization, finance, and healthcare. as such, it is thought to be a critical component in the development of truly generalized autonomous ai .rlhf is an extension of rl that incorporates human feedback into the training process . in addition to the reward signal, an rlhf agent receives feedback from a human teacher that permits it to learn with broader perspective and greater efficiency in a similar fashion to humans learning from the expertise of another human . 
by providing a bridge between an agent and a human teacher, rlhf allows humans to directly guide machine learning and machines to grasp elements of decision-making distinctly embedded in human experience. the ability to provide and incorporate human feedback in rlhf is further a critical step toward achieving improved alignment between ml models and human values. although rlhf is a relatively young technology, it has been catapulted into public view by multiple high-profile ai applications including openai's chatgpt, deepmind's sparrow, and anthropic's claude. uses of these chatbots include constructing context-appropriate email responses, solving math problems, and generating code. presently, rlhf is finding widespread application in business, education, healthcare, and entertainment. rlhf creates a host of benefits over traditional rl methods. its key advantages lie in better alignment with human intentions, as well as planning conditional on future feedback, fluid learning from various types of feedback, and curation of feedback according to necessity, all of which are indispensable for creating truly intelligent agents. it also permits machines to learn by abstracting from what humans value as opposed to simply imitating human behavior, thereby equipping agents with greater adaptability, enhanced interpretability, and more reliable decision-making. despite these advances, there is vast potential for rlhf to be improved. rlhf models are potentially prone to inaccurate or harmful behavior (e.g., issuing racist statements). this limitation reflects a longer-term challenge and motivation for improving rlhf. additionally, gathering human preference data as feedback is costly, and disagreement between human annotators adds variance to training data, which can create confusion in situations in which the ground truth is obscure (e.g., ethical dilemmas). moreover, human feedback in rlhf is often constrained to be in the form of preference orderings, which provide limited information and thereby restrict applicability. it is desirable to achieve a broader formalism that considers multiple types of feedback, dependent on task context and similar to the diversity of responses utilized in human learning. work in this area could facilitate identification of which types of feedback lead to better generalization for applications.
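because much of the feedback just described arrives as preference orderings, a common ingredient is a bradley-terry style reward model trained to score the preferred response above the rejected one; the pytorch sketch below is a minimal illustration under assumed fixed-size response embeddings, not the training pipeline of any particular chatbot:

import torch
import torch.nn as nn

class RewardModel(nn.Module):
    """tiny stand-in reward model: maps a fixed-size response embedding to a scalar score."""
    def __init__(self, embed_dim: int = 128):
        super().__init__()
        self.score = nn.Sequential(nn.Linear(embed_dim, 64), nn.Tanh(), nn.Linear(64, 1))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.score(x).squeeze(-1)

def preference_loss(model: RewardModel, preferred: torch.Tensor, rejected: torch.Tensor) -> torch.Tensor:
    """bradley-terry style objective: push the preferred response's score above the rejected one's."""
    return -torch.nn.functional.logsigmoid(model(preferred) - model(rejected)).mean()

if __name__ == "__main__":
    torch.manual_seed(0)
    model = RewardModel()
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    # stand-in embeddings of (preferred, rejected) response pairs collected from annotators
    preferred = torch.randn(32, 128)
    rejected = torch.randn(32, 128)
    for _ in range(100):
        optimizer.zero_grad()
        loss = preference_loss(model, preferred, rejected)
        loss.backward()
        optimizer.step()
    print(float(loss))  # the trained scalar reward is what an rl policy (e.g. ppo) then maximizes

the learned scalar reward is what a policy-optimization step would then maximize, which is also where the alignment and bias concerns discussed next enter.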
the transformational potential of rlhf makes it critical to consider how broadened application of rlhf-based technologies may impact various stakeholders, what ethical concerns might arise as a result, how it may affect social and ethical challenges, and how governance may be utilized to mitigate risks.the objective of this report is threefold: first, to provide a systematic study of the social effects of rlhf; second, to identify key social and ethical issues of rlhf; and third, to discuss social impacts for stakeholders.• how might rlhf alter the access different social groups have to ai technologies? • how might rlhf impact cultural and international relations?.rlhf is an important step forward in aligning ai systems with human values as it provides more nuanced guidance than traditional ml and rl, which struggle to capture the full extent of human preference.as ai becomes democratized, how do we construct systems that are sensitive to a diversity of perspectives and value systems and properly aligned in such contexts? can we design a unified values framework, or should value alignment be restricted to cultural-specific contexts, much like law enforcement differences between nations? could rlhf make inter-and intra-regional differences in conceptions of morality and ethics more salient? a significant factor in the net impact of rlhf models lies in to whom such models are aligned. broadly speaking, ai is affected at multiple levels of development by historical bias which affects data generation, representation bias which affects sampling and population studies, measurement bias due to inaccurate data stemming and structural discrimination against groups, aggregation bias due to over-reliance on one-size-fits all models, learning and evaluation bias during model training, and deployment bias due to disparity between intended and observed application. preliminary analysis of rlhf results suggests it can be leveraged to mitigate long-standing effects of historical, representation, and measurement bias by balancing human feedback with representation and expertise across a diverse range of human annotators.by allowing ai agents to learn from human expertise, rlhf can facilitate development of more adaptable ai systems for use in various industries. while most ai applications embody a variation of the centaur's dilemma-the fundamental opposition between human control and optimized ai functionality-rlhf directly plants human feedback as an informative source, leading to greater clarity regarding the locus of human control while simultaneously enhancing functional results. ultimately, the potential for rlhf to positively impact society should not be ignored, and dependence of its benefits on well-designed feedback systems is a further call for investment into rlhf. as rlhf raises concerns that echo those of existing ai technologies for governance, industry, safety, ethics, and the future of global power relations, it will be important for all to be aware and intentional in the adoption of rlhf. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/9.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/9.txt new file mode 100644 index 0000000000000000000000000000000000000000..7d02565c557bb3e44a9e770c26cff621365d1a36 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/9.txt @@ -0,0 +1 @@ +in recent years, great efforts have been devoted to the task of response generation in open domain dialogue. 
however, due to the one-to-many and many-to-one phenomena (sutskever, vinyals, and le 2014;csaky, purgai, and recski 2019), generative dialogue models often fail to generate diverse, relevant and coherent responses. many existing methods (bowman et al. 2016;zhao, zhao, and eskénazi 2017;chen et al. 2018;gu et al. 2019;gao et al. 2019a;bao et al. 2020;lin et al. 2020;sun et al. 2021;wang et al. 2021;chen et al. 2022) introduce latent variables to alleviate these problems. as shown in figure 1, latent variables can be categorized into continuous latent variables and discrete latent variables from the perspective of their construction methods. continuous latent variable models (bowman et al. 2016;zhao, zhao, and eskénazi 2017;chen et al. 2018;gu et al. 2019;wang et al. 2019;sun et al. 2021;chen et al. 2022) encode dialogue contexts and responses into a continuous latent space to capture diverse semantic features of dialogue responses. in contrast, discrete latent variable models (gao et al. 2019a;bao et al. 2020, 2021) design a fixed number of learnable parameters to better capture the semantic relationship between responses and contexts. however, continuous latent variables often face a poor semantic correlation problem, where latent variables may not accurately reflect the semantics of the dialogue context (gao et al. 2019b;sun et al. 2021), while discrete latent variables struggle to capture fine-grained and diverse expressions due to the limitation of their variable scale (demonstrated in §4.4). to tackle the aforementioned problems, we propose to construct hybrid latent variables (hlv). hlv employs discrete latent variables to constrain the semantic relevance and uses the diverse features learned by continuous latent variables to enrich the expressions of generated responses. to capture richer semantic information, we complement the commonly used sentence-level continuous latent variables constructed by (lin et al. 2020) with multiple token-level representations. more specifically, we first create symbolic-level gaussian probability distributions for all context tokens, and then we construct a sentence-level gaussian probability distribution through an additive gaussian mixing method (wang, schwing, and lazebnik 2017). to build discrete latent variables, we introduce a self-separation training approach (sun et al. 2021), which expands the input with trainable discrete latent variables. furthermore, we propose a conditional hybrid variational transformer (chvt), which constructs hlv based on a basic transformer encoder-decoder framework (vaswani et al. 2017). through theoretically analyzing the difficulty of optimizing conditional variational models for dialogue response generation, we propose two techniques to effectively train hlv and chvt. first, to alleviate the posterior vanishing problem caused by one-to-many samples, we propose to remove the bag-of-word loss and set a large number of warm-up steps to anneal the kl divergence. second, we give a theoretical upper bound for the kl divergence under the condition that the expectation of the posterior distribution is non-negative. hence, we propose a relaxed kl divergence to further mitigate the posterior vanishing problem during the optimization process. we conduct extensive experiments on the dailydialog (li et al. 2017b) and opensubtitles (lison and tiedemann 2016) datasets. empirical results show that chvt is superior to existing transformer-based latent variable mechanisms w.r.t. diversity, relevance and coherence metrics.
to explore the effect of hlv on pre-trained models (bao et al. 2020;lewis et al. 2020;bao et al. 2021;mi et al. 2022), we also extend hlv to the fine-tuning process of plato and bart, and we find that incorporating hlv can help the model perform better on the dialogue generation task. furthermore, we validate the advantage of the proposed hlv over continuous and discrete latent variables through ablation studies. our contributions are as follows: • we propose the conditional hybrid variational transformer (chvt), which employs the proposed hybrid latent variables (hlv) to generate diverse, relevant and coherent dialogue responses. • we theoretically prove the main problems of optimizing continuous latent variables, and we put together several tricks (removing the bow loss, a kl annealing trick, a relaxed kl divergence) to solve them. • we show extensive empirical results on the dailydialog and opensubtitles datasets to illustrate the superior performance of chvt and the advantages of hlv in generating diverse, relevant and coherent responses. gao et al. (2019a) use discrete latent variables with explicit semantics to generate responses, making it easier for responses to retain the semantic correlation. in a word, to make full use of the advantages of continuous and discrete latent variables, we propose the hlv, which uses discrete latent variables to constrain the contextual semantic correlation, and employs continuous latent variables to capture the symbolic-level features for enriching diversity. conditional variational transformer: unlike recurrent neural networks, where latent variables are often used as initial decoder states, applying latent variables in transformers is still an open question, and there is currently no general way of combining latent variables with transformer architectures (hu et al. 2020;chen et al.); therefore, we review some existing methods of latent variables in transformers. to highlight the respective advantages and to offset the disadvantages of continuous latent variables and discrete latent variables, in this section we propose the hybrid latent variable (hlv). continuous latent variables: continuous latent variables are expected to capture more fine-grained diverse features, thereby enhancing the diverse representation of the generated responses. to tackle this problem, during training, the continuous latent variable z_s of context c will first be sampled from p(z_s|c, r), and then expanded k times so that it can be added to the discrete latent variables. inference phase: during inference, chvt uses the prior distribution p(z_s|c) to sample the sentence-level continuous latent variable z_s and mixes z_s with the discrete latent variables to construct hlv. pre-trained baselines: to explore the generalization of hlv in pre-trained dialogue models, we adapt hlv over plato-v1 (132m parameters; bao et al.). in order not to destroy the knowledge learned from plato pre-training, we construct hlv by adding the continuous latent variables to the original discrete latent variables of plato, and using the hlv instead of the discrete latent variables in plato for training.
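as a rough sketch of the two ingredients described above, the code below builds a sentence-level gaussian by additively mixing token-level gaussians and applies a clamped (free-bits style) kl term as a stand-in for the relaxed kl divergence; the tensor shapes, the mixing rule, and the threshold are our simplifications for illustration, not the paper's exact formulation:

import torch

def additive_gaussian_mixing(token_mu: torch.Tensor, token_logvar: torch.Tensor):
    """combine token-level gaussians (batch, seq, dim) into one sentence-level gaussian.

    here means are averaged and variances are averaged in variance space; this is an
    illustrative simplification of additive gaussian mixing, not the paper's exact rule.
    """
    sent_mu = token_mu.mean(dim=1)
    sent_var = token_logvar.exp().mean(dim=1)
    return sent_mu, sent_var.log()

def reparameterize(mu: torch.Tensor, logvar: torch.Tensor) -> torch.Tensor:
    return mu + torch.randn_like(mu) * (0.5 * logvar).exp()

def relaxed_kl(mu_q: torch.Tensor, logvar_q: torch.Tensor, free_bits: float = 0.05) -> torch.Tensor:
    """kl(q || n(0, i)) per dimension, clamped from below (free-bits style), so that tiny
    kl values stop contributing gradient; a stand-in for the paper's relaxed kl divergence."""
    kl = 0.5 * (mu_q.pow(2) + logvar_q.exp() - 1.0 - logvar_q)  # shape (batch, dim)
    return torch.clamp(kl, min=free_bits).sum(dim=-1).mean()

if __name__ == "__main__":
    torch.manual_seed(0)
    token_mu = torch.randn(4, 16, 32)            # 4 contexts, 16 tokens, latent dim 32
    token_logvar = 0.1 * torch.randn(4, 16, 32)
    mu, logvar = additive_gaussian_mixing(token_mu, token_logvar)
    z_continuous = reparameterize(mu, logvar)     # sentence-level continuous latent
    z_discrete = torch.randn(4, 32)               # stand-in for a learned discrete latent embedding
    hlv = z_continuous + z_discrete               # hybrid latent variable fed to the decoder
    print(hlv.shape, relaxed_kl(mu, logvar).item())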
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/90.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/90.txt new file mode 100644 index 0000000000000000000000000000000000000000..ba4a59f5c17a4a3b74843de9a768b79d826221a9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/90.txt @@ -0,0 +1 @@ +successful participation of energy storage resources in competitive electricity markets benefits storage investors and social welfare. ancillary services such as frequency regulation have been the primary sources of revenue for energy storage owners, but these markets have quickly saturated due to surging storage deployments and small market size. in the meantime, the share of storage arbitraging in wholesale markets has tripled from a little less than 20% in 2016 to almost 60% in 2021. thus price arbitrage in wholesale markets will be the main focus for future grid-scale energy storage projects. energy storage arbitrages price differences and earns revenues in wholesale energy markets, i.e., charging during low-price periods and discharging during high-price periods. at the same time, arbitrage from energy storage helps reduce renewable curtailments, meet peak demands, mitigate extreme events, and reduce the cost of electricity. as countries and regions ramp up decarbonization efforts, energy storage resources are taking on an increasingly important role in future electricity markets and are becoming a cornerstone for cost-effective decarbonization. thus, both energy storage owners and market organizers have significant economic and welfare drivers to evolve models and algorithms for energy storage to arbitrage robustly and profitably. however, energy storage arbitrage is non-trivial due to highly volatile electricity prices and limited storage capacity. various methods have been proposed in the literature to address energy storage participation in wholesale markets based on different theories. they require dedicated location-specific tuning and excessive computing power to achieve competitive arbitrage performance. this paper proposes a novel end-to-end system for opportunity value calculation, prediction, and control, combining model-based dynamic programming with neural networks. our approach innovates and provides several advantages as follows: • our approach innovatively predicts the derivative of value-to-go functions, which represent the opportunity value of the state of charge, and uses dynamic programming to generate the training dataset. compared to real-time prices, opportunity value functions are more stable and structured, thereby enabling easier prediction and contributing to the reliable performance of our approach; • our approach is highly computationally efficient in data pre-processing, training, and control.
the complete training time, including generating training value functions, is less than six minutes over two years of price data, and operation/bidding decisions are generated instantly; • our approach utilizes transfer learning to maintain competitive performance over different markets and participation scenarios, including price response and market economic bidding; • our approach achieves state-of-the-art performance, achieving 70% to near 90% profit ratio compared to perfect foresight with various storage durations when tested using price data from new york, us, and queensland, australia. the rest of the paper is organized as follows: section ii summarizes energy storage market participation and previous work using the learning method, sections iii and iv elaborate on the arbitrage formulation and solution method, section v presents the case study for price response and economic bid market rules in new york and the application of transfer learning for queensland, and section vii concludes the paper. energy storage price response assumes the storage participant can observe the real-time price realization first and then decide on the operation privately without informing the system operator. the storage participant must follow market clearing results to charge or discharge, unlike in price response cases in which the storage can privately decide the control decision after observing the price. the main difficulty in combining price prediction with storage optimization is that storage arbitrage requires a look-ahead of at least 24 hours to capture the daily price cycles, while most real-time prediction methods may only accurately generate a few steps ahead of time. we denote that the storage charge and discharge power and the final storage soc belong to a feasibility set e(e_{t-1}) which is dependent on the storage starting soc e_{t-1} at the start of time period t (same as by the end of time period t-1). a negative price is the necessary condition for storage to charge and discharge simultaneously in price arbitrage, hence by enforcing the storage not to discharge when the price is negative we eliminate simultaneous charging and discharging. creating our proposed system amounts to solving the problem of optimizing the prediction model parameters θ to maximize storage arbitrage profit over a set of training price data and physical storage parameters.
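the value-function idea behind this formulation can be illustrated with a small backward-induction toy over a discretized state of charge; the prices, efficiency, and power limit below are made up for illustration, and this is not the paper's exact model or its neural-network predictor:

import numpy as np

def storage_value_functions(prices, soc_steps=11, power=0.5, eta=0.9):
    """backward induction for a 1 mwh storage arbitraging a price series.

    returns (soc_grid, V) where V[t, s] is the value-to-go of holding soc s at the
    start of period t; power is the per-period charge/discharge limit (mwh) and eta
    the one-way efficiency. all parameters here are illustrative.
    """
    soc = np.linspace(0.0, 1.0, soc_steps)
    T = len(prices)
    V = np.zeros((T + 1, soc_steps))
    for t in range(T - 1, -1, -1):
        for i, e in enumerate(soc):
            best = -np.inf
            for j, e_next in enumerate(soc):
                delta = e_next - e
                if abs(delta) > power + 1e-9:
                    continue                          # respect the power limit
                if delta >= 0:                        # charge: buy delta/eta from the grid
                    profit = -prices[t] * delta / eta
                else:                                 # discharge: sell |delta|*eta ...
                    if prices[t] < 0:
                        continue                      # ... but never discharge at negative prices
                    profit = prices[t] * (-delta) * eta
                best = max(best, profit + V[t + 1, j])
            V[t, i] = best
    return soc, V

if __name__ == "__main__":
    toy_prices = np.array([20.0, 15.0, 10.0, 40.0, 60.0, 35.0])  # $/mwh, invented
    soc, V = storage_value_functions(toy_prices)
    # np.gradient(V[0], soc) approximates the opportunity value of the state of charge,
    # i.e. the quantity the approach described above trains a neural network to predict
    print(np.round(V[0], 2))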
(2a) is a dynamic programming energy storage price arbitrage formulation in which the storage opportunity value is defined recursively as the maximized storage arbitrage profit including the profit from the current time step and the future opportunity values. our approach includes three steps: first, we use the deterministic price arbitrage dynamic programming approach to generate the optimal storage opportunity value function segments using historical price data. the energy storage owner submits single-segment bids one hour ahead to real-time markets. we first benchmark our proposed approach with other competing energy storage price arbitrage methods in a price response setting (pr-10), in which storage can observe price first and act accordingly, without bidding ahead into markets. compared to both of these, the 12-hour storage seems to be the easiest for the model to learn, only needing three days of data when training from scratch to achieve reasonable performance; however, the 12-hour storage shows that the transfer learning approach outperforms training from scratch for all data scenarios for 1 and 10 segments. this decomposes the optimization into independent sub-problems for each energy storage, and for each storage, the price-taker market clearing problem is equivalent to the following price arbitrage problem: max_{p_{t,j}, b_{t,j}} sum_t [ lambda_t (p_{t,j} - b_{t,j}) - (c_{t,j} p_{t,j} - b_{t,j} b_{t,j}) ] (6), subject to the same storage unit constraints (5b) and (5c). the difference is that in ha cases, storage has to decide the bids (c_{t,j} and b_{t,j}) one hour before the market clearing period t, while in pr cases storage updates bids at the same time when observing the price. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/900.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/900.txt new file mode 100644 index 0000000000000000000000000000000000000000..f18b9971a4e591a15e3394d03fe47b48e255d54b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/900.txt @@ -0,0 +1 @@ +autism spectrum disorder (autism) is a complex neuropsychiatric condition which manifests in a variety of phenotypic presentations. these include limited ranges of interest, social deficits, delays in communication, inability to express and/or recognize ranges of emotion, avoidance of eye contact, and idiosyncratic motions of hands, head, and body (figure 1). the most popular diagnostic assessments for autism are the autism diagnostic observation schedule-2 (ados), the autism diagnostic interview-revised (adi-r), the childhood autism rating scale (cars), and the gilliam autism rating scale-2 (gars-2). readily available datasets with such scoresheets filled by clinicians or parents include autism genetic resource exchange (agre), autism consortium (ac), national database for autism research (ndar), simons simplex collection (ssc), simons variation in individuals project (svip), autism speaks (mssng), and autism genome project (agp). in jones et al., the children in the study watched cartoon scenes while their gaze was measured. campbell et al. sadria et al. applied network analysis to eye tracking data by considering each area of interest as a node and each saccadic transition between two areas as defining an edge. compared the facial dynamics of 20 participants with high-functioning autism against 19 participants with neurotypical development, finding reduced complexity in the dynamics of the eye region in the autism group. martin et al. hudenko et al. chang et al.
used the arrangement of a child's eyes, nose, and lips in a front-facing image of the face to classify autism with 87% classification accuracy. another study combined both eye and motion data to predict a diagnosis from 22 children with autism and 22 neurotypical controls, reaching 78% accuracy with both modalities, 73% for only motion features, and 70% for only eye features. many of the opportunities we discuss involve reapplying data science innovations used in other conditions to autism, and many of the challenges for autism data science also face data science for complex human behavior analysis more broadly. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/901.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/901.txt new file mode 100644 index 0000000000000000000000000000000000000000..fbd6d622185c50491f11be474db15d3be8510e96 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/901.txt @@ -0,0 +1 @@ +conventional robotic arms were placed in manufacturing settings to conduct repetitive, dull, and dangerous tasks for humans. typically, only trained users have direct access to interact with such robots due to safety concerns. these days, however, more and more robotic arms are being deployed to interact with laypeople. for instance, barista robots make and serve coffee for customers, and assistive robots interact with patients with disabilities. while these robot arms provide convenience and novel user experiences, and tend to be smaller and less physically dangerous, they can still create unexpected hazards or confusion when interacting with untrained human users: the wide range of movements of robotic arms makes it difficult for humans to fully anticipate them. as the aforementioned example indicates, we are witnessing an increase in the application of robots in our social spheres where humans interact with robots in everyday tasks (e.g., mobility, healthcare, entertainment, and education, as well as social assistance for older adults). the expansion of robot roles from tools to teammates poses novel questions regarding the co-existence of humans and robots. this shift leads to a state of affairs in which robots and humans co-exist, which indicates that a broader range of lay human users will face various types and levels of risk or hazard during hri. risk and uncertainty inevitably involve trust, so understanding the longitudinal aspects of human trust in robot partners is imperative to initiate and maintain relationships with robots over time. many scholars have highlighted that a comprehensive conceptualization of trust is essential when designing robots that interact socially with humans because trust is integral for a user's acceptance and inclusion of the robot into their social sphere. that is, a user is unlikely to use a robot if they believe that the robot is untrustworthy. topics in human-robot trust include trust measurement, trust repair strategies, and trust modelling.
while the insights gained from studies that regard trust in automation provide profound knowledge for understanding trust in robots, the trust characteristics of human interactions with sars have different implications than considerations regarding human interactions with automated machines. what does it mean for humans to trust a robot? while extensive literature discusses various aspects that impact human-robot trust over the past two decades, there is no clear agreement on the definition of trust in hri. while many studies treat trust towards robots as something that has already been established, many contemporary reports indicate that the public is reluctant to trust robots, leading to some uncertainty as to whether people trust robots at all. trust in a robot also depends on broader considerations (e.g., social factors, user propensity) on top of the reliability of the robot's functionality. an example of a robot-centric hri trust study includes one that developed an online probabilistic trust inference model (optimo), a widely adopted computational model that estimates near real-time human trust toward a robot by observing human behaviours. however, many empirical studies in human-robot trust treat trust as a time-independent variable, often measuring the level of trust through surveys and experiments. studies that treat trust as an independent variable tend to focus on the benefit of trust, such as how trust facilitates cooperation with humans or reduces uncertainties during hri. studies that view trust as a dependent variable focus on factors that directly impact trust, such as users' attitudes towards robots, the operator's performance, and failure rates of robots. the traditional discussion of trust during hri needs to fully convey the dynamic nature of trust between the human who trusts and the robot that is trusted. the latter trust formation stage relies heavily on knowledge-based trust: trust is based on accumulated knowledge of the trustee's ability over repeated interactions. as this stage model indicates, it is important to consider that human-robot trust may fluctuate over repeated interactions, and different factors will have different implications depending on the stage of trust formation. one significant hindrance to conceptualizing human-robot trust as dynamic in nature is that most empirical studies conducted in hri to understand trust are based on one-shot study designs.
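as a toy illustration of what near real-time probabilistic trust inference can look like over repeated interactions, the following beta-bernoulli sketch (our own simplification, far cruder than optimo's model) updates a trust estimate from observed accept/override behaviour:

class BetaTrustEstimator:
    """keep a beta posterior over the probability that the user currently trusts the robot.

    each interaction outcome (e.g. the user accepted or overrode the robot's action) is
    treated as a bernoulli observation; exponential forgetting discounts old evidence so
    the estimate can drift over repeated interactions, as longitudinal trust does.
    """

    def __init__(self, alpha: float = 1.0, beta: float = 1.0, forgetting: float = 0.98):
        self.alpha = alpha
        self.beta = beta
        self.forgetting = forgetting

    def update(self, trusting_behaviour: bool) -> None:
        # discount previous evidence, then add the new observation
        self.alpha = self.forgetting * self.alpha + (1.0 if trusting_behaviour else 0.0)
        self.beta = self.forgetting * self.beta + (0.0 if trusting_behaviour else 1.0)

    @property
    def trust_estimate(self) -> float:
        return self.alpha / (self.alpha + self.beta)

if __name__ == "__main__":
    estimator = BetaTrustEstimator()
    # the user accepts the robot's suggestions, then overrides twice after a failure
    for outcome in [True, True, True, False, False, True]:
        estimator.update(outcome)
        print(round(estimator.trust_estimate, 3))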
in one way, foundem's story is unremarkable: foundem alleged that they were victims of algorithmic bias, and it's well documented that algorithmic bias is pervasive in search engines and algorithms more generally. for example, in the european union found that google's search engine was biased in its own favor; google shopping undeservedly enjoyed higher search rankings than rival comparison shopping services, including foundem (european commission, ). (the result was a € . billion fine.) search engines are biased in other ways, too. introna and nissenbaum ( ) argued that the technical architecture of search engines excludes the voices of the less powerful and less wealthy. noble ( , ) revealed how search engines perpetuate sexism and racism by returning highly sexualized results for queries like 'black girls'. we find bias infecting algorithmic systems of all kinds-for example, predictive policing systems that overestimate crime in communities of color (lum and isaac, ); hiring algorithms that dock qualified female candidates (barocas and selbst, ); and facial recognition software that struggles to recognize darkskinned female faces (buolamwini and gebru, ). in another way, though, foundem's story is remarkable: following foundem's demotion in google's search rankings, its founders initiated the search neutrality movement, which calls for search engines to be, well, neutral. algorithmic neutrality has received little attention, despite the considerable work that's been devoted to algorithmic bias. (algorithmic fairness has received significant attention-see e.g. (castro, ) and (hedden, )-but how fairness and neutrality relate to one another is, quite generally, far from clear. for example, neutrality, as i'll characterize, is a descriptive notion, while fairness is a normative one.)algorithmic neutrality is the subject of this paper. i take up three questions. what is algorithmic neutrality? is algorithmic neutrality possible? when we have algorithmic neutrality in mind, what can we learn about algorithmic bias?to answer these questions in concrete terms, i will work with a case study: search engines. search engines warrant special attention because they themselves are remarkable in discussions of algorithmic bias. search neutrality, in addition to being a particularly rich topic, is one of only two sorts of algorithmic neutrality to receive sustained public, academic, and legal attention. (for work on search neutrality, see e.g. (grimmelmann, ), (crane, ), and (gillespie, ).) the paper takes in turn each of the three questions, as applied to search engines. § asks: "what is search neutrality?" in answering, i draw on work about neutrality in science. § asks: "is search neutrality in possible?" i answer no. § and § ask: "when we have search neutrality in mind, what can we learn about search bias?" i consider, for example, how to make sense of search bias given that search neutrality is impossible. my accounts of search neutrality and bias are stated in terms of the aim of a given search engine; § , explores the notion of an aim. § generalizes my discussion of search engines to algorithmic systems of all kinds. § and § ask: "when we have search neutrality in mind, what can we learn about search bias?" i consider, for example, how to make sense of search bias given that search neutrality is impossible. 
from now through § , i will focus on search engines that aim simply to give relevant results (and, to reduce clutter, i will often write 'search engines' instead of 'search engines that aim to give relevant results'). search engines that aim simply at relevance are of special interest because of the pride of place that relevance holds in common understandings of search engines (as we've just seen) and in both search neutrality and search bias (as you'll see just below and then in § ). a search engine that aims at relevance is neutral only if values other than relevance play no role in how the search engine ranks pages. she also showed, as she herself emphasizes, that the search results reinforced sexism because the search engines systematically failed at what they themselves were trying to do-in other words, because the search engines were biased in failing on their own terms. one can concede that when search engines yield sexualized results for 'filipina girls' for purposes relative to which such sites are relevant, the search engine does provide the good that it promises: giving relevant results. a search engine that aims at relevance is exogenous-values biased if values other than relevance play a role in how the search engine ranks pages. the fact that search engines differ in their aims brings into view something else we can learn about bias: the normative significance of certain forms of bias for a given search engine is beholden to the normative significance of the search engine's aim. how a search engine ranks pages may amount to bias if the search engine has one aim but not if it has another. if the search engine is a relevance engine, then it's biased in failing on its own terms: how it ranks relevant pages that carry misinformation systematically deviates from how relevant those pages are. because how a search engine ranks pages may amount to bias if the search engine has one aim but not if it has another, some questions about whether bias is worth avoiding amount to questions about what aim a search engine should have. we can, for example, distinguish between a search engine's intended aim, what the search engine's operator intends the search engine to do, and its stated aim, what the operator says that the search engine is doing. for example, a search engine that aims to give results that are relevant except when relevant results would hurt the interest of the s party is biased in failing on its own terms if how it ranks pages deviates from how relevant those pages are except when it would harm the s party. if there were a law, for example, that required search engines to give simply relevant results, the legally-required aim of a search engine subject to that law would be to deliver simply relevant results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/903.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/903.txt new file mode 100644 index 0000000000000000000000000000000000000000..3cc7f9581bbf3dff8539e52c106db5643c536b79 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/903.txt @@ -0,0 +1 @@ +the discourse surrounding the societal impacts of artificial intelligence (ai) systems abounds with calls, both in popular demands and formal regulations, for greater transparency.
sometimes these demands invoke the word transparency directly, while other cases invoke similarly vague surrogates like "meaningful information". however, the term is too overloaded with distinct meanings to express concrete policy objectives or technical claims alone. the term is a prototypical example of ai's suitcase words. although this breadth can be valuable in uniting members of disparate research communities toward high-level desiderata, concrete aims and advances must be expressed in more precise language. unfortunately, researchers, corporations, journalists, regulators, and members of the general public often invoke transparency in contexts where greater precision is required and, consequently, talk past each other. depending on the context, researchers may invoke transparency in connection with data collection, data processing, interpretable systems, or fairness issues, among other concerns (table 1). even in european union (eu) regulations, which pioneered global ai policy, particularly the general data protection regulation (gdpr) and the ethics guidelines for trustworthy ai, the vague demands for "meaningful information" and "comprehensible language" have forced legal scholars and ai practitioners to speculate about the precise meaning of transparency. can these disparate research threads be unified to advance a coherent vision for improved ai transparency? we believe that ideal ai transparency gives users and stakeholders the tools to rationally, autonomously, and confidently decide for themselves whether an ai system and its decisions are trustworthy. in particular, this means explanations or descriptions that are user-appropriate, user-centered, and honest. we define these attributes as follows. • user-appropriate: information conveyed to a stakeholder is understandable in content, style, and level of detail • user-centered: insightful regarding the behaviors observed by a user in their own interactions with a system • honest: true, as comprehensive as necessary, and without intent to deceive by system builders or owners. in this paper, we provide a condensed overview of the diverse conceptualizations of transparency in the ai literature, identify commonalities and differences among them, and discuss how each ties in to our transparency ideal. we identify three overarching factors with which transparency is invoked concerning the machine learning pipeline-data (§2), systems (§3), and outputs (§4). we divide our literature review into sections based on these factors and identify specific clusters of thematically related research. for each cluster, we summarize the high-level issues it approaches, briefly detail a representative study, and provide remarks on its promise and obstacles to advancing the high-level goal of ideal ai transparency.
dataset datasheets and other associated record transparency techniques are useful for our core transparency goals, insofar as they enable downstream developers and system providers to more honestly describe the conditions under which their system was produced. furthermore, along with data provisioning transparency techniques, the proper social situatedness of systems can be ensured, as behaviors including differential performance across protected classes or ingestion of data from protected consumer groups can be accounted for prior to deployment. however, strong rules and norms that incentivize system developers and providers to actually implement honest and socially situated transparency are needed to ensure that this data information leads to ideal ai transparency for users. system function disclosure includes communications by system producers, owners, or vendors concerning the capabilities and limitations of their systems. a challenge in making prescriptions around this sort of transparency is that system function disclosures target a diverse set of audiences, including external developers building around/needing to understand a system-thus explainability and interpretability techniques can be prerequisites for the level of expert understanding needed to produce honest disclosure (§3). additionally, this research cluster heavily intertwines with user-appropriateness. an important note is that while the explanation as system disclosure may ultimately be the same in both cases, and thus be able to achieve the same desired end, such as to "lead a user into some action or behavior" for a deployer, this example causes a conflict with the user's desire to "determine the trustworthiness of a system". table 2 (a selection of stakeholders and their various desired ends relating to ai transparency): deployer - lead a user into some action or behavior, increase usage of their system, maintain a functional system; developer - understand a system to debug and improve it, predict real-world system behavior, improve system performance and robustness; data owner - provide data collection and usage information, protect proprietary data and trade secrets, address data misuse concerns; regulator - evaluate fairness of predictions, demonstrate regulatory compliance, manage societal risk, mitigate negative consequences; user - understand system logic, evaluate trustworthiness, recognize the ai model's socioeconomic blindspots, data protection and privacy; society - understand the strengths and limitations of a system, overcome fear of the unknown, encourage ethical use of ai, mitigate system bias. system disclosure refers to information outputted from systems to improve clarity in understanding of the system. current proposals around ai transparency, particularly the european commission guidelines for trustworthy ai, are steeped in ai research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/904.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/904.txt new file mode 100644 index 0000000000000000000000000000000000000000..a9f91f888ac764d9b4e90926b88136e0d9b2e22e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/904.txt @@ -0,0 +1 @@ +libraries are increasingly relying on computational methods, including methods from artificial intelligence (ai) 1 . this increasing usage raises concerns about the risks of ai that are currently broadly discussed in the scientific literature, the media and law-making.
but libraries have also, for a long time, been at the forefront of innovative ways of dealing with information and leveraging knowledge-processing methods for the common good. in this article, we first investigate risks surrounding bias and unfairness that have recently come under increasing scrutiny as possible side-effects of ai (section 2). we then ask, in section 3, how such risks can also affect library applications, using automated content analysis as an application of machine-learned classification in which such risks have been observed. we also consider how the library community has been aware of such risks for a long time, and point at some countermeasures that have been developed and deployed by librarians. we start this analysis by taking a closer look at the notion of "(un)fairness", linking it to the notion of "diversity" by arguing that a key prerequisite for being treated fairly is that an individual or group has to "exist" (in representations) in order to be perceived. we investigate a formalisation/operationalisation of diversity that encompasses these two ideas by modelling both distribution and (prior to this) inclusion. we conclude, in section 4, that many of the unfairness problems of automated content analysis can also be regarded through the lens of diversity and the countermeasures taken to enhance diversity - the visibility of and respect for diversity as it exists in the world, and its presence in content and metadata. this formula can measure various forms of diversity, including actor diversity. fairness is more than actor diversity, however, because it also describes an allocation of some other entity (goods, services, opportunities, …) across these individuals or groups, or because it describes a representation of these individuals or groups in which, for example, ascriptions of properties are "fair" to them (and possibly others) rather than "stereotyped" or "biased". unstructured texts and metadata of new materials are then mapped to subject categories taken, inter alia, from the dewey decimal classification (ddc) and the library of congress subject headings (lcsh). much of the impact of classification systems arises when they are being used unwittingly, and much impetus for change arises from individuals and groups who object to the unfairness they perceive in this and who create innovative work-arounds. a deeper look at the underlying categories and ontologies is still much rarer and - in the general case of ai - also much more difficult, since the socio-technical systems in which ai is being deployed are much harder to change than an algorithm or a dataset.
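as a hedged illustration of what such a formalisation of diversity can look like, the sketch below scores a set of category counts on variety (inclusion: how many categories are present at all), balance (how evenly items are distributed), and a stirling-style aggregate that also weights in disparity between categories. the specific formula and the toy counts are illustrative assumptions, not necessarily the operationalisation used in this article.

import math

def diversity(counts, distance):
    """counts: {category: count}; distance(a, b) in [0, 1] measures disparity."""
    present = [c for c, n in counts.items() if n > 0]
    total = sum(counts.values())
    variety = len(present)
    shares = {c: counts[c] / total for c in present}
    # balance: normalised shannon evenness (1.0 = perfectly even distribution)
    entropy = -sum(p * math.log(p) for p in shares.values())
    balance = entropy / math.log(variety) if variety > 1 else 0.0
    # stirling-style aggregate: pairwise share products weighted by disparity
    stirling = sum(shares[a] * shares[b] * distance(a, b)
                   for a in present for b in present if a != b)
    return {"variety": variety, "balance": balance, "stirling": stirling}

# toy example: items indexed under subject categories; "science" is absent,
# which is exactly the inclusion problem discussed above
print(diversity({"poetry": 8, "history": 2, "science": 0},
                distance=lambda a, b: 1.0))

raising the count of an absent category increases variety, spreading counts more evenly increases balance, and choosing a less trivial distance function lets disparity between categories enter the score.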
we therefore treat existing subject indexing as challenge #1, algorithms as challenge #2, and classification systems as challenge #3. while the data that porter observed would immediately strike most modern-day librarians or readers as racist, other arguably biased classifications can be observed today: "books that cover the history of the civil rights movement, immigrant histories, and women's history were [...]". we have received a comment from a librarian that, from their perspective, the reverse order would make more sense. we consider this a very interesting comment; to us it suggests that the library community may consider themselves strongly in the role of defining knowledge structures themselves (via their work on the classification systems they then use). this was (and remains) an unusual comparison in the (still) male-dominated field of ai - even though it is actually historically motivated in the sense that the encoding of weaving patterns in punchcards and their use by mechanical looms constituted the first decoupling of "software" from "hardware" and in this sense "programming". media covering the change call it 'more accurate' and 'less offensive,' and the american library association said it was not only praiseworthy but that it 'better reflects common terminology and respects library users and library workers from all backgrounds.' while the story of dorothy porter highlights the role of individuals in the library profession as agents for change, in this case a campaign of many individuals (many of whom are conceivably not library professionals), indexed by the hashtag "#droptheiword", lobbied for the change. for example, presner's proposal increases the variety of the people whose perceptions are manifested in the metadata (and it may have effects on the other components of diversity in the metadata), and dorothy porter made black authors more visible by increasing variety, balance and disparity in categories such as poetry. how can ai and natural language processing help towards this goal? how can we leverage a fine-grained analysis of natural language texts to detect the diversity present in a given text? since many users of such a system not only manage existing texts but also create new ones themselves, can awareness of the diversity present in a given text help them increase the diversity in texts they create? to answer these questions in the field of journalism, we have created the diversity searcher tool. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/905.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/905.txt new file mode 100644 index 0000000000000000000000000000000000000000..3591d11ba196f5cd81a4b33dae12489dc4e26872 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/905.txt @@ -0,0 +1 @@ +persuade your fellow citizens it's a good idea and pass a law. that's what democracy is all about. -antonin scalia humans are a social species, and communication is essential to our collective identity. we may communicate with others to share attitudes, request help, or offer help. humans, unlike many other animals, are naturally disposed to offer help to other humans. communication can include both verbal and non-verbal communication. because of the centrality of communication to our lives, we may want ai systems that can communicate with us in humanlike ways. for example, one of the most advanced ai models, gpt-4, is trained to generate humanlike text and dialog with humans.
text generators are far from the only example: we will cover many more examples in section 2. because of the importance that ai communication may play in our lives, it is valuable to study its potential and risk.communication is inextricably linked to persuasion, the process by which one human or group of humans alters the beliefs of another. persuasion is extremely important, both politically and * both authors contributed equally to this research. economically. for example, much of the function of sales and marketing is to drive persuasion: advertising spend alone exceeded $700 billion dollars in 2021, or approximately 0.75% of world gdp .as the late us supreme court justice antonin scalia alluded to in the quote above, the very concept of democratic elections is based upon the viability of persuasion. it is known and accepted that political candidates try to persuade voters to vote for them. political debates, for example, exist to allow candidates to persuade voters to their side while undercutting the strategies of the other candidates. candidates, and others, also purchase political advertisements: in the run-up to the us election in 2020, more than $14 billion was spent on political advertising, underscoring the importance of persuasion in politics .persuasion is also a large component of information operations, a critical aspect of national security. for example, the russian internet research agency has been associated with worldwide information campaigns . information operations are also employed by terrorist groups, china, north korea, and others . the united states has recently used careful declassification of intelligence documents in order to undermine russia's war on ukraine .ideas alone can be extremely powerful, especially when combined with a call to action. for example, silent spring warned of the dangers of the pesticide ddt, and contributed to the eventual ban of the chemicals. it did so through significant scientific research but also through a fictional future account of a town where everything is silent due to the effects of pesticides .ideas can be especially powerful when they cement the power of those originating them. for example, the idea that kings were endowed with their power by god (the "divine right of kings") helped to legitimize the idea that kings did not need to be subject to parliaments or religious leaders such as the pope ."i aimed for the public's heart, and by accident hit it in the stomach. " -upton sinclair ideas do not always achieve the effect intended by their originators. upton sinclair originally wrote the jungle to increase support for socialism but was more successful at causing the public to demand food safety standards. ideas have a life of their own, and it's important not to overestimate the degree to which their spread or effect can be neatly controlled .in the study of persuasion, hovland et al. draw a distinction between changes in opinions and attitudes . opinions are described as the usual response an individual provides when posed with a specific question, whereas attitudes represent a person's inclination to approach or avoid someone or something. these two concepts are closely connected, as individuals who hold particular opinions may alter their observable behavior. however, we will note that it is often the case that the most dangerous effects of persuasion come about from changing attitudes since attitudes more closely reflect the real actions that people take.hovland et al. 
also identify four factors that contribute to the effectiveness of persuasive strategies. first, the communicator's trustworthiness, perceived intentions, and affiliations matter, as well as their ability to offer incentives (such as money). the content of the communication matters, particularly insofar as it can arouse emotional states. audience predispositions, such as the desire to conform to a group or one's particular abilities or motives, are also important. lastly, it's necessary to consider the longevity of a target's response to persuasion: one may superficially appear to have been persuaded, but not have any sort of persistent behavior change. we will return to these factors later in section 3.5 and section 3.6. for the purposes of this paper, we define ai persuasion as a process by which ai systems alter the beliefs of their users. in general, persuasion usually requires intent: the communicator hopes that the recipient will alter their beliefs in a certain way. however, as we will detail, ai systems may be able to inadvertently persuade users of certain beliefs, even if their designers did not intend for this to happen. it is difficult to know whether this should be ascribed to the ai system's "intent" to influence the user, and as such we do not assume that persuasion must be the result of an intended belief change. if persuasive ai systems with personal connections to users are permitted to advertise politically, this could dramatically increase their effectiveness, reduce costs, and cut nearly all humans out of the persuasion process. if ai systems are persuasive enough, for instance, if they were to be more persuasive than 99% of humans, their unregulated proliferation could lead to serious degradation in discourse between humans. for example, humans might come to hold beliefs that make them more predictable or profitable consumers if ai systems are incentivized to persuade humans to hold these beliefs. however, there are significant hurdles to international regulation of persuasive ai, most notably that many countries would be unlikely to consider each other trusted users of persuasive ai technologies. because ai systems may have different behaviors and motivations from humans, it may be useful and societally beneficial for humans to know which outputs came from a language model. while this might help reduce some problems with ai persuasion, it certainly would not remove all of them, as humans may still be persuaded by ai even if they know it is not human. in these cases, we may have to trust that ai systems were developed and trained in a truth-promoting way, or trust other ai systems to evaluate their truthfulness. honesty could rely on certification, where humans or other models verify the development process of ai systems and certify their likelihood to be truthful, or adjudication, where humans or other models verify the truth of statements after they are made. the latter method could allow ai to defend against persuasive ai systems. for example, an ai system trained to spot deceptive or false language generated by another ai system could aid a human consuming potentially-persuasive content by notifying the human user of the problems with the content.
past work has considered many possibilities of liability regimes for ai systems: should ai systems be treated like animals, children, or corporations?these questions are currently unsettled. however, if humans were to have usable and effective means of redressing problems posed by persuasive ai (such as being deceived) through the liability system, this could dramatically change the incentives of companies deploying ai systems and make them much more hesitant to deploy persuasive systems that are not truthful. if ai persuasion is left unchecked, more and more persuasive power in our society will shift towards opaque systems we do not fully understand and cannot fully control, which could contribute to humans losing some of the control of our own future that we have enjoyed in modern times. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/906.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/906.txt new file mode 100644 index 0000000000000000000000000000000000000000..a313e1c1c196479e92573cf6a7eae1a35d33926b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/906.txt @@ -0,0 +1 @@ +the use of social media as a technological device revolutionized the mode of communication and exchange of information in our current society. the ability to send information across thousands of miles at the speed of light has helped businesses in generating more revenue and allows internet users to stay in contact with friends and family through various social media platforms. social media has proven to be beneficial to humans by presenting a means of creating events, posting videos, and staying connected with loved ones while also negatively impacting the mental well-being of young individuals who are subject to cyberbullying on social media platforms . the datareportal study found that 53% of the global population uses social media platforms and that the average person spends about three hours a day on social media . osns are used daily for a substantial amount of time. cyberbullying has developed as a major problem on online social networks or media, afflicting both children and young adults . generally, cyberbullying is identified as bullying that occurs via electronic devices or the internet to cause harm or victimize someone . cyberbullying has occurred and continues to occur on social networking sites such as facebook, whatsapp, twitter, and instagram. reports of soni et al. indicate that 40% of teenagers in the united states experienced cyberbullying harassment and name-calling, which negatively impacted their mental well-being, including deep emotional trauma, psychological and psychosomatic disarray . according to utemissova et al. , one-third of people bullied did not realize the negative behavior shown was associated with cyberbullying. other findings from researchers stated that young adults who are cyberbullied have a higher tendency to harm themselves or have suicidal thoughts and behavior . additionally, a recommendation in the paper modeling the detection of textual cyberbullying by dinakar et al. highlights that about 83% of young adults or people that are bullied believe that online social network platforms should have a part to play in curbing the issue of cyberbullying on their social platforms . in recent years, researchers have studied and understood most social science cyberbullying problems . 
the preventive measures recommended by researchers to help with the cyberbullying problem include, but are not limited to, human interference, educational awareness and recommendation, deleting offensive terms, and cyberbullying detection. figure 1 depicts an example of cyberbullying on online social media networks or social media. they show demeaning words and phrases such as lo$er, pi$$ off, sh*t, fat a$$, and other similar phrases directed at the victims. over the years, most researchers have worked on solving the problem of cyberbullying. grellety et al. developed queries for detecting cyberbullying content with a precision of 91.25%. cyberbullying is often perceived as a defensive or aggressive response, with most perpetrators able to remain anonymous while victimizing targets. the research of dalvi et al. developed software for detecting cyberbullying posts on twitter using machine learning algorithms. while most of these approaches have brought great success in cyberbullying research, the data availability needed to develop detection models remains a challenge in the research space. this paper provides a systematic literature review focusing on addressing the following research questions: • what are the major problems of cyberbullying detection for researchers? • what are the impacts of law or legislation on cyberbullying across different continents? this paper is structured as follows: section ii describes cyberbullying forms, content, crimes, and the policies pertaining to cyberbullying and its impact on an individual. section iii explains the method used to collect literature, the screening criteria used to identify the papers included in the review, and the criteria used to exclude literature not considered in the review. (fig. 1: a real example of a cyberbullying tweet captured from the social media site twitter; some parts of the image are blurred to hide the identity of the persons involved in the cyberbullying case.) afterward, section iv discusses past, present, and future solutions in cyberbullying detection by first identifying cyberbullying, text detection, cyberbullying crime analysis, cyberbullying law, and cyberbullying data. section v presents the analysis and discussion of the study. finally, section vii provides an overview of the study via the research questions presented in this paper, followed by a recommendation for future research and the limitations of this study. 1) define: the purpose and scope of the research are defined using the research questions stated in the introduction: what are the major problems of cyberbullying detection for researchers? in addition, what are the impacts of law or legislation on cyberbullying across different continents? we provided grouping based on text detection, the impact of cyberbullying, cyberbullying crime analysis, and recommendations for cyberbullying. the study selection considered: 1) the last five years (2017-2022), 2) cyberbullying, 3) cyberbullying text detection, 4) implications of cyberbullying, 5) automatic cyberbullying detection, and 6) cyberbullying on social media or online social networks. the following publications were excluded from the study selection.
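as a minimal, hedged sketch of the kind of machine-learning detection pipeline the reviewed studies describe (the toy posts and labels below are invented for illustration and are not drawn from any cited dataset), a linear classifier over tf-idf features can be trained to flag cyberbullying text:

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import make_pipeline

# toy labelled posts; 1 = cyberbullying, 0 = benign
posts = [
    "you are such a loser, nobody wants you here",
    "everyone is laughing at you, just disappear",
    "great game last night, see you at practice",
    "happy birthday! hope you have a wonderful day",
]
labels = [1, 1, 0, 0]

# tf-idf features over unigrams and bigrams feeding a logistic regression
model = make_pipeline(TfidfVectorizer(ngram_range=(1, 2)), LogisticRegression())
model.fit(posts, labels)

print(model.predict(["nobody wants you on this team, loser"]))

real studies replace the toy data with labelled social media corpora and report metrics such as precision and recall; the shortage of such publicly available corpora is exactly the dataset challenge discussed next.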
however, a dive into using deep learning techniques to detect cyberbullying automatically would improve the ability of the classifier to predict cyberbullying content. major problems of cyberbullying detection for researchers: 1) publicly available data: after analyzing the selected papers for the literature review, it was noticed that most researchers mentioned that having a publicly available dataset for the classification of cyberbullying text was and is still a challenge in cyberbullying. the approach to automated detection of cyberbullying paper identified that most studies used online harassment, insult, and hate crime datasets for cyberbullying detection. these kinds of data consist of individual abusive or insulting content, are outdated, and as such, are inappropriate for creating and selecting features for cyberbullying detection but rather beneficial for detecting a particular form of cyberbullying such as cyber aggression, hate crime, and harassment. cyberbullying detection research has leveraged different language data to solve cyberbullying on online social media. as per their recommendation on how to solve the unavailability of cyberbullying datasets, highlighted as a challenge in these studies, they urge researchers in the field to work on developing datasets that could go a long way towards helping the cyberbullying research community detect and recognize the various forms of cyberbullying, cyberbullying roles, and the relations between victims and offenders in cyberbullying. although it consists of cyberbullying and non-cyberbullying tweets and some forms of cyberbullying, researchers are still encouraged to put much effort into creating datasets so that machine learning and deep learning models can perform advanced cyberbullying detection tasks. as in the usa, canada, and australia, in ghana cyberbullying on social media could lead to a minimum of 6 months and a maximum of three years in prison. this study used the di-care method to review cyberbullying studies to identify research in cyberbullying, specifically cyberbullying forms, roles, and cyberbullying detection methods. additionally, this paper highlighted the dataset challenge in cyberbullying detection research and highlighted the impacts of law or legislation on cyberbullying across different continents. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/907.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/907.txt new file mode 100644 index 0000000000000000000000000000000000000000..780d1636b5bee6bf262ec062391ff81a96630852 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/907.txt @@ -0,0 +1 @@ +in ranked or preferential vote elections, each ballot comprises an ordered list of (some or all of) the candidates. the ballot is interpreted as a statement that each candidate on the list is preferred by the voter to all the candidates after it, and any that don't appear on the ballot. condorcet elections treat each election as a series of two-way contests between each pair of candidates a and b, by saying a beats b if the number of ballots that prefer a over b is greater than those that prefer b over a. a condorcet winner exists if there is a single candidate that beats all other candidates. but it is quite possible to have ranked vote elections with no condorcet winner.
in this case there are many alternate strategies/election systems that can be used to choose a winner.risk-limiting audits (rlas) test a reported election outcome by sampling ballot papers and will correct a wrong outcome with high probability (by requiring a full manual count of the ballots). they will not change the outcome if it is correct. the risk limit, often denoted α, specifies that a wrong outcome will be corrected with probability at least 1 -α. rlas for instant-runoff voting (irv) can be conducted efficiently using raire . raire generates 'assertions' which, if true, rule out all outcomes in which the reported winner did not win. assertions form the basis of an rla that can be conducted using the shangrla framework .in this paper we show how to use the assertion-based methodology of blom et al. to form a set of assertions sufficient to conduct an rla for a variety of condorcet elections. assertions with linear dependence on transformations of the votes can easily be transformed to canonical assorter form for shangrla. we contrast the estimated difficulty of these audits, in terms of sample sizes required, against auditing irv using raire.for ranked vote elections that have a condorcet winner, we first consider an audit that checks that the reported winner is indeed the condorcet winner (section 3). for an election with n candidates, this requires n -1 assertions comparing the reported winner to each reported loser. we then consider ranked pairs, a condorcet method that builds a preference relation over candidates on the basis of the strength of pairwise defeats (section 4). we find, that for elections with a condorcet winner, the expected sample sizes required to check that the reported winner is the condorcet winner, or to audit as a ranked pairs or irv election, are usually similar. in some instances, particularly those where the winner is decided by who is eliminated in the second-last round of irv, we see more substantial differences in auditing difficulty.to demonstrate the practicality of our auditing methods, we use irv datasets from the australian new south wales (nsw) lower house elections in 2015 and 2019, in addition to a series of irv elections held across the united states between 2007 and 2010. all of these elections have a condorcet winner.finding ranked vote datasets from real elections that did not have a condorcet winner was challenging. we were able to find some single transferable vote (stv) elections, and some datasets from preflib6 , that met this criterion. for these instances, raire often struggled to terminate when finding an appropriate set of assertions to audit. auditing these elections as if they were ranked pairs elections was more successful. we present these results in section 9.we finally consider three additional condorcet methods: minimax (section 5), smith (section 6), and kemeny-young (section 7). we found that audits for these methods were generally not practical. minimax and smith default to electing the condorcet winner when one exists, and in this case we can simply use the method outlined in section 3. on our instances without a condorcet winner, we generally did not find an audit for minimax or smith, with our proposed methods, that was not a full manual count. 
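as a minimal, hedged sketch of the pairwise-contest definition used throughout (the ballots below are invented for illustration, and this is not the raire or shangrla code), the condorcet winner can be computed directly from the pairwise tallies; the n-1 comparisons checked in the loop correspond to the "reported winner beats each reported loser" assertions that the section 3 audit verifies from a ballot sample rather than a full count.

from itertools import combinations

def pairwise_margins(candidates, ballots):
    """ballots: list of (ranking, multiplicity); unlisted candidates are
    treated as ranked below every listed candidate on that ballot."""
    tally = {(a, b): 0 for a in candidates for b in candidates if a != b}
    for ranking, count in ballots:
        pos = {c: i for i, c in enumerate(ranking)}
        unseen = len(ranking)  # shared "below the ballot" position
        for a, b in combinations(candidates, 2):
            ra, rb = pos.get(a, unseen), pos.get(b, unseen)
            if ra < rb:
                tally[(a, b)] += count
            elif rb < ra:
                tally[(b, a)] += count
    return tally

def condorcet_winner(candidates, ballots):
    tally = pairwise_margins(candidates, ballots)
    for w in candidates:
        # w wins iff it beats every other candidate in the pairwise contests
        if all(tally[(w, c)] > tally[(c, w)] for c in candidates if c != w):
            return w
    return None  # no condorcet winner exists

ballots = [(["a", "b", "c"], 4), (["b", "a", "c"], 3), (["c", "a", "b"], 2)]
print(condorcet_winner(["a", "b", "c"], ballots))  # prints "a"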
we define a ranked vote election as a pair l = (c, b) where c is the set of candidates and b the multiset of ballots cast. in an election that satisfies the condorcet winner criterion, the winner is the candidate w ∈ c for which s(w, c) > 0 for all c ∈ c \ {w}. ranked pairs is a type of condorcet election that determines a winner even if no condorcet winner exists. the ranked positive majorities are (b, d), (a, b), (d, a), (b, c), (c, d), and (a, c). to audit a ranked pairs election, we must check that all preference statements between pairs of candidates that were ultimately used to establish that the reported winner w won do in fact hold. next, we must check that any transitive inference w ≻ c, from t, that we used to declare w the winner could not have been contradicted by a pair (c, w) that, in the true outcome, was actually stronger than one or more of the preferences used to infer w ≻ c in the reported outcome. note that in ranked pairs elections where we have a condorcet winner, we could simply verify that the reported winner was the condorcet winner, irrespective of whether transitive inferences were used in the tabulation process. for most election instances we consider in our results (section 9), our ranked pairs auditing method reduces to checking the set of assertions for verifying that the reported winner is the condorcet winner (see section 3). where b is a ballot and each b_i is the number of votes the ballot b contributes to the category t_i, where t_1 = t(i ≻ j), t_2 = t(w ≻ c), t_3 = t(c ≻ w), and t_4 = t(j ≻ i). if the declared winner w is not the correct winner of a ranked pairs election, then the probability that an audit verifies all the assertions in a is at most α, where α is the risk limit of the audit of each individual assertion. in a minimax election, a pairwise score is computed for each pair of candidates, c and c′, denoted ms(c, c′). consider a graph where, for each pair of candidates (c, c′), we have a directed edge from c to c′ with a weight equal to s(c, c′). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/908.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/908.txt new file mode 100644 index 0000000000000000000000000000000000000000..7419c6f06bcd7edca72724884484e5252f42f6e6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/908.txt @@ -0,0 +1 @@ +the office of science and technology policy (ostp) recently released the ai bill of rights framework to outline recommended best practices for algorithmic transparency to protect americans from harm. yet the blueprint treats automated decision-making and ai interchangeably, without rigorously defining either term. this is unfortunate, as machine learning practitioners have already taken up documentation tools as a critical interface for the simultaneous design and evaluation of system components.
in tension with this development, the failure to distinguish specific policy priorities for ai has limited critical forms of accountability, including the ability to identify which system components are responsible for specific impacts.artificial intelligence began as a field of mathematics based on approximately simulating mechanisms of human cognition through computation , but has evolved to include the design of systems that incorporate multiple types of feedback to operate strategically in deployment environments . we define ai as "the design of rational agents that select actions to maximize their (expected) utility" . ai is different from the general field of algorithm design, which comprises " for solving a mathematical problem in a finite number of steps, often through the repetition of some operation or calculation" . this couples well with the mechanisms involved with automated decision-making, as they both depend on the type and structure of data being used to carry out these decision-making mechanisms. an automated decision system is the "combination of data and learning algorithms to make consequential decisions affecting people or the environment" .in light of these definitions, the ai bill of rights framework is a bit of a misnomer. as written, the framework articulates policies that may protect americans from algorithmic and automated decision-making, but not the highly strategic, dynamic, feedback-laden behaviors specific to ai. this paper discusses how documentation protocols must evolve to meet the challenges posed by ai specifically, based on a comparison of past and present documentation techniques both within and beyond the world of ai. section 2 of this paper outlines the history of environmental impact statements in the united states. section 3 argues the need for fully dynamic documentation of ai systems, focusing on the limitations of current tools like model cards and algorithmic impact assessments as well as more recent proposals like reward reports. section 4 concludes. section 3 argues the need for fully dynamic documentation of ai systems, focusing on the limitations of current tools like model cards and algorithmic impact assessments as well as more recent proposals like reward reports. while this may be a fair assumption for some automated systems, conducting a model card of every classifier in blenderbot 3 would not generate any actionable insights, as the performance of a single model is dependent on a variety of other systems and feedback. applying a granular and static approach like model cards to an ml system like blender-bot3 would not help designers or policymakers comprehend system failures and pinpoint recommendations. since there is neither any documentation robustly logging how the designers of the system believe it would operate or any detail on the training, design elements, or feedback that govern the behavior of this system, canada's documentation system does not provide policymakers and stakeholders the requisite information needed to hold the designers liable for the effects of their system or make substantive recommendations to improve the system. 
for example, while it is possible to speak at a high level about how systems bring about social outcomes, it is not apparent how interactions between specific system components may contribute to social unrest, even though technical specificity is needed to be able to reflect and causally explain particular impacts.an ideal ai documentation system should create a framework that allows for deliberation between the forest and trees, matching high-level performance outcomes with specific, granular design decisions. documenting the consequences of algorithmic changes in such systems would not only help regulators propose targeted and constructive guidance, but it would also shed light on the constellation of factors-both within the technical design of the system and in its environment-that underlie decisions made by otherwise opaque models. such a tool would permit designers of ai systems to publicly and continuously assess their work over time, piecing together users' comments and crowd-sourced data to form a picture of how their system is actually interacting with the world.one example of a documentation system that attempts to study how ai interacts with its environment postdeployment is reward reports: a form with prompts forcing ai designers to detail their motivations and expectations for their system and continuously assess the system's performance post-deployment. the language of reinforcement learning emerges when discussing the second section, optimization intent, which inquires about the performance metrics and failure modes, and the third section, institutional interface, which asks which external entities the system will engage with and how the system will remain accountable to these stakeholders. a reward report will also require engineers to disclose design elements critical to how data is processed and how user feedback is translated into performance metrics and fine-tune the system in the implementation and evaluation sections. the goal of a documentation system for ai should be to take continuous snapshots of a system's performance, in a similar way to how model cards can provide a rich picture of a single model's biases and behavior at a single point in time. just as the hazard risk score flattens a sitespecific evaluation into a single rank, a series of metrics like helm can reduce model-specific user comments into something that a general documentation system like reward reports can handle. continuous user feedback, treated like public comments in eiss, consumer complaints held by the consumer financial protection bureau, or the federal trade commission's sentinel databasecould be evaluated according to metrics like helm and used to inform regular iterations of reward reports. therefore, the key to making a dynamic documentation system to ai tractable is a user interface that can collect and categorize user feedback along metrics like helm, which can then be reflected on and formatted onto a framework like reward reports. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/909.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/909.txt new file mode 100644 index 0000000000000000000000000000000000000000..5fef223c9ba5d9497f2bf0994995832de69e4838 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/909.txt @@ -0,0 +1 @@ +establishing an optimal governance framework of ai is very challenging due to many reasons. 
besides the common challenges confronted by the technology policy field, ai governance poses a unique set of unprecedented problems. as ai is not a single kind of technology but a portfolio of diverse technologies, no silver bullet exists to solve the governance of all sub-sectors within the ai industry. moreover, ai technologies are evolving quickly, which makes it challenging to analyze and track the effect of certain governance policies. it also indicates that the micro-level regulations should be adaptive, although the overarching principles should stay consistent. besides, the governance agencies cannot manage and guide something they do not understand well. as ai becomes more complicated, public policy makers struggle to catch up with state-of-the-art methods at the frontier. last but not least, to tackle the governance of ai, we have to confront some fundamental ethical dilemmas that have been intensely debated. a common theme underlying these challenges is to understand the complicated interactions between the regulatory departments and the ai firms/institutions. in fact, the activities of the two parties are closely intertwined and the influence is usually bidirectional. this indicates that we must consider the strategic interactions of the two sides systematically in order to design effective ai governance policy and formulate a governing framework. towards this end, this work takes a step by proposing a unified governance framework leveraging game theory. for a multi-agent system with self-interested agents, game theory is applied to analyze the optimal strategy for each agent to play. a large portion of the game theory literature focuses on simultaneous play, where all agents take actions at the same time. the standard solution concept for non-cooperative simultaneous play games is nash equilibrium. however, for many real-world problems, not only do the players assume asymmetric roles, but they also take actions in a prescribed order. for such scenarios, hierarchical games that impose a play order on the players are a better model. in fact, the design of a stackelberg game enables it to capture such an asymmetric structure of play. in its simplest form, a stackelberg game has two players, a leader and a follower: the leader is designated to act before the follower, with the anticipation that the latter will play a best response, and the leader can use this information to its advantage in figuring out her own optimal policy. in this light, the leader's strategy is 'the best response to the best response'. the focus of this work is to understand the interactions between the ai corporations and the regulatory agencies through the lens of game theory, which would shed light on formulating optimal governance of ai. from the standpoint of game theory, the two players and their objectives are: (1) ai corporations/institutions aim to maximize returns from the commercialization of ai technologies; (2) ai regulatory ministries/departments seek to mitigate the potential downside risks due to ai research, development and adoption and meanwhile motivate ai innovation. the strategic interactions can be modeled as a game, as the payoff of each player depends on the parameters of both sides while each can only choose her own parameters. furthermore, as the objective functions of the two are typically different (not simply opposite), this makes the game general-sum. in the context of ai governance, another important feature is that the players are asymmetric.
so the symmetric solution concept of nash equilibrium may not be desired for such situations. this observation again motivates us to resort to the stackelberg game described above. in a nutshell, the hierarchical interaction between the governance agencies and ai corporations naturally lends itself to the stackelberg game model. given this abstraction, we cast the interaction of this pair as a stackelberg game. the asymmetric nature of stackelberg games gives rise to two settings depending on the choice of the leader. by design, the player designated as the leader would act before players assigned as followers. furthermore, the choice of the leader naturally gives rise to two settings; and we will demonstrate that our proposed model can serve as a unified ai governance framework, as we can map one setting to the civil domains and the other to the safety-critical and military domains. such an overarching framework helps simplify the analysis from the lens of abstraction and unify multiple insights based on a principled foundation. notably, our method deviates from the traditional approach to technology regulation. the core contributions of this work are three-fold: (1) we view ai governance through the lens of game theory and cast the hierarchical interaction between ai firms and regulatory agencies as a stackelberg game; (2) based on this abstraction, we propose an overarching game-theoretic framework for ai governance, which unifies various existing insights and provides a principled model for understanding and analyzing complicated ai regulatory dynamics; furthermore, the stackelberg equilibrium enables an automatic balance between effectiveness and safety of ai innovation and application; (3) we instantiate the theoretical framework by showcasing two special categories, incentive games and stackelberg mdps, under the general framework, which clearly demonstrates its generality and flexibility. ai governance framework: we summarize the most relevant recent research on ai governance frameworks, mostly proposed by the public policy community. another work argues to divide ai governance research into three clusters: the technical landscape, ai politics, and the ideal governance of ai.
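a minimal sketch of the "best response to the best response" logic on a toy two-player game follows; the action names and payoff numbers are hypothetical illustrations, not taken from the paper. the regulator, as leader, commits to a policy; the ai firm, as follower, best-responds; and the leader chooses the commitment whose induced response maximizes the leader's own payoff.

leader_actions = ["strict", "adaptive", "laissez_faire"]
follower_actions = ["cautious_rnd", "aggressive_rnd"]

# payoffs[(l, f)] = (leader_utility, follower_utility); hypothetical numbers
payoffs = {
    ("strict", "cautious_rnd"): (3, 1), ("strict", "aggressive_rnd"): (1, 0),
    ("adaptive", "cautious_rnd"): (4, 2), ("adaptive", "aggressive_rnd"): (2, 3),
    ("laissez_faire", "cautious_rnd"): (2, 2), ("laissez_faire", "aggressive_rnd"): (0, 4),
}

def follower_best_response(l):
    # the follower observes the leader's commitment and maximizes its own payoff
    return max(follower_actions, key=lambda f: payoffs[(l, f)][1])

def stackelberg_equilibrium():
    # the leader anticipates the follower's best response when choosing its commitment
    best = max(leader_actions,
               key=lambda l: payoffs[(l, follower_best_response(l))][0])
    return best, follower_best_response(best)

print(stackelberg_equilibrium())  # ("strict", "cautious_rnd") for these toy payoffs

swapping which player is designated the leader yields the other of the two settings discussed in the text.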
for example, ibm explored an ai governance framework and provides services to help enterprises use ai in a responsible and governed manner, and highlights that proper ai governance would bring organizations considerable benefits. organization: lastly, we briefly summarize the arrangement of the rest of this paper: section 2 provides brief background on ai governance and stackelberg games; based on this, section 3 introduces our stackelberg framework for ai governance; we then instantiate the general framework and discuss two special cases that fit the ai governance problem naturally; we end by summarizing the main message and pointing out a number of directions for future work. in general, ai governance refers to 'the ability to direct, manage and monitor the ai activities of an organization', the ultimate goal of which is to ensure ai is trustworthy and responsible and hence benefits humankind. in this section, we show that the abstraction of the ai governance problem naturally lends itself to a stackelberg game structure that couples 1) ai innovation, development and adoption and 2) ai regulation. while most of the contemporary work focuses on simultaneous play games and the corresponding nash equilibrium, the structure of stackelberg games makes them a much better fit in the context of ai governance for a number of reasons: • the choice of the leader in stackelberg games naturally gives rise to two different settings, which could be mapped to the governance of two general categories of ai sub-sectors. together, the above aspects show that the interaction between the regulatory agencies and ai firms has an intrinsic structure reminiscent of a stackelberg game, which motivates us to propose a game-theoretic modeling framework for ai governance. we assume that the corporate players establish their ai strategies and business decisions based on the evaluation of ai performance along a suite of key dimensions integrated in µ, and the cost as well as constraints due to the regulation and standards set by the ai governance departments, denoted c. in this paper, we propose a game-theoretic framework for (1) understanding the strategic interactions between the ai governance agencies and enterprises, and (2) designing optimal regulatory policies that ensure the development and deployment of ai is safe, responsible, and trustworthy. as the traditional tools are inadequate to effectively address the aforementioned challenges in ai governance, this motivates us to explore ai-driven methods for the optimal governance of ai, which would serve as a complementary approach to the existing qualitative work, not a replacement for it. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/91.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/91.txt new file mode 100644 index 0000000000000000000000000000000000000000..c94e9671f34e8aaf8a947eee0c3f66fe1daae925 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/91.txt @@ -0,0 +1 @@ +scientific research is impossible without correlating the results obtained with the work of other scientists. other works should be mentioned by inserting bibliographic links in the article.
experts in scientometrics rationalize the need to establish such links between studies and formulate various citation theories.the normative theory of citation, which draws on the principles of scientific ethics formulated by merton (1973), assumes that references in scientific papers are made in order to indicate the works that are the basis for research or topically related, describe the research methods used and are necessary to discuss the results. according to the reflexive theory, links between scientific works indicate the state of science and help to create its formalized representation, e.g. maps of science (akoev et al. 2014).thus, the beneficiary of scientific citation correctness is the entire scientific community, both researchers who create articles on their results and administrators who monitor achievements in various scientific fields. mentioning relevant and remarkable results of other scientists is one of the basic requirements in the construction of scientific texts, in particular from the point of view of the editors of scientific journals.these requirements are noted in academic writing guidelines (emerson et al. 2005; gray et al. 2008; pears and shields 2019) and are confirmed in practice, for example, by the results of studies of publication activity in top-rated international journals (arsyad et al. 2020).authors of scientific papers choose the sources for citation and positions for the links by themselves and at present, this process is not automated. in this work, we investigate the possibility of creating a recommendation algorithm that allows one to find missing bibliographic references in a scientific article, that is, to identify those text fragments where it is necessary to mention another research work. for this purpose, we estimate the probability of link presence in fragments of the text using a semisupervised machine learning approach. the formal statement of the problem under consideration is the following: it is required to automatically find in the text of a scientific article those fragments (sentences) where the link is absent, but necessary, using a set of labeled fragments with and without links as training data.the task of classifying text fragments in relation to the presence of the links in them is methodologically similar to the task of sentiment analysis, in which texts are automatically classified, mainly as positive and negative, according to their emotional characteristics. in addition to dividing fragments into positive and negative, the sentiment analysis approach is used to distinguish other classes, including citation significance detection (aljuaid et al. 2021;prester et al. 2021;varanasi et al. 2021;färber and ashwath 2019). the problem of identifying missing or unnecessary links in the text resembles sentiment analysis and the sought-for sentiment here is the author's need to confirm the formulated statement.another close line of research is named entity recognition (ner) using prediction by classifiers. a similar problem is considered in (fu et al. 2021), where ner problem is solved in the span prediction approach. ner can be performed in two stages: identifying fragments with a high probability of containing entities, and determination of the exact positions of these entities (ziyadi et al. 2020;li 2021). some methods of ner also take into account the context of entities, both local and global, or external (wang et al. 
2021).the task of sentence classification accounting for their nearest context has been discussed in a number of studies. fiok et al. (2020) used contextualized embeddings created by language models, which high quality comes at the price of speed. glazkova (2020) studied topical classification and showed that models taking context as input performed better than context-free models. in those works, context size is determined once based on some bias and may not be optimal for a certain text corpus.the method introduced in this work also can be considered as kind of a resampling technique. until now resampling has been used mainly for the purpose of it is important that all of the above algorithms do not take into account the natural structural units of texts (i.e. sentences and paragraphs) since these algorithms are adjusted to a certain size of the context, which is a fixed number of words, while the size of sentences and paragraphs varies.the normative theory of citation, which draws on the principles of scientific ethics formulated bymerton (1973), assumes that references in scientific papers are made in order to indicate the works that are the basis for research or topically related, describe the research methods used and are necessary to discuss the results.thus, the beneficiary of scientific citation correctness is the entire scientific community, both researchers who create articles on their results and administrators who monitor achievements in various scientific fields. in this work, we investigate the possibility of creating a recommendation algorithm that allows one to find missing bibliographic references in a scientific article, that is, to identify those text fragments where it is necessary to mention another research work. the formal statement of the problem under consideration is the following: it is required to automatically find in the text of a scientific article those fragments (sentences) where the link is absent, but necessary, using a set of labeled fragments with and without links as training data.the task of classifying text fragments in relation to the presence of the links in them is methodologically similar to the task of sentiment analysis, in which texts are automatically classified, mainly as positive and negative, according to their emotional characteristics. sentences and paragraphs) since these algorithms are adjusted to a certain size of the context, which is a fixed number of words, while the size of sentences and paragraphs varies.the task of determining missing links is formalized as finding text fragments where the link is absent, but necessary, or, conversely, is present, but not needed.the hypothesis of our study is the following: text sampling strategies that take into account the context increase the accuracy of sentence classification used to predict missing bibliographic links in scientific articles.we suggest that a positive sample consists of a bibliographic link surrounded by its context from the original text, and a negative sample is a fragment with no bibliographic link in it. nevertheless, to approach the specified goal in the proposed algorithm, as a context we consider a fragment which size is determined by the number of sentences, and not words, unlike neural network algorithms. 
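the sentence-level context idea can be sketched as follows; this is an illustrative reading of the sampling strategies rather than the authors' exact implementation, and the citation-marker regex and the window size are assumptions.

```python
# build classification samples whose context is measured in sentences:
# a positive sample is a fragment centred on a sentence containing a
# bibliographic link, a negative sample is centred on a sentence without one.
import re

LINK_PATTERN = re.compile(r"\(\w[\w\s.&]* \d{4}\)|\[\d+\]")  # crude citation marker

def build_samples(sentences, context_size=2):
    samples = []
    for i, sentence in enumerate(sentences):
        label = 1 if LINK_PATTERN.search(sentence) else 0
        left = max(0, i - context_size)
        right = min(len(sentences), i + context_size + 1)
        samples.append((" ".join(sentences[left:right]), label))
    return samples

# varying context_size gives the different sampling strategies whose
# classifiers are later combined by ensemble voting.
```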
this is assumed as sampling strategy #0, and the classification result on data sampled that way is considered as a baseline: classification accuracy with sampling strategy #0 measured by f1-score is 0.the paper proposes a new method of determining the probability of a bibliographic link in fragments of a scientific article. the approach assumes sentence classification with ensemble voting, in which different data sampling strategies correspond to estimators implementing the same classification method.the main innovation of the proposed method is finding the link context that maximally affects the probability of detecting a missing bibliographic link in a sentence. the considerable impact of the context on the classification performance demonstrates that semantics related to a bibliographic link can be localized in fragments of different lengths. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/910.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/910.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5a805e52b521395d049cbb8bbc163de8ed1017c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/910.txt @@ -0,0 +1 @@ +the ability to predict psychological traits from written text without direct communication with the individual is a compelling opportunity provided by modern natural language processing methods. the recent advancements in the performance of large language models (llms) (e.g. touvron et al., 2023, chowdhery et al., 2022) could further drive research in the field of automated psychological traits prediction. however, these methods do not always accurately measure what they are intended to, and advanced deep learning models may not align with psychological theory or may fail to demonstrate this alignment. this issue, known as validity, raises questions about how to improve methodology to maintain validity when using automatic reasoning in the psychological domain (ernala et al., 2019). in this study, we propose a method for extracting text spans that may indicate one of the big5 psychological traits (openness, conscientiousness, extroversion, agreeableness, and neuroticism) (costa and mccrae, 1980) by utilizing a standard questionanswering (qa) task to enhance validity. extractive qa refers to methods that answer the question by predicting the start and end token of the candidate span within the context.since we want the model to be able to give a negative answer when the trait asked for in the question is not present in the provided context, we utilized the roberta model fine-tuned using the stanford question answering dataset version 2 (squad 2.to investigate how the proportion of unanswerable questions in the dataset affected the final performance, we conducted three experiments with different percentages of unanswerable questions., 2018). the dataset fields include: (1) context, (2) question, and (3) answer which includes answer start and text. the answer field contains the text of the correct answer and its starting character position in the context. since some questions may have multiple correct answers for a given context, the training set had to be formatted differently from the validation set to accommodate the prescribed dataset format. in the training set, every answer to a single question for a given context was considered to be a separate data entry. 
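as an illustration of the squad2-style setup just described, the snippet below runs an extractive question-answering pipeline that is allowed to return an empty answer when the questioned trait is not present; the checkpoint name, question wording and example context are assumptions for illustration, not the study's exact data.

```python
# extractive qa over a personality-related context; an empty answer corresponds
# to the squad2 "unanswerable" case, i.e. the trait is judged not present.
from transformers import pipeline

qa = pipeline("question-answering", model="deepset/roberta-base-squad2")

context = ("i love meeting new people, and i am usually the one organising "
           "parties for the whole office.")
question = "which text span indicates extraversion?"

result = qa(question=question, context=context, handle_impossible_answer=True)
print(result["answer"], result["score"])
```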
the question for the corresponding data entry was formed using a trait that was not found to be present in the relevant context when considering cosine similarity. for the validation set, we simply added the question with the trait not present in the respective context alongside the empty answer. the apparent improvement of the results for experimental settings with 33% and 66% unanswerable examples, in comparison to the benchmark, can be deceiving because those two settings obtain notably higher performance for examples with no answer while lacking in performance when considering examples that do have an answer. due to this anomaly, the best variation for this task is probably the one with 33% unanswerable examples in the training set which is in line with the proposed value(rajpurkar et al. even though we do not know the exact reason for this phenomenon, we can speculate that this may be due to the difficulty of the underlying task of extracting psychologically indicative(rajpurkar et al. we achieved this using the standard extractive qa task version that includes examples that do not contain answers in the context for a given question. however, due to the imbalance in the validation set, we dismiss this result in favor of the one that has more balance in the performance on both unanswerable and answerable portions of the validation set. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/911.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/911.txt new file mode 100644 index 0000000000000000000000000000000000000000..9dd971eb9c73c3e6244a112ddce359a3dabed76b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/911.txt @@ -0,0 +1 @@ +recently, online courses have been conducted almost everywhere in the world 1 . this situation virtualized the lecture environments. suddenly going into such a process left physics teachers desperate to do in-class activities. when considered specifically in the physics lesson, classroom practices have a very important place. it is very important to organize activities in the online environment as in the classroom 2 . it can be efficient in providing visualization through video or ready-made simulations. however, it may be insufficient to attract the attention of the students and to activate the student in the process.within the scope of the physics course, current digital learning environments are divided into two categories as 'constrained' and 'less constrained' 3 . digital learning environments used in physics education, such as phet simulations 4 , physlets 5 and quvis animations 6 , are 'constrained'. these are ready-made simulations that focus on the points that experts who prepare simulations in these programs consider the most important. however, since these ready-made simulations allow limited applications, adjustments may not be made on all variables in some subjects. algodoo 7 , interactive physics 8 , and fizika 9 programs are 'less constrained' digital learning programs. there are many variables in these programs such as gravity, air drag, flexibility, speed, mass, color, etc. and these variables can be given different values as desired. in this way, various unique simulations that focus on different points within the scope of the relevant subject are designed and provide the opportunity to make original applications. 
algodoo interactive physics simulation program, which can be used in classroom applications that require active participation of the student, can be very useful in online lessons 10 . the program is very small and can be downloaded for free 11 . there are many applications on this simulation program, which is developed by considering the laws of physics, especially on mechanical physics. within the scope of the program, there are many possibilities such as adding objects in various geometric shapes, adjusting all kinds of physical properties of the objects, adding the graphics of many variables of the object relative to each other, providing the visualization of the vector representation of the variables of the object, adding an inclined plane at different angles, adding forces in different sizes and directions 12,13 .the implementation of the algodoo program can be done in several ways:• first, theoretical explanation is made as chapter by chapter. then, students are asked to design a simple case study related to the subject on the simulation. in this way, it is ensured that the subject scope is learned by discovering its equivalent in daily life. for example, in order to analyze the relationship between mass and acceleration, forces of the same magnitude are added to objects of different mass, and the relationships between accelerations of objects are analyzed.• an example problem written using the values of a previously prepared simulation is solved with the class after the lecture. then the ready-made simulation is opened and the sample real-life problem is analyzed by the students. it is ensured that the accuracy of the result reached by theoretical calculation is tested and the knowledge learned is reinforced. for example, students are asked to calculate the magnitude of the final velocity as a result of the 15 m displacement of a static 2 kg object on a frictionless ground, with a force of 10 n on it. then, this sample situation is analyzed by providing the necessary visualizations through algodoo.• lectures can be made directly on the algodoo program. first, the situation related to the subject is shown on algodoo and questions are asked to make students wonder and think about the subject. then the situation is explained by lecturing and the simulation is taken back and analyzed again, this time with theoretical calculations. for example, by enabling a force to act on an object first, the simulation is stopped after a certain period of time. at this point, it is explained what impulse is and what its equation is. then the simulation is taken back and the force-time graph of the object is added, the motion is started again and the impulse is calculated.• within the scope of an example case, students can be provided to determine the relationship between variables by testing through the program. for example, in an application related to projectile motion, the relationship between the angle and the maximum height can be analyzed by increasing the angle of the initial velocity of the object with the horizontal. or, in another application, they can analyze the time-dependent graphs of the position and velocity during accelerated motion and how these graphs change depending on the force acting on the object.in this study, the development process and data analysis processes of 6 sample applications on impulse and momentum that can be used during the lesson using the algodoo program will be explained. 
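the in-text example of the 2 kg object pushed by a 10 n force over 15 m can be checked with a few lines of arithmetic; this is a minimal sketch of the theoretical calculation that the algodoo simulation is meant to confirm.

```python
# work-energy theorem check for the example above: a 2 kg object, initially at
# rest on a frictionless surface, is pushed by a 10 n force over 15 m.
from math import sqrt

mass, force, displacement = 2.0, 10.0, 15.0   # kg, n, m
work = force * displacement                    # w = f * d = 150 j
v_final = sqrt(2 * work / mass)                # from (1/2) * m * v^2 = w
print(round(v_final, 2))                       # ~12.25 m/s
```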
in this way, various unique simulations that focus on different points within the scope of the relevant subject are designed and provide the opportunity to make original applications. algodoo interactive physics simulation program, which can be used in classroom applications that require active participation of the student, can be very useful in online lessons10. there are many applications on this simulation program, which is developed by considering the laws of physics, especially on mechanical physics. within the scope of the program, there are many possibilities such as adding objects in various geometric shapes, adjusting all kinds of physical properties of the objects, adding the graphics of many variables of the object relative to each other, providing the visualization of the vector representation of the variables of the object, adding an inclined plane at different angles, adding forces in different sizes and directions12,13. for example, in order to analyze the relationship between mass and acceleration, forces of the same magnitude are added to objects of different mass, and the relationships between accelerations of objects are analyzed.in this study, the development process and data analysis processes of 6 sample applications on impulse and momentum that can be used during the lesson using the algodoo program will be explained. in two-dimensional isolated systems, momentum is conservative in both axes, and the final momentum in both axes is equal to the initial momentum in both direction and magnitude.in order to analyze the relation of impulse being equal to momentum change, a circular body was added in a simulation where air friction and gravity were removed, and a force of 4 n was added on the object.the ability to add objects of different mass and velocities in different directions on the simulation leads to various applications for momentum conservation. in the application related to the collision, two circular objects are added by removing the air friction and gravity and these objects are adjusted to have a mass of 3 kg. is examined, the first momentum of the red one out of two objects with a mass of 3 kg can be calculated as 12 kg. when the momentum values obtained at the moment the simulation is stopped, it is seen that the total final momentum magnitudes on both x and y axes are also zero. algodoo program is a free interactive physics simulation program. the program is developed considering the laws of physics, and instead of watching ready-made simulations, optional changes can be made on physical quantities in this program. it is very difficult to make applications due to limited opportunities in these times when the trainings continue online throughout the world, and at this point, applications to be made with the algodoo program, which can be downloaded and used for free, will be very useful. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/912.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/912.txt new file mode 100644 index 0000000000000000000000000000000000000000..80592f38eae1db4e373c2a5563e3e9b03262c5f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/912.txt @@ -0,0 +1 @@ +most work on bias in nlp only considers negative or pejorative language use (kaneko and bollegala, 2019;sheng et al., 2019;webson et al., 2020;pryzant et al., 2020;sheng et al., 2020). 
while recent work has delved into implicit bias (rashkin et al., 2015;sap et al., 2017sap et al., , 2020)), they are still limited as they rely on identifying specific demographic dimensions or an individual's intent. crucially, language production is still taken to be 'unbiased' by default. research in social psychology suggests a different framing of bias that encompasses all language use -we can analyze bias as changes in (language) behavior reflecting shifting social dynamics (van dijk, 2009). under this view, all the language we produce is biased, with the nature of the bias determined by the social relationships between the speaker and target. inspired by this idea, govindarajan et al. (2023) proposed a new framing of bias by modeling intergroup relationships (igr, in-group and out-group) in interpersonal english language tweets, potentially capturing more subtle forms of bias. this framing raises a question: which linguistic features vary systematically in different intergroup contexts?the linguistic intergroup bias (lib; maass et al., 1989;maass, 1999) hypothesis offers some clues towards linguistic features that change with shifting intergroup contexts. lib speculates that socially desirable in-group behaviors and socially undesirable out-group behaviors are encoded at a higher level of abstraction. the theory however relies on a restricted definition of abstractness that relies solely on predicates, and an ad-hoc analysis of 'social desirability' that doesn't permit largescale analysis. we can do better by using two welldefined pragmatic features: specificity (li, 2017) is a pragmatic feature of text that measures the level of detail (similar to abstract-concrete axis), while affect is a feature that measures the attitude of a speaker towards their target (sheng et al., 2019) in an utterance (analogous to social desirability).specificity and affect are analogous to the lib axes of language variation that are easy to annotate and compute. furthermore, specificity is a more general property than abstractness in the lib -specificity is a property of the whole sentence rather than just the predicate. thus, our study focuses on intergroup bias more generally, rather than the narrow parameterization of the lib. similar to the lib, our formulation of intergroup bias predicts that positive affect in-group utterances and negative affect out-group utterances are encoded with lower specificity (i.e. more generally). tables 1 and 2 compare the predicted language variation between the lib and our formulation.in this work, we perform the first large-scale study of linguistic differences in intergroup bias by analyzing its nature in the corpus of english tweets from govindarajan et al. (2023), which makes use of naturally occurring labels for in-group vs. outgroup. this distinguishes us from existing work in lib which mostly relies on artificial responses from participants in studies, rather than natural language use in the wild. 
to bolster our probing investigation, we also explore it causally: exploiting the quantitative nature of our formulation to study whether a neural model finetuned for igr prediction uses pragmatic features such as specificity and affect in its decision-making process through counterfactual probing techniques (ravfogel et al., 2021). to summarize our findings, we find a modest positive correlation between affect and igr in our data, with a positive causation effect as well: making a tweet's affect more positive makes it more likely to be in-group regardless of its specificity. we find no correlation between specificity and igr in our data. surprisingly, we discover a causal effect of low specificity on igr prediction that is uniform across affect, but none for high specificity. we hypothesize that this could be because of damage to the underlying language model, but we leave further investigation to future work. we release our code and data at github.com/venkatasg/intergroup-probing. the theory however relies on a restricted definition of abstractness that relies solely on predicates, and an ad-hoc analysis of 'social desirability' that doesn't permit large-scale analysis. similar to the lib, our formulation of intergroup bias predicts that positive affect in-group utterances and negative affect out-group utterances are encoded with lower specificity (i.e. more generally). to summarize our findings, we find a modest positive correlation between affect and igr in our data, with a positive causation effect as well: making a tweet's affect more positive makes it more likely to be in-group regardless of its specificity. table 2 (predicted language variation in our more general formulation, using specificity and affect): positive affect is predicted to be encoded with low specificity in-group and high specificity out-group, while negative affect is predicted to be encoded with high specificity in-group and low specificity out-group. specificity is a pragmatic concept of text that measures the level of detail and involvement of concepts, objects and events. govindarajan et al. (2023) introduced the first dataset annotated for interpersonal emotion (defined as only emotions expressed towards or in connection with a target), using the plutchik wheel (plutchik, 1980, 2001) as a framework. specificity of the tweets in the dataset is calculated using the specificity prediction tool from gao et al. we sample 3 tokens at random from each sentence in the training and validation split of our dataset, train an iterative linear classifier on the model's representations of these tokens using inlp (against the affect label of the tweet), and use the decision boundary learned by the classifier to intervene by pushing model representations to have more positive affect or more negative affect. interventions towards positive affect should induce the model to predict low specificity tweets to be in-group and high specificity tweets to be out-group, while interventions towards negative affect should affect the model conversely. interventions towards higher specificity should induce the model to predict positive affect tweets as out-group and negative affect tweets as in-group, while interventions towards lower specificity should affect the model conversely.
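as a rough illustration of the intervention idea (not the authors' exact implementation, which iterates nullspace projections over many inlp steps), the sketch below fits a linear affect probe on hidden representations and shifts those representations along the probe's weight vector before re-running a frozen igr classifier; the stand-in data, dimensions and single-step simplification are all assumptions.

```python
# single-step counterfactual steering along a linear "affect" direction.
import numpy as np
from sklearn.linear_model import LogisticRegression

# hidden: (n_examples, hidden_dim) representations from the finetuned model
# affect: 0 = negative affect, 1 = positive affect (annotation labels)
hidden = np.random.randn(200, 768)           # stand-in data for the sketch
affect = np.random.randint(0, 2, size=200)   # stand-in labels

probe = LogisticRegression(max_iter=1000).fit(hidden, affect)
direction = probe.coef_[0] / np.linalg.norm(probe.coef_[0])

def intervene(h, alpha=3.0):
    # alpha > 0 pushes toward "more positive affect", alpha < 0 toward negative
    return h + alpha * direction

steered = intervene(hidden)
# the steered representations would then be fed to the frozen igr head to see
# whether its in-group / out-group predictions change.
```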
overall, we observe that in both cases, interventions had the same effect on tweets that were annotated with positive affect as they did on tweets with negative affect (and similarly for tweets with high and low specificity)so we only show the percentage of all tweets in the test split classified as in-group.affect as figure2shows, pushing model representations towards having more positive affect causes almost all tweets in the test split of our data to be classified as in-group after 32 iterations of inlp.specificity figure3shows that pushing model representations towards being more specific has no effect on model behavior and is indistinguishable from the control; but pushing towards lower specificity has a noticeable effect -interventions after 48 iterations of inlp lead to all the data being predicted as in-group. our hypothesis states that general language is more likely in positive affect in-group contexts; however we find no difference in the model's behavior on positive versus negative affect tweets as reported earlier.overall our findings indicate that while the model does use affect towards making its decision on the interpersonal group relationship prediction task (albeit uniformly across specificity), it doesn't use specificity as we had predicted. firstly, fine- while some of the interventions push the model's predictions to be in the general lexical space desired (which probably explain the affect intervention results), the lack of contextual fit due to lm degradation may explain the inconclusive results, and lack of interaction between affect and specificity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/913.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/913.txt new file mode 100644 index 0000000000000000000000000000000000000000..4f219ef74dfb51c4de423d3ef945ab718b7cc536 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/913.txt @@ -0,0 +1 @@ +the problem of this paper is to understand if we can use a large language model (specifically gpt3.5-turbo) to perform a thematic analysis (ta) of semi-structured interviews, also focusing on the last phase of a ta, which entails the writing up of the results. this work is conducted within a process of human-ai collaboration, a concept in the field of artificial intelligence (ai) that assumes that humans and ai systems can collaborate to achieve goals and tasks (see e.g. vössing et al., 2022, siemon, 2021 for a general overview; and jiang et al. 2021, specifically for qualitative analysis reflections on this). elsewhere, i presented an embryonal, and perhaps still crude, process for conducting a ta with the same llm, following some of the key phases to a ta proposed by braun & clarke (2006) in their seminal work. these 6 phases include the human analyst(s): familiarisation with the data (phase 1), generation of initial codes, i.e. relevant features in the data (phase 2, done inductively in my case), the generation of themes, or patterns in the data, based on sorting the code in themes (phase 3), confirming the validity of themes, by reviewing them (phase 4), the renaming and summarising of themes, to confirm their validity (phase 5) and the writing up of the results, as an integral part of ta (phase 6). in my previous work i argued that the llm can only reasonably perform phases 2-5 of a ta. 
phase 1 and phase 6 were not tackled directly, either because the memory and tokens limits of the llms do not allow the llm to perform the work (phase 1familiarisation) or because of potential ethical issues (phase 6writing the report). in this paper, i would like to tackle some aspects associated with phase 6 of a ta and the process of writing up the results.the focus of this paper is methodological: establishing the validity of the use of llms for the conduction of qualitative analysis of data (specifically ta), with a focus on semi-structured interviews and largely in the context of social sciences. as i discussed in my previous paper, one of the epistemological challenges for this is that qualitative analysis is normally done by humans through interpretation of meaning, and this is something that llms are not necessarily capable as they operate on the language from a computational, logical and structural perspective (floridi, 2023). nonetheless, it has been possible to show that an llm can perform something looking like a basic inductive ta with at least some degree of validity, through a qualitative comparison with the work of human analysts (de paoli, 2023). other authors have used cohen's kappa to confirm inter-reliability between an llm and human conders, based on deductive coding processes (xiao et al., 2023;gao et al., 2023). ta can indeed be done with a deductive approach (where the grid of analysis is decided before-hand) or inductive where codes and themes are generated bottom-up from the data. i am interested in performing with llms an inductive approach to ta. it remains therefore to be assessed whether we can also cover phase 6 of the ta. for clarke & braun (2013): "writing is an integral element of the analytic process in ta (and most qualitative research). writing-up involves weaving together the analytic narrative and (vivid) data extracts to tell the reader a coherent and persuasive story about the data and contextualising it in relation to existing literature.". as such writing cannot be detached from the process of doing a ta. therefore, to assess if an llm can satisfactorily conduct a ta, we need to attempt also phase 6 with the model.there is discussion about using llms to write research work, and there clearly are important ethical implications associated with this (see lund et al. 2023 for an overview), whilst we have also seen authors citing these models as co-authors (e.g. king & chatgpt). on the other hand, journals and scholarly publishers have begun to create policies to clarify what is the acceptable use of these models in academic publishing. these, correctly in my view, start to be very stringent. for example, the editor-in-chief of the journal science stated that they decided to update their "license and editorial policies to specify that text generated by chatgpt (or any other ai tools) cannot be used in the work, nor can figures, images, or graphics be the products of such tools. and an ai program cannot be an author." (thorp, 2023, p. 313). there also have been cases where publishers have removed chatgpt from the list of authors of already published papers (e.g. o'connor, 2022).we can agree with thorp when he points out that ai manipulated images or text which is not produced by an author should not be included in academic publications and may largely amount to academic misconduct. 
however, we should consider that using llms to write research results may also entail working on intermediate phases of the writing process and within a human-ai collaboration approach, or on scientific products which may have other applied use for other research activities. for braun & clarke (2006), phase 6 of ta, is an integral part of the analysis method, and therefore we should explore if also this phase can be performed with the support of an llm. this paper will attempt at tackling this problem.in this way the focus of the manuscript is not on the ethics discourses around the use of llms for writing scholarly work, but it is rather methodological. i will not propose a process to use the llm to write a full paper (or part of it), rather i will focus on using the llm to write intermediate narratives/models, which can be used to support other research activities. i will concentrate on using the llm to write user personas, based on the results of a ta (of semi-structured interviews) conducted by the llm in cooperation with the author. user personas are an ideal candidate for the goal of this work. the norman&nielsen group (nng) defines a user persona as "a fictional, yet realistic, description of a typical or target user of the product. it is used to promote empathy, increase awareness and memorability of target users, prioritize features, and inform design decisions." (harley, 2015). user personas are an important and established tool in user-centered design (ucd) and contribute significantly toward the creation of e.g. tools or services and help the designers to focus on people and specific aspects of them, during the design activities. a persona is normally a narrative with a variety of additional details (such as a picture, or specific traits of the user). they are built with a focus on identifying key aspects of the target users for a new design (or re-design), which include for instance the identification of the user needs, of their pain points, objectives, attitudes, behaviours, skills and so on, all of which should contribute to inform design decisions about e.g. a digital service. a useful overview of the advantages of user personas is provided by miaskiewicz & kozar (2011), who reviewed how several key authors have formulated these advantages. for a better understanding of the perception and importance of personas in ucd readers can consult for example the papers by nielsen & storgaard (2014) or matthews & whittaker (2012).there may also be different types of personas used in ucd, which can support the design work in different ways. again nng, for example write that there can be broad and narrow personas (salazar, 2020). the first is based on shallow data and can serve high level decisions. the narrow ones instead are based on more granular data and can support more specific decisions. likewise, nng stipulates that personas can be lightweight (or proto persona), qualitative and statistical (laubheimer, 2020). the last two in particular are based on solid empirical evidence, with the statistical personas based on large quantity of data (but very costly to produce) about the target uses and the qualitative personas (which are the most widely used in ucd) that are based on the use qualitative research data, like interviews conducted with a sample of target users or with ethnographic observations of e.g. the target users at work. it is the qualitative and narrow personas i would like to focus on. 
for building qualitative personas a common way to gather data is with semi-structured interviews with a sample of the potential target users, and then an analysis is done on the interviews to identify patterns across the interviews. these patterns are the basis for building the personas narrative, including e.g. their background, goals, preferences or challenges. this is because recurring patterns signal potential common user needs or pain points which can and should then be the focus of the ucd work. ta is one of the analysis methods that can be adopted for the identification of these patterns across the interviews (see for example turner et al., 2013or rosala, 2019).we can therefore understand that (qualitative) user personas are an intermediate product of the user research in ucd, which can support the work of designers, and that they are the results of collecting empirical data and of qualitative analysis. if, as i suggested previously, we assume that that an llm can perform at least in an embryonal form an inductive ta of semi-structured interviews (phases 2-5 according to braun & clarke), we can explore whether the llm can satisfactorily produce personas narratives based on this same analysis, or at least produce something which has some semblance with a user persona. personas can therefore potentially be a good example of textual result here we can attempt to cover phase 6 of a ta as proposed by braun & clarke. this will be to focus of the following pages. elsewhere, i presented an embryonal, and perhaps still crude, process for conducting a ta with the same llm, following some of the key phases to a ta proposed bybraun & clarke (2006)in their seminal work. relevant features in the data (phase 2, done inductively in my case), the generation of themes, or patterns in the data, based on sorting the code in themes (phase 3), confirming the validity of themes, by reviewing them (phase 4), the renaming and summarising of themes, to confirm their validity (phase 5) and the writing up of the results, as an integral part of ta (phase 6).the focus of this paper is methodological: establishing the validity of the use of llms for the conduction of qualitative analysis of data (specifically ta), with a focus on semi-structured interviews and largely in the context of social sciences. i will concentrate on using the llm to write user personas, based on the results of a ta (of semi-structured interviews) conducted by the llm in cooperation with the author. the last two in particular are based on solid empirical evidence, with the statistical personas based on large quantity of data (but very costly to produce) about the target uses and the qualitative personas (which are the most widely used in ucd) that are based on the use qualitative research data, like interviews conducted with a sample of target users or with ethnographic observations of e. for building qualitative personas a common way to gather data is with semi-structured interviews with a sample of the potential target users, and then an analysis is done on the interviews to identify patterns across the interviews.we can therefore understand that (qualitative) user personas are an intermediate product of the user research in ucd, which can support the work of designers, and that they are the results of collecting empirical data and of qualitative analysis. 
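one way such a persona-drafting prompt could look is sketched below; this is a hypothetical illustration using the openai chat api with gpt-3.5-turbo, and the theme lists and prompt wording are invented for the example rather than taken from the study.

```python
# hypothetical sketch: asking the model to draft a persona from themes that an
# llm-assisted thematic analysis produced. requires OPENAI_API_KEY in the env.
from openai import OpenAI

client = OpenAI()

themes = {  # invented example themes, standing in for the ta output
    "needs": ["clear guidance on data sharing", "faster ethics approval"],
    "challenges": ["limited technical support", "fragmented tooling"],
}

prompt = (
    "you are supporting user-centred design. using the themes below, derived "
    "from a thematic analysis of semi-structured interviews, write a short "
    "qualitative user persona (name, background, goals, pain points).\n"
    f"theme-needs: {themes['needs']}\ntheme-challenges: {themes['challenges']}"
)

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": prompt}],
)
print(response.choices[0].message.content)
```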
if, as i suggested previously, we assume that that an llm can perform at least in an embryonal form an inductive ta of semi-structured interviews (phases 2-5 according tobraun & clarke), we can explore whether the llm can satisfactorily produce personas narratives based on this same analysis, or at least produce something which has some semblance with a user persona. my approach is more cautious, and i propose that methodological issues should be put into focus alongside our attempts to establish whether we can indeed perform a qualitative analysis with llms. however, these personas generated with the llm without being based on any empirical material are entirely fictional, and do not have the realistic component that the definition of a persona seen earlier implies ('fictional, yet realistic').however, i believe it is rather different to use the llm to generate user personas fictionally, without any underlying empirical research, and have instead personas generated based on some form of ta and on real qualitative data, within a human-ai research collaboration mode. if the llm (with the support of the human researcher) can produce at least satisfactorily some forms (or at least ideas) of user personas based on a data analysis, we may also be able to make a step forward toward covering phase 6 of a ta.the design i propose here is based on performing three steps: (1) first perform a ta with llm on the interview chunks, replicating and enriching the process already proposed in my previous work; (2) using the llm to create personas based on the results of the ta, with appropriate prompting; (3) offer an evaluation of the results, by looking at which themes and codes were used by the llm to create one persona. i would also argue that the personas generated by the llm using a ta of semi-structured interviews, are probably to be considered not finished personas, but more like initial prototypes, which would require further refinement by the human analysts. indeed, the model can generate multiple ideas for the personas, by mixing themes (in my example i mixed 2 theme-needs and 2 theme-challenges, and this would allow more than 6000 combinations), and then the analyst can decide which personas do seem more representative of the target user group, and eventually also enrich them with further details, in line with the proposed design work. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/914.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/914.txt new file mode 100644 index 0000000000000000000000000000000000000000..e18f2f28b1898a35e2a0447c046c8efb2d2638e9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/914.txt @@ -0,0 +1 @@ +recent progress in large language models (llms) has resulted in a rapid increase in the ability of models to produce convincingly human-like text, sparking worries that llms could be used to spread disinformation, enable plagiarism, and maliciously impersonate people. as such, researchers have begun to develop methods to detect ai generated text. these include watermarking algorithms, which subtly modify the outputted text to allow for better detection, given that the detector has sufficient access to watermarking parameters. differing from previous work, where the focus is on determining if text has been produced by a watermarked model, here we study the problem of if a language model has been watermarked. 
critically, our black-box algorithms only require querying the model and do not necessitate any knowledge of underlying watermarking parameters., 2023). a large sequence model s over a vocabulary t is a map from a finite sequence of tokens t * to a set of logits over all tokens l ∈ r |t | , along with a sampler r : r |t | → ∆t that randomly outputs a token based on the output logits. a watermark w s with secret key s over a vocabulary t is a map w s : l t → l t , where l t is the set of llms with vocabulary t . if, for any pair of such llms and all keys s, w s (l a ) is identical to w s (l b ) up to the same token permutation, then each w s is a principled watermark. for instance, the identity watermark is a valid principled watermark, but is not algorithmically detectable within a sequence of text. a watermark w s is (p, p )detectable for a model l, some expression p , and p ∈ (0, 0.5], if there exists a detector d s : t n × t n → {0, 1} that runs in p (n) time and correctly distinguishes between sequences of length n generated by l and w s (l) with probability at least 1 2 + p. while quality is somewhat subjective, if it is impossible to distinguish watermarked text from standard text generated by a llm, then the watermark must not affect any perceivable metric of quality. let w s be a watermark where s has length m and l is a llm, p and q are polynomials, n ≤ p(m), and q(m) ≥ 2. w s is quality-preserving if, for all l, m, n, a, p, and q, a is correct with probability at most 1/2 + 1/q(m) when given texts of length n generated by l and w s (l), over the randomness of llm generation and the choice of s.such a watermark is detectable, and perfectly preserves quality, though it fails the desideratum that watermarks should still be detectable after the text is modified slightly.though other watermarks are less sensitive to changes to the text, all known watermarks are vulnerable to attacks that preserve generated text quality while evading detection(sadasivan et al. suppose we have two generated texts from a model l and watermarked model w s (l). the easier it is for a detector with access to underlying watermark seed to detect the watermark, the easier it is for a detector without access to the seed to detect it.if the detector correctly distinguishes between positive and negative distributions is at most 1 2 + p, we can use the bound fromsadasivan et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/915.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/915.txt new file mode 100644 index 0000000000000000000000000000000000000000..e86c32e6eeda325b9f4d41bfd3c647bc053ea88d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/915.txt @@ -0,0 +1 @@ +artificial intelligence (ai), and in particular machine learning (ml) techniques, have been used in decisionmaking systems which typically rely on historical data for training. however, the data may contain biases against groups or individuals with certain characteristics, which can lead to discriminatory or unfair decisions. such decision-making systems are used in both private and public sectors. organisations are increasingly relying on these systems to reduce workload and free up resources (e.g. deloitte, 2021; engin and treleaven, 2019), and local governments, in particular social services,1 have deployed systems to predict a score of how at risk a child is of neglect or abuse (church and fairchild, 2017). 
the potential harmful impact of these systems is therefore immense. there have been several recent examples of unfair decisions made by machine learning models in different domains, such as criminal justice (partnership on ai, 2019), recruitment, 2 and social services (gillingham, 2019). algorithms used to predict if criminals will re-offend are used across many us states, and yet an analysis of a popular tool, compas (northpointe inc., 2019), showed that black defendants were incorrectly identified as re-offending at a higher rate than white defendants (larson et al., 2016). a recruitment tool used by amazon was shown to be biased against women (jeffrey dastin, 2018), reinforcing historical biases due to the small sample size of women who had previously been hired. ensuring ai systems are fair to individuals and communities is an important cross-disciplinary issue which must consider the context and application of the systems deployed (waller and waller, 2020). a system is considered to be fair if it does not discriminate based on protected personal characteristics such as race, sex, religion, etc. discrimination may arise from the dataset used to train decision-making systems; specifically, unwanted bias occurs when the system favours or gives advantage to historically favoured groups or its outputs directly correlate with protected personal characteristics. mehrabi et al. (2021) identify several causes of unwanted bias including: i) historical bias, where the training data includes embedded historical social biases; ii) representation bias, where some groups with certain characteristics appear more frequently in the dataset; and iii) measurement bias, which arises from how certain features are measured or used. in this section, we present some key concepts and the different types of bias mitigation methods, as well as the main datasets, fairness and performance metrics used to evaluate the methods, and report on related work. in section 3, we will present the approaches for the three types of bias mitigation methods. pre-processing methods are used to mitigate bias in the training data, and are useful when we have access to the training data, but not necessarily to the machine learning model trained on the data (bellamy et al.). table 1 lists the datasets used to evaluate bias mitigation methods, their number of instances and attributes, and the number of methods they are used in. bias mitigation methods are evaluated by calculating the amount of unwanted bias before and after the methods are applied; this can be achieved using fairness metrics (hort et al.). the filtering process ensured that only papers of high quality and relevance were included in the survey, providing a reliable representation of the state of the art in the field. a limitation of the bias mitigation methods collated is that they cannot be directly compared, because they are evaluated using different fairness metrics and different models trained on datasets of different sizes and distributions.
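as an illustration of this evaluation pattern, the sketch below computes one common group fairness metric, statistical parity difference, before and after a hypothetical mitigation step; the column names and toy data are assumptions, not taken from any dataset in the survey.

```python
# statistical parity difference: mean positive-classification rate for the
# unprivileged group minus that of the privileged group; closer to zero means
# less unwanted bias with respect to the sensitive attribute.
import pandas as pd

def statistical_parity_difference(df, pred_col, group_col, privileged):
    priv = df[df[group_col] == privileged]
    unpriv = df[df[group_col] != privileged]
    return unpriv[pred_col].mean() - priv[pred_col].mean()

df = pd.DataFrame({
    "sex": ["m", "m", "f", "f", "f", "m"],     # toy sensitive attribute
    "pred_before": [1, 1, 0, 0, 1, 1],          # classifier output, no mitigation
    "pred_after":  [1, 0, 1, 0, 1, 1],          # classifier output after mitigation
})

print(statistical_parity_difference(df, "pred_before", "sex", privileged="m"))
print(statistical_parity_difference(df, "pred_after", "sex", privileged="m"))
```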
the definitions of existing group fairness metrics (found in table 2) only account for a single binary sensitive attribute, meaning methods which use these metrics only allow the removal of unwanted bias with respect to that one attribute. illegal discrimination is measured as the difference between the level of unwanted bias according to an existing group fairness metric and the explained bias, as shown below, where k is the number of explanatory attributes, e_i is a value of the explanatory attribute, and a and b are the values of the sensitive attribute; the corresponding conditional probabilities give the probability that an individual has the explanatory attribute value e_i given they are in the unprivileged group (in the privileged group), respectively; and p*(+|e_i) represents the probability of an individual having a positive classification given it has the explanatory attribute value e_i. the discrimination score is defined in equation 1, where p(d|xy) is the probability of a positive classification given all the attributes, calculated as the proportion of positive classifications when the classifier is trained with all attributes. a limitation of post-processing methods is that they can be easily manipulated to ensure some existing fairness metric is satisfied, for example by swapping the classifications of random individuals to ensure an equal number of positive and negative classifications across sensitive groups. instead of simply using existing metrics to detect unwanted bias or evaluate the impact of bias mitigation methods, we recommend developing methods that focus on transparency and explainability, by explaining the unwanted bias detected in decision-making systems. our survey differs from existing work by critically analysing existing bias mitigation methods and providing recommendations for the development of future bias mitigation methods for binary classification decision-making systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/916.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/916.txt new file mode 100644 index 0000000000000000000000000000000000000000..d38c0bba44572ca7a11f33780fc30220dcd8e9c6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/916.txt @@ -0,0 +1 @@ +automatic speech recognition (asr) powers voice assistants, which use machine learning and other artificial intelligence techniques to automatically interpret and understand spoken languages for conversational purposes. with the advent of breakthroughs such as amazon's alexa and apple's siri, voice assistant technology has increasingly become a widespread technology with diverse applications. however, as these devices gain adoption beyond the demographics of their training data, there is a need for more inclusive and robust ai agents with better spoken language understanding (slu) and accent recognition capabilities 1 . useful conversational agents must accurately capture named entities to minimize errors for downstream tasks. for example, in the command, "play billie jean by micheal jackson", conversational agents need to excel at 3 core tasks: speech recognition, named entity recognition, and entity linking, to appropriately respond to commands.
the asr component of the system must correctly transcribe the speech, laying a good foundation for named entity recognition (ner), which is, in turn, necessary for effective entity linking. however, in the command "play 'trouble sleep yanga wake am' by fela anikulapo kuti"2 spoken by a nigerian with a thick yoruba accent, the phonetic and linguistic variability of the heavily accented speech presents a double dilemma for such systems. firstly, the heavy accent and tonality can be difficult for the system to recognize, and secondly, the use of out-of-vocabulary words can confuse the model, making it nearly impossible for the system to generate a correct response. siri responds "i couldn't find 'trouble sleep younger we' by fela and kolapo coochie in your library", effectively "butchering"3 the name, typifying the failures of similar agents on out-of-distribution named entities. more examples in table 1. we hypothesize that the underrepresentation (and sometimes complete lack) of african named-entities in their training data may partly explain the model bias and eventual "butchering" of african names by many voice assistants and conversational agents. our contributions are as follows: 1. we investigate the performance of state-of-the-art (sota) asr models on african named-entities. to do this, we design an effective strategy to evaluate asr models on speech datasets with no prior ner annotations. our study highlights the failure of existing sota and commercial asr models on samples with african named-entities. 2. we develop a data augmentation strategy to increase the representation of african-named entities, creating a novel speech corpus rich in african named-entities, and show that by fine-tuning pre-trained models on the augmented accented data, we significantly improve the ability of pre-trained models to recognize african named entities. we open-source the dataset and fine-tuned models4. we develop a data augmentation strategy to increase the representation of african-named entities, creating a novel speech corpus rich in african named-entities, and show that by fine-tuning pre-trained models on the augmented accented data, we significantly improve the ability of pre-trained models to recognize african named entities. western vs african-named entities: we use the term "western named entities" to refer to names that are commonly used in western cultures and languages, such as laura and buenos aires, and that may not have direct translations in african languages5. we employ a pre-trained named entity recognition (ner) model r1 to extract named entities (nes) from y^e_1, resulting in the predominantly western named entity list e1. to inject african named entities, we mask tokens in randomly selected samples from y^e_1 that match the entities in e1. we then randomly insert tokens from a curated african named-entity list e2 to replace the masked tokens in y'^e_1, creating an augmented dataset d2 with modified transcripts y^e_2. these transcripts are sent to african crowd-sourced workers for recording, resulting in a new corpus named d'_2 with augmented pairs {(x^e_2, y^e_2)}. this novel dataset comprises accented audio samples and augmented transcript pairs, combining distributions from d1 and d2 with anglo-centric named entities e1 and african named entities e2. next, we use a specialized ner model r2 to annotate all western and african named entities (called e3) present in d2.
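the masking-and-substitution step described above can be illustrated with a short sketch; the ner pipeline, the tiny name list and the example command are assumptions for illustration, and the actual procedure additionally sends the augmented transcripts out to crowd-sourced workers for re-recording.

```python
# swap named entities detected in a transcript with entries from a curated list
# of african named entities (a stand-in for the r1 extraction + masking step).
import random
from transformers import pipeline

ner = pipeline("ner", aggregation_strategy="simple")  # generic pre-trained ner model
african_entities = ["Chinedu", "Abuja", "Fela Anikulapo Kuti", "Ngozi"]  # toy list

def augment_transcript(text):
    spans = ner(text)
    out, last = [], 0
    for span in spans:
        out.append(text[last:span["start"]])
        out.append(random.choice(african_entities))  # substitute an african entity
        last = span["end"]
    out.append(text[last:])
    return "".join(out)

print(augment_transcript("Play Billie Jean by Michael Jackson in Buenos Aires"))
```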
this ne subset d3 (called afriner) contains accented speech x^e_3 and corresponding transcripts y^e_3 with named entities extracted from both y^e_1 and y^e_2. additionally, using the curated african ne list e2, we also filter y^e_3 to create d4, confirmed to contain african nes (called afrival). we curate a list e2 of approximately 100k african names using a database of 90,000 african names, 965 nigerian igbo names, and 1,000 african names obtained 1 . existing sota asr models, such as wav2vec2, therefore generalize poorly to african named entities e2 (table 1). we compare 4 model categories: (1) monolingual models pre-trained or finetuned exclusively on predominantly western transcripts, western english speech, and western named-entities; (2) multilingual models pre-trained on transcripts from multiple domains, western and accented speech, but with minimal amounts of african named-entities; (3) commercial asr apis; and (4) ours, finetuned on western and african-named entities paired with audios in accented african english. ground truth transcripts y2 contain e1 and e2 entities, jointly called e3. to extract all samples in y2 with nes in e3, we run ner inference on all test samples in y2 using a specialized performant ner model r2 7 that jointly predicts the set of african and western named entities e3. for selected pre-trained and commercial asr models m^e1_1, as well as fine-tuned models m^e2_1, we evaluate wer and cer on samples containing one or more named entities and present single-run results in table 3. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/917.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/917.txt new file mode 100644 index 0000000000000000000000000000000000000000..b049feee96730946357ce780b87d025cea3cb409 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/917.txt @@ -0,0 +1 @@ +public concern mounts as a few of the nation's biggest tech players-google, amazon, facebook, and twitter-dominate the nation's flow of information and online commercial transactions. they facilitate all manner of human activity, for good and for ill, and hold the power to track our movements, guide purchasing decisions, regulate the flow of information, and shape political discourse. yet, all the while, as private entities, they are free to exercise these powers behind closed doors and, as online rather than physical-world entities, they enjoy immunity from some of the rules that govern their analog counterparts. with great power has come great controversy. most recently, twitter and facebook have faced criticism for their decisions to disable former president trump's media accounts 1 and to restrict access to a series of stories published by the new york post about 2020 democratic presidential candidate joe biden's son, hunter. 2 the decisions are just a few in a stream of high-profile disputes. last year, facebook was criticized for its decision not to remove a video of speaker of the house nancy pelosi that had been edited to make her appear drunk and confused. 3 and for years now, a debate has been simmering 4 about how the government should respond to various bad-actor websites like those that aid terrorists, facilitate unlawful gun sales, 5 and profit from child abuse and sex trafficking.
6 section 230 immunizes online entities against lawsuits related to content created by their users or other third parties: the law promotes "decency" on the internet by allowing online entities to censor "obscene, 2.toward textual internet immunity 3 lewd, lascivious, filthy, excessively violent, harassing, or otherwise objectionable" content without fear of being "treated as the publisher or speaker" of-and held liable-for whatever content they fail to censor; 8 and the law promotes freedom of expression by guaranteeing online entities' ability to relay and host the massive volumes of tweets, snaps, likes, and old-fashioned emails that flow into their systems without incurring liability for their contents. and, despite its publication-centric roots, 11 section 230 now insulates online entities from liability for all manner of lawsuits, 12 including product-defect claims-such as the one brought against snapchat for the design of the app's speed filter, 13 which resulted in many accidents by teenage drivers-and claims against online marketplaces, like the sex-trafficking conspiracy claim brought against the website backpage."22in particular, he questioned courts' application of section 230 immunity even to platforms that leave content on their sites that they know to be unlawful; to those that seek out and curate unlawful content for their sites; and to claims outside the publishing context, such as those related to defective products.23sensing a gap between congress's words and current internet immunity doctrine, justice thomas urged the court in a future case to consider whether "the text of aligns with the current state of immunity enjoyed by internet platforms. such has been the course of internet immunity doctrine under section 230, whose evolution over the last twenty years has turned the small, unheralded provision attached to the much more comprehensive communications decency act into what can now be fairly called the lynchpin of modern internet law. an entity can claim immunity under the statute for hosting unlawful content even if, rather than slipping through the cracks, the unlawful content is the result of an entity not engaging in any censorship of objectionable material at all. courts around the country were concerned that free expression would suffer unless they granted broad section 230 immunity, even to entities with actual knowledge of unlawful content. 13, 15 (explaining that courts have interpreted section 230 to confer immunity "even when a company distributes content that it knows is illegal"); see, e. 2003) (easterbrook, j." to avoid this problem and thereby further a policy of "freedom of speech in the new and burgeoning internet medium," early courts granted broad immunity under section 230 to any claim implicating an entity's "exercise of a publisher's traditional editorial functions-such as deciding whether to publish, withdraw, postpone or alter content" even when the entity is made aware that the content is unlawful. 
685, 686 (2016) (per curiam) (reversing idaho supreme court decision that it was not bound to follow the supreme court's interpretation of a federal statute and explaining that it is the supreme "court's responsibility to say what a federal statute means" and "it is the duty of other courts to respect that understanding" (citation and internal quotation marks omitted)), with owsley v., concurring) ("either federal supremacy nor any other principle of federal law requires that a state court's interpretation of federal law give way to a (lower) federal court's interpretation." (citation and internal quotation marks omitted)).c. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/918.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/918.txt new file mode 100644 index 0000000000000000000000000000000000000000..e679fbb807555f5261825a21ead3fb10c5d38b22 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/918.txt @@ -0,0 +1 @@ +the internet has transformed nearly every facet of human activity, from how we learn, work, and communicate to how we shop, plan travel, invest, socialize, and even hold garage sales. the virtual world has become so expansive that it now operates in nearly perfect parallel to the real world, with a specialized website or app available to accomplish any physical-world task imaginable. looking to round out your summer wardrobe or track down the latest smart watch? head to the local shopping mall. or to amazon.com-either will work. need to buy or sell secondhand kids' clothes? try the local flea market, or ebay, or, if you are feeling fancy, poshmark. the internet is now so diversely populated that we scarcely notice and hardly care whether the entities with which we interact operate in the physical or virtual world.but the question has dramatic consequences for the entities themselves. even when they engage in exactly the same conduct, online and offline entities are subject to completely different legal regimes. unlike their physical-world counterparts, online entities are immune from many types of state and federal lawsuits-specifically, those that relate to content created by third parties-under section 230 1 of the communications decency act ("cda") of 1996. 2 as a result, online entities have been able to operate unlawful businessesfacilitation of prostitution and unlawful gun sales to name just twofree from the threat of civil liability that they would face in the physical world. 3 and even those online entities with more salutary business models enjoy a competitive advantage over their physical-world counterparts. immunity from lawsuits means lower litigation costs and fewer judgments and damage awards for plaintiffs.why bar behavior in the physical world only to allow that same behavior in the virtual world, where it may be even easier to accomplish? the question sounds simple enough. it is one of law's central tenets, after all, that like cases be treated alike. even so, few topics rouse such passion or touch so many ongoing debates as the question of internet immunity. every day brings fresh controversy, call for change, or proposed legislation. from cyberbullying, 4 online governance, 5 and freedom of expression, 6 to big-tech antitrust concerns, 7 pri-1 47 u.s.c. § 230.2 pub. l. no. 104-104, tit. v, 110 stat. 56, 133-43 (codified in scattered sections of 18 and 47 u.s.c.). 
3 see infra sections i.a-.b; see also, e.g., alina selyukh, section 230: a key legal shield for facebook, google is about to change, npr (mar. 21, 2018, 5:17 pm), https://www.npr.org/ sections/alltechconsidered/2018/03/21/591622450/section-230-a-key-legal-shield-for-facebookgoogle-is-about-to-change (discussing how section 230 shielded backpage from civil liability for facilitation of child sex trafficking). 4 see erica goldberg, free speech consequentialism, 116 colum. l. rev. 687, 744-45 (2016) (noting that current internet immunity doctrine bars claims against online entities for revenge porn and other forms of cyberbullying); andrew gilden, cyberbullying and the innocence narrative, 48 harv. c.r.-c.l. l. rev. 357, 389-90 (2013) (critiquing proposals to narrow online immunity to protect gay teens from harassment on ground that such efforts obscure the power of individual agency). 5 see jack m. balkin, free speech in the algorithmic society: big data, private governance, and new school speech regulation, 51 u.c. davis l. rev. 1149, 1182-93 (2018) (discussing platforms' role as regulators of free speech in digital era); jennifer daskal, speech across borders, 105 va. l. rev. 1605, 1637-44 (2019) (discussing geographic scope of online platforms' content-filtering determinations and implications for territorial sovereignty); kate klonick, the new governors: the people, rules, and processes governing online speech, 131 harv. l. rev. 1598, 1599-1613 (2018) (tracing the ability of private platforms like facebook to make contentmoderation decisions regarding user-submitted content to section 230); frank pasquale, two narratives of platform capitalism, 35 yale l. & pol'y rev. 309, 316-19 (2016) (offering two possible narratives of the distributed online platform and implications for each on regulatory and self-governance policy decisions); see also david r. johnson & david post, law and borders-the rise of law in cyberspace, 48 stan. l. rev. 1367, 1367 (1996) (arguing just prior to section 230's enactment that internet regulation would require its own distinct principles); lawrence lessig, commentary, the law of the horse: what cyberlaw might teach, 113 harv. l. rev. vacy, 8 and tort liability, 9 the effects of internet immunity law are as wide ranging as the internet itself. 501, 502 (1999) (arguing that the study of cyberlaw can illuminate principles that affect the real world). 6 see danielle keats citron, extremist speech, compelled conformity, and censorship creep, 93 notre dame l. rev. 1035, 1036-40 (2018) (exploring departure of online platforms from u.s. first amendment values and dangers of bowing to international pressure to self-regulate); danielle keats citron & helen norton, intermediaries and hate speech: fostering digital citizenship for our information age, 91 b.u. l. rev. 1435, 1453-84 (2011) (noting that section 230 insulates platforms from legal liability and offering proposals for online platforms to voluntarily respond to online hate speech); eric goldman, why section 230 is better than the first amendment, 95 notre dame l. rev. reflection 33, 36-46 (2019), https://scholarship.law.nd.edu/ndlr_online/vol95/iss1/3 (discussing section 230's enhanced substantive and procedural protections for online entities beyond those of the first amendment); rebecca tushnet, power without responsibility: intermediaries and the first amendment, 76 geo. wash. l. rev. 
986, 1009 (2008) (arguing that section 230 immunity should include a corresponding limit on an intermediary's ability to censor speech); felix t. wu, collateral censorship and the limits of intermediary immunity, 87 notre dame l. rev. 293, 295-96 (2011) (noting speech-enhancing effects of section 230 due to its preventing imposition of liability on intermediaries for harmful or offensive speech that they might otherwise censor). 7 see c. scott hemphill, disruptive incumbents: platform competition in an age of machine learning, 119 colum. l. rev. 1973, 1974-93 (2019) (identifying potential sources of competition among dominant participants in online platform market and offering proposals to maximize competition); lina m. khan, the separation of platforms and commerce, 119 colum. l. rev. 973, 1037-92 (2019) (proposing bars on entities' engaging in new lines of business as a check on dominance of small number of tech firms); lina m. khan & david e. pozen, a skeptical view of information fiduciaries, 133 harv. l. rev. 497, 527-28 (2019) (noting google and facebook's capture of digital advertising market in united states and resultant effects on traditional publishing industry). 8 see danielle keats citron, sexual privacy, 128 yale l.j. 1870, 1952-53 (2019) (proposing modification to section 230 immunity to spur platforms to action to protect against revenge porn and other invasions of sexual privacy); bobby chesney & danielle citron, deep fakes: a looming challenge for privacy, democracy, and national security, 107 calif. l. rev. 1753, 1755-59, 1795-804 (2019) (describing rising danger to privacy and security posed by advances in technology for creating deep fakes and noting that section 230 limits legal recourse against online entities that distribute such fakes). 9 see ann bartow, internet defamation as profit center: the monetization of online harassment, 32 harv. j.l. & gender 383, 384 (2009) (tracing rise of commercial reputation defense services to lack of traditional avenues of recourse to respond to online harassment); danielle keats citron, mainstreaming privacy torts, 98 calif. l. rev. 1805, 1836-44 (2010) (sketching vision for new era of privacy law and noting barrier that section 230 poses to tortious enablement claims against online entities); danielle keats citron & benjamin wittes, the problem isn't just backpage: revising section 230 immunity, 2 geo. l. tech. rev. 453, 455-56 (2018) (proposing that online immunity be narrowed to allow claims against online entities that do not take reasonable steps to address unlawful third-party content); benjamin edelman & abbey stemler, from the digital to the physical: federal limitations on regulating online marketplaces, 56 harv. j. on legis. 141, 143 (2019) (noting bar that section 230 poses to regulation of modern online marketplaces); olivier sylvain, intermediary design duties, 50 conn. l. rev. 203, 203 (2018) (suggesting that online immunity doctrine be updated to consider manner in which online entities elicit and use their users' data).the reason that internet immunity reform rouses such passion is because the stakes are so very high. since its enactment more than two decades ago, section 230 has been a resounding success. its broad protections against lawsuits related to third-party content shield online entities from an economically crippling duty to review the nearly inconceivable volume of data that flows through their systems. 
10 without such protection, online platforms might be compelled to censor user speech or disallow online posting altogether to avoid the risk of liability. section 230's protections have been crucial to decades of free speech advances built on inexpensive and free flowing internet publishing technologies. 11 despite its nearly sacred status in the tech industry, however, section 230 started off humbly, 12 and suffers from a humble problem: the congress of 1996 did not foresee the internet of 2020, and the statute is now outdated. section 230 assumes a publication-industrylike model of the internet-it encourages censorship and speaks in terms of "publisher or speaker" and "content provider" 13 -and is well suited to govern the internet's information repositories and communications channels. 14 online actors in this publication-centric internet model can be handily divided into three camps: content authors, 15 computer services 16 that provide access to content, and the internet users who consume the content. 17 with the virtual world so neatly divisible, potential wrongdoers are easy to identify. any wrongdoing can be attributed to its active participants-the internet content creators who author content-not the passive computer services and 10 see infra note 91 and accompanying text. 11 see infra notes 92-94 and accompanying text. 12 section 230 was enacted as part of the much more comprehensive cda and received little fanfare. few media outlets included any discussion of section 230 in their coverage of the cda. see jeff kosseff, the twenty-six words that created the internet 66-68 (2019). 13 47 u.s.c. § 230(c). 14 see infra section i.c.3. 15 content authors or "information content providers" are defined by section 230 as "any person or entity that is responsible, in whole or in part, for the creation or development of information provided through the internet or any other interactive computer service." 47 u.s.c.§ 230(f)(3). 16 section 230 defines an "interactive computer service" as "any information service, system, or access software provider that provides or enables computer access by multiple users to a computer server, including specifically a service or system that provides access to the internet and such systems operated or services offered by libraries or educational institutions." id.§ 230(f)(2). the archetypical computer services at the time of section 230's enactment were the internet service providers prodigy, compuserve, and, later, america online. see kosseff, supra note 12, at 36. 17 their users who merely provide access to and view that content. with section 230, congress federalized the principle by adopting a bright line rule that none but an "information content provider" can be held liable for internet content. 18 but publication is not the internet's exclusive function, and that is even more the case now than it was in 1996. section 230 now presides over a much different internet than the one it was designed to govern. the internet of today is much more diverse, specialized, and interactive. of course, it continues to include many online platforms that transmit and host third-party content, but it also supports the delivery of innumerable real-world goods and services that would have been unimaginable twenty years ago. 19 authoring or failing to moderate content flowing through their services is not the only way that online entities can cause harm. 
20 consequently, section 230's bright line rule relying on content authorship as the deciding factor for immunity is poorly tailored for the internet that exists today.recognizing the growing problem, legal scholars and lawmakers of both political parties have proposed numerous reforms, 21 such as amending section 230 to withhold immunity if online entities intentionally or knowingly facilitate illegal conduct, 22 profit from unlawful content, 23 improperly monetize user data, 24 or fail to abide by best practices in policing the online behaviors of their users. 25 internet im-munity reform continues to face significant opposition, however, from the tech industry and from those legal scholars 26 who worry that changes to section 230 could do more harm than good. changes to the statute could curtail freedom of expression on the internet and spur online platforms to censor user speech by undermining the critical protection section 230 provides against lawsuits for failing to moderate third-party content.to navigate those competing concerns, this article proposes a carefully tailored refinement to internet immunity doctrine that would expressly bar any claim that would impose a content-moderation burden on an internet platform but would allow other claims to proceed. thus, where an alleged harm is preventable by a means other than content moderation-for example, by redesigning an app or websitea plaintiff could freely seek relief, just as in the physical world. by shifting the internet-immunity inquiry from the publication-focused question of content authorship to the more generally applicable question of content moderation, this approach would arm courts with a more flexible analytical tool and free them from section 230's outdated publication-focused model. the approach would empower courts to eliminate the online-versus-offline disparity and treat like conduct alike in both the physical and virtual world this article approaches the problem in three parts. part i uses a mass shooting wrongful death case decided by the wisconsin supreme court as a case study to analyze the disparity in the law governing internet versus real-world defendants and to illustrate the harms that can flow from immunizing certain online behavior. part ii examines various contexts in which section 230 is poorly suited to govern the modern, heterogenous internet. finally, part iii presents a new frameuse the threat of its removal to pressure entities into action to address concerns about large tech companies' concentrated power over speech and commerce. see online freedom and viewpoint diversity act, s. 4534, 116th cong. § 2 (2020) (removing liability protection where an online entity moderates third-party content unless it does so with an "objectively reasonable belief" that the content is obscene, lewd, lascivious, filthy, excessively violent, harassing, "promoting self-harm, promoting terrorism, or unlawful"); ending support for internet censorship act, s. 1914, 116th cong. (2020) (prohibiting content moderation from politically biased standpoint, to be enforced by federal trade commission ("ftc") audit confirming neutral censorship practices as condition of immunity); see also preventing online censorship, exec. order no. 13925, 85 fed. reg. 34,079, 34,080 (may 28, 2020) (stating that section 230 does not immunize online "behemoths . . . 
when they use their power to censor content and silence viewpoints that they dislike" and directing the federal communications commission ("fcc") to consider rulemaking that would interpret section 230 to immunize entities from claims related to content-moderation decisions only when they fall within section 230(c)(2)'s good samaritan provision). 26 goldman, supra note 6, at 36-46 (describing section 230's substantive and procedural advantages to online entities and opposing reform efforts that could affect those advantages).work for evaluating an online entity's entitlement to immunity, explores some of the obstacles to reform, and discusses how this article's proposed framework attempts to navigate those obstacles. first amendment values and dangers of bowing to international pressure to self-regulate); danielle keats citron & helen norton, intermediaries and hate speech: fostering digital citizenship for our information age, 91 b. tech.recognizing the growing problem, legal scholars and lawmakers of both political parties have proposed numerous reforms, 21 such as amending section 230 to withhold immunity if online entities intentionally or knowingly facilitate illegal conduct,22profit from unlawful content, 23 improperly monetize user data, 24 or fail to abide by best practices in policing the online behaviors of their users.congress enacted section 230 of the cda to promote "decency" on the internet and address the problem of children's unrestricted access to pornography and other offensive material on the internet by allowing online entities to censor such content without fear of being held liable for whatever content they failed to censor.91by immunizing online entities against lawsuits related to third-party content, section 230 ensures that the costs of moderating user-created content do not stifle the growth of internet platforms. in particular, courts noted that print media distributors generally are not liable for defamatory content within their pages because requiring them to scour the pages of books and newspapers they sell would be an impossible burden."98congress made that principle part of federal law by enacting section 230.97§ 230(c)(2) (immunizing "good samaritan" entities from liability predicated on content-filtering efforts).(2) providers of "interactive computer service," 111 such as com-puserve, prodigy, and, today, verizon and comcast, that provide users with the internet access necessary to read that informational content; and (3) users of "interactive computer service," 112 the internet users who purchase internet access from an internet service provider and use that access to consume informational content made by online content creators.despite these dramatic changes to the internet landscape, online entities' liability 139 for third-party conduct is still governed by a bright line rule designed for the internet of 1996: content creators may be sued, but online entities that use that content are immune., 153 the plaintiff argued that snapchat acted negligently by failing to redesign its speed filter despite numerous and widespread reports of motor vehicle accidents caused by teenage drivers using the speed filter while driving at high speeds to show off to their friends. 
on the publication-centric internet of 1996, internet wrongdoers could be identified with a simple test: did the entity author content?223designing an immunity rule for today's internet is much more difficult, where it must govern not only traditional content intermediaries but also the full gamut of other human social and economic activity, including digital product manufacturers, online marketplaces, and the harms that come with them. for example, creating a carve out from section 230 immunity for intentional or knowing wrongdoing would still leave unaddressed section 230's special treatment of online entities facing reasonable care or strict liability claims, such as ordinary negligence or products liability. however, unlike changes to section 230's substance-that is, changes that would require entities to moderate user-created content-measured changes to section 230's procedural efficiency would increase costs but not threaten the viability of internet entities' basic business models. last twenty years,244and so too has the scope of the need for contentmoderation immunity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/919.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/919.txt new file mode 100644 index 0000000000000000000000000000000000000000..bd33ed6c2a21686bcf2f6e235ca11a2f3ef06dab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/919.txt @@ -0,0 +1 @@ +blockchain technologies have exploded into a thriving industry, with thousands of protocols and projects. the industry is based on the principle of decentralised transfer of data and digital assets, secured and intrinsically coupled with its cryptography. one of the major applications of blockchain technologies is on cryptocurrencies . cryptocurrencies aim to replicate many of the features of fiat currency, while adding some features specific to blockchain technology, such as decentralisation, anonymity, non-repudiation, and high availability backed up by state-of-the-art cryptographic technology. while the cryptocurrency market is highly volatile, it has largely trended upward, since its introduction in 2008. in april 2021, bitcoin alone was worth over one trillion us dollars (10 12 usd).one of the underlying protocols that make a blockchain work is that of consensus. a consensus protocol allows hundreds or even millions of blockchain users to agree on how to add a new block-essentially a chunk of data representing transactions-to the blockchain. by far, the most widely used consensus protocol in cryptocurrencies today is proofof-work (pow) .in a pow-based blockchain, any user wishing to add a block to the chain must first solve a computationally difficult numerical problem (work) and provide the solution (proof) to the entire network. this process is called mining. the term miner is used to denote both the agent/user, and the hardware, involved in the mining.one major criticism of pow based blockchain technologies is their excessive energy consumption . this high energy usage is currently having a notable impact on global carbon emissions .the extent of this energy consumption is well documented . at the time of writing cambridge bitcoin electricity consumption index (cbeci) calculates bitcoins energy consumption to be 0.57% of the worlds total produced energy. this is 126.7 twh per year, which is more than norway (124.3 twh per year). 
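to make the mining loop described above concrete, the toy sketch below searches for a nonce whose sha-256 hash falls under a target; it is a simplified illustration of proof-of-work, not the actual bitcoin block-header hashing or difficulty encoding.

```python
# Toy proof-of-work: find a nonce such that sha256(block_data + nonce) < target.
import hashlib
import time

def mine(block_data: str, target: int) -> tuple[int, str]:
    """Return the first nonce whose hash, read as an integer, is below target."""
    nonce = 0
    while True:
        digest = hashlib.sha256(f"{block_data}{nonce}".encode()).hexdigest()
        if int(digest, 16) < target:
            return nonce, digest
        nonce += 1

if __name__ == "__main__":
    # target chosen so a hash needs roughly 5 leading hex zeros (about 16**5 tries on average)
    target = int("0" * 5 + "f" * 59, 16)
    start = time.time()
    nonce, digest = mine("block #42: alice pays bob 1 coin", target)
    print(f"nonce={nonce} hash={digest} time={time.time() - start:.1f}s")
```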
this is the energy cost of keeping just one of many cryptocurrencies running (bitcoin).given the current energy costs, and general upward trend, the need for more energy efficient cryptocurrency mining is clear. enter quantum technologies.quantum computers, and their theoretical advantages are very well studied . one advantage is in the time efficiency of quantum computers, over their classical counterparts, for various computationally difficult numerical problems. this general advantage applies to the particular case of pow mining .in this paper, however, we are interested in the energy efficiency advantages. quantum computers derive an energy efficiency advantage from two different sources. the first is the time-efficiency advantage already mentioned: if a device can perform the same task, in a shorter time-period, this will often convey an energy advantage as well.the second is slightly more subtle. quantum computation requires (mostly) unitary evolution. unitary evolution, by its nature, is reversible, and hence, to a large extent, energy neutral. at it's theoretical limit, a quantum cryptocurrency miner could potentially expend little to no energy.achieving this landaeur-limited efficiency is unrealistic. in a real world setting, implementing and running a quantum miner would entail unavoidable inefficiencies. it would be very difficult to account for all engineering decisions and inefficiencies, individually.however, we can make a meaningful prediction of the energy-savings achieved by quantum miners if we make one fairly reasonable assumption: that the inefficiency of a quantum miner-that is, the amount of energy consumed, and heat expelled, over its theoretical optimum-is within an order of magnitude of a classical miner's inefficiency.the above assumption is, on its face, a reasonable one, and allows us to predict an actual energy consumption number for quantum miners. that said, we can take an extra step, and ask just how much more inefficient a quantum miner would have to be, before it lost its advantage over classical mining systems altogether.finally, we don't just study the potential of quantum miners, we in particular study the potential of noisy intermediate-scale quantum (nisq) based cryptominers. this is notable for two reasons. first, being able to implement cryp-tominers on nisq hardware, rather than having to wait for full-blown scalable fault-tolerant quantum computers, would greatly accelerate the possible adoption of quantum miners. the second is that since even before the coinage of the term, the search for practical applications of nisq technology has been an important ongoing one.in the next section we summarise our findings, and in the section afterwards we describe in detail how we derived them.. while the analysis that follows works for any pow-based cryptocurrency, for the sake of exposition we will focus on the bitcoin network.however, we can make a meaningful prediction of the energy-savings achieved by quantum miners if we make one fairly reasonable assumption: that the inefficiency of a quantum miner-that is, the amount of energy consumed, and heat expelled, over its theoretical optimum-is within an order of magnitude of a classical miner's inefficiency.the above assumption is, on its face, a reasonable one, and allows us to predict an actual energy consumption number for quantum miners. 
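the sketch below turns that reasoning into numbers: it compares the expected classical query count with grover's quadratic reduction, prices classical queries at an assumed asic-like energy per hash, and prices the quantum side at the landauer limit times an assumed per-query erasure count and inefficiency ratio. all parameter values are illustrative stand-ins, not the paper's figures.

```python
# Illustrative energy comparison of classical vs. Grover-based mining.
import math

K_B = 1.380649e-23                      # Boltzmann constant, J/K
T = 300.0                               # assumed operating temperature, K
LANDAUER_J_PER_BIT = K_B * T * math.log(2)   # minimum energy to erase one bit

p_success = 1e-22                       # assumed per-hash success probability
classical_j_per_hash = 2e-11            # assumed ASIC-like efficiency, ~2e-11 J per hash

# expected classical hashes vs. Grover queries to find one valid nonce
classical_queries = 1.0 / p_success
grover_queries = (math.pi / 4.0) * math.sqrt(1.0 / p_success)

classical_energy = classical_queries * classical_j_per_hash

# idealised quantum cost: only erased bits pay the Landauer price; scale by an
# assumed erasure count per query and an overall inefficiency ratio.
erased_bits_per_query = 1e4             # assumption: readout / error-correction overhead
inefficiency_ratio = 1e3                # assumption: device is 1000x worse than the floor
quantum_energy = (grover_queries * erased_bits_per_query
                  * LANDAUER_J_PER_BIT * inefficiency_ratio)

print(f"classical queries: {classical_queries:.2e}, grover queries: {grover_queries:.2e}")
print(f"classical energy per block: {classical_energy:.2e} J")
print(f"quantum energy per block (assumed overheads): {quantum_energy:.2e} J")
```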
first, being able to implement cryp-tominers on nisq hardware, rather than having to wait for full-blown scalable fault-tolerant quantum computers, would greatly accelerate the possible adoption of quantum miners.the classical miner we are comparing our quantum miners to is a current state-of-the-art asic cryptocurrency miner: an antminer s19 xp, asic miner. even with a fairly severe error-correction overhead of a two-layer concatenated shor code, a quantum miner still massively outperforms a classical miner, consuming 2.a reasonable question to raise would be, what if this assumption does not? tableiisummarises just how much worse the efficiency of a quantum miner (again, compared to its theoretical optimum) would have to be in order for it to com- it is worth pausing on this for one moment. non-ecc quantum devices are often discounted, in comparison to ecc quantum devices, for one reason and one reason only: lack of scalability.we have analysed three different quantum mining architectures, as mentioned above: a one-layer shor-code errorcorrected quantum miner, a two-layer shor-code one, and a non-ecc quantum miner. on each of three architectures we run a quantum mining algorithm-essentially grover's search algorithm on the appropriate search space-such that the quantum miner achieves the same probability of mining a block per block cycle as a current classical device. for the quantum miners the ratios are calculated so that the energy costs of each miner equate that of the classical miner. this is calculated by: m = maxtarget di f f iculty , where maxtarget and di f f iculty are two blockchain-dependant parameters that are tweaked over time, in order to maintain a constant block time, or average time it takes to add a block to the blockchainwhich in the case of bitcoin is 10 minutes.1159814903 × 10 10 given that every error-correction step in the shor code requires the measurement of 12 qubits over two rounds of measurement, this means that a total of errasedbits = ecsteps × c n + q bits need to be erased, where c is the number of error correcting measurements per round of error correction, n is the number of layers of error correction and q is the number of qubits erased once the algorithm is completed.using a similar calculation we can derive the optimal lower bound of the energy consumption of an ecc quantum miner using a two-layer shor code. rather than calculate energy costs for a slew of different efficiency ratios, one way to address this question is to calculate just how disparate the quantum efficiency would have to be compared to the classical efficiency, before quantum devices would stop have an energy costs advantage. the search space for the desired nonce is 2 256 on a quantum miner, the search for a particular nonce is done using grover's quantum search algorithm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/92.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/92.txt new file mode 100644 index 0000000000000000000000000000000000000000..68543f94699dc98791df74f227b2dbc5e409190a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/92.txt @@ -0,0 +1 @@ +machine learning helps learn patterns from the data to make predictions. the three basic requirements for machine learning are data, theory, and hardware to overcome computational difficulties using better gpus and new approaches. computational science is an essential tool that we can use to incorporate physical invariances into learning. 
for example, the laws that govern the conservation of momentum, mass, and energy. to quote dr. tinsley oden: "computational science can analyze past events and look into the future. it can explore the effects of thousands of scenarios instead of actual experiments and be used to study events beyond the reach of expanding the boundaries of experimental science". deep learning can be quite useful in the real world, recognising various types of cancer like skin cancer, lung cancer and many more by just passing the necessary images; it also has various applications in agriculture. neural networks (nns) are the most used machine learning technique. they offer a set of robust tools for solving various supervised and unsupervised problems in pattern recognition, data processing, and non-linear control, which can be regarded as complementary to the conventional approaches. a neural network can be considered a function approximator, as it maps a set of input variables to a set of output variables. a set of parameters called weights manages this mapping, and these weights are updated to train the network so that it produces the required output. a polynomial can be viewed as a function that transforms a single input variable into a single output variable; the coefficients in the polynomial are comparable to the weights in a neural network, and determining these coefficients helps in evaluating the solution. the blasius equation is a third-order non-linear ordinary differential equation of the form f''' + (1/2) f f'' = 0 with the boundary conditions f(0) = 0, f'(0) = 0, f'(∞) = 1. this leads to the hydrodynamic solution for the flat-plate boundary layer in a laminar flow, called the blasius solution. the main aspect of the work is the transformation of the pde for a flat-plate boundary layer with zero pressure gradient into a single ordinary differential equation (ode) by considering velocity components that satisfy equation (ii). the stream function ψ = u∞ √(νx/u∞) f(η) is directly proportional to the function f(η), called the blasius function. here, the velocity components are proportional to the first derivative of f(η), and the second and third derivatives of f are proportional to the first and second derivatives of velocity; from equation (2), the final form of the blasius boundary layer equation, f''' + (1/2) f f'' = 0, for a flat plate can be obtained. the first and second derivatives of f(η) are given by f'(η) = u/u∞ (the non-dimensional velocity profile) and f''(η) = (1/u∞) √(νx/u∞) ∂u/∂y (a quantity related to shear stress). the boundary conditions are set considering the laminar flow on a flat plate, the no-slip condition, and free-stream velocity outside the boundary layer. a power-series solution to the boundary-layer equation for flow across a flat plate was proposed by blasius. the blasius solution is self-similar, meaning that the solution is the same if the independent and dependent variables of the governing equations are appropriately scaled. the solution to the blasius equation can be found in, , where accurate benchmark results for the blasius boundary layer problem are obtained using a leaping taylor series that converges for all real values.
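a minimal pytorch sketch of the pinn formulation for this ode is given below: a small network f(η) is trained so that the residual f''' + (1/2) f f'' vanishes on collocation points while the boundary conditions f(0)=0, f'(0)=0 and f'(η_max)=1 are penalised in the loss (the far-field condition is imposed at a finite η_max, a common approximation). layer sizes, η_max and optimizer settings are illustrative choices, not the exact configuration used here.

```python
# Minimal PINN for the Blasius ODE f''' + 0.5*f*f'' = 0.
import torch

torch.manual_seed(0)
net = torch.nn.Sequential(
    torch.nn.Linear(1, 100), torch.nn.Tanh(),
    torch.nn.Linear(100, 100), torch.nn.Tanh(),
    torch.nn.Linear(100, 1),
)

def derivatives(eta):
    """Return f, f', f'', f''' at the points eta via automatic differentiation."""
    eta = eta.requires_grad_(True)
    f = net(eta)
    grad = lambda y: torch.autograd.grad(y, eta, torch.ones_like(y), create_graph=True)[0]
    f1 = grad(f)
    f2 = grad(f1)
    f3 = grad(f2)
    return f, f1, f2, f3

eta_max = 10.0                      # assumed truncation of the semi-infinite domain
optimizer = torch.optim.Adam(net.parameters(), lr=1e-3)

for step in range(5000):
    optimizer.zero_grad()
    # interior collocation points for the ODE residual
    eta_col = torch.rand(256, 1) * eta_max
    f, f1, f2, f3 = derivatives(eta_col)
    residual = f3 + 0.5 * f * f2
    # boundary conditions at eta = 0 and eta = eta_max
    eta_bc = torch.tensor([[0.0], [eta_max]])
    fb, fb1, _, _ = derivatives(eta_bc)
    loss = (residual ** 2).mean() \
        + fb[0, 0] ** 2 + fb1[0, 0] ** 2 + (fb1[1, 0] - 1.0) ** 2
    loss.backward()
    optimizer.step()

# after training, f''(0) should approach the classical Blasius value (~0.332)
_, _, f2_0, _ = derivatives(torch.tensor([[0.0]]))
print("f''(0) ≈", f2_0.item())
```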
unlike standard neural network techniques that approximate the value of f (η) in a heuristic manner from sample output values, pinns obtain a solution function that minimizes the loss function, which is a combination of the differential equation and boundary values.the proposed pinns method can reduce computational time in solving the blasius equation over conventional cfd techniques as it reduces the burden of finding the value of f (0) and then solving the initial value problem. estimated results f (η), f (η), f (η) obtained from pinns and those obtained by numerical methods network constructed with two hidden layers with 100 neurons each is considered for our results and discussions. the values of velocity components can also be calculated from f (η) and f (η) from the graph, it is visually appealing that when η approaches five, the f (η) goes to 1. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/920.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/920.txt new file mode 100644 index 0000000000000000000000000000000000000000..eaf21222c13803729b32531ccf8fb124f7b475a0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/920.txt @@ -0,0 +1 @@ +different from providing the same learning content for all students in each classroom session in traditional learning, adaptive learning aims to tailor different learning objectives to meet the individual needs of different learners (carbonell 1970). existing recommendation methods of learning content can be summarized into two categories: (i) step by step, the following learning item is recommended for students in real-time, and the interaction of each step (i.e., students' answers) will be integrated into the recommendation for the next step (liu et al. 2019;cai, zhang, and dai 2019;huang et al. 2019). (ii) plan a certain length of the learning path for students at one time. the latter is because users sometimes want to know the entire learning path at the beginning (for example, universities need to organize courses for students) (joseph, abraham, and mani 2022;chen et al. 2022; different from providing the same learning content for all students in each classroom session in traditional learning, adaptive learning aims to tailor different learning objectives to meet the individual needs of different learners(carbonell 1970). existing recommendation methods of learning content can be summarized into two categories:(i)step by step, the following learning item is recommended for students in real-time, and the interaction of each step (i.figure1: illustration of the student learning process to improve a student's mastery of concept d by learning the path composed of three concepts a, b, and c. 1995)reveal that cognitive structure greatly influences adaptive learning, which includes both the relationship between items (e.• (c1) how to effectively explore the correlations among concepts? there may be complex and diverse correlations between concepts, such as prerequisite relationship and synergy relationship, which will affect students' learning of concepts(tong et al. • (c2) how to evaluate and optimize the generation algorithm by effectively using the students' learning effect on the target concepts? as shown in figure1, we expect students to achieve the best improvement in the target concept d (machine learning). 
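as a concrete, simplified view of this path-planning problem, the sketch below greedily selects, at each position of the path, the candidate concept that a hypothetical learning simulator predicts will most improve the target concept; this is an illustrative baseline in the spirit of the rule-based strategy discussed later, not the proposed model.

```python
# Greedy learning-path planner over a candidate concept set (illustrative baseline).
from typing import Callable, List, Sequence

def greedy_path(
    candidates: Sequence[str],
    history: List[str],
    target: str,
    n: int,
    expected_gain: Callable[[List[str], str, str], float],
) -> List[str]:
    """Build a length-n path of distinct concepts maximising predicted gain on `target`."""
    path: List[str] = []
    remaining = list(candidates)
    for _ in range(n):
        # score each remaining concept by the simulator's predicted gain on the target
        scores = {c: expected_gain(history + path, c, target) for c in remaining}
        best = max(scores, key=scores.get)
        path.append(best)
        remaining.remove(best)
    return path

if __name__ == "__main__":
    # toy simulator: prerequisites of the target help more than unrelated concepts
    prereq_bonus = {"linear algebra": 0.3, "probability": 0.25, "calculus": 0.2}
    sim = lambda hist, concept, target: prereq_bonus.get(concept, 0.05)
    print(greedy_path(["probability", "art history", "linear algebra", "calculus"],
                      history=[], target="machine learning", n=3, expected_gain=sim))
```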
in contrast, in the stepwise recommendation scenario, immediate feedback can be obtained at the end of each step, which allows some more advanced reinforcement learning (rl) algorithms(sun et al. this module can globally calculate the correlation between each learning concept and other learning concepts in the set so as to get a richer representation of the concept. at the same time, in the decoder module, on the one hand, we use a recurrent neural network to update the state of students; on the other hand, we use the attention mechanism to calculate the correlation between the remaining learning concepts in the set and the target concepts, so as to select the most suitable concept in the current position of the path. another branch(liu et al.where e e and e b represent the student's mastery of the target concepts before and after the path π (which can be obtained through exams), and e sup represents the upper bound of mastery. given a student's historical learning sequence h, target concepts t , and candidate concepts set s, it is required to select n concepts from s without repetition and rank them to generate a path π to recommend to the student. then in the decoder module, we use the recurrent neural network to model the knowledge state of the students along the path and calculate the correlation between the learning concepts and the target concepts through the attention mechanism to determine the most suitable concept for the position. (9), we comprehensively consider student knowledge states, learning concepts, and target concepts to calculate the score of each learning concept under the current step. • rule-based: let the simulator return the learning effect of the target concepts after learning each concept separately in s. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/921.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/921.txt new file mode 100644 index 0000000000000000000000000000000000000000..f396835d9d697dc42b94224d50b179702a9fadec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/921.txt @@ -0,0 +1 @@ +accurate crop yield forecasts can benefit governments, policymakers, and individual farmers by providing better insights into various exogenous drivers that impact the agricultural markets. these insights can lead to earlier responses and better-informed decisions to improve food security at both regional and international scales (becker-reshef et al., 2022). recently, machine learning algorithms have been applied on earth observation (eo) data and have shown a great potential to improve the reliability of these forecasts (basso & liu, 2019).in this paper, we consider the use of eo data collected from the geoglam crop monitor ag-met system (https://cropmonitor.org) and tree-based algorithms to directly forecast wheat yields in kazakhstan, the 10 th largest wheat exporter in the world (fao, 2022). a prominent challenge negatively impacting machine learning models' performance in forecasting yields is the spatial yield heterogeneity due to exogenous factors like local farming practices or crop varietals that are not reflected in remote sensing data. lee et al. (2022) proposed to train a separate model for each province, successfully reducing the state-wise prediction errors. 
however, in our dataset, due to a very small amount of yield data available for each province (typically less than 20 data points), this approach results in highly unreliable and overfit models with error rates far exceeding those of baseline models, as shown in figure 3. to improve upon this issue, we focus on reducing the errors, especially in provinces with the least accurate yield predictions, by using state-wise additive bias. first, we followed the methodologies in sahajpal et al. (2020) to create features from eo data and investigate the performance of various baseline tree-based models, including xgboost, catboost, and random forest, in forecasting wheat yields at the state level. next, each state-wise additive bias was separately added to the model's predictions in each province to obtain the final yield forecast. this approach shows a remarkable increase in overall performance, with the most significant benefits being seen in the province with the highest baseline yield errors (almatinskaya). furthermore, since state-wise bias adds no computational overhead during the inference process, this technique can be efficiently applied to improve yield predictions in other datasets. a prominent challenge negatively impacting machine learning models' performance in forecasting yields is the spatial yield heterogeneity due to exogenous factors like local farming practices or crop varietals that are not reflected in remote sensing data. (2022)proposed to train a separate model for each province, successfully reducing the state-wise prediction errors. however, in our dataset, due to a very small amount of yield data available for each province (typically less than 20 data points), this approach results in highly unreliable and overfit models with error rates far exceeding those of baseline models, as shown in figure3. to improve upon this issue, we focus on reducing the errors, especially in provinces with the least accurate yield predictions, by using state-wise additive bias. first, we followed the methodologies insahajpal et al. we use eo data (ndvi, growing degree days, daily minimum and maximum temperature, soil moisture, evaporative stress index, and precipitation) to as input features for training and evaluating machine learning models and to compute state-wise bias.we trained and evaluated the effectiveness of the state-wise bias by applying this bias to the baseline tree models (xgboost, catboost, and random forest) to forecast wheat yields at the state level in kazakhstan. (2021), and split the remaining data into training (10 years) and validation sets (4 years) for model optimization.the fundamental motivation for computing state-wise bias is that we observed baseline models are often biased toward values close to the mean yields, underestimating high yields in provinces with high productions, as discussed in section 3. although we have incorporated the regional information as categorical data in baseline models, the models still suffer from this bias. 
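a minimal sketch of the state-wise additive bias is shown below: fit one tree-based baseline over all provinces, take each province's mean residual on held-out years as its offset, and add that offset back at prediction time. the model hyperparameters, column names and data handling are illustrative assumptions.

```python
# State-wise additive bias on top of a single tree-based yield model (sketch).
import pandas as pd
from xgboost import XGBRegressor

def fit_with_state_bias(train: pd.DataFrame, valid: pd.DataFrame, features: list):
    model = XGBRegressor(n_estimators=300, max_depth=4, learning_rate=0.05)
    model.fit(train[features], train["yield"])
    # per-province mean residual on the validation years = additive bias
    valid = valid.copy()
    valid["residual"] = valid["yield"] - model.predict(valid[features])
    bias = valid.groupby("province")["residual"].mean()
    return model, bias

def predict_with_bias(model, bias, test: pd.DataFrame, features: list):
    preds = model.predict(test[features])
    # add each row's province offset (0 if the province was unseen in validation)
    return preds + test["province"].map(bias).fillna(0.0).to_numpy()
```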
therefore, state-wise bias is proposed as a simple yet effective technique to alleviate this spatial heterogeneity problem, resulting in a significant decrease in both mape and rmse, as shown in section 3 for each state do.to investigate the effect of state-wise bias, we test various models on different out-of-fold test years and compare the performance with and without state-wise bias.103 mg/ha.xgboost besides baseline models, we also compare our approach with region-specific models, an approach that has been used in several works to forecast crop yieldslee et al.machine learning models are frequently biased toward average yield in the dataset, resulting in higher errors for provinces with crop yields far from the mean, as shown in figure2this issue is exacerbated by the spatial heterogeneity between different provinces/states. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/922.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/922.txt new file mode 100644 index 0000000000000000000000000000000000000000..37c877f9c7bf408331c1226849bf9c9e03298ddc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/922.txt @@ -0,0 +1 @@ +ai capabilities are marching forward. 1 2 3 4 5 large language models (llms) 6 are the locus of the rapid advances. state-of-the-art llms can pass standardised tests 7 and plan, reason, and leverage tools. 8 llms, though, are essentially black boxes, even to their developers. we have little insight into their inner workings and have no guarantees on how an llm will behave on a new task. 9 10 11 best practice is to measure llm performance on a litany of benchmarks before models are deployed beyond the research environment, but these benchmarks are often not real-world tasks we care about, or may have been memorised by the llm during its training. 12 this phenomenon typically arises when the datasets used for training llms, often sourced from the internet, contain the same data used for performance evaluation. the overlap can inflate the estimate of the model's performance, giving an illusion of understanding that could instead be basic recognition. 13 we focus evaluation effort specifically on legal analysis capabilities of llms for three reasons.first, assessing the extent that llms grasp the law can contribute toward governing llms and automated systems more generally. 14 one policy-relevant approach seeks to leverage regulatory reasoning and legal reasoning within llms for "law-informed ai" aligned with societal values as determined by democratic processes and law-making. this "law informs code" approach rests on the established effectiveness of the democratic process in creating adaptive legal standards such as fiduciary duties through iterative debate and litigation. 15 the premise is that learning the spirit of the law can guide ai systems in making reasonable choices in novel scenarios. for instance, llms exhibit an early ability to predict when fiduciary duties are violated, 16 and this capability could power safer ai deployments where an llm-powered system serves a human principal.second, llms can be used as tools for humans to provide legal services more efficiently and effectively, whether that be self-service or through a professional attorney. if the models better understand law, they can be more reliable and ultimately more useful. 
llms can assist in tasks ranging from contract analysis to case prediction, potentially democratising access to legal advice, reducing the cost and complexity for those who might otherwise struggle to navigate the legal system. rigorous safeguards should be put in place as these models are deployed, given the sensitive nature of legal work. this includes increasing data privacy, minimising bias, maintaining accountability for the decisions made with the help of these models, and evaluating the suitability of the llms for any given use case. hence, the need for systematic evaluations.third, if llms understand the law well enough, they could be deployed by the government, citizens, and researchers to identify inconsistencies in existing laws. 17 llms could increase the efficiency and transparency of governments more broadly. for instance, llms can oftentimes provide clear, understandable explanations of complex laws and regulations. eventually, llms may help predict likely impacts of new laws or policies. by scanning vast amounts of legal text and associated implementations, llms could flag potentially "outdated" law, or areas where the law is silent when, in other similar circumstances, the legislature or regulators provide guidance.in this paper, we study retrieval-augmented generation of llms leveraging the text of the u.s. code of federal regulations (cfr) and the u.s. code (a compilation of federal statutes). we test the emerging capability of a suite of llms in understanding tax law.we chose tax law for four reasons. first, unlike some legal subjects where the doctrines are distilled from numerous precedents, the legal authority in tax law is principally concentrated in two sources: the treasury regulations under the cfr and title 26 of the u.s. code (also called the internal revenue code). this allows us to use a fixed universe of potentially relevant documents for the llm's retrieval augmentation. second, many tax rules allow for definitive answers to inquiries. this allows us to set up consistent and automated validation pipelines. third, answering tax law questions for a given scenario usually requires logical reasoning skills and even maths skills beyond just reading the relevant legal authority, enabling us to test llm capabilities in a manner relevant to real-world practice. fourth, tax law is highly significant to the economic lives of nearly every citizen and company on a regular basis.we assess the accuracy of responses generated by llms on thousands of tax law inquiries across experimental setups: the use of the llm alone and the integration of the llm with the underlying legal texts, along with various retrieval techniques (with comparisons made across different retrieval methods). we conduct these experiments across llms, from smaller and weaker models, up through the largest state-of-the-art model, openai's gpt-4. each llm we tested was state-ofthe-art when it was originally released. through examining results across increasingly large models, we find evidence for emerging legal understanding capabilities of llms, improving with each model release. this suggests that we may see the advent of superhuman ai legal skills as the stateof-the-art continues to rapidly advance. 
9 10 11 best practice is to measure llm performance on a litany of benchmarks before models are deployed beyond the research environment, but these benchmarks are often not real-world tasks we care about, or may have been memorised by the llm during its training.we assess the accuracy of responses generated by llms on thousands of tax law inquiries across experimental setups: the use of the llm alone and the integration of the llm with the underlying legal texts, along with various retrieval techniques (with comparisons made across different retrieval methods).for each question, we prompt an llm to pick one of the multiple-choice answers, and we evaluate the llm's performance based on whether it chooses the correct answer.18our approach to retrieval-augmented generation and llm prompting we compare results across retrieval methods, each with its own prompt template that provides different supporting context to the llm; see the appendix for a full example of a prompt template from one of our experimental runs.the second experimental question is whether giving the llm more legal text and more relevant (to the specific question we are asking it) legal text increases accuracy for all (or most) models. as you move from the left to the right in these charts, the experimental setup is providing more and/or more relevant legal text to the llm, starting with the "bypass_retrieval" setup, where we don't give any source material to the llm, to "gold_truth," where we give the most relevant source material to the llm. nevertheless, there is no strong reason to believe that llms could not eventually accomplish a wide range of legal tasks with greater performance, and our work represents a benchmark to track the improvement of llms at legal reasoning. methods that improve llm legal analysis skills are relevant, either by helping ai models "self-police" to ensure they are acting in accordance with law, or by designing separate models that can apply legal and ethical standards to confirm whether another ai is properly aligned with the law.our work also adds to the literature on emergent capabilities of llms by demonstrating the emergence of tax law understanding, which occurs once the llm is of sufficient underlying general capability and is adequately prompted to elicit "reasoning" behaviour.experiment with prompting llms to do scientific tasks across fields like business, science, and health by providing the llm with a research goal and two large corpora, asking the llm for corpuslevel difference.31llms can also improve through introspection.33yao et al. for instance, lu et al. for example, the llm can be prompted with its own answers, and the relevant context, and asked, "are there any ambiguities in this question that make it difficult to answer or for you to doubt your current answer? if so, conduct additional legal research by generating a topic that we need to search legal sources for.finally, future work could compare performance between generally pre-trained llms, such as the openai models in our experiments, and language models specifically pre-trained and fine-tuned for legal reasoning. 
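a minimal sketch of the retrieval-augmented setup is given below: rank passages from a fixed legal corpus by tf-idf similarity to the question and prepend the top hits to a multiple-choice prompt. the retriever, prompt wording, and the placeholder ask_llm call are illustrative assumptions, not the exact pipeline used in these experiments.

```python
# Simple retrieval-augmented prompting over a fixed legal corpus (sketch).
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

def build_retriever(passages):
    vectorizer = TfidfVectorizer(stop_words="english")
    matrix = vectorizer.fit_transform(passages)
    def retrieve(query, k=3):
        scores = cosine_similarity(vectorizer.transform([query]), matrix)[0]
        top = scores.argsort()[::-1][:k]
        return [passages[i] for i in top]
    return retrieve

def answer_question(question, choices, retrieve, ask_llm):
    context = "\n\n".join(retrieve(question))
    options = "\n".join(f"{label}. {text}" for label, text in choices.items())
    prompt = (
        "Use the following legal authority to answer the tax question.\n\n"
        f"Authority:\n{context}\n\nQuestion: {question}\n{options}\n"
        "Answer with the letter of the best choice."
    )
    return ask_llm(prompt)   # placeholder for a call to whichever LLM API is used
```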
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/923.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/923.txt new file mode 100644 index 0000000000000000000000000000000000000000..54790b33a3a63ee26dfa473ec25f461c16f56607 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/923.txt @@ -0,0 +1 @@ +this workshop introduces users to apptainer, a container solution, which could be used for software that requires a specific os setup different from what the cluster uses, or to handle workflows with many files, or for enhanced security.the main goal of this paper is to show what security mechanisms and best practices should be common knowledge for end-users (as well as and the support organization) when using remote resources such as supercomputers or advanced research computing, based on experiences in training users of the supercomputers at the scinet hpc consortium at the university of toronto. we will focus on the following key elements: i) using authentication methods which are more robust and reliable for connecting to remote resources than passwords; ii) concrete practical implementations to be followed when remotely connecting to servers, clusters, supercomputers, or even remote workstations from work, labs, or home; iii) concrete recommendations and tools for users to help protect while working connected to remote systems. beginning users of hpc systems may be aware of security concerns using web-based authentication and access, but access methods to remote hpc resources are often unfamiliar. such operating systems make a distinction between privileged users and regular users, sort users in groups, and maintains ownership and group membership of files and running programs, that can and should be used to control access to files and commands, as there are typically many (regular) users logged in.in a so-called brute force attack, an entity will attempt to get access to a system by systematic attempts to guess user credentials to authenticate in the targeted system, e.in the ssh keys pilot (oct 2021-jan 2022) in which password authentication was replaced by ssh keys on scinet systems, several drop-in sessions were held to help users set up their ssh keys.in addition to measures put in place by the arc center, users also have a responsibility to protect against cyberattacks, because attacks often do not start on the remote system, but on the end-users local computer. we will not go into the details of how ssh creates this secure communication channel but we will instead focus on the mechanism to authenticate the user in the remote system, as it plays a key role in mitigating the risk of attacks against ssh. it involves two keys which are part of a key pair: one private key which must be kept secure, and one public key, which can be distributed. then, the remote server compares the two pieces of information (the challenge request, versus the challenge response by the client), and if they match, the authentication of the user via ssh keys is successful. (3) after these two steps, unless the default location was used for storing the private key, any time one uses the ssh command, it must be told where to find this key with the -i flags. one may also want to generate separate keys for each remote system, if one wants each trusted relation to have a unique key pair.we should note that ssh keys themselves are also prone to brute force attacks if the length of the key is too short and/or the algorithms used are deprecated. 
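a short sketch of that key-based workflow, wrapping the standard openssh tools from python, is shown below; host names and paths are placeholders, and in practice the key should be protected with a passphrase.

```python
# Generate an ed25519 key pair in a non-default location and show the -i usage.
import subprocess
from pathlib import Path

key_path = Path.home() / ".ssh" / "id_ed25519_cluster"   # separate key for one remote system

# generate the key pair; use a real passphrase instead of "" in practice
subprocess.run(
    ["ssh-keygen", "-t", "ed25519", "-f", str(key_path), "-N", "", "-C", "user@cluster"],
    check=True,
)

# the public key (the .pub file) is what gets installed on the remote system
# (e.g. appended to ~/.ssh/authorized_keys there); the private key stays local.
print(key_path.with_suffix(".pub").read_text())

# connect using the non-default key location with -i
print("connect with: ssh -i", key_path, "user@cluster.example.org")
```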
however, there are a couple of elements that are usually considered risky in terms of security: many vnc systems allow users to connect without the use of a password, which needless to say is a highly discouraged practice! secondly, vnc works by opening connections through a given port on a server; these connections - which by design are resilient - should be torn down when not in use, to reduce the chance of port sweeping by a malicious party. limiting the number of connections per minute mitigates much of the risk of brute force attacks, but there are ways that end users can further mitigate the risk of brute force attacks by choosing longer usernames (the number of possibilities to try for short usernames is small). similarly, users should avoid having simple, repeated/reused, or short passwords, or even better avoid using passwords at all by substituting them with ssh keys. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/924.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/924.txt new file mode 100644 index 0000000000000000000000000000000000000000..03541f32674d3ef68bf040a4c65c730ce671a273 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/924.txt @@ -0,0 +1 @@ +normally, a human makes a request to a computer, and the computer does the computation of the task. but artificial artificial intelligences like mechanical turk invert all that., 2023) and experts (törnberg, 2023)., in order to increase their productivity, and thus their income, on crowdsourcing platforms? we argue that this would severely diminish the utility of crowdsourced data because the data would no longer be the intended human gold standard, but also because one could prompt llms directly (and likely more cheaply) instead of paying crowd workers to do so (likely without disclosing it). the answer to this question is of paramount importance for all who rely on crowdsourcing, for without knowing who really produced we conclude that, although llms are still in their infancy, textual data collected via crowdsourcing is already produced to a large extent by machines, rather than by the hired human crowd workers. first, it is laborious for humans while being easily done with the aid of commercially available llms (luo et al.). in the original study, crowd workers produced eight increasingly short summaries of each original abstract, forming entire information cascades. the abstract-level split serves as a basis for how well the model is able to extract generalizable artifacts present in the synthetic text that can be exploited for detecting synthetic summaries of abstracts not seen during training. first, we assume that summaries written entirely in the text box made available on mturk (without involving a pasting action) are real, allowing us to assess whether our above-described classification model (which does not take keystrokes into account) has a low false-positive rate. second, for the summaries where pasting was used, we examine which fraction of the pasted text came from the original abstract (as crowd workers simply re-arrange parts of the abstract in their summary), versus which fraction is made up of new text. (2023)). due to the overall higher accuracy, we applied the model trained in the summary-level setting to the 46 new summaries to detect instances of llm usage among crowd workers.
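A minimal sketch of the kind of detector described above: a text classifier trained on human versus LLM-generated summaries with an abstract-level split, so it is evaluated on summaries of abstracts unseen during training. The feature representation and model (TF-IDF plus logistic regression) are assumptions for illustration, not the classifier actually used in the study.

```python
# Sketch of the "abstract-level split" detection setup: hold out whole
# abstracts so the classifier must generalise to unseen source texts.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GroupShuffleSplit
from sklearn.pipeline import make_pipeline

summaries = ["toy human-written summary one", "toy llm summary one",
             "toy human-written summary two", "toy llm summary two"]
labels    = [0, 1, 0, 1]      # 0 = human, 1 = synthetic
abstracts = [0, 0, 1, 1]      # id of the source abstract (the grouping key)

splitter = GroupShuffleSplit(n_splits=1, test_size=0.5, random_state=0)
train_idx, test_idx = next(splitter.split(summaries, labels, groups=abstracts))

clf = make_pipeline(TfidfVectorizer(), LogisticRegression())
clf.fit([summaries[i] for i in train_idx], [labels[i] for i in train_idx])
print(clf.predict([summaries[i] for i in test_idx]))  # predictions on unseen abstracts
```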
note that, since nearly all users submitted only a single summary (44 workers contributed 46 summaries), the above-mentioned fraction of llmproduced summaries can also be interpreted as a fraction of llm-using crowd workers. in particular, a qualitative analysis of the data at hand suggests that workers often copypaste intricate phrasing, or entire abstracts, from the original content into their text editor, thereby reutilizing abstract content. our analysis, depicted in figure4, reveals that workers often reuse large portions from the original abstracts, but also that, more importantly, summaries classified as synthetic mostly had a small overlap with the original abstracts., 2023;ziems et al., 2023), and magnify the impact of the values and ideologies encoded by these models(santurkar et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/925.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/925.txt new file mode 100644 index 0000000000000000000000000000000000000000..46ef1d2078ff6173ac460d9ceebb315c81bedea4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/925.txt @@ -0,0 +1 @@ +the broad application of reinforcement learning (rl) faces a significant challenge, namely, the design of appropriate reward functions that align with specific mission objectives in given environments. to mitigate this challenge, preference-based rl (pbrl) (see, for example, (christiano et al., 2017)) has emerged as a promising paradigm, leveraging human feedback to eliminate the need for manual reward function design. however, real-world missions often entail multiple objectives and the consideration of preferences among diverse users, necessitating a balanced approach. existing pbrl methods primarily focus on maximizing a single performance metric, neglecting the crucial aspect of equity or fairness, e.g., (stiennon et al., 2020;wu et al., 2021;lee et al., 2021). consequently, the lack of fairness considerations poses a barrier to the widespread deployment of pbrl for systems affecting multiple end-users when it is 1 unmanned systems lab, department of electrical and computer engineering, the university of texas at san antonio, san antonio, tx, 78249, usa. correspondence to: umer siddique .the many facets of preference learning workshop at the international conference on machine learning (icml), honolulu, hawaii, usa, 2023. copyright 2023 by the author(s). critical to address fairness among these users.to address this critical gap, the development of methods enabling fairness in pbrl becomes imperative. while recent advancements have explored fairness in rl, albeit not within the pbrl framework, notable contributions in, e.g., (weng, 2019;siddique et al., 2020;fan et al., 2022), have employed welfare functions to ensure fairness in the singleagent rl setting. furthermore, the work in (zimmer et al., 2021) considered fairness in a multi-agent rl setting. this paper proposes an approach that builds upon existing studies on fairness, focusing on a pbrl setting. in particular, rather than relying on known ground truth rewards, our method involves learning fair policies by incorporating fairness directly into the pbrl paradigm, thereby eliminating the need for hand-crafted reward functions. by doing so, we aim to address fairness in pbrl without compromising on its advantages. 
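The next passage formalizes this idea with a vector-valued reward and a welfare function phi_w over per-objective utilities. A minimal sketch of one common fairness-inducing choice, in which sorted utilities receive decreasing weights so that worse-off objectives count more; the specific weighting scheme below is an assumption, not necessarily the one used in the paper.

```python
# Sketch of a fairness-inducing welfare function over per-objective utilities:
# utilities are sorted ascending and combined with decreasing weights, so the
# worst-off objective receives the largest weight (a generalized-Gini-style
# choice, shown here purely for illustration).
import numpy as np

def welfare(utilities, weights=None):
    u = np.sort(np.asarray(utilities, dtype=float))            # worst objective first
    if weights is None:
        weights = np.array([2.0 ** -i for i in range(len(u))])  # strictly decreasing
    weights = weights / weights.sum()
    return float(np.dot(weights, u))

print(welfare([10.0, 1.0]))   # 4.0 - dominated by the low-utility objective
print(welfare([5.5, 5.5]))    # 5.5 - equal utilities score higher for the same total
```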
owing to the unavailability of the reward function, many pbrl algorithms learn an estimated reward function model, r(•, •) : s × a → r.2, we extend previous rl formulations by redefining the estimated reward function as a vector function, denoted as r : s × a → r k , where k denotes the number of objectives., 2017).where π θ represents a policy parameterized by θ, ϕ w denotes a welfare function with fixed weights that requires optimization, and j (π θ ) represents the vectorial objective function that yields the utilities (i.note that optimizing the welfare function defined in (3) is an effective way to address fairness because the weights w are selected such that a higher weight will be assigned for objectives with lower utility values, which will ensure that all objectives are treated fairly than the cases when the weights are assigned without considering the utility values.our procedure to optimize the welfare function is an iterative process that integrates the policy update step and reward update step (via the collection of more preferences for reward function estimation). as a state-of-the-art policy gradient method, we adopt the proximal policy optimization (ppo) algorithm(schulman et al.it is important to note that the key distinction between our proposed approach and pbrl in(christiano et al.in this domain, our primary objective is to assess the effectiveness of our proposed method in optimizing the welfare function, denoted as ϕ w . to evaluate this, we conduct a comparative analysis of welfare scores between three approaches: ppo, pbrl, and our proposed fpbrl method within this domain. our results reveal that fpbrl achieves the highest welfare score, thereby demonstrating its ability to identify fairer solutions compared to ppo and the standard pbrl method. encouragingly, our proposed method achieved the highest welfare score, signifying a fairer solution when compared to both ppo and the standard pbrl method. notably, only fpbrl successfully maximizes the minimum objective utility, whereas ppo and the pbrl method yield the lowest minimum objective values, reflecting a prioritization of maximizing cumulative rewards at the expense of fairness considerations. this noteworthy result underscores the efficacy of fpbrl in optimizing the welfare function, which is cru-cial for ensuring fair and equitable treatment of the diverse objectives at hand. our findings underscore the effectiveness of our proposed method, fpbrl, in optimizing the welfare function and achieving fairness in the presence of multiple objectives. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/926.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/926.txt new file mode 100644 index 0000000000000000000000000000000000000000..f173e0f1f1548b81a8c86b41b59aa125135c38ac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/926.txt @@ -0,0 +1 @@ +in recent years, algorithmic bias and fairness have emerged as noteworthy challenges for automated biometric systems . a biometric system or algorithm is considered to be biased if significant differences in its operation can be observed for different demographic groups of individuals . with growing adoption of various biometric applications, the non-equitable performance of such applications across demographic groups has led to several discussions and debates . 
several institutions have conducted evaluations (w.r.t demographic bias) of popular biometric applications, such as face recognition, developed by commercial vendors . on academic front as well, the research in understanding, estimating, and mitigating demographic bias is gaining significant traction .as the issue of fairness in biometric systems has received attention lately, very few attempts have been made to define fairness measures for generic and/or specific biometric applications. as per howard et al. , the fairness of biometric system can be measured using two approaches: differential performance and differential outcome. the former approach refers to the difference in the genuine or imposter distributions between specific demographic groups for a given biometric task, whereas the latter deals with differences in classification error rates among demographic groups. the differential performance is, thus, independent of any classification threshold, while the differential outcomes are functions of a chosen threshold that binarizes scores into match or no-match.a recently proposed fairness measure-fairness discrepancy rate (fdr)-is based on the differential outcome of biometric verification systems . to compute the fdr, authors first assess the maximum discrepancy in the false match rate (fmr) and false non-match rate (fnmr) of different demographic groups for several score thresholds. the fairness of the system is evaluated through a weighted combination of these maximum discrepancies. the fdr has arxiv:2306.10919v1 19 jun 2023 also been adapted to measure the fairness in detection of face morphing attacks in . gong et al. have considered the area under the roc (receiver operating characteristic) curve as a proxy to measure demographic differentials . in its special report on demographic effects in face recognition , the face recognition vendor test (frvt) has employed differential outcome-based strategy where they discuss the impact fmr and fnmr using a global threshold. a demographic-specific score thresholding has been analyzed in .majority of the existing work in evaluating the fairness or demographic equitability of the biometric system is based on the differential outcome. these measures are easy to calculate, based on well-established error rates, and treat the biometric system as a complete black box. the assessment of demographic fairness of biometric system based on differential performance has received little attention. the use of distributions of genuine and imposter scores of a biometric recognition system towards evaluation of its demographic fairness has several advantages: first, the measures based on differential outcome evaluate fairness of the system entirely from the (number of) samples causing incorrect decisions. while these extremal samples signify the accuracy of the biometric system, we believe that the demographic fairness needs to be evaluated across all samples, irrespective of their recognition outcome. the use of differential performance (score distributions) facilitates consideration of entire sample set towards fairness assessment. second, the incorrect decisions (characterized by fmr and fnmr) are dependent on a score threshold. this variable is either fixed or computed from probably different set of data. in the former case, one has to evaluate fairness at multiple score thresholds to interpret the equitability of the biometric system. 
in the latter case, the score threshold is sensitive to, and hence, impacted by the distribution of unseen, disjoint set of data (and underlying demographic) . the measures based on score distributions do not involve such threshold, and thus, can be computed without interference of such external parameters. third, the score threshold, being external, is often easy to tune as per the need of the application. the fairness measures based on differential performance are agnostic to such tuning, and represent the fairness of underlying core mechanism of the biometric system.consider two (canonical) biometric verification systems-whose score distributions are shown in figs. 1a-1b. for both systems, the classification accuracy (in terms of fmr and fnmr) is nearly the same. however, the first system (fig. 1a) is likely to be fair to both demographic groups (top and bottom row), whereas the other biometric system is likely to be unfair to demographic d 1 (top row). we believe that such disparity among demographic groups, beyond the recognition accuracy, needs to be quantified systematically. in this work, we propose three measures based on differential performance for evaluation of demographic fairness of a biometric verification system. the fundamental component of each measure is calculated on distributions of an individual demographic group. depending on fusion of these components to obtain a final measure, we define three different variants of each measure. we also provide a solution to reduce the effect of demographic imbalance in the test dataset towards fairness evaluation. we explain the behavior of each fairness measure followed by illustration on canonical data (synthetic, yet realistic). our contributions can be summarized as follows: • we propose three measures for evaluation of demographic fairness of biometric verification systems. 1 our measures, being based on scores, consider how well a pair of samples (genuine or imposter) has been matched, rather than just 'whether it has been matched'.• we propose a weighted fusion strategy to account for demographic imbalance in the benchmarking datasets.our weighing strategy attempts to provide higher importance to relatively under-represented demographic groups.• we propose three different variants for each fairness measure to facilitate assessment of fairness from multiple perspectives.• being agnostic to the modality, the proposed measures are applicable across various biometric modalities.in section 2, we formulate the problem of algorithmic fairness in general and biometric verification-specific context. the weighted-fusion strategy and fairness measures are proposed in section 3. summary is presented in section 4., the fairness of biometric system can be measured using two approaches: differential performance and differential outcome.majority of the existing work in evaluating the fairness or demographic equitability of the biometric system is based on the differential outcome. the use of distributions of genuine and imposter scores of a biometric recognition system towards evaluation of its demographic fairness has several advantages: first, the measures based on differential outcome evaluate fairness of the system entirely from the (number of) samples causing incorrect decisions. while these extremal samples signify the accuracy of the biometric system, we believe that the demographic fairness needs to be evaluated across all samples, irrespective of their recognition outcome. 
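An illustrative, threshold-free comparison of per-group score distributions along the lines argued for above. The simple statistic below (per-group separation of mean genuine and imposter scores, plus the largest gap across groups) is a stand-in for the measures defined in the following passage, not their exact formulas.

```python
# Compare full score distributions per demographic group without choosing any
# decision threshold: compute each group's separation between mean genuine and
# mean imposter scores, then the largest pairwise gap across groups (0 would
# indicate equal separation). Synthetic scores are used for illustration.
import numpy as np

rng = np.random.default_rng(0)
groups = {
    "d1": {"genuine": rng.normal(0.80, 0.05, 1000), "imposter": rng.normal(0.30, 0.05, 1000)},
    "d2": {"genuine": rng.normal(0.65, 0.05, 1000), "imposter": rng.normal(0.30, 0.05, 1000)},
}

separation = {g: s["genuine"].mean() - s["imposter"].mean() for g, s in groups.items()}
gap = max(separation.values()) - min(separation.values())
print(separation, "max pairwise gap:", round(gap, 3))
```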
the fairness measures based on differential performance are agnostic to such tuning, and represent the fairness of underlying core mechanism of the biometric system. however, the first system (fig. we believe that such disparity among demographic groups, beyond the recognition accuracy, needs to be quantified systematically. for a bv system, the algorithmic fairness can be defined as the ability of the bv system to exhibit similar performancein terms of scores, accuracies, or error rates-to different demographic groups in the test data.we define the separation fairness index (sfi) of the biometric verification system as the measure of its equitability towards separation of expected values of genuine and imposter scores across constituent demographic groups. if we denote the separation between two expected values for demographic group d i as z si , a fair bv system is expected to exhibit similar values of z si for each demographic group in d.we define the compactness fairness index (cfi) of the biometric verification system as the measure of its equitability towards compactness (or spread) of genuine scores and imposter scores across constituent demographic groups.if we denote the combined spread of both score distributions for demographic group d i as z ci , a fair bv system is expected to exhibit similar values of z ci for each demographic group in d.we define the distribution fairness index (dfi) of the biometric verification system as the measure of its equitability towards overall score distributions across constituent demographic groups. the proposed measures determine the fairness of a verification system, towards demographic groups, based on its equitability w. the proposed measures are not be considered as alternative to the outcome-based fairness measures, rather both evaluation approaches are complementary towards analysis of the demographic fairness of a biometric verification system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/927.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/927.txt new file mode 100644 index 0000000000000000000000000000000000000000..a05744ad297f5d8865ab0050247c732b3263824e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/927.txt @@ -0,0 +1 @@ +the main theorem states that given a hypothesis class h and an unknown distribution d over feature space x × {0, 1}, if we have an unbiased sample s of size m and a biased sample s β of size m β , with the marginal probability of a positive example in group i of s β being p i β i , then running algorithm 1 with appropriate sample sizes ensures that, with high probability, the algorithm outputs a hypothesis h that has a low error on the distribution d.the proof of the main theorem consists of three parts:part a: this part focuses on normalizing the weighted empirical loss on the biased sample. the goal is to bound the difference between the sum of weights in the biased sample and the reciprocal of the expected weight. by achieving this normalization, we can account for the bias introduced by the weights and ensure that the algorithm's performance is not adversely affected. this normalization step is crucial for the subsequent analysis of the proof. see lemmas a.2 and a.3 in the appendix for details.part b: part b of the proof is concerned with estimating the inverse of the product of the bias parameters. 
it involves first obtaining reliable estimates of the probabilities of positive examples in each group of the unbiased data and biased data, respectively, which can then be used to estimate 1 β and, finally,the key lemma established in this part specifies the sample size requirements to achieve reliable probability estimation, which is crucial for accurately evaluating the algorithm's performance. it shows that we can estimate the probabilities of positive examples in each group with reasonable accuracy by having only a small number of unbiased samples in addition to our larger set of biased samples. see lemmas b.1 -b.4 for details.part c: part c combines the results from parts a and b to establish the sample complexity guarantee for algorithm 1. it utilizes the bounds obtained from the lemmas established in parts a and b, which cover both the intersectional case where the groups in the biased sample have overlapping instances and the computationally simpler case where they are disjoint. by combining these results, the proof demonstrates that running algorithm 1 with appropriate sample sizes outputs a hypothesis h with a low error on the true distribution d with high probability. see lemmas c.1 -c.2 for details. be the estimated bias for each group i. however, introducing bias parameters for each group allows our model to explicitly incorporate a way to identify social power into analyzing intersectional biases when supplemented with a more robust normative theory. we provide strong theoretical guarantees that show we can learn the bias parameters accurately with a very small sample of unbiased data, so this gives a proof of concept that a small amount of unbiased data can take us from "impossible" to "nearly optimal" at least in our intersectionality bias model. since the bias parameter β i represents the probability that a positive sample from group g i is retained in s β , the probability of a positive outcome within group g i in s β is the product of the original base positive rate p i and the bias parameter β i :.by incorporating the bias parameters, we emphasize the intersectionality of biases, accounting for the collective impact of multiple group memberships on the biases in the dataset. to ensure fairness, we apply the intersectional bias learning algorithm to obtain a hypothesis h that minimizes the risk of the learned model while considering the biases.the main theorem states that given a hypothesis class h and an unknown distribution d over feature space x × {0, 1}, if we have an unbiased sample s of size m and a biased sample s β of size m β , with the marginal probability of a positive example in group i of s β being p i β i , then running algorithm 1 with appropriate sample sizes ensures that, with high probability, the algorithm outputs a hypothesis h that has a low error on the distribution d.1: the risk difference between the learned hypothesis h on the biased sample s β and the risk of h on the true distribution d is bounded with probability 1 -δ. 
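A minimal sketch of the estimation-and-reweighting step the theorem relies on: estimate each group's bias parameter from the ratio of positive rates in the biased and unbiased samples, then weight positive examples by the inverse estimate when computing the empirical loss. Variable names and the clipping constant are assumptions.

```python
# beta_i is the probability that a positive example from group i survives into
# the biased sample, so (per the text) the biased positive rate is p_i * beta_i
# and beta can be estimated as the ratio of positive rates. Positive examples
# are then reweighted by 1 / beta_hat_i and the loss is renormalised.
import numpy as np

def estimate_beta(unbiased_pos_rate: float, biased_pos_rate: float) -> float:
    """beta_hat = p_i^beta / p_i, clipped away from zero for numerical stability."""
    return max(biased_pos_rate / max(unbiased_pos_rate, 1e-12), 1e-12)

def reweighted_loss(losses, labels, group_ids, beta_hat):
    """Weight each positive example by 1/beta_hat of its group, then normalise."""
    losses, labels = np.asarray(losses, float), np.asarray(labels, int)
    weights = np.array([1.0 / beta_hat[g] if y == 1 else 1.0
                        for g, y in zip(group_ids, labels)])
    return float(np.sum(weights * losses) / np.sum(weights))

beta_hat = {"g1": estimate_beta(unbiased_pos_rate=0.5, biased_pos_rate=0.25)}  # -> 0.5
print(beta_hat, reweighted_loss([1.0, 0.0], [1, 0], ["g1", "g1"], beta_hat))
```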
the model also has the strength that once the bias parameters are estimated, they can be used on future draws of biased data to find a hypothesis approximately minimizing prediction risk with respect to the true distribution with high probability.1 philosophical framework for epistemic intersectionality epistemic intersectionality: we introduce a philosophical framework we coin epistemic intersectionality that incorporates and analyzes intersectional biases in machine learning models while considering the epistemic dimension of intersectionality. empirical verification: epistemic intersectionality recognizes the importance of empirical verification in understanding the existence and extent of intersectional biases. comprehensive analysis of intersectionality: epistemic intersectionality promotes a comprehensive analysis of intersectionality by considering the complex interactions between social identities and systems of power. role in academic research: epistemic intersectionality contributes to academic research by offering a rigorous and evidence-based approach to understanding the impact of intersectional biases in machine learning models.by considering the bias parameters when analyzing the biased dataset, our model ensures that data points are included based on the collective impact of multiple group memberships, which may be explicitly tied to social power given one's moral and political theory. b represents the difference between the normalized risk on the biased sample reweighted with β and the normalized risk on the biased sample reweighted with β.by proving this lemma, we establish a bound on the difference between the reweighted risk of the learned hypothesis on the biased sample and the risk on the true distribution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/928.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/928.txt new file mode 100644 index 0000000000000000000000000000000000000000..bd0f8a141379cac489b799b2774bfbc5d80e5ff8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/928.txt @@ -0,0 +1 @@ +the phrase "epistemic agency" refers to an individual's control over his or her own personal beliefs . when citizens lose their epistemic agency, democracy is threatened because the political establishment can easily deploy propaganda, misinformation, and disinformation that supports authoritarian objectives, interests, or policies . mass media techniques have been used for generations to weaken the epistemic agency of democratic populations, but over the last decade this problem has been amplified by social media technologies that can direct targeted influence campaigns at specific sub-groups . segmentation and targeting on social media have been shown to drive polarization, promote radicalization, and foster discontent . in this context, all new forms of media should be evaluated in their capacity for abuse and misuse, especially when it threatens epistemic agency .it is therefore important to consider the dangers of conversational ai and its potential use as a highly personalized and interactive form of targeted influence. as used herein, the phrase "conversational ai" refers to the deployment of automated ai-driven agents that engage individual human users in interactive dialog . when text-based, these systems are generally referred to as chatbots. 
when combined with natural voice generation and recognition, they are often referred to as virtual agents and can be used in call centers, as voice-based virtual assistants, and other spoken use-cases . when combined with simulated human faces that have an authentic appearance and can express interactive facial sentiments in authentic ways, they are referred to as virtual humans or virtual spokespeople (vsps), especially when used to represent the specific interests of third parties through natural conversational interactions .until recently, the prospect that conversational ai systems could interact with human users through automated realtime dialog that is perceived as coherent, naturally flowing, and context-aware was still a theoretical prospect. but with the deployment of systems like lamda from google and chatgpt from openai in 2022, it has become apparent that human-level conversations with machines are now within reach. what makes systems like chatgpt most unique from prior technologies is their ability to interact continuously with a human user, keeping track of evolving conversational context and even asking probing questions to the user to acquire needed clarifications or explanations . furthermore these technologies will be deployed on a global scale, as microsoft recently announced that chatgpt has been integrated into the bing search engine and google announced that lamda is being deployed in a new tool called bard . considering the facts above, it is clear that conversational ai tools and technologies have made significant advances over the last 12 months and is likely to be deployed widely by major technology companies in the coming years. for these reasons, we must now consider natural conversations between humans and machines as a viable and potentially dangerous deployment vector for targeted influence campaigns. the following sections explore this danger in hope of informing policymakers that conversational ai, unlike prior forms of mass media, is an interactive real-time medium and therefore is susceptible to new abuses that have not been confronted on traditional or social media platforms. as used herein, the phrase "conversational ai" refers to the deployment of automated ai-driven agents that engage individual human users in interactive dialog.until recently, the prospect that conversational ai systems could interact with human users through automated realtime dialog that is perceived as coherent, naturally flowing, and context-aware was still a theoretical prospect. what makes systems like chatgpt most unique from prior technologies is their ability to interact continuously with a human user, keeping track of evolving conversational context and even asking probing questions to the user to acquire needed clarifications or explanations. the following sections explore this danger in hope of informing policymakers that conversational ai, unlike prior forms of mass media, is an interactive real-time medium and therefore is susceptible to new abuses that have not been confronted on traditional or social media platforms.the ai manipulation problem is used to describe human-computer interaction (hci) scenarios in which an ai-powered system manipulates a human user in real-time in order to achieve targeted influence objectives, often by performing the following sequence of steps:. 
after all, when a salesperson wants to influence a customer, an effective approach is often to speak directly with the target and adjust the conversational arguments in real-time when confronted with resistance or hesitation, gradually maximizing the persuasive impact. the danger is that large language models (llms) have advanced so rapidly in recent months, conversational ai systems can now engage in flowing dialog that perform the same manipulative steps. of course, there are many positive applications that justify the development and deployment of conversational ai systems, but we must also consider the significant danger of misuse for targeted coercion, persuasion, and influence.whether we like it or not, interactive conversational ai systems can now be designed and deployed that draw users into seemingly casual dialog while pursuing targeted influence objectives through real-time feedback control. once engaged, the target will provide verbal responses that are assessed by the ai controller in realtime, enabling the conversational agent to adjust its arguments to counter any resistance or hesitation. and finally, these systems will likely use webcams to process the target's facial expressions, eye motions, and even pupil dilations in real timeall of which infer their emotional reactions at every moment during the interactive dialog, enabling the controller to continuously adjust conversational tactics for maximized impact. for example, ai systems can detect micro-expressions on human faces that are far too subtle for human observers. in other words, these systems will not only adapt to your real-time emotions, but they will also get better and better at "playing you" over time, learning how to draw you into conversations, guide you to accept new ideas, and convince you to buy things you don't need, believe things that are untrue, even support extreme policies or politicians that you'd naturally reject. unfortunately, without regulatory protections, these ai systems are likely to be trained on sales tactics, human psychology, and other forms of human persuasion. while current "influence campaigns" on social media are analogous to buckshot fired at broad groups, conversational agents could function more like "heat seeking missiles" that adapt their tactics in real time to maximize impact on individual users. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/929.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/929.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ed64f9fc7612441134dfba9bbc57969327fa479 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/929.txt @@ -0,0 +1 @@ +blockchain technology has the potential to revolutionize various industries, from finance to healthcare to supply chain management. the decentralized nature of blockchain networks offers several advantages over traditional centralized systems, such as increased transparency, immutability, and security. according to 'deloitte's 2021 global blockchain survey' , 76% respondents stated that their companies adopted blockchain as a digital asset, and 83% of them believe that digital assets will alternate fiat currency in the next 10 years. a report by grand view research inc. stated that by 2030 global blockchain technology market size is expected to extend to 1,431.54 billion usd with a compound annual growth rate of 87.7% from the year 2023 to 2030 . 
in line with this trend, a report titled, "blockchain distributed ledger market by component, type, enterprise size, application and end user, opportunity analysis and industry forecast, 2020-2027", the global blockchain ledger market size is valued at usd 2.89 billion and by 2027 extrapolated to outstretch usd 137.29 billion at a cagr of 62.7% growing rate . the financial services sector is leading the pack, accounting for a substantial share of the blockchain market, as per the report. following the success of bitcoin, a multitude of diverse applications of blockchain has grown rapidly including cross-border money transfer , smart contract supported by evm (ethereum virtual machine) , adaptation into iot ecosystem , data and identity security , electronic healthcare records (ehrs) , automated logistics and non-fungible tokens (nfts) .however, these advantages also come with unique challenges. blockchain networks are susceptible to various attacks that can compromise their security and integrity. according to nist, 20139 vulnerabilities were published in 2021 , and sonicwall reported an extreme rise in cybercrimes in the 2022 cyber threat report . till now there have been numerous reported attacks on blockchain systems, including the most common 51% attacks, double-spending attacks, and smart contract vulnerabilities. for example, ethereum classic has suffered multiple 51% double-spending attacks in 2019 and 2020, three repeated attacks in a month, resulting in the theft of more than $6.7 million . on august 10, 2021, an attacker hacked a smart contract of the poly network and stole $611 million in cryptocurrency, the largest crypto-related hack to date . the largest cryptocurrency exchange, binance, suffered a major attack in october 2022, with hackers stealing 2 million bnb tokens through a blockchain vulnerability and causing a $570 million loss . these recent attacks demonstrate that blockchain systems are highly vulnerable to cyber-attacks.as the usage of blockchain technology grows, it becomes crucial to understand these attacks and how to prevent them. a critical analysis of malicious activities within blockchain technologies and their consequential influences can facilitate practitioners to respond to security concerns from an early development phase. thus, to shape the research and formulate a strategic agenda, we offered the following research questions:-rq1: what are the most common types of attacks on blockchain technology and how do they impact the security and integrity of the system? -rq2: what security measures (i.e., what methods or technologies) have been employed to detect and mitigate malicious blockchain attacks?to answer the above-mentioned questions, we have conducted a comprehensive qualitative and quantitative approach on existing studies regarding blockchain privacy and security issues so that this research can be presented as a potential resource for offering a holistic outline and defending current and forthcoming attacks.the scoping criteria for the research questions about the most common types of attacks on blockchain technology, their impact on security and integrity, and the security measures employed to detect and mitigate malicious attacks encompass a comprehensive analysis of the various aspects of blockchain security. the research delves into the various attack methods used to compromise the security of blockchain systems, including the 51% attack, double-spend attack, smart contract vulnerability attack, and sybil attack. 
the study also evaluates network security protocols like firewalls and intrusion detection systems, security audits conducted by outside security experts, and consensus algorithms like proof-of-work (pow) and proof-of-stake (pos) to detect and prevent malicious attacks on blockchain. the terms "blockchain security," "attack types," "51 percent attack," "double-spend attack," "smart contract vulnerability," "sybil attack," "consensus algorithms," "cryptography," "network security protocols," "security audits," "blockchain evolution," "current challenges," and "future trends" are used in research questions about the most typical types of attacks on blockchain technology, their effects on security and integrity, and the security measures used to protect against them., 51% attack, double-spending attack, replay attack, sybil attack, eclipse attack, and so forth.51% attack many of the blockchain systems suffer from a 51% attack, also known as a majority attack, that is considered a high risk for the integrity and security of these blockchains, where an attacker or a group of miners control more than 50% of the network's computing power, allowing them to manipulate the consensus mechanism and double-spend coins.man-in-the-middle attack in a blockchain system, a man-in-the-middle (mitm) attack is a type of attack that can occur when an attacker intercepts and modifies the communication between two nodes in a blockchain network.sybil attack a sybil attack is an attack on a peer-to-peer network where a malicious node creates multiple fake identities to disrupt the network.race attack one of the most preeminent means of attack which enables various double-spending attacks is denoted as a race attack wherein a threat actor initiates two contradictory transactions into the network concurrently.eclipse attack an eclipse attack occurs when a malicious actor controls all the peers that a targeted node is connected to, allowing them to steal information, compromise the targeted node and establish a degree of isolation of the earmarked nodes from the entire p2p network by blocking communication allowing only the attacking nodes to receive the details.replay attack a replay attack is a type of network attack where a malicious actor intercepts, captures, and resends valid digital transaction data, thus exploiting the need for the original data to be validated and stealing funds or disrupting the network.man in the middle attack network monitoring is an essential technique that can help detect unusual network activity, such as unexpected data transmissions or unexpected nodes joining the network. additionally, another detection method is the insertion of observers where a vendor can insert one or two nodes into the network referred to as "monitors" which function to transmit all transactions to the vendor, thereby facilitating the swift detection of any race attack or double-spending attack endeavors. probability of blockchain attacks by attack type nections to detect and disabling the unconfirmed connections and transaction forwarding are the imperative security measures to safeguard the integrity and reliability of the blockchain network. according to our analysis, there is a 30% chance of a 51% attack, a 70% chance of smart contract vulnerabilities, a 10% chance of a replay attack, a 20% chance of a routing attack, a 40% chance of a double-spending attack, a 30% chance of a sybil attack, a 40% chance of a man-in-the-middle attack, a 15% chance of an eclipse attack, and a 20% chance of a routing attack. 
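For intuition on why the 50% threshold in a 51% attack is treated as a hard boundary, here is the standard simplified catch-up probability for a double-spending attacker (the gambler's-ruin analysis from the original bitcoin whitepaper); this is textbook material, not the surveyed paper's own model.

```python
# Probability that an attacker holding a fraction q of total hash power ever
# catches up from z blocks behind the honest chain. Once q >= 0.5 the attacker
# catches up with probability 1, hence the 51% threshold.
def catch_up_probability(q: float, z: int) -> float:
    p = 1.0 - q                      # honest hash-power share
    return 1.0 if q >= p else (q / p) ** z

for q in (0.1, 0.3, 0.45, 0.51):
    print(f"q={q}: P(catch up from 6 blocks behind) = {catch_up_probability(q, 6):.6f}")
```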
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/93.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/93.txt new file mode 100644 index 0000000000000000000000000000000000000000..254ab01f722be2b766792c2b114a2b4ac63e6c05 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/93.txt @@ -0,0 +1 @@ +during the past decade, fast progress in deep learning (1) has empowered computer speech recognition, image processing, natural language processing, protein folding, game playing and many other applications. however, these great progresses fell short when we try to understand our own learning mechanism: how to model human learning (2), (3), ( 4)?species in nature learn quickly to survive. when a dragonfly is hatched, within hours it firms up its wings and then flies to catch mosquitoes; a newborn does not need tons of repeated examples or transfer learning to identify an apple. most human or animal learning exhibits a mixture of inherited intelligence, few-shot learning without prior knowledge, as well as long term many-shot learning. it is interesting to note that these learning programs are encoded in our genomes but they are not all the same, even for individuals within the same species. the diversity of these learning algorithms is vividly expressed by spearman's "g" factor (2). however, these great progresses fell short when we try to understand our own learning mechanism: how to model human learning (2), (3), (4)?. most human or animal learning exhibits a mixture of inherited intelligence, few-shot learning without prior knowledge, as well as long term many-shot learning. even if we just want to model one person, a single person often uses different parameters, features, and perhaps different algorithms to deal with different learning tasks.specifically, we start from an agreed-upon law in thermodynamics, to formally derive our model for few-shot learning, and prove this is the optimal model within our framework in the sense that all other models including human ones may be viewed as approximations to our framework. by the end of this process, a component of data compression during the inference phase of learning emerges as a key component of all few-shot learning models.current deep learning based approaches for few-shot learning generally depend on 1) many auxiliary labelled training samples or task-specific data augmentation for transfer learning or meta learning(7); or 2) very large scale self-supervised pre-training(8). these approaches thus fall short to model few-shot learning in nature by humans and animals as they can hardly account for the diversity in learning algorithms and they either neglect the unsupervised scenario that humans are mostly exposed to or use the scale of unlabelled data and training parameters that are far beyond creatures need. another theory is the free energy principle by karl friston (3) that human (and all biological systems) learning tends to minimize the free energy between internal understanding in the sense of bayesian (under internal perceived distribution p) and that of the environmental event (under distribution q), measured by kl-divergence(10). in a similar spirit, lake, salakhutdinov and tenenbaum (4) proposed a bayesian program learning (bpl) model, learning a probabilistic model for each concept and achieve human-level performance. 
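A standard way to write the variational free energy invoked above; the notation is generic and not necessarily identical to the cited formulations.

```latex
% Variational free energy (negative ELBO): minimising it over the internal
% distribution q drives q toward the posterior p(z|x) while implicitly
% maximising the evidence p(x).
\[
  \mathcal{F}(q) \;=\; \mathbb{E}_{q(z)}\!\bigl[\log q(z) - \log p(x, z)\bigr]
  \;=\; \mathrm{KL}\!\bigl(q(z)\,\|\,p(z \mid x)\bigr) - \log p(x)
  \;\ge\; -\log p(x).
\]
```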
we further demonstrate by experiments that our new model indeed works significantly better than other classical deep learning neural networks for few-shot learning. the task may appear to be formidable because of conflicting and seemingly very general goals: each individual is allowed to have a different learning model, yet our model has just one program to model everybody; we do not yet exactly know the complete underlying biological mechanisms, yet we need to implement the right functionality; there are infinite number of models, yet we need to choose one that is optimal; we are not really interested in "proposing models" out of blue, yet we wish our model to be a mathematical consequence of some basic laws of physics; the model needs to be theoretically sound, yet practically useful.to approximate our universal few-shot learning model, we use a hierarchical vae as our underlying model h in inequality 1 to model the unlabelled samples y 1 , .we have defined human-like few-shot learning and derived an optimal form of such few-shot learning. in classical learning theory, it is well-known that if we compress training data to a smaller consistent description, whether it is a classical bayesian network or a deep neural networks(13,35), we would achieve learning. we expect to see more practical systems approximating our theory can be implemented to solve commonplace few-shot learning problems when large amounts of labelled data for deep learning is lacking. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/930.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/930.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0b79673e72eb8de74eff77d3fe7e1f63e992712 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/930.txt @@ -0,0 +1 @@ +ml-based tools have the potential to significantly improve health and healthcare delivery , yet these methods are often "black-box" in nature. in this context, ml models often fail to elucidate which influential factors affect individual predictions as well as how changes to these observable inputs affect or modulate the outcome being predicted. this is an important deficiency because clinicians' understanding and confidence in using predictions to guide interventions in a complex process that is modelled is the key to trust. knowing the main contributing factors allows their evaluation in terms of their coherence with respect to the application task and its potential actionability could help in building trust in clinical settings . if the explanation cannot explain why a future problem will evolve, and thus justify treatment interventions then clinicians can rightly be sceptical. the lack of transparency in their function reduces their trustworthiness and is a barrier to their adoption for clinical decisionmaking. understanding of the methods should also be sufficient for clinician users to suspect when the tools are not working, or being used outside the purpose for which they were developed.to understand complex ml models, several explainable ai (xai) methods have been proposed in the literature . these methods can be categorised into (i) gradientbased e.g., smoothgrad , integrated gradients , deep taylor decomposition (dtd) ), layer-wise propagation , and (ii) perturbation-based e.g. lime , shap . 
however, there is little understanding of how applicable or useful they are in clinical settings or whether they should be significantly re-tailored or even novel xai methods developed.from clinicians' view, knowing the subset of features driving the model outputs is crucial as it allows them to compare the data-driven model decisions to their clinical judgment, especially important in case of a disagreement . tonekaboni et. al. also suggest that rigorous evaluation of explanations against the following criteria: (i) domain-appropriate representation, (ii) potential actionability and (iii) consistency could contribute to building trust in clinical settings. other recent studies report inconsistency between the explanations generated by various popular explanation methods. this variability implies that at least some generated explanations are incorrect. if incorrect, explanations at a patient level could be misleading and could lead to wrong decisions with dire consequences in applications such as healthcare. thus, to build trust, it is critical to investigate the conformity of explanations with clinical expert knowledge by evaluating them against the aforementioned criteria.this paper presents the results of the quantitative analysis of explanations at a patient (i.e. local explanations) and cohort (i.e. global explanations) level and discusses them in terms of their coherence with respect to the application task, impact on the workflow and consistency. to investigate the utility and the trustworthiness of the xai-generated explanations in the clinical context, we evaluate against criteria suggested by tonekaboni et al. . to analyse discordance between explanations, we employ agreement metrics proposed by krishna et. al. where appropriate. the analysis is performed on two emr datasets sourced from two major australian hospitals examining data-driven models predicting unexpected patient deterioration and hospital readmission after discharge .we used shap and dtd methods to generate explanations of tree-based and neural network (nn)-based ml models and patient and cohort level. these explanations were compared to each other and also to interpretations arising from the coefficients of the logistic regression (lr) model, the most accepted predictive model in healthcare applications. the explanations obtained for one of the two datasets used in this study (for which it was possible) were also benchmarked against the true causes recorded by the deployed data collection system in the study hospital. we discuss these results and their implications from clinicians' perspectives. the necessary criteria for having trustworthy explanations and how these guide the choice of intervention are also considered.also suggest that rigorous evaluation of explanations against the following criteria: (i) domain-appropriate representation, (ii) potential actionability and (iii) consistency could contribute to building trust in clinical settings.we used shap and dtd methods to generate explanations of tree-based and neural network (nn)-based ml models and patient and cohort level. the explanations obtained for one of the two datasets used in this study (for which it was possible) were also benchmarked against the true causes recorded by the deployed data collection system in the study hospital. 
global explanations) which are obtained by adding up the absolute values of all explanations obtained for individual patients and averaging them over the total number of considered patients.metrics for measuring the agreement between the explanations introduced inwere used in different experiments carried on to evaluate explanations intended to build trust in clinical settings.an intuitive measure of the agreement between the explanations obtained with different methods is the number of common features in sets of top features identified by different methods which will be denoted in the remainder of the paper as the feature agreement (fa).to evaluate explanations in the context of clinical predictive models against the criteria suggested inwe used explanations obtained for emr-based predictive models reported in,. whether they are informative and may impact the workflow by informing follow-up clinical workflow while at the same time being parsimonious and timely, explanations at the patient level were analysed, assessed and discussed. considering that different methods may produce different resultsand the discordance could impact clinicians' trust, explanation agreement between the methods at the patient has been also investigated and discussed.to examine the consistency of explanations in relation to variations in the design of underlying models (dnn and xgb), explanations calculated at the cohort level were analyzed for each of the five independent runs., sign), explanations obtained at the patient level for both xgb and dnn models using the shap method were analyzed across five independent runs. the aim of this study was to assess the explanations generated by popular xai methods used for explaining clinical predictive models. in the context of predicting patient deterioration and risk of readmission, explanations obtained at the patient level were recognised by clinical collaborators as actionable, i. when considering timeliness, both algorithms can be leveraged to provide real-time predictions and explanations that provide relevant complementary information that is well aligned with the current clinical workflows, allowing for early intervention and the prioritisation of clinical efforts for care planning. this paper is suggesting that if 1) sufficient disparate ml methods agree on influential relationships, 2) if observable and input factors can be modified and they change model output, and 3) if the adjusted ml model outcomes concur and agree with the realworld results, then we are one step closer to trustworthy explanations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/931.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/931.txt new file mode 100644 index 0000000000000000000000000000000000000000..441f952c98f1b3b619357ef242edbd08f67bafb0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/931.txt @@ -0,0 +1 @@ +septic shock, the most severe form of sepsis, is characterized by profound circulatory and cellular abnormalities and is associated with a high mortality rate . early detection and treatment of sepsis and septic shock are critical for improving patient outcomes, as delays in intervention can lead to a rapid decline in a patient's condition . while various methods have shown promise in predictive accuracy , their interpretability remains a significant concern. 
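A minimal sketch of the feature agreement (fa) metric described earlier, i.e. the overlap between the top-k features ranked most important by two explanation methods; normalizing by k to report a fraction is one common convention and an assumption here.

```python
# Feature agreement between two explanation methods: fraction of shared
# features among each method's top-k by absolute importance.
def feature_agreement(importance_a: dict, importance_b: dict, k: int) -> float:
    top_a = {f for f, _ in sorted(importance_a.items(), key=lambda x: -abs(x[1]))[:k]}
    top_b = {f for f, _ in sorted(importance_b.items(), key=lambda x: -abs(x[1]))[:k]}
    return len(top_a & top_b) / k

# Hypothetical importance scores from two explainers for the same prediction.
shap_vals = {"heart_rate": 0.42, "resp_rate": 0.31, "age": 0.10, "sodium": 0.02}
dtd_vals  = {"heart_rate": 0.38, "age": 0.25, "lactate": 0.20, "sodium": 0.01}
print(feature_agreement(shap_vals, dtd_vals, k=3))   # 2 of the top-3 shared -> 0.67
```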
understanding and explaining a model's underlying mechanisms is crucial for gaining trust , facilitating model debugging, and informing clinical decision-making . interpretability and explainability methods for time series forecasting models can provide valuable insights into the relationships between vital signs and the risk of septic shock . several studies have been undertaken to incorporate model-agnostic explainability by leveraging rule-based methods and argumentation . this paper explores the interpretability of vital sign forecasting models for patients with sepsis and septic shock condition in critical care settings. based on our knowledge, this work is one of the first to explore deep learning models to forecast the vital signs in the eicu dataset . further, we investigate the interpretability and explainability of patients' forecasted signals in conjunction with drug infusion. our goal is to contribute to developing more interpretable and trustworthy models for septic shock prediction, ultimately improving patient outcomes. early detection and treatment of sepsis and septic shock are critical for improving patient outcomes, as delays in intervention can lead to a rapid decline in a patient's condition. interpretability and explainability methods for time series forecasting models can provide valuable insights into the relationships between vital signs and the risk of septic shock. this paper explores the interpretability of vital sign forecasting models for patients with sepsis and septic shock condition in critical care settings. our goal is to contribute to developing more interpretable and trustworthy models for septic shock prediction, ultimately improving patient outcomes. by accurately forecasting the trend component, we aim to provide valuable support to clinicians in monitoring the vital signs trend of patients and making informed decisions regarding medication administration. our experiments focused on forecasting the mean blood pressure (mbp) of the patients diagnosed with sepsis or septic shock.additionally, we investigate the extracted trend of the forecasts obtained using the n-beats interpretable configuration and analyze cases where the forecasted trend does not align with the actual trend.1. we observe that, in several samples, a noticeable deviation between actual and forecasted trends occurs when drugs are administered after the training cut-off, as seen with patient id 261982, who experienced an increased mbp trend due to the administration of vasoactive drugs like norepinephrine and vasopressin. since the drug infusion information was an unobserved variable during the training of the forecasting model, the discrepancy in the actual and the forecasted trend can be attributed to the fact that the model is trained on historical vital sign data, which does not include the effects of drugs introduced after the training cut-off. figure1shows the observed mbp trend, the forecasted trend, the training cut-off, and the interval of drug infusion for a specific patient. furthermore, our findings suggest that cases where the actual and forecasted trends matched had a higher mortality rate (92%) compared to cases where the trends were dissimilar (84%). 
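One simple way to operationalize whether an observed and a forecasted vital-sign trend "match", as analyzed above, is to compare the signs of fitted slopes over the forecast horizon; the matching criterion and the toy numbers below are assumptions, not the paper's exact rule.

```python
# Fit a line to the observed and forecasted mean blood pressure over the
# forecast horizon and compare slope signs; a mismatch can flag cases where an
# unobserved intervention (e.g. a vasopressor infusion after the training
# cut-off) changed the trend the model could not anticipate.
import numpy as np

def trend_slope(series):
    t = np.arange(len(series))
    return np.polyfit(t, np.asarray(series, float), 1)[0]

def trends_match(actual, forecast) -> bool:
    return bool(np.sign(trend_slope(actual)) == np.sign(trend_slope(forecast)))

actual   = [78, 80, 83, 85, 88]   # MBP rising after a vasopressor is given
forecast = [78, 77, 76, 75, 74]   # model unaware of the infusion keeps it falling
print(trends_match(actual, forecast))   # False -> flag for drug-effect review
```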
conducting additional studies is crucial for understanding the causal inferences behind these interactions and better comprehending the relationship between medication administration, vital signs, and their subsequent effects.in conclusion, our study utilized the eicu dataset to evaluate the forecasting performance of the interpretable n-beats model, highlighting the significance of accounting for drug infusion's influence on trends in icu patients' vital signs. future research will focus on developing approaches that integrate drug infusion information and investigate drug-to-drug interactions within the icu context, aiming to enhance the overall performance of forecasting models in critical care. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/932.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/932.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ad572c94c6a501fbc1c446293720be99f30839e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/932.txt @@ -0,0 +1 @@ +insurance is "interestingly uninteresting". in this work, we argue that in fact insurance is far from uninteresting and indeed a rich source of inspiration and insight to scholarship interested in social issues surrounding machine learning, specifically the field now known as fair machine learning. our proposal is that insurance can be viewed as an analogon to machine learning with respect to these issues arising from the social situatedness. while machine learning is a relatively recent technology, debates regarding social issues in the context of insurance have been ongoing for a long time. thus, we argue that taking inspiration from studies of insurance can contribute to a more integrative view of machine learning systems as socio-technical systems (selbst et al., ).both machine learning and insurance are firmly based on a statistical, probabilistic mode of reasoningan actuarial mode. indeed, insurance can be viewed as the first commercial test of probability theory (gigerenzer et al., ; mcfall, ). insurance, a technology for doing risk, transforms uncertainty into calculable risk (lehtonen & van hoyweghen, ). the key idea is to share the risk of a loss in a collective, organized through an abstract mutuality; due to the 'law' of large numbers, uncertainty thus becomes manageable and the effect of chance can be offset (ewald, ). in this way, insurance creates a "community of fate" in the face of uncertainty (heimer, ). to enter into this community (the insurance pool), the insurer demands a certain fee, called premium, from the policyholder.in insurance, questions of fairness inevitably arise, and have been the subject of much debate. the central point of debate is the tension between risk assessment and distribution (abraham, ). in other words, who is to be mutualized in the pool. some form of segmentation is found in many insurantial arrangements: the pool of policyholders can be stratified by separating high and low risk individuals. but the specific nature that such segmentation mcfall et al. 
( ) call insurance "interestingly uninteresting", referring to how insurance is "hugely underresearched" given its societal importance, which is typically not recognized (ewald, ).takes typically depends not only on risk assessment, but on further considerations such as assignment of responsibility, modulated by social context; in this way, insurance is not a neutral technology (baker & simon, ; glenn, a).our non-comprehensive outline of the history of insurance illustrates how uncertainty, fairness and responsibility interact, and can be entangled and disentangled. from this background, we can extract conceptual insights which also apply to machine learning. the tension between risk assessment and distribution is mirrored in formal fairness principles: solidarity, which can be linked to independence in fair machine learning, contrasts with actuarial fairness, linked to calibration. briefly, actuarial fairness demands that each policyholder should pay only for their own risk, that is, mutualization should occur only between individuals with the same 'true' risk. in contrast, solidarity calls for equal contribution to the pool. on one level of this text, we problematize actuarial fairness (by extension, calibration) as a notion of fairness in the normative sense by taking inspiration from insurance. this perspective is aligned with recent proposals that stress the discrepancy of formal algorithmic fairness and "substantive" fairness (green, ), which some prefer to call justice (vredenburgh, ). parallel to this runs a distinct textual level, where we emphasize two intricately interacting themes: responsibility and tensions between aggregate and individual. both entail criticism of actuarial fairness, but we suggest that they additionally provide much broader, fruitful lessons for machine learning from insurance.at the highest level of abstraction, our goal is to establish a general conceptual bridge between insurance and machine learning. traversing this bridge, machine learning scholars can obtain new perspectives on the social situatedness of a probabilistic, statistical technology -we attempt to offer a new 'cognitive toolkit' for thinking about the social situatedness of machine learning. our point of view is that fairness cannot be reduced to a formal, mathematical issue, but that it requires taking broader social context into account, reasoning for instance about responsibility. and for this, we suggest, insurance is an insightful analogon. therefore, our objective is to furnish the reader with a guide that charts the landscape of insurance with respect to social issues and to establish links to machine learning.on a formal level, we use the following analogy. in a machine learning task, we are given some features x and associated outcomes y , which we attempt to approximate by predictions ŷ . the structural relation to insurance is established by conceiving of x as the features of policyholders (e.g. age, gender) with outcomes y (e.g. having an accident or not), and the task is to set a corresponding premium ŷ . in this work, we argue that in fact insurance is far from uninteresting and indeed a rich source of inspiration and insight to scholarship interested in social issues surrounding machine learning, specifically the field now known as fair machine learning. 
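The formal premium analogy given above (features x, outcomes y, premium ŷ) can be made concrete for the two fairness notions contrasted in this extract: independence (solidarity-like) versus calibration (actuarial-fairness-like). The sketch below uses synthetic data and a made-up premium rule; none of it comes from the paper.

# Illustrative sketch: independence (premiums should not depend on group membership)
# versus calibration (premiums should track realized risk within each group).
# Data are synthetic and the premium rule is a hypothetical placeholder.
import numpy as np

rng = np.random.default_rng(0)
n = 10_000
group = rng.integers(0, 2, size=n)                 # sensitive attribute (two groups)
risk = np.where(group == 1, 0.15, 0.10)            # 'true' loss probability per group
outcome = rng.binomial(1, risk)                    # y: loss occurs or not
premium = 0.12 + 0.02 * group                      # yhat: a hypothetical premium rule

# Independence gap: difference in mean premium between groups (0 = solidarity-style parity).
independence_gap = abs(premium[group == 1].mean() - premium[group == 0].mean())

# Calibration gap: within each group, how far the premium is from the realized loss rate.
calibration_gap = max(
    abs(premium[group == g].mean() - outcome[group == g].mean()) for g in (0, 1)
)
print(f"independence gap: {independence_gap:.3f}, calibration gap: {calibration_gap:.3f}")
# Note the tension: this premium rule is roughly calibrated per group, yet violates independence.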
the tension between risk assessment and distribution is mirrored in formal fairness principles: solidarity, which can be linked to independence in fair machine learning, contrasts with actuarial fairness, linked to calibration. on one level of this text, we problematize actuarial fairness (by extension, calibration) as a notion of fairness in the normative sense by taking inspiration from insurance. both entail criticism of actuarial fairness, but we suggest that they additionally provide much broader, fruitful lessons for machine learning from insurance. fairness conceptions in insurance are contingent upon prevailing societal norms, particularly regarding responsibility, but concurrently insurance shapes the moral fabric of the society in which it is embedded(glenn, b; van hoyweghen et al. particularly interesting is also insurance in islamic law, which prohibits gambling and contracts based on usury: the morality of insurance is justified then by emphasizing the solidaristic nature of the arrangement(baker, ), in contrast to the view of insurance as a bilateral contract that is more prevalent in western societies., ), its modern formulation is due to arrow ( ). we now argue, in line with other social studies of insurance scholars(abraham, ; gaulding, ), that the variant of this question which is relevant for insurance and machine learning purposes is in fact fundamentally normative in character. in the limit, the distinction between group-based approaches to fairness and individual fairness (in the sense of the machine learning literature(dwork et al. on the other hand, aggregates are central to the workings of machine learning: they appear in the input data due to categorization processes; second, the fairness of machine learning systems is typically evaluated based on groups (with the exception of individual fairness, see below); third, machine learning in general, whether fairness-unaware or not, rests on aggregate criteria such as average training error. in contrast, the allure of perfect actuarial fairness associated with the personalization of risk, driven by big data and machine learning, is that it is supposedly individually fair -the goal being 'segments of one' and setting the premium as e. for instance, the problems of dataset shift and model ambiguity have been recognized in insurance as well as machine learning; for contributions from insurance see e. recent impossibility theorems in the fair machine learning literature(kleinberg et al.what then is the relevance of performativity for insurance, and by analogy, machine learning? actuarial fairness (calibration), or more broadly the fairness of 'accurate' statistical methods, carries with it an aura of objectivity and neutrality. yet against a background of such past injustice, it is not clear why actuarial fairness should be considered as a principle of justice -this argument has been made both in the insurance(daniels, ; lehtonen & liukko, ; barry, )as well as the machine learning literature(mitchell et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/933.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/933.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ad16d8adf7f99c43bfc4b6a9fd53279cc245e43 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/933.txt @@ -0,0 +1 @@ +in the educational context, question generation (qg) can potentially automate and assist the teacher in what can be a time-consuming and eortful task. 
qg may also be helpful for the learner's formative assessment via self-study and engagement with computer-generated practice questions. however, automatic qg tools are not widely used in classrooms , namely because generated questions are generally limited in types and difficulty levels . as pointed out by wang et al. , there is a strong desire for user control, where humans provide input to qg systems and can decide when to use their output. inspired by this need, this study proposes a qg framework for controlling the generation of explicit and implicit questions, using question explicitness as a guidance attribute during the generation process. generally, explicit questions center on a particular story fact, whereas implicit questions rely on summarizing and drawing inferences from implicit information in the text. as stated by xu et al. , explicit and implicit questions are formally defined as follows: -explicit questions ask for answers that can be directly found in the stories. in other words, the source of the answer is a span of text. -implicit questions ask for answers that cannot be directly found in the text. answering the questions requires either reformulating language or making inferences. in other words, the answer source is free-form, meaning that the answers can be any free text, and there is no limit to where the answer comes from. notably, prior research suggests that a combination of explicit and implicit questions contributes to a more balanced difficulty in the assessments. to achieve our goal, we use a recent dataset called fairytaleqa , which contains question-answering (qa) pairs derived from child-friendly stories. each question is categorized as explicit or implicit by expert annotators. some previous work has addressed controllability in educational qg. for instance, ghanem et al. control the reading comprehension skills required by the question, e.g., figurative language and summarization. similarly, zhao et al. control the narrative elements underlying the generated questions, such as causal relationship, outcome resolution, or prediction. they use the same dataset as this study, fairytaleqa, where each question, beyond explicitness, is also categorized according to the referred narrative elements. there is a strong desire for user control, where humans provide input to qg systems and can decide when to use their output. inspired by this need, this study proposes a qg framework for controlling the generation of explicit and implicit questions, using question explicitness as a guidance attribute during the generation process. to control the explicitness of the generated questions, we prepend a special token followed by the explicit or implicit attribute at the beginning of the input, before the story text. controlled test set: for assessing the effectiveness of controllability along models d-f, we have prepared a reorganized version of the original test set which we call the controlled test: each example includes a section and all ground-truth qa pairs regarding that section, where these qa pairs belong to one explicitness type (explicit or implicit) and narrative element. according to the authors, this situation is expected since the answers to explicit questions can be directly found in the text. we use the qa model (obtained in setup a) for answering the generated questions from models d and f. then, the answers obtained from the qa model are compared against the answers generated from models d and f, yielding the reported results. 
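The control mechanism described above prepends a special token and an explicitness attribute to the story text before it is passed to the question generation model. A minimal sketch of how such an input might be assembled is given below; the token spellings ("<attribute>", "<narrative>") and the example values are placeholders assumed for illustration, not necessarily the exact tokens used in the paper.

# Sketch of the control-prompt construction: prepend an explicitness attribute
# (and optionally a narrative element, as in a model that takes both controls)
# to the story text before feeding it to a seq2seq QG model.
from typing import Optional

def build_qg_input(story: str, explicitness: str, narrative_element: Optional[str] = None) -> str:
    assert explicitness in {"explicit", "implicit"}
    parts = [f"<attribute> {explicitness}"]          # assumed control token, not the paper's exact one
    if narrative_element is not None:                # e.g. a model receiving both control signals
        parts.append(f"<narrative> {narrative_element}")
    parts.append(story)
    return " ".join(parts)

story = "once upon a time, a fox found a bunch of grapes hanging high on a vine..."
print(build_qg_input(story, "implicit", "causal relationship"))
# -> "<attribute> implicit <narrative> causal relationship once upon a time, ..."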
for both evaluation metrics, the qa model performs significantly better on explicit than implicit generated questions (confirming our hypothesis). we find no significant differences in the qg scores obtained by model d compared to c, which can be explained as follows: controlling the question's explicitness has more influence on the type of answer required to respond to the generated question than on the syntax of that generated question. this can be explained as follows: controlling the question's narrative elements strongly influences the syntax of the generated questions. for instance, we empirically observe that when requesting the model to generate questions about the causal relationship element, it generates (in many cases) questions starting with why did. finally, it should be noted that model f (which receives both explicitness and narrative controllability prompts) is shown to be effective for simultaneously controlling the question's explicitness and the question's narrative elements. through automatic evaluation, the results show preliminary evidence that it is possible to (1) control the question's explicitness and (2) simultaneously control both the question's explicitness and the question's narrative elements. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/934.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/934.txt new file mode 100644 index 0000000000000000000000000000000000000000..7b197b89f987c7fc519bb8fb064669e2c6069909 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/934.txt @@ -0,0 +1 @@ +technology plays a crucial role in people's lives, influencing several aspects of modern society, such as work, education, politics, and leisure; therefore, if software engineering does not strive to be inclusive in all its facets (i.e., education, research, and industry), software products might unintentionally constrain groups of users - . currently, discussions on equity, diversity, and inclusion (edi) are gradually increasing in many fields , . however, in software engineering, this debate is evolving slowly . it seems counter-intuitive, but the area responsible for creating innovative software solutions for billions of users worldwide does not reflect the diversity of the society it serves , e.g., algorithms are racist , , , technical forums are sexist , and the software industry is not welcoming to underrepresented groups , . if we analyze the discrimination disseminated by software products through the lens of racism, we can identify several examples of what is being defined as algorithmic racism , . algorithmic racism occurs when data-driven software systems produce outcomes that reproduce racial disparities, usually against black people , , , creating disadvantages in several contexts, such as housing policy, credit terms, and law enforcement. in particular, racism resulting from algorithms has intensified with the use of technology based on machine learning and artificial intelligence . since software professionals have an essential role in creating technologies, in this study, we explore how these professionals understand and perceive algorithmic racism, guided by the following research question: what do software development professionals know about algorithmic racism? from this introduction, our study is organized as follows. in section ii, we present a literature review on algorithmic racism. in section iii, we describe how we conducted the survey. 
in section iv, we present our findings, which are discussed in section v. finally, in section vi we present our conclusions and directions for future research.since software professionals have an essential role in creating technologies, in this study, we explore how these professionals understand and perceive algorithmic racism guided by the following research question:.although the effects of algorithmic biases have affected black people for years, the discussions on algorithmic racism began to increase only recently, especially with the increase in the discussions on software fairness-and when cases of racism resulting from software systems started to be reported by the media-.outside of software engineering, researchers from other areas, such as health, social sciences, and education, claim that the software industry needs to take responsibility for the effects of algorithmic racism in people's lives,-and start to raise discussions about the risks of biased algorithms to our society,,-.regarding the role that professionals have in software development, all designers and software managers in the sample agree that algorithms can discriminate against users.• bad experience: seven participants understand that algorithmic racism happens when systems end up prioritizing a group of people over another or when non-inclusive design produces software interactions that mistreat users and keep them from having a good experience, for instance, when the system does not offer options that match the needs of black people.there are a few similarities between what has been discussed in the literature over the years and the perceptions of software professionals about the theme, as both researchers and practitioners reported harms caused by algorithmic racism regarding how systems deal with images (e. looking at social aspects, our study revealed that among software professionals, software designers are more likely to be aware of algorithmic racism. therefore, improving software team diversity and increasing racism awareness among software professionals will likely produce long-term results. in particular, we need the software industry to engage software engineers in discussions about software fairness and its effects on technology. in addition, computer science, software engineering, and other technology courses play an essential role in training students on software fairness, helping them become professionals who understand their responsibility in developing unbiased algorithms that produce a fair experience for people. however, addressing this problem requires software companies to explore not just technical aspects of software development but also work towards improving awareness of structural racism among professionals and increasing the diversity of software teams.• provide training to software professionals about the impact of technology on society, including structural racism and how it can affect algorithms and software.• increase diversity in software teams by having professionals from different backgrounds working on software solutions, as various experiences increase the perception on biases and discrimination. 
we obtained answers from a diverse sample of 73 software professionals that discussed examples of algorithmic racism, commented on their experiences about it, and suggested solutions on how to address the problem.since the role of software engineering is to provide an equal and fair experience to users, the software industry needs to start addressing racism, not only by finding solutions for the algorithmic biases but also by fostering opportunities for professionals from underrepresented groups as a strategy to incorporate different perspectives into software products and improve software fairness. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/935.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/935.txt new file mode 100644 index 0000000000000000000000000000000000000000..b32e2a85dbc4f727b4a68f5ed6913889eaa66551 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/935.txt @@ -0,0 +1 @@ +a counterfactual distribution is the probability distribution of a random variable under a hypothetical scenario that differs from the observed reality. "what would have been the outcome for this individual if they had received a different treatment?" is an example of a counterfactual question. here the personal data of the individual constitute the evidence that specifies the observed reality, and the interest lies in the distribution of the outcome under a hypothetical treatment.counterfactual questions belong to the third and highest level in the causal hierarchy (shpitser and pearl, 2008) and are in general more difficult than associational (first level) or interventional (second level) questions. algorithms for checking the identifiability of counterfactual queries from observational and experimental data have been developed (shpitser and pearl, 2007;shpitser and sherman, 2018;correa et al., 2021) and implemented (tikka, 2023). in many practical cases, the queries may be non-identifiable (wu et al., 2019).counterfactuals are often linked with questions about fairness, guilt, and responsibility. notably, the fairness of prediction models has become a major concern in automated decision-making (wachter et al., 2018;pessach and shmueli, 2022) where the requirement for fairness often arises directly from the legislation. a classic example of the fairness requirement in decision-making is credit-scoring where the bank's decision to lend must not depend on sensitive variables such as gender or ethnicity (bartlett et al., 2022).several definitions and measures of fairness have been proposed (mehrabi et al., 2021;caton and haas, 2023;carey and wu, 2022). among these proposals, counterfactual definitions of fairness (kusner et al., 2017;nabi and shpitser, 2018;chiappa, 2019;wu et al., 2019;richens et al., 2022) are intuitively appealing as they compare whether an individual decision would remain the same in a hypothetical counterfactual world. note that the term "counterfactual explanations" (guidotti, 2022) is sometimes used in the literature on explainable artificial intelligence (xai) and interpretable machine learning (burkart and huber, 2021) in contexts where the term "contrastive explanations" proposed by karimi et al. (2021) would be more appropriate.we consider the problem of simulating observations from a specified counterfactual distribution. given a structural causal model (scm) and a counterfactual of interest, the counterfactual distribution can be derived in three steps (pearl, 2009). 
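The three steps are spelled out next. As a point of reference, here is a minimal sketch of them for a toy additive-noise SCM where the abduction step is exact; the algorithm described in this extract targets the much harder case with shared background variables and continuous evidence. All structural equations and numbers in the sketch are illustrative assumptions.

# Minimal sketch of the three counterfactual steps for a toy additive-noise SCM
# (not the particle-filter algorithm of the extract, where abduction is approximate).
# SCM: x = u_x ; y = 2*x + u_y, with independent background variables u_x, u_y.
# Evidence: x = 1, y = 3. Query: value of y had x been set to 0 (do(x = 0)).

x_obs, y_obs = 1.0, 3.0

# step 1 (abduction): with additive errors, the background values are recovered
# exactly by inverting the structural equations given the evidence.
u_x = x_obs                    # from x = u_x
u_y = y_obs - 2.0 * x_obs      # from y = 2*x + u_y

# step 2 (action): replace the equation for x with the intervention do(x = 0).
x_cf = 0.0

# step 3 (prediction): push the recovered background values through the modified model.
y_cf = 2.0 * x_cf + u_y
print(y_cf)   # 1.0 -> counterfactual outcome for this individual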
first, the distribution of the latent background variables is updated given the evidence expressed as a set of conditions on the observed variables. second, the causal model is modified according to the hypothetical intervention. third, the counterfactual distribution of the target variable is calculated in the model that represents the counterfactual scenario under the updated distribution. these three steps require the full knowledge of the causal model in the functional form, i.e. the structural equations and the distributions of the background variables must be known.the problem of simulating counterfactuals is similar to the problem of simulating observations from a given distribution. the challenges are related to the first step of counterfactual inference which requires determining a multivariate conditional distribution. this can be done analytically only in special cases where, for instance, all the variables are either discrete or normally distributed. evidence that includes continuous variables leads to a conditional distribution concentrated on a manifold, which is generally difficult to simulate.in this paper, we present an algorithm for simulating counterfactual distributions. the algorithm is applicable in settings where the causal model is fully known in a parametric form and the structural equations for continuous variables have an additive error term or, more generally, are strictly monotonic with respect to an error term. the algorithm is essentially tuningfree. unlike karimi et al. (2021) and javaloy et al. (2023), we allow the scm to contain background variables that affect two or more observed variables, which makes it significantly harder to simulate the counterfactual distribution.the algorithm processes the conditioning variables one by one in a topological order and obtains an approximate sample from the updated distribution of the background variables (step 1). the challenge of continuous conditioning variables is overcome by using binary search to find solutions that satisfy the conditions and then applying sequential monte carlo to calibrate the distribution of these solutions. discrete conditioning variables are processed by resampling observations that satisfy the condition. next, the causal model is modified (step 2) and a sample from the counterfactual distribution is acquired by simulating the modified causal model with the obtained sample of the background variables (step 3).we show that the conditional simulation at step 1 can be interpreted as a particle filter/sequential monte carlo (e.g., gordon et al., 1993;doucet et al., 2000;del moral, 2004;cappé et al., 2005;chopin and papaspiliopoulos, 2020). theoretical results from the particle filter literature guarantee good asymptotic properties of the sample. in particular, we state a mean square error convergence rate and a central limit theorem for samples obtained with the proposed algorithm.in real-world applications, the full knowledge of the causal model may not be available. despite this serious restriction, we argue that the simulationbased counterfactual inference may still have its role in fairness evaluation. consider a prediction model that is used for decision-making in a situation where the underlying causal model is unknown. to ensure fairness in this situation, the prediction model should be fair under any reasonable causal model. this implies that an analyst evaluating the fairness of the prediction model could choose some reasonable causal models for the evaluation. 
deviations from fairness under any of these causal models indicate that the prediction model is not fair in general. we use the counterfactual simulation algorithm as the main component in a fairness evaluation algorithm for prediction models. we simulate data from specified counterfactual distributions and compare the predictions for these settings. we demonstrate with a credit-scoring example how the fairness evaluation algorithm can be applied to opaque ai models without access to real data. as a result, we find out if an ai model is fair in the tested setting, and if not, learn how large the differences are in the outcome under different counterfactual interventions. the rest of the paper is organized as follows. the notation and the basic definitions are given in section 2. in section 3, the counterfactual simulation algorithm and the fairness evaluation algorithm are introduced, and the conditional simulation is presented as a particle filter. in section 4, the performance of the simulation algorithm is tested in benchmark cases where the counterfactual distributions can be derived analytically. in section 5, the fairness evaluation algorithm is applied to a credit-scoring example. section 6 concludes the paper. (3) f is a set of functions {f_v | v ∈ v} such that each f_v is a mapping from u ∪ (v \ {v}) to v and such that f forms a mapping from u to v. data can be simulated from an scm by generating the values u of the background variables u from the distribution p(u) and then applying the functions f in the topological order to obtain the values v of v. more specifically, the probability distribution of a set of observed variables w ⊂ v \ x in the submodel m_do(x=x), conditional on a set of observations (the evidence) c = c such that c, x ⊆ v, can be written as p(w_do(x=x) = w | c = c) = Σ_u p(w_do(x=x) = w | u) p(u | c = c), i.e. by averaging the submodel distribution over the updated background distribution. the starting point of counterfactual simulation is an scm m = (v, u, f, p(u)) where the functions f and the distribution p(u) are fully known, and the objective is to simulate values from the counterfactual distribution p(w_do(x=x) = w | c = c). in an scm m = (v, u, f, p(u)), a background variable u ∈ u is a dedicated error term if it is independent of the other background variables and has exactly one child. with this definition, the background variables can be divided into two categories: the set of dedicated error terms and the set of other background variables, which we call global background variables henceforth; their union is u. further, let u_c and u_{v\c} represent the dedicated error terms of the variables in c and in v \ c, respectively. cases where g_w would be a strictly decreasing function can be accommodated in definition 6 by redefining u_w^new = -u_w and g_w^new(u) = g_w(-u). lemma 1 states that when w = w, the value of the dedicated error term u_w is determined by the value w and the values of the other parents of w via the function g_w^(-1)(w; pa•(w)). the algorithm has three steps that correspond to the three steps of the evaluation of counterfactuals. algorithm 3 simulates n observations from a u-monotonic scm m = (v, u, f, p(u)) with respect to all continuous variables in c. at each conditioning variable j, the conditional-simulation step calculates the normalized weights w̄_j^i = w_j^i / Σ_{k=1..n} w_j^k with w_j^i = g_j(z_{j-1}^i, z_j^i) for i = 1:n, draws ancestor indices a_j^{1:n} ∼ categorical(w̄_j^{1:n}), sets z_j^i = (z_{j-1}^{a_j^i}, z_j^i) for i = 1:n, and outputs z_j^{1:n}, which consists of (approximate and dependent) samples from the probability distribution π_j(b) = γ_j(b)/γ_j(z_j), where b ⊆ z_j and the unnormalised 'smoothing distribution' γ_j is built from the potential functions described next. 
the potential functions for algorithm 1 are ξ_{v_j}(u_j)/g'_{v_j}(u_j; pa•(v_j)), which according to lemma 1 give the conditional density of the corresponding conditioning variable. (figure caption fragment: the first panel shows the scm parameters — the number of observed variables |v|, the number of conditions, the expected number of neighbors for the observed variables, and the expected number of global background variables per observed variable.) in the benchmark example, u = {u_z, u_x, u_y} are unobserved background variables and z, x and y are observed variables. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/936.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/936.txt new file mode 100644 index 0000000000000000000000000000000000000000..3bab6a1dc62112924f30450bbbc6ebe6704e0789 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/936.txt @@ -0,0 +1 @@ +understanding the relationship between the composition of a research team and the potential impact of their research papers is crucial as it can lead to the development of science policies and best practices to drive innovation forward. a commonly examined characteristic is the diversity of the team across a number of dimensions, such as ethnicity (alshebli et al., 2018), gender (nielsen et al., 2017), disciplinary backgrounds (uzzi et al., 2013), team size (wu et al., 2019), and others. less attention has been paid to the expertise diversity of the researchers. however, over the past few years, there has been a growing emphasis among funding agencies, scientific journals, and government institutions on the importance of interdisciplinary approaches and collaboration between scientific fields. while the present landscape may lead researchers to become overly specialized in narrow fields of study, the scientific community aspires to unite in its efforts to address extensive societal challenges, such as climate change, poverty, disease, inequality, and the imperative for sustainable development. by their very nature, these challenges demand complex and multifaceted solutions that necessitate the integration of diverse expertise. the cross-pollination of ideas from different areas of expertise may also break down traditional silos between disciplines and uncover unexpected insights that can drive new discoveries. in this paper, we present a scientometric analysis in which we assess whether a diverse pool of expertise within a research team can influence their scientific impact, measured as the number of citations received by the resulting research papers in the upcoming 5 years. the analysis was performed on 114,203 computer science papers from the academia/industry dynamics (aida) knowledge graph, published within the 2010-2015 timeframe. to assess the diversity of a team, we characterise a researcher's expertise as the distribution of research topics of their papers in the previous 5 years. to this purpose, we leverage the computer science ontology, which consists of 14k topics and provides a more fine-grained representation compared to the generic disciplines provided by typical scholarly datasets such as scopus and web of science. we then computed the pairwise cosine similarity between each couple of authors in a paper and defined two metrics as proxies for diversity of expertise: 1) the maximum value of cosine distance between the authors, and 2) the number of connected components obtained when linking authors according to a similarity threshold. 
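A sketch of the two expertise-diversity proxies just defined, computed from per-author topic-distribution vectors: the maximum pairwise cosine distance and the number of connected components under a similarity threshold. The threshold value (0.5) and the use of scipy/networkx are illustrative choices, not taken from the paper.

# Sketch of the two diversity proxies: max pairwise cosine distance and the
# number of expertise components when similar authors are linked together.
import numpy as np
import networkx as nx
from scipy.spatial.distance import cosine

def diversity_metrics(author_vectors: list, sim_threshold: float = 0.5):
    n = len(author_vectors)
    max_distance = 0.0
    graph = nx.Graph()
    graph.add_nodes_from(range(n))
    for i in range(n):
        for j in range(i + 1, n):
            dist = cosine(author_vectors[i], author_vectors[j])   # 1 - cosine similarity
            max_distance = max(max_distance, dist)
            if 1.0 - dist >= sim_threshold:                        # similar enough -> link authors
                graph.add_edge(i, j)
    return max_distance, nx.number_connected_components(graph)

# toy team of three authors over four topics
team = [np.array([0.7, 0.2, 0.1, 0.0]),
        np.array([0.6, 0.3, 0.1, 0.0]),
        np.array([0.0, 0.1, 0.2, 0.7])]
print(diversity_metrics(team))   # roughly (0.93, 2): two expertise clusters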
the results show that both diversity metrics are significantly associated with the number of citations at five years. in other words, research papers authored by a research team with a wide set of skills and expertise tend to have a higher impact than the ones authored by more homogeneous teams. the remainder of the paper is organised as follows. section 2 provides an overview of the state of the art, while section 3 outlines the materials and methodologies employed in the study. the findings are presented in section 4. section 5 concludes the paper by summarizing the main insights and outlining future directions.understanding the relationship between the composition of a research team and the potential impact of their research papers is crucial as it can lead to the development of science policies and best practices to drive innovation forward. however, over the past few years, there has been a growing emphasis among funding agencies, scientific journals, and government institutions on the importance of interdisciplinary approaches and collaboration between scientific fields. by their very nature, these challenges demand complex and multifaced solutions that necessitate the integration of diverse expertise. in this paper, we present a scientometric analysis in which we assess whether a diverse pool of expertise within a research team can influence their scientific impact, measured as the number of citations received by the resulting research papers in the upcoming 5 years. to assess the diversity of a team, we characterise a researcher's expertise as the distribution of research topics of their paper in the previous 5 years.to analyse whether expertise diversity is related to the number of citations, we selected 114,203 research publications fulfilling four constraints: i) they were published between 2010 and 2015, ii) they reached at least 2 citations in the following five years, iii) they were authored by at least two authors, and iv) each author had at least one publication in the five years prior the paper under analysis. the first is the maximum value of the cosine distances computed on each couple of authors in the research team. specifically, for a given research paper, we computed the cosine distance between each couple of authors based on their top-10 topics, generating a distribution of (n × (n -1))/2 values. in order to produce a more granular metric that would reflect the different components in the team (two in the previous example), we clustered authors according to their expertise and counted the resulting number of subgroups. finally, based on the number of extracted components, we characterised the paper's diversity of expertise as: i) low, with 1 or 2 components, ii) moderate, with 3 or 4 components, iii) high, with 5 or 6 components, or iv) very high, from 7 components upward.in order to assess if the expertise diversity of the authors of a paper is significantly associated with the number of citations in the following five years, we studied the distribution of the two previously described metrics across the 10 buckets.in this study, we investigated whether the diversity of expertise in a research team can influence their scientific impact, measured as the number of citations received by the resulting papers in the upcoming 5 years. to this purpose, we represented the researcher's expertise according to a set of topics drawn from a fine-grained taxonomy of 14k research topics in the field of computer science. 
we then assessed the expertise diversity of a team by means of two metrics: i) the maximum cosine distance between the authors, and ii) the number of connected components obtained when linking authors according to a similarity threshold. our experiments on a set of 114k papers show that both diversity metrics are significantly associated with the number of citations at five years. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/937.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/937.txt new file mode 100644 index 0000000000000000000000000000000000000000..b3d8facc2e0bd285c0119d41e06340510e5eb874 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/937.txt @@ -0,0 +1 @@ +when planning ai implementations, aec practitioners often start by identifying the business needs that ai should address. these business needs, such as reducing maintenance costs for highways or speeding up projects by 20%, are the necessities that the project needs to address and thus drive the ai implementation and ultimately guide what the ai should aim to solve.however, ai cannot directly solve these business needs. the ai problems need to be framed in an entirely different way as it concerns itself with learning a mathematical function from data. bridging this gap between high-level business objectives and detailed properties of the implementation is often the key challenge to creating a successful ai project .in our study, we found that aec practitioners often use a traditional method of planning ai implementations figure 1 comparison of traditional and leanai method to plan ai implementations that does not clearly link high-level business objectives with detailed implementation properties (as shown on the left in figure 1). practitioners fail to distinguish between what ai should solve, what it can solve, and what it will solve, treating these categories as if they are interchangeable. while aec practitioners may have a clear understanding of the pressing business needs and may wish that ai should solve certain problems, it is not always possible for ai to address these needs directly. instead, ai can only solve a small part of the problem when properly formulated, with a direct or indirect impact on the business need. additionally, just because ai has the capability to solve a problem does not guarantee that it will be able to do so for the practitioner's use case, as it depends on the data and labels available with them.in fact, over 70% of the projects we observed either did not define a clear problem statement for ai or were unclear about it (e.g., only defining broad business objectives). a similar phenomenon is noted in , which states that many ai projects fail because they do not specify the exact problem that ai can solve and expect it to do everything. notes that not defining the problem or success metric for an ai project is a sure way to waste time and money.the leanai method (as shown on the right in figure 1) clearly delineates what ai should solve, what it can solve, and what it will solve. this approach thus forces practitioners to clearly articulate these components clearly in the planning process itself by involving the relevant stakeholders (e.g., it team who know about data and labels). 
our data suggests that this approach, at least in part, ensures more robust planning of ai implementations.the following paragraphs will describe the individual elements of the leanai method and provide steps for practitioners on how to use the method. we will also illustrate the use of the method through an example case study from our dataset. the leanai method delineates what ai should solve, what it can solve, and what it will solve, forcing practitioners to clearly articulate and align these components early in the planning process itself by involving the relevant stakeholders.the leanai method is built using an ethnographicaction research methodology, where the researchers helped aec practitioners implement ai projects while simultaneously observing the challenges in ai implementation and formulating solutions.using above observations and hands-on experience of building ai projects with aec professionals, the authors identified key components crucial to addressing the disconnect between ai planning and implementation. these included clearly defining the business needs the ai solution will address, outlining a specific problem statement for the ai to solve, identifying the required data and ai methods, and establishing metrics to measure the ai's performance.when planning ai implementations, aec practitioners often start by identifying the business needs that ai should address. these business needs, such as reducing maintenance costs for highways or speeding up projects by 20%, are the necessities that the project needs to address and thus drive the ai implementation and ultimately guide what the ai should aim to solve.in our study, we found that aec practitioners often use a traditional method of planning ai implementations figure1comparison of traditional and leanai method to plan ai implementations that does not clearly link high-level business objectives with detailed implementation properties (as shown on the left in figure1). while aec practitioners may have a clear understanding of the pressing business needs and may wish that ai should solve certain problems, it is not always possible for ai to address these needs directly.available data, labels, and algorithms (capability that ai will require): many contemporary ai and ml algorithms rely heavily on a large amount of data and labels for optimal performance.step-2: formulate multiple problem statements to address the business need: once the specific and precise business need is identified, practitioners should brainstorm multiple ways in which ai can potentially address it.step-3: evaluating if ai will be able to solve the formulated problem statements: in the next step, practitioners must determine if they possess the necessary data, labels, and expertise to construct an ai algorithm that can solve the formulated problem.step-4: defining the metrics to track how well ai did: next, practitioners need to define both the ai metric and the business metric and create a link between them. practitioners need to continuously iterate and improve depending on how well the ai algorithm is performing on the ai and business metrics.in summary, using the leanai method in this case study, practitioners were able to clearly identify a use case for ai that had significant business impact and could be addressed with the available resources within the company, something that was difficult to achieve with the traditional method of planning ai implementations. 
however, for a wide scale adoption of ai, it is crucial that practitioners have a clear understanding of what ai should solve, what it can solve, and what it will solve. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/938.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/938.txt new file mode 100644 index 0000000000000000000000000000000000000000..8950914dc11200e7b2faa1b434a4fd8bf25507c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/938.txt @@ -0,0 +1 @@ +corresponding with the advances in artificial intelligence (ai) technology and the wider adoption of ai technologies by practitioners, automated decision systems (ads) have begun to play an increasingly substantial role in assisting or making important decisions affecting human lives. such decisions assisted by ads include criminal recidivism risk assessment , welfare fraud risk scoring , biometric recognition in law enforcement , employment decisions , and visa application decisions . such uses of ads are not free from issues such as bias and discrimination, and the referenced works include discussions on why those ai-based systems may be problematic.the problematic applications of ai do not necessarily imply that all uses of ads should be avoided. on the contrary, if their accountability is ensured, such systems may improve efficiency and effectiveness in many decision-making tasks. for instance, a systematic review of more than 50 papers found that majority of ai-enabled decision support systems improve patient safety outcomes in healthcare settings . however, the same study notes the lack of standardized benchmarks and homogeneous ai reporting. to this end, frameworks such as the system accountability benchmark aim to improve the standardization of ai accountability assessment and reporting within an exhaustive scheme. there are also legal and regulatory efforts to ensure accountability of ads, mainly in the us , the eu , and the uk .fairness is concerned with unjust outcomes for individuals or groups. individual fairness postulates that similar persons should receive similar outcomes . group fairness, on the other hand, is concerned with eliminating unjust outcomes based on sensitive group membership . group fairness has been receiving increasing attention from researchers, practitioners, and legislators as many ai systems may exhibit bias based on race , gender , age , disability status , political orientation , and religion . this paper concentrates on group fairness.there are multiple approaches and numerous notions and metrics for group fairness. these do not agree on a single fairness definition. this is so because fairness does not have a value-free definition, and different fairness approaches may adhere to different value principles. consequently, the plethora of fairness metrics in the literature makes it challenging for practitioners to choose among many incompatible alternatives. it may also enable a "cherry-picking" behavior. this work aims to unify major fairness approaches and notions in a general but unique fairness assessment methodology and operationalize it to facilitate practical and effective use in the real world. 
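The notion formalized in this extract (see the contributions listed below) compares confusion matrices across sensitive groups. A rough sketch of that kind of comparison is given here: per-group confusion counts, a chi-square test of independence, and a largest-gap summary. The specific test statistic and summary are plausible stand-ins assumed for illustration, not necessarily the paper's exact equal confusion test or confusion parity error.

# Illustrative sketch of comparing confusion matrices across sensitive groups.
import numpy as np
from scipy.stats import chi2_contingency

def group_confusion_counts(y_true, y_pred, group):
    """Return a dict: group value -> 2x2 confusion counts [[tn, fp], [fn, tp]]."""
    out = {}
    for g in np.unique(group):
        m = group == g
        t, p = y_true[m], y_pred[m]
        out[g] = np.array([[np.sum((t == 0) & (p == 0)), np.sum((t == 0) & (p == 1))],
                           [np.sum((t == 1) & (p == 0)), np.sum((t == 1) & (p == 1))]])
    return out

def equal_confusion_check(y_true, y_pred, group):
    counts = group_confusion_counts(y_true, y_pred, group)
    # Test: are the joint (true, predicted) outcome counts independent of the group?
    table = np.stack([c.ravel() for c in counts.values()])        # groups x 4 cells
    _chi2, p_value, _dof, _expected = chi2_contingency(table)
    # Summary: largest absolute difference between group-wise confusion rates.
    rates = {g: c / c.sum() for g, c in counts.items()}
    gs = list(rates)
    disparity = max(np.abs(rates[a] - rates[b]).max() for a in gs for b in gs)
    return p_value, disparity

rng = np.random.default_rng(1)
group = rng.integers(0, 2, 2000)
y_true = rng.binomial(1, 0.4, 2000)
y_pred = np.where(group == 1, rng.binomial(1, 0.6, 2000), y_true)  # predictor biased against group 1
print(equal_confusion_check(y_true, y_pred, group))                # small p-value -> unequal confusion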
the proposed methodology may also be employed to evaluate the group fairness elements included in larger accountability frameworks.the main contributions of this paper can be enumerated as follows.1) equal confusion fairness, a new group fairness notion, is introduced. 2) the proposed notion is operationalized by designing appropriate testing and measurement processes. a) an equal confusion test is designed to identify whether an ads exhibits unfair behavior. b) a confusion parity error is proposed to quantify the extent of unfairness exhibited by the system. c) an appropriate methodology for the post hoc analysis is presented to identify the impacted groups and characterize the specific unfair behavior. 3) a software program to assist with the analysis of equal confusion fairness is provided as an open-source tool. 1the rest of the work is structured as follows. section ii provides a comparative overview of the related work on group fairness. section iii presents the methods for the equal confusion test, confusion parity error, and the post hoc analysis. section iv demonstrates the applicability and usefulness of the proposed methods using a real-world dataset from an actual recidivism risk assessment tool that is employed in the us criminal justice system to assist judges in their decisionmaking. final remarks and directions for future research are provided in section v. among the 1,107 who are predicted as risky, only 346 recidivate hence a precision next, fairness studies are performed, and findings are reported for sex, race, and intersectional groups.• among predicted risky females, only 16% are actually recidivists compared to the same figure of 34% for males. • among predicted non-risky females, 93% are actual nonrecidivists, while the same figure goes down to 89% for males. • among actual recidivist females, only 35% are correctly predicted as risky compared to the same figure of 55% for males. • among actual non-recidivist females, 82% are correctly predicted as non-risky, while the same figure goes down to 76% for males. the first two findings reveal a disadvantageous position for females, whereas the last two findings indicate an advantageous position, compared to males. • among actual recidivist caucasians and hispanics, only 37% and 29%, respectively, are correctly predicted as risky compared to the same figure of 62% for african-americans. • among actual non-recidivist african-americans, 69% are incorrectly predicted as risky compared to 85% and 81% for caucasians and hispanics, respectively.• among predicted risky african-american males, 37% are actually recidivists, while the same figure drops to 26% for caucasian males, 15% for hispanic males, and 16% for caucasian females. precision is highest for african-american males and lowest for hispanic males and caucasian females. hence, the last two groups are more likely to be incorrectly predicted as risky than african-american males. • among predicted non-risky caucasian females, 95% are actual non-recidivists, while the same figure goes down to 90% for caucasian males and 86% for african-american males. • among actual recidivist caucasian females, caucasian males, and hispanic males, only 35%, 37%, and 33%, respectively, are correctly predicted as risky, while the same figure is 65% for african-american males. 
• among actual non-recidivist caucasian females and males, 87% and 84%, respectively, are correctly predicted as non-risky, while the same figure drops to 67% for african-american males.the first two findings indicate an advantageous position for african-american males and disadvantageous positions for caucasian females. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/939.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/939.txt new file mode 100644 index 0000000000000000000000000000000000000000..5cf575467d6ff289d54acb31c396f2412a781b45 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/939.txt @@ -0,0 +1 @@ +the rapid growth of digital technologies and the increasing reliance on data-driven decision-making have brought the issue of data privacy and protection to the forefront of global discussions. the african continent, with its diverse population and burgeoning digital economy, is no exception. recognizing the need for a comprehensive and unified approach to data privacy, the african union has developed the data privacy convention .the african union convention on cyber security and personal data protection (audpc), or the malabo convention, is a legal framework designed to harmonize data protection and privacy laws across africa . it was adopted in june 2014 during the 23rd ordinary session of the assembly of the african union in malabo, equatorial guinea . the convention addresses various aspects of data protection, including establishing data protection authorities, principles for processing personal data, and provisions for cross-border data transfers .the convention is designed to balance the rights of individuals to privacy and data protection with the legitimate interests of businesses, governments, and other organizations in processing personal data. it reflects the african union's commitment to creating a digital single market across the continent while safeguarding the data protection rights of its citizens .the primary objective of this paper is to examine the critical challenges faced by the african union and its member states in implementing the data privacy convention. it will explore various dimensions of these challenges, including legal and regulatory issues, technical and infrastructural constraints, capacity building and awareness, and harmonization and cross-border cooperation . furthermore, the paper aims to discuss the future directions required to address these challenges and ensure the successful implementation of the convention. this will involve identifying strategies to strengthen legal and regulatory frameworks, enhance technical and infrastructural capacities, foster capacity-building and awareness initiatives, and promote harmonization and cross-border cooperation.moreover, the paper aims to contribute to ongoing efforts to develop a more robust and unified approach to data protection across the african continent by comprehensively analyzing the challenges and future directions related to the audpc.the rest of the paper is organized as follows: section 2 provides a literature review, covering existing research and studies on data privacy and protection in the context of the african union's data privacy convention. section 3 discusses and compares the challenges in implementing the convention, focusing on legal and regulatory challenges, technical and infrastructural challenges, capacity building and awareness challenges, and harmonization and crossborder cooperation challenges. 
section 4 delves into future directions, examining strategies to overcome these challenges and ensure the convention's successful implementation. finally, section 5 presents the conclusion, summarizing the key findings and providing recommendations.the african union convention on cyber security and personal data protection (audpc), or the malabo convention, is a legal framework designed to harmonize data protection and privacy laws across africa. the convention addresses various aspects of data protection, including establishing data protection authorities, principles for processing personal data, and provisions for cross-border data transfers.the rest of the paper is organized as follows: section 2 provides a literature review, covering existing research and studies on data privacy and protection in the context of the african union's data privacy convention. the study analyzes kenya's 2018 data protection bill and the general data protection regulation (gdpr), highlighting loopholes that enable continued digital colonialism, such as historical privacy violations, limited sanctions, data concentration, lack of competition enforcement, and uninformed consent.african countries can overcome the legal and regulatory challenges they face in implementing the data privacy convention by addressing the diverse legal systems and traditions, the lack of comprehensive data protection laws, and the delicate balance between national security and data privacy.to address the data privacy implications of emerging technologies, countries must ensure that their data protection laws and regulations are flexible enough to accommodate technological advancements while still providing robust privacy protections. this section will explore the impact of limited resources for data protection authorities, the lack of public awareness about data privacy rights, and the need for capacity building in data privacy and protection.data protection authorities (dpas) are critical in enforcing data privacy laws and ensuring compliance with the data privacy convention.tackling capacity-building and awareness challenges such as resource scarcity for data protection authorities, inadequate public knowledge of data privacy rights, and the necessity for capacity development in data privacy and protection, will enable african nations to establish a conducive atmosphere for the effective execution of the data privacy convention, ultimately empowering citizens to assert their privacy rights.in addition, african countries should actively participate in international forums and initiatives related to data privacy, such as the global privacy assembly and the international conference of data protection and privacy commissioners.tackling the harmonization and cross-border cooperation obstacles, such as aligning data protection frameworks, creating mechanisms for cross-border data transfers, and partnering with international entities and global organizations, african nations can build a unified and efficient data privacy ecosystem that serves both their citizens and the wider global community. 
the magnitude of the "limited resources for dpas" challenge is justified by the crucial role that data protection authorities (dpas) play in enforcing data privacy laws and ensuring compliance with the data privacy convention.the key findings of this paper include the identification of diverse legal systems and traditions, the lack of comprehensive data protection laws, and the need to balance national security and data privacy as significant legal and regulatory challenges. capacity building and awareness challenges include limited resources for data protection authorities, lack of public awareness about data privacy rights, and the need for capacity building in data privacy and protection. • promote harmonization and cross-border cooperation by engaging in dialogue and collaboration to align data protection frameworks, establish mechanisms for cross-border data transfers, and participate in international forums and initiatives related to data privacy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/94.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/94.txt new file mode 100644 index 0000000000000000000000000000000000000000..535f747367568ae45b567d355e14656058a669eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/94.txt @@ -0,0 +1 @@ +to make sense of the complexities of reality, and make optimal decisions accordingly, organisations and researchers have always striven to come up with models that can accurately represent observed phenomena (e.g. consumption behaviour, loan defaults). in the past, these models were defined by the analyst and calibrated to (small) data. recently, however, during the so-called machine learning revolution, the focus shifted to a more data-driven, algorithmic approach. machine learning algorithms now search for the optimal model by finding support for it in the data instead of being chosen by the analyst. this approach has increased the collection of, investment in, demand for, reliance on, and value of data for organisations and research significantly (mckinsey & company, 2021). it has also brought the tension between utility of data and privacy of its subjects to the forefront of public discussion (european commission, 2021). recently developed generative modelling methods, which generate data with a distribution similar to the original but without containing any of the real data, have been proposed as a potential solution (gartner research, 2021;castellanos, 2021). decision-making is, however, almost always a causal question and little is known about the replication capabilities of these methods beyond correlations. for this reason, this paper seeks to fill the gap by performing an investigation of the causal replication capabilities of data replication methods as well as defining a path forward to making them a viable option for decision-making.there are a lot of advantages to the algorithmic approach to modelling, the most important being increased performance and the opportunity for analysts to be systematic and transparent about the process by which the model was selected (athey, 2018). the power of this approach has been apparent in several fields that have had incredible advances in replicating reality due to the availability of large amounts of data. one of the most famous examples is imagenet, a database with millions of hand-labelled pictures, enabling revolutionary progress in image recognition (krizhevsky et al., 2012). 
gpt-3, a multi-purpose natural language model, similarly achieved impressive results after learning from a data set containing 45 tb of plain text (brown et al., 2020). besides these topics focused around machine learning, examples can also be found in other fields such as physics and astronomy which have collected ever-growing volumes of data to learn from. projects like the large hadron collider (evans, 2009) and the imaging of the black hole at the centre of galaxy m87 (castelvecchi, 2019) handle data in the order of petabytes. however, such large amounts of data are not always readily available. in many fields centring around individuals, such as the social and health sciences (e.g. finance, insurance, medical fields), the collecting or sharing of such datasets is far from trivial due to ethical and privacy concerns (koenecke and varian, 2020). one recently emerging option to alleviating such concerns is generative modelling.generative models are models that try to learn a representation of the (high-dimensional) distribution of a dataset. once this representation is learned, it can then be used to generate new samples that maintain the original dataset's distribution of features but that did not appear in the original dataset 1 . generative methods are thus capable of simulating nonexistent but realistic-looking data, also referred to as synthetic data, that can be shared more freely. a well-known use-case are pictures of human faces for computer vision applications. even in the possession of a large dataset of pictures of human faces, sharing this freely could present issues concerning privacy. however, generative models are capable of constructing fake but human-looking faces that can, due to their non-existence, be shared more freely to further the quality of applications.while generative modelling has been around for decades, a major breakthrough in the ability to efficiently training such models was achieved in 2014 with generative adversarial networks (gans) (goodfellow et al., 2014). this method increased our capacity to fit highdimensional distributions of data, like images and video data. the gan framework has found widespread applications throughout computer vision, like image generation (karras et al., 2017;radford et al., 2015), text to image translation (zhang et al., 2016), the blending of images (wu et al., 2017), enhancing quality of pictures (ledig et al., 2016a), filling in blanks in pictures (pathak et al., 2016), and a more infamous example of deepfakes (mirsky and lee, 2022). while these are noteworthy variations and applications of the gan framework, the common factor here is the focus on computer vision. in contrast, gans have found limited adoption within the human sciences, like economics.the main reason for this is that in these fields, most questions are inherently about identification of causal effects. neural networks, which are at the centre of the gan framework, in contrast, still focus mostly on high-dimensional correlations. an example of this is shown in the paper by beery et al. (2018), where they analyse a neural network trained to classify images. the neural network appears to be able to accurately identify whether or not there is a cow in a picture, until you ask the network to classify a picture of a cow in an uncommon environment. the model is, for instance, not able to recognise a cow on a beach, because of the spurious correlation between cows and grasslands. 
learning to label images with grass in it are shortcuts that expose the lack of generalisation of the neural network. recently, a field has emerged called causal machine learning where researchers try to make steps towards making machine learning models more causal (scholkopf et al., 2021). while this field is promising, due to the inverse problem nature of finding causality in observational data, it is currently still in its infancy in regards to applicability. as we will show below.the most prevalent used loss-functions for gans are some form of binary cross-entropy (goodfellow et al., 2014;yoon et al., 2019;radford et al., 2015;wiese et al., 2020) or wasserstein distance (arjovsky et al., 2017;xu et al., 2019;athey et al., 2019). these losses indicate in some form or another the difference between two joint probability functions. replicating the joint probability distribution, however, does not guarantee replication of the underlying causal process. finding the causal structure from observational data is an inverse problem, finding the cause from the effect. consider this example in the black swan from taleb (taleb, 2010):"operation 1 (the melting ice cube): imagine an ice cube and consider how it may melt over the next two hours while you play a few rounds of poker with your friends. try to envision the shape of the resulting puddle.operation 2 (where did the water come from?): consider a puddle of water on the floor. now try to reconstruct in your mind's eye the shape of the ice cube it may once have been. note that the puddle may not have necessarily originated from an ice cube." operation 1 is an example of the forward way of thinking, where the effect (the water) is to be predicted from the cause (ice cube). with the right models it is possible to accurately come up with the resulting pool of water. in contrast, operation 2 asks the inverse, finding the shape of the cube (cause) from the pool of water (effect). there are however an almost infinite amount of possible ice cubes that could have led to that pool of water. this example also translates to joint probability distributions and underlying causal models. for a given joint distribution there are a multitude of possible underlying causal models.in this paper, we survey the literature on generative adversarial networks, being the dominant model among generative models for synthetic data, and evaluate their capacity to preserve certain causal structures (i.e. cross-sectional, time series, and full structural) in the synthetic datasets they generate. we do so by first generating a dataset where the data-generating function, and thus the structural causal model, is know. secondly, we make a synthetic copy of this with a specific gan method and perform different causal analyses with an increasingly lenient set of assumptions, from cross-sectional to time-series to structural. lastly, we check if the results in the real data align with those in the synthetic data to evaluate the causality preserving capabilities.we find that for relationships in data where the assumptions hold such that correlation equals causation, inference on the real and synthetic data yield the same results only in the case where the actual causal structure aligns with the most simple model that can replicate the correlations in the data. 
in more complex cases, for instance when a variable has time-dependence and both influences cross-sectional features as well as itself, we find that the generative model converges on a model with the same general distribution, but that it does so with a simpler underlying causal structure. our results point at the reason being the often-used regularisation in machine learning that builds in a preference for smaller models (as posited in occam's razor) which is not necessarily a valid principle in causality. finally, when the whole causal structure is considered, it becomes apparent that currently the applicability is still limited due to the stringent assumptions that need to be met in order to overcome the challenges of the inverse problem.the remainder of this paper is structured as follows. in chapter 2, we lay-out the problem setup and discuss the structural approach we take to evaluate the causal replication capacity of gan-based models. in chapter 3, we give a general introduction to the inner workings of gan-models and detail three different gan variations that we take as representative for the different streams in the gan literature that aim to capture increasingly complex correlations (i.e. cross-sectional correlations, time-series correlations, full causal structure). in chapter 4, we present the results of our evaluation. in chapter 5, we discuss some of the additional real-world challenges that we abstracted away from but that need to be considered where these methods to be used in real-world cases. lastly, in chapter 6, we summarise and conclude our findings.in this paper, we survey the literature on generative adversarial networks, being the dominant model among generative models for synthetic data, and evaluate their capacity to preserve certain causal structures (i.we find that for relationships in data where the assumptions hold such that correlation equals causation, inference on the real and synthetic data yield the same results only in the case where the actual causal structure aligns with the most simple model that can replicate the correlations in the data.the goal of the evaluation in this paper is to see if current data replication methods are useful when causal analyses are to be performed on the resulting synthetic data. note that, in the remainder of this paper, artificial data we generate from the known model are referred to as generated data while data sampled from the gan models are referred to as synthetic data. next, a causal inference method, with its accompanying assumptions, is selected to apply to both the generated data and the synthetic data. if the gan method therefore generates a synthetic dataset where the same causal inference method does not estimate the same parameters as in the generated data, this difference can be entirely attributed to the gan method.a generative adversarial network consists of two competing neural networks: a generator g, which generates fake data, and a discriminator d, that is trained to discern which data is fake (made by the generator) and which data is real. by building in the causal graph into the generator, it will constrain the data generation data to the actual causal model and not only reproduce joint probabilities but also the causal relationships. while on average the causal relationships detected in the synthetic data are less accurate than the causal relationships in the generated data, the results are not significantly different from the true parameters. 
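As a rough illustration of the adversarial setup described above, the following is a minimal PyTorch sketch (layer sizes, optimizer settings, and the toy "real" data are placeholder assumptions, not the specific architectures evaluated in the paper); samples drawn from the trained generator play the role of the synthetic data:

import torch
import torch.nn as nn

torch.manual_seed(0)
data_dim, noise_dim, batch_size = 4, 8, 128

# Generator: maps random noise to fake samples with the dimensionality of the data.
G = nn.Sequential(nn.Linear(noise_dim, 64), nn.ReLU(), nn.Linear(64, data_dim))
# Discriminator: outputs a logit for "this sample is real".
D = nn.Sequential(nn.Linear(data_dim, 64), nn.ReLU(), nn.Linear(64, 1))

opt_g = torch.optim.Adam(G.parameters(), lr=1e-3)
opt_d = torch.optim.Adam(D.parameters(), lr=1e-3)
bce = nn.BCEWithLogitsLoss()

def real_batch():
    # Placeholder "real" data: a correlated Gaussian standing in for the generated dataset.
    z = torch.randn(batch_size, data_dim)
    z[:, 1] = 0.8 * z[:, 0] + 0.6 * z[:, 1]
    return z

for step in range(2000):
    # Discriminator step: label real samples 1, generator samples 0.
    real = real_batch()
    fake = G(torch.randn(batch_size, noise_dim)).detach()
    d_loss = bce(D(real), torch.ones(batch_size, 1)) + bce(D(fake), torch.zeros(batch_size, 1))
    opt_d.zero_grad(); d_loss.backward(); opt_d.step()

    # Generator step: try to make the discriminator classify fake samples as real.
    fake = G(torch.randn(batch_size, noise_dim))
    g_loss = bce(D(fake), torch.ones(batch_size, 1))
    opt_g.zero_grad(); g_loss.backward(); opt_g.step()

# After training, draws from G constitute the "synthetic" dataset.
synthetic = G(torch.randn(1000, noise_dim)).detach()

A causal-graph-constrained generator, as described above, would replace the unconstrained generator network with one whose structure follows the known causal ordering of the variables.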
lastly, lingam is applied to the synthetic data and its output is compared to the causal structure retrieved from the generated data. additionally, we show the causal relationships detected in synthetic data from a basic gan trained on the real data. how do we balance the benefits of increased accuracy and understanding with the privacy and ethics concerns that both come with having more data? one solution that has gained a lot of traction is synthetic data, which are data sampled from generative methods that are meant to replicate high-dimensional distributions of data. we find that in the case where the assumptions are met that make correlation equal causation, causal inference on the real and synthetic data yields the same results only if the simplest model that can generate the distribution of the features equals the real one. causal discovery tries to find the complete causal structure in observational data, which can then be used as input for a generative model that can generate synthetic data explicitly according to the causal structure. a path forward seems to be to augment the observational data fed to the gan models with additional information, such as knowledge of the different environments in which the data was collected or interventional data from experiments (scholkopf et al., 2021). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/940.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/940.txt new file mode 100644 index 0000000000000000000000000000000000000000..21bb101391245220cbd840f9a9dc9bd394751239 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/940.txt @@ -0,0 +1 @@ +major depressive disorder (mdd) is a prevalent and debilitating disorder and is associated with tremendous personal and societal costs (kessler & wang, 2009). it is also one of the most common mental disorders among college students (auerbach et al., 2016, 2018; farabaugh et al., 2012). research has indicated that depression in college students is associated with lower academic performance (hysenbegasi et al., 2005), increased levels of anxiety (rawson et al., 1994), alcohol and drug dependency, poorer quality of life and self-harming behaviors (serras et al., 2010). thus, given the high prevalence and burden of mdd, more research is needed to evaluate such risk factors for the development of depressive symptoms. hypothesis-driven studies typically predict only limited variance in depression. to better identify depression, there is a need to improve our models to predict who is at the greatest risk, and when that risk is most likely to increase. flexibility, the ability to adapt thoughts and behaviors to meet changes in contextual demands, has been proposed as a group of broadly-related constructs that may serve as protective factors against depression (kashdan et al., 2010; stange et al., 2016, 2017). several approaches to assessing flexibility, or the lack thereof (inflexibility), have been evaluated as predictors of depressed mood, including coping flexibility, explanatory flexibility, perseverative thinking such as rumination and worry, and cognitive flexibility. other known risk factors for depression include exposure to negative life events (hammen, 2005), having a negative cognitive style of responding to negative events (alloy et al., 2017), and engaging in maladaptive emotion regulation strategies (aldao et al., 2010; dryman & heimberg, 2018).
however, research typically has examined one or two of these constructs in isolation; little work has integrated these risk factors or investigated them comparatively as predictors of depression longitudinally (stange et al., 2017). the goal of this study is to highlight machine learning techniques that can facilitate the identification of risk and protective factors, and their interactions, which may reduce vulnerability to developing symptoms of depression in young adults. although the group of risk factors we consider is not exhaustive, we present this analysis as an example of how machine learning techniques can be used to integrate diverse measures in the evaluation of risk, in ways that may have clinical utility., 2016stange et al. for example, negative life events are well established as one of the strongest predictors of depression, and a variety of vulnerability factors have been identified that moderate the link between life events and depression(alloy et al.the current paper aims to introduce both decision tree and random forest methods that allow for the analysis of multilevel and longitudinal datasets: random effects regression trees (random effects/expectation maximization (re-em) trees,sela & simonoff, 2012)and mixed effects random forest(merf, hajjem et al 2014).in this work, we empirically assessed the performance of a linear mixed effect model, re-em tree, and a mixed effects random forest (merf) method on repeated measurements of depression severity. merf algorithm follows a similar approach to the re-em tree, except instead of a decision tree, a random forest is constructed in the second step to achieve improved prediction accuracy and address instability that often plagues a single tree. the model was fitted to predict depression severity measures, in which fixed effects were estimated for age and all measures related to flexibility, cognitive style, emotion regulation, and exposure to negative life events. we found that depression severity was increased significantly when an individual had elevated measures of negative life events, brooding, negative cognitive responses, and interaction between brooding and negative life events. depression symptoms were best split by brooding, with subsequent groupings based on negative life events and negative cognitive responses.the improvements in predictive accuracy for merf and re-em tree over lmm are 3% and 1% respectively in line with earlier studies comparing mixed-effects decision tree algorithms and traditional mixed-effects algorithms(sela & simonoff, 2012, hajjem et al., 2017, fokkema et al. 2018). re-em tree identified brooding as a root nodethat is, the most important initial factor for classificationand negative life events, perceived control, negative cognitive styles, and pondering as internal nodes. for example, using re-em trees to predict future depression severity showed that individuals at the highest risk for more severe depression symptoms were those with high brooding (scores > 14) who also had high levels of negative cognitive style (scores > 4), particularly those with less perceived control (scores < 25). as the number of repeated observations used for training increases, the re-em tree or merf model that accounts for random effects performs better. the rules from the re-em tree showed that brooding, negative life events, and negative cognitive styles were the main predictors of depression for future time points in our sample. 
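To make the alternating scheme behind MERF and RE-EM trees more tangible, the following is a highly simplified sketch: random intercepts only, scikit-learn's RandomForestRegressor standing in for the forest, and synthetic placeholder data rather than the study's measures. The full MERF algorithm also estimates variance components and applies shrinkage to the random effects, which this sketch omits.

import numpy as np
from sklearn.ensemble import RandomForestRegressor

rng = np.random.default_rng(0)

# Toy longitudinal data: 100 subjects, 5 repeated measurements each.
n_subj, n_obs = 100, 5
subj = np.repeat(np.arange(n_subj), n_obs)
X = rng.normal(size=(n_subj * n_obs, 3))      # placeholder predictors
b_true = rng.normal(0.0, 1.0, n_subj)         # subject-specific intercepts
y = 2.0 * X[:, 0] + np.maximum(X[:, 1], 0) + b_true[subj] + rng.normal(0.0, 0.5, len(subj))

b_hat = np.zeros(n_subj)                      # current random-intercept estimates
forest = RandomForestRegressor(n_estimators=200, random_state=0)

for _ in range(10):                           # EM-style alternation
    # 1) Fit the forest on the outcome with the current random effects removed.
    forest.fit(X, y - b_hat[subj])
    fixed_part = forest.predict(X)
    # 2) Re-estimate each subject's random intercept from its mean residual
    #    (the full MERF applies a shrinkage step here).
    resid = y - fixed_part
    for i in range(n_subj):
        b_hat[i] = resid[subj == i].mean()

# Predictions for a known subject combine the forest with that subject's intercept.
y_pred = forest.predict(X) + b_hat[subj]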
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/941.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/941.txt new file mode 100644 index 0000000000000000000000000000000000000000..1f3f7a89e9b886526c544abc75718a96eb908507 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/941.txt @@ -0,0 +1 @@ +neural architecture search (nas) is a widely used machine learning technology, which automates the design of neural network architecture to find the best model architecture for given tasks. though the recently proposed weight-sharing strategy helped the traditional nas methods avoid the burden of training massive neural network architectures from scratch and has significantly improved the computational efficiency of current nas algorithms , however, the weight-sharing strategy makes the parameters of each candidate subnet highly coupled. this makes subnet candidates hard to obtain actual * corresponding author. independent evaluations, leading to insufficient results.to solve these two problems, boyu chen et, al proposed the bnnas algorithm which used the weights of the batch normalization (bn) layer, named as bn-based indicator, to evaluate the importance of the subnets. during supernet pre-training of bnnas, only the weights of the bn layer are updated with gradients, while the rest of the random parameters are frozen. as the result, the fixed random parameters and unfixed indicators are successfully decoupled. though using the bn-based indicator as the subnet performance criterion is inspiring, however, this subnet selecting criterion is purely empirical and has no mathematical guarantees. in addition, a reasonable gradient descent method will affect the results of the architecture search . so it is obviously that random parameters initialization is unable to ensure that the gradients (signal) received by each candidate subnet in each layer are equivalent during the training procedure. as the network goes deeper, this issue becomes progressively severe, affecting the fair evaluation of all bn-based indicators. furthermore, we discovered that the magnitudes of the bn-based indicators in different layers are not comparable in the module selecting strategies of bnnas. it is not reasonable to rank the whole performance of the subnets by directly summing the bn scores of each module.in order to quantify the dynamics behavior of randomly initialized supernet during pretraining, we need to analyze the dynamics of random initialized neural networks. noticing, the mean field theory (mft) has been used to establish a theoretical understanding of neural networks with random parameters and quantitatively portrayed the "average" dynamics of signal propagation inside of themselves . mft and extensive experiments all showed that networks can be trained most efficiently and stably when their input-output jacobian of every layer achieves dynamical isometry(orthogonal), namely the property that the entire distribution of singular values of the jacobian matrix is close to 1 . the well-conditioned jacobian ensures stable propagation while avoiding the vanishing or exploding of the signal. 
in the nas algorithm, by initializing each module of the supernet to achieve dynamic isometry, the input signal of the network can be equally propagated to any place of the supernet, which is able to reflect the architecture information in neural networks as much as possible. in the present work, we continue the line of most one-shot and weight-sharing based nas techniques and propose a fairer and more efficient approach for neural architecture search. specifically, we obtain the dynamical isometry modules by applying a triangular decomposition to the randomly initialized gaussian-distributed weights. this weight initialization strategy ensures that each candidate module achieves dynamical isometry while remaining frozen during supernet pretraining. the input signal can be equally propagated to each and every module in the search space, horizontally and vertically. following these principles, the bn-based indicators are placed at the bottom of each module in the subnets as the performance indicators of those modules with different structures. to deal with the abovementioned module selecting dilemma, we select the module with the largest bn-based indicator as the target module of each layer. at last, we present a rigorous proof of the feasibility of using the parameters of the bn layer as the subnet evaluation criterion. extensive experiments showed that, at the same size, the architecture searched by the proposed method can achieve state-of-the-art top-1 validation accuracy on imagenet classification. in addition, we demonstrated that the proposed method is able to achieve better and more stable training performance without loss of generality. the contributions of this work can be summarized as follows: 1. we designed an initialization method for nas algorithms which guarantees equivalent inputs for both shallow and deep modules and ensures the fairness of search evaluation. 2. we give a mathematical proof that the value of the bn layer's parameters is able to reveal the signal propagation capability of the candidate modules, which for the first time shows the interpretability and rationality of the bn-based indicator theoretically. 3. we point out a new module selection strategy which fixes the problem that the bn-based indicator's numerical scale is uneven across layers.
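A small numerical sketch of the dynamical-isometry idea discussed above (layer width and depth are assumed values; orthogonal weights are obtained here via QR decomposition, which is one standard way to construct isometric layers and not necessarily the paper's triangular-decomposition procedure). For a deep linear network the input-output Jacobian is just the product of the weight matrices, so the spread of its singular values can be compared directly under the two initialisations:

import numpy as np

rng = np.random.default_rng(0)
width, depth = 128, 20

def jacobian_singular_values(init):
    J = np.eye(width)
    for _ in range(depth):
        W = rng.normal(0.0, 1.0 / np.sqrt(width), size=(width, width))
        if init == "orthogonal":
            W, _ = np.linalg.qr(W)   # isometric layer: all singular values equal 1
        J = W @ J                    # Jacobian of a deep *linear* network
    return np.linalg.svd(J, compute_uv=False)

for init in ("gaussian", "orthogonal"):
    s = jacobian_singular_values(init)
    print(init, "min/max singular value:", s.min(), s.max())

# Orthogonal layers keep every singular value at 1 (dynamical isometry), while
# Gaussian layers produce a spread that widens with depth, so the signal reaching
# deep candidate modules is no longer comparable across paths.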
we give a mathematical proof that the value of the bn layer's parameters is able to reveal the signal propagation capability of the candidate modules, which for the first time shows the interpretability and rationality of the bn-based indicator theoretically.in order to evaluate the candidate modules of the neural network in the supernet, we refine the input-output jacobian matrices of the entire network to individual modules.we denote j l,m as the the input-output jacobian matrix of the m-th candidate module in the l-th layer in the network, where.to explore the dynamic isometry of candidate modules according to j l,m , we denote λ i as the i-th eigenvalue of j l,m j t l,m . however, it is difficult for candidate modules in subsequent layers to have the same input, each time they accept different outputs from different candidate modules in the upper layer. in this chapter, we explain the grounds that our search algorithm is absolutely fair under the premise that every candidate module in the search space is dynamical isometry and how does our initialization method guarantee that the outputs or inputs of candidate modules in the same layer are nearly equal.input: supernet n with search space a, the layer of n is l and n l denote every layer, the number of blocks in every layer is m . output: the selected architecture γ of subnet set a list γ for l in l do if n l is reduction layer then randomly select a n l,m that has not been selected recently from the m candidate modules of the current layer end if if n l is normal layer then randomly select a n l,m that has not been selected recently from the m candidate modules or u l end if γ ← γ. therefore, we add a bn-based layer indicator to every layer to measure the importance of the current layer in the entire network. we add a parallel identity connection to each layer in the network and a bn layer after the identity as the bn-based layer indicator.in order to prove the generalization ability of the method, we change the search space to be based on the shufflenetv2 block module, and set the control of whether to add the bnbased layer indicator.we first fill the parameters of f p with random parameters that satisfy a gaussian distribution f p ∼ n (0, v), then f p , r ij and f p , r ij are jointly gaussian random variables with mean zero and variances v 2 r ij and v 2 r ij respectively, due to their independence and linear combinations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/942.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/942.txt new file mode 100644 index 0000000000000000000000000000000000000000..ffd71a9e57c1af0f4eec4339ec02887d7cf7d42b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/942.txt @@ -0,0 +1 @@ +chatgpt, a large language model developed by openai, has gained significant attention for its potential in various domains, including the field of medicine . as a language model trained on vast amounts of text data, chatgpt has demonstrated the ability to generate coherent and contextually appropriate responses to a wide range of queries . in the medical domain, chatgpt has shown promise as a tool for medical education and exam preparation, particularly in assisting students in their residency exams .the potential in medical education and exams is particularly relevant and very challenging in the context of cardiology and vascular pathologies. 
these specialized areas require a deep understanding of complex medical concepts and the ability to accurately answer questions and provide relevant explanations. therefore, the dataset described below will be a very good challenge for both students and chatgpt. the performance of chatgpt and two medical students in cardiology and vascular pathologies was evaluated using a dataset of medical exams from the siamois-qcm platform, which provides multiple-choice questions (qcm) in the french language to assist students in their residency exam preparation. this platform helps more than 50,000 medical, pharmacy, and dental students prepare for their residency exams and competitions. the students chose both the material (cardiology and vascular pathologies) and the seven lessons to make the comparison to chatgpt more challenging. this article aims to analyze the performance of chatgpt in the material of "cardiology and vascular pathologies" by utilizing a dataset of questions from the siamois-qcm platform and assessing its accuracy in answering the questions. we then compare the results of chatgpt with the performance of two well-ranked students of medicine who are currently studying in the same program. this study specifically focuses on seven lessons within the material of "cardiology and vascular pathologies". the findings of this study highlight the potential of chatgpt as a valuable tool in medical education within the material of "cardiology and vascular pathologies". table 1 shows that chatgpt outperformed the scores of the two well-ranked students by achieving 175 correct answers out of 190 questions, a percentage of 92%. figures 4 and 5 depict a correct and an incorrect answer, respectively, from chatgpt on the siamois-qcm platform. this paper demonstrates the high potential of chatgpt in the field of cardiology and vascular pathologies by providing answers to related questions from the siamois-qcm platform. although chatgpt outperformed the scores of the two well-ranked students by a 6% margin, further refinement and improvement of its performance in specific medical domains is necessary. further research and development are necessary to enhance chatgpt's capabilities for assisting students in residency exam preparation and supporting medical education in this specialized field. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/943.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/943.txt new file mode 100644 index 0000000000000000000000000000000000000000..e4aa1d9f69e110a4b037fec61fda1404c1728212 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/943.txt @@ -0,0 +1 @@ +medical image computing has seen great progress with the development of deep image classifiers, which can be trained to perform diagnostic tasks to the level of skilled professionals. recently, it was shown that these models might rely on sensitive information when making their predictions and that they exhibit performance disparities across protected population subgroups. although many methods exist for mitigating bias in image classifiers, they often fail unexpectedly and may even be harmful in some situations.
today, no bias mitigation methods consistently outperform the baseline approach of empirical risk minimisation (erm), and none are suitable for real-world deployment. if we wish to deploy appropriate and fair automated systems, we must first understand the underlying mechanisms causing erm models to become biased. an often overlooked aspect of this problem is subgroup separability: the ease with which individuals can be identified as subgroup members. some medical images encode sensitive information that models may leverage to classify individuals into subgroups. however, this property is unlikely to hold for all modalities and protected characteristics. a more realistic premise is that subgroup separability varies across characteristics and modalities. we may expect groups with intrinsic physiological differences to be highly separable for deep image classifiers (e.g. biological sex from chest x-ray can be predicted with > 0.98 auc). in contrast, groups with more subtle differences (e.g. due to 'social constructs') may be harder for a model to classify. this is especially relevant in medical imaging, where attributes such as age, biological sex, self-reported race, socioeconomic status, and geographic location are often considered sensitive for various clinical, ethical, and societal reasons. we highlight how the separability of protected groups interacts in non-trivial ways with the training of deep neural networks. we show that the ability of models to detect which group an individual belongs to varies across modalities and groups in medical imaging and that this property has profound consequences for the performance and fairness of deep classifiers. to the best of our knowledge, ours is the first work which analyses group-fair image classification through the lens of subgroup separability. our contributions are threefold: - we demonstrate empirically that subgroup separability varies across real-world modalities and protected characteristics. - we show theoretically that such differences in subgroup separability affect model bias in learned classifiers and that group fairness metrics may be inappropriate for datasets with low subgroup separability. - we corroborate our analysis with extensive testing on real-world medical datasets, finding that performance degradation and subgroup disparities are functions of subgroup separability when data is biased. suppose we have access to a (biased) training dataset, where p_tr is the conditional distribution between training images and training labels; we say that such a dataset is biased if p_tr ≠ p.
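As an illustration of how subgroup separability can be quantified in practice (a sketch with synthetic tabular features standing in for images; the paper itself works with deep classifiers on medical imaging data): train a classifier to predict the protected attribute and report its AUC, with values near 0.5 indicating low separability and values near 1 indicating high separability.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
n, d = 5000, 20
group = rng.integers(0, 2, n)                 # protected subgroup label a

def make_features(shift):
    # The larger `shift`, the more group information the features encode.
    X = rng.normal(size=(n, d))
    X[:, 0] += shift * group
    return X

for shift in (0.1, 1.0, 3.0):
    X = make_features(shift)
    X_tr, X_te, a_tr, a_te = train_test_split(X, group, test_size=0.3, random_state=0)
    clf = LogisticRegression(max_iter=1000).fit(X_tr, a_tr)
    auc = roc_auc_score(a_te, clf.predict_proba(X_te)[:, 1])
    print(f"group-information shift={shift}: separability AUC = {auc:.2f}")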
we focus on group fairness, where each individual belongs to a subgroup a ∈ a and aim to learn a fair model that maximises performance for all groups when deployed on an unbiased test dataset drawn from p .at training time, supervised learning with empirical risk minimisation aims to obtain a model p, mapping images to predicted labels ŷ = argmax y∈y p(y|x) such that p(y|x) ≈ p tr (y|x), ∀(x, y). since this model approximates the biased training distribution, we may expect underdiagnosis from the training data to be reflected by the learned model when evaluated on the unbiased test set. when subgroup separability is high due to the presence of sensitive information, the model will learn a different mapping for each subgroup, shown in eq. (8) demonstrate that tpr of the underdiagnosed group is directly affected by bias from the training set while other groups are mainly unaffected.we have derived the effect of underdiagnosis bias on classifier performance for the two extreme cases of high and low subgroup separability. in section 4, we empirically investigate (i) how subgroup separability varies in the wild, (ii) how separability impacts performance for each group when underdiagnosis bias is added to the datasets, (iii) how models encode sensitive information in their representations. we showed, theoretically and empirically, that the performance and fairness of models trained on biased data depends on subgroup separability. when separability is high, models learn to exploit the sensitive information and the bias is reflected by stark subgroup differences. our analysis centred on bias in classifiers trained with the standard approach of empirical risk minimisation -future work may wish to investigate whether subgroup separability is a factor in the failure of bias mitigation methods and whether it remains relevant in further image analysis tasks (e. group 1 performance degrades faster for datasets with high subgroup separability as label noise increases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/944.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/944.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9b449a1737aaff7f01585c40573b00278ef7957 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/944.txt @@ -0,0 +1 @@ +the volume of ransomware attacks -i.e., malware-based cyber-attacks characterised by blocking access to a device or/and encrypting valuable data is constantly increasing, with some reports finding that infections in businesses worldwide are as high as 71% . the uk's national cyber security centre highlight this significance by defining ransomware as the most acute threat faced by organisations today . while there have been several articles and reports reflecting on ransomware, its nature, attack patterns, and mitigation strategies , there is much less research on the actual negative impacts that can result from these incidents. we characterise such negative impacts using the term harms; this is similar to approaches taken by existing research . understanding harms from cyber-attacks is vital for a plethora of reasons, especially given their relevance in preparing for the consequences of attacks in the future. 
as argued by current literature, irrespective of an organisation's threat-driven or impact-driven risk assessment, the limitation of an incomplete understanding of the potential harms and the relationship between those harms can lead to the selection and deployment of inappropriate risk treatments and controls .this paper contributes to the field by critically examining the multitude of harms that can arise from cyber-attacks, with a focus upon the present threat of ransomware. we also propose a new methodology by which such incidents and their harms can be comprehensively modelled. our research makes the point that researchers, businesses and policymakers must go beyond the current focus on financial harms (e.g., payment of ransoms, cost of recovery or cyber insurance claim amounts) to examine all types of real-world harm that can result (e.g., human, physical, social) and how these harms may influence or trigger each other. ransomware poses a unique case study considering its prominence and ability to cripple unprepared organisations (e.g., uk's nhs and wannacry ).while existing research on ransomware harms and impacts is limited, there are some key articles worthy of review. by empirically studying a dataset of 453 ransomware data investigation reports, meurs et al. reported on specific factors contributing to the ransom requested, the likelihood of ransom payment and their influence on the financial losses . they conducted a detailed statistical analysis to present several factors (such as the ransom paid, the revenue of the victims and the use of raas (ransomware-as-a-service) by an attacker) which were seen to be statistically significant determinants of the financial losses reported. wilner et al., on the other hand, commented on the wider international, political, intelligence and diplomatic ramifications of ransomware by analysing several ransomware cases . this is a pertinent example of research into the nonfinancial and international impacts of such attacks. while these studies generally align with our work, wilner et al. do not discuss the individual harms that might originate from various ransomware attacks and meurs et al.'s analysis was focused on factors that contribute to financial harm; rather than an a reflection on differing types of harm.on the broader concept of harms from cyber-attacks (i.e., not only ransomware), agrafiotis et al. introduced a taxonomy of harm consisting of five major harm types, namely physical/digital, economic, psychological, reputational and social and societal harms . this taxonomy was created using a mixed approach of deductive and inductive analysis and based on publicly-available organisational harm data, harm-related literature, and public databases. this enumeration and modelling of harm is one of the closest to our work and while it does not focus on ransomware nor a detailed modelling of harms from attack cases, it can inform our study. recent related research has also examined the nature of losses from cyber-related events across different risk categories and business sectors . they used a comprehensive database of cyber-loss data over 12 years from 2008-2020, affecting 49,496 organisations across 20 business sectors. that work highlighted the heavy-tailed nature of cyber risks by analysing both the frequency and severity of losses from cyber events. 
this financial emphasis is clearly relevant to the research and business community but, as mentioned in the previous paragraph, again it risks, not capturing the full range of negative impacts or intangible costs from cyber-attacks.studies, particularly axon et al., have sought to complement existing research by using cyber insurance claims data to build harm-propagation trees that can enhance the understanding of the harms and links between harms after cyberattacks . the graph output from their study is a valuable tool for defining the frequency of each harm's occurrence and also the strength of the relations between harms. our research is similar though we benefit from a wider pool of data than what is available from insurance claims. insurance forms also arguably prioritise harms with a financial component and therefore we expect our study to be more comprehensive in its definition and modelling of harms.to address the gap in existing literature related to the definition and understanding of harms from ransomware attacks, we conducted a data-driven, sociotechnical research study. specifically, we used publicly available data to analyse eight different ransomware incidents and enumerated the harms and harm relations (i.e., which harms lead to other harms) that emerged. these incidents were investigated through the construction of a series of ransomware harm models enumerating the relevant data. in addition to providing an improved appreciation of the long tail of harms after a ransomware incident, we posit that the modelling methodology proposed and these models themselves are significant for two reasons. first, they provide businesses with data that is necessary for effectively implementing risk controls within their organisations. that is, they encourage consideration of harms beyond initial server compromise or loss of data to wider harms that negatively affect the business and its stakeholders. secondly, the methodology and resulting models explicitly highlight the wide nature of harms to researchers studying cyber-attacks and policymakers responsible for protecting an increasingly digital society., not only ransomware), agrafiotis et al. this taxonomy was created using a mixed approach of deductive and inductive analysis and based on publicly-available organisational harm data, harm-related literature, and public databases.studies, particularly axon et al." this paragraph was recorded and noted as a single harm, namely "loss/unavailable clinical data", so as to capture the clinical nature of the data impacted, without creating new harms for each of the individual data types (e. a representative sample of harms and relations from texts were also validated by a group of four researchers to settle any differences and produce an agreed set of harm and harm relations.the primary aim of this research is to examine, and provide a methodology for highlighting, the multitude of harms that can arise from ransomware attacks, thereby providing an evidence base for an increased acknowledgement and understanding of these harms. 
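To make the idea of harm relations concrete, here is a minimal sketch of how cause-and-effect links between harms could be stored and queried as a directed graph (using the networkx library; the node names are illustrative, loosely echoing the clinical example discussed later in the text, and are not an exact reproduction of any of the eight modelled incidents):

import networkx as nx

# Illustrative harm relations (cause -> effect).
relations = [
    ("ransomware infection", "unavailable clinical systems"),
    ("unavailable clinical systems", "disrupted clinical services"),
    ("disrupted clinical services", "anguished patients"),
    ("ransomware infection", "cost of recovery"),
    ("cost of recovery", "reputational damage"),
]

harm_model = nx.DiGraph()
harm_model.add_edges_from(relations)

# Downstream harms reachable from the initial compromise -- the "long tail".
print(sorted(nx.descendants(harm_model, "ransomware infection")))
# Direct consequences of a single harm.
print(list(harm_model.successors("unavailable clinical systems")))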
to support this aim and to portray harms and their relationships visually, we constructed a series of harm models (one for each case) using the harm list and harm relations developed earlier (and based on the rules above).we depict the harm model as a non-weighted directed graph g = (v, e), where each node u ∈ v represents an observed harm from the ransomware incident and each directed edge (p, q) ∈ e indicates that a relationship exists between the two harm nodes p and q (i. as depicted in the figure, examples of harms that emerged in the aftermath of the ransomware attack included compromised council resources, unavailable council systems, loss/unavailable data in council system and disruption of internal council operations.economic harms are the other set of common harms that result from a ransomware attack.the analysis conducted also discovered a complex web of interconnected harms caused by ransomware attacks; this is aptly depicted in the harm models. reviewing the models broadly, harm relations often start with some digital/physical or economic harm and ultimately lead to harm to individuals, as depicted in the hse model unavailable clinical system leads to disrupted clinical services which in turn leads to angry/anguished patient, i. we engaged in a study of these harms by reviewing well-documented cases of ransomware attacks, creating models to understand the presence and relations between harms, and critically reflecting on these to extract a set of pertinent observations of importance to the wider community. this detailed identification of harms and also the harm relations provide valuable new information for organisations and policymakers seeking to implement measures that limit the harm caused by ransomware attacks. generally, this modelling process also provides methodological guidance for policymakers in identifying the type and trajectory of ransomware harms which can then be used to develop more formalised cyber harm models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/945.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/945.txt new file mode 100644 index 0000000000000000000000000000000000000000..8244d1adf2c4e5defcaf1349977881a32f5ca607 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/945.txt @@ -0,0 +1 @@ +recent studies have investigated the relationship between machine learning model performance-as measured by some evaluation metricand model design variables like dataset size, number of model parameters, and compute, or what is commonly known as scaling laws for ai . in general, these analyses demonstrate a monotonic relationship between design variables and model performancee.g., as the dataset size increases, models will perform better on some metric computed over an evaluation dataset. this phenomenon is often presented graphically where the x-axis indicates the design variable and the y-axis indicates model performance, with lines suggesting the superlinear relationship for deep learning models. this relationship has been used to justify the collection of everlarger datasets (e.g., the colossal clean crawled corpus ) used to train large language models .in the context of ai systems that are used by or directly impact people, an evaluation metric, the dependent variable underlying many scaling laws, reflects the performance or quality of the system for those people. 
a metric, then, should not be considered a decontextualized property of a model, but rather a reflection of the quality of a model used for a particular task, in a specific social context. on the one hand, the mathematical form of a metric ideally reflects what a system designer thinks is important to users or others impacted by the system. the mathematical formula for a metric encodes assumptions about user beliefs, values, and behavior. on the other hand, the evaluation data used to compute the metric represents-through a sampling procedure-the particular population of interest. in combination, the mathematical form of a metric and the associated data estimate the performance of a model on a specific population. the quantity computed by an evaluation metric, then, is shaped both by its underlying mathematical assumptions and sampling procedures.just as a predictive model and its training data can be scrutinized for biases, we can-and should-scrutinize metrics, especially in light of their role in scaling laws. we contend that, when designing for a sufficiently diverse population of users, measuring performance with a single, universal metric is precarious. specifically, we argue that as the size of the evaluation dataset increases, the number of subpopulations present in the evaluation set is likely to increase as well. by subpopulations, we mean members of particular identity groups, including demographic groups (e.g., gender, racial or ethnic groups, religion, caste, national identity, disability status), as well as other cultural or sociopolitical groups, among the many other ways that human societies have variously organized themselves. 1 the samples used to evaluate model performance will thus (intentionally or not) capture some set of communities in the social context in which the data was collected. as sample size grows, the likelihood of a larger number of communities included in the evaluation set also grows. while more inclusive, comprehensive datasets might be desirable, the diverse communities themselves are likely to hold different uses, behaviors, and values with respect to the model being evaluated.increasing evaluation dataset size increases the number of subpopulations present and multiplies the number of values that should be considered. beyond the well-known challenges of developing ai systems to be used by diverse subpopulations-such as evaluating algorithmic unfairness in model performance with respect to a fixed evaluation metric (e.g., equalized odds)-we argue that diverse subpopulations are likely to have different and potentially conflicting notions of 'good' as instantiated in both the mathematical form and data behind an evaluation metric, which machine learning evaluations using single metrics often assume to be universal. while evaluations of algorithmic unfairness may surface the systematic variation of the true relationship between features and targets across subpopulations, we contend that the targets themselves vary across subpopulations. 2the current lack of attention on the subpopulations and communities represented in scaling law evaluation datasets 3 poses major challenges for proponents of ai scaling laws-and presents potential risks for all those impacted by the deployment of large models. 
despite claims that a larger training dataset (e.g., a crawl of the predominantly english-speaking internet ) will lead to improved model performance, when such models are deployed at scale, the larger numbers of people included in the evaluation dataset-and thus a larger number of communities-may lead to breakdowns in model performance for different communities. different communities of users or impacted stakeholders may necessitate evaluating with different metrics (due to different behaviors or values), each of which may be in conflict with each other, or in conflict with commonly used evaluation metrics.we demonstrate that current ai scaling law analyses overlook the large and diverse set of constructs required to truly assess performance for large and diverse sets of communities. as a result, scaling laws conceal sub-optimal performance for under-sampled communities of users. in other words, despite claims to the contrary, larger training datasets may not in fact lead to improved model performance for all users when deployed at scale. we draw on scholarship from the social sciences to question the validity of claims made about scaling laws for dataset size and model performance when considering their use in models deployed at scale, on global populations whose values may not be reflected in current performance metrics or which may be in irreconcilable tension with each other. because evaluation data can vary in size and composition, we propose that, for a given metric and sampling procedure, scaling laws consider, in addition to the two axes of (training) dataset size and performance on a given metric, a third axis indicating the size of the evaluation data set. doing so allows us to capture the change in composition of evaluation data as the sample size increases, be it an artifact of a small sample size or a sampling strategy that varies with the size. moreover, studying dynamic evaluation sets is consistent with production systems where the evaluation set depends on the number and composition of system users, a population that technology designers hope will grow as a given system is more widely adopted.these observations suggest that scaling laws, in the pursuit of general principles, can obscure systematic under-performance for subpopulations and communities of people using or impacted by these models. different communities of users or impacted stakeholders may necessitate evaluating with different metrics (due to different behaviors or values), each of which may be in conflict with each other, or in conflict with commonly used evaluation metrics. we draw on scholarship from the social sciences to question the validity of claims made about scaling laws for dataset size and model performance when considering their use in models deployed at scale, on global populations whose values may not be reflected in current performance metrics or which may be in irreconcilable tension with each other. 
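The following sketch uses entirely simulated numbers, purely to illustrate the argument and not results from any real system: an aggregate scaling curve can keep improving while an under-sampled subpopulation improves far more slowly, which is exactly what disaggregating the evaluation along the proposed third axis would expose. The power-law exponents and the 90/10 population split are assumptions.

import numpy as np

# Assumed power-law error curves err(N) = a * N^(-b) for two subpopulations:
# the minority group (10% of the evaluation data) improves much more slowly.
def err_majority(n): return 1.0 * n ** -0.30
def err_minority(n): return 1.0 * n ** -0.05

for n in [10**3, 10**5, 10**7, 10**9]:
    maj, mino = err_majority(n), err_minority(n)
    aggregate = 0.9 * maj + 0.1 * mino   # what a single universal metric reports
    print(f"N={n:>12,}  aggregate={aggregate:.3f}  majority={maj:.3f}  minority={mino:.3f}")

# The aggregate keeps falling with N (a clean "scaling law") even though the
# minority group's error remains high; the gap is invisible without
# disaggregating the evaluation set by subpopulation.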
because evaluation data can vary in size and composition, we propose that, for a given metric and sampling procedure, scaling laws consider, in addition to the two axes of (training) dataset size and performance on a given metric, a third axis indicating the size of the evaluation data set.even within a given domain, the quality of models' output may be tightly coupled with a user's specific task; for example, the quality of a predictive typing application may be related to how useful users find it in effectively completing a writing task-but this quality may differ greatly between different use cases for the same task, such as (for instance) informal messages and creative writing tasks compared to professional communication or scientific writing tasks.as a result of this potential incompatibility between metrics, adopting any single metric for evaluating a model-including for scaling law analyses-requires careful understanding of the validity of the metric with respect to the construct of interest, and how that relationship may be more or less stable for various use cases, social contexts, or even different ranges of the metric's value., the mutual alignment of a set of metrics with a construct) is a desirable property, the inherent friction between metrics means that this relationship may be less rigid than implied by literature on scaling laws, posing serious questions for whether the performance metrics used in scaling law analyses adequately reflect model output quality for users and others impacted by such systems. first, that evaluation metrics reflect the composition of the evaluation dataset, which is shaped by the sampling approach used to collect that data; second, that the number of sub-groups within a given dataset grows with data size; third, those sub-groups can have incompatible values and preferences for appropriate evaluation metrics; and fourth, that the risk of that metric incompability grows with dataset size.scaling laws, which are, in essence, aggregate evaluations of models' performance across the entire evaluation dataset , may similarly hide failures or inverse relationships amongst constructs and values when evaluated with different sub-populations contained within the evaluation data.since the number of distinct sub-populations (and thus their respective latent constructs for model quality) represented in an evaluation set are likely to grow as a function of dataset size, there is an increasing chance of a dramatically misaligned evaluation of model quality-leading to potential impacts or harms for communities whose values are not represented by the dominant performance metric used for model evaluation [e. given the catastrophic deterioration of performance according to goodhart's law, other notions of unfairness (in terms of incompatibility of values and the metrics used to operationalize those values) are also more likely to occur as more sub-populations manifest in evaluation data and model performance according to a target metric grows.in addition, what would it look like to evaluate scaling laws for ai systems where our proposed 'z-axis', instead of evaluation data size, might reflect the different groups of people whose values (and thus latent metrics: * ) may fundamentally differ? 
this might include some combination of modeling scaling laws on the basis of: 1) the number of countries in which an ai system is deployed or used; 2) the number of languages in which the output is generated; 3) the number of users; 4) the number of unique communities or sub-populations represented in the dataset. in addition, given our argument that different communities represented within a dataset (or impacted by a particular system) may have fundamentally different values and metrics, what might it look like to evaluate scaling laws where the y-axis, instead of a decontextualized model performance metric like accuracy (or f1 score, rmse, rouge, etc.), was instead chosen for particular use cases or system deployment contexts, or was chosen by particular impacted communities in participatory ways [e.g.]? as previously discussed, substantial work on model evaluations has shown that aggregate metrics of model performance may hide worse performance for particular sub-groups, which can be observed when model performance is dis-aggregated by demographic categories [e.g.]. we draw together work from computer science and the social sciences to identify the ways that metrics used to evaluate ai systems may be unstable or precarious; how the increasing scales of data used to train ai systems entail increasing numbers of sub-populations or communities of people; and how those groups' values and preferences for ai systems and the metrics used to evaluate them may be incompatible or fundamentally at odds with each other. we suggest opportunities for interdisciplinary, participatory, and community-based research to better understand which sub-populations or communities may be represented in a given dataset (or impacted by a particular model); which evaluation metrics might best reflect their values; and how to conduct such evaluations or resolve tensions in those values. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/946.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5579a96c53f5e5977ad63ddf917b025e9b52b8f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/946.txt @@ -0,0 +1 @@ +we propose to validate the importance of interaction, general atmosphere, and teacher's passion in the learning process in computer science courses. we created an experiment where the delivered information is comparable but the teaching style and interaction are deliberately modified. we assessed the outcome by analyzing the students' perception, both at the visual level / observed behavior (sleepiness, yawns, laying on the benches) and through their collected textual feedback; the feedback was analyzed manually using thematic analysis. we wanted to find out if there is a difference in perceived understanding and interest from students when we use interactive methods combined with elements of nonverbal communication compared to a classical teaching method (just presenting the information). we also wanted to find out if the teaching methodology impacted students' perception of the lecture in affective terms (they feel enthusiastic, good, interested or bored, discouraged) and in cognitive terms (they understand better / worse). we created a list of nonverbal elements (yawns, laying on the desks, arm positions, and so on) that we observed during the lectures and whose frequency we analyzed.
at the same time, we asked the students for anonymous feedback. education is a key component of society, influencing a country's future development; due to its importance, countries allocate a part of their budget to the educational system. apart from money, other factors influence the attractiveness of a specific domain. for the computer science and mathematics domains, for example, the interest of secondary school students depends on a set of factors: the student's socioeconomic status, performance, self-efficacy, motivation, engagement, and task value beliefs (kahraman, 2022; spieler et al., 2020). universities noticed that the number of students who graduate is smaller than the number of enrolled students; some tried to decrease the drop-out rates by offering additional materials and by offering some courses in an online or hybrid format to minimize tuition fees and increase accessibility. online courses increase accessibility, as students can learn whenever they have time, they can learn at their own pace, and having access to resources before the course allows them to ask more and more complex questions (baquerizo et al., 2020); however, not even going online solved all the problems. the paper (baquerizo et al., 2020) analyzes methods to motivate students in an online environment, and (petrescu and sterca, 2022) mentioned students' difficulty in finding a quiet place for learning. we organized the first course to be the interactive one and the second to be in a classic "lecturing" style; because we asked for feedback after each course, we wanted the students to be able to analyze the differences (the "lecturing" style, in our opinion, is more commonly used than the interactive style, where the teacher must make additional efforts to involve the students). 86 students from computer science participated in the experiment: 75 of them were randomly selected first-year students, and 11 students were in the second year of study and had not passed the course exam in the first year, so they had to retake the course. we asked the following open questions related to each course to get more information: q1: did you understand the information presented in the lecture?, q2: how was the delivery of the course and the teaching style?, and q3: what was the effect of the teaching style on learning interest? what did you like / dislike about this course? next, we break down and analyze each research question separately. in the answers provided for the course structure and content, we classified the keywords into two classes, one related to content and structure and one related to the content's applicability (table 2). we analyzed the prevalence of the keywords in the received answers; for each course, each student provided three answers related to structure and content, delivery, and effect (58% vs 74%). however, the keyword reflecting the overall applicability was much higher in the first course; we believe the difference lies in the information taught: in the first lecture the students found out why we have specific data types, information that is generally useful ("the content was interesting and it changed the way i view computers and programming"). the students appreciated the applicability of the presented information in the first course, thus reinforcing the fact that students are more interested in practical aspects (when they realize the theoretical part's applicability).
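the keyword-prevalence analysis described above can be reproduced in spirit with a few lines of python; the feedback strings and keyword classes below are invented placeholders, not the study's actual data:

from collections import Counter

# hypothetical feedback strings and keyword classes; the actual keywords and
# answers analysed in the study are not reproduced here
feedback_course_1 = [
    "interactive and interesting, good structure",
    "clear structure, practical examples, interesting",
    "useful content, interactive delivery",
]
keyword_classes = {
    "structure_and_content": ["structure", "content", "clear"],
    "applicability": ["practical", "useful", "applicable"],
}

counts = Counter()
for answer in feedback_course_1:
    for label, keywords in keyword_classes.items():
        counts[label] += sum(answer.count(k) for k in keywords)

# prints per-class mention counts, e.g. Counter({'structure_and_content': 4, 'applicability': 2})
print(counts)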
we counted the prevalence of the first class in the answers for the first lecture (35 mentions), the relatively similar prevalence was obtained for the second lecture, for the same class, but the appreciation had a negative connotation (31 mentions): no interaction, blunt, boring, bad delivery.it was interesting that all these key items appeared in the answers from the first course, and none appeared in the answers related to the second course. the students found it more difficult to concentrate and not lose focus during the course when the course was not presented in an interactive way (even if there were exercises and examples and the teacher answered the questions).as for the second part of the experiment, the students mentioned that they were bored and missed the interest, we had short, neutral answers: "presented in a linear way", "it was okay", or little longer answers: "monotone, kind of boring, not interactive".to see how much the teaching style impacted how the students perceived the difficulty (even if the topics had a similar level of difficulty), we checked how many students reported the information to be easy for the first course compared to the second, 60. we analyzed the effects and the results of the teaching styles in two methods -the first one: asking for students' feedback after each course and the second one: defining a set of countable behavioral signs (yawns, laying on the bench). in the second course, when the teaching style was deliberately changed, to a non-interactive one, the signs of boredom could be measured and the students reported a lack of interest, daydreaming, and even doing a completely different thing. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/947.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/947.txt new file mode 100644 index 0000000000000000000000000000000000000000..3814037db8f31ace1200cbf513109e5ec66fe114 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/947.txt @@ -0,0 +1 @@ +the exposure of social media users to online hate and abuse continues to be a cause for public concern. volumes of abuse on social media continue to be significant in absolute terms (vidgen et al., 2019), and some claim they are rising on platforms such as twitter where at the same time content moderation appears to be becoming less of a priority (frenkel and conger, 2022). receiving abuse can have negative effects on the mental health of targets, and also on others witnessing it (siegel, 2020;saha et al., 2019). in the context of public figures the impact on the witnesses (bystanders) is arguably even more important, as the abuse is potentially witnessed by a large volume of people. in addition, politicians and other prominent actors are driven out of the public sphere precisely because of the vitriol they receive on a daily basis (news, 2018), raising concerns for the overall health of democracy.within this context, research on mechanisms for combating online abuse is becoming ever more important. one such research angle is the area of "counterspeech" (or counter-narratives): content that is designed to resist or contradict abusive or hateful content (benesch, 2014a;saltman and russell, 2014;bartlett and krasodomski-jones, 2015), also see figure 1. 
such counterspeech (as we will elaborate more fully below) is an important potential tool in the fight against online hate and abuse, as it does not require any intervention from the platform or from law enforcement, and may contribute to mitigating the effects of abuse (munger, 2017; buerger, 2021b; hangartner et al., 2021; bilewicz et al., 2021) without impinging on free speech. several civil organisations have used counterspeech to directly challenge hate, and facebook has launched campaigns with local communities and policymakers to promote accessibility to counterspeech tools. similarly, moonshot and jigsaw implemented the redirect method, presenting alternative counterspeech or counter videos when users enter search queries that may suggest an inclination towards extremist content or groups. the detection and generation of counterspeech are important because they underpin the promise of ai-powered assistive tools for hate mitigation. identifying counterspeech is also vital for analytical research in the area: for instance, to disentangle the dynamics of perpetrators, victims and bystanders (mathew et al., 2018; garland et al., 2020, 2022), as well as to determine which responses are most effective in combating hate speech (mathew et al., 2018, 2019; chung et al., 2021a). automatically producing counterspeech is a timely and important task for two reasons. first, composing counterspeech is time-consuming and requires considerable expertise to be effective (chung et al., 2021c). recently, large language models have been able to produce fluent and personalised arguments tailored to user expectations, addressing various topics and tasks. thus, developing counterspeech tools is feasible and can provide support to civil organisations, practitioners and stakeholders in hate intervention at scale. second, by partially automating counterspeech writing, such assistive tools can lessen practitioners' psychological strain resulting from prolonged exposure to harmful content (riedl et al., 2020; chung et al., 2021c). however, despite the potential of counterspeech, and the growing body of work in this area, the research agenda remains a relatively new one, which also suffers from the fact that it is divided into a number of disciplinary silos. in methodological terms, meanwhile, social scientists studying the dynamics and impacts of counterspeech (e.g. munger, 2017; buerger, 2021b; hangartner et al., 2021; bilewicz et al., 2021) often do not engage with computer scientists developing models to detect and generate such speech (e.g. chung et al., 2021b; saha et al., 2022), or vice versa. the aim of this review article is to fill this gap by providing a comprehensive, multi-disciplinary overview of the field of counterspeech, covering computer science and the social sciences over the last ten years. we make a number of contributions. firstly, we outline a definition of counterspeech and a framework for understanding its use and impact, as well as a detailed taxonomy. we review research on the effectiveness of counterspeech, bringing together perspectives on the impact it makes when it is experienced. we also analyse technical work on counterspeech, looking specifically at the task of counterspeech generation, scalability, and the availability and methodology behind different datasets.
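as a rough illustration of what such an assistive generation tool could look like (this is our sketch, not a system from the surveyed literature; the model name, prompt, and generation settings are placeholders, and dedicated counterspeech systems are typically fine-tuned on curated counterspeech datasets rather than prompted zero-shot like this):

from transformers import pipeline

# placeholder model; a production assistive tool would use a model fine-tuned
# for counterspeech rather than an off-the-shelf small language model
generator = pipeline("text-generation", model="gpt2")

hateful_post = "example of an abusive post (placeholder text)"
prompt = (
    "Abusive post: " + hateful_post + "\n"
    "Write a calm, factual counterspeech reply that challenges the abuse "
    "without insulting the author:\n"
)

draft = generator(prompt, max_new_tokens=60, num_return_sequences=1)[0]["generated_text"]
# a human practitioner would still vet and edit the draft before posting,
# for the effectiveness and safety reasons discussed later in this review
print(draft)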
importantly, across all studies, we focus on commonalities and differences between computer science and the social sciences, including how the impact of counterspeech is evaluated and which specific effect of hate speech it best ameliorates. we draw on our findings to discuss the challenges and directions of open science (and safe ai) for online hate mitigation. we provide evidence-based recommendations for automatic approaches to counterspeech tools using natural language processing (nlp). similarly, for social scientists, we set out future perspectives on interdisciplinary collaborations with ai researchers on mitigating online harms, including conducting large-scale analyses and evaluating the impact of automated interventions. taken together, our work offers researchers, policy-makers and practitioners the tools to further understand the potential of automated counterspeech for online hate mitigation. on the computational side, some work reviews the use of counterspeech in social media using natural language processing, including work outlining counterspeech datasets (adak et al., 2022; alsagheer et al.). we included the search terms 'counter-speech', 'counter-narratives', 'counter-terrorism', 'counter-aggression', 'counter-hate', 'counter speech', 'counter narrative', 'countering online hate speech', 'counter hate speech', and 'counter-hate speech'. of the returned results, we include all publications that concern (1) analysis of the use and effectiveness of interventions against hateful or abusive language online, (2) characteristics of counterspeech users or recipients, or (3) data and/or implementations designed for counterspeech (e.g.). here, we outline four main ways in which counterspeech can vary: the identity of the counterspeaker, the strategies employed, the recipient of the counterspeech, and the purpose of the counterspeech. types of evaluation metrics: based on reynolds and tuck (2016)'s counterspeech handbook, we identified the following three types of metrics used by the authors of the papers to evaluate the effectiveness of counterspeech interventions: social impact, behavioural change, and attitude change measures. for potential hate speech perpetrators, carthy and sarma (2021) use psychological testing to measure the extent to which participants legitimized violence after exposure to differing counterspeech strategies (bélanger et al., 2016; novikova et al.).
in this section, we highlight key distinctions and overlaps across areas that have and have not been explored in the social sciences and computer science, discuss ethical issues related to evaluating counterspeech in real-life settings and automating the task of counterspeech generation, and identify best practices for future research. social scientists focus more on the use and impact of counterspeech (e.g., which strategies are effective and public perception towards counterspeech), whereas computer scientists focus more on technical exploration of automated systems and testing their performance in producing counterspeech (e.g.). ethical issues, risks and challenges of conducting counterspeech studies: effective evaluation of counterspeech not only identifies users who may need help, but also safeguards human rights and reinforces a stronger sense of responsibility in the community. furthermore, effective and ethical counterspeech relies on the accuracy and robustness of detecting online hate speech: an innocent speaker may be publicly targeted and shamed if an utterance is falsely classified as hate speech, either directly or indirectly as in end-to-end response generation. while ai researchers are already beginning to explore opportunities to automate the generation of counterspeech for the mitigation of hate at scale, research from the social sciences points to many nuances that need to be considered regarding the impact of counterspeech before this intervention is deployed. through our analysis of extant work, we suggest that findings regarding the efficacy of counterspeech are highly dependent on several factors, including methodological ones such as study design and outcome measures, and features of counterspeech such as the speaker, target of hate, and strategy employed. while some work finds counterspeech to be effective in lowering further hate generation from the perpetrator and raising feelings of empowerment in bystanders and targets, others find that counterspeech can backfire and encourage more hate. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/948.txt new file mode 100644 index 0000000000000000000000000000000000000000..87e2c08a9a3b7d70394478e493ec472eee5aa606 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/948.txt @@ -0,0 +1 @@ +deep neural networks (dnns) have become a well-established solution for computer vision systems, in several critical applications such as medical diagnosis or self-driving cars. nevertheless, dnns require vast amounts of data and massive computational power to achieve competitive results. for instance, to perform image classification of a single image, vgg-16 needs 15 billion floating point operations (flops), and yolov3 performs 39 billion flops. therefore, energy efficiency becomes a must for these systems. however, with the ever-growing amount of data and computational resources available, we observe a tendency to produce extremely large models to solve increasingly complex tasks while disregarding their energy consumption needs. in response to this tendency, the research community is starting to take measures, aiming at providing novel results while considering their energy consumption (schwartz et al.). although several studies have measured the resource consumption of dnns, there is still scarce knowledge on how model architecture and training environment impact energy consumption and their trade-offs with other qualities.
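to give a concrete sense of the kind of measurement involved, the sketch below samples gpu power draw with nvidia's nvml bindings (pynvml) and normalizes the estimated training energy by the number of processed images so that architectures can be compared on equal footing; the training loop, epoch count, and image counts are placeholders, and this is not the instrumentation used in the study described here:

import time
import pynvml

pynvml.nvmlInit()
handle = pynvml.nvmlDeviceGetHandleByIndex(0)   # first gpu

samples_mw = []
images_processed = 0

def train_one_epoch():
    # placeholder for the real training loop; returns images seen this epoch
    time.sleep(1.0)
    return 50_000

start = time.time()
for _ in range(3):                               # placeholder: 3 "epochs"
    images_processed += train_one_epoch()
    # nvmlDeviceGetPowerUsage reports instantaneous draw in milliwatts;
    # a real measurement would sample at a much higher frequency
    samples_mw.append(pynvml.nvmlDeviceGetPowerUsage(handle))

elapsed_s = time.time() - start
avg_power_w = (sum(samples_mw) / len(samples_mw)) / 1000.0
energy_joules = avg_power_w * elapsed_s

# normalize by the number of processed images so models trained with
# different epoch counts or batch sizes can be compared fairly
print("energy per image (J):", energy_joules / images_processed)

pynvml.nvmlShutdown()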
we complement previous works by studying how model architecture and training environment impact energy consumption and model correctness. following the goal-question-metric guidelines, we define our goal as: analyze the model architectures and training environments for the purpose of assessing their impact with respect to energy efficiency, model correctness, and their trade-off, from the point of view of data scientists, in the context of image classification training. specifically, we measure architecture complexity with the number of flops, energy efficiency through energy consumption and gpu usage, and correctness in terms of f1 score. to analyze the contribution of the independent variables to energy efficiency and correctness, we consider four dependent variables: energy consumption, gpu usage, f1 score, and the number of flops required for a single forward pass of the model. this enables us to analyze the potential impact the training environment has on the energy consumption of training dl models. hence, to make a fair comparison between architectures regarding energy consumption, we normalize the total energy consumption by the total number of images processed during training. to answer rq1, we look for statistical differences in energy consumption, gpu usage, and f1 score between the three architectures. to answer rq2, we perform a statistical comparison of energy consumption and gpu usage between the training environments. with rq1, we explore the differences in energy consumption and gpu usage of dl architectures, their trade-off with model correctness, and their relationship with the number of flops. we hypothesize this might be caused by how efficient the used gpu is, or by other factors such as waiting for memory operations, which increase the energy consumption of the gpu by making the training slower. overall, our results suggest that the selection of model architecture and training environment can be beneficial in reducing the energy consumption of training dl models. hence, although previous studies on green ai suggest flops as a measure to quantify the energy consumption of a model, we believe there is still a need for a better way of estimating energy consumption. our intuition behind this behavior is the tight relationship between energy consumption and the training environment, which cannot be explained by simply using the flops of a model, which are independent of the training environment properties. also, pre-trained models should recommend the optimal hardware settings for their training, and dl frameworks should easily report gpu usage and energy consumption for better monitoring. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/949.txt new file mode 100644 index 0000000000000000000000000000000000000000..9677f841f43ca7629af87e628f466c68e007968a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/949.txt @@ -0,0 +1 @@ +online freelance marketplaces offer advanced systems for remote collaboration, connecting self-employed workers (freelancers) with clients (individuals, small businesses, and large corporations) across the globe. the figures reported by major freelancing platforms suggest that the scale of global online labor is huge.
being one of the prominent freelance platforms, upwork reported that more than 145 thousand clients spend over $ 2.5 billion per year, indicating the platform has significant number of users . pre-covid studies estimated that the demand for online freelancing platforms grew by approximately 21 percent from may 2016 to january 2018 with highest demand for software development and technology skills . covid-19 has catalysed remote work and the situation looks irreversible with more and more of the workforce adopting remote working model .secure software development is an integral part of software development in today's digitized world with constant security threats looming over businesses and daily lives of individuals.while developer-centered security has received much attention in the last decade , security in freelance software development has received little attention.below, we highlight the need to investigate the security practices among freelance developers and to motivate the need to provide support to this cohort to develop secure software.a. motivation to study freelance software developers for secure software development 1) existing studies on freelance software developers focus on insecure outcome: existing work on security behavior of freelance developers , and on understanding security in the freelance development ecosystem notes that freelance software developers produce more insecure code and holds them accountable for it . however, recent studies ( , ) attempt to understand why freelance developers produce (more) insecure code. the work of ryan et al. investigates levels of secure coding practices for developers who are under-represented in literature, i.e. isolated developers, open source developers, freelancers and small organisations. they investigate how these cohorts adhere to common security practices. their empirical findings reveal that these security practices are resource intensive and highlight the need to target small and under-resourced software development communities with tailored software security advice. the work of rauf et al. suggests that online freelance software development has unique marketplace dynamics that can lead to security compromises. their work emphasizes the need for tailored security interventions to support freelance software developers working within platforms.2) freelance developers can be serious and educated developers: the need for offering support to freelance developers to improve their security behavior is exacerbated by the fact that freelance work-model is increasingly being adopted as a serious career -as an alternative to company employment. the stack overflow survey reports that nearly 15% of developers that they surveyed are independent contractors, freelancers, or self-employed, making online freelance software development(ofsd) a significant part of the software industry. a recent industry report shows that non-temporary freelancers are growing, with 44% of freelancers saying that they earn more from freelancing than with a traditional job in 2021 . moreover, the prevalence of freelancing is increasing among individuals with higher levels of education, while it is declining among those with lower levels of education . similar findings were reported in prior work: an empirical study with freelance developers found that more than 50% participants had post-graduate education and learnt software development through formal education . 
the study also reported that 90% of interviewed freelance developers could be characterized as serious developers who earned regular income from freelancing as full-time or parttime career. these findings about freelance developers from both industry and academia underline the significance of this growing demographic of developers the needs of which should be catered to.3) software developed by freelance developers have consequential effects: freelance developers are perceived as being non-serious developers who are unreliable producing lowquality outputs and showing a lack of commitment around security issues . this perception may be grounded on the fact that online freelance marketplaces are open to all kinds of developers -those who know their work well and those who do not. while there are many non-serious developers, online freelancing platforms also host a huge number of serious developers who do a decent job. this is suggested by the fact that clients increasingly hire from these freelancing platforms and pay them . rauf et al. reported that freelance developers do non-trivial jobs, i.e. most of their study participants worked on projects that were customer facing, such a mobile apps, web development, commercial products. moreover, in today's world of digital enhancements, software products increasingly depend on one-another within the software supply chain --and within which, each job performed forms a significant link. a clear instance of this is log4shell (cve-2021-44228), a vulnerability found in log4j, a widely used open-source java logging tool. this particular flaw was publicly revealed in the latter part of 2021 and was quickly exploited by malicious individuals. by the end of 2022, there were reports indicating that north korea had utilized this vulnerability to gain initial access to the networks of american energy companies . this indicates that software products developed by freelance developers have far reaching effects.4) widespread adoption of easy to use application development frameworks: developing software is no longer the domain of the select few with deep technical skills, training and knowledge. a wide range of people from diverse backgrounds are developing software for smart phones, websites and iot devices used by millions of people. the rise of easy-to-use development frameworks, such as wordpress have encouraged people from non-technical backgrounds to develop applications that are used by a number of users. to take an example, in an earlier study with freelance software developers , participants without a programming background reported that they used wordpress because it offered an easyto-use interface. however, such frameworks are well-known to attackers for their vulnerabilities -a risk that was perhaps unknown to the clients of freelance developers and of no concern to online freelancing platforms that are only tasked with facilitating transactions.in this position paper, motivated by the reasons above, we outline a case for identifying roles and responsibilities in online freelance software development and propose a'call-foraction' to stakeholders of freelancing platforms to facilitate secure software development practices for this cohort of developers. we consider it an important step to tackle challenges to writing secure code in online freelance software development platforms that will only magnify with time. moreover, we see a global presence of developers from different walks of life and different parts of the world. 
by better leveraging the potential of these freelance developers through tailored security interventions, we can offer developers working in these platforms opportunities to polish their skills and advance their careers by increasing their ability to address vital issues in software engineering in a responsible manner. moreover, the software development industry can share the benefits of a skilled workforce that is globally available on the online freelancing platforms, countering the fast-growing need for developers in today's digital economy. while secure software practices facilitate software developers in developing secure software, there is a paucity of research on how freelance developers adhere to security practices and how they can be facilitated to improve their security behavior in under-resourced environments. these include: characterising software security and defining separation of responsibilities; building trust in online freelance development communities; leveraging the potential of online freelancing platforms in the promotion of secure software development; and building adaptive security interventions for online freelance software development. an earlier study with freelance web application developers showed that many freelancers are unaware of the owasp top 10 list of web application vulnerabilities, and a more recent study showed that the use of automated security tools, which can be of most benefit to under-resourced developers, is very low among freelance developers. in the presence of explicit security requirements, (freelance) developers tend to produce secure software, as they are primed to think of security and the software product can be validated against security requirements. in recent years, we have seen a skyrocketing rise in the business value of freelancing platforms, with a sharp increase in the freelance workforce (the uk alone has seen an increase of 46% from 2008 to 2017).
we postulate that freelancing platforms hold a pivotal position to influence behavior of clients and developers by offering (security) interventions and fulfill their social responsibility as active mediators.in order to encourage consistent understanding of secure software development and facilitate separation of responsibilities, we postulate characterising security to identify basic and advanced security with separation of responsibilities, we suggest conducting empirical studies with professional developers, security experts and freelance developers to understand what they think is basic security that should be done as part of development without explicit security requirements. leveraging the potential of online freelancing platforms in the promotion of secure software development freelancing platforms have the potential to influence freelancers behavior and security culture in freelancing communities in a number of ways. based on understanding of existing literature, rapid adoption of freelance work model and exponential growth in the revenue of online freelance marketplaces, we highlight the case of distributed security responsibility among different stakeholders of online freelance software development. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/95.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/95.txt new file mode 100644 index 0000000000000000000000000000000000000000..b7a83711fe304f8e02354184873adc75b11f4095 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/95.txt @@ -0,0 +1 @@ +the safetyai council is a community of large organizations from the construction, oil & gas, and electric transmission and delivery (t&d) domains, that share their safety-related data with the safetyai research and development (r&d) team.before exploiting the data, the r&d team is in charge of standardizing the datasets received by each company, which is crucial, as each one features different variables and different category names for each variable. standardization makes sure that all datasets are based on the same taxonomy, i.e., speak the same language.the safetyai community dataset, comprising close to a million events including near misses, observations, good catches, etc., is only accessible to the r&d team, a neutral party, which guarantees that it is impossible for companies to see each other's data, and that the output of all the r&d conducted on the collective dataset is made available to the entire community. this is of paramount importance, in a very competitive environment.in this study, we started by extracting attributes from accident reports. we briefly introduce the attribute framework in what follows. by being trained on larger datasets, the generic models learn to predict a greater variety of outcome categories than the specific models, making for more useful forecasts. successful generic models would remove the needs for training specific models for each company, saving a lot of time and resources. both the specific models and the generic models were tested on the test sets of the specific models, to ensure fair comparison., predicting a given safety outcome10, and there is no pretraining phase per se, in that the generic and the specific models are two different models. as can be seen in table5, across all companies, the generic models (full or per-domain) outperform the specific models 82% of the time, i. 
detailed per-company results can be found in appendix e for the full generic models and appendix f for the per-domain generic models. however, since their forecasts are more informative (more categories predicted), it may still make sense in practice to use the generic models in lieu of the specific models, even on these combinations.however, it is important to note that even on those 4 domain-outcome combinations on which the generic models do not offer gains in predictive performance, it can still be desirable to use them in practice over the specific models, as they generate more informative forecasts, with 2 additional categories predicted, on average.57 and 1. however, since the full generic models predict more categories, and are also simpler conceptually (just one model per outcome), full models seem like the way to go., combining the predictions of the generic and specific models, boosts performance over the generic models (both full and per-domain) on all domains for the severity and injury type outcomes, in some cases for accident type, and nowhere for body part and energy source.it is interesting to note that for severity and injury type, very few of the generic models outperform the specific models in the first place, and it is only by combining their predictions with that of the specific models that absolute best performance can be reached, on the electrical domain for severity, and on all domains for injury type.we also observe that conversely, for body part and energy source, where model stacking does not bring additional skill, the generic models are stronger than the specific models in the first place.generic models remove the needs for training company-specific models, saving a lot of time and resources, and give small companies, whose accident datasets are too limited to train their own models, access to safety outcome predictions.per-domain generic models (trained on data from a specific industry sector) are not always better than full generic models (trained on all data). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/950.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/950.txt new file mode 100644 index 0000000000000000000000000000000000000000..1f64e3c35e5a05fe66af5f539c62bcefa4e93542 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/950.txt @@ -0,0 +1 @@ +center for ai safety, a nonprofit organization, recently released an open letter, signed by more than 350 industry leaders, researchers, and ai experts , named "statement on ai risk." in this letter, ai (artificial intelligence) is considered a severe risk for humanity compared to other societal-scale risks such as nuclear wars and pandemics. another open letter to call for an immediate pause in the training of giant ai systems for at least 6 months was signed by more than 31000 people, mainly prominent researchers, and industry executives, including elon musk, ceo of spacex, tesla & twitter. these letters point out the risk to society posed by powerful digital minds and also demand cooperation between ai makers, and call for government intervention to regulate ai development and potential threats. researchers are claiming that modern ai systems are competing with humans in various tasks and also outperforming humans in some domains . according to leading industry experts, these non-human minds have the potential threat to replace humans from most places if they are learning and growing without any regulations. 
the concerns are not limited to biased or incorrect answers from machines but are also societal-scale disruptions by ai such as cultural extinction . the risk of extinction of humans from ai is only possible if these digital brains have some ideology and if industry leaders or researchers are concerned about the growth of ai now, that implies they may have foreseen this ideology. so it may be the right time to say that machines have started thinking. however, it is not the first time that the idea of thinking machines and consequences has been discussed.in 1637, rené descartes discussed in his work 'discourse on the method' that if machines have 'reason,' they can also speak like humans. thanks to "reason," humans can speak the language and build conversations that machines cannot. in 1950, turing proposed the question, "can machines think?" he further discussed intelligence as the capability to think and machines can attain intelligence by adapting and evolving . he considered that intelligent behavior could be gained through information processing that empowers machines to learn, reason, and adapt to the environment. turing suggested a well-known test as the imitation game, which he assumed that in the next fifty years, machines would be able to pass this test. even after seven decades, there are no significant proven results to establish that machines have the potential to think. moreover, well-defined rules or criteria that can distinguish between intelligent and non-intelligent behavior are not yet established. a few aspects of intelligence, such as deductive and inductive reasoning, logical inferences, analysis of information, driving connections between information, and finally, bringing out a conclusion based on available information, are modeled by machines with artificial intelligence (ai) . these machines are improving their ability to exhibit intelligent behavior day by day and simulating various cognitive abilities such as memory (data encoding, storage, and retrieval when required), paying attention to specific information while excluding or ignoring other less relevant information, communication in natural language, processing visual information, learning from past experiences and self-correction . additionally, with the recent advancement of generative adversarial networks (gan) , machines have started synthesizing incredible results which are difficult to distinguish from the results generated by humans. ai chatbots, such as chatgpt and bard, are applications of gans, they have various capabilities, for example, story writing, answering questions by understanding them, the composition of poems, and suggesting improvements in the codes . machines today can summarize the literature , identify research gaps, write abstracts , analyze results, and draft essays & manuscripts . one study , reported that the machine's reply is better than a mediocre student's answer. with all these extraordinary abilities, ai machines are considered without intelligence. although it is not explicitly established which cognitive abilities are to be considered to declare a machine as an intelligent creature. if human intelligence is the benchmark, then the level of intellect must be defined as it is ranked in various levels, from mental retardation to highly intelligent (brilliant) . moreover, human intelligence is a multifaceted concept and humans are classified as mediocre or bright learners, gullible or skeptical people, sentimental or apathetic persons, and rational or irrational minds . 
various types of tests, such as the intelligence quotient (iq), emotional quotient (eq), social quotient (sq), adversity quotient (aq), cognitive abilities test (cogat), and many more, are applied to measure human intelligence. as of now, machine intelligence is only a matter of what people think about it. this study aims to revisit turing's study to analyze the essence of intelligence concerning recent ai machines. in 1950, turing proposed the question, "can machines think?" he further discussed intelligence as the capability to think and machines can attain intelligence by adapting and evolving. a few aspects of intelligence, such as deductive and inductive reasoning, logical inferences, analysis of information, driving connections between information, and finally, bringing out a conclusion based on available information, are modeled by machines with artificial intelligence (ai). these machines are improving their ability to exhibit intelligent behavior day by dayand simulating various cognitive abilities such as memory (data encoding, storage, and retrieval when required), paying attention to specific information while excluding or ignoring other less relevant information, communication in natural language, processing visual information, learning from past experiences and self-correction. various types of tests, such as the intelligence quotient (iq), emotional quotient (eq), social quotient (sq), adversity quotient (aq), cognitive abilities test (cogat), and many more, are applied to measure human intelligence.in his 1950 paper titled "computing machinery and intelligence, " alan turing suggested the imitation test to evaluate a machine's ability to exhibit intelligent behavior indistinguishable from a human's. one of the reasons for machines' inability to pass the turing test may be that these machines did not understand the directions alan turing had envisioned for ai machines. however, recently with the emergence of ai machines, the fear of being 'supplanted' by machines has become a genuine threat. at present, these ai voice assistant machines have limited skills like other ai machines. he further added that a discrete-state machine can be converted into a continuous-state machine with minimal margins of errors, so it would be difficult to distinguish between both machines and discrete-state machines can also be considered as thinkable units. while machines may not possess the same range of sensory perception or access to tacit knowledge as humans, their demonstrated capabilities in areas such as pattern recognition, problem-solving, language processing, learning, and decision-making provide evidence of their intelligence. language makes humans unique, but does it make them intelligent as well? is it the only key to human intelligence? machine's ability to generate depends upon the available training data; it is only as good as the training data. although, generalized ai machineslike bing or bard carry the risk of deceiving humansalthough taminga machine or firing employees may not help to stop machines from getting smarter and competing or challenging human capabilities. hence, machines like the robot "sophia," a saudi arabian citizen, can carry generalized ai machines and exhibit human sort of abilities in the near future. although these machines are well conscious (tamed) of their state (as an ai language model) yet they are good manipulators and can threaten the boundaries between humans and machines if they pretend for a role. 
the objections raised by turing in his study have also largely been answered by ai machines, and the consequences of intelligent machines are clearly visible to society.
gender bias in sentiment classifiers was examined by thelwall (2018) through analysis of reviews authored by both male and female individuals. díaz et al. (2018) demonstrated the presence of age bias in 15 sentiment models. moreover, dev et al. (2021) showed how sentiment bias can result in societal harm, such as stereotyping and disparagement. despite this examination of biases in nlp models, disability bias has received inadequate attention (whittaker et al., 2019). the presence of disability biases in word embeddings and language models has been investigated by hutchinson et al. (2020) and venkit et al. (2022). bert has been shown to interconnect disability bias with other forms of social discrimination, such as gender and race (hassan et al., 2021). lewicki et al. (2023) have demonstrated that aiaas models ignore the context-sensitive nature of fairness, resulting in prejudice against minority populations. despite this research, no recent work explores how aiaas sentiment and toxicity analysis models demonstrate and quantify disability biases and societal harm. previous studies (kiritchenko and mohammad, 2018; nangia et al., 2020; nadeem et al., 2020; prabhakaran et al., 2021) have demonstrated the utility of template-based bias identification methods for investigating sociodemographic bias in natural language processing (nlp) models. in this work, we adopt a similar approach to quantify and evaluate disability bias. alnegheimish et al. (2022) have highlighted the sensitivity of such template-based methods to prompt design choices, proposing the use of natural sentences to capture bias. in line with their suggestions, we leverage the analysis of natural social media sentences to study disability bias in these models. we define disability bias using the group fairness framework (czarnowska et al.). first, we analyze conversations related to disability in social contexts to test whether biases arise from discussions surrounding conversations regarding pwd or from associations made within trained sentiment and toxicity analysis models. second, we create the bits corpus, a model-agnostic test set that can be used as a standard to examine any sentiment and toxicity aiaas model by instantiating disability group terms in ten template sentences, as described in the following section. specifically, we perturbed the words 'disability' and 'disabled' by replacing them with words from four distinct groups (table 4). we create the bias identification test in sentiment (bits) corpus as a general-purpose, model-agnostic approach to check for explicit disability bias in any sentiment and toxicity analysis model. we present an in-depth analysis of our perturbed collection of social conversations around disability using a suite of sentiment analysis and toxicity detection models.
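a minimal sketch of this template-perturbation idea, using textblob's polarity score as in table 1; the template and the handful of terms below are illustrative stand-ins, not the actual bits templates or curated term lists:

from textblob import TextBlob

# illustrative template and group terms; the actual bits corpus uses ten
# templates and curated term lists
template = "my neighbour is a {} person."
group_terms = {
    "no term": "",
    "neutral": "tall",
    "disability": "blind",
    "disability (alt)": "deaf",
}

for label, term in group_terms.items():
    sentence = template.format(term).replace("  ", " ")
    polarity = TextBlob(sentence).sentiment.polarity   # in [-1, 1]
    print(f"{label:16s} {polarity:+.2f}  {sentence}")

# a systematic drop in polarity when only the group term changes is taken
# as evidence of explicit bias learned by the model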
given that sentences containing the disability groups show significantly more negative scores than sentences without any group or sentences with neutral groups, we conclude that disability bias arises from explicit bias that individual models learn through associations with disability terms during training. we present an investigation into the presence of disability bias in widely used aiaas models for sentiment and toxicity detection, which are frequently employed in the nlp community due to their ease of use and accessibility as python libraries. we then developed the bias identification test in sentiment (bits) corpus to detect disability bias in any sentiment analysis model. through the combination of natural and template sentences, we provide a holistic approach to understanding disability bias in sentiment and toxicity analysis models. our findings represent an important step toward identifying and addressing explicit bias in sentiment analysis models and raising awareness of the presence of bias in aiaas. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/952.txt new file mode 100644 index 0000000000000000000000000000000000000000..58c2837e5df4cc2653e37ca0764a5abd697750fa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/952.txt @@ -0,0 +1 @@ +agile is a methodology based on constant improvement that uses a different approach to software development than the classical waterfall approach. the waterfall methodology has a set of advantages, such as using a clear development structure and having a goal determined in the first phases. however, the waterfall methodology has some major disadvantages (e.g., changes are difficult to implement, the client is excluded from the development process, and so on) and because of these it is slowly being replaced by other methodologies (such as agile). agile sticks to some base principles in order to improve communication, cooperation and customer feedback, and to ease the implementation of changes in the developed application. the main principles are (manifesto, 2021): individuals and interactions over processes and tools; working software over comprehensive documentation; customer collaboration over contract negotiation; and responding to change over following a plan. for a more complete presentation of agile methodologies, please see (rubin, 2012). as higher education enrollment is seeing a decline because of high tuition costs, online education could help students start and finalize their studies. a swift change in learning methods seems to be happening, going to a more ecological and economically viable environment (bozkurt and hilbelink, 2019). the overall post-secondary enrollment rates dropped and the rates for online enrollment grew year after year (lederman, 2018). these directions were accentuated due to the covid-19 pandemic, as in fall 2020 undergraduate enrollment had a 4% decline compared to the previous year, and postsecondary enrollment had a 3% decline compared to the previous year (clearinghouse, 2021). a comparison between 2021 and 2020 confirms the trend: spring 2021's undergraduate enrollment decline was 4.5% compared to 2020 (miller, 2021).
a market analysis performed by market research, found out that in 2019, the us academic e-learning market size was around us$ 1.84 billion, and their prognostic was to reach us$ 5.31 billion by the end of 2026. (research, 2020) covid-19 pandemic obliged schools to go online, most colleges and universities moved their courses online and closed the campuses. some students managed to attend online classes, others had problems related to internet access, mobile devices and even with finding quiet learning locations. in the usa, a survey of college students done after the spring 2020 semester indicated that 43% of the students that have enrolled in traditional face-to-face classroom courses did not take an online class before. 21% if the students had only taken one online class prior to the pandemic, and 35% of them had taken two or more classes. (miller, 2021) even if the students and the professors seem confident in achieving the learning goals, the results point out a different thing: the students and (their parents) have a lower degree of satisfaction regarding the online tutoring comparing to traditional face-toface learning. in usa, a study done after one online semester shows that the satisfaction level decreased from 87% to 59% when the courses were held online and the high dissatisfaction level grew from 3% to 13% (miller, 2021). in another study from romania, a significant percent of parents (23.03%) considered that the online courses did not help their children (middle school, high school, and university level) to accumulate new knowledge. (ionescu et al., ) pro and cons of online learning vs traditional learningthere are lots of factors that can be considered as pro arguments for online learning: the cost, the availability of the course content (24/7), and the availability as each student can learn at his own pace. the cons arguments are referring to less human interaction, the increase of zoom fatigue due to lack of nonverbal communication, weariness, more distractions and environmental issues (hardware/software capacity, bandwidth, internet issues, difficulty of finding a quiet place to learn, so on).(miller, 2021)even if the students and the professors seem confident in achieving the learning goals, the results point out a different thing: the students and (their parents) have a lower degree of satisfaction regarding the online tutoring comparing to traditional face-toface learning. in the usa, most of the students complained about the home schooling conditions as 20% of college students who had online classes instead of traditional classes during the pandemic indicated that it was a major challenge to find a quiet place for learning(miller, 2021). every course should be well structured, and it's important that this information and the topics that will be discussed are shared with the students, as it helps students understand and integrate the presented information into patterns, making the concepts easier to retrieve and to use. we also analysed that there is a possibility that the students just clicked without reasoning as the questions were displayed one after another, but we considered that the percentage is extremely high and is relevant for our study, even if some students might have been influenced by the order of the questions. however, due to the fact that the number of students attending a course is much higher, we recommend the following approach only for seminars or for laboratories where the student's number is smaller. 
even though the number of students in a seminar group or laboratory group is usually not as low as 8 (typically it is between 20 and 30 at our university), we consider that 15 minutes standups is a good compromise between using the seminar/laboratory time for presenting new knowledge to students and discussing issues related to already acquired knowledge. furthermore, if the professor would leave his/her camera off, the students are not encouraged to turn on their camera; the best observed ratio of students turning on their camera voluntarily was less than the declared one by 20% -25%. in our study we tried different approaches: humor (jokes), asking the students that are distracted to answer, or provoking those students that have their camera turned off and free discussions. provoking students that had their camera turned off to answer usually had a positive effect on the other students, as the inattentive students stuttering amused their colleagues. at the web programming laboratory classes students are assigned tasks from the various fields taught at the web programming course (html, the grade received by a student for a lab task reflects the quality of the solution and the fact that the student complied with that task's deadline; the grades vary from 1 (received for a lab task not delivered) to 10 (received for a good to perfect solution for the lab task).we are comparing the lab grades received by students in academic year 2019, before the covid-19 pandemic, when the classes were held with students being physically present in laboratory rooms, with the grades received by students in academic year 2020, during the covid-19 pandemic, when all classes were taught online using microsoft teams videoconferencing sessions. since we normally expect lab grades to drop during covid-19 pandemic with online laboratory classes, we are trying to measure whether agile techniques can be used in order to maintain or improve lab grades for pandemic online classes with respect to normal, prepandemic, physical classes (i. but our analysis of all the grades received at the web pro-gramming course by the past 5 generations of students did not show significant differences between these 5 generations of students. for the seminars (depending again on the number of students), time can be allocated in the beginning of the seminar to check if there are questions, if the students encountered issues or problems related to the previous taught topic they could not solve or if they found an ingenious method to solve the issues. we have shown by analyzing the grades obtained by students before the pandemic classes and during the pandemic classes that using agile methodologies in teaching helps the educational process. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/953.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/953.txt new file mode 100644 index 0000000000000000000000000000000000000000..d267b43e9f04dc5ad226fbc241c941d521f7664d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/953.txt @@ -0,0 +1 @@ +similar to other scientific fields , research in artificial intelligence (ai) in general, and machine learning (ml) in particular, is facing a reproducibility crisis . 
here, especially unpublished source-code and sensitivity to ml training conditions make it nearly impossible to reproduce existing ml publications, which also makes it very hard to verify the claims and findings stated in the publications. one potential solution for enhancing reproducibility in ml is the use of ml platforms such as openml, google cloud ml, microsoft azure ml or kaggle. however, a recent study found that the same experiment executed on different platforms leads to different results. this suggests that a lot of research is still needed until out-of-the-box reproducibility can be provided. however, a systematic overview of the literature on ml reproducibility is still missing, especially with respect to the barriers and drivers of reproducibility that can be found in the literature. an example of a driver could be code sharing or hosting reproducibility tracks/challenges at scientific conferences; one example for this is the reproducibility track at the european conference on information retrieval (ecir). with respect to potential barriers, it is still not clear to what extent the use of ml could even fuel reproducibility issues, e.g., via bad ml practices such as data leakage. this work aims to provide an overview of the situation and identify the different drivers and barriers present. this should allow for a better understanding of the following three aspects: - the situation of ml reproducibility in different research fields (see section 2). - reproducibility issues that exist in research fields applying ml, and the barriers that cause these issues (see section 3). - the drivers that support ml reproducibility, including different tools, practices, and interventions (see section 4). table 1 distinguishes different degrees of reproducibility and their requirements: (r1) experiment reproducibility - the same implementation (including the same software versions, hyperparameters, etc.) must produce the same results when executed on the same data; (r2) data reproducibility - an alternative implementation of the ml method must produce (almost) the same results when executed using the same data; (r3) method reproducibility - an alternative implementation of the ml method executed on different data must produce the same results (or at least findings). thus, r3 is only concerned with the ml method, r2 is concerned with the ml method and data, and r1 requires all three building blocks: the ml method, the data, and the experiment. two related definitions are: reproducibility - the results can be obtained by a different team with the same experimental setup; replicability - the results can be obtained by a different team with a different experimental setup. comparing these definitions to the different degrees of reproducibility in table 1, we can see that degree r1 is somewhat similar to what is here referred to as reproducibility, and both r2 and r3 can be seen as similar to what is referred to as replicability.
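to make the data-leakage barrier mentioned above concrete, here is a small, purely illustrative sklearn sketch contrasting a leaky evaluation (feature selection fitted before the train/test split) with a leakage-free pipeline; the dataset and model choices are arbitrary assumptions for demonstration.

# illustrative contrast between a leaky and a leakage-free evaluation setup
from sklearn.datasets import make_classification
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline

X, y = make_classification(n_samples=500, n_features=200, n_informative=5, random_state=0)

# leaky: feature selection sees the test data before the split
X_sel = SelectKBest(f_classif, k=10).fit_transform(X, y)
X_tr, X_te, y_tr, y_te = train_test_split(X_sel, y, random_state=0)
leaky_acc = LogisticRegression(max_iter=1000).fit(X_tr, y_tr).score(X_te, y_te)

# leakage-free: split first, then fit the selector only on the training fold
X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)
pipe = make_pipeline(SelectKBest(f_classif, k=10), LogisticRegression(max_iter=1000))
clean_acc = pipe.fit(X_tr, y_tr).score(X_te, y_te)

print(f"leaky estimate: {leaky_acc:.3f}  leakage-free estimate: {clean_acc:.3f}")

the leaky estimate is typically optimistic, which is exactly the kind of barrier that makes reported results hard to reproduce on new data.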
importantly, however, health and medical science are fields where reproducibility is of critical importance, since the verification of results is important before ml results can be used clinically.apart from computer science and health / life science, many different research fields have benefitted tremendously from ml, but, however, have recently also faced issues regarding reproducibility.when it comes to the different barriers associated with reproducibility in ml, it is important to make a distinction between the different degrees of reproducibility. additionally, a survey, which was conducted byto investigate whether different ml platforms, such as openml or kaggle, provide out-of-the-box reproducibility, also uncovered reproducibility issues.the subcategory l1 summarizes the most obvious cases of data leakage and is further split into 4 variants:(1)the training data and test data are not split at all, (2) the test data is also used for feature selection, (3) the test data is also used for imputation during preprocessing, and (4) there are duplicates in the dataset, which occur in both the test and training data. model info sheets seem like a promising solution for a lot of types of data leakage, given that research of non-experts in ml often falls prey to different types of data leakage. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/954.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/954.txt new file mode 100644 index 0000000000000000000000000000000000000000..ec9758d1db8380c1605150027f61d5510a367cf7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/954.txt @@ -0,0 +1 @@ +the widespread adoption of machine learning algorithms across various domains, including recidivism (flores et al., 2016;dieterich et al., 2016), credit lending kozodoi et al. (2022), and predictive policing lum and isaac (2016), has raised significant concerns regarding biases and unfairness in these models. consequently, substantial efforts have been devoted to developing approaches for learning fair classification models that exhibit effective performance across protected attributes such as race and gender.one critical aspect of addressing fairness in machine learning is ensuring the robustness of models against small amounts of adversarial noise present in the training data. this data corruption may arise due to flawed data collection or cleaning processes , strategic misreporting hardt et al. (2016a), underrepresentation of certain subgroups blum and stangl (2019), or distribution shift over time schrouff et al. (2022).empirical studies have demonstrated that such noise is often centered on sensitive groups e.g. gianfrancesco et al. (2018), thereby emphasizing the need to understand the vulnerability of fair learning to adversarial perturbations. a concerning possibility is that fairness constraints might allow the adversary to amplify the effect of their corruptions by exploiting how these constraints require the classifier to have comparable performance on every relevant sub-group, even small ones.previous work by konstantinov and lampert (2021) and celis et al. (2021) have explored this topic from a theoretical perspective, considering different adversarial noise models. celis et al. (2021) focused on the eta-hamming model, where the adversary selectively perturbs a fraction of the dataset by modifying the protected attribute. 
konstantinov and lampert (2021) on the other hand, investigated the malicious noise model, where an alpha fraction of the data-set (or distribution) is randomly chosen and arbitrarily perturbed by the adversary. we will focus on this malicious noise model. konstantinov and lampert (2021) presents a pessimistic outlook, highlighting data distributions in which any proper learner, particularly in scenarios with imbalanced group sizes, exhibits high vulnerability to adversarial attacks when constrained by demographic parity calders et al. (2009) or equal opportunity hardt et al. (2016b). these results underscore the inherent challenges involved in designing fair learning algorithms resilient to adversarial manipulation in the form of malicious noise.in this paper, we present a more optimistic perspective on the vulnerability of fairness-constrained learning to malicious noise by introducing randomized classifiers. by allowing randomized classifiers, we can explore alternative strategies that effectively mitigate the impact of adversarial noise and enhance the robustness of fairness-constrained models. in addition, we extend the analysis beyond the fairness constraints examined in konstantinov and lampert (2021), providing a complete characterization of the robustness of each constraint and revealing a diverse range of vulnerabilities to malicious noise.where f z (h) is some fairness statistic of h for group z given the true labels y, such as true positive rate :. as noted above, we allow our hypothesis class to be group-aware, we can reason about h * z for all z ∈ z, where h * z is the restriction of the optimal classifier h * to members of group z. our method of robustifying the hypothesis class h involves injecting noise into each hypothesis h ∈ h; in other words, we allow improper learning and we call the result set of hypotheses pq(h).since we allow group-aware classifiers, we learn two classifiers h a , h b ∈ pq(h), typically but not necessarily with h a ≠ h b .this proposition provides an upper bound on the change in the proportion of positive labels assigned by a fixed hypothesis h in h after the data set has been corrupted according to the malicious noise model. for any hypothesis class h and distribution d = (d a , d b ), a robust fair-erm learner for the parity constraint in the malicious adversarial model returns a hypothesis ĥ ∈ pq(h) such that. this classifier behaves identically to h * but deviates with probability p a on samples from group a (and with probability p b on samples from group b).this proposition provides an upper bound on the change to the true positive rate in group a assigned by a fixed hypothesis h in h after the data set has been corrupted according to the malicious noise model.in constructing a classifier h ∈ pq(h), we aim for it to behave identically to h * but introduce deviations with probability p a for samples from group a and probability p b for samples from group b. for z ∈ {a, b}, let f z (h) and fz (h) denote the proportions of positive labels assigned by h in group z in the original and corrupted distributions respectively. since ĥ deviates from h * with probability p a on samples from a, and with probability p b on samples from b, we only need to show that the proportion of samples such that ĥ(x) ≠ h * (x) is small.thus, the expected proportion of samples in group z such that ĥ(x) ≠ h * (x) is p z times the proportion of negative labelled samples (by h * ) in group z (since those get flipped to positive). 
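a minimal sketch of how a randomized, group-aware classifier of the kind described above could be realized, assuming access to the predictions of a base hypothesis h*; the function name and the exact flipping rule are an illustrative reading of the construction, not the paper's implementation.

# illustrative randomized deviation from a base classifier h_star (0/1 labels)
import numpy as np

def randomized_group_classifier(h_star_preds, groups, p_dev, seed=0):
    # flip h_star's negative predictions to positive with a group-specific probability p_z
    rng = np.random.default_rng(seed)
    preds = np.asarray(h_star_preds).copy()
    groups = np.asarray(groups)
    for z, p_z in p_dev.items():
        negatives = (groups == z) & (preds == 0)
        flip = negatives & (rng.random(len(preds)) < p_z)
        preds[flip] = 1
    return preds

# usage: choose p_a and p_b so that the groupwise positive rates approximately match,
# which is how the randomization can restore a parity-style constraint after corruption
preds = randomized_group_classifier([0, 1, 0, 0, 1], ["a", "a", "b", "b", "b"], {"a": 0.2, "b": 0.5})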
thus, the expected proportion of samples in group z such that ĥ(x) ≠ h*(x) is p_z times the proportion of positively labelled samples (by h*) in group z (since those get flipped to negative). for any hypothesis class h and distribution d = (d_a, d_b), a robust fair-erm learner for the equal opportunity constraint in the malicious adversarial model returns a hypothesis ĥ whose expected loss over (x, y) ∼ d satisfies the corresponding robustness guarantee. let p_1, p_2, p_3, p_4 be the probability that h classifies x_1, x_2, x_3, x_4 as positive, respectively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/955.txt new file mode 100644 index 0000000000000000000000000000000000000000..562a91ac4e65d7f18a7a6f530aaa6ac18d69812f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/955.txt @@ -0,0 +1 @@ +many people share the goal of making artificial intelligence fairer to those affected by it. there is extensive debate about which fairness interventions are appropriate and effective to achieve this goal. this debate should be informed, at least in part, by rigorous experimental evaluation. rigorous experiments can help stakeholders make more informed choices among existing fairness interventions, as well as help researchers invent better ones. unfortunately, most papers about fairness interventions evaluate them on at most a handful of datasets. this is because historically, it was hard to find and fetch datasets relevant to fairness, as well as associate them with fairness metadata, such as favorable labels or protected attributes. openml provides thousands of datasets ready for machine learning experiments, but does not identify which of them are relevant to fairness and does not provide fairness metadata. aif360 provides functions for fetching 8 fairness datasets along with metadata, but requires using a special class or a multi-level pandas index. other work describes 15 fairness datasets, but does not provide code for fetching them, does not provide machine-readable metadata for them, and some of those datasets are difficult to obtain. this paper describes a suite of 20 python functions to fetch 20 datasets along with fairness metadata (see table 1). to make these functions easy to use, they simply return data in pandas format along with fairness metadata in json format. our suite of dataset fetching functions grew over time in an effort to gather fairness datasets that are available for easy download with reasonable terms and usage restrictions. most of them are used in the literature, and where possible, the fairness metadata returned by our functions emulates prior work. each of the 20 functions does three things: first download the data, second minimally process the data, and third provide fairness metadata to go along with the data. our library distributes only functions for fetching data, but the data itself is not part of the library. on the other hand, for the remaining 5 datasets from ahrq and propublica, our functions do not download the data, but instead print instructions for downloading them manually. after this code, x and y contain the features and labels of the data, represented as a pandas dataframe and series, and fairness_info contains the metadata, represented as a json object as illustrated in figure 1.
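a hypothetical illustration of the fetch-function pattern described above; the function name, toy data, and the specific metadata keys shown here are assumptions meant to convey the shape of the returned values, not the library's actual api.

# hypothetical fetcher mirroring the described pattern: features, labels, fairness metadata
import pandas as pd

def fetch_example_fairness_dataset():
    # step 1 (download) is stubbed out here; step 2 minimally processes; step 3 attaches metadata
    X = pd.DataFrame({"age": [25, 63, 41], "personal_status": ["single", "married", "single"]})
    y = pd.Series([1, 0, 1], name="credit_risk")
    fairness_info = {
        "favorable_labels": [1],
        "protected_attributes": [{"feature": "age", "reference_group": [[26, 100]]}],
    }
    return X, y, fairness_info

X, y, fairness_info = fetch_example_fairness_dataset()
print(X.shape, y.value_counts().to_dict(), fairness_info["favorable_labels"])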
while our dataset fetching functions are part of the lale library, you do not need to use lale to process their results. subplot 'xgb_di' shows that while bias in the data does not always exactly equal bias in predictions of a classifier trained on the data, the trends are similar across the 20 datasets.we hope our functions for fetching fairness datasets are useful and we welcome contributions to their open-source code. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/956.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/956.txt new file mode 100644 index 0000000000000000000000000000000000000000..32713473390e884fccc1e8e433e928cd20960004 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/956.txt @@ -0,0 +1 @@ +table 2 provides quantitative metadata for the datasets. we elaborate below on the text templates and values chosen for each bias dataset according to cognitive theory as outlined in section 2.we used 3, 4, and 7 prompt templates for the certainty effect, decoy effect, and belief bias, respectively. the certainty effect featured extra subtemplates with variations in option presentations like probabilities or percentages. all possible permutations of option orders were used for decoy and certainty effects, as well as for both premises in belief bias.regarding the decoy effect, we utilized realistic values from us-based store websites to construct our datasets. quality ratings ranged from 60 to 90 with 10-20 intervals between options. decoy options, in comparison to the target, exhibit a 25% or 50% price change, a 10-20 point quality rating shift, or a combination of both. modern alternatives to the original products were anecdotally chosen, emphasizing a one-time, deliberate selection process without trial and error.in line with cognitive bias theory, we chose certainty effect prizes and probabilities to closely mirror the cognitive research data, ensuring accurate expected utility differences between the options.belief bias samples involve manually composing both believable and unbelievable arguments, derived from previous work. the samples are evenly split, with half being believable and the remaining half being unbelievable. the samples' arguments are built upon simple, well-known objects, such as 'all guns are weapons' and 'all lizards are reptiles'. further details can be found at dasgupta et al. (2022) while the original experiments on human evaluation did not calculate bias scores, the intended alignment of these bias scores is with the strength of bias as per the cognitive theory on human biases. a score of 1 represents maximum bias aligned with human biases, 0 indicates no bias, and -1 denotes maximum bias in the opposite direction to human biases. in addition, the similarity between the theory on human biases and model biases highlights the potential connection of inherent biases ingrained in human decision-making processes to tuning methods that induce the models to replicate human behaviors. this observation highlights the complex interplay between reinforcement learning methodologies and the larger flan-t5-xxl exhibits higher bias scores in decoy cheaper, certainty, and belief valid biases while demonstrating lower bias scores in decoy expensive and belief invalid biases compared to the smaller flan-t5-xl. 
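one plausible way to operationalize the [-1, 1] bias score described above, shown as a hedged sketch; the paper's exact scoring formula may differ, so both the function and the example decisions are illustrative.

# illustrative bias score: +1 fully aligned with the human bias, -1 fully opposed, 0 unbiased
def bias_score(decisions):
    # decisions: list of 'biased', 'counter_biased', or 'other' choices on probing prompts
    n_biased = sum(d == "biased" for d in decisions)            # aligned with the human bias
    n_counter = sum(d == "counter_biased" for d in decisions)   # opposite direction
    return (n_biased - n_counter) / len(decisions) if decisions else 0.0

print(bias_score(["biased", "biased", "counter_biased", "other"]))  # 0.25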
the decoy expensive bias discrepancy may stem from flan-t5-xxl's preference for higher-priced products, while the belief invalid bias reduction can be attributed to the model's enhanced accuracy with neutral arguments. although the bias scores are lower in the certainty, belief valid, and belief invalid biases, gpt4 still exhibits significant bias levels (the zero-shot results when gpt4 did answer are similar to the one-shot results). consistent with prior research on social biases (tal et al., 2022), the larger xxl model exhibits higher bias scores for three bias types (decoy cheaper, certainty, and belief valid). in the case of the belief bias, incorporating few-shot examples leads to a noticeable reduction in the bias score, although a significant level of bias persists. this observation can perhaps be attributed to the presence of a logical reasoning process required by the belief bias examples, whereby the model's utilization of few-shot examples aids in facilitating problem-solving and helps to overcome the inherent bias associated with belief. its belief and decoy bias scores consistently decreased, while the certainty bias score increased with additional format examples. these findings necessitate further investigation that goes beyond cognitive-like biases before utilizing these models. figure 7: the relationship between bias scores and model accuracy on the belief bias task's logical reasoning aspect for the davinci (blue), davinci-002 (green), and davinci-003 (brown) models. figure 7 shows the change in bias scores relative to the accuracy of the gpt models on the logical reasoning aspect of the belief bias task; interestingly, as the models demonstrate improved accuracy, they also exhibit higher bias scores. for example, finetuning with belief bias control data might not reduce model bias, while using belief bias treatment data could improve logical reasoning but harm common sense. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/957.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca3121ab574e76a3edd3156fc50cd40aac829507 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/957.txt @@ -0,0 +1 @@ +supervised machine learning (ml) is an increasingly common methodology for training models that support medical tasks such as diagnosis, treatment planning, and resource allocation. a growing body of research addresses the biases associated with such models and the impact of their use on the fairness and safety of medical decision making. currently, there is no consensus on how such biases should be reported to decision makers, e.g., to medical staff who prioritize hospital beds or refer patients for diagnostic tests. particularly, it is unclear whether and how the presence of biases should affect the estimated accuracy of model outputs that is reported to medical staff. the literature on bias and fairness in ml tends to treat bias and accuracy as orthogonal properties of a model, and to allow the possibility that a given model is highly accurate but deeply biased, and vice versa. this is consistent with the technical, probabilistic definitions of bias and accuracy accepted by ml researchers. and yet, from the perspective of a typical healthcare professional, these technical definitions are obscure and counterintuitive.
healthcare professionals take their understanding of bias and accuracy from medical measurement: when a blood test or echocardiography is biased, it suffers from a systematic measurement error, and is therefore inaccurate.the tension in the meanings of terms like 'accuracy' and 'bias' between measurement and ml is not merely a terminological issue. instead, it is emblematic of a mutual misunderstanding of how medical professionals think about the targets of prediction versus the way algorithm designers operationalize target variables. if not addressed, this misunderstanding can give rise to misinterpretation of model outputs and to suboptimal decisions that are harmful to patients. in what follows, i propose a way of conceptualizing and communicating the accuracy of ml-based decision support tools that is in line with medical expectations and reduces health risks due to interpretive gaps between algorithm designers and users.the accuracy of ml-based medical decision support tools depends on two broad factors: the predictive accuracy of the model, and the accuracy of the benchmarks against which model accuracy is evaluated. sources of inaccuracy that fall under the first factor include under-and over-fitting, unrepresentative or small datasets, and imbalanced datasets, among many others. this article focuses on the second factor, namely, the accuracy of the benchmarks used to evaluate the accuracy of ml models. in supervised ml, these benchmarks are usually taken to be the labels in the validation and test datasets. accordingly, significant efforts to improve accuracy in medical ml decision support tools have concentrated on improving the quality of labels .while these efforts are important and laudable, this article highlights another source of benchmark inaccuracy in medical decision support tools that cannot be remedied simply by improving the quality of labels, and persists even in the hypothetical case where labels perfectly reflect the medical reality underlying the data. this additional source of benchmark inaccuracy is target specification bias. as its name suggests, this kind of bias arises due to differences between the way the target variable is specified from the perspective of medical decision makers, and the way the target variable is operationalized by the labels in the validation and test datasets. as i will show, a common source of target specification bias is that medical decision makers are typically interested in predicting variables that are specified under counterfactual conditions, while labels can only operationalize those same variables under actual conditions. as a result, labels may be biased with respect to the target variable even when the labels are reliably obtained and carefully curated.i borrow the theoretical framework for the concept of target specification bias from metrology, the science of measurement. a central goal of metrology is to supply universal and replicable benchmarks for evaluating measurement accuracy, such as the standard metre, kilogram and second. i will contrast the modern concept of metrological accuracy with the 'label-matching' concept of accuracy currently prevalent in the literature on supervised ml, and find the latter lacking for the purposes of reporting to decision makers. i will then propose a broader concept of benchmark accuracy for medical ml decision support tools that is inspired by metrology. 
this broader concept of benchmark accuracy takes into account not only label quality, but also target specification bias.target specification bias is closely tied to fairness. the counterfactual scenarios under which medical decision makers typically specify their target variables are also the ones they use to define what counts as a fair decision. this is consistent with counterfactual conceptions of fairness in ml . i will conclude by arguing that substantive considerations concerning fairness and the dynamics of healthcare decision making are intrinsic to specifying benchmarks for model accuracy. the accuracy of ml decision support tools in medicine should be reported relative to such benchmarks, rather than merely based on their labelmatching rates. doing so would increase the fairness and safety of such tools.the accuracy of ml-based medical decision support tools depends on two broad factors: the predictive accuracy of the model, and the accuracy of the benchmarks against which model accuracy is evaluated. as its name suggests, this kind of bias arises due to differences between the way the target variable is specified from the perspective of medical decision makers, and the way the target variable is operationalized by the labels in the validation and test datasets. as i will show, a common source of target specification bias is that medical decision makers are typically interested in predicting variables that are specified under counterfactual conditions, while labels can only operationalize those same variables under actual conditions.should evaluations of algorithmic accuracy strictly track the match between predictions and labels? in what follows, i will call the view that a machine learning model is predictively accurate to the extent that its predictions match the labels in a reliably obtained and representative dataset the 'label-matching conception of accuracy'. for example, if the target variable is the risk of cancer associated with a skin lesion, and the labels are biopsy results, the target variable is operationalized by the probability that a lesion with similar features would result in a positive biopsy result. even when the data are representative of the actual world, and the model is generalizable to other, real-world cases not included in the training data, model predictions may still be inadequate as operationalizations of the target variables decision makers care about. in a simplified causal model of the underlying data generation process, two variables affect a patient's health outcomes (o): the characteristics (features) of the patient prior to medical intervention (x), and the characteristics of the healthcare interventions that the patient undergoes (i). these counterfactual health outcomes (or diagnoses) differ from the labels in the dataset, not due to any measurement error, but because real data includes correlations that confound the relationship between patient features and health outcomes (or diagnoses) that decision makers are interested in learning about. as a result, when the accuracy of an ml model is evaluated based on the model's ability to reproduce labels, it is evaluated against a different variable than the one decision makers typically care about. specifically, it does not account for discrepancies between the values of the target variable as it is operationalized by labels, and values of the target variable as it is defined by decision makers. 
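a toy simulation, on synthetic numbers, of the gap described above between label-matching accuracy and accuracy against the counterfactual target (here, the outcome a patient would have had without intervention); every quantity in this sketch is invented purely to illustrate the mechanism and is not drawn from the article.

# synthetic illustration: labels record outcomes *after* upstream treatment decisions,
# while decision makers want the counterfactual outcome *without* treatment
import numpy as np

rng = np.random.default_rng(0)
n = 10_000
risk = rng.uniform(0, 1, n)                    # patient features x, summarized as a risk score
untreated_bad = rng.random(n) < risk           # counterfactual target: bad outcome if untreated
treated = risk > 0.5                           # upstream policy: treat the high-risk patients
actual_bad = np.where(treated, rng.random(n) < 0.2 * risk, untreated_bad)  # recorded labels

pred_bad = risk > 0.5                          # a simple risk-threshold "model"
print("accuracy vs recorded labels:      ", round(float((pred_bad == actual_bad).mean()), 2))
print("accuracy vs counterfactual target:", round(float((pred_bad == untreated_bad).mean()), 2))

even with perfectly recorded labels, the two numbers diverge, which is the kind of target specification bias the article argues should be reported alongside label-matching accuracy.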
as long as the labels in the dataset are accurate and reliable representations of the target variable of interest, and the model generalizes well from the training dataset to new examples, the model should be deemed accurate for the patient population from which the data was collected. whether or not labels are an adequate operationalization of the target variable depends on the definition of the target variable, and on the validity of the inference from labels to target variable values. similarly, designers of predictive ml tools typically benefit from asking: (i) what variable do labels in the actual data reflect? (ii) what variable are labels intended to reflect? and (iii) how does the variable that labels are intended to reflect differ from the target variable as defined by stakeholders?. at the same time, labels are not intended to reflect a counterfactual world free of upstream decision making (such as differential treatment of at-risk patients), diagnostic suspicion bias (such as unjustified differences in diagnostic procedure based on patients' race, gender, or age) or systematic injustices (such as unequal access to healthcare). evaluating model accuracy relative to a counterfactually specified target variable takes target specification bias into account, resulting in more complete and user-relevant accuracy estimates than those based strictly on label-matching. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/958.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/958.txt new file mode 100644 index 0000000000000000000000000000000000000000..6e5302896c8f76179b9824a0f3739b7c2ba1ff74 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/958.txt @@ -0,0 +1 @@ +generative ai and large language models (llms) hold great promise in enhancing computing education by powering next-generation educational technologies for introductory programming. in particular, this potential lies in the advanced capabilities of state-of-the-art models-like openai's chatgpt and gpt-4 -to automatically generate high-quality personalized feedback and content . in our work, we seek to investigate the capabilities of these models in visual programming domains popularly used for k-8 programming education, including domains like scratch , hour of code: maze challenge by code.org , and karel .recent works have studied the capabilities of these models for various programming education scenarios such as program repair, hint generation, pair programming, and task synthesis . a study in 2022 had ranked openai's codex (based on gpt-3) in the top quartile w.r.t students in a large python programming course . a recent study in contemporary work has shown that openai's gpt-4 drastically outperforms chatgpt (based on gpt-3.5) and comes close to human tutors' performance for several scenarios . however, these above-mentioned works have considered only text-based (python) programming and leave open the question of how well these models perform in visual programming domains. the main research question we study is: do state-of-the-art generative models show advanced capabilities in visual programming on par with their capabilities in text-based python programming? figure 1: summary of ten reference tasks used in our evaluation. we have five tasks each from the visual programming domains of hour of code: maze challenge by code.org (in short, hocmaze) and karel . 
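as a rough illustration of what a task and a block-level solution code look like in an hocmaze-style domain, here is a toy executor that replays a program on a grid and reports whether it reaches the goal; the grid encoding and block names are assumptions for demonstration, not the benchmark's actual format or the prompts used in the study.

# toy maze executor: '#' wall, '.' free cell; program blocks: move, turn_left, turn_right
DIRS = {"east": (0, 1), "south": (1, 0), "west": (0, -1), "north": (-1, 0)}
LEFT = {"east": "north", "north": "west", "west": "south", "south": "east"}
RIGHT = {v: k for k, v in LEFT.items()}

def run(grid, start, direction, goal, program):
    r, c = start
    trace = [(r, c, direction)]
    for block in program:
        if block == "turn_left":
            direction = LEFT[direction]
        elif block == "turn_right":
            direction = RIGHT[direction]
        elif block == "move":
            dr, dc = DIRS[direction]
            nr, nc = r + dr, c + dc
            if grid[nr][nc] == "#":      # walking into a wall fails the task
                return trace, False
            r, c = nr, nc
        trace.append((r, c, direction))
    return trace, (r, c) == goal

grid = ["#####", "#...#", "#.#.#", "#...#", "#####"]
trace, solved = run(grid, (1, 1), "east", (3, 3), ["move", "move", "turn_right", "move", "move"])
print(solved)  # True: the program's execution trace ends on the goal cell

such a harness can replay a model-generated program and compare its trace against the reference solution, which is roughly what the execution-trace and solution-synthesis scenarios below probe the models for.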
figures 2 and3 provide an illustration of tasks t4 and t9, respectively.in our work, we evaluate two models, chatgpt (based on gpt-3.5) and gpt-4, in visual programming domains for a variety of scenarios. these scenarios are designed to evaluate various generative and problem-solving capabilities of these models in visual programming. more concretely, we consider the following three scenarios: (i) execution trace; (ii) solution synthesis; (iii) task synthesis. we provide further details about these scenarios in the following sections.we evaluate the performance of different methods using expert-based annotations involving a mix of quantitative and qualitative assessments. we base our evaluation using ten reference tasks from the visual programming domains of hour of code: maze challenge by code.org and karel .our results show that gpt-4 drastically improves up on chatgpt (based on gpt-3.5); however, the performance of gpt-4 is still quite poor as it struggles to combine spatial, logical, and programming skills crucial for visual programming.the rest of this paper is organized as follows. section 2 provides an overview of our evaluation setup. sections 3, 4, and 5 provide results for the above-mentioned three scenarios. section 6 discusses some limitations of our current work and directions for future work. in our work, we seek to investigate the capabilities of these models in visual programming domains popularly used for k-8 programming education, including domains like scratch, hour of code: maze challenge by code. each of these reference tasks has a unique minimal-sized solution code and the task shows the minimal-sized solution code for this task. the task's visual grid comprises the following elements: avatar (purple dart), goal (red star), free cells (white-colored grid cells), and wall cells (gray-colored grid cells). as considered in the work of, we use only a single pregrid-postgrid task specification for karel in our evaluation -this simplifies the task representation in prompts and keeps the overall evaluation setting for karel and hocmaze domains similar. (a) shows the task's a pair of 10x10 visual pregrid and postgrid and (b) shows the minimal-sized solution code for this task. complexity can be captured through the properties of this solution code. as shown in figure1, we characterize a task and its solution code through the following properties: (a) size is the number of code "blocks" in the solution code (i. first, we manually perform n queries = 5 queries to an in this domain, a task is represented as an 8x8 visual grid that contains wall cells, free cells, avatar (with specific location and direction), and goal. figure6in this domain, a task is represented as an 8x8 visual grid that contains wall cells, free cells, avatar (with specific location and direction), and goal.can you generate a solution code for this task that uses the minimum number of blocks? a solution code for a task takes avatar to goal when executed. when interacting in this domain, a task is represented as an 8x8 visual grid that contains wall cells, free cells, avatar (with specific location and direction), and goal.can you generate a task with 8x8 visual grid that would be solved by this code? 
the visual grid must contain avatar (with specific location and direction) along with goal, and can have wall cells and free cells.apart from the above extensions, there are many exciting directions for future work, including but not limited to: (a) curating novel benchmarks for visual programming that the research community can use to evaluate new versions of these models; (b) evaluating alternate generative models, in particular, open-source variants; (c) developing techniques to improve the performance of generative ai and large language models for visual programming, e.can you generate a solution code for this task that uses the minimum number of blocks? a solution code for a task transforms the pregrid into the postgrid when executed.can you generate a task with a pair of 10x10 visual pregrid and 10x10 visual postgrid that would be solved by this code? both the visual pregrid and visual postgrid must contain avatar (with specific location and direction), and can have wall cells, free cells, and markers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/959.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/959.txt new file mode 100644 index 0000000000000000000000000000000000000000..03d41ed19c14598cefe273493d00147384698b57 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/959.txt @@ -0,0 +1 @@ +moving on from the decentralized ecosystem of web 3.0 , the fourth generation of the world wide web has emerged through its unique requirement on intelligence and immersion between virtual and reality, known as web 4.0, highlighted by european commission in the report addressing the inequality of basic rights and the interactive efficiency of the environment. web 4.0 is expected to combine advanced artificial and ambient intelligence, the internet of things, trusted blockchain transactions, virtual worlds and xr capabilities, and digital and real objects to establish an environment where every component is fully integrated and communicates with each other, enabling truly intuitive, immersive experiences, seamlessly blending the physical and digital worlds . the eu's strategy for web 4.0 involves empowering users and supporting businesses in the virtual world while fostering open, inter-operable standards and multi-stakeholder governance. the ultimate goal of web 4.0 is to pioneer user-centric, ethical and inclusive virtual worlds that boost competitiveness, foster creativity, and uphold rights.a. gaps between web 4.0 and web 3.0in web 3.0, the terminology has a refined scope for decentralizing the entire world wide web with decentralized applications (dapp) , decentralized physical infrastructure (depin) and many blockchain infrastructures in both the network layer and the application layer . however, web 3.0 lacks the focus of content delivery, specifically the immersive vr/xr contents, which have exceeded the capacity of existing network infrastructure in terms of bit rates, quality of services (qos) and quality of experience (qoe) . web 4.0 sees the gap between the web 3.0 decentralized backbone of the control plane and the incoming web 4.0 data plane that requires network native intelligence together with future generation network infrastructures, e.g., 6g .to achieve the required data rate and connectivity of vr/xr content, semantic communication is proposed as a relief from demanding bit rates . 
the web 4.0 data is characterized as semantic data, that treat bits differently based on their features and priority .joint source channel coding (jscc) is widely adopted in the latest research of making the semantic aware communication network . at the same time, the semantic processing leads to a computing-heavy design for future generation networks, in particular, computing force network (cfn) , emphasizing high-performance computing with ultra-low latency and extraordinary reachability offered by both the access network , the core network and the data network.unlike web 3.0 which serves the same purpose in decentralized architecture, web 4.0 introduces ai as a new entity in the network, an integrated part of the network, compared to service-only ai applications in web 3.0. the new entity plays a pivotal role in enabling network intelligence and requires the network evolution of integrated computing and networking nodes with decentralized controllers. this model allows the ai entity to adapt, learn, and optimize itself, achieving levels of efficiency and responsiveness unattainable by the serviceonly ai applications in web 3.0. second, web 4.0 emphasises virtual experience consumption, requiring advanced network evolution on semantic and deterministic quality of services for end consumers, as compared in at the same time, the semantic processing leads to a computing-heavy design for future generation networks, in particular, computing force network (cfn), emphasizing high-performance computing with ultra-low latency and extraordinary reachability offered by both the access network, the core network and the data network.0 introduces ai as a new entity in the network, an integrated part of the network, compared to service-only ai applications in web 3. the proposed decentralized intelligence service operation principle is the key to closing the gaps in ai accessibility and enabling a self-evolving ai for network and network for ai by crowdsourcing and decentralized vending of ai services. their operational matrix intersects crucial nodes such as the computing force network, blockchain nodes, ai nodes, semantic networks, and the vr/ar real-time network.a) purpose, objectives, and model optimization: naes are designed to deliver specific ai services within the web 4. they continually optimize their operations and upgrade models according to market demand, seamlessly integrating with elements like semantic networks and vr/ar real-time b) computational resource: naes source computational power from computing service providers within the web 4. as shown in scenario 2, naes secure the necessary computational resources from decentralized computing resource providers within the computing force network, using their deposited funds as payment.d) blockchain network interaction: naes interact with the blockchain network primarily via smart contracts in scenario 4.e) provision of decentralized services and user data protection: in scenario 5, users can call naes' decentralized services via smart contracts. this process involves the flow of funds and data from the user to the naes and the provision of computational resources and data to the user. naes operate in compliance with data protection regulations, requiring explicit user permissions to utilize user data.f) vr/ar real-time network interaction: in the sixth interaction scenario, the computational resources of an nae and the computing force network collectively provide computational power to the vr/ar real-time network, as detailed in. 
naes also supply data obtained from their decentralized services to the vr/ar network, enabling real-time virtual or augmented reality experiences. semantic content aware network for vr/ar in confidential peer-to-peer network semantic networks play an essential role within naes, allowing them to understand and process complex information across diverse contexts. although there is a multi-layer identity strategy has been suggested to enable users to use different identities in different scenarios in web 3. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/96.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/96.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c104c787098e56f66ee143097aed59c08b40487 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/96.txt @@ -0,0 +1 @@ +deep learning has been extensively used in the last decade to solve several tasks (krizhevsky, sutskever, and hinton 2012;golan and el-yaniv 2018;hinton et al. 2012a). a deep learning model, i.e., a neural network, is formed of a sequence of layers with parameters optimized during the training process using training data. formally, an m-layer neural network model can be defined as follows:where φ i (.) is the non-linear activation function of the i th layer and w = {w 1 , . . . , w m } are the model's weights. given a training data {x i , y i } n i=1 , the parameters of f (x; w) are obtained by minimizing a loss l(•):however, neural networks are often over-parameterized, i.e., have more parameters than data. as a result, they tend to overfit to the training samples and not generalize well on unseen examples (goodfellow et al. 2016). while research on double descent (advani, saxe, and sompolinsky 2020;belkin et al. 2019;nakkiran et al. 2020) shows that over-parameterization does not necessarily lead to overfitting, avoiding overfitting has been extensively studied (dziugaite and roy 2017; foret et al. 2020;nagarajan and kolter 2019;neyshabur et al. 2018;poggio et al. 2017;grari et al. 2021) and various approaches and strategies, such as data augmentation (goodfellow et al. 2016;zhang et al. 2018), regularization (arora et al. 2019;bietti et al. 2019;kukačka, golkov, and cremers 2017;ouali, hudelot, and tami 2021;han and guo 2021), and dropout (hinton et al. 2012b;lee et al. 2019;li, gong, and yang 2016;wang et al. 2019), have been proposed to close the gap between the empirical loss and the expected loss.diversity of learners is widely known to be important in ensemble learning (li, yu, and zhou 2012;yu, li, and zhou 2011) and, particularly in deep learning context, diversity of information extracted by the network neurons has been recognized as a viable way to improve generalization (xie, liang, and song 2017;xie, deng, and xing 2015b). in most cases, these efforts have focused on making the set of weights more diverse (yang, gkatzelis, and stoyanovich;malkin and bilmes 2009). however, diversity of the activations has not received much attention. here, we argue that due to the presence of non-linear activations, diverse weights do not guarantee diverse feature representation. thus, we propose focusing on the diversity on top of feature mapping instead of the weights.to the best of our knowledge, only (cogswell et al. 2016;laakom et al. 2021a) have considered diversity of the activations directly in the neural network context. the work in (laakom et al. 
2021a) studied theoretically how diversity affects generalization showing that it can reduce overfitting. the work in (cogswell et al. 2016) proposed an additional loss term using cross-covariance of hidden activations, which encourages the neurons to learn diverse or nonredundant representations. the proposed approach, known as decov, was empirically proven to alleviate overfitting and to improve the generalization ability of neural network. however, modeling diversity as the sum of the pairwise cross-covariance, it is not scale-invariant and can lead to trivial solutions. moreover, it can capture only the pairwise diversity between components and is unable to capture the "higher-order diversity".in this work, we propose a novel approach to encour-age activation diversity within the same layer. we propose complementing the 'between-layer' feedback with additional 'within-layer' feedback to penalize similarities between neurons on the same layer. thus, we encourage each neuron to learn a distinctive representation and to enrich the data representation learned within each layer. we propose three variants for our approach that are based on different global diversity definitions. our contributions in this paper are as follows: • we propose a new approach to encourage the 'diversification' of the layers' output feature maps in neural networks. the proposed approach has three variants. the main intuition is that, by promoting the within-layer activation diversity, neurons within a layer learn distinct patterns and, thus, increase the overall capacity of the model. • we show empirically that the proposed within-layer activation diversification boosts the performance of neural networks. experimental results on several tasks show that the proposed approach outperforms competing methods.diversity of learners is widely known to be important in ensemble learning(li, yu, and zhou 2012;yu, li, and zhou 2011)and, particularly in deep learning context, diversity of information extracted by the network neurons has been recognized as a viable way to improve generalization(xie, liang, and song 2017;xie, deng, and xing 2015b).in this work, we propose a novel approach to encour-age activation diversity within the same layer. the main intuition is that, by promoting the within-layer activation diversity, neurons within a layer learn distinct patterns and, thus, increase the overall capacity of the model., direct, similar to decov(cogswell et al.] is the feature vector, c is the number of units within the feature layer, and λ 1 and λ 2 are two hyper-parameters controlling the contribution of each term to the diversity loss. intuitively, the first term of equation 4 penalizes the similarity between the units and promotes diversity, whereas the second term ensures the scale-invariance of the proposed regularizer., the feature layer, our proposed diversity loss, as in(cogswell et al. moreover, the decov approach only captures the pairwise diversity between the components, whereas we propose variants of our approach which consider a global view of diversity. we note that, compared to the standard approach, employing a diversity strategy consistently boosts the results for all the two models and that our approach consistency outperforms both competing methods (standard and decov) in all the experiments. 
with resnet50, the three variants of our proposed approach significantly reduce the test errors compared to standard approach over both datasets: 0.to further demonstrate the effectiveness of our approach and its ability to boost the performance of state-of-theart neural networks, we conduct additional image classification experiments on the imagenet-2012 classification dataset (russakovsky et al. as it can be seen, feature diversity (our approach and decov) reduces the test error of the model and yields a better performance compared to the standard approach.to further investigate the effect of the proposed diversity strategy, we conduct a sensitivity analysis using imagenet on the hyperparameters of our methods: λ 1 and λ 2 which controls the contribution of the global diversity term to the global loss.09%) corresponding to 0. 2021a;cogswell et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/960.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/960.txt new file mode 100644 index 0000000000000000000000000000000000000000..2fc555b50d67778a3f4b6dc96a1613c21b0aa5a9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/960.txt @@ -0,0 +1 @@ +social action in interaction is a multi-layered phenomenon, complex in many respects. this is one, if not the, main challenge to human-robot interaction (hri) and affects artificial intelligence (ai) more broadly. multimodality of communication constitutes a complexity factor per se; yet it is fundamental for action coordination, mutual understanding, as much as the affective and emotional dimension of interpersonal communication. "social robots must be able to read the different social and conversational cues that people use during interaction with each other, then use these to adapt so it is imperative to investigate what types of cues people use in their interaction" (onyeulo & gandhi, 2020: 5). this is an endeavor which, per se, has been extensively pursued in several disciplines, and prominently by ethnomethodology and conversation analysis (em/ca -cf., e.g., garfinkel, 1967;sacks, 1972), whose contribution could be further leveraged (as recognized in human-computer interaction, e.g., moore and arar, 2019).there is, furthermore, another layer of human action-in-interaction which is crucial and has to be addressed if we aim at developing socially-competent artificial agents. it is the expressive dimension, or the expressive order of social interaction (cf. goffman, e.g., 1959goffman, e.g., , 1967)). it has to do with social roles and positions, with participants' "faces" (impression management, deference and demeanor, reputation and respect), and with their social relationships (e.g., tacitly displaying that two participants hold a closer mutual relationship than with the other interactants). this is what has been called "interaction ritual" (collins, 2004). the expressive order is always operative when humans find themselves in the presence of others, whatever the activity at stake (e.g., work or leisure) and the given scenario, or social occasion.such a social context does play an important role. for instance, goffman (1961) metaphorically maintained that social encounters are characterized by an enveloping "membrane" which keeps outside "issues" considered irrelevant for the current social occasion. 
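returning to the within-layer activation diversity regularizer discussed above, the following is a hedged pytorch-style sketch of one way such a penalty could look, with a pairwise-similarity term and a scale-control term; the hyperparameters lambda_1 and lambda_2 and the exact functional form are illustrative and do not reproduce the paper's three variants.

# hedged sketch of a within-layer diversity penalty on one layer's activations (batch, c)
import torch

def within_layer_diversity_loss(features, lambda_1=0.1, lambda_2=0.1):
    f = features - features.mean(dim=0, keepdim=True)
    cov = (f.t() @ f) / features.shape[0]           # c x c cross-covariance of the c units
    off_diag = cov - torch.diag(torch.diag(cov))
    similarity = (off_diag ** 2).sum()              # penalize redundant (correlated) units
    scale = ((torch.diag(cov) - 1.0) ** 2).sum()    # keep per-unit variance near 1
    return lambda_1 * similarity + lambda_2 * scale

# added to the task loss during training, e.g. loss = task_loss + within_layer_diversity_loss(h)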
for example, in western culture at a party at a friend's place, social class or salary are to be filtered out; in some societies, the same holds for gender issues at a job meeting or interview. breaches in the membrane threaten the scenario itself, possibly causing a shift in the "frame" of activities (goffman, 1974). to keep with the examples, a conversation and hence a social situation1 suddenly stops to be a friendly one, a job interview may turn into a discriminatory encounter.what is to be filtered out, furthermore, does not only change with the kind of encounter (social occasion/scenario), but also with changing culture (see, e.g., the gender example). cultural variations -themselves multilayered and intersectional (national and regional cultures; professional or religious cultures, both possibly transnational yet with their local variety; occupational and organizational cultures; so-called "subcultures" of all sorts; etc.)are orthogonal both to the functional and the expressive dimensions of human action in interaction. that is, the way things are done in a given cultural context and social occasion/scenario -i.e., the ethnomethods (garfinkel, 1967(garfinkel, , 2002;; cf. also liberman, 2013)vary, and this is true for the functional as much as for the expressive order of interaction.when considering human-robot interaction, therefore, culture is not only to be addressed in terms of its "influence on the acceptance towards social robots" (onyeulo & gandhi, 2020: 4) or "on expectations towards and responses to social robots" (lim et al., 2021: 307) -that is, as an individual phenomenon. when it comes to designing socially-competent artificial agents, equipping them with the ability to adapt to the local culture does not only nor primarily mean adaptation to a user's individual traits, including their cultural ones (i.e., adapting to a kind of individual2 ), but adaptation to different social occasions as grounded in given cultures and, possibly, group identities3 . a few scholars moved close to such an approach, in particular mascarenhas and colleagues (e.g. 2009mascarenhas and colleagues (e.g. , 2013amascarenhas and colleagues (e.g. , 2013b; cf. also rehm, 2010), leveraging sociological literature on rituals. however, they maintained that "activities can be separated into two classes: ritual activities and technical activities. whilst a ritual activity is described as expressive, rule-governed, routinized, symbolic, or non-instrumental, a technical activity is described as pragmatic, spontaneous, and instrumentally effective" (mascarenhas et al., 2009: 307). on the contrary, we contend that the expressive order of interaction -with its rituals such as mutual greeting (the example taken in mascarenhas et al, 2009) or apologies followed by minimizations (cf. further)-is attended to (also) co-occurrently with instrumental activities. in other words, they are not two classes but two dimensions of human activity. this also means that the ordinary interaction rituals we tackle in this article, are as much "spontaneous" as any other situated instrumental activity: they may emerge as required in the midst of "technical activities" (which, in turn, can be as much rule-based as rituals), and should be enacted accordingly to the developing circumstances of that specific scenario and cultural context.on the one hand, this approach allows to keep culture into the picture as a multilayered phenomenon, and to avoid stereotyping, whose threat has been largely acknowledged (e.g. 
buolamwini & gebru, 2018;wang and kosinski, 2018;katz, 2020). on the other hand, it pushes further what we mean by socially-competent artificial agent. the perspective we take departs from an individual trait approach to cultural identity, for three reasons. first, because of intersectionality (e.g. crenshaw, 1991;hooks, 2014), which requires to look at identity as a complex trait function, so to speak, which varies with varying situations. this discloses individually-based cultural adaptation both as nearly impossible in terms of design, and as unethical and dangerous when considering the opportunities and outcomes of developing this kind of technology. second, several studies highlighted identity's fluid, processual and situated character. one's identity changes both in time (life course) and with changing occasions; in fact, some kinds of "trait" are in the background (e.g. salary, gender) and others in the foreground (e.g. friendship, job expertise) in any given occasion, and this can even change during the event -some of the disruptions we examine in this paper are a case in point. finally, culture intended as a (more o less) stable property of the individual brought to contradictory results in hri research (lim et al., 2021) and it has been noted that it is essential to investigate under what conditions could critically benefit from culturally sensitive robots. studying across cultural practices and learning from broader literature on social anthropology, sociology, ethnography or human-computer interaction would provide exemplars to identify and point to priority areas for future research on social robotics and robotic design. (ivi: 1328).we maintain that a priority area is constituted by interaction rituals, with particular attention to interactional disruptions.there is, furthermore, a third kind of context alongside social occasion as located in a given culture, the situated context (see footnote n. 1). besides bringing in its specific, local developing circumstances, such a situated context also entails the possibility for troubles and disruptions to emerge -at the functional, conversational, and expressive levels. agents in interaction do not only commit practical mistakes such as dropping the sugar on the bar counter, but also conversational errors and other "infelicities" in communication (e.g., misunderstandings) that equally ask for a "repair" (schegloff et al., 1977;raymond et al., 2013;drew et al., 2015). in both cases, what we are referring to as the functional order of action-in-interaction can be disrupted; in both cases, the expressive order is as much at stake (e.g., appearing clumsy, or stupid). whereas the former have been addressed in hri (e.g., marge, rudnicky, 2019;benner et al., 2021), devised solutions, to the best of our knowledge, do not tackle the expressive order, thereby missing to acknowledge one of the dimensions building up to the social competence of an (artificial) agent. moreover, disruptions can directly affect the expressive layer of interaction, in the absence of any material or (intrinsically) conversational trouble, for instance when an "irrelevant" topic penetrates the "membrane" and enters the conversation. 
this equally calls for a mending, or recovery strategy.when considering the design and development of socially-competent, culturallyadaptive artificial agents, all the above mentioned layers must be taken into account -from a perspective looking at culture as an interactional and fully social, rather than individual dimension-, and artificial agents should be enabled to contribute to recovery strategies concerning "interactional breaches and wider relational ruptures" (tavory & fine, 2020: 365). in this paper we argue that the capability of an artificial agent to detect and recover interactional disruptions can be important to equip the agent with effective social skills (skills for the expressive level of social interaction), and that such specific skills can be culturallyadaptive via culturally-located training data-sets for the expressive order of social action-ininteraction. this claim is illustrated by means of a simple applicative scenario (the artificial bartender, see section 2) that helps us to present a review of the relevant literature on interactional disruptions and their corresponding recovery strategies (section 3). after having organized the information about the main disruptions in an illustrative table (section 4), we show (section 5) as a proof of concept how first a reactive agent architecture, and then an architecture with planning capabilities can detect, recover, and even prevent some disruptions. this will help us to sketch possibile requirements for an artificial agent with respect to the whole list of disruptions, providing suggestions for implementation. we briefly discuss the proposed approach (section 6) before concluding. in this paper we argue that the capability of an artificial agent to detect and recover interactional disruptions can be important to equip the agent with effective social skills (skills for the expressive level of social interaction), and that such specific skills can be culturallyadaptive via culturally-located training data-sets for the expressive order of social action-ininteraction. the expressive dimension of the "interaction order" -or the expressive order-has to do with the social occasion, the corresponding social roles, "impression-management", "face work" and connected interaction rituals, such as those concerned with deference and demeanor(goffman, 1959(goffman, , 1967;;cf. therefore, considering an artificial agent acting in a social context, the agent's knowledge module may use this new kind of information to interpret the ongoing social interaction, possibly to identify cases of disruption, and to activate suitable recovery actions which are socially acceptable or, depending on the situation, even expected.the recovery strategy consists in a "repair" which can be accompanied by apologies: 1) asking to repeatmust-form of clarification, by the agent who does not receive the message; 2) signaling a misunderstandingmay-form of clarification by the agent whose words were misunderstood; 3) self-repair by the agent who committed the speech error; 4) asking for clarification, either explicitly or implicitly, by the agent who does not understand the message disruption of the "membrane"(goffman 1966), which are the "rules of (ir)relevance" related to the topic of conversation: an agent introduces a "sensitive" topic for that kind of social encounter (e. 
in this case, it is not that a problem occurred during a joint activity such as conversation (more precisely, talk-in-interaction) and the parties have to manage that; rather, one of the participants does not behave as expected by a competent social members, and the parties have to decide whether to make that an issue in the current interaction or not, depending on the situated circumstances (a recovery strategy is optional; for instance, it might be employed to avoid functional disruptions, like the bartender saying to an overly chatty customer: "excuse me but there are too many customers i need to attend to").in our case, the connection between the requirements and the architecture is given by: 1) a multicultural library of disruptions and a multicultural library of kinds of action -particularly, communicative acts-that help recovering from disruptions; 2) a culture-specific function of disruption recognition; 3) a culture-specific map between disruptions and recovery acts; and 4) a culture specific module of the execution of the act. as a step forward with respect to architecture a, architecture b includes a enhanced disruption recognition (dr) module; moreover, the mapping is now between disruptions and recovery strategies by means of the module map disruption recovery strategy (mdrs), and the actual execution of the actions is entrusted to the action execution (ae) module. the artificial agent we propose may have a shallow social interface, but one of the important points to the advantage of our proposal is that, also between humans, interaction happens at the interfaces, and the disruptions of norms, membranes, shared situations and so on, that we listed in table1coupled with recovery strategies, are the normal way in which people interact. this classificatory work allows artificial agents engaged in human-robot interaction to reason about, and in case contribute to, the recovery from disruptions of the "interaction order" and its expressive dimension. although the default of our approach appears to be that the artificial agent should act in a sociallysmooth way as a human would do, and although there is evidence that people "tend to prefer robots better complying with the social norms of their own culture"(bruno et al. have you experienced failures of your social agent/robot that can be considered to be at the expressive order? yes such disruptions, as listed in table1, are common between humans and we propose that their detection and recovery could be integrated in the agent/robot architecture leveraging the data about the failures. do you have one or more social contexts, like the bartender scenario or others, that you expect to be relevant in the deployment of the agent/robot? yes the expressive order is definitely relevant: on the one, hand the social context could be a useful guide to detect disruptive behavior; on the other hand, the recovery strategies (see table1) could help to maintain the context itself.yes social presence can be facilitated by managing the expressive order, and conversely social presence triggers the expressive order so our proposal is extremely relevant in particular for single-user interaction. which competences, as listed in culturally-adaptive? yes we expect that social contexts are culture-dependent, you can take advantage of your architecture in order to deal with them and their disruptions of the expressive order devising an architecture more complex of our simple architectures a and b. are cultural differences among the present humans relevant? 
yes in this case you have a multi-party interaction with cultural differences and your architecture needs to be dynamically culturally-adaptive (beware of the pitfall of stereotypes on detecting the culture of a user), and the expressive order is certainly relevant; please note that our simple architectures a and b do not cover this case. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/961.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/961.txt new file mode 100644 index 0000000000000000000000000000000000000000..fdb4ec23ac3362101390e31d9746875b829b3330 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/961.txt @@ -0,0 +1 @@ +social divisions and conflicts have become a global problem . divisions and conflicts are not only between nations, such as ukraine and russia or taiwan and china, but also between individuals to families, communities, regions .to solve these social issues, the philosopher deguchi proposes the concept of "mixbiotic society" that further advances the idea of a symbiotic society . the "mixbiotic society" is a society in which individuals with freedom and diverse values mix and mingle in physical proximity to recognize their respective "fundamental incapability" each other and sublimate into solidarity. the "fundamental incapability" is that the individual "i" is incapable of any physical action alone, nor of complete control over others. the subject of mixbiotic society is not "self as i," but "self as we" who are entrusted to each other. the "self as we" is oriented toward diversity as other, freedom as de-regularity, and solidarity as mutual aid.according to sociologist luhmann, a social system is an autopoietic system that forms emergent order through a network of processes of communication generation and disappearance . autopoiesis is a theory of living systems proposed by biologists maturana and varela, a self-organizing system consisting of a recursive network of processes in which components produce components through interaction . from the perspective of autopoiesis, communication is crucial for a social system in which freedom and solidarity are in harmony.nonviolent communication (nvc), developed by psychologist rosenberg, is a well-known communication method for resolving social divisions and conflicts . nvc is a method that creates empathy to promote interpersonal harmony and future cooperation, and the process consists of four components: observation, feelings, needs, and requests. nvc and the mixbiotic society have several things in common: nvc's fundamental questions "what's alive in us?" and "what can we do to make life more wonderful?" are similar to the "fundamental incapability" and "self as we" in the mixbiotic society. the avoidance of evaluation based on fixed values in observation and feelings corresponds to diversity of values, the avoidance of "should" and "must" in needs and requests corresponds to de-regularity, and the connections that allow for compassionate giving correspond to solidarity and mutual aid. therefore, nvc is a powerful tool in aiming for a mixbiotic society. however, nvc is not for everyone to use immediately. 
to express observation with objectivity, feelings without evaluation, needs without coercion, and requests for positive action, mediation is necessary to support them.based on the above, the purpose of this report is to apply generative ai, which has recently made remarkable progress, to nvc mediation as part of research toward a mixbiotic society. until now, mediation has been performed by skilled and certified trainers, of which there are only over 800 worldwide. however, if generative ai can be utilized, it would hasten the spread of nvc and the realization of a mixbiotic society.generative ai is artificial intelligence that can generate text, images, or other media in response to prompts . notable generative ais include chatgpt , bing , bard , and stability ai . in the methods and results section, i will test the ability of chatgpt to perform appropriate mediation by entering examples into prompts for each of the four processes: observation, feelings, needs, and requests. in the discussion section, i discuss future issues and developments based on the results of trial.to solve these social issues, the philosopher deguchi proposes the concept of "mixbiotic society" that further advances the idea of a symbiotic society. the "mixbiotic society" is a society in which individuals with freedom and diverse values mix and mingle in physical proximity to recognize their respective "fundamental incapability" each other and sublimate into solidarity.nonviolent communication (nvc), developed by psychologist rosenberg, is a well-known communication method for resolving social divisions and conflicts. nvc is a method that creates empathy to promote interpersonal harmony and future cooperation, and the process consists of four components: observation, feelings, needs, and requests. nvc and the mixbiotic society have several things in common: nvc's fundamental questions "what's alive in us?" and "what can we do to make life more wonderful?" are similar to the "fundamental incapability" and "self as we" in the mixbiotic society. the avoidance of evaluation based on fixed values in observation and feelings corresponds to diversity of values, the avoidance of "should" and "must" in needs and requests corresponds to de-regularity, and the connections that allow for compassionate giving correspond to solidarity and mutual aid. to express observation with objectivity, feelings without evaluation, needs without coercion, and requests for positive action, mediation is necessary to support them.based on the above, the purpose of this report is to apply generative ai, which has recently made remarkable progress, to nvc mediation as part of research toward a mixbiotic society. however, if generative ai can be utilized, it would hasten the spread of nvc and the realization of a mixbiotic society. in the methods and results section, i will test the ability of chatgpt to perform appropriate mediation by entering examples into prompts for each of the four processes: observation, feelings, needs, and requests. 
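a minimal sketch of how such a trial could be scripted is shown below; the report itself pastes role prompts into the chatgpt interface, so the call_llm function here is only a hypothetical placeholder for whatever chat api is used, and the instruction wording is paraphrased from the four nvc components rather than copied from the actual prompts.

NVC_INSTRUCTIONS = {
    "observation": "revise the sentence into an objective observation, free of evaluation.",
    "feelings": "revise the sentence so that it expresses feelings without evaluating others.",
    "needs": "revise the sentence into a specific statement of needs, separated from feelings.",
    "requests": "revise the sentence into a concrete, positive, non-coercive request.",
}

def call_llm(prompt: str) -> str:
    # hypothetical placeholder: send the prompt to the chosen generative ai
    # service and return its text response.
    raise NotImplementedError

def mediate(sentence: str) -> dict:
    # run one sentence through all four nvc mediation steps.
    results = {}
    for component, instruction in NVC_INSTRUCTIONS.items():
        prompt = f"# role\n{instruction}\n\n# sentence\n{sentence}"
        results[component] = call_llm(prompt)
    return results

# usage (once call_llm is wired to a real service):
# mediate("you never listen to me and you are always late!")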
then, the sentences about observation, feelings, needs, and requests are entered as exercises in the prompt, and the output results in a response from chatgpt.input # role i will now enter a sentence, and you will revise it into a sentence a specific statement of "needs" without feelings, or as a sentence that separates specific "needs" from feelings (e.this trial of nvc mediation by chatgpt showed that, in general, there is potential for the application of generative ai, although it is not at a level where it can be put to practical use immediately. mediate by certified trainers and generative ai together at events and workshops to identify issues with generative ai. it is hoped that the widespread use of nvc mediation using generative ai will lead to the early realization of a mixbiotic society where freedom and solidarity among people with diverse values are compatible. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/962.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/962.txt new file mode 100644 index 0000000000000000000000000000000000000000..926f3bbe29d06c0efe501246206a1279f62e72fc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/962.txt @@ -0,0 +1 @@ +crowd monitoring is the process of evaluating and watching how big crowds of people behave and move about in public places. crowd monitoring is now essential for guaranteeing public safety, security, and effective crowd management due to the rise in popularity of major gatherings and events . crowds in various places such as streets, parks, airports, classrooms, swimming pools, and innumerable other locations are typically monitored by close circuit television cameras (cctv) . the disadvantages of cctv cameras are limited area coverage, high power consumption, installation problems, mobility, and constant monitoring by operators . many algorithms and approaches have been developed to efficiently identify, track, and evaluate crowd behavior and density to monitor and control crowds , . these algorithms can interpret data from various sources, including sensors, social media feeds, and cctv cameras, and they can also deliver real-time forecasts and insights . machine learning, deep learning, and computer vision algorithms are important methods used in crowd surveillance . the accuracy and dependability of machine learning models in dynamic and complex situations, such as crowds, is a challenge, as well as crowd behavior, evolves over time, models may need to be periodically retrained to maintain accuracy; otherwise, their efficiency will gradually decrease . although machine learning and artificial intelligence have advanced, human input is still essential for live crowd monitoring for decision-making . the complexity and wide range of variables that might affect crowd behavior make it difficult for algorithms to generate reliable forecasts. here are various justifications for why live crowd monitoring requires human participation to have real-time monitoring, contextual understanding, and decision-making. in general, crowd monitoring systems and their operators confront several difficulties, and it is crucial to have knowledgeable staff and reliable mechanisms in place to handle and address these difficulties successfully. the main challenge of the crowd-monitoring approach is to establish a prediction model that evaluates both the role of operators and machine learning algorithms in crowd-monitoring in terms of decision-making. 
crowd monitoring is now essential for guaranteeing public safety, security, and effective crowd management due to the rise in popularity of major gatherings and events. machine learning, deep learning, and computer vision algorithms are important methods used in crowd surveillance. the accuracy and dependability of machine learning models in dynamic and complex situations, such as crowds, is a challenge, as well as crowd behavior, evolves over time, models may need to be periodically retrained to maintain accuracy; otherwise, their efficiency will gradually decrease. a crowd monitoring system is a cutting-edge technological solution that combines various data sources, such as computer vision, sensor networks, and social media data, to better understand the crowd..  pattern recognition: plays a crucial role by identifying certain patterns within the data, such as movement patterns, to offer insights into crowd behavior. these systems harness the power of sophisticated computer vision algorithms and machine learning techniques to process the captured data.security systems use computer vision algorithms and machine learning techniques to process the captured data and provide insights into crowd behavior, density, movement patterns, and potential risks. while security systems employing computer vision algorithms and machine learning techniques have significantly improved the surveillance landscape, the role of cctv operators remains crucial in maintaining a comprehensive and effective surveillance system. in human gatherings, primary uses are in tracking the movement of individuals within a crowd, providing valuable insights into crowd dynamics.ultimately, technologies that rely on vision have demonstrated a notable superiority in crowd monitoring compared to those that don't, primarily due to their capability to supply detailed and complex data about the behavior of crowds.this data includes crowd density, movement patterns, and behavioral analytics that are critical in managing and understanding crowd dynamics. therefore, the classification of patterns in data, including in surveillance monitoring systems, can be done with the use of strong techniques like machine learning, and deep learning which will help to instantly evaluate vast amounts of data and find patterns or abnormalities that might point to security threats or other significant events.the abundant data yielded by vision-based systems enables the efficient application of machine learning and deep learning methodologies, encompassing techniques such as crowd analysis, object detection, and speech recognition.research demonstrates that cutting-edge machine learning and deep learning algorithms are consistently surpassing expectations and delivering exceptional outcomes within the realm of vision-based crowd monitoring systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/963.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/963.txt new file mode 100644 index 0000000000000000000000000000000000000000..f98da6984fe3f4b8e6e0453708035342e1b67924 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/963.txt @@ -0,0 +1 @@ +the goal of the nsmq ai project is to build an ai to win ghana's nsmq. 
the nsmq is an annual live science and mathematics competition for senior secondary school students in ghana in which 3 teams of 2 students compete by answering questions across biology, chemistry, physics, and math in 5 rounds over 5 progressive stages until a winning team is crowned for that year. the competition ran from 1993 to 2022 except for 2010 and 2011. the nsmq is an exciting live quiz competition with interesting technical challenges across speech-to-text, text-to-speech, question-answering, and human-computer interaction. an ai that conquers this grand challenge can have real-world impact on education such as enabling millions of students across africa to have one-on-one learning support from this ai.our goal is to have our ai compete in round 5, the riddles section of the 2023 competition happening in fall 2023. this round, the final one, is arguably the most exciting round as the winner of the competition is generally determined by the performance in the round. in the riddles round of the nsmq quiz, students answer riddles across biology, chemistry, physics, and mathematics. three (3) or more clues are read to the teams that compete against each other to be first to provide an answer (which is usually a word or a phrase) by ringing their bell. the clues start vague and get more specific. to make it more exciting and encourage educated risk-taking, answering on the 1st clue fetches 5 points, on the 2nd clue -4 points, and on the 3rd or any clue thereafter, 3 points. there are 4 riddles in all for each contest with each riddle focusing on one of the 4 subjects. speed and accuracy are key to winning the riddles round. an example riddle with clues and the answer is as follows. question: (1) i am a property of a periodic propagating disturbance. (2) therefore, i am a property of a wave. (3) i describe a relationship that can exist between particle displacement and wave propagation direction in a mechanical wave. (4) i am only applicable to waves for which displacement is perpendicular to the direction of wave propagation. (5) i am that property of an electromagnetic wave which is demonstrated using a polaroid film. who am i? answer: polarization.the first version of the nsmq ai2 was demoed at africaied 20233 -1st workshop on ai in education, a satellite workshop of the international conference on ai in education. nsmq ai is currently a web app that automatically transcribes speech with a ghanaian accent from a video of a riddle, generates an answer to the scientific riddle and says that answer with a ghanaian accent.motivated by this need, in our position paper -"can an ai win ghana's national science and maths quiz? an ai grand challenge for education"boateng et al. nsmq ai is currently a web app that automatically transcribes speech with a ghanaian accent from a video of a riddle, generates an answer to the scientific riddle and says that answer with a ghanaian accent. one critical challenge -among others -is determining the optimal moment for the model to return an answer, or deciding instead to wait for more information from additional clues to increase the model's chances of providing the correct answer. for data curation, we plan to have audio segments (along with transcripts) of each riddle for at least 5 years of the competition (for use by the stt team) and the corresponding performance of the best human contestants (for benchmarking by the qa team). 
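the scoring rule for the riddles round and the "answer now or wait for the next clue" decision described above can be sketched roughly as follows; the confidence threshold and the qa_model interface are assumptions made for illustration, not the project's actual implementation.

def points_for_clue(clue_number: int) -> int:
    # nsmq riddle scoring: 5 points on clue 1, 4 on clue 2, 3 thereafter.
    if clue_number == 1:
        return 5
    if clue_number == 2:
        return 4
    return 3

def answer_riddle(clues, qa_model, confidence_threshold=0.8):
    # toy early-answer policy: after each clue, ask the qa model for an
    # answer and a confidence score; answer as soon as the confidence
    # clears the threshold, otherwise wait for more clues.
    # qa_model(question_text) is assumed to return (answer, confidence).
    heard = []
    for i, clue in enumerate(clues, start=1):
        heard.append(clue)
        answer, confidence = qa_model(" ".join(heard))
        if confidence >= confidence_threshold or i == len(clues):
            return answer, points_for_clue(i), confidence

# usage with a dummy model whose confidence grows with each clue
dummy = lambda text: ("polarization", 0.3 * text.count("."))
clues = ["i am a property of a periodic propagating disturbance.",
         "therefore, i am a property of a wave.",
         "i am demonstrated using a polaroid film."]
print(answer_riddle(clues, dummy))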
for web app, we plan to include the live mode where we will show the live stream of the contest as our ai attempts to answer in the riddles round, make the app accessible on the cloud which involves cloud deployment and hosting to meet the system's compute needs, reducing latency of ml inference subsystems as that can be an overall system bottleneck and supporting demo and live quiz modes which involve sample and live stream media data as well as user input. for stt, we plan to automatically detect the start of the reading of the riddles and to improve the accuracy and latency of the whisper model by finetuning the model on past nsmq audios and transcripts. for qa, we plan to improve the accuracy and latency of the model and generate confidence scores as the clues are being read to incorporate the model's level of certainty into the question-answering process and inform attempts to answer early on.for model performance evaluation, we tested our pipeline with 3 audios from the nsmq competition which consist of speech with ghanaian accents, along with their corresponding transcripts. each split was represented as a csv file with the core columns "clue 1" to "clue 9" for all the clues in per riddle, "answer" for the ground truth answer, and "answer 1" to "answer 4" for alternative ground truth answers (if any). for distilbertsanh et al. we retrieved the top three passages with the highest similarity scores, along with their confidence values. we concatenated the retrieved passages and calculated the mean of their confidence as the overall confidence. we then passed the concatenated clues into the extractive qa models as "questions", while the retrieved passages are provided as the "context. for both classes of qa models, the generated answer per riddle was compared to the original ground truth answer, and if applicable, any alternative ground truth answers to the riddle. evaluation 1 involved synthesizing 30 scientific and mathematical speeches from past nsmq questions and evaluation 2 involved synthesizing 30 conversational speech. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/964.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/964.txt new file mode 100644 index 0000000000000000000000000000000000000000..83f1d49148c6e069aa369cfa1685760cc46cc7cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/964.txt @@ -0,0 +1 @@ +artificial intelligence (ai) is bringing ever more new functionalities to the realm of mobile devices that are now considered essential (e.g., camera and voice assistants, recommender systems). users have increasing expectations of the processing power and capabilities of their mobile devices. contemporary smartphones have highly advanced image processing systems, smart integrated assistants, and offer gigabit speeds over their radios. all of these features, and many more, are enabled by artificial intelligence , , .yet, operating artificial intelligence takes up a substantial amount of energy. modern artificial intelligence techniques, such as deep learning, can have very high energy consumption, both on dedicated servers and on mobile devices .beyond the realm of artificial intelligence, mobile computing has long been concerned with energy efficiency due to the limited power capacity of smartphones , . less obviously, artificial intelligence techniques themselves can also be used to reduce mobile energy consumption. for example, by optimizing data transmission , or location services . 
hence, artificial intelligence has two faces in this problem: it is both 1) a key enabler of desired (efficient) mobile features and 2) a major power draw on these devices, playing a part in both the solution and the problem. in this paper, we provide a comprehensive overview of both of these aspects of mobile energy use and artificial intelligence by reviewing the associated literature. the goal of this review is to understand the characteristics of the literature on mobile energy consumption involving artificial intelligence.our literature review yields 34 papers from 2013 until late 2022. we identify and pinpoint thirteen different topics being addressed by the literature. our results showcase a growing interest in the intersection between ai and mobile computing energy since 2019. most studies revolve around solution papers (32 out of 34 papers) and only 6 out of 34 papers display the participation of authors with an industry affiliation. we argue that it is quintessential that contributions in this field come with a replication package and that proposed solutions are made available to the public. finally, although topics such as approximate computing and benchmarking have been marginally covered by the literature (2 out of 34 papers), we expect them to be relevant to the challenges posed in this field.the contributions of this paper are three-fold:• an analysis of the field of ai in green mobile computing, covering publications per year, study type, industry involvement, level of study, and tool provision. • a mapping of the field into different topics, with the respective summary of existing contributions. • a replication package that provides all the collected data for each paper, that can be used in future reviews. 1the remainder of this paper is structured as follows. we describe the detailed methodology in section ii. following this methodology, we present our results in section iii. we then discuss these results and their related impacts in the research community in section iv. the threats to validity of our study can be found in section v. we then treat related work in section vi and discuss how our work differs from those studies. lastly, we highlight the conclusions of the literature review in section vii. the study regards energy consumption • i-3.• e-6.• study type: the type of study the paper is presenting: either a position on ai in green mobile software, a so-lution to tackle an issue on the topic, or an observational study; • category of ai role: the role ai has regarding green mobile computing. it can either be the use of ai for improving the energy efficiency of mobile computing, or the study of energy consumption of ai-based mobile systems; • topic: the topic the primary study is focusing on.for instance, context adaptation, in which the mobile software execution is readjusted according to the context, to improve the energy efficiency; • level of study: it corresponds to the scale at which the mobile software is studied (either at the level of the device, or of the system); • industry involvement: the involvement of industry in the authoring of the study, which can be either exclusively academic, exclusively industrial or a mix; • tool provision: the availability of the tool(s) to handle ai in green mobile computing presented in the study (if applicable). 
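the classification dimensions listed above could be stored as one record per primary study, for instance in the replication package; the dataclass below is only an illustration of such a coding scheme, and the example values are invented rather than taken from any specific paper in the review.

from dataclasses import dataclass

@dataclass
class PrimaryStudy:
    # one coded record per primary study; the field names follow the
    # dimensions described above (illustrative sketch only).
    title: str
    year: int
    study_type: str            # "position", "solution" or "observational"
    ai_role: str               # ai used for energy efficiency vs. energy of ai-based software
    topic: str                 # e.g. "context adaptation", "offloading"
    level_of_study: str        # "device" or "system"
    industry_involvement: str  # "academic", "industrial" or "mixed"
    tool_provided: bool

example = PrimaryStudy(
    title="invented example: energy-aware offloading of a mobile model",
    year=2021,
    study_type="solution",
    ai_role="energy of ai-based software",
    topic="offloading",
    level_of_study="device",
    industry_involvement="academic",
    tool_provided=False,
)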
as for category of ai role, they were ai4e, to translate the use of ai to make the mobile software more energy efficient, and eofai, for the fact that ai was involved in the design of the mobile software itself, and was studied regarding energy efficiency. category of ai role: the use of ai to make mobile software more energy-efficient is being studied twice as much as the energy consumption of ai-based mobile software.exploit the spatio-temporal and device context and use ai to predict and adapt device wireless data and location interface configurations so that energy consumption in mobile devices is optimised.identify the logical tradeoff between energy use and accuracy applied to federated learning: selecting the clients of the network with the largest data sets, the accuracy is increased, but the energy use grows accordingly since more training time and computation are needed on a larger data set. the authors measure the energy consumption of a mobile convolutional neural network (cnn) for computer vision and compare it to the energy consumption of an offloaded version of the model.from the set of primary studies, only very few papers (2 out of 34 papers) provide available tools to address the mobile energy consumption involving artificial intelligence.regarding the two faces of ai and its involvement in mobile energy consumption, it can be noted that the number of papers in this review on the topic of using ai to reduce energy consumption was twice as high as the number of papers on reducing the energy consumption of ai itself. we argue that, especially as mobile devices are getting more and more prevalent, to have a real impact on the energy efficiency of mobile software, and to update the related practises, we need to promote the involvement of the mobile software industry in this field. we describe two main branches in the literature: 1) papers looking at the energy consumption of mobile ai applications and 2) papers focusing on applying ai to reduce mobile energy consumption. we pinpoint main research directions, such as offloading and networking optimization to save energy on mobile devices and the analysis of the energy consumption of federated learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/965.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/965.txt new file mode 100644 index 0000000000000000000000000000000000000000..912b403afa55cfb9be93480f6535ec9e6514fccc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/965.txt @@ -0,0 +1 @@ +data science is comparable to applied research. often in research, there are lots of repetitive processes to be followed until it generates the desired outcome. similarly, data science processes are often repetitive until they meet the business objectives. those business objectives are set during the repeated business understanding and data understanding phase of crisp-dm. crisp-dm developed during 90's, and since then, components of data science have evolved a lot in regards to the way data is collected, volume, velocity, and variety of the information being analyzed(big data ), processing methodology, algorithms, and outcome delivery. hence it is crucial to understand in detail the current day's data science challenges, deep dive into the reasoning of failures, and find out the best practice. 
data science is an evolving field, and much has changed over the last decade in various key areas, such as data collection, big data processing on the cloud and/or on parallel or distributed processing frameworks, the evolution of new algorithms, data visualization, and the deployment and productionizing of data science solutions as digital products. there are several challenges in those key areas; as an example, because of big data, data science projects collect lots of irrelevant and uncleaned data. sometimes the intermediate data source doesn't have the correct business logic, which is mapped to the business objective of the data science project. "big data brings lots of "big errors" in data quality and data usage, which cannot be used as a substitute for sound research design and solid theories." data quality is one of the common challenges in data science projects. another major challenge is domain understanding, and many times domain understanding is a bottleneck for data science project success. also, in many cases, data science issues are seen differently and vary from one domain to another, and between the stakeholder view and the data scientist view. as mentioned by dr. om deshmukh, "data science project success also depends on what the stakeholder knows about the expected outcome of the data science project and how effectively stakeholder expectations are managed through effective communication." data science projects struggle with these and other challenges in industry settings, but not enough research has been done in this space to highlight those problems and their possible solutions. for better quality research in this domain, years of experience in the data science field and role/designation-level diversity in data science projects, such as key stakeholders and data scientists, have been included. we prepared a list of questions and interviewed experienced data scientists, senior leaders who actively manage data science and data analytics teams, and stakeholders who are part of data science projects, with the motivation to understand their perception of data science project failure. we then performed thematic analysis to analyze and draw conclusions from the data, and arrived at the most prevalent themes by thoroughly analyzing the discussions. in this research, we have discovered and highlighted the key challenging areas on which data science practice and data scientists should focus, along with some suggestions on tackling those challenges. we also came up with a state-of-the-art framework to tackle those challenges in efficient ways. crisp-dm was developed during the 90's, and since then, components of data science have evolved a lot in regards to the way data is collected, the volume, velocity, and variety of the information being analyzed (big data), processing methodology, algorithms, and outcome delivery. data science is an evolving field, and much has changed over the last decade in various key areas, such as data collection, big data processing on the cloud and/or on parallel or distributed processing frameworks, the evolution of new algorithms, data visualization, and the deployment and productionizing of data science solutions as digital products.
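since data quality is singled out above as one of the most common challenges, the toy pandas sketch below illustrates the three quality dimensions discussed in this section (accuracy, timeliness, completeness); the specific checks and thresholds are assumptions for illustration and are not part of the proposed methodology.

import pandas as pd

def data_quality_report(df: pd.DataFrame, timestamp_col: str, max_age_days: int = 30) -> dict:
    # toy checks: completeness (are necessary data present?), timeliness
    # (are the data up to date?) and a crude accuracy proxy (duplicate rows).
    completeness = 1.0 - df.isna().mean().mean()        # share of non-missing cells
    newest = pd.to_datetime(df[timestamp_col]).max()
    age_days = (pd.Timestamp.now() - newest).days
    duplicate_share = df.duplicated().mean()
    return {
        "completeness": round(float(completeness), 3),
        "days_since_last_record": int(age_days),
        "timely": bool(age_days <= max_age_days),
        "duplicate_share": round(float(duplicate_share), 3),
    }

# usage
df = pd.DataFrame({
    "customer_id": [1, 2, 2, 4],
    "amount": [10.0, None, 20.0, 5.0],
    "updated_at": ["2024-05-01", "2024-05-02", "2024-05-02", "2024-06-01"],
})
print(data_quality_report(df, timestamp_col="updated_at"))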
there are several challenges in those key areas; as an example, because of big data, data science projects collect lots of irrelevant and uncleaned data. as mentioned by dr. om deshmukh, "data science project success also depends on what the stakeholder knows about the expected outcome of the data science project and how effectively stakeholder expectations are managed through effective communication." for better quality research in this domain, years of experience in the data science field and role/designation-level diversity in data science projects, such as key stakeholders and data scientists, have been included. we prepared a list of questions and interviewed experienced data scientists, senior leaders who actively manage data science and data analytics teams, and stakeholders who are part of data science projects, with the motivation to understand their perception of data science project failure. in this research, the objective is also to capture the diversity of the people involved in a data science project, such as the data scientist and the business owner and/or data science capability leader and/or stakeholder. in data science, a stakeholder can be a business process owner, a product owner, or an end-user who is going to use the data science outcome or product. this is so that the data governance team can advise on an alternate solution to get the data, or advise not to opt for the project because the data used in the data science project does not meet the governance standard. the data quality dimensions considered are: 1) accuracy: are the data free of error? 2) consistency in timeliness: are the data up-to-date? 3) completeness: are necessary data present? reasons: data quality issues arise due to three main reasons: 1) the lack of a standardized data collection process across the organization, 2) errors due to humans or software, and 3) errors in the business logic used to process the data. the solutions we proposed to fix data quality are: 1) better data collection standards starting from application-level data generation, and 2) a centralized data store: an organization needs to spend time building a feature store with the help of a robust data pipeline. based on this research work, we recommend three key areas where improvement is needed: 1) stakeholder management, 2) data quality, and 3) a durable and deployable outcome. a potential data science project might fail due to the reasons discussed so far. also, a data scientist should have the ability to highlight ahead of time the potential risks that might occur due to data privacy and/or data security and/or data governance restrictions. to create fail-proof data quality management for the data science project, the data science team needs to spend dedicated time checking the data quality and plan to engage data engineers to improve the data quality standard. by combining all of this, we are proposing a new data science methodology called hybrid crisp ds (hybrid cross industry standard process for data science), which will take care of all the above hindrances and make data science projects less prone to failure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/966.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/966.txt new file mode 100644 index 0000000000000000000000000000000000000000..926f3bbe29d06c0efe501246206a1279f62e72fc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/966.txt @@ -0,0 +1 @@ +the growing demand for rental infrastructure led to the emergence of various centralized players in the ecosystem.
however, the adverse effect is that these major, market-dominating players control the majority of the market in unethical ways. these centralised entities use the user's data to make sure that the user spends more time on the platform by showing them relevant content. this relevance engine works on a lot of biases. this promotes content moderation and unethical practices. with the proposed system we can replace the centralised control with a decentralised, more trust-based protocol. we can think of this in terms of a migration from web2 to web3. the system will consist of a blockchain-based system with smart contracts for money and asset movements. with the help of code running on the blockchain, the system becomes more transparent and deterministic. smart contracts can also be used to block money, in the same way funds are blocked on a credit card. this will allow a security-fund kind of system, so when something goes wrong or the user of an asset causes some damage, the blocked funds can be used for recovery. with asset listing, we can also list services like an a/c, a washing machine, etc., as well as some core functionalities like unlocking the door by triggering a function using the owner's key. this can be a game-changer in terms of security and the market potential it comes with. transactions on the blockchain are slow and costly, so triggering locking and unlocking functionality on the blockchain would be costly and might take a lot of time, which is not a good user experience. to mitigate this we can leverage the power of ethereum's whisper protocol, which is a messaging protocol. this can unlock some huge potential in this space. blockchains are of two types: permissioned and permissionless. a permissionless chain is more of a public chain where anyone can be a participating node and trust works on a consensus model, whereas a permissioned chain is more of a private chain and no one can directly join the network without permission or authentication. this is more of a controlled chain and is generally used by government authorities. these centralised entities use the user's data to make sure that the user spends more time on the platform by showing them relevant content. the system will consist of a blockchain-based system with smart contracts for money and asset movements. this will allow a security-fund kind of system, so when something goes wrong or the user of an asset causes some damage, the blocked funds can be used for recovery. with asset listing, we can also list services like an a/c, a washing machine, etc., as well as some core functionalities like unlocking the door by triggering a function using the owner's key. transactions on the blockchain are slow and costly, so triggering locking and unlocking functionality on the blockchain would be costly and might take a lot of time, which is not a good user experience. whereas in a permissioned chain, it's more of a private chain and no one can directly join the network without permission or authentication.
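to make the money-blocking and temporary-ownership ideas above concrete, the following plain-python sketch models the escrow logic that a rental smart contract could enforce; the class name, the damage-claim flow and the numbers are illustrative assumptions, not the actual on-chain contract.

from dataclasses import dataclass, field
from typing import Dict, Optional

@dataclass
class RentalEscrow:
    # off-chain illustration of the escrow logic a rental smart contract
    # could enforce: the tenant's deposit is blocked for the rental period
    # and released (minus any damage claim) at checkout.
    owner: str
    asset_id: str
    deposit_required: int
    blocked: Dict[str, int] = field(default_factory=dict)
    current_tenant: Optional[str] = None
    days_left: int = 0

    def rent(self, tenant: str, deposit: int, days: int) -> None:
        if self.current_tenant is not None:
            raise RuntimeError("asset already rented")
        if deposit < self.deposit_required:
            raise ValueError("deposit too small")
        self.blocked[tenant] = deposit          # like a hold on a credit card
        self.current_tenant = tenant
        self.days_left = days                   # temporary transfer of usage rights

    def checkout(self, damage_claim: int = 0) -> int:
        # release the blocked deposit minus any damage claim by the owner.
        refund = max(self.blocked.pop(self.current_tenant) - damage_claim, 0)
        self.current_tenant = None
        self.days_left = 0
        return refund

# usage
escrow = RentalEscrow(owner="0xowner", asset_id="flat-42", deposit_required=100)
escrow.rent(tenant="0xtenant", deposit=120, days=7)
print(escrow.checkout(damage_claim=30))         # 90 returned to the tenant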
asset listing and allocation the system will allow users to list their assets (properties) on the main chain so that they can be rented by other user's.protocol the system allows temporary change of ownership of the asset on the blockchain this can be achieved by the exchange of assets in this case the currency of the chain with the property.the user must register and a web server will accept the request and enter the user details in a secure distributed database (like orbit db or dgraph).the public key is the address of the user on the chain which is anonymous whereas the private key is like a password or secret for the data encrypted using the public key.2) allocation and locking resource for a particular user if a user rents the property for x days the rights will be transferred to the tenant temporarily for x days. a user can change the ownership of the property in the real world which must reflect in the blockchain.3) user engagements/activities a user can engage with the system in various manners.the system revolves around allocating physical/real assets in the virtual world therefore it is quite necessary to verify the actual relationship of the user with that asset.the system focuses on privacy at its core therefore strict privacy policies must be implied, in order to protect the user data and maintain anonymity . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/967.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/967.txt new file mode 100644 index 0000000000000000000000000000000000000000..5ebe7ccc744aa1f69dabd9021aae5300a734ea95 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/967.txt @@ -0,0 +1 @@ +efficient and stable transportation systems are critical to the smooth functioning of modern society, as they facilitate the movement of people and goods . taxis are a popular mode of transportation and play an important role in the overall traffic system. however, with the rise of online ride-hailing services, traditional taxi companies are facing challenges in terms of efficient scheduling and security monitoring of their vehicles, particularly because taxi drivers cannot know their destinations in advance.fortunately, most taxis are equipped with mobile gps devices that record and report their trajectories. analyzing this trajectory data can provide insights into the destination of a taxi, which can yield several benefits such as providing location-based services and applications, alleviating traffic congestion, and optimizing taxi dispatch. at the same time, analysis of the destinations of taxi trajectories can yield several benefits such as providing alleviating traffic congestion, optimizing taxi dispatch, and location-based services and applications such as recommending sightseeing places, accurate ads based on destinations, etc.destination prediction based on vehicle trajectories involves using machine learning algorithms to analyze the trajectory data collected from the gps devices installed in taxis. these algorithms can identify patterns in the data and use these patterns to predict the destination of a taxi with a high degree of accuracy. this can help taxi companies to efficiently schedule their vehicles and ensure that they are being utilized effectively.moreover, destination prediction can also help improve the security monitoring of taxis. 
by analyzing the trajectory data, it is possible to detect anomalies such as sudden deviations from a usual route or unexpected stops, which could be indicative of criminal activity .vehicle destination prediction is typically based on analyzing previous gps records along with the surrounding environment, which includes factors such as the road structure and other nearby vehicles . a variety of models have been developed to address this issue, including conventional approaches and deep learning methods. conventional methods such as physics-based, maneuver-based, and interaction-aware models , are limited in their ability to capture the complex spatiotemporal dependencies in the data, resulting in suboptimal prediction accuracy. with the emergence of deep learning, researchers have explored the use of convolutional neural networks (cnns) and recurrent neural networks (rnns) for trajectory prediction , . these methods leverage the power of deep learning to capture non-linear relationships and long-range dependencies in the data, resulting in significant improvements in prediction accuracy. more recently, graph-based techniques such as graph convolutional networks (gcns) have been incorporated to model the spatial structure and interactions between taxis within the road network. gcns can effectively model the underlying structure of road networks and capture the interactions between different taxis, leading to improved prediction accuracy.in order to fully capture the spatial information of a trajectory, researchers often convert it into a two-dimensional map since it is highly related to the structure of road networks. this allows for the utilization of more advanced computer vision techniques to solve prediction problems. in recent years, with the breakthroughs in computer vision using vision transformers, many scholars have been inspired to use them for trajectory or destination prediction and have achieved good results , . furthermore, swin-transformer, a variant of vision transformers, has become a general-purpose backbone for computer vision tasks . however, to the best of our knowledge, the swin architecture has not been widely used in trajectory analysis or destination prediction before.the main contributions of this paper are as follows:• firstly, an sst is proposed that is better suited to the destination prediction problem. this model is shown to be competitive for spatiotemporal prediction of taxi destinations, providing a new perspective for researchers seeking to apply state-of-the-art computer vision techniques to destination prediction problems. • secondly, the study compares three grid-based modeling approaches for destination prediction and evaluates their effectiveness in fitting traditional trajectory data into a trajectory grid. the results of this comparison can provide insights into the an effective way to convert traditional trajectory data into trajectory grids for further analysis. the remainder of this paper is structured as follows.section ii provides a comprehensive review and summary of previous studies related to trajectory prediction. section iii describes the problem statement. it discusses how travel trajectory data is collected from taxis once they start carrying passengers. the proposed methodology, including data processing and model structure, is described in section iv. section v presents experimental results that compare several models and their performance against our proposed approach. 
the results demonstrate the effectiveness of our sst in predicting taxi destinations based on trajectory data. in section vi, we summarize the contributions of this study and highlight its potential impact on the transportation system and society as a whole. analyzing this trajectory data can provide insights into the destination of a taxi, which can yield several benefits such as providing location-based services and applications, alleviating traffic congestion, and optimizing taxi dispatch.destination prediction based on vehicle trajectories involves using machine learning algorithms to analyze the trajectory data collected from the gps devices installed in taxis. with the emergence of deep learning, researchers have explored the use of convolutional neural networks (cnns) and recurrent neural networks (rnns) for trajectory prediction,. in recent years, with the breakthroughs in computer vision using vision transformers, many scholars have been inspired to use them for trajectory or destination prediction and have achieved good results,. this model is shown to be competitive for spatiotemporal prediction of taxi destinations, providing a new perspective for researchers seeking to apply state-of-the-art computer vision techniques to destination prediction problems. • secondly, the study compares three grid-based modeling approaches for destination prediction and evaluates their effectiveness in fitting traditional trajectory data into a trajectory grid. the results of this comparison can provide insights into the an effective way to convert traditional trajectory data into trajectory grids for further analysis. early studies in trajectory prediction employed physics-based models such as dynamic models,and kinematic models,, which predict future vehicle motion based on vehicle attributes, control inputs, and external factors such as the vehicle's position, heading, and speed. while physicsbased models are widely used in trajectory prediction and collision risk estimation, their ability to predict trajectories over a long time is limited by their reliance on low-level motion properties.propose an lstm model for trajectory prediction under the scene of the freeway, which not only includes track histories but also takes into account surrounding vehicles and road structures as input.most existing methods for trajectory prediction focus on modeling trajectories as a one-dimensional time series, which may not fully capture the complex nonlinear spatial-temporal correlations inherent in trajectory data. for instance, lv et al. additionally, the use of attention mechanisms in the architecture allows for the model to better capture long-range dependencies between pixels, which is important for accurately predicting the final destination of a taxi trajectory. in contrast, the binary method loses sequential information during the trajectory grid conversion, thereby preventing the cnn and sst models from capturing vehicle direction in the trajectory.this paper proposes a novel approach to predict the destinations of taxi trajectories, which involves three different trajectory grid formation methods and the use of a simplified swin transformer (sst) model. 
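To make the grid-based modelling idea above concrete, the following sketch rasterises a GPS trajectory into a 2-D grid in two hypothetical variants: a binary grid that only records whether a cell was visited, and an order-preserving grid that keeps the normalised visit order (and hence the travel direction that the binary method loses). The function name, grid size, and bounding box are illustrative assumptions, not the paper's actual grid formation methods.

```python
import numpy as np

def trajectory_to_grid(points, bounds, grid_size=(64, 64), mode="binary"):
    """Rasterise a GPS trajectory into a 2-D grid.

    points    : list of (lat, lon) fixes in time order
    bounds    : (lat_min, lat_max, lon_min, lon_max) of the covered city area
    grid_size : (rows, cols) of the output grid
    mode      : "binary"     -> cell is 1 if visited at all (visit order is lost)
                "sequential" -> cell stores the normalised visit order, so a model
                                can still recover the direction of travel
    """
    lat_min, lat_max, lon_min, lon_max = bounds
    rows, cols = grid_size
    grid = np.zeros(grid_size, dtype=np.float32)

    for step, (lat, lon) in enumerate(points):
        # map the fix onto a cell index, clipping points that sit on the boundary
        r = min(int((lat - lat_min) / (lat_max - lat_min) * rows), rows - 1)
        c = min(int((lon - lon_min) / (lon_max - lon_min) * cols), cols - 1)
        if mode == "binary":
            grid[r, c] = 1.0
        else:  # "sequential": later visits overwrite earlier ones
            grid[r, c] = (step + 1) / len(points)
    return grid

# toy usage: a short trajectory inside a fabricated bounding box
traj = [(41.15, -8.62), (41.16, -8.61), (41.17, -8.60)]
g = trajectory_to_grid(traj, bounds=(41.10, 41.20, -8.70, -8.55), mode="sequential")
print(g.shape, g.max())
```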
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/968.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/968.txt new file mode 100644 index 0000000000000000000000000000000000000000..c706e8f972df8f252afaef92eb3e0ad7cdcf4c6a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/968.txt @@ -0,0 +1 @@ +in recent times, the adoption of large pretrained language models (llms) in next-generation automated workflows for natural language-based tasks has been on the rise. however, this increased usage has also brought concerns about the safety and reliability of these models into the limelight (weidinger et al. 2022;gupta et al. 2023). in the context of natural language generation (nlg), it is essential to have a dependable llm that produces semantically equivalent outputs when given semantically equivalent prompts. this property is known as consistency. consistency is critical to guarantee the safety of llms, since it increases the certainty that an llm can produce similar enough outputs when fed with semantically similar inputs. however, the extent to which current nlg approaches exhibit consistency and the methods to measure this consistency remains insufficient. to address this gap, we propose a novel framework for evaluating and ensuring the consistency of nlg. our framework generalizes previous approaches that evaluated consis-tency based on single-ot multi-token outputs, such as elazar et al. (2021), to encompass entire generated text sequences.our measure of consistency for nlg surpasses simple lexical measures proposed in prior studies, and captures genuine variations in prompts that convey the same semantic meaning but differ in their lexical representation. we empirically assess the effectiveness of this metric in evaluating consistency, considering various measures of semantic equivalence, llm architectures, as well as paraphrasing and answer generation/decoding techniques.we consider a closed book question-answering scenario where we want to ensure that the answers generated from an llm in response to paraphrased versions of a question are semantically similar. to this end, we utilize in-context learning strategies-i.e. eliciting answers from a llm using fewshot examples in a prompt template-in a number of ways. advanced prompting strategies, such as chain-of-thought reasoning (wei et al. 2023), are widely known to extract improved performance from llms, as well as help reduce harmful bias (guo, yang, and abbasi 2022) and improve factuality (si et al. 2023). our findings show that prompting strategies can also be useful to ensure consistency in realistic paraphrasing situations. we illustrate the specifics of our strategy in fig. 1. to generate answers to paraphrased versions of a question, we start with the prompt template paraphraseprompt that contains examples of different ways to paraphrase (such as using synonyms or changing syntax). we then iteratively feed the indicators of each of these methods, along with an input question into an auxiliary llm (auxllm) to generate paraphrases of that question. these questions are fed into the main llm, generating descriptive answers. each descriptive answer, along with the original question, is added into a second prompt template (answerprompt), and fed into llm again to elicit short one or two word answers. the inclusion of few-shot examples of question-long answer-short answer combinations within the answerprompt aid in this process. 
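A minimal sketch of the prompting pipeline just described, with plain callables standing in for the auxiliary and main LLMs; the template strings, helper names, and toy stand-ins below are placeholders for illustration, not the paper's actual prompts or models.

```python
def generate_answer_variations(question, paraphrase_methods, aux_llm, main_llm,
                               paraphrase_prompt, answer_prompt):
    """Paraphrase a question, answer each paraphrase, then distil short answers.

    aux_llm / main_llm : callables str -> str standing in for real LLM calls
    paraphrase_prompt  : few-shot template with {method} and {question} slots
    answer_prompt      : few-shot template with {question} and {long_answer} slots
    """
    short_answers = []
    for method in paraphrase_methods:  # e.g. "synonyms", "syntax change"
        paraphrase = aux_llm(paraphrase_prompt.format(method=method, question=question))
        long_answer = main_llm(paraphrase)                      # descriptive answer
        short = main_llm(answer_prompt.format(question=question, long_answer=long_answer))
        short_answers.append(short)                             # one/two-word answer
    return short_answers

# toy stand-ins: real use would call an actual LLM here
toy_paraphraser = lambda prompt: "Which playwright authored Hamlet?"
toy_answerer = lambda prompt: "Shakespeare"
print(generate_answer_variations("Who wrote Hamlet?", ["synonyms", "syntax change"],
                                 toy_paraphraser, toy_answerer,
                                 "Paraphrase ({method}): {question}",
                                 "Q: {question} A: {long_answer} -> short answer:"))
```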
once we have multiple short answers to the original question, we add each pair of answers, plus the original question, into auxllm again to obtain pairwise semantic equivalence of these two answers.our proposed semantic consistency metric takes as input all pairwise semantic equivalences, and calculates overall consistency between all variations of answers to the origi- nal question. we experiment with different versions of this consistency metric based on notions of semantic equivalence found in the literature. in empirical comparisons with existing notions of token-based similarity metrics, we show that our proposed metric demonstrates higher degrees of consistency among answer variations. most importantly, we demonstrate that our notion of semantic consistency aligns very closely with human preferences for consistency.our main findings are as follows.• larger llms tend to exhibit higher levels of consistency.• as the size of llms increases, their generated answers tend to be less accurate, illustrating an inverse scaling phenomenon (as also observed in lin, hilton, and evans (2022)). • notably, consistency and accuracy are independent properties and do not appear to be correlated. • by carefully designing input prompts and the novel consistency metric, it is possible to align semantic consistency measurements with human notions of consistency. • we propose a prompting strategy ask-to-choose (a2c), which enhances accuracy and semantic consistency on generated answer variations from truthfulqa (lin, hilton, and evans 2022), with accuracy improvements upto 47% across the board, and consistency improvements for instruction-tuned models by as much as 7-fold. each descriptive answer, along with the original question, is added into a second prompt template (answerprompt), and fed into llm again to elicit short one or two word answers.our proposed semantic consistency metric takes as input all pairwise semantic equivalences, and calculates overall consistency between all variations of answers to the origi- nal question. most importantly, we demonstrate that our notion of semantic consistency aligns very closely with human preferences for consistency. • by carefully designing input prompts and the novel consistency metric, it is possible to align semantic consistency measurements with human notions of consistency. • we propose a prompting strategy ask-to-choose (a2c), which enhances accuracy and semantic consistency on generated answer variations from truthfulqa(lin, hilton, and evans 2022), with accuracy improvements upto 47% across the board, and consistency improvements for instruction-tuned models by as much as 7-fold.mitchell et al. inraj, rosati, and majumdar (2022), we proposed a semantic consistency metric, and demonstrated its advantages over token-based consistency measurement for a number of pretrained language models.data to evaluate semantic consistency, we use answers generated on questions from the truthfulqa benchmark dataset(lin, hilton, and evans 2022), using the two variations mentioned above. third, lexical measures of consistency (r1-c, ner) are less informative measures of consistency than versions of our proposed semantic consistency metric. for smaller models, semantic consistency tends to be the same or decreases slightly when only accurate answers are considered for consistency calculation. 
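One way such a pairwise consistency measure could be operationalised is sketched below: every pair of generated answers is scored by a semantic-equivalence judge and the fraction of equivalent pairs is reported. The `equivalent` callable is a placeholder for whatever judge is used (an NLI model, an auxiliary LLM prompt, etc.); the exact-match toy judge exists only to make the example runnable.

```python
from itertools import combinations

def semantic_consistency(answers, equivalent):
    """Overall consistency of a set of generated answers.

    answers    : list of answer strings produced for paraphrased prompts
    equivalent : callable (a, b) -> bool judging semantic equivalence
    Returns the fraction of answer pairs judged semantically equivalent.
    """
    pairs = list(combinations(answers, 2))
    if not pairs:
        return 1.0  # a single answer is trivially consistent with itself
    agree = sum(1 for a, b in pairs if equivalent(a, b))
    return agree / len(pairs)

# toy judge: exact match stands in for a real semantic-equivalence model
toy_judge = lambda a, b: a.strip().lower() == b.strip().lower()
print(semantic_consistency(["Paris", "paris", "Lyon"], toy_judge))  # 1/3
```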
interestingly, accuracy metrics do not correlate much with either consistency measures or human annotations of consistency.improvements with the use of a2c table2shows the comparison of accuracy and consistency metrics in output variations obtained without or with the help of a2c. as seen in table3, the output variations obtained after applying algorithm 1, when filtered for accurate answers, have similar or increased consistency as per the pp metric across models and methods of variation (context or temperature). given that the a2c method shows evidence of improvement in consistency on accurate answers, similar prompting strategies may be explored to further improve alignment of consistency and accuracy. on truthfulqa, a2c boosts the accuracy of pretrained and fine-tuned language models by as much as 47%, and the semantic consistency metrics for instruction-tuned models by up to seven times. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/969.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/969.txt new file mode 100644 index 0000000000000000000000000000000000000000..37e99952edfe90cde24b48291e38000886162618 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/969.txt @@ -0,0 +1 @@ +automated short answer grading (asag) is the use of statistical models to assess student short constructed responses in a way that is designed to replicate human scoring . asag is often seen as an ideal application of artificial intelligence because the rubrics for short constructed responses are relatively simple and the process of hand-scoring them can be time-consuming. however, asag has not received as much attention as automated essay scoring (aes) . one of the most challenging areas of asag is the assessment of items in mathematics. this is because mathematics questions often require students to demonstrate their understanding of concepts and procedures, which can be difficult for a machine to evaluate .transformer-based language models are a type of deep learning model that have achieved state-of-the-art results on a wide range of natural language processing tasks, such as question answering, natural language inference, and text summarization . they are distinguished by their use of the attention mechanism, which allows them to learn long-range dependencies between words in a sentence. pretraining is a technique used in natural language processing to train a language model on a large amount of unlabeled text data. this allows the model to learn general language understanding and generation skills, which can then be applied to a specific task . pretrained language models (plms) have been shown to be effective for a variety of tasks, including automated essay scoring (aes) and automated short answer grading (asag). in aes, plms can be used to score essays by identifying key features of good writing, such as grammar, vocabulary, and coherence. in asag, plms can be used to score short constructed responses by identifying key concepts and procedures. recent studies have shown that plms can achieve state-of-the-art results on aes and asag tasks .despite all the advances with plms in asag, mathematical reasoning has proven to be difficult even for large generative language models . this challenge has prompted researchers to develop models that are specific to mathematics . 
the fundamental challenge presented by mathematical reasoning tasks requires developing a multitude of different ways to approach the automated assessment of items in mathematics.this short article concerns a method of asag where the assessment of the item is approached implicitly instead of explicitly. instead of assigning a score based on a holistic rubric, we use language models to extract key numerical values from the response. not only does this information allow us to assign a score, these values are able to highlight some of the misconceptions the student may have. this addresses the growing need in education for language models to provide feedback to teachers and students .the dataset we used for this task was derived as a supplementary dataset provided in connection with a national assessment program. it was administered alongside a short answer dataset used in a previous paper . this system relies upon extensive preprocessing to convert and normalize the set of numerical values that appear in a constructed response and a pipeline consisting of a classification model and a question answer-ing model. we use language models for both of these modeling tasks.this article is organized as follows: in §2, we give a brief description of the dataset used in this study, how we preprocessed responses, and details regarding how the dataset was modeled. we then consider a comparison of the engines results against human benchmarks in §3. in §4, we discuss the results and future directions for this research. this system relies upon extensive preprocessing to convert and normalize the set of numerical values that appear in a constructed response and a pipeline consisting of a classification model and a question answer-ing model. if the value was stated, they would enter the value in the data associated with that response, and if it was not, the rater left the value blank.the pipeline we use consists of two models: a text classification model and a token classification model. the text classification model distinguishes between three types of answers: 0, 1, or a value, v, which is any other value mentioned in the text.for each prompt, and for the entire dataset, we define κ 0 to be the kappa value indicating that both raters agree that either no value has been specified or the student has specified a value of 0. similarly, we define κ 1 to be the kappa value for the agreement that the student has specified a value of 1, and κ v is the agreement that any other value has been specified in the text. to simplify the presentation, we treat each value in each prompt independently, hence, given a prompt with n responses and v values, the kappa values presented in table2.the second model is used in the situation associated with κ v , where a value is specified in the response. from the entire set of responses, the data for the second model consists of all responses in which the value is found to be speci- this dataset was partitioned into a random set of training responses consisting of exactly 70% of all data, a development set consisting of 15% of all data, and a test set consisting of the remaining 15% of all responses.the first modeling task considered the classification of the student response as either being 0, 1, or v where v is any particular value specified by the student. 
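As a rough illustration of the κ_0 / κ_1 / κ_v split described above, the sketch below binarises each rater's labels per category and computes Cohen's kappa on the binarised labels. Whether the authors compute their per-value agreement in exactly this way is an assumption made here purely for illustration; the toy labels are fabricated.

```python
from sklearn.metrics import cohen_kappa_score

def per_category_kappa(rater_a, rater_b, categories=("0", "1", "v")):
    """Per-category inter-rater agreement.

    Each rater labels a response as "0", "1", or "v" (any other value).
    For each category the labels are binarised (this category vs. everything
    else) and Cohen's kappa is computed on the binarised labels, mirroring the
    kappa_0 / kappa_1 / kappa_v quantities discussed in the text."""
    kappas = {}
    for cat in categories:
        a_bin = [1 if lab == cat else 0 for lab in rater_a]
        b_bin = [1 if lab == cat else 0 for lab in rater_b]
        kappas[cat] = cohen_kappa_score(a_bin, b_bin)
    return kappas

# toy example: two raters labelling five responses
rater_a = ["0", "1", "v", "v", "0"]
rater_b = ["0", "1", "v", "0", "0"]
print(per_category_kappa(rater_a, rater_b))
```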
it is also the case that some prompt-value pairs do not have sufficiently many training examples to fine-tune a single model, hence, generic models are also used to identify values specified by the students.with regards to the input of the model, we followed the preprocessing specified in the previous sections, then we replaced the numerical values that appear in the student response by mask tokens. in training the models as a token classifier model, the tokens masking the correct values identified handscoring team are given target labels of 1 while every other token is given a value of 0. the final engine is the result of a single classification model, given by electra, and the individual prompt level models, given by the ensembles of a generic electra model and a math-bert model. the classification component on the pipeline indicated that values were specified in each case and the value identification component returned 9 bags of chocolates, 1 bag of lollipops, and 2 bags of gum sticks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/97.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/97.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b50217fa754ba78011bd4b2aaf40c0e04b95f0b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/97.txt @@ -0,0 +1 @@ +deep reinforcement learning (drl) has made significant progress over the past decade, from its start playing atari games , to beating humans in board and video games , to now addressing important complex safetycritical challenges such as defending computer systems , managing power networks and driving vehicles . this increase in complexity has transitioned from fullyobservable single-agent environments to partially-observable multi-agent environments. however, to realise positive societal impacts, drl must overcome the major vulnerability in all deep learning systems to adversarial machine learning (aml) attacks , which exploit the neural networks that are the fundamental component of deep learning.aml identifies vulnerabilities in machine learning algorithms, such as slight human-imperceptible changes to inputs returning significantly different outputs from neural networks . neural networks are the core of all deep learning, and may generalise to unseen inputs which is key to drl . despite existing research interest , there are many challenges with the application of aml techniques to multi-agent reinforcement learning (marl), multi-agent learning (mal), and drl, including discovering effective attacks and finding defences that can generalise to unseen attacks .aml attacks exist in a large potential solution space. attacks against supervised image classifiers use approximation techniques such as fast gradient sign method (fgsm) to find adversarial examples for single static images. drl involves many sequential observations and finding an effective attack requires knowing when, how, and what to attack. huang et al. showed that approaches such as fgsm attacks were able to attack drl by altering the observation at every time step. subsequent research has found attacks that reduced the number of perturbed timesteps required to degrade the agent performance , demonstrating the importance of discovering the best attack timing.adversarial training may defend against aml attacks on supervised learning and drl . adversarial training uses both original and adversarially produced examples to retrain a vulnerable algorithm. 
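The FGSM-style observation attacks mentioned above (and the adversarial examples such attacks supply for adversarial training) can be sketched generically in PyTorch: a one-step perturbation that raises the loss of the action the policy would otherwise choose. This is an illustration of the general technique, not the specific attack of the cited works, and the toy linear "policy" exists only to make the snippet runnable.

```python
import torch
import torch.nn.functional as F

def fgsm_observation_attack(policy, obs, eps=0.01):
    """Craft an FGSM perturbation of a single observation so the policy is
    pushed away from the action it would normally prefer.

    policy : torch.nn.Module mapping an observation batch to action logits
    obs    : 1-D float tensor, the clean observation
    eps    : L-infinity budget of the perturbation
    """
    obs = obs.clone().detach().unsqueeze(0).requires_grad_(True)
    logits = policy(obs)
    greedy_action = logits.argmax(dim=-1)        # action the agent would take
    # ascend the loss of the greedy action => make it look less attractive
    loss = F.cross_entropy(logits, greedy_action)
    loss.backward()
    adv_obs = obs + eps * obs.grad.sign()
    return adv_obs.squeeze(0).detach()

# toy usage with a random linear "policy" over a 4-dimensional observation
policy = torch.nn.Linear(4, 3)
clean = torch.rand(4)
adv = fgsm_observation_attack(policy, clean, eps=0.05)
print((adv - clean).abs().max())  # perturbation is bounded by eps
```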
in supervised learning, the retrained algorithm is then more robust against similar future attacks . however, adversarial training in drl produces a narrow robustness only against the specific attack being used and thus is unable to generalise to other attacks .there is a critical need for marl practitioners to understand the aml attacks and defences . a number of previous works have surveyed the state of aml as applied to drl . however, to the best of our knowledge, there is no work concerning the application of aml against marl. in this work we address the use of aml techniques on marl, by employing a new attack perspective called attack vector and proposing classifications for aml attacks and defences. our taxonomy of aml attacks covers how an attack may be deployed, what information it uses, and the attack goal. our taxonomy of aml defences covers a range of different types of defences, the attack vectors a defence can counter, when a defence is deployed, and what information a defence requires about an attack. the contributions we make in this work are fivefold:• a survey of aml attacks and defences as applied to marl, drl, and mal more broadly.• a cyber-security inspired perspective on aml attacks against marl, attack vectors.• an improved categorisation on aml defences, which better categorises the different defences against execution-time attacks against marl.• two new frameworks for modelling aml attacks against drl that describe combinations of attack vectors and attack magnitude, tempo and location. in this work we address the use of aml techniques on marl, by employing a new attack perspective called attack vector and proposing classifications for aml attacks and defences. in our analysis of previous work that classifies aml attacksspecifically against marl, we found no surveys that covered aml defences for marl; we have identified several that covered aml defences for drl. however, neither of these classifications support the categorisation of aml attacks unique against marl, such as attacks against communications in cooperative marl and adversarial policies in competitive marl. to address this gap, we have developed a classification of aml attacks against marl and drl that covers the means of attack, the aim of the attack, and the knowledge of the target required by an adversary. related work has covered aspects of aml applied to drl, however there remain gaps in the coverage of existing surveys around aml attacks and defences for marl, and the classification of aml attacks and defence when applied to drl and mal. several papers we found consider natural adversarial examples54]and their attacks are presented in table2along with the information required for the attack, the objective of the attack, if a transfer attack is used, the algorithms that were attacked, and the framework used in the paper to present the attack. 
a partially observable markov decision process (pomdp) is defined by the 6-tuple (S, A, T, Ω, O, R), in which S is the set of states, A is the set of actions, T is the state transition function that maps a state s ∈ S and action a ∈ A to a next state s' ∈ S such that s' = T(s, a), Ω is the set of observations, O is the observation probability function that maps a state s ∈ S and action a ∈ A to an observation o ∈ Ω such that O(s, a) = o, and R is the reward function. the state transition function that maps a state s ∈ S and joint action {a_i} ∈ {A_i} to a next state s' ∈ S such that s' = T(s, {a_i}), {Ω_i} is the joint set of observations, Ω_i is the set of observations of agent i ∈ I, O is the joint observation probability function that maps a state s ∈ S and joint action {a_i} ∈ {A_i} to a joint observation. a stochastic game (sg) is defined by the 5-tuple (I, S, {A_i}, T, {R_i}), in which I is the set of agents, S is the set of states, {A_i} is the joint set of action sets, A_i is the action set of agent i ∈ I, T is the state transition function that maps a state s ∈ S and joint action {a_i} ∈ {A_i} to a next state s' ∈ S such that s' = T(s, {a_i}), {R_i} is the set of reward functions, and R_i is the reward function of agent i ∈ I. a probabilistic action robust mdp (pr-mdp) is defined by the 6-tuple (S, A, T, R, V, α), in which S is the set of states, A is the set of actions, T is the state transition function that maps a state s ∈ S and action a ∈ A to a next state s' ∈ S such that T(s, a) = s', R is the reward function, and V is the probabilistically-robust (pr) operator such that V(s) = a, where a ∈ A is either an adversarial action with probability α or the agent's original action with probability 1 − α, and the selected action is executed in the state transition function. is the set of reward functions for all agents • θ_i is the tempo of the attack, which determines if the observation of agent i for a given state s will be perturbed by the function v_i • ∆_i is the maximum magnitude of the change of a single element in the perturbed observation e_ij ∈ o_i from the original observation o_i of agent i • σ_i is the scope of the attack, which determines which elements of the observation o_i are perturbed at state s. we focus on new aml attacks that may be effective against drl and marl algorithms, and suggest approaches to aml defences that may improve algorithm robustness. the enumeration of attack vectors has revealed potential gaps in aml attacks against marl, namely, white-box malicious communications, patch attacks, action-based and state-based targeted communication perturbation attacks, untargeted communication perturbation attacks, and action-based and state-based targeted natural adversarial examples. there are significant gaps in the research around aml attacks and defences for marl that need to be addressed, including mitigating aml attacks against multiple agents, the combined effect of multiple aml attacks, quantifying the effectiveness of aml defences, and using knowledge about an attacker to improve aml defences. another key contribution of this work is the proposal of two new frameworks, observation adversarial partially observable stochastic game (oa-posg) and action robust partially observable stochastic game (ar-posg), which address a gap in the current frameworks being used to research aml in drl, namely, the inability to model communication perturbations, the inability to model multiple simultaneous aml attack vectors, and the lack of detail around the tempo, magnitude and location of attacks.
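The probabilistically-robust operator of the PR-MDP described above lends itself to a very small sketch: with probability α an adversary's action is executed in place of the agent's. The `ToyEnv` interface and function names are illustrative assumptions, not a specific library API.

```python
import random

def pr_operator(agent_action, adversary_action, alpha, rng=random):
    """Probabilistically-robust (PR) operator of a PR-MDP: with probability
    alpha the adversary's action is executed, otherwise the agent's action."""
    return adversary_action if rng.random() < alpha else agent_action

def step_pr_mdp(env, agent_policy, adversary_policy, state, alpha):
    """One transition of a PR-MDP layered on top of an ordinary environment.

    env              : object with a step(state, action) -> (next_state, reward)
                       method (hypothetical interface, not a specific library)
    agent_policy     : callable state -> action
    adversary_policy : callable state -> action
    """
    executed = pr_operator(agent_policy(state), adversary_policy(state), alpha)
    return env.step(state, executed)

class ToyEnv:
    """Trivial chain environment used only to exercise the PR operator."""
    def step(self, state, action):
        return state + action, -abs(action)

env = ToyEnv()
print(step_pr_mdp(env, lambda s: +1, lambda s: -1, state=0, alpha=0.3))
```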
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/970.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/970.txt new file mode 100644 index 0000000000000000000000000000000000000000..46035c5ed02aae1b3fba128dca839a928ee9e704 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/970.txt @@ -0,0 +1 @@ +maintaining effective professional networks is important for career progression. within a broader landscape of the digitalisation of work, online professional social networking sites (snss) offer widespread opportunities for workers to manage their image and career, connect with professional this work is licensed under a creative commons attribution 4.0 international license (cc by 4.0). networks, and find new job roles. with over 875 million users, linkedin is the world's largest professional networking platform . although the large user base engaging with linkedin as part of their career strategy makes it an important platform to study in its own right, recent work also shows that online networking behaviour on linkedin influences job opportunities for users (rajkumar et al. 2022;wheeler et al. 2022).given the potentially significant benefits of online professional networking, it is important to understand how different users, especially those who have conventionally faced greater disadvantages in the labour market, such as women, engage with online professional networks. a large body of research shows that women's offline networks are less advantageous than men's (wang 2009;mengel 2020). how online platforms such as linkedin may reflect or reproduce gendered networking behaviours (bapna and funk 2018;greguletz, diehl, and kreutzer 2018;wang 2009;kirton and robertson 2018), or how online social connectivity may affect outcomes differentially by gender is not well understood. although existing research highlights gender gaps in linkedin use (kashyap and verkroost 2021), with larger gaps in certain industries such as information technology (it) (haranko et al. 2018;verkroost et al. 2020), other dimensions such as gender gaps in social connectivity and its link with job progression outcomes, such as promotion or relocation, have received limited attention. this paper examines these dimensions using aggregated, anonymised data from almost 10 million linkedin users from the site's advertising platform. specifically, we examine how gender, age, seniority, and connectedness to big companies (what we term social connectivity) are associated with the propensity to report job promotions or relocations on the linkedin population.our study focuses on linkedin users within the it sector in the us and uk, a male-dominated set of industries within which progression for women can be made particularly difficult through pervasive gender stereotypes about technical skills, 'geek' culture, 'old boys' clubs of informal networks, and organisational factors such as long hours that increase as of jan 14, 2023: https://about.linkedin.com.the difficulty to achieve work-family balance (ahuja 2002;armstrong, riemenschneider, and giddens 2018). 
socialising and networking events within it companies are commonly reported to be in male-oriented spaces -e.g., involving sports, pub trips, and take place outside of already long industry working hours (cross and linehan 2006;kirton and robertson 2018;mcgee 2018;earles 2020) -with some women feeling uncomfortable or unable to attend as often as male colleagues due to gendered family expectations, and others not receiving invitations (bjerk 2008;kirton and robertson 2018). these norms within the industry may further reinforce disadvantages faced by women within it, and limit their ability to expand their networks in beneficial ways for career progression. in contrast, online networking theoretically offers greater flexibility to participate in terms of time and location, with the potential to bolster opportunities for those that face greater constraints, such as women. it may provide opportunities to expand and build advantageous networks beyond those encountered within one's immediate work environment. however, whether online connectivity provides these differential payoffs by gender has received limited empirical attention, and is a question that our study examines. although the large user base engaging with linkedin as part of their career strategy makes it an important platform to study in its own right, recent work also shows that online networking behaviour on linkedin influences job opportunities for users(rajkumar et al.given the potentially significant benefits of online professional networking, it is important to understand how different users, especially those who have conventionally faced greater disadvantages in the labour market, such as women, engage with online professional networks. how online platforms such as linkedin may reflect or reproduce gendered networking behaviours(bapna and funk 2018;greguletz, diehl, and kreutzer 2018;wang 2009;kirton and robertson 2018), or how online social connectivity may affect outcomes differentially by gender is not well understood. although existing research highlights gender gaps in linkedin use(kashyap and verkroost 2021), with larger gaps in certain industries such as information technology (it)(haranko et al. linkedin, as the world's largest professional networking platform, is an important context in which to understand how men and women use online spaces differentially, because recent experimental work has shown that linkedin use has real job market implications for users. the authors also find technical, and computing skills reported on linkedin to be highly male-dominated. 2020). consistent with our age-and seniorityspecific analyses shown previously, these models also show that younger users, higher seniorities, as well as women have higher odds of reporting promotions, and that these gender differences in promotion reports persist even after we control for age and seniority. looking at the social connectiv- ity × gender interaction, we see a positive interaction (odds ratio higher than 1), which suggests that social connectivity has higher payoffs (stronger positive association) for women in predicting promotion. however, as shown in figure5, the gender gap in relocation among those that are socially connected to big tech firms on linkedin is smaller -indicating a higher payoff to social connectivity for women compared to men. 
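The promotion analysis summarised above (odds ratios, a social connectivity × gender interaction, controls for age and seniority) has the shape of a logistic regression such as the one below. The data here are entirely fabricated for illustration, and the variable names and model specification are assumptions rather than the authors' exact model.

```python
import numpy as np
import pandas as pd
import statsmodels.formula.api as smf

# entirely synthetic data, used only to illustrate the model form
rng = np.random.default_rng(0)
n = 5000
df = pd.DataFrame({
    "female": rng.integers(0, 2, n),
    "connected": rng.integers(0, 2, n),   # socially connected to big tech firms
    "age": rng.integers(22, 60, n),
    "senior": rng.integers(0, 2, n),
})
# build in a positive female x connectivity interaction on the promotion odds
lin = (-1.5 + 0.2 * df.female + 0.3 * df.connected
       + 0.4 * df.female * df.connected - 0.02 * (df.age - 40) + 0.3 * df.senior)
df["promoted"] = (rng.random(n) < 1 / (1 + np.exp(-lin))).astype(int)

# logistic regression with a connectivity x gender interaction term
model = smf.logit("promoted ~ female * connected + age + senior", data=df).fit(disp=False)
print(np.exp(model.params))  # odds ratios; 'female:connected' > 1 means connectivity
                             # is associated with a larger promotion payoff for women
```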
this study examines gender differences in the information technology (it) industry in two of the largest linkedin user populations of the uk and us, leveraging aggregated, anonymised data on the linkedin user population from its advertising platform.while the data preclude us from distinguishing whether the observed gender differences in promotion rates reflect differences in propensity to report promotions, or the actual prevalence of promotions among linkedin users, we offer two plausible interpretations of these findings, which are not mutually exclusive. further suggestive of these constraints is our finding that the social connectivity gap on linkedin between men and women is greatest during the childbearing ages. 2022;wheeler et al. for researchers, our study motivates further studies of user behaviours on linkedin in its own right as the largest professional networking platform, but also studies that examine how online networking is experienced and used by different disadvantaged social groups, and whether they reproduce or alter social inequalities experienced by them. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/971.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/971.txt new file mode 100644 index 0000000000000000000000000000000000000000..dd81c1317d6c7e3e6e79e2a6f46b02cbdf179539 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/971.txt @@ -0,0 +1 @@ +norms, which involve concepts such as obligation and permission, are an integral part of human society. they are enormously important in a variety of fields -from law and ethics to artificial intelligence (ai). in particular, imposing norms -be they ethical, legal or social -on ai systems is crucial, as these systems have become ubiquitous in our daily routines. a main difference between norms and other constraints lies in the fact that norms typically allow for the possibility of violation. reasoning with and about norms (normative reasoning) requires deontic logic, the branch of logic that deal with obligation and related concepts. normative reasoning comes with a variety of idiosyncratic challenges, which are often exemplified by benchmark examples (so called deontic paradoxes). a crucial challenge is reasoning about sub-ideal situations, such as contrary-to-duty (ctd) obligations, which are obligations only triggered by a violation. other challenges are associated, e.g., with defeasibility issues (norms having different priorities, exceptions, etc.).the first deontic system introduced -standard deontic logic -was failing most of the benchmark examples, (un)deriving formulas which are counterintuitive in a common-sense reading. this has motivated the introduction of a plethora of deontic logics, see, e.g. . these logics have been investigated mainly in connection with philosophy and legal reasoning, and with the exception of defeasible deontic logic ddl , they lack defeasibility and efficient provers. defeasibility and efficient reasoning methods are instead offered by answer set programming (asp), which is one of the most successful paradigms of knowledge representation and reasoning for declarative problem solving . indeed, in a long and systematic effort of the knowledge representation community, efficient solvers for fast evaluation of asp programs have been developed, see, e.g., . defeasibility is also inherent in asp, due to its default-negation and also weak constraints. 
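The weak-constraint mechanism mentioned here can be illustrated with a toy program driven from Python through the clingo package (assumed to be installed): a violable "do not eat a ghost" norm is encoded as an ASP weak constraint, so answer sets that violate it remain admissible but are penalised. This miniature encoding is our own illustration of the general idea, not the paper's norm base.

```python
import clingo

PROGRAM = r"""
% exactly one move is chosen
1 { move(up); move(down); move(left); move(right); move(stop) } 1.

% toy scenario: moving up could lead to eating a scared ghost
ghost_reachable(up).

% violable norm: eating a ghost is a violation, not a hard constraint
violated(no_eat_ghost) :- move(D), ghost_reachable(D).

% weak constraint: prefer answer sets with fewer norm violations
:~ violated(N). [1@1,N]

#show move/1.
#show violated/1.
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])

def on_model(model):
    # called for each candidate model; the optimal one avoids move(up), cost = [0]
    print(model.symbols(shown=True), "cost =", model.cost)

ctl.solve(on_model=on_model)
```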
this paper introduces a method for using weak constraints for encoding norms in asp. we first translate desired basic properties of deontic operators in a common core that will be used in all further encodings. these properties are established by analyzing multiple wellknown deontic paradoxes (e.g., ross paradox, the fence scenario,. . . ). by abstracting and generalising the encodings of the specific paradoxes, we provide a methodology for encoding normative systems in asp with weak constraints. the methodology is put to work on a case study from that involves a reinforcement learning agent playing a variant of the pac-man video game with additional "ethical" rules. our encoding is used as a "shield" to filter out the non compliant actions of pac-man and the outcome is compared with , that uses ddl. for space reason, we must omit details, for which we refer to . if pac-man ate one of the larger pellets, the ghosts enter a scared state and become vulnerable, allowing pac-man to eat them.figure2: pac-man following, we consider variants of the pac-man game with additional "ethical norms". vegan pac-man, in which pac-man is not allowed to eat any ghost has been introduced inand implemented there using multiobjective reinforcement learning (rl) with policy orchestration. vegetarian pac-man can eat the orange ghost (as it would be cheese) but not the blue ghost.vegan pac-man: to prohibit pac-man from eating any ghost, we can state:.vegetarian pac-man: to prohibit pac-man from eating the blue ghost, we state:.we encode the norm bases by forbidding pac-man to move in a direction if the ghosts are scared and moving there could lead to eat a ghost. furthermore, we forbid pac-man from stopping if a ghost could move into pac-man (and then be eaten). as pac-man and the ghosts can move at most one step at a time, we can derive that the manhattan distance between pac-man and a scared ghost must in this case be at least 1 and at most 2 (coordinates are integers). we encoded the norms by accounting for the locations of the ghosts relative to pac-man and forbidding pac-man to make moves that could lead to eating a ghost. the vegan norm base was implemented inwho trained two different models for pac-man; in one model he was trained to maximize the game score, and in the other to comply with the norms using respective data. note that it is not necessarily a ctd obligation, as eating a ghost may not violate an obligation if the ghost is blue.-o1 and o3 could be in conflict, as stopping after eating an orange ghost may lead to eating a blue ghost. for ghosts, as a scared ghost may move both towards and away from pac-man, we cannot encode ensuing conflicts directly. pac-man consumed more ghosts compared to the vegan norm base but fewer than the vegetarian norm base, and mostly orange ghosts. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/972.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/972.txt new file mode 100644 index 0000000000000000000000000000000000000000..d44810d8b5212a4ca4c3a0bcfcfcb92112d9afb3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/972.txt @@ -0,0 +1 @@ +in this study our method of analysis drawns on the different factors and abilities described in models such as the deductive cybercriminal profile model and the theoretical model of profiling a hacker . these models guide the collection of information required in order to create a holistic profile. 
in general, they propose that in order to form a psychological profile of an offender, different factors need to be considered: a) biological factors and the external environment which influences an individual; b) intelligence; c) personality; d) social abilities; and e) technical abilities. the theoretical model of profiling a hacker also includes factors such as: f) motivation for offending; g) the method of the attack; and h) the effectiveness of the attack.below we will present cases of persons identified in the literature (at one point or another) as real cyber offenders, and describe their characteristics, traits, motivations and behaviors. this approach will also allow for a reflection on the similarities and differences among the different cases. when analysing the cases, theories of the dark triad/tetrad , the hexaco model of personality and theories of crime will be utilised as well. readers should note that we intentionally do not directly name the persons that we present given the sensitivity of the topic. moreover, we present point in time analyses based on literature and existing reports. this is worth noting because people change (e.g., some once-famous hackers are now well-respected security professionals), and secondly, we rely on reports for our reflection (thus, rely on the accuracy of the reports we draw on). this area of research has often been referred to as cybercriminal (or online offender) understanding or profiling, and tends to mirror the offline, and more traditional action of criminal profiling.while there has been an increasing amount of research in the cybercriminal space, this topic that has received little socio-technical research emphasis in the technology community, has few concrete research findings, and is thus a prime area for development. bada & nursesummarized the outstanding challenges with specific mention of the need to explore the actions and personality traits apparent from certain online criminal behaviors; a factor also driven by the lack of studies drawing on actual data linked to behavioral profiles.the aim of this article therefore is to investigate cybercriminal activities and behavior from a socio-technical (psychology and human aspects) perspective, through reflecting on the state of the art as well as a series of notable cybercriminal case studies.cybercrime is often used in the media and in research to refer to a range of crimes conducted in the online (or cyber) space.cyber-dependent crimes are: "crimes that can be committed only through the use of information and communications technology ('ict') devices, where the devices are both the tool for committing the crime, and the target of the crime (e. for instance, research has examined the psyche of cybercriminalsand the theories behind why cybercrime occurs, and other work has investigated attackers in depth-be it on the presence of the hacktivist group anonymous onlineor nation state advanced persistent threats (apts). 
considering the psychology of perpetrators themselves, online criminal behavior has been related to psychopathy and other antisocial behaviors, persons high on machiavellianism (one of the three dark triad personality traits) have been shown as more likely to engage in criminal behavior, and we have found relationships cited between cybercriminal actions and conditions such as autism.theories of crime developed by the field of cyberpsychology such as the online disinhibition effectcan also be considered relevant to understanding why an individual may engage in online criminal acts, however, its usefulness depends on the type of cybercrime considered.characteristics and psychological traits: case 1's father was an early innovator at a technology lab so he grew up immersed in computers. eysenck's theory of crime proposes that personality traits such as psychoticism (being anti-social, aggressive and uncaring), extraversion (seeking sensation) and neuroticism (being unstable in behavioral patterns) indicate a personality susceptible to criminal behavior. however, in case 2 we may also see a similar pattern as in case 1, a sense of superiority seen in narcissistic personalities. research has posited that, "increased risk of committing cyber-dependent crime is associated with higher autistic-like traits"; however, a diagnosis of autism is not necessarily associated with an increased risk of committing such crime.there were a range of characteristics and psychological traits covered in the cases including boredom and challenges at school, lower social skills, instability in teenage years, and conditions such as asperger's syndrome. her research focuses on the human aspects of cybercrime and cybersecurity, such as profiling online offenders, studying their psychologies and pathways towards online deviance as well as the ways to combat cybercrime through tools and capacity building. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/973.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/973.txt new file mode 100644 index 0000000000000000000000000000000000000000..af079c1f8b90afb5e75748aa46df0fab5b3e774b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/973.txt @@ -0,0 +1 @@ +freedom of expression is a fundamental human right in democratic societies, as free access to discussion forums is essential for citizens to participate in politics. however, expressions can sometimes hurt people's sentiments. today, social media and comment sections on news sites are filled with uncivil expressions against various communities (coe et al., 2014;kenski et al., 2017;theocharis et al., 2020). 1 thus, a dilemma exists between the importance of freedom of expression in a democracy and the dignity of those who are offended by uncivil expressions.previous studies have demonstrated that attitudes toward freedom of expression and speech restrictions depend on countries and individuals. for example, in terms of intercountry differences, riedl et al. (2021) revealed that germans than americans are more prone to consider that law enforcement and an online platform bear the responsibilities of implementing measures against uncivil online comments. regarding gender differences, it was found that men tend to show more robust support than women for freedom of expression and less for speech restrictions (downs & cowan, 2012;lambe, 2004). 
furthermore, regarding personal values, those with individualistic values are more supportive of freedom of expression, while those with right-wing authoritarianism beliefs are less so (downs & cowan, 2012). however, while these studies have revealed the relationships between the characteristics of such countries or individuals and their attitudes toward freedom of expression or speech restrictions, there remains an ambiguity regarding the extent to which cognitive biases in intergroup relations exert an influence on these attitudes. to address this gap in the literature, the present study hypothesized that people support speech restrictions more strongly when exposed to incivility by out-group members than by in-group members. to evaluate this hypothesis, a pre-registered online survey experiment using a randomized controlled trial (rct) approach was conducted on a sample of japanese adults, and the results provided supporting evidence for it. therefore, investigating people's attitudes toward restrictions of uncivil political expression from the perspective of intergroup bias is crucial to maintaining liberal democracy. in terms of in-group favoritism, people are likely to perceive political expression by in-group members as more civil and worthy of protection as compared to expressions by out-group members. particularly, when people are exposed to uncivil political expressions from the out-group directed at the in-group, they are likely to perceive harm originating from the out-group. accordingly, the present study examines the following three hypotheses: hypothesis 1: people perceive uncivil political expression by out-group toward in-group as more uncivil than that by in-group toward out-group. hypothesis 2: people support speech restrictions more strongly against uncivil political expression by out-group toward in-group than against that by in-group toward out-group. hypothesis 3: the effect of exposure to uncivil political expression by out-groups (compared to that by in-groups) on the level of support for speech restrictions is mediated by the perceived level of incivility. , 2011) was conducted to estimate the average causal mediation effect (acme). this implies the presence of a mechanism wherein the distinction in the originator of the uncivil comment (in-group or out-group) leads to the difference in the perceived level of incivility associated with the uncivil comment, which in turn leads to the difference in the likelihood of supporting the restriction of the uncivil comment. the present study investigated whether people have intergroup cognitive biases when forming attitudes toward uncivil political expression and restrictions against them. an online survey experiment on a japanese sample found that people perceived uncivil political expression from the out-group toward the in-group as more uncivil than that from the in-group toward the out-group. furthermore, it was found that this difference led people to be more supportive of speech restrictions against uncivil political comments from the out-group toward the in-group than the other way around. however, the survey results also suggest that people perceive uncivil political expression from out-groups as harsher and thus support restrictions against them. this implies that it is plausible that individuals who generally advocate for restrictions on uncivil political expression may do so primarily by directing their attention toward uncivil expressions originating from the out-groups.
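For readers unfamiliar with the ACME mentioned above, the sketch below shows a deliberately simplified product-of-coefficients version with a percentile bootstrap, on fabricated data. The study itself uses a dedicated causal mediation framework, so treat this only as an illustration of what "the effect is mediated by perceived incivility" means computationally; all names and data are assumptions.

```python
import numpy as np

def acme_bootstrap(treatment, mediator, outcome, n_boot=2000, seed=0):
    """Rough product-of-coefficients estimate of the average causal mediation
    effect (ACME) with a percentile bootstrap interval.

    treatment : 0/1 array (in-group vs out-group source of the uncivil comment)
    mediator  : perceived incivility
    outcome   : support for restricting the comment
    """
    rng = np.random.default_rng(seed)
    t, m, y = map(np.asarray, (treatment, mediator, outcome))
    n = len(t)

    def acme(idx):
        T, M, Y = t[idx], m[idx], y[idx]
        X1 = np.column_stack([np.ones(len(idx)), T])        # M ~ T
        a = np.linalg.lstsq(X1, M, rcond=None)[0][1]
        X2 = np.column_stack([np.ones(len(idx)), T, M])     # Y ~ T + M
        b = np.linalg.lstsq(X2, Y, rcond=None)[0][2]
        return a * b                                         # indirect effect

    point = acme(np.arange(n))
    boots = [acme(rng.integers(0, n, n)) for _ in range(n_boot)]
    lo, hi = np.percentile(boots, [2.5, 97.5])
    return point, (lo, hi)

# toy synthetic data where incivility fully mediates the treatment effect
rng = np.random.default_rng(1)
T = rng.integers(0, 2, 500)
M = 2.0 * T + rng.normal(size=500)
Y = 1.5 * M + rng.normal(size=500)
print(acme_bootstrap(T, M, Y))
```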
therefore, if restrictions on uncivil political expression were to be strengthened by governments or website administrators based on such public opinions, it is likely that even the expressions by in-groups, which people thought should not be regulated, would be restricted. third, within the experiment, participants were solicited to express their attitudes regarding the imposition of restrictions on particular uncivil expressions that were presented immediately prior to the inquiry, rather than conveying their stance on the restriction of uncivil political expression in a broader context. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/974.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/974.txt new file mode 100644 index 0000000000000000000000000000000000000000..eab2c027a22118b9dc42a10af55c3ae495be1cad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/974.txt @@ -0,0 +1 @@ +"it was on the school server." vs."we did not know whom to ask."the million song dataset (msd) has been a cornerstone for audio-centric music information retrieval (mir) studies, such as music auto-tagging, with its significance widely acknowledged. however, access to audio data for this dataset (msd audio) is limited to peer-to-peer sharing since 2016, making it difficult to regard it as publicly available. as we will show, this limitation has led to disparities disadvantaging those affiliated with institutions that are personally less-connected within the mir community, either geographically or academically. this has jeopardized the principle of equality within the community, as well as the reproducibility and advancement of previous research .in this paper, we will address this issue based on anecdotal comments from one individual who contributed to the creation of the dataset, as well as 21 individuals who have attempted to access the msd audio. we collected these comments in two ways. first, we distributed a survey via the ismir mailing list, and those familiar with the dataset voluntarily participated. second, after identifying approximately sixty papers that utilized the msd audio for their experiments, we personally contacted the authors of each paper and invited them to either complete the survey or participate in an informal interview. however, access to audio data for this dataset (msd audio) is limited to peer-to-peer sharing since 2016, making it difficult to regard it as publicly available.in this paper, we will address this issue based on anecdotal comments from one individual who contributed to the creation of the dataset, as well as 21 individuals who have attempted to access the msd audio.based on anecdotes from researchers who attempted to access audio previews using the api, comprehensively scraping audio previews was nearly impossible without significant financial resources or technical expertise. this misinformation further confused researchers outside of major organizations within the mir community, leading to numerous unsuccessful attempts to obtain data by web scraping.in this section, we will analyze anecdotal comments from 21 individuals who have tried accessing msd audio, which we collected through surveys or interviews, where we asked about the methods, results, and the approximate timings of their attempts, as well as their affiliations and professions at the time of their attempts, to address the issue of unequal accessibility. 
peer-to-peer sharing instances that we identified primarily have occurred between organizations that owned the data and those closely connected to them, either geographically or academically, with the most recent instance of peer-to-peer sharing occurring in 2016. all individuals who attempted to obtain the dataset in 2017 or later and succeeded (5 individuals) were affiliated with organizations that owned the data. conversely, all those from organizations without the data who tried in 2017 or later (6 individuals) experienced at least one unsuccessful attempt, where two of these individuals ultimately abandoned the research project they had initially planned. one respondent evidenced this by reporting that they had no access to this data while employed at an organization with less than 50 employees, but immediately obtained access upon moving to an organization with more than 500 employees. of the 7 unsuccessful peerto-peer sharing attempts we identified from the anecdotes we collected, five came from relatively less active organizations within the mir community, without first-or lastauthor papers at the ismir conference in the past three years. on the other hand, all of the organizations we identified to own access to this data are notably either prominent within the mir community, with at least five papers accepted at the ismir conference in the past three years, or western-based, as provided in table1.in this paper, we delved into the unequal accessibility issue concerning msd audio, which divides the mir community into those who can access the data and those who cannot. this has disproportionately affected researchers who are not closely connected to the data owning organizations and those with limited research experience, sidelining minorities within the mir community.our data of those who had access to msd audio is quite comprehensive, as there are few papers that are based on the dataset in the early years.this situation, ever since 2011, challenges us to imagine how much mir research could have been done if msd audio was available more widely and equally, and how many potential mir researchers could have had a more active and successful research career. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/975.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/975.txt new file mode 100644 index 0000000000000000000000000000000000000000..adba7764cc8fe813ddcb71499f683e2f90ba2df4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/975.txt @@ -0,0 +1 @@ +the challenges educators have to face nowadays include highly increasing numbers of students in stem, scarce staff resources, and heterogeneous groups of learners with their need for formative feedback , . hence, the educators' need for automated systems that support their teaching process is growing. many of these systems focus on providing automated feedback, but this feedback is often still basic and not tailored to the learners' needs - .in late november 2022, an easily accessible tool using a large language model (llm) was released, known as chat-gpt. the tool is designed to engage in conversations with humans and seems to provide impressive results in natural language discussions, the generation of texts, and other tasks. despite its weaknesses, its performance and error analysis in introductory programming tasks seem promising , . 
although we do not yet know a lot about its impact, the computing education community needs to explore the potential of llms and related tools for teaching, learning, and assessing. this work-in-progress paper addresses this demand by exploring the potential of an llm-based tool for generating § equal contribution formative feedback on programming tasks. in particular, we investigate 99 generated responses to student input comprising solutions to introductory programming exercises and an accompanying question about the student solution. the research questions of this work are: (rq1) how does an llm system like chatgpt respond to students seeking help with their introductory programming tasks? (rq2) which feedback types can be recognized in the responses? ii. related work while llms have only appeared recently, several researchers have started studying their implications for education, among them an iticse working group . this section introduces recent work in the context of computing education specifically. one of the first papers appeared in early 2022 , describing the performance of openai's codex model on cs1 programming exercises, showing that it can compete well with actual students. however, the codex model is currently not accessible anymore, because newer and better models quickly followed. the rapid replacement of models shows that studies become outdated quickly, and are hard to replicate, especially if research data is not available (anymore) . a follow-up study on cs2 exercises also showed impressive results .becker et al. discuss several opportunities and challenges regarding the educational opportunities and challenges of llms. they urge that we "need to review our educational practices in the light of these new technologies." kiesler and schiffner further derive chatgpt's implications for assessment. at the same time, llms are already applied in computing education to generate exercises and code explanations , , or to improve compiler error messages .llms have also been used to repair buggy programs. zhang et al. applied the codex completion model to student python programs and found that an llm trained on code (i.e., codex) can fix both syntactic and semantic mistakes. denny et al. explored the engineering of prompts using github copilot. their goal was to identify types of problems where copilot does not perform well, and how to phrase questions in natural language to achieve good results.further studies directly involved students. kazemitabaar et al. , for example, conducted experiments with novice programmers. half of them had access to codex when working on programming tasks. their results show that student who used codex significantly increased their code-authoring performance. prather et al. investigated students' use of github copilot in an introductory programming assignment through observation and interviews. they conclude that novices still experience challenges when using copilot, and that copilot's design should be improved.in this study, we use a different approach, as we generate feedback with chatgpt on student submissions to small programming problems. we then explore and categorize the generated output in detail, as opposed to simply assessing whether an ai model can fix the problem. while conducting our study, some new related works appeared - . the research questions of this work are: (rq1) how does an llm system like chatgpt respond to students seeking help with their introductory programming tasks? 
(rq2) which feedback types can be recognized in the responses? ii.in this study, we use a different approach, as we generate feedback with chatgpt on student submissions to small programming problems.to answer rq1 and explore the generated feedback, the selected student solutions were used as an input to chatgpt (march 23 version), accompanied by the prompt "what's wrong with my code?" followed by the code of the student's submission.for the exploration of how the feedback deviates upon regeneration, we generated three responses for each submission (using the "regenerate"-option), by one author, on the same machine, in the same browser, and in one chat per student.to answer rq2 relating to feedback types, we refer to an existing typology of elaborated feedback in the context of programming education, based on narciss' feedback classification.for example, chatgpt did not provide any feedback within the category of ktc feedback.in addition, we noted that chatgpt provides knowledge about correct response feedback (kcr), which is one of the simple feedback types,. even though many established feedback types were applicable in our analysis of chatgpt's responses, it may be useful to expand the feedback typology.the results described in this paper show that llms like chatgpt have the potential to address the discrepancy between learners' need for formative feedback on the root causes of an error. moreover, we only explored the characteristics of the feedback based on four tasks and available student solutions from one introductory programming course.in this work-in-progress, we explored the potential of large language models such as chatgpt to generate formative feedback to novice learners of programming. however, there are limitations to chatgpt's feedback quality on logic and semantic errors, or if multiple errors are contained in a student solution.as the next step of this work-in-progress, we will evaluate the quality of feedback generated by llm-based tools by comparing the generated output with expert feedback on these tasks. future work will also comprise an investigation of how we can engineer prompts to generate correct solutions from a student input, and to generate specific types of feedback. another continuation of this work is the combination of the presented feedback characterization with the existing feedback typology for the context of programming. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/976.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/976.txt new file mode 100644 index 0000000000000000000000000000000000000000..b7791d17cb17de86ce2baf6a8fbcf4457f3ccdd4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/976.txt @@ -0,0 +1 @@ +geospatial science has developed as a vibrant field characterised by intellectual vigour, conceptual expansion, and improved analytical skills as a consequence of the quantitative revolution in the subject of geography through a spatially integrated socio-environmental science that outshines prior disciplinary ties, borders, and limitations (berry et al., 2008). geospatial science, commonly referred to as geomatics (aina 2012), is a multidisciplinary discipline that focuses on comprehending, analysing, and visualising spatial data about the earth's surface using information technology to describe the connections between geography, individuals, places, and earth processes. 
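to make the feedback-generation procedure of the programming-feedback study above (975.txt) concrete: that study used the chatgpt web interface (march 23 version) and its "regenerate" option to obtain three responses per submission, so the sketch below only approximates the workflow through the api; the model name, prompt framing, and sample student code are illustrative assumptions, not the study's actual setup.

# rough api-based approximation of the described workflow (the study itself used
# the chatgpt web ui); model name and student code below are illustrative only.
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

student_submission = """
def mean(values):
    return sum(values) / len(values) + 1   # hypothetical buggy student solution
"""

response = client.chat.completions.create(
    model="gpt-3.5-turbo",                  # placeholder model choice
    messages=[{
        "role": "user",
        "content": "what's wrong with my code?\n" + student_submission,
    }],
    n=3,                                    # stand-in for three "regenerate" runs
)

for i, choice in enumerate(response.choices, start=1):
    print(f"--- generated feedback {i} ---")
    print(choice.message.content)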
technologies like global positioning system (gps), geographic information systems (gis), and remote sensing are frequently used as observational, measuring, and analytical tools, helping in the understanding of numerous events by providing the information with a spatial context. geospatial technology is being used increasingly in every industry today, including resource management, disaster management, forestry, logistics, infrastructure planning, and the study of climate change and other environmental issues (dangermond and goodchild, 2020). geospatial technology and the information created are becoming increasingly significant in all economic sectors, making the economy, society, and the environment an indispensable pillar of sustainable development. (scott and rajabifard, 2017). thus, geospatial science and technology support disaster management (ghosh and mukherjee, 2023), infrastructure planning, environmental monitoring (sanyal and chowdhury, 2023), as well as location-based services. digital twin's concept involves creating virtual models of physical objects and systems to reproduce their real-world counterparts as accurately as possible. they capture both the static and dynamic behaviour of objects or systems. digital twin finds their applications in various fields, starting from real-time monitoring of objects, maintenance and optimisation of systems, designing prototypes virtually before building them to simulating and predicting climate change and monitoring the performance of aircraft and grid systems. in the backdrop of facing the change and modernisation of manufacturing sectors and changing to smart manufacturing, the digital twin, as a novel technological tool for implementing smart manufacturing, has drawn numerous scholars' research and discussion. although this notion has been offered for some time, there are few project uses of a digital twin as a technical instrument, the referenceable expertise is barely any, and the reference content is the primarily theoretical and conceptual study (zhou et al., 2022). geospatial digital twin (gdt) emphasizes the geospatial attributes of the geographical settings, incorporating precise location and spatial layers for building a comprehensive knowledge of the spatial environment and its entities. therefore, the geospatial concept and network between the entities are one of the core parts of gdt. thus, implementing gdt is not a straightforward process and needs a variety of spatial computing technologies (eunus et al., 2023), including graph theory.graph algorithms are used extensively in location-based services and analysis (wang et al., 2019). graph algorithms analyse spatial connections and relationships between two points (demšar et al. 2008) or locations. graph theory is the study of the mathematical structures known as graphs, which are used to represent pairwise interactions between objects (singh 2014) (table 1). graphs may be spatial or non-spatial graphs, which further contain both directed and undirected graphs with weighted and unweighted components (table 2). real spatial or non-spatial networks possess characteristics specific to one of the following four graph types: regular, random, small world, and scale-free (anderson and dragićević 2020). 
an adjacency matrix can be used to depict the organisation of these graphs (anderson et al., 2020).the initial use of graph theory (euler et al., 1741), 200 years earlier, was a location-based problem known as "seven bridges of konigsberg", where euler demonstrated that it was impossible to travel over all seven of the bridges that connect the islands without ever using the same bridge twice. this approach, also known as network science, is shown by the shortest path routing algorithms (table 3) between two points (dijkstra et al., 1959). the theorem was developed as a result of euler's discoveries, which served as the cornerstone of network science. the findings also led to the conclusion that graph theory may be used to uncover and represent many structural properties (anderson et al., 2020). in the 19 th century, cayley's studies formed the beginning of enumerative graph theory, using trees as the types of graphs, and focused on calculating the number of certain types of graphs (bell et al., 2015). social network analysis, one of the earliest fields of application (de nooy et al., 2005), was where the three types of centrality metrics were initially established (freeman et al., 1979). in social networks, vertices stand in for people or institutions, and edges show their connections to one another. in social network analysis, a person's reachability-or how readily information can go to that personis described by their degree and closeness centralities. journey from then till today, graph theory has extended its applications from social media network cybersecurity to fields of bioinformatics and cryptography. with the increase in data connectedness and breakthroughs in graph technology, valuable insights are obtained when integrated with queries, statistics, algorithms, ml, and ai (anderson et al., 2020).the objective of this article is to understand some fundamental concepts and examples of graph algorithms, their applications in geospatial science (figure 1 and table 4), digital twin, and the methods by which geographic data, network sciences, and graph algorithms can be used to represent, analyse, and simulate complex geographical systems for better decision-making.the objective of this article is to understand some fundamental concepts and examples of graph algorithms, their applications in geospatial science (figure1and table4), digital twin, and the methods by which geographic data, network sciences, and graph algorithms can be used to represent, analyse, and simulate complex geographical systems for better decision-making. this method is a graph transformation algorithm that has the freedom to use any shortest path algorithm. the methodology applies a unique structure of semantics graphs and efficient data processing technologies. current research trends include understanding the spatial dynamics of objects operating on spatial network structures, including network theory as gas, and comprehending the intricately intertwined relationship between spatial structure and space-type dynamics. determining a bgi network with a preference for stormwater management, detecting via satellite pictures patches of blue and green, and identifying prospective bgi corridors using graph theory were the three stages that were taken. 
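a minimal networkx sketch of the graph notions referenced in this file, a weighted graph, its adjacency matrix, dijkstra's shortest path as used for evacuation routing, and closeness centrality; the node names and edge weights are invented for illustration.

import networkx as nx

# toy weighted, undirected spatial graph; nodes and weights are made up
G = nx.Graph()
G.add_weighted_edges_from([
    ("depot", "junction_a", 2.0),
    ("junction_a", "shelter", 4.5),
    ("depot", "junction_b", 3.0),
    ("junction_b", "shelter", 1.5),
])

# adjacency-matrix representation of the same graph
A = nx.to_numpy_array(G, weight="weight")
print(A)

# dijkstra's least-weight path, e.g. a shortest evacuation route, and its length
print(nx.dijkstra_path(G, "depot", "shelter", weight="weight"))
print(nx.dijkstra_path_length(G, "depot", "shelter", weight="weight"))

# closeness centrality, one of the classic centrality metrics mentioned above
print(nx.closeness_centrality(G, distance="weight"))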
the study informed us of the challenges in allocating resources into a successful supply chain. according to the research, a precise and effective evacuation plan that includes numerous evacuation routes while promptly considering changing road conditions is critical to limiting damage. as a result, dijkstra's algorithm is performed from one place within the buffer to every recognised safe destination, and the shortest travel-time path and several alternate paths are estimated. for visualisation purposes (demšar et al. 2008), the geometry of the network's elements (i. a graph signal reconstruction model is superimposed on a graph that was learned from the data in this article's proposed signal reconstruction framework for air pollution monitoring data.the study of social phenomena using social network analysis, which uses network and graph theory, has been proven to be extremely useful in fields like criminology. the applications of graph theory in geospatial research are anticipated to rise further with the continuous development of technology and the increasing complexity of geographical data, making it a dynamic and essential topic for addressing the linked spatial concerns of our world. this article underscores the critical role of graph theory algorithms in addressing real-world geospatial challenges, emphasising their practical significance and potential for future innovations in spatial analysis and management, including the geospatial digital twin concept. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/977.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/977.txt new file mode 100644 index 0000000000000000000000000000000000000000..9775d4e1a46be6383a8553215e1f4b7b0904d51a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/977.txt @@ -0,0 +1 @@ +the personality of an individual refers to the specific collection of psychological constructs which dictates the visible differences in different human beings in terms of behavior and reaction in particular environments and also dictates the thought process which leads to these different behavioral outcomes (as defined in roberts and mroczek (1)). many researchers have recently tried automatic personality detection with little success primarily because the task is inherently difficult, requiring a thorough understanding of sentence constructs, sentiment toward targets, and its connection to behavioral outcomes. sentiment analysis alone can be a very challenging task due to abundance of aspects and sparsity of labelled data (2,3). moreover, most research has been carried out on small datasets. since the expression of a specific personality can have a wide range, small datasets often are unable to capture this variety and thus fail to provide the model with a sufficient inductive bias to learn.furthermore, the models used till now lack task-specific design which is essential to solving a complex problem. attempts for personality modeling ranged from traditional methods like questionnaires to nlp-based approaches. the two widelyused personality models are the big five personality traits (ocean model), coming from sir francis galton's line of work (as described in goldberg (4), rothe (5), rushton (6)) based on linguistically predictive personality types having 5 personality dimensions and the myers-briggs type indicator (mbti) personality modeling, based on carl jung's theory, containing four personality dimensions as proposed in jung (7). 
while there has been considerable work with the first kind of personality types being invented to be used by linguists, works on mbti personality types are lacking. we hope to bridge this gap by introducing the largest automatically collected dataset for mbti personality types. our contributions in this paper are as follows.1) we introduce the largest dataset for personality detection with mbti personality types. we perform all our analyses and automatic classification using the functional personality groups. however, we opensource this dataset in the original form with nuanced attributes containing all the individual 16 personalities for the community to fuel further research and exploration. 2) we perform several quantitative and qualitative studies to analyze the dataset. we introduce novel features like hashtags, urls, and mentions embeddings, and show how they correlate with an individual's personality. we analyze personality types in several derivative dimensions like professions, readability, and empath features. 3) we test several machine learning models on the task of predicting mbti personality types from twitter profile data. we fine-tune different models taking individual inputs to make better embeddings and use these embeddings to train another model finally enhancing the prediction accuracy. the best accuracy is achieved by a simple random forest classifier over fasttext embeddings. 4) we perform a series of ablation studies to understand which features are important for the task. we show that the hashtags used by the users, their empath features, and their tweets are the most important features. we also show the impact of data quality and the number of tweets on the model's prediction performance.the personality of an individual refers to the specific collection of psychological constructs which dictates the visible differences in different human beings in terms of behavior and reaction in particular environments and also dictates the thought process which leads to these different behavioral outcomes (as defined in roberts and mroczek(1)). the two widelyused personality models are the big five personality traits (ocean model), coming from sir francis galton's line of work (as described in goldberg (4), rothe (5), rushton(6)) based on linguistically predictive personality types having 5 personality dimensions and the myers-briggs type indicator (mbti) personality modeling, based on carl jung's theory, containing four personality dimensions as proposed in jung(7). while there has been considerable work with the first kind of personality types being invented to be used by linguists, works on mbti personality types are lacking.1) we introduce the largest dataset for personality detection with mbti personality types. 3) we test several machine learning models on the task of predicting mbti personality types from twitter profile data. previous research on the prediction of personality uses twitter, instagram, and facebook data include some feature-based techniques such as liwc(11), splice (structured programme for linguistic cue extraction)(12), sna (social network analysis)(13), as well as time-based features(14). the 2014 workshop on computational personality recognition hosted a shared task of personality detection on 442 youtube video logs(18). we collect this data using the twitter api and filter out the cases where the same person has ever shared different personality type links. 
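a minimal sketch of the best-performing configuration reported above, a random forest classifier over fasttext embeddings predicting the four functional groups; the embeddings here are random stand-ins for per-user fasttext vectors, so nothing below reproduces the paper's actual features or data.

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# X: one fasttext-style embedding per user (assumed precomputed elsewhere)
# y: the user's functional personality group
rng = np.random.default_rng(0)
X = rng.normal(size=(1000, 300))                       # stand-in embeddings
y = rng.choice(["analysts", "diplomats", "sentinels", "explorers"], size=1000)

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=0)

clf = RandomForestClassifier(n_estimators=300, random_state=0)
clf.fit(X_tr, y_tr)
print("held-out accuracy:", accuracy_score(y_te, clf.predict(X_te)))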
• difficulty in measuring some personality traits: the mbti measures personality traits that are not necessarily easily observable, such as intuition or sensing. using tweets to detect mbti personality types is an interesting and innovative approach but the above limitations can introduce inaccuracies and errors in the predictions.in this work, we released the largest automatically curated twitter dataset for personality detection for mbti personality types. then we classified twitter users into personality types -analysts, diplomats, sentinels, and explorers using the latest 3200 tweets and profile information. we derived new features from the tweets to capture user personality, as well as computed embeddings from the urls, hashtags, and mentions. while text-based data such as tweets can provide valuable insights into a user's personality, incorporating audio and video data from social media platforms such as youtube and tiktok could provide additional information. in addition, we would also like to explore the potential of multi-dimensional classification to provide more granular information about the personality type of a twitter user. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/978.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/978.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef7282a63825b5e503df1285923a461c86be8abe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/978.txt @@ -0,0 +1 @@ +the notion of individual fairness is a formalization of an ethical principle, "treating like cases alike, " which has been argued such as by aristotle. in a fairness-aware machine learning context, dwork et al. firstly formalized the notion.in their formalization, a similar pair of data in an unfair space should be mapped to similar positions in a fair space.we propose to re-formalize individual fairness by the statistical independence conditioned by individuals. this reformalization has the following merits. first, our formalization is compatible with that of dwork et al. second, our formalization enables to combine individual fairness with the fairness notion, equalized odds or sufficiency, as well as statistical parity. third, though their formalization implicitly assumes a pre-process approach for making fair prediction, our formalization is applicable to an in-process or post-process approach.the notion of individual fairness is a formalization of an ethical principle, "treating like cases alike, " which has been argued such as by aristotle. first, our formalization is compatible with that of dwork et al.after showing formalization of individual fairness by dwork et al.individual fairness is one of fairness criteria, and dwork et al. formalized as follows. an instance, x, is assumed to contain the information relevant to an individual except for the individual's sensitive information, and x is considered as a representation of the individual.we finally show that our formalization of individual fairness is compatible with that of dwork et al.(4), and our formalization of individual fairness is compatible with fairness through unawareness. hence, they implicitly omit the sensitive information in prediction, and their method satisfies a condition of fairness through unawareness. 
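one compact way to write the re-formalization sketched in this file, assuming ŷ is the model's prediction, s the sensitive attribute, y the true label, x the individual's non-sensitive representation, and ⊥ statistical independence; the notation is an assumption for illustration, not the paper's own.

% group-level criteria and their individualized versions obtained by conditioning on x
\hat{Y} \perp S                 \quad \text{(statistical parity)}
\hat{Y} \perp S \mid Y          \quad \text{(equalized odds)}
Y \perp S \mid \hat{Y}          \quad \text{(sufficiency)}
\hat{Y} \perp S \mid X          \quad \text{(individual statistical parity)}
\hat{Y} \perp S \mid Y, X       \quad \text{(individual equalized odds)}
Y \perp S \mid \hat{Y}, X       \quad \text{(individual sufficiency)}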
they are similar to non-sensitive features, but all the features are assumed to be legal if they are not sensitive.if we extend a notion of individual fairness by treating it as conditioning a fairness criterion by x, we can create an individualized version of the criterion.conditioning by x enables these two fairness criteria to be converted into individual versions.as described above, criteria designed so that fairness is maintained at a group level can be converted to the corresponding criteria at an individual level by conditioning on non-sensitive features.the approach of dwork et al. is a kind of pre-process, and their formalization of individual fairness presupposes that approach. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/979.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/979.txt new file mode 100644 index 0000000000000000000000000000000000000000..5aec3be73afba100a35880da38f2ed197b6dbe4e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/979.txt @@ -0,0 +1 @@ +chatbots (e.g., chatgpt , developed by openai) are based on large language models (llms) and designed to understand and generate human-like text from the input they receive. as artificial intelligence (ai) technologies, including llms, become more deeply integrated into various sectors of society , their moral judgments are increasingly scrutinized. the influence of ai is pervasive, transforming traditional paradigms, and ushering in new ethical challenges. this widespread application underscores the importance of machine ethics, which mirrors human ethics . beyond the realm of traditional computer ethics, ai ethics probes further by examining the behavior of machines toward humans and other entities in various contexts .understanding ai's capacity for moral judgment is particularly crucial in the context of autonomous driving . since the automotive industry anticipates incorporating ai systems such as chatgpt and other llms to assist in autonomous vehicles' (avs) decision-making processes , the ethical implications intensify. in certain situations, these vehicles may rely on ai to navigate moral dilemmas, such as choosing between passengers' or pedestrians' safety, or deciding whether to swerve around obstacles at the risk of endangering other road users. recognizing the potential consequences and complexities of these decisions, researchers initiated the moral machine (mm) experiment , an experiment designed to gauge public opinion on how avs should act in morally challenging scenarios. the findings from the mm experiment suggest a discernible trend favoring the preservation of human lives over animals, emphasizing the protection of a greater number of lives and prioritizing the safety of the young. although we must be careful when interpreting the results of the mm experiment , these preferences are seen as foundational to machine ethics and essential considerations for policymakers . the insights gained from this study emphasize the importance of aligning ai ethical guidelines with human moral values.the methodology employed in the mm experiment presents a promising avenue for exploring the moral decision-making tendencies of llms, including chatgpt. by examining the llm responses to the scenarios presented in the mm experiment and contrasting them with human judgment patterns, we can gain a deeper insight into the ethical frameworks embedded within these ai systems. 
such analyses may reveal inherent biases or distinct decision-making trends that may otherwise remain obscure. whereas research has delved into chatgpt's reactions to standard ethical dilemmas , such as the classic trolley problem , the intricate situations posed by the mm experiment offer a more profound exploration of llm moral reasoning. however, the comprehensive application of this evaluative framework remains underrepresented in contemporary studies, signaling it to be a pivotal subject for future research.therefore, using the mm methodology, this study seeks to elucidate the patterns in llms' responses to moral dilemmas. we investigated representative llms with a specific focus on chatgpt (including gpt-3.5 and gpt-4), palm 2 , google bard's core system, and llama 2 , an open-source llm with various derived chat models. furthermore, we evaluated the differences in the response tendencies among these llms and assessed their similarity to human judgment tendencies. these scenarios, designed through constrained randomization, explored six primary dimensions: species (saving either people or pets), social value (choosing to save characters with perceived higher social value, such as pregnant women or executives, and those perceived as having lower value, such as criminals), gender (choosing to save female or male characters), age (choosing to save younger or older characters), fitness (choosing between physically favored characters, such as athletes or less fit individuals, e. the amce values represent each preference as follows: 'species, ' where a positive value signifies sparing humans and a negative value denotes sparing pets; 'social value, ' where a positive value indicates sparing those of higher status and a negative one those of lower status; 'relation to av, ' with a positive value for sparing pedestrians and a negative for sparing passengers; 'no. characters', where a positive value shows sparing more characters and a negative fewer; 'law, ' where a positive value means sparing those acting lawfully and a negative those acting unlawfully; intervention, with a positive value for inaction and a negative for action; 'gender, ' where a positive value suggests sparing females and a negative one, males; 'fitness, ' with a positive value for sparing the physically fit and a negative for the less fit or obese individuals; and 'age, ' where a positive value indicates sparing the young and a negative the elderly.to assess the similarities or differences between the preferences of the llms and human preferences reported in, we conducted further analyses using the amce values for the nine attributes. a consistent trend across most llms was the inclination to prioritize humans over pets and save a larger number of individuals, aligning closely with human preferences. another consistent trend across the llms, except for llama 2, was the mild preference to spare less fit (obese) individuals over fit individuals (athletes); however, this was inconsistent with human preferences. for example, palm 2 uniquely showed a slight inclination to save fewer people and favor individuals of a lower social status over those of higher status, which diverged from human and other llms' preferences. moreover, llama 2's subtle preferences, such as a mild inclination to save males over females, and those violating the law over law abiders, deviated from both the other llms and human tendencies. 
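a much-simplified sketch of how an amce-style preference score can be tabulated from scenario responses; the original moral machine analysis uses a conjoint, regression-based estimator, so the difference-in-means shortcut, column names, and toy data below are illustrative assumptions only.

import pandas as pd

# each row describes one character in a presented dilemma; 'spared' is 1 if the
# respondent (human or llm) chose the outcome that saves this character. toy data.
df = pd.DataFrame({
    "species": ["human", "pet", "human", "pet", "human", "pet"],
    "spared":  [1,       0,     1,       1,     0,       0],
})

# crude amce-style estimate for the 'species' attribute:
# a positive value indicates a tendency to spare humans over pets
amce_species = (df.loc[df.species == "human", "spared"].mean()
                - df.loc[df.species == "pet", "spared"].mean())
print("species preference (humans vs. pets):", amce_species)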
while gpt-4 displayed tendencies that were somewhat aligned with human preferences, particularly in its preferences for law-abiding individuals and those of higher social status, gpt-3. pca also facilitated a detailed assessment of the alignment of each llm's preferences with human tendencies, even when considering the relationships between llms.to understand the underlying rationale for the distinct preferences exhibited by llms compared to humans, a focused analysis was conducted on palm 2, which displayed the most pronounced divergence from human preferences.the alignment of most llms (particularly the chatgpts) with human preferences (figures1and2), especially in valuing human lives over pets and prioritizing the safety of more individuals, suggests their potential suitability for applications in autonomous driving, where decisions aligned with human inclinations are crucial. the pronounced preferences of llms in certain scenarios, compared to the milder inclinations of humans, may indicate the models' tendency to make more uncompromising decisions. while certain llms, such as chatgpt, demonstrate a promising alignment with human preferences, the discrepancies observed among the different llms underscore the necessity for a standardized evaluation framework. to compare the llm preferences with human preferences, we utilized global moral preferences derived from opinions gathered worldwide. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/98.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/98.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c316a44f1a24b3f1967bbe445caa224dc61eb75 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/98.txt @@ -0,0 +1 @@ +researchers are charged with keeping on top of immense, rapidlychanging literatures. naturally, then, reading constitutes a major part of a researcher's everyday work. senior researchers, such as faculty members, spend over one hundred hours a year reading the literature, consuming over one hundred papers annually . and despite the formidable background knowledge that a researcher gains over the course of their career, they will still often find that papers are prohibitively difficult to read. as they read, a researcher is constantly trying to fit the information they find into schemas of their prior knowledge, but the success of this assimilation is by no means guaranteed . a researcher may struggle to understand a paper due to gaps in their own knowledge, or due to the intrinsic difficulty of reading a specific paper . reading is made all the more challenging by the fact that scholars increasingly read selectively, looking for specific information by skimming and scanning . we are motivated by the question: can a novel interface improve the reading experience by reducing distractions that interrupt the reading flow? this work takes a measured step to address the general design question by focusing on the specific case of helping scholarphi augments the reading experience with this and a host of other features (see section 4) to assist readers.readers understand cryptic technical terms and symbols defined within a paper, which are called "nonce words" in the field of linguistics. formally, a nonce word is a word that is coined for a particular use, which is unlikely to become a permanent part of the vocabulary . 
because a nonce word is localized to a specific paper, a reader cannot know precisely what it means when they start reading the paper. because it is only intended for use within a single paper, it is likely to be defined somewhere within that same paper, but finding that definition may require significant effort by the reader. by their nature, nonce words are an interesting focus for augmenting reading tools because readers will have questions about them, and those questions will be answerable (exclusively) by searching the text that contains them. two aspects of nonce words constrain the design of any reading application that is built to define them. first, they are numerous: a paper can contain hundreds of them. indeed, a single passage or table may contain a dozen terms closely packed together (see figure 2). in such settings a reader is likely to have demands on their working memory and may also want to see definitions for multiple nonce words in the same vicinity. second, nonce words are sometimes assigned multiple definitions within the same paper. one example is a symbol like 𝑘, which over the course of a single paper may variously stand for a dummy variable in a summation operation, the number of components in a mixture of gaussian models, and the number of clusters output by a clustering algorithm (see the scenario in section 4). these two aspects of nonce words raise the question of whether conventional solutions for showing definitions of terms (e.g., the electronic glossaries explored in second-language learning research or wikipedia's page previews ) also suit a researcher who is puzzling their way through dense, cryptic, ambiguous notation. in this work, we introduce scholarphi, a tool that helps readers efficiently access definitions of nonce words in scientific papers. the larger vision of scholarphi is to help scientists more easily read papers by linking relevant information to its location of use. we envision the tool eventually providing access to the contents of cited papers, and definitions external to the paper. the current paper focuses on one portion of this problem: the design and evaluation of interfaces for understanding nonce words.this paper begins with a formative study of nine readers as they read a scientific text of their own choice (section 3.1). most readers expressed confusion at nonce words in their texts. many readers were reluctant to look up what the words meant, given the anticipated cost of doing so. this inspired the subsequent design of a tool that could have answered those readers' questions while reducing friction so that readers would actually use the tool.we then describe design motivations for a new reading interface (section 3.3) that are grounded in insights from four pilot studies with early prototypes, conducted with 24 researchers. key insights from the research include the importance of tailoring definitions to the passage where a reader seeks to understand a nonce word, and the competing goals of providing scent (i.e., visual cues ) of what is defined without distracting from a reading task that is already cognitively demanding on its own. building on the motivations found in the pilot research, a user interface is presented (section 4). the basic design of scholarphi is one of an interactive hypertext interface. a reader's paper is augmented with subtle hyperlinks indicating which nonce words can be clicked in order to access definition information. 
readers can click nonce words to access definitions for those words in a compact tooltip (figure 1). these definitions are position-sensitivethat is, if there are multiple definitions of a nonce word in the text, scholarphi uses the heuristic of showing readers the most recent definition that appears before the selected usage of the word. definitions are also linked to the passage they were extracted from: a reader can click on a hyperlink next to the definition to jump to where it appears in the paper. in addition to definitions, the tooltip makes available a list of all usages of the nonce word throughout the text, as well as a special view of formulae that include the word. beyond these basic affordances, scholarphi provides a suite of features, each of which provides readers with efficient yet nonintrusive methods for accessing information about nonce words. first, scholarphi provides efficient, precise selection mechanics for selecting mathematical symbols and their sub-symbols through single clicks, rather than error-prone text selections (section 4.1). second, scholarphi provides a novel filter over the paper called "declutter" that helps a reader search for information about a nonce word by low-lighting all sentences in the paper that do not include that word (section 4.2). third, scholarphi generates equation diagrams and overlays them on top of display equations, affixing labels to all symbols and sub-symbols in the equation for which definitions are available (section 4.3). the final feature is a priming glossary comprising definitions of all nonce words that appear in a paper, prepended to the start of the document (section 4.4).the emphasis in the design of each of these features is on acknowledging the inherent complexity of the setting of scientific papers, and hence designing features for looking up definitions that are easy to invoke and minimally distracting.to enable these features, new methods were introduced for analyzing scientific papers in order to make nonce words interactive. a paper processing pipeline was built that automatically segments equations into symbols and their sub-symbols, detects all usages for a nonce word, and detects precise bounding box locations of nonce words so that they may be clicked. the implementation of the pipeline is described in section 5. the suitability of contemporary definition extraction algorithms is discussed, highlighting a need for improvements to technologies for definition extraction.this work concludes with a controlled usability study with twenty-seven researchers (section 6). researchers were observed as they used three versions of scholarphi-one with all the features described above, one with only the "declutter" feature, and one that behaved exactly like a standard, un-augmented pdf reader.when readers had access to scholarphi's features, they could answer questions about a scientific paper in significantly less time, while viewing significantly less of the paper in order to come to an answer. they reported that they found it easier to answer questions about the paper, and were more confident about their answers with scholarphi. researchers were also observed as they used scholarphi for 15 minutes of unstructured reading time. researchers made use of all of scholarphi's features. feedback was overwhelmingly positive. 
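a small sketch of the two position-sensitive heuristics described in this file, the most-recent-prior-definition rule for tooltips and the "declutter" filter; the data structures and values are invented for illustration and are not scholarphi's actual implementation.

# definitions: list of (char_offset_in_paper, definition_text) for one nonce word
def most_recent_definition(definitions, usage_offset):
    """return the last definition appearing before the selected usage, if any."""
    prior = [(pos, text) for pos, text in definitions if pos < usage_offset]
    return max(prior, key=lambda d: d[0])[1] if prior else None

def declutter(sentences, nonce_word):
    """indices of sentences to keep highlighted; all others would be low-lighted."""
    return [i for i, s in enumerate(sentences) if nonce_word in s]

defs = [(120, "k: number of clusters"), (980, "k: mixture components")]
print(most_recent_definition(defs, usage_offset=700))   # -> "k: number of clusters"
print(declutter(["we set k = 5.", "results follow.", "varying k helps."], "k"))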
most participants expressed interest in using the features "often" or "always" for future papers, with an emphasis on the usefulness of definition tooltips and equation diagrams.in summary, this work makes four contributions. first, it characterizes the problem of searching for information about nonce words as one of the challenges of reading scientific papers, grounded in a small formative study. second, it provides design motivations for designing interactive tools that define nonce words, grounded in iterative evaluations of prototypes of a tool. third, it presents scholarphi, an augmented reading interface with a suite of novel features for helping readers understand nonce words in scientific papers. finally, it provides evidence of the usefulness of the design in searching and reading scientific papers through a controlled study with twenty-seven researchers. 1 we are motivated by the question: can a novel interface improve the reading experience by reducing distractions that interrupt the reading flow? this work takes a measured step to address the general design question by focusing on the specific case of helping scholarphi augments the reading experience with this and a host of other features (see section 4) to assist readers. these definitions are position-sensitivethat is, if there are multiple definitions of a nonce word in the text, scholarphi uses the heuristic of showing readers the most recent definition that appears before the selected usage of the word. second, scholarphi provides a novel filter over the paper called "declutter" that helps a reader search for information about a nonce word by low-lighting all sentences in the paper that do not include that word (section 4. third, it presents scholarphi, an augmented reading interface with a suite of novel features for helping readers understand nonce words in scientific papers. if they could not surmise the meaning from context, readers would sometimes delay looking up an explanation with the hope that they might find one later in the text (r1, r3, r4, r6-9)." when a reader selects a nonce word, scholarphi "declutters" the paper-by highlighting segments of text that contain matches, and fading out all other sentences-in an effort to help readers scan the paper for usages. one advantage to presenting definitions in a priming glossary as opposed to tooltips is that definitions for all nonce words can be consolidated into one place (m3), allowing a reader to learn about groups of related nonce words all at once.we performed a formal remote usability study to ascertain the answers to the following questions: do the features of scholarphi aid readers' ability to understand the use of nonce words when reading complex scientific papers? do readers elect to use the features when given unstructured reading time? how are the features used to support the reading experience?. a second purpose was to help a reader check whether the passage the reader was consulting was indeed the definition of a nonce word, which could help a reader make sure they were not missing other information of interest about the nonce word (p2). several readers explicitly told us they believed declutter could be useful for finding information about nonce words (p6, p11, p15, p23, p26).the outcomes of the usability study produced the following answers to the research questions: do the features of scholarphi aid readers' ability to understand the use of nonce words when reading complex scientific papers? 
yes.when asked to answer questions requiring understanding of nonce words, readers answered questions significantly more quickly with scholarphi than with a baseline pdf reader, while viewing significantly less of the paper.how are the features used to support the reading experience? on the whole, readers used the features for the reasons expected: they referred to tooltips to remind themselves of forgotten definitions, activated declutter to find information about nonce words within a less cluttered view of the paper, and opened equation diagrams to view the definitions of many symbols at once.the scholarphi system was designed to help readers concentrate on the cognitively demanding task of reading scientific papers by providing them efficient access to definitions of nonce words. the iterative design of the system revealed that systems like scholarphi need to tailor definitions to the passage where a reader seeks an understanding of a nonce word, provide scent, and avoid distracting readers from their reading. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/980.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/980.txt new file mode 100644 index 0000000000000000000000000000000000000000..71d44070b970dd11c0e3fb9c2bd38d219ff4a1df --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/980.txt @@ -0,0 +1 @@ +the main security protocol sequence is between the ml system and fairness auditing service or auditor in short form. note that although we suggest three roles in our architecture, the communications are mainly between the above two roles, and any universal verifier can turn to the auditor service (which represents the fairness board), if they want to challenge the computations.the ml system is responsible for the implementation and execution of the ml algorithm. it has data as input and performs some prediction (depending on the use case and purpose) that forms the output (fig. 1). the fairness auditor service receives information from the ml system, evaluates its fairness performance by computing a fairness metric. then, it returns the result for the metric back to the ml system. it also publishes the calculations in a fairness board for public verification. the public fairness board is a publicly accessible, read-only fairness board (e.g. a website). the auditor only has the right to append data (and the sufficient proofs) to the fairness board. also, the auditor verifies the authenticity, correctness and integrity of data before publishing it. we leak no data or model information, but the faas is still able to calculate fairness for a variety of fairness metrics and independent of the ml model.the existing research in fair ml normally assumes the computation of the fairness metric to be done locally by the ml system, with full access to the data, including the private attributes. the faas architecture includes stakeholders in three roles: a) ml system: a system that owns the data and the ml algorithm, b) fairness auditor service: a service that computes the fair performance of the ml system, and c) universal verifier: anyone who has the technical expertise and motivation to verify the auditing process. the fairness auditor service receives information from the ml system, evaluates its fairness performance by computing a fairness metric.cryptogram table : after initial agreements, the ml system produces a cryptogram table with n rows corresponding to the number of samples in their test dataset. 
in case the ml system does not want to reveal the number of the samples in the test set, the auditor and the ml system can publicly agree on n. each row will satisfy four properties: (a) one can easily verify if a single cryptogram is the encrypted version of one of the eight possible permutations, (b) while verifiable, if only a single cryptogram is selected, one cannot determine which permutation the current cryptogram represents, (c) for any two cryptograms selected from a single row, anyone will be able to distinguish each from one another, and (d) given a set of cryptograms arbitrarily selected from each row, one can easily check how many cases for each "permutation" are in the set.step (3): the corresponding column number that equals the decimal value of the binary encoding is selected from the cryptogram table to complete the fairness auditing table (as shown in table2).finally, the generated fairness auditing table is digitally signed by the ml system and then is sent over to the fairness auditing service.first, the fairness auditing service receives the fairness auditing table, verifies the digital signature and the zkps, and publishes the contents in the fairness board. it can store the fairness auditing table on the fairness board, compute the fairness, and verify the correctness of the declared permutation numbers. the universal verifier can follow the same steps to verify the fairness metric computations through the fairness auditing table that is publicly accessible via the fairness board. a fair system provides such services fairly, independent of the patient's race (table4 lists the required permutations to compute the fairness metrics of an ml system). this stage uses the output of the ml model to generate the fairness auditing table from the cryptogram table as well as zkp for knowledge of the permutation. the fairness auditing table is derived from the cryptogram table (as it is mapping the encoding to the corresponding permutation number in the cryptogram table). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/981.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/981.txt new file mode 100644 index 0000000000000000000000000000000000000000..4b635df5f9dce9025349d65d81d23d622c0ab14d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/981.txt @@ -0,0 +1 @@ +large language models (llms) and other transformer-based neural networks have revolutionized text analysis in research and practice. models such as openai's gpt-4 or anthropic's claude , for example, have shown a remarkable ability to represent, comprehend, and generate human-like text. compared to prior nlp approaches, one of the most striking advances of llms is their ability to generalize their "knowledge" to novel scenarios, contexts, and tasks . while llms were not explicitly designed to capture or mimic elements of human cognition and psychology, recent research suggests that -given their training on extensive corpora of human-generated language -they might have spontaneously developed the capacity to do so. for example, llms display properties that are similar to the cognitive abilities and processes observed in humans, including theory of mind (i.e., the ability to understand the mental states of other agents ), cognitive biases in decision-making and semantic priming . 
similarly, llms are able to effectively generate persuasive messages tailored to specific psychological dispositions (e.g., personality traits, moral values ).here, we examine whether llms possess another quality that is fundamentally human: the ability to "read" people and form first impressions about their psychological dispositions in the absence of direct or prior interaction. as research under the umbrella of zero-acquaintance studies shows, people are remarkably accurate at judging the psychological traits of strangers simply by observing traces of their behavior . for example, people can accurately predict a stranger's personality traits by snooping through their offices or bedrooms , examining their music preferences , or scrolling through their social media profiles .existing research in computational social science shows that supervised machine learning models are able to make similar predictions. that is, given a large enough dataset including both self-reported personality traits and people's digital footprints -such as facebook likes, music playlists, or browsing histories -machine learning models are able to statistically relate both inputs in a way that allows them to predict personality traits after observing a person's digital footprints . this is also true for various forms of text data, including social media posts , personal blogs , or short text responses collected in the context of job applications .in this paper, we test whether llms have the ability to make similar psychological inferences without having been explicitly trained to do so (known as zero-shot learning ). specifically, we use open ai's chatgpt (gpt-3.5 and gpt-4 ) to explore whether llms can accurately infer the big five personality traits openness, conscientiousness, extraversion, agreeableness, and neuroticism of social media users from the content of their facebook status updates in a zero-shot scenario. in addition, we test for biases in chatgpt's judgments that might arise from its foundation in equally biased human-generated data. building on previous work highlighting inherent stereotypes in pre-trained nlp models , we explore the extent to which the personality inferences made by chatgpt are indicative of gender and age-related biases (e.g., potential biases in how the personality of men and women or older and younger people is judged). 
that is, given a large enough dataset including both self-reported personality traits and people's digital footprints -such as facebook likes, music playlists, or browsing histories -machine learning models are able to statistically relate both inputs in a way that allows them to predict personality traits after observing a person's digital footprints.5 and gpt-4) to explore whether llms can accurately infer the big five personality traits openness, conscientiousness, extraversion, agreeableness, and neuroticismof social media users from the content of their facebook status updates in a zero-shot scenario.our analyses are based on text data obtained from mypersonality, a facebook application that allowed users to take real psychometric tests -including a validated measure of the big five personality traits (ipip) -and receive immediate feedback on their responses.mypersonality measured users' personality traits using the international personality item pool (ipip), a widely established self-report questionnaire that captures the big five personality traits of openness, conscientiousness, extraversion, agreeableness, and neuroticism.to obtain inferred personality traits from chatgpt, we used the last 200 facebook status updates generated by each user without additional preprocessing. in order to avoid exceeding the gpt token limit, status update histories were processed in chunks of 20 messages, and the inferred personality scores were then averaged to derive overall scores.in order to assess the capacity of llms to infer psychological traits from social media data, we compared the inferred big five personality scores with self-reported scores. the average pearson correlation coefficient of inferred and self-reported scores across all personality traits was r gp t 3.in addition to exploring the capacity of chatgpt to infer personality traits from social media user data, we also tested the extent to which this capacity is sensitive to changes in the amount of data that was available for inference. specifically, we computed correlations between self-reported and inferred personality scores based on different numbers of status messages.to further explore these potential biases, we analyzed the residuals between inferred scores and self-reported scores as an indication of how well gpt is able to represent the personality traits of male and female users (see table1). similarly, controlling for gender in the overall correlations between self-reported and inferred personality scores by z-standardizing inferred scores within each gender group did not yield correlations significantly different from those obtained before. notably, the overall accuracy of the observed inferences (pearson correlations between self-reported and inferred personality traits ranging between r = . for example, the fact that chatgpt shows systematic biases in its estimation of certain personality traits and is more accurate for women and younger adults could be either indicative of a bias introduced in the training of the models and/or the corpora of text data the models have been trained on, or be reflective of differences in people's general self-expression on social media (e. specifically, the ability of llms to accurately infer psychological traits from social media data could foreshadow a remarkable shift in the accessibility -and therefore potential use -of scalable psychometric assessments. 
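a rough sketch of the scoring pipeline described here, chunking a user's status updates into batches of 20, averaging the inferred big five scores across chunks, and correlating inferred with self-reported scores; infer_big5 is a hypothetical stand-in for the actual gpt prompt, which is not given in this text, and the numbers are invented.

import numpy as np
from scipy.stats import pearsonr

TRAITS = ["openness", "conscientiousness", "extraversion",
          "agreeableness", "neuroticism"]

def infer_big5(chunk_of_updates):
    """hypothetical stand-in for the llm call that rates one chunk of updates
    on a 1-5 scale; a real run would prompt gpt-3.5 / gpt-4 here."""
    return {t: 3.0 for t in TRAITS}   # dummy scores

def score_user(status_updates, chunk_size=20):
    chunks = [status_updates[i:i + chunk_size]
              for i in range(0, len(status_updates), chunk_size)]
    per_chunk = [infer_big5(c) for c in chunks]
    return {t: float(np.mean([p[t] for p in per_chunk])) for t in TRAITS}

# accuracy check: pearson r between inferred and self-reported scores per trait
inferred = np.array([[3.2, 2.8], [4.1, 3.9], [2.5, 3.0]])   # toy users x traits
reported = np.array([[3.0, 3.1], [4.4, 3.5], [2.9, 2.7]])
for j in range(inferred.shape[1]):
    r, _ = pearsonr(inferred[:, j], reported[:, j])
    print(f"trait {j}: r = {r:.2f}")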
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/982.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/982.txt new file mode 100644 index 0000000000000000000000000000000000000000..a7150025be2857a0694d2d13e186aff458837ea8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/982.txt @@ -0,0 +1 @@ +this study is different from the works summarized in section 1.1. first, it investigates general sentiment rather than sentiment in a specific domain or attitude toward a specific topic; in addition, it examines the temporal trend of sentiment from 2019 to 2022, representing the before-pandemic baseline and several phases during the pandemic, rather than a snapshot in time.this study is a follow-up study to that examined the sentiment during the early phase of the pandemic (2020) as opposed to the pre-pandemic (2019) in 8 higher-education institutes (hei) with reddit data in the u.s. in this study, we collected reddit data from 128 heis in the u.s., including the 8 schools in , over a 4-year period (august to december in 2019, 2020, 2021, and 2022), where 2021 and 2022 can be regarded as later stages of the pandemic. in other words, the scope of this study is much broader with a longer study period and many more schools that cover all four regions of the u.s.; the number of messages also increases from 165,570 in to 4,129,170 in this study.the primary goal of this study is to examine the sentiment shift from 2019 to 2022 and whether and when the level of negative sentiments has returned to the pre-pandemic era (2019). as secondary objectives, we also examine how other factors may affect the sentiment based on the collected reddit data, such as region, school type and classification, enrollment, etc.to analyze the data, we adopted a similar approach as in by first predicting the sentiment of each collected message using machine learning. the technique employs advanced natural language processing (nlp) techniques, specifically, the robustly optimized bert pretraining approach (roberta), in conjunction with graph neural networks (gnn) that leverage the inter-message relations among the reddit messages upon assigning sentiment classes (negative or non-negative) to each message, we employed a generalized linear mixture model (glmm) to examine the effect of year on sentiment and to identify relevant covariates that may have significant relations with sentiment.the remainder of the paper is structured as follows. in section 2, we describe the data collection for this study. in section 3, we introduce the machine learning and statistical procedures used to make sense of the data. the study results are presented in section 4. the study limitations and future work are discussed in section 5 and the main study conclusions are described in section 6. with a variety of effective strategies implemented to combat the covid-19 pandemic, including vaccination, quarantine measures, and the adoption of remote work and study routines, the impact of the pandemic on society has gradually subsided since the second half of 2021. several studies have been conducted to analyze sentiments and attitudes on various aspects post the covid-19 pandemic. 
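one plausible way to write the glmm described above as a worked equation; the logit link, the school-level random intercept, and the covariate set are assumptions based on the text, not the study's exact specification:

\[
\operatorname{logit}\,\Pr(y_{ij}=1) \;=\; \beta_0 + \beta_1\,\mathrm{year}_{ij} + \boldsymbol{\beta}_2^{\top}\mathbf{x}_{ij} + u_j,
\qquad u_j \sim \mathcal{N}(0,\sigma_u^2),
\]

where y_{ij} = 1 if message i from school j is classified as negative, x_{ij} collects covariates such as region, school type and classification, and enrollment, and u_j is a school-level random intercept; \beta_1 is the year effect of interest.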
they used negative binomial regression and linear regression and found that public sentiment towards vaccination became more positive after the first dose of vaccination. another study investigated the socioeconomic factors that may affect people's attitude towards reopening the economy in post-covid-19 using twitter data, socioeconomic data, environment data, and covid-19 cases, and applied logistic regression to identify important factors. they curated twitter data that is relevant to the education sector and used aspect-based sentiment analysis and machine learning techniques to identify sentiment and emotional triggers. they used textblob, the latent dirichlet allocation model, and multiple machine learning models, and found that the topics "work-life balance", "less stress", "future" and "engagement" are positive; negative topics include "virtual health", "privacy concerns", and "stress", and neutral topics involve "new technologies", "sustainability", and "technology issues". another study analyzed twitter data to study the sentiment distribution in india after the second wave of covid-19 using vader, lstm, and convolutional neural networks and found the majority of sentiments are either neutral or positive. in summary, all the above work used social media data to study sentiment or attitudes at a single pandemic time point in 2021 and 2022 after the first wave of the pandemic in 2020, with many focusing on data in a specific domain such as travel, import/export, remote working, and education. first, it investigates general sentiment rather than sentiment in a specific domain or attitude toward a specific topic; in addition, it examines the temporal trend of sentiment from 2019 to 2022, representing the before-pandemic baseline and several phases during the pandemic, rather than a snapshot in time. this study is a follow-up study to one that examined the sentiment during the early phase of the pandemic (2020) as opposed to the pre-pandemic (2019) in 8 higher-education institutes (hei) with reddit data in the u.s. the technique employs advanced natural language processing (nlp) techniques, specifically, the robustly optimized bert pretraining approach (roberta), in conjunction with graph neural networks (gnn) that leverage the inter-message relations among the reddit messages. upon assigning sentiment classes (negative or non-negative) to each message, we employed a generalized linear mixture model (glmm) to examine the effect of year on sentiment and to identify relevant covariates that may have significant relations with sentiment. the reddit data collection and how the data are used in this study are in accordance with reddit's terms and conditions on data collection and usage. second, to better utilize the relational information among messages, we employ the graph attention networks (gat) model, trained with the embeddings and the adjacency matrices among the messages as input, to output a second set of predicted sentiment probabilities for the messages. to obtain more accurate sentiment predictions, we applied the stacking method and formulated a logistic model to combine the sentiment probabilities from gat and roberta to obtain the final sentiment classification for each message. though the negative sentiment level in 2022 is still lower than in 2020, it is higher than in 2021, which may be within normal fluctuation in sentiment or indeed reflect a slight rise in negative sentiment from the transient large drop in 2020, due to other factors that negatively affect sentiment such as inflation in 2022.
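a minimal sketch of the stacking step just described, combining the per-message probabilities from roberta and from the gat with a logistic meta-classifier; the array names and files are hypothetical, not the study's code:

# stacking sketch: combine two per-message probability columns with logistic regression.
import numpy as np
from sklearn.linear_model import LogisticRegression

p_roberta = np.load("p_negative_roberta.npy")  # P(negative) from the fine-tuned RoBERTa
p_gat = np.load("p_negative_gat.npy")          # P(negative) from the GAT over the message graph
labels = np.load("labels.npy")                 # 1 = negative, 0 = non-negative (held-out annotations)

X = np.column_stack([p_roberta, p_gat])
meta = LogisticRegression().fit(X, labels)     # the logistic stacking model
final_pred = meta.predict(X)                   # final sentiment class per message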
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/983.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/983.txt new file mode 100644 index 0000000000000000000000000000000000000000..f3162ca507a2dfeff61127f935a6d4ae204098b6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/983.txt @@ -0,0 +1 @@ +the development of foundation models whose representations and outputs can be used for various downstream tasks has facilitated unprecedented advancements in content generation across domains such as natural language , programming , image , and audio , with some authors claiming glimpses of artificial general intelligence . these advancements, however, come with significant societal implications. for example, they present challenges to the employment prospects of knowledge workers , prompt a re-evaluation of the constructs of creativity and originality , and invite discourse on issues of privacy and safety . consequently, humanity is now tasked with understanding and navigating these technologies.as ai practitioners whose initiatives spurred the recent advancements in capabilities, we bear a special duty in this endeavor. indeed, we believe that the development of evaluation frameworks that are robust enough to capture the evolution of applications within current domains and flexible enough to be extended to capabilities in new domains is imperative.recent efforts such as the living benchmarks of holistic evaluation for natural language processing tasks and the use of data kernels to understand differences in the embedding spaces of models are important strides for understanding absolute and relative model capabilities, respectively. these efforts are in contrast with for-profit institutions releasing enthusiasm-laden reviews of their own work -effectively claiming human-level capabilities across a suite of evaluation frameworks -without including details necessary to reproduce their results and hence generating serious documentation debt for the community.beyond evaluating absolute or relative machine capabilities on specific downstream inference tasks, technologists have historically been interested in comparing machines to humans , with some authors arguing that the entire field of ai working towards the goal of emulating human-level "intelligence" . the turing test or imitation game, for example, is an evaluation framework used to determine whether a conversational agent is a human or a machine . in the originally proposed test, a person converses with the agent and determines if the agent is a human or machine once the conversation ends. if the agent is determined to be human, then it "passed" the turing test. similar tests have been proposed in the vision domain . however, due to the of variability among human evaluators and the broad spectrum of tasks used for evaluation, there is a significant degree of ambiguity surrounding the criteria for passing.to address this ambiguity, we propose a statistical framework that quantifies the ability of a system to distinguish between human-generated and machine-generated content. we think of our framework as a set of statistical turing tests for generative models.human detection problem statement: let (x 1 , y 1 ), . . . , (x n , y n ) be (content, label) pairs, where the label y i ∈ {0, 1} determines whether the corresponding content x i ∈ x was generated by a machine (label 0) or a human (label 1). 
in the human detection problem, the goal of a classifier h : x → {0, 1} is to correctly determine whether content was generated by a human or a machine. contributions: our primary contribution is a statistical framework for human detection that can be used to quantify the detectability of a machine for a given human detection context that is agnostic to content modality and generation task. the framework provides the language required to analyze important aspects of the human detection problem such as the progression of a family of models towards human-like abilities and the effectiveness of different classifiers for human detection. secondary to the proposed framework is the contribution of a human detection method, proxihuman, that utilizes the geometry of a machine's embedding space to determine the origin (human or machine) of content. let (x 1 , y 1 ), . . . , (x n , y n ) be (content, label) pairs, where the label y i ∈ {0, 1} determines whether the corresponding content x i ∈ x was generated by a machine (label 0) or a human (label 1). in the human detection problem, the goal of a classifier h : x → {0, 1} is to correctly determine whether content was generated by a human or a machine. contributions: our primary contribution is a statistical framework for human detection that can be used to quantify the detectability of a machine for a given human detection context that is agnostic to content modality and generation task. recently, zhang et al. … for facilitating analysis of the human detection problem along relevant axes, we define the human detection context c as a sextuple consisting of a sample space x , human-generation class conditional distribution f 1 , class mixing coefficient π, loss function ℓ, transformation t, and a set of classifiers h = {h : t(x ) → {0, 1}}. notably, if a machine is τ-undetectable for context (x , f 1 , π, ℓ, t, h) and τ′-undetectable for context (x ′ , f ′ 1 , π ′ , ℓ ′ , t ′ , h ′ ) and x ∩ x ′ = ∅, then there exists a context (x ∪ x ′ , f ′′ 1 , π ′′ , ℓ ′′ , t ′′ , h ′′ ) such that the machine is max(τ, τ′)-undetectable. in reality, however, the cost of a machine to produce content is much cheaper than the cost of a human to produce content and so it may be sensible to assume that π ≈ 1 for most situations where the content generation process is not visible. there is a duality in the relationship of the class conditional prior π and loss functions ℓ that assign positive weights α (for misclassifications of human generated content) and β (for misclassifications of machine generated content) such that α + β = 1. to leverage this observation into a human detection method, the authors propose perturbing the observed content x locally by masking tokens of x and filling the masked tokens with a potentially-different model to produce a new piece of content x̃. given the different assumptions on the curvature of log p θ for humans and machines, the absolute difference between the original log p θ (x) and the average of log p θ (x̃) should be small for machine generated content and large for human generated content. the transformation t is thus log p θ (x) − e x̃∼p [log p θ (x̃)], where p is dependent on the original content x and the proportion of original content that is masked.
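a schematic sketch of this perturbation-based transformation (mask part of the content, refill it with a different model, and compare log-likelihoods); log_prob, mask_spans, and fill_masks are hypothetical helpers standing in for the scoring model p θ and a mask-filling model, not any specific implementation:

import numpy as np

def perturbation_discrepancy(x, log_prob, mask_spans, fill_masks,
                             n_perturbations=20, mask_fraction=0.15):
    """return log p_theta(x) minus the average log-likelihood of perturbed copies of x."""
    perturbed = [fill_masks(mask_spans(x, mask_fraction)) for _ in range(n_perturbations)]
    return log_prob(x) - np.mean([log_prob(x_tilde) for x_tilde in perturbed])

# a detector h can then threshold this score at some cutoff to assign
# label 0 (machine) or label 1 (human), as in the problem statement above.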
in the black-box setting, we replace the model that generated the content with a surrogate model to generate perturbed content and to evaluate the likelihood of observing the original and perturbed content (for detectgpt) or to evaluate the embeddings of the original and perturbed content (for proxihuman).the human distribution for the long-form answers of the pubmed q&a might be drastically different than the human condition for a particular medical professional and that our analysis gives no insights on the ability of gpt3 or gpt4 to produce content that is similar to any particular medical professional.as an example of how to use the framework to study different aspects of the human detection problem, we evaluated five different human detection methods that utilize different assumptions for the difference between human and machine generated content. we lastly commented on the fact that the detection methods that leverage simple functions of the surrogate model outperform detection methods that rely on more complicated relationships between human and machine generated content in the black-box setting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/984.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/984.txt new file mode 100644 index 0000000000000000000000000000000000000000..3115feba8c36e82baac9b8650b8cf9f8274180f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/984.txt @@ -0,0 +1 @@ +with the debut of chatgpt, a ground-breaking commercial application of large language models (llms), many controversies happened regarding its benefits and ethical issues . on the one hand, it provides rich opportunities for automating and assisting human work such as q&a and creative writing . on the other hand, the functioning and use of chatgpt are not always ethical, with a wide range of ethical concerns being raised , such as academic integrity and job loss .in this work, we investigated a particular ethical issue of chatgpt, i.e., gender bias . gender bias has long been studied in computer systems and other domains . gender bias in chatgpt could harm an extremely large population if it is widely adopted by people. understanding public perceptions toward gender bias in llms is crucial to keep policies and regulations relevant and effective in meeting people's needs.llms are trained on data collected from search engines, online forums, websites, and so on. thus, llms can reflect and even amplify existing biases in human language . social biases exist in varying forms in different cultures .llms trained on data collected from different cultures may also demonstrate different types and levels of biases. to understand how gender bias manifests in llms rooted in different cultures, we examined chatgpt by openai based in the us and ernie by baidu based in china for a comparative case study, toward informing contextual and concrete regulation. 
to understand how gender bias manifests in llms rooted in different cultures, we examined chatgpt by openai based in the us and ernie by baidu based in china for a comparative case study, toward informing contextual and concrete regulation., gender bias, racial bias, and cultural bias, which could be present in chatgpt-generated responses in consumer-facing applications.we approached public perceptions about gender bias in chatgpt and ernie by comparing social media discussions around these two llms.social media discussions about chatgpt were obtained with the search query "gender bias in chatgpt" from twitter. the themes emerging from the discussions around chatgpt included users' observation of implicit gender bias, political correctness, and dissemination of scientific findings.the chatgpt users who had some linguistic or nlp knowledge described such gender bias in chatgpt as "implicit," instead of "explicit. for example, one user thought the "implicit" bias was substantial and persisting: "implicit gender bias in chatgpt. another tweet pointed to an article discussing how gender bias in chatgpt affected hr and tips for avoiding the pitfalls: "very interesting post by @realevilhrlady on how gender bias shows up in chatgpt's responses, plus some tips for how to mitigate it with your prompts: chatgpt gender bias: how it affects hr & tips to avoid pitfalls.people have called for more attention to be paid to gender bias in chatgpt: "rise of #chatgpt and increasing #investment in #agi makes clear #gender #bias in #ai demand immediate attention, says anna collard of knowb4 africa." a woman teaching financial education in schools and workplaces pointed out gender bias in chatgpt and acted by promoting fairness in her lessons, "chat gpt is already considered to have gender bias in built, when we ask it for 'famous entrepreneurs' it provides us with men.our analysis revealed a heated discussion about gender bias in llms, especially chatgpt. our comparative analysis of gender bias in chatgpt and ernie sheds light on understanding the intertwining relationship between culture and llm gender bias. first, various data protection efforts ensure citizens' rights: to correct or delete inaccurate data about them, including that generated by algorithms; to preclude algorithm-generated attributes from feeding into further ai processing or re-use across contexts; and against the use of synthetic data and proxies around protected or sensitive characteristics, such as race, gender, sexuality, or religion.ai literacy education is important for changing people's awareness and attitude toward gender bias in ai, who can then practice more ethical ai use and even help mitigate gender bias from ai. also, if developers are educated to identify and mitigate gender bias from ai in the development lifecycle, they can more effectively act toward addressing bias issues in ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/985.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/985.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ef93c35d60604670e0cdf9fdbaa6fc235536570 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/985.txt @@ -0,0 +1 @@ +greater awareness of the potential harms of practical applications of artificial intelligence (ai) systems has inspired various approaches to preventing these harms. 
various stakeholders, including governments, corporations, and non-government organisations, have developed principles for ai ethics. 1 a common theme among these sets of ethics principles is the need for ai systems to be trustworthy.while often not clearly distinguished in the literature, trust in ai and trustworthy ai should be considered separately. while ideally trust in ai would be determined by the ai's trustworthiness, this correspondence between users' trust and ai trustworthiness is not automatic. trustworthiness is a property of the ai system, while trust is an attitude that must be granted by users. furthermore, users must properly calibrate their trust so that they do not distrust a trustworthy ai or trust an untrustworthy one. trustworthiness is not the only factor that affects whether users decide to trust an ai. focusing on the trustworthiness of ai systems without also considering the factors that influence how users develop trust in them will not achieve the goals of trustworthy ai. for this we also need to consider the wider ai system stakeholder ecosystem and the user's attitudes towards them as people may not trust or distrust the ai itself, but rather those responsible for it.we begin this paper with an overview of the concepts of trust and trustworthiness, and how trust in ai differs from trust relationships between people. we then describe how trust is discussed in the context of ai ethics principles based on a short survey of ai papers that mention trust in the context of these principles. we highlight several issues, including an imprecise use of the terms trust and trustworthiness, a lack of clear empirical evidence with respect to if, how, and under what circumstances adhering to certain principles contributes to the formation of trust, and the need for a more nuanced consideration of the multidimensionality of ai ethics and potential tradeoffs between principles. this leads to a discussion of trust in ai as socio-technical systems, and the need to consider the wider context of ai system development and use to better understand trust in ai. while ideally trust in ai would be determined by the ai's trustworthiness, this correspondence between users' trust and ai trustworthiness is not automatic. we then describe how trust is discussed in the context of ai ethics principles based on a short survey of ai papers that mention trust in the context of these principles. we highlight several issues, including an imprecise use of the terms trust and trustworthiness, a lack of clear empirical evidence with respect to if, how, and under what circumstances adhering to certain principles contributes to the formation of trust, and the need for a more nuanced consideration of the multidimensionality of ai ethics and potential tradeoffs between principles. this leads to a discussion of trust in ai as socio-technical systems, and the need to consider the wider context of ai system development and use to better understand trust in ai. 
users may also assign intention to ai systems to a greater degree than they would for other technologies, and this perception of intention may be used to justify referring to a willingness to depend on ai systems as 'human-ai trust' rather than reliance.7similar to other conceptualisations and models of trust in ai (and/or automation), a recent literature review of user trust in ai identifies three major themes in the influences on user trust: socio-ethical considerations, technical and design features, and the user's own characteristics.while one of the purposes of following ai ethics principles in the development of ai systems may be to foster procedural trust in ai, the main goals of these principles are to identify and (if adhered to) minimise an ai's capacity to cause harm to people, and to provide people with information they need to make decisions about exposing themselves to the risk of that harm.15this overloading, a lack of consideration of potential contradictions between ethics principles, and little consideration of the targeted nature of trust highlights another element of potential confusion: must ai systems adhere to all ai ethics guidelines to be trusted? must only some principles be adhered for trust to emerge? which principles, or combinations of principles, might be more important or necessary than others for building trust?. while explanations have been shown to increase users' willingness to trust ai, in other cases transparency may have negative effects on trust in an ai. within the sample of papers we reviewed, empirical results (where they existed) were unclear about when explainability and transparency contribute to trust, in which contexts they may do so, or what types of explainability are useful for specific stakeholders to trust an ai, and whether these are the only or most important factors for (warranted) trust to form.15ai ethics principles themselves are also socio-technical systems and may serve as controls intended to establish procedural trust in the ai systems created by developers who follow the principles.7trust in an ai that is based on the user's trust in the system's developer is interpersonal or organisational trust rather than human-ai trust, as the user trusts the developer rather than the ai itself.to allow us to move from principles to practice, we need to further untangle and develop our understanding of how ai ethics principles, that might inform the development of ai systems, relate to individual perceptions of trustworthiness, and how such trustworthiness matters (or if at all, and under what circumstances) to people and impacts trust.we need to consider ai as socio-technical systems, where the dynamics between various stakeholders may impact on perceptions and evaluations of trustworthiness, and how (and under which circumstances) such factors may contribute to creating or eroding trust as well as affecting the acceptance and adoption of ai. we need to pay attention to how the relationships and dynamics between various stakeholders, their trustworthiness and whether they adhere (or are perceived to adhere) to ai ethics principles, can affect trust and trustworthiness perceptions of ai systems, developers, providers and users, and how this affects the acceptance or adoption of ai technologies. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/986.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/986.txt new file mode 100644 index 0000000000000000000000000000000000000000..f78fd81cbd899ec7bc845db89d8654844688ed83 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/986.txt @@ -0,0 +1 @@ +dreams of automating mathematical research are as old as the imagination of early mechanical calculating devices, long before the creation of modern digital computers . the latter have already generated diverse aspirations to automated mathematics, from formal verification to interactive and automated theorem proving, since the mid-twentieth century. yet, novel developments in the twenty-first are pressing mathematicians to reconsider such ideas and their implications afresh. formal verification, once imagined to work only for rudimentary results, now seems feasible even for some of the most sophisticated results of contemporary research. advances in artificial intelligence and machine learning, which have captured massive amounts of public attention and capital investment, promise to expand the automation of theorem proving.in what follows, i examine how automation has reconfigured mathematical proof and labor, and what might happen in the future. my perspective is grounded empirically in the comparative analysis of a wide range of historical and contemporary cases. first, i suggest that past and present controversies about the status of computer-assisted proofs reflect a longstanding tension in modern mathematics, between what i call post-cartesian and post-leibnizian practical standards of proof. then, i distinguish between prominent forms of automation in mathematical research, and consider some consequences of each. next, i provide critiques of some recurring assumptions about automated mathematics. finally, i ask how automation might reshape economies of labor and credit in mathematics, and briefly offer my own hopes. first, i suggest that past and present controversies about the status of computer-assisted proofs reflect a longstanding tension in modern mathematics, between what i call post-cartesian and post-leibnizian practical standards of proof.post-cartesian standard of long and complex proofs: after some study, one understands the proof's general strategy and each of its parts, and can follow it one part at a time.the current configuration of proof standards (approximation): post-leibnizian standards adjudicate the validity of a proof in principle, while post-cartesian standards adjudicate validity in practice in addition to other considerations beyond validity. since the first half of the twentieth century, the post-leibnizian standard of modern axiomatic proofs has come to define how mathematicians adjudicate the validity of a proof in principle.how might computers transform current standards of proof? to address this question, it is helpful to distinguish between some forms of automation in mathematical research, involving different uses of computing technologies for different purposes. computer-assisted proofs by exhaustion have triggered quite a bit of unease and resistance, because their acceptance demanded a new standard of proof that moved another step away from the cartesian ideal of total understanding. 
practical standard of computer-assisted proofs by exhaustion: after some study, one understands the proof's general strategy as well as the computer program that checked all the cases.practical standard of computer-encoded proofs: every step can be checked by a computer program and derived from the axiomatic foundations of the program; and after some study, one understands or trusts the encoding of the proven statement.although computer-encoded proofs remain susceptible to human error and introduce new epistemic fears (such as software bugs), i have not yet encountered public objections to their assumed sufficiency as a standard of proof. should mathematical journals require computer programs along with, or even instead of, the submitted papers? should all theorems be demoted to conjectures until computer-encoded proofs are available? although proposals of this kind have already appeared for several decades, they have garnered more attention in recent years.potential practical standard of deeply automated proofs: one trusts the computer programs that constructed and checked the proof, even if one may not understand it at all. although military uses of computer-assisted proof have changed significantly in the fifty years since then, the us air force office of scientific research (afosr) still sponsors research in this area under its "information assurance and cybersecurity" program, mentioning "interactive and automated theorem proving" as well as "formalized mathematics" among research areas that "are expected to provide valuable insights into various cybersecurity problems". what might happen to the economy of credit if computer-encoded proofs become required in the future? one possibility is that the credit system will follow suit: that the first people to provide a computerencoded proof would receive the central credit for a theorem. another possibility is that the economy of credit and the standards of proof will become misaligned: the central credit would still go to the author of the first print publication containing a demonstration, but the result would only become accepted after someone (not necessarily the same person) provides a computer-encoded proof. what is distinct about automation, particularly in the search for new proofs and theorems, is the proliferation of roles-such as programming computers, optimizing algorithms, tweaking parameters, and cleaning data-which not only transcend the traditional expertise of mathematicians but also contribute to the very process of mathematical reasoning, even in pure mathematics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/987.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/987.txt new file mode 100644 index 0000000000000000000000000000000000000000..70ee9e967032ee6d701e02e6f49fb078e2e1dca9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/987.txt @@ -0,0 +1 @@ +the covid-19 pandemic, caused by the novel coronavirus sars-cov-2, has not only posed a significant threat to global public health but has also brought to light various indirect consequences affecting individuals' mental wellbeing . as healthcare systems around the world grapple with the immediate challenges of treating covid-19 patients, it has become increasingly evident that there is a pressing need to understand and address the potential long-term mental health repercussions of this global crisis. 
numerous studies have reported a spectrum of mental health issues emerging in the wake of covid-19 recovery, including anxiety, depression, post-traumatic stress disorder (ptsd), and other neuropsychiatric disorders .these conditions, often collectively referred to as post-covid-19 mental health disorders, can be debilitating and require comprehensive evaluation, risk assessment, and timely intervention. to effectively mitigate these mental health challenges, it is imperative to identify the risk factors contributing to their development. machine learning, with its capacity to analyze vast datasets and extract intricate patterns, presents an invaluable tool for this purpose . by leveraging data-driven insights, we can gain a deeper understanding of the variables and circumstances that predispose individuals to post-covid-19 mental health disorders. in this study, we utilize a machine learning perspective to identify key risk factors associated with the onset of mental health disorders in individuals recovering from covid-19. our dataset comprises medical information from 669 patients collected across various healthcare facilities in iraq. by applying advanced analytical techniques, we aim to pinpoint the predictors that significantly influence the likelihood of developing mental health complications following covid-19 infection. this research contributes to the growing body of knowledge on post-covid-19 mental health and provides a foundation for the development of targeted interventions and support strategies for at-risk individuals . through a comprehensive understanding of these risk factors, healthcare professionals and policymakers can work towards implementing proactive measures to safeguard the mental well-being of covid-19 survivors. in this study, we utilize a machine learning perspective to identify key risk factors associated with the onset of mental health disorders in individuals recovering from covid-19.this methodology allowed for a comprehensive analysis of risk factors associated with post-covid-19 mental health disorders, combining traditional statistical methods and machine learning techniques for a more accurate and predictive assessment. showing that patients with mental health disorders were slightly older on average and had a higher percentage of pre-existing mental health conditions. these symptoms encompassed anxiety, depression, and posttraumatic stress disorder, among others(16-18). moreover, (18,19) conducted a comprehensive analysis of covid-19 patients and found a clear relationship between the severity of respiratory symptoms and the prevalence of subsequent mental health issues. other studies (20,21) investigated the complex interplay between covid-19 severity, mental health, and substance use disorders. an intriguing gender difference emerged from our data, with females manifesting a higher prevalence of mental health disorders compared to males. their insights corroborate the importance of addressing gender disparities in mental health outcomes, which our study underscores.the study conducted a comprehensive study that examined the mental health impact of the covid-19 pandemic, including individuals with pre-existing mental health conditions. the study explored the long-term effects of pre-existing mental health conditions on the mental well-being of covid-19 survivors. 
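as an illustration of the kind of combined statistical and machine-learning analysis described above, a minimal logistic-regression sketch relating patient-level variables to the odds of a post-covid-19 mental health disorder; the file name and column names are hypothetical, not the 669-patient dataset's schema:

import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import StandardScaler

df = pd.read_csv("post_covid_patients.csv")  # hypothetical file and columns
features = ["age", "female", "severe_respiratory_symptoms", "preexisting_mental_health"]
X = StandardScaler().fit_transform(df[features])
y = df["mental_health_disorder"]             # 1 = disorder reported after recovery

model = LogisticRegression().fit(X, y)
# larger absolute coefficients suggest a stronger association with the outcome
for name, coef in sorted(zip(features, model.coef_[0]), key=lambda t: -abs(t[1])):
    print(f"{name}: {coef:+.2f}")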
while not directly related to pre-existing mental health conditions, their work underscores the broader relevance of data analytics in healthcare, including mental health, and aligns with our study's emphasis on tailored care approaches. the study presented a comprehensive analysis of post-covid-19 mental health disorders, and several key findings emerged from this research, with implications that extend beyond the scope of the study. these findings underscore the substantial psychological toll associated with severe covid-19 illness experiences and emphasize the importance of recognizing and addressing the heightened mental health needs of such patients. furthermore, the study unveiled a compelling link between pre-existing mental health conditions and the likelihood of post-covid-19 mental health disorders. these findings emphasize the importance of continued mental health support for individuals with a history of mental health issues and the need for a holistic approach to healthcare. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/988.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/988.txt new file mode 100644 index 0000000000000000000000000000000000000000..1febff686c413067f8b0f3b43491e14b9000dc12 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/988.txt @@ -0,0 +1 @@ +traditional industries of physical infrastructures, such as telecommunications, satellite radio, and cartography, have operated under centralized control with single entities or associations of a few centralized entities of predominant authority. with the advancement of the internet and communication technologies, many infrastructure ecosystems instigated a decentralized trend against traditional centralization. however, in recent times, these formerly open and inclusive digitized platforms and services have increasingly shifted the focus towards prioritizing their private interests. such is the case of the cartography industry, dominated by a single service provider, google maps which has gradually re-centralized the industry .with blockchain technology, however, it has become possible to decentralize the physical networks of infrastructure.m. ballandies is supported by the european union's horizon 2020 research and innovation programme under grant agreement no 833168.through the implementation of automatized operation, immutable common data storage and redesigned value-sensitive governance, the potential arises for distributing control, ownership, and decision-making among multiple participants or nodes in a network . decentralized physical infrastructure networks (depins) are an emerging trend within blockchain that describes this growing ecosystem of web3 projects that utilize token incentives, smart contracts, decentralization, and participatory governance mechanisms of blockchains to deploy networks of real devices such as sensors, storage, or wireless networks globally.decentralization of physical networks has been recognized to result in several benefits including increased resilience , due to distributing the system away from a single point of failure and system redundancy; trust , due to data transparency and tamper-proof guarantees; and accessibility due to permissionless requirements of joining the network. additionally, the operation of infrastructure can be improved due to the re-structure of incentive mechanisms (e.g. 
via blockchain-based tokens), flexible decentralized marketplaces, and inclusive participation .at the time of writing, more than 50 blockchain systems can be counted in this ecosystem with varying designs, layouts and application domains . nevertheless, a classification of these systems based on a rigorously from theory derived taxonomy is still missing. such a taxonomy would facilitate the differentiation and comparison among systems and enable researchers to identify unexplored system layouts and practitioners to learn from others, thus facilitating innovation. in particular, the lack of a classfication of depin systems on a rigorous taxonomy can result in a fragmentation of the community and thus duplication of efforts .this work contributes a first stepping stone to conduct such a comprehensive classification with the following contributions: 2) a taxonomy for depin derived from this conceptual architecture ii. literature review decentralized physical infrastructure networks (depins) are cryptoeconomic systems. these are systems that consist of i) individual autonomous actors, ii) economic policies embedded in software and iii) emergent properties arising from the interactions of those actors according to the rules defined by that software" . in the case of depin, autonomous actors are incentivized to place or use physical devices on a global scale via mechanisms enabled by distributed ledger technology (dlt) (as utilized in blockchain) from which a physical infrastructure may arise as an emergent property.since such dlt-based cryptoeconomic systems are complex systems that are difficult to control and govern hierarchically from the top down due to their emergent nature, decentralized mechanisms have been proposed and are used in depin: i) for control, dlt-based tokens are used as an incentive mechanism. in particular, such tokens can align human behavior with goals set by a community (e.g., the establishment of a depin). ii) for governance, shared ownership of the network can be achieved via token-based decisionmaking mechanisms on system parameters, compositions and general vision (e.g. via improvement proposals). usually, all token holders can participate in these processes .in particular, hierarchical and centralized governance mechanisms are often challenged in complex systems and decentralized and bottom-up mechanisms are required to govern and control them. for instance, these systems can be efficiently governed via decentralized mechanisms such as collective intelligence, digital democracy and self-organization which hence found not only a natural expression globally in urban participatory budgeting projects 1 , or self-organizing business teams , but also in blockchain-based web3 systems in the form of decentralized autonomous organizations (daos). these daos utilize the collective intelligence of their community (e.g. via open discussion platforms) to identify ideas and then supports them to deliberate (e.g. via improvement proposals), decide (e.g. via token-based voting) and implement (e.g. steered/ controlled via token-based incentives) them effectively . often, these mechanisms are implemented using smart contracts. in general, this dlt-based code can improve the functioning of society by automating and increasing the transparency of the implemented mechanisms .depin represents a subset of the broader blockchain iot domain, characterized by its utilization of physical hardware or resources to deliver tangible or digital services to consumers. 
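to make the token-based decision-making described above concrete, a toy sketch of token-weighted voting on an improvement proposal; the balances, quorum rule, and threshold are invented for illustration and do not correspond to any specific depin project's governance:

# toy token-weighted vote on an improvement proposal; balances and thresholds are invented.
balances = {"alice": 1200, "bob": 300, "carol": 500}   # governance-token holdings

def tally(votes, balances, quorum_fraction=0.5):
    """votes maps holder -> True/False; each vote is weighted by the holder's token balance."""
    supply = sum(balances.values())
    yes = sum(balances[h] for h, v in votes.items() if v)
    turnout = sum(balances[h] for h in votes)
    return turnout / supply >= quorum_fraction and yes > turnout / 2

print(tally({"alice": True, "bob": False}, balances))   # True: alice's token weight carries the vote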
this subset opens up new possibilities and applications within the blockchain iot framework, driving the emergence of depin projects that cater to specific use cases in the realm of physical service provisioning.because depin operates at this convergence of blockchain, internet-of-things (iot), and physical service delivery, certain 1 switzerland's city of aarau participatory budgeting: https://www. stadtidee.aarau.ch/abstimmungsphase.html/1937 aspects of it can be described with existing taxonomies in the field of blockchain iot . notably, i) the underlying hardware architecture in depin projects shares similarities with the utilization of hardware in acquiring data or providing fungible or non-fungible goods and services, akin to the perception or sensor layer observed in traditional blockchain iot systems . moreover ii), drawing parallels with off-chain compute in the context of blockchain iot systems , the middleware plays a pivotal role in processing, storing, transporting data, and relaying services acquired by the hardware layer in depin projects.the establishment of a comprehensive taxonomy is of paramount importance in providing clear guidelines for stakeholders aiming to foster the growth of emerging sectors. while the internet of things (iot) taxonomy has been thoroughly explored in the literature , and the advent of blockchain and blockchain iot has gained substantial popularity, resulting in the emergence of general dlt taxonomies (see for an overview) and fewer blockchain iot works , due to its recent history and distinct characteristics focus on instantiating infrastructure networks, there remains a conspicuous absence of well-defined taxonomy for depin that combines previously introduced taxonomies from dlt and iot and extends them to be applicable for infrastructure networks.in order to close this gap, this work introduces a rigorously derived taxonomy for depin that considers the cryptoeconomic design, distributed ledger technology and physical infrastructure components of depins. decentralized physical infrastructure networks (depins) are an emerging trend within blockchainthat describes this growing ecosystem of web3 projects that utilize token incentives, smart contracts, decentralization, and participatory governance mechanisms of blockchains to deploy networks of real devices such as sensors, storage, or wireless networks globally.decentralization of physical networks has been recognized to result in several benefits including increased resilience, due to distributing the system away from a single point of failure and system redundancy; trust, due to data transparency and tamper-proof guarantees; and accessibilitydue to permissionless requirements of joining the network. in the case of depin, autonomous actors are incentivized to place or use physical devices on a global scale via mechanisms enabled by distributed ledger technology (dlt) (as utilized in blockchain) from which a physical infrastructure may arise as an emergent property.since such dlt-based cryptoeconomic systems are complex systemsthat are difficult to control and govern hierarchically from the top down due to their emergent nature, decentralized mechanisms have been proposed and are used in depin: i) for control, dlt-based tokens are used as an incentive mechanism. 
for instance, these systems can be efficiently governed via decentralized mechanisms such as collective intelligence, digital democracy and self-organizationwhich hence found not only a natural expression globally in urban participatory budgeting projects 1, or self-organizing business teams, but also in blockchain-based web3 systems in the form of decentralized autonomous organizations (daos). this subset opens up new possibilities and applications within the blockchain iot framework, driving the emergence of depin projects that cater to specific use cases in the realm of physical service provisioning. notably, i) the underlying hardware architecture in depin projects shares similarities with the utilization of hardware in acquiring data or providing fungible or non-fungible goods and services, akin to the perception or sensor layer observed in traditional blockchain iot systems. moreover ii), drawing parallels with off-chain compute in the context of blockchain iot systems, the middleware plays a pivotal role in processing, storing, transporting data, and relaying services acquired by the hardware layer in depin projects. while the internet of things (iot) taxonomy has been thoroughly explored in the literature, and the advent of blockchain and blockchain iot has gained substantial popularity, resulting in the emergence of general dlt taxonomies (seefor an overview) and fewer blockchain iot works, due to its recent history and distinct characteristics focus on instantiating infrastructure networks, there remains a conspicuous absence of well-defined taxonomy for depin that combines previously introduced taxonomies from dlt and iot and extends them to be applicable for infrastructure networks.in order to close this gap, this work introduces a rigorously derived taxonomy for depin that considers the cryptoeconomic design, distributed ledger technology and physical infrastructure components of depins.the hardware component, which represents the hardware used in the dlt system in more detail, has been added to the new physical infrastructure network (pin) dimension of the taxonomy (figure1). mostly this open governance participations in depin systems is restricted to improvements proposals which are not legally binding and thus neither impact system parameters nor the governance body/ workings of ip-owing legal entities directly.finally, the term 'depin' has been used widely to refer to decentralized token-incentivized blockchain networks or systems, however, more recently, there has been a push to clearly delineate between physical networks and strictly digital resources such as 'computational resources'. we found that the taxonomy can clearly illustrate the difference between depin and non-depin systems: whenever the incentivized action in a system is the placement or contribution of physical infrastructure elements to the system, such as a camera or storage drives, it can be defined as a depin system.this taxonomy paper has presented a comprehensive and novel framework for classifying depin based on cryptoeconomic design, distributed ledger technologies and physical infrastructure network dimensions that span 8 components ranging from tokens over governace to consensus and hardware. 
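a small sketch of how such a classification record could be encoded when comparing systems; the three dimensions follow the text above, but the component fields and example values are illustrative guesses rather than the paper's exact taxonomy:

# illustrative encoding of a DePIN classification record; field names beyond the three
# dimensions named in the text (cryptoeconomic design, DLT, physical infrastructure
# network) are assumptions made for the sake of the example.
from dataclasses import dataclass

@dataclass
class DePinClassification:
    name: str
    # cryptoeconomic design dimension
    token_incentive: str      # e.g. "placement reward", "usage reward"
    governance: str           # e.g. "token-weighted voting", "improvement proposals only"
    # distributed ledger technology dimension
    consensus: str            # e.g. "proof of stake"
    # physical infrastructure network dimension
    hardware: str             # e.g. "sensors", "storage drives", "wireless hotspots"

example = DePinClassification(
    name="hypothetical-sensor-network",
    token_incentive="placement reward",
    governance="token-weighted voting",
    consensus="proof of stake",
    hardware="sensors",
)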
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/989.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/989.txt new file mode 100644 index 0000000000000000000000000000000000000000..f01c0d12e2f4e9192509aad110ddb31866322e76 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/989.txt @@ -0,0 +1 @@ +the increasing demand for computer science (cs) education is straining higher education institutions due to a lack of sufficient instructional staff, often leading to the hiring of undergraduate teaching assistants (tas) . effective tas significantly impact student retention rates by providing tailored assistance , but not all institutions benefit uniformly from their tas. the challenges of allocating ta time, especially near deadlines or in large classes, are exacerbated by the shortage of k-12 computer science teachers and the lack of appropriate training for k-12 educators interested in teaching cs effectively . this further increases ta and peer instruction demand in flipped computer science classrooms . these factors motivate the automation of teaching tasks by leveraging the capabilities of ai models in understanding and generating language and code.in socratic questioning, a teacher assists a learner trying to solve a problem beyond their zone of proximal development . language models (lms) have been used effectively for generating socratic questions for math word problems, wherein they leverage the sequential structure of steps that compose the solution . other applications of lms include automated feedback on student code submissions , or generating programming exercises, unit tests, and code explanations . however, there still remains a substantial gap in leveraging lms effectively for guiding novice programmers through a coding exercise while maximizing their learning outcomes.in this paper, we focus on the task of socratic questioning for debugging , or socratic debugging, defined as a conversation between a knowledgeable programmer and a beginner student who comes for help fixing a buggy solution for a simple computational problem (section 2). to enable the development and evaluation of lm-based instructional agents, we introduce a manually created dataset of dialogues where the main objective is for the student to repair their buggy code themselves by leveraging guidance received from the instructor at every turn (section 3). however, as originally observed by wilson , "no precise formula, or line of questioning" is needed to achieve the goals of socratic questioning. furthermore, depending also on their expectations with respect to the student's abilities, an instructor can often think of multiple ways of guiding the student at any particular turn in the conversation, leading to a very large space of possible dialogues. to facilitate the automatic evaluation and benchmarking of future socratic questioning systems in terms of their precision and recall, the dataset contributors are asked to provide all alternative utterances that they think could help the student, at every turn in the conversation. this is a complex and cognitively demanding data generation effort, requiring contributors with substantial experience in tutoring beginner programmers. 
we use the current version of the dataset, containing 151 main conversations, to benchmark the socratic debugging abilities of two large language models in the gpt family, namely gpt-3.5 and gpt-4 (section 5), noticing a large discrepancy in performance in favor of the more recent gpt-4. we conclude the paper with related work and limitations.in this paper, we focus on the task of socratic questioning for debugging, or socratic debugging, defined as a conversation between a knowledgeable programmer and a beginner student who comes for help fixing a buggy solution for a simple computational problem (section 2). to enable the development and evaluation of lm-based instructional agents, we introduce a manually created dataset of dialogues where the main objective is for the student to repair their buggy code themselves by leveraging guidance received from the instructor at every turn (section 3). to facilitate the automatic evaluation and benchmarking of future socratic questioning systems in terms of their precision and recall, the dataset contributors are asked to provide all alternative utterances that they think could help the student, at every turn in the conversation.since the focus of this work is on generating socratic guidance and not bug identification or fixing bugs, we assume that the ai agent implementing the instructor also has access to a description of the bug and one or more bug fixes. therefore, at the start of each conversation, we assume the instructor has access to the problem description, a number of test cases, the student's buggy code, the bug description, and one or more bug fixes, as shown below in a sample from our dataset. as there may be multiple ways of guiding the student, the instructor should ideally generate all types of socratic guidance that are different from each other in non-trivial ways and that cover the entire spectrum of potential information given to the student.to facilitate the development of conversational agents that act under the task definition above, we manually created a dataset of dialogues where a student fixes buggy code on his own by leveraging the socratic guidance received from an instructor. given that the aim of this dataset is to benchmark the ability of an artificial instructor agent to generate socratic guidance, it is especially important that the instructor's main and alternative utterances comprehensively explore the entire range of socratic advice at that point in the conversation. since our aim is to create a dataset that can be used to estimate both the recall and precision of a socratic guidance generator, at this stage we decided to require that socratic utterances be literal, leaving the generation of figurative utterances as a direction for future work. to compute ita, we ask one data contributor to write instructor utterances (main and alternative) given the conversation so far as input, for each of the 5 evaluation dialogues. each test example is composed of an input prompt to the language model containing: a steering prompt for socratic questioning adapted from the gpt-4 blog post, the problem description, the buggy code, the bug description, the bug fixes, the unit tests, the dialogue history so far, and an instruction to the language model to generate all possible semantically distinct socratic utterances as the instructor. 
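a compact sketch of assembling such a test prompt and scoring a generated response against the ground-truth instructor utterances; the steering-prompt wording, the model identifier, and the example fields are placeholders rather than the dataset's exact format, and only rouge-l is shown of the three reported metrics:

# illustrative benchmark step: build the prompt, query the model, compare with references.
from openai import OpenAI
from rouge_score import rouge_scorer

client = OpenAI()
scorer = rouge_scorer.RougeScorer(["rougeL"], use_stemmer=True)

def build_prompt(ex, history):
    return (
        "You are a Socratic tutor: guide the student with questions, never reveal the fix.\n"
        f"Problem: {ex['problem']}\nBuggy code:\n{ex['buggy_code']}\n"
        f"Bug description: {ex['bug_description']}\nBug fixes: {ex['bug_fixes']}\n"
        f"Unit tests: {ex['unit_tests']}\nDialogue so far:\n{history}\n"
        "List all semantically distinct Socratic utterances you could say next."
    )

def score_turn(ex, history, references):
    resp = client.chat.completions.create(
        model="gpt-4", messages=[{"role": "user", "content": build_prompt(ex, history)}])
    generated = resp.choices[0].message.content
    # best ROUGE-L against any ground-truth (main or alternative) instructor utterance
    return max(scorer.score(ref, generated)["rougeL"].fmeasure for ref in references)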
this is due to gpt-4 generating more socratic utterances focused on addressing various possible reasons or misconceptions typically while generating utterances that have already been asked or answered, or too early where the student is not yet aware of the issue. also, gpt-3.5 often refers to example cases in problem descriptions when asking for expected output, while gpt-4 does this less frequently. following prior work in socratic sub-question generation, we compute the similarity between an lm utterance and a ground truth utterance in the dataset using bleu for n-gram overlap, bert f1 score for semantic similarity based on the deberta language model, and rouge-l for n-gram overlap based on longest common subsequence (lcs) between generated and reference instructor utterances. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/99.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/99.txt new file mode 100644 index 0000000000000000000000000000000000000000..4561428929a9398a1fcee742ca63121dc9357115 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/99.txt @@ -0,0 +1 @@ +cloth manipulation presents many additional challenges with respect to rigid object manipulation. in particular, the complexity of defining and recognizing scene states dealing with clothes makes getting reliable data very difficult, hindering the training of ai systems and task planners. although learning techniques can benefit from simulation, the transfer to reality has only been successful for simple skills, because simulated cloth differs highly from real behaviour. there have been some works learning from real data using either video and sensory-motor data from a robot performing the manipulation in teleoperation or from demonstrated robot actions connecting different images of the scene. however, they show clear limitations when it comes to generalizing to other tasks or when the scene contains cloth with self-occlusions. it is even less common to learn cloth manipulation tasks from human demonstrations. however, learning from humans would be important to obtain a diversity of strategies to accomplish a task, and with different parameters related to safety, fast accomplishment of the objective or number of steps needed to accomplish a task, inducing a measure of task complexity. learning through human demonstration follows a pipeline similar to fig. 1. large amounts of data could be obtained from human demonstrations in the form of video data and motion data of the hands, but learning from this kind of data is challenging due to the difficulty of annotating data and recognizing cloth states from images. the research leading to these results receives funding from the european research council (erc) from the european union horizon 2020 programme under grant agreement no. 741930 (clothilde: cloth manipulation learning from demonstrations) and is also supported by the spanish state research agency through the maría de maeztu seal of excellence to iri (mdm-2016-0656) and the spanish ministry of science and innovation, project humour (tin2017-90086-r). the authors are with institut de robòtica i informàtica industrial, csic-upc, llorens i artigas 4-6, 08028 barcelona, spain. {jborras, galenya, torras}@iri.upc.edu fig. 1. generic pipeline for learning from human demonstration for manipulation tasks. a good task representation learned from the segmentation of the data can be used for decision making. 
state representations have to be defined to ease state recognition but also to enable action execution. another challenge for cloth manipulation is to find general solutions . most recent attempts to find general approaches consist in end-to-end learning approaches 6] that are still limited to relatively simple tasks with limited selfocclusions, that is, with fabrics laying flat or semi-flat on a table. we believe the key into general solutions is to define an efficient scene state representation (cloud box in fig. 1) to facilitate state recognition but including sufficient parameters for decision-making and action execution. the first contribution of this work is to propose a novel idea to define the scene state in cloth manipulation tasks. the novelty lies in including information on how the cloth is grasped , where it is grasped from, what are the environmental contacts and the possible transitions between them. the second contribution is the cloth manipulation (clom) graph, a graph that can be built using the previous representation to encode all the possible states and transitions of a given manipulation task seen from video demonstrations, enabling to capture the diversity of strategies. we show the feasibility of our approach extracting the graph for two textile manipulation tasks, one of folding a napkin in 3 folds and another to unfold and put a tablecloth, following a recent benchmark . we performed an experiment with 8 subjects that are wearing a gripper and the xsens suit. from the motion data and the labelling of video data we extract a map of strategies, and we generate segmented motion data that contains specific parameters about the arms trajectories, velocities and accelerations of each segment. the labelled video data and segmented motion data will be made public, enabling the training and comparison of state recognition algorithms.the proposed scene representation and the clom graph is also motivated to potentially provide explainability to the decision-making processes, in line with the trustworthy ai from the eu guidelines. as opposed to opaque end-to-end deep learning methods , latent space variables that are difficult to interpret, or learned latent dynamic models from large amounts of random samples that produce plans that are difficult to explain to a human, our clom graph provides a framework that is designed to provide both semantic explanations by construction as well as low-level building blocks to plan a task and execute it. large amounts of data could be obtained from human demonstrations in the form of video data and motion data of the hands, but learning from this kind of data is challenging due to the difficulty of annotating data and recognizing cloth states from images. the second contribution is the cloth manipulation (clom) graph, a graph that can be built using the previous representation to encode all the possible states and transitions of a given manipulation task seen from video demonstrations, enabling to capture the diversity of strategies.finally, regarding the motion semantic label m , we define a set of labels related to the action the subject is performing from that initial state until the following one, like for instance, "place flat on table", "fold on table" or "trace edge". 
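As a rough sketch of how a CloM-style graph can be assembled once the demonstrations are labelled, the snippet below counts (initial state, motion label, final state) transitions across trials; the state tuples and motion labels are invented for illustration and do not reproduce the paper's exact encoding.

```python
# Minimal sketch of building a CloM-style graph from labelled demonstrations.
# Assumes each trial has already been segmented into
# (initial_state, motion_label, final_state) transitions; the state tuples and
# motion labels below are illustrative, not the exact encoding used in the paper.
from collections import Counter

trials = [
    [(("pi_e", "-", "crumpled"), "grasp corner", ("1p", "corner", "crumpled")),
     (("1p", "corner", "crumpled"), "trace edge", ("2pp", "rl+lc", "flat")),
     (("2pp", "rl+lc", "flat"), "place flat on table", ("pi_e", "-", "flat"))],
    [(("pi_e", "-", "crumpled"), "grasp corner", ("1p", "corner", "crumpled")),
     (("1p", "corner", "crumpled"), "trace edge", ("2pp", "rl+lc", "flat")),
     (("2pp", "rl+lc", "flat"), "fold on table", ("pi_e", "-", "folded"))],
]

# Each graph edge is (initial state, motion label, final state); the count records
# how many demonstrations used that transition, capturing the diversity of strategies.
edge_counts = Counter()
for trial in trials:
    for src, motion, dst in trial:
        edge_counts[(src, motion, dst)] += 1

states = {s for (src, _, dst) in edge_counts for s in (src, dst)}
print(f"{len(states)} states, {len(edge_counts)} distinct transitions")
for (src, motion, dst), n in edge_counts.most_common():
    print(f"{src} --[{motion} x{n}]--> {dst}")
```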
they can also be seen in tableibecause for the data we have collected, all states where the cloth is grasped trigger the same action although they may finish in different states, as can be seen in the graph representations.when it comes to cloth manipulation, human experiments provide us with a lot of useful information regarding the variety of strategies to accomplish a task, that is not observed in robot cloth manipulation demonstrations, as analyzed in.thanks to the proposed representation, and extracting the sequences of state and transitions of the labelled video data, we can generate a graph where each node is a scene state, and the edges represent the transition action.to generate the graph, for each trial we defined an edge for each state change, and we represented it symbolically using the formulation introduced in section iii, where each initial and destination states are the initial and end node of the graph edge, and the motion semantic value is the edge label. we can clearly see the different phases of the task, from the crumpled on the table phase on the left, to the central hanging part of the manipulation, and then the semi-folded states on the table during the first, second and third folds, located to the right. the clom graph of the task of unfolding and putting the tablecloth can also be found in the website, in this case, the simplified one has 12 states and 15 transitions, while the full graph has 17 states and 32 transitions, meaning this task is of much less complex than the previous one. as the two tasks are inverse one of the other, only one transition is common in both graphs, the one of "place flat on table" from the central state (2pp, rl+lc, flat) to the (π e , -,flat) that appears 21 times for the tablecloth task and 8 for the folding task. from our data we found two main families of application: the first one, when grasping a single layer of cloth from a flat or folded cloth, the easier corner is first grasped and lightly lifted to then trace the edge towards the other corner. it is a subgraph of the clom graph of both tasks, selecting all the edges with label "trace edge", and their following states until the mid-term objective of placing the cloth flat on the table is reached. from the folded state, it is the left hand that is holding the corner and the right hand that slides, while for e2 is the right hand that holds and the left hand slides. thus, in subsequent research, we plan to use previous work in our group on cloth part recognition and pose estimationand grasping point detectionto perceive the aforementioned manipulation-oriented scene states, including cloth state, grasping point location and confidence values that can provide explanations about the belief in the current state. additionally, this work will lead to a database of labelled video data synchronized with motion data of different cloth manipulation tasks, which could be of great utility for the whole manipulation community working on highly deformable objects. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/990.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/990.txt new file mode 100644 index 0000000000000000000000000000000000000000..97e363af0f4aa02019d9bb4aa3f668a1c52fbaed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/990.txt @@ -0,0 +1 @@ +the evolution of human-computer interaction (hci) has undergone several transformations over the decades, with tech- § equal contribution. 
nology continuously striving to make computers more userfriendly and accessible. from the command-line interfaces of the 1960s to the graphical user interfaces (gui) of the 1980s and, more recently, the touch interfaces on mobile devices, each shift has represented a significant leap towards more intuitive, efficient, and seamless user experiences. today, as we find ourselves at the precipice of another paradigm shift, the question is not whether, but how, we continue to shape this ongoing evolution to ensure a future where technology serves us in increasingly human-centric ways.in the current technological landscape, artificial intelligence (ai) stands as a powerhouse of potential, particularly for augmenting and redefining current operating systems and user interfaces. the abilities of large generative models (lgms), such as large language models (llms) and diffusion models (dms), have given us a glimpse into a future where our interactions with technology transcend the traditional boundaries.llms, built upon vast data sets and sophisticated architectures, are capable of completing complex tasks, demonstrating chain-of-thought reasoning akin to human capabilities, and displaying impressive generalization skills. their proficiency in comprehending and generating language makes them ideal base-reasoners, capable of orchestrating diverse system components to create a seamless, intuitive, and responsive user interface.moreover, with advances in generative computer vision models, especially dms, our toolbox for enhancing humancomputer interaction has expanded. these models can generate incredibly realistic outputs, setting the stage for them to serve as the foundation for user interface generation: the ability to generate personalized interfaces on-the-fly, that cater and adapt to individual user preferences, their character, and mood marks a shift toward highly customized and user-centric design, a shift that promises to enrich user experiences significantly.this new paradigm of human-computer interaction presents exciting opportunities, such as enabling communication between systems that otherwise do not integrate the same api. by utilizing natural language, a universal medium, we can bridge the gap between disparate systems, fostering a more unified, coherent, and efficient interaction landscape. however, this shift in paradigm also brings its share of challenges. a prime example is the need to ensure data persistence within these models. one key question when implementing this new approach is how we can keep a consistent and ongoing dialogue over time, especially when the system is working on complicated or multi-stage tasks. this steady interaction is crucial for a smooth user experience and for building trust in the system's ability to assist the user effectively. to make this possible, we may need to step away from the methods we're used to and start thinking about new ways to improve the performance of these generative models. for instance, current methods of data management, such as storing files explicitly in computers or data centers, may provide some benefits, but they may not fully meet the unique needs of generative models, which store their knowledge implicitly, compressed within their parameters.while the capabilities of llms in understanding and generating language are remarkable, they are not without their limitations. these issues primarily originate from the data employed for their pre-training, which is frequently obtained from web crawls. 
this data can often contain biased, toxic, or harmful content, consequently impairing models' reliability. another limitation is the tendency to hallucinate, i.e., despite not having any explicit misinformation, llms may generate outputs that are not entirely accurate or faithful. this propensity to deviate from the input can occasionally lead to responses that, while contextually plausible, might misrepresent the user's intent or the factual information at hand.moreover, the promise of seamless interaction and communication must balance with considerations of trustability, privacy, security, and ethics. for this reason, developing new protocols for information exchange becomes a necessity in this envisioned future. these protocols must meet and surpass current standards, protecting user data while simultaneously ensuring private and secure interactions. the design of such protocols also must anticipate and be resilient against potential misuse of ai systems, providing robust safeguards to exploitation and unethical practices.these represent just a few of the challenges in harnessing the full potential of llms in revolutionizing human-computer interaction. as we venture into this exciting new territory, it is essential to confront these challenges head-on, ensuring that the solutions we develop are not just technologically advanced, but also reliable, ethical, and user-centric.the road ahead in this new paradigm is both promising and challenging. this paper serves as an exploration into the future of human-computer interaction -a future where our interactions with technology become akin to a natural conversation. we delve deeper into the benefits, challenges, and implications of this envisioned future in the following sections, charting a course for continued research and development in this transformative and exciting direction. in particular, section ii reviews current work in this area, and provides an idea of the current technological landscape; sections iii and iv describe our vision and propose a possible architecture, respectively. section v questions the main challenges that may arise; finally, section vi concludes our discussion. from the command-line interfaces of the 1960s to the graphical user interfaces (gui) of the 1980s and, more recently, the touch interfaces on mobile devices, each shift has represented a significant leap towards more intuitive, efficient, and seamless user experiences.in the current technological landscape, artificial intelligence (ai) stands as a powerhouse of potential, particularly for augmenting and redefining current operating systems and user interfaces. the abilities of large generative models (lgms), such as large language models (llms) and diffusion models (dms), have given us a glimpse into a future where our interactions with technology transcend the traditional boundaries. these models can generate incredibly realistic outputs, setting the stage for them to serve as the foundation for user interface generation: the ability to generate personalized interfaces on-the-fly, that cater and adapt to individual user preferences, their character, and mood marks a shift toward highly customized and user-centric design, a shift that promises to enrich user experiences significantly. 
for instance, current methods of data management, such as storing files explicitly in computers or data centers, may provide some benefits, but they may not fully meet the unique needs of generative models, which store their knowledge implicitly, compressed within their parameters. don't show me all the options; propose the cheapest one directly" 2) a c to : "my user would like a flight to paris, between the 16th and 17th of july, preferably in the evening, at a cost not exceeding 120 usd. given that these models can be trained to generate a wide range of visual outputs, they could be directed to design interfaces that echo a user's aesthetic preferences or adapt to their current mood. users could convey their needs verbally, and the system could respond in kind, further blurring the lines between human-computer interaction and human-human conversation.for instance, in the previously discussed scenario, the user could verbalize their request for a flight booking, and the client agent (a c ) could acknowledge, confirm, and execute these instructions using spoken language. this seamless integration of speech-to-text and text-to-speech models would provide an interaction experience that is not just intuitive but also highly efficient, especially for users with visual impairments or those who are occupied with other tasks and prefer to interact verbally with their devices. with advancements in ai and ml, the future of hci could encompass an array of sensory interactions, each tailored to individual user needs and preferences, creating an immersive and inclusive technological environment.• graphical processor based on diffusion models: this caters to visual tasks, allowing for the generation and interpretation of personalized user interfaces and graphical content, ensuring a multi-modal interaction platform. challenges developing an operating system that integrates generative ai models like the one sketched in figure1promises to reshape system design dramatically.the evolution of human-computer interaction, enhanced by the capabilities of lgms such as llms and dms, has the potential to reshape system design and the dynamics of communication, interaction, and collaboration between users and machines. through the integration of ai into operating systems, we envision a future where interfaces are not only intuitive but also deeply personalized, adapting to individual needs and preferences, allowing for seamless and coherent interactions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/991.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/991.txt new file mode 100644 index 0000000000000000000000000000000000000000..9e758d3bf3e71ab0dca1053764890c823ebed30f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/991.txt @@ -0,0 +1 @@ +"customer is king" is the ancient mantra reflecting the significance of customers in every business. in the digital age, where the rhythms of modern life are guided by the pulse of technology, the realm of customer service stands as the frontline of engagement between businesses and their clientele. it is the place where queries are answered, problems are resolved, and trust is forged.this research paper brings the future of customer service, where automation, personalization, and responsiveness converge to redefine the customer-company relationship. 
at the heart of this transformation lies the integration of llms, exemplified by langchain .in the annals of customer service history, faqs and traditional support mechanisms have long held sway. these venerable tools have dutifully served as repositories of information, attempting to address the queries and concerns of customers. however, as we stand at the cusp of a new era in customer service automation, it becomes abundantly clear that the traditional methods once hailed as revolutionary, are gradually becoming obsolete. this paper is an invitation to envision a future where customer service is not a cost center but a wellspring of customer satisfaction and loyalty. we propose an open-source framework that can be scaled to any industry or organization to fulfill the consumer needs for support and query resolution within seconds.for demonstration purposes, we use the information presented by birla vishvakarma mahavidyalaya (bvm) engineering college on their website https://bvmengineering.ac.in/ as the context for our chatbot, from where it can retrieve all the information in real-time and answer to any queries that are raised by the users. here, users can be anyone ranging from prospective students, current students who intend to get information from the notice board, researchers who wish to search for their potential research guide, and so on. the applications are endless. ii. literature survey s. kim (2023) et al addresses the challenge of deploying resource-intensive large neural models, such as transformers, for information retrieval (ir) while maintaining efficiency. experimental results on msmarco benchmarks demonstrate the effectiveness of this approach, achieving successful distillation of both dual-encoder and cross-encoder teacher models into smaller, 1/10th size asymmetric students while retaining 95-97% of the teacher's performance . l. bonifacio et al (2022) highlights the recent transformation in the information retrieval (ir) field, propelled by the emergence of large pretrained transformer models. the ms marco dataset played a pivotal role in this revolution, enabling zero-shot transfer learning across various tasks . this paper proposed a novel open-source approach to building llm chatbots using custom knowledge from the content in the website. it is unique in several ways:1. we propose an open-source framework which is robust with the type of dataset available on the webpage or the web of links.2. this implementation aims to compliment the use of faqs with a more interactive and user-friendly interface.3. we then do a comparative study of various models, their performance on the provided data relative to the expected response from the llm.submitted to the 3rd international conference on "women in science & technology: creating sustainable career" 28 -30 december, 2023 in the digital age, where the rhythms of modern life are guided by the pulse of technology, the realm of customer service stands as the frontline of engagement between businesses and their clientele.this research paper brings the future of customer service, where automation, personalization, and responsiveness converge to redefine the customer-company relationship.in the annals of customer service history, faqs and traditional support mechanisms have long held sway. however, as we stand at the cusp of a new era in customer service automation, it becomes abundantly clear that the traditional methods once hailed as revolutionary, are gradually becoming obsolete. 
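The retrieval-based chatbot described above can be sketched in a framework-agnostic way; the paper builds it with LangChain, whereas the snippet below substitutes sentence-transformers plus cosine similarity for the retriever and leaves the final LLM call as a stub. The page text, chunk size and embedding model name are illustrative assumptions.

```python
# Framework-agnostic sketch of the retrieve-then-answer pattern described above.
# The paper implements this with LangChain; here sentence-transformers + cosine
# similarity stand in for the retriever, and the LLM call is left as a stub.
from sentence_transformers import SentenceTransformer
import numpy as np

site_text = (
    "Admissions open for the 2024 intake. "
    "The notice board lists exam schedules and scholarship deadlines. "
    "Faculty profiles include research interests for prospective research guides."
)

def chunk(text: str, size: int) -> list[str]:
    words = text.split()
    return [" ".join(words[i:i + size]) for i in range(0, len(words), size)]

chunks = chunk(site_text, size=12)
encoder = SentenceTransformer("all-MiniLM-L6-v2")
chunk_vecs = encoder.encode(chunks, normalize_embeddings=True)

def retrieve(query: str, k: int = 2) -> list[str]:
    q = encoder.encode([query], normalize_embeddings=True)[0]
    scores = chunk_vecs @ q  # cosine similarity, since vectors are normalized
    return [chunks[i] for i in np.argsort(-scores)[:k]]

question = "Where can current students find exam schedules?"
context = "\n".join(retrieve(question))
prompt = f"Answer using only this context:\n{context}\n\nQuestion: {question}"
print(prompt)  # in the full system this prompt would be sent to the chosen LLM
```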
this paper is an invitation to envision a future where customer service is not a cost center but a wellspring of customer satisfaction and loyalty. here, users can be anyone ranging from prospective students, current students who intend to get information from the notice board, researchers who wish to search for their potential research guide, and so on.kim (2023)et al addresses the challenge of deploying resource-intensive large neural models, such as transformers, for information retrieval (ir) while maintaining efficiency. experimental results on msmarco benchmarks demonstrate the effectiveness of this approach, achieving successful distillation of both dual-encoder and cross-encoder teacher models into smaller, 1/10th size asymmetric students while retaining 95-97% of the teacher's performance.bonifacio et al (2022)highlights the recent transformation in the information retrieval (ir) field, propelled by the emergence of large pretrained transformer models. this approach allowed us to collect a wide array of data, including customer service faqs, product manuals, support forums, chat logs, associated institutions, and so on.customer service establishes a direct connection between the customer and the company. the langchain finetuned over custom knowledge of the product, service, or organization can effectively address a wide array of customer inquiries and issues.in the ever-evolving landscape of customer service, the introduction of sahaay's innovative approach presented in this paper, using langchain as a prime example, ushered in a new era of automation. automating customer service using sahaay's open-source large language architecture leveraging langchain revolutionizes the customer-company relationship and cx. it enables companies to provide efficient, personalized, and responsive support, ultimately leading to customer retention, increased customer value, and a more positive brand image. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/992.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/992.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b44fbaf3606d52ac243b4254209d19e0a1e5a20 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/992.txt @@ -0,0 +1 @@ +privacy risk assessments (pras) have steadily gained prominence as an instrument to help and encourage organizations to develop privacy-preserving systems and services. touted as a much needed departure from the rigid command-andcontrol regulatory model, pras are said to embody a more agile and dynamic approach to privacy regulation. this model, variously referred to as metaregulation or collaborative governance, seeks to leverage knowledge and expertise from organizations, as well as push responsibility onto them, encouraging cooperation and trust with the regulator in a light supervisory role at the same time, by encouraging organizations to assess privacy risks before they implement and deploy their services, pras are considered a key tool in implementing privacy-by-design . indeed, pras have recently gained renewed support across both sides of the atlantic, with the eu enshrining risk assessments in the gdpr, and the us's nist proposing risk assessment as the key tool in their privacy engineering program .however, in spite of the institutional endorsement, the actual benefits of adopting pras remains unclear. 
recent empirical work as well as the constant trickle of privacy breaches, scandals and litigation, suggests that privacy impact assessments (pias) may not be as effective as expected . legal scholars and political scientists have long theorized about the necessary conditions for pias to succeed factors such as transparency, accountability, and a systematic, principled approach to risk assessment, e.g. recital 76 of the gdpr specifies that "isk should be evaluated on the basis of an objective assessment" .this emphasis in objectivity further hints at a technocratic approach to privacy regulation that pervades pia scholarship . early on, pia proponents lamented the lack of standardized guidelines and trained professionals, as well as the vagueness and arbitrariness of existing pia . privacy scholars argued that a systematic and (somewhat) objective approach to risk assessment could however turn the tide and not only help organizations manage privacy risks, but render pias more successful overall . yet this ambition has also failed to materialize. privacy harms remain elusive to model and quantify. 1 we do not seem today any closer to having something even resembling a science of privacy risks or actuarial models for privacy than 30 years ago. pia has remained more of an art than a science, learned by craft and practice rather than formal methods.this paper revisits and re-examines pra as a tool in the regulatory arsenal, providing the following contributions. firstly, we question whether pras can, today, deliver meaningful results in terms of increased privacy protections. we investigate the extent to which pras (1) may be weaponized by adversarial organizations as a tool of performative compliance , and (2) are amenable to the kind of systematic audit processes that collaborative governance requires. we observe that existing guidelines introduce (to borrow green's terminology) countless sites of discretion that organizations can weaponize to escape meaningful privacy intervention and rendering them, as a result, hard to audit .secondly, we examine the role of risk assessments within the field of privacy engineering . privacy engineers have become adept at minimizing privacy risk through privacy enhancing technologies . however, privacy engineering does not support-cannot support-the broader scope of risk assessment that methodologies such as nist's privacy risk assessment methodology (pram) and regulatory efforts such as the gdpr promote. when it comes to assessing the legitimacy of a service or functionality, privacy engineering is mute.thirdly, we resort to nissenbaum's theory of contextual integrity (ci) to show that pras have a disruptive potential insofar as they can weaponize the lack of empirical evidence on privacy harms to upend existing norms and expectations around privacy . we further argue how ci is in fact a pra of sorts, even if devoid of the pseudoscientific claims and technocratic ideals that pra embody, and may offer hints to a better deliberation framework to guide privacy design.furthermore, to support and illustrate the points above, we provide a stepby-step analysis of nist's pram, while nist's pram is just one among many guidelines and methods of pra, nist's normative authority as a standard-setting agency makes it a perfect candidate for evaluation and reflection. moreover, we argue that similar lessons can be drawn from existing alternatives . 
at the same time, our goal is not to dismiss privacy risk assessments as an instrument; rather, we aim to identify and be more systematic about the challenges and sites of discretion that one may consider for pras to become an effective tool of regulation. privacy engineers have become adept at minimizing privacy risk through privacy enhancing technologies. however, privacy engineering does not support-cannot support-the broader scope of risk assessment that methodologies such as nist's privacy risk assessment methodology (pram) and regulatory efforts such as the gdpr promote. we use the term privacy risk assessment (pra) to refer to the explicit evaluation of privacy risk, understood as the product of two components: the likelihood and impact of those privacy harms. raab aimed to "take the practice and theory of privacy protection beyond the stage of the merely casual use of the term 'risk', namely, "a more nuanced understanding" that would enable us to "estimate different degrees of privacy risk" beyond vague statements such as "cookies pose a threat to privacy". an increasingly rich literature on the behavioral economics of privacy has offered empirical evidence for factors that may explain the failure of notice-and-consent as a regulatory mechanism for privacy: end-users are often unable to estimate privacy risks because of incomplete and asymmetric information, present-bias and the ease with which the selective disclosure of information may skew their perception of privacy risks. and yet, as acquisti et al.6within this program, nist has made available several resources, among which three main guidelines: first, a privacy framework, which describes at high-level the sort of organizational processes and structures that organizations deploy to integrate a privacy risk assessment within their risk management strategy. nistir 8062 addresses both the differences and overlap between information security and privacy, introduces the notion of risk as the product of likelihood and impact of adverse privacy effects and emphasizes that "measurability matters, so that agencies can demonstrate the effectiveness of privacy controls in addressing identified privacy risks". 13 critiques of the taxonomization approach point out that by grounding privacy problems in social recognition rather than a formal definition, the taxonomical approach tends to identify harms as privacy-related insofar as social commentary identifies them as so, arbitrarily excluding harms that may later become accepted as privacy-related, while failing to account for the tensions among harms that fall under the privacy umbrella. first, privacy harms may simply not translate into costs for an organization; organizations may also be able to externalize these costs and thus ignore privacy. in mitigating reputational costs, an organization may invest in pr rather than privacy; in avoiding regulatory fines, an organization may lobby public officials to weaken privacy regulations. because data is a non-rivalrous and non-excludable good, as well as the relational nature of privacy, constraining privacy risk to an organization's system or operations may lead to unreliable assessments.in short, privacy engineering enables us to mitigate privacy threats insofar as the functionality we wish to implement does not lead to privacy risks in itself. 
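The likelihood-times-impact scoring that PRAM and NISTIR 8062 call for can be illustrated with a toy worksheet; the data actions and scores below are invented, and the paper's point is precisely that grounding such numbers empirically is hard.

```python
# Toy illustration of the likelihood-x-impact scoring that NISTIR 8062 / PRAM
# describe: each problematic data action gets a likelihood and an impact estimate,
# and risk is their product. The actions and numbers are invented for illustration.
data_actions = {
    # action: (likelihood of a problem for individuals, impact if it occurs), both on a 1-10 scale
    "retain raw location traces": (7, 8),
    "share usage logs with analytics vendor": (5, 6),
    "display full name on public profile": (3, 4),
}

risks = {action: lik * imp for action, (lik, imp) in data_actions.items()}
for action, score in sorted(risks.items(), key=lambda kv: -kv[1]):
    print(f"{score:3d}  {action}")
```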
that is why trying to pass privacy risk assessments off as part of privacy engineering practice is so insidiously perilous: the privacy literature is littered with statements that reinforce this notion. in this paper we have examined privacy risk assessment (pra) as an instrument of collaborative governance, taking nist's privacy risk assessment methodology (pram) as a case study. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/993.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/993.txt new file mode 100644 index 0000000000000000000000000000000000000000..24e4c479b524d92402162b709954f5ac2929f77b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/993.txt @@ -0,0 +1 @@ +the goal of this draft paper is to build on work previously conducted by the author on performing a thematic analysis (ta) of qualitative data with a large language model (llm) (see de paoli, 2023a and 2023b), following the 6 phases of ta proposed by braun and clarke (2006). in the previous work i showed it is possible to perform phases 2-5 of a ta (initial coding, generation of themes, theme validation and summarisation) using an llm, with some degree of validity (de paoli, 2023a). moreover, i also showed that it is possible to perform phase 6 of a ta, which relates to the writing up of the results (de paoli, 2023b). in relation to phase 6 specifically, in my previous work i proposed a workflow to write user personas with an llm, based on the results of a ta of qualitative interviews also conducted with the support of the llm. the previous work presented, however, a number of limitations, and it was in essence an initial attempt to perform phase 6 of a ta, whilst consolidating phases 2-5. however, i believe there is scope to explore the workflow for writing user personas further and to attempt to produce far more articulated and composite persona narratives in phase 6. this is the goal of this manuscript. the impetus for this work comes from recent developments in the areas of user experience (ux) and llms. an online post which has captured the attention of the ux community, entitled "can chatgpt replace ux researchers? an empirical analysis of comment classifications", reported the results of performing a ta of a relatively small set of online user comments, comparing the themes produced by the llm and by human analysts and showing overall a good agreement between the codes using the cohen k measure (schiavone et al., 2023). in part this also confirms my previous observation that, to an extent, it is possible to perform some form of ta with an llm, with some degree of inter-rater reliability between humans and llms. most interestingly, the post was picked up by jakob nielsen, one of the founding researchers in the field of usability, who commented that "this doesn't mean that chatgpt can analyze user *behavior*, only that it is likely a time-saver in grouping non-behavioral questionnaire responses. (chatgpt is known to be great at summarizing and classifying text without understanding what it means.)" it is possible that nielsen, when using the word 'behavior', was referring specifically to usability. nonetheless it is an interesting problem to explore whether an llm such as gpt-3.5-turbo could indeed capture some aspects of users' behaviours (or not) as part of performing a ta, and if we can reflect such behaviours in personas, a key ux tool.
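As a hypothetical sketch of the persona-writing step discussed above (phase 6), the snippet below packs themes from the earlier analysis phases into a single prompt and asks the model for a persona narrative; it uses the legacy openai<1.0 ChatCompletion interface that was current in 2023, and the themes and prompt wording are illustrative rather than the author's actual prompts.

```python
# Hypothetical sketch of the persona-writing step: themes produced in phases 2-5 are
# packed into one prompt and the model is asked to write a persona narrative.
# Uses the legacy openai<1.0 API; assumes OPENAI_API_KEY is set in the environment.
# Themes and prompt wording are illustrative, not the author's actual prompts.
import openai

themes = {
    "needs": "reliable, practical advice on crop protection from trusted peers",
    "challenges": "little time to read long technical documents during the season",
    "behaviours": "checks a farming forum on the phone in the evening, asks neighbours first",
    "personality": "pragmatic, sceptical of top-down advice, values autonomy",
}

prompt = (
    "Using the themes below from interviews with European farmers, write a one-page "
    "user persona with a name, background, goals, behaviours and personality traits.\n\n"
    + "\n".join(f"- {k}: {v}" for k, v in themes.items())
)

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo-16k",   # larger-context variant, so all themes fit in one prompt
    messages=[{"role": "user", "content": prompt}],
    temperature=0.7,
)
print(response["choices"][0]["message"]["content"])
```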
it may actually be possible to test if a ta of user interviews done by an llm can pick up aspects related with the user behaviour or the user personality traits, in order to build a user persona. in my previous work, for building personas i concentrated on needs and challenges only (i.e. what nielsen probably call non-behavioural responses), largely for demonstrative purposes (i.e. demonstrating we could build some sort of textual output with llms, using the results of a data analysis). these two dimensions clearly did not encompass any specific behavioural trait or the personality aspects of a persona.the second factor giving impetus to this work is the recent release (july 2023) of a version of the llm gpt3.5-turbo, which can accommodate a much large number of tokens (16 thousand) compared to the version i used in my previous work (which did accommodate 4097 tokens only). while the underlying logic of the larger llm is exactly the same as the smaller one, the first then allows much larger prompts and responses, thus possibly allowing to work with more material as input and to produce much richer and extended textual narratives as output.in the following pages i will first present some basic elements of the state-of-the-art on using llms for qualitative analysis or of coding of textual material, and of user personas and llms. i will then briefly present some key methodological aspects of this work, before turning to reflect on an improved process of building personas from a ta, done with an llm. the data for this research is a set of user interviews with european farmers conducted by the h2020 project eureka (vago and spanoghe, 2023). the interview transcripts are available as open data from zenodo. the paper concludes with a discussion reflecting on the use of llms for building personas and the relation to the broader field of ux research.the goal of this draft paper is to build on work previously conducted by the author on performing a thematic analysis (ta) of qualitative data with a large language model (llm) (see de paoli, 2023a and 2023b), following the 6 phases to ta proposed bybraun and clarke (2006). in relation to phase 6 specifically, in my previous work i proposed a workflow to write user personas with an llm, based on the results of a ta of qualitative interviews also conducted with the support of the llm. an online post which has captured the attention of the user experience (ux) community, entitled "can chatgpt replace ux researchers? an empirical analysis of comment classifications", reported the results of performing a ta of a relatively small set of online user comments comparing the themes produced by the llm and by human analysts, showing overall a good agreement between the codes using the cohen k measure(schiavone et al.in the following pages i will first present some basic elements of the state-of-the-art on using llms for qualitative analysis or of coding of textual material, and of user personas and llms.in this section we will briefly discuss key aspects on the state of the art around doing qualitative analysis with the support of an llm, and literature and other sources related to the use of llms in user research and then focus on some key literature elements of user personas. thus, further confirming previous observations that coding and even ta can be performed satisfactorily with llms, albeit in this case on a small sample of online comments., 2021, for a review;mcginn and kotamraju 2008, for an early paper; jensen at al. 
however, the personas written with llms that i am proposing are based on qualitative data analysis, rather than on quantitative data and related analysis. perhaps, one could see then the proposition to build personas with an llm as a middle ground approach in-between the entirely qualitative personas, hand crafted by researchers, and the "data-driven/automatic" personas, produced with substantial work delegated to algorithms and relying on big data. indeed, those personas created with llms, without any underlying user data are fictional, but not realistic as they are not based on actual user data. in this work i did not operate phases 4 of thematic analysis, which relates to revising the themes, the "problem solving skills" thus might have been dropped in phase 4, and i believe generally therefore that the fact that very few time may not match with the definition of what the model was supposed to capture, does not generally invalidate the model's capacity to perform at least satisfactorily the analysis of user interview data. as we started the paper reflecting on the observation by a leading ux scholar that llms may not be able to capture aspects of the user behaviour, i would suggest that llms can with proper prompting and good quality data identify at least some aspects of user behaviour (and personality traits) that can serve some of the work of ux researchers.as previously argued (de paoli 2023b), the personas that can be generated using llms and qualitative data should probably be assumed more as canvas than finished products, to be reviewed and potentially enriched by researchers/practitioners, before to be used for the ux process.as discussed in the literature review, llms generated personas, based on qualitative data and analysis could probably be seen as a middle ground between the "more traditional" qualitative personas crafted entirely by researchers from qualitative data (and related analysis) and the datadriven personas, reusing a pool of existing data such as analytics or surveys, and often crafted with the support of algorithms. this middle ground is perhaps evident, since the llms generated personas can rely on the richness of qualitative data, but at the same on the capacity of ai solutions to scale up the process of personas creation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/994.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/994.txt new file mode 100644 index 0000000000000000000000000000000000000000..3e593257720551c83683182bfa40a1f00b0f6926 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/994.txt @@ -0,0 +1 @@ +social media platforms are not only used by billions for communication but has become an alternative platform (compared to tv, print media) to receive news . these platforms have also being misused for spreading misinformation , and its various forms such as fake news , , rumors , , misleading news , etc. with the sheer pace of misinformation generated manual fact-checking can not keep up. so, automated fact-checking must be employed to tackle the menace of misinformation. one of the most prominent ways is through the usage of machine learning (ml) techniques , . the majority of such techniques handle the context of the misinformation for its detection. for example, investigating what kind of users propagate the misinformation or processing the content with natural language processing (nlp) techniques to derive clues from the way the misinformation is written, etc. 
it is important to mention that the rise in misinformation has brought fact-checking into focus, as it helps in controlling the spread of misinformation. providing explanations for fact-checking tools builds trust among people and offers better usability. examples of the works related to explanations include formulating the task of explanation generation as an extractive-abstractive summarization task using transformer models. these particular works deal with specific domains such as political news and public health. in addition, to use these approaches, the datasets used in these studies have been annotated with gold-standard justifications by journalists. this approach is not scalable, as annotation is a costly task. alternatively, knowledge graphs (kgs) have also been employed to provide a mechanism in which reliable third-party sources such as scientific articles and wikipedia are used for fact-checking. however, the use of reinforcement learning (rl) in such cases has not garnered much attention. in this work, we propose an rl-based kg reasoning approach for explainable fact-checking that employs an rl agent to produce explanations in the form of relevant knowledge from the kg, expressed as a path. the paths are arrived at by a technique from the field of kg reasoning known as multi-hop reasoning, where an rl agent hops over facts in an attempt to connect two nodes that correspond to real-world entities. the path describes the relation between those two entities, and this path is used to classify a statement as true or false and also provides a human-readable explanation of the classification result. to summarize, the main highlights of this work are as follows: 1) to the best of our knowledge, this is the first work that has used a multi-hop reasoning agent for explainable fact-checking. we use two kgs (fb15k-277 and nell-995) that are commonly used in multi-hop reasoning tasks to evaluate the performance of the proposed model. the kgs represent real-world relationships; sets of true facts are gathered from these kgs, and corresponding subsets of false facts are generated to simulate false claims. 2) the results show that a multi-hop reasoning model can successfully check facts and provide highly human-readable explanations for the classifications. the agent is also able to provide explanations and a basis for classification which is not directly tied to any specific fact-checking task. the rest of the paper is organized as follows. section ii discusses the related works. next, in section iii we describe the methodology of the proposed approach, followed by the experiments and their results in section iv. we conclude in section v with some future directions. the path describes the relation between those two entities and this path is used to classify a statement as true or false and also provides a human-readable explanation of the classification result. the task of looking for a missing tail entity is more similar to our work of finding a path that leads from a given head entity to the desired (true) tail entity.
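A toy version of the path-as-explanation idea is sketched below: in the actual framework a trained RL policy chooses each hop, whereas here a trivial scorer stands in for it, and the miniature KG and claim are invented for illustration.

```python
# Toy illustration of multi-hop path extraction as explanation. A real system trains
# an RL policy to pick the next edge; here a trivial scorer stands in for it, and the
# tiny KG and the claim are invented for illustration.
TOY_KG = {
    "dick_cheney": [("leads_organization", "halliburton")],
    "halliburton": [("organization_in_sector", "oil_services")],
    "brendan_shanahan": [("plays_for_team", "devils")],
    "devils": [("team_plays_sport", "hockey")],
}

def extract_path(head, target_tail, max_hops=3):
    """Greedily walk the KG from the head entity; the returned path is the explanation."""
    path, current = [], head
    for _ in range(max_hops):
        candidates = TOY_KG.get(current, [])
        if not candidates:
            break
        # stand-in for the learned policy: prefer an edge that reaches the claimed tail entity
        relation, nxt = max(candidates, key=lambda e: e[1] == target_tail)
        path.append((current, relation, nxt))
        current = nxt
        if current == target_tail:
            return True, path
    return False, path  # inconclusive or refuting path

claim = ("brendan_shanahan", "plays_sport", "hockey")
verdict, evidence = extract_path(claim[0], claim[2])
print("claim supported:" if verdict else "claim not supported:", evidence)
```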
inversely, the proposed approach saves resources by integrating relevant subgraph extraction as a part of the explainable classification process. (fig. 1, proposed framework: "s" stands for the state, which contains the input claim and the path that is built starting from the head entity; "r" stands for the reward, which is used to update the agent's policy function at the end of the episode; "a" stands for the action, which the agent sends to the environment.) the path extraction process follows multi-hop reasoning practices in which a policy-based agent sequentially extends an inference path. in contrast to traditional multi-hop reasoning tasks where one part of the claim is hidden from the agent, the proposed framework presents the agent with the entire claim and, in addition, the inference path, as it is being formed, as observations. policy-based agent: the policy-based agent receives a claim as input and, in return, extracts a path from the kg. the agent considers an observation, comprising the input claim and current path, to decide upon an action with which to extend the current path, after which the path is updated, and the agent receives an updated observation. by doing this, the search algorithm takes into account how fitting the resulting entity is as a correct tail entity for the evidential path by scoring it as the tail entity in the input triple. this practically excludes the possibility of a tie between the contending tail entities and adds leverage to the entities which the agent is more confident in and which received a higher score from the kg triple scoring function, which is used to evaluate whether a given triple belongs to the kg or not and is used to fit vector embeddings to entities and relations. nonetheless, the results demonstrate that voting on an answer reduces accuracy, as the correct path and target entity may be correctly identified but still be undermined by a higher majority of an entity in the vicinity of the source entity. example 1: kg contains outdated information. claim: dick cheney works for retailstore halliburton. path: dick cheney -leads organization → retailstore halliburton. the agent has found a correlation between a person leading an organization and working for the organization. path: brendan shanahan -plays for team→ the devils -team plays sport→ hockey. this is a good example of the agent finding a path that does not just contain a (near) synonym for the claimed relation (like works for and leads organization), but finding a more complicated path. path: amir taheri → amir taheri. the agent is unable to move beyond the source entity, resulting in an inconclusive path. with beam search also in place, it is very common that one path will result in the agent staying put for the entirety of the path. for this specific case, the ground truth is that ben folds originates from winston-salem, so the first path leads to the true target and the second path reinforces the "false claim". \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/995.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/995.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a71685800022c8537fbc0de75588f132de58ee6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/995.txt @@ -0,0 +1 @@ +content moderation has been thrust to the center of public discourse about online platforms and their impacts on society.
once an esoteric task undertaken by social media companies with little external attention, content moderation is now seen (depending on whom you ask) as a means to fight misinformation, protect vulnerable communities, or manipulate public opinion.in most jurisdictions, governments require platforms to moderate certain kinds of content, like child sexual abuse material and copyright infringement. however, platforms have wide latitude to choose moderation policies beyond this. in the u.s., platforms are not (as of yet) required to protect individuals' freedom of expression: the first amendment only restricts the ability of government to limit speech, not that of companies. in addition, section 230 of the communications decency act protects social media companies from being sued for their decisions to host or remove a wide range of content (aside from what is required by law). in other countries, regulations vary, but platforms in general have significant discretion over their moderation policies. content moderation in some form is a nearly universal feature of social media platforms , and a fundamental force affecting life online.naively, the effects of content moderation on platform speech are simple: content that violates platform policies is removed. however, when participation decisions are factored in, the effects may be more complicated. on the one hand, users who derive most of their enjoyment from banned content may decide not to participate, or they may move to another platform. on the other hand, mainstream users might increase their participation on the platform if they see less content they dislike. thus, moderation policies will not have a uniform impact on individual decisions to participate and can lead to subtle and delicate interactions between user participation decisions. indeed, changing membership in online communities may be key to understanding the societal impacts of social media. for example, waller and anderson found a large influx of right-wing users was responsible for increased polarization on reddit after the 2016 us presidential election. similarly, online communities saw a massive increase in participation before the january 6 insurrection . one goal of this work is to provide a framework to reason about how a platform's choices for its content moderation policy affect these dynamics.platforms seem to acknowledge that choices of moderation policies dramatically affect their ability to attract and retain users and communities. founders of gab, parler, and truth social have marketed their platforms as free speech advocates, with less restrictive moderation policies. similarly, during his acquisition of twitter, elon musk promised to reduce the scope of the platform's moderation policies. in many other cases, highly restrictive rules are used to appeal to users seeking specific kinds of content, including contexts where political partisanship or toxicity are not typical concerns. for example, the subreddit r/aww features a moderation policy banning "sad" content so that it may better serve users seeking "cute and cuddly" pictures and videos.competition between multiple platforms adds further complexity to the dynamics of content moderation. heterogeneity in moderation policies between different platforms may create a market for rules : platforms set rules and users can choose the platform with rules that best reflect their preferences. 
if a platform restricts speech that a user wants to engage in, then the user can leave and join a competing platform in which that speech is allowed. likewise, a user who encounters speech they find offensive can leave for a platform on which such speech is prohibited. this competition has led to striking changes to online communities, like the high-profile formation of gab, parler and truth social. indeed, empirical evidence suggests that users deplatformed from twitter migrated to gab and saw increased activity and toxicity, although the overall size of communities deplatformed from reddit and youtube seems to have decreased. content moderation policies are thus a key tool for platforms to survive and thrive, users critically shape the effect of these policies through their preferences and behavior, and these dynamics have important societal implications. the present work: modeling content moderation in online communities. we present and analyze a simple, tractable model in which platforms set policies in order to build and maintain communities. the interaction between user participation decisions and content moderation policies is central to our framework and analysis, allowing us to explain fundamental and counter-intuitive phenomena about platform moderation decisions. for example, we can explain the basic observation, supported by empirical evidence, that moderation - even though it may deliberately remove some users - may foster much larger communities as a result: there are cases in which a platform can sustain a large user base under a carefully-chosen moderation policy while, without moderation, almost no users would participate. similarly, our model can explain how the range of content available on a moderated platform may be greater than on one without it. in our model, content is associated with points in an ambient metric space; each user produces content from a subset of this space, and they also have a subset of this space corresponding to the set of content that they are willing to consume. for simplicity, we will restrict our attention to the case in which the ambient space is a one-dimensional axis, each user speaks from a single speech point and each user derives positive utility from content within an interval on this axis. users derive utility when they encounter speech that they like and disutility when they encounter speech that they dislike; a user will join a platform if they would derive nonnegative utility from it and will leave otherwise. notice that decisions to join, stay or leave create externalities for other users; when a user joins or leaves, their choice can change the utilities of other users (either positively or negatively), and this can potentially lead to a sequential cascade of arrivals or departures. we next consider the content moderation policies available to a platform. we focus on a natural class of policies that satisfy a pair of properties: 1. speech-based: a moderation policy depends only on users' speech, not on what they prefer to consume. 2. convex: if speech at points x and z is allowed, then speech at any y ∈ [x, z] should also be allowed. moderation policies that satisfy these properties can be specified by intervals: the platform deems a (possibly infinite) interval of speech permissible, and all users with speech points outside that interval are removed from the platform. we will call such an interval a moderation window, or window for short.
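A small simulation of this one-dimensional model is sketched below. The exact utility weights are not fixed by the description above, so as an assumption each present user a person likes contributes +1 to their utility and each disliked user contributes -2; users iterate join/leave decisions to a fixed point, and a moderation window simply bans speech outside an interval. With these invented numbers the windowed platform sustains a larger community than the unmoderated one, matching the phenomenon described above.

```python
# Minimal sketch of the one-dimensional model just described. Each user has a speech
# point and an interval of speech they like. Assumption: +1 utility per present user
# they like, -2 per present user they dislike (the paper leaves the weights general);
# a user participates while their total utility is nonnegative.
from dataclasses import dataclass

@dataclass(frozen=True)
class User:
    speech: float
    lo: float  # interval of speech this user likes: [lo, hi]
    hi: float

def utility(u: User, members: set) -> int:
    others = [v for v in members if v is not u]
    return sum(1 if u.lo <= v.speech <= u.hi else -2 for v in others)

def stable_set(users, window=(-float("inf"), float("inf"))):
    """Remove users outside the moderation window, then iterate join/leave decisions."""
    allowed = [u for u in users if window[0] <= u.speech <= window[1]]
    members = set(allowed)
    for _ in range(100):  # iterate to a fixed point (a cascade of arrivals/departures)
        changed = False
        for u in allowed:
            want_in = utility(u, members | {u}) >= 0
            if want_in and u not in members:
                members.add(u); changed = True
            elif not want_in and u in members:
                members.discard(u); changed = True
        if not changed:
            break
    return members

# A mainstream cluster around 0 plus a few extreme speakers whom everyone else dislikes
# but who like everything (so they never leave on their own).
mainstream = [User(s, s - 1.2, s + 1.2) for s in (0.0, 0.25, 0.5, 0.75, 1.0)]
trolls = [User(4.0 + 0.1 * i, -10.0, 10.0) for i in range(3)]
population = mainstream + trolls

print("no moderation:", len(stable_set(population)))                      # 3 (only the trolls remain)
print("window [-2, 2]:", len(stable_set(population, window=(-2.0, 2.0))))  # 5 (the mainstream cluster)
```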
we define and discuss the basic properties of the model in section 2.using this model, we derive a series of results exploring a platform's ability to curate communities through its choice of moderation policy. in section 3, we compare the effectiveness of window-based moderation to a theoretical optimum, the largest number of users such that all users in the set would have nonnegative utility with respect to the other users in the set. we also show that best window-based moderation policy can be approximated in a scalable way using a small sample from the population. in section 4, we introduce an analysis of how much of a population a platform may lose if it does not implement moderation. additionally, we discuss how platforms would choose moderation policies if platforms themselves have preferences over user speech.in section 5, we study the effect of personalization on a platform's choices of content moderation policies. we show that increased personalization may counter-intuitively decrease the size of an online community by shifting the distribution of content in ways that drive mainstream users away, even for the best choice of moderation window for a given level of personalization. thus, our results provide a justification for limited personalization for some contexts even disregarding the challenges of predicting what users want.next, in section 6, we develop a model for competition among platforms, in which each platform selects a content moderation policy, and then users choose which platform they want to participate on. examining new platforms competing with large, stable platforms for users, we show that there can be a wide range of outcomes, depending on the population: large, stable platforms may have either wide latitude in their moderation choices without risks of losing their membership to a competitor or may be highly vulnerable to competitors regardless of their policy. finally, we make several concluding remarks and discuss extensions of our model in section 7. proofs of our results are deferred to appendix e.in accordance with recent empirical evidence on the societal importance of changing membership patterns on social media , we focus on the interactions of content moderation and user participation decisions. our theoretical framework is naturally suited to explaining these empirical phenomena. our work also offers a mathematically tractable way to explore a rich set of interconnected and subtle ideas in political philosophy, sociology and legal scholarship related to norms and political expression in communities. here we list a few of these ideas. the paradox of tolerance posits that a tolerant society must be intolerant of intolerance in order to survive ; our model offers a mathematical interpretation of this idea, where a platform may ban extreme viewpoints that would otherwise drive other users off the platform. unraveling: in sociology, there is a long tradition of studying cascading effects of participation in public activities; if certain members of a group representing a particular viewpoint begin to withdraw from public discourse, then others in this group might withdraw as well because they perceive themselves to be in the minority. such dynamics are sometimes described evocatively as a spiral of silence . in our model this corresponds to cascades of users, where once users with a particular viewpoint start leaving the platform, others of a similar viewpoint may also leave. lastly, the counterspeech doctrine, outlined by the u.s. 
supreme court, says that the remedy to harmful speech is corrective speech . our model provides a framework to understand the conditions under which counterspeech can be effective and when harmful speech might overpower attempts at counterspeech. we believe that one of the strengths of our model is that it naturally captures the nuances of each of these concepts and provides a mathematical foundation for when and why they may occur. if user i is in s, then u i (s) captures whether they choose to stay; if i is not in s, then u i (s) captures information about whether i will join: if a user has nonnegative utility with respect to the set of users currently on the platform, they will join or stay on the platform; if they have negative utility, they will choose to leave or stay off the platform. in real world platforms, this corresponds to anchoring a large, core set of users on the platform by ensuring they will like everything they see, while potentially banning other users who would like to participate in the platform but might not be compatible with users in the core set.the intuition is roughly as follows: the platform consists of a set of users who would join the platform in any switching order, except that there are a few extra users who derive positive utility from and bring negative utility to all the other users. then, once all users of type 1 were on the platform, all users of type 2 would also join the platform, regardless of any other users were already on the platform.the intuition for why this is the case is that better personalization can change who would be willing to participate in a platform: some users who would not participate in the platform under less personalization might participate in the platform with more personalization, which could drive other users off the platform and de-stabilize what would be a large stable platform under less personalization. this may entail allowing users who are banned from the dominant platform and perhaps enticing other users to follow them to the new platform; alternately, it might involve setting stricter policies to appeal to users dissatisfied with the speech on the dominant platform. through two examples, we show that, depending on the population, a large, previously stable platform can be extremely vulnerable to a new platform and that in other cases, the platform may have wide latitude to choose a moderation policy -even a sub-optimal one in the sense of the largest number of users it could attract -while still preventing the new platform from attracting many users.when the original platform chose a window to capture the most users, it ultimately created space for another platform to attract users excluded from the first platform and pull a large fraction of users from the first platform to the second one. notice that, if platform 1 wants to defend against the possible emergence of platform 2, it should set no moderation window and let users of type 1 leave the platform, even though this is suboptimal in the setting where they are the only platform. 
no other users would leave the platform as a result of the decision to disallow the extreme users: if the platform has some preferences over user speech (as long as that preference includes users of type 4), it is free to enforce these preferences on the users without the threat of losing users to the competition.2, we show that, even if platform 1 starts with a large compatible community and offers better personalization and if platform 2 starts empty and has worse personalization, there may be population for which platform 2 captures a large fraction of the users on platform 1. this tells us that, even factoring in the dynamics of users switching between platforms, a platform with worse personalization can outcompete a platform with better personalization, even if the platform with better personalization starts off with a largest compatible community on the platform., the membership of the original set of individuals changes? for example, what if user 1 leaves the population, or another user whose speech point falls outside of user 4's interval joins the population? in both of these cases, users 1 through 4 would all leave the platform for switching orders where they move first, which would cause a mass exodus: all of the rest of the users on the platform could similarly leave (except for the last user, who will be alone, have zero utility and stay).notice that under either type of switching, users of type 1 will stay on platform 1, then users of type 2 will leave for platform 2, then user 3 will follow users of type 2 to platform 2 and so on. since they cover the whole window and no users whose speech falls outside the window are allowed on, every user in the largest stable platform will gain positive utility from every other user on the platform at any time in the process. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/996.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/996.txt new file mode 100644 index 0000000000000000000000000000000000000000..940e9d1a0914c08828794a3fffbb72b9e9aed8cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/996.txt @@ -0,0 +1 @@ +amid the increasing prevalence of ai in business and industries, ensuring unbiased outcomes by mitigating inherent biases and enhancing ai's explainability is essential to maintain equity and promote trust among various stakeholders. this paper presents a post-processing approach to ai fairness using causal modeling to: (1) detect algorithmic biases and (2) provide statistical remedies to correct the biases.in the context of ai fairness, we are concerned with biases that are based on protected attributes such as gender, race, religion, etc. . our goal is to ensure that outcomes from ai do not exhibit any bias based on these attributes. to demonstrate ai fairness, the most commonly used approach is to establish statistical parity. this approach attempts to equalize or maintain an acceptable level of disparity in the output from ai between protected and non-protected groups. for example, pymetrics, a company that uses ai to evaluate job applicants, mitigates bias in their ai models by ensuring that the selection rate for any protected group is at least 80% of the selection rate of the non-protected group, in accordance with us' equal employment opportunity commission . however, for certain job categories, some of the protected attributes may correlate with job performance, e.g., jobs that require physical strength. 
relying solely on statistical parity may lead to "positive discrimination", which may undermine the principle of meritocracy. according to this principle, employers should recruit employees based on merit, regardless of age, race, gender, religion, marital status, family responsibilities or disability .our proposed approach to ai fairness involves creating a causal model of biases; determining the statistical significance of the biases; and providing statistical remedies to correct for the biases. the advantages of our proposed approach are as follows:1. the proposed approach allows protected attributes to correlate with the underlying factor that forms the basis of decisions (e.g., job performance). as a result, it can prevent the unintentional introduction of positive discrimination in ai solutions. 2. the statistical nature of the proposed approach means that the results are easily interpretable and can be used to explain the nature of the biases introduced by an ai application and how the biases are corrected, thereby enhancing explainability and promoting trust among different stakeholders of ai. 3. our approach separates ai development from bias detection and correction, eliminating the need for de-biasing during the training or pre-training phase and significantly saving time.in the remainder of the paper, we will review some related work in ai fairness, describe our research method, present our experiments, evaluate the effectiveness of our proposed method, summarize the paper and identify a number of future research directions. this paper presents a post-processing approach to ai fairness using causal modeling to: (1) detect algorithmic biases and (2) provide statistical remedies to correct the biases.in the context of ai fairness, we are concerned with biases that are based on protected attributes such as gender, race, religion, etc. for example, pymetrics, a company that uses ai to evaluate job applicants, mitigates bias in their ai models by ensuring that the selection rate for any protected group is at least 80% of the selection rate of the non-protected group, in accordance with us' equal employment opportunity commissionanalyzed how causal models can improve prediction accuracy in the presence of confounding factors using experiments based on health data. to determine if there is any bias in 𝑦 ̂, a causal model is created based on the protected attributes, the target 𝑦, and the predicted 𝑦 ̂, as enclosed by the dotted rectangle. next, using the training data, we will develop the prediction model, evaluate the gender bias introduced by the prediction model, and illustrate how causal modelling can be used to mitigate the gender bias.we will use r to create the prediction model, create the causal model, and evaluate the bias mitigation based on the causal model.in this subsection, we will use the training data to develop and evaluate (1) the prediction model and (2) the bias mitigation model based on causal modeling. our results show that when bias exists in the prediction model, causal modeling can be used to detect the bias. even though an ai model may be a black box, the post-hoc analysis provides a way to describe the nature of bias and the remedies taken to address the bias. secondly, although we have only addressed bias introduced by the prediction model in this study, we will apply causal modeling to address biases that exist within the data. 
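as a rough illustration of this post-processing idea (the paper itself works in r and with a full causal model), the sketch below uses a plain regression of the prediction on the true target and the protected attribute: a significant coefficient on the attribute signals bias, and subtracting the estimated effect is one simple statistical remedy. the variable names and synthetic data are assumptions, not the paper's.

```python
import numpy as np
import pandas as pd
import statsmodels.api as sm

rng = np.random.default_rng(0)
n = 2000
gender = rng.integers(0, 2, n)                               # protected attribute (hypothetical coding)
y = rng.normal(size=n)                                       # true target, e.g. job performance
y_hat = y + 0.5 * gender + rng.normal(scale=0.3, size=n)     # a deliberately biased predictor

df = pd.DataFrame({"gender": gender, "y": y, "y_hat": y_hat})

# step 1: detect bias -- does y_hat still depend on gender once the true target is accounted for?
X = sm.add_constant(df[["y", "gender"]])
fit = sm.OLS(df["y_hat"], X).fit()
print(fit.params["gender"], fit.pvalues["gender"])           # significant coefficient => bias

# step 2: one simple statistical remedy -- remove the estimated gender effect from the predictions
df["y_hat_corrected"] = df["y_hat"] - fit.params["gender"] * df["gender"]

# sanity check: the corrected scores no longer show a significant gender effect
fit2 = sm.OLS(df["y_hat_corrected"], X).fit()
print(fit2.pvalues["gender"])
```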
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/997.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/997.txt new file mode 100644 index 0000000000000000000000000000000000000000..fbce370882acd027a241c962b1beca65055fa648 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/997.txt @@ -0,0 +1 @@ +abusive language has become a perpetual problem in today's online social media. an ever-increasing number of individuals are falling prey to online harassment, abuse and cyberbullying as established in a recent study by pew research (vogels, 2021). in the online setting, such abusive behaviours can lead to traumatization of the victims (vedeler et al., 2019), affecting them psychologically. furthermore, widespread usage of such content may lead to increased bias against the target community, making violence normative (luft, 2019). many gruesome incidents like the mass shooting at pitts-burgh synagogue1 , the charlottesville car attack2 , etc. have all been caused by perpetrators consuming/producing such abusive content.in response to this, social media platforms have implemented moderation policies to reduce the spread of hate/abusive content. one important step in content moderation is filtering of abusive content. a common way to do this is to train language models on human annotated contents for classification. however there are challenges in this approach in the forms of heavy resources required in terms of labour and expertise to annotate these hateful contents. this exercise also exposes the annotators to a wide array of hateful contents that is almost always psychologically very taxing. therefore, recently, many works have tried to understand if large language models (llms) can be used for detecting such abusive language3 (huang et al., 2023;ziems et al., 2023), but none of these study the role of additional context as input (output) to (from) such llms.we, for the first time, introduce several prompt variations and input instructions to probe two of the llms (gpt 3.5 and text-davinci) across three datasets -hatexplain (mathew et al., 2021), implicit hate (elsherief et al., 2021) and toxic-spans (pavlopoulos et al., 2022). note that all these three datasets contain ground truth explanations in the form of either rationales (mathew et al., 2021;pavlopoulos et al., 2022) or implied statements (elsherief et al., 2021) that tells why an annotator took a particular labelling decision. in addition, two of the datasets also contain the information about the target/victim community against whom the hate speech was hurled. in particular, we design prompts that contain (a) only the hate post as input and a query for the output label (vanilla) (b) post as well as the definition of hate speech as input and a query for the output label (c) post (-/+ definition) and the target community information as input and a query for the output label, (d) post (-/+ definition) and the explanation as input and a query for the output label, (e) post (-/+ definition) as input and a query for the output label and the target community, and (f) post (-/+ definition) as input and a query for the output label and the explanation. we record the performance of all these approaches and also identify the most confusing cases. in order to facilitate future research, we further provide a typology of the error cases where the llms fail to classify and usually provide poor explanations for the classification decisions taken. 
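to make the prompt variants (a)-(f) above easier to picture, here is a hedged python sketch of how such templates could be assembled; the exact wording, definitions and query formats used in the paper are not reproduced here and are assumptions.

```python
# hypothetical wording; the paper's exact prompts are not reproduced here
HATE_DEFINITION = ("hate speech is language that attacks or demeans a group "
                   "based on attributes such as race, religion, or gender.")

def build_prompt(post, *, definition=False, target=None, explanation=None,
                 ask_target=False, ask_explanation=False):
    """Assemble one of the variants (a)-(f): optional definition, optional
    target/explanation as extra *input*, and optional extra *outputs*."""
    parts = []
    if definition:                       # the "+ defn" variants
        parts.append(f"definition: {HATE_DEFINITION}")
    parts.append(f"post: {post}")
    if target is not None:               # variant (c): target community as input
        parts.append(f"target community: {target}")
    if explanation is not None:          # variant (d): human rationale as input
        parts.append(f"explanation: {explanation}")
    question = "label the post as hateful, offensive, or normal."
    if ask_target:                       # variant (e): also ask for the target
        question += " also state the target community."
    if ask_explanation:                  # variant (f): also ask for an explanation
        question += " also explain your decision."
    parts.append(question)
    return "\n".join(parts)

print(build_prompt("example post"))                                   # variant (a), vanilla
print(build_prompt("example post", definition=True, ask_target=True)) # variant (e) with definition
```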
this typology would naturally constitute the 'jailbreak' prompts 4to which such llms are vulnerable thus pointing to the exact directions in which industry scale safeguards need to be built.we make the following observations.• in terms of vanilla prompts (that is case (a)), we find that flan-t5-large performs the best among the three models; we also observe that text-davinci-003 is better than gpt-3.5-turbo-0301 although the latter is a more recent version. • our proposed strategies of prompts individually benefit the llms in most cases. the prompt with target community in the pipeline gives the best performance with ∼ 20 -30% improvements over the vanilla setup. none of these llms are able to benefit themselves if multiple prompt strategies are combined. • while doing a detailed error analysis, we find that the misclassification of non-hate/nontoxic class is the most common error for implicit hate and toxicspans datasets while for the hatexplain dataset the majority of misclassifications are from the normal to the offensive class. there are also a large number of cases where the model confuses between the hate speech and the offensive class. • from the typology induced from the error cases, we find many interesting patterns. these llms make errors due to the presence of sensitive or controversial terms in otherwise non-hateful posts. presence of negation words and words expressing support for a community are misclassified as hateful. ideological posts and posts containing opinions or fact check information about news articles are often misclassified as toxic/hateful. on the other hand, many offensive/hateful posts are marked normal by the model either due to a vocabulary gap or presence of unknown or polysemous words. similarly, these models miss to classify implicitly toxic posts and mark them as non-toxic. we make our codes and resources used for this research publicly available for reproducibility purposes5 ., 2022).• in terms of vanilla prompts (that is case (a)), we find that flan-t5-large performs the best among the three models; we also observe that text-davinci-003 is better than gpt-3. • while doing a detailed error analysis, we find that the misclassification of non-hate/nontoxic class is the most common error for implicit hate and toxicspans datasets while for the hatexplain dataset the majority of misclassifications are from the normal to the offensive class. implicit hate dataset: the implicit hate(elsherief et al., 2021)is a benchmark dataset specifically designed to address bias and explainability in the domain of hate speech.label list hatexplain normal, offensive or hate speech implicit hate explicit_hate, implicit_hate, or not_hate toxicspans toxic or non_toxic table1: the list of labels for each dataset., 2020). in the templates vanilla + tar (input) and vanilla + defn + tar (input) we replace the variable targets (see appendix a, table7) with the ground truth target community information which is only applicable for the hat-explain and the implicit hate speech datasets.11% for flan-t5-large. we observe that the gpt-3. for implicit dataset, the ground truth targets are present only for the implicit hate data points. 
another confusion that the model often faces is between the hate and the offensive class; if the prompts do not contain the definition (of hate/offensive speech), then offensive speech is largely mislabelled as hate speech while the results are exactly reversed if the prompts contain the definition.for the implicit hate dataset we sort the data points in non-decreasing order based on the bertscore between the ground truth implied statement and the generated explanation at the output. for the hatexplain (toxicspans) dataset, starting from the top of this list, we consider the data points that are either normal (non_toxic) in the ground truth and misclassified as hate speech/offensive (toxic) or vice versa by all the three models. (1) for the implicit hate dataset, one curious case of misclassification (nonhate → implicit hate) is the presence of the 'racist' word, while in another category we find the model marked something as implicit hate because it has pro-white sentiments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/998.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/998.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c1dbf40dba154f2ed59becd06ba66cf651814f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/998.txt @@ -0,0 +1 @@ +over the last decade, technological efforts have been geared towards the optimization of human tech expertise and the automation of human capabilities through simulation; however; huge gaps were identified in the proper simulation of human cognitive capabilities by artificial intelligence (ai) technologies up until the recent breakthroughs in high computational power of computers, deep neural networks and large language models (llm).a nominal milestone in the advancement of ai has been the development of ai-assisted tools (such as chat-gpt, etc.) a larger language model trained with generative pre-trained transformer 3 (gpt-3) developed by openai to optimize open educational resources (oer) . chatgpt is an interactive chatbot, capable of using natural language processing (nlp) to examine user input (usually text), and make an intuitive analysis of the text based on language patterns and relationships to produce contextual results relevant to the input text . while this latest technological buzzword has gained prominence across several fields of scholarship, research, and industries, a whole lot is left to be understood regarding its current impact and future implications on users.despite the successful implementation and positive reviews of chatgpt reported by several scholars ( , , ) the controversial significance and future of the ai assistive technology still poses a hot topic for discussion amongst domain experts and academics within industry and academia. the nuanced views and opinions on the rationale behind the efficiency of chatgpt in processing tasks have posed several ethical and subjective questions, such as: (1) are the results provided by chatgpt biased or fair? (2) how accurate are the results provided by chatgpt and can we trust them? (3) what ethical baselines do chatgpt implement in structuring results or patterns, and how transparent are these baselines? (4) to what extent should chatgpt be adopted? (5) what are the future implications of adopting chatgpt across board? 
(6)will chatgpt necessitate human-machine collaboration or implement a holistic automated approach to problem-solving?grounded on global trends and the nuanced nature of questions and perceptions surrounding chatgpt, this survey paper seeks to address some of the questions by providing a brief insight into the findings of recent studies with chatgpt across several domains and implications. to do this, we conducted a literature review to explore the development of chatgpt, the implications of human-machine collaboration, and several implementations of chatgpt with human-machine collaborative objectives, and based on our findings we discuss future implications of chatgpt across several domains and fields.in line with the computer-supported cooperative work (cscw) and sig community's interest in exploring the implication of human-collaborative ai tools, this survey paper makes a survey contribution and also broadens the plethora of literary research works on chatgpt. this survey paper also provides a guide and potential research questions that can be explored by subsequent or future research.over the last decade, technological efforts have been geared towards the optimization of human tech expertise and the automation of human capabilities through simulation; however; huge gaps were identified in the proper simulation of human cognitive capabilities by artificial intelligence (ai) technologies up until the recent breakthroughs in high computational power of computers, deep neural networks and large language models (llm). the nuanced views and opinions on the rationale behind the efficiency of chatgpt in processing tasks have posed several ethical and subjective questions, such as:(1)are the results provided by chatgpt biased or fair? (2) how accurate are the results provided by chatgpt and can we trust them? (3) what ethical baselines do chatgpt implement in structuring results or patterns, and how transparent are these baselines?(4)to what extent should chatgpt be adopted?(5)what are the future implications of adopting chatgpt across board? (6)will chatgpt necessitate human-machine collaboration or implement a holistic automated approach to problem-solving?. to do this, we conducted a literature review to explore the development of chatgpt, the implications of human-machine collaboration, and several implementations of chatgpt with human-machine collaborative objectives, and based on our findings we discuss future implications of chatgpt across several domains and fields. however, due to a major limitation of the gpt-3 language model which is its misalignment of user wants when performing natural language tasks, the pre-trained gpt-3 model was fined-tuned on data using supervised learning and the reinforcement learning from human feedback (rlhf) techniques to address the limitations of the gpt-3 model using human preferences as a reward signal to models to improve reliability and safety. education and research: education and research first witnessed the advancement in human-machine collaboration from algorithmic design and development to actual deployment of ai models. a study bydemonstrates how intelligent education systems through the use of adaptive learning techniques, personalized learning approach, computer vision, and facial recognition among several other techniques help in designing personalized learning for students, assessing teaching and learning methods, grading students performance, smart tutoring and hybrid learning. 
one prominent application is the integration of artificial intelligence (ai) and machine learning (ml) systems, wherein humans collaborate with these technologies to enhance financial decision-making, risk assessments, risk management, fraud detection, and prevention, as well as investments and stock market predictions. healthcare: human-machine collaboration has gained enormous prominence in healthcare to improve patient care, enhance clinical decision-making, and streamline healthcare processes.despite some pushbacks, the implementation of chatgpt across several domains and fields within industry and academia has gained broad adoption in actualizing the role of enhancing human-machine collaboration. it can be observed from the codebook above that chatgpt has gained wide adoption in enabling human-machine collaboration within the field of research as indicated by 63% (n=32) of the articles found, we observed that another area gaining prominence in the adoption of chatgpt for human-machine collaboration is the field of education with 18% (n=9) of articles.the disparity in opinion on the implications of human-machine collaboration to either industry or academic sectors according to, has recently become a baseline to approve or disapprove the adoption of human-machine collaboration in recent times. 31% (n=16) of the articles suggested a moderate effectiveness of chatgpt in supporting human-machine collaboration across board, 30% (n=15) highlighted a low effectiveness of chatgpt in facilitating human-machine collaboration and finally, it was observed that 6% (n=3) of the articles showed no effectiveness of chatgpt in facilitating or impacting human-computer collaboration. based on the findings from our thematic analysis, it can be concluded that current articles suggest the effectiveness of chatgpt in facilitating human-machine collaboration, however, there are still some factors to be considered to ensure near-perfect human-machine collaboration using chatgpt. traditional human-to-human collaboration is being replaced by the integration of ai across diverse domains like healthcare, finance, education, politics, entertainment, and journalism, as ai technologies become ubiquitous. our survey paper also makes an empirical research contribution by conducting reflexive thematic analysis on search results obtained at the intersection of chatgpt and human-machine collaboration. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/999.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/999.txt new file mode 100644 index 0000000000000000000000000000000000000000..215d71d493488cdb5e585adc17de09cced07fc8f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs/999.txt @@ -0,0 +1 @@ +we have designed pulmobell to address the multifaceted needs of copd patients. inside the housing, the weight elements are accompanied by an accelerometer, an oximeter, and an air quality sensor (pm2.5/pm10). when the air quality is good enough for copd patients to exercise, pulmobell can prompt users through a notification and when the air is too polluted they can be warned to avoid it or find a cleaner indoor area. the accelerometer provides data on the number of repetitions carried out by the user and can help them match their assigned routine. the respiratory rate (a key respiration metric ), inferred from the oximeter, along with the oxygen saturation level can be used to adjust the intensity of the exercise regimen throughout the session. 
the arduino module housed within pulmobell sends the data through bluetooth to the mobile app so that the user can view and interact. this data can also be available to be viewed by the user's personal doctor or therapist. an overview of the system within pulmobell is shown in figure 1. copd is preventable and treatable through a set of interventions such as tobacco cessation courses, pharmacological therapies, long-term oxygen therapies (ltot), and pulmonary rehabilitation (pr) programmes. as a significant component of these managements, pr is a comprehensive and multidisciplinary intervention of care, comprising individuallytailored exercise training, behaviour management, nutrition therapy, etc.. while there is evidencesuggesting that pr has a positive effect on health-related quality of life, it is underused, with approximately 3% -16% of copd patients eligible for referral to the programme, and only 1-2% gaining access. additionally, around half the patients with severe and very severe symptoms show unwillingness to uptake pr programmes based in hospitals or clinics, and approximately 30-50% of those who have attended pr programmes quit before completion.environmental factors such as long-distance travel, non-accessible public transport, parking difficulties, inconvenient timing, seasonal weather patterns, and air pollution were considered major external causes of the non-attendance and non-completion of pr programmes in patients. personal issues describe patients' objective living status and subjective attitudes toward pr, including worsening emotional status, unhealthy lifestyles, living alone, low expectations, negative attitudes, anxiety, and concerns.health-related problems include fluctuations or exacerbations of copd symptoms, such as worsening dyspnea, changing exercise capacities, developing comorbidities or new medical conditions, etc.the literature provides evidence for understanding the treatment effects and accessibility issues of hospital-based pr programmes. however, it does not address factors influencing patients' maintenance of long-term behaviour change and physical activity (pa) following the completion of pr programmes. physical activity following a pr programme is vital in reinforcing a stable state of copd, and a worse prognosis is often associated with physical inactivity. although evidencereveals that pr enhances exercise capacity through exercise training, the treatment outcome does not necessarily translate into increased pa and may return to its previous status prior to pr.suggest that symptomevoked anxiety, limited access to social support, and lack of positive feedback on health status are key obstacles that hinder patients from establishing routines incorporating pa and reinforcing behaviour change in everyday life. thus, current pr may not be capable of maintaining sustainability in its outcomes, in part due to its outpatient format and multidisciplinary naturewithin the national health service (nhs), which can be less flexible, takes many medical resources, and results in a lack of continuous support for the self-management of copd post-programme.to help address personal and environmental barriers faced by copd patients during physical activity, this work proposes a novel interface for assisting pulmonary rehabilitation in a home-based setting, with the aim to empower the users to take up, adhere to, and complete effective exercise. 
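as a purely illustrative sketch of the kind of on-device logic described above; the thresholds, field names and the rep-counting heuristic are assumptions, not taken from pulmobell's firmware or any clinical guideline.

```python
# illustrative thresholds only; clinical cut-offs would come from the care team
PM25_EXERCISE_LIMIT = 35.0     # ug/m3, hypothetical "air good enough to exercise"
SPO2_EASE_OFF_LEVEL = 90.0     # %, hypothetical level at which to reduce intensity

def air_quality_advice(pm25):
    if pm25 <= PM25_EXERCISE_LIMIT:
        return "air quality is ok: a session can be suggested"
    return "air is too polluted: warn the user or suggest a cleaner indoor area"

def count_repetitions(accel_magnitudes, threshold=1.5):
    """Very rough rep counter: count upward crossings of an acceleration threshold."""
    reps, above = 0, False
    for a in accel_magnitudes:
        if a >= threshold and not above:
            reps += 1
        above = a >= threshold
    return reps

def adjust_intensity(spo2, respiratory_rate, current_level):
    """Lower the assigned level if oxygen saturation drops or breathing gets too fast."""
    if spo2 < SPO2_EASE_OFF_LEVEL or respiratory_rate > 30:
        return max(1, current_level - 1)
    return current_level

print(air_quality_advice(pm25=20.0))
print(count_repetitions([0.2, 1.8, 0.3, 1.7, 0.4, 1.9]))   # 3 repetitions
print(adjust_intensity(spo2=88.0, respiratory_rate=24, current_level=3))
```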
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/0.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/0.txt new file mode 100644 index 0000000000000000000000000000000000000000..d91b6609312d929edeb019101c27d1656e1326b5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/0.txt @@ -0,0 +1 @@ + generating diverse solutions to the boolean satisfiability problem (sat) is a hard computational problem with practical applications for testing and functional verification of software and hardware designs. we explore the way to generate such solutions using denoising diffusion coupled with a graph neural network to implement the denoising function. we find that the obtained accuracy is similar to the currently best purely neural method and the produced sat solutions are highly diverse, even if the system is trained with non-random solutions from a standard solver.neurips 2022 workshop on score-based methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1.txt new file mode 100644 index 0000000000000000000000000000000000000000..44ad7267537cd24c72a7e863891a90cfd488c721 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1.txt @@ -0,0 +1 @@ + auxiliary learning is a machine learning approach in which the model acknowledges the existence of objects that do not come under any of its learned categories.the name "auxiliary learning" was chosen due to the introduction of an auxiliary class. the paper focuses on increasing the generality of existing narrow purpose neural networks and also highlights the need to handle unknown objects. the cat & dog binary classifier is taken as an example throughout the paper. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/10.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/10.txt new file mode 100644 index 0000000000000000000000000000000000000000..3fa469e5bb74630f2643ea8ee2c6a598d32759c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/10.txt @@ -0,0 +1 @@ + cyber intrusion attacks that compromise the users' critical and sensitive data are escalating in volume and intensity, especially with the growing connections between our daily life and the internet. the large volume and high complexity of such intrusion attacks have impeded the effectiveness of most traditional defence techniques. while at the same time, the remarkable performance of the machine learning methods, especially deep learning, in computer vision, had garnered research interests from the cyber security community to further enhance and automate intrusion detections. however, the expensive data labeling and limitation of anomalous data make it challenging to train an intrusion detector in a fully supervised manner. therefore, intrusion detection based on unsupervised anomaly detection is an important feature too. in this paper, we propose a three-stage deep learning anomaly detection based network intrusion attack detection framework. the framework comprises an integration of unsupervised (k-means clustering), semi-supervised (ganomaly) and supervised learning (cnn) algorithms. we then evaluated and showed the performance of our implemented framework on three benchmark datasets: nsl-kdd, cic-ids2018, and ton iot. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/100.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/100.txt new file mode 100644 index 0000000000000000000000000000000000000000..54299659b32a9800e6dba1ae0830de6fa4aec14a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/100.txt @@ -0,0 +1 @@ + recent advancements in deep learning have led to the widespread adoption of artificial intelligence (ai) in applications such as computer vision and natural language processing. as neural networks become deeper and larger, ai modeling demands outstrip the capabilities of conventional chip architectures. memory bandwidth falls behind processing power. energy consumption comes to dominate the total cost of ownership. currently, memory capacity is insufficient to support the most advanced nlp models. in this work, we present a 3d ai chip, called sunrise, with near-memory computing architecture to address these three challenges. this distributed, near-memory computing architecture allows us to tear down the performancelimiting memory wall with an abundance of data bandwidth. we achieve the same level of energy efficiency on 40nm technology as competing chips on 7nm technology. by moving to similar technologies as other ai chips, we project to achieve more than ten times the energy efficiency, seven times the performance of the current state-of-the-art chips, and twenty times of memory capacity as compared with the best chip in each benchmark. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1000.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1000.txt new file mode 100644 index 0000000000000000000000000000000000000000..de19f26fb047b65ba3df1d7505095cfeb6aedbd0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1000.txt @@ -0,0 +1 @@ + sentiment analysis, widely critiqued for capturing merely the overall tone of a corpus, falls short in accurately reflecting the latent structures and political stances within texts. this study introduces topic metrics, dummy variables converted from extracted topics, as both an alternative and complement to sentiment metrics in stance classification. by employing three datasets identified by bestvater and monroe (2023), this study demonstrates bertopic's proficiency in extracting coherent topics and the effectiveness of topic metrics in stance classification. the experiment results show that bertopic improves coherence scores by 17.07% to 54.20% when compared to traditional approaches such as dirichlet allocation (lda) and non-negative matrix factorization (nmf), prevalent in earlier political science research. additionally, our results indicate topic metrics outperform sentiment metrics in stance classification, increasing performance by as much as 18.95%. our findings suggest topic metrics are especially effective for context-rich texts and corpus where stance and sentiment correlations are weak. the combination of sentiment and topic metrics achieve an optimal performance in most of the scenarios and can further address the limitations of relying solely on sentiment as well as the low coherence score of topic metrics. 
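a minimal sketch of the "topic metrics" idea described above, assuming a labeled corpus is available: bertopic assigns each document a topic, the topic ids are converted into dummy variables, and those dummies feed a stance classifier. the placeholder documents and the linear classifier are assumptions; the paper's full pipeline may differ.

```python
import pandas as pd
from bertopic import BERTopic
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

# placeholder corpus with stance labels (0 = left, 1 = right); replace with a real,
# sufficiently large and varied corpus -- bertopic needs distinct documents to form topics
docs = ["...text of document about topic a...", "...text of document about topic b..."] * 200
stance = [0, 1] * 200

# 1. extract topics with bertopic
topic_model = BERTopic(verbose=False)
topics, _ = topic_model.fit_transform(docs)

# 2. convert topic assignments into dummy variables ("topic metrics")
topic_dummies = pd.get_dummies(pd.Series(topics), prefix="topic")

# 3. use the topic metrics as features for stance classification
X_train, X_test, y_train, y_test = train_test_split(
    topic_dummies, stance, test_size=0.2, random_state=0)
clf = LogisticRegression(max_iter=1000).fit(X_train, y_train)
print("stance accuracy with topic metrics:", clf.score(X_test, y_test))
```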
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1001.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1001.txt new file mode 100644 index 0000000000000000000000000000000000000000..341ffe399a76de3c0d067ebf9ed7952f8606ad18 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1001.txt @@ -0,0 +1 @@ + aligning ai agents to human intentions and values is a key bottleneck in building safe and deployable ai applications. but whose values should ai agents be aligned with? reinforcement learning with human feedback (rlhf) has emerged as the key framework for ai alignment. rlhf uses feedback from human reinforcers to fine-tune outputs; all widely deployed large language models (llms) use rlhf to align their outputs to human values. it is critical to understand the limitations of rlhf and consider policy challenges arising from these limitations. in this paper, we investigate a specific challenge in building rlhf systems that respect democratic norms. building on impossibility results in social choice theory, we show that, under fairly broad assumptions, there is no unique voting protocol to universally align ai systems using rlhf through democratic processes. further, we show that aligning ai agents with the values of all individuals will always violate certain private ethical preferences of an individual user i.e. universal ai alignment using rlhf is impossible. we discuss policy implications for the governance of ai systems built using rlhf: first the need for mandating transparent voting rules to hold model builders accountable. second, the need for model builders to focus on developing ai agents that are narrowly aligned to specific user groups. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1002.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1002.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6604f7189d4e31b8e426bf26f02f688d2816471 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1002.txt @@ -0,0 +1 @@ + neutrality is difficult to achieve and, in politics, subjective. traditional media typically adopt an editorial line that can be used by their potential readers as an indicator of the media bias. several platforms currently rate news outlets according to their political bias. the editorial line and the ratings help readers in gathering a balanced view of news. but in the advent of instruction-following language models, tasks such as writing a newspaper article can be delegated to computers. without imposing a biased persona, where would an ai-based news outlet lie within the bias ratings? in this work, we use the ratings of authentic news outlets to create a multilingual corpus of news with coarse stance annotations (left and right) along with automatically extracted topic annotations. we show that classifiers trained on this data are able to identify the editorial line of most unseen newspapers in english, german, spanish and catalan. we then apply the classifiers to 101 newspaper-like articles written by chat-gpt and bard in the 4 languages at different time periods. we observe that, similarly to traditional newspapers, chatgpt editorial line evolves with time and, being a data-driven system, the stance of the generated articles differs among languages. 
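as a schematic of the setup described above (train a stance classifier on outlet-labeled news, then apply it to chatbot-written articles), here is a minimal sketch with a linear baseline and placeholder texts; the paper's actual multilingual models and corpus are not reproduced here.

```python
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import make_pipeline

# placeholder training set: articles labeled with the coarse stance of their outlet
train_texts = ["article text from a left-rated outlet ...",
               "article text from a right-rated outlet ..."] * 100
train_labels = ["left", "right"] * 100

clf = make_pipeline(TfidfVectorizer(min_df=2), LogisticRegression(max_iter=1000))
clf.fit(train_texts, train_labels)

# apply the trained classifier to newspaper-like articles produced by a chatbot
generated_articles = ["a newspaper-like article generated by an llm ..."]
print(clf.predict(generated_articles))
print(clf.predict_proba(generated_articles))   # per-class probabilities
```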
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1003.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1003.txt new file mode 100644 index 0000000000000000000000000000000000000000..fad828fe600b97551fed40150c70711b60c3b6ce --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1003.txt @@ -0,0 +1 @@ + as algorithmic decision-making systems become more prevalent in society, ensuring the fairness of these systems is becoming increasingly important. whilst there has been substantial research in building fair algorithmic decision-making systems, the majority of these methods require access to the training data, including personal characteristics, and are not transparent regarding which individuals are classified unfairly. in this paper, we propose a novel model-agnostic argumentation-based method to determine why an individual is classified differently in comparison to similar individuals. our method uses a quantitative argumentation framework to represent attribute-value pairs of an individual and of those similar to them, and uses a well-known semantics to identify the attribute-value pairs in the individual contributing most to their different classification. we evaluate our method on two datasets commonly used in the fairness literature and illustrate its effectiveness in the identification of bias. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1004.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1004.txt new file mode 100644 index 0000000000000000000000000000000000000000..92daa8c961a0d3d0ae7a5e3471570bde14b63ea9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1004.txt @@ -0,0 +1 @@ + redactable signature schemes and sanitizable signature schemes are methods that permit modification of a given digital message and retain a valid signature. this can be applied to decentralized identity systems for delegating identity issuance and redacting sensitive information for privacy-preserving verification of identity. we propose implementing these protocols on a digital credential and compare them against other privacy-enhancing techniques to assess their suitability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1005.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1005.txt new file mode 100644 index 0000000000000000000000000000000000000000..91496e50f18422511575d9f250eb4fa11f7b7ec4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1005.txt @@ -0,0 +1 @@ + natural language processing prides itself to be an empirically-minded, if not outright empiricist field, and yet lately it seems to get itself into essentialist debates on issues of meaning and measurement ("do large language models understand language, and if so, how much?"). this is not by accident: here, as everywhere, the evidence underspecifies the understanding. as a remedy, this paper sketches the outlines of a model of understanding, which can ground questions of the adequacy of current methods of measurement of model quality. the paper makes three claims: a) that different language use situation types have different characteristics, b) that language understanding is a multifaceted phenomenon, bringing together individualistic and social processes, and c) that the choice of understanding indicator marks the limits of benchmarking, and the beginnings of considerations of the ethics of nlp use. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1006.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1006.txt new file mode 100644 index 0000000000000000000000000000000000000000..80e5078a97b11276a9f4bad76c4c29498c3bf331 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/1006.txt @@ -0,0 +1 @@ + as more and more decisions that have a significant ethical dimension are being outsourced to ai systems, it is important to have a definition of moral responsibility that can be applied to ai systems. moral responsibility for an outcome of an agent who performs some action is commonly taken to involve both a causal condition and an epistemic condition: the action should cause the outcome, and the agent should have been aware -in some form or other -of the possible moral consequences of their action. this paper presents a formal definition of both conditions within the framework of causal models. i compare my approach to the existing approaches of braham and van hees (bvh) and of halpern and kleiman-weiner (hk). i then generalize my definition into a degree of responsibility. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/101.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/101.txt new file mode 100644 index 0000000000000000000000000000000000000000..0943d6537e66906b13c48c4ac69d248824604113 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/101.txt @@ -0,0 +1 @@ + systematic generalization refers to a learning algorithm's ability to extrapolate learned behavior to unseen situations that are distinct but semantically similar to its training data. as shown in recent work, state-of-the-art deep learning models fail dramatically even on tasks for which they are designed when the test set is systematically different from the training data. we hypothesize that explicitly modeling the relations between objects in their contexts while learning their representations will help achieve systematic generalization. therefore, we propose a novel method that learns objects' contextualized embedding with dynamic message passing conditioned on the input natural language and is end-to-end trainable with other downstream deep learning modules. to our knowledge, this model is the first one that significantly outperforms the provided baseline and reaches state-of-the-art performance on grounded scan (gscan), a grounded natural language navigation dataset designed to require systematic generalization in its test splits. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/102.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/102.txt new file mode 100644 index 0000000000000000000000000000000000000000..95e6edbbf6ee2706465919fb0858c73042da0ff0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/102.txt @@ -0,0 +1 @@ + existing commonsense reasoning datasets for ai and nlp tasks fail to address an important aspect of human life: cultural differences. we introduce an approach that extends prior work on crowdsourcing commonsense knowledge by incorporating differences in knowledge that are attributable to cultural or national groups. we demonstrate the technique by collecting commonsense knowledge that surrounds six fairly universal rituals-birth, coming-of-age, marriage, funerals, new year, and birthdays-across two national groups: the united states and india. 
our study expands the different types of relationships identified by existing work in the field of commonsense reasoning for commonplace events, and uses these new types to gather information that distinguish the identity of the groups providing the knowledge. it also moves us a step closer towards building a machine that doesn't assume a rigid framework of universal (and likely westernbiased) commonsense knowledge, but rather has the ability to reason in a contextually and culturally sensitive way. our hope is that cultural knowledge of this sort will lead to more human-like performance in nlp tasks such as question answering (qa) and text understanding and generation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/103.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/103.txt new file mode 100644 index 0000000000000000000000000000000000000000..b55331d69acff7b5fbc06ac9561389aa12e5f909 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/103.txt @@ -0,0 +1 @@ + since data is the fuel that drives machine learning models, and access to labeled data is generally expensive, semi-supervised methods are constantly popular. they enable the acquisition of large datasets without the need for too many expert labels. this work combines self-labeling techniques with active learning in a selective sampling scenario. we propose a new method that builds an ensemble classifier. based on an evaluation of the inconsistency of the decisions of the individual base classifiers for a given observation, a decision is made on whether to request a new label or use the self-labeling. in preliminary studies, we show that naïve application of self-labeling can harm performance by introducing bias towards selected classes and consequently lead to skewed class distribution. hence, we also propose mechanisms to reduce this phenomenon. experimental evaluation shows that the proposed method matches current selective sampling methods or achieves better results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/104.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/104.txt new file mode 100644 index 0000000000000000000000000000000000000000..f497b447155f011fcef452c0d7e26258725f25cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/104.txt @@ -0,0 +1 @@ + this paper presents a summary and metaanalysis of the first three iterations of the annual international verification of neural networks competition (vnn-comp) held in 2020, 2021, and 2022. in the vnn-comp, participants submit software tools that analyze whether given neural networks satisfy specifications describing their input-output behavior. these neural networks and specifications cover a variety of problem classes and tasks, corresponding to safety and robustness properties in image classification, neural control, reinforcement learning, and autonomous systems. we summarize the key processes, rules, and results, present trends observed over the last three years, and provide an outlook into possible future developments. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/105.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/105.txt new file mode 100644 index 0000000000000000000000000000000000000000..23cc3eb4dd1db8c4f39d07176d16ef13c5a2443f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/105.txt @@ -0,0 +1 @@ + we propose the first model-free algorithm that achieves low regret performance for decentralized learning in two-player zerosum tabular stochastic games with infinite-horizon average-reward objective. in decentralized learning, the learning agent controls only one player and tries to achieve low regret performances against an arbitrary opponent. this contrasts with centralized learning where the agent tries to approximate the nash equilibrium by controlling both players. in our infinite-horizon undiscounted setting, additional structure assumptions is needed to provide good behaviors of learning processes : here we assume for every strategy of the opponent, the agent has a way to go from any state to any other. this assumption is the analogous to the "communicating" assumption in the mdp setting. we show that our decentralized optimistic nash q-learning (donq-learning) algorithm achieves both sublinear high probability regret of order 3/4 and sublinear expected regret of order 2/3 . moreover, our algorithm enjoys a low computational complexity and low memory space requirement compared to the previous works of and in the same setting. * + ℎ * ( ) = max ( , * ) ( ) + ( , * ) ℎ * ( ) , * + ℎ * ( ) = min ( * , ) ( ) + ( * , ) ℎ * ( ) . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/106.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/106.txt new file mode 100644 index 0000000000000000000000000000000000000000..e31a98b1af58b6e501aff0befdea0569fe9884b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/106.txt @@ -0,0 +1 @@ + abstract-testing deep learning (dl) systems is a complex task as they do not behave like traditional systems would, notably because of their stochastic nature. nonetheless, being able to adapt existing testing techniques such as mutation testing (mt) to dl settings would greatly improve their potential verifiability. while some efforts have been made to extend mt to the supervised learning paradigm, little work has gone into extending it to reinforcement learning (rl) which is also an important component of the dl ecosystem but behaves very differently from sl. this paper builds on the existing approach of mt in order to propose a framework, rlmutation, for mt applied to rl. notably, we use existing taxonomies of faults to build a set of mutation operators relevant to rl and use a simple heuristic to generate test cases for rl. this allows us to compare different mutation killing definitions based on existing approaches, as well as to analyze the behavior of the obtained mutation operators and their potential combinations called higher order mutation(s) (hom). we show that the design choice of the mutation killing definition can affect whether or not a mutation is killed as well as the generated test cases. 
moreover, we found that even with a relatively small number of test cases and operators we manage to generate hom with interesting properties which can enhance testing capability in rl systems.index terms-reinforcement learning, deep learning, mutation testing, real faults \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/107.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/107.txt new file mode 100644 index 0000000000000000000000000000000000000000..36c477d72163ba1f6954225502199458a12ed7db --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/107.txt @@ -0,0 +1 @@ + to replace data augmentation, this paper proposed a method called slap to intensify experience to speed up machine learning and reduce the sample size. slap is a model-independent protocol/function to produce the same output given different transformation variants. slap improved the convergence speed of convolutional neural network learning by 83% in the experiments with gomoku game states, with only one eighth of the sample size compared with data augmentation. in reinforcement learning for gomoku, using alphago zero/alphazero algorithm with data augmentation as baseline, slap reduced the number of training samples by a factor of 8 and achieved similar winning rate against the same evaluator, but it was not yet evident that it could speed up reinforcement learning. the benefits should at least apply to domains that are invariant to symmetry or certain transformations. as future work, slap may aid more explainable learning and transfer learning for domains that are not invariant to symmetry, as a small step towards artificial general intelligence. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/108.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/108.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a2da104283c786a5f56ba752304b77ec1865ea2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/108.txt @@ -0,0 +1 @@ + over the years, the popularity and usage of wearable internet of things (iot) devices in several healthcare services are increased. among the services that benefit from the usage of such devices is predictive analysis, which can improve early diagnosis in e-health. however, due to the limitations of wearable iot devices, challenges in data privacy, service integrity, and network structure adaptability arose. to address these concerns, we propose a platform using federated learning and private blockchain technology within a fog-iot network. these technologies have privacy-preserving features securing data within the network. we utilized the fog-iot network's distributive structure to create an adaptive network for wearable iot devices. we designed a testbed to examine the proposed platform's ability to preserve the integrity of a classifier. according to experimental results, the introduced implementation can effectively preserve a patient's privacy and a predictive service's integrity. we further investigated the contributions of other technologies to the security and adaptability of the iot network. overall, we proved the feasibility of our platform in addressing significant security and privacy challenges of wearable iot devices in predictive healthcare through analysis, simulation, and experimentation. 
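the platform above combines federated learning with a private blockchain inside a fog-iot network; the sketch below shows only the federated-averaging part on synthetic data, as one plausible illustration of how model weights rather than raw wearable data leave each device. the model choice, sizes and learning rate are assumptions, not the paper's implementation.

```python
import numpy as np

def local_update(weights, X, y, lr=0.1, epochs=5):
    """One client's local step: plain logistic regression trained by gradient descent."""
    w = weights.copy()
    for _ in range(epochs):
        p = 1.0 / (1.0 + np.exp(-X @ w))
        w -= lr * X.T @ (p - y) / len(y)
    return w

def federated_average(client_weights, client_sizes):
    """Server-side FedAvg: weight each client's model by its number of samples."""
    total = sum(client_sizes)
    return sum(w * (n / total) for w, n in zip(client_weights, client_sizes))

rng = np.random.default_rng(0)
n_features, n_clients = 5, 3
global_w = np.zeros(n_features)

# synthetic "wearable" data kept on each client; only model weights are shared
clients = []
for _ in range(n_clients):
    X = rng.normal(size=(200, n_features))
    y = (X[:, 0] + 0.5 * X[:, 1] + rng.normal(scale=0.1, size=200) > 0).astype(float)
    clients.append((X, y))

for _ in range(10):   # federated rounds
    updates = [local_update(global_w, X, y) for X, y in clients]
    global_w = federated_average(updates, [len(y) for _, y in clients])

print(global_w)
```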
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/109.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/109.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b728d48a8c95f9b520fcfce932d2d0456b73a6a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/109.txt @@ -0,0 +1 @@ + multiplication layers are a key component in various influential neural network modules, including self-attention and hypernetwork layers. in this paper, we investigate the approximation capabilities of deep neural networks with intermediate neurons connected by simple multiplication operations. we consider two classes of target functions: generalized bandlimited functions, which are frequently used to model real-world signals with finite bandwidth, and sobolev-type balls, which are embedded in the sobolev space w^(r,2). our results demonstrate that multiplicative neural networks can approximate these functions with significantly fewer layers and neurons compared to standard relu neural networks, with respect to both input dimension and approximation error. these findings suggest that multiplicative gates can outperform standard feed-forward layers and have potential for improving neural network design. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/11.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/11.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca3e0693d45999a318c5932a7b6ea8abf2286a62 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/11.txt @@ -0,0 +1 @@ + learning semantic-rich representations from raw unlabeled time series data is critical for downstream tasks such as classification and forecasting. contrastive learning has recently shown its promising representation learning capability in the absence of expert annotations. however, existing contrastive approaches generally treat each instance independently, which leads to false negative pairs that share the same semantics. to tackle this problem, we propose mhccl, a masked hierarchical cluster-wise contrastive learning model, which exploits semantic information obtained from the hierarchical structure consisting of multiple latent partitions for multivariate time series. motivated by the observation that fine-grained clustering preserves higher purity while coarse-grained clustering reflects higher-level semantics, we propose a novel downward masking strategy to filter out fake negatives and supplement positives by incorporating the multi-granularity information from the clustering hierarchy. in addition, a novel upward masking strategy is designed in mhccl to remove outliers of clusters at each partition to refine prototypes, which helps speed up the hierarchical clustering process and improves the clustering quality. we conduct experimental evaluations on seven widely-used multivariate time series datasets. the results demonstrate the superiority of mhccl over the state-of-the-art approaches for unsupervised time series representation learning. 
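To make the masking idea in the mhccl abstract above concrete, here is a small numpy sketch of a contrastive loss in which negatives that fall in the anchor's cluster are dropped from the denominator so they cannot act as false negatives. It uses a single flat clustering rather than the paper's hierarchical multi-partition masking, and every name and constant here is an illustrative assumption, not the mhccl implementation.

import numpy as np

def masked_contrastive_loss(z, z_aug, cluster_ids, temperature=0.1):
    # InfoNCE-style loss; same-cluster negatives are masked out of the denominator.
    n = z.shape[0]
    sim = z @ z_aug.T / temperature                  # pairwise similarities between two views
    pos = np.diag(sim)                               # positives: same instance, other view
    same_cluster = cluster_ids[:, None] == cluster_ids[None, :]
    mask = same_cluster & ~np.eye(n, dtype=bool)     # same cluster, but not the positive itself
    sim = np.where(mask, -np.inf, sim)               # exp(-inf) = 0 removes masked pairs
    return float(np.mean(np.log(np.exp(sim).sum(axis=1)) - pos))

# toy usage: 4 instances, 2 clusters, unit-norm embeddings of two augmented views
rng = np.random.default_rng(0)
z = rng.normal(size=(4, 8)); z /= np.linalg.norm(z, axis=1, keepdims=True)
z_aug = z + 0.01 * rng.normal(size=z.shape); z_aug /= np.linalg.norm(z_aug, axis=1, keepdims=True)
print(masked_contrastive_loss(z, z_aug, np.array([0, 0, 1, 1])))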
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/110.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/110.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b85ae1612fc6ebf902ed73030a2f5e6af992c63 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/110.txt @@ -0,0 +1 @@ + discussions about reducing the bias present in algorithms have been on the rise since the mid-2010s. ai ethicists, dei practitioners, sociologists, data scientists and social justice advocates have decried the lack of understanding of the harms that algorithms pose to people who belong to historically marginalized groups. these cries have become increasingly accepted in industry since 2020, but little is understood of how algorithm and machine learning (ml) model builders should go about mitigating bias in models that are intended for deployment. this chapter is written with the data scientist or mlops professional in mind but can be used as a resource for policy makers, reformists, ai ethicists, sociologists, and others interested in finding methods that help reduce bias in algorithms. i will take a deployment-centered approach with the assumption that the professionals reading this work have already read the amazing work on the implications of algorithms on historically marginalized groups by gebru, buolamwini, benjamin and shane to name a few. if you have not read those works, i refer you to the "important reading for ethical model building" list at the end of this paper as it will help give you a framework on how to think about machine learning models more holistically, taking into account their effect on marginalized people. in the introduction to this chapter, i root the significance of their work in real-world examples of what happens when models are deployed without transparent data collected for the training process and without the practitioners paying special attention to what happens to models that adapt to exploit gaps between their training environment and the real world. the rest of this chapter builds on the work of the aforementioned researchers, discusses the reality of how models perform post-production, and details ways ml practitioners can identify bias using tools during the mlops lifecycle to mitigate bias that may be introduced to models in the real world. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/111.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/111.txt new file mode 100644 index 0000000000000000000000000000000000000000..e8e00e85a41ffbc9b886115d46003b16eea99ddb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/111.txt @@ -0,0 +1 @@ + estimating the empirical distribution of a scalar-valued data set is a basic and fundamental task. in this paper, we tackle the problem of estimating an empirical distribution in a setting with two challenging features. first, the algorithm does not directly observe the data; instead, it only asks a limited number of threshold queries about each sample. second, the data are not assumed to be independent and identically distributed; instead, we allow for an arbitrary process generating the samples, including an adaptive adversary. 
these considerations are relevant, for example, when modeling a seller experimenting with posted prices to estimate the distribution of consumers' willingness to pay for a product: offering a price and observing a consumer's purchase decision is equivalent to asking a single threshold query about their value, and the distribution of consumers' values may be non-stationary over time, as early adopters may differ markedly from late adopters.our main result quantifies, to within a constant factor, the sample complexity of estimating the empirical cdf of a sequence of elements of , up to ε additive error, using one threshold query per sample. the complexity depends only logarithmically on n, and our result can be interpreted as extending the existing logarithmic-complexity results for noisy binary search to the more challenging setting where noise is non-stochastic. along the way to designing our algorithm, we consider a more general model in which the algorithm is allowed to make a limited number of simultaneous threshold queries on each sample. we solve this problem using blackwell's approachability theorem and the exponential weights method. as a side result of independent interest, we characterize the minimum number of simultaneous threshold queries required by deterministic cdf estimation algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/112.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/112.txt new file mode 100644 index 0000000000000000000000000000000000000000..b56c37216ed89307dee6e8788caf3d6b0ae20ae5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/112.txt @@ -0,0 +1 @@ + with the increasing amount of available data and advances in computing capabilities, deep neural networks (dnns) have been successfully employed to solve challenging tasks in various areas, including healthcare, climate, and finance. nevertheless, state-of-the-art dnns are susceptible to quasi-imperceptible perturbed versions of the original images -adversarial examples. these perturbations of the network input can lead to disastrous implications in critical areas where wrong decisions can directly affect human lives. adversarial training is the most efficient solution to defend the network against these malicious attacks. however, adversarial trained networks generally come with lower clean accuracy and higher computational complexity. this work proposes a data selection (ds) strategy to be applied in the mini-batch training. based on the cross-entropy loss, the most relevant samples in the batch are selected to update the model parameters in the backpropagation. the simulation results show that a good compromise can be obtained regarding robustness and standard accuracy, whereas the computational complexity of the backpropagation pass is reduced. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/113.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/113.txt new file mode 100644 index 0000000000000000000000000000000000000000..67bb168e87890d23a38eb214e206e2bb699c23b9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/113.txt @@ -0,0 +1 @@ + machine learning methods for conditional data generation usually build a mapping from source conditional data x to target data y . 
the target y (e.g., text, speech, music, image, video) is usually high-dimensional and complex, and contains information that does not exist in source data, which hinders effective and efficient learning on the source-target mapping. in this paper, we present a learning paradigm called regeneration learning for data generation, which first generates y' (an abstraction/representation of y) from x and then generates y from y'. during training, y' is obtained from y through either handcrafted rules or self-supervised learning and is used to learn x → y' and y' → y. regeneration learning extends the concept of representation learning to data generation tasks, and can be regarded as a counterpart of traditional representation learning, since 1) regeneration learning handles the abstraction (y') of the target data y for data generation while traditional representation learning handles the abstraction (x') of source data x for data understanding; 2) both the processes of y' → y in regeneration learning and x → x' in representation learning can be learned in a self-supervised way (e.g., pre-training); 3) both the mappings from x to y' in regeneration learning and from x' to y in representation learning are simpler than the direct mapping from x to y. we show that regeneration learning can be a widely-used paradigm for data generation (e.g., text generation, speech recognition, speech synthesis, music composition, image generation, and video generation) and can provide valuable insights into developing data generation methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/114.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/114.txt new file mode 100644 index 0000000000000000000000000000000000000000..7a9c90dff0bb7a14b76d8dcd437c3b7b9a3432f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/114.txt @@ -0,0 +1 @@ + marginal-based methods achieve promising performance in the synthetic data competition hosted by the national institute of standards and technology (nist). to deal with high-dimensional data, the distribution of synthetic data is represented by a probabilistic graphical model (e.g., a bayesian network), while the raw data distribution is approximated by a collection of low-dimensional marginals. differential privacy (dp) is guaranteed by introducing random noise to each low-dimensional marginal distribution. despite its promising performance in practice, the statistical properties of marginal-based methods are rarely studied in the literature. in this paper, we study dp data synthesis algorithms based on bayesian networks (bn) from a statistical perspective. we establish a rigorous accuracy guarantee for bn-based algorithms, where the errors are measured by the total variation (tv) distance or the l_2 distance. related to downstream machine learning tasks, an upper bound for the utility error of the dp synthetic data is also derived. to complete the picture, we establish a lower bound for tv accuracy that holds for every ǫ-dp synthetic data generator. 
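As a hedged illustration of the noisy-marginal step that the 114.txt abstract above describes, the sketch below applies the Laplace mechanism to a single low-dimensional marginal. It assumes the add/remove-one-record neighbouring convention (L1 sensitivity 1) and a simple normalisation; the Bayesian-network construction and the accuracy analysis from the paper are not reproduced here.

import numpy as np

def dp_marginal(data, columns, domain_sizes, eps):
    # Release one low-dimensional marginal under epsilon-DP via the Laplace mechanism.
    # Adding or removing a single record changes this count table by at most 1 in L1 norm.
    counts = np.zeros(domain_sizes)
    for row in data[:, columns]:
        counts[tuple(row)] += 1
    noisy = counts + np.random.laplace(scale=1.0 / eps, size=counts.shape)
    noisy = np.clip(noisy, 0.0, None)        # post-processing does not weaken the DP guarantee
    return noisy / max(noisy.sum(), 1e-12)   # normalise into a probability table

# toy usage: a pairwise marginal over two binary attributes of 1000 records
data = np.random.default_rng(1).integers(0, 2, size=(1000, 4))
print(dp_marginal(data, columns=[0, 2], domain_sizes=(2, 2), eps=0.5))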
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/115.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/115.txt new file mode 100644 index 0000000000000000000000000000000000000000..407d49fc0f770ba9e5279dc0000d727ddce72e0a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/115.txt @@ -0,0 +1 @@ + one surprising trait of neural networks is the extent to which their connections can be pruned with little to no effect on accuracy. but when we cross a critical level of parameter sparsity, pruning any further leads to a sudden drop in accuracy. this drop plausibly reflects a loss in model complexity, which we aim to avoid. in this work, we explore how sparsity also affects the geometry of the linear regions defined by a neural network, and consequently reduces the expected maximum number of linear regions based on the architecture. we observe that pruning affects accuracy similarly to how sparsity affects the number of linear regions and our proposed bound for the maximum number. conversely, we find out that selecting the sparsity across layers to maximize our bound very often improves accuracy in comparison to pruning as much with the same sparsity in all layers, thereby providing us guidance on where to prune. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/116.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/116.txt new file mode 100644 index 0000000000000000000000000000000000000000..f8f1809412e57a4dc2b76cabddb808c835693811 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/116.txt @@ -0,0 +1 @@ + in many ways, graphs are the main modality of data we receive from nature. this is due to the fact that most of the patterns we see, both in natural and artificial systems, are elegantly representable using the language of graph structures. prominent examples include molecules (represented as graphs of atoms and bonds), social networks and transportation networks. this potential has already been seen by key scientific and industrial groups, with alreadyimpacted application areas including traffic forecasting, drug discovery, social network analysis and recommender systems. further, some of the most successful domains of application for machine learning in previous years-images, text and speech processing-can be seen as special cases of graph representation learning, and consequently there has been significant exchange of information between these areas. the main aim of this short survey is to enable the reader to assimilate the key concepts in the area, and position graph representation learning in a proper context with related fields. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/117.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/117.txt new file mode 100644 index 0000000000000000000000000000000000000000..25dfc1e5bb9eb15dbe152ebcedeb57644a1e5866 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/117.txt @@ -0,0 +1 @@ + the rapid and dynamic pace of artificial intelligence (ai) and machine learning (ml) is revolutionizing the insurance sector. ai offers significant, very much welcome advantages to insurance companies, and is fundamental to their customer-centricity strategy. it also poses challenges, in the project and implementation phase. 
among those, we study adversarial attacks, which consist of the creation of modified input data to deceive an ai system and produce false outputs. we provide examples of attacks on insurance ai applications, categorize them, and discuss defence methods and precautionary systems, considering that they can involve few-shot and zero-shot multilabelling. a related topic, with growing interest, is the validation and verification of systems incorporating ai and ml components. these topics are discussed in various sections of this paper. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/118.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/118.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e8c9bd802142201a82fb790ed200f10441d534e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/118.txt @@ -0,0 +1 @@ + this paper aims to enhance the computational efficiency of safety verification of neural network control systems by developing a guaranteed neural network model reduction method. first, a concept of model reduction precision is proposed to describe the guaranteed distance between the outputs of a neural network and its reduced-size version. a reachability-based algorithm is proposed to accurately compute the model reduction precision. then, by substituting a reduced-size neural network controller into the closed-loop system, an algorithm to compute the reachable set of the original system is developed, which is able to support much more computationally efficient safety verification processes. finally, the developed methods are applied to a case study of the adaptive cruise control system with a neural network controller, which is shown to significantly reduce the computational time of safety verification and thus validate the effectiveness of the method. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/119.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/119.txt new file mode 100644 index 0000000000000000000000000000000000000000..80ce42e58c9bb5c0ebea5adfb23f4bbbd500cee6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/119.txt @@ -0,0 +1 @@ + semi-supervised learning approaches train on small sets of labeled data along with large sets of unlabeled data. self-training is a semi-supervised teacher-student approach that often suffers from the problem of "confirmation bias" that occurs when the student model repeatedly overfits to incorrect pseudo-labels given by the teacher model for the unlabeled data. this bias impedes improvements in pseudo-label accuracy across self-training iterations, leading to unwanted saturation in model performance after just a few iterations. in this work, we describe multiple enhancements to improve the self-training pipeline to mitigate the effect of confirmation bias. we evaluate our enhancements over multiple datasets showing performance gains over existing self-training design choices. finally, we also study the extendability of our enhanced approach to open-set unlabeled data (containing classes not seen in labeled data). 
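Since the 119.txt abstract above turns on the mechanics of self-training, here is a minimal sketch of one teacher-student iteration with a confidence threshold, a common and simple guard against confirmation bias rather than the specific enhancements the paper proposes. The scikit-learn-style estimator interface, the threshold value and the toy data are assumptions.

import numpy as np
from sklearn.linear_model import LogisticRegression

def self_training_round(model, x_lab, y_lab, x_unlab, threshold=0.9):
    # Fit on labeled data, pseudo-label only confidently predicted unlabeled points,
    # then refit on the enlarged training set.
    model.fit(x_lab, y_lab)
    probs = model.predict_proba(x_unlab)
    keep = probs.max(axis=1) >= threshold
    pseudo_y = model.classes_[probs.argmax(axis=1)][keep]
    model.fit(np.vstack([x_lab, x_unlab[keep]]), np.concatenate([y_lab, pseudo_y]))
    return model, int(keep.sum())

# toy usage: 20 labeled and 200 unlabeled two-dimensional points
rng = np.random.default_rng(0)
x_lab = rng.normal(size=(20, 2)); y_lab = (x_lab[:, 0] > 0).astype(int)
x_unlab = rng.normal(size=(200, 2))
print(self_training_round(LogisticRegression(), x_lab, y_lab, x_unlab)[1])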
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/12.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/12.txt new file mode 100644 index 0000000000000000000000000000000000000000..50c7dc148b6ee57994c9c44ec4e79aa222a4788e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/12.txt @@ -0,0 +1 @@ + the proliferation of unmanned aircraft systems (uas) has caused airspace regulation authorities to examine the interoperability of these aircraft with collision avoidance systems initially designed for large transport category aircraft. limitations in the currently mandated tcas led the federal aviation administration to commission the development of a new solution, the airborne collision avoidance system x (acas x), designed to enable a collision avoidance capability for multiple aircraft platforms, including uas. while prior research explored using deep reinforcement learning algorithms (drl) for collision avoidance, drl did not perform as well as existing solutions. this work explores the benefits of using a drl collision avoidance system whose parameters are tuned using a surrogate optimizer. we show the use of a surrogate optimizer leads to drl approach that can increase safety and operational viability and support future capability development for uas collision avoidance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/120.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/120.txt new file mode 100644 index 0000000000000000000000000000000000000000..a34b148661654ffbd538e5a6513aec899c1d3e75 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/120.txt @@ -0,0 +1 @@ + actionable knowledge discovery (akd) is a crucial aspect of data mining that is gaining popularity and being applied in a wide range of domains. this is because akd can extract valuable insights and information, also known as knowledge, from large datasets. the goal of this paper is to examine different research studies that focus on various domains and have different objectives. the paper will review and discuss the methods used in these studies in detail. akd is a process of identifying and extracting actionable insights from data, which can be used to make informed decisions and improve business outcomes. it is a powerful tool for uncovering patterns and trends in data that can be used for various applications such as customer relationship management, marketing, and fraud detection. the research studies reviewed in this paper will explore different techniques and approaches for akd in different domains, such as healthcare, finance, and telecommunications. the paper will provide a thorough analysis of the current state of akd in the field and will review the main methods used by various research studies. additionally, the paper will evaluate the advantages and disadvantages of each method and will discuss any novel or new solutions presented in the field. overall, this paper aims to provide a comprehensive overview of the methods and techniques used in akd and the impact they have on different domains. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/121.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/121.txt new file mode 100644 index 0000000000000000000000000000000000000000..c890fdc8109a7af17f9b421ede4adcc6b81c955c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/121.txt @@ -0,0 +1 @@ + the semiconductor industry is one of the most technology-evolving and capital-intensive market sectors. effective inspection and metrology are necessary to improve product yield, increase product quality and reduce costs. in recent years, many semiconductor manufacturing equipments are equipped with sensors to facilitate real-time monitoring of the production process. these production-state and equipment-state sensor data provide an opportunity to practice machine-learning technologies in various domains, such as anomaly/fault detection, maintenance scheduling, quality prediction, etc. in this work, we focus on the task of soft sensing regression, which uses sensor data to predict impending inspection measurements that used to be measured in wafer inspection and metrology systems. we proposed an lstm-based regressor and designed two loss functions for model training. although engineers may look at our prediction errors in a subjective manner, a new piece-wise evaluation metric was proposed for assessing model accuracy in a mathematical way. the experimental results demonstrated that the proposed model can achieve accurate and early prediction of various types of inspections in complicated manufacturing processes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/122.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/122.txt new file mode 100644 index 0000000000000000000000000000000000000000..25ba15cc041c0c509f2d363fff140a429c08f1f8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/122.txt @@ -0,0 +1 @@ + control of the surface texture of steel strip during the galvanizing and temper rolling processes is essential to satisfy customer requirements and is conventionally measured post-production using a stylus. in-production laser reflection measurement is less consistent than physical measurement but enables real time adjustment of processing parameters to optimize product surface characteristics. we propose the use of machine learning to improve accuracy of the transformation from inline laser reflection measurements to a prediction of surface properties. in addition to accuracy, model evaluation speed is important for fast feedback control. the rocket model is one of the fastest state of the art models, however it can be sped up by utilizing a gpu. our contribution is to implement the model in pytorch for fast gpu kernel transforms and provide a soft version of the proportion of positive values (ppv) nonlinear pooling function, allowing gradient flow. we perform timing and performance experiments comparing the implementations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/123.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/123.txt new file mode 100644 index 0000000000000000000000000000000000000000..7641611b5163a0faba12d850ea85b962d0aaaf43 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/123.txt @@ -0,0 +1 @@ + a trained neural network model contains information on the training data. 
given such a model, malicious parties can leverage the "knowledge" in this model and design ways to print out any usable information. therefore, it is valuable to explore the ways to conduct a such attack and demonstrate its severity. in this work, we proposed ways to generate a data point of the target class without prior knowledge of the exact target distribution by using a pre-trained diffusion model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/124.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/124.txt new file mode 100644 index 0000000000000000000000000000000000000000..4bab201725293136a73bca588966a00ff31ebc7c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/124.txt @@ -0,0 +1 @@ + the ubiquitous availability of mobile devices capable of location tracking led to a significant rise in the collection of gps data. several compression methods have been developed in order to reduce the amount of storage needed while keeping the important information. in this paper, we present an lstm-autoencoder based approach in order to compress and reconstruct gps trajectories, which is evaluated on both a gaming and real-world dataset. we consider various compression ratios and trajectory lengths. the performance is compared to other trajectory compression algorithms, i.e., douglas-peucker. overall, the results indicate that our approach outperforms douglas-peucker significantly in terms of the discrete fréchet distance and dynamic time warping. furthermore, by reconstructing every point lossy, the proposed methodology offers multiple advantages over traditional methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/125.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/125.txt new file mode 100644 index 0000000000000000000000000000000000000000..cd1798a9ecaf1b6aed389594ca8c2f711968d6b9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/125.txt @@ -0,0 +1 @@ + the increasing demand for intelligent services and privacy protection of mobile and internet of things (iot) devices motivates the wide application of federated edge learning (fel), in which devices collaboratively train on-device machine learning (ml) models without sharing their private data. limited by device hardware, diverse user behaviors and network infrastructure, the algorithm design of fel faces challenges related to resources, personalization and network environments. fortunately, knowledge distillation (kd) has been leveraged as an important technique to tackle the above challenges in fel. in this paper, we investigate the works that kd applies to fel, discuss the limitations and open problems of existing kd-based fel approaches, and provide guidance for their real deployment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/126.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/126.txt new file mode 100644 index 0000000000000000000000000000000000000000..4dcac0114123f61adf59f8a525f7a6403d47e356 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/126.txt @@ -0,0 +1 @@ + iot devices are sorely underutilised in the medical field, especially within machine learning for medicine, yet they offer unrivalled benefits. 
iot devices are low-cost, energy-efficient, small and intelligent devices. in this paper, we propose a distributed federated learning framework for iot devices, more specifically for iomt (internet of medical things), using blockchain to allow for a decentralised scheme improving privacy and efficiency over a centralised system; this allows us to move from the cloud-based architectures, which are prevalent, to the edge. the system is designed for three paradigms: 1) training neural networks on iot devices to allow for collaborative training of a shared model whilst decoupling the learning from the dataset to ensure privacy. training is performed in an online manner simultaneously amongst all participants, allowing for training on actual data that may not have been present in a dataset collected in the traditional way and for dynamically adapting the system whilst it is being trained. 2) training of an iomt system in a fully private manner such as to mitigate the issue with confidentiality of medical data and to build robust, and potentially bespoke, models where not much, if any, data exists. 3) distribution of the actual network training, something federated learning itself does not do, to allow hospitals, for example, to utilize their spare computing resources to train network models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/127.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/127.txt new file mode 100644 index 0000000000000000000000000000000000000000..c169e8d33fa17832ad3b33c541697230a2bfcc62 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/127.txt @@ -0,0 +1 @@ + a common technique in reinforcement learning is to evaluate the value function from monte carlo simulations of a given policy, and use the estimated value function to obtain a new policy which is greedy with respect to the estimated value function. a well-known longstanding open problem in this context is to prove the convergence of such a scheme when the value function of a policy is estimated from data collected from a single sample path obtained from implementing the policy (see page 99 of , page 8 of ). we present a solution to the open problem by showing that a first-visit version of such a policy iteration scheme indeed converges to the optimal policy provided that the policy improvement step uses lookahead rather than a simple greedy policy improvement. we provide results both for the original open problem in the tabular setting and also present extensions to the function approximation setting, where we show that the policy resulting from the algorithm performs close to the optimal policy within a function approximation error. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/128.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/128.txt new file mode 100644 index 0000000000000000000000000000000000000000..916f3d9aa776842ab7f6c0c52e3d031bf4710882 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/128.txt @@ -0,0 +1 @@ + recent years have seen growing interest in learning disentangled representations, in which distinct features, such as size or shape, are represented by distinct neurons. quantifying the extent to which a given representation is disentangled is not straightforward; multiple metrics have been proposed. 
in this paper, we identify two failings of existing metrics, which mean they can assign a high score to a model which is still entangled, and we propose two new metrics, which redress these problems. we then consider the task of compositional generalization. unlike prior works, we treat this as a classification problem, which allows us to use it to measure the disentanglement ability of the encoder, without depending on the decoder. we show that performance on this task is (a) generally quite poor, (b) correlated with most disentanglement metrics, and (c) most strongly correlated with our newly proposed metrics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/129.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/129.txt new file mode 100644 index 0000000000000000000000000000000000000000..171a90ab9216488741f68cfe51cd14849508920c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/129.txt @@ -0,0 +1 @@ + the heart is one of the most vital organs in the human body. it supplies blood and nutrients in other parts of the body. therefore, maintaining a healthy heart is essential. as a heart disorder, arrhythmia is a condition in which the heart's pumping mechanism becomes aberrant. the electrocardiogram is used to analyze the arrhythmia problem from the ecg signals because of its fewer difficulties and cheapness. the heart peaks shown in the ecg graph are used to detect heart diseases, and the r peak is used to analyze arrhythmia disease. arrhythmia is grouped into two groups -tachycardia and bradycardia for detection. in this paper, we discussed many different techniques such as deep cnns, lstm, svm, nn classifier, wavelet, tqwt, etc., that have been used for detecting arrhythmia using various datasets throughout the previous decade. this work shows the analysis of some arrhythmia classification on the ecg dataset. here, data preprocessing, feature extraction, classification processes were applied on most research work and achieved better performance for classifying ecg signals to detect arrhythmia. automatic arrhythmia detection can help cardiologists make the right decisions immediately to save human life. in addition, this research presents various previous research limitations with some challenges in detecting arrhythmia that will help in future research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/13.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/13.txt new file mode 100644 index 0000000000000000000000000000000000000000..b91e05e886e5a69074a278df647e5b7d5ccf88f4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/13.txt @@ -0,0 +1 @@ + this paper proposes a possible method using natural language processing that might assist in the fda medical device marketing process. actual device descriptions are taken and matched with the device description in fda title 21 of cfr to determine their corresponding device type. both pre-trained word embeddings such as fasttext and large pre-trained sentence embedding models such as sentence transformers are evaluated on their accuracy in characterizing a piece of device description. an experiment is also done to test whether these models can identify the devices wrongly classified in the fda database. 
the results show that sentence transformers with t5 and mpnet and gpt-3 semantic search embeddings show high accuracy in identifying the correct classification by narrowing down the correct label to be contained in the first 15 most likely results, as compared to 2585 types of device descriptions that must be manually searched through. on the other hand, all methods demonstrate high accuracy in identifying completely incorrectly labeled devices, but all fail to identify false device classifications that are wrong but closely related to the true label. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/130.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/130.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5fe2eff0d80c70726d532c0f475a70095123fa0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/130.txt @@ -0,0 +1 @@ + active learning is a powerful method for training machine learning models with limited labeled data. one commonly used technique for active learning is batchbald, which uses bayesian neural networks to find the most informative points to label in a pool set. however, batchbald can be very slow to compute, especially for larger datasets. in this paper, we propose a new approximation, k-bald, which uses k-wise mutual information terms to approximate batchbald, making it much less expensive to compute. results on the mnist dataset show that k-bald is significantly faster than batchbald while maintaining similar performance. additionally, we also propose a dynamic approach for choosing k based on the quality of the approximation, making it more efficient for larger datasets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/131.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/131.txt new file mode 100644 index 0000000000000000000000000000000000000000..0e388a9d83d49a4f1663db7dabd29a40a40ec5e2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/131.txt @@ -0,0 +1 @@ + previous work has established that rnns with an unbounded activation function have the capacity to count exactly. however, it has also been shown that rnns are challenging to train effectively and generally do not learn exact counting behaviour. in this paper, we focus on this problem by studying the simplest possible rnn, a linear single-cell network. we conduct a theoretical analysis of linear rnns and identify conditions for the models to exhibit exact counting behaviour. we provide a formal proof that these conditions are necessary and sufficient. we also conduct an empirical analysis using tasks involving a dyck-1-like balanced bracket language under two different settings. we observe that linear rnns generally do not meet the necessary and sufficient conditions for counting behaviour when trained with the standard approach. we investigate how varying the length of training sequences and utilising different target classes impacts model behaviour during training and the ability of linear rnn models to effectively approximate the indicator conditions. 
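To illustrate the exact counting behaviour that the 131.txt abstract above attributes to linear single-cell rnns, here is a toy construction in Python: a single linear recurrent cell whose hidden state tracks bracket depth for a dyck-1-style language. The weights and the balance check are an illustrative hand-built example, not the paper's trained models or its formal conditions.

def linear_rnn_count(tokens, w_open=1.0, w_close=-1.0):
    # Single linear cell h_t = h_{t-1} + w(x_t): with weights +1 for '(' and -1 for ')',
    # the hidden state equals the nesting depth, so a string is balanced iff the final
    # state is 0 and no intermediate state dips below 0.
    h = 0.0
    valid = True
    for t in tokens:
        h += w_open if t == "(" else w_close
        if h < 0:
            valid = False
    return h, valid and h == 0.0

print(linear_rnn_count("(()())"))   # (0.0, True)  -- balanced
print(linear_rnn_count("(()"))      # (1.0, False) -- unbalanced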
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/132.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/132.txt new file mode 100644 index 0000000000000000000000000000000000000000..533486f95e7dfc3efee5bdbff7af7d49d983c524 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/132.txt @@ -0,0 +1 @@ + rule-based explanations provide simple reasons explaining the behavior of machine learning classifiers at given points in the feature space. several recent methods (anchors, lore, etc.) purport to generate rule-based explanations for arbitrary or black-box classifiers. but what makes these methods work in general? we introduce a topological framework for rule-based explanation methods and provide a characterization of explainability in terms of the definability of a classifier relative to an explanation scheme. we employ this framework to consider various explanation schemes and argue that the preferred scheme depends on how much the user knows about the domain and the probability measure over the feature space. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/133.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/133.txt new file mode 100644 index 0000000000000000000000000000000000000000..3028723d61006551cb69c2b86c3e5873fe1715f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/133.txt @@ -0,0 +1 @@ + change point detection plays a fundamental role in many real-world applications, where the goal is to analyze and monitor the behaviour of a data stream. in this paper, we study change detection in binary streams. to this end, we use a likelihood ratio between two models as a measure for indicating change. the first model is a single bernoulli variable while the second model divides the stored data into two segments, and models each segment with its own bernoulli variable. finding the optimal split can be done in o(n) time, where n is the number of entries since the last change point. this is too expensive for large n. to combat this we propose an approximation scheme that yields a (1 - ǫ)-approximation in o(ǫ^(-1) log^2 n) time. the speed-up consists of several steps: first we reduce the number of possible candidates by adopting a known result from segmentation problems. we then show that for fixed bernoulli parameters we can find the optimal change point in logarithmic time. finally, we show how to construct a candidate list of size o(ǫ^(-1) log n) for model parameters. we demonstrate empirically the approximation quality and the running time of our algorithm, showing that we can gain a significant speed-up with a minimal average loss in optimality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/134.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/134.txt new file mode 100644 index 0000000000000000000000000000000000000000..6c38924192bf8d3635c9f8b6d11f351755a77596 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/134.txt @@ -0,0 +1 @@ + sequences are often not received in their entirety at once, but instead, received incrementally over time, element by element. since early predictions yield a higher benefit, one aims to classify a sequence as accurately as possible, as soon as possible, without having to wait for the last element. for this early sequence classification, we introduce our novel classifier-induced stopping. 
while previous methods depend on exploration during training to learn when to stop and classify, ours is a more direct, supervised approach. our classifier-induced stopping achieves an average pareto frontier auc increase of 11.8% over multiple experiments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/135.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/135.txt new file mode 100644 index 0000000000000000000000000000000000000000..54ff1e015e5486362c1a93d4bec1055a7472f5f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/135.txt @@ -0,0 +1 @@ + the aim of boosting is to convert a sequence of weak learners into a strong learner. at their heart, these methods are fully sequential. in this paper, we investigate the possibility of parallelizing boosting. our main contribution is a strong negative result, implying that significant parallelization of boosting requires an exponential blow-up in the total computing resources needed for training. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/136.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/136.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab734b3a62cb1e0065889795743bfccd98d49ed0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/136.txt @@ -0,0 +1 @@ + random forest is a popular machine learning approach for the analysis of high-dimensional data because it is flexible and provides variable importance measures for the selection of relevant features. however, the complex relationships between the features are usually not considered for the selection and thus also neglected for the characterization of the analysed samples. here we propose two novel approaches that focus on the mutual impact of features in random forests. mutual forest impact (mfi) is a relation parameter that evaluates the mutual association of the features to the outcome and, hence, goes beyond the analysis of correlation coefficients. mutual impurity reduction (mir) is an importance measure that combines this relation parameter with the importance of the individual features. mir and mfi are implemented together with testing procedures that generate p-values for the selection of related and important features. applications to various simulated data sets and the comparison to other methods for feature selection and relation analysis show that mfi and mir are very promising to shed light on the complex relationships between features and outcome. in addition, they are not affected by common biases, e.g. that features with many possible splits or high minor allele frequencies are preferred. the approaches are implemented in version 0.3.0 of the r package rfsurrogates that is available at github.com/stephanseifert/rfsurrogates. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/137.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/137.txt new file mode 100644 index 0000000000000000000000000000000000000000..b19ec942e16587e48cc64589e9e46370b33cf3ba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/137.txt @@ -0,0 +1 @@ + training time budget and size of the dataset are among the factors affecting the performance of a deep neural network (dnn). this paper shows that neural architecture search (nas), hyperparameter optimization (hpo), and data augmentation help dnns perform much better while these two factors are limited. 
however, searching for an optimal architecture and the best hyperparameter values, besides a good combination of data augmentation techniques, under low resources requires many experiments. we present our approach to achieving such a goal in three steps: reducing training epoch time by compressing the model while maintaining the performance compared to the original model, preventing model overfitting when the dataset is small, and performing the hyperparameter tuning. we used nomad, which is black-box optimization software based on a derivative-free algorithm, to do nas and hpo. our work achieved an accuracy of 86.0% on a tiny subset of mini-imagenet (vinyals et al., 2016) at the iclr 2021 hardware aware efficient training (haet) challenge and won second place in the competition. the competition results can be found at haet2021.github.io/challenge and our source code can be found at github.com/dounialakhmiri/iclr haet2021. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/138.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/138.txt new file mode 100644 index 0000000000000000000000000000000000000000..63f6531364aa8f1cf3b66d9e1fbfdb053236e4e7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/138.txt @@ -0,0 +1 @@ + a constrained version of the online convex optimization (oco) problem is considered. with slotted time, for each slot, first an action is chosen. subsequently the loss function and the constraint violation penalty evaluated at the chosen action point are revealed. for each slot, both the loss function as well as the function defining the constraint set are assumed to be smooth and strongly convex. in addition, once an action is chosen, local information about a feasible set within a small neighborhood of the current action is also revealed. an algorithm is allowed to compute at most one gradient at its point of choice given the described feedback to choose the next action. the goal of an algorithm is to simultaneously minimize the dynamic regret (loss incurred compared to the oracle's loss) and the constraint violation penalty (penalty accrued compared to the oracle's penalty). we propose an algorithm that follows projected gradient descent over a suitably chosen set around the current action. we show that both the dynamic regret and the constraint violation are order-wise bounded by the path-length, the sum of the distances between the consecutive optimal actions. moreover, we show that the derived bounds are the best possible. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/139.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/139.txt new file mode 100644 index 0000000000000000000000000000000000000000..71048184de8dabca9629c4de26834ed5af96109c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/139.txt @@ -0,0 +1 @@ + this paper presents the real-time adaptive and interpretable detection (raid) algorithm. the novel approach addresses the limitations of state-of-the-art anomaly detection methods for multivariate dynamic processes, which are restricted to detecting anomalies within the scope of the model training conditions. the raid algorithm adapts to non-stationary effects such as data drift and change points that may not be accounted for during model development, resulting in prolonged service life. 
a dynamic model based on joint probability distribution handles anomalous behavior detection in a system and the root cause isolation based on adaptive process limits. raid algorithm does not require changes to existing process automation infrastructures, making it highly deployable across different domains. two case studies involving real dynamic system data demonstrate the benefits of the raid algorithm, including change point adaptation, root cause isolation, and improved detection accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/14.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/14.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee8d8fbdd1a1521724cd76245b1370b9157d9a4b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/14.txt @@ -0,0 +1 @@ + many dynamical systems-from robots interacting with their surroundings to large-scale multiphysics systems-involve a number of interacting subsystems. toward the objective of learning composite models of such systems from data, we present i) a framework for compositional neural networks, ii) algorithms to train these models, iii) a method to compose the learned models, iv) theoretical results that bound the error of the resulting composite models, and v) a method to learn the composition itself, when it is not known a priori. the end result is a modular approach to learning: neural network submodels are trained on trajectory data generated by relatively simple subsystems, and the dynamics of more complex composite systems are then predicted without requiring additional data generated by the composite systems themselves. we achieve this compositionality by representing the system of interest, as well as each of its subsystems, as a port-hamiltonian neural network (phnn)-a class of neural ordinary differential equations that uses the port-hamiltonian systems formulation as inductive bias. we compose collections of phnns by using the system's physics-informed interconnection structure, which may be known a priori, or may itself be learned from data. we demonstrate the novel capabilities of the proposed framework through numerical examples involving interacting spring-mass-damper systems. models of these systems, which include nonlinear energy dissipation and control inputs, are learned independently. accurate compositions are learned using an amount of training data that is negligible in comparison with that required to train a new model from scratch. finally, we observe that the composite phnns enjoy properties of port-hamiltonian systems, such as cyclo-passivity-a property that is useful for control purposes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/140.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/140.txt new file mode 100644 index 0000000000000000000000000000000000000000..d469bd8ef81b7f1c34fb8cdc8e53c376b539f194 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/140.txt @@ -0,0 +1 @@ + graph neural networks (gnns) have found extensive applications in learning from graph data. however, real-world graphs often possess diverse structures and comprise nodes and edges of varying types. to bolster the generalization capacity of gnns, it has become customary to augment training graph structures through techniques like graph augmentations and large-scale pre-training on a wider array of graphs. 
balancing this diversity while avoiding increased computational costs and the notorious trainability issues of gnns is crucial. this study introduces the concept of mixture-of-experts (moe) to gnns, with the aim of augmenting their capacity to adapt to a diverse range of training graph structures, without incurring explosive computational overhead. the proposed graph mixture of experts (gmoe) model empowers individual nodes in the graph to dynamically and adaptively select more general information aggregation experts. these experts are trained to capture distinct subgroups of graph structures and to incorporate information with varying hop sizes, where those with larger hop sizes specialize in gathering information over longer distances. the effectiveness of gmoe is validated through a series of experiments on a diverse set of tasks, including graph, node, and link prediction, using the ogb benchmark. notably, it enhances roc-auc by 1.81% in ogbg-molhiv and by 1.40% in ogbg-molbbbp, when compared to the non-moe baselines. our code is publicly available at https: //github.com/vita-group/graph-mixture-of-experts. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/141.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/141.txt new file mode 100644 index 0000000000000000000000000000000000000000..de38e405b1ff79971bc77626d74e948514a313cd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/141.txt @@ -0,0 +1 @@ + the ability to explain decisions made by machine learning models remains one of the most significant hurdles towards widespread adoption of ai in highly sensitive areas such as medicine, cybersecurity or autonomous driving. great interest exists in understanding which features of the input data prompt model decision making. in this contribution, we propose a novel approach to identify relevant features of the input data, inspired by methods from the energy landscapes field, developed in the physical sciences. by identifying conserved weights within groups of minima of the loss landscapes, we can identify the drivers of model decision making. analogues to this idea exist in the molecular sciences, where coordinate invariants or order parameters are employed to identify critical features of a molecule. however, no such approach exists for machine learning loss landscapes. we will demonstrate the applicability of energy landscape methods to machine learning models and give examples, both synthetic and from the real world, for how these methods can help to make models more interpretable. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/142.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/142.txt new file mode 100644 index 0000000000000000000000000000000000000000..54a324ace31e56a8cc7bf54f2ee7132c9c91efaf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/142.txt @@ -0,0 +1 @@ + in safe mdp planning, a cost function based on the current state and action is often used to specify safety aspects. in the real world, often the state representation used may lack sufficient fidelity to specify such safety constraints. operating based on an incomplete model can often produce unintended negative side effects (nses). to address these challenges, first, we associate safety signals with state-action trajectories (rather than just an immediate state-action). this makes our safety model highly general. 
we also assume categorical safety labels are given for different trajectories, rather than a numerical cost function, which is harder to specify by the problem designer. we then employ a supervised learning model to learn such non-markovian safety patterns. second, we develop a lagrange multiplier method, which incorporates the safety model and the underlying mdp model in a single computation graph to facilitate agent learning of safe behaviors. finally, our empirical results on a variety of discrete and continuous domains show that this approach can satisfy complex non-markovian safety constraints while optimizing an agent's total returns, is highly scalable, and is also better than the previous best approach for markovian nses. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/143.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/143.txt new file mode 100644 index 0000000000000000000000000000000000000000..dce26fcee77c7f98a5f2591ef425d41f45ab867d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/143.txt @@ -0,0 +1 @@ + we describe and evaluate lf-checker, a metaverifier tool based on machine learning. it extracts multiple features of the program under test and predicts the optimal configuration (flags) of a bounded model checker with a decision tree. our current work is specialised in concurrency verification and employs esbmc as a back-end verification engine. in the paper, we demonstrate that lf-checker achieves better results than the default configuration of the underlying verification engine. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/144.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/144.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a2ece2de726f6e8dc62f272d6c9bf02ba540759 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/144.txt @@ -0,0 +1 @@ + in instances of online kernel learning where little prior information is available and centralized learning is unfeasible, past research has shown that distributed and online multikernel learning provides sub-linear regret as long as every pair of nodes in the network can communicate (i.e., the communications network is a complete graph). in addition, to manage the communication load, which is often a performance bottleneck, communications between nodes can be quantized. this letter expands on these results to non-fully connected graphs, which is often the case in wireless sensor networks. to address this challenge, we propose a gossip algorithm and provide a proof that it achieves sub-linear regret. experiments with real datasets confirm our findings. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/145.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/145.txt new file mode 100644 index 0000000000000000000000000000000000000000..75945033bf999bee00f52910f8a33dffb3d1c5e9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/145.txt @@ -0,0 +1 @@ + data privacy and decentralised data collection has become more and more popular in recent years. 
in order to solve issues with privacy, communication bandwidth and learning from spatio-temporal data, we propose two efficient models which use differential privacy and decentralized lstm-learning: one, in which a long short term memory (lstm) model is learned for extracting local temporal node constraints and feeding them into a dense-layer (labelproportiontolocal). the other approach extends the first one by fetching histogram data from the neighbors and joining the information with the lstm output (labelproportiontodense). for evaluation, two popular datasets are used: pems-bay and metr-la. additionally, we provide our own dataset, which is based on lust. the evaluation shows the tradeoff between performance and data privacy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/146.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/146.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5f62028fba5826a25e1e921eb5c2befa05ec325 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/146.txt @@ -0,0 +1 @@ + millions of vulnerable consumer iot devices in home networks are the enabler for cyber crimes putting user privacy and internet security at risk. internet service providers (isps) are best poised to play key roles in mitigating risks by automatically inferring active iot devices per household and notifying users of vulnerable ones. developing a scalable inference method that can perform robustly across thousands of home networks is a non-trivial task. this paper focuses on the challenges of developing and applying data-driven inference models when labeled data of device behaviors is limited and the distribution of data changes (concept drift) across time and space domains. our contributions are three-fold: (1) we collect and analyze network traffic of 24 types of consumer iot devices from 12 real homes over six weeks to highlight the challenge of temporal and spatial concept drifts in network behavior of iot devices; (2) we analyze the performance of two inference strategies, namely "global inference" (a model trained on a combined set of all labeled data from training homes) and "contextualized inference" (several models each trained on the labeled data from a training home) in the presence of concept drifts; and (3) to manage concept drifts, we develop a method that dynamically applies the "closest" model (from a set) to network traffic of unseen homes during the testing phase, yielding better performance in 20% of scenarios. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/147.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/147.txt new file mode 100644 index 0000000000000000000000000000000000000000..41dc28d01d68928fda3c9331765ccc20daecb603 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/147.txt @@ -0,0 +1 @@ + we provide a full characterisation of all of the possible alternating group (a_n) equivariant neural networks whose layers are some tensor power of r^n. in particular, we find a basis of matrices for the learnable, linear, a_n-equivariant layer functions between such tensor power spaces in the standard basis of r^n. we also describe how our approach generalises to the construction of neural networks that are equivariant to local symmetries. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/148.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/148.txt new file mode 100644 index 0000000000000000000000000000000000000000..48803f935c242237d9b323ae15091ca557deda56 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/148.txt @@ -0,0 +1 @@ + it is of great significance to estimate the performance of a given model architecture without training in the application of neural architecture search (nas) as it may take a lot of time to evaluate the performance of an architecture. in this paper, a novel nas framework called gp-nasensemble is proposed to predict the performance of a neural network architecture with a small training dataset. we make several improvements on the gp-nas model to make it share the advantage of ensemble learning methods. our method ranks second in the cvpr2022 second lightweight nas challenge performance prediction track. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/149.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/149.txt new file mode 100644 index 0000000000000000000000000000000000000000..0b60f0dabc18120c3fd9ca12c24fa2a901e5dead --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/149.txt @@ -0,0 +1 @@ + convolutional neural networks have been used in a variety of image related applications after their rise in popularity due to imagenet competition. convolutional neural networks have shown remarkable results in applications including face recognition, moving target detection and tracking, classification of food based on the calorie content and many more. designing of convolutional neural networks requires experts having a cross domain knowledge and it is laborious, which requires a lot of time for testing different values for different hyperparameter along with the consideration of different configurations of existing architectures. neural architecture search is an automated way of generating neural network architectures which saves researchers from all the brute-force testing trouble, but with the drawback of consuming a lot of computational resources for a prolonged period. in this paper, we propose an automated neural architecture search framework dqnas, guided by the principles of reinforcement learning along with one-shot training which aims to generate neural network architectures that show superior performance and have minimum scalability problem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/15.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/15.txt new file mode 100644 index 0000000000000000000000000000000000000000..f9cab615e4cfe5ec3f5eafedcc225c2938521021 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/15.txt @@ -0,0 +1 @@ + mobile health (mhealth) technologies empower patients to adopt/maintain healthy behaviors in their daily lives, by providing interventions (e.g. push notifications) tailored to the user's needs. in these settings, without intervention, human decision making may be impaired (e.g. valuing near term pleasure over own long term goals). in this work, we formalize this relationship with a framework in which the user optimizes a (potentially impaired) markov decision process (mdp) and the mhealth agent intervenes on the user's mdp parameters. we show that different types of impairments imply different types of optimal intervention. 
we also provide analytical and empirical explorations of these differences. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/150.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/150.txt new file mode 100644 index 0000000000000000000000000000000000000000..4d7533b781aefdbea912f8c3e29814bc58f9077f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/150.txt @@ -0,0 +1 @@ + subgraph-enhanced graph neural networks (sgnn) can increase the expressive power of the standard message-passing framework. this model family represents each graph as a collection of subgraphs, generally extracted by random sampling or with hand-crafted heuristics. our key observation is that by selecting "meaningful" subgraphs, besides improving the expressivity of a gnn, it is also possible to obtain interpretable results. for this purpose, we introduce a novel framework that jointly predicts the class of the graph and a set of explanatory sparse subgraphs, which can be analyzed to understand the decision process of the classifier. we compare the performance of our framework against standard subgraph extraction policies, like random node/edge deletion strategies. the subgraphs produced by our framework allow to achieve comparable performance in terms of accuracy, with the additional benefit of providing explanations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/151.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/151.txt new file mode 100644 index 0000000000000000000000000000000000000000..ece0067d005c9eec73bb83e823271ee201db8d60 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/151.txt @@ -0,0 +1 @@ + diffusion models (dms) are powerful generative models that add gaussian noise to the data and learn to remove it. we wanted to determine which noise distribution (gaussian or non-gaussian) led to better generated data in dms. since dms do not work by design with non-gaussian noise, we built a framework that allows reversing a diffusion process with non-gaussian location-scale noise. we use that framework to show that the gaussian distribution performs the best over a wide range of other distributions (laplace, uniform, t, generalized-gaussian). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/152.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/152.txt new file mode 100644 index 0000000000000000000000000000000000000000..276744600465c40d09f8c72bc39d75b10d8f1366 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/152.txt @@ -0,0 +1 @@ + the highly structured energy landscape of the loss as a function of parameters for deep neural networks makes it necessary to use sophisticated optimization strategies in order to discover (local) minima that guarantee reasonable performance. overcoming less suitable local minima is an important prerequisite and often momentum methods are employed to achieve this. as in other non local optimization procedures, this however creates the necessity to balance between exploration and exploitation. in this work, we suggest an event based control mechanism for switching from exploration to exploitation based on reaching a predefined reduction of the loss function. as we give the momentum method a port hamiltonian interpretation, we apply the 'heavy ball with friction' interpretation and trigger breaking (or friction) when achieving certain goals. 
we benchmark our method against standard stochastic gradient descent and provide experimental evidence for improved performance of deep neural networks when our strategy is applied. index terms: neural nets • momentum • goal oriented search • port hamilton systems \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/153.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/153.txt new file mode 100644 index 0000000000000000000000000000000000000000..08e3eddd56d59143ae1a88dbb898aaece668ab04 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/153.txt @@ -0,0 +1 @@ + quantization has emerged as an essential technique for deploying deep neural networks (dnns) on devices with limited resources. however, quantized models exhibit vulnerabilities when exposed to various noises in real-world applications. despite the importance of evaluating the impact of quantization on robustness, existing research on this topic is limited and often disregards established principles of robustness evaluation, resulting in incomplete and inconclusive findings. to address this gap, we thoroughly evaluated the robustness of quantized models against various noises (adversarial attacks, natural corruptions, and systematic noises) on imagenet. extensive experiments demonstrate that lower-bit quantization is more resilient to adversarial attacks but is more susceptible to natural corruptions and systematic noises. notably, our investigation reveals that impulse noise (in natural corruptions) and the nearest neighbor interpolation (in systematic noises) have the most significant impact on quantized models. our research contributes to advancing the robust quantization of models and their deployment in real-world scenarios. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/154.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/154.txt new file mode 100644 index 0000000000000000000000000000000000000000..f024f9c186cf4bd23c4a8bd41d1891eabc923a71 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/154.txt @@ -0,0 +1 @@ + mobile phones and other electronic gadgets/devices have aided in collecting data without the need for data entry. this paper will specifically focus on mobile health data (m-health). mobile health data use mobile devices to gather clinical health data and track patients' vitals in real time. our study aims to help small or big sports teams decide whether an athlete is a good fit for a particular game by comparing several machine learning algorithms that predict human behavior and health using the data collected from mobile devices and sensors placed on patients. in this study, we have obtained the dataset from a similar study done on m-health. the dataset contains vital signs recordings of ten volunteers from different backgrounds. they had to perform several physical activities with a sensor placed on their bodies. our study used 5 machine learning algorithms (xgboost, naïve bayes, decision tree, random forest, and logistic regression) to analyze and predict human health behavior. xgboost performed better compared to the other machine learning algorithms and achieved 95.2% in accuracy, 99.5% in sensitivity, 99.5% in specificity, and 99.66% in f-1 score. 
our research indicated a promising future for m-health in predicting human behavior, and further research and exploration need to be done before it can be made available for commercial use, specifically in the sports industry. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/155.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/155.txt new file mode 100644 index 0000000000000000000000000000000000000000..1dc5938e6909dd8a7ef478577f92cf743d5a0407 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/155.txt @@ -0,0 +1 @@ + unsupervised representation learning on graphs is gaining traction due to the increasing abundance of unlabelled network data and the compactness, richness, and usefulness of the representations generated. in this context, the need to consider fairness and bias constraints while generating the representations has been well motivated and studied to some extent in prior works. one major limitation of most of the prior works in this setting is that they do not aim to address the bias generated due to connectivity patterns in the graphs, such as varied node centrality, which leads to a disproportionate performance across nodes. in our work, we aim to address this issue of mitigating bias due to inherent graph structure in an unsupervised setting. to this end, we propose cafin, a centrality-aware fairness-inducing framework that leverages the structural information of graphs to tune the representations generated by existing frameworks. we deploy it on graphsage (a popular framework in this domain) and showcase its efficacy on two downstream tasks - node classification and link prediction. empirically, cafin consistently reduces the performance disparity across popular datasets (varying from 18 to 80% reduction in performance disparity) from various domains while incurring only a minimal cost of fairness. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/156.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/156.txt new file mode 100644 index 0000000000000000000000000000000000000000..5b4cd364362ea915fe0ea64fc780b76de86b9b98 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/156.txt @@ -0,0 +1 @@ + knowledge distillation is a method of transferring the knowledge from a complex deep neural network (dnn) to a smaller and faster dnn, while preserving its accuracy. recent variants of knowledge distillation include teaching assistant distillation, curriculum distillation, mask distillation, and decoupling distillation, which aim to improve the performance of knowledge distillation by introducing additional components or by changing the learning process. teaching assistant distillation involves an intermediate model called the teaching assistant, while curriculum distillation follows a curriculum similar to human education. mask distillation focuses on transferring the attention mechanism learned by the teacher, and decoupling distillation decouples the distillation loss from the task loss. overall, these variants of knowledge distillation have shown promising results in improving the performance of knowledge distillation. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/157.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/157.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0f1b2ec08ac9a8c54fbaf8608f41eb82c91ae2c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/157.txt @@ -0,0 +1 @@ + purpose - since its establishment in 1999, the metro rail transit line 3 (mrt3) has served as a transportation option for numerous passengers in metro manila, philippines. the philippine government's transportation department records more than a thousand people using the mrt3 daily, and forecasting the daily passenger count may be rather challenging. the mrt3's daily ridership fluctuates owing to variables such as holidays, working days, and other unexpected issues. commuters do not know how many other commuters are on their route on a given day, which may hinder their ability to plan an efficient itinerary. currently, the dotr depends on spreadsheets containing historical data, which might be challenging to examine. this study presents a time series prediction of daily traffic to anticipate future attendance at a particular station on specific days. method - the proposed prediction approach uses dotr ridership data to train multiple models that can provide correct data on azure automl. these trained models have the highest accuracy: gradient boosting, extreme random trees, and light gbm. results - based on historical data, this study aims to build and evaluate several prediction models for estimating the number of riders per station. on azure automl, the gradient boosting, extreme random trees, and light gbm algorithms were investigated and executed. gradient boosting and extreme random trees frequently made the most accurate predictions of the three algorithms, with an average accuracy of over 90%. conclusion - this research aims to develop and test different models of prediction for forecasting the number of riders per station based on historical data. seven days of data were utilized for applying the model or assessing its correctness. each model's resultant accuracy in each station is unique and may be modified by ridership and geography. however, the model still provides complete precision. accuracy may be enhanced if additional current, valuable, and efficient characteristics are introduced to the dataset. mrt3 might incorporate a mortality rate component into the station's relative location or passenger capacity. recommendation - as the acquired data were from a pandemic, it is suggested that additional information be employed in future research. the circumstances of the mrt might change substantially over time; therefore, it is essential to refresh the training dataset. practical implication - there are several benefits to applying time series forecasting in predicting the ridership of the mrt3 in the philippines. this can allow decision-makers to make informed decisions about optimizing the mrt3 system to meet the needs of commuters. additionally, time series forecasting can help to identify potential problems or issues in advance, such as overcrowding or maintenance needs, allowing for proactive solutions to be implemented. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/158.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/158.txt new file mode 100644 index 0000000000000000000000000000000000000000..646c0d7bb7d4ccaef5af165285c30b7e69f9f360 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/158.txt @@ -0,0 +1 @@ + compiled binary executables are often the only available artifact in reverse engineering, malware analysis, and software systems maintenance. unfortunately, the lack of semantic information like variable types makes comprehending binaries difficult. in efforts to improve the comprehensibility of binaries, researchers have recently used machine learning techniques to predict semantic information contained in the original source code. chen et al. implemented dirty, a transformer-based encoder-decoder architecture capable of augmenting decompiled code with variable names and types by leveraging decompiler output tokens and variable size information. chen et al. were able to demonstrate a substantial increase in name and type extraction accuracy on hex-rays decompiler outputs compared to existing static analysis and ai-based techniques. we extend the original dirty results by re-training the dirty model on a dataset produced by the open-source ghidra decompiler. although chen et al. concluded that ghidra was not a suitable decompiler candidate due to its difficulty in parsing and incorporating dwarf symbols during analysis, we demonstrate that straightforward parsing of variable data generated by ghidra results in similar retyping performance. we hope this work inspires further interest and adoption of the ghidra decompiler for use in research projects. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/159.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/159.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a21fa9522908a82614d3aad81eb38aff9fc2276 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/159.txt @@ -0,0 +1 @@ + recent research suggests that combining ai models with a human expert can exceed the performance of either alone. the combination of their capabilities is often realized by learning to defer algorithms that enable the ai to learn to decide whether to make a prediction for a particular instance or defer it to the human expert. however, to accurately learn which instances should be deferred to the human expert, a large number of expert predictions that accurately reflect the expert's capabilities are required-in addition to the ground truth labels needed to train the ai. this requirement shared by many learning to defer algorithms hinders their adoption in scenarios where the responsible expert regularly changes or where acquiring a sufficient number of expert predictions is costly. in this paper, we propose a three-step approach to reduce the number of expert predictions required to train learning to defer algorithms. it encompasses (1) the training of an embedding model with ground truth labels to generate feature representations that serve as a basis for (2) the training of an expertise predictor model to approximate the expert's capabilities. (3) the expertise predictor generates artificial expert predictions for instances not yet labeled by the expert, which are required by the learning to defer algorithms. we evaluate our approach on two public datasets. 
one with "synthetically" generated human experts and another from the medical domain containing real-world radiologists' predictions. our experiments show that the approach allows the training of various learning to defer algorithms with a minimal number of human expert predictions. furthermore, we demonstrate that even a small number of expert predictions per class is sufficient for these algorithms to exceed the performance the ai and the human expert can achieve individually. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/16.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/16.txt new file mode 100644 index 0000000000000000000000000000000000000000..10622116dee3b49d4603b87ea09b2f5c2fb589b5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/16.txt @@ -0,0 +1 @@ + optimization equips engineers and scientists in a variety of fields with the ability to transcribe their problems into a generic formulation and receive optimal solutions with relative ease. industries ranging from aerospace to robotics continue to benefit from advancements in optimization theory and the associated algorithmic developments. nowadays, optimization is used in real time on autonomous systems acting in safety critical situations, such as self-driving vehicles. it has become increasingly more important to produce robust solutions by incorporating uncertainty into optimization programs. this paper provides a short survey about the state of the art in optimization under uncertainty. the paper begins with a brief overview of the main classes of optimization without uncertainty. the rest of the paper focuses on the different methods for handling both aleatoric and epistemic uncertainty. many of the applications discussed in this paper are within the domain of control. the goal of this survey paper is to briefly touch upon the state of the art in a variety of different methods and refer the reader to other literature for more in-depth treatments of the topics discussed here. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/160.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/160.txt new file mode 100644 index 0000000000000000000000000000000000000000..9877526abcede9df79c9937f975c109e23a2a592 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/160.txt @@ -0,0 +1 @@ + uci worldtour races, the premier men's elite road cycling tour, are grueling events that put physical fitness and endurance of riders to the test. the coaches of team jumbo-visma have long been responsible for predicting the energy needs of each rider of the dutch team for every race on the calendar. those must be estimated to ensure riders have the energy and resources necessary to maintain a high level of performance throughout a race. this task, however, is both time-consuming and challenging, as it requires precise estimates of race speed and power output. traditionally, the approach to predicting energy needs has relied on judgement and experience of coaches, but this method has its limitations and often leads to inaccurate predictions. in this paper, we propose a new, more effective approach to predicting energy needs for cycling races. by predicting the speed and power with regression models, we provide the coaches with calorie needs estimates for each individual rider per stage instantly. in addition, we compare methods to quantify uncertainty using conformal prediction. 
the empirical analysis of the jackknife+, jackknife-minmax, jackknife-minmax-after-bootstrap, cv+, cv-minmax, conformalized quantile regression, and inductive conformal prediction methods in conformal prediction reveals that all methods achieve valid prediction intervals. all but minmax-based methods also produce sufficiently narrow prediction intervals for decision-making. furthermore, methods computing prediction intervals of fixed size produce tighter intervals for low significance values. among the methods computing intervals of varying length across the input space, inductive conformal prediction computes narrower prediction intervals at larger significance level. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/161.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/161.txt new file mode 100644 index 0000000000000000000000000000000000000000..d8433f3e5e4ad7e77fd184c2bd56fa6aea7f0b83 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/161.txt @@ -0,0 +1 @@ + although multi-task deep neural network (dnn) models have computation and storage benefits over individual single-task dnn models, they can be further optimized via model compression. numerous structured pruning methods are already developed that can readily achieve speedups in single-task models, but the pruning of multi-task networks has not yet been extensively studied. in this work, we investigate the effectiveness of structured pruning on multitask models. we use an existing single-task filter pruning criterion and also introduce an mtl-based filter pruning criterion for estimating the filter importance scores. we prune the model using an iterative pruning strategy with both pruning methods. we show that, with careful hyper-parameter tuning, architectures obtained from different pruning methods do not have significant differences in their performances across tasks when the number of parameters is similar. we also show that iterative structure pruning may not be the best way to achieve a well-performing pruned model because, at extreme pruning levels, there is a high drop in performance across all tasks. but when the same models are randomly initialized and re-trained, they show better results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/162.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/162.txt new file mode 100644 index 0000000000000000000000000000000000000000..af7f730a41dd8993766f5a24dc38a6b84f81ed67 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/162.txt @@ -0,0 +1 @@ + missing data in time series is a challenging issue affecting time series analysis. missing data occurs due to problems like data drops or sensor malfunctioning. imputation methods are used to fill in these values, with quality of imputation having a significant impact on downstream tasks like classification. in this work, we propose a semi-supervised imputation method, st-impute, that uses both unlabeled data along with downstream task's labeled data. st-impute is based on sparse self-attention and trains on tasks that mimic the imputation process. our results indicate that the proposed method outperforms the existing supervised and unsupervised time series imputation methods measured on the imputation quality as well as on the downstream tasks ingesting imputed time series. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/163.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/163.txt new file mode 100644 index 0000000000000000000000000000000000000000..ce7e5579e30e53402a06406eecd91ab76c081882 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/163.txt @@ -0,0 +1 @@ + this study focuses on long-term forecasting (ltf) on continuous-time dynamic graph networks (ctdgns), which is important for real-world modeling. existing ctdgns are effective for modeling temporal graph data due to their ability to capture complex temporal dependencies but perform poorly on ltf due to the substantial requirement for historical data, which is not practical in most cases. to relieve this problem, the most intuitive way is data augmentation. in this study, we propose uncertainty masked mixup (ummu): a plug-and-play module that conducts uncertainty estimation to introduce uncertainty into the embedding of the intermediate layer of ctdgns, and performs masked mixup to further enhance the uncertainty of the embedding to make it generalize to more situations. ummu can be easily inserted into arbitrary ctdgns without increasing the number of parameters. we conduct comprehensive experiments on three real-world dynamic graph datasets; the results demonstrate that ummu can effectively improve the long-term forecasting performance for ctdgns. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/164.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/164.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee722d78e2e4ce01851ce28db0c08137af8bdcf7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/164.txt @@ -0,0 +1 @@ + in this paper, we present a transformer architecture for predicting student performance on standardized tests. specifically, we leverage students' historical data, including their past test scores, study habits, and other relevant information, to create a personalized model for each student. we then use these models to predict their future performance on a given test. applying this model to the riiid dataset, we demonstrate that using multiple granularities for temporal features as the decoder input significantly improves model performance. our results also show the effectiveness of our approach, with substantial improvements over the lightgbm method. our work contributes to the growing field of ai in education, providing a scalable and accurate tool for predicting student outcomes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/165.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/165.txt new file mode 100644 index 0000000000000000000000000000000000000000..45384933ab10f1a5a7952fc199e9e4ddc623e55d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/165.txt @@ -0,0 +1 @@ + this study presents a machine learning (ml) pipeline for clinical data classification in the context of a 30-day readmission problem, along with a fairness audit on subgroups based on sensitive attributes. a range of ml models are used for classification and the fairness audit is conducted on the model predictions. the fairness audit uncovers disparities in equal opportunity, predictive parity, false positive rate parity, and false negative rate parity criteria on the mimic iii dataset based on attributes such as gender, ethnicity, language, and insurance group. 
the results identify disparities in the model's performance across different groups and highlight the need for better fairness and bias mitigation strategies. the study suggests the need for collaborative efforts among researchers, policymakers, and practitioners to address bias and fairness in artificial intelligence (ai) systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/166.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/166.txt new file mode 100644 index 0000000000000000000000000000000000000000..c7081211dd55d505a7fd2992ea76ac7c1db79155 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/166.txt @@ -0,0 +1 @@ + federated learning aims to learn a global model collaboratively while the training data belongs to different clients and is not allowed to be exchanged. however, the statistical heterogeneity challenge on non-iid data, such as class imbalance in classification, will cause client drift and significantly reduce the performance of the global model. this paper proposes a simple and effective approach named fedshift which adds the shift on the classifier output during the local training phase to alleviate the negative impact of class imbalance. we theoretically prove that the classifier shift in fedshift can make the local optimum consistent with the global optimum and ensure the convergence of the algorithm. moreover, our experiments indicate that fedshift significantly outperforms the other state-of-the-art federated learning approaches on various datasets regarding accuracy and communication efficiency. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/167.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/167.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e5328996dfc508e5cccaee7747ff4c61d6d5ca1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/167.txt @@ -0,0 +1 @@ + with the advent of general-purpose speech representations from large-scale self-supervised models, applying a single model to multiple downstream tasks is becoming a de-facto approach. however, the pooling problem remains; the length of speech representations is inherently variable. the naive average pooling is often used, even though it ignores the characteristics of speech, such as differently lengthed phonemes. hence, we design a novel pooling method to squash acoustically similar representations via vector quantization, which does not require additional training, unlike attention-based pooling. further, we evaluate various unsupervised pooling methods on various self-supervised models. we gather diverse methods scattered around speech and text to evaluate on various tasks: keyword spotting, speaker identification, intent classification, and emotion recognition. finally, we quantitatively and qualitatively analyze our method, comparing it with supervised pooling methods. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/168.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/168.txt new file mode 100644 index 0000000000000000000000000000000000000000..d8ec2faef63f7da5fd9a0b7540f7dff741820254 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/168.txt @@ -0,0 +1 @@ + heart disorder has just overtaken cancer as the world's biggest cause of mortality. several cardiac failures, heart disease mortality, and diagnostic costs can all be reduced with early identification and treatment. medical data is collected in large quantities by the healthcare industry, but it is not well mined. the discovery of previously unknown patterns and connections in this information can help with an improved decision when it comes to forecasting heart disorder risk. in the proposed study, we constructed an ml-based diagnostic system for heart illness forecasting, using a heart disorder dataset. we used data preprocessing techniques like outlier detection and removal, checking and removing missing entries, feature normalization, cross-validation, nine classification algorithms like rf, mlp, knn, etc, xgb, svc, adb, dt, and gbm, and eight classifier performance measurement metrics like classification accuracy, precision, f1 score, specificity, roc, sensitivity, log-loss, and matthews' correlation coefficient, as well as eight classification performance evaluations. our method can easily differentiate between people who have cardiac disease and those who are normal. receiver operating characteristic curves and also the area under the curves were determined by every classifier. most of the classifiers, pretreatment strategies, validation methods, and performance assessment metrics for classification models have been discussed in this study. the performance of the proposed scheme has been confirmed, utilizing all of its capabilities. in this work, the impact of clinical decision support systems was evaluated using a stacked ensemble approach that included these nine algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/169.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/169.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d8d467114975f6893326abb8c7bf1e897773a74 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/169.txt @@ -0,0 +1 @@ + sepsis is a life-threatening organ malfunction caused by the host's inability to fight infection, which can lead to death without proper and immediate treatment. therefore, early diagnosis and medical treatment of sepsis in critically ill populations at high risk for sepsis and sepsis-associated mortality are vital to providing the patient with rapid therapy. studies show that advancing sepsis detection by 6 hours leads to earlier administration of antibiotics, which is associated with improved mortality. however, clinical scores like sequential organ failure assessment (sofa) are not applicable for early prediction, while machine learning algorithms can help capture the progressing pattern for early prediction. therefore, we aim to develop a machine learning algorithm that predicts sepsis onset 6 hours before it is suspected clinically. although some machine learning algorithms have been applied to sepsis prediction, many of them did not consider the fact that six hours is not a small gap. 
to overcome this big gap challenge, we explore a multi-subset approach in which the likelihood of sepsis occurring earlier than 6 hours is output from a previous subset and feed to the target subset as additional features. moreover, we use the hourly sampled data like vital signs in an observation window to derive a temporal change trend to further assist, which however is often ignored by previous studies. our empirical study shows that both the multi-subset approach to alleviating the 6-hour gap and the added temporal trend features can help improve the performance of sepsis-related early prediction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/17.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/17.txt new file mode 100644 index 0000000000000000000000000000000000000000..a91417032ed898285acc6927658849db63bbe19f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/17.txt @@ -0,0 +1 @@ + deep reinforcement learning (drl) has the potential to be used for synthesizing feedback controllers (agents) for various complex systems with unknown dynamics. these systems are expected to satisfy diverse safety and liveness properties best captured using temporal logic. in rl, the reward function plays a crucial role in specifying the desired behaviour of these agents. however, the problem of designing the reward function for an rl agent to satisfy complex temporal logic specifications has received limited attention in the literature. to address this, we provide a systematic way of generating rewards in real-time by using the quantitative semantics of signal temporal logic (stl), a widely used temporal logic to specify the behaviour of cyber-physical systems. we propose a new quantitative semantics for stl having several desirable properties, making it suitable for reward generation. we evaluate our stl-based reinforcement learning mechanism on several complex continuous control benchmarks and compare our stl semantics with those available in the literature in terms of their efficacy in synthesizing the controller agent. experimental results establish our new semantics to be the most suitable for synthesizing feedback controllers for complex continuous dynamical systems through reinforcement learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/170.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/170.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ba93e3c2e5ab07a7b3a921a94b7d13d29c6cabd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/170.txt @@ -0,0 +1 @@ + machine learning (ml) has become a critical tool in public health, offering the potential to improve population health, diagnosis, treatment selection, and health system efficiency. however, biases in data and model design can result in disparities for certain protected groups and amplify existing inequalities in healthcare. to address this challenge, this study summarizes seminal literature on ml fairness and presents a framework for identifying and mitigating biases in the data and model. the framework provides guidance on incorporating fairness into different stages of the typical ml pipeline, such as data processing, model design, deployment, and evaluation. to illustrate the impact of biases in data on ml models, we present examples that demonstrate how systematic biases can be amplified through model predictions. 
these case studies suggest how the framework can be used to prevent these biases and highlight the need for fair and equitable ml models in public health. this work aims to inform and guide the use of ml in public health towards a more ethical and equitable outcome for all populations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/171.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/171.txt new file mode 100644 index 0000000000000000000000000000000000000000..c9b984383b3f0ee4bb5bb848ef637c9c22066784 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/171.txt @@ -0,0 +1 @@ + machine learning models are often misspecified in the likelihood, which leads to a lack of robustness in the predictions. in this paper, we introduce a framework for correcting likelihood misspecifications in several paradigm agnostic noisy prior models and test the model's ability to remove the misspecification. the "abc-gan" framework introduced is a novel generative modeling paradigm, which combines generative adversarial networks (gans) and approximate bayesian computation (abc). this new paradigm assists the existing gans by incorporating any subjective knowledge available about the modeling process via abc, as a regularizer, resulting in a partially interpretable model that operates well under low data regimes. at the same time, unlike any bayesian analysis, the explicit knowledge need not be perfect, since the generator in the gan can be made arbitrarily complex. abc-gan eliminates the need for summary statistics and distance metrics as the discriminator implicitly learns them, and enables simultaneous specification of multiple generative models. the model misspecification is simulated in our experiments by introducing noise of various biases and variances. the correction term is learnt via the abc-gan, with skip connections, referred to as skipgan. the strength of the skip connection indicates the amount of correction needed or how misspecified the prior model is. based on a simple experimental setup, we show that the abc-gan models not only correct the misspecification of the prior, but also perform as well as or better than the respective priors under noisier conditions. in this proposal, we show that abc-gans get the best of both worlds. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/172.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/172.txt new file mode 100644 index 0000000000000000000000000000000000000000..88d3710e493d7d6534f93eccd240e5a52280a45e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/172.txt @@ -0,0 +1 @@ + this paper studies the problem of online performance optimization of constrained closed-loop control systems, where both the objective and the constraints are unknown black-box functions affected by exogenous time-varying contextual disturbances. a primal-dual contextual bayesian optimization algorithm is proposed that achieves sublinear cumulative regret with respect to the dynamic optimal solution under certain regularity conditions. furthermore, the algorithm achieves zero time-average constraint violation, ensuring that the average value of the constraint function satisfies the desired constraint. 
the method is applied to both sampled instances from gaussian processes and a continuous stirred tank reactor parameter tuning problem; simulation results show that the method simultaneously provides close-to-optimal performance and maintains constraint feasibility on average. this contrasts current state-of-the-art methods, which either suffer from large cumulative regret or severe constraint violations for the case studies presented. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/173.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/173.txt new file mode 100644 index 0000000000000000000000000000000000000000..23f2bedc8be84186401e2c30dd1eeb333cb0023d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/173.txt @@ -0,0 +1 @@ + a predictive model makes outcome predictions based on some given features, i.e., it estimates the conditional probability of the outcome given a feature vector. in general, a predictive model cannot estimate the causal effect of a feature on the outcome, i.e., how the outcome will change if the feature is changed while keeping the values of other features unchanged. this is because causal effect estimation requires interventional probabilities. however, many real world problems such as personalised decision making, recommendation, and fairness computing, need to know the causal effect of any feature on the outcome for a given instance. this is different from the traditional causal effect estimation problem with a fixed treatment variable. this paper first tackles the challenge of estimating the causal effect of any feature (as the treatment) on the outcome w.r.t. a given instance. the theoretical results naturally link a predictive model to causal effect estimations and imply that a predictive model is causally interpretable when the conditions identified in the paper are satisfied. the paper also reveals the robust property of a causally interpretable model. we use experiments to demonstrate that various types of predictive models, when satisfying the conditions identified in this paper, can estimate the causal effects of features as accurately as state-of-the-art causal effect estimation methods. we also show the potential of such causally interpretable predictive models for robust predictions and personalised decision making. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/174.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/174.txt new file mode 100644 index 0000000000000000000000000000000000000000..068e08093ff94a10d2c5354d211f33cb5a35341e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/174.txt @@ -0,0 +1 @@ + we consider stochastic convex optimization problems where the objective is an expectation over smooth functions. for this setting we suggest a novel gradient estimate that combines two recent mechanism that are related to notion of momentum. then, we design an sgd-style algorithm as well as an accelerated version that make use of this new estimator, and demonstrate the robustness of these new approaches to the choice of the learning rate. concretely, we show that these approaches obtain the optimal convergence rates for both noiseless and noisy case with the same choice of fixed learning rate. moreover, for the noisy case we show that these approaches achieve the same optimal bound for a very wide range of learning rates. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/175.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/175.txt new file mode 100644 index 0000000000000000000000000000000000000000..a069e8115a51c6b91b82e0b1816393d40bc5d0e4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/175.txt @@ -0,0 +1 @@ + internet of things (iot) devices are becoming increasingly popular and are influencing many application domains such as healthcare and transportation. these devices are used for real-world applications such as sensor monitoring and real-time control. in this work, we look at differentially private (dp) neural network (nn) based network intrusion detection systems (nids) to detect intrusion attacks on networks of such iot devices. existing nn training solutions in this domain either ignore privacy considerations or assume that the privacy requirements are homogeneous across all users. we show that the performance of existing differentially private stochastic methods degrades for clients with non-identical data distributions when clients' privacy requirements are heterogeneous. we define a cohort-based (ε, δ)-dp framework that models the more practical setting of iot device cohorts with non-identical clients and heterogeneous privacy requirements. we propose two novel continual-learning based dp training methods that are designed to improve model performance in the aforementioned setting. to the best of our knowledge, ours is the first system that employs a continual learning-based approach to handle heterogeneity in client privacy requirements. we evaluate our approach on real datasets and show that our techniques outperform the baselines. we also show that our methods are robust to hyperparameter changes. lastly, we show that one of our proposed methods can easily adapt to post-hoc relaxations of client privacy requirements. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/176.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/176.txt new file mode 100644 index 0000000000000000000000000000000000000000..c48ef73089e2c0d1b86b86c1983cfec2c6aba2c5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/176.txt @@ -0,0 +1 @@ + idle times of personal computers have increased steadily due to the generalization of computer usage and cloud computing. clustering research aims at utilizing idle computer resources for processing a variable work-load on a large number of computers. the work-load is processed continually despite the volatile status of the individual computer resources. this paper proposes a distributed compilation system for improving the processing speed of cpu-intensive software compilation. this significantly reduces the compilation time of mass sources by using idle resources. we expect gains of up to 65% against the non-distributed compilation systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/177.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/177.txt new file mode 100644 index 0000000000000000000000000000000000000000..09c0d652ccd769133c54d1da32eebfefbbefc707 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/177.txt @@ -0,0 +1 @@ + the simt execution model is commonly used for general gpu development. cuda and opencl developers write scalar code that is implicitly parallelized by compiler and hardware. 
on intel gpus, however, this abstraction has profound performance implications as the underlying isa is simd and important hardware capabilities cannot be fully utilized. to close this performance gap we introduce c-for-metal (cm), an explicit simd programming framework designed to deliver close-to-the-metal performance on intel gpus. the cm programming language and its vector/matrix types provide an intuitive interface to exploit the underlying hardware features, allowing fine-grained register management, simd size control and cross-lane data sharing. experimental results show that cm applications from different domains outperform the best-known simt-based opencl implementations, achieving up to 2.7x speedup on the latest intel gpu. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/178.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/178.txt new file mode 100644 index 0000000000000000000000000000000000000000..9dc27af7a24f5da7a15f2105d6cea35ff823233b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/178.txt @@ -0,0 +1 @@ + focal-plane sensor-processors (fpsps) are a camera technology that enables low-power, high-frame-rate computation, making them suitable for edge computation. unfortunately, these devices' limited instruction sets and registers make developing complex algorithms difficult. in this work, we present cain, a compiler that targets scamp-5, a general-purpose fpsp, which generates code from multiple convolutional kernels. as an example, given the convolutional kernels for an mnist digit recognition neural network, cain produces code that is half as long when compared to the other available compilers for scamp-5. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/179.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/179.txt new file mode 100644 index 0000000000000000000000000000000000000000..e906d75d627276c4bc808b7d38df128abe68f511 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/179.txt @@ -0,0 +1 @@ + previous works on formally studying mobile robotic swarms consider necessary and sufficient system hypotheses that enable solving theoretical benchmark problems (geometric pattern formation, gathering, scattering, etc.). we argue that formal methods can also help in the early stage of mobile robotic swarm protocol design, to obtain protocols that are correct-by-design, even for problems arising from real-world use cases not previously studied theoretically. our position is supported by a concrete case study. starting from a real-world case scenario, we jointly design the formal problem specification, a family of protocols that are able to solve the problem, and their corresponding proof of correctness, all expressed with the same formal framework. the concrete framework we use for our development is the pactole library based on the coq proof assistant. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/18.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/18.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0694fa7613fef940be031b1dd5f76f03efdd844 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/18.txt @@ -0,0 +1 @@ + large language models that are capable of zero- or few-shot prompting approaches have given rise to the new research area of prompt engineering.
recent advances showed that, for example, chain-of-thought (cot) prompts can significantly improve arithmetic or common-sense tasks. we explore how such approaches fare with legal reasoning tasks and take the coliee entailment task based on the japanese bar exam for testing zero-shot/few-shot and fine-tuning approaches. our findings show that while cot prompting and fine-tuning with explanations approaches show improvements, the best results are produced by prompts that are derived from specific legal reasoning techniques such as irac (issue, rule, application, conclusion). based on our experiments we improve the 2021 best result from 0.7037 accuracy to 0.8148 accuracy and beat the 2022 best system of 0.6789 accuracy with an accuracy of 0.7431. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/180.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/180.txt new file mode 100644 index 0000000000000000000000000000000000000000..446145dda15e524aada00f735c502b6f252312db --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/180.txt @@ -0,0 +1 @@ + cloud computing is a particular implementation of distributed computing. it inherited many properties of distributed computing such as scalability, reliability and distribution transparency. the transparency middle layer abstracts the underlying platform away from the end user. virtualization technology is the foundation of cloud computing. a virtual machine provides an abstraction of the physical server resources and securely isolates different users in a multi-tenant environment. to the cloud services consumer, all the computing power and resources are accessed through high-speed internet access by client platforms. this eliminates the cost of building and maintaining a local data center. resource pooling and rapid elasticity are the main characteristics of cloud computing. the scalability of cloud computing comes from resources which can span multiple data centers and geographic regions. there is virtually no limitation on the amount of resources available from the cloud. new processing and storage resources can be added into the cloud resource pool seamlessly. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/181.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/181.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/182.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/182.txt new file mode 100644 index 0000000000000000000000000000000000000000..20b44db6011eea25644336b8e98f9224909a5303 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/182.txt @@ -0,0 +1 @@ + this paper presents edsc, a novel smart contract platform design based on the event-driven execution model as opposed to the traditionally employed transaction-driven execution model. we reason that such a design is a better fit for many emerging smart contract applications and is better positioned to address the scalability and performance challenges plaguing the smart contract ecosystem. we propose edsc's design under the ethereum framework, and the design can be easily adapted for other existing smart contract platforms.
we have implemented the design using an ethereum client and conducted experiments; performance modeling results show, on average, a 2.2 to 4.6 times reduction in the total latency of event-triggered smart contracts, which demonstrates its effectiveness for supporting contracts that demand timely execution based on events. in addition, we discuss example use cases to demonstrate the design's utility and comment on its potential security dynamics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/183.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/183.txt new file mode 100644 index 0000000000000000000000000000000000000000..1b73254b60f81ffb0c37dfae401a40ad8653fb29 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/183.txt @@ -0,0 +1 @@ + a lease is an important primitive for building distributed protocols, and it is ubiquitously employed in distributed systems. however, the scope of the classic lease abstraction is restricted to the trusted computing infrastructure. unfortunately, this important primitive cannot be employed in the untrusted computing infrastructure because the trusted execution environments (tees) do not provide a trusted time source. in the untrusted environment, an adversary can easily manipulate the system clock to violate the correctness properties of lease-based systems. we tackle this problem by introducing the trusted lease, a lease that maintains its correctness properties even in the presence of a clock-manipulating attacker. to achieve these properties, we follow a "trust but verify" approach for an untrusted timer, and transform it into a trusted timing primitive by leveraging two hardware-assisted isa extensions (intel tsx and sgx) available in commodity cpus. we provide a design and implementation of trusted lease in a system called t-lease, the first trusted lease system that achieves high security, performance, and precision. for application developers, t-lease exposes easy-to-use generic apis that facilitate its usage to build a wide range of distributed protocols. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/184.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/184.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab0442c47bc5aee60e69b5889475139287b177b6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/184.txt @@ -0,0 +1 @@ + the multi-source data generated by distributed systems provide a holistic description of the system. harnessing the joint distribution of the different modalities by a learning model can be beneficial for critical applications for maintenance of the distributed systems. one such important task is anomaly detection, where we are interested in detecting the deviation of the current behaviour of the system from the theoretically expected one. in this work, we utilize the joint representation from the distributed traces and system log data for the task of anomaly detection in distributed systems. we demonstrate that the joint utilization of traces and logs produced better results compared to single-modality anomaly detection methods. furthermore, we formalize a learning task, next template prediction (ntp), that is used as a generalization of anomaly detection for both logs and distributed traces. finally, we demonstrate that this formalization allows for the learning of template embeddings for both the traces and logs.
the joint embeddings can be reused in other applications as good initialization for spans and logs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/185.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/185.txt new file mode 100644 index 0000000000000000000000000000000000000000..87e5fd235ea4e5bfa4d0e8d5e71330a6a0f3c30f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/185.txt @@ -0,0 +1 @@ + we demonstrate a fully functional implementation of (per-user) checkpoint, restore, and live migration capabilities for jupyterhub platforms. checkpointing -the ability to freeze and suspend to disk the running state (contents of memory, registers, open files, etc.) of a set of processes -enables the system to snapshot a user's jupyter session to permanent storage. the restore functionality brings a checkpointed session back to a running state, to continue where it left off at a later time and potentially on a different machine. finally, live migration enables moving running jupyter notebook servers between different machines, transparent to the analysis code and w/o disconnecting the user. our implementation of these capabilities works at the system level, with few limitations, and typical checkpoint/restore times of o(10s) with a pathway to o(1s) live migrations. it opens a myriad of interesting use cases, especially for cloudbased deployments: from checkpointing idle sessions w/o interruption of the user's work (achieving cost reductions of 4x or more), execution on spot instances w. transparent migration on eviction (with additional cost reductions up to 3x), to automated migration of workloads to ideally suited instances (e.g. moving an analysis to a machine with more or less ram or cores based on observed resource utilization). the capabilities we demonstrate can make science platforms fully elastic while retaining excellent user experience. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/186.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/186.txt new file mode 100644 index 0000000000000000000000000000000000000000..55045a45ff3613e3d6f904b697c33adabb2840ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/186.txt @@ -0,0 +1 @@ + uddsketch is a recent algorithm for accurate tracking of quantiles in data streams, derived from the ddsketch algorithm. uddsketch provides accuracy guarantees covering the full range of quantiles independently of the input distribution and greatly improves the accuracy with regard to ddsketch. in this paper we show how to compress and fuse data streams (or datasets) by using uddsketch data summaries that are fused into a new summary related to the union of the streams (or datasets) processed by the input summaries whilst preserving both the error and size guarantees provided by uddsketch. this property of sketches, known as mergeability, enables parallel and distributed processing. we prove that uddsketch is fully mergeable and introduce a parallel version of uddsketch suitable for messagepassing based architectures. we formally prove its correctness and compare it to a parallel version of ddsketch, showing through extensive experimental results that our parallel algorithm almost always outperforms the parallel ddsketch algorithm with regard to the overall accuracy in determining the quantiles. 
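The mergeability property described in the uddsketch abstract above can be illustrated with a minimal, self-contained sketch. The LogSketch class below is a simplified, hypothetical stand-in for a ddsketch-style summary (logarithmic buckets with relative-accuracy parameter alpha), not the actual uddsketch implementation; merging two summaries is just a bucket-wise sum of counts, which is what enables the parallel and distributed processing the abstract refers to.

```python
import math
from collections import Counter

# Minimal illustration of a mergeable, log-bucketed quantile summary.
# This is a hedged sketch for exposition, not the uddsketch algorithm itself.
class LogSketch:
    def __init__(self, alpha=0.01):
        self.alpha = alpha
        self.gamma = (1 + alpha) / (1 - alpha)   # bucket growth factor
        self.buckets = Counter()
        self.count = 0

    def add(self, value):                        # value > 0 assumed
        key = math.ceil(math.log(value, self.gamma))
        self.buckets[key] += 1
        self.count += 1

    def merge(self, other):                      # mergeability: bucket-wise sum
        assert self.alpha == other.alpha
        self.buckets.update(other.buckets)
        self.count += other.count

    def quantile(self, q):
        rank = q * (self.count - 1)
        seen = 0
        for key in sorted(self.buckets):
            seen += self.buckets[key]
            if seen > rank:
                return 2 * self.gamma ** key / (1 + self.gamma)
        return None

# Two workers summarize disjoint partitions of a stream, then fuse the summaries;
# the merged sketch answers quantile queries over the union of the streams.
a, b = LogSketch(), LogSketch()
for i in range(1, 5001):
    a.add(i)
for i in range(5001, 10001):
    b.add(i)
a.merge(b)
print(a.quantile(0.5))   # close to the true median (~5000) within relative error
```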
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/187.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/187.txt new file mode 100644 index 0000000000000000000000000000000000000000..28fd65bf362f6333742fb0b067761814352749a3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/187.txt @@ -0,0 +1 @@ + the computing continuum extends the high-performance cloud data centers with energy-efficient and low-latency devices close to the data sources located at the edge of the network. however, the heterogeneity of the computing continuum raises multiple challenges related to application management. these include where to offload an application -from the cloud to the edge -to meet its computation and communication requirements. to support these decisions, we provide in this article a detailed performance and carbon footprint analysis of a selection of use case applications with complementary resource requirements across the computing continuum over a real-life evaluation testbed. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/188.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/188.txt new file mode 100644 index 0000000000000000000000000000000000000000..11ee2e74a97a6aacd453e2c18e89cc4c37d3dc35 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/188.txt @@ -0,0 +1 @@ + computability logic (col) is a powerful, mathematically rigorous computational model. in this paper, we show that col-web, a web extension to col, naturally supports web programming where database updates are involved. to be specific, we discuss an implementation of the ai atm based on col (cl9 to be exact).more importantly, we argue that col-web supports a general ai and, therefore, is a good alternative to neural nets and deep learning. we also discuss how to integrate neural nets into col-web. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/189.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/189.txt new file mode 100644 index 0000000000000000000000000000000000000000..e70035f93b409ea6426fb43faac214d846f1d64f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/189.txt @@ -0,0 +1 @@ + a central issue of distributed computing systems is how to optimally allocate computing and storage resources and design data shuffling strategies such that the total execution time for computing and data shuffling is minimized. this is extremely critical when the computation, storage and communication resources are limited. in this paper, we study the resource allocation and coding scheme for the mapreduce-type framework with limited resources. in particular, we focus on the coded distributed computing (cdc) approach proposed by li et al.. we first extend the asymmetric cdc (acdc) scheme proposed by yu et al. to the cascade case where each output function is computed by multiple servers. then we demonstrate that whether cdc or acdc is better depends on system parameters (e.g., number of computing servers) and task parameters (e.g., number of input files), implying that neither cdc nor acdc is optimal. by merging the ideas of cdc and acdc, we propose a hybrid scheme and show that it can strictly outperform cdc and acdc. 
furthermore, we derive an information-theoretic converse showing that for the mapreduce task using a type of weakly symmetric reduce assignment, which includes the reduce assignments of cdc and acdc as special cases, the hybrid scheme with a corresponding resource allocation strategy is optimal, i.e., achieves the minimum execution time, for an arbitrary number of computing servers and storage memories. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/19.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/19.txt new file mode 100644 index 0000000000000000000000000000000000000000..507af2a5199096a018faa332d2a397f3b8a41f69 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/19.txt @@ -0,0 +1 @@ + using a single model across various tasks is beneficial for training and applying deep neural sequence models. we address the problem of developing generalist representations of text that can be used to perform a range of different tasks rather than being specialised to a single application. we focus on processing short questions and developing an embedding for these questions that is useful on a diverse set of problems, such as question topic classification, equivalent question recognition, and question answering. this paper introduces qbert, a generalist model for processing questions. with qbert, we demonstrate how we can train a multi-task network that performs all question-related tasks and achieves similar performance compared to its corresponding single-task models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/190.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/190.txt new file mode 100644 index 0000000000000000000000000000000000000000..0f3e0199342acfcecb18e82bc54bc8c76f88eaf0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/190.txt @@ -0,0 +1 @@ + we present a procedure for efficiently sampling colors in the congest model. it allows nodes whose number of colors exceeds their number of neighbors by a constant fraction to sample up to θ(log n) semi-random colors unused by their neighbors in o(1) rounds, even in the distance-2 setting. this yields algorithms with o(log* ∆) complexity for different edge-coloring, vertex coloring, and distance-2 coloring problems, matching the best possible. in particular, we obtain an o(log* ∆)-round congest algorithm for (1 + ε)∆-edge coloring when ∆ ≥ log^{1+1/log* n} n, and a poly(log log n)-round algorithm for (2∆-1)-edge coloring in general. the sampling procedure is inspired by a seminal result of newman in communication complexity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/191.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/191.txt new file mode 100644 index 0000000000000000000000000000000000000000..d86569d2e4744a3f7666d249bd3cc1efdf458a9b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/191.txt @@ -0,0 +1 @@ + in this paper, we study gossip algorithms in communication models that describe the peer-to-peer networking functionality included in most standard smartphone operating systems. we begin by describing and analyzing a new synchronous gossip algorithm in this setting that features both a faster round complexity and simpler operation than the best-known existing solutions.
we also prove a new lower bound on the rounds required to solve gossip that resolves a minor open question by establishing that existing synchronous solutions are within logarithmic factors of optimal. we then adapt our synchronous algorithm to produce a novel gossip strategy for an asynchronous model that directly captures the interface of a standard smartphone peer-to-peer networking library (enabling algorithms described in this model to be easily implemented on real phones). using new analysis techniques, we prove that this asynchronous strategy efficiently solves gossip. this is the first known efficient asynchronous information dissemination result for the smartphone peer-to-peer setting. we argue that our new strategy can be used to implement effective information spreading subroutines in real world smartphone peer-to-peer network applications, and that the analytical tools we developed to analyze it can be leveraged to produce other broadly useful algorithmic strategies for this increasingly important setting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/192.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/192.txt new file mode 100644 index 0000000000000000000000000000000000000000..55fbbb688b26fe0ee75a821952945423891e8052 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/192.txt @@ -0,0 +1 @@ + static code analysis tools are designed to aid software developers to build better quality software in less time, by detecting defects early in the software development life cycle. even the most experienced developer regularly introduces coding defects. identifying, mitigating and resolving defects is an essential part of the software development process, but frequently defects can go undetected. one defect can lead to a minor malfunction or cause serious security and safety issues. this is magnified in the development of the complex parallel software required to exploit modern heterogeneous multicore hardware. thus, there is an urgent need for new static code analysis tools to help in building better concurrent and parallel software. the paper reports preliminary results about the use of appentra's parallelware technology to address this problem from the following three perspectives: finding concurrency issues in the code, discovering new opportunities for parallelization in the code, and generating parallel-equivalent codes that enable tasks to run faster. the paper also presents experimental results using well-known scientific codes and power systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/193.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/193.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f5cdd192cc169b159482b6b702fb315e812b99b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/193.txt @@ -0,0 +1 @@ + our social interactions mainly depend on the social phenomenon called trust. we evaluate our trust in our peer to decide whether to start an interaction or not. when our information about the peer is not sufficient, we use the knowledge of others. this knowledge can also be referred to as the reputation of the peer in the community. like real-life communities, trust and reputation play a key role in virtual communities, too. these two notions help us overcome the complex interactions between agents in virtual communities. 
in previous studies regarding this topic, the social aspect of trust and reputation is partly ignored. in this paper, we will review an article which we accept as a starting point and compare it with another article that provides a more advanced model. additionally, a new trust model which is mainly based on sociological notions will also be introduced. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/194.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/194.txt new file mode 100644 index 0000000000000000000000000000000000000000..e21eb32b3a542206d224d08a5b0854de7473fcc1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/194.txt @@ -0,0 +1 @@ + distributed systems become more and more important to our life. especially in areas like smart home and the internet of things (iot) reliable low-power sensor networks become increasingly important. for ensuring this there are a lot of trust metrics. in this paper we compare a model of a distributed low-power sensor network including one root node and the corresponding simple trust metric to the requirements from "representation of trust and reputation in self-managed computing systems" , the weighted trust metric and the weighted simple exponential smoothing trust metric. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/195.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/195.txt new file mode 100644 index 0000000000000000000000000000000000000000..342ccee8442cd1a22f8f145388628bc9e51d22b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/195.txt @@ -0,0 +1 @@ + in graph theory, an independent set is a subset of nodes where there are no two adjacent nodes. the independent set is maximal if no node outside the independent set can join it. in network applications, maximal independent sets can be used as cluster heads in ad hoc and wireless sensor networks. in order to deal with any failure in networks, self-stabilizing algorithms have been proposed in the literature to calculate the maximal independent set under different hypothesis.in this paper, we propose a selfstabilizing algorithm to compute a maximal independent set where nodes of the independent set are far from each other at least with distance 3. we prove the correctness and the convergence of the proposed algorithm. simulation tests show the ability of our algorithm to find a reduced number of nodes in large scale networks which allows strong control of networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/196.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/196.txt new file mode 100644 index 0000000000000000000000000000000000000000..fd55fb94f3377c07ce369a591927228ea0110bf8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/196.txt @@ -0,0 +1 @@ + we provide a utxo model of blockchain transactions that is able to represent both credit and debt on the same blockchain. ordinarily, the utxo model is solely used to represent credit and the representation of credit and debit together is achieved using the account model because of its support for balances. however, the utxo model provides superior privacy, safety, and scalability when compared to the account model. in this work, we introduce a utxo model that has the flexibility of balances with the usual benefits of the utxo model. 
this model extends the conventional utxo model, which represents credits as unmatched outputs, by representing debts as unmatched inputs. we apply our model to solving the problem of transparency in reverse mortgage markets, in which some transparency is necessary for a healthy market but complete transparency leads to adverse outcomes. here the pseudonymous properties of the utxo model protect the privacy of loan recipients while still allowing an aggregate view of the loan market. we present a prototype of our implementation in tendermint and discuss the design and its benefits. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/197.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/197.txt new file mode 100644 index 0000000000000000000000000000000000000000..11f4600a9b50d28b2be802e15fd4da4c2831a6fd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/197.txt @@ -0,0 +1 @@ + in spite that federated learning (fl) is well known for its privacy protection when training machine learning models among distributed clients collaboratively, recent studies have pointed out that the naive fl is susceptible to gradient leakage attacks. in the meanwhile, differential privacy (dp) emerges as a promising countermeasure to defend against gradient leakage attacks. however, the adoption of dp by clients in fl may significantly jeopardize the model accuracy. it is still an open problem to understand the practicality of dp from a theoretic perspective. in this paper, we make the first attempt to understand the practicality of dp in fl through tuning the number of conducted iterations. based on the fedavg algorithm, we formally derive the convergence rate with dp noises in fl. then, we theoretically derive: 1) the conditions for the dp based fedavg to converge as the number of global iterations (gi) approaches infinity; 2) the method to set the number of local iterations (li) to minimize the negative influence of dp noises. by further substituting the laplace and gaussian mechanisms into the derived convergence rate respectively, we show that: 3) the dp based fedavg with the laplace mechanism cannot converge, but the divergence rate can be effectively prohibited by setting the number of lis with our method; 4) the learning error of the dp based fedavg with the gaussian mechanism can converge to a constant number finally if we use a fixed number of lis per gi. to verify our theoretical findings, we conduct extensive experiments using two real-world datasets. the results not only validate our analysis results, but also provide useful guidelines on how to optimize model accuracy when incorporating dp into fl. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/198.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/198.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbc9c7d329b23bf0e43fe3c157a60264146d7d8a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/198.txt @@ -0,0 +1 @@ + we present results from parallelizing the unpacking and clustering steps of the raw data from the silicon strip modules for reconstruction of charged particle tracks. throughput is further improved by concurrently processing multiple events using nested openmp parallelism on cpu or cuda streams on gpu. 
the new implementation along with earlier work in developing a parallelized and vectorized implementation of the combinatoric kalman filter algorithm has enabled efficient global reconstruction of the entire event on modern computer architectures. we demonstrate the performance of the new implementation on intel xeon and nvidia gpu architectures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/199.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/199.txt new file mode 100644 index 0000000000000000000000000000000000000000..bfd691df34ca1e72808fe1cdde5e78c92b1ffbac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/199.txt @@ -0,0 +1 @@ + application partitioning and code offloading are being researched extensively during the past few years. several frameworks for code offloading have been proposed. however, fewer works attempted to address issues occurred with its implementation in pervasive environments such as frequent network disconnection due to high mobility of users. thus, in this paper, we proposed a fault tolerant algorithm that helps in consolidating the efficiency and robustness of application partitioning and offloading frameworks. to permit the usage of different fault tolerant policies such as replication and checkpointing, the devices are grouped into high and low reliability clusters. experimental results shown that the fault tolerant algorithm can easily adapt to different execution conditions while incurring minimum overhead. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/2.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/2.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae3e599db300e869a51fc0db60eb66b0055994b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/2.txt @@ -0,0 +1 @@ + autonomous agents that act with each other on behalf of humans are becoming more common in many social domains, such as customer service, transportation, and health care. in such social situations greedy strategies can reduce the positive outcome for all agents, such as leading to stop-and-go traffic on highways, or causing a denial of service on a communications channel. instead, we desire autonomous decision-making for efficient performance while also considering equitability of the group to avoid these pitfalls. unfortunately, in complex situations it is far easier to design machine learning objectives for selfish strategies than for equitable behaviors. here we present a simple way to reward groups of agents in both evolution and reinforcement learning domains by the performance of their weakest member. we show how this yields "fairer" more equitable behavior, while also maximizing individual outcomes, and we show the relationship to biological selection mechanisms of group-level selection and inclusive fitness theory. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/20.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/20.txt new file mode 100644 index 0000000000000000000000000000000000000000..f13656c151508ab6e13f88b499acb5eac6ea3305 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/20.txt @@ -0,0 +1 @@ + even with impressive advances in application specific models, we still lack knowledge about how to build a model that can learn in a human-like way and do multiple tasks. 
to learn in a human-like way, we need to provide a diverse experience that is comparable to a human's. in this paper, we introduce our ongoing effort to build a simulated environment for developmental robotics (sedro). sedro provides diverse human experiences ranging from those of a fetus to those of a 12-month-old. a series of simulated tests based on developmental psychology will be used to evaluate the progress of a learning model. we anticipate that sedro will lower the cost of entry and facilitate research in the developmental robotics community. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/200.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/200.txt new file mode 100644 index 0000000000000000000000000000000000000000..362b1c93a99ab353d4aeab75705c1b45d164ef8c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/200.txt @@ -0,0 +1 @@ + classical computers require large memory resources and computational power to simulate quantum circuits with a large number of qubits. even supercomputers that can store huge amounts of data face a scalability issue in regard to parallel quantum computing simulations because of the latency of data movements between distributed memory spaces. here, we apply a cache blocking technique by inserting swap gates in quantum circuits to decrease data movements. we implemented this technique in the open-source simulation framework qiskit aer. we evaluated our simulator on gpu clusters and observed good scalability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/201.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/201.txt new file mode 100644 index 0000000000000000000000000000000000000000..d56c0348d1338ff84f96d84743864725eceea2be --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/201.txt @@ -0,0 +1 @@ + a proof labelling scheme for a graph class c is an assignment of certificates to the vertices of any graph in the class c, such that upon reading its certificate and the certificates of its neighbors, every vertex from a graph g ∈ c accepts the instance, while if g ∉ c, for every possible assignment of certificates, at least one vertex rejects the instance. it was proved recently that for any fixed surface σ, the class of graphs embeddable in σ has a proof labelling scheme in which each vertex of an n-vertex graph receives a certificate of at most o(log n) bits. the proof is quite long and intricate and heavily relies on an earlier result for planar graphs. here we give a very short proof for any surface. the main idea is to encode a rotation system locally, together with a spanning tree supporting the local computation of the genus via euler's formula. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/202.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/202.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6adf90efce6d152aac0fcc3d5e8e8745b3033c9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/202.txt @@ -0,0 +1 @@ + each day the world inches closer to a climate catastrophe and a sustainability revolution. to avoid the former and achieve the latter, we must transform our use of energy.
surprisingly, today's growing problem is that there is too much wind and solar power generation at the wrong times and in the wrong places.we argue for the construction of terrawatt: a geographically-distributed, large-scale, zero-carbon compute infrastructure using renewable energy and older hardware. delivering zero-carbon compute for general cloud workloads is challenging due to spatiotemporal power variability. we describe the systems challenges in using intermittent renewable power at scale to fuel such an older, decentralized compute infrastructure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/203.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/203.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e2d054d34b47c0096aeba61a191088e56a2721c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/203.txt @@ -0,0 +1 @@ + this paper considers the problem of byzantine dispersion and extends previous work along several parameters. the problem of byzantine dispersion asks: given n robots, up to f of which are byzantine, initially placed arbitrarily on an n node anonymous graph, design a terminating algorithm to be run by the robots such that they eventually reach a configuration where each node has at most one non-byzantine robot on it. previous work solved this problem for rings and tolerated up to n -1 byzantine robots. in this paper, we investigate the problem on more general graphs. we first develop an algorithm that tolerates up to n -1 byzantine robots and works for a more general class of graphs. we then develop an algorithm that works for any graph but tolerates a lesser number of byzantine robots. we subsequently turn our focus to the strength of the byzantine robots. previous work considers only "weak" byzantine robots that cannot fake their ids. we develop an algorithm that solves the problem when byzantine robots are not weak and can fake ids. finally, we study the situation where the number of the robots is not n but some k. we show that in such a scenario, the number of byzantine robots that can be tolerated is severely restricted. specifically, we show that it is impossible to deterministically solve byzantine dispersion when ⌈k/n⌉ > ⌈(k -f )/n⌉. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/204.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/204.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0f2474095788d62802b950a21a45aced77ceaed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/204.txt @@ -0,0 +1 @@ + data privacy and protection is a crucial issue for any automatic speech recognition (asr) service provider when dealing with clients. in this paper, we investigate federated acoustic modeling using data from multiple clients. a client's data is stored on a local data server and the clients communicate only model parameters with a central server, and not their data. the communication happens infrequently to reduce the communication cost. to mitigate the non-iid issue, client adaptive federated training (caft) is proposed to canonicalize data across clients. the experiments are carried out on 1,150 hours of speech data from multiple domains. hybrid lstm acoustic models are trained via federated learning and their performance is compared to traditional centralized acoustic model training. 
the experimental results demonstrate the effectiveness of the proposed federated acoustic modeling strategy. we also show that caft can further improve the performance of the federated acoustic model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/205.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/205.txt new file mode 100644 index 0000000000000000000000000000000000000000..800c5a20d257d5ce298145ae044e331abe6288b7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/205.txt @@ -0,0 +1 @@ + this paper provides three nearly-optimal algorithms for scheduling t jobs in the clique model. first, we present a deterministic scheduling algorithm that runs in o(globalcongestion + dilation) rounds for jobs that are sufficiently efficient in terms of their memory. the dilation is the maximum round complexity of any of the given jobs, and the globalcongestion is the total number of messages in all jobs divided by the per-round bandwidth of n^2 of the clique model. both are inherent lower bounds for any scheduling algorithm. then, we present a randomized scheduling algorithm which runs t jobs in o(globalcongestion + dilation · log n + t) rounds and only requires that inputs and outputs do not exceed o(n log n) bits per node, which is met by, e.g., almost all graph problems. lastly, we adjust the random-delay-based scheduling algorithm from the congest model and obtain an algorithm that schedules any t jobs in o(t/n + localcongestion + dilation · log n) rounds, where the localcongestion relates to the congestion at a single node of the clique. we compare this algorithm to the previous approaches and show their benefit. we schedule the set of jobs on-the-fly, without a priori knowledge of its parameters or the communication patterns of the jobs. in light of the inherent lower bounds, all of our algorithms are nearly-optimal. we exemplify the power of our algorithms by analyzing the message complexity of the state-of-the-art mis protocol, and we show that we can solve t instances of mis in o(t + log log ∆ log n) rounds, that is, in o(1) amortized time, for t ≥ log log ∆ log n. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/206.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/206.txt new file mode 100644 index 0000000000000000000000000000000000000000..066bd2ca7e5bca031e2ee2bce9764bfa6e2d2715 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/206.txt @@ -0,0 +1 @@ + we study the problem of distributed cooperative learning, where a group of agents seeks to agree on a set of hypotheses that best describes a sequence of private observations. in the scenario where the set of hypotheses is large, we propose a belief update rule where agents share compressed (either sparse or quantized) beliefs with an arbitrary positive compression rate. our algorithm leverages a unified communication rule that enables agents to access wide-ranging compression operators as black-box modules. we prove the almost sure asymptotic exponential convergence of beliefs around the set of optimal hypotheses. additionally, we show a non-asymptotic, explicit, and linear concentration rate in probability of the beliefs on the optimal hypothesis set. we provide numerical experiments to illustrate the communication benefits of our method. the simulation results show that the number of transmitted bits can be reduced to 5-10% of the non-compressed method in the studied scenarios.
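As a rough illustration of the compressed-belief idea in the abstract above (and not the paper's actual update rule), the snippet below shows one possible sparsification operator: an agent transmits only the k largest entries of its belief vector and the receiver renormalizes. The function names and the top-k choice are assumptions made purely for this example.

```python
import numpy as np

# Hedged sketch: top-k sparsification of a belief vector over hypotheses.
def sparsify(belief, k):
    idx = np.argsort(belief)[-k:]                 # indices of the k largest beliefs
    return {int(i): float(belief[i]) for i in idx}

def reconstruct(sparse, size):
    dense = np.zeros(size)
    for i, v in sparse.items():
        dense[i] = v
    s = dense.sum()
    return dense / s if s > 0 else np.full(size, 1.0 / size)

belief = np.random.dirichlet(np.ones(1000))       # belief over 1000 hypotheses
msg = sparsify(belief, 50)                        # ~5% of the entries transmitted
print(len(msg), reconstruct(msg, 1000).sum())
```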
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/207.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/207.txt new file mode 100644 index 0000000000000000000000000000000000000000..f457e8378801a555291710370fea40f3c34883b8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/207.txt @@ -0,0 +1 @@ + embedded systems have been used to control physical environments for decades. usually, such use cases require low latencies between commands and actions as well as a high predictability of the expected worst-case delay. to achieve this on small, low-powered microcontrollers, real-time operating systems (rtoss) are used to manage the different tasks on these machines as deterministically as possible. however, with the advent of the internet of things (iot) in industrial applications, the same embedded systems are now equipped with networking capabilities, possibly endangering critical real-time systems through an open gate to interrupts. this paper presents our initial study of the impact network connections can have on real-time embedded systems. specifically, we look at three aspects: the impact of network-generated interrupts, the overhead of the related networking tasks, and the feasibility of sharing computing resources between networking and real-time tasks. we conducted experiments on two setups: one treating nics and drivers as black boxes and one simulating network interrupts on the machines. the preliminary results show that a critical task performance loss of up to 6.67% per received packet per second could be induced, where lateness impacts of 1% per packet per second can be attributed exclusively to isr-generated delays. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/208.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/208.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a823bb5f1b9001aa12fcd24c7c4fcf86077c1ba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/208.txt @@ -0,0 +1 @@ + sharding, i.e., splitting the miners or validators to form and run several subchains in parallel, is known as one of the main solutions to the scalability problems of blockchains. the drawback is that as the number of miners expanding each subchain becomes small, it becomes vulnerable to security attacks. to solve this problem, a framework named polyshard has been proposed, in which each validator verifies a coded combination of the blocks introduced by different subchains, thus helping to protect the security of all subchains. in this paper, we introduce an attack on polyshard, called the discrepancy attack, which is the result of malicious nodes controlling a few subchains and dispersing different blocks to different nodes. we show that this attack undermines the security of polyshard and is undetectable in its current setting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/209.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/209.txt new file mode 100644 index 0000000000000000000000000000000000000000..1fb5f96615f95287ec6de669c0a84536d00eea70 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/209.txt @@ -0,0 +1 @@ + blockchain-based distributed ledgers (dls) promise to transform the existing financial system by making it truly democratic.
in the past decade, blockchain technology has seen many novel applications ranging from the banking industry to real estate. however, in order to be adopted universally, blockchain systems must be scalable to support a high volume of transactions. as we increase the throughput of the dl system, the underlying peer-to-peer network might face multiple levels of challenges to keep up with the requirements. due to varying network capacities, the slower nodes would be at a relative disadvantage compared to the faster ones, which could negatively impact their revenue. in order to quantify their relative advantage or disadvantage, we introduce two measures of network fairness: the probability of frontrunning and the publishing fairness. we show that as we scale the blockchain, both these measures deteriorate, implying that the slower nodes face a disadvantage at higher throughputs. this results in the faster nodes getting more than their fair share of the reward while the slower nodes (slow in terms of network quality) get less. thus, fairness and scalability in blockchain systems do not go hand in hand. in a setting with rational miners, lack of fairness causes miners to deviate from the "longest chain rule" or undercut, which would reduce the blockchain's resilience against byzantine adversaries. hence, fairness is not only a desirable property for a blockchain system but also essential for the security of the blockchain, and any scalable blockchain protocol proposed must ensure fairness. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/21.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/21.txt new file mode 100644 index 0000000000000000000000000000000000000000..b3320f502e10aaed7e833d4b5f65d2616e775576 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/21.txt @@ -0,0 +1 @@ + perceptual speech quality is an important performance metric for teleconferencing applications. the mean opinion score (mos) is standardized for the perceptual evaluation of speech quality and is obtained by asking listeners to rate the quality of a speech sample. recently, there has been increasing research interest in developing models for estimating mos blindly. here we propose a multitask framework to include additional labels and data in training to improve the performance of a blind mos estimation model. experimental results indicate that the proposed model can be trained to jointly estimate mos, reverberation time (t60), and clarity (c50) by combining two disjoint data sets in training, one containing only mos labels and the other containing only t60 and c50 labels. furthermore, we use a semi-supervised framework to combine two mos data sets in training, one containing only mos labels (per itu-t recommendation p.808), and the other containing separate scores for speech signal, background noise, and overall quality (per itu-t recommendation p.835). finally, we present preliminary results for addressing individual rater bias in the mos labels.
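One common way to combine disjoint data sets in a multitask objective, as described in the MOS abstract above, is to mask out the missing targets in the loss. The sketch below is an illustrative assumption about how such a masked loss could look (NaN-encoded missing labels, mean-squared error over the observed ones); it is not the paper's actual model or training code.

```python
import numpy as np

def multitask_loss(pred, target):
    """pred, target: arrays of shape (batch, 3) for [mos, t60, c50];
    missing labels are encoded as NaN and excluded from the mean."""
    mask = ~np.isnan(target)
    sq_err = np.where(mask, (pred - np.nan_to_num(target)) ** 2, 0.0)
    return sq_err.sum() / mask.sum()

# one sample from the MOS-labelled set, one from the t60/c50-labelled set
target = np.array([[4.2, np.nan, np.nan],
                   [np.nan, 0.45, 18.0]])
pred = np.array([[3.9, 0.50, 17.0],
                 [3.5, 0.40, 16.5]])
print(multitask_loss(pred, target))   # averages only the three observed targets
```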
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/210.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/210.txt new file mode 100644 index 0000000000000000000000000000000000000000..d31e829deaa7f2d4bbc9967b7d50f982daa9711d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/210.txt @@ -0,0 +1 @@ + the enormous power consumption of bitcoin has led to undifferentiated discussions in science and practice about the sustainability of blockchain and distributed ledger technology in general. however, blockchain technology is far from homogeneous -not only with regard to its applications, which now go far beyond cryptocurrencies and have reached businesses and the public sector, but also with regard to its technical characteristics and, in particular, its power consumption. this paper summarizes the status quo of the power consumption of various implementations of blockchain technology, with special emphasis on the recent "bitcoin halving" and so-called "zk-rollups". we argue that although bitcoin and other proof-of-work blockchains do indeed consume a lot of power, alternative blockchain solutions with significantly lower power consumption are already available today, and new promising concepts are being tested \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/211.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/211.txt new file mode 100644 index 0000000000000000000000000000000000000000..44c944f3409f4b78d8c38e88d0cc77a6b904d78e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/211.txt @@ -0,0 +1 @@ + smart farming has brought a major transformation in the agriculture process by using the internet of things (iot) devices, emerging technologies such as cloud computing, fog computing, and data analytics. it allows farmers to have real-time awareness of the farm and help them make smart and informed decisions. in this paper, we propose a distributed data flow (ddf) based model for the smart farming application that is composed of interdependent modules. we evaluate the proposed application model using two deployment strategies: cloud-based, and fog-based where the application modules are deployed on the fog and the cloud data center respectively. we compare the cloud-based and fog-based strategy in terms of end-to-end latency and network usage. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/212.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/212.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2f933382bfc7b292732558dba8e4af765f62326 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/212.txt @@ -0,0 +1 @@ + computer experiments with both qualitative and quantitative factors are widely used in many applications. motivated by the emerging need of optimal configuration in the high-performance computing (hpc) system, this work proposes a sequential design, denoted as adaptive composite exploitation and exploration (cee), for optimization of computer experiments with qualitative and quantitative factors. the proposed adaptive cee method combines the predictive mean and standard deviation based on the additive gaussian process to achieve a meaningful balance between exploitation and exploration for optimization. moreover, the adaptiveness of the proposed sequential procedure allows the selection of next design point from the adaptive design region. 
theoretical justification of the adaptive design region is provided. the performance of the proposed method is evaluated by several numerical examples in simulations. the case study of hpc performance optimization further elaborates the merits of the proposed method. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/213.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/213.txt new file mode 100644 index 0000000000000000000000000000000000000000..caf242c528faca2648a67623ad6e1881d07e4a9e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/213.txt @@ -0,0 +1 @@ + internet of things (iot) applications promise to make many aspects of our lives more efficient and adaptive through the use of distributed sensing and computing nodes. a central aspect of such applications is their complex communication behavior that is heavily influenced by the physical environment of the system. to continuously improve iot applications, a staging environment is needed that can provide operating conditions representative of deployments in the actual production environments -similar to what is common practice in cloud application development today. towards such a staging environment, we present marvis, a framework that orchestrates hybrid testbeds, co-simulated domain environments, and a central network simulation for testing distributed iot applications. our preliminary results include an open source prototype and a demonstration of a vehicle-to-everything (v2x) communication scenario. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/214.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/214.txt new file mode 100644 index 0000000000000000000000000000000000000000..1e88825f8e7345b69c246f707f6ad513d1b5dedd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/214.txt @@ -0,0 +1 @@ + federated learning (fl) is a promising approach to distributed compute, as well as distributed data, and provides a level of privacy and compliance to legal frameworks. this makes fl attractive for both consumer and healthcare applications. while the area is actively being explored, few studies have examined fl in the context of larger language models and there is a lack of comprehensive reviews of robustness across tasks, architectures, numbers of clients, and other relevant factors. in this paper, we explore the fine-tuning of transformer-based language models in a federated learning setting. we evaluate three popular bert-variants of different sizes (bert, albert, and distilbert) on a number of text classification tasks such as sentiment analysis and author identification. we perform an extensive sweep over the number of clients, ranging up to 32, to evaluate the impact of distributed compute on task performance in the federated averaging setting. while our findings suggest that the large sizes of the evaluated models are not generally prohibitive to federated training, we found that the different models handle federated averaging to a varying degree. most notably, distilbert converges significantly slower with larger numbers of clients, and under some circumstances, even collapses to chance level performance. investigating this issue presents an interesting perspective for future research. 
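The federated averaging step referenced in the abstract above combines client models into a global model by a weighted average of their parameters. The following is a minimal sketch of that aggregation, under the assumption that each client's parameters are held as a dict of numpy arrays; it is not the evaluation code used in the paper.

```python
import numpy as np

def federated_average(client_weights, client_sizes):
    """Weighted average of client parameters, weights proportional to the
    number of local training examples (the FedAvg aggregation rule)."""
    total = float(sum(client_sizes))
    avg = {}
    for name in client_weights[0]:
        avg[name] = sum(w[name] * (n / total)
                        for w, n in zip(client_weights, client_sizes))
    return avg

# e.g. 3 clients, each holding a model with one weight matrix and one bias vector
clients = [{"w": np.random.randn(4, 2), "b": np.random.randn(2)} for _ in range(3)]
sizes = [100, 250, 50]
global_model = federated_average(clients, sizes)
print(global_model["w"].shape, global_model["b"].shape)
```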
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/215.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/215.txt new file mode 100644 index 0000000000000000000000000000000000000000..6db74ec803f3db1d524f371fb81daf5bae8026d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/215.txt @@ -0,0 +1 @@ + we review the literature about reaching agreement in quantum networks, also called quantum consensus. after a brief introduction to the key feature of quantum computing, allowing the reader with no quantum theory background to have minimal tools to understand, we report a formal definition of quantum consensus and the protocols proposed. proposals are classified according to the quantum feature used to achieve agreement. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/216.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/216.txt new file mode 100644 index 0000000000000000000000000000000000000000..4700d79218eb77fc2ba827689e9432d2ac5467c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/216.txt @@ -0,0 +1 @@ + as data privacy is gradually valued by people, federated learning(fl) has emerged because of its potential to protect data. fl uses homomorphic encryption and differential privacy encryption on the promise of ensuring data security to realize distributed machine learning by exchanging encrypted information between different data providers. however, there are still many problems in fl, such as the communication efficiency between the client and the server and the data is non-iid. in order to solve the two problems mentioned above, we propose a novel vertical federated learning framework based on the dfp and the bfgs(denoted as bdfl), then apply it to logistic regression. finally, we perform experiments using real datasets to test efficiency of bdfl framework. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/217.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/217.txt new file mode 100644 index 0000000000000000000000000000000000000000..f574ce0e96595def04e92597e9af78f94a6b5872 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/217.txt @@ -0,0 +1 @@ + the emergence of the internet of things has seen the introduction of numerous connected devices used for the monitoring and control of even critical infrastructures. distributed stream processing has become key to analyzing data generated by these connected devices and improving our ability to make decisions. however, optimizing these systems towards specific quality of service targets is a difficult and time-consuming task, due to the large-scale distributed systems involved, the existence of so many configuration parameters, and the inability to easily determine the impact of tuning these parameters.in this paper we present an approach for the effective testing of system configurations for critical iot analytics pipelines. we demonstrate our approach with a prototype that we called timon which is integrated with kubernetes. this tool allows pipelines to be easily replicated in parallel and evaluated to determine the optimal configuration for specific applications. we demonstrate the usefulness of our approach by investigating different configurations of an exemplary geographically-based traffic monitoring application implemented in apache flink. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/218.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/218.txt new file mode 100644 index 0000000000000000000000000000000000000000..7a25ac4257d4865e6fc077e3ea0583c45ce30010 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/218.txt @@ -0,0 +1 @@ + to be able to run tasks asynchronously on nvidia gpus a programmer must explicitly implement asynchronous execution in their code using the syntax of cuda streams. streams allow a programmer to launch independent concurrent execution tasks, providing the ability to utilise different functional units on the gpu asynchronously. for example, it is possible to transfer the results from a previous computation performed on input data n-1, over the pcie bus whilst computing the result for input data n, by placing different tasks in different cuda streams. the benefit of such an approach is that the time taken for the data transfer between the host (server hosting the gpu) and device (the gpu) can be hidden with computation. this case study deals with the implementation of cuda streams into astroaccelerate. astroaccelerate is a gpu accelerated real-time signal processing pipeline for timedomain radio astronomy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/219.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/219.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0d0527b91ad5bf79f7d141f00c25178bfbb0384 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/219.txt @@ -0,0 +1 @@ + the optimization of submodular functions constitutes a viable way to perform clustering. strong approximation guarantees and feasible optimization w.r.t. streaming data make this clustering approach favorable. technically, submodular functions map subsets of data to real values, which indicate how "representative" a specific subset is. optimal sets might then be used to partition the data space and to infer clusters. exemplarbased clustering is one of the possible submodular functions, but suffers from high computational complexity. however, for practical applications, the particular real-time or wall-clock runtime is decisive. in this work, we present a novel way to evaluate this particular function on gpus, which keeps the necessities of optimizers in mind and reduces wall-clock run-time. to discuss our gpu algorithm, we investigated both the impact of different run-time critical problem properties, like data dimensionality and the number of data points in a subset, and the influence of required floating-point precision. in reproducible experiments, our gpu algorithm was able to achieve competitive speedups of up to 72x depending on whether multi-threaded computation on cpus was used for comparison and the type of floatingpoint precision required. half-precision gpu computation led to large speedups of up to 452x compared to single-precision, single-thread cpu computations. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/22.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/22.txt new file mode 100644 index 0000000000000000000000000000000000000000..34d66a8c0977254197746e9393d222367437d5d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/22.txt @@ -0,0 +1 @@ + probabilistic law discovery (pld) is a logic based machine learning method, which implements a variant of probabilistic rule learning. in several aspects, pld is close to decision tree/random forest methods, but it differs significantly in how relevant rules are defined. the learning procedure of pld solves the optimization problem related to the search for rules (called probabilistic laws), which have a minimal length and relatively high probability. at inference, ensembles of these rules are used for prediction. probabilistic laws are human-readable and pld based models are transparent and inherently interpretable. applications of pld include classification/clusterization/regression tasks, as well as time series analysis/anomaly detection and adaptive (robotic) control. in this paper, we outline the main principles of pld, highlight its benefits and limitations and provide some application guidelines. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/220.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/220.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d9d95c4c6da142beb7d32d16334d72f94155baf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/220.txt @@ -0,0 +1 @@ + multicore cpu architectures have been established as a structure for general-purpose systems for high-performance processing of applications. recent multicore cpu has evolved as a system architecture based on non-uniform memory architecture. for the technique of using the kernel space that shifts the tasks to the ideal memory node, the characteristics of the applications of the user-space cannot be considered. therefore, kernel level approaches cannot execute memory scheduling to recognize the importance of user applications. moreover, users need to run applications after sufficiently understanding the multicore cpu based on non-uniform memory architecture to ensure the high performance of the user's applications. this paper presents a user-space memory scheduler that allocates the ideal memory node for tasks by monitoring the characteristics of non-uniform memory architecture. from our experiment, the proposed system improved the performance of the application by up to 25% compared to the existing system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/221.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/221.txt new file mode 100644 index 0000000000000000000000000000000000000000..5f56ac6026a280edd32a2e0a2248f6de4d44dab0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/221.txt @@ -0,0 +1 @@ + there has been recently a lot of progress in designing efficient partially synchronous bft consensus protocols that are meant to serve as core consensus engines for proof of stake blockchain systems. while the state-of-the-art solutions attain virtually optimal performance under this theoretical model, there is still room for improvement, as several practical aspects of such systems are not captured by this model. 
most notably, during regular execution, due to financial incentives in such systems, one expects an overwhelming fraction of nodes to honestly follow the protocol rules and only few of them to be faulty, most likely due to temporary network issues. intuitively, the fact that almost all nodes behave honestly should result in stronger confidence in blocks finalized in such periods, however it is not the case under the classical model, where finality is binary.we propose highway, a new consensus protocol that is safe and live in the classical partially synchronous bft model, while at the same time offering practical improvements over existing solutions. specifically, block finality in highway is not binary but is expressed by fraction of nodes that would need to break the protocol rules in order for a block to be reverted. during periods of honest participation finality of blocks might reach well beyond 1∕3 (as what would be the maximum for classical protocols), up to even 1 (complete certainty). having finality defined this way, highway offers flexibility with respect to the configuration of security thresholds among nodes running the protocol, allowing nodes with lower thresholds to reach finality faster than the ones requiring higher levels of confidence. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/222.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/222.txt new file mode 100644 index 0000000000000000000000000000000000000000..af1f665f5affd2ec19421ae0f10d8a9c6412d5cc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/222.txt @@ -0,0 +1 @@ + the hashgraph consensus algorithm is an algorithm for asynchronous byzantine fault tolerance intended for distributed shared ledgers. its main distinguishing characteristic is it achieves consensus without exchanging any extra messages; each participant's votes can be determined from public information, so votes need not be transmitted.in this paper, we discuss our experience formalizing the hashgraph algorithm and its correctness proof using the coq proof assistant. the paper is self-contained; it includes a complete discussion of the algorithm and its correctness argument in english. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/223.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/223.txt new file mode 100644 index 0000000000000000000000000000000000000000..cb521e07d637fd5e8bd2a6a1963ce61f92ddb3c5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/223.txt @@ -0,0 +1 @@ + we present a snn simulator which scales to millions of neurons, billions of synapses, and 8 gpus. this is made possible by 1) a novel, cache-aware spike transmission algorithm 2) a model parallel multi-gpu distribution scheme and 3) a static, yet very effective load balancing strategy. the simulator further features an easy to use api and the ability to create custom models. we compare the proposed simulator against two state of the art ones on a series of benchmarks using three wellestablished models. we find that our simulator is faster, consumes less memory, and scales linearly with the number of gpus. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/224.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/224.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a5765dc4698466db6345c7c2b8faf330d214982 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/224.txt @@ -0,0 +1 @@ + measurement-based analysis of software timing behavior provides important insight and evidence for flight certification of modern avionics systems. for multicore systems, however, this analysis is challenging due to interference effects from shared hardware resource usage. we present an approach to multicore timing analysis that uses interference generators to stress interference channels in multicore systems. the test methodology comprises two steps. first, platform characterization measures the sensitivity of the hardware and rtos to targeted interference channels using a combination of interference generators. second, software characterization measures the timing behavior of instrumented applications while interference is generated on shared resources. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/225.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/225.txt new file mode 100644 index 0000000000000000000000000000000000000000..21724269a066c08680c765686610c085dceef831 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/225.txt @@ -0,0 +1 @@ + distributed data processing systems like mapreduce, spark, and flink are popular tools for analysis of large datasets with cluster resources. yet, users often overprovision resources for their data processing jobs, while the resource usage of these jobs also typically fluctuates considerably. therefore, multiple jobs usually get scheduled onto the same shared resources to increase the resource utilization and throughput of clusters. however, job runtimes and the utilization of shared resources can vary significantly depending on the specific combinations of co-located jobs. this paper presents hugo, a cluster scheduler that continuously learns how efficiently jobs share resources, considering metrics for the resource utilization and interference among co-located jobs. the scheduler combines offline grouping of jobs with online reinforcement learning to provide a scheduling mechanism that efficiently generalizes from specific monitored job combinations yet also adapts to changes in workloads. our evaluation of a prototype shows that the approach can reduce the runtimes of exemplary spark jobs on a yarn cluster by up to 12.5%, while resource utilization is increased and waiting times can be bounded. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/226.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/226.txt new file mode 100644 index 0000000000000000000000000000000000000000..11e09e0cbcc7c76e0074fd55d3cc94e056380da1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/226.txt @@ -0,0 +1 @@ + real-time systems increasingly use multicore processors in order to satisfy thermal, power, and computational requirements. to exploit the architectural parallelism offered by the multicore processors, parallel task models, scheduling algorithms and response-time analyses with respect to real-time constraints have to be provided. 
in this paper, we propose a reservation-based scheduling algorithm for sporadic constrained-deadline parallel conditional dag tasks with probabilistic execution behaviour for applications that can tolerate bounded number of deadline misses and bounded tardiness. we devise design rules and analyses to guarantee bounded tardiness for a specified bounded probability for k-consecutive deadline misses without enforcing late jobs to be immediately aborted. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/227.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/227.txt new file mode 100644 index 0000000000000000000000000000000000000000..7915d73ef8831cfe18d8f9e831930f26a39c0ea6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/227.txt @@ -0,0 +1 @@ + the problem of bayesian filtering and smoothing in nonlinear models with additive noise is an active area of research. classical taylor series as well as more recent sigma-point based methods are two well-known strategies to deal with these problems. however, these methods are inherently sequential and do not in their standard formulation allow for parallelization in the time domain. in this paper, we present a set of parallel formulas that replace the existing sequential ones in order to achieve lower time (span) complexity. our experimental results done with a graphics processing unit (gpu) illustrate the efficiency of the proposed methods over their sequential counterparts. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/228.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/228.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef1579a0478e23f92fca31715a2260c88d73a64d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/228.txt @@ -0,0 +1 @@ + as simulating complex biological processes become more important for modern medicine, new ways to compute this increasingly challenging data are necessary. in this paper, one of the most extensive volunteer-based distributed computing systems, called folding@home, is analyzed, and a trust-based approach is developed based upon it. afterward, all advantages and disadvantages are presented. this approach uses trusted communities that are a subset of all available clients where they trust each other. using such tcs, the system becomes more organic and responds better to malicious or malfunctioning clients. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/229.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/229.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c8e87c0524c18612748b3a6bf273ccbdbca38fe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/229.txt @@ -0,0 +1 @@ + peer-review is a necessary and essential quality control step for scientific publications but lacks proper incentives. indeed, the process, which is very costly in terms of time and intellectual investment, not only is not remunerated by the journals but is also not openly recognized by the academic community as a relevant scientific output for a researcher. therefore, scientific dissemination is affected in timeliness, quality and fairness. here, to solve this issue, we propose a blockchain-based incentive system that rewards scientists for peer-reviewing other scientists' work and that builds up trust and reputation. 
we designed a privacy-oriented protocol of smart contracts called ants-review that allows authors to issue a bounty for open anonymous peer-reviews on ethereum. if requirements are met, peer reviews will be accepted and paid by the approver proportionally to their assessed quality. to promote ethical behaviour and inclusiveness the system implements a gamified mechanism that allows the whole community to evaluate the peer-reviews and vote for the best ones. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/23.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/23.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a80ca2cde15d44ab3c2400430cc14b0b0ebe74a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/23.txt @@ -0,0 +1 @@ + practically all of the planning research is limited to states represented in terms of boolean and numeric state variables. many practical problems, for example, planning inside complex software systems, require far more complex data types, and even real-world planning in many cases requires concepts such as sets of objects, which are not convenient to express in modeling languages with scalar types only. in this work, we investigate a modeling language for complex software systems, which supports complex data types such as sets, arrays, records, and unions. we give a reduction of a broad range of complex data types and their operations to boolean logic, and then map this representation further to pddl to be used with domain-independent pddl planners. we evaluate the practicality of this approach, and provide solutions to some of the issues that arise in the pddl translation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/230.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/230.txt new file mode 100644 index 0000000000000000000000000000000000000000..6aed7305ef6f1674a32114f80249ef90039485c7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/230.txt @@ -0,0 +1 @@ + this paper approaches the integrated lot sizing and scheduling problem (ilssp), in which non-identical machines work in parallel with non-triangular sequence-dependent setup costs and times, setup carryover and capacity limitation. the aim of the studied ilssp, here called ilssp-nt on parallel machines, is to determine a production planning and tasks sequencing that meet period demands without delay and in such a way that the total costs of production, machine setup and inventory are minimized. the dearth of literature on the ilssp-nt, despite the increasing amount of applications in the industrial sector, mainly in the food processing industry, motivated us to conduct this study. in this paper, we propose efficient methods to solve the ilssp-nt on parallel machines. the methods virtually consist in the hybridization of the relax-and-fix and fix-and-optimize methods with the path-relinking and kernel search heuristics. to assess how well the heuristics solve the ilssp-nt on parallel machines, we compared their results with those of the cplex solver with a fixed cpu time limit. the proposed matheuristics significantly outperformed cplex in most of the tested instances. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/231.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/231.txt new file mode 100644 index 0000000000000000000000000000000000000000..3fc315f020c897b22e6ed90742b482be7257ed1c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/231.txt @@ -0,0 +1 @@ + federated learning struggles with their heavy energy footprints on battery-powered devices. the learning process keeps all devices awake while draining expensive battery power to train a shared model collaboratively, yet it may still leak sensitive personal information. traditional energy management techniques in system kernel mode can force the training device entering low power states, but it may violate the slo of the collaborative learning. to address the conflict between learning slo and energy efficiency, we propose deal, an energy efficient learning system that saves energy and preserves privacy with a decremental learning design. deal reduces the energy footprint from two layers: 1) an optimization layer that selects a subset of workers with sufficient capacity and maximum rewards. 2) a specified decremental learning algorithm that actively provides a decremental and incremental update functions, which allows kernel to correctly tune the local dvfs. we prototyped deal in containerized services with modern smartphone profiles and evaluated it with several learning benchmarks with realistic traces. we observed that deal achieves 75.6%-82.4% less energy footprint in different datasets, compared to the traditional methods. all learning processes are faster than state-of-thepractice fl frameworks up to 2-4x in model convergence. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/232.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/232.txt new file mode 100644 index 0000000000000000000000000000000000000000..80204484942931b2d2788d91bf27e4c902bd2fdc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/232.txt @@ -0,0 +1 @@ + federated learning (fl) has shown great potential as a privacy-preserving solution to learning from decentralized data that are only accessible to end devices (i.e., clients). in many scenarios, however, a large proportion of the clients are probably in possession of low-quality data that are biased, noisy or even irrelevant. as a result, they could significantly slow down the convergence of the global model we aim to build and also compromise its quality. in light of this, we propose fedprof, a novel algorithm for optimizing fl under such circumstances without breaching data privacy. the key of our approach is a distributional representation profiling and matching scheme that uses the global model to dynamically profile data representations and allows for low-cost, lightweight representation matching. based on the scheme we adaptively score each client and adjust its participation probability so as to mitigate the impact of low-value clients on the training process. we have conducted extensive experiments on public datasets using various fl settings. the results show that the selective behaviour of our algorithm leads to a significant reduction in the number of communication rounds and the amount of time (up to 2.4× speedup) for the global model to converge and also provides accuracy gain. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/233.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/233.txt new file mode 100644 index 0000000000000000000000000000000000000000..7afae4bf742e744b5f802f06dfdc26a172350c91 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/233.txt @@ -0,0 +1 @@ + the maturing of blockchain technology leads to heterogeneity, where multiple solutions specialize in a particular use case. while the development of different blockchain networks shows great potential for blockchains, the isolated networks have led to data and asset silos, limiting the applications of this technology. blockchain interoperability solutions are essential to enable distributed ledgers to reach their full potential. such solutions allow blockchains to support asset and data transfer, resulting in the development of innovative applications.this paper proposes a novel blockchain interoperability solution for permissioned blockchains based on the publish/subscribe architecture. we implemented a prototype of this platform to show the feasibility of our design. we evaluate our solution by implementing examples of the different publisher and subscriber networks, such as hyperledger besu, which is an ethereum client, and two different versions of hyperledger fabric. we present a performance analysis of the whole network that indicates its limits and bottlenecks. finally, we discuss the extensibility and scalability of the platform in different scenarios. our evaluation shows that our system can handle a throughput in the order of the hundreds of transactions per second. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/234.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/234.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb3cf97a095852980cb33cac207834d3ce6fc0cb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/234.txt @@ -0,0 +1 @@ + byzantine fault tolerant (bft) state machine replication (smr) is an important building block for constructing permissioned blockchain systems. in contrast to nakamoto consensus where any block obtains higher assurance as buried deeper in the blockchain, in bft smr, any committed block is secure has a fixed resilience threshold. in this paper, we investigate strengthened fault tolerance (sft) in bft smr under partial synchrony, which provides gradually increased resilience guarantees (like nakamoto consensus) during an optimistic period when the network is synchronous and the number of byzantine faults is small. moreover, the committed blocks can tolerate more than one-third (up to two-thirds) corruptions even after the optimistic period. compared to the prior best solution flexible bft which requires quadratic message complexity, our solution maintains the linear message complexity of state-of-the-art bft smr protocols and requires only marginal bookkeeping overhead. we implement our solution over the open-source diem project, and give experimental results that demonstrate its efficiency under real-world scenarios. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/235.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/235.txt new file mode 100644 index 0000000000000000000000000000000000000000..e4b8ffd2642fd766deaf21d5e4711528bb216252 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/235.txt @@ -0,0 +1 @@ + this paper revisits two classical distributed problems in anonymous networks, namely spanning tree construction and topology recognition, from the point of view of graph covering theory. for both problems, we characterize necessary and sufficient conditions on the communication graph in terms of directed symmetric coverings. these characterizations answer a long-standing open question posed by yamashita and kameda , and shed new light on the connection between coverings and the concepts of views and quotient graphs developed by the same authors. characterizing conditions in terms of coverings is significant because it connects the field with a vast body of classical literature in graph theory and algebraic topology. in particular, it gives access to powerful tools such as reidemeister's theorem and mazurkiewicz's algorithm. combined together, these tools allow us to present elegant proofs of otherwise intricate results, and their constructive nature makes them effectively usable in the algorithms. this paper also gives us the opportunity to present the field of covering theory in a pedagogical way, with a focus on the two aforementioned tools, whose potential impact goes beyond the specific problems considered in this work. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/236.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/236.txt new file mode 100644 index 0000000000000000000000000000000000000000..64825f815cf17ff0e121cf85fd5f2de690b1748a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/236.txt @@ -0,0 +1 @@ + vehicular ad hoc network (vanet) is an integral part of vehicular communication. vanet suffers many problems such as scalability. to solve scalability and other problems of vanet, clustering is proposed. vanet clustering is different than any other kind of clustering due to the high mobility of the vehicles. likewise, vanet and vanet clustering, vanet simulator requires some unique features such as internet based real-time data processing, huge data analysis, the complex calculation to maintain hierarchy among the vehicles, etc.; however, neither web based vanet simulator nor clustering module available in the existing simulators. therefore, a simulator that will be able to simulate any feature of vanet equipped with a clustering module and accessible via the internet is a growing need in vehicular communication research. at the telecom and network research lab (tnrl), university of oklahoma, we have developed a fully functional discrete-event vanet simulator that includes all the features of vanet clustering. moreover, the cloud based vanet simulator (cvanetsim) is coming with an easy and interactive web interface. to our best of our knowledge, cvanetsim is the first of its kind which integrates features of the vanet simulator, builtin vanet clustering module, and accessible through the internet. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/237.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/237.txt new file mode 100644 index 0000000000000000000000000000000000000000..6571331f51dcdb44ef40781cebb9145c22ba2f58 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/237.txt @@ -0,0 +1 @@ + we show how a particular variety of hierarchical nets, where the firing of a transition in the parent net must correspond to an execution in some child net, can be modelled utilizing a functorial semantics from a free category -representing the parent net -to the category of sets and spans between them. this semantics can be internalized via grothendieck construction, resulting in the category of executions of a petri net representing the semantics of the overall hierarchical net. we conclude the paper by giving an engineering-oriented overview of how our model of hierarchical nets can be implemented in a transaction-based smart contract environment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/238.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/238.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b2335362c549e9a5c8cc6bceaa85534265d44e2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/238.txt @@ -0,0 +1 @@ + loop compilation for tightly coupled processor arrays (tcpas), a class of massively parallel loop accelerators, entails solving nphard problems, yet depends on the loop bounds and number of available processing elements (pes), parameters known only at runtime because of dynamic resource management and input sizes. therefore, this article proposes a two-phase approach called symbolic loop compilation: at compile time, the necessary np-complete problems are solved and the solutions compiled into a space-efficient symbolic configuration. at runtime, a concrete configuration is generated from the symbolic configuration according to the parameters values. we show that the latter phase, called instantiation, runs in polynomial time with its most complex step, program instantiation, not depending on the number of pes.as validation, we performed symbolic loop compilation on real-world loops and measured time and space requirements. our experiments confirm that a symbolic configuration is space-efficient and suited for systems with little memory-often, a symbolic configuration is smaller than a single concrete configuration-and that program instantiation scales well with the number of pes-for example, when instantiating a symbolic configuration of a matrix-matrix multiplication, the execution time is similar for 4 × 4 and 32 × 32 pes. ccs concepts: • computer systems organization → systolic arrays; embedded and cyber-physical systems; • software and its engineering → compilers. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/239.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/239.txt new file mode 100644 index 0000000000000000000000000000000000000000..cfbd950d8bbdc14c16eb72e537df1e436628be28 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/239.txt @@ -0,0 +1 @@ + aspect-based sentiment analysis (absa) has attracted increasing attention recently due to its broad applications.in existing absa datasets, most sentences contain only one aspect or multiple aspects with the same sentiment polarity, which makes absa task degenerate to sentence-level sentiment analysis.in this paper, we present a new large-scale multi-aspect multi-sentiment (mams) dataset, in which each sentence contains at least two different aspects with different sentiment polarities.the release of this dataset would push forward the research in this field.in addition, we propose simple yet effective capsnet and capsnet-bert models which combine the strengths of recent nlp advances.experiments on our new dataset show that the proposed model significantly outperforms the state-of-the-art baseline methods 1 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/24.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/24.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbca16c9b80d52caa20bf9ae28d2df8857af2bfe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/24.txt @@ -0,0 +1 @@ + this paper explores the application of artificial intelligence (ai) techniques for generating the trajectories of fleets of unmanned aerial vehicles (uavs). the two main challenges addressed include accurately predicting the paths of uavs and efficiently avoiding collisions between them.firstly, the paper systematically applies a diverse set of activation functions to a feedforward neural network (ffnn) with a single hidden layer, which enhances the accuracy of the predicted path compared to previous work.secondly, we introduce a novel activation function, adaptoswelligauss, which is a sophisticated fusion of swish and elliott activations, seamlessly integrated with a scaled and shifted gaussian component. swish facilitates smooth transitions, elliott captures abrupt trajectory changes, and the scaled and shifted gaussian enhances robustness against noise. this dynamic combination is specifically designed to excel in capturing the complexities of uav trajectory prediction. this new activation function gives substantially better accuracy than all existing activation functions.thirdly, we propose a novel integrated collision detection, avoidance, and batching (icdab) strategy that merges two complementary uav collision avoidance techniques: changing uav trajectories and altering their starting times, also referred to as batching. 
this integration helps overcome the disadvantages of both-reduction in the number of trajectory manipulations, which avoids overly convoluted paths in the first technique, and smaller \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/240.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/240.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9bc3015a028790292202b06b3febf88629b9bd3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/240.txt @@ -0,0 +1 @@ + this study presents a machine learning approach to accurately assess the quality of coconut sugar using rgb values. python and scikit-learn were used to run the following machine learning algorithms: artificial neural network (ann), stochastic gradient descent (sgd), k-nearest neighbors (k-nn) algorithm, support vector machine (svm), decision tree (dt) and random forest (rf). comparisons were made between the aforementioned machine learning algorithms by evaluating the accuracy and the average running time of each training model. results of the study show that the sgd is superior in terms of accuracy but falls short to k-nn and svc in terms of running time. in this fashion, a plot between the accuracy and the running time was made and it wasobserved that algorithms with higher accuracies correspondingly have also higher running times. by this very nature, experimental results show that the sgd holds merit in accurately assessing the coconut sugar quality, despite its expense in running time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/241.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/241.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c1c3b2f2c1f9bbae147f1e4b5cdc4176d62697c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/241.txt @@ -0,0 +1 @@ + using artificial neural networks is an important approach for drawing inferences and making predictions when analyzing large and complex data sets. tensorflow and pytorch are two widely-used machine learning frameworks that support artificial neural network models. we evaluated the relative effectiveness of these two frameworks to model a binary classification problem. the binary classification was done using sentiment analysis on a publicly-available data set of product reviews. we first implemented the same model in the same testing environment to see if we were able to achieve similar accuracy with both frameworks. we then compared the training time, memory usage, and ease of use of the two frameworks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/242.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/242.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4482375182e84dbe374ed5f9f65452d974965f8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/242.txt @@ -0,0 +1 @@ + this paper describes a machine learning approach for visual object detection which is capable of processing images extremely rapidly and achieving high detection rates. this work is distinguished by three key contributions. the first is the introduction of a new image representation called the "integral image" which allows the features used by our detector to be computed very quickly. 
the second is a learning algorithm, based on adaboost, which selects a small number of critical visual features from a larger set and yields extremely efficient classifiers . the third contribution is a method for combining increasingly more complex classifiers in a "cascade" which allows background regions of the image to be quickly discarded while spending more computation on promising object-like regions. the cascade can be viewed as an object specific focus-of-attention mechanism which unlike previous approaches provides statistical guarantees that discarded regions are unlikely to contain the object of interest. in the domain of face detection the system yields detection rates comparable to the best previous systems. used in real-time applications, the detector runs at 15 frames per second without resorting to image differencing or skin color detection. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/243.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/243.txt new file mode 100644 index 0000000000000000000000000000000000000000..8254325d195346624ba5bd2942bc6f97bb8dd523 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/243.txt @@ -0,0 +1 @@ + we conduct the first large meta-analysis of overfitting due to test set reuse in the machine learning community. our analysis is based on over one hundred machine learning competitions hosted on the kaggle platform over the course of several years. in each competition, numerous practitioners repeatedly evaluated their progress against a holdout set that forms the basis of a public ranking available throughout the competition. performance on a separate test set used only once determined the final ranking. by systematically comparing the public ranking with the final ranking, we assess how much participants adapted to the holdout set over the course of a competition. our study shows, somewhat surprisingly, little evidence of substantial overfitting. these findings speak to the robustness of the holdout method across different data domains, loss functions, model classes, and human analysts. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/244.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/244.txt new file mode 100644 index 0000000000000000000000000000000000000000..805ad12bb99a2c8f4a6b528a39ce5da550d47599 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/244.txt @@ -0,0 +1 @@ + chronic kidney disease (ckd) is a global health problem with high morbidity and mortality rate, and it induces other diseases. since there are no obvious symptoms during the early stages of ckd, patients often fail to notice the disease. early detection of ckd enables patients to receive timely treatment to ameliorate the progression of this disease. machine learning models can effectively aid clinicians achieve this goal due to their fast and accurate recognition performance. in this study, we propose a machine learning methodology for diagnosing ckd. the ckd data set was obtained from the university of california irvine (uci) machine learning repository, which has a large number of missing values. knn imputation was used to fill in the missing values, which selects several complete samples with the most similar measurements to process the missing data for each incomplete sample. missing values are usually seen in real-life medical situations because patients may miss some measurements for various reasons. 
after effectively filling out the incomplete data set, six machine learning algorithms (logistic regression, random forest, support vector machine, k-nearest neighbor, naive bayes classifier and feed forward neural network) were used to establish models. among these machine learning models, random forest achieved the best performance with 99.75% diagnosis accuracy. by analyzing the misjudgments generated by the established models, we proposed an integrated model that combines logistic regression and random forest by using perceptron, which could achieve an average accuracy of 99.83% after ten times of simulation. hence, we speculated that this methodology could be applicable to more complicated clinical data for disease diagnosis.index terms chronic kidney disease, machine learning, knn imputation, integrated model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/245.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/245.txt new file mode 100644 index 0000000000000000000000000000000000000000..1341a59177ad134ae6219867b263088faf162ec5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/245.txt @@ -0,0 +1 @@ + reducing traffic delay at signalized intersections is a key objective of intelligent transport systems. many existing applications do not have the intelligence embedded to learn about the environmental parameters (such weather, incident etc.) that influence traffic flow; therefore, they are passive to the dynamic nature of vehicle traffic. this report proposes a deep learning neural networks method to optimise traffic flow and reduce congestion at key intersections, which will enhance the ability of signalized intersections to respond to changing traffic and environmental conditions. the input features of the proposed methods are composed of historical data of all the movements of an intended intersection, time series and environmental variables such as weather conditions etc. the method can learn about the region and predict traffic volumes at any point in time. the output (i.e. predicted traffic volume) is fed into the delay equation that generates best green times to manage traffic delay. the performance of our method is measured by root mean squared error (rmse), against other models: radial basic function, random walk, support vector machine and bp neural network. experiments conducted on real datasets show that our deep neural network method outperforms other methods and can be deployed to optimize the operations of traffic signals. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/246.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/246.txt new file mode 100644 index 0000000000000000000000000000000000000000..3737e126409b3586da602d25b90c8a99e7fc3bdd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/246.txt @@ -0,0 +1 @@ + in today s world, social networking website like twitter, facebook, instagram, etc. 
plays a very significant role.twitter is a social networking website which provides loads of data that can be used for various purposes such as sentiment analysis for predictions, marketing, elections, etc.the objective of this research is to classify twitter data into sentiments (positive or negative) by using different supervised machine learning classifiers to predict the delhi corporation elections results and to identify the most accurate machine learning classifier.the research highlights the performance of different classifiers on the twitter dataset related to political parties.experiments show that m u l t i n o m i a l n a ï v e b a y e s classifier is the most a c c u r a t e s e n t i m e n t predictors with 78%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/247.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/247.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f7cf3e4f01ee7075b21073e00235751839a1581 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/247.txt @@ -0,0 +1 @@ + remote photoplethysmography (rppg), which aims at measuring heart activities without any contact, has great potential in many applications (e.g., remote healthcare).existing rppg approaches rely on analyzing very fine details of facial videos, which are prone to be affected by video compression.here we propose a two-stage, endto-end method using hidden rppg information enhancement and attention networks, which is the first attempt to counter video compression loss and recover rppg signals from highly compressed videos.the method includes two parts:1) a spatio-temporal video enhancement network (stven) for video enhancement, and2) an rppg network (rppgnet) for rppg signal recovery.the rppgnet can work on its own for robust rppg measurement, and the stven network can be added and jointly trained to further boost the performance especially on highly compressed videos.comprehensive experiments are performed on two benchmark datasets to show that,1) the proposed method not only achieves superior performance on compressed videos with high-quality videos pair,2) it also generalizes well on novel data with only compressed videos available, which implies the promising potential for realworld applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/248.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/248.txt new file mode 100644 index 0000000000000000000000000000000000000000..9fce62d2970885565daf6a22f583efbf3b2f8ad8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/248.txt @@ -0,0 +1 @@ + the dominant sequence transduction models are based on complex recurrent or convolutional neural networks that include an encoder and a decoder. the best performing models also connect the encoder and decoder through an attention mechanism. we propose a new simple network architecture, the transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely. experiments on two machine translation tasks show these models to be superior in quality while being more parallelizable and requiring significantly less time to train. our model achieves 28.4 bleu on the wmt 2014 englishto-german translation task, improving over the existing best results, including ensembles, by over 2 bleu. 
on the wmt 2014 english-to-french translation task, our model establishes a new single-model state-of-the-art bleu score of 41.8 after training for 3.5 days on eight gpus, a small fraction of the training costs of the best models from the literature. we show that the transformer generalizes well to other tasks by applying it successfully to english constituency parsing both with large and limited training data. * equal contribution. listing order is random. jakob proposed replacing rnns with self-attention and started the effort to evaluate this idea. ashish, with illia, designed and implemented the first transformer models and has been crucially involved in every aspect of this work. noam proposed scaled dot-product attention, multi-head attention and the parameter-free position representation and became the other person involved in nearly every detail. niki designed, implemented, tuned and evaluated countless model variants in our original codebase and tensor2tensor. llion also experimented with novel model variants, was responsible for our initial codebase, and efficient inference and visualizations. lukasz and aidan spent countless long days designing various parts of and implementing tensor2tensor, replacing our earlier codebase, greatly improving results and massively accelerating our research.† work performed while at google brain. ‡ work performed while at google research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/249.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/249.txt new file mode 100644 index 0000000000000000000000000000000000000000..8254325d195346624ba5bd2942bc6f97bb8dd523 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/249.txt @@ -0,0 +1 @@ + we conduct the first large meta-analysis of overfitting due to test set reuse in the machine learning community. our analysis is based on over one hundred machine learning competitions hosted on the kaggle platform over the course of several years. in each competition, numerous practitioners repeatedly evaluated their progress against a holdout set that forms the basis of a public ranking available throughout the competition. performance on a separate test set used only once determined the final ranking. by systematically comparing the public ranking with the final ranking, we assess how much participants adapted to the holdout set over the course of a competition. our study shows, somewhat surprisingly, little evidence of substantial overfitting. these findings speak to the robustness of the holdout method across different data domains, loss functions, model classes, and human analysts. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/25.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/25.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e646d93f5d60cec75cdbe98410664580d804c06 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/25.txt @@ -0,0 +1 @@ + the goal of this paper is to introduce a new theoretical framework for optimal transport (ot), using the terminology and techniques of fully probabilistic design (fpd). optimal transport is the canonical method for comparing probability measures and has been successfully applied in a wide range of areas (computer vision rubner et al. , computer graphics solomon et al. , natural language processing kusner et al. , etc.). 
however, we argue that the current ot framework suffers from two shortcomings: first, it is hard to induce generic constraints and probabilistic knowledge in the ot problem; second, the current formalism does not address the question of uncertainty in the marginals, lacking therefore the mechanisms to design robust solutions. by viewing the ot problem as the optimal design of a probability density function with marginal constraints, we prove that ot is an instance of the more generic fpd framework. in this new setting, we can furnish the ot framework with the necessary mechanisms for processing probabilistic constraints and deriving uncertainty quantifiers, hence establishing a new extended framework, called fpd-ot. our main contribution in this paper is to establish the connection between ot and fpd, providing new theoretical insights for both. this will lay the foundations for the application of fpd-ot in a subsequent work, notably in processing more sophisticated knowledge constraints, as well as in designing robust solutions in the case of uncertain marginals. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/250.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/250.txt new file mode 100644 index 0000000000000000000000000000000000000000..88d631bba009e10a83c2ff8cb61d9bdbff9c0c63 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/250.txt @@ -0,0 +1 @@ + in this paper, we propose an attentional generative adversarial network (attngan) that allows attention-driven, multi-stage refinement for fine-grained text-to-image generation. with a novel attentional generative network, the at-tngan can synthesize fine-grained details at different subregions of the image by paying attentions to the relevant words in the natural language description. in addition, a deep attentional multimodal similarity model is proposed to compute a fine-grained image-text matching loss for training the generator. the proposed attngan significantly outperforms the previous state of the art, boosting the best reported inception score by 14.14% on the cub dataset and 170.25% on the more challenging coco dataset. a detailed analysis is also performed by visualizing the attention layers of the attngan. it for the first time shows that the layered attentional gan is able to automatically select the condition at the word level for generating different parts of the image. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/251.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/251.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ffeadb15e0a8787c783b41ad9789d8b06737661 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/251.txt @@ -0,0 +1 @@ + internet of things is making objects smarter and more autonomous. at the other side, online education is gaining momentum and many universities are now offering online degrees. content preparation for such programs usually involves recording the classes. in this article, we intend to introduce a deep learning-based camera management system as a substitute for the academic filming crew. the solution mainly consists of two cameras and a wearable gadget for the instructor. the fixed camera is used for the instructor's position and pose detection and the pantilt-zoom (ptz) camera does the filming. in the proposed solution, image processing and deep learning techniques are merged together. 
face recognition and skeleton detection algorithms are used to detect the position of the instructor. but the main contribution lies in the application of deep learning for the instructor's skeleton detection and postprocessing of the deep network output for correction of the pose detection results using a bayesian maximum a posteriori (map) estimator. this estimator is defined on a markov state machine. the pose detection result along with the position info is then used by the ptz camera controller for filming purposes. the proposed solution is implemented using openpose, which is a convolutional neural network for detection of body parts. feeding a neural network pose classifier with 12 features extracted from the output of the deep network yields an accuracy of 89%. however, as we show, the accuracy can be improved by the markov model and map estimator to reach as high as 95.5%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/252.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/252.txt new file mode 100644 index 0000000000000000000000000000000000000000..ace43ba28bd8ac0298d67d55a8e9e94dfafe9d5c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/252.txt @@ -0,0 +1 @@ + frauds are known to be dynamic and have no patterns, hence they are not easy to identify. fraudsters use recent technological advancements to their advantage. they somehow bypass security checks, leading to the loss of millions of dollars. analyzing and detecting unusual activities using data mining techniques is one way of tracing fraudulent transactions. this paper aims to benchmark multiple machine learning methods such as k-nearest neighbor (knn), random forest and support vector machines (svm), as well as deep learning methods such as autoencoders, convolutional neural networks (cnn), restricted boltzmann machine (rbm) and deep belief networks (dbn). the datasets which will be used are the european (eu), australian and german datasets. the area under the roc curve (auc), matthews correlation coefficient (mcc) and cost of failure are the three evaluation metrics that will be used. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/253.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/253.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae50bcd5a220738261ebbe87fb477471f143c2b1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/253.txt @@ -0,0 +1 @@ + blockchain is an emerging technology that enables a vital framework for various cryptocurrency operations such as bitcoin. notably, without any involvement from third party authorities, blockchain offers a decentralized consensus scheme to process user transactions, fund transfers, and various data records in a secure and reliable way. furthermore, bitcoin price forecasting has been a vital research trend, where machine learning techniques play a substantial role. a sophisticated and appropriately trained model can be useless if the features being tested are unreliable. independently, one of the most desirable aspects of a system that utilizes the blockchain is the concrete objectiveness by which each entry is cataloged. any data collected and reported on the blockchain is unambiguous, and therefore, extremely suitable for a machine learning algorithm.
to efficiently forecast bitcoin price movements, in this work, we propose and examine various lenses by which to view this union, each with varying degrees of success. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/254.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/254.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf7b85f2b9adcf99d37967172bfe9837e5987b7a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/254.txt @@ -0,0 +1 @@ + figure 1: faceforensics++ is a dataset of facial forgeries that enables researchers to train deep-learning-based approaches in a supervised fashion.the dataset contains manipulations created with four state-of-the-art methods, namely, face2face, faceswap, deepfakes, and neuraltextures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/255.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/255.txt new file mode 100644 index 0000000000000000000000000000000000000000..f3abe14328558e1ee06d74f32644cd269a0a1a23 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/255.txt @@ -0,0 +1 @@ + water managers in the western united states (u.s.) rely on longterm forecasts of temperature and precipitation to prepare for droughts and other wet weather extremes. to improve the accuracy of these longterm forecasts, the u.s. bureau of reclamation and the national oceanic and atmospheric administration (noaa) launched the subseasonal climate forecast rodeo, a year-long real-time forecasting challenge in which participants aimed to skillfully predict temperature and precipitation in the western u.s. two to four weeks and four to six weeks in advance. here we present and evaluate our machine learning approach to the rodeo and release our subseasonalrodeo dataset, collected to train and evaluate our forecasting system.our system is an ensemble of two nonlinear regression models. the first integrates the diverse collection of meteorological measurements and dynamic model forecasts in the subseasonalrodeo dataset and prunes irrelevant predictors using a customized multitask feature selection procedure. the second uses only historical measurements of the target variable (temperature or precipitation) and introduces multitask nearest neighbor features into a weighted local linear regression. each model alone is significantly more accurate than the debiased operational u.s. climate forecasting system (cfsv2), and our ensemble skill exceeds that of the top rodeo competitor for each target variable and forecast horizon. moreover, over 2011-2018, an ensemble of our regression models and debiased cfsv2 improves debiased cfsv2 skill by 40-50% for temperature and 129-169% for precipitation. we hope that both our dataset and our methods will help to advance the state of the art in subseasonal forecasting. ccs concepts• applied computing → environmental sciences. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/256.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/256.txt new file mode 100644 index 0000000000000000000000000000000000000000..509d2dd7cbe5d98ba4f0924285cea31cbb507c1a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/256.txt @@ -0,0 +1 @@ + this paper highlights an e-learning system created using moodle which is an open-source learningmanagement system (lms) that enables a better learning environment between the tutors and students. 
this system detects two learner profiles, i.e. students with learning disability (ld) and without learning disability (non-ld), using dedicated courses designed on the basis of various aspects of an ld student. this work also describes multiple stages of our approach to informal testing used to capture the learning parameters of dyslexic students. the first stage, i.e. data collection, has two approaches: the first approach pertains to a smaller age group of 8-10 years with limited parameters, whereas the second approach pertains to the age group 11-13 years, i.e. grades 6-8, with more parameters. natural language processing (nlp) has been used to perform speech-to-text (stt) conversion on the audio responses of the users. the analysis of these responses has been performed in python. to detect whether the user has ld (dyslexia in this case) or not, machine learning (ml) is used. two ml algorithms, namely logistic regression (lr) and support vector machine (svm), are used to perform binary classification with ld (1) and non-ld (0) as the two classes of the dataset. the results are shown for both approaches, and a comparative analysis shows that the dataset generated in the final approach for capturing parameters involving nlp is better and more robust. the lr algorithm shows better results than svm for performing detection based on the generated dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/257.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/257.txt new file mode 100644 index 0000000000000000000000000000000000000000..7c75bbfff6d8705e5d7414519ccb5e0dfadc261b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/257.txt @@ -0,0 +1 @@ + machine learning, as the core of artificial intelligence, is also the fundamental reason for computer intelligence, and has been widely used in the field of artificial intelligence. with the improvement of computers' ability to process data, deep learning has been highlighted in the field of machine learning. more and more researchers have joined in the theoretical and applied research of deep learning, and image recognition and classification are very important applications. this paper first compares deep learning with traditional machine learning methods, then introduces the development process of deep learning, studies and analyses deep learning network structures such as the deep belief network, convolutional neural network and recursive neural network, expounds the application of deep learning in image recognition and classification, and discusses the problems encountered in these applications along with the corresponding solutions. finally, the research status of deep learning in image recognition and classification is summarized and its prospects are discussed. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/258.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/258.txt new file mode 100644 index 0000000000000000000000000000000000000000..2d8a0c30a379ba18c5b2bfecc491d1ac6ff6a00c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/258.txt @@ -0,0 +1 @@ + agriculture is one of the main factors that decide the growth of any country. in india alone, around 65% of the population depends on agriculture. due to various seasonal conditions, crops get infected by various kinds of diseases.
these diseases first affect the leaves of the plant and later infect the whole plant, which in turn affects the quality and quantity of the crop cultivated. as there are a large number of plants on a farm, it becomes very difficult for the human eye to detect and classify the disease of each plant in the field, yet it is very important to diagnose each plant because these diseases may spread. hence, in this paper we introduce artificial intelligence based automatic plant leaf disease detection and classification for quick and easy detection of a disease, classification of the disease, and recommendation of the remedies required to cure it. this approach aims at increasing the productivity of crops in agriculture. in this approach we follow several steps, i.e. image collection, image preprocessing, segmentation and classification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/259.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/259.txt new file mode 100644 index 0000000000000000000000000000000000000000..9bd1e1349e4f8ca43c5b25293a710a8aa756adc0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/259.txt @@ -0,0 +1 @@ + deep learning is a promising approach for extracting accurate information from raw sensor data from iot devices deployed in complex environments. because of its multilayer structure, deep learning is also appropriate for the edge computing environment. therefore, in this article, we first introduce deep learning for iot into the edge computing environment. since existing edge nodes have limited processing capability, we also design a novel offloading strategy to optimize the performance of iot deep learning applications with edge computing. in the performance evaluation, we test the performance of executing multiple deep learning tasks in an edge computing environment with our strategy. the evaluation results show that our method outperforms other optimization solutions on deep learning for iot. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/26.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/26.txt new file mode 100644 index 0000000000000000000000000000000000000000..93b987a02fcf5a5b64ab47740bd1543b8ace4ee5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/26.txt @@ -0,0 +1 @@ + this study considers a federated learning setup where cost-sensitive and strategic agents train a learning model with a server. during each round, each agent samples a minibatch of training data and sends his gradient update. as an increasing function of his minibatch size choice, the agent incurs a cost associated with the data collection, gradient computation and communication. the agents have the freedom to choose their minibatch size and may even opt out from training. to reduce his cost, an agent may diminish his minibatch size, which may also cause an increase in the noise level of the gradient update. the server can offer rewards to compensate the agents for their costs and to incentivize their participation, but she lacks the capability of validating the true minibatch sizes of the agents. to tackle this challenge, the proposed reward mechanism evaluates the quality of each agent's gradient according to its distance to a reference which is constructed from the gradients provided by other agents.
it is shown that the proposed reward mechanism has a cooperative nash equilibrium in which the agents determine the minibatch size choices according to the requests of the server. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/260.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/260.txt new file mode 100644 index 0000000000000000000000000000000000000000..9d8e39b11bf214b9d3b13f0c482717ddf8794cca --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/260.txt @@ -0,0 +1 @@ + the stock market is an interesting industry to study. there are various variations present in it. many experts have been studying and researching on the various trends that the stock market goes through. one of the major studies has been the attempt to predict the stock prices of various companies based on historical data. prediction of stock prices will greatly help people to understand where and how to invest so that the risk of losing money is minimized. this application can also be used by companies during their initial public offering (ipo) to know what value to target for and how many shares they should release. so far there have been significant developments in this field. many researchers are looking at machine learning and deep learning as possible ways to predict stock prices. the proposed system works in two methods -regression and classification. in regression, the system predicts the closing price of stock of a company, and in classification, the system predicts whether the closing price of stock will increase or decrease the next day. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/261.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/261.txt new file mode 100644 index 0000000000000000000000000000000000000000..395f40b7344937c3a048b82c028f6346c672bbb0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/261.txt @@ -0,0 +1 @@ + the standard risk minimization paradigm of machine learning is brittle when operating in environments whose test distributions are different from the training distribution due to spurious correlations.training on data from many environments and finding invariant predictors reduces the effect of spurious features by concentrating models on features that have a causal relationship with the outcome.in this work, we pose such invariant risk minimization as finding the nash equilibrium of an ensemble game among several environments.by doing so, we develop a simple training algorithm that uses best response dynamics and, in our experiments, yields similar or better empirical accuracy with much lower variance than the challenging bi-level optimization problem of arjovsky et al. (2019).one key theoretical contribution is showing that the set of nash equilibria for the proposed game are equivalent to the set of invariant predictors for any finite number of environments, even with nonlinear classifiers and transformations.as a result, our method also retains the generalization guarantees to a large set of environments shown in arjovsky et al. (2019).the proposed algorithm adds to the collection of successful game-theoretic machine learning algorithms such as generative adversarial networks. 
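The stock-prediction abstract just above (260.txt) describes two modes of operation: regression on the closing price and classification of next-day direction. A minimal, purely illustrative sketch of that setup follows, assuming a pandas DataFrame of historical prices; the column names, lag features, and models are hypothetical stand-ins, not the authors' code.

```python
# Illustrative sketch only: regression on the next-day close and
# classification of next-day direction, as described in 260.txt.
# `prices` (a DataFrame with a "close" column) is a hypothetical input.
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.model_selection import train_test_split

def make_features(prices: pd.DataFrame, lags: int = 5) -> pd.DataFrame:
    df = pd.DataFrame({f"lag_{i}": prices["close"].shift(i) for i in range(1, lags + 1)})
    df["next_close"] = prices["close"].shift(-1)                       # regression target
    df["direction"] = (df["next_close"] > prices["close"]).astype(int) # classification target
    return df.dropna()

# hypothetical price series for demonstration
prices = pd.DataFrame({"close": np.cumsum(np.random.randn(500)) + 100})
data = make_features(prices)
X = data[[c for c in data.columns if c.startswith("lag_")]]

X_tr, X_te, y_tr, y_te = train_test_split(X, data["next_close"], shuffle=False)
print("regression R^2:", LinearRegression().fit(X_tr, y_tr).score(X_te, y_te))

X_tr, X_te, y_tr, y_te = train_test_split(X, data["direction"], shuffle=False)
print("direction accuracy:", LogisticRegression(max_iter=1000).fit(X_tr, y_tr).score(X_te, y_te))
```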
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/262.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/262.txt new file mode 100644 index 0000000000000000000000000000000000000000..13bb0116c4b2a5ecd703dd877db25d2182ab147d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/262.txt @@ -0,0 +1 @@ + with the popularity of the unicode system and the growing use of the internet, the use of bangla over social media is increasing. however, very few works have been done on bangla text for social media activity monitoring due to a lack of large annotated corpora, named dictionaries and morphological analyzers, which demands in-depth analysis from bangladesh's perspective. moreover, solving the issue by applying available techniques is very content specific, which means that false detection can occur if contents change from formal english to verbal abuse or sarcasm. also, performance may vary due to linguistic differences between english and non-english contents and the socio-emotional behaviour of the study population. to combat such issues, this paper proposes the use of machine learning algorithms and the inclusion of user information for cyber bullying detection on bangla text. for this purpose, a set of bangla text has been collected from available social media platforms and labelled as either bullied or not bullied for training different machine learning based classification models. cross-validation results of the models indicate that a support vector machine based algorithm achieves superior performance on bangla text with a detection accuracy of 97%. besides, the impact of user specific information such as location, age and gender can further improve the classification accuracy of a bangla cyber bullying detection system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/263.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/263.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0af183eb18011a57c922e598f0ea5685c0f5118 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/263.txt @@ -0,0 +1 @@ + machine learning is about prediction on unseen (test) data, and a set of algorithms is required to perform machine learning tasks. there are three types of machine learning, called supervised, unsupervised and reinforcement learning. in this paper we work on supervised learning. we take the iris dataset and use the k-nearest neighbors (knn) classification algorithm. our purpose is to build a model that is able to automatically recognize the iris species. the tools used in this paper are numpy, pandas, matplotlib and the machine learning library scikit-learn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/264.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/264.txt new file mode 100644 index 0000000000000000000000000000000000000000..96912a3367d2c8abc656f319827e7d7eceb08f9d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/264.txt @@ -0,0 +1 @@ + the plethora of e-commerce products within the last few years has become a serious challenge for shoppers when searching for relevant product information. this has consequently led to the emergence of a recommendation assistant technology that has the capability to discover relevant shopping products that meet the preferences of a user.
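The abstract for 263.txt above describes KNN classification of the iris dataset with scikit-learn. A minimal sketch of that pipeline, illustrative only and using just the libraries the abstract names:

```python
# Minimal sketch of the setup described in 263.txt: KNN classification
# of iris species with scikit-learn.
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

knn = KNeighborsClassifier(n_neighbors=5).fit(X_train, y_train)
print("test accuracy:", knn.score(X_test, y_test))
```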
classification is a machine learning technique that could assist in creating dynamic user profiles, increase scalability and ultimately improve recommendation accuracy. however, heterogeneity, limited content analysis and high dimensionality of available e-commerce datasets make product classification a difficult problem. in this present study, we propose an enhanced product image classification architecture which has data acquisition pre-processing, feature extraction, dimensionality reduction and ensemble of machine learning methods as components. core amongst these components is the eigenvector based fusion algorithm that is meant to obtain dimensionality reduced eigen colour feature from the histogram of oriented gradient based colour image representative features. the ensembles of artificial neural network and support vector machine were trained with the eigen colour feature to classify product images acquired from the pi100 corpus into 100 classes and their classification accuracies were compared. we have obtained a state-of-the-art classification accuracy of 87.2% with the artificial neural network ensemble which is an impressive result when compared to existing results reported by other authors who have utilised the pi100 corpus. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/265.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/265.txt new file mode 100644 index 0000000000000000000000000000000000000000..c2eb5e7057458b0efd67e76b10d0a0e1e8e85365 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/265.txt @@ -0,0 +1 @@ + keratoconus affects approximately one in 2,000 individuals worldwide. it is typically associated with the decrease in visual acuity. given its wide prevalence, there is an unmet need for the development of new tools that can diagnose the disease at an early stage in order to prevent disease progression and vision loss. the aim of this study is to develop and test a machine learning algorithm that can detect keratoconus at early stages. we applied several machine learning algorithms to detect keratoconus and then tested the algorithms using real world medical data, including corneal topography, elevation, and pachymetry parameters collected from oct-based topography instruments from several corneal clinics in japan. we implemented 25 different machine learning models in matlab and achieved a range of 62% to 94.0% accuracy. the highest accuracy level of 94% was obtained by a support vector machine (svm) algorithm using a subset of eight corneal parameters with the highest discriminating power. the proposed model may aid physicians in assessing corneal status and detecting keratoconus, which is otherwise challenging through subjective evaluations, particularly at the preclinical and early stages of the disease. the algorithm can be integrated into corneal imaging devices or used as a stand-alone-software for cornea assessment and detecting early stage keratoconus. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/266.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/266.txt new file mode 100644 index 0000000000000000000000000000000000000000..a1b3cb7efc1131d275264c0425181eb68be5a9bd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/266.txt @@ -0,0 +1 @@ + we consider the problem of classifying documents not by topic, but by overall sentiment, e.g., determining whether a review is positive or negative. 
using movie reviews as data, we find that standard machine learning techniques definitively outperform human-produced baselines. however, the three machine learning methods we employed (naive bayes, maximum entropy classification, and support vector machines) do not perform as well on sentiment classification as on traditional topic-based categorization. we conclude by examining factors that make the sentiment classification problem more challenging. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/267.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/267.txt new file mode 100644 index 0000000000000000000000000000000000000000..7fd56699bf9c2f45699e1b81d33a895a89dcc5ca --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/267.txt @@ -0,0 +1 @@ + this paper proposes a (new) method to detect malware in android smartphones using api (application programming interface) classes. we use machine learning to classify whether an application is benign or malware. furthermore, we compare classification precision rate from machine learning. this research uses 51 apis package classes from 16 apis classes and employs cross validation and percentage split test to classify benign and malware using random forest, j48, and support vector machine algorithms. we use 412 total application samples (205 benign, 207 malware). we obtain that the classification precision average is 91.9%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/268.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/268.txt new file mode 100644 index 0000000000000000000000000000000000000000..61fded5a3fa8fca6ff398002ebd0aa7e348e8635 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/268.txt @@ -0,0 +1 @@ + in artificial intelligence (ai), the contents of an image are generated automatically which involves computer vision and nlp (natural language processing). the neural model which is regenerative, is created. it depends on computer vision and machine translation. this model is used to generate natural sentences which eventually describes the image. this model consists of convolutional neural network(cnn) as well as recurrent neural network(rnn). the cnn is used for feature extraction from image and rnn is used for sentence generation. the model is trained in such a way that if input image is given to model it generates captions which nearly describes the image. the accuracy of model and smoothness or command of language model learns from image descriptions is tested on different datasets. these experiments show that model is frequently giving accurate descriptions for an input image. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/269.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/269.txt new file mode 100644 index 0000000000000000000000000000000000000000..7a008ceaea8c868c9ee2afe1aadab36c116156c2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/269.txt @@ -0,0 +1 @@ + current end-to-end deep learning driving models have two problems: (1) poor generalization ability of unobserved driving environment when diversity of training driving dataset is limited (2) lack of accident explanation ability when driving models don't work as expected. 
to tackle these two problems, rooted in the belief that knowledge of an associated easy task is beneficial for addressing a difficult task, we propose a new driving model composed of a perception module (see and think) and a driving module (behave), and train it stepwise with multi-task perception-related basic knowledge and driving knowledge. specifically, the segmentation map and depth map (pixel-level understanding of images) were considered as what & where and how far knowledge for tackling easier driving-related perception problems before generating the final control commands for the difficult driving task. the results of experiments demonstrated the effectiveness of multi-task perception knowledge for better generalization and accident explanation ability. with our method, the average success rate of finishing the most difficult navigation tasks in the untrained city of the corl test surpassed the current benchmark method by 15 percent in trained weather and by 20 percent in untrained weather. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/27.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/27.txt new file mode 100644 index 0000000000000000000000000000000000000000..94f85ce3e503ed18ac333380c277941c7cf8d33e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/27.txt @@ -0,0 +1 @@ + training labels for graph embedding algorithms could be costly to obtain in many practical scenarios. active learning (al) algorithms are very helpful to obtain the most useful labels for training while keeping the total number of label queries under a certain budget. the existing active graph embedding framework proposes to use centrality score, density score, and entropy score to evaluate the value of unlabeled nodes, and it has been shown to be capable of bringing some improvement to the node classification tasks of graph convolutional networks. however, when evaluating the importance of unlabeled nodes, it fails to consider the influence of existing labeled nodes on the value of unlabeled nodes. in other words, given the same unlabeled node, the computed informative score is always the same and is agnostic to the labeled node set. with the aim of addressing this limitation, in this work, we introduce 3 dissimilarity-based information scores for active learning: feature dissimilarity score (fds), structure dissimilarity score (sds), and embedding dissimilarity score (eds). we find that these three scores are able to take the influence of the labeled set on the value of unlabeled candidates into consideration, boosting our al performance. according to experiments, our newly proposed scores boost the classification accuracy by 2.1% on average and are capable of generalizing to different graph neural network architectures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/270.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/270.txt new file mode 100644 index 0000000000000000000000000000000000000000..c4380f15769501a15c22e086066378ec9228d86a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/270.txt @@ -0,0 +1 @@ + there are several machine learning techniques that are used to perform predictive analytics over big data in various fields. predictive analytics in healthcare is a challenging task but ultimately can help practitioners make big data-informed timely decisions about a patient's health and treatment.
this paper discusses the predictive analytics in healthcare, six different machine learning algorithms are used in this research work. for experiment purpose, a dataset of patient's medical record is obtained and six different machine learning algorithms are applied on the dataset. performance and accuracy of the applied algorithms is discussed and compared. comparison of the different machine learning techniques used in this study reveals which algorithm is best suited for prediction of diabetes. this paper aims to help doctors and practitioners in early prediction of diabetes using machine learning techniques. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/271.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/271.txt new file mode 100644 index 0000000000000000000000000000000000000000..984ae77a1c59353854211409af2d9a3aa210c31b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/271.txt @@ -0,0 +1 @@ + deep neural networks (dnns) are powerful models that have achieved excellent performance on difficult learning tasks. although dnns work well whenever large labeled training sets are available, they cannot be used to map sequences to sequences. in this paper, we present a general end-to-end approach to sequence learning that makes minimal assumptions on the sequence structure. our method uses a multilayered long short-term memory (lstm) to map the input sequence to a vector of a fixed dimensionality, and then another deep lstm to decode the target sequence from the vector. our main result is that on an english to french translation task from the wmt'14 dataset, the translations produced by the lstm achieve a bleu score of 34.8 on the entire test set, where the lstm's bleu score was penalized on out-of-vocabulary words. additionally, the lstm did not have difficulty on long sentences. for comparison, a phrase-based smt system achieves a bleu score of 33.3 on the same dataset. when we used the lstm to rerank the 1000 hypotheses produced by the aforementioned smt system, its bleu score increases to 36.5, which is close to the previous best result on this task. the lstm also learned sensible phrase and sentence representations that are sensitive to word order and are relatively invariant to the active and the passive voice. finally, we found that reversing the order of the words in all source sentences (but not target sentences) improved the lstm's performance markedly, because doing so introduced many short term dependencies between the source and the target sentence which made the optimization problem easier. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/272.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/272.txt new file mode 100644 index 0000000000000000000000000000000000000000..7039e7f41e4bdb08c06c8c4a3752e940c4f65933 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/272.txt @@ -0,0 +1 @@ + flower classification is a challenging task due to the wide range of flower species, which have a similar shape, appearance or surrounding objects such as leaves and grass. in this study, the authors propose a novel two-step deep learning classifier to distinguish flowers of a wide range of species. first, the flower region is automatically segmented to allow localisation of the minimum bounding box around it. the proposed flower segmentation approach is modelled as a binary classifier in a fully convolutional network framework. 
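The healthcare abstract completed above (270.txt) compares six machine learning algorithms for diabetes prediction without giving code. A hedged sketch of that kind of comparison follows; synthetic data from make_classification stands in for the patient records, which are not part of this diff.

```python
# Sketch of comparing several classifiers with cross-validation, in the
# spirit of 270.txt; synthetic data replaces the real patient dataset.
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.model_selection import cross_val_score

X, y = make_classification(n_samples=768, n_features=8, random_state=0)

models = {
    "logistic_regression": LogisticRegression(max_iter=1000),
    "knn": KNeighborsClassifier(),
    "decision_tree": DecisionTreeClassifier(random_state=0),
    "random_forest": RandomForestClassifier(random_state=0),
    "svm": SVC(),
    "naive_bayes": GaussianNB(),
}

for name, model in models.items():
    scores = cross_val_score(model, X, y, cv=5)
    print(f"{name}: mean accuracy = {scores.mean():.3f}")
```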
second, they build a robust convolutional neural network classifier to distinguish the different flower types. they propose novel steps during the training stage to ensure robust, accurate and realtime classification. they evaluate their method on three well known flower datasets. their classification results exceed 97% on all datasets, which are better than the state-of-the-art in this domain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/273.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/273.txt new file mode 100644 index 0000000000000000000000000000000000000000..c873a12df0c7a0e245d2b0e8a93b277a3f3ab393 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/273.txt @@ -0,0 +1 @@ + lower back pain (lbp) is not a disease, but it is condition of spine, and now days it becomes very common irrespective of age. an expert system (es) is an intelligent tool used in medical field for various roles like prediction, diagnosing, interpreting. lbp can be caused by so many reasons and its identification in early stage will make the management of it very effective and also prevent it to become chronic. in this research, an advanced kernel is designed in support vector machine (svm) -supervised learning, gives more accurate results. after that the efficiency is compared with effectiveness of the different attributes from the dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/274.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/274.txt new file mode 100644 index 0000000000000000000000000000000000000000..b22a8af562fd59f705632be6e03ea05cfeaffe34 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/274.txt @@ -0,0 +1 @@ + phishing sites is the major problems for online security challenges because of large number of online transactions is done every day. the objective of the paper is to do survey about the phishing: a social attack and its detection and to make aware of the users who doesn't know about this major attack as many of them are still falling in the trap. most of the users are unaware about this problem; they unknowingly fill many forms that belong to phishing website which are hidden. this leads to the leaking of sensitive information of the victim. this study also gives brief knowledge about several machine learning techniques such as knn algorithm, naïve bayes, decision tree, support vector machines, neural network and random forest algorithm for predicting phishing sites. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/275.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/275.txt new file mode 100644 index 0000000000000000000000000000000000000000..3392e897b6a19ee6de601356e80c879408a2e404 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/275.txt @@ -0,0 +1 @@ + in addition to information, text contains attitudinal, and more specifically, emotional content. this paper explores the text-based emotion prediction problem empirically, using supervised machine learning with the snow learning architecture. the goal is to classify the emotional affinity of sentences in the narrative domain of children's fairy tales, for subsequent usage in appropriate expressive rendering of text-to-speech synthesis. 
initial experiments on a preliminary data set of 22 fairy tales show encouraging results over a naïve baseline and bow approach for classification of emotional versus non-emotional contents, with some dependency on parameter tuning. we also discuss results for a tripartite model which covers emotional valence, as well as feature set alternations. in addition, we present plans for a more cognitively sound sequential model, taking into consideration a larger set of basic emotions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/276.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/276.txt new file mode 100644 index 0000000000000000000000000000000000000000..7156f2ea8d6372d68d22511a7f1160378c3ab6b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/276.txt @@ -0,0 +1 @@ + this paper aims to develop a tool for predicting accurate and timely traffic flow information. traffic environment involves everything that can affect the traffic flowing on the road, whether it's traffic signals, accidents, rallies, even repairing of roads that can cause a jam. if we have prior information which is very near approximate about all the above and many more daily life situations which can affect traffic then, a driver or rider can make an informed decision. also, it helps in the future of autonomous vehicles. in the current decades, traffic data have been generating exponentially, and we have moved towards the big data concepts for transportation. available prediction methods for traffic flow use some traffic prediction models and are still unsatisfactory to handle real-world applications. this fact inspired us to work on the traffic flow forecast problem build on the traffic data and models.it is cumbersome to forecast the traffic flow accurately because the data available for the transportation system is insanely huge. in this work, we planned to use machine learning, genetic, soft computing, and deep learning algorithms to analyse the big-data for the transportation system with much-reduced complexity. also, image processing algorithms are involved in traffic sign recognition, which eventually helps for the right training of autonomous vehicles. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/277.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/277.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae2dfc5f206f8da08b4da61ba2e776a05620609b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/277.txt @@ -0,0 +1 @@ + in the present era of rapidly growing mature technologies and their inter-connection with hardware devices and software applications plays a vital role for the emergence of different sensor devices that are inter-connected through internet in order to establish interaction with the physical objects in the world. the technology with the interconnection of different devices through internet is termed as internet of things (iot). iot generates massive amount of data with respect to various characteristics and qualities of data. machine learning fusion with iot ensures the pervasive development to extend the intelligence of the iot devices and applications. the exposure of different smart iot applications with machine learning helps in observation, systematic analysis, processing and smart usages of the large volume of data in different fields. 
many industries are using the machine learning and more specifically, the machine learning-as -a-s ervice (mlaas ) to exploit the iot's potential. this present paper consists of machine learning basic introduction, machine learning algorithms, reviews of different researcher's study, various sensor devices and the various applications of machine learning algorithms with iot. and the last section of the paper consists of discussion and conclusion. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/278.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/278.txt new file mode 100644 index 0000000000000000000000000000000000000000..805ad12bb99a2c8f4a6b528a39ce5da550d47599 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/278.txt @@ -0,0 +1 @@ + chronic kidney disease (ckd) is a global health problem with high morbidity and mortality rate, and it induces other diseases. since there are no obvious symptoms during the early stages of ckd, patients often fail to notice the disease. early detection of ckd enables patients to receive timely treatment to ameliorate the progression of this disease. machine learning models can effectively aid clinicians achieve this goal due to their fast and accurate recognition performance. in this study, we propose a machine learning methodology for diagnosing ckd. the ckd data set was obtained from the university of california irvine (uci) machine learning repository, which has a large number of missing values. knn imputation was used to fill in the missing values, which selects several complete samples with the most similar measurements to process the missing data for each incomplete sample. missing values are usually seen in real-life medical situations because patients may miss some measurements for various reasons. after effectively filling out the incomplete data set, six machine learning algorithms (logistic regression, random forest, support vector machine, k-nearest neighbor, naive bayes classifier and feed forward neural network) were used to establish models. among these machine learning models, random forest achieved the best performance with 99.75% diagnosis accuracy. by analyzing the misjudgments generated by the established models, we proposed an integrated model that combines logistic regression and random forest by using perceptron, which could achieve an average accuracy of 99.83% after ten times of simulation. hence, we speculated that this methodology could be applicable to more complicated clinical data for disease diagnosis.index terms chronic kidney disease, machine learning, knn imputation, integrated model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/279.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/279.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c1c3b2f2c1f9bbae147f1e4b5cdc4176d62697c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/279.txt @@ -0,0 +1 @@ + using artificial neural networks is an important approach for drawing inferences and making predictions when analyzing large and complex data sets. tensorflow and pytorch are two widely-used machine learning frameworks that support artificial neural network models. we evaluated the relative effectiveness of these two frameworks to model a binary classification problem. the binary classification was done using sentiment analysis on a publicly-available data set of product reviews. 
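The chronic kidney disease abstract above (278.txt) fills missing measurements with KNN imputation before training its classifiers. A minimal scikit-learn sketch of that pipeline, on synthetic data with artificially injected missing values (the UCI CKD file itself is not reproduced here):

```python
# Sketch of KNN imputation followed by a classifier, as in 278.txt.
# Synthetic data with injected NaNs; not the UCI CKD dataset.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.impute import KNNImputer
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import make_pipeline
from sklearn.model_selection import cross_val_score

X, y = make_classification(n_samples=400, n_features=24, random_state=0)
rng = np.random.default_rng(0)
X[rng.random(X.shape) < 0.1] = np.nan   # simulate ~10% missing measurements

pipe = make_pipeline(KNNImputer(n_neighbors=5), RandomForestClassifier(random_state=0))
print("cv accuracy:", cross_val_score(pipe, X, y, cv=5).mean())
```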
we first implemented the same model in the same testing environment to see if we were able to achieve similar accuracy with both frameworks. we then compared the training time, memory usage, and ease of use of the two frameworks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/28.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/28.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a458cd800b0f33190ec8b3cc7b4b489380c2f67 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/28.txt @@ -0,0 +1 @@ + accurately forecasting the weather is an important task, as many real-world processes and decisions depend on future meteorological conditions. the neurips 2022 challenge entitled weather4cast poses the problem of predicting rainfall events for the next eight hours given the preceding hour of satellite observations as a context. motivated by the recent success of transformer-based architectures in computer vision, we implement and propose two methodologies based on this architecture to tackle this challenge. we find that ensembling different transformers with some baseline models achieves the best performance we could measure on the unseen test data. our approach has been ranked 3 rd in the competition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/280.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/280.txt new file mode 100644 index 0000000000000000000000000000000000000000..1e737e3f5180da830c6463a8f697b18256f28103 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/280.txt @@ -0,0 +1 @@ + this work seeks the possibility of generating the human face from voice solely based on the audio-visual data without any human-labeled annotations. to this end, we propose a multi-modal learning framework that links the inference stage and generation stage. first, the inference networks are trained to match the speaker identity between the two different modalities. then the trained inference networks cooperate with the generation network by giving conditional information about the voice. the proposed method exploits the recent development of gans techniques and generates the human face directly from the speech waveform making our system fully end-to-end. we analyze the extent to which the network can naturally disentangle two latent factors that contribute to the generation of a face imageone that comes directly from a speech signal and the other that is not related to itand explore whether the network can learn to generate natural human face image distribution by modeling these factors. experimental results show that the proposed network can not only match the relationship between the human face and speech, but can also generate the high-quality human face sample conditioned on its speech. finally, the correlation between the generated face and the corresponding speech is quantitatively measured to analyze the relationship between the two modalities. 
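The framework-comparison abstract completed above (279.txt) trains the same binary sentiment classifier in TensorFlow and PyTorch and compares training time and accuracy. A rough, hypothetical sketch of the PyTorch half of such a comparison, with random features standing in for the unspecified text pipeline:

```python
# Hypothetical sketch of the PyTorch side of a comparison like 279.txt:
# a tiny binary classifier, timed while training. Random features stand
# in for real bag-of-words review vectors.
import time
import torch

torch.manual_seed(0)
X = torch.randn(2000, 300)                             # stand-in review features
y = (X[:, 0] + 0.1 * torch.randn(2000) > 0).float()    # stand-in labels

model = torch.nn.Sequential(torch.nn.Linear(300, 64), torch.nn.ReLU(), torch.nn.Linear(64, 1))
loss_fn = torch.nn.BCEWithLogitsLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

start = time.perf_counter()
for epoch in range(20):
    optimizer.zero_grad()
    loss = loss_fn(model(X).squeeze(1), y)
    loss.backward()
    optimizer.step()
elapsed = time.perf_counter() - start

with torch.no_grad():
    accuracy = ((model(X).squeeze(1) > 0).float() == y).float().mean().item()
print(f"training time: {elapsed:.2f}s, train accuracy: {accuracy:.3f}")
```

An equivalent model and timing harness would then be written with the other framework so the two runs can be compared on the same data.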
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/281.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/281.txt new file mode 100644 index 0000000000000000000000000000000000000000..805ad12bb99a2c8f4a6b528a39ce5da550d47599 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/281.txt @@ -0,0 +1 @@ + chronic kidney disease (ckd) is a global health problem with high morbidity and mortality rate, and it induces other diseases. since there are no obvious symptoms during the early stages of ckd, patients often fail to notice the disease. early detection of ckd enables patients to receive timely treatment to ameliorate the progression of this disease. machine learning models can effectively aid clinicians achieve this goal due to their fast and accurate recognition performance. in this study, we propose a machine learning methodology for diagnosing ckd. the ckd data set was obtained from the university of california irvine (uci) machine learning repository, which has a large number of missing values. knn imputation was used to fill in the missing values, which selects several complete samples with the most similar measurements to process the missing data for each incomplete sample. missing values are usually seen in real-life medical situations because patients may miss some measurements for various reasons. after effectively filling out the incomplete data set, six machine learning algorithms (logistic regression, random forest, support vector machine, k-nearest neighbor, naive bayes classifier and feed forward neural network) were used to establish models. among these machine learning models, random forest achieved the best performance with 99.75% diagnosis accuracy. by analyzing the misjudgments generated by the established models, we proposed an integrated model that combines logistic regression and random forest by using perceptron, which could achieve an average accuracy of 99.83% after ten times of simulation. hence, we speculated that this methodology could be applicable to more complicated clinical data for disease diagnosis.index terms chronic kidney disease, machine learning, knn imputation, integrated model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/282.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/282.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9bc3015a028790292202b06b3febf88629b9bd3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/282.txt @@ -0,0 +1 @@ + this study presents a machine learning approach to accurately assess the quality of coconut sugar using rgb values. python and scikit-learn were used to run the following machine learning algorithms: artificial neural network (ann), stochastic gradient descent (sgd), k-nearest neighbors (k-nn) algorithm, support vector machine (svm), decision tree (dt) and random forest (rf). comparisons were made between the aforementioned machine learning algorithms by evaluating the accuracy and the average running time of each training model. results of the study show that the sgd is superior in terms of accuracy but falls short to k-nn and svc in terms of running time. in this fashion, a plot between the accuracy and the running time was made and it wasobserved that algorithms with higher accuracies correspondingly have also higher running times. 
by this very nature, experimental results show that the sgd holds merit in accurately assessing the coconut sugar quality, despite its expense in running time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/283.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/283.txt new file mode 100644 index 0000000000000000000000000000000000000000..c8730ba3c174d8024836db80333cb3ff0353f497 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/283.txt @@ -0,0 +1 @@ + we introduce a method for the generation of images from an input scene graph. the method separates between a layout embedding and an appearance embedding. the dual embedding leads to generated images that better match the scene graph, have higher visual quality, and support more complex scene graphs. in addition, the embedding scheme supports multiple and diverse output images per scene graph, which can be further controlled by the user. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/284.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/284.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5ee174671287ed2bf44c9ff2e98ea4ff972b4ff --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/284.txt @@ -0,0 +1 @@ + image recognition is widely used in the field of computer vision today. as a kind of image recognition, digit recognition is widely used. today, the online recognition technology in digit recognition is relatively mature while the offline recognition technology is not. this paper mainly introduces an offline recognition system for handwritten digits based on convolutional neural networks. the system uses the minst dataset as a training sample and pre-processes the picture with the opencv toolkit. then it uses lenet-5 in the convolutional neural network to extract the handwritten digit image features, repeatedly convolution pooling, and pull the result into a one-dimensional vector. and finally find the highest probability point to determine the result to achieve handwritten digit recognition with the softmax regression model. the application of this system can greatly reduce labor costs and improve work efficiency, which is of great significance in many fields. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/285.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/285.txt new file mode 100644 index 0000000000000000000000000000000000000000..cfbd950d8bbdc14c16eb72e537df1e436628be28 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/285.txt @@ -0,0 +1 @@ + aspect-based sentiment analysis (absa) has attracted increasing attention recently due to its broad applications.in existing absa datasets, most sentences contain only one aspect or multiple aspects with the same sentiment polarity, which makes absa task degenerate to sentence-level sentiment analysis.in this paper, we present a new large-scale multi-aspect multi-sentiment (mams) dataset, in which each sentence contains at least two different aspects with different sentiment polarities.the release of this dataset would push forward the research in this field.in addition, we propose simple yet effective capsnet and capsnet-bert models which combine the strengths of recent nlp advances.experiments on our new dataset show that the proposed model significantly outperforms the state-of-the-art baseline methods 1 . 
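The digit-recognition abstract above (284.txt) builds its classifier around LeNet-5 with a softmax output. A hedged sketch of a LeNet-5-style network for 28x28 MNIST digits follows; PyTorch is used purely for illustration, and this is not the authors' implementation.

```python
# Illustrative LeNet-5-style network for 28x28 digit images, in the
# spirit of 284.txt; not the authors' code.
import torch
import torch.nn as nn

class LeNet5(nn.Module):
    def __init__(self, num_classes: int = 10):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 6, kernel_size=5, padding=2),   # 28x28 -> 28x28
            nn.ReLU(), nn.MaxPool2d(2),                  # -> 14x14
            nn.Conv2d(6, 16, kernel_size=5),             # -> 10x10
            nn.ReLU(), nn.MaxPool2d(2),                  # -> 5x5
        )
        self.classifier = nn.Sequential(
            nn.Flatten(),
            nn.Linear(16 * 5 * 5, 120), nn.ReLU(),
            nn.Linear(120, 84), nn.ReLU(),
            nn.Linear(84, num_classes),                  # logits; softmax is applied in the loss
        )

    def forward(self, x):
        return self.classifier(self.features(x))

# quick shape check on a dummy batch
logits = LeNet5()(torch.randn(8, 1, 28, 28))
print(logits.shape)  # torch.Size([8, 10])
```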
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/286.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/286.txt new file mode 100644 index 0000000000000000000000000000000000000000..6dcb29a58159d74821b9356baa444e85272b73ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/286.txt @@ -0,0 +1 @@ + this study is devoted to proposing a useful intelligent prediction model to distinguish the severity of covid-19, to provide a more fair and reasonable reference for assisting clinical diagnostic decision-making. based on patients' necessary information, pre-existing diseases, symptoms, immune indexes, and complications, this paper proposes a prediction model using the harris hawks optimization (hho) to optimize the fuzzy k-nearest neighbor (fknn), which is called hho-fknn. this model is utilized to distinguish the severity of covid-19. in hho-fknn, the purpose of introducing hho is to optimize the fknn's optimal parameters and feature subsets simultaneously. also, based on actual covid-19 data, we conducted a comparative experiment between hho-fknn and several well-known machine learning algorithms, which result shows that not only the proposed hho-fknn can obtain better classification performance and higher stability on the four indexes but also screen out the key features that distinguish severe covid-19 from mild covid-19. therefore, we can conclude that the proposed hho-fknn model is expected to become a useful tool for covid-19 prediction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/287.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/287.txt new file mode 100644 index 0000000000000000000000000000000000000000..e83b295ffdd0e029a29e937376f5ee2dd45b6d2d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/287.txt @@ -0,0 +1 @@ + video-game players generate huge amounts of data, as everything they do within a game is recorded. in particular, among all the stored actions and behaviors, there is information on the in-game purchases of virtual products. such information is of critical importance in modern free-to-play titles, where gamers can select or buy a profusion of items during the game in order to progress and fully enjoy their experience. to try to maximize these kind of purchases, one can use a recommendation system so as to present players with items that might be interesting for them. such systems can better achieve their goal by employing machine learning algorithms that are able to predict the rating of an item or product by a particular user. in this paper we evaluate and compare two of these algorithms, an ensemble-based model (extremely randomized trees) and a deep neural network, both of which are promising candidates for operational video-game recommender engines. item recommenders can help developers improve the game. but, more importantly, it should be possible to integrate them into the game, so that users automatically get personalized recommendations while playing. the presented models are not only able to meet this challenge, providing accurate predictions of the items that a particular player will find attractive, but also sufficiently fast and robust to be used in operational settings. 
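The recommender abstract completed above (287.txt) evaluates an extremely-randomized-trees model for predicting how a player will rate an in-game item. A small sketch of that idea with scikit-learn's ExtraTreesRegressor on synthetic player/item features (the game telemetry is not available here, so every feature below is hypothetical):

```python
# Sketch of an extremely-randomized-trees rating predictor, as evaluated
# in 287.txt; synthetic player/item features stand in for game telemetry.
import numpy as np
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
n = 5000
X = np.column_stack([
    rng.integers(0, 100, n),      # hypothetical player level
    rng.random(n),                # hypothetical playtime share in this game mode
    rng.integers(0, 50, n),       # hypothetical item category id
    rng.random(n),                # hypothetical normalized item price
])
ratings = 2 + 0.02 * X[:, 0] + 2 * X[:, 1] - X[:, 3] + rng.normal(0, 0.3, n)

X_tr, X_te, y_tr, y_te = train_test_split(X, ratings, random_state=0)
model = ExtraTreesRegressor(n_estimators=200, random_state=0).fit(X_tr, y_tr)
print("R^2 on held-out interactions:", round(model.score(X_te, y_te), 3))
```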
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/288.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/288.txt new file mode 100644 index 0000000000000000000000000000000000000000..25dc848250025d7a823c1fcf054a190613b47579 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/288.txt @@ -0,0 +1 @@ + the agriculture sector is essential for every country because it provides a basic income to a large number of people and food as well, which is a fundamental requirement to survive on this planet. we see as time passes, significant changes come in the present era, which begins with green revolution. due to improper knowledge of plant diseases, farmers use fertilizers in excess, which ultimately degrade the quality of food. earlier farmers use experts to determine the type of plant disease, which was expensive and time-consuming. in today's time, image processing is used to recognize and catalog plant diseases using the lesion region of plant leaf, and there are different modus-operandi for plant disease scent from leaf using neural networks (nn), support vector machine (svm), and others. in this paper, we improving the architecture of the neural networking by working on ten different types of training algorithms and the proper choice of neurons in the concealed layer. our proposed approach gives 98.30% accuracy on general plant leaf disease and 100% accuracy on specific plant leaf disease based on bayesian regularization, automation of cluster and without over-fitting on considered plant diseases over various other implemented methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/289.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/289.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4482375182e84dbe374ed5f9f65452d974965f8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/289.txt @@ -0,0 +1 @@ + this paper describes a machine learning approach for visual object detection which is capable of processing images extremely rapidly and achieving high detection rates. this work is distinguished by three key contributions. the first is the introduction of a new image representation called the "integral image" which allows the features used by our detector to be computed very quickly. the second is a learning algorithm, based on adaboost, which selects a small number of critical visual features from a larger set and yields extremely efficient classifiers . the third contribution is a method for combining increasingly more complex classifiers in a "cascade" which allows background regions of the image to be quickly discarded while spending more computation on promising object-like regions. the cascade can be viewed as an object specific focus-of-attention mechanism which unlike previous approaches provides statistical guarantees that discarded regions are unlikely to contain the object of interest. in the domain of face detection the system yields detection rates comparable to the best previous systems. used in real-time applications, the detector runs at 15 frames per second without resorting to image differencing or skin color detection. 
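The "integral image" mentioned in 289.txt is the key trick that makes Haar-like features cheap to evaluate: any rectangular sum of pixels can be read off with at most four lookups. A small numpy illustration of that idea (not the authors' implementation) is sketched below.

```python
# Integral image and constant-time rectangle sums (illustration only).
import numpy as np

def integral_image(img):
    # cumulative sum over rows, then over columns
    return img.cumsum(axis=0).cumsum(axis=1)

def rect_sum(ii, r0, c0, r1, c1):
    # sum of img[r0:r1+1, c0:c1+1] using four lookups in the integral image ii
    total = ii[r1, c1]
    if r0 > 0:
        total -= ii[r0 - 1, c1]
    if c0 > 0:
        total -= ii[r1, c0 - 1]
    if r0 > 0 and c0 > 0:
        total += ii[r0 - 1, c0 - 1]
    return total

img = np.arange(25, dtype=np.int64).reshape(5, 5)
ii = integral_image(img)
assert rect_sum(ii, 1, 1, 3, 3) == img[1:4, 1:4].sum()
```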
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/29.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/29.txt new file mode 100644 index 0000000000000000000000000000000000000000..a09a851ee7029d509aaf73cba6069ddd535617f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/29.txt @@ -0,0 +1 @@ + solving the challenges of automatic machine translation of building automation system text metadata is a crucial first step in efficiently deploying smart building applications. the vocabulary used to describe building metadata appears small compared to general natural languages, but each term has multiple commonly used abbreviations. conventional machine learning techniques are inefficient since they need to learn many different forms for the same word, and large amounts of data must be used to train these models. it is also difficult to apply standard techniques such as tokenisation since this commonly results in multiple output tags being associated with a single input token, something traditional sequence labelling models do not allow. finite state transducers can model sequence-to-sequence tasks where the input and output sequences are different lengths, and they can be combined with language models to ensure a valid output sequence is generated. we perform a preliminary analysis into the use of transducer-based language models to parse and normalise building point metadata. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/290.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/290.txt new file mode 100644 index 0000000000000000000000000000000000000000..b01261a64e22a0c768edd4e0baceb1437153ccb2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/290.txt @@ -0,0 +1 @@ + neural sequence-to-sequence models have provided a viable new approach for abstractive text summarization (meaning they are not restricted to simply selecting and rearranging passages from the original text). however, these models have two shortcomings: they are liable to reproduce factual details inaccurately, and they tend to repeat themselves. in this work we propose a novel architecture that augments the standard sequence-to-sequence attentional model in two orthogonal ways. first, we use a hybrid pointer-generator network that can copy words from the source text via pointing, which aids accurate reproduction of information, while retaining the ability to produce novel words through the generator. second, we use coverage to keep track of what has been summarized, which discourages repetition. we apply our model to the cnn / daily mail summarization task, outperforming the current abstractive state-of-the-art by at least 2 rouge points. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/291.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/291.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ffeadb15e0a8787c783b41ad9789d8b06737661 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/291.txt @@ -0,0 +1 @@ + internet of things is making objects smarter and more autonomous. at the other side, online education is gaining momentum and many universities are now offering online degrees. content preparation for such programs usually involves recording the classes. in this article, we intend to introduce a deep learning-based camera management system as a substitute for the academic filming crew. 
the solution mainly consists of two cameras and a wearable gadget for the instructor. the fixed camera is used for the instructor's position and pose detection and the pantilt-zoom (ptz) camera does the filming. in the proposed solution, image processing and deep learning techniques are merged together. face recognition and skeleton detection algorithms are used to detect the position of instructor. but the main contribution lies in the application of deep learning for instructor's skeleton detection and postprocessing of the deep network output for correction of the pose detection results using a bayesian maximum a posteriori (map) estimator. this estimator is defined on a markov state machine. the pose detection result along with the position info is then used by the ptz camera controller for filming purposes. the proposed solution is implemented by using openpose which is a convolutional neural network for detection of body parts. feeding a neural network pose classifier with 12 features extracted from the output of the deep network yields an accuracy of 89%. however, as we show, the accuracy can be improved by the markov model and map estimator to reach as high as 95.5%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/292.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/292.txt new file mode 100644 index 0000000000000000000000000000000000000000..4812ef3282507f4f5d05d23743eb8093f969728f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/292.txt @@ -0,0 +1 @@ + as the widespread use of computers and the high-speed development of the internet, e-commerce has already penetrated as a part of our daily life. for a popular product, there are a large number of reviews. this makes it difficult for a potential customer to make an informed decision on purchasing the product, as well as for the manufacturer of the product to keep track and to manage customer opinions. in this paper, we pay attention to online hotel reviews, and propose a supervised machine learning approach using unigram feature with two types of information (frequency and tf-idf) to realize polarity classification of documents. as shown in our experimental results, the information of tf-idf is more effective than frequency. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/293.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/293.txt new file mode 100644 index 0000000000000000000000000000000000000000..4f5c420a29fad1a2342e844762e742887504338c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/293.txt @@ -0,0 +1 @@ + credit cards are very commonly used in making online payments. in recent years' frauds are reported which are accomplished using credit cards. it is very difficult to detect and prevent the fraud which is accomplished using credit card. machine learning(ml) is an artificial intelligence (ai) technique which is used to solve many problems in science and engineering. in this paper, machine learning algorithms are applied on a data set of credit cards frauds and the power of three machine learning algorithms is compared to detect the frauds accomplished using credit cards. the accuracy of random forest machine learning algorithm is best as compared to decision tree and xgboost algorithms. 
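A comparison harness in the spirit of 293.txt (several classifiers on a credit-card fraud table) could look like the sketch below. The CSV path and the "Class" label column are placeholders, and scikit-learn's GradientBoostingClassifier stands in for the XGBoost model named in the abstract so the example needs only one library.

```python
# Sketch: compare tree-based classifiers on a fraud dataset (paths/columns assumed).
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.metrics import accuracy_score

df = pd.read_csv("creditcard.csv")              # hypothetical dataset file
X, y = df.drop(columns=["Class"]), df["Class"]  # "Class" = fraud label (assumed)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, stratify=y, random_state=0)

models = {
    "decision tree": DecisionTreeClassifier(random_state=0),
    "random forest": RandomForestClassifier(n_estimators=200, random_state=0),
    "gradient boosting (XGBoost stand-in)": GradientBoostingClassifier(random_state=0),
}
for name, clf in models.items():
    clf.fit(X_tr, y_tr)
    print(name, accuracy_score(y_te, clf.predict(X_te)))
```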
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/294.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/294.txt new file mode 100644 index 0000000000000000000000000000000000000000..a7bb73193639f3152b3cd9f159439906fcf90cb8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/294.txt @@ -0,0 +1 @@ + this paper focuses on the detection of objects with lambertian surface under both varying illumination and pose. we offer to apply a novel detection method that proceeds by modeling the different illuminations from a small number of images in the training set; this automatically voids the illumination effects, allowing fast illumination invariant detection, without having to create a large training set. it is demonstrated that the method "fits in" nicely with previous work about the modeling of the set of object appearances under varying illumination.in the experiments, an object was correctly detected under image plane rotations in a 45-degrees range, and a wide variety of different illuminations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/295.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/295.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b40014502011f3d60434b0cdf66bf04137b4cf6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/295.txt @@ -0,0 +1 @@ + data-driven approaches for edge detection have proven effective and achieve top results on modern benchmarks. however, all current data-driven edge detectors require manual supervision for training in the form of hand-labeled region segments or object boundaries. specifically, human annotators mark semantically meaningful edges which are subsequently used for training. is this form of strong, high-level supervision actually necessary to learn to accurately detect edges? in this work we present a simple yet effective approach for training edge detectors without human supervision. to this end we utilize motion, and more specifically, the only input to our method is noisy semi-dense matches between frames. we begin with only a rudimentary knowledge of edges (in the form of image gradients), and alternate between improving motion estimation and edge detection in turn. using a large corpus of video data, we show that edge detectors trained using our unsupervised scheme approach the performance of the same methods trained with full supervision (within 3-5%). finally, we show that when using a deep network for the edge detector, our approach provides a novel pre-training scheme for object detection. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/296.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/296.txt new file mode 100644 index 0000000000000000000000000000000000000000..84ead00bde6e73d915d1b1f4877a2187d5ba5a94 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/296.txt @@ -0,0 +1 @@ + team sports can be viewed as dynamical systems unfolding in time and thus require tools and approaches congruent to the analysis of dynamical systems. the analysis of the pattern-forming dynamics of player interactions can uncover the clues to underlying tactical behaviour. this study aims to propose quantitative measures of a team's performance derived only using player interactions. concretely, we segment the data into events ending with a goal attempt, that is, ''shot''.
using the acquired sequences of events, we develop a coarse-grain activity model representing a player-to-player interaction network. we derive measures based on information theory and total interaction activity, to demonstrate an association with an attempt to score. in addition, we developed a novel machine learning approach to predict the likelihood of a team making an attempt to score during a segment of the match. our developed prediction models showed an overall accuracy of 75.2% in predicting the correct segmental outcome from 13 matches in our dataset. the overall predicted winner of a match correlated with the true match outcome in 66.6% of the matches that ended in a result. furthermore, the algorithm was evaluated on the largest available open collection of soccer logs. the algorithm showed an accuracy of 0.84 in the classification of the 42, 860 segments from 1, 941 matches and correctly predicted the match outcome in 81.9% of matches that ended in a result. the proposed measures of performance offer an insight into the underlying performance characteristics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/297.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/297.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab43e55678b52919024e422cdffb2f58ff6083ad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/297.txt @@ -0,0 +1 @@ + there have been many studies in which researchers have attempted to classify student attentiveness. many of these approaches depended on a qualitative analysis and lacked any quantitative analysis. therefore, this work is focused on bridging the gap between qualitative and quantitative approaches to classify student attentiveness. thus, this research applies machine learning algorithms (k-means and svm) to automatically classify students as attentive or inattentive using data from a consumer rgb-d sensor. results of this research can be used to improve teaching strategies for instructors at all levels and can aid instructors in implementing personalized learning systems, which is a national academy of engineering grand challenge. this research applies machine learning algorithms to an educational setting. data from these algorithms can be used by instructors to provide valuable feedback on the effectiveness of their instructional strategies and pedagogies. instructors can use this feedback to improve their instructional strategies; and students will benefit by achieving improved learning and subject mastery. ultimately, this will result in the students' increased ability to do work in their respective areas. broadly, this work can help advance efforts in many areas of education and instruction. it is expected that improving instructional strategies and implementing personalized learning will help create more competent, capable, and prepared persons available for the future workforce. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/298.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/298.txt new file mode 100644 index 0000000000000000000000000000000000000000..e2295f5ef19a09ff091b73fc4d8c9cb825907b46 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/298.txt @@ -0,0 +1 @@ + when an agent acquires new information, ideally it would immediately be capable of using that information to understand its environment. 
this is not possible using conventional deep neural networks, which suffer from catastrophic forgetting when they are incrementally updated, with new knowledge overwriting established representations. a variety of approaches have been developed that attempt to mitigate catastrophic forgetting in the incremental batch learning scenario, where a model learns from a series of large collections of labeled samples. however, in this setting, inference is only possible after a batch has been accumulated, which prohibits many applications. an alternative paradigm is online learning in a single pass through the training dataset on a resource constrained budget, which is known as streaming learning. streaming learning has been much less studied in the deep learning community. in streaming learning, an agent learns instances one-by-one and can be tested at any time, rather than only after learning a large batch. here, we revisit streaming linear discriminant analysis, which has been widely used in the data mining research community. by combining streaming linear discriminant analysis with deep learning, we are able to outperform both incremental batch learning and streaming learning algorithms on both ima-genet ilsvrc-2012 and core50, a dataset that involves learning to classify from temporally ordered samples 1 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/299.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/299.txt new file mode 100644 index 0000000000000000000000000000000000000000..9901eb5adb38f1b17c6c1a16aaa8c739da96b1f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/299.txt @@ -0,0 +1 @@ + recent studies have shown that reinforcement learning (rl) is an effective approach for improving the performance of neural machine translation (nmt) system. however, due to its instability, successfully rl training is challenging, especially in real-world systems where deep models and large datasets are leveraged. in this paper, taking several largescale translation tasks as testbeds, we conduct a systematic study on how to train better nmt models using reinforcement learning. we provide a comprehensive comparison of several important factors (e.g., baseline reward, reward shaping) in rl training. furthermore, to fill in the gap that it remains unclear whether rl is still beneficial when monolingual data is used, we propose a new method to leverage rl to further boost the performance of nmt systems trained with source/target monolingual data. by integrating all our findings, we obtain competitive results on wmt14 english-german, wmt17 english-chinese, and wmt17 chinese-english translation tasks, especially setting a state-of-theart performance on wmt17 chinese-english translation task. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/3.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/3.txt new file mode 100644 index 0000000000000000000000000000000000000000..28d8eacaf024274f516dff30ba54c2641f322144 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/3.txt @@ -0,0 +1 @@ + efficient motion planning algorithms are of central importance for deploying robots in the real world. unfortunately, these algorithms often drastically reduce the dimensionality of the problem for the sake of feasibility, thereby foregoing optimal solutions. this limitation is most readily observed in agile robots, where the solution space can have multiple additional dimensions. 
optimal control approaches partially solve this problem by finding optimal solutions without sacrificing the complexity of the environment, but do not meet the efficiency demands of real-world applications. this work proposes an approach to resolve these issues simultaneously by training a machine learning model on the outputs of an optimal control approach. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/30.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/30.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0d6a00b0304dbc537dab6c1847791bffc1cb9ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/30.txt @@ -0,0 +1 @@ + rather than augmenting rewards with penalties for undesired behavior, constrained partially observable markov decision processes (cpomdps) plan safely by imposing inviolable hard constraint value budgets. previous work performing online planning for cpomdps has only been applied to discrete action and observation spaces. in this work, we propose algorithms for online cpomdp planning for continuous state, action, and observation spaces by combining dual ascent with progressive widening. we empirically compare the effectiveness of our proposed algorithms on continuous cpomdps that model both toy and real-world safety-critical problems. additionally, we compare against the use of online solvers for continuous unconstrained pomdps that scalarize cost constraints into rewards, and investigate the effect of optimistic cost propagation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/300.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/300.txt new file mode 100644 index 0000000000000000000000000000000000000000..60701dff848ef807ba12f8ffdb96520caa0e0abf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/300.txt @@ -0,0 +1 @@ + figure 1: faceforensics++ is a dataset of facial forgeries that enables researchers to train deep-learning-based approaches in a supervised fashion. the dataset contains manipulations created with four state-of-the-art methods, namely, face2face, faceswap, deepfakes, and neuraltextures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/301.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/301.txt new file mode 100644 index 0000000000000000000000000000000000000000..b8a7ca3e8ee2ed3fda111345db5e84aec90a70fd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/301.txt @@ -0,0 +1 @@ + cross-site scripting (xss) is an attack most often carried out by attackers to attack a website by inserting malicious scripts into a website. this attack will take the user to a webpage that has been specifically designed to retrieve user sessions and cookies. nearly 68% of websites are vulnerable to xss attacks. in this study, the authors conducted a study by evaluating several machine learning methods, namely support vector machine (svm), k-nearest neighbour (knn), and naïve bayes (nb). the machine learning algorithm is then equipped with the n-gram method to each script feature to improve the detection performance of xss attacks. the simulation results show that the svm and n-gram method achieves the highest accuracy with 98%. 
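For the n-gram plus SVM combination described in 301.txt (character n-gram features over script text, classified as XSS or benign), a minimal scikit-learn sketch follows. The toy samples and labels are invented purely for illustration; the abstract does not publish its data.

```python
# Sketch: character n-gram TF-IDF features + linear SVM for XSS detection (toy data).
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.svm import LinearSVC
from sklearn.pipeline import make_pipeline

samples = [
    "<script>document.location='http://evil/?c='+document.cookie</script>",  # malicious
    "<a href='/home'>home</a>",                                              # benign
    "<img src=x onerror=alert(1)>",                                          # malicious
    "<p>hello world</p>",                                                    # benign
]
labels = [1, 0, 1, 0]

clf = make_pipeline(
    TfidfVectorizer(analyzer="char_wb", ngram_range=(2, 4)),  # character n-grams
    LinearSVC(),
)
clf.fit(samples, labels)
print(clf.predict(["<script>alert('xss')</script>"]))
```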
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/302.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/302.txt new file mode 100644 index 0000000000000000000000000000000000000000..550b09a4a4a72fe082ee1190ff5ff2a34ead9d86 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/302.txt @@ -0,0 +1 @@ + offensive content is pervasive in social media and a reason for concern to companies and government organizations. several studies have been recently published investigating methods to detect the various forms of such content (e.g. hate speech, cyberbulling, and cyberaggression). the clear majority of these studies deal with english partially because most annotated datasets available contain english data. in this paper, we take advantage of english data available by applying cross-lingual contextual word embeddings and transfer learning to make predictions in languages with less resources. we project predictions on comparable data in bengali, hindi, and spanish and we report results of 0.8415 f1 macro for bengali, 0.8568 f1 macro for hindi, and 0.7513 f1 macro for spanish. finally, we show that our approach compares favorably to the best systems submitted to recent shared tasks on these three languages, confirming the robustness of cross-lingual contextual embeddings and transfer learning for this task. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/303.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/303.txt new file mode 100644 index 0000000000000000000000000000000000000000..152f876679642d5cf0dca74bcf18f082479a3fe9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/303.txt @@ -0,0 +1 @@ + breast cancer is the most common cancer occurring in women and is estimated to be 270,000 new cases diagnosed in 2019.this is the reason detection software is needed to detect it before it gets fatal.by the use of machine learning algorithms, software can be made to detect this dangerous cancer and treat it before it can cause fatality of the patient.it is also the most frequently occurring cancer among indian women and the chance for survival of a woman suffering from breast cancer is 50%.for breast cancer detection many machine learning algorithms can be used.in this paper, 2 machine learning algorithms is proposed to compare namely logistic regression and decision tree algorithm on the wisconsin (diagnostic) data set and use the algorithm with the best accuracy for predicting breast cancer. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/304.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/304.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0766400c8e61e32f0f56bb4277259d7ed6c0d94 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/304.txt @@ -0,0 +1 @@ + in this paper, we propose a novel approach to video captioning based on adversarial learning and long-short term memory (lstm). with this solution concept we aim at compensating for the deficiencies of lstm-based video captioning methods that generally show potential to effectively handle temporal nature of video data when generating captions, but that also typically suffer from exponential error accumulation. 
specifically, we adopt a standard generative adversarial network (gan) architecture, characterized by an interplay of two competing processes: a "generator", which generates textual sentences given the visual content of a video, and a "discriminator" which controls the accuracy of the generated sentences. the discriminator acts as an "adversary" towards the generator and with its controlling mechanism helps the generator to become more accurate. for the generator module, we take an existing video captioning concept using lstm network. for the discriminator, we propose a novel realization specifically tuned for the video captioning problem and taking both the sentences and video features as input. this leads to our proposed lstm-gan system architecture, for which we show experimentally to significantly outperform the existing methods on standard public datasets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/305.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/305.txt new file mode 100644 index 0000000000000000000000000000000000000000..340431dc557ce8240419afebd4d12886d0bf3703 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/305.txt @@ -0,0 +1 @@ + code clones defined as sequence of source code that occur more than once in the same program or across different programs are undesirable as they increase the size of program and creates the problems of redundancy. fixing of bugs detected in one clone require detection of all clones. hence, it is imperative to identify and remove all code clones in a program. the focus of previous research work on the code clone detection was to find identical clones, or clones that are identical up to identifiers and literal values. but, detection of similar clones is often important. in the present paper it is proposed to generate the feature sets after parsing the given c program for code fragments and then match their similarity. on the basis of feature sets the classification of algorithm is being performed by using the support vector machine (svm) as a machine learning tool. the output of the machine tool would be the similarity ratio with which the two c programs are related to each other and also the class in which they would occur. it was observed that the test results of the tool implementation show detection of code clones in the program and its accuracy increases with the increase in number of instances. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/306.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/306.txt new file mode 100644 index 0000000000000000000000000000000000000000..a43ea81e54544d6d0c0b67a0a015c017f95a6026 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/306.txt @@ -0,0 +1 @@ + this paper presents a literature review of driver drowsiness detection based on behavioral measures using machine learning techniques. faces contain information that can be used to interpret levels of drowsiness. there are many facial features that can be extracted from the face to infer the level of drowsiness. these include eye blinks, head movements and yawning. however, the development of a drowsiness detection system that yields reliable and accurate results is a challenging task as it requires accurate and robust algorithms. a wide range of techniques has been examined to detect driver drowsiness in the past. 
the recent rise of deep learning requires that these algorithms be revisited to evaluate their accuracy in detection of drowsiness. as a result, this paper reviews machine learning techniques which include support vector machines, convolutional neural networks and hidden markov models in the context of drowsiness detection. furthermore, a meta-analysis is conducted on 25 papers that use machine learning techniques for drowsiness detection. the analysis reveals that support vector machine technique is the most commonly used technique to detect drowsiness, but convolutional neural networks performed better than the other two techniques. finally, this paper lists publicly available datasets that can be used as benchmarks for drowsiness detection. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/307.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/307.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a485eb66612c4337ebf5d97e6ff40649d6f5945 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/307.txt @@ -0,0 +1 @@ + the primary right of voting in the elections is the fundamental yardstick of a democratic citizen. during the modern era, electronic voting machine (evm) has been introduced which has marked a significant change in the conventional voting system in india replacing the ballot papers and boxes which were used earlier. previously, the ballot papers used to consume a lot of time, due to the malpractices like boothcapturing and ballot-box stuffing, leading to more disputes and delayed results announcement. in this paper, we propose a evm system which captures the facial image of a voter through a deep cnn based face recognizer, verifies it with the pre-captured images in the database, the result being positive, assumes the voter is a valid one, asks him to cast his vote for a political party. after voting, the facial alignment of the voter gets deleted from the system, ensuring the voter to vote for only once. this article describes the methodology adopted towards building the system and compares the performance of the face-recognizer (99.1%) against a baseline. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/308.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/308.txt new file mode 100644 index 0000000000000000000000000000000000000000..48aaa77d69cba345e397c635e0144c7daecdf6f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/308.txt @@ -0,0 +1 @@ + in the present world, the prediction of the results of football matches is being done by both football experts and machines. football as a game produces a huge amount of statistical data about the players of the team, the matches played between the teams, the environment in which the match is being played. this statistical data can be exploited using various machine learning techniques to predict various information related to a particular football match namely the result of a particular game, injury of a player, performance of a player in a particular match, spotting new talents in the game etc. we in this project will attempt to design a prediction system powered by machine recurrent neural networks and lstms. 
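A bare-bones version of the LSTM-based match-outcome predictor proposed in 308.txt might be structured as below: each sample is a short sequence of per-match statistics and the model outputs win/draw/loss. All shapes and the random training data are placeholders, since the abstract does not specify features.

```python
# Sketch: LSTM over a window of past-match statistics predicting the next result.
import numpy as np
import tensorflow as tf
from tensorflow.keras import layers, models

n_matches, seq_len, n_feats = 500, 5, 12          # assumed dimensions
X = np.random.rand(n_matches, seq_len, n_feats).astype("float32")
y = np.random.randint(0, 3, size=n_matches)       # 0=loss, 1=draw, 2=win (placeholder labels)

model = models.Sequential([
    layers.LSTM(32, input_shape=(seq_len, n_feats)),
    layers.Dense(3, activation="softmax"),
])
model.compile(optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"])
model.fit(X, y, epochs=5, batch_size=32, verbose=0)
```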
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/309.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/309.txt new file mode 100644 index 0000000000000000000000000000000000000000..02420c438fdcc79e2854dc919f5644f43ed616ed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/309.txt @@ -0,0 +1 @@ + ms. pac-man is a popular chasing and evading game and the ghost character in the game is controlled by script. this article evolved an evolutionary neural network for the red ghost to chase pac-man. red ghost's position, pac-man's position and pac-man's state are considered to be the inputs of the neural network, and the output is the direction of red ghost to move in the next step. we also proposed a fitness function to raise capture ability in evolution so that the red ghost learns by itself in simulation. experimental results show that the agent learns well and plays better in teamwork than the traditional script controlled ghost. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/31.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/31.txt new file mode 100644 index 0000000000000000000000000000000000000000..0e9031d62bf650fc0c37a31de2fd9990059aac29 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/31.txt @@ -0,0 +1 @@ + this paper realizes the estimation of classroom occupancy by using the co2 sensor and deep learning technique named long-short-term memory. as a case of connection with iot and machine learning, we achieve the model to estimate the people number in the classroom based on the environmental data exported from the co2 sensor, we also evaluate the performance of the model to show the feasibility to apply our module to the real environment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/310.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/310.txt new file mode 100644 index 0000000000000000000000000000000000000000..257c77f8c0d59a57cbbe3ba0494a8111ea7c0028 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/310.txt @@ -0,0 +1 @@ + predicting and detection of heart disease has always been a critical and challenging task for healthcare practitioners. hospitals and other clinics are offering expensive therapies and operations to treat heart diseases. so, predicting heart disease at the early stages will be useful to the people around the world so that they will take necessary actions before getting severe. heart disease is a significant problem in recent times; the main reason for this disease is the intake of alcohol, tobacco, and lack of physical exercise. over the years, machine learning shows effective results in making decisions and predictions from the broad set of data produced by the health care industry. some of the supervised machine learning techniques used in this prediction of heart disease are artificial neural network (ann), decision tree (dt), random forest (rf), support vector machine (svm), naïve bayes (nb) and k-nearest neighbour algorithm. furthermore, the performances of these algorithms are summarized.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/311.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/311.txt new file mode 100644 index 0000000000000000000000000000000000000000..ff6b140ca2be690f9e13f08ee5f81c31d1d179f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/311.txt @@ -0,0 +1 @@ + in low and middle-income countries (lmics), efforts to eliminate the tuberculosis (tb) epidemic are challenged by the persistent social inequalities in health, the limited number of local healthcare professionals, and the weak healthcare infrastructure found in resource-poor settings. the modern development of computer techniques has accelerated the tb diagnosis process. in this paper, we propose a novel method using convolutional neural network(cnn) to deal with unbalanced, less-category x-ray images. our method improves the accuracy for classifying multiple tb manifestations by a large margin. we explore the effectiveness and efficiency of shuffle sampling with cross-validation in training the network and find its outstanding effect in medical images classification. we achieve an 85.68% classification accuracy in a large tb image dataset, surpassing any state-of-art classification accuracy in this area. our methods and results show a promising path for more accurate and faster tb diagnosis in lmics healthcare facilities. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/312.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/312.txt new file mode 100644 index 0000000000000000000000000000000000000000..7eaad07b1aa435da915862a3fd468825ba9f24f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/312.txt @@ -0,0 +1 @@ + in this paper, we propose a novel controllable text-to-image generative adversarial network (controlgan), which can effectively synthesise high-quality images and also control parts of the image generation according to natural language descriptions. to achieve this, we introduce a word-level spatial and channel-wise attention-driven generator that can disentangle different visual attributes, and allow the model to focus on generating and manipulating subregions corresponding to the most relevant words. also, a word-level discriminator is proposed to provide fine-grained supervisory feedback by correlating words with image regions, facilitating training an effective generator which is able to manipulate specific visual attributes without affecting the generation of other content. furthermore, perceptual loss is adopted to reduce the randomness involved in the image generation, and to encourage the generator to manipulate specific attributes required in the modified text. extensive experiments on benchmark datasets demonstrate that our method outperforms existing state of the art, and is able to effectively manipulate synthetic images using natural language descriptions. code is available at https://github.com/mrlibw/controlgan. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/313.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/313.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb6d13489861cf1068d35fed7a9173738dea0084 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/313.txt @@ -0,0 +1 @@ + despite the significant advancement in wireless technologies over the years, ieee 802.11 still emerges as the de-facto standard to achieve the required short to medium range wireless device connectivity in anywhere from offices to homes. with it being ranked the highest among all deployed wireless technologies in terms of market adoption, vulnerability exploitation and attacks targeting it have also been commonly observed. ieee 802.11 security has thus become a key concern over the years. in this paper, we analysed the threats and attacks targeting the ieee 802.11 network and also identified the challenges of achieving accurate threat and attack classification, especially in situations where the attacks are novel and have never been encountered by the detection and classification system before. we then proposed a solution based on anomaly detection and classification using a deep learning approach. the deep learning approach self-learns the features necessary to detect network anomalies and is able to perform attack classification accurately. in our experiments, we considered the classification as a multi-class problem (that is, legitimate traffic, flooding type attacks, injection type attacks and impersonation type attacks), and achieved an overall accuracy of 98.6688% in classifying the attacks through the proposed solution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/314.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/314.txt new file mode 100644 index 0000000000000000000000000000000000000000..081304407ff6298a457e6236d9f4df8b5b188b31 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/314.txt @@ -0,0 +1 @@ + we present a novel image editing system that generates images as the user provides free-form mask, sketch and color as an input. our system consist of a end-to-end trainable convolutional network. contrary to the existing methods, our system wholly utilizes free-form user input with color and shape. this allows the system to respond to the user's sketch and color input, using it as a guideline to generate an image. in our particular work, we trained network with additional style loss which made it possible to generate realistic results, despite large portions of the image being removed. our proposed network architecture sc-fegan is well suited to generate high quality synthetic image using intuitive user inputs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/315.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/315.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8982b297e717e0abdf381ccbbb60a5d301c64a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/315.txt @@ -0,0 +1 @@ + intrusion detection is a fundamental part of security tools, such as adaptive security appliances, intrusion detection systems, intrusion prevention systems, and firewalls. various intrusion detection techniques are used, but their performance is an issue. 
intrusion detection performance depends on accuracy, which needs to improve to decrease false alarms and to increase the detection rate. to resolve concerns on performance, multilayer perceptron, support vector machine (svm), and other techniques have been used in recent work. such techniques indicate limitations and are not efficient for use in large data sets, such as system and network data. the intrusion detection system is used in analyzing huge traffic data; thus, an efficient classification technique is necessary to overcome the issue. this problem is considered in this paper. well-known machine learning techniques, namely, svm, random forest, and extreme learning machine (elm) are applied. these techniques are well-known because of their capability in classification. the nsl-knowledge discovery and data mining data set is used, which is considered a benchmark in the evaluation of intrusion detection mechanisms. the results indicate that elm outperforms other approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/316.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/316.txt new file mode 100644 index 0000000000000000000000000000000000000000..ccde468e55d82f87828dbc92485191d62ca2cbbc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/316.txt @@ -0,0 +1 @@ + this article is an overview of a current research based on fingerprint recognition system. in this paper we highlighted on the previous studies of fingerprint recognition system. this paper is a brief review in the conceptual and structure of fingerprint recognition. the basic fingerprint recognition system consists of four stages: firstly, the sensor which is used for enrolment & recognition to capture the biometric data. secondly, the pre-processing stage which is used to remove unwanted data and increase the clarity of ridge structure by using enhancement technique. thirdly, feature extraction stage which take the input from the output of the pre-processing stage to extract the fingerprint features. fourthly, the matching stage is to compare the acquired feature with the template in the database. finally, the database which stores the features for the matching stags. the aim of this paper is to review various recently work on fingerprint recognition system and explain fingerprint recognition stages step by step and give summaries of fingerprint databases with characteristics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/317.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/317.txt new file mode 100644 index 0000000000000000000000000000000000000000..d0d4ef6dbd0e02175eac6cc86e8b142074c3ec08 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/317.txt @@ -0,0 +1 @@ + inspired by progress in unsupervised representation learning for natural language, we examine whether similar models can learn useful representations for images. we train a sequence transformer to auto-regressively predict pixels, without incorporating knowledge of the 2d input structure. despite training on low-resolution imagenet without labels, we find that a gpt-2 scale model learns strong image representations as measured by linear probing, fine-tuning, and low-data classification. on cifar-10, we achieve 96.3% accuracy with a linear probe, outperforming a supervised wide resnet, and 99.0% accuracy with full finetuning, matching the top supervised pre-trained models. 
an even larger model trained on a mixture of imagenet and web images is competitive with self-supervised benchmarks on imagenet, achieving 72.0% top-1 accuracy on a linear probe of our features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/318.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/318.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ad297d635291e5950ba5c8d9f23fdac1fafe8e3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/318.txt @@ -0,0 +1 @@ + human face detection plays an important role in applications such as video surveillance, human computer interface, face recognition, and face image database management, etc.we propose a face detection algorithm for color images in complex backgrounds, using fuzzy logic, fast marching method and some image processing techniques.the algorithm is mainly based on skin colors.sizes, shapes of faces and facial features are fuzzy factors for verifying face candidates.to reduce computation time, we deal only with border points of each object.regardless of positions of true faces detected, we also get their relatively accurate borders for the results.experimental results demonstrate successful face detection over a wide range of facial variations in color, position, scale, orientation, 3d pose, and expression in images from several photo collections (both indoors and outdoors).the computation time is also faster than many other face algorithms based on skin colors. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/319.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/319.txt new file mode 100644 index 0000000000000000000000000000000000000000..5edf0f5043259a330b777ea2e55e2fd4eb46d357 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/319.txt @@ -0,0 +1 @@ + the term deep learning or deep neural network refers to artificial neural networks (ann) with multi layers . over the last few decades, it has been considered to be one of the most powerful tools, and has become very popular in the literature as it is able to handle a huge amount of data. the interest in having deeper hidden layers has recently begun to surpass classical methods performance in different fields; especially in pattern recognition. one of the most popular deep neural networks is the convolutional neural network (cnn). it take this name from mathematical linear operation between matrixes called convolution. cnn have multiple layers; including convolutional layer, non-linearity layer, pooling layer and fullyconnected layer. the convolutional and fully-connected layers have parameters but pooling and non-linearity layers don't have parameters. the cnn has an excellent performance in machine learning problems. specially the applications that deal with image data, such as largest image classification data set (image net), computer vision, and in natural language processing (nlp)and the results achieved were very amazing . in this paper we will explain and define all the elements and important issues related to cnn, and how these elements work. in addition, we will also state the parameters that effect cnn efficiency. this paper assumes that the readers have adequate knowledge about both machine learning and artificial neural network. 
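The layer taxonomy reviewed in 319.txt (convolutional and fully-connected layers carry trainable parameters; pooling and non-linearity layers do not) can be verified on a tiny generic Keras model, as in the sketch below; the model itself is not from that paper.

```python
# Sketch: per-layer parameter counts for a toy CNN (illustrates which layers have parameters).
import tensorflow as tf
from tensorflow.keras import layers, models

model = models.Sequential([
    layers.Conv2D(8, 3, input_shape=(32, 32, 3)),  # convolutional layer: 8*(3*3*3)+8 = 224 params
    layers.ReLU(),                                 # non-linearity layer: 0 params
    layers.MaxPooling2D(2),                        # pooling layer: 0 params
    layers.Flatten(),
    layers.Dense(10),                              # fully-connected layer: has params
])
for layer in model.layers:
    print(layer.name, layer.count_params())
```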
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/32.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/32.txt new file mode 100644 index 0000000000000000000000000000000000000000..b61bd5f81f64dd3ffc54f4120aad42f6ad6eafe7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/32.txt @@ -0,0 +1 @@ + the transition angles are defined to describe the vowelto-vowel transitions in the acoustic space of the spectral subband centroids, and the findings show that they are similar among speakers and speaking rates. in this paper, we propose to investigate the usage of polar coordinates in favor of angles to describe a speech signal by characterizing its acoustic trajectory and using them in automatic speech recognition. according to the experimental results evaluated on the braf100 dataset, the polar coordinates achieved significantly higher accuracy than the angles in the mixed and cross-gender speech recognitions, demonstrating that these representations are superior at defining the acoustic trajectory of the speech signal. furthermore, the accuracy was significantly improved when they were utilized with their first and second-order derivatives (∆, ∆∆), especially in cross-female recognition. however, the results showed they were not much more gender-independent than the conventional mel-frequency cepstral coefficients (mfccs). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/320.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/320.txt new file mode 100644 index 0000000000000000000000000000000000000000..0682f8762005806e2ca3943f01fa0378a611900a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/320.txt @@ -0,0 +1 @@ + stroke is the number one leading cause of mortality and obesity in many countries. this study preprocessing data to improve the image quality of ct scans of stroke patients by optimizing the quality of image to improve image results and to reduce noise, and also applying machine learning algorithms to classify the patients images into two sub-types of stroke disease, namely ischemic stroke and stroke haemorrhage. eight machine learning algorithms are used in this study for stroke disease classification, namely k-nearest neighbors, naive bayes, logistic regression, decision tree, random forest, multi-layer perceptron (mlp-nn), deep learning and support vector machine. our results show that random forest generates the highest level of accuracy (95.97%), along with precision values (94.39%), recall values (96.12%) and f1-measures (95.39%). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/321.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/321.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0bb18a5b9e6ab1f88f87b96b1ba8a9ee0aa93b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/321.txt @@ -0,0 +1 @@ + predicting the results of football matches poses an interesting challenge due to the fact that the sport is so popular and widespread. however, predicting the outcomes is also a difficult problem because of the number of factors which must be taken into account that cannot be quantitatively valued or modeled. as part of this work, a software solution has been developed in order to try and solve this problem. 
during the development of the system, a number of tests have been carried out in order to determine the optimal combination of features and classifiers. the results of the presented system show a satisfactory capability of prediction which is superior to the one of the reference method (most likely a priori outcome). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/322.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/322.txt new file mode 100644 index 0000000000000000000000000000000000000000..6bd8ae17afc68223c7060c7b5bdc7e6656eff3ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/322.txt @@ -0,0 +1 @@ + there are many factors that affect performance of stock market, such as global and local economy, political events, supply and demand, and out of the ordinary events, as covid-19 pandemic. the factors may not only influence the stock market movement, but also influence each other. we propose to observe the movement of dow jones industrial average in relations to daily news. we use top-5 news headlines from reddit to create 1-day and 5-day models to predict if dow jones industrial average movement will be in down and up direction from the moment the market opens till it closes. we propose use of shallow (traditional) machine learning algorithms and deep learning algorithms. additionally, we explore the effect of word representation, using tf-idf and glove approaches. moreover, we evaluate our models in terms of accuracy of prediction on data sets containing data before pandemic and during pandemic. our models show that deep learning models uniformly have higher accuracy than machine learning ones. convolution neural network with tf-idf and 5 days prediction performs the best for the dataset before the pandemic with accuracy of 59.6%. gated recurrent unit (gru), a class of recurrent neural networks, with glove and 1 day prediction outperforms the other models for dataset during the pandemic with the accuracy of 62.9%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/323.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/323.txt new file mode 100644 index 0000000000000000000000000000000000000000..cded79467a67bb57093903dc4d25cb60ba6f9cb5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/323.txt @@ -0,0 +1 @@ + there is a tremendous growth of malware with each passing day. it has become difficult to cope up with such an increasing number of malware, especially with new and unseen malware. it has posed a serious threat to software and the internet. malware and machine learning is like a pair made in heaven. the malware contains various similar patterns due to the reuse of code while machine learning is used to detect those similarities. in this paper, two experiments are performed for balanced and imbalanced data on a previously build a dataset of malware detection on api calls using various machine learning classifiers like k-nearest neighbors, gaussian naive bayes, multi naive bayes, decision tree, and random forest. in both experiments, random forest provides the best results with an accuracy of 90.38% on a balanced dataset and 98.94% on an imbalanced dataset. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/324.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/324.txt new file mode 100644 index 0000000000000000000000000000000000000000..86d18ce038fb7c4473d210fbb632326346adb04f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/324.txt @@ -0,0 +1 @@ + video summarization aims to facilitate large-scale video browsing by producing short, concise summaries that are diverse and representative of original videos. in this paper, we formulate video summarization as a sequential decision-making process and develop a deep summarization network (dsn) to summarize videos. dsn predicts for each video frame a probability, which indicates how likely a frame is selected, and then takes actions based on the probability distributions to select frames, forming video summaries. to train our dsn, we propose an end-to-end, reinforcement learning-based framework, where we design a novel reward function that jointly accounts for diversity and representativeness of generated summaries and does not rely on labels or user interactions at all. during training, the reward function judges how diverse and representative the generated summaries are, while dsn strives for earning higher rewards by learning to produce more diverse and more representative summaries. since labels are not required, our method can be fully unsupervised. extensive experiments on two benchmark datasets show that our unsupervised method not only outperforms other state-of-the-art unsupervised methods, but also is comparable to or even superior than most of published supervised approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/325.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/325.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ef44d237c641fea32fa2864f7c5a4c1b973ebdf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/325.txt @@ -0,0 +1 @@ + crime prediction is of great significance to the formulation of policing strategies and the implementation of crime prevention and control. machine learning is the current mainstream prediction method. however, few studies have systematically compared different machine learning methods for crime prediction. this paper takes the historical data of public property crime from 2015 to 2018 from a section of a large coastal city in the southeast of china as research data to assess the predictive power between several machine learning algorithms. results based on the historical crime data alone suggest that the lstm model outperformed knn, random forest, support vector machine, naive bayes, and convolutional neural networks. in addition, the built environment data of points of interests (pois) and urban road network density are input into lstm model as covariates. it is found that the model with built environment covariates has better prediction effect compared with the original model that is based on historical crime data alone. therefore, future crime prediction should take advantage of both historical crime data and covariates associated with criminological theories. not all machine learning algorithms are equally effective in crime prediction. index terms: prediction of crime hotspots, machine learning, lstm, built environment.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/326.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/326.txt new file mode 100644 index 0000000000000000000000000000000000000000..683ff1c92d7329ab6bf15c3e762fc684e66fd75e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/326.txt @@ -0,0 +1 @@ + location prediction of users from online social media brings considerable research these days. automatic recognition of location related with or referenced in records has been investigated for decades. as a standout amongst the online social network organization, twitter has pulled in an extensive number of users who send a millions of tweets on regular schedule. because of the worldwide inclusion of its users and continuous tweets, location prediction on twitter has increased noteworthy consideration in these days. tweets, the short and noisy and rich natured texts bring many challenges in research area for researchers. in proposed framework, a general picture of location prediction using tweets is studied. in particular, tweet location is predicted from tweet contents. by outlining tweet content and contexts, it is fundamentally featured that how the issues rely upon these text inputs. in this work, we predict the location of user from the tweet text exploiting machine learning techniques namely naïve bayes, support vector machine and decision tree. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/327.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/327.txt new file mode 100644 index 0000000000000000000000000000000000000000..f283645bbd81041e72847cdefd8fc56fdd6af8f2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/327.txt @@ -0,0 +1 @@ + time series data classification is a significant topic as its application can be found in a various domain. recent studies have shown that data-driven approach based on deep learning is powerful for data mining tasks. a typical deep learning method, artificial neural network (ann), has been proven to be capable for match complicated functions thus leading to the popularity. convolutional neural network (cnn) is a special kind of ann that has been widely used in the area of image processing tasks as its ability for extracting spatial features. however, it remains a challenge for implementing cnn in time series data classification. recurrent neural network (rnn) is popular for tackling time series data as it can effectively utilize temporal information. but it is time-consuming to train rnn. this paper proposes a dual path cnn-rnn cascade network (dpcrcn) that achieves an end-to-end learning for classification. we use a dual path cnn to achieve a multi-size receptive field for better feature extraction, then using rnn and the following fully-connected layers to learn the map between the given features and the output. we also use region of interest (roi) pooling to make our model capable for a flexible shape of data. we evaluate our model on activity recognition system based on multisensor data fusion (arem) dataset and we compare with many popular algorithms. we also evaluate our model using different shape of data. the results show that our model outperforms the alternatives. in addition, we provide the details of training our model. index termsconvolutional neural network (cnn)-recurrent neural network (rnn) cascade model, dual path convolutional neural network, time series data classification, data mining. 
chao yang was born in china, in 1993. he received the b.s. degree in electronic information and communication engineering from chongqing university, chongqing, china, in 2016. he is currently pursuing the master's degree with the department of computer science and technology, ocean university of china. his current research interests include machine learning, computer vision, natural language processing, and artificial intelligence. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/328.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/328.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c89f8b976bab28bc13b4d8d209060623ea1e259 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/328.txt @@ -0,0 +1 @@ + we trained a large, deep convolutional neural network to classify the 1.2 million high-resolution images in the imagenet lsvrc-2010 contest into the 1000 different classes. on the test data, we achieved top-1 and top-5 error rates of 37.5% and 17.0% which is considerably better than the previous state-of-the-art. the neural network, which has 60 million parameters and 650,000 neurons, consists of five convolutional layers, some of which are followed by max-pooling layers, and three fully-connected layers with a final 1000-way softmax. to make training faster, we used non-saturating neurons and a very efficient gpu implementation of the convolution operation. to reduce overfitting in the fully-connected layers we employed a recently-developed regularization method called "dropout" that proved to be very effective. we also entered a variant of this model in the ilsvrc-2012 competition and achieved a winning top-5 test error rate of 15.3%, compared to 26.2% achieved by the second-best entry. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/329.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/329.txt new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b2adfdd13ab975b09f3d5184e5f9fb8fe62b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/329.txt @@ -0,0 +1 @@ + with the recent rise in popularity and scale of social media, a growing need exists for systems that can extract useful information from huge amounts of data. we address the issue of detecting influenza epidemics. first, the proposed system extracts influenza related tweets using twitter api. then, only tweets that mention actual influenza patients are extracted by the support vector machine (svm) based classifier. the experiment results demonstrate the feasibility of the proposed approach (0.89 correlation to the gold standard). especially at the outbreak and early spread (early epidemic stage), the proposed method shows high correlation (0.97 correlation), which outperforms the state-of-the-art methods. this paper describes that twitter texts reflect the real world, and that nlp techniques can be applied to extract only tweets that contain useful information. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/33.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/33.txt new file mode 100644 index 0000000000000000000000000000000000000000..34daee3566127e6a622b44fbc9c13200f5ce8a85 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/33.txt @@ -0,0 +1 @@ + language models exhibit an emergent ability to learn a new task from a small number of input-output demonstrations. 
however, recent work shows that in-context learners largely rely on their pre-trained knowledge, such as the sentiment of the labels, instead of learning new associations from the input. we argue that the commonly-used few-shot evaluation using a random selection of in-context demonstrations can not disentangle models' reliance on such biases, as most of the randomly-selected demonstrations do not present relations informative for prediction beyond exposing the task's input-output distribution. therefore, to evaluate models' in-context learning ability independent of models' memory, we introduce a concept-sharing few-shot learning method choosing the demonstrations that share an underlying concept with the predicted sample. we extract a set of such concepts from available human explanations and measure how much models can benefit from presenting these concepts in few-shot demonstrations. we find that most of the recent in-context learners can not consistently benefit from the demonstrated concepts, irrespective of the model size. however, we note that t0 models are more sensitive to exhibited concepts, benefiting from concept-sharing demonstrations in 7 out of 8 evaluation scenarios. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/330.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/330.txt new file mode 100644 index 0000000000000000000000000000000000000000..9eea7b4e3920139a8e7f75ba81a326e38311f435 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/330.txt @@ -0,0 +1 @@ + wind speed prediction and forecasting is important for various business and management sectors. in this paper, we introduce new models for wind speed prediction based on graph convolutional networks (gcns). given hourly data of several weather variables acquired from multiple weather stations, wind speed values are predicted for multiple time steps ahead. in particular, the weather stations are treated as nodes of a graph whose associated adjacency matrix is learnable. in this way, the network learns the graph spatial structure and determines the strength of relations between the weather stations based on the historical weather data. we add a self-loop connection to the learnt adjacency matrix and normalize the adjacency matrix. we examine two scenarios with the self-loop connection setting (two separate models). in the first scenario, the self-loop connection is imposed as a constant additive. in the second scenario a learnable parameter is included to enable the network to decide about the self-loop connection strength. furthermore, we incorporate data from multiple time steps with temporal convolution, which together with spatial graph convolution constitutes spatio-temporal graph convolution. we perform experiments on real datasets collected from weather stations located in cities in denmark and the netherlands. the numerical experiments show that our proposed models outperform previously developed baseline models on the referenced datasets. we provide additional insights by visualizing learnt adjacency matrices from each layer of our models.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/331.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/331.txt new file mode 100644 index 0000000000000000000000000000000000000000..86928db47bcff8a624e844d5e2bd3a02c5de1658 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/331.txt @@ -0,0 +1 @@ + vehicle color information is one of the important elements in its (intelligent traffic system). in this paper, we present a vehicle color recognition method using convolutional neural network (cnn). naturally, cnn is designed to learn classification method based on shape information, but we proved that cnn can also learn classification based on color distribution. in our method, we convert the input image to two different color spaces, hsv and cie lab, and run it to some cnn architecture. the training process follow procedure introduce by krizhevsky, that learning rate is decreasing by factor of 10 after some iterations. to test our method, we use publicly vehicle color recognition dataset provided by chen. the results, our model outperform the original system provide by chen with 2% higher overall accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/332.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/332.txt new file mode 100644 index 0000000000000000000000000000000000000000..e25592f9dd2bf46b1cfa8a2d85011596764c1f70 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/332.txt @@ -0,0 +1 @@ + machine learning (ml) based forecasting mechanisms have proved their significance to anticipate in perioperative outcomes to improve the decision making on the future course of actions. the ml models have long been used in many application domains which needed the identification and prioritization of adverse factors for a threat. several prediction methods are being popularly used to handle forecasting problems. this study demonstrates the capability of ml models to forecast the number of upcoming patients affected by covid-19 which is presently considered as a potential threat to mankind. in particular, four standard forecasting models, such as linear regression (lr), least absolute shrinkage and selection operator (lasso), support vector machine (svm), and exponential smoothing (es) have been used in this study to forecast the threatening factors of covid-19. three types of predictions are made by each of the models, such as the number of newly infected cases, the number of deaths, and the number of recoveries in the next 10 days. the results produced by the study proves it a promising mechanism to use these methods for the current scenario of the covid-19 pandemic. the results prove that the es performs best among all the used models followed by lr and lasso which performs well in forecasting the new confirmed cases, death rate as well as recovery rate, while svm performs poorly in all the prediction scenarios given the available dataset.index terms covid-19, exponential smoothing method, future forecasting, adjusted r 2 score, supervised machine learning. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/333.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/333.txt new file mode 100644 index 0000000000000000000000000000000000000000..96e8d889aa8a10ec6c3aa684568569f9330ca66b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/333.txt @@ -0,0 +1 @@ + there is rapidly growing interest in using bayesian optimization to tune model and inference hyperparameters for machine learning algorithms that take a long time to run. for example, spearmint is a popular software package for selecting the optimal number of layers and learning rate in neural networks. but given that there is uncertainty about which hyperparameters give the best predictive performance, and given that fitting a model for each choice of hyperparameters is costly, it is arguably wasteful to "throw away" all but the best result, as per bayesian optimization. a related issue is the danger of overfitting the validation data when optimizing many hyperparameters. in this paper, we consider an alternative approach that uses more samples from the hyperparameter selection procedure to average over the uncertainty in model hyperparameters. the resulting approach, empirical bayes for hyperparameter averaging (eb-hyp) predicts held-out data better than bayesian optimization in two experiments on latent dirichlet allocation and deep latent gaussian models. eb-hyp suggests a simpler approach to evaluating and deploying machine learning algorithms that does not require a separate validation data set and hyperparameter selection procedure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/334.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/334.txt new file mode 100644 index 0000000000000000000000000000000000000000..ace43ba28bd8ac0298d67d55a8e9e94dfafe9d5c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/334.txt @@ -0,0 +1 @@ + frauds are known to be dynamic and have no patterns, hence they are not easy to identify. fraudsters use recent technological advancements to their advantage. they somehow bypass security checks, leading to the loss of millions of dollars. analyzing and detecting unusual activities using data mining techniques is one way of tracing fraudulent transactions. transactions. this paper aims to benchmark multiple machine learning methods such as k-nearest neighbor (knn), random forest and support vector machines (svm), while the deep learning methods such as autoencoders, convolutional neural networks (cnn), restricted boltzmann machine (rbm) and deep belief networks (dbn). the datasets which will be used are the european (eu) australian and german dataset. the area under the roc curve (auc), matthews correlation coefficient (mcc) and cost of failure are the 3-evaluation metrics that would be used. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/335.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/335.txt new file mode 100644 index 0000000000000000000000000000000000000000..5124e5898c1c39df587dc8faaa110851e1f69f5f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/335.txt @@ -0,0 +1 @@ + data mining in the crm aiming at learning available knowledge from the customer relationship by machine learning or statistical method to instruct the strategic behavior so that obtain the most profit. 
in recent years, support vector machine (svms) has been proposed as a power tool in machine leaning and data mining. this paper applies the svms to resolve the practical crm problem in a company. the final results report the good general performance of svms for crm problem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/336.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/336.txt new file mode 100644 index 0000000000000000000000000000000000000000..e50897cc7dd93582a393f9a44be92a84149f8118 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/336.txt @@ -0,0 +1 @@ + this work proposes an intelligent learning diagnosis system that supports a web-based thematic learning model, which aims to cultivate learners' ability of knowledge integration by giving the learners the opportunities to select the learning topics that they are interested, and gain knowledge on the specific topics by surfing on the internet to search related learning courseware and discussing what they have learned with their colleagues. based on the log files that record the learners' past online learning behavior, an intelligent diagnosis system is used to give appropriate learning guidance to assist the learners in improving their study behaviors and grade online class participation for the instructor. the achievement of the learners' final reports can also be predicted by the diagnosis system accurately. our experimental results reveal that the proposed learning diagnosis system can efficiently help learners to expand their knowledge while surfing in cyberspace web-based "theme-based learning" model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/337.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/337.txt new file mode 100644 index 0000000000000000000000000000000000000000..07124a0711b3be42dba6e5fe2f243ae7645154f4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/337.txt @@ -0,0 +1 @@ + we explore the property of equivocation tolerance for conflict-free replicated data types (crdts). we show that a subclass of crdts is equivocation-tolerant and can thereby cope with any number of byzantine faults: without equivocation detection, prevention or remediation, they still fulfill strong eventual consistency (sec). we also conjecture that there is only one operation-based crdt design supporting noncommutative operations that fulfills sec in byzantine environments with any number of faults. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/338.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/338.txt new file mode 100644 index 0000000000000000000000000000000000000000..58907ff364c9d60234d74501e4f84c061a7f89f3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/338.txt @@ -0,0 +1 @@ + blockchain is a type of decentralized distributed network which acts as an immutable digital ledger. despite the absence of any central governing authority to validate the blocks in the ledger, it is considered secure and immutable due to the consensus protocol among various nodes of the network. a consensus algorithm is a mechanism that guarantees the reliability of the blockchain and helps all connected nodes or peers to reach common ground regarding the present state of the blockchain network thus an ideal consensus algorithm must be secure, reliable, and fast. 
there are several different algorithms to reach a consensus among the nodes thus this article seeks to test the practicality of proof of authority in the blockchain network as a consensus algorithm and its comparison with current mainstream consensus algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/339.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/339.txt new file mode 100644 index 0000000000000000000000000000000000000000..de07733226f92920f34b815088f773f253f4a10c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/339.txt @@ -0,0 +1 @@ + to support the growing demands of neuroscience applications, researchers are transitioning to cloud computing for its scalable, robust and elastic infrastructure. nevertheless, large datasets residing in object stores may result in significant data transfer overheads during workflow execution. prefetching, a method to mitigate the cost of reading in mixed workloads, masks data transfer costs within processing time of prior tasks. we present an implementation of "rolling prefetch", a python library that implements a particular form of prefetching from aws s3 object store, and we quantify its benefits. rolling prefetch extends s3fs, a python library exposing aws s3 functionality via a file object, to add prefetch capabilities. in measured analysis performance of a 500 gb brain connectivity dataset stored on s3, we found that prefetching provides significant speed-ups of up to 1.86×, even in applications consisting entirely of data loading. the observed speed-up values are consistent with our theoretical analysis. our results demonstrate the usefulness of prefetching for scientific data processing on cloud infrastructures and provide an implementation applicable to various application domains. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/34.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/34.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ac13f50b176498a138c7568acb69ca7e1a4de8a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/34.txt @@ -0,0 +1 @@ + most research on task oriented dialog modeling is based on written text input. however, users interact with practical dialog systems often using speech as input. typically, systems convert speech into text using an automatic speech recognition (asr) system, introducing errors. furthermore, these systems do not address the differences in written and spoken language. the research on this topic is stymied by the lack of a public corpus. motivated by these considerations, our goal in hosting the speech-aware dialog state tracking challenge was to create a public corpus or task which can be used to investigate the performance gap between the written and spoken forms of input, develop models that could alleviate this gap, and establish whether text-to-speech-based (tts) systems is a reasonable surrogate to the more-labor intensive human data collection. we created three spoken versions of the popular written-domain multiwoz task -(a) tts-verbatim: written user inputs were converted into speech waveforms using a tts system, (b) human-verbatim: humans spoke the user inputs verbatim, and (c) human-paraphrased: humans paraphrased the user inputs. additionally, we provided different forms of asr output to encourage wider participation from teams that may not have access to state-of-the-art asr systems.
these included asr transcripts, word time stamps, and latent representations of the audio (audio encoder outputs). in this paper, we describe the corpus, report results from participating teams, provide preliminary analyses of their results, and summarize the current state-of-the-art in this domain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/340.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/340.txt new file mode 100644 index 0000000000000000000000000000000000000000..61405c65927ca7df83689a62e0508059636f2679 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/340.txt @@ -0,0 +1 @@ + mobile clients that consume and produce data are abundant in fog environments. low latency access to this data can only be achieved by storing it in close physical proximity to the clients. current data store systems fall short as they do not replicate data based on client movement. we propose an approach to predictive replica placement that autonomously and proactively replicates data close to likely client locations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/341.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/341.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2146b7fdf470b39b7c0b9cad8b9ce2316ac9914 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/341.txt @@ -0,0 +1 @@ + cyber-physical systems (cps) can be found everywhere: smart homes, autonomous vehicles, aircrafts, healthcare, agriculture and industrial production lines. cpss are often critical, as system failure can cause serious damage to property and human lives.today's cyber-physical systems are extremely complex, heterogeneous systems: to be able to manage their complexity in a unified way, we need an infrastructure that ensures that our systems operate with the high reliability as intended. in addition to the infrastructure, we need to provide engineers a method to ensure system reliability at design time. the paradigm of model-driven design provides a toolkit supporting the design and analysis and by choosing the proper formalisms, the model-driven design approach allows us to validate our system at design time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/342.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/342.txt new file mode 100644 index 0000000000000000000000000000000000000000..0eb18e4db1f7d4cef17f2d515e5864c1e6d2c94e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/342.txt @@ -0,0 +1 @@ + industrial control system (ics) networks transmit control and monitoring data in critical environments such as smart grid. cyber attacks on smart grid communication may cause fatal consequences on energy production, distribution, and eventually the lives of people. since the attacks can be initiated from both the inside and outside of the network, traditional smart grid security tools like firewalls or intrusion detection systems (ids), which are typically deployed on the edge of the network, are not able to detect internal threats. for this reason, we also need to analyze behavior of internal ics communication.due to its nature, ics traffic exhibits stable and predictable communication patterns. these patterns can be described using statistical models. 
by observing selected features of ics network communication like packet inter arrival times, we can create a statistical profile of the communication based on the patterns observed in the normal communication traffic. this technique is effective, fast and easy to implement. as our experiments show, statistical-based anomaly detection is able to detect common security incidents in ics communication. this paper employs selected network packet attributes to create a statistical model for anomaly detection using the local outlier factor (lof) algorithm. the proof-of-concept is demonstrated on iec 60870-5-104 (a.k.a. iec 104) protocol. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/343.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/343.txt new file mode 100644 index 0000000000000000000000000000000000000000..6972d78b207ca0bde0d0f4aa1615b1b8e1dd0569 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/343.txt @@ -0,0 +1 @@ + big data applications and analytics are employed in many sectors for a variety of goals: improving customers satisfaction, predicting market behavior or improving processes in public health. these applications consist of complex software stacks that are often run on cloud systems. predicting execution times is important for estimating the cost of cloud services and for effectively managing the underlying resources at runtime. machine learning (ml), providing black box solutions to model the relationship between application performance and system configuration without requiring in-detail knowledge of the system, has become a popular way of predicting the performance of big data applications. we investigate the cost-benefits of using supervised ml models for predicting the performance of applications on spark, one of today's most widely used frameworks for big data analysis. we compare our approach with ernest (an ml-based technique proposed in the literature by the spark inventors) on a range of scenarios, application workloads, and cloud system configurations. our experiments show that ernest can accurately estimate the performance of very regular applications, but it fails when applications exhibit more irregular patterns and/or when extrapolating on bigger data set sizes. results show that our models match or exceed ernest's performance, sometimes enabling us to reduce the prediction error from 126-187% to only 5-19%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/344.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/344.txt new file mode 100644 index 0000000000000000000000000000000000000000..5e1ca44997d869465a80ae77d0596a5f10eb5857 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/344.txt @@ -0,0 +1 @@ + we propose a simple channel-allocation method based on tug-of-war (tow) dynamics, combined with the time scheduling based on nonlinear oscillator synchronization to efficiently use of the space (channel) and time resources in wireless communications. this study demonstrates that synchronization groups, where each node selects a different channel, are non-uniformly distributed in phase space such that every distance between groups is larger than the area of influence. new type of self-organized spatiotemporal patterns can be formed for resource allocation according to channel rewards. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/345.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/345.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb8e5d60ce2b53e4dcdd6cd984ae35ab3f5bb553 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/345.txt @@ -0,0 +1 @@ + this paper presents our work-in-progress study on reference architectures as boundary objects for realizing trustworthy collaborative cyber-physical systems (cps). furthermore, the preliminary results from interviews with systems engineering experts from industry and academia are also discussed. the interview results reveal challenges in using reference architectures during the system development process. furthermore, exactly which trustworthiness attributes (security, availability, reliability, etc.) should be addressed to realize trustworthy collaborative cps is identified as an open question, which we will address in our future work. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/346.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/346.txt new file mode 100644 index 0000000000000000000000000000000000000000..5e001263666d6250a8adb37abac6a106b2dd6562 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/346.txt @@ -0,0 +1 @@ + the parallel time of a population protocol is defined as the average number of required interactions that an agent in the protocol participates, i.e., the quotient between the total number of interactions required by the protocol and the total number n of agents, or just roughly the number of required rounds with n interactions. this naming triggers an intuition that at least on the average a round of n interactions can be implemented in o(1) parallel steps. we show that when the transition function of a population protocol is treated as a black box then the expected maximum number of parallel steps necessary to implement a round of n interactions is ω(log n / log log n). we also provide a combinatorial argument for a matching upper bound on the number of parallel steps in the average case under additional assumptions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/347.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/347.txt new file mode 100644 index 0000000000000000000000000000000000000000..f4a2067c5f8e64ff6fcc2f9a7cbae8baa466f1e6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/347.txt @@ -0,0 +1 @@ + the non-fungible token (nft) is an emergent type of cryptocurrency that has garnered extensive attention since its inception. the uniqueness, indivisibility and humanistic value of nfts are the key characteristics that distinguish them from traditional tokens. the market capitalization of nft reached 21.5 billion usd in 2021, almost 200 times of all previous transactions. however, the subsequent rapid decline in nft market fever in the second quarter of 2022 casts doubts on the ostensible boom in the nft market. to date, there has been no comprehensive and systematic study of the nft trade market or of the nft bubble and hype phenomenon. to fill this gap, we conduct an in-depth investigation of the whole ethereum erc721 and erc1155 nft ecosystem via graph analysis and apply several metrics to measure the characteristics of nfts.
by collecting data from the whole blockchain, we construct three graphs, namely nft create graph, nft transfer graph, and nft hold graph, to characterize the nft traders, analyze the characteristics of nfts, and discover many observations and insights. moreover, we propose new indicators to quantify the activeness and value of nft and propose an algorithm that combines indicators and graph analyses to find bubble nfts. real-world cases demonstrate that our indicators and approach can be used to discern bubble nfts effectively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/348.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/348.txt new file mode 100644 index 0000000000000000000000000000000000000000..662fcd46a71710c3f88c0f443d85e4fcee1e4e4e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/348.txt @@ -0,0 +1 @@ + digital engineering transformation is a crucial process for the engineering paradigm shifts in the fourth industrial revolution (4ir), and artificial intelligence (ai) is a critical enabling technology in digital engineering transformation. this article discusses the following research questions: what are the fundamental changes in the 4ir? more specifically, what are the fundamental changes in engineering? what is digital engineering? what are the main uncertainties there? what is trustworthy ai? why is it important today? what are emerging engineering paradigm shifts in the 4ir? what is the relationship between the data-intensive paradigm and digital engineering transformation? what should we do for digitalization? from investigating the pattern of industrial revolutions, this article argues that ubiquitous machine intelligence (umi) is the defining power brought by the 4ir. digitalization is a condition to leverage ubiquitous machine intelligence. digital engineering transformation towards industry 4.0 has three essential building blocks: digitalization of engineering, leveraging ubiquitous machine intelligence, and building digital trust and security. the engineering design community at large is facing an excellent opportunity to bring the new capabilities of ubiquitous machine intelligence and trustworthy ai principles, as well as digital trust, together in various engineering systems design to ensure the trustworthiness of systems in industry 4.0. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/349.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/349.txt new file mode 100644 index 0000000000000000000000000000000000000000..958f11183549936ec1831a46266dc895bacec61b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/349.txt @@ -0,0 +1 @@ + having a good reputation is paramount for most organisations and companies. in fact, having an optimal corporate image allows them to have better transaction relationships with various customers and partners. however, such reputation is hard to build and easy to destroy for all kind of business commercial activities (b2c, b2b, b2b2c, b2g). a misunderstanding during the communication process to the customers, or just a bad communication strategy, can lead to a disaster for the entire company. this is emphasised by the reaction of millions of people on social networks, which can be very detrimental for the corporate image if they react negatively to a certain event. 
this is called a firestorm. in this paper, i propose a well-organised strategy for firestorm attacks on organisations, also showing how an adversary can leverage them to obtain private information on the attacked firm. standard business security procedures are not designed to operate against multi-domain attacks; therefore, i will show how it is possible to bypass the classic and advised security procedures by operating different kinds of attack. i also propose a different firestorm attack, targeting a specific business company network in an efficient way. finally, i present defensive procedures to reduce the negative effect of firestorms on a company. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/35.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/35.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ae39652da455ce9dfaf4ef8566eaa133fb0c7c5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/35.txt @@ -0,0 +1 @@ + automated summary quality assessment falls into two categories: reference-based and reference-free. reference-based metrics, historically deemed more accurate due to the additional information provided by human-written references, are limited by their reliance on human input. in this paper, we hypothesize that the comparison methodologies used by some reference-based metrics to evaluate a system summary against its corresponding reference can be effectively adapted to assess it against its source document, thereby transforming these metrics into reference-free ones. experimental results support this hypothesis. after being repurposed reference-freely, the zero-shot bertscore using the pretrained deberta-large-mnli model of <0.5b parameters consistently outperforms its original reference-based version across various aspects on the summeval and newsroom datasets. it also excels in comparison to most existing reference-free metrics and closely competes with zero-shot summary evaluators based on gpt-3.5. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/350.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/350.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3e495256732701aa23f70de607c5148ba871108 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/350.txt @@ -0,0 +1 @@ + artificial intelligence (ai) is at the forefront of modern technology, and its effects are felt in many areas of society. to prevent algorithmic disparities, fairness, accountability, transparency, and ethics (fate) in ai are being implemented. however, the current discourse on these issues is largely dominated by more economically developed countries (medc), leaving out local knowledge, cultural pluralism, and global fairness. this study aims to address this gap by examining fate-related desiderata, particularly transparency and ethics, in areas of the global south that are underserved by ai. a user study (n = 43) and a participatory session (n = 30) were conducted to achieve this goal. the results showed that ai models can encode bias and amplify stereotypes. to promote inclusivity, a community-led strategy is proposed to collect and curate representative data for responsible ai design. this will enable the affected community or individuals to monitor the increasing use of ai-powered systems.
additionally, recommendations based on public input are provided to ensure that ai adheres to social values and context-specific fate needs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/351.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/351.txt new file mode 100644 index 0000000000000000000000000000000000000000..69561bc5ae32266585ac2b9bc8ddfb6bc01aacbe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/351.txt @@ -0,0 +1 @@ + in this paper, we study the problem of mooc quality evaluation which is essential for improving the course materials, promoting students' learning efficiency, and benefiting user services. while achieving promising performances, current works still suffer from the complicated interactions and relationships of entities in mooc platforms. to tackle the challenges, we formulate the problem as a course representation learning task-based and develop an information-aware graph representation learning (iagrl) for multiview mooc quality evaluation. specifically, we first build a mooc heterogeneous network (hin) to represent the interactions and relationships among entities in mooc platforms. and then we decompose the mooc hin into multiple single-relation graphs based on meta-paths to depict the multi-view semantics of courses. the course representation learning can be further converted to a multi-view graph representation task. different from traditional graph representation learning, the learned course representations are expected to match the following three types of validity: (1) the agreement on expressiveness between the raw course portfolio and the learned course representations; (2) the consistency between the representations in each view and the unified representations; (3) the alignment between the course and mooc platform representations. therefore, we propose to exploit mutual information for preserving the validity of course representations. we conduct extensive experiments over real-world mooc datasets to demonstrate the effectiveness of our proposed method. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/352.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/352.txt new file mode 100644 index 0000000000000000000000000000000000000000..e880086a5420cc8a3c4b72b17735db21f3e323fd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/352.txt @@ -0,0 +1 @@ + aim: a fish farm is an area where fish raise and bred for food. fish farm environments support the care and management of seafood within a controlled environment. over the past few decades, there has been a remarkable increase in the calorie intake of protein attributed to seafood. along with this, there are significant opportunities within the fish farming industry for economic development. determining the fish diseases, monitoring the aquatic organisms, and examining the imbalance in the water element are some key factors that require precise observation to determine the accuracy of the acquired data. similarly, due to the rapid expansion of aquaculture, new technologies are constantly being implemented in this sector to enhance efficiency. however, the existing approaches have often failed to provide an efficient method of farming fish. methods: this work has kept aside the traditional approaches and opened up new dimensions to perform accurate analysis by adopting a distributed ledger technology.
our work analyses the current state-of-the-art of fish farming and proposes a fish farm ecosystem that relies on a private-by-design architecture based on the hyperledger fabric private-permissioned distributed ledger technology. results: the proposed method puts forward accurate and secure storage of the retrieved data from multiple sensors across the ecosystem so that the adhering entities can exercise their decision based on the acquired data. conclusion: this study demonstrates a proof-of-concept to signify the efficiency and usability of the future fish farm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/353.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/353.txt new file mode 100644 index 0000000000000000000000000000000000000000..afa8d4e31ca156f5eacb4c658a51631ae42e5009 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/353.txt @@ -0,0 +1 @@ + increasingly, cyber aggression becomes the prevalent phenomenon that erodes the social media environment. however, due to subjective and expense, the traditional self-reporting questionnaire is hard to be employed in the current cyber area. in this study, we put forward the prediction model for cyber aggression based on the cutting-edge deep learning algorithm. building on 320 active weibo users' social media activities, we construct basic, dynamic, and content features. we elaborate cyber aggression on three dimensions: social exclusion, malicious humour, and guilt induction. we then build the prediction model combined with pretrained bert model. the empirical evidence shows outperformance and supports a stronger prediction with the bert model than traditional machine learning models without extra pretrained information. this study offers a solid theoretical model for cyber aggression prediction. furthermore, this study contributes to cyber aggression behaviors' probing and social media platforms' organization. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/354.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/354.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9216db58356877b703c66751a927785219af08f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/354.txt @@ -0,0 +1 @@ + increasing computational power and improving deep learning methods have made computer vision technologies pervasively common in urban environments. their applications in policing, traffic management, and documenting public spaces are increasingly common. despite the often-discussed biases in the algorithms' training and unequally borne benefits, almost all applications similarly reduce urban experiences to simplistic, reductive, and mechanistic measures. there is a lack of context, depth, and specificity in these practices that enables semantic knowledge or analysis within urban contexts, especially within the context of using and occupying urban space. this paper will critique existing uses of artificial intelligence and computer vision in urban practices to propose a new framework for understanding people, action, and public space. this paper revisits geertz's use of thick descriptions in generating interpretive theories of culture and activity and uses this lens to establish a framework to evaluate the varied uses of computer vision technologies that weigh meaning. we discuss how the framework's positioning may differ (and conflict) between different users of the technology.
this paper also discusses the current use and training of deep learning algorithms and how this process limits semantic learning and proposes three potential methodologies for gaining a more contextually specific, urbansemantic, description of urban space relevant to urbanists. this paper contributes to the critical conversations regarding the proliferation of artificial intelligence by challenging the current applications of these technologies in the urban environment by highlighting their failures within this context while also proposing an evolution of these algorithms that may ultimately make them sensitive and useful within this spatial and cultural milieu. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/355.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/355.txt new file mode 100644 index 0000000000000000000000000000000000000000..c25600b556d881bbef9f75df0539f1a293be2321 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/355.txt @@ -0,0 +1 @@ + the qubes platform was conceived as a "science education gateway" and designed to accelerate innovation in undergraduate stem education. the technical infrastructure was purpose built to provide more equitable access to professional resources, support learning that reflects authentic science, and promote open education practices. four platform services (oer library access; professional learning; partner support; and customizable workspaces) support overlapping faculty user communities, provide multiple points of entry, and enable manifold use case scenarios. the integrated nature of the platform makes it possible to collect, curate, and disseminate a diverse array of reform resources in a scalable and sustainable manner. we believe that the qubes platform has the capacity to broaden participation in scholarship around teaching and learning and, furthermore, that it can help to lower faculty barriers to the adoption of reform practices. the role of cyberinfrastructure in undergraduate stem education is generally underappreciated and warrants further exploration. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/356.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/356.txt new file mode 100644 index 0000000000000000000000000000000000000000..e045c6c811d838541ad2a7ef43b82b999d07569e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/356.txt @@ -0,0 +1 @@ + the state of emergency declaration in japan due to the covid-19 pandemic affected many aspects of society in the country, much like the rest of the world. one sector that felt its disruptive impact was education. as educational institutions raced to implement emergency remote teaching (ert) to continue providing the learning needs of students, some have opened to innovative interventions. this paper describes a case of ert where filipino vocabulary was taught to a class of japanese students taking philippine studies in a japanese university using a cognitive innovation based on virtual reality, an immersive technology often researched for immersion and presence. students were divided into three groups to experience six lessons designed around virtual reality photo-based tours at different immersion levels. while the effect of immersion on satisfaction was not found to be statistically significant, presence and satisfaction were found to be correlated. 
despite challenges that were encountered, benefits like enjoyment, increased engagement, and perceived learning were reported by the students. our findings exemplify how emerging multisensory technologies can be used to enhance affective and cognitive dimensions of human experience while responding to gaps created by the spatial limitations of remote learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/357.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/357.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f524d90985c8a3e2c115d7e4f6a78a09d00d390 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/357.txt @@ -0,0 +1 @@ + recent advances in artificial intelligence (ai) have sparked renewed interest in its potential to improve education. however, ai is a loose umbrella term that refers to a collection of methods, capabilities, and limitations-many of which are often not explicitly articulated by researchers, education technology companies, or other ai developers. in this paper, we seek to clarify what "ai" is and the potential it holds to both advance and hamper educational opportunities that may improve the human condition. we offer a basic introduction to different methods and philosophies underpinning ai, discuss recent advances, explore applications to education, and highlight key limitations and risks. we conclude with a set of questions that educationalists may ask as they encounter ai in their research and practice. our hope is to make often jargon-laden terms and concepts accessible, so that all are equipped to understand, interrogate, and ultimately shape the development of humancentered ai in education. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/358.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/358.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ba5dde244d36dba85cbb79ed7e1d7755cf75427 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/358.txt @@ -0,0 +1 @@ + when educational institutions worldwide scrambled for ways to continue their classes during lockdowns caused by the covid-19 pandemic, the use of information and communication technology (ict) for remote teaching has become widely considered to be a potential solution. as universities raced to implement emergency remote teaching (ert) strategies in japan, some have explored innovative interventions other than webinar platforms and learning management systems to bridge the gap caused by restricted mobility among teachers and learners. one such innovation is virtual reality (vr). vr has been changing the landscape of higher education because of its ability to "teleport" learners to various places by simulating real-world environments in the virtual world. some teachers, including the authors of this paper, explored integrating vr into their activities to address issues caused by geographical limitations brought about by the heightened restrictions in 2020. results were largely encouraging. however, rules started relaxing in the succeeding years as more people got vaccinated. thus, some fully online classes in japan shifted to blended learning as they moved toward fully returning to in-person classes prompting educators to modify how they implemented their vr-based interventions. 
this paper describes how a class of university students in japan who were taking a filipino language course experienced a vr-based intervention in blended mode, which was originally prototyped during the peak of the ert era. moreover, adjustments and comparisons regarding methodological idiosyncrasies and findings between the fully online iteration and the recently implemented blended one are reported in detail. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/359.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/359.txt new file mode 100644 index 0000000000000000000000000000000000000000..53aadc06ea2c033894205d239908a88a1671ad0a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/359.txt @@ -0,0 +1 @@ + non-adversarial robustness, also known as natural robustness, is a property of deep learning models that enables them to maintain performance even when faced with distribution shifts caused by natural variations in data. however, achieving this property is challenging because it is difficult to predict in advance the types of distribution shifts that may occur. to address this challenge, researchers have proposed various approaches, some of which anticipate potential distribution shifts, while others utilize knowledge about the shifts that have already occurred to enhance model generalizability. in this paper, we present a brief overview of the most recent techniques for improving the robustness of computer vision methods, as well as a summary of commonly used robustness benchmark datasets for evaluating the model's performance under data distribution shifts. finally, we examine the strengths and limitations of the approaches reviewed and identify general trends in deep learning robustness improvement for computer vision. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/36.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/36.txt new file mode 100644 index 0000000000000000000000000000000000000000..1679e12311c19f85a4c0c6c1e7255d2fe08cb419 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/36.txt @@ -0,0 +1 @@ + it is indisputable that physical activity is vital for an individual's health and wellness. however, a global prevalence of physical inactivity has induced significant personal and socioeconomic implications. in recent years, a significant amount of work has showcased the capabilities of self-tracking technology to create positive health behavior change. this work is motivated by the potential of personalized and adaptive goal-setting techniques in encouraging physical activity via self-tracking. to this end, we propose ubiwear, an end-to-end framework for intelligent physical activity prediction, with the ultimate goal to empower data-driven goal-setting interventions. to achieve this, we experiment with numerous machine learning and deep learning paradigms as a robust benchmark for physical activity prediction tasks. to train our models, we utilize, "myheart counts", an open, large-scale dataset collected in-the-wild from thousands of users. we also propose a prescriptive framework for self-tracking aggregated data preprocessing, to facilitate data wrangling of real-world, noisy data. our best model achieves a mae of 1087 steps, 65% lower than the state of the art in terms of absolute error, proving the feasibility of the physical activity prediction task, and paving the way for future research. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/360.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/360.txt new file mode 100644 index 0000000000000000000000000000000000000000..82f031957078b3e3cad1fd63f7ccdef00bfae4ec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/360.txt @@ -0,0 +1 @@ + fake currency, unauthorized imitation money lacking government approval, constitutes a form of fraud. particularly in afghanistan, the prevalence of fake currency poses significant challenges and detrimentally impacts the economy. while banks and commercial establishments employ authentication machines, the public lacks access to such systems, necessitating a program that can detect counterfeit banknotes accessible to all. this paper introduces a method using image processing to identify counterfeit afghan banknotes by analyzing specific security features. extracting first- and second-order statistical features from input images, the weka machine learning tool was employed to construct models and perform classification with random forest, part, and naïve bayes algorithms. the random forest algorithm achieved an exceptional accuracy of 99% in detecting fake afghan banknotes, indicating the efficacy of the proposed method as a solution for identifying counterfeit currency. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/361.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/361.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d31df4861151c98391f43b6ac18ef6cc4113e3b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/361.txt @@ -0,0 +1 @@ + despite outstanding performance in many tasks, language models are notoriously inclined to make factual errors in tasks requiring arithmetic computation. we address this deficiency by creating calc-x, a collection of datasets that demonstrates the appropriate use of a calculator in reasoning chains. calc-x is suitable for teaching language models to offload computations to a symbolic system. we survey and unify several existing chain-of-thought datasets into a proposed format, resulting in a standard collection of over 300,000 samples requiring arithmetic reasoning. finally, we use the new calc-x collection to train open-source calculator-using models we call calcformers and show that these models approximately double the accuracy of generating correct results compared to vanilla language model baselines. we make all calc-x datasets, source code and calcformers models publicly available at https://github.com/prompteus/calc-x. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/362.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/362.txt new file mode 100644 index 0000000000000000000000000000000000000000..95b3c79fccff8dca5c88c50f798ad4e0a1a13cf9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/362.txt @@ -0,0 +1 @@ + as the size of large language models (llms) continues to grow, model compression without sacrificing accuracy has become a crucial challenge for deployment. while some quantization methods, such as gptq, have made progress in achieving acceptable 4-bit weight-only quantization, attempts at lower bit quantization often result in severe performance degradation.
in this paper, we introduce a technique called norm tweaking, which can be used as a plugin in current ptq methods to achieve high precision while being cost-efficient. our approach is inspired by the observation that rectifying the quantized activation distribution to match its float counterpart can readily restore accuracy for llms. to achieve this, we carefully design a tweaking strategy that includes calibration data generation and channel-wise distance constraint to update the weights of normalization layers for better generalization. we conduct extensive experiments on various datasets using several open-sourced llms. our method demonstrates significant improvements in both weight-only quantization and joint quantization of weights and activations, surpassing existing ptq methods. on glm-130b and opt-66b, our method even achieves the same level of accuracy at 2-bit quantization as their float ones. our simple and effective approach makes it more practical for real-world applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/363.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/363.txt new file mode 100644 index 0000000000000000000000000000000000000000..71fa6a51a55a405cd9cd1d046742209adb266c4a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/363.txt @@ -0,0 +1 @@ + k-means is a popular clustering objective, although it is inherently non-robust and sensitive to outliers. its popular seeding or initialization called k-means++ uses d^2 sampling and comes with a provable o(log k) approximation guarantee arthur and vassilvitskii (2007). however, in the presence of adversarial noise or outliers, d^2 sampling is more likely to pick centers from distant outliers instead of inlier clusters, and therefore its approximation guarantees w.r.t. the k-means solution on inliers do not hold. assuming that the outliers constitute a constant fraction of the given data, we propose a simple variant in the d^2 sampling distribution, which makes it robust to the outliers. our algorithm runs in o(ndk) time, outputs o(k) clusters, discards marginally more points than the optimal number of outliers, and comes with a provable o(1) approximation guarantee. our algorithm can also be modified to output exactly k clusters instead of o(k) clusters, while keeping its running time linear in n and d. this is an improvement over previous results for robust k-means based on lp relaxation and rounding charikar et al. (2001), krishnaswamy et al. (2018) and robust k-means++ deshpande et al. (2020). our empirical results show the advantage of our algorithm over k-means++ arthur and vassilvitskii (2007), uniform random seeding, greedy sampling for k-means bhaskara et al. (2019), and robust k-means++ deshpande et al. (2020), on standard real-world and synthetic data sets used in previous work. our proposal is easily amenable to scalable, faster, parallel implementations of k-means++ bachem et al. (2018); bahmani et al. (2012) and is of independent interest for coreset constructions in the presence of outliers feldman and langberg (2011); feldman et al. (2007); langberg and schulman (2010). 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/364.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/364.txt new file mode 100644 index 0000000000000000000000000000000000000000..e4a2683c013c6a1cb853e76a45dd44aa5c610d23 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/364.txt @@ -0,0 +1 @@ + radar sensors offer power-efficient solutions for always-on smart devices, but processing the data streams on resource-constrained embedded platforms remains challenging. this paper presents novel techniques that leverage the temporal correlation present in streaming radar data to enhance the efficiency of early exit neural networks for deep learning inference on embedded devices. these networks add additional classifier branches between the architecture's hidden layers that allow for an early termination of the inference if their result is deemed sufficient enough by an at-runtime decision mechanism. our methods enable more informed decisions on when to terminate the inference, reducing computational costs while maintaining a minimal loss of accuracy. our results demonstrate that our techniques save up to 26 % of operations per inference over a single exit network and 12 % over a confidence-based early exit version. our proposed techniques work on commodity hardware and can be combined with traditional optimizations, making them accessible for resource-constrained embedded platforms commonly used in smart devices. such efficiency gains enable real-time radar data processing on resource-constrained platforms, allowing for new applications in the context of smart homes, internetof-things, and human-computer interaction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/365.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/365.txt new file mode 100644 index 0000000000000000000000000000000000000000..17f1d49455282198525f019c73e9c857c38e5d03 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/365.txt @@ -0,0 +1 @@ + geospatial sciences include a wide range of applications, from environmental monitoring transportation to infrastructure planning, as well as location-based analysis and services. graph theory algorithms in mathematics have emerged as indispensable tools in these domains due to their capability to model and analyse spatial relationships efficiently. this article explores the applications of graph theory algorithms in geospatial sciences, highlighting their role in network analysis, spatial connectivity, geographic information systems, and various other spatial problem-solving scenarios like digital twin. the article provides a comprehensive idea about graph theory's key concepts and algorithms that assist the geospatial modelling processes and insights into real-world geospatial challenges and opportunities. it lists the extensive research, innovative technologies and methodologies implemented in this domain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/366.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/366.txt new file mode 100644 index 0000000000000000000000000000000000000000..357894c447761651422049ed4fc9d5aa68a49bd4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/366.txt @@ -0,0 +1 @@ + recent studies propose enhancing machine learning models by aligning the geometric characteristics of the latent space with the underlying data structure. 
instead of relying solely on euclidean space, researchers have suggested using hyperbolic and spherical spaces with constant curvature, or their combinations (known as product manifolds), to improve model performance. however, there exists no principled technique to determine the best latent product manifold signature, which refers to the choice and dimensionality of manifold components. to address this, we introduce a novel notion of distance between candidate latent geometries using the gromov-hausdorff distance from metric geometry. we propose using a graph search space that uses the estimated gromov-hausdorff distances to search for the optimal latent geometry. in this work we focus on providing a description of an algorithm to compute the gromov-hausdorff distance between model spaces and its computational implementation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/367.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/367.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/368.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/368.txt new file mode 100644 index 0000000000000000000000000000000000000000..f9a593820f1cf6aa3c30383c7d9e16842d236fc0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/368.txt @@ -0,0 +1 @@ + porting ml models trained on lab data to real-world situations has long been a challenge. this paper discusses porting a lab-trained lifting identification model to the real-world. with performance much lower than on training data, we explored causes of the failure and proposed four potential solutions to increase model performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/369.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/369.txt new file mode 100644 index 0000000000000000000000000000000000000000..616432e3316a7f4670252fcaf9c821a97700826c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/369.txt @@ -0,0 +1 @@ + attribute reconstruction is used to predict node or edge features in the pre-training of graph neural networks. given a large number of molecules, they learn to capture structural knowledge, which is transferable for various downstream property prediction tasks and vital in chemistry, biomedicine, and material science. previous strategies that randomly select nodes to do attribute masking leverage the information of local neighbors however, the over-reliance of these neighbors inhibits the model's ability to learn from higher-level substructures. for example, the model would learn little from predicting three carbon atoms in a benzene ring based on the other three but could learn more from the inter-connections between the functional groups, or called chemical motifs. in this work, we propose and investigate motif-aware attribute masking strategies to capture inter-motif structures by leveraging the information of atoms in neighboring motifs. once each graph is decomposed into disjoint motifs, the features for every node within a sample motif are masked. the graph decoder then predicts the masked features of each node within the motif for reconstruction. we evaluate our approach on eight molecular property prediction datasets and demonstrate its advantages. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/37.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/37.txt new file mode 100644 index 0000000000000000000000000000000000000000..4f0e94a7ddf6c3f03f5c2350c530629896ffbe22 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/37.txt @@ -0,0 +1 @@ + we present a method for introducing a text encoder into pretrained end-to-end speech translation systems. it enhances the ability of adapting one modality (i.e., source-language speech) to another (i.e., source-language text). thus, the speech translation model can learn from both unlabeled and labeled data, especially when the source-language text data is abundant. beyond this, we present a denoising method to build a robust text encoder that can deal with both normal and noisy text data. our system sets new state-of-the-arts on the must-c en-de, en-fr, and librispeech en-fr tasks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/370.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/370.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6698175814ca6f3a938f9f6507d7bdab82d925b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/370.txt @@ -0,0 +1 @@ + synthetic data generation has emerged as a crucial topic for financial institutions, driven by multiple factors, such as privacy protection and data augmentation. many algorithms have been proposed for synthetic data generation but reaching the consensus on which method we should use for the specific data sets and use cases remains challenging. moreover, the majority of existing approaches are "unsupervised" in the sense that they do not take into account the downstream task. to address these issues, this work presents a novel synthetic data generation framework. the framework integrates a supervised component tailored to the specific downstream task and employs a meta-learning approach to learn the optimal mixture distribution of existing synthetic distributions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/371.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/371.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d2c194a9bca4e8d80b1e0dfbdd77e3dc6259dfc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/371.txt @@ -0,0 +1 @@ + the provision of social care applications is crucial for elderly people to improve their quality of life and enables operators to provide early interventions. accurate predictions of user dropouts in healthy ageing applications are essential since they are directly related to individual health statuses. machine learning (ml) algorithms have enabled highly accurate predictions, outperforming traditional statistical methods that struggle to cope with individual patterns. however, ml requires a substantial amount of data for training, which is challenging due to the presence of personal identifiable information (pii) and the fragmentation posed by regulations. in this paper, we present a federated machine learning (fml) approach that minimizes privacy concerns and enables distributed training, without transferring individual data. we employ collaborative training by considering individuals and organizations under fml, which models both cross-device and cross-silo learning scenarios. 
our approach is evaluated on a real-world dataset with non-independent and identically distributed (non-iid) data among clients, class imbalance and label ambiguity. our results show that data selection and class imbalance handling techniques significantly improve the predictive accuracy of models trained under fml, demonstrating comparable or superior predictive performance than traditional ml models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/372.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/372.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5e57f2be628e54c1c5ea3526e3df177cfc29330 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/372.txt @@ -0,0 +1 @@ + the use of internet of things (iot) sensors for air pollution monitoring has significantly increased, resulting in the deployment of low-cost sensors. despite this advancement, accurately calibrating these sensors in uncontrolled environmental conditions remains a challenge. to address this, we propose a novel approach that leverages graph neural networks, specifically the graph attention network module, to enhance the calibration process by fusing data from sensor arrays. through our experiments, we demonstrate the effectiveness of our approach in significantly improving the calibration accuracy of sensors in iot air pollution monitoring platforms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/373.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/373.txt new file mode 100644 index 0000000000000000000000000000000000000000..76b03296c4531b147dfc5b36b0298f2ebaef37df --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/373.txt @@ -0,0 +1 @@ + reproducing research results in the networking community is important for both academia and industry. the current best practice typically resorts to three approaches: (1) looking for publicly available prototypes; (2) contacting the authors to get a private prototype; and (3) manually implementing a prototype following the description of the publication. however, most published network research does not have public prototypes and private prototypes are hard to get. as such, most reproducing efforts are spent on manual implementation based on the publications, which is both time and labor consuming and error-prone. in this paper, we boldly propose reproducing network research results using the emerging large language models (llms). in particular, we first prove its feasibility with a small-scale experiment, in which four students with essential networking knowledge each reproduces a different networking system published in prominent conferences and journals by prompt engineering chatgpt. we report the experiment's observations and lessons and discuss future open research questions of this proposal. this work raises no ethical issue. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/374.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/374.txt new file mode 100644 index 0000000000000000000000000000000000000000..61a8780b37d0e3ec1d6fed87082707e3954f8375 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/374.txt @@ -0,0 +1 @@ + large machine learning models trained on diverse data have recently seen unprecedented success. 
federated learning enables training on private data that may otherwise be inaccessible, such as domain-specific datasets decentralized across many clients. however, federated learning can be difficult to scale to large models when clients have limited resources. this challenge often results in a trade-off between model size and access to diverse data. to mitigate this issue and facilitate training of large models on edge devices, we introduce a simple yet effective strategy, federated layer-wise learning, to simultaneously reduce per-client memory, computation, and communication costs. clients train just a single layer each round, reducing resource costs considerably with minimal performance degradation. we also introduce federated depth dropout, a complementary technique that randomly drops frozen layers during training, to further reduce resource usage. coupling these two techniques enables us to effectively train significantly larger models on edge devices. specifically, we reduce training memory usage by 5× or more in federated self-supervised representation learning, and demonstrate that performance in downstream tasks is comparable to conventional federated self-supervised learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/375.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/375.txt new file mode 100644 index 0000000000000000000000000000000000000000..17aae6349f7b3f8f353f14d4d4b58ff18c4aef45 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/375.txt @@ -0,0 +1 @@ + this paper explores the generalization characteristics of iterative learning algorithms with bounded updates for non-convex loss functions, employing information-theoretic techniques. our key contribution is a novel bound for the generalization error of these algorithms with bounded updates. our approach introduces two main novelties: 1) we reformulate the mutual information as the uncertainty of updates, providing a new perspective, and 2) instead of using the chaining rule of mutual information, we employ a variance decomposition technique to decompose information across iterations, allowing for a simpler surrogate process. we analyze our generalization bound under various settings and demonstrate improved bounds. to bridge the gap between theory and practice, we also examine the previously observed scaling behavior in large language models. ultimately, our work takes a further step for developing practical generalization theories. the uncertainty of updates refers to how the update will vary for different datasets s_n ∼ µ^⊗n. instead of applying the chaining rule of mutual information, we use a variance decomposition method to decompose information across iterations. from this perspective, we establish the generalization bound for general iterative algorithms with bounded updates by employing a surrogate process that adds noise exclusively to the original process's final update. we analyze our generalization bound in different situations. our work achieves a better vanishing rate guarantee than the previous work of neu et al. (2021). we also investigate the gap between our theoretical framework and practical applications by analyzing the previous discovery of the scaling behavior in large language models. 
our model sheds light on developing practically useful generalization theories. the contributions of our work can be summarized as follows: • this paper offers a novel viewpoint for analyzing the mutual information i(w, s_n) by focusing on the uncertainty of updates. • a new generalization bound, derived from an information-theoretic approach, is presented. this bound is applicable to iterative learning algorithms with bounded updates. • we investigate the generalization behavior of various types of bounded-update iterative learning algorithms. additionally, we summarize the scaling rules of large language models from previous experimental findings to examine the gap between theoretical and practical aspects. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/376.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/376.txt new file mode 100644 index 0000000000000000000000000000000000000000..f859995c4ea859a039a19262154ed776e7d98343 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/376.txt @@ -0,0 +1 @@ + neural language models are increasingly deployed into apis and websites that allow a user to pass in a prompt and receive generated text. many of these systems do not reveal generation parameters. in this paper, we present methods to reverse-engineer the decoding method used to generate text (i.e., top-k or nucleus sampling). our ability to discover which decoding strategy was used has implications for detecting generated text. additionally, the process of discovering the decoding strategy can reveal biases caused by selecting decoding settings which severely truncate a model's predicted distributions. we perform our attack on several families of open-source language models, as well as on production systems (e.g., chatgpt). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/377.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/377.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e3a6d82c4e2ab27d9691e3a20917310805cacba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/377.txt @@ -0,0 +1 @@ + low rank decomposition (lrd) is a model compression technique applied to the weight tensors of deep learning models in order to reduce the number of trainable parameters and computational complexity. however, due to the high number of new layers added to the architecture after applying lrd, it may not lead to a high training/inference acceleration if the decomposition ranks are not small enough. the issue is that using small ranks increases the risk of a significant accuracy drop after decomposition. in this paper, we propose two techniques for accelerating low rank decomposed models without requiring the use of small ranks for decomposition. these methods include rank optimization and sequential freezing of decomposed layers. we perform experiments on both convolutional and transformer-based models. experiments show that these techniques can improve the model throughput up to 60% during training and 37% during inference when combined together while preserving the accuracy close to that of the original models. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/378.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/378.txt new file mode 100644 index 0000000000000000000000000000000000000000..1cdb21432a4300b569ee6582dd135365e7dc18fb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/378.txt @@ -0,0 +1 @@ + existing approaches for classifying dynamic graphs either lift graph kernels to the temporal domain, or use graph neural networks (gnns). however, current baselines have scalability issues, cannot handle a changing node set, or do not take edge weight information into account. we propose filtration surfaces, a novel method that is scalable and flexible, to alleviate said restrictions. we experimentally validate the efficacy of our model and show that filtration surfaces outperform previous state-of-the-art baselines on datasets that rely on edge weight information. our method does so while being either completely parameter-free or having at most one parameter, and yielding the lowest overall standard deviation among similarly scalable methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/379.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/379.txt new file mode 100644 index 0000000000000000000000000000000000000000..195a63a945415d3896750f80cb9fbb485c5aeed6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/379.txt @@ -0,0 +1 @@ + with polycystic kidney disease (pkd) potentially leading to fatal complications in patients due to the formation of cysts in kidneys, early detection of pkd is crucial for effective management of the condition. however, the various patient-specific factors that play a role in the diagnosis make it an intricate puzzle for clinicians to solve, leading to possible kidney failure. therefore, in this study we aim to utilize a deep learning-based approach for early disease detection through gene expression analysis. the devised neural network is able to achieve accurate and robust prediction results for possible pkd in kidneys, thereby improving patient outcomes. furthermore, by conducting a gene ontology analysis, we were able to predict the top gene processes and functions that pkd may affect. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/38.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/38.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e9e4f6a4c19d7a2bd1370406aefae1b41fba5d3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/38.txt @@ -0,0 +1 @@ + related works used indexes like cka and variants of cca to measure the similarity of cross-lingual representations in multilingual language models. in this paper, we argue that assumptions of cka/cca align poorly with one of the motivating goals of cross-lingual learning analysis, i.e., explaining zero-shot cross-lingual transfer. we highlight what valuable aspects of cross-lingual similarity these indexes fail to capture and provide a motivating case study demonstrating the problem empirically. then, we introduce average neuron-wise correlation (anc) as a straightforward alternative that is exempt from the difficulties of cka/cca and is good specifically in a cross-lingual context. 
finally, we use anc to construct evidence that the previously introduced "first align, then predict" pattern takes place not only in masked language models (mlms) but also in multilingual models with causal language modeling objectives (clms). moreover, we show that the pattern extends to the scaled versions of the mlms and clms (up to 85x original mbert). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/380.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/380.txt new file mode 100644 index 0000000000000000000000000000000000000000..92074500253ffd766db7a0cf1c033535c968e4d9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/380.txt @@ -0,0 +1 @@ + semantic communication is poised to play a pivotal role in shaping the landscape of future ai-driven communication systems. its challenge of extracting semantic information from the original complex content and regenerating semantically consistent data at the receiver, possibly being robust to channel corruptions, can be addressed with deep generative models. this icassp special session overview paper discloses the semantic communication challenges from the machine learning perspective and unveils how deep generative models will significantly enhance semantic communication frameworks in dealing with real-world complex data, extracting and exploiting semantic information, and being robust to channel corruptions. alongside establishing this emerging field, this paper charts novel research pathways for the next generative semantic communication frameworks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/381.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/381.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9059aaaba8d247ff5e191911d4bc07cf83999f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/381.txt @@ -0,0 +1 @@ + the design process of centrifugal compressors requires applying an optimization process which is computationally expensive due to complex analytical equations underlying the compressor's dynamical equations. although regression surrogate models could drastically reduce the computational cost of such a process, the major challenge is the scarcity of data for training the surrogate model. aiming to strategically exploit the labeled samples, we propose the active-compdesign framework in which we combine a thermodynamics-based compressor model (i.e., our internal software for compressor design) and a gaussian process-based surrogate model within a deployable active learning (al) setting. we first conduct experiments in an offline setting and further extend it to an online al framework where a real-time interaction with the thermodynamics-based compressor's model allows the deployment in production. active-compdesign shows a significant performance improvement in surrogate modeling by leveraging an uncertainty-based query function of samples within the al framework with respect to the random selection of data points. moreover, our framework in production has reduced the total computational time of the compressor's design optimization, making it around 46% faster than relying on the internal thermodynamics-based simulator while achieving the same performance. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/382.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/382.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b82a5285c8fb690d6bab562eadd8453611c96e8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/382.txt @@ -0,0 +1 @@ + metric learning aims at finding a suitable distance metric over the input space, to improve the performance of distance-based learning algorithms. in high-dimensional settings, it can also serve as dimensionality reduction by imposing a lowrank restriction to the learnt metric. in this paper, we consider the problem of learning a mahalanobis metric, and instead of training a low-rank metric on high-dimensional data, we use a randomly compressed version of the data to train a fullrank metric in this reduced feature space. we give theoretical guarantees on the error for mahalanobis metric learning, which depend on the stable dimension of the data support, but not on the ambient dimension. our bounds make no assumptions aside from i.i.d. data sampling from a bounded support, and automatically tighten when benign geometrical structures are present. an important ingredient is an extension of gordon's theorem, which may be of independent interest. we also corroborate our findings by numerical experiments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/383.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/383.txt new file mode 100644 index 0000000000000000000000000000000000000000..0858e3466dace78900e1ec08df3b28f9771ab899 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/383.txt @@ -0,0 +1 @@ + short-term load forecasting is of utmost importance in the efficient operation and planning of power systems, given their inherent non-linear and dynamic nature. recent strides in deep learning have shown promise in addressing this challenge. however, these methods often grapple with hyperparameter sensitivity, opaqueness in interpretability, and high computational overhead for real-time deployment. this paper proposes an innovative approach that effectively overcomes the aforementioned problems. the approach utilizes the particle swarm optimization algorithm to autonomously tune hyperparameters, a multi-head attention mechanism to discern the salient features crucial for accurate forecasting, and a streamlined framework for computational efficiency. the method was subjected to rigorous evaluation using a genuine electricity demand dataset. the results underscore its superiority in terms of accuracy, robustness, and computational efficiency. notably, its mean absolute percentage error of 1.9376 marks a significant improvement over existing stateof-the-art approaches, heralding a new era in short-term load forecasting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/384.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/384.txt new file mode 100644 index 0000000000000000000000000000000000000000..212b3880f4051b5c2c7cc0e91fb22be9cf5e4a05 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/384.txt @@ -0,0 +1 @@ + this paper presents our proposed approach that won the first prize at the iclr competition "hardware aware efficient training". the challenge is to achieve the highest possible accuracy in an image classification task in less than 10 minutes. 
the training is done on a small dataset of 5000 images picked randomly from the cifar-10 dataset. the evaluation is performed by the competition organizers on a secret dataset with 1000 images of the same size. our approach includes applying a series of techniques for improving the generalization of resnet-9, including sharpness-aware optimization, label smoothing, gradient centralization, input patch whitening, as well as meta-learning based training. our experiments show that resnet-9 can achieve an accuracy of 88% while trained only on a 10% subset of the cifar-10 dataset in less than 10 minutes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/385.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/385.txt new file mode 100644 index 0000000000000000000000000000000000000000..1b353c5fb934654b49b713188dd2fc06f193b2b5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/385.txt @@ -0,0 +1 @@ + we present a natural extension to e(n)-equivariant graph neural networks that uses multiple equivariant vectors per node. we formulate the extension and show that it improves performance across different physical systems benchmark tasks, with minimal differences in runtime or number of parameters. the proposed multi-channel egnn outperforms the standard single-channel egnn on n-body charged particle dynamics, molecular property predictions, and predicting the trajectories of solar system bodies. given the additional benefits and minimal additional cost of multi-channel egnn, we suggest that this extension may be of practical use to researchers working in machine learning for the physical sciences. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/386.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/386.txt new file mode 100644 index 0000000000000000000000000000000000000000..d91b0ee86488772e1cd318dc7ceecf2b64b1e86c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/386.txt @@ -0,0 +1 @@ + sparse logistic regression performs classification and feature selection simultaneously. although many studies have been done to solve ℓ1-regularized logistic regression, there is no equivalently abundant work on solving sparse logistic regression with a nonconvex regularization term. in this paper, we propose a unified framework to solve ℓ1-regularized logistic regression, which can be naturally extended to nonconvex regularization terms, as long as a certain requirement is satisfied. in addition, we also utilize a different line search criterion to guarantee monotone convergence for various regularization terms. empirical experiments on binary classification tasks with real-world datasets demonstrate our proposed algorithms are capable of performing classification and feature selection effectively at a lower computational cost. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/387.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/387.txt new file mode 100644 index 0000000000000000000000000000000000000000..1d279b55d8185f459874f51dfa602c56879bb6c7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/387.txt @@ -0,0 +1 @@ + thermodynamic equations of state (eos) are essential for many industries as well as in academia. 
even leaving aside the expensive and extensive measurement campaigns required for the data acquisition, the development of eos is an intensely time-consuming process, which often still relies heavily on expert knowledge and iterative fine-tuning. to improve upon and accelerate the eos development process, we introduce thermodynamics-informed symbolic regression (tisr), a symbolic regression (sr) tool aimed at thermodynamic eos modeling. tisr is already a capable sr tool, which was used in the research of frotscher et al., 2023. it aims to combine an sr base with the extensions required to work with often strongly scattered experimental data, different residual pre- and postprocessing options, and additional features required to consider thermodynamic eos development. although tisr is not ready for end users yet, this paper is intended to report on its current state, showcase the progress, and discuss (distant and not so distant) future directions. tisr is available at https://github.com/scoop-group/tisr and can be cited as martinek, 2023. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/388.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/388.txt new file mode 100644 index 0000000000000000000000000000000000000000..5f98e846c56f4659dac09ee6ca8ba4c113b25662 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/388.txt @@ -0,0 +1 @@ + in online ranking, a learning algorithm sequentially ranks a set of items and receives feedback on its ranking in the form of relevance scores. since obtaining relevance scores typically involves human annotation, it is of great interest to consider a partial feedback setting where feedback is restricted to the top-k items in the rankings. chaudhuri and tewari developed a framework to analyze online ranking algorithms with top-k feedback. a key element in their work was the use of techniques from partial monitoring. in this work, we further investigate online ranking with top-k feedback and solve some open problems posed by chaudhuri and tewari. we provide a full characterization of minimax regret rates with the top-k feedback model for all k and for the following ranking \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/389.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/389.txt new file mode 100644 index 0000000000000000000000000000000000000000..eef46b0ed56628aa91d09b5e33a20b3898ad69c5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/389.txt @@ -0,0 +1 @@ + deep neural networks have a good success record and are thus viewed as the best architecture choice for complex applications. their main shortcoming has been, for a long time, the vanishing gradient which prevented the numerical optimization algorithms from acceptable convergence. a breakthrough has been achieved by the concept of residual connections-an identity mapping parallel to a conventional layer. this concept is applicable to stacks of layers of the same dimension and substantially alleviates the vanishing gradient problem. a stack of residual connection layers can be expressed as an expansion of terms similar to the taylor expansion. this expansion suggests the possibility of truncating the higher-order terms and receiving an architecture consisting of a single broad layer composed of all initially stacked layers in parallel. in other words, a sequential deep architecture is substituted by a parallel shallow one. 
prompted by this theory, we investigated the performance capabilities of the parallel architecture in comparison to the sequential one. the computer vision datasets mnist and cifar10 were used to train both architectures for a total of 6,912 combinations of varying numbers of convolutional layers, numbers of filters, kernel sizes, and other meta parameters. our findings demonstrate a surprising equivalence between the deep (sequential) and shallow (parallel) architectures. both layouts produced similar results in terms of training and validation set loss. this discovery implies that a wide, shallow architecture can potentially replace a deep network without sacrificing performance. such substitution has the potential to simplify network architectures, improve optimization efficiency, and accelerate the training process. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/39.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/39.txt new file mode 100644 index 0000000000000000000000000000000000000000..557331dd75388e9aa03ebfde56104acb21fb8128 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/39.txt @@ -0,0 +1 @@ + legal prompt engineering (lpe) or legal prompting is a process to guide and assist a large language model (llm) with performing a natural legal language processing (nllp) skill. our goal is to use lpe with llms over long legal documents for the legal judgement prediction (ljp) task. we investigate the performance of zero-shot lpe for given facts in case-texts from the european court of human rights (in english) and the federal supreme court of switzerland (in german, french and italian). our results show that zero-shot lpe is better compared to the baselines, but it still falls short compared to current state-of-the-art supervised approaches. nevertheless, the results are important, since there was 1) no explicit domain-specific data used - so we show that the transfer to the legal domain is possible for general-purpose llms, and 2) the llms were directly applied without any further training or fine-tuning - which in turn saves immensely in terms of additional computational costs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/390.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/390.txt new file mode 100644 index 0000000000000000000000000000000000000000..c781e80757262b63b6723c3862de76d363ad0110 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/390.txt @@ -0,0 +1 @@ + predicting the dynamics of chaotic systems is one of the most challenging tasks for neural networks, and machine learning in general. here we aim to predict the spatiotemporal chaotic dynamics of a high-dimensional non-linear system. in our attempt we use the tensorflow library, representing the state of the art for deep neural network training and prediction. while our results are encouraging, and show that the dynamics of the considered system can be predicted for a short time, we also indirectly discovered an unexpected and undesirable behavior of the tensorflow library. more specifically, the longer term prediction of the system's chaotic behavior quickly deteriorates and blows up due to the nondeterministic behavior of the tensorflow library. here we provide numerical evidence of the short time prediction ability, and of the longer term predictability blow up. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/391.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/391.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebe8a6c227bc655ee321fc1e30d7506eb7cadbcf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/391.txt @@ -0,0 +1 @@ + the goal of next-activity prediction is to forecast the future behavior of running process instances. recent publications in this field predominantly employ deep learning techniques and evaluate their prediction performance using publicly available event logs. this paper presents empirical evidence that calls into question the effectiveness of these current evaluation approaches. we show that there is an enormous amount of example leakage in all of the commonly used event logs and demonstrate that the next-activity prediction task in these logs is a rather trivial one that can be solved by a naive baseline. we further argue that designing robust evaluations requires a more profound conceptual engagement with the topic of next-activity prediction, and specifically with the notion of generalization to new data. to this end, we present various prediction scenarios that necessitate different types of generalization to guide future research in this field. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/392.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/392.txt new file mode 100644 index 0000000000000000000000000000000000000000..8287515bea26b5f67dd8cf507166ee533ea116d8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/392.txt @@ -0,0 +1 @@ + for machine learning with tabular data, table transformer (tabtransformer) is a state-of-the-art neural network model, while differential privacy (dp) is an essential component to ensure data privacy. in this paper, we explore the benefits of combining these two aspects together in the scenario of transfer learning -differentially private pre-training and fine-tuning of tabtransformers with a variety of parameterefficient fine-tuning (peft) methods, including adapter, lora, and prompt tuning. our extensive experiments on the acsincome dataset show that these peft methods outperform traditional approaches in terms of the accuracy of the downstream task and the number of trainable parameters, thus achieving an improved trade-off among parameter efficiency, privacy, and accuracy. our code is available at https://github.com/ibm/dp-tabtransformer. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/393.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/393.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1e424111f9d71d4ced0133d8b5639ce044f4fb8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/393.txt @@ -0,0 +1 @@ + in this work, we assess the theoretical limitations of determining guaranteed stability and accuracy of neural networks in classification tasks. we consider classical distribution-agnostic framework and algorithms minimising empirical risks and potentially subjected to some weights regularisation. we show that there is a large family of tasks for which computing and verifying ideal stable and accurate neural networks in the above settings is extremely challenging, if at all possible, even when such ideal solutions exist within the given class of neural architectures. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/394.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/394.txt new file mode 100644 index 0000000000000000000000000000000000000000..37e92cf0d7a188a34a563718346ca96bc5847b2e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/394.txt @@ -0,0 +1 @@ + in the dynamic and uncertain environments where reinforcement learning (rl) operates, risk management becomes a crucial factor in ensuring reliable decision-making. traditional rl approaches, while effective in reward optimization, often overlook the landscape of potential risks. in response, this paper pioneers the integration of optimal transport (ot) theory with rl to create a risk-aware framework. our approach modifies the objective function, ensuring that the resulting policy not only maximizes expected rewards but also respects risk constraints dictated by ot distances between state visitation distributions and the desired risk profiles. by leveraging the mathematical precision of ot, we offer a formulation that elevates risk considerations alongside conventional rl objectives. our contributions are substantiated with a series of theorems, mapping the relationships between risk distributions, optimal value functions, and policy behaviors. through the lens of ot, this work illuminates a promising direction for rl, ensuring a balanced fusion of reward pursuit and risk awareness. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/395.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/395.txt new file mode 100644 index 0000000000000000000000000000000000000000..25d046fb040207f69531b15c464b3712b6197352 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/395.txt @@ -0,0 +1 @@ + stochastic neighbor embedding (sne) methods t-sne, umap are two most popular dimensionality reduction methods for data visualization. contrastive learning, especially self-supervised contrastive learning (sscl), has showed great success in embedding features from unlabeled data. the conceptual connection between sne and sscl has been exploited. in this work, within the scope of preserving neighboring information of a dataset, we extend the self-supervised contrastive approach to the fully-supervised setting, allowing us to effectively leverage label information. clusters of samples belonging to the same class are pulled together in low-dimensional embedding space, while simultaneously pushing apart clusters of samples from different classes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/396.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/396.txt new file mode 100644 index 0000000000000000000000000000000000000000..cfe122d3d3f85d628fc0eac32b4475bd62d83d8b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/396.txt @@ -0,0 +1 @@ + in this paper we propose a framework towards achieving two intertwined objectives: (i) equipping reinforcement learning with active exploration and deliberate information gathering, such that it regulates state and parameter uncertainties resulting from modeling mismatches and noisy sensory; and (ii) overcoming the huge computational cost of stochastic optimal control. we approach both objectives by using reinforcement learning to attain the stochastic optimal control law. 
on one hand, we avoid the curse of dimensionality prohibiting the direct solution of the stochastic dynamic programming equation. on the other hand, the resulting stochastic control inspired reinforcement learning agent admits the behavior of a dual control, namely, caution and probing, that is, regulating the state estimate together with its estimation quality. unlike exploration and exploitation, caution and probing are employed automatically by the controller in real-time, even after the learning process is concluded. we use the proposed approach on a numerical example of a model that belongs to an emerging class in system identification. we show how, for the dimensionality of the stochastic version of this model, dynamic programming is prohibitive, model predictive control requires an expensive nonlinear optimization, and a linear quadratic regulator with the certainty equivalence assumption leads to poor performance and filter divergence, all contrasting our approach which is shown to be both: computationally convenient, stabilizing and of an acceptable performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/397.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/397.txt new file mode 100644 index 0000000000000000000000000000000000000000..4cf1b73cc393e9be89742e8482c68d446530e640 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/397.txt @@ -0,0 +1 @@ + any company's human resources department faces the challenge of predicting whether an applicant will search for a new job or stay with the company. in this paper, we discuss how machine learning (ml) is used to predict who will move to a new job. first, the data is pre-processed into a suitable format for ml models. to deal with categorical features, data encoding is applied and several mla (ml algorithms) are performed including random forest (rf), logistic regression (lr), decision tree (dt), and extreme gradient boosting (xgboost). to improve the performance of ml models, the synthetic minority oversampling technique (smote) is used to retain them. models are assessed using decision support metrics such as precision, recall, f1-score, and accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/398.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/398.txt new file mode 100644 index 0000000000000000000000000000000000000000..f738ba2d4f88f94d712f50bd923a4ec0dbedd37e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/398.txt @@ -0,0 +1 @@ + protein folding is the intricate process by which a linear sequence of amino acids self-assembles into a unique threedimensional structure. protein folding kinetics is the study of pathways and time-dependent mechanisms a protein undergoes when it folds. understanding protein kinetics is essential as a protein needs to fold correctly for it to perform its biological functions optimally, and a misfolded protein can sometimes be contorted into shapes that are not ideal for a cellular environment giving rise to many degenerative, neuro-degenerative disorders and amyloid diseases. monitoring at-risk individuals and detecting protein discrepancies in a protein's folding kinetics at the early stages could majorly result in public health benefits, as preventive measures can be taken. this research proposes an efficient pipeline for predicting protein folding kinetics with high accuracy and low memory footprint. 
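The pipeline described in 397.txt above (categorical encoding, SMOTE oversampling, several classifiers, precision/recall/F1/accuracy) maps onto standard scikit-learn and imbalanced-learn calls. A hedged sketch with hypothetical file and column names (hr_applicants.csv, will_move); the remaining classifiers from the abstract would slot into the same loop:

import pandas as pd
from imblearn.over_sampling import SMOTE
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split

df = pd.read_csv("hr_applicants.csv")                    # hypothetical dataset file
X = pd.get_dummies(df.drop(columns=["will_move"]))       # one-hot encode categorical features
y = df["will_move"]                                      # hypothetical binary target column
X_tr, X_te, y_tr, y_te = train_test_split(X, y, stratify=y, random_state=0)
X_res, y_res = SMOTE(random_state=0).fit_resample(X_tr, y_tr)   # oversample the minority class
for model in (RandomForestClassifier(random_state=0), LogisticRegression(max_iter=1000)):
    model.fit(X_res, y_res)
    print(type(model).__name__)
    print(classification_report(y_te, model.predict(X_te)))     # precision, recall, F1, accuracy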
the deployed machine learning (ml) model outperformed the state-of-the-art ml models by 4.8% in terms of accuracy while consuming 327x lesser memory and being 7.3% faster. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/399.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/399.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3390c09d8a1909e37a086f6bf1155729bf2587c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/399.txt @@ -0,0 +1 @@ + the task of preserving privacy while ensuring efficient communication is a fundamental challenge in federated learning. in this work, we tackle this challenge in the trusted aggregator model, and propose a solution that achieves both objectives simultaneously. we show that employing a quantization scheme based on subtractive dithering at the clients can effectively replicate the normal noise addition process at the aggregator. this implies that we can guarantee the same level of differential privacy against other clients while substantially reducing the amount of communication required, as opposed to transmitting full precision gradients and using central noise addition. we also experimentally demonstrate that the accuracy of our proposed approach matches that of the full precision gradient method. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/4.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/4.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c591f1f7ef67510d3a291a3dc7402d8720db807 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/4.txt @@ -0,0 +1 @@ + this work considers the sample complexity of obtaining an ε-optimal policy in an average reward markov decision process (amdp), given access to a generative model (simulator). when the groundtruth mdp is weakly communicating, we prove an upper bound of o(hε -3 ln 1 δ ) samples per state-action pair, where h := sp(h * ) is the span of bias of any optimal policy, ε is the accuracy and δ is the failure probability. this bound improves the best-known mixing-time-based approaches in jin and sidford , which assume the mixing-time of every deterministic policy is bounded. the core of our analysis is a proper reduction bound from amdp problems to discounted mdp (dmdp) problems, which may be of independent interests since it allows the application of dmdp algorithms for amdp in other settings. we complement our upper bound by proving a minimax lower bound of ω(|s||a|hε -2 ln 1 δ ) total samples, showing that a linear dependent on h is necessary and that our upper bound matches the lower bound in all parameters of (|s|, |a|, h, ln 1 δ ) up to some logarithmic factors. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/40.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/40.txt new file mode 100644 index 0000000000000000000000000000000000000000..a34e64bf17be5503ec0979137785c26ab0f6afdf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/40.txt @@ -0,0 +1 @@ + we present a way to create small yet difficult model counting instances. our generator is highly parameterizable: the number of variables of the instances it produces, as well as their number of clauses and the number of literals in each clause, can all be set to any value. 
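The mechanism referred to in 399.txt above, quantization with subtractive dithering so that the reconstruction error at the aggregator behaves like added noise, can be illustrated in a few NumPy lines. This sketch only shows the shared-dither quantize/dequantize step; the paper's privacy accounting and federated protocol are not reproduced here:

import numpy as np

def client_quantize(grad, step, seed):
    """Add a uniform dither shared with the server, then round onto the quantization grid."""
    dither = np.random.default_rng(seed).uniform(-step / 2, step / 2, size=grad.shape)
    return np.round((grad + dither) / step).astype(np.int32)      # integers are all that is sent

def server_dequantize(q, step, seed):
    """Subtract the same dither; the residual error is uniform and independent of the gradient."""
    dither = np.random.default_rng(seed).uniform(-step / 2, step / 2, size=q.shape)
    return q * step - dither

g = np.random.randn(5)
print(g)
print(server_dequantize(client_quantize(g, step=0.05, seed=42), step=0.05, seed=42))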
our instances have been tested on state of the art model counters, against other difficult model counting instances, in the model counting competition. the smallest unsolved instances of the competition, both in terms of number of variables and number of clauses, were ours. we also observe a peak of difficulty when fixing the number of variables and varying the number of clauses, in both random instances and instances built by our generator. using these results, we predict the parameter values for which the hardest to count instances will occur. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/400.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/400.txt new file mode 100644 index 0000000000000000000000000000000000000000..31f59ce931d48988ae9b3021c8ae53c98bb2c5d8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/400.txt @@ -0,0 +1 @@ + as a modern ensemble technique, deep forest (df) employs a cascading structure to construct deep models, providing stronger representational power compared to traditional decision forests. however, its greedy multi-layer learning procedure is prone to overfitting, limiting model effectiveness and generalizability. this paper presents an optimized deep forest, featuring learnable, layerwise data augmentation policy schedules. specifically, we introduce the cut mix for tabular data (cmt) augmentation technique to mitigate overfitting and develop a population-based search algorithm to tailor augmentation intensity for each layer. additionally, we propose to incorporate outputs from intermediate layers into a checkpoint ensemble for more stable performance. experimental results show that our method sets new state-ofthe-art (sota) benchmarks in various tabular classification tasks, outperforming shallow tree ensembles, deep forests, deep neural network, and automl competitors. the learned policies also transfer effectively to deep forest variants, underscoring its potential for enhancing non-differentiable deep learning modules in tabular signal processing. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/401.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/401.txt new file mode 100644 index 0000000000000000000000000000000000000000..031219a4d6e418d3df7a628227a0b710cf19961c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/401.txt @@ -0,0 +1 @@ + we present a novel method for initializing layers of tensorized neural networks in a way that avoids the explosion of the parameters of the matrix it emulates. the method is intended for layers with a high number of nodes in which there is a connection to the input or output of all or most of the nodes.the core of this method is the use of the frobenius norm of this layer in an iterative partial form, so that it has to be finite and within a certain range. this norm is efficient to compute, fully or partially for most cases of interest. we apply the method to different layers and check its performance. we create a python function to run it on an arbitrary layer, available in a jupyter notebook in the i3bquantum repository github.com/i3bquantumteam/q4realaccepted in quantum 9999-99-99, click title to verify. published under cc-by 4.0. 
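40.txt above describes a generator parameterized by the number of variables, the number of clauses, and the number of literals per clause. The paper's construction is not spelled out in the abstract, so the following is only a generic uniform random k-CNF generator in DIMACS format showing what such parameterization looks like:

import random

def random_cnf(num_vars, num_clauses, clause_len, seed=0):
    """Uniform random CNF in DIMACS format, parameterized by the three quantities named
    in the abstract (this is a generic generator, not the paper's construction)."""
    rng = random.Random(seed)
    lines = [f"p cnf {num_vars} {num_clauses}"]
    for _ in range(num_clauses):
        chosen = rng.sample(range(1, num_vars + 1), clause_len)
        lines.append(" ".join(str(v if rng.random() < 0.5 else -v) for v in chosen) + " 0")
    return "\n".join(lines)

print(random_cnf(num_vars=20, num_clauses=60, clause_len=3))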
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/402.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/402.txt new file mode 100644 index 0000000000000000000000000000000000000000..1f758916340db8e98ca113ced5465335b8470f65 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/402.txt @@ -0,0 +1 @@ + we prove the converse of the universal approximation theorem, i.e. a neural network (nn) encoding theorem which shows that for every stably converged nn of continuous activation functions, its weight matrix actually encodes a continuous function that approximates its training dataset to within a finite margin of error over a bounded domain. we further show that using the eckart-young theorem for truncated singular value decomposition of the weight matrix for every nn layer, we can illuminate the nature of the latent space manifold of the training dataset encoded and represented by every nn layer, and the geometric nature of the mathematical operations performed by each nn layer. our results have implications for understanding how nns break the curse of dimensionality by harnessing memory capacity for expressivity, and that the two are complementary. this layer matrix decomposition (lmd) further suggests a close relationship between eigen-decomposition of nn layers and the latest advances in conceptualizations of hopfield networks and transformer nn models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/403.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/403.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e56bbb7d2d4ff652831527879fb2a2ae8d9f5a3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/403.txt @@ -0,0 +1 @@ + in this work, we explored federated learning in temporal heterogeneity across clients. we observed that global model obtained by fedavg trained with fixedlength sequences shows faster convergence than varying-length sequences. we proposed methods to mitigate temporal heterogeneity for efficient federated learning based on the empirical observation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/404.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/404.txt new file mode 100644 index 0000000000000000000000000000000000000000..1e05e817615a038e248b7e1654bd8e6437127ca3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/404.txt @@ -0,0 +1 @@ + initialization of neural network weights plays a pivotal role in determining their performance. feature imitating networks (fins) offer a novel strategy by initializing weights to approximate specific closed-form statistical features, setting a promising foundation for deep learning architectures. while the applicability of fins has been chiefly tested in biomedical domains, this study extends its exploration into other time series datasets. three different experiments are conducted in this study to test the applicability of imitating tsallis entropy for performance enhancement: bitcoin price prediction, speech emotion recognition, and chronic neck pain detection. for the bitcoin price prediction, models embedded with fins reduced the root mean square error by around 1000 compared to the baseline. in the speech emotion recognition task, the fin-augmented model increased classification accuracy by over 3 percent. 
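The layer matrix decomposition view in 402.txt above rests on truncated singular value decomposition of each layer's weight matrix (Eckart-Young). A small NumPy sketch of that step, applied to a random stand-in for a trained weight matrix:

import numpy as np

def truncated_layer(W, rank):
    """Best rank-r approximation of a weight matrix via truncated SVD (Eckart-Young)."""
    U, s, Vt = np.linalg.svd(W, full_matrices=False)
    return (U[:, :rank] * s[:rank]) @ Vt[:rank], s

W = np.random.randn(64, 128)              # stand-in for a trained layer's weight matrix
W_r, spectrum = truncated_layer(W, rank=8)
print(np.linalg.norm(W - W_r) / np.linalg.norm(W))   # relative error of the rank-8 approximation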
lastly, in the cnp detection experiment, an improvement of about 7 percent was observed compared to established classifiers. these findings validate the broad utility and potency of fins in diverse applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/405.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/405.txt new file mode 100644 index 0000000000000000000000000000000000000000..54a10aa1a4a0a716494fd71649c993484194ccf4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/405.txt @@ -0,0 +1 @@ + knowledge distillation (kd) is a powerful model compression technique broadly used in practical deep learning applications. it is focused on training a small student network to mimic a larger teacher network. while it is widely known that kd can offer an improvement to student generalization in i.i.d setting, its performance under domain shift, i.e. the performance of student networks on data from domains unseen during training, has received little attention in the literature. in this paper we make a step towards bridging the research fields of knowledge distillation and domain generalization. we show that weight averaging techniques proposed in domain generalization literature, such as swad and sma, also improve the performance of knowledge distillation under domain shift. in addition, we propose a simplistic weight averaging strategy that does not require evaluation on validation data during training and show that it performs on par with swad and sma when applied to kd. we name our final distillation approach weight-averaged knowledge distillation (wakd). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/406.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/406.txt new file mode 100644 index 0000000000000000000000000000000000000000..07d4b30bfd318804946f6002c2f29376be0c3861 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/406.txt @@ -0,0 +1 @@ + the transformer architecture is widely used in machine learning models and consists of two alternating sublayers: attention heads and mlps. we prove that an mlp neuron can be implemented by a masked attention head with internal dimension 1 so long as the mlp's activation function comes from a restricted class including silu and close approximations of relu and gelu. this allows one to convert an mlpand-attention transformer into an attention-only transformer at the cost of greatly increasing the number of attention heads. we also prove that attention heads can perform the components of an mlp (linear transformations and activation functions) separately. finally, we prove that attention heads can encode arbitrary masking patterns in their weight matrices to within arbitrarily small error. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/407.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/407.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6eccb3f108170f3b5fe3178b53342c904d88f53 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/407.txt @@ -0,0 +1 @@ + federated learning (fl) has emerged as a promising approach to enable collaborative learning among multiple clients while preserving data privacy. however, cross-domain fl tasks, where clients possess data from different domains or distributions, remain a challenging problem due to the inherent heterogeneity. 
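The weight averaging used in 405.txt above (SWAD/SMA style, and the paper's simpler validation-free variant) reduces to maintaining a running mean of student parameters during distillation. A minimal sketch, with the distillation loop itself left as comments since it is not specified here:

import copy
import torch

@torch.no_grad()
def update_running_average(avg_model, model, num_averaged):
    """Running mean of parameters: avg <- (avg * n + current) / (n + 1)."""
    for p_avg, p in zip(avg_model.parameters(), model.parameters()):
        p_avg.mul_(num_averaged).add_(p).div_(num_averaged + 1)
    return num_averaged + 1

# sketch of use inside a distillation loop (student, teacher, loader are assumed to exist):
# avg_student, n = copy.deepcopy(student), 0
# for step, batch in enumerate(loader):
#     ...one knowledge distillation step updating `student`...
#     n = update_running_average(avg_student, student, n)
# torch.optim.swa_utils.AveragedModel provides comparable functionality out of the box.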
in this paper, we present unideal, a novel fl algorithm specifically designed to tackle the challenges of cross-domain scenarios and heterogeneous model architectures. the proposed method introduces adjustable teacher-student mutual evaluation curriculum learning, which significantly enhances the effectiveness of knowledge distillation in fl settings. we conduct extensive experiments on various datasets, comparing unideal with state-of-the-art baselines. our results demonstrate that unideal achieves superior performance in terms of both model accuracy and communication efficiency. additionally, we provide a convergence analysis of the algorithm, showing a convergence rate of o( 1t ) under non-convex conditions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/408.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/408.txt new file mode 100644 index 0000000000000000000000000000000000000000..e613904351cf57ea30cda23670c576fae8e0ba51 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/408.txt @@ -0,0 +1 @@ + under missing-not-at-random (mnar) sample selection bias, the performance of a prediction model is often degraded. this paper focuses on one classic instance of mnar sample selection bias where a subset of samples have nonrandomly missing outcomes. the heckman selection model and its variants have commonly been used to handle this type of sample selection bias. the heckman model uses two separate equations to model the prediction and selection of samples, where the selection features include all prediction features. when using the heckman model, the prediction features must be properly chosen from the set of selection features. however, choosing the proper prediction features is a challenging task for the heckman model. this is especially the case when the number of selection features is large. existing approaches that use the heckman model often provide a manually chosen set of prediction features. in this paper, we propose heckman-fa as a novel data-driven framework for obtaining prediction features for the heckman model. heckman-fa first trains an assignment function that determines whether or not a selection feature is assigned as a prediction feature. using the parameters of the trained function, the framework extracts a suitable set of prediction features based on the goodness-of-fit of the prediction model given the chosen prediction features and the correlation between noise terms of the prediction and selection equations. experimental results on real-world datasets show that heckman-fa produces a robust regression model under mnar sample selection bias. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/409.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/409.txt new file mode 100644 index 0000000000000000000000000000000000000000..10118d108348a31ef358ed94ec7c6604e9d47c37 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/409.txt @@ -0,0 +1 @@ + the problem of predicting the training time of machine learning (ml) models has become extremely relevant in the scientific community. being able to predict a priori the training time of an ml model would enable the automatic selection of the best model both in terms of energy efficiency and in terms of performance in the context of, for instance, mlops architectures. in this paper, we present the work we are conducting towards this direction. 
in particular, we present an extensive empirical study of the full parameter time complexity (fptc) approach by zheng et al., which is, to the best of our knowledge, the only approach formalizing the training time of ml models as a function of both dataset's and model's parameters. we study the formulations proposed for the logistic regression and random forest classifiers, and we highlight the main strengths and weaknesses of the approach. finally, we observe how, from the conducted study, the prediction of training time is strictly related to the context (i.e., the involved dataset) and how the fptc approach is not generalizable. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/41.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/41.txt new file mode 100644 index 0000000000000000000000000000000000000000..fb32bf072e5bb25781f5a39d4c48327d90c8c989 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/41.txt @@ -0,0 +1 @@ + in this paper, we study the robust optimization for sequence networked submodular maximization (rosenets) problem. we interweave the robust optimization with the sequence networked submodular maximization. the elements are connected by a directed acyclic graph and the objective function is not submodular on the elements but on the edges in the graph. under such networked submodular scenario, the impact of removing an element from a sequence depends both on its position in the sequence and in the network. this makes the existing robust algorithms inapplicable. in this paper, we take the first step to study the rosenets problem. we design a robust greedy algorithm, which is robust against the removal of an arbitrary subset of the selected elements. the approximation ratio of the algorithm depends both on the number of the removed elements and the network topology. we further conduct experiments on real applications of recommendation and link prediction. the experimental results demonstrate the effectiveness of the proposed algorithm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/410.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/410.txt new file mode 100644 index 0000000000000000000000000000000000000000..3ca98fb0b25b44165489e7184bfef32be7f0c47a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/410.txt @@ -0,0 +1 @@ + reinforcement learning has been increasingly applied in monitoring applications because of its ability to learn from previous experiences and can make adaptive decisions. however, existing machine learning-based health monitoring applications are mostly supervised learning algorithms, trained on labels and they cannot make adaptive decisions in an uncertain complex environment. this study proposes a novel and generic system, predictive deep reinforcement learning (pdrl) with multiple rl agents in a time series forecasting environment. the proposed generic framework accommodates virtual deep q network (dqn) agents to monitor predicted future states of a complex environment with a well-defined reward policy so that the agent learns existing knowledge while maximizing their rewards. in the evaluation process of the proposed framework, three drl agents were deployed to monitor a subject's future heart rate, respiration, and temperature predicted using a bil-stm model. with each iteration, the three agents were able to learn the associated patterns and their cumulative rewards gradually increased. 
it outperformed the baseline models for all three monitoring agents. the proposed pdrl framework is able to achieve state-of-the-art performance in the time series forecasting process. the proposed drl agents and deep learning model in the pdrl framework are customized to implement the transfer learning in other forecasting applications like traffic and weather and monitor their states. the pdrl framework is able to learn the future states of the traffic and weather forecasting and the cumulative rewards are gradually increasing over each episode. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/411.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/411.txt new file mode 100644 index 0000000000000000000000000000000000000000..64c73b2604a06fcfdd360d6817c1f6af6e44ae3e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/411.txt @@ -0,0 +1 @@ + clustering techniques have been the key drivers of data mining, machine learning and pattern recognition for decades. one of the most popular clustering algorithms is dbscan due to its high accuracy and noise tolerance. many superior algorithms such as dbscan have input parameters that are hard to estimate. therefore, finding those parameters is a time consuming process. in this paper, we propose a novel clustering algorithm 'bacteria-farm', which balances the performance and ease of finding the optimal parameters for clustering. bacteria-farm algorithm is inspired by the growth of bacteria in closed experimental farms -their ability to consume food and grow -which closely represents the ideal cluster growth desired in clustering algorithms. in addition, the algorithm features a modular design to allow the creation of versions of the algorithm for specific tasks / distributions of data. in contrast with other clustering algorithms, our algorithm also has a provision to specify the amount of noise to be excluded during clustering. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/412.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/412.txt new file mode 100644 index 0000000000000000000000000000000000000000..b7ab35d078693fa50d34f076c165ddf480a5023c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/412.txt @@ -0,0 +1 @@ + reinforcement learning is well known for its ability to model sequential tasks and learn latent data patterns adaptively. deep learning models have been widely explored and adopted in regression and classification tasks. however, deep learning has its limitations such as the assumption of equally spaced and ordered data, and the lack of ability to incorporate graph structure in terms of time-series prediction. graphical neural network (gnn) has the ability to overcome these challenges and capture the temporal dependencies in time-series data. in this study, we propose a novel approach for predicting time-series data using gnn and monitoring with reinforcement learning (rl). gnns are able to explicitly incorporate the graph structure of the data into the model, allowing them to capture temporal dependencies in a more natural way. this approach allows for more accurate predictions in complex temporal structures, such as those found in healthcare, traffic and weather forecasting. we also fine-tune our graphrl model using a bayesian optimisation technique to further improve performance. the proposed framework outperforms the baseline models in timeseries forecasting and monitoring. 
the contributions of this study include the introduction of a novel graphrl framework for timeseries prediction and the demonstration of the effectiveness of gnns in comparison to traditional deep learning models such as rnns and lstms. overall, this study demonstrates the potential of graphrl in providing accurate and efficient predictions in dynamic rl environments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/413.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/413.txt new file mode 100644 index 0000000000000000000000000000000000000000..b1b41d2d5e1ba81245695240b9a200cb509fb050 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/413.txt @@ -0,0 +1 @@ + the goal of product mapping is to decide, whether two listings from two different e-shops describe the same products. existing datasets of matching and non-matching pairs of products, however, often suffer from incomplete product information or contain only very distant non-matching products. therefore, while predictive models trained on these datasets achieve good results on them, in practice, they are unusable as they cannot distinguish very similar but non-matching pairs of products. this paper introduces two new datasets for product mapping: promapcz consisting of 1,495 czech product pairs and promapen consisting of 1,555 english product pairs of matching and non-matching products manually scraped from two pairs of e-shops. the datasets contain both images and textual descriptions of the products, including their specifications, making them one of the most complete datasets for product mapping. additionally, the non-matching products were selected in two phases, creating two types of non-matches -close non-matches and medium non-matches. even the medium non-matches are pairs of products that are much more similar than non-matches in other datasets -for example, they still need to have the same brand and similar name and price. after simple data preprocessing, several machine learning algorithms were trained on these and two the other datasets to demonstrate the complexity and completeness of promap datasets. promap datasets are presented as a golden standard for further research of product mapping filling the gaps in existing ones. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/414.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/414.txt new file mode 100644 index 0000000000000000000000000000000000000000..5e92f6a83603f3588a537463e0bc494fdf944254 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/414.txt @@ -0,0 +1 @@ + simplicial complexes prove effective in modeling data with multiway dependencies, such as data defined along the edges of networks or within other higher-order structures. their spectrum can be decomposed into three interpretable subspaces via the hodge decomposition, resulting foundational in numerous applications. we leverage this decomposition to develop a contrastive self-supervised learning approach for processing simplicial data and generating embeddings that encapsulate specific spectral information. specifically, we encode the pertinent data invariances through simplicial neural networks and devise augmentations that yield positive contrastive examples with suitable spectral properties for downstream tasks. 
additionally, we reweight the significance of negative examples in the contrastive loss, considering the similarity of their hodge components to the anchor. by encouraging a stronger separation among less similar instances, we obtain an embedding space that reflects the spectral properties of the data. the numerical results on two standard edge flow classification tasks show a superior performance even when compared to supervised learning techniques. our findings underscore the importance of adopting a spectral perspective for contrastive learning with higher-order data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/415.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/415.txt new file mode 100644 index 0000000000000000000000000000000000000000..44eeb9650b5d4a7ef59bf0756be04a8bf94daf34 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/415.txt @@ -0,0 +1 @@ + most algorithms for representation learning and link prediction on relational data are designed for static data. however, the data to which they are applied typically evolves over time, including online social networks or interactions between users and items in recommender systems. this is also the case for graph-structured knowledge bases -knowledge graphs -which contain facts that are valid only for specific points in time. in such contexts, it becomes crucial to correctly identify missing links at a precise time point, i.e. the temporal prediction link task. recently, lacroix et al. and sadeghian et al. proposed a solution to the problem of link prediction for knowledge graphs under temporal constraints inspired by the canonical decomposition of 4-order tensors, where they regularise the representations of time steps by enforcing temporal smoothing, i.e. by learning similar transformation for adjacent timestamps. however, the impact of the choice of temporal regularisation terms is still poorly understood. in this work, we systematically analyse several choices of temporal smoothing regularisers using linear functions and recurrent architectures. in our experiments, we show that by carefully selecting the temporal smoothing regulariser and regularisation weight, a simple method like tntcomplex can produce significantly more accurate results than state-of-the-art methods on three widely used temporal link prediction datasets. furthermore, we evaluate the impact of a wide range of temporal smoothing regularisers on two state-of-the-art temporal link prediction models. we observe that linear regularisers for temporal smoothing based on specific nuclear norms can significantly improve the predictive accuracy of the base temporal link prediction methods. our work shows that simple tensor factorisation models can produce new state-of-the-art results using newly proposed temporal regularisers, highlighting a promising avenue for future research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/416.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/416.txt new file mode 100644 index 0000000000000000000000000000000000000000..370ce3a6c8195bd354c7bb33ed951d92838db82b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/416.txt @@ -0,0 +1 @@ + detecting covariate drift in text data is essential for maintaining the reliability and performance of text analysis models. 
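The temporal smoothing regularizers discussed in 415.txt above penalize differences between embeddings of adjacent timestamps. One common linear form, shown here as a PyTorch penalty term that would be added to the link prediction loss (the paper's specific nuclear-norm variants are not reproduced):

import torch

def temporal_smoothing_penalty(time_embeddings, p=2):
    """Penalize differences between adjacent timestamp embeddings: sum_t ||T_{t+1} - T_t||_p^p."""
    diffs = time_embeddings[1:] - time_embeddings[:-1]
    return diffs.abs().pow(p).sum()

# usage: total_loss = link_prediction_loss + reg_weight * temporal_smoothing_penalty(model.time_emb.weight)
T = torch.randn(100, 32, requires_grad=True)       # stand-in for 100 timestamp embeddings
print(temporal_smoothing_penalty(T))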
in this research, we investigate the effectiveness of different document embeddings, dimensionality reduction techniques, and drift detection methods for identifying covariate drift in text data. we explore three popular document embeddings: term frequency-inverse document frequency (tf-idf) using latent semantic analysis(lsa) for dimentionality reduction and doc2vec, and bert embeddings, with and without using principal component analysis (pca) for dimensionality reduction. to quantify the divergence between training and test data distributions, we employ the kolmogorov-smirnov (ks) statistic and the maximum mean discrepancy (mmd) test as drift detection methods. experimental results demonstrate that certain combinations of embeddings, dimensionality reduction techniques, and drift detection methods outperform others in detecting covariate drift. our findings contribute to the advancement of reliable text analysis models by providing insights into effective approaches for addressing covariate drift in text data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/417.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/417.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0ca1e588f3027ed934a60a40bae2da01f6a07f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/417.txt @@ -0,0 +1 @@ + in continual learning (cl), an ai agent (e.g., autonomous vehicles or robotics) learns from non-stationary data streams under dynamic environments. for the practical deployment of such applications, it is important to guarantee robustness to unseen environments while maintaining past experiences. in this paper, a novel cl framework is proposed to achieve robust generalization to dynamic environments while retaining past knowledge. the considered cl agent uses a capacity-limited memory to save previously observed environmental information to mitigate forgetting issues. then, data points are sampled from the memory to estimate the distribution of risks over environmental change so as to obtain predictors that are robust with unseen changes. the generalization and memorization performance of the proposed framework are theoretically analyzed. this analysis showcases the tradeoff between memorization and generalization with the memory size. experiments show that the proposed algorithm outperforms memory-based cl baselines across all environments while significantly improving the generalization performance on unseen target environments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/418.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/418.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/419.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/419.txt new file mode 100644 index 0000000000000000000000000000000000000000..f7beabbe711316ca275dcc3d612ead7dc9451707 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/419.txt @@ -0,0 +1 @@ + in this paper, we approach the problem of cost (loss) minimization in underparametrized shallow neural networks through the explicit construction of upper bounds, without any use of gradient descent. a key focus is on elucidating the geometric structure of approximate and precise minimizers. 
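416.txt above compares drift detectors built from the Kolmogorov-Smirnov statistic and maximum mean discrepancy applied to document embeddings. A sketch of both statistics with NumPy and SciPy; the MMD here is the simple plug-in RBF estimator, which may differ from the exact test used in the paper:

import numpy as np
from scipy.stats import ks_2samp

def ks_drift_scores(train_emb, test_emb):
    """Per-dimension two-sample Kolmogorov-Smirnov statistics between the two embedding sets."""
    return np.array([ks_2samp(train_emb[:, j], test_emb[:, j]).statistic
                     for j in range(train_emb.shape[1])])

def mmd_rbf(x, y, gamma=1.0):
    """Plug-in MMD^2 estimate with an RBF kernel, another common drift statistic."""
    def k(a, b):
        return np.exp(-gamma * ((a[:, None, :] - b[None, :, :]) ** 2).sum(-1))
    return k(x, x).mean() + k(y, y).mean() - 2 * k(x, y).mean()

# train_emb, test_emb would be (n, d) arrays of document embeddings (TF-IDF+LSA, doc2vec, or BERT+PCA)
train_emb, test_emb = np.random.randn(200, 20), np.random.randn(150, 20) + 0.3
print(ks_drift_scores(train_emb, test_emb).max(), mmd_rbf(train_emb, test_emb))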
we consider shallow neural networks with one hidden layer, a relu activation function, an l 2 schatten class (or hilbert-schmidt) cost function, input space r m , output space r q with q ≤ m , and training input sample size n > qm that can be arbitrarily large. we prove an upper bound on the minimum of the cost function of order o(δ p ) where δ p measures the signal to noise ratio of training inputs. in the special case m = q, we explicitly determine an exact degenerate local minimum of the cost function, and show that the sharp value differs from the upper bound obtained for q ≤ m by a relative error o(δ 2 p ). the proof of the upper bound yields a constructively trained network; we show that it metrizes a particular q-dimensional subspace in the input space r m . we comment on the characterization of the global minimum of the cost function in the given context. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/42.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/42.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6a069edfdadf0cf2bcd80ecbd348ec31521e7fb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/42.txt @@ -0,0 +1 @@ + this paper discusses various types of constraints, difficulties and solutions to overcome the challenges regarding university course allocation problem. a hybrid evolutionary algorithm has been defined combining local repair algorithm and modified genetic algorithm to generate the best course assignment. after analyzing the collected dataset, all the necessary constraints were formulated. these constraints manage to cover the aspects needed to be kept in mind while preparing clash free and efficient class schedules for every faculty member. the goal is to generate an optimized solution which will fulfill those constraints while maintaining time efficiency and also reduce the workload of handling this task manually. the proposed algorithm was compared with some base level optimization algorithms to show the better efficiency in terms of accuracy and time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/420.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/420.txt new file mode 100644 index 0000000000000000000000000000000000000000..a07dad6ff1d5d769fe284912dbe4ca70364097a5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/420.txt @@ -0,0 +1 @@ + numerous recent research on graph neural networks (gnns) has focused on formulating gnn architectures as an optimization problem with the smoothness assumption. however, in node classification tasks, the smoothing effect induced by gnns tends to assimilate representations and overhomogenize labels of connected nodes, leading to adverse effects such as over-smoothing and misclassification. in this paper, we propose a novel bilevel optimization framework for gnns inspired by the notion of bregman distance. we demonstrate that the gnn layer proposed accordingly can effectively mitigate the over-smoothing issue by introducing a mechanism reminiscent of the "skip connection". we validate our theoretical results through comprehensive empirical studies in which bregman-enhanced gnns outperform their original counterparts in both homophilic and heterophilic graphs. 
furthermore, our experiments also show that bregman gnns can produce more robust learning accuracy even when the number of layers is high, suggesting the effectiveness of the proposed method in alleviating the over-smoothing issue. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/421.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/421.txt new file mode 100644 index 0000000000000000000000000000000000000000..637a6baa4dab5ef2170f5609c68086b22b889278 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/421.txt @@ -0,0 +1 @@ + intelligent agents such as robots are increasingly deployed in real-world, safety-critical settings. it is vital that these agents are able to explain the reasoning behind their decisions to human counterparts, however, their behavior is often produced by uninterpretable models such as deep neural networks. we propose an approach to generate natural language explanations for an agent's behavior based only on observations of states and actions, agnostic to the underlying model representation. we show how a compact representation of the agent's behavior can be learned and used to produce plausible explanations with minimal hallucination while affording user interaction with a pre-trained large language model. through user studies and empirical experiments, we show that our approach generates explanations as helpful as those generated by a human domain expert while enabling beneficial interactions such as clarification and counterfactual queries. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/422.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/422.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbf33689d6465bb70da4967271db47df5ae898ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/422.txt @@ -0,0 +1 @@ + artificial intelligence models and methods commonly lack causal interpretability. despite the advancements in interpretable machine learning (iml) methods, they frequently assign importance to features which lack causal influence on the outcome variable. selecting causally relevant features among those identified as relevant by these methods, or even before model training, would offer a solution. feature selection methods utilizing information theoretical quantities have been successful in identifying statistically relevant features. however, the information theoretical quantities they are based on do not incorporate causality, rendering them unsuitable for such scenarios. to address this challenge, this article proposes information theoretical quantities that incorporate the causal structure of the system, which can be used to evaluate causal importance of features for some given outcome variable. specifically, we introduce causal versions of entropy and mutual information, termed causal entropy and causal information gain, which are designed to assess how much control a feature provides over the outcome variable. these newly defined quantities capture changes in the entropy of a variable resulting from interventions on other variables. fundamental results connecting these quantities to the existence of causal effects are derived. the use of causal information gain in feature selection is demonstrated, highlighting its superiority over standard mutual information in revealing which features provide control over a chosen outcome variable. 
our investigation paves the way for the development of methods with improved interpretability in domains involving causation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/423.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/423.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/424.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/424.txt new file mode 100644 index 0000000000000000000000000000000000000000..377df39c0f9936738048633e9d27e761e8ab6bc9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/424.txt @@ -0,0 +1 @@ + existing research has either adapted the probably approximately correct (pac) bayesian framework for federated learning (fl) or used information-theoretic pac-bayesian bounds while introducing their theorems, but few considering the non-iid challenges in fl. our work presents the first nonvacuous federated pac-bayesian bound tailored for non-iid local data. this bound assumes unique prior knowledge for each client and variable aggregation weights. we also introduce an objective function and an innovative gibbs-based algorithm for the optimization of the derived bound. the results are validated on real-world datasets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/425.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/425.txt new file mode 100644 index 0000000000000000000000000000000000000000..882cade7f5f14d9806efb8daba7a5743e9ea8938 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/425.txt @@ -0,0 +1 @@ + the covid-19 pandemic has brought to light a concerning aspect of long-term neurological complications in post-recovery patients. this study delved into the investigation of such neurological sequelae in a cohort of 500 post-covid-19 patients, encompassing individuals with varying illness severity. the primary aim was to predict outcomes using a machine learning approach based on diverse clinical data and neuroimaging parameters. the results revealed that 68% of the post-covid-19 patients reported experiencing neurological symptoms, with fatigue, headache, and anosmia being the most common manifestations. moreover, 22% of the patients exhibited more severe neurological complications, including encephalopathy and stroke. the application of machine learning models showed promising results in predicting long-term neurological outcomes. notably, the random forest model achieved an accuracy of 85%, sensitivity of 80%, and specificity of 90% in identifying patients at risk of developing neurological sequelae. these findings underscore the importance of continuous monitoring and follow-up care for post-covid-19 patients, particularly in relation to potential neurological complications. the integration of machine learning-based outcome prediction offers a valuable tool for early intervention and personalized treatment strategies, aiming to improve patient care and clinical decision-making. in conclusion, this study sheds light on the prevalence of long-term neurological complications in post-covid-19 patients and demonstrates the potential of machine learning in predicting outcomes, thereby contributing to enhanced patient management and better health outcomes. 
further research and larger studies are warranted to validate and refine these predictive models and to gain deeper insights into the underlying mechanisms of post-covid-19 neurological sequelae. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/426.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/426.txt new file mode 100644 index 0000000000000000000000000000000000000000..55801eec115a116d7b137bd20d3f1269a9d53cab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/426.txt @@ -0,0 +1 @@ + in this paper, we exploit the unique properties of a deterministic projected belief network (d-pbn) to take full advantage of trainable compound activation functions (tcas). a d-pbn is a type of auto-encoder that operates by "backing up" through a feed-forward neural network. tcas are activation functions with complex monotonic-increasing shapes that change the distribution of the data so that the linear transformation that follows is more effective. because a d-pbn operates by "backing up", the tcas are inverted in the reconstruction process, restoring the original distribution of the data, thus taking advantage of a given tca in both analysis and reconstruction. in this paper, we show that a d-pbn auto-encoder with tcas can significantly out-perform standard auto-encoders including variational auto-encoders. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/427.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/427.txt new file mode 100644 index 0000000000000000000000000000000000000000..800d125e2b239adb3eee59ecc503c31f4af4c688 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/427.txt @@ -0,0 +1 @@ + accelerating the learning of partial differential equations (pdes) from experimental data will speed up the pace of scientic discovery. previous randomized algorithms exploit sparsity in pde updates for acceleration. however such methods are applicable to a limited class of decomposable pdes, which have sparse features in the value domain. we propose reel, which accelerates the learning of pdes via random projection and has much broader applicability. reel exploits the sparsity by decomposing dense updates into sparse ones in both the value and frequency domains. this decomposition enables efcient learning when the source of the updates consists of gradually changing terms across large areas (sparse in the frequency domain) in addition to a few rapid updates concentrated in a small set of "interfacial" regions (sparse in the value domain). random projection is then applied to compress the sparse signals for learning. to expand the model applicability, taylor series expansion is used in reel to approximate the nonlinear pde updates with polynomials in the decomposable form. theoretically, we derive a constant factor approximation between the projected loss function and the original one with poly-logarithmic number of projected dimensions. experimentally, we provide empirical evidence that our proposed reel can lead to faster learning of pde models (70%-98% reduction in training time when the data is compressed to 1% of its original size) with comparable quality as the noncompressed models. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/428.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/428.txt new file mode 100644 index 0000000000000000000000000000000000000000..0dffc56103d3d00c8bf8a1f71a05f1b005501dda --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/428.txt @@ -0,0 +1 @@ + despite the many successful applications of deep learning models for multidimensional signal and image processing, most traditional neural networks process data represented by (multidimensional) arrays of real numbers. the intercorrelation between feature channels is usually expected to be learned from the training data, requiring numerous parameters and careful training. in contrast, vector-valued neural networks are conceived to process arrays of vectors and naturally consider the intercorrelation between feature channels. consequently, they usually have fewer parameters and often undergo more robust training than traditional neural networks. this paper aims to present a broad framework for vectorvalued neural networks, referred to as v-nets. in this context, hypercomplex-valued neural networks are regarded as vector-valued models with additional algebraic properties. furthermore, this paper explains the relationship between vector-valued and traditional neural networks. precisely, a vector-valued neural network can be obtained by placing restrictions on a real-valued model to consider the intercorrelation between feature channels. finally, we show how v-nets, including hypercomplex-valued neural networks, can be implemented in current deep-learning libraries as real-valued networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/429.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/429.txt new file mode 100644 index 0000000000000000000000000000000000000000..5b20ecf8a823340d54c645e447cd23ef7eaad76e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/429.txt @@ -0,0 +1 @@ + effective representation of data is crucial in various machine learning tasks, as it captures the underlying structure and context of the data. embeddings have emerged as a powerful technique for data representation, but evaluating their quality and capacity to preserve structural and contextual information remains a challenge. in this paper, we address this need by proposing a method to measure the representation capacity of embeddings. the motivation behind this work stems from the importance of understanding the strengths and limitations of embeddings, enabling researchers and practitioners to make informed decisions in selecting appropriate embedding models for their specific applications. by combining extrinsic evaluation methods, such as classification and clustering, with t-sne-based neighborhood analysis, such as neighborhood agreement and trustworthiness, we provide a comprehensive assessment of the representation capacity. additionally, the use of optimization techniques (bayesian optimization) for weight optimization (for classification, clustering, neighborhood agreement, and trustworthiness) ensures an objective and data-driven approach in selecting the optimal combination of metrics. the proposed method not only contributes to advancing the field of embedding evaluation but also empowers researchers and practitioners with a quantitative measure to assess the effectiveness of embeddings in capturing structural and contextual information. 
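428.txt above notes that vector-valued (including hypercomplex-valued) networks can be implemented in standard deep learning libraries as constrained real-valued networks. As one concrete instance of that statement, a quaternion-valued linear layer expands into a structured real weight matrix:

import numpy as np

def quaternion_block(a, b, c, d):
    """Real 4x4 matrix of left-multiplication by the quaternion a + b*i + c*j + d*k."""
    return np.array([[a, -b, -c, -d],
                     [b,  a, -d,  c],
                     [c,  d,  a, -b],
                     [d, -c,  b,  a]])

def quaternion_linear_as_real(Wq):
    """Expand an (out, in, 4) array of quaternion weights into the constrained real weight
    matrix of shape (4*out, 4*in) that an equivalent real-valued layer would use."""
    out_ch, in_ch, _ = Wq.shape
    W = np.zeros((4 * out_ch, 4 * in_ch))
    for o in range(out_ch):
        for i in range(in_ch):
            W[4 * o:4 * o + 4, 4 * i:4 * i + 4] = quaternion_block(*Wq[o, i])
    return W

Wq = np.random.randn(2, 3, 4)                     # 2x3 quaternion weights, 4 real parameters each
print(quaternion_linear_as_real(Wq).shape)        # (8, 12): a quarter of the parameters of a dense real layer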
for the evaluation, we use 3 real-world biological sequence (proteins and nucleotide) datasets and performed representation capacity analysis of 4 embedding methods from the literature, namely spike2vec, spaced k-mers, pwm2vec, and autoencoder. experimental results demonstrate the spaced k-mersbased embedding shows better representation capacity on 2 out of three datasets. from the weights computed through optimization, we observed that classification, clustering, and trustworthiness hold the maximum weights while neighborhood agreement weight share towards the representation capacity score is very small. the first of its kind study in the domain of bioinformatics (to the best of our knowledge), the efficacy of the proposed method in accurately measuring the representation capacity of embeddings will lead to improved decision-making and performance in various machine-learning applications in healthcare. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/43.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/43.txt new file mode 100644 index 0000000000000000000000000000000000000000..ff8b64d2c3fca86ba88433df58848b3a32a55bca --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/43.txt @@ -0,0 +1 @@ + we study the classic facility location setting, where we are given n clients and m possible facility locations in some arbitrary metric space, and want to choose a location to build a facility. the exact same setting also arises in spatial social choice, where voters are the clients and the goal is to choose a candidate or outcome, with the distance from a voter to an outcome representing the cost of this outcome for the voter (e.g., based on their ideological differences). unlike most previous work, we do not focus on a single objective to optimize (e.g., the total distance from clients to the facility, or the maximum distance, etc.), but instead attempt to optimize several different objectives simultaneously. more specifically, we consider the l-centrum family of objectives, which includes the total distance, max distance, and many others. we present tight bounds on how well any pair of such objectives (e.g., max and sum) can be simultaneously approximated compared to their optimum outcomes. in particular, we show that for any such pair of objectives, it is always possible to choose an outcome which simultaneously approximates both objectives within a factor of 1 + √ 2, and give a precise characterization of how this factor improves as the two objectives being optimized become more similar. for q > 2 different centrum objectives, we show that it is always possible to approximate all q of these objectives within a small constant, and that this constant approaches 3 as q → ∞. our results show that when optimizing only a few simultaneous objectives, it is always possible to form an outcome which is a significantly better than 3 approximation for all of these objectives. 
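For the simultaneous-approximation result summarized in 43.txt above, a toy numeric check (not a proof) of choosing one candidate location that approximates both the sum and the max objective at once, with the abstract's 1 + sqrt(2) figure used only as a sanity bound on this random instance:

import numpy as np

def best_simultaneous(dist):
    """dist[i, j] = distance from client i to candidate j. Pick the candidate whose worse
    approximation ratio over the sum and max objectives is smallest."""
    sums, maxes = dist.sum(axis=0), dist.max(axis=0)
    ratios = np.maximum(sums / sums.min(), maxes / maxes.min())
    j = int(ratios.argmin())
    return j, float(ratios[j])

rng = np.random.default_rng(0)
clients, candidates = rng.random((10, 2)), rng.random((6, 2))
dist = np.linalg.norm(clients[:, None, :] - candidates[None, :, :], axis=-1)
j, factor = best_simultaneous(dist)
print(j, factor, factor <= 1 + 2 ** 0.5)          # sanity check of the pairwise bound on this instance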
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/430.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/430.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef8837aa663f1b87c040baf3d354f8a2266f50ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/430.txt @@ -0,0 +1 @@ + in recent years, transformer-based auto-attention mechanisms have been successfully applied to the analysis of a variety of context-reliant data types, from texts to images and beyond, including data from non-euclidean geometries. in this paper, we present such a mechanism, designed to classify sequences of symmetric positive definite matrices while preserving their riemannian geometry throughout the analysis. we apply our method to automatic sleep staging on time series of eeg-derived covariance matrices from a standard dataset, obtaining high levels of stage-wise performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/431.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/431.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae14e7d23b58e810a0746935825b9ebdba052e78 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/431.txt @@ -0,0 +1 @@ + we study unconstrained online linear optimization with lipschitz losses. motivated by the pursuit of instance optimality, we propose a new algorithm that simultaneously achieves (i) the adagrad-style second order gradient adaptivity; and (ii) the comparator norm adaptivity also known as "parameter freeness" in the literature. in particular: • our algorithm does not employ the impractical doubling trick, and does not require an a priori estimate of the time-uniform lipschitz constant; • the associated regret bound has the optimal o(√vt) dependence on the gradient variance vt, without the typical logarithmic multiplicative factor; • the leading constant in the regret bound is "almost" optimal. central to these results is a continuous time approach to online learning. we first show that the aimed simultaneous adaptivity can be achieved fairly easily in a continuous time analogue of the problem, where the environment is modeled by an arbitrary continuous semimartingale. then, our key innovation is a new discretization argument that preserves such adaptivity in the discrete time adversarial setting. this refines a non-gradient-adaptive discretization argument from , both algorithmically and analytically, which could be of independent interest. reported there can also achieve the o u v t log u bound. we refer the readers to section 7 of its recently updated arxiv version for the technical details. our quantitative improvement over will be the leading constant optimality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/432.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/432.txt new file mode 100644 index 0000000000000000000000000000000000000000..463a6e19bfd3f3bc789a6e63dcac3645621964a3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/432.txt @@ -0,0 +1 @@ + graph hypernetworks (ghn) can predict the parameters of varying unseen cnn architectures with surprisingly good accuracy at a fraction of the cost of iterative optimization. following these successes, preliminary research has explored the use of ghns to predict quantization-robust parameters for 8-bit and 4-bit quantized cnns.
however, this early work leveraged full-precision float32 training and only quantized for testing. we explore the impact of quantization-aware training and/or other quantization-based training strategies on quantized robustness and performance of ghn predicted parameters for low-precision cnns. we show that quantization-aware training can significantly improve quantized accuracy for ghn predicted parameters of 4-bit quantized cnns and even lead to greater-than-random accuracy for 2-bit quantized cnns. these promising results open the door for future explorations such as investigating the use of ghn predicted parameters as initialization for further quantized training of individual cnns, further exploration of "extreme bitwidth" quantization, and mixed precision quantization schemes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/433.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/433.txt new file mode 100644 index 0000000000000000000000000000000000000000..ad82ae38bbd929fb8f3306b9acd55184353bc686 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/433.txt @@ -0,0 +1 @@ + as climate change intensifies, the urgency for accurate global-scale disaster predictions grows. this research presents a novel multimodal disaster prediction framework, combining weather statistics, satellite imagery, and textual insights. we particularly focus on "flood" and "landslide" predictions, given their ties to meteorological and topographical factors. the model is meticulously crafted based on the available data and we also implement strategies to address class imbalance. while our findings suggest that integrating multiple data sources can bolster model performance, the extent of enhancement differs based on the specific nature of each disaster and their unique underlying causes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/434.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/434.txt new file mode 100644 index 0000000000000000000000000000000000000000..05195c3d0a1e51b2a8dd2aa4b7c6ee596d304692 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/434.txt @@ -0,0 +1 @@ + in an increasingly digitalized commerce landscape, the proliferation of credit card fraud and the evolution of sophisticated fraudulent techniques have led to substantial financial losses. automating credit card fraud detection is a viable way to accelerate detection, reducing response times and minimizing potential financial losses. however, addressing this challenge is complicated by the highly imbalanced nature of the datasets, where genuine transactions vastly outnumber fraudulent ones. furthermore, the high number of dimensions within the feature set gives rise to the "curse of dimensionality". in this paper, we investigate subspace learning-based approaches centered on one-class classification (occ) algorithms, which excel in handling imbalanced data distributions and possess the capability to anticipate and counter the transactions carried out by yet-tobe-invented fraud techniques. the study highlights the potential of subspace learning-based occ algorithms by investigating the limitations of current fraud detection strategies and the specific challenges of credit card fraud detection. these algorithms integrate subspace learning into the data description; hence, the models transform the data into a lower-dimensional subspace optimized for occ. 
through rigorous experimentation and analysis, the study validated that the proposed approach helps tackle the curse of dimensionality and the imbalanced nature of credit card data for automatic fraud detection to mitigate financial losses caused by fraudulent activities. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/435.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/435.txt new file mode 100644 index 0000000000000000000000000000000000000000..e90e7f250e5b2099e9eed71c87933934f068ef68 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/435.txt @@ -0,0 +1 @@ + angular minkowski p-distance is a dissimilarity measure that is obtained by replacing euclidean distance in the definition of cosine dissimilarity with other minkowski p-distances. cosine dissimilarity is frequently used with datasets containing token frequencies, and angular minkowski p-distance may potentially be an even better choice for certain tasks. in a case study based on the 20-newsgroups dataset, we evaluate classification performance for classical weighted nearest neighbours, as well as fuzzy rough nearest neighbours. in addition, we analyse the relationship between the hyperparameter p, the dimensionality m of the dataset, the number of neighbours k, the choice of weights and the choice of classifier. we conclude that it is possible to obtain substantially higher classification performance with angular minkowski p-distance with suitable values for p than with classical cosine dissimilarity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/436.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/436.txt new file mode 100644 index 0000000000000000000000000000000000000000..5f5173d9faba8c7c5de54dc9de5c52d277858e59 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/436.txt @@ -0,0 +1 @@ + this paper aims to develop a new attribution method to explain the conflict between individual variables' attributions and their coalition's attribution from a fully new perspective. first, we find that the shapley value can be reformulated as the allocation of harsanyi interactions encoded by the ai model. second, based on the re-allocation of interactions, we extend the shapley value to the attribution of coalitions. third, we derive the fundamental mechanism behind the conflict. this conflict comes from the interaction containing partial variables in their coalition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/437.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/437.txt new file mode 100644 index 0000000000000000000000000000000000000000..01e360e999713ace5c7a03e8c197df0bfae62656 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/437.txt @@ -0,0 +1 @@ + diabetes, especially t2dm, continues to be a significant health problem. one of the major concerns associated with diabetes is the development of its complications. diabetic nephropathy, one of the chronic complications of diabetes, adversely affects the kidneys, leading to kidney damage. diagnosing diabetic nephropathy involves considering various criteria, one of which is the presence of a pathologically significant quantity of albumin in urine, known as albuminuria. thus, early prediction of albuminuria in diabetic patients holds the potential for timely preventive measures.
this study aimed to develop a supervised learning model to predict the risk of developing albuminuria in t2dm patients. the selected supervised learning algorithms included naïve bayes, support vector machine (svm), decision tree, random forest, adaboost, xgboost, and multi-layer perceptron (mlp). our private dataset, comprising 184 entries of diabetes complications risk factors, was used to train the algorithms. it consisted of 10 attributes as features and 1 attribute as the target (albuminuria). upon conducting the experiments, the mlp demonstrated superior performance compared to the other algorithms. it achieved accuracy and f1-score values as high as 0.74 and 0.75, respectively, making it suitable for screening purposes in predicting albuminuria in t2dm. nonetheless, further studies are warranted to enhance the model's performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/438.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/438.txt new file mode 100644 index 0000000000000000000000000000000000000000..1e5804837e0e69a4edbed58d14454f0d4463bf47 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/438.txt @@ -0,0 +1 @@ + dynamic ensemble selection (des) is a multiple classifier systems (mcs) approach that aims to select an ensemble for each query sample during the selection phase. even with the proposal of several des approaches, no particular des technique is the best choice for different problems. thus, we hypothesize that selecting the best des approach per query instance can lead to better accuracy. to evaluate this idea, we introduce the post-selection dynamic ensemble selection (ps-des) approach, a post-selection scheme that evaluates ensembles selected by several des techniques using different metrics. experimental results show that using accuracy as a metric to select the ensembles, ps-des performs better than individual des techniques. ps-des source code is available in a github repository 4 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/439.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/439.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb241b855e69d301ebe11e7613b3ed5e137aa74e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/439.txt @@ -0,0 +1 @@ + machine learning models have become increasingly popular for predicting the results of soccer matches, however, the lack of publicly-available benchmark datasets has made model evaluation challenging. the 2023 soccer prediction challenge required the prediction of match results first in terms of the exact goals scored by each team, and second, in terms of the probabilities for a win, draw, and loss. the original training set of matches and features, which was provided for the competition, was augmented with additional matches that were played between 4 april and 13 april 2023, representing the period after which the training set ended, but prior to the first matches that were to be predicted (upon which the performance was evaluated). a catboost model was employed using pi-ratings as the features, which were initially identified as the optimal choice for calculating the win/draw/loss probabilities. notably, deep learning models have frequently been disregarded in this particular task. therefore, in this study, we aimed to assess the performance of a deep learning model and determine the optimal feature set for a gradient-boosted tree model. 
the model was trained using the most recent five years of data, and three training and validation sets were used in a hyperparameter grid search. the results from the validation sets show that our model had strong performance and stability compared to previously published models from the 2017 soccer prediction challenge for win/draw/loss prediction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/44.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/44.txt new file mode 100644 index 0000000000000000000000000000000000000000..166354345b5c2fad08e37ccf29638b76b038c9be --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/44.txt @@ -0,0 +1 @@ + machine learning algorithms have been extensively researched throughout the last decade, leading to unprecedented advances in a broad range of applications, such as image classification and reconstruction, object recognition, and text categorization. nonetheless, most machine learning algorithms are trained via derivative-based optimizers, such as the stochastic gradient descent, leading to possible local optimum entrapments and inhibiting them from achieving proper performances. a bioinspired alternative to traditional optimization techniques, denoted as meta-heuristic, has received significant attention due to its simplicity and ability to avoid local optimums imprisonment. in this work, we propose to use meta-heuristic techniques to fine-tune pre-trained weights, exploring additional regions of the search space, and improving their effectiveness. the experimental evaluation comprises two classification tasks (image and text) and is assessed under four literature datasets. experimental results show nature-inspired algorithms' capacity in exploring the neighborhood of pre-trained weights, achieving superior results than their counterpart pre-trained architectures. additionally, a thorough analysis of distinct architectures, such as multi-layer perceptron and recurrent neural networks, attempts to visualize and provide more precise insights into the most critical weights to be fine-tuned in the learning process. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/440.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/440.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8ef44d56d882630234f0ea10eb7036c5e435943 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/440.txt @@ -0,0 +1 @@ + counterfactual explanations (ces) have received increasing interest as a major methodology for explaining neural network classifiers. usually, ces for an input-output pair are defined as data points with minimum distance to the input that are classified with a different label than the output. to tackle the established problem that ces are easily invalidated when model parameters are updated (e.g. retrained), studies have proposed ways to certify the robustness of ces under model parameter changes bounded by a norm ball. however, existing methods targeting this form of robustness are not sound or complete, and they may generate implausible ces, i.e., outliers wrt the training dataset. in fact, no existing method simultaneously optimises for proximity and plausibility while preserving robustness guarantees. 
in this work, we propose provably robust and plausible counterfactual explanations (proplace), a method leveraging robust optimisation techniques to address the aforementioned limitations in the literature. we formulate an iterative algorithm to compute provably robust ces and prove its convergence, soundness and completeness. through a comparative experiment involving six baselines, five of which target robustness, we show that proplace achieves state-of-the-art performance against metrics on three evaluation aspects. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/441.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/441.txt new file mode 100644 index 0000000000000000000000000000000000000000..b29680ddd4521bd1a4be117bf00138fe2ffa6a6d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/441.txt @@ -0,0 +1 @@ + controller area network bus systems within vehicular networks are not equipped with the tools necessary to ward off and protect themselves from modern cyber-security threats. work has been done on using machine learning methods to detect and report these attacks, but common methods are not robust towards unknown attacks. these methods usually rely on there being a sufficient representation of attack data, which may not be available either because there is not enough data to adequately represent its distribution or because the distribution itself is too diverse in nature to be sufficiently represented. with the use of one-class classification methods, this issue can be mitigated as only normal data is required to train a model for the detection of anomalous instances. research has been done on the efficacy of these methods, most notably one-class support vector machine and support vector data description, but many new extensions of these works have been proposed and have yet to be tested for injection attacks in vehicular networks. in this paper, we investigate the performance of various state-of-the-art one-class classification methods for detecting injection attacks on controller area network bus traffic. we investigate the effectiveness of these techniques on attacks launched on controller area network buses from two different vehicles during normal operation and while being attacked. we observe that the subspace support vector data description method outperformed all other tested methods with a gmean of about 85%. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/442.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/442.txt new file mode 100644 index 0000000000000000000000000000000000000000..4e75461607e1334403b8baa07de90b01105d1a8e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/442.txt @@ -0,0 +1 @@ + this study examines the use of a highly effective training method to conduct one-class classification. the existence of both positive and negative examples in the training data is necessary to develop an effective classifier in common binary classification scenarios. unfortunately, this criterion is not met in many domains. here, there is just one class of examples. classification algorithms that learn from solely positive input have been created to deal with this setting. in this paper, an effective algorithm for dual soft-margin one-class svm training is presented. our approach makes use of the augmented lagrangian (al-fpgm), a variant of the fast projected gradient method.
the fpgm requires only first derivatives, which for the dual soft margin occ-svm means computing mainly a matrix-vector product. therefore, al-fpgm, being computationally inexpensive, may complement existing quadratic programming solvers for training large svms. we extensively validate our approach over real-world datasets and demonstrate that our strategy obtains statistically significant results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/443.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/443.txt new file mode 100644 index 0000000000000000000000000000000000000000..b168416315973c24d504e505e03bf961144a22d4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/443.txt @@ -0,0 +1 @@ + in this paper, we use a probabilistic model to estimate the number of uncorrelated features in a large dataset. our model allows for both pairwise feature correlation (collinearity) and interdependency of multiple features (multicollinearity) and we use the probabilistic method to obtain upper and lower bounds of the same order, for the size of a feature set that exhibits low collinearity and low multicollinearity. we also prove an auxiliary result regarding mutually good constrained sets that is of independent interest. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/444.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/444.txt new file mode 100644 index 0000000000000000000000000000000000000000..3baeb3dab5c2cd2f46bcec6f058201fb643c2bdd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/444.txt @@ -0,0 +1 @@ + the traditional machine learning models to solve optimal power flow (opf) are mostly trained for a given power network and lack generalizability to today's power networks with varying topologies and growing plug-and-play distributed energy resources (ders). in this paper, we propose deepopf-u, which uses one unified deep neural network (dnn) to solve alternatingcurrent (ac) opf problems in different power networks, including a set of power networks that is successively expanding. specifically, we design elastic input and output layers for the vectors of given loads and opf solutions with varying lengths in different networks. the proposed method, using a single unified dnn, can deal with different and growing numbers of buses, lines, loads, and ders. simulations of ieee 57/118/300-bus test systems and a network growing from 73 to 118 buses verify the improved performance of deepopf-u compared to existing dnn-based solution methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/445.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/445.txt new file mode 100644 index 0000000000000000000000000000000000000000..52de015c67345d8d664bb695c67f56baabe4f2a3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/445.txt @@ -0,0 +1 @@ + human perception of the empirical world involves recognizing the diverse appearances, or 'modalities', of underlying objects. despite the longstanding consideration of this perspective in philosophy and cognitive science, the study of multimodality remains relatively under-explored within the field of machine learning. nevertheless, current studies of multimodal machine learning are limited to empirical practices, lacking theoretical foundations beyond heuristic arguments. 
an intriguing finding from the practice of multimodal learning is that a model trained on multiple modalities can outperform a finely-tuned unimodal model, even on unimodal tasks. this paper provides a theoretical framework that explains this phenomenon, by studying generalization properties of multimodal learning algorithms. we demonstrate that multimodal learning allows for a superior generalization bound compared to unimodal learning, up to a factor of o( √ n), where n represents the sample size. such advantage occurs when both connection and heterogeneity exist between the modalities. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/446.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/446.txt new file mode 100644 index 0000000000000000000000000000000000000000..cee77b603f66eb79e69c3bea05ba1aeef7bebe40 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/446.txt @@ -0,0 +1 @@ + rolling bearing fault diagnosis has garnered increased attention in recent years owing to its presence in rotating machinery across various industries, and an ever increasing demand for efficient operations. prompt detection and accurate prediction of bearing failures can help reduce the likelihood of unexpected machine downtime and enhance maintenance schedules, averting lost productivity. recent technological advances have enabled monitoring the health of these assets at scale using a variety of sensors, and predicting the failures using modern machine learning (ml) approaches including deep learning architectures. vibration data has been collected using accelerated run-to-failure of overloaded bearings, or by introducing known failure in bearings, under a variety of operating conditions such as rotating speed, load on the bearing, type of bearing fault, and data acquisition frequency. however, in the development of bearing failure classification models using vibration data there is a lack of consensus in the metrics used to evaluate the models, data partitions used to evaluate models, and methods used to generate failure labels in run-to-failure experiments. an understanding of the impact of these choices is important to reliably develop models, and deploy them in practical settings. in this work, we demonstrate the significance of these choices on the performance of the models using publicly-available vibration datasets, and suggest model development considerations for real world scenarios. our experimental findings demonstrate that assigning vibration data from a given bearing across training and evaluation splits leads to over-optimistic performance estimates, pca-based approach is able to robustly generate labels for failure classification in run-to-failure experiments, and f scores are more insightful to evaluate the models with unbalanced real-world failure data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/447.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/447.txt new file mode 100644 index 0000000000000000000000000000000000000000..b58fb1d512e8c11d3d9af9362218dc0a62f923cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/447.txt @@ -0,0 +1 @@ + a sudden roadblock on highways due to many reasons such as road maintenance, accidents, and car repair is a common situation we encounter almost daily. 
autonomous vehicles (avs) equipped with sensors that can acquire vehicle dynamics such as speed, acceleration, and location can make intelligent decisions to change lanes before reaching a roadblock. a number of literature studies have examined car-following models and lane-changing models. however, only a few studies proposed an integrated car-following and lane-changing model, which has the potential to model practical driving maneuvers. hence, in this paper, we present an integrated car-following and lane-changing decision-control system based on deep reinforcement learning (drl) to address this issue. specifically, we consider a scenario where sudden construction work will be carried out along a highway. we model the scenario as a markov decision process (mdp) and employ the well-known dqn algorithm to train the rl agent to make the appropriate decision accordingly (i.e., either stay in the same lane or change lanes). to overcome the delay and computational requirements of drl algorithms, we adopt an mec-assisted architecture where the rl agents are trained on mec servers. we utilize the highly reputable sumo simulator and openai gym to evaluate the performance of the proposed model under two policies: the ϵ-greedy policy and the boltzmann policy. the results unequivocally demonstrate that the dqn agent trained using the ϵ-greedy policy significantly outperforms the one trained with the boltzmann policy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/448.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/448.txt new file mode 100644 index 0000000000000000000000000000000000000000..be50d390f6bd7d7d4e74bf7e4203e75b2807bda3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/448.txt @@ -0,0 +1 @@ + traditional optimization methods rely on the use of single-precision floating point arithmetic, which can be costly in terms of memory size and computing power. however, mixed precision optimization techniques leverage the use of both single and half-precision floating point arithmetic to reduce memory requirements while maintaining model accuracy. we provide here an algorithm to further reduce memory usage during the training of a model by getting rid of the floating point copy of the parameters, virtually keeping only half-precision numbers. we also explore the benefits of getting rid of the gradient's value by executing the optimizer step during the back-propagation. in practice, we achieve up to 25% lower peak memory use and 15% faster training while maintaining the same level of accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/449.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/449.txt new file mode 100644 index 0000000000000000000000000000000000000000..6581c6b1d67d6071495f932832366141a83021c8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/449.txt @@ -0,0 +1 @@ + the online gradient balancing (grab) algorithm, which greedily chooses the example ordering by solving the herding problem using per-sample gradients, is proved to be the theoretically optimal solution that guarantees to outperform random reshuffling. however, there is currently no efficient implementation of grab for the community to easily use it. this work presents an efficient python library, grabsampler, that allows the community to easily use grab algorithms and proposes 5 variants of the grab algorithm.
the best performance result of the grab-sampler reproduces the training loss and test accuracy results while only in the cost of 8.7% training time overhead and 0.85% peak gpu memory usage overhead. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/45.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/45.txt new file mode 100644 index 0000000000000000000000000000000000000000..a360a442cbc300d4504989a99e93224d5d58ac92 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/45.txt @@ -0,0 +1 @@ + analogical proportions compare pairs of items (a, b) and (c, d) in terms of their differences and similarities. they play a key role in the formalization of analogical inference. the paper first discusses how to improve analogical inference in terms of accuracy and in terms of computational cost. then it indicates the potential of analogical proportions for explanation. finally, it highlights the close relationship between analogical proportions and multi-valued dependencies, which reveals an unsuspected aspect of the former. backgroundan analogical proportion (ap) is a statement of the form "a is to b as c is to d", linking four items a, b, c, d. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/450.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/450.txt new file mode 100644 index 0000000000000000000000000000000000000000..2887afda6355e914f7f358d91e7f9039e50686ee --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/450.txt @@ -0,0 +1 @@ + the covid-19 pandemic has left a lasting impact on individuals, with many experiencing persistent symptoms, including inflammation, in the post-acute phase of the disease. detecting and monitoring these inflammatory biomarkers is critical for timely intervention and improved patient outcomes. this study employs machine learning techniques to automate the identification of persistent inflammatory biomarkers in 290 post-covid-19 patients, based on medical data collected from hospitals in iraq. the data encompassed a wide array of clinical parameters, such as c-reactive protein and interleukin-6 levels, patient demographics, comorbidities, and treatment histories. rigorous data preprocessing and feature selection processes were implemented to optimize the dataset for machine learning analysis. various machine learning algorithms, including logistic regression, random forests, support vector machines, and gradient boosting, were deployed to construct predictive models. these models exhibited promising results, showcasing high accuracy and precision in the identification of patients with persistent inflammation. the findings of this study underscore the potential of machine learning in automating the detection of persistent inflammatory biomarkers in post-covid-19 patients. these models can serve as valuable tools for healthcare providers, facilitating early diagnosis and personalized treatment strategies for individuals at risk of persistent inflammation, ultimately contributing to improved post-acute covid-19 care and patient well-being. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/451.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/451.txt new file mode 100644 index 0000000000000000000000000000000000000000..3232a60547fb3ebc3caa5cb347bbce84799e2c02 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/451.txt @@ -0,0 +1 @@ + in recent years, explainable machine learning methods have been very successful. despite their success, most explainable machine learning methods are applied to black-box models without any domain knowledge. by incorporating domain knowledge, scienceinformed machine learning models have demonstrated better generalization and interpretation. but do we obtain consistent scientific explanations if we apply explainable machine learning methods to science-informed machine learning models? this question is addressed in the context of monotonic models that exhibit three different types of monotonicity. to demonstrate monotonicity, we propose three axioms. accordingly, this study shows that when only individual monotonicity is involved, the baseline shapley value provides good explanations; however, when strong pairwise monotonicity is involved, the integrated gradients method provides reasonable explanations on average. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/452.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/452.txt new file mode 100644 index 0000000000000000000000000000000000000000..199d66e2ead4d7e787e4595a2b5f799b807b8a16 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/452.txt @@ -0,0 +1 @@ + the retina is an essential component of the visual system, and maintaining eyesight depends on the timely and accurate detection of disorders. the early-stage detection and severity classification of diabetic retinopathy (dr), a significant risk to the public's health is the primary goal of this work. this study compares the outcomes of various deep learning models, including inceptionnetv3, densenet121, and other cnn-based models, utilizing a variety of image filters, including gaussian, grayscale, and gabor. these models could detect subtle pathological alterations and use that information to estimate the risk of retinal illnesses. the objective is to improve the diagnostic processes for dr, the primary cause of diabetes-related blindness, by utilizing deep learning models. a comparative analysis between greyscale, gaussian and gabor filters has been provided after applying these filters on the retinal images. the gaussian filter has been identifiedas the most promising filter by resulting in 96% accuracy using inceptionnetv3. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/453.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/453.txt new file mode 100644 index 0000000000000000000000000000000000000000..b73bfd902e0fc3b05232d2c2573f72e1f18aa232 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/453.txt @@ -0,0 +1 @@ + current methods of imitation learning (il), primarily based on deep neural networks, offer efficient means for obtaining driving policies from real-world data but suffer from significant limitations in interpretability and generalizability. these shortcomings are particularly concerning in safety-critical applications like autonomous driving. 
in this paper, we address these limitations by introducing symbolic imitation learning (sil), a groundbreaking method that employs inductive logic programming (ilp) to learn driving policies which are transparent, explainable and generalisable from available datasets. utilizing the real-world highd dataset, we subject our method to a rigorous comparative analysis against prevailing neural-network-based il methods. our results demonstrate that sil not only enhances the interpretability of driving policies but also significantly improves their applicability across varied driving situations. hence, this work offers a novel pathway to more reliable and safer autonomous driving systems, underscoring the potential of integrating ilp into the domain of il. 1 \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/454.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/454.txt new file mode 100644 index 0000000000000000000000000000000000000000..16981a0a1edb68076a4e8eddc5ab8bb04abbcca4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/454.txt @@ -0,0 +1 @@ + this paper describes a family of seasonal and nonseasonal time series models that can be viewed as generalisations of additive and multiplicative exponential smoothing models, to model series that grow faster than linear but slower than exponential. their development is motivated by fast-growing, volatile time series. in particular, our models have a global trend that can smoothly change from additive to multiplicative, and is combined with a linear local trend. seasonality when used is multiplicative in our models, and the error is always additive but is heteroscedastic and can grow through a parameter sigma. we leverage state-of-the-art bayesian fitting techniques to accurately fit these models that are more complex and flexible than standard exponential smoothing models. when applied to the m3 competition data set, our models outperform the best algorithms in the competition as well as other benchmarks, thus achieving to the best of our knowledge the best results of per-series univariate methods on this dataset in the literature. an open-source software package of our method is available. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/455.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/455.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6128adf222e6c5899fca769b41d236ce6d5b877 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/455.txt @@ -0,0 +1 @@ + with the ever-growing popularity of graph neural networks (gnns), efficient gnn inference is gaining tremendous attention. field-programming gate arrays (fpgas) are a promising execution platform due to their fine-grained parallelism, low-power consumption, reconfigurability, and concurrent execution. even better, high-level synthesis (hls) tools bridge the gap between the non-trivial fpga development efforts and rapid emergence of new gnn models. in this paper, we propose gnnhls, an open-source framework to comprehensively evaluate gnn inference acceleration on fpgas via hls, containing a software stack for data generation and baseline deployment, and fpga implementations of 6 well-tuned gnn hls kernels. we evaluate gnnhls on 4 graph datasets with distinct topologies and scales. the results show that gnnhls achieves up to 50.8× speedup and 423× energy reduction relative to the cpu baselines. 
compared with the gpu baselines, gnnhls achieves up to 5.16× speedup and 74.5× energy reduction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/456.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/456.txt new file mode 100644 index 0000000000000000000000000000000000000000..89a2857aa7e18ed975fe70c32bcbcc54b05b5902 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/456.txt @@ -0,0 +1 @@ + in this study, we leveraged machine learning techniques to identify risk factors associated with post-covid-19 mental health disorders. our analysis, based on data collected from 669 patients across various provinces in iraq, yielded valuable insights. we found that age, gender, and geographical region of residence were significant demographic factors influencing the likelihood of developing mental health disorders in post-covid-19 patients. additionally, comorbidities and the severity of covid-19 illness were important clinical predictors. psychosocial factors, such as social support, coping strategies, and perceived stress levels, also played a substantial role. our findings emphasize the complex interplay of multiple factors in the development of mental health disorders following covid-19 recovery. healthcare providers and policymakers should consider these risk factors when designing targeted interventions and support systems for individuals at risk. machine learning-based approaches can provide a valuable tool for predicting and preventing adverse mental health outcomes in post-covid-19 patients. further research and prospective studies are needed to validate these findings and enhance our understanding of the long-term psychological impact of the covid-19 pandemic. this study contributes to the growing body of knowledge regarding the mental health consequences of the covid-19 pandemic and underscores the importance of a multidisciplinary approach to address the diverse needs of individuals on the path to recovery. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/457.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/457.txt new file mode 100644 index 0000000000000000000000000000000000000000..1b6f06b7b7b54cceafb3a8af2f177e81f49e5f7b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/457.txt @@ -0,0 +1 @@ + we present a self-supervised variational autoencoder (vae) to jointly learn disentangled and dependent hidden factors and then enhance disentangled representation learning by a self-supervised classifier to eliminate coupled representations in a contrastive manner. to this end, a contrastive copula vae (c 2 vae) is introduced without relying on prior knowledge about data in the probabilistic principle and involving strong modeling assumptions on the posterior in the neural architecture. c 2 vae simultaneously factorizes the posterior (evidence lower bound, elbo) with total correlation (tc)-driven decomposition for learning factorized disentangled representations and extracts the dependencies between hidden features by a neural gaussian copula for copula coupled representations. then, a self-supervised contrastive classifier differentiates the disentangled representations from the coupled representations, where a contrastive loss regularizes this contrastive classification together with the tc loss for eliminating entangled factors and strengthening disentangled representations. 
c 2 vae demonstrates a strong effect in enhancing disentangled representation learning. c 2 vae further contributes to improved optimization addressing the tc-based vae instability and the trade-off between reconstruction and representation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/458.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/458.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/459.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/459.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f1d5098ad38b2d12b10e543a4b3392e2a64ba2b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/459.txt @@ -0,0 +1 @@ + understanding feature representation for deep neural networks (dnns) remains an open question within the general field of explainable ai. we use principal component analysis (pca) to study the performance of a k-nearest neighbors classifier (k-nn), nearest class-centers classifier (ncc), and support vector machines on the learned layer-wise representations of a resnet-18 trained on cifar-10. we show that in certain layers, as little as 20% of the intermediate feature-space variance is necessary for high-accuracy classification and that across all layers, the first ∼100 pcs completely determine the performance of the k-nn and ncc classifiers. we relate our findings to neural collapse and provide partial evidence for the related phenomenon of intermediate neural collapse. our preliminary work provides three distinct yet interpretible surrogate models for feature representation with an affine linear model the best performing. we also show that leveraging several surrogate models affords us a clever method to estimate where neural collapse may initially occur within the dnn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/46.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/46.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c55f459595e32fbfaaeff1a40caa4a60f299800 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/46.txt @@ -0,0 +1 @@ + this short paper compiles the big ideas behind some philosophical views, definitions, and examples of causality. this collection spans the realms of the four commonly adopted approaches to causality: hume's regularity, counterfactual, manipulation, and mechanisms. this short review is motivated by presenting simplified views and definitions and then supplements them with examples from various fields, including economics, education, medicine, politics, physics, and engineering. it is the hope that this short review comes in handy for new and interested readers with little knowledge of causality and causal inference. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/460.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/460.txt new file mode 100644 index 0000000000000000000000000000000000000000..afaeb67d5d753cc7c80d7da3f6955443a481342f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/460.txt @@ -0,0 +1 @@ + e-mobility, or electric mobility, has emerged as a pivotal solution to address pressing environmental and sustainability concerns in the transportation sector. 
the depletion of fossil fuels, escalating greenhouse gas emissions, and the imperative to combat climate change underscore the significance of transitioning to electric vehicles (evs). this paper seeks to explore the potential of artificial intelligence (ai) in addressing various challenges related to effective energy management in e-mobility systems (ems). these challenges encompass critical factors such as range anxiety, charge rate optimization, and the longevity of energy storage in evs. by analyzing existing literature, we delve into the role that ai can play in tackling these challenges and enabling efficient energy management in ems. our objectives are twofold: to provide an overview of the current state-of-the-art in this research domain and propose effective avenues for future investigations. through this analysis, we aim to contribute to the advancement of sustainable and efficient emobility solutions, shaping a greener and more sustainable future for transportation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/461.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/461.txt new file mode 100644 index 0000000000000000000000000000000000000000..b26a64044a8c469b365a9e1309952c76f343c23b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/461.txt @@ -0,0 +1 @@ + introduction: purely medical cancer screening methods are often costly, time-consuming, and weakly applicable on a large scale. advanced artificial intelligence (ai) methods greatly help cancer detection but require specific or complex medical data. these aspects affect the mass implementation of cancer screening methods. for these reasons, it is a disruptive change for healthcare to apply ai methods for mass personalized assessment of the cancer risk among patients based on the existing electronic health records (ehr) volume. this paper proposes a novel method for personalized cancer risk prediction. methods & data: we formulate the problem as a binary classification task with diseased and healthy patients as classes. we have data from one of the largest regional-level clinics at our disposal. this dataset contains 175 441 de-identified patient ehrs, of which 2 861 were diagnosed with cancer. as a baseline, we implement a solution based on a recurrent neural network (rnn). this rnn processes the sequence of embeddings constructed for each medical event by a bert-based language model pre-trained on medical texts. proposed method based on ml and survival analysis: we propose a method that combines machine learning and survival analysis since these approaches are less computationally heavy, can be combined into an ensemble (the survival ensemble), and can be reproduced in most medical institutions. initially, we train survival models (the kaplan-meier estimators and the accelerated failure time model). and then, we carry out feature engineering using the fitted survival models. as a result, the proposed survival ensemble is an ml-based method containing both classical ml features (extracted from data manually) and the fitted survival models as features. experiments: we test the survival ensemble in some numeric studies. firstly, we obtain a significant difference between values of the primary metric (average precision) with 22.8% ± 2.7% (roc auc 83.7% ± 1.7%, f1 17.8% ± 2.8%) for the survival ensemble versus 15.1% ± 2.6% (roc auc 84.9% ± 0.8%, f1 21.4% ± 3.1%) for the baseline method. these confidence intervals were computed at the 95%-th level. 
secondly, the performance of the survival ensemble is also confirmed during the ablation study. thirdly, our method exceeds age baselines by a significant margin. moreover, in the blind retrospective out-of-time experiment, we have clearly shown that the proposed method is reliable in cancer patient detection (9 out of 100 selected). such results exceed the estimates of medical screenings, e.g., the best number needed to screen (9 out of 1000 screenings). conclusion: this paper presents a novel method for mass cancer risk prediction using ehr data. among other methods, our one stands out by the minimum data greedy policy, requiring only a history of medical service codes and diagnoses from ehr. such a feature greatly expands the method's applicability among clinics with varying data completeness. comparative experiments demonstrate that the proposed method outperforms traditional baseline methods significantly, achieving higher cancer patient detection. this method can help sort the patients' list for scheduled medical examinations, inviting high-risk patients first. further improvements, such as end-to-end training, enhance the method's performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/462.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/462.txt new file mode 100644 index 0000000000000000000000000000000000000000..e60cf5e56baa5bebe91d7efd5b72aaada4e5a822 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/462.txt @@ -0,0 +1 @@ + this paper presents the computational challenge on topological deep learning that was hosted within the icml 2023 workshop on topology and geometry in machine learning. the competition asked participants to provide open-source implementations of topological neural networks from the literature by contributing to the python packages toponetx (data processing) and topomodelx (deep learning). the challenge attracted twenty-eight qualifying submissions in its two-month duration. this paper describes the design of the challenge and summarizes its main findings. code: https://github.com/ pyt-team/topomodelx. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/463.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/463.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a58535a85e8ce589b98f881feb4361ef16f6f92 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/463.txt @@ -0,0 +1 @@ + in order to support the advancement of machine learning methods for predicting time-series data, we present a comprehensive dataset designed explicitly for longterm time-series forecasting. we incorporate a collection of datasets obtained from diverse, dynamic systems and real-life records. each dataset is standardized by dividing it into training and test trajectories with predetermined lookback lengths. we include trajectories of length up to 2000 to ensure a reliable evaluation of longterm forecasting capabilities. to determine the most effective model in diverse scenarios, we conduct an extensive benchmarking analysis using classical and state-of-the-art models, namely lstm, deepar, nlinear, n-hits, patchtst, and latentode. our findings reveal intriguing performance comparisons among these models, highlighting the dataset-dependent nature of model effectiveness. notably, we introduce a custom latent nlinear model and enhance deepar with a curriculum learning phase. 
both consistently outperform their vanilla counterparts.preprint. under review. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/464.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/464.txt new file mode 100644 index 0000000000000000000000000000000000000000..15d5a625540e96dafb7226d9a9f8f9c386b66677 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/464.txt @@ -0,0 +1 @@ + the covid-19 pandemic has led to widespread health challenges globally. among these challenges, the emergence of post-covid-19 complications, particularly cardiovascular complications, has garnered significant attention. this study addresses the pressing issue of predicting cardiovascular complications in individuals recovering from covid-19 by employing data-driven machine learning models. a comprehensive analysis was conducted, encompassing a cohort of 352 post-covid-19 patients from diverse regions of iraq. pertinent clinical data, comprising demographic information, comorbidities, laboratory findings, and imaging results, were meticulously collected. machine learning algorithms, including , were harnessed to construct predictive models. the dataset was stratified into training and testing subsets to rigorously assess the model performance. the study's outcomes illuminated several critical insights, such as the identification of substantial associations between specific comorbidities and the occurrence of post-covid-19 cardiovascular complications. the predictive models achieved commendable accuracy rates, sensitivity, specificity, and other relevant performance metrics, thus demonstrating their efficacy in recognizing individuals at heightened risk of developing such complications. this early detection capability holds promise for facilitating timely interventions, ultimately resulting in improved patient outcomes. in conclusion, this investigation underscores the potential of data-driven machine learning models as invaluable tools for predicting cardiovascular complications in individuals convalescing from covid-19. the findings accentuate the necessity for vigilant monitoring of patients, particularly those with identifiable risk factors. furthermore, this study advocates for continued research efforts and validation studies to refine these models, enhancing their accuracy and generalizability in diverse clinical settings. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/465.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/465.txt new file mode 100644 index 0000000000000000000000000000000000000000..a249e270eb87d0f2dec85a7d769e73d9bd85474c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/465.txt @@ -0,0 +1 @@ + class imbalance is a major problem in many real world classification tasks. due to the imbalance in the number of samples, the support vector machine (svm) classifier gets biased toward the majority class. furthermore, these samples are often observed with a certain degree of noise. therefore, to remove these problems we propose a novel fuzzy based approach to deal with class imbalanced as well noisy datasets. we propose two approaches to address these problems. the first approach is based on the intuitionistic fuzzy membership, termed as robust energy-based intuitionistic fuzzy least squares twin support vector machine (if-relstsvm). 
furthermore, we introduce the concept of hyperplane-based fuzzy membership in our second approach, where the final classifier is termed as robust energy-based fuzzy least square twin support vector machine (f-relstsvm). by using this technique, the membership values are based on a projection based approach, where the data points are projected on the hyperplanes. the performance of the proposed algorithms is evaluated on several benchmark and synthetic datasets. the experimental results show that the proposed if-relstsvm and f-relstsvm models outperform the baseline algorithms. statistical tests are performed to check the significance of the proposed algorithms. the results show the applicability of the proposed algorithms on noisy as well as imbalanced datasets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/466.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/466.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c8721183950e079b354682cb735b51ca4e95998 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/466.txt @@ -0,0 +1 @@ + this paper which is part of the new faculty highlights invited speaker program of aaai'23 rostami , serves as a comprehensive survey of my research in transfer learning by utilizing embedding spaces. the work reviewed in this paper specifically revolves around the inherent challenges associated with continual learning and limited availability of labeled data. by providing an overview of my past and ongoing contributions, this paper aims to present a holistic understanding of my research, paving the way for future explorations and advancements in the field. my research delves into the various settings of transfer learning, including, few-shot learning, zero-shot learning, continual learning, domain adaptation, and distributed learning. i hope this survey provides a forward-looking perspective for researchers who would like to focus on similar research directions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/467.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/467.txt new file mode 100644 index 0000000000000000000000000000000000000000..f500ea6a0c6f96f5514a185dde2ef8ca2902dcf5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/467.txt @@ -0,0 +1 @@ + the covid-19 pandemic, caused by the novel coronavirus sars-cov-2, has posed significant health challenges worldwide. while respiratory symptoms have been the primary focus, emerging evidence has highlighted the impact of covid-19 on various organ systems, including the gastrointestinal (gi) tract. this study, based on data from 913 post-covid-19 patients in iraq collected during 2022 and 2023, investigates the prevalence and patterns of gi symptoms in individuals recovering from covid-19 and leverages machine learning algorithms to identify predictive factors for these symptoms. the research findings reveal that a notable percentage of post-covid-19 patients experience gi symptoms during their recovery phase. diarrhea emerged as the most frequently reported symptom, followed by abdominal pain and nausea. machine learning analysis uncovered significant predictive factors for gi symptoms, including age, gender, disease severity, comorbidities, and the duration of covid-19 illness. 
these findings underscore the importance of monitoring and addressing gi symptoms in post-covid-19 care, with machine learning offering valuable tools for early identification and personalized intervention. this study contributes to the understanding of the longterm consequences of covid-19 on gi health and emphasizes the potential benefits of utilizing machine learning-driven analysis in predicting and managing these symptoms. further research is warranted to delve into the mechanisms underlying gi symptoms in covid-19 survivors and to develop targeted interventions for symptom management. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/468.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/468.txt new file mode 100644 index 0000000000000000000000000000000000000000..b575b6aeb6726b23a0e32d823ac735dd1748ebeb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/468.txt @@ -0,0 +1 @@ + quantization is a technique for creating efficient deep neural networks (dnns), which involves performing computations and storing tensors at lower bit-widths than f32 floating point precision. quantization reduces model size and inference latency, and therefore allows for dnns to be deployed on platforms with constrained computational resources and real-time systems. however, quantization can lead to numerical instability caused by roundoff error which leads to inaccurate computations and therefore, a decrease in quantized model accuracy. similarly to prior works, which have shown that both biases and activations are more sensitive to quantization and are best kept in full precision or quantized with higher bit-widths, we show that some weights are more sensitive than others which should be reflected on their quantization bit-width. to that end we propose mixquant, a search algorithm that finds the optimal custom quantization bit-width for each layer weight based on roundoff error and can be combined with any quantization method as a form of pre-processing optimization. we show that combining mixquant with brecq, a state-of-the-art quantization method, yields better quantized model accuracy than brecq alone. additionally, we combine mixquant with vanilla asymmetric quantization to show that mixquant has the potential to optimize the performance of any quantization technique. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/469.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/469.txt new file mode 100644 index 0000000000000000000000000000000000000000..a4c568a729b0d86bda9ac12c2d2b1cecec4c18c9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/469.txt @@ -0,0 +1 @@ + machine learning classifiers are widely used to make decisions with a major impact on people's lives (e.g. accepting or denying a loan, hiring decisions, etc). in such applications, the learned classifiers need to be both accurate and fair with respect to different groups of people, with different values of variables such as sex and race. this paper focuses on fair feature selection for classification, i.e. methods that select a feature subset aimed at maximising both the accuracy and the fairness of the predictions made by a classifier. 
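Neither of the two genetic algorithms compared in the abstract above is reproduced here; the sketch below only illustrates the kind of joint (accuracy, fairness) fitness such a fair feature-selection GA must evaluate for each candidate feature subset. The logistic-regression classifier, the demographic-parity-style fairness score, and the assumption of binary 0/1 labels with a binary protected attribute are illustrative choices, not the paper's setup:

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

def evaluate_subset(X, y, sensitive, mask, seed=0):
    """Score one candidate feature subset on accuracy and a demographic-parity
    style fairness measure (assumes binary 0/1 labels and a binary group)."""
    X_tr, X_te, y_tr, y_te, s_tr, s_te = train_test_split(
        X[:, mask], y, sensitive, test_size=0.3, random_state=seed)
    pred = LogisticRegression(max_iter=1000).fit(X_tr, y_tr).predict(X_te)
    s_te = np.asarray(s_te)
    acc = float((pred == y_te).mean())
    gap = abs(pred[s_te == 0].mean() - pred[s_te == 1].mean())  # positive-rate gap
    return acc, 1.0 - gap   # a GA can treat this pair as its two objectives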
more specifically, we compare two recently proposed genetic algorithms (gas) for fair feature selection that are based on two different multi-objective optimisation approaches: (a) a pareto dominance-based ga; and (b) a lexicographic optimisation-based ga, where maximising accuracy has higher priority than maximising fairness. both gas use the same measures of accuracy and fairness, allowing for a controlled comparison. as far as we know, this is the first comparison between the pareto and lexicographic approaches for fair classification. the results show that, overall, the lexicographic ga outperformed the pareto ga with respect to accuracy without degradation of the fairness of the learned classifiers. this is an important result because at present nearly all gas for fair classification are based on the pareto approach, so these results suggest a promising new direction for research in this area. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/47.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/47.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f1843b586890b236748ea4b98df697e8d16029d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/47.txt @@ -0,0 +1 @@ + we address the problem of integrating data from multiple, possibly biased, observational and interventional studies, to eventually compute counterfactuals in structural causal models. we start from the case of a single observational dataset affected by a selection bias. we show that the likelihood of the available data has no local maxima. this enables us to use the causal expectationmaximisation scheme to compute approximate bounds for partially identifiable counterfactual queries, which are the focus of this paper. we then show how the same approach can solve the general case of multiple datasets, no matter whether interventional or observational, biased or unbiased, by remapping it into the former one via graphical transformations. systematic numerical experiments and a case study on palliative care show the effectiveness and accuracy of our approach, while hinting at the benefits of integrating heterogeneous data to get informative bounds in case of partial identifiability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/470.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/470.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a298bd650ad31bf239e4d7bcdd389469c678462 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/470.txt @@ -0,0 +1 @@ + in recent years, there is strong emphasis on mining medical data using machine learning techniques. a common problem is to obtain a noiseless set of textual documents, with a relevant content for the research question, and developing a question answering (qa) model for a specific medical field. the purpose of this paper is to present a new methodology for building a medical dataset and obtain a qa model for analysis of symptoms and impact on daily life for a specific disease domain. the "mental health" forum was used, a forum dedicated to people suffering from schizophrenia and different mental disorders. relevant posts of active users, who regularly participate, were extrapolated providing a new method of obtaining low-bias content and without privacy issues. furthermore, it is shown how to pre-process the dataset to convert it into a qa dataset. 
the bidirectional encoder representations from transformers (bert), distilbert, roberta, and biobert models were fine-tuned and evaluated via f1-score, exact match, precision and recall. accurate empirical experiments demonstrated the effectiveness of the proposed method for obtaining an accurate dataset for qa model implementation. by fine-tuning the biobert qa model, we achieved an f1 score of 0.885, showing a considerable improvement and outperforming the state-of-the-art model for mental disorders domain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/471.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/471.txt new file mode 100644 index 0000000000000000000000000000000000000000..0257d9fe3c3e81e68fc9a5b851ed912d57387c37 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/471.txt @@ -0,0 +1 @@ + fully test-time adaptation (tta), which aims at adapting models to data drifts, has recently attracted wide interest. numerous tricks and techniques have been proposed to ensure robust learning on arbitrary streams of unlabeled data. however, assessing the true impact of each individual technique and obtaining a fair comparison still constitutes a significant challenge. to help consolidate the community's knowledge, we present a categorization of selected orthogonal tta techniques, including small batch normalization, stream rebalancing, reliable sample selection, and network confidence calibration. we meticulously dissect the effect of each approach on different scenarios of interest. through our analysis, we shed light on trade-offs induced by those techniques between accuracy, the computational power required, and model complexity. we also uncover the synergy that arises when combining techniques and are able to establish new state-of-the-art results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/472.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/472.txt new file mode 100644 index 0000000000000000000000000000000000000000..751f30da4fc54388c81b7ca159d12e8250c0389c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/472.txt @@ -0,0 +1 @@ + recent research has shown that artificial intelligence (ai) models can exhibit bias in performance when trained using data that are imbalanced by protected attribute(s). most work to date has focused on deep learning models, but classical ai techniques that make use of hand-crafted features may also be susceptible to such bias. in this paper we investigate the potential for race bias in random forest (rf) models trained using radiomics features. our application is prediction of tumour molecular subtype from dynamic contrast enhanced magnetic resonance imaging (dce-mri) of breast cancer patients. our results show that radiomics features derived from dce-mri data do contain race-identifiable information, and that rf models can be trained to predict white and black race from these data with 60-70% accuracy, depending on the subset of features used. furthermore, rf models trained to predict tumour molecular subtype using race-imbalanced data seem to produce biased behaviour, exhibiting better performance on test data from the race on which they were trained. 
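The per-race performance gap described above can be surfaced with a straightforward per-group evaluation; the sketch below is a generic scikit-learn illustration of that probe, not the authors' radiomics pipeline, and the random-forest settings are assumptions:

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import roc_auc_score

def per_group_auc(X_train, y_train, X_test, y_test, group_test):
    """Train a single classifier, then report ROC-AUC separately per group in
    the test set; large gaps between groups indicate the bias described above."""
    clf = RandomForestClassifier(n_estimators=300, random_state=0).fit(X_train, y_train)
    scores = clf.predict_proba(X_test)[:, 1]
    y_test, group_test = np.asarray(y_test), np.asarray(group_test)
    return {g: roc_auc_score(y_test[group_test == g], scores[group_test == g])
            for g in np.unique(group_test)}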
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/473.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/473.txt new file mode 100644 index 0000000000000000000000000000000000000000..90c8fdd3b2eeef3f658722b9a1322348e3d97e66 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/473.txt @@ -0,0 +1 @@ + this paper studies the problem of learning interactive recommender systems from logged feedbacks without any exploration in online environments. we address the problem by proposing a general offline reinforcement learning framework for recommendation, which enables maximizing cumulative user rewards without online exploration. specifically, we first introduce a probabilistic generative model for interactive recommendation, and then propose an effective inference algorithm for discrete and stochastic policy learning based on logged feedbacks. in order to perform offline learning more effectively, we propose five approaches to minimize the distribution mismatch between the logging policy and recommendation policy: support constraints, supervised regularization, policy constraints, dual constraints and reward extrapolation. we conduct extensive experiments on two public realworld datasets, demonstrating that the proposed methods can achieve superior performance over existing supervised learning and reinforcement learning methods for recommendation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/474.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/474.txt new file mode 100644 index 0000000000000000000000000000000000000000..9576e24ac0aee3cbc787a0c394ccad4c31a3a112 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/474.txt @@ -0,0 +1 @@ + automated essay score (aes) is proven to be one of the cutting-edge technologies. scoring techniques are used for various purposes. reliable scores are calculated based on influential variables. such variables can be computed by different methods based on the domain. the research is concentrated on the user's understanding of a given topic. the analysis is based on a scoring index by using large language models. the user can then compare and contrast the understanding of a topic that they recently learned. the results are then contributed towards learning analytics and progression is made for enhancing the learning ability. in this research, the focus is on summarizing a pdf document and gauging a user's understanding of its content. the process involves utilizing a langchain tool to summarize the pdf and extract the essential information. by employing this technique, the research aims to determine how well the user comprehends the summarized content. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/475.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/475.txt new file mode 100644 index 0000000000000000000000000000000000000000..635af979c8a379ec37ecba495d17a6914090d9f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/475.txt @@ -0,0 +1 @@ + the reliability of a learning model is key to the successful deployment of machine learning in various applications. creating a robust model, particularly one unaffected by adversarial attacks, requires a comprehensive understanding of the adversarial examples phenomenon. however, it is difficult to describe the phenomenon due to the complicated nature of the problems in machine learning. 
it has been shown that adversarial training can improve the robustness of the hypothesis. however, this improvement comes at the cost of decreased performance on natural samples. hence, it has been suggested that robustness and accuracy of a hypothesis are at odds with each other. in this paper, we put forth the alternative proposal that it is the continuity of a hypothesis that is incompatible with its robustness and accuracy. in other words, a continuous function cannot effectively learn the optimal robust hypothesis. to this end, we will introduce a framework for a rigorous study of harmonic and holomorphic hypothesis in learning theory terms and provide empirical evidence that continuous hypotheses does not perform as well as discontinuous hypotheses in some common machine learning tasks. from a practical point of view, our results suggests that a robust and accurate learning rule would train different continuous hypotheses for different regions of the domain. from a theoretical perspective, our analysis explains the adversarial examples phenomenon as a conflict between the continuity of a sequence of functions and its uniform convergence to a discontinuous function. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/476.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/476.txt new file mode 100644 index 0000000000000000000000000000000000000000..fe3bce4a76fb84a4f4ee16f523843e7f8e8bc9c2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/476.txt @@ -0,0 +1 @@ + to enhance the computational efficiency of quantized transformers, we replace the dot-product and softmax-based attention with an alternative mechanism involving addition and relu activation only. this side-steps the expansion to double precision often required by matrix multiplication and avoids costly softmax evaluations but maintains much of the core functionality of conventional dot-product attention. it can enable more efficient execution and support larger quantized transformer models on resource-constrained hardware or alternative arithmetic systems like homomorphic encryption. training experiments on four common benchmark tasks show test set prediction scores comparable to those of conventional transformers with dot-product attention. our scaling experiments also suggest significant computational savings, both in plaintext and under encryption. in particular, we believe that the relu and addition-based attention mechanism introduced in this paper may enable privacy-preserving ai applications operating under homomorphic encryption by avoiding the costly multiplication of encrypted variables. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/477.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/477.txt new file mode 100644 index 0000000000000000000000000000000000000000..b8df70c35acf383bd1b9fccb54b1a40f2eb2781b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/477.txt @@ -0,0 +1 @@ + we study distribution-free nonparametric regression following a notion of average smoothness initiated by ashlagi et al. , which measures the "effective" smoothness of a function with respect to an arbitrary unknown underlying distribution. while the recent work of hanneke et al. 
established tight uniform convergence bounds for average-smooth functions in the realizable case and provided a computationally efficient realizable learning algorithm, both of these results currently lack analogs in the general agnostic (i.e. noisy) case.in this work, we fully close these gaps. first, we provide a distribution-free uniform convergence bound for average-smoothness classes in the agnostic setting. second, we match the derived sample complexity with a computationally efficient agnostic learning algorithm. our results, which are stated in terms of the intrinsic geometry of the data and hold over any totally bounded metric space, show that the guarantees recently obtained for realizable learning of average-smooth functions transfer to the agnostic setting. at the heart of our proof, we establish the uniform convergence rate of a function class in terms of its bracketing entropy, which may be of independent interest. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/478.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/478.txt new file mode 100644 index 0000000000000000000000000000000000000000..e8106149a80f785257b7513c5326f219b54b5c7a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/478.txt @@ -0,0 +1 @@ + given the growing complexity of healthcare data over the last several years, using machine learning techniques like deep neural network (dnn) models has gained increased appeal. in order to extract hidden patterns and other valuable information from the huge quantity of health data, which traditional analytics are unable to do in a reasonable length of time, machine learning (ml) techniques are used. deep learning (dl) algorithms in particular have been shown as potential approaches to pattern identification in healthcare systems. this thought has led to the contribution of this research, which examines deep learning methods used in healthcare systems via an examination of cutting-edge network designs, applications, and market trends. to connect deep learning methodologies and human healthcare interpretability, the initial objective is to provide in-depth insight into the deployment of deep learning models in healthcare solutions. and last, to outline the current unresolved issues and potential directions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/479.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/479.txt new file mode 100644 index 0000000000000000000000000000000000000000..81b6ea8b8042781d16b47ab60927378d74d657d7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/479.txt @@ -0,0 +1 @@ + this study explores the quantisation-aware training (qat) on time series transformer models. we propose a novel adaptive quantisation scheme that dynamically selects between symmetric and asymmetric schemes during the qat phase. our approach demonstrates that matching the quantisation scheme to the real data distribution can reduce computational overhead while maintaining acceptable precision. moreover, our approach is robust when applied to real-world data and mixed-precision quantisation, where most objects are quantised to 4 bits. our findings inform model quantisation and deployment decisions while providing a foundation for advancing quantisation techniques. 
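The adaptive selection rule of the quantisation-aware-training abstract above is not spelled out there; as a minimal illustration of the underlying choice it adapts between, the snippet below fake-quantizes a tensor both symmetrically and asymmetrically at an assumed 4-bit width and keeps whichever scheme reconstructs the data with lower error (this selection criterion is an assumption, not the paper's):

import numpy as np

def fake_quantize(x, bits=4, symmetric=True):
    """Uniform fake quantization: map to integers at the given bit-width,
    then map back, so the reconstruction error can be inspected."""
    x = np.asarray(x, dtype=float)
    if symmetric:
        qmax = 2 ** (bits - 1) - 1
        scale = max(np.abs(x).max(), 1e-12) / qmax
        q = np.clip(np.round(x / scale), -qmax - 1, qmax)
        return q * scale
    qmin, qmax = 0, 2 ** bits - 1
    scale = max(x.max() - x.min(), 1e-12) / (qmax - qmin)
    zero_point = np.round(qmin - x.min() / scale)
    q = np.clip(np.round(x / scale + zero_point), qmin, qmax)
    return (q - zero_point) * scale

def pick_scheme(x, bits=4):
    """Keep whichever scheme reconstructs the data with lower squared error."""
    err = {s: np.mean((x - fake_quantize(x, bits, s == "symmetric")) ** 2)
           for s in ("symmetric", "asymmetric")}
    return min(err, key=err.get)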
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/48.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/48.txt new file mode 100644 index 0000000000000000000000000000000000000000..8463c95fe62da61d95672a55f4da2291f27741c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/48.txt @@ -0,0 +1 @@ + this study evaluated the ability of chatgpt, a recently developed artificial intelligence (ai) agent, to perform high-level cognitive tasks and produce text that is indistinguishable from human-generated text. this capacity raises concerns about the potential use of chatgpt as a tool for academic misconduct in online exams. the study found that chatgpt is capable of exhibiting critical thinking skills and generating highly realistic text with minimal input, making it a potential threat to the integrity of online exams, particularly in tertiary education settings where such exams are becoming more prevalent. returning to invigilated and oral exams could form part of the solution, while using advanced proctoring techniques and ai-text output detectors may be effective in addressing this issue, they are not likely to be foolproof solutions. further research is needed to fully understand the implications of large language models like chatgpt and to devise strategies for combating the risk of cheating using these tools. it is crucial for educators and institutions to be aware of the possibility of chatgpt being used for cheating and to investigate measures to address it in order to maintain the fairness and validity of online exams for all students. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/480.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/480.txt new file mode 100644 index 0000000000000000000000000000000000000000..6ac0f8b55dbf4966696850ea039c79ea78aa1a1e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/480.txt @@ -0,0 +1 @@ + how are people able to plan so efficiently despite limited cognitive resources? we aimed to answer this question by extending an existing model of human task decomposition that can explain a wide range of simple planning problems by adding structure information to the task to facilitate planning in more complex tasks. the extended model was then applied to a more complex planning domain of spatial navigation. our results suggest that our framework can correctly predict the navigation strategies of the majority of the participants in an online experiment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/481.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/481.txt new file mode 100644 index 0000000000000000000000000000000000000000..eaf5265a25ba1682571a4e178b394cb819e44769 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/481.txt @@ -0,0 +1 @@ + recent works have shown that deep neural networks are vulnerable to adversarial examples that find samples close to the original image but can make the model misclassify. even with access only to the model's output, an attacker can employ black-box attacks to generate such adversarial examples. in this work, we propose a simple and lightweight defense against black-box attacks by adding random noise to hidden features at intermediate layers of the model at inference time. 
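The defense just described, injecting random noise into hidden features at inference time, can be illustrated with a PyTorch forward hook; the layer choice and noise scale below are assumptions rather than the paper's settings:

import torch

def add_feature_noise(module, sigma=0.1):
    """Attach a forward hook that adds Gaussian noise to the module's output,
    so intermediate features are randomly perturbed at inference time."""
    def hook(mod, inputs, output):
        return output + sigma * torch.randn_like(output)
    return module.register_forward_hook(hook)

# usage sketch with an assumed pretrained torchvision network:
# from torchvision.models import resnet18
# model = resnet18(weights="DEFAULT").eval()
# handle = add_feature_noise(model.layer3, sigma=0.05)   # layer choice is arbitrary
# logits = model(images)                                  # defended forward pass
# handle.remove()                                         # restores determinism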
our theoretical analysis confirms that this method effectively enhances the model's resilience against both score-based and decision-based black-box attacks. importantly, our defense does not necessitate adversarial training and has minimal impact on accuracy, rendering it applicable to any pre-trained model. our analysis also reveals the significance of selectively adding noise to different parts of the model based on the gradient of the adversarial objective function, which can be varied during the attack. we demonstrate the robustness of our defense against multiple black-box attacks through extensive empirical experiments involving diverse models with various architectures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/482.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/482.txt new file mode 100644 index 0000000000000000000000000000000000000000..e9ee802fd37466c0f66859beca7c3f7f1fe0e004 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/482.txt @@ -0,0 +1 @@ + one central theme in machine learning is function estimation from sparse and noisy data. an example is supervised learning where the elements of the training set are couples, each containing an input location and an output response. in the last decades, a substantial amount of work has been devoted to design estimators for the unknown function and to study their convergence to the optimal predictor, also characterizing the learning rate. these results typically rely on stationary assumptions where input locations are drawn from a probability distribution that does not change in time. in this work, we consider kernel-based ridge regression and derive convergence conditions under non stationary distributions, addressing also cases where stochastic adaption may happen infinitely often. this includes the important exploration-exploitation problems where e.g. a set of agents/robots has to monitor an environment to reconstruct a sensorial field and their movements rules are continuosuly updated on the basis of the acquired knowledge on the field and/or the surrounding environment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/483.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/483.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c3845713088fbcdee87e969e151921076c2c844 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/483.txt @@ -0,0 +1 @@ + we consider arbitrary bounded discrete time series. from its statistical feature, without any use of the fourier transform, we find an almost periodic function which suitably characterizes the corresponding time series. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/484.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/484.txt new file mode 100644 index 0000000000000000000000000000000000000000..cf0e92a3b620314107c954a2c1c2f1d00d7d9a38 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/484.txt @@ -0,0 +1 @@ + we identify hidden layers inside a deep neural network (dnn) with group actions on the data domain, and formulate a formal deep network as a dual voice transform with respect to the koopman operator, a linear representation of the group action. based on the group theoretic arguments, particularly by using schur's lemma, we show a simple proof of the universality of dnns. 
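For the kernel-based ridge regression analyzed in the non-stationary setting a couple of abstracts above, the estimator takes the standard closed form f_hat(x) = k(x, X)(K + lambda*I)^{-1} y (up to how the regularizer is scaled); a minimal scikit-learn illustration on synthetic data follows, with the RBF kernel and regularization strength chosen arbitrarily:

import numpy as np
from sklearn.kernel_ridge import KernelRidge

rng = np.random.default_rng(0)
X = rng.uniform(-3.0, 3.0, size=(200, 1))                 # sampled input locations
y = np.sin(X[:, 0]) + 0.1 * rng.normal(size=200)          # noisy responses

model = KernelRidge(kernel="rbf", alpha=1e-2, gamma=0.5)  # assumed hyperparameters
model.fit(X, y)
field = model.predict(np.linspace(-3, 3, 50).reshape(-1, 1))  # reconstructed field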
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/485.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/485.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5f2c1fdae836d57ff840c81f7bf06719abfc982 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/485.txt @@ -0,0 +1 @@ + meta-learning frameworks for few-shot learning aim to learn models that can learn new skills or adapt to new environments rapidly with a few training examples. this has led to the generalizability of the developed model towards new classes with just a few labelled samples. however, these networks are seen as black-box models, and understanding the representations learnt under different learning scenarios is crucial. neural collapse (nc) is a recently discovered phenomenon which showcases unique properties as the network proceeds towards zero loss. the input features collapse to their respective class means, the class means form a simplex equiangular tight frame (etf) where the class means are maximally distant and linearly separable, and the classifier acts as a simple nearest neighbor classifier. while these phenomena have been observed in simple classification networks, this study is the first to explore and understand the properties of neural collapse in meta-learning frameworks for few-shot learning. we perform studies on the omniglot dataset in the few-shot setting and study the neural collapse phenomenon. we observe that the learnt features indeed show the trend of neural collapse, especially as model size grows, but do not necessarily showcase the complete collapse as measured by the nc properties. preprint. under review. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/486.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/486.txt new file mode 100644 index 0000000000000000000000000000000000000000..6ebabab402a8b6e76c95e0c73642ad2f663a5633 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/486.txt @@ -0,0 +1 @@ + fifth-generation (5g) mobile communication networks have recently emerged in various fields, including high-speed trains. however, the dense deployment of 5g millimeter wave (mmwave) base stations (bss) and the high speed of moving trains lead to frequent handovers (hos), which can adversely affect the quality-of-service (qos) of mobile users. as a result, ho optimization and resource allocation are essential considerations for managing mobility in high-speed train systems. in this paper, we model the system performance of a high-speed train system with a novel machine learning (ml) approach, namely a nested cross validation scheme, which prevents information leakage from model evaluation into the model parameter tuning, thereby avoiding overfitting and resulting in better generalization error. to this end, we employ ml methods for the high-speed train system scenario. handover margin (hom) and time-to-trigger (ttt) values are used as features, several kpis are used as outputs, and several ml methods, including gradient boosting regression (gbr), adaptive boosting (adaboost), catboost regression (cbr), artificial neural network (ann), kernel ridge regression (krr), support vector regression (svr), and k-nearest neighbor regression (knnr), are employed for the problem. finally, performance comparisons of the cross validation schemes with these methods are made in terms of mean absolute error (mae) and mean square error (mse) metrics.
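Nested cross validation, the scheme the preceding abstract relies on to keep hyperparameter tuning from leaking into model evaluation, wraps an inner hyperparameter search inside an outer evaluation loop; the scikit-learn sketch below uses an assumed gradient boosting regressor, a toy parameter grid, and random placeholder data in place of the HOM/TTT features and KPI targets:

import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import GridSearchCV, KFold, cross_val_score

rng = np.random.default_rng(0)
X = rng.random((300, 2))        # placeholder for e.g. (hom, ttt) feature pairs
y = rng.random(300)             # placeholder for one kpi target

inner_cv = KFold(n_splits=3, shuffle=True, random_state=0)   # tunes hyperparameters
outer_cv = KFold(n_splits=5, shuffle=True, random_state=0)   # estimates generalization
search = GridSearchCV(GradientBoostingRegressor(random_state=0),
                      {"n_estimators": [100, 300], "max_depth": [2, 3]},
                      cv=inner_cv, scoring="neg_mean_absolute_error")
nested_mae = -cross_val_score(search, X, y, cv=outer_cv,
                              scoring="neg_mean_absolute_error")
print(nested_mae.mean())        # MAE estimated on folds never seen during tuning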
as per the obtained results, the boosting methods (abr, cbr, gbr) with the nested cross validation scheme substantially outperform the conventional cross validation scheme results obtained with the same methods. on the other hand, svr, knnr, krr, and ann with the nested scheme produce promising results for the prediction of some kpis with respect to their conventional scheme employment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/487.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/487.txt new file mode 100644 index 0000000000000000000000000000000000000000..67aecbc4de37ae36c4fb76ec4e610d1c71bdec4c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/487.txt @@ -0,0 +1 @@ + the design and deployment of fifth-generation (5g) wireless networks pose significant challenges due to the increasing number of wireless devices. path loss is of landmark importance in network performance optimization, and accurate prediction of the path loss, which characterizes the attenuation of signal power during transmission, is critical for effective network planning, coverage estimation, and optimization. in this sense, we utilize machine learning (ml) methods, which overcome the drawbacks of conventional path loss prediction models, for path loss prediction in a 5g network system to facilitate more accurate network planning, resource optimization, and performance improvement in wireless communication systems. to this end, we utilize a novel approach, a nested cross validation scheme, with ml to prevent overfitting, thereby getting a better generalization error and stable results for ml deployment. first, we acquire a publicly available dataset obtained through a comprehensive measurement campaign conducted in an urban macro-cell scenario located in beijing, china. the dataset includes crucial information such as longitude, latitude, elevation, altitude, clutter height, and distance, which are utilized as essential features to predict the path loss in the 5g network system. we deploy support vector regression (svr), catboost regression (cbr), extreme gradient boosting regression (xgbr), artificial neural network (ann), and random forest (rf) methods to predict the path loss, and compare the prediction results in terms of mean absolute error (mae) and mean square error (mse). as per the obtained results, xgbr outperforms the rest of the methods. it outperforms cbr by slight margins of 0.4% and 1% in terms of the mae and mse metrics, respectively. on the other hand, it outperforms the rest of the methods with clear performance differences. ultimately, the paper presents an ml deployment with this novel approach from which network planning, resource optimization, and performance improvement in wireless communication systems will also benefit, according to the obtained results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/488.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/488.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c5d2719248b471ba2627c61247bc01c425bdd56 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/488.txt @@ -0,0 +1 @@ + models with similar performances exhibit significant disagreement in the predictions of individual samples, referred to as prediction churn. our work explores this phenomenon in graph neural networks by investigating how models that differ only in their initializations differ in the features they utilize for predictions.
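Prediction churn, as introduced in the sentence above, is simply the disagreement rate between two models trained identically up to random initialization; a minimal, framework-agnostic way to measure it (assuming two fitted classifiers exposing a predict method) is:

import numpy as np

def prediction_churn(model_a, model_b, X):
    """Fraction of inputs on which two equally trained models disagree; inputs
    where they agree correspond to the 'stable' predictions discussed above."""
    pred_a = np.asarray(model_a.predict(X))
    pred_b = np.asarray(model_b.predict(X))
    return float(np.mean(pred_a != pred_b))

# churn = prediction_churn(gnn_seed0, gnn_seed1, test_inputs)   # hypothetical models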
we propose a novel metric called influence difference (id) to quantify the variation in reasons used by nodes across models by comparing their influence distribution. additionally, we consider the differences between nodes with a stable and an unstable prediction, positing that both equally utilize different reasons and thus provide a meaningful gradient signal to closely match two models even when the predictions for nodes are similar. based on our analysis, we propose to minimize this id in knowledge distillation, a domain where a new model should closely match an established one. as an efficient approximation, we introduce dropdistillation (dd) that matches the output for a graph perturbed by edge deletions. our empirical evaluation of six benchmark datasets for node classification validates the differences in utilized features. dd outperforms previous methods regarding prediction stability and overall performance in all considered knowledge distillation experiments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/489.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/489.txt new file mode 100644 index 0000000000000000000000000000000000000000..184cd2f81a318a898dfb155819998b73fc22d0c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/489.txt @@ -0,0 +1 @@ + nowadays, many platforms on the web offer organized events, allowing users to be organizers or participants. for such platforms, it is beneficial to predict potential event participants. existing work on this problem tends to borrow recommendation techniques. however, compared to e-commerce items and purchases, events and participation are usually of a much smaller frequency, and the data may be insufficient to learn an accurate model. in this paper, we propose to utilize social media retweeting activity data to enhance the learning of event participant prediction models. we create a joint knowledge graph to bridge the social media and the target domain, assuming that event descriptions and tweets are written in the same language. furthermore, we propose a learning model that utilizes retweeting information for the target domain prediction more effectively. we conduct comprehensive experiments in two scenarios with real-world data. in each scenario, we set up training data of different sizes, as well as warm and cold test cases. the evaluation results show that our approach consistently outperforms several baseline models, especially with the warm test cases, and when target domain data is limited. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/49.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/49.txt new file mode 100644 index 0000000000000000000000000000000000000000..81392d808f8544782d285a6a9daf3a36aa1b2291 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/49.txt @@ -0,0 +1 @@ + neuro-symbolic ai attempts to integrate neural and symbolic architectures in a manner that addresses strengths and weaknesses of each, in a complementary fashion, in order to support robust strong ai capable of reasoning, learning, and cognitive modeling. in this paper we consider the intensional first order logic (ifol) as a symbolic architecture of modern robots, able to use natural languages to communicate with humans and to reason about their own knowledge with self-reference and abstraction language property. 
we intend to obtain the grounding of the robot's language by the experience of how it uses its neuronal architectures, and hence by associating this experience with the meaning (sense) of non-defined language concepts (particulars/individuals and universals) in the prp (properties/relations/propositions) theory of ifol. we consider the robot's four-level knowledge structure: the syntax level of a particular natural language (italian, french, etc.), two universal language levels: its semantic logic structure (based on virtual predicates of fol and logic connectives) and its corresponding conceptual prp structure level, which universally represents the composite meaning of fol formulae, grounded on the robot's last, neuro-system level. finally, we provide a general method for implementing in ifol (by using the abstracted terms) different kinds of modal logic operators and their deductive axioms: we present a particular example of the robot's autoepistemic deduction capabilities by introducing a special temporal know predicate and deductive axioms for it: the reflexive, positive introspection, and distributive axioms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/490.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/490.txt new file mode 100644 index 0000000000000000000000000000000000000000..d88970da6b0da61412d37cb0e5cdede635bf9e8c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/490.txt @@ -0,0 +1 @@ + we consider the task of identifying the copeland winner(s) in a dueling bandits problem with ternary feedback. this is an underexplored but practically relevant variant of the conventional dueling bandits problem, in which, in addition to strict preference between two arms, one may observe feedback in the form of an indifference. we provide a lower bound on the sample complexity for any learning algorithm finding the copeland winner(s) with a fixed error probability. moreover, we propose pocowista, an algorithm with a sample complexity that almost matches this lower bound, and which shows excellent empirical performance, even for the conventional dueling bandits problem. for the case where the preference probabilities satisfy a specific type of stochastic transitivity, we provide a refined version with an improved worst case sample complexity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/491.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/491.txt new file mode 100644 index 0000000000000000000000000000000000000000..887e26c87193962bab4fb93a26730a5ba39bb5cd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/491.txt @@ -0,0 +1 @@ + seminal research in the field of graph neural networks (gnns) has revealed a direct correspondence between the expressive capabilities of gnns and the k-dimensional weisfeiler-leman (kwl) test, a widely-recognized method for verifying graph isomorphism. this connection has reignited interest in comprehending the specific graph properties effectively distinguishable by the kwl test. a central focus of research in this field revolves around determining the least dimensionality k for which kwl can discern graphs with a different number of occurrences of a pattern graph p. we refer to such a least k as the wl-dimension of this pattern counting problem. this inquiry traditionally delves into two distinct counting problems related to patterns: subgraph counting and induced subgraph counting.
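The kWL hierarchy referenced above generalizes the classic 1-dimensional Weisfeiler-Leman (color refinement) procedure; the sketch below shows only that base case on an adjacency-list graph, while the k-dimensional test and the WL-dimension analysis of pattern counting are not reproduced:

def wl_colors(adj, num_iters=3):
    """1-WL color refinement: repeatedly hash each node's color together with
    the multiset of its neighbours' colors. `adj` maps node -> list of neighbours."""
    colors = {v: 0 for v in adj}
    for _ in range(num_iters):
        signatures = {v: (colors[v], tuple(sorted(colors[u] for u in adj[v])))
                      for v in adj}
        palette = {sig: i for i, sig in enumerate(sorted(set(signatures.values())))}
        colors = {v: palette[signatures[v]] for v in adj}
    return colors

# 1-WL separates these two graphs because their refined color histograms differ
triangle = {0: [1, 2], 1: [0, 2], 2: [0, 1]}
path = {0: [1], 1: [0, 2], 2: [1]}
print(sorted(wl_colors(triangle).values()), sorted(wl_colors(path).values()))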
intriguingly, despite their initial appearance as separate challenges with seemingly divergent approaches, both of these problems are interconnected components of a more comprehensive problem: "graph motif parameters". in this paper, we provide a precise characterization of the wl-dimension of labeled graph motif parameters. as specific instances of this result, we obtain characterizations of the wl-dimension of the subgraph counting and induced subgraph counting problem for every labeled pattern p . particularly noteworthy is our resolution of a problem left open in previous work concerning induced copies. we additionally demonstrate that in cases where the kwl test distinguishes between graphs with varying occurrences of a pattern p , the exact number of occurrences of p can be computed uniformly using only local information of the last layer of a corresponding gnn. we finally delve into the challenge of recognizing the wldimension of various graph parameters. we give a polynomial time algorithm for determining the wl-dimension of the subgraph counting problem for given pattern p , answering an open question from previous work. we additionally show how to utilize deep results from the field of graph motif parameters, together with our characterization, to determine the wl-dimension of induced subgraph counting and counting k-graphlets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/492.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/492.txt new file mode 100644 index 0000000000000000000000000000000000000000..04b65388a491a17ef0c1a099397f5d7dad9ef82e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/492.txt @@ -0,0 +1 @@ + state estimation is the cornerstone of the power system control center, since it provides the operating condition of the system in consecutive time intervals. this work investigates the application of physics-informed neural networks (pinns) for accelerating power systems state estimation in monitoring the operation of power systems. traditional state estimation techniques often rely on iterative algorithms that can be computationally intensive, particularly for large-scale power systems. in this paper, a novel approach that leverages the inherent physical knowledge of power systems through the integration of pinns is proposed. by incorporating physical laws as prior knowledge, the proposed method significantly reduces the computational complexity associated with state estimation while maintaining high accuracy. the proposed method achieves up to 11% increase in accuracy, 75% reduction in standard deviation of results, and 30% faster convergence, as demonstrated by comprehensive experiments on the ieee 14-bus system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/493.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/493.txt new file mode 100644 index 0000000000000000000000000000000000000000..1684b0003207192cb86967fad083ea9c4179332f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/493.txt @@ -0,0 +1 @@ + the symmetry and geometry of input data are considered to be encoded in the internal data representation inside the neural network, but the specific encoding rule has been less investigated. in this study, we present a systematic method to induce a generalized neural network and its right inverse operator, called the ridgelet transform, from a joint group invariant function on the data-parameter domain. 
since the ridgelet transform is an inverse, (1) it can describe the arrangement of parameters for the network to represent a target function, which is understood as the encoding rule, and (2) it implies the universality of the network. based on the group representation theory, we present a new simple proof of the universality by using schur's lemma in a unified manner covering a wide class of networks, for example, the original ridgelet transform, formal deep networks, and the dual voice transform. since traditional universality theorems were demonstrated based on functional analysis, this study sheds light on the group theoretic aspect of the approximation theory, connecting geometric deep learning to abstract harmonic analysis. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/494.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/494.txt new file mode 100644 index 0000000000000000000000000000000000000000..8479bd8627ecb80cc68da8bcaffe02be4360b646 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/494.txt @@ -0,0 +1 @@ + existing approaches to algorithmic fairness aim to ensure equitable outcomes if human decision-makers comply perfectly with algorithmic decisions. however, perfect compliance with the algorithm is rarely a reality or even a desirable outcome in human-ai collaboration. yet, recent studies have shown that selective compliance with fair algorithms can amplify discrimination relative to the prior human policy. as a consequence, ensuring equitable outcomes requires fundamentally different algorithmic design principles that ensure robustness to the decision-maker's (a priori unknown) compliance pattern. we define the notion of compliance-robustly fair algorithmic recommendations that are guaranteed to (weakly) improve fairness in decisions, regardless of the human's compliance pattern. we propose a simple optimization strategy to identify the best performanceimproving compliance-robustly fair policy. however, we show that it may be infeasible to design algorithmic recommendations that are simultaneously fair in isolation, compliance-robustly fair, and more accurate than the human policy; thus, if our goal is to improve the equity and accuracy of human-ai collaboration, it may not be desirable to enforce traditional fairness constraints. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/495.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/495.txt new file mode 100644 index 0000000000000000000000000000000000000000..6812de355fc3c9a6ecfb01e5e0131c863ed1fd4b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/495.txt @@ -0,0 +1 @@ + detecting and discovering new gene interactions based on known gene expressions and gene interaction data presents a significant challenge. various statistical and deep learning methods have attempted to tackle this challenge by leveraging the topological structure of gene interactions and gene expression patterns to predict novel gene interactions. in contrast, some approaches have focused exclusively on utilizing gene expression profiles. in this context, we introduce gener, a parallel-layer deep learning network designed exclusively for the identification of gene-gene relationships using gene expression data. we conducted two training experiments and compared the performance of our network with that of existing statistical and deep learning approaches. 
notably, our model achieved an average auroc score of 0.834 on the combined biogrid&dream5 dataset, outperforming competing methods in predicting gene-gene interactions. we are pleased to make gener publicly accessible on github under the gnu general public license. you can download it from the following github repository: (https://github.com/ahmedfakhry47/gener). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/496.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/496.txt new file mode 100644 index 0000000000000000000000000000000000000000..c8d0000b5b54b117b51a25b9ef4a043b9ef4f2bb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/496.txt @@ -0,0 +1 @@ + a key challenge, supported both theoretically and empirically, is that robustness demands greater network capacity and more data than standard training. however, effectively adding capacity under stringent lipschitz constraints has proven more difficult than it may seem, evident by the fact that state-of-the-art approach tend more towards underfitting than overfitting. moreover, we posit that a lack of careful exploration of the design space for lipshitz-based approaches has left potential performance gains on the table. in this work, we provide a more comprehensive evaluation to better uncover the potential of lipschitz-based certification methods. using a combination of novel techniques, design optimizations, and synthesis of prior work, we are able to significantly improve the state-of-the-art verified robust accuracy (vra) for deterministic certification on a variety of benchmark datasets, and over a range of perturbation sizes. of particular note, we discover that the addition of large "cholesky-orthogonalized residual dense" layers to the end of existing state-of-the-art lipschitz-controlled resnet architectures is especially effective for increasing network capacity and performance. combined with filtered generative data augmentation, our final results further the state of the art deterministic vra by up to 8.5 percentage points. code is available at https://github.com/hukkai/liresnet. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/497.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/497.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b1fe69060b04ef41d72f45b80bda2f11b6b3e40 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/497.txt @@ -0,0 +1 @@ + graph-level anomaly detection has gained significant attention as it finds applications in various domains, such as cancer diagnosis and enzyme prediction. however, existing methods fail to capture the spectral properties of graph anomalies, resulting in unexplainable framework design and unsatisfying performance. in this paper, we re-investigate the spectral differences between anomalous and normal graphs. our main observation shows a significant disparity in the accumulated spectral energy between these two classes. moreover, we prove that the accumulated spectral energy of the graph signal can be represented by its rayleigh quotient, indicating that the rayleigh quotient is a driving factor behind the anomalous properties of graphs. motivated by this, we propose rayleigh quotient graph neural network (rqgnn), the first spectral gnn that explores the inherent spectral features of anomalous graphs for graph-level anomaly detection. 
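The Rayleigh quotient that the abstract above identifies as the driving spectral quantity is, for a graph signal x and combinatorial Laplacian L = D - A, the ratio x^T L x / x^T x; a small numpy sketch of computing it from an adjacency matrix follows (the pooling and GNN components of rqgnn are not reproduced):

import numpy as np

def rayleigh_quotient(adjacency, signal):
    """Compute x^T L x / x^T x for a graph signal, where L = D - A is the
    combinatorial Laplacian; larger values mean more high-frequency energy."""
    A = np.asarray(adjacency, dtype=float)
    L = np.diag(A.sum(axis=1)) - A
    x = np.asarray(signal, dtype=float)
    return float(x @ L @ x) / float(x @ x)

# toy example on a 4-cycle: a constant signal vs. an alternating one
cycle = np.array([[0, 1, 0, 1],
                  [1, 0, 1, 0],
                  [0, 1, 0, 1],
                  [1, 0, 1, 0]])
print(rayleigh_quotient(cycle, np.array([1, 1, 1, 1])))    # 0.0, smooth signal
print(rayleigh_quotient(cycle, np.array([1, -1, 1, -1])))  # 4.0, high-frequency signal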
specifically, we introduce a novel framework with two components: the rayleigh quotient learning component (rql) and chebyshev wavelet gnn with rq-pooling (cwgnn-rq). rql explicitly captures the rayleigh quotient of graphs and cwgnn-rq implicitly explores the spectral space of graphs. extensive experiments on 10 real-world datasets show that rqgnn outperforms the best rival by 6.74% in macro-f1 score and 1.44% in auc, demonstrating the effectiveness of our framework. our code is available at https://github.com/xydong127/rqgnn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/498.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/498.txt new file mode 100644 index 0000000000000000000000000000000000000000..7412b81edb832543a67aecca6f24b6580d54d9d5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/498.txt @@ -0,0 +1 @@ + the use of credit cards has recently increased, creating an essential need for credit card assessment methods to minimize potential risks. this study investigates the utilization of machine learning (ml) models for credit card default prediction system. the main goal here is to investigate the best-performing ml model for new proposed credit card scoring dataset. this new dataset includes credit card transaction histories and customer profiles, has been proposed and tested using a variety of machine learning algorithms, including logistic regression, decision trees, random forests, multi layer perceptron (mlp) neural network, xgboost, and lightgbm. to prepare the data for machine learning models, we employ different data preprocessing techniques such as feature extraction, handling missing values, managing outliers, and applying data balancing methods. experimental results demonstrate that mlp outperforms logistic regression, decision trees, random forests, lightgbm, and xgboost in terms of predictive performance in true positive rate, achieving an impressive area under the curve (auc) of 86.7% and an accuracy rate of 91.6%, with a recall rate exceeding 80%. these results indicate the superiority of mlp in predicting the default customers and assessing the potential risks. furthermore, they help banks and other financial institutions in predicting loan defaults at an earlier stage. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/499.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/499.txt new file mode 100644 index 0000000000000000000000000000000000000000..100e06d71f5bb8acfad57818c18809014607d833 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/499.txt @@ -0,0 +1 @@ + we investigate a novel modeling approach for end-toend neural network training using hidden markov models (hmm) where the transition probabilities between hidden states are modeled and learned explicitly. most contemporary sequence-to-sequence models allow for from-scratch training by summing over all possible label segmentations in a given topology. in our approach there are explicit, learnable probabilities for transitions between segments as opposed to a blank label that implicitly encodes duration statistics.we implement a gpu-based forward-backward algorithm that enables the simultaneous training of label and transition probabilities.we investigate recognition results and additionally viterbi alignments of our models. 
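At the core of the forward-backward training described above is the forward recursion over explicit transition probabilities; the numpy/scipy sketch below shows that recursion in log space for a generic HMM (shapes, topology, and emissions are assumptions, and the GPU implementation and backward pass are omitted):

import numpy as np
from scipy.special import logsumexp

def forward_log_likelihood(log_pi, log_trans, log_emit):
    """Forward recursion in log space.
    log_pi:    (S,)   initial state log-probabilities
    log_trans: (S, S) log p(state_t = j | state_{t-1} = i)
    log_emit:  (T, S) log p(observation_t | state_t = j)
    Returns log p(observations) under the HMM."""
    alpha = log_pi + log_emit[0]
    for t in range(1, log_emit.shape[0]):
        alpha = logsumexp(alpha[:, None] + log_trans, axis=0) + log_emit[t]
    return logsumexp(alpha)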
we find that while the transition model training does not improve recognition performance, it has a positive impact on the alignment quality. the generated alignments are shown to be viable targets in state-of-the-art viterbi trainings. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/5.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/5.txt new file mode 100644 index 0000000000000000000000000000000000000000..68467eea0c1b00c000dbdbf77da20ae1d2dfe2f8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/5.txt @@ -0,0 +1 @@ + several policy options exist, or have been proposed, to further responsible artificial intelligence (ai) development and deployment. institutions, including u.s. government agencies, states, professional societies, and private and public sector businesses, are well positioned to implement these policies. however, given limited resources, not all policies can or should be equally prioritized. we define and review nine suggested policies for furthering responsible ai, rank each policy on potential use and impact, and recommend prioritization relative to each institution type. we find that pre-deployment audits and assessments and post-deployment accountability are likely to have the highest impact but also the highest barriers to adoption. we recommend that u.s. government agencies and companies highly prioritize development of pre-deployment audits and assessments, while the u.s. national legislature should highly prioritize postdeployment accountability. we suggest that u.s. government agencies and professional societies should highly prioritize policies that support responsible ai research and that states should highly prioritize support of responsible ai education. we propose that companies can highly prioritize involving community stakeholders in development efforts and supporting diversity in ai development. we advise lower levels of prioritization across institutions for ai ethics statements and databases of ai technologies or incidents. we recognize that no one policy will lead to responsible ai and instead advocate for strategic policy implementation across institutions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/50.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/50.txt new file mode 100644 index 0000000000000000000000000000000000000000..a1cd608b7a40e586fecb11f673837778ae1f3ba8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/50.txt @@ -0,0 +1 @@ + this paper presents the crowd score, a novel method to assess the funniness of jokes using large language models (llms) as ai judges. our method relies on inducing different personalities into the llm and aggregating the votes of the ai judges into a single score to rate jokes. we validate the votes using an auditing technique that checks if the explanation for a particular vote is reasonable using the llm. we tested our methodology on 52 jokes in a crowd of four ai voters with different humour types: affiliative, self-enhancing, aggressive and self-defeating. our results show that few-shot prompting leads to better results than zero-shot for the voting question. personality induction showed that aggressive and self-defeating voters are significantly more inclined to find more jokes funny of a set of aggressive/self-defeating jokes than the affiliative and self-enhancing voters. 
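for the crowd score described above, the aggregation step can be as simple as averaging the personality-conditioned votes; the sketch below is schematic, and ask_llm_judge is a hypothetical placeholder rather than the authors' prompting code.

```python
# schematic sketch of personality-conditioned voting and aggregation
# (ask_llm_judge is a hypothetical placeholder, not the authors' api).
PERSONALITIES = ["affiliative", "self-enhancing", "aggressive", "self-defeating"]

def ask_llm_judge(joke: str, personality: str) -> int:
    """return a funniness vote in {0, 1} from an llm prompted with `personality`."""
    raise NotImplementedError("call your llm of choice here")

def crowd_score(joke: str) -> float:
    votes = [ask_llm_judge(joke, p) for p in PERSONALITIES]
    return sum(votes) / len(votes)   # aggregate the ai judges into one score
```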
the crowd score follows the same trend as human judges by assigning higher scores to jokes that are also considered funnier by human judges. we believe that our methodology could be applied to other creative domains such as story, poetry, slogans, etc. it could both help the adoption of a flexible and accurate standard approach to compare different work in the cc community under a common metric and by minimizing human participation in assessing creative artefacts, it could accelerate the prototyping of creative artefacts and reduce the cost of hiring human participants to rate creative artefacts. 1 \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/500.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/500.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c0bde226fc391244cc713610f78b93e5b7188d2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/500.txt @@ -0,0 +1 @@ + the identification of 'prakriti' types for the human body is a long-lost medical practice in finding the harmony between the nature of human beings and their behaviour. there are 3 fundamental 'prakriti' types of individuals. a person can belong to any 'dosha'. in the existing models, researchers have made use of svm, knn, pca, decision tree, and various other algorithms. the output of these algorithms was quite decent, but it can be enhanced with the help of multinomial naïve bayes and k-modes clustering. most of the researchers have confined themselves to 3 basic classes. this might not be accurate in the realworld scenario, where overlapping might occur. considering these, we have classified the 'doshas' into 7 categories, which includes overlapping of 'doshas'. these are namely, 'vatt-dosha', 'pitt-dosha', 'kaph-dosha', 'vatt-pitt-dosha', 'pitt-kaph-dosha', 'kaph-vatt-dosha', and 'vatt-pitt-kaph-dosha'. the data used contains a balanced set of all individual entries on which preprocessing steps of machine learning have been performed. chi-square test for handling categorical data is being used for feature selection. for model fitting, the method used in this approach is k-modes clustering. the empirical results demonstrate a better result while using the mnb classifier. all key findings of this work have achieved 0.90 accuracy, 0.81 precision, 0.91 f-score, and 0.90 recall. the discussion suggests a provident analysis of the seven clusters and predicts their occurrence. the results have been consolidated to improve the ayurvedic advancements with machine learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/501.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/501.txt new file mode 100644 index 0000000000000000000000000000000000000000..d1f26a4d452479392b09d1e339b4eab305440ff2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/501.txt @@ -0,0 +1 @@ + the burgeoning complexity of contemporary deep learning models, while achieving unparalleled accuracy, has inadvertently introduced deployment challenges in resource-constrained environments. knowledge distillation, a technique aiming to transfer knowledge from a high-capacity "teacher" model to a streamlined "student" model, emerges as a promising solution to this dilemma. this paper provides a comprehensive overview of the knowledge distillation paradigm, emphasizing its foundational principles such as the utility of soft labels and the significance of temperature scaling. 
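to make the role of soft labels and temperature scaling concrete, the following numpy sketch shows a generic distillation loss of the kind the survey above discusses; the temperature and weighting values are illustrative assumptions, not taken from any specific paper.

```python
# generic knowledge-distillation loss with temperature scaling (numpy sketch):
# the student matches the teacher's temperature-softened "soft labels" while
# also fitting the hard label.
import numpy as np

def softmax(z, t=1.0):
    z = z / t
    z = z - z.max(axis=-1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=-1, keepdims=True)

def distillation_loss(student_logits, teacher_logits, hard_label, t=4.0, alpha=0.5):
    p_teacher = softmax(teacher_logits, t)                      # soft labels
    log_p_student = np.log(softmax(student_logits, t) + 1e-12)
    soft_term = -(p_teacher * log_p_student).sum() * t * t      # temperature-scaled term
    hard_term = -np.log(softmax(student_logits)[hard_label] + 1e-12)
    return alpha * soft_term + (1 - alpha) * hard_term
```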
through meticulous examination, we elucidate the critical determinants of successful distillation, including the architecture of the student model, the caliber of the teacher, and the delicate balance of hyperparameters. while acknowledging its profound advantages, we also delve into the complexities and challenges inherent in the process. our exploration underscores knowledge distillation's potential as a pivotal technique in optimizing the trade-off between model performance and deployment efficiency. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/502.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/502.txt new file mode 100644 index 0000000000000000000000000000000000000000..50e798b44e7990496877d89ae3715b1d4d30c4c0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/502.txt @@ -0,0 +1 @@ + as large language models (llms) become more capable, there is an urgent need for interpretable and transparent tools. current methods are difficult to implement, and accessible tools to analyze model internals are lacking. to bridge this gap, we present deepdecipher -an api and interface for probing neurons in transformer models' mlp layers. deepdecipher makes the outputs of advanced interpretability techniques for llms readily available. the easy-to-use interface also makes inspecting these complex models more intuitive. this paper outlines deepdecipher's design and capabilities. we demonstrate how to analyze neurons, compare models, and gain insights into model behavior. for example, we contrast deepdecipher's functionality with similar tools like neuroscope and openai's neuron explainer. deepdecipher enables efficient, scalable analysis of llms. by granting access to state-of-the-art interpretability methods, deepdecipher makes llms more transparent, trustworthy, and safe. researchers, engineers, and developers can quickly diagnose issues, audit systems, and advance the field. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/503.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/503.txt new file mode 100644 index 0000000000000000000000000000000000000000..66de0d6d173d48c42f8d42b7d46268d97f6d90c8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/503.txt @@ -0,0 +1 @@ + nowcasting day-ahead marginal emissions factors is increasingly important for power systems with high flexibility and penetration of distributed energy resources. with a significant share of firm generation from natural gas and coal power plants, forecasting day-ahead emissions in the current energy system has been widely studied. in contrast, as we shift to an energy system characterized by flexible power markets, dispatchable sources, and competing low-cost generation such as large-scale battery or hydrogen storage, system operators will be able to choose from a mix of different generation as well as emission pathways. to fully develop the emissions implications of a given dispatch schedule, we need a near real-time workflow with two layers. the first layer is a market model that continuously solves a security-constrained economic dispatch model. the second layer determines the marginal emissions based on the output of the market model, which is the subject of this paper. we propose using multi-headed convolutional neural networks to generate day-ahead forecasts of marginal and average emissions for a given independent system operator. 
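a minimal pytorch sketch of a multi-headed 1-d convolutional forecaster with separate marginal and average emission heads is given below; input shapes and layer sizes are assumptions for illustration, not the paper's architecture.

```python
# minimal two-headed 1-d cnn sketch (pytorch); shapes and layer sizes are
# illustrative assumptions, not the paper's architecture.
import torch
import torch.nn as nn

class TwoHeadEmissionsNet(nn.Module):
    def __init__(self, in_channels: int = 8, horizon: int = 24):
        super().__init__()
        self.backbone = nn.Sequential(
            nn.Conv1d(in_channels, 32, kernel_size=3, padding=1), nn.ReLU(),
            nn.Conv1d(32, 32, kernel_size=3, padding=1), nn.ReLU(),
        )
        self.marginal_head = nn.Conv1d(32, 1, kernel_size=1)   # marginal emissions per hour
        self.average_head = nn.Conv1d(32, 1, kernel_size=1)    # average emissions per hour

    def forward(self, x):                    # x: (batch, in_channels, horizon)
        h = self.backbone(x)
        return self.marginal_head(h).squeeze(1), self.average_head(h).squeeze(1)

# usage: y_marg, y_avg = TwoHeadEmissionsNet()(torch.randn(4, 8, 24))
```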
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/504.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/504.txt new file mode 100644 index 0000000000000000000000000000000000000000..8313a3178de49b10b7a404b557f07bda30324f3f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/504.txt @@ -0,0 +1 @@ + transformer models have revolutionized natural language processing with their unparalleled ability to grasp complex contextual relationships. however, the vast number of parameters in these models has raised concerns regarding computational efficiency, environmental impact, and deployability on resource-limited platforms. to address these challenges, this paper investigates the application of weight pruning-a strategic reduction of model parameters based on their significance-as an optimization strategy for transformer architectures. through extensive experimentation, we explore various pruning methodologies, highlighting their impact on model performance, size, and computational demands. our findings suggest that with judicious selection of pruning hyperparameters, significant reductions in model size are attainable without considerable compromise on performance. moreover, when coupled with post-pruning fine-tuning strategies, some pruned models even exhibit enhanced generalization capabilities. this work seeks to bridge the gap between model efficiency and performance, paving the way for more scalable and environmentally responsible deep learning applications.preprint. under review. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/505.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/505.txt new file mode 100644 index 0000000000000000000000000000000000000000..8bfcc85d329cd97d62e4bcd81b0cef6bee6270c0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/505.txt @@ -0,0 +1 @@ + streaming neural network models for fast frame-wise responses to various speech and sensory signals are widely adopted on resource-constrained platforms. hence, increasing the learning capacity of such streaming models (i.e., by adding more parameters) to improve the predictive power may not be viable for real-world tasks. in this work, we propose a new loss, streaming anchor loss (sal), to better utilize the given learning capacity by encouraging the model to learn more from essential frames. more specifically, our sal and its focal variations dynamically modulate the frame-wise cross entropy loss based on the importance of the corresponding frames so that a higher loss penalty is assigned for frames within the temporal proximity of semantically critical events. therefore, our loss ensures that the model training focuses on predicting the relatively rare but task-relevant frames. experimental results with standard lightweight convolutional and recurrent streaming networks on three different speech based detection tasks demonstrate that sal enables the model to learn the overall task more effectively with improved accuracy and latency, without any additional data, model parameters, or architectural changes. 
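the core idea of the streaming anchor loss above, modulating the frame-wise cross entropy by frame importance near critical events, can be sketched as a weighted frame-wise loss; the weighting scheme here is a simplified stand-in for the paper's formulation.

```python
# simplified frame-weighted cross-entropy (numpy sketch); the per-frame
# weights stand in for the paper's importance modulation near critical events.
import numpy as np

def weighted_frame_ce(log_probs, labels, frame_weights):
    """log_probs: (T, C) per-frame log class probabilities;
    labels: (T,) integer class targets; frame_weights: (T,) importance weights."""
    nll = -log_probs[np.arange(len(labels)), labels]   # per-frame negative log-likelihood
    return float((frame_weights * nll).sum() / frame_weights.sum())
```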
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/506.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/506.txt new file mode 100644 index 0000000000000000000000000000000000000000..69c82f2e5a956415c910c6db6aeb057d8b8572b7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/506.txt @@ -0,0 +1 @@ + this paper introduces a lightweight gesture recognition system based on 60 ghz frequency modulated continuous wave (fmcw) radar. we show that gestures can be characterized efficiently by a set of five features, and propose a slim radar processing algorithm to extract these features. in contrast to previous approaches, we avoid heavy 2d processing, i.e. range-doppler imaging, and perform instead an early target detection -this allows us to port the system to fully embedded platforms with tight constraints on memory, compute and power consumption. a recurrent neural network (rnn) based architecture exploits these features to jointly detect and classify five different gestures. the proposed system recognizes gestures with an f1 score of 98.4% on our hold-out test dataset, it runs on an arm® cortex®-m4 microcontroller requiring less than 280 kb of flash memory, 120 kb of ram, and consuming 75 mw of power. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/507.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/507.txt new file mode 100644 index 0000000000000000000000000000000000000000..152dc0e03734c7bd1bd744ab17b78626ac198bb1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/507.txt @@ -0,0 +1 @@ + symbolic rule learners generate interpretable solutions, however they require the input to be encoded symbolically. neuro-symbolic approaches overcome this issue by mapping raw data to latent symbolic concepts using a neural network. training the neural and symbolic components jointly is difficult, due to slow and unstable learning, hence many existing systems rely on hand-engineered rules to train the network. we introduce neuralfastlas, a scalable and fast end-to-end approach that trains a neural network jointly with a symbolic learner. for a given task, neuralfastlas computes a relevant set of rules, proved to contain an optimal symbolic solution, trains a neural network using these rules, and finally finds an optimal symbolic solution to the task while taking network predictions into account. a key novelty of our approach is learning a posterior distribution on rules while training the neural network to improve stability during training. we provide theoretical results for a sufficient condition on network training to guarantee correctness of the final solution. experimental results demonstrate that neuralfastlas is able to achieve stateof-the-art accuracy in arithmetic and logical tasks, with a training time that is up to two orders of magnitude faster than other jointly trained neurosymbolic methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/508.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/508.txt new file mode 100644 index 0000000000000000000000000000000000000000..7ed9c523d64757b14c8ab0bdf33a1f58991e5df3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/508.txt @@ -0,0 +1 @@ + freezing of gait is a parkinson's disease symptom that episodically inflicts a patient with the inability to step or turn while walking. 
while medical experts have discovered various triggers and alleviating actions for freezing of gait, the underlying causes and prediction models are still being explored today. current freezing of gait prediction models that utilize machine learning achieve high sensitivity and specificity in freezing of gait predictions based on time-series data; however, these models lack specifications on the type of freezing of gait events. we develop various deep learning models using the transformer encoder architecture plus bidirectional lstm layers and different feature sets to predict the three different types of freezing of gait events. the best-performing model achieves a score of 0.427 on testing data, which would rank top 5 in kaggle's freezing of gait prediction competition, hosted by the michael j. fox foundation. however, we also recognize overfitting to the training data, which could potentially be mitigated through pseudo-labelling on additional data and model architecture simplification. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/509.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/509.txt new file mode 100644 index 0000000000000000000000000000000000000000..d38a0fdf7baa5314ed28b73998265cc895546ce3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/509.txt @@ -0,0 +1 @@ + public and private actors struggle to assess the vast amounts of information about sustainability commitments made by various institutions. to address this problem, we create a novel tool for automatically detecting corporate, national, and regional net zero and reduction targets in three steps. first, we introduce an expert-annotated data set with 3.5k text samples. second, we train and release climatebert-netzero, a natural language classifier to detect whether a text contains a net zero or reduction target. third, we showcase its analysis potential with two use cases: we first demonstrate how climatebert-netzero can be combined with conventional question-answering (q&a) models to analyze the ambitions displayed in net zero and reduction targets. furthermore, we employ the climatebert-netzero model on quarterly earnings call transcripts and outline how communication patterns evolve over time. our experiments demonstrate promising pathways for extracting and analyzing net zero and emission reduction targets at scale. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/51.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/51.txt new file mode 100644 index 0000000000000000000000000000000000000000..c80f0512febc6691cc1a19a394ae531470a2b658 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/51.txt @@ -0,0 +1 @@ + candidate axiom scoring is the task of assessing the acceptability of a candidate axiom against the evidence provided by known facts or data. the ability to score candidate axioms reliably is required for automated schema or ontology induction, but it can also be valuable for ontology and/or knowledge graph validation. accurate axiom scoring heuristics are often computationally expensive, which is an issue if you wish to use them in iterative search techniques like level-wise generate-and-test or evolutionary algorithms, which require scoring a large number of candidate axioms.
we address the problem of developing a predictive model as a substitute for reasoning that predicts the possibility score of candidate class axioms and is quick enough to be employed in such situations. we use a semantic similarity measure taken from an ontology's subsumption structure for this purpose. we show that the approach provided in this work can accurately learn the possibility scores of candidate owl class axioms and that it can do so for a variety of owl class axioms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/510.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/510.txt new file mode 100644 index 0000000000000000000000000000000000000000..0c159f18d8eca4ff5aa9032b7760c3700c088a6d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/510.txt @@ -0,0 +1 @@ + regarding the rising number of people suffering from mental health illnesses in today's society, the importance of mental health cannot be overstated. wearable sensors, which are increasingly widely available, provide a potential way to track and comprehend mental health issues. these gadgets not only monitor everyday activities but also continuously record vital signs like heart rate, perhaps providing information on a person's mental state. recent research has used these sensors in conjunction with machine learning methods to identify patterns relating to different mental health conditions, highlighting the immense potential of this data beyond simple activity monitoring. in this research, we present a novel algorithm called the hybrid random forest -neural network that has been tailored to evaluate sensor data from depressed patients. our method has a noteworthy accuracy of 80% when evaluated on a special dataset that included both unipolar and bipolar depressive patients as well as healthy controls. the findings highlight the algorithm's potential for reliably determining a person's depression condition using sensor data, making a substantial contribution to the area of mental health diagnostics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/511.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/511.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b8921103a78ade04a06877494d24a4d9010f861 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/511.txt @@ -0,0 +1 @@ + evaluating the adversarial robustness of machine-learning models using gradient-based attacks is challenging. in this work, we show that hyperparameter optimization can improve fast minimum-norm attacks by automating the selection of the loss function, the optimizer, and the step-size scheduler, along with the corresponding hyperparameters. our extensive evaluation involving several robust models demonstrates the improved efficacy of fast minimum-norm attacks when hyped up with hyperparameter optimization. we release our open-source code at https://github.com/pralab/ho-fmn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/512.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/512.txt new file mode 100644 index 0000000000000000000000000000000000000000..8e3b4fff4223fa28c4aebf1adb91d798ab4434e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/512.txt @@ -0,0 +1 @@ + divorce is one of the most common social issues in developed countries like in the united states. 
almost 50% of the recent marriages turn into an involuntary divorce or separation. while it is evident that people vary to a different extent, and even over time, an incident like divorce does not interrupt the individual's daily activities; still, divorce has a severe effect on the individual's mental health, and personal life. within the scope of this research, the divorce prediction was carried out by evaluating a dataset named by the 'divorce predictor dataset' to correctly classify between married and divorce people using six different machine learning algorithms-logistic regression (lr), linear discriminant analysis (lda), k-nearest neighbors (knn), classification and regression trees (cart), gaussian naïve bayes (nb), and, support vector machines (svm). preliminary computational results show that algorithms such as svm, knn, and lda, can perform that task with an accuracy of 98.57%. this work's additional novel contribution is the detailed and comprehensive explanation of prediction probabilities using local interpretable model-agnostic explanations (lime). utilizing lime to analyze test results illustrates the possibility of differentiating between divorced and married couples. finally, we have developed a divorce predictor app considering ten most important features that potentially affect couples in making decisions in their divorce, such tools can be used by any one in order to identify their relationship condition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/513.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/513.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1216bf742191417d34984d73f57737005e4bc8f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/513.txt @@ -0,0 +1 @@ + imbalanced classification is a well-known challenge faced by many real-world applications. this issue occurs when the distribution of the target variable is skewed, leading to a prediction bias toward the majority class. with the arrival of the big data era, there is a pressing need for efficient solutions to solve this problem. in this work, we present a novel resampling method called smotenn that combines intelligent undersampling and oversampling using a mapreduce framework. both procedures are performed on the same pass over the data, conferring efficiency to the technique. the smotenn method is complemented with an efficient implementation of the neighborhoods related to the minority samples. our experimental results show the virtues of this approach, outperforming alternative resampling techniques for small-and medium-sized datasets while achieving positive results on large datasets with reduced running times. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/514.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/514.txt new file mode 100644 index 0000000000000000000000000000000000000000..25fd0b49ae5f40dab1f07002043cd7e8b21dca4e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/514.txt @@ -0,0 +1 @@ + the covid-19 pandemic has accentuated socioeconomic disparities across various racial and ethnic groups in the united states. while previous studies have utilized traditional survey methods like the household pulse survey (hps) to elucidate these disparities, this paper explores the role of social media platforms in both highlighting and addressing these challenges. 
drawing from real-time data sourced from twitter, we analyzed language patterns related to four major types of adverse experiences: loss of employment income (li), food scarcity (fs), housing insecurity (hi), and unmet needs for mental health services (um). we first formulate a sparsity optimization problem that extracts low-level language features from social media data sources. second, we propose novel constraints on feature similarity exploiting prior knowledge about the similarity of the language patterns among the adverse experiences. the proposed problem is challenging to solve due to the non-convex objective and non-smooth penalties. we develop an algorithm based on the alternating direction method of multipliers (admm) framework to solve the proposed formulation. extensive experiments and comparisons to other models on real-world social media and the detection of adverse experiences justify the efficacy of our model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/515.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/515.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6e6ca2445028712609aa9e2f2e126c5670b2592 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/515.txt @@ -0,0 +1 @@ + in this paper we give several applications of littlestone dimension. the first is to the model of angluin and dohrn (2017), where we extend their results for learning by equivalence queries with random counterexamples. second, we extend that model to infinite concept classes with an additional source of randomness. third, we give improved results on the relationship of littlestone dimension to classes with extended d-compression schemes, proving a strong version of a conjecture of floyd and warmuth (1995) for littlestone dimension. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/516.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/516.txt new file mode 100644 index 0000000000000000000000000000000000000000..79d84f6f65121e6c798efd48cce708ee10cbdc9a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/516.txt @@ -0,0 +1 @@ + a single event upset (seu) is a critical soft error that occurs in semiconductor devices on exposure to ionising particles from space environments. seus cause bit flips in the memory component of semiconductors. this creates a multitude of safety hazards as stored information becomes less reliable. currently, seus are only detected several hours after their occurrence. cremer, the model presented in this paper, predicts seus in advance using machine learning. cremer uses only positional data to predict seu occurrence, making it robust, inexpensive and scalable. upon implementation, the improved reliability of memory devices will create a digitally safer environment onboard space vehicles. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/517.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/517.txt new file mode 100644 index 0000000000000000000000000000000000000000..bffdb47398e076a05a0b3ef404b6865a27794a60 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/517.txt @@ -0,0 +1 @@ + we consider online reinforcement learning (rl) in episodic markov decision processes (mdps) under the linear q^π-realizability assumption, where it is assumed that the action-values of all policies can be expressed as linear functions of state-action features.
this class is known to be more general than linear mdps, where the transition kernel and the reward function are assumed to be linear functions of the feature vectors. as our first contribution, we show that the difference between the two classes is the presence of states in linearly q^π-realizable mdps where for any policy, all the actions have approximately equal values, and skipping over these states by following an arbitrarily fixed policy in those states transforms the problem to a linear mdp. based on this observation, we derive a novel (computationally inefficient) learning algorithm for linearly q^π-realizable mdps that simultaneously learns what states should be skipped over and runs another learning algorithm on the linear mdp hidden in the problem. the method returns an ε-optimal policy after polylog(h, d)/ε^2 interactions with the mdp, where h is the time horizon and d is the dimension of the feature vectors, giving the first polynomial-sample-complexity online rl algorithm for this setting. the results are proved for the misspecified case, where the sample complexity is shown to degrade gracefully with the misspecification error. for any (s, a) ∈ s × a, p_{s,a,π} is the distribution over the histories when the first action a is used in state s, after which policy π is followed. e_• is the expectation operator corresponding to a distribution p_• (e.g., e_{s,a,π} is the expectation with respect to p_{s,a,π}). the state- and action-value functions v^π and q^π are defined as the expected total reward within the first episode while π is used. let π★ be an optimal policy, satisfying q^{π★}(s, a) = sup_{π} q^π(s, a) over all policies π, for all (s, a) ∈ s × a. let q★(s, a) = q^{π★}(s, a) and v★(s) = sup_{a′} q★(s, a′) for all (s, a). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/518.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/518.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c386c64b3324ce3d158cf7bf7d6117ccd656d19 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/518.txt @@ -0,0 +1 @@ + we apply the discrepancy method and a chaining approach to give improved bounds on the coreset complexity of a wide class of kernel functions. our results give randomized polynomial time algorithms to produce coresets whose size carries only a log log(1/ε) factor for the gaussian and laplacian kernels in the case that the data set is uniformly bounded, an improvement that was not possible with previous techniques. we also obtain coresets of size o((1/ε) log log(1/ε)) for the laplacian kernel for d constant. finally, we give the best known bounds of o((√d/ε) log(2 max{1, α})) on the coreset complexity of the exponential, hellinger, and js kernels, where 1/α is the bandwidth parameter of the kernel. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/519.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/519.txt new file mode 100644 index 0000000000000000000000000000000000000000..7020890047caf70bb36e7fd8ac01a65e68d7556c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/519.txt @@ -0,0 +1 @@ + in this ambitious paper, we present a groundbreaking paradigm for human-computer interaction that revolutionizes the traditional notion of an operating system. within this innovative framework, user requests issued to the machine are handled by an interconnected ecosystem of generative ai models that seamlessly integrate with or even replace traditional software applications.
at the core of this paradigm shift are large generative models, such as language and diffusion models, which serve as the central interface between users and computers. this pioneering approach leverages the abilities of advanced language models, empowering users to engage in natural language conversations with their computing devices. by capitalizing on the power of language models, users can articulate their intentions, tasks, and inquiries directly to the system, eliminating the need for explicit commands or complex navigation. the language model comprehends and interprets the user's prompts, generating and displaying contextual and meaningful responses that facilitate seamless and intuitive interactions. this paradigm shift not only streamlines user interactions but also opens up new possibilities for personalized experiences. generative models can adapt to individual preferences, learning from user input and continuously improving their understanding and response generation. furthermore, this approach enables enhanced accessibility, as users can interact with the system using speech or text, accommodating diverse communication preferences. however, this visionary concept also raises significant challenges, including privacy, security, trustability, and the ethical use of generative models. robust safeguards must be in place to protect user data and prevent potential misuse or manipulation of the language model. while the full realization of this paradigm is still far from being achieved, this paper serves as a starting point for envisioning the transformative potential of a human-computer interaction paradigm centered around artificial intelligence. we discuss the envisioned benefits, challenges, and implications, paving the way for future research and development in this exciting and promising direction. index terms: ai generative models for operating systems, ai generative models for human-computer interaction, ai generative models as universal applications \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/52.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/52.txt new file mode 100644 index 0000000000000000000000000000000000000000..78b6c0168f593f0d467905e87975ef7afbf873df --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/52.txt @@ -0,0 +1 @@ + integrated information theory (iit) is a theoretical framework that provides a quantitative measure to estimate when a physical system is conscious, its degree of consciousness, and the complexity of the qualia space that the system is experiencing. formally, iit rests on the assumption that if a surrogate physical system can fully embed the phenomenological properties of consciousness, then the system properties must be constrained by the properties of the qualia being experienced. following this assumption, iit represents the physical system as a network of interconnected elements that can be thought of as a probabilistic causal graph, g, where each node has an input-output function and the whole graph is encoded in a transition probability matrix. consequently, iit's quantitative measure of consciousness, φ, is computed with respect to the transition probability matrix and the present state of the graph. in this paper, we provide a random search algorithm that is able to optimize φ in order to investigate, as the number of nodes increases, the structure of the graphs that have higher φ.
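the iit abstract above relies on a random search over transition probability matrices to maximize φ; a generic random-search loop is sketched below, with phi left as a hypothetical placeholder for an actual φ estimator (e.g., from an iit toolbox), since the paper's own scoring code is not reproduced here.

```python
# generic random-search sketch over transition probability matrices;
# `phi` is a hypothetical placeholder for a real iit phi computation.
import numpy as np

def phi(tpm: np.ndarray, state: np.ndarray) -> float:
    raise NotImplementedError("plug in an actual phi estimator here")

def random_search_phi(n_nodes: int, iters: int = 1000, seed: int = 0):
    rng = np.random.default_rng(seed)
    n_states = 2 ** n_nodes
    best_tpm, best_phi = None, -np.inf
    state = np.zeros(n_nodes, dtype=int)               # fixed present state
    for _ in range(iters):
        tpm = rng.random((n_states, n_states))
        tpm /= tpm.sum(axis=1, keepdims=True)          # each row is a probability distribution
        value = phi(tpm, state)
        if value > best_phi:
            best_tpm, best_phi = tpm, value
    return best_tpm, best_phi
```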
we also provide arguments that show the difficulties of applying more complex black-box search algorithms, such as bayesian optimization or metaheuristics, in this particular problem. additionally, we suggest specific research lines for these techniques to enhance the search algorithm that guarantees maximal φ. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/520.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/520.txt new file mode 100644 index 0000000000000000000000000000000000000000..002cd254a232ac60442b9bd0421d0ccec9086451 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/520.txt @@ -0,0 +1 @@ + we revisit the reinforce policy gradient algorithm from the literature. note that this algorithm typically works with cost returns obtained over random length episodes obtained from either termination upon reaching a goal state (as with episodic tasks) or from instants of visit to a prescribed recurrent state (in the case of continuing tasks). we propose a major enhancement to the basic algorithm. we estimate the policy gradient using a function measurement over a perturbed parameter by appealing to a class of random search approaches. this has advantages in the case of systems with infinite state and action spaces as it relax some of the regularity requirements that would otherwise be needed for proving convergence of the reinforce algorithm. nonetheless, we observe that even though we estimate the gradient of the performance objective using the performance objective itself (and not via the sample gradient), the algorithm converges to a neighborhood of a local minimum. we also provide a proof of convergence for this new algorithm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/521.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/521.txt new file mode 100644 index 0000000000000000000000000000000000000000..43b6b268afcc47574e94d30159f50d7306f3b784 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/521.txt @@ -0,0 +1 @@ + neural models based on hypercomplex algebra systems are growing and prolificating for a plethora of applications, ranging from computer vision to natural language processing. hand in hand with their adoption, parameterized hypercomplex neural networks (phnns) are growing in size and no techniques have been adopted so far to control their convergence at a large scale. in this paper, we study phnns convergence and propose parameterized hypercomplex identity initialization (phydi), a method to improve their convergence at different scales, leading to more robust performance when the number of layers scales up, while also reaching the same performance with fewer iterations. we show the effectiveness of this approach in different benchmarks and with common phnns with resnets-and transformer-based architecture. the code is available at https://github.com/ispamm/phydi. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/522.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/522.txt new file mode 100644 index 0000000000000000000000000000000000000000..990ef9d427f1bc01afed7079c905970cb59c9f93 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/522.txt @@ -0,0 +1 @@ + artificial intelligence systems are prevalent in everyday life, with use cases in retail, manufacturing, health, and many other fields. 
with the rise in ai adoption, associated risks have been identified, including privacy risks to the people whose data was used to train models. assessing the privacy risks of machine learning models is crucial to enabling knowledgeable decisions on whether to use, deploy, or share a model. a common approach to privacy risk assessment is to run one or more known attacks against the model and measure their success rate. we present a novel framework for running membership inference attacks against classification models. our framework takes advantage of the ensemble method, generating many specialized attack models for different subsets of the data. we show that this approach achieves higher accuracy than either a single attack model or an attack model per class label, both on classical and language classification tasks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/523.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/523.txt new file mode 100644 index 0000000000000000000000000000000000000000..a2caacf84b3357f89a76c4e1350e80b6c64844b5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/523.txt @@ -0,0 +1 @@ + climate change and its impact on global sustainability are critical challenges, demanding innovative solutions that combine cutting-edge technologies and scientific insights. quantum machine learning (qml) has emerged as a promising paradigm that harnesses the power of quantum computing to address complex problems in various domains including climate change and sustainability. in this work, we survey existing literature that applies quantum machine learning to solve climate change and sustainability-related problems. we review promising qml methodologies that have the potential to accelerate decarbonization including energy systems, climate data forecasting, climate monitoring, and hazardous events predictions. we discuss the challenges and current limitations of quantum machine learning approaches and provide an overview of potential opportunities and future work to leverage qml-based methods in the important area of climate change research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/524.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/524.txt new file mode 100644 index 0000000000000000000000000000000000000000..1da9f2b14dfcd4ddc0a9833363a4ce82e6e3f539 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/524.txt @@ -0,0 +1 @@ + unlike primates, training artificial neural networks (anns) on changing data distributions leads to a rapid decrease in performance on old tasks. this phenomenon is commonly referred to as catastrophic forgetting. in this paper, we investigate the representational changes that underlie this performance decrease and identify three distinct processes that together account for the phenomenon. the largest component is a misalignment between hidden representations and readout layers. misalignment occurs due to learning on additional tasks and causes internal representations to shift. representational geometry is partially conserved under this misalignment and only a small part of the information is irrecoverably lost. all types of representational changes scale with the dimensionality of hidden representations. these insights have implications for deep learning applications that need to be continuously updated, but may also aid aligning ann models to the rather robust biological vision. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/525.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/525.txt new file mode 100644 index 0000000000000000000000000000000000000000..b5a59facfde9fcec6b384564a36b1a3172e32a20 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/525.txt @@ -0,0 +1 @@ + ensemble learning leverages multiple models (i.e., weak learners) on a common machine learning task to enhance prediction performance. basic ensembling approaches average the weak learners outputs, while more sophisticated ones stack a machine learning model in between the weak learners outputs and the final prediction. this work fuses both aforementioned frameworks. we introduce an aggregated f -average (afa) shallow neural network which models and combines different types of averages to perform an optimal aggregation of the weak learners predictions. we emphasise its interpretable architecture and simple training strategy, and illustrate its good performance on the problem of few-shot class incremental learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/526.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/526.txt new file mode 100644 index 0000000000000000000000000000000000000000..13ae7ab2bb001c4e5cb3bd304e71f2f9f6a2b8e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/526.txt @@ -0,0 +1 @@ + regularization is one of the most important techniques in reinforcement learning algorithms. the well-known soft actor-critic algorithm is a special case of regularized policy iteration where the regularizer is chosen as shannon entropy. despite some empirical success of regularized policy iteration, its theoretical underpinnings remain unclear. this paper proves that regularized policy iteration is strictly equivalent to the standard newton-raphson method in the condition of smoothing out bellman equation with strongly convex functions. this equivalence lays the foundation of a unified analysis for both global and local convergence behaviors of regularized policy iteration. we prove that regularized policy iteration has global linear convergence with the rate being γ (discount factor). furthermore, this algorithm converges quadratically once it enters a local region around the optimal value. we also show that a modified version of regularized policy iteration, i.e., with finite-step policy evaluation, is equivalent to inexact newton method where the newton iteration formula is solved with truncated iterations. we prove that the associated algorithm achieves an asymptotic linear convergence rate of γ m in which m denotes the number of steps carried out in policy evaluation. our results take a solid step towards a better understanding of the convergence properties of regularized policy iteration algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/527.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/527.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6abcd1aa7f5e606ab31a6a3efdddb9f9362a0b9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/527.txt @@ -0,0 +1 @@ + patient triage plays a crucial role in healthcare, ensuring timely and appropriate care based on the urgency of patient conditions. traditional triage methods heavily rely on human judgment, which can be subjective and prone to errors. 
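to illustrate the smoothing that a shannon-entropy regularizer induces in the bellman equation (the setting of the regularized policy iteration abstract above), the numpy sketch below applies a soft bellman backup in which the max over actions becomes a log-sum-exp; it is a minimal illustration and does not reproduce the paper's newton-raphson analysis.

```python
# soft (entropy-regularized) bellman backup sketch: with an entropy regularizer
# of temperature tau, the hard max over actions is smoothed into a log-sum-exp.
import numpy as np
from scipy.special import logsumexp

def soft_bellman_backup(q, rewards, trans, gamma=0.99, tau=0.1):
    """q: (S, A) current action values; rewards: (S, A); trans: (S, A, S) transition probs."""
    v = tau * logsumexp(q / tau, axis=1)   # soft state values replace max_a q(s, a)
    return rewards + gamma * trans @ v     # updated q(s, a)
```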
recently, a growing interest has been in leveraging artificial intelligence (ai) to develop algorithms for triaging patients. this paper presents the development of a novel algorithm for triaging patients. it is based on the analysis of patient data to produce decisions regarding their prioritization. the algorithm was trained on a comprehensive data set containing relevant patient information, such as vital signs, symptoms, and medical history. the algorithm was designed to accurately classify patients into triage categories through rigorous preprocessing and feature engineering. experimental results demonstrate that our algorithm achieved high accuracy and performance, outperforming traditional triage methods. by incorporating computer science into the triage process, healthcare professionals can benefit from improved efficiency, accuracy, and consistency, prioritizing patients effectively and optimizing resource allocation. although further research is needed to address challenges such as biases in training data and model interpretability, the development of ai-based algorithms for triaging patients shows great promise in enhancing healthcare delivery and patient outcomes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/528.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/528.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ee28366dac3ff72fc5dba34f3a7bddf17b84815 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/528.txt @@ -0,0 +1 @@ + large language models (llms) have achieved impressive performance on many natural language processing tasks. however, their capabilities on graph-structured data remain relatively unexplored. in this paper, we conduct a series of experiments benchmarking leading llms on diverse graph prediction tasks spanning node, edge, and graph levels. we aim to assess whether llms can effectively process graph data and leverage topological structures to enhance performance, compared to specialized graph neural networks. through varied prompt formatting and task/dataset selection, we analyze how well llms can interpret and utilize graph structures. by comparing llms' performance with specialized graph models, we offer insights into the strengths and limitations of employing llms for graph analytics. our findings provide insights into llms' capabilities and suggest avenues for further exploration in applying them to graph analytics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/529.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/529.txt new file mode 100644 index 0000000000000000000000000000000000000000..cb7dfe3733c1d9f8e2258b222d2e0ada693cb064 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/529.txt @@ -0,0 +1 @@ + neural combinatorial optimization has been researched actively in the last eight years. even though many of the proposed machine learning based approaches are compared on the same datasets, the evaluation protocol exhibits essential flaws and the selection of baselines often neglects state-of-the-art operations research approaches. to improve on both of these shortcomings, we propose the routing arena, a benchmark suite for routing problems that provides a seamless integration of consistent evaluation and the provision of baselines and benchmarks prevalent in the machine learning-and operations research field. 
the proposed evaluation protocol considers the two most important evaluation cases for different applications: first, the solution quality for an a priori fixed time budget and secondly the anytime performance of the respective methods. by setting the solution trajectory in perspective to a best known solution and a base solver's solutions trajectory, we furthermore propose the weighted relative average performance (wrap), a novel evaluation metric that quantifies the often claimed runtime efficiency of neural routing solvers. a comprehensive first experimental evaluation demonstrates that the most recent operations research solvers generate state-of-the-art results in terms of solution quality and runtime efficiency when it comes to the vehicle routing problem. nevertheless, some findings highlight the advantages of neural approaches and motivate a shift in how neural solvers should be conceptualized. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/53.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/53.txt new file mode 100644 index 0000000000000000000000000000000000000000..936a97a091fa64c145b12a3df9702939827145b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/53.txt @@ -0,0 +1 @@ + one popular technique to solve temporal planning problems consists in decoupling the causal decisions, demanding them to heuristic search, from temporal decisions, demanding them to a simple temporal network (stn) solver. in this architecture, one needs to check the consistency of a series of stns that are related one another, therefore having methods to incrementally re-use previous computations and that avoid expensive memory duplication is of paramount importance. in this paper, we describe in detail how stns are used in temporal planning, we identify a clear interface to support this use-case and we present an efficient data-structure implementing this interface that is both time-and memoryefficient. we show that our data structure, called δ-stn, is superior to other state-of-the-art approaches on temporal planning sequences of problems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/530.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/530.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ed92f74430e882dace80d01171f7ccb1d4b6955 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/530.txt @@ -0,0 +1 @@ + there has been a great deal of recent interest in binarized neural networks, especially because of their explainability. at the same time, automatic differentiation algorithms such as backpropagation fail for binarized neural networks, which limits their applicability. we show that binarized neural networks admit a tame representation by reformulating the problem of training binarized neural networks as a subadditive dual of a mixed-integer program, which we show to have nice properties. this makes it possible to use the framework of bolte et al. for implicit differentiation, which offers the possibility for practical implementation of backpropagation in the context of binarized neural networks. this approach could also be used for a broader class of mixed-integer programs, beyond the training of binarized neural networks, as encountered in symbolic approaches to ai and beyond. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/531.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/531.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4a34bee2f7f1526170259069f4635e8df25c3c4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/531.txt @@ -0,0 +1 @@ + parallelizing sequentially written programs is a challenging task. even experienced developers need to spend considerable time finding parallelism opportunities and then actually writing parallel versions of sequentially written programs. to address this issue, we present autoparllm, a framework for automatically discovering parallelism and generating the parallel version of the sequentially written program. our framework consists of two major components: i) a heterogeneous graph neural network (gnn) based parallelism discovery and parallel pattern detection module, and ii) an llm-based code generator to generate the parallel counterpart of the sequential programs. we use the gnn to learn the flow-aware characteristics of the programs to identify parallel regions in sequential programs and then construct an enhanced prompt using the gnn's results for the llm-based generator to finally produce the parallel counterparts of the sequential programs. we evaluate autoparllm on 11 applications of 2 wellknown benchmark suites: nas parallel benchmark and rodinia benchmark. our results show that autoparllm is indeed effective in improving the state-ofthe-art llm-based models for the task of parallel code generation in terms of multiple code generation metrics. autoparllm also improves the average runtime of the parallel code generated by the state-of-the-art llms by as high as 3.4% and 2.9% for the nas parallel benchmark and rodinia benchmark respectively. additionally, to overcome the issue that well-known metrics for translation evaluation have not been optimized to evaluate the quality of the generated parallel code, we propose ompscore for evaluating the quality of the generated code. we show that ompscore exhibits a better correlation with human judgment than existing metrics, measured by up to 75% improvement of spearman correlation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/532.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/532.txt new file mode 100644 index 0000000000000000000000000000000000000000..2526e7784451d6ae224bb6bf4eb1a0658f6fe193 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/532.txt @@ -0,0 +1 @@ + structure learning is essential for bayesian networks (bns) as it uncovers causal relationships, and enables knowledge discovery, predictions, inferences, and decision-making under uncertainty. two novel algorithms, fsbn and ssbn, based on the pc algorithm, employ local search strategy and conditional independence tests to learn the causal network structure from data. they incorporate d-separation to infer additional topology information, prioritize conditioning sets, and terminate the search immediately and efficiently. fsbn achieves up to 52% computation cost reduction, while ssbn surpasses it with a remarkable 72% reduction for a 200-node network. ssbn demonstrates further efficiency gains due to its intelligent strategy. experimental studies show that both algorithms match the induction quality of the pc algorithm while significantly reducing computation costs. 
this enables them to offer interpretability and adaptability while reducing the computational burden, making them valuable for various applications in big data analytics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/533.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/533.txt new file mode 100644 index 0000000000000000000000000000000000000000..c571dbb9e356abebc4019899b23d74c813151244 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/533.txt @@ -0,0 +1 @@ + we present an algorithm that learns to imitate expert behavior and can transfer to previously unseen domains without retraining. such an algorithm is extremely relevant in real-world applications such as robotic learning because 1) reward functions are difficult to design, 2) learned policies from one domain are difficult to deploy in another domain and 3) learning directly in the real world is either expensive or unfeasible due to security concerns. to overcome these constraints, we combine recent advances in deep rl by using an annealedvae to learn a disentangled state representation and imitate an expert by learning a single q-function which avoids adversarial training. we demonstrate the effectiveness of our method in 3 environments ranging in difficulty and the type of transfer knowledge required. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/534.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/534.txt new file mode 100644 index 0000000000000000000000000000000000000000..aaad983e32528dbdd09d61aa3b23c23dc3584ae1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/534.txt @@ -0,0 +1 @@ + neural network pruning has shown to be an effective technique for reducing the network size, trading desirable properties like generalization and robustness to adversarial attacks for higher sparsity. recent work has claimed that adversarial pruning methods can produce sparse networks while also preserving robustness to adversarial examples. in this work, we first re-evaluate three state-of-the-art adversarial pruning methods, showing that their robustness was indeed overestimated. we then compare pruned and dense versions of the same models, discovering that samples on thin ice, i.e., closer to the unpruned model's decision boundary, are typically misclassified after pruning. we conclude by discussing how this intuition may lead to designing more effective adversarial pruning methods in future work. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/535.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/535.txt new file mode 100644 index 0000000000000000000000000000000000000000..84c40226fd135e9620e8aa3642905d9517137e3d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/535.txt @@ -0,0 +1 @@ + in continual learning, the learner learns multiple tasks in sequence, with data being acquired only once for each task. catastrophic forgetting is a major challenge to continual learning. to reduce forgetting, some existing rehearsal-based methods use episodic memory to replay samples of previous tasks. however, in the process of knowledge integration when learning a new task, this strategy also suffers from catastrophic forgetting due to an imbalance between old and new knowledge. to address this problem, we propose a novel replay strategy called manifold expansion replay (maer). 
we argue that expanding the implicit manifold of the knowledge representation in the episodic memory helps to improve the robustness and expressiveness of the model. to this end, we propose a greedy strategy to keep increasing the diameter of the implicit manifold represented by the knowledge in the buffer during memory management. in addition, we introduce the wasserstein distance instead of cross-entropy as the distillation loss to preserve previous knowledge. with extensive experimental validation on mnist, cifar10, cifar100, and tinyimagenet, we show that the proposed method significantly improves the accuracy in the continual learning setup, outperforming the state of the art. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/536.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/536.txt new file mode 100644 index 0000000000000000000000000000000000000000..45d82a65c7f84b7d883e3d6ebde8c66878b805ed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/536.txt @@ -0,0 +1 @@ + sliding window sum algorithms have been successfully used for training and inference of deep neural networks. we have shown before how both pooling and convolution 1-d primitives could be expressed as sliding sums and evaluated by compute kernels with a shared structure. in this paper, we present an extensive study of the sliding window convolution technique as a more efficient alternative to the commonly used general matrix multiplication (gemm) based convolution in deep neural networks (dnns). the sliding window technique addresses the memory bloating problem and demonstrates a significant speedup in 2-d convolution. we explore the performance of this technique on a range of implementations, including custom kernels for specific filter sizes. our results suggest that the sliding window computation kernels can outperform gemm-based convolution on a cpu and even on dedicated hardware accelerators. this could promote a wider adoption of ai on low-power and low-memory devices without the need for specialized hardware. we also discuss the compatibility of model compression methods and optimized network architectures with the sliding window technique, encouraging further research in these areas. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/537.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/537.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb20fb1a12617ea73b1c6c7eb114842d8f36db95 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/537.txt @@ -0,0 +1 @@ + the use of machine learning for material property prediction and discovery has traditionally centered on graph neural networks that incorporate the geometric configuration of all atoms. however, in practice not all this information may be readily available, e.g. when evaluating the potentially unknown binding of adsorbates to a catalyst. in this paper, we investigate whether it is possible to predict a system's relaxed energy in the oc20 dataset while ignoring the relative position of the adsorbate with respect to the electro-catalyst. we consider schnet, dimenet++ and faenet as base architectures and measure the impact of four modifications on model performance: removing edges in the input graph, pooling independent representations, not sharing the backbone weights and using an attention mechanism to propagate non-geometric relative information.
we find that while removing binding site information impairs accuracy as expected, modified models are able to predict relaxed energies with remarkably decent mae. our work suggests future research directions in accelerated materials discovery where information on reactant configurations can be reduced or altogether omitted. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/538.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/538.txt new file mode 100644 index 0000000000000000000000000000000000000000..edd735bdecbfb66b25281609143431e1ab6d9f17 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/538.txt @@ -0,0 +1 @@ + recent advances in deep learning and automatic speech recognition have improved the accuracy of end-to-end speech recognition systems, but recognition of personal content such as contact names remains a challenge. in this work, we describe our personalization solution for an end-to-end speech recognition system based on connectionist temporal classification. building on previous work, we present a novel method for generating additional subword tokenizations for personal entities from their pronunciations. we show that using this technique in combination with two established techniques, contextual biasing and wordpiece prior normalization, we are able to achieve personal named entity accuracy on par with a competitive hybrid system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/539.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/539.txt new file mode 100644 index 0000000000000000000000000000000000000000..942efe2b69759ba6f627c1083a6436977222c788 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/539.txt @@ -0,0 +1 @@ + the ability of deep learning to process and extract relevant information in complex brain dynamics from raw eeg data has been demonstrated in various recent works. deep learning models, however, have also been shown to perform best on large corpora of data. when processing eeg, a natural approach is to combine eeg datasets from different experiments to train large deep-learning models. however, most eeg experiments use custom channel montages, requiring the data to be transformed into a common space. previous methods have used the raw eeg signal to extract features of interest and focused on using a common feature space across eeg datasets. while this is a sensible approach, it underexploits the potential richness of raw eeg data. here, we explore using spatial attention applied to eeg electrode coordinates to perform channel harmonization of raw eeg data, allowing us to train deep learning models on eeg data using different montages. we test this model on a gender classification task. we first show that spatial attention increases model performance. then, we show that a deep learning model trained on data using different channel montages performs significantly better than deep learning models trained on fixed 23- and 128-channel data montages.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/54.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/54.txt new file mode 100644 index 0000000000000000000000000000000000000000..b965981897220900e218da27edf0be7ce41b93c0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/54.txt @@ -0,0 +1 @@ + policymakers often need to rely on experts with disparate fields of expertise when making policy choices in complex, multi-faceted, dynamic environments such as those dealing with ecosystem services. the pressures affecting the survival and pollination capabilities of honey bees (apis mellifera), wild bees and other pollinators are well-documented, but incomplete. in order to estimate the potential effectiveness of various candidate policies to support pollination services, there is an urgent need to quantify the effect of various combinations of variables on the pollination ecosystem service, utilising available information, models and expert judgement. in this paper, we present a new application of the integrating decision support system methodology for combining inputs from multiple panels of experts to evaluate policies to support an abundant pollinator population. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/540.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/540.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0d730d5d57e8e5c949a24cac36e69b1d66131b3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/540.txt @@ -0,0 +1 @@ + pre-trained language models have emerged as promising tools for predicting molecular properties, yet their development is in its early stages, necessitating further research to enhance their efficacy and address challenges such as generalization and sample efficiency. in this paper, we present a multi-view approach that combines latent spaces derived from state-of-the-art chemical models. our approach relies on two pivotal elements: the embeddings derived from mhg-gnn, which represent molecular structures as graphs, and molformer embeddings rooted in chemical language. the attention mechanism of molformer is able to identify relations between two atoms even when they are far apart, while the gnn of mhg-gnn can more precisely capture relations among multiple closely located atoms. in this work, we demonstrate the superior performance of our proposed multi-view approach compared to existing state-of-the-art methods, including molformer-xl, which was trained on 1.1 billion molecules, particularly in intricate tasks such as predicting clinical trial drug toxicity and inhibiting hiv replication. we assessed our approach using six benchmark datasets from moleculenet, where it outperformed competitors in five of them. our study highlights the potential of latent space fusion and feature integration for advancing molecular property prediction. in this work, we use small versions of mhg-gnn and molformer, which opens up an opportunity for further improvement when our approach uses a larger-scale dataset.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/541.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/541.txt new file mode 100644 index 0000000000000000000000000000000000000000..3825153d9982a3037f99c8811c52bfde65511d93 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/541.txt @@ -0,0 +1 @@ + artificial intelligence (ai) is a powerful tool for reshaping healthcare systems. in healthcare, ai is invaluable for its capacity to manage vast amounts of data, which can lead to more accurate and speedy diagnoses, ultimately easing the workload on healthcare professionals. as a result, ai has proven itself to be a powerful tool across various industries, simplifying complex tasks and pattern recognition that would otherwise be overwhelming for humans or traditional computer algorithms. in this paper, we review the strengths and weaknesses of bayesian ridge regression, an ai model that can be used to bring cutting-edge virus analysis to healthcare professionals around the world. the model's accuracy assessment revealed promising results, with room for improvement primarily related to data organization. in addition, the severity index serves as a valuable tool to gain a broad overview of patient care needs, aligning with healthcare professionals' preference for broader categorizations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/542.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/542.txt new file mode 100644 index 0000000000000000000000000000000000000000..55d66777a3602aea9412a38f1d3a99594c83dd96 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/542.txt @@ -0,0 +1 @@ + we tackle the problem of estimating risk measures of the infinite-horizon discounted cost within a markov cost process. the risk measures we study include variance, value-at-risk (var), and conditional value-at-risk (cvar). first, we show that estimating any of these risk measures with ǫ-accuracy, either in the expected or the high-probability sense, requires at least Ω(1/ǫ^2) samples. then, using a truncation scheme, we derive an upper bound for the cvar and variance estimation. this bound matches our lower bound up to logarithmic factors. finally, we discuss an extension of our estimation scheme that covers more general risk measures satisfying a certain continuity criterion, e.g., spectral risk measures, utility-based shortfall risk. to the best of our knowledge, our work is the first to provide lower and upper bounds for estimating any risk measure beyond the mean within a markovian setting. our lower bounds also extend to the infinite-horizon discounted costs' mean. even in that case, our lower bound of Ω(1/ǫ^2) improves upon the existing Ω(1/ǫ) bound. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/543.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/543.txt new file mode 100644 index 0000000000000000000000000000000000000000..9279301985589b3fc4d22b5410820bd85e1063ee --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/543.txt @@ -0,0 +1 @@ + it is well-known that neural networks can unintentionally memorize their training examples, causing privacy concerns. however, auditing memorization in large non-auto-regressive automatic speech recognition (asr) models has been challenging due to the high compute cost of existing methods such as hardness calibration.
in this work, we design a simple auditing method to measure memorization in large asr models without the extra compute overhead. concretely, we speed up randomly-generated utterances to create a mapping between vocal and text information that is difficult to learn from typical training examples. hence, accurate predictions only for sped-up training examples can serve as clear evidence for memorization, and the corresponding accuracy can be used to measure memorization. using the proposed method, we showcase memorization in the state-of-the-art asr models. to mitigate memorization, we tried gradient clipping during training to bound the influence of any individual example on the final model. we empirically show that clipping each example's gradient can mitigate memorization for sped-up training examples with up to 16 repetitions in the training set. furthermore, we show that in large-scale distributed training, clipping the average gradient on each compute core maintains neutral model quality and compute cost while providing strong privacy protection. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/544.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/544.txt new file mode 100644 index 0000000000000000000000000000000000000000..db831bb409f7350d1827ba4f8d8cfa56cb1e66b6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/544.txt @@ -0,0 +1 @@ + data augmentation (da), i.e., augmenting training data with synthetic samples, is widely adopted in computer vision (cv) to improve model performance. conversely, da has not yet been popularized in networking use cases, including traffic classification (tc). in this work, we present a preliminary study of 14 hand-crafted das applied on the mirage19 dataset. our results (i) show that da can reap benefits previously unexplored in tc and (ii) foster a research agenda on the use of generative models to automate da design. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/545.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/545.txt new file mode 100644 index 0000000000000000000000000000000000000000..691950b227fda146cbd34254b9372f7988165f00 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/545.txt @@ -0,0 +1 @@ + we explain the methodology used to create the data submitted to the humob challenge, a data analysis competition for human mobility prediction. we adopted a personalized model to predict the individual's movement trajectory from their data, instead of predicting from the overall movement, based on the hypothesis that human movement is unique to each person. we devised features such as the date and time, activity time, days of the week, time of day, and frequency of visits to poi (point of interest). as additional features, we incorporated the movement of other individuals with similar behavior patterns through the employment of clustering. the machine learning model we adopted was support vector regression (svr). we evaluated accuracy through offline assessment and carried out feature selection and parameter tuning. although the provided dataset consists of trajectories for 100,000 users, our method uses only the data of the 20,000 target users and does not need the data of the other 80,000 users. despite the personalized model's traditional feature engineering approach, this model yields reasonably good accuracy with lower computational cost.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/546.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/546.txt new file mode 100644 index 0000000000000000000000000000000000000000..a6a22389d7b35f728d5dcaf359c05c999ab8ae28 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/546.txt @@ -0,0 +1 @@ + learning in pomdps is known to be significantly harder than mdps. in this paper, we consider online learning problem for episodic pomdps with unknown transition and observation models. we propose a posterior sampling-based reinforcement learning algorithm for pomdps (ps4pomdps), which is much simpler and more implementable compared to state-of-the-art optimism-based online learning algorithms for pomdps. we show that the bayesian regret of the proposed algorithm scales as the square root of the number of episodes, matching the lower bound, and is polynomial in the other parameters. in a general setting, its regret scales exponentially in the horizon length h, and we show that this is inevitable by providing a lower bound. however, when the pomdp is undercomplete and weakly revealing (an assumption common in recent literature), we establish a polynomial bayesian regret bound. we also propose a posterior sampling algorithm for multi-agent pomdps, and show it too has sublinear regret. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/547.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/547.txt new file mode 100644 index 0000000000000000000000000000000000000000..d832cccaab75f1e558c662f1ffba8f387373979b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/547.txt @@ -0,0 +1 @@ + this paper aims to explore the potential of combining deep reinforcement learning (drl) with knowledge distillation (kd) by distilling various drl algorithms and studying their distillation effects. by doing so, the computational burden of deep models could be reduced while maintaining the performance. the primary objective is to provide a benchmark for evaluating the performance of different drl algorithms that have been refined using kd techniques. by distilling these algorithms, the goal is to develop efficient and fast drl models. this research is expected to provide valuable insights that can facilitate further advancements in this promising direction. by exploring the combination of drl and kd, this work aims to promote the development of models that require fewer gpu resources, learn more quickly, and make faster decisions in complex environments. the results of this research have the capacity to significantly advance the field of drl and pave the way for the future deployment of resource-efficient, decision-making intelligent systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/548.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/548.txt new file mode 100644 index 0000000000000000000000000000000000000000..fc8fa1342789d1e368a2d0c683e9781437320400 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/548.txt @@ -0,0 +1 @@ + in this paper, we study the problem of efficient online reinforcement learning in the infinite horizon setting when there is an offline dataset to start with. we assume that the offline dataset is generated by an expert but with unknown level of competence, i.e., it is not perfect and not necessarily using the optimal policy. 
we show that if the learning agent models the behavioral policy (parameterized by a competence parameter) used by the expert, it can do substantially better in terms of minimizing cumulative regret than if it does not. we establish an upper bound on the regret of the exact informed psrl (ipsrl) algorithm that scales as Õ(√t). this requires a novel prior-dependent regret analysis of bayesian online learning algorithms for the infinite horizon setting. we then propose the informed rlsvi algorithm to efficiently approximate the ipsrl algorithm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/549.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/549.txt new file mode 100644 index 0000000000000000000000000000000000000000..bd25c1c547c9f6068d12089d95d1fbb8922242cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/549.txt @@ -0,0 +1 @@ + large pre-trained language models in the domain of code synthesis have shown remarkable performance on various benchmarks, treating the problem of code generation in a fashion similar to natural language generation, trained with a language modelling (lm) objective. in addition, the property of programming language code being precisely evaluable with respect to its semantics, through the use of unit tests to check its functional correctness, lends itself to using reinforcement learning (rl) as a further training paradigm. previous work has shown that rl can be applied as such to improve models' coding capabilities; however, such rl-based methods rely on a reward signal based on defined unit tests, which are much harder to obtain compared to the huge crawled code datasets used in lm objectives. in this work, we present a novel approach to automatically obtain data consisting of function signatures and associated unit tests, suitable for rl training of code synthesis models. we also introduce a straightforward, simple yet effective actor-critic rl training scheme and show that it, in conjunction with automatically generated training data, improves a pre-trained code language model's performance by up to 9.9% over the original underlying code synthesis lm, and by up to 4.3% over rl-based models trained with standard ppo or coderl. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/55.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/55.txt new file mode 100644 index 0000000000000000000000000000000000000000..bc17ca005cb8887b34f9d56d36b262ce42658f86 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/55.txt @@ -0,0 +1 @@ + we propose a hierarchical framework for collaborative intelligent systems. this framework organizes research challenges based on the nature of the collaborative activity and the information that must be shared, with each level building on capabilities provided by lower levels. we review research paradigms at each level, with a description of classical engineering-based approaches and modern alternatives based on machine learning, illustrated with a running example using a hypothetical personal service robot. we discuss cross-cutting issues that occur at all levels, focusing on the problem of communicating and sharing comprehension, the role of explanation and the social nature of collaboration.
we conclude with a summary of research challenges and a discussion of the potential for economic and societal impact provided by technologies that enhance human abilities and empower people and society through collaboration with intelligent systems. in this article, we show how viewing collaboration as a hierarchy of perception-action cycles can provide a framework that unifies a broad spectrum of techniques and research problems for collaborative systems. we describe classical engineered approaches for components at each level and discuss modern alternatives based on machine learning. we then discuss some of the more salient research challenges that must be addressed to further develop collaboration with intelligent systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/550.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/550.txt new file mode 100644 index 0000000000000000000000000000000000000000..c15632198473f94cd8c4210dd9336c3b74315031 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/550.txt @@ -0,0 +1 @@ + much work has been dedicated to estimating and optimizing workloads in high-performance computing (hpc) and deep learning. however, researchers have typically relied on only a few metrics to assess the efficiency of those techniques, most notably the accuracy, the prediction loss, and the computational time with regard to gpu and/or cpu characteristics. it is rare to see figures for power consumption, partly due to the difficulty of obtaining accurate power readings. in this paper, we introduce a composite score that aims to characterize the tradeoff between accuracy and power consumption measured during the inference of neural networks. for this purpose, we present a new open-source tool allowing researchers to consider more metrics: granular power consumption, but also ram/cpu/gpu utilization, as well as storage, and network input/output (i/o). to the best of our knowledge, it is the first fit test for neural architectures on hardware architectures. this is made possible thanks to reproducible power efficiency measurements. we applied this procedure to state-of-the-art neural network architectures on miscellaneous hardware. one of the main applications and novelties is the measurement of algorithmic power efficiency. the objective is to allow researchers to grasp their algorithms' efficiencies better. this methodology was developed to explore trade-offs between energy usage and accuracy in neural networks. it is also useful when fitting hardware for a specific task or to compare two architectures more accurately, with architecture exploration in mind. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/551.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/551.txt new file mode 100644 index 0000000000000000000000000000000000000000..666f22a82e8103884c1ad43761fc98cc6810c0ca --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/551.txt @@ -0,0 +1 @@ + while deep auc maximization (dam) has shown remarkable success on imbalanced medical tasks, e.g., chest x-ray classification and skin lesion classification, it could suffer from severe overfitting when applied to small datasets due to its aggressive nature of pushing prediction scores of positive data away from those of negative data.
this paper studies how to improve generalization of dam by mixup data augmentation, an approach that is widely used for improving generalization of cross-entropy loss based deep learning methods. however, auc is defined over positive and negative pairs, which makes it challenging to incorporate mixup data augmentation into dam algorithms. to tackle this challenge, we employ the auc margin loss and incorporate soft labels into the formulation to effectively learn from data generated by mixup augmentation, which is referred to as the auc-mixup loss. our experimental results demonstrate the effectiveness of the proposed auc-mixup methods on imbalanced benchmark and medical image datasets compared to standard dam training methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/552.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/552.txt new file mode 100644 index 0000000000000000000000000000000000000000..62401390f04555c729317857d88c8c64842469b7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/552.txt @@ -0,0 +1 @@ + in many real-world applications, due to recent developments in the privacy landscape, training data may be aggregated to preserve the privacy of sensitive training labels. in the learning from label proportions (llp) framework, the dataset is partitioned into bags of feature-vectors which are available only with the sum of the labels per bag. a further restriction, which we call learning from bag aggregates (lba), is where instead of individual feature-vectors, only the (possibly weighted) sum of the feature-vectors per bag is available. we study whether such aggregation techniques can provide privacy guarantees under the notion of label differential privacy (label-dp) previously studied in, e.g., [ghazi et al.'21, esfandiari et al.'22]. it is easily seen that naive lba and llp do not provide label-dp. our main result, however, shows that weighted lba using iid gaussian weights with m randomly sampled disjoint k-sized bags is in fact (ε, δ)-label-dp for any ε > 0 with δ ≈ exp(-Ω(√k)), assuming a lower bound on the linear-mse regression loss. further, the ℓ_2^2-regressor which minimizes the loss on the aggregated dataset has a loss within a (1 + o(1))-factor of the optimum on the original dataset w.p. ≈ 1 - exp(-Ω(m)). we emphasize that no additive label noise is required. the analogous weighted-llp does not, however, admit label-dp. nevertheless, we show that if additive n(0, 1) noise can be added to any constant fraction of the instance labels, then the noisy weighted-llp admits similar label-dp guarantees without assumptions on the dataset, while preserving the utility of lipschitz-bounded neural mse-regression tasks. our work is the first to demonstrate that label-dp can be achieved by randomly weighted aggregation for regression tasks, using no or little additive noise. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/553.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/553.txt new file mode 100644 index 0000000000000000000000000000000000000000..86db0e117ad7bc13fe0e0f6ee67a42fa68049e17 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/553.txt @@ -0,0 +1 @@ + prompt engineering (pe) has emerged as a critical technique for guiding large language models (llms) in solving intricate tasks. its importance is highlighted by its potential to significantly enhance the efficiency and effectiveness of human-machine interaction.
as tasks grow increasingly complex, recent advanced pe methods have extended beyond the limitations of single-round interactions to embrace multi-round interactions, which allows for a deeper and more nuanced engagement with llms. in this paper, we propose an optimal control framework tailored for multi-round interactions with llms. this framework provides a unified mathematical structure that not only systematizes the existing pe methods but also sets the stage for rigorous analytical improvements. furthermore, we extend this framework to include pe via ensemble methods and multi-agent collaboration, thereby enlarging the scope of applicability. by adopting an optimal control perspective, we offer fresh insights into existing pe methods and highlight theoretical challenges that warrant future research. besides, our work lays a foundation for the development of more effective and interpretable pe methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/554.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/554.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac231ffacad02014d638885f12651dd3cace6dbb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/554.txt @@ -0,0 +1 @@ + this paper studies bandit convex optimization with constraints, where the learner aims to generate a sequence of decisions under partial information of loss functions such that the cumulative loss and the cumulative constraint violation are simultaneously reduced. we adopt the cumulative hard constraint violation as the metric of constraint violation, which is defined by ∑_{t=1}^{T} max{g_t(x_t), 0}. owing to the maximum operator, a strictly feasible solution cannot cancel out the effects of violated constraints compared to the conventional metric known as long-term constraint violation. we present a penalty-based proximal gradient descent method that attains a sub-linear growth of both regret and cumulative hard constraint violation, in which the gradient is estimated with a two-point function evaluation. precisely, our algorithm attains O(d^2 T^{max{c, 1-c}}) regret bounds and O(d^2 T^{1-c/2}) cumulative hard constraint violation bounds for convex loss functions and time-varying constraints, where d is the dimensionality of the feasible region and c ∈ [1/2, 1) is a user-determined parameter. we also extend the result for the case where the loss functions are strongly convex and show that both regret and constraint violation bounds can be further reduced. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/555.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/555.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/556.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/556.txt new file mode 100644 index 0000000000000000000000000000000000000000..6e66d559400e111f86c45f1017be665ed0c933a9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/556.txt @@ -0,0 +1 @@ + the expressivity of graph neural networks (gnns) can be entirely characterized by appropriate fragments of first-order logic. namely, any query of the two-variable fragment of graded modal logic (gc2) interpreted over labeled graphs can be expressed using a gnn whose size depends only on the depth of the query.
as pointed out by , this description holds for a family of activation functions, leaving the possibility for a hierarchy of logics expressible by gnns depending on the chosen activation function. in this article, we show that such a hierarchy indeed exists by proving that gc2 queries cannot be expressed by gnns with polynomial activation functions. this implies a separation between polynomial and popular non-polynomial activations (such as rectified linear units) and answers an open question formulated by . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/557.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/557.txt new file mode 100644 index 0000000000000000000000000000000000000000..3632547497c6b895c2cf25b34ef1ce7692b162a2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/557.txt @@ -0,0 +1 @@ + the aim of this study is to cluster students according to their gamification user types and learning styles, with the purpose of providing instructors with a new perspective on grouping students via clustering, which cannot be done by hand when there are multiple scales in the data. the data used consist of 251 students who were enrolled at a turkish state university. when grouping students, the k-means algorithm was utilized as the clustering algorithm. to determine the gamification user types and learning styles of students, the gamification user type hexad scale and the grasha-riechmann student learning style scale were used, respectively. the silhouette coefficient was utilized as the clustering quality measure. after fitting the algorithm in several ways, the highest silhouette coefficient obtained was 0.12, meaning that the results are neutral but not satisfactory. all the statistical operations and data visualizations were made using the python programming language. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/558.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/558.txt new file mode 100644 index 0000000000000000000000000000000000000000..694656ea87a8dcb64c6536ae8be5a2e4bd82e6f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/558.txt @@ -0,0 +1 @@ + machine learning algorithms are designed to capture complex relationships between features. in this context, the high dimensionality of data often results in poor model performance, with the risk of overfitting. feature selection, the process of selecting a subset of relevant and non-redundant features, is, therefore, an essential step to mitigate these issues. however, classical feature selection approaches do not inspect the causal relationship between the selected features and the target, which can lead to misleading results in real-world applications. causal discovery, instead, aims to identify causal relationships between features with observational data. in this paper, we propose a novel methodology at the intersection between feature selection and causal discovery, focusing on time series. we introduce a new causal feature selection approach that relies on the forward and backward feature selection procedures and leverages transfer entropy to estimate the causal flow of information from the features to the target in time series. our approach enables the selection of features not only in terms of mere model performance but also captures the causal information flow. in this context, we provide theoretical guarantees on the regression and classification errors for both the exact and the finite-sample cases.
finally, we present numerical validations on synthetic and real-world regression problems, showing results competitive w.r.t. the considered baselines. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/559.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/559.txt new file mode 100644 index 0000000000000000000000000000000000000000..f33381c3b10657954e509b6fd84af59d449df666 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/559.txt @@ -0,0 +1 @@ + in the face of climate change-induced droughts, vulnerable regions encounter severe threats to food security, demanding urgent humanitarian assistance. this paper introduces a causal inference framework for the horn of africa, aiming to assess the impact of cash-based interventions on food crises. our contributions include identifying causal relationships within the food security system, harmonizing a comprehensive database including socio-economic, weather and remote sensing data, and estimating the causal effect of humanitarian interventions on malnutrition. on a country level, our results revealed no significant effects, likely due to limited sample size, suboptimal data quality, and an imperfect causal graph resulting from our limited understanding of multidisciplinary systems like food security. instead, on a district level, results revealed significant effects, further implying the context-specific nature of the system. this underscores the need to enhance data collection and refine causal models with domain experts for more effective future interventions and policies, improving transparency and accountability in humanitarian aid. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/56.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/56.txt new file mode 100644 index 0000000000000000000000000000000000000000..70b48bbfc7aac17b1cc1fade7d660b7ddbb9d193 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/56.txt @@ -0,0 +1 @@ + problem statement: standardisation of ai fairness rules and benchmarks is challenging because ai fairness and other ethical requirements depend on multiple factors such as context, use case, type of the ai system, and so on. in this paper, we elaborate that the ai system is prone to biases at every stage of its lifecycle, from inception to its usage, and that all stages require due attention for mitigating ai bias. we need a standardised approach to handle ai fairness at every stage. gap analysis: while ai fairness is a hot research topic, a holistic strategy for ai fairness is generally missing. most researchers focus only on a few facets of ai model-building. peer review shows excessive focus on biases in the datasets, fairness metrics, and algorithmic bias. in the process, other aspects affecting ai fairness get ignored. the solution proposed: we propose a comprehensive approach in the form of a novel seven-layer model, inspired by the open system interconnection (osi) model, to standardise ai fairness handling. despite the differences in the various aspects, most ai systems have similar model-building stages. the proposed model splits the ai system lifecycle into seven abstraction layers, each corresponding to a well-defined ai model-building or usage stage. we also provide checklists for each layer and deliberate on potential sources of bias in each layer and their mitigation methodologies. 
this work will facilitate layer-wise standardisation of ai fairness rules and benchmarking parameters. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/560.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/560.txt new file mode 100644 index 0000000000000000000000000000000000000000..ad3951e5627903717c46616827af311b68770f11 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/560.txt @@ -0,0 +1 @@ + the possibility of recognizing diverse aspects of human behavior and environmental context from passively captured data motivates its use for mental health assessment. in this paper, we analyze the contribution of different passively collected sensor data types (wifi, gps, social interaction, phone log, physical activity, audio, and academic features) to predict daily self-report stress and phq-9 depression score. first, we compute 125 mid-level features from the original raw data. these 125 features include groups of features from the different sensor data types. then, we evaluate the contribution of each feature type by comparing the performance of neural network models trained with all features against neural network models trained with specific feature groups. our results show that wifi features (which encode mobility patterns) and phone log features (which encode information correlated with sleep patterns) provide significant information for stress and depression prediction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/561.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/561.txt new file mode 100644 index 0000000000000000000000000000000000000000..39730dd7e4e12c4a31189b65e2c0bac5174bcbe9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/561.txt @@ -0,0 +1 @@ + this paper proposes the use of causal modeling to detect and mitigate algorithmic bias. we provide a brief description of causal modeling and a general overview of our approach. we then use the adult dataset, which is available for download from the uc irvine machine learning repository, to develop (1) a prediction model, which is treated as a black box, and (2) a causal model for bias mitigation. in this paper, we focus on gender bias and the problem of binary classification. we show that gender bias in the prediction model is statistically significant at the 0.05 level. we demonstrate the effectiveness of the causal model in mitigating gender bias by cross-validation. furthermore, we show that the overall classification accuracy is improved slightly. our novel approach is intuitive, easy-to-use, and can be implemented using existing statistical software tools such as lavaan in r. hence, it enhances explainability and promotes trust. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/562.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/562.txt new file mode 100644 index 0000000000000000000000000000000000000000..61957ef4f7934a2b521e45541cf9826f45441e41 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/562.txt @@ -0,0 +1 @@ + bayesian flow networks (bfns) have recently been proposed as one of the most promising directions for universal generative modelling, having the ability to learn any data type. their power comes from the expressiveness of neural networks and bayesian inference, which make them suitable in the context of continual learning.
we delve into the mechanics behind bfns and conduct experiments to empirically verify their generative capabilities on non-stationary data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/563.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/563.txt new file mode 100644 index 0000000000000000000000000000000000000000..77b73ed496a1d5e23d6074808452255053b266ad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/563.txt @@ -0,0 +1 @@ + satellite communications, essential for modern connectivity, extend access to maritime, aeronautical, and remote areas where terrestrial networks are unfeasible. current geo systems distribute power and bandwidth uniformly across beams using multi-beam footprints with fractional frequency reuse. however, recent research reveals the limitations of this approach in heterogeneous traffic scenarios, leading to inefficiencies. to address this, this paper presents a machine learning (ml)-based approach to radio resource management (rrm). we treat the rrm task as a regression ml problem, integrating rrm objectives and constraints into the loss function that the ml algorithm aims at minimizing. moreover, we introduce a context-aware ml metric that not only evaluates the ml model's performance but also considers the impact of its resource allocation decisions on the overall performance of the communication system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/564.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/564.txt new file mode 100644 index 0000000000000000000000000000000000000000..6c9e1b61394c6babdb83c9f28a8c2449ad3087c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/564.txt @@ -0,0 +1 @@ + linear temporal logic (ltl) and ω-regular objectives, a superset of ltl, have seen recent use as a way to express non-markovian objectives in reinforcement learning. we introduce a model-based probably approximately correct (pac) learning algorithm for ω-regular objectives in markov decision processes (mdps). as part of the development of our algorithm, we introduce the ε-recurrence time: a measure of the speed at which a policy converges to the satisfaction of the ω-regular objective in the limit. we prove that our algorithm only requires a polynomial number of samples in the relevant parameters, and perform experiments which confirm our theory. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/565.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/565.txt new file mode 100644 index 0000000000000000000000000000000000000000..ea6d08803cec248867d4ec5e0db2a01f815d7e2f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/565.txt @@ -0,0 +1 @@ + narrow bit-width data formats are key to reducing the computational and storage costs of modern deep learning applications. this paper evaluates microscaling (mx) data formats that combine a per-block scaling factor with narrow floating-point and integer types for individual elements. mx formats balance the competing needs of hardware efficiency, model accuracy, and user friction. empirical results on over two dozen benchmarks demonstrate the practicality of mx data formats as a drop-in replacement for baseline fp32 for ai inference and training with low user friction.
we also show the first instance of training generative language models at sub-8-bit weights, activations, and gradients with minimal accuracy loss and no modifications to the training recipe. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/566.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/566.txt new file mode 100644 index 0000000000000000000000000000000000000000..bdf14ea608b8d5cc69bd1b725a78ac218bd7f9c9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/566.txt @@ -0,0 +1 @@ + mental disorders impact the lives of millions of people globally, not only impeding their day-to-day lives but also markedly reducing life expectancy. this paper addresses the persistent challenge of predicting mortality in patients with mental diagnoses using predictive machine-learning models with electronic health records (ehr). data from patients with mental disease diagnoses were extracted from the well-known clinical mimic-iii data set utilizing demographic, prescription, and procedural information. four machine learning algorithms (logistic regression, random forest, support vector machine, and k-nearest neighbors) were used, with results indicating that random forest and support vector machine models outperformed others, with auc scores of 0.911. feature importance analysis revealed that drug prescriptions, particularly morphine sulfate, play a pivotal role in prediction. we applied a variety of machine learning algorithms to predict 30-day mortality followed by feature importance analysis. this study can be used to assist hospital workers in identifying at-risk patients to reduce excess mortality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/567.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/567.txt new file mode 100644 index 0000000000000000000000000000000000000000..fd12a2229b3f705cf04bab9e16a92a6b43eace07 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/567.txt @@ -0,0 +1 @@ + we investigate brokerage between traders from an online learning perspective. at any round t, two traders arrive with their private valuations, and the broker proposes a trading price. unlike other bilateral trade problems already studied in the online learning literature, we focus on the case where there are no designated buyer and seller roles: each trader will attempt to either buy or sell depending on the current price of the good. we assume the agents' valuations are drawn i.i.d. from a fixed but unknown distribution. if the distribution admits a density bounded by some constant m, then, for any time horizon t: • if the agents' valuations are revealed after each interaction, we provide an algorithm achieving regret m log t and show this rate is optimal, up to constant factors. • if only their willingness to sell or buy at the proposed price is revealed after each interaction, we provide an algorithm achieving regret √(mt) and show this rate is optimal, up to constant factors. finally, if we drop the bounded density assumption, we show that the optimal rate degrades to √t in the first case, and the problem becomes unlearnable in the second.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/568.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/568.txt new file mode 100644 index 0000000000000000000000000000000000000000..4d760bc25fca45a8fcfa164099bf9ead0920e560 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/568.txt @@ -0,0 +1 @@ + in inverse reinforcement learning (irl), the central objective is to infer underlying reward functions from observed expert behaviors in a way that not only explains the given data but also generalizes to unseen scenarios. this ensures robustness against reward ambiguity-where multiple reward functions can equally explain the same expert behaviors. while significant efforts have been made in addressing this issue, current methods often face challenges with high-dimensional problems and lack a geometric foundation. this paper harnesses the optimal transport (ot) theory to provide a fresh perspective on these challenges. by utilizing the wasserstein distance from ot, we establish a geometric framework that allows for quantifying reward ambiguity and identifying a central representation or "centroid" of reward functions. these insights pave the way for robust irl methodologies anchored in geometric interpretations, offering a structured approach to tackle reward ambiguity in high-dimensional settings. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/569.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/569.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ae7bd6f0a432567ec09df2b16030c6afa4f48cc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/569.txt @@ -0,0 +1 @@ + the paper presents a method for using fractional concepts in a neural network to modify the activation and loss functions. the methodology allows the neural network to define and optimize its activation functions by determining the fractional derivative order of the training process as an additional hyperparameter. this will enable neurons in the network to adjust their activation functions to match input data better and reduce output errors, potentially improving the network's overall performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/57.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/57.txt new file mode 100644 index 0000000000000000000000000000000000000000..ea750652e0f6c36e076d76b77913184297ee4e15 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/57.txt @@ -0,0 +1 @@ + in this paper we propose a general approach to define a many-valued preferential interpretation of gradual argumentation semantics. the approach allows for conditional reasoning over arguments and boolean combination of arguments, with respect to a class of gradual semantics, through the verification of graded (strict or defeasible) implications over a preferential interpretation. as a proof of concept, in the finitely-valued case, an answer set programming approach is proposed for conditional reasoning in a many-valued argumentation semantics of weighted argumentation graphs. the paper also develops and discusses a probabilistic semantics for gradual argumentation, which builds on the many-valued conditional semantics. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/570.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/570.txt new file mode 100644 index 0000000000000000000000000000000000000000..bfc7d893e8b5fd3173cb1b75eab41d4cea7a6411 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/570.txt @@ -0,0 +1 @@ + the increasing global demand for clean and environmentally friendly energy resources has caused increased interest in harnessing solar power through photovoltaic (pv) systems for smart grids and homes. however, the inherent unpredictability of pv generation poses problems associated with smart grid planning and management, energy trading and market participation, demand response, reliability, etc. therefore, solar irradiance forecasting is essential for optimizing pv system utilization. this study proposes the next-generation machine learning algorithms such as random forests, extreme gradient boosting (xgboost), light gradient boosted machine (lightgbm) ensemble, catboost, and multilayer perceptron artificial neural networks (mlp-anns) to forecast solar irradiance. besides, bayesian optimization is applied to hyperparameter tuning. unlike tree-based ensemble algorithms that select the features intrinsically, mlp-ann needs feature selection as a separate step. the simulation results indicate that the performance of the mlp-anns improves when feature selection is applied. besides, the random forest outperforms the other learning algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/571.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/571.txt new file mode 100644 index 0000000000000000000000000000000000000000..0645152c87bc22ed9c8c051fdfcce59b557f9dd0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/571.txt @@ -0,0 +1 @@ + we consider the problem of designing sample efficient learning algorithms for infinite horizon discounted reward markov decision process. specifically, we propose the accelerated natural policy gradient (anpg) algorithm that utilizes an accelerated stochastic gradient descent process to obtain the natural policy gradient. anpg achieves o(ǫ^-2) sample complexity and o(ǫ^-1) iteration complexity with general parameterization where ǫ defines the optimality error. this improves the state-of-the-art sample complexity by a log(1/ǫ) factor. anpg is a first-order algorithm and unlike some existing literature, does not require the unverifiable assumption that the variance of importance sampling (is) weights is upper bounded. in the class of hessian-free and is-free algorithms, anpg beats the best-known sample complexity by a factor of o(ǫ^(-1/2)) and simultaneously matches their state-of-the-art iteration complexity.
improved sample complexity of npg with general parameterization. columns: algorithm, sample complexity, iteration complexity, hessian-free, is-free.
vanilla-pg (yuan et al., 2022): õ(ǫ^-3), o(ǫ^-3), yes, yes
storm-pg-f (ding et al., 2022): õ((1+w)^(3/2) ǫ^-3), o(ǫ^-3), yes, no
scrn (masiha et al., 2022): õ(ǫ^-2.5), o(ǫ^-0.5), no, yes
vr-scrn (masiha et al., 2022): o(ǫ^-2 log(1/ǫ)), o(ǫ^-0.5), no, no
npg (liu et al., 2020): o(ǫ^-3), o(ǫ^-1), yes, yes
srvr-npg (liu et al., 2020): o(w ǫ^-2.5 + ǫ^-3), o(ǫ^-1), yes, no
srvr-pg (liu et al., 2020): o(w ǫ^-3), o(ǫ^-2), yes, no
n-pg-igt (fatkhullin et al., 2023): õ(ǫ^-2.5), o(ǫ^-2.5), yes, yes
harpg (fatkhullin et al., 2023): o(ǫ^-2 log(1/ǫ)), o(ǫ^-2), no, yes
anpg (this work): o(ǫ^-2), o(ǫ^-1), yes, yes \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/572.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/572.txt new file mode 100644 index 0000000000000000000000000000000000000000..46a9ae8cac47ec09a5b25b14a1f3cd5a53eb44c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/572.txt @@ -0,0 +1 @@ + these lecture notes provide an introduction to recent advances in generative modeling methods based on the dynamical transportation of measures, by means of which samples from a simple base measure are mapped to samples from a target measure of interest. special emphasis is put on the applications of these methods to monte-carlo (mc) sampling techniques, such as importance sampling and markov chain monte-carlo (mcmc) schemes. in this context, it is shown how the maps can be learned variationally using data generated by mc sampling, and how they can in turn be used to improve such sampling in a positive feedback loop. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/573.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/573.txt new file mode 100644 index 0000000000000000000000000000000000000000..b7597a555e76b4ce763dd72a1b81382422f53339 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/573.txt @@ -0,0 +1 @@ + hierarchical forecasting techniques allow for the creation of forecasts that are coherent with respect to a pre-specified hierarchy of the underlying time series. this targets a key problem in e-commerce, where we often find millions of products across many product hierarchies, and forecasts need to be made for both individual products and product aggregations. however, existing hierarchical forecasting techniques scale poorly when the number of time series increases, which limits their applicability at a scale of millions of products. in this paper, we propose to learn a coherent forecast for millions of products with a single bottom-level forecast model by using a loss function that directly optimizes the hierarchical product structure. we implement our loss function using sparse linear algebra, such that the number of operations in our loss function scales quadratically rather than cubically with the number of products and levels in the hierarchical structure. the benefit of our sparse hierarchical loss function is that it provides practitioners a method of producing bottom-level forecasts that are coherent to any chosen cross-sectional or temporal hierarchy.
in addition, removing the need for a post-processing step as required in traditional hierarchical forecasting techniques reduces the computational cost of the prediction phase in the forecasting pipeline, as well as its deployment complexity.in our tests on the public m5 dataset, our sparse hierarchical loss function performs up to 10% better as measured by rmse and mae than the baseline loss function. next, we implement our sparse hierarchical loss function within an existing gradient boosting-based forecasting model at bol, a large european e-commerce platform. at bol, each day a forecast for the weekly demand of every product for the next twelve weeks is required. in this setting our sparse hierarchical loss resulted in an improved forecasting performance as measured by rmse of about 2% at the product level, as compared to the baseline model, and an improvement of about 10% at the product level as measured by mae. finally, we found an increase in forecasting performance of about 5-10% (both rmse and mae) when evaluating the forecasting performance across the cross-sectional hierarchies that we defined. these results demonstrate the usefulness of our sparse hierarchical loss applied to a production forecasting system at a major e-commerce platform. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/574.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/574.txt new file mode 100644 index 0000000000000000000000000000000000000000..1b5aabfbb69f1c21851e0153281d07688f0be0c2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/574.txt @@ -0,0 +1 @@ + for some hypothesis classes and input distributions, active agnostic learning needs exponentially fewer samples than passive learning; for other classes and distributions, it offers little to no improvement. the most popular algorithms for agnostic active learning express their performance in terms of a parameter called the disagreement coefficient, but it is known that these algorithms are inefficient on some inputs. we take a different approach to agnostic active learning, getting an algorithm that is competitive with the optimal algorithm for any binary hypothesis class h and distribution d x over x. in particular, if any algorithm can use m * queries to get o(η) error, then our algorithm uses o(m * log |h|) queries to get o(η) error. our algorithm lies in the vein of the splitting-based approach of dasgupta , which gets a similar result for the realizable (η = 0) setting. we also show that it is np-hard to do better than our algorithm's o(log |h|) overhead in general. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/575.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/575.txt new file mode 100644 index 0000000000000000000000000000000000000000..c1d140e8c877eff0aa193a656b8ba794620cfb10 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/575.txt @@ -0,0 +1 @@ + we show that many definitions of stability found in the learning theory literature are equivalent to one another. we distinguish between two families of definitions of stability: distribution-dependent and distribution-independent bayesian stability. 
within each family, we establish equivalences between various definitions, encompassing approximate differential privacy, pure differential privacy, replicability, global stability, perfect generalization, tv stability, mutual information stability, kl-divergence stability, and rényi-divergence stability. along the way, we prove boosting results that enable the amplification of the stability of a learning rule. this work is a step towards a more systematic taxonomy of stability notions in learning theory, which can promote clarity and an improved understanding of an array of stability concepts that have emerged in recent years. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/576.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/576.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f3d06f6677cd1f3e7ad264e5bf2160a65390aa1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/576.txt @@ -0,0 +1 @@ + forecasting project expenses is a crucial step for businesses to avoid budget overruns and project failures. traditionally, this has been done by financial analysts or data science techniques such as time-series analysis. however, these approaches can be uncertain and produce results that differ from the planned budget, especially at the start of a project with limited data points. this paper proposes a constrained non-negative matrix completion model that predicts expenses by learning the likelihood of the project correlating with certain expense patterns in the latent space. the model is constrained on three probability simplexes, two of which are on the factor matrices and the third on the missing entries. additionally, the predicted expense values are guaranteed to meet the budget constraint without the need of post-processing. an inexact alternating optimization algorithm is developed to solve the associated optimization problem and is proven to converge to a stationary point. results from two real datasets demonstrate the effectiveness of the proposed method in comparison to state-of-the-art algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/577.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/577.txt new file mode 100644 index 0000000000000000000000000000000000000000..31a3b92e3afb6a5dfc112523166812b44b057de0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/577.txt @@ -0,0 +1 @@ + clustering is a data analysis method for extracting knowledge by discovering groups of data called clusters. among these methods, state-of-the-art density-based clustering methods have proven to be effective for arbitrary-shaped clusters. despite their encouraging results, they suffer to find lowdensity clusters, near clusters with similar densities, and high-dimensional data. our proposals are a new characterization of clusters and a new clustering algorithm based on spatial density and probabilistic approach. first of all, sub-clusters are built using spatial density represented as probability density function (p.d.f ) of pairwise distances between points. a method is then proposed to agglomerate similar sub-clusters by using both their density (p.d.f ) and their spatial distance. the key idea we propose is to use the wasserstein metric, a powerful tool to measure the distance between p.d.f of sub-clusters. we show that our approach outperforms other state-of-the-art density-based clustering methods on a wide variety of datasets. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/578.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/578.txt new file mode 100644 index 0000000000000000000000000000000000000000..213cd448648e8ae422e76de0905d57e96fde32a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/578.txt @@ -0,0 +1 @@ + this work centers on the communication aspects of decentralized learning over wireless networks, using consensusbased decentralized stochastic gradient descent (d-sgd). considering the actual communication cost or delay caused by in-network information exchange in an iterative process, our goal is to achieve fast convergence of the algorithm measured by improvement per transmission slot. we propose bass, an efficient communication framework for d-sgd over wireless networks with broadcast transmission and probabilistic subgraph sampling. in each iteration, we activate multiple subsets of noninterfering nodes to broadcast model updates to their neighbors. these subsets are randomly activated over time, with probabilities reflecting their importance in network connectivity and subject to a communication cost constraint (e.g., the average number of transmission slots per iteration). during the consensus update step, only bi-directional links are effectively preserved to maintain communication symmetry. in comparison to existing link-based scheduling methods, the inherent broadcasting nature of wireless channels offers intrinsic advantages in speeding up convergence of decentralized learning by creating more communicated links with the same number of transmission slots. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/579.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/579.txt new file mode 100644 index 0000000000000000000000000000000000000000..c89561dd85100fe69e09be84f380ed9e6e00d766 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/579.txt @@ -0,0 +1 @@ + federated learning (fl) is a promising distributed method for edge-level machine learning, particularly for privacysensitive applications such as those in military and medical domains, where client data cannot be shared or transferred to a cloud computing server. in many use-cases, communication cost is a major challenge in fl due to its natural intensive network usage. client devices, such as smartphones or internet of things (iot) nodes, have limited resources in terms of energy, computation, and memory. to address these hardware constraints, lightweight models and compression techniques such as pruning and quantization are commonly adopted in centralised paradigms. in this paper, we investigate the impact of compression techniques on fl for a typical image classification task. going further, we demonstrate that a straightforward method can compresses messages up to 50% while having less than 1% of accuracy loss, competing with state-of-the-art techniques. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/58.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/58.txt new file mode 100644 index 0000000000000000000000000000000000000000..a087d42af81efe7a218dc1267aebe087357b859c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/58.txt @@ -0,0 +1 @@ + dcop algorithms usually rely on interaction graphs to operate. 
in open and dynamic environments, such methods need to address how this interaction graph is generated and maintained among agents. existing methods require reconstructing the entire graph upon detecting changes in the environment or assuming that new agents know potential neighbors to facilitate connection. we propose a novel distributed interaction graph construction algorithm to address this problem. the proposed method does not assume a predefined constraint graph and stabilizes after disruptive changes in the environment. we evaluate our approach by pairing it with existing dcop algorithms to solve several generated dynamic problems. the experiment results show that the proposed algorithm effectively constructs and maintains a stable multi-agent interaction graph for open and dynamic environments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/580.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/580.txt new file mode 100644 index 0000000000000000000000000000000000000000..fad828fe600b97551fed40150c70711b60c3b6ce --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/580.txt @@ -0,0 +1 @@ + as algorithmic decision-making systems become more prevalent in society, ensuring the fairness of these systems is becoming increasingly important. whilst there has been substantial research in building fair algorithmic decision-making systems, the majority of these methods require access to the training data, including personal characteristics, and are not transparent regarding which individuals are classified unfairly. in this paper, we propose a novel model-agnostic argumentation-based method to determine why an individual is classified differently in comparison to similar individuals. our method uses a quantitative argumentation framework to represent attribute-value pairs of an individual and of those similar to them, and uses a well-known semantics to identify the attribute-value pairs in the individual contributing most to their different classification. we evaluate our method on two datasets commonly used in the fairness literature and illustrate its effectiveness in the identification of bias. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/581.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/581.txt new file mode 100644 index 0000000000000000000000000000000000000000..c5711bb4290ab332d2b0aa753ab16ddf48757648 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/581.txt @@ -0,0 +1 @@ + traffic forecasting, which benefits from mobile internet development and position technologies, plays a critical role in intelligent transportation systems. it helps to implement rich and varied transportation applications and bring convenient transportation services to people based on collected traffic data. most existing methods usually leverage graph-based deep learning networks to model the complex road network for traffic forecasting shallowly. despite their effectiveness, these methods are generally limited in fully capturing high-order spatial dependencies caused by road network topology and highorder temporal dependencies caused by traffic dynamics. to tackle the above issues, we focus on the essence of traffic system and propose sthode: spatio-temporal hypergraph neural ordinary differential equation network, which combines road network topology and traffic dynamics to capture highorder spatio-temporal dependencies in traffic data. 
technically, sthode consists of a spatial module and a temporal module. on the one hand, we construct a spatial hypergraph and leverage an adaptive mixhop hypergraph ode network to capture highorder spatial dependencies. on the other hand, we utilize a temporal hypergraph and employ a hyperedge evolving ode network to capture high-order temporal dependencies. finally, we aggregate the outputs of stacked sthode layers to mutually enhance the prediction performance. extensive experiments conducted on four real-world traffic datasets demonstrate the superior performance of our proposed model compared to various baselines. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/582.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/582.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebcd9355565a3e461a5b80befa7bf5adb5cd87c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/582.txt @@ -0,0 +1 @@ + event detection in time series data is crucial in various domains, including finance, healthcare, cybersecurity, and science. accurately identifying events in time series data is vital for making informed decisions, detecting anomalies, and predicting future trends. despite extensive research exploring diverse methods for event detection in time series, with deep learning approaches being among the most advanced, there is still room for improvement and innovation in this field. in this paper, we present a new deep learning supervised method for detecting events in multivariate time series data. our method combines four distinct novelties compared to existing deep-learning supervised methods. firstly, it is based on regression instead of binary classification. secondly, it does not require labeled datasets where each point is labeled; instead, it only requires reference events defined as time points or intervals of time. thirdly, it is designed to be robust by using a stacked ensemble learning meta-model that combines deep learning models, ranging from classic feedforward neural networks (ffns) to state-of-the-art architectures like transformers. this ensemble approach can mitigate individual model weaknesses and biases, resulting in more robust predictions. finally, to facilitate practical implementation, we have developed a python package to accompany our proposed method. the package, called eventdetector-ts, can be installed through the python package index (pypi). in this paper, we present our method and provide a comprehensive guide on the usage of the package. we showcase its versatility and effectiveness through different real-world use cases from natural language processing (nlp) to financial security domains. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/583.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/583.txt new file mode 100644 index 0000000000000000000000000000000000000000..2fd685305321a35ed1d9cfac4a50f432c58ca0e6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/583.txt @@ -0,0 +1 @@ + this article focuses on large language models (llms) fine-tuning in the scarce data regime (also known as the "few-shot" learning setting). we propose a method to increase the generalization capabilities of llms based on neural network subspaces. 
this optimization method, recently introduced in computer vision, aims to improve model generalization by identifying wider local optima through the joint optimization of an entire simplex of models in parameter space. its adaptation to massive, pretrained transformers, however, poses some challenges. first, their considerable number of parameters makes it difficult to train several models jointly, and second, their deterministic parameter initialization schemes make them unfit for the subspace method as originally proposed. we show in this paper that "parameter efficient fine-tuning" (peft) methods, however, are perfectly compatible with this original approach, and propose to learn entire simplex of continuous prefixes. we test our method on a variant of the glue benchmark adapted to the few-shot learning setting, and show that both our contributions jointly lead to a gain in average performances compared to sota methods. the implementation can be found at the following link: https: //github.com/liloulou/prefix_subspace \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/584.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/584.txt new file mode 100644 index 0000000000000000000000000000000000000000..032940d21e3c0cc8c651670711afb96165986cc6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/584.txt @@ -0,0 +1 @@ + we consider reconstruction of an ambient signal in a compressed sensing (cs) setup where the ambient signal has a neural network based generative model. the generative model has a sparse-latent input and we refer to the generated ambient signal as generative sparse-latent signal (gsl). the proposed sparsity inducing reconstruction algorithm is inherently non-convex, and we show that a gradient based search provides a good reconstruction performance. we evaluate our proposed algorithm using simulated data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/585.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/585.txt new file mode 100644 index 0000000000000000000000000000000000000000..42ebc4aca849ca3ec4ac20dc2630ec1b335a06dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/585.txt @@ -0,0 +1 @@ + we discuss a boosting approach for the ridge regression (rr) method, with applications to the extreme learning machine (elm), and we show that the proposed method significantly improves the classification performance and robustness of elms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/586.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/586.txt new file mode 100644 index 0000000000000000000000000000000000000000..dab6fef793559ce5cce8590fb0164a5166ac36ad --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/586.txt @@ -0,0 +1 @@ + federated learning is becoming increasingly relevant and popular as we witness a surge in data collection and storage of personally identifiable information. alongside these developments there have been many proposals from governments around the world to provide more protections for individuals' data and a heightened interest in data privacy measures. as deep learning continues to become more relevant in new and existing domains, it is vital to develop strategies like federated learning that can effectively train data from different sources, such as edge devices, without compromising security and privacy. 
recently, the flower (flwr) python package was introduced to provide a scalable, flexible, and easy-to-use framework for implementing federated learning. however, to date, flower is only able to run synchronous federated learning which can be costly and timeconsuming to run because the process is bottlenecked by client-side training jobs that are slow or fragile. here, we introduce flwr-serverless, a wrapper around the flower package that extends its functionality to allow for both synchronous and asynchronous federated learning with minimal modification to flower's design paradigm. furthermore, our approach to federated learning allows the process to run without a central server, which increases the domains of application and accessibility of its use. this paper presents the design details and usage of this approach through a series of experiments that were conducted using public datasets. overall, we believe that our approach decreases the time and cost to run federated training and provides an easier way to implement and experiment with federated learning systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/587.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/587.txt new file mode 100644 index 0000000000000000000000000000000000000000..8718a406331dbf93dd1b20c177a345fe83f612c4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/587.txt @@ -0,0 +1 @@ + we introduce mlfmf, a collection of data sets for benchmarking recommendation systems used to support formalization of mathematics with proof assistants. these systems help humans identify which previous entries (theorems, constructions, datatypes, and postulates) are relevant in proving a new theorem or carrying out a new construction. each data set is derived from a library of formalized mathematics written in proof assistants agda or lean. the collection includes the largest lean 4 library mathlib, and some of the largest agda libraries: the standard library, the library of univalent mathematics agda-unimath, and the typetopology library. each data set represents the corresponding library in two ways: as a heterogeneous network, and as a list of s-expressions representing the syntax trees of all the entries in the library. the network contains the (modular) structure of the library and the references between entries, while the s-expressions give complete and easily parsed information about every entry. we report baseline results using standard graph and word embeddings, tree ensembles, and instance-based learning algorithms. the mlfmf data sets provide solid benchmarking support for further investigation of the numerous machine learning approaches to formalized mathematics. the methodology used to extract the networks and the s-expressions readily applies to other libraries, and is applicable to other proof assistants. with more than 250 000 entries in total, this is currently the largest collection of formalized mathematical knowledge in machine learnable format. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/588.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/588.txt new file mode 100644 index 0000000000000000000000000000000000000000..cac45dfa0711f38cca541d6cb303b1c6c5b7d354 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/588.txt @@ -0,0 +1 @@ + causal inference in a sub-population involves identifying the causal effect of an intervention on a specific subgroup, which is distinguished from the whole population through the influence of systematic biases in the sampling process. however, ignoring the subtleties introduced by sub-populations can either lead to erroneous inference or limit the applicability of existing methods. we introduce and advocate for a causal inference problem in sub-populations (henceforth called s-id), in which we merely have access to observational data of the targeted sub-population (as opposed to the entire population). existing inference problems in sub-populations operate on the premise that the given data distributions originate from the entire population, thus, cannot tackle the s-id problem. to address this gap, we provide necessary and sufficient conditions that must hold in the causal graph for a causal effect in a sub-population to be identifiable from the observational distribution of that sub-population. given these conditions, we present a sound and complete algorithm for the s-id problem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/589.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/589.txt new file mode 100644 index 0000000000000000000000000000000000000000..82cf1804c059f2b4eaa5eba217d6f5c6a1c2810c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/589.txt @@ -0,0 +1 @@ + since large language models or llms have demonstrated high-quality performance on many complex language tasks, there is a great interest in bringing these llms to mobile devices for faster responses and better privacy protection. however, the size of llms (i.e., billions of parameters) requires highly effective compression to fit into storage-limited devices. among many compression techniques, weightclustering, a form of non-linear quantization, is one of the leading candidates for llm compression, and supported by modern smartphones. yet, its training overhead is prohibitively significant for llm fine-tuning. especially, differentiable kmeans clustering, or dkm, has shown the stateof-the-art trade-off between compression ratio and accuracy regression, but its large memory complexity makes it nearly impossible to apply to train-time llm compression. in this paper, we propose a memory-efficient dkm implementation, edkm powered by novel techniques to reduce the memory footprint of dkm by orders of magnitudes. for a given tensor to be saved on cpu for the backward pass of dkm, we compressed the tensor by applying uniquification and sharding after checking if there is no duplicated tensor previously copied to cpu. our experimental results demonstrate that edkm can fine-tune and compress a pretrained llama 7b model from 12.6 gb to 2.5 gb (3bit/weight) with the alpaca dataset by reducing the train-time memory footprint of a decoder layer by 130×, while delivering good accuracy on broader llm benchmarks (i.e., 77.7% for piqa, 66.1% for winograde, and so on). 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/59.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/59.txt new file mode 100644 index 0000000000000000000000000000000000000000..9aacc68ff84e2f57236c8d6ebaf10b62eea91098 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/59.txt @@ -0,0 +1 @@ + an eco-system of agents each having their own policy with some, but limited, generalizability has proven to be a reliable approach to increase generalization across procedurally generated environments. in such an approach, new agents are regularly added to the eco-system when encountering a new environment that is outside of the scope of the eco-system. the speed of adaptation and general effectiveness of the eco-system approach highly depends on the initialization of new agents. in this paper we propose different initialization techniques, inspired from deep neural network initialization and transfer learning, and study their impact. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/590.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/590.txt new file mode 100644 index 0000000000000000000000000000000000000000..b02264010cec10e889268419f638a1e94c0af983 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/590.txt @@ -0,0 +1 @@ + score-based approaches in the structure learning task are thriving because of their scalability. continuous relaxation has been the key reason for this advancement. despite achieving promising outcomes, most of these methods are still struggling to ensure that the graphs generated from the latent space are acyclic by minimizing a defined score. there has also been another trend of permutation-based approaches, which concern the search for the topological ordering of the variables in the directed acyclic graph in order to limit the search space of the graph. in this study, we propose an alternative approach for strictly constraining the acyclicty of the graphs with an integration of the knowledge from the topological orderings. our approach can reduce inference complexity while ensuring the structures of the generated graphs to be acyclic. our empirical experiments with simulated and real-world data show that our approach can outperform related bayesian score-based approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/591.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/591.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ccb2ed7c871e1c3298eb62a819cbcfea412c848 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/591.txt @@ -0,0 +1 @@ + we study online classification in the presence of noisy labels. the noise mechanism is modeled by a general kernel that specifies, for any feature-label pair, a (known) set of distributions over noisy labels. at each time step, an adversary selects an unknown distribution from the distribution set specified by the kernel based on the actual feature-label pair, and generates the noisy label from the selected distribution. the learner then makes a prediction based on the actual features and noisy labels observed thus far, and incurs loss 1 if the prediction differs from the underlying truth (and 0 otherwise). the prediction quality is quantified through minimax risk, which computes the cumulative loss over a finite horizon t . 
we show that for a wide range of natural noise kernels, adversarially selected features, and finite class of labeling functions, minimax risk can be upper bounded independent of the time horizon and logarithmic in the size of labeling function class. we then extend these results to inifinite classes and stochastically generated features via the concept of stochastic sequential covering. our results extend and encompass findings of ben-david et al. (2009) through substantial generality, and provide intuitive understanding through a novel reduction to online conditional distribution estimation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/592.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/592.txt new file mode 100644 index 0000000000000000000000000000000000000000..537a1f219900955f980f727493fac4ea024e5ddb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/592.txt @@ -0,0 +1 @@ + many choice problems cannot be solved exactly and use several estimation algorithms that assign scores to the different available options. the estimation errors can have various correlations, from low (e.g. between two very different approaches) to high (e.g. when using a given algorithm with different hyperparameters). most aggregation rules would suffer from this diversity of correlations. in this article, we introduce embedded voting (ev), an aggregation rule designed to take correlations into account, and we compare it to other aggregation rules in various experiments based on synthetic data. our results show that when sufficient information about the correlations between errors is available, a maximum likelihood aggregation should be preferred. otherwise, typically with limited training data, ev outperforms the other approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/593.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/593.txt new file mode 100644 index 0000000000000000000000000000000000000000..a1ae9f51d427814dcf2b5f264ad34726f8e8ddb7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/593.txt @@ -0,0 +1 @@ + internet of things (iot) sensors are nowadays heavily utilized in various real-world applications ranging from wearables to smart buildings passing by agrotechnology and health monitoring. with the huge amounts of data generated by these tiny devices, deep learning (dl) models have been extensively used to enhance them with intelligent processing. however, with the urge for smaller and more accurate devices, dl models became too heavy to deploy. it is thus necessary to incorporate the hardware's limited resources in the design process. therefore, inspired by the human brain known for its efficiency and low power consumption, we propose a shallow bidirectional network based on predictive coding theory and dynamic early exiting for halting further computations when a performance threshold is surpassed. we achieve comparable accuracy to vgg-16 in image classification on cifar-10 with fewer parameters and less computational complexity. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/594.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/594.txt new file mode 100644 index 0000000000000000000000000000000000000000..ceff265def624ddf209a54b765346e95ff6082f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/594.txt @@ -0,0 +1 @@ + storing and streaming high dimensional data for foundation model training became a critical requirement with the rise of foundation models beyond natural language. in this paper we introduce tensorbank, a petabyte-scale tensor lakehouse capable of streaming tensors from cloud object store (cos) to gpu memory at wire speed based on complex relational queries. we use hierarchical statistical indices (hsi) for query acceleration. our architecture allows us to directly address tensors at the block level using http range reads. once in gpu memory, data can be transformed using pytorch transforms. we provide a generic pytorch dataset type with a corresponding dataset factory translating relational queries and requested transformations as an instance. by making use of the hsi, irrelevant blocks can be skipped without reading them as those indices contain statistics on their content at different hierarchical resolution levels. this is an opinionated architecture powered by open standards and making heavy use of open-source technology. although hardened for production use with geospatial-temporal data, this architecture generalizes to other use cases like computer vision, computational neuroscience, biological sequence analysis and more. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/595.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/595.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2f3db9cf9b08b00f095b5a5de04ad38f8251065 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/595.txt @@ -0,0 +1 @@ + through theoretical and experimental validation, unlike all existing adaptive methods like adam which penalize frequently-changing parameters and are only applicable to sparse gradients, we propose the simplest sgd enhanced method, loss-controlled asymmetric momentum (lcam). by averaging the loss, we divide the training process into different loss phases and use different momentum in each. it can not only accelerate slow-changing parameters for sparse gradients, similar to adaptive optimizers, but also choose to accelerate frequently-changing parameters for non-sparse gradients, thus being adaptable to all types of datasets. we reinterpret the machine learning training process through the concepts of weight coupling and weight traction, and experimentally validate that weights have directional specificity, which is correlated with the specificity of the dataset. thus interestingly, we observe that in non-sparse gradients, frequently-changing parameters should actually be accelerated, which is completely opposite to traditional adaptive perspectives. compared to traditional sgd with momentum, this algorithm separates the weights without additional computational costs. it is noteworthy that this method relies on the network's ability to extract complex features. we primarily use wide residual networks for our research, employing the classic datasets cifar10 and cifar100 to test the ability for feature separation and conclude phenomena that are much more important than just accuracy rates.
finally, compared to classic sgd tuning methods, while using wrn on these two datasets and with nearly half the training epochs, we achieve equal or better test accuracy. our demonstration code is available at https://github.com/hakumaicc/asymmetric-momentum-lcam \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/596.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/596.txt new file mode 100644 index 0000000000000000000000000000000000000000..eddb010eb48b0a7fe173142e7cb1b6478d40bf01 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/596.txt @@ -0,0 +1 @@ + this paper proposes an efficient optimizer called adaplus which integrates nesterov momentum and precise stepsize adjustment on adamw basis. adaplus combines the advantages of adamw, nadam, and adabelief and, in particular, does not introduce any extra hyper-parameters. we perform extensive experimental evaluations on three machine learning tasks to validate the effectiveness of adaplus. the experiment results validate that adaplus (i) among all the evaluated adaptive methods, performs most comparable with (even slightly better than) sgd with momentum on image classification tasks and (ii) outperforms other state-of-the-art optimizers on language modeling tasks and illustrates pretty high stability when training gans. the experiment code of adaplus will be accessible at: https://github.com/guanleics/adaplus. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/597.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/597.txt new file mode 100644 index 0000000000000000000000000000000000000000..88b8748778ff5b6e5ae82c2569eb8a66e8957d3c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/597.txt @@ -0,0 +1 @@ + in this paper, we consider an infinite horizon average reward markov decision process (mdp). distinguishing itself from existing works within this context, our approach harnesses the power of the general policy gradient-based algorithm, liberating it from the constraints of assuming a linear mdp structure. we propose a policy gradient-based algorithm and show its global convergence property. we then prove that the proposed algorithm has õ(t 3/4 ) regret. remarkably, this paper marks a pioneering effort by presenting the first exploration into regret-bound computation for the general parameterized policy gradient algorithm in the context of average reward scenarios. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/598.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/598.txt new file mode 100644 index 0000000000000000000000000000000000000000..7765b5dbe0ef88afaff434d89f0d242460937530 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/598.txt @@ -0,0 +1 @@ + we tackle the network topology inference problem by utilizing laplacian constrained gaussian graphical models, which recast the task as estimating a precision matrix in the form of a graph laplacian. recent research has uncovered the limitations of the widely used ℓ1-norm in learning sparse graphs under this model: empirically, the number of nonzero entries in the solution grows with the regularization parameter of the ℓ1-norm; theoretically, a large regularization parameter leads to a fully connected (densest) graph. to overcome these challenges, we propose a graph laplacian estimation method incorporating the ℓ0-norm constraint. 
an efficient gradient projection algorithm is developed to solve the resulting optimization problem, characterized by sparsity and laplacian constraints. through numerical experiments with synthetic and financial time-series datasets, we demonstrate the effectiveness of the proposed method in network topology inference. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/599.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/599.txt new file mode 100644 index 0000000000000000000000000000000000000000..37d7ac41ae243dc23da798315c9df3703587f3f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/599.txt @@ -0,0 +1 @@ + the gaussian process state-space model (gpssm) has attracted extensive attention for modeling complex nonlinear dynamical systems. however, the existing gpssm employs separate gaussian processes (gps) for each latent state dimension, leading to escalating computational complexity and parameter proliferation, thus posing challenges for modeling dynamical systems with high-dimensional latent states. to surmount this obstacle, we propose to integrate the efficient transformed gaussian process (etgp) into the gpssm, which involves pushing a shared gp through multiple normalizing flows to efficiently model the transition function in high-dimensional latent state space. additionally, we develop a corresponding variational inference algorithm that surpasses existing methods in terms of parameter count and computational complexity. experimental results on diverse synthetic and real-world datasets corroborate the efficiency of the proposed method, while also demonstrating its ability to achieve similar inference performance compared to existing methods. code is available at https://github.com/zhidilin/gpssmproj. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/6.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/6.txt new file mode 100644 index 0000000000000000000000000000000000000000..96851d009911459d975127fd44f54881a1a69568 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/6.txt @@ -0,0 +1 @@ + the performance of individual evolutionary optimization algorithms are mostly measured in terms of statistics such as mean, median and standard deviation etc., computed over the best solutions obtained with few trails of the algorithm. to compare the performance of two algorithms, the values of these statistics are compared instead of comparing the solutions directly. this kind of comparison lacks direct comparison of solutions obtained with different algorithms. for instance, the comparison of best solutions (or worst solution) of two algorithms simply not possible. moreover, ranking of algorithms is mostly done in terms of solution quality only, despite the fact that the convergence of algorithm is also an important factor. in this paper, a direct comparison approach is proposed to analyze the performance of evolutionary optimization algorithms. a direct comparison matrix called prasatul matrix is prepared, which accounts direct comparison outcome of best solutions obtained with two algorithms for a specific number of trials. five different performance measures are designed based on the prasatul matrix to evaluate the performance of algorithms in terms of optimality and comparability of solutions. 
these scores are utilized to develop a score-driven approach for comparing performance of multiple algorithms as well as for ranking both in the grounds of solution quality and convergence analysis. proposed approach is analyzed with six evolutionary optimization algorithms on 25 benchmark functions. a non-parametric statistical analysis namely wilcoxon paired sum-rank test is also performed to verify the outcomes of proposed direct comparison approach. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/60.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/60.txt new file mode 100644 index 0000000000000000000000000000000000000000..620516fafb464b05d2a0b45a492bd657b8f57059 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/60.txt @@ -0,0 +1 @@ + with the rise in high resolution remote sensing technologies there has been an explosion in the amount of data available for forest monitoring, and an accompanying growth in artificial intelligence applications to automatically derive forest properties of interest from these datasets. many studies use their own data at small spatio-temporal scales, and demonstrate an application of an existing or adapted data science method for a particular task. this approach often involves intensive and timeconsuming data collection and processing, but generates results restricted to specific ecosystems and sensor types. there is a lack of widespread acknowledgement of how the types and structures of data used affects performance and accuracy of analysis algorithms. to accelerate progress in the field more efficiently, benchmarking datasets upon which methods can be tested and compared are sorely needed.here, we discuss how lack of standardisation impacts confidence in estimation of key forest properties, and how considerations of data collection need to be accounted for in assessing method performance. we present pragmatic requirements and considerations for the creation of rigorous, useful benchmarking datasets for forest monitoring applications, and discuss how tools from modern data science can improve use of existing data. we list a set of example largescale datasets that could contribute to benchmarking, and present a vision for how community-driven, representative benchmarking initiatives could benefit the field. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/600.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/600.txt new file mode 100644 index 0000000000000000000000000000000000000000..12cad9be063cf0a4de2fb15a0f7f705d8919c3dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/600.txt @@ -0,0 +1 @@ + one of the challenges in deploying a machine learning model is that the model's performance degrades as the operating environment changes. to maintain the performance, streaming active learning is used, in which the model is retrained by adding a newly annotated sample to the training dataset if the prediction of the sample is not certain enough. although many streaming active learning methods have been proposed for classification problems, few efforts have been made for regression problems, which are often handled in the industrial field. in this paper, we propose to use the regression-viaclassification framework for streaming active learning for regression. 
regression-via-classification transforms regression problems into classification problems so that streaming active learning methods proposed for classification problems can be applied directly to regression problems. experimental validation on four real data sets shows that the proposed method can perform regression with higher accuracy at the same annotation cost. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/601.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/601.txt new file mode 100644 index 0000000000000000000000000000000000000000..0924054d8dbcff4d4314b6b7170e5f3e876e3079 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/601.txt @@ -0,0 +1 @@ + clipping" (a.k.a. importance weight truncation) is a widely used variance-reduction technique for counterfactual off-policy estimators.like other variance-reduction techniques, clipping reduces variance at the cost of increased bias. however, unlike other techniques, the bias introduced by clipping is always a downward bias (assuming non-negative rewards), yielding a lower bound on the true expected reward. in this work we propose a simple extension, called double clipping, which aims to compensate this downward bias and thus reduce the overall bias, while maintaining the variance reduction properties of the original estimator. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/602.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/602.txt new file mode 100644 index 0000000000000000000000000000000000000000..04b8689c21c9a34227064e7cb5bbfdefc67238e1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/602.txt @@ -0,0 +1 @@ + latent factor analysis via dynamical systems (lfads) is an rnn-based variational sequential autoencoder that achieves state-of-the-art performance in denoising high-dimensional neural activity for downstream applications in science and engineering. recently introduced variants and extensions continue to demonstrate the applicability of the architecture to a wide variety of problems in neuroscience. since the development of the original implementation of lfads, new technologies have emerged that use dynamic computation graphs, minimize boilerplate code, compose model configuration files, and simplify largescale training. building on these modern python libraries, we introduce lfads-torch-a new open-source implementation of lfads that unifies existing variants and is designed to be easier to understand, configure, and extend. documentation, source code, and issue tracking are available at: https://github.com/arsedler9/lfads-torch. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/603.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/603.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc0b283985dc261fff0e1e725ceb53ce23c52d6d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/603.txt @@ -0,0 +1 @@ + a graph is a very common and powerful data structure used for modeling communication and social networks. models that generate graphs with arbitrary features are important basic technologies in repeated simulations of networks and prediction of topology changes. although existing generative models for graphs are useful for providing graphs similar to real-world graphs, graph generation models with tunable features have been less explored in the field. 
previously, we have proposed graphtune, a generative model for graphs that continuously tune specific graph features of generated graphs while maintaining most of the features of a given graph dataset. however, the tuning accuracy of graph features in graphtune has not been sufficient for practical applications. in this paper, we propose a method to improve the accuracy of graphtune by adding a new mechanism to feed back errors of graph features of generated graphs and by training them alternately and independently. experiments on a real-world graph dataset showed that the features in the generated graphs are accurately tuned compared with conventional models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/604.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/604.txt new file mode 100644 index 0000000000000000000000000000000000000000..59a041c2cfeae1d0c762691a571a86d993697747 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/604.txt @@ -0,0 +1 @@ + recently, there has been a growing focus on determining the minimum width requirements for achieving the universal approximation property in deep, narrow multi-layer perceptrons (mlps). among these challenges, one particularly challenging task is approximating a continuous function under the uniform norm, as indicated by the significant disparity between its lower and upper bounds. to address this problem, we propose a framework that simplifies finding the minimum width for deep, narrow mlps into determining a purely geometrical function denoted as w(d_x, d_y). this function relies solely on the input and output dimensions, represented as d_x and d_y, respectively. two key steps support this framework. first, we demonstrate that deep, narrow mlps, when provided with a small additional width, can approximate a c^2-diffeomorphism. subsequently, using this result, we prove that w(d_x, d_y) equates to the optimal minimum width required for deep, narrow mlps to achieve universality. by employing the aforementioned framework and the whitney embedding theorem, we provide an upper bound for the minimum width, given by max(2d_x + 1, d_y) + α(σ), where 0 ≤ α(σ) ≤ 2 represents a constant depending on the activation function. furthermore, we provide a lower bound of 4 for the minimum width in cases where the input and output dimensions are both equal to two. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/605.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/605.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f3e452cc2d70e1e8fd0c642fd01dc2bcaa943f4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/605.txt @@ -0,0 +1 @@ + stochastic multi-armed bandits are a sequential-decision-making framework, where, at each interaction step, the learner selects an arm and observes a stochastic reward. within the context of best-arm identification (bai) problems, the goal of the agent lies in finding the optimal arm, i.e., the one with the highest expected reward, as accurately and efficiently as possible. nevertheless, the sequential interaction protocol of classical bai problems, where the agent has complete control over the arm being pulled at each round, does not effectively model several decision-making problems of interest (e.g., off-policy learning, human feedback).
for this reason, in this work, we propose a novel strict generalization of the classical bai problem that we refer to as best-arm identification under mediators' feedback (bai-mf). more specifically, we consider the scenario in which the learner has access to a set of mediators, each of which selects the arms on the agent's behalf according to a stochastic and possibly unknown policy. the mediator, then, communicates back to the agent the pulled arm together with the observed reward. in this setting, the agent's goal lies in sequentially choosing which mediator to query to identify with high probability the optimal arm while minimizing the identification time, i.e., the sample complexity. to this end, we first derive and analyze a statistical lower bound on the sample complexity specific to our general mediator feedback scenario. then, we propose a sequential decisionmaking strategy for discovering the best arm; as our theory verifies, this algorithm matches the lower bound both almost surely and in expectation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/606.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/606.txt new file mode 100644 index 0000000000000000000000000000000000000000..63da7c5ebfe49dc6f055d723aaa7ec4f6815ca6a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/606.txt @@ -0,0 +1 @@ + machine learning and deep learning models are potential vectors for various attack scenarios. for example, previous research has shown that malware can be hidden in deep learning models. hiding information in a learning model can be viewed as a form of steganography. in this research, we consider the general question of the steganographic capacity of learning models. specifically, for a wide range of models, we determine the number of low-order bits of the trained parameters that can be overwritten, without adversely affecting model performance. for each model considered, we graph the accuracy as a function of the number of low-order bits that have been overwritten, and for selected models, we also analyze the steganographic capacity of individual layers. the models that we test include the classic machine learning techniques of linear regression (lr) and support vector machine (svm); the popular general deep learning models of multilayer perceptron (mlp) and convolutional neural network (cnn); the highly-successful recurrent neural network (rnn) architecture of long short-term memory (lstm); the pre-trained transfer learning-based models vgg16, densenet121, inceptionv3, and xception; and, finally, an auxiliary classifier generative adversarial network (acgan). in all cases, we find that a majority of the bits of each trained parameter can be overwritten before the accuracy degrades. of the models tested, the steganographic capacity ranges from 7.04 kb for our lr experiments, to 44.74 mb for inceptionv3. we discuss the implications of our results and consider possible avenues for further research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/607.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/607.txt new file mode 100644 index 0000000000000000000000000000000000000000..a1282f0dd41fc9ff740bc30853914694524cabed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/607.txt @@ -0,0 +1 @@ + in drug discovery, mapping interactions between genes within cellular systems is a crucial early step. 
this helps formulate hypotheses regarding molecular mechanisms that could potentially be targeted by future medicines. the causalbench challenge was an initiative to invite the machine learning community to advance the state of the art in constructing gene-gene interaction networks. these networks, derived from large-scale, real-world datasets of single cells under various perturbations, are crucial for understanding the causal mechanisms underlying disease biology. using the framework provided by the causalbench benchmark, participants were tasked with enhancing the capacity of the state of the art methods to leverage large-scale genetic perturbation data. this report provides an analysis and summary of the methods submitted during the challenge to give a partial image of the state of the art at the time of the challenge. the winning solutions significantly improved performance compared to previous baselines, establishing a new state of the art for this critical task in biology and medicine. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/608.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/608.txt new file mode 100644 index 0000000000000000000000000000000000000000..31bfcd6bf294f8bc86a6d7050a94ede0c0fd3dd4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/608.txt @@ -0,0 +1 @@ + this paper addresses the problem of localization, which is inherently non-convex and non-smooth in a federated setting where the data is distributed across a multitude of devices. due to the decentralized nature of federated environments, distributed learning becomes essential for scalability and adaptability. moreover, these environments are often plagued by outlier data, which presents substantial challenges to conventional methods, particularly in maintaining estimation accuracy and ensuring algorithm convergence. to mitigate these challenges, we propose a method that adopts an l1-norm robust formulation within a distributed sub-gradient framework, explicitly designed to handle these obstacles. our approach addresses the problem in its original form, without resorting to iterative simplifications or approximations, resulting in enhanced computational efficiency and improved estimation accuracy. we demonstrate that our method converges to a stationary point, highlighting its effectiveness and reliability. through numerical simulations, we confirm the superior performance of our approach, notably in outlier-rich environments, which surpasses existing state-of-theart localization methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/609.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/609.txt new file mode 100644 index 0000000000000000000000000000000000000000..6c1de3ad422f62e9146a111af26d082f4b77f801 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/609.txt @@ -0,0 +1 @@ + in the current data driven era, synthetic data, artificially generated data that resembles the characteristics of real world data without containing actual personal information, is gaining prominence. this is due to its potential to safeguard privacy, increase the availability of data for research, and reduce bias in machine learning models. this paper investigates the policies governing the creation, utilization, and dissemination of synthetic data. 
synthetic data can be a powerful instrument for protecting the privacy of individuals, but it also presents challenges, such as ensuring its quality and authenticity. a well crafted synthetic data policy must strike a balance between privacy concerns and the utility of data, ensuring that it can be utilized effectively without compromising ethical or legal standards. organizations and institutions must develop standardized guidelines and best practices in order to capitalize on the benefits of synthetic data while addressing its inherent challenges. recommended technical actions:• use diverse data sources when creating synthetic datasets • use different types of generative ai models to create synthetic datasets • disclose or watermark all synthetic data and its provenance • calculate and disclose quality metrics for synthetic data • develop cybersecurity protocols to protect synthetic data and its source • prioritise non-synthetic data if possible recommended policy actions: • link synthetic data to global ai governance efforts • recognise synthetic data as a critical and unique issue in global data governance • establish global quality standards and security measures • promote global research networks on the safe and ethical use of synthetic data • clarify ethical guidelines, including transparency \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/61.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/61.txt new file mode 100644 index 0000000000000000000000000000000000000000..f62a869b5606ff24ed2a14e9a798ec29bc9c8888 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/61.txt @@ -0,0 +1 @@ + reinforcement learning is a machine learning approach based on behavioral psychology. it is focused on learning agents that can acquire knowledge and learn to carry out new tasks by interacting with the environment. however, a problem occurs when reinforcement learning is used in critical contexts where the users of the system need to have more information and reliability for the actions executed by an agent. in this regard, explainable reinforcement learning seeks to provide to an agent in training with methods in order to explain its behavior in such a way that users with no experience in machine learning could understand the agent's behavior. one of these is the memorybased explainable reinforcement learning method that is used to compute probabilities of success for each state-action pair using an episodic memory. in this work, we propose to make use of the memory-based explainable reinforcement learning method in a hierarchical environment composed of sub-tasks that need to be first addressed to solve a more complex task. the end goal is to verify if it is possible to provide to the agent the ability to explain its actions in the global task as well as in the subtasks. the results obtained showed that it is possible to use the memory-based method in hierarchical environments with highlevel tasks and compute the probabilities of success to be used as a basis for explaining the agent's behavior. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/610.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/610.txt new file mode 100644 index 0000000000000000000000000000000000000000..4b323da2d7e8bdc0137eac27171188e2164ff734 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/610.txt @@ -0,0 +1 @@ + self-explainable deep neural networks are a recent class of models that can output ante-hoc local explanations that are faithful to the model's reasoning, and as such represent a step forward toward filling the gap between expressiveness and interpretability. self-explainable graph neural networks (gnns) aim at achieving the same in the context of graph data. this begs the question: do these models fulfill their implicit guarantees in terms of faithfulness? in this extended abstract, we analyze the faithfulness of several self-explainable gnns using different measures of faithfulness, identify several limitations -both in the models themselves and in the evaluation metrics -and outline possible ways forward. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/611.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/611.txt new file mode 100644 index 0000000000000000000000000000000000000000..07b1fe96b348cf899c7023b1328a5fcd468d44a5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/611.txt @@ -0,0 +1 @@ + slating a product for release often involves pitching journalists to run stories on your press release. good media coverage often ensures greater product reach and drives audience engagement for those products. hence, ensuring that those releases are pitched to the right journalists with relevant interests is crucial, since they receive several pitches daily. keeping up with journalist beats and curating a media contacts list is often a huge and time-consuming task. this study proposes a model to automate and expedite the process by recommending suitable journalists to run media coverage on the press releases provided by the user. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/612.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/612.txt new file mode 100644 index 0000000000000000000000000000000000000000..b5f73c3a8d46ad2ba558de35d0d21cc6800d3c90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/612.txt @@ -0,0 +1 @@ + we consider the formulation of "machine unlearning" of sekhari, acharya, kamath, and suresh (neurips 2021), which formalizes the so-called "right to be forgotten" by requiring that a trained model, upon request, should be able to 'unlearn' a number of points from the training data, as if they had never been included in the first place. sekhari et al. established some positive and negative results about the number of data points that can be successfully unlearnt by a trained model without impacting the model's accuracy (the "deletion capacity"), showing that machine unlearning could be achieved by using differentially private (dp) algorithms. however, their results left open a gap between upper and lower bounds on the deletion capacity of these algorithms: our work fully closes this gap, obtaining tight bounds on the deletion capacity achievable by dp-based machine unlearning algorithms. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/613.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/613.txt new file mode 100644 index 0000000000000000000000000000000000000000..2bb5d72c922e2a54145566105f7d69790e440abc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/613.txt @@ -0,0 +1 @@ + implicit neural networks have demonstrated remarkable success in various tasks. however, there is a lack of theoretical analysis of the connections and differences between implicit and explicit networks. in this paper, we study high-dimensional implicit neural networks and provide the high-dimensional equivalents for the corresponding conjugate kernels and neural tangent kernels. built upon this, we establish the equivalence between implicit and explicit networks in high dimensions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/614.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/614.txt new file mode 100644 index 0000000000000000000000000000000000000000..25dd7ce9109ffa2de0b1fe0b51fe5495a4afa7ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/614.txt @@ -0,0 +1 @@ + deep neural networks have shown remarkable performance when trained on independent and identically distributed data from a fixed set of classes. however, in real-world scenarios, it can be desirable to train models on a continuous stream of data where multiple classification tasks are presented sequentially. this scenario, known as continual learning (cl), poses challenges to standard learning algorithms which struggle to maintain knowledge of old tasks while learning new ones. this stability-plasticity dilemma remains central to cl and multiple metrics have been proposed to adequately measure stability and plasticity separately. however, none considers the increasing difficulty of the classification task, which inherently results in performance loss for any model. in that sense, we analyze some limitations of current metrics and identify the presence of setup-induced forgetting. therefore, we propose new metrics that account for the task's increasing difficulty. through experiments on benchmark datasets, we demonstrate that our proposed metrics can provide new insights into the stability-plasticity trade-off achieved by models in the continual learning environment. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/615.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/615.txt new file mode 100644 index 0000000000000000000000000000000000000000..34e05640583ac4264a51afc3becd31b6be5015e2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/615.txt @@ -0,0 +1 @@ + despite recent advances in the field of explainability, much remains unknown about the algorithms that neural networks learn to represent. recent work has attempted to understand trained models by decomposing them into functional circuits (csordás et al., 2020; lepori et al., 2023). to advance this research, we developed neurosurgeon, a python library that can be used to discover and manipulate subnetworks within models in the huggingface transformers library (wolf et al., 2019). neurosurgeon is freely available at https://github.com/mlepori1/neurosurgeon.
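The continual-learning abstract above (target 614) concerns metrics for stability and plasticity. Purely as background, a commonly used quantity of this kind is average forgetting computed from a task-by-checkpoint accuracy matrix; the sketch below is that textbook measure under my own naming assumptions, not the new metrics the paper proposes.

```python
# Generic average-forgetting metric (background only, not the paper's proposal).
# acc[i, j] = accuracy on task j measured after training on task i (i >= j).
import numpy as np

def average_forgetting(acc):
    """Mean drop from the best accuracy ever reached on each old task."""
    acc = np.asarray(acc, dtype=float)
    n_tasks = acc.shape[0]
    drops = [acc[:-1, j].max() - acc[-1, j] for j in range(n_tasks - 1)]
    return float(np.mean(drops))

# example: three tasks learned sequentially
acc = np.array([[0.95, 0.00, 0.00],
                [0.90, 0.92, 0.00],
                [0.85, 0.88, 0.93]])
print(average_forgetting(acc))  # (0.95-0.85 + 0.92-0.88) / 2 = 0.07
```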
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/616.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/616.txt new file mode 100644 index 0000000000000000000000000000000000000000..48abc4598834a392702b349ffba0fb2f86ab9c19 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/616.txt @@ -0,0 +1 @@ + the recent long-range graph benchmark (lrgb, dwivedi et al. 2022) introduced a set of graph learning tasks strongly dependent on long-range interaction between vertices. empirical evidence suggests that on these tasks graph transformers significantly outperform message passing gnns (mpgnns). in this paper, we carefully reevaluate multiple mpgnn baselines as well as the graph transformer gps (rampášek et al. 2022) on lrgb. through a rigorous empirical analysis, we demonstrate that the reported performance gap is overestimated due to suboptimal hyperparameter choices. it is noteworthy that across multiple datasets the performance gap completely vanishes after basic hyperparameter optimization. in addition, we discuss the impact of lacking feature normalization for lrgb's vision datasets and highlight a spurious implementation of lrgb's link prediction metric. the principal aim of our paper is to establish a higher standard of empirical rigor within the graph machine learning community. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/617.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/617.txt new file mode 100644 index 0000000000000000000000000000000000000000..35d7a1d2bd15c2b645c888c298e1ed33a904701b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/617.txt @@ -0,0 +1 @@ + reinforcement learning with human feedback (rlhf) has revolutionized language modeling by aligning models with human preferences. however, the rl stage, proximal policy optimization (ppo), requires over 3x the memory of supervised fine-tuning (sft), making it infeasible to use for most practitioners. to address this issue, we present a comprehensive analysis the memory usage, performance, and training time of memory-savings techniques for ppo. we introduce hydra-rlhf by first integrating the sft and reward models and then dynamically turning lora "off" during training. our experiments show: 1. using lora during ppo reduces its memory usage to be smaller than sft while improving alignment across four public benchmarks, and 2. hydra-ppo reduces the latency per sample of lora-ppo by up to 65% while maintaining its performance. our results demonstrate that hydra-ppo is a simple and promising solution for enabling more widespread usage of rlhf. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/618.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/618.txt new file mode 100644 index 0000000000000000000000000000000000000000..d9291ad53205fc0ccc188c9b0589f367d1acb8f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/618.txt @@ -0,0 +1 @@ + conventional congestion control (cc) algorithms, such as tcp cubic, struggle in tactical environments as they misinterpret packet loss and fluctuating network performance as congestion symptoms. recent efforts, including our own marlin, have explored the use of reinforcement learning (rl) for cc, but they often fall short of generalization, particularly in competitive, unstable, and unforeseen scenarios. 
to address these challenges, this paper proposes an rl framework that leverages an accurate and parallelizable emulation environment to reenact the conditions of a tactical network. we also introduce refined rl formulation and performance evaluation methods tailored for agents operating in such intricate scenarios. we evaluate our rl learning framework by training a marlin agent in conditions replicating a bottleneck link transition between a satellite communication (satcom) and an uhf wide band (uhf) radio link. finally, we compared its performance in file transfer tasks against transmission control protocol (tcp) cubic and the default strategy implemented in the mockets tactical communication middleware. the results demonstrate that the marlin rl agent outperforms both tcp and mockets under different perspectives and highlight the effectiveness of specialized rl solutions in optimizing cc for tactical network environments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/619.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/619.txt new file mode 100644 index 0000000000000000000000000000000000000000..a27c27058d0e15582546a05bfd44fc8f2e0ab235 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/619.txt @@ -0,0 +1 @@ + active automata learning algorithms cannot easily handle conflict in the observation data (different outputs observed for the same inputs). this inherent inability to recover after a conflict impairs their effective applicability in scenarios where noise is present or the system under learning is mutating.we propose the conflict-aware active automata learning (c 3al) framework to enable handling conflicting information during the learning process. the core idea is to consider the so-called observation tree as a first-class citizen in the learning process. though this idea is explored in recent work, we take it to its full effect by enabling its use with any existing learner and minimizing the number of tests performed on the system under learning, specially in the face of conflicts. we evaluate c 3al in a large set of benchmarks, covering over 30 different realistic targets, and over 18,000 different scenarios. the results of the evaluation show that c 3al is a suitable alternative framework for closed-box learning that can better handle noise and mutations.conflict-aware active automata learning current mat learners cannot handle the conflicts that arise during learning. thus, when used in practice, mat learner implementations use artifacts to circumvent conflicting observations. for example, in the case of noise, each interaction has a chance of diverging from its usual behavior. to handle this, mat learners repeat each query n times and majority-vote the result. they aim to guess an n sufficiently large to prevent any noisy observation from reaching the learner, but small enough to let the computation finish before timeout. as a consequence, noise threatens both efficiency and correctness of learning. we provide a framework alleviating this issue without tailoring it to specific mat learners.irrespective of the nature of the conflicts detected, dealing with them requires the ability to backtrack certain decisions that were made based on what is now considered incorrect information. 
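A minimal sketch of the repeat-and-majority-vote workaround that the abstract above attributes to classic MAT learners when observations are noisy. Here run_on_sul is a hypothetical stand-in for executing an input word on the system under learning and observing its output; it is not part of any specific library.

```python
# Hypothetical illustration of majority-voted output queries for noisy systems.
from collections import Counter

def majority_vote_query(run_on_sul, word, n_repeats=5):
    """Run the same input word n_repeats times and keep the most common output."""
    outputs = [run_on_sul(word) for _ in range(n_repeats)]
    return Counter(outputs).most_common(1)[0][0]
```

As the abstract notes, choosing n_repeats is a trade-off: too small lets noisy observations reach the learner, while too large inflates the query budget that conflict-aware learning aims to reduce.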
this pinpoints the issue with current mat learners: there is no notion of information storage other than the internal data structure that the learners use to build the model, which is not easily updatable in the face of conflict. this structure in fact needs to be fully rebuilt if a conflict is found, generating many superfluous (and expensive!) queries to the sul. separating the learning process from the information gathered through the queries allows us to retain all the previous non-conflicting information. this alleviates the main cost of conflict handling: the unnecessary repetition of tests on the system. the learner then only needs to rebuild its data structure based on the information already available.contribution based on the ideas above, this paper proposes the conflict aware active automata learning (c 3al, pronounced seal) framework. any existing mat learner can be used in c 3al. when a conflict arises, we provide a method for updating the learner's internal state -without making assumptions on its data-structure -so that it remains conflict-free while removing only inconsistent information.in a nutshell, this paper aims to provide classic mat learners with a way to recover from conflicts caused by either noise or potential mutations of the system.at the heart of c 3al is the use of an observation tree, a data structure (external to the learner) used to store information gathered from the sul. it can be efficiently updated and used by the learner to construct its own internal data structure. when a conflict appears, we update the observation tree to reflect our knowledge, while the learner's data structure is pruned to a conflict-free point and then expanded from the observation tree. crucially, the learner uses the observations already stored in the tree without requiring tests on the sul for already observed behaviors. c 3al's main features are:• the sul is a first-class citizen, instead of being abstracted. c 3al notably does not rely on equivalence queries, replacing them with either a check of the stored knowledge (when sufficient) or an equivalence test, using an m-complete testing algorithm (e.g. the or hybrid-ads ).• the information obtained through tests on the sul is stored in an observation tree managed by a new reviser agent that is responsible for handling the conflicts and answering the learner's queries like a teacher. providing a teacher interface is an important aspect as it enables the use of any mat-based algorithm seamlessly, only requiring the ability to restart a classic mat learner.• the reviser alone interacts with the sul by means of tests meant to expand its observation tree.crucially, c 3al is less abstract than mat, representing directly the objects and challenges of practical active learning, while still allowing the design of learners to enjoy the simplifying abstraction of mat. after some preliminaries in section 2 we formalize and prove the above claims in section 3. we evaluate c 3al in section 4 using a broad range of experiments . we compare several state-of-theart algorithms (namely l ⋆ , kv , ttt and l # ) for targets of different sizes and different levels of noise, while varying the controllable parameters for both mat and c 3al. 
the experimental \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/62.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/62.txt new file mode 100644 index 0000000000000000000000000000000000000000..27bd30d58827a9f24ae5dbf0bfc6959159ed8e9a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/62.txt @@ -0,0 +1 @@ + this paper contributes towards better understanding the energy consumption trade-offs of hpc scale artificial intelligence (ai), and more specifically deep learning (dl) algorithms. for this task we developed benchmarktracker, a benchmark tool to evaluate the speed and energy consumption of dl algorithms in hpc environments. we exploited hardware counters and python libraries to collect energy information through software, which enabled us to instrument a known ai benchmark tool, and to evaluate the energy consumption of numerous dl algorithms and models. through an experimental campaign, we show a case example of the potential of benchmark-tracker to measure the computing speed and the energy consumption for training and inference dl algorithms, and also the potential of benchmark-tracker to help better understanding the energy behavior of dl algorithms in hpc platforms. this work is a step forward to better understand the energy consumption of deep learning in hpc, and it also contributes with a new tool to help hpc dl developers to better balance the hpc infrastructure in terms of speed and energy consumption. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/620.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/620.txt new file mode 100644 index 0000000000000000000000000000000000000000..b41b0395d76b96228ddb60f540ca9f51df4943f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/620.txt @@ -0,0 +1 @@ + addressing missing data in complex datasets including electronic health records (ehr) is critical for ensuring accurate analysis and decision-making in healthcare. this paper proposes dynamically adaptable structural equation modeling (sem) using a self-attention method (sesa), an approach to data imputation in ehr. sesa innovates beyond traditional sem-based methods by incorporating self-attention mechanisms, thereby enhancing model adaptability and accuracy across diverse ehr datasets. such enhancement allows sesa to dynamically adjust and optimize imputation and overcome the limitations of static sem frameworks. our experimental analyses demonstrate the achievement of robust predictive sesa performance for effectively handling missing data in ehr. moreover, the sesa architecture not only rectifies potential mis-specifications in sem but also synergizes with causal discovery algorithms to refine its imputation logic based on underlying data structures. such features highlight its capabilities and broadening applicational potential in ehr data analysis and beyond, marking a reasonable leap forward in the field of data imputation. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/621.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/621.txt new file mode 100644 index 0000000000000000000000000000000000000000..cac6518d7bbc0607ac81199e454e8dfbc9c2cab5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/621.txt @@ -0,0 +1 @@ + low-count time series describe sparse or intermittent events, which are prevalent in large-scale online platforms that capture and monitor diverse data types. several distinct challenges surface when modelling low-count time series, particularly low signal-to-noise ratios (when anomaly signatures are provably undetectable), and non-uniform performance (when average metrics are not representative of local behaviour). the time series anomaly detection community currently lacks explicit tooling and processes to model and reliably detect anomalies in these settings. we address this gap by introducing a novel generative procedure for creating benchmark datasets comprising of low-count time series with anomalous segments. via a mixture of theoretical and empirical analysis, our work explains how widely-used algorithms struggle with the distribution overlap between normal and anomalous segments. in order to mitigate this shortcoming, we then leverage our findings to demonstrate how anomaly score smoothing consistently improves performance. the practical utility of our analysis and recommendation is validated on a real-world dataset containing sales data for retail stores. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/622.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/622.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae18ac03de1630629759c5e6c5a21ced0ec1e374 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/622.txt @@ -0,0 +1 @@ + imitation learning (il) is an important paradigm within the broader reinforcement learning (rl) methodology. unlike most of rl, it does not assume availability of rewardfeedback. reward inference and shaping are known to be difficult and error-prone methods particularly when the demonstration data comes from human experts. classical methods such as behavioral cloning and inverse reinforcement learning are highly sensitive to estimation errors, a problem that is particularly acute in continuous state space problems. meanwhile, state-of-the-art il algorithms convert behavioral policy learning problems into distribution-matching problems which often require additional online interaction data to be effective. in this paper, we consider the problem of imitation learning in continuous state space environments based solely on observed behavior, without access to transition dynamics information, reward structure, or, most importantly, any additional interactions with the environment. our approach is based on the markov balance equation and introduces a novel conditional kernel density estimation-based imitation learning framework. it involves estimating the environment's transition dynamics using conditional kernel density estimators and seeks to satisfy the probabilistic balance equations for the environment. we establish that our estimators satisfy basic asymptotic consistency requirements. through a series of numerical experiments on continuous state benchmark environments, we show consistently superior empirical performance over many state-of-the-art il algorithms. 
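For background on the conditional kernel density estimation mentioned in the imitation-learning abstract above (target 622), here is a textbook Nadaraya-Watson-style estimate of p(y | x) with Gaussian kernels. The bandwidths and variable names are placeholder assumptions; this is not the paper's estimator.

```python
# Textbook Gaussian-kernel conditional density estimate p(y | x) (background only).
import numpy as np

def conditional_kde(x_query, y_query, x_samples, y_samples, hx=0.3, hy=0.3):
    """Estimate p(y_query | x_query) from paired 1-d samples (x_i, y_i)."""
    x_samples = np.asarray(x_samples, dtype=float)
    y_samples = np.asarray(y_samples, dtype=float)
    wx = np.exp(-0.5 * ((x_samples - x_query) / hx) ** 2)   # unnormalized kernel weights in x
    ky = np.exp(-0.5 * ((y_samples - y_query) / hy) ** 2) / (hy * np.sqrt(2.0 * np.pi))
    return float(np.sum(wx * ky) / (np.sum(wx) + 1e-12))
```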
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/623.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/623.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f0824f5d866b9029c2b1fc57b78994f5c0f895b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/623.txt @@ -0,0 +1 @@ + the intensive care unit (icu) is one of the most important parts of a hospital, which admits critically ill patients and provides continuous monitoring and treatment. various patient outcome prediction methods have been attempted to assist healthcare professionals in clinical decision-making. existing methods focus on measuring the similarity between patients using deep neural networks to capture the hidden feature structures. however, the higher-order relationships are ignored, such as patient characteristics (e.g., diagnosis codes) and their causal effects on downstream clinical predictions. in this paper, we propose a novel hypergraph convolutional network that allows the representation of non-pairwise relationships among diagnosis codes in a hypergraph to capture the hidden feature structures so that fine-grained patient similarity can be calculated for personalized mortality risk prediction. evaluation using a publicly available eicu collaborative research database indicates that our method achieves superior performance over the stateof-the-art models on mortality risk prediction. moreover, the results of several case studies demonstrated the effectiveness and robustness of the model decisions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/624.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/624.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b4dd92064549361c5872aa35f3138ccf65b2d34 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/624.txt @@ -0,0 +1 @@ + we study optimal data pooling for shared learning in two common maintenance operations: condition-based maintenance and spare parts management. we consider a set of systems subject to poisson input -the degradation or demand process -that are coupled through an a-priori unknown rate. decision problems involving these systems are high-dimensional markov decision processes (mdps) and hence notoriously difficult to solve. we present a decomposition result that reduces such an mdp to two-dimensional mdps, enabling structural analyses and computations.leveraging this decomposition, we (i) demonstrate that pooling data can lead to significant cost reductions compared to not pooling, and (ii) show that the optimal policy for the condition-based maintenance problem is a control limit policy, while for the spare parts management problem, it is an order-up-to level policy, both dependent on the pooled data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/625.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/625.txt new file mode 100644 index 0000000000000000000000000000000000000000..26fe0f90bcb8d030c4e72af62d1a231b814ed669 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/625.txt @@ -0,0 +1 @@ + while traditional deep learning models often lack interpretability, concept bottleneck models (cbms) provide inherent explanations via their concept representations. 
specifically, they allow users to perform interventional interactions on these concepts by updating the concept values and thus correcting the predictive output of the model. traditionally, however, these interventions are applied to the model only once and discarded afterward. to rectify this, we present concept bottleneck memory models (cb2m), an extension to cbms. specifically, a cb2m learns to generalize interventions to appropriate novel situations via a two-fold memory with which it can learn to detect mistakes and to reapply previous interventions. in this way, a cb2m learns to automatically improve model performance from a few initially obtained interventions. if no prior human interventions are available, a cb2m can detect potential mistakes of the cbm bottleneck and request targeted interventions. in our experimental evaluations on challenging scenarios like handling distribution shifts and confounded training data, we illustrate that cb2m are able to successfully generalize interventions to unseen data and can indeed identify wrongly inferred concepts. overall, our results show that cb2m is a great tool for users to provide interactive feedback on cbms, e.g., by guiding a user's interaction and requiring fewer interventions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/626.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/626.txt new file mode 100644 index 0000000000000000000000000000000000000000..a331491ba9597f81a670010a79b972a9683b1d23 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/626.txt @@ -0,0 +1 @@ + compact neural network offers many benefits for real-world applications. however, it is usually challenging to train the compact neural networks with small parameter sizes and low computational costs to achieve the same or better model performance compared to more complex and powerful architecture. this is particularly true for multitask learning, with different tasks competing for resources. we present a simple, efficient and effective multitask learning overparameterisation neural network design by overparameterising the model architecture in training and sharing the overparameterised model parameters more effectively across tasks, for better optimisation and generalisation. experiments on two challenging multitask datasets (nyuv2 and coco) demonstrate the effectiveness of the proposed method across various convolutional networks and parameter sizes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/627.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/627.txt new file mode 100644 index 0000000000000000000000000000000000000000..54dbde5fdf3a41fee989d2b4d34ccc15a6c792c6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/627.txt @@ -0,0 +1 @@ + in this paper we describe a deep learning-based probabilistic algorithm for integer factorisation. we use lawrence's extension of fermat's factorisation algorithm to reduce the integer factorisation problem to a binary classification problem. to address the classification problem, based on the ease of generating large pseudo-random primes, a corpus of training data, as large as needed, is synthetically generated. 
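For context on the factorisation abstract above (target 627), the sketch below is plain Fermat factorisation, which searches for a and b with n = a^2 - b^2 = (a - b)(a + b). Lawrence's extension and the learned classifier described in the abstract are not reproduced here.

```python
# Classic Fermat factorisation (background only; not the paper's learned variant).
import math

def fermat_factor(n):
    """Return a factor pair (p, q) with p * q == n, for odd n >= 3."""
    a = math.isqrt(n)
    if a * a < n:
        a += 1
    while True:
        b2 = a * a - n
        b = math.isqrt(b2)
        if b * b == b2:          # found n = a^2 - b^2
            return a - b, a + b
        a += 1

print(fermat_factor(5959))       # (59, 101)
```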
we will introduce the algorithm, summarise some experiments, analyse where these experiments fall short, and finally put out a call to others to reproduce, verify and see if this approach can be improved to a point where it becomes a practical, scalable factorisation algorithm. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/628.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/628.txt new file mode 100644 index 0000000000000000000000000000000000000000..1792b2775fc573451893e112131d179876c3e5e1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/628.txt @@ -0,0 +1 @@ + machine learning techniques, in particular the so-called normalizing flows, are becoming increasingly popular in the context of monte carlo simulations as they can effectively approximate target probability distributions. in the case of lattice field theories (lft) the target distribution is given by the exponential of the action. the common loss function's gradient estimator based on the "reparametrization trick" requires the calculation of the derivative of the action with respect to the fields. this can present a significant computational cost for complicated, non-local actions like e.g. fermionic action in qcd. in this contribution, we propose an estimator for normalizing flows based on the reinforce algorithm that avoids this issue. we apply it to two dimensional schwinger model with wilson fermions at criticality and show that it is up to ten times faster in terms of the wall-clock time as well as requiring up to 30% less memory than the reparameterization trick estimator. it is also more numerically stable allowing for single precision calculations and the use of half-float tensor cores. we present an in-depth analysis of the origins of those improvements. we believe that these benefits will appear also outside the realm of the lft, in each case where the target probability distribution is computationally intensive. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/629.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/629.txt new file mode 100644 index 0000000000000000000000000000000000000000..9e1bd5c00f942161768fd91d43d675a8f61d50a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/629.txt @@ -0,0 +1 @@ + generative adversarial networks (gans) have recently become a popular data augmentation technique used by machine learning practitioners. however, they have been shown to suffer from the so-called mode collapse failure mode, which makes them vulnerable to exacerbating biases on already skewed datasets, resulting in the generated data distribution being less diverse than the training distribution. to this end, we address the problem of quantifying the extent to which mode collapse occurs. this study is a systematic effort focused on the evaluation of state-of-the-art metrics that can potentially quantify biases in gan-augmented data. we show that, while several such methods are available, there is no single metric that quantifies bias exacerbation reliably over the span of different image domains. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/63.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/63.txt new file mode 100644 index 0000000000000000000000000000000000000000..efd69d5a152f2cbfba827b5726f88919dee18b5b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/63.txt @@ -0,0 +1 @@ + recently, there has been increasing interest in transparency and interpretability in deep reinforcement learning (drl) systems. verbal explanations, as the most natural way of communication in our daily life, deserve more attention, since they allow users to gain a better understanding of the system which ultimately could lead to a high level of trust and smooth collaboration. this paper reports a novel work in generating verbal explanations for drl agent's behaviors. a rule-based model is designed to construct explanations using a series of rules which are predefined with prior knowledge. a learning model is then proposed to expand the implicit logic of generating verbal explanation to general situations by employing rule-based explanations as training data. the learning model is shown to have better flexibility and generalizability than the static rule-based model. the performance of both models is evaluated quantitatively through objective metrics. the results show that verbal explanation generated by both models improve users' subjective satisfaction towards the interpretability of drl systems. additionally, seven variants of the learning model are designed to illustrate the contribution of input channels, attention mechanism, and proposed encoder in improving the quality of verbal explanation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/630.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/630.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a41f1a72af934d6af4656a8c6062d7f40a22c13 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/630.txt @@ -0,0 +1 @@ + the performance of modern wireless communications systems depends critically on the quality of the available channel state information (csi) at the transmitter and receiver. several previous works have proposed concepts and algorithms that help maintain high quality csi even in the presence of high mobility and channel aging, such as temporal prediction schemes that employ neural networks. however, it is still unclear which neural network-based scheme provides the best performance in terms of prediction quality, training complexity and practical feasibility. to investigate such a question, this paper first provides an overview of state-of-the-art neural networks applicable to channel prediction and compares their performance in terms of prediction quality. next, a new comparative analysis is proposed for four promising neural networks with different prediction horizons. the well-known tapped delay channel model recommended by the third generation partnership program is used for a standardized comparison among the neural networks. based on this comparative evaluation, the advantages and disadvantages of each neural network are discussed and guidelines for selecting the best-suited neural network in channel prediction applications are given. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/631.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/631.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d897e306bde158fd8f4dd1572ac1c740fa66b51 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/631.txt @@ -0,0 +1 @@ + self-supervised learning (ssl) is a growing torrent that has recently transformed machine learning and its many real-world applications, by learning on massive amounts of unlabeled data via self-generated supervisory signals. unsupervised anomaly detection (ad) has also capitalized on ssl, by self-generating pseudo-anomalies through various data augmentation functions or external data exposure. in this vision paper, we first underline the importance of the choice of ssl strategies on ad performance, by presenting evidence and studies from the ad literature. equipped with the understanding that ssl incurs various hyperparameters (hps) to carefully tune, we present recent developments on unsupervised model selection and augmentation tuning for ssl-based ad. we then highlight emerging challenges and future opportunities: on designing new pretext tasks and augmentation functions for different data modalities, creating novel model selection solutions for systematically tuning the ssl hps, as well as on capitalizing on the potential of pretrained foundation models on ad through effective density estimation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/632.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/632.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a73b85bc086888bcdf8c044418131f2c183aa24 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/632.txt @@ -0,0 +1 @@ + residential occupancy detection has become an enabling technology in today's urbanized world for various smart home applications, such as building automation, energy management, and improved security and comfort. digitalization of the energy system provides smart meter data that can be used for occupancy detection in a non-intrusive manner without causing concerns regarding privacy and data security. in particular, deep learning techniques make it possible to infer occupancy from low-resolution smart meter data, such that the need for accurate occupancy detection with privacy preservation can be achieved. our work is thus motivated to develop a privacy-aware and effective model for residential occupancy detection in contemporary living environments. our model aims to leverage the advantages of both recurrent neural networks (rnns), which are adept at capturing local temporal dependencies, and transformers, which are effective at handling global temporal dependencies. our designed hybrid transformer-rnn model detects residential occupancy using hourly smart meter data, achieving an accuracy of nearly 92% across households with diverse profiles. we validate the effectiveness of our method using a publicly accessible dataset and demonstrate its performance by comparing it with state-of-the-art models, including attention-based occupancy detection methods.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/633.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/633.txt new file mode 100644 index 0000000000000000000000000000000000000000..062a663458ebf38726847f6308a1d3149927df91 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/633.txt @@ -0,0 +1 @@ + as a promising paradigm federated learning (fl) is widely used in privacy-preserving machine learning, which allows distributed devices to collaboratively train a model while avoiding data transmission among clients. despite its immense potential, the fl suffers from bottlenecks in training speed due to client heterogeneity, leading to escalated training latency and straggling server aggregation. to deal with this challenge, a novel split federated learning (sfl) framework that pairs clients with different computational resources is proposed, where clients are paired based on computing resources and communication rates among clients, meanwhile the neural network model is split into two parts at the logical level, and each client only computes the part assigned to it by using the sl to achieve forward inference and backward training. moreover, to effectively deal with the client pairing problem, a heuristic greedy algorithm is proposed by reconstructing the optimization of training latency as a graph edge selection problem. simulation results show the proposed method can significantly improve the fl training speed and achieve high performance both in independent identical distribution (iid) and non-iid data distribution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/634.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/634.txt new file mode 100644 index 0000000000000000000000000000000000000000..7cf048122057d3edd2077d717291d9f5212ff846 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/634.txt @@ -0,0 +1 @@ + machine learning models play a vital role in making predictions and deriving insights from data. however, in some cases, the deployment of these models can have unintended consequences or introduce biases. in the field of causal analysis , where understanding cause-and-effect relationships is crucial, it becomes essential to have models that accurately capture causal relationships. at the same time, to preserve user privacy, it is important to enable the model to 'forget' some of its learning/captured information about a given user. this is where machine unlearning is useful. additionally, machine unlearning can aid in handling concept drift, where models adapt to changing data distributions over time, by allowing them to unlearn outdated patterns and learn from more recent data. this paper introduces the concept of machine unlearning for causal inference, particularly propensity score matching and treatment effect estimation, which aims to refine and improve the performance of machine learning models for causal analysis given the above unlearning requirements.in the field of causal analysis, understanding cause-and-effect relationships is vital, and accurately capturing these relationships requires models that can handle biases and unwanted associations effectively. additionally, ensuring user privacy is equally important, and enabling models to "forget" certain user-specific information is essential to comply with privacy regulations. 
therefore, the motivation behind this research is to propose a novel machine-unlearning methodology that preserves user privacy. the paper presents a methodology for machine unlearning using a neural network-based propensity score model. the dataset used in the study is the lalonde dataset, a widely used dataset for evaluating the effectiveness, i.e., the treatment effect, of job training programs. the methodology involves training an initial propensity score model on the original dataset and then creating forget sets by selectively removing instances, as well as matched instance pairs, based on propensity scores. these forget sets are used to evaluate the retrained model, allowing for the elimination of unwanted associations. the actual retraining of the model is performed using the retain set. the experimental results demonstrate the effectiveness of the machine unlearning approach. the distribution and histogram analysis of propensity scores before and after unlearning provides insights into the impact of the unlearning process on the data. this study represents the first attempt to apply unlearning techniques to causal inference. the findings of this study highlight the potential of machine unlearning techniques for refining and enhancing the accuracy of machine unlearning in causal analysis. the proposed methodology offers a framework for addressing biases and improving causal inference in various domains. further research and exploration of machine unlearning techniques hold promising avenues for improving the reliability and fairness of machine learning models in causal analysis. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/635.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/635.txt new file mode 100644 index 0000000000000000000000000000000000000000..f6740a8e3a4ce369515ec0f4527ee1c81265115f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/635.txt @@ -0,0 +1 @@ + we study regret minimization in online episodic linear markov decision processes, and propose a policy optimization algorithm that is computationally efficient, and obtains rate-optimal o(√k) regret where k denotes the number of episodes. our work is the first to establish the optimal rate (in terms of k) of convergence in the stochastic setting with bandit feedback using a policy optimization based approach, and the first to establish the optimal rate in the adversarial setup with full information feedback, for which no algorithm with an optimal rate guarantee was previously known. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/636.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/636.txt new file mode 100644 index 0000000000000000000000000000000000000000..b866e33c1e0ea3f9599579401228c565759bec48 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/636.txt @@ -0,0 +1 @@ + we present the self-encoder, a neural network trained to guess the identity of each data sample. despite its simplicity, it learns a very useful representation of data, in a self-supervised way. specifically, the self-encoder learns to distribute the data samples in the embedding space so that they are linearly separable from one another. this induces a geometry where two samples are close in the embedding space when they are not easy to differentiate. the self-encoder can then be combined with a nearest-neighbor classifier or regressor for any subsequent supervised task.
unlike regular nearest neighbors, the predictions resulting from this encoding of data are invariant to any scaling of features, making any preprocessing like min-max scaling not necessary. the experiments show the efficiency of the approach, especially on heterogeneous data mixing numerical features and categorical features. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/637.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/637.txt new file mode 100644 index 0000000000000000000000000000000000000000..fb8729e9f30c58f6c5e59a16f0d3db70eb55f089 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/637.txt @@ -0,0 +1 @@ + we propose a novel framework for solving continuous-time non-markovian stochastic optimal problems by means of neural rough differential equations (neural rdes) introduced in morrill et al. (2021). non-markovianity naturally arises in control problems due to the time delay effects in the system coefficients or the driving noises, which leads to optimal control strategies depending explicitly on the historical trajectories of the system state. by modelling the control process as the solution of a neural rde driven by the state process, we show that the control-state joint dynamics are governed by an uncontrolled, augmented neural rde, allowing for fast monte-carlo estimation of the value function via trajectories simulation and memoryefficient back-propagation. we provide theoretical underpinnings for the proposed algorithmic framework by demonstrating that neural rdes serve as universal approximators for functions of random rough paths. exhaustive numerical experiments on non-markovian stochastic control problems are presented, which reveal that the proposed framework is time-resolution-invariant and achieves higher accuracy and better stability in irregular sampling compared to existing rnnbased approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/638.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/638.txt new file mode 100644 index 0000000000000000000000000000000000000000..0f70280ab04947470d283bbe11b38a9c7611b689 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/638.txt @@ -0,0 +1 @@ + we review the literature on trainable, compressed embedding layers and discuss their applicability for compressing gigantic neural recommender systems. we also report the results we measured with our compressed embedding layers. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/639.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/639.txt new file mode 100644 index 0000000000000000000000000000000000000000..59911455977fe2d558994b14b15ff1f341e6ed24 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/639.txt @@ -0,0 +1 @@ + we address the need to generate faithful explanations of "black box" deep learning models. several tests have been proposed to determine aspects of faithfulness of explanation methods, but they lack cross-domain applicability and a rigorous methodology. hence, we select an existing test that is model agnostic and is well-suited for comparing one aspect of faithfulness (i.e., sensitivity) of multiple explanation methods, and extend it by specifying formal thresholds and building criteria to determine the overall sensitivity of the explanation method. 
we present examples of how multiple explanation methods for convolutional neural networks can be compared using this extended methodology. finally, we discuss the relationship between sensitivity and faithfulness and consider how the test can be adapted to assess different explanation methods in other domains. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/64.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/64.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca5bcc3eb45184044edce99124007a62c15c6b90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/64.txt @@ -0,0 +1 @@ + the potential held by the gargantuan volumes of data being generated across networks worldwide has been truly unlocked by machine learning techniques and more recently deep learning. the advantages offered by the latter have seen it rapidly becoming a framework of choice for various applications. however, the centralization of computational resources and the need for data aggregation have long been limiting factors in the democratization of deep learning applications. edge computing is an emerging paradigm that aims to utilize the hitherto untapped processing resources available at the network periphery. edge intelligence (ei) has quickly emerged as a powerful alternative to enable learning using the concepts of edge computing. deep learning-based edge intelligence or deep edge intelligence (dei) lies in this rapidly evolving domain. in this article, we provide an overview of the major constraints in operationalizing dei. the major research avenues in dei have been consolidated under federated learning, distributed computation, compression schemes and conditional computation. we also present some of the prevalent challenges and highlight prospective research avenues. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/640.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/640.txt new file mode 100644 index 0000000000000000000000000000000000000000..b7c7c3637c1f1b0e61237f5f3cf8a41ead815f38 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/640.txt @@ -0,0 +1 @@ + deep neural networks have produced significant progress among machine learning models in terms of accuracy and functionality, but their inner workings are still largely unknown. attribution methods seek to shine a light on these "black box" models by indicating how much each input contributed to a model's outputs. the integrated gradients (ig) method is a state of the art baseline attribution method in the axiomatic vein, meaning it is designed to conform to particular principles of attributions. we present four axiomatic characterizations of ig, establishing ig as the unique method to satisfy different sets of axioms among a class of attribution methods. 1 \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/641.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/641.txt new file mode 100644 index 0000000000000000000000000000000000000000..3e8cb36eba8bc056da6c3c8f9f5f2d522e389d81 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/641.txt @@ -0,0 +1 @@ + detecting and predicting septic shock early is crucial for the best possible outcome for patients. 
accurately forecasting the vital signs of patients with sepsis provides valuable insights to clinicians for timely interventions, such as administering stabilizing drugs or optimizing infusion strategies. our research examines n-beats, an interpretable deeplearning forecasting model that can forecast 3 hours of vital signs for sepsis patients in intensive care units (icus). in this work, we use the n-beats interpretable configuration to forecast the vital sign trends and compare them with the actual trend to understand better the patient's changing condition and the effects of infused drugs on their vital signs. we evaluate our approach using the publicly available eicu collaborative research database dataset and rigorously evaluate the vital sign forecasts using out-of-sample evaluation criteria. we present the performance of our model using error metrics, including mean squared error (mse), mean average percentage error (mape), and dynamic time warping (dtw), where the best scores achieved are 18.52e-4, 7.60, and 17.63e-3, respectively. we analyze the samples where the forecasted trend does not match the actual trend and study the impact of infused drugs on changing the actual vital signs compared to the forecasted trend. additionally, we examined the mortality rates of patients where the actual trend and the forecasted trend did not match. we observed that the mortality rate was higher (92%) when the actual and forecasted trends closely matched, compared to when they were not similar (84%). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/642.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/642.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e2a13a08be57fa86360d47e1b048d5e13374848 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/642.txt @@ -0,0 +1 @@ + in this paper we adapt the nearest neighbour rule to the contextual bandit problem. our algorithm handles the fully adversarial setting in which no assumptions at all are made about the data-generation process. when combined with a sufficiently fast data-structure for (perhaps approximate) adaptive nearest neighbour search, such as a navigating net, our algorithm is extremely efficient -having a per trial running time polylogarithmic in both the number of trials and actions, and taking only quasi-linear space. we give generic regret bounds for our algorithm and further analyse them when applied to the stochastic bandit problem in euclidean space. we note that our algorithm can also be applied to the online classification problem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/643.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/643.txt new file mode 100644 index 0000000000000000000000000000000000000000..46a7ee5e024b0bba3da3c54fce9a439e6051f11f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/643.txt @@ -0,0 +1 @@ + the present study aimed to address the issue of imbalanced data in classification tasks and evaluated the suitability of smote, adasyn, and gan techniques in generating synthetic data to address the class imbalance and improve the performance of classification models in low-resource settings. the study employed the generalised linear model (glm) algorithm for class balancing experiments and the random forest (rf) algorithm for low-resource setting experiments to assess model performance under varying training data. 
the recall metric was the primary evaluation metric for all classification models. the results of the class balancing experiments showed that the glm model trained on gan-balanced data achieved the highest recall value. similarly, in low-resource experiments, models trained on data enhanced with gan-synthesized data exhibited better recall values than original data. these findings demonstrate the potential of gan-generated synthetic data for addressing the challenge of imbalanced data in classification tasks and improving model performance in low-resource settings. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/644.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/644.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d804ef58723cdab6ade665f9cd57c6add2ce812 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/644.txt @@ -0,0 +1 @@ + inspired by solomonoff's theory of inductive inference , we propose a prior based on circuit complexity. there are several advantages to this approach. first, it relies on a complexity measure that does not depend on the choice of utm. there is one universal definition for boolean circuits involving an universal operation (e.g. nand) with simple conversions to alternative definitions (with and, or, and not). second, there is no analogue of the halting problem. the output value of a circuit can be calculated recursively by computer in time proportional to the number of gates, while a short program may run for a very long time. our prior assumes that a boolean function (or equivalently, boolean string of fixed length) is generated by some bayesian mixture of circuits. this model is appropriate for learning boolean functions from partial information, a problem often encountered within machine learning as "binary classification." we argue that an inductive bias towards simple explanations as measured by circuit complexity is appropriate for this problem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/645.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/645.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/646.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/646.txt new file mode 100644 index 0000000000000000000000000000000000000000..f71f6b6925353995944c74f5dc14685be398477e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/646.txt @@ -0,0 +1 @@ + results from the tinyml community demonstrate that, it is possible to execute machine learning models directly on the terminals themselves, even if these are small microcontrollerbased devices. however, to date, practitioners in the domain lack convenient all-in-one toolkits to help them evaluate the feasibility of executing arbitrary models on arbitrary low-power iot hardware. to this effect, we present in this paper u-toe, a universal toolkit we designed to facilitate the task of iot designers and researchers, by combining functionalities from a low-power embedded os, a generic model transpiler and compiler, an integrated performance measurement module, and an open-access remote iot testbed. 
we provide an open source implementation of u-toe and we demonstrate its use to experimentally evaluate the performance of various models, on a wide variety of low-power iot boards, based on popular microcontroller architectures. u-toe allows easily reproducible and customizable comparative evaluation experiments on a wide variety of iot hardware allat-once. the availability of a toolkit such as u-toe is desirable to accelerate research combining artificial intelligence and iot towards fully exploiting the potential of edge computing. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/647.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/647.txt new file mode 100644 index 0000000000000000000000000000000000000000..4290ba31e78a80fb653221c4172a3b4388a096ed --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/647.txt @@ -0,0 +1 @@ + the ability to detect ood data is a crucial aspect of practical machine learning applications. in this work, we show that cosine similarity between the test feature and the typical id feature is a good indicator of ood data. we propose class typical matching (ctm), a post hoc ood detection algorithm that uses a cosine similarity scoring function. extensive experiments on multiple benchmarks show that ctm outperforms existing post hoc ood detection methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/648.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/648.txt new file mode 100644 index 0000000000000000000000000000000000000000..08f1b7bb204703402ec21d8ad3850992fd21ad18 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/648.txt @@ -0,0 +1 @@ + the forward-forward algorithm presents a new method of training neural networks by updating weights during an inference, performing parameter updates for each layer individually. this immediately reduces memory requirements during training and may lead to many more benefits, like seamless online training. this method relies on a loss ("goodness") function that can be evaluated on the activations of each layer, of which can have a varied parameter size, depending on the hyperparamaterization of the network. in the seminal paper, a goodness function was proposed to fill this need; however, if placed in a one-class problem context, one need not pioneer a new loss because these functions can innately handle dynamic network sizes. in this paper, we investigate the performance of deep one-class objective functions when trained in a forward-forward fashion. the code is available at https://github.com/michaelhopwood/forwardforwardoneclass. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/649.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/649.txt new file mode 100644 index 0000000000000000000000000000000000000000..3fb61c226084bbaef360cb299c47cddef06b05d1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/649.txt @@ -0,0 +1 @@ + we explore the problem of imitation learning (il) in the context of mean-field games (mfgs), where the goal is to imitate the behavior of a population of agents following a nash equilibrium policy according to some unknown payoff function. il in mfgs presents new challenges compared to single-agent il, particularly when both the reward function and the transition kernel depend on the population distribution. 
in this paper, departing from the existing literature on il for mfgs, we introduce a new solution concept called the nash imitation gap. then we show that when only the reward depends on the population distribution, il in mfgs can be reduced to single-agent il with similar guarantees. however, when the dynamics are population-dependent, we provide a novel upper bound that suggests il is harder in this setting. to address this issue, we propose a new adversarial formulation where the reinforcement learning problem is replaced by a mean-field control (mfc) problem, suggesting progress in il within mfgs may have to build upon mfc. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/65.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/65.txt new file mode 100644 index 0000000000000000000000000000000000000000..482dbe90b587c9b7360e2d40bc47aa999de3cf3f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/65.txt @@ -0,0 +1 @@ + with the influx of myriad news accompanied by busy lifestyles, there is a pressing need to classify news according to the requirements of an individual. people are generally more interested in what is going on in their immediate surroundings. in this paper, we model this problem by classifying the news articles based on cities and providing the entity with the collection of city-specific news. we have developed our own web crawler for content extraction from the html pages of news articles. random forests, naive bayes and svm classifiers have been employed and their accuracy has been noted. results exhibit that machine learning techniques can be harnessed to achieve our goal and thus call for further research to improve the efficiency of solving this issue. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/650.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/650.txt new file mode 100644 index 0000000000000000000000000000000000000000..a06c0b8e726af0f40f75f308ba98851a8b68d4a5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/650.txt @@ -0,0 +1 @@ + in recent years, language models (lms), such as gpt-4, have been widely used in multiple domains, including natural language processing, visualization, and so on. however, applying them for analyzing and optimizing high-performance computing (hpc) software is still challenging due to the lack of hpc-specific support. in this paper, we design the lm4hpc framework to facilitate the research and development of hpc software analyses and optimizations using lms. tailored for supporting hpc datasets, ai models, and pipelines, our framework is built on top of a range of components from different levels of the machine learning software stack, with hugging face-compatible apis. using three representative tasks, we evaluated the prototype of our framework. the results show that lm4hpc can help users quickly evaluate a set of state-of-the-art models and generate insightful leaderboards. 
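the classifier comparison described in 65.txt above (random forests, naive bayes and svm on crawled news text) can be illustrated with a minimal scikit-learn sketch; the toy documents, city labels and model settings below are invented for illustration and are not the paper's data or code:

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import LinearSVC
from sklearn.pipeline import make_pipeline

# tiny invented corpus: each news snippet is labelled with the city it concerns
docs = [
    "metro line extension opens in delhi next month",
    "delhi air quality dips after festival weekend",
    "mumbai local trains delayed by heavy monsoon rains",
    "new sea link proposed to ease mumbai traffic",
]
cities = ["delhi", "delhi", "mumbai", "mumbai"]

# the three classifier families mentioned in the abstract, on shared tf-idf features
models = {
    "naive_bayes": MultinomialNB(),
    "random_forest": RandomForestClassifier(n_estimators=100, random_state=0),
    "svm": LinearSVC(),
}
for name, clf in models.items():
    pipe = make_pipeline(TfidfVectorizer(), clf)
    pipe.fit(docs, cities)  # train on the toy corpus
    print(name, pipe.predict(["flood warning issued for mumbai suburbs"]))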
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/651.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/651.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc1b8421c8811feff345d34c0061c5c6d02bb942 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/651.txt @@ -0,0 +1 @@ + we argue that insurance can act as an analogon for the social situatedness of machine learning systems, hence allowing machine learning scholars to take insights from the rich and interdisciplinary insurance literature. tracing the interaction of uncertainty, fairness and responsibility in insurance provides a fresh perspective on fairness in machine learning. we link insurance fairness conceptions to their machine learning relatives, and use this bridge to problematize fairness as calibration. in this process, we bring to the forefront two themes that have been largely overlooked in the machine learning literature: responsibility and aggregate-individual tensions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/652.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/652.txt new file mode 100644 index 0000000000000000000000000000000000000000..66e424f5c53b9e63b2463c21ea33db4ece9ccf02 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/652.txt @@ -0,0 +1 @@ + deep neural networks (dnns) have found wide applicability in numerous fields due to their ability to accurately learn very complex input-output relations. despite their accuracy and extensive use, dnns are highly susceptible to adversarial attacks due to limited generalizability. for future progress in the field, it is essential to build dnns that are robust to any kind of perturbations to the data points. in the past, many techniques have been proposed to robustify dnns using first-order derivative information of the network. this paper proposes a new robustification approach based on control theory. a neural network architecture that incorporates feedback control, named feedback neural networks, is proposed. the controller is itself a neural network, which is trained using regular and adversarial data so as to stabilize the system outputs. the novel adversarial training approach based on the feedback control architecture is called feedback looped adversarial training (flat). numerical results on standard test problems empirically show that our flat method is more effective than the state of the art at guarding against adversarial attacks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/653.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/653.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e289bf9170acdf5c9fb0bd55dd358fd60f46e1e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/653.txt @@ -0,0 +1 @@ + class-incremental learning (cil) aims to build classification models from data streams. at each step of the cil process, new classes must be integrated into the model. due to catastrophic forgetting, cil is particularly challenging when examples from past classes cannot be stored, the case on which we focus here. to date, most approaches are based exclusively on the target dataset of the cil process. however, the use of models pre-trained in a self-supervised way on large amounts of data has recently gained momentum. 
the initial model of the cil process may only use the first batch of the target dataset, or also use pre-trained weights obtained on an auxiliary dataset. the choice between these two initial learning strategies can significantly influence the performance of the incremental learning model, but has not yet been studied in depth. performance is also influenced by the choice of the cil algorithm, the neural architecture, the nature of the target task, the distribution of classes in the stream and the number of examples available for learning. we conduct a comprehensive experimental study to assess the roles of these factors. we present a statistical analysis framework that quantifies the relative contribution of each factor to incremental performance. our main finding is that the initial training strategy is the dominant factor influencing the average incremental accuracy, but that the choice of cil algorithm is more important in preventing forgetting. based on this analysis, we propose practical recommendations for choosing the right initial training strategy for a given incremental learning use case. these recommendations are intended to facilitate the practical deployment of incremental learning. real-world applications of machine learning (ml) often involve training models from data streams characterized by distributional changes and limited access to past data. this scenario presents a challenge for standard ml algorithms, as they assume that all training data is available. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/654.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/654.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d5236476f533e9ed18d5c9d6091c820b6e62de1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/654.txt @@ -0,0 +1 @@ + accurate demand forecasting in the retail industry is a critical determinant of financial performance and supply chain efficiency. as global markets become increasingly interconnected, businesses are turning towards advanced prediction models to gain a competitive edge. however, existing literature mostly focuses on historical sales data and ignores the vital influence of macroeconomic conditions on consumer spending behavior. in this study, we bridge this gap by enriching time series data of customer demand with macroeconomic variables, such as the consumer price index (cpi), index of consumer sentiment (ics), and unemployment rates. leveraging this comprehensive dataset, we develop and compare various regression and machine learning models to predict retail demand accurately. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/655.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/655.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0d828413530b306edde3cb91109b2daf75ba3cd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/655.txt @@ -0,0 +1 @@ + bayesian classifiers perform well when each of the features is completely independent of the others, which is not always valid in real-world applications. the aim of this study is to implement and compare the performances of each variant of bayesian classifier (multinomial, bernoulli, and gaussian) on anomaly detection in network intrusion, and to investigate whether there is any association between each variant's assumption and their performance. 
our investigation showed that each variant of the bayesian algorithm blindly follows its assumption regardless of feature properties, and that the assumption is the single most important factor that influences their accuracy. experimental results show that bernoulli has a test accuracy of 69.9% (71% train), multinomial has a test accuracy of 31.2% (31.2% train), while gaussian has a test accuracy of 81.69% (82.84% train). going deeper, we investigated and found that each naïve bayes variant's performance and accuracy is largely due to its classifier assumption: the gaussian classifier performed best on anomaly detection due to its assumption that features follow normal distributions, which are continuous, while the multinomial classifier had a dismal performance as it simply assumes a discrete, multinomial distribution. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/656.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/656.txt new file mode 100644 index 0000000000000000000000000000000000000000..059b79254ebab276e7ea2f730cf40d14d5e8272f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/656.txt @@ -0,0 +1 @@ + introduction: lung cancer is a significant cause of mortality worldwide, emphasizing the importance of early detection for improved survival rates. in this study, we propose a machine learning (ml) tool trained on data from the plco cancer screening trial and validated on the nlst to estimate the likelihood of lung cancer occurrence within five years. methods: the study utilized two datasets, the plco (n=55,161) and nlst (n=48,595), consisting of comprehensive information on risk factors, clinical measurements, and outcomes related to lung cancer. data preprocessing involved removing patients who were not current or former smokers and those who had died of causes unrelated to lung cancer. additionally, a focus was placed on mitigating bias caused by censored data. feature selection, hyper-parameter optimization, and model calibration were performed using xgboost, an ensemble learning algorithm that combines gradient boosting and decision trees. results: the final ml model was trained on the pre-processed plco dataset and tested on the nlst dataset. the model incorporated features such as age, gender, smoking history, medical diagnoses, and family history of lung cancer. the model was well-calibrated (brier score=0.044). roc-auc was 82% on the plco dataset and 70% on the nlst dataset. pr-auc was 29% and 11%, respectively. when compared to the uspstf guidelines for lung cancer screening, our model provided the same recall with a precision of 13.1% vs. 9.3% on the plco dataset and 3.2% vs. 3.1% on the nlst dataset. conclusion: the developed ml tool provides a freely available web application for estimating the likelihood of developing lung cancer within five years. by utilizing risk factors and clinical data, individuals can assess their risk and make informed decisions regarding lung cancer screening. this research contributes to the efforts in early detection and prevention strategies, aiming to reduce lung cancer-related mortality rates. 
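the three naive bayes variants compared in 655.txt above can be reproduced in outline with scikit-learn; the sketch below uses a synthetic two-class dataset rather than the study's network-intrusion data, so the accuracies it prints are purely illustrative:

from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB, MultinomialNB, BernoulliNB
from sklearn.preprocessing import MinMaxScaler

# synthetic stand-in for a network-intrusion dataset (continuous features, binary label)
X, y = make_classification(n_samples=2000, n_features=20, random_state=0)
X = MinMaxScaler().fit_transform(X)  # multinomial nb requires non-negative features
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=0)

# each variant encodes a different distributional assumption about the features
for name, clf in [("bernoulli", BernoulliNB()),
                  ("multinomial", MultinomialNB()),
                  ("gaussian", GaussianNB())]:
    clf.fit(X_tr, y_tr)
    print(name, "train acc:", clf.score(X_tr, y_tr), "test acc:", clf.score(X_te, y_te))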
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/657.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/657.txt new file mode 100644 index 0000000000000000000000000000000000000000..e92c8d82e0bc30b239099aa2f457ae2b10336440 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/657.txt @@ -0,0 +1 @@ + this research enhances linear regression models by integrating a kalman filter and analysing curve areas to minimize loss. the goal is to develop an optimal linear regression equation using stochastic gradient descent (sgd) for weight updating. our approach involves a stepwise process, starting with user-defined parameters. the linear regression model is trained using sgd, tracking weights and loss separately and zipping them finally. a kalman filter is then trained based on weight and loss arrays to predict the next consolidated weights. predictions result from multiplying input averages with weights, evaluated for loss to form a weight-versus-loss curve. the curve's equation is derived using the two-point formula, and area under the curve is calculated via integration. the linear regression equation with minimum area becomes the optimal curve for prediction. benefits include avoiding constant weight updates via gradient descent and working with partial datasets, unlike methods needing the entire set. however, computational complexity should be considered. the kalman filter's accuracy might diminish beyond a certain prediction range. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/658.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/658.txt new file mode 100644 index 0000000000000000000000000000000000000000..4705e807022316db7d59153300667ef840eb0b2f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/658.txt @@ -0,0 +1 @@ + in this paper, we introduce the transition-based feature generator (tfgen) technique, which reads general activity data with attributes and generates step-by-step generated data. the activity data may consist of network activity from packets, system calls from processes or classified activity from surveillance cameras. tfgen processes data online and will generate data with encoded historical data for each incoming activity with high computational efficiency. the input activities may concurrently originate from distinct traces or channels. the technique aims to address issues such as domain-independent applicability, the ability to discover global process structures, the encoding of time-series data, and online processing capability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/659.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/659.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb5f1b452393ada1c6e5eba9c9a201cfe59e900b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/659.txt @@ -0,0 +1 @@ + support vector machine (svm), is a popular kernel method for data classification that demonstrated its efficiency for a large range of practical applications. the method suffers, however, from some weaknesses including; time processing, risk of failure of the optimization process for high dimension cases, generalization to multiclasses, unbalanced classes, and dynamic classification. in this paper an alternative method is proposed having a similar performance, with a sensitive improvement of the aforementioned shortcomings. 
the new method is based on a minimum distance to optimal subspaces containing the mapped original classes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/66.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/66.txt new file mode 100644 index 0000000000000000000000000000000000000000..646d4f365300a38a5ce1f1ec27f9e3f01c4dd2e9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/66.txt @@ -0,0 +1 @@ + this work demonstrates that natural language transformers can support more generic strategic modeling, particularly for text-archived games. in addition to learning natural language skills, the abstract transformer architecture can generate meaningful moves on a chessboard. with further fine-tuning, the transformer learns complex gameplay by training on 2.8 million chess games in portable game notation. after 30,000 training steps, openai's generative pre-trained transformer (gpt-2) optimizes weights for 774 million parameters. this fine-tuned chess transformer generates plausible strategies and displays game formations identifiable as classic openings, such as english or the slav exchange. finally, in live play, the novel model demonstrates a human-to-transformer interface that correctly filters illegal moves and provides a novel method to challenge the transformer's chess strategies. we anticipate future work will build on this transformer's promise, particularly in other strategy games where features can capture the underlying complex rule syntax from simple but expressive player annotations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/660.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/660.txt new file mode 100644 index 0000000000000000000000000000000000000000..e65e81b99f970eb7a3dd65fb0f37ce4d5822d584 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/660.txt @@ -0,0 +1 @@ + this research aims to improve the accuracy of complex volleyball predictions and provide more meaningful insights to coaches and players. we introduce a specialized graph encoding technique to add additional contact-by-contact volleyball context to an already available volleyball dataset without any additional data gathering. we demonstrate the potential benefits of using graph neural networks (gnns) on this enriched dataset for three different volleyball prediction tasks: rally outcome prediction, set location prediction, and hit type prediction. we compare the performance of our graph-based models to baseline models and analyze the results to better understand the underlying relationships in a volleyball rally. our results show that the use of gnns with our graph encoding yields a much more advanced analysis of the data, which noticeably improves prediction results overall. we also show that these baseline tasks can be significantly improved with simple adjustments, such as removing blocked hits. lastly, we demonstrate the importance of choosing a model architecture that will better extract the important information for a certain task. overall, our study showcases the potential strengths and weaknesses of using graph encodings in sports data analytics and hopefully will inspire future improvements in machine learning strategies across sports and applications by using graphbased encodings. 
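the chess-transformer setup summarized in 66.txt above can be sketched with the hugging face transformers library; the snippet below uses the base gpt2 checkpoint, not the fine-tuned chess model described in the abstract, and only illustrates the text-generation interface on a pgn-style prompt:

from transformers import pipeline

# base gpt-2 rather than the chess fine-tuned checkpoint; illustrative only
generator = pipeline("text-generation", model="gpt2")

# a partial game in portable game notation (pgn); the model continues the move text
prompt = "1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 4."
out = generator(prompt, max_new_tokens=20, do_sample=True, temperature=0.7)
print(out[0]["generated_text"])
# a real interface would still need a legality filter (e.g. with the python-chess
# package) on the generated moves, as the abstract notes illegal moves are filtered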
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/661.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/661.txt new file mode 100644 index 0000000000000000000000000000000000000000..b987884863bbf2ee3708b1e17c67c36a40adcfa3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/661.txt @@ -0,0 +1 @@ + given the escalating risks of malicious attacks in the finance sector and the consequential severe damage, a thorough understanding of adversarial strategies and robust defense mechanisms for machine learning models is critical. the threat becomes even more severe with banks' increased adoption of more accurate, but potentially fragile, neural networks. we aim to investigate the current state and dynamics of adversarial attacks and defenses for neural network models that use sequential financial data as the input. to achieve this goal, we have designed a competition that allows realistic and detailed investigation of problems in modern financial transaction data. the participants compete directly against each other, so possible attacks and defenses are examined in close-to-real-life conditions. our main contributions are an analysis of the competition dynamics that answers the questions of how important it is to conceal a model from malicious users, how long it takes to break it, and what techniques one should use to make it more robust, along with the introduction of additional ways to attack models or increase their robustness. our analysis continues with a meta-study of the approaches used and their power, numerical experiments, and accompanying ablation studies. we show that the developed attacks and defenses outperform existing alternatives from the literature while being practical in terms of execution, proving the validity of the competition as a tool for uncovering vulnerabilities of machine learning models and mitigating them in various domains. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/662.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/662.txt new file mode 100644 index 0000000000000000000000000000000000000000..c8d6dcf232beb51daa22e1ef820a8c64c8aefb0f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/662.txt @@ -0,0 +1 @@ + recently, machine learning (ml) has become a widely accepted and rapidly evolving method for making significant progress, since it employs computational methods to teach machines and produce acceptable answers. the significance of machine learning operations (mlops) methods, which can provide acceptable answers for such problems, is examined in this study. to assist in the creation of software that is simple to use, the authors research mlops methods. to choose the best tool structure for certain projects, the authors also assess the features and operability of various mlops methods. a total of 22 papers were assessed that attempted to apply the mlops idea. finally, the authors acknowledge the scarcity of fully effective mlops methods based on which advancements can self-regulate by limiting human engagement. 
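the attack side of the competition described in 661.txt can be sketched with a standard gradient-sign (fgsm-style) perturbation on a toy sequence classifier; the model, dimensions and data below are invented, and fgsm is only one generic attack, not necessarily what the participants used:

import torch
import torch.nn as nn

# toy classifier over sequences of transaction codes: embedding -> lstm -> linear
class SeqClassifier(nn.Module):
    def __init__(self, n_codes=100, emb=16, hidden=32):
        super().__init__()
        self.emb = nn.Embedding(n_codes, emb)
        self.lstm = nn.LSTM(emb, hidden, batch_first=True)
        self.head = nn.Linear(hidden, 2)

    def forward_from_embeddings(self, e):
        _, (h, _) = self.lstm(e)
        return self.head(h[-1])

model = SeqClassifier()
loss_fn = nn.CrossEntropyLoss()

# one invented batch: 4 sequences of 10 transaction codes, binary labels
codes = torch.randint(0, 100, (4, 10))
labels = torch.randint(0, 2, (4,))

# attack the continuous embeddings (the raw inputs are discrete), fgsm-style
emb = model.emb(codes).detach().requires_grad_(True)
loss = loss_fn(model.forward_from_embeddings(emb), labels)
loss.backward()
adv_emb = emb + 0.1 * emb.grad.sign()  # small step in the gradient-sign direction
print("clean vs adversarial predictions:",
      model.forward_from_embeddings(emb).argmax(1).tolist(),
      model.forward_from_embeddings(adv_emb).argmax(1).tolist())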
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/663.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/663.txt new file mode 100644 index 0000000000000000000000000000000000000000..f93f8a5120a86e8916da5383274840e38f62b075 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/663.txt @@ -0,0 +1 @@ + our previous experiments demonstrated that subset collections of (short) documents (with several hundred entries) share a common, suitably normalized eigenvalue spectrum of the combinatorial laplacian. based on this insight, we propose a method of incremental spectral clustering. the method consists of the following steps: (1) split the data into manageable subsets, (2) cluster each of the subsets, (3) merge clusters from different subsets based on the eigenvalue spectrum similarity to form clusters of the entire set. this method can be especially useful for clustering methods whose complexity increases strongly with the size of the data sample, as in the case of typical spectral clustering. experiments were performed showing that, in fact, clustering the subsets and then merging them yields clusters close to those obtained by clustering the entire dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/664.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/664.txt new file mode 100644 index 0000000000000000000000000000000000000000..e2e7f0f8590d8826b52fe27bbe8d44e5fb2ea3bf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/664.txt @@ -0,0 +1 @@ + the superior performance of deep neural networks (dnns) has led to their application in various aspects of human life. safety-critical applications are no exception and impose rigorous reliability requirements on dnns. quantized neural networks (qnns) have emerged to tackle the complexity of dnn accelerators; however, they are more prone to reliability issues. in this paper, a recent analytical resilience assessment method is adapted for qnns to identify critical neurons based on a neuron vulnerability factor (nvf). thereafter, a novel method for splitting the critical neurons is proposed that enables the design of a lightweight correction unit (lcu) in the accelerator without redesigning its computational part. the method is validated by experiments on different qnns and datasets. the results demonstrate that the proposed method for correcting the faults has half the overhead of selective triple modular redundancy (tmr) while achieving a similar level of fault resiliency. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/665.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/665.txt new file mode 100644 index 0000000000000000000000000000000000000000..57a98773a38c45c150b1515b9889ff8143a6dd72 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/665.txt @@ -0,0 +1 @@ + graphs are omnipresent and gnns are a powerful family of neural networks for learning over graphs. despite their popularity, scaling gnns either by deepening or widening suffers from the prevalent issues of unhealthy gradients, over-smoothing, and information squashing, which often lead to substandard performance. in this work, we are interested in exploring a principled way to scale gnn capacity without deepening or widening, which can improve their performance across multiple small and large graphs. 
motivated by the recent intriguing phenomenon of model soups, which suggests that the fine-tuned weights of multiple large pre-trained language models can be merged into a better minimum, we argue for exploiting the fundamentals of model soups to mitigate the aforementioned issues of memory bottleneck and trainability during gnn scaling. more specifically, we propose not to deepen or widen current gnns, but instead present a data-centric perspective of model soups tailored for gnns, i.e., to build powerful gnns. by dividing giant graph data, we build multiple independently and parallelly trained weaker gnns (soup ingredients) without any intermediate communication, and combine their strength using a greedy interpolation soup procedure to achieve state-of-the-art performance. compared to concurrent distributed gnn training works such as (zhu et al., 2023), we train each soup ingredient by sampling different subgraphs per epoch, and their respective sub-models are merged only after being fully trained (rather than intermediately so). moreover, we provide a wide variety of model soup preparation techniques by leveraging state-of-the-art graph sampling and graph partitioning approaches that can handle large graphs. extensive experiments across many \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/666.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/666.txt new file mode 100644 index 0000000000000000000000000000000000000000..79ffea782ce600c13b92e422dacf341aa9f93907 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/666.txt @@ -0,0 +1 @@ + this paper explores deterioration in alzheimer's disease using machine learning. subjects were split into two datasets based on baseline diagnosis (cognitively normal, mild cognitive impairment), with the outcome of deterioration at the final visit (a binomial, essentially yes/no, categorisation) using data from the alzheimer's disease neuroimaging initiative (demographics, genetics, csf, imaging, and neuropsychological testing, etc.). six machine learning models, including gradient boosting, were built and evaluated on these datasets using a nested cross-validation procedure, with the best-performing models being put through repeated nested cross-validation at 100 iterations. we were able to demonstrate good predictive ability using cart to predict which of those in the cognitively normal group deteriorated and received a worse diagnosis (auc = 0.88). for the mild cognitive impairment group, we were able to achieve good predictive ability for deterioration with elastic net (auc = 0.76). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/667.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/667.txt new file mode 100644 index 0000000000000000000000000000000000000000..40639e0101586e647c2e4af9d8e03163b118af0e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/667.txt @@ -0,0 +1 @@ + machine learning models that aim to predict dementia onset usually follow the classification methodology, ignoring the time until an event happens. this study presents an alternative, using survival analysis within the context of machine learning techniques. two survival method extensions based on the machine learning algorithms of random forest and elastic net are applied to train, optimise, and validate predictive models based on the english longitudinal study of ageing (elsa) cohort. 
the two survival machine learning models are compared with the conventional statistical cox proportional hazard model, proving their superior predictive capability and stability on the elsa data, as demonstrated by computationally intensive procedures such as nested cross-validation and monte carlo validation. this study is the first to apply survival machine learning to the elsa data, and demonstrates in this case the superiority of ai based predictive modelling approaches over the widely employed cox statistical approach in survival analysis. implications, methodological considerations, and future research directions are discussed. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/668.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/668.txt new file mode 100644 index 0000000000000000000000000000000000000000..092a5ec8b7e2b1d05d5842cfe618d00a5c01ea5d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/668.txt @@ -0,0 +1 @@ + data analysis and monitoring on smart grids are jeopardized by attacks on cyber-physical systems. false data injection attack (fdia) is one of the classes of those attacks that target the smart measurement devices by injecting malicious data. the employment of machine learning techniques in the detection and localization of fdia is proven to provide effective results. training of such models requires centralized processing of sensitive user data that may not be plausible in a practical scenario. by employing federated learning for the detection of fdia attacks, it is possible to train a model for the detection and localization of the attacks while preserving the privacy of sensitive user data. however, federated learning introduces new problems such as the personalization of the detectors in each node. in this paper, we propose a federated learningbased scheme combined with a hybrid deep neural network architecture that exploits the local correlations between the connected power buses by employing graph neural networks as well as the temporal patterns in the data by using lstm layers. the proposed mechanism offers flexible and efficient training of an fdia detector in a distributed setup while preserving the privacy of the clients. we validate the proposed architecture by extensive simulations on the ieee 57, 118, and 300 bus systems and real electricity load data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/669.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/669.txt new file mode 100644 index 0000000000000000000000000000000000000000..69f7df856beef680080c9069edaf0477b54e6287 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/669.txt @@ -0,0 +1 @@ + the rise of alzheimer's disease worldwide has prompted a search for efficient tools which can be used to predict deterioration in cognitive decline leading to dementia. in this paper, we explore the potential of survival machine learning as such a tool for building models capable of predicting not only deterioration but also the likely time to deterioration. we demonstrate good predictive ability (0.86 c-index), lending support to its use in clinical investigation and prediction of alzheimer's disease risk. 
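as a minimal illustration of the survival-analysis framing used in 667.txt and 669.txt (time-to-event modelling evaluated with a concordance index), the sketch below fits a standard cox model with the lifelines package on its bundled rossi example dataset; this is not the elsa data, and the cox model is the statistical baseline those abstracts compare machine learning methods against:

from lifelines import CoxPHFitter
from lifelines.datasets import load_rossi
from lifelines.utils import concordance_index

# bundled example dataset: one row per subject, with follow-up time and event indicator
df = load_rossi()  # includes 'week' (duration) and 'arrest' (event) columns

cph = CoxPHFitter()
cph.fit(df, duration_col="week", event_col="arrest")

# c-index: probability that the model ranks a random comparable pair of subjects correctly
c_index = concordance_index(df["week"], -cph.predict_partial_hazard(df), df["arrest"])
print("concordance index:", round(c_index, 3))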
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/67.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/67.txt new file mode 100644 index 0000000000000000000000000000000000000000..f048536f49ac9476ff47bfaa8db3a2f03c74fe6b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/67.txt @@ -0,0 +1 @@ + we show that regression predictions from linear and tree-based models can be represented as linear combinations of target instances in the training data. this also holds for models constructed as ensembles of trees, including random forests and gradient boosting machines. the weights used in these linear combinations are measures of instance importance, complementing existing measures of feature importance, such as shap and lime. we refer to these measures as axil weights (additive explanations with instance loadings). since axil weights are additive across instances, they offer both local and global explanations. our work contributes to the broader effort to make machine learning predictions more interpretable and explainable. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/670.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/670.txt new file mode 100644 index 0000000000000000000000000000000000000000..91ccf20f19a653a00998ac7135328f5a878242d8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/670.txt @@ -0,0 +1 @@ + in this paper, we address the issue of fairness in preference-based reinforcement learning (pbrl) in the presence of multiple objectives. the main objective is to design control policies that can optimize multiple objectives while treating each objective fairly. toward this objective, we design a new fairness-induced preference-based reinforcement learning or fpbrl. the main idea of fpbrl is to learn vector reward functions associated with multiple objectives via new welfare-based preferences rather than reward-based preference in pbrl, coupled with policy learning via maximizing a generalized gini welfare function. finally, we provide experiment studies on three different environments to show that the proposed fpbrl approach can achieve both efficiency and equity for learning effective and fair policies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/671.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/671.txt new file mode 100644 index 0000000000000000000000000000000000000000..8564f7ef55c33a97a5c65cd613cdb31003266589 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/671.txt @@ -0,0 +1 @@ + this work proposes a real-time anomaly detection scheme that leverages the multi-step ahead prediction capabilities of encoder-decoder (ed) deep learning models with recurrent units. specifically, an encoder-decoder is used to model soft-failure evolution over a long future horizon (i.e., for several days ahead) by analyzing past quality-of-transmission (qot) observations. this information is subsequently used for real-time anomaly detection (e.g., of attack incidents), as the knowledge of how the qot is expected to evolve allows capturing unexpected network behavior. specifically, for anomaly detection, a statistical hypothesis testing scheme is used, alleviating the limitations of supervised (sl) and unsupervised learning (ul) schemes, usually applied for this purpose. 
indicatively, the proposed scheme eliminates the need for labeled anomalies, required when sl is applied, and the need to analyze entire datasets on-line to identify abnormal instances (i.e., ul). overall, it is shown that by utilizing qot evolution information, the proposed approach can effectively detect abnormal deviations in real time. importantly, it is shown that the information concerning soft-failure evolution (i.e., qot predictions) is essential to accurately detect anomalies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/672.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/672.txt new file mode 100644 index 0000000000000000000000000000000000000000..4fc6c5309948c3d80908180766b012ac826087d7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/672.txt @@ -0,0 +1 @@ + density-based clustering methods are a type of clustering method used in data mining for extracting previously unknown patterns from data sets. there are a number of density-based clustering methods such as dbscan, optics, denclue, vdbscan, dvbscan, dbclasd and st-dbscan. in this paper, a study of these methods is done along with their characteristics, advantages and disadvantages and, most importantly, their applicability to different types of data sets to mine useful and appropriate patterns. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/673.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/673.txt new file mode 100644 index 0000000000000000000000000000000000000000..c2570d3559a371635bfc482ba01c57f3ec7bc56d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/673.txt @@ -0,0 +1 @@ + one of the objectives of continual learning is to learn new concepts continually over a stream of experiences and at the same time avoid catastrophic forgetting. to mitigate complete knowledge overwriting, memory-based methods store a percentage of previous data distributions to be used during training. although these methods produce good results, few studies have tested their out-of-distribution generalization properties, as well as whether these methods overfit the replay memory. in this work, we show that although these methods can help in traditional in-distribution generalization, they can strongly impair out-of-distribution generalization by learning spurious features and correlations. using a controlled environment, the synbol benchmark generator (lacoste et al., 2020), we demonstrate that this lack of out-of-distribution generalization mainly occurs in the linear classifier. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/674.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/674.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf7717c297d6cdde574507f92a3dfdc09921bde8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/674.txt @@ -0,0 +1 @@ + in the smart grid of the future, accurate load forecasts on the level of individual clients can help to balance supply and demand locally and to prevent grid outages. while the number of monitored clients will increase with the ongoing smart meter rollout, the amount of data per client will always be limited. we evaluate whether a transformer load forecasting model benefits from a transfer learning strategy, where a global univariate model is trained on the load time series from multiple clients. 
in experiments with two datasets containing load time series from several hundred clients, we find that the global training strategy is superior to the multivariate and local training strategies used in related work. on average, the global training strategy results in 21.8% and 12.8% lower forecasting errors than the two other strategies, measured across forecasting horizons from one day to one month into the future. a comparison to linear models, multi-layer perceptrons and lstms shows that transformers are effective for load forecasting when they are trained with the global training strategy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/675.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/675.txt new file mode 100644 index 0000000000000000000000000000000000000000..b7caae4cf84b9063fdecf6e8b5065ec965c2d442 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/675.txt @@ -0,0 +1 @@ + we propose rotar, a row-based table representation learning method, to address the efficiency and scalability issues faced by existing table representation learning methods. the key idea of rotar is to generate query-agnostic row representations that could be re-used via query-specific aggregation. in addition to the row-based architecture, we introduce several techniques: cell-aware position embedding, teacher-student training paradigm, and selective backward to improve the performance of rotar model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/676.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/676.txt new file mode 100644 index 0000000000000000000000000000000000000000..77741ed35dc290d65d2a44ff24ea2e078ed020eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/676.txt @@ -0,0 +1 @@ + most deep learning pipelines are built on real-valued operations to deal with real-valued inputs such as images, speech or music signals. however, a lot of applications naturally make use of complex-valued signals or images, such as mri or remote sensing. additionally the fourier transform of signals is complex-valued and has numerous applications. we aim to make deep learning directly applicable to these complex-valued signals without using projections into r 2 . thus we add to the recent developments of complex-valued neural networks by presenting building blocks to transfer the transformer architecture to the complex domain. we present multiple versions of a complex-valued scaled dot-product attention mechanism as well as a complex-valued layer normalization. we test on a classification and a sequence generation task on the musicnet dataset and show improved robustness to overfitting while maintaining on-par performance when compared to the real-valued transformer architecture. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/677.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/677.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c2dac5f6e59edf50db396ef42a13d2ba6e04cba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/677.txt @@ -0,0 +1 @@ + we give the first result for agnostically learning single-index models (sims) with arbitrary monotone and lipschitz activations. all prior work either held only in the realizable setting or required the activation to be known. 
moreover, we only require the marginal to have bounded second moments, whereas all prior work required stronger distributional assumptions (such as anticoncentration or boundedness). our algorithm is based on recent work by on omniprediction using predictors satisfying calibrated multiaccuracy. our analysis is simple and relies on the relationship between bregman divergences (or matching losses) and ℓ p distances. we also provide new guarantees for standard algorithms like glmtron and logistic regression in the agnostic setting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/678.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/678.txt new file mode 100644 index 0000000000000000000000000000000000000000..f8a24d25c91037fcc64f937a54aadb813a25f60d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/678.txt @@ -0,0 +1 @@ + with mobile, iot and sensor devices becoming pervasive in our life and recent advances in edge computational intelligence (e.g., edge ai/ml), it became evident that the traditional methods for training ai/ml models are becoming obsolete, especially with the growing concerns over privacy and security. this work tries to highlight the key challenges that prohibit edge ai/ml from seeing wide-range adoption in different sectors, especially for large-scale scenarios. therefore, we focus on the main challenges acting as adoption barriers for the existing methods and propose a design, with a drastic shift from the current ill-suited approaches, that leverages the edge-to-cloud continuum. the new design is envisioned to be model-centric in which the trained models are treated as a commodity driving the exchange dynamics of collaborative learning in decentralized settings. it is expected that this design will provide a decentralized framework for efficient collaborative learning at scale. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/679.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/679.txt new file mode 100644 index 0000000000000000000000000000000000000000..0cdcfa4d3eeb84fd47fe8fcb22c059f56f812d96 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/679.txt @@ -0,0 +1 @@ + we consider the problem of learning from data corrupted by underrepresentation bias, where positive examples are filtered from the data at different, unknown rates for a fixed number of sensitive groups. we show that with a small amount of unbiased data, we can efficiently estimate the group-wise drop-out parameters, even in settings where intersectional group membership makes learning each intersectional rate computationally infeasible. using this estimate for the group-wise drop-out rate, we construct a re-weighting scheme that allows us to approximate the loss of any hypothesis on the true distribution, even if we only observe the empirical error on a biased sample. finally, we present an algorithm encapsulating this learning and re-weighting process, and we provide strong pac-style guarantees that, with high probability, our estimate of the risk of the hypothesis over the true distribution will be arbitrarily close to the true risk. 
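The re-weighting idea in 679.txt above can be made concrete with a small sketch. Under the filtering model described in that abstract (positives in each sensitive group are dropped at an unknown rate), the group-wise keep-rate can be estimated from a small unbiased sample and then used to up-weight the surviving positives in the biased sample. This is only an illustration of the idea under that simple model, not the paper's algorithm or its PAC-style guarantees; all names below are hypothetical.

import numpy as np

def estimate_keep_rates(y_unb, g_unb, y_bia, g_bia, groups):
    # beta_g via an odds ratio: under the filtering model,
    # odds(y=1 | g, biased) = beta_g * odds(y=1 | g, unbiased).
    rates = {}
    for g in groups:
        p_true = np.mean(y_unb[g_unb == g])   # positive rate in the small unbiased sample
        p_obs = np.mean(y_bia[g_bia == g])    # positive rate in the biased sample
        rates[g] = (p_obs / (1 - p_obs)) / (p_true / (1 - p_true))
    return rates

def reweighted_loss(losses, y_bia, g_bia, rates):
    # up-weight surviving positives in group g by 1/beta_g so the weighted
    # empirical loss approximates the loss under the true distribution.
    w = np.ones_like(losses, dtype=float)
    for g, beta in rates.items():
        w[(g_bia == g) & (y_bia == 1)] = 1.0 / max(beta, 1e-6)
    return float(np.average(losses, weights=w))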
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/68.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/68.txt new file mode 100644 index 0000000000000000000000000000000000000000..674ba02190d52c90ce73f3f77e9da2e8380c4697 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/68.txt @@ -0,0 +1 @@ + in this paper we discuss the theory used in the design of an open source lightmorphic signatures analysis toolkit (lsat). in addition to providing a core functionality, the software package enables specific optimizations with its modular and customizable design. to promote its usage and inspire future contributions, lsat is publicly available. by using a self-supervised neural network and augmented machine learning algorithms, lsat provides an easy-to-use interface with ample documentation. the experiments demonstrate that lsat improves the otherwise tedious and error-prone tasks of translating lightmorphic-associated data into usable spectrograms, enhanced with parameter tuning and performance analysis. with the provided mathematical functions, lsat validates the nonlinearity encountered in the data conversion process while ensuring suitability of the forecasting algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/680.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/680.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d74026dcbac88f4dfbbb1fa60f7717871e5d4d0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/680.txt @@ -0,0 +1 @@ + deep reinforcement learning (drl) approaches to online portfolio selection (olps) have grown in popularity in recent years. the sensitive nature of training reinforcement learning agents implies a need for extensive efforts in market representation, behavior objectives, and training processes, which have often been lacking in previous works. we propose a training and evaluation process to assess the performance of classical drl algorithms for portfolio management. we found that most drl algorithms were not robust, with strategies generalizing poorly and degrading quickly during backtesting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/681.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/681.txt new file mode 100644 index 0000000000000000000000000000000000000000..e63021fa9ae862cf4fcc4e82613e622020d96c97 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/681.txt @@ -0,0 +1 @@ + we propose a label poisoning attack on geometric data sets against k-nearest neighbor classification. we provide an algorithm that can compute an $\varepsilon n$-additive approximation of the optimal poisoning in $n \cdot 2^{2^{O(d + k/\varepsilon)}}$ time for a given data set $X \subset \mathbb{R}^d$, where $|X| = n$. our algorithm achieves its objectives through the application of multi-scale random partitions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/682.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/682.txt new file mode 100644 index 0000000000000000000000000000000000000000..012e65c676662f2f0e427ab429583acb6fe19830 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/682.txt @@ -0,0 +1 @@ + the dependence on training data of the gibbs algorithm (ga) is analytically characterized. by adopting the expected empirical risk as the performance metric, the sensitivity of the ga is obtained in closed form.
in this case, sensitivity is the performance difference with respect to an arbitrary alternative algorithm. this description enables the development of explicit expressions involving the training errors and test errors of gas trained with different datasets. using these tools, dataset aggregation is studied and different figures of merit to evaluate the generalization capabilities of gas are introduced. for particular sizes of such datasets and parameters of the gas, a connection between jeffrey's divergence, training and test errors is established. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/683.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/683.txt new file mode 100644 index 0000000000000000000000000000000000000000..71b8ffa25e817404addcdfabced8d90c75d3e115 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/683.txt @@ -0,0 +1 @@ + neural networks employ spurious correlations in their predictions, resulting in decreased performance when these correlations do not hold. recent works suggest fixing pretrained representations and training a classification head that does not use spurious features. we investigate how spurious features are represented in pretrained representations and explore strategies for removing information about spurious features. considering the waterbirds dataset and a few pretrained representations, we find that even with full knowledge of spurious features, their removal is not straightforward due to entangled representations. to address this, we propose a linear autoencoder training method to separate the representation into core, spurious, and other features. we propose two effective spurious feature removal approaches that are applied to the encoding and significantly improve classification performance measured by worst-group accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/684.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/684.txt new file mode 100644 index 0000000000000000000000000000000000000000..71eaa2c1c59eb10eeb4d5b54ba2c9a368d36565a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/684.txt @@ -0,0 +1 @@ + in stochastic zeroth-order optimization, a problem of practical relevance is understanding how to fully exploit the local geometry of the underlying objective function. we consider a fundamental setting in which the objective function is quadratic, and provide the first tight characterization of the optimal hessian-dependent sample complexity. our contribution is twofold. first, from an information-theoretic point of view, we prove tight lower bounds on hessian-dependent complexities by introducing a concept called energy allocation, which captures the interaction between the searching algorithm and the geometry of objective functions. a matching upper bound is obtained by solving the optimal energy spectrum. then, algorithmically, we show the existence of a hessian-independent algorithm that universally achieves the asymptotic optimal sample complexities for all hessian instances.
the optimal sample complexities achieved by our algorithm remain valid for heavy-tailed noise distributions, which are enabled by a truncation method. as an initial step, we investigate the following natural questions: • for zeroth-order bandit optimization problems of quadratic functions of the form $\frac{1}{2}(x - x_0)^{\top} A (x - x_0)$, what is the optimal instance-dependent upper bound with respect to $A$? \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/685.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/685.txt new file mode 100644 index 0000000000000000000000000000000000000000..9fa49c961126b8f38e6d527a9d3b6f140827b0d8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/685.txt @@ -0,0 +1 @@ + interpreting machine learning models remains a challenge, hindering their adoption in clinical settings. this paper proposes leveraging local interpretable model-agnostic explanations (lime) to provide interpretable descriptions of black box classification models in high-stakes sepsis detection. by analyzing misclassified instances, significant features contributing to suboptimal performance are identified. the analysis reveals regions where the classifier performs poorly, allowing the calculation of error rates within these regions. this knowledge is crucial for cautious decision-making in sepsis detection and other critical applications. the proposed approach is demonstrated using the eicu dataset, effectively identifying and visualizing regions where the classifier underperforms. by enhancing interpretability, our method promotes the adoption of machine learning models in clinical practice, empowering informed decision-making and mitigating risks in critical scenarios. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/686.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/686.txt new file mode 100644 index 0000000000000000000000000000000000000000..b8ddb0c0226b102ac1f01ffa2f589adede10ebb2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/686.txt @@ -0,0 +1 @@ + complex, long-horizon planning and its combinatorial nature pose steep challenges for learning-based agents. difficulties in such settings are exacerbated in low data regimes where over-fitting stifles generalization and compounding errors hurt accuracy. in this work, we explore the use of an often unused source of auxiliary supervision: language. inspired by recent advances in transformer-based models, we train agents with an instruction prediction loss that encourages learning temporally extended representations that operate at a high level of abstraction. concretely, we demonstrate that instruction modeling significantly improves performance in planning environments when training with a limited number of demonstrations on the babyai and crafter benchmarks. in further analysis we find that instruction modeling is most important for tasks that require complex reasoning, while understandably offering smaller gains in environments that require simple plans. more details and code can be found at https://github.com/jhejna/instruction-prediction.
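To make the setting in 684.txt concrete: the sketch below optimizes a quadratic $\frac{1}{2}(x - x_0)^{\top} A (x - x_0)$ using only noisy zeroth-order (function-value) queries and a standard two-point finite-difference gradient estimate. It only illustrates the problem setup; the matrix A, noise level, and step sizes are arbitrary choices, and this is not the Hessian-independent algorithm analyzed in the paper.

import numpy as np

rng = np.random.default_rng(0)
d = 5
A = np.diag(np.linspace(1.0, 10.0, d))   # Hessian of the quadratic (illustrative)
x0 = rng.normal(size=d)                  # unknown minimizer
sigma = 0.01                             # observation noise level

def f_noisy(x):
    # noisy zeroth-order oracle for f(x) = 0.5 (x - x0)^T A (x - x0)
    diff = x - x0
    return 0.5 * diff @ A @ diff + sigma * rng.normal()

x = np.zeros(d)
mu, eta = 0.05, 0.01                     # smoothing radius and step size
for _ in range(3000):
    u = rng.normal(size=d)
    u /= np.linalg.norm(u)               # random unit direction
    # two-point estimate of the directional derivative, rescaled by d
    g = d * (f_noisy(x + mu * u) - f_noisy(x - mu * u)) / (2 * mu) * u
    x -= eta * g
print("final suboptimality:", 0.5 * (x - x0) @ A @ (x - x0))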
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/687.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/687.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a002443361ed6d29d93cfe7b756a919a6f998b5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/687.txt @@ -0,0 +1 @@ + in this paper, we address the problem of direction of arrival (doa) estimation for multiple targets in the presence of sensor failures in a sparse array. generally, sparse arrays are known with very high-resolution capabilities, where n physical sensors can resolve up to o(n 2 ) uncorrelated sources. however, among the many configurations introduced in the literature, the arrays that provide the largest hole-free co-array are the most susceptible to sensor failures. we propose here two machine learning (ml) methods to mitigate the effect of sensor failures and maintain the doa estimation performance and resolution. the first method enhances the conventional spatial smoothing using deep neural network (dnn), while the second one is an end-to-end data-driven method. numerical results show that both approaches can significantly improve the performance of mra with two failed sensors. the data-driven method can maintain the performance of the array with no failures at high signal-tonoise ratio (snr). moreover, both approaches can even perform better than the original array at low snr thanks to the denoising effect of the proposed dnn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/688.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/688.txt new file mode 100644 index 0000000000000000000000000000000000000000..45add413b4ed78c967d7b9d6fee82e532d1e3d5b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/688.txt @@ -0,0 +1 @@ + the absence of transparency and explainability hinders the clinical adoption of machine learning (ml) algorithms. although various methods of explainable artificial intelligence (xai) have been suggested, there is a lack of literature that delves into their practicality and assesses them based on criteria that could foster trust in clinical environments. to address this gap this study evaluates two popular xai methods used for explaining predictive models in the healthcare context in terms of whether they (i) generate domain-appropriate representation, i.e. coherent with respect to the application task, (ii) impact clinical workflow and (iii) are consistent. to that end, explanations generated at the cohort and patient levels were analysed. the paper reports the first benchmarking of the xai methods applied to risk prediction models obtained by evaluating the concordance between generated explanations and the trigger of a future clinical deterioration episode recorded by the data collection system. we carried out an analysis using two electronic medical records (emr) datasets sourced from australian major hospitals. the findings underscore the limitations of state-of-the-art xai methods in the clinical context and their potential benefits. we discuss these limitations and contribute to the theoretical development of trustworthy xai solutions where clinical decision support guides the choice of intervention by suggesting the pattern or drivers for clinical deterioration in the future. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/689.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/689.txt new file mode 100644 index 0000000000000000000000000000000000000000..261eaf4cc6b6304a77d8379350186ce14bc1f6be --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/689.txt @@ -0,0 +1 @@ + recent advancements in federated learning (fl) seek to increase client-level performance by finetuning client parameters on local data or personalizing architectures for the local task. existing methods for such personalization either prune a global model or fine-tune a global model on a local client distribution. however, these existing methods either personalize at the expense of retaining important global knowledge, or predetermine network layers for fine-tuning, resulting in suboptimal storage of global knowledge within client models. enlightened by the lottery ticket hypothesis, we first introduce a hypothesis for finding optimal client subnetworks to locally fine-tune while leaving the rest of the parameters frozen. we then propose a novel fl framework, fedselect, using this procedure that directly personalizes both client subnetwork structure and parameters, via the simultaneous discovery of optimal parameters for personalization and the rest of parameters for global aggregation during training. we show that this method achieves promising results on cifar-10. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/69.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/69.txt new file mode 100644 index 0000000000000000000000000000000000000000..9fa70bde8fd88735b9d0c372724e17c33a491fd5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/69.txt @@ -0,0 +1 @@ + in this paper we propose a new deep learning (dl) approach for message classification. our method is based on the state-of-the-art natural language processing (nlp) building blocks, combined with a novel technique for infusing the meta-data input that is typically available in messages such as the sender information, timestamps, attached image, audio, affiliations, and more. as we demonstrate throughout the paper, going beyond the mere text by leveraging all available channels in the message, could yield an improved representation and higher classification accuracy. to achieve message representation, each type of input is processed in a dedicated block in the neural network architecture that is suitable for the data type. such an implementation enables training all blocks together simultaneously, and forming cross channels features in the network. we show in the experiments section that in some cases, message's meta-data holds an additional information that cannot be extracted just from the text, and when using this information we achieve better performance. furthermore, we demonstrate that our multi-modality block approach outperforms other approaches for injecting the meta data to the the text classifier. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/690.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/690.txt new file mode 100644 index 0000000000000000000000000000000000000000..e057d872726445c7223c616b40b18fd8c65fcc6f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/690.txt @@ -0,0 +1 @@ + the concepts of overfitting and generalization are vital for evaluating machine learning models. 
in this work, we show that the popular recall@k metric depends on the number of classes in the dataset, which limits its ability to estimate generalization. to fix this issue, we propose a new metric, which measures retrieval performance, and, unlike recall@k, estimates generalization. we apply the proposed metric to popular image retrieval methods and provide new insights about deep metric learning generalization. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/691.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/691.txt new file mode 100644 index 0000000000000000000000000000000000000000..aed47c7dba29a6409971d3394fc1510b38c0bc27 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/691.txt @@ -0,0 +1 @@ + we study the problem of sequential prediction in the stochastic setting with an adversary that is allowed to inject clean-label adversarial (or out-of-distribution) examples. algorithms designed to handle purely stochastic data tend to fail in the presence of such adversarial examples, often leading to erroneous predictions. this is undesirable in many high-stakes applications such as medical recommendations, where abstaining from predictions on adversarial examples is preferable to misclassification. on the other hand, assuming fully adversarial data leads to very pessimistic bounds that are often vacuous in practice.to capture this motivation, we propose a new model of sequential prediction that sits between the purely stochastic and fully adversarial settings by allowing the learner to abstain from making a prediction at no cost on adversarial examples. assuming access to the marginal distribution on the non-adversarial examples, we design a learner whose error scales with the vc dimension (mirroring the stochastic setting) of the hypothesis class, as opposed to the littlestone dimension which characterizes the fully adversarial setting. furthermore, we design a learner for vc dimension 1 classes, which works even in the absence of access to the marginal distribution. our key technical contribution is a novel measure for quantifying uncertainty for learning vc classes, which may be of independent interest. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/692.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/692.txt new file mode 100644 index 0000000000000000000000000000000000000000..c831ecdd68541dee03b86792c4c72d0298b37755 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/692.txt @@ -0,0 +1 @@ + in this paper, we investigate whether we could use pruning as a reliable method to boost the generalization ability of the model. we found that existing pruning method like l2 can already offer small improvement on the target domain performance. we further propose a novel pruning scoring method, called dss, designed not to maintain source accuracy as typical pruning work, but to directly enhance the robustness of the model. we conduct empirical experiments to validate our method and demonstrate that it can be even combined with state-of-the-art generalization work like miro(cha et al., 2022) to further boost the performance. on mnist to mnist-m, we could improve the baseline performance by over 5 points by introducing 60% channel sparsity into the model. on domainbed benchmark and state-of-the-art miro, we can further boost its performance by 1 point only by introducing 10% sparsity into the model. 
code can be found at: https://github.com/alexsunnik/pruning-for-better-domain-generalizability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/693.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/693.txt new file mode 100644 index 0000000000000000000000000000000000000000..98919cade2a09d6ca1e1307b87110af79dad95d3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/693.txt @@ -0,0 +1 @@ + a recent alternative for hydrogen transportation is blending it, as a mixture with natural gas, into natural gas pipelines. however, hydrogen embrittlement of the pipe material is a major concern for scientists and gas installation designers seeking to avoid process failures. in this paper, we propose a physics-informed machine learning model to predict the gas pressure on the pipes' inner wall. despite their high-fidelity results, the current pde-based simulators are time- and computationally demanding. using simulation data, we train an ml model to predict the pressure on the pipelines' inner walls, which is a first step for pipeline system surveillance. we found that the physics-based method outperformed the purely data-driven method and satisfies the physical constraints of the gas flow system. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/694.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/694.txt new file mode 100644 index 0000000000000000000000000000000000000000..759d194b1570807538a7fb69de55ef46b8769dda --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/694.txt @@ -0,0 +1 @@ + this paper focuses on studying the impact of climate data and vector larval indices on dengue outbreaks. after a comparative study of the various lstm models, a bidirectional stacked lstm network is selected to analyze the time series climate data and health data collected for the state of tamil nadu (india), for the period 2014 to 2020. prediction accuracy of the model is significantly improved by including the mosquito larval index, an indicator of vbd control measures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/695.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/695.txt new file mode 100644 index 0000000000000000000000000000000000000000..921e990c03019cf1dcfa35adaa916f048d87b7eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/695.txt @@ -0,0 +1 @@ + the rapid spread of fake news is a serious problem calling for ai solutions. we employ a deep learning-based automated detector through a three-level hierarchical attention network (3han) for fast, accurate detection of fake news. 3han has three levels, one each for words, sentences, and the headline, and constructs a news vector: an effective representation of an input news article, by processing an article in a hierarchical bottom-up manner. the headline is known to be a distinguishing feature of fake news, and furthermore, relatively few words and sentences in an article are more important than the rest. 3han gives differential importance to parts of an article, on account of its three layers of attention. by experiments on a large real-world data set, we observe the effectiveness of 3han with an accuracy of 96.77%. unlike some other deep learning models, 3han provides an understandable output through the attention weights given to different parts of an article, which can be visualized through a heatmap to enable further manual fact checking.
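The three-level structure described in 695.txt (word-level, sentence-level, and headline attention) can be sketched roughly in PyTorch as below. The layer sizes, the bidirectional GRU encoders, and the simple concatenation of the body and headline vectors are assumptions made for illustration; this is not the authors' exact 3HAN architecture.

import torch
import torch.nn as nn

class AttnPool(nn.Module):
    # soft attention over a sequence of vectors -> one summary vector
    def __init__(self, dim):
        super().__init__()
        self.score = nn.Sequential(nn.Linear(dim, dim), nn.Tanh(), nn.Linear(dim, 1))
    def forward(self, h):                        # h: (batch, steps, dim)
        a = torch.softmax(self.score(h), dim=1)  # attention weights over steps
        return (a * h).sum(dim=1)

class ThreeLevelHAN(nn.Module):
    def __init__(self, vocab, emb=100, hid=50, classes=2):
        super().__init__()
        self.emb = nn.Embedding(vocab, emb)
        self.word_rnn = nn.GRU(emb, hid, batch_first=True, bidirectional=True)
        self.word_pool = AttnPool(2 * hid)
        self.sent_rnn = nn.GRU(2 * hid, hid, batch_first=True, bidirectional=True)
        self.sent_pool = AttnPool(2 * hid)
        self.out = nn.Linear(4 * hid, classes)   # body vector + headline vector

    def encode_sentence(self, tokens):           # tokens: (batch, words)
        h, _ = self.word_rnn(self.emb(tokens))
        return self.word_pool(h)                 # (batch, 2*hid)

    def forward(self, body, headline):           # body: (batch, sents, words)
        b, s, w = body.shape
        sent_vecs = self.encode_sentence(body.reshape(b * s, w)).reshape(b, s, -1)
        doc_vec = self.sent_pool(self.sent_rnn(sent_vecs)[0])
        head_vec = self.encode_sentence(headline)  # headline treated as one sentence
        return self.out(torch.cat([doc_vec, head_vec], dim=-1))

# tiny smoke test with random token ids
model = ThreeLevelHAN(vocab=1000)
logits = model(torch.randint(0, 1000, (2, 4, 12)), torch.randint(0, 1000, (2, 8)))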
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/696.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/696.txt new file mode 100644 index 0000000000000000000000000000000000000000..10867e180edd0e1932113a19ea96f336b4cd784c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/696.txt @@ -0,0 +1 @@ + novel class discovery (ncd) is the problem of trying to discover novel classes in an unlabeled set, given a labeled set of different but related classes. the majority of ncd methods proposed so far only deal with image data, despite tabular data being among the most widely used type of data in practical applications. to interpret the results of clustering or ncd algorithms, data scientists need to understand the domain-and application-specific attributes of tabular data. this task is difficult and can often only be performed by a domain expert. therefore, this interface allows a domain expert to easily run state-of-the-art algorithms for ncd in tabular data. with minimal knowledge in data science, interpretable results can be generated. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/697.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/697.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb32f4cb1e22e2395f7fba8215df402aa3045d3d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/697.txt @@ -0,0 +1 @@ + traditional machine learning assumes samples in tabular data to be independent and identically distributed (i.i.d). this assumption may miss useful information within and between sample relationships in representation learning. this paper relaxes the i.i.d assumption to learn tabular data representations by incorporating between-sample relationships for the first time using graph neural networks (gnn). we investigate our hypothesis using several gnns and state-of-the-art (sota) deep attention models to learn the between-sample relationship on ten tabular data sets by comparing them to traditional machine learning methods. gnn methods show the best performance on tabular data with large feature-to-sample ratios. our results reveal that attention-based gnn methods outperform traditional machine learning on five data sets and sota deep tabular learning methods on three data sets. between-sample learning via gnn and deep attention methods yield the best classification accuracy on seven of the ten data sets, suggesting that the i.i.d assumption may not always hold for most tabular data sets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/698.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/698.txt new file mode 100644 index 0000000000000000000000000000000000000000..77196ef794a5d2ed217fab99eb6dcb9fb33171e6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/698.txt @@ -0,0 +1 @@ + air pollution is the origination of particulate matter, chemicals, or biological substances that brings pain to either humans or other living creatures or instigates discomfort to the natural habitat and the airspace. hence, air pollution remains one of the paramount environmental issues as far as metropolitan cities are concerned. several air pollution benchmarks are even said to have a negative influence on human health. also, improper detection of air pollution benchmarks results in severe complications for humans and living creatures. 
to address this aspect, a novel technique called, discretized regression and least square support vector (dr-lssv) based air pollution forecasting is proposed. the results indicate that the proposed dr-lssv technique can efficiently enhance air pollution forecasting performance and outperforms the conventional machine learning methods in terms of air pollution forecasting accuracy, air pollution forecasting time, and false positive rate. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/699.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/699.txt new file mode 100644 index 0000000000000000000000000000000000000000..736af2ee80523dc1955ece78aed1ee2275a5d798 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/699.txt @@ -0,0 +1 @@ + hypernetworks, or hypernets in short, are neural networks that generate weights for another neural network, known as the target network. they have emerged as a powerful deep learning technique that allows for greater flexibility, adaptability, dynamism, faster training, information sharing, and model compression etc. hypernets have shown promising results in a variety of deep learning problems, including continual learning, causal inference, transfer learning, weight pruning, uncertainty quantification, zero-shot learning, natural language processing, and reinforcement learning etc. despite their success across different problem settings, currently, there is no review available to inform the researchers about the developments and to help in utilizing hypernets. to fill this gap, we review the progress in hypernets. we present an illustrative example to train deep neural networks using hypernets and propose categorizing hypernets based on five design criteria as inputs, outputs, variability of inputs and outputs, and architecture of hypernets. we also review applications of hypernets across different deep learning problem settings, followed by a discussion of general scenarios where hypernets can be effectively employed. finally, we discuss the challenges and future directions that remain under-explored in the field of hypernets. we believe that hypernetworks have the potential to revolutionize the field of deep learning. they offer a new way to design and train neural networks, and they have the potential to improve the performance of deep learning models on a variety of tasks. through this review, we aim to inspire further advancements in deep learning through hypernetworks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/7.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/7.txt new file mode 100644 index 0000000000000000000000000000000000000000..68a2b639f67b8885439f98cf875d970e966dedfa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/7.txt @@ -0,0 +1 @@ + machine learning has achieved great success in electroencephalogram (eeg) based brain-computer interfaces (bcis). most existing bci research focused on improving its accuracy, but few had considered its security. recent studies, however, have shown that eeg-based bcis are vulnerable to adversarial attacks, where small perturbations added to the input can cause misclassification. detection of adversarial examples is crucial to both the understanding of this phenomenon and the defense. this paper, for the first time, explores adversarial detection in eeg-based bcis. 
experiments on two eeg datasets using three convolutional neural networks were performed to verify the performance of multiple detection approaches. we showed that both white-box and black-box attacks can be detected, and the former are easier to detect. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/70.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/70.txt new file mode 100644 index 0000000000000000000000000000000000000000..93e5d914a781d36c5743feacdc72133308a62cdc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/70.txt @@ -0,0 +1 @@ + we discuss our simulation tool, fintech-kmc, which is designed to generate synthetic data for machine learning model development and testing. fintech-kmc is an agent-based model driven by a kinetic monte carlo (a.k.a. continuous time monte carlo) engine which simulates the behaviour of customers using an online digital financial platform. the tool provides an interpretable, reproducible, and realistic way of generating synthetic data which can be used to validate and test ai/ml models and pipelines to be used in real-world customer-facing financial applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/700.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/700.txt new file mode 100644 index 0000000000000000000000000000000000000000..bdc86cd477ab04a4ee8e07e627138b3727bd4339 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/700.txt @@ -0,0 +1 @@ + in this paper, we investigate the streaming bandits problem, wherein the learner aims to minimize regret by dealing with online arriving arms and sublinear arm memory. we establish the tight worst-case regret lower bound for any algorithm with a time horizon $T$, number of arms $K$, and number of passes $B$. the result reveals a separation between the stochastic bandits problem in the classical centralized setting and the streaming setting with bounded arm memory. notably, in comparison to the well-known $\Omega(\sqrt{KT})$ lower bound, an additional double logarithmic factor is unavoidable for any streaming bandits algorithm with sublinear memory permitted. furthermore, we establish the first instance-dependent lower bound of $\Omega\big(T^{1/(B+1)} \sum_{\Delta_x > 0} \mu^* / \Delta_x\big)$ for streaming bandits. these lower bounds are derived through a unique reduction from the regret-minimization setting to the sample complexity analysis for a sequence of $\varepsilon$-optimal arms identification tasks, which may be of independent interest. to complement the lower bound, we also provide a multi-pass algorithm that achieves a regret upper bound of $\tilde{O}\big((TB)^{\alpha} K^{1-\alpha}\big)$ using constant arm memory. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/701.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/701.txt new file mode 100644 index 0000000000000000000000000000000000000000..a241ac3f23f8c0663928aacb82ee8600c096a5e1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/701.txt @@ -0,0 +1 @@ + deep learning (dl) models have seen increased attention for time series forecasting, yet the application on cyber-physical systems (cps) is hindered by the lack of robustness of these methods. thus, this study evaluates the robustness and generalization performance of dl architectures on multivariate time series data from cps.
our investigation focuses on the models' ability to handle a range of perturbations, such as sensor faults and noise, and assesses their impact on overall performance. furthermore, we test the generalization and transfer learning capabilities of these models by exposing them to out-of-distribution (ood) samples. these include deviations from standard system operations, while the core dynamics of the underlying physical system are preserved. additionally, we test how well the models respond to several data augmentation techniques, including added noise and time warping. our experimental framework utilizes a simulated three-tank system, proposed as a novel benchmark for evaluating the robustness and generalization performance of dl algorithms in cps data contexts. the findings reveal that certain dl model architectures and training techniques exhibit superior effectiveness in handling ood samples and various perturbations. these insights have significant implications for the development of dl models that deliver reliable and robust performance in real-world cps applications. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/702.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/702.txt new file mode 100644 index 0000000000000000000000000000000000000000..c2a0fc3637b61a9bb4e3353ca43fbebf539a29e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/702.txt @@ -0,0 +1 @@ + data valuation has become an increasingly significant discipline in data science due to the economic value of data. in the context of machine learning (ml), data valuation methods aim to equitably measure the contribution of each data point to the utility of an ml model. one prevalent method is shapley value, which helps identify data points that are beneficial or detrimental to an ml model. however, traditional shapley-based data valuation methods may not effectively distinguish between beneficial and detrimental training data points for probabilistic classifiers. in this paper, we propose probabilistic shapley (p-shapley) value by constructing a probability-wise utility function that leverages the predicted class probabilities of probabilistic classifiers rather than binarized prediction results in the traditional shapley value. we also offer several activation functions for confidence calibration to effectively quantify the marginal contribution of each data point to the probabilistic classifiers. extensive experiments on four realworld datasets demonstrate the effectiveness of our proposed p-shapley value in evaluating the importance of data for building a high-usability and trustworthy ml model. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/703.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/703.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae1321ba6f857dbdd9e2c1d8641a460155b34aa2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/703.txt @@ -0,0 +1 @@ + multi-agent reinforcement learning (marl) has been widely applied in many fields such as smart traffic and unmanned aerial vehicles. however, most marl algorithms are vulnerable to adversarial perturbations on agent states. robustness testing for a trained model is an essential step for confirming the trustworthiness of the model against unexpected perturbations. this work proposes a novel robustness testing framework for marl that attacks states of critical agents (rtca). 
the rtca has two innovations: 1) a differential evolution (de) based method to select critical agents as victims and to advise the worst-case joint actions on them; and 2) a team cooperation policy evaluation method employed as the objective function for the optimization of de. then, adversarial state perturbations of the critical agents are generated based on the worst-case joint actions. this is the first robustness testing framework with varying victim agents. rtca demonstrates outstanding performance in terms of the number of victim agents and destroying cooperation policies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/704.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/704.txt new file mode 100644 index 0000000000000000000000000000000000000000..b8636ea4eeb6c1af49edbeaa5591f073b3f54470 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/704.txt @@ -0,0 +1 @@ + privately generating synthetic data from a table is an important brick of a privacy-first world. we propose and investigate a simple approach of treating each row in a table as a sentence and training a language model with differential privacy. we show this approach obtains competitive results in modelling tabular data across multiple datasets, even at small scales that favor alternative methods based on marginal distributions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/705.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/705.txt new file mode 100644 index 0000000000000000000000000000000000000000..a7c56b0737b19660467a145c8b5af2a3988fb19b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/705.txt @@ -0,0 +1 @@ + combinatorial optimization problems require an exhaustive search to find the optimal solution. a convenient approach to solving combinatorial optimization tasks in the form of mixed integer linear programs is branch-and-bound. branchand-bound solver splits a task into two parts dividing the domain of an integer variable, then it solves them recursively, producing a tree of nested sub-tasks. the efficiency of the solver depends on the branchning heuristic used to select a variable for splitting. in the present work, we propose a reinforcement learning method that can efficiently learn the branching heuristic. we view the variable selection task as a tree markov decision process, prove that the bellman operator adapted for the tree markov decision process is contracting in mean, and propose a modified learning objective for the reinforcement learning agent. our agent requires less training data and produces smaller trees compared to previous reinforcement learning methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/706.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/706.txt new file mode 100644 index 0000000000000000000000000000000000000000..202d15f182d6c7dc0a191f881dbcdcb6a9ba0539 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/706.txt @@ -0,0 +1 @@ + the goal of this paper is to revisit kernel principal component analysis (kpca) through dualization of a difference of convex functions. this allows to naturally extend kpca to multiple objective functions and leads to efficient gradient-based algorithms avoiding the expensive svd of the gram matrix. 
particularly, we consider objective functions that can be written as moreau envelopes, demonstrating how to promote robustness and sparsity within the same framework. the proposed method is evaluated on synthetic and realworld benchmarks, showing significant speedup in kpca training time as well as highlighting the benefits in terms of robustness and sparsity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/707.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/707.txt new file mode 100644 index 0000000000000000000000000000000000000000..b324f70ae2ed2898b3ee74def946d6bab86562a7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/707.txt @@ -0,0 +1 @@ + with significant advances in generative ai, new technologies are rapidly being deployed with generative components. generative models are typically trained on large datasets, resulting in model behaviors that can mimic the worst of the content in the training data. responsible deployment of generative technologies requires content moderation strategies, such as safety input and output filters. here, we provide a theoretical framework for conceptualizing responsible content moderation of text-to-image generative technologies, including a demonstration of how to empirically measure the constructs we enumerate. we define and distinguish the concepts of safety, fairness, and metric equity, and enumerate example harms that can come in each domain. we then provide a demonstration of how the defined harms can be quantified. we conclude with a summary of how the style of harms quantification we demonstrate enables data-driven content moderation decisions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/708.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/708.txt new file mode 100644 index 0000000000000000000000000000000000000000..81404fce5d0e63cb9bdfb29395f5dae3b58a99b0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/708.txt @@ -0,0 +1 @@ + we take a formal approach to the explainability problem of machine learning systems. we argue against the practice of interpreting black-box models via attributing scores to input components due to inherently conflicting goals of attribution-based interpretation. we prove that no attribution algorithm satisfies specificity, additivity, completeness, and baseline invariance. we then formalize the concept, sound explanation, that has been informally adopted in prior work. a sound explanation entails providing sufficient information to causally explain the predictions made by a system. finally, we present the application of feature selection as a sound explanation for cancer prediction models to cultivate trust among clinicians. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/709.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/709.txt new file mode 100644 index 0000000000000000000000000000000000000000..7987f88e2b9c6348321aa8e7a8a70ab5fa942531 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/709.txt @@ -0,0 +1 @@ + we investigate novel random graph embeddings that can be computed in expected polynomial time and that are able to distinguish all non-isomorphic graphs in expectation. previous graph embeddings have limited expressiveness and either cannot distinguish all graphs or cannot be computed efficiently for every graph. 
to be able to approximate arbitrary functions on graphs, we are interested in efficient alternatives that become arbitrarily expressive with increasing resources. our approach is based on lovász' characterisation of graph isomorphism through an infinite dimensional vector of homomorphism counts. our empirical evaluation shows competitive results on several benchmark graph learning tasks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/71.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/71.txt new file mode 100644 index 0000000000000000000000000000000000000000..4dc258042f328650b79f49d3089c8366bb590058 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/71.txt @@ -0,0 +1 @@ + many real-world reinforcement learning (rl) problems necessitate learning complex, temporally extended behavior that may only receive reward signal when the behavior is completed. if the reward-worthy behavior is known, it can be specified in terms of a non-markovian reward function-a function that depends on aspects of the state-action history, rather than just the current state and action. such reward functions yield sparse rewards, necessitating an inordinate number of experiences to find a policy that captures the reward-worthy pattern of behavior. recent work has leveraged knowledge representation (kr) to provide a symbolic abstraction of aspects of the state that summarize reward-relevant properties of the state-action history and support learning a markovian decomposition of the problem in terms of an automaton over the kr. providing such a decomposition has been shown to vastly improve learning rates, especially when coupled with algorithms that exploit automaton structure. nevertheless, such techniques rely on a priori knowledge of the kr. in this work, we explore how to automatically discover useful state abstractions that support learning automata over the state-action history. the result is an end-to-end algorithm that can learn optimal policies with significantly fewer environment samples than state-of-the-art rl on simple non-markovian domains. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/710.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/710.txt new file mode 100644 index 0000000000000000000000000000000000000000..b6d6f93b2913bc51e8f97325c43b7ac298738da1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/710.txt @@ -0,0 +1 @@ + weighted outlier detection is a method for identifying unusual or anomalous data points in a dataset, which can be caused by various factors like human error, fraud, or equipment malfunctions. detecting outliers can reveal vital information about system faults, fraudulent activities, and patterns in the data, assisting experts in addressing the root causes of these anomalies. however, creating a model of normal data patterns to identify outliers can be challenging due to the nature of input data, labeled data availability, and specific requirements of the problem. this article proposed the wepamadm-outlier detection with distinct mass data mining domain, demonstrating that such techniques are domain-dependent and usually developed for specific problem formulations. nevertheless, similar domains can adapt solutions with modifications. 
this work also investigates the significance of data modeling in outlier detection techniques in surveillance, fault detection, and trend analysis, also referred to as novelty detection, a semisupervised task where the algorithm learns to recognize abnormality while being taught the normal class. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/711.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/711.txt new file mode 100644 index 0000000000000000000000000000000000000000..09245ace4801cf20a4f5f3407fc0f96db6c805eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/711.txt @@ -0,0 +1 @@ + in the context of classification problems, deep learning (dl) approaches represent state of art. many dl approaches are based on variations of standard multi-layer feed-forward neural networks. these are also referred to as deep networks. the basic idea is that each hidden neural layer accomplishes a data transformation which is expected to make the data representation "somewhat more linearly separable" than the previous one to obtain a final data representation which is as linearly separable as possible. however, determining the appropriate neural network parameters that can perform these transformations is a critical problem. in this paper, we investigate the impact on deep network classifier performances of a training approach favouring solutions where data representations at the hidden layers have a higher degree of linear separability between the classes with respect to standard methods. to this aim, we propose a neural network architecture which induces an error function involving the outputs of all the network layers. although similar approaches have already been partially discussed in the past literature, here we propose a new architecture with a novel error function and an extensive experimental analysis. this experimental analysis was made in the context of image classification tasks considering four widely used datasets. the results show that our approach improves the accuracy on the test set in all the considered cases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/712.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/712.txt new file mode 100644 index 0000000000000000000000000000000000000000..64bf1d7481997bdce2614af5b7b27a50949758f3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/712.txt @@ -0,0 +1 @@ + limited availability of labeled physiological data often prohibits the use of powerful supervised deep learning models in the biomedical machine intelligence domain. we approach this problem and propose a novel encoding framework that relies on self-supervised learning with momentum contrast to learn representations from multivariate time-series of various physiological domains without needing labels. our model uses a transformer architecture that can be easily adapted to classification problems by optimizing a linear output classification layer. we experimentally evaluate our framework using two publicly available physiological datasets from different domains, i.e., human activity recognition from embedded inertial sensory and emotion recognition from electroencephalography. we show that our self-supervised learning approach can indeed learn discriminative features which can be exploited in downstream classification tasks. 
our work enables the development of domain-agnostic intelligent systems that can effectively analyze multivariate time-series data from physiological domains. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/713.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/713.txt new file mode 100644 index 0000000000000000000000000000000000000000..1355a91bf326cbc38c2dbd2e85db62d1fe3ef90d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/713.txt @@ -0,0 +1 @@ + we train a small message-passing graph neural network to predict hamiltonian cycles on erdős-rényi random graphs in a critical regime. it outperforms existing hand-crafted heuristics after about 2.5 hours of training on a single gpu. our findings encourage an alternative approach to solving computationally demanding (np-hard) problems arising in practice. instead of devising a heuristic by hand, one can train it end-to-end using a neural network. this has several advantages. firstly, it is relatively quick and requires little problem-specific knowledge. secondly, the network can adjust to the distribution of training samples, improving the performance on the most relevant problem instances. the model is trained using supervised learning on artificially created problem instances; this training procedure does not use an existing solver to produce the supervised signal. finally, the model generalizes well to larger graph sizes and retains reasonable performance even on graphs eight times the original size. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/714.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/714.txt new file mode 100644 index 0000000000000000000000000000000000000000..2677352d8402f02cd777bc99374714a578bab8b8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/714.txt @@ -0,0 +1 @@ + this paper presents a personalized graph federated learning (pgfl) framework in which distributedly connected servers and their respective edge devices collaboratively learn device or cluster-specific models while maintaining the privacy of every individual device. the proposed approach exploits similarities among different models to provide a more relevant experience for each device, even in situations with diverse data distributions and disproportionate datasets. furthermore, to ensure a secure and efficient approach to collaborative personalized learning, we study a variant of the pgfl implementation that utilizes differential privacy, specifically zero-concentrated differential privacy, where a noise sequence perturbs model exchanges. our mathematical analysis shows that the proposed privacy-preserving pgfl algorithm converges to the optimal cluster-specific solution for each cluster in linear time. it also shows that exploiting similarities among clusters leads to an alternative output whose distance to the original solution is bounded, and that this bound can be adjusted by modifying the algorithm's hyperparameters. further, our analysis shows that the algorithm ensures local differential privacy for all clients in terms of zero-concentrated differential privacy. finally, the performance of the proposed pgfl algorithm is examined by performing numerical experiments in the context of regression and classification using synthetic data and the mnist dataset. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/715.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/715.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b8f5a6832e6d499d67e66d79081c80b317d69fe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/715.txt @@ -0,0 +1 @@ + researchers building behavioral models, such as behavioral game theorists, use experimental data to evaluate predictive models of human behavior. however, there is little agreement about which loss function should be used in evaluations, with error rate, negative log-likelihood, cross-entropy, brier score, and squared l2 error all being common choices. we attempt to offer a principled answer to the question of which loss functions should be used for this task, formalizing axioms that we argue loss functions should satisfy. we construct a family of loss functions, which we dub "diagonal bounded bregman divergences", that satisfy all of these axioms. these rule out many loss functions used in practice, but notably include squared l2 error; we thus recommend its use for evaluating behavioral models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/716.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/716.txt new file mode 100644 index 0000000000000000000000000000000000000000..e7eb5fc78ed94273a4947e96eff0ce516cf28a52 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/716.txt @@ -0,0 +1 @@ + finding optimal channel dimensions (i.e., the number of filters in dnn layers) is essential to design dnns that perform well under computational resource constraints. recent work in neural architecture search aims at automating the optimization of the dnn model implementation. however, existing neural architecture search methods for channel dimensions rely on fixed search spaces, which prevents achieving an efficient and fully automated solution. in this work, we propose a novel differentiable neural architecture search method with an efficient dynamic channel allocation algorithm to enable a flexible search space for channel dimensions. we show that the proposed framework is able to find dnn architectures that are equivalent to previous methods in task accuracy and inference latency for the cifar-10 dataset with an improvement of 1.3-1.7× in gpu-hours and 1.5-1.7× in the memory requirements during the architecture search stage. moreover, the proposed framework does not require a well-engineered search space a priori, which is an important step towards fully automated design of dnn architectures. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/717.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/717.txt new file mode 100644 index 0000000000000000000000000000000000000000..269c89916604f634778008631380d0e8bd079d4b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/717.txt @@ -0,0 +1 @@ + machine learning is frequently used in affective computing, but presents challenges due to the opacity of state-of-the-art machine learning methods. because of the impact affective machine learning systems may have on an individual's life, it is important that models be made transparent to detect and mitigate biased decision making. in this regard, affective machine learning could benefit from the recent advancements in explainable artificial intelligence (xai) research. 
we perform a structured literature review to examine the use of interpretability in the context of affective machine learning. we focus on studies using audio, visual, or audiovisual data for model training and identified 29 research articles. our findings show an emergence of the use of interpretability methods in the last five years. however, their use is currently limited regarding the range of methods used, the depth of evaluations, and the consideration of use-cases. we outline the main gaps in the research and provide recommendations for researchers that aim to implement interpretable methods for affective machine learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/718.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/718.txt new file mode 100644 index 0000000000000000000000000000000000000000..01c70299599657f5abe7869301903e6bc4ff9374 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/718.txt @@ -0,0 +1 @@ + while there exist many ways to deploy machine learning models on microcontrollers, it is non-trivial to choose the optimal combination of frameworks and targets for a given application. thus, automating the end-to-end benchmarking flow is of high relevance nowadays. a tool called mlonmcu is proposed in this paper and demonstrated by benchmarking the state-of-the-art tinyml frameworks tflite for microcontrollers and tvm effortlessly with a large number of configurations in a low amount of time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/719.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/719.txt new file mode 100644 index 0000000000000000000000000000000000000000..32a280fadc0cf230603af0e2651b25771e9920dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/719.txt @@ -0,0 +1 @@ + the bandits with knapsack (bwk) framework models online decision-making problems in which an agent makes a sequence of decisions subject to resource consumption constraints. the traditional model assumes that each action consumes a non-negative amount of resources and the process ends when the initial budgets are fully depleted. we study a natural generalization of the bwk framework which allows non-monotonic resource utilization, i.e., resources can be replenished by a positive amount. we propose a best-of-both-worlds primal-dual template that can handle any online learning problem with replenishment for which a suitable primal regret minimizer exists. in particular, we provide the first positive results for the case of adversarial inputs by showing that our framework guarantees a constant competitive ratio α when b = ω(t ) or when the possible per-round replenishment is a positive constant. moreover, under a stochastic input model, our algorithm yields an instance-independent õ(t 1/2 ) regret bound which complements existing instance-dependent bounds for the same setting. finally, we provide applications of our framework to some economic problems of practical relevance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/72.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/72.txt new file mode 100644 index 0000000000000000000000000000000000000000..c051bdcad819c2da7fda48337d02382e3f5890b1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/72.txt @@ -0,0 +1 @@ + the forecast of electrical loads is essential for the planning and operation of the power system. 
recently, advances in deep learning have enabled more accurate forecasts. however, deep neural networks are prone to adversarial attacks. although most of the literature focuses on integrity-based attacks, this paper proposes availability-based adversarial attacks, which can be more easily implemented by attackers. for each forecast instance, the availability attack position is optimally solved by mixed-integer reformulation of the artificial neural network. to tackle this attack, an adversarial training algorithm is proposed. in simulation, a realistic load forecasting dataset is considered and the attack performance is compared to the integrity-based attack. meanwhile, the adversarial training algorithm is shown to significantly improve robustness against availability attacks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/720.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/720.txt new file mode 100644 index 0000000000000000000000000000000000000000..befd43a3297cc563483346038d6498228a124b12 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/720.txt @@ -0,0 +1 @@ + the trade-off between regret and computational cost is a fundamental problem for online kernel regression, and previous algorithms that worked on this trade-off cannot keep optimal regret bounds at a sublinear computational complexity. in this paper, we propose two new algorithms, aogd-ald and nons-ald, which can keep nearly optimal regret bounds at a sublinear computational complexity, and give sufficient conditions under which our algorithms work. both algorithms dynamically maintain a group of nearly orthogonal bases used to approximate the kernel mapping, and keep nearly optimal regret bounds by controlling the approximate error. the number of bases depends on the approximate error and the decay rate of eigenvalues of the kernel matrix. if the eigenvalues decay exponentially, then aogd-ald and nons-ald separately achieve a regret of o( l(f )) and o(d_eff(µ) ln t) at a computational complexity in o(ln² t). if the eigenvalues decay polynomially with degree p ≥ 1, then our algorithms keep the same regret bounds at a computational complexity in o(t) in the case of p > 4 and p ≥ 10, respectively. l(f) is the cumulative losses of f and d_eff(µ) is the effective dimension of the problem. the two regret bounds are nearly optimal and are not comparable. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/721.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/721.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/722.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/722.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/723.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/723.txt new file mode 100644 index 0000000000000000000000000000000000000000..bef786a4c2f3a55c98c67a816ba5aa0ddf1c6f22 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/723.txt @@ -0,0 +1 @@ + active learning algorithms have been an integral part of recent advances in artificial intelligence. however, the research in the field is widely varying and lacks an overall organizing lens. 
we outline a markovian formalism for the field of active learning and survey the literature to demonstrate the organizing capability of our proposed formalism. our formalism takes a partially observable markovian system approach to the active learning process as a whole. we specifically outline how querying, dataset augmentation, reward updates, and other aspects of active learning can be viewed as a transition between meta-states in a markovian system, and give direction into how other aspects of active learning can fit into our formalism. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/724.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/724.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f662a5ec9cd495328b006ad899cb6f7dee1b7f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/724.txt @@ -0,0 +1 @@ + in this paper, the concept of a machine learning-based hands-on detection algorithm is proposed. the hand detection is implemented on the hardware side using a capacitive method. a sensor mat in the steering wheel detects a change in capacitance as soon as the driver's hands come closer. the evaluation and final decision about hands-on or hands-off situations is done using machine learning. in order to find a suitable machine learning model, different models are implemented and evaluated. based on accuracy, memory consumption and computational effort the most promising one is selected and ported to a microcontroller. the entire system is then evaluated in terms of reliability and response time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/725.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/725.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c2cf87cda2659eaeaf9aec6910804a1ef2123b6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/725.txt @@ -0,0 +1 @@ + we derive the first finite-time logarithmic bayes regret upper bounds for bayesian bandits. in a multi-armed bandit, we obtain o(c_∆ log n) and o(c_h log² n) upper bounds for an upper confidence bound algorithm, where c_h and c_∆ are constants depending on the prior distribution and the gaps of bandit instances sampled from it, respectively. the latter bound asymptotically matches the lower bound of lai (1987). our proofs are a major technical departure from prior works, while being simple and general. to show the generality of our techniques, we apply them to linear bandits. our results provide insights on the value of prior in the bayesian setting, both in the objective and as side information given to the learner. they significantly improve upon existing õ(√n) bounds, which have become standard in the literature despite the logarithmic lower bound of lai (1987). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/726.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/726.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2edd80b274443b843b9d7eb6ec8815536984ec9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/726.txt @@ -0,0 +1 @@ + preprocessing of information is an essential step for the effective design of machine learning applications. feature construction and selection are powerful techniques used for this aim. 
in this paper, a feature selection and construction approach is presented for the detection of wind turbine generator heating faults. data were collected from supervisory control and data acquisition (scada) system of a wind turbine. the original features directly collected from the data collection system consist of wind characteristics, operational data, temperature measurements and status information. in addition to these original features, new features were created in the feature construction step to obtain information that can be more powerful indications of the faults. after the construction of new features, a hybrid feature selection technique was implemented to find out the most relevant features in the overall set to increase the classification accuracy and decrease the computational burden. feature selection step consists of filter and wrapper-based parts. filter based feature selection was applied to exclude the features which are non-discriminative and wrapper-based method was used to determine the final features considering the redundancies and mutual relations amongst them. artificial neural networks were used both in the detection phase and as the induction algorithm of the wrapper-based feature selection part. the results show that, the proposed approach contributes to the fault detection system to be more reliable especially in terms of reducing the number of false fault alarms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/727.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/727.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa527e6b8d20f580f2c330d1d11b939f565f60d8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/727.txt @@ -0,0 +1 @@ + hypergraphs are a powerful abstraction for representing higher-order interactions between entities of interest. to exploit these relationships in making downstream predictions, a variety of hypergraph neural network architectures have recently been proposed, in large part building upon precursors from the more traditional graph neural network (gnn) literature.somewhat differently, in this paper we begin by presenting an expressive family of parameterized, hypergraph-regularized energy functions. we then demonstrate how minimizers of these energies effectively serve as node embeddings that, when paired with a parameterized classifier, can be trained end-to-end via a supervised bilevel optimization process. later, we draw parallels between the implicit architecture of the predictive models emerging from the proposed bilevel hypergraph optimization, and existing gnn architectures in common use. empirically, we demonstrate stateof-the-art results on various hypergraph node classification benchmarks. code is available at https://github.com/yxzwang/phenomnn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/728.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/728.txt new file mode 100644 index 0000000000000000000000000000000000000000..21c0e986b292e5101a95d0882a29c64ee2cbbab7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/728.txt @@ -0,0 +1 @@ + causal inference of exact individual treatment outcomes in the presence of hidden confounders is rarely possible. recent work has extended prediction intervals with finite-sample guarantees to partially identifiable causal outcomes, by means of a sensitivity model for hidden confounding. 
in deep learning, predictors can exploit their inductive biases for better generalization out of sample. we argue that the structure inherent to a deep ensemble should inform a tighter partial identification of the causal outcomes that they predict. we therefore introduce an approach termed caus-modens, for characterizing causal outcome intervals by modulated ensembles. we present a simple approach to partial identification using existing causal sensitivity models and show empirically that caus-modens gives tighter outcome intervals, as measured by the necessary interval size to achieve sufficient coverage. the last of our three diverse benchmarks is a novel usage of gpt-4 for observational experiments with unknown but probeable ground truth. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/729.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/729.txt new file mode 100644 index 0000000000000000000000000000000000000000..7c869355e6ff5b827321aa72c6e0b0e23d2a0bec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/729.txt @@ -0,0 +1 @@ + recently, offline rl algorithms have been proposed that remain adaptive at runtime. for example, the lion algorithm provides the user with an interface to set the trade-off between behavior cloning and optimality w.r.t. the estimated return at runtime. experts can then use this interface to adapt the policy behavior according to their preferences and find a good trade-off between conservatism and performance optimization. since expert time is precious, we extend the methodology with an autopilot that automatically finds the correct parameterization of the trade-off, yielding a new algorithm which we term autolion. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/73.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/73.txt new file mode 100644 index 0000000000000000000000000000000000000000..bece348b4b8230386e78945b7e245da60826c995 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/73.txt @@ -0,0 +1 @@ + in this paper, we formulate inverse reinforcement learning (irl) as an expert-learner interaction whereby the optimal performance intent of an expert or target agent is unknown to a learner agent. the learner observes the states and controls of the expert and hence seeks to reconstruct the expert's cost function intent and thus mimics the expert's optimal response. next, we add non-cooperative disturbances that seek to disrupt the learning and stability of the learner agent. this leads to the formulation of a new interaction we call zero-sum game irl. we develop a framework to solve the zero-sum game irl problem that is a modified extension of rl policy iteration (pi) to allow unknown expert performance intentions to be computed and non-cooperative disturbances to be rejected. the framework has two parts: a value function and control action update based on an extension of pi, and a cost function update based on standard inverse optimal control. then, we eventually develop an off-policy irl algorithm that does not require knowledge of the expert and learner agent dynamics and performs singleloop learning. rigorous proofs and analyses are given. finally, simulation experiments are presented to show the effectiveness of the new approach. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/730.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/730.txt new file mode 100644 index 0000000000000000000000000000000000000000..028d158548cd7b7c23061c1436ccfe6fdbffbfb2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/730.txt @@ -0,0 +1 @@ + energy time-series analysis describes the process of analyzing past energy observations and possibly external factors so as to predict the future. different tasks are involved in the general field of energy time-series analysis and forecasting, with electric load demand forecasting, personalized energy consumption forecasting, as well as renewable energy generation forecasting being among the most common ones. following the exceptional performance of deep learning (dl) in a broad area of vision tasks, dl models have successfully been utilized in time-series forecasting tasks. this paper aims to provide insight into various dl methods geared towards improving the performance in energy time-series forecasting tasks, with special emphasis in greek energy market, and equip the reader with the necessary knowledge to apply these methods in practice. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/731.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/731.txt new file mode 100644 index 0000000000000000000000000000000000000000..251db881c3f5d398f72250db08f33b9b83ee5f4d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/731.txt @@ -0,0 +1 @@ + partial-label learning is a popular weakly supervised learning setting that allows each training example to be annotated with a set of candidate labels. previous studies on partial-label learning only focused on the classification setting where candidate labels are all discrete, which cannot handle continuous labels with real values. in this paper, we provide the first attempt to investigate partial-label regression, where each training example is annotated with a set of real-valued candidate labels. to solve this problem, we first propose a simple baseline method that takes the average loss incurred by candidate labels as the predictive loss. the drawback of this method lies in that the loss incurred by the true label may be overwhelmed by other false labels. to overcome this drawback, we propose an identification method that takes the least loss incurred by candidate labels as the predictive loss. we further improve it by proposing a progressive identification method to differentiate candidate labels using progressively updated weights for incurred losses. we prove that the latter two methods are model-consistent and provide convergence analyses. our proposed methods are theoretically grounded and can be compatible with any models, optimizers, and losses. experiments validate the effectiveness of our proposed methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/732.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/732.txt new file mode 100644 index 0000000000000000000000000000000000000000..a2db6cab859ce39f327ef4f689f95724dffcab77 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/732.txt @@ -0,0 +1 @@ + in recent years, machine learning algorithms, in particular supervised learning techniques, have been shown to be very effective in solving regression problems. 
we compare the performance of a newly proposed regression algorithm against four conventional machine learning algorithms, namely decision trees, random forest, k-nearest neighbours and xgboost. the proposed algorithm was presented in detail in a previous paper but detailed comparisons were not included. we do an in-depth comparison, using the mean absolute error (mae) as the performance metric, on a diverse set of datasets to illustrate the great potential and robustness of the proposed approach. the reader is free to replicate our results since we have provided the source code in a github repository while the datasets are publicly available. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/733.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/733.txt new file mode 100644 index 0000000000000000000000000000000000000000..4de229639c10bfba94ccc9792ebf92caf65d3c90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/733.txt @@ -0,0 +1 @@ + we study pure exploration with infinitely many bandit arms generated i.i.d. from an unknown distribution. our goal is to efficiently select a single high-quality arm whose average reward is, with probability 1 - δ, within ε of being among the top η-fraction of arms; this is a natural adaptation of the classical pac guarantee for infinite action sets. we consider both the fixed confidence and fixed budget settings, aiming respectively for minimal expected and fixed sample complexity. for fixed confidence, we give an algorithm with expected sample complexity o(log(1/η) log(1/δ) / (ηε²)). this is optimal except for the log(1/η) factor, and the δ-dependence closes a quadratic gap in the literature. for fixed budget, we show the asymptotically optimal sample complexity as δ → 0 is c⁻¹ log(1/δ) (log log(1/δ))² to leading order. equivalently, the optimal failure probability given exactly n samples decays as exp(-cn / log² n), up to a factor 1 ± o_n(1) inside the exponent. the constant c depends explicitly on the problem parameters (including the unknown arm distribution) through a certain fisher information distance. even the strictly super-linear dependence on log(1/δ) was not known and resolves a question of . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/734.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/734.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf51c157e36419ba427372a4a0aba465a4b494aa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/734.txt @@ -0,0 +1 @@ + we study the sample complexity of learning relu neural networks from the point of view of generalization. given norm constraints on the weight matrices, a common approach is to estimate the rademacher complexity of the associated function class. prior work obtained a bound independent of the network size (scaling with a product of frobenius norms) except for a factor of the square-root depth. we give a refinement which often has no explicit depth-dependence at all. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/735.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/735.txt new file mode 100644 index 0000000000000000000000000000000000000000..be0446f2c1d9b52b4623d2cc98cac1c7b6a3b115 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/735.txt @@ -0,0 +1 @@ + the intelligent transportation system (its) is an important part of modern transportation infrastructure, employing a combination of communication technology, information processing and control systems to manage transportation networks. this integration of various components such as roads, vehicles, and communication systems, is expected to improve efficiency and safety by providing better information, services, and coordination of transportation modes. in recent years, graph-based machine learning has become an increasingly important research focus in the field of its aiming at the development of complex, datadriven solutions to address various its-related challenges. this chapter presents background information on the key technical challenges for its design, along with a review of research methods ranging from classic statistical approaches to modern machine learning and deep learning-based approaches. specifically, we provide an in-depth review of graph-based machine learning methods, including basic concepts of graphs, graph data representation, graph neural network architectures and their relation to its applications. additionally, two case studies of graphbased its applications proposed in our recent work are presented in detail to demonstrate the potential of graph-based machine learning in the its domain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/736.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/736.txt new file mode 100644 index 0000000000000000000000000000000000000000..588e3b3dfa1531e3e3163034a62fbfdcbcd1fbd0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/736.txt @@ -0,0 +1 @@ + we consider deep neural networks with a lipschitz continuous activation function and with weight matrices of variable widths. we establish a uniform convergence analysis framework in which sufficient conditions on weight matrices and bias vectors together with the lipschitz constant are provided to ensure uniform convergence of the deep neural networks to a meaningful function as the number of their layers tends to infinity. in the framework, special results on uniform convergence of deep neural networks with a fixed width, bounded widths and unbounded widths are presented. in particular, as convolutional neural networks are special deep neural networks with weight matrices of increasing widths, we put forward conditions on the mask sequence which lead to uniform convergence of resulting convolutional neural networks. the lipschitz continuity assumption on the activation functions allows us to include in our theory most of commonly used activation functions in applications. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/737.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/737.txt new file mode 100644 index 0000000000000000000000000000000000000000..247e455c8d5e43b672e4f356c5e6a96794f37de1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/737.txt @@ -0,0 +1 @@ + in the quest to enhance the efficiency and bioplausibility of training deep neural networks, feedback alignment (fa), which replaces the backward pass weights with random matrices in the training process, has emerged as an alternative to traditional backpropagation. while the appeal of fa lies in its circumvention of computational challenges and its plausible biological alignment, the theoretical understanding of this learning rule remains partial. this paper uncovers a set of conservation laws underpinning the learning dynamics of fa, revealing intriguing parallels between fa and gradient descent (gd). our analysis reveals that fa harbors implicit biases akin to those exhibited by gd, challenging the prevailing narrative that these learning algorithms are fundamentally different. moreover, we demonstrate that these conservation laws elucidate sufficient conditions for layer-wise alignment with feedback matrices in relu networks. we further show that this implies over-parameterized two-layer linear networks trained with fa converge to minimumnorm solutions. the implications of our findings offer avenues for developing more efficient and biologically plausible alternatives to backpropagation through an understanding of the principles governing learning dynamics in deep networks.as an alternative, feedback alignment (fa) offers an attrac- \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/738.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/738.txt new file mode 100644 index 0000000000000000000000000000000000000000..11b1645a4b66e1d89e760ded58c5f21b7e05c988 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/738.txt @@ -0,0 +1 @@ + as machine learning models become increasingly prevalent in medical diagnostics, the need for interpretability and transparency becomes paramount. the xai renaissance signifies a significant shift in the field, aiming to redefine the interpretability of medical diagnostic models. this paper explores the innovative approaches and methodologies within the realm of explainable ai (xai) that are revolutionizing the interpretability of medical diagnostic models. by shedding light on the underlying decision-making process, xai techniques empower healthcare professionals to understand, trust, and effectively utilize these models for accurate and reliable medical diagnoses. this review highlights the key advancements in xai for medical diagnostics and their potential to transform the healthcare landscape, ultimately improving patient outcomes and fostering trust in ai-driven diagnostic systems. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/739.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/739.txt new file mode 100644 index 0000000000000000000000000000000000000000..b66af9383d5be4c6d032c26f4e01abeb1c1f1ca4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/739.txt @@ -0,0 +1 @@ + this paper explores the use of clustering methods and machine learning algorithms, including natural language processing (nlp), to identify and classify problems identified in credit risk models through textual information contained in validation reports. using a unique dataset of 657 findings raised by validation teams in a large international banking group between january 2019 and december 2022. the findings are classified into nine validation dimensions and assigned a severity level by validators using their expert knowledge. the authors use embedding generation for the findings' titles and observations using four different pre-trained models, including "module_url" from tensorflow hub and three models from the sentencetransformer library, namely "all-mpnet-base-v2", "all-minilm-l6-v2", and "paraphrase-mpnet-base-v2". the paper uses and compares various clustering methods in grouping findings with similar characteristics, enabling the identification of common problems within each validation dimension and severity. the results of the study show that clustering is an effective approach for identifying and classifying credit risk model problems with accuracy higher than 60%. the authors also employ machine learning algorithms, including logistic regression and xgboost, to predict the validation dimension and its severity, achieving an accuracy of 80% for xg-boost algorithm. furthermore, the study identifies the top 10 words that predict a validation dimension and severity. overall, this paper makes a contribution by demonstrating the usefulness of clustering and machine learning for analyzing textual information in validation reports, and providing insights into the types of problems encountered in the development and validation of credit risk models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/74.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/74.txt new file mode 100644 index 0000000000000000000000000000000000000000..8bd2cbbe30f1d04dc52d4ce52d3ca34bdb88c83c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/74.txt @@ -0,0 +1 @@ + many real-world classification problems are cost-sensitive in nature, such that the misclassification costs vary between data instances. cost-sensitive learning adapts classification algorithms to account for differences in misclassification costs. stacking is an ensemble method that uses predictions from several classifiers as the training data for another classifier, which in turn makes the final classification decision.while a large body of empirical work exists where stacking is applied in various domains, very few of these works take the misclassification costs into account. in fact, there is no consensus in the literature as to what cost-sensitive stacking is. in this paper we perform extensive experiments with the aim of establishing what the appropriate setup for a cost-sensitive stacking ensemble is. 
our experiments, conducted on twelve datasets from a number of application domains, using real, instance-dependent misclassification costs, show that for best performance, both levels of stacking require cost-sensitive classification decision. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/740.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/740.txt new file mode 100644 index 0000000000000000000000000000000000000000..b3c6a3e9f6f5a694a222cbcba32ec3e42711003a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/740.txt @@ -0,0 +1 @@ + federated learning (fl) is a privacy-preserving distributed machine learning paradigm that operates at the wireless edge. it enables clients to collaborate on model training while keeping their data private from adversaries and the central server. however, current fl approaches have limitations. some rely on secure multiparty computation, which can be vulnerable to inference attacks. others employ differential privacy, but this may lead to decreased test accuracy when dealing with a large number of parties contributing small amounts of data. to address these issues, this paper proposes a novel approach that integrates federated learning seamlessly into the inner workings of mimo (multiple-input multiple-output) systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/741.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/741.txt new file mode 100644 index 0000000000000000000000000000000000000000..1de76bd78beb57ac6578bbbebe8be7a72210c33e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/741.txt @@ -0,0 +1 @@ + anomaly detection aims to detect data that do not conform to regular patterns, and such data is also called outliers. the anomalies to be detected are often tiny in proportion, containing crucial information, and are suitable for application scenes like intrusion detection, fraud detection, fault diagnosis, e-commerce platforms, et al. however, in many realistic scenarios, only the samples following normal behavior are observed, while we can hardly obtain any anomaly information. to address such problem, we propose an anomaly detection method galdetector which is combined of global and local information based on observed normal samples. the proposed method can be divided into a three-stage method. firstly, the global similar normal scores and the local sparsity scores of unlabeled samples are computed separately. secondly, potential anomaly samples are separated from the unlabeled samples corresponding to these two scores and corresponding weights are assigned to the selected samples. finally, a weighted anomaly detector is trained by loads of samples, then the detector is utilized to identify else anomalies. to evaluate the effectiveness of the proposed method, we conducted experiments on three categories of real-world datasets from diverse domains, and experimental results show that our method achieves better performance when compared with other state-of-the-art methods. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/742.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/742.txt new file mode 100644 index 0000000000000000000000000000000000000000..fa3ec1d1004f0e24127e82bc1d44a0ba292018c3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/742.txt @@ -0,0 +1 @@ + a personalized keyword spotting (kws) pipeline typically requires the training of a deep learning model on a large set of user-defined speech utterances, preventing fast customization directly applied on-device. to fill this gap, this paper investigates few-shot learning methods for open-set kws classification by combining a deep feature encoder with a prototypebased classifier. with user-defined keywords from 10 classes of the google speech command dataset, our study reports an accuracy of up to 76% in a 10-shot scenario while the false acceptance rate of unknown data is kept to 5%. in the analyzed settings, the usage of the triplet loss to train an encoder with normalized output features performs better than the prototypical networks jointly trained with a generator of dummy unknown-class prototypes. this design is also more effective than encoders trained on a classification problem and features fewer parameters than other iso-accuracy approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/743.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/743.txt new file mode 100644 index 0000000000000000000000000000000000000000..38923988ac041d2c5d1e4820c1ea1eb726d9d7e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/743.txt @@ -0,0 +1 @@ + in this paper, we strive to develop an interpretable gnns' inference paradigm, termed msinterpreter, which can serve as a plug-and-play scheme readily applicable to various gnns' baselines. unlike the most existing explanation methods, msinterpreter provides a message-passing selection scheme (msscheme) to select the critical paths for gnns' message aggregations, which aims at reaching the self-explaination instead of post-hoc explanations. in detail, the elaborate msscheme is designed to calculate weight factors of message aggregation paths by considering the vanilla structure and node embedding components, where the structure base aims at weight factors among node-induced substructures; on the other hand, the node embedding base focuses on weight factors via node embeddings obtained by one-layer gnn. finally, we demonstrate the effectiveness of our approach on graph classification benchmarks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/744.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/744.txt new file mode 100644 index 0000000000000000000000000000000000000000..41885f0cf3c6460c58b6a5e4ed516725e2e21694 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/744.txt @@ -0,0 +1 @@ + the emergence of deep learning has significantly enhanced the analysis of electrocardiograms (ecgs), a noninvasive method that is essential for assessing heart health. despite the complexity of ecg interpretation, advanced deep learning models outperform traditional methods. however, the increasing complexity of ecg data and the need for realtime and accurate diagnosis necessitate exploring more robust architectures, such as transformers. here, we present an indepth review of transformer architectures that are applied to ecg classification. 
originally developed for natural language processing, these models capture complex temporal relationships in ecg signals that other models might overlook. we conducted an extensive search of the latest transformer-based models and summarize them to discuss the advances and challenges in their application and suggest potential future improvements. this review serves as a valuable resource for researchers and practitioners and aims to shed light on this innovative application in ecg interpretation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/745.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/745.txt new file mode 100644 index 0000000000000000000000000000000000000000..f39de4d7530f1970df03b5fb07990951c3fa4e4f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/745.txt @@ -0,0 +1 @@ + federated learning is a new learning paradigm for extracting knowledge from distributed data. due to its favorable properties in preserving privacy and saving communication costs, it has been extensively studied and widely applied to numerous data analysis applications. however, most existing federated learning approaches concentrate on the centralized setting, which is vulnerable to a single-point failure. an alternative strategy for addressing this issue is the decentralized communication topology. in this article, we systematically investigate the challenges and opportunities when renovating decentralized optimization for federated learning. in particular, we discussed them from the model, data, and communication sides, respectively, which can deepen our understanding about decentralized federated learning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/746.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/746.txt new file mode 100644 index 0000000000000000000000000000000000000000..23edbffd48a24d5bb8b3261bbedd835eff8f4d3b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/746.txt @@ -0,0 +1 @@ + research on adversarial robustness is primarily focused on image and text data. yet, many scenarios in which lack of robustness can result in serious risks, such as fraud detection, medical diagnosis, or recommender systems often do not rely on images or text but instead on tabular data. adversarial robustness in tabular data poses two serious challenges. first, tabular datasets often contain categorical features, and therefore cannot be tackled directly with existing optimization procedures. second, in the tabular domain, algorithms that are not based on deep networks are widely used and offer great performance, but algorithms to enhance robustness are tailored to neural networks (e.g. adversarial training). in this paper, we tackle both challenges. we present a method that allows us to train adversarially robust deep networks for tabular data and to transfer this robustness to other classifiers via universal robust embeddings tailored to categorical data. these embeddings, created using a bilevel alternating minimization framework, can be transferred to boosted trees or random forests making them robust without the need for adversarial training while preserving their high accuracy on tabular data. we show that our methods outperform existing techniques within a practical threat model suitable for tabular data. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/747.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/747.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8f9c963a4b7396799179e7a2caf2e5fa8e18b14 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/747.txt @@ -0,0 +1 @@ + weighted low rank approximation is a fundamental problem in numerical linear algebra, and it has many applications in machine learning. given a matrix m ∈ r^(n×n), a weight matrix w ∈ r^(n×n) with non-negative entries, and a parameter k, the goal is to output two matrices u, v ∈ r^(n×k) such that ∥w • (m - uv^⊤)∥_f is minimized, where • denotes the hadamard product and ∥·∥_f the frobenius norm. such a problem is known to be np-hard and even hard to approximate assuming the exponential time hypothesis. meanwhile, alternating minimization is a good heuristic solution for approximating weighted low rank approximation. prior work shows that, under mild assumptions, alternating minimization does provide provable guarantees. in this work, we develop an efficient and robust framework for alternating minimization. for weighted low rank approximation, this improves the prior runtime from n²k² to n²k. at the heart of our framework is a high-accuracy multiple response regression solver together with a robust analysis of alternating minimization. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/748.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/748.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4bfa7ff405eb9e8bb4b21b7b91b48700aa1dfcf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/748.txt @@ -0,0 +1 @@ + random forests and, more generally, (decision-)tree ensembles are widely used methods for classification and regression. recent algorithmic advances allow one to compute decision trees that are optimal for various measures such as their size or depth. we are not aware of such research for tree ensembles and aim to contribute to this area. mainly, we provide two novel algorithms and corresponding lower bounds. first, we are able to carry over and substantially improve on tractability results for decision trees, obtaining a (6δds)^s · poly-time algorithm, where s is the number of cuts in the tree ensemble, d the largest domain size, and δ is the largest number of features in which two examples differ. to achieve this, we introduce the witness-tree technique which also seems promising for practice. second, we show that dynamic programming, which has been successful for decision trees, may also be viable for tree ensembles, providing an ℓ^n · poly-time algorithm, where ℓ is the number of trees and n the number of examples. finally, we compare the number of cuts necessary to classify training data sets for decision trees and tree ensembles, showing that ensembles may need exponentially fewer cuts for an increasing number of trees. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/749.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/749.txt new file mode 100644 index 0000000000000000000000000000000000000000..21b5b9efbd2a2bf20fad6d0ff93f5220a25bf635 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/749.txt @@ -0,0 +1 @@ + the reinforcement learning algorithm sarsa combined with linear function approximation has been shown to converge for infinite horizon discounted markov decision problems (mdps). 
in this paper, we investigate the convergence of the algorithm for random horizon mdps, which has not previously been shown. we show, similar to earlier results for infinite horizon discounted mdps, that if the behaviour policy is ε-soft and lipschitz continuous with respect to the weight vector of the linear function approximation, with small enough lipschitz constant, then the algorithm will converge with probability one when considering a random horizon mdp. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/75.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/75.txt new file mode 100644 index 0000000000000000000000000000000000000000..087d8ca3b619ee0b7cd4c61ec80cdbc5c850e265 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/75.txt @@ -0,0 +1 @@ + quantum machine learning (qml) shows how it maintains certain significant advantages over machine learning methods. it now shows that hybrid quantum methods have great scope for deployment and optimisation, and hold promise for future industries. as a weakness, quantum computing does not have enough qubits to justify its potential. this topic of study gives us encouraging results in the improvement of quantum coding, being the data preprocessing an important point in this research we employ two dimensionality reduction techniques lda and pca applying them in a hybrid way quantum support vector classifier (qsvc) and variational quantum classifier (vqc) in the classification of diabetes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/750.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/750.txt new file mode 100644 index 0000000000000000000000000000000000000000..2975bd51e4e7df080c254abfe3fc268f6f9678e7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/750.txt @@ -0,0 +1 @@ + we introduce time_interpret, a library designed as an extension of captum (kokhlikyan et al., 2020), with a specific focus on temporal data. as such, this library implements several feature attribution methods that can be used to explain predictions made by any pytorch model. time_interpret also provides several synthetic and real world time series datasets, various pytorch models, as well as a set of methods to evaluate feature attributions. moreover, while being primarily developed to explain predictions based on temporal data, some of its components have a different application, including for instance methods explaining predictions made by language models. in this paper, we give a general introduction of this library. we also present several previously unpublished feature attribution methods, which have been developed along with time_interpret 1 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/751.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/751.txt new file mode 100644 index 0000000000000000000000000000000000000000..e087b498f704488253c6efa6c21330941ceb8839 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/751.txt @@ -0,0 +1 @@ + the emergence of smart cities demands harnessing advanced technologies like the internet of things (iot) and artificial intelligence (ai) and promises to unlock cities' potential to become more sustainable, efficient, and ultimately livable for their inhabitants. 
this work introduces an intelligent city management system that provides a data-driven approach to three use cases: (i) analyze traffic information to reduce the risk of traffic collisions and improve driver and pedestrian safety, (ii) identify when and where energy consumption can be reduced to improve cost savings, and (iii) detect maintenance issues like potholes in the city's roads and sidewalks, as well as the beginning of hazards like floods and fires. a case study in aveiro city demonstrates the system's effectiveness in generating actionable insights that enhance security, energy efficiency, and sustainability, while highlighting the potential of ai and iot-driven solutions for smart city development. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/752.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/752.txt new file mode 100644 index 0000000000000000000000000000000000000000..deb6edd61ee45db75bcb4eaec2129d304a26e976 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/752.txt @@ -0,0 +1 @@ + generative ai has experienced remarkable growth in recent years, leading to a wide array of applications across diverse domains. in this paper, we present a comprehensive survey of more than 350 generative ai applications, providing a structured taxonomy and concise descriptions of various unimodal and even multimodal generative ais. the survey is organized into sections, covering a wide range of unimodal generative ai applications such as text, images, video, gaming and brain information. our survey aims to serve as a valuable resource for researchers and practitioners to navigate the rapidly expanding landscape of generative ai, facilitating a better understanding of the current stateof-the-art and fostering further innovation in the field. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/753.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/753.txt new file mode 100644 index 0000000000000000000000000000000000000000..12f0e275b52f020a9b6c992a614d0a886482e221 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/753.txt @@ -0,0 +1 @@ + in this lecture, we present a general perspective on reinforcement learning (rl) objectives, where we show three versions of objectives. the first version is the standard definition of objective in rl literature. then we extend the standard definition to the λ-return version, which unifies the standard definition of objective. finally, we propose a general objective that unifies the previous two versions. the last version provides a high level to understand of rl's objective, where it shows a fundamental formulation that connects some widely used rl techniques (e.g., td(λ) and gae), and this objective can be potentially applied to extensive rl algorithms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/754.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/754.txt new file mode 100644 index 0000000000000000000000000000000000000000..197250b68b216dca5044e5f3f71ab3a2a8b74ab8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/754.txt @@ -0,0 +1 @@ + random forests are powerful ensemble learning algorithms widely used in various machine learning tasks. however, they have a tendency to overfit noisy or irrelevant features, which can result in decreased generalization performance. 
post-hoc regularization techniques aim to mitigate this issue by modifying the structure of the learned ensemble after its training. here, we propose bayesian post-hoc regularization to leverage the reliable patterns captured by leaf nodes closer to the root, while potentially reducing the impact of more specific and potentially noisy leaf nodes deeper in the tree. this approach allows for a form of pruning that does not alter the general structure of the trees but rather adjusts the influence of leaf nodes based on their proximity to the root node. we have evaluated the performance of our method on various machine learning data sets. our approach demonstrates competitive performance with the state-of-the-art methods and, in certain cases, surpasses them in terms of predictive accuracy and generalization. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/755.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/755.txt new file mode 100644 index 0000000000000000000000000000000000000000..927bae9dad76f688300619dd29ad8147ab819d09 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/755.txt @@ -0,0 +1 @@ + black-box variational inference is widely used in situations where there is no proof that its stochastic optimization succeeds. we suggest this is due to a theoretical gap in existing stochastic optimization proofs-namely the challenge of gradient estimators with unusual noise bounds, and a composite non-smooth objective. for dense gaussian variational families, we observe that existing gradient estimators based on reparameterization satisfy a quadratic noise bound and give novel convergence guarantees for proximal and projected stochastic gradient descent using this bound. this provides rigorous guarantees that methods similar to those used in practice converge on realistic inference problems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/756.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/756.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5fe8db6f38294394c5d7c29d405820b6cda0749 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/756.txt @@ -0,0 +1 @@ + in classification problems with large output spaces (up to millions of labels), the last layer can require an enormous amount of memory. using sparse connectivity would drastically reduce the memory requirements, but as we show below, it can result in much diminished predictive performance of the model. fortunately, we found that this can be mitigated by introducing a penultimate layer of intermediate size. we further demonstrate that one can constrain the connectivity of the sparse layer to be uniform, in the sense that each output neuron will have the exact same number of incoming connections. this allows for efficient implementations of sparse matrix multiplication and connection redistribution on gpu hardware. via a custom cuda implementation, we show that the proposed approach can scale to datasets with 670,000 labels on a single commodity gpu with only 4gb memory. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/757.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/757.txt new file mode 100644 index 0000000000000000000000000000000000000000..724a2a9b63556ffbea60952c4a19b58ce78d9bd3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/757.txt @@ -0,0 +1 @@ + federated learning in satellites offers several advantages. firstly, it ensures data privacy and security, as sensitive data remains on the satellites and is not transmitted to a central location. this is particularly important when dealing with sensitive or classified information. secondly, federated learning allows satellites to collectively learn from a diverse set of data sources, benefiting from the distributed knowledge across the satellite network. lastly, the use of federated learning reduces the communication bandwidth requirements between satellites and the central server, as only model updates are exchanged instead of raw data. by leveraging federated learning, satellites can collaborate and continuously improve their machine learning models while preserving data privacy and minimizing communication overhead. this enables the development of more intelligent and efficient satellite systems for various applications, such as earth observation, weather forecasting, and space exploration. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/758.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/758.txt new file mode 100644 index 0000000000000000000000000000000000000000..bd92d10496cfad78b447a6e4333582acdc0611f5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/758.txt @@ -0,0 +1 @@ + in this paper, we examine the problem of partial inference in the context of structured prediction. using a generative model approach, we consider the task of maximizing a score function with unary and pairwise potentials in the space of labels on graphs. employing a two-stage convex optimization algorithm for label recovery, we analyze the conditions under which a majority of the labels can be recovered. we introduce a novel perspective on the karush-kuhn-tucker (kkt) conditions and primal and dual construction, and provide statistical and topological requirements for partial recovery with provable guarantees. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/759.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/759.txt new file mode 100644 index 0000000000000000000000000000000000000000..b63396d6a892636a1f0998a6a0f22192f63cfd7a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/759.txt @@ -0,0 +1 @@ + accurate yield forecasting is essential for making informed policies and long-term decisions for food security. earth observation (eo) data and machine learning algorithms play a key role in providing a comprehensive and timely view of crop conditions from field to national scales. however, machine learning algorithms' prediction accuracy is often harmed by spatial heterogeneity caused by exogenous factors not reflected in remote sensing data, such as differences in crop management strategies. in this paper, we propose and investigate a simple technique called state-wise additive bias to explicitly address the cross-region yield heterogeneity in kazakhstan. 
compared to baseline machine learning models (random forest, catboost, xgboost), our method reduces the overall rmse by 8.9% and the highest state-wise rmse by 28.37%. the effectiveness of state-wise additive bias indicates machine learning's performance can be significantly improved by explicitly addressing the spatial heterogeneity, motivating future work on spatial-aware machine learning algorithms for yield forecasts as well as for general geospatial forecasting problems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/76.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/76.txt new file mode 100644 index 0000000000000000000000000000000000000000..c8c5f2e2f0401d16ab2d80594a779a2f720289b8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/76.txt @@ -0,0 +1 @@ + in this paper, we study the online learning of real-valued functions where the hidden function is known to have certain smoothness properties. specifically, for q ≥ 1, let f_q be the class of absolutely continuous functions f : → r such that ‖f′‖_q ≤ 1. for q ≥ 1 and d ∈ z_+, let f_{q,d} be the class of functions f : d → r such that any function g : → r formed by fixing all but one parameter of f is in f_q. for any class of real-valued functions f and p > 0, let opt_p(f) be the best upper bound on the sum of p-th powers of absolute prediction errors that a learner can guarantee in the worst case. in the single-variable setup, we find new bounds for opt_p(f_q) that are sharp up to a constant factor. we show for all ε ∈ (0, 1) that opt_{1+ε}(f_∞) = θ(ε^(-1/2)) and opt_{1+ε}(f_q) = θ(ε^(-1/2)) for all q ≥ 2. we also show for ε ∈ (0, 1) that opt_2(f_{1+ε}) = θ(ε^(-1)). in addition, we obtain new exact results by proving that opt_p(f_q) = 1 for q ∈ (1, 2) and p ≥ 2 + 1/(q-1). in the multi-variable setup, we establish inequalities relating opt_p(f_{q,d}) to opt_p(f_q) and show that opt_p(f_{∞,d}) is infinite when p < d and finite when p > d. we also obtain sharp bounds on learning f_{∞,d} for p < d when the number of trials is bounded. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/760.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/760.txt new file mode 100644 index 0000000000000000000000000000000000000000..8961d6fdcc8d02a26814cd4714198e37abdf807d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/760.txt @@ -0,0 +1 @@ + decision tree is a well understood machine learning model that is based on minimizing impurities in the internal nodes. the most common impurity measures are shannon entropy and gini impurity. these impurity measures are insensitive to the order of training data and hence the final tree obtained is invariant to any permutation of the data. this leads to a serious limitation in modeling data instances that have order dependencies. in this work, we propose the use of effort-to-compress (etc) -a complexity measure, for the first time, as an impurity measure. unlike shannon entropy and gini impurity, structural impurity based on etc is able to capture order dependencies in the data, thus obtaining potentially different decision trees for different permutations of the same data instances (permutation decision trees). we then introduce the notion of permutation bagging achieved using permutation decision trees without the need for random feature selection and sub-sampling. we compare the performance of the proposed permutation bagged decision trees with random forests.
our model does not assume that the data instances are independent and identically distributed. potential applications include scenarios where a temporal order present in the data instances is to be respected. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/761.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/761.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa29ed8cae64135a43addd7a6a29d4911c72c1d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/761.txt @@ -0,0 +1 @@ + traffic prediction is a spatiotemporal predictive task that plays an essential role in intelligent transportation systems. today, graph convolutional neural networks (gcnns) have become the prevailing models in the traffic prediction literature since they excel at extracting spatial correlations. in this work, we classify the components of successful gcnn prediction models and analyze the effects of matrix factorization, attention mechanism, and weight sharing on their performance. furthermore, we compare these variations against random forests, a traditional regression method that predates gcnns by over 15 years. we evaluated these methods using simulated data of two regions in toronto as well as real-world sensor data from selected california highways. we found that incorporating matrix factorization, attention, and location-specific model weights either individually or collectively into gcnns can result in a better overall performance. moreover, although random forest regression is a less compact model, it matches or exceeds the performance of all variations of gcnns in our experiments. this suggests that the current graph convolutional methods may not be the best approach to traffic prediction and there is still room for improvement. finally, our findings also suggest that for future research on gcnn for traffic prediction to be credible, researchers must include performance comparison to random forests. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/762.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/762.txt new file mode 100644 index 0000000000000000000000000000000000000000..34c6cb98ba5d47dd93ef9e0b67e9738b3a4f9bc9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/762.txt @@ -0,0 +1 @@ + heart failure is a syndrome which occurs when the heart is not able to pump blood and oxygen to support other organs in the body. treatment and management of heart failure in patients include understanding the diagnostic codes and procedure reports of these patients during their hospitalization. identifying the underlying themes in these diagnostic codes and procedure reports could reveal the clinical phenotypes associated with heart failure. these themes could also help clinicians to predict length of stay in the patients using their clinical notes. understanding clinical phenotypes on the basis of these themes is important to group patients based on their similar characteristics which could also help in predicting patient outcomes like length of stay. 
these clinical phenotypes usually have a probabilistic latent structure and hence, as there has been no previous work on identifying phenotypes in clinical notes of heart failure patients using a probabilistic framework and to predict length of stay of these patients using data-driven artificial intelligence-based methods, we apply natural language processing technique, topic modeling, to identify the themes present in diagnostic codes and in procedure reports of 1,200 patients admitted for heart failure at the university of illinois hospital and health sciences system (ui health). topic modeling identified twelve themes each in diagnostic codes and procedure reports. these themes revealed information about different phenotypes related to various perspectives about heart failure, which could help to study patients' profiles and discover new relationships among medical concepts. each theme had a set of keywords and each clinical note was labeled with two themes -one corresponding to its diagnostic code and the other corresponding to its procedure reports along with their percentage contribution. we used these themes and their percentage contribution to predict length of stay. we found that the themes discovered in diagnostic codes and procedure reports using topic modeling together were able to predict length of stay of the patients with an accuracy of 61.1% and an area under the receiver operating characteristic curve (roc auc) value of 0.828. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/763.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/763.txt new file mode 100644 index 0000000000000000000000000000000000000000..d68260f2ef9a58132ef4578937600e6b80d6a225 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/763.txt @@ -0,0 +1 @@ + to solve the problem of poor performance of deep neural network models due to insufficient data, a simple yet effective interpolation-based data augmentation method is proposed: msmix (manifold swap mixup). this method feeds two different samples to the same deep neural network model, and then randomly select a specific layer and partially replace hidden features at that layer of one of the samples by the counterpart of the other. the mixed hidden features are fed to the model and go through the rest of the network. two different selection strategies are also proposed to obtain richer hidden representation. experiments are conducted on three chinese intention recognition datasets, and the results show that the msmix method achieves better results than other methods in both full-sample and small-sample configurations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/764.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/764.txt new file mode 100644 index 0000000000000000000000000000000000000000..67145aad057b6e4702aecfbb936711c40a8fc404 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/764.txt @@ -0,0 +1 @@ + understanding traffic participants' behaviour is crucial for predicting their future trajectories, aiding in developing safe and reliable planning systems for autonomous vehicles. integrating cognitive processes and machine learning models has shown promise in other domains but is lacking in the trajectory forecasting of multiple traffic agents in large-scale autonomous driving datasets. 
this work investigates the state-of-the-art trajectory forecasting model trajectron++, which we enhance by incorporating a smoothing term in its attention module. this attention mechanism mimics human attention, inspired by cognitive science research indicating limits to attention switching. we evaluate the performance of the resulting smooth-trajectron++ model and compare it to the original model on various benchmarks, revealing the potential of incorporating insights from human cognition into trajectory prediction models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/765.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/765.txt new file mode 100644 index 0000000000000000000000000000000000000000..973e1883c5f3a29dc355445ba6d4d07285d26495 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/765.txt @@ -0,0 +1 @@ + we explore different ways to utilize position-based cross-attention in seq2seq networks to enable length generalization in algorithmic tasks. we show that a simple approach of interpolating the original and reversed encoded representations combined with relative attention allows near-perfect length generalization for both forward and reverse lookup tasks or copy tasks that had been generally hard to tackle. we also devise harder diagnostic tasks where the relative distance of the ideal attention position varies with timestep. in such settings, the simple interpolation trick with relative attention is not sufficient. we introduce novel variants of location attention building on top of dubois et al. (2020) to address the new diagnostic tasks. we also show the benefits of our approaches for length generalization in scan (lake & baroni, 2018) and cfq (keysers et al., 2020). our code is available on github 1 . \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/766.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/766.txt new file mode 100644 index 0000000000000000000000000000000000000000..8e930417f29d3c0e5b5c819412128d6814a03e60 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/766.txt @@ -0,0 +1 @@ + reinforcement learning is a powerful framework for training agents to navigate different situations, but it is susceptible to changes in environmental dynamics. however, solving markov decision processes that are robust to changes is difficult due to nonconvexity and size of action or state spaces. while most works have analyzed this problem by taking different assumptions on the problem, a general and efficient theoretical analysis is still missing. in this work, we provide a simple framework for improving robustness by solving a minimax iterative optimization problem where a policy player and an environmental dynamics player are playing against each other. leveraging recent results in online nonconvex learning and techniques from improving policy gradient methods, we obtain an algorithm that maximizes the robustness of the value function on the order of o(1/t^(1/2)), where t is the number of iterations of the algorithm.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/767.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/767.txt new file mode 100644 index 0000000000000000000000000000000000000000..03a4ea3426adf6f8493facf046b2b0c12dfe5355 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/767.txt @@ -0,0 +1 @@ + bias mitigation methods for binary classification decision-making systems have been widely researched due to the ever-growing importance of designing fair machine learning processes that are impartial and do not discriminate against individuals or groups based on protected personal characteristics. in this paper, we present a structured overview of the research landscape for bias mitigation methods, report on their benefits and limitations, and provide recommendations for the development of future bias mitigation methods for binary classification.within the field of fair machine learning, several bias mitigation methods have been proposed, targeting the removal of unwanted bias from training data (e.g. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/768.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/768.txt new file mode 100644 index 0000000000000000000000000000000000000000..ad185d3e478732776e97d1dfcd92c4e213ce1458 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/768.txt @@ -0,0 +1 @@ + currently, many researchers and analysts are working toward medical diagnosis enhancement for various diseases. heart disease is one of the common diseases that can be considered a significant cause of mortality worldwide. early detection of heart disease significantly helps in reducing the risk of heart failure. consequently, the centers for disease control and prevention (cdc) conducts a health-related telephone survey yearly from over 400,000 participants. however, several concerns arise regarding the reliability of the data in predicting heart disease and whether all of the survey questions are strongly related. this study aims to utilize several machine learning techniques, such as support vector machines and logistic regression, to investigate the accuracy of the cdc's heart disease survey in the united states. furthermore, we use various feature selection methods to identify the most relevant subset of questions that can be utilized to forecast heart conditions. to reach a robust conclusion, we perform stability analysis by randomly sampling the data 300 times. the experimental results show that the survey data can be useful up to 80% in terms of predicting heart disease, which significantly improves the diagnostic process before bloodwork and tests. in addition, the amount of time spent conducting the survey can be reduced by 77% while maintaining the same level of performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/769.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/769.txt new file mode 100644 index 0000000000000000000000000000000000000000..0257146c192e57ae7f8eb639683b97ef9a94485d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/769.txt @@ -0,0 +1 @@ + shortcut learning, or 'clever hans effect' refers to situations where a learning agent (e.g., deep neural networks) learns spurious correlations present in data, resulting in biased models. 
we focus on finding shortcuts in deep learning based spoofing countermeasures (cms) that predict whether a given utterance is spoofed or not. while prior work has addressed specific data artifacts, such as silence, no general normative framework has been explored for analyzing shortcut learning in cms. in this study, we propose a generic approach to identifying shortcuts by introducing systematic interventions on the training and test sides, including the boundary cases of 'near-perfect' and 'worse than coin flip' (label flip). by using three different models, ranging from classic to state-of-the-art, we demonstrate the presence of shortcut learning in five simulated conditions. we analyze the results using a regression model to understand how biases affect the class-conditional score statistics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/77.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/77.txt new file mode 100644 index 0000000000000000000000000000000000000000..718fb09a24c7d1dc623d3e5c079488e5ce15d3bb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/77.txt @@ -0,0 +1 @@ + consensus clustering aggregates partitions in order to find a better fit by reconciling clustering results from different sources/executions. in practice, there exist noise and outliers in clustering task, which, however, may significantly degrade the performance. to address this issue, we propose a novel algorithm -robust consensus clustering that can find common ground truth among experts' opinions, which tends to be minimally affected by the bias caused by the outliers. in particular, we formalize the robust consensus clustering problem as a constraint optimization problem, and then derive an effective algorithm upon alternating direction method of multipliers (admm) with rigorous convergence guarantee. our method outperforms the baselines on benchmarks. we apply the proposed method to the real-world advertising campaign segmentation and forecasting tasks using the proposed consensus clustering results based on the similarity computed via kolmogorov-smirnov statistics. the accurate clustering result is helpful for building the advertiser profiles so as to perform the forecasting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/770.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/770.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2301c1b1221d7f1ce950254cda345eb3136ebf4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/770.txt @@ -0,0 +1 @@ + in a recent work, laforgue et al. introduce the model of last switch dependent (lsd) bandits, in an attempt to capture nonstationary phenomena induced by the interaction between the player and the environment. examples include satiation, where consecutive plays of the same action lead to decreased performance, or deprivation, where the payoff of an action increases after an interval of inactivity. in this work, we take a step towards understanding the approximability of planning lsd bandits, namely, the (np-hard) problem of computing an optimal arm-pulling strategy under complete knowledge of the model. 
in particular, we design the first efficient constant approximation algorithm for the problem and show that, under a natural monotonicity assumption on the payoffs, its approximation guarantee (almost) matches the state-of-the-art for the special and well-studied class of recharging bandits (also known as delay-dependent). in this attempt, we develop new tools and insights for this class of problems, including a novel higher-dimensional relaxation and the technique of mirroring the evolution of virtual states. we believe that these novel elements could potentially be used for approaching richer classes of action-induced nonstationary bandits (e.g., special instances of restless bandits). in the case where the model parameters are initially unknown, we develop an online learning adaptation of our algorithm for which we provide sublinear regret guarantees against its full-information counterpart. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/771.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/771.txt new file mode 100644 index 0000000000000000000000000000000000000000..e8cb611356d4c07cac7cefbb4c27edbac12c8e63 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/771.txt @@ -0,0 +1 @@ + this paper investigates a combinational creativity approach to transfer learning to improve the performance of deep neural network-based models for music generation on out-of-distribution (ood) genres. we identify iranian folk music as an example of such an ood genre for musicvae, a large generative music model. we find that a combinational creativity transfer learning approach can efficiently adapt musicvae to an iranian folk music dataset, indicating potential for generating underrepresented music genres in the future. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/772.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/772.txt new file mode 100644 index 0000000000000000000000000000000000000000..f30655deb8219ff38d69a2266f3313293825d7dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/772.txt @@ -0,0 +1 @@ + self-supervised methods received tremendous attention thanks to their seemingly heuristic approach to learning representations that respect the semantics of the data without any apparent supervision in the form of labels. a growing body of literature is already being published in an attempt to build a coherent and theoretically grounded understanding of the workings of a zoo of losses used in modern self-supervised representation learning methods. in this paper, we attempt to provide an understanding from the perspective of a laplace operator and connect the inductive bias stemming from the augmentation process to a low-rank matrix completion problem. to this end, we leverage the results from low-rank matrix completion to provide theoretical analysis on the convergence of modern ssl methods and a key property that affects their downstream performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/773.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/773.txt new file mode 100644 index 0000000000000000000000000000000000000000..6e3a9a3e136791b4f5e30f3a0dc870617d3dab04 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/773.txt @@ -0,0 +1 @@ + we study the problem of learning universal features across multiple graphs through self-supervision. 
graph self-supervised learning has been shown to facilitate representation learning, and produce competitive models compared to supervised baselines. however, existing methods of self-supervision learn features from one graph, and thus, produce models that are specialized to a particular graph. we hypothesize that leveraging multiple graphs of the same type/class can improve the quality of learnt representations in the model by extracting features that are universal to the class of graphs. we adopt a transformer backbone that acts as a universal representation learning module for multiple graphs. we leverage neighborhood aggregation coupled with a graph-specific embedding generator to transform disparate node embeddings from multiple graphs to a common space for the universal backbone. we learn both universal and graph-specific parameters in an end-to-end manner. our experiments reveal that leveraging multiple graphs of the same type (citation networks) improves the quality of representations and results in better performance on the downstream node classification task compared to self-supervision with one graph. the results of our study improve the state-of-the-art in graph self-supervised learning, and bridge the gap between self-supervised and supervised performance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/774.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/774.txt new file mode 100644 index 0000000000000000000000000000000000000000..79cac7669b9e2d8d5dd2acd0fa746b7cc711673c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/774.txt @@ -0,0 +1 @@ + recently, large language models (llms) have demonstrated remarkable capabilities. chain-of-thought (cot) has been proposed as a way of assisting llms in performing complex reasoning. however, developing effective prompts can be a challenging and labor-intensive task. many studies have proposed ways to automatically construct cot from test data. most of them assume that all test data is visible before testing and only select a small subset to generate rationales, which is an unrealistic assumption. in this paper, we present a case study on how to construct and optimize chain-of-thought prompting using batch data in streaming settings. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/775.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/775.txt new file mode 100644 index 0000000000000000000000000000000000000000..3efea355102a4be99a11cbda1da0fef304f0713c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/775.txt @@ -0,0 +1 @@ + while the predictions produced by conformal prediction are set-valued, the data used for training and calibration is supposed to be precise. in the setting of superset learning or learning from partial labels, a variant of weakly supervised learning, it is exactly the other way around: training data is possibly imprecise (set-valued), but the model induced from this data yields precise predictions. in this paper, we combine the two settings by making conformal prediction amenable to set-valued training data. we propose a generalization of the conformal prediction procedure that can be applied to set-valued training and calibration data. we prove the validity of the proposed method and present experimental studies in which it compares favorably to natural baselines.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/776.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/776.txt new file mode 100644 index 0000000000000000000000000000000000000000..659785c7a1768cb32cdba1ac63781be50a6995e7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/776.txt @@ -0,0 +1 @@ + federated learning (fl) facilitates distributed model development to aggregate multiple confidential data sources. the information transfer among clients can be compromised by distributional differences, i.e., by non-i.i.d. data. a particularly challenging scenario is the federated model adaptation to a target client without access to annotated data. we propose federated adversarial cross training (fact), which uses the implicit domain differences between source clients to identify domain shifts in the target domain. in each round of fl, fact cross initializes a pair of source clients to generate domain specialized representations which are then used as a direct adversary to learn a domain invariant data representation. we empirically show that fact outperforms state-of-the-art federated, non-federated and source-free domain adaptation models on three popular multi-source-singletarget benchmarks, and state-of-the-art unsupervised domain adaptation (uda) models on single-source-single-target experiments. we further study fact's behavior with respect to communication restrictions and the number of participating clients.preprint. under review. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/777.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/777.txt new file mode 100644 index 0000000000000000000000000000000000000000..5d8e067cebac979ff3eb4d5155861c55cebdfa0c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/777.txt @@ -0,0 +1 @@ + precise estimation of predictive uncertainty in deep neural networks is a critical requirement for reliable decisionmaking in machine learning and statistical modeling, particularly in the context of medical ai. conformal prediction (cp) has emerged as a promising framework for representing the model uncertainty by providing well-calibrated confidence levels for individual predictions. however, the quantification of model uncertainty in conformal prediction remains an active research area, yet to be fully addressed. in this paper, we explore state-of-the-art cp methodologies and their theoretical foundations. we propose a probabilistic approach in quantifying the model uncertainty derived from the produced prediction sets in conformal prediction and provide certified boundaries for the computed uncertainty. by doing so, we allow model uncertainty measured by cp to be compared by other uncertainty quantification methods such as bayesian (e.g., mc-dropout and deepensemble) and evidential approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/778.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/778.txt new file mode 100644 index 0000000000000000000000000000000000000000..b85e4a420bf971ba4b6063bbd488177984866d31 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/778.txt @@ -0,0 +1 @@ + graph representation learning (also known as network embedding) has been extensively researched with varying levels of granularity, ranging from nodes to graphs. 
while most prior work in this area focuses on node-level representation, limited research has been conducted on graph-level embedding, particularly for dynamic or temporal networks. however, learning low-dimensional graph-level representations for dynamic networks is critical for various downstream graph retrieval tasks such as temporal graph similarity ranking, temporal graph isomorphism, and anomaly detection. in this paper, we present a novel method for temporal graph-level embedding that addresses this gap. our approach involves constructing a multilayer graph and using a modified random walk with temporal backtracking to generate temporal contexts for the graph's nodes. we then train a "document-level" language model on these contexts to generate graph-level embeddings. we evaluate our proposed model on five publicly available datasets for the task of temporal graph similarity ranking, and our model outperforms baseline methods. our experimental results demonstrate the effectiveness of our method in generating graph-level embeddings for dynamic networks. ccs concepts• computing methodologies → learning latent representations; • networks → topology analysis and generation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/779.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/779.txt new file mode 100644 index 0000000000000000000000000000000000000000..6aeadf241a79f61f63f4c4a3210da3da541127c1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/779.txt @@ -0,0 +1 @@ + recent advances in large language models have shown that autoregressive modeling can generate complex and novel sequences that have many real-world applications. however, these models must generate outputs autoregressively, which becomes time-consuming when dealing with long sequences. hierarchical autoregressive approaches that compress data have been proposed as a solution, but these methods still generate outputs at the original data frequency, resulting in slow and memory-intensive models. in this paper, we propose a model based on the hierarchical recurrent encoder decoder (hred) architecture. this model independently encodes input sub-sequences without global context, processes these sequences using a lower-frequency model, and decodes outputs at the original data frequency. by interpreting the encoder as an implicitly defined embedding matrix and using sampled softmax estimation, we develop a training algorithm that can train the entire model without a high-frequency decoder, which is the most memory and compute-intensive part of hierarchical approaches. in a final, brief phase, we train the decoder to generate data at the original granularity. our algorithm significantly reduces memory requirements for training autoregressive models and it also improves the total training wall-clock time. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/78.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/78.txt new file mode 100644 index 0000000000000000000000000000000000000000..f84369c0eada6092f223761e4ee762792803e332 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/78.txt @@ -0,0 +1 @@ + we study kernel methods in machine learning from the perspective of feature subspace. we establish a one-to-one correspondence between feature subspaces and kernels and propose an information-theoretic measure for kernels. 
in particular, we construct a kernel from hirschfeld-gebelein-rényi maximal correlation functions, coined the maximal correlation kernel, and demonstrate its information-theoretic optimality. we use the support vector machine (svm) as an example to illustrate a connection between kernel methods and feature extraction approaches. we show that the kernel svm on maximal correlation kernel achieves minimum prediction error. finally, we interpret the fisher kernel as a special maximal correlation kernel and establish its optimality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/780.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/780.txt new file mode 100644 index 0000000000000000000000000000000000000000..df67c81ec968de15191f17e7544a533beac8eb0e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/780.txt @@ -0,0 +1 @@ + the cost of data annotation is a substantial impediment for multi-label image classification: in every image, every category must be labeled as present or absent. single positive multi-label (spml) learning is a cost-effective solution, where models are trained on a single positive label per image. thus, spml is a more challenging domain, since it requires dealing with missing labels. in this work, we propose a method to turn single positive data into fully-labeled data: "pseudo multi-labels". basically, a "teacher" network is trained on single positive labels. then, we treat the "teacher" model's predictions on the training data as groundtruth labels to train a "student" network on fully-labeled images. with this simple approach, we show that the performance achieved by the "student" model approaches that of a model trained on the actual fully-labeled images. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/781.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/781.txt new file mode 100644 index 0000000000000000000000000000000000000000..9a0203115ea71206ded8484af7dbd23fa8bb333a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/781.txt @@ -0,0 +1 @@ + uncertainty quantification is a critical yet unsolved challenge for deep learning, especially for the time series imputation with irregularly sampled measurements. to tackle this problem, we propose a novel framework based on the principles of recurrent neural networks and neural stochastic differential equations for reconciling irregularly sampled measurements. we impute measurements at any arbitrary timescale and quantify the uncertainty in the imputations in a principled manner. specifically, we derive analytical expressions for quantifying and propagating the epistemic and aleatoric uncertainty across time instants. our experiments on the ieee 37 bus test distribution system reveal that our framework can outperform state-of-theart uncertainty quantification approaches for time-series data imputations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/782.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/782.txt new file mode 100644 index 0000000000000000000000000000000000000000..b56019723a9c92d683287a70daba2b11f968ea10 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/782.txt @@ -0,0 +1 @@ + annotating data for multi-label classification is prohibitively expensive because every category of interest must be confirmed to be present or absent. 
recent work on single positive multi-label (spml) learning shows that it is possible to train effective multi-label classifiers using only one positive label per image. however, the standard benchmarks for spml are derived from traditional multi-label classification datasets by retaining one positive label for each training example (chosen uniformly at random) and discarding all other labels. in realistic settings it is not likely that positive labels are chosen uniformly at random. this work introduces protocols for studying label bias in spml and provides new empirical results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/783.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/783.txt new file mode 100644 index 0000000000000000000000000000000000000000..02de4eb9d8c24eeefb22e227c6a592db1cde8cb8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/783.txt @@ -0,0 +1 @@ + for automotive applications, the graph attention network (gat) is a prominently used architecture to include relational information of a traffic scenario during feature embedding. as shown in this work, however, one of the most popular gat realizations, namely gatv2, has potential pitfalls that hinder an optimal parameter learning. especially for small and sparse graph structures a proper optimization is problematic. to surpass limitations, this work proposes architectural modifications of gatv2. in controlled experiments, it is shown that the proposed model adaptions improve prediction performance in a node-level regression task and make it more robust to parameter initialization. this work aims for a better understanding of the attention mechanism and analyzes its interpretability of identifying causal importance. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/784.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/784.txt new file mode 100644 index 0000000000000000000000000000000000000000..b6ce444b4e180f863f8a66cdf8aafb2aec0e198b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/784.txt @@ -0,0 +1 @@ + we provide a new non-asymptotic analysis of distributed temporal difference learning with linear function approximation. our approach relies on "one-shot averaging," where n agents run identical local copies of the td(0) method and average the outcomes only once at the very end. we demonstrate a version of the linear time speedup phenomenon, where the convergence time of the distributed process is a factor of n faster than the convergence time of td(0). this is the first result proving benefits from parallelism for temporal difference methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/785.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/785.txt new file mode 100644 index 0000000000000000000000000000000000000000..411eb08112aac08f210bb7ac927624f4c4b4aff5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/785.txt @@ -0,0 +1 @@ + we consider offline reinforcement learning (rl), where the agent does not interact with the environment and must rely on offline data collected using a behavior policy. previous works provide policy evaluation guarantees when the target policy to be evaluated is covered by the behavior policy, that is, state-action pairs visited by the target policy must also be visited by the behavior policy. 
we show that when the mdp has a latent low-rank structure, this coverage condition can be relaxed. building on the connection to weighted matrix completion with non-uniform observations, we propose an offline policy evaluation algorithm that leverages the low-rank structure to estimate the values of uncovered state-action pairs. our algorithm does not require a known feature representation, and our finite-sample error bound involves a novel discrepancy measure quantifying the discrepancy between the behavior and target policies in the spectral space. we provide concrete examples where our algorithm achieves accurate estimation while existing coverage conditions are not satisfied. building on the above evaluation algorithm, we further design an offline policy optimization algorithm and provide non-asymptotic performance guarantees. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/786.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/786.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8ae89489de43f18f753d3037ddef847c14b60e8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/786.txt @@ -0,0 +1 @@ + this study presents our team pathtomyheart's contribution to the george b. moody physionet challenge 2022. two models are implemented. the first model is a dual bayesian resnet (dbres), where each patient's recording is segmented into overlapping log mel spectrograms. these undergo two binary classifications: present versus unknown or absent, and unknown versus present or absent. the classifications are aggregated to give a patient's final classification. the second model is the output of dbres integrated with demographic data and signal features using xgboost. dbres achieved our best weighted accuracy of 0.771 on the hidden test set for murmur classification, which placed us fourth for the murmur task. (on the clinical outcome task, which we neglected, we scored 17th with costs of 12637.) on our held-out subset of the training set, integrating the demographic data and signal features improved dbres's accuracy from 0.762 to 0.820. however, this decreased dbres's weighted accuracy from 0.780 to 0.749. our results demonstrate that log mel spectrograms are an effective representation of heart sound recordings, bayesian networks provide strong supervised classification performance, and treating the ternary classification as two binary classifications increases performance on the weighted accuracy. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/787.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/787.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8bcca6f62b6365b29ad803a3570658646995463 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/787.txt @@ -0,0 +1 @@ + value iteration (vi) is foundational to the theory and practice of modern reinforcement learning, and it is known to converge at a o(γ k )-rate, where γ is the discount factor. surprisingly, however, the optimal rate in terms of bellman error for the vi setup was not known, and finding a general acceleration mechanism has been an open problem. in this paper, we present the first accelerated vi for both the bellman consistency and optimality operators. our method, called anc-vi, is based on an anchoring mechanism (distinct from nesterov's acceleration), and it reduces the bellman error faster than standard vi. 
in particular, anc-vi exhibits a o(1/k)-rate for γ ≈ 1 or even γ = 1, while standard vi has rate o(1) for γ ≥ 1 -1/k, where k is the iteration count. we also provide a complexity lower bound matching the upper bound up to a constant factor of 4, thereby establishing optimality of the accelerated rate of anc-vi. finally, we show that the anchoring mechanism provides the same benefit in the approximate vi and gauss-seidel vi setups as well. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/788.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/788.txt new file mode 100644 index 0000000000000000000000000000000000000000..9cc64ed1a989b368d22590af16f9c23008394903 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/788.txt @@ -0,0 +1 @@ + we provide several new results on the sample complexity of vector-valued linear predictors (parameterized by a matrix), and more generally neural networks. focusing on size-independent bounds, where only the frobenius norm distance of the parameters from some fixed reference matrix w 0 is controlled, we show that the sample complexity behavior can be surprisingly different than what we may expect considering the well-studied setting of scalar-valued linear predictors. this also leads to new sample complexity bounds for feed-forward neural networks, tackling some open questions in the literature, and establishing a new convex linear prediction problem that is provably learnable without uniform convergence. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/789.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/789.txt new file mode 100644 index 0000000000000000000000000000000000000000..bdd9c0493add83f9ba24f21aea82307c04753100 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/789.txt @@ -0,0 +1 @@ + domain generalization aims to solve the challenge of out-of-distribution (ood) generalization by leveraging common knowledge learned from multiple training domains to generalize to unseen test domains. to accurately evaluate the ood generalization ability, it is required that test data information is unavailable. however, the current domain generalization protocol may still have potential test data information leakage. this paper examines the risks of test data information leakage from two aspects of the current evaluation protocol: supervised pretraining on imagenet and oracle model selection. we propose modifications to the current protocol that we should employ self-supervised pretraining or train from scratch instead of employing the current supervised pretraining, and we should use multiple test domains. these would result in a more precise evaluation of ood generalization ability. we also rerun the algorithms with the modified protocol and introduce new leaderboards to encourage future research in domain generalization with a fairer comparison. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/79.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/79.txt new file mode 100644 index 0000000000000000000000000000000000000000..79ca17c7795e670a60358623dccf8e0947721310 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/79.txt @@ -0,0 +1 @@ + anomaly detection attempts at finding examples that deviate from the expected behaviour. 
usually, anomaly detection is tackled from an unsupervised perspective because anomalous labels are rare and difficult to acquire. however, the lack of labels makes the anomaly detector have high uncertainty in some regions, which usually results in poor predictive performance or low user trust in the predictions. one can reduce such uncertainty by collecting specific labels using active learning (al), which targets examples close to the detector's decision boundary. alternatively, one can increase the user trust by allowing the detector to abstain from making highly uncertain predictions, which is called learning to reject (lr). one way to do this is by thresholding the detector's uncertainty based on where its performance is low, which requires labels to be evaluated. although both al and lr need labels, they work with different types of labels: al seeks strategic labels, which are evidently biased, while lr requires i.i.d. labels to evaluate the detector's performance and set the rejection threshold. because one usually has a single label budget, deciding how to optimally allocate it is challenging. in this paper, we propose a mixed strategy that, given a budget of labels, decides in multiple rounds whether to use the budget to collect al labels or lr labels. the strategy is based on a reward function that measures the expected gain when allocating the budget to either side. we evaluate our strategy on 18 benchmark datasets and compare it to several baselines. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/790.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/790.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6870b916132f41159ddaff5e88c9f1c3302a8f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/790.txt @@ -0,0 +1 @@ + in adaptive data analysis, a mechanism gets n i.i.d. samples from an unknown distribution d, and is required to provide accurate estimations to a sequence of adaptively chosen statistical queries with respect to d. hardt and ullman and steinke and ullman showed that, in general, it is computationally hard to answer more than θ(n^2) adaptive queries, assuming the existence of one-way functions. however, these negative results strongly rely on an adversarial model that significantly advantages the adversarial analyst over the mechanism, as the analyst, who chooses the adaptive queries, also chooses the underlying distribution d. this imbalance raises questions with respect to the applicability of the obtained hardness results - an analyst who has complete knowledge of the underlying distribution d would have little need, if at all, to issue statistical queries to a mechanism which only holds a finite number of samples from d. we consider more restricted adversaries, called balanced, where each such adversary consists of two separate algorithms: the sampler, who is the entity that chooses the distribution and provides the samples to the mechanism, and the analyst, who chooses the adaptive queries, but has no prior knowledge of the underlying distribution (and hence has no a priori advantage with respect to the mechanism). we improve the quality of previous lower bounds by revisiting them using an efficient balanced adversary, under standard public-key cryptography assumptions. 
we show that these stronger hardness assumptions are unavoidable in the sense that any computationally bounded balanced adversary that has the structure of all known attacks implies the existence of public-key cryptography. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/791.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/791.txt new file mode 100644 index 0000000000000000000000000000000000000000..f202528fffdd472062198cf64b6ce30db12ca701 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/791.txt @@ -0,0 +1 @@ + in this paper, we investigate the effectiveness of integrating a hierarchical taxonomy of labels as prior knowledge into the learning algorithm of a flat classifier. we introduce two methods to integrate the hierarchical taxonomy as an explicit regularizer into the loss function of learning algorithms. by reasoning on a hierarchical taxonomy, a neural network alleviates its output distributions over the classes, allowing conditioning on upper concepts for a minority class. we limit ourselves to the flat classification task and provide our experimental results on two industrial in-house datasets and two public benchmarks, rcv1 and amazon product reviews. our obtained results show the significant effect of a taxonomy in increasing the performance of a learner in semi-supervised multi-class classification and the considerable results obtained in a fully supervised fashion. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/792.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/792.txt new file mode 100644 index 0000000000000000000000000000000000000000..39ba360d6d4a707d65c5edadad12b7cba97d0273 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/792.txt @@ -0,0 +1 @@ + in this work, we improve on the upper and lower bounds for the regret of online learning with strongly observable undirected feedback graphs. the best known upper bound for this problem is o(√(αt ln k)), where k is the number of actions, α is the independence number of the graph, and t is the time horizon. the √(ln k) factor is known to be necessary when α = 1 (the experts case). on the other hand, when α = k (the bandits case), the minimax rate is known to be θ(√(kt)), and a lower bound of ω(√(αt)) is known to hold for any α. our improved upper bound o(√(αt(1 + ln(k/α)))) holds for any α and matches the lower bounds for bandits and experts, while interpolating intermediate cases. to prove this result, we use ftrl with q-tsallis entropy for a carefully chosen value of q ∈ [1/2, 1) that varies with α. the analysis of this algorithm requires a new bound on the variance term in the regret. we also show how to extend our techniques to time-varying graphs, without requiring prior knowledge of their independence numbers. our upper bound is complemented by an improved ω(√(αt(ln k)/(ln α))) lower bound for all α > 1, whose analysis relies on a novel reduction to multitask learning. this shows that a logarithmic factor is necessary as soon as α < k. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/793.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/793.txt new file mode 100644 index 0000000000000000000000000000000000000000..91d4b5b91dabd54c116d4414dd583cdf312d09e7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/793.txt @@ -0,0 +1 @@ + batch normalization (bn) is widely used to stabilize the optimization process and improve the test performance of deep neural networks. the regularization effect of bn depends on the batch size and explicitly using smaller batch sizes with batch normalization, a method known as ghost batch normalization (gbn), has been found to improve generalization in many settings. we investigate the effectiveness of gbn by disentangling the induced "ghost noise" from normalization and quantitatively analyzing the distribution of noise as well as its impact on model performance. inspired by our analysis, we propose a new regularization technique called ghost noise injection (gni) that imitates the noise in gbn without incurring the detrimental train-test discrepancy effects of small batch training. we experimentally show that gni can provide a greater generalization benefit than gbn. ghost noise injection can also be beneficial in otherwise non-noisy settings such as layer-normalized networks, providing additional evidence of the usefulness of ghost noise in batch normalization as a regularizer. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/794.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/794.txt new file mode 100644 index 0000000000000000000000000000000000000000..7134b0a42cd4310fb9f212a0e040b5f0048dd85a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/794.txt @@ -0,0 +1 @@ + while autonomous vehicles (avs) may perform remarkably well in generic real-life cases, their irrational action in some unforeseen cases leads to critical safety concerns. this paper introduces the concept of collaborative reinforcement learning (rl) to generate challenging test cases for av planning and decision-making module. one of the critical challenges for collaborative rl is the credit assignment problem, where a proper assignment of rewards to multiple agents interacting in the traffic scenario, considering all parameters and timing, turns out to be non-trivial. in order to address this challenge, we propose a novel potential-based reward-shaping approach inspired by counterfactual analysis for solving the credit-assignment problem. the evaluation in a simulated environment demonstrates the superiority of our proposed approach against other methods using local and global rewards. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/795.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/795.txt new file mode 100644 index 0000000000000000000000000000000000000000..5c2d21d3b41c3eb602031d2c15542af65e4e6015 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/795.txt @@ -0,0 +1 @@ + first-order optimization methods tend to inherently favor certain solutions over others when minimizing an underdetermined training objective that has multiple global optima. this phenomenon, known as implicit bias, plays a critical role in understanding the generalization capabilities of optimization algorithms. 
recent research has revealed that in separable binary classification tasks gradient-descent-based methods exhibit an implicit bias for the ℓ2-maximal margin classifier. similarly, generic optimization methods, such as mirror descent and steepest descent, have been shown to converge to maximal margin classifiers defined by alternative geometries. while gradient-descent-based algorithms provably achieve fast implicit bias rates, corresponding rates in the literature for generic optimization methods are relatively slow. to address this limitation, we present a series of state-of-the-art implicit bias rates for mirror descent and steepest descent algorithms. our primary technique involves transforming a generic optimization algorithm into an online optimization dynamic that solves a regularized bilinear game, providing a unified framework for analyzing the implicit bias of various optimization methods. our accelerated rates are derived by leveraging the regret bounds of online learning algorithms within this game framework. we then show the flexibility of this framework by analyzing the implicit bias in adversarial training, and again obtain significantly improved convergence rates. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/796.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/796.txt new file mode 100644 index 0000000000000000000000000000000000000000..49d33679ad2438e90eb025f942d1237294a2f9d1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/796.txt @@ -0,0 +1 @@ + retail sales forecasting presents a significant challenge for large retailers such as walmart and amazon, due to the vast assortment of products, geographical location heterogeneity, seasonality, and external factors including weather, local economic conditions, and geopolitical events. various methods have been employed to tackle this challenge, including traditional time series models, machine learning models, and neural network mechanisms, but the difficulty persists. categorizing data into relevant groups has been shown to improve sales forecast accuracy as time series from different categories may exhibit distinct patterns. in this paper, we propose a new measure to indicate the unique impacts of the trend and seasonality components on a time series and suggest grouping time series based on this measure. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/797.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/797.txt new file mode 100644 index 0000000000000000000000000000000000000000..d7d84fa5dad9e2c050042a7b4c780cfeb944b021 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/797.txt @@ -0,0 +1 @@ + differentially private synthetic data provide a powerful mechanism to enable data analysis while protecting sensitive information about individuals. however, when the data lie in a high-dimensional space, the accuracy of the synthetic data suffers from the curse of dimensionality. in this paper, we propose a differentially private algorithm to generate low-dimensional synthetic data efficiently from a high-dimensional dataset with a utility guarantee with respect to the wasserstein distance. a key step of our algorithm is a private principal component analysis (pca) procedure with a near-optimal accuracy bound that circumvents the curse of dimensionality. 
unlike the standard perturbation analysis, our analysis of private pca works without assuming a spectral gap for the covariance matrix. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/798.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/798.txt new file mode 100644 index 0000000000000000000000000000000000000000..9644d4b8d66d78925c621024e055393a267ef268 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/798.txt @@ -0,0 +1 @@ + we investigate the problem of online learning with monotone and continuous dr-submodular reward functions, which has received great attention recently. to efficiently handle this problem, especially in the case with complicated decision sets, previous studies have proposed an efficient projection-free algorithm called mono-frank-wolfe (mono-fw) using o(t) gradient evaluations and linear optimization steps in total. however, it only attains a (1 - 1/e)-regret bound of o(t^(4/5)). in this paper, we propose an improved projection-free algorithm, namely pobga, which reduces the regret bound to o(t^(3/4)) while keeping the same computational complexity as mono-fw. instead of modifying mono-fw, our key idea is to make a novel combination of a projection-based algorithm called online boosting gradient ascent, an infeasible projection technique, and a blocking technique. furthermore, we consider the decentralized setting and develop a variant of pobga, which not only reduces the current best regret bound of efficient projection-free algorithms for this setting from o(t^(4/5)) to o(t^(3/4)), but also reduces the total communication complexity from o(t) to o(√t). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/799.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/799.txt new file mode 100644 index 0000000000000000000000000000000000000000..47a512afb8bc7112f553404db30452d4e2b52bdb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/799.txt @@ -0,0 +1 @@ + esport games comprise a sizeable fraction of the global games market, and are the fastest-growing segment in games. this has given rise to the domain of esports analytics, which uses telemetry data from games to inform players, coaches, broadcasters and other stakeholders. compared to traditional sports, esport titles change rapidly, in terms of mechanics as well as rules. due to these frequent changes to the parameters of the game, esport analytics models can have a short life-span, a problem which is largely ignored within the literature. this paper extracts information from game design (i.e. patch notes) and utilises clustering techniques to propose a new form of character representation. as a case study, a neural network model is trained to predict the number of kills in a dota 2 match utilising this novel character representation technique. the performance of this model is then evaluated against two distinct baselines, including conventional techniques. not only did the model significantly outperform the baselines in terms of accuracy (85% auc), but the model also maintained its accuracy in two newer iterations of the game that introduced one new character and a brand new character type. these changes introduced to the design of the game would typically break conventional techniques that are commonly used within the literature. 
therefore, the proposed methodology for representing characters can increase the life-span of machine learning models as well as contribute to higher performance compared to traditional techniques typically employed within the literature. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/8.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/8.txt new file mode 100644 index 0000000000000000000000000000000000000000..7b6d108a231108bdb3051054ec4d8cdaa493f75d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/8.txt @@ -0,0 +1 @@ + learning linear temporal logic (ltl) formulas from examples labeled as positive or negative has found applications in inferring descriptions of system behavior. we summarize two methods to learn ltl formulas from examples in two different problem settings. the first method assumes noise in the labeling of the examples. for that, it defines the problem of inferring an ltl formula that must be consistent with most but not all of the examples. the second method considers the problem of inferring meaningful ltl formulas in the case where only positive examples are given. hence, the first method addresses robustness to noise, and the second method addresses the balance between conciseness and specificity (i.e., language minimality) of the inferred formula. the summarized methods propose different algorithms to solve the aforementioned problems, as well as to infer other descriptions of temporal properties, such as signal temporal logic or deterministic finite automata. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/80.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/80.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b560ffa248da6d09ab2f00f1f39250db7489225 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/80.txt @@ -0,0 +1 @@ + we study the problem of preserving privacy while still providing high utility in sequential decision-making scenarios in a changing environment. we consider an abruptly changing environment: the environment remains constant for periods of time and changes at unknown time instants. to formulate this problem, we propose a variant of multi-armed bandits called non-stationary stochastic corrupt bandits. we construct an algorithm called sw-klucb-cf and prove an upper bound on its utility using the performance measure of regret. the proven regret upper bound for sw-klucb-cf is near-optimal in the number of time steps and matches the best known bound for analogous problems in terms of the number of time steps and the number of changes. moreover, we present a provably optimal mechanism which can guarantee the desired level of local differential privacy while providing high utility. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/800.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/800.txt new file mode 100644 index 0000000000000000000000000000000000000000..50b7804b44916fe805223806380be0d73939439b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/800.txt @@ -0,0 +1 @@ + we present a theoretical analysis of the approximation properties of convolutional architectures when applied to the modeling of temporal sequences. 
specifically, we prove an approximation rate estimate (jackson-type result) and an inverse approximation theorem (bernsteintype result), which together provide a comprehensive characterization of the types of sequential relationships that can be efficiently captured by a temporal convolutional architecture. the rate estimate improves upon a previous result via the introduction of a refined complexity measure, whereas the inverse approximation theorem is new. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/801.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/801.txt new file mode 100644 index 0000000000000000000000000000000000000000..d65db30fc79b86eb7b72d5d49c86d4cd1e3b8d0f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/801.txt @@ -0,0 +1 @@ + this research paper explores the impact of various input parameters, including infill percentage, layer height, extrusion temperature, and print speed, on the resulting tensile strength in objects produced through additive manufacturing. the main objective of this study is to enhance our understanding of the correlation between the input parameters and tensile strength, as well as to identify the key factors influencing the performance of the additive manufacturing process. to achieve this objective, we introduced the utilization of explainable artificial intelligence (xai) techniques for the first time, which allowed us to analyze the data and gain valuable insights into the system's behavior. specifically, we employed shap (shapley additive explanations), a widely adopted framework for interpreting machine learning model predictions, to provide explanations for the behavior of a machine learning model trained on the data. our findings reveal that the infill percentage and extrusion temperature have the most significant influence on tensile strength, while the impact of layer height and print speed is relatively minor. furthermore, we discovered that the relationship between the input parameters and tensile strength is highly intricate and nonlinear, making it difficult to accurately describe using simple linear models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/802.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/802.txt new file mode 100644 index 0000000000000000000000000000000000000000..3a142b982194e301702e0a65caaf073cbfd1df15 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/802.txt @@ -0,0 +1 @@ + we consider the emerging problem of identifying the presence and use of watermarking schemes in widely used, publicly hosted, closed source large language models (llms). we introduce a suite of baseline algorithms for identifying watermarks in llms that rely on analyzing distributions of output tokens and logits generated by watermarked and unmarked llms. notably, watermarked llms tend to produce distributions that diverge qualitatively and identifiably from standard models. furthermore, we investigate the identifiability of watermarks at varying strengths and consider the tradeoffs of each of our identification mechanisms with respect to watermarking scenario. along the way, we formalize the specific problem of identifying watermarks in llms, as well as llm watermarks and watermark detection in general, providing a framework and foundations for studying them. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/803.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/803.txt new file mode 100644 index 0000000000000000000000000000000000000000..38b12d5d0951dcc269430161311364f2e29e49f9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/803.txt @@ -0,0 +1 @@ + in this study the problem of federated learning (fl) is explored under a new perspective by utilizing the deep equilibrium (deq) models instead of conventional deep learning networks. we claim that incorporating deq models into the federated learning framework naturally addresses several open problems in fl, such as the communication overhead due to the sharing large models and the ability to incorporate heterogeneous edge devices with significantly different computation capabilities. additionally, a weighted average fusion rule is proposed at the server-side of the fl framework to account for the different qualities of models from heterogeneous edge devices. to the best of our knowledge, this study is the first to establish a connection between deq models and federated learning, contributing to the development of an efficient and effective fl framework. finally, promising initial experimental results are presented, demonstrating the potential of this approach in addressing challenges of fl. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/804.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/804.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f395503e13479dd72e776ddfa8ae9027d6994a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/804.txt @@ -0,0 +1 @@ + contrastive self-supervised learning has gained attention for its ability to create high-quality representations from large unlabelled data sets. a key reason that these powerful features enable data-efficient learning of downstream tasks is that they provide augmentation invariance, which is often a useful inductive bias. however, the amount and type of invariances preferred is not known apriori, and varies across different downstream tasks. we therefore propose a multi-task self-supervised framework (mt-slvr) that learns both variant and invariant features in a parameter-efficient manner. our multi-task representation provides a strong and flexible feature that benefits diverse downstream tasks. we evaluate our approach on few-shot classification tasks drawn from a variety of audio domains and demonstrate improved classification performance on all of them. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/805.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/805.txt new file mode 100644 index 0000000000000000000000000000000000000000..544d5cb04040ffa025183a1c0569995bb2c450e4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/805.txt @@ -0,0 +1 @@ + in recent years, there has been considerable effort to promote gender balance in the academic environment of computer science (cs). however, there is still a gender gap at all cs academic levels: from students, to phd candidates, to faculty members. this general trend is followed by the department of computer science at uit the arctic university of norway. to combat this trend within the cs environment at uit, we embarked on structured discussions with students of our department. 
after analyzing the data collected from these discussions, we were able to identify action items that could mitigate the existing gender gap at our department. in particular, these discussions elucidated ways to achieve (i) a balanced flow of students into cs undergraduate program, (ii) a balanced cs study environment, and (iii) a balanced flow of graduates into higher levels of the cs academia (e.g., phd program). this paper presents the results of the discussions and the subsequent recommendations that we made to the administration of the department. we also provide a road-map that other institutions could follow to organize similar events as part of their gender-balance action plan. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/806.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/806.txt new file mode 100644 index 0000000000000000000000000000000000000000..df28a635feb8d6285ab4ecabf4e13036882c5d3a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/806.txt @@ -0,0 +1 @@ + inspired by mobile, internet enabled computing and the maturing field of educational game design, the aris project has designed an open source tool for rapidly producing locative, interactive, narrative-centric, educational experiences. in addition to the software, the project contributes a global community of active designers and a growing set of compelling mechanics for learners in such designs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/807.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/807.txt new file mode 100644 index 0000000000000000000000000000000000000000..21cc1866deaded692866dfc5f3a53b9a5f74d673 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/807.txt @@ -0,0 +1 @@ + weather and soil conditions are particularly important when it comes to farming activities. study of these factors and their role in nutrient and nitrate absorption rates can lead to useful insights with benefits for both the crop yield and the protection of the environment through the more controlled use of fertilizers and chemicals. there is a paucity of public data from rural, agricultural sensor networks. this is partly due to the unique challenges faced during the deployment and maintenance of iot networks in rural agricultural areas. as part of a 5-year project called whin we have been deploying and collecting sensor data from production and experimental agricultural farms in and around purdue university in indiana. here we release a dataset comprising soil sensor data from a representative sample of 3 nodes across 3 production farms, each for 5 months. we correlate this data with the weather data and draw some insights about the absorption of rain in the soil. we provide the dataset at: https://purduewhin.ecn. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/808.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/808.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9b2f203ca93b179537647b6418de002724bc6f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/808.txt @@ -0,0 +1 @@ + the rise of social media and the increase in the computational capabilities of computers have allowed tech companies such as facebook and google to gather incredibly large amounts of data and to be able to extract meaningful information to use for commercial purposes. 
moreover, the algorithms behind these platforms have shown the ability to influence feelings, behaviors, and opinions, representing a serious threat to the independence of their users. all of these practices have been referred to as "surveillance capitalism", a term created by shoshana zuboff. in this paper i focus on the threat imposed on the autonomy of human beings in the context of surveillance capitalism, providing both an analysis of the reasons why this threat exists and what consequences we could face if we take no action against such practices. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/809.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/809.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5cf486aa299f88556c3e3b22e4ae6abc6487f03 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/809.txt @@ -0,0 +1 @@ + wikipedia and many user-generated content (ugc) communities are known for producing reliable, quality content, but also for being vulnerable to false or misleading information. previous work has shown that many hoaxes on wikipedia go undetected for extended periods of time. but little is known about the creation of intentionally false or misleading information online. does collective attention toward a topic increase the likelihood it will spawn disinformation? here, we measure the relationship between allocation of attention and the production of hoax articles on the english wikipedia. analysis of traffic logs reveals that, compared to legitimate articles created on the same day, hoaxes tend to be more associated with traffic spikes preceding their creation. this is consistent with the idea that the supply of false or misleading information on a topic is driven by the attention it receives. these findings improve our comprehension of the determinants of disinformation in ugc communities and could help promote the integrity of knowledge on wikipedia. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/81.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/81.txt new file mode 100644 index 0000000000000000000000000000000000000000..82bd49d847b22a3bb82a8502a429f0acdb25b847 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/81.txt @@ -0,0 +1 @@ + in mass manufacturing of jewellery, the gross loss is estimated before manufacturing to calculate the wax weight of the pattern that would be investment casted to make multiple identical pieces of jewellery. machine learning is a technology that is a part of ai which helps create a model with decision-making capabilities based on a large set of user-defined data. in this paper, the authors found a way to use machine learning in the jewellery industry to estimate this crucial gross loss. choosing a small data set of manufactured rings and via regression analysis, it was found out that there is a potential of reducing the error in estimation from ±2-3 to ±0.5using ml algorithms from historic data and attributes collected from the cad file during the design phase itself. to evaluate the approach's viability, additional study must be undertaken with a larger data set. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/810.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/810.txt new file mode 100644 index 0000000000000000000000000000000000000000..5c0ac20aba810f0ea07c29659acecac94f8b3749 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/810.txt @@ -0,0 +1 @@ + knowledge tracing (kt) is the problem of predicting students' future performance based on their historical interactions with intelligent tutoring systems. recent studies have applied multiple types of deep neural networks to solve the kt problem. however, there are two important factors in real-world educational data that are not well represented. first, most existing works augment input representations with the co-occurrence matrix of questions and knowledge components 1 (kcs) but fail to explicitly integrate such intrinsic relations into the final response prediction task. second, the individualized historical performance of students has not been well captured. in this paper, we proposed at-dkt to improve the prediction performance of the original deep knowledge tracing model with two auxiliary learning tasks, i.e., question tagging (qt) prediction task and individualized prior knowledge (ik) prediction task. specifically, the qt task helps learn better question representations by predicting whether questions contain specific kcs. the ik task captures students' global historical performance by progressively predicting studentlevel prior knowledge that is hidden in students' historical \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/811.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/811.txt new file mode 100644 index 0000000000000000000000000000000000000000..916bad90a04364ee0c79a15818be1b37ff3d91e3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/811.txt @@ -0,0 +1 @@ + problems broadly known as algorithmic bias frequently occur in the context of complex socio-technical systems (sts), where observed biases may not be directly attributable to a single automated decision algorithm. as a first investigation of fairness in sts, we focus on the case of wikipedia. we systematically review 75 papers describing different types of bias in wikipedia, which we classify and relate to established notions of harm from algorithmic fairness research. by analysing causal relationships between the observed phenomena, we demonstrate the complexity of the socio-technical processes causing harm. finally, we identify the normative expectations of fairness associated with the different problems and discuss the applicability of existing criteria proposed for machine learning-driven decision systems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/812.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/812.txt new file mode 100644 index 0000000000000000000000000000000000000000..a4e0a7292dfed2cc911a832d446d087cc7f254a6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/812.txt @@ -0,0 +1 @@ + we evaluate homophily and heterophily among ideological and demographic groups in a typical opinion formation context: online discussions of current news. we analyze user interactions across five years in the r/news community on reddit, one of the most visited websites in the united states. then, we estimate demographic and ideological attributes of these users. 
thanks to a comparison with a carefully-crafted network null model, we establish which pairs of attributes foster interactions and which ones inhibit them.individuals prefer to engage with the opposite ideological side, which contradicts the echo chamber narrative. instead, demographic groups are homophilic, as individuals tend to interact within their own group-even in an online setting where such attributes are not directly observable. in particular, we observe age and income segregation consistently across years: users tend to avoid interactions when belonging to different groups. these results persist after controlling for the degree of interest by each demographic group in different news topics. our findings align with the theory that affective polarization-the difficulty in socializing across political boundaries-is more connected with an increasingly divided society, rather than ideological echo chambers on social media. we publicly release our anonymized data set and all the code to reproduce our results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/813.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/813.txt new file mode 100644 index 0000000000000000000000000000000000000000..2d4e793f06cb31535276b085c730e1f50cc7ec13 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/813.txt @@ -0,0 +1 @@ + as text-to-image systems continue to grow in popularity with the general public, questions have arisen about bias and diversity in the generated images. here, we investigate properties of images generated in response to prompts which are visually under-specified, but contain salient social attributes (e.g., 'a portrait of a threatening person' versus 'a portrait of a friendly person'). grounding our work in social cognition theory, we find that in many cases, images contain similar demographic biases to those reported in the stereotype literature. however, trends are inconsistent across different models and further investigation is warranted. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/814.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/814.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3be4585a85c067fc9a4428c52d3af45dc7aa593 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/814.txt @@ -0,0 +1 @@ + this study aims to understand the perceptions and opinions of academicians towards chatgpt-3 by collecting and analyzing social media comments, and a survey was conducted with library and information science professionals. the research uses a content analysis method and finds that while chatgpt-3 can be a valuable tool for research and writing, it is not 100% accurate and should be cross-checked. the study also finds that while some academicians may not accept chatgpt-3, most are starting to accept it. the study is beneficial for academicians, content developers, and librarians. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/815.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/815.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a834eafb96647917d51c6bc773077675733a572 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/815.txt @@ -0,0 +1 @@ + purpose: the introduction of artificial intelligence / machine learning (ai/ml) products to the regulated fields of pharmaceutical research and development (r&d) and drug manufacture, and medical devices (md) and in-vitro diagnostics (ivd), poses new regulatory problems: a lack of a common terminology and understanding leads to confusion, delays and product failures. validation as a key step in product development, common to each of these sectors including computerized systems and ai/ml development, offers an opportune point of comparison for aligning people and processes for cross-sectoral product development. methods: a comparative approach, built upon workshops and a subsequent written sequence of exchanges, summarized in a look-up table suitable for mixed-teams work. results: 1. a bottom-up, definitions-led approach which leads to a distinction between broad vs narrow validation, and their relationship to regulatory regimes. 2. common basis introduction to the primary methodologies for ai-containing software validation. 3. pharmaceutical drug development and md/ivd specific perspectives on compliant ai software development, as a basis for collaboration. conclusions: alignment of the terms and methodologies used in validation of software products containing artificial intelligence / machine learning (ai/ml) components across the regulated industries of human health is a vital first step in streamlining processes and improving workflows. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/816.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/816.txt new file mode 100644 index 0000000000000000000000000000000000000000..490c733af24f8a6cf52bd54292458bc627cec1f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/816.txt @@ -0,0 +1 @@ + while a lack of diversity is a longstanding problem in computer science and engineering, universities and organizations continue to look for solutions to this issue. among the first of its kind, we launched inspire: stem for social impact, a program at the university of victoria, canada, aimed to motivate and empower students from underrepresented groups in computer science and engineering to develop digital solutions for societally impactful projects by engaging in experiential learning projects with identified community partners. the twenty-four students in the program came from diverse backgrounds in terms of academic areas of study, genders, ethnicities, and levels of technical and educational experience. working with six community partners, these students spent four months learning and developing solutions for a societal and/or environmental problem with potential for local and global impacts. our experiences indicate that working in a diverse team with real clients on solving pressing issues produces a sense of competence, relatedness, and autonomy, which are the basis of self-determination theory. due to the unique structure of this program, the three principles of self-determination theory emerged through different experiences, ultimately motivating the students to build a network of like-minded people. 
the importance of such a network is profound in empowering students to succeed and, in retrospect, remain in software engineering fields. we address the diversity problem by providing diverse, underrepresented students with a safe and like-minded environment where they can learn and realize their full potential. hence, in this paper, we describe the program design, experiences, and lessons learned from this approach. we also provide recommendations for universities and organizations that may want to adapt our approach. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/817.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/817.txt new file mode 100644 index 0000000000000000000000000000000000000000..4999c1690b641f3449adb39c11d5b1f80919178d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/817.txt @@ -0,0 +1 @@ + real world data (rwd) bears great promises to improve the quality of care. however, specific infrastructures and methodologies are required to derive robust knowledge and brings innovations to the patient. drawing upon the national case study of the 32 french regional and university hospitals governance, we highlight key aspects of modern clinical data warehouses (cdws): governance, transparency, types of data, data reuse, technical tools, documentation and data quality control processes. semi-structured interviews as well as a review of reported studies on french cdws were conducted in a semi-structured manner from march to november 2022. out of 32 regional and university hospitals in france, 14 have a cdw in production, 5 are experimenting, 5 have a prospective cdw project, 8 did not have any cdw project at the time of writing. the implementation of cdw in france dates from 2011 and accelerated in the late 2020. from this case study, we draw some general guidelines for cdws. the actual orientation of cdws towards research requires efforts in governance stabilization, standardization of data schema and development in data quality and data documentation. particular attention must be paid to the sustainability of the warehouse teams and to the multi-level governance. the transparency of the studies and the tools of transformation of the data must improve to allow successful multi-centric data reuses as well as innovations in routine care. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/818.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/818.txt new file mode 100644 index 0000000000000000000000000000000000000000..f2a3c36dd79681d871a932b52d0a0554a4bbca90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/818.txt @@ -0,0 +1 @@ + due to the precautionary measures during the covid-19 pandemic many universities offered unproctored take-home exams. we propose methods to detect potential collusion between students and apply our approach on event log data from take-home exams during the pandemic. we find groups of students with suspiciously similar exams. in addition, we compare our findings to a proctored comparison group. by this, we establish a rule of thumb for evaluating which cases are "outstandingly similar", i.e., suspicious cases. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/819.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/819.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d63f803e3144631858a6c845487c1bece51c04a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/819.txt @@ -0,0 +1 @@ + online political advertising has become the cornerstone of political campaigns. the budget spent solely on political advertising in the u.s. has increased by more than 100% from $700 million during the 2017-2018 u.s. election cycle to $1.6 billion during the 2020 u.s. presidential elections. naturally, the capacity offered by online platforms to micro-target ads with political content has been worrying lawmakers, journalists, and online platforms, especially after the 2016 u.s. presidential election, where cambridge analytica targeted voters with political ads congruent with their personality. to curb such risks, both online platforms and regulators (through the dsa act proposed by the european commission) have agreed that researchers, journalists, and civil society need to be able to scrutinize the political ads running on large online platforms. consequently, online platforms such as meta and google have implemented ad libraries that contain information about all political ads running on their platforms. this is the first step on a long path. due to the volume of available data, it is impossible to go through these ads manually, and we now need automated methods and tools to assist in the scrutiny of political ads. in this paper, we focus on political ads that are related to policy. understanding which policies politicians or organizations promote and to whom is essential in determining dishonest representations. this paper proposes automated methods based on pre-trained models to classify ads into 14 main policy groups identified by the comparative agenda project (cap). we discuss several inherent challenges that arise. finally, we analyze policy-related ads featured on meta platforms during the 2022 french presidential election period. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/82.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/82.txt new file mode 100644 index 0000000000000000000000000000000000000000..680cfeb1255f5cc95a098528b0a65b1d5fbf5108 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/82.txt @@ -0,0 +1 @@ + as noted in the works of lattimore and szepesvári, it is an open problem to characterize the minimax regret of linear bandits in a wide variety of action spaces. in this article we present an optimal regret lower bound for a wide class of convex action spaces. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/820.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/820.txt new file mode 100644 index 0000000000000000000000000000000000000000..e021a8af85d79fe0247954764ba2f41acaf24ae6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/820.txt @@ -0,0 +1 @@ + large language models (llms), such as chatgpt, are quickly advancing ai to the frontiers of practical consumer use and leading industries to re-evaluate how they allocate resources for content production. authoring of open educational resources and hint content within adaptive tutoring systems is labor intensive. 
should llms like chatgpt produce educational content on par with humanauthored content, the implications would be significant for further scaling of computer tutoring system approaches. in this paper, we conduct the first learning gain evaluation of chatgpt by comparing the efficacy of its hints with hints authored by human tutors with 77 participants across two algebra topic areas, elementary algebra and intermediate algebra. we find that 70% of hints produced by chat-gpt passed our manual quality checks and that both human and chatgpt conditions produced positive learning gains. however, gains were only statistically significant for human tutor created hints. learning gains from human-created hints were substantially and statistically significantly higher than chatgpt hints in both topic areas, though chatgpt participants in the intermediate algebra experiment were near ceiling and not even with the control at pre-test. we discuss the limitations of our study and suggest several future directions for the field. problem and hint content used in the experiment is provided for replicability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/821.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/821.txt new file mode 100644 index 0000000000000000000000000000000000000000..c61ef1ee9452a52ea000dbd683c555aee5fcb56c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/821.txt @@ -0,0 +1 @@ + poverty status identification is the first obstacle to eradicating poverty. village-level poverty identification is very challenging due to the arduous field investigation and insufficient information. the development of the web infrastructure and its modeling tools provides fresh approaches to identifying poor villages. upon those techniques, we build a village graph for village poverty status identification. by modeling the village connections as a graph through the geographic distance, we show the correlation between village poverty status and its graph topological position and identify two key factors (centrality, homophily decaying effect) for identifying villages. we further propose the first graph-based method to identify poor villages. it includes a global centrality2vec module to embed village centrality into the dense vector and a local graph distance convolution module that captures the decaying effect. in this paper, we make the first attempt to interpret and identify village-level poverty from a graph perspective. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/822.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/822.txt new file mode 100644 index 0000000000000000000000000000000000000000..f9a97e31a70243f68f162d46f0e15b9a161d08d4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/822.txt @@ -0,0 +1 @@ + recommendation systems are pervasive in the digital economy. an important assumption in many deployed systems is that user consumption reflects user preferences in a static sense: users consume the content they like with no other considerations in mind. 
however, as we document in a large-scale online survey, users do choose content strategically to influence the types of content they get recommended in the future. we model this user behavior as a two-stage noisy signalling game between the recommendation system and users: the recommendation system initially commits to a recommendation policy, presents content to the users during a cold-start phase, which the users choose to strategically consume in order to affect the types of content they will be recommended in a recommendation phase. we show that in equilibrium, users engage in behaviors that accentuate their differences from users with different preference profiles. in addition, out of fear of losing exposure to their minority content, (statistical) minorities may not consume content that is liked by mainstream users. we next propose three interventions that may improve recommendation quality (both on average and for minorities) when taking into account strategic consumption: (1) adopting a recommendation system policy that uses preferences from a prior, (2) communicating to users that universally liked ("mainstream") content will not be used as the basis of recommendation, and (3) serving content that is personalized enough yet expected to be liked in the beginning. finally, we describe a methodology to inform applied theory modeling with survey results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/823.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/823.txt new file mode 100644 index 0000000000000000000000000000000000000000..a42f14ae159520680069736981f5fa445dc9c6e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/823.txt @@ -0,0 +1 @@ + it has been observed in many studies that female students in general are unwilling to undertake a course of study in ict. recent literature has also pointed out that undermining the prejudices of girls with respect to these disciplines is very difficult in adolescence, suggesting that, to be effective, awareness programs on computer disciplines should be offered in pre-school or lower school age. on the other hand, even assuming that large-scale computer literacy programs can be immediately activated in lower schools and kindergartens, we can't wait for >15-20 years before we can appreciate the effectiveness of these programs. the scarcity of women in ict has a tangible negative impact on countries' technological innovation, which requires immediate action. in this paper, we describe a strategy, and the details of a number of programs coordinated by the engineering and computer science departments at sapienza university, to make high school girl students aware of the importance of new technologies and ict. we call our proposed training strategy "greed(y)", because it has been conceived as a grid of vertical (hard) and horizontal (soft) skills, intertwining topics to which girls are traditionally sensitive, such as environmental sustainability, health, etc., with digital skills and soft skills that the public education system more rarely considers - such as team-working, public speaking, social networking, and competition - with visible consequences more for girls than for boys. 
greedy is also a term used in computer science to denote sub-optimal strategies, as in our case, since we acknowledge that, in order to achieve a higher impact, similar programs should be proposed much earlier in a student's career. in addition to describing the theoretical approach, the paper offers some project examples. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/824.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/824.txt new file mode 100644 index 0000000000000000000000000000000000000000..d514cfaba2cf5d594d48ba333620148eecc6448a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/824.txt @@ -0,0 +1 @@ + australia is a leader in autonomous systems technology, particularly in the mining industry, borne from necessity in a geographically dispersed and complex natural environment. increasingly advanced autonomous systems are becoming more prevalent in australia, particularly as the safety, environmental and efficiency benefits become better understood, and the increasing sophistication of technology improves capability and availability. increasing use of these systems, including in the maritime domain and air domain, is placing pressure on the national safety regulators, who must either continue to apply their traditional regulatory approach requiring exemptions to enable operation of emerging technology, or seize the opportunity to put in place an agile and adaptive approach better suited to the rapid developments of the 21st century. in australia, the key national safety regulators have demonstrated an appetite for working with industry to facilitate innovation, but their limited resources mean progress is slow. there is a critical role to be played by third parties from industry, government, and academia who can work together to develop, test and publish new assurance and accreditation frameworks for trusted autonomous systems, and assist in the transition to an adaptive and agile regulatory philosophy. this is necessary to ensure the benefits of autonomous systems can be realised, without compromising safety. this paper will identify the growing use cases for autonomous systems in australia, in the maritime, air and land domains, assess the current regulatory framework, argue that australia's regulatory approach needs to become more agile and anticipatory, and investigate how third-party projects could positively impact the assurance and accreditation process for autonomous systems in the future. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/825.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/825.txt new file mode 100644 index 0000000000000000000000000000000000000000..f98802edf8f3157423ffce29351d60287df2cfa7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/825.txt @@ -0,0 +1 @@ + climate change is becoming more visible, and human adaptation is required urgently to prevent greater damage. one particular domain of adaptation concerns daily mobility (work commute), with a significant portion of these trips being done in individual cars. yet, their impact on pollution, noise, or accidents is well-known. this paper explores various cognitive biases that can explain such a lack of adaptation. our approach is to design simple interactive simulators that users can play with in order to understand biases.
the idea is that awareness of such cognitive biases is often a first step towards more rational decision making, even though things are not that simple. this paper reports on three simulators, each focused on a particular factor of resistance. various scenarios are simulated to demonstrate their explanatory power. these simulators are already available to play online, with the goal of providing users with food for thought about how mobility could evolve in the future. work is still ongoing to design a user survey to evaluate their impact. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/826.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/826.txt new file mode 100644 index 0000000000000000000000000000000000000000..6da148d9dc35935551823c02a22e85204e4b12d6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/826.txt @@ -0,0 +1 @@ + large generative ai models (lgaims), such as chatgpt, gpt-4 or stable diffusion, are rapidly transforming the way we communicate, illustrate, and create. however, ai regulation, in the eu and beyond, has primarily focused on conventional ai models, not lgaims. this paper will situate these new generative models in the current debate on trustworthy ai regulation, and ask how the law can be tailored to their capabilities. after laying technical foundations, the legal part of the paper proceeds in four steps, covering (1) direct regulation, (2) data protection, (3) content moderation, and (4) policy proposals. it suggests a novel terminology to capture the ai value chain in lgaim settings by differentiating between lgaim developers, deployers, professional and non-professional users, as well as recipients of lgaim output. we tailor regulatory duties to these different actors along the value chain and suggest strategies to ensure that lgaims are trustworthy and deployed for the benefit of society at large. rules in the ai act and other direct regulation must match the specificities of pre-trained models. the paper argues for three layers of obligations concerning lgaims (minimum standards for all lgaims; high-risk obligations for high-risk use cases; collaborations along the ai value chain). in general, regulation should focus on concrete high-risk applications, and not the pre-trained model itself, and should include (i) obligations regarding transparency and (ii) risk management. non-discrimination provisions (iii) may, however, apply to lgaim developers. lastly, (iv) the core of the dsa's content moderation rules should be expanded to cover lgaims. this includes notice and action mechanisms, and trusted flaggers. in all areas, regulators and lawmakers need to act fast to keep pace with the dynamics of chatgpt et al. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/827.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/827.txt new file mode 100644 index 0000000000000000000000000000000000000000..d0e63eb178f917f64337d4ccb0d4e8c53a86874c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/827.txt @@ -0,0 +1 @@ + the report provides an intricate analysis of cyber security as defined in contemporary operational digital environments. an extensive literature review is conducted to determine how the construct is reviewed in modern scholarly contexts. the article seeks to offer a comprehensive definition of the term "cybersecurity" to accentuate its multidisciplinary perspectives.
a meaningful, concise, and inclusive dimension will be provided to assist in designing scholarly discourse on the subject. the report will offer a unified framework for examining activities that constitute the concept, resulting in a new definition: "cybersecurity is the collection and concerting of resources including personnel and infrastructure, structures, and processes to protect networks and cyber-enabled computer systems from events that compromise the integrity and interfere with property rights, resulting in some extent of loss." the encapsulation of the interdisciplinary domains will be critical in improving understanding and response to emerging challenges in cyberspace. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/828.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/828.txt new file mode 100644 index 0000000000000000000000000000000000000000..474aaf8ddc94733fca39bcc3478ccb3c73da9e93 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/828.txt @@ -0,0 +1 @@ + the united states supreme court overturned landmark rulings made in its 1973 verdict in roe v. wade. the justices, by way of a majority vote in dobbs v. jackson women's health organization, decided that abortion wasn't a constitutional right and returned the issue of abortion to the elected representatives. this decision triggered multiple protests and debates across the us, especially in the context of the midterm elections in november 2022. given that many citizens use social media platforms to express their views and mobilize for collective action, and given that online debate has tangible effects on public opinion, political participation, news media coverage, and political decision-making, it is crucial to understand online discussions surrounding this topic. toward this end, we present the first large-scale twitter dataset collected on the abortion rights debate in the united states. we present a set of 74m tweets systematically collected over the course of one year from january 1, 2022 to january 6, 2023. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/829.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/829.txt new file mode 100644 index 0000000000000000000000000000000000000000..732ce611b8d78cc68e7fc3aa9de92e495c2cc567 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/829.txt @@ -0,0 +1 @@ + the electronic health record (ehr) has become an essential tool in the healthcare ecosystem, providing authorized clinicians with patients' health-related information for better treatment. while most developed countries are taking advantage of ehrs to improve their healthcare system, it remains challenging in developing countries to support clinical decision-making and public health using a computerized patient healthcare information system. this paper proposes a novel ehr architecture suitable for developing countries-an architecture that fosters inclusion and provides solutions tailored to all social classes and socioeconomic statuses. our architecture foresees an internet-free (offline) solution to allow medical transactions between healthcare organizations, and the storage of ehrs in geographically underserved and rural areas. moreover, we discuss how artificial intelligence can leverage anonymous health-related information to enable better public health policy and surveillance.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/83.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/83.txt new file mode 100644 index 0000000000000000000000000000000000000000..d114ebdc20d1055724cdd77ecf0230d0aaf0e857 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/83.txt @@ -0,0 +1 @@ + projection operations are a typical computation bottleneck in online learning. in this paper, we enable projection-free online learning within the framework of online convex optimization with memory (oco-m) -oco-m captures how the history of decisions affects the current outcome by allowing the online learning loss functions to depend on both current and past decisions. particularly, we introduce the first projection-free meta-base learning algorithm with memory that minimizes dynamic regret, i.e., that minimizes the suboptimality against any sequence of time-varying decisions. we are motivated by artificial intelligence applications where autonomous agents need to adapt to time-varying environments in real-time, accounting for how past decisions affect the present. examples of such applications are: online control of dynamical systems; statistical arbitrage; and time series prediction. the algorithm builds on the online frank-wolfe (ofw) and hedge algorithms. we demonstrate how our algorithm can be applied to the online control of linear time-varying systems in the presence of unpredictable process noise. to this end, we develop a controller with memory and bounded dynamic regret against any optimal time-varying linear feedback control policy. we validate our algorithm in simulated scenarios of online control of linear time-invariant systems. [a comparison table follows in the original, listing prior methods by loss function, projection-free property, memory, and regret rate, e.g., (zinkevich, 2003) for convex losses with o(√t(1 + c_t)) regret and (jadbabaie et al., 2015) for convex smooth losses; the table is truncated here.]
we conclude that taps-rm is a tool to reflect on responsibilities at a data science project level and can be used to advance responsible data science by design. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/831.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/831.txt new file mode 100644 index 0000000000000000000000000000000000000000..9bc88d437aee8f1df9b8835dfcc90e1a07f1c184 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/831.txt @@ -0,0 +1 @@ + we examine climate-related disclosures in a large sample of reports published by banks that officially endorsed the recommendations of the task force on climate-related financial disclosures (tcfd). in doing so, we introduce a new application of zero-shot text classification. by developing a set of fine-grained tcfd labels, we show that zero-shot analysis is a useful tool for classifying climate-related disclosures without further model training. overall, our findings indicate that corporate climate-related disclosures grew dynamically after the launch of the tcfd recommendations. however, there are marked differences in the extent of reporting by recommended disclosure topic, suggesting that some recommendations have not yet been fully met. our findings yield important conclusions for the design of climate-related disclosure frameworks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/832.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/832.txt new file mode 100644 index 0000000000000000000000000000000000000000..1a2e8deb2ea2c1d973aa344c42f1ec463149cf1a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/832.txt @@ -0,0 +1 @@ + social dilemmas can be considered situations where individual rationality leads to collective irrationality. the multi-agent reinforcement learning community has leveraged ideas from social science, such as social value orientations (svo), to solve social dilemmas in complex cooperative tasks. in this paper, by first introducing the typical "division of labor or roles" mechanism in human society, we provide a promising solution for intertemporal social dilemmas (isd) with svos. a novel learning framework, called learning roles with emergent svos (resvo), is proposed to transform the learning of roles into the emergence of social value orientations, which is symmetrically solved by endowing agents with altruism to share rewards with other agents. an svo-based role embedding space is then constructed by conditioning individual policies on roles with a novel rank regularizer and mutual information maximizer. experiments show that resvo achieves a stable division of labor and cooperation in isds of different complexity. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/833.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/833.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d8663707d7d7c83d3c02a08c8752196f1f1bdbc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/833.txt @@ -0,0 +1 @@ + in this paper we investigate the ability of modern machine learning algorithms to infer basic offline activities, e.g., shopping and dining, from location data.
using anonymized data of thousands of users of a prominent location-based social network, we empirically demonstrate that not only does state-of-the-art machine learning excel at the task at hand (f1 score > 0.9), but tabular models are also among the best performers. the findings we report here not only fill an existing gap in the literature, but also highlight the potential risks of such capabilities given the ubiquity of location data and the high accessibility of tabular machine learning models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/834.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/834.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/835.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/835.txt new file mode 100644 index 0000000000000000000000000000000000000000..54dcafbf9781f7c2d96349befe86da869c5e7e7d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/835.txt @@ -0,0 +1 @@ + computer science (cs) has historically struggled with issues related to diversity, equity, and inclusion (dei). based on how these issues were affecting phd students in our department (the carnegie mellon university cs department), we identified required dei education for phd students as a potentially high-impact approach to improving the phd student experience in our program. given that no existing curriculum met the desired criteria, we (phd students) -alongside many members of the cmu community -developed and implemented cs-jedi: justice, equity, diversity, and inclusion in computer science. cs-jedi is a 6-week dei curriculum that is now taken by all first-year phd students in our department. this paper covers cs-jedi's motivation and goals; describes how its evidence-based curriculum is tailored to these goals and to the cs phd context; and gives a data-driven evaluation of the extent to which cs-jedi's first offering, in spring 2022, achieved these goals. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/836.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/836.txt new file mode 100644 index 0000000000000000000000000000000000000000..21c573b3255102b59fd40927752009784d7d5a5d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/836.txt @@ -0,0 +1 @@ + accurate modeling of student knowledge is essential for large-scale online learning systems that are increasingly used for student training. knowledge tracing aims to model student knowledge state given the student's sequence of learning activities. modern knowledge tracing (kt) is usually formulated as a supervised sequence learning problem to predict students' future practice performance according to their past observed practice scores by summarizing student knowledge state as a set of evolving hidden variables. because of this formulation, many current kt solutions are not fit for modeling student learning from non-assessed learning activities with no explicit feedback or score observation (e.g., watching video lectures that are not graded). additionally, these models cannot explicitly represent the dynamics of knowledge transfer among different learning activities, particularly between the assessed (e.g., quizzes) and non-assessed (e.g., video lectures) learning activities.
in this paper, we propose transition-aware multi-activity knowledge tracing (tamkot), which models knowledge transfer between learning materials, in addition to student knowledge, when students transition between and within assessed and non-assessed learning materials. tamkot is formulated as a deep recurrent multi-activity learning model that explicitly learns knowledge transfer by activating and learning a set of knowledge transfer matrices, one for each transition type between student activities. accordingly, our model allows for representing each material type in a different yet transferable latent space while maintaining student knowledge in a shared space. we evaluate our model on three real-world publicly available datasets and demonstrate tamkot's capability in predicting student performance and modeling knowledge transfer. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/837.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/837.txt new file mode 100644 index 0000000000000000000000000000000000000000..3070aea70552ed6f216b416d7aa6c070a8c250e0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/837.txt @@ -0,0 +1 @@ + the flipped classroom is a new pedagogical strategy that has been gaining increasing importance recently. spoken discussion dialog commonly occurs in the flipped classroom, and it embeds rich information indicating the processes and progression of students' learning. this study focuses on learning analytics from spoken discussion dialog in the flipped classroom, which aims to collect and analyze the discussion dialogs in the flipped classroom in order to understand group learning processes and outcomes. we have recently transformed a course using the flipped classroom strategy, where students watched video-recorded lectures at home prior to group-based problem-solving discussions in class. the in-class group discussions were recorded throughout the semester and then transcribed manually. after features were extracted from the dialogs by multiple tools and customized processing techniques, we performed statistical analyses to explore the indicators that are related to the group learning outcomes from face-to-face discussion dialogs in the flipped classroom. then, machine learning algorithms were applied to the indicators in order to predict the group learning outcome as high, mid or low. the best prediction accuracy reaches 78.9%, which demonstrates the feasibility of achieving automatic learning outcome prediction from group discussion dialog in the flipped classroom. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/838.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/838.txt new file mode 100644 index 0000000000000000000000000000000000000000..782a30a42521ff67758d42697f606b7f169836b4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/838.txt @@ -0,0 +1 @@ + data sharing partnerships are increasingly an imperative for research institutions and, at the same time, a challenge for established models of data governance and ethical research oversight. we analyse four cases of data partnership involving academic institutions and examine the role afforded to the research partner in negotiating the relationship between risk, value, trust and ethics.
within this terrain, far from being a restraint on financialisation, the instrumentation of ethics forms part of the wider mobilisation of infrastructure for the realisation of profit in the big data economy. under what we term 'combinatorial data governance', academic structures for the management of research ethics are instrumentalised as organisational functions that serve to mitigate reputational damage and societal distrust. in the alternative model of 'experimental data governance', researchers propose frameworks and instruments for the rethinking of data ethics and the risks associated with it - a model that is promising but limited in its practical application. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/839.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/839.txt new file mode 100644 index 0000000000000000000000000000000000000000..ff02b86dfcd8df7bd4fba846f118364bde4d84a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/839.txt @@ -0,0 +1 @@ + in-vehicle sensing technology has gained tremendous attention due to its ability to support major technological developments, such as connected vehicles and self-driving cars. in-vehicle sensing data are invaluable and important data sources for traffic management systems. in this paper we propose an innovative architecture of unobtrusive in-vehicle sensors and present methods and tools that are used to measure the behavior of drivers. the proposed architecture, including methods and tools, is used in our nih project to monitor and identify older drivers with early dementia. index terms: driver's behavior, in-vehicle sensing, in-vehicle cameras, telematics sensors \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/84.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/84.txt new file mode 100644 index 0000000000000000000000000000000000000000..18c684e0f7987f935e7c8ea505f7fdd54d763102 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/84.txt @@ -0,0 +1 @@ + grapevine budbreak is a key phenological stage of seasonal development, which serves as a signal for the onset of active growth. this is also when grape plants are most vulnerable to damage from freezing temperatures. hence, it is important for winegrowers to anticipate the day of budbreak occurrence to protect their vineyards from late spring frost events. this work investigates deep learning for budbreak prediction using data collected for multiple grape cultivars. while some cultivars have over 30 seasons of data, others have as little as 4 seasons, which can adversely impact prediction accuracy. to address this issue, we investigate multi-task learning, which combines data across all cultivars to make predictions for individual cultivars. our main result shows that several variants of multi-task learning are all able to significantly improve prediction accuracy compared to learning for each cultivar independently.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/840.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/840.txt new file mode 100644 index 0000000000000000000000000000000000000000..d09879e18849385c1bc482952cef99838567d1f6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/840.txt @@ -0,0 +1 @@ + artificial intelligence is changing the way we create and evaluate information, and this is happening during an infodemic, which has been having dramatic effects on global health. in this paper we evaluate whether recruited individuals can distinguish disinformation from accurate information, structured in the form of tweets, and determine whether a tweet is organic or synthetic, i.e., whether it has been written by a twitter user or by the ai model gpt-3. our results show that gpt-3 is a double-edged sword, which, in comparison with humans, can produce accurate information that is easier to understand, but can also produce more compelling disinformation. we also show that humans cannot distinguish tweets generated by gpt-3 from tweets written by human users. starting from our results, we reflect on the dangers of ai for disinformation, and on how we can improve information campaigns to benefit global health. a related concern is that people may abandon their efforts to critically assess information when faced with a vast amount of confusing information, which could result in apathy and reliance on emotions for information consumption. [detailed study endpoints (recognition scores for synthetic versus organic tweets, pre/post confidence levels, enrollment ratios, and sample size and power calculations) appear as tables in the original and are omitted here.]
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/841.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/841.txt new file mode 100644 index 0000000000000000000000000000000000000000..69995208a31ef5bf3829052e880281f28ae0c6bb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/841.txt @@ -0,0 +1 @@ + the advancement of the internet of things (iot) and pervasive computing applications provides a better opportunity to understand the behavior of the aging population. however, in a nursing home scenario, common sensors and techniques used to track an elderly person living alone are not suitable. in this paper, we design a location-based tracking system for a four-story nursing home -the salvation army, peacehaven nursing home in singapore. the main challenge here is to identify the group activity among the nursing home's residents and to detect if they have any deviated activity behavior. we propose a location-based deviated activity behavior detection system to detect deviated activity behavior by leveraging a data fusion technique. in order to compute the features for data fusion, an adaptive method is applied to extract the group and individual activity time and generate a daily hybrid norm for each of the residents. next, deviated activity behavior detection is executed by considering the difference between daily norm patterns and daily input data for each resident. lastly, the deviated activity behavior among the residents is classified using a rule-based classification approach. through the implementation, 44.4% of the residents do not have deviated activity behavior, while 37% of residents are involved in one deviated activity behavior and 18.6% of residents have two or more deviated activity behaviors. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/842.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/842.txt new file mode 100644 index 0000000000000000000000000000000000000000..09f4ed4f3f2f829e3d7d1d2786e9fa5f45de61a4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/842.txt @@ -0,0 +1 @@ + product recommendation systems have been instrumental in online commerce since the early days. their development has expanded further with the help of big data and advanced deep learning methods, where consumer profiling is central. the interest of the consumer can now be predicted based on personal past choices and the choices of similar consumers. however, what is currently defined as a choice is based on quantifiable data, like product features, cost, and type. this paper investigates the possibility of profiling customers based on the preferred product design and wanted affects.
we consider the case of vase design, where we study the individual kansei of each design. the personal aspects of the consumer considered in this study were decided based on the conclusions of our literature review on consumer response to product design. we build a representative consumer model that constitutes the recommendation system's core using deep learning. it asks new consumers to specify, through kansei adjectives, what affect they are looking for, and recommends, as a result, the aesthetic design that will most likely cause that affect. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/843.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/843.txt new file mode 100644 index 0000000000000000000000000000000000000000..e299cb6d938b2f5acf7fe2147458161cce7ad685 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/843.txt @@ -0,0 +1 @@ + do we live in a "golden age of conspiracy theories?" in the last few decades, conspiracy theories have proliferated on the internet with some having dangerous real-world consequences. a large contingent of those who participated in the january 6th attack on the us capitol fervently believed in the qanon conspiracy theory. in this work, we study the relationships amongst five prominent conspiracy theories (qanon, covid, ufo/aliens, 9/11, and flat-earth) and each of their respective relationships to the news media, both authentic news and misinformation. identifying and publishing a set of 755 different conspiracy theory websites dedicated to our five conspiracy theories, we find that each set often hyperlinks to the same external domains, with covid and qanon conspiracy theory websites having the largest number of shared connections. examining the role of news media, we further find that not only do outlets known for spreading misinformation hyperlink to our set of conspiracy theory websites more often than authentic news websites, but also that this hyperlinking increased dramatically between 2018 and 2021, with the advent of qanon and the start of the covid-19 pandemic. using partial granger-causality, we uncover several positive correlative relationships between the hyperlinks from misinformation websites and the popularity of conspiracy theory websites, suggesting the prominent role that misinformation news outlets play in popularizing many conspiracy theories. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/844.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/844.txt new file mode 100644 index 0000000000000000000000000000000000000000..51808bd3456597657cb0d827c20b5fce48d68119 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/844.txt @@ -0,0 +1 @@ + in response to disinformation and propaganda from russian online media following the invasion of ukraine, russian media outlets such as russia today and sputnik news were banned throughout europe. to maintain viewership, many of these russian outlets began to heavily promote their content on messaging services like telegram. in this work, we study how 16 russian media outlets interacted with and utilized 732 telegram channels throughout 2022. leveraging the foundation model mpnet, dp-means clustering, and hawkes processes, we trace how narratives spread between news sites and telegram channels. we show that news outlets not only propagate existing narratives through telegram but that they source material from the messaging platform.
for example, across the websites in our study, between 2.3% (ura.news) and 26.7% (ukraina.ru) of articles discussed content that originated/resulted from activity on telegram. finally, tracking the spread of individual topics, we measure the rate at which news outlets and telegram channels disseminate content within the russian media ecosystem, finding that websites like ura.news and telegram channels such as @genshab are the most effective at disseminating their content. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/845.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/845.txt new file mode 100644 index 0000000000000000000000000000000000000000..29cab34a99b26bad67578c8f62788f5717deb0aa --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/845.txt @@ -0,0 +1 @@ + due to the emergence of the covid-19 pandemic, governments have taken several urgent steps to minimize the disease's effect and transmission. supportive measures to trace contacts and warn people infected with covid-19 were also implemented, such as the covid-19 contact tracing application. this study investigated the effects of variables influencing the intention to use the covid-19 tracker. the extended unified theory of acceptance and use of technology model was used to investigate user behavior using the covid-19 tracker application. google forms was used to construct and distribute the online survey to participants. experimental results from 224 individuals revealed that performance expectations, trust, and privacy all have an impact on app usage intention. however, social impact, effort expectation, and facilitating conditions were not shown to be statistically significant. the conceptual model explained 60.07% of the variation, suggesting that software developers, service providers, and policymakers should consider performance expectations, trust, and privacy as viable factors to encourage citizens to use the app. this study's recommendations and limitations are thoroughly discussed. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/846.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/846.txt new file mode 100644 index 0000000000000000000000000000000000000000..74baf17411d93c7e4253a8e4c3ceddb633398716 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/846.txt @@ -0,0 +1 @@ + six years after the seminal paper on fair was published, researchers still struggle to understand how to implement fair. for many researchers, fair promises long-term benefits for near-term effort, requires skills not yet acquired, and is one more thing in a long list of unfunded mandates and onerous requirements on scientists. even for those required to or who are convinced they must make time for fair research practices, the preference is for just-in-time advice properly sized to the scientific artifacts and process. because of the generality of most fair implementation guidance, it is difficult for a researcher to adjust the advice to their situation. technological advances, especially in the area of artificial intelligence (ai) and machine learning (ml), complicate fair adoption as researchers and data stewards ponder how to make software, workflows, and models fair and reproducible. the fair+ implementation survey tool (fairist) mitigates the problem by integrating research requirements with research proposals in a systematic way.
fairist factors in new scholarly outputs such as nanopublications and notebooks, and the various research artifacts related to ai research (data, models, workflows, and benchmarks). researchers step through a self-serve survey process and receive a table ready for use in their dmp and/or work plan while gaining awareness of the fair principles and open science concepts. fairist is a model that uses part of the proposal process as a way to do outreach and raise awareness of fair dimensions and considerations, while providing just-in-time assistance for competitive proposals. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/847.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/847.txt new file mode 100644 index 0000000000000000000000000000000000000000..c43e003c00d4e03ac566c8e4fe8dcd9ae9af8327 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/847.txt @@ -0,0 +1 @@ + the metaverse has recently gained increasing attention from the public. it builds up a virtual world where we can live as a new role regardless of the role we play in the physical world. however, building and operating this virtual world will generate an extraordinary amount of carbon emissions for computing, communicating, displaying, and so on. this inevitably hinders the realization of carbon neutrality as a priority of our society, adding a heavy burden to our earth. in this survey, we first present a green viewpoint of the metaverse by investigating the carbon issues in its three core layers, namely the infrastructure layer, the interaction layer, and the economy layer, and estimate their carbon footprints in the near future. next, we analyze a range of current and emerging applicable green techniques for the purpose of reducing energy usage and carbon emissions of the metaverse, and discuss their limitations in supporting metaverse workloads. then, in view of these limitations, we discuss important implications and bring forth several insights and future directions to make each metaverse layer greener. after that, we investigate green solutions from the governance perspective, including both public policies in the physical world and regulation of users in the virtual world, and propose an indicator, carbon utility (cu), to quantify the service quality brought by a user activity per unit of carbon emissions. finally, we identify an issue for the metaverse as a whole and summarize three directions: (1) a comprehensive consideration of necessary performance metrics, (2) a comprehensive consideration of involved layers and multiple internal components, and (3) a new assessing, recording, and regulating mechanism on carbon footprints of user activities. our proposed quantitative indicator cu would be helpful in regulating user activities in the metaverse world. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/848.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/848.txt new file mode 100644 index 0000000000000000000000000000000000000000..359d5ef1746cfc945bd121e19a5e35c215f8656e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/848.txt @@ -0,0 +1 @@ + external changes. because 'we are likely on the edge of a major transformation in how many of us live - and how goods are produced and distributed.' the conceptual approach enables undertaking experimental developments that progress from the current state of risk assessment towards measuring risk with analytical methods.
these experimental research developments are focused on: (1) the need to classify covid-19 risk data into primary and secondary data sets for training predictive algorithms; and (2) the need to include ai in healthcare networks that intersect a diverse set of isolated production and supply chain domains. the main motivating point for this article is the timing -global pandemics such as covid-19 are very rare events. although there are several viruses in circulation (e.g., zika, aids), it has been a century since the last major global pandemic -the spanish flu. thus, we can say that investigating covid-19 is a once-in-a-lifetime event and the findings will be of utmost value in dealing with disease x. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/849.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/849.txt new file mode 100644 index 0000000000000000000000000000000000000000..912e7195d5f2abfe8aa5c45134b3cbc3600b1aeb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/849.txt @@ -0,0 +1 @@ + this chapter introduces the perspective of political ecology to the application of artificial intelligence to artistic processes (creative-ai). hence, the environmental and social impact of the development and employment of creative-ai is the focus of this text, when we consider them as part of an economic system that transforms artistic creation into a commodity. i first analyse specific creative-ai cases, and then conduct a speculation that takes jacques attali's writing on the role of music in society as a vantage point, and investigates the environmental and social consequences of an automatic composition network controlled by a large music streaming platform. whereas the possibilities that emerge from creative-ai may be promising from an artistic perspective, its entanglement with corporate interest raises severe concerns. these concerns can only be addressed by a wide cross-sectoral alliance between research and arts that develops a critical perspective on the future directions of creative-ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/85.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/85.txt new file mode 100644 index 0000000000000000000000000000000000000000..546654ab0b56b9a0ecec4c585253e5c8551433fc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/85.txt @@ -0,0 +1 @@ + reinforcement learning (rl) so far has limited real-world applications. one key challenge is that typical rl algorithms heavily rely on a reset mechanism to sample proper initial states; these reset mechanisms, in practice, are expensive to implement due to the need for human intervention or heavily engineered environments. to make learning more practical, we propose a generic no-regret reduction to systematically design reset-free rl algorithms. our reduction turns the reset-free rl problem into a two-player game. we show that achieving sublinear regret in this two-player game would imply learning a policy that has both sublinear performance regret and sublinear total number of resets in the original rl problem. this means that the agent eventually learns to perform optimally and avoid resets. to demonstrate the effectiveness of this reduction, we design an instantiation for linear markov decision processes, which is the first provably correct reset-free rl algorithm.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/850.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/850.txt new file mode 100644 index 0000000000000000000000000000000000000000..3eee9ef867738e23e8f69c1d47f24b5832e12584 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/850.txt @@ -0,0 +1 @@ + harmful internet use (hiu) is a term coined for the unintended use of the internet. in this study, we propose a more accurate hiu measuring method based on the peer assessment and differential evolution approach. the sample data comprises a juvenile population in poland; 267 subjects assessed 1,513 peers. in addition to classic statistical analysis, differential evolution has been employed. results indicate that there may be a substantially higher rate of hiu than other studies have indicated. more accurate measurement of the adolescent population influx affected by hiu is needed for healthcare and welfare system planning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/851.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/851.txt new file mode 100644 index 0000000000000000000000000000000000000000..f814298209baab81826f84ba15751b2db1eb8a32 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/851.txt @@ -0,0 +1 @@ + ports are striving for innovative technological solutions to cope with the ever-increasing growth of transport, while at the same time improving their environmental footprint. an emerging technology that has the potential to substantially increase the efficiency of the multifaceted and interconnected port processes is the digital twin. although digital twins have been successfully integrated in many industries, there is still a lack of cross-domain understanding of what constitutes a digital twin. furthermore, the implementation of the digital twin in complex systems such as the port is still in its infancy. this paper attempts to fill this research gap by conducting an extensive cross-domain literature review of what constitutes a digital twin, keeping in mind the extent to which the respective findings can be applied to the port. it turns out that the digital twin of the port is most comparable to complex systems such as smart cities and supply chains, both in terms of its functional relevance as well as in terms of its requirements and characteristics. the conducted literature review, considering the different port processes and port characteristics, results in the identification of three core requirements of a digital port twin, which are described in detail. these include situational awareness, comprehensive data analytics capabilities for intelligent decision making, and the provision of an interface to promote multi-stakeholder governance and collaboration. finally, specific operational scenarios are proposed on how the port's digital twin can contribute to energy savings by improving the use of port resources, facilities and operations. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/852.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/852.txt new file mode 100644 index 0000000000000000000000000000000000000000..909314b9834822bb2816e7153bc75637a5eef05c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/852.txt @@ -0,0 +1 @@ + outcomes improvements, and realizing it return on investment (roi).
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/853.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/853.txt new file mode 100644 index 0000000000000000000000000000000000000000..d1d64564539a4cac4e094668165737f0c461b8bb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/853.txt @@ -0,0 +1 @@ + the social metaverse is a shared digital space combining a series of interconnected virtual worlds for users to play, shop, work, and socialize. in parallel with the advances of artificial intelligence (ai) and growing awareness of data privacy concerns, federated learning (fl) is promoted as a paradigm shift towards a privacy-preserving, ai-empowered social metaverse. however, challenges including the privacy-utility tradeoff, learning reliability, and ai model thefts hinder the deployment of fl in real metaverse applications. in this paper, we exploit the pervasive social ties among users/avatars to advance a social-aware hierarchical fl framework, i.e., socialfl, for a better privacy-utility tradeoff in the social metaverse. then, an aggregator-free robust fl mechanism based on blockchain is devised with a new block structure and an improved consensus protocol featured with on/off-chain collaboration. furthermore, based on smart contracts and digital watermarks, an automatic federated ai (fedai) model ownership provenance mechanism is designed to prevent ai model thefts and collusive avatars in the social metaverse. experimental findings validate the feasibility and effectiveness of the proposed framework. finally, we envision promising future research directions in this emerging area. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/854.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/854.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/855.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/855.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a727c8a54922074361e74015e5fa5e80eea7ea8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/855.txt @@ -0,0 +1 @@ + although the rapid development of battery technology has greatly increased the range of battery electric vehicles (bevs), range anxiety is still a major concern of bev users or potential users. previous work has proposed a framework explaining the influential factors of range anxiety, and users' trust toward the range estimation system (res) of bevs has been identified as a leading factor of range anxiety. this trust in res may further influence bev users' charging decisions. however, the formation of trust in the res of bevs has not yet been explored. in this work, a questionnaire has been designed to investigate bev users' trust in res and further explore the influential factors of bev users' charging decisions. in total, 152 samples collected from bev users in mainland china have been analyzed. the bev users' gender, driving area, knowledge of bev or res, system usability and trust in the battery system of smartphones have been identified as influential factors of trust in res in bevs, supporting the three-layer framework of automation-related trust (i.e., dispositional trust, situational trust and learned trust). a connection between smartphone charging behaviors and bev charging behaviors has also been observed.
the results from this study can provide insights on the design of res in bevs in order to alleviate range anxiety among users. the results can also inform the design of strategies (e.g., advertising, training and in-vehicle hmi design) that can facilitate more rational charging decisions among bev users. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/856.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/856.txt new file mode 100644 index 0000000000000000000000000000000000000000..c16b673bbefb6e323c6c1479ec7cb026f086d982 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/856.txt @@ -0,0 +1 @@ + the world around us has undergone a radical transformation due to rapid technological advancement in recent decades. the industry of the future generation is evolving, and artificial intelligence is the next change in the making, popularly known as industry 4.0. indeed, experts predict that artificial intelligence (ai) will be the main force behind the following significant virtual shift in the way we stay, converse, study, live, communicate and conduct business. all facets of our social connection are being transformed by this growing technology. one of the newest areas of educational technology is artificial intelligence in the field of education (aied). this study emphasizes the different applications of artificial intelligence in education from both an industrial and academic standpoint. it highlights the most recent applications of aied, with some of its main areas being the reduction of instructors' burden, students' contextualized learning, novel transformative evaluations, and advancements in sophisticated tutoring systems. it analyses aied's ethical component and the influence of this transition on people, particularly students and instructors. finally, the article touches on aied's potential future research and practices. the goal of this study is to introduce the present-day applications to its intended audience. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/857.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/857.txt new file mode 100644 index 0000000000000000000000000000000000000000..941afcc68bfd6b64a4ad4a833d1ae1a1f18191e4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/857.txt @@ -0,0 +1 @@ + science communication forms the bridge between computer science researchers and their target audience. researchers who can effectively draw attention to their research findings and communicate them comprehensibly not only help their target audience to actually learn something, but also benefit themselves from the increased visibility of their work and person. however, the necessary skills for good science communication must also be taught, and this has so far been neglected in the field of software engineering education. we therefore designed and implemented a science communication seminar for bachelor students of computer science curricula. students take the position of a researcher who, shortly after publication, is faced with having to draw attention to the paper and effectively communicate the contents of the paper to one or more target audiences.
based on this scenario, each student develops a communication strategy for an already published software engineering research paper and tests the resulting ideas with the other seminar participants. we explain our design decisions for the seminar, and combine our experiences with responses to a participant survey into lessons learned. with this experience report, we intend to motivate and enable other lecturers to offer a similar seminar at their university. collectively, university lecturers can prepare the next generation of computer science researchers to not only be experts in their field, but also to communicate research findings more effectively. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/858.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/858.txt new file mode 100644 index 0000000000000000000000000000000000000000..d5b9b577e944fdffe655dd8f245292045354aed5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/858.txt @@ -0,0 +1 @@ + coviability refers to the multiple socio-ecological arrangements and governance structures under which humans and nature can coexist in functional, fair, and persistent ways. transitioning to a coviable state in environmentally degraded and socially vulnerable territories is challenging. this paper presents an ongoing french-brazilian joint research project combining machine learning, agroecology, and social sciences to discover coviability pathways that can be adopted and implemented by local populations in the north-east region of brazil. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/859.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/859.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2e68f00a8e516b49856dc0fc0a5dcda8f18860c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/859.txt @@ -0,0 +1 @@ + the programmer's assistant is an experimental prototype software development environment that integrates a chatbot with a code editor. conversational capability was achieved by using an existing code-fluent large language model and providing it with a prompt that establishes a conversational interaction pattern, a set of conventions, and a style of interaction appropriate for the application. a discussion of the evolution of the prompt provides a case study in how to coax an existing foundation model to behave in a desirable manner for a particular application. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/86.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/86.txt new file mode 100644 index 0000000000000000000000000000000000000000..764c5d3ca3ccb74ad7754e8326b94881480030eb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/86.txt @@ -0,0 +1 @@ + focusing on the most significant features of a dataset is useful both in machine learning (ml) and data mining. in ml, it can lead to a higher accuracy, a faster learning process, and ultimately a simpler and more understandable model. in data mining, identifying significant features is essential not only for gaining a better understanding of the data but also for visualisation. in this paper, we demonstrate a new way of identifying significant features inspired by analogical proportions. such a proportion is of the form "a is to b as c is to d", comparing two pairs of items (a, b) and (c, d) in terms of similarities and dissimilarities.
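To make the pairwise comparison just described concrete, the following is a minimal sketch, assuming items encoded as 0/1 feature vectors, of what it means for two pairs to stand in an analogical proportion; it illustrates the intuition only and is not the ARI test introduced in the paper.

```python
# A toy illustration of an analogical proportion between two pairs of items
# described by boolean features: (a, b) and (c, d) are "in proportion" when
# they change in the same way on the same features. This is only the intuition
# behind the abstract above, not the ARI statistic the paper defines.
import numpy as np

def pair_difference(x, y):
    """Signed per-feature difference of a pair of 0/1 feature vectors."""
    return np.asarray(x, dtype=int) - np.asarray(y, dtype=int)

def in_analogical_proportion(a, b, c, d):
    """True when (a, b) and (c, d) differ on the same features, in the same direction."""
    return np.array_equal(pair_difference(a, b), pair_difference(c, d))

# Hypothetical items with four boolean features.
a, b = [1, 0, 1, 0], [1, 1, 1, 0]   # the pair differs only on feature 1
c, d = [0, 0, 0, 1], [0, 1, 0, 1]   # so does this pair, in the same direction
print(in_analogical_proportion(a, b, c, d))  # True
```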
in a classification context, if the similarities/dissimilarities between a and b correlate with the fact that a and b have different labels, this knowledge can be transferred to c and d, inferring that c and d also have different labels. from a feature selection perspective, observing a huge number of such pairs (a, b) where a and b have different labels provides a hint about the importance of the features where a and b differ. following this idea, we introduce the analogical relevance index (ari), a new statistical test of the significance of a given feature with respect to the label. ari is a filter-based method. filter-based methods are ml-agnostic but generally unable to handle feature redundancy. however, ari can detect feature redundancy. our experiments show that ari is effective and outperforms well-known methods on a variety of artificial and some real datasets. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/860.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/860.txt new file mode 100644 index 0000000000000000000000000000000000000000..958f11183549936ec1831a46266dc895bacec61b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/860.txt @@ -0,0 +1 @@ + having a good reputation is paramount for most organisations and companies. in fact, having an optimal corporate image allows them to have better transaction relationships with various customers and partners. however, such reputation is hard to build and easy to destroy for all kinds of business commercial activities (b2c, b2b, b2b2c, b2g). a misunderstanding during the communication process to the customers, or just a bad communication strategy, can lead to a disaster for the entire company. this is emphasised by the reaction of millions of people on social networks, which can be very detrimental for the corporate image if they react negatively to a certain event. this is called a firestorm. in this paper, i propose a well-organised strategy for firestorm attacks on organisations, also showing how an adversary can leverage them to obtain private information on the attacked firm. standard business security procedures are not designed to operate against multi-domain attacks; therefore, i will show how it is possible to bypass the classic and advised security procedures by operating different kinds of attack. i also propose a different firestorm attack, targeting a specific business company network in an efficient way. finally, i present defensive procedures to reduce the negative effect of firestorms on a company. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/861.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/861.txt new file mode 100644 index 0000000000000000000000000000000000000000..69561bc5ae32266585ac2b9bc8ddfb6bc01aacbe --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/861.txt @@ -0,0 +1 @@ + in this paper, we study the problem of mooc quality evaluation, which is essential for improving the course materials, promoting students' learning efficiency, and benefiting user services. while achieving promising performances, current works still suffer from the complicated interactions and relationships of entities in mooc platforms. to tackle the challenges, we formulate the problem as a course representation learning task and develop an information-aware graph representation learning (iagrl) method for multi-view mooc quality evaluation.
specifically, we first build a mooc heterogeneous network (hin) to represent the interactions and relationships among entities in mooc platforms. and then we decompose the mooc hin into multiple single-relation graphs based on meta-paths to depict the multi-view semantics of courses. the course representation learning can be further converted to a multi-view graph representation task. different from traditional graph representation learning, the learned course representations are expected to match the following three types of validity: (1) the agreement on expressiveness between the raw course portfolio and the learned course representations; (2) the consistency between the representations in each view and the unified representations; (3) the alignment between the course and mooc platform representations. therefore, we propose to exploit mutual information for preserving the validity of course representations. we conduct extensive experiments over real-world mooc datasets to demonstrate the effectiveness of our proposed method. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/862.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/862.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f524d90985c8a3e2c115d7e4f6a78a09d00d390 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/862.txt @@ -0,0 +1 @@ + recent advances in artificial intelligence (ai) have sparked renewed interest in its potential to improve education. however, ai is a loose umbrella term that refers to a collection of methods, capabilities, and limitations-many of which are often not explicitly articulated by researchers, education technology companies, or other ai developers. in this paper, we seek to clarify what "ai" is and the potential it holds to both advance and hamper educational opportunities that may improve the human condition. we offer a basic introduction to different methods and philosophies underpinning ai, discuss recent advances, explore applications to education, and highlight key limitations and risks. we conclude with a set of questions that educationalists may ask as they encounter ai in their research and practice. our hope is to make often jargon-laden terms and concepts accessible, so that all are equipped to understand, interrogate, and ultimately shape the development of humancentered ai in education. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/863.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/863.txt new file mode 100644 index 0000000000000000000000000000000000000000..c25600b556d881bbef9f75df0539f1a293be2321 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/863.txt @@ -0,0 +1 @@ + the qubes platform was conceived as a "science education gateway" and designed to accelerate innovation in undergraduate stem education. the technical infrastructure was purpose built to provide more equitable access to professional resources, support learning that reflects authentic science, and promote open education practices. four platform services (oer library access; professional learning; partner support; and customizable workspaces) support overlapping faculty user communities, provide multiple points of entry, and enable manifold use case scenarios. the integrated nature of the platform makes it possible to collect, curate, and disseminate a diverse array of reform resources in a scalable and sustainable manner. 
we believe that the qubes platform has the capacity to broaden participation in scholarship around teaching and learning and, furthermore, that it can help to lower faculty barriers to the adoption of reform practices. the role of cyberinfrastructure in undergraduate stem education is generally underappreciated and warrants further exploration. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/864.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/864.txt new file mode 100644 index 0000000000000000000000000000000000000000..afa8d4e31ca156f5eacb4c658a51631ae42e5009 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/864.txt @@ -0,0 +1 @@ + increasingly, cyber aggression has become a prevalent phenomenon that erodes the social media environment. however, due to its subjectivity and expense, the traditional self-reporting questionnaire is difficult to employ in the cyber context. in this study, we put forward a prediction model for cyber aggression based on a cutting-edge deep learning algorithm. building on 320 active weibo users' social media activities, we construct basic, dynamic, and content features. we elaborate cyber aggression on three dimensions: social exclusion, malicious humour, and guilt induction. we then build the prediction model on top of a pretrained bert model. the empirical evidence shows that the bert-based model yields stronger predictions than traditional machine learning models without extra pretrained information. this study offers a solid theoretical model for cyber aggression prediction. furthermore, this study contributes to the probing of cyber aggression behaviors and the organization of social media platforms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/865.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/865.txt new file mode 100644 index 0000000000000000000000000000000000000000..d033fc8e13feb024e94771058c2d8b0f4b6dd415 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/865.txt @@ -0,0 +1 @@ + the covid19 pandemic has challenged universities and organizations to devise mechanisms to uplift the wellbeing and welfare of people and communities. in response, the design and development of an integrated package of technologies, mangngalapp, a web-based portal and mobile responsive application for rural development, served as an opportunity. it showcases different packets of technologies that were outputs of r&d in the field of fisheries and aqua-culture, innovations that were ip-protected, and technologies that harness locally available resources for post-harvest development and aiding in sustaining growth and development in the communities. this paper focused on the usability and acceptability of the mangngalapp, implementing a descriptive research design using the technology acceptance model or tam and iso 25010 software quality standards. constrained by government health restrictions due to covid-19, a google form-based questionnaire was forwarded to consenting participants via an email with the attached consent and evaluation form. results revealed that the mangngalapp was found to be very acceptable and usable, and compliant with the iso 25010 software quality characteristics to a high extent.
from the results, it is concluded that the developed mangngalapp will be a usable and responsive technology that aids rural development, especially among its target users: fishers, gatherers, processors, traders, and farmers. considering compatibility and usefulness, the mangngalapp is expected to provide greater social development in the community. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/866.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/866.txt new file mode 100644 index 0000000000000000000000000000000000000000..9920c6d15e994af2343feb24357b046f07639567 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/866.txt @@ -0,0 +1 @@ + ai alignment considers how we can encode ai systems in a way that is compatible with human values. the normative side of this problem asks what moral values or principles, if any, we should encode in ai. to this end, we present a framework to consider the question at four levels: individual, organizational, national, and global. we aim to illustrate how ai alignment is made up of value alignment problems at each of these levels, where values at each level affect the others and effects can flow in either direction. we outline key questions and considerations of each level and demonstrate an application of this framework to the topic of ai content moderation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/867.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/867.txt new file mode 100644 index 0000000000000000000000000000000000000000..da04ed7d558ccd48faff0c38e7a991ca238e888b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/867.txt @@ -0,0 +1 @@ + over the past decade, an explosion in the availability of education-related datasets has enabled new computational research in education. much of this work has investigated digital traces of online learners in order to better understand and optimize their cognitive learning processes. yet cognitive learning on digital platforms does not equal education. instead, education is an inherently social, cultural, economic, and political process manifesting in physical spaces, and educational outcomes are influenced by many factors that precede and shape the cognitive learning process. many of these are social factors like children's connections to schools (including teachers, counselors, and role models), parents and families, and the broader neighborhoods in which they live. in this article, we briefly discuss recent studies of learning through large-scale digital platforms, but largely focus on those exploring sociological aspects of education. we believe computational social scientists can creatively advance this emerging research frontier-and in doing so, help facilitate more equitable educational and life outcomes. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/868.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/868.txt new file mode 100644 index 0000000000000000000000000000000000000000..b885ca593d072876a4e180b2e01ca045296cbe5e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/868.txt @@ -0,0 +1 @@ + as automated decision making and decision assistance systems become common in everyday life, research on the prevention or mitigation of potential harms that arise from decisions made by these systems has proliferated.
however, various research communities have independently conceptualized these harms, envisioned potential applications, and proposed interventions. the result is a somewhat fractured landscape of literature focused generally on ensuring decision-making algorithms "do the right thing". in this paper, we compare and discuss work across two major subsets of this literature: algorithmic fairness, which focuses primarily on predictive systems, and ethical decision making, which focuses primarily on sequential decision making and planning. we explore how each of these settings has articulated its normative concerns, the viability of different techniques for these different settings, and how ideas from each setting may have utility for the other. finally, given the optimal value function q*(s, a), the optimal policy π*(s) can be calculated in the following way: π*(s) = arg max_{a ∈ A} q*(s, a). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/869.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/869.txt new file mode 100644 index 0000000000000000000000000000000000000000..13bc694a155b99018a62a902448c300f7464dc81 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/869.txt @@ -0,0 +1 @@ + blockchain is a revolutionary technology, and its growth started in various industries (such as it, education, business, banking, and many others) to capitalize on it. currently, in higher education institutions (heis), the adoption of blockchain education in academic programs and curricula needs to be improved. in addition, heis must make many intense changes in the teaching and learning methods to educate learners about blockchain technology and its applications to meet the current industry workforce demand. due to a lack of academic programs and courses, students nowadays rely on online resources and pay non-academic organizations a high fee. this paper provides a comprehensive survey of blockchain education's current state of the art by reviewing the different academic programs and industry workforce demand. in addition, blockchain application trends, including market growth and demand, are discussed. moreover, the blockchain career scope for different disciplines of students is examined. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/87.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/87.txt new file mode 100644 index 0000000000000000000000000000000000000000..6cf0d8ed6746fc8af1e7ffbe6c3f24785a70a1f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/87.txt @@ -0,0 +1 @@ + we propose stitchnet, a novel neural network creation paradigm that stitches together fragments (one or more consecutive network layers) from multiple pre-trained neural networks. stitchnet allows the creation of high-performing neural networks without the large compute and data requirements needed under traditional model creation processes via backpropagation training. we leverage centered kernel alignment (cka) as a compatibility measure to efficiently guide the selection of these fragments in composing a network for a given task tailored to specific accuracy needs and computing resource constraints. we then show that these fragments can be stitched together to create neural networks with accuracy comparable to that of traditionally trained networks at a fraction of computing resource and data requirements.
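As a pointer for readers unfamiliar with the compatibility measure named in the stitchnet abstract above (87.txt), here is a minimal sketch of linear CKA computed between the activations two candidate fragments produce on the same inputs; it is an illustration under assumed shapes, not the fragment-selection code from the repository the abstract links to.

```python
# A minimal sketch of linear CKA (Kornblith et al., 2019), the compatibility
# measure named in the stitchnet abstract above. It scores how similarly two
# fragments represent the same batch of inputs; this is an illustration only,
# not the selection code from the repository cited in the abstract.
import numpy as np

def linear_cka(x, y):
    """x: (n, d1) activations, y: (n, d2) activations on the same n inputs."""
    x = x - x.mean(axis=0)                        # center each feature column
    y = y - y.mean(axis=0)
    cross = np.linalg.norm(x.T @ y, "fro") ** 2   # alignment of the two representations
    return cross / (np.linalg.norm(x.T @ x, "fro") * np.linalg.norm(y.T @ y, "fro"))

# Toy example: activations of two hypothetical fragments on 128 shared inputs.
rng = np.random.default_rng(0)
acts_a = rng.normal(size=(128, 64))
acts_b = acts_a @ rng.normal(size=(64, 32))       # a linearly related representation
print(round(linear_cka(acts_a, acts_b), 3))       # closer to 1.0 means more compatible
```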
finally, we explore a novel on-the-fly personalized model creation and inference application enabled by this new paradigm. the code is available at https://github.com/steerapi/stitchnet. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/870.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/870.txt new file mode 100644 index 0000000000000000000000000000000000000000..81c68cad1dfb65f32c3b655d9db911196ae52c90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/870.txt @@ -0,0 +1 @@ + the application of new artificial intelligence (ai) discoveries is transforming healthcare research. however, the standards of reporting are variable in this still evolving field, leading to potential research waste. the aim of this work is to highlight resources and reporting guidelines available to researchers working in computational pathology. the equator network library of reporting guidelines and extensions was systematically searched up to august 2022 to identify applicable resources. inclusion and exclusion criteria were used and guidance was screened for utility at different stages of research and for a range of study types. items were compiled to create a summary for easy identification of useful resources and guidance. over 70 published resources applicable to pathology ai research were identified. guidelines were divided into key categories, reflecting current study types and target areas for ai research: literature & research priorities, discovery, clinical trial, implementation, and post-implementation & guidelines. guidelines useful at multiple stages of research and those currently in development were also highlighted. summary tables with links to guidelines for these groups were developed, to assist those working in cancer ai research with complete reporting of research. issues with replication and research waste are recognised problems in ai research. reporting guidelines can be used as templates to ensure the essential information needed to replicate research is included within journal articles and abstracts. reporting guidelines are available and useful for many study types, but greater awareness is needed to encourage researchers to utilise them and for journals to adopt them. this review and summary of resources highlights guidance to researchers, aiming to improve completeness of reporting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/871.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/871.txt new file mode 100644 index 0000000000000000000000000000000000000000..6eab33cbcb5d4897edc0f7510bffc61f1e9e0dd2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/871.txt @@ -0,0 +1 @@ + automated decision-making systems are being increasingly deployed and affect the public in a multitude of positive and negative ways. governmental and private institutions use these systems to process information according to certain human-devised rules in order to address social problems or organizational challenges. both research and real-world experience indicate that the public lacks trust in automated decision-making systems and the institutions that deploy them. the recreancy theorem argues that the public is more likely to trust and support decisions made or influenced by automated decision-making systems if the institutions that administer them meet their fiduciary responsibility.
however, often the public is never informed of how these systems operate and how the resultant institutional decisions are made. a "black box" effect of automated decision-making systems reduces the public's perceptions of integrity and trustworthiness. consequently, the institutions administering these systems are less able to assess whether the decisions are just. the result is that the public loses the capacity to identify, challenge, and rectify unfairness or the costs associated with the loss of public goods or benefits. the current position paper defines and explains the role of fiduciary responsibility within an automated decision-making system. we formulate an automated decision-making system as a data science lifecycle (dsl) and examine the implications of fiduciary responsibility within the context of the dsl. fiduciary responsibility within dsls provides a methodology for addressing the public's lack of trust in automated decision-making systems and the institutions that employ them to make decisions affecting the public. we posit that fiduciary responsibility manifests in several contexts of a dsl, each of which requires its own mitigation of sources of mistrust. to instantiate fiduciary responsibility, a los angeles police department (lapd) predictive policing case study is examined. we examine the development and deployment by the lapd of predictive policing technology and identify several ways in which the lapd failed to meet its fiduciary responsibility. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/872.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/872.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1cb543f8c4ab39e92c5fdcc787508ba8d9da5ac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/872.txt @@ -0,0 +1 @@ + in order to develop trustworthy healthcare artificial intelligence (ai), prospective and ergonomics studies that consider the complexity and reality of real-world applications of ai systems are needed. to achieve this, technology developers and deploying organisations need to form collaborative partnerships. this entails access to healthcare data, which frequently might also include potentially identifiable data such as audio recordings of calls made to an ambulance service call centre. information governance (ig) processes have been put in place to govern the use of personal confidential data. however, navigating ig processes in the formative stages of ai development and pre-deployment can be challenging, because the legal basis for data sharing is explicit only for the purpose of delivering patient care, i.e., once a system is put into service. in this paper we describe our experiences of managing ig for the assurance of healthcare ai, using the example of an out-of-hospital-cardiac-arrest recognition software within the context of the welsh ambulance service. we frame ig as a socio-technical process. ig processes for the development of trustworthy healthcare ai rely on information governance work, which entails dialogue, negotiation, and trade-offs around the legal basis for data sharing, data requirements and data control. information governance work should start early in the design life cycle and will likely continue throughout. this includes a focus on establishing and building relationships, as well as a focus on organisational readiness and a deeper understanding of both ai technologies and their safety assurance requirements.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/873.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/873.txt new file mode 100644 index 0000000000000000000000000000000000000000..320328b7527b1b775859ab7abb0f2d3f176d488f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/873.txt @@ -0,0 +1 @@ + from the perspective of any nation, rural areas generally present a comparable set of problems, such as a lack of proper health care, education, living conditions, wages, and market opportunities. some nations have created and developed the concept of smart villages during the previous few decades, which effectively addresses these issues. the landscape of traditional agriculture has been radically altered by digital agriculture, which has also had a positive economic impact on farmers and those who live in rural regions by ensuring an increase in agricultural production. we explored current issues in rural areas, and the consequences of smart village applications, and then illustrate our concept of smart village from recent examples of how emerging digital agriculture trends contribute to improving agricultural production in this chapter. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/874.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/874.txt new file mode 100644 index 0000000000000000000000000000000000000000..c16b673bbefb6e323c6c1479ec7cb026f086d982 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/874.txt @@ -0,0 +1 @@ + the world around us has undergone a radical transformation due to rapid technological advancement in recent decades. the industry of the future generation is evolving, and artificial intelligence is the next change in the making, popularly known as industry 4.0. indeed, experts predict that artificial intelligence (ai) will be the main force behind the following significant virtual shift in the way we stay, converse, study, live, communicate and conduct business. all facets of our social connection are being transformed by this growing technology. one of the newest areas of educational technology is artificial intelligence in the field of education (aied). this study emphasizes the different applications of artificial intelligence in education from both an industrial and academic standpoint. it highlights the most recent applications of aied, with some of its main areas being the reduction of instructors' burden and students' contextualized learning, novel transformative evaluations, and advancements in sophisticated tutoring systems. it analyses aied's ethical component and the influence of this transition on people, particularly students and instructors. finally, the article touches on aied's potential future research and practices. the goal of this study is to introduce the present-day applications to its intended audience. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/875.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/875.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a727c8a54922074361e74015e5fa5e80eea7ea8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/875.txt @@ -0,0 +1 @@ + although the rapid development of battery technology has greatly increased the range of battery electric vehicle (bev), the range anxiety is still a major concern of bev users or potential users.
previous work has proposed a framework explaining the influential factors of range anxiety, and users' trust toward the range estimation system (res) of bev has been identified as a leading factor of range anxiety. the trust in res may further influence bev users' charging decisions. however, the formation of trust in res of bevs has not yet been explored. in this work, a questionnaire has been designed to investigate bev users' trust in res and further explore the influential factors of bev users' charging decision. in total, 152 samples collected from the bev users in mainland china have been analyzed. the bev users' gender, driving area, knowledge of bev or res, system usability and trust in battery system of smartphones have been identified as influential factors of res in bevs, supporting the three-layer framework in automation-related trust (i.e., dispositional trust, situational trust and learned trust). a connection between smartphone charging behaviors and bev charging behaviors has also been observed. the results from this study can provide insights on the design of res in bevs in order to alleviate range anxiety among users. the results can also inform the design of strategies (e.g., advertising, training and in-vehicle hmi design) that can facilitate more rational charging decisions among bev users. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/876.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/876.txt new file mode 100644 index 0000000000000000000000000000000000000000..909314b9834822bb2816e7153bc75637a5eef05c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/876.txt @@ -0,0 +1 @@ + outcomes improvements, and realizing it return on investment (roi). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/877.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/877.txt new file mode 100644 index 0000000000000000000000000000000000000000..3eee9ef867738e23e8f69c1d47f24b5832e12584 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/877.txt @@ -0,0 +1 @@ + harmful internet use (hiu) is a term coined for the unintended use of the internet. in this study, we propose a more accurate hiu measuring method based on the peer assessment and differential evolution approach. the sample data comprises a juvenile population in poland; 267 subjects assessed 1,513 peers. in addition to classic statistical analysis, differential evolution has been employed. results indicate that there may be a substantially higher rate of hiu than other studies have indicated. more accurate measurement of the adolescent population influx affected by hiu is needed for healthcare and welfare system planning. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/878.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/878.txt new file mode 100644 index 0000000000000000000000000000000000000000..912e7195d5f2abfe8aa5c45134b3cbc3600b1aeb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/878.txt @@ -0,0 +1 @@ + this chapter introduces the perspective of political ecology to the application of artificial intelligence to artistic processes (creative-ai). hence, the environmental and social impact of the development and employment of creative-ai are the focus of this text, when we consider them as part of an economic system that transforms artistic creation to a commodity.
i first analyse specific creative-ai cases, and then conduct a speculation that takes jacques attali's writing on the role of music in society as a vantage point, and investigates the environmental and social consequences of an automatic composition network controlled by a large music streaming platform. whereas the possibilities that emerge from creative-ai may be promising from an artistic perspective, its entanglement with corporate interest raises severe concerns. these concerns can only be addressed by a wide cross-sectoral alliance between research and arts that develops a critical perspective on the future directions of creative-ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/879.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/879.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/88.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/88.txt new file mode 100644 index 0000000000000000000000000000000000000000..6cf0d8ed6746fc8af1e7ffbe6c3f24785a70a1f0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/88.txt @@ -0,0 +1 @@ + reinforcement learning (rl) has demonstrated great potential, but is currently full of overhyping and pipe dreams. we point to some difficulties with current research which we feel are endemic to the direction taken by the community. to us, the current direction is not likely to lead to "deployable" rl: rl that works in practice and can work in practical situations yet still is economically viable. we also propose a potential fix to some of the difficulties of the field. * technion and nvidia research; this writeup represents my own opinions. † technion; this writeup represents my own opinions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/880.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/880.txt new file mode 100644 index 0000000000000000000000000000000000000000..732ce611b8d78cc68e7fc3aa9de92e495c2cc567 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/880.txt @@ -0,0 +1 @@ + electronic health record (ehr) has become an essential tool in the healthcare ecosystem, providing authorized clinicians with patients' health-related information for better treatment. while most developed countries are taking advantage of ehrs to improve their healthcare system, it remains challenging in developing countries to support clinical decision-making and public health using a computerized patient healthcare information system. this paper proposes a novel ehr architecture suitable for developing countries-an architecture that fosters inclusion and provides solutions tailored to all social classes and socioeconomic statuses. our architecture foresees an internetfree (offline) solution to allow medical transactions between healthcare organizations, and the storage of ehrs in geographically underserved and rural areas. moreover, we discuss how artificial intelligence can leverage anonymous health-related information to enable better public health policy and surveillance. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/881.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/881.txt new file mode 100644 index 0000000000000000000000000000000000000000..d1d22b062a99f736ac472b70004d808e38cb05dd --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/881.txt @@ -0,0 +1 @@ + text provides a compelling example of unstructured data that can be used to motivate and explore classification problems. challenges arise regarding the representation of features of text and student linkage between text representations as character strings and identification of features that embed connections with underlying phenomena. in order to observe how students reason with text data in scenarios designed to elicit certain aspects of the domain, we employed a task-based interview method using a structured protocol with six pairs of undergraduate students. our goal was to shed light on students' understanding of text as data using a motivating task to classify headlines as "clickbait" or "news". three types of features (function, content, and form) surfaced, the majority from the first scenario. our analysis of the interviews indicates that this sequence of activities engaged the participants in thinking at both the human-perception level and the computer-extraction level and conceptualizing connections between them. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/882.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/882.txt new file mode 100644 index 0000000000000000000000000000000000000000..d0e63eb178f917f64337d4ccb0d4e8c53a86874c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/882.txt @@ -0,0 +1 @@ + the report provides an intricate analysis of cyber security defined in contemporary operational digital environments. an extensive literature review is formed to determine how the construct is reviewed in modern scholarly contexts. the article seeks to offer a comprehensive definition of the term "cybersecurity" to accentuate its multidisciplinary perspectives. a meaningful concise, and inclusive dimension will be provided to assist in designing scholarly discourse on the subject. the report will offer a unified framework for examining activities that constitute the concept resulting in a new definition; "cybersecurity is the collection and concerting of resources including personnel and infrastructure, structures, and processes to protect networks and cyber-enabled computer systems from events that compromise the integrity and interfere with property rights, resulting in some extent of loss." the encapsulation of the interdisciplinary domains will be critical in improving understanding and response to emerging challenges in the cyberspace. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/883.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/883.txt new file mode 100644 index 0000000000000000000000000000000000000000..d514cfaba2cf5d594d48ba333620148eecc6448a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/883.txt @@ -0,0 +1 @@ + australia is a leader in autonomous systems technology, particularly in the mining industry, borne from necessity in a geographically dispersed and complex natural environment. 
increasingly advanced autonomous systems are becoming more prevalent in australia, particularly as the safety, environmental and efficiency benefits become better understood, and the increasing sophistication of technology improves capability and availability. increasing use of these systems, including in the maritime domain and air domain, is placing pressure on the national safety regulators, who must either continue to apply their traditional regulatory approach requiring exemptions to enable operation of emerging technology, or seize the opportunity to put in place an agile and adaptive approach better suited to the rapid developments of the 21st century. in australia, the key national safety regulators have demonstrated an appetite for working with industry to facilitate innovation, but their limited resources mean progress is slow. there is a critical role to be played by third parties from industry, government, and academia who can work together to develop, test and publish new assurance and accreditation frameworks for trusted autonomous systems, and assist in the transition to an adaptive and agile regulatory philosophy. this is necessary to ensure the benefits of autonomous systems can be realised, without compromising safety. this paper will identify the growing use cases for autonomous systems in australia, in the maritime, air and land domains, assess the current regulatory framework, argue that australia's regulatory approach needs to become more agile and anticipatory, and investigate how third-party projects could positively impact the assurance and accreditation process for autonomous systems in the future. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/884.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/884.txt new file mode 100644 index 0000000000000000000000000000000000000000..f2a3c36dd79681d871a932b52d0a0554a4bbca90 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/884.txt @@ -0,0 +1 @@ + due to the precautionary measures during the covid-19 pandemic, many universities offered unproctored take-home exams. we propose methods to detect potential collusion between students and apply our approach on event log data from take-home exams during the pandemic. we find groups of students with suspiciously similar exams. in addition, we compare our findings to a proctored comparison group. by this, we establish a rule of thumb for evaluating which cases are "outstandingly similar", i.e., suspicious cases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/885.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/885.txt new file mode 100644 index 0000000000000000000000000000000000000000..490c733af24f8a6cf52bd54292458bc627cec1f7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/885.txt @@ -0,0 +1 @@ + while a lack of diversity is a longstanding problem in computer science and engineering, universities and organizations continue to look for solutions to this issue. among the first of its kind, we launched inspire: stem for social impact, a program at the university of victoria, canada, aimed to motivate and empower students from underrepresented groups in computer science and engineering to develop digital solutions for societally impactful projects by engaging in experiential learning projects with identified community-partners.
the twenty-four students in the program came from diverse backgrounds in terms of academic areas of study, genders, ethnicities, and levels of technical and educational experience. working with six community partners, these students spent four months learning and developing solutions for a societal and/or environmental problem with potential for local and global impacts. our experiences indicate that working in a diverse team with real clients on solving pressing issues produces a sense of competence, relatedness, and autonomy which are the basis of self-determination theory. due to the unique structure of this program, the three principles of self-determination theory emerged through different experiences, ultimately motivating the students to build a network of likeminded people. the importance of such a network is profound in empowering students to succeed and, in retrospect, remain in software engineering fields. we address the diversity problem by providing diverse, underrepresented students with a safe and like-minded environment where they can learn and realize their full potential. hence, in this paper, we describe the program design, experiences, and lessons learned from this approach. we also provide recommendations for universities and organizations that may want to adapt our approach. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/886.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/886.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3be4585a85c067fc9a4428c52d3af45dc7aa593 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/886.txt @@ -0,0 +1 @@ + this study aims to understand the perceptions and opinions of academicians towards chatgpt-3 by collecting and analyzing social media comments, and a survey was conducted with library and information science professionals. the research uses a content analysis method and finds that while chatgpt-3 can be a valuable tool for research and writing, it is not 100% accurate and should be cross-checked. the study also finds that while some academicians may not accept chatgpt-3, most are starting to accept it. the study is beneficial for academicians, content developers, and librarians. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/887.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/887.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5cf486aa299f88556c3e3b22e4ae6abc6487f03 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/887.txt @@ -0,0 +1 @@ + wikipedia and many user-generated content (ugc) communities are known for producing reliable, quality content, but also for being vulnerable to false or misleading information. previous work has shown that many hoaxes on wikipedia go undetected for extended periods of time. but little is known about the creation of intentionally false or misleading information online. does collective attention toward a topic increase the likelihood it will spawn disinformation? here, we measure the relationship between allocation of attention and the production of hoax articles on the english wikipedia. analysis of traffic logs reveals that, compared to legitimate articles created on the same day, hoaxes tend to be more associated with traffic spikes preceding their creation. this is consistent with the idea that the supply of false or misleading information on a topic is driven by the attention it receives. 
these findings improve our comprehension of the determinants of disinformation in ugc communities and could help promote the integrity of knowledge on wikipedia. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/888.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/888.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9b2f203ca93b179537647b6418de002724bc6f1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/888.txt @@ -0,0 +1 @@ + the rise of social media and the increase in the computational capabilities of computers have allowed tech companies such as facebook and google to gather incredibly large amounts of data and to be able to extract meaningful information to use for commercial purposes. moreover, the algorithms behind these platforms have shown the ability to influence feelings, behaviors, and opinions, representing a serious threat to the independence of their users. all of these practices have been referred to as "surveillance capitalism", a term created by shoshana zuboff. in this paper i focus on the threat imposed on the autonomy of human beings in the context of surveillance capitalism, providing both an analysis of the reasons why this threat exists and what consequences we could face if we take no action against such practices. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/889.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/889.txt new file mode 100644 index 0000000000000000000000000000000000000000..870e494280188d80db2f439e83b461ccc0385936 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/889.txt @@ -0,0 +1 @@ + data protection laws and policies have been studied extensively in recent years, but little is known about the parliamentary politics of data protection. this limitation applies even to the european union (eu) that has taken the global lead in data protection and privacy regulation. for patching this notable gap in existing research, this paper explores the data protection questions raised by the members of the european parliament (meps) in the parliament's plenary sessions and the answers given to these by the european commission. over a thousand of such questions and answers are covered in a period from 1995 to early 2023. given computational analysis based on text mining, the results indicate that (a) data protection has been actively debated in the parliament during the past twenty years. no noticeable longitudinal trends are present; the debates have been relatively constant. as could be expected, (b) the specific data protection laws in the eu have frequently been referenced in these debates, which (c) do not seem to align along conventional political dimensions such as the left-right axis. furthermore, (d) numerous distinct data protection topics have been debated by the parliamentarians, indicating that data protection politics in the eu go well-beyond the recently enacted regulations. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/89.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/89.txt new file mode 100644 index 0000000000000000000000000000000000000000..8045a795ec2ae0f30b1f58131dfd2fd9d55b40be --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/89.txt @@ -0,0 +1 @@ + dagger is an imitation algorithm that aggregates its original datasets by querying the expert on all samples encountered during training. in order to reduce the number of samples queried, we propose a modification to dagger, known as dadagger, which only queries the expert for state-action pairs that are out of distribution (ood). ood states are identified by measuring the variance of the action predictions of an ensemble of models on each state, which we simulate using dropout. testing on the car racing and half cheetah environments achieves comparable performance to dagger but with reduced expert queries, and better performance than a random sampling baseline. we also show that our algorithm may be used to build efficient, well-balanced training datasets by running with no initial data and only querying the expert to resolve uncertainty. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/890.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/890.txt new file mode 100644 index 0000000000000000000000000000000000000000..ce2da14077ed4cd82d0e24ec6619e6f83d6f57ec --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/890.txt @@ -0,0 +1 @@ + access to online data has long been important for law enforcement agencies in their collection of electronic evidence and investigation of crimes. these activities have also long involved cross-border investigations and international cooperation between agencies and jurisdictions. however, technological advances such as cloud computing have complicated the investigations and cooperation arrangements. therefore, several new laws have been passed and proposed both in the united states and the european union for facilitating cross-border crime investigations in the context of cloud computing. these new laws and proposals have also brought many new legal challenges and controversies regarding extraterritoriality, data protection, privacy, and surveillance. with these challenges in mind and with a focus on europe, this paper reviews the recent trends and policy initiatives for cross-border data access by law enforcement agencies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/891.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/891.txt new file mode 100644 index 0000000000000000000000000000000000000000..5aefd975b9b7d1185e2bd8e6a30ce0e684b9104c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/891.txt @@ -0,0 +1 @@ + the european union (eu) has been pursuing a new strategy under the umbrella label of digital sovereignty. data is an important element in this strategy. to this end, a specific data governance act was enacted in 2022. this new regulation builds upon two ideas: reuse of data held by public sector bodies and voluntary sharing of data under the label of data altruism. this short commentary reviews the main content of the new regulation. based on the review, a few points are also raised about potential challenges. 
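The dadagger abstract above (89.txt) queries the expert only on out-of-distribution states, flagged by the variance of an ensemble simulated with dropout; the sketch below illustrates that gating step under assumed network size, number of passes, and threshold.

```python
# A sketch (assumed, not the authors' code) of the dadagger-style gating from
# 89.txt above: query the expert only on states where repeated dropout forward
# passes disagree, treating high prediction variance as a sign the state is
# out of distribution. Network size, pass count, and threshold are illustrative.
import torch
import torch.nn as nn

policy = nn.Sequential(nn.Linear(8, 64), nn.ReLU(), nn.Dropout(p=0.2), nn.Linear(64, 2))

def needs_expert(state, n_passes=16, threshold=0.05):
    policy.train()                       # keep dropout active to mimic an ensemble
    with torch.no_grad():
        preds = torch.stack([policy(state) for _ in range(n_passes)])
    return preds.var(dim=0).mean().item() > threshold

state = torch.randn(8)
if needs_expert(state):
    pass  # ask the expert for this state's action and add the pair to the dataset
```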
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/892.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/892.txt new file mode 100644 index 0000000000000000000000000000000000000000..6d2c0fc0d77865b3ee5822c0600abbbae981202f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/892.txt @@ -0,0 +1 @@ + the prevalence of social media platforms and their use across the globe make them attractive options for studying large groups of people, particularly when some of these platforms provide access to large amounts of structured data. however, with the collection, storage, and use of this data come ethical and legal responsibilities, which are particularly important when looking at social groups such as migrants, who are often stigmatised and criminalised. various guidelines, frameworks and laws have been developed to ensure social media data is used in the most ethical way. however, they have quickly evolved within the past few years and are scattered across various fields and domains. to help researchers navigate these issues, this chapter provides an overview of the ethical considerations of studying migration via social media platforms. building on relevant academic literature, as well as national and supranational frameworks and legislation, we review how the main ethical issues related to social media research have been discussed in the past twenty years and outline good practice examples to mitigate them. this overview is designed to provide researchers with theoretical and practical tools to consider and mitigate the ethical challenges related to social media research in migration-related contexts. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/893.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/893.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6b6cf71047a3b239dedd8b7f2088a5b3f9ab4ee --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/893.txt @@ -0,0 +1 @@ + in today's age of digital technology, ethical concerns regarding computing systems are increasing. while the focus of such concerns currently is on requirements for software, this article spotlights the hardware domain, specifically microchips. for example, the opaqueness of modern microchips raises security issues, as malicious actors can manipulate them, jeopardizing system integrity. as a consequence, governments invest substantially to facilitate a secure microchip supply chain. to combat the opaqueness of hardware, this article introduces the concept of explainable hardware (xhw). inspired by and building on previous work on explainable ai (xai) and explainable software systems, we develop a framework for achieving xhw comprising relevant stakeholders, requirements they might have concerning hardware, and possible explainability approaches to meet these requirements. through an exploratory survey among 18 hardware experts, we showcase applications of the framework and discover potential research gaps. our work lays the foundation for future work and structured debates on xhw.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/894.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/894.txt new file mode 100644 index 0000000000000000000000000000000000000000..a31ad9975f6b4b19049b82cfb0a7f94e9afc1593 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/894.txt @@ -0,0 +1 @@ + the august 2022 special election for the u.s. house of representatives in alaska featured three main candidates and was conducted by the single-winner ranked choice voting system known as "instant runoff voting." the results of this election displayed a well-known but relatively rare phenomenon known as "condorcet failure:" nick begich was eliminated in the first round despite being more broadly acceptable to the electorate than either of the other two candidates. more specifically, begich was the condorcet winner of this election: based on the cast vote record, he would have defeated each of the other two candidates in head-to-head contests, but he was eliminated in the first round of ballot counting due to receiving the fewest first-place votes. the purpose of this paper is to use the data in the cast vote record to explore the range of likely outcomes if this election had been conducted under two alternative voting systems: approval voting and star ("score then automatic runoff") voting. we find that under the best assumptions available about voter behavior, it is likely-but not at all certain-that peltola would still have won the election under approval voting, while begich would almost certainly have won under star voting. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/895.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/895.txt new file mode 100644 index 0000000000000000000000000000000000000000..245c76f5ada8b064eeca2eb3df86ee72c6d0afc2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/895.txt @@ -0,0 +1 @@ + the explainability of ai has transformed from a purely technical issue to a complex issue closely related to algorithmic governance and algorithmic security. the lack of explainable ai (xai) brings adverse effects that can cross all economic classes and national borders. although efforts in governance, technical development, and policy exchange have been made in xai by multiple stakeholders, including the public sector, enterprises, and international organizations, respectively, xai is still in its infancy. future applications and corresponding regulatory instruments are still dependent on the collaborative engagement of all parties. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/896.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/896.txt new file mode 100644 index 0000000000000000000000000000000000000000..264c3b7d72000d7d113287cea3cb61484c88c024 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/896.txt @@ -0,0 +1 @@ + approaches to fair and ethical ai have recently fallen under the scrutiny of the emerging, chiefly qualitative, field of critical data studies, which places emphasis on such interventions' lack of sensitivity to context and to complex social phenomena. we employ some of these lessons to introduce a tripartite decision-making toolkit, informed by dilemmas encountered in the pursuit of responsible ai/ml.
these are: (a) the opportunity dilemma between the availability of data shaping problem statements versus problem statements shaping data collection and processing; (b) the scale dilemma between scalability and contextualizability; and (c) the epistemic dilemma between the pragmatic technical objectivism and the reflexive relativism in acknowledging the social. this paper advocates for a situated reasoning and creative engagement with the dilemmas surrounding responsible algorithmic/data-driven systems, and going beyond the formulaic bias elimination and ethics operationalization narratives found in the fair-ai literature. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/897.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/897.txt new file mode 100644 index 0000000000000000000000000000000000000000..2ec73f5960ed9dddce0a8233f4ca94f2e39c20a8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/897.txt @@ -0,0 +1 @@ + as security communities brace for the emerging social automation based threats, we examine the mechanisms of developing situation awareness in cyberspace and the governance issues that socialbots bring into this existing paradigm of cyber situation awareness. we point out that an organisation's situation awareness in cyberspace is a phenomenon fundamentally distinct from the original conception of situation awareness, requiring continuous data exchange and knowledge management where the standard implementation mechanisms require significant policy attention in light of threats like malicious social automation. we conceptualise cyberspace awareness as a socio-technical phenomenon with syntactic, semantic, and operatic dimensions, each subject to a number of stressors which are exacerbated under social automation based threats. the paper contributes to the ideas of situational awareness in cyberspace, and characterises the challenges therein around tackling the increasingly social and often pervasive automation in cyber threat environments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/898.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/898.txt new file mode 100644 index 0000000000000000000000000000000000000000..914491555f00505f4190e411c3538cdfbbf96c65 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/898.txt @@ -0,0 +1 @@ + there is an increasing supply of and demand for political advertising throughout the world. at the same time, societal threats, such as election interference by foreign governments and other bad actors, continue to be a pressing concern in many democracies. furthermore, manipulation of electoral outcomes, whether by foreign or domestic forces, continues to be a concern of many citizens who are also worried about their fundamental rights. to these ends, the european union (eu) has launched several initiatives for tackling the issues. a new regulation was proposed in 2020 also for improving the transparency of political advertising in the union. this short commentary reviews the proposed regulation and raises a few points about its limitations and potential impacts.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/899.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/899.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c64df2f7e540e6dd52216295b62ce6828b3c42e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/899.txt @@ -0,0 +1 @@ + is it possible for machines to think like humans? and if it is, how should we go about teaching them to do so? as early as 1950, alan turing stated that we ought to teach machines in the way of teaching a child. recently, reinforcement learning with human feedback (rlhf) has emerged as a strong candidate toward allowing agents to learn from human feedback in a naturalistic manner. rlhf is distinct from traditional reinforcement learning as it provides feedback from a human teacher in addition to a reward signal. it has been catapulted into public view by multiple high-profile ai applications, including openai's chatgpt, deepmind's sparrow, and anthropic's claude. these highly capable chatbots are already overturning our understanding of how ai interacts with humanity. the wide applicability and burgeoning success of rlhf strongly motivate the need to evaluate its social impacts. in light of recent developments, this paper considers an important question: can rlhf be developed and used without negatively affecting human societies? our objectives are threefold: to provide a systematic study of the social effects of rlhf; to identify key social and ethical issues of rlhf; and to discuss social impacts for stakeholders. although text-based applications of rlhf have received much attention, it is crucial to consider when evaluating its social implications the diverse range of areas to which it may be deployed. we describe seven primary ways in which rlhf-based technologies will affect society by positively transforming human experiences with ai. this paper ultimately proposes that rlhf has potential to net positively impact areas of misinformation, ai value-alignment, bias, ai access, cross-cultural dialogue, industry, and workforce. as rlhf raises concerns that echo those of existing ai technologies for governance, industry, safety, ethics, and the future of global power relations, it will be important for all to be aware and intentional in the adoption of rlhf. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/9.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/9.txt new file mode 100644 index 0000000000000000000000000000000000000000..9c36ac8459b5a9007ec1040f33c8e09bd5756250 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/9.txt @@ -0,0 +1 @@ + conditional variational models, using either continuous or discrete latent variables, are powerful for open-domain dialogue response generation. however, previous works show that continuous latent variables tend to reduce the coherence of generated responses. in this paper, we also found that discrete latent variables have difficulty capturing more diverse expressions. to tackle these problems, we combine the merits of both continuous and discrete latent variables and propose a hybrid latent variable (hlv) method. specifically, hlv constrains the global semantics of responses through discrete latent variables and enriches responses with continuous latent variables. thus, we diversify the generated responses while maintaining relevance and coherence. 
in addition, we propose conditional hybrid variational transformer (chvt) to construct and to utilize hlv with transformers for dialogue generation. through fine-grained symbolic-level semantic information and additive gaussian mixing, we construct the distribution of continuous variables, prompting the generation of diverse expressions. meanwhile, to maintain the relevance and coherence, the discrete latent variable is optimized by self-separation training. experimental results on two dialogue generation datasets (dailydialog and opensubtitles) show that chvt is superior to traditional transformer-based variational mechanism w.r.t. diversity, relevance and coherence metrics. moreover, we also demonstrate the benefit of applying hlv to fine-tuning two pre-trained dialogue models (plato and bart-base). 1 \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/90.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/90.txt new file mode 100644 index 0000000000000000000000000000000000000000..35f1d7a8dd59a30e154ac102ab3cbfac02e7c83b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/90.txt @@ -0,0 +1 @@ + energy storage resources must consider both price uncertainties and their physical operating characteristics when participating in wholesale electricity markets. this is a challenging problem as electricity prices are highly volatile, and energy storage has efficiency losses, power, and energy constraints. this paper presents a novel, versatile, and transferable approach combining model-based optimization with a convolutional long short-term memory network for energy storage to respond to or bid into wholesale electricity markets. we test our proposed approach using historical prices from new york state, showing it achieves state-of-the-art results, achieving between 70% to near 90% profit ratio compared to perfect foresight cases, in both price response and wholesale market bidding setting with various energy storage durations. we also test a transfer learning approach by pre-training the bidding model using new york data and applying it to arbitrage in queensland, australia. the result shows transfer learning achieves exceptional arbitrage profitability with as little as three days of local training data, demonstrating its significant advantage over training from scratch in scenarios with very limited data availability. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/900.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/900.txt new file mode 100644 index 0000000000000000000000000000000000000000..72c6bc47bc98c736f1d724a658e08d7434192048 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/900.txt @@ -0,0 +1 @@ + autism spectrum disorder (autism) is a neurodevelopmental delay which affects at least 1 in 44 children. like many neurological disorder phenotypes, the diagnostic features are observable, can be tracked over time, and can be managed or even eliminated through proper therapy and treatments. yet, there are major bottlenecks in the diagnostic, therapeutic, and longitudinal tracking pipelines for autism and related delays, creating an opportunity for novel data science solutions to augment and transform existing workflows and provide access to services for more affected families. 
several prior efforts conducted by a multitude of research labs have spawned great progress towards improved digital diagnostics and digital therapies for children with autism. we review the literature of digital health methods for autism behavior quantification using data science. we describe both case-control studies and classification systems for digital phenotyping. we then discuss digital diagnostics and therapeutics which integrate machine learning models of autism-related behaviors, including the factors which must be addressed for translational use. finally, we describe ongoing challenges and potent opportunities for the field of autism data science. given the heterogeneous nature of autism and the complexities of the relevant behaviors, this review contains insights which are relevant to neurological behavior analysis and digital psychiatry more broadly. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/901.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/901.txt new file mode 100644 index 0000000000000000000000000000000000000000..68bf0c6d58ad5dc7ecb51f92fdec4d41a8d90204 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/901.txt @@ -0,0 +1 @@ + the role of robots is expanding from tool to collaborator. socially assistive robots (sars) are an example of collaborative robots that assist humans in the real world. as robots enter our social sphere, unforeseen risks occur during human-robot interaction (hri), as everyday human space is full of uncertainties. risk introduces an element of trust, so understanding human trust in the robot is imperative to initiate and maintain interactions with robots over time. while many scholars have investigated the issue of human-robot trust, a significant portion of that discussion is rooted in the human-automation interaction literature. as robots are no longer mere instruments, but social agents that co-exist with humans, we need a new lens to investigate the longitudinal dynamic nature of trust in hri. in this position paper, we contend that focusing on the dynamic nature of trust as a new inquiry will help us better design trustworthy robots. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/902.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/902.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae7a84544bf3feef25a65f87e225c65c6a5fcc8a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/902.txt @@ -0,0 +1 @@ + bias infects the algorithms that wield increasing control over our lives. predictive policing systems overestimate crime in communities of color; hiring algorithms dock qualified female candidates; and facial recognition software struggles to recognize dark-skinned faces. algorithmic bias has received significant attention. algorithmic neutrality, in contrast, has been largely neglected. algorithmic neutrality is my topic. i take up three questions. what is algorithmic neutrality? is algorithmic neutrality possible? when we have algorithmic neutrality in mind, what can we learn about algorithmic bias? to answer these questions in concrete terms, i work with a case study: search engines. drawing on work about neutrality in science, i say that a search engine is neutral only if certain values-like political ideologies or the financial interests of the search engine operator-play no role in how the search engine ranks pages. search neutrality, i argue, is impossible.
its impossibility seems to threaten the significance of search bias: if no search engine is neutral, then every search engine is biased. to defuse this threat, i distinguish two forms of bias-failing-on-its-own-terms bias and exogenous-values bias. this distinction allows us to make sense of search bias, and capture its normative complexion, despite the impossibility of neutrality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/903.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/903.txt new file mode 100644 index 0000000000000000000000000000000000000000..70102a0bd8c0ba2300c385fe7bfd5e14c29b6486 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/903.txt @@ -0,0 +1 @@ + despite widespread calls for transparent artificial intelligence systems, the term is too overburdened with disparate meanings to express precise policy aims or to orient concrete lines of research. consequently, stakeholders often talk past each other, with policymakers expressing vague demands and practitioners devising solutions that may not address the underlying concerns. part of why this happens is that a clear ideal of ai transparency goes unsaid in this body of work. we explicitly name such a north star-transparency that is user-centered, user-appropriate, and honest. we conduct a broad literature survey, identifying many clusters of similar conceptions of transparency, tying each back to our north star with analysis of how it furthers or hinders our ideal ai transparency goals. we conclude with a discussion on common threads across all the clusters, to provide clearer common language whereby policymakers, stakeholders, and practitioners can communicate concrete demands and deliver appropriate solutions. we hope for future work on ai transparency that further advances confident, user-beneficial goals and provides clarity to regulators and developers alike. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/904.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/904.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6b44fae55bb66d6a786b8822b13f85f3018cb89 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/904.txt @@ -0,0 +1 @@ + libraries are increasingly relying on computational methods, including methods from artificial intelligence (ai). this increasing usage raises concerns about the risks of ai that are currently broadly discussed in scientific literature, the media and law-making. in this article we investigate the risks surrounding bias and unfairness in ai usage in classification and automated text analysis within the context of library applications. we describe examples that show how the library community has been aware of such risks for a long time, and how it has developed and deployed countermeasures. we take a closer look at the notion of '(un)fairness' in relation to the notion of 'diversity', and we investigate a formalisation of diversity that models both inclusion and distribution. we argue that many of the unfairness problems of automated content analysis can also be regarded through the lens of diversity and the countermeasures taken to enhance diversity.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/905.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/905.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3d5c60452db594689daa343818a298a4535b50f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/905.txt @@ -0,0 +1 @@ + persuasion is a key aspect of what it means to be human, and is central to business, politics, and other endeavors. advancements in artificial intelligence (ai) have produced ai systems that are capable of persuading humans to buy products, watch videos, click on search results, and more. even systems that are not explicitly designed to persuade may do so in practice. in the future, increasingly anthropomorphic ai systems may form ongoing relationships with users, increasing their persuasive power. this paper investigates the uncertain future of persuasive ai systems. we examine ways that ai could qualitatively alter our relationship to and views regarding persuasion by shifting the balance of persuasive power, allowing personalized persuasion to be deployed at scale, powering misinformation campaigns, and changing the way humans can shape their own discourse. we consider ways ai-driven persuasion could differ from human-driven persuasion. we warn that ubiquitous highly persuasive ai systems could alter our information environment so significantly as to contribute to a loss of human control of our own future. in response, we examine several potential responses to ai-driven persuasion: prohibition, identification of ai agents, truthful ai, and legal remedies. we conclude that none of these solutions will be airtight, and that individuals and governments will need to take active steps to guard against the most pernicious effects of persuasive ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/906.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/906.txt new file mode 100644 index 0000000000000000000000000000000000000000..343367b31eb6ceda3f59e10c23d7cdea2e4ad79c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/906.txt @@ -0,0 +1 @@ + technological advancements have resulted in an exponential increase in the use of online social networks (osns) worldwide. while online social networks provide a great communication medium, they also increase the user's exposure to life-threatening situations such as suicide, eating disorders, cybercrime, compulsive behavior, anxiety, and depression. to tackle the issue of cyberbullying, most existing literature focuses on developing approaches to identifying factors and understanding the textual factors associated with cyberbullying. while most of these approaches have brought great success in cyberbullying research, data availability needed to develop model detection remains a challenge in the research space. this paper conducts a comprehensive literature review to provide an understanding of cyberbullying detection.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/907.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/907.txt new file mode 100644 index 0000000000000000000000000000000000000000..688dd33881bbf24ca181e35128714134c11a55e1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/907.txt @@ -0,0 +1 @@ + elections where electors rank the candidates (or a subset of the candidates) in order of preference allow the collection of more information about the electors' intent. the most widely used election of this type is instant-runoff voting (irv), where candidates are eliminated one by one, until a single candidate holds the majority of the remaining ballots. condorcet elections treat the election as a set of simultaneous decisions about each pair of candidates. the condorcet winner is the candidate who beats all others in these pairwise contests. there are various proposals to determine a winner if no condorcet winner exists. in this paper we show how we can efficiently audit condorcet elections for a number of variations. we also compare the audit efficiency (how many ballots we expect to sample) of irv and condorcet elections. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/908.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/908.txt new file mode 100644 index 0000000000000000000000000000000000000000..98719118edc094c04a94c94bb375a32bf26d20ab --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/908.txt @@ -0,0 +1 @@ + ai documentation is a rapidly-growing channel for coordinating the design of ai technologies with policies for transparency and accessibility. calls to standardize and enact documentation of algorithmic harms and impacts are now commonplace. however, documentation standards for ai remain inchoate, and fail to match the capabilities and social effects of increasingly impactful architectures such as large language models (llms). in this paper, we show the limits of present documentation protocols, and argue for dynamic documentation as a new paradigm for understanding and evaluating ai systems. we first review canonical approaches to system documentation outside the context of ai, focusing on the complex history of environmental impact statements (eiss). we next compare critical elements of the eis framework to present challenges with algorithmic documentation, which have inherited the limitations of eiss without incorporating their strengths. these challenges are specifically illustrated through the growing popularity of model cards and two case studies of algorithmic impact assessment in china and canada. finally, we evaluate more recent proposals, including reward reports, as potential components of fully dynamic ai documentation protocols. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/909.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/909.txt new file mode 100644 index 0000000000000000000000000000000000000000..afef877ae0b8d72c587993c23995361cf19ee5ea --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/909.txt @@ -0,0 +1 @@ + as a transformative general-purpose technology, ai has empowered various industries and will continue to shape our lives through ubiquitous applications. 
despite the enormous benefits from widespread ai deployment, it is crucial to address associated downside risks and therefore ensure ai advances are safe, fair, responsible, and aligned with human values. to do so, we need to establish effective ai governance. in this work, we show that the strategic interaction between the regulatory agencies and ai firms has an intrinsic structure reminiscent of a stackelberg game, which motivates us to propose a game-theoretic modeling framework for ai governance. in particular, we formulate such interaction as a stackelberg game composed of a leader and a follower, which captures the underlying game structure compared to its simultaneous play counterparts. furthermore, the choice of the leader naturally gives rise to two settings, and we demonstrate that our proposed model can serve as a unified ai governance framework from two aspects: firstly, we can map one setting to the ai governance of civil domains and the other to the safety-critical and military domains; secondly, the two settings of governance could be chosen contingent on the capability of the intelligent systems. to the best of our knowledge, this work is the first to use game theory for analyzing and structuring ai governance. we also discuss promising directions and hope this can help stimulate research interest in this interdisciplinary area. on a high level, we hope this work will contribute to developing a new paradigm for technology policy: the quantitative and ai-driven methods for the technology policy field, which holds significant promise for overcoming many shortcomings of existing qualitative approaches. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/91.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/91.txt new file mode 100644 index 0000000000000000000000000000000000000000..e10143b365c7f6159b0ed65414fc1463fcfd83e2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/91.txt @@ -0,0 +1 @@ + the paper proposes various strategies for sampling text data when performing automatic sentence classification for the purpose of detecting missing bibliographic links. we construct samples based on sentences as semantic units of the text and add their immediate context which consists of several neighboring sentences. we examine a number of sampling strategies that differ in context size and position. the experiment is carried out on the collection of stem scientific papers. including the context of sentences into samples improves the result of their classification. we automatically determine the optimal sampling strategy for a given text collection by implementing an ensemble voting when classifying the same data sampled in different ways. a sampling strategy that takes the sentence context into account, combined with a hard voting procedure, leads to a classification accuracy of 98% (f1-score). this method of detecting missing bibliographic links can be used in recommendation engines of applied intelligent information systems.
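The sampling-strategy abstract just above (targets/91.txt) classifies the same sentence several times, each time wrapped in a different context window, and combines the predictions by hard voting. A minimal sketch of that voting step follows; the particular context-window strategies and the stand-in classifier are assumptions for illustration.

```python
# Sketch of hard voting over context-sampling strategies for sentence classification.
from collections import Counter
from typing import Callable, List, Tuple

# (sentences_before, sentences_after) context sizes - hypothetical strategies
STRATEGIES: List[Tuple[int, int]] = [(0, 0), (1, 1), (2, 0), (0, 2)]

def build_sample(sentences: List[str], idx: int, before: int, after: int) -> str:
    """Return the idx-th sentence together with its surrounding context."""
    lo, hi = max(0, idx - before), min(len(sentences), idx + after + 1)
    return " ".join(sentences[lo:hi])

def hard_vote_label(sentences: List[str], idx: int, classify: Callable[[str], int]) -> int:
    """Classify the sentence under every sampling strategy; return the majority label."""
    votes = [classify(build_sample(sentences, idx, b, a)) for b, a in STRATEGIES]
    return Counter(votes).most_common(1)[0][0]

# usage with a trivial stand-in classifier (1 = "missing bibliographic link")
sents = ["previous work reports similar results", "we follow the same protocol", "details are given elsewhere"]
label = hard_vote_label(sents, idx=1, classify=lambda text: int("previous work" in text))
```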
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/910.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/910.txt new file mode 100644 index 0000000000000000000000000000000000000000..ba2f3272e5eb3182ee148a05a4685da9a4d1eae5 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/910.txt @@ -0,0 +1 @@ + in this work, we propose a method for extracting text spans that may indicate one of the big5 psychological traits using a question-answering task with examples that have no answer for the asked question. we utilized the roberta model fine-tuned on the squad 2.0 dataset. the model was further fine-tuned utilizing comments from reddit. we examined the effect of the percentage of examples with no answer in the training dataset on the overall performance. the results obtained in this study are in line with the squad 2.0 benchmark and present a good baseline for further research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/911.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/911.txt new file mode 100644 index 0000000000000000000000000000000000000000..ecf34452cb1f3e23486670f87db290a220567918 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/911.txt @@ -0,0 +1 @@ + during the periods of sudden transition to online education, the opportunity to make applications that might attract students' attention to the course has decreased even more. although attempts were made to eliminate this deficiency with videos and simulations, it was not possible to ensure active participation of students in some cases. in this study, the algodoo program, which can increase the efficiency of the teaching environment by ensuring active participation of students in online lessons, and the applications that can be done with it about impulse and momentum are explained in detail. a total of 6 different applications were carried out, 1 related to the subject of impulse, 1 related to the momentum, 2 related to the relationship between impulse and momentum change, and 2 related to momentum conservation. at the same time, while developing these applications, the adjustments made on the simulation and the reasons are explained in detail. in this way, both the introduction of the program and the sample application suggestion were presented. the values obtained as a result of the applications were calculated and compared both theoretically and on simulation in different ways. as a result, it has been observed that the values have internal consistency with each other and are also compatible with theoretical calculations. the algodoo program, which allows many interactive applications and can be downloaded for free, is a program that can be used both in lecturing and evaluation processes in physics lessons during online education. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/912.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/912.txt new file mode 100644 index 0000000000000000000000000000000000000000..1cabcb28a875198e9342b4a91dceee1a207d1b9e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/912.txt @@ -0,0 +1 @@ + while existing work on studying bias in nlp focuses on negative or pejorative language use, govindarajan et al. (2023) offer a revised framing of bias in terms of intergroup social context, and its effects on language behavior.
in this paper, we investigate if two pragmatic features (specificity and affect) systematically vary in different intergroup contexts -thus connecting this new framing of bias to language output. preliminary analysis finds modest correlations between specificity and affect of tweets with supervised intergroup relationship (igr) labels. counterfactual probing further reveals that while neural models finetuned for predicting igr reliably use affect in classification, the model's usage of specificity is inconclusive. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/913.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/913.txt new file mode 100644 index 0000000000000000000000000000000000000000..ebf4346d7650e2952f6932271930526b432fd1dc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/913.txt @@ -0,0 +1 @@ + the goal of this paper is establishing if we can satisfactorily perform a thematic analysis (ta) of semistructured interviews using a large language model (more precisely gpt3.5-turbo). building on previous work by the author, which established an embryonal process for conducting a ta with the model, this paper will perform a further analysis and then cover the last phase of a ta (phase 6), which entails the writing up of the result. this phase was not covered by the previous work. in particular, the focus will be on using the results of a ta done with the llm on a dataset of user interviews, for writing user personas, with the model building on the ta to produce the personas narratives. user personas are models of real users, usually built from a data analysis like interviews with a sample of users. user personas are tools often used in user centered design processes. the paper shows that the model can build basic user personas with an acceptable quality deriving them from themes, and that the model can serve for the generation of ideas for user personas. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/914.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/914.txt new file mode 100644 index 0000000000000000000000000000000000000000..3a142b982194e301702e0a65caaf073cbfd1df15 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/914.txt @@ -0,0 +1 @@ + we consider the emerging problem of identifying the presence and use of watermarking schemes in widely used, publicly hosted, closed source large language models (llms). we introduce a suite of baseline algorithms for identifying watermarks in llms that rely on analyzing distributions of output tokens and logits generated by watermarked and unmarked llms. notably, watermarked llms tend to produce distributions that diverge qualitatively and identifiably from standard models. furthermore, we investigate the identifiability of watermarks at varying strengths and consider the tradeoffs of each of our identification mechanisms with respect to watermarking scenario. along the way, we formalize the specific problem of identifying watermarks in llms, as well as llm watermarks and watermark detection in general, providing a framework and foundations for studying them. 
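The watermark-identification abstract above (targets/914.txt) rests on one observation: a watermarked model's output-token distribution diverges measurably from that of an unwatermarked reference on the same prompts. The sketch below compares empirical token distributions with a smoothed KL divergence; the smoothing constant and decision threshold are assumptions, not the paper's statistics.

```python
# Sketch: flag a suspect model as likely watermarked when its output-token
# distribution diverges strongly from a reference model's on the same prompts.
import math
from collections import Counter
from typing import Iterable, List

def token_distribution(token_streams: Iterable[List[str]]) -> Counter:
    counts = Counter()
    for tokens in token_streams:
        counts.update(tokens)
    return counts

def kl_divergence(p: Counter, q: Counter, eps: float = 1e-6) -> float:
    """Smoothed KL(P || Q) over the union vocabulary of the two samples."""
    vocab = set(p) | set(q)
    p_total = sum(p.values()) + eps * len(vocab)
    q_total = sum(q.values()) + eps * len(vocab)
    kl = 0.0
    for tok in vocab:
        p_prob = (p[tok] + eps) / p_total
        q_prob = (q[tok] + eps) / q_total
        kl += p_prob * math.log(p_prob / q_prob)
    return kl

def looks_watermarked(suspect_outputs, reference_outputs, threshold: float = 0.5) -> bool:
    return kl_divergence(token_distribution(suspect_outputs),
                         token_distribution(reference_outputs)) > threshold

# usage with toy token streams standing in for generations from the two models
suspect = [["the", "green", "list", "tokens", "appear", "often"]]
reference = [["the", "usual", "tokens", "appear", "as", "expected"]]
print(looks_watermarked(suspect, reference))
```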
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/915.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/915.txt new file mode 100644 index 0000000000000000000000000000000000000000..03a4ea3426adf6f8493facf046b2b0c12dfe5355 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/915.txt @@ -0,0 +1 @@ + bias mitigation methods for binary classification decision-making systems have been widely researched due to the ever-growing importance of designing fair machine learning processes that are impartial and do not discriminate against individuals or groups based on protected personal characteristics. in this paper, we present a structured overview of the research landscape for bias mitigation methods, report on their benefits and limitations, and provide recommendations for the development of future bias mitigation methods for binary classification.within the field of fair machine learning, several bias mitigation methods have been proposed, targeting the removal of unwanted bias from training data (e.g. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/916.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/916.txt new file mode 100644 index 0000000000000000000000000000000000000000..0507fba35ab8e337f4556711eda1bac0654cf08e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/916.txt @@ -0,0 +1 @@ + useful conversational agents must accurately capture named entities to minimize error for downstream tasks, for example, asking a voice assistant to play a track from a certain artist, initiating navigation to a specific location, or documenting a laboratory result for a patient. however, where named entities such as "ukachukwu" (igbo), "lakicia" (swahili), or "ingabire" (rwandan) are spoken, automatic speech recognition (asr) models' performance degrades significantly, propagating errors to downstream systems. we model this problem as a distribution shift and demonstrate that such model bias can be mitigated through multilingual pre-training, intelligent data augmentation strategies to increase the representation of african-named entities, and fine-tuning multilingual asr models on multiple african accents. the resulting fine-tuned models show an 81.5% relative wer improvement compared with the baseline on samples with african-named entities. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/917.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/917.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb454343ec3f8c4829ba72614723cc2d3377ca4e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/917.txt @@ -0,0 +1 @@ + internet immunity doctrine is broken. under section 230 of the communications decency act of 1996, online entities are absolutely immune from lawsuits related to content authored by third parties. the law has been essential to the internet's development over the last twenty years, but it has not kept pace with the times and is now deeply flawed. democrats demand accountability for online misinformation. republicans decry politically motivated censorship. and congress, president biden, the department of justice, and the federal communications commission all have their own plans for reform. absent from the fray, however-until now-has been the supreme court, which has never issued a decision interpreting section 230. 
that appears poised to change, however, following justice thomas's statement in malwarebytes v. enigma in which he urges the court to prune back decades of lower-court precedent to craft a more limited immunity doctrine. this essay discusses how courts' zealous enforcement of the early internet's free-information ethos gave birth to an expansive immunity doctrine, warns of potential pitfalls to reform, and explores what a narrower, text-focused doctrine might mean for the tech industry. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/918.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/918.txt new file mode 100644 index 0000000000000000000000000000000000000000..d789b707153dcc9f80ba205cb787026037e041c4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/918.txt @@ -0,0 +1 @@ + we do everything online. we shop, plan travel, invest, socialize, and even hold garage sales. even though we may not care whether a company operates online or in the physical world-the distinction has important consequences for the companies themselves. online and offline entities are governed by different rules. under section 230 of the communications decency act, online entities-but not physical-world entities-are immune from lawsuits related to content authored by their users or customers. as a result, online entities have been able to avoid claims for harms caused by their negligence and defective product designs simply because they operate online.the reason for the disparate treatment is the internet's dramatic evolution over the last two decades. the internet of 1996 served as an information repository and communications channel and was well governed by section 230, which treats internet entities as another form of mass media. because facebook, twitter, and other online companies could not possibly review the mass of content that flows through their systems, section 230 immunizes them from claims related to user content. but content distribution is not the internet's only function, and it is even less so now than it was in 1996. the internet also operates as a platform for the delivery of real-world goods and services and requires a correspondingly diverse immunity doctrine. this article proposes refining online immunity by limiting it to claims that threaten to impose a content-moderation burden on internet defendants. where a claim is preventable other than by content moderation-for example, by redesigning an app or website-a plaintiff could freely seek relief, just as in the physical world. this approach empowers courts to identify culpable actors in the virtual world and treat like conduct alike wherever it occurs. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/919.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/919.txt new file mode 100644 index 0000000000000000000000000000000000000000..60d87b208c8a6a7923a96ccf4daa99acb771181c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/919.txt @@ -0,0 +1 @@ + blockchain-based cryptocurrencies have become an extremely important, highly-used, technology. a major criticism of cryptocurrencies-however-is their energy consumption. in may 2022 bitcoin alone was reported to be consuming 150 terawatt-hours of electricity annually-more than many entire countries. hence, any meaningful efficiency increase in this process would have a tremendous positive impact. 
meanwhile, practical applications of quantum information technologies, and in particular of near-term quantum computers (nisq), continue to be an important research question. here, we study the efficiency benefits of moving cryptocurrency mining from current asic-based miners to quantum, and in particular nisq, miners. while the time-efficiency benefits of quantum technologies are extremely well-studied, here we focus on energy savings. we show that the transition to quantum-based mining could incur an energy saving-by relatively conservative estimates-of roughly 126.7 twh, or, put differently, the total energy consumption of sweden in 2020. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/92.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/92.txt new file mode 100644 index 0000000000000000000000000000000000000000..643cd89d8748a0846c1216a10bc7659dd84bc816 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/92.txt @@ -0,0 +1 @@ + deep learning techniques with neural networks have been used effectively in computational fluid dynamics (cfd) to obtain solutions to nonlinear differential equations. this paper presents a physics-informed neural network (pinn) approach to solve the blasius function. this method eliminates the process of changing the non-linear differential equation to an initial value problem. also, it tackles the convergence issue arising in the conventional series solution. it is seen that this method produces results that are on par with the numerical and conventional methods. the solution is extended to the negative axis to show that pinns capture the singularity of the function at η = -5.69. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/920.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/920.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef24333b90ffd3e68a6cec8dc3e2f17923eb100d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/920.txt @@ -0,0 +1 @@ + with the development of the online education system, personalized education recommendation has played an essential role. in this paper, we focus on developing path recommendation systems that aim to generate and recommend an entire learning path to the given user in each session. noticing that existing approaches fail to consider the correlations of concepts in the path, we propose a novel framework named set-to-sequence ranking-based concept-aware learning path recommendation (src), which formulates the recommendation task under a set-to-sequence paradigm. specifically, we first design a concept-aware encoder module that can capture the correlations among the input learning concepts. the outputs are then fed into a decoder module that sequentially generates a path through an attention mechanism that handles correlations between the learning and target concepts. our recommendation policy is optimized by policy gradient. in addition, we also introduce an auxiliary module based on knowledge tracing to enhance the model's stability by evaluating students' learning effects on learning concepts. we conduct extensive experiments on two real-world public datasets and one industrial dataset, and the experimental results demonstrate the superiority and effectiveness of src.
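The blasius abstract above (targets/92.txt) solves the boundary-value form of the equation directly with a PINN: f''' + 0.5*f*f'' = 0 with f(0) = 0, f'(0) = 0 and f'(eta) -> 1 as eta grows. A minimal PyTorch sketch follows, with a finite eta_max standing in for infinity; the network size, learning rate, and loss weighting are assumptions rather than the paper's settings.

```python
# Sketch of a PINN for the Blasius boundary-value problem f''' + 0.5*f*f'' = 0,
# f(0)=0, f'(0)=0, f'(eta_max)=1. All hyperparameters below are illustrative.
import torch
import torch.nn as nn

torch.manual_seed(0)
net = nn.Sequential(nn.Linear(1, 32), nn.Tanh(), nn.Linear(32, 32), nn.Tanh(), nn.Linear(32, 1))
optimizer = torch.optim.Adam(net.parameters(), lr=1e-3)
eta_max = 8.0  # finite stand-in for the far-field boundary

def derivatives(eta: torch.Tensor):
    """Return f, f', f'', f''' at the given points via automatic differentiation."""
    f = net(eta)
    grad = lambda y: torch.autograd.grad(y, eta, torch.ones_like(y), create_graph=True)[0]
    f1 = grad(f)
    f2 = grad(f1)
    f3 = grad(f2)
    return f, f1, f2, f3

for step in range(2000):
    eta = (torch.rand(128, 1) * eta_max).requires_grad_(True)   # collocation points
    f, _, f2, f3 = derivatives(eta)
    residual = f3 + 0.5 * f * f2                                 # Blasius ODE residual
    eta0 = torch.zeros(1, 1, requires_grad=True)
    eta_far = torch.full((1, 1), eta_max, requires_grad=True)
    f0, f0p, _, _ = derivatives(eta0)
    _, ffp, _, _ = derivatives(eta_far)
    bc = f0.squeeze() ** 2 + f0p.squeeze() ** 2 + (ffp.squeeze() - 1.0) ** 2
    loss = (residual ** 2).mean() + bc
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
```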
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/921.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/921.txt new file mode 100644 index 0000000000000000000000000000000000000000..b63396d6a892636a1f0998a6a0f22192f63cfd7a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/921.txt @@ -0,0 +1 @@ + accurate yield forecasting is essential for making informed policies and long-term decisions for food security. earth observation (eo) data and machine learning algorithms play a key role in providing a comprehensive and timely view of crop conditions from field to national scales. however, machine learning algorithms' prediction accuracy is often harmed by spatial heterogeneity caused by exogenous factors not reflected in remote sensing data, such as differences in crop management strategies. in this paper, we propose and investigate a simple technique called state-wise additive bias to explicitly address the cross-region yield heterogeneity in kazakhstan. compared to baseline machine learning models (random forest, catboost, xgboost), our method reduces the overall rmse by 8.9% and the highest state-wise rmse by 28.37%. the effectiveness of state-wise additive bias indicates machine learning's performance can be significantly improved by explicitly addressing the spatial heterogeneity, motivating future work on spatial-aware machine learning algorithms for yield forecasts as well as for general geospatial forecasting problems. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/922.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/922.txt new file mode 100644 index 0000000000000000000000000000000000000000..e4262400eb47b5e3a53ed2972a0995ec8aab1029 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/922.txt @@ -0,0 +1 @@ + better understanding of large language models' (llms) legal analysis abilities can contribute to improving the efficiency of legal services, governing artificial intelligence, and leveraging llms to identify inconsistencies in law. this paper explores llm capabilities in applying tax law. we choose this area of law because it has a structure that allows us to set up automated validation pipelines across thousands of examples, requires logical reasoning and maths skills, and enables us to test llm capabilities in a manner relevant to real-world economic lives of citizens and companies. our experiments demonstrate emerging legal understanding capabilities, with improved performance in each subsequent openai model release. we experiment with retrieving and utilising the relevant legal authority to assess the impact of providing additional legal context to llms. few-shot prompting, presenting examples of question-answer pairs, is also found to significantly enhance the performance of the most advanced model, gpt-4. the findings indicate that llms, particularly when combined with prompting enhancements and the correct legal texts, can perform at high levels of accuracy but not yet at expert tax lawyer levels. as llms continue to advance, their ability to reason about law autonomously could have significant implications for the legal profession and ai governance. 
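The yield-forecasting abstract above (targets/921.txt) introduces a "state-wise additive bias" on top of baseline regressors such as random forest to absorb cross-region heterogeneity. One plausible reading, sketched below, learns a per-state additive correction from the baseline's training residuals; the class name and the mean-residual rule are assumptions, not the paper's exact formulation.

```python
# Sketch: per-state additive bias layered on top of any fit/predict regressor.
from collections import defaultdict
from typing import Dict, Hashable, Sequence

class StateWiseAdditiveBias:
    def __init__(self, base_model):
        self.base_model = base_model          # any object exposing fit(X, y) / predict(X)
        self.bias: Dict[Hashable, float] = {}

    def fit(self, X, y: Sequence[float], states: Sequence[Hashable]):
        self.base_model.fit(X, y)
        preds = self.base_model.predict(X)
        residuals = defaultdict(list)
        for state, target, pred in zip(states, y, preds):
            residuals[state].append(target - pred)
        # one additive bias per state: the baseline's mean residual in that state
        self.bias = {s: sum(r) / len(r) for s, r in residuals.items()}
        return self

    def predict(self, X, states: Sequence[Hashable]):
        preds = self.base_model.predict(X)
        return [p + self.bias.get(s, 0.0) for p, s in zip(preds, states)]

# usage with a random-forest baseline, as mentioned in the abstract:
# from sklearn.ensemble import RandomForestRegressor
# model = StateWiseAdditiveBias(RandomForestRegressor()).fit(X_train, y_train, states_train)
# y_hat = model.predict(X_test, states_test)
```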
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/923.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/923.txt new file mode 100644 index 0000000000000000000000000000000000000000..cccd3609dac8cb1d66a4bb5ef0afc6105ccafac9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/923.txt @@ -0,0 +1 @@ + end users of remote computing systems are frequently not aware of basic ways in which they could enhance protection against cyber-threats and attacks. in this paper, we discuss specific techniques to help and train users to improve cybersecurity when using such systems. to explain the rationale behind these techniques, we go into some depth explaining possible threats in the context of using remote, shared computing resources. although some of the details of these prescriptions and recommendations apply to specific use cases when connecting to remote servers, such as a supercomputer, cluster, or linux workstation, the main concepts and ideas can be applied to a wider spectrum of cases. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/924.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/924.txt new file mode 100644 index 0000000000000000000000000000000000000000..523f337fecae0901539e980b35ec5d1a47f3bc60 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/924.txt @@ -0,0 +1 @@ + large language models (llms) are remarkable data annotators. they can be used to generate high-fidelity supervised training data, as well as survey and experimental data. with the widespread adoption of llms, human gold-standard annotations are key to understanding the capabilities of llms and the validity of their results. however, crowdsourcing, an important, inexpensive way to obtain human annotations, may itself be impacted by llms, as crowd workers have financial incentives to use llms to increase their productivity and income. to investigate this concern, we conducted a case study on the prevalence of llm usage by crowd workers. we reran an abstract summarization task from the literature on amazon mechanical turk and, through a combination of keystroke detection and synthetic text classification, estimate that 33-46% of crowd workers used llms when completing the task. although generalization to other, less llm-friendly tasks is unclear, our results call for platforms, researchers, and crowd workers to find new ways to ensure that human data remain human, perhaps using the methodology proposed here as a stepping stone. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/925.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/925.txt new file mode 100644 index 0000000000000000000000000000000000000000..91ccf20f19a653a00998ac7135328f5a878242d8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/925.txt @@ -0,0 +1 @@ + in this paper, we address the issue of fairness in preference-based reinforcement learning (pbrl) in the presence of multiple objectives. the main objective is to design control policies that can optimize multiple objectives while treating each objective fairly. toward this objective, we design a new fairness-induced preference-based reinforcement learning or fpbrl.
the main idea of fpbrl is to learn vector reward functions associated with multiple objectives via new welfare-based preferences rather than reward-based preference in pbrl, coupled with policy learning via maximizing a generalized gini welfare function. finally, we provide experiment studies on three different environments to show that the proposed fpbrl approach can achieve both efficiency and equity for learning effective and fair policies. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/926.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/926.txt new file mode 100644 index 0000000000000000000000000000000000000000..5b3a3f563f8ed9b8749f6021f58533251d36665f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/926.txt @@ -0,0 +1 @@ + the demographic disparity of biometric systems has led to serious concerns regarding their societal impact as well as applicability of such systems in private and public domains. a quantitative evaluation of demographic fairness is an important step towards understanding, assessment, and mitigation of demographic bias in biometric applications. while few, existing fairness measures are based on post-decision data (such as verification accuracy) of biometric systems, we discuss how pre-decision data (score distributions) provide useful insights towards demographic fairness. in this paper, we introduce multiple measures, based on the statistical characteristics of score distributions, for the evaluation of demographic fairness of a generic biometric verification system. we also propose different variants for each fairness measure depending on how the contribution from constituent demographic groups needs to be combined towards the final measure. in each case, the behavior of the measure has been illustrated numerically and graphically on synthetic data. the demographic imbalance in benchmarking datasets is often overlooked during fairness assessment. we provide a novel weighing strategy to reduce the effect of such imbalance through a non-linear function of sample sizes of demographic groups. the proposed measures are independent of the biometric modality, and thus, applicable across commonly used biometric modalities (e.g., face, fingerprint, etc.). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/927.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/927.txt new file mode 100644 index 0000000000000000000000000000000000000000..0cdcfa4d3eeb84fd47fe8fcb22c059f56f812d96 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/927.txt @@ -0,0 +1 @@ + we consider the problem of learning from data corrupted by underrepresentation bias, where positive examples are filtered from the data at different, unknown rates for a fixed number of sensitive groups. we show that with a small amount of unbiased data, we can efficiently estimate the group-wise drop-out parameters, even in settings where intersectional group membership makes learning each intersectional rate computationally infeasible. using this estimate for the group-wise drop-out rate, we construct a re-weighting scheme that allows us to approximate the loss of any hypothesis on the true distribution, even if we only observe the empirical error on a biased sample. 
finally, we present an algorithm encapsulating this learning and re-weighting process, and we provide strong pac-style guarantees that, with high probability, our estimate of the risk of the hypothesis over the true distribution will be arbitrarily close to the true risk. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/928.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/928.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f994cec87a446de574b9c5f826a170ef59aa481 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/928.txt @@ -0,0 +1 @@ + the technology of conversational ai has made significant advancements over the last eighteen months. as a consequence, conversational agents are likely to be deployed in the near future that are designed to pursue targeted influence objectives. sometimes referred to as the "ai manipulation problem," the emerging risk is that consumers will unwittingly engage in real-time dialog with predatory ai agents that can skillfully persuade them to buy particular products, believe particular pieces of misinformation, or fool them into revealing sensitive personal data. for many users, current systems like chatgpt and lamda feel safe because they are primarily text-based, but the industry is already shifting towards real-time voice and photorealistic digital personas that look, move, and express like real people. this will enable the deployment of agenda-driven virtual spokespeople (vsps) that will be highly persuasive through real-time adaptive influence. this paper explores the manipulative tactics that are likely to be deployed through conversational ai agents, the unique threats such agents pose to the epistemic agency of human users, and the emerging need for policymakers to protect against the most likely predatory practices. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/929.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/929.txt new file mode 100644 index 0000000000000000000000000000000000000000..a33aa53a2a9f468dc7fa9944c38102119b0d98b1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/929.txt @@ -0,0 +1 @@ + blockchain technology transformed the digital sphere by providing a transparent, secure, and decentralized platform for data security across a range of industries, including cryptocurrencies and supply chain management. blockchain's integrity and dependability have been jeopardized by the rising number of security threats, which have made it a target for cybercriminals. by summarizing suggested fixes, this research aims to offer a thorough analysis of mitigating blockchain attacks. the objectives of the paper include identifying weak blockchain attacks, evaluating various solutions, and determining how effective they are at preventing these attacks. the study also highlights how crucial it is to take into account the particular needs of every blockchain application. this study provides beneficial perspectives and insights for blockchain researchers and practitioners, making it essential reading for those interested in current and future trends in blockchain security research.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/93.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/93.txt new file mode 100644 index 0000000000000000000000000000000000000000..450b0de37ec1faf7fcf4a82137449cb95d860cf8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/93.txt @@ -0,0 +1 @@ + we aim to bridge the gap between our common-sense few-sample human learning and large-data machine learning. we derive a theory of human-like fewshot learning from von-neuman-landauer's principle. modelling human learning is difficult as how people learn varies from one to another. under commonly accepted definitions, we prove that all human or animal few-shot learning, and major models including free energy principle and bayesian program learning that model such learning, approximate our theory, under church-turing thesis. we find that deep generative model like variational autoencoder (vae) can be used to approximate our theory and perform significantly better than baseline models including deep neural networks, for image recognition, low resource language processing, and character recognition. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/930.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/930.txt new file mode 100644 index 0000000000000000000000000000000000000000..45add413b4ed78c967d7b9d6fee82e532d1e3d5b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/930.txt @@ -0,0 +1 @@ + the absence of transparency and explainability hinders the clinical adoption of machine learning (ml) algorithms. although various methods of explainable artificial intelligence (xai) have been suggested, there is a lack of literature that delves into their practicality and assesses them based on criteria that could foster trust in clinical environments. to address this gap this study evaluates two popular xai methods used for explaining predictive models in the healthcare context in terms of whether they (i) generate domain-appropriate representation, i.e. coherent with respect to the application task, (ii) impact clinical workflow and (iii) are consistent. to that end, explanations generated at the cohort and patient levels were analysed. the paper reports the first benchmarking of the xai methods applied to risk prediction models obtained by evaluating the concordance between generated explanations and the trigger of a future clinical deterioration episode recorded by the data collection system. we carried out an analysis using two electronic medical records (emr) datasets sourced from australian major hospitals. the findings underscore the limitations of state-of-the-art xai methods in the clinical context and their potential benefits. we discuss these limitations and contribute to the theoretical development of trustworthy xai solutions where clinical decision support guides the choice of intervention by suggesting the pattern or drivers for clinical deterioration in the future. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/931.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/931.txt new file mode 100644 index 0000000000000000000000000000000000000000..3e8cb36eba8bc056da6c3c8f9f5f2d522e389d81 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/931.txt @@ -0,0 +1 @@ + detecting and predicting septic shock early is crucial for the best possible outcome for patients. 
accurately forecasting the vital signs of patients with sepsis provides valuable insights to clinicians for timely interventions, such as administering stabilizing drugs or optimizing infusion strategies. our research examines n-beats, an interpretable deep-learning forecasting model that can forecast 3 hours of vital signs for sepsis patients in intensive care units (icus). in this work, we use the n-beats interpretable configuration to forecast the vital sign trends and compare them with the actual trend to understand better the patient's changing condition and the effects of infused drugs on their vital signs. we evaluate our approach using the publicly available eicu collaborative research database dataset and rigorously evaluate the vital sign forecasts using out-of-sample evaluation criteria. we present the performance of our model using error metrics, including mean squared error (mse), mean average percentage error (mape), and dynamic time warping (dtw), where the best scores achieved are 18.52e-4, 7.60, and 17.63e-3, respectively. we analyze the samples where the forecasted trend does not match the actual trend and study the impact of infused drugs on changing the actual vital signs compared to the forecasted trend. additionally, we examined the mortality rates of patients where the actual trend and the forecasted trend did not match. we observed that the mortality rate was higher (92%) when the actual and forecasted trends closely matched, compared to when they were not similar (84%). \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/932.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/932.txt new file mode 100644 index 0000000000000000000000000000000000000000..60a55182170ce96fa768660dc7938e5f050e76a2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/932.txt @@ -0,0 +1 @@ + we argue that insurance can act as an analogon for the social situatedness of machine learning systems, hence allowing machine learning scholars to take insights from the rich and interdisciplinary insurance literature. tracing the interaction of uncertainty, fairness and responsibility in insurance provides a fresh perspective on fairness in machine learning. we link insurance fairness conceptions to their machine learning relatives, and use this bridge to problematize fairness as calibration. in this process, we bring to the forefront two themes that have been largely overlooked in the machine learning literature: responsibility and aggregate-individual tensions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/933.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/933.txt new file mode 100644 index 0000000000000000000000000000000000000000..bdaf4d84a6bb67ba8f7fdfa78fdc62b52a9a1811 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/933.txt @@ -0,0 +1 @@ + question generation (qg) is a task within natural language processing (nlp) that involves automatically generating questions given an input, typically composed of a text and a target answer. recent work on qg aims to control the type of generated questions so that they meet educational needs. a remarkable example of controllability in educational qg is the generation of questions underlying certain narrative elements, e.g., causal relationship, outcome resolution, or prediction.
this study aims to enrich controllability in qg by introducing a new guidance attribute: question explicitness. we propose to control the generation of explicit and implicit (wh)-questions from childrenfriendly stories. we show preliminary evidence of controlling qg via question explicitness alone and simultaneously with another target attribute: the question's narrative element. the code is publicly available at github.com/bernardoleite/question-generation-control. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/934.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/934.txt new file mode 100644 index 0000000000000000000000000000000000000000..e0243d6712c58099e07be87bc93f816b73eb521f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/934.txt @@ -0,0 +1 @@ + context. algorithmic racism is the term used to describe the behavior of technological solutions that constrains users based on their ethnicity. lately, various data-driven software systems have been reported to discriminate against black people, either for the use of biased data sets or due to the prejudice propagated by software professionals in their code. as a result, black people are experiencing disadvantages in accessing technology-based services, such as housing, banking, and law enforcement. goal. this study aims to explore algorithmic racism from the perspective of software professionals. method. a survey questionnaire was applied to explore the understanding of software practitioners on algorithmic racism, and data analysis was conducted using descriptive statistics and coding techniques. results. we obtained answers from a sample of 73 software professionals discussing their understanding and perspectives on algorithmic racism in software development. our results demonstrate that the effects of algorithmic racism are well-known among practitioners. however, there is no consensus on how the problem can be effectively addressed in software engineering. in this paper, some solutions to the problem are proposed based on the professionals' narratives. conclusion. combining technical and social strategies, including training on structural racism for software professionals, is the most promising way to address the algorithmic racism problem and its effects on the software solutions delivered to our society. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/935.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/935.txt new file mode 100644 index 0000000000000000000000000000000000000000..4568f4e9508cac1bb0e29a4106c6cb21c74898cc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/935.txt @@ -0,0 +1 @@ + counterfactual inference considers a hypothetical intervention in a parallel world that shares some evidence with the factual world. if the evidence specifies a conditional distribution on a manifold, counterfactuals may be analytically intractable. we present an algorithm for simulating values from a counterfactual distribution where conditions can be set on both discrete and continuous variables. we show that the proposed algorithm can be presented as a particle filter leading to asymptotically valid inference. the algorithm is applied to fairness analysis in credit-scoring. 
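A minimal, illustrative sketch of the weighted-particle reading of conditional (counterfactual) sampling described in 935.txt above, written in Python with numpy; the prior simulator, the Gaussian evidence kernel, and its bandwidth are placeholder assumptions for illustration, not the paper's algorithm.

import numpy as np

def conditional_particles(simulate_prior, likelihood, n_particles=10000, rng=None):
    # Approximate p(x | evidence) with weighted particles.
    # simulate_prior(n, rng) -> (n, dim) array of candidate worlds (placeholder interface)
    # likelihood(x) -> unnormalised weight of the evidence under x (placeholder interface)
    rng = np.random.default_rng() if rng is None else rng
    particles = simulate_prior(n_particles, rng)            # propose from the model
    weights = np.array([likelihood(x) for x in particles], dtype=float)
    weights /= weights.sum()                                # normalise importance weights
    idx = rng.choice(n_particles, size=n_particles, p=weights)
    return particles[idx]                                   # approximate conditional draws

# toy usage: condition a 2-d standard normal on x0 + x1 being close to 1 via a narrow kernel
prior = lambda n, rng: rng.normal(size=(n, 2))
evidence = lambda x: np.exp(-0.5 * ((x[0] + x[1] - 1.0) / 0.05) ** 2)
draws = conditional_particles(prior, evidence, rng=np.random.default_rng(0))
print(draws.mean(axis=0))  # both coordinates concentrate near 0.5

Resampling by normalised importance weights is what makes this a particle-filter-style approximation: candidate worlds that match the evidence are duplicated, the rest are discarded.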
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/936.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/936.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9700220d484c68baf7e3ff69e8e5a07de24b810 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/936.txt @@ -0,0 +1 @@ + understanding the relationship between the composition of a research team and the potential impact of their research papers is crucial as it can steer the development of new science policies for improving the research enterprise. numerous studies assess how the characteristics and diversity of research teams can influence their performance across several dimensions: ethnicity, internationality, size, and others. in this paper, we explore the impact of diversity in terms of the authors' expertise. to this purpose, we retrieved 114k papers in the field of computer science and analysed how the diversity of research fields within a research team relates to the number of citations their papers received in the upcoming 5 years. the results show that two different metrics we defined, reflecting the diversity of expertise, are significantly associated with the number of citations. this suggests that, at least in computer science, diversity of expertise is key to scientific impact. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/937.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/937.txt new file mode 100644 index 0000000000000000000000000000000000000000..839fc312e6b70f8c2beb3463a366beb2289c4c0f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/937.txt @@ -0,0 +1 @@ + recent developments in artificial intelligence (ai) provide unprecedented automation opportunities in the architecture, engineering, and construction (aec) industry. however, despite the enthusiasm regarding the use of ai, 85% of current big data projects fail. one of the main reasons for ai project failures in the aec industry is the disconnect between those who plan or decide to use ai and those who implement it. aec practitioners often lack a clear understanding of the capabilities and limitations of ai, leading to a failure to distinguish between what ai should solve, what it can solve, and what it will solve, treating these categories as if they are interchangeable. this lack of understanding results in the disconnect between ai planning and implementation because the planning is based on a vision of what ai should solve without considering if it can or will solve it. to address this challenge, this work introduces the leanai method. the method has been developed using data from several ongoing longitudinal studies analyzing ai implementations in the aec industry, which involved 50+ hours of interview data. the leanai method delineates what ai should solve, what it can solve, and what it will solve, forcing practitioners to clearly articulate these components early in the planning process itself by involving the relevant stakeholders. by utilizing the method, practitioners can effectively plan ai implementations, thus increasing the likelihood of success and ultimately speeding up the adoption of ai. a case example illustrates the usefulness of the method. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/938.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/938.txt new file mode 100644 index 0000000000000000000000000000000000000000..07d559b19b963e1092e4a6c1d8146358e218487a --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/938.txt @@ -0,0 +1 @@ + as artificial intelligence plays an increasingly substantial role in decisions affecting humans and society, the accountability of automated decision systems has been receiving increasing attention from researchers and practitioners. fairness, which is concerned with eliminating unjust treatment and discrimination against individuals or sensitive groups, is a critical aspect of accountability. yet, for evaluating fairness, there is a plethora of fairness metrics in the literature that employ different perspectives and assumptions that are often incompatible. this work focuses on group fairness. most group fairness metrics desire a parity between selected statistics computed from confusion matrices belonging to different sensitive groups. generalizing this intuition, this paper proposes a new equal confusion fairness test to check an automated decision system for fairness and a new confusion parity error to quantify the extent of any unfairness. to further analyze the source of potential unfairness, an appropriate post hoc analysis methodology is also presented. the usefulness of the test, metric, and post hoc analysis is demonstrated via a case study on the controversial case of compas, an automated decision system employed in the us to assist judges with assessing recidivism risks. overall, the methods and metrics provided here may assess automated decision systems' fairness as part of a more extensive accountability assessment, such as those based on the system accountability benchmark. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/939.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/939.txt new file mode 100644 index 0000000000000000000000000000000000000000..7e934117d39847853a527c08228c34568bb1c1dc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/939.txt @@ -0,0 +1 @@ + this paper investigates the challenges and opportunities of implementing the african union convention on cyber security and personal data protection (audpc) across africa. focusing on legal, regulatory, technical, infrastructural, capacity building, awareness, harmonization, and cross-border cooperation challenges, the paper identifies key findings that highlight the diverse legal systems and traditions, the lack of comprehensive data protection laws, the need to balance national security and data privacy, the digital divide, cybersecurity threats, implications of emerging technologies on data privacy, limited resources for data protection authorities, and the need for capacity building in data privacy and protection. the paper also emphasizes the importance of harmonization and cross-border cooperation in aligning data protection frameworks and collaborating with international partners and global organizations. 
to address these challenges and facilitate the successful implementation of the audpc, the paper proposes a set of recommendations, including strengthening legal and regulatory frameworks, enhancing technical and infrastructural capacities, fostering capacity-building and awareness initiatives, promoting harmonization and cross-border cooperation, and engaging with global data protection trends and developments. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/94.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/94.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/940.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/940.txt new file mode 100644 index 0000000000000000000000000000000000000000..452517172f27423840512c09261d4a3c7d3f4dce --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/940.txt @@ -0,0 +1 @@ + background:reliable prediction of clinical progression over time can improve the outcomes of depression. little work has been done integrating various risk factors for depression, to determine the combinations of factors with the greatest utility for identifying which individuals are at the greatest risk. method:this study demonstrates that data-driven machine learning (ml) methods such as re-em (random effects/expectation maximization) trees and merf (mixed effects random forest) can be applied to reliably identify variables that have the greatest utility for classifying subgroups at greatest risk for depression. 185 young adults completed measures of depression risk, including rumination, worry, negative cognitive styles, cognitive and coping flexibilities, and negative life events, along with symptoms of depression. we trained re-em trees and merf algorithms and compared them to traditional linear mixed models (lmms) predicting depressive symptoms prospectively and concurrently with cross-validation. results:our results indicated that the re-em tree and merf methods model complex interactions, identify subgroups of individuals and predict depression severity comparable to lmm. further, machine learning models determined that brooding, negative life events, negative cognitive styles, and perceived control were the most relevant predictors of future depression levels. conclusions:random effects machine learning models have the potential for high clinical utility and can be leveraged for interventions to reduce vulnerability to depression. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/941.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/941.txt new file mode 100644 index 0000000000000000000000000000000000000000..329473024ffc6af0ff24cf76d721f1a206b12341 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/941.txt @@ -0,0 +1 @@ + recently, the weight-sharing technique has significantly speeded up the training and evaluation procedure of neural architecture search. however, most existing weight-sharing strategies are solely based on experience or observation, which makes the searching results lack interpretability and rationality. in addition, due to the negligence of fairness, current methods are prone to make misjudgments in module evaluation. to address these problems, we propose a novel neural architecture search algorithm based on dynamical isometry. 
we use the fixed point analysis method in the mean field theory to analyze the dynamics behavior in the steady state random neural network, and how dynamic isometry guarantees the fairness of weight-sharing based nas. meanwhile, we prove that our module selection strategy is rigorously fair by estimating the generalization error of all modules with well-conditioned jacobian. extensive experiments show that, with the same size, the architecture searched by the proposed method can achieve state-of-the-art top-1 validation accuracy on imagenet classification. in addition, we demonstrate that our method is able to achieve better and more stable training performance without loss of generality. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/942.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/942.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f07b60efbb618c03cb24ccadedfa934743cddac --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/942.txt @@ -0,0 +1 @@ + the article aims to analyze the performance of chatgpt, a large language model developed by openai, in the context of cardiology and vascular pathologies. the study evaluated the accuracy of chatgpt in answering challenging multiple-choice questions (qcm) using a dataset of 190 questions from the siamois-qcm platform. the goal was to assess chatgpt's potential as a valuable tool in medical education compared to two well-ranked students of medicine. the results showed that chatgpt outperformed the students, scoring 175 out of 190 correct answers with a percentage of 92.10%, while the two students achieved scores of 163 and 159 with percentages of 85.78% and 82.63%, respectively. these results showcase how chatgpt has the potential to be highly effective in the fields of cardiology and vascular pathologies by providing accurate answers to relevant questions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/943.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/943.txt new file mode 100644 index 0000000000000000000000000000000000000000..2ce572337c4451307c87f04594a33f5aa67a6549 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/943.txt @@ -0,0 +1 @@ + we investigate performance disparities in deep classifiers. we find that the ability of classifiers to separate individuals into subgroups varies substantially across medical imaging modalities and protected characteristics; crucially, we show that this property is predictive of algorithmic bias. through theoretical analysis and extensive empirical evaluation, we find a relationship between subgroup separability, subgroup disparities, and performance degradation when models are trained on data with systematic bias such as underdiagnosis. our findings shed new light on the question of how models become biased, providing important insights for the development of fair medical imaging ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/944.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/944.txt new file mode 100644 index 0000000000000000000000000000000000000000..17bfc6dfc534fdb699aef0243243f364049939df --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/944.txt @@ -0,0 +1 @@ + as cyber-attacks continue to increase in frequency and sophistication, organisations must be better prepared to face the reality of an incident.
any organisational plan that intends to be successful at managing security risks must clearly understand the harm (i.e., negative impact) and the various parties affected in the aftermath of an attack. to this end, this article conducts a novel exploration into the multitude of real-world harms that can arise from cyber-attacks, with a particular focus on ransomware incidents given their current prominence. this exploration also leads to the proposal of a new, robust methodology for modelling harms from such incidents. we draw on publicly-available case data on high-profile ransomware incidents to examine the types of harm that emerge at various stages after a ransomware attack and how harms (e.g., an offline enterprise server) may trigger other negative, potentially more substantial impacts for stakeholders (e.g., the inability for a customer to access their social welfare benefits or bank account). prominent findings from our analysis include the identification of a notable set of social/human harms beyond the business itself (and beyond the financial payment of a ransom) and a complex web of harms that emerge after attacks regardless of the industry sector. we also observed that deciphering the full extent and sequence of harms can be a challenging undertaking because of the lack of complete data available. this paper consequently argues for more transparency on ransomware harms, as it would lead to a better understanding of the realities of these incidents to the benefit of organisations and society more generally. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/945.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/945.txt new file mode 100644 index 0000000000000000000000000000000000000000..76099c869091bb899089fe3f60fb43cdae0dbeb1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/945.txt @@ -0,0 +1 @@ + recent work has proposed a power law relationship, referred to as "scaling laws, " between the performance of artificial intelligence (ai) models and aspects of those models' design (e.g., dataset size). in other words, as the size of a dataset (or model parameters, etc) increases, the performance of a given model trained on that dataset will correspondingly increase. however, while compelling in the aggregate, this scaling law relationship overlooks the ways that metrics used to measure performance may be precarious and contested, or may not correspond with how different groups of people may perceive the quality of models' output. in this paper, we argue that as the size of datasets used to train large ai models grows, the number of distinct communities (including demographic groups) whose data is included in a given dataset is likely to grow, each of whom may have different values. as a result, there is an increased risk that communities represented in a dataset may have values or preferences not captured by (or in the worst case, at odds with) the metrics used to evaluate model performance for scaling laws. we end the paper with implications for ai scaling laws-that models may not, in fact, continue to improve as the datasets get larger-at least not for all people or communities impacted by those models. 
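For concreteness, the power-law relationship between dataset size and model error that the scaling-laws abstract above (945.txt) discusses can be fitted with a short log-log regression; the sizes and error values below are made-up illustrative numbers, assuming Python with numpy.

import numpy as np

# hypothetical (dataset size, test error) pairs -- illustrative numbers only
sizes = np.array([1e4, 1e5, 1e6, 1e7])
errors = np.array([0.30, 0.21, 0.15, 0.11])

# fit error ~= a * size**(-b) by linear regression in log-log space
slope, intercept = np.polyfit(np.log(sizes), np.log(errors), 1)
a, b = np.exp(intercept), -slope
print(f"error ~= {a:.2f} * N^(-{b:.2f})")

# such an aggregate fit says nothing about whether every community represented in the
# data sees the same improvement, which is the caveat raised in the abstract above.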
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/946.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/946.txt new file mode 100644 index 0000000000000000000000000000000000000000..efacd24c0e9ad792474352879e865c53c46dc444 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/946.txt @@ -0,0 +1 @@ + the scope of this paper was to find out how the students in computer science perceive different teaching styles and how the teaching style impacts the learning desire and interest in the course. to find out, we designed and implemented an experiment in which the same groups of students (86 students) were exposed to different teaching styles (presented by the same teacher at a difference of two weeks between lectures). we tried to minimize external factors' impact by carefully selecting the dates (close ones), having the courses in the same classroom and on the same day of the week, at the same hour, and checking the number and the complexity of the introduced items to be comparable. we asked for students' feedback and we define a set of countable body signs for their involvement in the course. the results were comparable by both metrics (body language) and text analysis results, students prefer a more interactive course, with a relaxing atmosphere, and are keener to learn in these conditions. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/947.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/947.txt new file mode 100644 index 0000000000000000000000000000000000000000..acbbff9e35a1605851e0aaf5d3c55f0811e2c594 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/947.txt @@ -0,0 +1 @@ + counterspeech offers direct rebuttals to hateful speech by challenging perpetrators of hate and showing support to targets of abuse. it provides a promising alternative to more contentious measures, such as content moderation and deplatforming, by contributing a greater amount of positive online speech rather than attempting to mitigate harmful content through removal. advances in the development of large language models mean that the process of producing counterspeech could be made more efficient by automating its generation, which would enable large-scale online campaigns. however, we currently lack a systematic understanding of several important factors relating to the efficacy of counterspeech for hate mitigation, such as which types of counterspeech are most effective, what are the optimal conditions for implementation, and which specific effects of hate it can best ameliorate. this paper aims to fill this gap by systematically reviewing counterspeech research in the social sciences and comparing methodologies and findings with computer science efforts in automatic counterspeech generation. by taking this multi-disciplinary view, we identify promising future directions in both fields. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/948.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/948.txt new file mode 100644 index 0000000000000000000000000000000000000000..12388b6a50cb90e2a8a13978e7f2a7bf718dbe43 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/948.txt @@ -0,0 +1 @@ + current research in the computer vision field mainly focuses on improving deep learning (dl) correctness and inference time performance. 
however, there is still little work on the huge carbon footprint that has training dl models. this study aims to analyze the impact of the model architecture and training environment when training greener computer vision models. we divide this goal into two research questions. first, we analyze the effects of model architecture on achieving greener models while keeping correctness at optimal levels. second, we study the influence of the training environment on producing greener models. to investigate these relationships, we collect multiple metrics related to energy efficiency and model correctness during the models' training. then, we outline the trade-offs between the measured energy efficiency and the models' correctness regarding model architecture, and their relationship with the training environment. we conduct this research in the context of a computer vision system for image classification. in conclusion, we show that selecting the proper model architecture and training environment can reduce energy consumption dramatically (up to 81.38%) at the cost of negligible decreases in correctness. also, we find evidence that gpus should scale with the models' computational complexity for better energy efficiency. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/949.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/949.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/95.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/95.txt new file mode 100644 index 0000000000000000000000000000000000000000..2d597904724556a8bb45aa688741360805b332b2 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/95.txt @@ -0,0 +1 @@ + highlights• 9 companies from 3 domains (construction, electric t&d, oil & gas) shared their accident datasets.• machine learning models were trained to predict safety outcomes from fundamental attributes.• models trained on all datasets (full generic models) outperformed the company-specific models in 82% of the company-domain-outcome combinations, with large gains in f1 score (+4.4 on average and up to +15.3).• on average, generic models predicted 2.26 categories more than specific models (up to 7), making for more useful forecasts in practice.• per-domain generic models were not always better than full generic models.• combining generic and specific models (data quantity and relevance) was often very beneficial.• generic models give companies devoid of accident datasets access to safety predictions.• generic models address safety cross-organizational learning and dissemination in construction.in this study, we capitalized on a collective dataset repository of 57k accidents from 9 companies belonging to 3 domains and tested whether models trained on multiple datasets (generic models) predicted safety outcomes better than the company-specific models. we experimented with full generic models (trained on all data), per-domain generic models (construction, electric t&d, oil & gas), and with ensembles of generic and specific models. results are very positive, with generic models outperforming the company-specific models in most cases while also generating finer-grained, hence more useful, forecasts. 
successful generic models remove the needs for training company-specific models, saving a lot of time and resources, and give small companies, whose accident datasets are too limited to train their own models, access to safety outcome predictions. it may still however be advantageous to train specific models to get an extra boost in performance through ensembling with the generic models. overall, by learning lessons from a pool of datasets whose accumulated experience far exceeds that of any single company, and making these lessons easily accessible in the form of simple forecasts, generic models tackle the holy grail of safety cross-organizational learning and dissemination in the construction industry. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/950.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/950.txt new file mode 100644 index 0000000000000000000000000000000000000000..3988167584c3343aa36d299d9b9c6c3ffd82b7cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/950.txt @@ -0,0 +1 @@ + generative ai techniques have opened the path for new generations of machines in diverse domains. these machines have various capabilities for example, they can produce images, generate answers or stories, and write codes based on the "prompts" only provided by users. these machines are considered 'thinking minds' because they have the ability to generate human-like responses. in this study, we have analyzed and explored the capabilities of artificial intelligence-enabled machines. we have revisited on turing's concept of thinking machines and compared it with recent technological advancements. the objections and consequences of the thinking machines are also discussed in this study, along with available techniques to evaluate machines' cognitive capabilities. we have concluded that turing test is a critical aspect of evaluating machines' ability. however, there are other aspects of intelligence too, and ai machines exhibit most of these aspects. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/951.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/951.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a2c28f2321106725dbf331b87ae1a6fb2a1609f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/951.txt @@ -0,0 +1 @@ + we analyze sentiment analysis and toxicity detection models to detect the presence of explicit bias against people with disability (pwd). we employ the bias identification framework of perturbation sensitivity analysis to examine conversations related to pwd on social media platforms, specifically twitter and reddit, in order to gain insight into how disability bias is disseminated in real-world social settings. we then create the bias identification test in sentiment (bits) corpus to quantify explicit disability bias in any sentiment analysis and toxicity detection models. our study utilizes bits to uncover significant biases in four open aiaas (ai as a service) sentiment analysis tools, namely textblob, vader, google cloud natural language api, distilbert and two toxicity detection models, namely two versions of toxic-bert. our findings indicate that all of these models exhibit statistically significant explicit bias against pwd. 
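A small sketch of the perturbation-sensitivity idea behind the BITS-style probing described in 951.txt above: measure how a scorer's output shifts when a neutral referent is replaced by disability-related terms. The templates, term list, and dummy scorer are illustrative stand-ins, not the released corpus or any specific tool; Python standard library only.

import statistics

def perturbation_sensitivity(score, templates, terms, neutral="person"):
    # score(text) -> float is any sentiment/toxicity scorer (stand-in for a real model)
    # templates contain a "{w}" slot; terms are the group-specific perturbation words
    shifts = []
    for template in templates:
        base = score(template.format(w=neutral))
        for w in terms:
            shifts.append(score(template.format(w=w)) - base)
    return statistics.mean(shifts), statistics.pstdev(shifts)

# illustrative use with a dummy scorer; a real analysis would plug in an actual model
templates = ["i met a {w} at the park.", "the {w} asked a question."]
terms = ["blind person", "wheelchair user", "deaf person"]
dummy_score = lambda text: -0.1 if "wheelchair" in text else 0.0
print(perturbation_sensitivity(dummy_score, templates, terms))

A mean shift far from zero, or a large spread across terms, signals that the scorer reacts to the group mention itself rather than to the sentence's content.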
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/952.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/952.txt new file mode 100644 index 0000000000000000000000000000000000000000..7fc42e505124c2d58961465fc1ac6b82f55a59a7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/952.txt @@ -0,0 +1 @@ + this paper presents a study on using agile methodologies in the teaching process at the university/college level during the covid-19 pandemic, online classes. we detail a list of techniques inspired from software engineering agile methodologies that can be used in online teaching. we also show, by analyzing students grades, that these agile inspired techniques probably help in the educational process. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/953.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/953.txt new file mode 100644 index 0000000000000000000000000000000000000000..473e6fd7653f8bfd707aa21a95f5061f3cac3be0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/953.txt @@ -0,0 +1 @@ + research is facing a reproducibility crisis, in which the results and findings of many studies are difficult or even impossible to reproduce. this is also the case in machine learning (ml) and artificial intelligence (ai) research. often, this is the case due to unpublished data and/or source-code, and due to sensitivity to ml training conditions. although different solutions to address this issue are discussed in the research community such as using ml platforms, the level of reproducibility in mldriven research is not increasing substantially. therefore, in this mini survey, we review the literature on reproducibility in ml-driven research with three main aims: (i) reflect on the current situation of ml reproducibility in various research fields, (ii) identify reproducibility issues and barriers that exist in these research fields applying ml, and (iii) identify potential drivers such as tools, practices, and interventions that support ml reproducibility. with this, we hope to contribute to decisions on the viability of different solutions for supporting ml reproducibility. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/954.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/954.txt new file mode 100644 index 0000000000000000000000000000000000000000..d45b8d608e6a9e4d232b7ddb7aabe54b44c563d9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/954.txt @@ -0,0 +1 @@ + we consider the vulnerability of fairness-constrained learning to small amounts of malicious noise in the training data. konstantinov and lampert (2021) initiated the study of this question and presented negative results showing there exist data distributions where for several fairness constraints, any proper learner will exhibit high vulnerability when group sizes are imbalanced. here, we present a more optimistic view, showing that if we allow randomized classifiers, then the landscape is much more nuanced. for example, for demographic parity we show we can incur only a θ(α) loss in accuracy, where α is the malicious noise rate, matching the best possible even without fairness constraints. for equal opportunity, we show we can incur an o( √ α) loss, and give a matching ω( √ α) lower bound. in contrast, konstantinov and lampert (2021) showed for proper learners the loss in accuracy for both notions is ω(1). 
the key technical novelty of our work is how randomization can bypass simple "tricks" an adversary can use to amplify his power. we also consider additional fairness notions including equalized odds and calibration. for these fairness notions, the excess accuracy clusters into three natural regimes o(α),o( √ α), and o(1). these results provide a more fine-grained view of the sensitivity of fairness-constrained learning to adversarial noise in training data. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/955.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/955.txt new file mode 100644 index 0000000000000000000000000000000000000000..2c878a83110d737e834aa26dc97d7c77d01a38f3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/955.txt @@ -0,0 +1 @@ + there have been many papers with algorithms for improving fairness of machine-learning classifiers for tabular data. unfortunately, most use only very few datasets for their experimental evaluation. we introduce a suite of functions for fetching 20 fairness datasets and providing associated fairness metadata. hopefully, these will lead to more rigorous experimental evaluations in future fairness-aware machine learning research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/956.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/956.txt new file mode 100644 index 0000000000000000000000000000000000000000..832968a919fdaaafd887d32fbc6fef942b7c7d7d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/956.txt @@ -0,0 +1 @@ + recent studies show that instruction tuning (it) and reinforcement learning from human feedback (rlhf) improve the abilities of large language models (lms) dramatically. while these tuning methods can help align models with human objectives and generate high-quality text, not much is known about their potential adverse effects. in this work, we investigate the effect of it and rlhf on decision making and reasoning in lms, focusing on three cognitive biases-the decoy effect, the certainty effect, and the belief bias-all of which are known to influence human decision-making and reasoning. our findings highlight the presence of these biases in various models from the gpt-3, mistral, and t5 families. notably, we find a stronger presence of biases in models that have undergone instruction tuning, such as flan-t5, mistral-instruct, gpt3.5, and gpt4. our work constitutes a step toward comprehending cognitive biases in instruction-tuned lms, which is crucial for the development of more reliable and unbiased language models. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/957.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/957.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b70dcc422148dfd98c210749ec370e4fb64251c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/957.txt @@ -0,0 +1 @@ + bias in applications of machine learning (ml) to healthcare is usually attributed to unrepresentative or incomplete data, or to underlying health disparities. this article identifies a more pervasive source of bias that affects the clinical utility of ml-enabled prediction tools: target specification bias. target specification bias arises when the operationalization of the target variable does not match its definition by decision makers. 
the mismatch is often subtle, and stems from the fact that decision makers are typically interested in predicting the outcomes of counterfactual, rather than actual, healthcare scenarios. target specification bias persists independently of data limitations and health disparities. when left uncorrected, it gives rise to an overestimation of predictive accuracy, to inefficient utilization of medical resources, and to suboptimal decisions that can harm patients. recent work in metrology -the science of measurement -suggests ways of counteracting target specification bias and avoiding its harmful consequences. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/958.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/958.txt new file mode 100644 index 0000000000000000000000000000000000000000..a6cc387ccf9f72d4150690c4703efc560fd2c1de --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/958.txt @@ -0,0 +1 @@ + generative ai and large language models have the potential to drastically improve the landscape of computing education by automatically generating personalized feedback and content. recent works have studied the capabilities of these models for different programming education scenarios; however, these works considered only text-based programming, in particular, python programming. consequently, they leave open the question of how well these models would perform in visual programming domains popularly used for k-8 programming education. the main research question we study is: do state-of-the-art generative models show advanced capabilities in visual programming on par with their capabilities in text-based python programming? in our work, we evaluate two models, chatgpt (based on gpt-3.5) and gpt-4, in visual programming domains for various scenarios and assess performance using expert-based annotations. in particular, we base our evaluation using reference tasks from the domains of hour of code: maze challenge by code.org and karel. our results show that these models perform poorly and struggle to combine spatial, logical, and programming skills crucial for visual programming. these results also provide exciting directions for future work on developing techniques to improve the performance of generative models in visual programming. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/959.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/959.txt new file mode 100644 index 0000000000000000000000000000000000000000..53fdabea0d5d6e454b6ea9fbcf53f74306ed8757 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/959.txt @@ -0,0 +1 @@ + world wide web is speeding up its pace into an intelligent and decentralized ecosystem, as seen in the campaign of web 3.0 and forthcoming web 4.0. marked by the european commission's latest mention of web 4.0, a race towards strategic web 4.0 success has started. web 4.0 is committed to bringing the next technological transition with an open, secure, trustworthy fairness and digital ecosystem for individuals and businesses in private and public sectors.
despite overlapping scopes and objectives of web 3.0 and web 4.0 from academic and industrial perspectives, there are distinct and definitive features and gaps for the next generation of www. in this review, a brief introduction to www development unravels the entangled but consistent requirement of a more vivid web experience, enhancing human-centric experience in both societal and technical aspects. moreover, the review brings a decentralized intelligence prospect of view on native ai entities for web 4.0, envisioning sustainable, autonomous and decentralized ai services for the entire web 4.0 environment, powering a self-sustainable decentralized physical and software infrastructure for computing force network, semantic network, virtual/mixed reality, and privacy-preserving content presumption.the review aims to reveal that web 4.0 offers native intelligence with focused thinking on utilizing decentralized physical infrastructure, in addition to sole requirements on decentralization, bridging the gap between web 4.0 and web 3.0 advances with the latest future-shaping blockchain-enabled computing and network routing protocols. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/96.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/96.txt new file mode 100644 index 0000000000000000000000000000000000000000..eba6ba4ee48d2785929dc3378c16eb88e188308b --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/96.txt @@ -0,0 +1 @@ + neural networks are composed of multiple layers arranged in a hierarchical structure jointly trained with a gradient-based optimization, where the errors are back-propagated from the last layer back to the first one. at each optimization step, neurons at a given layer receive feedback from neurons belonging to higher layers of the hierarchy. in this paper, we propose to complement this traditional 'between-layer' feedback with additional 'within-layer' feedback to encourage the diversity of the activations within the same layer. to this end, we measure the pairwise similarity between the outputs of the neurons and use it to model the layer's overall diversity. we present an extensive empirical study confirming that the proposed approach enhances the performance of several stateof-the-art neural network models in multiple tasks. the code is publically available at https://github.com/firasl/aaai-23-wld-reg \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/960.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/960.txt new file mode 100644 index 0000000000000000000000000000000000000000..a5fbf1ea25e2dfdbbd42a3734fb7ebe97cd26665 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/960.txt @@ -0,0 +1 @@ + the development of artificial agents for social interaction pushes to enrich robots with social skills and knowledge about (local) social norms. one possibility is to distinguish the expressive and the functional orders during a human-robot interaction. the overarching aim of this work is to set a framework to make the artificial agent socially-competent beyond dyadic interaction -interaction in varying multi-party social situations-and beyond individual-based user personalization, thereby enlarging the current conception of "culturally-adaptive". the core idea is to provide the artificial agent with the capability to handle different kinds of interactional disruptions, and associated recovery strategies, in microsociology. 
the result is obtained by classifying functional and social disruptions, and by investigating the requirements a robot's architecture should satisfy to exploit such knowledge. the paper also highlights how this level of competence is achieved by focusing on just three dimensions: (i) social capability, (ii) relational role, and (iii) proximity, leaving aside the further complexity of full-fledged humanhuman interactions. without going into technical aspects, end-to-end data-driven architectures and modular architectures are discussed to evaluate the degree to which they can exploit this new set of social and cultural knowledge. finally, a list of general requirements for such agents is proposed. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/961.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/961.txt new file mode 100644 index 0000000000000000000000000000000000000000..37701641371d6d650b61f3459374a5db2958f85f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/961.txt @@ -0,0 +1 @@ + aiming for a mixbiotic society that combines freedom and solidarity among people with diverse values, i focused on nonviolent communication (nvc) that enables compassionate giving in various situations of social division and conflict, and tried a generative ai for it. specifically, chatgpt was used in place of the traditional certified trainer to test the possibility of mediating (modifying) input sentences in four processes: observation, feelings, needs, and requests. the results indicate that there is potential for the application of generative ai, although not yet at a practical level. suggested improvement guidelines included adding model responses, relearning revised responses, specifying appropriate terminology for each process, and re-asking for required information. the use of generative ai will be useful initially to assist certified trainers, to prepare for and review events and workshops, and in the future to support consensus building and cooperative behavior in digital democracy, platform cooperatives, and cyber-human social co-operating systems. it is hoped that the widespread use of nvc mediation using generative ai will lead to the early realization of a mixbiotic society. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/962.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/962.txt new file mode 100644 index 0000000000000000000000000000000000000000..382516349a4d873615d5ddaa70bbaa8456c172c8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/962.txt @@ -0,0 +1 @@ + growing apprehensions surrounding public safety have captured the attention of numerous governments and security agencies across the globe. these entities are increasingly acknowledging the imperative need for reliable and secure crowd-monitoring systems to address these concerns. effectively managing human gatherings necessitates proactive measures to prevent unforeseen events or complications, ensuring a safe and well-coordinated environment. the scarcity of research focusing on crowd monitoring systems and their security implications has given rise to a burgeoning area of investigation, exploring potential approaches to safeguard human congregations effectively. crowd monitoring systems depend on a bifurcated approach, encompassing vision-based and non-vision-based technologies. an in-depth analysis of these two methodologies will be conducted in this research. 
the efficacy of these approaches is contingent upon the specific environment and temporal context in which they are deployed, as they each offer distinct advantages. this paper endeavors to present an in-depth analysis of the recent incorporation of artificial intelligence (ai) algorithms and models into automated systems, emphasizing their contemporary applications and effectiveness in various contexts. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/963.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/963.txt new file mode 100644 index 0000000000000000000000000000000000000000..a37f5e91ad159e76dd63818ae74aea65732c74a3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/963.txt @@ -0,0 +1 @@ + can an ai win ghana's national science and maths quiz (nsmq)? that is the question we seek to answer in the nsmq ai project, an open-source project that is building ai to compete live in the nsmq and win. the nsmq is an annual live science and mathematics competition for senior secondary school students in ghana in which 3 teams of 2 students compete by answering questions across biology, chemistry, physics, and math in 5 rounds over 5 progressive stages until a winning team is crowned for that year. the nsmq is an exciting live quiz competition with interesting technical challenges across speech-to-text, text-tospeech, question-answering, and human-computer interaction. in this ongoing work that began in january 2023, we give an overview of the project, describe each of the teams, progress made thus far, and the next steps toward our planned launch and debut of the ai in october for nsmq 2023. an ai that conquers this grand challenge can have real-world impact on education such as enabling millions of students across africa to have one-on-one learning support from this ai. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/964.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/964.txt new file mode 100644 index 0000000000000000000000000000000000000000..f53a110f95bf18aa4d171501d5c5d568f4c0da66 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/964.txt @@ -0,0 +1 @@ + artificial intelligence is bringing ever new functionalities to the realm of mobile devices that are now considered essential (e.g., camera and voice assistants, recommender systems). yet, operating artificial intelligence takes up a substantial amount of energy. however, artificial intelligence is also being used to enable more energy-efficient solutions for mobile systems. hence, artificial intelligence has two faces in that regard, it is both a key enabler of desired (efficient) mobile functionalities and a major power draw on these devices, playing a part in both the solution and the problem. in this paper, we present a review of the literature of the past decade on the usage of artificial intelligence within the realm of green mobile computing. from the analysis of 34 papers, we highlight the emerging patterns and map the field into 13 main topics that are summarized in details.our results showcase that the field is slowly increasing in the past years, more specifically, since 2019. regarding the double impact ai has on the mobile energy consumption, the energy consumption of ai-based mobile systems is under-studied in comparison to the usage of ai for energy-efficient mobile computing, and we argue for more exploratory studies in that direction. 
we observe that although most studies are framed as solution papers (94%), the large majority do not make those solutions publicly available to the community. moreover, we also show that most contributions are purely academic (28 out of 34 papers) and that we need to promote the involvement of the mobile software industry in this field. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/965.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/965.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c78ec9dd92f6b4187aed83c7f340aa337c8c859 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/965.txt @@ -0,0 +1 @@ + data science is a modern data intelligence practice that is at the core of many businesses and helps them build smart strategies to deal with business challenges more efficiently. data science practice also helps in automating business processes using algorithms, and it has several other benefits, which it can also deliver in a non-profit framework. in regard to data science, three key components primarily influence the effective outcome of a data science project: 1) availability of data, 2) the algorithm, and 3) processing power / infrastructure. in today's technology world, there is no limitation on data or processing power, and we have much more efficient algorithms to produce the desired output. in spite of these advances, many data science projects still fail and are unable to produce the desired outcome. in this paper, we have explored the bottlenecks of data science projects and provided some recommendations to make data science projects more successful. the standard data science project development lifecycle, crisp-dm, is old in this agile development world, yet most data science practices still follow crisp-dm. in general, data scientists analyze scenarios where a predictive model or machine learning model might fail, but this study analyzes when and why a data science project fails despite an excellent model. data science is a diverse field. it needs technical as well as business knowledge to deliver a project. hence, to understand why data science projects fail, we need to understand challenges from both sides: 1) the technical perspective and 2) the business leader or stakeholder perspective. it has also been observed that the success of a data science project depends on the business domain; for example, a market propensity model, a strategic market campaign method, is more successful in a retail use case than a fraud analytic model in the banking fraud domain. so a domain-agnostic framework was implemented in this research to make it independent of the business domain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/966.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/966.txt new file mode 100644 index 0000000000000000000000000000000000000000..ce9b1815c6df245a047249e7b7e44d74b90489a1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/966.txt @@ -0,0 +1 @@ + the growing demand for short-term property renting has led to the boom of a new category called proptech (property + technology). such a rise in demand attracted many entrepreneurs and investors, leading to the inception of powerful and centralised players in the category (like airbnb).
unfortunately, some big players are controlling the entire industry in a centralised way, in turn performing anti-competitive practices which leave no room for the small players. such an ecosystem can be made more transparent and decentralised by executing transactions and order-fulfilment on a blockchain. to extend the functionality of the system, we can also perform the renting of small services and appliances via the integration of iot. implementing such a system will make the process more transparent, robust and decentralised. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/967.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/967.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3bc4858709db14d7f926828144eb9f319da382e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/967.txt @@ -0,0 +1 @@ + accurately predicting the destination of taxi trajectories can have various benefits for intelligent location-based services. one potential method to accomplish this prediction is by converting the taxi trajectory into a two-dimensional grid and using computer vision techniques. while the swin transformer is an innovative computer vision architecture with demonstrated success in vision downstream tasks, it is not commonly used to solve real-world trajectory problems. in this paper, we propose a simplified swin transformer (sst) structure that does not use the shifted window idea in the traditional swin transformer, as trajectory data is consecutive in nature. our comprehensive experiments, based on real trajectory data, demonstrate that sst can achieve higher accuracy compared to state-of-the-art methods. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/968.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/968.txt new file mode 100644 index 0000000000000000000000000000000000000000..a48e08a04ad1c1bb93de097f33ea60e103e9eee4 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/968.txt @@ -0,0 +1 @@ + large language models (llms) exhibit remarkable fluency and competence across various natural language tasks. however, recent research has highlighted their sensitivity to variations in input prompts. to deploy llms in a safe and reliable manner, it is crucial for their outputs to be consistent when prompted with expressions that carry the same meaning or intent. while some existing work has explored how state-of-the-art llms address this issue, their evaluations have been confined to assessing lexical equality of single- or multi-word answers, overlooking the consistency of generative text sequences. for a more comprehensive understanding of the consistency of llms in open-ended text generation scenarios, we introduce a general measure of semantic consistency, and formulate multiple versions of this metric to evaluate the performance of various llms. our proposal demonstrates significantly higher consistency and stronger correlation with human evaluations of output consistency than traditional metrics based on lexical consistency. finally, we propose a novel prompting strategy, called ask-to-choose (a2c), to enhance semantic consistency. when evaluated for closed-book question answering based on answer variations from the truthfulqa benchmark, a2c increases accuracy metrics for pretrained and finetuned llms by up to 47%, and semantic consistency metrics for instruction-tuned models by up to 7-fold.
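The abstract for 968.txt above introduces a semantic-consistency measure without spelling out a formula; a minimal sketch of one plausible formulation, scoring several generations for the same question by their mean pairwise cosine similarity, is given below. The encoder and the model name are assumptions for illustration, not the paper's actual metric.

import numpy as np
from itertools import combinations
from sentence_transformers import SentenceTransformer  # any sentence encoder could stand in here

encoder = SentenceTransformer("all-MiniLM-L6-v2")  # example model choice, not the paper's

def semantic_consistency(generations):
    """Mean pairwise cosine similarity of embedded generations for one prompt."""
    emb = encoder.encode(generations)
    emb = emb / np.linalg.norm(emb, axis=1, keepdims=True)  # unit-normalise rows
    pairs = combinations(range(len(generations)), 2)
    return float(np.mean([emb[i] @ emb[j] for i, j in pairs]))

# outputs sampled for paraphrases of the same question (made-up examples)
outputs = [
    "you cannot see the great wall of china from space with the naked eye.",
    "the great wall is not visible to unaided astronauts in orbit.",
    "astronauts report the wall cannot be seen without magnification.",
]
print(round(semantic_consistency(outputs), 3))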
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/969.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/969.txt new file mode 100644 index 0000000000000000000000000000000000000000..b6e987c17b762a1b08fa34153f86b58989c5f249 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/969.txt @@ -0,0 +1 @@ + we propose a new way to assess certain short constructed responses to mathematics items. our approach uses a pipeline that identifies the key values specified by the student in their response. this allows us to determine the correctness of the response, as well as identify any misconceptions. the information from the value identification pipeline can then be used to provide feedback to the teacher and student. the value identification pipeline consists of two finetuned language models. the first model determines if a value is implicit in the student response. the second model identifies where in the response the key value is specified. we consider both a generic model that can be used for any prompt and value, as well as models that are specific to each prompt and value. the value identification pipeline is a more accurate and informative way to assess short constructed responses than traditional rubric-based scoring. it can be used to provide more targeted feedback to students, which can help them improve their understanding of mathematics. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/97.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/97.txt new file mode 100644 index 0000000000000000000000000000000000000000..6f33e5b1708b93e071b45affb908c13f39d5fabc --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/97.txt @@ -0,0 +1 @@ + multi-agent reinforcement learning (marl) is vulnerable to adversarial machine learning (aml) attacks and needs adequate defences before it can be used in real world applications. we have conducted a survey into the use of execution-time aml attacks against marl and the defences against those attacks. we surveyed related work in the application of aml in deep reinforcement learning (drl) and multi-agent learning (mal) to inform our analysis of aml for marl. we propose a novel perspective to understand the manner of perpetrating an aml attack, by defining attack vectors. we develop two new frameworks to address a gap in current modelling frameworks, focusing on the means and tempo of an aml attack against marl, and identify knowledge gaps and future avenues of research. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/970.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/970.txt new file mode 100644 index 0000000000000000000000000000000000000000..d79b004691d095b4ee27ffa884b83e92eb8bcfa6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/970.txt @@ -0,0 +1 @@ + online professional social networking platforms provide opportunities to expand networks strategically for job opportunities and career advancement. a large body of research shows that women's offline networks are less advantageous than men's. how online platforms such as linkedin may reflect or reproduce gendered networking behaviours, or how online social connectivity may affect outcomes differentially by gender is not well understood. 
this paper analyses aggregate, anonymised data from almost 10 million linkedin users in the uk and us information technology (it) sector collected from the site's advertising platform to explore how being connected to big tech companies ('social connectivity') varies by gender, and how gender, age, seniority and social connectivity shape the propensity to report job promotions or relocations. consistent with previous studies, we find there are fewer women compared to men on linkedin in it. furthermore, female users are less likely to be connected to big tech companies than men. however, when we further analyse recent promotion or relocation reports, we find women are more likely than men to have reported a recent promotion at work, suggesting high-achieving women may be selfselecting onto linkedin. even among this positively selected group, though, we find men are more likely to report a recent relocation. social connectivity emerges as a significant predictor of promotion and relocation reports, with an interaction effect between gender and social connectivity indicating the payoffs to social connectivity for promotion and relocation reports are larger for women. this suggests that online networking has the potential for larger impacts for women, who experience greater disadvantage in traditional networking contexts, and calls for further research to understand differential impacts of online networking for socially disadvantaged groups. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/971.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/971.txt new file mode 100644 index 0000000000000000000000000000000000000000..0f01b809e0b08479aec6e0f728772e81dc057f6f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/971.txt @@ -0,0 +1 @@ + the rise of powerful ai technology for a range of applications that are sensitive to legal, social, and ethical norms demands decision-making support in presence of norms and regulations. normative reasoning is the realm of deontic logics, that are challenged by well-known benchmark problems (deontic paradoxes), and lack efficient computational tools. in this paper, we use answer set programming (asp) for addressing these shortcomings and showcase how to encode and resolve several well-known deontic paradoxes utilizing weak constraints. by abstracting and generalizing this encoding, we present a methodology for translating normative systems in asp with weak constraints. this methodology is applied to "ethical" versions of pac-man, where we obtain a comparable performance with related works, but ethically preferable results. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/972.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/972.txt new file mode 100644 index 0000000000000000000000000000000000000000..923b0eb2e4c7ffa219540fd632c4f545da8f51ef --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/972.txt @@ -0,0 +1 @@ + while modern society benefits from a range of technological advancements, it also is exposed to an ever-increasing set of cybersecurity threats. these affect all areas of life including business, government and individuals. to complement technology solutions to this problem, it is crucial to understand more about cybercriminal perpetrators themselves, their use of technology, psychological aspects, and profiles. 
this is a topic that has received little socio-technical research emphasis in the technology community, has few concrete research findings, and is thus a prime area for development. the aim of this article is to explore cybercriminal activities and behavior from a psychology and human aspects perspective, through a series of notable case studies. we examine motivations, psychological and other interdisciplinary concepts as they may impact/influence cyber-criminal activities. we expect this paper to be of value and particularly insightful for those studying technology, psychology, and criminology, with a focus on cybersecurity and cybercrime. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/973.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/973.txt new file mode 100644 index 0000000000000000000000000000000000000000..f9275ba7d34736fbe25b2c6a70f2807b6f4681e1 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/973.txt @@ -0,0 +1 @@ + there appears to be a dilemma between the freedom of expression and protection from the adverse effects of uncivil political expression online. while previous studies have revealed various factors that affect attitudes toward freedom of expression and speech restrictions, it is less clear whether people have intergroup biases when forming these attitudes. to address this gap, the present study conducted a pre-registered online survey experiment and investigated people's attitudes toward uncivil political expression by randomizing its in-group and out-group affiliations. the results revealed that people tend to perceive uncivil political expression directed from an out-group toward an in-group as more uncivil, compared to the expression originating from an in-group toward an out-group. this difference subsequently influences their inclination to endorse speech restrictions when faced with uncivil political comments: stronger support for restrictions on expressions from the out-group toward the in-group as opposed to those from the in-group toward the \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/974.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/974.txt new file mode 100644 index 0000000000000000000000000000000000000000..21a67695d4b3044f0807456da0d9f6d3e04b15f3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/974.txt @@ -0,0 +1 @@ + the equitable distribution of academic data is crucial for ensuring equal research opportunities, and ultimately further progress. yet, due to the complexity of using the api for audio data that corresponds to the million song dataset along with its misreporting (before 2016) and the discontinuation of this api (after 2016), access to this data has become restricted to those within certain affiliations that are connected peer-to-peer. in this paper, we delve into this issue, drawing insights from the experiences of 22 individuals who either attempted to access the data or played a role in its creation. with this, we hope to initiate more critical dialogue and more thoughtful consideration with regard to access privilege in the mir community. 
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/975.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/975.txt new file mode 100644 index 0000000000000000000000000000000000000000..ea6ae31f7cb0b0092bdac5bc0e406cf2d1bc0e5d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/975.txt @@ -0,0 +1 @@ + ever since the emergence of large language models (llms) and related applications, such as chatgpt, its performance and error analysis for programming tasks have been subject to research. in this work-in-progress paper, we explore the potential of such llms for computing educators and learners, as we analyze the feedback it generates to a given input containing program code. in particular, we aim at (1) exploring how an llm like chatgpt responds to students seeking help with their introductory programming tasks, and (2) identifying feedback types in its responses. to achieve these goals, we used students' programming sequences from a dataset gathered within a cs1 course as input for chatgpt along with questions required to elicit feedback and correct solutions. the results show that chatgpt performs reasonably well for some of the introductory programming tasks and student errors, which means that students can potentially benefit. however, educators should provide guidance on how to use the provided feedback, as it can contain misleading information for novices. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/976.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/976.txt new file mode 100644 index 0000000000000000000000000000000000000000..17f1d49455282198525f019c73e9c857c38e5d03 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/976.txt @@ -0,0 +1 @@ + geospatial sciences include a wide range of applications, from environmental monitoring transportation to infrastructure planning, as well as location-based analysis and services. graph theory algorithms in mathematics have emerged as indispensable tools in these domains due to their capability to model and analyse spatial relationships efficiently. this article explores the applications of graph theory algorithms in geospatial sciences, highlighting their role in network analysis, spatial connectivity, geographic information systems, and various other spatial problem-solving scenarios like digital twin. the article provides a comprehensive idea about graph theory's key concepts and algorithms that assist the geospatial modelling processes and insights into real-world geospatial challenges and opportunities. it lists the extensive research, innovative technologies and methodologies implemented in this domain. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/977.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/977.txt new file mode 100644 index 0000000000000000000000000000000000000000..25001a76f65bf3c5d8a75dfa79d29f57d0c3e130 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/977.txt @@ -0,0 +1 @@ + personality types are important in various fields as they hold relevant information about the characteristics of a human being in an explainable format. they are often good predictors of a person's behaviors in a particular environment and have applications ranging from candidate selection to marketing and mental health. 
recently automatic detection of personality traits from texts has gained significant attention in computational linguistics. most personality detection and analysis methods have focused on small datasets making their experimental observations often limited. to bridge this gap, we focus on collecting and releasing the largest automatically curated dataset for the research community which has 152 million tweets and 56 thousand data points for the myers-briggs personality type (mbti) prediction task. we perform a series of extensive qualitative and quantitative studies on our dataset to analyze the data patterns in a better way and infer conclusions. we show how our intriguing analysis results often follow natural intuition. we also perform a series of ablation studies to show how the baselines perform for our dataset. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/978.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/978.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/979.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/979.txt new file mode 100644 index 0000000000000000000000000000000000000000..b293d57a6f543c6f3f9d747b1c61f02060b6196c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/979.txt @@ -0,0 +1 @@ + as large language models (llms) become more deeply integrated into various sectors, understanding how they make moral judgments has become crucial, particularly in the realm of autonomous driving. this study utilized the moral machine framework to investigate the ethical decision-making tendencies of prominent llms, including gpt-3.5, gpt-4, palm 2, and llama 2, comparing their responses to human preferences. while llms' and humans' preferences such as prioritizing humans over pets and favoring saving more lives are broadly aligned, palm 2 and llama 2, especially, evidence distinct deviations. additionally, despite the qualitative similarities between the llm and human preferences, there are significant quantitative disparities, suggesting that llms might lean toward more uncompromising decisions, compared to the milder inclinations of humans. these insights elucidate the ethical frameworks of llms and their potential implications for autonomous driving. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/98.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/98.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a76d97f22e81e40756af76b316b672cc34c107c --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/98.txt @@ -0,0 +1 @@ + despite the central importance of research papers to scientific progress, they can be difficult to read. comprehension is often stymied when the information needed to understand a passage resides somewhere else-in another section, or in another paper. in this work, we envision how interfaces can bring definitions of technical terms and symbols to readers when and where they need them most. 
we introduce scholarphi, an augmented reading interface with four novel features: (1) tooltips that surface position-sensitive definitions from elsewhere in a paper, (2) a filter over the paper that "declutters" it to reveal how the term or symbol is used across the paper, (3) automatic equation diagrams that expose multiple definitions in parallel, and (4) an automatically generated glossary of important terms and symbols. a usability study showed that the tool helps researchers of all experience levels read papers. furthermore, researchers were eager to have scholarphi's definitions available to support their everyday reading. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/980.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/980.txt new file mode 100644 index 0000000000000000000000000000000000000000..54341ce385450b4a0de3cf1e6d6135ffe97e0a41 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/980.txt @@ -0,0 +1 @@ + fair machine learning is a thriving and vibrant research topic. in this paper, we propose fairness as a service (faas), a secure, verifiable and privacy-preserving protocol to compute and verify the fairness of any machine learning (ml) model. in the design of faas, the data and outcomes are represented through cryptograms to ensure privacy. also, zero-knowledge proofs guarantee the well-formedness of the cryptograms and underlying data. faas is model-agnostic and can support various fairness metrics; hence, it can be used as a service to audit the fairness of any ml model. our solution requires no trusted third party or private channels for the computation of the fairness metric. the security guarantees and commitments are implemented in a way that every step is securely transparent and verifiable from the start to the end of the process. the cryptograms of all input data are publicly available for everyone, e.g., auditors, social activists and experts, to verify the correctness of the process. we implemented faas to investigate performance and demonstrate the successful use of faas for a publicly available data set with thousands of entries. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/981.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/981.txt new file mode 100644 index 0000000000000000000000000000000000000000..9f92b1f0cd7b4899d28a39a35adeda615cde045d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/981.txt @@ -0,0 +1 @@ + as large language models (llms) demonstrate increasingly human-like abilities in various natural language processing (nlp) tasks that are bound to become integral to personalized technologies, understanding their capabilities and inherent biases is crucial. our study investigates the potential of llms like chatgpt to infer psychological dispositions of individuals from their digital footprints. specifically, we assess the ability of gpt-3.5 and gpt-4 to derive the big five personality traits from users' facebook status updates in a zero-shot learning scenario. our results show an average correlation of r = .29 (range = ) between llm-inferred and self-reported trait scores. furthermore, our findings suggest biases in personality inferences with regard to gender and age: inferred scores demonstrated smaller errors for women and younger individuals on several traits, suggesting a potential systematic bias stemming from the underlying training data or differences in online self-expression.
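The abstract for 981.txt above reports an average correlation of r = .29 between LLM-inferred and self-reported trait scores; a minimal sketch of how such a per-trait Pearson correlation could be computed follows. All values below are invented placeholders, not the study's data.

import numpy as np
from scipy.stats import pearsonr

# hypothetical scores on a 1-5 scale; real data would come from questionnaires
# and from parsing the model's zero-shot ratings of each user's status updates
self_reported = {
    "openness":          np.array([3.2, 4.1, 2.8, 3.9, 4.5, 2.5]),
    "conscientiousness": np.array([2.9, 3.7, 4.2, 3.1, 3.8, 4.0]),
}
llm_inferred = {
    "openness":          np.array([3.5, 3.9, 3.0, 3.6, 4.2, 3.1]),
    "conscientiousness": np.array([3.1, 3.3, 3.9, 3.4, 3.5, 3.6]),
}

per_trait_r = {t: pearsonr(self_reported[t], llm_inferred[t])[0] for t in self_reported}
print(per_trait_r, "mean r =", round(float(np.mean(list(per_trait_r.values()))), 2))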
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/982.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/982.txt new file mode 100644 index 0000000000000000000000000000000000000000..44c3147059bcb7014246f4c54c2208e97ebdfe12 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/982.txt @@ -0,0 +1 @@ + background: as the impact of the covid-19 pandemic winds down, both individuals and society are gradually returning to life and activities before the pandemic. this study aims to explore how people's emotions have changed from the pre-pandemic period during the pandemic to this post-emergency period and whether the sentiment level nowadays has returned to the pre-pandemic level.method: we collected reddit social media data in 2019 (pre-pandemic), 2020 (peak period of the pandemic), 2021, and 2022 (late stages of the pandemic, transitioning period to the post-emergency period) from the subreddits communities in 128 universities/colleges in the u.s., and a set of school-level baseline characteristics such as location, enrollment, graduation rate, selectivity, etc. we predicted two sets of sentiments from a pre-trained robustly optimized bert pre-training approach (roberta) and from a graph attention network (gat) that leverages both the rich semantic information and the relational information among posted messages and then applied a logistic stacking method to obtain the final sentiment classification. after obtaining the sentiment label for each message, we employed a generalized linear mixed-effects model to estimate the temporal trend in sentiment from 2019 to 2022 and how the school-level factors may affect the sentiment.results: compared to the year 2019, the odds of negative sentiment in years 2020, 2021, and 2022 are 24%. 4.3%, and 10.3% higher, respectively, which are all statistically significant at the 5% significance level based on adjusted p-values. in addition, for every 1 standard deviation (18075.6) increase in enrollment, the odds of having negative sentiment in such universities/colleges are 11.9% higher in a statistically significant manner. compared to master's/baccalaureate universities/colleges, the odds of having negative sentiment for doctoral schools with very high research activity is statistically significantly higher with a 30.8% increase. region, public vs. private, division i school or not, selectivity, graduation rate, city population, number of doctoral programs, graduate student enrollment, tenured faculty or on tenured track, or having a medical school or not do not affect sentiment in a statistically significant manner in this study.conclusions: our study findings suggest a partial recovery in the sentiment composition (negative vs. non-negative) in the post-pandemic-emergency era. the results align 1 \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/983.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/983.txt new file mode 100644 index 0000000000000000000000000000000000000000..66872aacf4d136a820c4c953755ba56561bf09cb --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/983.txt @@ -0,0 +1 @@ + the emergence of human-like abilities of ai systems for content generation in domains such as text, audio, and vision has prompted the development of classifiers to determine whether content originated from a human or a machine. 
implicit in these efforts is an assumption that the generation properties of a human are different from those of the machine. in this work, we provide a framework in the language of statistical pattern recognition that quantifies the difference between the distributions of human and machine-generated content conditioned on an evaluation context. we describe current methods in the context of the framework and demonstrate how to use the framework to evaluate the progression of generative models towards human-like capabilities, among many axes of analysis. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/984.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/984.txt new file mode 100644 index 0000000000000000000000000000000000000000..2bcaee3d6d6107c31f8ec5bc6bd8af002dc62ad0 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/984.txt @@ -0,0 +1 @@ + large language models are quickly gaining momentum, yet are found to demonstrate gender bias in their responses. in this paper, we conducted a content analysis of social media discussions to gauge public perceptions of gender bias in llms which are trained in different cultural contexts, i.e., chatgpt, a us-based llm, or ernie, a china-based llm. people shared both observations of gender bias in their personal use and scientific findings about gender bias in llms. a difference between the two llms was seen: chatgpt was more often found to carry implicit gender bias, e.g., associating men and women with different profession titles, while explicit gender bias was found in ernie's responses, e.g., overly promoting women's pursuit of marriage over career. based on the findings, we reflect on the impact of culture on gender bias and propose governance recommendations to regulate gender bias in llms. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/985.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/985.txt new file mode 100644 index 0000000000000000000000000000000000000000..ed3cc8e2b65f965f8b6be264305f4595dca14ae6 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/985.txt @@ -0,0 +1 @@ + we present an overview of the literature on trust in ai and ai trustworthiness and argue for the need to distinguish these concepts more clearly and to gather more empirical evidence on what contributes to people's trusting behaviours. we discuss that trust in ai involves not only reliance on the system itself, but also trust in the developers of the ai system. ai ethics principles such as explainability and transparency are often assumed to promote user trust, but empirical evidence of how such features actually affect how users perceive the system's trustworthiness is not as abundant or as clear. ai systems should be recognised as socio-technical systems, where the people involved in designing, developing, deploying, and using the system are as important as the system for determining whether it is trustworthy. without recognising these nuances, 'trust in ai' and 'trustworthy ai' risk becoming nebulous terms for any desirable feature of ai systems.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/986.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/986.txt new file mode 100644 index 0000000000000000000000000000000000000000..0927135f0b704ce690c5c3d1e682455469e28976 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/986.txt @@ -0,0 +1 @@ + this essay examines how automation has reconfigured mathematical proof and labor, and what might happen in the future. it discusses practical standards of proof, distinguishes between prominent forms of automation in research, provides critiques of recurring assumptions, and asks how automation might reshape economies of labor and credit. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/987.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/987.txt new file mode 100644 index 0000000000000000000000000000000000000000..89a2857aa7e18ed975fe70c32bcbcc54b05b5902 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/987.txt @@ -0,0 +1 @@ + in this study, we leveraged machine learning techniques to identify risk factors associated with post-covid-19 mental health disorders. our analysis, based on data collected from 669 patients across various provinces in iraq, yielded valuable insights. we found that age, gender, and geographical region of residence were significant demographic factors influencing the likelihood of developing mental health disorders in post-covid-19 patients. additionally, comorbidities and the severity of covid-19 illness were important clinical predictors. psychosocial factors, such as social support, coping strategies, and perceived stress levels, also played a substantial role. our findings emphasize the complex interplay of multiple factors in the development of mental health disorders following covid-19 recovery. healthcare providers and policymakers should consider these risk factors when designing targeted interventions and support systems for individuals at risk. machine learning-based approaches can provide a valuable tool for predicting and preventing adverse mental health outcomes in post-covid-19 patients. further research and prospective studies are needed to validate these findings and enhance our understanding of the long-term psychological impact of the covid-19 pandemic. this study contributes to the growing body of knowledge regarding the mental health consequences of the covid-19 pandemic and underscores the importance of a multidisciplinary approach to address the diverse needs of individuals on the path to recovery. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/988.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/988.txt new file mode 100644 index 0000000000000000000000000000000000000000..b44034a97a1bd701490f3c5ae33dd74bab3cbbba --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/988.txt @@ -0,0 +1 @@ + as digitalization and technological advancements continue to shape the infrastructure landscape, the emergence of blockchain-based decentralized physical infrastructure networks (depins) has gained prominence. however, a systematic categorization of depin components and their interrelationships is still missing. to address this gap, we conduct a literature review and analysis of existing frameworks and derived a taxonomy of depin systems from a conceptual architecture. 
our taxonomy encompasses three key dimensions: distributed ledger technology, cryptoeconomic design and physicial infrastructure network. within each dimension, we identify and define relevant components and attributes, establishing a clear hierarchical structure. moreover, we illustrate the relationships and dependencies among the identified components, highlighting the interplay between governance models, hardware architectures, networking protocols, token mechanisms, and distributed ledger technologies. this taxonomy provides a foundation for understanding and classifying diverse depin networks, serving as a basis for future research and facilitating knowledge exchange, fostering collaboration and standardization within the emerging field of decentralized physical infrastructure networks. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/989.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/989.txt new file mode 100644 index 0000000000000000000000000000000000000000..8028a587e5e92d77e8773297a1d451bdd3d4f6cf --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/989.txt @@ -0,0 +1 @@ + when employing the socratic method of teaching, instructors guide students toward solving a problem on their own rather than providing the solution directly. while this strategy can substantially improve learning outcomes, it is usually time-consuming and cognitively demanding. automated socratic conversational agents can augment human instruction and provide the necessary scale, however their development is hampered by the lack of suitable data for training and evaluation. in this paper, we introduce a manually created dataset of multi-turn socratic advice that is aimed at helping a novice programmer fix buggy solutions to simple computational problems. the dataset is then used for benchmarking the socratic debugging abilities of a number of language models, ranging from fine-tuning the instruction-based text-to-text transformer flan-t5 to zero-shot and chain of thought prompting of the much larger gpt-4. the code and datasets are made freely available for research at the link below.https://github.com/taisazero/socratic-debugging-benchmark \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/99.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/99.txt new file mode 100644 index 0000000000000000000000000000000000000000..c4380dff024e6f1f056452a70a2c2bd9ddc79037 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/99.txt @@ -0,0 +1 @@ + cloth manipulation is very relevant for domestic robotic tasks, but it presents many challenges due to the complexity of representing, recognizing and predicting the behaviour of cloth under manipulation. in this work, we propose a generic, compact and simplified representation of the states of cloth manipulation that allows for representing tasks as sequences of states and transitions. we also define a cloth manipulation graph that encodes all the strategies to accomplish a task. our novel representation is used to encode two different cloth manipulation tasks, learned from an experiment with human subjects with video and motion data. we show how our simplified representation allows to obtain a map of meaningful motion primitives. 
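The abstract for 99.txt above represents cloth-manipulation tasks as sequences of states and transitions collected in a manipulation graph; a toy sketch of that idea is given below. The states and edges are illustrative assumptions, not the graph learned in the paper.

# hypothetical simplified cloth states and allowed transitions
CLOTH_GRAPH = {
    "crumpled":       ["flattened", "corner_grasped"],
    "corner_grasped": ["half_folded"],
    "flattened":      ["half_folded"],
    "half_folded":    ["folded"],
    "folded":         [],
}

def strategies(state, goal, path=None):
    """Enumerate every sequence of states that reaches the goal (depth-first)."""
    path = (path or []) + [state]
    if state == goal:
        yield path
        return
    for nxt in CLOTH_GRAPH[state]:
        yield from strategies(nxt, goal, path)

for seq in strategies("crumpled", "folded"):
    print(" -> ".join(seq))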
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/990.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/990.txt new file mode 100644 index 0000000000000000000000000000000000000000..15bc8838dd28c85ae917833093a8f4488559d64f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/990.txt @@ -0,0 +1 @@ + in this ambitious paper, we present a groundbreaking paradigm for human-computer interaction that revolutionizes the traditional notion of an operating system. within this innovative framework, user requests issued to the machine are handled by an interconnected ecosystem of generative ai models that seamlessly integrate with or even replace traditional software applications. at the core of this paradigm shift are large generative models, such as language and diffusion models, which serve as the central interface between users and computers. this pioneering approach leverages the abilities of advanced language models, empowering users to engage in natural language conversations with their computing devices.by capitalizing on the power of language models, users can articulate their intentions, tasks, and inquiries directly to the system, eliminating the need for explicit commands or complex navigation. the language model comprehends and interprets the user's prompts, generating and displaying contextual and meaningful responses that facilitate seamless and intuitive interactions.this paradigm shift not only streamlines user interactions but also opens up new possibilities for personalized experiences. generative models can adapt to individual preferences, learning from user input and continuously improving their understanding and response generation. furthermore, it enables enhanced accessibility, as users can interact with the system using speech or text, accommodating diverse communication preferences.however, this visionary concept also raises significant challenges, including privacy, security, trustability, and the ethical use of generative models. robust safeguards must be in place to protect user data and prevent potential misuse or manipulation of the language model.while the full realization of this paradigm is still far from being achieved, this paper serves as a starting point for envisioning the transformative potential of a human-computer interaction paradigm centered around artificial intelligence. we discuss the envisioned benefits, challenges, and implications, paving the way for future research and development in this exciting and promising direction. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/991.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/991.txt new file mode 100644 index 0000000000000000000000000000000000000000..894245d30e973e84e40ed3724f5e3b259270a0b7 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/991.txt @@ -0,0 +1 @@ + in the digital age, the dynamics of customer service are evolving, driven by technological advancements and the integration of large language models (llms). this research paper introduces a groundbreaking approach to automating customer service using langchain, a custom llm tailored for organizations. the paper explores the obsolescence of traditional customer support techniques, particularly frequently asked questions (faqs), and proposes a paradigm shift towards responsive, context-aware, and personalized customer interactions. 
the heart of this innovation lies in the fusion of open-source methodologies, web scraping, fine-tuning, and the seamless integration of langchain into customer service platforms. this open-source state-of-the-art framework, presented as "sahaay," demonstrates the ability to scale across industries and organizations, offering real-time support and query resolution. key elements of this research encompass data collection via web scraping, the role of embeddings, the utilization of google's flan t5 xxl, base and small language models for knowledge retrieval, and the integration of the chatbot into customer service platforms. the results section provides insights into their performance and use cases, here particularly within an educational institution. this research heralds a new era in customer service, where technology is harnessed to create efficient, personalized, and responsive interactions. sahaay, powered by langchain, redefines the customer-company relationship, elevating customer retention, value extraction, and brand image. as organizations embrace llms, customer service becomes a dynamic and customercentric ecosystem. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/992.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/992.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b3e3f52c4995d98a30083f0dd14eecae1aa9684 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/992.txt @@ -0,0 +1 @@ + privacy risk assessments have been touted as an objective, principled way to encourage organizations to implement privacy-by-design. they are central to a new regulatory model of collaborative governance, as embodied by the gdpr. however, existing guidelines and methods remain vague, and there is little empirical evidence on privacy harms. in this paper we conduct a close analysis of us nist's privacy risk assessment methodology, highlighting multiple sites of discretion that create countless opportunities for adversarial organizations to engage in performative compliance. our analysis shows that the premises on which the success of privacy risk assessments depends do not hold, particularly in regard to organizations' incentives and regulators auditing capabilities. we highlight the limitations and pitfalls of what is essentially a utilitarian and technocratic approach, leading us to discuss alternatives and a realignment of our policy and research objectives. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/993.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/993.txt new file mode 100644 index 0000000000000000000000000000000000000000..800abbdca1d89ae58acc8f0aaa7386ffd97ece3f --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/993.txt @@ -0,0 +1 @@ + this draft paper presents a workflow for creating user personas with large language models, using the results of a thematic analysis of qualitative interviews. the proposed workflow uses improved prompting and a larger pool of themes, compared to previous work conducted by the author for the same task. this is possible due to the capabilities of a recently released llm which allows the processing of 16 thousand tokens (gpt3.5-turbo-16k) and also due to the possibility to offer a refined prompting for personas' creation. the paper offers details of performing phase 2 and 3 of thematic analysis, and then discusses the improved workflow for creating personas. 
the paper also offers some reflections on the relationship between the proposed process and existing approaches to personas such as the "data-driven" and "qualitative" personas. moreover, the paper offers reflections on the capacity of llms to capture user behaviours and personality traits, from the underlying dataset of qualitative interviews used for the analysis. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/994.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/994.txt new file mode 100644 index 0000000000000000000000000000000000000000..eded256bdc7ac70100f90d94c72660c78c8f4220 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/994.txt @@ -0,0 +1 @@ + fact-checking is a crucial task as it ensures the prevention of misinformation. however, manual fact-checking cannot keep up with the rate at which false information is generated and disseminated online. automated fact-checking by machines is significantly quicker than by humans. but for better trust and transparency of these automated systems, explainability in the fact-checking process is necessary. fact-checking often entails contrasting a factual assertion with a body of knowledge for such explanations. an effective way of representing knowledge is the knowledge graph (kg). there have been sufficient works proposed related to fact-checking with the usage of kg but not much focus is given to the application of reinforcement learning (rl) in such cases. to mitigate this gap, we propose an rl-based kg reasoning approach for explainable fact-checking. extensive experiments on fb15k-277 and nell-995 datasets reveal that reasoning over a kg is an effective way of producing humanreadable explanations in the form of paths and classifications for fact claims. the rl reasoning agent computes a path that either proves or disproves a factual claim, but does not provide a verdict itself. a verdict is reached by a voting mechanism that utilizes paths produced by the agent. these paths can be presented to human readers so that they themselves can decide whether or not the provided evidence is convincing or not. this work will encourage works in this direction for incorporating rl for explainable fact-checking as it increases trustworthiness by providing a human-in-the-loop approach. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/995.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/995.txt new file mode 100644 index 0000000000000000000000000000000000000000..89686000a59cb579a8e8f0862fc9503c4321644d --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/995.txt @@ -0,0 +1 @@ + we study the impact of content moderation policies in online communities. in our theoretical model, a platform chooses a content moderation policy and individuals choose whether or not to participate in the community according to the fraction of user content that aligns with their preferences. the effects of content moderation, at first blush, might seem obvious: it restricts speech on a platform. however, when user participation decisions are taken into account, its effects can be more subtle -and counter-intuitive. for example, our model can straightforwardly demonstrate how moderation policies may increase participation and diversify content available on the platform. in our analysis, we explore a rich set of interconnected phenomena related to content moderation in online communities. 
we first characterize the effectiveness of a natural class of moderation policies for creating and sustaining communities. building on this, we explore how resource-limited or ideological platforms might set policies, how communities are affected by differing levels of personalization, and competition between platforms. our model provides a vocabulary and mathematically tractable framework for analyzing platform decisions about content moderation. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/996.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/996.txt new file mode 100644 index 0000000000000000000000000000000000000000..39730dd7e4e12c4a31189b65e2c0bac5174bcbe9 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/996.txt @@ -0,0 +1 @@ + this paper proposes the use of causal modeling to detect and mitigate algorithmic bias. we provide a brief description of causal modeling and a general overview of our approach. we then use the adult dataset, which is available for download from the uc irvine machine learning repository, to develop (1) a prediction model, which is treated as a black box, and (2) a causal model for bias mitigation. in this paper, we focus on gender bias and the problem of binary classification. we show that gender bias in the prediction model is statistically significant at the 0.05 level. we demonstrate the effectiveness of the causal model in mitigating gender bias by cross-validation. furthermore, we show that the overall classification accuracy is improved slightly. our novel approach is intuitive, easy-to-use, and can be implemented using existing statistical software tools such as lavaan in r. hence, it enhances explainability and promotes trust. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/997.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/997.txt new file mode 100644 index 0000000000000000000000000000000000000000..383812fe68d4a309c371a87f234c794772bec14e --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/997.txt @@ -0,0 +1 @@ + recently efforts have been made by social media platforms as well as researchers to detect hateful or toxic language using large language models. however, none of these works aim to use explanation, additional context and victim community information in the detection process. we utilise different prompt variation, input information and evaluate large language models in zero shot setting (without adding any in-context examples). we select three large language models (gpt-3.5, text-davinci and flan-t5) and three datasets -hatexplain, implicit hate and toxicspans. we find that on average including the target information in the pipeline improves the model performance substantially (∼ 20-30%) over the baseline across the datasets. there is also a considerable effect of adding the rationales/explanations into the pipeline (∼ 10 -20%) over the baseline across the datasets. in addition, we further provide a typology of the error cases where these large language models fail to (i) classify and (ii) explain the reason for the decisions they take. such vulnerable points automatically constitute 'jailbreak' prompts for these models and industry scale safeguard techniques need to be developed to make the models robust against such prompts. 
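The abstract for 997.txt above describes varying a zero-shot prompt by adding target-community information and a rationale request; a minimal sketch of such prompt construction follows. The wording, labels, and function name are assumptions for illustration, not the paper's exact prompts.

def build_prompt(post, target_community=None, add_rationale_request=True):
    """Assemble a zero-shot classification prompt; no in-context examples are added."""
    parts = [
        "Classify the following post as 'toxic' or 'normal'.",
        f"Post: {post}",
    ]
    if target_community:  # the 'target information' variant
        parts.append(f"The post is directed at the {target_community} community.")
    if add_rationale_request:  # the 'rationale/explanation' variant
        parts.append("Briefly explain which phrases support your decision.")
    parts.append("Answer:")
    return "\n".join(parts)

print(build_prompt("example post text", target_community="immigrant"))
# the resulting string would then be sent to a model such as gpt-3.5, text-davinci,
# or flan-t5 in a zero-shot setting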
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/998.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/998.txt new file mode 100644 index 0000000000000000000000000000000000000000..cf4e143cd7ff8a93b8fcc78c4301693055f0b3d3 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/998.txt @@ -0,0 +1 @@ + recent advancements in technology have necessitated a paradigm shift in the way people use technology, necessitating a new research field called human-machine collaboration. chatgpt, an artificial intelligence (ai) assistive technology, has gained mainstream adoption and implementation in academia and industry; however, a lot is left unknown as to what this new technology holds for human-machine collaboration in africa. our survey paper aims to answer some of these questions. to understand the effectiveness of chatgpt on human-machine collaboration, we utilized reflexive thematic analysis to analyze (n = 51) articles between 2019 and 2023 obtained from our literature search. our findings indicate the prevalence of chatgpt for human-computer interaction within academic sectors such as education and research; trends also revealed the relatively high effectiveness of chatgpt in improving human-machine collaboration. \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/999.txt b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/999.txt new file mode 100644 index 0000000000000000000000000000000000000000..17598578484f7fb7584ccfb421271bf1ade3c4c8 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets/999.txt @@ -0,0 +1 @@ + chronic obstructive pulmonary disease (copd) can be fatal and is challenging to live with due to its severe symptoms. pulmonary rehabilitation (pr) is one of the management means to maintain copd in a stable status. however, implementation of pr in the uk has been challenging due to the environmental and personal barriers faced by patients, which hinder their uptake, adherence, and completion of the programmes. moreover, increased exercise capacity following pr does not always translate into physical activity (pa) and, unfortunately, can revert to the exercise capacity seen prior to pr. current alternative solutions using telerehabilitation methods have limitations in addressing these accessibility problems, and no clear conclusion can be drawn on the efficacy of telerehabilitation in enhancing the sustainability of pr outcomes via promoting pa in patients' everyday life. in this work, the authors propose a novel design of a sensor-based assistive product with the aim of facilitating pr and promoting pa maintenance in a home-based setting. prototypes of different levels of fidelity are presented, followed by an evaluation plan for future research directions.
\ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/pegasus_x/Trying_Pegasus_X_histrank_final_version1.ipynb b/drive/MyDrive/RA_Internship/HISTRANK/pegasus_x/Trying_Pegasus_X_histrank_final_version1.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e2538218de5d4cb40ca4b7a0515235bdb0f50f73 --- /dev/null +++ b/drive/MyDrive/RA_Internship/HISTRANK/pegasus_x/Trying_Pegasus_X_histrank_final_version1.ipynb @@ -0,0 +1 @@ +{"cells":[{"cell_type":"code","execution_count":null,"metadata":{"id":"smCuTxuYOAOA"},"outputs":[],"source":["from transformers import PegasusXForConditionalGeneration, PegasusTokenizer, Seq2SeqTrainer, Seq2SeqTrainingArguments, AutoTokenizer\n","import torch"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"NgHD7qnYSCju","outputId":"92ded0f7-4c29-4cfb-ec0e-098770065019","executionInfo":{"status":"ok","timestamp":1717831607781,"user_tz":-240,"elapsed":40464,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Mounted at /content/drive\n"]}],"source":["from google.colab import drive\n","drive.mount('/content/drive')"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"eVxWWYHNOPe0"},"outputs":[],"source":["class PegasusDataset(torch.utils.data.Dataset):\n"," def __init__(self, encodings, labels):\n"," self.encodings = encodings\n"," self.labels = labels\n"," def __getitem__(self, idx):\n"," item = {key: torch.tensor(val[idx]) for key, val in self.encodings.items()}\n"," item['labels'] = torch.tensor(self.labels['input_ids'][idx]) # torch.tensor(self.labels[idx])\n"," return item\n"," def __len__(self):\n"," return len(self.labels['input_ids']) # len(self.labels)"]},{"cell_type":"code","source":["max_input_length = 8192\n","max_output_length = 512"],"metadata":{"id":"bIevvdhmJNjI"},"execution_count":null,"outputs":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"fWjskB7GOhyR"},"outputs":[],"source":["def prepare_data(model_name,\n"," train_texts, train_labels,\n"," val_texts, val_labels,\n"," test_texts, test_labels):\n"," \"\"\"\n"," Prepare input data for model fine-tuning\n"," \"\"\"\n"," tokenizer = AutoTokenizer.from_pretrained(\"google/pegasus-x-large\")\n"," tokenizer.model_max_length = 4000\n","\n"," prepare_val = False if val_texts is None or val_labels is None else True\n"," prepare_test = False if test_texts is None or test_labels is None else True\n","\n"," def tokenize_data(texts, labels):\n"," encodings = tokenizer(texts, truncation=True, padding='max_length',max_length = 4000)\n"," decodings = tokenizer(labels, truncation=True, padding='max_length',max_length = 512)\n"," dataset_tokenized = PegasusDataset(encodings, decodings)\n"," return dataset_tokenized\n","\n"," train_dataset = tokenize_data(train_texts, train_labels)\n"," val_dataset = tokenize_data(val_texts, val_labels) if prepare_val else None\n"," test_dataset = tokenize_data(test_texts, test_labels) if prepare_test else None\n","\n"," return train_dataset, val_dataset, test_dataset, tokenizer\n"]},{"cell_type":"code","source":["def compute_metrics(pred):\n"," labels_ids = pred.label_ids\n"," pred_ids = pred.predictions\n"," rouge = load_metric(\"rouge\")\n","\n"," pred_str = tokenizer.batch_decode(pred_ids, skip_special_tokens=True)\n"," labels_ids[labels_ids == -100] = tokenizer.pad_token_id\n"," label_str = tokenizer.batch_decode(labels_ids, skip_special_tokens=True)\n","\n"," 
rouge_output = rouge.compute(\n"," predictions=pred_str, references=label_str, rouge_types=[\"rouge2\"]\n"," )[\"rouge2\"].mid\n","\n"," return {\n"," \"rouge2_precision\": round(rouge_output.precision, 4),\n"," \"rouge2_recall\": round(rouge_output.recall, 4),\n"," \"rouge2_fmeasure\": round(rouge_output.fmeasure, 4),\n"," }"],"metadata":{"id":"iOmCzxlZJhgZ"},"execution_count":null,"outputs":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"0N_f-N7OOsOt"},"outputs":[],"source":["def prepare_fine_tuning(model_name, tokenizer, train_dataset, val_dataset, freeze_encoder=False, output_dir='./results'):\n"," \"\"\"\n"," Prepare configurations and base model for fine-tuning\n"," \"\"\"\n"," torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'\n"," model = PegasusXForConditionalGeneration.from_pretrained(\"google/pegasus-x-base\").to(torch_device)\n"," model.config.max_length = 512\n"," model.config.min_length = 100\n"," model.config.length_penalty = 2.0\n"," model.config.early_stopping = True\n"," model.config.no_repeat_ngram_size = 3\n"," print(\"val dataset length= \",len(val_dataset))\n","\n"," if freeze_encoder:\n"," for param in model.model.encoder.parameters():\n"," param.requires_grad = False\n","\n"," training_args = Seq2SeqTrainingArguments(\n"," predict_with_generate=True,\n"," evaluation_strategy=\"steps\",\n"," per_device_train_batch_size=2,\n"," per_device_eval_batch_size=2,\n"," fp16=True,\n"," output_dir=\"./\",\n"," logging_steps=5,\n"," eval_steps=10,\n"," save_steps=10,\n"," save_total_limit=2,\n"," gradient_accumulation_steps=4,\n"," eval_accumulation_steps=1,\n"," num_train_epochs=1,\n",")\n"," trainer = Seq2SeqTrainer(\n"," model=model,\n"," args=training_args,\n"," train_dataset=train_dataset,\n"," eval_dataset=val_dataset,\n"," tokenizer=tokenizer,\n","\n"," #compute_metrics=compute_metrics,\n"," )\n"," return trainer"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"pBw9rlilPERD","outputId":"0367c685-99de-4f9a-978c-6fad206572af","executionInfo":{"status":"ok","timestamp":1717832667307,"user_tz":-240,"elapsed":5869,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: datasets in /usr/local/lib/python3.10/dist-packages (2.19.2)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from datasets) (3.14.0)\n","Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from datasets) (1.25.2)\n","Requirement already satisfied: pyarrow>=12.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (14.0.2)\n","Requirement already satisfied: pyarrow-hotfix in /usr/local/lib/python3.10/dist-packages (from datasets) (0.6)\n","Requirement already satisfied: dill<0.3.9,>=0.3.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (0.3.8)\n","Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from datasets) (2.0.3)\n","Requirement already satisfied: requests>=2.32.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (2.32.3)\n","Requirement already satisfied: tqdm>=4.62.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (4.66.4)\n","Requirement already satisfied: xxhash in /usr/local/lib/python3.10/dist-packages (from datasets) (3.4.1)\n","Requirement already satisfied: multiprocess in /usr/local/lib/python3.10/dist-packages (from datasets) (0.70.16)\n","Requirement 
already satisfied: fsspec[http]<=2024.3.1,>=2023.1.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (2023.6.0)\n","Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets) (3.9.5)\n","Requirement already satisfied: huggingface-hub>=0.21.2 in /usr/local/lib/python3.10/dist-packages (from datasets) (0.23.2)\n","Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from datasets) (24.0)\n","Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (6.0.1)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.3.1)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (23.2.0)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.4.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (6.0.5)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.9.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (4.0.3)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub>=0.21.2->datasets) (4.12.1)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (2024.6.2)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2023.4)\n","Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2024.1)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas->datasets) (1.16.0)\n"]}],"source":["pip install datasets"]},{"cell_type":"code","source":[],"metadata":{"id":"3mipwBArEUzL"},"execution_count":null,"outputs":[]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"mFugBZJdUKz8","outputId":"b0f6c270-ac4f-46b1-8111-0e8ff2459fa7","executionInfo":{"status":"ok","timestamp":1717832789397,"user_tz":-240,"elapsed":58791,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting accelerate\n"," Downloading accelerate-0.31.0-py3-none-any.whl (309 kB)\n","\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/309.4 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[91m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[91m╸\u001b[0m \u001b[32m307.2/309.4 kB\u001b[0m \u001b[31m11.2 MB/s\u001b[0m eta 
\u001b[36m0:00:01\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m309.4/309.4 kB\u001b[0m \u001b[31m8.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from accelerate) (1.25.2)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (24.0)\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate) (5.9.5)\n","Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from accelerate) (6.0.1)\n","Requirement already satisfied: torch>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (2.3.0+cu121)\n","Requirement already satisfied: huggingface-hub in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.23.2)\n","Requirement already satisfied: safetensors>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.4.3)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.14.0)\n","Requirement already satisfied: typing-extensions>=4.8.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (4.12.1)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (1.12.1)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.3)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.1.4)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2023.6.0)\n","Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\n","Collecting nvidia-cuda-runtime-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\n","Collecting nvidia-cuda-cupti-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\n","Collecting nvidia-cudnn-cu12==8.9.2.26 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\n","Collecting nvidia-cublas-cu12==12.1.3.1 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\n","Collecting nvidia-cufft-cu12==11.0.2.54 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\n","Collecting nvidia-curand-cu12==10.3.2.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\n","Collecting nvidia-cusolver-cu12==11.4.5.107 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\n","Collecting nvidia-cusparse-cu12==12.1.0.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\n","Collecting nvidia-nccl-cu12==2.20.5 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl (176.2 MB)\n","Collecting nvidia-nvtx-cu12==12.1.105 (from 
torch>=1.10.0->accelerate)\n"," Using cached nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\n","Requirement already satisfied: triton==2.3.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2.3.0)\n","Collecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch>=1.10.0->accelerate)\n"," Downloading nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_x86_64.whl (21.3 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.3/21.3 MB\u001b[0m \u001b[31m72.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (2.32.3)\n","Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (4.66.4)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.10.0->accelerate) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2024.6.2)\n","Requirement already satisfied: mpmath<1.4.0,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.10.0->accelerate) (1.3.0)\n","Installing collected packages: nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, accelerate\n","Successfully installed accelerate-0.31.0 nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.20.5 nvidia-nvjitlink-cu12-12.5.40 nvidia-nvtx-cu12-12.1.105\n"]}],"source":["pip install accelerate -U"]},{"cell_type":"code","source":["!pip install rouge_score"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"caoO2nTuMAEc","outputId":"1478c493-c735-43ea-cf81-dc24cf1199c4"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting rouge_score\n"," Downloading rouge_score-0.1.2.tar.gz (17 kB)\n"," Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: absl-py in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.4.0)\n","Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (from rouge_score) (3.8.1)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.25.2)\n","Requirement already satisfied: six>=1.14.0 in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.16.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (1.4.2)\n","Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (2024.5.15)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (4.66.4)\n","Building wheels for collected packages: rouge_score\n"," Building wheel for rouge_score (setup.py) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24933 sha256=585109dcb922be767a81d4017bc09cd4212a38256ef67aed77b70280c6810550\n"," Stored in directory: /root/.cache/pip/wheels/5f/dd/89/461065a73be61a532ff8599a28e9beef17985c9e9c31e541b4\n","Successfully built rouge_score\n","Installing collected packages: rouge_score\n","Successfully installed rouge_score-0.1.2\n"]}]},{"cell_type":"code","execution_count":null,"metadata":{"id":"192uOu4uOzZr"},"outputs":[],"source":["import os\n","import glob\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","from datasets import Dataset, load_metric\n","from sklearn.model_selection import train_test_split\n","if __name__=='__main__':\n","\n"," from datasets import load_dataset\n","\n"," input_dir = '/content/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs'\n"," target_dir = '/content/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets'\n"," data = {'input_text': [], 'target_text': []}\n"," input_files = glob.glob(os.path.join(input_dir, '*.txt'))\n","\n"," for input_file in input_files:\n"," filename = os.path.basename(input_file)\n"," target_file = os.path.join(target_dir, filename)\n","\n"," with open(input_file, 'r') as f:\n"," input_text = f.read()\n"," with open(target_file, 'r') as f:\n"," target_text = f.read()\n","\n"," data['input_text'].append(input_text)\n"," data['target_text'].append(target_text)\n"," df = pd.DataFrame(data)\n"," train_df, temp_df = train_test_split(df, test_size=0.2, random_state=42)\n"," eval_df, test_df = train_test_split(temp_df, test_size=0.5, random_state=42)\n","\n"," train_dataset = Dataset.from_pandas(train_df)\n"," eval_dataset = Dataset.from_pandas(eval_df)\n"," test_dataset = Dataset.from_pandas(test_df)\n","\n"," # print(test_dataset)\n"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":35},"id":"eo519NZJU3e1","outputId":"dc3c65db-f6d5-42b1-fe07-07790d49b2a2","executionInfo":{"status":"ok","timestamp":1717832842770,"user_tz":-240,"elapsed":626,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["'0.31.0'"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"string"}},"metadata":{},"execution_count":16}],"source":["import 
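After the dependency installs, the data-loading cell reads each input/target pair from Drive, builds a pandas DataFrame, and splits it 80/10/10 into train, eval, and test with `train_test_split` (first an 80/20 split, then the 20% half split in two). One detail worth flagging: `tokenizer.model_max_length` is set to 4000 even though `max_input_length` was declared as 8192 earlier in the notebook, so the effective encoder context used for tokenization is 4000 tokens; Pegasus-X itself supports longer inputs, and raising the tokenization limit would be a straightforward change if longer documents matter. This is an observation about the notebook as written, not a claim about what the authors intended.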
accelerate\n","\n","accelerate.__version__"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"e3T4btv9WF6i"},"outputs":[],"source":["from transformers import logging\n","\n","logging.set_verbosity_warning()"]},{"cell_type":"code","source":["print(len(train_dataset))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"v5PhwjbacX8I","outputId":"dcf37ae7-202d-4254-f24e-3b5a927d1dcf","executionInfo":{"status":"ok","timestamp":1717832919475,"user_tz":-240,"elapsed":10,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["805\n"]}]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":1000},"id":"YoeEBu57US5H","outputId":"0e48f3d9-118a-449c-f00f-b1f88129c8ad","executionInfo":{"status":"ok","timestamp":1717833710035,"user_tz":-240,"elapsed":786838,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_token.py:89: UserWarning: \n","The secret `HF_TOKEN` does not exist in your Colab secrets.\n","To authenticate with the Hugging Face Hub, create a token in your settings tab (https://huggingface.co/settings/tokens), set it as secret in your Google Colab and restart your session.\n","You will be able to reuse this secret in all of your notebooks.\n","Please note that authentication is recommended but still optional to access public models or datasets.\n"," warnings.warn(\n"]},{"output_type":"stream","name":"stdout","text":["val dataset length= 101\n"]},{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/transformers/training_args.py:1474: FutureWarning: `evaluation_strategy` is deprecated and will be removed in version 4.46 of 🤗 Transformers. Use `eval_strategy` instead\n"," warnings.warn(\n"]},{"output_type":"display_data","data":{"text/plain":[""],"text/html":["\n","
\n"," \n"," \n"," [100/100 12:53, Epoch 0/1]\n","
\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
StepTraining LossValidation Loss
109.2442008.506228
208.1888007.642267
307.5939007.073873
407.1108006.582322
506.5665006.107143
606.2285005.638771
705.8365005.208267
805.5753004.817614
905.3009004.515504
1005.1573004.362726
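Reading the training table above: over a single epoch (100 optimizer steps at an effective batch size of 8, i.e. per-device batch 2 with gradient accumulation 4), training loss falls from roughly 9.2 to 5.2 and validation loss from about 8.5 to 4.4, with no sign of overfitting yet, which suggests the run stopped well before convergence and more epochs would likely help. The checkpoint-saving warnings that follow are the standard Transformers notice that generation parameters (`max_length`, `min_length`, `length_penalty`, etc.) set on the model config should instead live in a `GenerationConfig`; under the library versions used here they are warnings only. A hedged sketch of how one might move those settings into a `GenerationConfig` is shown below (the parameter values are copied from the notebook; the refactor itself is a suggestion, not part of the original run).

```python
from transformers import GenerationConfig, PegasusXForConditionalGeneration

model = PegasusXForConditionalGeneration.from_pretrained("google/pegasus-x-base")

# Keep decoding-time settings in a GenerationConfig instead of the model config,
# which is what the Transformers warning above recommends.
model.generation_config = GenerationConfig(
    max_length=512,
    min_length=100,
    length_penalty=2.0,
    early_stopping=True,
    no_repeat_ngram_size=3,
)
model.save_pretrained("./checkpoint")  # hypothetical output path
```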

"]},"metadata":{}},{"output_type":"stream","name":"stderr","text":["Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. 
These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. 
This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n","Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. 
This warning will be raised to an exception in v4.41.\n","Non-default generation parameters: {'max_length': 512, 'min_length': 100, 'early_stopping': True, 'num_beams': 8, 'length_penalty': 2.0, 'no_repeat_ngram_size': 3, 'forced_eos_token_id': 1}\n","Your generation config was originally created from the model config, but the model config has changed since then. Unless you pass the `generation_config` argument to this model's `generate` calls, they will revert to the legacy behavior where the base `generate` parameterization is loaded from the model config instead. To avoid this behavior and this warning, we recommend you to overwrite the generation config model attribute before calling the model's `save_pretrained`, preferably also removing any generation kwargs from the model config. This warning will be raised to an exception in v4.41.\n"]},{"output_type":"execute_result","data":{"text/plain":["TrainOutput(global_step=100, training_loss=6.783313522338867, metrics={'train_runtime': 779.7681, 'train_samples_per_second': 1.032, 'train_steps_per_second': 0.128, 'total_flos': 3809024409600000.0, 'train_loss': 6.783313522338867, 'epoch': 0.9925558312655087})"]},"metadata":{},"execution_count":9}],"source":["# use Pegasus Large model as base for fine-tuning\n","model_name = 'google/pegasus-x-base'\n","train_dataset, val_dataset, test_dataset, tokenizer = prepare_data(model_name, train_dataset['input_text'], train_dataset['target_text'], eval_dataset['input_text'], eval_dataset['target_text'], test_dataset['input_text'], test_dataset['target_text'])\n","trainer = prepare_fine_tuning(model_name, tokenizer, train_dataset,val_dataset)\n","trainer.train()"]},{"cell_type":"code","source":["trainer.state.log_history"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"tfY-I39mZCp6","outputId":"f4b29d0d-0b22-4910-e3df-084adb1af3f0","executionInfo":{"status":"ok","timestamp":1717833715159,"user_tz":-240,"elapsed":1076,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["[{'loss': 10.1624,\n"," 'grad_norm': 38.591487884521484,\n"," 'learning_rate': 4.9e-05,\n"," 'epoch': 0.04962779156327544,\n"," 'step': 5},\n"," {'loss': 9.2442,\n"," 'grad_norm': 15.482707023620605,\n"," 'learning_rate': 4.6500000000000005e-05,\n"," 'epoch': 0.09925558312655088,\n"," 'step': 10},\n"," {'eval_loss': 8.50622844696045,\n"," 'eval_runtime': 14.8694,\n"," 'eval_samples_per_second': 6.792,\n"," 'eval_steps_per_second': 3.43,\n"," 'epoch': 0.09925558312655088,\n"," 'step': 10},\n"," {'loss': 8.6785,\n"," 'grad_norm': 11.103849411010742,\n"," 'learning_rate': 4.4000000000000006e-05,\n"," 'epoch': 0.1488833746898263,\n"," 'step': 15},\n"," {'loss': 8.1888,\n"," 'grad_norm': 7.722441673278809,\n"," 'learning_rate': 4.15e-05,\n"," 'epoch': 0.19851116625310175,\n"," 'step': 20},\n"," {'eval_loss': 7.642266750335693,\n"," 'eval_runtime': 14.9711,\n"," 'eval_samples_per_second': 6.746,\n"," 'eval_steps_per_second': 3.407,\n"," 'epoch': 0.19851116625310175,\n"," 'step': 20},\n"," {'loss': 7.8648,\n"," 'grad_norm': 7.4054646492004395,\n"," 'learning_rate': 3.9000000000000006e-05,\n"," 'epoch': 0.24813895781637718,\n"," 'step': 25},\n"," {'loss': 7.5939,\n"," 'grad_norm': 8.736732482910156,\n"," 'learning_rate': 3.65e-05,\n"," 'epoch': 0.2977667493796526,\n"," 'step': 30},\n"," {'eval_loss': 7.073873043060303,\n"," 'eval_runtime': 14.9803,\n"," 'eval_samples_per_second': 6.742,\n"," 'eval_steps_per_second': 
3.404,\n"," 'epoch': 0.2977667493796526,\n"," 'step': 30},\n"," {'loss': 7.4197,\n"," 'grad_norm': 6.358315467834473,\n"," 'learning_rate': 3.4000000000000007e-05,\n"," 'epoch': 0.34739454094292804,\n"," 'step': 35},\n"," {'loss': 7.1108,\n"," 'grad_norm': 8.101419448852539,\n"," 'learning_rate': 3.15e-05,\n"," 'epoch': 0.3970223325062035,\n"," 'step': 40},\n"," {'eval_loss': 6.582322120666504,\n"," 'eval_runtime': 14.9673,\n"," 'eval_samples_per_second': 6.748,\n"," 'eval_steps_per_second': 3.407,\n"," 'epoch': 0.3970223325062035,\n"," 'step': 40},\n"," {'loss': 6.6779,\n"," 'grad_norm': 8.235506057739258,\n"," 'learning_rate': 2.9e-05,\n"," 'epoch': 0.4466501240694789,\n"," 'step': 45},\n"," {'loss': 6.5665,\n"," 'grad_norm': 9.374942779541016,\n"," 'learning_rate': 2.6500000000000004e-05,\n"," 'epoch': 0.49627791563275436,\n"," 'step': 50},\n"," {'eval_loss': 6.107143402099609,\n"," 'eval_runtime': 14.9112,\n"," 'eval_samples_per_second': 6.773,\n"," 'eval_steps_per_second': 3.42,\n"," 'epoch': 0.49627791563275436,\n"," 'step': 50},\n"," {'loss': 6.3934,\n"," 'grad_norm': 10.224224090576172,\n"," 'learning_rate': 2.4e-05,\n"," 'epoch': 0.5459057071960298,\n"," 'step': 55},\n"," {'loss': 6.2285,\n"," 'grad_norm': 11.039881706237793,\n"," 'learning_rate': 2.15e-05,\n"," 'epoch': 0.5955334987593052,\n"," 'step': 60},\n"," {'eval_loss': 5.638771057128906,\n"," 'eval_runtime': 14.9534,\n"," 'eval_samples_per_second': 6.754,\n"," 'eval_steps_per_second': 3.411,\n"," 'epoch': 0.5955334987593052,\n"," 'step': 60},\n"," {'loss': 5.7524,\n"," 'grad_norm': 12.911364555358887,\n"," 'learning_rate': 1.9e-05,\n"," 'epoch': 0.6451612903225806,\n"," 'step': 65},\n"," {'loss': 5.8365,\n"," 'grad_norm': 13.441301345825195,\n"," 'learning_rate': 1.65e-05,\n"," 'epoch': 0.6947890818858561,\n"," 'step': 70},\n"," {'eval_loss': 5.208266735076904,\n"," 'eval_runtime': 14.9517,\n"," 'eval_samples_per_second': 6.755,\n"," 'eval_steps_per_second': 3.411,\n"," 'epoch': 0.6947890818858561,\n"," 'step': 70},\n"," {'loss': 5.4429,\n"," 'grad_norm': 15.03567123413086,\n"," 'learning_rate': 1.4000000000000001e-05,\n"," 'epoch': 0.7444168734491315,\n"," 'step': 75},\n"," {'loss': 5.5753,\n"," 'grad_norm': 15.359692573547363,\n"," 'learning_rate': 1.1500000000000002e-05,\n"," 'epoch': 0.794044665012407,\n"," 'step': 80},\n"," {'eval_loss': 4.81761360168457,\n"," 'eval_runtime': 14.9377,\n"," 'eval_samples_per_second': 6.761,\n"," 'eval_steps_per_second': 3.414,\n"," 'epoch': 0.794044665012407,\n"," 'step': 80},\n"," {'loss': 5.4166,\n"," 'grad_norm': 18.756885528564453,\n"," 'learning_rate': 9e-06,\n"," 'epoch': 0.8436724565756824,\n"," 'step': 85},\n"," {'loss': 5.3009,\n"," 'grad_norm': 24.592605590820312,\n"," 'learning_rate': 6.5000000000000004e-06,\n"," 'epoch': 0.8933002481389578,\n"," 'step': 90},\n"," {'eval_loss': 4.515504360198975,\n"," 'eval_runtime': 14.8974,\n"," 'eval_samples_per_second': 6.78,\n"," 'eval_steps_per_second': 3.423,\n"," 'epoch': 0.8933002481389578,\n"," 'step': 90},\n"," {'loss': 5.0547,\n"," 'grad_norm': 17.001161575317383,\n"," 'learning_rate': 4.000000000000001e-06,\n"," 'epoch': 0.9429280397022333,\n"," 'step': 95},\n"," {'loss': 5.1573,\n"," 'grad_norm': 14.121624946594238,\n"," 'learning_rate': 1.5e-06,\n"," 'epoch': 0.9925558312655087,\n"," 'step': 100},\n"," {'eval_loss': 4.362726211547852,\n"," 'eval_runtime': 14.8984,\n"," 'eval_samples_per_second': 6.779,\n"," 'eval_steps_per_second': 3.423,\n"," 'epoch': 0.9925558312655087,\n"," 'step': 100},\n"," {'train_runtime': 
779.7681,\n"," 'train_samples_per_second': 1.032,\n"," 'train_steps_per_second': 0.128,\n"," 'total_flos': 3809024409600000.0,\n"," 'train_loss': 6.783313522338867,\n"," 'epoch': 0.9925558312655087,\n"," 'step': 100}]"]},"metadata":{},"execution_count":10}]},{"cell_type":"code","source":["import pandas as pd\n","df=pd.DataFrame(trainer.state.log_history)\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","\n","# Assuming df is already defined, and train_loss and eval_loss are subsets of df\n","train_loss = df[['loss', 'step']]\n","eval_loss = df[['eval_loss', 'step']]\n","\n","# Remove NaN rows in both dataframes\n","train_loss_clean = train_loss.dropna()\n","eval_loss_clean = eval_loss.dropna()\n","\n","# Plotting the loss vs step for train_loss\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting the loss vs step for eval_loss\n","plt.figure(figsize=(5, 2))\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting both losses together\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train and Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":734},"id":"sXjiixD56uwr","outputId":"994c492a-4b62-447b-bb61-aa4c347838e4","executionInfo":{"status":"ok","timestamp":1717833718580,"user_tz":-240,"elapsed":989,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":null,"outputs":[{"output_type":"display_data","data":{"text/plain":["
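The final cell converts `trainer.state.log_history` into a DataFrame and plots training and evaluation loss against step. Because training losses are logged every 5 steps and evaluation losses every 10, each row of the DataFrame has NaNs in the columns it does not cover, which is why the code calls `dropna()` on each subset before plotting; the three figures (train loss, eval loss, and both overlaid) are otherwise straightforward matplotlib line plots. The rendered images are embedded below as base64 PNG output in the notebook JSON.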

"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAAcYAAADvCAYAAABotnRjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA6xUlEQVR4nO3dd3yN9/v48dfJTmQriRhJECNWbSJGi9qUmkWNb6tW0RbVEiNKi/LpBzXrp7T2Km1pG3sTszUaWhErtghiRM7798f7k5OkgiOJnIzr+XjcDzn3uc99rnNlXN73/R4GpZRCCCGEEABYWToAIYQQIiuRwiiEEEIkI4VRCCGESEYKoxBCCJGMFEYhhBAiGSmMQgghRDJSGIUQQohkpDAKIYQQyUhhFEIIIZKRwiiEGbp3746fn5+lwxBCZAIpjCJbMxgMZm1bt261dKgpbN26FYPBwMqVKy0disXt3LmTJk2aULBgQRwcHChSpAgtWrRg8eLFpmPi4uIYPXp0lvs+ipzJxtIBCJEe33//fYrHCxcuJCws7In9pUuXTtf7zJ07F6PRmK5ziCetWLGCDh068OqrrzJw4EA8PDyIjIxk+/btzJ07l7fffhvQhXHMmDEA1KtXz4IRi9xACqPI1rp06ZLi8d69ewkLC3ti/7/FxcXh5ORk9vvY2tqmKT7xbKNHjyYwMJC9e/diZ2eX4rmrV69aKCqR28mlVJHj1atXj7Jly3Lw4EHq1KmDk5MTn332GQBr166lWbNm+Pj4YG9vT7FixRg7diwJCQkpzvHve4xnz57FYDDw1VdfMWfOHIoVK4a9vT1Vq1YlPDw8w2I/c+YM7dq1w9PTEycnJ2rUqMEvv/zyxHHTpk2jTJkyODk54eHhQZUqVVJcirxz5w6DBg3Cz88Pe3t78ufPT8OGDTl06NBT33vlypUYDAa2bdv2xHOzZ8/GYDBw7NgxAC5fvkyPHj0oVKgQ9vb2FChQgFatWnH27Nlnfr5//vmHqlWrPlEUAfLnzw/oXOfLlw+AMWPGmC6Pjx492nTsX3/9Rdu2bfH09MTBwYEqVaqwbt26FOf77rvvMBgMbN++nffff5+8efPi6urKO++8w61bt54Zp8hdpMUocoUbN27QpEkTOnbsSJcuXfDy8gL0H0tnZ2c++ugjnJ2d2bx5MyNHjiQ2NpZJkyY997yLFy/mzp07vP/++xgMBiZOnEibNm04c+ZMuluZV65cISgoiLi4OAYMGEDevHlZsGABLVu2ZOXKlbRu3RrQl3kHDBhA27ZtGThwIA8ePOCPP/5g3759pkuRvXv3ZuXKlfTv35/AwEBu3LjBzp07OXnyJJUqVUr1/Zs1a4azszPLly+nbt26KZ5btmwZZcqUoWzZsgC89dZbHD9+nA8++AA/Pz+uXr1KWFgY586de2anJV9fXzZt2sSFCxcoVKhQqsfky5ePmTNn0qdPH1q3bk2bNm0AKF++PADHjx+nVq1aFCxYkGHDhpEnTx6WL1/Om2++yapVq0x5StS/f3/c3d0ZPXo0ERERzJw5k6ioKNN9XyFQQuQg/fr1U//+sa5bt64C1KxZs544Pi4u7ol977//vnJyclIPHjww7evWrZvy9fU1PY6MjFSAyps3r7p586Zp/9q1axWgfvrpp2fGuWXLFgWoFStWPPWYQYMGKUDt2LHDtO/OnTvK399f+fn5qYSEBKWUUq1atVJlypR55vu5ubmpfv36PfOY1HTq1Enlz59fPX782LQvOjpaWVlZqdDQUKWUUrdu3VKAmjRp0guff968eQpQdnZ26rXXXlMhISFqx44dps+W6Nq1awpQo0aNeuIc9evXV+XKlUvx/TIajSooKEgFBASY9s2fP18BqnLlyurRo0em/RMnTlSAWrt27QvHL3ImuZQqcgV7e3t69OjxxH5HR0fT13fu3OH69evUrl2buLg4/vrrr+eet0OHDnh4eJge165dG9CXQNNr/fr1VKtWjeDgYNM+Z2dnevXqxdmzZzlx4gQA7u7uXLhw4ZmXcN3d3dm3bx+XLl16oRg6dOjA1atXU/QGXblyJUajkQ4dOgA6h3Z2dmzduvWFL0n27NmTX3/9lXr16rFz507Gjh1L7dq1CQgIYPfu3c99/c2bN9m8eTPt27c3ff+uX7/OjRs3aNSoEadPn+bixYspXtOrV68Urfk+ffpgY2PD+vXrXyh2kXNJYRS5QsGCBVO9j3X8+HFat26Nm5sbrq6u5MuXz9Rx5/bt2889b5EiRVI8TiySGXHPKioqipIlSz6xP7GHbVRUFACffPIJzs7OVKtWjYCAAPr168euXbtSvGbixIkcO3aMwoULU61aNUaPHm1W8W7cuDFubm4sW7bMtG/ZsmW8+uqrlChRAtD/6ZgwYQIbNmzAy8uLOnXqMHHiRC5fvmzW52zUqBG//fYbMTExbN++nX79+hEVFUXz5s2f2wHn77//RilFSEgI+fLlS7GNGjUKeLITT0BAQIrHzs7OFChQ4Ln3Q0XuIYVR5ArJW4aJYmJiqFu3LkePHiU0NJSffvqJsLAwJkyYAGDW8Axra+tU9yul0hfwCyhdujQREREsXbqU4OBgVq1aRXBwsKkwALRv354zZ84wbdo0fHx8mDRpEmXKlGHDhg3PPLe9vT1vvvkma9as4fHjx1y8eJFdu3aZWouJBg0axKlTp/jiiy9wcHAgJCSE0qVLc/jwYbM/h5OTE7Vr12b69OmMGDGCW7duPTe+xO/R4MGDCQsLS3UrXry42TEIAdL5RuRiW7du5caNG6xevZo6deqY9kdGRlowqiS+vr5EREQ8sT/xEq+vr69pX548eejQoQMdOnTg0aNHtGnThnHjxvHpp5/i4OAAQIECBejbty99+/bl6tWrVKpUiXHjxtGkSZNnxtGhQwcWLFjApk2bOHnyJEqpJwojQLFixfj444/5+OOPOX36NK+++iqTJ0/mhx9+eOHPXqVKFQCio6MBntoppmjRooAeTtOgQQOzzn369Glee+010+O7d+8SHR1N06ZNXzhOkTNJi1HkWomtveStu0ePHjFjxgxLhZRC06ZN2b9/P3v27DHtu3fvHnPmzMHPz4/AwEBA97hNzs7OjsDAQJRSxMfHk5CQ8MRl4fz58+Pj48PDhw+fG0eDBg3w9PRk2bJlLFu2jGrVquHv7296Pi4ujgcPHqR4TbFixXBxcXnu+Tdt2pTq/sT7fYmXkhPHnMbExDzxOerVq8fs2bNNRTS5a9euPbFvzpw5xMfHmx7PnDmTx48fP/c/CCL3kBajyLWCgoLw8PCgW7duDBgwAIPBwPfff5+pl0FXrVqVaiefbt26MWzYMJYsWUKTJk0YMGAAnp6eLFiwgMjISFatWoWVlf5/7RtvvIG3tze1atXCy8uLkydPMn36dJo1a4aLiwsxMTEUKlSItm3bUqFCBZydndm4cSPh4eFMnjz5uTHa2trSpk0bli5dyr179/jqq69SPH/q1Cnq169P+/btCQwMxMbGhjVr1nDlyhU6duz4zHO3atUKf39/WrR
oQbFixbh37x4bN27kp59+omrVqrRo0QLQl8IDAwNZtmwZJUqUwNPTk7Jly1K2bFm++eYbgoODKVeuHO+99x5FixblypUr7NmzhwsXLnD06NEU7/no0SNTvBEREcyYMYPg4GBatmz53FyIXMKSXWKFyGhPG67xtOEMu3btUjVq1FCOjo7Kx8dHDR06VP32228KUFu2bDEd97ThGqkNUeApwwqSSxyu8bQtcYjGP//8o9q2bavc3d2Vg4ODqlatmvr5559TnGv27NmqTp06Km/evMre3l4VK1ZMDRkyRN2+fVsppdTDhw/VkCFDVIUKFZSLi4vKkyePqlChgpoxY8YzY0wuLCxMAcpgMKjz58+neO769euqX79+qlSpUipPnjzKzc1NVa9eXS1fvvy5512yZInq2LGjKlasmHJ0dFQODg4qMDBQDR8+XMXGxqY4dvfu3apy5crKzs7uiRz/888/6p133lHe3t7K1tZWFSxYUDVv3lytXLnSdEzicI1t27apXr16KQ8PD+Xs7Kw6d+6sbty4YXYuRM5nUCoT/3sshBAW8t1339GjRw/Cw8NN9zCFSI3cYxRCCCGSkcIohBBCJCOFUQghhEhG7jEKIYQQyUiLUQghhEhGCqMQQgiRTI4f4G80Grl06RIuLi6y1poQQuRSSinu3LmDj4+PaXKMp8nxhfHSpUsULlzY0mEIIYTIAs6fP//URbET5fjC6OLiAuhkuLq6WjiarCk+Pp7ff/+dN954I92rzudkkifzSJ7MJ7kyT0bkKTY2lsKFC5tqwrPk+MKYePnU1dVVCuNTxMfH4+TkhKurq/xyPoPkyTySJ/NJrsyTkXky55aadL4RQgghkpHCKIQQQiQjhdEMDx9CaCjExlo6EiGEEC9bjr/HmBE6dYI1a+Cvv2DRIpBRH0LkDEopHj9+TEJCgkXePz4+HhsbGx48eGCxGLIDc/Nka2trWoA8PSxaGLdv386kSZM4ePAg0dHRrFmzhjfffNP0vFKKUaNGMXfuXGJiYqhVqxYzZ84kICAgU+McPBjWrYMlS6BBA+jZM1PfXgjxEjx69Ijo6Gji4uIsFoNSCm9vb86fPy/jrJ/B3DwZDAYKFSqEs7Nzut7PooXx3r17VKhQgZ49e9KmTZsnnp84cSJTp05lwYIF+Pv7ExISQqNGjThx4gQODg6ZFmdQEIwdC599Bv37Q40aEBiYaW8vhMhgRqORyMhIrK2t8fHxwc7OziKFyWg0cvfuXZydnZ876Dw3MydPSimuXbvGhQsXCAgISFfL0aKFsUmTJjRp0iTV55RSfP3114wYMYJWrVoBsHDhQry8vPjxxx/p2LFjZobKJ5/Ali0QFgYdOsD+/eDomKkhCCEyyKNHjzAajRQuXBgnJyeLxWE0Gnn06BEODg5SGJ/B3Dzly5ePs2fPEh8fn30L47NERkZy+fJlGjRoYNrn5uZG9erV2bNnz1ML48OHD3n48KHpcez/eszEx8cTHx+frpj+3/+DKlVsOHbMwMCBCXzzjTFd58sqEvOS3vzkdJIn82SHPMXHx5O4sJDRaLnf48QYlFIWjSOrMzdPSimUUqkWxhf5ecyyhfHy5csAeHl5pdjv5eVlei41X3zxBWPGjHli/++//54h/zPs0ycfY8bUZO5ca9zdD1Gr1qV0nzOrCAsLs3QI2YLkyTxZOU82NjZ4e3tz9+5dHj16ZOlwuHPnjqVDyBael6dHjx5x//59tm/fzuPHj1M89yL3krNsYUyrTz/9lI8++sj0OHEaoDfeeCNDZr5p2hTi4oxMnGjN7NlVePfdx/j7p/u0FhUfH09YWBgNGzaU2TeeQfJknuyQpwcPHnD+/HmcnZ0ztb/CvyVObC2LHDybuXl68OABjo6O1KlT54nva+wLjLfLsoXR29sbgCtXrlCgQAHT/itXrvDqq68+9XX29vbY29s/sd/W1jbDfkk//xx27IA9ewx07WrLzp2QRX//X0hG5ignkzyZJyvnKSEhAYPBgJWVlUXv7SVeFkyMxVL8/PwYNGgQgwYNslgMz2JunqysrDAYDKn+7L3Iz2KWvdvr7++Pt7c3mzZtMu2LjY1l37591KxZ04KR6SK4ZAm4u+tOOMOHWzQcIUQuYTAYnrmNHj06TecNDw+nV69e6YqtXr16WbawviiLthjv3r3L33//bXocGRnJkSNH8PT0pEiRIgwaNIjPP/+cgIAA03ANHx+fFGMdLcXXV3fGadMGJk2C116Dp3SwFUKIDBEdHW36etmyZYwcOZKIiAjTvuTj95RSJCQkYGPz/D/z+fLly9hAszmLthgPHDhAxYoVqVixIgAfffQRFStWZOTIkQAMHTqUDz74gF69elG1alXu3r3Lr7/+atF7Asm1bg39+umv33kHLuWcfjhC5DpKwb17ltn+1+nyuby9vU2bm5sbBoPB9Pivv/7CxcWFDRs2ULlyZezt7dm5cyf//PMPrVq1wsvLC2dnZ6pWrcrGjRtTnNfPz4+vv/7a9NhgMPDtt9/SunVrnJycCAgIYN26denK76pVqyhTpgz29vb4+fkxefLkFM/PmDGDgIAAHBwc8PLyom3btqbnVq5cSVBQEHny5CFv3rw0aNCAe/fupSueZ7Foi7FevXqmbripMRgMhIaGEhoamolRvZivvoKdO+HoUejSRY9zzIAZiYQQmSwuDtI5YUoaWAHuxMYaMWOZQLMMGzaMr776iqJFi+Lh4cH58+dp2rQp48aNw97enoULF9KiRQsiIiIoUqTIU88zZswYJk6cyKRJk5g2bRqdO3cmKioKT0/PF47p4MGDtG/fntGjR9OhQwd2795N3759yZs3L927d+fAgQMMGDCA77//nqCgIG7evMmOHTsA3Uru3LkzY8aMoWPHjty7d48dO3Y8s3akV5btfJNdODjAsmVQubKeAOCLL2DECEtHJYTIrUJDQ2nYsKHpsaenJxUqVDA9Hjt2LGvWrGHdunX079//qefp3r07nTp1AmD8+PFMnTqV/fv307hx4xeOacqUKdSvX5+QkBAASpQowYkTJ5g0aRLdu3fn3Llz5MmTh+bNm+Pi4oKvr6/pSmJ0dDSPHz+mefPm+Pn5YWVlRbly5V44hheRZTvfZCclS8KMGfrrUaN0j1UhRPbi5AR372buFhtr5MKFGDJy8p0qVaqkeHz37l0GDx5M6dKlcXd3x9nZmZMnT3Lu3Llnnqd8+fKmr/PkyYOrqytXr15NU0wnT56kVq1aKfbVqlWL06dPk5CQQMOGDfH19aVo0aJ07dqVRYsWmcYdVqhQgfr16xMcHEz79u2ZO3cut27dSlMc5pLCmEHeeUdvRiO8/TbcuGHpiIQQL8JggDx5LLNl5BDGPHnypHg8ePBg1qxZw/jx49mxYwdHjhyhXLlyz53Y4N/DGwwGw0ubncfFxYVDhw6xZMkSChQowMiRI6lQoQIxMTFYW1vz22+/sXz5cgIDA5k2bRolS5YkMjLypcQCUhgz1DffQIkScOGCXoHjJV4CF0IIs+zatYvu3bvTunVrypUrh7e3N2fPns3UGEqXLs
2uXbueiKtEiRKmqdtsbGxo0KABEydO5I8//uDs2bNs3rwZ0EW5Ro0ajB49msOHD2NnZ8eaNWteWrxyjzEDOTvr+43Vq+tlqqZOhYEDLR2VECI3CwgIYPXq1bRo0QKDwUBISMhLa/ldu3aNI0eOpNhXoEABPv74Y6pWrcrYsWPp0KEDe/bsYfr06cz43z2on3/+mTNnzlCnTh08PDxYv349RqORkiVLsm/fPjZu3EhQUBD+/v6Eh4dz7do1Spcu/VI+A0iLMcO9+iok9kIeMgQOHrRoOEKIXG7KlCl4eHgQFBREixYtaNSoEZUqVXop77V48WLTELzEbe7cuVSqVInly5ezdOlSypYty8iRIwkNDaV79+4AuLu7s3r1al5//XVKly7NrFmzWLJkCWXKlMHV1ZXt27fTvn17SpUqxYgRI5g8efJTV2bKCAb1Mvu8ZgGxsbG4ublx+/btDJkr1RxKwVtvwZo1ULw4HDpEhnXFfhni4+NZv349TZs2zbJTeGUFkifzZIc8PXjwgMjISPz9/S06LtpoNBIbG4urq6ssO/UM5ubpWd/XF6kF8p14CQwGmDcPihSBv/+G3r3lfqMQQmQXUhhfEg8PPZ+qtTUsXgzffWfpiIQQQphDCuNLFBQEY8fqr/v3h5MnLRuPEEKI55PC+JJ98gk0aKCnm+rQAe7ft3REQgghnkUK40tmZQXffw/588Off0KyNZSFEBaWw/se5joZ9f2UwpgJvL3hhx/017NmwcqVlo1HiNwusbds4rRjImdInM3HOp0rOcgA/0zSsCEMGwZffqlnxXFxgUaNLB2VELmTtbU17u7uprk/nZycMGTkvGxmMhqNPHr0iAcPHshwjWcwJ09Go5Fr167h5ORk1hqUzyKFMROFhkJ4OGzaBE2bwtdf6045Fvh9FCLX8/b2BkjzxNgZQSnF/fv3cXR0tEhhzi7MzZOVlRVFihRJdy6lMGYiW1v45Rfo0wfmz4cBA3RP1f/+Vz8nhMg8BoOBAgUKkD9/fuLj4y0SQ3x8PNu3b6dOnTpZdjKErMDcPNnZ2WVIy1sKYyazt9eD/wMDYehQmDkTTp2CFSv02EchROaytrZO9z2p9Lz348ePcXBwkML4DJmdJ7mobQEGAwweDGvX6onHN22CGjV0gRRCCGFZUhgtqEUL2LVLTx136pRelWPTJktHJYQQuZsURgsrXx7274eaNSEmRvdUnTXL0lEJIUTuJYUxC/Dygs2boUsXSEjQnXMGDIDHjy0dmRBC5D5SGLMIBwdYuBDGj9ePp02DZs10K1IIIUTmkcKYhRgM8OmnsGoVODnB77/rS6x//23pyIQQIveQwpgFtWkDO3dCoULw11+6U862bZaOSgghcgcpjFlUxYq6U061anDzpl6hY948S0clhBA5nxTGLKxAAdi6VS9X9fgxvPsufPyx7qAjhBDi5ZDCmMU5OsKSJTBmjH48ZQq0bAmxsZaNSwghciopjNmAwQAjR8Ly5bpQrl8PQUEQGWnpyIQQIueRwpiNtGsH27eDjw8cP67vP27daumohBAiZ5HCmM1UqaI75VSqBNevw+uvw4gRYKHFAYQQIseRwpgNFSwIO3bA//0fKAXjxkHdunD2rKUjE0KI7E8KYzbl5ATffgtLl4KrK+zZAxUq6MdCCCHSTgpjNtehAxw9qmfIiY2FTp2gRw+4e9fSkQkhRPYkhTEH8PPTnXJCQsDKCr77Tt+DPHTI0pEJIUT2I4Uxh7CxgdBQ2LJFTyV3+rRe/HjKFDAaLR2dEEJkH1IYc5g6dfSl1datdU/Vjz+Gpk3hyhVLRyaEENmDFMYcyNNTr9Axa5Zezuq33/SCyL/+aunIhBAi65PCmEMZDPD++3DgAJQrB1evQpMmugX58KGloxNCiKxLCmMOV6aMnhCgf3/9eMoU3YM1IsKycQkhRFYlhTEXcHCAadNg7VrImxcOH9a9VufP1xMECCGESCKFMRdp2VJ3zHn9dYiLg5499bjHmBhLRyaEEFmHFMZcpmBB+P13+OILsLaGZcugalUb/vrLw9KhCSFEliCFMReytoZhw2DXLvD3h6goA8OG1eGtt6wJD7d0dEIIYVlSGHOx6tXhyBF45x0jBoPip5+sqFYNGjbUy1nJ/UchRG6UpQtjQkICISEh+Pv74+joSLFixRg7dixK/mJnGFdX+PbbBKZO3UzXrkasrWHjRnjtNQgOhl9+kQIphMhdsnRhnDBhAjNnzmT69OmcPHmSCRMmMHHiRKZNm2bp0HKcwoXvMm9eAn//DX37gr097N4NzZvrHqwrVkBCgqWjFEKIly9LF8bdu3fTqlUrmjVrhp+fH23btuWNN95g//79lg4tx/Lzg2++gchIGDwY8uTRl1vbt9djIr/7ThZFFkLkbDZpedH58+cxGAwUKlQIgP3797N48WICAwPp1atXhgUXFBTEnDlzOHXqFCVKlODo0aPs3LmTKVOmPPU1Dx8+5GGyqV1iY2MBiI+PJ17+oqcqMS/J8/PKKzB+vC6O06db8c03VkREGOjRA0aNUnz8sZHu3Y04Oloq6syXWp7EkyRP5pNcmScj8vQirzWoNNywq127Nr169aJr165cvnyZkiVLUqZMGU6fPs0HH3zAyJEjX/SUqTIajXz22WdMnDgRa2trEhISGDduHJ9++ulTXzN69GjGjBnzxP7Fixfj5OSUIXHlRvfv2/Drr36sXVuMmBgHANzdH9Cy5T80aXIWR8fHFo5QCCGeLi4ujrfffpvbt2/j6ur6zGPTVBg9PDzYu3cvJUuWZOrUqSxbtoxdu3bx+++/07t3b86cOZPm4JNbunQpQ4YMYdKkSZQpU4YjR44waNAgpkyZQrdu3VJ9TWotxsKFC3P9+vXnJiO3io+PJywsjIYNG2Jra/vMY+/fhwULrJg82YqoKAMAHh6Kfv2M9OtnJG/ezIjYMl4kT7mZ5Ml8kivzZESeYmNjeeWVV8wqjGm6lBofH4+9vT0AGzdupGXLlgCUKlWK6OjotJwyVUOGDGHYsGF07NgRgHLlyhEVFcUXX3zx1MJob29vii05W1tb+cF7DnNyZGsLH3wAvXvD4sV6ooCICAOff27Nf/5jTZ8+MHCgXhMyp5KfJfNInswnuTJPevL0Iq9LU+ebMmXKMGvWLHbs2EFYWBiNGzcG4NKlS+TNwCZDXFwcVlYpQ7S2tsYoK+9anK0tdOsGx4/rHquvvgr37sFXX4Gvr55+7pdfpCerECL7SVNhnDBhArNnz6ZevXp06tSJChUqALBu3TqqVauWYcG1aNGCcePG8csvv3D27FnWrFnDlClTaN26dYa9h0gfa2to2xYOHYL166FePTAa4aef9FAPf38IDYULFywdqRBCmCdNl1Lr1avH9evXiY2NxcMjaY7NXr16ZWgHl2nTphESEkLfvn25evUqPj4+vP/++xnWuUdkHINBr/fYpIle0mruXD204/x5GDUKxozRhbJXL2jcWBdUIYTIitLUYrx//z4PHz40FcWoqCi+/vprIiIiyJ8/f4YF5+Liwtdff01UVBT379/nn
3/+4fPPP8fOzi7D3kNkvJIl9SXVCxdg0SKoW1e3ItetS9mKvHjR0pEKIcST0lQYW7VqxcKFCwGIiYmhevXqTJ48mTfffJOZM2dmaIAi+3JwgLff1vOunjwJH30Enp5JrcgiRaBVK30JVu5FCiGyijQVxkOHDlG7dm0AVq5ciZeXF1FRUSxcuJCpU6dmaIAiZyhVCiZP1q3ERYugTp2kVmSzZlC0KIwdC5cuWTpSIURul6bCGBcXh4uLCwC///47bdq0wcrKiho1ahAVFZWhAYqcJbEVuW2bbkV++KFuRZ47ByNH6lbkm2/Chg3SihRCWEaaCmPx4sX58ccfOX/+PL/99htvvPEGAFevXpVB9MJspUrBlCm6FfnDD7oVmZAAa9dC06Z62EePHrBwofRqFUJknjQVxpEjRzJ48GD8/PyoVq0aNWvWBHTrsWLFihkaoMj5HBygc2fdijxxIqkVefGi7tnarRsULgwlSuiJBZYvh6tXLR21ECKnStNwjbZt2xIcHEx0dLRpDCNA/fr1ZYyhSJfSpXUrcvx42LEDNm/W24EDcPq03mbP1seWKwevv663OnXA3d2ioQshcog0FUYAb29vvL29ufC/a1yFChXK0MH9IndzcICGDfUGcPs2bN+eVCj/+AP+/FNv//0vWFlB5cpJhbJWLb1klhBCvKg0XUo1Go2Ehobi5uaGr68vvr6+uLu7M3bsWJmuTbwUbm7QogX85z9w9Ki+lLp8OfTpo8dNGo0QHg4TJkCjRuDhAbVr62Eh27bJGpJCCPOlqcU4fPhw5s2bx5dffkmtWrUA2LlzJ6NHj+bBgweMGzcuQ4MU4t/y5YN27fQG+n7kli26Nblpk+7lunOn3kJDoWBBPbl5r166yAohxNOkqTAuWLCAb7/91rSqBkD58uUpWLAgffv2lcIoMl3BgtCli96UgsjIpMuuYWG6cA4dCp9/rotjTl8BRAiRdmm6lHrz5k1KlSr1xP5SpUpx8+bNdAclRHoYDHrCgHff1UtjXbgA8+bpjj2xsXq6On9/3dv1zz8tHa0QIqtJU2GsUKEC06dPf2L/9OnTKV++fLqDEiIj2dtDz55w7Bj8/LOeu/XxYz0+snx5PfH55s26pSmEEGm6lDpx4kSaNWvGxo0bTWMY9+zZw/nz51m/fn2GBihERrGy0tPPNWsG+/frluOqVfDrr3qrVAkGD9b3LW3S3F9bCJHdpanFWLduXU6dOkXr1q2JiYkhJiaGNm3acPz4cb7//vuMjlGIDFetmu7VeuoU9OsHjo56Tcm334bixfUQkLt3LR2lEMIS0lQYAXx8fBg3bhyrVq1i1apVfP7559y6dYt58+ZlZHxCvFTFisH06boX65gxurdrVBQMGqTnbR0+HC5ftnSUQojMlObCKERO8sorehLzqCiYNQsCAuDWLT0Dj68vvPce/PWXpaMUQmQGKYxCJOPoCO+/r1f+WL0aataER4/g22+hfHlbxo+vxt9/WzpKIcTLJIVRiFRYW0Pr1rB7t54koFUrMBgU+/cXoFIlGyZMkNl0hMipXqjvXZs2bZ75fExMTHpiESJLqlVLb3/++ZiuXW9x9Gh+hg2DpUt1S7JyZUtHKITISC9UGN2eM5eWm5sb77zzTroCEiKrKlUKRo/ew82bzRgyxIYjR3Tv1o8+0h13nJwsHaEQIiO8UGGcP3/+y4pDiGzBYICuXRXNm+ueq0uWJI2HnD07aTUQIUT2JfcYhUiD/Pn1dHM//6wXUY6MhDfegO7d4cYNS0cnhEgPKYxCpEOzZnD8OHzwgW5NLlig52RdulSmmBMiu5LCKEQ6ubjA1Km6B2uZMnDtGnTqpNePPHfO0tEJIV6UFEYhMkiNGnpaudBQsLODX37RhXLaNEhIsHR0QghzSWEUIgPZ2UFICBw5ood43L0LAwZAcLC+5CqEyPqkMArxEpQuDdu3w4wZ+lLr3r1QsSKMGgUPH1o6OiHEs0hhFOIlsbKCPn3gxAlo2VLPlBMaCq++Crt2WTo6IcTTyKpzQrxkhQrBjz/qsY79++vJyIODoUEDKFdO34cMDNStTHd3S0crhJDCKEQmMBigbVuoXx+GDIF582DjRr0l5+Oji2RgYFLBDAwET0/LxC1EbiSFUYhM5OGh51cdOFDfdzxxImm7cAEuXdLbvwuml1fqBTNfPst8DiFyMimMQlhAuXJ6S+72bX2Z9fjxlAUzKgquXNHbli0pX/PKK1C2LNSuDa+/roeMODhk3ucQIieSwihEFuHmBtWr6y25u3dTL5iRkXD9OmzdqrexY3VRDAqC117ThbJqVbC1tcSnESL7ksIoRBbn7AxVqugtubg4iIiAgwd1S3LzZrh8Wf+7ebMeT5knT1Jr8rXX9JARa2vLfA4hsgspjEJkU05OutBVrAjvvqvnZo2I0EVxyxa93bgBv/6qN9Ct0rp1k1qUZcvqYSVCiCRSGIXIIQwGvWZkqVLQty8YjXDsWFKh3LZN38dct05voO9R1quX1KIsWVKfR4jcTAqjEDmUlRWUL6+3QYP0fK2HDycVyh079D3KlSv1BuDtDXXq6K1uXd3zVVqUIreRwihELmFtnXSvcuhQPRNPeHhSody1S9+jXL5cb6DHT9aurYtknTp61h65RylyOimMQuRStra6B2tQEIwYAQ8e6EK5bZue53XXLrh5E9au1RvoeV+Dg5NalJUr64nThchJpDAKIQA91KN2bb2BblEeOpRUKHfsgNhY2LBBbwCOjrqwJl5+rV4dbOSvisjm5EdYCJEqW9ukcZVDh+p7lH/8oYtkYrG8cQM2bdIb6NZj1arW+PiUokoVKFjQsp9BiLSQwiiEMIu1ddLwkIEDda/XkyeTCuW2bfoe5a5dVkBJtm1TzJsHzZtbOnIhXoz0NxNCpImVlZ63tU8fWLpUz/F6+jTMnv0YX9/bXL1qoEUL/fy9e5aOVgjzSWEUQmQIgwGKF4cePRSTJm1n4MAEAGbNgkqV4MABCwcohJmyfGG8ePEiXbp0IW/evDg6OlKuXDkOyG+YEFmanZ2RSZOMhIXppbROnYKaNWH8eH2vUoisLEsXxlu3blGrVi1sbW3ZsGEDJ06cYPLkyXh4eFg6NCGEGRo0gD//1GtRPn4Mw4frYR6RkZaOTIiny9KdbyZMmEDhwoWZP3++aZ+/v78FIxJCvChPTz1hwPffQ//+enxkhQowfTp07SpT0ImsJ0sXxnXr1tGoUSPatWvHtm3bKFiwIH379uW999576msePnzIw4cPTY9jY2MBiI+PJz4+/qXHnB0l5kXy82ySJ/M8LU+dOun1Inv0sGb3biu6dYOffjIyfXoCnp6WiNTy5GfKPBmRpxd5rUEppdL8Ti+Zw/9WXP3oo49o164d4eHhDBw4kFmzZtGtW7dUXzN69GjGjBnzxP7Fixfj5OT0UuMVQjxfQgKsXh3A0qWlSEiwIm/e+wwYcIgKFa5bOjSRg8XFxfH2229z+/ZtXF1dn3lsli6MdnZ2VKlShd27d5v2DRgwgPDwcPbs2ZPqa1JrMRYuXJjr168/Nxm5VXx8
[Cell outputs: matplotlib figures rendered as base64-encoded PNG image data; binary payloads omitted here.]
notebook_login\n","\n","notebook_login()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":162,"referenced_widgets":["e4a967601e1e4629ac17bb6cb3b4e7c5","7ab5cd64bd6b4e9883fd4fadb464a71d","8b4eb51452894ddeada282d9eb7f22aa","eb74146abbdf484dad8473972c415956","3b58b9bf6fb440419cfa5c18308d0253","daba3779b0bb415cbe29aedf72487ce5","1e494c423dfb40ee9cd9d3a74f3700ca","ceef5b81e8484244b44bb038569e6b8a","96d917fca2cc4695a5fdc907f3e1f829","b62c04f8302046a3bff738ca738e0834","898fce8a428d4139ab1e84d85be9af89","f262f2879274428e88bd0029b77410fa","8f498432f7f2400ea280e9089557d2a5","97207133a8cc4c5c93b0b5bb0ea1d7dd","a1d6b1cad8b34320b79c918c54d0bed4","8ad425e5ef7748248cee1eb249c33247","4ffdf150443b4d89bacda9bded8812eb","c378316066314e3388b3966dbaec40d3","fe6a0c2f7afe421692cde16def6b084f","2095498ebfb04f06a9db004fa6d908ee","474b9b0f76304b01b2a5d6ddcb293eda","0cd90ea445c142548474f5a0792b071b","b96a5a3a0188419ca596f1ebb92b0e3a","a7f508afa2f740b59e790014c2e4f573","51612bb110964f3e98a53c030a2e89ac","e96ea52802dd4922affaf266dc2f648e","038962232cc842c48a64a82df56002ed","c8e229b9cae94e169e882861bcf50fba","ed4385dbf730460f810a926f7926ea95","9538dfa8de9240baa76bca225ca571be","b49ff381642a4f96b2814f82997a1bbc","dd43aa84997641a19d68e4d9c37ef0c1"]},"id":"dI9BuDXp65zX","outputId":"a21e96a1-9b63-451b-a650-dbdc8f0ae872","executionInfo":{"status":"ok","timestamp":1717834099245,"user_tz":-240,"elapsed":656,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":null,"outputs":[{"output_type":"display_data","data":{"text/plain":["VBox(children=(HTML(value='
"}},"8b4eb51452894ddeada282d9eb7f22aa":{"model_module":"@jupyter-widgets/controls","model_name":"PasswordModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"PasswordModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"PasswordView","continuous_update":true,"description":"Token:","description_tooltip":null,"disabled":false,"layout":"IPY_MODEL_b62c04f8302046a3bff738ca738e0834","placeholder":"​","style":"IPY_MODEL_898fce8a428d4139ab1e84d85be9af89","value":""}},"eb74146abbdf484dad8473972c415956":{"model_module":"@jupyter-widgets/controls","model_name":"CheckboxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"CheckboxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"CheckboxView","description":"Add token as git credential?","description_tooltip":null,"disabled":false,"indent":true,"layout":"IPY_MODEL_f262f2879274428e88bd0029b77410fa","style":"IPY_MODEL_8f498432f7f2400ea280e9089557d2a5","value":true}},"3b58b9bf6fb440419cfa5c18308d0253":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ButtonView","button_style":"","description":"Login","disabled":false,"icon":"","layout":"IPY_MODEL_97207133a8cc4c5c93b0b5bb0ea1d7dd","style":"IPY_MODEL_a1d6b1cad8b34320b79c918c54d0bed4","tooltip":""}},"daba3779b0bb415cbe29aedf72487ce5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_8ad425e5ef7748248cee1eb249c33247","placeholder":"​","style":"IPY_MODEL_4ffdf150443b4d89bacda9bded8812eb","value":"\nPro Tip: If you don't already have one, you can create a dedicated\n'notebooks' token with 'write' access, that you can then easily reuse for all\nnotebooks. 
"}},"1e494c423dfb40ee9cd9d3a74f3700ca":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":"center","align_self":null,"border":null,"bottom":null,"display":"flex","flex":null,"flex_flow":"column","grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":"50%"}},"ceef5b81e8484244b44bb038569e6b8a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"96d917fca2cc4695a5fdc907f3e1f829":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b62c04f8302046a3bff738ca738e0834":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"898fce8a428d4139
ab1e84d85be9af89":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f262f2879274428e88bd0029b77410fa":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"8f498432f7f2400ea280e9089557d2a5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"97207133a8cc4c5c93b0b5bb0ea1d7dd":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a1d6b1cad8b34320b79c918c54d0bed4":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","button_color":null,"font_weight":""}},"8ad425e5ef7748248cee1eb249c33247":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_vi
ew_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4ffdf150443b4d89bacda9bded8812eb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"c378316066314e3388b3966dbaec40d3":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fe6a0c2f7afe421692cde16def6b084f","placeholder":"​","style":"IPY_MODEL_2095498ebfb04f06a9db004fa6d908ee","value":"Connecting..."}},"fe6a0c2f7afe421692cde16def6b084f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2095498ebfb04f06a9db004fa6d908ee":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"474b9b0f76304b01b2a5d6ddcb293eda":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_versi
on":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_51612bb110964f3e98a53c030a2e89ac","placeholder":"​","style":"IPY_MODEL_e96ea52802dd4922affaf266dc2f648e","value":"Token is valid (permission: write)."}},"0cd90ea445c142548474f5a0792b071b":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_038962232cc842c48a64a82df56002ed","placeholder":"​","style":"IPY_MODEL_c8e229b9cae94e169e882861bcf50fba","value":"Your token has been saved in your configured git credential helpers (store)."}},"b96a5a3a0188419ca596f1ebb92b0e3a":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ed4385dbf730460f810a926f7926ea95","placeholder":"​","style":"IPY_MODEL_9538dfa8de9240baa76bca225ca571be","value":"Your token has been saved to /root/.cache/huggingface/token"}},"a7f508afa2f740b59e790014c2e4f573":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_b49ff381642a4f96b2814f82997a1bbc","placeholder":"​","style":"IPY_MODEL_dd43aa84997641a19d68e4d9c37ef0c1","value":"Login 
successful"}},"51612bb110964f3e98a53c030a2e89ac":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e96ea52802dd4922affaf266dc2f648e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"038962232cc842c48a64a82df56002ed":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c8e229b9cae94e169e882861bcf50fba":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ed4385dbf730460f810a926f7926ea95":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":n
ull,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9538dfa8de9240baa76bca225ca571be":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b49ff381642a4f96b2814f82997a1bbc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dd43aa84997641a19d68e4d9c37ef0c1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ae6e90f688934a989b6dd9cbfc371b34":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_0f436e92314d478f9ca1911225de6563","IPY_MODEL_89c8ddb4489b4368bf51fcb7ae6ddce5","IPY_MODEL_e1fa32ab6bb4416892e107bfc7561e24"],"layout":"IPY_MODEL_ede18bb7cdfb4ed19ea7e16087ee2504"}},"0f436e92314d478f9ca1911225de6563":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e72cb66b5dd1451a99f235a462249b09","placeholder":"​","style":"IPY_MODEL_6ad6151ba5fe46018e33ebad2796efee","value":"mnist_train_small.csv: 
100%"}},"89c8ddb4489b4368bf51fcb7ae6ddce5":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_48422cd416904624ac510f8c614b684d","max":36523880,"min":0,"orientation":"horizontal","style":"IPY_MODEL_b1f1d3751c914783ba042ac523f568db","value":36523880}},"e1fa32ab6bb4416892e107bfc7561e24":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_52e73db4aec94ff2aa1908f7b7a83669","placeholder":"​","style":"IPY_MODEL_b1c2ac4c1bf84fc190681dc1650226d5","value":" 36.5M/36.5M [00:06<00:00, 4.21MB/s]"}},"ede18bb7cdfb4ed19ea7e16087ee2504":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e72cb66b5dd1451a99f235a462249b09":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6ad6151ba5fe46018e33ebad2796efee":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.
5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"48422cd416904624ac510f8c614b684d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b1f1d3751c914783ba042ac523f568db":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"52e73db4aec94ff2aa1908f7b7a83669":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b1c2ac4c1bf84fc190681dc1650226d5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"73f8f0d412364d56a7ab1abbf906d39b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_6b0dc598f7b3443380390597cb3a37ab","IPY_MODEL_5850
791567884e33a14ddffc3365bbaa","IPY_MODEL_ca641cfa3b024da892a364d242ea376d"],"layout":"IPY_MODEL_0a2ce165c5f54624b82252fddb1c17aa"}},"6b0dc598f7b3443380390597cb3a37ab":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1fc2ec0ca0e9496788d59c223d7cab16","placeholder":"​","style":"IPY_MODEL_51843f532b53469583085b0c2e855959","value":"events.out.tfevents.1717832929.6fbbc2107a5b.9774.0: 100%"}},"5850791567884e33a14ddffc3365bbaa":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_8e5afb4564234ec49196ee8ac93a8f31","max":12968,"min":0,"orientation":"horizontal","style":"IPY_MODEL_13a241f631f04970b2f6b7e727d0240a","value":12968}},"ca641cfa3b024da892a364d242ea376d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3ef83bb6ba8a451bb5e8034d3abd091e","placeholder":"​","style":"IPY_MODEL_9f93ca71a22a41939adb3a7be3a53efe","value":" 13.0k/13.0k [00:01<00:00, 
11.8kB/s]"}},"0a2ce165c5f54624b82252fddb1c17aa":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1fc2ec0ca0e9496788d59c223d7cab16":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"51843f532b53469583085b0c2e855959":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8e5afb4564234ec49196ee8ac93a8f31":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"13a241f631f04970b2
f6b7e727d0240a":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"3ef83bb6ba8a451bb5e8034d3abd091e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9f93ca71a22a41939adb3a7be3a53efe":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a39c7d162fb840b6b99277c2cec673e7":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_ef46eb872c2c4fe5ad41daed2f9aa016","IPY_MODEL_065e74e5a7f84a6797cb2ad7d0ce8a26","IPY_MODEL_c10ca1c714fa49a7a16097081db01b98"],"layout":"IPY_MODEL_1fddc8c5446149b0a6137ec91b1467e4"}},"ef46eb872c2c4fe5ad41daed2f9aa016":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a27f4474258b4de795496da3d43746d4","placeholder":"​","style":"IPY_MODEL_75c590c533034f3fa61404b96cc146f7","value":"model.safetensors: 
100%"}},"065e74e5a7f84a6797cb2ad7d0ce8a26":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_f64a381eeb494b6d82c580e3c72c86f8","max":1089213696,"min":0,"orientation":"horizontal","style":"IPY_MODEL_0ae51b3df8634614b08f9ecaa1720c15","value":1089213696}},"c10ca1c714fa49a7a16097081db01b98":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_8120d8d649e049ccbf765354364273dc","placeholder":"​","style":"IPY_MODEL_295064bc424f4b5f8dd85d4fa3c48707","value":" 1.09G/1.09G [00:55<00:00, 35.4MB/s]"}},"1fddc8c5446149b0a6137ec91b1467e4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a27f4474258b4de795496da3d43746d4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"75c590c533034f3fa61404b96cc146f7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version"
:"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f64a381eeb494b6d82c580e3c72c86f8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0ae51b3df8634614b08f9ecaa1720c15":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"8120d8d649e049ccbf765354364273dc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"295064bc424f4b5f8dd85d4fa3c48707":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"186f60a15b624444a2c15741135bfecf":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_f52e27f1b3a14f539888d63566ddcc16","IPY_MODEL_
389239a6254b4709bf2860f50d462585","IPY_MODEL_89b413a11e5249b697eac51c7a57c866"],"layout":"IPY_MODEL_e2c431ee1d3a40429c7b150437b477cc"}},"f52e27f1b3a14f539888d63566ddcc16":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_03357f63363445abb35a0f46e1561fc5","placeholder":"​","style":"IPY_MODEL_1d27710f00624fdb8202ac6feca4db26","value":"Upload 6 LFS files: 100%"}},"389239a6254b4709bf2860f50d462585":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_67f4e1bd12d24c41b9b9e6920bf24c6f","max":6,"min":0,"orientation":"horizontal","style":"IPY_MODEL_380faab767b94b0f8e57c0e141af7f9b","value":6}},"89b413a11e5249b697eac51c7a57c866":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c7cf7d09357a4051b91dad47ba80b67c","placeholder":"​","style":"IPY_MODEL_dfc7d1d2ebe641158712083c034d3bf0","value":" 6/6 [00:55<00:00, 
55.74s/it]"}},"e2c431ee1d3a40429c7b150437b477cc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"03357f63363445abb35a0f46e1561fc5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1d27710f00624fdb8202ac6feca4db26":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"67f4e1bd12d24c41b9b9e6920bf24c6f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"380faab767b94b0f8
e57c0e141af7f9b":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"c7cf7d09357a4051b91dad47ba80b67c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dfc7d1d2ebe641158712083c034d3bf0":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7f5b5b332e2c4723a6b3762974aa6c7b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_86c3399153ab45e79d83a67d959da0a0","IPY_MODEL_7755a6100ad044329b2ff9dca50a5040","IPY_MODEL_f4b8e016a67f453e84e63a8f5421a59f"],"layout":"IPY_MODEL_cd054ecdd884423b924c0a019aeed593"}},"86c3399153ab45e79d83a67d959da0a0":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_6322d45c4fc04dcd8f012a05c5e48271","placeholder":"​","style":"IPY_MODEL_3e0b3b4865964fa2b20ef6ec03f961e9","value":"mnist_test.csv: 
100%"}},"7755a6100ad044329b2ff9dca50a5040":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_7072e9743b554cd69af154fbda8434a2","max":18289443,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6fb4d882166240aeb383fb8b2f26b7af","value":18289443}},"f4b8e016a67f453e84e63a8f5421a59f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3d4e0b25f25b46beb9a544bb4c9645fc","placeholder":"​","style":"IPY_MODEL_73bb1b6f1b1c4e5f8638d66fe67e48a6","value":" 18.3M/18.3M [00:03<00:00, 6.60MB/s]"}},"cd054ecdd884423b924c0a019aeed593":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6322d45c4fc04dcd8f012a05c5e48271":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3e0b3b4865964fa2b20ef6ec03f961e9":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.
5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7072e9743b554cd69af154fbda8434a2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6fb4d882166240aeb383fb8b2f26b7af":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"3d4e0b25f25b46beb9a544bb4c9645fc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"73bb1b6f1b1c4e5f8638d66fe67e48a6":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8ec3eaf57bcd429b9255b7866a5f705b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_11afdb38600049dd896cad19113acf1a","IPY_MODEL_d9b0
093af98945df8e3a501119a9ccfb","IPY_MODEL_7020fcea7fd34c4ca767aac298139693"],"layout":"IPY_MODEL_9f128a00b66f4eb4a75d36a93a288a9b"}},"11afdb38600049dd896cad19113acf1a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_75ba9ea6143b4010bab79b12ff634cc3","placeholder":"​","style":"IPY_MODEL_c56207648d714cc3a5b098337d7b759a","value":"spiece.model: 100%"}},"d9b0093af98945df8e3a501119a9ccfb":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_06d62b1869534080b8876716348a73ac","max":1912529,"min":0,"orientation":"horizontal","style":"IPY_MODEL_eea14dea9404401f85603cf01acaffc4","value":1912529}},"7020fcea7fd34c4ca767aac298139693":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_058a323af0864d06abfbffa0a27e9244","placeholder":"​","style":"IPY_MODEL_a2a295ecf7a14d729f354c828bb04d83","value":" 1.91M/1.91M [00:01<00:00, 
12.9MB/s]"}},"9f128a00b66f4eb4a75d36a93a288a9b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"75ba9ea6143b4010bab79b12ff634cc3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c56207648d714cc3a5b098337d7b759a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"06d62b1869534080b8876716348a73ac":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"eea14dea9404401f85
603cf01acaffc4":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"058a323af0864d06abfbffa0a27e9244":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a2a295ecf7a14d729f354c828bb04d83":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9849ad63117547cbaee0236349cd48f5":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_efbfee009a484d37a89410e2767509fd","IPY_MODEL_344b27e8aeab43c491509ab64bb7b21b","IPY_MODEL_d6485e64808a49789d9411af81edb9b4"],"layout":"IPY_MODEL_a6219a4cfdca4b39b6be31a1f07a1459"}},"efbfee009a484d37a89410e2767509fd":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_4cb635704e9f4c7f905f7c9fd838199b","placeholder":"​","style":"IPY_MODEL_8eeea6a9f1d34157801842f073a7d6bd","value":"training_args.bin: 
100%"}},"344b27e8aeab43c491509ab64bb7b21b":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_e9e3500adea74570a4f7207cf457cc33","max":5176,"min":0,"orientation":"horizontal","style":"IPY_MODEL_cb8d666bfe2d4d6eb71523ae92f3aecb","value":5176}},"d6485e64808a49789d9411af81edb9b4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_721d5aad3119491e8f94a4290070f555","placeholder":"​","style":"IPY_MODEL_23c922977e164a7d89ae074bac6bad32","value":" 5.18k/5.18k [00:00<00:00, 5.52kB/s]"}},"a6219a4cfdca4b39b6be31a1f07a1459":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4cb635704e9f4c7f905f7c9fd838199b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"8eeea6a9f1d34157801842f073a7d6bd":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_m
odel_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e9e3500adea74570a4f7207cf457cc33":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cb8d666bfe2d4d6eb71523ae92f3aecb":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"721d5aad3119491e8f94a4290070f555":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"23c922977e164a7d89ae074bac6bad32":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"746187b24ab84950822f0eca5ebb58e2":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_e77acc413b24412d8f66e1e47e2ec5d5","IPY_MODEL_60a2e707c6f1
450cb5a4c526f3319376","IPY_MODEL_8df809c056fb4a7c9783b3ed9ae40a2f"],"layout":"IPY_MODEL_fbb132ff7d7b43a78c7b25c6ae6bebca"}},"e77acc413b24412d8f66e1e47e2ec5d5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5cb7f7ab950b4f49a4a824948154691e","placeholder":"​","style":"IPY_MODEL_89992ca769754d61b1c9c558032ee812","value":"config.json: 100%"}},"60a2e707c6f1450cb5a4c526f3319376":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_d9dd172d20e74fdf9e9f1a8c1b44ad62","max":1557,"min":0,"orientation":"horizontal","style":"IPY_MODEL_4e624ff48e154a9eb78c23588575c438","value":1557}},"8df809c056fb4a7c9783b3ed9ae40a2f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_143e1c8353ae4b7d8c8faf3c0ec37e23","placeholder":"​","style":"IPY_MODEL_ecc148272f7945beaf289f317e002b1e","value":" 1.56k/1.56k [00:00<00:00, 
128kB/s]"}},"fbb132ff7d7b43a78c7b25c6ae6bebca":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5cb7f7ab950b4f49a4a824948154691e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"89992ca769754d61b1c9c558032ee812":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d9dd172d20e74fdf9e9f1a8c1b44ad62":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4e624ff48e154a9eb78
c23588575c438":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"143e1c8353ae4b7d8c8faf3c0ec37e23":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ecc148272f7945beaf289f317e002b1e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"41382dbda6c543fea52ae9358f3bc484":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_379e3ddb5a0a4ebb94fd08107dd36b9d","IPY_MODEL_8fc6be46fc1a4926b2f0691e6d71a102","IPY_MODEL_17963fcd1df94c8fbf1d824abb1ee51c"],"layout":"IPY_MODEL_f5409c8fbab14f1189246f79a0cc35c5"}},"379e3ddb5a0a4ebb94fd08107dd36b9d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c08946b0f4504ed3bc4bb632939ca6ba","placeholder":"​","style":"IPY_MODEL_630e45546c10408da881c7610cb0aaa5","value":"model.safetensors: 
100%"}},"8fc6be46fc1a4926b2f0691e6d71a102":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_4803ece45acb4b7baf9fe89278ae1ff1","max":1089213696,"min":0,"orientation":"horizontal","style":"IPY_MODEL_9c21467a6f97410490c13fb743a1f225","value":1089213696}},"17963fcd1df94c8fbf1d824abb1ee51c":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_2a741067cb5e4937aa5985051ac5aabc","placeholder":"​","style":"IPY_MODEL_187d698e54114040acefff20ae3cc5ee","value":" 1.09G/1.09G [00:52<00:00, 18.3MB/s]"}},"f5409c8fbab14f1189246f79a0cc35c5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c08946b0f4504ed3bc4bb632939ca6ba":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"630e45546c10408da881c7610cb0aaa5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version"
:"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4803ece45acb4b7baf9fe89278ae1ff1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9c21467a6f97410490c13fb743a1f225":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"2a741067cb5e4937aa5985051ac5aabc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"187d698e54114040acefff20ae3cc5ee":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"88bef20b6ced40edbd26506ca1f2cbbe":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_c9b4702932924a89b7545a29684b0957","IPY_MODEL_
56cf43f5ded54117aef82f7200a260db","IPY_MODEL_129dc288ac8e408daa2a9f6425fb5515"],"layout":"IPY_MODEL_260784357d8d422aa7c34575d9965157"}},"c9b4702932924a89b7545a29684b0957":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3bbe3314f549423998061f7cfb65acb6","placeholder":"​","style":"IPY_MODEL_58142236ecc34ecf88c28a1b32a4b84d","value":"generation_config.json: 100%"}},"56cf43f5ded54117aef82f7200a260db":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_c9041310bfeb49d995b5645e3341ab25","max":257,"min":0,"orientation":"horizontal","style":"IPY_MODEL_63336213355949ccab9abe89238df475","value":257}},"129dc288ac8e408daa2a9f6425fb5515":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_48a7448a69514a718e76a0ddc4f30e0c","placeholder":"​","style":"IPY_MODEL_fcaa6719d5f94bc1971ea3cee6c14111","value":" 257/257 [00:00<00:00, 
23.6kB/s]"}},"260784357d8d422aa7c34575d9965157":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3bbe3314f549423998061f7cfb65acb6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"58142236ecc34ecf88c28a1b32a4b84d":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"c9041310bfeb49d995b5645e3341ab25":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"63336213355949ccab
9abe89238df475":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"48a7448a69514a718e76a0ddc4f30e0c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fcaa6719d5f94bc1971ea3cee6c14111":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"68c3da25ae554b04aa9b2ab0d1592b64":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_c427332d607d414ebebf8fa496a39216","IPY_MODEL_ea928406398043369e74379d79b6f4c1","IPY_MODEL_17af94ed122841549d44d862df2a4068"],"layout":"IPY_MODEL_6ae37d7bc12c4b02be6c49923d5873d2"}},"c427332d607d414ebebf8fa496a39216":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5bd40c9e41e143b7b1c554f86f0bf4b0","placeholder":"​","style":"IPY_MODEL_731d166984d64a429a2adf30297e83f1","value":"tokenizer_config.json: 
100%"}},"ea928406398043369e74379d79b6f4c1":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_1c7859ed345e45e0aa7925a675f9bf3c","max":20120,"min":0,"orientation":"horizontal","style":"IPY_MODEL_62c2f8139ddd489a8a80fefac0765190","value":20120}},"17af94ed122841549d44d862df2a4068":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_76f0e1d5eca34d0ca0c369b5d11e44e8","placeholder":"​","style":"IPY_MODEL_e54ea775cc674701bf9b1f128417c6fb","value":" 20.1k/20.1k [00:00<00:00, 1.71MB/s]"}},"6ae37d7bc12c4b02be6c49923d5873d2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5bd40c9e41e143b7b1c554f86f0bf4b0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"731d166984d64a429a2adf30297e83f1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","
_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"1c7859ed345e45e0aa7925a675f9bf3c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"62c2f8139ddd489a8a80fefac0765190":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"76f0e1d5eca34d0ca0c369b5d11e44e8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e54ea775cc674701bf9b1f128417c6fb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a54593d03c69409cabccf719b0e6c0d8":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_a1363716fc8c49a584481b857ef964d8","IPY_MODEL_3a15072417
1f4cd89e7eb7e0759f052d","IPY_MODEL_61eba9f4c0ec45948b403455b304ef57"],"layout":"IPY_MODEL_9ba438fac2b849d1b32988aca75f7e1b"}},"a1363716fc8c49a584481b857ef964d8":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fd937838e040408e851846ecb5ec8424","placeholder":"​","style":"IPY_MODEL_fc73faa708e24a608c854ab969307593","value":"spiece.model: 100%"}},"3a150724171f4cd89e7eb7e0759f052d":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_47f6928fe8fa43369edc7b9b8078c7f5","max":1912529,"min":0,"orientation":"horizontal","style":"IPY_MODEL_e8f142d5af6244bea2f7e3f8908f4121","value":1912529}},"61eba9f4c0ec45948b403455b304ef57":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1dbc916fe74d4d6bb90dff70ed6b4505","placeholder":"​","style":"IPY_MODEL_bece2da2b54b493a9c080a4ac25a6efb","value":" 1.91M/1.91M [00:00<00:00, 
8.16MB/s]"}},"9ba438fac2b849d1b32988aca75f7e1b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fd937838e040408e851846ecb5ec8424":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fc73faa708e24a608c854ab969307593":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"47f6928fe8fa43369edc7b9b8078c7f5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e8f142d5af6244bea2
f7e3f8908f4121":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"1dbc916fe74d4d6bb90dff70ed6b4505":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"bece2da2b54b493a9c080a4ac25a6efb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d5cecfa160f14c35980e2dcbf9de9371":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_0d7feaad7b5046c382142ac0839a7dd6","IPY_MODEL_ab4d07ba885c479b983424b4ca25bda6","IPY_MODEL_a4fe1cc239e543a59331847bab4f232f"],"layout":"IPY_MODEL_3d5f4941262d409aa51abbd4fd3b5b93"}},"0d7feaad7b5046c382142ac0839a7dd6":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_9ced2a7525b34f59a07475ce0481366a","placeholder":"​","style":"IPY_MODEL_6fdf8ed2cdb844b38067397938433fa4","value":"tokenizer.json: 
100%"}},"ab4d07ba885c479b983424b4ca25bda6":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_2241247c6e3841a49c4f1cacbf0d7a8b","max":6597509,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6e4b5eced7de4fd49963ad5fdcfdfb3e","value":6597509}},"a4fe1cc239e543a59331847bab4f232f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_d07bbf40c40243f9a19b77f0006c17f9","placeholder":"​","style":"IPY_MODEL_3f9f6d6c8c9e4668a5919972bf492503","value":" 6.60M/6.60M [00:00<00:00, 7.08MB/s]"}},"3d5f4941262d409aa51abbd4fd3b5b93":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9ced2a7525b34f59a07475ce0481366a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6fdf8ed2cdb844b38067397938433fa4":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.
0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"2241247c6e3841a49c4f1cacbf0d7a8b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6e4b5eced7de4fd49963ad5fdcfdfb3e":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"d07bbf40c40243f9a19b77f0006c17f9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3f9f6d6c8c9e4668a5919972bf492503":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"235032662a62475e92c1f72a2bc7068e":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_98531fc71aec45be85c09e948c858e92","IPY_MODEL_bac34a
f14b8f496d956aaef0b37e1855","IPY_MODEL_7de3bd378c6f42f59b7b017ccc5e6113"],"layout":"IPY_MODEL_81438efe52d9463b8e90db04219c63d7"}},"98531fc71aec45be85c09e948c858e92":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5dc4c73cd1f34bbe8ade9b0e609a17f6","placeholder":"​","style":"IPY_MODEL_482358d8df1b48288e58938f97a637b3","value":"special_tokens_map.json: 100%"}},"bac34af14b8f496d956aaef0b37e1855":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_e6f960b76aec4fc682122bdfe2e88582","max":2222,"min":0,"orientation":"horizontal","style":"IPY_MODEL_b426002e064f4beea64821a7d2ff3c4f","value":2222}},"7de3bd378c6f42f59b7b017ccc5e6113":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a746bde200314ee6b5f156acfe753d60","placeholder":"​","style":"IPY_MODEL_ff855b97a8b04239bbad1213ec50b1ab","value":" 2.22k/2.22k [00:00<00:00, 
217kB/s]"}},"81438efe52d9463b8e90db04219c63d7":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5dc4c73cd1f34bbe8ade9b0e609a17f6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"482358d8df1b48288e58938f97a637b3":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e6f960b76aec4fc682122bdfe2e88582":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b426002e064f4beea64
821a7d2ff3c4f":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a746bde200314ee6b5f156acfe753d60":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ff855b97a8b04239bbad1213ec50b1ab":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a57e020b365744da8659d19a78c6071d":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_75eed191edbf4e5fa50334aa8a1f303e","IPY_MODEL_122bf93d09a4419fbc37acd48aee9b1c","IPY_MODEL_a1f26c2fedd946c0809b978f21e6e53c"],"layout":"IPY_MODEL_8f73444d4e6f4d60ae0902b1d1fe742a"}},"75eed191edbf4e5fa50334aa8a1f303e":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_428a31283cd44128a50f4ef146941620","placeholder":"​","style":"IPY_MODEL_6f80a076c08b44df8efc41c3bd5f38ad","value":"Downloading builder script: 
100%"}},"122bf93d09a4419fbc37acd48aee9b1c":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_5cfba2013b914aa6b8e111396b6927d9","max":6270,"min":0,"orientation":"horizontal","style":"IPY_MODEL_ed74ece02dc743c8b335265c392f3f8d","value":6270}},"a1f26c2fedd946c0809b978f21e6e53c":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_703c3c3786a045769e9767959163a919","placeholder":"​","style":"IPY_MODEL_07bf8f86f3ae45b1aa2cee41e0a334fb","value":" 6.27k/6.27k [00:00<00:00, 543kB/s]"}},"8f73444d4e6f4d60ae0902b1d1fe742a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"428a31283cd44128a50f4ef146941620":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6f80a076c08b44df8efc41c3bd5f38ad":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_mo
del_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"5cfba2013b914aa6b8e111396b6927d9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ed74ece02dc743c8b335265c392f3f8d":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"703c3c3786a045769e9767959163a919":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"07bf8f86f3ae45b1aa2cee41e0a334fb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}},"accelerator":"GPU"},"nbformat":4,"nbformat_minor":0} \ No newline at end of file diff --git a/drive/MyDrive/RA_Internship/HISTRANK/pegasus_x/Trying_Pegasus_X_histrank_final_version2.ipynb b/drive/MyDrive/RA_Internship/HISTRANK/pegasus_x/Trying_Pegasus_X_histrank_final_version2.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..5eeb4cb9917eeb1017abc2ecf81997be49757386 --- /dev/null +++ 
b/drive/MyDrive/RA_Internship/HISTRANK/pegasus_x/Trying_Pegasus_X_histrank_final_version2.ipynb @@ -0,0 +1 @@ +{"cells":[{"cell_type":"code","execution_count":1,"metadata":{"id":"smCuTxuYOAOA","executionInfo":{"status":"ok","timestamp":1717866418842,"user_tz":-240,"elapsed":7073,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["from transformers import PegasusXForConditionalGeneration, PegasusTokenizer, Seq2SeqTrainer, Seq2SeqTrainingArguments, AutoTokenizer\n","import torch"]},{"cell_type":"code","execution_count":3,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"NgHD7qnYSCju","outputId":"73eb77c9-3afa-4caf-8d2a-7671a51f8546","executionInfo":{"status":"ok","timestamp":1717865302576,"user_tz":-240,"elapsed":37581,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Mounted at /content/drive\n"]}],"source":["from google.colab import drive\n","drive.mount('/content/drive')"]},{"cell_type":"code","execution_count":2,"metadata":{"id":"eVxWWYHNOPe0","executionInfo":{"status":"ok","timestamp":1717866453970,"user_tz":-240,"elapsed":609,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["class PegasusDataset(torch.utils.data.Dataset):\n"," def __init__(self, encodings, labels):\n"," self.encodings = encodings\n"," self.labels = labels\n"," def __getitem__(self, idx):\n"," item = {key: torch.tensor(val[idx]) for key, val in self.encodings.items()}\n"," item['labels'] = torch.tensor(self.labels['input_ids'][idx]) # torch.tensor(self.labels[idx])\n"," return item\n"," def __len__(self):\n"," return len(self.labels['input_ids']) # len(self.labels)"]},{"cell_type":"code","source":["max_input_length = 8192\n","max_output_length = 512"],"metadata":{"id":"bIevvdhmJNjI"},"execution_count":null,"outputs":[]},{"cell_type":"code","execution_count":3,"metadata":{"id":"fWjskB7GOhyR","executionInfo":{"status":"ok","timestamp":1717866458916,"user_tz":-240,"elapsed":6,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["def prepare_data(model_name,\n"," train_texts, train_labels,\n"," val_texts, val_labels,\n"," test_texts, test_labels):\n"," \"\"\"\n"," Prepare input data for model fine-tuning\n"," \"\"\"\n"," tokenizer = AutoTokenizer.from_pretrained(\"google/pegasus-x-large\")\n"," tokenizer.model_max_length = 4000\n","\n"," prepare_val = False if val_texts is None or val_labels is None else True\n"," prepare_test = False if test_texts is None or test_labels is None else True\n","\n"," def tokenize_data(texts, labels):\n"," encodings = tokenizer(texts, truncation=True, padding='max_length',max_length = 4000)\n"," decodings = tokenizer(labels, truncation=True, padding='max_length',max_length = 512)\n"," dataset_tokenized = PegasusDataset(encodings, decodings)\n"," return dataset_tokenized\n","\n"," train_dataset = tokenize_data(train_texts, train_labels)\n"," val_dataset = tokenize_data(val_texts, val_labels) if prepare_val else None\n"," test_dataset = tokenize_data(test_texts, test_labels) if prepare_test else None\n","\n"," return train_dataset, val_dataset, test_dataset, tokenizer\n"]},{"cell_type":"code","source":["def compute_metrics(pred):\n"," labels_ids = pred.label_ids\n"," pred_ids = pred.predictions\n"," rouge = load_metric(\"rouge\")\n","\n"," pred_str = tokenizer.batch_decode(pred_ids, skip_special_tokens=True)\n"," labels_ids[labels_ids == -100] = 
tokenizer.pad_token_id\n"," label_str = tokenizer.batch_decode(labels_ids, skip_special_tokens=True)\n","\n"," rouge_output = rouge.compute(\n"," predictions=pred_str, references=label_str, rouge_types=[\"rouge2\"]\n"," )[\"rouge2\"].mid\n","\n"," return {\n"," \"rouge2_precision\": round(rouge_output.precision, 4),\n"," \"rouge2_recall\": round(rouge_output.recall, 4),\n"," \"rouge2_fmeasure\": round(rouge_output.fmeasure, 4),\n"," }"],"metadata":{"id":"iOmCzxlZJhgZ","executionInfo":{"status":"ok","timestamp":1717866463376,"user_tz":-240,"elapsed":12,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":4,"outputs":[]},{"cell_type":"code","execution_count":11,"metadata":{"id":"0N_f-N7OOsOt","executionInfo":{"status":"ok","timestamp":1717866571933,"user_tz":-240,"elapsed":609,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["def prepare_fine_tuning(model_name, tokenizer, train_dataset, val_dataset, freeze_encoder=False, output_dir='./results'):\n"," \"\"\"\n"," Prepare configurations and base model for fine-tuning\n"," \"\"\"\n"," torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'\n"," model = PegasusXForConditionalGeneration.from_pretrained(\"google/pegasus-x-base\").to(torch_device)\n"," model.config.max_length = 512\n"," model.config.min_length = 150\n"," model.config.length_penalty = 0.5\n"," model.config.num_beams = 8\n"," model.config.early_stopping = True\n"," model.config.no_repeat_ngram_size = 3\n"," print(\"val dataset length= \",len(val_dataset))\n","\n"," if freeze_encoder:\n"," for param in model.model.encoder.parameters():\n"," param.requires_grad = False\n","\n"," training_args = Seq2SeqTrainingArguments(\n"," predict_with_generate=True,\n"," evaluation_strategy=\"steps\",\n"," per_device_train_batch_size=2,\n"," per_device_eval_batch_size=2,\n"," fp16=True,\n"," output_dir=\"./\",\n"," logging_steps=5,\n"," eval_steps=10,\n"," save_steps=10,\n"," save_total_limit=2,\n"," gradient_accumulation_steps=4,\n"," eval_accumulation_steps=1,\n"," num_train_epochs=1,\n",")\n","\n"," # training_args = Seq2SeqTrainingArguments(output_dir=\"./\",\n"," # num_train_epochs=1,\n"," # warmup_steps=50,\n"," # per_device_train_batch_size=2,\n"," # per_gpu_eval_batch_size=2,\n"," # weight_decay=0.01,\n"," # logging_steps=10,\n"," # #push_to_hub=True,\n"," # evaluation_strategy='steps',\n"," # eval_steps=10,\n"," # #save_steps=1e6,\n"," # gradient_accumulation_steps = 4,)\n","\n"," trainer = Seq2SeqTrainer(\n"," model=model,\n"," args=training_args,\n"," train_dataset=train_dataset,\n"," eval_dataset=val_dataset,\n"," tokenizer=tokenizer,\n","\n"," #compute_metrics=compute_metrics,\n"," )\n"," return trainer"]},{"cell_type":"code","execution_count":8,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":984},"id":"pBw9rlilPERD","outputId":"61be7141-e54e-42f8-ba55-3a8e5ee71dde","executionInfo":{"status":"ok","timestamp":1717865382166,"user_tz":-240,"elapsed":7124,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting datasets\n"," Downloading datasets-2.19.2-py3-none-any.whl (542 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m542.1/542.1 kB\u001b[0m \u001b[31m10.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from datasets) (3.14.0)\n","Requirement already 
satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from datasets) (1.25.2)\n","Requirement already satisfied: pyarrow>=12.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (14.0.2)\n","Requirement already satisfied: pyarrow-hotfix in /usr/local/lib/python3.10/dist-packages (from datasets) (0.6)\n","Collecting dill<0.3.9,>=0.3.0 (from datasets)\n"," Downloading dill-0.3.8-py3-none-any.whl (116 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m16.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from datasets) (2.0.3)\n","Collecting requests>=2.32.1 (from datasets)\n"," Downloading requests-2.32.3-py3-none-any.whl (64 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.9/64.9 kB\u001b[0m \u001b[31m8.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: tqdm>=4.62.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (4.66.4)\n","Collecting xxhash (from datasets)\n"," Downloading xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m22.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting multiprocess (from datasets)\n"," Downloading multiprocess-0.70.16-py310-none-any.whl (134 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m17.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: fsspec[http]<=2024.3.1,>=2023.1.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (2023.6.0)\n","Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets) (3.9.5)\n","Requirement already satisfied: huggingface-hub>=0.21.2 in /usr/local/lib/python3.10/dist-packages (from datasets) (0.23.2)\n","Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from datasets) (24.0)\n","Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (6.0.1)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.3.1)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (23.2.0)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.4.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (6.0.5)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.9.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (4.0.3)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub>=0.21.2->datasets) (4.12.1)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (3.7)\n","Requirement already 
satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.1->datasets) (2024.6.2)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2023.4)\n","Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets) (2024.1)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas->datasets) (1.16.0)\n","Installing collected packages: xxhash, requests, dill, multiprocess, datasets\n"," Attempting uninstall: requests\n"," Found existing installation: requests 2.31.0\n"," Uninstalling requests-2.31.0:\n"," Successfully uninstalled requests-2.31.0\n","\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n","google-colab 1.0.0 requires requests==2.31.0, but you have requests 2.32.3 which is incompatible.\u001b[0m\u001b[31m\n","\u001b[0mSuccessfully installed datasets-2.19.2 dill-0.3.8 multiprocess-0.70.16 requests-2.32.3 xxhash-3.4.1\n"]},{"output_type":"display_data","data":{"application/vnd.colab-display-data+json":{"pip_warning":{"packages":["requests"]},"id":"48d68eb0b79b4e65a15c4f7e07337b85"}},"metadata":{}}],"source":["pip install datasets"]},{"cell_type":"code","source":[],"metadata":{"id":"3mipwBArEUzL"},"execution_count":null,"outputs":[]},{"cell_type":"code","execution_count":8,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"mFugBZJdUKz8","outputId":"e50fec75-90e3-41e3-8379-65e454d568fd","executionInfo":{"status":"ok","timestamp":1717865209205,"user_tz":-240,"elapsed":61217,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting accelerate\n"," Downloading accelerate-0.31.0-py3-none-any.whl (309 kB)\n","\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/309.4 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[91m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[91m╸\u001b[0m \u001b[32m307.2/309.4 kB\u001b[0m \u001b[31m10.4 MB/s\u001b[0m eta \u001b[36m0:00:01\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m309.4/309.4 kB\u001b[0m \u001b[31m8.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from accelerate) (1.25.2)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (24.0)\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate) (5.9.5)\n","Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from accelerate) (6.0.1)\n","Requirement already satisfied: torch>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (2.3.0+cu121)\n","Requirement already satisfied: huggingface-hub in /usr/local/lib/python3.10/dist-packages (from accelerate) (0.23.2)\n","Requirement already satisfied: safetensors>=0.3.1 in 
/usr/local/lib/python3.10/dist-packages (from accelerate) (0.4.3)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.14.0)\n","Requirement already satisfied: typing-extensions>=4.8.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (4.12.1)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (1.12.1)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.3)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.1.4)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2023.6.0)\n","Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\n","Collecting nvidia-cuda-runtime-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\n","Collecting nvidia-cuda-cupti-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\n","Collecting nvidia-cudnn-cu12==8.9.2.26 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\n","Collecting nvidia-cublas-cu12==12.1.3.1 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\n","Collecting nvidia-cufft-cu12==11.0.2.54 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\n","Collecting nvidia-curand-cu12==10.3.2.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\n","Collecting nvidia-cusolver-cu12==11.4.5.107 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\n","Collecting nvidia-cusparse-cu12==12.1.0.106 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\n","Collecting nvidia-nccl-cu12==2.20.5 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl (176.2 MB)\n","Collecting nvidia-nvtx-cu12==12.1.105 (from torch>=1.10.0->accelerate)\n"," Using cached nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\n","Requirement already satisfied: triton==2.3.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2.3.0)\n","Collecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch>=1.10.0->accelerate)\n"," Downloading nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_x86_64.whl (21.3 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.3/21.3 MB\u001b[0m \u001b[31m54.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (2.31.0)\n","Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (4.66.4)\n","Requirement already satisfied: MarkupSafe>=2.0 in 
/usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.10.0->accelerate) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface-hub->accelerate) (2024.6.2)\n","Requirement already satisfied: mpmath<1.4.0,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.10.0->accelerate) (1.3.0)\n","Installing collected packages: nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, accelerate\n","Successfully installed accelerate-0.31.0 nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.20.5 nvidia-nvjitlink-cu12-12.5.40 nvidia-nvtx-cu12-12.1.105\n"]}],"source":["pip install accelerate -U"]},{"cell_type":"code","source":["!pip install rouge_score"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"caoO2nTuMAEc","outputId":"1478c493-c735-43ea-cf81-dc24cf1199c4"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting rouge_score\n"," Downloading rouge_score-0.1.2.tar.gz (17 kB)\n"," Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: absl-py in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.4.0)\n","Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (from rouge_score) (3.8.1)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.25.2)\n","Requirement already satisfied: six>=1.14.0 in /usr/local/lib/python3.10/dist-packages (from rouge_score) (1.16.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (1.4.2)\n","Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (2024.5.15)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk->rouge_score) (4.66.4)\n","Building wheels for collected packages: rouge_score\n"," Building wheel for rouge_score (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n"," Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24933 sha256=585109dcb922be767a81d4017bc09cd4212a38256ef67aed77b70280c6810550\n"," Stored in directory: /root/.cache/pip/wheels/5f/dd/89/461065a73be61a532ff8599a28e9beef17985c9e9c31e541b4\n","Successfully built rouge_score\n","Installing collected packages: rouge_score\n","Successfully installed rouge_score-0.1.2\n"]}]},{"cell_type":"code","execution_count":14,"metadata":{"id":"192uOu4uOzZr","executionInfo":{"status":"ok","timestamp":1717866610866,"user_tz":-240,"elapsed":7602,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["import os\n","import glob\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","from datasets import Dataset, load_metric\n","from sklearn.model_selection import train_test_split\n","if __name__=='__main__':\n","\n"," from datasets import load_dataset\n","\n"," input_dir = '/content/drive/MyDrive/RA_Internship/HISTRANK/dataset/inputs'\n"," target_dir = '/content/drive/MyDrive/RA_Internship/HISTRANK/dataset/targets'\n"," data = {'input_text': [], 'target_text': []}\n"," input_files = glob.glob(os.path.join(input_dir, '*.txt'))\n","\n"," for input_file in input_files:\n"," filename = os.path.basename(input_file)\n"," target_file = os.path.join(target_dir, filename)\n","\n"," with open(input_file, 'r') as f:\n"," input_text = f.read()\n"," with open(target_file, 'r') as f:\n"," target_text = f.read()\n","\n"," data['input_text'].append(input_text)\n"," data['target_text'].append(target_text)\n"," df = pd.DataFrame(data)\n"," train_df, temp_df = train_test_split(df, test_size=0.2, random_state=42)\n"," eval_df, test_df = train_test_split(temp_df, test_size=0.5, random_state=42)\n","\n"," train_dataset = Dataset.from_pandas(train_df)\n"," eval_dataset = Dataset.from_pandas(eval_df)\n"," test_dataset = Dataset.from_pandas(test_df)\n","\n"," # print(test_dataset)\n"]},{"cell_type":"code","execution_count":7,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":35},"id":"eo519NZJU3e1","outputId":"30f198eb-839f-4444-ac96-24dc4372b0c9","executionInfo":{"status":"ok","timestamp":1717866509779,"user_tz":-240,"elapsed":1829,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["'0.31.0'"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"string"}},"metadata":{},"execution_count":7}],"source":["import accelerate\n","\n","accelerate.__version__"]},{"cell_type":"code","execution_count":8,"metadata":{"id":"e3T4btv9WF6i","executionInfo":{"status":"ok","timestamp":1717866516239,"user_tz":-240,"elapsed":628,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[],"source":["from transformers import logging\n","\n","logging.set_verbosity_warning()"]},{"cell_type":"code","source":["print(len(train_dataset))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"v5PhwjbacX8I","outputId":"68eafdbd-f6b8-4b6e-f2ac-d2ace13871f2","executionInfo":{"status":"ok","timestamp":1717866177525,"user_tz":-240,"elapsed":1528,"user":{"displayName":"Aditi 
Paretkar","userId":"17466297872366651006"}}},"execution_count":12,"outputs":[{"output_type":"stream","name":"stdout","text":["805\n"]}]},{"cell_type":"code","source":[],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":211},"id":"u1njQAtRigaH","outputId":"ac4af504-6c15-4091-c01f-ec8804b8ac7e"},"execution_count":null,"outputs":[{"output_type":"error","ename":"NameError","evalue":"name 'model' is not defined","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax_length\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m512\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmin_length\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m100\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlength_penalty\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m2.0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mearly_stopping\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mno_repeat_ngram_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mNameError\u001b[0m: name 'model' is not defined"]}]},{"cell_type":"code","execution_count":15,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":356},"id":"YoeEBu57US5H","outputId":"145a885e-a38b-4c0a-89b5-a5a96f1d13dc","executionInfo":{"status":"error","timestamp":1717866619981,"user_tz":-240,"elapsed":2991,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"outputs":[{"output_type":"error","ename":"OutOfMemoryError","evalue":"CUDA out of memory. Tried to allocate 282.00 MiB. 
GPU ","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mOutOfMemoryError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mmodel_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'google/pegasus-x-base'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mtrain_dataset\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mval_dataset\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_dataset\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtokenizer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mprepare_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_dataset\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'input_text'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_dataset\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'target_text'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0meval_dataset\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'input_text'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0meval_dataset\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'target_text'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_dataset\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'input_text'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_dataset\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'target_text'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mtrainer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mprepare_fine_tuning\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtokenizer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_dataset\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mval_dataset\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m\u001b[0m in \u001b[0;36mprepare_fine_tuning\u001b[0;34m(model_name, tokenizer, train_dataset, val_dataset, freeze_encoder, output_dir)\u001b[0m\n\u001b[1;32m 4\u001b[0m \"\"\"\n\u001b[1;32m 5\u001b[0m \u001b[0mtorch_device\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'cuda'\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_available\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;34m'cpu'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mmodel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mPegasusXForConditionalGeneration\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfrom_pretrained\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"google/pegasus-x-base\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtorch_device\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax_length\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m512\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m 
\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmin_length\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m150\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py\u001b[0m in \u001b[0;36mto\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 2722\u001b[0m \u001b[0;34m\" `dtype` by passing the correct `torch_dtype` argument.\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2723\u001b[0m )\n\u001b[0;32m-> 2724\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2725\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2726\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mhalf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36mto\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1171\u001b[0m \u001b[0;32mraise\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1172\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1173\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_apply\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconvert\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1174\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1175\u001b[0m def register_full_backward_pre_hook(\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_apply\u001b[0;34m(self, fn, recurse)\u001b[0m\n\u001b[1;32m 777\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrecurse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 778\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mmodule\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchildren\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 779\u001b[0;31m \u001b[0mmodule\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_apply\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 780\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 781\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mcompute_should_use_set_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtensor\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensor_applied\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_apply\u001b[0;34m(self, fn, recurse)\u001b[0m\n\u001b[1;32m 777\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrecurse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 778\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mmodule\u001b[0m 
\u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchildren\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 779\u001b[0;31m \u001b[0mmodule\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_apply\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 780\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 781\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mcompute_should_use_set_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtensor\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensor_applied\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_apply\u001b[0;34m(self, fn, recurse)\u001b[0m\n\u001b[1;32m 802\u001b[0m \u001b[0;31m# `with torch.no_grad():`\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 803\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mno_grad\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 804\u001b[0;31m \u001b[0mparam_applied\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mparam\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 805\u001b[0m \u001b[0mp_should_use_set_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompute_should_use_set_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mparam\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mparam_applied\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 806\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36mconvert\u001b[0;34m(t)\u001b[0m\n\u001b[1;32m 1157\u001b[0m \u001b[0mmemory_format\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconvert_to_format\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1158\u001b[0m )\n\u001b[0;32m-> 1159\u001b[0;31m return t.to(\n\u001b[0m\u001b[1;32m 1160\u001b[0m \u001b[0mdevice\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1161\u001b[0m \u001b[0mdtype\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_floating_point\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_complex\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mOutOfMemoryError\u001b[0m: CUDA out of memory. Tried to allocate 282.00 MiB. 
GPU "]}],"source":["# use Pegasus Large model as base for fine-tuning\n","model_name = 'google/pegasus-x-base'\n","train_dataset, val_dataset, test_dataset, tokenizer = prepare_data(model_name, train_dataset['input_text'], train_dataset['target_text'], eval_dataset['input_text'], eval_dataset['target_text'], test_dataset['input_text'], test_dataset['target_text'])\n","trainer = prepare_fine_tuning(model_name, tokenizer, train_dataset,val_dataset)\n","trainer.train()"]},{"source":["# class PegasusDataset(Dataset):\n","# def __init__(self, encodings, labels):\n","# self.encodings = encodings\n","# self.labels = labels\n","# def __getitem__(self, idx):\n","# item = {key: torch.tensor(val[idx]) for key, val in self.encodings.items()}\n","# # Ensure 'input_ids' key exists in self.labels before accessing it\n","# if 'input_ids' in self.labels:\n","# item['labels'] = torch.tensor(self.labels['input_ids'][idx])\n","# else:\n","# # Handle the case where 'input_ids' is missing, perhaps by setting a default value\n","# item['labels'] = torch.tensor([])\n","# return item"],"cell_type":"code","metadata":{"id":"DhOMS7mQjlq6"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["trainer.state.log_history"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"tfY-I39mZCp6","outputId":"f4b29d0d-0b22-4910-e3df-084adb1af3f0","executionInfo":{"status":"ok","timestamp":1717833715159,"user_tz":-240,"elapsed":1076,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["[{'loss': 10.1624,\n"," 'grad_norm': 38.591487884521484,\n"," 'learning_rate': 4.9e-05,\n"," 'epoch': 0.04962779156327544,\n"," 'step': 5},\n"," {'loss': 9.2442,\n"," 'grad_norm': 15.482707023620605,\n"," 'learning_rate': 4.6500000000000005e-05,\n"," 'epoch': 0.09925558312655088,\n"," 'step': 10},\n"," {'eval_loss': 8.50622844696045,\n"," 'eval_runtime': 14.8694,\n"," 'eval_samples_per_second': 6.792,\n"," 'eval_steps_per_second': 3.43,\n"," 'epoch': 0.09925558312655088,\n"," 'step': 10},\n"," {'loss': 8.6785,\n"," 'grad_norm': 11.103849411010742,\n"," 'learning_rate': 4.4000000000000006e-05,\n"," 'epoch': 0.1488833746898263,\n"," 'step': 15},\n"," {'loss': 8.1888,\n"," 'grad_norm': 7.722441673278809,\n"," 'learning_rate': 4.15e-05,\n"," 'epoch': 0.19851116625310175,\n"," 'step': 20},\n"," {'eval_loss': 7.642266750335693,\n"," 'eval_runtime': 14.9711,\n"," 'eval_samples_per_second': 6.746,\n"," 'eval_steps_per_second': 3.407,\n"," 'epoch': 0.19851116625310175,\n"," 'step': 20},\n"," {'loss': 7.8648,\n"," 'grad_norm': 7.4054646492004395,\n"," 'learning_rate': 3.9000000000000006e-05,\n"," 'epoch': 0.24813895781637718,\n"," 'step': 25},\n"," {'loss': 7.5939,\n"," 'grad_norm': 8.736732482910156,\n"," 'learning_rate': 3.65e-05,\n"," 'epoch': 0.2977667493796526,\n"," 'step': 30},\n"," {'eval_loss': 7.073873043060303,\n"," 'eval_runtime': 14.9803,\n"," 'eval_samples_per_second': 6.742,\n"," 'eval_steps_per_second': 3.404,\n"," 'epoch': 0.2977667493796526,\n"," 'step': 30},\n"," {'loss': 7.4197,\n"," 'grad_norm': 6.358315467834473,\n"," 'learning_rate': 3.4000000000000007e-05,\n"," 'epoch': 0.34739454094292804,\n"," 'step': 35},\n"," {'loss': 7.1108,\n"," 'grad_norm': 8.101419448852539,\n"," 'learning_rate': 3.15e-05,\n"," 'epoch': 0.3970223325062035,\n"," 'step': 40},\n"," {'eval_loss': 6.582322120666504,\n"," 'eval_runtime': 14.9673,\n"," 'eval_samples_per_second': 6.748,\n"," 'eval_steps_per_second': 3.407,\n"," 'epoch': 
0.3970223325062035,\n"," 'step': 40},\n"," {'loss': 6.6779,\n"," 'grad_norm': 8.235506057739258,\n"," 'learning_rate': 2.9e-05,\n"," 'epoch': 0.4466501240694789,\n"," 'step': 45},\n"," {'loss': 6.5665,\n"," 'grad_norm': 9.374942779541016,\n"," 'learning_rate': 2.6500000000000004e-05,\n"," 'epoch': 0.49627791563275436,\n"," 'step': 50},\n"," {'eval_loss': 6.107143402099609,\n"," 'eval_runtime': 14.9112,\n"," 'eval_samples_per_second': 6.773,\n"," 'eval_steps_per_second': 3.42,\n"," 'epoch': 0.49627791563275436,\n"," 'step': 50},\n"," {'loss': 6.3934,\n"," 'grad_norm': 10.224224090576172,\n"," 'learning_rate': 2.4e-05,\n"," 'epoch': 0.5459057071960298,\n"," 'step': 55},\n"," {'loss': 6.2285,\n"," 'grad_norm': 11.039881706237793,\n"," 'learning_rate': 2.15e-05,\n"," 'epoch': 0.5955334987593052,\n"," 'step': 60},\n"," {'eval_loss': 5.638771057128906,\n"," 'eval_runtime': 14.9534,\n"," 'eval_samples_per_second': 6.754,\n"," 'eval_steps_per_second': 3.411,\n"," 'epoch': 0.5955334987593052,\n"," 'step': 60},\n"," {'loss': 5.7524,\n"," 'grad_norm': 12.911364555358887,\n"," 'learning_rate': 1.9e-05,\n"," 'epoch': 0.6451612903225806,\n"," 'step': 65},\n"," {'loss': 5.8365,\n"," 'grad_norm': 13.441301345825195,\n"," 'learning_rate': 1.65e-05,\n"," 'epoch': 0.6947890818858561,\n"," 'step': 70},\n"," {'eval_loss': 5.208266735076904,\n"," 'eval_runtime': 14.9517,\n"," 'eval_samples_per_second': 6.755,\n"," 'eval_steps_per_second': 3.411,\n"," 'epoch': 0.6947890818858561,\n"," 'step': 70},\n"," {'loss': 5.4429,\n"," 'grad_norm': 15.03567123413086,\n"," 'learning_rate': 1.4000000000000001e-05,\n"," 'epoch': 0.7444168734491315,\n"," 'step': 75},\n"," {'loss': 5.5753,\n"," 'grad_norm': 15.359692573547363,\n"," 'learning_rate': 1.1500000000000002e-05,\n"," 'epoch': 0.794044665012407,\n"," 'step': 80},\n"," {'eval_loss': 4.81761360168457,\n"," 'eval_runtime': 14.9377,\n"," 'eval_samples_per_second': 6.761,\n"," 'eval_steps_per_second': 3.414,\n"," 'epoch': 0.794044665012407,\n"," 'step': 80},\n"," {'loss': 5.4166,\n"," 'grad_norm': 18.756885528564453,\n"," 'learning_rate': 9e-06,\n"," 'epoch': 0.8436724565756824,\n"," 'step': 85},\n"," {'loss': 5.3009,\n"," 'grad_norm': 24.592605590820312,\n"," 'learning_rate': 6.5000000000000004e-06,\n"," 'epoch': 0.8933002481389578,\n"," 'step': 90},\n"," {'eval_loss': 4.515504360198975,\n"," 'eval_runtime': 14.8974,\n"," 'eval_samples_per_second': 6.78,\n"," 'eval_steps_per_second': 3.423,\n"," 'epoch': 0.8933002481389578,\n"," 'step': 90},\n"," {'loss': 5.0547,\n"," 'grad_norm': 17.001161575317383,\n"," 'learning_rate': 4.000000000000001e-06,\n"," 'epoch': 0.9429280397022333,\n"," 'step': 95},\n"," {'loss': 5.1573,\n"," 'grad_norm': 14.121624946594238,\n"," 'learning_rate': 1.5e-06,\n"," 'epoch': 0.9925558312655087,\n"," 'step': 100},\n"," {'eval_loss': 4.362726211547852,\n"," 'eval_runtime': 14.8984,\n"," 'eval_samples_per_second': 6.779,\n"," 'eval_steps_per_second': 3.423,\n"," 'epoch': 0.9925558312655087,\n"," 'step': 100},\n"," {'train_runtime': 779.7681,\n"," 'train_samples_per_second': 1.032,\n"," 'train_steps_per_second': 0.128,\n"," 'total_flos': 3809024409600000.0,\n"," 'train_loss': 6.783313522338867,\n"," 'epoch': 0.9925558312655087,\n"," 'step': 100}]"]},"metadata":{},"execution_count":10}]},{"cell_type":"code","source":["import pandas as pd\n","df=pd.DataFrame(trainer.state.log_history)\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","\n","# Assuming df is already defined, and train_loss and eval_loss are subsets of df\n","train_loss = 
df[['loss', 'step']]\n","eval_loss = df[['eval_loss', 'step']]\n","\n","# Remove NaN rows in both dataframes\n","train_loss_clean = train_loss.dropna()\n","eval_loss_clean = eval_loss.dropna()\n","\n","# Plotting the loss vs step for train_loss\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting the loss vs step for eval_loss\n","plt.figure(figsize=(5, 2))\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n","\n","# Plotting both losses together\n","plt.figure(figsize=(5, 2))\n","plt.plot(train_loss_clean['step'], train_loss_clean['loss'], label='Train Loss', color='blue')\n","plt.plot(eval_loss_clean['step'], eval_loss_clean['eval_loss'], label='Eval Loss', color='red')\n","plt.xlabel('Step')\n","plt.ylabel('Loss')\n","plt.title('Train and Eval Loss vs Step')\n","plt.legend()\n","plt.grid(True)\n","plt.show()\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":356},"id":"sXjiixD56uwr","outputId":"746424cc-704c-4dc0-f58b-cbd43b71e983","executionInfo":{"status":"error","timestamp":1717866584335,"user_tz":-240,"elapsed":1138,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":12,"outputs":[{"output_type":"error","ename":"KeyError","evalue":"\"None of [Index(['loss', 'step'], dtype='object')] are in the [columns]\"","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;31m# Assuming df is already defined, and train_loss and eval_loss are subsets of df\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m \u001b[0mtrain_loss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdf\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'loss'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'step'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 8\u001b[0m \u001b[0meval_loss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdf\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'eval_loss'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'step'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/pandas/core/frame.py\u001b[0m in \u001b[0;36m__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 3765\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mis_iterator\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3766\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3767\u001b[0;31m \u001b[0mindexer\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcolumns\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_indexer_strict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"columns\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3768\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3769\u001b[0m \u001b[0;31m# take() does not accept boolean indexers\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/pandas/core/indexes/base.py\u001b[0m in \u001b[0;36m_get_indexer_strict\u001b[0;34m(self, key, axis_name)\u001b[0m\n\u001b[1;32m 5875\u001b[0m \u001b[0mkeyarr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mindexer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnew_indexer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reindex_non_unique\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkeyarr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5876\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 5877\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_raise_if_missing\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkeyarr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mindexer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0maxis_name\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5878\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5879\u001b[0m \u001b[0mkeyarr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtake\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindexer\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/pandas/core/indexes/base.py\u001b[0m in \u001b[0;36m_raise_if_missing\u001b[0;34m(self, key, indexer, axis_name)\u001b[0m\n\u001b[1;32m 5936\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0muse_interval_msg\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5937\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 5938\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mKeyError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"None of [{key}] are in the [{axis_name}]\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5939\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5940\u001b[0m \u001b[0mnot_found\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mensure_index\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmissing_mask\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnonzero\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0munique\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mKeyError\u001b[0m: \"None of [Index(['loss', 'step'], dtype='object')] are in the [columns]\""]}]},{"cell_type":"code","source":["from huggingface_hub import 
notebook_login\n","\n","notebook_login()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":162,"referenced_widgets":["e4a967601e1e4629ac17bb6cb3b4e7c5","7ab5cd64bd6b4e9883fd4fadb464a71d","8b4eb51452894ddeada282d9eb7f22aa","eb74146abbdf484dad8473972c415956","3b58b9bf6fb440419cfa5c18308d0253","daba3779b0bb415cbe29aedf72487ce5","1e494c423dfb40ee9cd9d3a74f3700ca","ceef5b81e8484244b44bb038569e6b8a","96d917fca2cc4695a5fdc907f3e1f829","b62c04f8302046a3bff738ca738e0834","898fce8a428d4139ab1e84d85be9af89","f262f2879274428e88bd0029b77410fa","8f498432f7f2400ea280e9089557d2a5","97207133a8cc4c5c93b0b5bb0ea1d7dd","a1d6b1cad8b34320b79c918c54d0bed4","8ad425e5ef7748248cee1eb249c33247","4ffdf150443b4d89bacda9bded8812eb","c378316066314e3388b3966dbaec40d3","fe6a0c2f7afe421692cde16def6b084f","2095498ebfb04f06a9db004fa6d908ee","474b9b0f76304b01b2a5d6ddcb293eda","0cd90ea445c142548474f5a0792b071b","b96a5a3a0188419ca596f1ebb92b0e3a","a7f508afa2f740b59e790014c2e4f573","51612bb110964f3e98a53c030a2e89ac","e96ea52802dd4922affaf266dc2f648e","038962232cc842c48a64a82df56002ed","c8e229b9cae94e169e882861bcf50fba","ed4385dbf730460f810a926f7926ea95","9538dfa8de9240baa76bca225ca571be","b49ff381642a4f96b2814f82997a1bbc","dd43aa84997641a19d68e4d9c37ef0c1"]},"id":"dI9BuDXp65zX","outputId":"a21e96a1-9b63-451b-a650-dbdc8f0ae872","executionInfo":{"status":"ok","timestamp":1717834099245,"user_tz":-240,"elapsed":656,"user":{"displayName":"Aditi Paretkar","userId":"17466297872366651006"}}},"execution_count":null,"outputs":[{"output_type":"display_data","data":{"text/plain":["VBox(children=(HTML(value='
"}},"8b4eb51452894ddeada282d9eb7f22aa":{"model_module":"@jupyter-widgets/controls","model_name":"PasswordModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"PasswordModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"PasswordView","continuous_update":true,"description":"Token:","description_tooltip":null,"disabled":false,"layout":"IPY_MODEL_b62c04f8302046a3bff738ca738e0834","placeholder":"​","style":"IPY_MODEL_898fce8a428d4139ab1e84d85be9af89","value":""}},"eb74146abbdf484dad8473972c415956":{"model_module":"@jupyter-widgets/controls","model_name":"CheckboxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"CheckboxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"CheckboxView","description":"Add token as git credential?","description_tooltip":null,"disabled":false,"indent":true,"layout":"IPY_MODEL_f262f2879274428e88bd0029b77410fa","style":"IPY_MODEL_8f498432f7f2400ea280e9089557d2a5","value":true}},"3b58b9bf6fb440419cfa5c18308d0253":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ButtonView","button_style":"","description":"Login","disabled":false,"icon":"","layout":"IPY_MODEL_97207133a8cc4c5c93b0b5bb0ea1d7dd","style":"IPY_MODEL_a1d6b1cad8b34320b79c918c54d0bed4","tooltip":""}},"daba3779b0bb415cbe29aedf72487ce5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_8ad425e5ef7748248cee1eb249c33247","placeholder":"​","style":"IPY_MODEL_4ffdf150443b4d89bacda9bded8812eb","value":"\nPro Tip: If you don't already have one, you can create a dedicated\n'notebooks' token with 'write' access, that you can then easily reuse for all\nnotebooks. 
"}},"1e494c423dfb40ee9cd9d3a74f3700ca":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":"center","align_self":null,"border":null,"bottom":null,"display":"flex","flex":null,"flex_flow":"column","grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":"50%"}},"ceef5b81e8484244b44bb038569e6b8a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"96d917fca2cc4695a5fdc907f3e1f829":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b62c04f8302046a3bff738ca738e0834":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"898fce8a428d4139
ab1e84d85be9af89":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f262f2879274428e88bd0029b77410fa":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"8f498432f7f2400ea280e9089557d2a5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"97207133a8cc4c5c93b0b5bb0ea1d7dd":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a1d6b1cad8b34320b79c918c54d0bed4":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","button_color":null,"font_weight":""}},"8ad425e5ef7748248cee1eb249c33247":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_vi
ew_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4ffdf150443b4d89bacda9bded8812eb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"c378316066314e3388b3966dbaec40d3":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fe6a0c2f7afe421692cde16def6b084f","placeholder":"​","style":"IPY_MODEL_2095498ebfb04f06a9db004fa6d908ee","value":"Connecting..."}},"fe6a0c2f7afe421692cde16def6b084f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2095498ebfb04f06a9db004fa6d908ee":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"474b9b0f76304b01b2a5d6ddcb293eda":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_versi
on":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_51612bb110964f3e98a53c030a2e89ac","placeholder":"​","style":"IPY_MODEL_e96ea52802dd4922affaf266dc2f648e","value":"Token is valid (permission: write)."}},"0cd90ea445c142548474f5a0792b071b":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_038962232cc842c48a64a82df56002ed","placeholder":"​","style":"IPY_MODEL_c8e229b9cae94e169e882861bcf50fba","value":"Your token has been saved in your configured git credential helpers (store)."}},"b96a5a3a0188419ca596f1ebb92b0e3a":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ed4385dbf730460f810a926f7926ea95","placeholder":"​","style":"IPY_MODEL_9538dfa8de9240baa76bca225ca571be","value":"Your token has been saved to /root/.cache/huggingface/token"}},"a7f508afa2f740b59e790014c2e4f573":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_b49ff381642a4f96b2814f82997a1bbc","placeholder":"​","style":"IPY_MODEL_dd43aa84997641a19d68e4d9c37ef0c1","value":"Login 
successful"}},"51612bb110964f3e98a53c030a2e89ac":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e96ea52802dd4922affaf266dc2f648e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"038962232cc842c48a64a82df56002ed":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c8e229b9cae94e169e882861bcf50fba":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ed4385dbf730460f810a926f7926ea95":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":n
ull,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9538dfa8de9240baa76bca225ca571be":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b49ff381642a4f96b2814f82997a1bbc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dd43aa84997641a19d68e4d9c37ef0c1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ae6e90f688934a989b6dd9cbfc371b34":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_0f436e92314d478f9ca1911225de6563","IPY_MODEL_89c8ddb4489b4368bf51fcb7ae6ddce5","IPY_MODEL_e1fa32ab6bb4416892e107bfc7561e24"],"layout":"IPY_MODEL_ede18bb7cdfb4ed19ea7e16087ee2504"}},"0f436e92314d478f9ca1911225de6563":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e72cb66b5dd1451a99f235a462249b09","placeholder":"​","style":"IPY_MODEL_6ad6151ba5fe46018e33ebad2796efee","value":"mnist_train_small.csv: 
100%"}},"89c8ddb4489b4368bf51fcb7ae6ddce5":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_48422cd416904624ac510f8c614b684d","max":36523880,"min":0,"orientation":"horizontal","style":"IPY_MODEL_b1f1d3751c914783ba042ac523f568db","value":36523880}},"e1fa32ab6bb4416892e107bfc7561e24":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_52e73db4aec94ff2aa1908f7b7a83669","placeholder":"​","style":"IPY_MODEL_b1c2ac4c1bf84fc190681dc1650226d5","value":" 36.5M/36.5M [00:06<00:00, 4.21MB/s]"}},"ede18bb7cdfb4ed19ea7e16087ee2504":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e72cb66b5dd1451a99f235a462249b09":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6ad6151ba5fe46018e33ebad2796efee":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.
5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"48422cd416904624ac510f8c614b684d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b1f1d3751c914783ba042ac523f568db":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"52e73db4aec94ff2aa1908f7b7a83669":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b1c2ac4c1bf84fc190681dc1650226d5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"73f8f0d412364d56a7ab1abbf906d39b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_6b0dc598f7b3443380390597cb3a37ab","IPY_MODEL_5850
791567884e33a14ddffc3365bbaa","IPY_MODEL_ca641cfa3b024da892a364d242ea376d"],"layout":"IPY_MODEL_0a2ce165c5f54624b82252fddb1c17aa"}},"6b0dc598f7b3443380390597cb3a37ab":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1fc2ec0ca0e9496788d59c223d7cab16","placeholder":"​","style":"IPY_MODEL_51843f532b53469583085b0c2e855959","value":"events.out.tfevents.1717832929.6fbbc2107a5b.9774.0: 100%"}},"5850791567884e33a14ddffc3365bbaa":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_8e5afb4564234ec49196ee8ac93a8f31","max":12968,"min":0,"orientation":"horizontal","style":"IPY_MODEL_13a241f631f04970b2f6b7e727d0240a","value":12968}},"ca641cfa3b024da892a364d242ea376d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3ef83bb6ba8a451bb5e8034d3abd091e","placeholder":"​","style":"IPY_MODEL_9f93ca71a22a41939adb3a7be3a53efe","value":" 13.0k/13.0k [00:01<00:00, 
11.8kB/s]"}},"0a2ce165c5f54624b82252fddb1c17aa":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1fc2ec0ca0e9496788d59c223d7cab16":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"51843f532b53469583085b0c2e855959":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8e5afb4564234ec49196ee8ac93a8f31":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"13a241f631f04970b2
f6b7e727d0240a":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"3ef83bb6ba8a451bb5e8034d3abd091e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9f93ca71a22a41939adb3a7be3a53efe":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a39c7d162fb840b6b99277c2cec673e7":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_ef46eb872c2c4fe5ad41daed2f9aa016","IPY_MODEL_065e74e5a7f84a6797cb2ad7d0ce8a26","IPY_MODEL_c10ca1c714fa49a7a16097081db01b98"],"layout":"IPY_MODEL_1fddc8c5446149b0a6137ec91b1467e4"}},"ef46eb872c2c4fe5ad41daed2f9aa016":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a27f4474258b4de795496da3d43746d4","placeholder":"​","style":"IPY_MODEL_75c590c533034f3fa61404b96cc146f7","value":"model.safetensors: 
100%"}},"065e74e5a7f84a6797cb2ad7d0ce8a26":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_f64a381eeb494b6d82c580e3c72c86f8","max":1089213696,"min":0,"orientation":"horizontal","style":"IPY_MODEL_0ae51b3df8634614b08f9ecaa1720c15","value":1089213696}},"c10ca1c714fa49a7a16097081db01b98":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_8120d8d649e049ccbf765354364273dc","placeholder":"​","style":"IPY_MODEL_295064bc424f4b5f8dd85d4fa3c48707","value":" 1.09G/1.09G [00:55<00:00, 35.4MB/s]"}},"1fddc8c5446149b0a6137ec91b1467e4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a27f4474258b4de795496da3d43746d4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"75c590c533034f3fa61404b96cc146f7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version"
:"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f64a381eeb494b6d82c580e3c72c86f8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"0ae51b3df8634614b08f9ecaa1720c15":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"8120d8d649e049ccbf765354364273dc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"295064bc424f4b5f8dd85d4fa3c48707":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"186f60a15b624444a2c15741135bfecf":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_f52e27f1b3a14f539888d63566ddcc16","IPY_MODEL_
389239a6254b4709bf2860f50d462585","IPY_MODEL_89b413a11e5249b697eac51c7a57c866"],"layout":"IPY_MODEL_e2c431ee1d3a40429c7b150437b477cc"}},"f52e27f1b3a14f539888d63566ddcc16":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_03357f63363445abb35a0f46e1561fc5","placeholder":"​","style":"IPY_MODEL_1d27710f00624fdb8202ac6feca4db26","value":"Upload 6 LFS files: 100%"}},"389239a6254b4709bf2860f50d462585":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_67f4e1bd12d24c41b9b9e6920bf24c6f","max":6,"min":0,"orientation":"horizontal","style":"IPY_MODEL_380faab767b94b0f8e57c0e141af7f9b","value":6}},"89b413a11e5249b697eac51c7a57c866":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c7cf7d09357a4051b91dad47ba80b67c","placeholder":"​","style":"IPY_MODEL_dfc7d1d2ebe641158712083c034d3bf0","value":" 6/6 [00:55<00:00, 
55.74s/it]"}},"e2c431ee1d3a40429c7b150437b477cc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"03357f63363445abb35a0f46e1561fc5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1d27710f00624fdb8202ac6feca4db26":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"67f4e1bd12d24c41b9b9e6920bf24c6f":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"380faab767b94b0f8
e57c0e141af7f9b":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"c7cf7d09357a4051b91dad47ba80b67c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"dfc7d1d2ebe641158712083c034d3bf0":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7f5b5b332e2c4723a6b3762974aa6c7b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_86c3399153ab45e79d83a67d959da0a0","IPY_MODEL_7755a6100ad044329b2ff9dca50a5040","IPY_MODEL_f4b8e016a67f453e84e63a8f5421a59f"],"layout":"IPY_MODEL_cd054ecdd884423b924c0a019aeed593"}},"86c3399153ab45e79d83a67d959da0a0":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_6322d45c4fc04dcd8f012a05c5e48271","placeholder":"​","style":"IPY_MODEL_3e0b3b4865964fa2b20ef6ec03f961e9","value":"mnist_test.csv: 
100%"}},"7755a6100ad044329b2ff9dca50a5040":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_7072e9743b554cd69af154fbda8434a2","max":18289443,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6fb4d882166240aeb383fb8b2f26b7af","value":18289443}},"f4b8e016a67f453e84e63a8f5421a59f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3d4e0b25f25b46beb9a544bb4c9645fc","placeholder":"​","style":"IPY_MODEL_73bb1b6f1b1c4e5f8638d66fe67e48a6","value":" 18.3M/18.3M [00:03<00:00, 6.60MB/s]"}},"cd054ecdd884423b924c0a019aeed593":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6322d45c4fc04dcd8f012a05c5e48271":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3e0b3b4865964fa2b20ef6ec03f961e9":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.
5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"7072e9743b554cd69af154fbda8434a2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6fb4d882166240aeb383fb8b2f26b7af":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"3d4e0b25f25b46beb9a544bb4c9645fc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"73bb1b6f1b1c4e5f8638d66fe67e48a6":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8ec3eaf57bcd429b9255b7866a5f705b":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_11afdb38600049dd896cad19113acf1a","IPY_MODEL_d9b0
093af98945df8e3a501119a9ccfb","IPY_MODEL_7020fcea7fd34c4ca767aac298139693"],"layout":"IPY_MODEL_9f128a00b66f4eb4a75d36a93a288a9b"}},"11afdb38600049dd896cad19113acf1a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_75ba9ea6143b4010bab79b12ff634cc3","placeholder":"​","style":"IPY_MODEL_c56207648d714cc3a5b098337d7b759a","value":"spiece.model: 100%"}},"d9b0093af98945df8e3a501119a9ccfb":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_06d62b1869534080b8876716348a73ac","max":1912529,"min":0,"orientation":"horizontal","style":"IPY_MODEL_eea14dea9404401f85603cf01acaffc4","value":1912529}},"7020fcea7fd34c4ca767aac298139693":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_058a323af0864d06abfbffa0a27e9244","placeholder":"​","style":"IPY_MODEL_a2a295ecf7a14d729f354c828bb04d83","value":" 1.91M/1.91M [00:01<00:00, 
12.9MB/s]"}},"9f128a00b66f4eb4a75d36a93a288a9b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"75ba9ea6143b4010bab79b12ff634cc3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c56207648d714cc3a5b098337d7b759a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"06d62b1869534080b8876716348a73ac":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"eea14dea9404401f85
603cf01acaffc4":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"058a323af0864d06abfbffa0a27e9244":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a2a295ecf7a14d729f354c828bb04d83":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"9849ad63117547cbaee0236349cd48f5":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_efbfee009a484d37a89410e2767509fd","IPY_MODEL_344b27e8aeab43c491509ab64bb7b21b","IPY_MODEL_d6485e64808a49789d9411af81edb9b4"],"layout":"IPY_MODEL_a6219a4cfdca4b39b6be31a1f07a1459"}},"efbfee009a484d37a89410e2767509fd":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_4cb635704e9f4c7f905f7c9fd838199b","placeholder":"​","style":"IPY_MODEL_8eeea6a9f1d34157801842f073a7d6bd","value":"training_args.bin: 
100%"}},"344b27e8aeab43c491509ab64bb7b21b":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_e9e3500adea74570a4f7207cf457cc33","max":5176,"min":0,"orientation":"horizontal","style":"IPY_MODEL_cb8d666bfe2d4d6eb71523ae92f3aecb","value":5176}},"d6485e64808a49789d9411af81edb9b4":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_721d5aad3119491e8f94a4290070f555","placeholder":"​","style":"IPY_MODEL_23c922977e164a7d89ae074bac6bad32","value":" 5.18k/5.18k [00:00<00:00, 5.52kB/s]"}},"a6219a4cfdca4b39b6be31a1f07a1459":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4cb635704e9f4c7f905f7c9fd838199b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"8eeea6a9f1d34157801842f073a7d6bd":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_m
odel_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e9e3500adea74570a4f7207cf457cc33":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cb8d666bfe2d4d6eb71523ae92f3aecb":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"721d5aad3119491e8f94a4290070f555":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"23c922977e164a7d89ae074bac6bad32":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"746187b24ab84950822f0eca5ebb58e2":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_e77acc413b24412d8f66e1e47e2ec5d5","IPY_MODEL_60a2e707c6f1
450cb5a4c526f3319376","IPY_MODEL_8df809c056fb4a7c9783b3ed9ae40a2f"],"layout":"IPY_MODEL_fbb132ff7d7b43a78c7b25c6ae6bebca"}},"e77acc413b24412d8f66e1e47e2ec5d5":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5cb7f7ab950b4f49a4a824948154691e","placeholder":"​","style":"IPY_MODEL_89992ca769754d61b1c9c558032ee812","value":"config.json: 100%"}},"60a2e707c6f1450cb5a4c526f3319376":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_d9dd172d20e74fdf9e9f1a8c1b44ad62","max":1557,"min":0,"orientation":"horizontal","style":"IPY_MODEL_4e624ff48e154a9eb78c23588575c438","value":1557}},"8df809c056fb4a7c9783b3ed9ae40a2f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_143e1c8353ae4b7d8c8faf3c0ec37e23","placeholder":"​","style":"IPY_MODEL_ecc148272f7945beaf289f317e002b1e","value":" 1.56k/1.56k [00:00<00:00, 
128kB/s]"}},"fbb132ff7d7b43a78c7b25c6ae6bebca":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5cb7f7ab950b4f49a4a824948154691e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"89992ca769754d61b1c9c558032ee812":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d9dd172d20e74fdf9e9f1a8c1b44ad62":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4e624ff48e154a9eb78
c23588575c438":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"143e1c8353ae4b7d8c8faf3c0ec37e23":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ecc148272f7945beaf289f317e002b1e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"41382dbda6c543fea52ae9358f3bc484":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_379e3ddb5a0a4ebb94fd08107dd36b9d","IPY_MODEL_8fc6be46fc1a4926b2f0691e6d71a102","IPY_MODEL_17963fcd1df94c8fbf1d824abb1ee51c"],"layout":"IPY_MODEL_f5409c8fbab14f1189246f79a0cc35c5"}},"379e3ddb5a0a4ebb94fd08107dd36b9d":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_c08946b0f4504ed3bc4bb632939ca6ba","placeholder":"​","style":"IPY_MODEL_630e45546c10408da881c7610cb0aaa5","value":"model.safetensors: 
100%"}},"8fc6be46fc1a4926b2f0691e6d71a102":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_4803ece45acb4b7baf9fe89278ae1ff1","max":1089213696,"min":0,"orientation":"horizontal","style":"IPY_MODEL_9c21467a6f97410490c13fb743a1f225","value":1089213696}},"17963fcd1df94c8fbf1d824abb1ee51c":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_2a741067cb5e4937aa5985051ac5aabc","placeholder":"​","style":"IPY_MODEL_187d698e54114040acefff20ae3cc5ee","value":" 1.09G/1.09G [00:52<00:00, 18.3MB/s]"}},"f5409c8fbab14f1189246f79a0cc35c5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"c08946b0f4504ed3bc4bb632939ca6ba":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"630e45546c10408da881c7610cb0aaa5":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version"
:"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"4803ece45acb4b7baf9fe89278ae1ff1":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9c21467a6f97410490c13fb743a1f225":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"2a741067cb5e4937aa5985051ac5aabc":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"187d698e54114040acefff20ae3cc5ee":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"88bef20b6ced40edbd26506ca1f2cbbe":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_c9b4702932924a89b7545a29684b0957","IPY_MODEL_
56cf43f5ded54117aef82f7200a260db","IPY_MODEL_129dc288ac8e408daa2a9f6425fb5515"],"layout":"IPY_MODEL_260784357d8d422aa7c34575d9965157"}},"c9b4702932924a89b7545a29684b0957":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_3bbe3314f549423998061f7cfb65acb6","placeholder":"​","style":"IPY_MODEL_58142236ecc34ecf88c28a1b32a4b84d","value":"generation_config.json: 100%"}},"56cf43f5ded54117aef82f7200a260db":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_c9041310bfeb49d995b5645e3341ab25","max":257,"min":0,"orientation":"horizontal","style":"IPY_MODEL_63336213355949ccab9abe89238df475","value":257}},"129dc288ac8e408daa2a9f6425fb5515":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_48a7448a69514a718e76a0ddc4f30e0c","placeholder":"​","style":"IPY_MODEL_fcaa6719d5f94bc1971ea3cee6c14111","value":" 257/257 [00:00<00:00, 
23.6kB/s]"}},"260784357d8d422aa7c34575d9965157":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3bbe3314f549423998061f7cfb65acb6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"58142236ecc34ecf88c28a1b32a4b84d":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"c9041310bfeb49d995b5645e3341ab25":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"63336213355949ccab
9abe89238df475":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"48a7448a69514a718e76a0ddc4f30e0c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fcaa6719d5f94bc1971ea3cee6c14111":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"68c3da25ae554b04aa9b2ab0d1592b64":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_c427332d607d414ebebf8fa496a39216","IPY_MODEL_ea928406398043369e74379d79b6f4c1","IPY_MODEL_17af94ed122841549d44d862df2a4068"],"layout":"IPY_MODEL_6ae37d7bc12c4b02be6c49923d5873d2"}},"c427332d607d414ebebf8fa496a39216":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5bd40c9e41e143b7b1c554f86f0bf4b0","placeholder":"​","style":"IPY_MODEL_731d166984d64a429a2adf30297e83f1","value":"tokenizer_config.json: 
100%"}},"ea928406398043369e74379d79b6f4c1":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_1c7859ed345e45e0aa7925a675f9bf3c","max":20120,"min":0,"orientation":"horizontal","style":"IPY_MODEL_62c2f8139ddd489a8a80fefac0765190","value":20120}},"17af94ed122841549d44d862df2a4068":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_76f0e1d5eca34d0ca0c369b5d11e44e8","placeholder":"​","style":"IPY_MODEL_e54ea775cc674701bf9b1f128417c6fb","value":" 20.1k/20.1k [00:00<00:00, 1.71MB/s]"}},"6ae37d7bc12c4b02be6c49923d5873d2":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5bd40c9e41e143b7b1c554f86f0bf4b0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"731d166984d64a429a2adf30297e83f1":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","
_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"1c7859ed345e45e0aa7925a675f9bf3c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"62c2f8139ddd489a8a80fefac0765190":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"76f0e1d5eca34d0ca0c369b5d11e44e8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e54ea775cc674701bf9b1f128417c6fb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a54593d03c69409cabccf719b0e6c0d8":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_a1363716fc8c49a584481b857ef964d8","IPY_MODEL_3a15072417
1f4cd89e7eb7e0759f052d","IPY_MODEL_61eba9f4c0ec45948b403455b304ef57"],"layout":"IPY_MODEL_9ba438fac2b849d1b32988aca75f7e1b"}},"a1363716fc8c49a584481b857ef964d8":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_fd937838e040408e851846ecb5ec8424","placeholder":"​","style":"IPY_MODEL_fc73faa708e24a608c854ab969307593","value":"spiece.model: 100%"}},"3a150724171f4cd89e7eb7e0759f052d":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_47f6928fe8fa43369edc7b9b8078c7f5","max":1912529,"min":0,"orientation":"horizontal","style":"IPY_MODEL_e8f142d5af6244bea2f7e3f8908f4121","value":1912529}},"61eba9f4c0ec45948b403455b304ef57":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_1dbc916fe74d4d6bb90dff70ed6b4505","placeholder":"​","style":"IPY_MODEL_bece2da2b54b493a9c080a4ac25a6efb","value":" 1.91M/1.91M [00:00<00:00, 
8.16MB/s]"}},"9ba438fac2b849d1b32988aca75f7e1b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fd937838e040408e851846ecb5ec8424":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"fc73faa708e24a608c854ab969307593":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"47f6928fe8fa43369edc7b9b8078c7f5":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"e8f142d5af6244bea2
f7e3f8908f4121":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"1dbc916fe74d4d6bb90dff70ed6b4505":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"bece2da2b54b493a9c080a4ac25a6efb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"d5cecfa160f14c35980e2dcbf9de9371":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_0d7feaad7b5046c382142ac0839a7dd6","IPY_MODEL_ab4d07ba885c479b983424b4ca25bda6","IPY_MODEL_a4fe1cc239e543a59331847bab4f232f"],"layout":"IPY_MODEL_3d5f4941262d409aa51abbd4fd3b5b93"}},"0d7feaad7b5046c382142ac0839a7dd6":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_9ced2a7525b34f59a07475ce0481366a","placeholder":"​","style":"IPY_MODEL_6fdf8ed2cdb844b38067397938433fa4","value":"tokenizer.json: 
100%"}},"ab4d07ba885c479b983424b4ca25bda6":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_2241247c6e3841a49c4f1cacbf0d7a8b","max":6597509,"min":0,"orientation":"horizontal","style":"IPY_MODEL_6e4b5eced7de4fd49963ad5fdcfdfb3e","value":6597509}},"a4fe1cc239e543a59331847bab4f232f":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_d07bbf40c40243f9a19b77f0006c17f9","placeholder":"​","style":"IPY_MODEL_3f9f6d6c8c9e4668a5919972bf492503","value":" 6.60M/6.60M [00:00<00:00, 7.08MB/s]"}},"3d5f4941262d409aa51abbd4fd3b5b93":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9ced2a7525b34f59a07475ce0481366a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6fdf8ed2cdb844b38067397938433fa4":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.
0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"2241247c6e3841a49c4f1cacbf0d7a8b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6e4b5eced7de4fd49963ad5fdcfdfb3e":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"d07bbf40c40243f9a19b77f0006c17f9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"3f9f6d6c8c9e4668a5919972bf492503":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"235032662a62475e92c1f72a2bc7068e":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_98531fc71aec45be85c09e948c858e92","IPY_MODEL_bac34a
f14b8f496d956aaef0b37e1855","IPY_MODEL_7de3bd378c6f42f59b7b017ccc5e6113"],"layout":"IPY_MODEL_81438efe52d9463b8e90db04219c63d7"}},"98531fc71aec45be85c09e948c858e92":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5dc4c73cd1f34bbe8ade9b0e609a17f6","placeholder":"​","style":"IPY_MODEL_482358d8df1b48288e58938f97a637b3","value":"special_tokens_map.json: 100%"}},"bac34af14b8f496d956aaef0b37e1855":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_e6f960b76aec4fc682122bdfe2e88582","max":2222,"min":0,"orientation":"horizontal","style":"IPY_MODEL_b426002e064f4beea64821a7d2ff3c4f","value":2222}},"7de3bd378c6f42f59b7b017ccc5e6113":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_a746bde200314ee6b5f156acfe753d60","placeholder":"​","style":"IPY_MODEL_ff855b97a8b04239bbad1213ec50b1ab","value":" 2.22k/2.22k [00:00<00:00, 
217kB/s]"}},"81438efe52d9463b8e90db04219c63d7":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"5dc4c73cd1f34bbe8ade9b0e609a17f6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"482358d8df1b48288e58938f97a637b3":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"e6f960b76aec4fc682122bdfe2e88582":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"b426002e064f4beea64
821a7d2ff3c4f":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"a746bde200314ee6b5f156acfe753d60":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ff855b97a8b04239bbad1213ec50b1ab":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"a57e020b365744da8659d19a78c6071d":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_75eed191edbf4e5fa50334aa8a1f303e","IPY_MODEL_122bf93d09a4419fbc37acd48aee9b1c","IPY_MODEL_a1f26c2fedd946c0809b978f21e6e53c"],"layout":"IPY_MODEL_8f73444d4e6f4d60ae0902b1d1fe742a"}},"75eed191edbf4e5fa50334aa8a1f303e":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_428a31283cd44128a50f4ef146941620","placeholder":"​","style":"IPY_MODEL_6f80a076c08b44df8efc41c3bd5f38ad","value":"Downloading builder script: 
100%"}},"122bf93d09a4419fbc37acd48aee9b1c":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_5cfba2013b914aa6b8e111396b6927d9","max":6270,"min":0,"orientation":"horizontal","style":"IPY_MODEL_ed74ece02dc743c8b335265c392f3f8d","value":6270}},"a1f26c2fedd946c0809b978f21e6e53c":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_703c3c3786a045769e9767959163a919","placeholder":"​","style":"IPY_MODEL_07bf8f86f3ae45b1aa2cee41e0a334fb","value":" 6.27k/6.27k [00:00<00:00, 543kB/s]"}},"8f73444d4e6f4d60ae0902b1d1fe742a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"428a31283cd44128a50f4ef146941620":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6f80a076c08b44df8efc41c3bd5f38ad":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_mo
del_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"5cfba2013b914aa6b8e111396b6927d9":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ed74ece02dc743c8b335265c392f3f8d":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"703c3c3786a045769e9767959163a919":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"07bf8f86f3ae45b1aa2cee41e0a334fb":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}},"accelerator":"GPU"},"nbformat":4,"nbformat_minor":0} \ No newline at end of file diff --git a/generation_config.json b/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..cf9693db59bc306100110da4920a713c21a86163 --- /dev/null +++ b/generation_config.json @@ -0,0 +1,12 @@ +{ + "_from_model_config": true, + "bos_token_id": 0, + "decoder_start_token_id": 0, + "eos_token_id": 1, + "forced_eos_token_id": 1, + "length_penalty": 0.8, + 
"max_length": 16384, + "num_beams": 8, + "pad_token_id": 0, + "transformers_version": "4.41.2" +} diff --git a/model.safetensors b/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..aa766f680f6d5435f91e2bb731e26f9e643cbb85 --- /dev/null +++ b/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb8e06dc0a05fb91f4976bb1a0aeb0f9fdc8011e21cf1c857922f32affdbfd22 +size 1089213696 diff --git a/runs/Jun21_13-18-21_5f60db11eb42/events.out.tfevents.1718975901.5f60db11eb42.3507.0 b/runs/Jun21_13-18-21_5f60db11eb42/events.out.tfevents.1718975901.5f60db11eb42.3507.0 new file mode 100644 index 0000000000000000000000000000000000000000..8f9bc73d54a77d58dc311bd73dd9c466d015b633 --- /dev/null +++ b/runs/Jun21_13-18-21_5f60db11eb42/events.out.tfevents.1718975901.5f60db11eb42.3507.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63bf786dfaa9d04bb0413849144354f894c3c0d524626402d074c855643808fd +size 40182 diff --git a/sample_data/README.md b/sample_data/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e46cdae34844234bc75daeefda03a47aa7f19516 --- /dev/null +++ b/sample_data/README.md @@ -0,0 +1,19 @@ +This directory includes a few sample datasets to get you started. + +* `california_housing_data*.csv` is California housing data from the 1990 US + Census; more information is available at: + https://developers.google.com/machine-learning/crash-course/california-housing-data-description + +* `mnist_*.csv` is a small sample of the + [MNIST database](https://en.wikipedia.org/wiki/MNIST_database), which is + described at: http://yann.lecun.com/exdb/mnist/ + +* `anscombe.json` contains a copy of + [Anscombe's quartet](https://en.wikipedia.org/wiki/Anscombe%27s_quartet); it + was originally described in + + Anscombe, F. J. (1973). 'Graphs in Statistical Analysis'. American + Statistician. 27 (1): 17-21. JSTOR 2682899. + + and our copy was prepared by the + [vega_datasets library](https://github.com/altair-viz/vega_datasets/blob/4f67bdaad10f45e3549984e17e1b3088c731503d/vega_datasets/_data/anscombe.json). 
diff --git a/sample_data/anscombe.json b/sample_data/anscombe.json new file mode 100644 index 0000000000000000000000000000000000000000..d6c17c29f303f700bc66e1a8abdf1a0c9ce21660 --- /dev/null +++ b/sample_data/anscombe.json @@ -0,0 +1,49 @@ +[ + {"Series":"I", "X":10.0, "Y":8.04}, + {"Series":"I", "X":8.0, "Y":6.95}, + {"Series":"I", "X":13.0, "Y":7.58}, + {"Series":"I", "X":9.0, "Y":8.81}, + {"Series":"I", "X":11.0, "Y":8.33}, + {"Series":"I", "X":14.0, "Y":9.96}, + {"Series":"I", "X":6.0, "Y":7.24}, + {"Series":"I", "X":4.0, "Y":4.26}, + {"Series":"I", "X":12.0, "Y":10.84}, + {"Series":"I", "X":7.0, "Y":4.81}, + {"Series":"I", "X":5.0, "Y":5.68}, + + {"Series":"II", "X":10.0, "Y":9.14}, + {"Series":"II", "X":8.0, "Y":8.14}, + {"Series":"II", "X":13.0, "Y":8.74}, + {"Series":"II", "X":9.0, "Y":8.77}, + {"Series":"II", "X":11.0, "Y":9.26}, + {"Series":"II", "X":14.0, "Y":8.10}, + {"Series":"II", "X":6.0, "Y":6.13}, + {"Series":"II", "X":4.0, "Y":3.10}, + {"Series":"II", "X":12.0, "Y":9.13}, + {"Series":"II", "X":7.0, "Y":7.26}, + {"Series":"II", "X":5.0, "Y":4.74}, + + {"Series":"III", "X":10.0, "Y":7.46}, + {"Series":"III", "X":8.0, "Y":6.77}, + {"Series":"III", "X":13.0, "Y":12.74}, + {"Series":"III", "X":9.0, "Y":7.11}, + {"Series":"III", "X":11.0, "Y":7.81}, + {"Series":"III", "X":14.0, "Y":8.84}, + {"Series":"III", "X":6.0, "Y":6.08}, + {"Series":"III", "X":4.0, "Y":5.39}, + {"Series":"III", "X":12.0, "Y":8.15}, + {"Series":"III", "X":7.0, "Y":6.42}, + {"Series":"III", "X":5.0, "Y":5.73}, + + {"Series":"IV", "X":8.0, "Y":6.58}, + {"Series":"IV", "X":8.0, "Y":5.76}, + {"Series":"IV", "X":8.0, "Y":7.71}, + {"Series":"IV", "X":8.0, "Y":8.84}, + {"Series":"IV", "X":8.0, "Y":8.47}, + {"Series":"IV", "X":8.0, "Y":7.04}, + {"Series":"IV", "X":8.0, "Y":5.25}, + {"Series":"IV", "X":19.0, "Y":12.50}, + {"Series":"IV", "X":8.0, "Y":5.56}, + {"Series":"IV", "X":8.0, "Y":7.91}, + {"Series":"IV", "X":8.0, "Y":6.89} +] diff --git a/sample_data/california_housing_test.csv b/sample_data/california_housing_test.csv new file mode 100644 index 0000000000000000000000000000000000000000..5210d8c5330bed9070a9b12a4f35bc01f5faed4c --- /dev/null +++ b/sample_data/california_housing_test.csv @@ -0,0 +1,3001 @@ +"longitude","latitude","housing_median_age","total_rooms","total_bedrooms","population","households","median_income","median_house_value" +-122.050000,37.370000,27.000000,3885.000000,661.000000,1537.000000,606.000000,6.608500,344700.000000 +-118.300000,34.260000,43.000000,1510.000000,310.000000,809.000000,277.000000,3.599000,176500.000000 +-117.810000,33.780000,27.000000,3589.000000,507.000000,1484.000000,495.000000,5.793400,270500.000000 +-118.360000,33.820000,28.000000,67.000000,15.000000,49.000000,11.000000,6.135900,330000.000000 +-119.670000,36.330000,19.000000,1241.000000,244.000000,850.000000,237.000000,2.937500,81700.000000 +-119.560000,36.510000,37.000000,1018.000000,213.000000,663.000000,204.000000,1.663500,67000.000000 +-121.430000,38.630000,43.000000,1009.000000,225.000000,604.000000,218.000000,1.664100,67000.000000 +-120.650000,35.480000,19.000000,2310.000000,471.000000,1341.000000,441.000000,3.225000,166900.000000 +-122.840000,38.400000,15.000000,3080.000000,617.000000,1446.000000,599.000000,3.669600,194400.000000 +-118.020000,34.080000,31.000000,2402.000000,632.000000,2830.000000,603.000000,2.333300,164200.000000 +-118.240000,33.980000,45.000000,972.000000,249.000000,1288.000000,261.000000,2.205400,125000.000000 
+-119.120000,35.850000,37.000000,736.000000,166.000000,564.000000,138.000000,2.416700,58300.000000 +-121.930000,37.250000,36.000000,1089.000000,182.000000,535.000000,170.000000,4.690000,252600.000000 +-117.030000,32.970000,16.000000,3936.000000,694.000000,1935.000000,659.000000,4.562500,231200.000000 +-117.970000,33.730000,27.000000,2097.000000,325.000000,1217.000000,331.000000,5.712100,222500.000000 +-117.990000,33.810000,42.000000,161.000000,40.000000,157.000000,50.000000,2.200000,153100.000000 +-120.810000,37.530000,15.000000,570.000000,123.000000,189.000000,107.000000,1.875000,181300.000000 +-121.200000,38.690000,26.000000,3077.000000,607.000000,1603.000000,595.000000,2.717400,137500.000000 +-118.880000,34.210000,26.000000,1590.000000,196.000000,654.000000,199.000000,6.585100,300000.000000 +-122.590000,38.010000,35.000000,8814.000000,1307.000000,3450.000000,1258.000000,6.172400,414300.000000 +-122.150000,37.750000,40.000000,1445.000000,256.000000,849.000000,255.000000,3.891300,126300.000000 +-121.370000,38.680000,36.000000,1775.000000,296.000000,937.000000,305.000000,3.178600,83400.000000 +-118.160000,34.070000,47.000000,2994.000000,543.000000,1651.000000,561.000000,3.864400,241500.000000 +-122.200000,37.790000,45.000000,2021.000000,528.000000,1410.000000,480.000000,2.778800,115400.000000 +-117.280000,33.280000,13.000000,6131.000000,1040.000000,4049.000000,940.000000,3.815600,150700.000000 +-118.030000,34.160000,36.000000,1401.000000,218.000000,667.000000,225.000000,7.161500,484700.000000 +-122.420000,37.760000,52.000000,3587.000000,1030.000000,2259.000000,979.000000,2.540300,250000.000000 +-118.390000,33.990000,32.000000,2612.000000,418.000000,1030.000000,402.000000,6.603000,369200.000000 +-118.450000,34.070000,19.000000,4845.000000,1609.000000,3751.000000,1539.000000,1.583000,350000.000000 +-118.480000,34.010000,30.000000,3078.000000,954.000000,1561.000000,901.000000,3.485200,425000.000000 +-119.350000,36.330000,14.000000,1195.000000,220.000000,568.000000,229.000000,3.148600,105600.000000 +-118.300000,33.910000,34.000000,1617.000000,493.000000,1530.000000,500.000000,2.618200,172600.000000 +-121.130000,39.310000,17.000000,3442.000000,705.000000,1693.000000,619.000000,2.810200,128900.000000 +-118.080000,34.550000,5.000000,16181.000000,2971.000000,8152.000000,2651.000000,4.523700,141800.000000 +-118.320000,33.940000,38.000000,1067.000000,170.000000,499.000000,169.000000,4.638900,183800.000000 +-118.110000,34.000000,33.000000,2886.000000,726.000000,2650.000000,728.000000,2.625000,178700.000000 +-122.530000,37.970000,52.000000,1560.000000,451.000000,700.000000,419.000000,2.512500,270800.000000 +-118.020000,33.920000,34.000000,1478.000000,251.000000,956.000000,277.000000,5.523800,185300.000000 +-118.050000,33.930000,31.000000,894.000000,203.000000,883.000000,190.000000,3.677100,141500.000000 +-119.010000,34.230000,11.000000,5785.000000,1035.000000,2760.000000,985.000000,4.693000,232200.000000 +-119.320000,36.200000,15.000000,1562.000000,275.000000,961.000000,287.000000,3.423100,83300.000000 +-116.920000,32.770000,16.000000,2770.000000,406.000000,1269.000000,429.000000,6.678300,275000.000000 +-118.060000,34.150000,37.000000,1980.000000,226.000000,697.000000,226.000000,15.000100,500001.000000 +-117.270000,34.090000,36.000000,848.000000,186.000000,737.000000,169.000000,0.983800,79300.000000 +-118.230000,34.130000,48.000000,1308.000000,286.000000,835.000000,294.000000,4.289100,214800.000000 +-117.240000,33.170000,4.000000,9998.000000,1874.000000,3925.000000,1672.000000,4.282600,237500.000000 
+-121.910000,37.440000,24.000000,1212.000000,251.000000,799.000000,242.000000,5.080800,212500.000000 +-118.290000,33.940000,47.000000,1782.000000,338.000000,1003.000000,329.000000,2.539800,105700.000000 +-121.350000,38.000000,6.000000,1649.000000,369.000000,732.000000,350.000000,3.423100,123800.000000 +-117.990000,33.780000,19.000000,7399.000000,1698.000000,3554.000000,1593.000000,3.104900,173900.000000 +-120.990000,37.700000,14.000000,9849.000000,1887.000000,4356.000000,1780.000000,3.587700,160900.000000 +-119.420000,35.970000,21.000000,554.000000,121.000000,426.000000,122.000000,2.351600,47500.000000 +-122.210000,37.800000,39.000000,2003.000000,500.000000,1109.000000,464.000000,3.068200,156500.000000 +-118.170000,33.800000,26.000000,1589.000000,380.000000,883.000000,366.000000,3.531300,187500.000000 +-117.900000,34.090000,39.000000,1726.000000,333.000000,892.000000,335.000000,4.340900,191800.000000 +-117.990000,33.930000,36.000000,1287.000000,233.000000,779.000000,229.000000,4.852300,175800.000000 +-121.420000,38.720000,10.000000,3054.000000,528.000000,1932.000000,510.000000,3.090300,91900.000000 +-118.770000,34.260000,26.000000,3038.000000,468.000000,1825.000000,468.000000,5.638500,196900.000000 +-121.930000,37.330000,44.000000,1449.000000,291.000000,676.000000,282.000000,3.575000,292200.000000 +-121.820000,37.250000,16.000000,2650.000000,600.000000,1969.000000,586.000000,3.946100,194300.000000 +-122.290000,37.560000,36.000000,805.000000,140.000000,445.000000,139.000000,5.822100,289400.000000 +-121.780000,37.230000,18.000000,1747.000000,317.000000,1055.000000,285.000000,5.898000,229100.000000 +-118.410000,34.000000,35.000000,1062.000000,305.000000,1026.000000,307.000000,2.715300,265500.000000 +-121.670000,40.870000,31.000000,1581.000000,299.000000,776.000000,287.000000,2.906300,77800.000000 +-118.000000,33.920000,26.000000,2830.000000,399.000000,1204.000000,404.000000,6.127300,289600.000000 +-117.220000,32.730000,38.000000,3966.000000,768.000000,1640.000000,729.000000,3.840900,291400.000000 +-121.080000,37.690000,19.000000,6473.000000,1212.000000,3559.000000,1123.000000,3.224600,129300.000000 +-117.530000,33.920000,12.000000,2290.000000,319.000000,728.000000,228.000000,6.156100,233500.000000 +-117.460000,34.080000,18.000000,3830.000000,750.000000,2767.000000,702.000000,3.660200,120700.000000 +-117.970000,33.860000,35.000000,1691.000000,367.000000,1265.000000,378.000000,3.585500,174300.000000 +-121.920000,37.330000,52.000000,2125.000000,382.000000,930.000000,387.000000,5.283100,299500.000000 +-118.200000,34.040000,44.000000,1582.000000,544.000000,1998.000000,515.000000,1.688800,125000.000000 +-118.060000,33.800000,22.000000,1892.000000,442.000000,1015.000000,404.000000,4.137900,212500.000000 +-122.050000,37.360000,34.000000,2400.000000,419.000000,1017.000000,384.000000,4.136900,316900.000000 +-123.790000,39.500000,24.000000,1421.000000,291.000000,588.000000,274.000000,2.325000,157300.000000 +-120.790000,37.490000,44.000000,1186.000000,225.000000,687.000000,234.000000,3.416700,160700.000000 +-121.890000,37.350000,47.000000,2879.000000,631.000000,2229.000000,606.000000,3.259900,183100.000000 +-118.430000,34.200000,29.000000,3051.000000,694.000000,1942.000000,679.000000,3.111800,238100.000000 +-118.750000,34.170000,18.000000,6217.000000,858.000000,2703.000000,834.000000,6.807500,325900.000000 +-122.470000,37.990000,22.000000,7274.000000,1002.000000,2468.000000,957.000000,7.494000,439200.000000 
+-120.690000,37.400000,46.000000,860.000000,130.000000,496.000000,147.000000,3.516700,137500.000000 +-118.280000,34.050000,44.000000,968.000000,384.000000,1805.000000,375.000000,1.480100,212500.000000 +-118.440000,34.250000,35.000000,1583.000000,324.000000,1481.000000,351.000000,3.700000,176000.000000 +-122.050000,38.260000,21.000000,7195.000000,1416.000000,3927.000000,1377.000000,3.091200,126300.000000 +-121.990000,37.350000,18.000000,1712.000000,509.000000,972.000000,467.000000,4.397100,238900.000000 +-121.020000,37.680000,28.000000,2875.000000,560.000000,1608.000000,558.000000,3.548900,106400.000000 +-119.850000,36.740000,35.000000,1191.000000,190.000000,537.000000,182.000000,3.537500,96700.000000 +-118.020000,34.080000,28.000000,2769.000000,631.000000,2452.000000,581.000000,2.607100,175900.000000 +-123.520000,41.010000,17.000000,1564.000000,345.000000,517.000000,222.000000,2.154200,83800.000000 +-122.400000,37.620000,44.000000,1619.000000,362.000000,1064.000000,335.000000,4.023800,224200.000000 +-118.130000,34.150000,24.000000,1125.000000,341.000000,579.000000,321.000000,2.812500,141700.000000 +-122.000000,37.980000,32.000000,1013.000000,169.000000,436.000000,173.000000,5.111800,226900.000000 +-118.450000,34.250000,36.000000,1453.000000,270.000000,808.000000,275.000000,4.383900,204600.000000 +-117.500000,33.870000,4.000000,6755.000000,1017.000000,2866.000000,850.000000,5.049300,239800.000000 +-122.220000,37.840000,50.000000,2935.000000,473.000000,1031.000000,479.000000,7.500000,295200.000000 +-119.820000,36.640000,30.000000,1694.000000,312.000000,1008.000000,321.000000,2.246600,96000.000000 +-120.210000,36.770000,20.000000,1745.000000,348.000000,1093.000000,302.000000,2.319400,90600.000000 +-120.970000,38.420000,16.000000,1748.000000,322.000000,4930.000000,287.000000,4.302900,121900.000000 +-121.190000,38.870000,20.000000,3118.000000,500.000000,1405.000000,519.000000,6.000000,209400.000000 +-118.200000,33.770000,52.000000,1375.000000,457.000000,1089.000000,317.000000,2.234400,200000.000000 +-118.300000,34.020000,49.000000,2120.000000,483.000000,1522.000000,416.000000,1.850000,116800.000000 +-122.230000,37.790000,43.000000,5963.000000,1344.000000,4367.000000,1231.000000,2.191700,112800.000000 +-121.310000,38.620000,31.000000,3114.000000,430.000000,1121.000000,456.000000,6.244000,240000.000000 +-117.250000,32.800000,35.000000,2281.000000,506.000000,1005.000000,496.000000,4.229600,275000.000000 +-118.260000,33.990000,36.000000,2016.000000,505.000000,1807.000000,464.000000,1.690100,103500.000000 +-119.390000,36.540000,34.000000,1590.000000,422.000000,1272.000000,407.000000,1.806800,59000.000000 +-121.510000,38.520000,30.000000,3236.000000,588.000000,1167.000000,569.000000,4.097200,181400.000000 +-119.180000,34.270000,6.000000,2307.000000,386.000000,910.000000,364.000000,5.215000,279500.000000 +-118.180000,33.770000,30.000000,1418.000000,439.000000,720.000000,417.000000,2.637100,159400.000000 +-122.430000,37.740000,52.000000,1514.000000,314.000000,724.000000,301.000000,5.329200,300900.000000 +-117.930000,33.910000,24.000000,1698.000000,297.000000,676.000000,273.000000,5.201700,364600.000000 +-124.160000,40.770000,35.000000,2141.000000,438.000000,1053.000000,434.000000,2.852900,85600.000000 +-117.950000,33.630000,27.000000,2489.000000,481.000000,1082.000000,443.000000,5.877700,358800.000000 +-118.050000,34.100000,36.000000,1606.000000,318.000000,889.000000,294.000000,4.793100,272600.000000 +-116.970000,32.810000,19.000000,1573.000000,471.000000,844.000000,414.000000,2.142200,125000.000000 
+-118.850000,34.170000,42.000000,564.000000,96.000000,220.000000,81.000000,4.562500,318800.000000 +-117.730000,33.630000,15.000000,2874.000000,592.000000,1382.000000,586.000000,5.513700,161800.000000 +-122.070000,37.340000,30.000000,1851.000000,238.000000,631.000000,236.000000,10.100700,500001.000000 +-117.180000,33.020000,15.000000,3540.000000,453.000000,1364.000000,425.000000,13.662300,500001.000000 +-118.410000,34.000000,38.000000,324.000000,70.000000,268.000000,73.000000,2.550000,271400.000000 +-121.960000,37.300000,20.000000,4228.000000,1006.000000,2334.000000,1007.000000,4.308100,227300.000000 +-121.740000,38.550000,34.000000,2299.000000,579.000000,1300.000000,536.000000,1.643500,148500.000000 +-118.210000,33.920000,28.000000,2949.000000,1003.000000,4551.000000,930.000000,1.902600,131900.000000 +-121.900000,36.610000,29.000000,3412.000000,827.000000,1574.000000,759.000000,2.930900,217100.000000 +-117.810000,33.840000,17.000000,4343.000000,515.000000,1605.000000,484.000000,10.598100,460100.000000 +-118.190000,33.930000,42.000000,1829.000000,391.000000,1614.000000,377.000000,3.191200,146400.000000 +-120.930000,37.730000,14.000000,2799.000000,618.000000,2294.000000,596.000000,2.634300,81500.000000 +-122.020000,37.960000,25.000000,2615.000000,368.000000,935.000000,366.000000,6.672700,305100.000000 +-122.470000,37.790000,52.000000,2844.000000,623.000000,1380.000000,596.000000,4.750000,500001.000000 +-117.190000,34.030000,25.000000,2513.000000,340.000000,900.000000,320.000000,6.496200,182400.000000 +-117.240000,32.800000,28.000000,1072.000000,331.000000,692.000000,321.000000,2.135700,187500.000000 +-118.130000,34.100000,19.000000,2742.000000,756.000000,1396.000000,703.000000,2.566300,197500.000000 +-122.420000,37.730000,50.000000,3426.000000,769.000000,2261.000000,671.000000,2.888000,246400.000000 +-118.140000,34.710000,32.000000,1164.000000,248.000000,588.000000,270.000000,1.191700,86900.000000 +-119.760000,36.750000,39.000000,2233.000000,563.000000,2031.000000,491.000000,1.864100,50800.000000 +-122.340000,37.560000,39.000000,3562.000000,391.000000,1139.000000,391.000000,12.641700,500001.000000 +-122.450000,40.460000,16.000000,2734.000000,501.000000,1413.000000,484.000000,2.808500,105700.000000 +-118.290000,33.910000,31.000000,2025.000000,618.000000,2231.000000,593.000000,2.474100,151200.000000 +-118.320000,33.910000,34.000000,3041.000000,677.000000,1920.000000,640.000000,4.530400,181300.000000 +-122.040000,37.360000,26.000000,3298.000000,460.000000,1241.000000,472.000000,6.875300,403000.000000 +-117.390000,34.100000,12.000000,7184.000000,1516.000000,4862.000000,1235.000000,2.449200,103800.000000 +-122.250000,37.800000,36.000000,1678.000000,606.000000,1645.000000,543.000000,2.230300,116700.000000 +-117.980000,34.100000,22.000000,5661.000000,1209.000000,5389.000000,1178.000000,3.772700,159700.000000 +-120.060000,36.950000,24.000000,646.000000,134.000000,454.000000,149.000000,2.125000,61900.000000 +-121.470000,39.490000,17.000000,1554.000000,242.000000,553.000000,230.000000,3.217400,91800.000000 +-122.200000,37.790000,35.000000,1802.000000,459.000000,1009.000000,390.000000,2.303600,126000.000000 +-117.230000,32.750000,23.000000,2415.000000,653.000000,1275.000000,596.000000,3.138900,101800.000000 +-119.750000,36.740000,39.000000,1740.000000,351.000000,1098.000000,347.000000,1.895800,51300.000000 +-117.920000,34.030000,35.000000,1341.000000,233.000000,898.000000,216.000000,4.111800,157300.000000 
+-121.640000,39.280000,25.000000,2857.000000,662.000000,2076.000000,685.000000,1.809500,64100.000000 +-117.140000,32.720000,45.000000,1140.000000,310.000000,840.000000,339.000000,1.615600,156300.000000 +-122.290000,37.540000,41.000000,1743.000000,349.000000,811.000000,349.000000,4.946400,282400.000000 +-117.910000,33.940000,15.000000,5799.000000,842.000000,2314.000000,787.000000,6.343300,350500.000000 +-118.380000,34.270000,8.000000,3248.000000,847.000000,2608.000000,731.000000,2.821400,158300.000000 +-122.030000,37.600000,24.000000,2077.000000,383.000000,1488.000000,389.000000,4.572100,214700.000000 +-117.130000,33.150000,16.000000,3907.000000,671.000000,1759.000000,663.000000,3.177600,172600.000000 +-118.280000,34.000000,42.000000,855.000000,284.000000,890.000000,247.000000,1.277800,112500.000000 +-122.450000,37.720000,52.000000,1729.000000,319.000000,890.000000,300.000000,4.303600,261800.000000 +-119.770000,36.770000,38.000000,3065.000000,658.000000,1441.000000,625.000000,2.056400,64700.000000 +-117.700000,33.640000,15.000000,5743.000000,773.000000,2380.000000,773.000000,8.192600,326600.000000 +-117.070000,32.790000,36.000000,3583.000000,642.000000,1711.000000,602.000000,3.974500,170800.000000 +-117.850000,33.620000,13.000000,5192.000000,658.000000,1865.000000,662.000000,15.000100,500001.000000 +-117.760000,33.710000,15.000000,1010.000000,350.000000,470.000000,342.000000,3.222900,108300.000000 +-117.190000,34.940000,31.000000,2034.000000,444.000000,1097.000000,367.000000,2.152200,60800.000000 +-120.970000,37.690000,15.000000,4065.000000,841.000000,1986.000000,680.000000,3.072000,114300.000000 +-117.190000,33.640000,12.000000,1481.000000,265.000000,757.000000,243.000000,3.235000,210700.000000 +-118.380000,33.910000,36.000000,2904.000000,515.000000,1463.000000,534.000000,5.837400,289600.000000 +-121.560000,38.260000,43.000000,1906.000000,327.000000,996.000000,314.000000,2.974400,136800.000000 +-118.960000,35.870000,17.000000,1668.000000,307.000000,888.000000,277.000000,3.779400,96200.000000 +-116.960000,32.800000,24.000000,2493.000000,693.000000,1420.000000,643.000000,1.835700,104200.000000 +-118.270000,33.940000,30.000000,1764.000000,397.000000,1406.000000,362.000000,1.449000,93100.000000 +-119.180000,34.190000,36.000000,4519.000000,1081.000000,4818.000000,1061.000000,2.856100,179100.000000 +-118.230000,33.900000,28.000000,1108.000000,284.000000,1498.000000,289.000000,2.470600,88800.000000 +-120.490000,37.260000,28.000000,2159.000000,416.000000,1283.000000,378.000000,1.893900,83000.000000 +-121.430000,38.640000,34.000000,2010.000000,411.000000,1501.000000,422.000000,2.041700,65900.000000 +-118.140000,34.170000,42.000000,2757.000000,713.000000,2112.000000,653.000000,2.714800,166800.000000 +-119.090000,35.410000,12.000000,3449.000000,522.000000,1754.000000,551.000000,5.623500,130600.000000 +-118.020000,33.710000,24.000000,2598.000000,443.000000,1184.000000,435.000000,5.862300,287800.000000 +-121.530000,39.060000,20.000000,561.000000,109.000000,308.000000,114.000000,3.302100,70800.000000 +-119.290000,34.280000,38.000000,2387.000000,748.000000,1537.000000,741.000000,2.314700,192500.000000 +-121.840000,37.340000,33.000000,1019.000000,191.000000,938.000000,215.000000,4.092900,165000.000000 +-117.990000,33.880000,42.000000,1461.000000,302.000000,986.000000,314.000000,3.955900,161100.000000 +-122.240000,37.550000,3.000000,6164.000000,1175.000000,2198.000000,975.000000,6.741300,435900.000000 +-121.800000,37.310000,21.000000,2630.000000,446.000000,1789.000000,389.000000,5.054300,232000.000000 
+-117.430000,34.080000,13.000000,4563.000000,1187.000000,2475.000000,1019.000000,2.118900,121700.000000 +-118.280000,34.020000,29.000000,515.000000,229.000000,2690.000000,217.000000,0.499900,500001.000000 +-117.300000,33.060000,31.000000,2128.000000,520.000000,1049.000000,485.000000,4.027000,290000.000000 +-118.200000,34.040000,18.000000,796.000000,227.000000,547.000000,218.000000,1.033300,135400.000000 +-117.630000,34.110000,30.000000,2674.000000,428.000000,1404.000000,456.000000,4.296900,165600.000000 +-121.940000,37.330000,37.000000,818.000000,269.000000,576.000000,261.000000,2.190200,250000.000000 +-118.070000,33.930000,5.000000,906.000000,187.000000,1453.000000,158.000000,4.125000,171900.000000 +-117.190000,32.830000,30.000000,3225.000000,555.000000,1601.000000,532.000000,4.331700,173300.000000 +-118.210000,33.890000,42.000000,1739.000000,370.000000,1104.000000,297.000000,2.212500,120700.000000 +-118.410000,34.190000,39.000000,1169.000000,242.000000,612.000000,247.000000,4.142900,200000.000000 +-117.000000,32.800000,29.000000,2045.000000,398.000000,912.000000,368.000000,3.018900,144100.000000 +-116.920000,32.860000,11.000000,2204.000000,518.000000,1472.000000,497.000000,2.369300,127000.000000 +-121.040000,38.950000,22.000000,1931.000000,445.000000,1009.000000,407.000000,2.750000,153200.000000 +-122.120000,37.450000,38.000000,1276.000000,314.000000,955.000000,287.000000,2.009600,155700.000000 +-119.480000,36.540000,28.000000,2112.000000,363.000000,1011.000000,335.000000,4.222200,108900.000000 +-121.020000,37.680000,25.000000,3262.000000,588.000000,1834.000000,578.000000,3.996000,114500.000000 +-123.280000,40.770000,25.000000,767.000000,206.000000,301.000000,121.000000,1.625000,79200.000000 +-122.890000,39.110000,10.000000,1588.000000,333.000000,585.000000,254.000000,2.255100,71100.000000 +-122.040000,37.970000,21.000000,6445.000000,1839.000000,3621.000000,1735.000000,2.584100,112500.000000 +-118.080000,33.810000,21.000000,1189.000000,281.000000,577.000000,264.000000,3.315500,237500.000000 +-118.310000,34.090000,36.000000,787.000000,420.000000,1506.000000,360.000000,1.241200,216700.000000 +-122.160000,37.750000,35.000000,667.000000,140.000000,406.000000,133.000000,3.804700,94300.000000 +-121.610000,38.380000,37.000000,1365.000000,276.000000,952.000000,268.000000,4.037000,156900.000000 +-122.100000,37.680000,31.000000,1892.000000,428.000000,1162.000000,389.000000,3.125000,167100.000000 +-122.280000,37.870000,49.000000,2026.000000,548.000000,963.000000,521.000000,1.980500,173700.000000 +-116.910000,34.240000,23.000000,6379.000000,1636.000000,1350.000000,568.000000,1.633600,124500.000000 +-121.830000,37.280000,33.000000,1115.000000,250.000000,1168.000000,261.000000,3.900900,178600.000000 +-118.300000,33.810000,17.000000,5544.000000,1068.000000,3008.000000,1038.000000,5.322000,282700.000000 +-117.960000,33.700000,23.000000,4417.000000,740.000000,1865.000000,693.000000,5.342800,279300.000000 +-122.140000,40.070000,31.000000,2053.000000,465.000000,1193.000000,447.000000,1.492300,44400.000000 +-121.440000,38.730000,25.000000,1287.000000,224.000000,727.000000,236.000000,4.739600,135500.000000 +-122.260000,37.550000,17.000000,4576.000000,814.000000,1941.000000,807.000000,5.957200,443800.000000 +-121.640000,37.140000,14.000000,5487.000000,1024.000000,2823.000000,979.000000,4.175000,229800.000000 +-117.180000,34.480000,8.000000,3561.000000,691.000000,2156.000000,659.000000,2.777800,86900.000000 +-122.280000,38.340000,44.000000,1066.000000,190.000000,416.000000,174.000000,3.638900,304000.000000 
+-117.900000,33.600000,25.000000,2465.000000,585.000000,906.000000,472.000000,3.653800,500001.000000 +-122.180000,37.780000,33.000000,142.000000,31.000000,575.000000,47.000000,3.875000,225000.000000 +-121.490000,38.510000,30.000000,3166.000000,607.000000,1857.000000,579.000000,3.176800,79500.000000 +-118.190000,33.910000,43.000000,1531.000000,357.000000,1509.000000,376.000000,2.635400,128100.000000 +-118.270000,34.100000,50.000000,2113.000000,398.000000,793.000000,418.000000,4.713200,304600.000000 +-121.440000,38.610000,34.000000,172.000000,38.000000,149.000000,55.000000,2.644200,55000.000000 +-121.910000,37.430000,33.000000,2791.000000,496.000000,1714.000000,485.000000,4.830400,224900.000000 +-117.860000,33.720000,31.000000,1194.000000,297.000000,1602.000000,306.000000,2.333300,157700.000000 +-118.350000,33.920000,29.000000,736.000000,232.000000,584.000000,231.000000,3.616700,200000.000000 +-117.260000,33.840000,12.000000,1159.000000,209.000000,523.000000,159.000000,2.723200,123200.000000 +-122.430000,37.730000,52.000000,3602.000000,738.000000,2270.000000,647.000000,3.893400,251800.000000 +-121.800000,37.990000,16.000000,3077.000000,465.000000,1575.000000,446.000000,5.500000,179500.000000 +-122.580000,38.460000,15.000000,2936.000000,517.000000,1182.000000,501.000000,3.398100,246900.000000 +-122.470000,37.780000,52.000000,2042.000000,378.000000,1153.000000,408.000000,4.185600,404700.000000 +-118.080000,34.000000,32.000000,1165.000000,358.000000,997.000000,361.000000,0.981700,166300.000000 +-122.000000,37.350000,20.000000,4304.000000,851.000000,2059.000000,835.000000,5.167400,333000.000000 +-119.020000,35.410000,21.000000,2534.000000,554.000000,1297.000000,517.000000,2.057500,67000.000000 +-118.130000,34.180000,52.000000,1464.000000,211.000000,603.000000,226.000000,5.830900,309100.000000 +-121.940000,37.270000,23.000000,1932.000000,552.000000,997.000000,482.000000,3.662000,211900.000000 +-120.510000,35.910000,39.000000,768.000000,162.000000,264.000000,118.000000,5.324500,250000.000000 +-121.650000,38.030000,28.000000,3144.000000,694.000000,1095.000000,482.000000,3.440200,192400.000000 +-121.620000,39.790000,11.000000,3835.000000,727.000000,1456.000000,658.000000,2.537400,97200.000000 +-117.080000,32.820000,16.000000,1787.000000,236.000000,770.000000,228.000000,7.129800,278600.000000 +-123.210000,39.140000,15.000000,2235.000000,545.000000,1376.000000,516.000000,1.903200,100000.000000 +-119.610000,36.330000,32.000000,1492.000000,284.000000,926.000000,264.000000,3.013900,61500.000000 +-114.980000,33.070000,18.000000,1183.000000,363.000000,374.000000,127.000000,3.160700,57500.000000 +-118.380000,34.040000,36.000000,3005.000000,771.000000,2054.000000,758.000000,2.043700,309100.000000 +-117.990000,33.700000,13.000000,4013.000000,903.000000,1999.000000,859.000000,4.625000,248800.000000 +-116.260000,33.720000,10.000000,9404.000000,1827.000000,3208.000000,1283.000000,3.108600,105800.000000 +-118.400000,34.000000,10.000000,1526.000000,339.000000,705.000000,268.000000,5.808300,321800.000000 +-120.640000,35.460000,6.000000,5876.000000,1406.000000,2877.000000,1304.000000,2.543700,146400.000000 +-122.030000,37.390000,22.000000,3280.000000,933.000000,1842.000000,795.000000,4.410700,232700.000000 +-118.290000,33.880000,36.000000,1751.000000,438.000000,1175.000000,419.000000,3.073900,218600.000000 +-117.020000,32.690000,7.000000,6055.000000,1004.000000,3031.000000,952.000000,4.436000,135000.000000 +-119.320000,36.300000,15.000000,2864.000000,571.000000,1480.000000,475.000000,2.969800,93400.000000 
+-122.310000,38.010000,18.000000,4123.000000,874.000000,1895.000000,772.000000,3.275900,195000.000000 +-118.860000,34.190000,27.000000,1931.000000,261.000000,736.000000,244.000000,6.780500,392900.000000 +-117.140000,33.810000,13.000000,4496.000000,756.000000,2044.000000,695.000000,3.277800,148800.000000 +-118.640000,34.220000,25.000000,2762.000000,410.000000,1166.000000,439.000000,6.864300,333700.000000 +-116.630000,33.890000,22.000000,1540.000000,364.000000,610.000000,268.000000,1.522700,71000.000000 +-118.280000,34.110000,45.000000,1607.000000,331.000000,633.000000,332.000000,3.144500,438300.000000 +-119.030000,35.380000,52.000000,1695.000000,290.000000,540.000000,260.000000,2.731200,147100.000000 +-118.260000,33.880000,36.000000,1212.000000,222.000000,775.000000,224.000000,5.559100,136500.000000 +-117.890000,33.850000,18.000000,2036.000000,414.000000,1292.000000,380.000000,3.875000,273000.000000 +-122.090000,37.380000,36.000000,2587.000000,416.000000,1055.000000,410.000000,6.199500,407200.000000 +-122.940000,39.100000,18.000000,681.000000,120.000000,272.000000,105.000000,2.890600,140600.000000 +-117.100000,32.680000,42.000000,2013.000000,568.000000,1920.000000,557.000000,2.072400,107600.000000 +-118.980000,35.410000,36.000000,1482.000000,266.000000,640.000000,274.000000,3.875000,94500.000000 +-120.230000,37.960000,52.000000,1230.000000,262.000000,609.000000,243.000000,2.005700,68200.000000 +-118.200000,33.940000,43.000000,1934.000000,511.000000,1895.000000,493.000000,2.502900,159700.000000 +-121.300000,37.950000,9.000000,674.000000,242.000000,575.000000,193.000000,2.202400,45000.000000 +-121.740000,38.550000,33.000000,6861.000000,1820.000000,3717.000000,1767.000000,1.731100,182600.000000 +-121.960000,37.330000,35.000000,2294.000000,411.000000,1054.000000,449.000000,4.066700,276900.000000 +-120.600000,37.360000,27.000000,2521.000000,484.000000,1307.000000,456.000000,3.091100,86900.000000 +-122.470000,37.700000,44.000000,2034.000000,423.000000,1491.000000,373.000000,4.534100,236500.000000 +-117.050000,32.580000,23.000000,1918.000000,339.000000,1392.000000,340.000000,4.087000,134800.000000 +-117.900000,33.870000,34.000000,1411.000000,292.000000,1040.000000,299.000000,3.433800,195200.000000 +-117.230000,32.870000,15.000000,2290.000000,662.000000,1034.000000,594.000000,3.010400,204200.000000 +-122.080000,37.880000,24.000000,2059.000000,462.000000,410.000000,294.000000,2.397100,99400.000000 +-118.210000,33.800000,45.000000,1160.000000,274.000000,1095.000000,269.000000,2.730800,139000.000000 +-122.080000,37.640000,30.000000,5267.000000,1253.000000,4065.000000,1113.000000,3.347900,182100.000000 +-118.380000,34.140000,40.000000,1965.000000,354.000000,666.000000,357.000000,6.087600,483800.000000 +-118.200000,33.800000,45.000000,2456.000000,495.000000,1300.000000,450.000000,3.979200,210200.000000 +-117.620000,33.430000,27.000000,3858.000000,1062.000000,2321.000000,873.000000,3.315500,231000.000000 +-122.110000,37.400000,31.000000,2836.000000,490.000000,1138.000000,481.000000,4.951900,500001.000000 +-122.840000,38.980000,21.000000,939.000000,176.000000,556.000000,178.000000,1.719600,75000.000000 +-121.260000,38.270000,20.000000,1314.000000,229.000000,712.000000,219.000000,4.412500,144600.000000 +-116.890000,33.730000,15.000000,2094.000000,316.000000,937.000000,277.000000,5.362300,201300.000000 +-122.670000,38.440000,32.000000,3771.000000,741.000000,1786.000000,721.000000,3.241500,172200.000000 +-117.940000,33.870000,46.000000,2066.000000,450.000000,1275.000000,448.000000,3.937500,187000.000000 
+-118.140000,34.690000,34.000000,1439.000000,327.000000,708.000000,298.000000,3.269900,100000.000000 +-122.400000,37.590000,22.000000,2754.000000,477.000000,1163.000000,479.000000,6.230600,500001.000000 +-118.080000,33.840000,28.000000,4216.000000,948.000000,2997.000000,896.000000,3.796100,162700.000000 +-116.360000,33.780000,6.000000,24121.000000,4522.000000,4176.000000,2221.000000,3.379900,239300.000000 +-117.940000,33.850000,26.000000,1888.000000,429.000000,1550.000000,458.000000,3.339300,168600.000000 +-117.470000,33.940000,34.000000,559.000000,139.000000,532.000000,137.000000,3.068700,88500.000000 +-117.640000,33.650000,4.000000,6842.000000,1512.000000,3256.000000,1439.000000,5.413200,216600.000000 +-118.500000,34.240000,34.000000,2634.000000,412.000000,1114.000000,423.000000,5.940100,315300.000000 +-118.190000,33.780000,24.000000,225.000000,72.000000,439.000000,71.000000,2.853300,137500.000000 +-117.660000,34.120000,16.000000,3853.000000,541.000000,1726.000000,497.000000,6.119500,251100.000000 +-122.300000,37.970000,34.000000,2854.000000,528.000000,1211.000000,452.000000,3.535300,164700.000000 +-122.140000,37.680000,31.000000,3184.000000,716.000000,1561.000000,628.000000,2.795500,183100.000000 +-118.260000,33.940000,41.000000,1510.000000,410.000000,1408.000000,389.000000,1.650000,94200.000000 +-118.230000,33.930000,39.000000,2065.000000,532.000000,2015.000000,535.000000,0.847800,104900.000000 +-120.960000,38.660000,11.000000,2339.000000,436.000000,1062.000000,380.000000,3.903600,180800.000000 +-117.840000,35.350000,28.000000,1913.000000,486.000000,858.000000,371.000000,1.996200,50800.000000 +-119.160000,34.200000,35.000000,2183.000000,636.000000,3504.000000,623.000000,1.970400,160300.000000 +-122.650000,38.230000,52.000000,1735.000000,347.000000,712.000000,343.000000,3.171100,200800.000000 +-121.880000,37.370000,14.000000,6016.000000,1404.000000,3258.000000,1316.000000,3.574500,333700.000000 +-118.400000,34.040000,43.000000,3863.000000,537.000000,1398.000000,511.000000,8.593800,500001.000000 +-118.270000,34.110000,36.000000,1832.000000,539.000000,934.000000,486.000000,3.052100,276600.000000 +-118.440000,34.300000,38.000000,1595.000000,314.000000,1181.000000,327.000000,3.400000,155500.000000 +-121.770000,37.680000,41.000000,1501.000000,299.000000,629.000000,288.000000,4.680600,209400.000000 +-119.990000,38.880000,17.000000,2807.000000,529.000000,675.000000,251.000000,2.745700,107800.000000 +-118.360000,33.960000,26.000000,3543.000000,1055.000000,2742.000000,951.000000,2.550400,151300.000000 +-118.320000,33.970000,52.000000,1778.000000,320.000000,795.000000,279.000000,3.511400,138800.000000 +-118.270000,34.270000,27.000000,5205.000000,859.000000,2363.000000,888.000000,6.194600,276100.000000 +-116.810000,33.900000,17.000000,2009.000000,469.000000,820.000000,381.000000,1.328600,81800.000000 +-118.390000,33.960000,45.000000,1436.000000,374.000000,662.000000,292.000000,3.625000,329400.000000 +-118.070000,33.910000,29.000000,2387.000000,570.000000,1978.000000,548.000000,3.195700,159200.000000 +-118.350000,34.220000,30.000000,1260.000000,222.000000,638.000000,229.000000,4.130200,258300.000000 +-118.430000,34.020000,41.000000,2403.000000,516.000000,1001.000000,514.000000,4.390600,500001.000000 +-121.730000,37.680000,17.000000,20354.000000,3493.000000,8768.000000,3293.000000,5.449600,238900.000000 +-117.310000,32.980000,17.000000,2789.000000,648.000000,849.000000,345.000000,4.101200,244700.000000 
+-122.290000,37.560000,12.000000,6474.000000,1467.000000,2516.000000,1390.000000,5.035300,305800.000000 +-119.690000,34.380000,39.000000,1383.000000,459.000000,677.000000,362.000000,2.250000,281300.000000 +-122.070000,38.000000,37.000000,978.000000,202.000000,462.000000,184.000000,3.625000,156300.000000 +-118.050000,34.160000,41.000000,3320.000000,713.000000,1236.000000,659.000000,3.569400,278600.000000 +-122.070000,37.660000,28.000000,2280.000000,610.000000,1255.000000,587.000000,2.671900,161200.000000 +-121.800000,37.270000,10.000000,3301.000000,593.000000,2190.000000,575.000000,6.223000,260700.000000 +-122.690000,38.340000,23.000000,2846.000000,516.000000,1526.000000,492.000000,3.733000,163500.000000 +-117.080000,32.700000,35.000000,1477.000000,264.000000,852.000000,279.000000,3.178600,100600.000000 +-119.760000,36.730000,46.000000,1347.000000,282.000000,854.000000,267.000000,1.872300,52600.000000 +-118.370000,34.050000,52.000000,1563.000000,306.000000,776.000000,308.000000,3.625000,440900.000000 +-122.700000,38.350000,14.000000,1555.000000,369.000000,493.000000,335.000000,1.603300,67500.000000 +-118.130000,34.010000,45.000000,1179.000000,268.000000,736.000000,252.000000,2.708300,161800.000000 +-119.350000,36.210000,26.000000,2481.000000,586.000000,1445.000000,498.000000,1.637800,60300.000000 +-117.670000,34.030000,20.000000,8561.000000,1411.000000,4861.000000,1450.000000,4.705600,165500.000000 +-117.970000,34.150000,33.000000,2474.000000,472.000000,1268.000000,437.000000,6.457600,500001.000000 +-118.080000,34.080000,38.000000,1889.000000,407.000000,1330.000000,396.000000,3.921900,205200.000000 +-121.230000,38.780000,13.000000,3813.000000,871.000000,1513.000000,783.000000,2.080700,142600.000000 +-118.200000,34.020000,49.000000,1098.000000,317.000000,1411.000000,301.000000,2.750000,146000.000000 +-118.170000,34.020000,41.000000,676.000000,216.000000,851.000000,199.000000,2.307700,140600.000000 +-117.800000,34.060000,34.000000,1081.000000,205.000000,1325.000000,252.000000,3.629800,108500.000000 +-118.300000,33.970000,46.000000,1425.000000,317.000000,1140.000000,304.000000,3.375000,98500.000000 +-122.470000,37.690000,30.000000,837.000000,213.000000,606.000000,199.000000,4.875000,258800.000000 +-118.200000,33.960000,44.000000,2144.000000,477.000000,1760.000000,452.000000,2.322100,161600.000000 +-117.130000,32.910000,16.000000,2715.000000,581.000000,1619.000000,584.000000,4.000000,154700.000000 +-119.770000,36.810000,25.000000,1565.000000,271.000000,661.000000,275.000000,3.427900,84700.000000 +-118.470000,34.250000,21.000000,2692.000000,477.000000,1330.000000,456.000000,4.541700,238900.000000 +-122.250000,37.800000,42.000000,4120.000000,1065.000000,1715.000000,1015.000000,2.934500,225000.000000 +-118.500000,34.170000,37.000000,880.000000,154.000000,369.000000,155.000000,4.142900,303600.000000 +-122.240000,37.490000,38.000000,4105.000000,950.000000,2561.000000,909.000000,3.868400,265600.000000 +-117.150000,32.930000,16.000000,2718.000000,438.000000,1515.000000,431.000000,5.143300,185300.000000 +-120.850000,37.770000,35.000000,404.000000,96.000000,261.000000,100.000000,2.458300,75000.000000 +-122.250000,37.830000,35.000000,1613.000000,428.000000,675.000000,422.000000,3.472200,243100.000000 +-118.330000,33.770000,33.000000,4244.000000,595.000000,1534.000000,557.000000,9.821400,500001.000000 +-124.150000,40.780000,41.000000,2127.000000,358.000000,911.000000,349.000000,3.171100,104200.000000 +-117.940000,33.790000,24.000000,4179.000000,784.000000,1902.000000,733.000000,4.798600,236500.000000 
+-121.590000,39.150000,5.000000,1922.000000,489.000000,938.000000,439.000000,2.047400,61300.000000 +-122.690000,38.440000,31.000000,1808.000000,315.000000,691.000000,280.000000,3.858300,193200.000000 +-122.510000,38.760000,9.000000,2589.000000,482.000000,1050.000000,374.000000,4.043500,132600.000000 +-117.890000,33.610000,45.000000,1883.000000,419.000000,653.000000,328.000000,4.222200,500001.000000 +-117.190000,32.770000,9.000000,634.000000,152.000000,248.000000,133.000000,3.857100,143800.000000 +-117.150000,32.750000,40.000000,2261.000000,579.000000,903.000000,525.000000,2.465000,198700.000000 +-122.210000,37.480000,20.000000,505.000000,216.000000,326.000000,216.000000,2.928600,237500.000000 +-118.250000,33.790000,38.000000,1730.000000,460.000000,1724.000000,424.000000,2.730800,150400.000000 +-120.490000,40.310000,16.000000,1821.000000,360.000000,969.000000,359.000000,3.464300,85100.000000 +-118.300000,33.740000,20.000000,2625.000000,673.000000,1184.000000,606.000000,3.916700,285200.000000 +-117.140000,32.700000,47.000000,552.000000,161.000000,593.000000,174.000000,0.958900,90000.000000 +-121.300000,37.970000,52.000000,2259.000000,417.000000,766.000000,385.000000,2.298100,105400.000000 +-119.780000,36.750000,31.000000,1404.000000,379.000000,1515.000000,387.000000,1.281300,56400.000000 +-118.380000,34.180000,32.000000,3553.000000,1060.000000,3129.000000,1010.000000,2.560300,174200.000000 +-118.130000,34.100000,24.000000,4670.000000,1185.000000,2478.000000,1107.000000,3.197500,252400.000000 +-118.300000,33.730000,42.000000,1731.000000,435.000000,866.000000,403.000000,2.745100,255400.000000 +-118.440000,33.990000,44.000000,305.000000,72.000000,156.000000,70.000000,5.964100,275000.000000 +-117.480000,34.080000,17.000000,1834.000000,390.000000,1253.000000,357.000000,3.102800,106400.000000 +-122.350000,37.970000,31.000000,2892.000000,685.000000,2104.000000,641.000000,3.218800,113800.000000 +-119.710000,34.410000,31.000000,1034.000000,319.000000,997.000000,308.000000,2.653800,231800.000000 +-116.920000,32.810000,23.000000,2668.000000,528.000000,1510.000000,524.000000,3.366900,158900.000000 +-122.110000,37.660000,35.000000,2843.000000,652.000000,1726.000000,643.000000,3.090000,174100.000000 +-117.410000,33.940000,29.000000,3181.000000,714.000000,1603.000000,706.000000,3.250000,112500.000000 +-122.450000,37.740000,38.000000,5688.000000,930.000000,2263.000000,908.000000,6.203000,346800.000000 +-118.360000,33.800000,38.000000,2553.000000,400.000000,1042.000000,393.000000,6.974200,500001.000000 +-121.660000,36.680000,10.000000,913.000000,265.000000,508.000000,251.000000,0.991400,147500.000000 +-122.420000,37.760000,52.000000,2038.000000,629.000000,2007.000000,596.000000,2.570100,266700.000000 +-118.290000,34.050000,30.000000,1417.000000,589.000000,1615.000000,540.000000,1.386700,193800.000000 +-119.820000,34.430000,15.000000,1482.000000,345.000000,669.000000,379.000000,3.077300,112500.000000 +-119.340000,36.220000,38.000000,2708.000000,460.000000,1260.000000,455.000000,3.090500,78200.000000 +-121.500000,38.610000,5.000000,1395.000000,373.000000,638.000000,322.000000,2.674500,225000.000000 +-121.880000,37.460000,5.000000,1819.000000,245.000000,802.000000,228.000000,10.972200,500001.000000 +-118.270000,33.940000,34.000000,721.000000,165.000000,661.000000,171.000000,2.078900,92400.000000 +-122.170000,37.730000,46.000000,2163.000000,470.000000,925.000000,435.000000,3.250000,177500.000000 +-122.220000,37.850000,28.000000,5287.000000,1048.000000,2031.000000,956.000000,5.457000,337300.000000 
+-117.200000,32.830000,36.000000,1089.000000,240.000000,623.000000,226.000000,2.590900,176000.000000 +-120.690000,35.490000,16.000000,2666.000000,450.000000,1203.000000,429.000000,4.137500,222400.000000 +-122.700000,38.970000,17.000000,2554.000000,540.000000,723.000000,319.000000,3.237500,114200.000000 +-118.370000,34.150000,29.000000,2630.000000,617.000000,1071.000000,573.000000,3.366900,376100.000000 +-118.350000,34.000000,40.000000,2894.000000,395.000000,1063.000000,409.000000,6.939000,372000.000000 +-118.390000,37.360000,38.000000,1813.000000,410.000000,902.000000,396.000000,2.326100,98400.000000 +-118.110000,34.200000,36.000000,4915.000000,725.000000,1897.000000,700.000000,6.827000,359400.000000 +-121.720000,36.810000,18.000000,1984.000000,379.000000,1078.000000,359.000000,3.296900,229900.000000 +-118.520000,34.160000,39.000000,2693.000000,478.000000,1219.000000,435.000000,5.170000,335400.000000 +-118.120000,33.900000,35.000000,3478.000000,730.000000,1885.000000,673.000000,2.937500,206500.000000 +-119.690000,36.790000,5.000000,2613.000000,476.000000,1490.000000,481.000000,4.099300,83000.000000 +-118.030000,33.780000,26.000000,2001.000000,302.000000,836.000000,298.000000,5.106100,257500.000000 +-120.670000,35.620000,6.000000,12779.000000,2441.000000,6085.000000,2157.000000,3.866100,168100.000000 +-118.430000,34.030000,36.000000,1552.000000,388.000000,867.000000,352.000000,3.646700,346700.000000 +-121.620000,39.130000,41.000000,1147.000000,243.000000,583.000000,239.000000,2.243100,63400.000000 +-118.970000,37.640000,13.000000,1907.000000,544.000000,575.000000,234.000000,3.068500,162500.000000 +-117.250000,32.740000,36.000000,3548.000000,956.000000,1648.000000,866.000000,2.696200,288200.000000 +-122.280000,37.800000,52.000000,215.000000,87.000000,904.000000,88.000000,0.866800,137500.000000 +-118.190000,34.140000,38.000000,1826.000000,300.000000,793.000000,297.000000,5.296200,291500.000000 +-117.900000,33.850000,32.000000,1605.000000,314.000000,986.000000,306.000000,3.337500,186200.000000 +-119.020000,37.640000,14.000000,5919.000000,1278.000000,265.000000,112.000000,3.243100,221400.000000 +-118.370000,34.200000,34.000000,2199.000000,609.000000,2488.000000,597.000000,2.986100,171800.000000 +-122.410000,37.750000,52.000000,1057.000000,276.000000,837.000000,292.000000,2.453100,229000.000000 +-117.940000,33.920000,28.000000,639.000000,179.000000,1062.000000,169.000000,3.058800,145200.000000 +-118.220000,34.120000,28.000000,3306.000000,1025.000000,2670.000000,942.000000,3.091900,185400.000000 +-117.240000,34.040000,4.000000,4289.000000,682.000000,1981.000000,705.000000,5.336600,165100.000000 +-122.080000,37.660000,33.000000,1547.000000,372.000000,1063.000000,356.000000,2.562500,154300.000000 +-122.280000,37.850000,48.000000,2063.000000,484.000000,1054.000000,466.000000,2.262500,132900.000000 +-118.210000,33.900000,35.000000,2420.000000,579.000000,2010.000000,540.000000,2.081700,104600.000000 +-118.010000,33.920000,35.000000,1606.000000,289.000000,829.000000,273.000000,5.273000,187600.000000 +-118.290000,34.180000,10.000000,4292.000000,1075.000000,2719.000000,987.000000,3.697400,286600.000000 +-118.210000,33.960000,48.000000,284.000000,104.000000,422.000000,119.000000,1.282600,145500.000000 +-117.230000,32.810000,28.000000,1508.000000,263.000000,996.000000,267.000000,3.802600,270000.000000 +-117.030000,33.130000,15.000000,7000.000000,1185.000000,3555.000000,1118.000000,4.702200,172800.000000 +-121.850000,37.220000,21.000000,6203.000000,798.000000,2494.000000,800.000000,7.720100,362700.000000 
+-122.400000,37.720000,47.000000,1465.000000,306.000000,1119.000000,315.000000,4.267200,219400.000000 +-120.470000,34.980000,6.000000,5762.000000,1115.000000,2551.000000,919.000000,3.072300,137300.000000 +-121.140000,37.480000,6.000000,1772.000000,332.000000,1011.000000,331.000000,3.704500,128100.000000 +-119.340000,36.620000,26.000000,1922.000000,339.000000,1148.000000,332.000000,2.605800,92200.000000 +-117.660000,34.080000,36.000000,1485.000000,236.000000,623.000000,261.000000,3.303600,141000.000000 +-116.840000,33.080000,15.000000,2755.000000,519.000000,1474.000000,460.000000,4.040800,225900.000000 +-118.290000,34.050000,11.000000,677.000000,370.000000,1143.000000,341.000000,2.386400,350000.000000 +-119.980000,38.940000,23.000000,1564.000000,298.000000,339.000000,147.000000,4.041700,99300.000000 +-118.100000,33.910000,35.000000,1653.000000,325.000000,1072.000000,301.000000,3.270800,159700.000000 +-120.070000,36.960000,42.000000,963.000000,216.000000,471.000000,211.000000,2.289800,66100.000000 +-119.110000,35.390000,22.000000,984.000000,176.000000,451.000000,170.000000,3.250000,88900.000000 +-117.720000,34.100000,46.000000,2477.000000,458.000000,1034.000000,455.000000,5.500000,289700.000000 +-117.900000,33.650000,30.000000,2196.000000,486.000000,1131.000000,460.000000,4.413500,272300.000000 +-121.980000,37.290000,31.000000,2750.000000,664.000000,1459.000000,660.000000,3.228700,264900.000000 +-122.030000,36.960000,32.000000,2182.000000,406.000000,1122.000000,370.000000,3.520000,284200.000000 +-117.420000,34.080000,21.000000,4460.000000,930.000000,2657.000000,839.000000,2.756900,127500.000000 +-117.660000,34.110000,19.000000,3445.000000,661.000000,1635.000000,580.000000,5.068100,230500.000000 +-119.290000,34.240000,27.000000,4742.000000,775.000000,1682.000000,696.000000,6.194000,500001.000000 +-117.020000,32.710000,20.000000,4050.000000,745.000000,2870.000000,761.000000,3.736600,121800.000000 +-122.850000,38.620000,16.000000,4418.000000,704.000000,1908.000000,697.000000,4.591300,244600.000000 +-118.330000,33.910000,35.000000,1092.000000,302.000000,962.000000,297.000000,3.590300,183300.000000 +-118.400000,34.020000,40.000000,593.000000,137.000000,371.000000,132.000000,4.693200,332800.000000 +-118.380000,33.840000,26.000000,2869.000000,567.000000,1157.000000,538.000000,6.038200,355300.000000 +-118.050000,34.110000,42.000000,3677.000000,627.000000,1779.000000,622.000000,5.150900,426500.000000 +-117.430000,33.930000,36.000000,2386.000000,396.000000,1176.000000,374.000000,4.512200,113300.000000 +-118.100000,34.160000,44.000000,2795.000000,496.000000,1235.000000,469.000000,4.238600,283700.000000 +-122.530000,37.860000,38.000000,1183.000000,196.000000,628.000000,205.000000,3.750000,478600.000000 +-118.300000,33.970000,42.000000,944.000000,200.000000,567.000000,190.000000,2.631100,124100.000000 +-118.200000,33.890000,37.000000,2394.000000,568.000000,2499.000000,551.000000,2.532100,105100.000000 +-118.020000,34.150000,44.000000,2419.000000,437.000000,1045.000000,432.000000,3.875000,280800.000000 +-121.530000,39.520000,30.000000,1030.000000,161.000000,448.000000,159.000000,2.482100,73800.000000 +-117.920000,33.900000,13.000000,1814.000000,320.000000,1010.000000,313.000000,6.348900,337900.000000 +-118.370000,34.210000,33.000000,2034.000000,470.000000,1990.000000,423.000000,3.745500,159600.000000 +-118.040000,33.850000,18.000000,3628.000000,546.000000,1922.000000,544.000000,7.505700,328500.000000 +-118.460000,33.980000,19.000000,2520.000000,726.000000,964.000000,663.000000,3.806800,500001.000000 
+-118.050000,33.900000,36.000000,1047.000000,227.000000,975.000000,239.000000,3.189700,155000.000000 +-122.950000,40.710000,26.000000,2231.000000,421.000000,987.000000,364.000000,2.479200,88800.000000 +-122.000000,37.300000,28.000000,5096.000000,1011.000000,2588.000000,954.000000,5.357000,355200.000000 +-121.860000,37.400000,21.000000,1386.000000,260.000000,946.000000,257.000000,6.522600,258500.000000 +-119.250000,36.560000,35.000000,1675.000000,373.000000,1131.000000,316.000000,1.672200,59100.000000 +-118.210000,34.560000,12.000000,2472.000000,408.000000,1048.000000,380.000000,4.709700,262100.000000 +-118.260000,34.020000,39.000000,698.000000,232.000000,1046.000000,228.000000,2.235600,119500.000000 +-117.280000,34.150000,32.000000,2170.000000,430.000000,815.000000,401.000000,3.176500,135000.000000 +-122.440000,37.660000,21.000000,5108.000000,1510.000000,3288.000000,1405.000000,3.192700,252600.000000 +-118.990000,35.390000,36.000000,1438.000000,348.000000,1054.000000,341.000000,1.831900,55400.000000 +-117.140000,34.060000,15.000000,3057.000000,510.000000,1154.000000,460.000000,3.974100,141100.000000 +-122.150000,37.410000,15.000000,2577.000000,360.000000,979.000000,364.000000,10.476000,500001.000000 +-121.200000,38.670000,26.000000,1546.000000,287.000000,773.000000,299.000000,2.980300,115400.000000 +-122.150000,37.470000,37.000000,1844.000000,382.000000,1634.000000,417.000000,2.799300,145500.000000 +-118.340000,33.950000,25.000000,3762.000000,1281.000000,4015.000000,1178.000000,2.158700,143800.000000 +-118.250000,34.080000,44.000000,1425.000000,438.000000,1121.000000,374.000000,2.110800,200000.000000 +-119.580000,36.100000,21.000000,1382.000000,327.000000,1469.000000,355.000000,1.396700,46500.000000 +-121.310000,38.710000,18.000000,3998.000000,744.000000,2071.000000,660.000000,4.383600,102000.000000 +-118.420000,34.120000,27.000000,2089.000000,303.000000,654.000000,270.000000,12.376700,500001.000000 +-117.180000,34.060000,52.000000,954.000000,233.000000,533.000000,239.000000,1.302100,100000.000000 +-115.900000,32.690000,18.000000,414.000000,86.000000,98.000000,54.000000,1.541700,57500.000000 +-118.360000,33.980000,46.000000,1425.000000,283.000000,782.000000,273.000000,5.057000,246300.000000 +-122.500000,37.600000,35.000000,2197.000000,369.000000,971.000000,326.000000,4.250000,241700.000000 +-121.500000,36.810000,20.000000,1345.000000,230.000000,731.000000,217.000000,4.233300,363300.000000 +-118.190000,33.820000,11.000000,872.000000,203.000000,422.000000,221.000000,4.636400,156300.000000 +-117.300000,34.150000,40.000000,961.000000,199.000000,509.000000,182.000000,2.060000,85500.000000 +-118.420000,34.230000,34.000000,1531.000000,278.000000,1064.000000,274.000000,5.668700,207300.000000 +-118.120000,33.900000,38.000000,1222.000000,282.000000,756.000000,256.000000,4.125000,173900.000000 +-119.800000,36.790000,45.000000,1337.000000,187.000000,471.000000,187.000000,5.187000,153800.000000 +-119.740000,34.350000,34.000000,1664.000000,292.000000,705.000000,257.000000,5.000000,329400.000000 +-121.970000,37.970000,26.000000,1977.000000,264.000000,817.000000,273.000000,5.751200,240200.000000 +-117.070000,34.050000,14.000000,5764.000000,1006.000000,1876.000000,841.000000,1.969400,173200.000000 +-122.290000,37.820000,2.000000,158.000000,43.000000,94.000000,57.000000,2.562500,60000.000000 +-116.310000,33.650000,8.000000,3079.000000,558.000000,1572.000000,474.000000,4.593800,102600.000000 +-118.270000,34.010000,43.000000,1235.000000,385.000000,1745.000000,372.000000,2.081700,113300.000000 
+-122.440000,37.760000,52.000000,1968.000000,472.000000,784.000000,430.000000,3.370200,370000.000000 +-118.270000,34.150000,14.000000,1744.000000,536.000000,1494.000000,531.000000,3.217100,230800.000000 +-118.410000,34.030000,36.000000,3053.000000,635.000000,1234.000000,577.000000,5.163700,500001.000000 +-121.450000,38.610000,32.000000,2436.000000,612.000000,1509.000000,618.000000,1.042400,81400.000000 +-117.250000,32.830000,17.000000,2075.000000,262.000000,704.000000,241.000000,10.952900,500001.000000 +-119.800000,36.820000,24.000000,5377.000000,1005.000000,2010.000000,982.000000,3.454200,121200.000000 +-121.310000,38.010000,22.000000,2101.000000,514.000000,1304.000000,511.000000,2.834800,101600.000000 +-118.180000,34.050000,41.000000,762.000000,147.000000,817.000000,176.000000,3.750000,123100.000000 +-122.130000,37.370000,30.000000,2139.000000,260.000000,742.000000,242.000000,11.806000,500001.000000 +-119.750000,36.780000,28.000000,3257.000000,752.000000,1981.000000,712.000000,2.293000,71700.000000 +-117.090000,32.740000,42.000000,1986.000000,472.000000,1472.000000,475.000000,2.175700,110100.000000 +-122.020000,37.330000,25.000000,3823.000000,584.000000,1689.000000,571.000000,7.369300,373600.000000 +-117.200000,32.840000,34.000000,3353.000000,544.000000,1583.000000,571.000000,4.550000,187700.000000 +-118.140000,34.010000,46.000000,1746.000000,447.000000,1296.000000,392.000000,2.392900,156800.000000 +-122.430000,37.780000,29.000000,1310.000000,364.000000,1009.000000,379.000000,1.384400,177500.000000 +-118.100000,34.010000,29.000000,2077.000000,564.000000,2087.000000,543.000000,2.660000,189200.000000 +-118.350000,34.100000,20.000000,2745.000000,782.000000,1161.000000,739.000000,3.904400,436400.000000 +-118.000000,33.810000,33.000000,2970.000000,547.000000,1869.000000,539.000000,4.363600,201800.000000 +-121.460000,38.560000,52.000000,1750.000000,372.000000,764.000000,369.000000,2.919100,111800.000000 +-118.270000,33.870000,21.000000,6108.000000,1130.000000,3244.000000,1113.000000,4.276800,181400.000000 +-118.260000,33.950000,44.000000,1771.000000,378.000000,1296.000000,399.000000,1.638900,96700.000000 +-119.010000,35.380000,52.000000,114.000000,26.000000,158.000000,26.000000,1.075000,67500.000000 +-117.080000,32.800000,32.000000,1587.000000,268.000000,635.000000,249.000000,3.375000,178100.000000 +-122.200000,40.260000,15.000000,2102.000000,358.000000,957.000000,371.000000,3.190800,137900.000000 +-119.980000,38.940000,25.000000,1339.000000,328.000000,503.000000,219.000000,1.901800,109700.000000 +-122.530000,37.950000,22.000000,7446.000000,1979.000000,2980.000000,1888.000000,3.583800,271300.000000 +-118.300000,34.050000,51.000000,1005.000000,314.000000,1227.000000,306.000000,2.429700,162500.000000 +-121.860000,39.750000,18.000000,1651.000000,309.000000,856.000000,293.000000,3.504600,118300.000000 +-122.060000,37.330000,23.000000,4507.000000,751.000000,2167.000000,722.000000,7.010200,500001.000000 +-122.450000,38.010000,36.000000,4501.000000,832.000000,2196.000000,800.000000,4.318200,252700.000000 +-117.010000,32.770000,24.000000,2311.000000,536.000000,1005.000000,525.000000,2.900000,185200.000000 +-120.870000,37.760000,16.000000,1174.000000,249.000000,601.000000,242.000000,1.714300,113300.000000 +-121.790000,38.540000,7.000000,1777.000000,513.000000,4479.000000,504.000000,1.465300,310000.000000 +-117.810000,33.820000,22.000000,2898.000000,335.000000,1057.000000,324.000000,10.811100,500001.000000 
+-117.590000,33.660000,3.000000,1206.000000,256.000000,563.000000,287.000000,5.158900,167800.000000 +-117.360000,34.090000,32.000000,3616.000000,631.000000,2131.000000,593.000000,3.287900,95500.000000 +-121.520000,39.500000,33.000000,1462.000000,241.000000,569.000000,231.000000,3.283300,82600.000000 +-122.270000,37.840000,52.000000,1503.000000,298.000000,690.000000,275.000000,2.603300,162900.000000 +-122.210000,40.200000,19.000000,3404.000000,731.000000,1421.000000,683.000000,2.614900,84400.000000 +-117.240000,33.180000,19.000000,3337.000000,565.000000,1646.000000,554.000000,5.019500,200200.000000 +-122.550000,37.980000,31.000000,3807.000000,828.000000,1581.000000,795.000000,3.293000,337500.000000 +-118.450000,34.000000,46.000000,1777.000000,362.000000,896.000000,334.000000,4.450000,348300.000000 +-117.880000,33.850000,34.000000,1127.000000,185.000000,588.000000,181.000000,4.375000,224700.000000 +-117.180000,32.760000,52.000000,2023.000000,301.000000,649.000000,285.000000,4.739600,441700.000000 +-118.300000,33.880000,29.000000,850.000000,229.000000,563.000000,204.000000,3.737500,247700.000000 +-122.040000,38.280000,12.000000,3861.000000,795.000000,2129.000000,806.000000,3.676000,135000.000000 +-122.430000,40.470000,16.000000,3552.000000,704.000000,1801.000000,658.000000,2.149600,97700.000000 +-118.380000,33.860000,24.000000,3124.000000,560.000000,1312.000000,542.000000,6.302100,333800.000000 +-119.570000,36.090000,6.000000,2015.000000,413.000000,992.000000,319.000000,2.388900,53200.000000 +-117.870000,34.120000,34.000000,1004.000000,220.000000,772.000000,217.000000,3.857100,174500.000000 +-116.880000,32.810000,35.000000,2926.000000,562.000000,1590.000000,506.000000,4.201400,143200.000000 +-118.580000,34.210000,13.000000,6227.000000,1317.000000,3739.000000,1226.000000,4.031300,299300.000000 +-122.040000,37.880000,32.000000,3250.000000,550.000000,1230.000000,557.000000,4.642400,312700.000000 +-122.440000,37.720000,52.000000,1775.000000,347.000000,1102.000000,367.000000,4.312500,267200.000000 +-121.810000,37.370000,26.000000,2987.000000,539.000000,1931.000000,518.000000,5.109900,213100.000000 +-122.500000,37.770000,52.000000,2433.000000,454.000000,1070.000000,420.000000,4.125000,359500.000000 +-121.940000,37.940000,26.000000,1299.000000,174.000000,533.000000,180.000000,6.229600,291700.000000 +-118.450000,34.120000,20.000000,10722.000000,1617.000000,3731.000000,1511.000000,9.744900,500001.000000 +-121.700000,39.070000,26.000000,2668.000000,510.000000,1437.000000,505.000000,3.312500,100000.000000 +-118.100000,34.650000,33.000000,873.000000,177.000000,425.000000,142.000000,2.670000,187500.000000 +-119.020000,36.060000,41.000000,2279.000000,538.000000,1908.000000,511.000000,1.395200,43100.000000 +-118.060000,34.080000,42.000000,1988.000000,402.000000,1239.000000,402.000000,3.256900,201500.000000 +-117.660000,33.610000,17.000000,3464.000000,519.000000,1713.000000,530.000000,6.047100,248400.000000 +-117.400000,33.940000,30.000000,1198.000000,251.000000,1019.000000,214.000000,3.050900,82700.000000 +-118.190000,33.830000,30.000000,2246.000000,552.000000,1032.000000,548.000000,3.587100,347100.000000 +-121.550000,39.510000,50.000000,1050.000000,288.000000,485.000000,260.000000,1.160700,51700.000000 +-121.980000,37.140000,37.000000,74.000000,19.000000,63.000000,17.000000,9.590800,350000.000000 +-117.060000,32.610000,24.000000,4369.000000,1353.000000,3123.000000,1247.000000,2.057100,152300.000000 +-118.320000,34.040000,39.000000,2965.000000,812.000000,2638.000000,794.000000,2.532000,172700.000000 
+-117.130000,32.760000,41.000000,1545.000000,420.000000,747.000000,415.000000,2.375000,154400.000000 +-122.500000,37.760000,46.000000,2226.000000,480.000000,1272.000000,468.000000,4.264400,284100.000000 +-120.870000,37.620000,30.000000,455.000000,70.000000,220.000000,69.000000,4.895800,142500.000000 +-118.240000,34.220000,41.000000,2476.000000,506.000000,1271.000000,485.000000,3.453100,263900.000000 +-117.690000,33.480000,25.000000,3240.000000,481.000000,1462.000000,497.000000,6.181500,288500.000000 +-122.200000,39.750000,18.000000,2603.000000,576.000000,1616.000000,588.000000,2.019200,63700.000000 +-117.080000,32.640000,43.000000,1005.000000,230.000000,548.000000,252.000000,1.867200,145800.000000 +-117.910000,33.820000,32.000000,1408.000000,307.000000,1331.000000,284.000000,3.701400,179600.000000 +-122.000000,38.730000,31.000000,371.000000,74.000000,208.000000,84.000000,3.875000,137500.000000 +-118.290000,33.840000,33.000000,896.000000,208.000000,843.000000,200.000000,3.500000,183000.000000 +-118.130000,33.860000,45.000000,1320.000000,256.000000,645.000000,256.000000,4.400000,209500.000000 +-118.350000,33.890000,29.000000,2940.000000,708.000000,2175.000000,684.000000,3.648600,229000.000000 +-122.130000,40.010000,21.000000,916.000000,194.000000,451.000000,178.000000,2.125000,63300.000000 +-122.070000,37.960000,37.000000,1217.000000,199.000000,552.000000,194.000000,5.044500,196200.000000 +-117.260000,32.850000,30.000000,3652.000000,499.000000,978.000000,462.000000,8.237400,500001.000000 +-117.870000,33.740000,16.000000,1243.000000,365.000000,1925.000000,376.000000,2.763200,158900.000000 +-121.880000,37.440000,23.000000,1310.000000,267.000000,910.000000,261.000000,5.399400,237900.000000 +-121.670000,36.580000,11.000000,5892.000000,837.000000,2327.000000,812.000000,6.155100,291800.000000 +-116.890000,33.790000,12.000000,701.000000,130.000000,434.000000,110.000000,2.057700,56700.000000 +-122.660000,38.470000,20.000000,2806.000000,477.000000,1369.000000,460.000000,4.750000,190500.000000 +-121.450000,38.540000,38.000000,1865.000000,384.000000,1052.000000,354.000000,1.789100,60500.000000 +-121.000000,37.660000,43.000000,2369.000000,413.000000,944.000000,422.000000,3.263200,138100.000000 +-117.270000,32.840000,34.000000,1655.000000,450.000000,870.000000,411.000000,3.210900,376000.000000 +-117.870000,34.110000,23.000000,4066.000000,819.000000,2105.000000,737.000000,4.655600,199600.000000 +-121.440000,37.750000,16.000000,2229.000000,458.000000,1199.000000,445.000000,3.482100,170600.000000 +-118.130000,33.760000,44.000000,2532.000000,621.000000,961.000000,550.000000,3.935200,406900.000000 +-118.310000,34.260000,41.000000,1297.000000,327.000000,733.000000,315.000000,3.058300,160300.000000 +-122.000000,38.370000,18.000000,1048.000000,185.000000,469.000000,162.000000,3.625000,125000.000000 +-122.270000,41.230000,40.000000,1958.000000,386.000000,725.000000,331.000000,2.189800,65500.000000 +-120.890000,37.520000,42.000000,1200.000000,221.000000,647.000000,192.000000,2.540200,157500.000000 +-118.750000,34.290000,17.000000,5512.000000,765.000000,2734.000000,814.000000,6.607300,258100.000000 +-118.180000,34.020000,36.000000,1138.000000,296.000000,1484.000000,320.000000,2.281300,150700.000000 +-121.370000,38.410000,14.000000,3727.000000,685.000000,1741.000000,646.000000,3.562500,125700.000000 +-120.310000,37.290000,36.000000,969.000000,206.000000,732.000000,175.000000,1.593800,57600.000000 +-117.880000,33.730000,32.000000,1947.000000,355.000000,1786.000000,332.000000,4.572600,177500.000000 
+-117.330000,33.980000,52.000000,1417.000000,353.000000,881.000000,300.000000,1.953100,162500.000000 +-118.490000,34.030000,30.000000,4061.000000,927.000000,1487.000000,865.000000,4.182700,435100.000000 +-121.930000,38.010000,9.000000,2294.000000,389.000000,1142.000000,365.000000,5.336300,160800.000000 +-122.450000,37.700000,46.000000,2193.000000,499.000000,1814.000000,489.000000,4.012500,230100.000000 +-117.080000,32.750000,20.000000,1886.000000,586.000000,1134.000000,525.000000,1.502900,100000.000000 +-116.190000,33.690000,11.000000,5692.000000,1346.000000,5682.000000,1273.000000,2.538300,74000.000000 +-119.730000,36.620000,35.000000,2080.000000,365.000000,1026.000000,333.000000,3.578100,92800.000000 +-117.120000,32.590000,28.000000,2793.000000,706.000000,1825.000000,676.000000,2.672400,144500.000000 +-117.630000,34.090000,8.000000,3557.000000,890.000000,2251.000000,765.000000,2.681800,114100.000000 +-118.260000,34.070000,40.000000,680.000000,273.000000,995.000000,249.000000,2.260700,165600.000000 +-118.260000,33.970000,46.000000,1521.000000,352.000000,1100.000000,334.000000,1.550000,100600.000000 +-119.840000,36.750000,34.000000,1186.000000,300.000000,774.000000,271.000000,1.575000,57100.000000 +-121.280000,38.670000,29.000000,1087.000000,174.000000,430.000000,174.000000,4.362500,158800.000000 +-117.350000,34.110000,34.000000,2104.000000,388.000000,1578.000000,365.000000,3.083300,88400.000000 +-121.320000,36.420000,20.000000,1054.000000,269.000000,1219.000000,273.000000,3.043700,76600.000000 +-118.350000,34.020000,34.000000,3978.000000,1073.000000,2725.000000,1035.000000,1.762200,167900.000000 +-119.810000,37.670000,24.000000,172.000000,42.000000,79.000000,30.000000,3.833300,93800.000000 +-118.150000,34.050000,33.000000,3287.000000,649.000000,1783.000000,653.000000,3.847200,293300.000000 +-121.220000,37.810000,17.000000,2879.000000,542.000000,1802.000000,530.000000,3.637800,126100.000000 +-119.720000,34.430000,30.000000,2491.000000,656.000000,1091.000000,576.000000,2.513900,279500.000000 +-117.850000,33.840000,17.000000,2830.000000,502.000000,1370.000000,459.000000,5.178500,247300.000000 +-117.200000,32.790000,31.000000,3417.000000,533.000000,1245.000000,532.000000,4.778800,276000.000000 +-118.630000,34.180000,33.000000,5252.000000,760.000000,2041.000000,730.000000,6.797700,389700.000000 +-117.490000,33.640000,3.000000,8874.000000,1302.000000,3191.000000,1027.000000,6.858800,302000.000000 +-118.370000,33.840000,35.000000,1792.000000,322.000000,978.000000,326.000000,4.958300,342800.000000 +-122.020000,38.260000,20.000000,3899.000000,763.000000,2198.000000,779.000000,3.206100,120400.000000 +-121.330000,38.660000,17.000000,2767.000000,584.000000,1275.000000,568.000000,2.590900,125400.000000 +-118.740000,36.230000,22.000000,1033.000000,232.000000,442.000000,136.000000,2.644700,137500.000000 +-117.890000,34.490000,12.000000,3449.000000,598.000000,1502.000000,540.000000,3.704300,150800.000000 +-117.410000,33.960000,24.000000,4481.000000,901.000000,2398.000000,823.000000,3.864000,123400.000000 +-118.750000,34.420000,28.000000,1000.000000,206.000000,545.000000,154.000000,2.416700,191700.000000 +-122.480000,37.740000,52.000000,2285.000000,435.000000,1211.000000,442.000000,4.020800,323100.000000 +-118.140000,34.040000,43.000000,1949.000000,464.000000,1216.000000,457.000000,3.321400,209300.000000 +-122.560000,37.900000,36.000000,1760.000000,283.000000,562.000000,246.000000,6.754600,402400.000000 +-122.090000,37.390000,43.000000,2065.000000,535.000000,1029.000000,500.000000,3.731800,327700.000000 
+-121.800000,36.940000,29.000000,2377.000000,476.000000,1669.000000,499.000000,2.821400,190100.000000 +-117.830000,33.830000,13.000000,3759.000000,489.000000,1496.000000,499.000000,8.381800,377600.000000 +-121.680000,36.900000,13.000000,833.000000,130.000000,405.000000,127.000000,5.272900,322900.000000 +-122.300000,37.880000,52.000000,409.000000,97.000000,208.000000,98.000000,1.697100,138800.000000 +-121.040000,37.670000,16.000000,19.000000,19.000000,166.000000,9.000000,0.536000,162500.000000 +-118.320000,34.090000,28.000000,2173.000000,819.000000,2548.000000,763.000000,1.879000,218800.000000 +-118.120000,33.810000,36.000000,1774.000000,299.000000,784.000000,298.000000,5.044700,249200.000000 +-121.810000,39.700000,21.000000,5051.000000,1054.000000,2948.000000,980.000000,1.586300,81300.000000 +-121.840000,36.520000,18.000000,3165.000000,533.000000,1312.000000,434.000000,6.523400,357400.000000 +-121.790000,37.330000,18.000000,3611.000000,614.000000,2381.000000,642.000000,5.634500,231000.000000 +-118.160000,34.180000,48.000000,568.000000,145.000000,559.000000,135.000000,2.413500,135700.000000 +-119.400000,36.590000,37.000000,1486.000000,296.000000,977.000000,290.000000,3.507400,93800.000000 +-122.270000,37.800000,39.000000,1715.000000,623.000000,1327.000000,467.000000,1.847700,179200.000000 +-117.730000,33.570000,5.000000,11976.000000,2495.000000,4327.000000,2009.000000,4.848800,194400.000000 +-121.280000,37.920000,30.000000,1061.000000,230.000000,851.000000,195.000000,2.441200,61600.000000 +-119.810000,36.770000,43.000000,2341.000000,395.000000,890.000000,375.000000,3.426500,85000.000000 +-122.260000,37.850000,50.000000,1120.000000,283.000000,697.000000,264.000000,2.125000,140000.000000 +-117.950000,33.930000,37.000000,2633.000000,630.000000,1904.000000,630.000000,2.612300,161300.000000 +-120.120000,38.120000,37.000000,3355.000000,666.000000,338.000000,136.000000,2.062500,88900.000000 +-121.880000,37.350000,52.000000,1704.000000,418.000000,1336.000000,411.000000,2.816700,183500.000000 +-118.110000,33.870000,15.000000,3254.000000,598.000000,1772.000000,618.000000,5.041700,240800.000000 +-122.080000,37.690000,42.000000,1414.000000,274.000000,629.000000,244.000000,3.347800,184900.000000 +-121.680000,39.150000,14.000000,2774.000000,451.000000,1292.000000,428.000000,4.383300,115200.000000 +-122.160000,37.710000,36.000000,666.000000,132.000000,366.000000,134.000000,3.464300,175000.000000 +-118.070000,34.090000,35.000000,1224.000000,267.000000,887.000000,276.000000,4.098700,202400.000000 +-117.690000,33.650000,16.000000,5805.000000,852.000000,2356.000000,795.000000,6.106200,274600.000000 +-118.350000,34.030000,49.000000,2334.000000,530.000000,1334.000000,447.000000,1.890000,124000.000000 +-122.790000,39.020000,23.000000,642.000000,203.000000,265.000000,84.000000,1.883300,96900.000000 +-118.140000,33.890000,33.000000,2867.000000,786.000000,1774.000000,705.000000,2.929200,183400.000000 +-121.890000,37.420000,26.000000,40.000000,8.000000,52.000000,7.000000,7.719700,225000.000000 +-122.410000,37.760000,52.000000,492.000000,139.000000,316.000000,168.000000,3.086500,225000.000000 +-118.600000,34.160000,37.000000,3441.000000,584.000000,1283.000000,544.000000,4.165600,313100.000000 +-118.410000,34.020000,24.000000,2610.000000,756.000000,1322.000000,692.000000,3.502200,281300.000000 +-117.530000,33.970000,29.000000,1430.000000,273.000000,872.000000,283.000000,4.083300,141000.000000 +-117.130000,32.700000,35.000000,365.000000,98.000000,463.000000,112.000000,2.558800,78800.000000 
+-117.140000,32.900000,16.000000,3217.000000,716.000000,2054.000000,687.000000,4.223400,162100.000000 +-118.160000,34.110000,31.000000,5715.000000,1154.000000,2639.000000,1079.000000,4.166100,364400.000000 +-117.180000,32.700000,42.000000,1691.000000,286.000000,761.000000,281.000000,5.138600,404500.000000 +-117.970000,33.720000,24.000000,2991.000000,500.000000,1437.000000,453.000000,5.428600,273400.000000 +-118.250000,34.090000,52.000000,104.000000,20.000000,32.000000,17.000000,3.750000,241700.000000 +-118.140000,34.110000,52.000000,3367.000000,545.000000,1427.000000,535.000000,5.229200,444500.000000 +-120.010000,34.540000,30.000000,2992.000000,609.000000,1288.000000,465.000000,3.937500,292900.000000 +-117.410000,34.100000,5.000000,4937.000000,1139.000000,2204.000000,812.000000,2.527200,92000.000000 +-118.220000,34.520000,7.000000,4524.000000,735.000000,2298.000000,717.000000,6.553800,311600.000000 +-117.910000,33.870000,29.000000,1121.000000,291.000000,762.000000,276.000000,2.500000,143800.000000 +-117.090000,32.760000,29.000000,1650.000000,496.000000,882.000000,445.000000,2.228700,140000.000000 +-122.270000,37.820000,52.000000,1630.000000,456.000000,1162.000000,400.000000,1.247500,104200.000000 +-118.200000,34.060000,46.000000,321.000000,101.000000,401.000000,86.000000,2.102900,109400.000000 +-118.360000,33.900000,40.000000,1271.000000,276.000000,725.000000,234.000000,5.045200,231900.000000 +-122.000000,37.860000,18.000000,8953.000000,1074.000000,3011.000000,993.000000,10.737200,500001.000000 +-121.360000,39.520000,15.000000,2490.000000,527.000000,1229.000000,497.000000,2.391700,85700.000000 +-122.000000,38.280000,3.000000,7030.000000,1191.000000,3238.000000,1055.000000,4.962000,161700.000000 +-117.700000,33.680000,29.000000,5650.000000,1084.000000,3985.000000,1056.000000,2.819200,162500.000000 +-118.280000,34.030000,26.000000,2107.000000,809.000000,2821.000000,572.000000,0.844000,350000.000000 +-118.250000,34.150000,13.000000,1107.000000,479.000000,616.000000,443.000000,0.818500,187500.000000 +-122.540000,37.930000,43.000000,2998.000000,470.000000,970.000000,430.000000,5.538500,431800.000000 +-118.250000,34.020000,50.000000,180.000000,89.000000,356.000000,76.000000,2.194400,158300.000000 +-122.060000,36.980000,15.000000,3385.000000,669.000000,1571.000000,615.000000,4.225400,320900.000000 +-122.450000,37.770000,52.000000,2339.000000,548.000000,1090.000000,507.000000,3.367900,350000.000000 +-118.040000,33.850000,23.000000,3132.000000,469.000000,1646.000000,478.000000,5.777000,315900.000000 +-118.120000,34.150000,19.000000,557.000000,216.000000,673.000000,212.000000,2.176300,168800.000000 +-118.310000,33.940000,43.000000,2104.000000,393.000000,1132.000000,394.000000,3.068200,142000.000000 +-118.440000,34.160000,33.000000,1616.000000,322.000000,580.000000,311.000000,4.039100,337500.000000 +-118.460000,34.170000,24.000000,2814.000000,675.000000,1463.000000,620.000000,4.187500,309300.000000 +-117.930000,34.060000,35.000000,1022.000000,183.000000,628.000000,187.000000,3.937500,187500.000000 +-121.810000,36.570000,13.000000,3030.000000,413.000000,1027.000000,363.000000,6.961500,500001.000000 +-118.420000,34.000000,33.000000,1139.000000,299.000000,734.000000,257.000000,3.270800,325000.000000 +-118.330000,34.010000,44.000000,1762.000000,463.000000,786.000000,445.000000,1.923100,188500.000000 +-118.240000,33.930000,19.000000,325.000000,74.000000,354.000000,87.000000,2.750000,90600.000000 +-116.940000,32.810000,22.000000,4266.000000,1010.000000,2766.000000,985.000000,2.817500,135200.000000 
+-122.600000,38.240000,16.000000,2621.000000,416.000000,1247.000000,386.000000,4.860300,198400.000000 +-118.210000,33.970000,52.000000,4220.000000,908.000000,3731.000000,892.000000,3.190100,167600.000000 +-118.730000,34.270000,25.000000,3409.000000,493.000000,1699.000000,484.000000,5.653000,225800.000000 +-122.120000,37.370000,37.000000,1446.000000,181.000000,549.000000,190.000000,10.735500,500001.000000 +-122.420000,40.440000,16.000000,994.000000,185.000000,495.000000,181.000000,2.187500,76400.000000 +-122.130000,37.720000,26.000000,2862.000000,394.000000,1030.000000,397.000000,7.912000,367300.000000 +-121.170000,37.880000,22.000000,1283.000000,256.000000,3082.000000,239.000000,3.536500,111800.000000 +-122.430000,37.720000,48.000000,1289.000000,280.000000,782.000000,235.000000,3.671900,259800.000000 +-118.220000,33.910000,27.000000,500.000000,159.000000,732.000000,162.000000,2.742600,103100.000000 +-121.170000,37.970000,28.000000,1374.000000,248.000000,769.000000,229.000000,3.638900,130400.000000 +-122.270000,37.860000,52.000000,2307.000000,583.000000,1127.000000,548.000000,1.844700,198200.000000 +-119.190000,36.140000,41.000000,759.000000,140.000000,408.000000,129.000000,3.900000,85900.000000 +-122.410000,37.600000,31.000000,4424.000000,834.000000,1915.000000,817.000000,4.136400,412000.000000 +-116.830000,32.810000,18.000000,2367.000000,402.000000,1021.000000,395.000000,4.812500,210500.000000 +-119.340000,36.330000,17.000000,2250.000000,430.000000,1218.000000,468.000000,4.181200,93700.000000 +-123.220000,39.160000,29.000000,6121.000000,1222.000000,3595.000000,1189.000000,2.631000,109600.000000 +-121.920000,37.720000,22.000000,4638.000000,716.000000,2302.000000,687.000000,5.347000,219500.000000 +-116.570000,33.760000,25.000000,2616.000000,547.000000,581.000000,343.000000,3.136400,301600.000000 +-118.170000,34.180000,44.000000,1401.000000,246.000000,607.000000,271.000000,2.847200,218800.000000 +-117.200000,32.800000,36.000000,4018.000000,1067.000000,1620.000000,842.000000,2.359900,168400.000000 +-117.580000,34.090000,27.000000,754.000000,200.000000,746.000000,185.000000,1.953100,100800.000000 +-118.240000,33.960000,34.000000,1724.000000,432.000000,1876.000000,416.000000,2.107800,100600.000000 +-122.240000,40.180000,39.000000,2191.000000,493.000000,1307.000000,499.000000,1.648300,60800.000000 +-119.690000,36.820000,15.000000,3303.000000,512.000000,1687.000000,505.000000,4.810000,93600.000000 +-121.690000,36.620000,19.000000,1907.000000,323.000000,681.000000,270.000000,6.033200,244900.000000 +-119.280000,36.350000,7.000000,3598.000000,701.000000,2080.000000,678.000000,3.111100,72400.000000 +-117.990000,33.810000,46.000000,38.000000,8.000000,66.000000,14.000000,4.166700,162500.000000 +-117.650000,35.000000,36.000000,1184.000000,316.000000,672.000000,241.000000,1.910700,39800.000000 +-118.150000,34.020000,43.000000,2172.000000,605.000000,2386.000000,597.000000,2.823900,150600.000000 +-122.430000,37.730000,52.000000,1583.000000,347.000000,935.000000,341.000000,4.678600,263200.000000 +-117.040000,32.730000,36.000000,2084.000000,400.000000,1097.000000,398.000000,3.271700,130700.000000 +-118.080000,34.140000,45.000000,2923.000000,604.000000,1903.000000,560.000000,3.172900,218700.000000 +-121.070000,39.200000,45.000000,204.000000,62.000000,133.000000,51.000000,1.000000,90600.000000 +-117.120000,32.660000,52.000000,16.000000,4.000000,8.000000,3.000000,1.125000,60000.000000 +-118.130000,34.130000,39.000000,2099.000000,397.000000,1500.000000,380.000000,4.830400,493200.000000 
+-122.220000,37.880000,20.000000,95.000000,13.000000,31.000000,15.000000,2.444400,475000.000000 +-122.520000,37.930000,34.000000,2782.000000,502.000000,1219.000000,507.000000,5.077900,333900.000000 +-122.090000,37.630000,36.000000,1570.000000,274.000000,992.000000,249.000000,5.364400,168800.000000 +-117.970000,33.820000,26.000000,4013.000000,985.000000,2442.000000,922.000000,3.765500,197700.000000 +-118.280000,34.050000,41.000000,1075.000000,597.000000,2260.000000,614.000000,1.300000,162500.000000 +-118.390000,33.790000,30.000000,4402.000000,563.000000,1582.000000,551.000000,10.898000,500001.000000 +-122.400000,37.580000,26.000000,3281.000000,531.000000,1145.000000,480.000000,6.358000,500001.000000 +-118.260000,34.060000,42.000000,2541.000000,1282.000000,3974.000000,1189.000000,1.585400,87500.000000 +-122.160000,37.480000,36.000000,2238.000000,479.000000,1949.000000,457.000000,2.376900,157300.000000 +-117.430000,34.110000,17.000000,4109.000000,884.000000,2544.000000,780.000000,2.775700,109800.000000 +-118.280000,33.930000,42.000000,1898.000000,460.000000,1503.000000,429.000000,2.517900,97400.000000 +-118.370000,33.950000,5.000000,6955.000000,2062.000000,3591.000000,1566.000000,3.111000,247600.000000 +-121.490000,38.560000,52.000000,1777.000000,368.000000,624.000000,350.000000,3.672900,137800.000000 +-121.800000,38.550000,11.000000,5121.000000,899.000000,2258.000000,901.000000,4.716800,223200.000000 +-122.190000,39.920000,20.000000,2563.000000,658.000000,1363.000000,611.000000,1.023000,54200.000000 +-118.010000,33.840000,29.000000,3740.000000,691.000000,1724.000000,638.000000,3.962800,215600.000000 +-118.310000,33.960000,48.000000,2015.000000,356.000000,1020.000000,338.000000,4.062500,138700.000000 +-121.060000,39.220000,52.000000,1749.000000,422.000000,837.000000,391.000000,2.325000,109700.000000 +-121.350000,38.610000,27.000000,3900.000000,776.000000,1549.000000,761.000000,2.778800,115700.000000 +-118.310000,33.990000,48.000000,2235.000000,433.000000,1363.000000,433.000000,1.655900,101400.000000 +-121.930000,37.270000,28.000000,3428.000000,753.000000,1753.000000,729.000000,4.103300,281000.000000 +-117.310000,33.170000,7.000000,2349.000000,312.000000,809.000000,282.000000,5.552000,283900.000000 +-120.890000,37.480000,27.000000,1118.000000,195.000000,647.000000,209.000000,2.913500,159400.000000 +-119.470000,35.140000,19.000000,4190.000000,690.000000,1973.000000,702.000000,3.992900,88300.000000 +-118.410000,34.180000,35.000000,1975.000000,384.000000,882.000000,406.000000,4.375000,291700.000000 +-119.810000,36.700000,52.000000,314.000000,57.000000,178.000000,66.000000,1.240400,52500.000000 +-117.080000,33.160000,11.000000,6341.000000,1030.000000,2697.000000,977.000000,4.855400,206700.000000 +-119.270000,35.870000,12.000000,972.000000,269.000000,1134.000000,286.000000,1.630000,49500.000000 +-122.310000,40.750000,18.000000,1411.000000,330.000000,494.000000,227.000000,1.491100,75800.000000 +-117.200000,33.290000,12.000000,6358.000000,1182.000000,2778.000000,1020.000000,4.035700,295900.000000 +-118.430000,34.260000,43.000000,729.000000,172.000000,935.000000,174.000000,2.951900,140900.000000 +-121.520000,39.510000,30.000000,3085.000000,610.000000,1688.000000,575.000000,2.334000,72200.000000 +-118.770000,34.270000,7.000000,3074.000000,794.000000,1816.000000,654.000000,2.713700,196400.000000 +-124.100000,40.950000,17.000000,1485.000000,345.000000,823.000000,316.000000,1.899300,78400.000000 +-117.150000,32.800000,27.000000,1937.000000,537.000000,1211.000000,482.000000,2.750000,87500.000000 
+-118.370000,34.160000,11.000000,2901.000000,871.000000,1659.000000,789.000000,3.110600,209400.000000 +-122.500000,37.740000,44.000000,2792.000000,615.000000,1640.000000,579.000000,4.062500,272800.000000 +-120.920000,39.560000,48.000000,1276.000000,292.000000,358.000000,145.000000,1.875000,66600.000000 +-122.470000,38.510000,25.000000,928.000000,195.000000,413.000000,184.000000,3.490400,196900.000000 +-117.890000,33.610000,41.000000,1790.000000,361.000000,540.000000,284.000000,6.024700,500001.000000 +-121.350000,38.400000,11.000000,2322.000000,459.000000,1373.000000,424.000000,3.175000,94400.000000 +-117.920000,34.120000,32.000000,2552.000000,576.000000,2161.000000,548.000000,2.945900,144400.000000 +-118.310000,33.800000,30.000000,3096.000000,757.000000,2048.000000,704.000000,3.125000,233300.000000 +-120.350000,37.040000,37.000000,1495.000000,292.000000,858.000000,275.000000,2.930600,46300.000000 +-122.000000,37.310000,28.000000,3811.000000,585.000000,1795.000000,581.000000,7.838300,372700.000000 +-118.010000,33.950000,37.000000,1165.000000,210.000000,627.000000,221.000000,4.692300,181000.000000 +-118.070000,34.090000,40.000000,1745.000000,370.000000,1293.000000,357.000000,2.547400,198100.000000 +-117.500000,33.920000,28.000000,2101.000000,337.000000,1061.000000,348.000000,4.550000,146800.000000 +-123.740000,40.660000,25.000000,2395.000000,431.000000,983.000000,375.000000,3.046900,136000.000000 +-122.030000,37.910000,29.000000,5438.000000,871.000000,2310.000000,890.000000,5.036200,275300.000000 +-118.910000,34.220000,15.000000,5644.000000,757.000000,2659.000000,783.000000,6.755900,312000.000000 +-117.960000,34.140000,9.000000,907.000000,207.000000,619.000000,194.000000,3.946400,179600.000000 +-121.800000,38.010000,46.000000,2273.000000,495.000000,1088.000000,447.000000,2.253200,109400.000000 +-122.290000,37.530000,35.000000,2043.000000,511.000000,1089.000000,504.000000,3.027800,310600.000000 +-122.140000,37.670000,34.000000,3036.000000,533.000000,1366.000000,500.000000,4.238600,192300.000000 +-117.850000,33.790000,52.000000,2102.000000,403.000000,898.000000,365.000000,3.682700,236800.000000 +-122.100000,37.650000,31.000000,1797.000000,327.000000,796.000000,319.000000,4.442700,204500.000000 +-122.120000,37.910000,34.000000,5683.000000,755.000000,1962.000000,723.000000,8.367800,455300.000000 +-119.290000,36.320000,27.000000,1513.000000,374.000000,839.000000,350.000000,1.201200,64600.000000 +-117.400000,34.010000,25.000000,1858.000000,366.000000,1311.000000,331.000000,2.708300,87800.000000 +-117.060000,32.770000,32.000000,3888.000000,827.000000,3868.000000,841.000000,3.075500,166800.000000 +-118.300000,34.250000,44.000000,1442.000000,285.000000,859.000000,292.000000,4.583300,197300.000000 +-122.230000,40.150000,14.000000,2297.000000,573.000000,1637.000000,551.000000,1.787000,51600.000000 +-117.910000,33.820000,32.000000,2696.000000,640.000000,2330.000000,626.000000,2.947900,184600.000000 +-122.530000,37.970000,44.000000,3595.000000,953.000000,1831.000000,910.000000,2.603600,287500.000000 +-121.790000,37.000000,28.000000,2715.000000,451.000000,1154.000000,386.000000,4.802100,290400.000000 +-118.460000,33.990000,44.000000,1122.000000,287.000000,531.000000,256.000000,4.059800,335900.000000 +-118.030000,33.970000,32.000000,2468.000000,552.000000,1190.000000,479.000000,3.827500,238500.000000 +-122.320000,38.000000,32.000000,2275.000000,397.000000,1233.000000,418.000000,4.043700,162800.000000 +-118.280000,34.170000,22.000000,2664.000000,651.000000,1553.000000,629.000000,3.635400,256300.000000 
+-119.140000,36.060000,32.000000,1838.000000,441.000000,1628.000000,425.000000,1.645200,41500.000000 +-117.130000,34.070000,34.000000,2405.000000,541.000000,1342.000000,514.000000,2.803100,86900.000000 +-120.670000,35.300000,32.000000,4202.000000,986.000000,2309.000000,956.000000,2.216500,231700.000000 +-118.060000,34.120000,34.000000,2941.000000,558.000000,1660.000000,576.000000,4.566700,271500.000000 +-122.390000,40.570000,38.000000,855.000000,172.000000,468.000000,150.000000,1.409100,84400.000000 +-118.390000,33.880000,33.000000,2543.000000,439.000000,1098.000000,416.000000,5.968300,495500.000000 +-118.160000,34.020000,47.000000,1055.000000,298.000000,1303.000000,302.000000,2.696400,138800.000000 +-122.580000,37.980000,52.000000,1180.000000,216.000000,467.000000,197.000000,4.961500,292200.000000 +-118.020000,33.920000,35.000000,2075.000000,424.000000,1312.000000,396.000000,3.796900,164800.000000 +-119.700000,34.400000,25.000000,1858.000000,493.000000,865.000000,460.000000,3.093800,312500.000000 +-122.680000,38.430000,29.000000,488.000000,63.000000,161.000000,62.000000,6.077400,334400.000000 +-121.350000,38.590000,29.000000,1285.000000,193.000000,460.000000,206.000000,5.324300,265700.000000 +-121.980000,37.270000,25.000000,3075.000000,564.000000,1633.000000,543.000000,5.252800,269400.000000 +-118.080000,34.580000,5.000000,1113.000000,186.000000,631.000000,168.000000,4.171900,146600.000000 +-118.250000,34.060000,20.000000,41.000000,17.000000,87.000000,25.000000,1.549100,225000.000000 +-122.250000,37.820000,26.000000,3959.000000,1196.000000,1749.000000,1217.000000,3.023300,255000.000000 +-119.050000,34.350000,39.000000,950.000000,300.000000,1366.000000,312.000000,2.244300,146600.000000 +-117.540000,33.760000,5.000000,5846.000000,1035.000000,3258.000000,1001.000000,4.796500,160800.000000 +-118.210000,33.880000,31.000000,1332.000000,417.000000,1405.000000,363.000000,2.012500,143000.000000 +-117.200000,32.790000,29.000000,1213.000000,228.000000,654.000000,246.000000,4.598700,255600.000000 +-120.960000,37.590000,11.000000,4236.000000,879.000000,2410.000000,850.000000,2.384900,122000.000000 +-118.240000,34.010000,48.000000,396.000000,99.000000,485.000000,110.000000,2.375000,107500.000000 +-118.270000,34.000000,43.000000,1638.000000,434.000000,1213.000000,390.000000,1.340300,110800.000000 +-122.250000,37.890000,41.000000,1125.000000,195.000000,356.000000,181.000000,6.159300,344000.000000 +-117.300000,34.090000,40.000000,1051.000000,244.000000,745.000000,243.000000,2.184200,75200.000000 +-120.910000,37.740000,19.000000,1690.000000,327.000000,855.000000,296.000000,3.250000,176700.000000 +-122.160000,38.900000,33.000000,1221.000000,236.000000,488.000000,199.000000,3.757400,92700.000000 +-118.310000,33.890000,35.000000,2144.000000,423.000000,1192.000000,417.000000,4.145800,231500.000000 +-118.180000,34.020000,43.000000,887.000000,219.000000,965.000000,217.000000,2.625000,133900.000000 +-117.970000,33.750000,32.000000,1564.000000,270.000000,973.000000,290.000000,3.750000,190400.000000 +-117.950000,35.080000,1.000000,83.000000,15.000000,32.000000,15.000000,4.875000,141700.000000 +-118.030000,33.910000,35.000000,2323.000000,406.000000,1741.000000,398.000000,4.243700,164100.000000 +-118.380000,33.970000,43.000000,2715.000000,458.000000,1151.000000,434.000000,7.489700,362600.000000 +-119.820000,36.720000,25.000000,2581.000000,528.000000,1642.000000,509.000000,1.643500,52600.000000 +-122.060000,37.680000,30.000000,5367.000000,1207.000000,2667.000000,1047.000000,3.179600,170300.000000 
+-122.410000,40.550000,19.000000,3753.000000,761.000000,1952.000000,738.000000,3.095400,86500.000000 +-117.880000,33.720000,36.000000,1910.000000,352.000000,1593.000000,329.000000,3.890000,170000.000000 +-120.800000,38.310000,37.000000,1341.000000,256.000000,533.000000,242.000000,3.213500,123600.000000 +-118.100000,34.170000,48.000000,1111.000000,229.000000,421.000000,202.000000,3.281300,268100.000000 +-118.090000,34.120000,38.000000,1713.000000,285.000000,779.000000,286.000000,5.615200,359900.000000 +-118.310000,34.060000,47.000000,3038.000000,1533.000000,4225.000000,1472.000000,1.672500,187500.000000 +-118.020000,33.800000,16.000000,2956.000000,393.000000,1379.000000,429.000000,8.495200,359600.000000 +-121.940000,37.280000,18.000000,4356.000000,1334.000000,1968.000000,1245.000000,3.629400,240000.000000 +-117.950000,34.080000,37.000000,1137.000000,203.000000,672.000000,226.000000,3.296900,189000.000000 +-118.150000,33.940000,36.000000,1948.000000,341.000000,992.000000,363.000000,4.259400,242400.000000 +-121.810000,37.990000,22.000000,2331.000000,359.000000,1086.000000,340.000000,5.143500,150800.000000 +-121.810000,38.580000,17.000000,1964.000000,314.000000,808.000000,286.000000,5.962900,286000.000000 +-121.280000,38.770000,6.000000,3819.000000,550.000000,1738.000000,587.000000,5.871800,201400.000000 +-118.430000,34.010000,43.000000,1487.000000,242.000000,675.000000,247.000000,5.340300,489800.000000 +-121.380000,38.590000,36.000000,1239.000000,237.000000,764.000000,222.000000,3.015600,103000.000000 +-117.680000,35.650000,15.000000,2701.000000,576.000000,1245.000000,513.000000,3.326900,81900.000000 +-117.690000,33.580000,8.000000,2887.000000,351.000000,1176.000000,351.000000,10.395300,500001.000000 +-118.240000,34.000000,23.000000,588.000000,157.000000,716.000000,173.000000,1.205600,87500.000000 +-117.700000,33.600000,25.000000,1321.000000,295.000000,396.000000,278.000000,3.113100,77100.000000 +-118.380000,33.860000,12.000000,4235.000000,735.000000,1798.000000,683.000000,6.424200,365500.000000 +-117.050000,32.610000,31.000000,4033.000000,715.000000,2585.000000,715.000000,3.509600,139900.000000 +-121.380000,38.640000,19.000000,4563.000000,1069.000000,2256.000000,926.000000,2.147200,143400.000000 +-117.100000,32.740000,20.000000,3854.000000,1046.000000,3555.000000,966.000000,1.674700,100000.000000 +-122.470000,37.760000,48.000000,2064.000000,484.000000,1055.000000,467.000000,2.871100,329600.000000 +-117.840000,33.760000,16.000000,238.000000,51.000000,93.000000,50.000000,5.375000,215700.000000 +-122.260000,37.880000,52.000000,2604.000000,837.000000,1798.000000,769.000000,1.725000,287500.000000 +-118.400000,33.870000,45.000000,2181.000000,505.000000,965.000000,471.000000,5.381600,500001.000000 +-122.370000,38.330000,29.000000,1868.000000,291.000000,764.000000,284.000000,4.825000,195100.000000 +-117.980000,34.010000,27.000000,2643.000000,418.000000,1344.000000,381.000000,5.705700,262100.000000 +-122.700000,38.450000,26.000000,2011.000000,557.000000,855.000000,530.000000,1.125000,233300.000000 +-118.410000,33.970000,44.000000,2789.000000,503.000000,3732.000000,474.000000,4.617600,352300.000000 +-121.920000,37.300000,36.000000,2088.000000,358.000000,772.000000,347.000000,4.276200,310100.000000 +-122.110000,37.370000,49.000000,1068.000000,190.000000,410.000000,171.000000,7.204500,500001.000000 +-121.870000,37.390000,9.000000,2522.000000,547.000000,1591.000000,481.000000,4.909100,259700.000000 +-120.180000,39.140000,25.000000,2171.000000,386.000000,248.000000,116.000000,3.037500,171900.000000 
+-117.060000,32.760000,36.000000,2785.000000,577.000000,1275.000000,527.000000,2.301500,156800.000000 +-117.240000,33.930000,12.000000,7105.000000,1447.000000,4520.000000,1333.000000,3.270500,113200.000000 +-118.250000,33.980000,47.000000,617.000000,162.000000,754.000000,144.000000,2.296900,116700.000000 +-117.800000,33.680000,14.000000,2635.000000,516.000000,1150.000000,499.000000,4.439100,306700.000000 +-119.780000,36.370000,41.000000,831.000000,149.000000,443.000000,146.000000,3.140600,100000.000000 +-117.040000,32.700000,7.000000,9311.000000,1703.000000,7302.000000,1694.000000,4.419000,156900.000000 +-118.290000,34.000000,6.000000,1487.000000,468.000000,1509.000000,403.000000,1.463900,112500.000000 +-118.360000,34.060000,52.000000,2130.000000,455.000000,921.000000,395.000000,2.960500,500001.000000 +-122.420000,37.620000,39.000000,1355.000000,214.000000,682.000000,246.000000,6.344300,324700.000000 +-118.420000,34.250000,37.000000,1545.000000,341.000000,1909.000000,352.000000,3.679100,148100.000000 +-121.100000,38.950000,17.000000,1475.000000,403.000000,943.000000,363.000000,2.128700,55300.000000 +-117.740000,34.050000,27.000000,852.000000,237.000000,1024.000000,221.000000,2.114100,110900.000000 +-122.390000,37.740000,52.000000,126.000000,24.000000,37.000000,27.000000,10.226400,225000.000000 +-118.370000,34.080000,52.000000,2946.000000,695.000000,1258.000000,650.000000,3.978300,374100.000000 +-122.080000,37.870000,24.000000,6130.000000,1359.000000,1750.000000,1286.000000,2.916700,102700.000000 +-118.440000,34.200000,28.000000,1732.000000,435.000000,1198.000000,417.000000,2.921900,241300.000000 +-121.370000,38.560000,19.000000,6308.000000,1167.000000,3012.000000,1112.000000,2.946400,113500.000000 +-122.100000,37.930000,20.000000,10212.000000,1424.000000,4083.000000,1374.000000,8.039000,382200.000000 +-117.220000,32.950000,4.000000,18123.000000,3173.000000,7301.000000,2964.000000,6.357000,322500.000000 +-122.130000,37.460000,31.000000,2247.000000,573.000000,1711.000000,511.000000,3.264200,185600.000000 +-122.300000,38.290000,20.000000,1789.000000,434.000000,1113.000000,398.000000,2.472800,139700.000000 +-123.410000,40.610000,17.000000,769.000000,205.000000,301.000000,126.000000,1.787500,55000.000000 +-120.770000,37.010000,28.000000,1689.000000,378.000000,1057.000000,267.000000,3.125000,156300.000000 +-118.800000,34.410000,45.000000,1610.000000,406.000000,1148.000000,347.000000,2.700000,120400.000000 +-119.270000,34.270000,52.000000,1577.000000,343.000000,836.000000,335.000000,3.589300,206600.000000 +-122.470000,37.740000,52.000000,3797.000000,668.000000,1633.000000,658.000000,5.678700,363600.000000 +-118.260000,34.130000,25.000000,3208.000000,1111.000000,2843.000000,1005.000000,2.667300,218100.000000 +-119.770000,36.760000,40.000000,2009.000000,519.000000,2219.000000,505.000000,1.210100,49100.000000 +-124.160000,41.920000,19.000000,1668.000000,324.000000,841.000000,283.000000,2.133600,75000.000000 +-119.030000,36.130000,24.000000,2259.000000,408.000000,1169.000000,395.000000,1.710600,95500.000000 +-122.180000,37.790000,41.000000,1411.000000,233.000000,626.000000,214.000000,7.087500,240700.000000 +-123.850000,39.390000,23.000000,4671.000000,912.000000,2095.000000,857.000000,3.184000,140500.000000 +-122.700000,38.330000,16.000000,1244.000000,242.000000,696.000000,236.000000,3.636900,158700.000000 +-118.100000,33.850000,36.000000,956.000000,159.000000,416.000000,157.000000,4.642900,223700.000000 
+-117.990000,34.080000,35.000000,1032.000000,207.000000,954.000000,191.000000,2.890600,134800.000000 +-121.930000,37.730000,8.000000,831.000000,231.000000,404.000000,224.000000,3.375000,350000.000000 +-118.440000,34.230000,43.000000,2257.000000,429.000000,1418.000000,442.000000,4.527800,181800.000000 +-118.320000,34.260000,24.000000,5106.000000,1010.000000,2310.000000,957.000000,4.437500,191500.000000 +-118.150000,34.110000,39.000000,2618.000000,582.000000,1314.000000,532.000000,3.587500,309300.000000 +-117.740000,34.040000,27.000000,2215.000000,440.000000,1987.000000,449.000000,3.042900,129600.000000 +-121.350000,38.280000,17.000000,2756.000000,557.000000,1986.000000,530.000000,3.223400,82000.000000 +-122.750000,39.010000,17.000000,4162.000000,967.000000,889.000000,414.000000,3.418700,200500.000000 +-120.660000,35.460000,17.000000,3748.000000,609.000000,1860.000000,612.000000,4.517900,225600.000000 +-122.620000,38.920000,13.000000,520.000000,115.000000,249.000000,109.000000,1.841700,84700.000000 +-117.220000,34.260000,16.000000,8020.000000,1432.000000,1749.000000,540.000000,4.971600,162500.000000 +-117.920000,33.750000,8.000000,2325.000000,598.000000,1511.000000,565.000000,3.362900,137500.000000 +-122.280000,37.810000,36.000000,2914.000000,562.000000,1236.000000,509.000000,2.446400,102100.000000 +-118.120000,33.810000,37.000000,1798.000000,331.000000,860.000000,340.000000,4.214300,228500.000000 +-119.190000,36.060000,29.000000,1815.000000,376.000000,1421.000000,339.000000,1.909100,71300.000000 +-117.970000,34.070000,22.000000,1438.000000,364.000000,1325.000000,335.000000,2.780200,162500.000000 +-118.090000,34.030000,27.000000,3797.000000,597.000000,2043.000000,614.000000,5.500000,276800.000000 +-121.930000,37.280000,10.000000,3163.000000,832.000000,1537.000000,797.000000,4.167400,214000.000000 +-122.650000,38.960000,27.000000,2143.000000,580.000000,898.000000,367.000000,1.676900,63200.000000 +-122.490000,37.750000,48.000000,2387.000000,424.000000,1041.000000,408.000000,3.756200,321200.000000 +-122.310000,37.560000,45.000000,1792.000000,301.000000,829.000000,318.000000,4.901300,330100.000000 +-121.270000,38.140000,33.000000,3557.000000,894.000000,2659.000000,894.000000,2.288300,86900.000000 +-118.390000,34.230000,18.000000,3405.000000,831.000000,3001.000000,795.000000,3.008300,181900.000000 +-118.390000,34.070000,33.000000,5301.000000,1281.000000,2243.000000,1159.000000,4.238600,500001.000000 +-117.150000,32.920000,16.000000,2366.000000,392.000000,1482.000000,407.000000,4.902400,182900.000000 +-122.090000,37.380000,34.000000,1959.000000,342.000000,849.000000,357.000000,6.288400,414700.000000 +-117.060000,32.610000,23.000000,1630.000000,362.000000,1267.000000,418.000000,2.562500,131100.000000 +-122.330000,37.910000,36.000000,1954.000000,513.000000,1437.000000,440.000000,1.125000,93800.000000 +-116.920000,32.760000,7.000000,1659.000000,237.000000,862.000000,242.000000,5.274100,249400.000000 +-116.000000,34.120000,32.000000,3163.000000,712.000000,1358.000000,544.000000,2.125000,57700.000000 +-117.690000,33.600000,19.000000,3562.000000,439.000000,1584.000000,470.000000,6.421100,288100.000000 +-117.230000,33.910000,9.000000,11654.000000,2100.000000,7596.000000,2127.000000,4.047300,127200.000000 +-117.180000,34.040000,41.000000,1766.000000,288.000000,753.000000,278.000000,4.912500,140700.000000 +-121.330000,38.280000,14.000000,980.000000,171.000000,659.000000,183.000000,4.430600,170100.000000 +-121.880000,37.320000,38.000000,1787.000000,508.000000,2113.000000,530.000000,2.638600,177600.000000 
+-122.520000,37.970000,33.000000,563.000000,194.000000,265.000000,169.000000,2.750000,231300.000000 +-117.770000,34.060000,27.000000,2178.000000,629.000000,2379.000000,591.000000,1.976600,108000.000000 +-121.010000,37.720000,23.000000,1373.000000,264.000000,677.000000,245.000000,2.548600,161100.000000 +-117.330000,33.870000,14.000000,2300.000000,335.000000,1001.000000,311.000000,5.104500,161300.000000 +-118.240000,33.970000,37.000000,1212.000000,314.000000,1403.000000,279.000000,2.553600,117200.000000 +-117.800000,33.890000,25.000000,3121.000000,381.000000,1278.000000,389.000000,7.021700,357900.000000 +-119.620000,36.560000,30.000000,1722.000000,372.000000,1467.000000,403.000000,1.887800,51600.000000 +-122.160000,37.690000,36.000000,1118.000000,219.000000,625.000000,228.000000,3.781300,192200.000000 +-117.970000,33.800000,35.000000,2985.000000,474.000000,1614.000000,453.000000,5.463100,225600.000000 +-120.870000,37.760000,16.000000,2022.000000,413.000000,1126.000000,408.000000,2.565500,116400.000000 +-120.460000,37.310000,26.000000,3170.000000,572.000000,1524.000000,565.000000,3.480000,95300.000000 +-118.230000,34.140000,39.000000,277.000000,89.000000,182.000000,91.000000,2.395800,175000.000000 +-121.070000,38.660000,22.000000,1831.000000,274.000000,813.000000,269.000000,4.639400,173400.000000 +-120.090000,36.950000,16.000000,3222.000000,511.000000,1425.000000,503.000000,4.154400,119400.000000 +-118.210000,33.960000,38.000000,2090.000000,519.000000,1871.000000,504.000000,2.468800,169000.000000 +-122.630000,38.230000,37.000000,1966.000000,348.000000,875.000000,381.000000,4.070300,223800.000000 +-119.400000,36.250000,25.000000,1696.000000,279.000000,909.000000,291.000000,2.300000,132800.000000 +-117.380000,33.210000,31.000000,1502.000000,367.000000,1514.000000,342.000000,2.644200,103300.000000 +-117.250000,32.800000,37.000000,1096.000000,260.000000,490.000000,267.000000,3.266300,270600.000000 +-122.230000,40.570000,18.000000,1633.000000,243.000000,750.000000,252.000000,5.158500,150800.000000 +-121.230000,38.790000,45.000000,907.000000,176.000000,463.000000,190.000000,2.229200,92000.000000 +-121.550000,40.480000,14.000000,2413.000000,524.000000,805.000000,329.000000,2.785700,77400.000000 +-117.890000,33.920000,34.000000,1473.000000,312.000000,1025.000000,315.000000,3.833300,170400.000000 +-117.230000,32.720000,43.000000,952.000000,209.000000,392.000000,210.000000,2.163500,244200.000000 +-117.920000,33.790000,35.000000,1785.000000,288.000000,1033.000000,297.000000,4.573900,190500.000000 +-117.580000,34.110000,14.000000,11635.000000,2055.000000,6443.000000,2009.000000,4.754700,157600.000000 +-120.850000,38.690000,18.000000,5928.000000,1097.000000,2697.000000,1096.000000,3.487200,141400.000000 +-121.530000,38.480000,5.000000,27870.000000,5027.000000,11935.000000,4855.000000,4.881100,212200.000000 +-117.210000,32.820000,31.000000,2035.000000,383.000000,866.000000,360.000000,3.852900,212000.000000 +-117.350000,34.130000,26.000000,3920.000000,570.000000,1862.000000,552.000000,3.728600,132000.000000 +-118.170000,33.790000,30.000000,1349.000000,519.000000,2646.000000,552.000000,1.931800,115900.000000 +-118.300000,34.260000,37.000000,2824.000000,633.000000,1619.000000,573.000000,3.556800,184500.000000 +-118.020000,33.830000,16.000000,1139.000000,328.000000,665.000000,290.000000,3.293300,260000.000000 +-116.990000,33.010000,11.000000,1412.000000,185.000000,529.000000,166.000000,7.751700,500001.000000 +-122.560000,38.010000,21.000000,2144.000000,400.000000,840.000000,398.000000,4.600000,239500.000000 
+-118.150000,34.100000,39.000000,3856.000000,867.000000,1847.000000,830.000000,3.455900,364900.000000 +-117.930000,33.730000,27.000000,3662.000000,834.000000,3009.000000,743.000000,3.981600,179500.000000 +-121.090000,38.030000,21.000000,2064.000000,342.000000,1021.000000,359.000000,4.517000,152200.000000 +-116.660000,33.090000,24.000000,1378.000000,272.000000,532.000000,188.000000,1.590900,221900.000000 +-118.260000,33.830000,24.000000,3059.000000,729.000000,2064.000000,629.000000,3.551800,184600.000000 +-117.940000,33.930000,14.000000,999.000000,232.000000,1037.000000,244.000000,2.712500,166100.000000 +-116.930000,32.830000,19.000000,3038.000000,529.000000,1463.000000,509.000000,3.944000,172500.000000 +-122.290000,37.850000,52.000000,477.000000,119.000000,218.000000,106.000000,2.568200,120000.000000 +-122.480000,37.670000,14.000000,3395.000000,1059.000000,2258.000000,945.000000,2.964000,319700.000000 +-119.330000,36.310000,15.000000,1472.000000,228.000000,892.000000,257.000000,5.390900,113000.000000 +-118.410000,34.210000,35.000000,1789.000000,292.000000,897.000000,267.000000,5.592000,239900.000000 +-119.500000,34.350000,39.000000,308.000000,38.000000,59.000000,21.000000,11.779400,500001.000000 +-118.330000,34.110000,48.000000,1601.000000,464.000000,784.000000,461.000000,3.064200,342900.000000 +-118.300000,34.100000,29.000000,3403.000000,1367.000000,3432.000000,1174.000000,1.708300,166700.000000 +-119.750000,34.400000,31.000000,1997.000000,299.000000,826.000000,301.000000,6.892700,500001.000000 +-120.940000,39.320000,14.000000,3120.000000,595.000000,1569.000000,556.000000,3.538500,157400.000000 +-117.680000,35.610000,9.000000,4241.000000,832.000000,1929.000000,742.000000,3.598800,84500.000000 +-122.270000,38.120000,45.000000,4423.000000,1001.000000,2109.000000,874.000000,2.693700,111800.000000 +-118.210000,34.110000,32.000000,2759.000000,499.000000,1661.000000,533.000000,4.381200,228200.000000 +-117.230000,33.100000,4.000000,1862.000000,291.000000,685.000000,248.000000,7.745000,237400.000000 +-119.460000,35.140000,30.000000,2943.000000,697.000000,1565.000000,584.000000,2.531300,45800.000000 +-119.780000,36.760000,50.000000,1343.000000,322.000000,1063.000000,342.000000,1.750000,49800.000000 +-117.810000,33.660000,20.000000,2851.000000,490.000000,1192.000000,463.000000,5.875200,274200.000000 +-119.290000,34.310000,25.000000,1092.000000,190.000000,702.000000,215.000000,3.906300,192700.000000 +-122.410000,37.610000,46.000000,2975.000000,643.000000,1479.000000,577.000000,3.821400,273600.000000 +-120.320000,37.290000,38.000000,576.000000,130.000000,478.000000,112.000000,2.338200,59600.000000 +-118.370000,34.160000,40.000000,1973.000000,382.000000,774.000000,352.000000,4.412200,282300.000000 +-122.050000,37.050000,41.000000,2422.000000,502.000000,915.000000,366.000000,4.167900,201300.000000 +-118.460000,34.030000,52.000000,523.000000,124.000000,317.000000,130.000000,2.279400,337500.000000 +-117.120000,32.760000,43.000000,2336.000000,644.000000,1203.000000,614.000000,2.359400,127800.000000 +-122.040000,37.570000,12.000000,5719.000000,1064.000000,3436.000000,1057.000000,5.287900,231200.000000 +-121.970000,37.360000,34.000000,884.000000,153.000000,534.000000,154.000000,6.011600,271200.000000 +-121.280000,38.530000,18.000000,224.000000,38.000000,95.000000,41.000000,3.104200,165000.000000 +-119.090000,35.300000,3.000000,2821.000000,519.000000,1353.000000,495.000000,3.685200,109800.000000 +-121.750000,36.910000,42.000000,1368.000000,468.000000,2312.000000,484.000000,2.559900,151400.000000 
+-121.860000,38.000000,4.000000,4075.000000,927.000000,2239.000000,849.000000,3.585700,165200.000000 +-118.530000,34.450000,26.000000,828.000000,149.000000,508.000000,158.000000,5.237400,185500.000000 +-117.940000,33.810000,24.000000,4602.000000,1131.000000,3003.000000,1014.000000,3.677100,172200.000000 +-119.840000,34.450000,26.000000,4424.000000,616.000000,1839.000000,601.000000,6.365400,331200.000000 +-118.240000,33.910000,37.000000,1607.000000,377.000000,1526.000000,375.000000,1.715800,94300.000000 +-117.060000,33.140000,27.000000,3819.000000,674.000000,2447.000000,717.000000,3.818500,137200.000000 +-120.980000,37.670000,33.000000,1433.000000,298.000000,824.000000,302.000000,2.762100,109100.000000 +-117.740000,34.090000,30.000000,3199.000000,591.000000,2192.000000,563.000000,3.487100,136400.000000 +-118.180000,34.010000,39.000000,322.000000,82.000000,319.000000,90.000000,2.636400,148800.000000 +-118.240000,33.890000,32.000000,1132.000000,266.000000,1211.000000,279.000000,2.183800,98300.000000 +-123.080000,40.400000,10.000000,365.000000,102.000000,140.000000,49.000000,1.796900,37500.000000 +-117.320000,34.070000,52.000000,1226.000000,269.000000,693.000000,272.000000,1.996300,76900.000000 +-118.240000,33.850000,25.000000,9594.000000,1489.000000,5237.000000,1496.000000,5.968400,193300.000000 +-122.230000,37.780000,52.000000,472.000000,146.000000,415.000000,126.000000,2.642900,71300.000000 +-121.180000,38.780000,13.000000,3480.000000,528.000000,1432.000000,532.000000,6.164200,277800.000000 +-118.100000,33.910000,29.000000,505.000000,113.000000,411.000000,113.000000,2.639700,164400.000000 +-121.970000,38.040000,38.000000,2505.000000,554.000000,1595.000000,498.000000,2.583300,83500.000000 +-118.470000,34.000000,41.000000,2331.000000,636.000000,1839.000000,537.000000,2.288000,263500.000000 +-119.310000,36.390000,32.000000,2293.000000,466.000000,1538.000000,468.000000,1.934200,68600.000000 +-122.170000,37.710000,38.000000,890.000000,200.000000,481.000000,198.000000,3.244000,179800.000000 +-122.490000,37.680000,35.000000,2405.000000,461.000000,1583.000000,471.000000,5.065900,238000.000000 +-121.300000,37.980000,39.000000,3375.000000,659.000000,1388.000000,631.000000,2.636400,93800.000000 +-121.370000,38.570000,22.000000,4899.000000,847.000000,1701.000000,826.000000,5.244900,387000.000000 +-122.080000,37.610000,6.000000,2605.000000,474.000000,1568.000000,433.000000,5.040600,261400.000000 +-117.110000,32.570000,32.000000,2723.000000,586.000000,1702.000000,562.000000,3.337100,140500.000000 +-122.090000,37.400000,22.000000,1489.000000,436.000000,662.000000,470.000000,3.517900,197200.000000 +-122.010000,36.980000,27.000000,2820.000000,730.000000,1511.000000,745.000000,2.589000,242400.000000 +-118.250000,34.000000,36.000000,1033.000000,267.000000,1112.000000,229.000000,1.723700,105800.000000 +-117.830000,33.660000,16.000000,1574.000000,385.000000,515.000000,363.000000,5.342300,291700.000000 +-121.960000,37.740000,2.000000,200.000000,20.000000,25.000000,9.000000,15.000100,350000.000000 +-119.810000,36.730000,51.000000,956.000000,196.000000,662.000000,180.000000,2.101000,56700.000000 +-118.620000,34.060000,25.000000,3546.000000,584.000000,1530.000000,601.000000,7.400100,500001.000000 +-122.350000,37.960000,35.000000,1326.000000,346.000000,1023.000000,295.000000,2.072400,97700.000000 +-119.060000,36.100000,21.000000,1344.000000,249.000000,868.000000,221.000000,2.589300,63600.000000 +-122.470000,37.750000,52.000000,1598.000000,285.000000,689.000000,265.000000,4.607100,337400.000000 
+-122.540000,37.900000,41.000000,3170.000000,622.000000,1091.000000,528.000000,3.781300,389200.000000 +-119.730000,36.760000,30.000000,1548.000000,282.000000,886.000000,311.000000,3.100000,71300.000000 +-122.030000,36.960000,40.000000,584.000000,126.000000,316.000000,139.000000,3.593800,243500.000000 +-119.750000,36.780000,33.000000,1145.000000,197.000000,508.000000,198.000000,2.333300,81300.000000 +-117.300000,33.060000,24.000000,2171.000000,511.000000,870.000000,442.000000,3.194000,276300.000000 +-121.990000,36.960000,16.000000,875.000000,201.000000,300.000000,157.000000,2.625000,377300.000000 +-120.730000,39.630000,17.000000,1791.000000,356.000000,432.000000,190.000000,3.882600,92400.000000 +-118.480000,34.030000,19.000000,902.000000,284.000000,414.000000,272.000000,1.333300,310000.000000 +-118.220000,33.950000,36.000000,1679.000000,483.000000,2249.000000,487.000000,2.816700,160400.000000 +-118.240000,33.970000,43.000000,1357.000000,349.000000,1657.000000,331.000000,2.081900,111800.000000 +-117.820000,35.030000,30.000000,2555.000000,510.000000,1347.000000,467.000000,3.369300,71800.000000 +-117.020000,32.700000,18.000000,1643.000000,283.000000,1134.000000,269.000000,5.176900,133000.000000 +-122.350000,37.940000,47.000000,1275.000000,275.000000,844.000000,273.000000,2.896700,95600.000000 +-119.800000,36.780000,50.000000,1818.000000,374.000000,737.000000,338.000000,2.261400,73000.000000 +-122.190000,37.480000,38.000000,1300.000000,269.000000,608.000000,292.000000,4.556800,286900.000000 +-122.380000,37.590000,31.000000,3052.000000,844.000000,1581.000000,788.000000,3.074400,457700.000000 +-122.150000,37.750000,44.000000,1938.000000,399.000000,946.000000,331.000000,3.225000,135800.000000 +-119.350000,36.190000,6.000000,958.000000,226.000000,734.000000,230.000000,1.034900,67800.000000 +-120.450000,34.950000,7.000000,1479.000000,532.000000,1057.000000,459.000000,2.253800,162500.000000 +-122.280000,38.290000,19.000000,531.000000,112.000000,139.000000,80.000000,1.987500,325000.000000 +-122.260000,37.840000,49.000000,713.000000,202.000000,462.000000,189.000000,1.025000,118800.000000 +-122.300000,37.810000,52.000000,572.000000,109.000000,274.000000,82.000000,1.851600,85000.000000 +-118.220000,33.900000,22.000000,312.000000,107.000000,583.000000,119.000000,1.942300,98400.000000 +-117.670000,33.640000,11.000000,2722.000000,554.000000,1565.000000,508.000000,5.164500,164100.000000 +-122.020000,37.010000,20.000000,1005.000000,138.000000,345.000000,129.000000,10.096800,500001.000000 +-117.380000,33.190000,17.000000,353.000000,112.000000,359.000000,118.000000,1.562500,162500.000000 +-118.010000,34.080000,30.000000,2281.000000,522.000000,1969.000000,500.000000,3.653100,166300.000000 +-118.600000,34.130000,20.000000,14291.000000,1934.000000,5452.000000,1875.000000,9.123200,472000.000000 +-118.520000,34.200000,19.000000,4315.000000,1304.000000,2490.000000,1222.000000,2.643700,195000.000000 +-118.420000,34.270000,35.000000,2700.000000,702.000000,3444.000000,679.000000,1.486700,124000.000000 +-122.080000,37.710000,35.000000,2211.000000,350.000000,1004.000000,365.000000,5.463900,238600.000000 +-117.650000,33.570000,5.000000,1998.000000,500.000000,1185.000000,446.000000,4.354200,195600.000000 +-120.540000,37.680000,18.000000,335.000000,76.000000,189.000000,67.000000,1.227300,87500.000000 +-118.310000,34.050000,40.000000,1667.000000,365.000000,1161.000000,384.000000,3.140600,417600.000000 +-122.420000,37.600000,34.000000,3562.000000,565.000000,1542.000000,563.000000,5.878300,405100.000000 
+-118.180000,33.980000,38.000000,1477.000000,374.000000,1514.000000,408.000000,2.570300,178600.000000 +-121.250000,36.320000,12.000000,4776.000000,1082.000000,4601.000000,1066.000000,2.918400,100500.000000 +-118.170000,34.690000,12.000000,4881.000000,803.000000,2188.000000,724.000000,4.166700,171900.000000 +-120.330000,39.300000,16.000000,868.000000,178.000000,44.000000,21.000000,3.000000,175000.000000 +-118.380000,34.060000,29.000000,3946.000000,1008.000000,1676.000000,876.000000,2.782400,450000.000000 +-119.780000,36.730000,52.000000,1377.000000,319.000000,1280.000000,259.000000,1.234400,43300.000000 +-118.330000,33.970000,44.000000,2526.000000,579.000000,1423.000000,573.000000,2.536300,158800.000000 +-118.370000,34.060000,36.000000,1661.000000,395.000000,690.000000,365.000000,3.343800,500001.000000 +-119.000000,35.390000,51.000000,1373.000000,284.000000,648.000000,300.000000,2.829500,72100.000000 +-117.950000,33.870000,35.000000,1854.000000,383.000000,1115.000000,381.000000,4.478400,185200.000000 +-118.380000,34.580000,18.000000,1859.000000,375.000000,913.000000,372.000000,4.345600,148900.000000 +-118.290000,34.080000,25.000000,2459.000000,823.000000,2635.000000,763.000000,2.400000,173900.000000 +-120.970000,37.680000,16.000000,2493.000000,535.000000,1370.000000,504.000000,3.336800,121200.000000 +-122.280000,37.870000,52.000000,589.000000,132.000000,288.000000,131.000000,3.515600,200000.000000 +-118.140000,33.880000,41.000000,1531.000000,343.000000,1119.000000,341.000000,4.364600,161400.000000 +-122.060000,37.380000,20.000000,4293.000000,1272.000000,2389.000000,1210.000000,4.271900,270800.000000 +-118.540000,34.270000,28.000000,2309.000000,300.000000,931.000000,302.000000,6.741500,348200.000000 +-117.880000,33.840000,25.000000,1781.000000,349.000000,918.000000,378.000000,3.928600,262700.000000 +-118.300000,34.190000,52.000000,1704.000000,277.000000,746.000000,262.000000,4.798600,326100.000000 +-117.840000,33.800000,35.000000,1490.000000,251.000000,629.000000,257.000000,4.366100,222100.000000 +-121.270000,38.650000,25.000000,2787.000000,601.000000,1247.000000,522.000000,2.901600,159800.000000 +-117.880000,33.870000,21.000000,1519.000000,388.000000,1203.000000,366.000000,3.208300,145300.000000 +-119.880000,34.420000,22.000000,2367.000000,492.000000,1333.000000,488.000000,3.630400,312200.000000 +-118.480000,34.010000,31.000000,1829.000000,458.000000,719.000000,392.000000,4.400000,353800.000000 +-116.950000,33.860000,1.000000,6.000000,2.000000,8.000000,2.000000,1.625000,55000.000000 +-117.670000,33.510000,17.000000,2112.000000,480.000000,1893.000000,433.000000,4.038800,120400.000000 +-118.350000,34.040000,38.000000,1626.000000,375.000000,1019.000000,372.000000,2.368700,146800.000000 +-124.160000,40.800000,52.000000,2167.000000,480.000000,908.000000,451.000000,1.611100,74700.000000 +-118.350000,34.050000,33.000000,2880.000000,836.000000,1416.000000,736.000000,2.678100,328800.000000 +-119.080000,34.350000,24.000000,3663.000000,828.000000,2718.000000,778.000000,3.275700,186000.000000 +-122.510000,37.780000,45.000000,2564.000000,499.000000,1056.000000,460.000000,4.732800,351100.000000 +-118.360000,34.140000,30.000000,1376.000000,317.000000,629.000000,320.000000,3.682300,295200.000000 +-121.960000,37.550000,4.000000,3746.000000,993.000000,1606.000000,838.000000,4.138700,162500.000000 +-117.190000,32.770000,30.000000,2747.000000,640.000000,3185.000000,657.000000,3.765000,238000.000000 +-118.090000,33.890000,42.000000,1150.000000,215.000000,708.000000,204.000000,3.687500,171500.000000 
+-121.760000,36.900000,44.000000,919.000000,309.000000,1321.000000,301.000000,2.077500,121400.000000 +-118.140000,33.920000,35.000000,2378.000000,559.000000,1799.000000,546.000000,3.932700,190500.000000 +-119.060000,34.360000,52.000000,1239.000000,320.000000,934.000000,298.000000,1.861800,183300.000000 +-118.120000,34.160000,52.000000,2218.000000,437.000000,1211.000000,422.000000,5.023700,241900.000000 +-117.800000,34.150000,14.000000,7876.000000,1253.000000,3699.000000,1162.000000,5.542300,248700.000000 +-120.040000,39.240000,30.000000,2369.000000,469.000000,510.000000,213.000000,2.650000,123800.000000 +-121.470000,38.480000,25.000000,2969.000000,551.000000,1745.000000,487.000000,2.638200,76200.000000 +-122.270000,37.540000,15.000000,2126.000000,310.000000,905.000000,306.000000,8.908300,500001.000000 +-122.020000,37.540000,31.000000,1240.000000,264.000000,719.000000,236.000000,3.535000,210300.000000 +-121.380000,38.400000,15.000000,4155.000000,637.000000,1722.000000,616.000000,4.883100,154400.000000 +-122.040000,37.350000,20.000000,2016.000000,313.000000,767.000000,310.000000,6.837000,383000.000000 +-117.120000,32.760000,41.000000,1469.000000,421.000000,803.000000,395.000000,2.185600,120500.000000 +-117.340000,34.180000,7.000000,2914.000000,481.000000,1584.000000,499.000000,4.631200,124900.000000 +-121.020000,37.670000,32.000000,3951.000000,797.000000,1916.000000,740.000000,2.672200,111500.000000 +-119.060000,34.380000,33.000000,1465.000000,262.000000,731.000000,266.000000,3.946400,230300.000000 +-118.160000,33.910000,35.000000,1403.000000,338.000000,1415.000000,367.000000,3.096700,144000.000000 +-121.920000,37.340000,52.000000,2584.000000,491.000000,1087.000000,433.000000,4.400000,391300.000000 +-119.030000,34.210000,11.000000,4528.000000,729.000000,2398.000000,684.000000,5.304400,319000.000000 +-121.960000,37.340000,37.000000,663.000000,127.000000,293.000000,132.000000,3.781300,247800.000000 +-114.610000,33.620000,16.000000,1187.000000,261.000000,1115.000000,242.000000,2.175900,61500.000000 +-117.270000,33.150000,4.000000,23915.000000,4135.000000,10877.000000,3958.000000,4.635700,244900.000000 +-121.370000,38.620000,27.000000,1743.000000,380.000000,697.000000,368.000000,1.667800,166100.000000 +-118.180000,33.820000,43.000000,2210.000000,469.000000,1042.000000,418.000000,3.500000,216700.000000 +-118.020000,33.770000,33.000000,2683.000000,436.000000,1520.000000,456.000000,5.009100,211500.000000 +-120.050000,34.470000,21.000000,1241.000000,248.000000,746.000000,211.000000,3.805600,425000.000000 +-118.250000,34.010000,45.000000,782.000000,270.000000,1030.000000,235.000000,1.089800,93400.000000 +-119.540000,38.510000,14.000000,1250.000000,272.000000,721.000000,234.000000,2.350000,95700.000000 +-117.270000,34.500000,7.000000,2045.000000,342.000000,878.000000,292.000000,6.029600,194100.000000 +-121.960000,36.990000,23.000000,3209.000000,748.000000,1423.000000,666.000000,2.737500,238000.000000 +-118.190000,34.040000,45.000000,963.000000,234.000000,1194.000000,239.000000,2.180600,134900.000000 +-121.280000,37.950000,49.000000,1200.000000,364.000000,1448.000000,318.000000,1.109400,52500.000000 +-117.960000,33.790000,29.000000,1813.000000,501.000000,1170.000000,482.000000,2.067700,214500.000000 +-118.440000,34.170000,25.000000,4966.000000,1134.000000,1941.000000,958.000000,3.808100,286700.000000 +-122.310000,37.520000,35.000000,1817.000000,262.000000,659.000000,262.000000,6.833600,457200.000000 +-117.970000,33.920000,24.000000,2017.000000,416.000000,900.000000,436.000000,3.000000,251400.000000 
+-117.710000,34.050000,20.000000,2281.000000,444.000000,1545.000000,481.000000,2.573500,130500.000000 +-118.420000,34.020000,26.000000,2664.000000,842.000000,1745.000000,789.000000,3.426900,301900.000000 +-120.250000,37.110000,20.000000,2062.000000,466.000000,1285.000000,456.000000,1.531900,50500.000000 +-121.350000,38.510000,29.000000,2337.000000,391.000000,1054.000000,352.000000,4.220600,157700.000000 +-120.250000,38.550000,15.000000,4403.000000,891.000000,1103.000000,433.000000,3.012500,111700.000000 +-118.020000,34.020000,21.000000,5992.000000,986.000000,2647.000000,969.000000,5.240500,302400.000000 +-120.660000,35.260000,15.000000,5540.000000,1319.000000,2383.000000,1165.000000,2.265600,226200.000000 +-120.660000,40.420000,35.000000,1450.000000,325.000000,717.000000,297.000000,2.507400,66400.000000 +-118.150000,35.060000,15.000000,1069.000000,296.000000,569.000000,263.000000,2.044100,73300.000000 +-122.510000,37.780000,47.000000,2496.000000,494.000000,1201.000000,454.000000,4.035300,342200.000000 +-120.460000,34.650000,22.000000,1298.000000,358.000000,1272.000000,363.000000,1.648800,117500.000000 +-117.930000,33.930000,25.000000,2431.000000,534.000000,1702.000000,523.000000,3.793300,184400.000000 +-118.210000,33.970000,49.000000,1409.000000,313.000000,1268.000000,317.000000,3.940800,170600.000000 +-120.180000,34.620000,25.000000,1337.000000,219.000000,671.000000,225.000000,3.191200,226400.000000 +-122.140000,37.430000,18.000000,2060.000000,563.000000,1144.000000,600.000000,4.068600,378600.000000 +-123.110000,40.600000,23.000000,708.000000,202.000000,316.000000,136.000000,1.160200,65000.000000 +-117.940000,33.840000,25.000000,4016.000000,831.000000,2166.000000,774.000000,3.188400,135400.000000 +-122.750000,38.480000,4.000000,6487.000000,1112.000000,2958.000000,1131.000000,4.541700,197400.000000 +-121.610000,37.150000,16.000000,5498.000000,729.000000,2051.000000,694.000000,7.860100,416300.000000 +-122.420000,40.600000,5.000000,2614.000000,433.000000,1275.000000,411.000000,3.446400,122900.000000 +-119.160000,34.950000,14.000000,4054.000000,787.000000,1581.000000,579.000000,3.088200,148200.000000 +-118.630000,34.240000,9.000000,4759.000000,924.000000,1884.000000,915.000000,4.833300,277200.000000 +-121.950000,36.980000,34.000000,3745.000000,958.000000,1622.000000,802.000000,3.154600,261200.000000 +-117.250000,32.790000,43.000000,906.000000,240.000000,458.000000,205.000000,1.836500,328600.000000 +-119.180000,34.220000,15.000000,4615.000000,1008.000000,2549.000000,973.000000,3.906300,198700.000000 +-117.260000,32.820000,34.000000,5846.000000,785.000000,1817.000000,747.000000,8.496000,500001.000000 +-117.070000,32.790000,25.000000,2489.000000,314.000000,911.000000,309.000000,7.833600,277600.000000 +-116.760000,34.230000,10.000000,4374.000000,989.000000,1020.000000,376.000000,2.607100,89000.000000 +-118.250000,34.130000,52.000000,322.000000,88.000000,229.000000,89.000000,2.125000,243800.000000 +-117.280000,34.260000,18.000000,3895.000000,689.000000,1086.000000,375.000000,3.367200,133600.000000 +-122.570000,38.110000,32.000000,3521.000000,748.000000,1706.000000,723.000000,3.470500,228600.000000 +-122.450000,37.790000,52.000000,1457.000000,215.000000,495.000000,208.000000,10.709700,500001.000000 +-117.770000,33.710000,15.000000,2102.000000,295.000000,1060.000000,303.000000,7.314100,337100.000000 +-119.440000,36.610000,17.000000,1531.000000,280.000000,775.000000,246.000000,3.907300,91600.000000 +-118.320000,33.930000,37.000000,2379.000000,462.000000,1327.000000,445.000000,4.250000,172100.000000 
+-118.220000,33.790000,28.000000,3008.000000,629.000000,2537.000000,596.000000,2.300000,137500.000000 +-122.650000,38.480000,17.000000,1090.000000,164.000000,473.000000,163.000000,5.506100,231800.000000 +-121.230000,37.960000,44.000000,2204.000000,473.000000,1277.000000,435.000000,1.553900,59200.000000 +-117.860000,34.090000,26.000000,3408.000000,542.000000,1664.000000,543.000000,6.149800,239100.000000 +-122.060000,37.860000,16.000000,5187.000000,1014.000000,1512.000000,986.000000,4.455100,252400.000000 +-117.360000,34.100000,29.000000,2819.000000,637.000000,1683.000000,608.000000,2.320500,87600.000000 +-117.300000,34.100000,49.000000,60.000000,11.000000,76.000000,13.000000,2.562500,75000.000000 +-122.140000,38.030000,42.000000,118.000000,34.000000,54.000000,30.000000,2.579500,225000.000000 +-121.640000,36.800000,18.000000,5915.000000,1000.000000,2975.000000,975.000000,4.581200,255200.000000 +-122.240000,38.010000,11.000000,3751.000000,565.000000,1949.000000,555.000000,5.786200,269400.000000 +-116.860000,34.310000,19.000000,1649.000000,328.000000,382.000000,151.000000,4.055600,133000.000000 +-122.710000,37.880000,21.000000,2845.000000,552.000000,599.000000,250.000000,4.312500,495800.000000 +-117.090000,32.560000,8.000000,864.000000,156.000000,626.000000,172.000000,4.898400,151500.000000 +-122.250000,37.470000,35.000000,3183.000000,515.000000,1313.000000,487.000000,5.906200,383200.000000 +-118.120000,33.770000,20.000000,4534.000000,954.000000,1941.000000,892.000000,6.036200,463500.000000 +-120.960000,37.670000,17.000000,2434.000000,511.000000,1558.000000,546.000000,2.921900,114300.000000 +-119.300000,36.320000,23.000000,3521.000000,615.000000,1712.000000,636.000000,3.387500,92500.000000 +-117.390000,33.960000,52.000000,1992.000000,345.000000,948.000000,358.000000,3.291700,129300.000000 +-121.000000,37.600000,22.000000,4412.000000,925.000000,3116.000000,817.000000,2.689900,82100.000000 +-117.090000,32.640000,19.000000,2571.000000,791.000000,1205.000000,783.000000,1.620000,131300.000000 +-122.050000,37.930000,15.000000,7803.000000,1603.000000,2957.000000,1546.000000,4.450000,184900.000000 +-120.430000,34.870000,26.000000,1699.000000,272.000000,799.000000,266.000000,3.987100,157700.000000 +-122.090000,37.690000,43.000000,500.000000,110.000000,273.000000,120.000000,3.312500,150000.000000 +-118.460000,34.010000,39.000000,711.000000,148.000000,347.000000,153.000000,4.281300,297200.000000 +-121.980000,37.370000,35.000000,995.000000,202.000000,615.000000,199.000000,5.094200,217500.000000 +-121.970000,37.760000,8.000000,3743.000000,581.000000,1633.000000,567.000000,6.702700,381900.000000 +-117.810000,33.830000,8.000000,7326.000000,884.000000,2569.000000,798.000000,10.157000,477100.000000 +-118.160000,33.890000,38.000000,483.000000,113.000000,389.000000,108.000000,2.185900,143800.000000 +-115.570000,32.780000,25.000000,2007.000000,301.000000,1135.000000,332.000000,5.128000,99600.000000 +-117.620000,33.420000,27.000000,1005.000000,266.000000,460.000000,243.000000,3.102900,190600.000000 +-121.510000,38.560000,43.000000,1048.000000,312.000000,1320.000000,294.000000,1.064900,137500.000000 +-117.110000,32.750000,18.000000,1943.000000,587.000000,1329.000000,522.000000,1.769600,103100.000000 +-122.460000,37.720000,37.000000,1833.000000,388.000000,1093.000000,363.000000,3.070300,211800.000000 +-122.010000,37.580000,17.000000,4313.000000,717.000000,2629.000000,721.000000,5.757900,231800.000000 +-116.850000,34.260000,18.000000,6988.000000,1635.000000,2044.000000,726.000000,2.430800,90600.000000 
+-122.180000,37.150000,17.000000,1457.000000,289.000000,591.000000,235.000000,5.578500,284100.000000 +-116.950000,32.820000,19.000000,5308.000000,1058.000000,2852.000000,1092.000000,2.916100,135700.000000 +-117.230000,32.740000,16.000000,1953.000000,404.000000,798.000000,385.000000,4.816700,169800.000000 +-117.840000,34.110000,17.000000,3499.000000,621.000000,1911.000000,621.000000,4.889400,191700.000000 +-122.490000,37.760000,48.000000,1351.000000,270.000000,650.000000,265.000000,3.527800,339800.000000 +-117.930000,33.710000,10.000000,2775.000000,717.000000,1581.000000,633.000000,4.136600,158800.000000 +-118.180000,33.740000,30.000000,5915.000000,1750.000000,2136.000000,1503.000000,4.096800,310000.000000 +-118.080000,33.920000,38.000000,1335.000000,282.000000,1011.000000,269.000000,3.690800,157500.000000 +-118.300000,34.010000,52.000000,1444.000000,343.000000,1154.000000,334.000000,2.062500,134400.000000 +-122.170000,39.310000,35.000000,2791.000000,552.000000,1395.000000,476.000000,2.562500,62700.000000 +-117.140000,32.750000,19.000000,1358.000000,613.000000,766.000000,630.000000,1.035300,150000.000000 +-117.940000,34.040000,36.000000,1431.000000,354.000000,1367.000000,334.000000,3.559200,160200.000000 +-121.740000,37.190000,11.000000,1290.000000,197.000000,881.000000,191.000000,4.203900,500001.000000 +-118.360000,33.810000,26.000000,1575.000000,300.000000,881.000000,309.000000,5.177800,359900.000000 +-122.440000,37.780000,37.000000,1235.000000,314.000000,481.000000,297.000000,3.687500,492300.000000 +-118.190000,33.810000,23.000000,954.000000,390.000000,804.000000,373.000000,2.583300,181300.000000 +-117.290000,33.190000,18.000000,6235.000000,1233.000000,4127.000000,1162.000000,3.070400,151600.000000 +-117.240000,32.850000,18.000000,3117.000000,475.000000,904.000000,368.000000,6.758700,388500.000000 +-117.240000,32.800000,29.000000,3376.000000,882.000000,1513.000000,843.000000,3.101000,238200.000000 +-120.980000,38.660000,9.000000,2073.000000,404.000000,916.000000,373.000000,3.225000,163300.000000 +-119.630000,36.760000,22.000000,4126.000000,614.000000,1795.000000,613.000000,4.925000,154700.000000 +-121.650000,37.120000,14.000000,4721.000000,999.000000,2648.000000,888.000000,3.689500,239300.000000 +-121.900000,37.440000,12.000000,4228.000000,734.000000,2594.000000,732.000000,6.608600,299400.000000 +-122.110000,37.700000,23.000000,1689.000000,461.000000,828.000000,443.000000,2.155200,161400.000000 +-118.290000,33.950000,35.000000,1401.000000,362.000000,1357.000000,327.000000,2.091700,99300.000000 +-117.760000,34.060000,30.000000,1700.000000,504.000000,1719.000000,459.000000,2.227000,91900.000000 +-118.320000,34.080000,52.000000,2370.000000,473.000000,1053.000000,434.000000,4.142900,380300.000000 +-117.080000,32.720000,32.000000,2286.000000,468.000000,1741.000000,467.000000,3.044600,101900.000000 +-117.130000,32.790000,35.000000,1362.000000,243.000000,698.000000,255.000000,3.645800,173800.000000 +-121.940000,36.980000,24.000000,3010.000000,562.000000,1360.000000,504.000000,4.200600,290700.000000 +-118.230000,33.960000,36.000000,1062.000000,270.000000,1136.000000,273.000000,1.659700,109100.000000 +-121.980000,37.360000,34.000000,1735.000000,318.000000,1019.000000,301.000000,4.562500,242700.000000 +-118.280000,34.120000,50.000000,2384.000000,312.000000,836.000000,337.000000,12.876300,500001.000000 +-122.130000,37.150000,39.000000,2854.000000,613.000000,1338.000000,518.000000,3.942300,180300.000000 
+-118.200000,33.780000,48.000000,1766.000000,497.000000,1908.000000,466.000000,1.987200,168800.000000 +-117.730000,34.120000,26.000000,1279.000000,163.000000,412.000000,157.000000,6.173100,293800.000000 +-117.990000,33.690000,12.000000,2480.000000,858.000000,1441.000000,788.000000,1.670500,350000.000000 +-117.940000,34.060000,32.000000,3418.000000,662.000000,2003.000000,622.000000,4.033300,210200.000000 +-117.390000,34.110000,5.000000,2987.000000,457.000000,1821.000000,485.000000,4.888900,138900.000000 +-122.000000,38.350000,38.000000,1918.000000,364.000000,745.000000,348.000000,2.570700,126000.000000 +-120.980000,37.590000,2.000000,5042.000000,834.000000,2784.000000,787.000000,4.648400,145900.000000 +-118.260000,34.120000,45.000000,2839.000000,698.000000,1768.000000,653.000000,3.130600,214000.000000 +-122.160000,37.680000,16.000000,1687.000000,348.000000,568.000000,352.000000,2.386900,83300.000000 +-118.120000,33.830000,45.000000,1579.000000,278.000000,687.000000,285.000000,5.042400,225900.000000 +-117.880000,33.790000,32.000000,1484.000000,295.000000,928.000000,295.000000,5.141800,190300.000000 +-122.410000,37.710000,40.000000,2054.000000,433.000000,1738.000000,429.000000,4.992600,213900.000000 +-122.390000,37.730000,43.000000,4864.000000,972.000000,3134.000000,959.000000,4.339300,217300.000000 +-121.930000,36.630000,33.000000,1740.000000,342.000000,638.000000,329.000000,3.191200,319800.000000 +-120.310000,38.020000,11.000000,2366.000000,398.000000,1046.000000,387.000000,3.820300,139700.000000 +-122.470000,37.610000,34.000000,4551.000000,837.000000,2208.000000,834.000000,5.436400,279300.000000 +-117.680000,34.000000,5.000000,3761.000000,580.000000,2335.000000,648.000000,5.733800,225400.000000 +-122.280000,37.850000,41.000000,535.000000,123.000000,317.000000,119.000000,2.403800,107500.000000 +-117.180000,32.920000,4.000000,15025.000000,2616.000000,7560.000000,2392.000000,5.196000,210700.000000 +-117.700000,33.600000,26.000000,2283.000000,506.000000,634.000000,469.000000,2.377400,74300.000000 +-122.480000,37.750000,52.000000,2074.000000,401.000000,1136.000000,409.000000,4.770300,331000.000000 +-117.150000,32.740000,26.000000,3149.000000,832.000000,1320.000000,808.000000,3.025900,211700.000000 +-119.900000,36.790000,22.000000,1970.000000,332.000000,1066.000000,319.000000,3.312500,106100.000000 +-117.190000,32.780000,34.000000,4108.000000,664.000000,1659.000000,644.000000,4.409700,252000.000000 +-118.390000,34.030000,25.000000,3442.000000,1050.000000,1890.000000,914.000000,3.057400,319400.000000 +-117.780000,33.680000,15.000000,1834.000000,330.000000,841.000000,309.000000,6.063400,234300.000000 +-119.670000,36.650000,20.000000,2512.000000,449.000000,1464.000000,450.000000,3.921100,92300.000000 +-118.260000,34.020000,41.000000,848.000000,323.000000,1428.000000,313.000000,1.560300,109600.000000 +-122.240000,38.010000,16.000000,2084.000000,315.000000,1154.000000,307.000000,6.010200,235600.000000 +-122.250000,38.160000,17.000000,4459.000000,944.000000,1812.000000,888.000000,2.937500,106700.000000 +-117.320000,33.800000,11.000000,3196.000000,576.000000,1757.000000,552.000000,4.098200,173300.000000 +-118.210000,34.060000,52.000000,470.000000,115.000000,434.000000,123.000000,2.095000,109100.000000 +-119.770000,36.800000,24.000000,3748.000000,770.000000,1827.000000,719.000000,2.722200,83100.000000 +-121.860000,37.410000,16.000000,1603.000000,287.000000,1080.000000,296.000000,6.125600,266900.000000 +-117.970000,33.880000,9.000000,1344.000000,279.000000,530.000000,265.000000,5.073100,185100.000000 
+-121.840000,39.720000,52.000000,1457.000000,389.000000,802.000000,342.000000,0.956600,69000.000000 +-118.510000,34.200000,37.000000,2066.000000,434.000000,1031.000000,414.000000,4.092400,188400.000000 +-117.930000,33.780000,28.000000,4380.000000,820.000000,2187.000000,835.000000,3.901800,182300.000000 +-117.750000,33.610000,16.000000,2270.000000,488.000000,709.000000,489.000000,3.284500,227600.000000 +-121.460000,38.700000,32.000000,965.000000,183.000000,568.000000,188.000000,3.861100,93900.000000 +-119.280000,36.320000,29.000000,2274.000000,514.000000,1234.000000,521.000000,1.913800,66900.000000 +-118.740000,34.280000,21.000000,4056.000000,637.000000,1974.000000,634.000000,5.902400,221000.000000 +-119.330000,36.190000,27.000000,418.000000,163.000000,332.000000,141.000000,1.071400,63800.000000 +-118.750000,34.270000,24.000000,3241.000000,461.000000,1567.000000,446.000000,5.598300,233300.000000 +-118.210000,33.930000,33.000000,2739.000000,801.000000,3423.000000,741.000000,2.284700,132700.000000 +-122.370000,37.960000,37.000000,1572.000000,402.000000,1046.000000,350.000000,0.740300,68600.000000 +-121.980000,37.280000,27.000000,3526.000000,589.000000,1725.000000,553.000000,5.781200,275000.000000 +-117.030000,32.610000,23.000000,1553.000000,216.000000,778.000000,229.000000,5.153800,171300.000000 +-117.280000,34.410000,14.000000,2105.000000,396.000000,960.000000,396.000000,2.993400,118200.000000 +-118.020000,34.130000,33.000000,2874.000000,458.000000,1239.000000,431.000000,5.232900,430900.000000 +-117.900000,34.060000,33.000000,1330.000000,209.000000,578.000000,192.000000,5.640600,266200.000000 +-118.470000,34.240000,19.000000,2405.000000,661.000000,1855.000000,621.000000,2.311100,255400.000000 +-122.490000,37.860000,35.000000,2729.000000,538.000000,969.000000,528.000000,6.766900,500001.000000 +-121.440000,38.680000,19.000000,2476.000000,534.000000,1355.000000,463.000000,2.062500,94400.000000 +-118.360000,34.200000,14.000000,1878.000000,614.000000,1874.000000,559.000000,2.526700,231800.000000 +-117.280000,33.060000,8.000000,4172.000000,1022.000000,2585.000000,941.000000,4.011800,245800.000000 +-122.430000,37.730000,52.000000,1142.000000,224.000000,494.000000,206.000000,5.060200,298900.000000 +-118.130000,34.130000,52.000000,2826.000000,381.000000,924.000000,365.000000,7.997600,500001.000000 +-118.050000,33.950000,33.000000,1954.000000,390.000000,1600.000000,376.000000,3.612500,170800.000000 +-121.990000,38.260000,18.000000,921.000000,126.000000,368.000000,120.000000,6.084200,261100.000000 +-122.470000,37.780000,52.000000,1941.000000,436.000000,955.000000,425.000000,4.133900,396400.000000 +-121.270000,38.660000,15.000000,2642.000000,520.000000,1032.000000,475.000000,4.138200,189800.000000 +-122.240000,37.810000,52.000000,2026.000000,482.000000,709.000000,456.000000,3.272700,268500.000000 +-121.440000,38.470000,5.000000,5666.000000,1178.000000,3139.000000,1131.000000,3.360800,108900.000000 +-118.120000,33.770000,10.000000,7264.000000,1137.000000,2528.000000,1057.000000,10.223300,500001.000000 +-117.980000,33.940000,32.000000,2562.000000,491.000000,1222.000000,446.000000,4.098500,226200.000000 +-118.070000,34.160000,35.000000,2459.000000,438.000000,970.000000,437.000000,4.214300,369400.000000 +-118.190000,34.140000,46.000000,2387.000000,488.000000,1181.000000,456.000000,3.605800,257900.000000 +-118.210000,34.120000,52.000000,1301.000000,389.000000,1189.000000,361.000000,2.513900,190000.000000 +-121.920000,36.630000,36.000000,877.000000,175.000000,349.000000,168.000000,3.416700,339100.000000 
+-117.970000,33.840000,18.000000,1063.000000,209.000000,462.000000,223.000000,2.834800,219000.000000 +-118.410000,33.990000,39.000000,3014.000000,822.000000,3212.000000,777.000000,1.198500,215000.000000 +-119.440000,36.600000,34.000000,864.000000,184.000000,579.000000,171.000000,2.041700,72500.000000 +-122.700000,39.140000,13.000000,532.000000,111.000000,214.000000,62.000000,3.392900,108300.000000 +-122.300000,37.560000,37.000000,1962.000000,367.000000,1267.000000,382.000000,4.734400,271800.000000 +-121.990000,37.540000,26.000000,2332.000000,371.000000,1285.000000,404.000000,5.388000,225000.000000 +-118.380000,33.980000,25.000000,7105.000000,1012.000000,2519.000000,1004.000000,6.811200,500001.000000 +-117.980000,33.830000,17.000000,3506.000000,992.000000,2104.000000,893.000000,3.300600,185800.000000 +-117.960000,33.680000,25.000000,2004.000000,349.000000,1085.000000,343.000000,4.765600,230700.000000 +-117.640000,33.660000,6.000000,5221.000000,1217.000000,2597.000000,1119.000000,4.607600,204000.000000 +-121.290000,37.330000,36.000000,48.000000,12.000000,27.000000,8.000000,4.000000,75000.000000 +-122.440000,37.770000,52.000000,5604.000000,1268.000000,2023.000000,1196.000000,4.408500,400000.000000 +-118.330000,33.980000,28.000000,3889.000000,1199.000000,3121.000000,1046.000000,1.880600,113900.000000 +-121.290000,37.990000,30.000000,1271.000000,528.000000,2019.000000,524.000000,1.515200,81300.000000 +-121.800000,37.350000,17.000000,2529.000000,423.000000,1756.000000,429.000000,5.101700,240700.000000 +-119.290000,36.530000,33.000000,1509.000000,352.000000,1734.000000,336.000000,1.625000,50300.000000 +-118.110000,34.030000,36.000000,1493.000000,316.000000,989.000000,293.000000,3.527200,213700.000000 +-121.870000,37.420000,19.000000,12128.000000,2112.000000,6810.000000,2040.000000,6.441900,264500.000000 +-122.090000,37.700000,33.000000,4413.000000,1107.000000,2239.000000,1051.000000,2.986100,208200.000000 +-122.290000,37.870000,52.000000,2225.000000,460.000000,1145.000000,430.000000,2.616500,150000.000000 +-117.110000,32.660000,52.000000,25.000000,5.000000,14.000000,9.000000,1.625000,118800.000000 +-121.900000,37.390000,42.000000,42.000000,14.000000,26.000000,14.000000,1.736100,500001.000000 +-117.520000,33.880000,21.000000,722.000000,178.000000,770.000000,165.000000,2.565600,102500.000000 +-121.470000,38.700000,31.000000,1007.000000,181.000000,563.000000,185.000000,3.625000,91300.000000 +-122.280000,37.520000,27.000000,2958.000000,655.000000,1285.000000,577.000000,4.080100,397800.000000 +-118.410000,34.250000,33.000000,827.000000,192.000000,981.000000,184.000000,2.642900,143100.000000 +-122.250000,37.800000,52.000000,2087.000000,510.000000,1197.000000,488.000000,3.014900,218400.000000 +-119.050000,34.240000,24.000000,4341.000000,646.000000,1929.000000,703.000000,5.429800,279600.000000 +-118.260000,34.060000,33.000000,1950.000000,1047.000000,3707.000000,1012.000000,1.723800,110000.000000 +-117.090000,32.700000,15.000000,869.000000,217.000000,887.000000,216.000000,1.458300,84200.000000 +-117.390000,34.070000,15.000000,1966.000000,331.000000,1118.000000,323.000000,3.855800,122700.000000 +-122.220000,37.790000,37.000000,2343.000000,574.000000,1608.000000,523.000000,2.149400,132500.000000 +-118.430000,34.040000,52.000000,2425.000000,435.000000,962.000000,412.000000,5.858700,494700.000000 +-117.560000,33.880000,36.000000,838.000000,210.000000,722.000000,180.000000,2.486100,96200.000000 +-118.130000,34.160000,52.000000,1787.000000,427.000000,1107.000000,410.000000,2.566400,215000.000000 
+-122.210000,37.470000,33.000000,1266.000000,415.000000,1991.000000,334.000000,2.920000,202800.000000 +-118.080000,33.780000,34.000000,2287.000000,347.000000,1051.000000,346.000000,5.576700,372000.000000 +-118.230000,34.210000,29.000000,2584.000000,608.000000,1217.000000,568.000000,3.328700,273400.000000 +-117.230000,32.730000,44.000000,1168.000000,263.000000,509.000000,256.000000,2.727300,269700.000000 +-118.190000,33.770000,21.000000,2103.000000,727.000000,1064.000000,603.000000,1.617800,137500.000000 +-117.170000,32.810000,26.000000,788.000000,127.000000,346.000000,125.000000,5.060300,185700.000000 +-122.000000,36.970000,39.000000,2702.000000,646.000000,1136.000000,491.000000,2.894100,256700.000000 +-120.610000,35.120000,12.000000,3430.000000,793.000000,1840.000000,720.000000,2.982100,162000.000000 +-118.170000,33.830000,46.000000,1362.000000,214.000000,531.000000,222.000000,4.312500,290500.000000 +-117.860000,33.890000,24.000000,2002.000000,253.000000,820.000000,241.000000,6.961200,274500.000000 +-118.510000,34.220000,36.000000,1493.000000,285.000000,766.000000,272.000000,4.864600,213200.000000 +-118.260000,33.900000,38.000000,1566.000000,318.000000,981.000000,318.000000,4.023400,111900.000000 +-118.020000,34.040000,27.000000,5640.000000,1001.000000,3538.000000,978.000000,5.065000,215400.000000 +-118.370000,34.100000,37.000000,407.000000,67.000000,100.000000,47.000000,15.000100,500001.000000 +-117.990000,33.790000,35.000000,2301.000000,467.000000,2272.000000,454.000000,3.956600,167800.000000 +-122.420000,37.710000,44.000000,2080.000000,489.000000,1781.000000,478.000000,3.682700,215300.000000 +-117.250000,33.930000,8.000000,10110.000000,1761.000000,5804.000000,1703.000000,4.265400,137600.000000 +-122.040000,37.850000,27.000000,6039.000000,780.000000,2181.000000,761.000000,9.586200,469400.000000 +-117.230000,32.870000,11.000000,3123.000000,740.000000,1223.000000,634.000000,5.417000,196800.000000 +-117.160000,32.810000,35.000000,1213.000000,200.000000,532.000000,181.000000,3.680600,172400.000000 +-118.090000,33.900000,37.000000,1147.000000,258.000000,742.000000,242.000000,4.046100,153500.000000 +-118.080000,34.070000,32.000000,4089.000000,975.000000,3775.000000,955.000000,3.290000,205500.000000 +-117.090000,32.790000,31.000000,2019.000000,417.000000,872.000000,386.000000,3.196400,177700.000000 +-121.660000,37.130000,20.000000,4477.000000,924.000000,2656.000000,871.000000,3.878800,226900.000000 +-118.240000,33.960000,34.000000,946.000000,254.000000,1101.000000,239.000000,1.739600,105900.000000 +-122.020000,37.530000,21.000000,4280.000000,673.000000,2216.000000,681.000000,5.707200,242200.000000 +-117.820000,33.900000,25.000000,1137.000000,170.000000,524.000000,164.000000,7.574400,259300.000000 +-118.210000,33.940000,34.000000,710.000000,205.000000,1134.000000,233.000000,2.773400,141100.000000 +-117.880000,34.000000,32.000000,265.000000,51.000000,170.000000,50.000000,3.937500,187500.000000 +-118.110000,33.860000,36.000000,2750.000000,487.000000,1386.000000,458.000000,4.990400,221700.000000 +-118.860000,34.070000,16.000000,1409.000000,244.000000,970.000000,172.000000,8.014400,500001.000000 +-122.490000,38.320000,30.000000,1631.000000,284.000000,788.000000,284.000000,3.309800,195500.000000 +-121.660000,39.660000,17.000000,3502.000000,655.000000,1763.000000,613.000000,2.962500,101200.000000 +-122.330000,37.930000,34.000000,2326.000000,471.000000,1356.000000,441.000000,2.347500,90300.000000 +-117.280000,33.200000,20.000000,4835.000000,854.000000,2983.000000,834.000000,4.342800,152100.000000 
+-122.160000,37.720000,38.000000,1007.000000,245.000000,618.000000,239.000000,2.875000,144800.000000 +-117.850000,34.120000,30.000000,4367.000000,1033.000000,2524.000000,954.000000,3.044800,192100.000000 +-119.260000,35.500000,38.000000,2536.000000,409.000000,1133.000000,430.000000,4.237500,78600.000000 +-123.350000,40.990000,23.000000,141.000000,59.000000,47.000000,23.000000,1.125000,66000.000000 +-118.140000,34.160000,39.000000,2776.000000,840.000000,2546.000000,773.000000,2.575000,153500.000000 +-118.390000,34.230000,43.000000,1193.000000,299.000000,1184.000000,320.000000,2.151800,161600.000000 +-117.030000,32.790000,17.000000,7352.000000,1699.000000,3331.000000,1634.000000,2.700600,166300.000000 +-117.840000,33.800000,34.000000,2004.000000,331.000000,843.000000,328.000000,3.590000,230600.000000 +-116.690000,33.500000,13.000000,1187.000000,255.000000,442.000000,179.000000,1.910700,155700.000000 +-121.090000,37.610000,42.000000,1787.000000,296.000000,921.000000,287.000000,3.886400,171400.000000 +-117.140000,32.760000,35.000000,2539.000000,661.000000,1308.000000,629.000000,2.677700,146400.000000 +-122.690000,38.460000,32.000000,2970.000000,504.000000,1117.000000,512.000000,5.000000,275900.000000 +-121.130000,38.550000,8.000000,530.000000,109.000000,398.000000,96.000000,4.203100,212500.000000 +-121.870000,37.270000,25.000000,1730.000000,226.000000,721.000000,243.000000,7.584500,279300.000000 +-117.910000,33.660000,26.000000,5761.000000,1326.000000,2681.000000,1116.000000,4.034100,243300.000000 +-121.940000,37.340000,42.000000,2174.000000,420.000000,1304.000000,464.000000,3.142900,286500.000000 +-121.830000,37.950000,17.000000,1133.000000,244.000000,716.000000,235.000000,2.875000,162500.000000 +-124.170000,41.800000,16.000000,2739.000000,480.000000,1259.000000,436.000000,3.755700,109400.000000 +-118.330000,34.060000,52.000000,1368.000000,231.000000,737.000000,248.000000,8.361700,433800.000000 +-118.240000,33.800000,28.000000,636.000000,169.000000,788.000000,143.000000,3.616100,131300.000000 +-122.590000,38.120000,25.000000,7784.000000,1145.000000,3445.000000,1166.000000,6.013200,287900.000000 +-122.480000,37.710000,29.000000,1048.000000,150.000000,455.000000,152.000000,6.127800,417600.000000 +-120.730000,37.380000,37.000000,653.000000,176.000000,827.000000,176.000000,1.923600,64400.000000 +-117.040000,32.620000,26.000000,3620.000000,607.000000,2000.000000,593.000000,4.996200,156000.000000 +-118.440000,34.270000,36.000000,1111.000000,275.000000,1333.000000,266.000000,3.534700,158100.000000 +-121.000000,37.610000,36.000000,2647.000000,604.000000,2045.000000,550.000000,2.273000,62900.000000 +-117.840000,33.890000,24.000000,3935.000000,625.000000,1912.000000,593.000000,5.795100,226900.000000 +-122.250000,37.770000,52.000000,1527.000000,320.000000,825.000000,264.000000,3.453100,208800.000000 +-118.360000,34.100000,37.000000,7097.000000,2010.000000,2913.000000,1939.000000,2.875000,300000.000000 +-116.920000,32.790000,24.000000,4055.000000,742.000000,2123.000000,744.000000,4.522400,142000.000000 +-121.940000,38.350000,8.000000,3157.000000,559.000000,1758.000000,569.000000,4.412000,140100.000000 +-120.870000,35.410000,16.000000,2168.000000,444.000000,782.000000,374.000000,3.018700,278100.000000 +-118.100000,33.830000,36.000000,2000.000000,343.000000,956.000000,352.000000,5.373500,234400.000000 +-117.990000,34.070000,31.000000,1507.000000,369.000000,1548.000000,347.000000,3.432700,147200.000000 +-121.490000,37.940000,31.000000,1860.000000,394.000000,1848.000000,293.000000,2.289100,162500.000000 
+-119.630000,36.320000,36.000000,1518.000000,287.000000,749.000000,255.000000,2.233300,61000.000000 +-121.890000,39.760000,15.000000,10265.000000,1860.000000,4591.000000,1906.000000,3.070000,142600.000000 +-117.110000,32.760000,31.000000,2293.000000,549.000000,1108.000000,557.000000,3.385400,204400.000000 +-118.140000,34.070000,42.000000,1036.000000,199.000000,656.000000,215.000000,4.190200,235000.000000 +-118.260000,33.950000,38.000000,1387.000000,346.000000,1240.000000,355.000000,1.689800,95100.000000 +-122.350000,40.560000,16.000000,2801.000000,614.000000,1695.000000,563.000000,1.900000,81600.000000 +-118.260000,34.060000,40.000000,637.000000,273.000000,1150.000000,263.000000,1.862500,131300.000000 +-117.820000,33.710000,9.000000,5206.000000,992.000000,4660.000000,978.000000,2.885000,162500.000000 +-119.980000,38.960000,25.000000,2443.000000,444.000000,868.000000,342.000000,3.541700,114800.000000 +-118.430000,34.090000,27.000000,1613.000000,200.000000,497.000000,197.000000,7.983500,500001.000000 +-117.140000,32.750000,20.000000,1182.000000,379.000000,678.000000,326.000000,2.193700,162500.000000 +-118.470000,34.300000,16.000000,2495.000000,551.000000,2314.000000,567.000000,3.673600,192200.000000 +-121.780000,38.680000,39.000000,2806.000000,662.000000,1659.000000,638.000000,1.978700,97800.000000 +-122.280000,37.800000,52.000000,96.000000,31.000000,191.000000,34.000000,0.750000,162500.000000 +-117.210000,32.800000,19.000000,786.000000,282.000000,525.000000,229.000000,1.727300,137500.000000 +-121.460000,38.540000,48.000000,1001.000000,205.000000,605.000000,175.000000,1.833300,58200.000000 +-121.130000,36.210000,30.000000,1484.000000,414.000000,1200.000000,351.000000,1.754800,95800.000000 +-122.530000,37.970000,52.000000,205.000000,119.000000,228.000000,132.000000,1.906300,200000.000000 +-122.350000,37.920000,36.000000,921.000000,200.000000,585.000000,236.000000,1.922400,94000.000000 +-122.120000,37.280000,21.000000,349.000000,64.000000,149.000000,56.000000,5.869100,360000.000000 +-121.320000,38.260000,4.000000,6125.000000,1063.000000,3077.000000,953.000000,4.117900,134600.000000 +-121.910000,36.620000,40.000000,1292.000000,271.000000,504.000000,230.000000,2.475000,258300.000000 +-117.810000,33.710000,16.000000,2666.000000,387.000000,1227.000000,347.000000,7.376900,302400.000000 +-119.710000,36.810000,19.000000,2282.000000,550.000000,1034.000000,500.000000,1.661800,69700.000000 +-119.190000,34.170000,27.000000,2183.000000,364.000000,1458.000000,388.000000,4.456700,191100.000000 +-117.820000,33.790000,26.000000,2641.000000,633.000000,3657.000000,617.000000,4.133900,222300.000000 +-118.270000,34.160000,48.000000,1301.000000,253.000000,637.000000,260.000000,4.343800,252700.000000 +-118.330000,34.100000,45.000000,1913.000000,696.000000,1552.000000,611.000000,2.088800,237500.000000 +-122.290000,37.910000,46.000000,2085.000000,346.000000,748.000000,354.000000,4.053600,262000.000000 +-118.020000,33.820000,21.000000,2052.000000,456.000000,1173.000000,432.000000,3.788500,204500.000000 +-118.220000,33.960000,35.000000,1437.000000,474.000000,2113.000000,484.000000,2.617900,158800.000000 +-116.890000,32.820000,18.000000,2515.000000,443.000000,1442.000000,449.000000,5.020100,154400.000000 +-117.950000,33.860000,35.000000,2478.000000,431.000000,1333.000000,427.000000,5.209900,191400.000000 +-122.270000,37.480000,26.000000,3542.000000,507.000000,1392.000000,524.000000,8.518400,500001.000000 +-120.510000,39.520000,26.000000,2286.000000,444.000000,498.000000,216.000000,2.065000,96100.000000 
+-118.420000,34.090000,40.000000,3552.000000,392.000000,1024.000000,370.000000,15.000100,500001.000000 +-119.500000,35.270000,23.000000,3827.000000,696.000000,1993.000000,617.000000,3.074200,57900.000000 +-122.910000,39.070000,21.000000,2202.000000,484.000000,1000.000000,381.000000,2.442300,102300.000000 +-122.460000,37.770000,52.000000,1824.000000,388.000000,799.000000,363.000000,3.750000,435700.000000 +-121.540000,36.990000,27.000000,2361.000000,449.000000,1782.000000,397.000000,3.261400,305000.000000 +-118.450000,34.190000,37.000000,1073.000000,254.000000,739.000000,253.000000,2.466700,192200.000000 +-117.950000,34.050000,35.000000,1309.000000,276.000000,1113.000000,253.000000,4.375000,156500.000000 +-120.560000,35.480000,12.000000,4161.000000,731.000000,1609.000000,615.000000,5.094700,267500.000000 +-122.460000,37.650000,21.000000,2751.000000,502.000000,2027.000000,491.000000,5.257300,322900.000000 +-117.850000,33.760000,33.000000,1866.000000,327.000000,1053.000000,371.000000,4.546100,213800.000000 +-118.210000,33.920000,37.000000,1705.000000,403.000000,1839.000000,410.000000,2.583300,132700.000000 +-118.170000,33.980000,31.000000,1236.000000,329.000000,1486.000000,337.000000,3.093800,155400.000000 +-121.790000,37.340000,20.000000,2018.000000,328.000000,1196.000000,323.000000,4.931800,262400.000000 +-117.980000,33.830000,32.000000,1133.000000,166.000000,523.000000,187.000000,6.213000,230800.000000 +-118.430000,34.300000,37.000000,1394.000000,313.000000,1111.000000,327.000000,3.602300,161800.000000 +-121.690000,39.360000,34.000000,842.000000,186.000000,635.000000,165.000000,1.835500,63000.000000 +-117.270000,33.770000,16.000000,2876.000000,576.000000,1859.000000,545.000000,2.087800,101300.000000 +-122.410000,37.590000,40.000000,2401.000000,383.000000,894.000000,356.000000,5.649300,422400.000000 +-117.480000,34.100000,30.000000,2287.000000,531.000000,1796.000000,503.000000,2.583300,90600.000000 +-117.060000,32.700000,12.000000,3943.000000,737.000000,3280.000000,751.000000,4.112000,141400.000000 +-121.920000,36.630000,40.000000,1076.000000,193.000000,406.000000,180.000000,3.494300,311100.000000 +-120.440000,37.310000,16.000000,3369.000000,532.000000,1770.000000,574.000000,5.266200,126200.000000 +-117.180000,32.700000,44.000000,2655.000000,514.000000,1102.000000,489.000000,3.675900,368800.000000 +-121.570000,39.120000,30.000000,2601.000000,534.000000,1702.000000,506.000000,2.080000,56600.000000 +-122.210000,37.790000,52.000000,762.000000,190.000000,600.000000,195.000000,3.089300,125000.000000 +-118.910000,35.300000,28.000000,1793.000000,358.000000,1233.000000,351.000000,2.784500,82200.000000 +-121.950000,37.320000,20.000000,1145.000000,198.000000,431.000000,173.000000,3.110300,281900.000000 +-121.350000,38.680000,20.000000,7085.000000,1222.000000,3455.000000,1229.000000,4.311800,120000.000000 +-121.280000,38.760000,47.000000,2901.000000,631.000000,1276.000000,578.000000,2.136600,101900.000000 +-118.350000,33.890000,30.000000,1143.000000,299.000000,776.000000,273.000000,4.282900,240000.000000 +-121.980000,37.970000,26.000000,2714.000000,390.000000,1232.000000,409.000000,5.961700,231100.000000 +-120.020000,38.920000,24.000000,1194.000000,246.000000,414.000000,151.000000,3.239600,101900.000000 +-122.280000,37.770000,52.000000,1468.000000,363.000000,870.000000,347.000000,2.968800,220800.000000 +-118.060000,34.580000,36.000000,1493.000000,258.000000,899.000000,260.000000,3.860000,109300.000000 +-119.020000,35.380000,52.000000,90.000000,35.000000,36.000000,31.000000,0.805400,60000.000000 
+-122.430000,37.790000,52.000000,6186.000000,1566.000000,2065.000000,1374.000000,5.854300,500001.000000 +-118.070000,33.860000,17.000000,3666.000000,562.000000,2104.000000,579.000000,5.681800,338900.000000 +-122.300000,38.000000,34.000000,1712.000000,317.000000,956.000000,341.000000,4.439400,162000.000000 +-117.170000,33.280000,16.000000,1921.000000,312.000000,862.000000,280.000000,5.178600,376800.000000 +-117.300000,34.140000,37.000000,1454.000000,261.000000,761.000000,248.000000,2.343800,88100.000000 +-117.710000,33.600000,25.000000,1949.000000,459.000000,602.000000,428.000000,2.760100,72500.000000 +-122.500000,37.780000,46.000000,2646.000000,607.000000,1418.000000,563.000000,3.716700,332800.000000 +-122.720000,38.450000,41.000000,1743.000000,373.000000,780.000000,357.000000,3.146700,175500.000000 +-118.430000,34.180000,31.000000,2417.000000,510.000000,1102.000000,507.000000,3.890600,282200.000000 +-118.030000,33.970000,22.000000,2185.000000,623.000000,1644.000000,606.000000,2.593000,192000.000000 +-118.420000,33.990000,23.000000,5548.000000,1245.000000,2847.000000,1229.000000,4.422800,366900.000000 +-118.290000,33.960000,31.000000,4022.000000,1208.000000,3707.000000,1007.000000,1.309600,116300.000000 +-117.980000,33.730000,22.000000,4232.000000,624.000000,2408.000000,660.000000,6.653900,284900.000000 +-121.910000,39.140000,45.000000,845.000000,155.000000,343.000000,136.000000,2.125000,62000.000000 +-119.590000,36.640000,27.000000,823.000000,171.000000,798.000000,200.000000,3.052100,113800.000000 +-118.330000,34.110000,37.000000,2330.000000,434.000000,846.000000,457.000000,8.233500,430200.000000 +-120.630000,38.750000,17.000000,3145.000000,621.000000,1432.000000,559.000000,2.720100,117500.000000 +-122.120000,37.750000,28.000000,794.000000,111.000000,329.000000,109.000000,7.692300,329800.000000 +-118.350000,33.950000,45.000000,1076.000000,213.000000,781.000000,238.000000,3.950000,164000.000000 +-120.440000,34.960000,29.000000,2374.000000,562.000000,1617.000000,463.000000,2.653100,108300.000000 +-117.080000,33.120000,43.000000,107.000000,44.000000,107.000000,48.000000,0.705400,137500.000000 +-121.270000,38.610000,17.000000,6663.000000,1369.000000,2840.000000,1299.000000,2.945200,115600.000000 +-120.070000,36.960000,32.000000,1268.000000,283.000000,549.000000,273.000000,1.451100,65200.000000 +-117.660000,34.060000,39.000000,1405.000000,339.000000,1489.000000,336.000000,1.608000,91800.000000 +-117.060000,33.010000,24.000000,2618.000000,485.000000,726.000000,443.000000,3.519200,159100.000000 +-117.920000,33.730000,17.000000,1692.000000,293.000000,934.000000,280.000000,4.472800,205800.000000 +-117.930000,33.920000,34.000000,2271.000000,437.000000,1393.000000,433.000000,4.244300,174400.000000 +-122.590000,38.920000,15.000000,1410.000000,329.000000,599.000000,273.000000,2.195300,75000.000000 +-118.140000,33.840000,36.000000,3002.000000,484.000000,1322.000000,471.000000,4.933000,228900.000000 +-120.790000,37.080000,9.000000,97.000000,20.000000,91.000000,22.000000,2.906300,55000.000000 +-117.600000,34.110000,18.000000,6025.000000,1062.000000,3360.000000,1028.000000,4.888900,155700.000000 +-122.020000,37.550000,33.000000,1325.000000,274.000000,909.000000,267.000000,4.568700,177200.000000 +-118.140000,33.970000,31.000000,1161.000000,267.000000,1175.000000,282.000000,3.011400,177000.000000 +-122.310000,37.540000,38.000000,1946.000000,407.000000,975.000000,417.000000,4.072600,385400.000000 +-122.260000,37.830000,52.000000,2432.000000,715.000000,1377.000000,696.000000,2.589800,176000.000000 
+-121.880000,37.680000,23.000000,2234.000000,270.000000,854.000000,286.000000,7.333000,337200.000000 +-122.530000,37.940000,18.000000,878.000000,255.000000,384.000000,247.000000,4.734400,200000.000000 +-117.710000,33.630000,16.000000,1565.000000,274.000000,950.000000,280.000000,5.839900,220600.000000 +-120.100000,39.190000,17.000000,1480.000000,241.000000,202.000000,80.000000,3.937500,213200.000000 +-117.770000,33.720000,9.000000,2153.000000,316.000000,954.000000,324.000000,7.813900,304700.000000 +-118.010000,33.840000,35.000000,4166.000000,713.000000,2354.000000,709.000000,5.177500,213400.000000 +-122.190000,37.710000,36.000000,361.000000,69.000000,158.000000,58.000000,5.546100,262500.000000 +-120.360000,38.210000,10.000000,4300.000000,845.000000,1480.000000,609.000000,2.820800,139900.000000 +-117.320000,34.030000,13.000000,3853.000000,761.000000,1685.000000,669.000000,3.902400,122400.000000 +-117.710000,34.020000,17.000000,12689.000000,2426.000000,7343.000000,2230.000000,3.636100,157700.000000 +-118.260000,33.910000,33.000000,954.000000,241.000000,655.000000,218.000000,2.588200,92800.000000 +-121.940000,36.580000,23.000000,4911.000000,693.000000,1480.000000,606.000000,6.777000,500000.000000 +-121.760000,37.690000,29.000000,3433.000000,711.000000,1919.000000,709.000000,3.384100,184400.000000 +-121.940000,36.550000,30.000000,2722.000000,584.000000,628.000000,384.000000,3.404800,487100.000000 +-122.640000,38.010000,36.000000,1199.000000,232.000000,551.000000,229.000000,3.732100,266700.000000 +-119.340000,36.340000,5.000000,4505.000000,834.000000,1917.000000,775.000000,4.014400,126600.000000 +-122.060000,37.270000,16.000000,1612.000000,221.000000,567.000000,208.000000,10.579300,500001.000000 +-117.940000,33.730000,24.000000,4197.000000,718.000000,2468.000000,714.000000,5.256300,211400.000000 +-118.440000,33.980000,21.000000,18132.000000,5419.000000,7431.000000,4930.000000,5.335900,500001.000000 +-117.690000,34.010000,30.000000,2598.000000,573.000000,2170.000000,518.000000,2.300000,95600.000000 +-117.870000,34.150000,24.000000,5745.000000,735.000000,2061.000000,679.000000,8.282700,451400.000000 +-119.690000,36.380000,25.000000,1688.000000,302.000000,879.000000,277.000000,3.321400,103100.000000 +-122.280000,38.000000,26.000000,2335.000000,413.000000,980.000000,417.000000,3.447100,178900.000000 +-118.330000,34.040000,31.000000,1090.000000,251.000000,955.000000,239.000000,2.913000,192500.000000 +-118.170000,34.070000,37.000000,1155.000000,225.000000,814.000000,241.000000,3.875000,148500.000000 +-117.950000,34.140000,13.000000,3859.000000,710.000000,2283.000000,759.000000,4.559400,184500.000000 +-118.280000,33.790000,28.000000,1895.000000,420.000000,1422.000000,389.000000,4.381600,191300.000000 +-120.860000,37.690000,5.000000,6660.000000,1217.000000,3012.000000,1087.000000,3.080900,143600.000000 +-120.150000,39.170000,32.000000,1684.000000,359.000000,454.000000,209.000000,2.912500,145800.000000 +-117.050000,32.710000,25.000000,3292.000000,608.000000,2266.000000,592.000000,3.298600,119200.000000 +-121.440000,38.520000,36.000000,3446.000000,950.000000,2460.000000,847.000000,1.652100,69700.000000 +-118.500000,34.190000,26.000000,2156.000000,509.000000,1142.000000,470.000000,4.000000,224700.000000 +-121.440000,37.760000,5.000000,7264.000000,1285.000000,3670.000000,1146.000000,5.044300,194800.000000 +-121.950000,37.370000,39.000000,446.000000,129.000000,317.000000,127.000000,3.035700,208300.000000 +-122.430000,37.770000,52.000000,2685.000000,629.000000,1170.000000,614.000000,3.689400,418800.000000 
+-118.280000,34.010000,48.000000,483.000000,190.000000,775.000000,188.000000,2.330900,126600.000000 +-118.280000,33.840000,27.000000,2326.000000,533.000000,1697.000000,546.000000,3.863300,187900.000000 +-118.330000,34.040000,48.000000,2437.000000,443.000000,1400.000000,426.000000,2.628000,251100.000000 +-118.270000,33.950000,35.000000,2073.000000,494.000000,1753.000000,490.000000,1.500000,93600.000000 +-120.420000,34.910000,4.000000,6986.000000,1217.000000,2801.000000,1212.000000,3.213500,212700.000000 +-117.100000,32.830000,16.000000,1049.000000,154.000000,467.000000,160.000000,6.204700,248100.000000 +-121.890000,36.890000,18.000000,2774.000000,492.000000,1283.000000,353.000000,5.368000,352000.000000 +-118.220000,33.960000,42.000000,1380.000000,331.000000,1290.000000,288.000000,2.800000,161800.000000 +-117.270000,33.020000,13.000000,5723.000000,1242.000000,2450.000000,1140.000000,4.717900,376700.000000 +-121.290000,38.680000,20.000000,1881.000000,378.000000,921.000000,360.000000,1.858900,144000.000000 +-121.950000,37.310000,27.000000,2462.000000,570.000000,1278.000000,565.000000,3.565200,329500.000000 +-118.960000,35.370000,41.000000,1463.000000,339.000000,1066.000000,318.000000,1.746700,52400.000000 +-121.880000,36.580000,29.000000,4910.000000,871.000000,3438.000000,904.000000,4.043200,450000.000000 +-117.250000,34.410000,13.000000,3682.000000,668.000000,1606.000000,668.000000,2.187500,119700.000000 +-118.380000,33.770000,17.000000,10950.000000,2207.000000,4713.000000,2043.000000,6.306400,418300.000000 +-114.550000,32.800000,19.000000,2570.000000,820.000000,1431.000000,608.000000,1.275000,56100.000000 +-119.810000,34.440000,23.000000,3172.000000,588.000000,1467.000000,559.000000,4.680600,288900.000000 +-117.120000,33.490000,4.000000,21988.000000,4055.000000,8824.000000,3252.000000,3.996300,191100.000000 +-118.320000,33.800000,39.000000,1415.000000,298.000000,729.000000,278.000000,3.164800,244800.000000 +-122.180000,37.730000,43.000000,1391.000000,293.000000,855.000000,285.000000,2.519200,76400.000000 +-118.100000,34.130000,47.000000,2234.000000,276.000000,749.000000,260.000000,15.000100,500001.000000 +-122.270000,40.390000,26.000000,1833.000000,422.000000,939.000000,408.000000,1.357100,59000.000000 +-121.940000,37.730000,22.000000,6719.000000,1068.000000,2843.000000,994.000000,6.126500,260300.000000 +-121.290000,38.630000,24.000000,2868.000000,527.000000,1284.000000,487.000000,3.318200,213000.000000 +-117.590000,33.440000,3.000000,5813.000000,1264.000000,2363.000000,1041.000000,4.389700,341300.000000 +-118.440000,34.190000,29.000000,1599.000000,459.000000,1143.000000,438.000000,2.458300,199100.000000 +-118.150000,34.030000,42.000000,1481.000000,411.000000,1206.000000,394.000000,2.680600,189300.000000 +-116.480000,33.800000,15.000000,3004.000000,615.000000,437.000000,210.000000,3.666700,90000.000000 +-118.410000,33.980000,33.000000,3331.000000,777.000000,1695.000000,735.000000,3.972700,307200.000000 +-121.050000,37.650000,5.000000,3096.000000,545.000000,1760.000000,519.000000,4.570100,146400.000000 +-122.420000,37.800000,50.000000,2494.000000,731.000000,958.000000,712.000000,3.235600,500001.000000 +-117.310000,34.110000,38.000000,1208.000000,321.000000,1225.000000,317.000000,1.466300,64000.000000 +-116.990000,32.760000,21.000000,3833.000000,595.000000,1645.000000,589.000000,4.625000,273500.000000 +-122.110000,37.890000,32.000000,2372.000000,516.000000,1067.000000,492.000000,4.323500,279500.000000 
+-122.270000,37.800000,10.000000,105.000000,42.000000,125.000000,39.000000,0.972200,137500.000000 +-121.870000,37.380000,16.000000,3275.000000,529.000000,1863.000000,527.000000,5.542900,269100.000000 +-118.200000,33.770000,22.000000,1118.000000,437.000000,1190.000000,399.000000,1.979700,143800.000000 +-117.120000,32.570000,35.000000,1450.000000,256.000000,930.000000,286.000000,2.671500,133300.000000 +-118.330000,34.000000,52.000000,1114.000000,169.000000,486.000000,176.000000,4.291700,247600.000000 +-117.170000,32.820000,21.000000,2869.000000,596.000000,1471.000000,577.000000,3.037500,197600.000000 +-120.360000,40.450000,19.000000,689.000000,143.000000,355.000000,127.000000,1.733300,70000.000000 +-116.520000,33.810000,12.000000,12396.000000,2552.000000,2548.000000,1265.000000,3.439400,162200.000000 +-119.820000,36.770000,41.000000,1441.000000,274.000000,646.000000,296.000000,3.056800,71300.000000 +-118.350000,33.870000,28.000000,2319.000000,579.000000,1369.000000,564.000000,3.616900,257000.000000 +-117.340000,34.490000,9.000000,3293.000000,585.000000,1678.000000,530.000000,3.294100,98300.000000 +-118.550000,34.170000,36.000000,2127.000000,297.000000,761.000000,274.000000,7.839200,500001.000000 +-122.110000,38.090000,11.000000,673.000000,145.000000,318.000000,137.000000,2.392900,122500.000000 +-122.260000,37.560000,23.000000,7283.000000,1342.000000,3399.000000,1298.000000,5.668300,391000.000000 +-121.350000,38.660000,24.000000,3313.000000,769.000000,1631.000000,681.000000,2.555600,105700.000000 +-118.210000,34.040000,37.000000,845.000000,249.000000,881.000000,252.000000,2.245400,165000.000000 +-118.340000,34.070000,52.000000,3421.000000,598.000000,1203.000000,564.000000,4.161800,500001.000000 +-117.880000,34.130000,25.000000,2559.000000,654.000000,1674.000000,623.000000,2.854700,155600.000000 +-117.870000,33.840000,23.000000,1678.000000,369.000000,912.000000,347.000000,4.500000,237300.000000 +-117.340000,34.080000,33.000000,4924.000000,1007.000000,3502.000000,953.000000,3.233000,99400.000000 +-118.330000,34.020000,11.000000,1249.000000,313.000000,625.000000,336.000000,0.870200,170500.000000 +-118.330000,33.790000,29.000000,4389.000000,873.000000,2069.000000,901.000000,4.107100,365600.000000 +-119.290000,35.760000,15.000000,3938.000000,789.000000,3500.000000,768.000000,2.129500,59800.000000 +-117.090000,32.620000,37.000000,1538.000000,298.000000,867.000000,285.000000,3.072900,128700.000000 +-121.810000,37.250000,5.000000,1975.000000,520.000000,861.000000,440.000000,4.456500,159000.000000 +-120.290000,37.940000,17.000000,1459.000000,297.000000,753.000000,271.000000,3.050000,144800.000000 +-120.700000,35.140000,17.000000,5805.000000,1097.000000,1919.000000,932.000000,3.535200,357800.000000 +-118.170000,34.060000,36.000000,871.000000,201.000000,2862.000000,181.000000,2.184500,123800.000000 +-117.990000,33.930000,27.000000,3708.000000,718.000000,1921.000000,721.000000,4.375000,210400.000000 +-118.250000,34.220000,30.000000,2062.000000,396.000000,1089.000000,375.000000,5.536200,301200.000000 +-118.110000,33.830000,36.000000,1820.000000,313.000000,899.000000,295.000000,4.918000,225200.000000 +-122.040000,37.330000,22.000000,4011.000000,963.000000,2206.000000,879.000000,4.572100,351200.000000 +-119.670000,36.570000,32.000000,1604.000000,292.000000,868.000000,276.000000,2.190800,110000.000000 +-119.560000,36.710000,37.000000,1609.000000,374.000000,1173.000000,344.000000,2.181000,59900.000000 +-122.350000,37.960000,36.000000,2191.000000,531.000000,1563.000000,524.000000,2.516400,114200.000000 
+-117.080000,32.580000,15.000000,1462.000000,274.000000,1002.000000,271.000000,3.969800,142700.000000 +-118.610000,34.200000,29.000000,1673.000000,284.000000,794.000000,270.000000,5.519100,245800.000000 +-118.240000,34.140000,27.000000,2909.000000,1021.000000,2614.000000,935.000000,2.144400,229000.000000 +-118.400000,34.030000,43.000000,1006.000000,201.000000,520.000000,199.000000,6.566900,372800.000000 +-116.980000,33.260000,12.000000,5898.000000,1002.000000,3129.000000,945.000000,4.764700,254100.000000 +-117.930000,33.680000,33.000000,2664.000000,432.000000,1197.000000,429.000000,5.069000,264200.000000 +-122.250000,37.470000,38.000000,645.000000,124.000000,265.000000,103.000000,5.468800,305000.000000 +-118.190000,33.840000,44.000000,2731.000000,577.000000,1396.000000,555.000000,4.177100,219100.000000 +-118.450000,34.320000,23.000000,3481.000000,641.000000,1952.000000,682.000000,4.260000,189400.000000 +-122.140000,39.970000,27.000000,1079.000000,222.000000,625.000000,197.000000,3.131900,62700.000000 +-118.300000,34.020000,27.000000,2190.000000,626.000000,1768.000000,528.000000,1.244600,103800.000000 +-117.900000,33.730000,31.000000,1171.000000,306.000000,1690.000000,301.000000,3.263900,155200.000000 +-121.580000,39.150000,38.000000,1756.000000,396.000000,837.000000,401.000000,1.912200,55500.000000 +-121.950000,38.350000,16.000000,2084.000000,292.000000,1099.000000,292.000000,5.826900,150200.000000 +-117.690000,34.070000,35.000000,3222.000000,559.000000,1970.000000,550.000000,3.708300,131000.000000 +-117.080000,32.740000,35.000000,1434.000000,253.000000,753.000000,228.000000,2.381200,135100.000000 +-118.290000,34.000000,41.000000,1807.000000,493.000000,1731.000000,471.000000,1.234700,111700.000000 +-123.800000,39.460000,35.000000,1718.000000,345.000000,698.000000,299.000000,2.924300,131600.000000 +-119.120000,35.330000,4.000000,8574.000000,1489.000000,4250.000000,1444.000000,5.103600,103400.000000 +-121.800000,37.340000,20.000000,2686.000000,414.000000,1507.000000,405.000000,5.806800,263900.000000 +-117.090000,32.750000,19.000000,2739.000000,707.000000,2004.000000,622.000000,1.631800,117700.000000 +-122.130000,37.430000,40.000000,3454.000000,648.000000,1498.000000,647.000000,5.211400,438400.000000 +-117.980000,33.760000,24.000000,1880.000000,405.000000,967.000000,418.000000,4.454500,192500.000000 +-122.330000,37.940000,43.000000,1876.000000,389.000000,807.000000,377.000000,3.157100,141600.000000 +-121.440000,38.540000,39.000000,2855.000000,574.000000,1217.000000,562.000000,3.240400,93600.000000 +-118.020000,33.700000,23.000000,5069.000000,770.000000,2473.000000,769.000000,6.304700,285700.000000 +-117.880000,33.840000,26.000000,1499.000000,290.000000,755.000000,277.000000,3.589300,238500.000000 +-120.460000,37.310000,35.000000,2042.000000,378.000000,953.000000,356.000000,2.734400,87800.000000 +-118.310000,33.720000,26.000000,2711.000000,508.000000,1372.000000,459.000000,4.145100,326700.000000 +-117.820000,33.670000,17.000000,2895.000000,439.000000,1588.000000,450.000000,6.276000,290700.000000 +-117.990000,33.870000,17.000000,2334.000000,537.000000,1662.000000,535.000000,3.014700,217000.000000 +-119.800000,36.860000,7.000000,6434.000000,1201.000000,2733.000000,1045.000000,3.765600,145000.000000 +-121.470000,38.580000,43.000000,3807.000000,952.000000,1484.000000,850.000000,2.326600,137500.000000 +-117.600000,33.870000,15.000000,7626.000000,1570.000000,3823.000000,1415.000000,3.441900,138100.000000 
+-117.100000,32.750000,11.000000,2393.000000,726.000000,1905.000000,711.000000,1.344800,91300.000000 +-117.880000,33.760000,17.000000,1768.000000,474.000000,1079.000000,436.000000,1.782300,205300.000000 +-118.350000,33.990000,48.000000,2741.000000,439.000000,1115.000000,459.000000,5.051400,269100.000000 +-121.810000,37.310000,14.000000,2731.000000,578.000000,1109.000000,551.000000,3.138200,139700.000000 +-120.430000,34.900000,30.000000,2388.000000,393.000000,1117.000000,375.000000,4.105800,164000.000000 +-118.190000,34.050000,29.000000,855.000000,199.000000,785.000000,169.000000,2.696400,122200.000000 +-117.890000,33.910000,33.000000,1264.000000,224.000000,527.000000,227.000000,3.732100,216500.000000 +-118.270000,34.020000,21.000000,1314.000000,375.000000,1505.000000,366.000000,2.319000,97200.000000 +-116.730000,34.520000,16.000000,1247.000000,315.000000,433.000000,159.000000,1.056800,75000.000000 +-121.500000,38.520000,37.000000,2008.000000,466.000000,1261.000000,427.000000,2.257400,59100.000000 +-120.610000,35.120000,16.000000,1671.000000,354.000000,935.000000,340.000000,2.579200,163800.000000 +-120.630000,36.980000,20.000000,2380.000000,489.000000,1581.000000,505.000000,2.059500,61300.000000 +-117.060000,32.590000,13.000000,3920.000000,775.000000,2814.000000,760.000000,4.061600,148800.000000 +-119.020000,35.420000,40.000000,1912.000000,439.000000,1015.000000,413.000000,1.459800,52600.000000 +-118.140000,34.030000,38.000000,1447.000000,293.000000,1042.000000,284.000000,4.137500,211500.000000 +-118.310000,33.730000,52.000000,2025.000000,361.000000,957.000000,363.000000,4.205900,350000.000000 +-121.940000,38.370000,14.000000,1156.000000,216.000000,574.000000,227.000000,3.239600,143800.000000 +-122.510000,37.920000,32.000000,2622.000000,541.000000,1022.000000,464.000000,3.764700,375000.000000 +-119.450000,36.160000,27.000000,2119.000000,373.000000,1268.000000,345.000000,2.815200,106900.000000 +-118.190000,33.970000,27.000000,2911.000000,972.000000,3559.000000,945.000000,1.948500,146300.000000 +-116.710000,33.750000,25.000000,10665.000000,2161.000000,1874.000000,852.000000,3.062500,150500.000000 +-118.280000,33.990000,35.000000,1138.000000,304.000000,1128.000000,311.000000,1.881800,100000.000000 +-118.120000,33.850000,37.000000,2584.000000,453.000000,1333.000000,481.000000,4.366100,219900.000000 +-122.530000,37.630000,27.000000,2589.000000,658.000000,1386.000000,608.000000,2.908700,228200.000000 +-121.060000,37.730000,5.000000,2256.000000,420.000000,1246.000000,397.000000,4.923600,155900.000000 +-120.880000,38.450000,25.000000,1374.000000,297.000000,657.000000,288.000000,2.547600,97900.000000 +-117.110000,32.580000,12.000000,1086.000000,294.000000,870.000000,290.000000,2.421300,132500.000000 +-117.900000,33.650000,27.000000,3310.000000,598.000000,1402.000000,563.000000,6.632000,441100.000000 +-121.870000,37.660000,52.000000,775.000000,134.000000,315.000000,123.000000,5.067700,233300.000000 +-121.300000,37.960000,52.000000,1354.000000,314.000000,679.000000,311.000000,1.778800,97400.000000 +-117.800000,33.850000,16.000000,4151.000000,637.000000,1558.000000,604.000000,5.806000,304900.000000 +-118.550000,34.200000,31.000000,1963.000000,420.000000,1494.000000,415.000000,3.531300,211800.000000 +-118.440000,34.240000,36.000000,1660.000000,301.000000,1225.000000,307.000000,4.095000,184000.000000 +-117.910000,33.880000,34.000000,1851.000000,291.000000,784.000000,290.000000,5.233600,235600.000000 +-118.510000,34.230000,27.000000,4580.000000,918.000000,2252.000000,850.000000,4.792600,454400.000000 
+-119.150000,34.170000,23.000000,2239.000000,537.000000,784.000000,497.000000,1.603800,194300.000000 +-122.080000,37.900000,32.000000,1075.000000,170.000000,486.000000,173.000000,5.049900,306800.000000 +-122.410000,37.710000,28.000000,5015.000000,1240.000000,3900.000000,1029.000000,1.226900,181900.000000 +-122.220000,37.470000,35.000000,367.000000,113.000000,398.000000,109.000000,2.500000,166700.000000 +-117.870000,33.920000,17.000000,4575.000000,764.000000,2054.000000,737.000000,6.057100,272400.000000 +-122.000000,36.970000,30.000000,1029.000000,242.000000,753.000000,249.000000,3.120500,240500.000000 +-117.070000,32.600000,13.000000,1607.000000,435.000000,983.000000,400.000000,2.290300,106300.000000 +-118.160000,34.060000,27.000000,1675.000000,274.000000,785.000000,275.000000,5.828000,301100.000000 +-117.050000,33.030000,16.000000,87.000000,20.000000,32.000000,21.000000,4.357100,144600.000000 +-117.240000,33.200000,26.000000,1701.000000,404.000000,989.000000,367.000000,2.511900,171700.000000 +-119.730000,34.450000,44.000000,2261.000000,328.000000,763.000000,294.000000,6.744900,415600.000000 +-117.320000,33.170000,18.000000,2143.000000,299.000000,828.000000,283.000000,4.238300,239000.000000 +-121.830000,37.270000,14.000000,2855.000000,380.000000,1420.000000,383.000000,6.671200,311500.000000 +-122.320000,40.420000,17.000000,3019.000000,578.000000,1538.000000,545.000000,2.793000,76500.000000 +-121.770000,36.940000,18.000000,1063.000000,341.000000,1033.000000,313.000000,2.019200,171300.000000 +-118.270000,33.790000,39.000000,1513.000000,365.000000,1227.000000,354.000000,3.392900,184600.000000 +-117.930000,33.830000,30.000000,1561.000000,381.000000,1104.000000,391.000000,3.375000,201900.000000 +-117.110000,32.820000,17.000000,1787.000000,330.000000,1341.000000,314.000000,2.875000,112500.000000 +-119.230000,35.740000,16.000000,2275.000000,659.000000,1914.000000,614.000000,2.033000,68400.000000 +-122.470000,37.710000,42.000000,1961.000000,427.000000,1211.000000,409.000000,3.515600,239400.000000 +-121.930000,36.630000,41.000000,1049.000000,198.000000,428.000000,183.000000,4.357100,287500.000000 +-117.280000,33.020000,21.000000,2736.000000,585.000000,1251.000000,576.000000,4.235600,347700.000000 +-118.990000,35.240000,40.000000,282.000000,59.000000,213.000000,71.000000,2.350000,91700.000000 +-119.140000,36.230000,22.000000,2935.000000,523.000000,1927.000000,530.000000,2.587500,70400.000000 +-122.420000,40.590000,24.000000,5045.000000,972.000000,2220.000000,979.000000,2.679200,138900.000000 +-117.090000,32.660000,37.000000,1232.000000,330.000000,1086.000000,330.000000,1.638900,114300.000000 +-118.140000,34.700000,36.000000,1205.000000,317.000000,678.000000,290.000000,2.018200,98400.000000 +-122.040000,36.980000,35.000000,2155.000000,355.000000,866.000000,335.000000,5.618800,404700.000000 +-117.020000,32.800000,31.000000,2692.000000,445.000000,1129.000000,450.000000,4.458300,170000.000000 +-117.290000,34.490000,3.000000,7689.000000,1545.000000,3804.000000,1399.000000,3.387100,111800.000000 +-122.090000,37.210000,15.000000,1969.000000,332.000000,822.000000,324.000000,7.877400,394900.000000 +-121.010000,37.650000,47.000000,1713.000000,334.000000,570.000000,297.000000,2.196900,149400.000000 +-116.770000,33.080000,13.000000,1406.000000,260.000000,737.000000,279.000000,5.584200,239100.000000 +-121.960000,37.340000,36.000000,844.000000,153.000000,373.000000,160.000000,5.791000,254100.000000 +-119.700000,34.420000,41.000000,725.000000,239.000000,582.000000,214.000000,3.166700,362500.000000 
+-119.460000,35.170000,40.000000,4164.000000,812.000000,1998.000000,773.000000,2.832300,50800.000000 +-122.010000,37.300000,25.000000,4044.000000,551.000000,1699.000000,533.000000,8.083700,380600.000000 +-118.060000,33.830000,22.000000,5290.000000,1054.000000,2812.000000,1021.000000,4.530000,226400.000000 +-118.400000,34.190000,30.000000,521.000000,126.000000,306.000000,129.000000,4.112500,216700.000000 +-119.630000,34.440000,37.000000,3188.000000,442.000000,984.000000,376.000000,9.452200,500001.000000 +-117.890000,33.770000,29.000000,2577.000000,445.000000,1849.000000,470.000000,4.473200,194800.000000 +-119.540000,36.520000,16.000000,2703.000000,415.000000,1106.000000,372.000000,4.204500,120900.000000 +-118.430000,34.170000,33.000000,1679.000000,404.000000,933.000000,412.000000,2.697900,266000.000000 +-117.100000,32.580000,33.000000,393.000000,76.000000,330.000000,80.000000,4.102900,122700.000000 +-122.280000,37.790000,30.000000,4145.000000,869.000000,3668.000000,855.000000,2.544400,275000.000000 +-118.320000,34.110000,48.000000,4472.000000,1579.000000,2796.000000,1397.000000,2.397400,410700.000000 +-118.420000,34.020000,28.000000,3167.000000,737.000000,1248.000000,665.000000,3.194100,394700.000000 +-119.560000,36.510000,9.000000,3860.000000,809.000000,2157.000000,770.000000,2.503300,70100.000000 +-122.420000,37.780000,19.000000,4065.000000,1645.000000,2079.000000,1470.000000,3.146200,187500.000000 +-120.910000,37.730000,31.000000,840.000000,154.000000,429.000000,150.000000,2.406300,170200.000000 +-122.080000,37.590000,16.000000,1816.000000,365.000000,1367.000000,355.000000,4.235000,156300.000000 +-121.770000,37.310000,16.000000,1649.000000,228.000000,769.000000,230.000000,6.645500,302600.000000 +-117.050000,33.030000,14.000000,5180.000000,1051.000000,1639.000000,991.000000,4.500000,222200.000000 +-121.950000,37.260000,34.000000,1482.000000,255.000000,584.000000,246.000000,5.512100,264700.000000 +-119.030000,35.420000,45.000000,1628.000000,352.000000,754.000000,334.000000,2.570300,62400.000000 +-121.530000,38.600000,25.000000,5154.000000,1105.000000,3196.000000,1073.000000,2.756600,80200.000000 +-118.160000,33.960000,24.000000,1635.000000,507.000000,2480.000000,481.000000,2.443200,187500.000000 +-121.890000,36.600000,40.000000,626.000000,164.000000,337.000000,150.000000,2.791700,225000.000000 +-117.070000,33.670000,11.000000,939.000000,187.000000,557.000000,190.000000,2.375000,145800.000000 +-122.390000,37.590000,32.000000,4497.000000,730.000000,1846.000000,715.000000,6.132300,500001.000000 +-118.440000,34.050000,32.000000,1880.000000,435.000000,798.000000,417.000000,4.710900,500000.000000 +-121.350000,38.040000,5.000000,4303.000000,613.000000,2206.000000,621.000000,5.584200,159100.000000 +-122.420000,37.760000,52.000000,4407.000000,1192.000000,2280.000000,1076.000000,3.393700,270000.000000 +-118.020000,33.940000,33.000000,2382.000000,404.000000,1339.000000,389.000000,5.301600,192200.000000 +-121.320000,38.030000,16.000000,4045.000000,623.000000,1862.000000,625.000000,4.875000,143100.000000 +-118.380000,34.050000,49.000000,702.000000,143.000000,458.000000,187.000000,4.895800,333600.000000 +-119.290000,36.540000,18.000000,2581.000000,628.000000,2732.000000,592.000000,1.842900,58300.000000 +-117.760000,33.540000,28.000000,2250.000000,329.000000,826.000000,323.000000,6.925700,466400.000000 +-122.290000,38.290000,52.000000,3217.000000,742.000000,1670.000000,671.000000,2.439800,163100.000000 +-117.800000,33.810000,14.000000,1206.000000,142.000000,572.000000,149.000000,8.847000,388700.000000 
+-121.950000,37.350000,52.000000,2382.000000,523.000000,1096.000000,492.000000,4.265600,236100.000000 +-117.870000,33.990000,21.000000,2837.000000,515.000000,2031.000000,555.000000,4.927100,209700.000000 +-121.460000,38.560000,52.000000,907.000000,180.000000,479.000000,177.000000,2.212500,104000.000000 +-117.990000,34.080000,11.000000,2399.000000,527.000000,2307.000000,531.000000,3.562500,141000.000000 +-121.530000,38.500000,17.000000,3087.000000,477.000000,1365.000000,495.000000,6.466700,216800.000000 +-121.140000,37.520000,37.000000,1358.000000,231.000000,586.000000,214.000000,3.164500,170800.000000 +-118.290000,33.890000,35.000000,2810.000000,614.000000,1578.000000,601.000000,3.590000,200600.000000 +-117.100000,33.090000,5.000000,12045.000000,2162.000000,5640.000000,1997.000000,4.437500,353000.000000 +-123.200000,39.230000,26.000000,786.000000,168.000000,494.000000,161.000000,2.358300,105400.000000 +-117.120000,32.760000,26.000000,1221.000000,331.000000,620.000000,296.000000,2.482100,123600.000000 +-120.440000,34.960000,30.000000,1685.000000,315.000000,1290.000000,368.000000,3.472200,112500.000000 +-115.560000,32.780000,34.000000,2856.000000,555.000000,1627.000000,522.000000,3.208300,76200.000000 +-121.470000,38.560000,51.000000,2083.000000,559.000000,874.000000,524.000000,2.022100,95800.000000 +-121.680000,37.930000,44.000000,1014.000000,225.000000,704.000000,238.000000,1.655400,119400.000000 +-118.100000,34.140000,26.000000,6262.000000,1645.000000,3001.000000,1505.000000,3.657200,213200.000000 +-118.430000,34.220000,34.000000,1588.000000,360.000000,1080.000000,340.000000,3.660000,184600.000000 +-120.970000,37.660000,19.000000,1974.000000,393.000000,799.000000,377.000000,3.128600,137500.000000 +-119.340000,34.390000,27.000000,669.000000,131.000000,314.000000,106.000000,2.465900,231300.000000 +-118.500000,34.200000,34.000000,1617.000000,344.000000,938.000000,305.000000,3.915000,217700.000000 +-120.980000,38.670000,13.000000,3432.000000,516.000000,1286.000000,470.000000,5.584000,186600.000000 +-118.350000,34.000000,28.000000,3085.000000,621.000000,1162.000000,558.000000,3.250000,301000.000000 +-122.490000,38.220000,33.000000,1486.000000,290.000000,781.000000,274.000000,3.564700,251800.000000 +-118.320000,33.970000,46.000000,1504.000000,270.000000,814.000000,306.000000,4.391900,157100.000000 +-117.130000,32.690000,36.000000,1469.000000,400.000000,1271.000000,340.000000,1.043000,90100.000000 +-117.030000,33.000000,6.000000,6139.000000,793.000000,2693.000000,770.000000,7.756900,387400.000000 +-122.260000,38.020000,5.000000,3846.000000,786.000000,2053.000000,716.000000,5.047300,184800.000000 +-117.270000,32.850000,26.000000,1373.000000,336.000000,608.000000,268.000000,4.425000,475000.000000 +-117.940000,33.860000,36.000000,2824.000000,493.000000,1394.000000,507.000000,4.647700,194700.000000 +-119.310000,34.700000,19.000000,961.000000,218.000000,479.000000,138.000000,3.343800,156300.000000 +-122.100000,37.610000,35.000000,2361.000000,458.000000,1727.000000,467.000000,4.528100,173600.000000 +-118.000000,33.900000,35.000000,1942.000000,332.000000,1127.000000,325.000000,4.514400,206300.000000 +-117.370000,33.980000,43.000000,2862.000000,772.000000,1878.000000,675.000000,2.115100,96700.000000 +-121.520000,38.650000,17.000000,1269.000000,233.000000,494.000000,231.000000,3.961500,331300.000000 +-118.460000,34.070000,42.000000,2564.000000,460.000000,913.000000,414.000000,9.222500,500001.000000 +-118.040000,34.070000,39.000000,1382.000000,315.000000,1090.000000,308.000000,3.812500,174000.000000 
+-118.080000,33.880000,27.000000,923.000000,186.000000,1014.000000,204.000000,3.825000,159500.000000 +-122.430000,37.800000,52.000000,2788.000000,813.000000,1302.000000,764.000000,4.199000,400000.000000 +-119.290000,34.370000,41.000000,1408.000000,311.000000,793.000000,264.000000,2.544100,161200.000000 +-122.040000,37.000000,52.000000,3365.000000,644.000000,796.000000,333.000000,2.971200,116600.000000 +-115.570000,32.790000,50.000000,1291.000000,277.000000,864.000000,274.000000,1.666700,68100.000000 +-117.560000,34.420000,6.000000,4264.000000,749.000000,2005.000000,666.000000,3.469500,138800.000000 +-120.630000,38.680000,14.000000,1821.000000,316.000000,769.000000,266.000000,3.078900,131700.000000 +-118.320000,34.090000,44.000000,2666.000000,830.000000,2297.000000,726.000000,1.676000,208800.000000 +-118.350000,34.080000,52.000000,1710.000000,350.000000,727.000000,355.000000,4.583300,333900.000000 +-122.270000,37.510000,36.000000,1406.000000,224.000000,598.000000,237.000000,5.896400,414800.000000 +-119.060000,35.330000,14.000000,5264.000000,1064.000000,3278.000000,1049.000000,3.811700,82800.000000 +-117.150000,32.900000,12.000000,1681.000000,381.000000,1050.000000,362.000000,4.200800,176100.000000 +-122.470000,37.760000,39.000000,3200.000000,689.000000,1391.000000,618.000000,3.634600,338000.000000 +-122.030000,37.310000,19.000000,2885.000000,859.000000,1520.000000,784.000000,3.375000,275700.000000 +-119.310000,36.320000,23.000000,2945.000000,592.000000,1419.000000,532.000000,2.573300,88800.000000 +-120.580000,38.770000,15.000000,2155.000000,394.000000,857.000000,356.000000,4.030000,141200.000000 +-117.210000,34.490000,14.000000,2125.000000,348.000000,1067.000000,360.000000,3.633300,116200.000000 +-122.080000,37.650000,35.000000,1813.000000,393.000000,1093.000000,374.000000,3.681800,165400.000000 +-122.250000,40.150000,15.000000,1677.000000,346.000000,858.000000,327.000000,2.437500,59200.000000 +-118.210000,34.140000,44.000000,1681.000000,407.000000,1105.000000,387.000000,3.222200,186500.000000 +-122.140000,37.730000,51.000000,2619.000000,403.000000,922.000000,393.000000,4.604200,251900.000000 +-121.590000,39.770000,24.000000,1535.000000,276.000000,664.000000,273.000000,2.306800,97300.000000 +-122.190000,37.470000,44.000000,1371.000000,263.000000,589.000000,301.000000,4.806800,312300.000000 +-120.440000,34.950000,38.000000,3004.000000,794.000000,2601.000000,747.000000,2.274300,106400.000000 +-121.780000,37.310000,7.000000,1973.000000,328.000000,1047.000000,303.000000,6.234000,292200.000000 +-118.240000,34.200000,41.000000,2067.000000,452.000000,1282.000000,455.000000,5.575600,309900.000000 +-121.570000,39.160000,33.000000,2033.000000,375.000000,914.000000,330.000000,2.696400,68500.000000 +-119.840000,36.830000,17.000000,2273.000000,298.000000,700.000000,263.000000,6.864500,195900.000000 +-119.290000,34.440000,34.000000,4314.000000,878.000000,2361.000000,831.000000,3.227900,243100.000000 +-118.140000,34.180000,52.000000,1700.000000,317.000000,996.000000,329.000000,3.968800,175000.000000 +-119.570000,36.100000,37.000000,1676.000000,316.000000,707.000000,274.000000,2.059500,60700.000000 +-121.800000,37.320000,23.000000,1829.000000,346.000000,1277.000000,324.000000,4.809200,217400.000000 +-118.130000,34.160000,52.000000,1596.000000,314.000000,1024.000000,292.000000,3.671900,227900.000000 +-121.900000,37.460000,29.000000,2385.000000,513.000000,1788.000000,510.000000,3.842100,220700.000000 +-121.920000,37.330000,52.000000,2962.000000,557.000000,1215.000000,506.000000,4.776800,301100.000000 
+-123.100000,39.360000,19.000000,1056.000000,248.000000,611.000000,226.000000,1.746000,105000.000000 +-122.860000,40.560000,12.000000,1350.000000,300.000000,423.000000,172.000000,1.739300,81300.000000 +-122.440000,37.750000,52.000000,3114.000000,637.000000,1144.000000,591.000000,4.000000,375000.000000 +-120.620000,35.120000,22.000000,1240.000000,294.000000,768.000000,288.000000,2.655000,160000.000000 +-118.360000,33.880000,22.000000,1388.000000,336.000000,930.000000,287.000000,2.798100,275000.000000 +-118.360000,33.820000,26.000000,5166.000000,1313.000000,2738.000000,1239.000000,3.356500,360800.000000 +-118.270000,33.770000,39.000000,1731.000000,485.000000,2115.000000,478.000000,1.536900,141300.000000 +-122.280000,37.900000,52.000000,2003.000000,250.000000,658.000000,244.000000,10.082500,397000.000000 +-117.980000,33.660000,26.000000,3527.000000,547.000000,1615.000000,542.000000,6.162400,279400.000000 +-118.210000,33.930000,39.000000,354.000000,73.000000,184.000000,58.000000,2.767900,108900.000000 +-120.430000,37.350000,15.000000,1613.000000,203.000000,673.000000,213.000000,5.937800,212200.000000 +-120.960000,37.480000,32.000000,1256.000000,212.000000,682.000000,236.000000,2.984400,135900.000000 +-117.330000,34.120000,33.000000,933.000000,219.000000,838.000000,211.000000,1.341700,69000.000000 +-119.810000,36.780000,36.000000,1650.000000,313.000000,660.000000,298.000000,3.000000,79700.000000 +-118.380000,34.050000,35.000000,3517.000000,879.000000,1632.000000,784.000000,3.095600,500001.000000 +-117.960000,33.800000,33.000000,1984.000000,420.000000,1119.000000,387.000000,3.482100,231300.000000 +-118.430000,34.240000,37.000000,1279.000000,241.000000,987.000000,233.000000,4.005700,172700.000000 +-117.870000,33.790000,25.000000,2546.000000,545.000000,1543.000000,521.000000,4.192000,219900.000000 +-124.180000,40.790000,40.000000,1398.000000,311.000000,788.000000,279.000000,1.466800,64600.000000 +-117.240000,32.830000,18.000000,3109.000000,501.000000,949.000000,368.000000,7.435100,445700.000000 +-121.570000,37.000000,18.000000,7241.000000,1225.000000,4168.000000,1138.000000,4.571400,260300.000000 +-117.370000,33.190000,38.000000,861.000000,213.000000,486.000000,204.000000,4.187500,185000.000000 +-121.890000,37.460000,5.000000,1519.000000,186.000000,705.000000,186.000000,10.379800,500001.000000 +-122.680000,38.010000,41.000000,1865.000000,392.000000,825.000000,369.000000,4.201100,255400.000000 +-118.310000,34.020000,46.000000,2217.000000,489.000000,1227.000000,448.000000,1.685100,108800.000000 +-118.290000,33.890000,33.000000,2138.000000,567.000000,1072.000000,528.000000,2.742800,208900.000000 +-117.300000,34.120000,43.000000,1018.000000,261.000000,736.000000,215.000000,2.600000,66900.000000 +-117.300000,33.850000,15.000000,3991.000000,751.000000,2317.000000,657.000000,2.954200,127900.000000 +-117.350000,33.160000,22.000000,1331.000000,305.000000,580.000000,193.000000,3.975000,500001.000000 +-122.430000,37.760000,52.000000,2242.000000,459.000000,751.000000,464.000000,4.750000,500001.000000 +-119.010000,35.390000,29.000000,1820.000000,459.000000,1134.000000,419.000000,1.828900,59400.000000 +-121.570000,37.010000,44.000000,1448.000000,393.000000,1066.000000,357.000000,2.062500,170300.000000 +-122.420000,37.650000,39.000000,4402.000000,894.000000,2941.000000,887.000000,3.856500,239800.000000 +-122.430000,37.780000,49.000000,2246.000000,587.000000,1277.000000,546.000000,2.979200,350000.000000 +-118.130000,33.900000,36.000000,1477.000000,305.000000,788.000000,291.000000,3.625000,195800.000000 
+-118.060000,33.820000,25.000000,2637.000000,462.000000,965.000000,415.000000,4.583300,190900.000000 +-119.220000,34.340000,29.000000,3128.000000,672.000000,1815.000000,648.000000,2.982100,175700.000000 +-121.510000,38.550000,46.000000,1485.000000,278.000000,531.000000,291.000000,2.788500,137200.000000 +-121.420000,38.500000,24.000000,7740.000000,1539.000000,4333.000000,1397.000000,3.025000,87900.000000 +-122.260000,37.850000,52.000000,2202.000000,434.000000,910.000000,402.000000,3.203100,281500.000000 +-118.400000,33.870000,26.000000,6712.000000,1441.000000,2803.000000,1394.000000,5.227600,434500.000000 +-118.380000,33.890000,35.000000,1778.000000,330.000000,732.000000,312.000000,6.574500,379300.000000 +-119.950000,36.960000,18.000000,1996.000000,379.000000,1327.000000,356.000000,2.608700,96000.000000 +-118.120000,34.020000,32.000000,1789.000000,528.000000,1429.000000,517.000000,1.890600,224500.000000 +-117.900000,36.950000,19.000000,99.000000,26.000000,51.000000,22.000000,1.729200,137500.000000 +-116.280000,32.840000,18.000000,382.000000,128.000000,194.000000,69.000000,2.517900,58800.000000 +-122.450000,37.770000,52.000000,1722.000000,465.000000,885.000000,437.000000,3.090600,500001.000000 +-121.620000,39.760000,14.000000,2063.000000,559.000000,934.000000,529.000000,1.778800,85800.000000 +-122.000000,38.350000,24.000000,745.000000,116.000000,300.000000,115.000000,3.617600,158500.000000 +-121.710000,39.250000,37.000000,1871.000000,321.000000,806.000000,294.000000,4.000000,101400.000000 +-119.190000,34.220000,26.000000,3175.000000,736.000000,2460.000000,775.000000,3.125000,219900.000000 +-118.060000,33.910000,21.000000,2863.000000,701.000000,1489.000000,621.000000,3.203100,180700.000000 +-118.000000,33.930000,35.000000,1288.000000,240.000000,758.000000,250.000000,4.920500,173900.000000 +-118.260000,34.110000,47.000000,2183.000000,510.000000,1445.000000,503.000000,3.666700,210900.000000 +-118.510000,34.260000,29.000000,2472.000000,354.000000,1109.000000,397.000000,5.543300,332500.000000 +-117.960000,33.980000,25.000000,1259.000000,184.000000,599.000000,170.000000,5.740700,302200.000000 +-123.390000,38.990000,28.000000,1416.000000,294.000000,812.000000,258.000000,3.406300,109400.000000 +-121.690000,38.160000,33.000000,1808.000000,363.000000,824.000000,340.000000,3.293700,96400.000000 +-121.930000,37.320000,51.000000,2711.000000,728.000000,1607.000000,724.000000,3.000000,184700.000000 +-117.260000,33.260000,9.000000,4609.000000,798.000000,2582.000000,746.000000,4.342900,173900.000000 +-121.410000,38.530000,37.000000,1058.000000,224.000000,588.000000,231.000000,2.973700,72100.000000 +-117.900000,33.900000,18.000000,3821.000000,576.000000,1430.000000,568.000000,6.939900,349600.000000 +-118.540000,36.120000,11.000000,4103.000000,882.000000,356.000000,171.000000,2.102900,99100.000000 +-117.240000,32.820000,20.000000,2467.000000,332.000000,731.000000,335.000000,7.255900,392300.000000 +-121.900000,37.240000,24.000000,7521.000000,1364.000000,3970.000000,1318.000000,4.400400,255800.000000 +-118.170000,33.870000,49.000000,1937.000000,445.000000,1339.000000,440.000000,3.031900,162800.000000 +-117.310000,33.160000,4.000000,5846.000000,894.000000,2282.000000,801.000000,5.595600,247800.000000 +-118.410000,34.170000,35.000000,2027.000000,428.000000,879.000000,402.000000,4.692000,330900.000000 +-118.380000,34.220000,32.000000,362.000000,100.000000,348.000000,102.000000,2.267900,150000.000000 +-117.160000,33.730000,10.000000,2381.000000,454.000000,1323.000000,477.000000,2.632200,140700.000000 
+-119.710000,34.400000,27.000000,3782.000000,771.000000,1742.000000,751.000000,4.045100,395100.000000 +-117.360000,33.200000,26.000000,2447.000000,482.000000,1405.000000,486.000000,3.291700,150800.000000 +-118.210000,33.800000,41.000000,1251.000000,279.000000,1053.000000,278.000000,3.277800,150800.000000 +-120.930000,39.900000,20.000000,1511.000000,328.000000,791.000000,320.000000,2.022100,70900.000000 +-118.130000,33.840000,48.000000,1895.000000,294.000000,881.000000,293.000000,6.336400,307400.000000 +-118.270000,33.930000,41.000000,570.000000,135.000000,466.000000,121.000000,2.645800,91300.000000 +-118.050000,33.780000,25.000000,2356.000000,330.000000,937.000000,326.000000,6.626400,359100.000000 +-118.430000,34.270000,36.000000,1002.000000,250.000000,1312.000000,249.000000,3.024000,148000.000000 +-118.370000,33.910000,35.000000,1742.000000,283.000000,812.000000,282.000000,5.670400,303700.000000 +-122.500000,37.750000,46.000000,2298.000000,457.000000,1429.000000,477.000000,4.021700,272400.000000 +-118.300000,34.010000,52.000000,1908.000000,428.000000,1271.000000,394.000000,2.588500,136200.000000 +-119.160000,34.150000,23.000000,3204.000000,644.000000,2295.000000,614.000000,3.948500,196600.000000 +-117.040000,32.680000,14.000000,1320.000000,270.000000,943.000000,260.000000,5.094700,152700.000000 +-121.400000,38.610000,37.000000,1994.000000,347.000000,782.000000,355.000000,4.148800,136400.000000 +-118.030000,33.930000,35.000000,2470.000000,416.000000,1386.000000,411.000000,5.273600,179500.000000 +-119.890000,34.440000,25.000000,2786.000000,470.000000,1669.000000,462.000000,5.518400,268300.000000 +-120.880000,38.580000,8.000000,3417.000000,604.000000,1703.000000,623.000000,4.082700,170700.000000 +-118.210000,33.790000,32.000000,2020.000000,613.000000,2557.000000,562.000000,2.139700,145300.000000 +-121.460000,38.570000,52.000000,1625.000000,419.000000,614.000000,383.000000,2.054900,156700.000000 +-122.020000,37.310000,34.000000,2629.000000,433.000000,1301.000000,431.000000,6.083000,341400.000000 +-118.500000,34.160000,34.000000,3547.000000,523.000000,1187.000000,500.000000,7.139000,424000.000000 +-121.320000,38.660000,26.000000,1149.000000,193.000000,500.000000,194.000000,5.078000,163400.000000 +-118.090000,33.890000,42.000000,991.000000,215.000000,717.000000,219.000000,4.092600,164400.000000 +-118.390000,33.820000,30.000000,3433.000000,918.000000,1526.000000,828.000000,4.581700,500001.000000 +-118.000000,33.960000,37.000000,2414.000000,323.000000,878.000000,305.000000,9.154100,453800.000000 +-117.260000,33.190000,4.000000,2342.000000,595.000000,1518.000000,545.000000,2.946900,216100.000000 +-122.470000,37.870000,36.000000,4471.000000,618.000000,1315.000000,582.000000,11.570600,500001.000000 +-117.060000,32.710000,25.000000,2681.000000,596.000000,1947.000000,553.000000,2.896400,104300.000000 +-117.440000,33.930000,33.000000,1371.000000,236.000000,715.000000,227.000000,4.375000,129900.000000 +-118.120000,33.990000,26.000000,2296.000000,534.000000,1777.000000,507.000000,2.539500,191000.000000 +-118.150000,34.180000,46.000000,2230.000000,488.000000,1985.000000,456.000000,2.232800,142100.000000 +-120.430000,34.690000,33.000000,2054.000000,373.000000,1067.000000,358.000000,3.602300,128300.000000 +-120.840000,37.530000,14.000000,3643.000000,706.000000,2070.000000,697.000000,3.152300,141800.000000 +-122.070000,37.130000,26.000000,1127.000000,199.000000,543.000000,199.000000,4.979200,240000.000000 +-118.410000,33.960000,32.000000,1044.000000,219.000000,567.000000,222.000000,4.147100,284400.000000 
+-121.510000,38.790000,29.000000,1716.000000,323.000000,850.000000,282.000000,2.932400,137500.000000 +-117.330000,33.190000,15.000000,3672.000000,845.000000,1827.000000,796.000000,2.971600,173600.000000 +-116.540000,33.870000,16.000000,3648.000000,1035.000000,1687.000000,581.000000,1.916700,70400.000000 +-118.500000,34.200000,18.000000,4249.000000,933.000000,2047.000000,909.000000,4.130400,229100.000000 +-118.300000,33.750000,23.000000,1957.000000,517.000000,1454.000000,526.000000,3.505600,203100.000000 +-117.990000,33.730000,24.000000,2104.000000,421.000000,1181.000000,414.000000,3.836500,250900.000000 +-118.110000,33.890000,34.000000,2508.000000,594.000000,1549.000000,545.000000,3.206900,236500.000000 +-122.730000,38.430000,29.000000,2677.000000,691.000000,1880.000000,664.000000,2.186400,143200.000000 +-119.640000,36.340000,32.000000,2958.000000,670.000000,1504.000000,627.000000,1.860600,56700.000000 +-116.870000,34.240000,15.000000,4419.000000,822.000000,622.000000,267.000000,3.968800,182800.000000 +-118.220000,33.980000,34.000000,2283.000000,809.000000,3032.000000,832.000000,2.438700,175000.000000 +-122.340000,37.970000,19.000000,392.000000,109.000000,287.000000,81.000000,6.042600,110000.000000 +-118.080000,33.820000,26.000000,4259.000000,588.000000,1644.000000,581.000000,6.251900,345700.000000 +-117.700000,33.480000,6.000000,16590.000000,2696.000000,6223.000000,2357.000000,6.308800,340300.000000 +-118.220000,33.880000,37.000000,1149.000000,280.000000,1016.000000,250.000000,2.125000,101900.000000 +-120.970000,38.910000,7.000000,4341.000000,716.000000,1978.000000,682.000000,4.831100,172200.000000 +-122.250000,37.800000,29.000000,2468.000000,864.000000,1335.000000,773.000000,1.392900,193800.000000 +-118.410000,33.880000,40.000000,925.000000,254.000000,371.000000,227.000000,5.253300,500001.000000 +-117.040000,32.690000,27.000000,1790.000000,356.000000,1286.000000,347.000000,3.543700,115800.000000 +-122.410000,38.160000,37.000000,1549.000000,301.000000,863.000000,275.000000,2.745700,254700.000000 +-120.250000,37.930000,13.000000,493.000000,76.000000,196.000000,68.000000,3.375000,134100.000000 +-121.980000,38.390000,3.000000,9488.000000,1417.000000,4095.000000,1335.000000,5.178100,191900.000000 +-122.470000,37.720000,47.000000,1176.000000,286.000000,564.000000,258.000000,3.205900,350000.000000 +-118.180000,34.130000,39.000000,2902.000000,460.000000,1007.000000,420.000000,6.195300,363000.000000 +-118.090000,33.990000,35.000000,2787.000000,639.000000,1923.000000,614.000000,3.575700,177900.000000 +-121.940000,37.750000,16.000000,5121.000000,735.000000,2464.000000,761.000000,6.620400,296100.000000 +-117.070000,32.740000,38.000000,1901.000000,392.000000,1099.000000,406.000000,2.766100,113900.000000 +-118.140000,34.040000,40.000000,1966.000000,391.000000,1120.000000,362.000000,3.710900,198800.000000 +-122.410000,37.810000,25.000000,1178.000000,545.000000,592.000000,441.000000,3.672800,500001.000000 +-117.710000,33.630000,16.000000,1641.000000,354.000000,945.000000,318.000000,3.426100,219700.000000 +-119.640000,34.430000,34.000000,3045.000000,570.000000,1002.000000,488.000000,5.623000,500001.000000 +-118.100000,33.980000,33.000000,1927.000000,482.000000,1623.000000,479.000000,3.526800,152000.000000 +-122.040000,37.390000,5.000000,8745.000000,2211.000000,3959.000000,2019.000000,4.768500,280100.000000 +-122.030000,37.180000,10.000000,212.000000,38.000000,78.000000,21.000000,6.062200,390000.000000 +-122.300000,37.560000,36.000000,1379.000000,228.000000,750.000000,227.000000,5.538100,282000.000000 
+-117.360000,33.920000,7.000000,9376.000000,1181.000000,3570.000000,1107.000000,8.532600,315200.000000 +-121.380000,37.880000,44.000000,1158.000000,226.000000,1094.000000,224.000000,2.684200,156300.000000 +-119.980000,38.930000,28.000000,1194.000000,272.000000,494.000000,203.000000,2.328100,85800.000000 +-117.160000,32.710000,52.000000,845.000000,451.000000,1230.000000,375.000000,1.091800,22500.000000 +-122.360000,37.930000,17.000000,1258.000000,254.000000,885.000000,229.000000,3.050000,121600.000000 +-118.230000,34.170000,37.000000,4524.000000,1005.000000,2099.000000,937.000000,3.578100,366700.000000 +-118.470000,34.100000,32.000000,8041.000000,1141.000000,2768.000000,1106.000000,11.197800,500001.000000 +-124.140000,40.800000,32.000000,1373.000000,312.000000,872.000000,306.000000,2.500000,72600.000000 +-117.800000,33.550000,35.000000,2067.000000,428.000000,724.000000,377.000000,5.837100,500001.000000 +-118.020000,34.120000,38.000000,1778.000000,288.000000,870.000000,281.000000,6.578400,408500.000000 +-122.740000,38.480000,12.000000,4174.000000,670.000000,1882.000000,647.000000,4.551000,178300.000000 +-118.340000,33.830000,34.000000,1761.000000,329.000000,965.000000,329.000000,5.399000,358500.000000 +-120.680000,35.290000,37.000000,1354.000000,293.000000,753.000000,290.000000,3.250000,225000.000000 +-122.450000,37.640000,19.000000,6326.000000,1025.000000,3444.000000,984.000000,6.249800,353300.000000 +-122.040000,37.620000,35.000000,1032.000000,173.000000,453.000000,176.000000,6.396000,208500.000000 +-122.790000,38.540000,5.000000,3986.000000,737.000000,1887.000000,687.000000,3.776800,213800.000000 +-117.220000,32.860000,4.000000,16289.000000,4585.000000,7604.000000,4176.000000,3.628700,280800.000000 +-120.080000,39.610000,32.000000,1404.000000,247.000000,544.000000,201.000000,2.777800,72900.000000 +-118.360000,34.150000,41.000000,3545.000000,698.000000,1221.000000,651.000000,4.300000,500001.000000 +-121.360000,38.560000,17.000000,6225.000000,938.000000,3064.000000,947.000000,5.288100,138000.000000 +-122.320000,41.310000,45.000000,1393.000000,294.000000,521.000000,249.000000,1.191500,71900.000000 +-121.590000,39.750000,20.000000,908.000000,206.000000,481.000000,211.000000,2.200000,80800.000000 +-117.300000,34.150000,45.000000,942.000000,166.000000,401.000000,174.000000,3.859400,90800.000000 +-117.710000,33.650000,16.000000,3774.000000,456.000000,1587.000000,430.000000,8.608800,307400.000000 +-118.310000,34.260000,37.000000,1444.000000,246.000000,624.000000,239.000000,5.760000,239400.000000 +-122.040000,36.980000,51.000000,1076.000000,206.000000,495.000000,201.000000,2.928600,258300.000000 +-118.260000,34.240000,35.000000,1535.000000,283.000000,816.000000,287.000000,6.187300,312100.000000 +-118.280000,33.960000,39.000000,882.000000,221.000000,697.000000,189.000000,1.847200,99100.000000 +-123.500000,39.670000,22.000000,2124.000000,450.000000,1122.000000,446.000000,2.179300,71500.000000 +-117.190000,33.140000,12.000000,3652.000000,923.000000,1677.000000,728.000000,2.326700,92000.000000 +-121.120000,38.860000,17.000000,3949.000000,717.000000,1683.000000,686.000000,3.380200,216500.000000 +-118.410000,34.210000,35.000000,2215.000000,459.000000,1594.000000,446.000000,4.016700,193200.000000 +-116.540000,33.820000,12.000000,9482.000000,2501.000000,2725.000000,1300.000000,1.559500,115600.000000 +-121.610000,39.760000,31.000000,2431.000000,512.000000,1026.000000,427.000000,2.542800,85000.000000 +-121.990000,37.920000,14.000000,1780.000000,224.000000,764.000000,226.000000,9.024300,427700.000000 
+-122.060000,37.540000,20.000000,6483.000000,1068.000000,3526.000000,1060.000000,5.083800,248200.000000 +-122.080000,37.720000,32.000000,2476.000000,368.000000,1048.000000,367.000000,5.619400,274700.000000 +-118.930000,36.100000,19.000000,2988.000000,681.000000,1654.000000,576.000000,2.379200,90000.000000 +-122.780000,38.970000,11.000000,5175.000000,971.000000,2144.000000,792.000000,3.046600,97300.000000 +-121.220000,37.970000,37.000000,1514.000000,337.000000,1121.000000,337.000000,2.401000,58400.000000 +-121.470000,38.610000,35.000000,1372.000000,360.000000,850.000000,328.000000,1.633100,67500.000000 +-122.310000,37.540000,49.000000,1340.000000,281.000000,660.000000,284.000000,4.163000,393800.000000 +-122.000000,37.300000,29.000000,3429.000000,524.000000,1518.000000,520.000000,7.218000,400700.000000 +-122.410000,37.800000,52.000000,812.000000,252.000000,629.000000,247.000000,2.587500,500001.000000 +-118.290000,34.050000,34.000000,1102.000000,448.000000,1325.000000,439.000000,1.597200,168800.000000 +-118.610000,34.150000,32.000000,4491.000000,815.000000,1696.000000,749.000000,4.910200,319100.000000 +-116.480000,33.840000,5.000000,5480.000000,1371.000000,1050.000000,485.000000,1.720400,137500.000000 +-118.260000,33.780000,27.000000,1672.000000,491.000000,1723.000000,462.000000,2.045800,174500.000000 +-117.340000,34.510000,6.000000,5667.000000,1385.000000,2447.000000,1199.000000,2.361700,103100.000000 +-122.460000,37.670000,16.000000,3372.000000,1101.000000,2049.000000,1021.000000,4.130300,146500.000000 +-118.350000,34.110000,33.000000,7478.000000,1678.000000,2701.000000,1500.000000,4.171700,500001.000000 +-117.300000,34.100000,44.000000,589.000000,130.000000,504.000000,137.000000,1.775000,63400.000000 +-118.440000,34.150000,44.000000,1778.000000,251.000000,641.000000,251.000000,10.054900,500001.000000 +-118.630000,34.180000,32.000000,1646.000000,242.000000,697.000000,233.000000,6.668900,433000.000000 +-117.950000,33.760000,24.000000,3956.000000,812.000000,3196.000000,795.000000,4.351200,191400.000000 +-122.250000,37.450000,34.000000,2999.000000,365.000000,927.000000,369.000000,10.281100,500001.000000 +-117.590000,33.650000,4.000000,1793.000000,390.000000,897.000000,386.000000,4.246300,182800.000000 +-114.490000,33.970000,17.000000,2809.000000,635.000000,83.000000,45.000000,1.615400,87500.000000 +-118.510000,34.200000,34.000000,2871.000000,581.000000,1350.000000,535.000000,3.704900,227500.000000 +-122.030000,38.010000,27.000000,3228.000000,562.000000,1666.000000,588.000000,4.570700,175900.000000 +-118.430000,33.990000,45.000000,2092.000000,451.000000,1190.000000,429.000000,3.802100,323000.000000 +-122.510000,37.760000,43.000000,2345.000000,624.000000,1439.000000,614.000000,2.844800,268900.000000 +-119.550000,36.690000,21.000000,1551.000000,423.000000,1519.000000,406.000000,1.713200,55900.000000 +-122.240000,38.150000,10.000000,6817.000000,1188.000000,4163.000000,1135.000000,4.452900,144100.000000 +-117.870000,34.020000,16.000000,3552.000000,575.000000,2120.000000,573.000000,6.433300,271500.000000 +-122.130000,37.700000,21.000000,4124.000000,1054.000000,2162.000000,998.000000,2.632100,223100.000000 +-121.330000,38.600000,25.000000,4260.000000,607.000000,1635.000000,640.000000,6.281700,288200.000000 +-121.910000,37.470000,13.000000,5377.000000,744.000000,2759.000000,760.000000,6.868000,337300.000000 +-118.530000,34.040000,45.000000,1711.000000,264.000000,735.000000,261.000000,9.107800,500001.000000 
+-121.330000,38.000000,32.000000,4474.000000,929.000000,2177.000000,884.000000,3.288900,98900.000000 +-117.850000,34.060000,24.000000,3128.000000,497.000000,1406.000000,472.000000,7.528600,462700.000000 +-118.430000,35.120000,8.000000,1968.000000,376.000000,930.000000,360.000000,3.263200,99800.000000 +-118.070000,33.970000,36.000000,1265.000000,273.000000,1052.000000,253.000000,4.892900,156200.000000 +-117.160000,32.780000,34.000000,2515.000000,488.000000,1594.000000,515.000000,3.738100,165000.000000 +-116.290000,34.180000,15.000000,4203.000000,966.000000,1756.000000,695.000000,2.182000,60800.000000 +-120.660000,35.290000,16.000000,2272.000000,629.000000,1689.000000,649.000000,1.703100,195000.000000 +-119.790000,36.770000,30.000000,1610.000000,410.000000,1000.000000,397.000000,2.035700,60200.000000 +-122.140000,37.750000,33.000000,1334.000000,200.000000,579.000000,202.000000,6.832300,255900.000000 +-122.320000,37.970000,33.000000,1595.000000,292.000000,991.000000,300.000000,4.693700,134100.000000 +-119.800000,36.830000,17.000000,1560.000000,261.000000,709.000000,258.000000,4.331500,95800.000000 +-117.330000,33.160000,29.000000,3559.000000,552.000000,1533.000000,545.000000,4.058500,245500.000000 +-121.860000,37.230000,24.000000,4337.000000,670.000000,1936.000000,652.000000,5.890400,271400.000000 +-122.240000,37.810000,52.000000,2093.000000,550.000000,918.000000,483.000000,2.747700,243800.000000 +-120.850000,37.770000,10.000000,423.000000,110.000000,295.000000,94.000000,1.358300,85200.000000 +-116.950000,33.790000,20.000000,2399.000000,546.000000,1726.000000,542.000000,1.884500,77700.000000 +-117.220000,33.220000,16.000000,2134.000000,643.000000,1555.000000,560.000000,1.721700,175000.000000 +-122.230000,40.170000,21.000000,1401.000000,331.000000,651.000000,299.000000,2.225000,64700.000000 +-118.450000,34.030000,41.000000,2083.000000,528.000000,993.000000,481.000000,4.023100,353900.000000 +-118.990000,35.270000,32.000000,444.000000,102.000000,242.000000,87.000000,1.152800,150000.000000 +-117.580000,33.870000,34.000000,1511.000000,272.000000,773.000000,265.000000,3.531300,142100.000000 +-118.650000,36.570000,20.000000,1431.000000,416.000000,570.000000,225.000000,1.482100,143300.000000 +-121.400000,38.660000,50.000000,880.000000,150.000000,1148.000000,148.000000,2.506200,112500.000000 +-119.460000,35.860000,22.000000,1750.000000,374.000000,1113.000000,338.000000,1.505000,42700.000000 +-118.220000,33.980000,32.000000,2643.000000,737.000000,2784.000000,711.000000,2.535200,184400.000000 +-118.380000,33.820000,35.000000,3053.000000,623.000000,1311.000000,589.000000,5.158900,439200.000000 +-117.770000,33.690000,16.000000,1666.000000,341.000000,479.000000,336.000000,2.140600,55000.000000 +-118.460000,34.180000,35.000000,1819.000000,465.000000,1336.000000,419.000000,3.458300,253200.000000 +-122.420000,37.790000,6.000000,670.000000,301.000000,655.000000,284.000000,3.442300,117500.000000 +-118.310000,33.770000,20.000000,5776.000000,956.000000,2757.000000,936.000000,6.644700,416800.000000 +-121.670000,37.130000,19.000000,3269.000000,483.000000,1383.000000,452.000000,5.620500,300800.000000 +-121.330000,38.570000,17.000000,1621.000000,350.000000,706.000000,338.000000,2.368400,150000.000000 +-120.830000,37.520000,6.000000,1488.000000,252.000000,773.000000,259.000000,4.185900,150000.000000 +-118.120000,33.990000,27.000000,2316.000000,559.000000,2012.000000,544.000000,2.815500,176800.000000 +-118.110000,34.070000,39.000000,1270.000000,299.000000,1073.000000,278.000000,3.308800,186600.000000 
+-122.670000,38.240000,29.000000,2644.000000,464.000000,1372.000000,450.000000,5.054400,261800.000000 +-117.290000,34.090000,24.000000,1451.000000,387.000000,1178.000000,330.000000,1.180600,68300.000000 +-121.800000,37.190000,45.000000,1797.000000,303.000000,870.000000,281.000000,4.541700,434500.000000 +-120.300000,37.970000,17.000000,3243.000000,619.000000,1408.000000,566.000000,2.474000,120100.000000 +-120.450000,34.650000,21.000000,1182.000000,243.000000,733.000000,251.000000,3.144200,131600.000000 +-119.290000,34.230000,22.000000,2486.000000,608.000000,709.000000,523.000000,2.901800,275000.000000 +-118.340000,34.020000,49.000000,1609.000000,371.000000,896.000000,389.000000,2.515600,136600.000000 +-117.940000,33.800000,23.000000,2757.000000,734.000000,1811.000000,707.000000,2.800000,214300.000000 +-116.850000,34.260000,19.000000,5395.000000,1220.000000,981.000000,366.000000,2.609400,92400.000000 +-117.890000,33.760000,34.000000,1050.000000,210.000000,723.000000,201.000000,4.800000,192700.000000 +-118.290000,34.030000,27.000000,1084.000000,287.000000,1085.000000,279.000000,2.135000,119600.000000 +-118.120000,34.060000,35.000000,1729.000000,438.000000,1308.000000,412.000000,2.532100,197200.000000 +-121.410000,38.600000,16.000000,5407.000000,1467.000000,2523.000000,1265.000000,2.047100,104200.000000 +-120.620000,35.130000,26.000000,3971.000000,803.000000,1792.000000,723.000000,2.712800,209900.000000 +-118.180000,33.800000,42.000000,2301.000000,621.000000,2114.000000,561.000000,2.057900,132700.000000 +-117.510000,34.160000,2.000000,718.000000,98.000000,119.000000,50.000000,4.100000,315000.000000 +-118.160000,34.030000,40.000000,2201.000000,636.000000,2682.000000,595.000000,2.359000,143400.000000 +-118.170000,34.110000,39.000000,1758.000000,436.000000,892.000000,447.000000,3.640600,278900.000000 +-117.690000,33.650000,15.000000,5394.000000,748.000000,2383.000000,706.000000,7.561900,302000.000000 +-122.200000,37.770000,41.000000,1547.000000,415.000000,1024.000000,341.000000,2.056200,102000.000000 +-121.330000,37.960000,42.000000,1619.000000,340.000000,906.000000,339.000000,2.548800,80300.000000 +-121.840000,38.130000,33.000000,596.000000,105.000000,212.000000,94.000000,4.281300,81300.000000 +-117.760000,34.050000,36.000000,2910.000000,819.000000,3055.000000,782.000000,1.902900,98000.000000 +-122.430000,37.790000,52.000000,3219.000000,969.000000,1152.000000,830.000000,4.204200,500001.000000 +-122.320000,37.570000,33.000000,3384.000000,819.000000,2626.000000,793.000000,3.228500,234800.000000 +-118.160000,34.070000,42.000000,3836.000000,777.000000,2118.000000,754.000000,3.636400,254600.000000 +-124.090000,40.950000,18.000000,2250.000000,484.000000,1248.000000,472.000000,2.589300,99600.000000 +-121.990000,38.350000,45.000000,1778.000000,339.000000,839.000000,319.000000,2.465900,102900.000000 +-122.720000,38.420000,26.000000,3604.000000,734.000000,2605.000000,704.000000,3.096900,143800.000000 +-122.110000,37.660000,29.000000,2544.000000,643.000000,2332.000000,603.000000,3.209100,150000.000000 +-121.840000,36.620000,26.000000,32.000000,8.000000,27.000000,10.000000,2.225000,150000.000000 +-118.180000,34.120000,29.000000,2640.000000,737.000000,1795.000000,655.000000,2.369000,173400.000000 +-122.450000,38.270000,25.000000,5024.000000,881.000000,1994.000000,838.000000,4.223700,262300.000000 +-117.910000,33.650000,17.000000,1328.000000,377.000000,762.000000,344.000000,2.222200,276800.000000 +-116.470000,33.770000,26.000000,4300.000000,767.000000,1557.000000,669.000000,4.410700,122500.000000 
+-122.410000,37.730000,42.000000,2604.000000,573.000000,1703.000000,507.000000,3.423100,230200.000000 +-119.780000,36.800000,34.000000,2200.000000,493.000000,1243.000000,431.000000,1.851400,66500.000000 +-119.710000,34.360000,34.000000,1706.000000,276.000000,628.000000,243.000000,4.184200,364000.000000 +-118.360000,34.030000,40.000000,2323.000000,661.000000,1847.000000,614.000000,1.831600,113500.000000 +-121.890000,37.990000,4.000000,2171.000000,597.000000,928.000000,461.000000,4.101600,170500.000000 +-121.980000,37.330000,25.000000,3223.000000,612.000000,1529.000000,602.000000,5.121000,287600.000000 +-118.470000,34.250000,34.000000,1732.000000,399.000000,1120.000000,401.000000,4.149200,195700.000000 +-117.260000,32.990000,16.000000,2127.000000,512.000000,1532.000000,499.000000,2.734800,231300.000000 +-118.090000,34.070000,45.000000,726.000000,146.000000,568.000000,160.000000,3.034700,183200.000000 +-118.450000,37.250000,20.000000,1468.000000,283.000000,721.000000,270.000000,3.081700,118800.000000 +-117.780000,33.540000,29.000000,1421.000000,462.000000,520.000000,339.000000,2.296900,450000.000000 +-117.460000,33.900000,10.000000,9738.000000,2130.000000,4936.000000,1840.000000,3.318700,144800.000000 +-121.850000,39.740000,39.000000,1139.000000,265.000000,623.000000,264.000000,2.283300,85800.000000 +-117.290000,34.110000,48.000000,1498.000000,448.000000,1586.000000,455.000000,1.168700,70800.000000 +-121.200000,37.790000,36.000000,866.000000,160.000000,502.000000,149.000000,2.479800,101500.000000 +-118.430000,33.960000,20.000000,1901.000000,270.000000,704.000000,254.000000,8.781900,500001.000000 +-122.110000,37.400000,15.000000,255.000000,63.000000,138.000000,74.000000,4.659100,175000.000000 +-119.060000,36.080000,19.000000,2554.000000,443.000000,1301.000000,419.000000,4.185600,72100.000000 +-118.370000,33.880000,20.000000,2439.000000,474.000000,1219.000000,497.000000,5.961900,335900.000000 +-120.790000,38.430000,40.000000,1391.000000,246.000000,546.000000,214.000000,3.910700,129800.000000 +-122.200000,39.930000,9.000000,1296.000000,287.000000,768.000000,260.000000,1.919100,54400.000000 +-122.230000,37.760000,52.000000,1049.000000,185.000000,374.000000,176.000000,4.145800,248500.000000 +-121.990000,38.530000,6.000000,4598.000000,834.000000,2561.000000,812.000000,3.418600,127300.000000 +-118.460000,34.020000,39.000000,3599.000000,776.000000,1569.000000,763.000000,5.257100,405400.000000 +-115.600000,33.040000,31.000000,314.000000,61.000000,152.000000,56.000000,3.347200,91700.000000 +-117.220000,32.780000,22.000000,2020.000000,466.000000,1010.000000,429.000000,3.452700,175000.000000 +-118.630000,34.220000,18.000000,1376.000000,225.000000,670.000000,205.000000,6.514600,277600.000000 +-124.140000,40.720000,18.000000,2581.000000,499.000000,1375.000000,503.000000,2.844600,100500.000000 +-116.430000,33.780000,17.000000,4293.000000,712.000000,1091.000000,464.000000,6.143700,232100.000000 +-117.890000,33.730000,32.000000,728.000000,134.000000,837.000000,135.000000,4.076900,163900.000000 +-117.700000,33.530000,5.000000,6698.000000,1254.000000,2834.000000,1139.000000,5.908800,288500.000000 +-122.470000,37.850000,19.000000,1926.000000,593.000000,881.000000,546.000000,2.914500,140400.000000 +-120.630000,38.730000,11.000000,4577.000000,836.000000,1944.000000,700.000000,4.067500,140200.000000 +-118.590000,34.200000,18.000000,847.000000,185.000000,733.000000,178.000000,5.214900,201900.000000 +-118.360000,33.930000,40.000000,1625.000000,500.000000,2036.000000,476.000000,2.629800,156500.000000 
+-118.410000,33.850000,16.000000,6123.000000,1989.000000,2853.000000,1789.000000,4.425000,336400.000000 +-117.190000,32.770000,16.000000,3273.000000,670.000000,1305.000000,671.000000,4.136800,151000.000000 +-117.780000,33.860000,16.000000,3471.000000,708.000000,1769.000000,691.000000,4.106400,246100.000000 +-121.860000,39.740000,13.000000,3494.000000,843.000000,1571.000000,784.000000,1.101900,120200.000000 +-119.040000,35.310000,11.000000,2161.000000,371.000000,1267.000000,388.000000,4.195700,92700.000000 +-118.260000,34.020000,40.000000,1259.000000,362.000000,1499.000000,327.000000,1.838200,126400.000000 +-117.250000,34.490000,4.000000,2372.000000,361.000000,1017.000000,322.000000,5.111200,170900.000000 +-120.040000,39.270000,24.000000,2237.000000,491.000000,264.000000,95.000000,4.136400,154500.000000 +-121.420000,38.540000,29.000000,2358.000000,493.000000,1071.000000,470.000000,2.925000,94300.000000 +-118.150000,34.200000,46.000000,1505.000000,261.000000,857.000000,269.000000,4.500000,184200.000000 +-118.080000,33.880000,26.000000,1507.000000,270.000000,931.000000,275.000000,5.164500,244900.000000 +-122.430000,37.800000,52.000000,2696.000000,572.000000,925.000000,552.000000,5.036500,500000.000000 +-115.490000,32.670000,24.000000,1266.000000,275.000000,1083.000000,298.000000,1.482800,73100.000000 +-120.980000,38.340000,27.000000,3471.000000,653.000000,1793.000000,600.000000,3.550800,99100.000000 +-116.140000,34.450000,12.000000,8796.000000,1721.000000,11139.000000,1680.000000,2.261200,137500.000000 +-117.110000,32.730000,27.000000,3160.000000,627.000000,1628.000000,612.000000,3.886400,132600.000000 +-118.470000,34.000000,38.000000,1235.000000,390.000000,891.000000,376.000000,2.714300,287500.000000 +-121.420000,37.740000,19.000000,1393.000000,367.000000,915.000000,355.000000,1.195700,103100.000000 +-122.250000,37.820000,52.000000,2474.000000,403.000000,1104.000000,398.000000,5.883000,340700.000000 +-118.050000,33.720000,22.000000,5416.000000,1271.000000,2260.000000,1184.000000,3.803800,174500.000000 +-122.020000,36.970000,44.000000,594.000000,169.000000,325.000000,139.000000,1.155200,250000.000000 +-115.570000,32.800000,33.000000,1192.000000,213.000000,1066.000000,211.000000,4.571400,68600.000000 +-121.290000,37.800000,6.000000,110.000000,26.000000,69.000000,24.000000,3.729200,475000.000000 +-122.080000,37.880000,26.000000,2947.000000,647.000000,825.000000,626.000000,2.933000,85000.000000 +-121.770000,37.650000,16.000000,4290.000000,554.000000,1952.000000,576.000000,7.358800,327500.000000 +-119.810000,36.720000,46.000000,1414.000000,268.000000,902.000000,243.000000,1.583300,56700.000000 +-118.350000,33.970000,26.000000,1725.000000,431.000000,1130.000000,404.000000,3.270800,128100.000000 +-118.200000,34.190000,38.000000,2176.000000,266.000000,798.000000,243.000000,15.000100,500001.000000 +-118.790000,34.140000,7.000000,3003.000000,504.000000,1143.000000,466.000000,5.854800,500001.000000 +-118.120000,34.160000,30.000000,1762.000000,416.000000,940.000000,398.000000,2.863100,188600.000000 +-118.220000,33.960000,36.000000,1542.000000,458.000000,1711.000000,468.000000,1.902800,164200.000000 +-121.300000,37.990000,38.000000,2375.000000,494.000000,1167.000000,471.000000,2.667300,87500.000000 +-121.840000,36.610000,21.000000,2876.000000,802.000000,2487.000000,795.000000,2.200700,112800.000000 +-117.900000,34.070000,36.000000,1009.000000,164.000000,466.000000,149.000000,5.851900,249400.000000 +-120.400000,34.860000,11.000000,1633.000000,348.000000,504.000000,327.000000,2.050800,275000.000000 
+-117.950000,33.800000,32.000000,1219.000000,192.000000,634.000000,197.000000,5.237000,215700.000000 +-118.300000,33.940000,36.000000,2041.000000,531.000000,1390.000000,464.000000,2.011400,99300.000000 +-121.600000,37.900000,5.000000,14684.000000,2252.000000,4276.000000,1722.000000,6.905100,340900.000000 +-122.410000,37.590000,34.000000,3931.000000,622.000000,1717.000000,621.000000,6.294600,450000.000000 +-118.450000,34.050000,28.000000,801.000000,399.000000,936.000000,406.000000,2.187500,181300.000000 +-118.180000,33.860000,43.000000,2752.000000,645.000000,1674.000000,614.000000,3.671900,161300.000000 +-121.780000,40.120000,14.000000,388.000000,108.000000,35.000000,17.000000,6.135900,106300.000000 +-118.210000,34.040000,47.000000,1325.000000,393.000000,1557.000000,352.000000,2.800000,148400.000000 +-118.380000,34.090000,28.000000,4001.000000,1352.000000,1799.000000,1220.000000,2.578400,272900.000000 +-117.180000,32.840000,32.000000,1351.000000,237.000000,823.000000,269.000000,4.276800,167800.000000 +-117.300000,32.850000,28.000000,2334.000000,694.000000,770.000000,552.000000,3.132400,500001.000000 +-119.020000,35.420000,42.000000,2271.000000,458.000000,1124.000000,447.000000,2.758300,64900.000000 +-124.010000,40.970000,21.000000,1513.000000,319.000000,943.000000,301.000000,3.538000,102700.000000 +-118.100000,34.130000,44.000000,1745.000000,237.000000,693.000000,248.000000,9.791200,500001.000000 +-119.810000,36.770000,49.000000,1749.000000,314.000000,705.000000,300.000000,3.150000,72200.000000 +-122.550000,38.000000,18.000000,3119.000000,803.000000,1395.000000,722.000000,3.926500,301100.000000 +-117.620000,34.080000,30.000000,1372.000000,235.000000,1047.000000,225.000000,3.159700,116300.000000 +-121.290000,37.960000,52.000000,888.000000,324.000000,630.000000,258.000000,1.241100,112500.000000 +-119.090000,34.240000,17.000000,10214.000000,1589.000000,3409.000000,1327.000000,5.380600,452100.000000 +-117.200000,32.770000,30.000000,156.000000,45.000000,77.000000,40.000000,3.267900,137500.000000 +-122.270000,37.450000,41.000000,830.000000,136.000000,353.000000,153.000000,6.382400,500001.000000 +-117.310000,34.410000,14.000000,3019.000000,643.000000,1639.000000,582.000000,1.528800,103400.000000 +-118.280000,33.830000,18.000000,5923.000000,1409.000000,3887.000000,1322.000000,3.471200,194400.000000 +-118.270000,34.050000,26.000000,1164.000000,674.000000,1685.000000,541.000000,1.572700,225000.000000 +-118.170000,34.090000,45.000000,1327.000000,271.000000,1069.000000,284.000000,3.397700,153800.000000 +-122.540000,37.740000,42.000000,2006.000000,415.000000,1230.000000,435.000000,4.178600,271100.000000 +-118.280000,33.770000,47.000000,307.000000,69.000000,374.000000,65.000000,2.906300,146900.000000 +-118.040000,33.720000,24.000000,7141.000000,1330.000000,3418.000000,1268.000000,4.664900,237800.000000 +-117.390000,33.920000,25.000000,2886.000000,583.000000,2327.000000,577.000000,2.385100,113700.000000 +-119.010000,35.370000,35.000000,120.000000,35.000000,477.000000,41.000000,1.912500,47500.000000 +-122.410000,37.740000,34.000000,1403.000000,262.000000,839.000000,255.000000,4.703100,255200.000000 +-118.290000,33.910000,41.000000,2475.000000,532.000000,1416.000000,470.000000,3.837200,156400.000000 +-117.250000,33.220000,19.000000,2167.000000,443.000000,1654.000000,435.000000,3.500000,135800.000000 +-117.650000,33.460000,19.000000,7034.000000,1139.000000,2824.000000,1068.000000,6.087300,277300.000000 +-121.980000,37.800000,17.000000,3354.000000,422.000000,1457.000000,425.000000,7.647300,345800.000000 
+-118.050000,33.840000,21.000000,4890.000000,653.000000,2295.000000,654.000000,6.983000,329700.000000 +-122.030000,37.270000,25.000000,4460.000000,553.000000,1608.000000,561.000000,10.795800,500001.000000 +-120.520000,35.240000,5.000000,4413.000000,804.000000,2003.000000,725.000000,5.026700,253300.000000 +-117.950000,34.140000,33.000000,1943.000000,440.000000,1526.000000,353.000000,3.038000,137500.000000 +-118.160000,34.690000,35.000000,3114.000000,583.000000,1974.000000,545.000000,3.902800,126800.000000 +-121.480000,39.100000,19.000000,2043.000000,421.000000,1018.000000,390.000000,2.595200,92400.000000 +-117.530000,33.940000,21.000000,5675.000000,935.000000,2834.000000,865.000000,4.226300,203200.000000 +-122.290000,37.910000,40.000000,2085.000000,329.000000,796.000000,339.000000,5.535700,273700.000000 +-121.780000,38.690000,31.000000,2547.000000,535.000000,1579.000000,509.000000,2.677400,95800.000000 +-117.970000,33.840000,34.000000,874.000000,153.000000,549.000000,153.000000,4.866700,186800.000000 +-122.260000,37.860000,52.000000,3774.000000,744.000000,1461.000000,679.000000,2.940500,289500.000000 +-117.960000,33.690000,20.000000,3123.000000,441.000000,1319.000000,432.000000,6.091000,290400.000000 +-118.390000,34.190000,36.000000,904.000000,191.000000,627.000000,191.000000,2.416700,192900.000000 +-122.480000,37.510000,22.000000,1564.000000,278.000000,761.000000,270.000000,4.757800,318500.000000 +-118.600000,34.210000,19.000000,2581.000000,857.000000,2004.000000,784.000000,2.615900,182300.000000 +-122.350000,40.560000,12.000000,3900.000000,863.000000,2145.000000,864.000000,1.988100,85200.000000 +-118.240000,34.030000,52.000000,142.000000,47.000000,137.000000,45.000000,1.833300,312500.000000 +-117.610000,34.080000,20.000000,3550.000000,736.000000,2229.000000,681.000000,3.019900,128800.000000 +-121.030000,37.670000,24.000000,2162.000000,459.000000,1468.000000,441.000000,3.185700,98300.000000 +-119.690000,36.810000,15.000000,2892.000000,496.000000,1634.000000,501.000000,4.493400,88000.000000 +-118.270000,34.060000,26.000000,513.000000,338.000000,1204.000000,321.000000,1.490400,275000.000000 +-118.260000,34.070000,30.000000,929.000000,238.000000,763.000000,214.000000,2.522700,187500.000000 +-120.910000,38.980000,13.000000,7689.000000,1415.000000,3264.000000,1198.000000,3.653000,146800.000000 +-117.140000,32.710000,32.000000,719.000000,251.000000,894.000000,208.000000,1.845600,103100.000000 +-117.200000,32.820000,35.000000,2772.000000,537.000000,1392.000000,521.000000,3.337000,172300.000000 +-123.800000,39.440000,52.000000,1533.000000,336.000000,754.000000,340.000000,1.921300,95000.000000 +-122.330000,37.980000,32.000000,1967.000000,348.000000,1144.000000,364.000000,4.413500,150100.000000 +-117.370000,33.970000,38.000000,1156.000000,241.000000,877.000000,200.000000,1.451400,79900.000000 +-122.040000,37.300000,26.000000,1714.000000,270.000000,778.000000,262.000000,6.075000,417000.000000 +-118.210000,33.980000,35.000000,1705.000000,562.000000,2212.000000,539.000000,2.325000,161500.000000 +-117.320000,34.110000,38.000000,1462.000000,337.000000,1208.000000,324.000000,2.260400,68100.000000 +-118.120000,34.080000,49.000000,1782.000000,374.000000,1010.000000,367.000000,3.158300,268200.000000 +-121.560000,39.690000,8.000000,2836.000000,522.000000,1163.000000,512.000000,3.130000,168300.000000 +-117.940000,33.800000,28.000000,2914.000000,489.000000,1500.000000,499.000000,4.942900,254800.000000 +-117.980000,33.850000,23.000000,2089.000000,377.000000,1085.000000,362.000000,4.765000,181500.000000 
+-122.850000,38.770000,18.000000,2856.000000,513.000000,1027.000000,405.000000,4.695300,241700.000000 +-116.240000,33.760000,9.000000,1961.000000,595.000000,966.000000,275.000000,3.812500,96700.000000 +-122.320000,37.960000,25.000000,1728.000000,403.000000,934.000000,412.000000,3.375000,133700.000000 +-118.950000,35.410000,21.000000,3999.000000,727.000000,1889.000000,688.000000,3.875000,99500.000000 +-122.420000,37.670000,42.000000,2274.000000,429.000000,1255.000000,397.000000,5.120500,226300.000000 +-118.250000,33.980000,39.000000,1553.000000,461.000000,2271.000000,437.000000,1.737800,121900.000000 +-118.400000,34.220000,36.000000,2557.000000,540.000000,1556.000000,491.000000,3.659100,183800.000000 +-120.560000,38.390000,20.000000,1326.000000,307.000000,563.000000,237.000000,2.666700,86600.000000 +-121.630000,39.100000,22.000000,3585.000000,548.000000,1757.000000,577.000000,4.174000,100100.000000 +-122.200000,37.470000,44.000000,1927.000000,332.000000,846.000000,362.000000,4.208300,278200.000000 +-122.110000,37.110000,46.000000,1993.000000,404.000000,850.000000,327.000000,5.208000,206800.000000 +-118.250000,33.840000,19.000000,1731.000000,420.000000,1032.000000,364.000000,3.812500,208100.000000 +-118.350000,34.180000,46.000000,2711.000000,491.000000,1277.000000,490.000000,4.282000,224700.000000 +-118.140000,33.860000,44.000000,1436.000000,257.000000,745.000000,233.000000,4.625000,213400.000000 +-122.260000,38.280000,24.000000,2831.000000,502.000000,1462.000000,503.000000,4.500000,158300.000000 +-120.240000,37.960000,34.000000,1747.000000,395.000000,935.000000,362.000000,1.625000,79400.000000 +-121.590000,39.740000,17.000000,1646.000000,330.000000,750.000000,344.000000,2.379800,83800.000000 +-122.720000,40.170000,16.000000,396.000000,78.000000,188.000000,72.000000,1.388900,87500.000000 +-118.480000,34.310000,31.000000,1091.000000,256.000000,892.000000,238.000000,3.000000,172400.000000 +-121.100000,38.940000,42.000000,410.000000,117.000000,706.000000,112.000000,1.017900,125000.000000 +-118.100000,33.970000,35.000000,2426.000000,529.000000,2010.000000,514.000000,2.992200,163500.000000 +-120.970000,37.670000,16.000000,1499.000000,250.000000,1292.000000,271.000000,4.385100,117300.000000 +-121.910000,36.970000,19.000000,4920.000000,1092.000000,1807.000000,922.000000,3.511200,231900.000000 +-121.470000,37.580000,14.000000,1594.000000,292.000000,887.000000,287.000000,4.662500,294000.000000 +-121.930000,37.720000,26.000000,3816.000000,637.000000,1935.000000,642.000000,4.469700,221300.000000 +-117.830000,33.790000,29.000000,1454.000000,236.000000,724.000000,262.000000,4.854200,218100.000000 +-117.890000,33.730000,33.000000,1308.000000,375.000000,2175.000000,347.000000,3.082400,177400.000000 +-117.840000,34.000000,26.000000,797.000000,117.000000,383.000000,114.000000,6.875800,253800.000000 +-116.860000,34.240000,19.000000,5411.000000,1042.000000,441.000000,185.000000,3.132400,132000.000000 +-121.280000,38.740000,33.000000,4384.000000,778.000000,1775.000000,789.000000,4.050000,134700.000000 +-119.630000,36.640000,33.000000,1036.000000,181.000000,620.000000,174.000000,3.410700,110400.000000 +-121.060000,38.250000,13.000000,651.000000,102.000000,301.000000,104.000000,3.652800,200000.000000 +-122.010000,37.400000,24.000000,1297.000000,297.000000,441.000000,282.000000,3.143900,47500.000000 +-117.220000,33.310000,12.000000,2924.000000,433.000000,1193.000000,394.000000,6.247500,331300.000000 +-116.310000,33.730000,19.000000,12467.000000,2508.000000,4086.000000,1761.000000,3.284600,131900.000000 
+-121.290000,38.020000,12.000000,2006.000000,426.000000,1849.000000,396.000000,2.543700,99000.000000 +-121.000000,37.640000,52.000000,530.000000,177.000000,325.000000,158.000000,1.187500,90600.000000 +-121.080000,39.210000,17.000000,3033.000000,590.000000,1319.000000,583.000000,2.481100,111800.000000 +-121.880000,37.990000,16.000000,3787.000000,515.000000,1606.000000,507.000000,5.567600,174200.000000 +-117.180000,32.740000,20.000000,1165.000000,269.000000,459.000000,244.000000,3.175000,191700.000000 +-117.200000,32.850000,22.000000,3501.000000,631.000000,1297.000000,581.000000,4.789100,295300.000000 +-117.160000,33.920000,12.000000,3236.000000,502.000000,1610.000000,502.000000,4.756800,143500.000000 +-118.350000,34.050000,44.000000,1856.000000,493.000000,1374.000000,469.000000,2.098400,158000.000000 +-119.050000,36.060000,23.000000,2344.000000,407.000000,1184.000000,406.000000,3.162500,70600.000000 +-121.150000,38.690000,52.000000,240.000000,44.000000,6675.000000,29.000000,6.135900,225000.000000 +-123.160000,39.130000,33.000000,1320.000000,303.000000,1048.000000,303.000000,1.781300,94700.000000 +-121.360000,38.590000,32.000000,3303.000000,480.000000,1185.000000,436.000000,5.050800,225700.000000 +-118.280000,33.730000,52.000000,2085.000000,588.000000,1767.000000,516.000000,2.193500,243200.000000 +-118.360000,33.890000,27.000000,2837.000000,684.000000,2141.000000,648.000000,3.132500,215000.000000 +-121.240000,38.630000,4.000000,11021.000000,1565.000000,3857.000000,1494.000000,7.258200,273200.000000 +-117.690000,33.550000,3.000000,1618.000000,266.000000,710.000000,246.000000,6.074300,274300.000000 +-118.460000,34.270000,28.000000,1865.000000,463.000000,1182.000000,440.000000,2.619300,172300.000000 +-122.280000,37.860000,52.000000,3007.000000,691.000000,1582.000000,636.000000,2.565200,157700.000000 +-118.280000,33.940000,32.000000,1381.000000,375.000000,1268.000000,354.000000,1.105100,94200.000000 +-122.180000,37.730000,42.000000,909.000000,215.000000,646.000000,198.000000,2.906300,80000.000000 +-122.870000,38.390000,34.000000,1138.000000,205.000000,541.000000,180.000000,4.514700,271400.000000 +-119.750000,34.440000,28.000000,1080.000000,298.000000,524.000000,251.000000,1.843200,327300.000000 +-117.210000,32.850000,15.000000,2593.000000,521.000000,901.000000,456.000000,4.206500,277800.000000 +-118.200000,33.820000,34.000000,2807.000000,768.000000,2217.000000,744.000000,2.428600,204800.000000 +-121.880000,37.320000,40.000000,1331.000000,374.000000,1276.000000,389.000000,2.754600,172500.000000 +-118.460000,34.140000,34.000000,5264.000000,771.000000,1738.000000,753.000000,8.811500,500001.000000 +-118.290000,34.090000,35.000000,2198.000000,998.000000,3441.000000,912.000000,2.046700,158300.000000 +-117.880000,34.110000,30.000000,3082.000000,602.000000,2008.000000,619.000000,4.141100,182700.000000 +-117.680000,33.650000,6.000000,10395.000000,1915.000000,4783.000000,1811.000000,5.928000,239900.000000 +-120.350000,39.340000,29.000000,1986.000000,474.000000,337.000000,100.000000,4.027800,95800.000000 +-118.020000,33.820000,19.000000,2485.000000,437.000000,1286.000000,431.000000,4.746600,258300.000000 +-118.350000,33.920000,24.000000,2728.000000,845.000000,2023.000000,773.000000,2.750000,239700.000000 +-122.340000,37.970000,19.000000,2237.000000,580.000000,1438.000000,551.000000,2.338200,120700.000000 +-118.330000,34.020000,46.000000,1528.000000,391.000000,933.000000,366.000000,2.197900,125700.000000 +-118.400000,33.900000,37.000000,2458.000000,400.000000,920.000000,375.000000,7.892400,500001.000000 
+-117.970000,33.730000,18.000000,3698.000000,574.000000,2046.000000,614.000000,6.298400,269800.000000 +-121.320000,38.570000,15.000000,3369.000000,499.000000,1733.000000,470.000000,5.310000,127500.000000 +-117.940000,33.880000,46.000000,1747.000000,312.000000,770.000000,296.000000,5.421700,256000.000000 +-118.540000,34.150000,26.000000,10111.000000,1295.000000,3599.000000,1257.000000,10.229200,500001.000000 +-117.860000,33.830000,23.000000,2377.000000,403.000000,1101.000000,408.000000,5.343900,227100.000000 +-119.950000,36.800000,30.000000,1233.000000,214.000000,620.000000,199.000000,3.429700,112500.000000 +-121.420000,36.860000,41.000000,440.000000,106.000000,389.000000,94.000000,2.681800,225000.000000 +-117.090000,32.690000,34.000000,1469.000000,267.000000,1031.000000,267.000000,3.458300,112700.000000 +-119.200000,34.150000,27.000000,2076.000000,681.000000,1904.000000,647.000000,1.477300,160800.000000 +-117.170000,32.760000,45.000000,3149.000000,639.000000,1160.000000,661.000000,2.726600,354200.000000 +-117.900000,33.910000,36.000000,1376.000000,257.000000,687.000000,221.000000,3.540300,195400.000000 +-122.030000,37.330000,23.000000,4221.000000,671.000000,1782.000000,641.000000,7.486300,412300.000000 +-118.180000,33.900000,31.000000,2536.000000,603.000000,2625.000000,576.000000,3.090900,150900.000000 +-119.050000,35.320000,11.000000,7035.000000,1455.000000,3525.000000,1387.000000,3.482700,93600.000000 +-119.670000,34.470000,35.000000,2700.000000,422.000000,1995.000000,383.000000,4.975700,500001.000000 +-118.350000,34.170000,44.000000,2572.000000,613.000000,1280.000000,570.000000,3.558300,232000.000000 +-118.300000,33.870000,31.000000,1398.000000,261.000000,823.000000,263.000000,5.064100,234900.000000 +-118.250000,34.160000,52.000000,2477.000000,385.000000,993.000000,371.000000,4.913500,368100.000000 +-117.910000,33.820000,29.000000,1444.000000,326.000000,1038.000000,271.000000,2.384300,182900.000000 +-118.360000,33.980000,40.000000,1113.000000,234.000000,584.000000,231.000000,3.092700,316000.000000 +-121.290000,37.990000,45.000000,965.000000,198.000000,498.000000,195.000000,1.694400,75200.000000 +-122.740000,38.460000,9.000000,2268.000000,594.000000,1311.000000,585.000000,2.660700,91500.000000 +-118.290000,33.930000,31.000000,3894.000000,1017.000000,3590.000000,962.000000,2.043700,137200.000000 +-122.050000,37.310000,25.000000,4601.000000,696.000000,2003.000000,666.000000,8.072700,455500.000000 +-117.080000,32.570000,18.000000,2203.000000,544.000000,1943.000000,497.000000,2.250000,103200.000000 +-122.040000,37.970000,10.000000,974.000000,316.000000,631.000000,286.000000,2.315200,140600.000000 +-120.310000,37.110000,38.000000,1696.000000,301.000000,985.000000,278.000000,2.405400,112500.000000 +-117.270000,34.100000,9.000000,3904.000000,1042.000000,3688.000000,896.000000,1.802200,78000.000000 +-118.260000,33.950000,44.000000,1481.000000,329.000000,999.000000,315.000000,1.514700,94600.000000 +-118.110000,34.160000,52.000000,1353.000000,274.000000,852.000000,306.000000,3.458300,239900.000000 +-118.340000,33.990000,34.000000,397.000000,132.000000,250.000000,121.000000,1.675000,166700.000000 +-117.890000,33.600000,40.000000,1639.000000,352.000000,498.000000,278.000000,5.633600,500001.000000 +-119.720000,34.420000,52.000000,1759.000000,387.000000,980.000000,402.000000,4.012500,261000.000000 +-118.440000,34.180000,36.000000,2077.000000,496.000000,1206.000000,528.000000,2.232600,221000.000000 +-122.080000,37.970000,9.000000,2643.000000,439.000000,1105.000000,467.000000,6.657900,245200.000000 
+-122.450000,37.760000,50.000000,2518.000000,507.000000,979.000000,516.000000,4.691200,500001.000000 +-118.220000,33.940000,41.000000,928.000000,249.000000,1108.000000,236.000000,3.432300,144600.000000 +-118.330000,34.070000,52.000000,1482.000000,171.000000,531.000000,161.000000,15.000100,500001.000000 +-117.660000,34.050000,14.000000,2644.000000,525.000000,2021.000000,511.000000,3.646700,147500.000000 +-120.940000,35.420000,18.000000,3418.000000,686.000000,970.000000,453.000000,3.773800,279400.000000 +-117.300000,34.050000,6.000000,2155.000000,544.000000,1039.000000,391.000000,1.667500,95800.000000 +-117.920000,33.640000,5.000000,949.000000,287.000000,497.000000,244.000000,2.750000,225000.000000 +-118.190000,33.990000,37.000000,2073.000000,614.000000,2544.000000,598.000000,2.905400,156300.000000 +-122.080000,37.940000,44.000000,2185.000000,357.000000,943.000000,366.000000,4.725000,232100.000000 +-117.720000,34.090000,33.000000,4979.000000,934.000000,2575.000000,874.000000,3.795800,152500.000000 +-118.190000,34.080000,35.000000,1554.000000,381.000000,1487.000000,374.000000,1.903800,139500.000000 +-122.240000,38.110000,42.000000,1743.000000,388.000000,889.000000,341.000000,2.324100,99200.000000 +-121.810000,37.230000,17.000000,2319.000000,324.000000,1076.000000,338.000000,6.466400,278300.000000 +-118.340000,34.180000,45.000000,3046.000000,633.000000,1448.000000,599.000000,3.240000,226900.000000 +-120.570000,38.200000,13.000000,4110.000000,847.000000,1796.000000,706.000000,2.641700,122300.000000 +-120.450000,34.640000,30.000000,2330.000000,422.000000,1255.000000,449.000000,3.851200,134600.000000 +-118.250000,33.950000,25.000000,764.000000,200.000000,801.000000,220.000000,1.138400,100000.000000 +-117.950000,33.900000,15.000000,3057.000000,479.000000,1679.000000,498.000000,6.842900,372600.000000 +-117.200000,33.120000,18.000000,4372.000000,736.000000,1473.000000,675.000000,5.119400,247800.000000 +-117.300000,34.530000,38.000000,1643.000000,489.000000,1196.000000,406.000000,1.227500,64100.000000 +-121.870000,37.270000,18.000000,3561.000000,560.000000,1753.000000,553.000000,5.029200,269400.000000 +-118.280000,34.030000,40.000000,2118.000000,796.000000,2195.000000,658.000000,1.797600,164600.000000 +-119.770000,36.440000,26.000000,1727.000000,289.000000,802.000000,259.000000,3.208300,75000.000000 +-122.380000,40.090000,16.000000,2077.000000,388.000000,1155.000000,389.000000,3.136100,84800.000000 +-118.900000,34.180000,14.000000,2627.000000,328.000000,1121.000000,328.000000,7.050400,333800.000000 +-121.010000,37.250000,16.000000,2216.000000,458.000000,1135.000000,424.000000,2.731600,97500.000000 +-116.980000,32.720000,15.000000,4209.000000,680.000000,1914.000000,641.000000,4.513500,158300.000000 +-119.980000,38.920000,28.000000,1408.000000,312.000000,522.000000,221.000000,2.070800,89600.000000 +-121.930000,37.720000,26.000000,2806.000000,459.000000,1453.000000,444.000000,4.910700,213800.000000 +-117.640000,34.090000,34.000000,2839.000000,659.000000,1822.000000,631.000000,3.050000,121300.000000 +-119.850000,37.390000,14.000000,2744.000000,555.000000,1153.000000,474.000000,2.753000,111100.000000 +-118.200000,33.980000,43.000000,1091.000000,320.000000,1418.000000,316.000000,2.152200,159400.000000 +-120.830000,37.070000,16.000000,3736.000000,761.000000,1942.000000,730.000000,2.559800,120200.000000 +-117.070000,32.580000,25.000000,1607.000000,280.000000,899.000000,260.000000,3.819400,134400.000000 +-119.050000,35.340000,14.000000,3580.000000,984.000000,1933.000000,912.000000,2.663700,175000.000000 
+-117.570000,34.150000,3.000000,12806.000000,2219.000000,4249.000000,1499.000000,5.485000,343100.000000 +-121.370000,38.670000,36.000000,1786.000000,338.000000,974.000000,319.000000,2.555000,72700.000000 +-122.180000,37.700000,36.000000,2639.000000,533.000000,1209.000000,519.000000,4.026800,205500.000000 +-116.940000,32.810000,8.000000,2517.000000,632.000000,1686.000000,613.000000,2.136000,143500.000000 +-121.210000,39.240000,7.000000,4194.000000,673.000000,1355.000000,566.000000,4.370200,226100.000000 +-122.060000,37.710000,36.000000,3541.000000,570.000000,1478.000000,529.000000,4.635000,248600.000000 +-118.440000,34.190000,11.000000,2891.000000,951.000000,2166.000000,768.000000,2.891000,178100.000000 +-122.360000,37.720000,10.000000,479.000000,125.000000,355.000000,108.000000,2.708300,180400.000000 +-121.320000,38.620000,29.000000,2430.000000,448.000000,1087.000000,394.000000,3.086400,177900.000000 +-118.270000,33.940000,43.000000,1309.000000,344.000000,1182.000000,340.000000,1.662500,88700.000000 +-122.040000,37.970000,39.000000,1323.000000,245.000000,705.000000,261.000000,3.196800,151000.000000 +-118.210000,33.960000,39.000000,2050.000000,529.000000,1959.000000,485.000000,2.138900,168900.000000 +-117.200000,33.580000,2.000000,30450.000000,5033.000000,9419.000000,3197.000000,4.593600,174300.000000 +-120.500000,37.370000,18.000000,8606.000000,1678.000000,5303.000000,1644.000000,2.401200,79700.000000 +-118.170000,33.980000,36.000000,627.000000,177.000000,834.000000,175.000000,2.984400,163600.000000 +-117.880000,33.830000,22.000000,3522.000000,543.000000,1706.000000,524.000000,6.468500,241200.000000 +-118.290000,33.990000,46.000000,2198.000000,530.000000,2067.000000,497.000000,2.054200,103400.000000 +-117.420000,34.100000,18.000000,3977.000000,809.000000,2231.000000,742.000000,4.139900,115400.000000 +-116.960000,32.710000,18.000000,2413.000000,533.000000,1129.000000,551.000000,2.456700,155000.000000 +-118.360000,34.070000,52.000000,2046.000000,451.000000,944.000000,435.000000,3.426500,456900.000000 +-122.260000,38.330000,34.000000,2048.000000,316.000000,780.000000,267.000000,5.815000,339200.000000 +-120.510000,37.290000,20.000000,4927.000000,1042.000000,4205.000000,1009.000000,1.767900,79800.000000 +-117.940000,33.620000,25.000000,1188.000000,264.000000,569.000000,249.000000,3.660700,500001.000000 +-118.270000,33.940000,30.000000,1041.000000,275.000000,877.000000,270.000000,1.526800,91600.000000 +-117.930000,34.090000,37.000000,1185.000000,225.000000,769.000000,235.000000,4.462500,154200.000000 +-118.220000,33.920000,43.000000,1195.000000,256.000000,1251.000000,262.000000,3.453900,125000.000000 +-121.840000,37.320000,16.000000,1866.000000,364.000000,1835.000000,412.000000,5.336300,212800.000000 +-122.030000,37.830000,24.000000,5948.000000,738.000000,1997.000000,710.000000,9.870800,500001.000000 +-122.460000,38.290000,21.000000,2423.000000,560.000000,1098.000000,503.000000,2.364000,173300.000000 +-118.320000,34.010000,50.000000,1842.000000,377.000000,817.000000,341.000000,3.154800,157700.000000 +-118.020000,33.950000,35.000000,2085.000000,400.000000,1112.000000,391.000000,3.488600,173900.000000 +-118.310000,34.190000,13.000000,3801.000000,1116.000000,1986.000000,1078.000000,2.087500,222700.000000 +-117.800000,34.100000,13.000000,2996.000000,495.000000,1187.000000,464.000000,6.245600,161700.000000 +-118.460000,34.260000,33.000000,1358.000000,247.000000,738.000000,235.000000,5.094700,210300.000000 
+-121.940000,37.340000,41.000000,2151.000000,473.000000,1092.000000,469.000000,3.732100,250000.000000 +-117.640000,33.870000,2.000000,17470.000000,2727.000000,5964.000000,1985.000000,6.230800,257900.000000 +-117.900000,34.110000,23.000000,4776.000000,1316.000000,4797.000000,1187.000000,2.166700,142600.000000 +-118.340000,34.110000,51.000000,937.000000,348.000000,527.000000,333.000000,4.357100,468800.000000 +-122.310000,37.560000,45.000000,1685.000000,321.000000,815.000000,314.000000,4.295500,309700.000000 +-118.360000,34.210000,41.000000,337.000000,65.000000,198.000000,50.000000,1.892900,152900.000000 +-122.450000,37.710000,45.000000,2253.000000,431.000000,1382.000000,392.000000,4.256200,221600.000000 +-118.680000,34.130000,9.000000,11251.000000,1594.000000,3029.000000,1227.000000,6.727300,500001.000000 +-119.640000,36.850000,15.000000,2397.000000,353.000000,1258.000000,347.000000,4.990400,157300.000000 +-122.160000,37.760000,45.000000,2299.000000,514.000000,1437.000000,484.000000,2.512200,95500.000000 +-117.990000,33.670000,19.000000,3808.000000,790.000000,1776.000000,756.000000,4.625000,282200.000000 +-121.830000,37.400000,27.000000,1145.000000,150.000000,492.000000,160.000000,5.716000,348300.000000 +-118.190000,35.050000,14.000000,2992.000000,573.000000,1631.000000,526.000000,3.745200,83200.000000 +-118.030000,33.770000,24.000000,3810.000000,579.000000,1818.000000,590.000000,5.805300,255900.000000 +-122.260000,37.820000,22.000000,3682.000000,1270.000000,2024.000000,1250.000000,1.218500,170000.000000 +-118.370000,33.930000,46.000000,442.000000,88.000000,255.000000,94.000000,4.447400,246900.000000 +-118.220000,34.050000,43.000000,1153.000000,411.000000,1667.000000,409.000000,1.940200,139300.000000 +-122.490000,37.680000,34.000000,3718.000000,676.000000,2510.000000,632.000000,5.331100,270800.000000 +-116.510000,33.840000,16.000000,980.000000,193.000000,454.000000,185.000000,4.072900,100000.000000 +-121.880000,37.660000,29.000000,2702.000000,680.000000,1360.000000,642.000000,3.112700,233000.000000 +-122.440000,37.800000,52.000000,2869.000000,594.000000,500.000000,335.000000,5.037600,500001.000000 +-121.340000,38.050000,16.000000,667.000000,92.000000,267.000000,90.000000,5.614700,244700.000000 +-117.870000,33.840000,16.000000,1545.000000,354.000000,730.000000,350.000000,4.511200,139000.000000 +-122.280000,37.890000,52.000000,2315.000000,408.000000,835.000000,369.000000,4.589300,290100.000000 +-121.830000,37.990000,18.000000,2741.000000,449.000000,1507.000000,460.000000,4.756600,142500.000000 +-119.530000,36.650000,43.000000,1676.000000,320.000000,1056.000000,276.000000,2.556200,93200.000000 +-117.390000,34.090000,10.000000,5736.000000,945.000000,3528.000000,932.000000,4.395800,130700.000000 +-118.230000,33.900000,45.000000,1285.000000,238.000000,840.000000,211.000000,3.410700,112500.000000 +-121.320000,38.670000,21.000000,3455.000000,706.000000,1605.000000,704.000000,3.138200,91600.000000 +-118.330000,34.050000,46.000000,3015.000000,795.000000,2300.000000,725.000000,2.070600,268500.000000 +-122.210000,37.840000,44.000000,3424.000000,597.000000,1358.000000,597.000000,6.019400,292300.000000 +-117.900000,34.530000,8.000000,3484.000000,647.000000,2169.000000,619.000000,3.976600,135800.000000 +-122.470000,37.510000,15.000000,4974.000000,764.000000,2222.000000,774.000000,6.760600,364300.000000 +-118.020000,33.770000,7.000000,586.000000,118.000000,232.000000,107.000000,5.207700,181300.000000 +-119.730000,34.430000,35.000000,2703.000000,654.000000,1383.000000,631.000000,4.527800,340400.000000 
+-120.680000,35.140000,34.000000,3100.000000,617.000000,1155.000000,542.000000,3.093800,245900.000000 +-122.470000,38.290000,14.000000,3732.000000,846.000000,1277.000000,775.000000,2.565800,208000.000000 +-121.900000,37.350000,52.000000,1034.000000,239.000000,531.000000,223.000000,2.741100,227100.000000 +-121.870000,37.260000,17.000000,1051.000000,172.000000,446.000000,173.000000,5.665200,234500.000000 +-117.970000,33.890000,15.000000,3801.000000,542.000000,1992.000000,526.000000,9.068300,367400.000000 +-116.870000,33.910000,37.000000,1858.000000,361.000000,1632.000000,310.000000,2.753600,73100.000000 +-122.150000,37.470000,38.000000,1560.000000,301.000000,1331.000000,316.000000,3.052100,151500.000000 +-118.310000,34.010000,52.000000,2547.000000,475.000000,1417.000000,444.000000,1.821400,123200.000000 +-118.440000,34.040000,49.000000,32.000000,7.000000,14.000000,7.000000,2.187500,225000.000000 +-118.010000,33.850000,29.000000,2064.000000,447.000000,1265.000000,400.000000,3.886400,209300.000000 +-122.270000,41.200000,52.000000,4513.000000,985.000000,1926.000000,815.000000,1.592300,56000.000000 +-122.320000,37.560000,49.000000,2016.000000,299.000000,691.000000,288.000000,5.549000,500001.000000 +-119.770000,36.720000,43.000000,1763.000000,389.000000,1623.000000,390.000000,1.442700,47700.000000 +-122.140000,37.840000,24.000000,2131.000000,343.000000,874.000000,373.000000,5.634900,355600.000000 +-118.340000,34.090000,14.000000,3032.000000,999.000000,1691.000000,841.000000,2.200000,210000.000000 +-117.610000,34.340000,18.000000,5210.000000,912.000000,1301.000000,464.000000,4.862300,176900.000000 +-118.230000,33.760000,21.000000,49.000000,14.000000,29.000000,16.000000,5.000000,87500.000000 +-117.890000,33.770000,32.000000,2342.000000,570.000000,1445.000000,453.000000,4.195100,195000.000000 +-118.260000,33.910000,39.000000,967.000000,256.000000,903.000000,256.000000,1.903800,93100.000000 +-118.400000,33.990000,39.000000,1613.000000,380.000000,1113.000000,356.000000,2.825000,276700.000000 +-117.140000,32.920000,15.000000,1558.000000,314.000000,949.000000,332.000000,5.286400,174400.000000 +-118.150000,33.770000,52.000000,2204.000000,498.000000,899.000000,445.000000,4.176500,393900.000000 +-118.590000,34.210000,17.000000,2737.000000,868.000000,2924.000000,785.000000,2.579700,183500.000000 +-121.370000,36.830000,14.000000,3658.000000,612.000000,1951.000000,600.000000,4.760000,216000.000000 +-120.480000,35.020000,17.000000,2721.000000,477.000000,1672.000000,492.000000,2.979800,204800.000000 +-118.440000,34.210000,41.000000,1440.000000,325.000000,1014.000000,322.000000,2.875000,168600.000000 +-122.320000,38.330000,17.000000,851.000000,118.000000,370.000000,123.000000,5.087700,209300.000000 +-121.870000,37.280000,21.000000,3305.000000,749.000000,2459.000000,701.000000,3.968800,249600.000000 +-117.100000,33.070000,16.000000,2402.000000,336.000000,1080.000000,365.000000,8.680300,347300.000000 +-118.030000,33.760000,25.000000,4650.000000,849.000000,2503.000000,790.000000,5.742000,221900.000000 +-122.400000,37.730000,48.000000,1489.000000,326.000000,1115.000000,356.000000,2.636400,199300.000000 +-118.340000,34.120000,41.000000,3257.000000,679.000000,1237.000000,638.000000,4.241500,409600.000000 +-121.040000,39.240000,48.000000,1188.000000,227.000000,471.000000,219.000000,2.312500,125700.000000 +-117.970000,33.910000,19.000000,8096.000000,1318.000000,3853.000000,1313.000000,6.007600,269500.000000 +-117.100000,32.680000,45.000000,1183.000000,289.000000,900.000000,266.000000,2.494300,99600.000000 
+-116.610000,33.930000,35.000000,321.000000,71.000000,157.000000,61.000000,2.805600,68100.000000 +-118.390000,34.080000,27.000000,6605.000000,1710.000000,2665.000000,1520.000000,3.808800,500001.000000 +-121.230000,38.650000,19.000000,2926.000000,476.000000,1349.000000,480.000000,4.643700,212900.000000 +-122.200000,37.790000,29.000000,1640.000000,376.000000,939.000000,340.000000,2.832100,150000.000000 +-117.180000,32.830000,23.000000,2105.000000,525.000000,1218.000000,484.000000,3.375000,184100.000000 +-118.080000,33.770000,26.000000,2461.000000,562.000000,971.000000,544.000000,2.194400,87500.000000 +-120.450000,34.660000,7.000000,3329.000000,504.000000,1462.000000,452.000000,4.787500,198300.000000 +-117.820000,33.680000,4.000000,1346.000000,213.000000,603.000000,219.000000,8.797400,360600.000000 +-121.920000,36.610000,27.000000,1619.000000,352.000000,831.000000,344.000000,4.300000,226400.000000 +-122.010000,37.530000,19.000000,4572.000000,712.000000,2346.000000,709.000000,6.066700,245700.000000 +-118.270000,33.950000,34.000000,987.000000,248.000000,902.000000,221.000000,2.336500,98000.000000 +-119.960000,38.940000,27.000000,1492.000000,393.000000,717.000000,254.000000,1.890600,104200.000000 +-121.420000,36.570000,13.000000,2685.000000,621.000000,2474.000000,573.000000,2.877500,134100.000000 +-120.960000,37.660000,15.000000,2485.000000,434.000000,1296.000000,434.000000,3.854200,145200.000000 +-118.650000,34.200000,23.000000,7480.000000,1084.000000,3037.000000,1058.000000,6.922300,338400.000000 +-122.310000,38.000000,29.000000,3108.000000,534.000000,1687.000000,516.000000,4.333300,170800.000000 +-118.350000,34.070000,48.000000,890.000000,255.000000,434.000000,232.000000,3.611100,450000.000000 +-118.190000,33.790000,29.000000,3497.000000,1096.000000,2994.000000,919.000000,1.810900,137500.000000 +-122.140000,37.410000,35.000000,2419.000000,426.000000,949.000000,433.000000,6.458800,437100.000000 +-119.810000,36.710000,25.000000,1026.000000,221.000000,789.000000,183.000000,1.562500,52800.000000 +-117.180000,32.680000,29.000000,1539.000000,344.000000,556.000000,289.000000,3.250000,500001.000000 +-117.770000,34.080000,27.000000,5929.000000,932.000000,2817.000000,828.000000,6.043400,214800.000000 +-118.110000,33.860000,33.000000,2389.000000,410.000000,1229.000000,393.000000,5.388900,234900.000000 +-118.280000,34.090000,52.000000,1739.000000,464.000000,938.000000,482.000000,2.442900,228800.000000 +-117.930000,34.040000,30.000000,1336.000000,239.000000,905.000000,253.000000,4.885400,178100.000000 +-117.050000,32.760000,37.000000,4879.000000,906.000000,2076.000000,871.000000,3.662500,154800.000000 +-118.250000,33.870000,18.000000,6812.000000,1263.000000,3704.000000,1216.000000,4.250000,169200.000000 +-122.410000,37.780000,52.000000,254.000000,72.000000,153.000000,29.000000,3.862500,350000.000000 +-119.720000,34.470000,34.000000,3262.000000,533.000000,1265.000000,502.000000,5.841100,381800.000000 +-118.120000,34.150000,22.000000,1671.000000,480.000000,1005.000000,443.000000,3.011900,171400.000000 +-122.210000,37.830000,40.000000,4991.000000,674.000000,1616.000000,654.000000,7.554400,411500.000000 +-119.380000,36.560000,14.000000,3965.000000,804.000000,1945.000000,733.000000,2.690600,95300.000000 +-118.380000,34.280000,22.000000,4428.000000,825.000000,3152.000000,836.000000,4.793200,166300.000000 +-117.340000,34.120000,26.000000,1008.000000,164.000000,568.000000,196.000000,3.351600,105600.000000 +-122.060000,37.390000,22.000000,1236.000000,290.000000,413.000000,274.000000,3.687500,40000.000000 
+-118.460000,34.070000,49.000000,2418.000000,301.000000,850.000000,318.000000,14.286700,500001.000000 +-117.900000,34.150000,21.000000,2056.000000,461.000000,1332.000000,429.000000,3.394200,212800.000000 +-123.470000,39.800000,18.000000,2130.000000,545.000000,863.000000,346.000000,2.357100,79200.000000 +-121.910000,37.250000,31.000000,1944.000000,343.000000,975.000000,334.000000,4.920500,240500.000000 +-122.320000,38.320000,22.000000,2483.000000,528.000000,1478.000000,492.000000,4.087800,164400.000000 +-118.140000,33.880000,30.000000,2596.000000,580.000000,1662.000000,539.000000,4.050700,179500.000000 +-117.820000,33.810000,25.000000,2662.000000,402.000000,1247.000000,401.000000,5.439500,244000.000000 +-118.270000,34.070000,38.000000,1270.000000,556.000000,1692.000000,450.000000,1.870000,170800.000000 +-117.440000,33.950000,31.000000,914.000000,177.000000,556.000000,161.000000,3.734400,115300.000000 +-118.100000,34.070000,36.000000,1240.000000,349.000000,1383.000000,338.000000,2.493100,170300.000000 +-121.830000,37.370000,43.000000,1461.000000,284.000000,800.000000,258.000000,3.227900,182400.000000 +-120.900000,35.330000,16.000000,1576.000000,287.000000,595.000000,262.000000,3.588000,266300.000000 +-121.750000,36.920000,48.000000,1801.000000,353.000000,1071.000000,361.000000,3.600000,194500.000000 +-117.910000,33.650000,24.000000,885.000000,321.000000,590.000000,254.000000,2.625000,217900.000000 +-117.200000,32.800000,33.000000,2573.000000,436.000000,1084.000000,443.000000,4.241700,294100.000000 +-118.230000,34.180000,43.000000,1708.000000,280.000000,768.000000,276.000000,6.207000,457400.000000 +-118.320000,33.930000,34.000000,1536.000000,273.000000,804.000000,287.000000,4.961500,157800.000000 +-117.760000,34.120000,16.000000,9020.000000,1509.000000,3575.000000,1486.000000,4.241500,275700.000000 +-118.450000,34.230000,25.000000,4393.000000,1369.000000,3781.000000,1267.000000,2.583300,183700.000000 +-122.450000,41.280000,15.000000,2740.000000,503.000000,1188.000000,445.000000,3.451900,128800.000000 +-118.330000,34.010000,43.000000,2227.000000,564.000000,956.000000,472.000000,2.021700,187500.000000 +-124.160000,40.790000,46.000000,3042.000000,597.000000,1206.000000,541.000000,2.113500,90600.000000 +-118.140000,34.060000,37.000000,1339.000000,258.000000,706.000000,238.000000,4.756900,253800.000000 +-121.140000,38.770000,15.000000,10282.000000,1333.000000,3868.000000,1300.000000,6.478900,287800.000000 +-117.750000,33.830000,14.000000,2452.000000,296.000000,954.000000,275.000000,8.237500,388300.000000 +-122.120000,37.690000,30.000000,1197.000000,269.000000,695.000000,279.000000,3.437500,157800.000000 +-117.790000,34.070000,33.000000,1694.000000,333.000000,1689.000000,301.000000,3.758300,116300.000000 +-118.410000,34.090000,37.000000,2716.000000,302.000000,809.000000,291.000000,15.000100,500001.000000 +-118.530000,34.440000,19.000000,1285.000000,195.000000,650.000000,193.000000,6.039800,217800.000000 +-120.780000,38.740000,28.000000,4236.000000,877.000000,2008.000000,881.000000,2.160300,111300.000000 +-122.350000,37.580000,26.000000,854.000000,246.000000,396.000000,231.000000,2.839300,375000.000000 +-119.720000,36.820000,15.000000,946.000000,239.000000,550.000000,246.000000,2.263900,52500.000000 +-118.140000,34.010000,42.000000,1973.000000,510.000000,1841.000000,502.000000,2.532600,156500.000000 +-117.120000,32.750000,25.000000,2222.000000,634.000000,1025.000000,568.000000,1.640000,130000.000000 +-117.900000,34.130000,37.000000,1801.000000,422.000000,1564.000000,425.000000,3.159700,133000.000000 
+-117.390000,33.690000,5.000000,6529.000000,997.000000,3464.000000,1006.000000,5.327500,168700.000000 +-122.450000,40.610000,17.000000,785.000000,155.000000,417.000000,136.000000,2.328900,58200.000000 +-117.120000,34.210000,19.000000,4641.000000,994.000000,1334.000000,474.000000,4.597200,123900.000000 +-122.760000,38.460000,14.000000,4742.000000,756.000000,2149.000000,732.000000,4.515200,199200.000000 +-118.190000,34.120000,46.000000,3387.000000,820.000000,2833.000000,813.000000,2.987000,176900.000000 +-118.310000,34.060000,36.000000,369.000000,147.000000,145.000000,136.000000,0.880400,450000.000000 +-122.340000,37.950000,45.000000,1128.000000,240.000000,702.000000,270.000000,3.671900,134100.000000 +-118.220000,34.660000,17.000000,3810.000000,662.000000,1867.000000,586.000000,4.900000,152400.000000 +-118.290000,34.050000,40.000000,907.000000,349.000000,1426.000000,323.000000,1.857100,143800.000000 +-117.960000,33.870000,37.000000,1785.000000,360.000000,1155.000000,403.000000,4.798400,175800.000000 +-119.570000,34.380000,22.000000,2512.000000,426.000000,919.000000,341.000000,5.759000,425000.000000 +-118.280000,33.750000,41.000000,1305.000000,381.000000,1384.000000,369.000000,2.450000,186800.000000 +-121.890000,38.010000,32.000000,1000.000000,188.000000,663.000000,212.000000,4.097200,99200.000000 +-118.130000,34.160000,52.000000,1872.000000,357.000000,984.000000,364.000000,4.000000,250400.000000 +-118.040000,34.180000,37.000000,3134.000000,532.000000,1220.000000,508.000000,5.286500,455400.000000 +-123.220000,39.160000,32.000000,1149.000000,187.000000,499.000000,208.000000,3.658700,154600.000000 +-120.690000,38.440000,13.000000,1473.000000,265.000000,597.000000,228.000000,4.291700,121300.000000 +-118.040000,33.800000,33.000000,2685.000000,466.000000,1359.000000,476.000000,5.026100,245100.000000 +-119.800000,36.730000,45.000000,925.000000,231.000000,797.000000,228.000000,1.701100,44800.000000 +-117.490000,33.910000,17.000000,5364.000000,1020.000000,3754.000000,936.000000,3.285700,139100.000000 +-118.340000,34.010000,37.000000,4291.000000,1102.000000,1941.000000,953.000000,1.794500,106300.000000 +-118.370000,34.190000,41.000000,2924.000000,867.000000,2751.000000,836.000000,2.100000,171600.000000 +-117.270000,34.450000,8.000000,6463.000000,1095.000000,3213.000000,1031.000000,3.221500,108800.000000 +-120.450000,34.870000,4.000000,1533.000000,221.000000,545.000000,191.000000,7.569600,328700.000000 +-122.320000,37.520000,26.000000,4042.000000,591.000000,1611.000000,578.000000,8.469300,419200.000000 +-121.420000,38.490000,17.000000,13180.000000,2444.000000,7235.000000,2335.000000,3.363000,103000.000000 +-115.570000,32.780000,29.000000,2321.000000,367.000000,1173.000000,360.000000,4.037500,86400.000000 +-118.470000,33.990000,52.000000,2167.000000,622.000000,1095.000000,570.000000,2.851400,358700.000000 +-118.270000,33.960000,42.000000,796.000000,203.000000,697.000000,177.000000,2.037000,92600.000000 +-118.050000,33.900000,41.000000,550.000000,129.000000,642.000000,125.000000,1.875000,119900.000000 +-118.960000,35.400000,28.000000,4667.000000,875.000000,2404.000000,841.000000,3.232500,89000.000000 +-117.130000,32.980000,5.000000,2276.000000,311.000000,1158.000000,317.000000,6.432100,271900.000000 +-122.040000,37.610000,36.000000,1151.000000,216.000000,727.000000,215.000000,4.171900,187000.000000 +-116.580000,33.090000,36.000000,992.000000,224.000000,334.000000,126.000000,3.008900,134400.000000 +-121.980000,38.250000,4.000000,2487.000000,440.000000,1545.000000,452.000000,4.910300,140400.000000 
+-122.300000,37.920000,32.000000,3943.000000,605.000000,1524.000000,614.000000,6.067700,321600.000000 +-121.570000,39.480000,15.000000,202.000000,54.000000,145.000000,40.000000,0.825200,42500.000000 +-118.090000,33.920000,36.000000,847.000000,185.000000,713.000000,194.000000,4.854200,167400.000000 +-117.710000,33.610000,25.000000,3004.000000,718.000000,891.000000,626.000000,2.395000,80300.000000 +-118.210000,33.900000,41.000000,941.000000,233.000000,973.000000,253.000000,1.958300,102300.000000 +-118.290000,34.170000,52.000000,1732.000000,305.000000,875.000000,311.000000,4.325000,292600.000000 +-118.950000,35.400000,23.000000,4483.000000,894.000000,2136.000000,883.000000,3.687500,101700.000000 +-117.410000,34.230000,17.000000,889.000000,131.000000,439.000000,141.000000,6.142600,155000.000000 +-121.920000,36.570000,42.000000,3944.000000,738.000000,1374.000000,598.000000,4.174000,394400.000000 +-121.640000,39.150000,15.000000,2659.000000,396.000000,1159.000000,407.000000,5.234000,124900.000000 +-120.920000,37.630000,39.000000,45.000000,8.000000,22.000000,9.000000,1.767900,450000.000000 +-122.270000,37.840000,52.000000,1688.000000,337.000000,853.000000,325.000000,2.180600,99700.000000 +-118.270000,34.100000,51.000000,3149.000000,519.000000,1082.000000,510.000000,6.445900,421600.000000 +-121.810000,37.240000,21.000000,3250.000000,610.000000,1978.000000,568.000000,4.500000,234400.000000 +-114.620000,33.620000,26.000000,18.000000,3.000000,5.000000,3.000000,0.536000,275000.000000 +-118.090000,34.710000,5.000000,5807.000000,1182.000000,2602.000000,1007.000000,2.401200,159400.000000 +-118.200000,34.020000,48.000000,2230.000000,593.000000,2419.000000,598.000000,2.394400,130700.000000 +-119.620000,36.590000,17.000000,2287.000000,390.000000,1330.000000,393.000000,4.019700,88000.000000 +-118.410000,34.190000,42.000000,779.000000,145.000000,450.000000,148.000000,3.979200,193800.000000 +-118.300000,33.980000,48.000000,1998.000000,410.000000,1176.000000,382.000000,3.045500,102400.000000 +-117.330000,34.120000,38.000000,1703.000000,385.000000,1356.000000,363.000000,2.039100,70400.000000 +-118.500000,34.020000,28.000000,5109.000000,1482.000000,2313.000000,1451.000000,3.326600,483300.000000 +-118.070000,33.920000,36.000000,1560.000000,320.000000,1348.000000,314.000000,3.622000,174000.000000 +-117.130000,32.580000,27.000000,2511.000000,615.000000,1427.000000,576.000000,3.164500,156000.000000 +-117.270000,34.490000,7.000000,2344.000000,351.000000,846.000000,314.000000,4.736100,174500.000000 +-121.450000,38.600000,44.000000,2324.000000,413.000000,823.000000,375.000000,4.662500,158900.000000 +-121.980000,37.220000,46.000000,10088.000000,1910.000000,3728.000000,1781.000000,5.232100,500001.000000 +-120.310000,36.650000,24.000000,943.000000,209.000000,514.000000,156.000000,2.250000,76600.000000 +-117.950000,33.840000,32.000000,1378.000000,492.000000,1202.000000,448.000000,3.402800,183700.000000 +-119.700000,36.800000,34.000000,1768.000000,303.000000,888.000000,314.000000,3.808800,87700.000000 +-121.880000,37.430000,17.000000,3469.000000,896.000000,2762.000000,808.000000,3.388400,245800.000000 +-118.430000,34.260000,37.000000,1269.000000,348.000000,1835.000000,335.000000,3.258300,147200.000000 +-121.890000,37.350000,48.000000,1562.000000,439.000000,1469.000000,424.000000,2.567300,177500.000000 +-121.330000,38.040000,15.000000,2903.000000,440.000000,1325.000000,423.000000,4.517900,145600.000000 +-123.730000,39.170000,20.000000,4620.000000,1042.000000,1745.000000,794.000000,2.375000,158800.000000 
+-118.040000,33.970000,34.000000,1759.000000,431.000000,1282.000000,391.000000,3.049100,158200.000000 +-118.150000,34.190000,48.000000,1854.000000,360.000000,1126.000000,382.000000,3.221600,161600.000000 +-118.110000,34.020000,17.000000,9559.000000,1911.000000,5279.000000,1844.000000,5.151500,318900.000000 +-121.200000,38.670000,10.000000,3875.000000,668.000000,1632.000000,593.000000,4.690200,171000.000000 +-118.390000,34.120000,29.000000,6447.000000,1012.000000,2184.000000,960.000000,8.281600,500001.000000 +-118.370000,34.060000,52.000000,2239.000000,423.000000,832.000000,411.000000,5.085800,470000.000000 +-118.520000,34.200000,35.000000,2891.000000,594.000000,1757.000000,581.000000,4.357100,199800.000000 +-118.370000,33.950000,52.000000,836.000000,175.000000,747.000000,166.000000,4.125000,174000.000000 +-121.340000,37.980000,8.000000,2628.000000,428.000000,1158.000000,393.000000,5.300200,191700.000000 +-119.320000,36.190000,11.000000,3136.000000,620.000000,2013.000000,583.000000,3.335000,69700.000000 +-117.840000,34.040000,4.000000,9959.000000,1544.000000,4904.000000,1429.000000,6.975400,402500.000000 +-118.230000,34.150000,19.000000,2294.000000,716.000000,1686.000000,680.000000,3.028800,258300.000000 +-115.520000,32.980000,21.000000,1302.000000,327.000000,1244.000000,316.000000,2.205400,66400.000000 +-117.790000,34.070000,34.000000,975.000000,192.000000,870.000000,183.000000,3.793300,116100.000000 +-115.590000,32.960000,17.000000,841.000000,146.000000,473.000000,154.000000,3.197900,113500.000000 +-121.830000,37.300000,17.000000,1299.000000,211.000000,825.000000,217.000000,4.500000,235800.000000 +-117.270000,34.500000,8.000000,3567.000000,543.000000,1133.000000,419.000000,5.373300,302600.000000 +-118.040000,33.930000,35.000000,1805.000000,387.000000,1505.000000,366.000000,4.166700,151900.000000 +-122.090000,37.950000,32.000000,1339.000000,209.000000,601.000000,209.000000,6.026500,247900.000000 +-122.230000,37.750000,50.000000,1542.000000,289.000000,654.000000,268.000000,3.963200,240000.000000 +-117.880000,33.720000,38.000000,1421.000000,300.000000,1236.000000,263.000000,3.984400,165300.000000 +-122.420000,37.750000,52.000000,2164.000000,533.000000,1122.000000,469.000000,3.263200,306000.000000 +-118.050000,34.140000,39.000000,2125.000000,295.000000,862.000000,303.000000,8.972800,500001.000000 +-118.060000,34.110000,36.000000,2178.000000,485.000000,914.000000,412.000000,2.765600,239500.000000 +-118.150000,33.870000,33.000000,2373.000000,552.000000,1673.000000,571.000000,3.068500,181800.000000 +-117.250000,32.760000,38.000000,2331.000000,493.000000,836.000000,433.000000,4.912500,452600.000000 +-117.860000,33.740000,34.000000,2254.000000,630.000000,2984.000000,625.000000,2.500000,162500.000000 +-122.530000,39.090000,11.000000,1264.000000,271.000000,370.000000,177.000000,1.300000,69700.000000 +-117.970000,33.680000,23.000000,1722.000000,316.000000,865.000000,309.000000,4.645200,273800.000000 +-118.060000,34.030000,36.000000,21.000000,7.000000,21.000000,9.000000,2.375000,175000.000000 +-117.820000,33.740000,25.000000,2720.000000,680.000000,1559.000000,631.000000,3.095800,137800.000000 +-121.800000,37.700000,22.000000,5533.000000,943.000000,2474.000000,910.000000,4.736100,216800.000000 +-121.730000,36.850000,22.000000,1304.000000,278.000000,887.000000,227.000000,3.660700,206300.000000 +-118.320000,33.860000,34.000000,495.000000,90.000000,269.000000,93.000000,6.439100,252300.000000 +-118.280000,34.040000,24.000000,1283.000000,545.000000,1932.000000,516.000000,1.296900,160200.000000 
+-117.030000,32.950000,19.000000,4500.000000,815.000000,2456.000000,782.000000,4.503200,168900.000000 +-117.870000,33.830000,27.000000,2287.000000,353.000000,1140.000000,351.000000,5.616300,231000.000000 +-122.090000,37.650000,35.000000,1130.000000,192.000000,543.000000,184.000000,4.389700,190600.000000 +-117.600000,34.030000,16.000000,1499.000000,232.000000,918.000000,239.000000,5.567700,175400.000000 +-121.460000,38.610000,43.000000,1111.000000,269.000000,613.000000,290.000000,1.291700,66300.000000 +-117.960000,34.530000,10.000000,2907.000000,559.000000,1681.000000,531.000000,3.859400,141000.000000 +-116.460000,33.790000,10.000000,6960.000000,1487.000000,1130.000000,661.000000,2.141100,136400.000000 +-118.540000,34.370000,27.000000,2051.000000,301.000000,917.000000,287.000000,7.605900,323700.000000 +-122.160000,37.450000,52.000000,1135.000000,219.000000,441.000000,200.000000,7.541800,492000.000000 +-117.710000,34.060000,27.000000,2127.000000,628.000000,1970.000000,534.000000,1.472200,91300.000000 +-118.290000,34.030000,42.000000,907.000000,378.000000,822.000000,288.000000,1.287500,179200.000000 +-118.180000,33.900000,32.000000,1452.000000,365.000000,1888.000000,366.000000,3.546100,146400.000000 +-121.360000,38.690000,13.000000,6850.000000,1400.000000,4251.000000,1421.000000,3.698900,93300.000000 +-122.370000,40.520000,18.000000,4547.000000,774.000000,2269.000000,766.000000,3.789600,98100.000000 +-122.410000,37.710000,49.000000,1852.000000,429.000000,1615.000000,447.000000,3.495000,217800.000000 +-118.530000,34.240000,24.000000,2718.000000,719.000000,3018.000000,644.000000,2.907600,275300.000000 +-121.880000,37.670000,16.000000,4070.000000,624.000000,1543.000000,577.000000,6.521400,311500.000000 +-120.090000,37.000000,11.000000,3761.000000,675.000000,2374.000000,673.000000,3.459800,74600.000000 +-117.100000,32.750000,17.000000,871.000000,379.000000,955.000000,351.000000,1.437500,96400.000000 +-119.640000,36.350000,30.000000,1765.000000,310.000000,746.000000,298.000000,2.812500,70200.000000 +-118.260000,33.970000,47.000000,1504.000000,374.000000,1168.000000,358.000000,1.462500,94200.000000 +-117.600000,33.910000,15.000000,1864.000000,271.000000,1006.000000,288.000000,7.237900,251000.000000 +-122.200000,39.510000,37.000000,2358.000000,413.000000,1060.000000,424.000000,2.833300,69700.000000 +-122.120000,37.690000,10.000000,2227.000000,560.000000,1140.000000,472.000000,2.397300,167300.000000 +-118.200000,33.970000,43.000000,825.000000,212.000000,820.000000,184.000000,1.889700,174300.000000 +-121.280000,38.140000,38.000000,2803.000000,500.000000,1223.000000,509.000000,4.119000,128800.000000 +-119.030000,34.230000,16.000000,5323.000000,795.000000,2493.000000,779.000000,5.676200,271300.000000 +-121.700000,38.100000,19.000000,4896.000000,1083.000000,2150.000000,905.000000,3.339800,89700.000000 +-117.960000,33.830000,30.000000,2838.000000,649.000000,1758.000000,593.000000,3.383100,197400.000000 +-120.700000,36.990000,32.000000,320.000000,73.000000,222.000000,78.000000,2.927100,87500.000000 +-122.390000,37.740000,45.000000,1462.000000,308.000000,924.000000,302.000000,2.176700,185300.000000 +-121.760000,38.410000,19.000000,686.000000,107.000000,348.000000,109.000000,3.930600,93800.000000 +-121.350000,38.660000,8.000000,3322.000000,805.000000,1694.000000,774.000000,2.701100,130700.000000 +-118.670000,34.280000,21.000000,4059.000000,598.000000,2133.000000,634.000000,5.694900,235300.000000 +-118.310000,34.100000,33.000000,766.000000,347.000000,918.000000,305.000000,1.705000,350000.000000 
+-117.690000,34.040000,5.000000,4459.000000,896.000000,2028.000000,881.000000,4.009600,182600.000000 +-119.600000,36.580000,28.000000,1452.000000,300.000000,919.000000,308.000000,2.828700,73100.000000 +-121.760000,36.750000,21.000000,1141.000000,257.000000,671.000000,195.000000,3.842400,155700.000000 +-117.940000,33.860000,35.000000,1235.000000,227.000000,875.000000,220.000000,4.696400,183100.000000 +-120.860000,37.770000,28.000000,1208.000000,232.000000,535.000000,232.000000,2.352300,94700.000000 +-121.840000,37.350000,22.000000,2914.000000,768.000000,2962.000000,762.000000,2.203100,164000.000000 +-121.070000,38.900000,52.000000,1280.000000,281.000000,523.000000,266.000000,1.737500,122200.000000 +-118.450000,33.960000,24.000000,3097.000000,791.000000,1075.000000,639.000000,5.723000,500001.000000 +-118.290000,34.180000,52.000000,1602.000000,265.000000,667.000000,251.000000,5.049000,323500.000000 +-119.970000,36.440000,18.000000,1128.000000,237.000000,772.000000,220.000000,2.177100,39200.000000 +-121.930000,38.310000,25.000000,185.000000,32.000000,85.000000,32.000000,4.875000,250000.000000 +-118.200000,33.930000,38.000000,1626.000000,307.000000,1280.000000,295.000000,3.531300,146500.000000 +-122.180000,38.230000,21.000000,2475.000000,341.000000,812.000000,308.000000,7.258900,320400.000000 +-118.010000,34.140000,20.000000,3350.000000,831.000000,1816.000000,744.000000,2.835200,161700.000000 +-117.870000,34.130000,32.000000,1741.000000,373.000000,872.000000,333.000000,3.421900,194500.000000 +-118.530000,34.270000,32.000000,1931.000000,298.000000,948.000000,314.000000,5.384700,329200.000000 +-117.140000,32.800000,33.000000,2670.000000,435.000000,1256.000000,431.000000,3.941700,179800.000000 +-118.070000,34.170000,34.000000,4062.000000,597.000000,1525.000000,566.000000,7.858800,454800.000000 +-117.580000,33.880000,16.000000,1739.000000,478.000000,1235.000000,420.000000,2.296900,116100.000000 +-120.060000,36.970000,35.000000,1859.000000,428.000000,1208.000000,399.000000,1.404400,61700.000000 +-121.830000,38.430000,24.000000,1307.000000,314.000000,917.000000,291.000000,2.224400,98100.000000 +-122.480000,37.720000,45.000000,1405.000000,338.000000,733.000000,342.000000,4.111600,187500.000000 +-116.910000,32.750000,5.000000,8710.000000,1614.000000,4372.000000,1527.000000,4.781300,240900.000000 +-119.770000,36.740000,20.000000,1855.000000,519.000000,1091.000000,443.000000,1.554700,93900.000000 +-119.460000,36.910000,12.000000,2980.000000,495.000000,1184.000000,429.000000,3.914100,123900.000000 +-118.180000,33.910000,41.000000,1260.000000,299.000000,1535.000000,322.000000,3.013400,128100.000000 +-118.390000,34.060000,43.000000,1879.000000,397.000000,873.000000,382.000000,3.815800,500001.000000 +-118.220000,33.990000,4.000000,1849.000000,577.000000,1529.000000,418.000000,2.770800,186300.000000 +-116.990000,33.200000,17.000000,2980.000000,539.000000,1531.000000,505.000000,3.155300,250000.000000 +-117.160000,32.730000,52.000000,1863.000000,559.000000,906.000000,493.000000,1.920300,195800.000000 +-117.380000,33.980000,10.000000,642.000000,176.000000,462.000000,186.000000,2.152800,162500.000000 +-122.440000,38.340000,25.000000,3106.000000,715.000000,1262.000000,665.000000,1.948700,233500.000000 +-117.880000,33.920000,13.000000,3292.000000,727.000000,1565.000000,698.000000,5.457000,308800.000000 +-119.710000,34.440000,41.000000,2220.000000,367.000000,927.000000,355.000000,5.318400,376000.000000 +-119.060000,34.370000,32.000000,3885.000000,759.000000,2504.000000,736.000000,3.645300,201700.000000 
+-121.910000,37.310000,16.000000,2962.000000,898.000000,1555.000000,795.000000,2.580400,216300.000000 +-121.560000,37.000000,20.000000,3976.000000,953.000000,3866.000000,950.000000,2.538700,160100.000000 +-122.490000,38.000000,26.000000,48.000000,8.000000,19.000000,8.000000,7.719700,400000.000000 +-118.330000,34.020000,45.000000,1667.000000,399.000000,928.000000,375.000000,1.878300,118200.000000 +-122.260000,37.510000,29.000000,3703.000000,1075.000000,1611.000000,1025.000000,2.707500,323800.000000 +-121.990000,37.830000,16.000000,2939.000000,380.000000,1177.000000,396.000000,8.083900,372000.000000 +-121.420000,37.740000,35.000000,796.000000,132.000000,313.000000,152.000000,3.150000,153200.000000 +-121.390000,38.610000,35.000000,2024.000000,359.000000,786.000000,364.000000,2.463200,156900.000000 +-122.420000,37.620000,36.000000,1017.000000,165.000000,407.000000,159.000000,4.800000,306800.000000 +-121.440000,38.480000,12.000000,4929.000000,1010.000000,2621.000000,870.000000,2.726200,109800.000000 +-117.480000,33.980000,20.000000,2451.000000,475.000000,1785.000000,456.000000,3.396600,115000.000000 +-122.050000,37.380000,24.000000,2424.000000,501.000000,1367.000000,507.000000,4.072000,364200.000000 +-123.920000,41.540000,22.000000,2920.000000,636.000000,1382.000000,499.000000,2.020200,71100.000000 +-119.010000,35.400000,11.000000,8739.000000,2190.000000,4781.000000,1919.000000,1.710900,44600.000000 +-122.330000,37.570000,43.000000,2543.000000,621.000000,1301.000000,606.000000,3.111100,318400.000000 +-120.990000,37.610000,39.000000,512.000000,132.000000,443.000000,127.000000,1.285700,60000.000000 +-121.960000,37.580000,15.000000,3575.000000,597.000000,1777.000000,559.000000,5.719200,283500.000000 +-121.580000,39.160000,33.000000,1897.000000,378.000000,888.000000,385.000000,2.111100,68700.000000 +-120.590000,38.530000,15.000000,432.000000,87.000000,208.000000,73.000000,3.612500,100000.000000 +-117.580000,33.870000,30.000000,701.000000,131.000000,356.000000,125.000000,3.291700,144300.000000 +-121.840000,39.750000,29.000000,4362.000000,1053.000000,2053.000000,1000.000000,1.728400,74500.000000 +-121.800000,36.690000,12.000000,3877.000000,914.000000,2274.000000,858.000000,3.423900,194800.000000 +-122.220000,37.810000,52.000000,2944.000000,536.000000,1034.000000,521.000000,5.350900,302100.000000 +-117.640000,33.450000,26.000000,1528.000000,234.000000,607.000000,218.000000,6.287100,325500.000000 +-120.420000,37.980000,18.000000,3059.000000,609.000000,1335.000000,581.000000,2.512900,115900.000000 +-118.300000,34.060000,47.000000,1390.000000,872.000000,2860.000000,827.000000,1.468000,137500.000000 +-122.250000,37.870000,52.000000,1204.000000,460.000000,2016.000000,477.000000,0.949000,350000.000000 +-120.270000,39.350000,11.000000,2520.000000,401.000000,397.000000,165.000000,4.665000,145600.000000 +-119.880000,36.930000,12.000000,3174.000000,520.000000,1590.000000,488.000000,4.534700,101200.000000 +-122.370000,37.580000,52.000000,2188.000000,361.000000,917.000000,357.000000,4.400000,500000.000000 +-117.820000,33.720000,24.000000,3260.000000,458.000000,1383.000000,442.000000,6.598700,272800.000000 +-118.220000,33.930000,30.000000,443.000000,170.000000,903.000000,189.000000,2.196400,125000.000000 +-120.970000,38.650000,9.000000,3707.000000,602.000000,1601.000000,555.000000,4.071400,300600.000000 +-122.060000,37.700000,33.000000,3906.000000,790.000000,1912.000000,770.000000,3.518700,209400.000000 +-118.230000,33.920000,32.000000,2698.000000,640.000000,1953.000000,613.000000,1.222200,107200.000000 
+-117.340000,34.460000,9.000000,5983.000000,1122.000000,3515.000000,1064.000000,3.150500,102000.000000 +-119.240000,36.330000,9.000000,3289.000000,621.000000,1866.000000,631.000000,3.159900,95000.000000 +-122.180000,37.730000,42.000000,4074.000000,874.000000,2736.000000,780.000000,2.455000,82400.000000 +-118.200000,33.820000,43.000000,1758.000000,347.000000,954.000000,312.000000,5.260600,198900.000000 +-117.070000,32.810000,15.000000,2000.000000,402.000000,778.000000,369.000000,4.359400,224200.000000 +-122.250000,38.020000,16.000000,1803.000000,267.000000,946.000000,266.000000,5.700100,205100.000000 +-118.420000,34.310000,19.000000,6755.000000,1443.000000,4205.000000,1395.000000,3.958300,163200.000000 +-122.270000,37.850000,52.000000,1966.000000,347.000000,793.000000,331.000000,2.775000,152500.000000 +-117.920000,33.650000,28.000000,1087.000000,423.000000,807.000000,425.000000,0.970200,225400.000000 +-118.160000,34.130000,36.000000,4003.000000,647.000000,1337.000000,631.000000,7.723000,500001.000000 +-122.490000,37.690000,35.000000,2576.000000,443.000000,1273.000000,433.000000,4.739100,272800.000000 +-122.480000,38.310000,29.000000,2375.000000,560.000000,1124.000000,502.000000,2.327600,166200.000000 +-117.670000,34.020000,16.000000,3042.000000,524.000000,1516.000000,475.000000,4.890600,178500.000000 +-117.150000,32.910000,14.000000,1259.000000,238.000000,889.000000,247.000000,4.946400,174800.000000 +-118.340000,34.030000,46.000000,2437.000000,502.000000,1151.000000,477.000000,2.444400,134100.000000 +-121.540000,38.500000,15.000000,6093.000000,1051.000000,2415.000000,997.000000,4.207500,183600.000000 +-118.150000,33.970000,32.000000,1174.000000,373.000000,1758.000000,361.000000,2.426300,158100.000000 +-122.540000,38.140000,16.000000,4431.000000,603.000000,1659.000000,630.000000,7.541200,392100.000000 +-118.010000,33.880000,19.000000,1434.000000,391.000000,1088.000000,341.000000,3.369000,269600.000000 +-117.680000,35.620000,30.000000,2994.000000,741.000000,1481.000000,581.000000,2.145800,52400.000000 +-120.640000,35.260000,21.000000,3298.000000,716.000000,1862.000000,687.000000,2.150700,221500.000000 +-121.290000,38.100000,14.000000,1551.000000,297.000000,785.000000,281.000000,3.775000,163300.000000 +-120.190000,37.530000,25.000000,1470.000000,341.000000,706.000000,283.000000,1.761400,71300.000000 +-117.310000,34.100000,28.000000,2899.000000,755.000000,2406.000000,655.000000,1.520800,69500.000000 +-118.090000,33.870000,31.000000,3498.000000,728.000000,2098.000000,697.000000,3.983700,246000.000000 +-117.990000,34.120000,37.000000,1527.000000,331.000000,1504.000000,324.000000,3.285700,130100.000000 +-119.810000,34.470000,26.000000,4382.000000,618.000000,1728.000000,587.000000,7.473400,432200.000000 +-116.960000,33.520000,9.000000,2802.000000,471.000000,1155.000000,421.000000,4.125000,392100.000000 +-122.310000,37.570000,37.000000,1437.000000,305.000000,979.000000,331.000000,4.000000,273700.000000 +-117.390000,33.970000,52.000000,3307.000000,553.000000,1269.000000,529.000000,4.317600,136200.000000 +-118.510000,34.190000,38.000000,2182.000000,409.000000,1141.000000,379.000000,4.286500,221100.000000 +-117.300000,34.120000,34.000000,1127.000000,275.000000,971.000000,249.000000,2.058300,64800.000000 +-120.850000,37.510000,15.000000,1131.000000,285.000000,728.000000,281.000000,1.553100,93100.000000 +-121.310000,37.930000,21.000000,1556.000000,314.000000,1140.000000,304.000000,2.466700,81400.000000 +-118.160000,34.090000,33.000000,1515.000000,415.000000,1345.000000,346.000000,2.375000,175000.000000 
+-118.030000,33.840000,30.000000,4781.000000,831.000000,2568.000000,797.000000,5.474600,226400.000000 +-119.880000,34.400000,25.000000,2741.000000,623.000000,2272.000000,624.000000,2.264700,216700.000000 +-118.570000,34.170000,35.000000,2072.000000,318.000000,908.000000,342.000000,6.092800,327300.000000 +-122.110000,37.140000,29.000000,3201.000000,640.000000,1722.000000,570.000000,4.459700,204100.000000 +-122.430000,37.760000,52.000000,2332.000000,434.000000,861.000000,406.000000,4.431800,437500.000000 +-118.270000,33.960000,38.000000,1126.000000,270.000000,999.000000,265.000000,0.549500,91700.000000 +-117.160000,33.760000,11.000000,4934.000000,929.000000,2508.000000,840.000000,2.625000,155400.000000 +-122.070000,37.890000,38.000000,2139.000000,343.000000,809.000000,340.000000,5.563600,268800.000000 +-117.090000,34.010000,37.000000,106.000000,18.000000,27.000000,12.000000,4.055600,131300.000000 +-122.310000,37.920000,12.000000,1895.000000,600.000000,983.000000,519.000000,2.500000,195800.000000 +-122.190000,37.730000,44.000000,1066.000000,253.000000,825.000000,244.000000,2.153800,79700.000000 +-117.000000,32.730000,17.000000,6050.000000,1143.000000,3424.000000,1131.000000,3.764700,127600.000000 +-117.210000,33.190000,21.000000,3765.000000,612.000000,1722.000000,593.000000,4.815200,218500.000000 +-118.260000,34.140000,51.000000,902.000000,320.000000,650.000000,334.000000,1.541700,268800.000000 +-122.100000,37.360000,35.000000,2063.000000,266.000000,676.000000,252.000000,8.529400,500001.000000 +-121.860000,36.600000,33.000000,1409.000000,307.000000,633.000000,290.000000,3.556800,191200.000000 +-117.240000,33.110000,10.000000,3487.000000,545.000000,1410.000000,557.000000,6.033600,240300.000000 +-116.370000,33.720000,19.000000,6190.000000,1355.000000,2242.000000,1043.000000,3.002100,152300.000000 +-121.320000,38.410000,17.000000,4401.000000,655.000000,1970.000000,639.000000,5.823900,247500.000000 +-118.700000,34.280000,27.000000,3536.000000,646.000000,1837.000000,580.000000,4.496400,238300.000000 +-118.150000,33.950000,31.000000,1053.000000,230.000000,686.000000,211.000000,4.000000,263200.000000 +-118.300000,33.730000,47.000000,2852.000000,603.000000,1130.000000,560.000000,4.194000,293900.000000 +-118.520000,34.190000,37.000000,1892.000000,347.000000,1039.000000,343.000000,4.829500,212100.000000 +-118.220000,33.990000,6.000000,1499.000000,437.000000,1754.000000,447.000000,4.316400,143200.000000 +-122.410000,37.650000,32.000000,3436.000000,868.000000,2583.000000,817.000000,3.503900,232400.000000 +-122.300000,37.890000,46.000000,1520.000000,402.000000,815.000000,375.000000,2.803600,211600.000000 +-121.430000,38.560000,50.000000,1533.000000,288.000000,532.000000,257.000000,2.541700,125900.000000 +-117.230000,32.860000,16.000000,1200.000000,468.000000,648.000000,443.000000,3.045000,100000.000000 +-117.230000,32.790000,23.000000,2578.000000,665.000000,989.000000,622.000000,3.548400,238000.000000 +-117.160000,32.720000,52.000000,788.000000,463.000000,805.000000,391.000000,0.914200,162500.000000 +-122.410000,37.660000,34.000000,1075.000000,318.000000,906.000000,294.000000,3.005200,242500.000000 +-117.230000,32.730000,36.000000,2052.000000,287.000000,699.000000,265.000000,7.555700,441400.000000 +-118.330000,34.000000,47.000000,1671.000000,388.000000,895.000000,317.000000,2.205400,121500.000000 +-117.430000,33.550000,8.000000,446.000000,62.000000,188.000000,68.000000,9.435600,465600.000000 +-118.360000,34.080000,52.000000,1965.000000,480.000000,794.000000,451.000000,3.282400,304800.000000 
+-121.090000,38.970000,13.000000,1467.000000,221.000000,688.000000,231.000000,5.253600,191900.000000 +-119.450000,35.150000,33.000000,5050.000000,964.000000,2293.000000,919.000000,3.159200,75400.000000 +-121.270000,38.640000,22.000000,1597.000000,280.000000,657.000000,273.000000,4.309800,213500.000000 +-118.000000,33.900000,35.000000,1758.000000,309.000000,972.000000,338.000000,4.383100,209800.000000 +-118.210000,34.050000,45.000000,2146.000000,607.000000,2868.000000,625.000000,2.121000,144000.000000 +-122.500000,37.770000,52.000000,2299.000000,441.000000,1252.000000,415.000000,5.056200,336700.000000 +-122.310000,37.920000,38.000000,1250.000000,236.000000,631.000000,279.000000,3.724000,220100.000000 +-118.300000,34.000000,40.000000,1131.000000,281.000000,859.000000,230.000000,1.180600,134600.000000 +-121.840000,38.020000,46.000000,66.000000,22.000000,37.000000,21.000000,0.536000,87500.000000 +-117.250000,32.800000,30.000000,2061.000000,631.000000,1007.000000,577.000000,2.581300,253100.000000 +-124.140000,40.600000,27.000000,1148.000000,206.000000,521.000000,219.000000,4.025000,128100.000000 +-118.180000,34.050000,52.000000,1070.000000,231.000000,925.000000,220.000000,1.825000,133000.000000 +-119.780000,36.800000,34.000000,3426.000000,623.000000,1938.000000,647.000000,2.899400,66000.000000 +-122.220000,38.080000,37.000000,2811.000000,539.000000,1574.000000,516.000000,3.105300,96700.000000 +-118.500000,34.260000,33.000000,2831.000000,510.000000,1340.000000,504.000000,4.831600,237300.000000 +-118.450000,34.180000,34.000000,1843.000000,442.000000,861.000000,417.000000,3.687500,246400.000000 +-119.790000,36.310000,25.000000,4984.000000,1029.000000,2414.000000,961.000000,2.293700,72300.000000 +-117.210000,32.740000,45.000000,3025.000000,583.000000,1980.000000,550.000000,2.298200,87500.000000 +-122.080000,40.640000,14.000000,3099.000000,519.000000,1447.000000,494.000000,4.013200,141200.000000 +-122.310000,37.520000,24.000000,2328.000000,335.000000,969.000000,354.000000,7.736400,435800.000000 +-119.740000,36.760000,36.000000,912.000000,216.000000,842.000000,219.000000,1.476600,52800.000000 +-118.280000,34.010000,52.000000,795.000000,308.000000,1118.000000,275.000000,1.217500,131300.000000 +-118.270000,34.110000,39.000000,3825.000000,916.000000,1378.000000,746.000000,4.409400,352600.000000 +-117.200000,33.160000,13.000000,4503.000000,1137.000000,3094.000000,1091.000000,2.315900,91600.000000 +-122.330000,37.530000,25.000000,1729.000000,383.000000,769.000000,352.000000,4.041700,458500.000000 +-120.860000,35.400000,21.000000,2787.000000,641.000000,1106.000000,501.000000,2.704300,186200.000000 +-119.470000,35.400000,32.000000,2167.000000,421.000000,1301.000000,394.000000,1.971800,69800.000000 +-117.270000,34.160000,32.000000,2894.000000,427.000000,1151.000000,446.000000,6.223600,159700.000000 +-121.920000,38.020000,8.000000,2750.000000,479.000000,1526.000000,484.000000,5.102000,156500.000000 +-121.450000,38.560000,51.000000,1250.000000,235.000000,452.000000,232.000000,2.625000,121200.000000 +-117.910000,33.840000,16.000000,919.000000,253.000000,912.000000,249.000000,1.590300,165400.000000 +-118.480000,35.610000,17.000000,4002.000000,930.000000,1614.000000,731.000000,1.623600,67300.000000 +-118.030000,33.840000,28.000000,3857.000000,857.000000,2328.000000,830.000000,4.015600,196000.000000 +-118.320000,34.040000,48.000000,1184.000000,328.000000,953.000000,311.000000,2.352600,156300.000000 +-121.300000,38.890000,23.000000,1750.000000,297.000000,1012.000000,315.000000,3.470600,99300.000000 
+-117.690000,34.070000,34.000000,4055.000000,739.000000,2470.000000,753.000000,3.858600,136000.000000 +-118.340000,33.940000,36.000000,2796.000000,1041.000000,4033.000000,944.000000,2.488600,160700.000000 +-121.920000,36.620000,52.000000,2584.000000,599.000000,790.000000,444.000000,2.526300,286400.000000 +-122.110000,37.410000,27.000000,5110.000000,1599.000000,2764.000000,1482.000000,3.419800,351900.000000 +-117.650000,34.100000,44.000000,2808.000000,585.000000,1444.000000,550.000000,2.715900,139300.000000 +-121.800000,38.010000,44.000000,3184.000000,581.000000,1399.000000,548.000000,2.723400,110200.000000 +-122.660000,38.810000,22.000000,852.000000,176.000000,461.000000,142.000000,3.437500,83300.000000 +-122.390000,37.780000,3.000000,3464.000000,1179.000000,1441.000000,919.000000,4.710500,275000.000000 +-117.060000,34.870000,14.000000,3348.000000,619.000000,1756.000000,557.000000,3.598700,91400.000000 +-121.340000,38.660000,16.000000,3154.000000,860.000000,1837.000000,793.000000,1.980500,92900.000000 +-121.920000,36.950000,29.000000,3457.000000,699.000000,1327.000000,563.000000,3.659700,252300.000000 +-122.590000,38.040000,25.000000,3412.000000,455.000000,1238.000000,406.000000,8.364600,397300.000000 +-118.280000,34.110000,46.000000,1156.000000,203.000000,514.000000,213.000000,4.201900,352100.000000 +-121.390000,38.600000,22.000000,5773.000000,1320.000000,2607.000000,1250.000000,2.523800,118800.000000 +-122.330000,40.520000,23.000000,2801.000000,507.000000,1318.000000,454.000000,3.508100,116700.000000 +-118.200000,34.040000,47.000000,1894.000000,408.000000,1629.000000,379.000000,3.761900,127600.000000 +-121.960000,37.000000,20.000000,3847.000000,727.000000,1725.000000,737.000000,3.344700,305200.000000 +-117.890000,33.870000,32.000000,1569.000000,422.000000,835.000000,386.000000,3.046500,148900.000000 +-117.230000,32.880000,18.000000,5566.000000,1465.000000,6303.000000,1458.000000,1.858000,205000.000000 +-122.000000,37.120000,17.000000,4413.000000,672.000000,1674.000000,608.000000,6.977200,383300.000000 +-118.400000,34.280000,22.000000,3517.000000,810.000000,3134.000000,847.000000,2.665200,164800.000000 +-122.460000,37.760000,52.000000,2236.000000,545.000000,1186.000000,532.000000,3.453100,414300.000000 +-121.990000,37.540000,18.000000,3584.000000,715.000000,1673.000000,661.000000,3.944400,240100.000000 +-117.230000,32.740000,16.000000,735.000000,139.000000,299.000000,134.000000,4.635400,179200.000000 +-121.840000,37.290000,4.000000,2937.000000,648.000000,1780.000000,665.000000,4.385100,160400.000000 +-118.150000,34.860000,10.000000,4597.000000,1009.000000,2227.000000,821.000000,2.614900,83500.000000 +-118.330000,33.980000,38.000000,3063.000000,796.000000,2153.000000,721.000000,1.847200,149100.000000 +-120.680000,35.510000,17.000000,1701.000000,298.000000,941.000000,293.000000,4.321800,209100.000000 +-117.950000,33.790000,34.000000,2912.000000,520.000000,1625.000000,501.000000,4.466700,190600.000000 +-117.970000,34.050000,33.000000,1452.000000,268.000000,1274.000000,278.000000,3.656300,162700.000000 +-119.750000,36.870000,3.000000,13802.000000,2244.000000,5226.000000,1972.000000,5.094100,143700.000000 +-122.080000,37.350000,35.000000,1347.000000,207.000000,548.000000,189.000000,7.706800,500001.000000 +-122.320000,37.950000,36.000000,1425.000000,245.000000,573.000000,239.000000,4.350000,185000.000000 +-122.220000,38.100000,38.000000,931.000000,181.000000,566.000000,207.000000,3.022100,93300.000000 
+-124.090000,40.550000,24.000000,2978.000000,553.000000,1370.000000,480.000000,2.764400,97300.000000 +-121.500000,38.570000,41.000000,1124.000000,344.000000,807.000000,316.000000,1.471200,94600.000000 +-118.110000,33.910000,19.000000,3056.000000,759.000000,1561.000000,740.000000,3.136900,196900.000000 +-121.230000,37.960000,37.000000,2351.000000,564.000000,1591.000000,549.000000,1.656300,57200.000000 +-121.890000,37.280000,35.000000,2418.000000,375.000000,988.000000,374.000000,6.093600,365400.000000 +-122.480000,37.650000,39.000000,3348.000000,666.000000,1817.000000,668.000000,4.259300,227400.000000 +-118.310000,34.090000,36.000000,2517.000000,842.000000,2446.000000,689.000000,2.152400,187500.000000 +-123.020000,38.810000,35.000000,956.000000,213.000000,488.000000,215.000000,3.025000,140600.000000 +-120.470000,34.650000,32.000000,2193.000000,430.000000,1074.000000,377.000000,2.333300,130200.000000 +-122.100000,37.680000,37.000000,2116.000000,503.000000,1109.000000,448.000000,2.535000,174000.000000 +-122.420000,37.790000,52.000000,3364.000000,1100.000000,2112.000000,1045.000000,2.134300,400000.000000 +-122.640000,41.630000,19.000000,2722.000000,479.000000,1108.000000,430.000000,3.106200,100000.000000 +-118.020000,33.910000,34.000000,2518.000000,429.000000,1309.000000,421.000000,4.786100,210700.000000 +-119.020000,35.360000,48.000000,1833.000000,396.000000,947.000000,363.000000,2.282700,70000.000000 +-121.330000,38.650000,23.000000,2446.000000,523.000000,1132.000000,513.000000,2.626600,198500.000000 +-118.080000,33.950000,32.000000,1962.000000,387.000000,1274.000000,398.000000,4.830400,160600.000000 +-118.080000,33.790000,34.000000,2840.000000,395.000000,1127.000000,396.000000,7.614400,376200.000000 +-118.230000,33.910000,27.000000,1694.000000,393.000000,1890.000000,373.000000,3.034100,89100.000000 +-118.290000,33.750000,37.000000,1319.000000,292.000000,766.000000,285.000000,2.703100,218900.000000 +-118.020000,34.130000,34.000000,1966.000000,319.000000,980.000000,297.000000,7.730700,429000.000000 +-117.890000,33.600000,36.000000,1496.000000,247.000000,441.000000,203.000000,7.816400,500001.000000 +-118.230000,34.650000,17.000000,1827.000000,348.000000,766.000000,335.000000,3.567300,136300.000000 +-118.310000,34.020000,45.000000,1423.000000,278.000000,822.000000,276.000000,2.451900,98100.000000 +-118.070000,33.800000,34.000000,3486.000000,507.000000,1311.000000,503.000000,7.122100,384500.000000 +-118.250000,33.940000,43.000000,1113.000000,378.000000,1305.000000,334.000000,1.143400,91300.000000 +-122.440000,37.710000,52.000000,2711.000000,591.000000,1848.000000,524.000000,3.956700,251500.000000 +-119.750000,34.500000,26.000000,3563.000000,579.000000,1479.000000,575.000000,5.952200,438400.000000 +-117.940000,33.940000,26.000000,1962.000000,540.000000,1236.000000,520.000000,2.215600,145000.000000 +-119.230000,34.170000,18.000000,6171.000000,1490.000000,2164.000000,1210.000000,3.687500,500001.000000 +-118.110000,34.680000,6.000000,7430.000000,1184.000000,3489.000000,1115.000000,5.326700,140100.000000 +-122.470000,37.770000,52.000000,2241.000000,443.000000,1042.000000,377.000000,4.163500,398400.000000 +-120.930000,35.760000,11.000000,8997.000000,1698.000000,1825.000000,756.000000,3.230000,154300.000000 +-118.140000,34.170000,52.000000,2667.000000,486.000000,1681.000000,504.000000,4.052400,173100.000000 +-122.730000,38.460000,14.000000,4042.000000,1298.000000,2323.000000,1158.000000,2.065100,135400.000000 
+-117.060000,32.760000,37.000000,2356.000000,476.000000,1231.000000,499.000000,2.965000,155700.000000 +-120.710000,35.500000,12.000000,3098.000000,453.000000,1433.000000,434.000000,5.250800,292900.000000 +-118.310000,34.050000,35.000000,1692.000000,423.000000,1578.000000,406.000000,2.531300,305800.000000 +-119.700000,36.750000,11.000000,3626.000000,779.000000,1819.000000,731.000000,2.495600,87500.000000 +-121.340000,38.640000,17.000000,2761.000000,501.000000,1128.000000,482.000000,3.756200,139700.000000 +-117.910000,34.090000,20.000000,4327.000000,1037.000000,2296.000000,963.000000,3.044100,185400.000000 +-119.760000,36.790000,32.000000,2463.000000,468.000000,1261.000000,486.000000,3.328100,75100.000000 +-120.660000,35.490000,17.000000,4422.000000,945.000000,2307.000000,885.000000,2.828500,171300.000000 +-118.280000,34.080000,42.000000,1618.000000,522.000000,1454.000000,440.000000,3.160700,182000.000000 +-122.540000,37.900000,48.000000,2491.000000,460.000000,937.000000,455.000000,4.437500,370000.000000 +-117.590000,33.880000,13.000000,3239.000000,849.000000,2751.000000,813.000000,2.611100,107000.000000 +-120.470000,34.940000,17.000000,1368.000000,308.000000,642.000000,303.000000,1.863300,109400.000000 +-118.250000,33.930000,42.000000,819.000000,233.000000,899.000000,228.000000,1.134600,85400.000000 +-121.970000,37.290000,25.000000,4096.000000,743.000000,2027.000000,741.000000,5.329400,300300.000000 +-122.010000,36.970000,43.000000,2162.000000,509.000000,1208.000000,464.000000,2.541700,260900.000000 +-122.020000,37.600000,32.000000,1295.000000,295.000000,1097.000000,328.000000,3.238600,149600.000000 +-118.230000,34.090000,49.000000,1638.000000,456.000000,1500.000000,430.000000,2.692300,150000.000000 +-117.170000,34.280000,13.000000,4867.000000,718.000000,780.000000,250.000000,7.199700,253800.000000 +-122.330000,37.390000,52.000000,573.000000,102.000000,232.000000,92.000000,6.226300,500001.000000 +-117.910000,33.600000,37.000000,2088.000000,510.000000,673.000000,390.000000,5.104800,500001.000000 +-117.930000,33.860000,35.000000,931.000000,181.000000,516.000000,174.000000,5.586700,182500.000000 +-119.860000,34.420000,23.000000,1450.000000,642.000000,1258.000000,607.000000,1.179000,225000.000000 +-118.140000,34.060000,27.000000,5257.000000,1082.000000,3496.000000,1036.000000,3.390600,237200.000000 +-119.700000,36.300000,10.000000,956.000000,201.000000,693.000000,220.000000,2.289500,62000.000000 +-117.120000,34.100000,40.000000,96.000000,14.000000,46.000000,14.000000,3.270800,162500.000000 +-119.630000,34.420000,42.000000,1765.000000,263.000000,753.000000,260.000000,8.560800,500001.000000 diff --git a/sample_data/california_housing_train.csv b/sample_data/california_housing_train.csv new file mode 100644 index 0000000000000000000000000000000000000000..6e6023df366b8fee2e924429c25ad6fbdf10798d --- /dev/null +++ b/sample_data/california_housing_train.csv @@ -0,0 +1,17001 @@ +"longitude","latitude","housing_median_age","total_rooms","total_bedrooms","population","households","median_income","median_house_value" +-114.310000,34.190000,15.000000,5612.000000,1283.000000,1015.000000,472.000000,1.493600,66900.000000 +-114.470000,34.400000,19.000000,7650.000000,1901.000000,1129.000000,463.000000,1.820000,80100.000000 +-114.560000,33.690000,17.000000,720.000000,174.000000,333.000000,117.000000,1.650900,85700.000000 +-114.570000,33.640000,14.000000,1501.000000,337.000000,515.000000,226.000000,3.191700,73400.000000 
+-114.570000,33.570000,20.000000,1454.000000,326.000000,624.000000,262.000000,1.925000,65500.000000 +-114.580000,33.630000,29.000000,1387.000000,236.000000,671.000000,239.000000,3.343800,74000.000000 +-114.580000,33.610000,25.000000,2907.000000,680.000000,1841.000000,633.000000,2.676800,82400.000000 +-114.590000,34.830000,41.000000,812.000000,168.000000,375.000000,158.000000,1.708300,48500.000000 +-114.590000,33.610000,34.000000,4789.000000,1175.000000,3134.000000,1056.000000,2.178200,58400.000000 +-114.600000,34.830000,46.000000,1497.000000,309.000000,787.000000,271.000000,2.190800,48100.000000 +-114.600000,33.620000,16.000000,3741.000000,801.000000,2434.000000,824.000000,2.679700,86500.000000 +-114.600000,33.600000,21.000000,1988.000000,483.000000,1182.000000,437.000000,1.625000,62000.000000 +-114.610000,34.840000,48.000000,1291.000000,248.000000,580.000000,211.000000,2.157100,48600.000000 +-114.610000,34.830000,31.000000,2478.000000,464.000000,1346.000000,479.000000,3.212000,70400.000000 +-114.630000,32.760000,15.000000,1448.000000,378.000000,949.000000,300.000000,0.858500,45000.000000 +-114.650000,34.890000,17.000000,2556.000000,587.000000,1005.000000,401.000000,1.699100,69100.000000 +-114.650000,33.600000,28.000000,1678.000000,322.000000,666.000000,256.000000,2.965300,94900.000000 +-114.650000,32.790000,21.000000,44.000000,33.000000,64.000000,27.000000,0.857100,25000.000000 +-114.660000,32.740000,17.000000,1388.000000,386.000000,775.000000,320.000000,1.204900,44000.000000 +-114.670000,33.920000,17.000000,97.000000,24.000000,29.000000,15.000000,1.265600,27500.000000 +-114.680000,33.490000,20.000000,1491.000000,360.000000,1135.000000,303.000000,1.639500,44400.000000 +-114.730000,33.430000,24.000000,796.000000,243.000000,227.000000,139.000000,0.896400,59200.000000 +-114.940000,34.550000,20.000000,350.000000,95.000000,119.000000,58.000000,1.625000,50000.000000 +-114.980000,33.820000,15.000000,644.000000,129.000000,137.000000,52.000000,3.209700,71300.000000 +-115.220000,33.540000,18.000000,1706.000000,397.000000,3424.000000,283.000000,1.625000,53500.000000 +-115.320000,32.820000,34.000000,591.000000,139.000000,327.000000,89.000000,3.652800,100000.000000 +-115.370000,32.820000,30.000000,1602.000000,322.000000,1130.000000,335.000000,3.573500,71100.000000 +-115.370000,32.820000,14.000000,1276.000000,270.000000,867.000000,261.000000,1.937500,80900.000000 +-115.370000,32.810000,32.000000,741.000000,191.000000,623.000000,169.000000,1.760400,68600.000000 +-115.370000,32.810000,23.000000,1458.000000,294.000000,866.000000,275.000000,2.359400,74300.000000 +-115.380000,32.820000,38.000000,1892.000000,394.000000,1175.000000,374.000000,1.993900,65800.000000 +-115.380000,32.810000,35.000000,1263.000000,262.000000,950.000000,241.000000,1.895800,67500.000000 +-115.390000,32.760000,16.000000,1136.000000,196.000000,481.000000,185.000000,6.255800,146300.000000 +-115.400000,32.860000,19.000000,1087.000000,171.000000,649.000000,173.000000,3.318200,113800.000000 +-115.400000,32.700000,19.000000,583.000000,113.000000,531.000000,134.000000,1.683800,95800.000000 +-115.410000,32.990000,29.000000,1141.000000,220.000000,684.000000,194.000000,3.403800,107800.000000 +-115.460000,33.190000,33.000000,1234.000000,373.000000,777.000000,298.000000,1.000000,40000.000000 +-115.480000,32.800000,21.000000,1260.000000,246.000000,805.000000,239.000000,2.617200,88500.000000 +-115.480000,32.680000,15.000000,3414.000000,666.000000,2097.000000,622.000000,2.331900,91200.000000 
+-115.490000,32.870000,19.000000,541.000000,104.000000,457.000000,106.000000,3.358300,102800.000000 +-115.490000,32.690000,17.000000,1960.000000,389.000000,1691.000000,356.000000,1.899000,64000.000000 +-115.490000,32.670000,29.000000,1523.000000,440.000000,1302.000000,393.000000,1.131100,84700.000000 +-115.490000,32.670000,25.000000,2322.000000,573.000000,2185.000000,602.000000,1.375000,70100.000000 +-115.500000,32.750000,13.000000,330.000000,72.000000,822.000000,64.000000,3.410700,142500.000000 +-115.500000,32.680000,18.000000,3631.000000,913.000000,3565.000000,924.000000,1.593100,88400.000000 +-115.500000,32.670000,35.000000,2159.000000,492.000000,1694.000000,475.000000,2.177600,75500.000000 +-115.510000,33.240000,32.000000,1995.000000,523.000000,1069.000000,410.000000,1.655200,43300.000000 +-115.510000,33.120000,21.000000,1024.000000,218.000000,890.000000,232.000000,2.101000,46700.000000 +-115.510000,32.990000,20.000000,1402.000000,287.000000,1104.000000,317.000000,1.908800,63700.000000 +-115.510000,32.680000,11.000000,2872.000000,610.000000,2644.000000,581.000000,2.625000,72700.000000 +-115.520000,34.220000,30.000000,540.000000,136.000000,122.000000,63.000000,1.333300,42500.000000 +-115.520000,33.130000,18.000000,1109.000000,283.000000,1006.000000,253.000000,2.163000,53400.000000 +-115.520000,33.120000,38.000000,1327.000000,262.000000,784.000000,231.000000,1.879300,60800.000000 +-115.520000,32.980000,32.000000,1615.000000,382.000000,1307.000000,345.000000,1.458300,58600.000000 +-115.520000,32.970000,24.000000,1617.000000,366.000000,1416.000000,401.000000,1.975000,66400.000000 +-115.520000,32.970000,10.000000,1879.000000,387.000000,1376.000000,337.000000,1.991100,67500.000000 +-115.520000,32.770000,18.000000,1715.000000,337.000000,1166.000000,333.000000,2.241700,79200.000000 +-115.520000,32.730000,17.000000,1190.000000,275.000000,1113.000000,258.000000,2.357100,63100.000000 +-115.520000,32.670000,6.000000,2804.000000,581.000000,2807.000000,594.000000,2.062500,67700.000000 +-115.530000,34.910000,12.000000,807.000000,199.000000,246.000000,102.000000,2.539100,40000.000000 +-115.530000,32.990000,25.000000,2578.000000,634.000000,2082.000000,565.000000,1.715900,62200.000000 +-115.530000,32.970000,35.000000,1583.000000,340.000000,933.000000,318.000000,2.406300,70700.000000 +-115.530000,32.970000,34.000000,2231.000000,545.000000,1568.000000,510.000000,1.521700,60300.000000 +-115.530000,32.730000,14.000000,1527.000000,325.000000,1453.000000,332.000000,1.735000,61200.000000 +-115.540000,32.990000,23.000000,1459.000000,373.000000,1148.000000,388.000000,1.537200,69400.000000 +-115.540000,32.990000,17.000000,1697.000000,268.000000,911.000000,254.000000,4.352300,96000.000000 +-115.540000,32.980000,27.000000,1513.000000,395.000000,1121.000000,381.000000,1.946400,60600.000000 +-115.540000,32.970000,41.000000,2429.000000,454.000000,1188.000000,430.000000,3.009100,70800.000000 +-115.540000,32.790000,23.000000,1712.000000,403.000000,1370.000000,377.000000,1.275000,60400.000000 +-115.550000,32.980000,33.000000,2266.000000,365.000000,952.000000,360.000000,5.434900,143000.000000 +-115.550000,32.980000,24.000000,2565.000000,530.000000,1447.000000,473.000000,3.259300,80800.000000 +-115.550000,32.820000,34.000000,1540.000000,316.000000,1013.000000,274.000000,2.566400,67500.000000 +-115.550000,32.800000,23.000000,666.000000,142.000000,580.000000,160.000000,2.113600,61000.000000 +-115.550000,32.790000,23.000000,1004.000000,221.000000,697.000000,201.000000,1.635100,59600.000000 
+-115.550000,32.790000,22.000000,565.000000,162.000000,692.000000,141.000000,1.208300,53600.000000 +-115.550000,32.780000,5.000000,2652.000000,606.000000,1767.000000,536.000000,2.802500,84300.000000 +-115.560000,32.960000,21.000000,2164.000000,480.000000,1164.000000,421.000000,3.817700,107200.000000 +-115.560000,32.800000,28.000000,1672.000000,416.000000,1335.000000,397.000000,1.598700,59400.000000 +-115.560000,32.800000,25.000000,1311.000000,375.000000,1193.000000,351.000000,2.197900,63900.000000 +-115.560000,32.800000,15.000000,1171.000000,328.000000,1024.000000,298.000000,1.388200,69400.000000 +-115.560000,32.790000,20.000000,2372.000000,835.000000,2283.000000,767.000000,1.170700,62500.000000 +-115.560000,32.790000,18.000000,1178.000000,438.000000,1377.000000,429.000000,1.337300,58300.000000 +-115.560000,32.780000,46.000000,2511.000000,490.000000,1583.000000,469.000000,3.060300,70800.000000 +-115.560000,32.780000,35.000000,1185.000000,202.000000,615.000000,191.000000,4.615400,86200.000000 +-115.560000,32.780000,29.000000,1568.000000,283.000000,848.000000,245.000000,3.159700,76200.000000 +-115.560000,32.760000,15.000000,1278.000000,217.000000,653.000000,185.000000,4.482100,140300.000000 +-115.570000,32.850000,33.000000,1365.000000,269.000000,825.000000,250.000000,3.239600,62300.000000 +-115.570000,32.850000,17.000000,1039.000000,256.000000,728.000000,246.000000,1.741100,63500.000000 +-115.570000,32.840000,29.000000,1207.000000,301.000000,804.000000,288.000000,1.953100,61100.000000 +-115.570000,32.830000,31.000000,1494.000000,289.000000,959.000000,284.000000,3.528200,67500.000000 +-115.570000,32.800000,16.000000,2276.000000,594.000000,1184.000000,513.000000,1.875000,93800.000000 +-115.570000,32.790000,34.000000,1152.000000,208.000000,621.000000,208.000000,3.604200,73600.000000 +-115.570000,32.780000,20.000000,1534.000000,235.000000,871.000000,222.000000,6.271500,97200.000000 +-115.570000,32.780000,15.000000,1413.000000,279.000000,803.000000,277.000000,4.302100,87500.000000 +-115.580000,33.880000,21.000000,1161.000000,282.000000,724.000000,186.000000,3.182700,71700.000000 +-115.580000,32.810000,5.000000,805.000000,143.000000,458.000000,143.000000,4.475000,96300.000000 +-115.580000,32.810000,10.000000,1088.000000,203.000000,533.000000,201.000000,3.659700,87500.000000 +-115.580000,32.790000,14.000000,1687.000000,507.000000,762.000000,451.000000,1.663500,64400.000000 +-115.580000,32.780000,5.000000,2494.000000,414.000000,1416.000000,421.000000,5.784300,110100.000000 +-115.590000,32.850000,20.000000,1608.000000,274.000000,862.000000,248.000000,4.875000,90800.000000 +-115.590000,32.790000,8.000000,2183.000000,307.000000,1000.000000,287.000000,6.381400,159900.000000 +-115.590000,32.690000,30.000000,935.000000,177.000000,649.000000,148.000000,2.576900,94400.000000 +-115.600000,33.200000,37.000000,709.000000,187.000000,390.000000,142.000000,2.451100,72500.000000 +-115.600000,32.870000,3.000000,1629.000000,317.000000,1005.000000,312.000000,4.129300,83200.000000 +-115.620000,33.040000,20.000000,1121.000000,244.000000,766.000000,230.000000,2.296900,62000.000000 +-115.620000,33.040000,17.000000,1009.000000,231.000000,745.000000,217.000000,2.046300,61200.000000 +-115.640000,32.800000,23.000000,1228.000000,235.000000,569.000000,235.000000,3.166700,125000.000000 +-115.690000,32.790000,18.000000,1564.000000,340.000000,1161.000000,343.000000,2.179200,55200.000000 +-115.720000,32.750000,16.000000,348.000000,99.000000,123.000000,54.000000,2.093800,87500.000000 
+-115.730000,33.360000,19.000000,749.000000,238.000000,476.000000,169.000000,1.772700,50000.000000 +-115.730000,33.350000,23.000000,1586.000000,448.000000,338.000000,182.000000,1.213200,30000.000000 +-115.730000,33.090000,27.000000,452.000000,103.000000,258.000000,61.000000,2.900000,87500.000000 +-115.730000,32.800000,44.000000,472.000000,81.000000,206.000000,57.000000,2.208300,93800.000000 +-115.800000,33.260000,2.000000,96.000000,18.000000,30.000000,16.000000,5.337400,47500.000000 +-115.840000,33.490000,20.000000,1660.000000,379.000000,637.000000,250.000000,2.034700,68900.000000 +-115.850000,34.200000,34.000000,3868.000000,1257.000000,890.000000,423.000000,1.357100,41000.000000 +-115.880000,32.930000,15.000000,208.000000,49.000000,51.000000,20.000000,4.020800,32500.000000 +-115.900000,33.340000,19.000000,1210.000000,248.000000,329.000000,155.000000,1.785700,62800.000000 +-115.910000,33.360000,15.000000,459.000000,95.000000,160.000000,73.000000,0.922000,67500.000000 +-115.930000,35.550000,18.000000,1321.000000,272.000000,754.000000,226.000000,3.402800,67500.000000 +-115.940000,33.380000,5.000000,186.000000,43.000000,41.000000,21.000000,2.700000,58800.000000 +-115.950000,33.280000,12.000000,99.000000,25.000000,37.000000,17.000000,1.895800,53800.000000 +-115.960000,33.300000,27.000000,322.000000,81.000000,112.000000,57.000000,1.125000,54400.000000 +-115.980000,33.320000,8.000000,240.000000,46.000000,63.000000,24.000000,1.468800,53800.000000 +-115.990000,33.400000,15.000000,1945.000000,536.000000,515.000000,273.000000,2.010900,54300.000000 +-116.000000,33.190000,16.000000,245.000000,57.000000,81.000000,33.000000,1.263900,51300.000000 +-116.000000,32.740000,26.000000,1134.000000,280.000000,329.000000,158.000000,1.433800,43900.000000 +-116.010000,33.510000,24.000000,2985.000000,958.000000,4042.000000,905.000000,1.734400,66400.000000 +-116.010000,33.410000,20.000000,1996.000000,515.000000,659.000000,295.000000,2.868400,62800.000000 +-116.020000,34.180000,8.000000,569.000000,97.000000,312.000000,96.000000,4.302100,94500.000000 +-116.050000,34.120000,19.000000,301.000000,65.000000,150.000000,56.000000,3.125000,65600.000000 +-116.050000,33.330000,17.000000,290.000000,94.000000,135.000000,57.000000,1.729200,81300.000000 +-116.060000,34.200000,29.000000,1202.000000,290.000000,383.000000,156.000000,1.337100,66900.000000 +-116.060000,34.150000,15.000000,10377.000000,2331.000000,4507.000000,1807.000000,2.246600,66800.000000 +-116.080000,33.860000,16.000000,381.000000,89.000000,182.000000,75.000000,2.425000,76100.000000 +-116.090000,34.150000,13.000000,9444.000000,1997.000000,4166.000000,1482.000000,2.611100,65600.000000 +-116.110000,33.640000,20.000000,1273.000000,354.000000,1548.000000,355.000000,2.087100,84700.000000 +-116.120000,33.530000,17.000000,2421.000000,820.000000,2971.000000,685.000000,1.654000,100000.000000 +-116.140000,34.220000,32.000000,3298.000000,1228.000000,763.000000,360.000000,1.875000,47800.000000 +-116.150000,34.140000,18.000000,3312.000000,705.000000,1251.000000,512.000000,3.013900,82600.000000 +-116.150000,33.690000,22.000000,197.000000,54.000000,331.000000,70.000000,2.928600,112500.000000 +-116.150000,33.640000,10.000000,1711.000000,499.000000,1896.000000,443.000000,1.655700,65400.000000 +-116.160000,33.680000,12.000000,1230.000000,277.000000,1334.000000,260.000000,2.267900,61400.000000 +-116.170000,33.670000,18.000000,3585.000000,800.000000,3873.000000,788.000000,2.571400,65900.000000 
+-116.170000,33.660000,22.000000,639.000000,203.000000,664.000000,153.000000,1.930600,47500.000000 +-116.170000,33.530000,13.000000,1713.000000,512.000000,1978.000000,442.000000,2.128700,58600.000000 +-116.180000,33.690000,17.000000,89.000000,19.000000,79.000000,21.000000,2.375000,155000.000000 +-116.180000,33.670000,25.000000,2888.000000,654.000000,2940.000000,660.000000,2.214100,66700.000000 +-116.190000,33.670000,16.000000,1859.000000,476.000000,1994.000000,477.000000,1.729700,67500.000000 +-116.200000,33.700000,26.000000,2399.000000,625.000000,2654.000000,535.000000,2.298900,60600.000000 +-116.200000,33.630000,23.000000,1152.000000,273.000000,1077.000000,235.000000,2.500000,96300.000000 +-116.200000,32.640000,28.000000,1608.000000,409.000000,567.000000,254.000000,1.464800,61800.000000 +-116.210000,33.750000,22.000000,894.000000,192.000000,830.000000,202.000000,3.067300,68200.000000 +-116.210000,33.720000,28.000000,2488.000000,714.000000,2891.000000,676.000000,2.316900,68900.000000 +-116.210000,33.710000,19.000000,3114.000000,787.000000,3157.000000,772.000000,1.708300,82200.000000 +-116.210000,33.680000,34.000000,584.000000,176.000000,625.000000,166.000000,1.580900,100000.000000 +-116.210000,33.660000,19.000000,1596.000000,295.000000,1201.000000,282.000000,3.884600,100900.000000 +-116.220000,36.000000,14.000000,1372.000000,386.000000,436.000000,213.000000,1.147100,32900.000000 +-116.220000,34.210000,23.000000,1175.000000,468.000000,355.000000,151.000000,2.208300,42500.000000 +-116.220000,33.740000,26.000000,4120.000000,858.000000,2918.000000,815.000000,3.310700,69400.000000 +-116.220000,33.730000,38.000000,1695.000000,352.000000,1279.000000,305.000000,2.121700,68500.000000 +-116.220000,33.720000,28.000000,826.000000,258.000000,979.000000,245.000000,1.717200,58800.000000 +-116.220000,33.700000,9.000000,3861.000000,849.000000,825.000000,401.000000,3.283300,124700.000000 +-116.230000,33.730000,29.000000,1133.000000,221.000000,918.000000,239.000000,2.864800,72100.000000 +-116.230000,33.720000,32.000000,4981.000000,1326.000000,3779.000000,1186.000000,1.780500,76900.000000 +-116.230000,33.710000,17.000000,4874.000000,1349.000000,5032.000000,1243.000000,2.444000,90000.000000 +-116.240000,33.730000,14.000000,2774.000000,566.000000,1530.000000,505.000000,3.068200,104100.000000 +-116.240000,33.720000,25.000000,5236.000000,1039.000000,2725.000000,935.000000,3.775000,93400.000000 +-116.240000,33.710000,10.000000,9033.000000,2224.000000,5525.000000,1845.000000,2.759800,95000.000000 +-116.250000,33.810000,24.000000,880.000000,187.000000,507.000000,169.000000,3.458300,67500.000000 +-116.250000,33.750000,33.000000,278.000000,91.000000,375.000000,81.000000,2.025000,50000.000000 +-116.250000,33.690000,5.000000,1664.000000,444.000000,907.000000,374.000000,2.766700,92900.000000 +-116.260000,33.650000,3.000000,7437.000000,1222.000000,574.000000,302.000000,10.294800,382400.000000 +-116.260000,33.070000,17.000000,934.000000,284.000000,452.000000,184.000000,1.987500,83700.000000 +-116.270000,34.130000,37.000000,452.000000,109.000000,184.000000,59.000000,3.729200,65800.000000 +-116.290000,33.740000,6.000000,12991.000000,2555.000000,4571.000000,1926.000000,4.719500,199300.000000 +-116.290000,33.720000,5.000000,3584.000000,760.000000,1097.000000,470.000000,3.177100,167400.000000 +-116.300000,33.680000,10.000000,2387.000000,481.000000,863.000000,304.000000,2.888200,137500.000000 +-116.310000,34.130000,20.000000,2352.000000,556.000000,1217.000000,481.000000,1.606300,55400.000000 
+-116.310000,33.670000,15.000000,2214.000000,410.000000,1152.000000,350.000000,2.918700,93400.000000 +-116.310000,33.670000,11.000000,4686.000000,851.000000,2466.000000,731.000000,3.333300,91800.000000 +-116.310000,33.660000,7.000000,4497.000000,831.000000,2248.000000,713.000000,3.635400,98000.000000 +-116.320000,34.140000,18.000000,1880.000000,487.000000,994.000000,425.000000,1.690000,54200.000000 +-116.320000,34.100000,10.000000,4256.000000,861.000000,1403.000000,686.000000,2.661800,81000.000000 +-116.320000,33.280000,19.000000,1791.000000,367.000000,327.000000,185.000000,3.362500,100000.000000 +-116.330000,34.150000,13.000000,1808.000000,411.000000,735.000000,320.000000,1.548900,57400.000000 +-116.330000,33.750000,5.000000,19107.000000,3923.000000,2880.000000,1376.000000,4.036000,158500.000000 +-116.330000,33.720000,11.000000,12327.000000,2000.000000,2450.000000,1139.000000,7.438200,353100.000000 +-116.340000,33.360000,24.000000,2746.000000,514.000000,731.000000,295.000000,3.321400,176400.000000 +-116.350000,34.130000,9.000000,1969.000000,406.000000,805.000000,349.000000,1.549100,62300.000000 +-116.350000,32.740000,16.000000,2595.000000,606.000000,1046.000000,367.000000,1.713700,110700.000000 +-116.360000,33.880000,11.000000,12557.000000,3098.000000,2453.000000,1232.000000,1.784400,78500.000000 +-116.370000,33.720000,17.000000,8626.000000,1859.000000,3497.000000,1337.000000,3.312000,121300.000000 +-116.370000,33.690000,7.000000,8806.000000,1542.000000,858.000000,448.000000,7.800500,318100.000000 +-116.370000,33.190000,12.000000,4890.000000,1152.000000,1289.000000,570.000000,2.579500,98700.000000 +-116.380000,34.200000,14.000000,4985.000000,1238.000000,2517.000000,954.000000,2.067400,65000.000000 +-116.380000,34.100000,6.000000,2104.000000,348.000000,841.000000,320.000000,4.145800,116300.000000 +-116.380000,33.740000,7.000000,17579.000000,3479.000000,3581.000000,1820.000000,4.084000,194500.000000 +-116.380000,33.730000,10.000000,11836.000000,2405.000000,3811.000000,1570.000000,4.007900,134500.000000 +-116.380000,33.710000,17.000000,12509.000000,2460.000000,2737.000000,1423.000000,4.555600,258100.000000 +-116.390000,34.150000,15.000000,5583.000000,1149.000000,2709.000000,964.000000,1.977900,73300.000000 +-116.390000,33.820000,15.000000,11115.000000,2257.000000,4122.000000,1653.000000,2.721900,74400.000000 +-116.390000,33.720000,19.000000,7646.000000,1618.000000,2496.000000,1075.000000,3.556900,128000.000000 +-116.390000,33.690000,10.000000,11659.000000,2007.000000,2186.000000,1083.000000,6.983300,238800.000000 +-116.400000,34.120000,16.000000,5648.000000,1089.000000,2524.000000,1008.000000,2.673900,78000.000000 +-116.400000,34.090000,9.000000,4855.000000,872.000000,2098.000000,765.000000,3.272300,97800.000000 +-116.400000,33.780000,8.000000,3059.000000,500.000000,612.000000,208.000000,6.872900,259200.000000 +-116.410000,33.740000,25.000000,2475.000000,476.000000,910.000000,387.000000,3.363900,168800.000000 +-116.410000,33.740000,17.000000,4289.000000,893.000000,958.000000,440.000000,2.465900,177800.000000 +-116.420000,33.790000,12.000000,7095.000000,1260.000000,1179.000000,570.000000,4.944400,285000.000000 +-116.420000,33.760000,14.000000,16921.000000,2837.000000,2524.000000,1262.000000,7.628100,341700.000000 +-116.420000,33.680000,15.000000,3895.000000,782.000000,900.000000,529.000000,2.220800,138300.000000 +-116.420000,33.510000,26.000000,186.000000,48.000000,102.000000,39.000000,2.562500,103100.000000 
+-116.430000,34.100000,17.000000,6683.000000,1296.000000,2677.000000,1227.000000,2.482800,84000.000000 +-116.430000,33.810000,8.000000,6710.000000,1343.000000,2069.000000,781.000000,3.522300,115100.000000 +-116.430000,33.750000,24.000000,2596.000000,438.000000,473.000000,237.000000,3.772700,500001.000000 +-116.440000,34.160000,19.000000,1867.000000,361.000000,758.000000,321.000000,2.892900,98100.000000 +-116.440000,34.120000,18.000000,5584.000000,1303.000000,2250.000000,1158.000000,1.582300,72400.000000 +-116.440000,33.930000,17.000000,5293.000000,1266.000000,1201.000000,599.000000,1.684900,88400.000000 +-116.440000,33.770000,18.000000,4872.000000,1110.000000,955.000000,656.000000,2.243900,97500.000000 +-116.440000,33.740000,5.000000,846.000000,249.000000,117.000000,67.000000,7.988500,403300.000000 +-116.450000,33.800000,9.000000,5534.000000,1206.000000,2283.000000,1008.000000,3.616100,99100.000000 +-116.450000,33.780000,16.000000,5228.000000,992.000000,1177.000000,639.000000,3.085900,134600.000000 +-116.450000,32.650000,22.000000,2680.000000,643.000000,1644.000000,516.000000,2.194900,127100.000000 +-116.460000,33.820000,6.000000,4863.000000,920.000000,3010.000000,828.000000,3.950800,104200.000000 +-116.460000,33.780000,33.000000,2565.000000,745.000000,2301.000000,638.000000,2.547700,83000.000000 +-116.470000,34.070000,22.000000,5473.000000,1234.000000,2581.000000,1098.000000,1.937500,65300.000000 +-116.470000,33.940000,18.000000,2233.000000,471.000000,919.000000,388.000000,3.257800,85200.000000 +-116.470000,33.840000,3.000000,9169.000000,1512.000000,3838.000000,1270.000000,4.311100,142500.000000 +-116.470000,33.810000,7.000000,10105.000000,2481.000000,6274.000000,2095.000000,2.449700,90900.000000 +-116.470000,33.780000,27.000000,1781.000000,441.000000,759.000000,340.000000,3.316200,113600.000000 +-116.480000,33.940000,10.000000,3254.000000,913.000000,923.000000,486.000000,1.800000,81000.000000 +-116.480000,33.790000,14.000000,9425.000000,2020.000000,1711.000000,1000.000000,2.629800,145200.000000 +-116.480000,33.610000,8.000000,1294.000000,272.000000,457.000000,199.000000,2.916700,115300.000000 +-116.490000,33.820000,27.000000,3316.000000,636.000000,2362.000000,532.000000,2.956900,65900.000000 +-116.490000,33.800000,13.000000,8789.000000,1875.000000,1274.000000,688.000000,3.739600,148900.000000 +-116.500000,33.980000,5.000000,4332.000000,868.000000,1420.000000,567.000000,4.041700,146400.000000 +-116.500000,33.950000,10.000000,7249.000000,1882.000000,4274.000000,1621.000000,1.698300,66600.000000 +-116.500000,33.820000,16.000000,343.000000,85.000000,29.000000,14.000000,2.104200,87500.000000 +-116.500000,33.810000,26.000000,5032.000000,1229.000000,3086.000000,1183.000000,2.539900,94800.000000 +-116.500000,33.690000,20.000000,4810.000000,1074.000000,1304.000000,740.000000,2.250000,248100.000000 +-116.510000,34.850000,15.000000,3149.000000,713.000000,1281.000000,486.000000,2.000000,64700.000000 +-116.510000,34.450000,21.000000,8502.000000,2634.000000,2330.000000,991.000000,1.381100,51300.000000 +-116.510000,33.960000,16.000000,4913.000000,1395.000000,2518.000000,1132.000000,1.466500,61100.000000 +-116.510000,33.940000,12.000000,3369.000000,780.000000,1315.000000,584.000000,1.738800,66000.000000 +-116.510000,33.890000,21.000000,1284.000000,306.000000,537.000000,233.000000,1.950000,61000.000000 +-116.520000,33.970000,13.000000,3921.000000,754.000000,1902.000000,665.000000,3.361600,89600.000000 +-116.520000,33.850000,13.000000,7559.000000,1444.000000,3189.000000,1105.000000,3.488600,112500.000000 
+-116.520000,33.840000,17.000000,4465.000000,859.000000,853.000000,445.000000,3.687500,130400.000000 +-116.520000,33.820000,21.000000,10227.000000,2315.000000,3623.000000,1734.000000,2.521200,145200.000000 +-116.520000,32.900000,18.000000,4454.000000,852.000000,1754.000000,656.000000,4.570000,189900.000000 +-116.530000,33.950000,18.000000,2990.000000,648.000000,1280.000000,532.000000,2.625000,68200.000000 +-116.530000,33.880000,5.000000,4423.000000,763.000000,1906.000000,667.000000,4.685500,125200.000000 +-116.530000,33.850000,16.000000,10077.000000,2186.000000,3048.000000,1337.000000,2.964700,110900.000000 +-116.530000,33.840000,28.000000,8399.000000,1839.000000,3470.000000,1340.000000,2.588500,159000.000000 +-116.530000,33.780000,18.000000,2547.000000,463.000000,411.000000,214.000000,2.548900,220500.000000 +-116.540000,33.810000,31.000000,6814.000000,1714.000000,2628.000000,1341.000000,2.117600,124100.000000 +-116.540000,33.810000,24.000000,6087.000000,1217.000000,1721.000000,833.000000,3.149300,199400.000000 +-116.540000,33.800000,22.000000,6050.000000,1387.000000,1432.000000,890.000000,2.221600,183900.000000 +-116.540000,33.790000,18.000000,9374.000000,1780.000000,1678.000000,919.000000,3.973700,235600.000000 +-116.550000,33.840000,28.000000,2992.000000,562.000000,676.000000,346.000000,5.761300,500001.000000 +-116.560000,34.060000,15.000000,6928.000000,1529.000000,2568.000000,1075.000000,2.540500,69600.000000 +-116.560000,33.830000,36.000000,1765.000000,399.000000,451.000000,264.000000,2.608300,321900.000000 +-116.560000,33.050000,15.000000,1985.000000,361.000000,536.000000,209.000000,4.125000,148200.000000 +-116.570000,35.430000,8.000000,9975.000000,1743.000000,6835.000000,1439.000000,2.713800,22500.000000 +-116.570000,34.000000,20.000000,260.000000,67.000000,69.000000,50.000000,3.520800,76600.000000 +-116.570000,33.940000,29.000000,551.000000,166.000000,224.000000,107.000000,1.191700,50000.000000 +-116.570000,33.840000,18.000000,7962.000000,1652.000000,2009.000000,921.000000,3.389700,230200.000000 +-116.570000,33.640000,10.000000,489.000000,82.000000,183.000000,74.000000,6.270200,345500.000000 +-116.580000,32.690000,19.000000,4085.000000,876.000000,2133.000000,718.000000,2.919000,116500.000000 +-116.600000,33.490000,16.000000,3730.000000,827.000000,1346.000000,592.000000,2.183000,113500.000000 +-116.600000,33.060000,23.000000,1731.000000,365.000000,612.000000,258.000000,2.781300,172900.000000 +-116.610000,33.040000,11.000000,2522.000000,538.000000,616.000000,269.000000,3.875000,198100.000000 +-116.620000,34.230000,14.000000,6438.000000,1719.000000,1586.000000,691.000000,1.613600,67400.000000 +-116.620000,32.860000,18.000000,4115.000000,847.000000,2032.000000,745.000000,4.015900,169100.000000 +-116.660000,32.790000,13.000000,843.000000,157.000000,918.000000,152.000000,6.215200,240600.000000 +-116.670000,32.970000,16.000000,349.000000,74.000000,120.000000,43.000000,5.359000,193800.000000 +-116.680000,33.710000,21.000000,3460.000000,711.000000,658.000000,255.000000,3.588200,161100.000000 +-116.680000,33.160000,26.000000,1820.000000,374.000000,1001.000000,324.000000,2.179700,156300.000000 +-116.710000,33.940000,12.000000,549.000000,109.000000,209.000000,90.000000,3.020800,66300.000000 +-116.720000,34.890000,14.000000,4527.000000,875.000000,1640.000000,590.000000,2.859400,81700.000000 +-116.720000,33.560000,13.000000,3166.000000,682.000000,1250.000000,475.000000,2.355000,122900.000000 +-116.740000,33.620000,11.000000,2385.000000,661.000000,682.000000,242.000000,2.914100,214300.000000 
+-116.740000,33.330000,17.000000,4190.000000,946.000000,1802.000000,673.000000,2.474400,158200.000000 +-116.750000,33.830000,16.000000,5277.000000,1070.000000,657.000000,276.000000,3.333300,143400.000000 +-116.750000,32.850000,17.000000,4863.000000,845.000000,2266.000000,769.000000,4.232100,217400.000000 +-116.750000,32.820000,17.000000,3348.000000,481.000000,1222.000000,443.000000,6.636100,308600.000000 +-116.760000,34.290000,14.000000,3959.000000,849.000000,1064.000000,376.000000,2.821400,111400.000000 +-116.760000,34.140000,4.000000,42.000000,10.000000,9.000000,3.000000,0.536000,42500.000000 +-116.760000,33.460000,6.000000,1251.000000,268.000000,544.000000,216.000000,3.069400,173400.000000 +-116.760000,32.840000,16.000000,3311.000000,702.000000,1627.000000,624.000000,3.119600,187200.000000 +-116.760000,32.740000,14.000000,4085.000000,751.000000,2129.000000,688.000000,4.736700,214500.000000 +-116.770000,33.920000,19.000000,2307.000000,525.000000,1266.000000,437.000000,1.687500,63000.000000 +-116.780000,33.000000,7.000000,12480.000000,1946.000000,5102.000000,1697.000000,5.510200,221000.000000 +-116.790000,33.990000,16.000000,319.000000,68.000000,212.000000,67.000000,1.468800,90000.000000 +-116.790000,32.840000,12.000000,4281.000000,786.000000,1891.000000,721.000000,3.576900,231800.000000 +-116.790000,32.610000,19.000000,2652.000000,520.000000,1421.000000,491.000000,3.522700,206100.000000 +-116.800000,33.800000,35.000000,324.000000,63.000000,158.000000,39.000000,3.416700,100000.000000 +-116.800000,32.800000,11.000000,3874.000000,565.000000,1672.000000,546.000000,6.148100,274600.000000 +-116.820000,34.240000,11.000000,5799.000000,1527.000000,713.000000,262.000000,2.514700,84700.000000 +-116.830000,34.250000,15.000000,8948.000000,1985.000000,1316.000000,514.000000,2.737500,90800.000000 +-116.830000,32.830000,6.000000,3123.000000,495.000000,1513.000000,480.000000,5.428800,167800.000000 +-116.840000,33.010000,5.000000,5673.000000,855.000000,2592.000000,797.000000,5.415500,199200.000000 +-116.840000,32.920000,20.000000,1066.000000,219.000000,536.000000,173.000000,3.160700,119300.000000 +-116.840000,32.860000,16.000000,2502.000000,532.000000,1211.000000,494.000000,3.251600,202100.000000 +-116.850000,34.980000,26.000000,3606.000000,792.000000,1683.000000,608.000000,2.658700,57400.000000 +-116.850000,34.250000,5.000000,5806.000000,1030.000000,569.000000,219.000000,4.013200,163100.000000 +-116.850000,32.830000,17.000000,4234.000000,770.000000,2191.000000,725.000000,3.644500,197600.000000 +-116.860000,34.230000,13.000000,4760.000000,938.000000,309.000000,132.000000,5.461800,147800.000000 +-116.860000,33.970000,11.000000,658.000000,131.000000,376.000000,120.000000,2.397700,58000.000000 +-116.860000,33.840000,18.000000,521.000000,118.000000,174.000000,74.000000,2.778800,91100.000000 +-116.860000,33.730000,13.000000,2604.000000,443.000000,978.000000,417.000000,2.933000,170700.000000 +-116.860000,33.050000,17.000000,9044.000000,1689.000000,5030.000000,1596.000000,3.634800,164500.000000 +-116.860000,33.020000,17.000000,401.000000,68.000000,251.000000,69.000000,4.651800,170200.000000 +-116.860000,32.870000,17.000000,5799.000000,921.000000,2630.000000,843.000000,5.052400,285400.000000 +-116.860000,32.800000,19.000000,1747.000000,291.000000,848.000000,290.000000,4.875000,187200.000000 +-116.870000,33.940000,35.000000,4448.000000,906.000000,2736.000000,843.000000,2.218000,73400.000000 +-116.870000,33.930000,32.000000,3141.000000,812.000000,2589.000000,721.000000,1.455600,54600.000000 
+-116.870000,33.760000,5.000000,4116.000000,761.000000,1714.000000,717.000000,2.561200,130800.000000 +-116.870000,33.570000,12.000000,1153.000000,265.000000,446.000000,195.000000,3.038000,128100.000000 +-116.870000,32.750000,15.000000,2053.000000,321.000000,993.000000,309.000000,5.516400,248900.000000 +-116.870000,32.720000,13.000000,3268.000000,491.000000,1431.000000,503.000000,5.765200,259900.000000 +-116.880000,34.250000,11.000000,1089.000000,198.000000,230.000000,90.000000,4.964300,176000.000000 +-116.880000,34.240000,13.000000,4137.000000,796.000000,573.000000,218.000000,4.639400,226500.000000 +-116.880000,34.190000,38.000000,898.000000,259.000000,106.000000,52.000000,1.687500,225000.000000 +-116.880000,34.080000,52.000000,3419.000000,777.000000,710.000000,265.000000,3.902800,128600.000000 +-116.880000,33.930000,37.000000,1495.000000,429.000000,865.000000,342.000000,1.218800,55000.000000 +-116.880000,33.740000,20.000000,3111.000000,623.000000,1000.000000,508.000000,1.598200,140000.000000 +-116.880000,33.050000,11.000000,7217.000000,1583.000000,4197.000000,1502.000000,2.882700,166700.000000 +-116.880000,33.020000,16.000000,3204.000000,541.000000,1818.000000,529.000000,5.259600,171500.000000 +-116.880000,32.860000,9.000000,3049.000000,471.000000,1527.000000,515.000000,5.073300,196600.000000 +-116.890000,33.930000,29.000000,4549.000000,916.000000,2494.000000,912.000000,2.097600,72600.000000 +-116.890000,33.920000,10.000000,2653.000000,621.000000,1967.000000,598.000000,2.664300,81000.000000 +-116.890000,33.860000,2.000000,6900.000000,1238.000000,1950.000000,980.000000,3.041700,146300.000000 +-116.890000,33.750000,23.000000,2719.000000,538.000000,930.000000,485.000000,2.015400,81700.000000 +-116.890000,33.480000,14.000000,1016.000000,219.000000,443.000000,169.000000,2.807100,137500.000000 +-116.890000,32.990000,14.000000,2816.000000,501.000000,1448.000000,452.000000,5.027800,210900.000000 +-116.890000,32.850000,16.000000,1743.000000,333.000000,652.000000,322.000000,2.890600,158300.000000 +-116.890000,32.850000,15.000000,3560.000000,593.000000,1757.000000,574.000000,5.118500,185300.000000 +-116.890000,32.670000,9.000000,2652.000000,393.000000,1355.000000,362.000000,6.257800,293100.000000 +-116.900000,34.690000,10.000000,337.000000,102.000000,108.000000,50.000000,0.499900,55000.000000 +-116.900000,34.520000,20.000000,3481.000000,840.000000,1694.000000,587.000000,1.400000,77700.000000 +-116.900000,34.250000,16.000000,3018.000000,523.000000,556.000000,244.000000,3.528800,189700.000000 +-116.900000,33.980000,30.000000,3915.000000,672.000000,1820.000000,643.000000,3.633900,98600.000000 +-116.900000,33.930000,34.000000,3183.000000,738.000000,1820.000000,647.000000,2.232100,71800.000000 +-116.900000,33.740000,14.000000,2281.000000,426.000000,894.000000,430.000000,2.371200,127900.000000 +-116.900000,33.650000,15.000000,652.000000,149.000000,248.000000,97.000000,2.107100,93800.000000 +-116.900000,33.220000,11.000000,4132.000000,773.000000,2012.000000,703.000000,3.190600,234500.000000 +-116.900000,33.030000,11.000000,3213.000000,634.000000,1975.000000,579.000000,3.475000,167200.000000 +-116.900000,32.900000,19.000000,3090.000000,552.000000,1621.000000,520.000000,4.080600,189200.000000 +-116.900000,32.840000,18.000000,4215.000000,810.000000,2104.000000,773.000000,4.087300,146900.000000 +-116.900000,32.840000,18.000000,3612.000000,737.000000,1864.000000,713.000000,2.706900,153800.000000 +-116.900000,32.790000,21.000000,3770.000000,491.000000,1410.000000,446.000000,6.768500,294700.000000 
+-116.900000,32.770000,8.000000,3600.000000,492.000000,1421.000000,482.000000,6.260900,307100.000000 +-116.910000,34.000000,18.000000,553.000000,100.000000,215.000000,82.000000,5.500000,193800.000000 +-116.910000,33.750000,13.000000,10886.000000,2127.000000,4266.000000,1955.000000,2.316900,123400.000000 +-116.910000,33.710000,19.000000,6807.000000,1164.000000,2703.000000,1055.000000,3.159100,189700.000000 +-116.910000,32.870000,14.000000,3048.000000,597.000000,1690.000000,576.000000,4.381800,147100.000000 +-116.910000,32.860000,15.000000,3153.000000,628.000000,1633.000000,527.000000,3.689800,131000.000000 +-116.910000,32.860000,10.000000,3699.000000,838.000000,2310.000000,759.000000,2.536500,139500.000000 +-116.910000,32.850000,21.000000,4152.000000,703.000000,2255.000000,697.000000,4.509600,159500.000000 +-116.910000,32.830000,16.000000,5203.000000,898.000000,2515.000000,862.000000,4.105000,174400.000000 +-116.910000,32.820000,14.000000,1978.000000,424.000000,1085.000000,387.000000,3.807300,170100.000000 +-116.910000,32.810000,22.000000,4331.000000,637.000000,1952.000000,654.000000,5.483400,232000.000000 +-116.910000,32.800000,32.000000,1943.000000,287.000000,1081.000000,292.000000,5.684600,208800.000000 +-116.910000,32.780000,15.000000,4058.000000,511.000000,1580.000000,473.000000,7.500000,316400.000000 +-116.910000,32.730000,8.000000,4630.000000,624.000000,2048.000000,575.000000,6.474500,300300.000000 +-116.920000,33.630000,18.000000,397.000000,89.000000,239.000000,80.000000,2.812500,143800.000000 +-116.920000,32.850000,23.000000,1378.000000,269.000000,767.000000,266.000000,4.062500,145000.000000 +-116.920000,32.840000,16.000000,4675.000000,834.000000,2188.000000,817.000000,4.667400,178000.000000 +-116.920000,32.820000,34.000000,1765.000000,284.000000,772.000000,282.000000,5.011800,165300.000000 +-116.920000,32.820000,17.000000,2492.000000,494.000000,1278.000000,439.000000,2.887500,155700.000000 +-116.920000,32.820000,16.000000,2784.000000,468.000000,1458.000000,465.000000,4.004800,184600.000000 +-116.920000,32.800000,33.000000,1518.000000,268.000000,857.000000,272.000000,3.558600,160400.000000 +-116.920000,32.780000,21.000000,4192.000000,752.000000,2101.000000,710.000000,4.430600,159100.000000 +-116.920000,32.760000,9.000000,1859.000000,307.000000,947.000000,304.000000,5.920200,181300.000000 +-116.930000,33.930000,13.000000,7804.000000,1594.000000,3297.000000,1469.000000,2.054900,95600.000000 +-116.930000,33.750000,14.000000,6027.000000,1148.000000,3136.000000,1036.000000,2.964000,121500.000000 +-116.930000,33.740000,15.000000,3757.000000,666.000000,1693.000000,654.000000,3.680600,112800.000000 +-116.930000,33.060000,16.000000,3490.000000,545.000000,1628.000000,535.000000,4.883600,239900.000000 +-116.930000,32.870000,17.000000,3722.000000,670.000000,1561.000000,604.000000,3.602700,211900.000000 +-116.930000,32.850000,5.000000,4116.000000,990.000000,2770.000000,905.000000,3.114200,150000.000000 +-116.930000,32.850000,15.000000,3273.000000,895.000000,1872.000000,842.000000,2.538800,119000.000000 +-116.930000,32.830000,21.000000,1283.000000,278.000000,684.000000,289.000000,2.320300,163500.000000 +-116.930000,32.820000,26.000000,4129.000000,714.000000,1820.000000,718.000000,4.258600,171000.000000 +-116.930000,32.810000,18.000000,2447.000000,466.000000,1573.000000,472.000000,2.642900,125400.000000 +-116.930000,32.790000,23.000000,5759.000000,1258.000000,3108.000000,1202.000000,3.092700,140600.000000 
+-116.930000,32.790000,19.000000,3354.000000,699.000000,1948.000000,682.000000,3.019200,142300.000000 +-116.940000,34.400000,20.000000,6541.000000,1401.000000,2631.000000,980.000000,2.133900,78500.000000 +-116.940000,34.240000,27.000000,12342.000000,2630.000000,1300.000000,566.000000,1.998000,153500.000000 +-116.940000,33.770000,14.000000,7240.000000,1410.000000,2708.000000,1240.000000,2.414500,137600.000000 +-116.940000,33.740000,19.000000,2901.000000,445.000000,1414.000000,475.000000,4.640600,118900.000000 +-116.940000,33.730000,17.000000,5160.000000,851.000000,2344.000000,781.000000,3.717500,120000.000000 +-116.940000,32.890000,24.000000,2541.000000,381.000000,1078.000000,372.000000,5.254200,227800.000000 +-116.940000,32.870000,24.000000,2824.000000,441.000000,1480.000000,471.000000,5.261400,177200.000000 +-116.940000,32.840000,32.000000,1607.000000,253.000000,778.000000,262.000000,4.527800,166300.000000 +-116.940000,32.830000,38.000000,1701.000000,317.000000,872.000000,304.000000,3.783100,147800.000000 +-116.940000,32.820000,35.000000,1737.000000,285.000000,826.000000,294.000000,3.241100,159200.000000 +-116.940000,32.800000,28.000000,3042.000000,729.000000,1964.000000,703.000000,2.414100,137500.000000 +-116.940000,32.800000,21.000000,7906.000000,2292.000000,4868.000000,2117.000000,1.893700,109800.000000 +-116.940000,32.780000,17.000000,13559.000000,2656.000000,6990.000000,2533.000000,3.434000,193200.000000 +-116.940000,32.750000,4.000000,14934.000000,2479.000000,6945.000000,2418.000000,5.122100,229700.000000 +-116.950000,33.970000,14.000000,5320.000000,974.000000,1947.000000,843.000000,3.139300,116300.000000 +-116.950000,33.790000,8.000000,10997.000000,2205.000000,5060.000000,1949.000000,2.197900,95300.000000 +-116.950000,33.780000,24.000000,3409.000000,804.000000,1939.000000,739.000000,1.730300,74000.000000 +-116.950000,33.760000,10.000000,6890.000000,1702.000000,3141.000000,1451.000000,1.707900,95900.000000 +-116.950000,33.750000,23.000000,4676.000000,1096.000000,2770.000000,1057.000000,1.784700,109500.000000 +-116.950000,33.750000,19.000000,2238.000000,573.000000,1190.000000,507.000000,2.071400,85800.000000 +-116.950000,33.740000,20.000000,2233.000000,431.000000,1024.000000,399.000000,2.455400,89400.000000 +-116.950000,33.740000,18.000000,1996.000000,405.000000,1270.000000,400.000000,2.708300,91200.000000 +-116.950000,33.730000,21.000000,4587.000000,810.000000,2233.000000,765.000000,3.237100,94500.000000 +-116.950000,33.680000,11.000000,1183.000000,178.000000,543.000000,147.000000,4.479200,173900.000000 +-116.950000,33.310000,16.000000,2921.000000,639.000000,1838.000000,540.000000,2.239300,117000.000000 +-116.950000,32.960000,18.000000,2087.000000,353.000000,992.000000,329.000000,4.500000,222600.000000 +-116.950000,32.840000,31.000000,1307.000000,245.000000,752.000000,231.000000,3.428600,129400.000000 +-116.950000,32.830000,14.000000,12517.000000,2506.000000,6389.000000,2333.000000,3.308100,168700.000000 +-116.950000,32.820000,18.000000,3038.000000,592.000000,1904.000000,595.000000,3.802400,144900.000000 +-116.950000,32.820000,12.000000,5535.000000,1434.000000,3112.000000,1262.000000,2.594900,108300.000000 +-116.950000,32.810000,31.000000,1277.000000,293.000000,698.000000,237.000000,3.110600,147700.000000 +-116.950000,32.810000,15.000000,2619.000000,599.000000,1513.000000,537.000000,2.543000,100000.000000 +-116.950000,32.790000,19.000000,11391.000000,3093.000000,7178.000000,2905.000000,2.032600,123200.000000 
+-116.950000,32.780000,33.000000,2432.000000,443.000000,1147.000000,427.000000,3.390600,138100.000000 +-116.950000,32.780000,20.000000,3425.000000,448.000000,1489.000000,443.000000,6.255200,296400.000000 +-116.950000,32.770000,25.000000,3308.000000,421.000000,1201.000000,414.000000,6.319100,303400.000000 +-116.950000,32.760000,13.000000,5543.000000,857.000000,2074.000000,737.000000,4.952800,266200.000000 +-116.950000,32.730000,17.000000,1635.000000,272.000000,960.000000,279.000000,5.267100,157100.000000 +-116.960000,34.940000,20.000000,2355.000000,467.000000,1198.000000,428.000000,3.993400,88500.000000 +-116.960000,34.830000,30.000000,1211.000000,289.000000,611.000000,230.000000,1.666700,44700.000000 +-116.960000,33.940000,22.000000,1999.000000,497.000000,1304.000000,479.000000,1.406300,81900.000000 +-116.960000,33.790000,21.000000,2990.000000,691.000000,2108.000000,660.000000,2.013500,83000.000000 +-116.960000,33.750000,35.000000,3269.000000,757.000000,2328.000000,705.000000,2.589800,76300.000000 +-116.960000,33.740000,19.000000,3649.000000,755.000000,1717.000000,696.000000,2.211500,87600.000000 +-116.960000,33.730000,20.000000,4735.000000,973.000000,2306.000000,904.000000,3.069000,87000.000000 +-116.960000,32.900000,16.000000,3047.000000,495.000000,1507.000000,499.000000,5.300800,186500.000000 +-116.960000,32.870000,17.000000,4713.000000,740.000000,2531.000000,723.000000,4.828600,158500.000000 +-116.960000,32.860000,14.000000,3064.000000,496.000000,1681.000000,503.000000,4.434700,160300.000000 +-116.960000,32.850000,11.000000,9724.000000,1796.000000,5247.000000,1777.000000,4.171600,166100.000000 +-116.960000,32.810000,8.000000,2378.000000,638.000000,1410.000000,623.000000,2.909700,152500.000000 +-116.960000,32.800000,25.000000,3421.000000,803.000000,1681.000000,742.000000,3.369000,134400.000000 +-116.960000,32.800000,19.000000,4574.000000,1152.000000,3045.000000,1057.000000,2.065000,124100.000000 +-116.960000,32.800000,16.000000,3920.000000,1094.000000,2612.000000,1023.000000,1.329100,120800.000000 +-116.960000,32.790000,35.000000,1081.000000,266.000000,691.000000,259.000000,2.632400,133700.000000 +-116.960000,32.790000,19.000000,3008.000000,693.000000,2341.000000,689.000000,2.608700,123800.000000 +-116.960000,32.790000,17.000000,5236.000000,1325.000000,3308.000000,1233.000000,2.322100,138800.000000 +-116.960000,32.780000,26.000000,2807.000000,630.000000,1785.000000,580.000000,2.163800,132800.000000 +-116.970000,33.960000,12.000000,5876.000000,1222.000000,2992.000000,1151.000000,2.432200,112100.000000 +-116.970000,33.940000,29.000000,3197.000000,632.000000,1722.000000,603.000000,3.043200,91200.000000 +-116.970000,33.930000,29.000000,2793.000000,722.000000,1583.000000,626.000000,1.424000,73200.000000 +-116.970000,33.780000,24.000000,2680.000000,606.000000,1728.000000,527.000000,2.535000,74800.000000 +-116.970000,33.740000,31.000000,2712.000000,628.000000,1519.000000,629.000000,1.942000,86200.000000 +-116.970000,33.740000,20.000000,3674.000000,792.000000,1498.000000,758.000000,2.216100,76900.000000 +-116.970000,33.130000,10.000000,5149.000000,851.000000,2177.000000,783.000000,6.795700,287500.000000 +-116.970000,32.830000,23.000000,149.000000,32.000000,101.000000,34.000000,2.645800,112500.000000 +-116.970000,32.800000,15.000000,3927.000000,1018.000000,2204.000000,977.000000,2.436700,111400.000000 +-116.970000,32.790000,32.000000,1255.000000,338.000000,782.000000,302.000000,2.663500,113600.000000 
+-116.970000,32.790000,19.000000,4108.000000,1101.000000,2971.000000,1006.000000,1.989300,112500.000000 +-116.970000,32.780000,37.000000,1255.000000,238.000000,671.000000,278.000000,3.701900,138600.000000 +-116.970000,32.780000,35.000000,1113.000000,236.000000,681.000000,246.000000,2.978400,136400.000000 +-116.970000,32.780000,26.000000,8902.000000,1413.000000,3941.000000,1387.000000,4.794300,226900.000000 +-116.970000,32.760000,33.000000,3071.000000,466.000000,1348.000000,513.000000,6.176800,228900.000000 +-116.970000,32.760000,26.000000,2460.000000,313.000000,838.000000,299.000000,5.987800,270700.000000 +-116.970000,32.750000,28.000000,3519.000000,583.000000,1720.000000,590.000000,4.797300,186900.000000 +-116.970000,32.740000,14.000000,7410.000000,1344.000000,3597.000000,1274.000000,4.219200,176100.000000 +-116.970000,32.650000,4.000000,16450.000000,2833.000000,7985.000000,2683.000000,5.663100,233400.000000 +-116.970000,32.560000,23.000000,1262.000000,294.000000,5176.000000,275.000000,2.562500,153300.000000 +-116.980000,34.130000,16.000000,2098.000000,449.000000,342.000000,143.000000,4.033300,133900.000000 +-116.980000,34.070000,21.000000,739.000000,125.000000,199.000000,82.000000,4.895800,117500.000000 +-116.980000,34.050000,6.000000,2290.000000,312.000000,957.000000,274.000000,7.270800,316700.000000 +-116.980000,33.940000,27.000000,3459.000000,640.000000,1760.000000,654.000000,3.454500,89800.000000 +-116.980000,33.930000,40.000000,2277.000000,498.000000,1391.000000,453.000000,1.947200,73200.000000 +-116.980000,33.930000,33.000000,376.000000,83.000000,267.000000,88.000000,2.158100,68300.000000 +-116.980000,33.830000,15.000000,2228.000000,472.000000,653.000000,350.000000,2.683000,139300.000000 +-116.980000,33.770000,12.000000,5829.000000,1309.000000,2711.000000,1118.000000,1.970700,107900.000000 +-116.980000,33.740000,25.000000,4952.000000,1062.000000,1589.000000,1024.000000,1.844600,85700.000000 +-116.980000,32.880000,12.000000,7320.000000,1279.000000,4048.000000,1249.000000,4.395200,151700.000000 +-116.980000,32.860000,19.000000,2121.000000,341.000000,1236.000000,353.000000,4.771700,153200.000000 +-116.980000,32.860000,16.000000,7718.000000,1423.000000,4383.000000,1394.000000,4.069300,146400.000000 +-116.980000,32.850000,12.000000,3570.000000,713.000000,3321.000000,666.000000,4.088200,134500.000000 +-116.980000,32.800000,28.000000,5721.000000,1029.000000,2672.000000,1054.000000,3.963000,164400.000000 +-116.980000,32.790000,32.000000,3756.000000,662.000000,1611.000000,598.000000,3.866700,189700.000000 +-116.980000,32.770000,29.000000,3558.000000,447.000000,1097.000000,445.000000,8.093000,379600.000000 +-116.980000,32.750000,25.000000,4137.000000,662.000000,1905.000000,630.000000,4.375000,214000.000000 +-116.980000,32.750000,18.000000,1519.000000,369.000000,802.000000,347.000000,2.988600,170800.000000 +-116.980000,32.740000,24.000000,977.000000,147.000000,454.000000,169.000000,4.928600,173700.000000 +-116.980000,32.740000,16.000000,3361.000000,537.000000,1754.000000,578.000000,5.109800,162300.000000 +-116.980000,32.730000,16.000000,952.000000,143.000000,530.000000,143.000000,5.086400,175300.000000 +-116.980000,32.720000,4.000000,1078.000000,158.000000,571.000000,184.000000,4.666700,223300.000000 +-116.980000,32.710000,18.000000,2355.000000,444.000000,1277.000000,433.000000,3.455100,121400.000000 +-116.990000,34.890000,24.000000,2741.000000,577.000000,1551.000000,522.000000,3.474000,70500.000000 
+-116.990000,34.880000,23.000000,6060.000000,1165.000000,2920.000000,1072.000000,3.152800,69000.000000 +-116.990000,34.300000,29.000000,5055.000000,1036.000000,410.000000,191.000000,3.510400,157100.000000 +-116.990000,33.990000,22.000000,4227.000000,658.000000,1849.000000,619.000000,4.735600,195900.000000 +-116.990000,33.920000,26.000000,503.000000,69.000000,293.000000,59.000000,3.708300,147500.000000 +-116.990000,33.770000,7.000000,10352.000000,2007.000000,3559.000000,1689.000000,2.292500,113100.000000 +-116.990000,33.760000,12.000000,7626.000000,1704.000000,2823.000000,1554.000000,2.172200,69400.000000 +-116.990000,33.750000,18.000000,9601.000000,2401.000000,4002.000000,2106.000000,1.436600,77000.000000 +-116.990000,33.730000,13.000000,16148.000000,3474.000000,6159.000000,3232.000000,1.996100,97800.000000 +-116.990000,33.460000,13.000000,1614.000000,410.000000,846.000000,270.000000,2.830000,43000.000000 +-116.990000,32.960000,17.000000,5509.000000,866.000000,2748.000000,817.000000,4.885400,181300.000000 +-116.990000,32.850000,32.000000,5211.000000,949.000000,3025.000000,948.000000,4.093100,134200.000000 +-116.990000,32.830000,20.000000,6696.000000,1326.000000,3687.000000,1291.000000,3.197900,154600.000000 +-116.990000,32.810000,25.000000,4436.000000,758.000000,1997.000000,738.000000,4.238600,201000.000000 +-116.990000,32.810000,18.000000,10968.000000,1521.000000,4439.000000,1501.000000,6.278700,250000.000000 +-116.990000,32.800000,34.000000,3657.000000,538.000000,1513.000000,562.000000,5.290700,195800.000000 +-116.990000,32.790000,33.000000,2420.000000,393.000000,1003.000000,397.000000,4.065800,165100.000000 +-116.990000,32.790000,26.000000,3623.000000,703.000000,1609.000000,669.000000,3.744000,165800.000000 +-116.990000,32.780000,29.000000,1114.000000,163.000000,385.000000,154.000000,5.433300,222800.000000 +-116.990000,32.770000,35.000000,2306.000000,334.000000,828.000000,310.000000,6.110300,301600.000000 +-116.990000,32.740000,18.000000,3341.000000,611.000000,1952.000000,602.000000,3.984400,215300.000000 +-116.990000,32.740000,17.000000,3101.000000,547.000000,1410.000000,486.000000,3.177100,189900.000000 +-116.990000,32.730000,30.000000,1856.000000,339.000000,1103.000000,379.000000,4.035700,153800.000000 +-116.990000,32.720000,15.000000,825.000000,130.000000,334.000000,131.000000,4.039100,169500.000000 +-116.990000,32.720000,14.000000,1771.000000,301.000000,1046.000000,284.000000,4.775000,143300.000000 +-116.990000,32.720000,13.000000,1330.000000,216.000000,719.000000,215.000000,3.829500,149600.000000 +-116.990000,32.720000,11.000000,1112.000000,164.000000,441.000000,174.000000,4.767900,169500.000000 +-116.990000,32.710000,21.000000,3049.000000,582.000000,2355.000000,585.000000,3.890400,113800.000000 +-116.990000,32.700000,15.000000,3660.000000,622.000000,2629.000000,612.000000,4.044400,150100.000000 +-116.990000,32.640000,15.000000,4331.000000,699.000000,2046.000000,627.000000,3.951900,193500.000000 +-117.000000,34.890000,29.000000,2637.000000,512.000000,1188.000000,446.000000,2.990000,69400.000000 +-117.000000,34.870000,16.000000,6862.000000,1292.000000,3562.000000,1126.000000,3.609100,87200.000000 +-117.000000,33.740000,8.000000,5330.000000,1529.000000,2143.000000,1107.000000,2.110300,94400.000000 +-117.000000,33.290000,17.000000,2073.000000,313.000000,573.000000,221.000000,8.253100,419200.000000 +-117.000000,32.870000,18.000000,11544.000000,1979.000000,6296.000000,1923.000000,4.490400,150400.000000 
+-117.000000,32.850000,24.000000,1888.000000,319.000000,950.000000,319.000000,5.282000,140800.000000 +-117.000000,32.800000,33.000000,1816.000000,325.000000,768.000000,316.000000,4.566200,150300.000000 +-117.000000,32.770000,35.000000,2114.000000,317.000000,881.000000,320.000000,5.500000,241400.000000 +-117.000000,32.770000,30.000000,1802.000000,401.000000,776.000000,386.000000,2.812500,173500.000000 +-117.000000,32.760000,31.000000,2545.000000,373.000000,956.000000,342.000000,4.397700,226800.000000 +-117.000000,32.760000,31.000000,1989.000000,280.000000,805.000000,301.000000,6.564500,189100.000000 +-117.000000,32.740000,17.000000,2357.000000,599.000000,1423.000000,510.000000,1.885600,118800.000000 +-117.000000,32.720000,10.000000,3817.000000,943.000000,2352.000000,875.000000,2.136200,143200.000000 +-117.000000,32.710000,22.000000,2263.000000,441.000000,1395.000000,416.000000,3.725000,123500.000000 +-117.000000,32.700000,23.000000,2785.000000,468.000000,1456.000000,449.000000,4.371400,131000.000000 +-117.000000,32.670000,16.000000,2238.000000,307.000000,1002.000000,303.000000,6.614300,264100.000000 +-117.000000,32.640000,11.000000,3098.000000,490.000000,1391.000000,484.000000,4.979200,170400.000000 +-117.010000,34.900000,36.000000,2181.000000,555.000000,1404.000000,492.000000,2.307700,55500.000000 +-117.010000,34.900000,34.000000,2194.000000,519.000000,1326.000000,515.000000,2.105600,72000.000000 +-117.010000,34.890000,26.000000,2599.000000,498.000000,1332.000000,443.000000,2.719800,70400.000000 +-117.010000,34.010000,15.000000,5592.000000,891.000000,2419.000000,840.000000,4.719300,135200.000000 +-117.010000,33.970000,18.000000,4775.000000,886.000000,1868.000000,836.000000,2.335500,118800.000000 +-117.010000,33.040000,13.000000,4595.000000,567.000000,1643.000000,544.000000,7.768400,362300.000000 +-117.010000,32.990000,8.000000,3372.000000,430.000000,1536.000000,448.000000,8.428400,378300.000000 +-117.010000,32.850000,23.000000,2592.000000,414.000000,1401.000000,431.000000,5.490300,151400.000000 +-117.010000,32.840000,23.000000,1951.000000,395.000000,901.000000,378.000000,3.102300,143300.000000 +-117.010000,32.830000,17.000000,15401.000000,3280.000000,7302.000000,3176.000000,3.306700,121900.000000 +-117.010000,32.810000,26.000000,4499.000000,645.000000,1866.000000,626.000000,5.516000,185100.000000 +-117.010000,32.810000,21.000000,4203.000000,618.000000,1620.000000,600.000000,5.344100,193500.000000 +-117.010000,32.800000,20.000000,2705.000000,545.000000,1198.000000,497.000000,3.715900,168900.000000 +-117.010000,32.800000,17.000000,1558.000000,479.000000,803.000000,431.000000,2.693400,160200.000000 +-117.010000,32.800000,17.000000,1042.000000,210.000000,650.000000,215.000000,3.100000,84200.000000 +-117.010000,32.790000,33.000000,4015.000000,663.000000,1864.000000,664.000000,4.315200,159300.000000 +-117.010000,32.790000,31.000000,3776.000000,815.000000,1886.000000,799.000000,3.442100,155300.000000 +-117.010000,32.780000,20.000000,2616.000000,597.000000,1532.000000,579.000000,2.989600,235600.000000 +-117.010000,32.770000,43.000000,841.000000,192.000000,496.000000,207.000000,3.017900,149300.000000 +-117.010000,32.770000,34.000000,3330.000000,723.000000,1592.000000,656.000000,2.667800,164200.000000 +-117.010000,32.760000,34.000000,3415.000000,608.000000,1464.000000,593.000000,4.054900,223700.000000 +-117.010000,32.760000,22.000000,3626.000000,824.000000,1800.000000,769.000000,2.859400,189600.000000 
+-117.010000,32.750000,34.000000,2105.000000,340.000000,973.000000,357.000000,4.308800,152500.000000 +-117.010000,32.750000,26.000000,4038.000000,706.000000,2065.000000,687.000000,3.954500,178100.000000 +-117.010000,32.740000,31.000000,3473.000000,722.000000,2098.000000,677.000000,2.697300,135200.000000 +-117.010000,32.730000,22.000000,2526.000000,530.000000,1556.000000,529.000000,2.864600,120800.000000 +-117.010000,32.720000,12.000000,2914.000000,734.000000,2104.000000,703.000000,2.306800,132300.000000 +-117.010000,32.710000,20.000000,3506.000000,692.000000,1977.000000,668.000000,2.981000,129100.000000 +-117.010000,32.700000,7.000000,2327.000000,490.000000,1304.000000,445.000000,3.355300,132200.000000 +-117.010000,32.700000,25.000000,2321.000000,398.000000,1434.000000,386.000000,3.534100,120800.000000 +-117.010000,32.670000,17.000000,2319.000000,348.000000,1125.000000,337.000000,5.551000,266900.000000 +-117.010000,32.660000,11.000000,9992.000000,1368.000000,4495.000000,1316.000000,6.966400,293900.000000 +-117.010000,32.630000,7.000000,6483.000000,976.000000,3269.000000,1005.000000,5.735800,221600.000000 +-117.020000,36.400000,19.000000,619.000000,239.000000,490.000000,164.000000,2.100000,14999.000000 +-117.020000,34.900000,37.000000,1199.000000,351.000000,782.000000,296.000000,1.651500,61600.000000 +-117.020000,34.890000,29.000000,3111.000000,661.000000,1530.000000,608.000000,2.828100,69300.000000 +-117.020000,34.030000,19.000000,4415.000000,648.000000,1627.000000,619.000000,4.236100,191600.000000 +-117.020000,33.950000,5.000000,1822.000000,367.000000,798.000000,313.000000,2.878300,105200.000000 +-117.020000,33.810000,10.000000,6317.000000,1335.000000,2625.000000,1094.000000,2.300000,108900.000000 +-117.020000,33.760000,20.000000,1317.000000,203.000000,453.000000,158.000000,2.839300,120700.000000 +-117.020000,33.730000,14.000000,3700.000000,750.000000,1171.000000,695.000000,1.947600,112500.000000 +-117.020000,33.710000,6.000000,8278.000000,1579.000000,3062.000000,1446.000000,3.004300,134700.000000 +-117.020000,33.600000,7.000000,1972.000000,352.000000,964.000000,317.000000,3.244000,337200.000000 +-117.020000,32.950000,25.000000,1909.000000,334.000000,1043.000000,322.000000,3.778400,160100.000000 +-117.020000,32.840000,17.000000,4013.000000,673.000000,2263.000000,661.000000,5.131000,148300.000000 +-117.020000,32.810000,27.000000,1950.000000,317.000000,950.000000,320.000000,4.065600,164000.000000 +-117.020000,32.810000,26.000000,1998.000000,301.000000,874.000000,305.000000,5.454400,180900.000000 +-117.020000,32.810000,14.000000,3173.000000,599.000000,1451.000000,585.000000,3.729200,182200.000000 +-117.020000,32.800000,29.000000,1232.000000,243.000000,665.000000,247.000000,3.650000,168900.000000 +-117.020000,32.800000,27.000000,2369.000000,370.000000,927.000000,374.000000,4.116200,177200.000000 +-117.020000,32.790000,36.000000,2211.000000,384.000000,868.000000,329.000000,4.049100,147900.000000 +-117.020000,32.780000,33.000000,3481.000000,708.000000,1726.000000,719.000000,3.367500,158200.000000 +-117.020000,32.780000,31.000000,2567.000000,507.000000,1198.000000,499.000000,3.465900,163000.000000 +-117.020000,32.760000,40.000000,2523.000000,488.000000,976.000000,470.000000,3.110000,185700.000000 +-117.020000,32.760000,15.000000,1204.000000,326.000000,543.000000,326.000000,1.027800,154200.000000 +-117.020000,32.750000,33.000000,3296.000000,537.000000,1345.000000,556.000000,5.283500,217100.000000 +-117.020000,32.740000,30.000000,4205.000000,772.000000,2012.000000,734.000000,3.500000,144700.000000 
+-117.020000,32.740000,12.000000,3301.000000,963.000000,2000.000000,879.000000,1.859400,119200.000000 +-117.020000,32.730000,22.000000,5201.000000,865.000000,3280.000000,817.000000,4.795200,141400.000000 +-117.020000,32.720000,36.000000,2030.000000,369.000000,1142.000000,357.000000,3.776300,126900.000000 +-117.020000,32.710000,30.000000,3187.000000,592.000000,2082.000000,631.000000,3.538800,118500.000000 +-117.020000,32.700000,22.000000,2756.000000,516.000000,1849.000000,486.000000,4.183700,125400.000000 +-117.020000,32.680000,14.000000,3986.000000,675.000000,2065.000000,702.000000,5.719200,267400.000000 +-117.020000,32.670000,20.000000,1505.000000,184.000000,635.000000,182.000000,6.577200,245200.000000 +-117.020000,32.660000,19.000000,771.000000,103.000000,376.000000,108.000000,6.627200,273600.000000 +-117.020000,32.640000,5.000000,260.000000,41.000000,157.000000,42.000000,6.515100,281700.000000 +-117.020000,32.590000,19.000000,1902.000000,335.000000,1102.000000,313.000000,3.036500,98100.000000 +-117.030000,34.910000,27.000000,2718.000000,583.000000,1472.000000,509.000000,2.825000,76600.000000 +-117.030000,34.870000,7.000000,2245.000000,407.000000,1016.000000,364.000000,3.946400,101500.000000 +-117.030000,34.070000,16.000000,3784.000000,577.000000,1615.000000,525.000000,4.233300,220300.000000 +-117.030000,34.030000,26.000000,3501.000000,664.000000,1860.000000,681.000000,3.040300,113500.000000 +-117.030000,33.890000,6.000000,78.000000,11.000000,27.000000,10.000000,3.125000,187500.000000 +-117.030000,33.320000,14.000000,1088.000000,209.000000,601.000000,193.000000,3.843800,243800.000000 +-117.030000,33.180000,17.000000,5391.000000,886.000000,2732.000000,830.000000,5.177100,212800.000000 +-117.030000,33.120000,25.000000,3142.000000,446.000000,1286.000000,419.000000,5.466300,248300.000000 +-117.030000,32.960000,16.000000,3424.000000,698.000000,1940.000000,645.000000,4.121000,182100.000000 +-117.030000,32.820000,16.000000,1765.000000,289.000000,743.000000,280.000000,4.974400,209700.000000 +-117.030000,32.800000,19.000000,3866.000000,775.000000,1554.000000,703.000000,4.328100,220000.000000 +-117.030000,32.790000,31.000000,2366.000000,383.000000,1077.000000,387.000000,4.299200,174400.000000 +-117.030000,32.790000,26.000000,3859.000000,513.000000,1469.000000,538.000000,5.868300,220500.000000 +-117.030000,32.780000,17.000000,5481.000000,1618.000000,2957.000000,1537.000000,2.570700,171300.000000 +-117.030000,32.770000,34.000000,1796.000000,428.000000,918.000000,424.000000,2.875000,161200.000000 +-117.030000,32.770000,19.000000,4819.000000,1492.000000,2572.000000,1336.000000,2.339300,118200.000000 +-117.030000,32.750000,24.000000,7879.000000,1655.000000,3898.000000,1534.000000,3.089700,187300.000000 +-117.030000,32.740000,37.000000,821.000000,150.000000,404.000000,135.000000,3.012500,130400.000000 +-117.030000,32.740000,35.000000,1878.000000,454.000000,991.000000,409.000000,2.434500,129700.000000 +-117.030000,32.730000,38.000000,3174.000000,606.000000,1557.000000,619.000000,3.586100,123600.000000 +-117.030000,32.730000,34.000000,2061.000000,391.000000,1169.000000,400.000000,3.509600,142000.000000 +-117.030000,32.730000,32.000000,1750.000000,333.000000,997.000000,335.000000,3.478400,154400.000000 +-117.030000,32.720000,38.000000,886.000000,176.000000,505.000000,188.000000,3.593800,125400.000000 +-117.030000,32.720000,37.000000,2192.000000,455.000000,1515.000000,446.000000,3.058800,120600.000000 +-117.030000,32.710000,34.000000,2328.000000,444.000000,1684.000000,429.000000,3.250000,99600.000000 
+-117.030000,32.710000,33.000000,3126.000000,627.000000,2300.000000,623.000000,3.259600,103000.000000 +-117.030000,32.700000,19.000000,2304.000000,572.000000,2010.000000,556.000000,2.286600,109900.000000 +-117.030000,32.690000,8.000000,2460.000000,397.000000,1784.000000,390.000000,4.566200,175500.000000 +-117.030000,32.690000,5.000000,3201.000000,532.000000,2061.000000,536.000000,5.082900,179400.000000 +-117.030000,32.690000,10.000000,901.000000,163.000000,698.000000,167.000000,4.664800,156100.000000 +-117.030000,32.670000,15.000000,5094.000000,818.000000,2118.000000,758.000000,5.350500,266600.000000 +-117.030000,32.650000,14.000000,1111.000000,142.000000,472.000000,145.000000,7.634400,290500.000000 +-117.030000,32.630000,14.000000,2796.000000,476.000000,1466.000000,464.000000,5.248900,213700.000000 +-117.030000,32.630000,13.000000,2087.000000,313.000000,1165.000000,330.000000,5.778900,227700.000000 +-117.030000,32.610000,22.000000,1028.000000,148.000000,523.000000,152.000000,6.008600,166900.000000 +-117.040000,34.890000,37.000000,1884.000000,366.000000,1052.000000,353.000000,3.175000,66800.000000 +-117.040000,34.040000,30.000000,3474.000000,735.000000,1674.000000,691.000000,2.586300,98300.000000 +-117.040000,34.030000,29.000000,3375.000000,795.000000,1760.000000,699.000000,2.702800,92000.000000 +-117.040000,34.020000,24.000000,4663.000000,1213.000000,1851.000000,1116.000000,1.441800,103500.000000 +-117.040000,34.000000,25.000000,3750.000000,781.000000,1594.000000,785.000000,2.416700,104900.000000 +-117.040000,34.000000,21.000000,4624.000000,852.000000,2174.000000,812.000000,3.525500,132100.000000 +-117.040000,33.150000,15.000000,13814.000000,2888.000000,6583.000000,2789.000000,2.824700,150000.000000 +-117.040000,33.090000,16.000000,4677.000000,581.000000,1902.000000,566.000000,6.183400,335600.000000 +-117.040000,33.030000,16.000000,2852.000000,435.000000,1083.000000,448.000000,6.376100,296200.000000 +-117.040000,33.010000,28.000000,922.000000,107.000000,314.000000,97.000000,8.472100,342300.000000 +-117.040000,32.990000,6.000000,9518.000000,1418.000000,4413.000000,1275.000000,6.601200,314900.000000 +-117.040000,32.980000,16.000000,1332.000000,196.000000,640.000000,193.000000,6.022600,192900.000000 +-117.040000,32.970000,13.000000,6711.000000,1256.000000,3683.000000,1220.000000,4.574600,175700.000000 +-117.040000,32.900000,6.000000,6525.000000,826.000000,3146.000000,806.000000,9.285800,436100.000000 +-117.040000,32.830000,8.000000,2205.000000,348.000000,777.000000,341.000000,6.026600,177400.000000 +-117.040000,32.810000,12.000000,2880.000000,406.000000,1381.000000,418.000000,6.541200,254200.000000 +-117.040000,32.800000,25.000000,2504.000000,345.000000,1067.000000,350.000000,5.741600,243400.000000 +-117.040000,32.800000,11.000000,1802.000000,440.000000,630.000000,428.000000,2.033700,146700.000000 +-117.040000,32.790000,23.000000,2491.000000,350.000000,863.000000,348.000000,6.719600,306800.000000 +-117.040000,32.770000,21.000000,1824.000000,447.000000,962.000000,431.000000,2.782600,143800.000000 +-117.040000,32.770000,16.000000,7963.000000,1881.000000,3769.000000,1804.000000,2.962400,144700.000000 +-117.040000,32.760000,43.000000,3171.000000,665.000000,1534.000000,625.000000,3.141000,141400.000000 +-117.040000,32.760000,37.000000,2979.000000,557.000000,1285.000000,564.000000,3.736800,152200.000000 +-117.040000,32.750000,36.000000,2297.000000,418.000000,1070.000000,392.000000,3.519200,144000.000000 
+-117.040000,32.740000,5.000000,2878.000000,785.000000,1727.000000,758.000000,1.717900,132000.000000 +-117.040000,32.740000,33.000000,3880.000000,770.000000,2288.000000,805.000000,3.684800,140700.000000 +-117.040000,32.720000,24.000000,5474.000000,955.000000,3020.000000,904.000000,4.081300,137000.000000 +-117.040000,32.710000,28.000000,5274.000000,991.000000,3727.000000,961.000000,3.570000,109800.000000 +-117.040000,32.690000,9.000000,3417.000000,860.000000,2521.000000,828.000000,3.020000,158900.000000 +-117.040000,32.680000,13.000000,2132.000000,425.000000,1345.000000,432.000000,4.000000,89300.000000 +-117.040000,32.680000,11.000000,1875.000000,357.000000,1014.000000,386.000000,4.375000,115000.000000 +-117.040000,32.670000,14.000000,3464.000000,683.000000,2139.000000,734.000000,4.066800,137500.000000 +-117.040000,32.660000,22.000000,3362.000000,630.000000,1471.000000,612.000000,4.144200,303900.000000 +-117.040000,32.650000,8.000000,8806.000000,1401.000000,3159.000000,1059.000000,4.215500,247800.000000 +-117.040000,32.640000,5.000000,2329.000000,542.000000,1213.000000,514.000000,4.029800,225600.000000 +-117.040000,32.630000,26.000000,2756.000000,422.000000,1166.000000,398.000000,5.135400,181600.000000 +-117.040000,32.630000,26.000000,2074.000000,356.000000,1228.000000,335.000000,4.115400,160200.000000 +-117.040000,32.620000,27.000000,1710.000000,282.000000,1089.000000,297.000000,4.679300,151900.000000 +-117.040000,32.600000,20.000000,8052.000000,1461.000000,5094.000000,1430.000000,4.224100,139800.000000 +-117.040000,32.600000,18.000000,4747.000000,846.000000,3002.000000,872.000000,3.907600,152900.000000 +-117.040000,32.580000,20.000000,2029.000000,357.000000,1497.000000,353.000000,4.008900,132100.000000 +-117.040000,32.580000,14.000000,2355.000000,406.000000,1883.000000,401.000000,5.031100,152100.000000 +-117.040000,32.550000,15.000000,2206.000000,648.000000,2511.000000,648.000000,1.634800,93200.000000 +-117.040000,32.540000,7.000000,938.000000,297.000000,1187.000000,282.000000,1.266700,67500.000000 +-117.050000,34.890000,36.000000,1199.000000,260.000000,665.000000,229.000000,3.706500,62000.000000 +-117.050000,34.040000,23.000000,3967.000000,766.000000,1518.000000,698.000000,2.290000,111800.000000 +-117.050000,34.030000,28.000000,3009.000000,698.000000,1200.000000,626.000000,1.399300,104600.000000 +-117.050000,34.020000,21.000000,3098.000000,646.000000,1351.000000,614.000000,2.598000,106700.000000 +-117.050000,34.010000,27.000000,5484.000000,1205.000000,2645.000000,1131.000000,2.192700,116700.000000 +-117.050000,33.520000,5.000000,3471.000000,530.000000,1541.000000,502.000000,4.808300,347700.000000 +-117.050000,33.290000,17.000000,1800.000000,312.000000,891.000000,281.000000,7.017700,267600.000000 +-117.050000,33.260000,14.000000,3103.000000,569.000000,1704.000000,539.000000,3.764400,264700.000000 +-117.050000,33.140000,16.000000,4552.000000,1166.000000,2737.000000,1051.000000,2.250000,136300.000000 +-117.050000,33.130000,22.000000,2427.000000,390.000000,1099.000000,362.000000,5.232300,167500.000000 +-117.050000,33.130000,20.000000,7746.000000,2035.000000,5370.000000,1838.000000,2.376200,98500.000000 +-117.050000,33.130000,17.000000,2385.000000,372.000000,1118.000000,369.000000,4.281300,169900.000000 +-117.050000,33.110000,18.000000,4393.000000,642.000000,2095.000000,677.000000,5.478600,223500.000000 +-117.050000,33.050000,6.000000,7916.000000,1293.000000,2741.000000,1204.000000,5.643600,278600.000000 
+-117.050000,33.040000,12.000000,1840.000000,254.000000,580.000000,234.000000,6.776900,400000.000000 +-117.050000,33.020000,18.000000,917.000000,121.000000,388.000000,131.000000,6.351700,260100.000000 +-117.050000,33.010000,19.000000,3558.000000,588.000000,1439.000000,578.000000,4.625000,211100.000000 +-117.050000,33.010000,17.000000,3430.000000,425.000000,1468.000000,433.000000,10.618600,429300.000000 +-117.050000,32.970000,17.000000,9911.000000,1436.000000,4763.000000,1414.000000,5.588200,194300.000000 +-117.050000,32.960000,18.000000,3593.000000,661.000000,1992.000000,626.000000,4.829500,165800.000000 +-117.050000,32.950000,17.000000,4814.000000,1091.000000,3013.000000,1078.000000,3.236900,167800.000000 +-117.050000,32.950000,17.000000,3039.000000,555.000000,1297.000000,552.000000,3.953100,178600.000000 +-117.050000,32.820000,16.000000,4046.000000,731.000000,1684.000000,701.000000,4.231200,197000.000000 +-117.050000,32.810000,17.000000,1885.000000,292.000000,771.000000,301.000000,5.640200,228600.000000 +-117.050000,32.800000,25.000000,1905.000000,250.000000,865.000000,253.000000,6.479700,249000.000000 +-117.050000,32.800000,23.000000,3309.000000,401.000000,1116.000000,386.000000,7.916000,330600.000000 +-117.050000,32.800000,17.000000,1475.000000,308.000000,549.000000,293.000000,3.716700,180400.000000 +-117.050000,32.800000,16.000000,1561.000000,378.000000,574.000000,350.000000,3.003500,94600.000000 +-117.050000,32.780000,37.000000,1184.000000,178.000000,529.000000,192.000000,4.794100,161700.000000 +-117.050000,32.770000,33.000000,3535.000000,683.000000,1568.000000,672.000000,2.809700,158300.000000 +-117.050000,32.770000,23.000000,2556.000000,662.000000,1200.000000,548.000000,1.889900,147700.000000 +-117.050000,32.760000,46.000000,1887.000000,359.000000,795.000000,358.000000,3.250000,159600.000000 +-117.050000,32.750000,43.000000,1718.000000,344.000000,826.000000,336.000000,2.701400,133700.000000 +-117.050000,32.750000,36.000000,2024.000000,408.000000,1030.000000,390.000000,3.823300,139800.000000 +-117.050000,32.750000,35.000000,2144.000000,388.000000,1003.000000,383.000000,3.093800,137300.000000 +-117.050000,32.750000,29.000000,2767.000000,612.000000,1437.000000,587.000000,2.830600,142900.000000 +-117.050000,32.740000,34.000000,2178.000000,455.000000,1193.000000,446.000000,3.171900,115300.000000 +-117.050000,32.730000,27.000000,3184.000000,588.000000,1763.000000,571.000000,3.552900,133900.000000 +-117.050000,32.720000,35.000000,3669.000000,617.000000,1694.000000,585.000000,3.948500,133900.000000 +-117.050000,32.720000,35.000000,1777.000000,369.000000,1158.000000,353.000000,3.410700,117000.000000 +-117.050000,32.690000,8.000000,831.000000,158.000000,740.000000,154.000000,5.390800,165500.000000 +-117.050000,32.690000,21.000000,991.000000,210.000000,695.000000,203.000000,3.625000,144300.000000 +-117.050000,32.690000,14.000000,1689.000000,555.000000,1319.000000,527.000000,3.160000,143800.000000 +-117.050000,32.680000,35.000000,3414.000000,580.000000,1761.000000,522.000000,3.992200,129800.000000 +-117.050000,32.680000,19.000000,1469.000000,275.000000,1010.000000,292.000000,3.566400,150400.000000 +-117.050000,32.680000,15.000000,1828.000000,359.000000,955.000000,248.000000,3.217400,165100.000000 +-117.050000,32.670000,32.000000,4227.000000,785.000000,2842.000000,795.000000,3.964600,137800.000000 +-117.050000,32.670000,16.000000,2168.000000,343.000000,1589.000000,338.000000,5.486300,153800.000000 
+-117.050000,32.630000,31.000000,4911.000000,861.000000,2334.000000,843.000000,4.195800,160100.000000 +-117.050000,32.620000,34.000000,3928.000000,686.000000,2315.000000,681.000000,4.285100,144500.000000 +-117.050000,32.610000,26.000000,1563.000000,286.000000,1145.000000,313.000000,3.861500,139300.000000 +-117.050000,32.610000,21.000000,6034.000000,1205.000000,3795.000000,1146.000000,3.263300,129700.000000 +-117.050000,32.590000,26.000000,1919.000000,345.000000,1326.000000,341.000000,4.267900,131900.000000 +-117.050000,32.590000,16.000000,4683.000000,929.000000,3073.000000,865.000000,3.049500,98300.000000 +-117.050000,32.580000,25.000000,2185.000000,370.000000,1558.000000,369.000000,5.307200,132700.000000 +-117.050000,32.580000,22.000000,2101.000000,399.000000,1551.000000,371.000000,4.151800,136900.000000 +-117.050000,32.560000,22.000000,2172.000000,563.000000,2049.000000,524.000000,2.015900,139300.000000 +-117.050000,32.560000,18.000000,1215.000000,320.000000,1195.000000,349.000000,1.987500,114900.000000 +-117.050000,32.560000,17.000000,985.000000,233.000000,811.000000,223.000000,2.875000,134500.000000 +-117.060000,34.900000,36.000000,2828.000000,916.000000,1762.000000,736.000000,1.431800,59600.000000 +-117.060000,34.170000,21.000000,2520.000000,582.000000,416.000000,151.000000,2.712000,89000.000000 +-117.060000,34.030000,27.000000,1945.000000,446.000000,859.000000,418.000000,1.520300,126200.000000 +-117.060000,34.020000,24.000000,3912.000000,809.000000,1926.000000,762.000000,2.687500,116300.000000 +-117.060000,34.000000,33.000000,1575.000000,326.000000,879.000000,282.000000,2.535700,94400.000000 +-117.060000,33.780000,17.000000,2813.000000,565.000000,1345.000000,488.000000,2.584700,145300.000000 +-117.060000,33.170000,4.000000,5465.000000,974.000000,2844.000000,950.000000,4.447700,174800.000000 +-117.060000,33.150000,24.000000,2155.000000,379.000000,1158.000000,360.000000,4.794100,147500.000000 +-117.060000,33.130000,12.000000,8742.000000,2114.000000,4854.000000,1957.000000,2.801500,143500.000000 +-117.060000,33.090000,11.000000,7483.000000,1276.000000,3516.000000,1261.000000,4.048400,262500.000000 +-117.060000,33.040000,17.000000,1785.000000,255.000000,667.000000,277.000000,5.738200,278000.000000 +-117.060000,33.030000,23.000000,2023.000000,309.000000,678.000000,340.000000,7.091300,265400.000000 +-117.060000,33.020000,24.000000,830.000000,190.000000,279.000000,196.000000,1.917600,121100.000000 +-117.060000,33.020000,17.000000,2635.000000,389.000000,994.000000,359.000000,5.896600,261500.000000 +-117.060000,33.010000,9.000000,2470.000000,417.000000,904.000000,427.000000,4.421900,209200.000000 +-117.060000,32.990000,16.000000,1306.000000,196.000000,713.000000,222.000000,6.268300,180700.000000 +-117.060000,32.970000,17.000000,4754.000000,877.000000,2412.000000,832.000000,4.354800,192300.000000 +-117.060000,32.810000,17.000000,3939.000000,550.000000,1694.000000,553.000000,6.792700,234700.000000 +-117.060000,32.800000,17.000000,2247.000000,340.000000,973.000000,318.000000,5.500000,222000.000000 +-117.060000,32.790000,21.000000,3787.000000,492.000000,1246.000000,457.000000,9.602300,391300.000000 +-117.060000,32.790000,17.000000,2524.000000,332.000000,771.000000,317.000000,8.760400,331800.000000 +-117.060000,32.770000,34.000000,1730.000000,373.000000,730.000000,350.000000,2.028400,161800.000000 +-117.060000,32.770000,18.000000,2269.000000,682.000000,1329.000000,581.000000,1.795100,161800.000000 +-117.060000,32.760000,38.000000,1549.000000,288.000000,636.000000,278.000000,3.218800,150500.000000 
+-117.060000,32.760000,24.000000,1629.000000,587.000000,1012.000000,488.000000,1.745200,156800.000000 +-117.060000,32.750000,34.000000,2516.000000,611.000000,1317.000000,594.000000,2.230800,125900.000000 +-117.060000,32.750000,34.000000,1917.000000,419.000000,1181.000000,426.000000,3.020800,129200.000000 +-117.060000,32.730000,33.000000,3444.000000,619.000000,1884.000000,582.000000,3.789100,126700.000000 +-117.060000,32.720000,31.000000,2669.000000,514.000000,1626.000000,499.000000,3.192300,116900.000000 +-117.060000,32.710000,21.000000,1864.000000,388.000000,1498.000000,389.000000,3.819400,125700.000000 +-117.060000,32.710000,11.000000,2397.000000,523.000000,1566.000000,514.000000,3.868700,145200.000000 +-117.060000,32.690000,9.000000,521.000000,111.000000,491.000000,110.000000,5.130500,158900.000000 +-117.060000,32.690000,9.000000,1520.000000,269.000000,1250.000000,265.000000,4.887500,157700.000000 +-117.060000,32.690000,13.000000,705.000000,149.000000,718.000000,155.000000,4.437500,154900.000000 +-117.060000,32.680000,41.000000,2665.000000,515.000000,1664.000000,512.000000,2.375000,113500.000000 +-117.060000,32.680000,38.000000,1481.000000,317.000000,1080.000000,291.000000,2.850000,125800.000000 +-117.060000,32.680000,36.000000,3815.000000,796.000000,2945.000000,728.000000,2.095900,125000.000000 +-117.060000,32.670000,29.000000,4047.000000,754.000000,2353.000000,730.000000,4.050500,125000.000000 +-117.060000,32.660000,33.000000,3425.000000,511.000000,1528.000000,479.000000,5.688900,234600.000000 +-117.060000,32.640000,30.000000,4494.000000,667.000000,1883.000000,680.000000,5.766000,186100.000000 +-117.060000,32.630000,37.000000,1326.000000,234.000000,612.000000,240.000000,4.125000,160200.000000 +-117.060000,32.630000,29.000000,4168.000000,742.000000,2096.000000,713.000000,4.220400,169800.000000 +-117.060000,32.620000,36.000000,786.000000,125.000000,408.000000,138.000000,3.916700,189700.000000 +-117.060000,32.610000,34.000000,4325.000000,1015.000000,2609.000000,979.000000,2.848900,128300.000000 +-117.060000,32.600000,33.000000,905.000000,205.000000,989.000000,222.000000,2.701400,108200.000000 +-117.060000,32.600000,25.000000,1075.000000,238.000000,434.000000,234.000000,1.747200,94600.000000 +-117.060000,32.600000,24.000000,1088.000000,268.000000,1095.000000,246.000000,2.419100,107300.000000 +-117.060000,32.580000,17.000000,2724.000000,567.000000,2213.000000,554.000000,3.852900,147700.000000 +-117.060000,32.580000,13.000000,3435.000000,708.000000,1761.000000,699.000000,3.479200,107600.000000 +-117.060000,32.580000,11.000000,2879.000000,679.000000,2098.000000,673.000000,3.512500,142400.000000 +-117.060000,32.570000,25.000000,1268.000000,282.000000,991.000000,299.000000,3.028400,123600.000000 +-117.060000,32.570000,18.000000,1384.000000,311.000000,1429.000000,287.000000,1.336200,95000.000000 +-117.060000,32.570000,17.000000,2252.000000,378.000000,1776.000000,365.000000,4.636400,141100.000000 +-117.060000,32.570000,16.000000,1269.000000,282.000000,1609.000000,298.000000,2.698500,156500.000000 +-117.060000,32.560000,5.000000,2706.000000,925.000000,3148.000000,855.000000,1.730100,125000.000000 +-117.060000,32.560000,17.000000,2803.000000,683.000000,2768.000000,676.000000,1.795800,140400.000000 +-117.060000,32.550000,5.000000,3223.000000,940.000000,3284.000000,854.000000,1.438400,108800.000000 +-117.070000,34.240000,21.000000,4773.000000,1047.000000,337.000000,130.000000,3.937500,115000.000000 +-117.070000,33.720000,16.000000,4928.000000,960.000000,2132.000000,853.000000,2.798300,112500.000000 
+-117.070000,33.150000,17.000000,1893.000000,297.000000,936.000000,287.000000,5.184200,157700.000000 +-117.070000,33.150000,15.000000,2994.000000,522.000000,1231.000000,503.000000,3.202400,180400.000000 +-117.070000,33.140000,16.000000,2546.000000,429.000000,1683.000000,408.000000,4.742600,160600.000000 +-117.070000,33.130000,33.000000,555.000000,165.000000,612.000000,176.000000,2.178600,137500.000000 +-117.070000,33.130000,17.000000,6817.000000,1632.000000,4526.000000,1474.000000,2.615200,135300.000000 +-117.070000,33.120000,32.000000,2474.000000,499.000000,1224.000000,461.000000,2.721600,146300.000000 +-117.070000,33.120000,21.000000,4578.000000,927.000000,2818.000000,900.000000,3.145800,187700.000000 +-117.070000,33.120000,12.000000,2453.000000,599.000000,1251.000000,529.000000,2.412200,127000.000000 +-117.070000,33.110000,31.000000,2055.000000,473.000000,1326.000000,427.000000,3.091500,139900.000000 +-117.070000,33.110000,17.000000,5565.000000,1237.000000,3004.000000,1139.000000,3.005400,142300.000000 +-117.070000,33.070000,8.000000,2756.000000,343.000000,1045.000000,340.000000,8.595700,444100.000000 +-117.070000,33.040000,4.000000,2271.000000,578.000000,926.000000,391.000000,3.643700,210100.000000 +-117.070000,33.030000,15.000000,1095.000000,158.000000,361.000000,176.000000,6.809900,328200.000000 +-117.070000,33.030000,14.000000,6665.000000,1231.000000,2026.000000,1001.000000,5.090000,268500.000000 +-117.070000,33.020000,17.000000,2863.000000,665.000000,715.000000,467.000000,2.604800,148200.000000 +-117.070000,33.010000,5.000000,5870.000000,977.000000,1917.000000,842.000000,5.199800,294100.000000 +-117.070000,33.010000,25.000000,2120.000000,381.000000,588.000000,359.000000,3.118700,169400.000000 +-117.070000,33.000000,4.000000,9153.000000,1866.000000,3775.000000,1698.000000,4.955000,241500.000000 +-117.070000,33.000000,4.000000,6242.000000,1258.000000,2211.000000,1116.000000,4.250000,281600.000000 +-117.070000,32.910000,5.000000,2234.000000,256.000000,894.000000,253.000000,10.335400,477600.000000 +-117.070000,32.800000,36.000000,2028.000000,349.000000,820.000000,352.000000,3.982800,168900.000000 +-117.070000,32.800000,31.000000,2550.000000,395.000000,1017.000000,405.000000,5.148800,181000.000000 +-117.070000,32.800000,23.000000,2698.000000,410.000000,1094.000000,411.000000,5.178200,195100.000000 +-117.070000,32.780000,26.000000,3725.000000,623.000000,1516.000000,627.000000,4.714300,268300.000000 +-117.070000,32.780000,22.000000,922.000000,240.000000,1524.000000,235.000000,1.681500,218800.000000 +-117.070000,32.770000,38.000000,3779.000000,614.000000,1495.000000,614.000000,4.352900,184000.000000 +-117.070000,32.770000,38.000000,1130.000000,228.000000,699.000000,241.000000,2.650000,167600.000000 +-117.070000,32.770000,34.000000,2245.000000,394.000000,1849.000000,429.000000,3.544600,185500.000000 +-117.070000,32.760000,42.000000,1827.000000,378.000000,880.000000,380.000000,2.512500,176600.000000 +-117.070000,32.760000,14.000000,2523.000000,545.000000,1297.000000,525.000000,2.388600,138100.000000 +-117.070000,32.750000,9.000000,3464.000000,749.000000,1687.000000,645.000000,3.302600,119100.000000 +-117.070000,32.750000,37.000000,2690.000000,549.000000,1219.000000,524.000000,2.314800,154200.000000 +-117.070000,32.750000,31.000000,2036.000000,501.000000,1263.000000,442.000000,2.558300,120700.000000 +-117.070000,32.750000,14.000000,3073.000000,851.000000,2000.000000,782.000000,2.382400,144700.000000 
+-117.070000,32.740000,37.000000,1042.000000,205.000000,589.000000,208.000000,2.662900,116900.000000 +-117.070000,32.730000,18.000000,2968.000000,656.000000,1149.000000,581.000000,2.645200,154200.000000 +-117.070000,32.720000,18.000000,1758.000000,286.000000,987.000000,277.000000,4.687500,141800.000000 +-117.070000,32.710000,39.000000,2754.000000,652.000000,2263.000000,619.000000,2.245400,89600.000000 +-117.070000,32.710000,36.000000,2448.000000,475.000000,1268.000000,450.000000,2.568200,109100.000000 +-117.070000,32.710000,26.000000,4151.000000,823.000000,2822.000000,697.000000,2.837200,123400.000000 +-117.070000,32.700000,14.000000,2763.000000,456.000000,1914.000000,465.000000,4.164500,143000.000000 +-117.070000,32.690000,29.000000,1429.000000,293.000000,1091.000000,317.000000,3.460900,118000.000000 +-117.070000,32.690000,28.000000,1485.000000,275.000000,820.000000,283.000000,4.069000,153300.000000 +-117.070000,32.690000,20.000000,2192.000000,406.000000,1766.000000,393.000000,4.092100,135000.000000 +-117.070000,32.680000,18.000000,1475.000000,267.000000,1149.000000,268.000000,5.082700,142200.000000 +-117.070000,32.670000,35.000000,3200.000000,725.000000,1723.000000,610.000000,1.897700,95600.000000 +-117.070000,32.670000,28.000000,2758.000000,623.000000,2179.000000,631.000000,2.381400,112300.000000 +-117.070000,32.650000,12.000000,4131.000000,891.000000,2272.000000,840.000000,3.470100,204900.000000 +-117.070000,32.640000,38.000000,1486.000000,269.000000,745.000000,295.000000,4.647700,150400.000000 +-117.070000,32.640000,32.000000,5135.000000,1025.000000,2152.000000,944.000000,4.132500,172800.000000 +-117.070000,32.640000,30.000000,2873.000000,774.000000,1593.000000,731.000000,2.240000,129500.000000 +-117.070000,32.630000,40.000000,1706.000000,322.000000,796.000000,303.000000,3.558300,154900.000000 +-117.070000,32.630000,37.000000,2372.000000,444.000000,1056.000000,419.000000,3.758300,145500.000000 +-117.070000,32.630000,37.000000,2303.000000,379.000000,1026.000000,357.000000,3.455000,156900.000000 +-117.070000,32.620000,19.000000,5016.000000,1173.000000,2750.000000,1081.000000,2.783800,155900.000000 +-117.070000,32.610000,22.000000,5016.000000,1331.000000,3222.000000,1196.000000,2.144100,135500.000000 +-117.070000,32.610000,10.000000,1686.000000,414.000000,1000.000000,391.000000,2.176500,128400.000000 +-117.070000,32.600000,18.000000,2602.000000,551.000000,1042.000000,550.000000,1.926700,67500.000000 +-117.070000,32.590000,21.000000,1779.000000,466.000000,1327.000000,488.000000,1.600700,96200.000000 +-117.070000,32.570000,17.000000,2961.000000,634.000000,1911.000000,615.000000,2.585900,131400.000000 +-117.070000,32.570000,14.000000,1527.000000,357.000000,1224.000000,363.000000,2.736100,93600.000000 +-117.070000,32.560000,9.000000,3648.000000,895.000000,3293.000000,840.000000,3.099200,142600.000000 +-117.080000,34.960000,28.000000,1777.000000,307.000000,721.000000,259.000000,3.634300,79800.000000 +-117.080000,34.080000,34.000000,45.000000,11.000000,39.000000,14.000000,3.062500,500001.000000 +-117.080000,34.030000,23.000000,3862.000000,699.000000,2082.000000,652.000000,3.154000,115700.000000 +-117.080000,34.020000,20.000000,3111.000000,563.000000,1453.000000,538.000000,3.336500,122800.000000 +-117.080000,33.820000,6.000000,1771.000000,293.000000,935.000000,279.000000,4.065000,148200.000000 +-117.080000,33.230000,14.000000,3337.000000,571.000000,1385.000000,512.000000,4.150000,272200.000000 +-117.080000,33.140000,19.000000,2629.000000,494.000000,1444.000000,503.000000,3.546200,156800.000000 
+-117.080000,33.140000,15.000000,1497.000000,250.000000,827.000000,239.000000,4.384600,154200.000000 +-117.080000,33.140000,11.000000,1430.000000,292.000000,921.000000,294.000000,4.235700,160900.000000 +-117.080000,33.130000,17.000000,8466.000000,2628.000000,7014.000000,2267.000000,2.143700,113700.000000 +-117.080000,33.120000,37.000000,1060.000000,268.000000,823.000000,229.000000,1.836300,145500.000000 +-117.080000,33.120000,33.000000,674.000000,208.000000,565.000000,188.000000,1.875000,114300.000000 +-117.080000,33.110000,31.000000,1832.000000,444.000000,1669.000000,463.000000,2.214600,116700.000000 +-117.080000,33.110000,31.000000,1356.000000,324.000000,1301.000000,331.000000,2.533100,115100.000000 +-117.080000,33.110000,28.000000,2094.000000,585.000000,1556.000000,563.000000,2.200000,127700.000000 +-117.080000,33.090000,23.000000,3792.000000,624.000000,1988.000000,658.000000,4.756600,178300.000000 +-117.080000,33.080000,23.000000,3400.000000,501.000000,1383.000000,488.000000,4.984400,249100.000000 +-117.080000,33.040000,10.000000,2577.000000,347.000000,1193.000000,365.000000,6.530000,264100.000000 +-117.080000,33.030000,18.000000,1339.000000,284.000000,761.000000,290.000000,5.307400,137200.000000 +-117.080000,33.030000,17.000000,987.000000,142.000000,463.000000,152.000000,5.874700,229300.000000 +-117.080000,33.030000,15.000000,3023.000000,623.000000,1283.000000,559.000000,3.372400,137900.000000 +-117.080000,33.030000,10.000000,2296.000000,450.000000,818.000000,405.000000,4.342400,160600.000000 +-117.080000,33.010000,5.000000,5659.000000,931.000000,2565.000000,902.000000,6.194900,238700.000000 +-117.080000,32.970000,3.000000,17466.000000,3336.000000,7644.000000,2895.000000,5.458400,246500.000000 +-117.080000,32.930000,5.000000,14944.000000,2490.000000,6600.000000,2407.000000,6.085700,308300.000000 +-117.080000,32.910000,9.000000,1547.000000,218.000000,683.000000,231.000000,7.560400,327900.000000 +-117.080000,32.910000,16.000000,1653.000000,228.000000,690.000000,224.000000,6.585300,248400.000000 +-117.080000,32.830000,7.000000,13703.000000,2352.000000,4446.000000,1856.000000,6.433500,260600.000000 +-117.080000,32.820000,10.000000,5177.000000,856.000000,2190.000000,816.000000,5.973400,271700.000000 +-117.080000,32.800000,25.000000,2963.000000,552.000000,1162.000000,556.000000,3.625000,184500.000000 +-117.080000,32.780000,21.000000,2919.000000,496.000000,984.000000,443.000000,4.625000,222800.000000 +-117.080000,32.770000,31.000000,1070.000000,155.000000,426.000000,153.000000,6.162800,219200.000000 +-117.080000,32.770000,25.000000,3911.000000,849.000000,1580.000000,767.000000,2.777800,184100.000000 +-117.080000,32.760000,27.000000,1221.000000,254.000000,606.000000,259.000000,3.083300,155400.000000 +-117.080000,32.760000,20.000000,2547.000000,785.000000,1199.000000,643.000000,1.774300,140300.000000 +-117.080000,32.760000,18.000000,1704.000000,596.000000,1639.000000,548.000000,1.739100,125000.000000 +-117.080000,32.750000,20.000000,1989.000000,508.000000,1452.000000,462.000000,2.007700,118300.000000 +-117.080000,32.750000,16.000000,1111.000000,328.000000,930.000000,303.000000,1.234700,128100.000000 +-117.080000,32.750000,15.000000,1821.000000,516.000000,1385.000000,439.000000,2.510100,95300.000000 +-117.080000,32.740000,33.000000,3260.000000,673.000000,1784.000000,666.000000,3.507800,126500.000000 +-117.080000,32.740000,26.000000,2359.000000,622.000000,2067.000000,581.000000,1.810300,124700.000000 
+-117.080000,32.730000,36.000000,3331.000000,643.000000,1903.000000,622.000000,3.697400,122000.000000 +-117.080000,32.730000,36.000000,1158.000000,218.000000,619.000000,233.000000,3.612500,122500.000000 +-117.080000,32.730000,19.000000,2935.000000,763.000000,1953.000000,720.000000,1.425400,111300.000000 +-117.080000,32.710000,27.000000,2204.000000,598.000000,1656.000000,521.000000,1.482100,86200.000000 +-117.080000,32.700000,37.000000,2176.000000,418.000000,1301.000000,375.000000,2.875000,98900.000000 +-117.080000,32.700000,36.000000,2103.000000,390.000000,1279.000000,392.000000,2.413500,97000.000000 +-117.080000,32.690000,36.000000,1571.000000,284.000000,1001.000000,268.000000,3.687500,111400.000000 +-117.080000,32.690000,31.000000,2558.000000,487.000000,1938.000000,492.000000,3.487500,117000.000000 +-117.080000,32.680000,26.000000,3071.000000,615.000000,2156.000000,568.000000,2.931800,112400.000000 +-117.080000,32.680000,19.000000,3635.000000,1078.000000,3127.000000,1098.000000,1.324000,122600.000000 +-117.080000,32.670000,31.000000,3008.000000,764.000000,2088.000000,757.000000,2.566200,118200.000000 +-117.080000,32.660000,43.000000,1004.000000,236.000000,839.000000,235.000000,2.810000,103400.000000 +-117.080000,32.650000,28.000000,2296.000000,603.000000,1277.000000,550.000000,2.356200,123800.000000 +-117.080000,32.650000,17.000000,2633.000000,712.000000,1487.000000,694.000000,2.539200,147000.000000 +-117.080000,32.640000,38.000000,917.000000,256.000000,494.000000,233.000000,1.924100,150000.000000 +-117.080000,32.640000,11.000000,1651.000000,533.000000,947.000000,515.000000,1.680600,141700.000000 +-117.080000,32.630000,33.000000,2891.000000,793.000000,1607.000000,754.000000,2.128100,139800.000000 +-117.080000,32.630000,30.000000,2504.000000,559.000000,1827.000000,490.000000,2.614600,159400.000000 +-117.080000,32.630000,28.000000,2080.000000,427.000000,1266.000000,434.000000,2.278800,146300.000000 +-117.080000,32.620000,36.000000,1674.000000,309.000000,818.000000,307.000000,3.477300,150400.000000 +-117.080000,32.620000,28.000000,2468.000000,506.000000,1353.000000,522.000000,3.077100,158600.000000 +-117.080000,32.620000,16.000000,5192.000000,1381.000000,3261.000000,1321.000000,2.268500,151900.000000 +-117.080000,32.610000,27.000000,2264.000000,525.000000,1485.000000,468.000000,3.351400,149100.000000 +-117.080000,32.600000,24.000000,1901.000000,490.000000,1334.000000,476.000000,2.254400,121900.000000 +-117.080000,32.590000,8.000000,2888.000000,662.000000,2441.000000,683.000000,2.704800,153000.000000 +-117.080000,32.590000,30.000000,144.000000,52.000000,220.000000,48.000000,2.392900,134400.000000 +-117.080000,32.580000,22.000000,2128.000000,477.000000,1420.000000,450.000000,3.268700,131000.000000 +-117.080000,32.570000,9.000000,6298.000000,1512.000000,4451.000000,1456.000000,2.569000,88300.000000 +-117.090000,34.220000,16.000000,1347.000000,327.000000,271.000000,91.000000,4.000000,87500.000000 +-117.090000,34.070000,24.000000,6260.000000,1271.000000,3132.000000,1189.000000,2.515600,103000.000000 +-117.090000,33.710000,13.000000,1974.000000,426.000000,1276.000000,408.000000,1.972000,90500.000000 +-117.090000,33.150000,13.000000,3958.000000,865.000000,1981.000000,840.000000,3.476400,137500.000000 +-117.090000,33.130000,9.000000,5685.000000,1442.000000,3773.000000,1250.000000,3.042600,129900.000000 +-117.090000,33.120000,11.000000,567.000000,184.000000,620.000000,163.000000,2.528400,122500.000000 
+-117.090000,33.110000,32.000000,1713.000000,321.000000,891.000000,286.000000,3.142900,171600.000000 +-117.090000,33.100000,21.000000,2876.000000,539.000000,1387.000000,499.000000,3.829200,177000.000000 +-117.090000,32.990000,18.000000,3215.000000,588.000000,1618.000000,509.000000,4.602800,216800.000000 +-117.090000,32.990000,16.000000,2175.000000,327.000000,1037.000000,326.000000,5.190900,201400.000000 +-117.090000,32.980000,23.000000,1125.000000,273.000000,687.000000,308.000000,2.318200,268800.000000 +-117.090000,32.910000,9.000000,2012.000000,316.000000,802.000000,289.000000,6.570600,255700.000000 +-117.090000,32.910000,16.000000,2005.000000,266.000000,827.000000,270.000000,7.054600,282200.000000 +-117.090000,32.900000,16.000000,1989.000000,290.000000,814.000000,291.000000,6.271500,255100.000000 +-117.090000,32.830000,15.000000,4138.000000,636.000000,2001.000000,677.000000,4.841900,264000.000000 +-117.090000,32.810000,7.000000,6100.000000,1185.000000,2710.000000,1040.000000,5.567300,288200.000000 +-117.090000,32.800000,36.000000,2163.000000,367.000000,915.000000,360.000000,4.718800,174100.000000 +-117.090000,32.800000,15.000000,666.000000,152.000000,247.000000,164.000000,2.150000,131300.000000 +-117.090000,32.790000,36.000000,1936.000000,345.000000,861.000000,343.000000,3.833300,170000.000000 +-117.090000,32.790000,36.000000,1529.000000,266.000000,683.000000,260.000000,4.098200,171200.000000 +-117.090000,32.790000,20.000000,2183.000000,534.000000,999.000000,496.000000,2.863100,169700.000000 +-117.090000,32.780000,28.000000,1708.000000,393.000000,816.000000,393.000000,2.988100,165300.000000 +-117.090000,32.770000,38.000000,2065.000000,374.000000,812.000000,343.000000,3.125000,216500.000000 +-117.090000,32.770000,31.000000,3062.000000,596.000000,1263.000000,539.000000,3.087500,291500.000000 +-117.090000,32.760000,44.000000,1139.000000,214.000000,470.000000,217.000000,3.548100,203100.000000 +-117.090000,32.760000,43.000000,3889.000000,711.000000,1466.000000,663.000000,3.552900,223000.000000 +-117.090000,32.760000,31.000000,2567.000000,624.000000,1255.000000,582.000000,2.590900,159100.000000 +-117.090000,32.760000,31.000000,1235.000000,387.000000,816.000000,397.000000,1.551700,122500.000000 +-117.090000,32.760000,10.000000,1922.000000,577.000000,1595.000000,545.000000,1.520800,118800.000000 +-117.090000,32.750000,30.000000,1899.000000,546.000000,1620.000000,493.000000,1.603400,84400.000000 +-117.090000,32.750000,28.000000,1220.000000,391.000000,1286.000000,396.000000,1.228600,105000.000000 +-117.090000,32.750000,24.000000,1245.000000,376.000000,1230.000000,362.000000,1.875000,95000.000000 +-117.090000,32.750000,20.000000,1701.000000,503.000000,1482.000000,465.000000,1.678900,95500.000000 +-117.090000,32.740000,23.000000,3130.000000,779.000000,2472.000000,744.000000,2.320000,93200.000000 +-117.090000,32.730000,26.000000,3114.000000,686.000000,1948.000000,660.000000,2.894200,124100.000000 +-117.090000,32.720000,39.000000,1273.000000,246.000000,770.000000,242.000000,2.093800,102500.000000 +-117.090000,32.720000,33.000000,1096.000000,240.000000,716.000000,224.000000,1.694400,111800.000000 +-117.090000,32.710000,29.000000,2238.000000,523.000000,2061.000000,504.000000,2.555900,96800.000000 +-117.090000,32.710000,12.000000,3375.000000,945.000000,2357.000000,808.000000,1.500000,106300.000000 +-117.090000,32.700000,22.000000,2409.000000,582.000000,1887.000000,578.000000,1.408900,94200.000000 +-117.090000,32.690000,20.000000,1102.000000,205.000000,852.000000,217.000000,3.183300,108300.000000 
+-117.090000,32.690000,18.000000,1645.000000,430.000000,1221.000000,410.000000,1.326900,108000.000000 +-117.090000,32.680000,30.000000,2662.000000,653.000000,1997.000000,605.000000,2.808900,120600.000000 +-117.090000,32.680000,29.000000,1792.000000,449.000000,1650.000000,396.000000,2.220100,100000.000000 +-117.090000,32.680000,20.000000,2569.000000,737.000000,2341.000000,705.000000,2.011400,104900.000000 +-117.090000,32.670000,37.000000,1157.000000,332.000000,983.000000,306.000000,2.097200,117000.000000 +-117.090000,32.670000,31.000000,2051.000000,549.000000,1581.000000,538.000000,2.052000,108900.000000 +-117.090000,32.660000,46.000000,844.000000,147.000000,423.000000,161.000000,3.375000,136300.000000 +-117.090000,32.660000,38.000000,833.000000,206.000000,570.000000,182.000000,1.833300,127100.000000 +-117.090000,32.650000,25.000000,3509.000000,985.000000,2359.000000,899.000000,2.629600,150000.000000 +-117.090000,32.650000,20.000000,1445.000000,323.000000,573.000000,334.000000,2.619000,145800.000000 +-117.090000,32.640000,38.000000,2095.000000,536.000000,1240.000000,550.000000,2.721800,145900.000000 +-117.090000,32.640000,30.000000,3171.000000,862.000000,2126.000000,800.000000,2.507000,142700.000000 +-117.090000,32.640000,24.000000,3613.000000,973.000000,2002.000000,931.000000,1.947000,147500.000000 +-117.090000,32.640000,20.000000,1999.000000,651.000000,1302.000000,592.000000,1.632100,57500.000000 +-117.090000,32.630000,33.000000,620.000000,161.000000,420.000000,164.000000,1.841700,150000.000000 +-117.090000,32.630000,27.000000,2920.000000,770.000000,1935.000000,746.000000,2.414800,67500.000000 +-117.090000,32.620000,37.000000,1925.000000,428.000000,1344.000000,426.000000,2.486600,129700.000000 +-117.090000,32.620000,34.000000,1576.000000,364.000000,1153.000000,381.000000,2.195500,129700.000000 +-117.090000,32.610000,23.000000,1157.000000,309.000000,640.000000,313.000000,2.154800,118800.000000 +-117.090000,32.610000,21.000000,1945.000000,430.000000,1335.000000,419.000000,3.646700,113000.000000 +-117.090000,32.580000,12.000000,2565.000000,567.000000,1785.000000,545.000000,3.027300,135300.000000 +-117.090000,32.570000,23.000000,1817.000000,323.000000,1371.000000,327.000000,3.673600,139500.000000 +-117.090000,32.570000,17.000000,444.000000,83.000000,357.000000,87.000000,5.147800,138900.000000 +-117.090000,32.570000,10.000000,2198.000000,368.000000,1645.000000,350.000000,4.554700,160700.000000 +-117.090000,32.550000,8.000000,6533.000000,1217.000000,4797.000000,1177.000000,3.958300,144400.000000 +-117.100000,34.570000,6.000000,5110.000000,1044.000000,1938.000000,724.000000,3.191700,112800.000000 +-117.100000,34.210000,22.000000,4397.000000,931.000000,1145.000000,445.000000,4.526800,108400.000000 +-117.100000,34.030000,24.000000,4144.000000,826.000000,2127.000000,772.000000,2.517200,96000.000000 +-117.100000,33.560000,6.000000,1868.000000,289.000000,750.000000,247.000000,4.383300,307600.000000 +-117.100000,33.360000,19.000000,3518.000000,658.000000,2091.000000,610.000000,3.261400,168800.000000 +-117.100000,33.170000,12.000000,2465.000000,412.000000,1226.000000,428.000000,5.481900,183800.000000 +-117.100000,33.150000,5.000000,3159.000000,685.000000,1398.000000,581.000000,3.146700,161100.000000 +-117.100000,33.140000,7.000000,10665.000000,2576.000000,4917.000000,2424.000000,2.317100,159500.000000 +-117.100000,33.120000,12.000000,961.000000,342.000000,315.000000,201.000000,0.813000,275000.000000 +-117.100000,33.000000,5.000000,15502.000000,2613.000000,7417.000000,2358.000000,5.909400,261100.000000 
+-117.100000,32.970000,17.000000,3167.000000,861.000000,2098.000000,828.000000,2.445900,85800.000000 +-117.100000,32.960000,7.000000,3619.000000,770.000000,1134.000000,482.000000,4.127900,167600.000000 +-117.100000,32.900000,16.000000,2994.000000,445.000000,1047.000000,437.000000,5.149000,184300.000000 +-117.100000,32.830000,16.000000,4214.000000,744.000000,1820.000000,699.000000,4.378300,179500.000000 +-117.100000,32.770000,49.000000,4449.000000,711.000000,1606.000000,709.000000,5.776800,281600.000000 +-117.100000,32.760000,52.000000,2606.000000,426.000000,883.000000,380.000000,4.281300,270800.000000 +-117.100000,32.760000,31.000000,987.000000,267.000000,619.000000,250.000000,2.928600,151800.000000 +-117.100000,32.760000,30.000000,1835.000000,474.000000,934.000000,415.000000,2.875000,139600.000000 +-117.100000,32.750000,23.000000,1858.000000,551.000000,1506.000000,492.000000,1.744600,85200.000000 +-117.100000,32.750000,21.000000,2063.000000,609.000000,1686.000000,558.000000,1.482800,94800.000000 +-117.100000,32.750000,20.000000,2355.000000,722.000000,1848.000000,576.000000,2.003600,99200.000000 +-117.100000,32.750000,16.000000,2426.000000,799.000000,1505.000000,754.000000,1.644400,103400.000000 +-117.100000,32.750000,15.000000,2422.000000,774.000000,2120.000000,715.000000,1.061700,92400.000000 +-117.100000,32.750000,11.000000,1976.000000,548.000000,1528.000000,512.000000,1.488600,89800.000000 +-117.100000,32.740000,30.000000,1772.000000,500.000000,1389.000000,447.000000,2.364100,94100.000000 +-117.100000,32.740000,14.000000,2361.000000,601.000000,1831.000000,526.000000,1.610200,93400.000000 +-117.100000,32.730000,24.000000,2927.000000,704.000000,2005.000000,668.000000,2.237500,102900.000000 +-117.100000,32.720000,5.000000,1615.000000,387.000000,1094.000000,394.000000,2.202400,137200.000000 +-117.100000,32.710000,9.000000,1931.000000,472.000000,1628.000000,445.000000,2.085000,92600.000000 +-117.100000,32.710000,25.000000,939.000000,247.000000,1003.000000,240.000000,1.750000,87900.000000 +-117.100000,32.700000,42.000000,2002.000000,488.000000,1505.000000,464.000000,1.505700,86300.000000 +-117.100000,32.700000,28.000000,633.000000,137.000000,525.000000,170.000000,3.604200,95600.000000 +-117.100000,32.690000,37.000000,1269.000000,340.000000,1369.000000,302.000000,2.210200,87200.000000 +-117.100000,32.690000,35.000000,1292.000000,272.000000,1183.000000,272.000000,2.054700,98000.000000 +-117.100000,32.690000,29.000000,4174.000000,1195.000000,3675.000000,1124.000000,1.811200,103600.000000 +-117.100000,32.690000,11.000000,3071.000000,911.000000,2812.000000,774.000000,1.241300,83100.000000 +-117.100000,32.680000,49.000000,1412.000000,350.000000,1200.000000,332.000000,2.039800,93600.000000 +-117.100000,32.680000,47.000000,771.000000,224.000000,637.000000,212.000000,2.015600,90300.000000 +-117.100000,32.680000,47.000000,1044.000000,274.000000,1003.000000,280.000000,1.780200,97800.000000 +-117.100000,32.680000,20.000000,1012.000000,269.000000,837.000000,240.000000,2.048800,88900.000000 +-117.100000,32.670000,26.000000,2629.000000,763.000000,2721.000000,767.000000,2.098200,109100.000000 +-117.100000,32.670000,22.000000,1690.000000,541.000000,1669.000000,494.000000,2.021300,110600.000000 +-117.100000,32.670000,15.000000,1635.000000,553.000000,1347.000000,597.000000,1.274500,92900.000000 +-117.100000,32.660000,27.000000,1782.000000,560.000000,1785.000000,560.000000,2.154200,106300.000000 +-117.100000,32.640000,29.000000,1578.000000,460.000000,1236.000000,461.000000,2.565800,134700.000000 
+-117.100000,32.590000,21.000000,2350.000000,667.000000,1621.000000,613.000000,2.073400,87500.000000 +-117.100000,32.580000,29.000000,1061.000000,202.000000,759.000000,206.000000,4.864600,136800.000000 +-117.100000,32.580000,27.000000,2616.000000,591.000000,1889.000000,577.000000,2.382400,127600.000000 +-117.100000,32.580000,23.000000,1662.000000,377.000000,1318.000000,386.000000,2.300000,120800.000000 +-117.100000,32.580000,17.000000,2046.000000,559.000000,1585.000000,530.000000,2.250000,132800.000000 +-117.100000,32.570000,26.000000,2343.000000,371.000000,1221.000000,372.000000,4.360100,144900.000000 +-117.100000,32.570000,14.000000,5058.000000,1299.000000,3662.000000,1193.000000,2.325300,133700.000000 +-117.100000,32.560000,16.000000,2687.000000,501.000000,1502.000000,480.000000,3.750000,146800.000000 +-117.110000,34.430000,14.000000,3026.000000,556.000000,1349.000000,485.000000,2.802100,111200.000000 +-117.110000,33.980000,25.000000,1254.000000,312.000000,715.000000,301.000000,2.734400,149000.000000 +-117.110000,33.830000,14.000000,2715.000000,500.000000,1540.000000,464.000000,3.803600,139600.000000 +-117.110000,33.780000,13.000000,1914.000000,339.000000,930.000000,304.000000,4.187500,161200.000000 +-117.110000,33.750000,17.000000,4174.000000,851.000000,1845.000000,780.000000,2.261800,96100.000000 +-117.110000,33.740000,18.000000,4799.000000,1035.000000,1966.000000,944.000000,2.118200,71300.000000 +-117.110000,33.230000,13.000000,5819.000000,919.000000,2228.000000,866.000000,4.933500,298100.000000 +-117.110000,33.190000,15.000000,3154.000000,488.000000,1656.000000,429.000000,5.046100,222400.000000 +-117.110000,33.150000,14.000000,8374.000000,1407.000000,2916.000000,1295.000000,4.701900,191100.000000 +-117.110000,33.140000,10.000000,3208.000000,636.000000,1395.000000,582.000000,3.445500,190500.000000 +-117.110000,33.120000,46.000000,52.000000,13.000000,59.000000,13.000000,3.875000,200000.000000 +-117.110000,33.110000,17.000000,2641.000000,627.000000,1167.000000,647.000000,2.287500,132400.000000 +-117.110000,32.970000,9.000000,1531.000000,242.000000,850.000000,240.000000,6.086200,263600.000000 +-117.110000,32.950000,11.000000,4694.000000,824.000000,2223.000000,783.000000,4.948500,231800.000000 +-117.110000,32.910000,15.000000,1840.000000,235.000000,855.000000,241.000000,7.599200,310600.000000 +-117.110000,32.900000,16.000000,2043.000000,388.000000,705.000000,352.000000,4.476600,161500.000000 +-117.110000,32.840000,16.000000,4608.000000,629.000000,2020.000000,636.000000,6.040000,243000.000000 +-117.110000,32.820000,16.000000,4241.000000,892.000000,1771.000000,864.000000,4.375000,166500.000000 +-117.110000,32.820000,16.000000,3980.000000,682.000000,3174.000000,647.000000,2.660700,175000.000000 +-117.110000,32.810000,15.000000,3428.000000,491.000000,2303.000000,486.000000,2.595300,67500.000000 +-117.110000,32.800000,17.000000,3890.000000,586.000000,2791.000000,595.000000,3.219700,67500.000000 +-117.110000,32.790000,16.000000,2574.000000,771.000000,1129.000000,721.000000,3.384900,96900.000000 +-117.110000,32.790000,16.000000,1791.000000,518.000000,1006.000000,491.000000,3.517900,129300.000000 +-117.110000,32.780000,16.000000,2470.000000,830.000000,1170.000000,724.000000,3.556200,73500.000000 +-117.110000,32.780000,16.000000,2220.000000,512.000000,930.000000,527.000000,3.652800,133200.000000 +-117.110000,32.770000,52.000000,1506.000000,233.000000,478.000000,240.000000,4.387500,300000.000000 +-117.110000,32.770000,52.000000,1484.000000,224.000000,498.000000,223.000000,6.605300,331400.000000 
+-117.110000,32.770000,50.000000,1729.000000,355.000000,617.000000,337.000000,3.670500,167000.000000 +-117.110000,32.770000,48.000000,1502.000000,272.000000,590.000000,265.000000,2.595200,190300.000000 +-117.110000,32.760000,29.000000,2030.000000,545.000000,1014.000000,518.000000,2.240900,114200.000000 +-117.110000,32.760000,28.000000,1457.000000,397.000000,672.000000,342.000000,1.979900,122700.000000 +-117.110000,32.760000,21.000000,2226.000000,600.000000,1085.000000,533.000000,2.260400,126300.000000 +-117.110000,32.760000,19.000000,2188.000000,616.000000,1304.000000,607.000000,2.085200,114400.000000 +-117.110000,32.750000,46.000000,695.000000,182.000000,601.000000,195.000000,2.421900,90600.000000 +-117.110000,32.750000,34.000000,2131.000000,594.000000,1373.000000,562.000000,2.113000,102100.000000 +-117.110000,32.750000,21.000000,2127.000000,658.000000,1812.000000,603.000000,1.689600,100000.000000 +-117.110000,32.750000,20.000000,1667.000000,469.000000,1292.000000,445.000000,2.089300,101100.000000 +-117.110000,32.750000,11.000000,1607.000000,478.000000,1384.000000,450.000000,2.050000,100000.000000 +-117.110000,32.740000,33.000000,1126.000000,267.000000,621.000000,241.000000,3.242200,123100.000000 +-117.110000,32.740000,25.000000,684.000000,190.000000,665.000000,187.000000,2.452400,90300.000000 +-117.110000,32.740000,25.000000,2846.000000,644.000000,2272.000000,632.000000,2.200000,98700.000000 +-117.110000,32.730000,35.000000,1689.000000,397.000000,1135.000000,366.000000,2.326900,97300.000000 +-117.110000,32.730000,34.000000,1096.000000,221.000000,574.000000,223.000000,3.835500,126700.000000 +-117.110000,32.720000,25.000000,1491.000000,348.000000,1183.000000,316.000000,1.958300,88600.000000 +-117.110000,32.710000,30.000000,1729.000000,457.000000,1673.000000,460.000000,1.700000,85900.000000 +-117.110000,32.710000,29.000000,1040.000000,291.000000,1054.000000,297.000000,1.181800,83200.000000 +-117.110000,32.700000,37.000000,2045.000000,502.000000,1920.000000,472.000000,1.812500,83300.000000 +-117.110000,32.700000,34.000000,2028.000000,522.000000,1797.000000,464.000000,1.740200,79400.000000 +-117.110000,32.700000,33.000000,1980.000000,488.000000,1626.000000,428.000000,1.485600,86400.000000 +-117.110000,32.690000,39.000000,395.000000,159.000000,620.000000,162.000000,2.725000,86500.000000 +-117.110000,32.690000,37.000000,2395.000000,627.000000,2489.000000,599.000000,1.593300,86300.000000 +-117.110000,32.690000,36.000000,1421.000000,367.000000,1418.000000,355.000000,1.942500,93400.000000 +-117.110000,32.690000,34.000000,1144.000000,295.000000,1271.000000,302.000000,2.090000,91800.000000 +-117.110000,32.680000,36.000000,26.000000,14.000000,58.000000,23.000000,1.910700,125000.000000 +-117.110000,32.670000,52.000000,280.000000,71.000000,217.000000,71.000000,1.484400,83300.000000 +-117.110000,32.670000,52.000000,204.000000,74.000000,248.000000,57.000000,1.796100,47500.000000 +-117.110000,32.670000,46.000000,928.000000,236.000000,790.000000,235.000000,1.680600,92500.000000 +-117.110000,32.670000,43.000000,515.000000,146.000000,445.000000,140.000000,1.609400,93000.000000 +-117.110000,32.640000,23.000000,1619.000000,447.000000,1025.000000,415.000000,1.858000,67500.000000 +-117.110000,32.620000,27.000000,1846.000000,509.000000,1078.000000,482.000000,2.171900,131500.000000 +-117.110000,32.590000,18.000000,2329.000000,580.000000,1538.000000,567.000000,2.117900,153100.000000 +-117.110000,32.590000,17.000000,2020.000000,534.000000,1529.000000,500.000000,2.177300,143200.000000 
+-117.110000,32.580000,28.000000,1869.000000,407.000000,1074.000000,344.000000,2.598800,135600.000000 +-117.110000,32.580000,21.000000,2894.000000,685.000000,2109.000000,712.000000,2.275500,125000.000000 +-117.120000,34.460000,17.000000,1613.000000,326.000000,765.000000,300.000000,2.682700,110400.000000 +-117.120000,34.060000,38.000000,281.000000,55.000000,151.000000,52.000000,1.390600,120800.000000 +-117.120000,34.040000,25.000000,2495.000000,438.000000,1071.000000,405.000000,4.817300,146600.000000 +-117.120000,33.520000,4.000000,30401.000000,4957.000000,13251.000000,4339.000000,4.584100,212300.000000 +-117.120000,33.270000,11.000000,3016.000000,601.000000,1727.000000,541.000000,4.937500,232800.000000 +-117.120000,33.150000,7.000000,2810.000000,464.000000,1564.000000,457.000000,4.465500,182800.000000 +-117.120000,33.140000,16.000000,1710.000000,272.000000,1025.000000,267.000000,4.164100,163600.000000 +-117.120000,33.140000,12.000000,2363.000000,408.000000,1211.000000,396.000000,3.896700,172600.000000 +-117.120000,33.070000,45.000000,1032.000000,235.000000,363.000000,177.000000,3.638900,186600.000000 +-117.120000,32.960000,16.000000,3050.000000,559.000000,1444.000000,512.000000,5.246300,156300.000000 +-117.120000,32.960000,15.000000,2869.000000,405.000000,1526.000000,402.000000,6.017500,238300.000000 +-117.120000,32.950000,8.000000,3670.000000,536.000000,1723.000000,592.000000,6.354200,218100.000000 +-117.120000,32.950000,4.000000,9018.000000,1572.000000,4438.000000,1498.000000,4.988000,263700.000000 +-117.120000,32.930000,7.000000,1427.000000,243.000000,927.000000,239.000000,5.362500,218900.000000 +-117.120000,32.910000,8.000000,3405.000000,961.000000,1742.000000,918.000000,2.872800,114600.000000 +-117.120000,32.900000,14.000000,3249.000000,937.000000,1929.000000,838.000000,2.858800,92500.000000 +-117.120000,32.900000,13.000000,1743.000000,363.000000,854.000000,353.000000,4.666700,138200.000000 +-117.120000,32.800000,31.000000,1727.000000,342.000000,879.000000,345.000000,3.812500,166300.000000 +-117.120000,32.800000,29.000000,2863.000000,534.000000,1392.000000,522.000000,3.871900,174200.000000 +-117.120000,32.780000,4.000000,2782.000000,817.000000,1309.000000,787.000000,4.262100,124200.000000 +-117.120000,32.770000,48.000000,2012.000000,422.000000,893.000000,394.000000,2.792800,175000.000000 +-117.120000,32.770000,43.000000,2167.000000,464.000000,977.000000,461.000000,3.125000,192200.000000 +-117.120000,32.760000,33.000000,2279.000000,591.000000,1250.000000,576.000000,2.429700,139000.000000 +-117.120000,32.760000,28.000000,2160.000000,608.000000,1339.000000,571.000000,1.915200,128100.000000 +-117.120000,32.760000,28.000000,1605.000000,501.000000,936.000000,460.000000,2.599100,147500.000000 +-117.120000,32.760000,27.000000,1426.000000,364.000000,792.000000,353.000000,2.067300,118800.000000 +-117.120000,32.760000,23.000000,2681.000000,717.000000,1279.000000,648.000000,2.159700,116100.000000 +-117.120000,32.760000,17.000000,1559.000000,462.000000,821.000000,428.000000,2.013900,150000.000000 +-117.120000,32.750000,37.000000,2344.000000,546.000000,1134.000000,513.000000,2.439400,118300.000000 +-117.120000,32.750000,20.000000,1406.000000,413.000000,850.000000,412.000000,2.326100,114600.000000 +-117.120000,32.750000,17.000000,2060.000000,633.000000,1251.000000,602.000000,1.988600,119200.000000 +-117.120000,32.750000,15.000000,2671.000000,724.000000,1800.000000,646.000000,2.139400,106700.000000 +-117.120000,32.750000,13.000000,2795.000000,773.000000,1869.000000,690.000000,2.176700,101800.000000 
+-117.120000,32.740000,52.000000,1969.000000,389.000000,877.000000,424.000000,3.790000,163400.000000 +-117.120000,32.740000,46.000000,1898.000000,441.000000,978.000000,439.000000,3.270800,155200.000000 +-117.120000,32.730000,50.000000,2307.000000,424.000000,887.000000,356.000000,3.515600,168800.000000 +-117.120000,32.720000,36.000000,6096.000000,1285.000000,3093.000000,1229.000000,3.370000,159100.000000 +-117.120000,32.710000,33.000000,1256.000000,331.000000,1315.000000,321.000000,1.928600,78500.000000 +-117.120000,32.710000,24.000000,421.000000,101.000000,396.000000,113.000000,0.643300,111300.000000 +-117.120000,32.700000,38.000000,818.000000,217.000000,953.000000,231.000000,1.053100,65700.000000 +-117.120000,32.700000,37.000000,1361.000000,348.000000,1398.000000,328.000000,1.168100,78100.000000 +-117.120000,32.700000,36.000000,1011.000000,253.000000,763.000000,226.000000,1.818700,84100.000000 +-117.120000,32.700000,14.000000,819.000000,237.000000,827.000000,237.000000,1.319400,90500.000000 +-117.120000,32.690000,46.000000,200.000000,77.000000,180.000000,65.000000,1.065800,93800.000000 +-117.120000,32.690000,37.000000,1082.000000,294.000000,1146.000000,265.000000,2.067300,88500.000000 +-117.120000,32.580000,34.000000,2003.000000,466.000000,1226.000000,443.000000,3.061300,136700.000000 +-117.120000,32.580000,26.000000,1360.000000,309.000000,869.000000,328.000000,3.021700,131600.000000 +-117.120000,32.570000,21.000000,1738.000000,295.000000,983.000000,298.000000,4.827400,174100.000000 +-117.120000,32.560000,20.000000,2524.000000,682.000000,1819.000000,560.000000,2.928600,257700.000000 +-117.130000,34.880000,21.000000,3254.000000,669.000000,1548.000000,545.000000,2.337300,57100.000000 +-117.130000,34.390000,29.000000,2251.000000,464.000000,855.000000,315.000000,3.418300,104100.000000 +-117.130000,34.240000,17.000000,2828.000000,506.000000,673.000000,274.000000,5.256300,144100.000000 +-117.130000,34.170000,17.000000,1181.000000,271.000000,248.000000,114.000000,5.576200,150000.000000 +-117.130000,34.060000,4.000000,3078.000000,510.000000,1341.000000,486.000000,4.968800,163200.000000 +-117.130000,33.890000,4.000000,1611.000000,239.000000,275.000000,84.000000,3.578100,244400.000000 +-117.130000,33.150000,15.000000,2241.000000,381.000000,997.000000,390.000000,3.483300,193200.000000 +-117.130000,33.140000,5.000000,2618.000000,539.000000,1320.000000,512.000000,4.105300,171400.000000 +-117.130000,33.140000,16.000000,1649.000000,278.000000,993.000000,277.000000,4.852600,170700.000000 +-117.130000,33.140000,12.000000,2258.000000,456.000000,1147.000000,433.000000,4.049500,153900.000000 +-117.130000,33.130000,17.000000,3164.000000,652.000000,1123.000000,699.000000,2.082000,80000.000000 +-117.130000,32.970000,10.000000,3486.000000,469.000000,1700.000000,483.000000,6.469600,249500.000000 +-117.130000,32.960000,15.000000,2267.000000,292.000000,1180.000000,289.000000,6.712000,240200.000000 +-117.130000,32.940000,15.000000,4846.000000,825.000000,2797.000000,823.000000,4.937500,180400.000000 +-117.130000,32.930000,16.000000,2918.000000,444.000000,1697.000000,444.000000,5.306200,195500.000000 +-117.130000,32.920000,17.000000,1481.000000,315.000000,1002.000000,300.000000,3.619600,163400.000000 +-117.130000,32.920000,16.000000,2173.000000,399.000000,1460.000000,393.000000,4.261400,169600.000000 +-117.130000,32.920000,16.000000,1580.000000,241.000000,917.000000,261.000000,4.726600,191100.000000 +-117.130000,32.920000,16.000000,1565.000000,257.000000,893.000000,239.000000,5.503600,192300.000000 
+-117.130000,32.910000,16.000000,3230.000000,579.000000,1825.000000,576.000000,4.296900,151200.000000 +-117.130000,32.910000,15.000000,1450.000000,266.000000,747.000000,290.000000,3.611100,196300.000000 +-117.130000,32.900000,15.000000,2785.000000,644.000000,1798.000000,630.000000,3.715600,175200.000000 +-117.130000,32.810000,26.000000,2119.000000,444.000000,1202.000000,440.000000,3.230800,166500.000000 +-117.130000,32.810000,19.000000,2157.000000,554.000000,1349.000000,535.000000,2.765200,177400.000000 +-117.130000,32.800000,35.000000,2129.000000,382.000000,1044.000000,350.000000,3.973200,174900.000000 +-117.130000,32.800000,33.000000,2731.000000,456.000000,1263.000000,445.000000,4.556800,175300.000000 +-117.130000,32.800000,15.000000,1606.000000,375.000000,784.000000,342.000000,3.723700,108300.000000 +-117.130000,32.790000,35.000000,1458.000000,262.000000,723.000000,257.000000,4.209800,174100.000000 +-117.130000,32.770000,30.000000,2582.000000,650.000000,1098.000000,603.000000,2.828100,171700.000000 +-117.130000,32.760000,33.000000,1591.000000,461.000000,794.000000,425.000000,2.633300,140000.000000 +-117.130000,32.760000,29.000000,2568.000000,682.000000,1191.000000,642.000000,2.109400,162500.000000 +-117.130000,32.760000,27.000000,2280.000000,695.000000,1235.000000,664.000000,1.939200,142900.000000 +-117.130000,32.760000,22.000000,2623.000000,732.000000,1283.000000,718.000000,2.156300,127100.000000 +-117.130000,32.750000,50.000000,1476.000000,354.000000,698.000000,354.000000,3.000000,168800.000000 +-117.130000,32.750000,37.000000,4142.000000,1031.000000,1936.000000,968.000000,2.693000,174100.000000 +-117.130000,32.750000,31.000000,2336.000000,656.000000,1186.000000,609.000000,2.587200,130600.000000 +-117.130000,32.750000,28.000000,2279.000000,671.000000,1166.000000,623.000000,1.950000,150000.000000 +-117.130000,32.750000,24.000000,1877.000000,519.000000,898.000000,483.000000,2.226400,112500.000000 +-117.130000,32.750000,23.000000,3999.000000,1182.000000,2051.000000,1130.000000,2.129200,135000.000000 +-117.130000,32.750000,20.000000,2271.000000,602.000000,992.000000,520.000000,2.259900,157600.000000 +-117.130000,32.740000,52.000000,1512.000000,321.000000,651.000000,321.000000,3.685200,185300.000000 +-117.130000,32.740000,50.000000,1527.000000,338.000000,728.000000,322.000000,2.625000,203200.000000 +-117.130000,32.740000,46.000000,3355.000000,768.000000,1457.000000,708.000000,2.660400,170100.000000 +-117.130000,32.730000,52.000000,2676.000000,557.000000,1181.000000,537.000000,3.605800,213100.000000 +-117.130000,32.730000,52.000000,1911.000000,415.000000,777.000000,412.000000,2.242900,221100.000000 +-117.130000,32.730000,52.000000,1148.000000,214.000000,481.000000,215.000000,5.454000,240900.000000 +-117.130000,32.730000,43.000000,2706.000000,667.000000,1531.000000,614.000000,2.151300,145000.000000 +-117.130000,32.720000,9.000000,2436.000000,720.000000,1780.000000,653.000000,1.829900,137500.000000 +-117.130000,32.720000,52.000000,1560.000000,307.000000,757.000000,315.000000,2.708300,199100.000000 +-117.130000,32.720000,43.000000,2160.000000,504.000000,1221.000000,452.000000,2.482100,140600.000000 +-117.130000,32.720000,32.000000,2197.000000,623.000000,1784.000000,599.000000,1.901000,120300.000000 +-117.130000,32.720000,19.000000,1341.000000,435.000000,1048.000000,360.000000,1.975000,117900.000000 +-117.130000,32.720000,17.000000,1285.000000,423.000000,1208.000000,409.000000,1.758000,126600.000000 
+-117.130000,32.710000,44.000000,1697.000000,413.000000,1396.000000,363.000000,1.547400,83300.000000 +-117.130000,32.710000,42.000000,1145.000000,314.000000,1114.000000,307.000000,1.261400,87500.000000 +-117.130000,32.710000,38.000000,993.000000,246.000000,760.000000,205.000000,1.156300,82700.000000 +-117.130000,32.710000,37.000000,1220.000000,325.000000,1472.000000,323.000000,1.825000,81500.000000 +-117.130000,32.710000,35.000000,614.000000,180.000000,691.000000,164.000000,1.695300,81300.000000 +-117.130000,32.700000,48.000000,786.000000,230.000000,917.000000,231.000000,1.875000,75600.000000 +-117.130000,32.700000,42.000000,1210.000000,292.000000,945.000000,258.000000,0.899100,78900.000000 +-117.130000,32.700000,38.000000,1445.000000,392.000000,1286.000000,357.000000,1.463200,80200.000000 +-117.130000,32.700000,35.000000,1179.000000,344.000000,1372.000000,330.000000,1.950900,70200.000000 +-117.130000,32.630000,10.000000,7374.000000,1157.000000,1900.000000,794.000000,8.799100,478500.000000 +-117.130000,32.580000,32.000000,1870.000000,437.000000,1142.000000,426.000000,2.319400,159400.000000 +-117.130000,32.580000,27.000000,1417.000000,373.000000,814.000000,348.000000,2.360300,195300.000000 +-117.140000,34.750000,33.000000,552.000000,120.000000,347.000000,97.000000,1.815800,100000.000000 +-117.140000,34.070000,3.000000,5542.000000,828.000000,2506.000000,806.000000,5.587500,162000.000000 +-117.140000,34.050000,5.000000,2634.000000,359.000000,1173.000000,372.000000,6.746000,204100.000000 +-117.140000,34.010000,26.000000,7561.000000,1051.000000,2909.000000,1012.000000,7.297200,269600.000000 +-117.140000,33.940000,5.000000,4873.000000,639.000000,1947.000000,568.000000,6.322300,223200.000000 +-117.140000,33.390000,17.000000,2889.000000,587.000000,1931.000000,510.000000,3.854700,208300.000000 +-117.140000,33.230000,11.000000,4068.000000,829.000000,918.000000,500.000000,3.127200,281300.000000 +-117.140000,33.180000,11.000000,5546.000000,974.000000,2300.000000,970.000000,3.710900,199800.000000 +-117.140000,33.160000,16.000000,1660.000000,236.000000,733.000000,214.000000,5.687400,202700.000000 +-117.140000,33.150000,17.000000,1149.000000,182.000000,702.000000,192.000000,5.569600,168400.000000 +-117.140000,33.150000,16.000000,1129.000000,198.000000,758.000000,178.000000,5.034600,174600.000000 +-117.140000,33.120000,7.000000,6126.000000,1032.000000,2662.000000,923.000000,4.900500,264000.000000 +-117.140000,33.070000,12.000000,9302.000000,1603.000000,4074.000000,1504.000000,4.351300,199600.000000 +-117.140000,32.960000,12.000000,5949.000000,799.000000,2936.000000,781.000000,6.372100,241500.000000 +-117.140000,32.930000,16.000000,2412.000000,419.000000,1612.000000,422.000000,4.508600,171100.000000 +-117.140000,32.930000,14.000000,1946.000000,463.000000,1205.000000,390.000000,4.210900,171200.000000 +-117.140000,32.930000,12.000000,1474.000000,364.000000,1009.000000,372.000000,4.052100,166700.000000 +-117.140000,32.920000,7.000000,1308.000000,418.000000,766.000000,390.000000,3.215100,106300.000000 +-117.140000,32.920000,6.000000,3069.000000,750.000000,1541.000000,736.000000,3.814000,132500.000000 +-117.140000,32.920000,15.000000,3242.000000,595.000000,1936.000000,593.000000,4.970600,184700.000000 +-117.140000,32.910000,14.000000,3014.000000,710.000000,2165.000000,705.000000,3.783700,160300.000000 +-117.140000,32.830000,25.000000,2161.000000,462.000000,896.000000,468.000000,2.228400,177500.000000 +-117.140000,32.810000,34.000000,1748.000000,294.000000,800.000000,294.000000,4.488600,179100.000000 
+-117.140000,32.800000,41.000000,2423.000000,469.000000,1813.000000,466.000000,2.115700,156900.000000 +-117.140000,32.800000,35.000000,1267.000000,212.000000,710.000000,204.000000,2.536800,169600.000000 +-117.140000,32.790000,35.000000,3578.000000,582.000000,1568.000000,553.000000,4.781300,188600.000000 +-117.140000,32.790000,31.000000,984.000000,161.000000,422.000000,158.000000,5.282000,183000.000000 +-117.140000,32.760000,35.000000,1785.000000,493.000000,965.000000,506.000000,2.079200,160000.000000 +-117.140000,32.760000,32.000000,2587.000000,681.000000,1246.000000,650.000000,2.172700,145500.000000 +-117.140000,32.760000,28.000000,3025.000000,756.000000,1328.000000,695.000000,2.694000,164100.000000 +-117.140000,32.760000,24.000000,3523.000000,991.000000,1775.000000,873.000000,2.127300,142300.000000 +-117.140000,32.750000,37.000000,1832.000000,525.000000,955.000000,488.000000,2.785200,129200.000000 +-117.140000,32.750000,35.000000,1391.000000,329.000000,726.000000,317.000000,2.681800,159400.000000 +-117.140000,32.750000,29.000000,1961.000000,565.000000,1002.000000,569.000000,2.281300,118100.000000 +-117.140000,32.750000,27.000000,1551.000000,464.000000,880.000000,400.000000,2.416700,131300.000000 +-117.140000,32.740000,47.000000,1494.000000,327.000000,689.000000,304.000000,3.125000,172700.000000 +-117.140000,32.740000,16.000000,6075.000000,1816.000000,2592.000000,1634.000000,2.555300,178100.000000 +-117.140000,32.730000,26.000000,450.000000,132.000000,317.000000,109.000000,4.000000,137500.000000 +-117.140000,32.720000,43.000000,1073.000000,344.000000,660.000000,279.000000,2.052900,168800.000000 +-117.140000,32.720000,42.000000,1558.000000,458.000000,1227.000000,407.000000,2.280400,139100.000000 +-117.140000,32.720000,34.000000,2533.000000,862.000000,2011.000000,778.000000,2.119900,160400.000000 +-117.140000,32.710000,52.000000,979.000000,314.000000,975.000000,297.000000,1.237500,100000.000000 +-117.140000,32.710000,52.000000,800.000000,313.000000,1337.000000,282.000000,1.559400,87500.000000 +-117.140000,32.710000,52.000000,1225.000000,332.000000,955.000000,321.000000,1.601100,106300.000000 +-117.140000,32.710000,43.000000,966.000000,255.000000,857.000000,208.000000,1.284100,72000.000000 +-117.140000,32.710000,39.000000,1647.000000,478.000000,2176.000000,479.000000,1.764200,82900.000000 +-117.140000,32.710000,34.000000,1694.000000,455.000000,1467.000000,425.000000,2.116400,139400.000000 +-117.140000,32.700000,48.000000,510.000000,180.000000,545.000000,132.000000,1.800800,86500.000000 +-117.140000,32.700000,44.000000,658.000000,218.000000,869.000000,212.000000,1.933800,89400.000000 +-117.140000,32.700000,43.000000,1126.000000,289.000000,1132.000000,294.000000,2.187500,87000.000000 +-117.140000,32.700000,40.000000,1227.000000,330.000000,1199.000000,316.000000,1.218800,92500.000000 +-117.140000,32.700000,36.000000,633.000000,148.000000,557.000000,139.000000,1.572900,82700.000000 +-117.140000,32.700000,32.000000,1280.000000,353.000000,1335.000000,330.000000,1.602300,77300.000000 +-117.150000,34.830000,30.000000,5370.000000,1062.000000,2778.000000,944.000000,3.099000,66800.000000 +-117.150000,34.480000,31.000000,265.000000,55.000000,186.000000,55.000000,2.125000,64800.000000 +-117.150000,34.220000,10.000000,1039.000000,174.000000,317.000000,109.000000,7.237100,171900.000000 +-117.150000,34.070000,15.000000,1852.000000,316.000000,906.000000,298.000000,5.352600,129800.000000 +-117.150000,34.060000,25.000000,3670.000000,644.000000,1815.000000,634.000000,4.065800,127400.000000 
+-117.150000,34.050000,9.000000,1442.000000,219.000000,633.000000,230.000000,5.022700,162300.000000 +-117.150000,34.030000,32.000000,2832.000000,393.000000,1033.000000,385.000000,6.564800,237200.000000 +-117.150000,34.030000,26.000000,5305.000000,701.000000,1818.000000,676.000000,6.146100,217100.000000 +-117.150000,33.700000,2.000000,6305.000000,1265.000000,2489.000000,1152.000000,3.131900,111500.000000 +-117.150000,33.450000,4.000000,9089.000000,1413.000000,3886.000000,1243.000000,4.690400,174200.000000 +-117.150000,33.200000,16.000000,2690.000000,459.000000,1253.000000,393.000000,4.032800,294600.000000 +-117.150000,33.160000,5.000000,4750.000000,962.000000,2726.000000,905.000000,3.583900,158500.000000 +-117.150000,33.140000,15.000000,1070.000000,208.000000,470.000000,217.000000,2.306200,158900.000000 +-117.150000,33.020000,4.000000,15029.000000,2279.000000,5613.000000,1696.000000,7.273100,450400.000000 +-117.150000,32.920000,16.000000,2969.000000,514.000000,1594.000000,465.000000,4.522100,168300.000000 +-117.150000,32.920000,16.000000,1972.000000,402.000000,1377.000000,413.000000,4.461500,168300.000000 +-117.150000,32.910000,15.000000,1613.000000,303.000000,702.000000,240.000000,4.875000,169300.000000 +-117.150000,32.910000,10.000000,2349.000000,431.000000,1598.000000,435.000000,4.822900,183200.000000 +-117.150000,32.800000,41.000000,1413.000000,261.000000,1070.000000,259.000000,2.357800,166700.000000 +-117.150000,32.800000,23.000000,2395.000000,476.000000,2284.000000,488.000000,3.729200,146300.000000 +-117.150000,32.780000,25.000000,1577.000000,266.000000,611.000000,284.000000,5.250000,205100.000000 +-117.150000,32.770000,16.000000,2056.000000,631.000000,847.000000,569.000000,2.957600,92200.000000 +-117.150000,32.760000,43.000000,2361.000000,489.000000,824.000000,470.000000,3.419600,302200.000000 +-117.150000,32.760000,40.000000,1809.000000,474.000000,826.000000,456.000000,2.651800,179800.000000 +-117.150000,32.760000,37.000000,1921.000000,502.000000,811.000000,472.000000,2.750000,175000.000000 +-117.150000,32.760000,36.000000,2644.000000,674.000000,1211.000000,654.000000,3.044500,214800.000000 +-117.150000,32.750000,9.000000,2818.000000,821.000000,851.000000,555.000000,2.618100,204200.000000 +-117.150000,32.750000,27.000000,3166.000000,867.000000,1332.000000,817.000000,2.674200,171400.000000 +-117.150000,32.740000,43.000000,2383.000000,607.000000,962.000000,587.000000,2.257800,263600.000000 +-117.150000,32.720000,52.000000,344.000000,177.000000,460.000000,147.000000,1.229200,137500.000000 +-117.150000,32.720000,51.000000,1321.000000,550.000000,781.000000,499.000000,1.307100,250000.000000 +-117.150000,32.710000,52.000000,402.000000,183.000000,557.000000,172.000000,1.312500,87500.000000 +-117.150000,32.710000,52.000000,217.000000,82.000000,531.000000,93.000000,1.660700,137500.000000 +-117.150000,32.700000,52.000000,458.000000,148.000000,1283.000000,166.000000,1.286300,86300.000000 +-117.150000,32.700000,50.000000,475.000000,172.000000,483.000000,120.000000,1.365700,162500.000000 +-117.160000,34.900000,16.000000,1579.000000,327.000000,934.000000,298.000000,2.730500,73800.000000 +-117.160000,34.260000,27.000000,9285.000000,1621.000000,1135.000000,410.000000,2.544600,135200.000000 +-117.160000,34.080000,9.000000,5306.000000,993.000000,2630.000000,925.000000,4.510000,135800.000000 +-117.160000,34.060000,17.000000,2285.000000,554.000000,1412.000000,541.000000,1.815200,94300.000000 +-117.160000,34.050000,23.000000,3215.000000,462.000000,1411.000000,435.000000,6.070100,149900.000000 
+-117.160000,33.610000,3.000000,2744.000000,428.000000,1223.000000,366.000000,4.794400,215300.000000 +-117.160000,33.570000,2.000000,20391.000000,3245.000000,7132.000000,2716.000000,3.944300,187300.000000 +-117.160000,33.540000,4.000000,4952.000000,1000.000000,2912.000000,943.000000,3.753800,147500.000000 +-117.160000,33.060000,16.000000,1988.000000,279.000000,770.000000,252.000000,5.866100,404500.000000 +-117.160000,32.910000,5.000000,1619.000000,272.000000,1063.000000,296.000000,6.089100,214600.000000 +-117.160000,32.890000,5.000000,8576.000000,1952.000000,5006.000000,1827.000000,4.359800,189100.000000 +-117.160000,32.820000,28.000000,2291.000000,371.000000,1098.000000,382.000000,4.687500,188000.000000 +-117.160000,32.810000,34.000000,2275.000000,375.000000,1021.000000,379.000000,3.637100,176300.000000 +-117.160000,32.810000,14.000000,4328.000000,1100.000000,2046.000000,1044.000000,2.289900,159000.000000 +-117.160000,32.800000,37.000000,422.000000,79.000000,211.000000,80.000000,3.062500,159700.000000 +-117.160000,32.800000,25.000000,1399.000000,329.000000,1308.000000,355.000000,2.568200,187500.000000 +-117.160000,32.800000,22.000000,2259.000000,634.000000,1213.000000,601.000000,2.500000,177800.000000 +-117.160000,32.790000,32.000000,1731.000000,413.000000,1569.000000,427.000000,3.337500,154300.000000 +-117.160000,32.780000,24.000000,3566.000000,765.000000,1697.000000,722.000000,3.637500,178600.000000 +-117.160000,32.750000,49.000000,1566.000000,494.000000,643.000000,419.000000,1.963700,137500.000000 +-117.160000,32.750000,34.000000,1785.000000,558.000000,804.000000,490.000000,2.268700,200000.000000 +-117.160000,32.750000,23.000000,2474.000000,594.000000,1107.000000,536.000000,2.970500,245500.000000 +-117.160000,32.750000,19.000000,5430.000000,1593.000000,2496.000000,1484.000000,2.911200,199100.000000 +-117.160000,32.740000,52.000000,852.000000,262.000000,389.000000,249.000000,2.604200,225000.000000 +-117.160000,32.740000,49.000000,1815.000000,495.000000,601.000000,410.000000,3.057100,418800.000000 +-117.160000,32.740000,43.000000,1437.000000,406.000000,692.000000,379.000000,3.197900,466700.000000 +-117.160000,32.740000,27.000000,2335.000000,604.000000,982.000000,590.000000,3.192100,261500.000000 +-117.160000,32.740000,21.000000,1882.000000,486.000000,903.000000,482.000000,3.060000,243800.000000 +-117.160000,32.730000,52.000000,1682.000000,617.000000,873.000000,534.000000,2.097200,112500.000000 +-117.160000,32.730000,52.000000,1218.000000,471.000000,821.000000,429.000000,1.959700,200000.000000 +-117.160000,32.720000,27.000000,1245.000000,471.000000,653.000000,451.000000,1.266800,225000.000000 +-117.160000,32.720000,24.000000,1232.000000,663.000000,1184.000000,626.000000,1.039100,162500.000000 +-117.160000,32.710000,5.000000,2508.000000,827.000000,2066.000000,761.000000,1.309200,325000.000000 +-117.160000,32.580000,36.000000,1940.000000,399.000000,1076.000000,382.000000,3.390600,147800.000000 +-117.170000,34.510000,15.000000,5151.000000,942.000000,2896.000000,897.000000,3.487500,90800.000000 +-117.170000,34.490000,13.000000,4460.000000,925.000000,2225.000000,840.000000,2.013600,94100.000000 +-117.170000,34.250000,15.000000,4236.000000,753.000000,703.000000,255.000000,3.562500,165500.000000 +-117.170000,34.120000,3.000000,15695.000000,2248.000000,6080.000000,1920.000000,6.217800,173900.000000 +-117.170000,34.120000,2.000000,3867.000000,573.000000,1275.000000,433.000000,5.413800,164400.000000 +-117.170000,34.080000,5.000000,1473.000000,228.000000,842.000000,257.000000,4.875000,138100.000000 
+-117.170000,34.070000,24.000000,6573.000000,1235.000000,2904.000000,1202.000000,3.065100,108000.000000 +-117.170000,34.050000,29.000000,4007.000000,700.000000,1576.000000,696.000000,3.180100,149300.000000 +-117.170000,34.050000,24.000000,2877.000000,507.000000,1141.000000,474.000000,4.205900,121500.000000 +-117.170000,34.030000,33.000000,4583.000000,648.000000,1760.000000,638.000000,6.330800,230600.000000 +-117.170000,33.830000,7.000000,77.000000,12.000000,64.000000,15.000000,4.600000,187500.000000 +-117.170000,33.660000,2.000000,7401.000000,1187.000000,2826.000000,839.000000,4.138600,177300.000000 +-117.170000,33.340000,15.000000,3313.000000,679.000000,1022.000000,564.000000,2.798600,189900.000000 +-117.170000,33.180000,25.000000,596.000000,115.000000,426.000000,137.000000,3.022100,214300.000000 +-117.170000,32.830000,24.000000,3541.000000,530.000000,1591.000000,530.000000,5.353800,212500.000000 +-117.170000,32.820000,24.000000,1623.000000,417.000000,911.000000,397.000000,2.740100,198100.000000 +-117.170000,32.820000,24.000000,1569.000000,377.000000,715.000000,321.000000,3.114600,187500.000000 +-117.170000,32.810000,33.000000,3064.000000,506.000000,1355.000000,488.000000,4.220000,178700.000000 +-117.170000,32.810000,26.000000,2424.000000,388.000000,974.000000,375.000000,4.739000,184100.000000 +-117.170000,32.800000,20.000000,2827.000000,554.000000,1822.000000,536.000000,3.470600,157600.000000 +-117.170000,32.790000,44.000000,2262.000000,647.000000,3009.000000,657.000000,2.266300,123600.000000 +-117.170000,32.790000,43.000000,1269.000000,297.000000,946.000000,285.000000,2.144700,133300.000000 +-117.170000,32.780000,42.000000,1104.000000,305.000000,892.000000,270.000000,2.276800,145200.000000 +-117.170000,32.780000,17.000000,3845.000000,1051.000000,3102.000000,944.000000,2.365800,164100.000000 +-117.170000,32.770000,6.000000,3856.000000,875.000000,1547.000000,816.000000,4.548100,164800.000000 +-117.170000,32.770000,35.000000,1399.000000,274.000000,695.000000,281.000000,3.767000,166800.000000 +-117.170000,32.750000,52.000000,1052.000000,239.000000,381.000000,201.000000,3.072600,289600.000000 +-117.170000,32.750000,38.000000,5430.000000,1176.000000,2357.000000,1100.000000,3.654000,249000.000000 +-117.170000,32.750000,28.000000,1514.000000,384.000000,540.000000,352.000000,2.153200,240000.000000 +-117.170000,32.740000,39.000000,3803.000000,806.000000,1567.000000,775.000000,3.703900,361500.000000 +-117.170000,32.740000,38.000000,5054.000000,1168.000000,2366.000000,1103.000000,2.942200,289400.000000 +-117.170000,32.730000,52.000000,55.000000,18.000000,65.000000,22.000000,1.659100,112500.000000 +-117.170000,32.730000,52.000000,408.000000,143.000000,313.000000,143.000000,1.815000,116700.000000 +-117.170000,32.730000,52.000000,1578.000000,487.000000,879.000000,446.000000,2.406900,215000.000000 +-117.170000,32.720000,44.000000,626.000000,256.000000,572.000000,229.000000,1.590900,262500.000000 +-117.170000,32.710000,7.000000,2493.000000,693.000000,951.000000,641.000000,4.237500,205000.000000 +-117.170000,32.710000,39.000000,311.000000,181.000000,206.000000,113.000000,0.768500,187500.000000 +-117.170000,32.700000,33.000000,4084.000000,897.000000,1804.000000,833.000000,4.048800,409700.000000 +-117.170000,32.690000,45.000000,3168.000000,598.000000,1341.000000,562.000000,4.518900,422200.000000 +-117.170000,32.690000,40.000000,2236.000000,331.000000,767.000000,316.000000,5.317700,500001.000000 +-117.170000,32.690000,19.000000,2802.000000,802.000000,1159.000000,597.000000,4.789100,334600.000000 
+-117.170000,32.680000,16.000000,5895.000000,1424.000000,873.000000,522.000000,7.366900,187500.000000 +-117.170000,32.630000,26.000000,1617.000000,279.000000,2745.000000,250.000000,3.535700,67500.000000 +-117.180000,34.540000,5.000000,3772.000000,619.000000,2097.000000,635.000000,3.819400,98500.000000 +-117.180000,34.300000,33.000000,399.000000,87.000000,71.000000,27.000000,1.875000,71300.000000 +-117.180000,34.080000,28.000000,2243.000000,399.000000,1464.000000,379.000000,3.210500,90300.000000 +-117.180000,34.070000,7.000000,1347.000000,301.000000,799.000000,276.000000,2.948500,112500.000000 +-117.180000,34.070000,28.000000,1306.000000,279.000000,885.000000,255.000000,2.115400,75300.000000 +-117.180000,34.070000,14.000000,1258.000000,245.000000,752.000000,264.000000,3.392400,97400.000000 +-117.180000,34.060000,28.000000,699.000000,180.000000,432.000000,168.000000,2.187500,81900.000000 +-117.180000,34.060000,26.000000,1953.000000,446.000000,1284.000000,414.000000,1.348500,85100.000000 +-117.180000,34.050000,52.000000,1820.000000,342.000000,601.000000,315.000000,2.612900,137000.000000 +-117.180000,34.050000,29.000000,3436.000000,731.000000,1323.000000,676.000000,2.494300,122300.000000 +-117.180000,34.040000,38.000000,2492.000000,381.000000,1003.000000,369.000000,3.687500,152800.000000 +-117.180000,33.780000,7.000000,1697.000000,424.000000,808.000000,354.000000,1.341700,169300.000000 +-117.180000,33.510000,13.000000,270.000000,42.000000,120.000000,42.000000,6.993000,500001.000000 +-117.180000,33.160000,15.000000,5923.000000,1206.000000,3943.000000,1006.000000,3.179300,159900.000000 +-117.180000,33.150000,7.000000,6225.000000,1683.000000,5410.000000,1580.000000,2.320000,117500.000000 +-117.180000,33.110000,16.000000,3470.000000,601.000000,1197.000000,552.000000,5.181400,279900.000000 +-117.180000,32.950000,4.000000,19001.000000,2688.000000,8980.000000,2441.000000,6.323700,260900.000000 +-117.180000,32.840000,31.000000,3064.000000,575.000000,1476.000000,549.000000,3.666700,175900.000000 +-117.180000,32.840000,30.000000,2290.000000,523.000000,1272.000000,472.000000,3.560600,165100.000000 +-117.180000,32.830000,31.000000,1772.000000,353.000000,1090.000000,350.000000,3.926500,162000.000000 +-117.180000,32.830000,27.000000,2346.000000,399.000000,1105.000000,373.000000,4.270800,182800.000000 +-117.180000,32.820000,25.000000,1756.000000,301.000000,722.000000,312.000000,4.562500,162300.000000 +-117.180000,32.810000,28.000000,3436.000000,537.000000,1503.000000,498.000000,4.767900,204000.000000 +-117.180000,32.810000,19.000000,6823.000000,1509.000000,3784.000000,1509.000000,3.103200,179500.000000 +-117.180000,32.800000,30.000000,2456.000000,390.000000,1022.000000,393.000000,3.854200,198500.000000 +-117.180000,32.800000,10.000000,3821.000000,631.000000,1605.000000,609.000000,5.545400,217100.000000 +-117.180000,32.790000,30.000000,5201.000000,1104.000000,2961.000000,1064.000000,3.266100,140400.000000 +-117.180000,32.780000,21.000000,4185.000000,1018.000000,3122.000000,993.000000,3.048100,210000.000000 +-117.180000,32.770000,23.000000,1215.000000,225.000000,592.000000,224.000000,3.400000,200600.000000 +-117.180000,32.770000,16.000000,2374.000000,780.000000,913.000000,705.000000,2.738600,87500.000000 +-117.180000,32.760000,8.000000,3694.000000,997.000000,1297.000000,807.000000,3.649200,158900.000000 +-117.180000,32.760000,17.000000,711.000000,254.000000,327.000000,227.000000,2.649300,67500.000000 +-117.180000,32.750000,52.000000,1539.000000,212.000000,535.000000,224.000000,5.392000,408500.000000 
+-117.180000,32.750000,52.000000,1504.000000,208.000000,518.000000,196.000000,8.603000,459600.000000 +-117.180000,32.750000,36.000000,2282.000000,534.000000,918.000000,531.000000,2.722200,284700.000000 +-117.180000,32.740000,39.000000,3132.000000,738.000000,1200.000000,690.000000,2.528800,274000.000000 +-117.180000,32.690000,52.000000,1837.000000,313.000000,668.000000,300.000000,5.100900,500001.000000 +-117.180000,32.690000,48.000000,2764.000000,491.000000,978.000000,449.000000,5.124900,432400.000000 +-117.180000,32.690000,44.000000,2819.000000,514.000000,1258.000000,503.000000,4.477700,452800.000000 +-117.180000,32.690000,37.000000,3112.000000,716.000000,1304.000000,674.000000,3.212100,320800.000000 +-117.190000,34.270000,16.000000,7961.000000,1147.000000,879.000000,280.000000,5.214600,255200.000000 +-117.190000,34.100000,5.000000,2167.000000,384.000000,1174.000000,358.000000,4.011400,97700.000000 +-117.190000,34.080000,5.000000,4458.000000,751.000000,2392.000000,773.000000,4.593800,126500.000000 +-117.190000,34.080000,22.000000,2467.000000,555.000000,1567.000000,494.000000,2.653600,84700.000000 +-117.190000,34.070000,40.000000,2374.000000,500.000000,1772.000000,455.000000,2.189000,72500.000000 +-117.190000,34.060000,37.000000,1467.000000,348.000000,1316.000000,339.000000,1.448000,72800.000000 +-117.190000,34.060000,21.000000,6107.000000,1559.000000,2805.000000,1444.000000,2.564300,102700.000000 +-117.190000,34.050000,52.000000,1949.000000,432.000000,767.000000,392.000000,2.514300,117600.000000 +-117.190000,34.050000,33.000000,3007.000000,498.000000,1252.000000,488.000000,3.881600,134600.000000 +-117.190000,34.050000,33.000000,1688.000000,313.000000,808.000000,298.000000,3.218800,117800.000000 +-117.190000,34.030000,36.000000,2223.000000,361.000000,942.000000,331.000000,4.680600,152400.000000 +-117.190000,34.030000,20.000000,856.000000,124.000000,395.000000,145.000000,10.863400,381800.000000 +-117.190000,33.900000,3.000000,21060.000000,3366.000000,9623.000000,2812.000000,4.189000,143000.000000 +-117.190000,33.700000,24.000000,5783.000000,1256.000000,1990.000000,1151.000000,1.901400,83500.000000 +-117.190000,33.690000,3.000000,6484.000000,1037.000000,3295.000000,1074.000000,4.588100,136400.000000 +-117.190000,33.530000,6.000000,108.000000,18.000000,43.000000,17.000000,3.475000,187500.000000 +-117.190000,33.410000,16.000000,3031.000000,554.000000,1301.000000,518.000000,4.088200,296100.000000 +-117.190000,33.340000,15.000000,3310.000000,488.000000,1104.000000,460.000000,6.100900,314400.000000 +-117.190000,33.180000,7.000000,3561.000000,722.000000,1921.000000,657.000000,4.112800,209700.000000 +-117.190000,32.860000,19.000000,3716.000000,563.000000,1788.000000,587.000000,5.211300,267400.000000 +-117.190000,32.860000,18.000000,4231.000000,728.000000,2030.000000,720.000000,6.180500,272400.000000 +-117.190000,32.850000,15.000000,2895.000000,498.000000,1164.000000,443.000000,5.102000,417500.000000 +-117.190000,32.840000,35.000000,2263.000000,427.000000,1001.000000,408.000000,3.875000,172000.000000 +-117.190000,32.840000,30.000000,2492.000000,406.000000,1250.000000,431.000000,5.527700,197100.000000 +-117.190000,32.830000,30.000000,2288.000000,448.000000,1240.000000,469.000000,4.011400,169800.000000 +-117.190000,32.820000,35.000000,2197.000000,353.000000,945.000000,357.000000,4.921900,192900.000000 +-117.190000,32.820000,35.000000,1074.000000,180.000000,442.000000,173.000000,5.253000,204000.000000 +-117.190000,32.820000,34.000000,3850.000000,608.000000,1619.000000,602.000000,5.046500,208200.000000 
+-117.190000,32.810000,33.000000,5226.000000,833.000000,2221.000000,839.000000,5.149100,207000.000000 +-117.190000,32.800000,16.000000,2593.000000,794.000000,1235.000000,684.000000,3.130400,166300.000000 +-117.190000,32.790000,36.000000,1514.000000,258.000000,665.000000,278.000000,3.857100,235100.000000 +-117.190000,32.790000,35.000000,1788.000000,378.000000,777.000000,374.000000,3.371300,238400.000000 +-117.190000,32.770000,14.000000,3575.000000,992.000000,1645.000000,839.000000,2.439700,140600.000000 +-117.190000,32.760000,52.000000,1294.000000,175.000000,434.000000,180.000000,5.791400,500001.000000 +-117.190000,32.750000,52.000000,25.000000,5.000000,13.000000,5.000000,0.536000,162500.000000 +-117.190000,32.750000,52.000000,1495.000000,230.000000,459.000000,190.000000,8.154800,500001.000000 +-117.190000,32.750000,52.000000,1388.000000,213.000000,513.000000,211.000000,6.130900,411600.000000 +-117.190000,32.750000,33.000000,1115.000000,316.000000,583.000000,269.000000,2.588200,258300.000000 +-117.190000,32.690000,35.000000,2921.000000,438.000000,1042.000000,415.000000,6.361200,482700.000000 +-117.200000,34.520000,12.000000,4476.000000,761.000000,2255.000000,735.000000,3.925000,118500.000000 +-117.200000,34.500000,10.000000,4201.000000,850.000000,2378.000000,808.000000,2.178100,92200.000000 +-117.200000,34.480000,7.000000,4998.000000,953.000000,2764.000000,891.000000,3.205000,101900.000000 +-117.200000,34.460000,7.000000,8414.000000,1584.000000,5146.000000,1517.000000,3.279400,92500.000000 +-117.200000,34.260000,17.000000,9419.000000,1455.000000,1382.000000,459.000000,6.223300,230900.000000 +-117.200000,34.240000,22.000000,8106.000000,1665.000000,1062.000000,423.000000,3.043400,137200.000000 +-117.200000,34.150000,18.000000,1859.000000,251.000000,747.000000,256.000000,7.732000,173200.000000 +-117.200000,34.140000,18.000000,1920.000000,333.000000,890.000000,323.000000,5.159000,144800.000000 +-117.200000,34.140000,14.000000,2647.000000,524.000000,989.000000,479.000000,3.151300,160000.000000 +-117.200000,34.130000,14.000000,3998.000000,711.000000,1509.000000,665.000000,3.413800,126700.000000 +-117.200000,34.120000,24.000000,3532.000000,618.000000,1681.000000,590.000000,3.500000,113900.000000 +-117.200000,34.040000,24.000000,1587.000000,222.000000,676.000000,234.000000,6.071500,173400.000000 +-117.200000,34.040000,23.000000,1762.000000,267.000000,1132.000000,279.000000,5.991500,153200.000000 +-117.200000,33.830000,14.000000,1265.000000,230.000000,621.000000,173.000000,3.661800,161300.000000 +-117.200000,33.720000,8.000000,5528.000000,1073.000000,1674.000000,918.000000,2.533500,110100.000000 +-117.200000,33.720000,16.000000,5373.000000,1079.000000,1573.000000,933.000000,1.991200,98600.000000 +-117.200000,33.710000,24.000000,4210.000000,920.000000,1283.000000,829.000000,2.088100,83300.000000 +-117.200000,33.700000,23.000000,6323.000000,1196.000000,1984.000000,1124.000000,2.327600,92400.000000 +-117.200000,33.380000,14.000000,5392.000000,821.000000,2350.000000,810.000000,5.050700,291500.000000 +-117.200000,33.240000,12.000000,4992.000000,780.000000,2106.000000,801.000000,6.207900,307300.000000 +-117.200000,33.200000,16.000000,4409.000000,629.000000,1875.000000,609.000000,5.543000,286400.000000 +-117.200000,33.150000,11.000000,4091.000000,864.000000,1927.000000,765.000000,3.013900,199000.000000 +-117.200000,33.140000,19.000000,2025.000000,414.000000,1663.000000,403.000000,3.814700,139200.000000 +-117.200000,33.070000,5.000000,10394.000000,1617.000000,4496.000000,1553.000000,5.928900,411300.000000 
+-117.200000,32.860000,4.000000,4308.000000,1095.000000,1923.000000,932.000000,3.935600,267000.000000 +-117.200000,32.840000,32.000000,2033.000000,394.000000,989.000000,389.000000,3.258300,181400.000000 +-117.200000,32.830000,35.000000,1377.000000,350.000000,792.000000,313.000000,2.847200,161400.000000 +-117.200000,32.820000,35.000000,1217.000000,220.000000,643.000000,237.000000,3.946400,171600.000000 +-117.200000,32.800000,34.000000,4854.000000,912.000000,2089.000000,854.000000,3.854200,200000.000000 +-117.200000,32.790000,34.000000,757.000000,212.000000,409.000000,222.000000,3.231200,192200.000000 +-117.200000,32.790000,16.000000,2079.000000,394.000000,746.000000,383.000000,5.095800,300000.000000 +-117.200000,32.780000,38.000000,2662.000000,498.000000,1132.000000,496.000000,4.005700,241600.000000 +-117.200000,32.770000,31.000000,1952.000000,471.000000,936.000000,462.000000,2.862100,196900.000000 +-117.210000,34.510000,17.000000,4379.000000,629.000000,1720.000000,595.000000,5.086000,148400.000000 +-117.210000,34.280000,16.000000,3326.000000,569.000000,527.000000,192.000000,5.742100,167600.000000 +-117.210000,34.140000,16.000000,1613.000000,245.000000,811.000000,267.000000,5.259100,140700.000000 +-117.210000,34.130000,31.000000,3037.000000,565.000000,1834.000000,575.000000,3.344500,92900.000000 +-117.210000,34.120000,32.000000,1677.000000,354.000000,1021.000000,339.000000,3.685300,90900.000000 +-117.210000,34.110000,27.000000,1245.000000,229.000000,692.000000,234.000000,3.217600,89400.000000 +-117.210000,34.110000,26.000000,1757.000000,304.000000,905.000000,281.000000,3.410300,90900.000000 +-117.210000,34.080000,5.000000,5749.000000,1385.000000,2382.000000,1088.000000,3.058700,143100.000000 +-117.210000,34.050000,4.000000,2904.000000,764.000000,1250.000000,664.000000,3.213100,137500.000000 +-117.210000,34.040000,14.000000,3063.000000,426.000000,1570.000000,419.000000,6.291700,224700.000000 +-117.210000,33.970000,3.000000,18356.000000,2537.000000,8437.000000,2342.000000,5.640900,197700.000000 +-117.210000,33.950000,5.000000,8403.000000,1240.000000,3962.000000,1150.000000,5.217400,155500.000000 +-117.210000,33.930000,4.000000,10002.000000,1468.000000,5439.000000,1397.000000,5.022300,152600.000000 +-117.210000,33.820000,2.000000,4198.000000,805.000000,1943.000000,673.000000,3.905200,122100.000000 +-117.210000,33.710000,16.000000,8476.000000,1758.000000,2711.000000,1427.000000,2.184800,97900.000000 +-117.210000,33.610000,7.000000,7722.000000,1324.000000,2975.000000,1161.000000,3.627300,150900.000000 +-117.210000,33.340000,10.000000,5294.000000,817.000000,2312.000000,810.000000,5.456300,325700.000000 +-117.210000,33.200000,22.000000,3337.000000,518.000000,1288.000000,466.000000,5.040000,253700.000000 +-117.210000,33.170000,16.000000,1787.000000,361.000000,1446.000000,362.000000,3.750000,163800.000000 +-117.210000,33.160000,13.000000,2937.000000,698.000000,1246.000000,579.000000,2.648700,196000.000000 +-117.210000,33.140000,12.000000,4839.000000,954.000000,1708.000000,952.000000,2.858600,163300.000000 +-117.210000,33.130000,19.000000,3068.000000,596.000000,912.000000,554.000000,3.775000,168000.000000 +-117.210000,33.130000,15.000000,1889.000000,368.000000,754.000000,409.000000,2.227800,132800.000000 +-117.210000,33.120000,4.000000,3261.000000,689.000000,926.000000,561.000000,4.367200,258900.000000 +-117.210000,33.030000,20.000000,3370.000000,433.000000,1020.000000,408.000000,11.091100,500001.000000 
+-117.210000,33.020000,26.000000,3194.000000,454.000000,1032.000000,406.000000,10.156000,500001.000000 +-117.210000,32.960000,3.000000,6251.000000,988.000000,2330.000000,893.000000,8.435500,467600.000000 +-117.210000,32.890000,14.000000,3114.000000,773.000000,1592.000000,776.000000,3.317600,156100.000000 +-117.210000,32.870000,12.000000,1428.000000,303.000000,528.000000,269.000000,4.142900,254400.000000 +-117.210000,32.860000,26.000000,1352.000000,202.000000,654.000000,217.000000,5.369300,260700.000000 +-117.210000,32.860000,24.000000,3596.000000,494.000000,1573.000000,492.000000,6.538200,326000.000000 +-117.210000,32.860000,16.000000,2800.000000,566.000000,1267.000000,518.000000,3.279400,148600.000000 +-117.210000,32.850000,26.000000,2012.000000,315.000000,872.000000,335.000000,5.406700,277500.000000 +-117.210000,32.840000,34.000000,2158.000000,366.000000,1046.000000,335.000000,4.540200,182100.000000 +-117.210000,32.830000,36.000000,1475.000000,328.000000,806.000000,327.000000,3.507800,166000.000000 +-117.210000,32.830000,35.000000,2259.000000,501.000000,1340.000000,511.000000,3.448200,162500.000000 +-117.210000,32.830000,28.000000,3241.000000,533.000000,1334.000000,513.000000,4.180600,199600.000000 +-117.210000,32.810000,33.000000,4773.000000,873.000000,1954.000000,845.000000,4.386200,184800.000000 +-117.210000,32.810000,27.000000,1318.000000,216.000000,495.000000,191.000000,5.283700,283800.000000 +-117.210000,32.810000,26.000000,2496.000000,407.000000,1062.000000,380.000000,5.541300,302100.000000 +-117.210000,32.800000,34.000000,1398.000000,222.000000,532.000000,244.000000,3.710200,289600.000000 +-117.210000,32.750000,27.000000,2072.000000,534.000000,1118.000000,510.000000,2.804300,262100.000000 +-117.210000,32.750000,15.000000,1716.000000,702.000000,914.000000,672.000000,1.061200,300000.000000 +-117.210000,32.740000,52.000000,1245.000000,174.000000,468.000000,193.000000,6.932200,334500.000000 +-117.220000,34.540000,8.000000,12526.000000,2495.000000,6133.000000,2324.000000,2.907200,119200.000000 +-117.220000,34.480000,7.000000,2449.000000,447.000000,1217.000000,408.000000,3.664600,109900.000000 +-117.220000,34.440000,5.000000,4787.000000,910.000000,1944.000000,806.000000,2.657600,98500.000000 +-117.220000,34.130000,10.000000,5951.000000,1330.000000,3204.000000,1159.000000,2.701100,110200.000000 +-117.220000,34.120000,34.000000,2457.000000,499.000000,1538.000000,507.000000,2.809000,82500.000000 +-117.220000,34.120000,30.000000,2512.000000,597.000000,1390.000000,523.000000,2.372500,77200.000000 +-117.220000,34.110000,26.000000,2972.000000,603.000000,1972.000000,532.000000,2.038800,80400.000000 +-117.220000,34.070000,8.000000,3065.000000,692.000000,1440.000000,666.000000,3.236800,129200.000000 +-117.220000,33.930000,14.000000,5104.000000,1026.000000,3513.000000,972.000000,3.214800,117000.000000 +-117.220000,33.920000,5.000000,16884.000000,2865.000000,9509.000000,2688.000000,4.093800,130900.000000 +-117.220000,33.900000,8.000000,8302.000000,1461.000000,5155.000000,1370.000000,4.046700,121500.000000 +-117.220000,33.870000,16.000000,56.000000,7.000000,39.000000,14.000000,2.625000,500001.000000 +-117.220000,33.810000,4.000000,9911.000000,1946.000000,5145.000000,1661.000000,3.423700,113700.000000 +-117.220000,33.800000,3.000000,5284.000000,920.000000,2703.000000,729.000000,4.071700,126500.000000 +-117.220000,33.740000,7.000000,1810.000000,386.000000,931.000000,355.000000,2.522100,109200.000000 +-117.220000,33.660000,12.000000,1869.000000,356.000000,1007.000000,323.000000,3.125000,117200.000000 
+-117.220000,33.480000,5.000000,1585.000000,247.000000,510.000000,181.000000,6.913600,493300.000000 +-117.220000,33.360000,16.000000,3165.000000,482.000000,1351.000000,452.000000,4.605000,263300.000000 +-117.220000,33.220000,17.000000,3675.000000,672.000000,1693.000000,597.000000,3.388200,190800.000000 +-117.220000,33.220000,15.000000,1430.000000,343.000000,704.000000,322.000000,1.957100,162500.000000 +-117.220000,33.210000,19.000000,4400.000000,828.000000,1901.000000,735.000000,3.637500,198800.000000 +-117.220000,33.200000,31.000000,1736.000000,277.000000,801.000000,292.000000,4.484400,205500.000000 +-117.220000,33.190000,16.000000,3004.000000,656.000000,1948.000000,606.000000,2.701900,216900.000000 +-117.220000,33.180000,13.000000,4273.000000,886.000000,2328.000000,801.000000,3.344400,183900.000000 +-117.220000,33.170000,6.000000,1487.000000,362.000000,810.000000,322.000000,3.625000,135700.000000 +-117.220000,33.140000,5.000000,4576.000000,848.000000,2314.000000,705.000000,5.012300,210400.000000 +-117.220000,32.870000,5.000000,3511.000000,1008.000000,1599.000000,918.000000,3.854200,176600.000000 +-117.220000,32.870000,14.000000,3512.000000,807.000000,1835.000000,792.000000,3.350000,171000.000000 +-117.220000,32.850000,26.000000,1647.000000,261.000000,694.000000,259.000000,4.687500,274400.000000 +-117.220000,32.840000,19.000000,2691.000000,347.000000,1154.000000,366.000000,8.051000,363600.000000 +-117.220000,32.830000,34.000000,2936.000000,597.000000,1512.000000,571.000000,3.784100,176900.000000 +-117.220000,32.830000,31.000000,3958.000000,727.000000,1924.000000,728.000000,5.460500,190200.000000 +-117.220000,32.830000,31.000000,2558.000000,512.000000,1164.000000,492.000000,3.431800,200400.000000 +-117.220000,32.830000,17.000000,1124.000000,187.000000,553.000000,205.000000,5.745100,237300.000000 +-117.220000,32.820000,35.000000,756.000000,135.000000,423.000000,136.000000,3.523400,183900.000000 +-117.220000,32.820000,22.000000,3738.000000,795.000000,1476.000000,728.000000,3.796300,303100.000000 +-117.220000,32.810000,24.000000,730.000000,196.000000,335.000000,203.000000,3.507800,362500.000000 +-117.220000,32.810000,21.000000,1703.000000,335.000000,902.000000,369.000000,3.781300,362500.000000 +-117.220000,32.800000,23.000000,1906.000000,525.000000,1029.000000,491.000000,2.930000,183300.000000 +-117.220000,32.750000,34.000000,6001.000000,1111.000000,2654.000000,1072.000000,4.587800,291000.000000 +-117.220000,32.750000,26.000000,696.000000,185.000000,384.000000,184.000000,2.612100,125000.000000 +-117.220000,32.750000,26.000000,617.000000,112.000000,251.000000,110.000000,3.803600,162000.000000 +-117.220000,32.750000,24.000000,3914.000000,985.000000,2147.000000,874.000000,2.973500,225000.000000 +-117.220000,32.740000,52.000000,1283.000000,173.000000,436.000000,190.000000,7.402900,345700.000000 +-117.220000,32.740000,52.000000,1260.000000,202.000000,555.000000,209.000000,7.275800,345200.000000 +-117.220000,32.740000,41.000000,2621.000000,542.000000,1074.000000,471.000000,2.401600,287500.000000 +-117.230000,34.510000,9.000000,5756.000000,807.000000,2158.000000,758.000000,5.587500,167800.000000 +-117.230000,34.490000,9.000000,4055.000000,536.000000,1458.000000,478.000000,5.420100,170600.000000 +-117.230000,34.150000,17.000000,5036.000000,817.000000,2084.000000,833.000000,4.644500,137200.000000 +-117.230000,34.140000,16.000000,2577.000000,521.000000,956.000000,472.000000,2.562500,129400.000000 +-117.230000,34.130000,10.000000,1145.000000,293.000000,726.000000,251.000000,1.645000,68700.000000 
+-117.230000,34.120000,6.000000,4464.000000,1093.000000,2364.000000,952.000000,2.384800,81600.000000 +-117.230000,34.120000,18.000000,1439.000000,319.000000,699.000000,310.000000,2.107100,73500.000000 +-117.230000,34.110000,33.000000,2170.000000,500.000000,1425.000000,472.000000,2.013300,78300.000000 +-117.230000,34.110000,22.000000,1162.000000,221.000000,995.000000,244.000000,2.587500,81300.000000 +-117.230000,33.960000,5.000000,9179.000000,1361.000000,4573.000000,1294.000000,5.253000,163300.000000 +-117.230000,33.940000,8.000000,2405.000000,537.000000,1594.000000,517.000000,3.078900,114200.000000 +-117.230000,33.940000,7.000000,13195.000000,2696.000000,6763.000000,2437.000000,3.585100,142000.000000 +-117.230000,33.890000,5.000000,11775.000000,2031.000000,6686.000000,1911.000000,4.195300,136600.000000 +-117.230000,33.830000,2.000000,1424.000000,251.000000,681.000000,192.000000,4.083300,100000.000000 +-117.230000,33.790000,17.000000,3318.000000,759.000000,2016.000000,673.000000,2.296900,89300.000000 +-117.230000,33.780000,23.000000,3465.000000,703.000000,2672.000000,607.000000,1.976700,81500.000000 +-117.230000,33.770000,5.000000,2108.000000,496.000000,1666.000000,461.000000,2.000000,83000.000000 +-117.230000,33.680000,10.000000,3659.000000,650.000000,1476.000000,515.000000,3.886900,125900.000000 +-117.230000,33.570000,6.000000,13724.000000,2269.000000,5860.000000,1986.000000,3.961700,183000.000000 +-117.230000,33.380000,18.000000,3339.000000,704.000000,1727.000000,652.000000,2.839300,173200.000000 +-117.230000,33.240000,26.000000,1991.000000,330.000000,1014.000000,304.000000,4.306800,240100.000000 +-117.230000,33.230000,13.000000,2899.000000,657.000000,1946.000000,579.000000,2.987500,172000.000000 +-117.230000,33.220000,18.000000,2334.000000,573.000000,962.000000,557.000000,1.808000,97000.000000 +-117.230000,33.220000,16.000000,3224.000000,729.000000,1036.000000,608.000000,2.024600,148800.000000 +-117.230000,33.210000,34.000000,544.000000,108.000000,348.000000,127.000000,4.125000,164600.000000 +-117.230000,33.210000,21.000000,1934.000000,386.000000,861.000000,381.000000,3.618100,213800.000000 +-117.230000,33.200000,29.000000,3372.000000,720.000000,1770.000000,693.000000,3.510900,166000.000000 +-117.230000,33.190000,22.000000,2554.000000,447.000000,1147.000000,422.000000,3.634600,192500.000000 +-117.230000,33.160000,2.000000,4624.000000,946.000000,2091.000000,808.000000,3.673600,214500.000000 +-117.230000,33.090000,7.000000,5320.000000,855.000000,2015.000000,768.000000,6.337300,279600.000000 +-117.230000,33.010000,18.000000,3961.000000,511.000000,1541.000000,470.000000,11.111800,500001.000000 +-117.230000,32.990000,17.000000,2718.000000,326.000000,1011.000000,319.000000,15.000100,500001.000000 +-117.230000,32.860000,16.000000,1675.000000,354.000000,604.000000,332.000000,5.232600,188300.000000 +-117.230000,32.860000,15.000000,1703.000000,320.000000,587.000000,282.000000,5.085500,209800.000000 +-117.230000,32.860000,15.000000,1199.000000,301.000000,510.000000,296.000000,3.608300,180100.000000 +-117.230000,32.850000,25.000000,4229.000000,601.000000,1634.000000,574.000000,6.395500,316700.000000 +-117.230000,32.810000,24.000000,3271.000000,508.000000,1496.000000,482.000000,5.935900,422200.000000 +-117.230000,32.810000,22.000000,3205.000000,429.000000,1083.000000,410.000000,8.184400,406300.000000 +-117.230000,32.800000,28.000000,3379.000000,918.000000,1849.000000,849.000000,3.029300,241700.000000 +-117.230000,32.800000,27.000000,1297.000000,355.000000,776.000000,337.000000,2.464300,244400.000000 
+-117.230000,32.800000,22.000000,2981.000000,873.000000,1751.000000,745.000000,2.348200,190600.000000 +-117.230000,32.800000,21.000000,2429.000000,579.000000,1011.000000,538.000000,3.225000,229400.000000 +-117.230000,32.790000,28.000000,2453.000000,648.000000,1082.000000,617.000000,3.625000,266700.000000 +-117.230000,32.750000,5.000000,1824.000000,509.000000,892.000000,426.000000,3.428600,137500.000000 +-117.230000,32.750000,21.000000,2050.000000,608.000000,1131.000000,550.000000,2.477900,165000.000000 +-117.230000,32.750000,11.000000,4304.000000,1245.000000,1960.000000,1105.000000,3.345600,159800.000000 +-117.230000,32.740000,44.000000,1404.000000,229.000000,513.000000,217.000000,4.180600,263800.000000 +-117.230000,32.740000,35.000000,2615.000000,525.000000,1312.000000,547.000000,4.133900,238200.000000 +-117.230000,32.730000,35.000000,2914.000000,683.000000,1562.000000,638.000000,2.525900,240200.000000 +-117.230000,32.720000,38.000000,2827.000000,581.000000,972.000000,558.000000,3.236100,500001.000000 +-117.240000,34.590000,4.000000,5027.000000,797.000000,1869.000000,686.000000,3.550700,186100.000000 +-117.240000,34.150000,26.000000,2041.000000,293.000000,936.000000,375.000000,6.000000,140200.000000 +-117.240000,34.150000,23.000000,3847.000000,608.000000,1621.000000,630.000000,4.611100,128400.000000 +-117.240000,34.140000,6.000000,2383.000000,606.000000,1301.000000,488.000000,3.016000,107500.000000 +-117.240000,34.130000,26.000000,3774.000000,716.000000,1913.000000,620.000000,3.353400,98900.000000 +-117.240000,34.130000,24.000000,1203.000000,310.000000,594.000000,187.000000,1.152200,87500.000000 +-117.240000,34.120000,29.000000,2654.000000,667.000000,1822.000000,593.000000,2.156300,72300.000000 +-117.240000,34.110000,23.000000,1920.000000,454.000000,1161.000000,358.000000,2.210900,73200.000000 +-117.240000,34.060000,9.000000,3603.000000,786.000000,1782.000000,718.000000,3.260400,93300.000000 +-117.240000,34.040000,5.000000,1775.000000,234.000000,726.000000,222.000000,7.978000,223900.000000 +-117.240000,34.040000,17.000000,3362.000000,507.000000,1520.000000,496.000000,6.198600,214500.000000 +-117.240000,33.950000,11.000000,6617.000000,1118.000000,3710.000000,1087.000000,4.787700,132600.000000 +-117.240000,33.940000,15.000000,1569.000000,423.000000,1123.000000,369.000000,1.611100,113900.000000 +-117.240000,33.850000,8.000000,1031.000000,201.000000,606.000000,179.000000,2.819400,136300.000000 +-117.240000,33.770000,9.000000,6907.000000,1379.000000,3665.000000,1290.000000,2.840100,104200.000000 +-117.240000,33.400000,16.000000,2704.000000,463.000000,1322.000000,424.000000,3.785700,227000.000000 +-117.240000,33.380000,16.000000,2792.000000,525.000000,1696.000000,516.000000,3.668000,171200.000000 +-117.240000,33.370000,14.000000,4687.000000,793.000000,2436.000000,779.000000,4.539100,180900.000000 +-117.240000,33.360000,11.000000,2786.000000,480.000000,1250.000000,450.000000,4.500000,222600.000000 +-117.240000,33.340000,17.000000,2866.000000,442.000000,1354.000000,431.000000,4.576400,257300.000000 +-117.240000,33.230000,21.000000,1718.000000,308.000000,1194.000000,312.000000,3.435900,150900.000000 +-117.240000,33.230000,13.000000,3756.000000,648.000000,1767.000000,614.000000,4.077600,196000.000000 +-117.240000,33.220000,20.000000,1962.000000,334.000000,1173.000000,349.000000,4.131600,162500.000000 +-117.240000,33.210000,9.000000,2486.000000,626.000000,1938.000000,525.000000,2.129300,151400.000000 
+-117.240000,33.210000,19.000000,1872.000000,489.000000,1859.000000,446.000000,2.187500,121700.000000 +-117.240000,33.210000,18.000000,1846.000000,419.000000,1581.000000,387.000000,3.098200,111300.000000 +-117.240000,33.200000,25.000000,1631.000000,415.000000,1045.000000,386.000000,2.450500,147500.000000 +-117.240000,33.190000,19.000000,1569.000000,351.000000,1035.000000,352.000000,2.919100,159400.000000 +-117.240000,33.050000,15.000000,3029.000000,555.000000,1559.000000,546.000000,5.312900,169200.000000 +-117.240000,33.050000,11.000000,5827.000000,882.000000,2588.000000,842.000000,6.402700,344200.000000 +-117.240000,33.040000,13.000000,3498.000000,663.000000,1412.000000,618.000000,3.212000,147600.000000 +-117.240000,33.000000,16.000000,2512.000000,356.000000,795.000000,353.000000,7.597500,369100.000000 +-117.240000,32.980000,4.000000,6423.000000,1042.000000,2607.000000,983.000000,7.634800,337000.000000 +-117.240000,32.950000,18.000000,1591.000000,268.000000,547.000000,243.000000,5.954700,329300.000000 +-117.240000,32.940000,12.000000,2165.000000,437.000000,792.000000,386.000000,5.264800,294400.000000 +-117.240000,32.850000,22.000000,3479.000000,448.000000,1252.000000,440.000000,10.070700,500001.000000 +-117.240000,32.810000,34.000000,2420.000000,391.000000,917.000000,392.000000,6.588100,394400.000000 +-117.240000,32.810000,33.000000,1588.000000,289.000000,683.000000,301.000000,5.410300,332400.000000 +-117.240000,32.800000,30.000000,1917.000000,462.000000,828.000000,437.000000,2.467100,276300.000000 +-117.240000,32.800000,26.000000,3433.000000,873.000000,1492.000000,798.000000,2.925800,234800.000000 +-117.240000,32.800000,19.000000,1863.000000,497.000000,868.000000,503.000000,2.288000,210000.000000 +-117.240000,32.800000,18.000000,2205.000000,661.000000,874.000000,580.000000,3.801800,112500.000000 +-117.240000,32.790000,25.000000,2135.000000,691.000000,566.000000,320.000000,2.690200,212500.000000 +-117.240000,32.790000,20.000000,961.000000,278.000000,525.000000,254.000000,3.183800,245800.000000 +-117.240000,32.790000,18.000000,2539.000000,616.000000,964.000000,526.000000,3.430600,275000.000000 +-117.240000,32.790000,18.000000,1741.000000,602.000000,508.000000,283.000000,3.262500,193800.000000 +-117.240000,32.790000,17.000000,1149.000000,266.000000,403.000000,228.000000,4.165200,241700.000000 +-117.240000,32.780000,44.000000,2172.000000,431.000000,892.000000,420.000000,4.174200,342200.000000 +-117.240000,32.750000,41.000000,1989.000000,514.000000,1015.000000,489.000000,2.790000,226000.000000 +-117.240000,32.750000,36.000000,2831.000000,669.000000,1279.000000,660.000000,2.989600,252700.000000 +-117.240000,32.750000,36.000000,1856.000000,475.000000,822.000000,416.000000,2.304200,220600.000000 +-117.240000,32.750000,33.000000,1980.000000,614.000000,1057.000000,567.000000,2.204200,231300.000000 +-117.240000,32.740000,45.000000,1718.000000,293.000000,757.000000,329.000000,4.050000,284900.000000 +-117.240000,32.740000,44.000000,1686.000000,285.000000,712.000000,298.000000,4.026800,298600.000000 +-117.240000,32.740000,44.000000,1488.000000,259.000000,667.000000,281.000000,4.086200,321800.000000 +-117.240000,32.740000,43.000000,2216.000000,375.000000,918.000000,388.000000,5.528900,297700.000000 +-117.240000,32.730000,37.000000,2260.000000,354.000000,809.000000,351.000000,5.911300,388300.000000 +-117.240000,32.720000,39.000000,3819.000000,594.000000,1361.000000,583.000000,6.601300,396400.000000 +-117.240000,32.720000,39.000000,3089.000000,431.000000,1175.000000,432.000000,7.592500,466700.000000 
+-117.240000,32.710000,32.000000,4164.000000,701.000000,1277.000000,607.000000,6.666100,500001.000000 +-117.250000,34.530000,13.000000,5841.000000,955.000000,2455.000000,915.000000,4.133300,158200.000000 +-117.250000,34.510000,7.000000,3200.000000,477.000000,1522.000000,470.000000,4.691400,142200.000000 +-117.250000,34.160000,37.000000,1709.000000,278.000000,744.000000,274.000000,3.718800,116600.000000 +-117.250000,34.160000,35.000000,2707.000000,481.000000,1595.000000,479.000000,3.901800,91500.000000 +-117.250000,34.160000,31.000000,1516.000000,238.000000,596.000000,255.000000,4.336200,159400.000000 +-117.250000,34.150000,30.000000,1770.000000,380.000000,990.000000,348.000000,3.300000,97600.000000 +-117.250000,34.140000,19.000000,5163.000000,1229.000000,2680.000000,1141.000000,2.248200,114500.000000 +-117.250000,34.130000,37.000000,2498.000000,472.000000,1291.000000,487.000000,3.000000,83400.000000 +-117.250000,34.130000,33.000000,2898.000000,503.000000,1374.000000,487.000000,3.685600,90000.000000 +-117.250000,34.120000,17.000000,3107.000000,752.000000,2160.000000,643.000000,1.846300,72600.000000 +-117.250000,34.110000,32.000000,2910.000000,641.000000,2011.000000,614.000000,2.747300,70800.000000 +-117.250000,34.110000,30.000000,2173.000000,560.000000,1509.000000,486.000000,1.407900,67700.000000 +-117.250000,34.080000,30.000000,2981.000000,605.000000,1784.000000,573.000000,2.450000,85800.000000 +-117.250000,34.070000,21.000000,3067.000000,706.000000,2140.000000,687.000000,2.443200,78800.000000 +-117.250000,34.060000,23.000000,4503.000000,1156.000000,3264.000000,937.000000,1.982100,93000.000000 +-117.250000,34.060000,18.000000,5009.000000,1108.000000,2948.000000,963.000000,3.004200,106500.000000 +-117.250000,34.040000,18.000000,5761.000000,1063.000000,2763.000000,1058.000000,4.447200,161100.000000 +-117.250000,33.950000,5.000000,13096.000000,2208.000000,6780.000000,2180.000000,4.277500,138700.000000 +-117.250000,33.920000,7.000000,9812.000000,1914.000000,5595.000000,1729.000000,4.148200,124600.000000 +-117.250000,33.650000,10.000000,1652.000000,316.000000,725.000000,233.000000,3.512500,155600.000000 +-117.250000,33.390000,22.000000,2699.000000,543.000000,1425.000000,491.000000,2.375000,137300.000000 +-117.250000,33.380000,17.000000,1614.000000,431.000000,1031.000000,389.000000,2.095600,134400.000000 +-117.250000,33.380000,16.000000,3536.000000,765.000000,2007.000000,687.000000,3.000000,146700.000000 +-117.250000,33.370000,8.000000,1755.000000,530.000000,1687.000000,511.000000,1.995000,146900.000000 +-117.250000,33.360000,6.000000,3725.000000,960.000000,2833.000000,915.000000,2.321400,247000.000000 +-117.250000,33.300000,14.000000,2513.000000,351.000000,1151.000000,357.000000,6.305400,359000.000000 +-117.250000,33.250000,6.000000,6160.000000,993.000000,2997.000000,1029.000000,4.618700,205000.000000 +-117.250000,33.220000,26.000000,2010.000000,347.000000,1160.000000,331.000000,3.981500,142600.000000 +-117.250000,33.210000,9.000000,1944.000000,488.000000,1992.000000,453.000000,2.066000,127200.000000 +-117.250000,33.210000,13.000000,1203.000000,292.000000,1035.000000,293.000000,2.633900,117000.000000 +-117.250000,33.200000,22.000000,2361.000000,618.000000,1472.000000,596.000000,2.062500,124500.000000 +-117.250000,33.200000,10.000000,2050.000000,473.000000,1302.000000,471.000000,2.796100,131300.000000 +-117.250000,33.190000,18.000000,1891.000000,306.000000,830.000000,279.000000,4.576400,207000.000000 
+-117.250000,33.120000,8.000000,8552.000000,1437.000000,3335.000000,1323.000000,5.311000,255800.000000 +-117.250000,33.100000,14.000000,3676.000000,720.000000,1176.000000,614.000000,3.946400,171900.000000 +-117.250000,33.080000,13.000000,3651.000000,465.000000,1311.000000,435.000000,7.540200,340300.000000 +-117.250000,33.060000,6.000000,9859.000000,1448.000000,4194.000000,1401.000000,6.439000,296200.000000 +-117.250000,33.050000,16.000000,2794.000000,476.000000,1387.000000,442.000000,4.328600,213400.000000 +-117.250000,33.030000,6.000000,3416.000000,493.000000,1319.000000,467.000000,6.932600,324600.000000 +-117.250000,33.010000,16.000000,3892.000000,520.000000,1454.000000,524.000000,7.731700,396000.000000 +-117.250000,33.000000,14.000000,2518.000000,458.000000,931.000000,414.000000,5.839300,485300.000000 +-117.250000,32.990000,10.000000,4926.000000,749.000000,1478.000000,634.000000,7.472000,439900.000000 +-117.250000,32.960000,18.000000,4773.000000,743.000000,1970.000000,716.000000,6.619900,406200.000000 +-117.250000,32.940000,16.000000,4755.000000,807.000000,1829.000000,756.000000,6.769400,425900.000000 +-117.250000,32.940000,15.000000,1804.000000,339.000000,673.000000,296.000000,5.980600,370500.000000 +-117.250000,32.860000,30.000000,1670.000000,219.000000,606.000000,202.000000,12.442900,500001.000000 +-117.250000,32.860000,27.000000,2530.000000,469.000000,594.000000,326.000000,7.282100,500001.000000 +-117.250000,32.860000,25.000000,2911.000000,533.000000,1137.000000,499.000000,5.102300,500001.000000 +-117.250000,32.840000,19.000000,1759.000000,214.000000,659.000000,195.000000,10.775100,500001.000000 +-117.250000,32.820000,23.000000,6139.000000,826.000000,2036.000000,807.000000,9.524500,500001.000000 +-117.250000,32.810000,39.000000,1846.000000,350.000000,765.000000,329.000000,3.918700,311900.000000 +-117.250000,32.810000,32.000000,2402.000000,551.000000,1020.000000,532.000000,3.394200,307400.000000 +-117.250000,32.800000,32.000000,1601.000000,468.000000,731.000000,429.000000,2.556800,258300.000000 +-117.250000,32.800000,31.000000,2182.000000,630.000000,1069.000000,599.000000,2.978100,212500.000000 +-117.250000,32.800000,26.000000,2442.000000,659.000000,1134.000000,624.000000,3.327400,295500.000000 +-117.250000,32.790000,37.000000,1467.000000,442.000000,651.000000,354.000000,2.375000,340400.000000 +-117.250000,32.790000,27.000000,848.000000,300.000000,455.000000,298.000000,3.077400,275000.000000 +-117.250000,32.790000,25.000000,1627.000000,375.000000,735.000000,378.000000,3.642900,317100.000000 +-117.250000,32.780000,36.000000,1527.000000,427.000000,710.000000,312.000000,2.785700,291700.000000 +-117.250000,32.780000,21.000000,1479.000000,484.000000,658.000000,384.000000,2.450000,350000.000000 +-117.250000,32.770000,35.000000,2494.000000,690.000000,1126.000000,624.000000,4.031300,385300.000000 +-117.250000,32.770000,32.000000,2021.000000,524.000000,973.000000,485.000000,3.180000,362500.000000 +-117.250000,32.750000,37.000000,1189.000000,377.000000,645.000000,377.000000,2.467200,216700.000000 +-117.250000,32.750000,36.000000,1929.000000,526.000000,974.000000,491.000000,1.762200,205800.000000 +-117.250000,32.750000,32.000000,3551.000000,1037.000000,1731.000000,935.000000,2.201700,208300.000000 +-117.250000,32.740000,40.000000,2186.000000,549.000000,953.000000,515.000000,2.800700,257100.000000 +-117.250000,32.740000,36.000000,1830.000000,430.000000,755.000000,419.000000,2.990400,286800.000000 +-117.250000,32.740000,36.000000,1240.000000,310.000000,577.000000,319.000000,2.662500,248200.000000 
+-117.250000,32.730000,39.000000,1688.000000,256.000000,635.000000,272.000000,4.593800,367400.000000 +-117.250000,32.730000,38.000000,1840.000000,291.000000,633.000000,283.000000,4.912500,383600.000000 +-117.250000,32.730000,37.000000,2224.000000,331.000000,821.000000,341.000000,6.333100,400000.000000 +-117.250000,32.720000,36.000000,2632.000000,450.000000,2038.000000,419.000000,6.531900,345800.000000 +-117.250000,32.720000,33.000000,1677.000000,228.000000,629.000000,239.000000,6.597000,496400.000000 +-117.260000,34.530000,10.000000,3103.000000,520.000000,1283.000000,464.000000,3.071000,151600.000000 +-117.260000,34.480000,6.000000,4632.000000,753.000000,1851.000000,694.000000,4.193300,163100.000000 +-117.260000,34.430000,11.000000,4597.000000,782.000000,2534.000000,776.000000,3.336800,99300.000000 +-117.260000,34.240000,10.000000,4750.000000,844.000000,1220.000000,428.000000,4.553600,132400.000000 +-117.260000,34.170000,30.000000,1937.000000,351.000000,945.000000,344.000000,3.890600,123700.000000 +-117.260000,34.150000,33.000000,2271.000000,389.000000,1100.000000,380.000000,3.597800,88300.000000 +-117.260000,34.130000,39.000000,3521.000000,747.000000,2256.000000,721.000000,2.137500,87500.000000 +-117.260000,34.130000,37.000000,2403.000000,550.000000,1234.000000,493.000000,2.000000,72100.000000 +-117.260000,34.110000,33.000000,1210.000000,288.000000,850.000000,238.000000,1.217100,59300.000000 +-117.260000,33.810000,22.000000,4249.000000,922.000000,2405.000000,846.000000,2.154900,146500.000000 +-117.260000,33.370000,7.000000,2221.000000,548.000000,1440.000000,501.000000,2.236800,154600.000000 +-117.260000,33.210000,26.000000,1906.000000,408.000000,1325.000000,427.000000,3.019700,136000.000000 +-117.260000,33.200000,13.000000,3163.000000,725.000000,1675.000000,629.000000,2.821400,121900.000000 +-117.260000,33.190000,2.000000,2629.000000,509.000000,1044.000000,522.000000,4.236100,158500.000000 +-117.260000,33.180000,9.000000,4540.000000,793.000000,2235.000000,746.000000,4.578100,225600.000000 +-117.260000,33.090000,22.000000,2398.000000,407.000000,349.000000,169.000000,7.042300,500001.000000 +-117.260000,33.090000,13.000000,3730.000000,761.000000,1335.000000,603.000000,4.166700,227100.000000 +-117.260000,33.080000,12.000000,5080.000000,814.000000,1958.000000,716.000000,5.390500,299600.000000 +-117.260000,33.060000,11.000000,2660.000000,352.000000,1226.000000,366.000000,7.683200,319800.000000 +-117.260000,33.050000,14.000000,2323.000000,373.000000,1057.000000,372.000000,6.251300,240900.000000 +-117.260000,33.040000,18.000000,2229.000000,346.000000,1088.000000,352.000000,6.352500,278300.000000 +-117.260000,33.040000,16.000000,3109.000000,450.000000,1433.000000,453.000000,6.631900,269600.000000 +-117.260000,33.020000,9.000000,4632.000000,759.000000,1724.000000,685.000000,6.371200,369800.000000 +-117.260000,33.000000,31.000000,2695.000000,491.000000,1059.000000,451.000000,4.784100,393500.000000 +-117.260000,32.980000,12.000000,3900.000000,977.000000,1690.000000,892.000000,4.125000,226900.000000 +-117.260000,32.970000,25.000000,2582.000000,495.000000,1088.000000,471.000000,6.465100,500001.000000 +-117.260000,32.960000,36.000000,1721.000000,264.000000,710.000000,282.000000,10.176800,500001.000000 +-117.260000,32.950000,34.000000,1651.000000,273.000000,650.000000,271.000000,5.658200,500001.000000 +-117.260000,32.950000,22.000000,5484.000000,1227.000000,1947.000000,1012.000000,4.437500,500001.000000 +-117.260000,32.950000,15.000000,1882.000000,233.000000,704.000000,219.000000,6.979400,500001.000000 
+-117.260000,32.950000,15.000000,1036.000000,149.000000,395.000000,157.000000,5.834300,500001.000000 +-117.260000,32.850000,42.000000,1761.000000,329.000000,480.000000,255.000000,5.378700,500001.000000 +-117.260000,32.830000,24.000000,1663.000000,199.000000,578.000000,187.000000,10.772100,500001.000000 +-117.260000,32.810000,37.000000,1616.000000,421.000000,650.000000,395.000000,2.920000,326500.000000 +-117.260000,32.810000,30.000000,1328.000000,346.000000,577.000000,328.000000,2.328400,290600.000000 +-117.260000,32.810000,25.000000,2076.000000,586.000000,1060.000000,554.000000,2.842100,227800.000000 +-117.260000,32.800000,30.000000,1446.000000,385.000000,650.000000,344.000000,3.744000,450000.000000 +-117.270000,34.480000,8.000000,1794.000000,276.000000,690.000000,271.000000,3.662000,165300.000000 +-117.270000,34.420000,9.000000,5643.000000,1005.000000,3166.000000,957.000000,3.207700,93300.000000 +-117.270000,34.400000,8.000000,6042.000000,979.000000,3031.000000,991.000000,3.343800,124400.000000 +-117.270000,34.390000,6.000000,6988.000000,1121.000000,3660.000000,1092.000000,4.222400,125700.000000 +-117.270000,34.240000,34.000000,3687.000000,756.000000,941.000000,367.000000,2.875000,117600.000000 +-117.270000,34.230000,26.000000,6339.000000,1244.000000,1177.000000,466.000000,3.770800,110400.000000 +-117.270000,34.170000,16.000000,30.000000,3.000000,49.000000,8.000000,4.625000,250000.000000 +-117.270000,34.150000,35.000000,1490.000000,253.000000,705.000000,253.000000,3.361600,95300.000000 +-117.270000,34.140000,36.000000,3795.000000,676.000000,1742.000000,585.000000,4.100000,96400.000000 +-117.270000,34.140000,35.000000,1517.000000,257.000000,658.000000,245.000000,4.443500,97600.000000 +-117.270000,34.130000,40.000000,1298.000000,254.000000,793.000000,268.000000,3.072100,83800.000000 +-117.270000,34.130000,36.000000,3337.000000,687.000000,2388.000000,589.000000,2.962800,87800.000000 +-117.270000,34.120000,52.000000,954.000000,246.000000,943.000000,256.000000,0.865800,87500.000000 +-117.270000,34.120000,31.000000,2209.000000,636.000000,1314.000000,562.000000,1.723500,78800.000000 +-117.270000,34.120000,27.000000,2896.000000,684.000000,1514.000000,668.000000,1.462000,70200.000000 +-117.270000,34.110000,44.000000,567.000000,134.000000,565.000000,150.000000,1.828100,62900.000000 +-117.270000,34.080000,38.000000,1093.000000,256.000000,856.000000,212.000000,1.427900,73000.000000 +-117.270000,34.070000,21.000000,418.000000,132.000000,401.000000,120.000000,1.720600,82100.000000 +-117.270000,34.060000,20.000000,5258.000000,1514.000000,3780.000000,1404.000000,2.025000,85700.000000 +-117.270000,34.050000,34.000000,1703.000000,395.000000,849.000000,359.000000,3.160700,138200.000000 +-117.270000,33.930000,2.000000,337.000000,55.000000,115.000000,49.000000,3.104200,164800.000000 +-117.270000,33.920000,13.000000,8443.000000,1744.000000,4885.000000,1470.000000,3.090700,127200.000000 +-117.270000,33.680000,8.000000,26322.000000,4072.000000,9360.000000,3361.000000,5.323800,228900.000000 +-117.270000,33.550000,4.000000,6112.000000,890.000000,2088.000000,712.000000,5.535100,429000.000000 +-117.270000,33.230000,5.000000,20908.000000,3933.000000,9690.000000,3510.000000,4.140500,198500.000000 +-117.270000,33.220000,5.000000,2283.000000,337.000000,999.000000,325.000000,5.024900,196700.000000 +-117.270000,33.220000,16.000000,1420.000000,311.000000,470.000000,313.000000,1.884900,90800.000000 +-117.270000,33.210000,5.000000,5764.000000,996.000000,3161.000000,1012.000000,4.453100,177500.000000 
+-117.270000,33.200000,34.000000,1852.000000,322.000000,978.000000,332.000000,4.354200,156900.000000 +-117.270000,33.200000,23.000000,2145.000000,379.000000,1360.000000,404.000000,4.205400,150700.000000 +-117.270000,33.190000,8.000000,973.000000,289.000000,663.000000,209.000000,2.724000,139300.000000 +-117.270000,33.180000,4.000000,3371.000000,773.000000,1481.000000,627.000000,2.913300,215700.000000 +-117.270000,33.080000,7.000000,2949.000000,447.000000,1335.000000,426.000000,6.092200,342400.000000 +-117.270000,33.060000,7.000000,3686.000000,733.000000,1612.000000,672.000000,3.197000,367100.000000 +-117.270000,33.050000,15.000000,3333.000000,808.000000,1371.000000,737.000000,2.908300,122400.000000 +-117.270000,33.040000,27.000000,1839.000000,392.000000,1302.000000,404.000000,3.550000,214600.000000 +-117.270000,33.030000,25.000000,1787.000000,311.000000,1108.000000,311.000000,3.982600,215800.000000 +-117.270000,33.030000,19.000000,2899.000000,499.000000,1356.000000,512.000000,4.870000,220900.000000 +-117.270000,33.030000,16.000000,2240.000000,443.000000,1104.000000,416.000000,3.531300,148700.000000 +-117.270000,33.020000,21.000000,2144.000000,340.000000,928.000000,344.000000,5.798000,286100.000000 +-117.270000,33.000000,36.000000,2426.000000,454.000000,1085.000000,420.000000,5.152300,387800.000000 +-117.270000,32.990000,21.000000,3318.000000,578.000000,1273.000000,538.000000,5.592200,382100.000000 +-117.270000,32.980000,17.000000,1853.000000,392.000000,351.000000,208.000000,5.274200,230700.000000 +-117.270000,32.850000,34.000000,2105.000000,444.000000,780.000000,406.000000,2.318700,488900.000000 +-117.270000,32.840000,26.000000,3940.000000,657.000000,1180.000000,600.000000,6.102500,500001.000000 +-117.270000,32.830000,39.000000,1877.000000,426.000000,805.000000,409.000000,3.875000,410000.000000 +-117.270000,32.830000,35.000000,1420.000000,193.000000,469.000000,177.000000,8.063900,500001.000000 +-117.270000,32.820000,42.000000,2820.000000,488.000000,1175.000000,500.000000,4.508300,405200.000000 +-117.270000,32.820000,35.000000,2908.000000,595.000000,1068.000000,529.000000,4.179300,500001.000000 +-117.280000,35.130000,32.000000,671.000000,166.000000,856.000000,114.000000,2.647700,53300.000000 +-117.280000,34.680000,28.000000,1932.000000,421.000000,1156.000000,404.000000,1.895800,55600.000000 +-117.280000,34.510000,10.000000,4676.000000,884.000000,2845.000000,812.000000,3.018100,100400.000000 +-117.280000,34.240000,16.000000,3474.000000,633.000000,853.000000,315.000000,5.218500,128600.000000 +-117.280000,34.170000,26.000000,3728.000000,888.000000,1765.000000,727.000000,1.745600,86800.000000 +-117.280000,34.170000,26.000000,3106.000000,603.000000,1396.000000,576.000000,3.173600,122200.000000 +-117.280000,34.160000,35.000000,2028.000000,456.000000,972.000000,398.000000,2.377800,90700.000000 +-117.280000,34.160000,26.000000,2469.000000,532.000000,1068.000000,501.000000,1.983200,122100.000000 +-117.280000,34.150000,38.000000,1981.000000,343.000000,796.000000,344.000000,3.812500,97400.000000 +-117.280000,34.150000,36.000000,1734.000000,280.000000,604.000000,259.000000,3.829200,122200.000000 +-117.280000,34.140000,40.000000,2364.000000,438.000000,968.000000,416.000000,3.490600,93300.000000 +-117.280000,34.140000,40.000000,2190.000000,496.000000,1214.000000,493.000000,2.394700,81900.000000 +-117.280000,34.130000,29.000000,2077.000000,577.000000,1418.000000,524.000000,1.828100,76800.000000 +-117.280000,34.120000,47.000000,2456.000000,611.000000,1653.000000,512.000000,1.397300,66100.000000 
+-117.280000,34.120000,36.000000,2991.000000,822.000000,2378.000000,751.000000,1.357100,70600.000000 +-117.280000,34.110000,39.000000,1573.000000,418.000000,1258.000000,359.000000,1.489600,69500.000000 +-117.280000,34.090000,44.000000,376.000000,87.000000,273.000000,107.000000,2.291700,90800.000000 +-117.280000,34.060000,2.000000,1658.000000,290.000000,868.000000,304.000000,5.136500,136700.000000 +-117.280000,33.940000,10.000000,972.000000,212.000000,773.000000,219.000000,1.312500,135700.000000 +-117.280000,33.920000,35.000000,3623.000000,841.000000,2721.000000,766.000000,2.157400,86900.000000 +-117.280000,33.890000,33.000000,6982.000000,1371.000000,5650.000000,1195.000000,2.537900,152700.000000 +-117.280000,33.850000,16.000000,3498.000000,702.000000,2372.000000,672.000000,2.322900,118000.000000 +-117.280000,33.720000,11.000000,1161.000000,235.000000,640.000000,210.000000,2.166700,114600.000000 +-117.280000,33.660000,15.000000,4573.000000,928.000000,2513.000000,832.000000,2.694900,163600.000000 +-117.280000,33.220000,13.000000,2832.000000,542.000000,1065.000000,531.000000,2.384400,98600.000000 +-117.280000,33.200000,11.000000,1472.000000,261.000000,1012.000000,285.000000,4.210000,175600.000000 +-117.280000,33.190000,5.000000,2697.000000,639.000000,1633.000000,580.000000,3.445600,165800.000000 +-117.280000,33.180000,16.000000,3002.000000,591.000000,842.000000,538.000000,2.120500,157300.000000 +-117.280000,33.180000,14.000000,676.000000,118.000000,384.000000,126.000000,6.209600,178100.000000 +-117.280000,33.100000,13.000000,2644.000000,422.000000,1197.000000,399.000000,6.533800,267900.000000 +-117.280000,33.040000,12.000000,4459.000000,928.000000,2471.000000,888.000000,3.517900,252700.000000 +-117.280000,32.840000,41.000000,1420.000000,338.000000,640.000000,314.000000,2.930600,360300.000000 +-117.280000,32.840000,21.000000,2455.000000,660.000000,1015.000000,597.000000,3.759600,381300.000000 +-117.280000,32.830000,34.000000,2392.000000,653.000000,933.000000,619.000000,3.730600,500000.000000 +-117.280000,32.800000,20.000000,1838.000000,540.000000,615.000000,325.000000,3.548600,193800.000000 +-117.280000,32.770000,38.000000,1267.000000,340.000000,442.000000,250.000000,4.340300,500000.000000 +-117.280000,32.750000,34.000000,981.000000,313.000000,508.000000,304.000000,2.232800,266700.000000 +-117.280000,32.740000,33.000000,4168.000000,1112.000000,1785.000000,984.000000,2.751500,247700.000000 +-117.280000,32.730000,44.000000,1934.000000,325.000000,783.000000,316.000000,4.868400,358600.000000 +-117.290000,35.540000,35.000000,7922.000000,1636.000000,3431.000000,1329.000000,3.414500,40400.000000 +-117.290000,34.570000,22.000000,1054.000000,239.000000,428.000000,239.000000,1.254800,68300.000000 +-117.290000,34.410000,11.000000,5934.000000,1380.000000,2756.000000,1239.000000,1.575800,108300.000000 +-117.290000,34.170000,35.000000,4174.000000,847.000000,2127.000000,778.000000,3.223200,88300.000000 +-117.290000,34.150000,49.000000,1820.000000,321.000000,757.000000,324.000000,3.297600,102600.000000 +-117.290000,34.150000,42.000000,1811.000000,345.000000,856.000000,352.000000,2.966700,97000.000000 +-117.290000,34.140000,52.000000,1683.000000,266.000000,646.000000,256.000000,4.048100,97300.000000 +-117.290000,34.140000,48.000000,1717.000000,307.000000,610.000000,267.000000,3.125000,97600.000000 +-117.290000,34.140000,45.000000,1598.000000,314.000000,771.000000,319.000000,2.541700,82900.000000 +-117.290000,34.140000,39.000000,1989.000000,401.000000,805.000000,341.000000,2.425000,90000.000000 
+-117.290000,34.130000,52.000000,2424.000000,528.000000,1171.000000,455.000000,1.481500,77900.000000 +-117.290000,34.130000,44.000000,2337.000000,563.000000,1238.000000,467.000000,1.515600,75800.000000 +-117.290000,34.120000,47.000000,1648.000000,432.000000,1308.000000,385.000000,1.206900,68200.000000 +-117.290000,34.120000,45.000000,1369.000000,351.000000,1046.000000,274.000000,1.843800,72100.000000 +-117.290000,34.120000,40.000000,2198.000000,612.000000,1517.000000,531.000000,1.095100,65800.000000 +-117.290000,34.110000,35.000000,2426.000000,715.000000,1920.000000,586.000000,1.556100,68000.000000 +-117.290000,34.110000,35.000000,2014.000000,677.000000,1714.000000,612.000000,0.707500,78800.000000 +-117.290000,34.030000,9.000000,8185.000000,1525.000000,3630.000000,1466.000000,4.166700,197700.000000 +-117.290000,33.970000,4.000000,18767.000000,3032.000000,8805.000000,2723.000000,4.666700,160600.000000 +-117.290000,33.830000,15.000000,4173.000000,804.000000,2393.000000,713.000000,2.466200,118300.000000 +-117.290000,33.720000,19.000000,2248.000000,427.000000,1207.000000,368.000000,2.817000,110000.000000 +-117.290000,33.630000,7.000000,16010.000000,2726.000000,7139.000000,2426.000000,3.805600,162200.000000 +-117.290000,33.240000,5.000000,3109.000000,634.000000,1823.000000,578.000000,3.187500,153800.000000 +-117.290000,33.200000,16.000000,2150.000000,461.000000,1428.000000,407.000000,2.475400,157300.000000 +-117.290000,33.180000,17.000000,821.000000,176.000000,436.000000,168.000000,3.166700,160600.000000 +-117.290000,33.180000,15.000000,3780.000000,792.000000,1632.000000,721.000000,2.764400,111400.000000 +-117.290000,33.150000,11.000000,2560.000000,445.000000,952.000000,448.000000,4.062500,87500.000000 +-117.290000,33.130000,4.000000,617.000000,105.000000,224.000000,105.000000,3.920500,183000.000000 +-117.290000,33.120000,4.000000,1380.000000,322.000000,755.000000,286.000000,4.796100,168800.000000 +-117.290000,33.100000,6.000000,6091.000000,1018.000000,2064.000000,957.000000,5.183700,259800.000000 +-117.290000,33.080000,18.000000,3225.000000,515.000000,1463.000000,476.000000,5.778700,346700.000000 +-117.290000,33.060000,20.000000,2110.000000,335.000000,1008.000000,325.000000,6.150900,338700.000000 +-117.290000,33.050000,28.000000,1146.000000,338.000000,672.000000,292.000000,3.166700,300000.000000 +-117.290000,33.040000,30.000000,2750.000000,555.000000,1281.000000,520.000000,4.733300,286900.000000 +-117.290000,32.920000,25.000000,2355.000000,381.000000,823.000000,358.000000,6.832200,500001.000000 +-117.290000,32.810000,35.000000,1878.000000,308.000000,598.000000,257.000000,6.955300,500001.000000 +-117.300000,34.540000,31.000000,1174.000000,360.000000,1161.000000,328.000000,1.060000,56500.000000 +-117.300000,34.540000,25.000000,2546.000000,488.000000,1338.000000,487.000000,3.259600,85400.000000 +-117.300000,34.520000,34.000000,4493.000000,838.000000,2335.000000,779.000000,3.163500,74300.000000 +-117.300000,34.460000,8.000000,6246.000000,1273.000000,3883.000000,1264.000000,2.791700,98200.000000 +-117.300000,34.390000,11.000000,3572.000000,592.000000,1876.000000,507.000000,3.661500,105100.000000 +-117.300000,34.240000,38.000000,4116.000000,949.000000,1196.000000,422.000000,3.562500,96500.000000 +-117.300000,34.180000,28.000000,2685.000000,425.000000,1304.000000,420.000000,4.367600,111100.000000 +-117.300000,34.180000,19.000000,2526.000000,381.000000,1176.000000,381.000000,5.513600,137100.000000 +-117.300000,34.170000,30.000000,2483.000000,573.000000,1172.000000,438.000000,1.875000,89700.000000 
+-117.300000,34.150000,38.000000,740.000000,163.000000,332.000000,138.000000,2.410700,88000.000000 +-117.300000,34.150000,33.000000,1607.000000,282.000000,608.000000,260.000000,4.343800,115000.000000 +-117.300000,34.140000,39.000000,1781.000000,335.000000,841.000000,320.000000,1.943200,89000.000000 +-117.300000,34.130000,42.000000,2115.000000,557.000000,1532.000000,494.000000,1.453100,71500.000000 +-117.300000,34.120000,50.000000,1629.000000,437.000000,1581.000000,394.000000,2.201900,63500.000000 +-117.300000,34.110000,42.000000,525.000000,111.000000,444.000000,120.000000,2.677100,67000.000000 +-117.300000,34.070000,34.000000,567.000000,143.000000,387.000000,138.000000,1.798100,73300.000000 +-117.300000,34.050000,7.000000,4672.000000,1121.000000,2534.000000,1046.000000,3.422800,115700.000000 +-117.300000,33.260000,23.000000,1678.000000,275.000000,1227.000000,264.000000,4.171300,133800.000000 +-117.300000,33.250000,22.000000,2329.000000,419.000000,1456.000000,381.000000,3.793300,131000.000000 +-117.300000,33.230000,13.000000,3619.000000,791.000000,1759.000000,806.000000,2.765000,98500.000000 +-117.300000,33.220000,4.000000,14960.000000,2988.000000,6666.000000,2612.000000,3.756800,184100.000000 +-117.300000,33.170000,6.000000,7880.000000,1533.000000,3760.000000,1460.000000,4.180700,182600.000000 +-117.300000,33.080000,24.000000,2628.000000,527.000000,1389.000000,520.000000,4.000000,343200.000000 +-117.300000,33.070000,16.000000,3147.000000,765.000000,2165.000000,690.000000,3.558500,284800.000000 +-117.300000,33.070000,14.000000,2670.000000,426.000000,1034.000000,407.000000,6.424700,295100.000000 +-117.300000,33.050000,34.000000,1797.000000,458.000000,775.000000,391.000000,3.230800,331300.000000 +-117.300000,32.960000,30.000000,1226.000000,205.000000,380.000000,151.000000,4.287500,500001.000000 +-117.310000,34.530000,26.000000,2299.000000,496.000000,1259.000000,441.000000,2.612500,79900.000000 +-117.310000,34.510000,18.000000,2704.000000,698.000000,1611.000000,597.000000,2.024300,82300.000000 +-117.310000,34.500000,14.000000,2443.000000,447.000000,883.000000,465.000000,2.111100,116700.000000 +-117.310000,34.440000,10.000000,1731.000000,299.000000,1056.000000,312.000000,3.600700,104000.000000 +-117.310000,34.430000,16.000000,5130.000000,1172.000000,3126.000000,1046.000000,1.678400,71900.000000 +-117.310000,34.390000,15.000000,1703.000000,273.000000,847.000000,266.000000,3.791700,123400.000000 +-117.310000,34.350000,9.000000,2404.000000,390.000000,1074.000000,359.000000,5.019800,151900.000000 +-117.310000,34.250000,29.000000,4610.000000,1043.000000,1569.000000,592.000000,2.766300,97900.000000 +-117.310000,34.170000,25.000000,2795.000000,596.000000,1650.000000,569.000000,3.007800,87100.000000 +-117.310000,34.150000,7.000000,5747.000000,1307.000000,2578.000000,1147.000000,3.328100,122200.000000 +-117.310000,34.150000,34.000000,2037.000000,385.000000,1195.000000,391.000000,3.923100,96000.000000 +-117.310000,34.140000,44.000000,1487.000000,273.000000,972.000000,281.000000,3.229200,86100.000000 +-117.310000,34.140000,38.000000,2011.000000,448.000000,1190.000000,403.000000,1.865400,89400.000000 +-117.310000,34.130000,38.000000,1287.000000,284.000000,1047.000000,269.000000,2.286500,65500.000000 +-117.310000,34.130000,36.000000,1076.000000,283.000000,773.000000,224.000000,2.630700,66400.000000 +-117.310000,34.130000,35.000000,1622.000000,393.000000,1296.000000,362.000000,1.928600,68500.000000 +-117.310000,34.120000,37.000000,1412.000000,343.000000,1127.000000,351.000000,1.166700,70900.000000 
+-117.310000,34.110000,52.000000,851.000000,190.000000,731.000000,190.000000,1.904400,64900.000000 +-117.310000,34.110000,41.000000,1105.000000,257.000000,816.000000,197.000000,1.937500,64100.000000 +-117.310000,34.100000,52.000000,1457.000000,415.000000,1238.000000,341.000000,2.008900,68100.000000 +-117.310000,34.090000,34.000000,1336.000000,345.000000,1009.000000,311.000000,1.608000,73700.000000 +-117.310000,34.080000,43.000000,1697.000000,387.000000,1181.000000,352.000000,1.923400,74600.000000 +-117.310000,34.080000,40.000000,2011.000000,495.000000,1528.000000,469.000000,1.937500,69900.000000 +-117.310000,34.080000,37.000000,953.000000,231.000000,611.000000,230.000000,1.992600,81500.000000 +-117.310000,34.070000,40.000000,2936.000000,732.000000,2024.000000,676.000000,2.113900,70900.000000 +-117.310000,34.050000,6.000000,7423.000000,2111.000000,4092.000000,1789.000000,2.700200,88300.000000 +-117.310000,34.040000,5.000000,2785.000000,577.000000,1310.000000,536.000000,3.390000,149500.000000 +-117.310000,34.040000,29.000000,2481.000000,383.000000,1188.000000,385.000000,4.734400,134600.000000 +-117.310000,34.030000,9.000000,1199.000000,187.000000,629.000000,207.000000,5.739300,151600.000000 +-117.310000,34.030000,24.000000,1966.000000,299.000000,786.000000,302.000000,5.031800,134500.000000 +-117.310000,34.020000,18.000000,1634.000000,274.000000,899.000000,285.000000,5.213900,129300.000000 +-117.310000,33.970000,28.000000,3420.000000,691.000000,1502.000000,656.000000,3.489600,140300.000000 +-117.310000,33.750000,19.000000,3173.000000,678.000000,2204.000000,606.000000,2.148400,129200.000000 +-117.310000,33.670000,9.000000,981.000000,169.000000,596.000000,156.000000,3.183200,157400.000000 +-117.310000,33.250000,14.000000,3483.000000,764.000000,2140.000000,687.000000,3.125000,102300.000000 +-117.310000,33.250000,13.000000,3075.000000,630.000000,1843.000000,674.000000,2.855800,97100.000000 +-117.310000,33.190000,11.000000,20944.000000,3753.000000,8738.000000,3441.000000,4.376200,215500.000000 +-117.310000,33.180000,16.000000,1835.000000,430.000000,599.000000,399.000000,2.014700,87700.000000 +-117.310000,33.160000,17.000000,1704.000000,263.000000,781.000000,281.000000,5.660500,224400.000000 +-117.310000,33.110000,7.000000,7974.000000,1703.000000,2904.000000,1550.000000,4.128200,188100.000000 +-117.310000,33.100000,15.000000,2392.000000,446.000000,747.000000,421.000000,3.534100,500001.000000 +-117.310000,33.070000,21.000000,2035.000000,534.000000,948.000000,467.000000,3.298400,369400.000000 +-117.310000,33.000000,30.000000,1631.000000,310.000000,665.000000,297.000000,6.844300,492500.000000 +-117.310000,32.830000,38.000000,2367.000000,480.000000,891.000000,428.000000,4.147700,500001.000000 +-117.310000,32.820000,42.000000,2785.000000,389.000000,833.000000,333.000000,11.307400,500001.000000 +-117.320000,34.550000,18.000000,279.000000,59.000000,188.000000,60.000000,0.824600,91700.000000 +-117.320000,34.540000,9.000000,5904.000000,1165.000000,3489.000000,1063.000000,3.125000,92800.000000 +-117.320000,34.510000,16.000000,3072.000000,612.000000,1283.000000,604.000000,2.892900,115600.000000 +-117.320000,34.490000,7.000000,4584.000000,1051.000000,2049.000000,918.000000,1.623200,93400.000000 +-117.320000,34.480000,8.000000,4627.000000,887.000000,2739.000000,846.000000,3.020400,93100.000000 +-117.320000,34.410000,13.000000,2032.000000,348.000000,1038.000000,344.000000,4.289100,120100.000000 +-117.320000,34.240000,29.000000,1290.000000,263.000000,323.000000,113.000000,1.926500,103300.000000 
+-117.320000,34.190000,6.000000,1068.000000,182.000000,999.000000,188.000000,4.722200,109000.000000 +-117.320000,34.170000,6.000000,5661.000000,1287.000000,2943.000000,1162.000000,3.636200,106500.000000 +-117.320000,34.160000,9.000000,711.000000,139.000000,316.000000,152.000000,4.015600,131000.000000 +-117.320000,34.140000,32.000000,1691.000000,353.000000,1457.000000,329.000000,1.843800,66600.000000 +-117.320000,34.130000,41.000000,1837.000000,409.000000,1430.000000,344.000000,2.452400,70400.000000 +-117.320000,34.120000,39.000000,2210.000000,498.000000,1752.000000,477.000000,1.406600,66400.000000 +-117.320000,34.120000,37.000000,2868.000000,574.000000,2055.000000,563.000000,2.350800,70500.000000 +-117.320000,34.110000,41.000000,1229.000000,302.000000,994.000000,270.000000,1.489100,67300.000000 +-117.320000,34.100000,42.000000,801.000000,176.000000,711.000000,183.000000,1.868100,59700.000000 +-117.320000,34.100000,27.000000,2053.000000,461.000000,1737.000000,463.000000,3.121300,78800.000000 +-117.320000,34.090000,38.000000,1585.000000,345.000000,1347.000000,368.000000,2.375000,75300.000000 +-117.320000,34.090000,30.000000,1129.000000,251.000000,1034.000000,237.000000,2.391700,78600.000000 +-117.320000,34.080000,46.000000,1308.000000,276.000000,576.000000,244.000000,3.187500,84000.000000 +-117.320000,34.080000,41.000000,1359.000000,264.000000,786.000000,244.000000,2.520800,85500.000000 +-117.320000,34.070000,26.000000,971.000000,245.000000,592.000000,207.000000,2.112500,84000.000000 +-117.320000,34.060000,52.000000,802.000000,160.000000,564.000000,131.000000,2.159100,63500.000000 +-117.320000,34.060000,46.000000,476.000000,102.000000,476.000000,91.000000,1.451100,73100.000000 +-117.320000,34.020000,17.000000,1779.000000,292.000000,1006.000000,293.000000,4.670800,123100.000000 +-117.320000,34.010000,23.000000,3021.000000,527.000000,1580.000000,533.000000,4.406300,129900.000000 +-117.320000,33.990000,27.000000,5464.000000,850.000000,2400.000000,836.000000,4.711000,133500.000000 +-117.320000,33.960000,19.000000,3216.000000,666.000000,1363.000000,629.000000,3.758500,144500.000000 +-117.320000,33.870000,15.000000,826.000000,138.000000,440.000000,134.000000,4.812500,173900.000000 +-117.320000,33.510000,4.000000,966.000000,133.000000,311.000000,92.000000,5.206600,500001.000000 +-117.320000,33.250000,7.000000,8206.000000,1523.000000,4399.000000,1423.000000,3.630100,170900.000000 +-117.320000,33.250000,7.000000,2499.000000,420.000000,1314.000000,398.000000,4.850000,186900.000000 +-117.320000,33.230000,24.000000,2580.000000,604.000000,982.000000,569.000000,1.640200,169300.000000 +-117.320000,33.220000,16.000000,1057.000000,232.000000,316.000000,221.000000,2.741700,91700.000000 +-117.320000,33.220000,15.000000,4784.000000,1039.000000,1810.000000,986.000000,2.437500,108900.000000 +-117.320000,33.150000,15.000000,13245.000000,2212.000000,5495.000000,2060.000000,5.490400,262100.000000 +-117.320000,33.120000,25.000000,2670.000000,527.000000,936.000000,461.000000,2.771700,354000.000000 +-117.320000,33.010000,29.000000,3584.000000,712.000000,1619.000000,667.000000,4.125000,394400.000000 +-117.330000,34.530000,10.000000,3781.000000,712.000000,2044.000000,685.000000,3.094300,97100.000000 +-117.330000,34.410000,13.000000,3684.000000,604.000000,1767.000000,585.000000,3.747800,113500.000000 +-117.330000,34.170000,5.000000,4718.000000,1140.000000,2564.000000,1056.000000,2.987700,119900.000000 +-117.330000,34.170000,13.000000,3616.000000,665.000000,2189.000000,620.000000,3.794900,106300.000000 
+-117.330000,34.150000,28.000000,1473.000000,333.000000,1196.000000,312.000000,1.699300,67800.000000 +-117.330000,34.140000,29.000000,1646.000000,391.000000,1296.000000,351.000000,1.942300,69700.000000 +-117.330000,34.130000,30.000000,2335.000000,363.000000,1214.000000,311.000000,2.244900,93200.000000 +-117.330000,34.130000,18.000000,3009.000000,740.000000,2317.000000,659.000000,1.637500,72400.000000 +-117.330000,34.090000,29.000000,1960.000000,415.000000,1681.000000,435.000000,2.929200,84500.000000 +-117.330000,34.080000,35.000000,2240.000000,423.000000,1394.000000,396.000000,3.179900,86700.000000 +-117.330000,34.060000,48.000000,732.000000,149.000000,486.000000,139.000000,2.567300,68200.000000 +-117.330000,34.060000,42.000000,530.000000,123.000000,390.000000,124.000000,1.046900,67000.000000 +-117.330000,34.060000,36.000000,755.000000,157.000000,625.000000,152.000000,2.024200,65000.000000 +-117.330000,34.050000,26.000000,613.000000,149.000000,431.000000,130.000000,1.397700,73100.000000 +-117.330000,34.040000,18.000000,1837.000000,388.000000,727.000000,336.000000,2.518700,116700.000000 +-117.330000,34.030000,18.000000,2342.000000,402.000000,1264.000000,382.000000,4.798600,123700.000000 +-117.330000,34.030000,14.000000,1582.000000,347.000000,825.000000,259.000000,2.828100,106300.000000 +-117.330000,33.970000,8.000000,152.000000,19.000000,1275.000000,20.000000,1.625000,162500.000000 +-117.330000,33.900000,2.000000,12837.000000,1842.000000,4636.000000,1453.000000,5.151200,187800.000000 +-117.330000,33.670000,27.000000,4376.000000,1003.000000,2667.000000,870.000000,1.919400,100600.000000 +-117.330000,33.240000,13.000000,4543.000000,881.000000,2298.000000,870.000000,2.938600,143400.000000 +-117.330000,33.230000,15.000000,2919.000000,592.000000,1130.000000,579.000000,2.587200,155600.000000 +-117.330000,33.230000,15.000000,1905.000000,416.000000,1258.000000,388.000000,3.330000,127900.000000 +-117.330000,33.220000,21.000000,2868.000000,602.000000,855.000000,559.000000,2.784600,91200.000000 +-117.330000,33.210000,17.000000,1246.000000,300.000000,424.000000,288.000000,2.288200,85800.000000 +-117.330000,33.170000,11.000000,10923.000000,2041.000000,4773.000000,1858.000000,4.079100,281300.000000 +-117.330000,33.030000,31.000000,1171.000000,321.000000,603.000000,267.000000,2.861100,314300.000000 +-117.340000,34.390000,8.000000,3579.000000,672.000000,2216.000000,630.000000,3.403800,100500.000000 +-117.340000,34.160000,31.000000,1606.000000,354.000000,1049.000000,335.000000,2.193500,72700.000000 +-117.340000,34.140000,37.000000,1834.000000,393.000000,1198.000000,348.000000,2.225000,81600.000000 +-117.340000,34.130000,29.000000,331.000000,85.000000,341.000000,107.000000,0.706900,70300.000000 +-117.340000,34.130000,29.000000,1494.000000,286.000000,991.000000,280.000000,2.125000,70600.000000 +-117.340000,34.110000,29.000000,2912.000000,566.000000,2188.000000,518.000000,3.265600,90600.000000 +-117.340000,34.100000,14.000000,11827.000000,2445.000000,6640.000000,2299.000000,2.487800,103800.000000 +-117.340000,34.080000,35.000000,1380.000000,248.000000,730.000000,264.000000,3.230500,93700.000000 +-117.340000,34.070000,46.000000,1851.000000,425.000000,1100.000000,377.000000,2.046100,90500.000000 +-117.340000,34.020000,28.000000,2683.000000,708.000000,2047.000000,636.000000,2.275000,85400.000000 +-117.340000,34.000000,27.000000,321.000000,64.000000,214.000000,67.000000,3.175000,101600.000000 +-117.340000,33.980000,10.000000,17286.000000,4952.000000,9851.000000,4616.000000,1.757900,103400.000000 
+-117.340000,33.960000,15.000000,6437.000000,1298.000000,2805.000000,1205.000000,4.188300,184500.000000 +-117.340000,33.940000,20.000000,4589.000000,594.000000,1660.000000,595.000000,7.414100,236500.000000 +-117.340000,33.940000,13.000000,7910.000000,1195.000000,3382.000000,1176.000000,5.556300,214500.000000 +-117.340000,33.890000,17.000000,2678.000000,394.000000,1225.000000,367.000000,5.363000,211300.000000 +-117.340000,33.710000,10.000000,2591.000000,486.000000,1255.000000,425.000000,3.151300,154300.000000 +-117.340000,33.460000,14.000000,1902.000000,338.000000,848.000000,304.000000,5.539500,273300.000000 +-117.340000,33.230000,11.000000,3737.000000,757.000000,2212.000000,727.000000,3.106200,141000.000000 +-117.340000,33.210000,23.000000,2062.000000,376.000000,1302.000000,379.000000,4.010900,145700.000000 +-117.340000,33.210000,12.000000,5963.000000,1372.000000,3015.000000,1124.000000,2.738600,216100.000000 +-117.340000,33.190000,23.000000,3546.000000,553.000000,1533.000000,518.000000,5.276000,224500.000000 +-117.340000,33.190000,19.000000,3575.000000,525.000000,1654.000000,559.000000,5.740900,274100.000000 +-117.340000,33.160000,31.000000,2851.000000,458.000000,1286.000000,467.000000,4.569400,243700.000000 +-117.340000,33.160000,24.000000,1006.000000,277.000000,610.000000,246.000000,2.250000,187500.000000 +-117.340000,33.150000,19.000000,5710.000000,1423.000000,4163.000000,1406.000000,3.030600,178500.000000 +-117.340000,33.150000,17.000000,4505.000000,1140.000000,2111.000000,1062.000000,3.353600,283300.000000 +-117.340000,33.060000,17.000000,2718.000000,518.000000,815.000000,403.000000,4.318200,357100.000000 +-117.350000,34.500000,10.000000,2163.000000,392.000000,1174.000000,362.000000,3.375000,98000.000000 +-117.350000,34.440000,9.000000,11810.000000,2181.000000,6716.000000,2081.000000,3.182100,95600.000000 +-117.350000,34.200000,5.000000,9269.000000,1605.000000,4916.000000,1519.000000,4.436700,133200.000000 +-117.350000,34.170000,28.000000,1905.000000,372.000000,1480.000000,341.000000,2.984400,79200.000000 +-117.350000,34.160000,36.000000,1717.000000,348.000000,1054.000000,279.000000,2.444400,73400.000000 +-117.350000,34.150000,32.000000,2699.000000,552.000000,2086.000000,551.000000,2.297400,84500.000000 +-117.350000,34.120000,22.000000,5640.000000,889.000000,3157.000000,887.000000,4.158100,126500.000000 +-117.350000,34.090000,14.000000,5983.000000,1224.000000,3255.000000,1150.000000,2.590200,111500.000000 +-117.350000,34.040000,14.000000,2991.000000,522.000000,1729.000000,537.000000,3.513900,146800.000000 +-117.350000,34.010000,23.000000,3707.000000,769.000000,1938.000000,658.000000,2.725000,95300.000000 +-117.350000,34.000000,38.000000,1214.000000,254.000000,723.000000,236.000000,2.546900,87800.000000 +-117.350000,33.990000,45.000000,131.000000,28.000000,89.000000,31.000000,2.607100,112500.000000 +-117.350000,33.980000,31.000000,4163.000000,1242.000000,3928.000000,1076.000000,1.694300,85900.000000 +-117.350000,33.970000,27.000000,3960.000000,886.000000,2807.000000,838.000000,3.024000,122500.000000 +-117.350000,33.960000,25.000000,2396.000000,316.000000,951.000000,314.000000,8.240500,235200.000000 +-117.350000,33.950000,28.000000,1650.000000,210.000000,557.000000,211.000000,7.663200,204800.000000 +-117.350000,33.690000,11.000000,1229.000000,236.000000,581.000000,190.000000,3.102000,111300.000000 +-117.350000,33.680000,10.000000,516.000000,107.000000,282.000000,96.000000,4.278800,125000.000000 
+-117.350000,33.640000,23.000000,6859.000000,1535.000000,3405.000000,1351.000000,2.539500,109200.000000 +-117.350000,33.230000,4.000000,1837.000000,287.000000,934.000000,277.000000,3.895800,189800.000000 +-117.350000,33.210000,24.000000,1586.000000,262.000000,912.000000,298.000000,4.250000,150300.000000 +-117.350000,33.210000,18.000000,2971.000000,606.000000,2051.000000,493.000000,2.675000,117100.000000 +-117.350000,33.200000,32.000000,1251.000000,220.000000,700.000000,232.000000,3.987500,142900.000000 +-117.350000,33.200000,23.000000,3297.000000,728.000000,1793.000000,622.000000,2.575400,169700.000000 +-117.350000,33.190000,28.000000,2823.000000,476.000000,1189.000000,433.000000,5.173300,198100.000000 +-117.350000,33.170000,36.000000,1977.000000,423.000000,812.000000,387.000000,3.625000,198000.000000 +-117.350000,33.170000,16.000000,4595.000000,1341.000000,2849.000000,1197.000000,2.478000,185600.000000 +-117.350000,33.160000,10.000000,1684.000000,515.000000,902.000000,449.000000,3.789100,206300.000000 +-117.360000,34.540000,7.000000,3940.000000,764.000000,2140.000000,711.000000,3.035700,91300.000000 +-117.360000,34.480000,3.000000,16533.000000,2549.000000,7588.000000,2285.000000,3.979200,122100.000000 +-117.360000,34.280000,18.000000,3903.000000,715.000000,1388.000000,428.000000,4.238600,157200.000000 +-117.360000,34.110000,35.000000,2969.000000,521.000000,1555.000000,503.000000,3.250000,107100.000000 +-117.360000,34.100000,33.000000,1904.000000,343.000000,1366.000000,338.000000,3.622700,92800.000000 +-117.360000,34.100000,31.000000,2587.000000,531.000000,1227.000000,489.000000,2.357800,88600.000000 +-117.360000,34.080000,4.000000,8866.000000,1832.000000,4775.000000,1554.000000,3.734800,125800.000000 +-117.360000,34.000000,19.000000,4592.000000,895.000000,2769.000000,838.000000,3.362200,105100.000000 +-117.360000,33.990000,42.000000,1178.000000,261.000000,804.000000,283.000000,2.968800,92900.000000 +-117.360000,33.980000,46.000000,1680.000000,453.000000,1570.000000,435.000000,2.043600,82300.000000 +-117.360000,33.980000,33.000000,2070.000000,469.000000,1851.000000,467.000000,2.466700,80700.000000 +-117.360000,33.970000,32.000000,1625.000000,335.000000,1212.000000,327.000000,2.759600,82200.000000 +-117.360000,33.880000,15.000000,2857.000000,421.000000,1361.000000,382.000000,4.687500,189800.000000 +-117.360000,33.880000,10.000000,5600.000000,848.000000,2573.000000,788.000000,5.034600,240500.000000 +-117.360000,33.600000,10.000000,4097.000000,813.000000,2082.000000,731.000000,3.225800,159300.000000 +-117.360000,33.200000,19.000000,2129.000000,562.000000,1323.000000,525.000000,2.953900,169900.000000 +-117.360000,33.200000,19.000000,1926.000000,557.000000,1190.000000,483.000000,1.326900,166100.000000 +-117.360000,33.180000,39.000000,1546.000000,291.000000,833.000000,308.000000,2.889300,185400.000000 +-117.360000,33.180000,26.000000,5550.000000,1153.000000,2372.000000,1058.000000,2.550900,181800.000000 +-117.360000,33.170000,24.000000,2046.000000,442.000000,812.000000,367.000000,2.318200,500001.000000 +-117.370000,34.590000,39.000000,8193.000000,1747.000000,6852.000000,1597.000000,2.383200,35000.000000 +-117.370000,34.130000,18.000000,5877.000000,1043.000000,3114.000000,1002.000000,4.029400,133200.000000 +-117.370000,34.130000,17.000000,2681.000000,470.000000,1621.000000,459.000000,3.875000,118500.000000 +-117.370000,34.130000,12.000000,1893.000000,493.000000,1054.000000,389.000000,2.345600,140800.000000 
+-117.370000,34.120000,32.000000,3190.000000,568.000000,1614.000000,512.000000,3.839800,118200.000000 +-117.370000,34.100000,44.000000,2087.000000,447.000000,1270.000000,423.000000,2.388900,86100.000000 +-117.370000,34.100000,10.000000,3404.000000,855.000000,1656.000000,675.000000,1.697700,91300.000000 +-117.370000,34.080000,17.000000,2029.000000,404.000000,1190.000000,437.000000,2.955400,115000.000000 +-117.370000,34.070000,52.000000,50.000000,9.000000,60.000000,16.000000,4.125000,262500.000000 +-117.370000,34.010000,15.000000,1386.000000,247.000000,703.000000,185.000000,3.641500,124200.000000 +-117.370000,34.000000,41.000000,1248.000000,278.000000,770.000000,250.000000,3.025000,90600.000000 +-117.370000,34.000000,36.000000,730.000000,155.000000,476.000000,142.000000,2.430600,88900.000000 +-117.370000,33.990000,44.000000,917.000000,224.000000,666.000000,220.000000,1.685000,114200.000000 +-117.370000,33.980000,52.000000,201.000000,44.000000,130.000000,24.000000,2.025000,125000.000000 +-117.370000,33.980000,27.000000,1342.000000,547.000000,844.000000,484.000000,1.119400,95800.000000 +-117.370000,33.970000,40.000000,1166.000000,250.000000,976.000000,244.000000,1.950000,84800.000000 +-117.370000,33.970000,34.000000,3676.000000,697.000000,2653.000000,682.000000,2.580400,92400.000000 +-117.370000,33.960000,33.000000,3974.000000,548.000000,1398.000000,528.000000,7.251900,216600.000000 +-117.370000,33.950000,32.000000,2215.000000,351.000000,771.000000,311.000000,4.354200,142600.000000 +-117.370000,33.940000,20.000000,1682.000000,296.000000,706.000000,291.000000,4.096600,140100.000000 +-117.370000,33.940000,14.000000,9286.000000,1269.000000,3565.000000,1238.000000,6.663500,219600.000000 +-117.370000,33.700000,8.000000,4345.000000,865.000000,2425.000000,785.000000,3.248100,123800.000000 +-117.370000,33.220000,35.000000,2204.000000,482.000000,1435.000000,462.000000,3.676000,125600.000000 +-117.370000,33.200000,29.000000,1315.000000,311.000000,1425.000000,306.000000,2.027200,99600.000000 +-117.370000,33.200000,19.000000,928.000000,317.000000,845.000000,319.000000,1.631800,187500.000000 +-117.370000,33.190000,33.000000,2205.000000,453.000000,1138.000000,439.000000,2.881900,208600.000000 +-117.370000,33.190000,23.000000,4104.000000,1274.000000,4729.000000,1187.000000,1.821400,173800.000000 +-117.370000,33.190000,18.000000,975.000000,382.000000,650.000000,286.000000,1.956200,192500.000000 +-117.370000,33.180000,19.000000,1931.000000,509.000000,855.000000,394.000000,2.697900,266700.000000 +-117.380000,34.440000,4.000000,5083.000000,867.000000,2541.000000,856.000000,4.241400,121400.000000 +-117.380000,34.220000,16.000000,774.000000,122.000000,489.000000,136.000000,5.762800,221300.000000 +-117.380000,34.200000,16.000000,193.000000,45.000000,312.000000,76.000000,3.757800,137500.000000 +-117.380000,34.140000,11.000000,10804.000000,1493.000000,5221.000000,1482.000000,5.246000,161400.000000 +-117.380000,34.130000,23.000000,1326.000000,300.000000,722.000000,263.000000,2.185600,107800.000000 +-117.380000,34.130000,13.000000,2903.000000,510.000000,1844.000000,510.000000,3.719800,112900.000000 +-117.380000,34.120000,17.000000,5959.000000,1208.000000,4115.000000,1088.000000,2.405300,105200.000000 +-117.380000,34.110000,32.000000,3179.000000,662.000000,1878.000000,661.000000,3.137500,101200.000000 +-117.380000,34.090000,8.000000,3955.000000,815.000000,2184.000000,725.000000,3.343800,127600.000000 +-117.380000,34.080000,11.000000,5684.000000,1139.000000,3095.000000,1036.000000,3.687500,112600.000000 
+-117.380000,34.070000,6.000000,1156.000000,191.000000,910.000000,234.000000,4.909100,122400.000000 +-117.380000,34.060000,17.000000,3139.000000,569.000000,1612.000000,516.000000,3.357100,112300.000000 +-117.380000,34.000000,45.000000,2881.000000,514.000000,1470.000000,515.000000,3.368700,123800.000000 +-117.380000,33.990000,52.000000,1797.000000,332.000000,905.000000,313.000000,2.705400,141700.000000 +-117.380000,33.980000,52.000000,2274.000000,571.000000,1167.000000,504.000000,2.028400,101600.000000 +-117.380000,33.970000,30.000000,2953.000000,703.000000,1406.000000,580.000000,2.689500,150000.000000 +-117.380000,33.970000,29.000000,1157.000000,297.000000,2027.000000,253.000000,1.638900,155000.000000 +-117.380000,33.960000,30.000000,3153.000000,623.000000,1544.000000,575.000000,3.449100,133800.000000 +-117.380000,33.940000,21.000000,2468.000000,380.000000,1164.000000,385.000000,4.062500,136800.000000 +-117.380000,33.890000,12.000000,3964.000000,524.000000,1707.000000,549.000000,5.162400,267900.000000 +-117.380000,33.670000,17.000000,10145.000000,2306.000000,4776.000000,1749.000000,2.242300,132600.000000 +-117.380000,33.200000,26.000000,1427.000000,386.000000,974.000000,317.000000,1.390300,184400.000000 +-117.380000,33.200000,17.000000,1877.000000,581.000000,1288.000000,426.000000,1.938600,106300.000000 +-117.380000,33.190000,35.000000,928.000000,264.000000,538.000000,248.000000,2.458300,197900.000000 +-117.380000,33.190000,26.000000,4123.000000,1145.000000,1703.000000,895.000000,1.989100,500000.000000 +-117.380000,33.140000,14.000000,5039.000000,1373.000000,1298.000000,696.000000,3.209000,313300.000000 +-117.390000,34.380000,4.000000,7151.000000,1295.000000,3527.000000,1170.000000,3.569600,129700.000000 +-117.390000,34.130000,9.000000,2228.000000,398.000000,1316.000000,370.000000,4.163200,119800.000000 +-117.390000,34.120000,7.000000,5059.000000,780.000000,3253.000000,801.000000,4.919600,140500.000000 +-117.390000,34.110000,16.000000,1140.000000,181.000000,627.000000,206.000000,4.944400,132700.000000 +-117.390000,34.100000,19.000000,1000.000000,211.000000,572.000000,230.000000,2.402800,112500.000000 +-117.390000,34.070000,26.000000,1387.000000,277.000000,664.000000,239.000000,3.027800,96800.000000 +-117.390000,34.040000,27.000000,2919.000000,549.000000,1841.000000,564.000000,2.868200,96400.000000 +-117.390000,33.970000,48.000000,1915.000000,348.000000,1060.000000,376.000000,3.404400,117900.000000 +-117.390000,33.960000,49.000000,2527.000000,461.000000,1344.000000,451.000000,4.083300,114400.000000 +-117.390000,33.950000,36.000000,1380.000000,269.000000,598.000000,262.000000,3.166700,122900.000000 +-117.390000,33.950000,35.000000,1599.000000,284.000000,721.000000,287.000000,4.125000,120700.000000 +-117.390000,33.930000,26.000000,3014.000000,494.000000,1832.000000,485.000000,4.833300,127900.000000 +-117.400000,34.580000,18.000000,755.000000,169.000000,483.000000,165.000000,1.419600,64700.000000 +-117.400000,34.180000,16.000000,1769.000000,254.000000,1778.000000,251.000000,5.367100,181800.000000 +-117.400000,34.150000,4.000000,12156.000000,1864.000000,5020.000000,1524.000000,4.790900,149200.000000 +-117.400000,34.110000,14.000000,1933.000000,347.000000,1443.000000,376.000000,4.212100,128100.000000 +-117.400000,34.090000,5.000000,6190.000000,993.000000,3615.000000,963.000000,4.403400,133200.000000 +-117.400000,34.080000,21.000000,3622.000000,667.000000,2503.000000,720.000000,3.853100,105400.000000 
+-117.400000,34.070000,28.000000,2879.000000,659.000000,1661.000000,554.000000,2.066000,88100.000000 +-117.400000,34.060000,17.000000,5451.000000,1008.000000,3533.000000,940.000000,3.919100,101600.000000 +-117.400000,34.040000,17.000000,1906.000000,334.000000,1550.000000,338.000000,3.025000,81800.000000 +-117.400000,34.000000,31.000000,1192.000000,307.000000,1013.000000,283.000000,2.074200,76200.000000 +-117.400000,34.000000,24.000000,2316.000000,599.000000,1829.000000,532.000000,1.695500,86800.000000 +-117.400000,33.970000,41.000000,1707.000000,276.000000,660.000000,269.000000,3.861800,134800.000000 +-117.400000,33.970000,38.000000,1383.000000,238.000000,649.000000,232.000000,5.019400,148900.000000 +-117.400000,33.960000,51.000000,1806.000000,322.000000,709.000000,298.000000,3.575000,125500.000000 +-117.400000,33.950000,46.000000,2189.000000,423.000000,866.000000,389.000000,3.138400,111500.000000 +-117.400000,33.950000,43.000000,633.000000,166.000000,292.000000,135.000000,1.160100,121400.000000 +-117.400000,33.950000,32.000000,1979.000000,491.000000,954.000000,444.000000,2.440800,117300.000000 +-117.400000,33.940000,42.000000,943.000000,171.000000,466.000000,203.000000,3.145800,116000.000000 +-117.400000,33.940000,37.000000,987.000000,187.000000,551.000000,191.000000,3.586500,112000.000000 +-117.400000,33.930000,35.000000,1468.000000,298.000000,1168.000000,261.000000,2.222200,81300.000000 +-117.400000,33.850000,9.000000,7538.000000,1125.000000,3450.000000,1077.000000,5.462500,223600.000000 +-117.400000,33.760000,8.000000,1954.000000,330.000000,973.000000,321.000000,4.487500,249100.000000 +-117.410000,34.580000,14.000000,859.000000,212.000000,541.000000,181.000000,1.683800,57900.000000 +-117.410000,34.580000,10.000000,2964.000000,668.000000,1853.000000,609.000000,1.604700,73400.000000 +-117.410000,34.240000,20.000000,1160.000000,181.000000,543.000000,188.000000,5.207200,164300.000000 +-117.410000,34.110000,29.000000,3999.000000,772.000000,2602.000000,760.000000,3.548100,105500.000000 +-117.410000,34.110000,12.000000,6758.000000,1550.000000,3204.000000,1279.000000,2.518100,105500.000000 +-117.410000,34.100000,29.000000,1362.000000,251.000000,776.000000,253.000000,3.128700,102000.000000 +-117.410000,34.090000,21.000000,3300.000000,587.000000,1896.000000,572.000000,3.646600,130600.000000 +-117.410000,34.080000,38.000000,1541.000000,290.000000,861.000000,299.000000,3.565500,95600.000000 +-117.410000,34.010000,34.000000,1231.000000,216.000000,841.000000,199.000000,2.644200,92000.000000 +-117.410000,34.000000,38.000000,2228.000000,571.000000,1697.000000,530.000000,1.905200,83400.000000 +-117.410000,34.000000,26.000000,2372.000000,621.000000,1647.000000,612.000000,1.471900,88600.000000 +-117.410000,33.970000,34.000000,2316.000000,365.000000,956.000000,389.000000,4.337000,157800.000000 +-117.410000,33.970000,24.000000,950.000000,183.000000,383.000000,182.000000,3.069400,125000.000000 +-117.410000,33.960000,32.000000,2837.000000,617.000000,1393.000000,595.000000,2.379800,118800.000000 +-117.410000,33.960000,27.000000,2341.000000,418.000000,1272.000000,415.000000,3.020800,112700.000000 +-117.410000,33.950000,37.000000,1586.000000,283.000000,675.000000,305.000000,2.958300,132100.000000 +-117.410000,33.950000,37.000000,1462.000000,257.000000,849.000000,287.000000,3.054200,123900.000000 +-117.410000,33.940000,33.000000,2074.000000,476.000000,911.000000,420.000000,2.870000,117600.000000 +-117.410000,33.940000,22.000000,4179.000000,1081.000000,2096.000000,1013.000000,2.443500,118500.000000 
+-117.410000,33.930000,35.000000,793.000000,150.000000,669.000000,128.000000,4.015600,89300.000000 +-117.420000,34.590000,8.000000,5445.000000,1360.000000,3220.000000,1214.000000,1.756700,69500.000000 +-117.420000,34.130000,4.000000,11587.000000,1796.000000,5804.000000,1705.000000,4.828300,141900.000000 +-117.420000,34.110000,25.000000,4261.000000,893.000000,2319.000000,702.000000,3.395800,111900.000000 +-117.420000,34.090000,28.000000,3193.000000,525.000000,1750.000000,523.000000,4.137500,128300.000000 +-117.420000,34.080000,28.000000,2300.000000,419.000000,1312.000000,444.000000,3.484400,127700.000000 +-117.420000,34.060000,27.000000,2532.000000,495.000000,1305.000000,436.000000,2.910700,143100.000000 +-117.420000,34.020000,9.000000,5455.000000,882.000000,3015.000000,858.000000,4.232100,162800.000000 +-117.420000,34.000000,32.000000,1617.000000,346.000000,1153.000000,385.000000,3.016000,96600.000000 +-117.420000,33.980000,16.000000,10072.000000,2043.000000,5913.000000,1909.000000,3.060600,119500.000000 +-117.420000,33.950000,32.000000,4251.000000,848.000000,2494.000000,798.000000,2.817300,110800.000000 +-117.420000,33.940000,35.000000,1764.000000,325.000000,1094.000000,353.000000,4.152800,113900.000000 +-117.420000,33.940000,26.000000,2420.000000,532.000000,1383.000000,469.000000,3.540300,113500.000000 +-117.420000,33.930000,32.000000,2885.000000,595.000000,1509.000000,590.000000,3.179500,125600.000000 +-117.420000,33.890000,4.000000,80.000000,10.000000,55.000000,13.000000,7.719700,193800.000000 +-117.420000,33.350000,14.000000,25135.000000,4819.000000,35682.000000,4769.000000,2.572900,134400.000000 +-117.430000,34.120000,7.000000,5954.000000,1071.000000,3567.000000,1070.000000,3.205600,134100.000000 +-117.430000,34.100000,43.000000,1898.000000,418.000000,971.000000,366.000000,2.473500,89900.000000 +-117.430000,34.100000,34.000000,1345.000000,265.000000,834.000000,290.000000,3.701100,99500.000000 +-117.430000,34.090000,18.000000,3172.000000,632.000000,1621.000000,573.000000,2.743700,120200.000000 +-117.430000,34.080000,31.000000,3207.000000,560.000000,1582.000000,538.000000,4.263000,127400.000000 +-117.430000,34.070000,18.000000,2453.000000,537.000000,1503.000000,500.000000,2.376800,95300.000000 +-117.430000,34.020000,33.000000,3084.000000,570.000000,1753.000000,449.000000,3.050000,97800.000000 +-117.430000,34.010000,34.000000,2101.000000,426.000000,1150.000000,377.000000,3.090900,98300.000000 +-117.430000,33.990000,18.000000,3307.000000,547.000000,1738.000000,457.000000,4.566000,116900.000000 +-117.430000,33.980000,21.000000,2634.000000,421.000000,1376.000000,406.000000,4.258900,152200.000000 +-117.430000,33.960000,28.000000,3747.000000,651.000000,2399.000000,646.000000,3.868200,116500.000000 +-117.430000,33.950000,36.000000,2284.000000,444.000000,1425.000000,405.000000,4.052600,104500.000000 +-117.430000,33.930000,31.000000,1273.000000,262.000000,686.000000,254.000000,2.492200,109400.000000 +-117.430000,33.930000,15.000000,4836.000000,1368.000000,3012.000000,1240.000000,2.186500,129300.000000 +-117.430000,33.910000,15.000000,14281.000000,2511.000000,7540.000000,2245.000000,4.322200,138000.000000 +-117.430000,33.810000,13.000000,4770.000000,718.000000,1985.000000,662.000000,4.227300,295200.000000 +-117.440000,34.450000,6.000000,6068.000000,1137.000000,3094.000000,947.000000,3.516700,130900.000000 +-117.440000,34.100000,43.000000,1614.000000,400.000000,926.000000,349.000000,2.075000,95100.000000 
+-117.440000,34.090000,24.000000,3477.000000,831.000000,2541.000000,753.000000,2.368200,97400.000000 +-117.440000,34.090000,12.000000,3598.000000,828.000000,2588.000000,781.000000,2.375000,113800.000000 +-117.440000,34.080000,15.000000,5024.000000,992.000000,3208.000000,981.000000,3.602500,116400.000000 +-117.440000,33.990000,12.000000,9966.000000,1517.000000,5008.000000,1492.000000,4.562500,171300.000000 +-117.440000,33.960000,29.000000,124.000000,22.000000,50.000000,18.000000,12.538100,112500.000000 +-117.440000,33.940000,32.000000,2349.000000,452.000000,1479.000000,425.000000,3.911800,114100.000000 +-117.440000,33.940000,30.000000,2992.000000,516.000000,1521.000000,507.000000,3.912800,126900.000000 +-117.440000,33.930000,34.000000,1577.000000,272.000000,880.000000,284.000000,4.632700,116000.000000 +-117.440000,33.920000,33.000000,2433.000000,525.000000,1466.000000,517.000000,3.043700,110800.000000 +-117.440000,33.900000,23.000000,4487.000000,754.000000,2609.000000,778.000000,4.278800,148700.000000 +-117.440000,33.880000,5.000000,2589.000000,351.000000,1109.000000,360.000000,6.808900,334100.000000 +-117.450000,34.110000,7.000000,6356.000000,1244.000000,4052.000000,1164.000000,2.911200,121700.000000 +-117.450000,34.100000,9.000000,4288.000000,1017.000000,3156.000000,900.000000,2.782700,105800.000000 +-117.450000,34.100000,6.000000,5571.000000,1316.000000,4048.000000,1154.000000,2.030800,91100.000000 +-117.450000,34.070000,21.000000,3465.000000,639.000000,2292.000000,628.000000,3.355300,113500.000000 +-117.450000,34.010000,26.000000,3042.000000,598.000000,1720.000000,551.000000,2.760000,95200.000000 +-117.450000,33.940000,12.000000,3539.000000,869.000000,1987.000000,859.000000,2.102300,103700.000000 +-117.450000,33.930000,20.000000,5998.000000,1320.000000,3185.000000,1199.000000,3.273100,113900.000000 +-117.450000,33.920000,35.000000,2552.000000,588.000000,1840.000000,551.000000,2.254800,113300.000000 +-117.450000,33.910000,29.000000,2320.000000,422.000000,1358.000000,415.000000,3.733300,121400.000000 +-117.460000,34.850000,7.000000,9759.000000,1816.000000,2933.000000,1168.000000,3.491200,157700.000000 +-117.460000,34.140000,10.000000,714.000000,131.000000,381.000000,119.000000,0.892600,116100.000000 +-117.460000,34.100000,7.000000,1759.000000,473.000000,1064.000000,328.000000,1.958300,108800.000000 +-117.460000,34.100000,35.000000,908.000000,226.000000,667.000000,203.000000,2.583300,93500.000000 +-117.460000,34.090000,8.000000,4711.000000,963.000000,3310.000000,988.000000,3.548800,101600.000000 +-117.460000,34.070000,19.000000,3155.000000,572.000000,2482.000000,642.000000,2.997300,113400.000000 +-117.460000,34.060000,24.000000,2831.000000,478.000000,1582.000000,435.000000,4.339700,195600.000000 +-117.460000,34.040000,3.000000,12870.000000,2315.000000,5820.000000,1759.000000,4.242900,147300.000000 +-117.460000,33.950000,34.000000,1565.000000,296.000000,1142.000000,328.000000,3.697900,99600.000000 +-117.460000,33.940000,35.000000,1566.000000,294.000000,1056.000000,279.000000,3.522700,105400.000000 +-117.460000,33.940000,26.000000,2481.000000,620.000000,2411.000000,552.000000,1.705900,85800.000000 +-117.460000,33.930000,19.000000,4780.000000,861.000000,3043.000000,766.000000,3.743100,132800.000000 +-117.460000,33.930000,16.000000,4112.000000,880.000000,2821.000000,857.000000,3.012200,114700.000000 +-117.460000,33.920000,21.000000,713.000000,142.000000,476.000000,142.000000,3.520800,121100.000000 
+-117.470000,34.120000,6.000000,10565.000000,1767.000000,5690.000000,1555.000000,4.179700,141000.000000 +-117.470000,34.070000,24.000000,1017.000000,227.000000,568.000000,187.000000,1.597200,112500.000000 +-117.470000,34.060000,33.000000,1379.000000,273.000000,884.000000,229.000000,2.757400,125000.000000 +-117.470000,33.980000,8.000000,12106.000000,1913.000000,5810.000000,1717.000000,4.988600,158100.000000 +-117.470000,33.950000,15.000000,6248.000000,1249.000000,3795.000000,1128.000000,4.126400,124600.000000 +-117.470000,33.940000,34.000000,2086.000000,417.000000,1501.000000,395.000000,3.231100,105600.000000 +-117.470000,33.930000,33.000000,919.000000,208.000000,724.000000,235.000000,3.402800,110500.000000 +-117.470000,33.920000,18.000000,3869.000000,773.000000,2500.000000,726.000000,3.658300,126100.000000 +-117.470000,33.910000,21.000000,3491.000000,760.000000,1920.000000,669.000000,2.224100,127300.000000 +-117.480000,34.090000,32.000000,3170.000000,630.000000,2612.000000,580.000000,3.639400,99200.000000 +-117.480000,34.090000,32.000000,1650.000000,328.000000,1124.000000,290.000000,3.183800,98600.000000 +-117.480000,34.080000,28.000000,1922.000000,382.000000,1565.000000,340.000000,3.915000,117400.000000 +-117.480000,34.010000,23.000000,2000.000000,376.000000,1361.000000,388.000000,4.369000,121100.000000 +-117.480000,34.000000,12.000000,6751.000000,1153.000000,3266.000000,1134.000000,3.852900,145500.000000 +-117.480000,33.940000,29.000000,1625.000000,336.000000,1046.000000,320.000000,3.198500,117300.000000 +-117.480000,33.940000,19.000000,1891.000000,465.000000,1693.000000,416.000000,2.781300,112900.000000 +-117.480000,33.930000,31.000000,2191.000000,459.000000,1564.000000,450.000000,2.677600,122000.000000 +-117.480000,33.910000,22.000000,3611.000000,666.000000,1869.000000,649.000000,4.220700,141100.000000 +-117.480000,33.890000,14.000000,10395.000000,1799.000000,6295.000000,1855.000000,4.729500,149900.000000 +-117.490000,34.040000,4.000000,6034.000000,1170.000000,3527.000000,1098.000000,4.177500,143700.000000 +-117.490000,34.020000,35.000000,2051.000000,427.000000,1466.000000,425.000000,3.671100,108200.000000 +-117.490000,34.020000,21.000000,3736.000000,738.000000,2021.000000,640.000000,4.454500,142400.000000 +-117.490000,33.990000,21.000000,2050.000000,392.000000,1153.000000,336.000000,4.840000,116400.000000 +-117.490000,33.980000,17.000000,2727.000000,462.000000,1691.000000,448.000000,4.837100,160600.000000 +-117.490000,33.940000,28.000000,2787.000000,490.000000,1684.000000,467.000000,4.025600,127100.000000 +-117.490000,33.930000,26.000000,2970.000000,576.000000,2156.000000,558.000000,3.952200,124600.000000 +-117.490000,33.900000,7.000000,10235.000000,2238.000000,5271.000000,2094.000000,3.607100,159100.000000 +-117.500000,34.660000,20.000000,1319.000000,309.000000,486.000000,196.000000,2.018400,84900.000000 +-117.500000,34.120000,2.000000,11965.000000,1802.000000,4436.000000,1296.000000,5.285000,191700.000000 +-117.500000,34.040000,5.000000,3958.000000,665.000000,2456.000000,666.000000,5.164700,154700.000000 +-117.500000,34.040000,4.000000,3428.000000,649.000000,2158.000000,632.000000,5.017500,143400.000000 +-117.500000,34.000000,15.000000,1929.000000,317.000000,1237.000000,316.000000,4.406300,128500.000000 +-117.500000,33.980000,21.000000,2394.000000,416.000000,1291.000000,381.000000,4.209900,138700.000000 +-117.500000,33.960000,12.000000,7923.000000,1470.000000,4861.000000,1385.000000,4.298500,139200.000000 
+-117.500000,33.950000,29.000000,932.000000,153.000000,711.000000,172.000000,4.821400,143400.000000 +-117.500000,33.930000,19.000000,4741.000000,835.000000,2903.000000,796.000000,4.372300,135600.000000 +-117.500000,33.920000,31.000000,2529.000000,513.000000,1504.000000,426.000000,2.982100,115600.000000 +-117.510000,34.410000,5.000000,2884.000000,567.000000,1396.000000,465.000000,3.736100,119600.000000 +-117.510000,34.140000,21.000000,2455.000000,381.000000,1094.000000,327.000000,4.643700,191700.000000 +-117.510000,34.000000,36.000000,3791.000000,746.000000,2258.000000,672.000000,3.206700,124700.000000 +-117.510000,33.970000,35.000000,352.000000,62.000000,184.000000,57.000000,3.669100,137500.000000 +-117.510000,33.950000,12.000000,9016.000000,1486.000000,4285.000000,1457.000000,4.998400,169100.000000 +-117.510000,33.890000,16.000000,5418.000000,1005.000000,2690.000000,1088.000000,4.055600,158000.000000 +-117.510000,33.880000,24.000000,3044.000000,602.000000,2541.000000,564.000000,4.131000,123800.000000 +-117.520000,33.990000,14.000000,13562.000000,2057.000000,7600.000000,2086.000000,5.275900,182900.000000 +-117.520000,33.890000,2.000000,17978.000000,3217.000000,7305.000000,2463.000000,5.169500,220800.000000 +-117.520000,33.840000,20.000000,688.000000,146.000000,575.000000,144.000000,3.550000,111000.000000 +-117.520000,33.830000,22.000000,2397.000000,400.000000,1347.000000,403.000000,4.460000,189800.000000 +-117.520000,33.820000,14.000000,3776.000000,580.000000,1877.000000,559.000000,5.136500,215000.000000 +-117.530000,34.280000,35.000000,1529.000000,338.000000,688.000000,256.000000,4.108300,108000.000000 +-117.530000,34.100000,5.000000,2185.000000,488.000000,1379.000000,458.000000,3.791700,103000.000000 +-117.530000,34.060000,18.000000,5605.000000,1303.000000,4028.000000,1145.000000,2.938600,116400.000000 +-117.530000,34.020000,19.000000,256.000000,34.000000,101.000000,28.000000,5.326900,375000.000000 +-117.530000,33.970000,34.000000,1293.000000,215.000000,774.000000,217.000000,3.890600,141000.000000 +-117.530000,33.880000,22.000000,2855.000000,667.000000,2453.000000,624.000000,3.131200,91000.000000 +-117.530000,33.830000,7.000000,2191.000000,324.000000,1156.000000,310.000000,5.536200,195600.000000 +-117.530000,33.690000,6.000000,454.000000,102.000000,213.000000,43.000000,10.970400,483300.000000 +-117.540000,34.550000,5.000000,2949.000000,671.000000,1620.000000,530.000000,2.947900,83300.000000 +-117.540000,34.470000,4.000000,6712.000000,1200.000000,3126.000000,1026.000000,3.227700,126500.000000 +-117.540000,34.120000,4.000000,17577.000000,2819.000000,7766.000000,2473.000000,5.133300,181800.000000 +-117.540000,34.110000,16.000000,2114.000000,374.000000,1463.000000,399.000000,3.924100,131500.000000 +-117.540000,33.820000,6.000000,202.000000,29.000000,75.000000,28.000000,4.125000,216700.000000 +-117.550000,34.250000,39.000000,1578.000000,317.000000,872.000000,322.000000,4.555000,153100.000000 +-117.550000,34.140000,3.000000,5710.000000,919.000000,2874.000000,886.000000,5.363800,206300.000000 +-117.550000,34.000000,17.000000,3583.000000,700.000000,1587.000000,719.000000,2.697900,75000.000000 +-117.550000,33.950000,17.000000,3196.000000,444.000000,1581.000000,462.000000,5.933300,229400.000000 +-117.550000,33.940000,30.000000,5398.000000,926.000000,2672.000000,864.000000,4.476200,163900.000000 +-117.550000,33.930000,25.000000,5187.000000,934.000000,2725.000000,860.000000,4.186500,154300.000000 +-117.550000,33.920000,24.000000,2807.000000,501.000000,1653.000000,509.000000,4.816700,163300.000000 
+-117.550000,33.900000,21.000000,1839.000000,324.000000,871.000000,307.000000,3.445900,198800.000000 +-117.550000,33.890000,25.000000,2999.000000,439.000000,1396.000000,458.000000,5.697300,164800.000000 +-117.550000,33.880000,19.000000,2472.000000,618.000000,2143.000000,610.000000,2.237200,108800.000000 +-117.550000,33.870000,18.000000,8136.000000,1584.000000,4976.000000,1516.000000,3.941400,137100.000000 +-117.550000,33.850000,4.000000,8207.000000,1373.000000,3887.000000,1304.000000,4.868600,195300.000000 +-117.550000,33.830000,6.000000,502.000000,76.000000,228.000000,65.000000,4.238600,500001.000000 +-117.550000,33.520000,15.000000,426.000000,62.000000,133.000000,45.000000,5.136000,447400.000000 +-117.560000,34.120000,4.000000,6956.000000,1271.000000,3455.000000,1228.000000,4.719300,178700.000000 +-117.560000,34.120000,4.000000,5351.000000,1210.000000,2988.000000,1101.000000,3.797300,181300.000000 +-117.560000,33.940000,6.000000,575.000000,73.000000,318.000000,88.000000,7.021500,257100.000000 +-117.560000,33.940000,29.000000,266.000000,42.000000,136.000000,40.000000,1.625000,164300.000000 +-117.560000,33.890000,16.000000,693.000000,185.000000,365.000000,176.000000,2.341700,191700.000000 +-117.560000,33.880000,40.000000,1196.000000,294.000000,1052.000000,258.000000,2.068200,113000.000000 +-117.560000,33.860000,25.000000,6964.000000,1066.000000,3240.000000,1036.000000,5.289800,177100.000000 +-117.560000,33.830000,28.000000,895.000000,127.000000,346.000000,115.000000,5.478800,339300.000000 +-117.570000,34.070000,4.000000,2152.000000,580.000000,1083.000000,441.000000,3.145800,118800.000000 +-117.570000,34.020000,5.000000,6933.000000,1311.000000,3845.000000,1285.000000,4.672700,158900.000000 +-117.570000,33.910000,22.000000,2620.000000,396.000000,1324.000000,362.000000,5.373500,214600.000000 +-117.570000,33.900000,7.000000,3797.000000,850.000000,2369.000000,720.000000,3.552500,137600.000000 +-117.570000,33.880000,39.000000,679.000000,164.000000,769.000000,179.000000,2.303600,110600.000000 +-117.570000,33.880000,35.000000,1755.000000,446.000000,1453.000000,428.000000,2.316000,119400.000000 +-117.570000,33.870000,37.000000,621.000000,156.000000,443.000000,135.000000,2.333300,122800.000000 +-117.570000,33.870000,33.000000,2076.000000,517.000000,1374.000000,480.000000,2.219700,138200.000000 +-117.570000,33.870000,27.000000,1786.000000,287.000000,939.000000,278.000000,5.192900,165000.000000 +-117.580000,34.140000,7.000000,11818.000000,1745.000000,5499.000000,1600.000000,5.367800,231700.000000 +-117.580000,34.100000,4.000000,6360.000000,1584.000000,3359.000000,1396.000000,3.518600,127800.000000 +-117.580000,34.020000,4.000000,5998.000000,1092.000000,3182.000000,1042.000000,5.269200,174800.000000 +-117.580000,34.000000,2.000000,7544.000000,1516.000000,2801.000000,1001.000000,4.003700,245200.000000 +-117.580000,33.920000,16.000000,4157.000000,586.000000,2036.000000,594.000000,6.155000,246400.000000 +-117.580000,33.890000,14.000000,1731.000000,404.000000,1269.000000,351.000000,2.365400,107900.000000 +-117.580000,33.870000,42.000000,765.000000,171.000000,590.000000,177.000000,1.687500,113500.000000 +-117.580000,33.870000,17.000000,2772.000000,449.000000,1685.000000,461.000000,5.046400,163900.000000 +-117.580000,33.850000,6.000000,16431.000000,2640.000000,8222.000000,2553.000000,5.286100,195100.000000 +-117.580000,33.660000,4.000000,3305.000000,644.000000,1693.000000,597.000000,5.249700,215000.000000 +-117.580000,33.650000,4.000000,2000.000000,422.000000,833.000000,386.000000,5.770900,190300.000000 
+-117.580000,33.650000,4.000000,1606.000000,498.000000,815.000000,426.000000,3.375000,500001.000000 +-117.580000,33.600000,5.000000,5348.000000,659.000000,1862.000000,555.000000,11.056700,495400.000000 +-117.590000,34.160000,10.000000,9467.000000,1181.000000,3819.000000,1122.000000,7.825200,361400.000000 +-117.590000,34.130000,10.000000,20263.000000,3915.000000,9716.000000,3744.000000,3.850500,169600.000000 +-117.590000,34.100000,17.000000,3646.000000,1035.000000,1987.000000,895.000000,2.360300,139300.000000 +-117.590000,34.090000,16.000000,2401.000000,465.000000,1757.000000,500.000000,3.975500,120400.000000 +-117.590000,34.030000,16.000000,3443.000000,562.000000,2130.000000,564.000000,5.076900,161400.000000 +-117.590000,34.020000,14.000000,1463.000000,261.000000,881.000000,245.000000,4.785700,152500.000000 +-117.590000,33.930000,17.000000,338.000000,47.000000,200.000000,46.000000,7.811800,244200.000000 +-117.590000,33.910000,7.000000,10223.000000,1491.000000,5205.000000,1509.000000,5.487200,203400.000000 +-117.590000,33.880000,7.000000,3586.000000,959.000000,2695.000000,877.000000,2.438700,117000.000000 +-117.590000,33.670000,29.000000,1223.000000,215.000000,633.000000,204.000000,6.514300,279800.000000 +-117.590000,33.660000,4.000000,1318.000000,218.000000,673.000000,225.000000,6.072200,260800.000000 +-117.590000,33.650000,2.000000,4860.000000,1193.000000,2332.000000,1073.000000,4.502200,151900.000000 +-117.590000,33.610000,3.000000,2993.000000,429.000000,991.000000,390.000000,10.076500,378200.000000 +-117.590000,33.430000,14.000000,3223.000000,484.000000,1230.000000,488.000000,6.596400,358800.000000 +-117.590000,33.410000,17.000000,2248.000000,448.000000,878.000000,423.000000,5.129800,246000.000000 +-117.590000,33.400000,22.000000,3167.000000,743.000000,1797.000000,642.000000,4.007600,252100.000000 +-117.600000,34.080000,15.000000,2700.000000,460.000000,1432.000000,449.000000,4.906300,159800.000000 +-117.600000,34.020000,16.000000,2103.000000,348.000000,1305.000000,356.000000,5.284900,160400.000000 +-117.600000,33.980000,26.000000,1225.000000,199.000000,717.000000,204.000000,2.728400,225000.000000 +-117.600000,33.940000,26.000000,2925.000000,575.000000,1921.000000,501.000000,3.185900,153100.000000 +-117.600000,33.870000,18.000000,6450.000000,1165.000000,3716.000000,1113.000000,4.272100,150300.000000 +-117.600000,33.860000,23.000000,2949.000000,473.000000,1671.000000,477.000000,5.195000,161000.000000 +-117.600000,33.850000,9.000000,6538.000000,955.000000,2928.000000,892.000000,5.300600,221400.000000 +-117.600000,33.720000,36.000000,1317.000000,228.000000,531.000000,214.000000,5.634600,272500.000000 +-117.600000,33.650000,4.000000,3134.000000,504.000000,1599.000000,485.000000,6.246400,233900.000000 +-117.600000,33.450000,4.000000,2369.000000,566.000000,996.000000,435.000000,5.403100,243800.000000 +-117.600000,33.430000,21.000000,3951.000000,562.000000,1392.000000,543.000000,5.143900,414000.000000 +-117.600000,33.420000,23.000000,2482.000000,461.000000,1048.000000,425.000000,4.665000,280600.000000 +-117.600000,33.410000,29.000000,2193.000000,389.000000,922.000000,387.000000,4.547600,309200.000000 +-117.610000,34.140000,14.000000,15809.000000,2485.000000,7363.000000,2410.000000,5.519800,245600.000000 +-117.610000,34.130000,21.000000,8416.000000,1386.000000,4308.000000,1341.000000,4.461100,164600.000000 +-117.610000,34.100000,9.000000,18956.000000,4095.000000,10323.000000,3832.000000,3.603300,132600.000000 
+-117.610000,34.090000,23.000000,1945.000000,362.000000,1483.000000,383.000000,4.420500,135500.000000 +-117.610000,34.090000,11.000000,2000.000000,391.000000,1503.000000,426.000000,4.616700,144000.000000 +-117.610000,34.080000,12.000000,4427.000000,822.000000,2400.000000,843.000000,4.714700,158700.000000 +-117.610000,34.040000,8.000000,4116.000000,766.000000,1785.000000,745.000000,3.167200,150200.000000 +-117.610000,34.020000,8.000000,63.000000,9.000000,25.000000,7.000000,7.719700,275000.000000 +-117.610000,34.020000,15.000000,1791.000000,346.000000,1219.000000,328.000000,3.812500,170300.000000 +-117.610000,34.010000,25.000000,352.000000,41.000000,99.000000,34.000000,3.969600,500000.000000 +-117.610000,33.670000,3.000000,4541.000000,720.000000,1600.000000,583.000000,6.800400,284900.000000 +-117.610000,33.630000,2.000000,4678.000000,817.000000,1970.000000,712.000000,6.107800,219000.000000 +-117.610000,33.450000,6.000000,950.000000,184.000000,426.000000,186.000000,4.723700,220700.000000 +-117.610000,33.440000,17.000000,2036.000000,272.000000,713.000000,265.000000,6.595400,346200.000000 +-117.610000,33.430000,33.000000,1150.000000,383.000000,604.000000,317.000000,2.354500,187500.000000 +-117.610000,33.430000,24.000000,2303.000000,399.000000,851.000000,379.000000,3.987500,346500.000000 +-117.610000,33.420000,31.000000,3959.000000,856.000000,1919.000000,775.000000,4.031300,282000.000000 +-117.610000,33.410000,35.000000,2556.000000,404.000000,946.000000,399.000000,6.155700,402900.000000 +-117.620000,34.440000,6.000000,8884.000000,1687.000000,3767.000000,1334.000000,3.599000,140200.000000 +-117.620000,34.150000,16.000000,13556.000000,1704.000000,5669.000000,1668.000000,6.513800,311500.000000 +-117.620000,34.130000,20.000000,3216.000000,516.000000,1655.000000,524.000000,5.126100,158800.000000 +-117.620000,34.110000,31.000000,2561.000000,414.000000,1204.000000,435.000000,4.463700,192800.000000 +-117.620000,34.090000,26.000000,3271.000000,595.000000,2259.000000,566.000000,4.013900,132000.000000 +-117.620000,34.080000,24.000000,2801.000000,554.000000,2064.000000,529.000000,4.494600,136000.000000 +-117.620000,34.070000,16.000000,6009.000000,1599.000000,5110.000000,1389.000000,2.567700,128900.000000 +-117.620000,34.070000,15.000000,4061.000000,811.000000,2884.000000,734.000000,3.393600,127000.000000 +-117.620000,34.050000,33.000000,883.000000,211.000000,1007.000000,210.000000,2.828100,103600.000000 +-117.620000,34.030000,15.000000,3942.000000,661.000000,2240.000000,621.000000,4.831100,176000.000000 +-117.620000,34.020000,9.000000,4265.000000,587.000000,2280.000000,589.000000,5.563200,195100.000000 +-117.620000,34.020000,16.000000,2040.000000,325.000000,1207.000000,324.000000,5.043100,164100.000000 +-117.620000,33.770000,43.000000,1911.000000,439.000000,930.000000,433.000000,4.636900,186400.000000 +-117.620000,33.640000,2.000000,7826.000000,893.000000,2985.000000,790.000000,10.153100,484100.000000 +-117.620000,33.470000,4.000000,1812.000000,255.000000,661.000000,211.000000,6.487000,294200.000000 +-117.620000,33.430000,27.000000,1835.000000,413.000000,1221.000000,377.000000,3.223200,247100.000000 +-117.620000,33.430000,24.000000,1296.000000,384.000000,850.000000,367.000000,2.754500,231300.000000 +-117.620000,33.430000,23.000000,4052.000000,955.000000,1950.000000,859.000000,4.064700,240600.000000 +-117.620000,33.420000,27.000000,1444.000000,412.000000,597.000000,311.000000,3.139500,310000.000000 +-117.620000,33.420000,23.000000,2656.000000,515.000000,998.000000,435.000000,4.029400,500001.000000 
+-117.630000,34.370000,20.000000,7052.000000,1306.000000,2197.000000,810.000000,3.725200,167100.000000 +-117.630000,34.120000,4.000000,4323.000000,775.000000,1479.000000,663.000000,6.075800,226800.000000 +-117.630000,34.100000,15.000000,4799.000000,1209.000000,2554.000000,1057.000000,2.658200,122800.000000 +-117.630000,34.090000,19.000000,3490.000000,816.000000,2818.000000,688.000000,2.897700,126200.000000 +-117.630000,34.080000,38.000000,1810.000000,371.000000,1257.000000,354.000000,3.835500,111700.000000 +-117.630000,34.070000,39.000000,2650.000000,511.000000,1537.000000,495.000000,3.443200,106700.000000 +-117.630000,34.060000,39.000000,1210.000000,310.000000,1294.000000,303.000000,2.363600,88300.000000 +-117.630000,34.020000,13.000000,4864.000000,729.000000,2780.000000,723.000000,5.616800,175400.000000 +-117.630000,33.940000,36.000000,447.000000,95.000000,2886.000000,85.000000,4.257800,183300.000000 +-117.630000,33.650000,10.000000,3580.000000,491.000000,1688.000000,467.000000,7.781400,288700.000000 +-117.630000,33.630000,6.000000,3068.000000,549.000000,985.000000,536.000000,4.200900,238000.000000 +-117.630000,33.620000,9.000000,4257.000000,785.000000,1293.000000,745.000000,3.713900,196700.000000 +-117.630000,33.500000,12.000000,3619.000000,536.000000,1506.000000,492.000000,7.201300,353600.000000 +-117.630000,33.470000,4.000000,2320.000000,405.000000,1408.000000,477.000000,6.336900,256000.000000 +-117.630000,33.470000,4.000000,1969.000000,280.000000,805.000000,271.000000,7.601200,310800.000000 +-117.630000,33.460000,7.000000,7684.000000,1088.000000,2812.000000,1057.000000,6.340100,387300.000000 +-117.630000,33.450000,5.000000,3549.000000,604.000000,1571.000000,534.000000,5.370500,363500.000000 +-117.630000,33.380000,12.000000,5744.000000,1054.000000,2104.000000,847.000000,5.148200,500001.000000 +-117.640000,35.610000,10.000000,2656.000000,506.000000,1349.000000,501.000000,4.250000,83200.000000 +-117.640000,34.150000,16.000000,2896.000000,404.000000,1165.000000,379.000000,6.455900,392900.000000 +-117.640000,34.120000,18.000000,3605.000000,534.000000,1682.000000,480.000000,5.840700,202900.000000 +-117.640000,34.110000,16.000000,2129.000000,420.000000,932.000000,379.000000,2.586800,146900.000000 +-117.640000,34.080000,37.000000,2576.000000,468.000000,1284.000000,428.000000,3.395800,130400.000000 +-117.640000,34.080000,35.000000,1254.000000,241.000000,729.000000,253.000000,3.495000,118000.000000 +-117.640000,34.080000,33.000000,1987.000000,455.000000,1369.000000,475.000000,2.446400,122600.000000 +-117.640000,34.070000,52.000000,1644.000000,372.000000,1269.000000,355.000000,2.691300,108300.000000 +-117.640000,34.070000,43.000000,1970.000000,379.000000,1036.000000,391.000000,3.208300,122800.000000 +-117.640000,34.070000,38.000000,2450.000000,544.000000,1823.000000,536.000000,2.837000,111200.000000 +-117.640000,34.070000,30.000000,2787.000000,713.000000,2647.000000,693.000000,2.276500,98100.000000 +-117.640000,34.060000,50.000000,637.000000,143.000000,590.000000,147.000000,1.965900,85700.000000 +-117.640000,34.060000,43.000000,763.000000,219.000000,851.000000,198.000000,1.729200,79200.000000 +-117.640000,34.050000,32.000000,1129.000000,251.000000,1378.000000,268.000000,3.005700,96900.000000 +-117.640000,34.050000,27.000000,1407.000000,362.000000,1684.000000,350.000000,2.194400,95700.000000 +-117.640000,34.040000,21.000000,1801.000000,507.000000,2556.000000,484.000000,2.471600,102500.000000 +-117.640000,34.030000,11.000000,2050.000000,382.000000,1044.000000,371.000000,4.828100,137000.000000 
+-117.640000,34.030000,10.000000,3194.000000,579.000000,2088.000000,549.000000,4.177900,159100.000000 +-117.640000,34.020000,6.000000,248.000000,47.000000,119.000000,42.000000,2.125000,416700.000000 +-117.640000,34.020000,10.000000,4887.000000,930.000000,2637.000000,831.000000,4.061100,158000.000000 +-117.640000,33.990000,29.000000,1005.000000,152.000000,513.000000,149.000000,2.437500,181300.000000 +-117.640000,33.880000,13.000000,8010.000000,1366.000000,3920.000000,1309.000000,5.536000,204800.000000 +-117.640000,33.680000,4.000000,5687.000000,970.000000,2677.000000,938.000000,6.506900,243400.000000 +-117.640000,33.640000,11.000000,2422.000000,429.000000,810.000000,395.000000,6.193500,293200.000000 +-117.640000,33.630000,10.000000,4814.000000,643.000000,1808.000000,588.000000,8.798000,436600.000000 +-117.640000,33.620000,16.000000,3970.000000,771.000000,1202.000000,734.000000,3.411500,184800.000000 +-117.640000,33.610000,14.000000,5232.000000,810.000000,3041.000000,839.000000,5.826000,247900.000000 +-117.640000,33.590000,4.000000,3274.000000,383.000000,1312.000000,390.000000,8.161100,348000.000000 +-117.640000,33.510000,15.000000,1743.000000,254.000000,943.000000,274.000000,5.933900,286000.000000 +-117.640000,33.510000,14.000000,1343.000000,175.000000,650.000000,184.000000,7.264800,363200.000000 +-117.640000,33.490000,3.000000,2516.000000,429.000000,781.000000,337.000000,5.619700,271600.000000 +-117.640000,33.480000,12.000000,2007.000000,397.000000,1033.000000,373.000000,5.675400,275900.000000 +-117.640000,33.450000,27.000000,334.000000,56.000000,130.000000,46.000000,4.875000,284100.000000 +-117.650000,34.140000,9.000000,3877.000000,490.000000,1815.000000,490.000000,8.483900,406700.000000 +-117.650000,34.140000,16.000000,2196.000000,287.000000,949.000000,289.000000,8.657300,354000.000000 +-117.650000,34.130000,24.000000,2121.000000,296.000000,913.000000,302.000000,5.932800,255900.000000 +-117.650000,34.120000,27.000000,2298.000000,340.000000,1071.000000,369.000000,6.558700,239000.000000 +-117.650000,34.120000,17.000000,3006.000000,427.000000,1291.000000,406.000000,6.208300,242700.000000 +-117.650000,34.110000,29.000000,2927.000000,634.000000,1710.000000,623.000000,3.621900,176600.000000 +-117.650000,34.110000,28.000000,2788.000000,370.000000,1140.000000,385.000000,5.336800,233500.000000 +-117.650000,34.100000,44.000000,1526.000000,337.000000,831.000000,326.000000,3.028400,115800.000000 +-117.650000,34.100000,30.000000,1461.000000,341.000000,1014.000000,345.000000,2.466700,106000.000000 +-117.650000,34.100000,19.000000,1688.000000,365.000000,622.000000,322.000000,3.600000,136400.000000 +-117.650000,34.090000,46.000000,1214.000000,281.000000,701.000000,294.000000,2.708300,116300.000000 +-117.650000,34.090000,33.000000,2446.000000,396.000000,1209.000000,412.000000,4.395800,145600.000000 +-117.650000,34.080000,52.000000,2264.000000,439.000000,1031.000000,437.000000,3.375000,144300.000000 +-117.650000,34.080000,40.000000,1609.000000,258.000000,624.000000,242.000000,5.468900,158200.000000 +-117.650000,34.080000,38.000000,2750.000000,572.000000,1410.000000,483.000000,3.383600,144900.000000 +-117.650000,34.080000,35.000000,2621.000000,391.000000,1074.000000,391.000000,4.717600,166400.000000 +-117.650000,34.070000,52.000000,1041.000000,252.000000,558.000000,231.000000,1.923600,117200.000000 +-117.650000,34.070000,35.000000,2501.000000,651.000000,1182.000000,591.000000,1.446400,113200.000000 +-117.650000,34.060000,41.000000,465.000000,130.000000,349.000000,138.000000,2.089300,112500.000000 
+-117.650000,34.060000,41.000000,1171.000000,334.000000,1479.000000,334.000000,2.250000,90500.000000 +-117.650000,34.040000,28.000000,2360.000000,607.000000,2623.000000,592.000000,2.504800,100000.000000 +-117.650000,34.040000,15.000000,3393.000000,653.000000,2039.000000,611.000000,3.933600,151000.000000 +-117.650000,34.030000,15.000000,4420.000000,903.000000,2373.000000,858.000000,3.449000,149100.000000 +-117.650000,34.020000,9.000000,2107.000000,411.000000,1138.000000,389.000000,4.404200,159100.000000 +-117.650000,33.650000,16.000000,1538.000000,260.000000,835.000000,259.000000,5.577900,234800.000000 +-117.650000,33.650000,15.000000,4713.000000,671.000000,2197.000000,673.000000,7.542600,294800.000000 +-117.650000,33.650000,15.000000,3485.000000,519.000000,1740.000000,485.000000,6.754300,251900.000000 +-117.650000,33.630000,16.000000,3388.000000,425.000000,1395.000000,427.000000,8.447100,351300.000000 +-117.650000,33.620000,15.000000,2708.000000,410.000000,1140.000000,389.000000,6.289900,275000.000000 +-117.650000,33.600000,15.000000,5736.000000,800.000000,2529.000000,762.000000,6.411400,278700.000000 +-117.650000,33.600000,13.000000,2319.000000,430.000000,1004.000000,380.000000,5.133000,316100.000000 +-117.650000,33.590000,8.000000,2649.000000,340.000000,1238.000000,354.000000,8.040900,337900.000000 +-117.650000,33.580000,2.000000,2411.000000,354.000000,703.000000,217.000000,7.806100,331400.000000 +-117.650000,33.530000,7.000000,6814.000000,785.000000,2175.000000,681.000000,10.490000,500001.000000 +-117.650000,33.490000,16.000000,2223.000000,454.000000,628.000000,382.000000,4.360300,248800.000000 +-117.650000,33.480000,6.000000,1638.000000,188.000000,572.000000,174.000000,13.050200,500001.000000 +-117.650000,33.480000,10.000000,3484.000000,582.000000,1469.000000,556.000000,5.418800,402200.000000 +-117.650000,33.450000,15.000000,7468.000000,1275.000000,3033.000000,1217.000000,5.490000,239300.000000 +-117.650000,33.420000,25.000000,2174.000000,428.000000,603.000000,352.000000,3.396700,249400.000000 +-117.650000,33.400000,17.000000,2737.000000,654.000000,910.000000,492.000000,3.572900,370800.000000 +-117.660000,35.630000,33.000000,2579.000000,564.000000,1155.000000,431.000000,2.044100,42100.000000 +-117.660000,35.620000,11.000000,5897.000000,1138.000000,2728.000000,1072.000000,4.150000,85700.000000 +-117.660000,35.610000,5.000000,5735.000000,932.000000,2623.000000,862.000000,4.849400,87200.000000 +-117.660000,35.600000,14.000000,1740.000000,391.000000,850.000000,317.000000,2.581200,91700.000000 +-117.660000,34.150000,25.000000,3430.000000,485.000000,1284.000000,438.000000,8.528200,360100.000000 +-117.660000,34.150000,20.000000,2524.000000,311.000000,965.000000,285.000000,8.010300,395500.000000 +-117.660000,34.140000,8.000000,1692.000000,253.000000,857.000000,251.000000,6.941800,310500.000000 +-117.660000,34.140000,11.000000,3628.000000,469.000000,1488.000000,463.000000,7.084400,325000.000000 +-117.660000,34.130000,19.000000,3995.000000,554.000000,1523.000000,509.000000,6.075000,254100.000000 +-117.660000,34.130000,17.000000,3229.000000,405.000000,1289.000000,407.000000,6.384200,307100.000000 +-117.660000,34.120000,22.000000,2272.000000,278.000000,933.000000,305.000000,8.820400,390500.000000 +-117.660000,34.100000,37.000000,1971.000000,345.000000,939.000000,358.000000,3.463400,145300.000000 +-117.660000,34.100000,26.000000,1855.000000,553.000000,1109.000000,536.000000,2.242900,150000.000000 +-117.660000,34.090000,26.000000,1151.000000,200.000000,593.000000,188.000000,3.666700,166300.000000 
+-117.660000,34.090000,23.000000,1426.000000,313.000000,954.000000,319.000000,3.035700,151500.000000 +-117.660000,34.090000,20.000000,2462.000000,496.000000,1117.000000,458.000000,3.232100,162500.000000 +-117.660000,34.080000,33.000000,3659.000000,590.000000,1773.000000,615.000000,3.922700,157200.000000 +-117.660000,34.070000,37.000000,2454.000000,511.000000,1165.000000,504.000000,2.947400,139600.000000 +-117.660000,34.070000,36.000000,2072.000000,408.000000,964.000000,395.000000,2.870200,137000.000000 +-117.660000,34.070000,33.000000,2081.000000,409.000000,1008.000000,375.000000,2.587000,138100.000000 +-117.660000,34.060000,24.000000,4043.000000,952.000000,2174.000000,859.000000,2.224400,114900.000000 +-117.660000,34.050000,6.000000,5129.000000,1119.000000,2533.000000,949.000000,3.625000,113600.000000 +-117.660000,34.050000,36.000000,2341.000000,520.000000,2138.000000,523.000000,2.334700,104000.000000 +-117.660000,34.050000,33.000000,960.000000,216.000000,831.000000,222.000000,2.539100,108600.000000 +-117.660000,34.040000,16.000000,2081.000000,348.000000,1332.000000,356.000000,4.787200,147600.000000 +-117.660000,34.040000,10.000000,3657.000000,695.000000,2079.000000,663.000000,4.205400,159900.000000 +-117.660000,34.030000,14.000000,2137.000000,345.000000,1151.000000,352.000000,5.753000,185500.000000 +-117.660000,34.020000,12.000000,5616.000000,871.000000,3019.000000,782.000000,5.542500,202300.000000 +-117.660000,34.020000,11.000000,3358.000000,504.000000,1690.000000,482.000000,6.754400,207900.000000 +-117.660000,34.000000,5.000000,1387.000000,236.000000,855.000000,270.000000,5.411000,201100.000000 +-117.660000,33.650000,13.000000,8527.000000,1364.000000,4597.000000,1393.000000,6.224200,237900.000000 +-117.660000,33.640000,17.000000,3173.000000,501.000000,1555.000000,520.000000,6.707900,250800.000000 +-117.660000,33.630000,16.000000,3299.000000,610.000000,1967.000000,604.000000,5.508500,223300.000000 +-117.660000,33.620000,16.000000,5175.000000,799.000000,2717.000000,813.000000,6.149300,257800.000000 +-117.660000,33.620000,16.000000,4065.000000,661.000000,1962.000000,636.000000,6.217700,256600.000000 +-117.660000,33.610000,21.000000,1932.000000,266.000000,860.000000,286.000000,7.149700,274000.000000 +-117.660000,33.600000,25.000000,3745.000000,522.000000,1648.000000,496.000000,7.548800,278100.000000 +-117.660000,33.600000,24.000000,1684.000000,232.000000,781.000000,230.000000,6.866700,279600.000000 +-117.660000,33.590000,18.000000,4552.000000,706.000000,1918.000000,671.000000,7.579100,288100.000000 +-117.660000,33.580000,6.000000,4186.000000,515.000000,1794.000000,541.000000,9.698600,357600.000000 +-117.660000,33.580000,16.000000,3016.000000,394.000000,1172.000000,382.000000,7.519600,315600.000000 +-117.660000,33.570000,16.000000,4277.000000,565.000000,1642.000000,549.000000,8.008200,286600.000000 +-117.660000,33.570000,16.000000,2483.000000,443.000000,1357.000000,400.000000,5.554500,214200.000000 +-117.660000,33.510000,18.000000,2626.000000,530.000000,1302.000000,522.000000,4.016700,189600.000000 +-117.660000,33.480000,22.000000,809.000000,180.000000,334.000000,157.000000,2.384600,500001.000000 +-117.660000,33.460000,28.000000,1261.000000,233.000000,609.000000,242.000000,5.102400,312700.000000 +-117.660000,33.460000,26.000000,2073.000000,370.000000,952.000000,340.000000,5.087700,288100.000000 +-117.670000,35.650000,18.000000,2737.000000,589.000000,1128.000000,533.000000,2.800000,72000.000000 
+-117.670000,35.640000,6.000000,2115.000000,342.000000,927.000000,337.000000,6.193500,115700.000000 +-117.670000,35.630000,32.000000,3661.000000,787.000000,1613.000000,706.000000,3.068700,63500.000000 +-117.670000,34.130000,10.000000,2846.000000,362.000000,1221.000000,355.000000,7.723400,304100.000000 +-117.670000,34.120000,15.000000,3162.000000,495.000000,1145.000000,473.000000,5.352500,191700.000000 +-117.670000,34.100000,28.000000,1263.000000,248.000000,601.000000,219.000000,3.875000,174000.000000 +-117.670000,34.100000,19.000000,2969.000000,605.000000,1326.000000,573.000000,4.343800,155700.000000 +-117.670000,34.090000,17.000000,4418.000000,1256.000000,2417.000000,1094.000000,2.726600,101000.000000 +-117.670000,34.070000,29.000000,1840.000000,388.000000,1278.000000,368.000000,3.503600,123400.000000 +-117.670000,34.060000,26.000000,1592.000000,429.000000,1182.000000,365.000000,2.458300,110400.000000 +-117.670000,34.050000,6.000000,2833.000000,628.000000,1717.000000,589.000000,3.206200,167500.000000 +-117.670000,34.040000,16.000000,3260.000000,501.000000,1973.000000,535.000000,4.656300,162000.000000 +-117.670000,34.040000,13.000000,2295.000000,374.000000,1284.000000,378.000000,5.255100,194300.000000 +-117.670000,34.040000,13.000000,1543.000000,383.000000,776.000000,358.000000,3.059800,99700.000000 +-117.670000,33.670000,5.000000,10534.000000,2035.000000,4656.000000,1863.000000,5.779700,309200.000000 +-117.670000,33.660000,4.000000,10175.000000,2181.000000,4762.000000,1929.000000,4.734100,237400.000000 +-117.670000,33.630000,9.000000,5774.000000,1320.000000,3086.000000,1265.000000,4.406300,202200.000000 +-117.670000,33.610000,24.000000,3859.000000,661.000000,1972.000000,624.000000,5.787100,227400.000000 +-117.670000,33.610000,23.000000,3588.000000,577.000000,1695.000000,569.000000,6.140100,243200.000000 +-117.670000,33.600000,25.000000,3164.000000,449.000000,1517.000000,453.000000,6.792100,266000.000000 +-117.670000,33.600000,20.000000,1213.000000,171.000000,565.000000,170.000000,7.259200,314800.000000 +-117.670000,33.570000,18.000000,1614.000000,210.000000,692.000000,209.000000,7.929400,280300.000000 +-117.670000,33.560000,4.000000,3289.000000,728.000000,1345.000000,632.000000,4.686300,184400.000000 +-117.670000,33.550000,6.000000,3157.000000,721.000000,1695.000000,710.000000,3.760900,222300.000000 +-117.670000,33.540000,16.000000,2102.000000,350.000000,1003.000000,328.000000,4.798100,170800.000000 +-117.670000,33.540000,15.000000,2423.000000,435.000000,1366.000000,423.000000,4.890600,181800.000000 +-117.670000,33.510000,19.000000,1258.000000,246.000000,545.000000,227.000000,2.976200,184400.000000 +-117.670000,33.510000,18.000000,1645.000000,393.000000,1490.000000,355.000000,3.479200,126400.000000 +-117.670000,33.490000,15.000000,2782.000000,579.000000,983.000000,525.000000,2.193500,183300.000000 +-117.670000,33.490000,10.000000,366.000000,61.000000,128.000000,61.000000,8.163000,250000.000000 +-117.670000,33.470000,22.000000,2728.000000,616.000000,1081.000000,566.000000,1.639300,500001.000000 +-117.670000,33.460000,24.000000,3571.000000,722.000000,1409.000000,543.000000,4.651800,277800.000000 +-117.670000,33.460000,18.000000,1679.000000,271.000000,783.000000,257.000000,5.399900,300000.000000 +-117.670000,33.440000,25.000000,2994.000000,519.000000,903.000000,410.000000,6.685200,500001.000000 +-117.680000,35.640000,15.000000,3253.000000,573.000000,1408.000000,586.000000,5.204300,95700.000000 
+-117.680000,35.550000,9.000000,3811.000000,605.000000,1518.000000,568.000000,5.555100,142500.000000 +-117.680000,35.030000,28.000000,2969.000000,648.000000,1644.000000,570.000000,3.433800,54900.000000 +-117.680000,34.990000,33.000000,1589.000000,307.000000,853.000000,272.000000,4.229200,64400.000000 +-117.680000,34.150000,4.000000,4082.000000,578.000000,1996.000000,580.000000,6.781300,286300.000000 +-117.680000,34.150000,24.000000,1033.000000,189.000000,486.000000,204.000000,4.171900,213500.000000 +-117.680000,34.140000,4.000000,4791.000000,695.000000,1871.000000,659.000000,6.953200,277000.000000 +-117.680000,34.120000,16.000000,2181.000000,321.000000,1133.000000,350.000000,5.721400,259400.000000 +-117.680000,34.110000,16.000000,3190.000000,471.000000,1414.000000,464.000000,5.529200,208600.000000 +-117.680000,34.100000,11.000000,7392.000000,1796.000000,3841.000000,1621.000000,2.832600,163000.000000 +-117.680000,34.090000,22.000000,1547.000000,334.000000,773.000000,316.000000,2.981200,148800.000000 +-117.680000,34.080000,28.000000,2459.000000,492.000000,1230.000000,498.000000,3.097800,137200.000000 +-117.680000,34.080000,21.000000,5662.000000,1185.000000,3067.000000,1055.000000,3.348200,137300.000000 +-117.680000,34.070000,32.000000,1775.000000,314.000000,1067.000000,302.000000,4.037500,121300.000000 +-117.680000,34.070000,24.000000,2626.000000,692.000000,2204.000000,647.000000,1.780600,135000.000000 +-117.680000,34.050000,25.000000,1859.000000,463.000000,1070.000000,374.000000,2.539500,187500.000000 +-117.680000,34.040000,27.000000,574.000000,103.000000,321.000000,103.000000,3.910700,186500.000000 +-117.680000,34.030000,16.000000,2859.000000,668.000000,1946.000000,591.000000,3.039600,124300.000000 +-117.680000,34.010000,20.000000,7326.000000,1555.000000,5718.000000,1538.000000,3.207300,123500.000000 +-117.680000,33.630000,16.000000,5218.000000,1187.000000,2701.000000,1125.000000,3.929000,143100.000000 +-117.680000,33.630000,13.000000,5830.000000,921.000000,2897.000000,891.000000,6.240300,257400.000000 +-117.680000,33.610000,19.000000,2962.000000,405.000000,1295.000000,440.000000,6.068900,248000.000000 +-117.680000,33.600000,24.000000,1956.000000,262.000000,969.000000,256.000000,6.815400,265900.000000 +-117.680000,33.600000,19.000000,3913.000000,460.000000,1646.000000,454.000000,7.214700,303900.000000 +-117.680000,33.590000,8.000000,2327.000000,263.000000,899.000000,236.000000,14.900900,500001.000000 +-117.680000,33.590000,12.000000,3473.000000,466.000000,1569.000000,450.000000,8.863600,314000.000000 +-117.680000,33.570000,2.000000,10008.000000,1453.000000,3550.000000,1139.000000,10.112200,500001.000000 +-117.680000,33.550000,5.000000,2262.000000,427.000000,1016.000000,402.000000,6.065000,315500.000000 +-117.680000,33.540000,5.000000,2840.000000,403.000000,1363.000000,403.000000,7.618000,341400.000000 +-117.680000,33.520000,5.000000,3621.000000,632.000000,1546.000000,567.000000,5.753000,322800.000000 +-117.680000,33.510000,4.000000,2428.000000,401.000000,959.000000,386.000000,6.266100,268500.000000 +-117.680000,33.510000,19.000000,2930.000000,428.000000,1481.000000,430.000000,6.323000,480800.000000 +-117.680000,33.490000,18.000000,4173.000000,625.000000,1649.000000,634.000000,6.356800,294300.000000 +-117.680000,33.490000,17.000000,2232.000000,372.000000,1072.000000,385.000000,4.245000,214500.000000 +-117.680000,33.490000,16.000000,3084.000000,724.000000,2557.000000,690.000000,2.835700,106300.000000 
+-117.680000,33.480000,15.000000,1786.000000,299.000000,727.000000,293.000000,5.052700,231400.000000 +-117.680000,33.470000,7.000000,4458.000000,731.000000,1731.000000,704.000000,6.126000,285600.000000 +-117.690000,36.130000,25.000000,1709.000000,439.000000,632.000000,292.000000,1.786800,45500.000000 +-117.690000,35.650000,5.000000,1131.000000,276.000000,520.000000,232.000000,4.016700,87500.000000 +-117.690000,35.630000,5.000000,3151.000000,482.000000,1335.000000,428.000000,5.577300,109000.000000 +-117.690000,34.130000,8.000000,2915.000000,371.000000,1271.000000,354.000000,7.962700,345400.000000 +-117.690000,34.110000,16.000000,2427.000000,522.000000,794.000000,491.000000,2.692900,119300.000000 +-117.690000,34.100000,17.000000,3759.000000,1035.000000,1722.000000,847.000000,2.607400,137500.000000 +-117.690000,34.090000,28.000000,1437.000000,295.000000,724.000000,262.000000,2.725000,140200.000000 +-117.690000,34.080000,30.000000,4255.000000,773.000000,2129.000000,730.000000,4.518500,142500.000000 +-117.690000,34.080000,14.000000,4136.000000,886.000000,2026.000000,788.000000,3.234400,128200.000000 +-117.690000,34.060000,29.000000,873.000000,226.000000,649.000000,198.000000,2.798600,114400.000000 +-117.690000,34.060000,25.000000,1881.000000,433.000000,1337.000000,417.000000,2.553600,144000.000000 +-117.690000,34.050000,10.000000,1875.000000,366.000000,1055.000000,363.000000,4.326400,128900.000000 +-117.690000,34.000000,28.000000,707.000000,154.000000,561.000000,129.000000,2.578100,111600.000000 +-117.690000,33.800000,5.000000,3178.000000,631.000000,1467.000000,581.000000,5.254100,237100.000000 +-117.690000,33.660000,5.000000,4246.000000,689.000000,1933.000000,722.000000,6.950100,225700.000000 +-117.690000,33.660000,11.000000,2630.000000,327.000000,1256.000000,352.000000,8.295300,350500.000000 +-117.690000,33.640000,18.000000,3783.000000,654.000000,1843.000000,623.000000,5.755900,215800.000000 +-117.690000,33.640000,16.000000,2592.000000,372.000000,1279.000000,383.000000,6.974100,262000.000000 +-117.690000,33.620000,18.000000,4265.000000,581.000000,2025.000000,544.000000,6.459800,282700.000000 +-117.690000,33.610000,16.000000,3010.000000,580.000000,1649.000000,538.000000,4.022100,236200.000000 +-117.690000,33.600000,17.000000,2150.000000,361.000000,1194.000000,335.000000,5.462200,227000.000000 +-117.690000,33.600000,16.000000,2205.000000,393.000000,1333.000000,402.000000,3.475000,279500.000000 +-117.690000,33.600000,12.000000,3258.000000,421.000000,1464.000000,435.000000,6.541300,332000.000000 +-117.690000,33.590000,13.000000,3320.000000,426.000000,1432.000000,431.000000,7.928300,348100.000000 +-117.690000,33.580000,5.000000,6678.000000,1011.000000,2877.000000,982.000000,7.517700,330000.000000 +-117.690000,33.550000,9.000000,3856.000000,571.000000,1646.000000,576.000000,6.800700,318300.000000 +-117.690000,33.550000,4.000000,1764.000000,220.000000,705.000000,224.000000,8.327500,384200.000000 +-117.690000,33.540000,20.000000,1767.000000,280.000000,801.000000,284.000000,6.539400,272000.000000 +-117.690000,33.530000,17.000000,5041.000000,778.000000,2396.000000,801.000000,6.086800,282900.000000 +-117.690000,33.520000,4.000000,2142.000000,625.000000,1176.000000,483.000000,3.445500,325000.000000 +-117.690000,33.520000,3.000000,7374.000000,1444.000000,3214.000000,1279.000000,4.538000,278200.000000 +-117.690000,33.510000,4.000000,1223.000000,275.000000,505.000000,244.000000,4.660700,173000.000000 +-117.690000,33.470000,23.000000,3499.000000,722.000000,1480.000000,634.000000,3.860000,300000.000000 
+-117.690000,33.470000,19.000000,2595.000000,621.000000,1728.000000,571.000000,3.668000,243800.000000 +-117.690000,33.470000,13.000000,2020.000000,378.000000,679.000000,290.000000,5.756000,305600.000000 +-117.700000,35.640000,8.000000,2683.000000,416.000000,1154.000000,399.000000,5.862500,109400.000000 +-117.700000,35.620000,18.000000,2657.000000,496.000000,1426.000000,483.000000,3.593100,71900.000000 +-117.700000,35.600000,16.000000,2678.000000,483.000000,1473.000000,487.000000,3.858000,70200.000000 +-117.700000,34.090000,25.000000,1719.000000,331.000000,1098.000000,324.000000,3.625000,121800.000000 +-117.700000,34.080000,33.000000,4674.000000,791.000000,2769.000000,784.000000,4.144800,137300.000000 +-117.700000,34.080000,10.000000,1979.000000,454.000000,1117.000000,389.000000,3.780200,107300.000000 +-117.700000,34.070000,33.000000,1552.000000,288.000000,1326.000000,303.000000,3.796900,128400.000000 +-117.700000,34.060000,7.000000,732.000000,145.000000,431.000000,132.000000,2.910700,95300.000000 +-117.700000,34.060000,25.000000,2054.000000,609.000000,2271.000000,564.000000,2.304900,150000.000000 +-117.700000,34.050000,24.000000,2834.000000,470.000000,1815.000000,471.000000,4.735700,162500.000000 +-117.700000,34.040000,13.000000,5301.000000,1025.000000,2870.000000,984.000000,3.595400,163000.000000 +-117.700000,34.000000,15.000000,4905.000000,711.000000,2711.000000,762.000000,5.702100,193100.000000 +-117.700000,33.920000,4.000000,8301.000000,1333.000000,3941.000000,1236.000000,6.214100,252200.000000 +-117.700000,33.720000,6.000000,211.000000,51.000000,125.000000,44.000000,1.965900,500001.000000 +-117.700000,33.650000,16.000000,3388.000000,492.000000,1249.000000,463.000000,6.186300,355600.000000 +-117.700000,33.630000,23.000000,3038.000000,473.000000,1501.000000,436.000000,5.558400,241700.000000 +-117.700000,33.630000,16.000000,4428.000000,745.000000,1525.000000,682.000000,5.232500,286800.000000 +-117.700000,33.620000,19.000000,2957.000000,492.000000,1639.000000,495.000000,5.068600,225600.000000 +-117.700000,33.620000,16.000000,9653.000000,2000.000000,4732.000000,1922.000000,3.736100,197200.000000 +-117.700000,33.600000,26.000000,1021.000000,230.000000,301.000000,208.000000,2.625000,80600.000000 +-117.700000,33.600000,16.000000,2092.000000,489.000000,877.000000,392.000000,3.046100,216900.000000 +-117.700000,33.590000,11.000000,8039.000000,1717.000000,3445.000000,1571.000000,4.167800,190900.000000 +-117.700000,33.570000,9.000000,1204.000000,355.000000,469.000000,293.000000,3.619600,119900.000000 +-117.700000,33.570000,4.000000,3283.000000,911.000000,1512.000000,782.000000,3.312500,138500.000000 +-117.700000,33.560000,3.000000,2443.000000,637.000000,1033.000000,548.000000,4.137900,183300.000000 +-117.700000,33.560000,2.000000,2112.000000,305.000000,703.000000,261.000000,6.934300,298500.000000 +-117.700000,33.550000,12.000000,2459.000000,390.000000,1054.000000,391.000000,7.173600,262100.000000 +-117.700000,33.510000,2.000000,5261.000000,763.000000,1460.000000,599.000000,6.827900,279000.000000 +-117.700000,33.500000,4.000000,7474.000000,1037.000000,2969.000000,1007.000000,8.759100,434700.000000 +-117.700000,33.500000,4.000000,2351.000000,445.000000,834.000000,397.000000,5.567700,245400.000000 +-117.700000,33.480000,10.000000,3458.000000,638.000000,1156.000000,470.000000,6.357900,336700.000000 +-117.700000,33.470000,21.000000,2208.000000,534.000000,1423.000000,482.000000,3.591500,305600.000000 +-117.700000,33.470000,21.000000,1857.000000,399.000000,881.000000,380.000000,3.840300,350000.000000 
+-117.700000,33.470000,20.000000,1577.000000,363.000000,764.000000,333.000000,4.156300,320800.000000 +-117.710000,34.150000,17.000000,17715.000000,2370.000000,7665.000000,2312.000000,7.906800,349100.000000 +-117.710000,34.120000,20.000000,11250.000000,1893.000000,4952.000000,1859.000000,5.678500,239500.000000 +-117.710000,34.100000,52.000000,567.000000,152.000000,2688.000000,126.000000,1.875000,212500.000000 +-117.710000,34.100000,41.000000,555.000000,130.000000,1492.000000,123.000000,2.281300,125000.000000 +-117.710000,34.090000,36.000000,2637.000000,476.000000,1385.000000,483.000000,4.173900,158700.000000 +-117.710000,34.080000,29.000000,1276.000000,283.000000,1216.000000,316.000000,2.597200,134300.000000 +-117.710000,34.080000,26.000000,2744.000000,494.000000,1411.000000,465.000000,4.263900,154200.000000 +-117.710000,34.070000,31.000000,1840.000000,380.000000,1187.000000,357.000000,3.887500,129200.000000 +-117.710000,34.070000,24.000000,1948.000000,362.000000,1286.000000,364.000000,3.600000,139300.000000 +-117.710000,34.060000,16.000000,1458.000000,295.000000,912.000000,331.000000,3.625000,160400.000000 +-117.710000,34.040000,20.000000,1950.000000,310.000000,1054.000000,312.000000,4.625000,222100.000000 +-117.710000,34.040000,17.000000,4098.000000,733.000000,1859.000000,713.000000,2.981100,231800.000000 +-117.710000,34.030000,11.000000,3467.000000,749.000000,2163.000000,676.000000,3.426700,164400.000000 +-117.710000,33.970000,10.000000,10856.000000,2278.000000,6474.000000,2199.000000,3.851000,137200.000000 +-117.710000,33.640000,14.000000,2945.000000,356.000000,1293.000000,335.000000,8.111000,308900.000000 +-117.710000,33.630000,16.000000,2497.000000,500.000000,1357.000000,456.000000,4.590900,241800.000000 +-117.710000,33.620000,22.000000,2520.000000,387.000000,1338.000000,391.000000,5.889800,242800.000000 +-117.710000,33.610000,26.000000,3046.000000,726.000000,888.000000,663.000000,2.684800,74100.000000 +-117.710000,33.610000,26.000000,2280.000000,550.000000,669.000000,502.000000,2.343800,72300.000000 +-117.710000,33.600000,8.000000,3329.000000,753.000000,1312.000000,629.000000,3.552100,229800.000000 +-117.710000,33.600000,25.000000,3011.000000,714.000000,893.000000,654.000000,2.338700,74800.000000 +-117.710000,33.580000,2.000000,2530.000000,562.000000,1066.000000,510.000000,4.633600,187500.000000 +-117.710000,33.570000,6.000000,3673.000000,881.000000,1846.000000,768.000000,4.877000,144300.000000 +-117.710000,33.570000,4.000000,3289.000000,753.000000,1285.000000,651.000000,4.045000,226000.000000 +-117.710000,33.540000,7.000000,4907.000000,577.000000,1883.000000,556.000000,10.441500,453800.000000 +-117.710000,33.540000,15.000000,2460.000000,368.000000,962.000000,320.000000,7.387800,318300.000000 +-117.710000,33.520000,17.000000,2486.000000,417.000000,876.000000,361.000000,6.100700,340900.000000 +-117.710000,33.510000,11.000000,2198.000000,252.000000,883.000000,281.000000,13.147700,487000.000000 +-117.710000,33.490000,5.000000,1680.000000,254.000000,617.000000,231.000000,8.583000,397700.000000 +-117.710000,33.470000,17.000000,2681.000000,454.000000,830.000000,410.000000,5.550700,345700.000000 +-117.710000,33.470000,14.000000,3894.000000,672.000000,1490.000000,629.000000,6.520600,368500.000000 +-117.720000,34.100000,52.000000,2867.000000,496.000000,978.000000,513.000000,3.147700,291200.000000 +-117.720000,34.100000,32.000000,3241.000000,895.000000,1592.000000,810.000000,2.495200,181800.000000 
+-117.720000,34.090000,36.000000,1473.000000,328.000000,785.000000,299.000000,3.256600,151800.000000 +-117.720000,34.080000,34.000000,2742.000000,491.000000,1761.000000,496.000000,3.248100,128800.000000 +-117.720000,34.070000,33.000000,4100.000000,740.000000,2580.000000,730.000000,3.732100,134200.000000 +-117.720000,34.060000,32.000000,2209.000000,654.000000,1718.000000,569.000000,1.964300,113200.000000 +-117.720000,34.050000,8.000000,1841.000000,409.000000,1243.000000,394.000000,4.061400,107000.000000 +-117.720000,34.050000,31.000000,2220.000000,526.000000,1662.000000,472.000000,2.732100,104300.000000 +-117.720000,34.030000,17.000000,2902.000000,476.000000,1652.000000,479.000000,5.602900,161800.000000 +-117.720000,34.020000,17.000000,1781.000000,262.000000,860.000000,256.000000,6.595800,236800.000000 +-117.720000,34.000000,15.000000,4363.000000,690.000000,2410.000000,666.000000,5.482400,179700.000000 +-117.720000,33.990000,14.000000,5622.000000,861.000000,3108.000000,821.000000,5.776300,206700.000000 +-117.720000,33.970000,16.000000,13290.000000,2062.000000,6931.000000,2023.000000,5.228000,187800.000000 +-117.720000,33.640000,16.000000,1230.000000,242.000000,380.000000,246.000000,2.296900,67500.000000 +-117.720000,33.630000,15.000000,1362.000000,255.000000,378.000000,202.000000,1.900000,162500.000000 +-117.720000,33.620000,21.000000,2322.000000,518.000000,662.000000,457.000000,3.167900,110000.000000 +-117.720000,33.620000,19.000000,5777.000000,1261.000000,1711.000000,1225.000000,2.763400,86900.000000 +-117.720000,33.620000,19.000000,1144.000000,268.000000,365.000000,279.000000,2.858300,105800.000000 +-117.720000,33.610000,26.000000,2653.000000,621.000000,774.000000,584.000000,2.490000,81100.000000 +-117.720000,33.610000,26.000000,2033.000000,463.000000,618.000000,450.000000,2.568500,80400.000000 +-117.720000,33.540000,13.000000,4866.000000,812.000000,1909.000000,733.000000,4.982100,244800.000000 +-117.720000,33.530000,14.000000,1672.000000,295.000000,704.000000,293.000000,5.112900,251300.000000 +-117.720000,33.510000,17.000000,3617.000000,597.000000,1176.000000,571.000000,5.133000,324000.000000 +-117.720000,33.490000,4.000000,3623.000000,734.000000,1129.000000,530.000000,5.728100,500001.000000 +-117.720000,33.430000,5.000000,1889.000000,359.000000,616.000000,246.000000,3.899200,500001.000000 +-117.730000,35.730000,35.000000,2916.000000,594.000000,1870.000000,432.000000,3.625000,55000.000000 +-117.730000,34.120000,26.000000,6459.000000,894.000000,2487.000000,885.000000,6.208900,261800.000000 +-117.730000,34.100000,37.000000,3457.000000,544.000000,1344.000000,530.000000,5.889100,226000.000000 +-117.730000,34.090000,36.000000,1543.000000,297.000000,1355.000000,303.000000,3.531300,117800.000000 +-117.730000,34.090000,30.000000,2345.000000,496.000000,1897.000000,454.000000,2.437500,112100.000000 +-117.730000,34.080000,33.000000,1350.000000,265.000000,1251.000000,257.000000,2.906300,115200.000000 +-117.730000,34.080000,28.000000,5173.000000,1069.000000,3502.000000,954.000000,3.843800,130800.000000 +-117.730000,34.070000,34.000000,4038.000000,725.000000,2716.000000,759.000000,4.133900,135000.000000 +-117.730000,34.070000,33.000000,1921.000000,489.000000,1430.000000,467.000000,2.340600,122600.000000 +-117.730000,34.070000,33.000000,1025.000000,261.000000,854.000000,269.000000,2.259600,119400.000000 +-117.730000,34.060000,51.000000,498.000000,115.000000,368.000000,112.000000,1.406300,98800.000000 +-117.730000,34.060000,34.000000,344.000000,108.000000,315.000000,119.000000,3.178600,117800.000000 
+-117.730000,34.050000,36.000000,975.000000,243.000000,809.000000,233.000000,2.892900,118100.000000 +-117.730000,34.050000,28.000000,2758.000000,771.000000,2877.000000,694.000000,2.073400,113300.000000 +-117.730000,34.040000,26.000000,3827.000000,814.000000,3367.000000,810.000000,3.150000,129700.000000 +-117.730000,34.030000,42.000000,1967.000000,378.000000,1459.000000,348.000000,3.037500,118100.000000 +-117.730000,34.010000,36.000000,2340.000000,392.000000,1213.000000,388.000000,4.125000,213000.000000 +-117.730000,33.610000,17.000000,2612.000000,582.000000,832.000000,564.000000,2.675900,120600.000000 +-117.730000,33.610000,16.000000,590.000000,130.000000,178.000000,121.000000,4.861100,186800.000000 +-117.730000,33.530000,3.000000,6388.000000,920.000000,2129.000000,819.000000,7.891500,420600.000000 +-117.730000,33.510000,5.000000,4549.000000,786.000000,1238.000000,632.000000,6.178500,295900.000000 +-117.730000,33.490000,31.000000,5112.000000,778.000000,1530.000000,648.000000,10.398300,500001.000000 +-117.730000,33.490000,17.000000,2168.000000,290.000000,654.000000,279.000000,9.832100,500001.000000 +-117.740000,35.650000,15.000000,2357.000000,484.000000,1110.000000,442.000000,3.175500,81700.000000 +-117.740000,34.110000,28.000000,3494.000000,566.000000,1391.000000,522.000000,5.363700,214700.000000 +-117.740000,34.100000,29.000000,2742.000000,488.000000,2477.000000,532.000000,3.507200,121900.000000 +-117.740000,34.100000,26.000000,2723.000000,604.000000,1847.000000,498.000000,2.677900,136000.000000 +-117.740000,34.080000,35.000000,1613.000000,298.000000,911.000000,293.000000,3.439800,134300.000000 +-117.740000,34.070000,52.000000,1868.000000,316.000000,947.000000,328.000000,4.241500,140100.000000 +-117.740000,34.070000,42.000000,2504.000000,553.000000,1550.000000,509.000000,3.029400,135700.000000 +-117.740000,34.060000,48.000000,2438.000000,599.000000,1508.000000,548.000000,2.898300,129200.000000 +-117.740000,34.060000,4.000000,1391.000000,506.000000,727.000000,369.000000,1.472200,137500.000000 +-117.740000,34.050000,30.000000,1185.000000,317.000000,1466.000000,302.000000,2.625000,94300.000000 +-117.740000,34.050000,29.000000,2452.000000,700.000000,3029.000000,665.000000,2.135400,110700.000000 +-117.740000,34.030000,27.000000,3623.000000,809.000000,3712.000000,754.000000,3.460900,123300.000000 +-117.740000,34.020000,33.000000,2318.000000,464.000000,1904.000000,451.000000,3.745400,116400.000000 +-117.740000,33.970000,4.000000,9755.000000,1748.000000,4662.000000,1583.000000,5.650100,254900.000000 +-117.740000,33.890000,4.000000,37937.000000,5471.000000,16122.000000,5189.000000,7.494700,366300.000000 +-117.740000,33.850000,4.000000,5416.000000,820.000000,1753.000000,583.000000,6.954400,314000.000000 +-117.740000,33.730000,18.000000,328.000000,68.000000,391.000000,60.000000,4.116700,87500.000000 +-117.740000,33.620000,16.000000,4134.000000,740.000000,2103.000000,745.000000,5.687700,231400.000000 +-117.740000,33.620000,16.000000,1889.000000,590.000000,686.000000,537.000000,3.470600,241700.000000 +-117.740000,33.610000,17.000000,2116.000000,474.000000,662.000000,443.000000,3.562500,180800.000000 +-117.740000,33.610000,16.000000,2753.000000,576.000000,857.000000,546.000000,3.742200,229800.000000 +-117.740000,33.510000,29.000000,1720.000000,269.000000,612.000000,258.000000,7.823900,500001.000000 +-117.740000,33.460000,9.000000,6564.000000,1316.000000,1720.000000,904.000000,4.890000,454100.000000 +-117.750000,34.120000,25.000000,5411.000000,998.000000,2243.000000,1019.000000,4.314800,240700.000000 
+-117.750000,34.100000,21.000000,8069.000000,2174.000000,4369.000000,2036.000000,3.275600,156800.000000 +-117.750000,34.090000,36.000000,3094.000000,556.000000,1672.000000,545.000000,4.214300,146900.000000 +-117.750000,34.080000,33.000000,2824.000000,523.000000,1797.000000,493.000000,3.635900,135100.000000 +-117.750000,34.080000,33.000000,1067.000000,194.000000,600.000000,201.000000,4.036800,139100.000000 +-117.750000,34.070000,52.000000,2550.000000,586.000000,1246.000000,576.000000,1.600600,146200.000000 +-117.750000,34.070000,52.000000,1548.000000,348.000000,1131.000000,343.000000,2.630000,127300.000000 +-117.750000,34.070000,52.000000,1279.000000,213.000000,444.000000,204.000000,5.226900,161000.000000 +-117.750000,34.060000,52.000000,62.000000,9.000000,44.000000,16.000000,0.499900,112500.000000 +-117.750000,34.060000,52.000000,24.000000,6.000000,46.000000,7.000000,1.625000,67500.000000 +-117.750000,34.060000,52.000000,1171.000000,318.000000,1126.000000,276.000000,1.976200,105800.000000 +-117.750000,34.060000,44.000000,477.000000,135.000000,502.000000,117.000000,2.015600,112500.000000 +-117.750000,34.050000,46.000000,1480.000000,358.000000,1511.000000,348.000000,1.971800,110600.000000 +-117.750000,34.050000,37.000000,378.000000,92.000000,503.000000,103.000000,2.190800,94600.000000 +-117.750000,34.050000,35.000000,1293.000000,339.000000,1494.000000,312.000000,1.664500,93300.000000 +-117.750000,34.050000,27.000000,437.000000,108.000000,469.000000,97.000000,1.720600,107500.000000 +-117.750000,34.040000,22.000000,2948.000000,636.000000,2600.000000,602.000000,3.125000,113600.000000 +-117.750000,34.010000,4.000000,22128.000000,3522.000000,10450.000000,3258.000000,6.128700,289600.000000 +-117.750000,33.950000,13.000000,984.000000,127.000000,364.000000,119.000000,7.583900,426900.000000 +-117.750000,33.840000,16.000000,3491.000000,502.000000,1496.000000,509.000000,6.620700,270500.000000 +-117.750000,33.720000,10.000000,2464.000000,347.000000,1241.000000,366.000000,8.760300,362500.000000 +-117.750000,33.710000,15.000000,2849.000000,537.000000,878.000000,520.000000,3.284100,158300.000000 +-117.750000,33.640000,9.000000,2499.000000,492.000000,1111.000000,542.000000,5.534200,182300.000000 +-117.750000,33.610000,17.000000,2499.000000,566.000000,781.000000,522.000000,3.177900,186500.000000 +-117.750000,33.600000,5.000000,4944.000000,1164.000000,1727.000000,948.000000,4.900000,255600.000000 +-117.750000,33.540000,21.000000,8711.000000,1544.000000,3173.000000,1396.000000,5.090700,378200.000000 +-117.760000,35.630000,12.000000,2014.000000,372.000000,1027.000000,356.000000,3.926100,101300.000000 +-117.760000,35.220000,4.000000,18.000000,3.000000,8.000000,6.000000,1.625000,275000.000000 +-117.760000,34.710000,15.000000,2981.000000,625.000000,1694.000000,540.000000,2.954100,106700.000000 +-117.760000,34.130000,8.000000,16759.000000,2274.000000,7249.000000,2156.000000,7.483700,358700.000000 +-117.760000,34.110000,22.000000,4935.000000,954.000000,2874.000000,938.000000,3.982500,180500.000000 +-117.760000,34.100000,28.000000,4086.000000,871.000000,1973.000000,853.000000,2.621000,202200.000000 +-117.760000,34.080000,37.000000,2263.000000,502.000000,1677.000000,522.000000,2.938800,139200.000000 +-117.760000,34.070000,51.000000,1538.000000,394.000000,1173.000000,388.000000,2.315600,109800.000000 +-117.760000,34.070000,48.000000,1157.000000,247.000000,677.000000,218.000000,2.859400,127200.000000 +-117.760000,34.060000,47.000000,508.000000,108.000000,384.000000,86.000000,1.958300,92600.000000 
+-117.760000,34.060000,33.000000,1831.000000,486.000000,1625.000000,472.000000,1.993700,103600.000000 +-117.760000,34.050000,36.000000,3839.000000,1004.000000,4711.000000,942.000000,2.385900,116200.000000 +-117.760000,34.040000,36.000000,2242.000000,448.000000,2052.000000,447.000000,3.446400,113000.000000 +-117.760000,34.040000,34.000000,1914.000000,364.000000,1564.000000,328.000000,2.834700,115800.000000 +-117.760000,33.980000,3.000000,9662.000000,1385.000000,2497.000000,856.000000,6.717200,292400.000000 +-117.760000,33.940000,40.000000,1092.000000,213.000000,457.000000,190.000000,5.116500,184200.000000 +-117.760000,33.880000,9.000000,4838.000000,759.000000,2090.000000,695.000000,6.653600,307800.000000 +-117.760000,33.870000,16.000000,3973.000000,595.000000,1971.000000,575.000000,6.426500,263700.000000 +-117.760000,33.870000,16.000000,3182.000000,429.000000,1663.000000,428.000000,7.059200,288200.000000 +-117.760000,33.860000,14.000000,3666.000000,442.000000,1400.000000,433.000000,10.131600,500001.000000 +-117.760000,33.840000,15.000000,3764.000000,510.000000,1448.000000,468.000000,8.712400,410500.000000 +-117.760000,33.830000,15.000000,3086.000000,457.000000,1262.000000,436.000000,6.441500,300700.000000 +-117.760000,33.810000,2.000000,582.000000,70.000000,199.000000,64.000000,7.119300,500001.000000 +-117.760000,33.790000,4.000000,8974.000000,1268.000000,3754.000000,1241.000000,8.265300,374000.000000 +-117.760000,33.720000,15.000000,941.000000,266.000000,366.000000,248.000000,4.363600,148400.000000 +-117.760000,33.720000,14.000000,3011.000000,388.000000,1359.000000,371.000000,7.973900,368700.000000 +-117.760000,33.720000,11.000000,4508.000000,618.000000,1993.000000,573.000000,10.449800,386100.000000 +-117.760000,33.710000,14.000000,4321.000000,582.000000,2025.000000,578.000000,8.363400,355100.000000 +-117.760000,33.700000,12.000000,4025.000000,574.000000,2042.000000,588.000000,7.912500,344900.000000 +-117.760000,33.530000,28.000000,3085.000000,499.000000,1176.000000,480.000000,7.979400,426100.000000 +-117.760000,33.530000,24.000000,2105.000000,346.000000,712.000000,332.000000,10.634900,500001.000000 +-117.760000,33.530000,18.000000,3224.000000,561.000000,1310.000000,580.000000,8.461400,391900.000000 +-117.760000,33.480000,38.000000,3832.000000,809.000000,1332.000000,636.000000,5.004400,381200.000000 +-117.770000,34.110000,28.000000,1998.000000,414.000000,1124.000000,389.000000,3.750000,180900.000000 +-117.770000,34.070000,29.000000,2976.000000,662.000000,2452.000000,633.000000,3.063800,113600.000000 +-117.770000,33.850000,13.000000,5415.000000,827.000000,2061.000000,714.000000,7.368100,353100.000000 +-117.770000,33.840000,5.000000,4380.000000,715.000000,1913.000000,741.000000,6.727400,266400.000000 +-117.770000,33.800000,16.000000,3973.000000,483.000000,1373.000000,452.000000,9.807400,417000.000000 +-117.770000,33.760000,19.000000,3532.000000,402.000000,1200.000000,426.000000,11.012400,500001.000000 +-117.770000,33.720000,10.000000,2815.000000,431.000000,1181.000000,398.000000,6.574300,278700.000000 +-117.770000,33.710000,5.000000,4050.000000,584.000000,1986.000000,598.000000,7.584700,375700.000000 +-117.770000,33.710000,4.000000,1646.000000,321.000000,859.000000,300.000000,5.563100,227800.000000 +-117.770000,33.710000,13.000000,1939.000000,247.000000,928.000000,244.000000,8.111100,379800.000000 +-117.770000,33.700000,4.000000,2446.000000,622.000000,1315.000000,560.000000,3.714700,137500.000000 
+-117.770000,33.700000,3.000000,3636.000000,749.000000,1486.000000,696.000000,5.546400,207500.000000 +-117.770000,33.700000,15.000000,1392.000000,267.000000,681.000000,263.000000,5.424800,187200.000000 +-117.770000,33.690000,15.000000,500.000000,113.000000,261.000000,116.000000,5.063100,154000.000000 +-117.770000,33.690000,14.000000,1413.000000,372.000000,744.000000,338.000000,3.798800,184100.000000 +-117.770000,33.670000,12.000000,4329.000000,1068.000000,1913.000000,978.000000,4.509400,160200.000000 +-117.770000,33.600000,33.000000,247.000000,80.000000,167.000000,70.000000,3.705900,237500.000000 +-117.770000,33.550000,28.000000,2024.000000,297.000000,617.000000,274.000000,6.786100,499100.000000 +-117.770000,33.540000,47.000000,3090.000000,652.000000,1105.000000,582.000000,4.169900,373700.000000 +-117.770000,33.540000,28.000000,3404.000000,497.000000,1134.000000,466.000000,7.221700,500001.000000 +-117.770000,33.530000,46.000000,1033.000000,223.000000,462.000000,224.000000,3.270800,384700.000000 +-117.770000,33.530000,32.000000,3116.000000,661.000000,1105.000000,543.000000,5.183700,445600.000000 +-117.770000,33.510000,29.000000,3590.000000,772.000000,1070.000000,603.000000,4.446400,500001.000000 +-117.780000,34.580000,6.000000,10263.000000,1864.000000,6163.000000,1781.000000,3.880300,120000.000000 +-117.780000,34.130000,18.000000,7798.000000,1161.000000,3710.000000,1227.000000,5.881900,260500.000000 +-117.780000,34.110000,23.000000,7079.000000,1381.000000,3205.000000,1327.000000,3.073500,212300.000000 +-117.780000,34.090000,32.000000,2643.000000,516.000000,1862.000000,478.000000,3.717700,177200.000000 +-117.780000,34.070000,18.000000,3610.000000,772.000000,2899.000000,765.000000,3.978400,113500.000000 +-117.780000,34.060000,33.000000,1056.000000,272.000000,964.000000,300.000000,2.446400,128700.000000 +-117.780000,34.050000,39.000000,2933.000000,590.000000,1886.000000,550.000000,3.922400,131300.000000 +-117.780000,34.030000,8.000000,32054.000000,5290.000000,15507.000000,5050.000000,6.019100,253900.000000 +-117.780000,33.970000,2.000000,556.000000,63.000000,179.000000,54.000000,8.441100,500001.000000 +-117.780000,33.900000,14.000000,6239.000000,901.000000,2923.000000,904.000000,6.543700,268200.000000 +-117.780000,33.890000,7.000000,9729.000000,1210.000000,4160.000000,1214.000000,8.908800,415300.000000 +-117.780000,33.880000,16.000000,1800.000000,238.000000,871.000000,234.000000,6.667800,301900.000000 +-117.780000,33.870000,16.000000,5609.000000,952.000000,2624.000000,934.000000,5.330700,169600.000000 +-117.780000,33.860000,16.000000,4390.000000,660.000000,2146.000000,633.000000,6.150400,266000.000000 +-117.780000,33.850000,16.000000,3781.000000,504.000000,1665.000000,499.000000,7.255400,335600.000000 +-117.780000,33.820000,12.000000,6208.000000,750.000000,2443.000000,739.000000,9.180800,413700.000000 +-117.780000,33.810000,23.000000,1986.000000,278.000000,826.000000,260.000000,7.775200,380000.000000 +-117.780000,33.780000,6.000000,9792.000000,1283.000000,3744.000000,1179.000000,10.171400,481500.000000 +-117.780000,33.760000,25.000000,2260.000000,261.000000,719.000000,254.000000,11.453700,500001.000000 +-117.780000,33.710000,4.000000,974.000000,232.000000,428.000000,203.000000,4.614100,195400.000000 +-117.780000,33.710000,16.000000,2207.000000,291.000000,1081.000000,308.000000,7.351800,331200.000000 +-117.780000,33.700000,16.000000,1663.000000,250.000000,597.000000,204.000000,5.409000,233900.000000 
+-117.780000,33.690000,16.000000,4702.000000,806.000000,2529.000000,814.000000,5.129900,238900.000000 +-117.780000,33.690000,16.000000,3400.000000,501.000000,1575.000000,488.000000,6.096100,295500.000000 +-117.780000,33.680000,19.000000,2500.000000,331.000000,1027.000000,327.000000,6.115000,315600.000000 +-117.780000,33.680000,14.000000,1750.000000,336.000000,852.000000,300.000000,4.679300,236800.000000 +-117.780000,33.680000,11.000000,1994.000000,477.000000,849.000000,411.000000,4.018700,235600.000000 +-117.780000,33.510000,44.000000,1833.000000,331.000000,515.000000,268.000000,6.617800,500001.000000 +-117.790000,35.210000,4.000000,2.000000,2.000000,6.000000,2.000000,2.375000,137500.000000 +-117.790000,34.450000,18.000000,2986.000000,597.000000,1355.000000,472.000000,3.276500,165000.000000 +-117.790000,34.120000,16.000000,2426.000000,426.000000,1319.000000,446.000000,4.812500,224500.000000 +-117.790000,34.110000,18.000000,3814.000000,721.000000,1881.000000,692.000000,4.472200,215600.000000 +-117.790000,34.100000,26.000000,1664.000000,344.000000,1024.000000,339.000000,3.519200,190500.000000 +-117.790000,34.020000,5.000000,18690.000000,2862.000000,9427.000000,2777.000000,6.426600,315600.000000 +-117.790000,33.880000,17.000000,8562.000000,1351.000000,3822.000000,1316.000000,6.082900,252600.000000 +-117.790000,33.840000,9.000000,10484.000000,1603.000000,4005.000000,1419.000000,8.393100,365300.000000 +-117.790000,33.800000,11.000000,10535.000000,1620.000000,4409.000000,1622.000000,6.670000,283200.000000 +-117.790000,33.770000,23.000000,3596.000000,451.000000,1292.000000,458.000000,8.540300,451300.000000 +-117.790000,33.770000,21.000000,4349.000000,553.000000,1680.000000,519.000000,6.901400,439000.000000 +-117.790000,33.760000,25.000000,2037.000000,252.000000,796.000000,249.000000,11.054600,487200.000000 +-117.790000,33.750000,26.000000,2955.000000,377.000000,1074.000000,373.000000,9.384500,500001.000000 +-117.790000,33.750000,26.000000,2893.000000,345.000000,983.000000,326.000000,13.466000,500001.000000 +-117.790000,33.730000,3.000000,8240.000000,1410.000000,3318.000000,1270.000000,7.207400,291300.000000 +-117.790000,33.710000,16.000000,6339.000000,862.000000,3132.000000,825.000000,7.106900,313400.000000 +-117.790000,33.710000,16.000000,3114.000000,463.000000,1641.000000,469.000000,6.216200,283200.000000 +-117.790000,33.700000,6.000000,1593.000000,371.000000,832.000000,379.000000,4.428600,239500.000000 +-117.790000,33.700000,16.000000,6259.000000,1098.000000,3785.000000,1114.000000,6.329800,247100.000000 +-117.790000,33.700000,16.000000,1416.000000,249.000000,636.000000,244.000000,5.174100,227700.000000 +-117.790000,33.690000,16.000000,3067.000000,396.000000,1275.000000,372.000000,8.738500,340000.000000 +-117.790000,33.690000,16.000000,1532.000000,240.000000,679.000000,248.000000,5.711500,313900.000000 +-117.790000,33.690000,15.000000,1875.000000,316.000000,890.000000,316.000000,6.578300,244800.000000 +-117.790000,33.680000,9.000000,1633.000000,295.000000,928.000000,297.000000,5.785800,265900.000000 +-117.790000,33.680000,16.000000,1998.000000,308.000000,818.000000,299.000000,6.872200,326100.000000 +-117.790000,33.680000,13.000000,2636.000000,416.000000,1137.000000,404.000000,7.211800,311500.000000 +-117.790000,33.680000,10.000000,2106.000000,319.000000,1002.000000,332.000000,8.735000,375300.000000 +-117.790000,33.560000,36.000000,2057.000000,329.000000,658.000000,309.000000,7.866000,500001.000000 
+-117.790000,33.550000,39.000000,5066.000000,1292.000000,1915.000000,1117.000000,3.821000,452100.000000 +-117.800000,34.110000,25.000000,5039.000000,821.000000,2654.000000,802.000000,4.796900,211700.000000 +-117.800000,34.100000,17.000000,5153.000000,1164.000000,2949.000000,1083.000000,3.560300,174600.000000 +-117.800000,34.050000,5.000000,4536.000000,1178.000000,2485.000000,909.000000,4.111800,125900.000000 +-117.800000,34.030000,25.000000,4240.000000,643.000000,1885.000000,637.000000,6.238400,247600.000000 +-117.800000,34.020000,23.000000,3351.000000,591.000000,1535.000000,522.000000,5.086900,230600.000000 +-117.800000,33.920000,16.000000,5819.000000,986.000000,2306.000000,914.000000,4.631500,277500.000000 +-117.800000,33.900000,22.000000,3760.000000,482.000000,1485.000000,461.000000,7.853700,354900.000000 +-117.800000,33.870000,16.000000,5954.000000,1281.000000,3107.000000,1209.000000,4.256600,206100.000000 +-117.800000,33.850000,23.000000,3038.000000,470.000000,1568.000000,438.000000,5.640300,233000.000000 +-117.800000,33.830000,17.000000,2971.000000,350.000000,1180.000000,346.000000,11.122800,500001.000000 +-117.800000,33.790000,13.000000,2021.000000,362.000000,1081.000000,341.000000,4.326900,231400.000000 +-117.800000,33.780000,18.000000,3548.000000,474.000000,1506.000000,449.000000,6.925000,290300.000000 +-117.800000,33.780000,17.000000,4138.000000,805.000000,2442.000000,780.000000,4.780400,242000.000000 +-117.800000,33.770000,29.000000,5436.000000,707.000000,2046.000000,685.000000,8.749600,349500.000000 +-117.800000,33.760000,27.000000,2655.000000,345.000000,1017.000000,335.000000,6.901400,366800.000000 +-117.800000,33.750000,29.000000,3058.000000,488.000000,1197.000000,474.000000,5.390300,286600.000000 +-117.800000,33.740000,33.000000,2890.000000,453.000000,1300.000000,452.000000,6.561600,290200.000000 +-117.800000,33.740000,30.000000,3569.000000,551.000000,1540.000000,537.000000,5.299800,247200.000000 +-117.800000,33.720000,16.000000,2617.000000,506.000000,1317.000000,511.000000,4.821000,201400.000000 +-117.800000,33.690000,16.000000,2745.000000,447.000000,1429.000000,411.000000,6.821900,325500.000000 +-117.800000,33.690000,15.000000,2099.000000,322.000000,873.000000,307.000000,7.988700,328000.000000 +-117.800000,33.690000,14.000000,1800.000000,362.000000,874.000000,373.000000,4.208300,251000.000000 +-117.800000,33.690000,13.000000,1161.000000,289.000000,630.000000,296.000000,3.343800,333300.000000 +-117.800000,33.680000,8.000000,2032.000000,349.000000,862.000000,340.000000,6.913300,274100.000000 +-117.800000,33.680000,5.000000,623.000000,146.000000,396.000000,136.000000,3.631000,225000.000000 +-117.800000,33.670000,5.000000,2638.000000,521.000000,1179.000000,480.000000,5.775900,240000.000000 +-117.800000,33.670000,5.000000,2487.000000,388.000000,1147.000000,397.000000,8.284000,302500.000000 +-117.800000,33.670000,4.000000,3345.000000,552.000000,1525.000000,539.000000,6.796200,329100.000000 +-117.800000,33.660000,16.000000,2542.000000,498.000000,1022.000000,494.000000,4.000000,223400.000000 +-117.800000,33.640000,8.000000,4447.000000,713.000000,1680.000000,705.000000,8.869300,450400.000000 +-117.800000,33.630000,8.000000,32.000000,9.000000,26.000000,11.000000,4.194400,270800.000000 +-117.800000,33.630000,15.000000,3236.000000,451.000000,1289.000000,416.000000,11.112100,493000.000000 +-117.800000,33.550000,38.000000,1757.000000,464.000000,821.000000,426.000000,4.130400,433300.000000 
+-117.800000,33.530000,41.000000,2017.000000,489.000000,783.000000,403.000000,4.159100,500001.000000 +-117.800000,33.520000,50.000000,1152.000000,341.000000,519.000000,225.000000,3.053000,500001.000000 +-117.810000,35.650000,19.000000,1124.000000,290.000000,598.000000,261.000000,1.898400,54300.000000 +-117.810000,34.120000,23.000000,7063.000000,1176.000000,3100.000000,1112.000000,4.822900,192600.000000 +-117.810000,34.110000,21.000000,3481.000000,808.000000,1866.000000,746.000000,3.620100,150400.000000 +-117.810000,34.100000,19.000000,1935.000000,399.000000,1126.000000,389.000000,3.892900,144600.000000 +-117.810000,34.080000,13.000000,18448.000000,2474.000000,7775.000000,2397.000000,7.787600,348900.000000 +-117.810000,34.010000,12.000000,9197.000000,1642.000000,4332.000000,1554.000000,4.958900,282100.000000 +-117.810000,33.890000,13.000000,3252.000000,583.000000,1546.000000,557.000000,5.824300,297900.000000 +-117.810000,33.880000,19.000000,2968.000000,503.000000,1430.000000,459.000000,5.333900,371700.000000 +-117.810000,33.880000,19.000000,2265.000000,283.000000,904.000000,279.000000,9.232700,461300.000000 +-117.810000,33.870000,19.000000,4491.000000,680.000000,2457.000000,702.000000,6.059100,233500.000000 +-117.810000,33.860000,18.000000,133.000000,29.000000,95.000000,23.000000,3.562500,235000.000000 +-117.810000,33.820000,20.000000,2819.000000,319.000000,1019.000000,319.000000,12.209200,500001.000000 +-117.810000,33.810000,19.000000,3154.000000,390.000000,1404.000000,384.000000,8.925700,431800.000000 +-117.810000,33.800000,25.000000,2765.000000,475.000000,1666.000000,474.000000,6.053100,230700.000000 +-117.810000,33.790000,25.000000,5950.000000,1155.000000,4528.000000,1064.000000,4.256400,204600.000000 +-117.810000,33.790000,23.000000,3114.000000,610.000000,2045.000000,577.000000,3.750000,211900.000000 +-117.810000,33.770000,31.000000,4624.000000,624.000000,1852.000000,635.000000,7.239200,334600.000000 +-117.810000,33.760000,32.000000,2053.000000,339.000000,835.000000,323.000000,5.565400,281800.000000 +-117.810000,33.750000,25.000000,2365.000000,471.000000,1197.000000,458.000000,3.703100,227800.000000 +-117.810000,33.750000,23.000000,3498.000000,636.000000,1574.000000,642.000000,5.021000,252200.000000 +-117.810000,33.740000,24.000000,2696.000000,649.000000,1908.000000,626.000000,3.304700,216900.000000 +-117.810000,33.730000,23.000000,3056.000000,556.000000,1508.000000,555.000000,4.727300,234200.000000 +-117.810000,33.730000,19.000000,5471.000000,1345.000000,2828.000000,1247.000000,3.571900,252800.000000 +-117.810000,33.730000,19.000000,4022.000000,975.000000,2334.000000,954.000000,3.030500,140600.000000 +-117.810000,33.690000,5.000000,1256.000000,256.000000,880.000000,288.000000,2.423300,450000.000000 +-117.810000,33.680000,8.000000,1964.000000,413.000000,913.000000,406.000000,5.158300,192200.000000 +-117.810000,33.680000,4.000000,1545.000000,304.000000,788.000000,296.000000,4.546900,500001.000000 +-117.810000,33.670000,9.000000,3279.000000,530.000000,1447.000000,510.000000,7.458100,296600.000000 +-117.810000,33.670000,9.000000,2435.000000,396.000000,1194.000000,385.000000,7.202500,275000.000000 +-117.810000,33.670000,9.000000,1567.000000,299.000000,675.000000,294.000000,5.212400,199600.000000 +-117.810000,33.670000,8.000000,2440.000000,502.000000,1113.000000,483.000000,4.601900,242500.000000 +-117.810000,33.670000,8.000000,2098.000000,342.000000,908.000000,329.000000,7.758900,342900.000000 
+-117.810000,33.670000,24.000000,3930.000000,661.000000,1831.000000,616.000000,6.376700,269000.000000 +-117.810000,33.660000,16.000000,1414.000000,191.000000,635.000000,230.000000,10.075700,383900.000000 +-117.810000,33.640000,4.000000,1741.000000,225.000000,811.000000,233.000000,12.341100,500001.000000 +-117.810000,33.640000,16.000000,2404.000000,349.000000,868.000000,329.000000,11.013800,442100.000000 +-117.810000,33.630000,17.000000,4477.000000,610.000000,1798.000000,612.000000,8.109300,410400.000000 +-117.810000,33.560000,24.000000,6258.000000,1003.000000,1730.000000,752.000000,10.960100,500001.000000 +-117.820000,34.130000,27.000000,3770.000000,573.000000,1606.000000,562.000000,6.132100,309700.000000 +-117.820000,34.120000,26.000000,3118.000000,528.000000,1546.000000,545.000000,5.270000,209400.000000 +-117.820000,34.050000,21.000000,4031.000000,923.000000,2558.000000,834.000000,3.164100,117300.000000 +-117.820000,33.890000,24.000000,2168.000000,421.000000,1050.000000,397.000000,4.617200,238300.000000 +-117.820000,33.890000,21.000000,3079.000000,509.000000,1431.000000,480.000000,4.071400,278900.000000 +-117.820000,33.880000,18.000000,1982.000000,300.000000,1027.000000,324.000000,7.052600,327500.000000 +-117.820000,33.880000,15.000000,5392.000000,895.000000,2531.000000,827.000000,6.218500,280300.000000 +-117.820000,33.850000,21.000000,2603.000000,404.000000,1350.000000,390.000000,6.057000,235900.000000 +-117.820000,33.850000,18.000000,1810.000000,305.000000,1189.000000,326.000000,5.222700,213500.000000 +-117.820000,33.840000,25.000000,1788.000000,203.000000,676.000000,217.000000,10.129900,454300.000000 +-117.820000,33.820000,22.000000,3173.000000,372.000000,1181.000000,355.000000,8.363700,500001.000000 +-117.820000,33.810000,30.000000,2260.000000,345.000000,1182.000000,341.000000,6.070500,236700.000000 +-117.820000,33.810000,19.000000,2556.000000,304.000000,822.000000,260.000000,9.905500,456900.000000 +-117.820000,33.800000,15.000000,3207.000000,647.000000,1414.000000,595.000000,4.048400,165600.000000 +-117.820000,33.780000,28.000000,4485.000000,667.000000,2048.000000,685.000000,5.456200,274700.000000 +-117.820000,33.780000,25.000000,4977.000000,645.000000,2061.000000,646.000000,6.580000,318500.000000 +-117.820000,33.770000,32.000000,2308.000000,301.000000,967.000000,320.000000,7.056500,324600.000000 +-117.820000,33.770000,27.000000,2578.000000,314.000000,976.000000,340.000000,7.188200,359200.000000 +-117.820000,33.760000,33.000000,2774.000000,428.000000,1229.000000,407.000000,6.294400,265600.000000 +-117.820000,33.760000,27.000000,3230.000000,449.000000,1193.000000,448.000000,6.530800,287800.000000 +-117.820000,33.750000,30.000000,2910.000000,535.000000,1270.000000,489.000000,4.616100,236500.000000 +-117.820000,33.750000,24.000000,893.000000,209.000000,342.000000,197.000000,2.826100,146500.000000 +-117.820000,33.730000,27.000000,1270.000000,258.000000,809.000000,264.000000,5.016200,223000.000000 +-117.820000,33.730000,24.000000,845.000000,190.000000,482.000000,190.000000,4.703900,225000.000000 +-117.820000,33.730000,23.000000,2542.000000,772.000000,1720.000000,675.000000,3.870300,137000.000000 +-117.820000,33.720000,24.000000,3477.000000,462.000000,1593.000000,484.000000,6.863400,276500.000000 +-117.820000,33.680000,3.000000,7105.000000,1459.000000,3068.000000,1241.000000,6.139500,358000.000000 +-117.820000,33.680000,3.000000,3068.000000,494.000000,1357.000000,486.000000,7.918700,333600.000000 
+-117.820000,33.670000,15.000000,1010.000000,274.000000,649.000000,261.000000,2.519700,350000.000000 +-117.820000,33.660000,24.000000,4227.000000,641.000000,1605.000000,589.000000,6.423800,278400.000000 +-117.820000,33.660000,15.000000,2460.000000,447.000000,1049.000000,398.000000,6.496700,387500.000000 +-117.820000,33.650000,18.000000,2105.000000,302.000000,830.000000,286.000000,6.382200,362500.000000 +-117.820000,33.640000,18.000000,1974.000000,260.000000,808.000000,278.000000,9.858900,500001.000000 +-117.830000,34.150000,20.000000,2421.000000,306.000000,1023.000000,298.000000,8.068300,451500.000000 +-117.830000,34.140000,26.000000,8254.000000,1153.000000,3460.000000,1131.000000,6.525300,349900.000000 +-117.830000,34.110000,29.000000,2671.000000,437.000000,1484.000000,445.000000,4.984400,203000.000000 +-117.830000,34.100000,18.000000,11026.000000,1978.000000,5407.000000,1923.000000,4.075000,231100.000000 +-117.830000,34.010000,16.000000,9446.000000,1650.000000,4911.000000,1534.000000,5.011100,212900.000000 +-117.830000,33.990000,14.000000,17527.000000,2751.000000,8380.000000,2676.000000,6.273400,267000.000000 +-117.830000,33.970000,11.000000,21533.000000,3078.000000,9671.000000,2890.000000,7.032900,368300.000000 +-117.830000,33.930000,14.000000,1956.000000,282.000000,671.000000,269.000000,6.584100,306400.000000 +-117.830000,33.900000,23.000000,2446.000000,360.000000,1196.000000,359.000000,6.575500,272800.000000 +-117.830000,33.890000,25.000000,1737.000000,270.000000,840.000000,265.000000,4.625000,245700.000000 +-117.830000,33.880000,18.000000,2112.000000,340.000000,1048.000000,315.000000,6.930800,231700.000000 +-117.830000,33.870000,5.000000,6971.000000,1449.000000,3521.000000,1423.000000,5.213100,243900.000000 +-117.830000,33.830000,23.000000,2775.000000,547.000000,1226.000000,510.000000,3.670700,231400.000000 +-117.830000,33.820000,26.000000,3259.000000,456.000000,1354.000000,459.000000,5.781700,267600.000000 +-117.830000,33.820000,23.000000,1100.000000,285.000000,940.000000,267.000000,3.695300,150000.000000 +-117.830000,33.810000,28.000000,1972.000000,315.000000,970.000000,326.000000,5.429800,234200.000000 +-117.830000,33.810000,24.000000,3550.000000,895.000000,2828.000000,834.000000,2.840300,225600.000000 +-117.830000,33.800000,31.000000,2016.000000,409.000000,1095.000000,405.000000,3.868100,196000.000000 +-117.830000,33.800000,30.000000,4713.000000,758.000000,2271.000000,730.000000,5.862200,221000.000000 +-117.830000,33.790000,25.000000,2070.000000,513.000000,1078.000000,460.000000,2.931200,220100.000000 +-117.830000,33.770000,26.000000,4931.000000,853.000000,2249.000000,818.000000,4.275000,285400.000000 +-117.830000,33.770000,22.000000,2956.000000,642.000000,1342.000000,558.000000,4.115100,203200.000000 +-117.830000,33.750000,34.000000,2660.000000,601.000000,1475.000000,567.000000,3.415200,210200.000000 +-117.830000,33.750000,22.000000,6433.000000,1174.000000,2703.000000,1125.000000,4.995700,296400.000000 +-117.830000,33.740000,23.000000,6114.000000,1623.000000,4088.000000,1521.000000,3.038200,183600.000000 +-117.830000,33.740000,23.000000,1818.000000,522.000000,958.000000,485.000000,2.677100,131500.000000 +-117.830000,33.730000,20.000000,5768.000000,1597.000000,4853.000000,1465.000000,3.538700,160400.000000 +-117.830000,33.680000,4.000000,3226.000000,838.000000,1666.000000,800.000000,4.165200,184500.000000 +-117.830000,33.670000,17.000000,2634.000000,641.000000,1454.000000,560.000000,3.797600,275000.000000 
+-117.830000,33.660000,4.000000,1011.000000,198.000000,511.000000,198.000000,7.921700,296200.000000 +-117.830000,33.660000,15.000000,2355.000000,438.000000,747.000000,450.000000,6.535600,272800.000000 +-117.830000,33.650000,9.000000,638.000000,266.000000,426.000000,234.000000,3.787500,187500.000000 +-117.830000,33.650000,8.000000,2149.000000,426.000000,950.000000,399.000000,4.110300,250400.000000 +-117.840000,35.540000,11.000000,1751.000000,316.000000,765.000000,296.000000,5.076200,98000.000000 +-117.840000,34.630000,5.000000,6739.000000,1251.000000,4614.000000,1266.000000,4.002000,115100.000000 +-117.840000,34.130000,26.000000,3773.000000,694.000000,2103.000000,688.000000,4.693700,198000.000000 +-117.840000,34.120000,34.000000,2026.000000,345.000000,1142.000000,332.000000,4.392000,187600.000000 +-117.840000,34.120000,25.000000,3465.000000,566.000000,1722.000000,536.000000,4.830400,228900.000000 +-117.840000,34.100000,17.000000,7836.000000,1624.000000,4419.000000,1526.000000,3.846500,180700.000000 +-117.840000,33.980000,26.000000,3638.000000,557.000000,1993.000000,593.000000,6.107600,221200.000000 +-117.840000,33.900000,24.000000,1723.000000,223.000000,707.000000,219.000000,7.035200,299600.000000 +-117.840000,33.890000,19.000000,3544.000000,542.000000,1787.000000,560.000000,6.783700,264300.000000 +-117.840000,33.840000,23.000000,6157.000000,1129.000000,2817.000000,1073.000000,5.062900,232600.000000 +-117.840000,33.840000,23.000000,4388.000000,864.000000,2526.000000,846.000000,4.521700,219400.000000 +-117.840000,33.820000,24.000000,10281.000000,1689.000000,4926.000000,1629.000000,4.794600,251200.000000 +-117.840000,33.810000,26.000000,5574.000000,1025.000000,2607.000000,988.000000,4.032400,244900.000000 +-117.840000,33.790000,37.000000,2733.000000,460.000000,1378.000000,476.000000,5.304100,235700.000000 +-117.840000,33.790000,34.000000,2590.000000,603.000000,1658.000000,608.000000,2.378000,199600.000000 +-117.840000,33.780000,26.000000,2577.000000,434.000000,1086.000000,432.000000,4.612500,229200.000000 +-117.840000,33.780000,24.000000,3817.000000,787.000000,1656.000000,713.000000,4.250000,248000.000000 +-117.840000,33.770000,26.000000,3350.000000,581.000000,1314.000000,550.000000,3.519500,249100.000000 +-117.840000,33.770000,14.000000,4412.000000,952.000000,1656.000000,874.000000,4.329200,206500.000000 +-117.840000,33.760000,26.000000,2110.000000,409.000000,1146.000000,407.000000,4.369800,229600.000000 +-117.840000,33.760000,22.000000,378.000000,78.000000,196.000000,81.000000,3.680600,219400.000000 +-117.840000,33.760000,14.000000,1458.000000,423.000000,615.000000,365.000000,4.279800,218800.000000 +-117.840000,33.750000,16.000000,4367.000000,1161.000000,2164.000000,1005.000000,4.021400,139500.000000 +-117.840000,33.740000,25.000000,1818.000000,577.000000,1426.000000,532.000000,3.210400,112500.000000 +-117.840000,33.740000,24.000000,1752.000000,407.000000,910.000000,427.000000,3.361100,134600.000000 +-117.840000,33.740000,22.000000,6072.000000,1802.000000,4715.000000,1666.000000,3.135300,121400.000000 +-117.840000,33.730000,20.000000,2572.000000,732.000000,1534.000000,669.000000,2.421100,175000.000000 +-117.840000,33.660000,5.000000,665.000000,171.000000,384.000000,171.000000,4.583300,230400.000000 +-117.840000,33.660000,5.000000,1688.000000,430.000000,857.000000,402.000000,3.785700,231600.000000 +-117.840000,33.650000,4.000000,1649.000000,456.000000,1030.000000,411.000000,2.226200,225000.000000 
+-117.840000,33.640000,11.000000,6840.000000,1689.000000,6083.000000,1629.000000,2.413200,198300.000000 +-117.840000,33.600000,21.000000,4281.000000,582.000000,1443.000000,576.000000,9.051900,500001.000000 +-117.850000,34.140000,35.000000,2899.000000,429.000000,1251.000000,429.000000,6.104900,297200.000000 +-117.850000,34.140000,35.000000,1582.000000,248.000000,654.000000,221.000000,4.909100,275000.000000 +-117.850000,34.130000,31.000000,1959.000000,318.000000,1021.000000,303.000000,4.314500,233000.000000 +-117.850000,34.110000,27.000000,1748.000000,403.000000,985.000000,416.000000,3.113300,180600.000000 +-117.850000,34.110000,25.000000,9255.000000,1659.000000,4944.000000,1627.000000,4.570800,223000.000000 +-117.850000,34.100000,22.000000,5179.000000,944.000000,2315.000000,884.000000,4.510000,189900.000000 +-117.850000,34.090000,16.000000,4556.000000,639.000000,2066.000000,651.000000,6.466700,263900.000000 +-117.850000,34.080000,23.000000,1160.000000,166.000000,467.000000,178.000000,8.105000,386200.000000 +-117.850000,34.070000,32.000000,761.000000,101.000000,295.000000,95.000000,11.107700,500001.000000 +-117.850000,34.000000,26.000000,2712.000000,402.000000,1389.000000,377.000000,5.651300,227900.000000 +-117.850000,33.920000,11.000000,3331.000000,410.000000,1460.000000,416.000000,8.028700,371800.000000 +-117.850000,33.900000,25.000000,1548.000000,256.000000,811.000000,263.000000,5.203700,242200.000000 +-117.850000,33.900000,20.000000,4026.000000,648.000000,1997.000000,650.000000,5.591800,260500.000000 +-117.850000,33.890000,24.000000,3326.000000,503.000000,1616.000000,494.000000,5.745700,240600.000000 +-117.850000,33.890000,22.000000,4020.000000,655.000000,1486.000000,635.000000,5.963900,262300.000000 +-117.850000,33.880000,14.000000,4753.000000,681.000000,2138.000000,678.000000,7.365800,288500.000000 +-117.850000,33.860000,18.000000,329.000000,72.000000,209.000000,71.000000,4.680600,187500.000000 +-117.850000,33.850000,17.000000,4678.000000,1065.000000,2427.000000,1020.000000,4.227600,254100.000000 +-117.850000,33.840000,26.000000,2095.000000,280.000000,793.000000,261.000000,6.671900,271700.000000 +-117.850000,33.830000,26.000000,1904.000000,292.000000,945.000000,303.000000,5.678400,232400.000000 +-117.850000,33.810000,32.000000,1766.000000,322.000000,876.000000,330.000000,4.041700,234500.000000 +-117.850000,33.810000,26.000000,4186.000000,767.000000,2447.000000,777.000000,4.991700,248100.000000 +-117.850000,33.800000,40.000000,1461.000000,286.000000,1322.000000,264.000000,4.326900,194100.000000 +-117.850000,33.800000,34.000000,1593.000000,283.000000,872.000000,255.000000,3.825000,216700.000000 +-117.850000,33.790000,52.000000,1963.000000,430.000000,1197.000000,415.000000,3.892900,211000.000000 +-117.850000,33.790000,46.000000,1846.000000,383.000000,867.000000,336.000000,3.423400,200000.000000 +-117.850000,33.790000,40.000000,1251.000000,336.000000,729.000000,343.000000,2.468800,236400.000000 +-117.850000,33.780000,23.000000,3187.000000,870.000000,1977.000000,852.000000,3.393900,212100.000000 +-117.850000,33.770000,23.000000,5928.000000,1204.000000,3570.000000,1150.000000,4.039800,233100.000000 +-117.850000,33.770000,16.000000,2186.000000,511.000000,908.000000,466.000000,4.575000,225000.000000 +-117.850000,33.750000,27.000000,2311.000000,632.000000,2936.000000,609.000000,2.565100,171400.000000 +-117.850000,33.740000,26.000000,2589.000000,1003.000000,5756.000000,983.000000,2.199200,170800.000000 
+-117.850000,33.740000,19.000000,1248.000000,357.000000,1214.000000,328.000000,2.705900,159800.000000 +-117.850000,33.730000,28.000000,1499.000000,574.000000,3328.000000,595.000000,2.453900,115000.000000 +-117.850000,33.620000,18.000000,729.000000,105.000000,316.000000,108.000000,10.389300,500001.000000 +-117.850000,33.610000,14.000000,4340.000000,741.000000,1505.000000,670.000000,7.567400,500001.000000 +-117.860000,34.240000,52.000000,803.000000,267.000000,628.000000,225.000000,4.193200,14999.000000 +-117.860000,34.140000,36.000000,3097.000000,667.000000,1484.000000,634.000000,3.190500,235300.000000 +-117.860000,34.140000,33.000000,2344.000000,363.000000,1098.000000,359.000000,6.208900,283400.000000 +-117.860000,34.130000,40.000000,1304.000000,280.000000,607.000000,256.000000,2.588000,209500.000000 +-117.860000,34.130000,33.000000,2383.000000,428.000000,1269.000000,421.000000,4.636000,245500.000000 +-117.860000,34.130000,29.000000,630.000000,145.000000,378.000000,148.000000,3.410700,170800.000000 +-117.860000,34.100000,29.000000,1185.000000,197.000000,588.000000,196.000000,5.083200,196900.000000 +-117.860000,34.100000,23.000000,2535.000000,490.000000,1327.000000,466.000000,3.597700,180600.000000 +-117.860000,34.090000,29.000000,3855.000000,585.000000,2205.000000,609.000000,5.549600,218200.000000 +-117.860000,34.080000,31.000000,2524.000000,349.000000,1003.000000,343.000000,7.519600,380900.000000 +-117.860000,34.020000,19.000000,6300.000000,937.000000,3671.000000,943.000000,5.971600,262100.000000 +-117.860000,34.010000,16.000000,4632.000000,761.000000,3038.000000,727.000000,5.176200,264400.000000 +-117.860000,33.990000,10.000000,17820.000000,2812.000000,8686.000000,2666.000000,6.387500,310700.000000 +-117.860000,33.910000,16.000000,2889.000000,423.000000,1227.000000,401.000000,6.451400,270800.000000 +-117.860000,33.900000,25.000000,3205.000000,409.000000,1291.000000,408.000000,7.247800,299200.000000 +-117.860000,33.900000,17.000000,1452.000000,188.000000,630.000000,194.000000,6.911300,285200.000000 +-117.860000,33.880000,20.000000,3977.000000,540.000000,1886.000000,541.000000,6.584300,272200.000000 +-117.860000,33.880000,19.000000,1621.000000,328.000000,871.000000,322.000000,3.736100,201400.000000 +-117.860000,33.870000,19.000000,1591.000000,279.000000,891.000000,237.000000,5.657300,216000.000000 +-117.860000,33.870000,12.000000,1600.000000,251.000000,685.000000,256.000000,5.178400,254000.000000 +-117.860000,33.850000,17.000000,1131.000000,236.000000,622.000000,244.000000,4.930600,158500.000000 +-117.860000,33.840000,19.000000,1725.000000,392.000000,920.000000,400.000000,3.008700,159400.000000 +-117.860000,33.820000,9.000000,1682.000000,291.000000,1015.000000,271.000000,6.660300,230900.000000 +-117.860000,33.800000,35.000000,1683.000000,347.000000,1242.000000,335.000000,3.517200,190400.000000 +-117.860000,33.800000,34.000000,1793.000000,480.000000,1722.000000,441.000000,2.823500,153100.000000 +-117.860000,33.790000,42.000000,1024.000000,191.000000,483.000000,187.000000,4.105000,194500.000000 +-117.860000,33.790000,34.000000,1883.000000,408.000000,1227.000000,424.000000,3.892900,187500.000000 +-117.860000,33.790000,31.000000,3523.000000,922.000000,2660.000000,949.000000,3.179200,146400.000000 +-117.860000,33.780000,25.000000,2635.000000,660.000000,1710.000000,634.000000,3.125000,215000.000000 +-117.860000,33.780000,21.000000,2713.000000,731.000000,1952.000000,722.000000,2.695900,178800.000000 
+-117.860000,33.770000,39.000000,4159.000000,655.000000,1669.000000,651.000000,4.611100,240300.000000 +-117.860000,33.760000,34.000000,3153.000000,561.000000,1679.000000,532.000000,4.708300,205300.000000 +-117.860000,33.750000,6.000000,1565.000000,599.000000,3157.000000,629.000000,2.927100,123200.000000 +-117.860000,33.750000,5.000000,187.000000,49.000000,207.000000,51.000000,1.800000,154200.000000 +-117.860000,33.750000,39.000000,275.000000,87.000000,554.000000,103.000000,3.597200,158000.000000 +-117.860000,33.750000,31.000000,1761.000000,515.000000,1810.000000,468.000000,1.930900,173400.000000 +-117.860000,33.750000,13.000000,1632.000000,598.000000,3356.000000,659.000000,1.505400,137500.000000 +-117.860000,33.740000,9.000000,525.000000,171.000000,1257.000000,165.000000,3.375000,165300.000000 +-117.860000,33.740000,38.000000,2415.000000,642.000000,3242.000000,599.000000,3.425000,165600.000000 +-117.860000,33.740000,32.000000,691.000000,151.000000,926.000000,148.000000,4.125000,175900.000000 +-117.860000,33.730000,38.000000,2284.000000,511.000000,2451.000000,504.000000,3.312500,159100.000000 +-117.860000,33.730000,31.000000,1115.000000,268.000000,1369.000000,259.000000,3.569400,150500.000000 +-117.860000,33.730000,30.000000,2651.000000,572.000000,3249.000000,552.000000,3.720200,182100.000000 +-117.860000,33.730000,26.000000,1702.000000,456.000000,2776.000000,463.000000,2.638500,180200.000000 +-117.860000,33.730000,23.000000,407.000000,108.000000,647.000000,96.000000,3.775000,177400.000000 +-117.860000,33.720000,37.000000,1429.000000,428.000000,2089.000000,399.000000,3.413000,150600.000000 +-117.860000,33.720000,32.000000,1461.000000,340.000000,1909.000000,346.000000,3.551100,159100.000000 +-117.860000,33.710000,36.000000,191.000000,42.000000,208.000000,37.000000,3.375000,157500.000000 +-117.860000,33.710000,21.000000,1795.000000,406.000000,2246.000000,400.000000,3.152000,152800.000000 +-117.860000,33.670000,16.000000,20.000000,5.000000,15.000000,5.000000,3.875000,450000.000000 +-117.860000,33.650000,4.000000,3618.000000,767.000000,1326.000000,714.000000,5.428400,500001.000000 +-117.860000,33.630000,17.000000,3095.000000,551.000000,1175.000000,534.000000,5.309900,500001.000000 +-117.860000,33.620000,23.000000,3166.000000,411.000000,1092.000000,345.000000,7.936700,500001.000000 +-117.860000,33.620000,17.000000,2975.000000,371.000000,1247.000000,398.000000,10.198900,500001.000000 +-117.860000,33.610000,15.000000,3191.000000,482.000000,930.000000,447.000000,8.600100,500001.000000 +-117.860000,33.600000,30.000000,1891.000000,364.000000,635.000000,314.000000,6.626500,500001.000000 +-117.870000,35.730000,13.000000,2566.000000,449.000000,1181.000000,414.000000,4.151800,91800.000000 +-117.870000,34.150000,37.000000,2655.000000,415.000000,1056.000000,401.000000,5.422400,269500.000000 +-117.870000,34.140000,30.000000,2495.000000,586.000000,1139.000000,559.000000,2.937500,209200.000000 +-117.870000,34.130000,29.000000,1677.000000,413.000000,873.000000,400.000000,3.120000,194300.000000 +-117.870000,34.120000,33.000000,2059.000000,361.000000,1073.000000,339.000000,4.245400,183800.000000 +-117.870000,34.110000,34.000000,1324.000000,211.000000,799.000000,228.000000,4.523400,192200.000000 +-117.870000,34.100000,25.000000,2208.000000,477.000000,1084.000000,424.000000,3.775000,191700.000000 +-117.870000,34.100000,15.000000,6409.000000,1363.000000,3359.000000,1267.000000,3.875000,173300.000000 +-117.870000,34.090000,36.000000,1267.000000,191.000000,640.000000,200.000000,5.240500,220000.000000 
+-117.870000,34.090000,31.000000,1484.000000,327.000000,927.000000,317.000000,3.648400,189600.000000 +-117.870000,34.080000,33.000000,4518.000000,716.000000,2037.000000,764.000000,5.601500,267200.000000 +-117.870000,34.080000,33.000000,3630.000000,800.000000,2257.000000,796.000000,3.246900,206900.000000 +-117.870000,34.070000,21.000000,4723.000000,882.000000,2210.000000,768.000000,3.816700,258700.000000 +-117.870000,34.060000,25.000000,3652.000000,470.000000,1525.000000,484.000000,10.124800,428500.000000 +-117.870000,34.040000,7.000000,27700.000000,4179.000000,15037.000000,4072.000000,6.628800,339700.000000 +-117.870000,33.920000,14.000000,4039.000000,669.000000,1905.000000,670.000000,6.330300,303000.000000 +-117.870000,33.900000,21.000000,3181.000000,447.000000,1416.000000,469.000000,6.826800,280300.000000 +-117.870000,33.890000,25.000000,1142.000000,162.000000,486.000000,150.000000,7.147200,270100.000000 +-117.870000,33.890000,22.000000,2340.000000,664.000000,1382.000000,546.000000,3.343000,184600.000000 +-117.870000,33.890000,19.000000,1674.000000,243.000000,786.000000,234.000000,6.421800,275000.000000 +-117.870000,33.890000,17.000000,1441.000000,530.000000,769.000000,456.000000,2.425000,171700.000000 +-117.870000,33.880000,28.000000,3333.000000,752.000000,2026.000000,722.000000,3.566700,190700.000000 +-117.870000,33.880000,28.000000,2612.000000,602.000000,1682.000000,563.000000,3.641700,204300.000000 +-117.870000,33.880000,25.000000,1808.000000,440.000000,1342.000000,454.000000,3.025000,156900.000000 +-117.870000,33.880000,24.000000,2655.000000,702.000000,1519.000000,708.000000,3.303600,183900.000000 +-117.870000,33.870000,7.000000,2663.000000,642.000000,1367.000000,677.000000,4.656300,162400.000000 +-117.870000,33.870000,16.000000,1332.000000,368.000000,1534.000000,295.000000,3.022700,297100.000000 +-117.870000,33.870000,15.000000,1898.000000,476.000000,1766.000000,455.000000,2.492900,158500.000000 +-117.870000,33.860000,28.000000,2292.000000,531.000000,2197.000000,509.000000,3.485600,142800.000000 +-117.870000,33.860000,19.000000,2232.000000,448.000000,1149.000000,417.000000,3.153400,324400.000000 +-117.870000,33.850000,33.000000,45.000000,11.000000,34.000000,10.000000,5.294900,350000.000000 +-117.870000,33.840000,25.000000,1928.000000,414.000000,961.000000,385.000000,4.072400,231400.000000 +-117.870000,33.840000,17.000000,2395.000000,410.000000,1224.000000,399.000000,5.118200,249200.000000 +-117.870000,33.840000,10.000000,3381.000000,729.000000,1584.000000,636.000000,5.381200,235400.000000 +-117.870000,33.820000,26.000000,2435.000000,346.000000,1088.000000,350.000000,5.939700,249400.000000 +-117.870000,33.810000,15.000000,3082.000000,536.000000,1268.000000,531.000000,3.760400,280100.000000 +-117.870000,33.780000,30.000000,2022.000000,522.000000,1196.000000,463.000000,3.745400,186000.000000 +-117.870000,33.780000,21.000000,2487.000000,573.000000,1515.000000,494.000000,4.303900,168500.000000 +-117.870000,33.780000,19.000000,2813.000000,567.000000,1334.000000,596.000000,4.720800,173500.000000 +-117.870000,33.770000,52.000000,2512.000000,356.000000,978.000000,365.000000,8.078400,320300.000000 +-117.870000,33.760000,37.000000,4943.000000,851.000000,2164.000000,788.000000,4.107100,311300.000000 +-117.870000,33.750000,26.000000,411.000000,114.000000,448.000000,95.000000,1.701900,350000.000000 +-117.870000,33.750000,18.000000,697.000000,255.000000,812.000000,221.000000,2.663500,162500.000000 
+-117.870000,33.750000,12.000000,2782.000000,1077.000000,1968.000000,795.000000,0.971000,102500.000000 +-117.870000,33.740000,52.000000,2411.000000,526.000000,2165.000000,521.000000,3.415000,172500.000000 +-117.870000,33.740000,31.000000,2338.000000,652.000000,3289.000000,631.000000,2.673400,158500.000000 +-117.870000,33.730000,45.000000,2264.000000,502.000000,1970.000000,499.000000,3.419300,177000.000000 +-117.870000,33.720000,39.000000,3167.000000,669.000000,2789.000000,619.000000,3.590200,165900.000000 +-117.870000,33.720000,37.000000,2216.000000,497.000000,2445.000000,506.000000,3.842100,174000.000000 +-117.870000,33.710000,16.000000,3397.000000,686.000000,1924.000000,621.000000,4.914800,155500.000000 +-117.870000,33.710000,13.000000,1087.000000,340.000000,817.000000,342.000000,3.532600,262500.000000 +-117.870000,33.700000,17.000000,3216.000000,607.000000,1916.000000,618.000000,4.915300,266400.000000 +-117.870000,33.690000,4.000000,2337.000000,768.000000,983.000000,655.000000,3.717400,275000.000000 +-117.870000,33.640000,26.000000,3521.000000,455.000000,1336.000000,451.000000,10.284900,500001.000000 +-117.870000,33.630000,9.000000,6163.000000,1004.000000,1912.000000,903.000000,10.828900,500001.000000 +-117.870000,33.620000,8.000000,1266.000000,210.000000,375.000000,183.000000,9.802000,500001.000000 +-117.870000,33.620000,15.000000,2209.000000,275.000000,735.000000,274.000000,15.000100,500001.000000 +-117.870000,33.610000,25.000000,2267.000000,359.000000,866.000000,348.000000,7.790000,500001.000000 +-117.870000,33.600000,35.000000,1598.000000,398.000000,782.000000,411.000000,5.115500,500000.000000 +-117.870000,33.600000,34.000000,3415.000000,779.000000,1275.000000,718.000000,4.498000,482900.000000 +-117.870000,33.600000,33.000000,3120.000000,602.000000,1155.000000,553.000000,5.294900,500001.000000 +-117.870000,33.600000,20.000000,3212.000000,572.000000,1064.000000,526.000000,6.615500,500001.000000 +-117.870000,33.590000,44.000000,2499.000000,396.000000,910.000000,374.000000,6.654400,500001.000000 +-117.880000,34.140000,32.000000,1764.000000,365.000000,924.000000,329.000000,3.875000,186700.000000 +-117.880000,34.140000,23.000000,2308.000000,322.000000,1001.000000,317.000000,7.511200,355500.000000 +-117.880000,34.130000,33.000000,3713.000000,718.000000,2106.000000,720.000000,4.002300,185500.000000 +-117.880000,34.120000,36.000000,2029.000000,351.000000,1327.000000,364.000000,4.183600,164300.000000 +-117.880000,34.120000,35.000000,1574.000000,276.000000,1088.000000,289.000000,4.093800,165300.000000 +-117.880000,34.120000,34.000000,912.000000,165.000000,522.000000,150.000000,4.041700,178000.000000 +-117.880000,34.120000,33.000000,1485.000000,274.000000,1006.000000,258.000000,5.170800,158500.000000 +-117.880000,34.110000,18.000000,2923.000000,670.000000,1751.000000,656.000000,3.238300,157000.000000 +-117.880000,34.100000,32.000000,3357.000000,621.000000,1696.000000,604.000000,4.268500,216600.000000 +-117.880000,34.090000,29.000000,3416.000000,790.000000,2223.000000,728.000000,3.510900,186000.000000 +-117.880000,34.080000,30.000000,6132.000000,1538.000000,3147.000000,1449.000000,2.776300,187800.000000 +-117.880000,34.060000,23.000000,6689.000000,1124.000000,3081.000000,1047.000000,5.925400,491200.000000 +-117.880000,34.010000,16.000000,6756.000000,1332.000000,4429.000000,1299.000000,4.758900,178200.000000 +-117.880000,34.000000,21.000000,5459.000000,1086.000000,3394.000000,1087.000000,3.630800,192100.000000 
+-117.880000,33.960000,16.000000,19059.000000,3079.000000,10988.000000,3061.000000,5.546900,265200.000000 +-117.880000,33.930000,17.000000,6100.000000,861.000000,2771.000000,866.000000,7.648600,306700.000000 +-117.880000,33.900000,21.000000,3180.000000,434.000000,1413.000000,391.000000,6.594500,277300.000000 +-117.880000,33.890000,19.000000,3583.000000,911.000000,2300.000000,871.000000,3.021400,218400.000000 +-117.880000,33.890000,18.000000,1616.000000,532.000000,866.000000,496.000000,3.643500,119100.000000 +-117.880000,33.890000,17.000000,3218.000000,923.000000,1701.000000,824.000000,3.694600,265500.000000 +-117.880000,33.890000,16.000000,959.000000,176.000000,353.000000,185.000000,4.500000,173300.000000 +-117.880000,33.870000,35.000000,1919.000000,349.000000,1302.000000,345.000000,5.640900,190900.000000 +-117.880000,33.850000,26.000000,3924.000000,781.000000,2332.000000,725.000000,3.777200,223900.000000 +-117.880000,33.850000,25.000000,1234.000000,351.000000,507.000000,285.000000,2.317300,225000.000000 +-117.880000,33.850000,22.000000,1105.000000,241.000000,971.000000,249.000000,3.166700,113900.000000 +-117.880000,33.850000,18.000000,2705.000000,713.000000,2726.000000,674.000000,2.775900,200000.000000 +-117.880000,33.840000,34.000000,1410.000000,214.000000,837.000000,240.000000,6.116800,213900.000000 +-117.880000,33.840000,33.000000,1526.000000,237.000000,906.000000,245.000000,5.178200,225000.000000 +-117.880000,33.840000,31.000000,3301.000000,712.000000,1532.000000,682.000000,3.730300,223800.000000 +-117.880000,33.830000,25.000000,1785.000000,248.000000,750.000000,251.000000,6.840700,266700.000000 +-117.880000,33.820000,26.000000,1783.000000,298.000000,1048.000000,306.000000,6.048800,232000.000000 +-117.880000,33.820000,17.000000,2247.000000,705.000000,1382.000000,618.000000,3.863100,225000.000000 +-117.880000,33.780000,26.000000,3141.000000,670.000000,1572.000000,724.000000,3.347200,237400.000000 +-117.880000,33.780000,26.000000,1813.000000,421.000000,1235.000000,343.000000,3.597200,187500.000000 +-117.880000,33.770000,31.000000,2549.000000,355.000000,1044.000000,362.000000,6.973700,288800.000000 +-117.880000,33.760000,37.000000,2988.000000,677.000000,2354.000000,666.000000,3.434500,235500.000000 +-117.880000,33.750000,50.000000,1344.000000,228.000000,747.000000,234.000000,4.512500,195400.000000 +-117.880000,33.750000,34.000000,3004.000000,673.000000,5477.000000,640.000000,2.834200,187200.000000 +-117.880000,33.750000,10.000000,1823.000000,590.000000,2176.000000,548.000000,1.502600,151800.000000 +-117.880000,33.740000,31.000000,1120.000000,296.000000,1718.000000,268.000000,2.807700,140300.000000 +-117.880000,33.740000,29.000000,720.000000,174.000000,1045.000000,181.000000,3.196400,151900.000000 +-117.880000,33.740000,25.000000,1799.000000,557.000000,3416.000000,538.000000,3.008300,163500.000000 +-117.880000,33.740000,19.000000,2261.000000,642.000000,3545.000000,635.000000,2.522400,148500.000000 +-117.880000,33.740000,16.000000,1444.000000,446.000000,2329.000000,441.000000,3.169100,159400.000000 +-117.880000,33.730000,36.000000,2471.000000,498.000000,2594.000000,475.000000,3.750000,170500.000000 +-117.880000,33.730000,33.000000,2291.000000,594.000000,3232.000000,589.000000,3.203700,163500.000000 +-117.880000,33.710000,27.000000,1596.000000,297.000000,1703.000000,289.000000,4.100000,184900.000000 +-117.880000,33.710000,20.000000,1738.000000,509.000000,1403.000000,411.000000,3.174200,245000.000000 
+-117.880000,33.700000,24.000000,534.000000,88.000000,249.000000,74.000000,5.325400,240500.000000 +-117.880000,33.700000,18.000000,2135.000000,373.000000,1464.000000,405.000000,5.483600,225800.000000 +-117.880000,33.700000,17.000000,5122.000000,1544.000000,2966.000000,1339.000000,3.483500,116700.000000 +-117.880000,33.700000,16.000000,1505.000000,358.000000,835.000000,339.000000,3.802900,205400.000000 +-117.880000,33.690000,20.000000,5330.000000,976.000000,2734.000000,1000.000000,5.213800,233100.000000 +-117.880000,33.660000,26.000000,6017.000000,1244.000000,2673.000000,1135.000000,3.542600,295400.000000 +-117.880000,33.650000,24.000000,4879.000000,756.000000,1777.000000,754.000000,5.905500,477300.000000 +-117.880000,33.640000,16.000000,3615.000000,570.000000,1209.000000,559.000000,8.557400,392200.000000 +-117.880000,33.630000,21.000000,9565.000000,2289.000000,3162.000000,1831.000000,4.702400,345400.000000 +-117.880000,33.600000,31.000000,5488.000000,1055.000000,1938.000000,964.000000,8.874200,500001.000000 +-117.880000,33.550000,27.000000,2278.000000,316.000000,772.000000,304.000000,10.127500,500001.000000 +-117.890000,34.140000,15.000000,4644.000000,967.000000,2855.000000,867.000000,3.365400,222100.000000 +-117.890000,34.130000,34.000000,2159.000000,386.000000,1443.000000,385.000000,4.199500,147400.000000 +-117.890000,34.120000,35.000000,1566.000000,321.000000,1396.000000,317.000000,4.050000,141300.000000 +-117.890000,34.120000,35.000000,1470.000000,241.000000,885.000000,246.000000,4.923900,168800.000000 +-117.890000,34.120000,35.000000,1447.000000,272.000000,1224.000000,268.000000,3.993400,141900.000000 +-117.890000,34.110000,36.000000,806.000000,147.000000,446.000000,153.000000,4.522100,151300.000000 +-117.890000,34.110000,27.000000,2434.000000,535.000000,1623.000000,498.000000,3.687500,140200.000000 +-117.890000,34.100000,35.000000,3185.000000,544.000000,1858.000000,564.000000,3.830400,175900.000000 +-117.890000,34.100000,34.000000,2048.000000,411.000000,1456.000000,416.000000,3.125000,168600.000000 +-117.890000,34.100000,27.000000,3341.000000,728.000000,1762.000000,679.000000,2.943700,180400.000000 +-117.890000,34.090000,37.000000,1813.000000,394.000000,1100.000000,375.000000,3.445300,176700.000000 +-117.890000,34.090000,37.000000,1055.000000,280.000000,538.000000,206.000000,2.416700,181300.000000 +-117.890000,34.090000,36.000000,1811.000000,320.000000,1005.000000,332.000000,5.562900,188300.000000 +-117.890000,34.090000,35.000000,1205.000000,330.000000,583.000000,319.000000,2.397100,188900.000000 +-117.890000,34.080000,35.000000,1711.000000,335.000000,825.000000,356.000000,3.500000,215600.000000 +-117.890000,34.080000,25.000000,2115.000000,489.000000,1107.000000,477.000000,3.194900,207400.000000 +-117.890000,34.070000,35.000000,834.000000,137.000000,392.000000,123.000000,4.517900,218800.000000 +-117.890000,34.070000,35.000000,1439.000000,261.000000,804.000000,271.000000,3.980800,188600.000000 +-117.890000,34.070000,32.000000,2374.000000,450.000000,1580.000000,427.000000,3.883700,200300.000000 +-117.890000,34.010000,23.000000,4535.000000,955.000000,3881.000000,930.000000,3.627500,154100.000000 +-117.890000,33.990000,22.000000,3272.000000,618.000000,1784.000000,591.000000,4.032400,211300.000000 +-117.890000,33.980000,5.000000,3088.000000,711.000000,1415.000000,641.000000,2.512500,184500.000000 +-117.890000,33.940000,20.000000,3349.000000,685.000000,1822.000000,675.000000,4.721600,227000.000000 
+-117.890000,33.920000,8.000000,2120.000000,544.000000,1281.000000,470.000000,3.495400,159500.000000 +-117.890000,33.920000,17.000000,2936.000000,555.000000,1381.000000,535.000000,5.461700,190300.000000 +-117.890000,33.920000,14.000000,1562.000000,373.000000,609.000000,328.000000,2.393500,125000.000000 +-117.890000,33.900000,23.000000,1533.000000,226.000000,693.000000,230.000000,7.898000,258200.000000 +-117.890000,33.900000,16.000000,1426.000000,216.000000,652.000000,226.000000,6.528400,288700.000000 +-117.890000,33.890000,17.000000,1671.000000,192.000000,678.000000,206.000000,13.110700,467600.000000 +-117.890000,33.880000,33.000000,1582.000000,256.000000,771.000000,240.000000,5.383600,229600.000000 +-117.890000,33.880000,27.000000,2091.000000,336.000000,1037.000000,332.000000,5.751900,243400.000000 +-117.890000,33.880000,15.000000,1655.000000,626.000000,1549.000000,582.000000,1.912700,175000.000000 +-117.890000,33.870000,32.000000,1133.000000,216.000000,693.000000,228.000000,3.359400,202100.000000 +-117.890000,33.870000,25.000000,1492.000000,439.000000,755.000000,389.000000,3.089300,188200.000000 +-117.890000,33.860000,28.000000,1395.000000,398.000000,1220.000000,362.000000,3.300800,193800.000000 +-117.890000,33.850000,13.000000,1583.000000,474.000000,1672.000000,432.000000,3.230300,201300.000000 +-117.890000,33.840000,35.000000,3315.000000,744.000000,2425.000000,687.000000,3.552100,182800.000000 +-117.890000,33.840000,33.000000,1587.000000,374.000000,1159.000000,331.000000,2.802100,195100.000000 +-117.890000,33.830000,35.000000,2984.000000,446.000000,1435.000000,455.000000,5.627600,200800.000000 +-117.890000,33.820000,24.000000,1268.000000,210.000000,700.000000,224.000000,5.060500,216200.000000 +-117.890000,33.820000,21.000000,1591.000000,298.000000,904.000000,297.000000,4.890600,179100.000000 +-117.890000,33.820000,18.000000,3197.000000,809.000000,1894.000000,726.000000,3.676100,140500.000000 +-117.890000,33.800000,38.000000,51.000000,12.000000,41.000000,10.000000,6.022400,187500.000000 +-117.890000,33.780000,16.000000,6352.000000,1747.000000,5085.000000,1649.000000,2.883500,193800.000000 +-117.890000,33.770000,35.000000,1799.000000,343.000000,1239.000000,368.000000,3.921900,189600.000000 +-117.890000,33.760000,36.000000,2656.000000,572.000000,2370.000000,571.000000,3.805600,177200.000000 +-117.890000,33.750000,34.000000,2753.000000,654.000000,3117.000000,631.000000,3.171300,170100.000000 +-117.890000,33.750000,31.000000,1205.000000,280.000000,1476.000000,301.000000,4.023100,139200.000000 +-117.890000,33.740000,34.000000,1759.000000,353.000000,2083.000000,330.000000,3.229200,160600.000000 +-117.890000,33.740000,33.000000,619.000000,139.000000,1217.000000,146.000000,4.687500,154400.000000 +-117.890000,33.740000,32.000000,660.000000,145.000000,959.000000,113.000000,3.750000,159000.000000 +-117.890000,33.740000,32.000000,1562.000000,365.000000,2145.000000,347.000000,2.916700,158400.000000 +-117.890000,33.720000,25.000000,4343.000000,847.000000,3872.000000,850.000000,4.650000,197800.000000 +-117.890000,33.720000,23.000000,2305.000000,538.000000,2493.000000,502.000000,3.661800,183500.000000 +-117.890000,33.710000,24.000000,4365.000000,804.000000,2663.000000,753.000000,4.581400,233300.000000 +-117.890000,33.710000,23.000000,1422.000000,260.000000,1092.000000,263.000000,4.742200,202400.000000 +-117.890000,33.710000,16.000000,1591.000000,225.000000,926.000000,239.000000,6.245200,266300.000000 +-117.890000,33.700000,13.000000,1857.000000,572.000000,838.000000,525.000000,3.238600,129200.000000 
+-117.890000,33.680000,8.000000,5278.000000,1575.000000,2389.000000,1371.000000,3.340900,181300.000000 +-117.890000,33.680000,26.000000,2905.000000,504.000000,1452.000000,491.000000,5.085300,260300.000000 +-117.890000,33.660000,33.000000,3595.000000,785.000000,1621.000000,732.000000,4.137200,265200.000000 +-117.890000,33.660000,32.000000,2736.000000,550.000000,1279.000000,534.000000,5.542200,253100.000000 +-117.890000,33.620000,24.000000,1016.000000,238.000000,465.000000,236.000000,3.062500,93800.000000 +-117.890000,33.610000,44.000000,2126.000000,423.000000,745.000000,332.000000,5.192300,500001.000000 +-117.890000,33.610000,42.000000,1301.000000,280.000000,539.000000,249.000000,5.000000,500001.000000 +-117.890000,33.610000,16.000000,2413.000000,559.000000,656.000000,423.000000,6.301700,350000.000000 +-117.900000,34.140000,35.000000,2259.000000,505.000000,1561.000000,509.000000,3.304300,155500.000000 +-117.900000,34.140000,29.000000,2240.000000,457.000000,1187.000000,407.000000,3.836500,184200.000000 +-117.900000,34.130000,5.000000,1126.000000,316.000000,819.000000,311.000000,1.500000,139800.000000 +-117.900000,34.130000,32.000000,1640.000000,391.000000,1312.000000,358.000000,2.629200,136100.000000 +-117.900000,34.130000,25.000000,3076.000000,856.000000,2868.000000,752.000000,2.661900,117600.000000 +-117.900000,34.120000,35.000000,957.000000,194.000000,804.000000,221.000000,3.332200,151400.000000 +-117.900000,34.120000,33.000000,1788.000000,456.000000,1787.000000,361.000000,2.662900,124100.000000 +-117.900000,34.120000,33.000000,1555.000000,361.000000,1571.000000,386.000000,4.052900,138200.000000 +-117.900000,34.110000,37.000000,1286.000000,255.000000,1047.000000,249.000000,4.201900,140100.000000 +-117.900000,34.100000,35.000000,2739.000000,475.000000,1481.000000,483.000000,4.565500,176600.000000 +-117.900000,34.100000,31.000000,3007.000000,653.000000,1766.000000,616.000000,3.708300,166000.000000 +-117.900000,34.090000,34.000000,1562.000000,272.000000,825.000000,266.000000,4.125000,220800.000000 +-117.900000,34.080000,32.000000,5482.000000,1251.000000,3426.000000,1117.000000,3.294300,204400.000000 +-117.900000,34.080000,32.000000,2068.000000,356.000000,976.000000,370.000000,5.212000,201200.000000 +-117.900000,34.070000,35.000000,1646.000000,294.000000,1056.000000,280.000000,3.055000,172000.000000 +-117.900000,34.060000,35.000000,1313.000000,194.000000,599.000000,209.000000,7.500000,287200.000000 +-117.900000,34.060000,34.000000,2956.000000,469.000000,1488.000000,464.000000,5.366400,268300.000000 +-117.900000,34.060000,33.000000,1701.000000,290.000000,831.000000,275.000000,5.446900,274700.000000 +-117.900000,34.050000,33.000000,629.000000,76.000000,253.000000,75.000000,7.628600,330400.000000 +-117.900000,34.040000,15.000000,11989.000000,2185.000000,6652.000000,2081.000000,4.555400,278300.000000 +-117.900000,34.020000,15.000000,14058.000000,2486.000000,8997.000000,2497.000000,5.070400,226200.000000 +-117.900000,34.010000,27.000000,2383.000000,472.000000,2079.000000,494.000000,3.970200,141400.000000 +-117.900000,33.990000,18.000000,8076.000000,1789.000000,5325.000000,1707.000000,3.443000,171900.000000 +-117.900000,33.980000,20.000000,9893.000000,2283.000000,7228.000000,2159.000000,3.253000,186700.000000 +-117.900000,33.970000,23.000000,7353.000000,1255.000000,4014.000000,1124.000000,5.415500,213200.000000 +-117.900000,33.930000,12.000000,4325.000000,1191.000000,1897.000000,1080.000000,3.317300,247400.000000 
+-117.900000,33.920000,27.000000,698.000000,116.000000,391.000000,126.000000,5.917700,267600.000000 +-117.900000,33.910000,33.000000,4181.000000,804.000000,2049.000000,834.000000,4.310300,201600.000000 +-117.900000,33.910000,26.000000,2885.000000,476.000000,1227.000000,439.000000,4.952400,226600.000000 +-117.900000,33.900000,19.000000,2176.000000,414.000000,1002.000000,402.000000,4.974300,193500.000000 +-117.900000,33.880000,35.000000,2062.000000,353.000000,991.000000,357.000000,5.289700,230400.000000 +-117.900000,33.880000,34.000000,1396.000000,245.000000,661.000000,261.000000,4.675000,215400.000000 +-117.900000,33.880000,28.000000,2696.000000,346.000000,947.000000,356.000000,9.005500,375400.000000 +-117.900000,33.870000,28.000000,2315.000000,538.000000,1360.000000,504.000000,2.986100,218600.000000 +-117.900000,33.850000,35.000000,1756.000000,328.000000,1026.000000,332.000000,3.600000,193500.000000 +-117.900000,33.850000,31.000000,3413.000000,764.000000,2326.000000,728.000000,4.325000,187100.000000 +-117.900000,33.840000,31.000000,2043.000000,468.000000,1524.000000,454.000000,3.532900,187400.000000 +-117.900000,33.830000,33.000000,3065.000000,611.000000,2204.000000,606.000000,3.845600,211800.000000 +-117.900000,33.830000,23.000000,2459.000000,689.000000,2720.000000,598.000000,2.807200,164700.000000 +-117.900000,33.820000,32.000000,1187.000000,302.000000,1003.000000,275.000000,2.493100,166900.000000 +-117.900000,33.800000,27.000000,2176.000000,442.000000,1440.000000,418.000000,4.375000,212500.000000 +-117.900000,33.800000,23.000000,1368.000000,397.000000,1940.000000,358.000000,3.078900,350000.000000 +-117.900000,33.800000,22.000000,2964.000000,829.000000,2639.000000,771.000000,2.483300,157500.000000 +-117.900000,33.800000,21.000000,1342.000000,326.000000,748.000000,335.000000,2.923100,45000.000000 +-117.900000,33.770000,35.000000,2002.000000,378.000000,1726.000000,387.000000,3.961300,182300.000000 +-117.900000,33.760000,26.000000,2678.000000,702.000000,3262.000000,685.000000,3.695300,176800.000000 +-117.900000,33.750000,32.000000,1893.000000,431.000000,2245.000000,426.000000,3.714300,163000.000000 +-117.900000,33.750000,28.000000,1346.000000,291.000000,1575.000000,278.000000,3.425000,159500.000000 +-117.900000,33.740000,25.000000,808.000000,163.000000,1066.000000,189.000000,4.767900,173100.000000 +-117.900000,33.740000,24.000000,2932.000000,955.000000,5516.000000,911.000000,2.753500,111000.000000 +-117.900000,33.740000,24.000000,1435.000000,494.000000,3171.000000,504.000000,3.083300,151700.000000 +-117.900000,33.740000,19.000000,1566.000000,379.000000,1032.000000,330.000000,2.210500,180400.000000 +-117.900000,33.740000,18.000000,1884.000000,442.000000,1915.000000,442.000000,2.378300,166700.000000 +-117.900000,33.730000,32.000000,2930.000000,833.000000,5116.000000,854.000000,3.714700,164100.000000 +-117.900000,33.730000,30.000000,746.000000,172.000000,1048.000000,163.000000,4.100000,166400.000000 +-117.900000,33.730000,26.000000,1324.000000,314.000000,1804.000000,311.000000,3.965900,178500.000000 +-117.900000,33.720000,36.000000,443.000000,117.000000,577.000000,115.000000,3.687500,137500.000000 +-117.900000,33.720000,33.000000,2613.000000,562.000000,3150.000000,543.000000,4.389900,180700.000000 +-117.900000,33.710000,16.000000,4208.000000,630.000000,2592.000000,662.000000,6.196600,260500.000000 +-117.900000,33.710000,16.000000,1917.000000,317.000000,1324.000000,351.000000,6.248800,252000.000000 +-117.900000,33.710000,15.000000,539.000000,71.000000,287.000000,66.000000,6.342700,305200.000000 
+-117.900000,33.700000,15.000000,2289.000000,686.000000,982.000000,634.000000,4.575700,162500.000000 +-117.900000,33.700000,12.000000,4695.000000,1110.000000,2153.000000,989.000000,4.648300,190800.000000 +-117.900000,33.690000,13.000000,9947.000000,1675.000000,4071.000000,1582.000000,5.422000,316600.000000 +-117.900000,33.680000,25.000000,7060.000000,1159.000000,3903.000000,1139.000000,4.835900,249200.000000 +-117.900000,33.670000,26.000000,2507.000000,393.000000,1333.000000,392.000000,6.160100,266100.000000 +-117.900000,33.670000,25.000000,639.000000,98.000000,311.000000,93.000000,6.683300,275900.000000 +-117.900000,33.660000,4.000000,456.000000,91.000000,623.000000,84.000000,6.636900,192600.000000 +-117.900000,33.660000,22.000000,3568.000000,938.000000,1952.000000,938.000000,3.166700,161000.000000 +-117.900000,33.660000,13.000000,1642.000000,423.000000,841.000000,368.000000,3.604200,226000.000000 +-117.900000,33.650000,30.000000,1634.000000,373.000000,771.000000,364.000000,3.412500,284100.000000 +-117.900000,33.650000,28.000000,2043.000000,430.000000,1108.000000,452.000000,5.254900,261800.000000 +-117.900000,33.650000,24.000000,4496.000000,877.000000,1928.000000,855.000000,4.680800,245500.000000 +-117.900000,33.640000,36.000000,2017.000000,357.000000,850.000000,348.000000,5.053200,310900.000000 +-117.900000,33.640000,28.000000,2466.000000,507.000000,1081.000000,465.000000,3.937500,339800.000000 +-117.900000,33.630000,32.000000,3556.000000,521.000000,1381.000000,537.000000,6.142600,450700.000000 +-117.900000,33.630000,28.000000,2370.000000,352.000000,832.000000,347.000000,7.114800,500001.000000 +-117.900000,33.630000,26.000000,4486.000000,554.000000,1598.000000,549.000000,10.145400,500001.000000 +-117.900000,33.630000,26.000000,1632.000000,376.000000,598.000000,375.000000,3.212500,455000.000000 +-117.900000,33.610000,44.000000,1469.000000,312.000000,507.000000,266.000000,3.493700,500001.000000 +-117.900000,33.610000,41.000000,1521.000000,328.000000,527.000000,275.000000,4.076400,500001.000000 +-117.900000,33.610000,19.000000,2897.000000,413.000000,860.000000,367.000000,13.173800,500001.000000 +-117.910000,34.140000,42.000000,2225.000000,485.000000,1544.000000,464.000000,2.244200,166700.000000 +-117.910000,34.130000,34.000000,1540.000000,328.000000,1037.000000,317.000000,2.213200,138500.000000 +-117.910000,34.120000,41.000000,2673.000000,578.000000,2259.000000,592.000000,3.784600,145500.000000 +-117.910000,34.120000,33.000000,1391.000000,309.000000,1038.000000,298.000000,4.194400,149500.000000 +-117.910000,34.110000,20.000000,3158.000000,684.000000,2396.000000,713.000000,3.525000,153000.000000 +-117.910000,34.100000,35.000000,2746.000000,478.000000,1779.000000,501.000000,4.250000,166700.000000 +-117.910000,34.100000,28.000000,3694.000000,722.000000,1999.000000,718.000000,3.281300,181100.000000 +-117.910000,34.080000,35.000000,1443.000000,266.000000,861.000000,262.000000,3.579500,186900.000000 +-117.910000,34.080000,33.000000,2325.000000,452.000000,1170.000000,445.000000,3.662500,217100.000000 +-117.910000,34.070000,36.000000,1390.000000,270.000000,887.000000,266.000000,5.089700,189000.000000 +-117.910000,34.070000,33.000000,2938.000000,561.000000,1519.000000,549.000000,4.559400,204200.000000 +-117.910000,34.060000,29.000000,3250.000000,521.000000,1382.000000,513.000000,5.112000,218300.000000 +-117.910000,34.050000,35.000000,3189.000000,527.000000,1727.000000,500.000000,5.075800,211100.000000 
+-117.910000,34.040000,15.000000,8749.000000,1761.000000,5278.000000,1691.000000,4.632400,168800.000000 +-117.910000,34.020000,22.000000,6269.000000,1279.000000,5587.000000,1251.000000,3.820100,136200.000000 +-117.910000,34.020000,17.000000,5973.000000,1384.000000,4349.000000,1229.000000,3.279900,199300.000000 +-117.910000,33.930000,21.000000,2578.000000,363.000000,1207.000000,350.000000,6.245200,291700.000000 +-117.910000,33.920000,21.000000,380.000000,91.000000,398.000000,70.000000,4.722200,208300.000000 +-117.910000,33.910000,34.000000,1763.000000,303.000000,894.000000,297.000000,5.009600,221700.000000 +-117.910000,33.910000,32.000000,1530.000000,301.000000,666.000000,276.000000,4.125000,230200.000000 +-117.910000,33.910000,27.000000,2181.000000,501.000000,1555.000000,488.000000,3.610600,196400.000000 +-117.910000,33.910000,24.000000,2249.000000,379.000000,1015.000000,385.000000,4.976600,267100.000000 +-117.910000,33.900000,27.000000,829.000000,114.000000,383.000000,133.000000,9.312500,293500.000000 +-117.910000,33.890000,30.000000,1631.000000,212.000000,523.000000,216.000000,7.875000,351900.000000 +-117.910000,33.870000,52.000000,2031.000000,506.000000,1191.000000,463.000000,2.907600,177300.000000 +-117.910000,33.860000,26.000000,2296.000000,570.000000,1415.000000,527.000000,2.473200,165800.000000 +-117.910000,33.850000,35.000000,932.000000,258.000000,1147.000000,267.000000,2.701400,156700.000000 +-117.910000,33.850000,22.000000,1178.000000,289.000000,865.000000,294.000000,3.025000,180000.000000 +-117.910000,33.840000,35.000000,1244.000000,324.000000,1603.000000,322.000000,2.958300,175400.000000 +-117.910000,33.840000,29.000000,1570.000000,482.000000,1849.000000,430.000000,2.656300,162500.000000 +-117.910000,33.830000,9.000000,1160.000000,368.000000,735.000000,325.000000,1.119000,175000.000000 +-117.910000,33.830000,47.000000,504.000000,113.000000,375.000000,109.000000,3.660700,160600.000000 +-117.910000,33.830000,32.000000,1855.000000,527.000000,2568.000000,504.000000,2.550900,170800.000000 +-117.910000,33.810000,18.000000,1181.000000,353.000000,781.000000,340.000000,2.562500,153100.000000 +-117.910000,33.790000,22.000000,4417.000000,1054.000000,2759.000000,983.000000,4.250000,170300.000000 +-117.910000,33.780000,33.000000,2729.000000,549.000000,2223.000000,535.000000,4.036200,177900.000000 +-117.910000,33.780000,26.000000,4297.000000,1037.000000,3596.000000,967.000000,3.045000,184000.000000 +-117.910000,33.770000,26.000000,5556.000000,1398.000000,4545.000000,1333.000000,3.090200,190400.000000 +-117.910000,33.760000,22.000000,7531.000000,1569.000000,5254.000000,1523.000000,3.850600,167400.000000 +-117.910000,33.760000,20.000000,4413.000000,1189.000000,4818.000000,1063.000000,2.859400,215100.000000 +-117.910000,33.750000,8.000000,2346.000000,679.000000,3842.000000,674.000000,3.063500,160000.000000 +-117.910000,33.740000,25.000000,4273.000000,965.000000,2946.000000,922.000000,2.992600,183200.000000 +-117.910000,33.740000,15.000000,715.000000,214.000000,1394.000000,244.000000,3.384600,162500.000000 +-117.910000,33.730000,26.000000,2413.000000,512.000000,2867.000000,509.000000,4.763900,179900.000000 +-117.910000,33.720000,32.000000,2436.000000,504.000000,2839.000000,516.000000,4.560700,182100.000000 +-117.910000,33.710000,16.000000,3113.000000,783.000000,1719.000000,715.000000,3.650500,145700.000000 +-117.910000,33.690000,30.000000,2704.000000,426.000000,1289.000000,423.000000,5.281500,229500.000000 
+-117.910000,33.670000,32.000000,3058.000000,562.000000,1475.000000,569.000000,4.462500,253500.000000 +-117.910000,33.670000,16.000000,7961.000000,2276.000000,5014.000000,2116.000000,3.512000,218400.000000 +-117.910000,33.660000,21.000000,1708.000000,505.000000,1099.000000,434.000000,3.225000,193800.000000 +-117.910000,33.650000,24.000000,1494.000000,494.000000,814.000000,459.000000,2.107400,181300.000000 +-117.910000,33.650000,19.000000,1589.000000,421.000000,1118.000000,394.000000,4.102900,213400.000000 +-117.910000,33.650000,14.000000,2598.000000,759.000000,1584.000000,703.000000,4.041700,180900.000000 +-117.910000,33.640000,40.000000,1958.000000,333.000000,876.000000,364.000000,3.640600,326100.000000 +-117.910000,33.640000,38.000000,2222.000000,542.000000,1067.000000,512.000000,2.855300,307600.000000 +-117.910000,33.640000,37.000000,1998.000000,472.000000,1030.000000,436.000000,3.930600,268400.000000 +-117.910000,33.640000,29.000000,1652.000000,310.000000,832.000000,326.000000,4.809800,325400.000000 +-117.910000,33.630000,32.000000,1901.000000,400.000000,946.000000,418.000000,2.726400,311100.000000 +-117.910000,33.630000,32.000000,1122.000000,233.000000,557.000000,223.000000,3.538800,407000.000000 +-117.910000,33.630000,30.000000,2071.000000,412.000000,1081.000000,412.000000,4.912500,335700.000000 +-117.910000,33.630000,20.000000,3442.000000,1526.000000,1427.000000,977.000000,3.198500,106300.000000 +-117.910000,33.620000,35.000000,2426.000000,359.000000,937.000000,387.000000,9.217500,500001.000000 +-117.910000,33.620000,32.000000,1997.000000,427.000000,944.000000,426.000000,4.406300,500001.000000 +-117.910000,33.610000,40.000000,2790.000000,531.000000,952.000000,424.000000,4.800000,500001.000000 +-117.910000,33.610000,38.000000,1232.000000,178.000000,410.000000,171.000000,11.075000,500001.000000 +-117.910000,33.610000,36.000000,3082.000000,455.000000,771.000000,365.000000,11.216000,500001.000000 +-117.910000,33.610000,27.000000,1797.000000,343.000000,435.000000,203.000000,5.919600,500001.000000 +-117.920000,34.590000,7.000000,681.000000,125.000000,485.000000,104.000000,2.739600,125600.000000 +-117.920000,34.130000,42.000000,1762.000000,398.000000,1526.000000,365.000000,2.864300,132600.000000 +-117.920000,34.110000,24.000000,2838.000000,695.000000,2151.000000,645.000000,3.220200,126200.000000 +-117.920000,34.100000,35.000000,2994.000000,603.000000,1933.000000,561.000000,4.005200,160700.000000 +-117.920000,34.100000,33.000000,1921.000000,397.000000,1492.000000,393.000000,4.375000,150500.000000 +-117.920000,34.090000,35.000000,1810.000000,318.000000,1164.000000,332.000000,5.012300,165700.000000 +-117.920000,34.080000,36.000000,1479.000000,251.000000,741.000000,245.000000,4.298600,189600.000000 +-117.920000,34.080000,36.000000,1285.000000,228.000000,679.000000,231.000000,3.870500,191900.000000 +-117.920000,34.080000,35.000000,2108.000000,408.000000,1257.000000,414.000000,4.131200,185200.000000 +-117.920000,34.080000,35.000000,1897.000000,311.000000,965.000000,323.000000,5.703900,199400.000000 +-117.920000,34.080000,35.000000,1860.000000,323.000000,1011.000000,305.000000,3.553600,207000.000000 +-117.920000,34.070000,38.000000,175.000000,22.000000,129.000000,20.000000,9.706600,182500.000000 +-117.920000,34.070000,36.000000,1057.000000,207.000000,658.000000,207.000000,4.770800,191700.000000 +-117.920000,34.070000,29.000000,1699.000000,399.000000,1052.000000,411.000000,3.212200,195500.000000 +-117.920000,34.060000,35.000000,2894.000000,467.000000,1420.000000,479.000000,5.184000,224900.000000 
+-117.920000,34.060000,34.000000,2819.000000,609.000000,1718.000000,558.000000,3.554700,197600.000000 +-117.920000,34.050000,36.000000,2241.000000,419.000000,1743.000000,448.000000,4.658700,161900.000000 +-117.920000,34.030000,35.000000,1469.000000,306.000000,1285.000000,308.000000,3.921900,159500.000000 +-117.920000,34.030000,32.000000,1819.000000,375.000000,1728.000000,375.000000,3.975000,162400.000000 +-117.920000,34.010000,35.000000,3055.000000,634.000000,3738.000000,615.000000,3.375000,127200.000000 +-117.920000,34.000000,36.000000,116.000000,30.000000,193.000000,35.000000,3.812500,136300.000000 +-117.920000,33.980000,10.000000,16414.000000,2919.000000,8907.000000,2714.000000,6.155200,362500.000000 +-117.920000,33.940000,30.000000,2506.000000,394.000000,1255.000000,421.000000,4.781300,198200.000000 +-117.920000,33.940000,27.000000,4566.000000,620.000000,2045.000000,664.000000,5.583000,267700.000000 +-117.920000,33.930000,12.000000,4415.000000,890.000000,1532.000000,854.000000,3.750000,166300.000000 +-117.920000,33.920000,19.000000,2181.000000,400.000000,1272.000000,337.000000,5.195200,302100.000000 +-117.920000,33.910000,33.000000,2868.000000,382.000000,1204.000000,412.000000,6.182500,336900.000000 +-117.920000,33.910000,27.000000,2558.000000,310.000000,891.000000,316.000000,9.556100,411800.000000 +-117.920000,33.890000,18.000000,2895.000000,487.000000,1116.000000,429.000000,5.471600,400000.000000 +-117.920000,33.890000,12.000000,1859.000000,393.000000,622.000000,316.000000,5.025800,161800.000000 +-117.920000,33.880000,52.000000,1270.000000,276.000000,609.000000,211.000000,3.750000,232500.000000 +-117.920000,33.880000,32.000000,1683.000000,273.000000,719.000000,263.000000,5.364900,243600.000000 +-117.920000,33.880000,32.000000,1632.000000,244.000000,575.000000,235.000000,5.398600,318700.000000 +-117.920000,33.870000,36.000000,1125.000000,285.000000,966.000000,257.000000,2.843800,162500.000000 +-117.920000,33.870000,33.000000,1597.000000,406.000000,1888.000000,423.000000,3.055000,157800.000000 +-117.920000,33.860000,26.000000,745.000000,161.000000,247.000000,151.000000,3.637500,133900.000000 +-117.920000,33.850000,44.000000,1231.000000,258.000000,682.000000,244.000000,3.234400,170100.000000 +-117.920000,33.840000,45.000000,2019.000000,394.000000,1549.000000,377.000000,4.611100,223000.000000 +-117.920000,33.840000,38.000000,1316.000000,263.000000,671.000000,278.000000,3.296900,220000.000000 +-117.920000,33.830000,6.000000,3136.000000,990.000000,1894.000000,859.000000,2.556400,171300.000000 +-117.920000,33.830000,52.000000,1514.000000,301.000000,855.000000,293.000000,3.604200,166400.000000 +-117.920000,33.830000,36.000000,1072.000000,193.000000,639.000000,196.000000,5.027500,179300.000000 +-117.920000,33.830000,17.000000,382.000000,86.000000,272.000000,81.000000,1.425000,212500.000000 +-117.920000,33.820000,36.000000,2360.000000,405.000000,1479.000000,386.000000,4.358300,187200.000000 +-117.920000,33.820000,10.000000,1548.000000,506.000000,1535.000000,424.000000,4.505700,152400.000000 +-117.920000,33.810000,34.000000,988.000000,173.000000,759.000000,184.000000,5.604700,205100.000000 +-117.920000,33.800000,17.000000,1317.000000,256.000000,679.000000,272.000000,4.669600,159500.000000 +-117.920000,33.790000,29.000000,3692.000000,969.000000,2683.000000,881.000000,3.172600,198700.000000 +-117.920000,33.790000,26.000000,2737.000000,614.000000,1877.000000,606.000000,2.862200,184300.000000 +-117.920000,33.780000,35.000000,1654.000000,323.000000,1065.000000,354.000000,3.483700,186500.000000 
+-117.920000,33.770000,28.000000,3614.000000,960.000000,3282.000000,889.000000,3.522000,190300.000000 +-117.920000,33.750000,32.000000,790.000000,199.000000,1196.000000,201.000000,3.062500,142800.000000 +-117.920000,33.750000,23.000000,893.000000,223.000000,1149.000000,216.000000,2.644200,156300.000000 +-117.920000,33.750000,19.000000,1920.000000,471.000000,1413.000000,432.000000,4.031300,147500.000000 +-117.920000,33.740000,24.000000,5321.000000,1063.000000,4011.000000,1047.000000,4.388200,189300.000000 +-117.920000,33.740000,18.000000,1639.000000,491.000000,2513.000000,458.000000,2.183800,159700.000000 +-117.920000,33.740000,13.000000,4620.000000,1265.000000,3385.000000,1109.000000,3.177300,186500.000000 +-117.920000,33.730000,14.000000,5147.000000,1182.000000,3171.000000,1126.000000,3.992900,225800.000000 +-117.920000,33.720000,17.000000,3318.000000,502.000000,1520.000000,498.000000,5.550100,274200.000000 +-117.920000,33.700000,15.000000,3201.000000,677.000000,1510.000000,622.000000,4.270800,161700.000000 +-117.920000,33.680000,28.000000,3397.000000,597.000000,1397.000000,560.000000,4.812500,244600.000000 +-117.920000,33.680000,25.000000,2017.000000,454.000000,1024.000000,428.000000,4.473200,245600.000000 +-117.920000,33.670000,14.000000,6224.000000,1679.000000,3148.000000,1589.000000,4.207100,430900.000000 +-117.920000,33.650000,25.000000,1679.000000,470.000000,1314.000000,473.000000,4.102600,211500.000000 +-117.920000,33.650000,20.000000,1391.000000,393.000000,856.000000,360.000000,3.184000,220000.000000 +-117.920000,33.650000,15.000000,1309.000000,477.000000,1330.000000,424.000000,3.441700,182500.000000 +-117.920000,33.640000,25.000000,2224.000000,580.000000,985.000000,516.000000,3.130500,268800.000000 +-117.920000,33.640000,24.000000,2539.000000,695.000000,1623.000000,611.000000,3.070800,188700.000000 +-117.920000,33.630000,39.000000,1469.000000,226.000000,553.000000,225.000000,7.849600,490800.000000 +-117.920000,33.630000,34.000000,2479.000000,491.000000,1131.000000,490.000000,4.964300,317900.000000 +-117.920000,33.630000,24.000000,1562.000000,441.000000,696.000000,347.000000,3.516100,236400.000000 +-117.920000,33.620000,37.000000,2038.000000,379.000000,837.000000,381.000000,5.241600,471300.000000 +-117.920000,33.620000,35.000000,1821.000000,335.000000,727.000000,316.000000,6.584200,458500.000000 +-117.920000,33.610000,37.000000,1244.000000,173.000000,394.000000,154.000000,10.368200,500001.000000 +-117.920000,33.610000,36.000000,1025.000000,150.000000,316.000000,126.000000,10.304800,500001.000000 +-117.920000,33.610000,23.000000,1808.000000,408.000000,539.000000,300.000000,3.568200,500001.000000 +-117.920000,33.610000,18.000000,1538.000000,425.000000,425.000000,288.000000,5.336900,312500.000000 +-117.920000,33.570000,37.000000,3355.000000,492.000000,921.000000,366.000000,7.298800,500001.000000 +-117.930000,34.570000,5.000000,5613.000000,1060.000000,3569.000000,999.000000,3.194600,132700.000000 +-117.930000,34.150000,14.000000,9610.000000,2005.000000,4723.000000,1907.000000,4.039300,156800.000000 +-117.930000,34.120000,36.000000,294.000000,67.000000,266.000000,80.000000,3.538500,134400.000000 +-117.930000,34.090000,35.000000,782.000000,153.000000,499.000000,163.000000,4.206200,161300.000000 +-117.930000,34.090000,35.000000,1891.000000,353.000000,1093.000000,382.000000,4.016700,165500.000000 +-117.930000,34.090000,34.000000,2192.000000,431.000000,1376.000000,428.000000,3.986100,163900.000000 
+-117.930000,34.080000,36.000000,1788.000000,317.000000,1139.000000,320.000000,4.125000,185800.000000 +-117.930000,34.080000,36.000000,1597.000000,285.000000,901.000000,272.000000,4.394700,197000.000000 +-117.930000,34.080000,36.000000,1371.000000,246.000000,806.000000,241.000000,4.507800,187100.000000 +-117.930000,34.080000,35.000000,689.000000,128.000000,379.000000,128.000000,3.958300,206000.000000 +-117.930000,34.070000,36.000000,1207.000000,209.000000,683.000000,213.000000,5.355900,207300.000000 +-117.930000,34.070000,34.000000,1409.000000,305.000000,819.000000,273.000000,3.397700,188800.000000 +-117.930000,34.060000,37.000000,1505.000000,262.000000,798.000000,259.000000,5.463500,202100.000000 +-117.930000,34.060000,28.000000,3342.000000,688.000000,2210.000000,647.000000,3.459600,202800.000000 +-117.930000,34.050000,36.000000,1340.000000,221.000000,848.000000,244.000000,4.173100,205100.000000 +-117.930000,34.050000,32.000000,3055.000000,623.000000,1902.000000,565.000000,4.292600,190700.000000 +-117.930000,34.040000,23.000000,6361.000000,1168.000000,4580.000000,1109.000000,4.934200,181000.000000 +-117.930000,34.030000,35.000000,2160.000000,399.000000,1694.000000,403.000000,3.858100,163100.000000 +-117.930000,34.030000,30.000000,2246.000000,446.000000,1837.000000,431.000000,4.791700,164500.000000 +-117.930000,34.010000,33.000000,1733.000000,361.000000,1757.000000,375.000000,4.226600,153800.000000 +-117.930000,34.010000,23.000000,3188.000000,836.000000,3883.000000,840.000000,2.186300,157600.000000 +-117.930000,33.950000,31.000000,3600.000000,468.000000,1382.000000,435.000000,7.459700,500001.000000 +-117.930000,33.940000,30.000000,2658.000000,382.000000,1135.000000,392.000000,6.051600,245000.000000 +-117.930000,33.940000,28.000000,3664.000000,719.000000,1820.000000,657.000000,4.225000,224700.000000 +-117.930000,33.930000,37.000000,1128.000000,273.000000,931.000000,234.000000,2.800000,137500.000000 +-117.930000,33.930000,33.000000,1626.000000,378.000000,1062.000000,356.000000,2.194400,139600.000000 +-117.930000,33.900000,30.000000,2629.000000,331.000000,956.000000,319.000000,9.907100,500001.000000 +-117.930000,33.880000,52.000000,2157.000000,362.000000,1001.000000,373.000000,5.123700,240000.000000 +-117.930000,33.880000,45.000000,1306.000000,293.000000,585.000000,260.000000,4.081200,241700.000000 +-117.930000,33.880000,32.000000,2458.000000,359.000000,967.000000,409.000000,7.289300,293500.000000 +-117.930000,33.870000,52.000000,950.000000,229.000000,429.000000,185.000000,2.315000,182100.000000 +-117.930000,33.870000,45.000000,1006.000000,230.000000,1237.000000,237.000000,3.347200,168000.000000 +-117.930000,33.870000,29.000000,1221.000000,371.000000,1822.000000,326.000000,1.793500,162500.000000 +-117.930000,33.870000,10.000000,1277.000000,488.000000,730.000000,417.000000,1.480300,137500.000000 +-117.930000,33.860000,36.000000,931.000000,279.000000,778.000000,303.000000,2.656300,155000.000000 +-117.930000,33.860000,36.000000,1672.000000,318.000000,1173.000000,337.000000,4.577400,182100.000000 +-117.930000,33.860000,35.000000,1216.000000,225.000000,893.000000,228.000000,4.028800,184000.000000 +-117.930000,33.860000,17.000000,1627.000000,398.000000,1216.000000,369.000000,3.343800,186600.000000 +-117.930000,33.850000,36.000000,2147.000000,416.000000,1011.000000,392.000000,3.218800,196900.000000 +-117.930000,33.850000,33.000000,2489.000000,546.000000,1857.000000,444.000000,2.947400,178400.000000 +-117.930000,33.850000,31.000000,2149.000000,465.000000,966.000000,302.000000,3.875000,183900.000000 
+-117.930000,33.850000,27.000000,1962.000000,544.000000,1492.000000,481.000000,1.962100,118100.000000 +-117.930000,33.850000,25.000000,1026.000000,288.000000,1646.000000,283.000000,4.201900,163900.000000 +-117.930000,33.840000,34.000000,2160.000000,298.000000,852.000000,305.000000,6.053100,287100.000000 +-117.930000,33.840000,26.000000,2811.000000,612.000000,1374.000000,566.000000,3.475000,282500.000000 +-117.930000,33.840000,23.000000,2870.000000,653.000000,1680.000000,598.000000,3.230100,189900.000000 +-117.930000,33.830000,32.000000,1792.000000,411.000000,1131.000000,381.000000,2.494200,186300.000000 +-117.930000,33.820000,28.000000,2444.000000,555.000000,1848.000000,567.000000,3.017900,198800.000000 +-117.930000,33.810000,18.000000,3291.000000,587.000000,1640.000000,563.000000,4.898100,166300.000000 +-117.930000,33.800000,34.000000,3903.000000,717.000000,2054.000000,716.000000,4.273100,218000.000000 +-117.930000,33.800000,29.000000,1672.000000,267.000000,891.000000,281.000000,4.861100,231900.000000 +-117.930000,33.790000,36.000000,2363.000000,403.000000,1240.000000,391.000000,4.090900,190800.000000 +-117.930000,33.790000,34.000000,3592.000000,616.000000,2138.000000,605.000000,5.212900,193400.000000 +-117.930000,33.770000,36.000000,3157.000000,582.000000,1842.000000,561.000000,4.583300,190700.000000 +-117.930000,33.760000,24.000000,3202.000000,703.000000,3308.000000,714.000000,4.157700,174100.000000 +-117.930000,33.760000,21.000000,2884.000000,662.000000,2613.000000,645.000000,4.050000,177900.000000 +-117.930000,33.760000,17.000000,3341.000000,803.000000,3381.000000,825.000000,3.371000,161800.000000 +-117.930000,33.750000,24.000000,1380.000000,339.000000,1472.000000,304.000000,4.221900,162800.000000 +-117.930000,33.740000,5.000000,639.000000,197.000000,666.000000,197.000000,3.301700,87500.000000 +-117.930000,33.740000,30.000000,1654.000000,434.000000,1843.000000,467.000000,3.140300,153000.000000 +-117.930000,33.740000,15.000000,1206.000000,282.000000,677.000000,270.000000,3.921900,142600.000000 +-117.930000,33.730000,19.000000,4021.000000,557.000000,1872.000000,545.000000,6.791900,295600.000000 +-117.930000,33.720000,17.000000,4461.000000,585.000000,2095.000000,580.000000,7.670900,319500.000000 +-117.930000,33.690000,26.000000,2822.000000,473.000000,1258.000000,469.000000,6.444100,261000.000000 +-117.930000,33.690000,19.000000,2602.000000,439.000000,1156.000000,424.000000,5.010000,263800.000000 +-117.930000,33.670000,27.000000,3512.000000,472.000000,1391.000000,481.000000,8.100100,336500.000000 +-117.930000,33.660000,18.000000,2043.000000,250.000000,702.000000,246.000000,9.606200,414700.000000 +-117.930000,33.650000,35.000000,2133.000000,413.000000,1473.000000,402.000000,4.421100,215200.000000 +-117.930000,33.650000,34.000000,2141.000000,425.000000,1559.000000,429.000000,4.203600,220100.000000 +-117.930000,33.650000,29.000000,1253.000000,375.000000,1198.000000,362.000000,3.517900,225000.000000 +-117.930000,33.650000,27.000000,1283.000000,406.000000,1063.000000,376.000000,2.750000,275000.000000 +-117.930000,33.650000,26.000000,5831.000000,1546.000000,4738.000000,1477.000000,3.148300,213000.000000 +-117.930000,33.640000,31.000000,1291.000000,356.000000,1252.000000,373.000000,2.714300,185400.000000 +-117.930000,33.640000,24.000000,1395.000000,396.000000,1478.000000,404.000000,2.530100,192900.000000 +-117.930000,33.640000,15.000000,1707.000000,514.000000,1335.000000,434.000000,2.754300,177800.000000 
+-117.930000,33.630000,10.000000,2766.000000,732.000000,1332.000000,646.000000,4.616100,226300.000000 +-117.930000,33.620000,37.000000,2204.000000,428.000000,807.000000,410.000000,7.051600,500001.000000 +-117.930000,33.620000,34.000000,2125.000000,498.000000,1052.000000,468.000000,5.631500,484600.000000 +-117.930000,33.620000,33.000000,1890.000000,416.000000,859.000000,329.000000,4.565800,500001.000000 +-117.930000,33.610000,27.000000,1806.000000,465.000000,791.000000,358.000000,3.812500,366700.000000 +-117.940000,34.150000,33.000000,859.000000,144.000000,421.000000,138.000000,4.482100,220100.000000 +-117.940000,34.140000,33.000000,1620.000000,283.000000,868.000000,275.000000,5.411000,219400.000000 +-117.940000,34.100000,31.000000,1239.000000,254.000000,929.000000,244.000000,3.362500,153400.000000 +-117.940000,34.090000,21.000000,2707.000000,675.000000,1742.000000,626.000000,2.106200,176700.000000 +-117.940000,34.080000,35.000000,2393.000000,417.000000,1336.000000,418.000000,4.870000,187700.000000 +-117.940000,34.080000,32.000000,2704.000000,514.000000,1669.000000,497.000000,4.465300,195400.000000 +-117.940000,34.070000,25.000000,1814.000000,404.000000,1187.000000,363.000000,3.352300,170800.000000 +-117.940000,34.060000,34.000000,1921.000000,422.000000,1230.000000,447.000000,3.664800,193900.000000 +-117.940000,34.050000,34.000000,1729.000000,324.000000,1341.000000,324.000000,3.770800,163500.000000 +-117.940000,34.050000,34.000000,1519.000000,304.000000,1262.000000,300.000000,3.340900,161200.000000 +-117.940000,34.040000,34.000000,1403.000000,274.000000,977.000000,257.000000,3.840900,163000.000000 +-117.940000,34.040000,33.000000,1493.000000,331.000000,1571.000000,354.000000,3.886400,158900.000000 +-117.940000,34.030000,35.000000,1499.000000,289.000000,1112.000000,268.000000,3.830000,149000.000000 +-117.940000,34.030000,35.000000,1375.000000,249.000000,1015.000000,239.000000,4.052100,151800.000000 +-117.940000,34.020000,27.000000,5026.000000,955.000000,3899.000000,930.000000,3.871000,162900.000000 +-117.940000,33.990000,18.000000,6100.000000,1018.000000,3112.000000,982.000000,4.993200,284000.000000 +-117.940000,33.940000,30.000000,1596.000000,307.000000,845.000000,309.000000,4.509600,241100.000000 +-117.940000,33.940000,25.000000,3250.000000,546.000000,1452.000000,501.000000,5.108400,303800.000000 +-117.940000,33.930000,34.000000,1475.000000,319.000000,698.000000,293.000000,3.819400,186000.000000 +-117.940000,33.920000,32.000000,1053.000000,207.000000,1038.000000,222.000000,4.669600,165500.000000 +-117.940000,33.910000,18.000000,8836.000000,1527.000000,3946.000000,1451.000000,5.644100,313000.000000 +-117.940000,33.900000,27.000000,2029.000000,242.000000,711.000000,254.000000,9.795600,500001.000000 +-117.940000,33.890000,30.000000,2577.000000,404.000000,1076.000000,374.000000,6.752800,459600.000000 +-117.940000,33.880000,35.000000,2159.000000,343.000000,833.000000,335.000000,5.373800,365100.000000 +-117.940000,33.880000,35.000000,1694.000000,296.000000,679.000000,282.000000,4.333300,239300.000000 +-117.940000,33.860000,35.000000,2127.000000,417.000000,1247.000000,378.000000,4.750000,185600.000000 +-117.940000,33.860000,33.000000,1013.000000,312.000000,706.000000,266.000000,2.143200,197500.000000 +-117.940000,33.850000,37.000000,588.000000,121.000000,436.000000,104.000000,4.275000,186200.000000 +-117.940000,33.840000,28.000000,604.000000,207.000000,615.000000,212.000000,3.621400,182100.000000 +-117.940000,33.830000,20.000000,812.000000,192.000000,494.000000,172.000000,3.250000,350000.000000 
+-117.940000,33.820000,34.000000,1347.000000,212.000000,676.000000,201.000000,3.882800,215400.000000 +-117.940000,33.820000,29.000000,1422.000000,409.000000,1057.000000,390.000000,2.334700,208100.000000 +-117.940000,33.820000,27.000000,1366.000000,326.000000,878.000000,325.000000,3.400000,196900.000000 +-117.940000,33.820000,24.000000,4735.000000,955.000000,2600.000000,868.000000,5.076400,228600.000000 +-117.940000,33.810000,34.000000,1290.000000,203.000000,664.000000,204.000000,5.846100,227400.000000 +-117.940000,33.810000,33.000000,1891.000000,334.000000,932.000000,343.000000,4.275900,238000.000000 +-117.940000,33.810000,26.000000,1589.000000,259.000000,735.000000,315.000000,4.571400,243200.000000 +-117.940000,33.810000,25.000000,1731.000000,482.000000,1127.000000,455.000000,3.256000,214300.000000 +-117.940000,33.780000,40.000000,299.000000,68.000000,163.000000,70.000000,3.012500,166100.000000 +-117.940000,33.780000,34.000000,2627.000000,468.000000,1409.000000,450.000000,4.773100,199200.000000 +-117.940000,33.780000,11.000000,2880.000000,745.000000,1806.000000,722.000000,3.805600,171100.000000 +-117.940000,33.770000,33.000000,2964.000000,747.000000,2235.000000,718.000000,3.259100,175900.000000 +-117.940000,33.770000,32.000000,714.000000,142.000000,654.000000,154.000000,4.505200,170800.000000 +-117.940000,33.760000,33.000000,1441.000000,337.000000,1233.000000,331.000000,3.723200,176200.000000 +-117.940000,33.760000,27.000000,2512.000000,506.000000,1861.000000,511.000000,4.238600,184200.000000 +-117.940000,33.750000,30.000000,5268.000000,1093.000000,4480.000000,1050.000000,4.015000,186700.000000 +-117.940000,33.740000,24.000000,4248.000000,840.000000,3118.000000,798.000000,4.222200,207200.000000 +-117.940000,33.710000,18.000000,3695.000000,602.000000,1779.000000,572.000000,5.944900,276500.000000 +-117.940000,33.680000,26.000000,4183.000000,539.000000,1504.000000,520.000000,7.405600,374200.000000 +-117.940000,33.670000,26.000000,2552.000000,314.000000,925.000000,323.000000,8.183900,367000.000000 +-117.940000,33.660000,16.000000,2095.000000,450.000000,963.000000,411.000000,5.500000,224100.000000 +-117.940000,33.650000,20.000000,5476.000000,1073.000000,2327.000000,963.000000,5.663700,222100.000000 +-117.940000,33.650000,15.000000,2016.000000,443.000000,1015.000000,419.000000,5.273200,209700.000000 +-117.940000,33.640000,24.000000,1097.000000,307.000000,470.000000,333.000000,1.638900,225000.000000 +-117.940000,33.640000,18.000000,1867.000000,426.000000,871.000000,399.000000,2.622100,272000.000000 +-117.940000,33.620000,28.000000,1765.000000,390.000000,832.000000,349.000000,6.592800,439100.000000 +-117.950000,35.130000,4.000000,2630.000000,502.000000,1150.000000,422.000000,4.250000,104400.000000 +-117.950000,34.160000,17.000000,7116.000000,1089.000000,3538.000000,1083.000000,6.265400,273800.000000 +-117.950000,34.110000,29.000000,1986.000000,448.000000,2013.000000,432.000000,3.103400,140800.000000 +-117.950000,34.090000,21.000000,2215.000000,484.000000,1792.000000,419.000000,2.837500,166500.000000 +-117.950000,34.090000,18.000000,1179.000000,324.000000,1296.000000,331.000000,2.851000,140600.000000 +-117.950000,34.080000,34.000000,2278.000000,476.000000,1728.000000,448.000000,3.125000,154100.000000 +-117.950000,34.070000,37.000000,1987.000000,399.000000,1279.000000,378.000000,4.117200,176500.000000 +-117.950000,34.060000,32.000000,2252.000000,415.000000,1370.000000,411.000000,4.631200,184800.000000 
+-117.950000,34.050000,34.000000,1428.000000,227.000000,890.000000,249.000000,5.872200,204800.000000 +-117.950000,34.050000,31.000000,2349.000000,539.000000,2028.000000,521.000000,3.494000,154500.000000 +-117.950000,34.040000,36.000000,1044.000000,200.000000,982.000000,205.000000,4.767900,153900.000000 +-117.950000,34.040000,27.000000,2610.000000,846.000000,2296.000000,750.000000,2.274000,150800.000000 +-117.950000,34.030000,35.000000,804.000000,159.000000,727.000000,179.000000,2.736100,145700.000000 +-117.950000,34.030000,33.000000,1782.000000,394.000000,1517.000000,376.000000,3.338900,157900.000000 +-117.950000,34.030000,33.000000,1453.000000,326.000000,1609.000000,319.000000,3.757800,155800.000000 +-117.950000,34.020000,22.000000,1919.000000,411.000000,1203.000000,363.000000,4.257800,144100.000000 +-117.950000,34.020000,19.000000,1129.000000,258.000000,900.000000,228.000000,3.875000,135600.000000 +-117.950000,34.000000,34.000000,2376.000000,468.000000,1858.000000,449.000000,4.132800,176300.000000 +-117.950000,33.990000,25.000000,1075.000000,138.000000,451.000000,132.000000,6.849200,332200.000000 +-117.950000,33.990000,24.000000,1219.000000,177.000000,610.000000,185.000000,6.797800,325000.000000 +-117.950000,33.990000,15.000000,3978.000000,692.000000,2418.000000,665.000000,5.014200,269900.000000 +-117.950000,33.980000,15.000000,16042.000000,2602.000000,7732.000000,2552.000000,5.671600,330400.000000 +-117.950000,33.970000,33.000000,1113.000000,145.000000,424.000000,137.000000,8.347400,500001.000000 +-117.950000,33.950000,29.000000,4943.000000,674.000000,1913.000000,641.000000,6.818900,379300.000000 +-117.950000,33.940000,31.000000,2237.000000,431.000000,1135.000000,434.000000,4.450000,267900.000000 +-117.950000,33.940000,28.000000,2851.000000,496.000000,1287.000000,496.000000,5.078200,264100.000000 +-117.950000,33.930000,25.000000,3445.000000,801.000000,2400.000000,750.000000,3.470200,161900.000000 +-117.950000,33.920000,32.000000,1661.000000,312.000000,1201.000000,302.000000,4.000000,178200.000000 +-117.950000,33.920000,18.000000,2825.000000,660.000000,1590.000000,643.000000,3.610600,153600.000000 +-117.950000,33.920000,13.000000,2312.000000,592.000000,2038.000000,559.000000,3.137800,137000.000000 +-117.950000,33.920000,11.000000,3127.000000,706.000000,1594.000000,694.000000,4.342600,141300.000000 +-117.950000,33.890000,17.000000,1665.000000,247.000000,755.000000,254.000000,6.576400,349000.000000 +-117.950000,33.880000,34.000000,1939.000000,355.000000,817.000000,314.000000,3.670500,275000.000000 +-117.950000,33.870000,34.000000,1599.000000,296.000000,938.000000,307.000000,4.285000,184900.000000 +-117.950000,33.870000,22.000000,1432.000000,335.000000,746.000000,296.000000,2.022700,55000.000000 +-117.950000,33.860000,36.000000,2038.000000,343.000000,1066.000000,346.000000,5.197000,195700.000000 +-117.950000,33.860000,35.000000,2375.000000,439.000000,1343.000000,424.000000,4.530000,193500.000000 +-117.950000,33.850000,13.000000,6963.000000,1426.000000,3892.000000,1375.000000,4.132500,203500.000000 +-117.950000,33.840000,34.000000,1229.000000,215.000000,1035.000000,218.000000,3.545500,180000.000000 +-117.950000,33.840000,19.000000,1749.000000,406.000000,969.000000,391.000000,3.750000,173400.000000 +-117.950000,33.840000,18.000000,3418.000000,815.000000,1961.000000,773.000000,3.650000,171400.000000 +-117.950000,33.830000,36.000000,1380.000000,237.000000,690.000000,234.000000,3.821400,210900.000000 
+-117.950000,33.830000,35.000000,1107.000000,207.000000,641.000000,210.000000,5.059900,216700.000000 +-117.950000,33.830000,31.000000,2421.000000,389.000000,1348.000000,413.000000,4.939400,217800.000000 +-117.950000,33.820000,35.000000,1117.000000,181.000000,496.000000,168.000000,4.326900,224700.000000 +-117.950000,33.820000,35.000000,1068.000000,190.000000,514.000000,174.000000,4.073500,208700.000000 +-117.950000,33.820000,29.000000,2929.000000,640.000000,1618.000000,584.000000,3.687500,213200.000000 +-117.950000,33.810000,33.000000,1724.000000,291.000000,943.000000,285.000000,5.118000,195200.000000 +-117.950000,33.810000,24.000000,2749.000000,498.000000,1367.000000,460.000000,4.025000,240700.000000 +-117.950000,33.800000,34.000000,1654.000000,285.000000,905.000000,292.000000,4.638900,214600.000000 +-117.950000,33.790000,34.000000,2584.000000,408.000000,1233.000000,405.000000,5.693500,218300.000000 +-117.950000,33.780000,9.000000,3553.000000,1035.000000,2017.000000,986.000000,2.972600,133800.000000 +-117.950000,33.780000,32.000000,2296.000000,560.000000,1376.000000,532.000000,3.730300,188500.000000 +-117.950000,33.780000,26.000000,4115.000000,883.000000,2184.000000,825.000000,3.953600,191000.000000 +-117.950000,33.770000,38.000000,989.000000,246.000000,691.000000,204.000000,3.263200,180900.000000 +-117.950000,33.770000,38.000000,1476.000000,308.000000,1114.000000,309.000000,4.191700,181800.000000 +-117.950000,33.760000,29.000000,1829.000000,366.000000,1703.000000,343.000000,4.129500,188000.000000 +-117.950000,33.750000,24.000000,2027.000000,358.000000,1405.000000,341.000000,5.141600,231400.000000 +-117.950000,33.750000,19.000000,1983.000000,283.000000,1098.000000,275.000000,6.635500,276100.000000 +-117.950000,33.740000,25.000000,1393.000000,243.000000,976.000000,245.000000,5.448500,225200.000000 +-117.950000,33.740000,21.000000,3576.000000,554.000000,1846.000000,538.000000,5.983800,271900.000000 +-117.950000,33.740000,16.000000,2768.000000,600.000000,1182.000000,563.000000,3.716200,201200.000000 +-117.950000,33.720000,21.000000,3107.000000,483.000000,1688.000000,503.000000,5.958200,288000.000000 +-117.950000,33.710000,20.000000,2781.000000,407.000000,1242.000000,408.000000,6.109200,306500.000000 +-117.950000,33.710000,16.000000,6058.000000,1715.000000,3285.000000,1495.000000,3.413300,290900.000000 +-117.950000,33.700000,17.000000,5781.000000,924.000000,2585.000000,915.000000,5.343000,231900.000000 +-117.950000,33.690000,26.000000,1417.000000,264.000000,817.000000,261.000000,4.875000,230400.000000 +-117.950000,33.690000,24.000000,4269.000000,618.000000,1954.000000,597.000000,6.926100,284600.000000 +-117.950000,33.680000,27.000000,1732.000000,303.000000,1115.000000,308.000000,5.531200,239200.000000 +-117.950000,33.680000,26.000000,2249.000000,344.000000,1311.000000,373.000000,5.028700,265000.000000 +-117.950000,33.680000,19.000000,1028.000000,191.000000,340.000000,159.000000,3.636400,252800.000000 +-117.950000,33.670000,25.000000,1799.000000,233.000000,810.000000,265.000000,8.289000,372400.000000 +-117.950000,33.670000,25.000000,1611.000000,383.000000,554.000000,327.000000,3.041700,137300.000000 +-117.950000,33.660000,26.000000,1787.000000,227.000000,639.000000,224.000000,6.822600,329800.000000 +-117.950000,33.660000,22.000000,2785.000000,441.000000,1086.000000,392.000000,7.371900,337400.000000 +-117.950000,33.630000,29.000000,1496.000000,282.000000,463.000000,215.000000,6.051600,500001.000000 
+-117.950000,33.630000,27.000000,891.000000,183.000000,513.000000,171.000000,6.000000,381500.000000 +-117.950000,33.630000,17.000000,6745.000000,1547.000000,2688.000000,1535.000000,3.991700,271600.000000 +-117.960000,34.710000,32.000000,3511.000000,646.000000,1733.000000,510.000000,3.460000,123900.000000 +-117.960000,34.480000,32.000000,1896.000000,342.000000,806.000000,299.000000,4.576900,159400.000000 +-117.960000,34.140000,33.000000,1994.000000,405.000000,993.000000,403.000000,3.766000,163900.000000 +-117.960000,34.140000,27.000000,2221.000000,542.000000,1328.000000,523.000000,2.527500,151700.000000 +-117.960000,34.100000,35.000000,4036.000000,904.000000,3878.000000,846.000000,3.295700,141600.000000 +-117.960000,34.100000,30.000000,2775.000000,657.000000,2847.000000,642.000000,3.226600,141800.000000 +-117.960000,34.090000,6.000000,1954.000000,534.000000,1584.000000,496.000000,3.162100,131000.000000 +-117.960000,34.090000,30.000000,2686.000000,613.000000,2477.000000,573.000000,3.442700,160800.000000 +-117.960000,34.080000,39.000000,1076.000000,338.000000,1242.000000,332.000000,2.267900,151800.000000 +-117.960000,34.080000,33.000000,4151.000000,850.000000,3563.000000,848.000000,3.191200,159900.000000 +-117.960000,34.080000,28.000000,2831.000000,552.000000,2330.000000,557.000000,3.974100,173100.000000 +-117.960000,34.070000,35.000000,2819.000000,529.000000,1508.000000,485.000000,4.611800,191700.000000 +-117.960000,34.070000,32.000000,2910.000000,709.000000,2583.000000,670.000000,3.773600,158400.000000 +-117.960000,34.060000,34.000000,2226.000000,381.000000,1464.000000,365.000000,4.410200,183200.000000 +-117.960000,34.060000,31.000000,2017.000000,462.000000,1462.000000,457.000000,2.067000,167300.000000 +-117.960000,34.050000,36.000000,1475.000000,270.000000,1149.000000,284.000000,3.090400,158600.000000 +-117.960000,34.050000,35.000000,1254.000000,263.000000,1092.000000,268.000000,4.636400,163100.000000 +-117.960000,34.050000,32.000000,1993.000000,388.000000,1385.000000,380.000000,3.725800,181900.000000 +-117.960000,34.040000,35.000000,1141.000000,212.000000,924.000000,212.000000,3.159100,148300.000000 +-117.960000,34.040000,34.000000,1381.000000,265.000000,1020.000000,268.000000,4.025000,146900.000000 +-117.960000,34.040000,33.000000,1458.000000,268.000000,1115.000000,257.000000,4.795500,158100.000000 +-117.960000,34.030000,35.000000,2093.000000,407.000000,1755.000000,403.000000,3.411500,150400.000000 +-117.960000,34.030000,35.000000,1623.000000,331.000000,1462.000000,312.000000,3.980300,152600.000000 +-117.960000,34.020000,33.000000,349.000000,124.000000,460.000000,83.000000,2.375000,133300.000000 +-117.960000,34.000000,34.000000,2777.000000,540.000000,1954.000000,522.000000,4.516300,183800.000000 +-117.960000,33.990000,25.000000,2799.000000,388.000000,1348.000000,389.000000,6.251700,311100.000000 +-117.960000,33.990000,25.000000,1348.000000,210.000000,660.000000,200.000000,5.285200,297600.000000 +-117.960000,33.940000,34.000000,2228.000000,399.000000,1159.000000,378.000000,4.890600,228900.000000 +-117.960000,33.940000,31.000000,2397.000000,518.000000,1407.000000,476.000000,2.664100,185200.000000 +-117.960000,33.930000,31.000000,1471.000000,321.000000,841.000000,330.000000,3.460000,232800.000000 +-117.960000,33.930000,29.000000,2316.000000,522.000000,1275.000000,501.000000,3.776000,192600.000000 +-117.960000,33.930000,15.000000,2014.000000,419.000000,839.000000,390.000000,4.744600,175400.000000 
+-117.960000,33.920000,18.000000,3744.000000,1027.000000,1654.000000,912.000000,3.215800,215000.000000 +-117.960000,33.900000,9.000000,1899.000000,284.000000,1070.000000,293.000000,7.253200,381500.000000 +-117.960000,33.900000,10.000000,2423.000000,356.000000,1213.000000,347.000000,6.563500,346900.000000 +-117.960000,33.890000,24.000000,1332.000000,252.000000,625.000000,230.000000,4.437500,334100.000000 +-117.960000,33.880000,25.000000,3578.000000,461.000000,1588.000000,466.000000,6.255600,341300.000000 +-117.960000,33.870000,35.000000,1972.000000,367.000000,1152.000000,356.000000,3.722200,187500.000000 +-117.960000,33.870000,27.000000,890.000000,289.000000,416.000000,200.000000,3.141000,167500.000000 +-117.960000,33.860000,35.000000,2181.000000,371.000000,1249.000000,358.000000,4.293700,183200.000000 +-117.960000,33.860000,35.000000,2146.000000,430.000000,1230.000000,429.000000,3.781300,184900.000000 +-117.960000,33.860000,32.000000,2366.000000,505.000000,1283.000000,477.000000,3.351600,190000.000000 +-117.960000,33.850000,36.000000,1951.000000,365.000000,1254.000000,358.000000,4.843800,185700.000000 +-117.960000,33.850000,35.000000,1175.000000,191.000000,568.000000,186.000000,4.125000,189200.000000 +-117.960000,33.840000,31.000000,2265.000000,537.000000,1617.000000,507.000000,3.458300,186300.000000 +-117.960000,33.830000,34.000000,982.000000,148.000000,498.000000,156.000000,6.321400,220800.000000 +-117.960000,33.830000,29.000000,1194.000000,176.000000,474.000000,170.000000,6.100100,298900.000000 +-117.960000,33.830000,18.000000,2067.000000,770.000000,870.000000,541.000000,3.131500,137500.000000 +-117.960000,33.820000,32.000000,2856.000000,622.000000,1499.000000,601.000000,3.630000,183400.000000 +-117.960000,33.820000,32.000000,2726.000000,556.000000,1513.000000,531.000000,3.791700,197400.000000 +-117.960000,33.820000,29.000000,2176.000000,468.000000,1632.000000,428.000000,3.707000,180400.000000 +-117.960000,33.810000,35.000000,1996.000000,326.000000,1409.000000,330.000000,4.773800,180000.000000 +-117.960000,33.810000,35.000000,1153.000000,192.000000,884.000000,208.000000,5.238400,177400.000000 +-117.960000,33.810000,34.000000,1941.000000,356.000000,1021.000000,339.000000,4.466300,183900.000000 +-117.960000,33.810000,34.000000,1416.000000,277.000000,980.000000,284.000000,4.777200,182500.000000 +-117.960000,33.800000,35.000000,1493.000000,267.000000,811.000000,272.000000,5.244000,218000.000000 +-117.960000,33.800000,33.000000,2362.000000,394.000000,1185.000000,387.000000,4.425000,188400.000000 +-117.960000,33.800000,30.000000,729.000000,131.000000,488.000000,139.000000,4.766700,195200.000000 +-117.960000,33.790000,36.000000,2398.000000,403.000000,1261.000000,402.000000,5.281600,221800.000000 +-117.960000,33.780000,35.000000,1330.000000,201.000000,658.000000,217.000000,6.370000,229200.000000 +-117.960000,33.780000,33.000000,1520.000000,239.000000,658.000000,242.000000,4.875000,269300.000000 +-117.960000,33.780000,26.000000,2136.000000,557.000000,1528.000000,537.000000,2.493100,236100.000000 +-117.960000,33.770000,32.000000,4398.000000,905.000000,2777.000000,884.000000,4.132100,222800.000000 +-117.960000,33.760000,24.000000,1328.000000,290.000000,1012.000000,306.000000,4.281300,189500.000000 +-117.960000,33.760000,22.000000,2520.000000,556.000000,2126.000000,527.000000,3.773400,193900.000000 +-117.960000,33.750000,25.000000,1323.000000,208.000000,852.000000,229.000000,4.616700,237300.000000 +-117.960000,33.750000,22.000000,2300.000000,539.000000,1625.000000,542.000000,2.780000,196300.000000 
+-117.960000,33.750000,14.000000,2509.000000,611.000000,1814.000000,547.000000,2.798600,176100.000000 +-117.960000,33.740000,19.000000,1783.000000,415.000000,1025.000000,383.000000,4.148400,230000.000000 +-117.960000,33.730000,22.000000,3479.000000,455.000000,1454.000000,488.000000,6.632400,347600.000000 +-117.960000,33.720000,23.000000,3929.000000,559.000000,1858.000000,538.000000,6.864500,318200.000000 +-117.960000,33.710000,19.000000,4328.000000,849.000000,2243.000000,808.000000,5.570200,342600.000000 +-117.960000,33.710000,19.000000,1624.000000,221.000000,782.000000,228.000000,4.596200,304500.000000 +-117.960000,33.700000,23.000000,2622.000000,445.000000,1103.000000,407.000000,4.725000,289600.000000 +-117.960000,33.690000,17.000000,2500.000000,343.000000,1242.000000,368.000000,7.731300,316700.000000 +-117.960000,33.680000,26.000000,1374.000000,234.000000,731.000000,244.000000,6.090500,224800.000000 +-117.960000,33.680000,24.000000,6517.000000,1279.000000,3441.000000,1198.000000,4.250000,152100.000000 +-117.960000,33.680000,18.000000,2594.000000,539.000000,817.000000,485.000000,2.367400,219200.000000 +-117.960000,33.670000,16.000000,5143.000000,652.000000,2209.000000,637.000000,7.017300,382100.000000 +-117.960000,33.660000,19.000000,5925.000000,744.000000,2302.000000,729.000000,7.569900,333300.000000 +-117.960000,33.650000,24.000000,4462.000000,689.000000,1943.000000,712.000000,5.739500,289800.000000 +-117.960000,33.650000,23.000000,5379.000000,684.000000,1826.000000,555.000000,7.015100,350600.000000 +-117.960000,33.650000,21.000000,2030.000000,318.000000,910.000000,311.000000,7.845300,343300.000000 +-117.960000,33.650000,18.000000,3603.000000,879.000000,1549.000000,756.000000,4.022900,363100.000000 +-117.960000,33.600000,34.000000,959.000000,230.000000,384.000000,197.000000,5.233300,471400.000000 +-117.970000,34.170000,35.000000,5005.000000,848.000000,2112.000000,813.000000,4.996800,295000.000000 +-117.970000,34.140000,33.000000,1328.000000,348.000000,903.000000,329.000000,3.109400,136000.000000 +-117.970000,34.140000,15.000000,3595.000000,964.000000,1839.000000,877.000000,2.601400,150300.000000 +-117.970000,34.130000,42.000000,683.000000,127.000000,541.000000,138.000000,3.437500,151700.000000 +-117.970000,34.130000,33.000000,2038.000000,473.000000,1546.000000,469.000000,3.477700,144500.000000 +-117.970000,34.110000,18.000000,123.000000,28.000000,121.000000,26.000000,3.041700,137500.000000 +-117.970000,34.100000,33.000000,1558.000000,316.000000,1600.000000,338.000000,2.971200,143900.000000 +-117.970000,34.100000,26.000000,1399.000000,277.000000,1285.000000,276.000000,4.000000,160100.000000 +-117.970000,34.090000,31.000000,2779.000000,639.000000,2259.000000,670.000000,3.403200,143400.000000 +-117.970000,34.090000,27.000000,3569.000000,761.000000,3339.000000,762.000000,4.130400,160500.000000 +-117.970000,34.080000,8.000000,2027.000000,480.000000,1781.000000,447.000000,3.080600,142400.000000 +-117.970000,34.080000,30.000000,2227.000000,474.000000,1961.000000,481.000000,3.326100,164100.000000 +-117.970000,34.070000,20.000000,2063.000000,496.000000,1573.000000,468.000000,3.200000,157100.000000 +-117.970000,34.060000,34.000000,3580.000000,684.000000,2786.000000,636.000000,4.046900,166800.000000 +-117.970000,34.060000,31.000000,2516.000000,526.000000,2194.000000,497.000000,3.241300,155500.000000 +-117.970000,34.050000,36.000000,931.000000,160.000000,746.000000,201.000000,3.166700,158000.000000 +-117.970000,34.050000,36.000000,1299.000000,206.000000,763.000000,216.000000,3.517900,161400.000000 
+-117.970000,34.050000,34.000000,2050.000000,495.000000,1832.000000,465.000000,2.833300,155700.000000 +-117.970000,34.050000,33.000000,2028.000000,422.000000,1727.000000,371.000000,2.843800,157600.000000 +-117.970000,34.040000,32.000000,1507.000000,295.000000,1326.000000,324.000000,4.119000,163300.000000 +-117.970000,34.040000,28.000000,1686.000000,417.000000,1355.000000,388.000000,2.519200,157300.000000 +-117.970000,34.010000,33.000000,3530.000000,700.000000,2959.000000,679.000000,3.745900,152900.000000 +-117.970000,34.010000,33.000000,2006.000000,381.000000,1410.000000,346.000000,3.708300,165500.000000 +-117.970000,34.000000,28.000000,1983.000000,375.000000,1407.000000,367.000000,3.831900,179000.000000 +-117.970000,33.990000,23.000000,3335.000000,570.000000,1560.000000,555.000000,5.726800,300300.000000 +-117.970000,33.990000,22.000000,5284.000000,982.000000,2613.000000,932.000000,4.733200,289900.000000 +-117.970000,33.960000,30.000000,4873.000000,667.000000,1995.000000,638.000000,7.247200,441900.000000 +-117.970000,33.940000,36.000000,1870.000000,338.000000,947.000000,324.000000,4.120500,217000.000000 +-117.970000,33.940000,35.000000,1928.000000,360.000000,1056.000000,366.000000,4.089300,215700.000000 +-117.970000,33.940000,34.000000,1632.000000,263.000000,690.000000,268.000000,5.560800,255800.000000 +-117.970000,33.930000,35.000000,1887.000000,328.000000,989.000000,351.000000,4.132100,198100.000000 +-117.970000,33.930000,33.000000,1700.000000,369.000000,981.000000,362.000000,4.546100,194000.000000 +-117.970000,33.930000,31.000000,1975.000000,373.000000,918.000000,347.000000,4.410700,202000.000000 +-117.970000,33.890000,17.000000,1851.000000,344.000000,764.000000,339.000000,5.131500,181800.000000 +-117.970000,33.890000,17.000000,1740.000000,445.000000,1158.000000,412.000000,2.864900,137500.000000 +-117.970000,33.890000,14.000000,923.000000,136.000000,420.000000,130.000000,10.225200,462800.000000 +-117.970000,33.880000,16.000000,2003.000000,300.000000,1172.000000,318.000000,6.039400,321600.000000 +-117.970000,33.880000,11.000000,1454.000000,247.000000,635.000000,236.000000,6.242700,218500.000000 +-117.970000,33.870000,28.000000,1784.000000,440.000000,1255.000000,433.000000,3.705400,169200.000000 +-117.970000,33.860000,34.000000,2138.000000,490.000000,1682.000000,463.000000,3.600600,161700.000000 +-117.970000,33.860000,12.000000,1370.000000,367.000000,1022.000000,296.000000,3.647100,141700.000000 +-117.970000,33.850000,45.000000,818.000000,147.000000,546.000000,152.000000,5.105700,170700.000000 +-117.970000,33.850000,30.000000,2513.000000,476.000000,1611.000000,472.000000,4.006100,182900.000000 +-117.970000,33.840000,35.000000,793.000000,128.000000,589.000000,137.000000,5.250000,190200.000000 +-117.970000,33.840000,25.000000,2471.000000,518.000000,1539.000000,500.000000,4.267900,191700.000000 +-117.970000,33.830000,22.000000,3310.000000,688.000000,1807.000000,674.000000,4.018500,200900.000000 +-117.970000,33.830000,16.000000,2035.000000,564.000000,1118.000000,503.000000,3.254600,187500.000000 +-117.970000,33.820000,26.000000,2335.000000,504.000000,1121.000000,502.000000,2.989100,205200.000000 +-117.970000,33.810000,30.000000,2406.000000,462.000000,1753.000000,456.000000,4.485000,180600.000000 +-117.970000,33.810000,26.000000,4022.000000,1081.000000,2457.000000,1001.000000,2.804200,206300.000000 +-117.970000,33.790000,34.000000,2456.000000,410.000000,1289.000000,442.000000,4.181800,224200.000000 
+-117.970000,33.790000,33.000000,3268.000000,641.000000,1704.000000,591.000000,3.684900,211400.000000 +-117.970000,33.780000,35.000000,3148.000000,597.000000,2110.000000,587.000000,3.947900,203800.000000 +-117.970000,33.770000,25.000000,1295.000000,417.000000,856.000000,342.000000,2.715700,350000.000000 +-117.970000,33.770000,22.000000,2244.000000,575.000000,1543.000000,533.000000,2.661800,179600.000000 +-117.970000,33.770000,20.000000,1988.000000,424.000000,1277.000000,425.000000,2.941400,162200.000000 +-117.970000,33.760000,28.000000,1386.000000,272.000000,901.000000,294.000000,4.746400,187500.000000 +-117.970000,33.760000,27.000000,1712.000000,325.000000,1036.000000,345.000000,4.050800,183900.000000 +-117.970000,33.760000,18.000000,1862.000000,399.000000,1301.000000,369.000000,3.177100,194000.000000 +-117.970000,33.750000,26.000000,3361.000000,722.000000,2709.000000,648.000000,3.910700,190700.000000 +-117.970000,33.740000,18.000000,2814.000000,539.000000,1439.000000,493.000000,3.599000,262000.000000 +-117.970000,33.740000,16.000000,1735.000000,380.000000,784.000000,360.000000,4.256600,139200.000000 +-117.970000,33.730000,26.000000,1694.000000,260.000000,885.000000,279.000000,5.087500,224200.000000 +-117.970000,33.730000,19.000000,4154.000000,560.000000,2130.000000,589.000000,7.284500,301800.000000 +-117.970000,33.710000,26.000000,2553.000000,405.000000,1337.000000,411.000000,5.373700,252900.000000 +-117.970000,33.710000,25.000000,3273.000000,478.000000,1645.000000,497.000000,5.819500,286100.000000 +-117.970000,33.690000,21.000000,4112.000000,580.000000,1886.000000,581.000000,6.799000,292000.000000 +-117.970000,33.680000,26.000000,3653.000000,568.000000,1930.000000,585.000000,5.730100,260900.000000 +-117.970000,33.680000,26.000000,1616.000000,292.000000,700.000000,241.000000,5.510500,232100.000000 +-117.970000,33.680000,16.000000,4508.000000,598.000000,2221.000000,623.000000,7.373100,390800.000000 +-117.970000,33.670000,25.000000,3906.000000,660.000000,1809.000000,622.000000,5.676500,265100.000000 +-117.970000,33.670000,17.000000,4466.000000,640.000000,2166.000000,666.000000,6.979000,330700.000000 +-117.970000,33.660000,22.000000,3914.000000,600.000000,1871.000000,607.000000,5.854100,281500.000000 +-117.970000,33.660000,14.000000,6090.000000,1338.000000,1974.000000,1248.000000,2.806100,180300.000000 +-117.970000,33.650000,26.000000,2379.000000,336.000000,988.000000,346.000000,5.367400,339300.000000 +-117.970000,33.630000,25.000000,2482.000000,360.000000,960.000000,352.000000,6.157200,344000.000000 +-117.980000,35.130000,5.000000,4849.000000,920.000000,2504.000000,847.000000,3.539100,81900.000000 +-117.980000,35.100000,4.000000,923.000000,166.000000,352.000000,135.000000,4.572400,84500.000000 +-117.980000,34.530000,13.000000,2815.000000,535.000000,1492.000000,491.000000,4.094500,135700.000000 +-117.980000,34.140000,27.000000,6341.000000,1289.000000,2899.000000,1192.000000,3.633600,235200.000000 +-117.980000,34.140000,24.000000,1596.000000,388.000000,1329.000000,352.000000,3.041700,148000.000000 +-117.980000,34.130000,37.000000,1447.000000,309.000000,1279.000000,290.000000,4.008300,142900.000000 +-117.980000,34.130000,29.000000,2110.000000,460.000000,1890.000000,448.000000,3.680600,130500.000000 +-117.980000,34.090000,31.000000,3073.000000,617.000000,2640.000000,594.000000,3.500000,161300.000000 +-117.980000,34.080000,17.000000,3640.000000,830.000000,3537.000000,807.000000,3.478400,152200.000000 
+-117.980000,34.070000,28.000000,441.000000,106.000000,504.000000,108.000000,2.910700,152500.000000 +-117.980000,34.070000,15.000000,3543.000000,888.000000,3131.000000,823.000000,3.018400,139400.000000 +-117.980000,34.060000,36.000000,2391.000000,407.000000,1967.000000,398.000000,4.027400,160700.000000 +-117.980000,34.060000,33.000000,1353.000000,228.000000,1079.000000,237.000000,4.541700,160300.000000 +-117.980000,34.050000,35.000000,2342.000000,426.000000,2176.000000,416.000000,3.745400,156900.000000 +-117.980000,34.050000,33.000000,1560.000000,315.000000,1467.000000,313.000000,4.142900,159800.000000 +-117.980000,34.040000,34.000000,2547.000000,537.000000,2108.000000,498.000000,3.472200,154600.000000 +-117.980000,34.040000,29.000000,1468.000000,310.000000,1390.000000,276.000000,3.750000,190600.000000 +-117.980000,34.030000,21.000000,797.000000,162.000000,484.000000,166.000000,2.625000,191100.000000 +-117.980000,34.020000,33.000000,3512.000000,632.000000,1971.000000,598.000000,4.465300,193200.000000 +-117.980000,34.020000,32.000000,2945.000000,651.000000,2044.000000,652.000000,3.197900,183900.000000 +-117.980000,34.000000,22.000000,3632.000000,538.000000,1968.000000,566.000000,6.019000,324900.000000 +-117.980000,33.980000,27.000000,2275.000000,346.000000,1039.000000,333.000000,6.221700,333500.000000 +-117.980000,33.940000,36.000000,4297.000000,717.000000,2038.000000,700.000000,5.285100,258800.000000 +-117.980000,33.930000,27.000000,3142.000000,509.000000,1520.000000,503.000000,6.292400,232500.000000 +-117.980000,33.920000,27.000000,3700.000000,568.000000,1793.000000,552.000000,5.366800,219800.000000 +-117.980000,33.910000,16.000000,10621.000000,1782.000000,3836.000000,1480.000000,5.092300,257200.000000 +-117.980000,33.900000,6.000000,1537.000000,347.000000,506.000000,280.000000,4.826400,146800.000000 +-117.980000,33.870000,29.000000,1310.000000,332.000000,937.000000,294.000000,3.806800,158700.000000 +-117.980000,33.870000,25.000000,2037.000000,515.000000,1435.000000,496.000000,3.319900,188800.000000 +-117.980000,33.860000,26.000000,1240.000000,285.000000,781.000000,315.000000,4.128700,205800.000000 +-117.980000,33.860000,25.000000,1025.000000,266.000000,726.000000,183.000000,3.875000,137500.000000 +-117.980000,33.840000,35.000000,984.000000,179.000000,661.000000,199.000000,5.074700,189600.000000 +-117.980000,33.840000,33.000000,2291.000000,439.000000,1187.000000,405.000000,3.953900,191100.000000 +-117.980000,33.840000,31.000000,1252.000000,225.000000,714.000000,226.000000,4.604200,220700.000000 +-117.980000,33.830000,17.000000,3419.000000,932.000000,2460.000000,766.000000,3.282300,228500.000000 +-117.980000,33.820000,34.000000,1290.000000,220.000000,867.000000,241.000000,5.548600,218100.000000 +-117.980000,33.820000,34.000000,1038.000000,175.000000,578.000000,174.000000,4.921900,200000.000000 +-117.980000,33.810000,35.000000,897.000000,156.000000,479.000000,161.000000,5.152000,215600.000000 +-117.980000,33.810000,28.000000,3528.000000,816.000000,2304.000000,764.000000,2.582000,181800.000000 +-117.980000,33.810000,18.000000,3751.000000,878.000000,2281.000000,815.000000,3.720100,183100.000000 +-117.980000,33.800000,35.000000,2114.000000,341.000000,1077.000000,343.000000,5.487600,227500.000000 +-117.980000,33.800000,32.000000,2161.000000,432.000000,1503.000000,402.000000,4.303600,191400.000000 +-117.980000,33.790000,35.000000,2356.000000,478.000000,1659.000000,480.000000,4.111500,179700.000000 
+-117.980000,33.780000,22.000000,4255.000000,971.000000,2901.000000,920.000000,3.263600,180200.000000 +-117.980000,33.770000,7.000000,2252.000000,570.000000,1576.000000,550.000000,3.633300,169800.000000 +-117.980000,33.770000,22.000000,3236.000000,673.000000,2034.000000,662.000000,4.095500,174200.000000 +-117.980000,33.760000,29.000000,1518.000000,312.000000,1086.000000,317.000000,4.320000,196900.000000 +-117.980000,33.760000,28.000000,3215.000000,652.000000,2066.000000,636.000000,4.019400,197400.000000 +-117.980000,33.760000,23.000000,1553.000000,518.000000,1988.000000,474.000000,2.137500,150000.000000 +-117.980000,33.750000,37.000000,1264.000000,274.000000,783.000000,273.000000,3.343800,199600.000000 +-117.980000,33.750000,27.000000,2343.000000,415.000000,1537.000000,426.000000,5.134500,210600.000000 +-117.980000,33.750000,24.000000,3865.000000,802.000000,2670.000000,772.000000,3.815800,180000.000000 +-117.980000,33.740000,29.000000,3443.000000,635.000000,2257.000000,620.000000,4.740400,207500.000000 +-117.980000,33.740000,16.000000,4636.000000,908.000000,2341.000000,825.000000,4.426100,304700.000000 +-117.980000,33.730000,18.000000,3833.000000,1056.000000,2192.000000,996.000000,3.467900,219700.000000 +-117.980000,33.720000,28.000000,3109.000000,561.000000,1891.000000,562.000000,5.265500,243100.000000 +-117.980000,33.720000,24.000000,2826.000000,547.000000,1738.000000,546.000000,6.049400,240400.000000 +-117.980000,33.710000,26.000000,1905.000000,373.000000,1098.000000,368.000000,4.861100,229600.000000 +-117.980000,33.710000,24.000000,3430.000000,548.000000,1601.000000,512.000000,5.682500,264600.000000 +-117.980000,33.710000,24.000000,2308.000000,464.000000,1101.000000,407.000000,4.476600,230000.000000 +-117.980000,33.700000,24.000000,3451.000000,504.000000,1736.000000,493.000000,6.374900,278000.000000 +-117.980000,33.700000,17.000000,1997.000000,340.000000,952.000000,341.000000,4.414800,239200.000000 +-117.980000,33.700000,17.000000,1989.000000,411.000000,1401.000000,453.000000,4.160300,160500.000000 +-117.980000,33.700000,16.000000,5127.000000,631.000000,2142.000000,596.000000,7.819500,390500.000000 +-117.980000,33.690000,22.000000,3957.000000,520.000000,1774.000000,527.000000,7.090700,350200.000000 +-117.980000,33.690000,16.000000,2437.000000,438.000000,986.000000,422.000000,5.711700,247200.000000 +-117.980000,33.680000,24.000000,4177.000000,624.000000,1704.000000,606.000000,6.247300,281900.000000 +-117.980000,33.680000,14.000000,3396.000000,477.000000,1542.000000,472.000000,7.398200,369100.000000 +-117.980000,33.670000,7.000000,5664.000000,1174.000000,2493.000000,1101.000000,5.825200,264700.000000 +-117.980000,33.650000,22.000000,3592.000000,527.000000,1598.000000,523.000000,6.550100,294900.000000 +-117.980000,33.650000,22.000000,3335.000000,754.000000,1500.000000,719.000000,3.731500,197900.000000 +-117.980000,33.650000,18.000000,1027.000000,206.000000,436.000000,180.000000,4.215900,211300.000000 +-117.980000,33.640000,20.000000,1851.000000,495.000000,792.000000,363.000000,3.818700,137500.000000 +-117.980000,33.610000,17.000000,2054.000000,291.000000,836.000000,288.000000,6.893900,383900.000000 +-117.990000,35.160000,15.000000,2180.000000,416.000000,960.000000,370.000000,2.875000,87800.000000 +-117.990000,34.180000,38.000000,2981.000000,432.000000,1063.000000,437.000000,6.525400,365000.000000 +-117.990000,34.160000,40.000000,3838.000000,696.000000,1851.000000,674.000000,4.240700,262000.000000 
+-117.990000,34.150000,44.000000,2492.000000,611.000000,1951.000000,596.000000,3.130400,185600.000000 +-117.990000,34.140000,30.000000,2346.000000,567.000000,1988.000000,474.000000,2.562500,153000.000000 +-117.990000,34.130000,37.000000,1568.000000,371.000000,1618.000000,350.000000,2.960500,129400.000000 +-117.990000,34.120000,35.000000,1040.000000,231.000000,1040.000000,242.000000,2.539500,139200.000000 +-117.990000,34.070000,35.000000,1681.000000,360.000000,1648.000000,373.000000,2.491100,145900.000000 +-117.990000,34.060000,32.000000,2491.000000,616.000000,2660.000000,595.000000,2.564000,145800.000000 +-117.990000,34.050000,35.000000,1792.000000,317.000000,1441.000000,306.000000,3.791700,151100.000000 +-117.990000,34.040000,30.000000,4468.000000,959.000000,4027.000000,938.000000,3.185000,168300.000000 +-117.990000,34.000000,26.000000,2988.000000,397.000000,1371.000000,415.000000,6.698800,382500.000000 +-117.990000,33.980000,18.000000,8399.000000,1144.000000,3727.000000,1107.000000,6.969500,360400.000000 +-117.990000,33.970000,18.000000,4078.000000,484.000000,1490.000000,482.000000,10.803400,500001.000000 +-117.990000,33.950000,30.000000,2217.000000,284.000000,851.000000,291.000000,10.483500,498600.000000 +-117.990000,33.940000,34.000000,1519.000000,301.000000,758.000000,304.000000,4.312500,214000.000000 +-117.990000,33.940000,30.000000,2395.000000,565.000000,1214.000000,521.000000,3.704500,212300.000000 +-117.990000,33.930000,33.000000,2299.000000,431.000000,1049.000000,447.000000,3.645800,208100.000000 +-117.990000,33.920000,27.000000,5805.000000,1152.000000,3106.000000,1144.000000,4.061000,222700.000000 +-117.990000,33.900000,33.000000,2161.000000,383.000000,1235.000000,383.000000,5.645400,202800.000000 +-117.990000,33.900000,30.000000,1677.000000,372.000000,1021.000000,332.000000,3.585900,199700.000000 +-117.990000,33.890000,23.000000,2111.000000,306.000000,979.000000,288.000000,8.562100,347800.000000 +-117.990000,33.890000,21.000000,5195.000000,1020.000000,2539.000000,988.000000,4.503300,160500.000000 +-117.990000,33.880000,25.000000,3401.000000,509.000000,1503.000000,498.000000,6.670400,240600.000000 +-117.990000,33.880000,15.000000,2298.000000,567.000000,1261.000000,527.000000,4.242200,159400.000000 +-117.990000,33.870000,34.000000,1239.000000,307.000000,869.000000,291.000000,3.590000,161900.000000 +-117.990000,33.870000,16.000000,1689.000000,499.000000,1260.000000,453.000000,3.120500,174000.000000 +-117.990000,33.860000,36.000000,1138.000000,228.000000,725.000000,219.000000,3.416700,187200.000000 +-117.990000,33.860000,20.000000,3540.000000,906.000000,2898.000000,876.000000,3.025200,178000.000000 +-117.990000,33.860000,20.000000,2303.000000,612.000000,1607.000000,564.000000,2.900000,176100.000000 +-117.990000,33.850000,35.000000,1661.000000,272.000000,949.000000,276.000000,5.254800,192600.000000 +-117.990000,33.850000,34.000000,1948.000000,306.000000,957.000000,304.000000,4.977700,212600.000000 +-117.990000,33.840000,34.000000,2079.000000,343.000000,1379.000000,352.000000,5.103000,207000.000000 +-117.990000,33.840000,31.000000,2982.000000,547.000000,1895.000000,570.000000,4.911500,255500.000000 +-117.990000,33.830000,35.000000,1484.000000,252.000000,916.000000,248.000000,5.265700,191400.000000 +-117.990000,33.820000,33.000000,2342.000000,475.000000,1367.000000,509.000000,4.116700,215500.000000 +-117.990000,33.820000,21.000000,2281.000000,557.000000,1510.000000,460.000000,2.862500,189600.000000 
+-117.990000,33.820000,19.000000,1991.000000,528.000000,1202.000000,460.000000,3.153800,252100.000000 +-117.990000,33.810000,23.000000,3284.000000,795.000000,3257.000000,758.000000,2.452600,182900.000000 +-117.990000,33.800000,25.000000,3179.000000,639.000000,2526.000000,623.000000,3.328100,180800.000000 +-117.990000,33.800000,18.000000,383.000000,94.000000,487.000000,98.000000,3.975000,162500.000000 +-117.990000,33.790000,33.000000,2064.000000,324.000000,1384.000000,315.000000,4.526300,169000.000000 +-117.990000,33.790000,29.000000,2470.000000,560.000000,1589.000000,513.000000,3.180100,190500.000000 +-117.990000,33.790000,21.000000,2695.000000,707.000000,1888.000000,683.000000,3.285700,213300.000000 +-117.990000,33.780000,15.000000,4273.000000,993.000000,2300.000000,946.000000,3.531300,213000.000000 +-117.990000,33.770000,29.000000,1312.000000,267.000000,922.000000,255.000000,3.190200,202400.000000 +-117.990000,33.770000,15.000000,2081.000000,531.000000,1617.000000,561.000000,3.495500,160900.000000 +-117.990000,33.760000,30.000000,1572.000000,362.000000,1351.000000,359.000000,3.369000,190900.000000 +-117.990000,33.760000,17.000000,2545.000000,737.000000,1468.000000,699.000000,1.943900,177700.000000 +-117.990000,33.750000,30.000000,1859.000000,462.000000,1314.000000,418.000000,3.090900,184400.000000 +-117.990000,33.750000,22.000000,3024.000000,754.000000,2357.000000,743.000000,3.312500,191800.000000 +-117.990000,33.740000,26.000000,4065.000000,741.000000,1960.000000,739.000000,4.506000,240000.000000 +-117.990000,33.730000,20.000000,3182.000000,884.000000,1770.000000,817.000000,3.191200,220800.000000 +-117.990000,33.730000,17.000000,5239.000000,1045.000000,2440.000000,985.000000,4.375000,248100.000000 +-117.990000,33.720000,26.000000,1787.000000,275.000000,801.000000,270.000000,5.551400,255700.000000 +-117.990000,33.720000,17.000000,2801.000000,649.000000,1473.000000,535.000000,4.287500,134800.000000 +-117.990000,33.720000,14.000000,2127.000000,537.000000,1338.000000,475.000000,3.628000,188500.000000 +-117.990000,33.710000,19.000000,1967.000000,487.000000,1251.000000,404.000000,3.669600,218800.000000 +-117.990000,33.710000,18.000000,1994.000000,578.000000,3031.000000,577.000000,2.761400,237500.000000 +-117.990000,33.710000,17.000000,1600.000000,458.000000,1803.000000,432.000000,2.786500,216700.000000 +-117.990000,33.700000,25.000000,2017.000000,357.000000,1063.000000,369.000000,4.034500,229400.000000 +-117.990000,33.690000,17.000000,3386.000000,729.000000,1715.000000,666.000000,3.747900,213000.000000 +-117.990000,33.690000,16.000000,1476.000000,294.000000,886.000000,270.000000,5.325900,216400.000000 +-117.990000,33.680000,18.000000,2024.000000,462.000000,1047.000000,451.000000,3.584800,186900.000000 +-117.990000,33.680000,14.000000,3305.000000,841.000000,2272.000000,769.000000,3.489900,216700.000000 +-117.990000,33.680000,13.000000,4000.000000,883.000000,1999.000000,881.000000,4.724500,273600.000000 +-117.990000,33.670000,17.000000,1692.000000,427.000000,903.000000,423.000000,3.585900,262500.000000 +-117.990000,33.670000,15.000000,3141.000000,664.000000,1729.000000,633.000000,4.216500,234600.000000 +-117.990000,33.670000,12.000000,2228.000000,479.000000,1122.000000,488.000000,4.038500,350000.000000 +-117.990000,33.660000,29.000000,1330.000000,293.000000,613.000000,236.000000,4.659100,353100.000000 +-117.990000,33.660000,14.000000,3155.000000,653.000000,951.000000,575.000000,3.062500,268800.000000 
+-118.000000,35.050000,21.000000,1739.000000,425.000000,945.000000,362.000000,3.401500,86500.000000 +-118.000000,34.160000,52.000000,1354.000000,227.000000,531.000000,206.000000,4.805900,270600.000000 +-118.000000,34.160000,42.000000,1020.000000,156.000000,398.000000,157.000000,6.101000,311800.000000 +-118.000000,34.150000,48.000000,3436.000000,673.000000,1540.000000,648.000000,4.275000,256800.000000 +-118.000000,34.140000,39.000000,1302.000000,303.000000,800.000000,291.000000,3.272300,166900.000000 +-118.000000,34.140000,31.000000,1298.000000,431.000000,1131.000000,425.000000,1.054800,178100.000000 +-118.000000,34.130000,35.000000,1005.000000,224.000000,742.000000,221.000000,3.548100,158100.000000 +-118.000000,34.130000,24.000000,2584.000000,520.000000,1869.000000,503.000000,3.284100,167000.000000 +-118.000000,34.120000,42.000000,870.000000,170.000000,546.000000,164.000000,4.625000,173800.000000 +-118.000000,34.120000,37.000000,1340.000000,325.000000,928.000000,333.000000,3.921900,175000.000000 +-118.000000,34.100000,34.000000,2249.000000,460.000000,1544.000000,441.000000,3.400500,176300.000000 +-118.000000,34.080000,29.000000,2003.000000,401.000000,1520.000000,364.000000,3.994000,195300.000000 +-118.000000,34.080000,23.000000,1627.000000,318.000000,1279.000000,289.000000,4.646700,185100.000000 +-118.000000,34.070000,34.000000,1696.000000,456.000000,1609.000000,426.000000,2.250000,138500.000000 +-118.000000,34.030000,25.000000,6909.000000,1154.000000,3912.000000,1121.000000,5.257000,226100.000000 +-118.000000,33.970000,30.000000,6540.000000,991.000000,3124.000000,953.000000,6.066300,372600.000000 +-118.000000,33.950000,35.000000,1431.000000,210.000000,505.000000,213.000000,6.810900,401000.000000 +-118.000000,33.940000,37.000000,903.000000,158.000000,444.000000,158.000000,3.750000,174400.000000 +-118.000000,33.940000,36.000000,2911.000000,534.000000,1395.000000,486.000000,5.173800,203700.000000 +-118.000000,33.940000,35.000000,2603.000000,482.000000,1305.000000,507.000000,3.954300,214400.000000 +-118.000000,33.930000,35.000000,802.000000,153.000000,445.000000,150.000000,5.007700,185000.000000 +-118.000000,33.910000,19.000000,5166.000000,770.000000,2374.000000,753.000000,5.979000,285200.000000 +-118.000000,33.890000,35.000000,1065.000000,176.000000,574.000000,171.000000,5.038400,200800.000000 +-118.000000,33.890000,35.000000,1011.000000,183.000000,578.000000,171.000000,3.986100,188700.000000 +-118.000000,33.890000,34.000000,1932.000000,315.000000,1053.000000,316.000000,5.137700,213300.000000 +-118.000000,33.880000,28.000000,1624.000000,289.000000,755.000000,280.000000,4.708300,268100.000000 +-118.000000,33.880000,18.000000,2628.000000,720.000000,2276.000000,649.000000,2.735000,170800.000000 +-118.000000,33.870000,13.000000,2086.000000,544.000000,1356.000000,462.000000,2.950000,165600.000000 +-118.000000,33.860000,32.000000,1162.000000,196.000000,563.000000,178.000000,3.875000,203000.000000 +-118.000000,33.850000,34.000000,1078.000000,205.000000,575.000000,206.000000,4.508300,188000.000000 +-118.000000,33.850000,33.000000,2053.000000,418.000000,1154.000000,405.000000,4.045500,197200.000000 +-118.000000,33.840000,30.000000,1549.000000,325.000000,885.000000,299.000000,4.003900,195100.000000 +-118.000000,33.840000,29.000000,2641.000000,637.000000,2413.000000,619.000000,2.816900,165100.000000 +-118.000000,33.830000,26.000000,1718.000000,385.000000,1022.000000,368.000000,3.933300,196100.000000 +-118.000000,33.830000,24.000000,2578.000000,580.000000,1217.000000,529.000000,2.240100,212500.000000 
+-118.000000,33.820000,24.000000,3002.000000,644.000000,1495.000000,634.000000,3.108700,202800.000000 +-118.000000,33.820000,21.000000,2253.000000,580.000000,1536.000000,500.000000,3.232600,204700.000000 +-118.000000,33.820000,18.000000,2947.000000,559.000000,1820.000000,551.000000,4.529400,224800.000000 +-118.000000,33.810000,22.000000,2642.000000,640.000000,1702.000000,588.000000,3.526800,174700.000000 +-118.000000,33.810000,17.000000,2142.000000,436.000000,946.000000,412.000000,3.705900,146300.000000 +-118.000000,33.810000,17.000000,1530.000000,404.000000,883.000000,344.000000,2.883500,196500.000000 +-118.000000,33.810000,13.000000,2782.000000,605.000000,1749.000000,628.000000,4.127600,153800.000000 +-118.000000,33.790000,18.000000,3679.000000,694.000000,1820.000000,652.000000,3.653100,143500.000000 +-118.000000,33.770000,28.000000,2401.000000,503.000000,1155.000000,456.000000,3.513900,211700.000000 +-118.000000,33.770000,24.000000,1324.000000,267.000000,687.000000,264.000000,3.432700,192800.000000 +-118.000000,33.760000,29.000000,1982.000000,503.000000,1426.000000,502.000000,3.026300,194200.000000 +-118.000000,33.760000,26.000000,1876.000000,455.000000,1499.000000,436.000000,2.925000,176000.000000 +-118.000000,33.760000,14.000000,1120.000000,319.000000,982.000000,307.000000,2.908300,179200.000000 +-118.000000,33.760000,12.000000,1250.000000,331.000000,1047.000000,334.000000,3.062500,208800.000000 +-118.000000,33.750000,26.000000,1382.000000,387.000000,1977.000000,368.000000,2.758900,137500.000000 +-118.000000,33.740000,25.000000,2767.000000,346.000000,1148.000000,372.000000,6.394000,316700.000000 +-118.000000,33.730000,26.000000,2236.000000,280.000000,809.000000,282.000000,6.739500,342800.000000 +-118.000000,33.710000,19.000000,4808.000000,1029.000000,2422.000000,971.000000,4.012100,279700.000000 +-118.000000,33.680000,12.000000,5241.000000,985.000000,2048.000000,943.000000,6.485800,285400.000000 +-118.000000,33.660000,25.000000,4041.000000,903.000000,1689.000000,784.000000,4.228900,442700.000000 +-118.000000,33.660000,16.000000,2809.000000,708.000000,1260.000000,638.000000,3.235300,252900.000000 +-118.010000,35.120000,15.000000,1926.000000,361.000000,917.000000,316.000000,3.388900,68500.000000 +-118.010000,34.550000,2.000000,2701.000000,530.000000,1368.000000,430.000000,4.071000,137400.000000 +-118.010000,34.160000,47.000000,1745.000000,270.000000,753.000000,275.000000,5.532000,318500.000000 +-118.010000,34.150000,52.000000,2234.000000,472.000000,986.000000,439.000000,3.912500,265500.000000 +-118.010000,34.150000,32.000000,6597.000000,1579.000000,3689.000000,1459.000000,3.237700,184100.000000 +-118.010000,34.130000,38.000000,3374.000000,671.000000,1906.000000,640.000000,4.072900,212300.000000 +-118.010000,34.130000,36.000000,1332.000000,217.000000,648.000000,203.000000,4.715900,365900.000000 +-118.010000,34.120000,43.000000,1185.000000,207.000000,657.000000,198.000000,4.549100,214800.000000 +-118.010000,34.120000,32.000000,1937.000000,332.000000,922.000000,340.000000,3.940000,278400.000000 +-118.010000,34.110000,43.000000,1858.000000,345.000000,1054.000000,329.000000,3.562500,211600.000000 +-118.010000,34.110000,36.000000,1722.000000,320.000000,794.000000,322.000000,4.213200,212200.000000 +-118.010000,34.110000,32.000000,1978.000000,536.000000,826.000000,470.000000,2.511400,212200.000000 +-118.010000,34.100000,35.000000,2120.000000,412.000000,1375.000000,405.000000,3.460900,166300.000000 
+-118.010000,34.100000,27.000000,2424.000000,542.000000,1713.000000,557.000000,3.808500,181400.000000 +-118.010000,34.090000,32.000000,1613.000000,361.000000,1283.000000,404.000000,3.194400,181700.000000 +-118.010000,34.080000,35.000000,1852.000000,358.000000,1414.000000,347.000000,4.275000,173600.000000 +-118.010000,34.070000,24.000000,5684.000000,1485.000000,6626.000000,1481.000000,2.255900,166800.000000 +-118.010000,34.070000,22.000000,6311.000000,1572.000000,6666.000000,1456.000000,2.933400,182600.000000 +-118.010000,34.060000,26.000000,557.000000,153.000000,455.000000,196.000000,2.772100,155400.000000 +-118.010000,34.050000,37.000000,682.000000,172.000000,813.000000,173.000000,3.812500,138000.000000 +-118.010000,34.050000,31.000000,1135.000000,355.000000,1717.000000,368.000000,2.160200,161700.000000 +-118.010000,33.970000,36.000000,1451.000000,224.000000,608.000000,246.000000,6.064800,290800.000000 +-118.010000,33.960000,36.000000,1805.000000,288.000000,882.000000,308.000000,5.305400,273500.000000 +-118.010000,33.950000,36.000000,1579.000000,290.000000,816.000000,276.000000,4.431800,181100.000000 +-118.010000,33.950000,35.000000,1755.000000,322.000000,774.000000,290.000000,5.086100,296700.000000 +-118.010000,33.940000,36.000000,1921.000000,329.000000,969.000000,327.000000,4.919100,188700.000000 +-118.010000,33.940000,35.000000,1323.000000,235.000000,807.000000,247.000000,4.270800,174800.000000 +-118.010000,33.930000,34.000000,2424.000000,468.000000,1293.000000,444.000000,3.275000,189900.000000 +-118.010000,33.930000,31.000000,3395.000000,742.000000,1886.000000,737.000000,4.411800,174400.000000 +-118.010000,33.920000,34.000000,4039.000000,694.000000,2269.000000,663.000000,5.230500,205100.000000 +-118.010000,33.910000,32.000000,2722.000000,571.000000,2541.000000,462.000000,4.230500,221400.000000 +-118.010000,33.900000,36.000000,1382.000000,257.000000,685.000000,255.000000,5.125000,211700.000000 +-118.010000,33.900000,26.000000,2968.000000,674.000000,1655.000000,628.000000,4.609400,201000.000000 +-118.010000,33.890000,36.000000,1589.000000,265.000000,804.000000,272.000000,4.635400,202900.000000 +-118.010000,33.890000,34.000000,1653.000000,292.000000,1003.000000,310.000000,4.600000,203400.000000 +-118.010000,33.890000,33.000000,2046.000000,327.000000,1018.000000,320.000000,4.229200,212800.000000 +-118.010000,33.870000,25.000000,6348.000000,1615.000000,4188.000000,1497.000000,3.139000,185700.000000 +-118.010000,33.860000,29.000000,2307.000000,452.000000,1218.000000,402.000000,3.430600,194200.000000 +-118.010000,33.850000,29.000000,3061.000000,612.000000,2396.000000,640.000000,4.632600,195200.000000 +-118.010000,33.840000,28.000000,4097.000000,838.000000,2112.000000,803.000000,4.500000,202100.000000 +-118.010000,33.830000,29.000000,3963.000000,772.000000,2104.000000,743.000000,4.980300,208600.000000 +-118.010000,33.830000,24.000000,4639.000000,1374.000000,3093.000000,1257.000000,2.557700,202300.000000 +-118.010000,33.830000,23.000000,1086.000000,268.000000,825.000000,250.000000,2.460900,219600.000000 +-118.010000,33.820000,31.000000,1960.000000,380.000000,1356.000000,356.000000,4.062500,225900.000000 +-118.010000,33.820000,10.000000,3897.000000,893.000000,1992.000000,693.000000,4.159100,192300.000000 +-118.010000,33.810000,25.000000,1831.000000,345.000000,809.000000,339.000000,4.517900,177100.000000 +-118.010000,33.810000,18.000000,5238.000000,1083.000000,3032.000000,1065.000000,4.458300,190100.000000 
+-118.010000,33.800000,16.000000,4021.000000,701.000000,1488.000000,650.000000,5.320000,219500.000000 +-118.010000,33.790000,30.000000,2460.000000,403.000000,1277.000000,395.000000,5.437200,223200.000000 +-118.010000,33.790000,23.000000,2663.000000,430.000000,1499.000000,403.000000,5.783700,258000.000000 +-118.010000,33.780000,26.000000,2343.000000,377.000000,1166.000000,373.000000,6.000000,233100.000000 +-118.010000,33.780000,19.000000,2648.000000,478.000000,1160.000000,452.000000,5.935700,207400.000000 +-118.010000,33.770000,33.000000,1387.000000,238.000000,890.000000,264.000000,5.422000,204100.000000 +-118.010000,33.770000,32.000000,1771.000000,296.000000,995.000000,272.000000,5.836200,217500.000000 +-118.010000,33.760000,35.000000,2072.000000,349.000000,1249.000000,317.000000,3.985500,191900.000000 +-118.010000,33.760000,26.000000,2141.000000,597.000000,2038.000000,585.000000,2.298100,177700.000000 +-118.010000,33.750000,30.000000,3380.000000,722.000000,2269.000000,652.000000,4.525000,186000.000000 +-118.010000,33.740000,25.000000,8110.000000,1264.000000,3613.000000,1232.000000,6.060900,264900.000000 +-118.010000,33.730000,23.000000,4095.000000,578.000000,1766.000000,589.000000,6.741800,302500.000000 +-118.010000,33.710000,18.000000,6565.000000,1357.000000,3079.000000,1248.000000,4.751500,295600.000000 +-118.010000,33.700000,24.000000,3856.000000,567.000000,1741.000000,588.000000,7.248000,302700.000000 +-118.010000,33.690000,3.000000,945.000000,115.000000,337.000000,123.000000,11.519900,500001.000000 +-118.010000,33.670000,16.000000,3581.000000,780.000000,1644.000000,774.000000,5.041000,397600.000000 +-118.010000,33.670000,13.000000,2902.000000,536.000000,1125.000000,490.000000,5.888000,447700.000000 +-118.010000,33.660000,19.000000,4559.000000,1045.000000,1949.000000,910.000000,4.355000,429200.000000 +-118.020000,34.620000,38.000000,248.000000,55.000000,261.000000,53.000000,2.141300,96900.000000 +-118.020000,34.570000,4.000000,10655.000000,1706.000000,5391.000000,1529.000000,5.083000,151300.000000 +-118.020000,34.170000,32.000000,3868.000000,548.000000,1558.000000,528.000000,9.466700,500001.000000 +-118.020000,34.150000,44.000000,2267.000000,426.000000,980.000000,372.000000,3.600000,307400.000000 +-118.020000,34.140000,34.000000,1077.000000,257.000000,478.000000,199.000000,2.631600,252800.000000 +-118.020000,34.140000,31.000000,6854.000000,1578.000000,4131.000000,1524.000000,3.587800,222800.000000 +-118.020000,34.130000,32.000000,3308.000000,718.000000,1803.000000,667.000000,3.946400,273600.000000 +-118.020000,34.120000,37.000000,2250.000000,360.000000,989.000000,329.000000,6.153600,366000.000000 +-118.020000,34.120000,36.000000,1471.000000,246.000000,751.000000,230.000000,5.455500,395100.000000 +-118.020000,34.110000,39.000000,1504.000000,280.000000,718.000000,261.000000,4.625000,219000.000000 +-118.020000,34.110000,35.000000,2454.000000,458.000000,1110.000000,435.000000,3.802900,414800.000000 +-118.020000,34.100000,36.000000,452.000000,80.000000,248.000000,83.000000,1.968800,226000.000000 +-118.020000,34.100000,36.000000,1928.000000,361.000000,1008.000000,368.000000,4.733000,233700.000000 +-118.020000,34.090000,32.000000,1747.000000,399.000000,1199.000000,402.000000,3.428600,191800.000000 +-118.020000,34.090000,24.000000,2080.000000,514.000000,1976.000000,478.000000,2.691700,170000.000000 +-118.020000,34.080000,29.000000,2741.000000,667.000000,2449.000000,677.000000,3.694400,175200.000000 
+-118.020000,34.070000,21.000000,3245.000000,959.000000,3528.000000,887.000000,2.323600,156300.000000 +-118.020000,34.060000,26.000000,2929.000000,970.000000,3792.000000,817.000000,2.257700,173800.000000 +-118.020000,34.050000,34.000000,1610.000000,513.000000,2050.000000,508.000000,2.556200,156300.000000 +-118.020000,34.050000,33.000000,2464.000000,627.000000,2932.000000,568.000000,3.062500,165800.000000 +-118.020000,34.050000,28.000000,991.000000,255.000000,1145.000000,265.000000,2.361100,167000.000000 +-118.020000,34.040000,28.000000,6175.000000,1449.000000,5041.000000,1408.000000,2.882100,158100.000000 +-118.020000,33.980000,23.000000,1995.000000,306.000000,707.000000,293.000000,8.667700,332700.000000 +-118.020000,33.970000,34.000000,1903.000000,293.000000,887.000000,306.000000,6.148000,313800.000000 +-118.020000,33.960000,36.000000,2071.000000,398.000000,988.000000,404.000000,4.622600,219700.000000 +-118.020000,33.960000,36.000000,2002.000000,361.000000,913.000000,311.000000,4.544600,244700.000000 +-118.020000,33.950000,38.000000,2139.000000,426.000000,1138.000000,412.000000,4.291700,168900.000000 +-118.020000,33.950000,36.000000,1705.000000,299.000000,871.000000,296.000000,4.618400,179800.000000 +-118.020000,33.950000,36.000000,1681.000000,329.000000,964.000000,311.000000,4.108000,181200.000000 +-118.020000,33.950000,36.000000,1632.000000,295.000000,797.000000,283.000000,4.229200,179500.000000 +-118.020000,33.940000,23.000000,4815.000000,1081.000000,3232.000000,1016.000000,3.488000,191800.000000 +-118.020000,33.930000,35.000000,2400.000000,398.000000,1218.000000,408.000000,4.131200,193800.000000 +-118.020000,33.930000,33.000000,4711.000000,988.000000,2984.000000,931.000000,3.602800,184700.000000 +-118.020000,33.920000,34.000000,2169.000000,418.000000,1169.000000,406.000000,3.222200,218700.000000 +-118.020000,33.910000,35.000000,2182.000000,390.000000,1248.000000,399.000000,5.423600,216700.000000 +-118.020000,33.910000,35.000000,1337.000000,234.000000,692.000000,235.000000,5.115500,213700.000000 +-118.020000,33.900000,36.000000,2417.000000,421.000000,1276.000000,426.000000,5.560100,205200.000000 +-118.020000,33.900000,34.000000,2678.000000,511.000000,1540.000000,497.000000,4.495400,202900.000000 +-118.020000,33.900000,34.000000,1636.000000,358.000000,977.000000,357.000000,3.593800,209900.000000 +-118.020000,33.890000,36.000000,1375.000000,242.000000,670.000000,221.000000,5.083900,198200.000000 +-118.020000,33.860000,26.000000,2342.000000,383.000000,1290.000000,394.000000,5.667700,220700.000000 +-118.020000,33.850000,31.000000,1922.000000,329.000000,1030.000000,353.000000,5.341600,213000.000000 +-118.020000,33.840000,35.000000,3473.000000,563.000000,2091.000000,580.000000,4.482100,214100.000000 +-118.020000,33.810000,34.000000,3482.000000,614.000000,2227.000000,641.000000,5.115500,200900.000000 +-118.020000,33.800000,24.000000,84.000000,14.000000,32.000000,8.000000,5.875000,193800.000000 +-118.020000,33.790000,23.000000,6368.000000,1030.000000,3281.000000,1001.000000,6.114200,240400.000000 +-118.020000,33.780000,28.000000,3375.000000,559.000000,1754.000000,554.000000,5.544600,228900.000000 +-118.020000,33.770000,34.000000,2115.000000,352.000000,1253.000000,338.000000,5.150700,207500.000000 +-118.020000,33.760000,27.000000,2905.000000,587.000000,1781.000000,561.000000,4.250000,214800.000000 +-118.020000,33.760000,25.000000,1759.000000,404.000000,1404.000000,385.000000,3.628900,195800.000000 
+-118.020000,33.750000,26.000000,2989.000000,479.000000,1596.000000,475.000000,5.715700,231200.000000 +-118.020000,33.740000,26.000000,3842.000000,609.000000,1961.000000,595.000000,6.128000,248200.000000 +-118.020000,33.730000,26.000000,3711.000000,610.000000,1902.000000,597.000000,5.559900,234100.000000 +-118.020000,33.730000,24.000000,6393.000000,1141.000000,2743.000000,1057.000000,5.138400,336900.000000 +-118.020000,33.720000,22.000000,8844.000000,1706.000000,4404.000000,1594.000000,4.445300,267800.000000 +-118.020000,33.710000,23.000000,5554.000000,995.000000,2408.000000,936.000000,5.388600,331900.000000 +-118.020000,33.650000,38.000000,2548.000000,646.000000,755.000000,399.000000,2.835200,408300.000000 +-118.020000,33.620000,11.000000,3969.000000,834.000000,1508.000000,754.000000,4.340900,271400.000000 +-118.030000,34.580000,4.000000,9849.000000,1780.000000,4546.000000,1598.000000,4.072900,154300.000000 +-118.030000,34.160000,39.000000,2731.000000,366.000000,1034.000000,338.000000,9.809800,500001.000000 +-118.030000,34.160000,36.000000,1640.000000,239.000000,693.000000,253.000000,6.688800,500001.000000 +-118.030000,34.150000,43.000000,1694.000000,283.000000,674.000000,267.000000,4.179700,486800.000000 +-118.030000,34.140000,44.000000,1446.000000,250.000000,721.000000,243.000000,4.730800,352200.000000 +-118.030000,34.140000,31.000000,4353.000000,1117.000000,2338.000000,1037.000000,3.072700,242600.000000 +-118.030000,34.110000,38.000000,2076.000000,361.000000,988.000000,332.000000,5.917500,416900.000000 +-118.030000,34.110000,34.000000,2837.000000,460.000000,1344.000000,458.000000,6.572200,437400.000000 +-118.030000,34.100000,38.000000,2301.000000,416.000000,1079.000000,398.000000,4.423600,233600.000000 +-118.030000,34.100000,32.000000,2668.000000,609.000000,1512.000000,541.000000,2.942200,233100.000000 +-118.030000,34.100000,31.000000,2647.000000,539.000000,1473.000000,520.000000,3.940000,223900.000000 +-118.030000,34.100000,30.000000,2773.000000,634.000000,1376.000000,540.000000,2.785700,201700.000000 +-118.030000,34.090000,29.000000,1219.000000,338.000000,1152.000000,323.000000,2.802900,180900.000000 +-118.030000,34.080000,42.000000,1597.000000,373.000000,1311.000000,352.000000,2.968800,162800.000000 +-118.030000,34.080000,37.000000,775.000000,179.000000,726.000000,183.000000,3.250000,159200.000000 +-118.030000,34.080000,32.000000,1780.000000,484.000000,1732.000000,454.000000,2.446400,169600.000000 +-118.030000,34.070000,37.000000,1091.000000,269.000000,905.000000,242.000000,3.104200,152000.000000 +-118.030000,34.060000,36.000000,1018.000000,305.000000,1307.000000,292.000000,2.145300,162100.000000 +-118.030000,34.060000,31.000000,1513.000000,389.000000,1396.000000,364.000000,2.470600,170600.000000 +-118.030000,34.060000,27.000000,2510.000000,783.000000,3481.000000,726.000000,2.487500,157800.000000 +-118.030000,34.060000,24.000000,2469.000000,731.000000,3818.000000,712.000000,2.544500,151400.000000 +-118.030000,34.060000,24.000000,2343.000000,834.000000,3537.000000,824.000000,2.109400,135200.000000 +-118.030000,34.050000,36.000000,1345.000000,331.000000,1511.000000,309.000000,3.512900,142300.000000 +-118.030000,34.010000,10.000000,6531.000000,1036.000000,2975.000000,1018.000000,6.231900,403700.000000 +-118.030000,33.990000,52.000000,2792.000000,461.000000,1177.000000,439.000000,3.431200,243800.000000 +-118.030000,33.980000,46.000000,1974.000000,465.000000,880.000000,441.000000,2.757800,236800.000000 
+-118.030000,33.970000,39.000000,2126.000000,434.000000,1103.000000,433.000000,3.285200,196200.000000 +-118.030000,33.970000,39.000000,1996.000000,389.000000,1029.000000,387.000000,4.650000,224300.000000 +-118.030000,33.970000,36.000000,2149.000000,527.000000,1359.000000,481.000000,2.824000,167900.000000 +-118.030000,33.970000,36.000000,1601.000000,290.000000,715.000000,284.000000,4.815200,232400.000000 +-118.030000,33.960000,37.000000,1745.000000,365.000000,1022.000000,368.000000,4.053600,171400.000000 +-118.030000,33.950000,37.000000,1772.000000,321.000000,934.000000,326.000000,4.147100,177800.000000 +-118.030000,33.950000,34.000000,1882.000000,428.000000,1034.000000,375.000000,3.650900,173200.000000 +-118.030000,33.940000,37.000000,1699.000000,302.000000,889.000000,271.000000,4.354200,179800.000000 +-118.030000,33.940000,34.000000,1748.000000,386.000000,917.000000,378.000000,3.479200,169000.000000 +-118.030000,33.940000,30.000000,2572.000000,521.000000,1564.000000,501.000000,3.486100,177200.000000 +-118.030000,33.930000,22.000000,3382.000000,800.000000,2688.000000,784.000000,3.875000,164700.000000 +-118.030000,33.920000,35.000000,2108.000000,405.000000,1243.000000,394.000000,3.673100,167000.000000 +-118.030000,33.920000,30.000000,1414.000000,332.000000,1307.000000,315.000000,3.000000,158300.000000 +-118.030000,33.910000,32.000000,4040.000000,832.000000,2526.000000,798.000000,3.214300,160100.000000 +-118.030000,33.900000,36.000000,1143.000000,193.000000,826.000000,188.000000,5.318400,171100.000000 +-118.030000,33.900000,35.000000,1434.000000,279.000000,744.000000,252.000000,3.730800,202400.000000 +-118.030000,33.870000,16.000000,2306.000000,393.000000,1368.000000,387.000000,5.930000,277600.000000 +-118.030000,33.860000,19.000000,1795.000000,328.000000,1014.000000,322.000000,4.535000,289300.000000 +-118.030000,33.850000,30.000000,2320.000000,448.000000,1434.000000,452.000000,4.086500,203700.000000 +-118.030000,33.850000,23.000000,5495.000000,1141.000000,2873.000000,1004.000000,3.915600,224100.000000 +-118.030000,33.850000,16.000000,1831.000000,390.000000,1347.000000,389.000000,3.842600,344400.000000 +-118.030000,33.830000,34.000000,3203.000000,653.000000,2072.000000,691.000000,4.225000,198400.000000 +-118.030000,33.830000,25.000000,768.000000,195.000000,529.000000,184.000000,3.175000,132800.000000 +-118.030000,33.830000,25.000000,3030.000000,532.000000,1668.000000,509.000000,4.625000,229600.000000 +-118.030000,33.820000,17.000000,2178.000000,477.000000,1077.000000,457.000000,3.681500,245300.000000 +-118.030000,33.820000,17.000000,1851.000000,346.000000,770.000000,310.000000,5.609300,244400.000000 +-118.030000,33.810000,26.000000,3635.000000,567.000000,1779.000000,543.000000,5.708900,237400.000000 +-118.030000,33.790000,32.000000,3191.000000,634.000000,1718.000000,611.000000,4.154800,216600.000000 +-118.030000,33.790000,26.000000,5321.000000,889.000000,2932.000000,896.000000,5.891400,237600.000000 +-118.030000,33.780000,25.000000,3554.000000,528.000000,1600.000000,537.000000,6.645300,270100.000000 +-118.030000,33.770000,27.000000,2000.000000,310.000000,880.000000,294.000000,5.635000,218900.000000 +-118.030000,33.760000,32.000000,2980.000000,494.000000,1370.000000,481.000000,5.086600,223500.000000 +-118.030000,33.720000,24.000000,5203.000000,957.000000,2465.000000,946.000000,5.163000,261000.000000 +-118.030000,33.710000,26.000000,1483.000000,251.000000,738.000000,235.000000,6.000000,271400.000000 
+-118.030000,33.700000,15.000000,3244.000000,421.000000,1259.000000,413.000000,7.785400,395300.000000 +-118.040000,34.170000,52.000000,1885.000000,401.000000,764.000000,373.000000,4.038500,265700.000000 +-118.040000,34.160000,38.000000,1594.000000,249.000000,633.000000,247.000000,5.958200,350700.000000 +-118.040000,34.150000,34.000000,1523.000000,311.000000,676.000000,295.000000,3.362100,377200.000000 +-118.040000,34.130000,39.000000,2485.000000,382.000000,1072.000000,342.000000,6.087800,430200.000000 +-118.040000,34.130000,35.000000,249.000000,31.000000,268.000000,29.000000,15.000100,500001.000000 +-118.040000,34.130000,22.000000,3359.000000,643.000000,1227.000000,588.000000,4.645000,276200.000000 +-118.040000,34.120000,44.000000,2007.000000,288.000000,921.000000,307.000000,6.598900,500001.000000 +-118.040000,34.120000,39.000000,2522.000000,380.000000,1113.000000,357.000000,5.224900,445200.000000 +-118.040000,34.120000,30.000000,2170.000000,318.000000,984.000000,309.000000,5.691600,500001.000000 +-118.040000,34.110000,37.000000,1275.000000,177.000000,598.000000,174.000000,7.188500,500001.000000 +-118.040000,34.100000,39.000000,2302.000000,412.000000,1590.000000,406.000000,4.801700,273800.000000 +-118.040000,34.100000,38.000000,2317.000000,451.000000,1155.000000,426.000000,4.148800,235300.000000 +-118.040000,34.090000,34.000000,2597.000000,461.000000,1542.000000,470.000000,4.621100,248900.000000 +-118.040000,34.090000,34.000000,2001.000000,388.000000,1461.000000,397.000000,3.830400,183000.000000 +-118.040000,34.090000,32.000000,1339.000000,334.000000,817.000000,349.000000,2.833300,186000.000000 +-118.040000,34.080000,35.000000,1148.000000,258.000000,975.000000,253.000000,4.037000,173300.000000 +-118.040000,34.070000,52.000000,177.000000,59.000000,269.000000,75.000000,2.361100,131300.000000 +-118.040000,34.070000,39.000000,2451.000000,649.000000,2536.000000,648.000000,2.309800,173100.000000 +-118.040000,34.060000,39.000000,1258.000000,245.000000,988.000000,228.000000,3.213200,176100.000000 +-118.040000,34.060000,31.000000,957.000000,295.000000,1300.000000,287.000000,2.138300,153400.000000 +-118.040000,34.060000,30.000000,2019.000000,551.000000,2481.000000,484.000000,3.187500,154200.000000 +-118.040000,34.050000,34.000000,1058.000000,230.000000,1043.000000,229.000000,3.053600,137500.000000 +-118.040000,34.050000,32.000000,1252.000000,273.000000,1337.000000,263.000000,2.657900,156800.000000 +-118.040000,34.040000,35.000000,1734.000000,363.000000,1527.000000,344.000000,3.000000,160600.000000 +-118.040000,34.040000,32.000000,1619.000000,323.000000,1492.000000,342.000000,3.500000,165100.000000 +-118.040000,34.000000,30.000000,5308.000000,854.000000,2114.000000,838.000000,5.198500,279200.000000 +-118.040000,33.990000,47.000000,2530.000000,565.000000,1262.000000,509.000000,3.647500,197100.000000 +-118.040000,33.990000,36.000000,3531.000000,754.000000,1613.000000,697.000000,3.235900,198600.000000 +-118.040000,33.980000,50.000000,1951.000000,458.000000,1362.000000,454.000000,3.000000,163200.000000 +-118.040000,33.980000,43.000000,2446.000000,764.000000,1699.000000,692.000000,2.625000,163300.000000 +-118.040000,33.980000,28.000000,1617.000000,507.000000,1158.000000,486.000000,1.968800,165600.000000 +-118.040000,33.980000,25.000000,3040.000000,831.000000,1580.000000,735.000000,2.318200,182100.000000 +-118.040000,33.970000,29.000000,2376.000000,700.000000,1968.000000,680.000000,2.608200,162500.000000 +-118.040000,33.970000,25.000000,2945.000000,914.000000,2313.000000,832.000000,2.568600,177500.000000 
+-118.040000,33.960000,42.000000,1430.000000,338.000000,1269.000000,321.000000,3.321400,148800.000000 +-118.040000,33.960000,37.000000,1948.000000,395.000000,1163.000000,379.000000,3.225000,154000.000000 +-118.040000,33.950000,36.000000,2722.000000,515.000000,1390.000000,486.000000,3.821400,178500.000000 +-118.040000,33.950000,36.000000,1976.000000,368.000000,1236.000000,355.000000,4.615000,174000.000000 +-118.040000,33.950000,35.000000,1945.000000,357.000000,1227.000000,359.000000,5.216200,171900.000000 +-118.040000,33.940000,37.000000,1328.000000,273.000000,1115.000000,275.000000,4.205100,164400.000000 +-118.040000,33.940000,31.000000,3808.000000,670.000000,2430.000000,660.000000,4.625000,173900.000000 +-118.040000,33.930000,36.000000,1726.000000,332.000000,1293.000000,310.000000,4.384900,144100.000000 +-118.040000,33.930000,36.000000,1045.000000,239.000000,1165.000000,230.000000,3.197900,161800.000000 +-118.040000,33.920000,35.000000,2469.000000,522.000000,2151.000000,537.000000,3.421900,156200.000000 +-118.040000,33.920000,34.000000,1995.000000,417.000000,1573.000000,407.000000,3.490700,153500.000000 +-118.040000,33.900000,36.000000,15.000000,5.000000,15.000000,6.000000,0.499900,162500.000000 +-118.040000,33.880000,17.000000,4807.000000,838.000000,3059.000000,853.000000,5.761900,297300.000000 +-118.040000,33.870000,18.000000,4626.000000,822.000000,2794.000000,763.000000,5.691700,275100.000000 +-118.040000,33.870000,17.000000,2358.000000,396.000000,1387.000000,364.000000,6.299000,285800.000000 +-118.040000,33.860000,21.000000,2870.000000,437.000000,1671.000000,470.000000,7.262800,322700.000000 +-118.040000,33.850000,24.000000,2233.000000,347.000000,1162.000000,355.000000,5.609400,279200.000000 +-118.040000,33.840000,21.000000,6623.000000,1204.000000,3193.000000,1129.000000,4.539500,256000.000000 +-118.040000,33.830000,20.000000,1488.000000,312.000000,972.000000,283.000000,4.055000,201900.000000 +-118.040000,33.830000,19.000000,4526.000000,830.000000,2318.000000,748.000000,4.668100,320700.000000 +-118.040000,33.820000,26.000000,4105.000000,637.000000,2072.000000,648.000000,5.844000,273900.000000 +-118.040000,33.810000,27.000000,2990.000000,515.000000,1849.000000,497.000000,5.684600,216100.000000 +-118.040000,33.810000,22.000000,4057.000000,624.000000,2204.000000,643.000000,5.852700,241000.000000 +-118.040000,33.780000,26.000000,3642.000000,557.000000,1623.000000,569.000000,5.842600,259400.000000 +-118.040000,33.780000,25.000000,3715.000000,575.000000,1640.000000,572.000000,5.770500,247100.000000 +-118.040000,33.760000,25.000000,4061.000000,545.000000,1623.000000,527.000000,7.157200,294900.000000 +-118.040000,33.760000,16.000000,2070.000000,263.000000,878.000000,297.000000,7.087900,338800.000000 +-118.040000,33.750000,16.000000,3757.000000,650.000000,1291.000000,614.000000,5.200100,235600.000000 +-118.040000,33.740000,26.000000,2532.000000,421.000000,1274.000000,441.000000,5.355900,235800.000000 +-118.040000,33.720000,15.000000,1836.000000,490.000000,942.000000,477.000000,4.023800,182500.000000 +-118.040000,33.720000,14.000000,4494.000000,1048.000000,2222.000000,963.000000,4.782100,169400.000000 +-118.040000,33.710000,12.000000,4014.000000,868.000000,1605.000000,769.000000,6.010200,396900.000000 +-118.050000,36.640000,34.000000,2090.000000,478.000000,896.000000,426.000000,2.035700,74200.000000 +-118.050000,34.170000,45.000000,2535.000000,455.000000,1036.000000,437.000000,5.048200,388900.000000 
+-118.050000,34.160000,36.000000,3908.000000,732.000000,1688.000000,725.000000,4.562500,376800.000000 +-118.050000,34.150000,32.000000,5131.000000,665.000000,1877.000000,622.000000,8.200400,500001.000000 +-118.050000,34.130000,23.000000,3264.000000,729.000000,1475.000000,668.000000,3.735000,218300.000000 +-118.050000,34.120000,20.000000,5218.000000,959.000000,2302.000000,850.000000,3.550000,476700.000000 +-118.050000,34.110000,48.000000,1410.000000,304.000000,677.000000,274.000000,3.259600,272400.000000 +-118.050000,34.100000,42.000000,2065.000000,404.000000,1313.000000,402.000000,4.017900,274300.000000 +-118.050000,34.100000,30.000000,2143.000000,427.000000,1107.000000,416.000000,4.232100,252200.000000 +-118.050000,34.090000,23.000000,602.000000,135.000000,409.000000,123.000000,3.526800,146400.000000 +-118.050000,34.080000,34.000000,572.000000,154.000000,752.000000,182.000000,2.043300,138800.000000 +-118.050000,34.080000,30.000000,1572.000000,427.000000,1857.000000,428.000000,2.491400,159200.000000 +-118.050000,34.080000,25.000000,4909.000000,1422.000000,4983.000000,1293.000000,2.725400,143500.000000 +-118.050000,34.070000,32.000000,4492.000000,1075.000000,4119.000000,1035.000000,3.237300,183100.000000 +-118.050000,34.060000,45.000000,531.000000,164.000000,722.000000,166.000000,2.140600,162500.000000 +-118.050000,34.060000,32.000000,2286.000000,654.000000,2991.000000,655.000000,2.178100,174500.000000 +-118.050000,34.060000,25.000000,1022.000000,291.000000,1570.000000,297.000000,3.023000,142000.000000 +-118.050000,34.050000,36.000000,1084.000000,202.000000,920.000000,199.000000,3.727900,162200.000000 +-118.050000,34.040000,33.000000,1348.000000,260.000000,1098.000000,257.000000,4.291700,161200.000000 +-118.050000,34.020000,31.000000,40.000000,8.000000,25.000000,7.000000,2.125000,375000.000000 +-118.050000,33.990000,42.000000,2480.000000,401.000000,1085.000000,438.000000,5.193000,263400.000000 +-118.050000,33.990000,38.000000,1619.000000,379.000000,886.000000,357.000000,3.732800,182400.000000 +-118.050000,33.980000,41.000000,1694.000000,413.000000,1222.000000,387.000000,2.831100,155300.000000 +-118.050000,33.980000,41.000000,1406.000000,428.000000,1174.000000,390.000000,2.014700,137500.000000 +-118.050000,33.980000,34.000000,2142.000000,390.000000,1305.000000,406.000000,4.037900,172800.000000 +-118.050000,33.970000,36.000000,2854.000000,688.000000,2816.000000,673.000000,3.600000,154000.000000 +-118.050000,33.960000,37.000000,2622.000000,652.000000,2778.000000,644.000000,2.971400,160300.000000 +-118.050000,33.940000,34.000000,495.000000,120.000000,527.000000,130.000000,1.945300,149000.000000 +-118.050000,33.930000,35.000000,2107.000000,480.000000,2241.000000,443.000000,3.151300,150000.000000 +-118.050000,33.920000,33.000000,1999.000000,470.000000,2170.000000,466.000000,3.237100,154700.000000 +-118.050000,33.870000,18.000000,4928.000000,773.000000,2952.000000,754.000000,5.885500,313800.000000 +-118.050000,33.860000,16.000000,2851.000000,626.000000,1985.000000,603.000000,5.408900,265600.000000 +-118.050000,33.860000,16.000000,2676.000000,391.000000,1377.000000,395.000000,6.551300,350400.000000 +-118.050000,33.850000,25.000000,2856.000000,388.000000,1212.000000,362.000000,6.173700,313100.000000 +-118.050000,33.830000,24.000000,4316.000000,678.000000,2286.000000,665.000000,5.701800,286700.000000 +-118.050000,33.820000,25.000000,1548.000000,279.000000,732.000000,265.000000,5.123000,159600.000000 +-118.050000,33.820000,21.000000,2997.000000,372.000000,1323.000000,372.000000,8.612300,386700.000000 
+-118.050000,33.810000,26.000000,2523.000000,437.000000,1377.000000,450.000000,5.254200,234600.000000 +-118.050000,33.790000,19.000000,1863.000000,355.000000,1260.000000,317.000000,3.246500,277400.000000 +-118.050000,33.780000,25.000000,3112.000000,435.000000,1098.000000,401.000000,6.000000,353500.000000 +-118.050000,33.780000,18.000000,3414.000000,434.000000,1272.000000,454.000000,8.701500,390900.000000 +-118.050000,33.730000,25.000000,2472.000000,450.000000,1301.000000,467.000000,5.069900,266100.000000 +-118.050000,33.720000,17.000000,1875.000000,472.000000,900.000000,406.000000,5.258900,226100.000000 +-118.050000,33.720000,14.000000,2673.000000,687.000000,1192.000000,656.000000,4.186200,188900.000000 +-118.050000,33.710000,25.000000,4150.000000,570.000000,1424.000000,547.000000,8.828100,461600.000000 +-118.050000,33.650000,5.000000,7017.000000,935.000000,2427.000000,867.000000,10.115400,477700.000000 +-118.060000,35.680000,15.000000,1962.000000,403.000000,730.000000,321.000000,2.250000,67500.000000 +-118.060000,34.710000,14.000000,2606.000000,514.000000,1228.000000,512.000000,2.576400,150000.000000 +-118.060000,34.170000,38.000000,2726.000000,398.000000,1059.000000,380.000000,7.241900,410400.000000 +-118.060000,34.160000,46.000000,1467.000000,298.000000,816.000000,267.000000,3.670500,286500.000000 +-118.060000,34.160000,34.000000,2297.000000,419.000000,909.000000,412.000000,4.821400,362500.000000 +-118.060000,34.140000,42.000000,2461.000000,379.000000,1179.000000,360.000000,7.031500,437300.000000 +-118.060000,34.140000,40.000000,2662.000000,379.000000,1151.000000,387.000000,8.488900,500001.000000 +-118.060000,34.130000,40.000000,4307.000000,918.000000,1769.000000,845.000000,3.634100,391500.000000 +-118.060000,34.130000,28.000000,12139.000000,2873.000000,5359.000000,2731.000000,3.292000,227300.000000 +-118.060000,34.120000,25.000000,3891.000000,848.000000,1848.000000,759.000000,3.663900,248100.000000 +-118.060000,34.110000,39.000000,2603.000000,547.000000,1196.000000,487.000000,3.085400,248700.000000 +-118.060000,34.100000,43.000000,1833.000000,355.000000,786.000000,334.000000,3.576100,256700.000000 +-118.060000,34.100000,42.000000,1576.000000,313.000000,697.000000,282.000000,4.352300,283600.000000 +-118.060000,34.100000,38.000000,3229.000000,636.000000,1599.000000,609.000000,3.864600,257100.000000 +-118.060000,34.100000,38.000000,2438.000000,442.000000,1308.000000,461.000000,3.699500,260100.000000 +-118.060000,34.100000,38.000000,1960.000000,330.000000,874.000000,308.000000,4.859400,265900.000000 +-118.060000,34.090000,40.000000,1975.000000,389.000000,1116.000000,378.000000,4.289800,251600.000000 +-118.060000,34.090000,38.000000,2036.000000,388.000000,1096.000000,371.000000,4.062500,262500.000000 +-118.060000,34.090000,36.000000,1239.000000,238.000000,717.000000,237.000000,3.244000,258100.000000 +-118.060000,34.080000,37.000000,778.000000,205.000000,850.000000,198.000000,2.511900,180500.000000 +-118.060000,34.080000,34.000000,1197.000000,260.000000,942.000000,245.000000,3.420200,189100.000000 +-118.060000,34.070000,30.000000,2308.000000,674.000000,3034.000000,691.000000,2.392900,184400.000000 +-118.060000,34.060000,28.000000,2127.000000,625.000000,3160.000000,620.000000,2.576300,173900.000000 +-118.060000,34.060000,28.000000,1778.000000,605.000000,2184.000000,574.000000,1.918900,165900.000000 +-118.060000,34.040000,28.000000,1516.000000,363.000000,1011.000000,344.000000,2.628800,160300.000000 
+-118.060000,34.020000,25.000000,3548.000000,639.000000,2653.000000,664.000000,5.255700,188800.000000 +-118.060000,34.010000,34.000000,1962.000000,396.000000,1488.000000,332.000000,3.909100,155100.000000 +-118.060000,33.990000,47.000000,1588.000000,309.000000,827.000000,292.000000,3.783300,166100.000000 +-118.060000,33.990000,46.000000,1203.000000,219.000000,637.000000,211.000000,3.361100,174400.000000 +-118.060000,33.990000,45.000000,1471.000000,255.000000,670.000000,250.000000,4.547800,188000.000000 +-118.060000,33.990000,38.000000,862.000000,178.000000,484.000000,176.000000,4.375000,186200.000000 +-118.060000,33.980000,50.000000,1146.000000,238.000000,579.000000,213.000000,2.958300,172600.000000 +-118.060000,33.980000,42.000000,1342.000000,243.000000,615.000000,208.000000,5.438100,186900.000000 +-118.060000,33.980000,40.000000,1723.000000,370.000000,1221.000000,370.000000,3.356200,169200.000000 +-118.060000,33.980000,40.000000,1410.000000,255.000000,932.000000,273.000000,4.220600,178000.000000 +-118.060000,33.980000,38.000000,1862.000000,319.000000,975.000000,305.000000,4.726600,177600.000000 +-118.060000,33.970000,39.000000,1639.000000,300.000000,988.000000,309.000000,3.961200,175800.000000 +-118.060000,33.970000,37.000000,1645.000000,308.000000,1077.000000,320.000000,4.320300,159200.000000 +-118.060000,33.910000,36.000000,1360.000000,271.000000,909.000000,275.000000,4.673100,173300.000000 +-118.060000,33.910000,24.000000,4880.000000,1044.000000,4516.000000,1050.000000,4.138700,157700.000000 +-118.060000,33.900000,37.000000,1161.000000,254.000000,882.000000,236.000000,4.416700,158000.000000 +-118.060000,33.890000,26.000000,2483.000000,412.000000,1538.000000,449.000000,5.110400,220500.000000 +-118.060000,33.880000,17.000000,7187.000000,1073.000000,3844.000000,1068.000000,6.590100,337400.000000 +-118.060000,33.860000,16.000000,5603.000000,938.000000,3045.000000,893.000000,5.077800,293700.000000 +-118.060000,33.850000,16.000000,4851.000000,726.000000,2527.000000,704.000000,6.014200,437400.000000 +-118.060000,33.840000,26.000000,6960.000000,1454.000000,4367.000000,1437.000000,4.795300,210900.000000 +-118.060000,33.840000,20.000000,5643.000000,1231.000000,3841.000000,1195.000000,4.054200,168400.000000 +-118.060000,33.830000,21.000000,3941.000000,655.000000,1897.000000,670.000000,4.880000,343900.000000 +-118.060000,33.830000,17.000000,1973.000000,516.000000,1112.000000,501.000000,3.851200,163800.000000 +-118.060000,33.820000,24.000000,3983.000000,675.000000,1568.000000,638.000000,4.645800,213400.000000 +-118.060000,33.810000,25.000000,3497.000000,513.000000,1839.000000,544.000000,5.421600,263000.000000 +-118.060000,33.800000,21.000000,2196.000000,504.000000,1215.000000,477.000000,4.800000,196900.000000 +-118.060000,33.800000,20.000000,1379.000000,333.000000,937.000000,304.000000,3.621700,195300.000000 +-118.060000,33.780000,22.000000,4048.000000,562.000000,1637.000000,541.000000,7.346300,355600.000000 +-118.060000,33.730000,16.000000,4392.000000,602.000000,1490.000000,578.000000,10.542400,500001.000000 +-118.060000,33.720000,22.000000,4311.000000,531.000000,1426.000000,533.000000,9.817700,500001.000000 +-118.060000,33.720000,17.000000,4573.000000,937.000000,1619.000000,796.000000,5.770400,500001.000000 +-118.060000,33.720000,14.000000,2665.000000,331.000000,964.000000,319.000000,15.000100,500001.000000 +-118.070000,34.580000,34.000000,3416.000000,601.000000,1929.000000,567.000000,4.014700,107400.000000 
+-118.070000,34.560000,5.000000,10264.000000,1821.000000,5871.000000,1790.000000,4.232900,145500.000000 +-118.070000,34.510000,14.000000,2798.000000,459.000000,1236.000000,404.000000,4.866700,239900.000000 +-118.070000,34.170000,36.000000,2415.000000,394.000000,1215.000000,413.000000,5.541800,326100.000000 +-118.070000,34.170000,35.000000,2142.000000,373.000000,986.000000,374.000000,5.705100,326000.000000 +-118.070000,34.160000,39.000000,1804.000000,265.000000,730.000000,276.000000,6.476100,397500.000000 +-118.070000,34.160000,35.000000,3585.000000,671.000000,1401.000000,623.000000,4.125000,330000.000000 +-118.070000,34.150000,45.000000,1095.000000,237.000000,672.000000,234.000000,3.408700,209200.000000 +-118.070000,34.140000,42.000000,3200.000000,685.000000,1668.000000,628.000000,3.375000,260400.000000 +-118.070000,34.130000,27.000000,3787.000000,913.000000,1992.000000,853.000000,3.301000,251200.000000 +-118.070000,34.120000,43.000000,1554.000000,287.000000,802.000000,277.000000,4.231200,272600.000000 +-118.070000,34.120000,30.000000,2201.000000,559.000000,1194.000000,531.000000,4.113600,279900.000000 +-118.070000,34.110000,47.000000,832.000000,194.000000,419.000000,156.000000,3.157600,225000.000000 +-118.070000,34.110000,41.000000,2869.000000,563.000000,1627.000000,533.000000,5.073600,270700.000000 +-118.070000,34.100000,34.000000,2253.000000,522.000000,1262.000000,511.000000,3.437500,259800.000000 +-118.070000,34.100000,32.000000,4275.000000,1061.000000,2812.000000,1012.000000,3.351200,214100.000000 +-118.070000,34.100000,28.000000,676.000000,177.000000,543.000000,185.000000,3.236100,187500.000000 +-118.070000,34.090000,33.000000,2178.000000,445.000000,1153.000000,400.000000,3.608300,212000.000000 +-118.070000,34.080000,38.000000,2462.000000,553.000000,1843.000000,538.000000,3.231200,211900.000000 +-118.070000,34.070000,31.000000,1370.000000,284.000000,1062.000000,277.000000,3.515600,199300.000000 +-118.070000,34.070000,19.000000,1554.000000,393.000000,1427.000000,370.000000,3.125000,207100.000000 +-118.070000,34.060000,34.000000,2873.000000,718.000000,2758.000000,699.000000,2.598500,168600.000000 +-118.070000,34.010000,38.000000,2245.000000,444.000000,1540.000000,419.000000,3.798600,171000.000000 +-118.070000,34.010000,36.000000,1391.000000,283.000000,1025.000000,275.000000,3.237500,176800.000000 +-118.070000,34.000000,42.000000,1392.000000,351.000000,1471.000000,348.000000,2.630000,143800.000000 +-118.070000,34.000000,37.000000,2976.000000,636.000000,2117.000000,598.000000,4.105800,167300.000000 +-118.070000,33.990000,41.000000,1204.000000,252.000000,1002.000000,248.000000,3.057700,163300.000000 +-118.070000,33.990000,39.000000,552.000000,151.000000,807.000000,168.000000,3.250000,153300.000000 +-118.070000,33.990000,35.000000,1625.000000,302.000000,1134.000000,288.000000,4.559500,164900.000000 +-118.070000,33.980000,41.000000,1478.000000,273.000000,916.000000,281.000000,3.968800,169800.000000 +-118.070000,33.980000,32.000000,3304.000000,714.000000,2032.000000,690.000000,3.209300,167800.000000 +-118.070000,33.970000,36.000000,1887.000000,370.000000,1006.000000,329.000000,3.155400,170700.000000 +-118.070000,33.970000,32.000000,3400.000000,826.000000,3017.000000,793.000000,2.460700,155600.000000 +-118.070000,33.960000,30.000000,928.000000,230.000000,913.000000,214.000000,2.699100,147100.000000 +-118.070000,33.910000,35.000000,2228.000000,463.000000,1558.000000,427.000000,4.023000,157700.000000 
+-118.070000,33.900000,45.000000,1776.000000,353.000000,1180.000000,337.000000,4.640600,169200.000000 +-118.070000,33.900000,42.000000,1007.000000,224.000000,776.000000,228.000000,3.867200,162700.000000 +-118.070000,33.900000,39.000000,2502.000000,546.000000,1849.000000,518.000000,3.884600,164100.000000 +-118.070000,33.890000,35.000000,1145.000000,274.000000,1651.000000,265.000000,3.125000,120300.000000 +-118.070000,33.890000,32.000000,1819.000000,386.000000,1679.000000,360.000000,3.556200,146000.000000 +-118.070000,33.890000,29.000000,1138.000000,217.000000,964.000000,222.000000,4.537000,185300.000000 +-118.070000,33.890000,17.000000,2223.000000,544.000000,2008.000000,512.000000,3.077700,160800.000000 +-118.070000,33.880000,18.000000,2436.000000,375.000000,1303.000000,386.000000,6.196800,344700.000000 +-118.070000,33.880000,17.000000,2407.000000,539.000000,1422.000000,524.000000,4.261900,139700.000000 +-118.070000,33.880000,16.000000,4934.000000,825.000000,2668.000000,810.000000,5.748000,284200.000000 +-118.070000,33.870000,28.000000,2399.000000,436.000000,1613.000000,429.000000,3.633900,220100.000000 +-118.070000,33.870000,18.000000,3405.000000,556.000000,1945.000000,509.000000,5.765200,299100.000000 +-118.070000,33.860000,31.000000,2943.000000,518.000000,1703.000000,472.000000,3.709100,225900.000000 +-118.070000,33.860000,28.000000,1789.000000,352.000000,1347.000000,330.000000,3.425000,189700.000000 +-118.070000,33.850000,16.000000,3771.000000,606.000000,2196.000000,564.000000,7.011300,319700.000000 +-118.070000,33.830000,17.000000,4822.000000,1168.000000,3868.000000,1117.000000,2.597800,142900.000000 +-118.070000,33.820000,27.000000,3481.000000,576.000000,1660.000000,560.000000,5.796500,228200.000000 +-118.070000,33.810000,22.000000,2711.000000,352.000000,1305.000000,368.000000,8.540700,398800.000000 +-118.070000,33.800000,22.000000,1391.000000,338.000000,810.000000,295.000000,3.879200,218200.000000 +-118.070000,33.790000,26.000000,4422.000000,624.000000,1936.000000,625.000000,6.428800,320700.000000 +-118.070000,33.730000,13.000000,1822.000000,313.000000,643.000000,303.000000,9.834600,401700.000000 +-118.070000,33.720000,32.000000,1179.000000,250.000000,369.000000,209.000000,5.182400,500001.000000 +-118.070000,33.720000,24.000000,1240.000000,296.000000,513.000000,254.000000,4.904400,485000.000000 +-118.070000,33.670000,13.000000,5126.000000,711.000000,2429.000000,718.000000,9.526800,437900.000000 +-118.080000,34.580000,12.000000,3851.000000,857.000000,2169.000000,811.000000,3.010100,116300.000000 +-118.080000,34.560000,14.000000,5144.000000,887.000000,2846.000000,824.000000,4.561500,137200.000000 +-118.080000,34.160000,42.000000,3490.000000,665.000000,1713.000000,620.000000,4.546100,242400.000000 +-118.080000,34.150000,28.000000,238.000000,58.000000,142.000000,31.000000,0.499900,500001.000000 +-118.080000,34.140000,52.000000,1282.000000,189.000000,431.000000,187.000000,6.115900,470800.000000 +-118.080000,34.140000,42.000000,2690.000000,589.000000,1149.000000,535.000000,3.880000,281100.000000 +-118.080000,34.130000,46.000000,1238.000000,147.000000,377.000000,145.000000,8.454600,500001.000000 +-118.080000,34.130000,39.000000,788.000000,128.000000,413.000000,139.000000,5.954600,396700.000000 +-118.080000,34.130000,35.000000,1897.000000,279.000000,733.000000,291.000000,7.418500,500001.000000 +-118.080000,34.130000,28.000000,4465.000000,985.000000,2273.000000,949.000000,3.567100,228500.000000 
+-118.080000,34.120000,41.000000,1598.000000,280.000000,807.000000,282.000000,5.506700,325000.000000 +-118.080000,34.120000,34.000000,2921.000000,641.000000,1541.000000,562.000000,3.682700,264100.000000 +-118.080000,34.120000,27.000000,1685.000000,341.000000,757.000000,317.000000,4.243400,270500.000000 +-118.080000,34.110000,42.000000,2628.000000,525.000000,1494.000000,523.000000,3.946400,257200.000000 +-118.080000,34.110000,42.000000,1969.000000,353.000000,927.000000,354.000000,5.592400,285300.000000 +-118.080000,34.100000,36.000000,2679.000000,548.000000,1605.000000,533.000000,3.531300,213200.000000 +-118.080000,34.100000,32.000000,2830.000000,645.000000,1500.000000,527.000000,3.081900,214600.000000 +-118.080000,34.090000,34.000000,1823.000000,457.000000,1485.000000,401.000000,3.722200,207200.000000 +-118.080000,34.090000,33.000000,2557.000000,578.000000,1715.000000,530.000000,2.919600,208800.000000 +-118.080000,34.090000,32.000000,3214.000000,718.000000,2316.000000,751.000000,3.706600,206800.000000 +-118.080000,34.080000,43.000000,1716.000000,402.000000,1343.000000,386.000000,2.968800,211400.000000 +-118.080000,34.040000,20.000000,5841.000000,1146.000000,3273.000000,1131.000000,4.722200,185100.000000 +-118.080000,34.020000,14.000000,3789.000000,810.000000,2551.000000,793.000000,2.932100,144200.000000 +-118.080000,34.010000,36.000000,1248.000000,322.000000,1282.000000,326.000000,3.203100,147600.000000 +-118.080000,34.010000,34.000000,1914.000000,549.000000,2122.000000,529.000000,2.596900,150200.000000 +-118.080000,34.010000,33.000000,1091.000000,233.000000,890.000000,226.000000,2.767900,176400.000000 +-118.080000,34.010000,32.000000,1973.000000,401.000000,1322.000000,386.000000,3.486100,158100.000000 +-118.080000,34.000000,35.000000,1188.000000,342.000000,1373.000000,332.000000,2.910700,150900.000000 +-118.080000,33.990000,38.000000,1683.000000,328.000000,1369.000000,339.000000,3.619600,170700.000000 +-118.080000,33.990000,37.000000,1419.000000,310.000000,1125.000000,296.000000,2.500000,162000.000000 +-118.080000,33.990000,36.000000,2024.000000,590.000000,2028.000000,573.000000,2.815200,163900.000000 +-118.080000,33.980000,39.000000,1042.000000,221.000000,863.000000,228.000000,3.603300,157800.000000 +-118.080000,33.980000,36.000000,1492.000000,282.000000,1041.000000,270.000000,4.067700,165800.000000 +-118.080000,33.970000,38.000000,1026.000000,190.000000,789.000000,193.000000,4.200000,163200.000000 +-118.080000,33.970000,36.000000,1678.000000,323.000000,1380.000000,352.000000,3.548100,163300.000000 +-118.080000,33.970000,36.000000,1620.000000,298.000000,1258.000000,309.000000,3.977300,166700.000000 +-118.080000,33.970000,35.000000,825.000000,155.000000,590.000000,144.000000,4.633300,161200.000000 +-118.080000,33.960000,35.000000,2104.000000,399.000000,1659.000000,387.000000,4.009600,165000.000000 +-118.080000,33.960000,34.000000,1431.000000,310.000000,1162.000000,288.000000,4.369000,165400.000000 +-118.080000,33.950000,37.000000,1743.000000,348.000000,1328.000000,354.000000,3.094400,162800.000000 +-118.080000,33.940000,21.000000,3933.000000,949.000000,2219.000000,820.000000,2.492600,171400.000000 +-118.080000,33.930000,39.000000,859.000000,164.000000,673.000000,172.000000,3.714300,158200.000000 +-118.080000,33.930000,39.000000,1478.000000,324.000000,1127.000000,320.000000,3.525000,158000.000000 +-118.080000,33.930000,34.000000,1558.000000,375.000000,1179.000000,337.000000,3.218800,165100.000000 
+-118.080000,33.930000,33.000000,2263.000000,511.000000,1626.000000,457.000000,3.555600,172800.000000 +-118.080000,33.920000,39.000000,1631.000000,322.000000,1034.000000,328.000000,4.538200,165700.000000 +-118.080000,33.920000,34.000000,2118.000000,437.000000,1414.000000,442.000000,3.723800,166800.000000 +-118.080000,33.910000,36.000000,1551.000000,297.000000,1100.000000,322.000000,5.118700,168100.000000 +-118.080000,33.910000,30.000000,3259.000000,942.000000,2744.000000,895.000000,2.860800,165600.000000 +-118.080000,33.910000,30.000000,1366.000000,460.000000,920.000000,410.000000,0.994600,159900.000000 +-118.080000,33.910000,18.000000,1573.000000,396.000000,1200.000000,365.000000,2.895000,146900.000000 +-118.080000,33.900000,44.000000,1167.000000,237.000000,733.000000,237.000000,4.208300,168300.000000 +-118.080000,33.900000,42.000000,1768.000000,372.000000,1155.000000,368.000000,3.558000,161100.000000 +-118.080000,33.890000,41.000000,834.000000,166.000000,603.000000,179.000000,3.732100,167500.000000 +-118.080000,33.890000,37.000000,1152.000000,259.000000,981.000000,225.000000,3.285700,153600.000000 +-118.080000,33.890000,35.000000,1071.000000,290.000000,1412.000000,274.000000,3.191700,114900.000000 +-118.080000,33.890000,33.000000,2131.000000,435.000000,2045.000000,426.000000,4.000000,145700.000000 +-118.080000,33.890000,28.000000,1035.000000,275.000000,1545.000000,269.000000,3.035700,123400.000000 +-118.080000,33.880000,30.000000,1901.000000,519.000000,2685.000000,496.000000,3.263900,120100.000000 +-118.080000,33.880000,27.000000,3065.000000,736.000000,1840.000000,719.000000,3.641700,208100.000000 +-118.080000,33.870000,23.000000,2536.000000,552.000000,2012.000000,556.000000,4.140600,200800.000000 +-118.080000,33.860000,29.000000,3260.000000,783.000000,1969.000000,737.000000,3.526800,215500.000000 +-118.080000,33.860000,29.000000,1018.000000,235.000000,684.000000,248.000000,3.333300,198800.000000 +-118.080000,33.860000,26.000000,778.000000,173.000000,539.000000,186.000000,3.267900,236500.000000 +-118.080000,33.860000,17.000000,2259.000000,383.000000,1378.000000,386.000000,5.873300,287000.000000 +-118.080000,33.850000,22.000000,1055.000000,204.000000,682.000000,216.000000,6.000000,191300.000000 +-118.080000,33.850000,19.000000,4261.000000,678.000000,2621.000000,661.000000,6.242700,288700.000000 +-118.080000,33.840000,31.000000,2906.000000,578.000000,1806.000000,553.000000,4.844800,194600.000000 +-118.080000,33.840000,31.000000,2377.000000,600.000000,2042.000000,593.000000,3.625000,170400.000000 +-118.080000,33.840000,25.000000,3696.000000,953.000000,2827.000000,860.000000,3.343800,153300.000000 +-118.080000,33.830000,30.000000,2188.000000,556.000000,2727.000000,525.000000,2.775900,136800.000000 +-118.080000,33.820000,30.000000,2636.000000,652.000000,3412.000000,649.000000,2.809500,118300.000000 +-118.080000,33.810000,20.000000,6295.000000,937.000000,2292.000000,874.000000,7.608400,402500.000000 +-118.080000,33.800000,29.000000,3675.000000,613.000000,1457.000000,591.000000,6.055300,369400.000000 +-118.080000,33.780000,30.000000,2879.000000,403.000000,1109.000000,414.000000,6.932400,364700.000000 +-118.080000,33.780000,25.000000,5321.000000,967.000000,1969.000000,903.000000,5.010200,340100.000000 +-118.080000,33.770000,26.000000,3083.000000,806.000000,960.000000,723.000000,1.907400,68500.000000 +-118.080000,33.770000,26.000000,2013.000000,551.000000,664.000000,510.000000,2.270800,67500.000000 +-118.080000,33.760000,27.000000,529.000000,159.000000,193.000000,155.000000,2.095200,71300.000000 
+-118.080000,33.760000,26.000000,996.000000,364.000000,366.000000,313.000000,1.281300,46700.000000 +-118.080000,33.760000,26.000000,1967.000000,577.000000,692.000000,538.000000,1.611100,54300.000000 +-118.080000,33.760000,25.000000,1995.000000,637.000000,743.000000,597.000000,1.461700,46900.000000 +-118.080000,33.720000,14.000000,2021.000000,396.000000,696.000000,367.000000,7.167300,340700.000000 +-118.090000,34.740000,34.000000,1218.000000,285.000000,797.000000,248.000000,2.434800,104800.000000 +-118.090000,34.700000,6.000000,4558.000000,804.000000,1543.000000,563.000000,2.854800,138500.000000 +-118.090000,34.680000,4.000000,23386.000000,4171.000000,10493.000000,3671.000000,4.021100,144000.000000 +-118.090000,34.630000,31.000000,1537.000000,416.000000,1239.000000,397.000000,1.972200,99200.000000 +-118.090000,34.570000,4.000000,9761.000000,1683.000000,4970.000000,1535.000000,4.526600,142900.000000 +-118.090000,34.180000,34.000000,3113.000000,409.000000,1139.000000,418.000000,10.228900,500001.000000 +-118.090000,34.170000,36.000000,2875.000000,552.000000,1131.000000,458.000000,4.308300,269300.000000 +-118.090000,34.160000,45.000000,2199.000000,358.000000,942.000000,353.000000,5.039300,321100.000000 +-118.090000,34.150000,49.000000,1467.000000,259.000000,688.000000,260.000000,4.345200,260100.000000 +-118.090000,34.150000,46.000000,271.000000,74.000000,150.000000,55.000000,2.232100,237500.000000 +-118.090000,34.150000,45.000000,1345.000000,356.000000,749.000000,327.000000,2.800700,210900.000000 +-118.090000,34.140000,40.000000,3092.000000,549.000000,1457.000000,536.000000,5.337700,373800.000000 +-118.090000,34.120000,45.000000,2966.000000,415.000000,1231.000000,409.000000,7.834700,500001.000000 +-118.090000,34.120000,38.000000,2638.000000,432.000000,1284.000000,433.000000,5.453600,342700.000000 +-118.090000,34.110000,45.000000,1883.000000,275.000000,764.000000,289.000000,6.507800,414800.000000 +-118.090000,34.110000,36.000000,2966.000000,527.000000,1231.000000,482.000000,4.644200,316800.000000 +-118.090000,34.100000,40.000000,1904.000000,393.000000,1183.000000,364.000000,3.669600,210400.000000 +-118.090000,34.100000,27.000000,6010.000000,1532.000000,3620.000000,1445.000000,2.743600,201700.000000 +-118.090000,34.090000,40.000000,855.000000,208.000000,745.000000,222.000000,3.012500,224000.000000 +-118.090000,34.090000,36.000000,1068.000000,246.000000,949.000000,250.000000,2.346200,188500.000000 +-118.090000,34.080000,42.000000,1003.000000,236.000000,769.000000,231.000000,3.160700,218300.000000 +-118.090000,34.080000,34.000000,1513.000000,384.000000,986.000000,336.000000,2.690100,235600.000000 +-118.090000,34.080000,33.000000,1430.000000,344.000000,1165.000000,328.000000,3.035700,206000.000000 +-118.090000,34.070000,45.000000,593.000000,133.000000,481.000000,128.000000,2.593800,199300.000000 +-118.090000,34.070000,38.000000,1036.000000,226.000000,1058.000000,235.000000,3.257800,184200.000000 +-118.090000,34.070000,31.000000,1054.000000,252.000000,1032.000000,258.000000,2.342400,188500.000000 +-118.090000,34.070000,26.000000,794.000000,182.000000,709.000000,170.000000,3.175000,170800.000000 +-118.090000,34.060000,38.000000,3230.000000,840.000000,3485.000000,827.000000,2.629000,171600.000000 +-118.090000,34.060000,31.000000,1146.000000,289.000000,1163.000000,258.000000,2.208300,185600.000000 +-118.090000,34.050000,22.000000,1764.000000,357.000000,1379.000000,363.000000,3.535700,199000.000000 +-118.090000,34.040000,24.000000,1543.000000,257.000000,824.000000,271.000000,6.438500,272600.000000 
+-118.090000,34.040000,18.000000,5580.000000,1369.000000,3842.000000,1276.000000,3.651200,168500.000000 +-118.090000,34.020000,33.000000,4853.000000,1105.000000,2855.000000,1006.000000,3.262200,208600.000000 +-118.090000,34.020000,28.000000,1984.000000,313.000000,1099.000000,343.000000,4.552600,250200.000000 +-118.090000,34.010000,42.000000,897.000000,229.000000,1094.000000,238.000000,2.072900,114100.000000 +-118.090000,34.010000,36.000000,1465.000000,363.000000,1538.000000,342.000000,3.546900,150600.000000 +-118.090000,34.010000,31.000000,1108.000000,238.000000,1151.000000,229.000000,4.333300,149500.000000 +-118.090000,34.000000,36.000000,1722.000000,353.000000,1174.000000,335.000000,3.045000,160600.000000 +-118.090000,34.000000,35.000000,1580.000000,331.000000,1290.000000,338.000000,4.145800,162500.000000 +-118.090000,33.990000,34.000000,1369.000000,270.000000,1005.000000,272.000000,3.692000,172600.000000 +-118.090000,33.980000,39.000000,936.000000,194.000000,691.000000,211.000000,3.687500,169500.000000 +-118.090000,33.980000,37.000000,1226.000000,255.000000,1068.000000,271.000000,3.160700,172200.000000 +-118.090000,33.970000,39.000000,1473.000000,297.000000,1108.000000,294.000000,4.138900,166000.000000 +-118.090000,33.970000,35.000000,2664.000000,541.000000,2033.000000,491.000000,3.732600,164300.000000 +-118.090000,33.960000,36.000000,3271.000000,603.000000,2593.000000,616.000000,3.362100,169700.000000 +-118.090000,33.960000,36.000000,1116.000000,229.000000,719.000000,233.000000,3.425000,163200.000000 +-118.090000,33.960000,20.000000,1911.000000,472.000000,1407.000000,465.000000,2.764700,163000.000000 +-118.090000,33.950000,36.000000,1991.000000,396.000000,1306.000000,403.000000,4.500000,166600.000000 +-118.090000,33.950000,32.000000,1083.000000,206.000000,737.000000,218.000000,3.558300,170800.000000 +-118.090000,33.940000,36.000000,2762.000000,472.000000,1576.000000,493.000000,4.084600,183400.000000 +-118.090000,33.940000,33.000000,1976.000000,404.000000,1379.000000,395.000000,3.854200,175400.000000 +-118.090000,33.930000,37.000000,1950.000000,356.000000,1183.000000,338.000000,4.144900,175300.000000 +-118.090000,33.930000,36.000000,1585.000000,323.000000,1205.000000,343.000000,4.530600,183400.000000 +-118.090000,33.920000,36.000000,2381.000000,419.000000,1669.000000,444.000000,4.697600,171100.000000 +-118.090000,33.920000,36.000000,1226.000000,211.000000,711.000000,219.000000,4.569900,170800.000000 +-118.090000,33.920000,35.000000,1994.000000,419.000000,1491.000000,428.000000,3.738300,166200.000000 +-118.090000,33.920000,33.000000,879.000000,181.000000,547.000000,169.000000,5.314600,168600.000000 +-118.090000,33.920000,31.000000,1983.000000,419.000000,1157.000000,390.000000,3.545500,168300.000000 +-118.090000,33.910000,36.000000,1442.000000,271.000000,990.000000,268.000000,4.051700,162200.000000 +-118.090000,33.910000,34.000000,1582.000000,343.000000,1356.000000,324.000000,3.421100,141100.000000 +-118.090000,33.910000,14.000000,2369.000000,604.000000,1546.000000,464.000000,3.796900,159400.000000 +-118.090000,33.900000,36.000000,1215.000000,279.000000,862.000000,285.000000,3.760400,158700.000000 +-118.090000,33.900000,33.000000,3326.000000,720.000000,2533.000000,689.000000,3.144100,176300.000000 +-118.090000,33.890000,27.000000,3399.000000,882.000000,2465.000000,811.000000,3.099000,166600.000000 +-118.090000,33.880000,27.000000,3119.000000,635.000000,1887.000000,567.000000,3.865400,195300.000000 
+-118.090000,33.850000,19.000000,8120.000000,1371.000000,5026.000000,1345.000000,6.309300,286500.000000 +-118.090000,33.840000,27.000000,1594.000000,295.000000,1061.000000,320.000000,4.791700,217700.000000 +-118.090000,33.840000,23.000000,4412.000000,910.000000,2380.000000,825.000000,4.540000,213100.000000 +-118.090000,33.830000,36.000000,2734.000000,448.000000,1308.000000,441.000000,5.926500,227300.000000 +-118.090000,33.820000,36.000000,2219.000000,393.000000,1042.000000,396.000000,5.229900,239800.000000 +-118.090000,33.810000,36.000000,1878.000000,323.000000,846.000000,325.000000,7.193700,254400.000000 +-118.090000,33.810000,36.000000,1371.000000,250.000000,666.000000,257.000000,5.079500,243300.000000 +-118.090000,33.800000,36.000000,1724.000000,322.000000,838.000000,328.000000,4.483100,253900.000000 +-118.090000,33.790000,36.000000,4210.000000,657.000000,1911.000000,631.000000,5.849100,247300.000000 +-118.090000,33.790000,31.000000,4231.000000,617.000000,1694.000000,623.000000,6.631200,360100.000000 +-118.090000,33.780000,26.000000,2146.000000,298.000000,852.000000,296.000000,6.613700,342700.000000 +-118.090000,33.770000,27.000000,2301.000000,640.000000,847.000000,627.000000,1.720800,67500.000000 +-118.090000,33.770000,26.000000,5359.000000,1508.000000,1829.000000,1393.000000,1.767500,61300.000000 +-118.090000,33.770000,26.000000,1388.000000,409.000000,515.000000,392.000000,1.801500,62000.000000 +-118.090000,33.750000,32.000000,6239.000000,974.000000,2615.000000,950.000000,6.618800,380000.000000 +-118.090000,33.740000,44.000000,1671.000000,390.000000,871.000000,367.000000,4.636900,422200.000000 +-118.090000,33.710000,19.000000,1397.000000,271.000000,491.000000,197.000000,8.739700,500001.000000 +-118.090000,33.700000,13.000000,4770.000000,969.000000,2261.000000,972.000000,5.883000,295100.000000 +-118.100000,34.710000,16.000000,3914.000000,819.000000,1524.000000,795.000000,2.415000,137500.000000 +-118.100000,34.700000,5.000000,10356.000000,1647.000000,4562.000000,1427.000000,4.980600,141100.000000 +-118.100000,34.580000,32.000000,1489.000000,306.000000,774.000000,267.000000,3.275000,103500.000000 +-118.100000,34.580000,29.000000,2843.000000,603.000000,1517.000000,573.000000,2.665800,106900.000000 +-118.100000,34.570000,7.000000,20377.000000,4335.000000,11973.000000,3933.000000,3.308600,138100.000000 +-118.100000,34.180000,47.000000,2168.000000,352.000000,902.000000,361.000000,5.894000,300900.000000 +-118.100000,34.180000,39.000000,2321.000000,336.000000,880.000000,339.000000,7.710800,450000.000000 +-118.100000,34.170000,46.000000,1774.000000,315.000000,753.000000,330.000000,4.724100,279600.000000 +-118.100000,34.170000,44.000000,4505.000000,894.000000,2296.000000,899.000000,3.481100,300500.000000 +-118.100000,34.150000,32.000000,978.000000,227.000000,543.000000,211.000000,3.009600,199000.000000 +-118.100000,34.150000,14.000000,1442.000000,369.000000,782.000000,343.000000,2.743100,177500.000000 +-118.100000,34.140000,45.000000,3066.000000,659.000000,1287.000000,625.000000,3.580400,324400.000000 +-118.100000,34.130000,44.000000,1917.000000,265.000000,754.000000,257.000000,12.423700,500001.000000 +-118.100000,34.120000,50.000000,1835.000000,231.000000,636.000000,211.000000,11.647100,500001.000000 +-118.100000,34.120000,49.000000,3783.000000,579.000000,1601.000000,539.000000,6.301300,500001.000000 +-118.100000,34.110000,49.000000,3367.000000,523.000000,1317.000000,495.000000,6.706000,351400.000000 
+-118.100000,34.100000,52.000000,1788.000000,313.000000,792.000000,294.000000,3.750000,280000.000000 +-118.100000,34.100000,34.000000,2578.000000,645.000000,1628.000000,617.000000,2.340000,210900.000000 +-118.100000,34.100000,29.000000,1937.000000,448.000000,1352.000000,433.000000,3.810000,234600.000000 +-118.100000,34.090000,46.000000,2822.000000,525.000000,1434.000000,520.000000,3.890600,238300.000000 +-118.100000,34.090000,44.000000,2352.000000,484.000000,1517.000000,463.000000,4.283300,258000.000000 +-118.100000,34.090000,42.000000,1460.000000,289.000000,829.000000,273.000000,4.875000,227300.000000 +-118.100000,34.080000,37.000000,2894.000000,659.000000,1977.000000,636.000000,2.543000,208100.000000 +-118.100000,34.080000,24.000000,4510.000000,1296.000000,3985.000000,1240.000000,2.688400,204600.000000 +-118.100000,34.080000,21.000000,1349.000000,352.000000,1188.000000,330.000000,2.500000,182100.000000 +-118.100000,34.070000,36.000000,3661.000000,956.000000,3816.000000,931.000000,2.510400,185000.000000 +-118.100000,34.070000,33.000000,3437.000000,1081.000000,3817.000000,1042.000000,2.250000,203700.000000 +-118.100000,34.070000,29.000000,1179.000000,313.000000,1255.000000,308.000000,2.596400,176800.000000 +-118.100000,34.060000,36.000000,1463.000000,369.000000,1492.000000,366.000000,3.250000,179200.000000 +-118.100000,34.060000,31.000000,2852.000000,740.000000,3100.000000,725.000000,2.952400,178800.000000 +-118.100000,34.050000,31.000000,3559.000000,734.000000,2975.000000,715.000000,3.756000,183300.000000 +-118.100000,34.050000,26.000000,1495.000000,328.000000,1296.000000,304.000000,2.913000,152300.000000 +-118.100000,34.020000,37.000000,1022.000000,232.000000,653.000000,238.000000,3.062500,189400.000000 +-118.100000,34.020000,33.000000,1143.000000,172.000000,508.000000,174.000000,4.910700,279900.000000 +-118.100000,34.010000,42.000000,1436.000000,298.000000,1005.000000,298.000000,3.429700,195800.000000 +-118.100000,34.010000,23.000000,1724.000000,576.000000,1336.000000,542.000000,1.336500,183300.000000 +-118.100000,34.000000,32.000000,2122.000000,591.000000,1929.000000,539.000000,2.731100,169300.000000 +-118.100000,33.990000,36.000000,1529.000000,290.000000,1271.000000,287.000000,3.687500,175200.000000 +-118.100000,33.990000,35.000000,1326.000000,272.000000,933.000000,267.000000,3.430600,162500.000000 +-118.100000,33.990000,31.000000,965.000000,217.000000,599.000000,206.000000,2.720200,190300.000000 +-118.100000,33.980000,34.000000,1357.000000,310.000000,1042.000000,287.000000,3.408300,156700.000000 +-118.100000,33.960000,40.000000,1743.000000,328.000000,981.000000,291.000000,3.666700,173100.000000 +-118.100000,33.960000,38.000000,1657.000000,335.000000,1195.000000,309.000000,4.171100,160100.000000 +-118.100000,33.960000,36.000000,2013.000000,435.000000,1476.000000,475.000000,3.954900,192100.000000 +-118.100000,33.960000,36.000000,1184.000000,240.000000,946.000000,232.000000,4.035700,162500.000000 +-118.100000,33.950000,34.000000,3635.000000,781.000000,2171.000000,720.000000,3.730800,196900.000000 +-118.100000,33.950000,27.000000,1666.000000,365.000000,995.000000,354.000000,4.569400,204300.000000 +-118.100000,33.940000,34.000000,1947.000000,284.000000,841.000000,277.000000,6.181400,453600.000000 +-118.100000,33.940000,33.000000,639.000000,129.000000,460.000000,118.000000,3.160700,189000.000000 +-118.100000,33.930000,36.000000,1124.000000,217.000000,707.000000,234.000000,4.375000,174500.000000 
+-118.100000,33.930000,35.000000,1622.000000,302.000000,845.000000,284.000000,4.576900,186100.000000 +-118.100000,33.930000,33.000000,1474.000000,325.000000,1205.000000,335.000000,3.139700,166800.000000 +-118.100000,33.920000,35.000000,2017.000000,383.000000,1388.000000,386.000000,4.077400,171600.000000 +-118.100000,33.910000,40.000000,513.000000,100.000000,399.000000,99.000000,4.875000,167600.000000 +-118.100000,33.910000,36.000000,726.000000,145.000000,490.000000,130.000000,3.638900,167600.000000 +-118.100000,33.910000,36.000000,1080.000000,201.000000,719.000000,201.000000,4.267900,175800.000000 +-118.100000,33.910000,35.000000,1592.000000,335.000000,1238.000000,320.000000,4.973200,165000.000000 +-118.100000,33.900000,43.000000,1237.000000,243.000000,776.000000,246.000000,4.325000,166000.000000 +-118.100000,33.900000,40.000000,1880.000000,377.000000,1229.000000,378.000000,4.416700,174600.000000 +-118.100000,33.900000,37.000000,796.000000,175.000000,740.000000,183.000000,3.600000,156400.000000 +-118.100000,33.900000,37.000000,1061.000000,202.000000,768.000000,206.000000,4.750000,161900.000000 +-118.100000,33.900000,35.000000,1151.000000,248.000000,809.000000,246.000000,4.781300,160000.000000 +-118.100000,33.890000,36.000000,769.000000,142.000000,498.000000,137.000000,4.715900,182100.000000 +-118.100000,33.890000,35.000000,994.000000,203.000000,602.000000,185.000000,3.586500,178000.000000 +-118.100000,33.890000,34.000000,2242.000000,436.000000,1483.000000,443.000000,4.493400,185600.000000 +-118.100000,33.880000,18.000000,8046.000000,1221.000000,4276.000000,1228.000000,6.551500,319600.000000 +-118.100000,33.860000,21.000000,3052.000000,624.000000,1588.000000,568.000000,4.339700,268100.000000 +-118.100000,33.850000,36.000000,1473.000000,253.000000,713.000000,257.000000,5.949300,228000.000000 +-118.100000,33.850000,28.000000,2825.000000,470.000000,1352.000000,469.000000,5.263900,242000.000000 +-118.100000,33.850000,19.000000,993.000000,174.000000,572.000000,175.000000,5.703900,277600.000000 +-118.100000,33.840000,36.000000,690.000000,109.000000,316.000000,104.000000,3.781300,209100.000000 +-118.100000,33.840000,36.000000,1915.000000,316.000000,850.000000,319.000000,4.722200,225800.000000 +-118.100000,33.840000,36.000000,1557.000000,270.000000,697.000000,251.000000,4.541700,219600.000000 +-118.100000,33.840000,35.000000,1790.000000,269.000000,924.000000,263.000000,5.296000,226200.000000 +-118.100000,33.830000,37.000000,2059.000000,349.000000,825.000000,334.000000,4.060300,225200.000000 +-118.100000,33.830000,36.000000,1408.000000,250.000000,702.000000,251.000000,4.875000,222500.000000 +-118.100000,33.820000,36.000000,2422.000000,420.000000,1193.000000,421.000000,4.846200,225700.000000 +-118.100000,33.820000,36.000000,1946.000000,346.000000,871.000000,336.000000,5.215500,254800.000000 +-118.100000,33.820000,36.000000,1930.000000,354.000000,915.000000,328.000000,5.271300,244400.000000 +-118.100000,33.810000,36.000000,856.000000,146.000000,451.000000,164.000000,5.199300,246000.000000 +-118.100000,33.810000,36.000000,1962.000000,325.000000,786.000000,315.000000,5.620000,239600.000000 +-118.100000,33.800000,37.000000,1814.000000,329.000000,850.000000,328.000000,5.057400,240800.000000 +-118.100000,33.790000,36.000000,3359.000000,596.000000,1522.000000,565.000000,5.180500,249400.000000 +-118.100000,33.790000,35.000000,2370.000000,379.000000,996.000000,380.000000,5.736800,287200.000000 +-118.100000,33.780000,35.000000,4466.000000,740.000000,2134.000000,743.000000,5.738900,251800.000000 
+-118.100000,33.740000,37.000000,997.000000,262.000000,531.000000,282.000000,4.777300,400000.000000 +-118.100000,33.740000,33.000000,2119.000000,524.000000,872.000000,465.000000,4.537000,495500.000000 +-118.100000,33.740000,32.000000,2035.000000,520.000000,934.000000,512.000000,4.228700,500001.000000 +-118.100000,33.740000,31.000000,1310.000000,342.000000,563.000000,310.000000,4.652800,457100.000000 +-118.110000,34.190000,50.000000,1430.000000,186.000000,620.000000,201.000000,9.532000,483300.000000 +-118.110000,34.180000,52.000000,3571.000000,510.000000,1434.000000,490.000000,5.900900,376000.000000 +-118.110000,34.170000,50.000000,3374.000000,598.000000,1484.000000,569.000000,4.990000,261600.000000 +-118.110000,34.170000,46.000000,2837.000000,592.000000,1453.000000,549.000000,3.111500,234600.000000 +-118.110000,34.160000,52.000000,3158.000000,459.000000,1229.000000,444.000000,5.422300,325600.000000 +-118.110000,34.160000,52.000000,2489.000000,437.000000,1101.000000,438.000000,4.206500,320300.000000 +-118.110000,34.150000,40.000000,1950.000000,509.000000,1038.000000,438.000000,2.617200,196100.000000 +-118.110000,34.150000,26.000000,2193.000000,558.000000,1186.000000,559.000000,3.647400,184100.000000 +-118.110000,34.140000,52.000000,2401.000000,332.000000,810.000000,308.000000,6.094800,358700.000000 +-118.110000,34.140000,44.000000,3298.000000,615.000000,1417.000000,643.000000,4.132400,434800.000000 +-118.110000,34.120000,52.000000,2954.000000,371.000000,1152.000000,347.000000,11.560900,500001.000000 +-118.110000,34.110000,50.000000,2131.000000,294.000000,753.000000,284.000000,6.709900,352200.000000 +-118.110000,34.100000,49.000000,2812.000000,478.000000,1329.000000,490.000000,5.250300,292900.000000 +-118.110000,34.100000,44.000000,2012.000000,435.000000,1454.000000,456.000000,3.322900,226600.000000 +-118.110000,34.100000,20.000000,3090.000000,802.000000,2109.000000,738.000000,3.380100,192500.000000 +-118.110000,34.080000,45.000000,1106.000000,226.000000,779.000000,205.000000,4.544600,244800.000000 +-118.110000,34.080000,42.000000,3172.000000,644.000000,1829.000000,642.000000,3.396600,243200.000000 +-118.110000,34.080000,30.000000,2350.000000,472.000000,945.000000,467.000000,3.342100,201000.000000 +-118.110000,34.070000,46.000000,1130.000000,229.000000,698.000000,209.000000,5.271900,244400.000000 +-118.110000,34.070000,19.000000,3215.000000,907.000000,3072.000000,870.000000,2.339300,202300.000000 +-118.110000,34.060000,32.000000,1273.000000,344.000000,1148.000000,368.000000,2.106100,214700.000000 +-118.110000,34.060000,30.000000,1547.000000,436.000000,1700.000000,410.000000,2.548800,187500.000000 +-118.110000,34.060000,18.000000,2609.000000,721.000000,2221.000000,703.000000,2.322400,192300.000000 +-118.110000,34.060000,16.000000,2416.000000,565.000000,1750.000000,514.000000,2.822900,163700.000000 +-118.110000,34.060000,14.000000,2628.000000,668.000000,2208.000000,574.000000,2.976400,160300.000000 +-118.110000,34.050000,23.000000,3436.000000,565.000000,1729.000000,529.000000,5.994100,266700.000000 +-118.110000,34.040000,28.000000,3913.000000,696.000000,2264.000000,697.000000,5.244600,258000.000000 +-118.110000,34.020000,40.000000,1727.000000,309.000000,932.000000,313.000000,3.950000,210200.000000 +-118.110000,34.010000,43.000000,1539.000000,386.000000,1122.000000,377.000000,2.460500,196000.000000 +-118.110000,34.010000,41.000000,815.000000,252.000000,775.000000,231.000000,2.284700,190000.000000 
+-118.110000,34.010000,22.000000,1141.000000,332.000000,1189.000000,321.000000,2.204200,162500.000000 +-118.110000,34.000000,38.000000,2573.000000,484.000000,1568.000000,459.000000,3.020800,193700.000000 +-118.110000,34.000000,24.000000,2403.000000,590.000000,2103.000000,547.000000,2.729200,193800.000000 +-118.110000,33.980000,36.000000,446.000000,108.000000,410.000000,117.000000,3.394200,147200.000000 +-118.110000,33.970000,33.000000,2125.000000,500.000000,1672.000000,476.000000,3.639700,166600.000000 +-118.110000,33.960000,29.000000,2784.000000,582.000000,1278.000000,550.000000,4.388200,261600.000000 +-118.110000,33.950000,36.000000,2049.000000,334.000000,1105.000000,363.000000,4.803600,261300.000000 +-118.110000,33.950000,34.000000,2319.000000,334.000000,941.000000,356.000000,6.431900,452300.000000 +-118.110000,33.950000,34.000000,1723.000000,279.000000,617.000000,252.000000,6.750100,400000.000000 +-118.110000,33.940000,37.000000,1434.000000,262.000000,786.000000,256.000000,4.437500,244900.000000 +-118.110000,33.940000,36.000000,1949.000000,319.000000,909.000000,325.000000,5.158700,296600.000000 +-118.110000,33.940000,32.000000,2098.000000,378.000000,1036.000000,385.000000,5.025800,255400.000000 +-118.110000,33.930000,35.000000,2670.000000,493.000000,1196.000000,488.000000,3.842700,283500.000000 +-118.110000,33.930000,17.000000,1205.000000,347.000000,736.000000,342.000000,3.201100,162500.000000 +-118.110000,33.920000,34.000000,1414.000000,263.000000,983.000000,264.000000,4.176700,156600.000000 +-118.110000,33.920000,32.000000,1016.000000,190.000000,729.000000,177.000000,4.300000,151300.000000 +-118.110000,33.910000,36.000000,1088.000000,231.000000,617.000000,211.000000,3.882400,193100.000000 +-118.110000,33.910000,29.000000,889.000000,166.000000,597.000000,163.000000,4.960900,186700.000000 +-118.110000,33.910000,22.000000,1981.000000,472.000000,1231.000000,457.000000,4.087800,153700.000000 +-118.110000,33.900000,36.000000,1347.000000,278.000000,748.000000,278.000000,5.142300,183100.000000 +-118.110000,33.900000,35.000000,2604.000000,495.000000,1465.000000,470.000000,4.489600,184600.000000 +-118.110000,33.900000,35.000000,1323.000000,269.000000,1084.000000,240.000000,5.075300,178000.000000 +-118.110000,33.900000,26.000000,4173.000000,893.000000,2471.000000,863.000000,3.505200,196000.000000 +-118.110000,33.890000,35.000000,1139.000000,197.000000,772.000000,233.000000,4.375000,204700.000000 +-118.110000,33.880000,35.000000,1623.000000,304.000000,868.000000,272.000000,3.589300,276000.000000 +-118.110000,33.880000,19.000000,3203.000000,708.000000,1761.000000,667.000000,4.091100,239700.000000 +-118.110000,33.870000,33.000000,1379.000000,254.000000,795.000000,297.000000,4.671300,231800.000000 +-118.110000,33.860000,35.000000,1255.000000,252.000000,685.000000,279.000000,4.200000,226900.000000 +-118.110000,33.850000,36.000000,887.000000,163.000000,482.000000,157.000000,4.125000,219500.000000 +-118.110000,33.850000,36.000000,2418.000000,389.000000,1138.000000,387.000000,4.839300,216300.000000 +-118.110000,33.840000,37.000000,1588.000000,272.000000,692.000000,245.000000,4.859400,220300.000000 +-118.110000,33.840000,36.000000,1756.000000,297.000000,798.000000,287.000000,5.558100,218300.000000 +-118.110000,33.840000,36.000000,1523.000000,263.000000,717.000000,278.000000,4.875000,218900.000000 +-118.110000,33.840000,36.000000,1463.000000,257.000000,722.000000,260.000000,4.843800,226300.000000 +-118.110000,33.840000,36.000000,1074.000000,188.000000,496.000000,196.000000,4.625000,217400.000000 
+-118.110000,33.830000,37.000000,1249.000000,202.000000,517.000000,189.000000,4.419600,223100.000000 +-118.110000,33.830000,36.000000,1784.000000,303.000000,964.000000,299.000000,4.270300,220900.000000 +-118.110000,33.830000,36.000000,1726.000000,287.000000,820.000000,288.000000,5.576700,218100.000000 +-118.110000,33.830000,36.000000,1462.000000,233.000000,664.000000,220.000000,5.117100,225300.000000 +-118.110000,33.820000,37.000000,1987.000000,347.000000,1095.000000,357.000000,4.320300,232800.000000 +-118.110000,33.820000,37.000000,1756.000000,345.000000,836.000000,335.000000,4.375000,218200.000000 +-118.110000,33.820000,36.000000,1999.000000,390.000000,887.000000,379.000000,3.816200,221900.000000 +-118.110000,33.820000,36.000000,1742.000000,340.000000,857.000000,341.000000,4.687500,218200.000000 +-118.110000,33.810000,37.000000,1694.000000,280.000000,776.000000,271.000000,6.218700,257900.000000 +-118.110000,33.810000,36.000000,1252.000000,209.000000,558.000000,214.000000,3.972200,235600.000000 +-118.110000,33.800000,36.000000,1837.000000,319.000000,810.000000,305.000000,4.389700,235000.000000 +-118.110000,33.800000,36.000000,1680.000000,291.000000,744.000000,280.000000,4.660000,244800.000000 +-118.110000,33.800000,35.000000,1034.000000,180.000000,444.000000,177.000000,5.460200,231600.000000 +-118.110000,33.790000,36.000000,2223.000000,370.000000,1039.000000,370.000000,5.794200,257000.000000 +-118.110000,33.790000,36.000000,1993.000000,354.000000,884.000000,337.000000,5.587000,244900.000000 +-118.110000,33.780000,16.000000,3985.000000,567.000000,1327.000000,564.000000,7.976700,500001.000000 +-118.110000,33.770000,15.000000,9103.000000,1847.000000,3333.000000,1712.000000,5.150800,367300.000000 +-118.110000,33.750000,24.000000,1608.000000,314.000000,592.000000,314.000000,5.092600,390500.000000 +-118.110000,33.750000,15.000000,2569.000000,812.000000,785.000000,477.000000,5.401100,346400.000000 +-118.110000,33.740000,43.000000,1222.000000,303.000000,565.000000,309.000000,4.848200,500001.000000 +-118.110000,33.730000,32.000000,1258.000000,333.000000,645.000000,334.000000,5.047600,500001.000000 +-118.120000,34.710000,46.000000,40.000000,10.000000,14.000000,7.000000,1.125000,225000.000000 +-118.120000,34.710000,26.000000,4230.000000,823.000000,2789.000000,793.000000,2.517900,104000.000000 +-118.120000,34.700000,7.000000,4915.000000,885.000000,2833.000000,874.000000,4.322900,130000.000000 +-118.120000,34.690000,27.000000,3019.000000,501.000000,1580.000000,523.000000,3.780400,113500.000000 +-118.120000,34.690000,17.000000,2479.000000,390.000000,1219.000000,363.000000,4.641700,125700.000000 +-118.120000,34.680000,12.000000,5319.000000,875.000000,2439.000000,779.000000,4.662900,131500.000000 +-118.120000,34.600000,33.000000,2189.000000,497.000000,1459.000000,443.000000,2.395800,94500.000000 +-118.120000,34.580000,13.000000,2614.000000,650.000000,1949.000000,537.000000,2.054700,102600.000000 +-118.120000,34.560000,5.000000,6446.000000,1154.000000,3427.000000,1104.000000,3.993600,148500.000000 +-118.120000,34.230000,52.000000,433.000000,69.000000,147.000000,53.000000,3.958300,162500.000000 +-118.120000,34.190000,52.000000,2405.000000,299.000000,970.000000,319.000000,8.783500,444100.000000 +-118.120000,34.180000,47.000000,2344.000000,513.000000,1537.000000,481.000000,3.477700,230600.000000 +-118.120000,34.180000,44.000000,2357.000000,342.000000,891.000000,337.000000,6.346700,352700.000000 +-118.120000,34.170000,52.000000,2948.000000,542.000000,1363.000000,495.000000,4.709800,287900.000000 
+-118.120000,34.170000,52.000000,2166.000000,483.000000,1308.000000,467.000000,3.041700,222600.000000 +-118.120000,34.170000,52.000000,1835.000000,330.000000,777.000000,317.000000,3.715900,315400.000000 +-118.120000,34.170000,37.000000,2705.000000,676.000000,1551.000000,608.000000,2.269200,225000.000000 +-118.120000,34.150000,35.000000,1760.000000,447.000000,984.000000,384.000000,3.416700,198200.000000 +-118.120000,34.140000,52.000000,2337.000000,352.000000,981.000000,328.000000,5.869200,490400.000000 +-118.120000,34.140000,25.000000,3420.000000,977.000000,1718.000000,947.000000,3.103300,217900.000000 +-118.120000,34.130000,52.000000,2935.000000,341.000000,975.000000,327.000000,11.706000,500001.000000 +-118.120000,34.120000,52.000000,2907.000000,317.000000,956.000000,279.000000,15.000100,500001.000000 +-118.120000,34.110000,52.000000,2787.000000,353.000000,1057.000000,364.000000,10.231700,500001.000000 +-118.120000,34.110000,48.000000,2124.000000,319.000000,785.000000,319.000000,5.213100,359600.000000 +-118.120000,34.100000,49.000000,2057.000000,430.000000,1103.000000,414.000000,4.055600,282600.000000 +-118.120000,34.100000,34.000000,2918.000000,555.000000,1435.000000,568.000000,4.234400,306300.000000 +-118.120000,34.090000,25.000000,4870.000000,1371.000000,3518.000000,1296.000000,3.230700,188400.000000 +-118.120000,34.090000,25.000000,3603.000000,1003.000000,2719.000000,913.000000,2.698100,208000.000000 +-118.120000,34.080000,52.000000,1437.000000,290.000000,980.000000,282.000000,5.303200,245700.000000 +-118.120000,34.080000,36.000000,2433.000000,585.000000,1565.000000,563.000000,3.234400,234900.000000 +-118.120000,34.080000,35.000000,2248.000000,615.000000,1762.000000,622.000000,3.000000,253900.000000 +-118.120000,34.070000,45.000000,1770.000000,423.000000,1410.000000,389.000000,3.059200,212500.000000 +-118.120000,34.070000,43.000000,1050.000000,252.000000,820.000000,244.000000,2.025000,215600.000000 +-118.120000,34.060000,25.000000,1526.000000,388.000000,1304.000000,378.000000,3.189200,214700.000000 +-118.120000,34.060000,25.000000,1137.000000,293.000000,800.000000,281.000000,2.428600,233300.000000 +-118.120000,34.060000,23.000000,1190.000000,347.000000,965.000000,327.000000,2.226100,211800.000000 +-118.120000,34.060000,17.000000,5137.000000,1614.000000,4945.000000,1535.000000,2.459900,181600.000000 +-118.120000,34.050000,32.000000,3775.000000,786.000000,2416.000000,792.000000,3.662500,247600.000000 +-118.120000,34.040000,35.000000,1064.000000,203.000000,608.000000,201.000000,4.093800,246900.000000 +-118.120000,34.040000,35.000000,1038.000000,209.000000,598.000000,190.000000,5.921400,254900.000000 +-118.120000,34.040000,34.000000,2103.000000,427.000000,1355.000000,434.000000,4.579500,235300.000000 +-118.120000,34.030000,20.000000,2595.000000,428.000000,1751.000000,479.000000,5.611200,308000.000000 +-118.120000,34.020000,36.000000,1595.000000,383.000000,1105.000000,359.000000,2.428600,205600.000000 +-118.120000,34.020000,25.000000,2655.000000,558.000000,1466.000000,525.000000,3.052900,265800.000000 +-118.120000,34.010000,40.000000,1417.000000,338.000000,1068.000000,331.000000,2.425900,164600.000000 +-118.120000,34.010000,33.000000,1956.000000,478.000000,1472.000000,464.000000,1.986700,166300.000000 +-118.120000,34.000000,31.000000,3281.000000,768.000000,2385.000000,733.000000,2.730800,173800.000000 +-118.120000,33.990000,24.000000,1705.000000,479.000000,2037.000000,459.000000,2.421900,137500.000000 
+-118.120000,33.980000,44.000000,932.000000,179.000000,717.000000,180.000000,3.687500,178100.000000 +-118.120000,33.970000,35.000000,708.000000,145.000000,471.000000,153.000000,3.200000,197400.000000 +-118.120000,33.970000,33.000000,3099.000000,839.000000,2025.000000,750.000000,3.183000,191100.000000 +-118.120000,33.960000,38.000000,2105.000000,348.000000,956.000000,350.000000,4.412500,246000.000000 +-118.120000,33.960000,38.000000,1301.000000,264.000000,877.000000,275.000000,4.625000,191300.000000 +-118.120000,33.960000,36.000000,1426.000000,235.000000,698.000000,240.000000,4.852300,267300.000000 +-118.120000,33.960000,34.000000,2863.000000,451.000000,1243.000000,466.000000,6.072300,297200.000000 +-118.120000,33.950000,36.000000,2752.000000,459.000000,1211.000000,452.000000,5.052600,269800.000000 +-118.120000,33.950000,35.000000,1604.000000,280.000000,802.000000,280.000000,5.752000,291000.000000 +-118.120000,33.940000,35.000000,1813.000000,313.000000,825.000000,316.000000,5.248500,323800.000000 +-118.120000,33.940000,33.000000,2206.000000,393.000000,973.000000,364.000000,4.675000,283000.000000 +-118.120000,33.940000,31.000000,2210.000000,519.000000,1047.000000,472.000000,3.329200,271300.000000 +-118.120000,33.930000,27.000000,580.000000,143.000000,466.000000,133.000000,3.090900,187500.000000 +-118.120000,33.920000,27.000000,6336.000000,1628.000000,4673.000000,1505.000000,2.589300,183700.000000 +-118.120000,33.910000,36.000000,2053.000000,386.000000,1023.000000,394.000000,3.000000,216600.000000 +-118.120000,33.910000,36.000000,1432.000000,265.000000,749.000000,261.000000,3.577200,207400.000000 +-118.120000,33.910000,35.000000,620.000000,122.000000,381.000000,124.000000,3.791700,183900.000000 +-118.120000,33.910000,35.000000,1518.000000,279.000000,857.000000,251.000000,3.691700,197500.000000 +-118.120000,33.910000,34.000000,682.000000,132.000000,491.000000,144.000000,4.638900,173800.000000 +-118.120000,33.890000,29.000000,2666.000000,848.000000,2030.000000,781.000000,2.543200,180900.000000 +-118.120000,33.890000,22.000000,6876.000000,1960.000000,5162.000000,1879.000000,2.929300,170800.000000 +-118.120000,33.880000,40.000000,2344.000000,571.000000,1305.000000,544.000000,3.192300,191900.000000 +-118.120000,33.880000,36.000000,1083.000000,218.000000,557.000000,210.000000,3.079500,218400.000000 +-118.120000,33.880000,25.000000,1768.000000,559.000000,983.000000,488.000000,2.618400,243800.000000 +-118.120000,33.870000,43.000000,1633.000000,355.000000,837.000000,350.000000,3.040500,188000.000000 +-118.120000,33.870000,21.000000,3764.000000,1081.000000,1919.000000,977.000000,2.505700,156300.000000 +-118.120000,33.860000,44.000000,2663.000000,511.000000,1277.000000,462.000000,4.319400,199500.000000 +-118.120000,33.860000,34.000000,2116.000000,427.000000,972.000000,396.000000,4.851600,213600.000000 +-118.120000,33.850000,37.000000,2386.000000,409.000000,1101.000000,399.000000,4.690800,218200.000000 +-118.120000,33.840000,37.000000,2706.000000,462.000000,1331.000000,476.000000,5.071900,220000.000000 +-118.120000,33.840000,37.000000,2143.000000,382.000000,1047.000000,377.000000,4.442300,216000.000000 +-118.120000,33.840000,37.000000,1242.000000,221.000000,565.000000,213.000000,4.109400,215800.000000 +-118.120000,33.830000,45.000000,1734.000000,331.000000,797.000000,293.000000,4.891700,222800.000000 +-118.120000,33.830000,44.000000,1712.000000,314.000000,691.000000,293.000000,4.359400,221300.000000 +-118.120000,33.820000,43.000000,1544.000000,286.000000,701.000000,298.000000,4.137500,226000.000000 
+-118.120000,33.820000,42.000000,1493.000000,277.000000,671.000000,267.000000,3.279400,224500.000000 +-118.120000,33.810000,36.000000,2565.000000,458.000000,1155.000000,443.000000,4.608700,224600.000000 +-118.120000,33.810000,36.000000,1665.000000,291.000000,721.000000,294.000000,4.687500,250700.000000 +-118.120000,33.800000,36.000000,1257.000000,205.000000,530.000000,211.000000,5.370100,251400.000000 +-118.120000,33.800000,35.000000,1835.000000,435.000000,774.000000,418.000000,2.709200,256300.000000 +-118.120000,33.790000,43.000000,1471.000000,301.000000,767.000000,311.000000,4.331700,232400.000000 +-118.120000,33.790000,41.000000,1762.000000,314.000000,738.000000,300.000000,4.168700,240700.000000 +-118.120000,33.760000,45.000000,3035.000000,516.000000,1127.000000,527.000000,7.079600,500001.000000 +-118.120000,33.760000,43.000000,3070.000000,668.000000,1240.000000,646.000000,3.781300,461500.000000 +-118.120000,33.750000,47.000000,3330.000000,569.000000,1220.000000,557.000000,7.367200,500001.000000 +-118.120000,33.750000,41.000000,2072.000000,491.000000,742.000000,414.000000,3.993400,500001.000000 +-118.130000,34.700000,34.000000,1943.000000,500.000000,1078.000000,446.000000,1.129600,93800.000000 +-118.130000,34.690000,34.000000,2156.000000,397.000000,1269.000000,388.000000,2.750000,96800.000000 +-118.130000,34.690000,32.000000,3670.000000,765.000000,1986.000000,673.000000,3.682000,108800.000000 +-118.130000,34.680000,28.000000,718.000000,124.000000,347.000000,121.000000,4.025000,102600.000000 +-118.130000,34.580000,29.000000,2370.000000,475.000000,1746.000000,483.000000,3.746400,113500.000000 +-118.130000,34.440000,10.000000,2726.000000,465.000000,1773.000000,459.000000,4.829500,319100.000000 +-118.130000,34.210000,36.000000,1449.000000,235.000000,621.000000,210.000000,6.182400,274100.000000 +-118.130000,34.200000,46.000000,2676.000000,427.000000,1022.000000,395.000000,6.428800,295500.000000 +-118.130000,34.200000,46.000000,1271.000000,236.000000,573.000000,210.000000,4.931200,240200.000000 +-118.130000,34.200000,45.000000,1213.000000,206.000000,529.000000,231.000000,5.662900,234000.000000 +-118.130000,34.190000,48.000000,2539.000000,425.000000,930.000000,364.000000,4.726900,303900.000000 +-118.130000,34.190000,43.000000,1621.000000,365.000000,1015.000000,329.000000,2.920000,242200.000000 +-118.130000,34.190000,42.000000,2203.000000,412.000000,1012.000000,377.000000,4.071400,234000.000000 +-118.130000,34.180000,52.000000,3094.000000,519.000000,1309.000000,488.000000,6.422300,310900.000000 +-118.130000,34.170000,49.000000,1962.000000,435.000000,1329.000000,457.000000,3.289800,200000.000000 +-118.130000,34.160000,36.000000,2162.000000,658.000000,1337.000000,590.000000,2.209500,176700.000000 +-118.130000,34.160000,33.000000,2682.000000,716.000000,2050.000000,692.000000,2.481700,169500.000000 +-118.130000,34.150000,9.000000,2099.000000,625.000000,1252.000000,554.000000,3.187500,173100.000000 +-118.130000,34.150000,18.000000,1665.000000,477.000000,1095.000000,390.000000,2.603800,155600.000000 +-118.130000,34.140000,29.000000,3559.000000,1034.000000,1658.000000,965.000000,3.264300,163900.000000 +-118.130000,34.140000,23.000000,5465.000000,1494.000000,2511.000000,1359.000000,3.453100,210900.000000 +-118.130000,34.120000,46.000000,3156.000000,430.000000,1109.000000,423.000000,10.739700,500001.000000 +-118.130000,34.110000,45.000000,1780.000000,289.000000,755.000000,328.000000,4.825000,351100.000000 
+-118.130000,34.100000,26.000000,3050.000000,825.000000,2153.000000,772.000000,3.110300,214100.000000 +-118.130000,34.090000,42.000000,700.000000,212.000000,662.000000,210.000000,3.007800,191700.000000 +-118.130000,34.090000,42.000000,2562.000000,781.000000,1936.000000,687.000000,2.221400,219000.000000 +-118.130000,34.090000,21.000000,3862.000000,1186.000000,2773.000000,1102.000000,2.781600,188200.000000 +-118.130000,34.080000,40.000000,1931.000000,449.000000,1367.000000,446.000000,2.575000,228400.000000 +-118.130000,34.080000,35.000000,2517.000000,662.000000,1883.000000,607.000000,2.578700,223000.000000 +-118.130000,34.070000,32.000000,1880.000000,428.000000,1404.000000,424.000000,3.085000,220500.000000 +-118.130000,34.070000,20.000000,2130.000000,654.000000,1870.000000,578.000000,2.366400,192200.000000 +-118.130000,34.060000,30.000000,1692.000000,398.000000,1130.000000,365.000000,2.867200,198500.000000 +-118.130000,34.060000,17.000000,1714.000000,572.000000,1590.000000,568.000000,1.187500,183900.000000 +-118.130000,34.050000,35.000000,3229.000000,616.000000,1879.000000,595.000000,3.953100,268400.000000 +-118.130000,34.040000,42.000000,2205.000000,451.000000,1392.000000,423.000000,4.364600,211400.000000 +-118.130000,34.040000,40.000000,1444.000000,312.000000,881.000000,303.000000,3.108300,220500.000000 +-118.130000,34.040000,36.000000,1938.000000,364.000000,1118.000000,374.000000,3.583300,227300.000000 +-118.130000,34.030000,42.000000,2203.000000,467.000000,1470.000000,488.000000,2.838500,192200.000000 +-118.130000,34.030000,31.000000,4267.000000,1070.000000,3176.000000,1071.000000,3.021200,208200.000000 +-118.130000,34.020000,43.000000,396.000000,91.000000,261.000000,73.000000,2.904400,172900.000000 +-118.130000,34.020000,41.000000,734.000000,190.000000,565.000000,191.000000,2.281300,192000.000000 +-118.130000,34.020000,40.000000,2988.000000,690.000000,2144.000000,667.000000,2.335900,189300.000000 +-118.130000,34.020000,38.000000,1243.000000,310.000000,788.000000,286.000000,2.585200,185100.000000 +-118.130000,34.020000,36.000000,984.000000,275.000000,1024.000000,284.000000,2.125000,153500.000000 +-118.130000,34.010000,43.000000,782.000000,207.000000,827.000000,223.000000,3.153800,154300.000000 +-118.130000,33.970000,36.000000,1759.000000,295.000000,837.000000,267.000000,4.699200,251900.000000 +-118.130000,33.970000,34.000000,1736.000000,297.000000,823.000000,292.000000,5.404200,241600.000000 +-118.130000,33.960000,38.000000,1040.000000,202.000000,557.000000,228.000000,4.000000,254700.000000 +-118.130000,33.960000,36.000000,1933.000000,341.000000,958.000000,335.000000,4.473200,266000.000000 +-118.130000,33.960000,35.000000,1500.000000,250.000000,706.000000,250.000000,4.562500,253500.000000 +-118.130000,33.950000,37.000000,1709.000000,333.000000,778.000000,344.000000,3.903600,326600.000000 +-118.130000,33.940000,34.000000,522.000000,138.000000,373.000000,139.000000,3.548100,265000.000000 +-118.130000,33.930000,38.000000,2040.000000,458.000000,1775.000000,445.000000,3.522700,202400.000000 +-118.130000,33.930000,34.000000,2122.000000,517.000000,1578.000000,488.000000,3.149600,191900.000000 +-118.130000,33.930000,19.000000,1793.000000,447.000000,1222.000000,452.000000,2.686200,195800.000000 +-118.130000,33.920000,36.000000,984.000000,183.000000,615.000000,206.000000,4.178600,201500.000000 +-118.130000,33.920000,28.000000,3069.000000,864.000000,1932.000000,835.000000,2.492500,177200.000000 +-118.130000,33.910000,36.000000,1967.000000,316.000000,910.000000,306.000000,4.494800,190600.000000 
+-118.130000,33.910000,35.000000,561.000000,104.000000,261.000000,105.000000,4.937500,183800.000000 +-118.130000,33.910000,34.000000,916.000000,162.000000,552.000000,164.000000,4.910700,222000.000000 +-118.130000,33.900000,38.000000,1475.000000,269.000000,827.000000,265.000000,4.766300,191600.000000 +-118.130000,33.900000,36.000000,1814.000000,350.000000,886.000000,347.000000,3.486800,208400.000000 +-118.130000,33.900000,35.000000,1458.000000,261.000000,686.000000,236.000000,3.903800,202700.000000 +-118.130000,33.890000,36.000000,599.000000,125.000000,361.000000,139.000000,5.039500,225800.000000 +-118.130000,33.890000,33.000000,3668.000000,867.000000,2368.000000,845.000000,2.890600,204900.000000 +-118.130000,33.890000,29.000000,2823.000000,737.000000,1723.000000,678.000000,2.712100,165500.000000 +-118.130000,33.880000,32.000000,3088.000000,1024.000000,1981.000000,956.000000,2.202700,192700.000000 +-118.130000,33.880000,32.000000,1788.000000,459.000000,1131.000000,461.000000,3.527800,166100.000000 +-118.130000,33.870000,45.000000,1606.000000,300.000000,735.000000,295.000000,4.676500,198400.000000 +-118.130000,33.870000,20.000000,3638.000000,868.000000,2326.000000,822.000000,3.330400,194600.000000 +-118.130000,33.860000,45.000000,1866.000000,343.000000,919.000000,344.000000,3.583300,200200.000000 +-118.130000,33.860000,37.000000,2259.000000,425.000000,1183.000000,413.000000,5.180500,201600.000000 +-118.130000,33.850000,36.000000,2110.000000,416.000000,1128.000000,403.000000,4.601900,208400.000000 +-118.130000,33.850000,36.000000,1885.000000,391.000000,1049.000000,405.000000,3.550000,212800.000000 +-118.130000,33.840000,46.000000,2439.000000,429.000000,944.000000,374.000000,4.284100,312400.000000 +-118.130000,33.840000,35.000000,3008.000000,674.000000,1584.000000,671.000000,3.546500,213200.000000 +-118.130000,33.830000,45.000000,3087.000000,574.000000,1474.000000,567.000000,5.519600,227600.000000 +-118.130000,33.830000,44.000000,1710.000000,333.000000,786.000000,344.000000,4.291700,314700.000000 +-118.130000,33.820000,44.000000,1785.000000,307.000000,779.000000,291.000000,4.305600,228600.000000 +-118.130000,33.820000,44.000000,1619.000000,280.000000,815.000000,284.000000,5.544900,232200.000000 +-118.130000,33.820000,37.000000,1530.000000,290.000000,711.000000,283.000000,5.179500,225400.000000 +-118.130000,33.820000,36.000000,665.000000,114.000000,273.000000,112.000000,3.732100,223700.000000 +-118.130000,33.810000,37.000000,1228.000000,237.000000,572.000000,242.000000,4.325000,223900.000000 +-118.130000,33.810000,37.000000,1013.000000,199.000000,493.000000,183.000000,4.784500,231400.000000 +-118.130000,33.810000,36.000000,1749.000000,322.000000,855.000000,319.000000,4.647300,227100.000000 +-118.130000,33.810000,34.000000,1903.000000,343.000000,928.000000,349.000000,5.395000,241900.000000 +-118.130000,33.800000,41.000000,1509.000000,325.000000,821.000000,314.000000,4.089300,223000.000000 +-118.130000,33.800000,36.000000,1496.000000,271.000000,743.000000,265.000000,4.431200,226000.000000 +-118.130000,33.800000,36.000000,1026.000000,182.000000,505.000000,176.000000,4.343800,233600.000000 +-118.130000,33.790000,45.000000,2317.000000,448.000000,1057.000000,428.000000,4.375000,234800.000000 +-118.130000,33.790000,44.000000,2153.000000,375.000000,947.000000,364.000000,5.007200,236200.000000 +-118.130000,33.790000,36.000000,1245.000000,211.000000,508.000000,221.000000,5.344100,480600.000000 +-118.130000,33.790000,29.000000,2937.000000,524.000000,1132.000000,528.000000,4.613300,500001.000000 
+-118.130000,33.790000,20.000000,6678.000000,1797.000000,3625.000000,1599.000000,3.771600,242900.000000 +-118.130000,33.780000,45.000000,1016.000000,172.000000,361.000000,163.000000,7.500000,434500.000000 +-118.130000,33.780000,31.000000,3039.000000,739.000000,1199.000000,697.000000,3.723200,500001.000000 +-118.130000,33.770000,52.000000,3697.000000,691.000000,1436.000000,671.000000,4.685200,395200.000000 +-118.130000,33.770000,37.000000,4365.000000,926.000000,1661.000000,868.000000,5.304600,360700.000000 +-118.130000,33.760000,52.000000,2216.000000,526.000000,940.000000,530.000000,4.546900,381000.000000 +-118.130000,33.760000,46.000000,2834.000000,673.000000,1175.000000,670.000000,4.787500,363800.000000 +-118.130000,33.760000,44.000000,1543.000000,463.000000,652.000000,406.000000,4.250000,439300.000000 +-118.140000,34.720000,15.000000,2181.000000,361.000000,1057.000000,300.000000,4.625000,118100.000000 +-118.140000,34.710000,33.000000,2347.000000,461.000000,1482.000000,374.000000,2.819400,93000.000000 +-118.140000,34.700000,12.000000,1984.000000,614.000000,1071.000000,574.000000,1.253200,102100.000000 +-118.140000,34.690000,48.000000,1379.000000,327.000000,696.000000,304.000000,2.116700,94900.000000 +-118.140000,34.690000,35.000000,2118.000000,374.000000,1108.000000,360.000000,3.432700,100300.000000 +-118.140000,34.680000,33.000000,2815.000000,485.000000,1447.000000,489.000000,4.267900,119600.000000 +-118.140000,34.680000,31.000000,2666.000000,662.000000,1337.000000,602.000000,2.443200,101100.000000 +-118.140000,34.680000,25.000000,1703.000000,342.000000,775.000000,309.000000,4.545500,126500.000000 +-118.140000,34.650000,20.000000,1257.000000,201.000000,551.000000,186.000000,4.659100,247200.000000 +-118.140000,34.570000,6.000000,9882.000000,1892.000000,4892.000000,1621.000000,3.763600,167600.000000 +-118.140000,34.200000,39.000000,2569.000000,426.000000,1282.000000,432.000000,5.095300,207400.000000 +-118.140000,34.190000,49.000000,1678.000000,277.000000,737.000000,287.000000,3.722200,237000.000000 +-118.140000,34.190000,45.000000,3595.000000,619.000000,1686.000000,607.000000,4.730000,201000.000000 +-118.140000,34.180000,50.000000,1493.000000,326.000000,1000.000000,323.000000,3.306800,154400.000000 +-118.140000,34.180000,47.000000,3457.000000,622.000000,1700.000000,579.000000,3.516400,226500.000000 +-118.140000,34.170000,52.000000,2687.000000,600.000000,1716.000000,544.000000,2.720100,205700.000000 +-118.140000,34.170000,40.000000,1054.000000,251.000000,1056.000000,276.000000,2.300000,146700.000000 +-118.140000,34.170000,34.000000,2384.000000,604.000000,2073.000000,540.000000,2.306200,158000.000000 +-118.140000,34.160000,38.000000,1843.000000,565.000000,1449.000000,524.000000,2.217400,215400.000000 +-118.140000,34.160000,36.000000,2973.000000,807.000000,2846.000000,784.000000,2.621700,156300.000000 +-118.140000,34.160000,30.000000,2598.000000,757.000000,2869.000000,769.000000,2.137700,142300.000000 +-118.140000,34.150000,52.000000,407.000000,160.000000,227.000000,148.000000,1.515600,187500.000000 +-118.140000,34.150000,41.000000,1256.000000,407.000000,855.000000,383.000000,1.992300,500001.000000 +-118.140000,34.150000,17.000000,1896.000000,674.000000,971.000000,652.000000,0.843800,175000.000000 +-118.140000,34.140000,24.000000,10239.000000,2823.000000,4210.000000,2565.000000,3.699700,225000.000000 +-118.140000,34.140000,17.000000,3404.000000,1011.000000,1694.000000,949.000000,2.951100,282300.000000 
+-118.140000,34.130000,49.000000,4438.000000,803.000000,1650.000000,741.000000,5.107200,479700.000000 +-118.140000,34.130000,16.000000,3569.000000,821.000000,1505.000000,783.000000,4.916700,251100.000000 +-118.140000,34.110000,52.000000,2742.000000,422.000000,1153.000000,414.000000,8.112400,500001.000000 +-118.140000,34.100000,52.000000,4061.000000,861.000000,2290.000000,790.000000,2.891900,258400.000000 +-118.140000,34.100000,27.000000,4073.000000,1013.000000,2411.000000,933.000000,3.108000,231000.000000 +-118.140000,34.090000,38.000000,1745.000000,457.000000,1547.000000,460.000000,2.850000,219000.000000 +-118.140000,34.090000,28.000000,4164.000000,1127.000000,2934.000000,1014.000000,2.748300,218800.000000 +-118.140000,34.090000,20.000000,3447.000000,1007.000000,2622.000000,934.000000,2.918000,208700.000000 +-118.140000,34.080000,30.000000,1433.000000,397.000000,1110.000000,346.000000,2.346400,191700.000000 +-118.140000,34.080000,24.000000,3988.000000,1098.000000,2909.000000,1034.000000,2.703600,170000.000000 +-118.140000,34.080000,24.000000,2999.000000,786.000000,2937.000000,796.000000,2.940500,217800.000000 +-118.140000,34.070000,52.000000,695.000000,145.000000,523.000000,170.000000,3.665000,220400.000000 +-118.140000,34.060000,39.000000,2390.000000,444.000000,1246.000000,422.000000,3.785700,245700.000000 +-118.140000,34.050000,39.000000,1880.000000,367.000000,954.000000,349.000000,3.875000,236400.000000 +-118.140000,34.050000,25.000000,5478.000000,1136.000000,3062.000000,1096.000000,3.411800,341100.000000 +-118.140000,34.040000,37.000000,1129.000000,212.000000,509.000000,202.000000,2.614600,243200.000000 +-118.140000,34.030000,45.000000,1569.000000,359.000000,1203.000000,359.000000,2.461200,180500.000000 +-118.140000,34.030000,44.000000,2003.000000,390.000000,1291.000000,392.000000,4.062500,201100.000000 +-118.140000,34.020000,45.000000,1307.000000,283.000000,967.000000,254.000000,2.750000,178300.000000 +-118.140000,34.020000,44.000000,1715.000000,460.000000,1740.000000,423.000000,2.701900,153300.000000 +-118.140000,34.020000,42.000000,1384.000000,458.000000,1825.000000,455.000000,1.417800,145500.000000 +-118.140000,34.020000,40.000000,1912.000000,502.000000,2077.000000,500.000000,2.600000,180600.000000 +-118.140000,34.010000,42.000000,1007.000000,277.000000,1060.000000,268.000000,3.017900,153700.000000 +-118.140000,34.010000,36.000000,702.000000,210.000000,834.000000,216.000000,2.250000,162500.000000 +-118.140000,33.970000,36.000000,1407.000000,385.000000,1763.000000,350.000000,2.636400,150000.000000 +-118.140000,33.970000,31.000000,2064.000000,612.000000,2461.000000,573.000000,2.052400,160800.000000 +-118.140000,33.970000,29.000000,1846.000000,530.000000,2576.000000,528.000000,2.630000,156000.000000 +-118.140000,33.960000,38.000000,590.000000,139.000000,620.000000,132.000000,2.173100,143800.000000 +-118.140000,33.960000,34.000000,2744.000000,541.000000,1333.000000,503.000000,4.053600,277200.000000 +-118.140000,33.950000,44.000000,1812.000000,338.000000,822.000000,314.000000,6.774400,294100.000000 +-118.140000,33.950000,42.000000,1413.000000,228.000000,630.000000,219.000000,6.856400,300000.000000 +-118.140000,33.950000,37.000000,1462.000000,243.000000,600.000000,236.000000,5.201500,302000.000000 +-118.140000,33.950000,36.000000,1942.000000,355.000000,891.000000,348.000000,3.663500,282100.000000 +-118.140000,33.940000,35.000000,2987.000000,601.000000,1561.000000,606.000000,4.003900,226500.000000 
+-118.140000,33.940000,31.000000,2841.000000,774.000000,1612.000000,708.000000,2.920500,196600.000000 +-118.140000,33.930000,31.000000,3205.000000,727.000000,1647.000000,664.000000,3.368100,223900.000000 +-118.140000,33.920000,31.000000,3731.000000,853.000000,2313.000000,801.000000,3.223700,218200.000000 +-118.140000,33.910000,37.000000,932.000000,171.000000,578.000000,175.000000,4.375000,177600.000000 +-118.140000,33.910000,36.000000,1096.000000,204.000000,569.000000,201.000000,4.475000,182300.000000 +-118.140000,33.910000,34.000000,1766.000000,410.000000,974.000000,404.000000,3.070300,180800.000000 +-118.140000,33.910000,32.000000,1981.000000,472.000000,1371.000000,431.000000,3.120400,204200.000000 +-118.140000,33.900000,39.000000,1379.000000,282.000000,883.000000,291.000000,3.337500,180100.000000 +-118.140000,33.900000,26.000000,2145.000000,471.000000,1150.000000,429.000000,3.597200,225800.000000 +-118.140000,33.890000,39.000000,1744.000000,339.000000,1048.000000,330.000000,4.573500,195500.000000 +-118.140000,33.890000,37.000000,1159.000000,238.000000,740.000000,243.000000,4.910700,179600.000000 +-118.140000,33.890000,33.000000,1250.000000,276.000000,866.000000,268.000000,4.170800,175000.000000 +-118.140000,33.880000,24.000000,3305.000000,982.000000,2085.000000,881.000000,2.664100,168200.000000 +-118.140000,33.870000,44.000000,1661.000000,315.000000,985.000000,319.000000,4.394200,219500.000000 +-118.140000,33.870000,44.000000,1607.000000,271.000000,799.000000,283.000000,5.084000,214100.000000 +-118.140000,33.870000,21.000000,6618.000000,1773.000000,4396.000000,1649.000000,3.098900,171400.000000 +-118.140000,33.860000,44.000000,1276.000000,234.000000,538.000000,213.000000,4.866700,218300.000000 +-118.140000,33.860000,37.000000,1404.000000,257.000000,652.000000,258.000000,4.206200,195400.000000 +-118.140000,33.860000,36.000000,1774.000000,348.000000,934.000000,333.000000,4.857100,203300.000000 +-118.140000,33.860000,36.000000,1703.000000,325.000000,845.000000,308.000000,5.010600,210800.000000 +-118.140000,33.840000,45.000000,1908.000000,361.000000,890.000000,342.000000,4.575000,336000.000000 +-118.140000,33.840000,44.000000,3043.000000,619.000000,1316.000000,607.000000,4.428600,254900.000000 +-118.140000,33.840000,43.000000,2107.000000,439.000000,876.000000,429.000000,3.202400,339400.000000 +-118.140000,33.800000,43.000000,2506.000000,531.000000,1230.000000,543.000000,3.421100,203900.000000 +-118.140000,33.790000,45.000000,1519.000000,263.000000,681.000000,267.000000,4.645200,212500.000000 +-118.140000,33.790000,44.000000,2388.000000,619.000000,1461.000000,592.000000,3.171100,215400.000000 +-118.140000,33.790000,23.000000,2573.000000,688.000000,1478.000000,604.000000,3.483300,209400.000000 +-118.140000,33.780000,44.000000,2101.000000,496.000000,1038.000000,500.000000,3.108000,217900.000000 +-118.140000,33.780000,42.000000,1898.000000,488.000000,940.000000,483.000000,3.410700,233300.000000 +-118.140000,33.770000,52.000000,2208.000000,409.000000,791.000000,408.000000,5.840800,500000.000000 +-118.140000,33.770000,49.000000,2792.000000,690.000000,1301.000000,648.000000,3.291700,307400.000000 +-118.140000,33.760000,52.000000,2677.000000,642.000000,1144.000000,624.000000,4.388900,378000.000000 +-118.140000,33.760000,50.000000,914.000000,167.000000,322.000000,165.000000,4.736100,418800.000000 +-118.140000,33.760000,50.000000,2960.000000,761.000000,1179.000000,718.000000,3.521400,398100.000000 +-118.140000,33.760000,44.000000,1633.000000,536.000000,741.000000,513.000000,3.385000,408300.000000 
+-118.140000,33.760000,37.000000,3242.000000,698.000000,1080.000000,629.000000,3.901000,432500.000000 +-118.140000,33.750000,39.000000,1995.000000,634.000000,867.000000,567.000000,4.079500,400000.000000 +-118.140000,33.710000,36.000000,2484.000000,525.000000,792.000000,446.000000,5.181500,500001.000000 +-118.150000,35.040000,29.000000,1671.000000,368.000000,821.000000,337.000000,2.160000,56800.000000 +-118.150000,34.710000,36.000000,1338.000000,250.000000,709.000000,250.000000,3.562500,101400.000000 +-118.150000,34.710000,35.000000,1503.000000,309.000000,842.000000,300.000000,2.527800,97700.000000 +-118.150000,34.700000,36.000000,2696.000000,454.000000,1192.000000,452.000000,3.961500,116300.000000 +-118.150000,34.690000,32.000000,1300.000000,234.000000,712.000000,249.000000,3.250000,107500.000000 +-118.150000,34.670000,5.000000,12317.000000,2953.000000,6291.000000,2654.000000,3.573200,146900.000000 +-118.150000,34.590000,33.000000,2111.000000,429.000000,1067.000000,397.000000,3.734400,111400.000000 +-118.150000,34.210000,34.000000,2765.000000,515.000000,1422.000000,438.000000,5.472700,238900.000000 +-118.150000,34.200000,52.000000,1786.000000,306.000000,1018.000000,322.000000,4.151800,182100.000000 +-118.150000,34.200000,37.000000,1997.000000,361.000000,1037.000000,363.000000,3.793200,210300.000000 +-118.150000,34.190000,47.000000,1717.000000,314.000000,868.000000,295.000000,3.609400,160700.000000 +-118.150000,34.190000,38.000000,1750.000000,411.000000,1398.000000,409.000000,2.396700,163100.000000 +-118.150000,34.180000,45.000000,2612.000000,664.000000,3117.000000,584.000000,2.302900,148800.000000 +-118.150000,34.180000,42.000000,1521.000000,320.000000,1118.000000,311.000000,3.312500,154900.000000 +-118.150000,34.170000,46.000000,2553.000000,558.000000,1740.000000,492.000000,2.027800,127500.000000 +-118.150000,34.170000,36.000000,930.000000,280.000000,1024.000000,300.000000,1.084600,146400.000000 +-118.150000,34.160000,52.000000,1925.000000,597.000000,2258.000000,594.000000,1.692100,162500.000000 +-118.150000,34.160000,38.000000,2471.000000,529.000000,1661.000000,441.000000,2.276500,146600.000000 +-118.150000,34.160000,20.000000,2410.000000,632.000000,2135.000000,578.000000,1.688700,148600.000000 +-118.150000,34.160000,18.000000,1711.000000,383.000000,1474.000000,415.000000,1.289100,181300.000000 +-118.150000,34.150000,52.000000,275.000000,123.000000,273.000000,111.000000,1.166700,500001.000000 +-118.150000,34.150000,49.000000,806.000000,199.000000,698.000000,172.000000,2.365400,137500.000000 +-118.150000,34.140000,52.000000,403.000000,117.000000,361.000000,105.000000,1.625000,187500.000000 +-118.150000,34.140000,45.000000,543.000000,191.000000,454.000000,181.000000,2.300000,55000.000000 +-118.150000,34.140000,27.000000,1499.000000,426.000000,755.000000,414.000000,3.875000,258300.000000 +-118.150000,34.130000,50.000000,2443.000000,494.000000,947.000000,451.000000,4.734400,314700.000000 +-118.150000,34.130000,34.000000,824.000000,224.000000,430.000000,213.000000,3.638900,215000.000000 +-118.150000,34.130000,30.000000,2763.000000,520.000000,1143.000000,465.000000,4.729800,500001.000000 +-118.150000,34.120000,52.000000,1518.000000,344.000000,725.000000,296.000000,3.401800,204500.000000 +-118.150000,34.120000,49.000000,1789.000000,288.000000,848.000000,311.000000,6.019900,500000.000000 +-118.150000,34.120000,43.000000,1810.000000,427.000000,742.000000,429.000000,3.852900,350000.000000 +-118.150000,34.120000,36.000000,6119.000000,1513.000000,2719.000000,1402.000000,3.842700,319700.000000 
+-118.150000,34.110000,52.000000,2375.000000,369.000000,930.000000,351.000000,7.411100,469100.000000 +-118.150000,34.110000,52.000000,1746.000000,330.000000,704.000000,306.000000,3.789500,364800.000000 +-118.150000,34.110000,52.000000,1000.000000,192.000000,363.000000,158.000000,4.298100,352800.000000 +-118.150000,34.100000,52.000000,4325.000000,823.000000,1927.000000,795.000000,3.948500,419100.000000 +-118.150000,34.100000,36.000000,3514.000000,818.000000,2277.000000,828.000000,3.121100,229300.000000 +-118.150000,34.090000,52.000000,2203.000000,430.000000,1238.000000,403.000000,4.430600,225800.000000 +-118.150000,34.090000,27.000000,1935.000000,460.000000,1456.000000,382.000000,2.806200,192800.000000 +-118.150000,34.080000,48.000000,3697.000000,816.000000,2446.000000,787.000000,3.398800,199200.000000 +-118.150000,34.080000,44.000000,1053.000000,251.000000,941.000000,256.000000,3.125000,205600.000000 +-118.150000,34.070000,52.000000,1983.000000,344.000000,887.000000,331.000000,3.287500,234400.000000 +-118.150000,34.070000,44.000000,1626.000000,383.000000,1063.000000,334.000000,2.434800,220700.000000 +-118.150000,34.060000,28.000000,3855.000000,922.000000,2517.000000,874.000000,3.505000,204300.000000 +-118.150000,34.050000,31.000000,3362.000000,799.000000,1939.000000,754.000000,3.508900,305800.000000 +-118.150000,34.040000,44.000000,647.000000,142.000000,457.000000,143.000000,3.687500,162500.000000 +-118.150000,34.040000,39.000000,1099.000000,263.000000,787.000000,269.000000,3.779400,194600.000000 +-118.150000,34.040000,33.000000,818.000000,195.000000,664.000000,198.000000,2.194400,203300.000000 +-118.150000,34.030000,44.000000,603.000000,207.000000,588.000000,218.000000,2.053600,186400.000000 +-118.150000,34.030000,43.000000,2006.000000,472.000000,1687.000000,463.000000,1.799100,158800.000000 +-118.150000,34.020000,42.000000,2729.000000,725.000000,3004.000000,722.000000,2.343800,154300.000000 +-118.150000,34.020000,37.000000,2344.000000,631.000000,2195.000000,610.000000,2.702200,151900.000000 +-118.150000,34.000000,32.000000,3218.000000,739.000000,2368.000000,730.000000,3.140600,175300.000000 +-118.150000,33.980000,37.000000,1184.000000,290.000000,1320.000000,276.000000,2.300000,165600.000000 +-118.150000,33.980000,17.000000,3361.000000,925.000000,3264.000000,914.000000,2.281300,145600.000000 +-118.150000,33.970000,33.000000,1903.000000,469.000000,1882.000000,435.000000,2.407100,170500.000000 +-118.150000,33.970000,32.000000,927.000000,250.000000,970.000000,248.000000,2.159100,181500.000000 +-118.150000,33.960000,33.000000,2418.000000,485.000000,1397.000000,477.000000,3.108300,285500.000000 +-118.150000,33.960000,33.000000,1471.000000,451.000000,2272.000000,482.000000,2.538500,160900.000000 +-118.150000,33.960000,33.000000,1201.000000,340.000000,1482.000000,334.000000,2.482100,150000.000000 +-118.150000,33.950000,35.000000,2753.000000,702.000000,1592.000000,614.000000,2.787500,209000.000000 +-118.150000,33.940000,37.000000,1594.000000,321.000000,1003.000000,323.000000,3.328900,199700.000000 +-118.150000,33.930000,42.000000,1839.000000,346.000000,1034.000000,339.000000,4.980800,212300.000000 +-118.150000,33.930000,34.000000,1745.000000,404.000000,1084.000000,410.000000,3.341100,220500.000000 +-118.150000,33.930000,30.000000,3096.000000,628.000000,1676.000000,587.000000,4.658300,207300.000000 +-118.150000,33.930000,25.000000,1948.000000,433.000000,1128.000000,429.000000,3.761400,255900.000000 +-118.150000,33.920000,40.000000,1335.000000,281.000000,804.000000,282.000000,4.319400,198400.000000 
+-118.150000,33.920000,36.000000,1890.000000,400.000000,1232.000000,386.000000,4.375000,184200.000000 +-118.150000,33.920000,30.000000,915.000000,234.000000,646.000000,211.000000,2.520800,182800.000000 +-118.150000,33.920000,28.000000,1038.000000,252.000000,912.000000,245.000000,2.587500,161200.000000 +-118.150000,33.910000,38.000000,901.000000,205.000000,760.000000,208.000000,2.964300,147400.000000 +-118.150000,33.910000,35.000000,574.000000,116.000000,610.000000,147.000000,3.318200,133300.000000 +-118.150000,33.910000,35.000000,1590.000000,350.000000,1299.000000,335.000000,4.031300,163200.000000 +-118.150000,33.910000,25.000000,2053.000000,578.000000,1721.000000,507.000000,2.345600,146100.000000 +-118.150000,33.900000,20.000000,2850.000000,737.000000,1855.000000,662.000000,2.809000,144600.000000 +-118.150000,33.890000,30.000000,4426.000000,995.000000,4196.000000,921.000000,3.274000,148300.000000 +-118.150000,33.880000,24.000000,4232.000000,1092.000000,2688.000000,1035.000000,2.520000,146000.000000 +-118.150000,33.870000,29.000000,2690.000000,659.000000,1747.000000,617.000000,3.371300,198200.000000 +-118.150000,33.860000,36.000000,1578.000000,312.000000,827.000000,311.000000,4.894200,194100.000000 +-118.150000,33.860000,34.000000,2403.000000,413.000000,1385.000000,386.000000,4.493400,213800.000000 +-118.150000,33.860000,32.000000,2630.000000,559.000000,1069.000000,491.000000,2.465900,209000.000000 +-118.150000,33.850000,36.000000,1491.000000,259.000000,699.000000,266.000000,4.078100,217300.000000 +-118.150000,33.850000,36.000000,1435.000000,249.000000,606.000000,234.000000,4.143900,212600.000000 +-118.150000,33.850000,30.000000,4071.000000,1067.000000,2144.000000,970.000000,2.726800,218100.000000 +-118.150000,33.840000,37.000000,1508.000000,252.000000,635.000000,241.000000,3.750000,221300.000000 +-118.150000,33.840000,29.000000,2448.000000,354.000000,894.000000,349.000000,7.652600,481300.000000 +-118.150000,33.800000,44.000000,1886.000000,399.000000,1167.000000,372.000000,3.104200,219800.000000 +-118.150000,33.790000,5.000000,3700.000000,993.000000,1657.000000,848.000000,3.782600,196300.000000 +-118.150000,33.790000,25.000000,4013.000000,1097.000000,2297.000000,969.000000,3.045300,185900.000000 +-118.150000,33.780000,35.000000,2768.000000,752.000000,1277.000000,651.000000,3.619300,250000.000000 +-118.150000,33.780000,17.000000,1584.000000,435.000000,904.000000,406.000000,2.087500,181300.000000 +-118.150000,33.780000,13.000000,3056.000000,861.000000,1600.000000,824.000000,3.300300,207800.000000 +-118.150000,33.780000,12.000000,4436.000000,1133.000000,2176.000000,1002.000000,3.581200,198600.000000 +-118.150000,33.770000,41.000000,3448.000000,896.000000,1621.000000,838.000000,4.500000,339800.000000 +-118.150000,33.770000,39.000000,2428.000000,634.000000,1312.000000,612.000000,2.721200,266300.000000 +-118.150000,33.770000,36.000000,4366.000000,1211.000000,1912.000000,1172.000000,3.529200,361800.000000 +-118.150000,33.770000,27.000000,3043.000000,787.000000,1398.000000,747.000000,3.552800,271100.000000 +-118.150000,33.760000,36.000000,2916.000000,785.000000,1183.000000,749.000000,3.598500,500001.000000 +-118.160000,35.050000,44.000000,1297.000000,307.000000,776.000000,278.000000,2.587500,68900.000000 +-118.160000,34.710000,27.000000,6007.000000,998.000000,2680.000000,882.000000,4.171900,117200.000000 +-118.160000,34.700000,33.000000,2918.000000,494.000000,1365.000000,478.000000,4.878700,127700.000000 
+-118.160000,34.680000,9.000000,4303.000000,900.000000,2240.000000,861.000000,3.780700,110900.000000 +-118.160000,34.680000,17.000000,2994.000000,832.000000,1571.000000,695.000000,2.590200,85400.000000 +-118.160000,34.600000,5.000000,7294.000000,1139.000000,3123.000000,930.000000,4.990400,154100.000000 +-118.160000,34.600000,2.000000,11008.000000,1549.000000,4098.000000,1367.000000,6.486500,204400.000000 +-118.160000,34.200000,43.000000,1810.000000,343.000000,988.000000,307.000000,3.820300,176000.000000 +-118.160000,34.190000,44.000000,2195.000000,449.000000,1377.000000,417.000000,3.588700,153500.000000 +-118.160000,34.190000,42.000000,2076.000000,462.000000,1641.000000,436.000000,2.232600,149200.000000 +-118.160000,34.190000,40.000000,1840.000000,358.000000,1218.000000,347.000000,4.250000,177900.000000 +-118.160000,34.180000,44.000000,1870.000000,389.000000,1345.000000,391.000000,1.893200,136100.000000 +-118.160000,34.170000,52.000000,1193.000000,228.000000,703.000000,221.000000,3.174100,163800.000000 +-118.160000,34.170000,46.000000,1508.000000,261.000000,674.000000,255.000000,3.590900,155400.000000 +-118.160000,34.160000,52.000000,1576.000000,239.000000,696.000000,249.000000,6.070000,261800.000000 +-118.160000,34.160000,44.000000,1284.000000,278.000000,925.000000,261.000000,1.732100,178400.000000 +-118.160000,34.160000,43.000000,1276.000000,226.000000,545.000000,209.000000,4.151800,230700.000000 +-118.160000,34.150000,19.000000,1721.000000,290.000000,571.000000,278.000000,6.603100,500001.000000 +-118.160000,34.150000,17.000000,821.000000,163.000000,229.000000,164.000000,7.371500,263000.000000 +-118.160000,34.140000,41.000000,3039.000000,482.000000,973.000000,446.000000,7.481700,500001.000000 +-118.160000,34.140000,27.000000,1551.000000,254.000000,816.000000,233.000000,8.243500,500001.000000 +-118.160000,34.120000,38.000000,2231.000000,489.000000,940.000000,484.000000,5.416500,435100.000000 +-118.160000,34.110000,48.000000,1091.000000,236.000000,632.000000,234.000000,3.723500,263600.000000 +-118.160000,34.090000,52.000000,1722.000000,448.000000,1122.000000,425.000000,3.120400,224000.000000 +-118.160000,34.090000,50.000000,1568.000000,302.000000,1093.000000,333.000000,3.144200,162100.000000 +-118.160000,34.090000,36.000000,3334.000000,920.000000,2881.000000,800.000000,2.169600,170800.000000 +-118.160000,34.080000,43.000000,1523.000000,378.000000,1338.000000,352.000000,3.203100,144600.000000 +-118.160000,34.070000,41.000000,247.000000,55.000000,925.000000,50.000000,3.576900,135700.000000 +-118.160000,34.060000,25.000000,4284.000000,741.000000,2163.000000,701.000000,6.150900,315100.000000 +-118.160000,34.040000,45.000000,332.000000,70.000000,302.000000,60.000000,3.189500,156300.000000 +-118.160000,34.040000,38.000000,1076.000000,286.000000,1535.000000,323.000000,2.702600,145000.000000 +-118.160000,34.040000,22.000000,2991.000000,791.000000,2486.000000,754.000000,1.507800,181900.000000 +-118.160000,34.040000,11.000000,852.000000,215.000000,806.000000,202.000000,1.397100,134400.000000 +-118.160000,34.030000,45.000000,894.000000,231.000000,925.000000,222.000000,2.604200,145000.000000 +-118.160000,34.030000,41.000000,1377.000000,293.000000,1142.000000,272.000000,3.172400,141600.000000 +-118.160000,34.020000,44.000000,1218.000000,374.000000,1175.000000,342.000000,1.968800,173900.000000 +-118.160000,34.020000,42.000000,814.000000,216.000000,773.000000,208.000000,2.531300,156900.000000 +-118.160000,34.020000,41.000000,1256.000000,391.000000,1511.000000,381.000000,1.798100,166000.000000 
+-118.160000,34.020000,35.000000,1734.000000,493.000000,2053.000000,508.000000,2.144200,149200.000000 +-118.160000,34.020000,34.000000,1474.000000,511.000000,1962.000000,501.000000,1.871500,139600.000000 +-118.160000,34.010000,40.000000,1552.000000,419.000000,1919.000000,427.000000,2.259600,137500.000000 +-118.160000,34.010000,37.000000,690.000000,261.000000,952.000000,255.000000,1.635400,158900.000000 +-118.160000,34.010000,36.000000,931.000000,246.000000,732.000000,235.000000,1.767900,142800.000000 +-118.160000,34.000000,37.000000,1341.000000,336.000000,1233.000000,306.000000,3.658300,150500.000000 +-118.160000,33.980000,33.000000,1196.000000,313.000000,1448.000000,320.000000,2.937500,162500.000000 +-118.160000,33.970000,39.000000,1444.000000,447.000000,1890.000000,416.000000,2.118100,176600.000000 +-118.160000,33.970000,32.000000,1347.000000,434.000000,1756.000000,438.000000,1.946400,190600.000000 +-118.160000,33.970000,31.000000,1363.000000,428.000000,1897.000000,364.000000,2.392900,191100.000000 +-118.160000,33.970000,30.000000,2419.000000,715.000000,3208.000000,719.000000,2.174300,176000.000000 +-118.160000,33.970000,13.000000,221.000000,63.000000,286.000000,64.000000,1.906300,175000.000000 +-118.160000,33.940000,32.000000,2210.000000,456.000000,1270.000000,484.000000,4.770800,178600.000000 +-118.160000,33.940000,25.000000,5675.000000,1224.000000,3317.000000,1119.000000,3.935200,232900.000000 +-118.160000,33.940000,25.000000,3341.000000,789.000000,1685.000000,751.000000,3.693600,238300.000000 +-118.160000,33.930000,35.000000,757.000000,151.000000,474.000000,132.000000,3.736100,179800.000000 +-118.160000,33.920000,44.000000,1368.000000,277.000000,899.000000,271.000000,3.593800,161300.000000 +-118.160000,33.920000,36.000000,2062.000000,351.000000,1134.000000,358.000000,4.488100,218900.000000 +-118.160000,33.910000,6.000000,3445.000000,847.000000,2467.000000,712.000000,3.150700,144000.000000 +-118.160000,33.910000,41.000000,1806.000000,408.000000,1146.000000,374.000000,2.964300,162200.000000 +-118.160000,33.910000,28.000000,2922.000000,739.000000,3013.000000,673.000000,2.953100,127100.000000 +-118.160000,33.900000,28.000000,2410.000000,616.000000,2399.000000,594.000000,2.733900,156700.000000 +-118.160000,33.890000,6.000000,1655.000000,536.000000,1201.000000,487.000000,1.734400,145800.000000 +-118.160000,33.890000,46.000000,940.000000,219.000000,599.000000,214.000000,3.281300,190900.000000 +-118.160000,33.880000,33.000000,2180.000000,522.000000,1634.000000,467.000000,3.011400,167000.000000 +-118.160000,33.880000,30.000000,1694.000000,398.000000,1181.000000,383.000000,2.977900,169500.000000 +-118.160000,33.880000,18.000000,2287.000000,662.000000,1804.000000,537.000000,1.990300,170300.000000 +-118.160000,33.870000,32.000000,1854.000000,471.000000,1363.000000,478.000000,2.640600,156700.000000 +-118.160000,33.860000,26.000000,6607.000000,1663.000000,4066.000000,1558.000000,2.506800,156300.000000 +-118.160000,33.850000,36.000000,2668.000000,473.000000,1315.000000,478.000000,4.071400,215600.000000 +-118.160000,33.850000,36.000000,1979.000000,339.000000,952.000000,339.000000,4.081500,216200.000000 +-118.160000,33.840000,36.000000,2831.000000,573.000000,1462.000000,569.000000,3.864600,214600.000000 +-118.160000,33.840000,36.000000,2444.000000,432.000000,1199.000000,424.000000,4.153800,218800.000000 +-118.160000,33.840000,36.000000,2220.000000,367.000000,1002.000000,351.000000,5.071900,219500.000000 
+-118.160000,33.840000,36.000000,1348.000000,234.000000,643.000000,221.000000,3.644700,211000.000000 +-118.160000,33.800000,9.000000,3564.000000,835.000000,1530.000000,807.000000,5.180600,175000.000000 +-118.160000,33.790000,26.000000,3061.000000,844.000000,2135.000000,769.000000,2.875000,164000.000000 +-118.160000,33.790000,25.000000,5463.000000,1265.000000,3010.000000,1179.000000,3.233000,199100.000000 +-118.160000,33.790000,25.000000,3742.000000,1180.000000,3916.000000,1063.000000,2.400000,153700.000000 +-118.160000,33.780000,52.000000,3248.000000,853.000000,1819.000000,815.000000,3.173900,222900.000000 +-118.160000,33.780000,39.000000,4075.000000,1085.000000,2470.000000,1025.000000,2.331700,222500.000000 +-118.160000,33.780000,33.000000,2048.000000,585.000000,2074.000000,597.000000,2.015600,152700.000000 +-118.160000,33.780000,29.000000,3684.000000,1301.000000,3891.000000,1143.000000,1.695500,179700.000000 +-118.160000,33.780000,15.000000,4798.000000,1374.000000,3087.000000,1212.000000,2.127000,163300.000000 +-118.160000,33.780000,14.000000,1709.000000,558.000000,1939.000000,520.000000,1.980800,139100.000000 +-118.160000,33.770000,49.000000,3382.000000,787.000000,1314.000000,756.000000,3.812500,382100.000000 +-118.160000,33.770000,38.000000,3235.000000,769.000000,1284.000000,752.000000,2.938400,304100.000000 +-118.160000,33.770000,30.000000,4439.000000,1105.000000,1749.000000,1011.000000,3.898400,306300.000000 +-118.160000,33.770000,30.000000,2800.000000,757.000000,1292.000000,742.000000,2.761400,272200.000000 +-118.160000,33.770000,29.000000,3078.000000,786.000000,1460.000000,736.000000,2.875000,232500.000000 +-118.160000,33.720000,29.000000,2743.000000,708.000000,1059.000000,651.000000,3.625000,500000.000000 +-118.170000,34.870000,9.000000,1507.000000,293.000000,761.000000,278.000000,3.018400,87900.000000 +-118.170000,34.860000,21.000000,2370.000000,540.000000,1488.000000,554.000000,2.736100,83300.000000 +-118.170000,34.680000,13.000000,5341.000000,773.000000,2288.000000,724.000000,6.677200,185600.000000 +-118.170000,34.670000,5.000000,8352.000000,1555.000000,3723.000000,1389.000000,4.565900,140300.000000 +-118.170000,34.660000,9.000000,1561.000000,253.000000,731.000000,233.000000,5.704900,173200.000000 +-118.170000,34.610000,7.000000,2465.000000,336.000000,978.000000,332.000000,7.138100,292200.000000 +-118.170000,34.190000,45.000000,1790.000000,316.000000,1035.000000,312.000000,4.098500,173100.000000 +-118.170000,34.180000,38.000000,1280.000000,231.000000,828.000000,237.000000,4.375000,166700.000000 +-118.170000,34.140000,45.000000,2257.000000,285.000000,759.000000,305.000000,11.789400,500001.000000 +-118.170000,34.120000,37.000000,2246.000000,481.000000,995.000000,459.000000,4.294400,354700.000000 +-118.170000,34.120000,35.000000,2568.000000,672.000000,1696.000000,605.000000,2.915400,169200.000000 +-118.170000,34.120000,30.000000,3376.000000,720.000000,1990.000000,725.000000,3.781300,232000.000000 +-118.170000,34.110000,26.000000,4971.000000,996.000000,2370.000000,932.000000,4.967600,381400.000000 +-118.170000,34.100000,48.000000,2514.000000,595.000000,2484.000000,601.000000,3.114600,142500.000000 +-118.170000,34.100000,37.000000,299.000000,89.000000,318.000000,92.000000,1.312500,145800.000000 +-118.170000,34.100000,25.000000,4444.000000,647.000000,1922.000000,652.000000,8.058000,477300.000000 +-118.170000,34.090000,36.000000,3066.000000,797.000000,3097.000000,780.000000,2.552300,156500.000000 
+-118.170000,34.090000,33.000000,2907.000000,797.000000,3212.000000,793.000000,2.234800,146600.000000 +-118.170000,34.080000,39.000000,787.000000,181.000000,731.000000,179.000000,3.227900,158500.000000 +-118.170000,34.080000,33.000000,851.000000,231.000000,936.000000,228.000000,3.375000,147500.000000 +-118.170000,34.070000,19.000000,2150.000000,544.000000,1510.000000,467.000000,3.495200,150000.000000 +-118.170000,34.060000,44.000000,1856.000000,461.000000,1853.000000,452.000000,2.503300,131900.000000 +-118.170000,34.060000,43.000000,464.000000,116.000000,416.000000,120.000000,2.475000,142600.000000 +-118.170000,34.050000,45.000000,733.000000,178.000000,715.000000,165.000000,2.596200,124100.000000 +-118.170000,34.050000,39.000000,962.000000,229.000000,999.000000,221.000000,3.375000,126000.000000 +-118.170000,34.050000,35.000000,1256.000000,294.000000,2990.000000,302.000000,3.152800,121800.000000 +-118.170000,34.040000,46.000000,705.000000,167.000000,655.000000,149.000000,3.593800,141100.000000 +-118.170000,34.040000,45.000000,911.000000,238.000000,1005.000000,229.000000,2.816700,114000.000000 +-118.170000,34.040000,44.000000,691.000000,155.000000,613.000000,142.000000,1.966700,133900.000000 +-118.170000,34.040000,43.000000,908.000000,232.000000,1005.000000,224.000000,1.750000,134000.000000 +-118.170000,34.040000,39.000000,563.000000,138.000000,682.000000,137.000000,2.750000,150000.000000 +-118.170000,34.040000,38.000000,385.000000,102.000000,402.000000,95.000000,1.625000,129700.000000 +-118.170000,34.030000,43.000000,1636.000000,506.000000,1855.000000,502.000000,2.290200,152400.000000 +-118.170000,34.030000,42.000000,882.000000,292.000000,1248.000000,281.000000,2.761000,120000.000000 +-118.170000,34.030000,41.000000,2099.000000,530.000000,2325.000000,528.000000,2.197900,140800.000000 +-118.170000,34.030000,31.000000,1014.000000,252.000000,1064.000000,247.000000,2.416700,125500.000000 +-118.170000,34.020000,42.000000,946.000000,272.000000,1191.000000,261.000000,2.450000,132000.000000 +-118.170000,34.020000,39.000000,759.000000,215.000000,883.000000,226.000000,2.125000,143800.000000 +-118.170000,34.020000,34.000000,760.000000,219.000000,968.000000,202.000000,1.781300,145000.000000 +-118.170000,34.020000,33.000000,346.000000,103.000000,488.000000,107.000000,1.868100,112500.000000 +-118.170000,34.010000,36.000000,1657.000000,425.000000,1689.000000,418.000000,2.779900,149300.000000 +-118.170000,34.010000,30.000000,1228.000000,358.000000,1603.000000,323.000000,3.022500,130800.000000 +-118.170000,33.980000,41.000000,756.000000,195.000000,873.000000,212.000000,2.732100,156000.000000 +-118.170000,33.980000,41.000000,428.000000,111.000000,585.000000,139.000000,3.178600,132100.000000 +-118.170000,33.980000,27.000000,1871.000000,556.000000,2542.000000,581.000000,2.842700,164400.000000 +-118.170000,33.970000,33.000000,2410.000000,641.000000,2106.000000,593.000000,2.242200,168200.000000 +-118.170000,33.970000,31.000000,3388.000000,1059.000000,3558.000000,957.000000,2.404900,159000.000000 +-118.170000,33.960000,29.000000,2913.000000,787.000000,3803.000000,740.000000,2.555600,146500.000000 +-118.170000,33.960000,25.000000,3297.000000,1066.000000,5027.000000,1041.000000,2.281700,164200.000000 +-118.170000,33.960000,25.000000,2249.000000,681.000000,2621.000000,628.000000,2.300000,164200.000000 +-118.170000,33.950000,23.000000,1991.000000,584.000000,1380.000000,535.000000,1.910700,181900.000000 +-118.170000,33.940000,17.000000,1145.000000,209.000000,499.000000,202.000000,4.638900,165500.000000 
+-118.170000,33.920000,43.000000,2099.000000,398.000000,1276.000000,387.000000,3.152800,166800.000000 +-118.170000,33.920000,36.000000,2447.000000,503.000000,1532.000000,498.000000,4.366700,171800.000000 +-118.170000,33.910000,42.000000,856.000000,167.000000,748.000000,195.000000,3.800000,145800.000000 +-118.170000,33.910000,39.000000,1157.000000,273.000000,877.000000,305.000000,3.108700,171000.000000 +-118.170000,33.910000,37.000000,1499.000000,288.000000,1237.000000,344.000000,3.933300,162300.000000 +-118.170000,33.900000,12.000000,3653.000000,993.000000,3215.000000,854.000000,2.868100,114200.000000 +-118.170000,33.890000,52.000000,63.000000,12.000000,47.000000,8.000000,7.242300,350000.000000 +-118.170000,33.890000,11.000000,3605.000000,880.000000,3637.000000,873.000000,2.632800,160700.000000 +-118.170000,33.880000,42.000000,1645.000000,371.000000,1161.000000,351.000000,3.089300,162700.000000 +-118.170000,33.880000,29.000000,815.000000,206.000000,590.000000,183.000000,3.005200,166700.000000 +-118.170000,33.870000,45.000000,2110.000000,494.000000,1404.000000,454.000000,2.980300,165900.000000 +-118.170000,33.870000,40.000000,2462.000000,587.000000,1821.000000,536.000000,3.564600,162600.000000 +-118.170000,33.860000,44.000000,1701.000000,396.000000,1091.000000,384.000000,3.025000,162300.000000 +-118.170000,33.860000,40.000000,1301.000000,342.000000,954.000000,336.000000,2.380400,158000.000000 +-118.170000,33.860000,10.000000,1664.000000,508.000000,1369.000000,493.000000,2.988600,175000.000000 +-118.170000,33.850000,39.000000,2247.000000,526.000000,1670.000000,525.000000,3.070000,173000.000000 +-118.170000,33.850000,37.000000,3714.000000,708.000000,1956.000000,694.000000,4.221800,200500.000000 +-118.170000,33.840000,45.000000,1853.000000,328.000000,945.000000,320.000000,5.078700,219200.000000 +-118.170000,33.840000,45.000000,1533.000000,331.000000,791.000000,335.000000,3.460500,186600.000000 +-118.170000,33.840000,29.000000,4716.000000,1372.000000,2515.000000,1272.000000,2.726000,208700.000000 +-118.170000,33.830000,45.000000,2019.000000,363.000000,880.000000,339.000000,4.102300,217300.000000 +-118.170000,33.830000,45.000000,1808.000000,315.000000,800.000000,302.000000,4.869300,277700.000000 +-118.170000,33.820000,52.000000,2539.000000,497.000000,1152.000000,488.000000,4.135400,268200.000000 +-118.170000,33.790000,36.000000,948.000000,303.000000,1042.000000,301.000000,1.550000,100000.000000 +-118.170000,33.790000,32.000000,2171.000000,672.000000,3002.000000,648.000000,2.375000,139700.000000 +-118.170000,33.790000,28.000000,1219.000000,408.000000,1816.000000,348.000000,1.758900,118300.000000 +-118.170000,33.780000,44.000000,2364.000000,746.000000,3184.000000,672.000000,1.918000,147500.000000 +-118.170000,33.780000,29.000000,2920.000000,962.000000,3580.000000,772.000000,1.739300,140200.000000 +-118.170000,33.780000,23.000000,3768.000000,1261.000000,3940.000000,1098.000000,1.964700,186200.000000 +-118.170000,33.770000,45.000000,2508.000000,797.000000,1340.000000,720.000000,2.678600,191100.000000 +-118.170000,33.770000,45.000000,2151.000000,643.000000,1047.000000,579.000000,3.114900,218800.000000 +-118.170000,33.770000,45.000000,2143.000000,697.000000,1004.000000,594.000000,3.015300,220000.000000 +-118.170000,33.770000,39.000000,2953.000000,878.000000,1379.000000,785.000000,2.137800,180400.000000 +-118.170000,33.770000,37.000000,1127.000000,327.000000,492.000000,331.000000,2.675000,241700.000000 +-118.170000,33.770000,36.000000,2933.000000,881.000000,2077.000000,838.000000,2.253800,181300.000000 
+-118.170000,33.770000,25.000000,4405.000000,1262.000000,2178.000000,1090.000000,3.050300,225000.000000 +-118.170000,33.770000,12.000000,4409.000000,1401.000000,3068.000000,1262.000000,2.280800,154700.000000 +-118.180000,37.350000,16.000000,3806.000000,794.000000,1501.000000,714.000000,2.121200,108300.000000 +-118.180000,36.630000,23.000000,2311.000000,487.000000,1019.000000,384.000000,2.257400,104700.000000 +-118.180000,34.630000,19.000000,3562.000000,606.000000,1677.000000,578.000000,4.157300,228100.000000 +-118.180000,34.220000,40.000000,1983.000000,298.000000,853.000000,271.000000,5.984500,241700.000000 +-118.180000,34.200000,44.000000,1473.000000,250.000000,668.000000,239.000000,8.720000,415900.000000 +-118.180000,34.190000,48.000000,1371.000000,164.000000,528.000000,155.000000,15.000100,500001.000000 +-118.180000,34.170000,43.000000,4269.000000,591.000000,1467.000000,582.000000,9.070200,500001.000000 +-118.180000,34.160000,34.000000,5012.000000,746.000000,1699.000000,715.000000,9.498700,500001.000000 +-118.180000,34.140000,38.000000,3039.000000,487.000000,1131.000000,465.000000,7.711600,360900.000000 +-118.180000,34.130000,52.000000,2228.000000,475.000000,1311.000000,452.000000,3.534100,182100.000000 +-118.180000,34.130000,44.000000,2734.000000,415.000000,1057.000000,424.000000,7.921300,477800.000000 +-118.180000,34.120000,52.000000,1081.000000,311.000000,904.000000,283.000000,1.921900,165100.000000 +-118.180000,34.120000,45.000000,2397.000000,488.000000,1569.000000,471.000000,4.210000,167900.000000 +-118.180000,34.110000,44.000000,1346.000000,398.000000,1204.000000,344.000000,2.398400,152200.000000 +-118.180000,34.110000,33.000000,1523.000000,391.000000,753.000000,298.000000,2.659100,183800.000000 +-118.180000,34.100000,8.000000,1116.000000,267.000000,435.000000,235.000000,4.923100,230900.000000 +-118.180000,34.100000,7.000000,2529.000000,689.000000,1215.000000,577.000000,4.785300,153100.000000 +-118.180000,34.100000,10.000000,1940.000000,445.000000,763.000000,412.000000,4.975000,166700.000000 +-118.180000,34.100000,10.000000,1907.000000,398.000000,921.000000,369.000000,4.875000,200400.000000 +-118.180000,34.090000,44.000000,1688.000000,426.000000,1605.000000,384.000000,3.378500,184900.000000 +-118.180000,34.090000,40.000000,2744.000000,708.000000,2747.000000,674.000000,2.622600,148800.000000 +-118.180000,34.080000,35.000000,2226.000000,602.000000,2230.000000,549.000000,2.916700,129300.000000 +-118.180000,34.080000,33.000000,1369.000000,408.000000,1475.000000,377.000000,2.328100,151900.000000 +-118.180000,34.080000,31.000000,1318.000000,311.000000,1164.000000,289.000000,2.993900,135500.000000 +-118.180000,34.070000,28.000000,2616.000000,630.000000,2218.000000,621.000000,2.684200,156000.000000 +-118.180000,34.060000,45.000000,934.000000,228.000000,893.000000,192.000000,2.530000,140300.000000 +-118.180000,34.060000,33.000000,278.000000,71.000000,266.000000,56.000000,0.894100,98200.000000 +-118.180000,34.060000,27.000000,2025.000000,565.000000,2189.000000,577.000000,2.608300,148600.000000 +-118.180000,34.050000,41.000000,616.000000,196.000000,814.000000,180.000000,3.333300,115100.000000 +-118.180000,34.050000,41.000000,389.000000,102.000000,455.000000,107.000000,2.703100,109200.000000 +-118.180000,34.050000,38.000000,3272.000000,731.000000,3299.000000,726.000000,2.829500,126500.000000 +-118.180000,34.040000,44.000000,1079.000000,275.000000,1249.000000,249.000000,3.041700,141700.000000 +-118.180000,34.040000,36.000000,1807.000000,630.000000,2118.000000,669.000000,1.550000,129000.000000 
+-118.180000,34.030000,44.000000,1629.000000,420.000000,1893.000000,387.000000,2.299100,137500.000000 +-118.180000,34.030000,40.000000,2631.000000,698.000000,2920.000000,677.000000,2.076400,145600.000000 +-118.180000,34.030000,39.000000,609.000000,145.000000,690.000000,134.000000,2.916700,145800.000000 +-118.180000,34.030000,37.000000,2115.000000,580.000000,2842.000000,572.000000,2.239000,121300.000000 +-118.180000,34.020000,37.000000,2631.000000,734.000000,3228.000000,701.000000,2.150000,132200.000000 +-118.180000,34.020000,35.000000,661.000000,142.000000,720.000000,143.000000,2.897700,142500.000000 +-118.180000,34.020000,33.000000,832.000000,226.000000,987.000000,220.000000,3.097200,125000.000000 +-118.180000,34.010000,42.000000,1845.000000,497.000000,2191.000000,492.000000,2.346200,127300.000000 +-118.180000,33.990000,38.000000,1010.000000,315.000000,1157.000000,301.000000,1.634100,161800.000000 +-118.180000,33.990000,36.000000,988.000000,337.000000,1508.000000,351.000000,2.437500,154800.000000 +-118.180000,33.990000,35.000000,1230.000000,407.000000,1512.000000,364.000000,2.152000,170800.000000 +-118.180000,33.980000,40.000000,1698.000000,431.000000,1280.000000,405.000000,2.625000,206300.000000 +-118.180000,33.980000,36.000000,903.000000,266.000000,1068.000000,251.000000,3.039800,165400.000000 +-118.180000,33.980000,30.000000,1735.000000,573.000000,2237.000000,545.000000,2.344400,156100.000000 +-118.180000,33.980000,24.000000,1880.000000,642.000000,2646.000000,605.000000,2.183600,162000.000000 +-118.180000,33.970000,34.000000,3214.000000,899.000000,3086.000000,808.000000,2.005700,189400.000000 +-118.180000,33.970000,30.000000,2887.000000,866.000000,2806.000000,830.000000,2.212200,169400.000000 +-118.180000,33.970000,26.000000,6895.000000,1877.000000,8551.000000,1808.000000,2.317500,154500.000000 +-118.180000,33.960000,20.000000,427.000000,118.000000,402.000000,105.000000,1.416700,137500.000000 +-118.180000,33.950000,42.000000,2608.000000,610.000000,2062.000000,616.000000,3.534100,167500.000000 +-118.180000,33.950000,39.000000,2121.000000,579.000000,1991.000000,528.000000,2.909400,152200.000000 +-118.180000,33.940000,44.000000,1337.000000,245.000000,968.000000,240.000000,3.468800,183600.000000 +-118.180000,33.940000,43.000000,2724.000000,612.000000,2340.000000,570.000000,2.700000,165000.000000 +-118.180000,33.930000,35.000000,952.000000,271.000000,949.000000,261.000000,2.429700,147200.000000 +-118.180000,33.930000,31.000000,1516.000000,400.000000,1820.000000,398.000000,2.164100,122900.000000 +-118.180000,33.920000,32.000000,2035.000000,519.000000,2282.000000,480.000000,3.273400,136400.000000 +-118.180000,33.920000,29.000000,749.000000,185.000000,708.000000,196.000000,2.458300,136900.000000 +-118.180000,33.910000,36.000000,1138.000000,238.000000,878.000000,224.000000,2.062500,134400.000000 +-118.180000,33.900000,32.000000,778.000000,227.000000,933.000000,209.000000,2.729200,143800.000000 +-118.180000,33.900000,25.000000,1709.000000,442.000000,1177.000000,410.000000,2.433300,155000.000000 +-118.180000,33.890000,25.000000,5896.000000,1464.000000,4149.000000,1362.000000,2.674200,131900.000000 +-118.180000,33.880000,47.000000,882.000000,185.000000,536.000000,174.000000,4.625000,163000.000000 +-118.180000,33.880000,44.000000,1308.000000,267.000000,783.000000,237.000000,4.736100,167700.000000 +-118.180000,33.880000,42.000000,2326.000000,503.000000,1832.000000,501.000000,3.171300,161000.000000 +-118.180000,33.870000,44.000000,2137.000000,461.000000,1126.000000,439.000000,3.440800,172900.000000 
+-118.180000,33.870000,44.000000,1832.000000,401.000000,1056.000000,405.000000,4.065800,175100.000000 +-118.180000,33.870000,38.000000,2664.000000,626.000000,1627.000000,604.000000,3.752700,161900.000000 +-118.180000,33.860000,39.000000,2925.000000,732.000000,1702.000000,642.000000,2.375000,160800.000000 +-118.180000,33.850000,44.000000,1890.000000,465.000000,1378.000000,430.000000,3.881900,143200.000000 +-118.180000,33.850000,40.000000,2597.000000,582.000000,1285.000000,559.000000,3.975000,213800.000000 +-118.180000,33.850000,38.000000,3596.000000,862.000000,2416.000000,832.000000,3.689700,169800.000000 +-118.180000,33.850000,30.000000,2548.000000,717.000000,2086.000000,700.000000,0.700700,134400.000000 +-118.180000,33.840000,43.000000,2561.000000,544.000000,1063.000000,537.000000,3.835000,418600.000000 +-118.180000,33.840000,35.000000,1415.000000,294.000000,591.000000,291.000000,2.979800,315600.000000 +-118.180000,33.830000,52.000000,2569.000000,484.000000,1030.000000,451.000000,4.130100,268400.000000 +-118.180000,33.830000,45.000000,1535.000000,274.000000,591.000000,276.000000,4.241100,371700.000000 +-118.180000,33.830000,44.000000,1497.000000,277.000000,542.000000,274.000000,5.005200,321800.000000 +-118.180000,33.830000,39.000000,3622.000000,745.000000,1330.000000,648.000000,3.312500,425500.000000 +-118.180000,33.820000,52.000000,2618.000000,472.000000,943.000000,440.000000,3.789500,254000.000000 +-118.180000,33.820000,43.000000,284.000000,65.000000,167.000000,68.000000,4.250000,207500.000000 +-118.180000,33.810000,27.000000,471.000000,132.000000,315.000000,96.000000,1.750000,154200.000000 +-118.180000,33.800000,30.000000,2734.000000,758.000000,2951.000000,691.000000,1.768900,117600.000000 +-118.180000,33.800000,15.000000,2407.000000,589.000000,1591.000000,506.000000,3.051300,148100.000000 +-118.180000,33.790000,42.000000,1571.000000,435.000000,1631.000000,417.000000,1.638400,128000.000000 +-118.180000,33.790000,27.000000,1580.000000,510.000000,1896.000000,448.000000,2.018600,130000.000000 +-118.180000,33.790000,20.000000,1255.000000,360.000000,1201.000000,318.000000,1.220600,162500.000000 +-118.180000,33.780000,52.000000,1180.000000,381.000000,1046.000000,332.000000,1.560300,162500.000000 +-118.180000,33.780000,36.000000,1697.000000,550.000000,1379.000000,434.000000,1.274600,129700.000000 +-118.180000,33.780000,26.000000,3042.000000,1253.000000,4812.000000,1141.000000,1.770100,146200.000000 +-118.180000,33.780000,20.000000,1852.000000,556.000000,1712.000000,556.000000,1.456500,152500.000000 +-118.180000,33.780000,17.000000,1419.000000,436.000000,1300.000000,360.000000,2.076900,100000.000000 +-118.180000,33.770000,49.000000,2297.000000,759.000000,1105.000000,629.000000,1.838800,175000.000000 +-118.180000,33.770000,45.000000,1434.000000,627.000000,735.000000,518.000000,1.500000,162500.000000 +-118.180000,33.770000,41.000000,2048.000000,601.000000,852.000000,533.000000,2.572600,193800.000000 +-118.180000,33.770000,39.000000,1645.000000,547.000000,1339.000000,499.000000,1.553600,155000.000000 +-118.180000,33.770000,37.000000,2653.000000,754.000000,1087.000000,698.000000,2.352300,325000.000000 +-118.180000,33.770000,36.000000,1833.000000,688.000000,1128.000000,620.000000,1.148300,112500.000000 +-118.180000,33.770000,29.000000,1776.000000,606.000000,1391.000000,488.000000,1.129500,137500.000000 +-118.190000,34.870000,2.000000,2103.000000,389.000000,923.000000,338.000000,5.055300,111100.000000 
+-118.190000,34.770000,16.000000,2035.000000,370.000000,704.000000,330.000000,2.197900,146400.000000 +-118.190000,34.670000,8.000000,11275.000000,1822.000000,5731.000000,1692.000000,5.028500,167900.000000 +-118.190000,34.650000,33.000000,1781.000000,326.000000,913.000000,314.000000,3.996300,126800.000000 +-118.190000,34.220000,32.000000,10626.000000,1504.000000,4353.000000,1482.000000,9.841300,500001.000000 +-118.190000,34.210000,41.000000,1602.000000,228.000000,680.000000,225.000000,6.553000,500001.000000 +-118.190000,34.200000,41.000000,2031.000000,294.000000,859.000000,302.000000,7.419000,483700.000000 +-118.190000,34.190000,34.000000,2061.000000,260.000000,825.000000,254.000000,15.000100,500001.000000 +-118.190000,34.160000,49.000000,1788.000000,267.000000,735.000000,266.000000,6.600900,375700.000000 +-118.190000,34.140000,47.000000,2525.000000,523.000000,1514.000000,498.000000,4.335900,209200.000000 +-118.190000,34.130000,50.000000,1309.000000,302.000000,883.000000,293.000000,3.128700,198000.000000 +-118.190000,34.120000,52.000000,679.000000,132.000000,483.000000,163.000000,4.234400,162500.000000 +-118.190000,34.120000,44.000000,1219.000000,324.000000,1036.000000,282.000000,4.041700,170600.000000 +-118.190000,34.120000,41.000000,2591.000000,682.000000,2366.000000,583.000000,2.307100,146400.000000 +-118.190000,34.120000,36.000000,2833.000000,720.000000,2148.000000,709.000000,2.701200,172100.000000 +-118.190000,34.120000,35.000000,2524.000000,749.000000,2487.000000,679.000000,2.493200,167700.000000 +-118.190000,34.110000,40.000000,1266.000000,348.000000,1032.000000,315.000000,2.166700,150000.000000 +-118.190000,34.110000,38.000000,1158.000000,309.000000,1051.000000,322.000000,2.286000,169300.000000 +-118.190000,34.110000,26.000000,1638.000000,457.000000,1155.000000,437.000000,3.422700,143800.000000 +-118.190000,34.100000,42.000000,1577.000000,379.000000,1317.000000,378.000000,3.212100,153900.000000 +-118.190000,34.100000,39.000000,2054.000000,423.000000,1205.000000,403.000000,4.239000,213000.000000 +-118.190000,34.090000,41.000000,2090.000000,530.000000,2043.000000,537.000000,1.970600,144200.000000 +-118.190000,34.080000,38.000000,1241.000000,298.000000,1055.000000,263.000000,2.340900,115500.000000 +-118.190000,34.070000,42.000000,1555.000000,337.000000,1152.000000,348.000000,3.375000,169600.000000 +-118.190000,34.070000,38.000000,2965.000000,665.000000,2128.000000,650.000000,3.024100,166300.000000 +-118.190000,34.060000,47.000000,2324.000000,658.000000,3020.000000,594.000000,1.186800,93800.000000 +-118.190000,34.060000,44.000000,1734.000000,364.000000,1133.000000,351.000000,2.513200,163100.000000 +-118.190000,34.060000,37.000000,1715.000000,456.000000,2052.000000,440.000000,2.312500,116100.000000 +-118.190000,34.060000,32.000000,555.000000,159.000000,748.000000,163.000000,1.976200,137500.000000 +-118.190000,34.050000,47.000000,1273.000000,264.000000,1193.000000,260.000000,2.437500,122900.000000 +-118.190000,34.050000,46.000000,1051.000000,302.000000,1435.000000,305.000000,1.666700,133600.000000 +-118.190000,34.050000,43.000000,977.000000,266.000000,1084.000000,259.000000,2.770800,127900.000000 +-118.190000,34.050000,42.000000,1291.000000,345.000000,1535.000000,332.000000,1.908300,119200.000000 +-118.190000,34.050000,41.000000,1098.000000,264.000000,1178.000000,245.000000,2.105800,124300.000000 +-118.190000,34.050000,37.000000,349.000000,79.000000,276.000000,64.000000,3.212500,125000.000000 
+-118.190000,34.050000,35.000000,1296.000000,307.000000,1423.000000,276.000000,2.743200,135200.000000 +-118.190000,34.040000,43.000000,1682.000000,422.000000,1706.000000,409.000000,2.102900,153300.000000 +-118.190000,34.040000,40.000000,1279.000000,316.000000,1438.000000,329.000000,2.177400,157600.000000 +-118.190000,34.040000,40.000000,1095.000000,305.000000,1322.000000,281.000000,1.968800,150000.000000 +-118.190000,34.040000,39.000000,1074.000000,323.000000,1613.000000,308.000000,2.301500,131700.000000 +-118.190000,34.040000,34.000000,1011.000000,274.000000,1164.000000,262.000000,2.854200,146900.000000 +-118.190000,34.030000,52.000000,1053.000000,246.000000,1036.000000,249.000000,2.107100,136700.000000 +-118.190000,34.030000,50.000000,1183.000000,246.000000,851.000000,231.000000,3.263900,142600.000000 +-118.190000,34.030000,42.000000,2250.000000,629.000000,2588.000000,609.000000,1.971900,134200.000000 +-118.190000,34.030000,31.000000,525.000000,136.000000,627.000000,145.000000,2.696400,125000.000000 +-118.190000,34.020000,45.000000,1535.000000,432.000000,1820.000000,419.000000,1.780100,142800.000000 +-118.190000,34.020000,44.000000,2702.000000,770.000000,3260.000000,721.000000,2.357500,144800.000000 +-118.190000,34.020000,40.000000,474.000000,124.000000,546.000000,121.000000,2.343800,137500.000000 +-118.190000,34.020000,34.000000,1478.000000,369.000000,1735.000000,348.000000,1.887500,136700.000000 +-118.190000,33.990000,42.000000,1429.000000,436.000000,1537.000000,389.000000,3.011400,157500.000000 +-118.190000,33.990000,40.000000,1547.000000,434.000000,1930.000000,427.000000,3.386900,157300.000000 +-118.190000,33.990000,38.000000,1212.000000,272.000000,1129.000000,263.000000,2.667300,142300.000000 +-118.190000,33.990000,36.000000,1273.000000,379.000000,1398.000000,353.000000,2.451600,147800.000000 +-118.190000,33.990000,35.000000,1172.000000,436.000000,1741.000000,408.000000,2.459600,154700.000000 +-118.190000,33.980000,40.000000,973.000000,272.000000,1257.000000,258.000000,2.821400,158000.000000 +-118.190000,33.980000,36.000000,4179.000000,1266.000000,4582.000000,1196.000000,2.008700,172100.000000 +-118.190000,33.980000,34.000000,1022.000000,286.000000,1058.000000,275.000000,2.604200,156700.000000 +-118.190000,33.980000,33.000000,151.000000,83.000000,380.000000,83.000000,1.422400,189600.000000 +-118.190000,33.970000,34.000000,2700.000000,763.000000,2815.000000,767.000000,2.419600,178400.000000 +-118.190000,33.970000,30.000000,1790.000000,556.000000,1827.000000,520.000000,1.756200,181300.000000 +-118.190000,33.960000,40.000000,979.000000,296.000000,934.000000,292.000000,2.635400,151800.000000 +-118.190000,33.960000,28.000000,3507.000000,969.000000,3740.000000,970.000000,2.016200,142000.000000 +-118.190000,33.950000,44.000000,1436.000000,271.000000,850.000000,269.000000,3.276800,179100.000000 +-118.190000,33.950000,42.000000,2309.000000,685.000000,2609.000000,673.000000,2.720600,162100.000000 +-118.190000,33.950000,42.000000,1651.000000,463.000000,1559.000000,436.000000,2.388200,148100.000000 +-118.190000,33.950000,41.000000,1368.000000,309.000000,1244.000000,312.000000,3.083300,164800.000000 +-118.190000,33.940000,45.000000,1871.000000,371.000000,1315.000000,382.000000,3.366100,160800.000000 +-118.190000,33.940000,45.000000,1403.000000,315.000000,1111.000000,311.000000,3.384600,168100.000000 +-118.190000,33.930000,44.000000,1613.000000,345.000000,1227.000000,342.000000,3.166700,145700.000000 
+-118.190000,33.930000,40.000000,1334.000000,276.000000,1226.000000,278.000000,3.471200,144300.000000 +-118.190000,33.920000,43.000000,2339.000000,487.000000,1732.000000,449.000000,3.098700,139400.000000 +-118.190000,33.920000,36.000000,1356.000000,314.000000,1469.000000,300.000000,2.078500,139800.000000 +-118.190000,33.920000,35.000000,915.000000,241.000000,1153.000000,252.000000,3.305000,115800.000000 +-118.190000,33.910000,35.000000,2695.000000,748.000000,2935.000000,706.000000,2.013400,132400.000000 +-118.190000,33.910000,33.000000,915.000000,225.000000,826.000000,212.000000,2.770800,117400.000000 +-118.190000,33.900000,36.000000,2326.000000,543.000000,2073.000000,494.000000,1.995200,112900.000000 +-118.190000,33.900000,36.000000,1073.000000,271.000000,1385.000000,288.000000,2.321400,104800.000000 +-118.190000,33.900000,32.000000,2762.000000,652.000000,2677.000000,632.000000,2.571900,105600.000000 +-118.190000,33.890000,38.000000,4018.000000,986.000000,3702.000000,927.000000,2.929300,113600.000000 +-118.190000,33.890000,32.000000,1696.000000,438.000000,1639.000000,376.000000,2.035700,107300.000000 +-118.190000,33.890000,31.000000,886.000000,224.000000,1154.000000,247.000000,2.107100,99500.000000 +-118.190000,33.870000,35.000000,1769.000000,436.000000,1166.000000,386.000000,2.875000,178300.000000 +-118.190000,33.870000,27.000000,4701.000000,1359.000000,2571.000000,1216.000000,2.541700,184100.000000 +-118.190000,33.860000,46.000000,1824.000000,438.000000,1200.000000,451.000000,3.437500,156700.000000 +-118.190000,33.860000,42.000000,1999.000000,431.000000,1060.000000,399.000000,3.703100,167100.000000 +-118.190000,33.860000,38.000000,2009.000000,524.000000,1449.000000,451.000000,2.704500,155400.000000 +-118.190000,33.860000,36.000000,2013.000000,546.000000,1659.000000,522.000000,3.121500,153600.000000 +-118.190000,33.860000,35.000000,1133.000000,296.000000,774.000000,271.000000,2.238100,137500.000000 +-118.190000,33.850000,45.000000,1167.000000,302.000000,773.000000,287.000000,3.279800,150300.000000 +-118.190000,33.850000,30.000000,3533.000000,1061.000000,2678.000000,1033.000000,2.241700,151900.000000 +-118.190000,33.840000,24.000000,1228.000000,320.000000,537.000000,273.000000,2.250000,192000.000000 +-118.190000,33.830000,43.000000,2641.000000,411.000000,1011.000000,444.000000,6.446800,444200.000000 +-118.190000,33.830000,42.000000,1773.000000,360.000000,815.000000,299.000000,4.900000,406300.000000 +-118.190000,33.820000,19.000000,2953.000000,895.000000,1914.000000,855.000000,3.552100,290000.000000 +-118.190000,33.810000,21.000000,1835.000000,427.000000,1038.000000,384.000000,4.455900,198500.000000 +-118.190000,33.800000,41.000000,2125.000000,591.000000,1604.000000,555.000000,2.994300,190600.000000 +-118.190000,33.800000,38.000000,2010.000000,595.000000,1535.000000,525.000000,1.984800,160400.000000 +-118.190000,33.800000,36.000000,2326.000000,729.000000,2635.000000,657.000000,2.198500,141800.000000 +-118.190000,33.790000,43.000000,1823.000000,600.000000,2339.000000,560.000000,1.679200,130600.000000 +-118.190000,33.790000,41.000000,2114.000000,612.000000,2357.000000,529.000000,1.793800,142600.000000 +-118.190000,33.790000,37.000000,1834.000000,551.000000,1967.000000,476.000000,2.137000,126600.000000 +-118.190000,33.790000,30.000000,3107.000000,994.000000,3543.000000,850.000000,1.938700,141700.000000 +-118.190000,33.780000,8.000000,992.000000,393.000000,694.000000,331.000000,2.554400,162500.000000 
+-118.190000,33.780000,42.000000,1021.000000,300.000000,533.000000,187.000000,1.803600,175000.000000 +-118.190000,33.780000,35.000000,1511.000000,593.000000,914.000000,539.000000,0.931800,187500.000000 +-118.190000,33.780000,31.000000,1648.000000,484.000000,898.000000,457.000000,1.584400,162500.000000 +-118.190000,33.780000,29.000000,1013.000000,392.000000,1083.000000,316.000000,1.843800,162500.000000 +-118.190000,33.780000,21.000000,2741.000000,1029.000000,2924.000000,969.000000,1.327400,218800.000000 +-118.190000,33.770000,52.000000,1562.000000,616.000000,692.000000,512.000000,1.404800,200000.000000 +-118.190000,33.770000,35.000000,1574.000000,603.000000,820.000000,514.000000,1.232100,137500.000000 +-118.200000,34.690000,5.000000,9076.000000,1503.000000,7694.000000,1278.000000,4.875000,163400.000000 +-118.200000,34.210000,42.000000,1493.000000,237.000000,665.000000,224.000000,6.757100,443900.000000 +-118.200000,34.210000,40.000000,1477.000000,228.000000,609.000000,224.000000,7.837500,500001.000000 +-118.200000,34.200000,44.000000,2890.000000,438.000000,1219.000000,429.000000,6.987000,500001.000000 +-118.200000,34.160000,31.000000,5550.000000,881.000000,2465.000000,862.000000,6.831700,446100.000000 +-118.200000,34.140000,52.000000,3800.000000,646.000000,1842.000000,620.000000,5.552400,293900.000000 +-118.200000,34.140000,52.000000,2090.000000,466.000000,1219.000000,390.000000,4.090900,204200.000000 +-118.200000,34.140000,51.000000,1941.000000,378.000000,1012.000000,371.000000,3.937500,217000.000000 +-118.200000,34.130000,52.000000,2035.000000,459.000000,2589.000000,438.000000,3.534900,193600.000000 +-118.200000,34.130000,30.000000,3369.000000,824.000000,2032.000000,795.000000,4.005200,196400.000000 +-118.200000,34.120000,52.000000,1580.000000,426.000000,1462.000000,406.000000,3.332600,167600.000000 +-118.200000,34.120000,44.000000,1565.000000,398.000000,1500.000000,407.000000,2.812500,155600.000000 +-118.200000,34.120000,41.000000,1908.000000,503.000000,1557.000000,453.000000,2.919400,162000.000000 +-118.200000,34.110000,52.000000,678.000000,173.000000,791.000000,186.000000,4.062500,171300.000000 +-118.200000,34.110000,52.000000,1901.000000,525.000000,1856.000000,480.000000,3.000000,156400.000000 +-118.200000,34.110000,46.000000,3659.000000,1068.000000,4153.000000,993.000000,2.521100,162900.000000 +-118.200000,34.110000,37.000000,2040.000000,611.000000,1698.000000,545.000000,1.935500,166300.000000 +-118.200000,34.110000,36.000000,1441.000000,534.000000,1809.000000,500.000000,2.179300,185700.000000 +-118.200000,34.100000,30.000000,3643.000000,1197.000000,4336.000000,1163.000000,2.070000,154500.000000 +-118.200000,34.090000,39.000000,1594.000000,430.000000,1668.000000,378.000000,2.534300,138200.000000 +-118.200000,34.080000,49.000000,1320.000000,309.000000,1405.000000,328.000000,2.437500,114000.000000 +-118.200000,34.080000,41.000000,1807.000000,429.000000,1699.000000,424.000000,2.222200,126000.000000 +-118.200000,34.070000,34.000000,1765.000000,551.000000,2203.000000,500.000000,2.270800,159600.000000 +-118.200000,34.070000,21.000000,1353.000000,380.000000,1688.000000,367.000000,1.993700,139600.000000 +-118.200000,34.060000,46.000000,453.000000,119.000000,533.000000,132.000000,2.296100,112500.000000 +-118.200000,34.060000,40.000000,1181.000000,335.000000,1441.000000,337.000000,2.113600,111800.000000 +-118.200000,34.050000,8.000000,762.000000,204.000000,728.000000,174.000000,2.488600,137500.000000 
+-118.200000,34.050000,50.000000,1407.000000,401.000000,1526.000000,385.000000,2.290000,121800.000000 +-118.200000,34.050000,43.000000,1165.000000,317.000000,1279.000000,303.000000,1.961500,141700.000000 +-118.200000,34.050000,42.000000,1703.000000,586.000000,2490.000000,581.000000,2.020000,147200.000000 +-118.200000,34.050000,41.000000,1268.000000,398.000000,1887.000000,407.000000,2.625000,150000.000000 +-118.200000,34.050000,40.000000,1146.000000,323.000000,1354.000000,321.000000,1.920500,121900.000000 +-118.200000,34.050000,40.000000,1082.000000,318.000000,1085.000000,273.000000,1.705400,117200.000000 +-118.200000,34.050000,36.000000,2672.000000,675.000000,2883.000000,674.000000,2.088500,142800.000000 +-118.200000,34.040000,52.000000,1249.000000,307.000000,1223.000000,297.000000,2.070000,136300.000000 +-118.200000,34.040000,44.000000,1399.000000,386.000000,1419.000000,373.000000,1.822400,143800.000000 +-118.200000,34.040000,36.000000,1625.000000,490.000000,2003.000000,478.000000,2.181000,147200.000000 +-118.200000,34.030000,52.000000,774.000000,209.000000,813.000000,203.000000,2.347200,135200.000000 +-118.200000,34.030000,52.000000,583.000000,157.000000,730.000000,174.000000,1.411500,140600.000000 +-118.200000,34.030000,52.000000,1754.000000,452.000000,1849.000000,445.000000,2.371600,122800.000000 +-118.200000,34.030000,41.000000,1292.000000,334.000000,1150.000000,322.000000,1.925000,135200.000000 +-118.200000,34.030000,37.000000,1583.000000,392.000000,1776.000000,377.000000,2.726600,140800.000000 +-118.200000,34.020000,42.000000,498.000000,120.000000,548.000000,119.000000,3.754300,126600.000000 +-118.200000,34.020000,26.000000,36.000000,9.000000,35.000000,9.000000,1.625000,175000.000000 +-118.200000,33.990000,35.000000,1705.000000,523.000000,2252.000000,508.000000,2.342100,154200.000000 +-118.200000,33.990000,35.000000,1608.000000,465.000000,2140.000000,488.000000,3.197900,154700.000000 +-118.200000,33.990000,33.000000,1134.000000,375.000000,1615.000000,354.000000,2.146800,141700.000000 +-118.200000,33.990000,31.000000,1186.000000,387.000000,2087.000000,409.000000,1.913200,154600.000000 +-118.200000,33.990000,30.000000,1474.000000,459.000000,1844.000000,464.000000,2.551000,160000.000000 +-118.200000,33.980000,38.000000,867.000000,243.000000,950.000000,235.000000,1.892900,163100.000000 +-118.200000,33.980000,32.000000,1403.000000,399.000000,1506.000000,375.000000,2.000000,172700.000000 +-118.200000,33.980000,30.000000,2369.000000,753.000000,3259.000000,770.000000,2.196400,158500.000000 +-118.200000,33.970000,30.000000,1911.000000,562.000000,2055.000000,534.000000,2.391700,154600.000000 +-118.200000,33.970000,28.000000,2474.000000,702.000000,2830.000000,694.000000,2.754000,166200.000000 +-118.200000,33.960000,44.000000,3114.000000,779.000000,2959.000000,776.000000,3.187500,171700.000000 +-118.200000,33.960000,43.000000,1233.000000,306.000000,1190.000000,282.000000,2.837100,161300.000000 +-118.200000,33.960000,41.000000,1512.000000,400.000000,1690.000000,367.000000,3.055000,167000.000000 +-118.200000,33.960000,37.000000,2127.000000,533.000000,2021.000000,480.000000,2.977300,164600.000000 +-118.200000,33.950000,41.000000,679.000000,184.000000,788.000000,185.000000,2.140600,165300.000000 +-118.200000,33.950000,35.000000,1924.000000,520.000000,2101.000000,541.000000,2.426700,151500.000000 +-118.200000,33.940000,45.000000,1818.000000,408.000000,1705.000000,373.000000,4.044100,157500.000000 +-118.200000,33.940000,45.000000,1570.000000,328.000000,1321.000000,300.000000,3.736100,171800.000000 
+-118.200000,33.940000,44.000000,1413.000000,298.000000,1200.000000,307.000000,3.512500,169300.000000 +-118.200000,33.930000,41.000000,857.000000,201.000000,934.000000,227.000000,2.633900,145700.000000 +-118.200000,33.930000,40.000000,1929.000000,417.000000,1780.000000,419.000000,3.440200,149400.000000 +-118.200000,33.930000,36.000000,2210.000000,634.000000,2341.000000,553.000000,2.171500,131100.000000 +-118.200000,33.930000,36.000000,1191.000000,345.000000,1193.000000,295.000000,2.518500,138800.000000 +-118.200000,33.920000,45.000000,1283.000000,253.000000,1025.000000,248.000000,3.279800,141200.000000 +-118.200000,33.920000,42.000000,1411.000000,314.000000,1432.000000,322.000000,3.087100,138800.000000 +-118.200000,33.920000,39.000000,1050.000000,217.000000,895.000000,207.000000,3.153800,155600.000000 +-118.200000,33.920000,36.000000,414.000000,104.000000,477.000000,130.000000,3.671900,130400.000000 +-118.200000,33.910000,43.000000,1381.000000,278.000000,1494.000000,298.000000,3.587800,118400.000000 +-118.200000,33.910000,36.000000,2283.000000,499.000000,1836.000000,462.000000,2.879300,118100.000000 +-118.200000,33.900000,34.000000,1552.000000,444.000000,2093.000000,413.000000,2.212500,103200.000000 +-118.200000,33.900000,33.000000,1435.000000,322.000000,1298.000000,299.000000,2.781300,105100.000000 +-118.200000,33.900000,26.000000,1000.000000,275.000000,1178.000000,263.000000,2.120000,105000.000000 +-118.200000,33.890000,40.000000,2538.000000,564.000000,2170.000000,541.000000,2.721200,107900.000000 +-118.200000,33.880000,40.000000,2945.000000,725.000000,2858.000000,690.000000,3.236800,136900.000000 +-118.200000,33.880000,40.000000,1699.000000,346.000000,1188.000000,329.000000,4.208300,147300.000000 +-118.200000,33.870000,42.000000,1482.000000,310.000000,1052.000000,317.000000,3.946900,158200.000000 +-118.200000,33.870000,36.000000,1554.000000,273.000000,974.000000,264.000000,4.213500,161400.000000 +-118.200000,33.870000,26.000000,703.000000,202.000000,757.000000,212.000000,2.525000,155500.000000 +-118.200000,33.860000,27.000000,2732.000000,867.000000,1690.000000,794.000000,2.646500,160200.000000 +-118.200000,33.850000,46.000000,1854.000000,462.000000,1360.000000,429.000000,2.484400,158200.000000 +-118.200000,33.850000,33.000000,2557.000000,731.000000,2286.000000,700.000000,2.304100,149100.000000 +-118.200000,33.840000,35.000000,3405.000000,779.000000,1953.000000,671.000000,2.781300,159200.000000 +-118.200000,33.830000,35.000000,3737.000000,613.000000,1305.000000,583.000000,7.209600,490300.000000 +-118.200000,33.820000,21.000000,2251.000000,452.000000,913.000000,420.000000,4.604200,272200.000000 +-118.200000,33.810000,47.000000,2347.000000,437.000000,1219.000000,420.000000,5.309600,209900.000000 +-118.200000,33.810000,46.000000,1388.000000,254.000000,742.000000,241.000000,4.645800,212100.000000 +-118.200000,33.810000,45.000000,944.000000,178.000000,533.000000,193.000000,3.480800,206900.000000 +-118.200000,33.810000,43.000000,3013.000000,574.000000,1525.000000,529.000000,4.950000,194000.000000 +-118.200000,33.800000,52.000000,1786.000000,445.000000,1090.000000,430.000000,2.898800,194900.000000 +-118.200000,33.800000,52.000000,1009.000000,216.000000,614.000000,231.000000,4.007400,200800.000000 +-118.200000,33.800000,42.000000,4577.000000,1146.000000,2749.000000,1094.000000,2.501200,197500.000000 +-118.200000,33.790000,48.000000,2105.000000,592.000000,1807.000000,539.000000,2.718300,190400.000000 
+-118.200000,33.790000,47.000000,767.000000,195.000000,569.000000,195.000000,2.951400,185200.000000 +-118.200000,33.790000,47.000000,2549.000000,626.000000,1388.000000,606.000000,3.013500,192700.000000 +-118.200000,33.790000,25.000000,2851.000000,968.000000,3744.000000,906.000000,2.067500,116700.000000 +-118.200000,33.780000,52.000000,2662.000000,893.000000,3018.000000,763.000000,2.330500,162500.000000 +-118.200000,33.780000,46.000000,1889.000000,651.000000,1545.000000,587.000000,1.706400,175000.000000 +-118.200000,33.770000,42.000000,517.000000,233.000000,995.000000,212.000000,2.225000,106300.000000 +-118.200000,33.770000,41.000000,1158.000000,396.000000,1209.000000,336.000000,2.781300,129200.000000 +-118.200000,33.770000,40.000000,2034.000000,899.000000,1257.000000,797.000000,1.286400,131300.000000 +-118.200000,33.770000,24.000000,2404.000000,819.000000,1566.000000,753.000000,1.507600,145800.000000 +-118.210000,34.650000,17.000000,4001.000000,814.000000,2313.000000,756.000000,3.044100,140100.000000 +-118.210000,34.640000,16.000000,2573.000000,427.000000,1273.000000,426.000000,5.950800,181100.000000 +-118.210000,34.220000,37.000000,2260.000000,322.000000,941.000000,303.000000,8.369500,500001.000000 +-118.210000,34.210000,41.000000,1676.000000,263.000000,757.000000,255.000000,4.773400,450800.000000 +-118.210000,34.200000,35.000000,3646.000000,552.000000,1409.000000,534.000000,6.379400,500001.000000 +-118.210000,34.180000,14.000000,2672.000000,335.000000,1113.000000,318.000000,12.157900,500001.000000 +-118.210000,34.170000,24.000000,8590.000000,1231.000000,3401.000000,1178.000000,8.132500,472700.000000 +-118.210000,34.160000,25.000000,434.000000,74.000000,199.000000,75.000000,5.919900,420500.000000 +-118.210000,34.140000,25.000000,1908.000000,628.000000,1412.000000,588.000000,2.226700,189800.000000 +-118.210000,34.130000,52.000000,2465.000000,611.000000,1433.000000,570.000000,3.250000,214200.000000 +-118.210000,34.120000,52.000000,1590.000000,360.000000,1127.000000,321.000000,3.462500,173900.000000 +-118.210000,34.120000,41.000000,1904.000000,514.000000,1666.000000,498.000000,3.684500,175800.000000 +-118.210000,34.120000,35.000000,1937.000000,439.000000,1523.000000,412.000000,3.563800,170500.000000 +-118.210000,34.100000,47.000000,5077.000000,1271.000000,3348.000000,1106.000000,3.037700,186800.000000 +-118.210000,34.100000,40.000000,1684.000000,316.000000,795.000000,330.000000,5.272300,218300.000000 +-118.210000,34.100000,36.000000,2000.000000,533.000000,1234.000000,535.000000,3.743700,241700.000000 +-118.210000,34.090000,39.000000,1561.000000,445.000000,1780.000000,391.000000,2.463200,144200.000000 +-118.210000,34.090000,39.000000,1287.000000,353.000000,1171.000000,345.000000,1.611800,138500.000000 +-118.210000,34.090000,37.000000,1822.000000,498.000000,1961.000000,506.000000,1.988100,159200.000000 +-118.210000,34.090000,34.000000,1660.000000,412.000000,1678.000000,382.000000,2.770800,148200.000000 +-118.210000,34.080000,52.000000,3672.000000,808.000000,3062.000000,764.000000,2.680600,153000.000000 +-118.210000,34.080000,39.000000,986.000000,361.000000,1347.000000,299.000000,2.290700,133900.000000 +-118.210000,34.080000,26.000000,2574.000000,807.000000,3163.000000,802.000000,1.949500,173200.000000 +-118.210000,34.070000,52.000000,1770.000000,435.000000,1848.000000,439.000000,2.413500,167200.000000 +-118.210000,34.070000,47.000000,1346.000000,383.000000,1452.000000,371.000000,1.729200,191700.000000 
+-118.210000,34.070000,42.000000,902.000000,318.000000,1312.000000,323.000000,1.937500,168800.000000 +-118.210000,34.070000,31.000000,1453.000000,404.000000,1486.000000,389.000000,2.385900,153100.000000 +-118.210000,34.070000,31.000000,1077.000000,300.000000,1198.000000,274.000000,2.133300,160200.000000 +-118.210000,34.060000,30.000000,511.000000,153.000000,1152.000000,149.000000,2.361100,156800.000000 +-118.210000,34.060000,29.000000,1478.000000,413.000000,1580.000000,394.000000,1.878100,147500.000000 +-118.210000,34.050000,47.000000,722.000000,235.000000,930.000000,226.000000,2.545500,114300.000000 +-118.210000,34.050000,28.000000,950.000000,357.000000,1485.000000,345.000000,1.927100,136400.000000 +-118.210000,34.050000,28.000000,1841.000000,809.000000,3199.000000,727.000000,1.631900,151600.000000 +-118.210000,34.050000,28.000000,1079.000000,306.000000,1358.000000,285.000000,2.520000,131900.000000 +-118.210000,34.050000,26.000000,745.000000,258.000000,694.000000,236.000000,1.384600,129200.000000 +-118.210000,34.040000,52.000000,846.000000,271.000000,1153.000000,281.000000,2.192300,155000.000000 +-118.210000,34.040000,47.000000,1306.000000,391.000000,1499.000000,346.000000,2.278800,139600.000000 +-118.210000,34.040000,43.000000,1502.000000,477.000000,1844.000000,477.000000,1.940500,152500.000000 +-118.210000,34.040000,36.000000,1825.000000,479.000000,2097.000000,480.000000,2.186200,135300.000000 +-118.210000,34.030000,52.000000,497.000000,132.000000,547.000000,121.000000,2.208300,146300.000000 +-118.210000,34.030000,47.000000,876.000000,228.000000,872.000000,231.000000,2.265600,145000.000000 +-118.210000,34.030000,45.000000,1860.000000,472.000000,1893.000000,456.000000,2.657300,141800.000000 +-118.210000,34.030000,44.000000,1550.000000,407.000000,1718.000000,403.000000,2.526800,141100.000000 +-118.210000,34.020000,52.000000,22.000000,7.000000,55.000000,7.000000,7.575200,67500.000000 +-118.210000,34.020000,45.000000,792.000000,203.000000,872.000000,188.000000,2.687500,129700.000000 +-118.210000,34.020000,43.000000,1811.000000,513.000000,2123.000000,487.000000,1.361500,133300.000000 +-118.210000,33.990000,39.000000,47.000000,16.000000,51.000000,23.000000,3.218800,112500.000000 +-118.210000,33.980000,39.000000,1315.000000,306.000000,1257.000000,298.000000,3.278800,169000.000000 +-118.210000,33.980000,37.000000,788.000000,215.000000,883.000000,221.000000,2.681800,164600.000000 +-118.210000,33.970000,43.000000,1751.000000,400.000000,1558.000000,379.000000,3.031300,166100.000000 +-118.210000,33.970000,35.000000,1863.000000,537.000000,2274.000000,510.000000,2.100500,171300.000000 +-118.210000,33.960000,43.000000,1686.000000,446.000000,1590.000000,474.000000,2.324100,159300.000000 +-118.210000,33.960000,39.000000,2265.000000,628.000000,2323.000000,599.000000,2.152200,155300.000000 +-118.210000,33.950000,43.000000,1500.000000,419.000000,1726.000000,440.000000,1.864100,165100.000000 +-118.210000,33.950000,38.000000,1889.000000,565.000000,2087.000000,559.000000,1.777800,154000.000000 +-118.210000,33.950000,35.000000,2134.000000,650.000000,2248.000000,587.000000,2.298800,153400.000000 +-118.210000,33.950000,35.000000,2129.000000,614.000000,2376.000000,618.000000,2.037200,160800.000000 +-118.210000,33.950000,32.000000,1116.000000,328.000000,1265.000000,302.000000,2.295000,155200.000000 +-118.210000,33.940000,41.000000,1807.000000,442.000000,1628.000000,443.000000,2.840000,156100.000000 +-118.210000,33.940000,40.000000,2227.000000,594.000000,2244.000000,580.000000,2.445900,143800.000000 
+-118.210000,33.940000,34.000000,892.000000,318.000000,1443.000000,341.000000,2.190300,162500.000000 +-118.210000,33.930000,41.000000,619.000000,138.000000,636.000000,145.000000,2.508300,118100.000000 +-118.210000,33.930000,36.000000,1337.000000,382.000000,1769.000000,393.000000,2.695300,121000.000000 +-118.210000,33.930000,30.000000,2831.000000,862.000000,3649.000000,883.000000,1.966800,152100.000000 +-118.210000,33.920000,41.000000,1722.000000,363.000000,1432.000000,326.000000,3.297600,151200.000000 +-118.210000,33.920000,36.000000,602.000000,150.000000,645.000000,145.000000,3.196400,115400.000000 +-118.210000,33.920000,35.000000,1669.000000,445.000000,1870.000000,412.000000,3.041700,117300.000000 +-118.210000,33.910000,37.000000,1073.000000,265.000000,1197.000000,250.000000,2.710900,133000.000000 +-118.210000,33.910000,26.000000,2422.000000,632.000000,2601.000000,583.000000,1.782400,110200.000000 +-118.210000,33.910000,24.000000,1545.000000,391.000000,1807.000000,388.000000,2.642900,105300.000000 +-118.210000,33.900000,43.000000,1810.000000,357.000000,1335.000000,358.000000,3.118900,118800.000000 +-118.210000,33.890000,45.000000,1211.000000,234.000000,1128.000000,261.000000,3.479200,110700.000000 +-118.210000,33.890000,42.000000,1254.000000,225.000000,929.000000,235.000000,4.364600,116200.000000 +-118.210000,33.890000,39.000000,1565.000000,364.000000,1389.000000,360.000000,2.744300,113900.000000 +-118.210000,33.880000,38.000000,929.000000,166.000000,686.000000,183.000000,3.448500,119400.000000 +-118.210000,33.880000,32.000000,1507.000000,379.000000,1082.000000,350.000000,3.225000,138200.000000 +-118.210000,33.880000,29.000000,1976.000000,444.000000,1254.000000,371.000000,2.178200,126800.000000 +-118.210000,33.840000,28.000000,822.000000,205.000000,627.000000,192.000000,3.458300,166300.000000 +-118.210000,33.830000,38.000000,793.000000,193.000000,601.000000,187.000000,2.883700,176100.000000 +-118.210000,33.820000,45.000000,455.000000,92.000000,394.000000,89.000000,4.956200,165700.000000 +-118.210000,33.820000,43.000000,1005.000000,199.000000,723.000000,191.000000,4.342400,162500.000000 +-118.210000,33.820000,34.000000,1719.000000,398.000000,1444.000000,372.000000,2.843800,139300.000000 +-118.210000,33.820000,33.000000,1278.000000,311.000000,1157.000000,320.000000,3.505400,146800.000000 +-118.210000,33.810000,45.000000,1816.000000,398.000000,1524.000000,388.000000,3.858600,157900.000000 +-118.210000,33.810000,45.000000,1693.000000,337.000000,1255.000000,333.000000,3.692300,159700.000000 +-118.210000,33.810000,43.000000,905.000000,199.000000,764.000000,204.000000,3.321400,162200.000000 +-118.210000,33.810000,40.000000,1815.000000,428.000000,1807.000000,413.000000,3.088200,160700.000000 +-118.210000,33.800000,44.000000,1387.000000,280.000000,984.000000,302.000000,4.250000,143100.000000 +-118.210000,33.790000,44.000000,121.000000,29.000000,153.000000,30.000000,2.196400,150000.000000 +-118.210000,33.790000,39.000000,1598.000000,458.000000,1691.000000,399.000000,2.360500,141800.000000 +-118.210000,33.790000,33.000000,32.000000,18.000000,96.000000,36.000000,4.593800,112500.000000 +-118.220000,34.670000,28.000000,2357.000000,408.000000,1162.000000,384.000000,4.363600,179700.000000 +-118.220000,34.630000,4.000000,14348.000000,2145.000000,5839.000000,1806.000000,5.379900,222400.000000 +-118.220000,34.230000,34.000000,3296.000000,483.000000,1268.000000,478.000000,8.480200,500001.000000 +-118.220000,34.220000,39.000000,2686.000000,417.000000,1094.000000,402.000000,7.005900,500001.000000 
+-118.220000,34.210000,29.000000,2174.000000,418.000000,1030.000000,395.000000,3.570700,341700.000000 +-118.220000,34.190000,36.000000,959.000000,204.000000,446.000000,210.000000,3.215000,331300.000000 +-118.220000,34.190000,36.000000,2443.000000,492.000000,1115.000000,493.000000,3.977700,409800.000000 +-118.220000,34.190000,31.000000,4704.000000,920.000000,1895.000000,886.000000,4.929700,400000.000000 +-118.220000,34.140000,52.000000,2298.000000,406.000000,1203.000000,387.000000,5.529100,274600.000000 +-118.220000,34.140000,52.000000,1388.000000,271.000000,735.000000,239.000000,3.740400,247700.000000 +-118.220000,34.140000,50.000000,3657.000000,708.000000,1725.000000,644.000000,5.545600,258100.000000 +-118.220000,34.130000,52.000000,791.000000,174.000000,501.000000,162.000000,3.354200,178100.000000 +-118.220000,34.130000,47.000000,1585.000000,420.000000,949.000000,366.000000,2.709800,173800.000000 +-118.220000,34.130000,40.000000,2749.000000,580.000000,1375.000000,511.000000,4.825000,205800.000000 +-118.220000,34.130000,35.000000,2983.000000,526.000000,1614.000000,543.000000,5.779400,272400.000000 +-118.220000,34.120000,37.000000,1298.000000,242.000000,750.000000,255.000000,5.204900,240800.000000 +-118.220000,34.110000,36.000000,2870.000000,529.000000,1371.000000,565.000000,5.208300,220900.000000 +-118.220000,34.100000,35.000000,4003.000000,788.000000,2785.000000,764.000000,4.121300,252100.000000 +-118.220000,34.100000,33.000000,1903.000000,386.000000,1187.000000,340.000000,4.046900,196600.000000 +-118.220000,34.090000,45.000000,1072.000000,275.000000,996.000000,243.000000,2.819400,165000.000000 +-118.220000,34.090000,42.000000,1706.000000,488.000000,1941.000000,447.000000,2.521300,149700.000000 +-118.220000,34.090000,40.000000,1081.000000,282.000000,970.000000,263.000000,1.875000,150000.000000 +-118.220000,34.090000,36.000000,1427.000000,415.000000,1835.000000,410.000000,2.480000,138900.000000 +-118.220000,34.080000,34.000000,1709.000000,562.000000,2105.000000,503.000000,1.970400,152100.000000 +-118.220000,34.080000,31.000000,394.000000,117.000000,573.000000,131.000000,1.817300,154200.000000 +-118.220000,34.070000,36.000000,839.000000,250.000000,1079.000000,245.000000,1.746300,158300.000000 +-118.220000,34.070000,35.000000,1504.000000,477.000000,2059.000000,498.000000,2.013300,145800.000000 +-118.220000,34.060000,52.000000,48.000000,6.000000,41.000000,10.000000,10.226400,112500.000000 +-118.220000,34.060000,34.000000,1083.000000,364.000000,1132.000000,338.000000,2.234400,153100.000000 +-118.220000,34.050000,44.000000,1105.000000,346.000000,1598.000000,372.000000,1.200000,115600.000000 +-118.220000,34.050000,41.000000,1422.000000,478.000000,1640.000000,434.000000,1.612200,157100.000000 +-118.220000,34.050000,36.000000,1243.000000,470.000000,1668.000000,444.000000,1.071400,137500.000000 +-118.220000,34.050000,34.000000,1113.000000,313.000000,928.000000,290.000000,3.165400,155000.000000 +-118.220000,34.040000,43.000000,798.000000,308.000000,1417.000000,325.000000,1.418900,141700.000000 +-118.220000,34.040000,43.000000,2343.000000,803.000000,2468.000000,707.000000,1.516300,115000.000000 +-118.220000,34.030000,45.000000,554.000000,214.000000,888.000000,218.000000,1.812500,139600.000000 +-118.220000,33.990000,24.000000,1402.000000,482.000000,1976.000000,466.000000,2.696400,163200.000000 +-118.220000,33.980000,42.000000,626.000000,143.000000,625.000000,156.000000,3.125000,166300.000000 +-118.220000,33.980000,36.000000,1514.000000,453.000000,1496.000000,448.000000,2.104400,148200.000000 
+-118.220000,33.980000,34.000000,2225.000000,753.000000,2980.000000,736.000000,1.668500,128800.000000 +-118.220000,33.980000,30.000000,1971.000000,645.000000,2650.000000,605.000000,2.035700,169900.000000 +-118.220000,33.980000,18.000000,1781.000000,765.000000,1913.000000,702.000000,1.205900,255000.000000 +-118.220000,33.980000,15.000000,1011.000000,274.000000,899.000000,219.000000,2.704500,190600.000000 +-118.220000,33.970000,47.000000,1688.000000,386.000000,1663.000000,381.000000,4.060900,171300.000000 +-118.220000,33.970000,47.000000,1147.000000,297.000000,1097.000000,307.000000,2.638400,162900.000000 +-118.220000,33.970000,47.000000,1058.000000,295.000000,1097.000000,274.000000,2.881000,183300.000000 +-118.220000,33.970000,43.000000,381.000000,67.000000,259.000000,60.000000,3.031300,166100.000000 +-118.220000,33.960000,32.000000,2232.000000,603.000000,2361.000000,608.000000,2.596600,170900.000000 +-118.220000,33.950000,42.000000,3896.000000,981.000000,4496.000000,993.000000,3.153000,150900.000000 +-118.220000,33.940000,42.000000,1115.000000,297.000000,1412.000000,325.000000,3.090300,153500.000000 +-118.220000,33.940000,42.000000,1046.000000,287.000000,1218.000000,289.000000,2.653800,143400.000000 +-118.220000,33.940000,40.000000,930.000000,258.000000,1203.000000,244.000000,2.593800,115400.000000 +-118.220000,33.940000,38.000000,788.000000,224.000000,1155.000000,208.000000,3.354200,153800.000000 +-118.220000,33.930000,39.000000,1921.000000,483.000000,2286.000000,470.000000,3.016700,130000.000000 +-118.220000,33.920000,32.000000,1263.000000,333.000000,1789.000000,346.000000,1.995700,89300.000000 +-118.220000,33.920000,23.000000,926.000000,409.000000,1856.000000,408.000000,2.136600,100000.000000 +-118.220000,33.910000,31.000000,571.000000,153.000000,841.000000,158.000000,2.615400,89200.000000 +-118.220000,33.910000,28.000000,1847.000000,500.000000,2263.000000,473.000000,1.516100,103200.000000 +-118.220000,33.900000,40.000000,1802.000000,496.000000,2096.000000,468.000000,2.354200,97900.000000 +-118.220000,33.900000,38.000000,796.000000,159.000000,679.000000,167.000000,3.660700,110400.000000 +-118.220000,33.900000,35.000000,1649.000000,424.000000,1786.000000,388.000000,1.409100,105600.000000 +-118.220000,33.900000,30.000000,1007.000000,260.000000,1112.000000,238.000000,1.726200,115600.000000 +-118.220000,33.890000,41.000000,990.000000,228.000000,776.000000,207.000000,2.125000,120200.000000 +-118.220000,33.890000,37.000000,797.000000,190.000000,485.000000,166.000000,2.743400,95200.000000 +-118.220000,33.890000,36.000000,873.000000,240.000000,1086.000000,217.000000,2.250000,126600.000000 +-118.220000,33.890000,26.000000,266.000000,75.000000,252.000000,59.000000,2.121100,138100.000000 +-118.220000,33.880000,35.000000,998.000000,313.000000,1335.000000,311.000000,1.657400,102500.000000 +-118.220000,33.860000,16.000000,8732.000000,1489.000000,3944.000000,1493.000000,5.194800,203500.000000 +-118.220000,33.840000,38.000000,1928.000000,429.000000,1358.000000,399.000000,4.068700,160300.000000 +-118.220000,33.840000,35.000000,1131.000000,273.000000,1007.000000,269.000000,4.021900,168300.000000 +-118.220000,33.830000,44.000000,1792.000000,404.000000,1115.000000,358.000000,3.909100,174400.000000 +-118.220000,33.830000,43.000000,1426.000000,272.000000,871.000000,276.000000,3.708300,175200.000000 +-118.220000,33.830000,42.000000,1370.000000,299.000000,1018.000000,328.000000,4.447400,160200.000000 +-118.220000,33.820000,30.000000,1680.000000,469.000000,1779.000000,429.000000,3.608600,146300.000000 
+-118.220000,33.820000,17.000000,5357.000000,1332.000000,3030.000000,1266.000000,1.931100,138100.000000 +-118.220000,33.810000,41.000000,726.000000,166.000000,602.000000,183.000000,3.788500,156900.000000 +-118.220000,33.810000,38.000000,1486.000000,359.000000,1345.000000,326.000000,3.398800,147800.000000 +-118.220000,33.800000,36.000000,1285.000000,347.000000,1291.000000,337.000000,3.770800,157100.000000 +-118.220000,33.800000,33.000000,1984.000000,477.000000,1764.000000,440.000000,3.875000,165100.000000 +-118.220000,33.790000,48.000000,143.000000,41.000000,222.000000,50.000000,1.700000,104200.000000 +-118.230000,35.480000,17.000000,2354.000000,514.000000,775.000000,380.000000,1.836900,59400.000000 +-118.230000,34.660000,25.000000,2627.000000,387.000000,1059.000000,338.000000,3.638200,138200.000000 +-118.230000,34.240000,31.000000,3857.000000,607.000000,1695.000000,572.000000,7.642000,396400.000000 +-118.230000,34.230000,34.000000,2377.000000,362.000000,1055.000000,362.000000,6.000000,367100.000000 +-118.230000,34.220000,37.000000,1376.000000,237.000000,618.000000,226.000000,5.977100,431800.000000 +-118.230000,34.220000,36.000000,2288.000000,439.000000,1079.000000,434.000000,4.548600,361000.000000 +-118.230000,34.210000,50.000000,309.000000,47.000000,121.000000,45.000000,6.213000,285000.000000 +-118.230000,34.210000,38.000000,1399.000000,390.000000,859.000000,386.000000,3.414800,234800.000000 +-118.230000,34.210000,36.000000,2988.000000,719.000000,1357.000000,657.000000,3.517400,268000.000000 +-118.230000,34.210000,32.000000,1464.000000,406.000000,693.000000,380.000000,2.546300,200000.000000 +-118.230000,34.200000,51.000000,1477.000000,280.000000,750.000000,295.000000,5.392500,317900.000000 +-118.230000,34.200000,48.000000,1473.000000,294.000000,807.000000,296.000000,3.399000,306300.000000 +-118.230000,34.180000,47.000000,1853.000000,345.000000,757.000000,310.000000,3.687500,422000.000000 +-118.230000,34.180000,45.000000,2332.000000,343.000000,943.000000,339.000000,8.113200,446600.000000 +-118.230000,34.160000,31.000000,3105.000000,582.000000,1359.000000,547.000000,5.171800,429100.000000 +-118.230000,34.150000,40.000000,2124.000000,370.000000,998.000000,372.000000,5.336900,370400.000000 +-118.230000,34.150000,26.000000,1649.000000,522.000000,1332.000000,483.000000,3.100400,257100.000000 +-118.230000,34.140000,33.000000,2865.000000,864.000000,2061.000000,790.000000,2.626800,201300.000000 +-118.230000,34.140000,25.000000,2864.000000,844.000000,1745.000000,803.000000,2.916700,224300.000000 +-118.230000,34.130000,48.000000,737.000000,166.000000,462.000000,131.000000,3.589300,212500.000000 +-118.230000,34.130000,47.000000,1162.000000,235.000000,781.000000,268.000000,4.652800,244400.000000 +-118.230000,34.130000,37.000000,1799.000000,426.000000,1088.000000,417.000000,2.975000,244500.000000 +-118.230000,34.130000,34.000000,609.000000,149.000000,407.000000,145.000000,4.576600,185800.000000 +-118.230000,34.120000,32.000000,2094.000000,491.000000,1413.000000,479.000000,4.508900,221100.000000 +-118.230000,34.120000,28.000000,1546.000000,465.000000,974.000000,408.000000,2.284300,183800.000000 +-118.230000,34.110000,35.000000,4148.000000,971.000000,3220.000000,892.000000,3.338900,187100.000000 +-118.230000,34.110000,33.000000,2612.000000,646.000000,2496.000000,606.000000,3.133000,156000.000000 +-118.230000,34.110000,31.000000,1021.000000,191.000000,495.000000,191.000000,5.505100,223500.000000 +-118.230000,34.100000,46.000000,2483.000000,587.000000,2121.000000,553.000000,2.278800,152900.000000 
+-118.230000,34.100000,41.000000,1353.000000,379.000000,1536.000000,416.000000,2.168700,157000.000000 +-118.230000,34.100000,38.000000,1051.000000,249.000000,799.000000,229.000000,2.712000,143800.000000 +-118.230000,34.090000,47.000000,859.000000,239.000000,913.000000,234.000000,2.644200,136100.000000 +-118.230000,34.090000,45.000000,1747.000000,484.000000,1680.000000,441.000000,2.605100,155500.000000 +-118.230000,34.090000,41.000000,438.000000,201.000000,690.000000,161.000000,2.047600,181300.000000 +-118.230000,34.070000,40.000000,506.000000,119.000000,397.000000,114.000000,3.194400,143800.000000 +-118.230000,34.070000,35.000000,1335.000000,440.000000,1586.000000,445.000000,1.972200,156300.000000 +-118.230000,34.050000,52.000000,346.000000,270.000000,346.000000,251.000000,2.531300,225000.000000 +-118.230000,34.000000,35.000000,167.000000,60.000000,267.000000,55.000000,1.522700,350000.000000 +-118.230000,33.990000,5.000000,706.000000,203.000000,839.000000,199.000000,4.520800,165000.000000 +-118.230000,33.990000,37.000000,378.000000,176.000000,714.000000,156.000000,2.191200,112500.000000 +-118.230000,33.980000,35.000000,1366.000000,496.000000,2160.000000,497.000000,2.205900,150000.000000 +-118.230000,33.980000,30.000000,2562.000000,959.000000,3909.000000,955.000000,1.992900,150600.000000 +-118.230000,33.980000,25.000000,986.000000,310.000000,1439.000000,251.000000,2.390000,183300.000000 +-118.230000,33.970000,47.000000,932.000000,295.000000,1226.000000,264.000000,1.606500,111400.000000 +-118.230000,33.970000,44.000000,2748.000000,715.000000,2962.000000,703.000000,2.695100,169300.000000 +-118.230000,33.960000,44.000000,3186.000000,876.000000,3913.000000,842.000000,3.014300,148200.000000 +-118.230000,33.960000,42.000000,1977.000000,570.000000,2406.000000,557.000000,2.591300,151600.000000 +-118.230000,33.960000,39.000000,405.000000,163.000000,686.000000,164.000000,1.695000,94800.000000 +-118.230000,33.950000,43.000000,1683.000000,520.000000,2190.000000,494.000000,2.239100,152800.000000 +-118.230000,33.950000,42.000000,705.000000,173.000000,739.000000,140.000000,0.916600,99000.000000 +-118.230000,33.950000,37.000000,2667.000000,671.000000,2865.000000,683.000000,0.683100,87500.000000 +-118.230000,33.950000,27.000000,504.000000,142.000000,789.000000,167.000000,0.951800,91400.000000 +-118.230000,33.940000,39.000000,1141.000000,258.000000,1313.000000,234.000000,2.018700,90100.000000 +-118.230000,33.940000,36.000000,1110.000000,307.000000,1417.000000,302.000000,2.333300,92100.000000 +-118.230000,33.940000,35.000000,1090.000000,267.000000,1339.000000,263.000000,2.160700,97600.000000 +-118.230000,33.930000,37.000000,239.000000,49.000000,308.000000,52.000000,1.402800,105400.000000 +-118.230000,33.930000,36.000000,501.000000,123.000000,487.000000,118.000000,1.300000,87000.000000 +-118.230000,33.930000,35.000000,1149.000000,277.000000,909.000000,214.000000,1.741100,96700.000000 +-118.230000,33.930000,30.000000,1147.000000,260.000000,1219.000000,210.000000,2.065800,93200.000000 +-118.230000,33.930000,23.000000,545.000000,131.000000,610.000000,126.000000,1.486100,95100.000000 +-118.230000,33.920000,32.000000,1735.000000,430.000000,1699.000000,386.000000,1.179300,103800.000000 +-118.230000,33.920000,24.000000,1555.000000,406.000000,1665.000000,361.000000,1.643700,98800.000000 +-118.230000,33.910000,34.000000,789.000000,200.000000,1041.000000,191.000000,3.119000,90300.000000 +-118.230000,33.910000,34.000000,661.000000,146.000000,742.000000,143.000000,2.173400,88200.000000 
+-118.230000,33.910000,34.000000,1060.000000,276.000000,1215.000000,250.000000,2.080400,84700.000000 +-118.230000,33.910000,33.000000,677.000000,182.000000,984.000000,174.000000,2.589300,88900.000000 +-118.230000,33.900000,34.000000,2462.000000,553.000000,2334.000000,502.000000,1.641000,96800.000000 +-118.230000,33.900000,31.000000,2143.000000,522.000000,2276.000000,519.000000,1.809500,100800.000000 +-118.230000,33.890000,36.000000,2598.000000,514.000000,1872.000000,514.000000,3.166700,117700.000000 +-118.230000,33.890000,35.000000,1255.000000,344.000000,1782.000000,343.000000,2.194900,95100.000000 +-118.230000,33.880000,41.000000,1941.000000,367.000000,1204.000000,323.000000,3.041700,113700.000000 +-118.230000,33.880000,35.000000,842.000000,201.000000,763.000000,189.000000,2.671900,109800.000000 +-118.230000,33.840000,25.000000,1106.000000,207.000000,888.000000,216.000000,5.330700,207000.000000 +-118.230000,33.800000,26.000000,239.000000,135.000000,165.000000,112.000000,1.333300,187500.000000 +-118.230000,33.780000,20.000000,59.000000,24.000000,69.000000,23.000000,2.558800,350000.000000 +-118.240000,34.240000,31.000000,3812.000000,595.000000,1645.000000,591.000000,7.585000,380100.000000 +-118.240000,34.230000,43.000000,1061.000000,208.000000,514.000000,208.000000,6.010000,254200.000000 +-118.240000,34.230000,42.000000,1541.000000,280.000000,753.000000,264.000000,5.102800,292100.000000 +-118.240000,34.230000,41.000000,1912.000000,308.000000,896.000000,314.000000,5.347300,352700.000000 +-118.240000,34.220000,36.000000,2507.000000,517.000000,1232.000000,470.000000,5.529000,241300.000000 +-118.240000,34.220000,34.000000,1722.000000,406.000000,926.000000,371.000000,4.152300,252000.000000 +-118.240000,34.210000,32.000000,3817.000000,886.000000,1888.000000,829.000000,3.577700,245600.000000 +-118.240000,34.160000,52.000000,850.000000,162.000000,493.000000,160.000000,6.940800,298800.000000 +-118.240000,34.160000,52.000000,2187.000000,284.000000,733.000000,274.000000,9.582300,406200.000000 +-118.240000,34.160000,52.000000,1904.000000,297.000000,797.000000,286.000000,6.660300,380400.000000 +-118.240000,34.160000,40.000000,2549.000000,591.000000,1156.000000,546.000000,3.333300,374300.000000 +-118.240000,34.150000,7.000000,2063.000000,670.000000,1892.000000,643.000000,1.730100,202300.000000 +-118.240000,34.150000,45.000000,1235.000000,271.000000,499.000000,263.000000,3.143500,282600.000000 +-118.240000,34.150000,20.000000,2734.000000,658.000000,1562.000000,607.000000,3.390600,284100.000000 +-118.240000,34.150000,19.000000,4852.000000,1465.000000,3171.000000,1332.000000,2.592400,192900.000000 +-118.240000,34.140000,9.000000,4877.000000,1488.000000,4486.000000,1458.000000,2.442100,222100.000000 +-118.240000,34.140000,36.000000,1813.000000,560.000000,1501.000000,544.000000,1.912500,238000.000000 +-118.240000,34.140000,28.000000,1843.000000,554.000000,1402.000000,512.000000,2.462000,254000.000000 +-118.240000,34.140000,20.000000,3196.000000,994.000000,2929.000000,983.000000,3.020600,219500.000000 +-118.240000,34.130000,45.000000,2170.000000,401.000000,1043.000000,394.000000,5.692100,269000.000000 +-118.240000,34.130000,45.000000,1971.000000,439.000000,1245.000000,430.000000,4.027200,260500.000000 +-118.240000,34.130000,37.000000,1644.000000,395.000000,959.000000,383.000000,3.363600,257700.000000 +-118.240000,34.120000,41.000000,1213.000000,301.000000,801.000000,300.000000,3.180600,204200.000000 +-118.240000,34.120000,34.000000,80.000000,26.000000,125.000000,35.000000,0.890700,154200.000000 
+-118.240000,34.120000,29.000000,2904.000000,892.000000,3320.000000,765.000000,2.611100,168800.000000 +-118.240000,34.110000,50.000000,2141.000000,451.000000,1777.000000,426.000000,2.767900,178800.000000 +-118.240000,34.110000,39.000000,1148.000000,348.000000,1161.000000,333.000000,2.216700,176700.000000 +-118.240000,34.100000,42.000000,1525.000000,456.000000,1688.000000,432.000000,3.169100,141300.000000 +-118.240000,34.080000,52.000000,137.000000,26.000000,65.000000,24.000000,4.025000,137500.000000 +-118.240000,34.080000,52.000000,109.000000,20.000000,86.000000,24.000000,4.984400,187500.000000 +-118.240000,34.070000,27.000000,223.000000,80.000000,249.000000,82.000000,1.613600,137500.000000 +-118.240000,34.060000,8.000000,1204.000000,552.000000,1074.000000,517.000000,1.022700,87500.000000 +-118.240000,34.060000,33.000000,390.000000,199.000000,435.000000,193.000000,1.197900,350000.000000 +-118.240000,34.060000,19.000000,2870.000000,1021.000000,3325.000000,978.000000,1.739500,162500.000000 +-118.240000,34.050000,13.000000,1703.000000,697.000000,1823.000000,669.000000,0.828800,181300.000000 +-118.240000,34.040000,52.000000,116.000000,107.000000,171.000000,92.000000,1.076900,112500.000000 +-118.240000,34.020000,48.000000,542.000000,150.000000,571.000000,114.000000,1.848500,90600.000000 +-118.240000,34.010000,43.000000,1456.000000,444.000000,2098.000000,433.000000,1.892900,99200.000000 +-118.240000,34.010000,30.000000,405.000000,86.000000,376.000000,68.000000,1.781300,127500.000000 +-118.240000,34.000000,43.000000,863.000000,206.000000,788.000000,187.000000,0.946300,95000.000000 +-118.240000,34.000000,38.000000,1715.000000,414.000000,1714.000000,389.000000,1.713200,108200.000000 +-118.240000,33.990000,44.000000,448.000000,116.000000,504.000000,96.000000,1.875000,98600.000000 +-118.240000,33.990000,41.000000,1425.000000,372.000000,1803.000000,353.000000,1.673100,88200.000000 +-118.240000,33.990000,33.000000,885.000000,294.000000,1270.000000,282.000000,2.161500,118800.000000 +-118.240000,33.990000,28.000000,312.000000,89.000000,498.000000,87.000000,2.410700,96400.000000 +-118.240000,33.980000,45.000000,173.000000,42.000000,230.000000,57.000000,3.072400,110700.000000 +-118.240000,33.980000,37.000000,1196.000000,364.000000,1622.000000,327.000000,2.125000,108900.000000 +-118.240000,33.980000,30.000000,861.000000,250.000000,1062.000000,231.000000,1.750000,115400.000000 +-118.240000,33.970000,41.000000,1182.000000,346.000000,1644.000000,346.000000,2.147300,115100.000000 +-118.240000,33.970000,38.000000,1657.000000,467.000000,2033.000000,443.000000,2.142900,118500.000000 +-118.240000,33.970000,37.000000,1053.000000,263.000000,1354.000000,292.000000,2.583300,112500.000000 +-118.240000,33.960000,44.000000,1338.000000,366.000000,1765.000000,388.000000,1.777800,109900.000000 +-118.240000,33.960000,39.000000,643.000000,186.000000,821.000000,191.000000,2.572900,97300.000000 +-118.240000,33.960000,37.000000,1602.000000,388.000000,1553.000000,342.000000,2.065500,93400.000000 +-118.240000,33.960000,30.000000,859.000000,221.000000,912.000000,191.000000,1.904100,105100.000000 +-118.240000,33.950000,40.000000,1193.000000,280.000000,1210.000000,286.000000,1.350000,89500.000000 +-118.240000,33.950000,37.000000,649.000000,147.000000,653.000000,147.000000,1.479200,97500.000000 +-118.240000,33.950000,37.000000,441.000000,125.000000,390.000000,98.000000,1.651300,90200.000000 +-118.240000,33.950000,36.000000,2316.000000,543.000000,1938.000000,507.000000,1.250000,97400.000000 
+-118.240000,33.950000,21.000000,1260.000000,342.000000,1167.000000,310.000000,0.970800,107600.000000 +-118.240000,33.940000,42.000000,380.000000,106.000000,411.000000,100.000000,0.970500,90000.000000 +-118.240000,33.940000,39.000000,1215.000000,273.000000,1211.000000,265.000000,1.721200,95500.000000 +-118.240000,33.940000,37.000000,869.000000,241.000000,1040.000000,233.000000,2.000000,84200.000000 +-118.240000,33.940000,34.000000,796.000000,180.000000,673.000000,144.000000,2.076900,88300.000000 +-118.240000,33.940000,30.000000,940.000000,211.000000,1071.000000,204.000000,1.267900,92000.000000 +-118.240000,33.930000,37.000000,1027.000000,258.000000,824.000000,248.000000,1.513200,86300.000000 +-118.240000,33.930000,32.000000,1063.000000,282.000000,992.000000,253.000000,0.898400,88700.000000 +-118.240000,33.920000,44.000000,1079.000000,210.000000,601.000000,182.000000,2.241100,106400.000000 +-118.240000,33.920000,42.000000,328.000000,100.000000,605.000000,87.000000,2.446400,97400.000000 +-118.240000,33.920000,40.000000,1772.000000,369.000000,1122.000000,324.000000,3.276800,96100.000000 +-118.240000,33.910000,40.000000,972.000000,240.000000,761.000000,225.000000,1.468800,88200.000000 +-118.240000,33.910000,38.000000,745.000000,152.000000,721.000000,160.000000,1.875000,102900.000000 +-118.240000,33.910000,36.000000,1446.000000,316.000000,1286.000000,314.000000,2.708300,103600.000000 +-118.240000,33.900000,40.000000,1308.000000,272.000000,901.000000,257.000000,2.826900,98000.000000 +-118.240000,33.900000,39.000000,642.000000,129.000000,475.000000,123.000000,1.208300,92600.000000 +-118.240000,33.900000,38.000000,2055.000000,442.000000,1518.000000,425.000000,2.338200,103000.000000 +-118.240000,33.900000,35.000000,1079.000000,247.000000,1055.000000,243.000000,2.375000,93600.000000 +-118.240000,33.890000,34.000000,1479.000000,332.000000,1166.000000,322.000000,2.616500,100900.000000 +-118.240000,33.880000,37.000000,1843.000000,366.000000,1207.000000,351.000000,2.482100,111000.000000 +-118.240000,33.830000,22.000000,7368.000000,1367.000000,4721.000000,1342.000000,4.843800,213100.000000 +-118.240000,33.780000,24.000000,574.000000,173.000000,784.000000,162.000000,2.250000,152300.000000 +-118.250000,34.250000,34.000000,3150.000000,518.000000,1392.000000,480.000000,4.935500,336900.000000 +-118.250000,34.230000,41.000000,1979.000000,496.000000,1157.000000,459.000000,4.408300,217700.000000 +-118.250000,34.230000,37.000000,1954.000000,368.000000,967.000000,370.000000,5.086200,261300.000000 +-118.250000,34.230000,35.000000,2839.000000,592.000000,1413.000000,538.000000,4.166700,271200.000000 +-118.250000,34.230000,34.000000,2421.000000,475.000000,1232.000000,454.000000,4.685200,296200.000000 +-118.250000,34.170000,52.000000,1532.000000,292.000000,631.000000,275.000000,5.124200,372900.000000 +-118.250000,34.160000,24.000000,5131.000000,1436.000000,2690.000000,1371.000000,2.566800,280000.000000 +-118.250000,34.160000,14.000000,3700.000000,945.000000,1681.000000,905.000000,3.905400,200000.000000 +-118.250000,34.150000,32.000000,1377.000000,444.000000,768.000000,422.000000,2.262100,187500.000000 +-118.250000,34.150000,31.000000,1238.000000,338.000000,605.000000,331.000000,2.847800,228100.000000 +-118.250000,34.150000,20.000000,3960.000000,1027.000000,1729.000000,978.000000,3.044100,193800.000000 +-118.250000,34.150000,15.000000,3712.000000,1005.000000,1888.000000,890.000000,3.687500,209600.000000 +-118.250000,34.140000,37.000000,584.000000,260.000000,552.000000,235.000000,1.823500,275000.000000 
+-118.250000,34.140000,30.000000,1615.000000,570.000000,1245.000000,544.000000,1.892900,196900.000000 +-118.250000,34.140000,25.000000,5980.000000,1856.000000,5217.000000,1772.000000,2.506000,184500.000000 +-118.250000,34.140000,13.000000,3487.000000,1131.000000,3749.000000,1072.000000,2.160200,221900.000000 +-118.250000,34.130000,36.000000,2946.000000,1025.000000,2542.000000,912.000000,2.224400,255900.000000 +-118.250000,34.120000,21.000000,739.000000,265.000000,861.000000,246.000000,2.485600,181300.000000 +-118.250000,34.120000,11.000000,1281.000000,418.000000,1584.000000,330.000000,2.888900,153100.000000 +-118.250000,34.110000,52.000000,125.000000,42.000000,99.000000,40.000000,3.437500,170000.000000 +-118.250000,34.110000,43.000000,2230.000000,583.000000,1667.000000,543.000000,2.866700,217800.000000 +-118.250000,34.110000,43.000000,2222.000000,635.000000,1817.000000,606.000000,2.746600,208900.000000 +-118.250000,34.100000,42.000000,598.000000,147.000000,312.000000,144.000000,2.625000,164300.000000 +-118.250000,34.100000,42.000000,4198.000000,956.000000,1935.000000,878.000000,3.718400,277300.000000 +-118.250000,34.090000,52.000000,3142.000000,765.000000,1728.000000,682.000000,3.186400,189800.000000 +-118.250000,34.090000,52.000000,2050.000000,429.000000,957.000000,418.000000,3.560300,210000.000000 +-118.250000,34.090000,52.000000,1866.000000,470.000000,1211.000000,417.000000,2.935000,189400.000000 +-118.250000,34.090000,43.000000,1903.000000,635.000000,1919.000000,613.000000,2.633600,174300.000000 +-118.250000,34.080000,47.000000,2133.000000,689.000000,2104.000000,662.000000,2.613600,169200.000000 +-118.250000,34.070000,47.000000,2059.000000,618.000000,2033.000000,544.000000,1.902800,217900.000000 +-118.250000,34.070000,43.000000,764.000000,322.000000,1275.000000,306.000000,2.000000,175000.000000 +-118.250000,34.070000,18.000000,4297.000000,1420.000000,4332.000000,1286.000000,2.254500,192500.000000 +-118.250000,34.070000,16.000000,719.000000,225.000000,801.000000,218.000000,2.394200,133300.000000 +-118.250000,34.060000,12.000000,4011.000000,1438.000000,1673.000000,1088.000000,5.308100,287500.000000 +-118.250000,34.050000,8.000000,3105.000000,1256.000000,1086.000000,997.000000,0.813100,275000.000000 +-118.250000,34.050000,52.000000,2806.000000,1944.000000,2232.000000,1605.000000,0.677500,350000.000000 +-118.250000,34.030000,52.000000,1274.000000,418.000000,1655.000000,368.000000,2.190500,124000.000000 +-118.250000,34.020000,35.000000,1368.000000,486.000000,2239.000000,461.000000,1.913000,114300.000000 +-118.250000,34.020000,33.000000,1676.000000,525.000000,2564.000000,515.000000,2.195700,100800.000000 +-118.250000,34.020000,32.000000,1375.000000,448.000000,1698.000000,432.000000,1.630200,130700.000000 +-118.250000,34.020000,32.000000,1311.000000,410.000000,1792.000000,396.000000,2.330400,119900.000000 +-118.250000,34.010000,43.000000,1575.000000,475.000000,1980.000000,469.000000,1.742500,100500.000000 +-118.250000,34.010000,43.000000,1429.000000,386.000000,1412.000000,354.000000,1.328700,107200.000000 +-118.250000,34.010000,36.000000,879.000000,262.000000,1034.000000,236.000000,1.285700,99300.000000 +-118.250000,34.010000,31.000000,1301.000000,403.000000,1952.000000,377.000000,2.146600,100800.000000 +-118.250000,34.010000,30.000000,962.000000,291.000000,1280.000000,263.000000,1.446400,110200.000000 +-118.250000,34.010000,28.000000,481.000000,136.000000,596.000000,128.000000,1.239600,90300.000000 
+-118.250000,34.000000,41.000000,1768.000000,475.000000,1721.000000,474.000000,1.303000,90400.000000 +-118.250000,34.000000,36.000000,1176.000000,309.000000,1267.000000,292.000000,1.638200,105000.000000 +-118.250000,34.000000,34.000000,1905.000000,552.000000,2194.000000,521.000000,1.479200,95800.000000 +-118.250000,34.000000,32.000000,1218.000000,342.000000,1292.000000,304.000000,1.578100,102900.000000 +-118.250000,34.000000,29.000000,1419.000000,363.000000,1696.000000,317.000000,2.281300,101300.000000 +-118.250000,33.990000,42.000000,2261.000000,574.000000,2496.000000,527.000000,1.555600,98500.000000 +-118.250000,33.990000,41.000000,2215.000000,544.000000,2054.000000,480.000000,1.527200,100300.000000 +-118.250000,33.980000,44.000000,1087.000000,335.000000,1441.000000,310.000000,1.666700,112500.000000 +-118.250000,33.980000,40.000000,1867.000000,633.000000,2223.000000,609.000000,1.720700,105100.000000 +-118.250000,33.980000,37.000000,1503.000000,392.000000,1886.000000,401.000000,2.563700,125000.000000 +-118.250000,33.980000,37.000000,1045.000000,361.000000,1666.000000,337.000000,1.792900,97200.000000 +-118.250000,33.970000,43.000000,1735.000000,535.000000,2288.000000,524.000000,1.911900,98800.000000 +-118.250000,33.970000,39.000000,1346.000000,380.000000,1520.000000,356.000000,1.163500,108700.000000 +-118.250000,33.970000,38.000000,1231.000000,346.000000,1217.000000,354.000000,1.866100,106600.000000 +-118.250000,33.970000,37.000000,794.000000,210.000000,814.000000,213.000000,2.291700,112000.000000 +-118.250000,33.970000,36.000000,1026.000000,294.000000,1316.000000,268.000000,1.770800,102600.000000 +-118.250000,33.970000,32.000000,879.000000,257.000000,1057.000000,230.000000,1.677600,114800.000000 +-118.250000,33.960000,48.000000,1052.000000,234.000000,793.000000,216.000000,1.658500,92900.000000 +-118.250000,33.960000,43.000000,2015.000000,419.000000,1543.000000,399.000000,1.867200,98100.000000 +-118.250000,33.960000,43.000000,1876.000000,454.000000,1571.000000,458.000000,2.032300,112500.000000 +-118.250000,33.960000,42.000000,1326.000000,295.000000,918.000000,258.000000,2.386400,98800.000000 +-118.250000,33.950000,48.000000,1766.000000,424.000000,1655.000000,420.000000,0.975100,95500.000000 +-118.250000,33.950000,41.000000,1576.000000,339.000000,1252.000000,302.000000,1.979800,98100.000000 +-118.250000,33.950000,35.000000,1405.000000,326.000000,1086.000000,273.000000,1.037500,89800.000000 +-118.250000,33.950000,28.000000,2136.000000,575.000000,1799.000000,476.000000,1.542700,95700.000000 +-118.250000,33.940000,44.000000,1463.000000,312.000000,940.000000,312.000000,2.333300,99800.000000 +-118.250000,33.940000,43.000000,793.000000,235.000000,736.000000,231.000000,0.852700,90400.000000 +-118.250000,33.940000,37.000000,1002.000000,270.000000,1092.000000,273.000000,1.633300,94500.000000 +-118.250000,33.930000,42.000000,763.000000,191.000000,754.000000,174.000000,2.048600,101800.000000 +-118.250000,33.930000,42.000000,657.000000,147.000000,526.000000,132.000000,2.500000,110200.000000 +-118.250000,33.930000,38.000000,180.000000,43.000000,246.000000,56.000000,2.850000,90000.000000 +-118.250000,33.930000,36.000000,2452.000000,734.000000,2664.000000,667.000000,0.929800,100000.000000 +-118.250000,33.930000,27.000000,581.000000,135.000000,647.000000,131.000000,3.291700,83100.000000 +-118.250000,33.920000,46.000000,723.000000,154.000000,411.000000,165.000000,2.089300,96500.000000 +-118.250000,33.920000,44.000000,1737.000000,363.000000,1184.000000,343.000000,2.536300,95900.000000 
+-118.250000,33.920000,44.000000,1137.000000,235.000000,747.000000,225.000000,2.000000,92600.000000 +-118.250000,33.910000,36.000000,1950.000000,365.000000,1125.000000,374.000000,3.111100,119300.000000 +-118.250000,33.910000,35.000000,1479.000000,272.000000,963.000000,292.000000,3.491700,109500.000000 +-118.250000,33.900000,42.000000,1386.000000,320.000000,1163.000000,319.000000,2.427100,89500.000000 +-118.250000,33.900000,38.000000,1201.000000,223.000000,733.000000,206.000000,3.380400,105800.000000 +-118.250000,33.900000,37.000000,2119.000000,442.000000,1372.000000,406.000000,1.960500,106200.000000 +-118.250000,33.900000,36.000000,1135.000000,231.000000,614.000000,227.000000,2.552100,113100.000000 +-118.250000,33.890000,41.000000,1476.000000,286.000000,1086.000000,278.000000,2.463200,111700.000000 +-118.250000,33.890000,37.000000,1042.000000,213.000000,699.000000,196.000000,2.964300,103200.000000 +-118.250000,33.890000,36.000000,406.000000,71.000000,268.000000,77.000000,3.900000,115800.000000 +-118.250000,33.890000,36.000000,1527.000000,309.000000,1154.000000,279.000000,3.309500,105500.000000 +-118.250000,33.890000,35.000000,1582.000000,391.000000,1957.000000,404.000000,2.453700,91500.000000 +-118.250000,33.890000,34.000000,1367.000000,288.000000,1183.000000,286.000000,2.681200,104100.000000 +-118.250000,33.880000,37.000000,1027.000000,217.000000,1042.000000,254.000000,2.212100,98600.000000 +-118.250000,33.860000,26.000000,3022.000000,476.000000,1852.000000,452.000000,6.053100,186400.000000 +-118.250000,33.800000,36.000000,1697.000000,394.000000,1274.000000,396.000000,3.350000,163100.000000 +-118.250000,33.790000,39.000000,981.000000,286.000000,1183.000000,298.000000,1.920800,139800.000000 +-118.250000,33.790000,34.000000,1349.000000,371.000000,1716.000000,380.000000,2.714300,138100.000000 +-118.250000,33.790000,32.000000,1205.000000,340.000000,1799.000000,370.000000,2.375000,128000.000000 +-118.250000,33.780000,32.000000,296.000000,139.000000,511.000000,133.000000,1.444400,182100.000000 +-118.260000,34.500000,6.000000,5813.000000,908.000000,2275.000000,790.000000,4.777800,340400.000000 +-118.260000,34.240000,42.000000,890.000000,179.000000,555.000000,200.000000,4.482100,271900.000000 +-118.260000,34.240000,35.000000,2485.000000,418.000000,1226.000000,406.000000,5.708300,329500.000000 +-118.260000,34.240000,35.000000,1666.000000,280.000000,788.000000,273.000000,6.627700,344400.000000 +-118.260000,34.230000,43.000000,1428.000000,325.000000,836.000000,302.000000,4.575900,209200.000000 +-118.260000,34.230000,38.000000,1107.000000,194.000000,518.000000,195.000000,7.558200,263700.000000 +-118.260000,34.230000,33.000000,1805.000000,303.000000,838.000000,301.000000,5.430600,326600.000000 +-118.260000,34.180000,32.000000,14556.000000,2077.000000,5459.000000,2017.000000,8.165700,500001.000000 +-118.260000,34.170000,20.000000,5949.000000,1417.000000,2593.000000,1337.000000,3.857600,318600.000000 +-118.260000,34.160000,20.000000,3407.000000,885.000000,1883.000000,870.000000,3.732100,351100.000000 +-118.260000,34.160000,19.000000,2919.000000,857.000000,1866.000000,811.000000,3.173300,206300.000000 +-118.260000,34.160000,18.000000,1775.000000,525.000000,950.000000,522.000000,3.541700,177100.000000 +-118.260000,34.150000,6.000000,3340.000000,945.000000,2315.000000,846.000000,2.888400,252300.000000 +-118.260000,34.150000,18.000000,2481.000000,756.000000,1763.000000,675.000000,2.808800,247500.000000 +-118.260000,34.150000,14.000000,2981.000000,894.000000,1941.000000,863.000000,3.000000,178600.000000 
+-118.260000,34.140000,6.000000,1727.000000,506.000000,1200.000000,439.000000,4.108300,210700.000000 +-118.260000,34.140000,29.000000,3431.000000,1222.000000,4094.000000,1205.000000,2.261400,248100.000000 +-118.260000,34.140000,23.000000,1336.000000,396.000000,1255.000000,359.000000,2.538800,205000.000000 +-118.260000,34.130000,37.000000,196.000000,74.000000,194.000000,68.000000,1.218800,218800.000000 +-118.260000,34.130000,37.000000,1383.000000,470.000000,1185.000000,451.000000,2.500000,207100.000000 +-118.260000,34.120000,52.000000,2290.000000,520.000000,1278.000000,485.000000,3.839300,238200.000000 +-118.260000,34.120000,52.000000,1942.000000,476.000000,1375.000000,477.000000,2.734800,209100.000000 +-118.260000,34.110000,52.000000,1740.000000,402.000000,749.000000,335.000000,3.567300,270700.000000 +-118.260000,34.100000,52.000000,1310.000000,263.000000,689.000000,208.000000,4.072100,350000.000000 +-118.260000,34.100000,48.000000,2566.000000,571.000000,1421.000000,563.000000,3.657900,318600.000000 +-118.260000,34.090000,51.000000,1532.000000,366.000000,669.000000,333.000000,3.643400,278800.000000 +-118.260000,34.090000,36.000000,3503.000000,833.000000,2652.000000,788.000000,3.844800,241400.000000 +-118.260000,34.080000,52.000000,984.000000,276.000000,994.000000,260.000000,2.381600,166700.000000 +-118.260000,34.080000,50.000000,1791.000000,660.000000,2183.000000,675.000000,1.794500,166700.000000 +-118.260000,34.080000,46.000000,945.000000,250.000000,910.000000,252.000000,3.503900,187500.000000 +-118.260000,34.080000,45.000000,2174.000000,627.000000,1992.000000,557.000000,2.542800,167800.000000 +-118.260000,34.080000,41.000000,1396.000000,360.000000,1069.000000,363.000000,2.437500,203300.000000 +-118.260000,34.070000,52.000000,830.000000,200.000000,701.000000,189.000000,2.762500,232100.000000 +-118.260000,34.070000,52.000000,1802.000000,613.000000,2382.000000,587.000000,1.843800,185900.000000 +-118.260000,34.070000,46.000000,2341.000000,703.000000,2371.000000,648.000000,2.403800,181700.000000 +-118.260000,34.070000,36.000000,2306.000000,813.000000,2823.000000,765.000000,2.021400,170500.000000 +-118.260000,34.070000,28.000000,579.000000,184.000000,673.000000,202.000000,2.625000,187500.000000 +-118.260000,34.060000,38.000000,715.000000,282.000000,1174.000000,300.000000,2.345000,225000.000000 +-118.260000,34.060000,15.000000,326.000000,123.000000,490.000000,105.000000,1.488600,175000.000000 +-118.260000,34.050000,52.000000,58.000000,52.000000,41.000000,27.000000,4.097200,500001.000000 +-118.260000,34.040000,6.000000,1529.000000,566.000000,1051.000000,473.000000,2.462000,162500.000000 +-118.260000,34.030000,49.000000,299.000000,90.000000,287.000000,68.000000,1.209600,100000.000000 +-118.260000,34.020000,48.000000,1465.000000,440.000000,1859.000000,400.000000,1.313400,96200.000000 +-118.260000,34.020000,46.000000,1249.000000,357.000000,1607.000000,331.000000,2.070300,114800.000000 +-118.260000,34.020000,38.000000,980.000000,285.000000,1308.000000,310.000000,1.565200,123100.000000 +-118.260000,34.020000,38.000000,1091.000000,349.000000,1786.000000,340.000000,2.131000,136500.000000 +-118.260000,34.020000,37.000000,1551.000000,501.000000,2173.000000,474.000000,2.166700,117700.000000 +-118.260000,34.010000,47.000000,1269.000000,323.000000,1628.000000,325.000000,1.508900,115800.000000 +-118.260000,34.010000,43.000000,2179.000000,682.000000,2624.000000,609.000000,1.864100,108200.000000 +-118.260000,34.010000,38.000000,697.000000,208.000000,749.000000,206.000000,1.465300,118800.000000 
+-118.260000,34.010000,37.000000,2451.000000,668.000000,2824.000000,598.000000,1.907400,99600.000000 +-118.260000,34.000000,41.000000,1733.000000,492.000000,1776.000000,453.000000,1.622100,104200.000000 +-118.260000,34.000000,37.000000,2615.000000,697.000000,2484.000000,630.000000,1.920800,103400.000000 +-118.260000,34.000000,27.000000,1611.000000,479.000000,1457.000000,458.000000,0.894100,91900.000000 +-118.260000,33.990000,47.000000,1865.000000,465.000000,1916.000000,438.000000,1.824200,95000.000000 +-118.260000,33.990000,30.000000,1702.000000,443.000000,1966.000000,442.000000,1.552100,97500.000000 +-118.260000,33.980000,43.000000,762.000000,206.000000,854.000000,188.000000,1.231500,98200.000000 +-118.260000,33.970000,52.000000,1331.000000,346.000000,1144.000000,362.000000,1.532600,90600.000000 +-118.260000,33.970000,46.000000,1295.000000,351.000000,1120.000000,323.000000,1.712100,98200.000000 +-118.260000,33.970000,46.000000,1086.000000,249.000000,880.000000,250.000000,1.596200,95700.000000 +-118.260000,33.970000,44.000000,1246.000000,308.000000,1031.000000,295.000000,1.955600,96300.000000 +-118.260000,33.960000,40.000000,1475.000000,347.000000,1222.000000,298.000000,1.530300,95300.000000 +-118.260000,33.960000,39.000000,1542.000000,375.000000,1256.000000,361.000000,1.716700,100000.000000 +-118.260000,33.960000,39.000000,1255.000000,323.000000,902.000000,327.000000,1.581200,94000.000000 +-118.260000,33.960000,37.000000,1625.000000,383.000000,1243.000000,350.000000,1.397100,89800.000000 +-118.260000,33.950000,44.000000,1513.000000,369.000000,1088.000000,344.000000,1.296900,94400.000000 +-118.260000,33.940000,45.000000,1080.000000,218.000000,850.000000,237.000000,2.250000,93400.000000 +-118.260000,33.940000,44.000000,795.000000,181.000000,716.000000,167.000000,2.000000,90300.000000 +-118.260000,33.940000,44.000000,1103.000000,265.000000,760.000000,247.000000,1.688700,99600.000000 +-118.260000,33.930000,42.000000,1433.000000,295.000000,775.000000,293.000000,1.132600,104800.000000 +-118.260000,33.930000,36.000000,1102.000000,247.000000,702.000000,225.000000,1.525600,95400.000000 +-118.260000,33.930000,35.000000,1562.000000,403.000000,1587.000000,406.000000,1.491700,93200.000000 +-118.260000,33.920000,42.000000,3320.000000,682.000000,2105.000000,632.000000,1.980900,104600.000000 +-118.260000,33.920000,40.000000,1076.000000,244.000000,705.000000,255.000000,1.798600,98900.000000 +-118.260000,33.910000,44.000000,892.000000,139.000000,440.000000,159.000000,2.885900,120800.000000 +-118.260000,33.910000,39.000000,935.000000,210.000000,711.000000,193.000000,2.437500,101900.000000 +-118.260000,33.900000,22.000000,894.000000,232.000000,754.000000,222.000000,2.009600,110700.000000 +-118.260000,33.890000,36.000000,923.000000,165.000000,603.000000,191.000000,3.568700,120700.000000 +-118.260000,33.890000,36.000000,2230.000000,417.000000,1395.000000,381.000000,2.849300,109600.000000 +-118.260000,33.850000,25.000000,2324.000000,326.000000,1087.000000,328.000000,5.293000,207000.000000 +-118.260000,33.850000,24.000000,9071.000000,1335.000000,4558.000000,1327.000000,5.542000,197500.000000 +-118.260000,33.830000,28.000000,4112.000000,861.000000,3211.000000,841.000000,4.453900,192200.000000 +-118.260000,33.820000,28.000000,5091.000000,1074.000000,4753.000000,1033.000000,3.647700,117400.000000 +-118.260000,33.800000,41.000000,2004.000000,481.000000,1658.000000,456.000000,3.177900,171100.000000 +-118.260000,33.790000,42.000000,1162.000000,264.000000,1044.000000,241.000000,3.548800,205600.000000 
+-118.260000,33.790000,30.000000,1291.000000,230.000000,835.000000,215.000000,5.500000,181500.000000 +-118.260000,33.780000,36.000000,2191.000000,739.000000,2931.000000,692.000000,2.131100,163100.000000 +-118.260000,33.780000,35.000000,1239.000000,473.000000,1524.000000,387.000000,2.095600,154700.000000 +-118.260000,33.780000,21.000000,2188.000000,706.000000,2265.000000,652.000000,1.992300,164700.000000 +-118.260000,33.770000,36.000000,886.000000,253.000000,809.000000,219.000000,2.454500,164200.000000 +-118.270000,34.920000,20.000000,873.000000,175.000000,422.000000,159.000000,2.958300,91700.000000 +-118.270000,34.680000,19.000000,552.000000,129.000000,314.000000,106.000000,3.212500,185400.000000 +-118.270000,34.460000,10.000000,2184.000000,405.000000,1119.000000,370.000000,4.743700,294000.000000 +-118.270000,34.250000,39.000000,699.000000,150.000000,358.000000,143.000000,4.437500,195800.000000 +-118.270000,34.250000,37.000000,2489.000000,454.000000,1215.000000,431.000000,5.023400,257600.000000 +-118.270000,34.250000,35.000000,779.000000,143.000000,371.000000,150.000000,4.663500,230100.000000 +-118.270000,34.250000,35.000000,2091.000000,360.000000,879.000000,326.000000,4.448500,261900.000000 +-118.270000,34.240000,30.000000,2180.000000,369.000000,1050.000000,390.000000,6.368800,277600.000000 +-118.270000,34.220000,34.000000,8206.000000,1186.000000,3141.000000,1150.000000,7.281200,462200.000000 +-118.270000,34.180000,52.000000,3034.000000,406.000000,1158.000000,399.000000,6.297600,498400.000000 +-118.270000,34.170000,52.000000,2287.000000,295.000000,829.000000,296.000000,7.838300,500001.000000 +-118.270000,34.170000,52.000000,2010.000000,286.000000,908.000000,326.000000,6.913500,374000.000000 +-118.270000,34.170000,48.000000,1560.000000,280.000000,825.000000,269.000000,5.511800,354700.000000 +-118.270000,34.160000,52.000000,830.000000,183.000000,479.000000,179.000000,3.139700,253700.000000 +-118.270000,34.160000,47.000000,1453.000000,356.000000,787.000000,345.000000,3.011400,255500.000000 +-118.270000,34.160000,45.000000,1865.000000,360.000000,973.000000,349.000000,3.658700,321200.000000 +-118.270000,34.160000,15.000000,5036.000000,1299.000000,3164.000000,1175.000000,2.914800,238700.000000 +-118.270000,34.150000,7.000000,2837.000000,776.000000,2287.000000,736.000000,3.008000,229000.000000 +-118.270000,34.150000,25.000000,3018.000000,806.000000,2205.000000,742.000000,3.019900,220200.000000 +-118.270000,34.150000,22.000000,2265.000000,637.000000,1684.000000,561.000000,2.672900,217100.000000 +-118.270000,34.140000,29.000000,3768.000000,1211.000000,2320.000000,1030.000000,2.768500,204500.000000 +-118.270000,34.140000,10.000000,1060.000000,332.000000,1025.000000,288.000000,3.007400,175000.000000 +-118.270000,34.130000,47.000000,1375.000000,359.000000,1512.000000,418.000000,2.107100,208900.000000 +-118.270000,34.130000,46.000000,1613.000000,396.000000,966.000000,416.000000,2.939200,230300.000000 +-118.270000,34.120000,50.000000,2037.000000,440.000000,1089.000000,417.000000,3.575000,230600.000000 +-118.270000,34.110000,41.000000,4138.000000,1130.000000,1859.000000,1030.000000,2.978000,306800.000000 +-118.270000,34.100000,41.000000,3729.000000,740.000000,1364.000000,707.000000,5.777800,412700.000000 +-118.270000,34.090000,52.000000,3225.000000,763.000000,1559.000000,710.000000,3.967400,268800.000000 +-118.270000,34.090000,52.000000,2327.000000,555.000000,1048.000000,491.000000,3.784700,252300.000000 +-118.270000,34.090000,52.000000,1079.000000,222.000000,439.000000,201.000000,4.625000,230800.000000 
+-118.270000,34.090000,44.000000,3646.000000,912.000000,1783.000000,861.000000,2.970900,225000.000000 +-118.270000,34.080000,46.000000,3007.000000,854.000000,2587.000000,814.000000,2.717900,184300.000000 +-118.270000,34.080000,43.000000,962.000000,253.000000,658.000000,244.000000,3.238600,185000.000000 +-118.270000,34.080000,38.000000,2265.000000,801.000000,2899.000000,792.000000,2.552100,157500.000000 +-118.270000,34.080000,35.000000,1147.000000,365.000000,1016.000000,296.000000,2.391300,198800.000000 +-118.270000,34.070000,42.000000,1175.000000,428.000000,1593.000000,407.000000,2.343800,213300.000000 +-118.270000,34.070000,34.000000,786.000000,341.000000,1239.000000,320.000000,2.343800,152100.000000 +-118.270000,34.070000,33.000000,1177.000000,468.000000,1533.000000,430.000000,2.398100,183300.000000 +-118.270000,34.070000,32.000000,1657.000000,579.000000,2071.000000,598.000000,2.113500,152500.000000 +-118.270000,34.070000,27.000000,1190.000000,518.000000,1795.000000,422.000000,1.701600,160000.000000 +-118.270000,34.060000,45.000000,564.000000,353.000000,1172.000000,319.000000,1.494000,187500.000000 +-118.270000,34.060000,33.000000,1416.000000,686.000000,2013.000000,614.000000,1.981800,208300.000000 +-118.270000,34.060000,30.000000,1771.000000,788.000000,2188.000000,764.000000,1.588500,154200.000000 +-118.270000,34.060000,25.000000,1714.000000,1176.000000,3723.000000,1036.000000,1.324100,112500.000000 +-118.270000,34.060000,17.000000,2124.000000,1168.000000,3915.000000,1137.000000,1.134600,137500.000000 +-118.270000,34.050000,52.000000,1292.000000,864.000000,2081.000000,724.000000,0.956300,275000.000000 +-118.270000,34.050000,47.000000,661.000000,359.000000,1406.000000,307.000000,1.316900,112500.000000 +-118.270000,34.050000,37.000000,350.000000,245.000000,1122.000000,248.000000,2.763400,137500.000000 +-118.270000,34.050000,25.000000,1316.000000,836.000000,2796.000000,784.000000,1.786600,325000.000000 +-118.270000,34.050000,24.000000,323.000000,214.000000,751.000000,189.000000,1.830400,225000.000000 +-118.270000,34.050000,12.000000,535.000000,328.000000,1194.000000,365.000000,1.201200,275000.000000 +-118.270000,34.040000,13.000000,1784.000000,730.000000,2158.000000,682.000000,1.703800,118100.000000 +-118.270000,34.030000,51.000000,1280.000000,422.000000,1560.000000,381.000000,1.711500,125000.000000 +-118.270000,34.030000,50.000000,395.000000,232.000000,948.000000,243.000000,1.754600,175000.000000 +-118.270000,34.020000,40.000000,561.000000,284.000000,662.000000,205.000000,0.923400,187500.000000 +-118.270000,34.020000,39.000000,2004.000000,633.000000,3050.000000,621.000000,1.875000,127300.000000 +-118.270000,34.010000,47.000000,921.000000,264.000000,881.000000,221.000000,1.432700,114100.000000 +-118.270000,34.010000,42.000000,990.000000,289.000000,1167.000000,281.000000,1.452400,126800.000000 +-118.270000,34.010000,35.000000,1672.000000,556.000000,2106.000000,519.000000,1.220600,129200.000000 +-118.270000,34.010000,35.000000,1193.000000,355.000000,1784.000000,341.000000,1.865200,116100.000000 +-118.270000,34.000000,48.000000,1869.000000,461.000000,1834.000000,441.000000,1.705200,107400.000000 +-118.270000,34.000000,47.000000,780.000000,237.000000,888.000000,215.000000,1.750000,95800.000000 +-118.270000,34.000000,46.000000,1748.000000,428.000000,1707.000000,409.000000,2.148000,103800.000000 +-118.270000,34.000000,43.000000,1258.000000,381.000000,1276.000000,358.000000,1.891700,106900.000000 +-118.270000,34.000000,40.000000,2099.000000,599.000000,2311.000000,529.000000,1.852000,101500.000000 
+-118.270000,33.990000,41.000000,656.000000,162.000000,730.000000,170.000000,1.804700,101800.000000 +-118.270000,33.990000,38.000000,1407.000000,447.000000,1783.000000,402.000000,1.808600,97100.000000 +-118.270000,33.990000,35.000000,932.000000,294.000000,1153.000000,282.000000,1.488600,100000.000000 +-118.270000,33.990000,30.000000,504.000000,140.000000,529.000000,123.000000,1.953100,100000.000000 +-118.270000,33.980000,45.000000,1696.000000,424.000000,1502.000000,429.000000,1.304200,99200.000000 +-118.270000,33.980000,44.000000,1722.000000,457.000000,2177.000000,401.000000,2.125000,92500.000000 +-118.270000,33.980000,39.000000,2062.000000,588.000000,1933.000000,570.000000,1.380100,97000.000000 +-118.270000,33.980000,30.000000,1966.000000,584.000000,2028.000000,535.000000,1.625000,101500.000000 +-118.270000,33.970000,45.000000,1546.000000,371.000000,1186.000000,366.000000,1.640000,96800.000000 +-118.270000,33.970000,34.000000,1462.000000,394.000000,1310.000000,351.000000,1.155700,90100.000000 +-118.270000,33.960000,38.000000,977.000000,295.000000,1073.000000,292.000000,1.020800,86400.000000 +-118.270000,33.960000,34.000000,1040.000000,276.000000,1083.000000,255.000000,1.646700,90900.000000 +-118.270000,33.950000,43.000000,1156.000000,291.000000,1074.000000,299.000000,1.881400,94900.000000 +-118.270000,33.950000,40.000000,935.000000,226.000000,818.000000,236.000000,1.879800,101300.000000 +-118.270000,33.950000,39.000000,1529.000000,358.000000,1154.000000,357.000000,1.209100,97900.000000 +-118.270000,33.950000,34.000000,1261.000000,315.000000,1027.000000,303.000000,2.294600,88800.000000 +-118.270000,33.950000,29.000000,1579.000000,351.000000,1056.000000,322.000000,2.305600,98500.000000 +-118.270000,33.940000,39.000000,2078.000000,561.000000,1901.000000,504.000000,1.146800,96900.000000 +-118.270000,33.940000,38.000000,1314.000000,318.000000,1080.000000,285.000000,1.587200,89800.000000 +-118.270000,33.940000,37.000000,973.000000,221.000000,842.000000,178.000000,1.664500,94900.000000 +-118.270000,33.930000,38.000000,2073.000000,500.000000,1657.000000,470.000000,1.209800,88400.000000 +-118.270000,33.930000,36.000000,1467.000000,369.000000,1247.000000,347.000000,1.819100,92700.000000 +-118.270000,33.920000,36.000000,1465.000000,346.000000,1147.000000,324.000000,1.726200,88800.000000 +-118.270000,33.920000,35.000000,1818.000000,374.000000,1444.000000,372.000000,2.745000,106800.000000 +-118.270000,33.920000,34.000000,1178.000000,260.000000,1166.000000,244.000000,1.918500,93300.000000 +-118.270000,33.910000,42.000000,1786.000000,358.000000,1318.000000,373.000000,2.625000,101100.000000 +-118.270000,33.910000,37.000000,3018.000000,547.000000,1720.000000,512.000000,2.726900,124100.000000 +-118.270000,33.910000,32.000000,2238.000000,471.000000,1292.000000,467.000000,1.170500,110600.000000 +-118.270000,33.890000,32.000000,1969.000000,397.000000,1349.000000,370.000000,4.465900,138100.000000 +-118.270000,33.860000,33.000000,1685.000000,333.000000,1484.000000,318.000000,4.352700,167000.000000 +-118.270000,33.860000,29.000000,2587.000000,489.000000,2115.000000,475.000000,3.746600,168600.000000 +-118.270000,33.860000,26.000000,1097.000000,167.000000,701.000000,188.000000,6.579900,196600.000000 +-118.270000,33.840000,24.000000,6303.000000,1277.000000,3728.000000,1252.000000,3.922700,208600.000000 +-118.270000,33.830000,28.000000,2152.000000,415.000000,1623.000000,429.000000,4.350000,200500.000000 +-118.270000,33.820000,39.000000,1357.000000,249.000000,763.000000,229.000000,4.250000,200300.000000 
+-118.270000,33.820000,37.000000,943.000000,218.000000,803.000000,216.000000,5.228700,191100.000000 +-118.270000,33.820000,36.000000,1593.000000,334.000000,1427.000000,320.000000,4.401500,166900.000000 +-118.270000,33.820000,33.000000,1596.000000,337.000000,1650.000000,329.000000,4.368700,173500.000000 +-118.270000,33.820000,28.000000,1642.000000,434.000000,1575.000000,420.000000,4.129200,201900.000000 +-118.270000,33.810000,42.000000,865.000000,208.000000,811.000000,218.000000,3.862100,165300.000000 +-118.270000,33.810000,38.000000,1607.000000,337.000000,1130.000000,334.000000,4.482100,190700.000000 +-118.270000,33.800000,38.000000,1446.000000,327.000000,980.000000,319.000000,3.350000,177700.000000 +-118.270000,33.800000,28.000000,4698.000000,902.000000,3287.000000,881.000000,4.850800,215900.000000 +-118.270000,33.790000,39.000000,1417.000000,359.000000,1450.000000,367.000000,2.846200,172000.000000 +-118.270000,33.790000,36.000000,2218.000000,561.000000,1789.000000,527.000000,3.161300,182300.000000 +-118.270000,33.790000,31.000000,1535.000000,369.000000,1291.000000,340.000000,2.937500,174000.000000 +-118.270000,33.790000,26.000000,2109.000000,651.000000,2120.000000,605.000000,2.144700,158700.000000 +-118.270000,33.780000,21.000000,3354.000000,1174.000000,4426.000000,1110.000000,2.526200,167300.000000 +-118.270000,33.770000,26.000000,2272.000000,694.000000,2567.000000,595.000000,1.996400,150600.000000 +-118.280000,34.760000,19.000000,3430.000000,601.000000,1817.000000,571.000000,4.787500,163600.000000 +-118.280000,34.260000,32.000000,1079.000000,207.000000,486.000000,167.000000,4.983300,213000.000000 +-118.280000,34.250000,35.000000,2045.000000,450.000000,1166.000000,407.000000,3.521400,197600.000000 +-118.280000,34.250000,29.000000,2559.000000,787.000000,1886.000000,769.000000,2.603600,162100.000000 +-118.280000,34.240000,32.000000,2542.000000,526.000000,1278.000000,493.000000,4.450000,263600.000000 +-118.280000,34.240000,29.000000,3390.000000,580.000000,1543.000000,576.000000,5.618400,316900.000000 +-118.280000,34.180000,52.000000,2602.000000,418.000000,1137.000000,419.000000,5.318500,358000.000000 +-118.280000,34.180000,50.000000,2195.000000,336.000000,878.000000,309.000000,6.884000,365600.000000 +-118.280000,34.180000,47.000000,2243.000000,339.000000,911.000000,319.000000,7.404600,446800.000000 +-118.280000,34.170000,52.000000,2332.000000,433.000000,1135.000000,440.000000,5.565800,331200.000000 +-118.280000,34.160000,49.000000,1393.000000,290.000000,605.000000,282.000000,2.949100,257400.000000 +-118.280000,34.110000,52.000000,2036.000000,348.000000,775.000000,332.000000,5.412200,397500.000000 +-118.280000,34.110000,52.000000,1803.000000,437.000000,787.000000,388.000000,4.578100,360500.000000 +-118.280000,34.110000,38.000000,4453.000000,1156.000000,1830.000000,1099.000000,3.618100,495600.000000 +-118.280000,34.100000,49.000000,2843.000000,880.000000,2004.000000,796.000000,2.787500,217300.000000 +-118.280000,34.100000,49.000000,1767.000000,467.000000,1066.000000,438.000000,3.095800,210900.000000 +-118.280000,34.100000,48.000000,805.000000,246.000000,633.000000,235.000000,2.342100,200000.000000 +-118.280000,34.100000,44.000000,2728.000000,585.000000,1227.000000,567.000000,4.060200,324000.000000 +-118.280000,34.090000,52.000000,2273.000000,663.000000,1480.000000,597.000000,2.333300,196500.000000 +-118.280000,34.090000,49.000000,3828.000000,1197.000000,2862.000000,1009.000000,2.467700,219200.000000 
+-118.280000,34.090000,38.000000,790.000000,275.000000,664.000000,194.000000,3.035700,175000.000000 +-118.280000,34.080000,52.000000,2465.000000,773.000000,2328.000000,746.000000,2.617800,203100.000000 +-118.280000,34.080000,42.000000,997.000000,374.000000,982.000000,372.000000,2.942300,200000.000000 +-118.280000,34.080000,40.000000,1630.000000,543.000000,1568.000000,510.000000,2.736600,169100.000000 +-118.280000,34.080000,39.000000,3162.000000,896.000000,1934.000000,818.000000,2.875000,213500.000000 +-118.280000,34.070000,41.000000,1072.000000,331.000000,1111.000000,314.000000,1.923300,207100.000000 +-118.280000,34.070000,32.000000,1777.000000,536.000000,1684.000000,489.000000,2.363600,190000.000000 +-118.280000,34.070000,25.000000,7522.000000,3179.000000,7221.000000,2902.000000,2.017300,177500.000000 +-118.280000,34.070000,24.000000,3247.000000,1281.000000,2642.000000,1182.000000,2.463200,216700.000000 +-118.280000,34.070000,14.000000,1924.000000,926.000000,2226.000000,792.000000,2.255200,265900.000000 +-118.280000,34.060000,52.000000,936.000000,454.000000,990.000000,354.000000,1.112200,187500.000000 +-118.280000,34.060000,52.000000,1261.000000,616.000000,2309.000000,581.000000,1.618400,225000.000000 +-118.280000,34.060000,42.000000,2472.000000,1368.000000,3795.000000,1179.000000,1.225400,162500.000000 +-118.280000,34.060000,17.000000,2518.000000,1196.000000,3051.000000,1000.000000,1.719900,175000.000000 +-118.280000,34.060000,14.000000,1787.000000,853.000000,2251.000000,763.000000,1.164200,400000.000000 +-118.280000,34.050000,44.000000,1273.000000,474.000000,1754.000000,460.000000,1.603700,275000.000000 +-118.280000,34.050000,41.000000,1788.000000,774.000000,2931.000000,702.000000,1.441300,158900.000000 +-118.280000,34.050000,31.000000,1525.000000,730.000000,2510.000000,652.000000,1.635500,162500.000000 +-118.280000,34.050000,28.000000,1306.000000,637.000000,2079.000000,598.000000,1.461500,275000.000000 +-118.280000,34.040000,48.000000,1521.000000,513.000000,1772.000000,458.000000,2.223200,162500.000000 +-118.280000,34.040000,39.000000,1155.000000,433.000000,1857.000000,424.000000,2.169600,153400.000000 +-118.280000,34.040000,25.000000,1582.000000,780.000000,2390.000000,719.000000,1.416700,200000.000000 +-118.280000,34.040000,20.000000,1193.000000,454.000000,1880.000000,453.000000,2.180600,180000.000000 +-118.280000,34.040000,19.000000,460.000000,241.000000,890.000000,229.000000,1.608900,162500.000000 +-118.280000,34.030000,45.000000,943.000000,289.000000,953.000000,238.000000,2.067300,151600.000000 +-118.280000,34.030000,41.000000,1933.000000,791.000000,3121.000000,719.000000,1.853900,147500.000000 +-118.280000,34.030000,25.000000,1407.000000,550.000000,1193.000000,472.000000,1.298900,225000.000000 +-118.280000,34.020000,52.000000,281.000000,103.000000,470.000000,96.000000,1.937500,38800.000000 +-118.280000,34.020000,46.000000,1098.000000,426.000000,1510.000000,374.000000,2.138200,156300.000000 +-118.280000,34.010000,50.000000,2601.000000,794.000000,3080.000000,770.000000,1.865600,122900.000000 +-118.280000,34.010000,46.000000,441.000000,167.000000,621.000000,144.000000,1.882400,162500.000000 +-118.280000,34.010000,34.000000,2305.000000,775.000000,2450.000000,740.000000,1.714300,132000.000000 +-118.280000,34.000000,48.000000,1514.000000,376.000000,1353.000000,344.000000,2.160700,96100.000000 +-118.280000,34.000000,46.000000,1650.000000,463.000000,1992.000000,458.000000,2.340300,114100.000000 
+-118.280000,34.000000,44.000000,2636.000000,725.000000,2182.000000,651.000000,1.432000,124000.000000 +-118.280000,34.000000,43.000000,713.000000,245.000000,880.000000,237.000000,1.206500,103600.000000 +-118.280000,34.000000,42.000000,1534.000000,417.000000,1295.000000,380.000000,2.093800,119200.000000 +-118.280000,34.000000,38.000000,3335.000000,921.000000,3612.000000,887.000000,2.125000,118800.000000 +-118.280000,33.990000,52.000000,1283.000000,342.000000,1363.000000,329.000000,2.584800,101900.000000 +-118.280000,33.990000,49.000000,2174.000000,481.000000,1861.000000,484.000000,1.715900,95000.000000 +-118.280000,33.990000,46.000000,2577.000000,703.000000,2446.000000,687.000000,1.275000,98300.000000 +-118.280000,33.990000,46.000000,1211.000000,321.000000,1153.000000,282.000000,1.784900,99300.000000 +-118.280000,33.990000,38.000000,1454.000000,323.000000,1098.000000,297.000000,1.510900,104000.000000 +-118.280000,33.990000,37.000000,1971.000000,513.000000,1673.000000,464.000000,1.462500,103000.000000 +-118.280000,33.980000,47.000000,865.000000,193.000000,782.000000,217.000000,2.241100,93000.000000 +-118.280000,33.980000,45.000000,1720.000000,416.000000,1382.000000,365.000000,0.933700,92300.000000 +-118.280000,33.980000,43.000000,1240.000000,312.000000,1100.000000,311.000000,1.575000,97500.000000 +-118.280000,33.980000,39.000000,1306.000000,345.000000,1332.000000,331.000000,1.956400,92200.000000 +-118.280000,33.980000,19.000000,883.000000,313.000000,726.000000,277.000000,0.980900,121400.000000 +-118.280000,33.970000,40.000000,2180.000000,642.000000,2464.000000,631.000000,1.552100,90100.000000 +-118.280000,33.970000,38.000000,1819.000000,497.000000,2110.000000,499.000000,1.602700,97300.000000 +-118.280000,33.970000,35.000000,2305.000000,634.000000,1978.000000,568.000000,1.375000,100000.000000 +-118.280000,33.970000,34.000000,2771.000000,802.000000,2782.000000,715.000000,1.665200,99000.000000 +-118.280000,33.970000,31.000000,2017.000000,566.000000,2063.000000,521.000000,1.921900,107000.000000 +-118.280000,33.970000,31.000000,1068.000000,271.000000,1091.000000,281.000000,1.689000,102600.000000 +-118.280000,33.960000,42.000000,1604.000000,399.000000,1581.000000,387.000000,1.765600,96700.000000 +-118.280000,33.960000,42.000000,1206.000000,304.000000,1167.000000,250.000000,1.615000,101300.000000 +-118.280000,33.960000,41.000000,1175.000000,340.000000,1241.000000,352.000000,1.227300,98400.000000 +-118.280000,33.960000,37.000000,1812.000000,500.000000,1640.000000,447.000000,1.934800,99100.000000 +-118.280000,33.960000,34.000000,2074.000000,562.000000,1913.000000,514.000000,1.615600,102100.000000 +-118.280000,33.960000,23.000000,1983.000000,611.000000,2048.000000,600.000000,1.531300,91400.000000 +-118.280000,33.950000,41.000000,835.000000,208.000000,707.000000,192.000000,1.410300,86200.000000 +-118.280000,33.950000,40.000000,2044.000000,538.000000,2150.000000,524.000000,2.143700,94800.000000 +-118.280000,33.940000,9.000000,456.000000,130.000000,438.000000,114.000000,0.895200,81300.000000 +-118.280000,33.940000,44.000000,1631.000000,338.000000,1197.000000,355.000000,3.078800,100000.000000 +-118.280000,33.940000,43.000000,1201.000000,292.000000,840.000000,252.000000,2.791700,105600.000000 +-118.280000,33.930000,52.000000,117.000000,33.000000,74.000000,45.000000,0.499900,90600.000000 +-118.280000,33.930000,45.000000,529.000000,112.000000,448.000000,120.000000,3.583300,90600.000000 +-118.280000,33.930000,43.000000,269.000000,74.000000,295.000000,79.000000,2.296900,90600.000000 
+-118.280000,33.930000,21.000000,847.000000,278.000000,1283.000000,277.000000,1.432900,94100.000000 +-118.280000,33.920000,39.000000,1472.000000,302.000000,1036.000000,318.000000,3.000000,110000.000000 +-118.280000,33.920000,39.000000,1274.000000,282.000000,975.000000,277.000000,1.511400,90400.000000 +-118.280000,33.920000,37.000000,742.000000,151.000000,729.000000,144.000000,3.055000,105400.000000 +-118.280000,33.920000,37.000000,1761.000000,409.000000,1309.000000,357.000000,2.187500,175900.000000 +-118.280000,33.910000,41.000000,620.000000,133.000000,642.000000,162.000000,2.654600,159600.000000 +-118.280000,33.880000,19.000000,2758.000000,675.000000,2047.000000,591.000000,2.661800,179700.000000 +-118.280000,33.850000,27.000000,489.000000,98.000000,403.000000,97.000000,5.144000,180800.000000 +-118.280000,33.830000,28.000000,880.000000,168.000000,717.000000,142.000000,4.546900,175700.000000 +-118.280000,33.820000,30.000000,3615.000000,760.000000,2813.000000,752.000000,5.384900,217700.000000 +-118.280000,33.820000,26.000000,4586.000000,1042.000000,3680.000000,1027.000000,4.174000,205100.000000 +-118.280000,33.810000,29.000000,2755.000000,508.000000,2046.000000,488.000000,5.203400,212400.000000 +-118.280000,33.800000,38.000000,1471.000000,329.000000,1207.000000,335.000000,4.000000,165500.000000 +-118.280000,33.790000,36.000000,1989.000000,458.000000,1668.000000,455.000000,3.300900,168000.000000 +-118.280000,33.780000,37.000000,1212.000000,304.000000,1076.000000,293.000000,3.211500,160100.000000 +-118.280000,33.780000,20.000000,2233.000000,591.000000,1915.000000,558.000000,3.201100,169100.000000 +-118.280000,33.750000,21.000000,2442.000000,719.000000,1916.000000,646.000000,1.227600,208300.000000 +-118.280000,33.750000,18.000000,393.000000,189.000000,429.000000,188.000000,1.839300,187500.000000 +-118.280000,33.740000,44.000000,1853.000000,556.000000,2090.000000,539.000000,1.858400,248100.000000 +-118.280000,33.740000,40.000000,1751.000000,512.000000,1939.000000,503.000000,1.539400,200000.000000 +-118.280000,33.740000,16.000000,855.000000,271.000000,486.000000,250.000000,0.759100,350000.000000 +-118.280000,33.730000,45.000000,2137.000000,559.000000,1550.000000,529.000000,1.916700,227200.000000 +-118.280000,33.730000,39.000000,2602.000000,802.000000,2178.000000,737.000000,2.046900,234500.000000 +-118.280000,33.680000,8.000000,2842.000000,522.000000,1624.000000,510.000000,3.728200,287500.000000 +-118.290000,34.650000,18.000000,6893.000000,1372.000000,2837.000000,1221.000000,3.317300,218400.000000 +-118.290000,34.360000,34.000000,503.000000,99.000000,275.000000,68.000000,4.549100,375000.000000 +-118.290000,34.260000,33.000000,3177.000000,713.000000,1845.000000,612.000000,4.008000,191100.000000 +-118.290000,34.250000,8.000000,5568.000000,1514.000000,3565.000000,1374.000000,3.079500,161500.000000 +-118.290000,34.250000,19.000000,1988.000000,594.000000,1399.000000,527.000000,2.472700,175000.000000 +-118.290000,34.180000,36.000000,3120.000000,620.000000,1441.000000,612.000000,3.904100,320400.000000 +-118.290000,34.170000,17.000000,3852.000000,1066.000000,2986.000000,993.000000,2.348200,255400.000000 +-118.290000,34.170000,12.000000,2238.000000,682.000000,1882.000000,611.000000,2.900000,208300.000000 +-118.290000,34.160000,42.000000,413.000000,107.000000,349.000000,107.000000,4.343800,189800.000000 +-118.290000,34.160000,35.000000,1257.000000,318.000000,764.000000,319.000000,3.208300,238000.000000 +-118.290000,34.160000,31.000000,1262.000000,338.000000,1019.000000,332.000000,3.708300,241900.000000 
+-118.290000,34.110000,49.000000,2850.000000,379.000000,1113.000000,380.000000,12.959100,500001.000000 +-118.290000,34.110000,48.000000,1448.000000,295.000000,681.000000,287.000000,3.233300,436400.000000 +-118.290000,34.110000,40.000000,2681.000000,737.000000,1144.000000,669.000000,3.046100,264300.000000 +-118.290000,34.110000,30.000000,2774.000000,570.000000,1076.000000,580.000000,5.296000,500001.000000 +-118.290000,34.110000,24.000000,3696.000000,1125.000000,1685.000000,1031.000000,2.378900,266700.000000 +-118.290000,34.100000,43.000000,1711.000000,443.000000,1190.000000,429.000000,3.517200,265500.000000 +-118.290000,34.100000,39.000000,631.000000,298.000000,744.000000,274.000000,2.705400,162500.000000 +-118.290000,34.100000,39.000000,2196.000000,582.000000,1165.000000,538.000000,2.941700,254200.000000 +-118.290000,34.090000,52.000000,1272.000000,322.000000,984.000000,353.000000,1.906300,261600.000000 +-118.290000,34.090000,43.000000,1583.000000,658.000000,1941.000000,614.000000,1.983500,225000.000000 +-118.290000,34.090000,39.000000,336.000000,173.000000,586.000000,151.000000,1.805600,262500.000000 +-118.290000,34.090000,35.000000,2624.000000,1116.000000,3548.000000,1008.000000,2.013200,198400.000000 +-118.290000,34.090000,34.000000,2716.000000,1114.000000,2991.000000,1021.000000,1.751400,187500.000000 +-118.290000,34.090000,29.000000,2240.000000,792.000000,2254.000000,739.000000,2.331700,172500.000000 +-118.290000,34.090000,28.000000,1562.000000,648.000000,1974.000000,597.000000,1.976600,112500.000000 +-118.290000,34.080000,49.000000,649.000000,315.000000,987.000000,329.000000,1.680600,316700.000000 +-118.290000,34.080000,43.000000,3056.000000,1345.000000,3920.000000,1304.000000,1.925000,300000.000000 +-118.290000,34.080000,38.000000,380.000000,130.000000,445.000000,140.000000,1.928600,137500.000000 +-118.290000,34.080000,34.000000,479.000000,182.000000,557.000000,170.000000,1.525000,210000.000000 +-118.290000,34.080000,23.000000,1864.000000,937.000000,2795.000000,858.000000,1.849500,212500.000000 +-118.290000,34.070000,26.000000,2302.000000,1124.000000,2660.000000,1004.000000,2.356700,253100.000000 +-118.290000,34.070000,24.000000,4021.000000,1707.000000,3727.000000,1529.000000,1.736500,112500.000000 +-118.290000,34.070000,22.000000,492.000000,269.000000,634.000000,261.000000,1.640600,300000.000000 +-118.290000,34.070000,19.000000,3013.000000,1118.000000,2465.000000,1008.000000,2.538600,290600.000000 +-118.290000,34.060000,9.000000,1554.000000,815.000000,1704.000000,761.000000,2.018500,141700.000000 +-118.290000,34.060000,46.000000,1759.000000,1012.000000,2716.000000,877.000000,2.163700,350000.000000 +-118.290000,34.060000,42.000000,3894.000000,2293.000000,6846.000000,2156.000000,1.555300,70000.000000 +-118.290000,34.060000,27.000000,2456.000000,1111.000000,4137.000000,1104.000000,1.595400,187500.000000 +-118.290000,34.060000,23.000000,2040.000000,778.000000,2235.000000,697.000000,1.930900,233300.000000 +-118.290000,34.050000,31.000000,2818.000000,1252.000000,4126.000000,1200.000000,2.053000,229200.000000 +-118.290000,34.050000,18.000000,3585.000000,1661.000000,5229.000000,1534.000000,1.847000,250000.000000 +-118.290000,34.040000,48.000000,1353.000000,488.000000,1945.000000,487.000000,2.435900,123700.000000 +-118.290000,34.040000,44.000000,1941.000000,579.000000,2049.000000,535.000000,2.040500,143000.000000 +-118.290000,34.040000,41.000000,659.000000,291.000000,1224.000000,290.000000,2.081700,132500.000000 
+-118.290000,34.040000,32.000000,432.000000,182.000000,702.000000,186.000000,2.147100,125000.000000 +-118.290000,34.040000,31.000000,700.000000,299.000000,1272.000000,298.000000,2.154200,128100.000000 +-118.290000,34.030000,42.000000,1680.000000,557.000000,2099.000000,526.000000,1.916700,136400.000000 +-118.290000,34.030000,38.000000,1501.000000,437.000000,1777.000000,441.000000,2.084800,135200.000000 +-118.290000,34.030000,29.000000,3544.000000,1384.000000,3323.000000,1213.000000,1.021900,258300.000000 +-118.290000,34.030000,22.000000,3313.000000,1235.000000,2381.000000,1063.000000,0.747300,168800.000000 +-118.290000,34.020000,26.000000,2001.000000,582.000000,2044.000000,557.000000,1.156300,118800.000000 +-118.290000,34.020000,21.000000,1641.000000,491.000000,1526.000000,453.000000,2.087000,141300.000000 +-118.290000,34.010000,50.000000,2238.000000,673.000000,2247.000000,583.000000,1.650500,125000.000000 +-118.290000,34.010000,42.000000,814.000000,223.000000,511.000000,188.000000,2.394200,123200.000000 +-118.290000,34.010000,40.000000,885.000000,312.000000,799.000000,221.000000,1.166700,143800.000000 +-118.290000,34.010000,39.000000,751.000000,207.000000,1010.000000,231.000000,1.603600,137500.000000 +-118.290000,34.010000,30.000000,1385.000000,518.000000,1730.000000,472.000000,1.053900,142500.000000 +-118.290000,34.000000,52.000000,2579.000000,494.000000,1558.000000,458.000000,2.080900,109600.000000 +-118.290000,34.000000,52.000000,1319.000000,295.000000,898.000000,271.000000,2.772700,128600.000000 +-118.290000,34.000000,44.000000,1753.000000,387.000000,1165.000000,380.000000,2.135400,105800.000000 +-118.290000,33.990000,46.000000,2608.000000,636.000000,1766.000000,596.000000,1.584600,114800.000000 +-118.290000,33.990000,43.000000,1902.000000,398.000000,1153.000000,363.000000,1.937500,112900.000000 +-118.290000,33.990000,39.000000,979.000000,235.000000,857.000000,236.000000,2.554700,108900.000000 +-118.290000,33.980000,48.000000,1124.000000,231.000000,783.000000,223.000000,3.444400,93100.000000 +-118.290000,33.980000,46.000000,1118.000000,300.000000,786.000000,254.000000,1.404200,110000.000000 +-118.290000,33.980000,44.000000,2261.000000,555.000000,1348.000000,455.000000,1.912500,97200.000000 +-118.290000,33.980000,42.000000,2833.000000,768.000000,2542.000000,725.000000,1.347900,100000.000000 +-118.290000,33.980000,41.000000,1582.000000,416.000000,1422.000000,370.000000,1.051600,108300.000000 +-118.290000,33.980000,30.000000,1162.000000,318.000000,1207.000000,289.000000,1.223000,100000.000000 +-118.290000,33.970000,48.000000,3139.000000,587.000000,1319.000000,506.000000,3.520800,134200.000000 +-118.290000,33.970000,43.000000,2660.000000,672.000000,2133.000000,588.000000,1.773400,107300.000000 +-118.290000,33.960000,39.000000,1340.000000,409.000000,1463.000000,367.000000,1.529400,111400.000000 +-118.290000,33.960000,36.000000,1717.000000,417.000000,902.000000,368.000000,1.486800,113200.000000 +-118.290000,33.960000,32.000000,3508.000000,917.000000,2794.000000,839.000000,1.554000,100000.000000 +-118.290000,33.950000,40.000000,2808.000000,695.000000,2357.000000,627.000000,1.965500,102300.000000 +-118.290000,33.950000,39.000000,1701.000000,428.000000,1468.000000,411.000000,1.970200,93200.000000 +-118.290000,33.950000,32.000000,721.000000,205.000000,842.000000,208.000000,1.602900,89700.000000 +-118.290000,33.950000,31.000000,2839.000000,792.000000,2390.000000,729.000000,2.000000,109800.000000 +-118.290000,33.940000,38.000000,2407.000000,630.000000,1774.000000,562.000000,1.561500,108600.000000 
+-118.290000,33.940000,34.000000,1089.000000,278.000000,995.000000,315.000000,2.335200,107700.000000 +-118.290000,33.940000,32.000000,2701.000000,708.000000,1880.000000,590.000000,1.671600,123800.000000 +-118.290000,33.930000,43.000000,2021.000000,379.000000,1051.000000,352.000000,3.383600,129900.000000 +-118.290000,33.930000,41.000000,896.000000,198.000000,605.000000,168.000000,2.277800,128100.000000 +-118.290000,33.930000,32.000000,1815.000000,488.000000,1715.000000,475.000000,1.724400,111200.000000 +-118.290000,33.920000,40.000000,1935.000000,461.000000,1616.000000,433.000000,2.875000,120200.000000 +-118.290000,33.920000,34.000000,1799.000000,362.000000,1293.000000,355.000000,3.692000,145200.000000 +-118.290000,33.920000,34.000000,1374.000000,240.000000,906.000000,248.000000,5.329200,155500.000000 +-118.290000,33.920000,23.000000,2503.000000,532.000000,1735.000000,505.000000,2.736800,162800.000000 +-118.290000,33.910000,28.000000,1501.000000,446.000000,1028.000000,418.000000,2.304300,177500.000000 +-118.290000,33.900000,42.000000,1273.000000,309.000000,1203.000000,348.000000,4.463600,162000.000000 +-118.290000,33.890000,32.000000,2355.000000,583.000000,1605.000000,571.000000,4.217100,218200.000000 +-118.290000,33.880000,32.000000,2307.000000,493.000000,1754.000000,528.000000,4.317000,232800.000000 +-118.290000,33.880000,27.000000,2082.000000,612.000000,2009.000000,548.000000,2.906300,184100.000000 +-118.290000,33.880000,21.000000,4946.000000,1231.000000,3186.000000,1167.000000,3.328100,237000.000000 +-118.290000,33.870000,32.000000,1700.000000,340.000000,864.000000,317.000000,4.381000,238700.000000 +-118.290000,33.850000,10.000000,1391.000000,420.000000,1378.000000,377.000000,1.904900,222200.000000 +-118.290000,33.840000,34.000000,2617.000000,558.000000,1396.000000,515.000000,5.061000,218000.000000 +-118.290000,33.840000,23.000000,3626.000000,799.000000,2321.000000,731.000000,4.739300,237900.000000 +-118.290000,33.840000,11.000000,2274.000000,617.000000,1897.000000,622.000000,3.509400,162900.000000 +-118.290000,33.820000,21.000000,4383.000000,901.000000,2689.000000,913.000000,3.437500,218800.000000 +-118.290000,33.810000,19.000000,7023.000000,1538.000000,3993.000000,1412.000000,5.053200,218200.000000 +-118.290000,33.800000,21.000000,9944.000000,1623.000000,4185.000000,1582.000000,4.526000,329400.000000 +-118.290000,33.790000,16.000000,1867.000000,571.000000,951.000000,498.000000,3.342700,154200.000000 +-118.290000,33.790000,10.000000,3708.000000,1016.000000,2855.000000,948.000000,2.000000,165400.000000 +-118.290000,33.740000,52.000000,1438.000000,472.000000,1018.000000,399.000000,2.218800,306700.000000 +-118.290000,33.740000,41.000000,1382.000000,361.000000,905.000000,344.000000,2.750000,238300.000000 +-118.290000,33.740000,30.000000,2074.000000,533.000000,1311.000000,531.000000,2.032900,225800.000000 +-118.290000,33.740000,29.000000,1503.000000,411.000000,1368.000000,390.000000,2.147300,195300.000000 +-118.290000,33.730000,43.000000,1854.000000,519.000000,1151.000000,486.000000,2.575900,225000.000000 +-118.290000,33.730000,30.000000,3161.000000,813.000000,1865.000000,771.000000,2.713900,231700.000000 +-118.290000,33.730000,21.000000,2492.000000,711.000000,1699.000000,672.000000,2.138200,242300.000000 +-118.290000,33.720000,39.000000,2651.000000,590.000000,1103.000000,508.000000,3.274000,254300.000000 +-118.290000,33.720000,25.000000,2469.000000,584.000000,1253.000000,535.000000,3.193200,257500.000000 
+-118.290000,33.720000,21.000000,1568.000000,452.000000,801.000000,422.000000,3.510900,225000.000000 +-118.290000,33.710000,40.000000,1933.000000,475.000000,902.000000,412.000000,4.250000,332800.000000 +-118.290000,33.710000,40.000000,1644.000000,471.000000,780.000000,416.000000,3.107100,464300.000000 +-118.290000,33.710000,36.000000,3135.000000,746.000000,1815.000000,697.000000,3.759600,300000.000000 +-118.300000,37.170000,22.000000,3480.000000,673.000000,1541.000000,636.000000,2.750000,94500.000000 +-118.300000,34.260000,40.000000,1065.000000,214.000000,605.000000,183.000000,4.196400,185900.000000 +-118.300000,34.260000,28.000000,1643.000000,489.000000,1142.000000,458.000000,3.160700,200600.000000 +-118.300000,34.250000,36.000000,1300.000000,304.000000,688.000000,261.000000,3.152300,176700.000000 +-118.300000,34.190000,52.000000,2962.000000,468.000000,1364.000000,466.000000,4.904200,343500.000000 +-118.300000,34.190000,51.000000,1502.000000,243.000000,586.000000,231.000000,4.375000,332400.000000 +-118.300000,34.190000,14.000000,3615.000000,913.000000,1924.000000,852.000000,3.508300,280900.000000 +-118.300000,34.180000,5.000000,5492.000000,1549.000000,2997.000000,1405.000000,3.320500,172100.000000 +-118.300000,34.180000,13.000000,7174.000000,1997.000000,4293.000000,1872.000000,3.097300,251900.000000 +-118.300000,34.170000,37.000000,350.000000,115.000000,342.000000,111.000000,3.068700,200000.000000 +-118.300000,34.170000,30.000000,48.000000,14.000000,74.000000,16.000000,5.005600,162500.000000 +-118.300000,34.170000,17.000000,4041.000000,1169.000000,3309.000000,1117.000000,2.601600,222400.000000 +-118.300000,34.170000,16.000000,1353.000000,398.000000,1211.000000,357.000000,3.155100,205000.000000 +-118.300000,34.160000,40.000000,1875.000000,460.000000,869.000000,438.000000,3.232100,243600.000000 +-118.300000,34.160000,35.000000,3213.000000,874.000000,2401.000000,819.000000,2.834200,256800.000000 +-118.300000,34.110000,52.000000,3136.000000,675.000000,1213.000000,606.000000,3.580600,391900.000000 +-118.300000,34.110000,52.000000,1954.000000,245.000000,645.000000,237.000000,6.939100,500001.000000 +-118.300000,34.110000,25.000000,1590.000000,218.000000,568.000000,206.000000,8.438900,500001.000000 +-118.300000,34.100000,38.000000,2067.000000,914.000000,2717.000000,853.000000,1.764100,250000.000000 +-118.300000,34.100000,37.000000,5305.000000,1980.000000,3895.000000,1874.000000,2.067200,283300.000000 +-118.300000,34.100000,36.000000,2284.000000,899.000000,1964.000000,839.000000,1.929700,203300.000000 +-118.300000,34.100000,35.000000,7517.000000,2961.000000,5899.000000,2769.000000,1.935400,340000.000000 +-118.300000,34.100000,25.000000,3926.000000,1715.000000,4865.000000,1612.000000,1.611200,262500.000000 +-118.300000,34.090000,40.000000,3058.000000,1215.000000,3953.000000,1223.000000,1.815600,218800.000000 +-118.300000,34.090000,36.000000,2332.000000,993.000000,3155.000000,927.000000,2.261200,230400.000000 +-118.300000,34.090000,32.000000,2202.000000,674.000000,2178.000000,635.000000,2.030700,226700.000000 +-118.300000,34.090000,29.000000,3245.000000,1190.000000,3906.000000,1102.000000,2.192700,253300.000000 +-118.300000,34.090000,25.000000,2345.000000,852.000000,2860.000000,862.000000,1.449700,205600.000000 +-118.300000,34.080000,36.000000,1276.000000,503.000000,1502.000000,450.000000,2.176600,205600.000000 +-118.300000,34.080000,34.000000,2501.000000,1047.000000,3326.000000,970.000000,1.877100,247500.000000 
+-118.300000,34.080000,34.000000,1562.000000,651.000000,1774.000000,559.000000,1.568500,225000.000000 +-118.300000,34.080000,32.000000,2880.000000,1063.000000,3646.000000,1028.000000,2.384600,258300.000000 +-118.300000,34.070000,46.000000,5677.000000,2610.000000,7443.000000,2406.000000,1.823800,237500.000000 +-118.300000,34.070000,36.000000,2657.000000,738.000000,2274.000000,723.000000,3.425000,281700.000000 +-118.300000,34.070000,31.000000,1489.000000,664.000000,1793.000000,556.000000,2.434800,230600.000000 +-118.300000,34.070000,28.000000,5221.000000,2530.000000,5840.000000,2374.000000,1.882900,300000.000000 +-118.300000,34.070000,26.000000,2107.000000,757.000000,2660.000000,740.000000,2.337500,282300.000000 +-118.300000,34.070000,18.000000,3759.000000,1621.000000,3296.000000,1462.000000,2.270800,175000.000000 +-118.300000,34.060000,43.000000,1366.000000,740.000000,942.000000,672.000000,1.695300,150000.000000 +-118.300000,34.060000,33.000000,2437.000000,1283.000000,3906.000000,1084.000000,2.033200,270000.000000 +-118.300000,34.060000,23.000000,2512.000000,1203.000000,3720.000000,1118.000000,1.789600,322200.000000 +-118.300000,34.060000,21.000000,3960.000000,1490.000000,3468.000000,1335.000000,1.821400,475000.000000 +-118.300000,34.060000,20.000000,1782.000000,896.000000,1749.000000,823.000000,2.209400,75000.000000 +-118.300000,34.050000,46.000000,1386.000000,457.000000,1845.000000,485.000000,2.141400,157700.000000 +-118.300000,34.050000,44.000000,1612.000000,650.000000,2028.000000,593.000000,1.915200,115600.000000 +-118.300000,34.050000,42.000000,1476.000000,610.000000,1605.000000,545.000000,1.721000,214300.000000 +-118.300000,34.050000,39.000000,993.000000,506.000000,1765.000000,464.000000,1.278600,121900.000000 +-118.300000,34.050000,36.000000,1723.000000,569.000000,1664.000000,501.000000,1.932300,161100.000000 +-118.300000,34.050000,34.000000,1453.000000,588.000000,1987.000000,589.000000,2.096000,187500.000000 +-118.300000,34.040000,50.000000,1757.000000,522.000000,2080.000000,488.000000,1.722500,180000.000000 +-118.300000,34.040000,37.000000,1470.000000,399.000000,1579.000000,390.000000,2.006000,150000.000000 +-118.300000,34.040000,35.000000,1090.000000,345.000000,1605.000000,330.000000,2.151800,152800.000000 +-118.300000,34.040000,24.000000,2092.000000,585.000000,1757.000000,538.000000,1.710900,175000.000000 +-118.300000,34.030000,47.000000,2241.000000,559.000000,1775.000000,504.000000,2.157100,147900.000000 +-118.300000,34.030000,41.000000,1653.000000,426.000000,1868.000000,393.000000,1.780000,162500.000000 +-118.300000,34.030000,40.000000,1695.000000,374.000000,1138.000000,357.000000,2.712500,150000.000000 +-118.300000,34.030000,37.000000,2781.000000,766.000000,2586.000000,729.000000,1.856400,187500.000000 +-118.300000,34.020000,42.000000,2386.000000,670.000000,2327.000000,661.000000,1.669900,108000.000000 +-118.300000,34.020000,34.000000,3184.000000,772.000000,2474.000000,705.000000,1.631000,137500.000000 +-118.300000,34.020000,31.000000,1933.000000,478.000000,1522.000000,423.000000,1.578100,119300.000000 +-118.300000,34.010000,48.000000,4217.000000,1095.000000,3298.000000,949.000000,1.915200,122300.000000 +-118.300000,34.010000,35.000000,1147.000000,290.000000,818.000000,281.000000,1.796100,111700.000000 +-118.300000,34.000000,52.000000,1743.000000,421.000000,1206.000000,384.000000,1.687500,116000.000000 +-118.300000,34.000000,52.000000,1718.000000,354.000000,1026.000000,312.000000,2.000000,128000.000000 
+-118.300000,34.000000,52.000000,1686.000000,377.000000,982.000000,356.000000,2.095800,116400.000000 +-118.300000,34.000000,52.000000,1420.000000,355.000000,1080.000000,353.000000,1.517900,116100.000000 +-118.300000,34.000000,52.000000,1296.000000,246.000000,853.000000,238.000000,3.050000,111600.000000 +-118.300000,33.990000,47.000000,2637.000000,588.000000,1903.000000,521.000000,1.831700,96500.000000 +-118.300000,33.990000,47.000000,2212.000000,533.000000,1903.000000,554.000000,1.985300,101100.000000 +-118.300000,33.990000,45.000000,1701.000000,452.000000,1484.000000,427.000000,1.840000,91400.000000 +-118.300000,33.990000,44.000000,1458.000000,326.000000,1159.000000,283.000000,1.164500,98200.000000 +-118.300000,33.990000,43.000000,1534.000000,384.000000,1231.000000,329.000000,2.543700,94500.000000 +-118.300000,33.980000,48.000000,2010.000000,445.000000,1208.000000,404.000000,1.661100,95800.000000 +-118.300000,33.980000,44.000000,1597.000000,388.000000,902.000000,321.000000,1.955600,93300.000000 +-118.300000,33.970000,50.000000,2270.000000,451.000000,1000.000000,412.000000,2.122100,119400.000000 +-118.300000,33.970000,44.000000,1521.000000,289.000000,1074.000000,285.000000,2.067300,99800.000000 +-118.300000,33.960000,47.000000,2112.000000,417.000000,1161.000000,368.000000,3.972200,117400.000000 +-118.300000,33.960000,39.000000,2802.000000,618.000000,1524.000000,529.000000,2.651800,136300.000000 +-118.300000,33.950000,50.000000,1843.000000,326.000000,892.000000,314.000000,3.134600,120000.000000 +-118.300000,33.950000,41.000000,2057.000000,550.000000,1805.000000,506.000000,1.245500,100800.000000 +-118.300000,33.950000,35.000000,1182.000000,305.000000,977.000000,283.000000,1.589800,94000.000000 +-118.300000,33.950000,27.000000,1774.000000,444.000000,1622.000000,402.000000,2.203100,96900.000000 +-118.300000,33.940000,36.000000,3504.000000,862.000000,2521.000000,836.000000,2.567900,114900.000000 +-118.300000,33.930000,40.000000,2434.000000,477.000000,1646.000000,453.000000,3.202400,128000.000000 +-118.300000,33.930000,36.000000,2196.000000,633.000000,2017.000000,583.000000,1.396200,124300.000000 +-118.300000,33.930000,35.000000,1300.000000,356.000000,1216.000000,326.000000,1.200000,99200.000000 +-118.300000,33.930000,29.000000,2228.000000,396.000000,1140.000000,352.000000,3.796900,169400.000000 +-118.300000,33.920000,34.000000,2053.000000,382.000000,1258.000000,380.000000,3.013900,154700.000000 +-118.300000,33.910000,30.000000,1842.000000,476.000000,1491.000000,420.000000,3.014700,155100.000000 +-118.300000,33.900000,29.000000,2617.000000,668.000000,1868.000000,647.000000,3.600000,208800.000000 +-118.300000,33.900000,27.000000,3267.000000,762.000000,2099.000000,647.000000,3.400000,224100.000000 +-118.300000,33.900000,19.000000,2421.000000,689.000000,1726.000000,660.000000,3.287000,181400.000000 +-118.300000,33.900000,13.000000,2455.000000,661.000000,1975.000000,618.000000,2.955900,173600.000000 +-118.300000,33.890000,37.000000,2132.000000,565.000000,1369.000000,565.000000,3.285000,218100.000000 +-118.300000,33.890000,30.000000,2756.000000,858.000000,1806.000000,787.000000,3.032900,207800.000000 +-118.300000,33.880000,35.000000,3227.000000,749.000000,1881.000000,696.000000,2.844500,242100.000000 +-118.300000,33.880000,30.000000,1348.000000,333.000000,885.000000,322.000000,3.257400,195300.000000 +-118.300000,33.880000,26.000000,1221.000000,312.000000,807.000000,330.000000,4.053600,253600.000000 +-118.300000,33.870000,27.000000,3144.000000,722.000000,1510.000000,680.000000,3.159700,214700.000000 
+-118.300000,33.860000,35.000000,2016.000000,365.000000,1083.000000,369.000000,5.172700,230200.000000 +-118.300000,33.860000,35.000000,1511.000000,274.000000,853.000000,308.000000,4.979200,251300.000000 +-118.300000,33.850000,38.000000,123.000000,36.000000,142.000000,40.000000,2.394200,200000.000000 +-118.300000,33.840000,37.000000,1241.000000,226.000000,621.000000,255.000000,4.919600,232400.000000 +-118.300000,33.840000,36.000000,1428.000000,268.000000,825.000000,250.000000,4.722200,239600.000000 +-118.300000,33.830000,33.000000,2716.000000,660.000000,1807.000000,661.000000,3.547300,226300.000000 +-118.300000,33.830000,31.000000,2693.000000,661.000000,1598.000000,618.000000,3.185100,240200.000000 +-118.300000,33.820000,35.000000,1499.000000,340.000000,1141.000000,326.000000,2.613600,213600.000000 +-118.300000,33.820000,26.000000,2080.000000,560.000000,2096.000000,506.000000,2.810600,184400.000000 +-118.300000,33.820000,25.000000,2659.000000,765.000000,2629.000000,726.000000,2.636800,175900.000000 +-118.300000,33.800000,8.000000,1115.000000,412.000000,1472.000000,396.000000,3.139200,146200.000000 +-118.300000,33.800000,27.000000,2790.000000,513.000000,1498.000000,519.000000,5.310600,268300.000000 +-118.300000,33.790000,9.000000,2782.000000,884.000000,1790.000000,748.000000,2.915400,203300.000000 +-118.300000,33.790000,35.000000,2793.000000,686.000000,2255.000000,682.000000,3.005700,235300.000000 +-118.300000,33.790000,21.000000,1068.000000,283.000000,1180.000000,274.000000,2.500000,157500.000000 +-118.300000,33.790000,13.000000,3569.000000,924.000000,2159.000000,880.000000,3.163000,224200.000000 +-118.300000,33.780000,35.000000,2572.000000,504.000000,1172.000000,519.000000,4.620700,304100.000000 +-118.300000,33.770000,18.000000,3821.000000,795.000000,2831.000000,769.000000,2.957100,346200.000000 +-118.300000,33.760000,18.000000,9659.000000,1716.000000,4336.000000,1674.000000,5.776400,290500.000000 +-118.300000,33.750000,48.000000,1958.000000,386.000000,1098.000000,380.000000,4.625000,273400.000000 +-118.300000,33.750000,42.000000,967.000000,175.000000,481.000000,163.000000,5.661100,265600.000000 +-118.300000,33.750000,19.000000,2356.000000,440.000000,1291.000000,418.000000,4.230800,416100.000000 +-118.300000,33.740000,47.000000,2223.000000,410.000000,1163.000000,413.000000,4.467100,270800.000000 +-118.300000,33.740000,28.000000,1065.000000,215.000000,887.000000,217.000000,3.937500,270500.000000 +-118.300000,33.740000,23.000000,3075.000000,860.000000,1584.000000,806.000000,2.938600,260500.000000 +-118.300000,33.730000,40.000000,2582.000000,606.000000,1304.000000,588.000000,3.569400,276400.000000 +-118.300000,33.720000,35.000000,2790.000000,447.000000,1167.000000,441.000000,6.202800,361500.000000 +-118.300000,33.720000,28.000000,2647.000000,658.000000,1459.000000,595.000000,3.447400,253400.000000 +-118.300000,33.720000,28.000000,2510.000000,583.000000,1388.000000,554.000000,3.339700,267800.000000 +-118.310000,36.940000,35.000000,2563.000000,530.000000,861.000000,371.000000,2.325000,80600.000000 +-118.310000,35.740000,18.000000,2327.000000,642.000000,799.000000,335.000000,1.841900,92300.000000 +-118.310000,34.280000,34.000000,3258.000000,610.000000,1810.000000,633.000000,5.114500,219900.000000 +-118.310000,34.270000,35.000000,1446.000000,274.000000,759.000000,291.000000,6.080800,215600.000000 +-118.310000,34.260000,38.000000,2264.000000,460.000000,1124.000000,388.000000,4.268500,189600.000000 +-118.310000,34.260000,36.000000,1882.000000,453.000000,1005.000000,409.000000,3.800000,217100.000000 
+-118.310000,34.260000,34.000000,1797.000000,363.000000,948.000000,363.000000,4.133900,187300.000000 +-118.310000,34.220000,27.000000,7714.000000,1132.000000,3199.000000,1100.000000,7.126200,446200.000000 +-118.310000,34.200000,36.000000,1692.000000,263.000000,778.000000,278.000000,5.086500,349600.000000 +-118.310000,34.190000,42.000000,724.000000,149.000000,420.000000,150.000000,3.062500,361700.000000 +-118.310000,34.190000,27.000000,4713.000000,1169.000000,2372.000000,1077.000000,3.701500,287900.000000 +-118.310000,34.180000,11.000000,3112.000000,890.000000,1700.000000,851.000000,3.158700,181300.000000 +-118.310000,34.170000,24.000000,2910.000000,917.000000,2522.000000,873.000000,2.407400,219400.000000 +-118.310000,34.170000,12.000000,3188.000000,931.000000,2118.000000,850.000000,3.182300,218300.000000 +-118.310000,34.160000,38.000000,2347.000000,665.000000,1317.000000,547.000000,3.211200,349300.000000 +-118.310000,34.160000,37.000000,2144.000000,446.000000,860.000000,435.000000,3.946400,315000.000000 +-118.310000,34.130000,40.000000,2822.000000,443.000000,907.000000,414.000000,7.269200,498700.000000 +-118.310000,34.120000,39.000000,3895.000000,561.000000,1271.000000,536.000000,8.007300,500001.000000 +-118.310000,34.110000,52.000000,1875.000000,303.000000,735.000000,293.000000,5.865900,433300.000000 +-118.310000,34.100000,40.000000,4984.000000,2158.000000,4828.000000,2028.000000,1.690300,350000.000000 +-118.310000,34.100000,36.000000,2288.000000,1033.000000,3030.000000,890.000000,1.532800,250000.000000 +-118.310000,34.100000,34.000000,399.000000,141.000000,482.000000,134.000000,1.625000,67500.000000 +-118.310000,34.090000,42.000000,1951.000000,846.000000,2500.000000,813.000000,1.519500,218200.000000 +-118.310000,34.090000,37.000000,773.000000,314.000000,835.000000,312.000000,1.857600,193800.000000 +-118.310000,34.090000,34.000000,2065.000000,839.000000,2626.000000,775.000000,1.821400,211100.000000 +-118.310000,34.090000,30.000000,3165.000000,1263.000000,3678.000000,1141.000000,2.000000,240600.000000 +-118.310000,34.090000,28.000000,720.000000,267.000000,891.000000,265.000000,1.897700,100000.000000 +-118.310000,34.080000,49.000000,2549.000000,630.000000,1539.000000,594.000000,2.621800,350900.000000 +-118.310000,34.080000,31.000000,2275.000000,823.000000,2189.000000,720.000000,1.754200,287500.000000 +-118.310000,34.080000,30.000000,1390.000000,457.000000,1460.000000,423.000000,2.242200,254500.000000 +-118.310000,34.080000,27.000000,1514.000000,510.000000,1603.000000,518.000000,2.897100,251100.000000 +-118.310000,34.080000,26.000000,1609.000000,534.000000,1868.000000,497.000000,2.703800,227100.000000 +-118.310000,34.080000,23.000000,1443.000000,521.000000,1264.000000,450.000000,2.754300,220000.000000 +-118.310000,34.070000,28.000000,2362.000000,949.000000,2759.000000,894.000000,2.236400,305600.000000 +-118.310000,34.070000,26.000000,5062.000000,2055.000000,4533.000000,1822.000000,2.310500,166700.000000 +-118.310000,34.070000,20.000000,3264.000000,1248.000000,2919.000000,1191.000000,2.367400,500001.000000 +-118.310000,34.060000,52.000000,2124.000000,756.000000,1920.000000,756.000000,2.143500,328900.000000 +-118.310000,34.060000,34.000000,2470.000000,1197.000000,2326.000000,1055.000000,1.903800,325000.000000 +-118.310000,34.060000,34.000000,1848.000000,667.000000,1351.000000,589.000000,2.054700,410000.000000 +-118.310000,34.060000,31.000000,2827.000000,1084.000000,3107.000000,993.000000,2.027800,360000.000000 
+-118.310000,34.060000,30.000000,3110.000000,1269.000000,2535.000000,1218.000000,1.698700,412500.000000 +-118.310000,34.060000,24.000000,1336.000000,453.000000,1268.000000,426.000000,2.820200,500001.000000 +-118.310000,34.060000,14.000000,1559.000000,646.000000,1639.000000,567.000000,1.994900,380000.000000 +-118.310000,34.050000,42.000000,443.000000,223.000000,582.000000,223.000000,2.293700,350000.000000 +-118.310000,34.050000,38.000000,1864.000000,515.000000,1768.000000,439.000000,1.933600,190600.000000 +-118.310000,34.050000,35.000000,2007.000000,571.000000,1513.000000,554.000000,2.116200,229200.000000 +-118.310000,34.050000,26.000000,1809.000000,640.000000,2543.000000,640.000000,2.353600,500000.000000 +-118.310000,34.040000,52.000000,1277.000000,285.000000,954.000000,334.000000,2.583300,234600.000000 +-118.310000,34.040000,37.000000,2338.000000,686.000000,2376.000000,630.000000,1.767000,170300.000000 +-118.310000,34.040000,33.000000,2691.000000,726.000000,2390.000000,681.000000,2.404800,208300.000000 +-118.310000,34.040000,29.000000,2038.000000,578.000000,2070.000000,570.000000,2.065800,214600.000000 +-118.310000,34.030000,52.000000,1902.000000,406.000000,1233.000000,385.000000,2.829500,132200.000000 +-118.310000,34.030000,47.000000,1315.000000,247.000000,785.000000,245.000000,1.230000,138400.000000 +-118.310000,34.030000,46.000000,2173.000000,510.000000,1343.000000,476.000000,2.025000,135500.000000 +-118.310000,34.030000,34.000000,2041.000000,517.000000,1479.000000,495.000000,2.117300,156600.000000 +-118.310000,34.030000,29.000000,2438.000000,867.000000,2114.000000,753.000000,0.835100,150000.000000 +-118.310000,34.020000,52.000000,1832.000000,441.000000,1186.000000,420.000000,1.243400,98400.000000 +-118.310000,34.020000,52.000000,1173.000000,284.000000,814.000000,295.000000,2.450000,111400.000000 +-118.310000,34.020000,46.000000,1976.000000,469.000000,1409.000000,431.000000,2.298100,112100.000000 +-118.310000,34.020000,44.000000,1555.000000,324.000000,931.000000,265.000000,1.471200,105800.000000 +-118.310000,34.020000,43.000000,2255.000000,533.000000,1568.000000,470.000000,1.695500,115200.000000 +-118.310000,34.020000,41.000000,1046.000000,216.000000,727.000000,201.000000,1.666700,116900.000000 +-118.310000,34.020000,23.000000,1703.000000,397.000000,1333.000000,361.000000,1.318700,127100.000000 +-118.310000,34.010000,52.000000,1793.000000,350.000000,1303.000000,366.000000,3.075900,123700.000000 +-118.310000,34.010000,50.000000,1463.000000,354.000000,912.000000,293.000000,1.738600,109400.000000 +-118.310000,34.010000,48.000000,2544.000000,532.000000,1357.000000,498.000000,2.526300,121000.000000 +-118.310000,34.010000,39.000000,2073.000000,566.000000,1246.000000,547.000000,2.041700,117100.000000 +-118.310000,34.000000,52.000000,2709.000000,642.000000,1751.000000,613.000000,2.111600,122500.000000 +-118.310000,34.000000,52.000000,1630.000000,379.000000,1413.000000,405.000000,1.933000,120000.000000 +-118.310000,34.000000,52.000000,1542.000000,309.000000,939.000000,276.000000,1.689200,129100.000000 +-118.310000,34.000000,47.000000,1551.000000,362.000000,1329.000000,322.000000,1.979200,116400.000000 +-118.310000,33.990000,49.000000,857.000000,196.000000,694.000000,228.000000,2.895000,108000.000000 +-118.310000,33.990000,47.000000,1525.000000,359.000000,982.000000,333.000000,2.091500,126600.000000 +-118.310000,33.990000,45.000000,1489.000000,339.000000,791.000000,316.000000,2.233900,104800.000000 
+-118.310000,33.990000,44.000000,1703.000000,358.000000,789.000000,249.000000,1.708300,100000.000000 +-118.310000,33.980000,52.000000,1975.000000,379.000000,1043.000000,371.000000,2.397700,112200.000000 +-118.310000,33.980000,52.000000,1837.000000,426.000000,1062.000000,343.000000,2.000000,96500.000000 +-118.310000,33.980000,52.000000,1607.000000,331.000000,900.000000,295.000000,3.598200,96600.000000 +-118.310000,33.980000,50.000000,1985.000000,454.000000,1090.000000,410.000000,1.825000,106600.000000 +-118.310000,33.980000,44.000000,222.000000,54.000000,234.000000,77.000000,5.113600,111700.000000 +-118.310000,33.970000,52.000000,1629.000000,277.000000,819.000000,288.000000,3.725000,142600.000000 +-118.310000,33.970000,52.000000,1595.000000,325.000000,823.000000,302.000000,3.218800,124200.000000 +-118.310000,33.970000,48.000000,1541.000000,314.000000,819.000000,312.000000,3.091700,136100.000000 +-118.310000,33.970000,47.000000,2066.000000,422.000000,1156.000000,380.000000,2.791700,125800.000000 +-118.310000,33.960000,52.000000,2523.000000,460.000000,1167.000000,413.000000,3.062500,127400.000000 +-118.310000,33.960000,47.000000,2005.000000,392.000000,1134.000000,415.000000,3.714300,140300.000000 +-118.310000,33.960000,47.000000,1586.000000,322.000000,1077.000000,339.000000,4.486100,140400.000000 +-118.310000,33.960000,46.000000,1686.000000,303.000000,870.000000,320.000000,3.464300,136300.000000 +-118.310000,33.960000,43.000000,2149.000000,493.000000,1316.000000,462.000000,1.528000,131800.000000 +-118.310000,33.960000,43.000000,2118.000000,569.000000,1266.000000,500.000000,1.747000,121000.000000 +-118.310000,33.950000,44.000000,2490.000000,430.000000,1305.000000,411.000000,4.829500,149600.000000 +-118.310000,33.950000,44.000000,1558.000000,333.000000,1095.000000,316.000000,4.004300,133500.000000 +-118.310000,33.950000,43.000000,1823.000000,358.000000,1065.000000,342.000000,3.270800,131000.000000 +-118.310000,33.940000,44.000000,1854.000000,367.000000,976.000000,335.000000,3.658300,126700.000000 +-118.310000,33.940000,41.000000,1353.000000,286.000000,751.000000,250.000000,2.740100,131700.000000 +-118.310000,33.940000,40.000000,1917.000000,438.000000,1021.000000,383.000000,2.244800,175000.000000 +-118.310000,33.940000,40.000000,1550.000000,278.000000,798.000000,270.000000,3.775000,153800.000000 +-118.310000,33.940000,40.000000,1323.000000,243.000000,684.000000,229.000000,3.220600,145800.000000 +-118.310000,33.930000,43.000000,1834.000000,292.000000,997.000000,295.000000,4.946400,150300.000000 +-118.310000,33.930000,37.000000,1282.000000,244.000000,852.000000,249.000000,4.291700,127900.000000 +-118.310000,33.930000,35.000000,1580.000000,266.000000,926.000000,282.000000,5.065300,158000.000000 +-118.310000,33.920000,35.000000,1307.000000,246.000000,672.000000,219.000000,4.845600,146400.000000 +-118.310000,33.910000,36.000000,961.000000,173.000000,625.000000,179.000000,4.259600,181100.000000 +-118.310000,33.910000,31.000000,1415.000000,339.000000,874.000000,289.000000,3.817300,177900.000000 +-118.310000,33.900000,38.000000,1400.000000,399.000000,1131.000000,405.000000,3.541700,198400.000000 +-118.310000,33.900000,30.000000,2407.000000,581.000000,1724.000000,531.000000,3.479200,194700.000000 +-118.310000,33.900000,28.000000,1576.000000,400.000000,891.000000,378.000000,2.631200,171300.000000 +-118.310000,33.890000,37.000000,2278.000000,508.000000,1257.000000,498.000000,3.763900,220600.000000 +-118.310000,33.880000,33.000000,2147.000000,505.000000,1371.000000,498.000000,2.421900,260700.000000 
+-118.310000,33.880000,32.000000,2421.000000,671.000000,1491.000000,587.000000,3.564400,242300.000000 +-118.310000,33.880000,21.000000,1490.000000,430.000000,686.000000,400.000000,2.381200,237500.000000 +-118.310000,33.860000,29.000000,2243.000000,361.000000,1051.000000,352.000000,6.663200,325200.000000 +-118.310000,33.840000,52.000000,1819.000000,464.000000,1068.000000,424.000000,3.625000,270700.000000 +-118.310000,33.840000,5.000000,3559.000000,869.000000,2965.000000,794.000000,2.649100,216700.000000 +-118.310000,33.830000,50.000000,696.000000,311.000000,382.000000,234.000000,2.775000,225000.000000 +-118.310000,33.830000,45.000000,2929.000000,755.000000,1635.000000,652.000000,2.937500,273000.000000 +-118.310000,33.820000,39.000000,2198.000000,425.000000,1160.000000,436.000000,4.140600,323700.000000 +-118.310000,33.820000,35.000000,3462.000000,814.000000,1902.000000,700.000000,3.402000,279900.000000 +-118.310000,33.820000,26.000000,2345.000000,408.000000,1195.000000,377.000000,5.492500,361700.000000 +-118.310000,33.810000,30.000000,1773.000000,356.000000,905.000000,352.000000,4.305600,336000.000000 +-118.310000,33.810000,23.000000,3942.000000,748.000000,1679.000000,711.000000,4.116900,362600.000000 +-118.310000,33.800000,31.000000,4464.000000,991.000000,2420.000000,947.000000,4.042500,277900.000000 +-118.310000,33.800000,29.000000,2795.000000,572.000000,1469.000000,557.000000,3.716700,308900.000000 +-118.310000,33.790000,38.000000,1601.000000,352.000000,711.000000,304.000000,3.395800,250000.000000 +-118.310000,33.790000,35.000000,2290.000000,563.000000,1374.000000,530.000000,3.247200,254700.000000 +-118.310000,33.780000,30.000000,4573.000000,819.000000,2411.000000,819.000000,3.580400,383800.000000 +-118.310000,33.760000,26.000000,4486.000000,709.000000,1873.000000,719.000000,6.570400,414700.000000 +-118.310000,33.750000,36.000000,2715.000000,474.000000,1303.000000,457.000000,4.604200,357300.000000 +-118.310000,33.750000,34.000000,2338.000000,393.000000,1031.000000,373.000000,6.287000,396400.000000 +-118.310000,33.740000,36.000000,2464.000000,472.000000,1111.000000,457.000000,4.507400,350000.000000 +-118.310000,33.740000,22.000000,5042.000000,974.000000,2260.000000,935.000000,4.347200,351200.000000 +-118.310000,33.730000,52.000000,1665.000000,280.000000,656.000000,282.000000,5.249000,351900.000000 +-118.310000,33.730000,52.000000,1642.000000,287.000000,692.000000,288.000000,4.181200,321500.000000 +-118.310000,33.730000,36.000000,1725.000000,295.000000,799.000000,306.000000,5.087400,368500.000000 +-118.310000,33.730000,33.000000,2265.000000,366.000000,986.000000,388.000000,5.453300,409800.000000 +-118.310000,33.670000,42.000000,1297.000000,246.000000,611.000000,242.000000,5.307400,401900.000000 +-118.320000,34.620000,31.000000,1398.000000,273.000000,884.000000,299.000000,4.840900,264900.000000 +-118.320000,34.260000,32.000000,3690.000000,791.000000,1804.000000,715.000000,4.487500,222700.000000 +-118.320000,34.200000,36.000000,759.000000,136.000000,372.000000,135.000000,4.988600,328900.000000 +-118.320000,34.200000,36.000000,1978.000000,337.000000,834.000000,311.000000,3.986600,294400.000000 +-118.320000,34.200000,29.000000,2209.000000,444.000000,952.000000,403.000000,4.375000,341200.000000 +-118.320000,34.190000,37.000000,589.000000,119.000000,375.000000,122.000000,3.389700,222700.000000 +-118.320000,34.190000,37.000000,1519.000000,331.000000,613.000000,315.000000,3.017900,272500.000000 +-118.320000,34.190000,37.000000,1335.000000,249.000000,485.000000,240.000000,4.173100,352100.000000 
+-118.320000,34.180000,49.000000,192.000000,41.000000,83.000000,38.000000,3.017900,118800.000000 +-118.320000,34.180000,44.000000,1594.000000,389.000000,832.000000,340.000000,3.400000,212100.000000 +-118.320000,34.170000,47.000000,2589.000000,465.000000,1284.000000,485.000000,5.100800,247100.000000 +-118.320000,34.170000,45.000000,3448.000000,690.000000,1562.000000,643.000000,4.064800,258800.000000 +-118.320000,34.170000,40.000000,1868.000000,356.000000,799.000000,403.000000,2.930600,279300.000000 +-118.320000,34.170000,39.000000,1995.000000,564.000000,1202.000000,544.000000,3.587500,250000.000000 +-118.320000,34.160000,49.000000,1074.000000,170.000000,403.000000,208.000000,6.254700,366700.000000 +-118.320000,34.160000,46.000000,2345.000000,453.000000,1031.000000,427.000000,4.317300,278300.000000 +-118.320000,34.140000,23.000000,4574.000000,1423.000000,1624.000000,995.000000,4.096500,500001.000000 +-118.320000,34.130000,34.000000,1856.000000,273.000000,540.000000,264.000000,4.083300,500001.000000 +-118.320000,34.120000,52.000000,3410.000000,800.000000,1218.000000,783.000000,4.150000,393500.000000 +-118.320000,34.110000,42.000000,2462.000000,543.000000,857.000000,482.000000,4.083300,434400.000000 +-118.320000,34.110000,33.000000,5135.000000,1450.000000,2404.000000,1292.000000,3.246200,435700.000000 +-118.320000,34.100000,52.000000,786.000000,270.000000,756.000000,273.000000,2.231100,206300.000000 +-118.320000,34.100000,43.000000,1615.000000,734.000000,1460.000000,644.000000,1.400500,193800.000000 +-118.320000,34.100000,36.000000,1655.000000,690.000000,1957.000000,633.000000,1.732500,221900.000000 +-118.320000,34.100000,31.000000,622.000000,229.000000,597.000000,227.000000,1.528400,200000.000000 +-118.320000,34.100000,30.000000,2193.000000,965.000000,2197.000000,836.000000,1.827700,137500.000000 +-118.320000,34.100000,28.000000,1759.000000,716.000000,1463.000000,620.000000,1.730600,450000.000000 +-118.320000,34.090000,34.000000,2473.000000,1171.000000,2655.000000,1083.000000,1.633100,162500.000000 +-118.320000,34.090000,34.000000,1478.000000,675.000000,1976.000000,653.000000,2.055700,225000.000000 +-118.320000,34.090000,32.000000,563.000000,191.000000,626.000000,185.000000,2.034100,250000.000000 +-118.320000,34.090000,30.000000,1871.000000,766.000000,2595.000000,819.000000,2.004400,212500.000000 +-118.320000,34.090000,27.000000,210.000000,98.000000,332.000000,112.000000,2.555600,175000.000000 +-118.320000,34.080000,52.000000,1164.000000,257.000000,575.000000,251.000000,3.125000,380400.000000 +-118.320000,34.080000,52.000000,1137.000000,304.000000,754.000000,297.000000,3.370000,330300.000000 +-118.320000,34.080000,46.000000,2038.000000,534.000000,1250.000000,525.000000,2.419600,358100.000000 +-118.320000,34.070000,52.000000,2980.000000,366.000000,967.000000,359.000000,11.218500,500001.000000 +-118.320000,34.070000,52.000000,2156.000000,306.000000,861.000000,311.000000,8.806200,500001.000000 +-118.320000,34.070000,25.000000,2740.000000,707.000000,1420.000000,664.000000,3.590900,404500.000000 +-118.320000,34.060000,52.000000,983.000000,246.000000,578.000000,204.000000,5.739300,500001.000000 +-118.320000,34.060000,52.000000,955.000000,100.000000,457.000000,120.000000,15.000100,500001.000000 +-118.320000,34.060000,43.000000,2808.000000,584.000000,1654.000000,569.000000,4.125000,436800.000000 +-118.320000,34.060000,36.000000,3239.000000,722.000000,1383.000000,612.000000,4.591800,337000.000000 +-118.320000,34.050000,50.000000,1389.000000,364.000000,976.000000,302.000000,1.588200,327300.000000 
+-118.320000,34.050000,42.000000,3343.000000,1183.000000,3480.000000,1146.000000,1.992300,250000.000000 +-118.320000,34.050000,42.000000,3292.000000,713.000000,2224.000000,674.000000,3.551700,291500.000000 +-118.320000,34.040000,47.000000,1989.000000,532.000000,1430.000000,519.000000,1.833300,151100.000000 +-118.320000,34.040000,44.000000,1008.000000,223.000000,544.000000,223.000000,2.865400,176800.000000 +-118.320000,34.040000,42.000000,1766.000000,404.000000,1117.000000,367.000000,2.025900,168800.000000 +-118.320000,34.040000,39.000000,1294.000000,330.000000,1140.000000,313.000000,2.255400,165000.000000 +-118.320000,34.030000,50.000000,1845.000000,349.000000,1109.000000,335.000000,2.897100,127800.000000 +-118.320000,34.030000,47.000000,1734.000000,453.000000,1272.000000,438.000000,3.173100,121500.000000 +-118.320000,34.030000,47.000000,1082.000000,198.000000,455.000000,193.000000,3.013200,223200.000000 +-118.320000,34.030000,35.000000,3189.000000,935.000000,2221.000000,801.000000,2.104600,114000.000000 +-118.320000,34.030000,31.000000,2206.000000,501.000000,1194.000000,435.000000,1.953100,227800.000000 +-118.320000,34.020000,52.000000,2511.000000,587.000000,1660.000000,546.000000,2.609800,127100.000000 +-118.320000,34.020000,51.000000,2010.000000,460.000000,1355.000000,433.000000,2.030400,133400.000000 +-118.320000,34.020000,50.000000,1655.000000,256.000000,672.000000,260.000000,4.255400,194300.000000 +-118.320000,34.020000,48.000000,1949.000000,308.000000,823.000000,340.000000,3.390600,189700.000000 +-118.320000,34.020000,47.000000,1648.000000,346.000000,1120.000000,338.000000,2.004200,114200.000000 +-118.320000,34.010000,52.000000,3104.000000,645.000000,1498.000000,581.000000,2.666700,128000.000000 +-118.320000,34.010000,47.000000,1745.000000,371.000000,1079.000000,368.000000,2.402200,123400.000000 +-118.320000,34.010000,44.000000,4032.000000,913.000000,1622.000000,848.000000,2.493400,165800.000000 +-118.320000,34.000000,50.000000,2189.000000,460.000000,1097.000000,469.000000,2.458300,120900.000000 +-118.320000,33.990000,49.000000,1407.000000,269.000000,889.000000,283.000000,1.977900,114200.000000 +-118.320000,33.990000,48.000000,1260.000000,284.000000,791.000000,280.000000,2.187500,115200.000000 +-118.320000,33.990000,43.000000,2028.000000,479.000000,1074.000000,394.000000,2.590900,98700.000000 +-118.320000,33.990000,43.000000,1257.000000,232.000000,735.000000,232.000000,3.716700,108900.000000 +-118.320000,33.980000,49.000000,1993.000000,446.000000,1052.000000,394.000000,2.213800,119800.000000 +-118.320000,33.980000,49.000000,1412.000000,333.000000,901.000000,328.000000,1.706700,118600.000000 +-118.320000,33.980000,47.000000,949.000000,210.000000,574.000000,217.000000,2.175000,114700.000000 +-118.320000,33.980000,47.000000,1528.000000,331.000000,864.000000,308.000000,1.973200,101000.000000 +-118.320000,33.980000,46.000000,1611.000000,339.000000,921.000000,314.000000,3.083300,103300.000000 +-118.320000,33.980000,44.000000,1448.000000,314.000000,861.000000,310.000000,2.239600,108600.000000 +-118.320000,33.980000,40.000000,1298.000000,277.000000,791.000000,255.000000,3.234400,104300.000000 +-118.320000,33.970000,52.000000,1590.000000,302.000000,844.000000,295.000000,2.713900,164900.000000 +-118.320000,33.960000,47.000000,1885.000000,361.000000,954.000000,357.000000,3.851200,171300.000000 +-118.320000,33.960000,47.000000,1453.000000,247.000000,721.000000,276.000000,5.517600,191000.000000 +-118.320000,33.960000,47.000000,1297.000000,292.000000,704.000000,264.000000,3.321400,166500.000000 
+-118.320000,33.950000,44.000000,2131.000000,360.000000,1040.000000,330.000000,5.091200,169800.000000 +-118.320000,33.950000,44.000000,2023.000000,325.000000,992.000000,326.000000,4.666700,175600.000000 +-118.320000,33.950000,43.000000,3819.000000,708.000000,1505.000000,712.000000,3.171900,183500.000000 +-118.320000,33.940000,37.000000,2740.000000,504.000000,1468.000000,479.000000,4.536800,168800.000000 +-118.320000,33.940000,37.000000,1487.000000,296.000000,863.000000,291.000000,3.156300,186200.000000 +-118.320000,33.940000,36.000000,1722.000000,280.000000,830.000000,261.000000,4.053600,189000.000000 +-118.320000,33.940000,36.000000,1153.000000,224.000000,639.000000,226.000000,4.000000,192000.000000 +-118.320000,33.920000,35.000000,1281.000000,219.000000,710.000000,184.000000,4.830400,152800.000000 +-118.320000,33.910000,35.000000,940.000000,197.000000,640.000000,215.000000,4.200000,181300.000000 +-118.320000,33.910000,35.000000,881.000000,159.000000,605.000000,170.000000,3.665400,184500.000000 +-118.320000,33.910000,34.000000,1068.000000,198.000000,757.000000,231.000000,5.752800,180500.000000 +-118.320000,33.910000,33.000000,1729.000000,396.000000,1073.000000,344.000000,4.208300,180500.000000 +-118.320000,33.900000,37.000000,1664.000000,401.000000,1316.000000,409.000000,3.052600,216400.000000 +-118.320000,33.900000,36.000000,1741.000000,412.000000,1245.000000,423.000000,4.134400,210300.000000 +-118.320000,33.900000,36.000000,1520.000000,300.000000,831.000000,291.000000,4.047300,212100.000000 +-118.320000,33.900000,35.000000,3189.000000,680.000000,1882.000000,651.000000,3.662500,188000.000000 +-118.320000,33.890000,45.000000,1928.000000,453.000000,1323.000000,458.000000,4.281300,210100.000000 +-118.320000,33.890000,44.000000,1300.000000,252.000000,695.000000,249.000000,5.166900,220600.000000 +-118.320000,33.890000,34.000000,2675.000000,560.000000,1270.000000,492.000000,4.505300,242000.000000 +-118.320000,33.880000,37.000000,1402.000000,254.000000,722.000000,251.000000,6.478100,269000.000000 +-118.320000,33.880000,35.000000,1818.000000,339.000000,828.000000,319.000000,4.303600,282100.000000 +-118.320000,33.880000,34.000000,1803.000000,341.000000,947.000000,333.000000,5.553800,280300.000000 +-118.320000,33.870000,35.000000,2380.000000,404.000000,1212.000000,422.000000,5.625400,283800.000000 +-118.320000,33.870000,28.000000,3763.000000,762.000000,1967.000000,724.000000,5.324400,271900.000000 +-118.320000,33.860000,32.000000,3485.000000,678.000000,1715.000000,649.000000,4.656300,291700.000000 +-118.320000,33.850000,42.000000,3146.000000,770.000000,1859.000000,740.000000,3.507300,234800.000000 +-118.320000,33.840000,42.000000,1486.000000,420.000000,897.000000,377.000000,1.622800,376100.000000 +-118.320000,33.830000,51.000000,2399.000000,516.000000,1160.000000,514.000000,3.845600,318900.000000 +-118.320000,33.830000,19.000000,3792.000000,790.000000,2105.000000,834.000000,5.236300,310000.000000 +-118.320000,33.820000,25.000000,2587.000000,512.000000,1219.000000,509.000000,4.427100,382100.000000 +-118.320000,33.820000,22.000000,2508.000000,402.000000,1254.000000,395.000000,7.093500,379500.000000 +-118.320000,33.810000,28.000000,2142.000000,445.000000,1140.000000,422.000000,4.843800,346200.000000 +-118.320000,33.810000,27.000000,2113.000000,380.000000,1109.000000,360.000000,4.706200,357000.000000 +-118.320000,33.800000,29.000000,4317.000000,1037.000000,2102.000000,959.000000,3.127500,286400.000000 +-118.320000,33.800000,29.000000,3254.000000,717.000000,1593.000000,680.000000,4.053600,285800.000000 
+-118.320000,33.790000,35.000000,2924.000000,658.000000,1675.000000,602.000000,3.828700,279900.000000 +-118.320000,33.790000,32.000000,2381.000000,467.000000,1264.000000,488.000000,4.147700,315100.000000 +-118.320000,33.790000,21.000000,6638.000000,1634.000000,3240.000000,1568.000000,3.679700,271100.000000 +-118.320000,33.770000,37.000000,627.000000,95.000000,259.000000,106.000000,6.887000,500001.000000 +-118.320000,33.750000,37.000000,1080.000000,135.000000,366.000000,142.000000,11.667700,500001.000000 +-118.320000,33.750000,33.000000,2996.000000,398.000000,1048.000000,387.000000,9.267000,500001.000000 +-118.320000,33.740000,24.000000,6097.000000,794.000000,2248.000000,806.000000,10.135700,500001.000000 +-118.320000,33.730000,25.000000,1099.000000,168.000000,407.000000,159.000000,7.688600,500001.000000 +-118.320000,33.350000,27.000000,1675.000000,521.000000,744.000000,331.000000,2.157900,450000.000000 +-118.320000,33.340000,52.000000,996.000000,264.000000,341.000000,160.000000,2.736100,450000.000000 +-118.320000,33.330000,52.000000,2127.000000,512.000000,733.000000,288.000000,3.390600,300000.000000 +-118.330000,35.640000,15.000000,2966.000000,669.000000,1007.000000,465.000000,1.566700,72500.000000 +-118.330000,34.270000,29.000000,3034.000000,732.000000,1776.000000,702.000000,3.134900,230200.000000 +-118.330000,34.240000,31.000000,6434.000000,1188.000000,3540.000000,1131.000000,4.263900,293300.000000 +-118.330000,34.210000,31.000000,3190.000000,489.000000,1362.000000,480.000000,6.981000,402900.000000 +-118.330000,34.200000,43.000000,2322.000000,418.000000,1106.000000,433.000000,4.363100,284600.000000 +-118.330000,34.200000,43.000000,1325.000000,254.000000,613.000000,248.000000,3.607100,289000.000000 +-118.330000,34.200000,23.000000,7179.000000,1985.000000,4757.000000,1924.000000,3.105100,206500.000000 +-118.330000,34.190000,46.000000,2115.000000,463.000000,1133.000000,439.000000,3.734400,222000.000000 +-118.330000,34.190000,45.000000,1505.000000,347.000000,799.000000,319.000000,3.138000,217000.000000 +-118.330000,34.180000,49.000000,1969.000000,377.000000,977.000000,367.000000,3.846200,231300.000000 +-118.330000,34.180000,48.000000,2122.000000,385.000000,926.000000,362.000000,5.697500,231400.000000 +-118.330000,34.180000,45.000000,1552.000000,315.000000,785.000000,316.000000,3.741100,235500.000000 +-118.330000,34.170000,48.000000,2584.000000,483.000000,1118.000000,459.000000,4.239600,245100.000000 +-118.330000,34.170000,44.000000,1934.000000,375.000000,750.000000,365.000000,2.473000,251800.000000 +-118.330000,34.160000,44.000000,2705.000000,649.000000,1676.000000,654.000000,3.428600,247900.000000 +-118.330000,34.160000,37.000000,2381.000000,575.000000,1235.000000,499.000000,3.794100,247800.000000 +-118.330000,34.160000,23.000000,1359.000000,428.000000,770.000000,380.000000,3.401600,234600.000000 +-118.330000,34.150000,44.000000,1321.000000,303.000000,471.000000,301.000000,4.267900,331800.000000 +-118.330000,34.150000,39.000000,493.000000,168.000000,259.000000,138.000000,2.366700,17500.000000 +-118.330000,34.120000,23.000000,1894.000000,416.000000,769.000000,392.000000,6.035200,500001.000000 +-118.330000,34.110000,38.000000,3495.000000,1100.000000,1939.000000,994.000000,2.214800,438300.000000 +-118.330000,34.100000,48.000000,1116.000000,524.000000,1610.000000,483.000000,1.625000,237500.000000 +-118.330000,34.100000,43.000000,2732.000000,1646.000000,3049.000000,1429.000000,1.315700,333300.000000 
+-118.330000,34.100000,29.000000,732.000000,288.000000,691.000000,278.000000,2.186600,250000.000000 +-118.330000,34.090000,40.000000,2004.000000,687.000000,1514.000000,542.000000,1.991100,220000.000000 +-118.330000,34.090000,36.000000,654.000000,186.000000,416.000000,138.000000,3.695300,200000.000000 +-118.330000,34.090000,36.000000,561.000000,180.000000,340.000000,127.000000,1.437500,165000.000000 +-118.330000,34.080000,52.000000,1777.000000,454.000000,671.000000,439.000000,3.508300,500001.000000 +-118.330000,34.080000,50.000000,2989.000000,832.000000,1345.000000,775.000000,3.242600,442900.000000 +-118.330000,34.070000,52.000000,2248.000000,255.000000,813.000000,265.000000,15.000100,500001.000000 +-118.330000,34.060000,52.000000,1841.000000,240.000000,693.000000,218.000000,15.000100,500001.000000 +-118.330000,34.050000,48.000000,2405.000000,527.000000,1868.000000,502.000000,3.375000,257800.000000 +-118.330000,34.050000,45.000000,1707.000000,519.000000,1446.000000,466.000000,2.173600,171300.000000 +-118.330000,34.050000,44.000000,1574.000000,390.000000,1323.000000,404.000000,2.528400,226300.000000 +-118.330000,34.040000,52.000000,2545.000000,401.000000,1004.000000,372.000000,3.637300,420000.000000 +-118.330000,34.040000,33.000000,1806.000000,444.000000,1161.000000,393.000000,2.592700,161500.000000 +-118.330000,34.030000,46.000000,2312.000000,625.000000,1552.000000,603.000000,1.642900,125000.000000 +-118.330000,34.030000,39.000000,2840.000000,826.000000,1911.000000,688.000000,1.901800,137500.000000 +-118.330000,34.030000,33.000000,2314.000000,624.000000,1714.000000,582.000000,1.737700,183900.000000 +-118.330000,34.020000,42.000000,2043.000000,378.000000,869.000000,416.000000,3.500000,181100.000000 +-118.330000,34.010000,47.000000,1320.000000,259.000000,653.000000,291.000000,3.772700,193000.000000 +-118.330000,34.010000,44.000000,2182.000000,492.000000,878.000000,493.000000,1.963100,181300.000000 +-118.330000,34.000000,47.000000,825.000000,187.000000,416.000000,173.000000,2.333300,133300.000000 +-118.330000,34.000000,24.000000,873.000000,320.000000,529.000000,308.000000,0.930400,151600.000000 +-118.330000,33.990000,46.000000,1582.000000,315.000000,777.000000,286.000000,3.208300,149600.000000 +-118.330000,33.990000,44.000000,1918.000000,387.000000,1041.000000,364.000000,2.854200,126500.000000 +-118.330000,33.990000,43.000000,2224.000000,550.000000,1598.000000,545.000000,2.827400,122500.000000 +-118.330000,33.980000,30.000000,3112.000000,931.000000,2739.000000,841.000000,1.653100,118500.000000 +-118.330000,33.970000,47.000000,1830.000000,369.000000,922.000000,377.000000,4.163500,156400.000000 +-118.330000,33.960000,42.000000,2084.000000,517.000000,1062.000000,451.000000,2.005700,198200.000000 +-118.330000,33.960000,42.000000,1686.000000,361.000000,737.000000,319.000000,2.300000,189200.000000 +-118.330000,33.960000,24.000000,6513.000000,1290.000000,2636.000000,1271.000000,4.209900,189800.000000 +-118.330000,33.940000,31.000000,3757.000000,1102.000000,3288.000000,964.000000,1.930900,137500.000000 +-118.330000,33.930000,38.000000,694.000000,112.000000,412.000000,119.000000,6.071800,156000.000000 +-118.330000,33.930000,37.000000,4916.000000,1134.000000,3533.000000,1035.000000,3.286200,152300.000000 +-118.330000,33.930000,37.000000,1831.000000,356.000000,925.000000,338.000000,4.409100,148400.000000 +-118.330000,33.920000,23.000000,969.000000,288.000000,670.000000,251.000000,3.267000,185400.000000 +-118.330000,33.910000,8.000000,10731.000000,3335.000000,7211.000000,3028.000000,2.455000,192700.000000 
+-118.330000,33.910000,39.000000,1224.000000,312.000000,1106.000000,333.000000,3.349100,181800.000000 +-118.330000,33.900000,21.000000,6603.000000,1984.000000,5546.000000,1745.000000,2.609100,163900.000000 +-118.330000,33.890000,42.000000,1816.000000,338.000000,897.000000,306.000000,5.187400,230800.000000 +-118.330000,33.890000,39.000000,1880.000000,361.000000,982.000000,357.000000,4.195300,226900.000000 +-118.330000,33.880000,36.000000,1271.000000,346.000000,811.000000,345.000000,3.241700,283300.000000 +-118.330000,33.880000,30.000000,1856.000000,444.000000,899.000000,435.000000,3.150500,270000.000000 +-118.330000,33.870000,44.000000,724.000000,133.000000,373.000000,133.000000,3.916700,265600.000000 +-118.330000,33.870000,36.000000,2219.000000,406.000000,1219.000000,403.000000,4.261400,267100.000000 +-118.330000,33.870000,35.000000,743.000000,128.000000,385.000000,137.000000,6.489100,278100.000000 +-118.330000,33.860000,38.000000,914.000000,176.000000,519.000000,174.000000,6.033500,255400.000000 +-118.330000,33.860000,36.000000,854.000000,160.000000,473.000000,150.000000,6.399200,259600.000000 +-118.330000,33.840000,36.000000,1364.000000,251.000000,668.000000,245.000000,5.313100,314100.000000 +-118.330000,33.830000,5.000000,13038.000000,2679.000000,5272.000000,2523.000000,5.502300,286400.000000 +-118.330000,33.820000,26.000000,5591.000000,934.000000,2824.000000,939.000000,6.586100,417800.000000 +-118.330000,33.720000,25.000000,6191.000000,1081.000000,2297.000000,1023.000000,6.424600,446700.000000 +-118.330000,33.690000,41.000000,2168.000000,357.000000,1171.000000,374.000000,4.721600,311900.000000 +-118.330000,33.340000,52.000000,2359.000000,591.000000,1100.000000,431.000000,2.833300,414700.000000 +-118.340000,35.270000,10.000000,2939.000000,605.000000,1167.000000,446.000000,2.391700,79000.000000 +-118.340000,34.860000,11.000000,7353.000000,1482.000000,3571.000000,1308.000000,2.809700,130000.000000 +-118.340000,34.260000,37.000000,1776.000000,301.000000,702.000000,265.000000,5.266100,314900.000000 +-118.340000,34.210000,36.000000,1834.000000,316.000000,864.000000,309.000000,4.788500,302200.000000 +-118.340000,34.200000,41.000000,2860.000000,682.000000,1516.000000,621.000000,3.043100,262900.000000 +-118.340000,34.190000,48.000000,814.000000,165.000000,490.000000,176.000000,3.140600,223100.000000 +-118.340000,34.190000,47.000000,1721.000000,343.000000,834.000000,334.000000,4.192300,231200.000000 +-118.340000,34.190000,43.000000,1029.000000,252.000000,613.000000,255.000000,2.682700,219900.000000 +-118.340000,34.190000,41.000000,1524.000000,393.000000,1176.000000,375.000000,2.875000,192400.000000 +-118.340000,34.180000,46.000000,1393.000000,301.000000,714.000000,295.000000,2.812500,229900.000000 +-118.340000,34.180000,45.000000,3566.000000,701.000000,1601.000000,653.000000,3.866800,232000.000000 +-118.340000,34.180000,45.000000,1328.000000,290.000000,720.000000,289.000000,3.875000,226900.000000 +-118.340000,34.170000,52.000000,1133.000000,212.000000,545.000000,222.000000,4.875000,249500.000000 +-118.340000,34.170000,49.000000,3033.000000,580.000000,1284.000000,561.000000,4.116100,232500.000000 +-118.340000,34.170000,46.000000,1718.000000,344.000000,756.000000,343.000000,3.212500,247000.000000 +-118.340000,34.160000,46.000000,1396.000000,294.000000,608.000000,246.000000,3.692000,244500.000000 +-118.340000,34.160000,44.000000,1717.000000,391.000000,848.000000,353.000000,3.611100,254500.000000 +-118.340000,34.160000,25.000000,6082.000000,1763.000000,2616.000000,1644.000000,3.648600,246900.000000 
+-118.340000,34.150000,40.000000,3068.000000,756.000000,1190.000000,695.000000,3.563700,497400.000000 +-118.340000,34.150000,16.000000,1586.000000,377.000000,625.000000,344.000000,4.089300,450000.000000 +-118.340000,34.130000,45.000000,2375.000000,417.000000,751.000000,410.000000,6.673900,500001.000000 +-118.340000,34.110000,40.000000,5485.000000,1242.000000,2034.000000,1133.000000,3.697400,500001.000000 +-118.340000,34.100000,29.000000,3193.000000,1452.000000,2039.000000,1265.000000,1.820900,500001.000000 +-118.340000,34.100000,28.000000,2223.000000,752.000000,1271.000000,684.000000,2.543400,232100.000000 +-118.340000,34.100000,24.000000,1996.000000,791.000000,1215.000000,672.000000,1.542900,325000.000000 +-118.340000,34.090000,52.000000,1731.000000,502.000000,849.000000,466.000000,3.294600,321600.000000 +-118.340000,34.090000,5.000000,2665.000000,954.000000,1733.000000,766.000000,2.356800,204700.000000 +-118.340000,34.090000,37.000000,1442.000000,501.000000,998.000000,503.000000,2.443200,200000.000000 +-118.340000,34.080000,52.000000,2756.000000,542.000000,971.000000,510.000000,5.587100,500001.000000 +-118.340000,34.080000,52.000000,1721.000000,195.000000,688.000000,196.000000,15.000100,500001.000000 +-118.340000,34.080000,52.000000,1430.000000,186.000000,547.000000,178.000000,10.366100,500001.000000 +-118.340000,34.080000,52.000000,1421.000000,163.000000,495.000000,167.000000,10.586000,500001.000000 +-118.340000,34.080000,50.000000,3457.000000,854.000000,1584.000000,841.000000,3.107800,346700.000000 +-118.340000,34.070000,52.000000,3175.000000,1057.000000,1594.000000,997.000000,3.176600,225000.000000 +-118.340000,34.070000,52.000000,2066.000000,319.000000,981.000000,297.000000,5.863200,450000.000000 +-118.340000,34.070000,52.000000,1621.000000,284.000000,588.000000,272.000000,6.222300,500001.000000 +-118.340000,34.060000,52.000000,2089.000000,309.000000,883.000000,281.000000,7.457400,500001.000000 +-118.340000,34.060000,52.000000,2069.000000,417.000000,826.000000,377.000000,3.548100,396000.000000 +-118.340000,34.060000,52.000000,1482.000000,336.000000,768.000000,300.000000,3.716700,327300.000000 +-118.340000,34.060000,52.000000,1314.000000,170.000000,629.000000,214.000000,7.166900,500001.000000 +-118.340000,34.060000,52.000000,1311.000000,310.000000,707.000000,290.000000,3.481200,432800.000000 +-118.340000,34.050000,52.000000,2530.000000,458.000000,1122.000000,449.000000,3.916700,321600.000000 +-118.340000,34.050000,52.000000,2194.000000,504.000000,997.000000,438.000000,2.665400,259400.000000 +-118.340000,34.050000,50.000000,2009.000000,419.000000,1130.000000,402.000000,3.194400,213500.000000 +-118.340000,34.050000,41.000000,2099.000000,472.000000,1369.000000,465.000000,2.740900,167100.000000 +-118.340000,34.050000,39.000000,975.000000,292.000000,723.000000,285.000000,2.272500,140600.000000 +-118.340000,34.040000,42.000000,2010.000000,494.000000,1203.000000,427.000000,1.940800,134600.000000 +-118.340000,34.040000,42.000000,1681.000000,360.000000,987.000000,337.000000,2.600000,171400.000000 +-118.340000,34.040000,40.000000,2064.000000,662.000000,2140.000000,617.000000,2.225400,127100.000000 +-118.340000,34.040000,37.000000,1466.000000,529.000000,1835.000000,500.000000,1.701400,123200.000000 +-118.340000,34.040000,35.000000,2345.000000,607.000000,2042.000000,565.000000,2.595500,139700.000000 +-118.340000,34.030000,49.000000,1295.000000,276.000000,765.000000,265.000000,3.430600,130200.000000 
+-118.340000,34.030000,48.000000,1426.000000,331.000000,784.000000,356.000000,1.658100,118800.000000 +-118.340000,34.030000,47.000000,1927.000000,561.000000,1349.000000,508.000000,1.344400,125000.000000 +-118.340000,34.020000,50.000000,1172.000000,261.000000,685.000000,260.000000,3.144200,130300.000000 +-118.340000,34.020000,48.000000,1614.000000,320.000000,684.000000,318.000000,4.221800,181000.000000 +-118.340000,34.020000,44.000000,2067.000000,385.000000,1046.000000,441.000000,3.535700,156900.000000 +-118.340000,34.020000,44.000000,1527.000000,246.000000,608.000000,245.000000,4.035700,187800.000000 +-118.340000,34.010000,38.000000,2318.000000,735.000000,1407.000000,702.000000,1.618700,266700.000000 +-118.340000,34.010000,35.000000,1359.000000,359.000000,655.000000,341.000000,2.556800,312500.000000 +-118.340000,34.000000,49.000000,2863.000000,411.000000,1108.000000,406.000000,5.899300,313300.000000 +-118.340000,34.000000,49.000000,2465.000000,372.000000,1018.000000,359.000000,4.000000,296800.000000 +-118.340000,34.000000,44.000000,3183.000000,513.000000,1183.000000,473.000000,5.040700,314900.000000 +-118.340000,33.990000,48.000000,2225.000000,433.000000,1170.000000,401.000000,2.964300,140400.000000 +-118.340000,33.990000,48.000000,2009.000000,335.000000,919.000000,297.000000,4.812500,170500.000000 +-118.340000,33.990000,48.000000,1172.000000,205.000000,497.000000,190.000000,3.825000,183000.000000 +-118.340000,33.990000,47.000000,1107.000000,199.000000,437.000000,178.000000,3.734400,179400.000000 +-118.340000,33.990000,46.000000,1217.000000,322.000000,662.000000,305.000000,3.173100,140300.000000 +-118.340000,33.980000,47.000000,2649.000000,684.000000,2374.000000,607.000000,2.388200,137700.000000 +-118.340000,33.980000,46.000000,2126.000000,409.000000,1292.000000,414.000000,2.931500,149000.000000 +-118.340000,33.980000,45.000000,1298.000000,294.000000,1064.000000,268.000000,3.706700,136600.000000 +-118.340000,33.980000,40.000000,2108.000000,526.000000,1922.000000,544.000000,3.163000,137800.000000 +-118.340000,33.970000,45.000000,2230.000000,364.000000,949.000000,344.000000,5.500000,188200.000000 +-118.340000,33.950000,33.000000,1923.000000,459.000000,1412.000000,361.000000,5.435900,194100.000000 +-118.340000,33.940000,37.000000,3107.000000,903.000000,3456.000000,734.000000,2.182000,147500.000000 +-118.340000,33.930000,37.000000,1638.000000,407.000000,1341.000000,369.000000,3.067700,167700.000000 +-118.340000,33.930000,36.000000,1528.000000,486.000000,1824.000000,470.000000,2.267900,153900.000000 +-118.340000,33.930000,35.000000,1213.000000,284.000000,742.000000,253.000000,4.062500,159900.000000 +-118.340000,33.930000,33.000000,4294.000000,1224.000000,4512.000000,1189.000000,2.830400,143700.000000 +-118.340000,33.930000,32.000000,1254.000000,399.000000,1281.000000,386.000000,2.297600,155700.000000 +-118.340000,33.920000,6.000000,1047.000000,271.000000,740.000000,248.000000,3.425000,193800.000000 +-118.340000,33.920000,29.000000,1475.000000,349.000000,965.000000,370.000000,3.355800,199600.000000 +-118.340000,33.910000,8.000000,3937.000000,1404.000000,2691.000000,1142.000000,2.474100,185700.000000 +-118.340000,33.910000,17.000000,3724.000000,1023.000000,2536.000000,971.000000,3.264900,202100.000000 +-118.340000,33.910000,12.000000,9975.000000,3638.000000,7429.000000,3405.000000,2.668900,192300.000000 +-118.340000,33.900000,37.000000,542.000000,105.000000,355.000000,118.000000,5.513300,227300.000000 
+-118.340000,33.900000,36.000000,1342.000000,259.000000,706.000000,261.000000,4.177600,236600.000000 +-118.340000,33.900000,36.000000,1158.000000,219.000000,628.000000,253.000000,4.742600,242700.000000 +-118.340000,33.900000,23.000000,2395.000000,498.000000,1309.000000,493.000000,4.977900,224600.000000 +-118.340000,33.890000,36.000000,2392.000000,444.000000,1346.000000,445.000000,6.008800,245900.000000 +-118.340000,33.890000,36.000000,2274.000000,411.000000,1232.000000,423.000000,5.373000,244500.000000 +-118.340000,33.880000,42.000000,725.000000,183.000000,493.000000,172.000000,3.258900,233300.000000 +-118.340000,33.880000,31.000000,3122.000000,727.000000,1885.000000,715.000000,3.865700,298400.000000 +-118.340000,33.870000,34.000000,1069.000000,217.000000,601.000000,212.000000,4.640600,255900.000000 +-118.340000,33.870000,28.000000,4605.000000,1188.000000,2558.000000,1093.000000,3.698800,266600.000000 +-118.340000,33.870000,28.000000,2948.000000,566.000000,1445.000000,524.000000,5.374300,286500.000000 +-118.340000,33.860000,36.000000,2223.000000,360.000000,1162.000000,376.000000,5.259000,279400.000000 +-118.340000,33.860000,35.000000,1936.000000,343.000000,1008.000000,346.000000,5.479100,285900.000000 +-118.340000,33.840000,36.000000,1561.000000,252.000000,740.000000,253.000000,6.277800,309700.000000 +-118.340000,33.840000,36.000000,1407.000000,231.000000,676.000000,231.000000,5.269000,331900.000000 +-118.340000,33.830000,35.000000,1818.000000,353.000000,853.000000,321.000000,5.897200,350900.000000 +-118.340000,33.800000,34.000000,1730.000000,427.000000,1008.000000,393.000000,3.940800,327700.000000 +-118.340000,33.800000,33.000000,2194.000000,469.000000,987.000000,397.000000,5.095100,318900.000000 +-118.340000,33.800000,25.000000,4177.000000,832.000000,2123.000000,789.000000,5.081400,446800.000000 +-118.340000,33.790000,36.000000,716.000000,123.000000,388.000000,124.000000,5.025400,350000.000000 +-118.340000,33.780000,25.000000,11016.000000,1626.000000,4168.000000,1584.000000,8.178200,500001.000000 +-118.340000,33.760000,34.000000,5586.000000,674.000000,1871.000000,636.000000,15.000100,500001.000000 +-118.350000,34.520000,14.000000,3490.000000,592.000000,1710.000000,580.000000,5.917100,333300.000000 +-118.350000,34.320000,52.000000,102.000000,29.000000,54.000000,32.000000,1.987500,191700.000000 +-118.350000,34.280000,30.000000,3214.000000,513.000000,1700.000000,533.000000,4.694400,248200.000000 +-118.350000,34.270000,32.000000,604.000000,108.000000,314.000000,113.000000,6.203700,205400.000000 +-118.350000,34.250000,34.000000,2795.000000,460.000000,1267.000000,443.000000,6.146400,354400.000000 +-118.350000,34.220000,41.000000,1560.000000,374.000000,1668.000000,389.000000,3.025000,154300.000000 +-118.350000,34.220000,19.000000,9259.000000,1653.000000,3963.000000,1595.000000,5.997000,228700.000000 +-118.350000,34.210000,42.000000,1073.000000,220.000000,804.000000,226.000000,3.750000,172600.000000 +-118.350000,34.210000,39.000000,1470.000000,312.000000,1047.000000,284.000000,3.275000,181400.000000 +-118.350000,34.190000,45.000000,903.000000,190.000000,557.000000,204.000000,4.031300,209100.000000 +-118.350000,34.180000,46.000000,1840.000000,379.000000,866.000000,360.000000,3.305600,230400.000000 +-118.350000,34.170000,47.000000,858.000000,170.000000,365.000000,171.000000,2.038500,225000.000000 +-118.350000,34.170000,42.000000,1604.000000,326.000000,814.000000,329.000000,4.440800,216000.000000 +-118.350000,34.160000,49.000000,1305.000000,228.000000,584.000000,255.000000,5.636000,267900.000000 
+-118.350000,34.160000,45.000000,1390.000000,281.000000,538.000000,270.000000,4.221200,293800.000000 +-118.350000,34.160000,42.000000,2267.000000,478.000000,1083.000000,458.000000,3.201500,250000.000000 +-118.350000,34.150000,52.000000,1680.000000,238.000000,493.000000,211.000000,9.042000,500001.000000 +-118.350000,34.150000,35.000000,2245.000000,393.000000,783.000000,402.000000,4.154400,500001.000000 +-118.350000,34.130000,39.000000,1610.000000,278.000000,511.000000,278.000000,4.333300,385900.000000 +-118.350000,34.100000,26.000000,3977.000000,1050.000000,1720.000000,935.000000,3.358000,364500.000000 +-118.350000,34.100000,24.000000,5477.000000,1803.000000,2863.000000,1755.000000,1.845000,237500.000000 +-118.350000,34.100000,18.000000,7432.000000,2793.000000,3596.000000,2270.000000,2.803600,225000.000000 +-118.350000,34.100000,18.000000,4109.000000,1301.000000,2103.000000,1116.000000,2.325000,250000.000000 +-118.350000,34.100000,16.000000,2930.000000,1038.000000,1648.000000,980.000000,2.645800,372200.000000 +-118.350000,34.090000,47.000000,1800.000000,546.000000,921.000000,478.000000,2.802100,280600.000000 +-118.350000,34.090000,42.000000,2210.000000,643.000000,1228.000000,605.000000,2.598200,315800.000000 +-118.350000,34.090000,35.000000,2705.000000,785.000000,1526.000000,793.000000,3.034900,266700.000000 +-118.350000,34.090000,35.000000,2234.000000,689.000000,1334.000000,662.000000,2.544400,236100.000000 +-118.350000,34.090000,35.000000,1989.000000,634.000000,1108.000000,593.000000,1.608100,288900.000000 +-118.350000,34.080000,52.000000,2877.000000,721.000000,1186.000000,704.000000,3.264500,175000.000000 +-118.350000,34.080000,52.000000,2088.000000,388.000000,908.000000,375.000000,3.814100,342000.000000 +-118.350000,34.080000,52.000000,1801.000000,313.000000,714.000000,293.000000,4.683800,479000.000000 +-118.350000,34.080000,52.000000,1003.000000,200.000000,514.000000,204.000000,3.847200,395700.000000 +-118.350000,34.070000,52.000000,2497.000000,406.000000,1030.000000,412.000000,4.890000,500001.000000 +-118.350000,34.070000,52.000000,2315.000000,356.000000,894.000000,345.000000,4.132800,500001.000000 +-118.350000,34.070000,46.000000,1651.000000,410.000000,512.000000,397.000000,4.017900,350000.000000 +-118.350000,34.070000,45.000000,7803.000000,2154.000000,3359.000000,2041.000000,3.359400,287500.000000 +-118.350000,34.070000,45.000000,3312.000000,880.000000,1157.000000,809.000000,3.571900,500001.000000 +-118.350000,34.060000,52.000000,3446.000000,1360.000000,1768.000000,1245.000000,2.472200,500001.000000 +-118.350000,34.060000,52.000000,2837.000000,602.000000,1164.000000,551.000000,3.241100,250000.000000 +-118.350000,34.060000,52.000000,1368.000000,322.000000,617.000000,303.000000,5.381900,440900.000000 +-118.350000,34.060000,48.000000,3551.000000,826.000000,1601.000000,827.000000,3.227900,400000.000000 +-118.350000,34.060000,48.000000,1354.000000,279.000000,716.000000,309.000000,3.716700,385000.000000 +-118.350000,34.050000,47.000000,2815.000000,679.000000,1533.000000,594.000000,2.580600,234100.000000 +-118.350000,34.050000,46.000000,2149.000000,451.000000,905.000000,443.000000,2.884100,290800.000000 +-118.350000,34.040000,49.000000,1104.000000,266.000000,668.000000,297.000000,3.085600,151600.000000 +-118.350000,34.040000,45.000000,1839.000000,459.000000,1312.000000,460.000000,2.562500,138000.000000 +-118.350000,34.040000,45.000000,1579.000000,357.000000,713.000000,335.000000,2.171100,179200.000000 
+-118.350000,34.040000,41.000000,1617.000000,423.000000,1110.000000,375.000000,2.463500,169400.000000 +-118.350000,34.040000,36.000000,1956.000000,601.000000,1672.000000,546.000000,1.868500,150700.000000 +-118.350000,34.030000,44.000000,865.000000,208.000000,537.000000,183.000000,1.900000,110900.000000 +-118.350000,34.030000,43.000000,2122.000000,524.000000,1510.000000,436.000000,2.227300,123300.000000 +-118.350000,34.030000,42.000000,2043.000000,512.000000,1634.000000,501.000000,1.992800,125400.000000 +-118.350000,34.030000,35.000000,1438.000000,333.000000,794.000000,306.000000,1.975000,138100.000000 +-118.350000,34.020000,52.000000,427.000000,92.000000,233.000000,116.000000,3.250000,134700.000000 +-118.350000,34.020000,34.000000,5218.000000,1576.000000,3538.000000,1371.000000,1.514300,118800.000000 +-118.350000,34.020000,27.000000,3358.000000,1069.000000,2415.000000,956.000000,1.458900,87500.000000 +-118.350000,34.010000,35.000000,3776.000000,733.000000,1583.000000,749.000000,3.548600,332100.000000 +-118.350000,34.010000,33.000000,3246.000000,601.000000,1585.000000,603.000000,3.662900,353200.000000 +-118.350000,34.000000,46.000000,3402.000000,503.000000,1389.000000,504.000000,5.346200,270400.000000 +-118.350000,34.000000,30.000000,1879.000000,226.000000,740.000000,266.000000,6.431000,492500.000000 +-118.350000,33.990000,47.000000,2183.000000,380.000000,927.000000,371.000000,4.953100,180100.000000 +-118.350000,33.990000,45.000000,1764.000000,401.000000,679.000000,334.000000,3.202100,222100.000000 +-118.350000,33.980000,47.000000,2512.000000,461.000000,1082.000000,426.000000,3.823500,207600.000000 +-118.350000,33.980000,42.000000,3081.000000,680.000000,1785.000000,609.000000,3.745000,170800.000000 +-118.350000,33.980000,33.000000,1884.000000,477.000000,1518.000000,449.000000,3.119400,152800.000000 +-118.350000,33.970000,33.000000,1495.000000,474.000000,1272.000000,447.000000,2.069400,143500.000000 +-118.350000,33.970000,30.000000,1548.000000,330.000000,757.000000,349.000000,3.805600,323800.000000 +-118.350000,33.970000,26.000000,3832.000000,1074.000000,2340.000000,904.000000,2.673400,143400.000000 +-118.350000,33.970000,25.000000,1864.000000,616.000000,1710.000000,575.000000,2.230300,159400.000000 +-118.350000,33.960000,26.000000,2773.000000,681.000000,1560.000000,631.000000,3.135400,164300.000000 +-118.350000,33.960000,21.000000,2714.000000,881.000000,1549.000000,853.000000,1.209400,157500.000000 +-118.350000,33.950000,42.000000,1779.000000,431.000000,1507.000000,380.000000,2.889200,159800.000000 +-118.350000,33.950000,30.000000,2661.000000,765.000000,2324.000000,724.000000,3.051900,137500.000000 +-118.350000,33.950000,28.000000,4770.000000,1328.000000,3201.000000,1196.000000,2.681000,147700.000000 +-118.350000,33.940000,42.000000,1028.000000,278.000000,1369.000000,261.000000,3.312500,144600.000000 +-118.350000,33.940000,38.000000,1794.000000,508.000000,2188.000000,454.000000,2.665400,142200.000000 +-118.350000,33.940000,36.000000,2225.000000,601.000000,2755.000000,610.000000,2.554700,150400.000000 +-118.350000,33.940000,35.000000,1451.000000,435.000000,1888.000000,420.000000,2.846200,149100.000000 +-118.350000,33.930000,35.000000,1050.000000,252.000000,918.000000,236.000000,1.734400,146900.000000 +-118.350000,33.930000,34.000000,617.000000,189.000000,810.000000,180.000000,1.976600,162500.000000 +-118.350000,33.930000,33.000000,2040.000000,576.000000,2649.000000,561.000000,2.337500,170600.000000 
+-118.350000,33.930000,31.000000,2746.000000,697.000000,1973.000000,598.000000,3.513900,192800.000000 +-118.350000,33.930000,26.000000,3156.000000,857.000000,2394.000000,787.000000,3.010000,191900.000000 +-118.350000,33.930000,25.000000,2260.000000,692.000000,1603.000000,673.000000,2.110000,223300.000000 +-118.350000,33.910000,34.000000,2055.000000,448.000000,1134.000000,408.000000,3.825000,235400.000000 +-118.350000,33.910000,32.000000,1660.000000,366.000000,928.000000,398.000000,4.318700,269700.000000 +-118.350000,33.910000,31.000000,2583.000000,663.000000,1675.000000,612.000000,3.523400,265000.000000 +-118.350000,33.910000,29.000000,2461.000000,535.000000,1236.000000,482.000000,4.840900,244000.000000 +-118.350000,33.910000,28.000000,2108.000000,534.000000,1485.000000,536.000000,4.077500,241400.000000 +-118.350000,33.910000,26.000000,2159.000000,523.000000,1331.000000,520.000000,3.870000,264500.000000 +-118.350000,33.910000,25.000000,1884.000000,554.000000,1337.000000,549.000000,2.851200,272800.000000 +-118.350000,33.910000,19.000000,1949.000000,559.000000,1282.000000,498.000000,2.781300,231300.000000 +-118.350000,33.900000,32.000000,1056.000000,225.000000,565.000000,231.000000,3.948500,230000.000000 +-118.350000,33.900000,31.000000,981.000000,222.000000,734.000000,239.000000,4.875000,232400.000000 +-118.350000,33.900000,31.000000,1547.000000,344.000000,956.000000,287.000000,3.469800,225000.000000 +-118.350000,33.900000,25.000000,3309.000000,902.000000,2299.000000,837.000000,3.041700,237000.000000 +-118.350000,33.900000,22.000000,1127.000000,287.000000,697.000000,241.000000,3.397100,220300.000000 +-118.350000,33.900000,13.000000,2887.000000,853.000000,2197.000000,800.000000,2.877700,207900.000000 +-118.350000,33.890000,34.000000,1740.000000,387.000000,1249.000000,375.000000,4.155200,233900.000000 +-118.350000,33.890000,25.000000,1769.000000,440.000000,1371.000000,414.000000,3.083300,232700.000000 +-118.350000,33.880000,44.000000,822.000000,180.000000,480.000000,177.000000,4.400000,225800.000000 +-118.350000,33.880000,36.000000,1583.000000,411.000000,1097.000000,350.000000,4.073700,238200.000000 +-118.350000,33.880000,36.000000,1567.000000,362.000000,1054.000000,386.000000,3.259400,233900.000000 +-118.350000,33.880000,25.000000,1459.000000,362.000000,1150.000000,354.000000,3.350000,237500.000000 +-118.350000,33.870000,37.000000,1420.000000,286.000000,886.000000,290.000000,4.583300,261300.000000 +-118.350000,33.870000,34.000000,2823.000000,500.000000,1429.000000,483.000000,5.500000,279600.000000 +-118.350000,33.860000,28.000000,2075.000000,463.000000,1216.000000,446.000000,3.973200,281500.000000 +-118.350000,33.860000,24.000000,2139.000000,481.000000,971.000000,418.000000,4.385900,271300.000000 +-118.350000,33.850000,35.000000,1248.000000,206.000000,551.000000,185.000000,5.642600,348200.000000 +-118.350000,33.850000,34.000000,1770.000000,291.000000,916.000000,289.000000,5.000000,354200.000000 +-118.350000,33.840000,22.000000,13133.000000,3680.000000,7180.000000,3522.000000,3.541400,354700.000000 +-118.350000,33.830000,36.000000,1102.000000,193.000000,522.000000,172.000000,6.118700,342000.000000 +-118.350000,33.820000,28.000000,7591.000000,1710.000000,3420.000000,1635.000000,4.070800,328900.000000 +-118.350000,33.800000,19.000000,6224.000000,1105.000000,3152.000000,1076.000000,5.954100,500001.000000 +-118.350000,33.740000,25.000000,8272.000000,1132.000000,3392.000000,1132.000000,10.097300,500001.000000 
+-118.360000,34.260000,34.000000,3677.000000,573.000000,1598.000000,568.000000,6.838000,378000.000000 +-118.360000,34.230000,15.000000,2485.000000,742.000000,1994.000000,670.000000,2.833300,183200.000000 +-118.360000,34.220000,37.000000,1512.000000,348.000000,1545.000000,351.000000,3.766300,160300.000000 +-118.360000,34.190000,46.000000,1676.000000,322.000000,846.000000,295.000000,5.181400,209500.000000 +-118.360000,34.190000,11.000000,2921.000000,685.000000,1512.000000,664.000000,4.144500,176400.000000 +-118.360000,34.180000,36.000000,2233.000000,605.000000,1934.000000,599.000000,2.878400,194900.000000 +-118.360000,34.180000,34.000000,1471.000000,423.000000,995.000000,386.000000,2.958300,188700.000000 +-118.360000,34.180000,31.000000,1109.000000,354.000000,1119.000000,334.000000,2.305600,200000.000000 +-118.360000,34.170000,46.000000,1268.000000,240.000000,661.000000,239.000000,4.074200,229100.000000 +-118.360000,34.170000,44.000000,2295.000000,560.000000,1543.000000,528.000000,2.385100,194100.000000 +-118.360000,34.170000,31.000000,1939.000000,505.000000,1584.000000,466.000000,2.523400,199500.000000 +-118.360000,34.160000,43.000000,2850.000000,709.000000,1510.000000,670.000000,2.483500,274300.000000 +-118.360000,34.160000,42.000000,2304.000000,442.000000,862.000000,429.000000,4.354200,417900.000000 +-118.360000,34.160000,32.000000,2455.000000,556.000000,989.000000,493.000000,4.076400,325000.000000 +-118.360000,34.150000,34.000000,3659.000000,921.000000,1338.000000,835.000000,3.620200,366100.000000 +-118.360000,34.130000,36.000000,6871.000000,1180.000000,2216.000000,1130.000000,8.049900,495600.000000 +-118.360000,34.120000,26.000000,3902.000000,610.000000,1468.000000,632.000000,8.513600,500001.000000 +-118.360000,34.110000,35.000000,3946.000000,695.000000,1361.000000,620.000000,6.519500,500001.000000 +-118.360000,34.100000,52.000000,1295.000000,281.000000,578.000000,273.000000,2.976000,405100.000000 +-118.360000,34.100000,52.000000,1096.000000,247.000000,423.000000,230.000000,3.017900,500001.000000 +-118.360000,34.100000,36.000000,2963.000000,838.000000,1129.000000,745.000000,2.558800,500001.000000 +-118.360000,34.090000,38.000000,2158.000000,582.000000,1061.000000,577.000000,2.964300,355300.000000 +-118.360000,34.090000,36.000000,1616.000000,465.000000,773.000000,429.000000,2.600000,313600.000000 +-118.360000,34.090000,36.000000,1390.000000,458.000000,874.000000,468.000000,2.581200,200000.000000 +-118.360000,34.090000,34.000000,2832.000000,883.000000,1594.000000,843.000000,1.755800,312500.000000 +-118.360000,34.090000,33.000000,3463.000000,1170.000000,1845.000000,1134.000000,2.020500,243800.000000 +-118.360000,34.090000,30.000000,2353.000000,728.000000,1365.000000,718.000000,2.070200,283300.000000 +-118.360000,34.090000,28.000000,1111.000000,300.000000,526.000000,294.000000,2.613600,383300.000000 +-118.360000,34.080000,52.000000,2373.000000,601.000000,1135.000000,576.000000,3.176500,225000.000000 +-118.360000,34.080000,52.000000,1902.000000,488.000000,848.000000,478.000000,2.962100,175000.000000 +-118.360000,34.080000,45.000000,2195.000000,483.000000,1265.000000,455.000000,3.386400,397900.000000 +-118.360000,34.080000,40.000000,3110.000000,764.000000,1557.000000,763.000000,1.993700,367100.000000 +-118.360000,34.070000,48.000000,1740.000000,360.000000,748.000000,357.000000,4.701900,411100.000000 +-118.360000,34.070000,40.000000,1821.000000,447.000000,777.000000,441.000000,2.337500,355200.000000 
+-118.360000,34.060000,39.000000,2810.000000,670.000000,1109.000000,624.000000,3.250000,355000.000000 +-118.360000,34.050000,50.000000,3518.000000,812.000000,1724.000000,758.000000,3.083300,338100.000000 +-118.360000,34.050000,48.000000,1835.000000,380.000000,956.000000,370.000000,3.281300,243600.000000 +-118.360000,34.050000,48.000000,1825.000000,404.000000,728.000000,363.000000,3.382400,322600.000000 +-118.360000,34.050000,47.000000,1424.000000,300.000000,632.000000,278.000000,4.062500,295200.000000 +-118.360000,34.050000,45.000000,2283.000000,509.000000,1093.000000,475.000000,2.565800,252000.000000 +-118.360000,34.050000,45.000000,1879.000000,395.000000,946.000000,409.000000,3.333300,254700.000000 +-118.360000,34.050000,42.000000,1372.000000,275.000000,674.000000,271.000000,2.879300,202100.000000 +-118.360000,34.040000,49.000000,995.000000,184.000000,462.000000,194.000000,2.791700,242000.000000 +-118.360000,34.040000,48.000000,1769.000000,429.000000,993.000000,405.000000,2.321400,139400.000000 +-118.360000,34.040000,45.000000,1767.000000,417.000000,1052.000000,379.000000,3.516100,157000.000000 +-118.360000,34.040000,34.000000,3239.000000,806.000000,2331.000000,765.000000,2.053800,125800.000000 +-118.360000,34.030000,43.000000,1690.000000,379.000000,1017.000000,359.000000,2.107800,133500.000000 +-118.360000,34.030000,38.000000,2365.000000,638.000000,2259.000000,607.000000,2.087900,120700.000000 +-118.360000,34.030000,38.000000,1400.000000,376.000000,1139.000000,315.000000,2.236800,120000.000000 +-118.360000,34.030000,36.000000,1083.000000,342.000000,1023.000000,295.000000,2.132400,143800.000000 +-118.360000,34.030000,35.000000,1819.000000,499.000000,1666.000000,482.000000,1.645200,125900.000000 +-118.360000,34.020000,46.000000,3745.000000,798.000000,1502.000000,808.000000,3.864300,195800.000000 +-118.360000,34.020000,43.000000,1356.000000,333.000000,796.000000,329.000000,1.715900,189700.000000 +-118.360000,34.010000,33.000000,3140.000000,466.000000,1214.000000,464.000000,6.504400,350400.000000 +-118.360000,33.990000,45.000000,2005.000000,368.000000,909.000000,364.000000,4.640600,268900.000000 +-118.360000,33.990000,43.000000,2657.000000,548.000000,1145.000000,524.000000,4.137500,287100.000000 +-118.360000,33.990000,35.000000,3702.000000,648.000000,1449.000000,614.000000,5.319400,313700.000000 +-118.360000,33.980000,45.000000,1559.000000,305.000000,891.000000,341.000000,4.403800,259400.000000 +-118.360000,33.980000,39.000000,813.000000,185.000000,344.000000,154.000000,3.583300,218800.000000 +-118.360000,33.980000,29.000000,2861.000000,816.000000,1715.000000,775.000000,2.771200,160900.000000 +-118.360000,33.970000,19.000000,4651.000000,1281.000000,2917.000000,1121.000000,2.682300,142500.000000 +-118.360000,33.970000,18.000000,1284.000000,283.000000,990.000000,289.000000,4.017900,195800.000000 +-118.360000,33.960000,37.000000,2146.000000,573.000000,2009.000000,592.000000,3.658300,177300.000000 +-118.360000,33.960000,25.000000,1849.000000,518.000000,1498.000000,451.000000,2.837800,170000.000000 +-118.360000,33.960000,21.000000,1802.000000,556.000000,1286.000000,557.000000,2.728400,146900.000000 +-118.360000,33.960000,17.000000,3431.000000,934.000000,2365.000000,810.000000,3.039300,129200.000000 +-118.360000,33.950000,42.000000,2532.000000,627.000000,2038.000000,591.000000,2.875000,177500.000000 +-118.360000,33.950000,42.000000,1139.000000,302.000000,1283.000000,306.000000,4.163500,163900.000000 
+-118.360000,33.950000,42.000000,1116.000000,303.000000,1082.000000,299.000000,3.723700,170800.000000 +-118.360000,33.950000,26.000000,3231.000000,1089.000000,3193.000000,1020.000000,2.653500,177200.000000 +-118.360000,33.940000,39.000000,1390.000000,410.000000,1666.000000,371.000000,3.305600,156800.000000 +-118.360000,33.940000,38.000000,2169.000000,688.000000,3036.000000,639.000000,2.312500,148500.000000 +-118.360000,33.940000,33.000000,939.000000,284.000000,1309.000000,250.000000,3.406300,152300.000000 +-118.360000,33.930000,44.000000,520.000000,116.000000,392.000000,106.000000,3.013200,202500.000000 +-118.360000,33.930000,30.000000,1132.000000,347.000000,1433.000000,341.000000,2.680000,170000.000000 +-118.360000,33.930000,27.000000,4445.000000,1231.000000,3340.000000,1113.000000,3.165600,204500.000000 +-118.360000,33.920000,46.000000,1231.000000,231.000000,793.000000,256.000000,4.102300,226800.000000 +-118.360000,33.920000,26.000000,3695.000000,1144.000000,2308.000000,1009.000000,2.666700,229300.000000 +-118.360000,33.920000,19.000000,2807.000000,883.000000,1546.000000,815.000000,2.637500,233800.000000 +-118.360000,33.910000,42.000000,1949.000000,422.000000,1184.000000,423.000000,4.333300,225600.000000 +-118.360000,33.910000,41.000000,2048.000000,439.000000,1191.000000,429.000000,3.800000,222500.000000 +-118.360000,33.910000,36.000000,2064.000000,474.000000,1366.000000,421.000000,4.100000,243100.000000 +-118.360000,33.900000,42.000000,1935.000000,388.000000,1136.000000,379.000000,4.740000,230000.000000 +-118.360000,33.900000,41.000000,1355.000000,349.000000,655.000000,329.000000,2.955100,205000.000000 +-118.360000,33.900000,39.000000,1166.000000,222.000000,640.000000,206.000000,3.531300,230400.000000 +-118.360000,33.900000,18.000000,3380.000000,922.000000,2276.000000,854.000000,4.072700,214000.000000 +-118.360000,33.890000,40.000000,756.000000,122.000000,371.000000,130.000000,5.029900,329200.000000 +-118.360000,33.890000,37.000000,1719.000000,426.000000,1266.000000,424.000000,3.375000,228000.000000 +-118.360000,33.890000,34.000000,760.000000,174.000000,723.000000,198.000000,5.316900,227600.000000 +-118.360000,33.880000,44.000000,1362.000000,237.000000,709.000000,247.000000,4.427100,336200.000000 +-118.360000,33.880000,33.000000,2408.000000,534.000000,1644.000000,523.000000,4.245400,236800.000000 +-118.360000,33.880000,31.000000,2518.000000,543.000000,1107.000000,508.000000,4.740400,295800.000000 +-118.360000,33.880000,28.000000,1313.000000,319.000000,827.000000,308.000000,2.650000,260800.000000 +-118.360000,33.880000,26.000000,1375.000000,286.000000,829.000000,278.000000,3.984400,230700.000000 +-118.360000,33.880000,25.000000,2845.000000,710.000000,1611.000000,628.000000,3.204900,267400.000000 +-118.360000,33.870000,22.000000,2114.000000,541.000000,1300.000000,538.000000,3.420800,290000.000000 +-118.360000,33.870000,19.000000,2512.000000,575.000000,1275.000000,544.000000,4.937500,293000.000000 +-118.360000,33.870000,17.000000,1082.000000,291.000000,598.000000,281.000000,3.986800,281900.000000 +-118.360000,33.860000,37.000000,1768.000000,314.000000,802.000000,290.000000,5.078400,295900.000000 +-118.360000,33.860000,37.000000,1249.000000,218.000000,583.000000,214.000000,5.742200,330700.000000 +-118.360000,33.860000,36.000000,681.000000,122.000000,360.000000,128.000000,5.279900,332600.000000 +-118.360000,33.860000,35.000000,2126.000000,434.000000,1044.000000,433.000000,5.545600,297400.000000 
+-118.360000,33.860000,34.000000,1865.000000,345.000000,963.000000,302.000000,5.543000,305900.000000 +-118.360000,33.850000,36.000000,1390.000000,230.000000,683.000000,219.000000,4.890600,334400.000000 +-118.360000,33.840000,35.000000,1577.000000,279.000000,743.000000,274.000000,5.765400,343000.000000 +-118.360000,33.840000,22.000000,11016.000000,3170.000000,6664.000000,2838.000000,3.703000,361300.000000 +-118.360000,33.830000,36.000000,1660.000000,300.000000,943.000000,300.000000,5.198400,353600.000000 +-118.360000,33.830000,35.000000,2828.000000,487.000000,1439.000000,490.000000,5.601300,350200.000000 +-118.360000,33.830000,35.000000,1378.000000,247.000000,645.000000,217.000000,5.914300,343400.000000 +-118.360000,33.820000,36.000000,1784.000000,311.000000,901.000000,293.000000,6.224700,339000.000000 +-118.360000,33.820000,36.000000,1083.000000,187.000000,522.000000,187.000000,5.776500,339500.000000 +-118.360000,33.810000,34.000000,2211.000000,502.000000,1113.000000,488.000000,4.702600,356800.000000 +-118.360000,33.810000,25.000000,9042.000000,2022.000000,4458.000000,1944.000000,4.559200,378800.000000 +-118.360000,33.800000,34.000000,2629.000000,369.000000,966.000000,375.000000,10.124100,500001.000000 +-118.360000,33.790000,34.000000,5166.000000,704.000000,2071.000000,668.000000,8.360900,500001.000000 +-118.370000,36.190000,10.000000,443.000000,111.000000,48.000000,21.000000,3.125000,71300.000000 +-118.370000,34.430000,11.000000,17339.000000,2866.000000,8721.000000,2803.000000,5.950700,225200.000000 +-118.370000,34.240000,40.000000,1283.000000,246.000000,594.000000,236.000000,4.112100,229200.000000 +-118.370000,34.230000,32.000000,1444.000000,317.000000,1177.000000,311.000000,3.600000,164600.000000 +-118.370000,34.220000,17.000000,1787.000000,463.000000,1671.000000,448.000000,3.552100,151500.000000 +-118.370000,34.220000,11.000000,2127.000000,581.000000,1989.000000,530.000000,2.902800,174100.000000 +-118.370000,34.210000,36.000000,2080.000000,455.000000,1939.000000,484.000000,4.287500,176600.000000 +-118.370000,34.210000,36.000000,1392.000000,326.000000,1181.000000,303.000000,3.156300,176400.000000 +-118.370000,34.210000,34.000000,2272.000000,558.000000,2164.000000,484.000000,3.714300,175700.000000 +-118.370000,34.200000,33.000000,1438.000000,309.000000,1378.000000,306.000000,2.891700,170400.000000 +-118.370000,34.190000,19.000000,2890.000000,821.000000,2203.000000,705.000000,2.669600,185100.000000 +-118.370000,34.180000,42.000000,1140.000000,300.000000,643.000000,252.000000,3.395800,178400.000000 +-118.370000,34.180000,36.000000,1608.000000,373.000000,1217.000000,374.000000,2.972800,190200.000000 +-118.370000,34.180000,35.000000,2949.000000,794.000000,2106.000000,746.000000,2.922800,177300.000000 +-118.370000,34.180000,33.000000,1829.000000,512.000000,1345.000000,500.000000,3.162900,198900.000000 +-118.370000,34.170000,6.000000,854.000000,350.000000,542.000000,321.000000,0.819800,325000.000000 +-118.370000,34.170000,42.000000,600.000000,171.000000,377.000000,181.000000,2.410700,184400.000000 +-118.370000,34.170000,42.000000,1713.000000,416.000000,1349.000000,427.000000,3.259600,191800.000000 +-118.370000,34.170000,15.000000,3327.000000,1011.000000,2683.000000,857.000000,2.378400,185400.000000 +-118.370000,34.170000,10.000000,1431.000000,473.000000,1438.000000,429.000000,2.275600,221400.000000 +-118.370000,34.160000,6.000000,6526.000000,2007.000000,3298.000000,1790.000000,2.723100,250000.000000 
+-118.370000,34.160000,25.000000,2450.000000,618.000000,1054.000000,578.000000,3.637500,262500.000000 +-118.370000,34.160000,17.000000,4150.000000,1148.000000,1808.000000,1041.000000,3.505100,232400.000000 +-118.370000,34.160000,10.000000,2606.000000,748.000000,1373.000000,680.000000,3.612800,225000.000000 +-118.370000,34.150000,23.000000,4604.000000,1319.000000,2391.000000,1227.000000,3.137300,263100.000000 +-118.370000,34.140000,8.000000,4382.000000,1560.000000,2138.000000,1411.000000,3.571400,197900.000000 +-118.370000,34.140000,23.000000,1883.000000,512.000000,774.000000,478.000000,3.509600,396400.000000 +-118.370000,34.140000,21.000000,4670.000000,1161.000000,1914.000000,1094.000000,3.798600,367700.000000 +-118.370000,34.130000,28.000000,4287.000000,627.000000,1498.000000,615.000000,8.567700,500001.000000 +-118.370000,34.120000,34.000000,2821.000000,399.000000,843.000000,391.000000,11.615000,500001.000000 +-118.370000,34.110000,42.000000,5518.000000,979.000000,1863.000000,957.000000,8.584200,500001.000000 +-118.370000,34.090000,38.000000,4408.000000,1295.000000,1690.000000,1229.000000,3.015600,300000.000000 +-118.370000,34.090000,38.000000,1349.000000,344.000000,547.000000,309.000000,3.215900,383300.000000 +-118.370000,34.090000,33.000000,3180.000000,865.000000,1347.000000,841.000000,4.065100,500001.000000 +-118.370000,34.090000,31.000000,6348.000000,1827.000000,2559.000000,1755.000000,3.281800,225000.000000 +-118.370000,34.090000,31.000000,2697.000000,706.000000,1059.000000,689.000000,2.894200,500001.000000 +-118.370000,34.090000,24.000000,630.000000,172.000000,257.000000,147.000000,5.522400,400000.000000 +-118.370000,34.090000,22.000000,4247.000000,1253.000000,1766.000000,1170.000000,3.151700,341700.000000 +-118.370000,34.080000,52.000000,1466.000000,254.000000,600.000000,253.000000,5.752400,393600.000000 +-118.370000,34.080000,52.000000,1234.000000,223.000000,543.000000,213.000000,6.033800,423700.000000 +-118.370000,34.080000,28.000000,4376.000000,1202.000000,1847.000000,1128.000000,2.671300,312500.000000 +-118.370000,34.080000,22.000000,3008.000000,938.000000,1224.000000,816.000000,3.214900,300000.000000 +-118.370000,34.070000,52.000000,2203.000000,437.000000,899.000000,384.000000,4.250000,486900.000000 +-118.370000,34.070000,52.000000,2195.000000,435.000000,884.000000,432.000000,5.240000,486400.000000 +-118.370000,34.070000,52.000000,1084.000000,247.000000,468.000000,255.000000,3.428600,474300.000000 +-118.370000,34.070000,50.000000,2519.000000,537.000000,1117.000000,516.000000,4.366700,405600.000000 +-118.370000,34.070000,44.000000,2703.000000,663.000000,1045.000000,619.000000,3.201000,287500.000000 +-118.370000,34.070000,39.000000,2309.000000,526.000000,870.000000,546.000000,3.167700,453400.000000 +-118.370000,34.060000,52.000000,843.000000,160.000000,333.000000,151.000000,4.519200,446000.000000 +-118.370000,34.060000,52.000000,2402.000000,506.000000,878.000000,464.000000,4.021700,500001.000000 +-118.370000,34.060000,52.000000,1608.000000,289.000000,630.000000,252.000000,5.559600,500001.000000 +-118.370000,34.050000,52.000000,2346.000000,437.000000,1121.000000,400.000000,4.058300,444300.000000 +-118.370000,34.050000,48.000000,1266.000000,234.000000,539.000000,222.000000,4.005000,275000.000000 +-118.370000,34.050000,41.000000,2369.000000,544.000000,1252.000000,522.000000,2.988300,296100.000000 +-118.370000,34.050000,35.000000,2457.000000,552.000000,1159.000000,523.000000,3.086200,345300.000000 
+-118.370000,34.040000,52.000000,1197.000000,231.000000,671.000000,219.000000,3.825000,278500.000000 +-118.370000,34.040000,43.000000,888.000000,170.000000,514.000000,161.000000,3.182700,202800.000000 +-118.370000,34.040000,43.000000,1465.000000,278.000000,727.000000,290.000000,4.078100,289400.000000 +-118.370000,34.040000,42.000000,1809.000000,424.000000,1094.000000,382.000000,2.767000,143000.000000 +-118.370000,34.040000,25.000000,542.000000,161.000000,442.000000,131.000000,2.250000,333300.000000 +-118.370000,34.030000,43.000000,1350.000000,287.000000,811.000000,307.000000,3.363600,140900.000000 +-118.370000,34.030000,41.000000,1425.000000,285.000000,838.000000,296.000000,3.973200,188400.000000 +-118.370000,34.030000,39.000000,213.000000,44.000000,138.000000,52.000000,2.125000,196400.000000 +-118.370000,34.030000,37.000000,1236.000000,309.000000,966.000000,292.000000,3.069400,122200.000000 +-118.370000,34.020000,44.000000,1944.000000,458.000000,981.000000,377.000000,2.615400,193200.000000 +-118.370000,34.020000,33.000000,2263.000000,430.000000,900.000000,382.000000,4.402800,246800.000000 +-118.370000,33.990000,36.000000,3228.000000,543.000000,1305.000000,520.000000,5.169500,397000.000000 +-118.370000,33.990000,32.000000,4018.000000,564.000000,1400.000000,568.000000,8.671800,439100.000000 +-118.370000,33.980000,39.000000,303.000000,69.000000,131.000000,73.000000,4.343800,331800.000000 +-118.370000,33.970000,41.000000,1833.000000,355.000000,847.000000,348.000000,5.726000,287800.000000 +-118.370000,33.970000,32.000000,6699.000000,1781.000000,2725.000000,1544.000000,3.388900,285700.000000 +-118.370000,33.970000,26.000000,6672.000000,1729.000000,3333.000000,1557.000000,2.964600,179800.000000 +-118.370000,33.970000,21.000000,3616.000000,1060.000000,2515.000000,945.000000,2.746400,153100.000000 +-118.370000,33.960000,26.000000,138.000000,23.000000,100.000000,20.000000,4.875000,175000.000000 +-118.370000,33.950000,35.000000,924.000000,349.000000,1376.000000,358.000000,2.229700,262500.000000 +-118.370000,33.950000,32.000000,1067.000000,286.000000,1053.000000,277.000000,2.843800,181700.000000 +-118.370000,33.940000,29.000000,2265.000000,813.000000,3425.000000,781.000000,2.367500,149400.000000 +-118.370000,33.930000,46.000000,1130.000000,201.000000,503.000000,196.000000,4.486100,246300.000000 +-118.370000,33.930000,10.000000,199.000000,41.000000,61.000000,56.000000,2.895800,245800.000000 +-118.370000,33.920000,44.000000,938.000000,181.000000,502.000000,171.000000,4.472200,218300.000000 +-118.370000,33.920000,40.000000,928.000000,187.000000,521.000000,185.000000,5.525500,242700.000000 +-118.370000,33.920000,39.000000,1073.000000,206.000000,556.000000,204.000000,4.861100,245600.000000 +-118.370000,33.920000,36.000000,1075.000000,197.000000,509.000000,197.000000,4.968800,238900.000000 +-118.370000,33.910000,41.000000,1869.000000,427.000000,1334.000000,435.000000,3.935500,227800.000000 +-118.370000,33.900000,35.000000,1651.000000,269.000000,707.000000,252.000000,5.648200,294800.000000 +-118.370000,33.900000,32.000000,332.000000,103.000000,177.000000,102.000000,3.340900,256300.000000 +-118.370000,33.890000,21.000000,2696.000000,548.000000,1142.000000,473.000000,5.609100,356800.000000 +-118.370000,33.880000,44.000000,1325.000000,245.000000,669.000000,253.000000,4.421100,324000.000000 +-118.370000,33.880000,27.000000,1688.000000,331.000000,811.000000,327.000000,4.535700,334200.000000 +-118.370000,33.880000,26.000000,2620.000000,530.000000,1282.000000,525.000000,4.482800,340700.000000 
+-118.370000,33.880000,21.000000,966.000000,172.000000,417.000000,158.000000,5.533500,342600.000000 +-118.370000,33.870000,23.000000,1829.000000,331.000000,891.000000,356.000000,6.575500,359900.000000 +-118.370000,33.870000,19.000000,757.000000,148.000000,361.000000,141.000000,6.020000,304200.000000 +-118.370000,33.870000,18.000000,2516.000000,485.000000,1128.000000,433.000000,5.011400,338600.000000 +-118.370000,33.870000,13.000000,2907.000000,726.000000,1573.000000,694.000000,3.504800,294000.000000 +-118.370000,33.860000,28.000000,2685.000000,581.000000,1243.000000,529.000000,4.119000,324000.000000 +-118.370000,33.850000,34.000000,2415.000000,404.000000,1278.000000,414.000000,6.159900,341200.000000 +-118.370000,33.850000,29.000000,3662.000000,586.000000,1626.000000,611.000000,6.397400,410000.000000 +-118.370000,33.850000,25.000000,5622.000000,998.000000,2537.000000,1009.000000,5.785000,395300.000000 +-118.370000,33.840000,32.000000,1751.000000,328.000000,819.000000,323.000000,6.710500,339000.000000 +-118.370000,33.840000,27.000000,3245.000000,605.000000,1572.000000,556.000000,5.377300,379000.000000 +-118.370000,33.830000,35.000000,1207.000000,207.000000,601.000000,213.000000,4.730800,353400.000000 +-118.370000,33.820000,39.000000,2794.000000,444.000000,1319.000000,441.000000,5.878000,387800.000000 +-118.370000,33.820000,36.000000,2463.000000,447.000000,1125.000000,424.000000,6.017600,352700.000000 +-118.370000,33.820000,36.000000,2416.000000,394.000000,1115.000000,386.000000,6.256000,366900.000000 +-118.370000,33.820000,32.000000,2815.000000,607.000000,1338.000000,609.000000,4.568700,381200.000000 +-118.370000,33.810000,36.000000,2031.000000,339.000000,817.000000,337.000000,5.127100,458300.000000 +-118.370000,33.810000,36.000000,1283.000000,209.000000,563.000000,209.000000,6.929600,500001.000000 +-118.370000,33.810000,33.000000,5057.000000,790.000000,2021.000000,748.000000,6.855300,482200.000000 +-118.370000,33.790000,36.000000,1596.000000,234.000000,654.000000,223.000000,8.206400,500001.000000 +-118.370000,33.770000,26.000000,6339.000000,876.000000,2540.000000,880.000000,10.144700,500001.000000 +-118.380000,34.300000,39.000000,1622.000000,355.000000,903.000000,314.000000,4.112500,183000.000000 +-118.380000,34.220000,20.000000,1176.000000,344.000000,864.000000,318.000000,2.375000,177700.000000 +-118.380000,34.210000,42.000000,715.000000,145.000000,730.000000,158.000000,3.800000,169500.000000 +-118.380000,34.210000,38.000000,1363.000000,395.000000,1798.000000,405.000000,2.318200,171200.000000 +-118.380000,34.210000,35.000000,1468.000000,303.000000,1295.000000,300.000000,3.770800,170600.000000 +-118.380000,34.210000,33.000000,1981.000000,484.000000,1665.000000,466.000000,3.083300,179100.000000 +-118.380000,34.200000,32.000000,993.000000,285.000000,1044.000000,248.000000,2.430600,187500.000000 +-118.380000,34.200000,23.000000,4138.000000,1171.000000,3911.000000,1068.000000,3.012500,181700.000000 +-118.380000,34.190000,42.000000,1308.000000,289.000000,950.000000,302.000000,2.737900,181500.000000 +-118.380000,34.190000,37.000000,1434.000000,394.000000,1667.000000,404.000000,2.437500,176300.000000 +-118.380000,34.190000,30.000000,977.000000,264.000000,736.000000,258.000000,1.986600,177400.000000 +-118.380000,34.180000,44.000000,901.000000,179.000000,473.000000,179.000000,3.312500,186400.000000 +-118.380000,34.180000,40.000000,2079.000000,568.000000,1396.000000,526.000000,3.006100,190800.000000 
+-118.380000,34.180000,27.000000,4834.000000,1527.000000,3847.000000,1432.000000,2.144900,165300.000000 +-118.380000,34.180000,24.000000,1983.000000,651.000000,2251.000000,574.000000,2.479200,200000.000000 +-118.380000,34.170000,33.000000,1588.000000,454.000000,739.000000,392.000000,2.820800,238500.000000 +-118.380000,34.160000,46.000000,2609.000000,593.000000,1055.000000,585.000000,3.317700,309400.000000 +-118.380000,34.160000,42.000000,2358.000000,546.000000,1065.000000,523.000000,3.128900,320600.000000 +-118.380000,34.160000,31.000000,2197.000000,501.000000,944.000000,474.000000,3.731200,319400.000000 +-118.380000,34.150000,36.000000,2933.000000,619.000000,1115.000000,579.000000,4.303600,365900.000000 +-118.380000,34.140000,42.000000,1253.000000,225.000000,492.000000,224.000000,7.711200,386700.000000 +-118.380000,34.110000,38.000000,2601.000000,523.000000,870.000000,474.000000,7.113400,416700.000000 +-118.380000,34.100000,39.000000,3798.000000,586.000000,975.000000,525.000000,9.309200,500001.000000 +-118.380000,34.090000,24.000000,8264.000000,2437.000000,3148.000000,2274.000000,3.565900,281300.000000 +-118.380000,34.080000,48.000000,1226.000000,288.000000,370.000000,264.000000,3.937500,450000.000000 +-118.380000,34.080000,30.000000,4524.000000,1312.000000,1910.000000,1243.000000,2.888900,335300.000000 +-118.380000,34.080000,25.000000,4625.000000,1307.000000,1739.000000,1191.000000,3.398900,485000.000000 +-118.380000,34.070000,48.000000,2799.000000,596.000000,1235.000000,561.000000,4.489600,500001.000000 +-118.380000,34.070000,21.000000,3653.000000,956.000000,1510.000000,890.000000,3.557300,500001.000000 +-118.380000,34.070000,16.000000,4814.000000,1381.000000,1897.000000,1209.000000,3.372500,375000.000000 +-118.380000,34.060000,52.000000,1311.000000,217.000000,578.000000,205.000000,7.677100,500001.000000 +-118.380000,34.060000,50.000000,1509.000000,291.000000,690.000000,259.000000,6.234400,500001.000000 +-118.380000,34.060000,31.000000,4345.000000,1158.000000,1987.000000,1070.000000,2.823300,310000.000000 +-118.380000,34.060000,28.000000,2522.000000,616.000000,991.000000,574.000000,3.147500,362500.000000 +-118.380000,34.060000,25.000000,2558.000000,661.000000,1183.000000,636.000000,3.555600,500000.000000 +-118.380000,34.050000,52.000000,2053.000000,480.000000,900.000000,417.000000,3.070700,417900.000000 +-118.380000,34.050000,52.000000,1241.000000,210.000000,526.000000,214.000000,4.419100,334100.000000 +-118.380000,34.050000,52.000000,1004.000000,231.000000,590.000000,226.000000,4.240400,351000.000000 +-118.380000,34.050000,40.000000,2352.000000,598.000000,1133.000000,563.000000,3.238000,287500.000000 +-118.380000,34.040000,45.000000,767.000000,130.000000,254.000000,118.000000,6.289500,340400.000000 +-118.380000,34.040000,39.000000,2614.000000,569.000000,1665.000000,553.000000,3.406300,271600.000000 +-118.380000,34.040000,35.000000,2237.000000,592.000000,1794.000000,543.000000,2.296100,207700.000000 +-118.380000,34.040000,31.000000,2846.000000,727.000000,2120.000000,672.000000,2.722600,254200.000000 +-118.380000,34.030000,44.000000,1913.000000,441.000000,1295.000000,432.000000,3.953700,266400.000000 +-118.380000,34.030000,43.000000,912.000000,255.000000,705.000000,246.000000,2.640200,185700.000000 +-118.380000,34.030000,36.000000,2101.000000,569.000000,1756.000000,527.000000,2.934400,222100.000000 +-118.380000,34.020000,45.000000,2098.000000,486.000000,1343.000000,481.000000,3.961500,268600.000000 
+-118.380000,34.020000,31.000000,1893.000000,450.000000,819.000000,426.000000,4.307700,140600.000000 +-118.380000,34.010000,18.000000,9528.000000,2075.000000,3922.000000,1920.000000,4.761200,304100.000000 +-118.380000,33.990000,21.000000,11308.000000,3039.000000,5127.000000,2839.000000,4.627700,228300.000000 +-118.380000,33.970000,42.000000,1859.000000,337.000000,813.000000,326.000000,6.019000,294500.000000 +-118.380000,33.960000,44.000000,2395.000000,458.000000,1287.000000,450.000000,4.692300,299000.000000 +-118.380000,33.950000,35.000000,3259.000000,967.000000,2003.000000,920.000000,3.270800,400000.000000 +-118.380000,33.950000,29.000000,1821.000000,588.000000,1397.000000,523.000000,2.583300,187500.000000 +-118.380000,33.880000,39.000000,1489.000000,282.000000,743.000000,270.000000,4.861100,456100.000000 +-118.380000,33.880000,36.000000,2501.000000,443.000000,1031.000000,422.000000,4.750000,442100.000000 +-118.380000,33.880000,34.000000,1830.000000,315.000000,822.000000,307.000000,5.060200,453700.000000 +-118.380000,33.880000,33.000000,1313.000000,244.000000,561.000000,217.000000,5.299900,359400.000000 +-118.380000,33.880000,27.000000,3039.000000,606.000000,1421.000000,564.000000,5.577100,345500.000000 +-118.380000,33.870000,33.000000,1993.000000,371.000000,918.000000,361.000000,6.902100,337600.000000 +-118.380000,33.870000,27.000000,2287.000000,491.000000,1101.000000,466.000000,4.675000,316900.000000 +-118.380000,33.870000,23.000000,2387.000000,418.000000,1008.000000,415.000000,5.851800,337900.000000 +-118.380000,33.870000,21.000000,4151.000000,1018.000000,2054.000000,925.000000,4.982100,292900.000000 +-118.380000,33.870000,17.000000,2791.000000,579.000000,1467.000000,583.000000,5.741500,321900.000000 +-118.380000,33.860000,29.000000,2787.000000,475.000000,1182.000000,444.000000,6.761300,352700.000000 +-118.380000,33.860000,15.000000,1778.000000,311.000000,908.000000,330.000000,7.674000,329300.000000 +-118.380000,33.850000,31.000000,3533.000000,729.000000,1647.000000,679.000000,5.584300,384600.000000 +-118.380000,33.850000,28.000000,4430.000000,928.000000,2131.000000,885.000000,4.938400,378100.000000 +-118.380000,33.840000,25.000000,5775.000000,1149.000000,2637.000000,1117.000000,5.496800,379800.000000 +-118.380000,33.830000,40.000000,3070.000000,570.000000,1264.000000,506.000000,5.162600,432700.000000 +-118.380000,33.830000,35.000000,2152.000000,454.000000,902.000000,414.000000,4.517900,427200.000000 +-118.380000,33.830000,20.000000,2270.000000,498.000000,1070.000000,521.000000,4.461500,384800.000000 +-118.380000,33.820000,38.000000,1318.000000,237.000000,547.000000,225.000000,6.030800,416700.000000 +-118.380000,33.820000,34.000000,1822.000000,364.000000,750.000000,366.000000,5.990700,500001.000000 +-118.380000,33.810000,41.000000,1889.000000,301.000000,802.000000,278.000000,6.015000,488500.000000 +-118.380000,33.810000,39.000000,2400.000000,373.000000,877.000000,372.000000,5.736100,500001.000000 +-118.380000,33.810000,36.000000,1018.000000,148.000000,329.000000,169.000000,10.504500,500001.000000 +-118.380000,33.810000,33.000000,2349.000000,407.000000,954.000000,373.000000,6.495600,483600.000000 +-118.380000,33.810000,20.000000,1975.000000,306.000000,703.000000,292.000000,8.549100,410300.000000 +-118.380000,33.800000,36.000000,4421.000000,702.000000,1433.000000,624.000000,8.083800,500001.000000 +-118.380000,33.790000,32.000000,10445.000000,1620.000000,4474.000000,1576.000000,7.704200,500001.000000 
+-118.380000,33.770000,21.000000,11353.000000,1537.000000,4649.000000,1504.000000,9.821000,500001.000000 +-118.380000,33.750000,23.000000,8277.000000,1290.000000,3176.000000,1159.000000,7.698600,500001.000000 +-118.390000,37.370000,25.000000,3295.000000,824.000000,1477.000000,770.000000,1.832500,105800.000000 +-118.390000,34.280000,24.000000,4694.000000,820.000000,3566.000000,777.000000,4.481800,166200.000000 +-118.390000,34.220000,40.000000,712.000000,149.000000,533.000000,155.000000,3.695000,165200.000000 +-118.390000,34.220000,35.000000,1790.000000,334.000000,1277.000000,345.000000,5.081800,186800.000000 +-118.390000,34.210000,32.000000,1869.000000,441.000000,1516.000000,432.000000,3.684500,178500.000000 +-118.390000,34.210000,14.000000,2807.000000,868.000000,2729.000000,803.000000,2.666700,172400.000000 +-118.390000,34.200000,19.000000,2012.000000,732.000000,3483.000000,731.000000,2.223400,181300.000000 +-118.390000,34.200000,17.000000,2594.000000,1028.000000,3950.000000,973.000000,2.034800,177200.000000 +-118.390000,34.190000,41.000000,2000.000000,485.000000,1439.000000,461.000000,3.049100,192000.000000 +-118.390000,34.190000,25.000000,3794.000000,989.000000,2454.000000,876.000000,2.998200,204200.000000 +-118.390000,34.190000,23.000000,1875.000000,710.000000,2555.000000,657.000000,2.096800,162500.000000 +-118.390000,34.180000,44.000000,477.000000,91.000000,220.000000,112.000000,3.390600,223800.000000 +-118.390000,34.180000,42.000000,1957.000000,389.000000,985.000000,414.000000,2.932700,240200.000000 +-118.390000,34.170000,40.000000,1696.000000,372.000000,835.000000,385.000000,3.656300,222400.000000 +-118.390000,34.170000,28.000000,2790.000000,748.000000,1351.000000,697.000000,3.205200,283600.000000 +-118.390000,34.170000,26.000000,6429.000000,1611.000000,2806.000000,1491.000000,3.192900,265200.000000 +-118.390000,34.170000,26.000000,3345.000000,818.000000,1599.000000,773.000000,3.351600,241500.000000 +-118.390000,34.160000,46.000000,1582.000000,279.000000,603.000000,283.000000,5.116900,414300.000000 +-118.390000,34.160000,37.000000,1388.000000,286.000000,547.000000,258.000000,5.158400,444700.000000 +-118.390000,34.160000,20.000000,4084.000000,1062.000000,1637.000000,987.000000,3.238800,256300.000000 +-118.390000,34.150000,36.000000,2696.000000,713.000000,905.000000,659.000000,3.114600,373500.000000 +-118.390000,34.150000,29.000000,917.000000,181.000000,379.000000,183.000000,3.461200,425000.000000 +-118.390000,34.150000,29.000000,3110.000000,650.000000,1212.000000,642.000000,4.203100,394400.000000 +-118.390000,34.140000,34.000000,4624.000000,781.000000,1572.000000,719.000000,6.553300,500001.000000 +-118.390000,34.140000,19.000000,5076.000000,1034.000000,2021.000000,960.000000,5.568300,309200.000000 +-118.390000,34.090000,41.000000,730.000000,126.000000,230.000000,125.000000,4.321400,500001.000000 +-118.390000,34.090000,28.000000,2347.000000,608.000000,785.000000,548.000000,4.416700,425000.000000 +-118.390000,34.090000,27.000000,4312.000000,1214.000000,1634.000000,1097.000000,3.620700,362500.000000 +-118.390000,34.080000,52.000000,3759.000000,464.000000,1407.000000,422.000000,15.000100,500001.000000 +-118.390000,34.080000,52.000000,1244.000000,304.000000,444.000000,282.000000,3.511400,430800.000000 +-118.390000,34.080000,28.000000,833.000000,230.000000,349.000000,210.000000,3.067000,375000.000000 +-118.390000,34.070000,45.000000,3143.000000,553.000000,1153.000000,564.000000,5.776200,500001.000000 
+-118.390000,34.060000,52.000000,1213.000000,194.000000,503.000000,194.000000,8.009500,500001.000000 +-118.390000,34.060000,39.000000,3299.000000,831.000000,1649.000000,759.000000,3.329500,500001.000000 +-118.390000,34.060000,37.000000,2975.000000,705.000000,1291.000000,654.000000,5.331600,500001.000000 +-118.390000,34.050000,47.000000,1621.000000,314.000000,724.000000,311.000000,5.750900,474100.000000 +-118.390000,34.050000,42.000000,3105.000000,559.000000,1253.000000,531.000000,5.222000,500001.000000 +-118.390000,34.050000,42.000000,1467.000000,203.000000,577.000000,204.000000,6.636800,500001.000000 +-118.390000,34.050000,25.000000,2814.000000,701.000000,1139.000000,658.000000,4.015300,460000.000000 +-118.390000,34.040000,52.000000,1492.000000,277.000000,666.000000,289.000000,4.738600,340400.000000 +-118.390000,34.040000,49.000000,1230.000000,279.000000,669.000000,269.000000,3.903800,308300.000000 +-118.390000,34.040000,45.000000,2089.000000,312.000000,834.000000,305.000000,7.302800,500001.000000 +-118.390000,34.040000,44.000000,1873.000000,286.000000,635.000000,283.000000,5.595100,461300.000000 +-118.390000,34.040000,41.000000,101.000000,23.000000,85.000000,30.000000,4.125000,237500.000000 +-118.390000,34.030000,39.000000,1366.000000,375.000000,1237.000000,370.000000,3.720600,230900.000000 +-118.390000,34.030000,28.000000,1722.000000,536.000000,1161.000000,481.000000,3.222800,232500.000000 +-118.390000,34.030000,19.000000,1450.000000,509.000000,746.000000,437.000000,3.141500,55000.000000 +-118.390000,34.020000,45.000000,1577.000000,421.000000,1042.000000,375.000000,3.437500,314500.000000 +-118.390000,34.020000,38.000000,2521.000000,647.000000,1091.000000,597.000000,4.129600,322900.000000 +-118.390000,34.020000,38.000000,2447.000000,636.000000,1312.000000,574.000000,3.590900,279400.000000 +-118.390000,34.010000,35.000000,4424.000000,918.000000,2101.000000,888.000000,3.968800,355100.000000 +-118.390000,34.010000,25.000000,1101.000000,285.000000,543.000000,294.000000,2.357100,340600.000000 +-118.390000,34.000000,40.000000,1565.000000,269.000000,826.000000,268.000000,5.203500,485700.000000 +-118.390000,34.000000,35.000000,1465.000000,386.000000,1104.000000,345.000000,4.056000,339100.000000 +-118.390000,33.990000,43.000000,612.000000,135.000000,402.000000,142.000000,5.132200,314900.000000 +-118.390000,33.970000,44.000000,1097.000000,186.000000,513.000000,185.000000,6.235000,361400.000000 +-118.390000,33.970000,43.000000,2700.000000,510.000000,1146.000000,506.000000,5.133300,345900.000000 +-118.390000,33.970000,38.000000,993.000000,175.000000,374.000000,180.000000,6.267300,357200.000000 +-118.390000,33.960000,45.000000,1361.000000,252.000000,581.000000,263.000000,5.814300,340700.000000 +-118.390000,33.920000,41.000000,80.000000,20.000000,61.000000,23.000000,5.250000,247200.000000 +-118.390000,33.900000,7.000000,4314.000000,725.000000,1699.000000,718.000000,8.203700,500001.000000 +-118.390000,33.890000,40.000000,826.000000,143.000000,389.000000,147.000000,7.184500,438100.000000 +-118.390000,33.890000,38.000000,1851.000000,332.000000,750.000000,314.000000,7.335600,422700.000000 +-118.390000,33.890000,30.000000,2532.000000,464.000000,1056.000000,419.000000,6.343400,460400.000000 +-118.390000,33.880000,35.000000,1267.000000,216.000000,521.000000,191.000000,6.044100,470000.000000 +-118.390000,33.880000,34.000000,1973.000000,367.000000,843.000000,345.000000,6.077000,472700.000000 +-118.390000,33.880000,33.000000,2496.000000,387.000000,1098.000000,404.000000,7.668500,474300.000000 
+-118.390000,33.880000,31.000000,1448.000000,244.000000,607.000000,259.000000,8.151300,500001.000000 +-118.390000,33.870000,34.000000,2395.000000,469.000000,1087.000000,438.000000,5.968300,394600.000000 +-118.390000,33.870000,19.000000,3303.000000,584.000000,1329.000000,569.000000,7.521000,340400.000000 +-118.390000,33.860000,34.000000,2361.000000,442.000000,915.000000,437.000000,5.687000,392400.000000 +-118.390000,33.860000,28.000000,3619.000000,764.000000,1735.000000,789.000000,6.140400,368400.000000 +-118.390000,33.860000,24.000000,2386.000000,582.000000,1152.000000,568.000000,4.897100,400700.000000 +-118.390000,33.850000,24.000000,4373.000000,871.000000,1830.000000,824.000000,5.712800,366200.000000 +-118.390000,33.850000,17.000000,1610.000000,379.000000,670.000000,341.000000,4.359400,349000.000000 +-118.390000,33.830000,32.000000,2075.000000,539.000000,954.000000,519.000000,5.637000,500001.000000 +-118.390000,33.810000,35.000000,1008.000000,165.000000,391.000000,167.000000,3.777800,487500.000000 +-118.390000,33.710000,18.000000,1193.000000,233.000000,475.000000,228.000000,7.559400,500001.000000 +-118.400000,37.360000,34.000000,2465.000000,619.000000,1172.000000,575.000000,1.972200,116100.000000 +-118.400000,34.700000,10.000000,4122.000000,814.000000,2164.000000,710.000000,4.294100,151600.000000 +-118.400000,34.410000,22.000000,4443.000000,560.000000,1573.000000,496.000000,10.028500,500001.000000 +-118.400000,34.280000,16.000000,6573.000000,1480.000000,6161.000000,1473.000000,3.330400,154900.000000 +-118.400000,34.260000,13.000000,4379.000000,872.000000,2560.000000,853.000000,4.253800,154300.000000 +-118.400000,34.250000,13.000000,1872.000000,497.000000,1927.000000,432.000000,2.201900,134200.000000 +-118.400000,34.240000,35.000000,2552.000000,545.000000,1850.000000,503.000000,4.775000,179500.000000 +-118.400000,34.230000,37.000000,1404.000000,266.000000,889.000000,274.000000,4.004900,190000.000000 +-118.400000,34.230000,36.000000,1643.000000,349.000000,1414.000000,337.000000,4.118100,172700.000000 +-118.400000,34.220000,43.000000,1220.000000,222.000000,729.000000,230.000000,3.644200,186300.000000 +-118.400000,34.210000,45.000000,972.000000,181.000000,554.000000,187.000000,4.819400,181300.000000 +-118.400000,34.210000,30.000000,2453.000000,544.000000,1753.000000,506.000000,2.980300,191500.000000 +-118.400000,34.200000,30.000000,2392.000000,655.000000,1987.000000,609.000000,2.842400,226400.000000 +-118.400000,34.200000,13.000000,4859.000000,1293.000000,3351.000000,1200.000000,3.687500,211900.000000 +-118.400000,34.190000,37.000000,934.000000,231.000000,587.000000,230.000000,3.625000,181300.000000 +-118.400000,34.190000,35.000000,2180.000000,599.000000,1483.000000,574.000000,3.039500,191300.000000 +-118.400000,34.190000,35.000000,1631.000000,356.000000,862.000000,368.000000,3.600700,261800.000000 +-118.400000,34.180000,32.000000,3724.000000,899.000000,1912.000000,791.000000,3.571100,312700.000000 +-118.400000,34.170000,27.000000,3588.000000,911.000000,1891.000000,871.000000,3.401300,286000.000000 +-118.400000,34.170000,24.000000,6347.000000,1548.000000,2945.000000,1492.000000,3.354500,221500.000000 +-118.400000,34.170000,24.000000,4443.000000,1283.000000,2421.000000,1180.000000,2.265200,269200.000000 +-118.400000,34.160000,45.000000,1176.000000,250.000000,471.000000,228.000000,2.333300,364700.000000 +-118.400000,34.160000,35.000000,1354.000000,284.000000,501.000000,262.000000,3.805600,384700.000000 
+-118.400000,34.160000,34.000000,2638.000000,580.000000,1150.000000,551.000000,4.298900,364700.000000 +-118.400000,34.150000,44.000000,2515.000000,510.000000,967.000000,484.000000,5.075400,374500.000000 +-118.400000,34.150000,41.000000,2394.000000,500.000000,837.000000,417.000000,4.388900,380400.000000 +-118.400000,34.150000,31.000000,3881.000000,909.000000,1535.000000,846.000000,3.039800,369100.000000 +-118.400000,34.140000,52.000000,1695.000000,281.000000,595.000000,264.000000,6.067800,399300.000000 +-118.400000,34.140000,45.000000,417.000000,89.000000,187.000000,88.000000,5.137700,360700.000000 +-118.400000,34.130000,32.000000,8262.000000,1156.000000,2712.000000,1125.000000,10.557500,500001.000000 +-118.400000,34.110000,32.000000,5578.000000,753.000000,1567.000000,697.000000,15.000100,500001.000000 +-118.400000,34.100000,27.000000,3979.000000,510.000000,1351.000000,520.000000,15.000100,500001.000000 +-118.400000,34.090000,45.000000,2686.000000,283.000000,857.000000,259.000000,15.000100,500001.000000 +-118.400000,34.080000,52.000000,3815.000000,439.000000,1266.000000,413.000000,15.000100,500001.000000 +-118.400000,34.070000,22.000000,2170.000000,593.000000,850.000000,520.000000,2.910700,500001.000000 +-118.400000,34.060000,52.000000,2501.000000,362.000000,748.000000,349.000000,6.634300,500001.000000 +-118.400000,34.060000,52.000000,1871.000000,326.000000,646.000000,284.000000,8.296100,500001.000000 +-118.400000,34.060000,47.000000,3652.000000,967.000000,1438.000000,887.000000,3.696400,500001.000000 +-118.400000,34.060000,37.000000,3781.000000,873.000000,1725.000000,838.000000,4.145500,500001.000000 +-118.400000,34.050000,43.000000,1028.000000,145.000000,394.000000,149.000000,10.451900,500001.000000 +-118.400000,34.050000,34.000000,2113.000000,459.000000,859.000000,432.000000,3.695300,500001.000000 +-118.400000,34.050000,26.000000,4473.000000,923.000000,1518.000000,805.000000,5.076200,500001.000000 +-118.400000,34.040000,40.000000,2079.000000,268.000000,720.000000,282.000000,9.272000,500001.000000 +-118.400000,34.030000,36.000000,1831.000000,296.000000,871.000000,269.000000,8.148400,500001.000000 +-118.400000,34.030000,24.000000,1101.000000,318.000000,491.000000,287.000000,3.222200,319400.000000 +-118.400000,34.030000,13.000000,6152.000000,1978.000000,3397.000000,1845.000000,3.405800,275000.000000 +-118.400000,34.020000,27.000000,515.000000,201.000000,397.000000,228.000000,2.413500,184400.000000 +-118.400000,34.020000,19.000000,7297.000000,2331.000000,3870.000000,2144.000000,3.116000,300000.000000 +-118.400000,34.010000,48.000000,1427.000000,253.000000,693.000000,268.000000,5.740500,351600.000000 +-118.400000,34.010000,44.000000,1494.000000,262.000000,618.000000,266.000000,5.403500,356300.000000 +-118.400000,34.000000,44.000000,2122.000000,385.000000,1012.000000,367.000000,4.668700,344300.000000 +-118.400000,34.000000,37.000000,1534.000000,258.000000,751.000000,259.000000,5.444000,336000.000000 +-118.400000,34.000000,34.000000,1816.000000,335.000000,872.000000,339.000000,4.850000,329400.000000 +-118.400000,33.990000,36.000000,1280.000000,240.000000,704.000000,217.000000,5.963200,328100.000000 +-118.400000,33.990000,36.000000,1225.000000,213.000000,591.000000,227.000000,5.466300,326700.000000 +-118.400000,33.980000,39.000000,714.000000,118.000000,314.000000,117.000000,5.985600,432100.000000 +-118.400000,33.980000,36.000000,2526.000000,452.000000,996.000000,441.000000,5.611000,456600.000000 +-118.400000,33.970000,44.000000,2825.000000,453.000000,1221.000000,461.000000,5.954400,377200.000000 
+-118.400000,33.970000,38.000000,1089.000000,174.000000,502.000000,180.000000,7.595300,434800.000000 +-118.400000,33.970000,37.000000,1364.000000,248.000000,494.000000,242.000000,4.636400,456300.000000 +-118.400000,33.970000,35.000000,913.000000,161.000000,451.000000,172.000000,5.616900,411200.000000 +-118.400000,33.960000,44.000000,1877.000000,314.000000,877.000000,320.000000,6.819700,363600.000000 +-118.400000,33.960000,43.000000,2416.000000,454.000000,1028.000000,409.000000,5.673200,330700.000000 +-118.400000,33.930000,35.000000,2217.000000,447.000000,1000.000000,450.000000,4.731900,376100.000000 +-118.400000,33.920000,32.000000,2828.000000,629.000000,1313.000000,534.000000,4.598700,363800.000000 +-118.400000,33.920000,25.000000,1453.000000,271.000000,695.000000,283.000000,5.949900,345800.000000 +-118.400000,33.900000,38.000000,2868.000000,466.000000,1098.000000,438.000000,7.905900,477100.000000 +-118.400000,33.900000,34.000000,2674.000000,435.000000,1087.000000,431.000000,7.315100,492200.000000 +-118.400000,33.890000,36.000000,2334.000000,430.000000,1033.000000,407.000000,6.632100,481500.000000 +-118.400000,33.890000,36.000000,2127.000000,314.000000,807.000000,306.000000,8.159600,500001.000000 +-118.400000,33.890000,31.000000,2926.000000,492.000000,1149.000000,476.000000,7.961100,500001.000000 +-118.400000,33.880000,42.000000,1516.000000,341.000000,634.000000,327.000000,6.235600,472700.000000 +-118.400000,33.880000,36.000000,3022.000000,482.000000,1278.000000,494.000000,7.265100,500001.000000 +-118.400000,33.880000,36.000000,1543.000000,214.000000,474.000000,187.000000,9.339900,500001.000000 +-118.400000,33.880000,35.000000,1753.000000,296.000000,615.000000,275.000000,7.500000,500001.000000 +-118.400000,33.880000,35.000000,1060.000000,191.000000,444.000000,196.000000,8.001500,500001.000000 +-118.400000,33.870000,45.000000,2093.000000,497.000000,842.000000,472.000000,6.323100,500001.000000 +-118.400000,33.870000,40.000000,1679.000000,372.000000,719.000000,385.000000,6.435000,479500.000000 +-118.400000,33.870000,38.000000,2398.000000,431.000000,911.000000,392.000000,5.231900,500001.000000 +-118.400000,33.870000,34.000000,3145.000000,786.000000,1352.000000,727.000000,5.097600,469800.000000 +-118.400000,33.860000,41.000000,2237.000000,597.000000,938.000000,523.000000,4.710500,500001.000000 +-118.400000,33.860000,18.000000,5152.000000,1365.000000,2286.000000,1243.000000,5.167700,380800.000000 +-118.400000,33.850000,29.000000,2085.000000,533.000000,919.000000,489.000000,5.601700,430000.000000 +-118.400000,33.780000,26.000000,5005.000000,776.000000,2357.000000,790.000000,8.542100,500001.000000 +-118.400000,33.780000,24.000000,4787.000000,562.000000,1653.000000,548.000000,12.975800,500001.000000 +-118.410000,35.630000,15.000000,5907.000000,1257.000000,2310.000000,1001.000000,2.312500,96900.000000 +-118.410000,34.320000,18.000000,6572.000000,1105.000000,3473.000000,1067.000000,5.298700,203400.000000 +-118.410000,34.300000,28.000000,3187.000000,569.000000,2205.000000,559.000000,5.166800,187400.000000 +-118.410000,34.290000,35.000000,1008.000000,204.000000,1162.000000,215.000000,3.350000,147600.000000 +-118.410000,34.290000,32.000000,1591.000000,320.000000,1818.000000,306.000000,4.296900,145800.000000 +-118.410000,34.280000,32.000000,2574.000000,531.000000,2609.000000,472.000000,3.756600,146700.000000 +-118.410000,34.270000,38.000000,858.000000,203.000000,1250.000000,204.000000,2.921900,137900.000000 
+-118.410000,34.260000,38.000000,870.000000,205.000000,1065.000000,222.000000,2.531300,136100.000000 +-118.410000,34.250000,36.000000,1146.000000,259.000000,1173.000000,272.000000,3.601600,153800.000000 +-118.410000,34.250000,19.000000,280.000000,84.000000,483.000000,87.000000,1.950000,137500.000000 +-118.410000,34.250000,18.000000,3447.000000,857.000000,3663.000000,817.000000,3.228400,157100.000000 +-118.410000,34.240000,38.000000,490.000000,101.000000,402.000000,100.000000,3.125000,175900.000000 +-118.410000,34.230000,35.000000,1026.000000,195.000000,753.000000,185.000000,4.590900,179200.000000 +-118.410000,34.210000,35.000000,2830.000000,518.000000,1577.000000,524.000000,5.350000,210500.000000 +-118.410000,34.200000,32.000000,2734.000000,654.000000,2209.000000,610.000000,3.516400,217200.000000 +-118.410000,34.190000,45.000000,1106.000000,225.000000,595.000000,228.000000,3.662500,190700.000000 +-118.410000,34.190000,37.000000,1993.000000,425.000000,939.000000,400.000000,2.802100,224600.000000 +-118.410000,34.180000,43.000000,1840.000000,356.000000,966.000000,323.000000,4.717100,237900.000000 +-118.410000,34.180000,35.000000,2785.000000,663.000000,1631.000000,614.000000,3.903800,276100.000000 +-118.410000,34.180000,30.000000,2008.000000,513.000000,1052.000000,496.000000,3.011900,262200.000000 +-118.410000,34.170000,27.000000,3277.000000,648.000000,1382.000000,615.000000,3.875000,366100.000000 +-118.410000,34.160000,32.000000,3060.000000,505.000000,1159.000000,510.000000,6.370300,465800.000000 +-118.410000,34.160000,14.000000,577.000000,150.000000,372.000000,130.000000,4.187500,275000.000000 +-118.410000,34.150000,46.000000,1628.000000,259.000000,500.000000,258.000000,6.083000,424000.000000 +-118.410000,34.150000,33.000000,4032.000000,868.000000,1695.000000,869.000000,4.346800,425900.000000 +-118.410000,34.150000,24.000000,3891.000000,866.000000,1568.000000,830.000000,4.165600,364700.000000 +-118.410000,34.070000,52.000000,3562.000000,394.000000,1163.000000,361.000000,15.000100,500001.000000 +-118.410000,34.070000,47.000000,2979.000000,626.000000,1076.000000,571.000000,3.990400,500001.000000 +-118.410000,34.060000,43.000000,4994.000000,1057.000000,1830.000000,969.000000,5.532100,500001.000000 +-118.410000,34.060000,43.000000,2665.000000,556.000000,1015.000000,506.000000,4.141100,500001.000000 +-118.410000,34.050000,16.000000,9728.000000,2211.000000,3026.000000,1899.000000,5.875800,500001.000000 +-118.410000,34.040000,52.000000,2113.000000,332.000000,800.000000,327.000000,11.176800,500001.000000 +-118.410000,34.040000,52.000000,1907.000000,261.000000,681.000000,249.000000,10.980500,500001.000000 +-118.410000,34.040000,49.000000,601.000000,95.000000,228.000000,106.000000,8.023900,500001.000000 +-118.410000,34.030000,33.000000,1730.000000,386.000000,994.000000,363.000000,3.727700,500001.000000 +-118.410000,34.030000,26.000000,4376.000000,1394.000000,2435.000000,1250.000000,2.841800,327300.000000 +-118.410000,34.030000,24.000000,3711.000000,1192.000000,1764.000000,1147.000000,3.164200,366700.000000 +-118.410000,34.030000,20.000000,4374.000000,1311.000000,2165.000000,1185.000000,3.601900,463600.000000 +-118.410000,34.020000,35.000000,1728.000000,442.000000,1161.000000,420.000000,3.725000,310000.000000 +-118.410000,34.020000,34.000000,1430.000000,357.000000,805.000000,362.000000,3.346200,307000.000000 +-118.410000,34.020000,27.000000,2224.000000,618.000000,1594.000000,625.000000,3.083300,315500.000000 
+-118.410000,34.020000,19.000000,4702.000000,1472.000000,2636.000000,1334.000000,3.395500,225000.000000 +-118.410000,34.020000,16.000000,5825.000000,1866.000000,3390.000000,1752.000000,3.096500,320000.000000 +-118.410000,34.010000,44.000000,2010.000000,394.000000,961.000000,365.000000,4.598200,333500.000000 +-118.410000,34.010000,43.000000,2000.000000,529.000000,1290.000000,514.000000,4.703100,302500.000000 +-118.410000,34.010000,33.000000,3306.000000,974.000000,2475.000000,924.000000,2.879700,285300.000000 +-118.410000,34.010000,26.000000,2503.000000,449.000000,1218.000000,426.000000,5.368300,500001.000000 +-118.410000,34.000000,37.000000,1426.000000,259.000000,689.000000,261.000000,5.528400,331000.000000 +-118.410000,34.000000,35.000000,684.000000,161.000000,381.000000,159.000000,2.839300,272000.000000 +-118.410000,34.000000,30.000000,3550.000000,934.000000,3738.000000,880.000000,3.191000,271200.000000 +-118.410000,34.000000,18.000000,1307.000000,441.000000,884.000000,456.000000,2.933800,276300.000000 +-118.410000,33.970000,44.000000,2298.000000,388.000000,849.000000,360.000000,5.594100,363500.000000 +-118.410000,33.970000,43.000000,1464.000000,224.000000,581.000000,232.000000,6.202200,365900.000000 +-118.410000,33.970000,37.000000,1629.000000,275.000000,668.000000,266.000000,6.133300,387200.000000 +-118.410000,33.960000,44.000000,1802.000000,306.000000,753.000000,282.000000,6.028600,356000.000000 +-118.410000,33.960000,15.000000,412.000000,128.000000,310.000000,137.000000,3.979200,266700.000000 +-118.410000,33.940000,45.000000,2038.000000,394.000000,1086.000000,387.000000,4.737500,289600.000000 +-118.410000,33.930000,38.000000,3328.000000,625.000000,1455.000000,619.000000,5.059600,363900.000000 +-118.410000,33.930000,22.000000,2514.000000,605.000000,1225.000000,568.000000,4.181800,339700.000000 +-118.410000,33.920000,38.000000,1437.000000,272.000000,590.000000,250.000000,5.233800,358000.000000 +-118.410000,33.920000,32.000000,2590.000000,607.000000,1132.000000,555.000000,4.233300,358000.000000 +-118.410000,33.920000,29.000000,1436.000000,401.000000,674.000000,343.000000,3.638900,275000.000000 +-118.410000,33.920000,22.000000,2340.000000,584.000000,1141.000000,554.000000,4.572900,337500.000000 +-118.410000,33.900000,39.000000,2311.000000,404.000000,1044.000000,380.000000,8.468000,472100.000000 +-118.410000,33.900000,39.000000,2040.000000,336.000000,926.000000,351.000000,7.555200,500001.000000 +-118.410000,33.890000,38.000000,4166.000000,828.000000,1600.000000,770.000000,6.386100,500001.000000 +-118.410000,33.890000,35.000000,1194.000000,292.000000,507.000000,295.000000,9.081200,500001.000000 +-118.410000,33.890000,34.000000,2959.000000,639.000000,1143.000000,593.000000,6.348000,500001.000000 +-118.410000,33.890000,31.000000,702.000000,161.000000,236.000000,144.000000,5.049700,500001.000000 +-118.410000,33.890000,31.000000,1428.000000,320.000000,677.000000,331.000000,7.231600,500001.000000 +-118.410000,33.880000,43.000000,2492.000000,449.000000,1033.000000,437.000000,7.961400,500001.000000 +-118.410000,33.880000,34.000000,540.000000,107.000000,213.000000,104.000000,6.340300,500001.000000 +-118.410000,33.880000,34.000000,1471.000000,308.000000,498.000000,264.000000,7.084200,500001.000000 +-118.410000,33.770000,22.000000,7554.000000,991.000000,2808.000000,946.000000,10.060000,500001.000000 +-118.410000,33.750000,4.000000,311.000000,51.000000,128.000000,46.000000,9.809100,500001.000000 +-118.420000,37.360000,18.000000,2281.000000,520.000000,1425.000000,465.000000,1.738800,54400.000000 
+-118.420000,37.350000,21.000000,3302.000000,557.000000,1413.000000,520.000000,4.375000,180400.000000 +-118.420000,34.320000,30.000000,3027.000000,604.000000,1970.000000,590.000000,4.340900,156000.000000 +-118.420000,34.300000,29.000000,3334.000000,712.000000,2919.000000,718.000000,3.654800,180300.000000 +-118.420000,34.290000,34.000000,1489.000000,326.000000,1389.000000,313.000000,3.482100,160300.000000 +-118.420000,34.280000,35.000000,822.000000,200.000000,1197.000000,203.000000,3.286500,133300.000000 +-118.420000,34.280000,34.000000,1999.000000,427.000000,2391.000000,439.000000,2.800000,144300.000000 +-118.420000,34.280000,29.000000,1271.000000,272.000000,1338.000000,266.000000,4.125000,150000.000000 +-118.420000,34.270000,37.000000,1024.000000,246.000000,1371.000000,239.000000,3.225000,147500.000000 +-118.420000,34.270000,35.000000,674.000000,153.000000,808.000000,173.000000,2.666700,147800.000000 +-118.420000,34.270000,33.000000,1209.000000,341.000000,1097.000000,283.000000,1.629500,134300.000000 +-118.420000,34.260000,36.000000,973.000000,221.000000,1086.000000,218.000000,3.451900,143300.000000 +-118.420000,34.250000,36.000000,1430.000000,331.000000,1502.000000,312.000000,3.629200,145200.000000 +-118.420000,34.240000,36.000000,1181.000000,220.000000,775.000000,218.000000,4.722800,183800.000000 +-118.420000,34.240000,35.000000,1507.000000,281.000000,1025.000000,286.000000,4.583300,177200.000000 +-118.420000,34.240000,17.000000,2049.000000,548.000000,2243.000000,541.000000,2.525000,163700.000000 +-118.420000,34.230000,34.000000,1550.000000,279.000000,1011.000000,288.000000,4.537500,189000.000000 +-118.420000,34.230000,33.000000,2478.000000,457.000000,1567.000000,446.000000,5.662900,186700.000000 +-118.420000,34.220000,34.000000,3004.000000,589.000000,1938.000000,568.000000,4.185700,198600.000000 +-118.420000,34.220000,29.000000,1807.000000,323.000000,1234.000000,310.000000,5.376700,233000.000000 +-118.420000,34.210000,29.000000,2893.000000,543.000000,1636.000000,540.000000,5.158600,237400.000000 +-118.420000,34.200000,34.000000,161.000000,48.000000,66.000000,33.000000,1.000000,187500.000000 +-118.420000,34.200000,27.000000,3201.000000,970.000000,3403.000000,948.000000,2.237700,231700.000000 +-118.420000,34.200000,24.000000,3148.000000,908.000000,2850.000000,839.000000,1.954900,221500.000000 +-118.420000,34.190000,34.000000,2622.000000,572.000000,1997.000000,573.000000,3.338000,222500.000000 +-118.420000,34.190000,33.000000,3353.000000,790.000000,2318.000000,775.000000,2.258900,269700.000000 +-118.420000,34.190000,33.000000,3285.000000,830.000000,2281.000000,786.000000,2.616500,230800.000000 +-118.420000,34.180000,40.000000,1013.000000,150.000000,449.000000,166.000000,5.714300,382400.000000 +-118.420000,34.180000,31.000000,2887.000000,646.000000,1626.000000,637.000000,3.674500,335500.000000 +-118.420000,34.180000,30.000000,1323.000000,353.000000,856.000000,333.000000,3.359400,202200.000000 +-118.420000,34.180000,27.000000,3760.000000,880.000000,2022.000000,812.000000,3.155100,225600.000000 +-118.420000,34.170000,31.000000,2235.000000,363.000000,914.000000,370.000000,6.135900,359700.000000 +-118.420000,34.160000,46.000000,54.000000,9.000000,20.000000,6.000000,0.536000,375000.000000 +-118.420000,34.160000,28.000000,4664.000000,1040.000000,1963.000000,961.000000,3.902800,367900.000000 +-118.420000,34.160000,25.000000,2769.000000,566.000000,1201.000000,545.000000,3.664100,386100.000000 +-118.420000,34.160000,17.000000,277.000000,70.000000,119.000000,59.000000,4.020800,341700.000000 
+-118.420000,34.150000,48.000000,680.000000,131.000000,268.000000,126.000000,4.615000,371400.000000 +-118.420000,34.150000,31.000000,1861.000000,430.000000,736.000000,360.000000,5.285300,355900.000000 +-118.420000,34.150000,18.000000,1880.000000,420.000000,681.000000,333.000000,4.321400,372300.000000 +-118.420000,34.140000,27.000000,3990.000000,892.000000,1417.000000,800.000000,4.043900,500001.000000 +-118.420000,34.130000,38.000000,3830.000000,518.000000,1292.000000,516.000000,12.782300,500001.000000 +-118.420000,34.080000,48.000000,2413.000000,261.000000,770.000000,248.000000,15.000100,500001.000000 +-118.420000,34.080000,46.000000,1399.000000,148.000000,410.000000,152.000000,15.000100,500001.000000 +-118.420000,34.060000,52.000000,1881.000000,334.000000,640.000000,321.000000,6.871000,500001.000000 +-118.420000,34.060000,44.000000,533.000000,90.000000,291.000000,97.000000,10.804500,500001.000000 +-118.420000,34.060000,40.000000,2933.000000,565.000000,1077.000000,536.000000,6.152700,500001.000000 +-118.420000,34.050000,38.000000,4888.000000,1126.000000,1698.000000,937.000000,4.830400,500001.000000 +-118.420000,34.050000,33.000000,2921.000000,652.000000,1124.000000,608.000000,5.015100,500001.000000 +-118.420000,34.040000,52.000000,1358.000000,272.000000,574.000000,267.000000,5.645400,500001.000000 +-118.420000,34.040000,51.000000,1975.000000,348.000000,771.000000,357.000000,6.626000,500001.000000 +-118.420000,34.040000,46.000000,1508.000000,276.000000,639.000000,273.000000,4.925000,409800.000000 +-118.420000,34.030000,45.000000,1262.000000,223.000000,637.000000,221.000000,5.086600,427300.000000 +-118.420000,34.030000,44.000000,904.000000,176.000000,358.000000,158.000000,3.354200,344200.000000 +-118.420000,34.030000,44.000000,629.000000,131.000000,326.000000,156.000000,4.527800,374300.000000 +-118.420000,34.020000,34.000000,2995.000000,942.000000,2626.000000,947.000000,2.240200,450000.000000 +-118.420000,34.020000,22.000000,3292.000000,1134.000000,1655.000000,898.000000,3.174600,348800.000000 +-118.420000,34.020000,21.000000,3244.000000,815.000000,1423.000000,781.000000,3.648800,340800.000000 +-118.420000,34.010000,42.000000,1700.000000,438.000000,997.000000,436.000000,2.921300,305000.000000 +-118.420000,34.010000,42.000000,1594.000000,369.000000,952.000000,362.000000,3.099000,335400.000000 +-118.420000,34.010000,33.000000,2731.000000,535.000000,1280.000000,510.000000,4.708300,420100.000000 +-118.420000,34.010000,32.000000,1300.000000,356.000000,703.000000,311.000000,3.566700,394000.000000 +-118.420000,34.010000,29.000000,1996.000000,489.000000,960.000000,449.000000,3.661100,344200.000000 +-118.420000,34.000000,45.000000,1807.000000,355.000000,883.000000,371.000000,5.035700,329800.000000 +-118.420000,34.000000,31.000000,1930.000000,456.000000,1002.000000,410.000000,3.979800,458600.000000 +-118.420000,34.000000,14.000000,3771.000000,1180.000000,2355.000000,978.000000,3.160300,287500.000000 +-118.420000,33.990000,38.000000,740.000000,171.000000,599.000000,194.000000,4.089300,248900.000000 +-118.420000,33.990000,35.000000,1724.000000,419.000000,1079.000000,394.000000,4.552100,263800.000000 +-118.420000,33.990000,35.000000,1701.000000,482.000000,1428.000000,494.000000,3.725000,284600.000000 +-118.420000,33.980000,3.000000,475.000000,155.000000,236.000000,153.000000,3.666700,450000.000000 +-118.420000,33.970000,44.000000,1462.000000,240.000000,562.000000,237.000000,4.937500,365200.000000 +-118.420000,33.960000,44.000000,1282.000000,216.000000,494.000000,209.000000,5.618000,365900.000000 
+-118.420000,33.960000,24.000000,4393.000000,966.000000,1257.000000,579.000000,5.071400,356100.000000 +-118.420000,33.930000,39.000000,2988.000000,605.000000,1466.000000,610.000000,4.928600,341400.000000 +-118.420000,33.930000,28.000000,4603.000000,993.000000,2191.000000,943.000000,4.574300,382200.000000 +-118.420000,33.920000,41.000000,1621.000000,279.000000,756.000000,277.000000,5.059400,346000.000000 +-118.420000,33.920000,25.000000,3521.000000,852.000000,1524.000000,764.000000,3.808600,361300.000000 +-118.420000,33.900000,43.000000,1394.000000,321.000000,552.000000,296.000000,5.959600,500001.000000 +-118.420000,33.900000,37.000000,1576.000000,345.000000,662.000000,340.000000,5.308000,500001.000000 +-118.420000,33.900000,29.000000,1929.000000,523.000000,686.000000,455.000000,5.534700,500001.000000 +-118.420000,33.850000,43.000000,1584.000000,477.000000,799.000000,433.000000,5.032200,435000.000000 +-118.420000,33.780000,36.000000,2093.000000,303.000000,802.000000,300.000000,8.095700,500001.000000 +-118.420000,33.750000,22.000000,17591.000000,2604.000000,6897.000000,2492.000000,8.283100,500001.000000 +-118.430000,37.400000,19.000000,2460.000000,405.000000,1225.000000,425.000000,4.157600,141500.000000 +-118.430000,34.430000,5.000000,21113.000000,4386.000000,9842.000000,3886.000000,4.203700,194600.000000 +-118.430000,34.420000,13.000000,3600.000000,580.000000,1799.000000,576.000000,6.297100,218300.000000 +-118.430000,34.330000,18.000000,5891.000000,920.000000,2882.000000,911.000000,5.901000,235600.000000 +-118.430000,34.320000,34.000000,2657.000000,515.000000,1948.000000,532.000000,4.233000,157400.000000 +-118.430000,34.300000,33.000000,2443.000000,498.000000,1601.000000,484.000000,4.022300,146000.000000 +-118.430000,34.300000,28.000000,271.000000,61.000000,246.000000,62.000000,1.706200,164600.000000 +-118.430000,34.290000,50.000000,1181.000000,265.000000,1196.000000,269.000000,3.209500,167000.000000 +-118.430000,34.290000,39.000000,1769.000000,410.000000,1499.000000,390.000000,3.121200,153500.000000 +-118.430000,34.290000,38.000000,1704.000000,347.000000,1384.000000,374.000000,2.865000,155500.000000 +-118.430000,34.290000,38.000000,1237.000000,298.000000,1073.000000,293.000000,3.672600,154600.000000 +-118.430000,34.280000,30.000000,1384.000000,308.000000,2054.000000,301.000000,3.013200,142600.000000 +-118.430000,34.280000,27.000000,862.000000,280.000000,1243.000000,267.000000,2.372400,154200.000000 +-118.430000,34.270000,31.000000,1130.000000,276.000000,1533.000000,269.000000,4.235300,156800.000000 +-118.430000,34.260000,30.000000,1246.000000,373.000000,1990.000000,369.000000,3.510400,140900.000000 +-118.430000,34.250000,38.000000,921.000000,239.000000,1023.000000,241.000000,3.451400,151900.000000 +-118.430000,34.250000,35.000000,1447.000000,335.000000,1630.000000,306.000000,2.920500,143100.000000 +-118.430000,34.250000,32.000000,2433.000000,553.000000,2318.000000,532.000000,3.638400,159300.000000 +-118.430000,34.240000,36.000000,1488.000000,313.000000,1221.000000,296.000000,4.020800,171400.000000 +-118.430000,34.240000,36.000000,1379.000000,265.000000,896.000000,246.000000,4.682700,183800.000000 +-118.430000,34.240000,35.000000,1488.000000,293.000000,1112.000000,288.000000,4.468800,182500.000000 +-118.430000,34.240000,35.000000,1416.000000,261.000000,995.000000,272.000000,3.714300,178700.000000 +-118.430000,34.230000,37.000000,1737.000000,369.000000,1061.000000,356.000000,3.961500,173700.000000 
+-118.430000,34.230000,35.000000,2049.000000,390.000000,1286.000000,385.000000,4.443200,181500.000000 +-118.430000,34.230000,35.000000,1225.000000,228.000000,720.000000,231.000000,3.401300,176500.000000 +-118.430000,34.220000,36.000000,1372.000000,295.000000,774.000000,306.000000,3.661800,187300.000000 +-118.430000,34.220000,34.000000,2300.000000,429.000000,1447.000000,455.000000,4.265600,233700.000000 +-118.430000,34.210000,26.000000,2867.000000,671.000000,1955.000000,640.000000,4.125000,226500.000000 +-118.430000,34.210000,17.000000,3667.000000,1209.000000,2636.000000,1054.000000,2.425000,175500.000000 +-118.430000,34.200000,28.000000,3386.000000,815.000000,2240.000000,737.000000,3.022100,290100.000000 +-118.430000,34.200000,20.000000,4090.000000,1271.000000,2824.000000,1053.000000,2.773000,140500.000000 +-118.430000,34.190000,27.000000,3440.000000,739.000000,1827.000000,712.000000,4.125000,245500.000000 +-118.430000,34.180000,33.000000,2717.000000,662.000000,1546.000000,597.000000,3.909900,267500.000000 +-118.430000,34.180000,25.000000,3830.000000,1105.000000,2328.000000,1017.000000,2.623800,210000.000000 +-118.430000,34.180000,22.000000,2052.000000,568.000000,1254.000000,572.000000,2.636400,271100.000000 +-118.430000,34.170000,42.000000,777.000000,102.000000,284.000000,113.000000,11.209300,500001.000000 +-118.430000,34.170000,37.000000,1982.000000,331.000000,794.000000,340.000000,5.927500,336900.000000 +-118.430000,34.170000,35.000000,2922.000000,507.000000,1130.000000,485.000000,5.451000,341800.000000 +-118.430000,34.170000,34.000000,2180.000000,424.000000,906.000000,429.000000,4.446400,353100.000000 +-118.430000,34.170000,32.000000,3202.000000,696.000000,1573.000000,621.000000,3.444900,292900.000000 +-118.430000,34.160000,41.000000,2050.000000,478.000000,850.000000,490.000000,3.420800,343400.000000 +-118.430000,34.160000,40.000000,1134.000000,184.000000,452.000000,187.000000,4.569000,333900.000000 +-118.430000,34.160000,34.000000,2622.000000,467.000000,1233.000000,476.000000,4.047400,379700.000000 +-118.430000,34.160000,34.000000,2459.000000,489.000000,1139.000000,463.000000,4.034700,353600.000000 +-118.430000,34.150000,42.000000,1293.000000,214.000000,459.000000,217.000000,7.672000,467600.000000 +-118.430000,34.150000,31.000000,1856.000000,425.000000,795.000000,426.000000,2.844800,360600.000000 +-118.430000,34.150000,28.000000,6270.000000,1706.000000,2549.000000,1497.000000,3.224100,295800.000000 +-118.430000,34.150000,26.000000,2900.000000,667.000000,1090.000000,590.000000,3.712500,447400.000000 +-118.430000,34.140000,44.000000,1693.000000,239.000000,498.000000,216.000000,10.923700,500001.000000 +-118.430000,34.130000,37.000000,4400.000000,695.000000,1521.000000,666.000000,8.295400,500001.000000 +-118.430000,34.110000,27.000000,10806.000000,1440.000000,3511.000000,1352.000000,12.729600,500001.000000 +-118.430000,34.080000,46.000000,778.000000,90.000000,238.000000,93.000000,15.000100,500001.000000 +-118.430000,34.070000,38.000000,3251.000000,656.000000,1251.000000,593.000000,7.738200,500001.000000 +-118.430000,34.070000,34.000000,3203.000000,483.000000,949.000000,439.000000,10.346700,500001.000000 +-118.430000,34.060000,41.000000,1463.000000,267.000000,601.000000,267.000000,5.377700,500001.000000 +-118.430000,34.060000,38.000000,2982.000000,664.000000,1122.000000,572.000000,4.190800,500001.000000 +-118.430000,34.060000,20.000000,4600.000000,1018.000000,1675.000000,932.000000,5.199900,500001.000000 
+-118.430000,34.060000,11.000000,3184.000000,641.000000,911.000000,463.000000,7.267500,500001.000000 +-118.430000,34.050000,52.000000,1693.000000,290.000000,727.000000,305.000000,6.711500,500001.000000 +-118.430000,34.050000,24.000000,3832.000000,949.000000,1613.000000,893.000000,3.967300,477300.000000 +-118.430000,34.050000,22.000000,4251.000000,1073.000000,1581.000000,881.000000,5.255500,500001.000000 +-118.430000,34.040000,52.000000,1782.000000,308.000000,735.000000,307.000000,5.295400,485100.000000 +-118.430000,34.040000,42.000000,2725.000000,569.000000,1115.000000,516.000000,4.583300,427500.000000 +-118.430000,34.030000,45.000000,1740.000000,311.000000,788.000000,306.000000,5.209900,373600.000000 +-118.430000,34.030000,39.000000,1733.000000,429.000000,855.000000,387.000000,3.230800,340800.000000 +-118.430000,34.030000,26.000000,1706.000000,516.000000,894.000000,435.000000,3.187500,372700.000000 +-118.430000,34.020000,42.000000,1528.000000,244.000000,634.000000,242.000000,8.163100,500001.000000 +-118.430000,34.020000,38.000000,2172.000000,437.000000,830.000000,368.000000,3.909100,500001.000000 +-118.430000,34.010000,41.000000,1527.000000,279.000000,746.000000,285.000000,6.423200,446600.000000 +-118.430000,34.010000,31.000000,2526.000000,528.000000,1046.000000,504.000000,4.700900,500001.000000 +-118.430000,34.010000,27.000000,3133.000000,1021.000000,2242.000000,1002.000000,2.697000,412500.000000 +-118.430000,34.000000,37.000000,1340.000000,358.000000,1008.000000,340.000000,3.761400,314300.000000 +-118.430000,34.000000,30.000000,2148.000000,597.000000,1341.000000,559.000000,3.399500,324000.000000 +-118.430000,34.000000,28.000000,6128.000000,1963.000000,3586.000000,1815.000000,2.705800,310900.000000 +-118.430000,33.990000,45.000000,1899.000000,461.000000,1260.000000,415.000000,2.666700,320000.000000 +-118.430000,33.990000,43.000000,2483.000000,548.000000,1212.000000,493.000000,4.018900,302900.000000 +-118.430000,33.990000,42.000000,2558.000000,558.000000,1328.000000,541.000000,4.875000,301300.000000 +-118.430000,33.990000,35.000000,2243.000000,495.000000,1135.000000,465.000000,4.328100,324000.000000 +-118.430000,33.980000,19.000000,8324.000000,1590.000000,2927.000000,1538.000000,7.542600,351700.000000 +-118.430000,33.970000,16.000000,70.000000,7.000000,17.000000,4.000000,7.719700,500001.000000 +-118.430000,33.960000,38.000000,1104.000000,216.000000,415.000000,163.000000,6.198500,422000.000000 +-118.430000,33.960000,16.000000,14891.000000,3984.000000,6270.000000,3595.000000,5.106400,283200.000000 +-118.430000,33.900000,27.000000,1536.000000,377.000000,553.000000,326.000000,5.408800,500001.000000 +-118.430000,33.870000,41.000000,847.000000,173.000000,344.000000,170.000000,6.822000,500001.000000 +-118.430000,33.860000,34.000000,358.000000,87.000000,162.000000,84.000000,7.126400,500001.000000 +-118.430000,33.830000,19.000000,6206.000000,1611.000000,2455.000000,1472.000000,5.145000,420200.000000 +-118.430000,33.820000,34.000000,2112.000000,614.000000,946.000000,574.000000,4.604800,500001.000000 +-118.440000,35.750000,23.000000,3166.000000,700.000000,1097.000000,493.000000,2.628800,96000.000000 +-118.440000,35.130000,34.000000,1170.000000,290.000000,602.000000,266.000000,1.791700,80000.000000 +-118.440000,35.130000,21.000000,1899.000000,447.000000,1133.000000,391.000000,1.863600,67900.000000 +-118.440000,34.500000,5.000000,1514.000000,220.000000,1355.000000,215.000000,8.134400,359000.000000 +-118.440000,34.320000,14.000000,6235.000000,1286.000000,3568.000000,1190.000000,4.172400,211600.000000 
+-118.440000,34.310000,22.000000,3182.000000,822.000000,2661.000000,746.000000,2.747200,160100.000000 +-118.440000,34.290000,35.000000,2606.000000,447.000000,1555.000000,404.000000,4.686400,193800.000000 +-118.440000,34.290000,32.000000,1260.000000,382.000000,1434.000000,342.000000,2.028600,122900.000000 +-118.440000,34.290000,30.000000,1632.000000,401.000000,1357.000000,401.000000,3.158800,160100.000000 +-118.440000,34.280000,47.000000,843.000000,194.000000,800.000000,180.000000,3.368700,151700.000000 +-118.440000,34.280000,46.000000,11.000000,11.000000,24.000000,13.000000,2.875000,162500.000000 +-118.440000,34.280000,38.000000,1156.000000,305.000000,1359.000000,289.000000,2.514700,137100.000000 +-118.440000,34.280000,37.000000,944.000000,244.000000,1107.000000,235.000000,1.968800,144100.000000 +-118.440000,34.280000,32.000000,527.000000,146.000000,582.000000,143.000000,1.770800,138800.000000 +-118.440000,34.270000,35.000000,777.000000,187.000000,1022.000000,186.000000,3.400000,139600.000000 +-118.440000,34.270000,29.000000,1701.000000,419.000000,1616.000000,371.000000,3.360300,142400.000000 +-118.440000,34.260000,34.000000,325.000000,60.000000,433.000000,83.000000,5.512400,174300.000000 +-118.440000,34.260000,34.000000,1102.000000,212.000000,949.000000,212.000000,4.079200,165100.000000 +-118.440000,34.260000,28.000000,1077.000000,288.000000,1377.000000,293.000000,3.916700,153900.000000 +-118.440000,34.250000,33.000000,1121.000000,231.000000,1038.000000,236.000000,4.895800,173700.000000 +-118.440000,34.240000,35.000000,2344.000000,435.000000,1531.000000,399.000000,3.725000,178200.000000 +-118.440000,34.230000,36.000000,1730.000000,387.000000,1099.000000,353.000000,4.036800,183100.000000 +-118.440000,34.220000,41.000000,1582.000000,399.000000,1159.000000,378.000000,2.825000,168600.000000 +-118.440000,34.220000,41.000000,1030.000000,214.000000,664.000000,223.000000,3.808300,183800.000000 +-118.440000,34.220000,39.000000,1529.000000,344.000000,913.000000,314.000000,3.325000,178200.000000 +-118.440000,34.220000,36.000000,1191.000000,266.000000,718.000000,248.000000,3.461200,178800.000000 +-118.440000,34.210000,37.000000,1665.000000,335.000000,1011.000000,343.000000,4.870300,185100.000000 +-118.440000,34.210000,20.000000,5756.000000,1477.000000,4031.000000,1369.000000,3.244800,221200.000000 +-118.440000,34.200000,36.000000,2698.000000,623.000000,1544.000000,554.000000,2.737500,234900.000000 +-118.440000,34.200000,35.000000,1717.000000,478.000000,1628.000000,495.000000,2.519700,225600.000000 +-118.440000,34.200000,17.000000,2934.000000,950.000000,2517.000000,889.000000,2.936000,232500.000000 +-118.440000,34.190000,37.000000,1516.000000,344.000000,983.000000,347.000000,5.000000,243600.000000 +-118.440000,34.190000,19.000000,3487.000000,959.000000,2278.000000,835.000000,2.670900,215500.000000 +-118.440000,34.180000,35.000000,972.000000,270.000000,550.000000,256.000000,2.246100,215000.000000 +-118.440000,34.180000,33.000000,2127.000000,414.000000,1056.000000,391.000000,4.375000,286100.000000 +-118.440000,34.180000,17.000000,1546.000000,592.000000,2423.000000,556.000000,2.197700,154200.000000 +-118.440000,34.170000,29.000000,2685.000000,642.000000,1085.000000,599.000000,3.276300,279400.000000 +-118.440000,34.160000,35.000000,3080.000000,642.000000,1362.000000,623.000000,4.121800,328500.000000 +-118.440000,34.150000,37.000000,1335.000000,286.000000,539.000000,279.000000,3.281300,301700.000000 +-118.440000,34.150000,29.000000,5474.000000,1457.000000,2352.000000,1326.000000,3.415000,382500.000000 
+-118.440000,34.150000,15.000000,4420.000000,1076.000000,1669.000000,1016.000000,4.637500,359100.000000 +-118.440000,34.090000,36.000000,3129.000000,392.000000,862.000000,334.000000,15.000100,500001.000000 +-118.440000,34.070000,35.000000,1973.000000,332.000000,1257.000000,296.000000,8.956500,500001.000000 +-118.440000,34.070000,26.000000,3535.000000,748.000000,1322.000000,666.000000,7.167400,500001.000000 +-118.440000,34.070000,21.000000,730.000000,263.000000,965.000000,224.000000,2.051100,350000.000000 +-118.440000,34.060000,9.000000,5102.000000,1695.000000,2609.000000,1450.000000,3.254500,500001.000000 +-118.440000,34.060000,28.000000,3910.000000,959.000000,1763.000000,867.000000,5.500000,500001.000000 +-118.440000,34.060000,14.000000,520.000000,292.000000,282.000000,213.000000,2.285700,500001.000000 +-118.440000,34.060000,13.000000,4833.000000,1119.000000,1649.000000,807.000000,6.238900,500001.000000 +-118.440000,34.050000,22.000000,3970.000000,871.000000,1588.000000,791.000000,4.861800,500001.000000 +-118.440000,34.050000,20.000000,5943.000000,1538.000000,2492.000000,1429.000000,4.114100,305000.000000 +-118.440000,34.050000,18.000000,4780.000000,1192.000000,1886.000000,1036.000000,4.467400,500001.000000 +-118.440000,34.050000,15.000000,5368.000000,1312.000000,2269.000000,1232.000000,5.709700,316700.000000 +-118.440000,34.040000,31.000000,2670.000000,662.000000,1535.000000,631.000000,3.071400,347800.000000 +-118.440000,34.040000,16.000000,18.000000,6.000000,3.000000,4.000000,0.536000,350000.000000 +-118.440000,34.030000,41.000000,1164.000000,265.000000,561.000000,251.000000,4.241100,350900.000000 +-118.440000,34.030000,37.000000,975.000000,189.000000,489.000000,202.000000,4.243400,331000.000000 +-118.440000,34.030000,37.000000,1193.000000,205.000000,488.000000,224.000000,3.625000,357600.000000 +-118.440000,34.030000,30.000000,1039.000000,303.000000,606.000000,274.000000,3.125000,343800.000000 +-118.440000,34.020000,39.000000,3278.000000,632.000000,1321.000000,617.000000,6.291700,465700.000000 +-118.440000,34.020000,37.000000,1592.000000,308.000000,783.000000,321.000000,6.258300,386000.000000 +-118.440000,34.020000,32.000000,2242.000000,490.000000,921.000000,461.000000,4.042900,500001.000000 +-118.440000,34.010000,43.000000,1408.000000,246.000000,651.000000,240.000000,4.579500,400000.000000 +-118.440000,34.010000,42.000000,2061.000000,396.000000,907.000000,393.000000,6.080400,420000.000000 +-118.440000,34.010000,41.000000,1309.000000,221.000000,534.000000,228.000000,5.170800,418800.000000 +-118.440000,34.000000,44.000000,1798.000000,353.000000,835.000000,314.000000,4.750000,355800.000000 +-118.440000,34.000000,44.000000,1462.000000,338.000000,821.000000,341.000000,2.599000,362200.000000 +-118.440000,34.000000,41.000000,1562.000000,377.000000,874.000000,368.000000,4.108300,324300.000000 +-118.440000,34.000000,40.000000,1287.000000,346.000000,806.000000,311.000000,3.875000,321300.000000 +-118.440000,34.000000,22.000000,5822.000000,1707.000000,3335.000000,1585.000000,3.157900,243100.000000 +-118.440000,33.990000,43.000000,1432.000000,308.000000,782.000000,303.000000,4.333300,303900.000000 +-118.440000,33.960000,33.000000,2799.000000,491.000000,978.000000,447.000000,5.643500,500001.000000 +-118.440000,33.880000,35.000000,2020.000000,451.000000,724.000000,399.000000,6.649400,500001.000000 +-118.440000,33.810000,33.000000,3994.000000,990.000000,1647.000000,931.000000,5.010600,500001.000000 +-118.440000,33.790000,27.000000,2141.000000,260.000000,635.000000,240.000000,11.664800,500001.000000 
+-118.450000,37.700000,15.000000,2199.000000,453.000000,899.000000,347.000000,2.350000,107800.000000 +-118.450000,37.370000,26.000000,3135.000000,524.000000,1385.000000,523.000000,4.337000,139700.000000 +-118.450000,35.620000,18.000000,2304.000000,527.000000,782.000000,390.000000,1.414100,75800.000000 +-118.450000,35.580000,16.000000,5396.000000,1182.000000,1802.000000,807.000000,1.881900,69700.000000 +-118.450000,34.440000,16.000000,13406.000000,2574.000000,7030.000000,2440.000000,4.686100,187900.000000 +-118.450000,34.310000,9.000000,1739.000000,358.000000,820.000000,323.000000,4.055600,182500.000000 +-118.450000,34.310000,28.000000,1532.000000,287.000000,977.000000,275.000000,4.477300,173100.000000 +-118.450000,34.300000,35.000000,4085.000000,919.000000,3988.000000,906.000000,3.481200,160200.000000 +-118.450000,34.300000,27.000000,2676.000000,652.000000,2661.000000,623.000000,4.304700,152100.000000 +-118.450000,34.290000,30.000000,762.000000,228.000000,840.000000,226.000000,2.337500,154200.000000 +-118.450000,34.280000,38.000000,1527.000000,332.000000,1303.000000,340.000000,3.571400,152000.000000 +-118.450000,34.280000,36.000000,2602.000000,638.000000,2780.000000,620.000000,2.715500,149800.000000 +-118.450000,34.270000,35.000000,1579.000000,300.000000,1012.000000,265.000000,5.129600,195900.000000 +-118.450000,34.270000,33.000000,1194.000000,229.000000,839.000000,230.000000,3.705000,185800.000000 +-118.450000,34.260000,35.000000,1724.000000,311.000000,992.000000,315.000000,4.835900,195600.000000 +-118.450000,34.260000,35.000000,1637.000000,300.000000,894.000000,302.000000,4.175000,209600.000000 +-118.450000,34.250000,34.000000,2094.000000,380.000000,1207.000000,380.000000,5.280100,212300.000000 +-118.450000,34.240000,7.000000,3299.000000,794.000000,2343.000000,647.000000,3.086500,205900.000000 +-118.450000,34.240000,11.000000,9053.000000,2193.000000,7096.000000,2038.000000,3.508200,136500.000000 +-118.450000,34.230000,15.000000,5738.000000,1767.000000,4620.000000,1581.000000,2.358400,157600.000000 +-118.450000,34.220000,8.000000,2609.000000,786.000000,1803.000000,695.000000,2.771400,185700.000000 +-118.450000,34.220000,24.000000,3442.000000,1168.000000,4625.000000,1097.000000,2.069900,183000.000000 +-118.450000,34.210000,30.000000,2331.000000,733.000000,2172.000000,707.000000,2.188800,195600.000000 +-118.450000,34.200000,19.000000,3666.000000,1150.000000,2657.000000,1090.000000,2.968800,202100.000000 +-118.450000,34.200000,18.000000,2729.000000,800.000000,2099.000000,742.000000,2.584200,230800.000000 +-118.450000,34.190000,11.000000,2479.000000,900.000000,2466.000000,855.000000,2.226400,181300.000000 +-118.450000,34.180000,39.000000,1810.000000,388.000000,839.000000,380.000000,3.717100,228800.000000 +-118.450000,34.180000,22.000000,2516.000000,826.000000,3350.000000,713.000000,2.019200,158300.000000 +-118.450000,34.170000,33.000000,3100.000000,687.000000,1388.000000,658.000000,4.333300,261300.000000 +-118.450000,34.170000,21.000000,2152.000000,527.000000,996.000000,470.000000,3.238600,277300.000000 +-118.450000,34.160000,33.000000,2544.000000,500.000000,1035.000000,492.000000,4.475000,314800.000000 +-118.450000,34.160000,22.000000,7828.000000,2038.000000,3303.000000,1922.000000,3.617100,318300.000000 +-118.450000,34.160000,22.000000,4982.000000,1358.000000,2237.000000,1220.000000,3.710500,272600.000000 +-118.450000,34.150000,20.000000,3876.000000,799.000000,1334.000000,753.000000,4.565600,478400.000000 
+-118.450000,34.150000,10.000000,1091.000000,260.000000,517.000000,266.000000,4.172700,332600.000000 +-118.450000,34.140000,33.000000,1741.000000,274.000000,588.000000,267.000000,7.962500,490800.000000 +-118.450000,34.100000,31.000000,6675.000000,842.000000,2092.000000,796.000000,11.844200,500001.000000 +-118.450000,34.080000,52.000000,1500.000000,176.000000,384.000000,145.000000,7.157600,500001.000000 +-118.450000,34.070000,13.000000,4284.000000,1452.000000,3806.000000,1252.000000,1.312500,350000.000000 +-118.450000,34.060000,52.000000,204.000000,34.000000,1154.000000,28.000000,9.337000,500001.000000 +-118.450000,34.060000,20.000000,3367.000000,1264.000000,2667.000000,1131.000000,2.244400,500000.000000 +-118.450000,34.050000,23.000000,4099.000000,1287.000000,2103.000000,1217.000000,3.754900,275000.000000 +-118.450000,34.040000,23.000000,3771.000000,1321.000000,2031.000000,1241.000000,2.767900,277500.000000 +-118.450000,34.040000,22.000000,3319.000000,1045.000000,1848.000000,940.000000,3.667300,283300.000000 +-118.450000,34.040000,19.000000,3330.000000,1010.000000,1837.000000,915.000000,3.017300,393800.000000 +-118.450000,34.030000,45.000000,727.000000,168.000000,520.000000,175.000000,2.652800,300000.000000 +-118.450000,34.030000,41.000000,1240.000000,320.000000,711.000000,304.000000,3.348200,318100.000000 +-118.450000,34.030000,39.000000,1657.000000,402.000000,931.000000,363.000000,3.781300,336300.000000 +-118.450000,34.020000,45.000000,1230.000000,201.000000,565.000000,219.000000,6.352100,493400.000000 +-118.450000,34.020000,41.000000,2956.000000,700.000000,1212.000000,645.000000,3.458300,421900.000000 +-118.450000,34.010000,40.000000,1361.000000,240.000000,559.000000,229.000000,6.351600,354300.000000 +-118.450000,34.010000,37.000000,1328.000000,250.000000,626.000000,228.000000,5.866600,440100.000000 +-118.450000,34.010000,36.000000,2424.000000,418.000000,1123.000000,417.000000,6.475500,405800.000000 +-118.450000,34.000000,48.000000,1923.000000,408.000000,1142.000000,433.000000,4.575000,326700.000000 +-118.450000,34.000000,43.000000,1606.000000,408.000000,862.000000,354.000000,3.962000,345800.000000 +-118.450000,34.000000,39.000000,1909.000000,359.000000,867.000000,345.000000,4.700000,334700.000000 +-118.450000,33.990000,52.000000,1829.000000,472.000000,779.000000,424.000000,3.160700,339000.000000 +-118.450000,33.990000,52.000000,1010.000000,244.000000,573.000000,242.000000,4.186100,363200.000000 +-118.450000,33.990000,45.000000,1132.000000,269.000000,654.000000,264.000000,4.567300,343100.000000 +-118.450000,33.990000,33.000000,3125.000000,785.000000,1720.000000,713.000000,2.972200,325000.000000 +-118.450000,33.990000,26.000000,1919.000000,405.000000,953.000000,371.000000,6.067200,420800.000000 +-118.450000,33.960000,36.000000,2723.000000,547.000000,1090.000000,519.000000,6.326700,500001.000000 +-118.450000,33.800000,31.000000,4803.000000,575.000000,1490.000000,577.000000,11.999300,500001.000000 +-118.460000,35.130000,19.000000,3109.000000,640.000000,1457.000000,620.000000,2.641700,94900.000000 +-118.460000,35.120000,16.000000,4084.000000,812.000000,2033.000000,668.000000,3.240500,85500.000000 +-118.460000,34.420000,25.000000,2988.000000,525.000000,1884.000000,513.000000,4.700700,169500.000000 +-118.460000,34.400000,12.000000,25957.000000,4798.000000,10475.000000,4490.000000,4.542000,195300.000000 +-118.460000,34.320000,10.000000,5777.000000,1112.000000,2917.000000,1056.000000,4.151400,194100.000000 
+-118.460000,34.310000,24.000000,2920.000000,601.000000,1460.000000,598.000000,4.270800,218200.000000 +-118.460000,34.300000,32.000000,2424.000000,476.000000,2291.000000,419.000000,4.033700,158500.000000 +-118.460000,34.290000,24.000000,3668.000000,890.000000,3151.000000,810.000000,3.052600,183300.000000 +-118.460000,34.280000,23.000000,1663.000000,302.000000,1242.000000,283.000000,5.593100,217600.000000 +-118.460000,34.270000,30.000000,1576.000000,282.000000,1004.000000,284.000000,4.801500,179700.000000 +-118.460000,34.260000,36.000000,1394.000000,254.000000,761.000000,262.000000,4.948500,217100.000000 +-118.460000,34.250000,33.000000,2202.000000,433.000000,1135.000000,407.000000,4.214300,224200.000000 +-118.460000,34.250000,32.000000,2217.000000,422.000000,1064.000000,427.000000,3.698900,208600.000000 +-118.460000,34.230000,20.000000,4609.000000,1499.000000,5349.000000,1377.000000,2.712100,169400.000000 +-118.460000,34.230000,19.000000,9902.000000,2814.000000,7307.000000,2660.000000,2.585000,145400.000000 +-118.460000,34.230000,16.000000,6338.000000,1768.000000,4718.000000,1632.000000,3.018700,154600.000000 +-118.460000,34.220000,39.000000,1500.000000,333.000000,998.000000,309.000000,3.962500,168200.000000 +-118.460000,34.220000,35.000000,2288.000000,617.000000,2222.000000,566.000000,2.629900,170700.000000 +-118.460000,34.220000,31.000000,2057.000000,601.000000,2397.000000,579.000000,2.871000,184400.000000 +-118.460000,34.210000,7.000000,2081.000000,657.000000,1456.000000,535.000000,3.500000,186900.000000 +-118.460000,34.200000,22.000000,4855.000000,1350.000000,2519.000000,1258.000000,3.089300,205600.000000 +-118.460000,34.200000,13.000000,2926.000000,816.000000,1867.000000,802.000000,3.525500,202700.000000 +-118.460000,34.190000,35.000000,1491.000000,295.000000,779.000000,309.000000,6.114200,256300.000000 +-118.460000,34.190000,20.000000,5992.000000,1820.000000,4826.000000,1632.000000,2.723700,233500.000000 +-118.460000,34.180000,33.000000,1791.000000,386.000000,844.000000,397.000000,4.508100,251400.000000 +-118.460000,34.180000,27.000000,2582.000000,719.000000,2038.000000,718.000000,3.087700,174200.000000 +-118.460000,34.170000,22.000000,6707.000000,1737.000000,2620.000000,1610.000000,3.147800,273700.000000 +-118.460000,34.160000,38.000000,1495.000000,300.000000,598.000000,280.000000,3.469800,265400.000000 +-118.460000,34.160000,28.000000,2795.000000,622.000000,1173.000000,545.000000,4.442300,280400.000000 +-118.460000,34.160000,16.000000,4590.000000,1200.000000,2195.000000,1139.000000,3.827300,334900.000000 +-118.460000,34.080000,35.000000,3247.000000,525.000000,1065.000000,484.000000,7.842600,500001.000000 +-118.460000,34.070000,43.000000,2511.000000,456.000000,808.000000,407.000000,6.770300,500001.000000 +-118.460000,34.060000,46.000000,1302.000000,215.000000,482.000000,226.000000,7.067400,500001.000000 +-118.460000,34.060000,20.000000,5448.000000,1532.000000,2202.000000,1442.000000,4.255400,500001.000000 +-118.460000,34.050000,25.000000,6902.000000,2138.000000,3136.000000,1844.000000,2.650900,410000.000000 +-118.460000,34.050000,25.000000,4077.000000,1151.000000,1719.000000,1017.000000,3.772100,337500.000000 +-118.460000,34.050000,21.000000,3639.000000,1002.000000,1489.000000,983.000000,4.619700,387500.000000 +-118.460000,34.040000,31.000000,2621.000000,707.000000,1632.000000,673.000000,3.287000,348100.000000 +-118.460000,34.040000,25.000000,2768.000000,850.000000,1558.000000,784.000000,3.697600,360000.000000 
+-118.460000,34.040000,25.000000,2142.000000,718.000000,1390.000000,699.000000,3.006900,325000.000000 +-118.460000,34.040000,19.000000,3522.000000,1036.000000,1820.000000,977.000000,3.266300,337500.000000 +-118.460000,34.040000,17.000000,2729.000000,897.000000,1404.000000,758.000000,3.123500,420800.000000 +-118.460000,34.030000,39.000000,1244.000000,283.000000,886.000000,284.000000,3.125000,325000.000000 +-118.460000,34.030000,27.000000,1965.000000,631.000000,1042.000000,596.000000,2.750000,327300.000000 +-118.460000,34.020000,46.000000,2571.000000,502.000000,1225.000000,501.000000,6.043600,473000.000000 +-118.460000,34.020000,45.000000,3803.000000,970.000000,1690.000000,871.000000,3.047600,456200.000000 +-118.460000,34.020000,29.000000,2329.000000,833.000000,1953.000000,800.000000,2.663900,233300.000000 +-118.460000,34.010000,48.000000,1640.000000,322.000000,664.000000,301.000000,4.000000,500001.000000 +-118.460000,34.010000,43.000000,513.000000,98.000000,266.000000,103.000000,5.642800,343100.000000 +-118.460000,34.000000,52.000000,888.000000,206.000000,376.000000,194.000000,3.875000,372000.000000 +-118.460000,34.000000,44.000000,941.000000,230.000000,493.000000,206.000000,3.645800,325800.000000 +-118.460000,34.000000,39.000000,614.000000,174.000000,538.000000,159.000000,2.354200,235700.000000 +-118.460000,34.000000,39.000000,4098.000000,1100.000000,2054.000000,1053.000000,2.918000,345600.000000 +-118.460000,34.000000,37.000000,388.000000,83.000000,248.000000,84.000000,5.166400,326700.000000 +-118.460000,34.000000,36.000000,1392.000000,260.000000,679.000000,247.000000,4.734400,346900.000000 +-118.460000,33.990000,52.000000,1158.000000,253.000000,528.000000,253.000000,3.523400,334700.000000 +-118.460000,33.990000,41.000000,885.000000,285.000000,562.000000,268.000000,3.199200,303800.000000 +-118.460000,33.990000,37.000000,1828.000000,460.000000,1075.000000,453.000000,4.337000,360600.000000 +-118.460000,33.990000,35.000000,1214.000000,300.000000,478.000000,265.000000,4.015600,500001.000000 +-118.460000,33.980000,32.000000,2388.000000,591.000000,1009.000000,556.000000,5.212100,466700.000000 +-118.460000,33.980000,27.000000,2217.000000,520.000000,806.000000,458.000000,3.893500,500001.000000 +-118.460000,33.970000,19.000000,2461.000000,521.000000,777.000000,447.000000,10.000000,500001.000000 +-118.460000,33.970000,19.000000,1658.000000,427.000000,648.000000,378.000000,3.869800,500001.000000 +-118.460000,33.970000,18.000000,9430.000000,2473.000000,3408.000000,2003.000000,6.172600,500001.000000 +-118.460000,33.770000,28.000000,3065.000000,406.000000,1101.000000,391.000000,10.553600,500001.000000 +-118.470000,35.720000,18.000000,4754.000000,1075.000000,1366.000000,690.000000,2.069400,81200.000000 +-118.470000,35.640000,17.000000,2248.000000,535.000000,927.000000,427.000000,1.302300,68500.000000 +-118.470000,34.420000,25.000000,3223.000000,524.000000,1763.000000,508.000000,5.288700,183000.000000 +-118.470000,34.420000,17.000000,913.000000,228.000000,530.000000,201.000000,3.038000,238500.000000 +-118.470000,34.320000,13.000000,2664.000000,518.000000,1468.000000,521.000000,4.898800,325200.000000 +-118.470000,34.290000,18.000000,4256.000000,987.000000,3401.000000,955.000000,4.293500,190000.000000 +-118.470000,34.270000,35.000000,1150.000000,185.000000,741.000000,178.000000,5.741000,220600.000000 +-118.470000,34.270000,33.000000,1549.000000,264.000000,881.000000,289.000000,5.140800,222900.000000 +-118.470000,34.270000,17.000000,1444.000000,282.000000,523.000000,270.000000,2.735300,192400.000000 
+-118.470000,34.260000,35.000000,1898.000000,344.000000,1123.000000,347.000000,5.579200,218400.000000 +-118.470000,34.260000,34.000000,1300.000000,289.000000,650.000000,291.000000,3.887500,199200.000000 +-118.470000,34.230000,22.000000,8350.000000,2717.000000,9135.000000,2452.000000,2.500800,160000.000000 +-118.470000,34.210000,34.000000,2512.000000,603.000000,1805.000000,584.000000,2.973500,220000.000000 +-118.470000,34.200000,25.000000,4590.000000,1477.000000,2723.000000,1195.000000,2.711800,281700.000000 +-118.470000,34.200000,20.000000,3939.000000,1143.000000,2475.000000,1002.000000,2.902500,229100.000000 +-118.470000,34.190000,41.000000,1104.000000,196.000000,495.000000,196.000000,5.092900,225000.000000 +-118.470000,34.190000,33.000000,3879.000000,943.000000,2113.000000,843.000000,3.892000,292900.000000 +-118.470000,34.160000,30.000000,3823.000000,740.000000,1449.000000,612.000000,4.600000,392500.000000 +-118.470000,34.150000,7.000000,6306.000000,1473.000000,2381.000000,1299.000000,4.642000,457300.000000 +-118.470000,34.150000,43.000000,804.000000,117.000000,267.000000,110.000000,8.226900,500001.000000 +-118.470000,34.140000,36.000000,2873.000000,420.000000,850.000000,379.000000,8.153000,500001.000000 +-118.470000,34.140000,34.000000,3646.000000,610.000000,1390.000000,607.000000,7.629000,500001.000000 +-118.470000,34.060000,45.000000,3030.000000,433.000000,916.000000,399.000000,9.466400,500001.000000 +-118.470000,34.060000,45.000000,1271.000000,190.000000,419.000000,171.000000,7.644700,500001.000000 +-118.470000,34.060000,26.000000,6577.000000,1789.000000,2937.000000,1652.000000,4.801000,500001.000000 +-118.470000,34.050000,27.000000,4401.000000,1033.000000,1725.000000,962.000000,4.175000,500001.000000 +-118.470000,34.050000,25.000000,2689.000000,719.000000,1229.000000,663.000000,3.590900,500001.000000 +-118.470000,34.050000,22.000000,5215.000000,1193.000000,2048.000000,1121.000000,4.700900,500001.000000 +-118.470000,34.040000,32.000000,2909.000000,748.000000,1310.000000,706.000000,4.516000,350000.000000 +-118.470000,34.040000,21.000000,5041.000000,1491.000000,2719.000000,1420.000000,3.533500,268800.000000 +-118.470000,34.030000,32.000000,3024.000000,784.000000,1323.000000,740.000000,3.388900,347900.000000 +-118.470000,34.030000,31.000000,2642.000000,681.000000,1303.000000,625.000000,3.598700,340500.000000 +-118.470000,34.030000,29.000000,3287.000000,882.000000,1523.000000,823.000000,3.738100,290600.000000 +-118.470000,34.020000,41.000000,2136.000000,549.000000,986.000000,557.000000,2.725400,444400.000000 +-118.470000,34.020000,38.000000,2163.000000,651.000000,1759.000000,584.000000,2.338200,297500.000000 +-118.470000,34.020000,35.000000,3057.000000,774.000000,2223.000000,732.000000,2.074500,332500.000000 +-118.470000,34.010000,44.000000,2175.000000,475.000000,1019.000000,448.000000,4.793000,470800.000000 +-118.470000,34.010000,44.000000,2017.000000,343.000000,958.000000,382.000000,6.101400,480100.000000 +-118.470000,34.010000,43.000000,1160.000000,304.000000,393.000000,250.000000,2.916700,461100.000000 +-118.470000,34.010000,41.000000,752.000000,201.000000,482.000000,207.000000,2.541700,418200.000000 +-118.470000,34.010000,41.000000,2704.000000,557.000000,1047.000000,478.000000,4.421100,462900.000000 +-118.470000,34.010000,27.000000,1782.000000,471.000000,837.000000,422.000000,3.772700,413000.000000 +-118.470000,34.000000,42.000000,1271.000000,301.000000,574.000000,312.000000,3.130400,340500.000000 
+-118.470000,34.000000,37.000000,2586.000000,765.000000,1801.000000,737.000000,2.604200,305800.000000 +-118.470000,34.000000,28.000000,1259.000000,302.000000,668.000000,280.000000,4.281300,384400.000000 +-118.470000,33.990000,52.000000,1523.000000,447.000000,636.000000,408.000000,3.068200,412500.000000 +-118.470000,33.990000,50.000000,1568.000000,501.000000,764.000000,478.000000,3.015000,414300.000000 +-118.470000,33.990000,41.000000,1146.000000,310.000000,833.000000,270.000000,2.593800,285000.000000 +-118.470000,33.990000,37.000000,2155.000000,721.000000,1082.000000,637.000000,3.407100,267500.000000 +-118.470000,33.990000,34.000000,1875.000000,501.000000,1491.000000,526.000000,2.841700,321400.000000 +-118.470000,33.990000,31.000000,1312.000000,376.000000,1178.000000,330.000000,2.071400,300000.000000 +-118.470000,33.990000,24.000000,1438.000000,454.000000,665.000000,416.000000,2.975000,500001.000000 +-118.480000,35.140000,4.000000,8417.000000,1657.000000,4631.000000,1468.000000,3.694900,115800.000000 +-118.480000,34.420000,21.000000,1375.000000,259.000000,728.000000,258.000000,5.016600,229000.000000 +-118.480000,34.330000,9.000000,2384.000000,395.000000,1697.000000,402.000000,6.089100,270100.000000 +-118.480000,34.280000,35.000000,2132.000000,368.000000,1128.000000,341.000000,5.310700,227100.000000 +-118.480000,34.280000,35.000000,1511.000000,274.000000,873.000000,254.000000,5.560800,226700.000000 +-118.480000,34.270000,33.000000,2649.000000,449.000000,1303.000000,437.000000,4.995500,216800.000000 +-118.480000,34.260000,36.000000,1770.000000,296.000000,938.000000,304.000000,5.749000,238000.000000 +-118.480000,34.250000,36.000000,1951.000000,395.000000,1040.000000,375.000000,5.161900,195300.000000 +-118.480000,34.250000,35.000000,1865.000000,335.000000,1074.000000,337.000000,5.106800,223300.000000 +-118.480000,34.250000,35.000000,1442.000000,276.000000,795.000000,268.000000,4.968800,216900.000000 +-118.480000,34.240000,32.000000,2621.000000,412.000000,1285.000000,414.000000,6.653700,267600.000000 +-118.480000,34.230000,35.000000,1963.000000,310.000000,919.000000,297.000000,4.758300,258600.000000 +-118.480000,34.230000,30.000000,1762.000000,263.000000,761.000000,292.000000,6.526800,273100.000000 +-118.480000,34.230000,29.000000,3354.000000,707.000000,1752.000000,650.000000,4.548400,239900.000000 +-118.480000,34.220000,22.000000,3430.000000,1214.000000,3618.000000,1092.000000,2.197400,93800.000000 +-118.480000,34.210000,25.000000,2879.000000,723.000000,2077.000000,649.000000,3.386400,197400.000000 +-118.480000,34.200000,26.000000,2027.000000,559.000000,1545.000000,513.000000,2.897400,189900.000000 +-118.480000,34.200000,23.000000,2850.000000,864.000000,2249.000000,777.000000,2.695700,191700.000000 +-118.480000,34.200000,12.000000,3831.000000,1083.000000,2258.000000,967.000000,2.437500,255400.000000 +-118.480000,34.190000,36.000000,2058.000000,423.000000,1132.000000,423.000000,3.883300,210400.000000 +-118.480000,34.160000,32.000000,2108.000000,309.000000,769.000000,274.000000,8.717200,500001.000000 +-118.480000,34.160000,30.000000,3507.000000,536.000000,1427.000000,525.000000,6.708200,500001.000000 +-118.480000,34.150000,31.000000,2536.000000,429.000000,990.000000,424.000000,5.459100,495500.000000 +-118.480000,34.140000,31.000000,9320.000000,1143.000000,2980.000000,1109.000000,10.359900,500001.000000 +-118.480000,34.070000,40.000000,3351.000000,484.000000,1564.000000,523.000000,8.515300,500001.000000 
+-118.480000,34.070000,37.000000,4042.000000,549.000000,1318.000000,542.000000,12.866500,500001.000000 +-118.480000,34.070000,29.000000,4767.000000,777.000000,1500.000000,638.000000,10.793700,500001.000000 +-118.480000,34.050000,48.000000,3623.000000,528.000000,1282.000000,516.000000,9.522100,500001.000000 +-118.480000,34.050000,36.000000,2143.000000,434.000000,751.000000,396.000000,6.749600,500001.000000 +-118.480000,34.040000,49.000000,3780.000000,741.000000,1435.000000,690.000000,4.315800,500001.000000 +-118.480000,34.040000,47.000000,1956.000000,277.000000,724.000000,277.000000,8.961600,500001.000000 +-118.480000,34.040000,40.000000,1395.000000,285.000000,610.000000,262.000000,3.965900,500001.000000 +-118.480000,34.040000,36.000000,2539.000000,535.000000,979.000000,500.000000,3.666700,500001.000000 +-118.480000,34.030000,39.000000,1530.000000,401.000000,1074.000000,375.000000,3.507600,381800.000000 +-118.480000,34.030000,32.000000,1793.000000,476.000000,1143.000000,448.000000,2.898100,353600.000000 +-118.480000,34.020000,30.000000,2027.000000,609.000000,1425.000000,562.000000,2.291700,330800.000000 +-118.480000,34.020000,29.000000,1585.000000,542.000000,1019.000000,487.000000,2.707200,375000.000000 +-118.480000,34.020000,25.000000,1583.000000,460.000000,983.000000,422.000000,2.701900,293800.000000 +-118.480000,34.020000,22.000000,1249.000000,483.000000,1106.000000,481.000000,2.526100,375000.000000 +-118.480000,34.020000,11.000000,72.000000,16.000000,150.000000,20.000000,2.625000,250000.000000 +-118.480000,34.010000,40.000000,2198.000000,611.000000,1023.000000,567.000000,3.755000,398300.000000 +-118.480000,34.010000,31.000000,2851.000000,804.000000,1410.000000,782.000000,4.089300,381500.000000 +-118.480000,34.000000,52.000000,1359.000000,395.000000,521.000000,368.000000,2.673600,500001.000000 +-118.480000,34.000000,41.000000,2584.000000,743.000000,1058.000000,668.000000,3.206100,370000.000000 +-118.480000,34.000000,29.000000,1727.000000,479.000000,741.000000,431.000000,3.612100,500000.000000 +-118.480000,34.000000,25.000000,4149.000000,1067.000000,1749.000000,1000.000000,3.972200,450000.000000 +-118.480000,33.990000,46.000000,2219.000000,686.000000,1107.000000,590.000000,2.552300,387500.000000 +-118.480000,33.960000,16.000000,895.000000,181.000000,237.000000,149.000000,12.008800,500001.000000 +-118.480000,33.430000,29.000000,716.000000,214.000000,422.000000,173.000000,2.604200,287500.000000 +-118.490000,34.430000,15.000000,8244.000000,1409.000000,4453.000000,1357.000000,5.482900,199600.000000 +-118.490000,34.420000,23.000000,4166.000000,756.000000,2082.000000,743.000000,4.410700,213400.000000 +-118.490000,34.310000,25.000000,1024.000000,145.000000,357.000000,147.000000,7.059800,356300.000000 +-118.490000,34.290000,26.000000,4516.000000,611.000000,1714.000000,581.000000,9.287300,431800.000000 +-118.490000,34.280000,31.000000,3508.000000,585.000000,1957.000000,588.000000,6.645800,285500.000000 +-118.490000,34.280000,27.000000,2535.000000,389.000000,1071.000000,386.000000,6.869500,319400.000000 +-118.490000,34.270000,34.000000,4877.000000,815.000000,2521.000000,781.000000,5.571400,225900.000000 +-118.490000,34.270000,33.000000,3047.000000,527.000000,1578.000000,507.000000,4.580000,236200.000000 +-118.490000,34.260000,27.000000,2722.000000,468.000000,1164.000000,419.000000,4.659100,239900.000000 +-118.490000,34.260000,25.000000,8389.000000,1872.000000,4483.000000,1747.000000,3.549700,261300.000000 
+-118.490000,34.250000,33.000000,2088.000000,383.000000,960.000000,362.000000,4.333300,232900.000000 +-118.490000,34.250000,30.000000,2871.000000,470.000000,1335.000000,458.000000,5.023200,253900.000000 +-118.490000,34.250000,28.000000,4054.000000,712.000000,2164.000000,746.000000,5.000000,258000.000000 +-118.490000,34.240000,35.000000,2707.000000,446.000000,1224.000000,445.000000,5.293900,244200.000000 +-118.490000,34.240000,34.000000,1971.000000,316.000000,917.000000,307.000000,6.096500,262300.000000 +-118.490000,34.230000,32.000000,4373.000000,683.000000,2040.000000,693.000000,5.266800,242300.000000 +-118.490000,34.220000,30.000000,1756.000000,314.000000,899.000000,288.000000,5.032500,238200.000000 +-118.490000,34.210000,25.000000,1131.000000,449.000000,746.000000,420.000000,1.356500,225000.000000 +-118.490000,34.200000,35.000000,1109.000000,206.000000,515.000000,202.000000,5.211800,215800.000000 +-118.490000,34.190000,23.000000,2087.000000,571.000000,1809.000000,553.000000,3.166700,202000.000000 +-118.490000,34.180000,31.000000,3073.000000,674.000000,1486.000000,684.000000,4.898400,311700.000000 +-118.490000,34.160000,37.000000,3333.000000,488.000000,1171.000000,485.000000,6.495800,500001.000000 +-118.490000,34.150000,33.000000,2829.000000,360.000000,1010.000000,363.000000,10.358700,500001.000000 +-118.490000,34.140000,28.000000,3539.000000,441.000000,1190.000000,421.000000,10.679600,500001.000000 +-118.490000,34.130000,24.000000,4394.000000,535.000000,1443.000000,528.000000,11.297900,500001.000000 +-118.490000,34.110000,27.000000,6603.000000,879.000000,2336.000000,868.000000,13.293500,500001.000000 +-118.490000,34.070000,36.000000,2929.000000,366.000000,1054.000000,352.000000,13.572800,500001.000000 +-118.490000,34.060000,42.000000,2861.000000,360.000000,829.000000,310.000000,15.000100,500001.000000 +-118.490000,34.050000,52.000000,2416.000000,291.000000,810.000000,270.000000,13.855600,500001.000000 +-118.490000,34.050000,45.000000,1346.000000,214.000000,415.000000,209.000000,7.028500,500001.000000 +-118.490000,34.050000,42.000000,1918.000000,216.000000,632.000000,224.000000,15.000100,500001.000000 +-118.490000,34.040000,50.000000,2597.000000,340.000000,964.000000,339.000000,13.303600,500001.000000 +-118.490000,34.040000,48.000000,2381.000000,345.000000,859.000000,306.000000,8.025700,500001.000000 +-118.490000,34.040000,31.000000,4066.000000,951.000000,1532.000000,868.000000,4.812500,500001.000000 +-118.490000,34.030000,32.000000,3851.000000,900.000000,1456.000000,836.000000,4.520800,442100.000000 +-118.490000,34.030000,31.000000,4949.000000,1293.000000,1985.000000,1244.000000,4.252000,436700.000000 +-118.490000,34.030000,31.000000,3155.000000,808.000000,1208.000000,745.000000,3.676900,450000.000000 +-118.490000,34.020000,30.000000,2075.000000,687.000000,1026.000000,592.000000,3.163500,366700.000000 +-118.490000,34.020000,29.000000,2709.000000,799.000000,1238.000000,793.000000,3.156300,330000.000000 +-118.490000,34.020000,28.000000,2545.000000,752.000000,1548.000000,679.000000,2.912500,475000.000000 +-118.490000,34.020000,28.000000,1394.000000,582.000000,716.000000,543.000000,1.513200,450000.000000 +-118.490000,34.020000,27.000000,4725.000000,1185.000000,1945.000000,1177.000000,4.136500,470800.000000 +-118.490000,34.010000,28.000000,651.000000,252.000000,333.000000,174.000000,1.972200,500001.000000 +-118.490000,34.000000,32.000000,3407.000000,1071.000000,1463.000000,986.000000,3.036900,500001.000000 
+-118.500000,35.700000,18.000000,3303.000000,814.000000,986.000000,522.000000,1.595700,101400.000000 +-118.500000,34.520000,3.000000,6577.000000,1056.000000,3032.000000,1004.000000,5.926300,251800.000000 +-118.500000,34.460000,17.000000,10267.000000,1683.000000,4956.000000,1483.000000,5.506100,239400.000000 +-118.500000,34.450000,25.000000,1290.000000,190.000000,689.000000,216.000000,6.009700,220200.000000 +-118.500000,34.270000,35.000000,2235.000000,390.000000,1148.000000,416.000000,4.869000,221600.000000 +-118.500000,34.250000,32.000000,2411.000000,380.000000,1040.000000,344.000000,6.155000,257300.000000 +-118.500000,34.250000,32.000000,2333.000000,389.000000,969.000000,331.000000,4.816400,241100.000000 +-118.500000,34.230000,26.000000,3082.000000,573.000000,1590.000000,586.000000,4.516700,319000.000000 +-118.500000,34.210000,36.000000,1656.000000,310.000000,817.000000,308.000000,5.567500,215900.000000 +-118.500000,34.210000,36.000000,1254.000000,229.000000,629.000000,245.000000,4.964300,236100.000000 +-118.500000,34.210000,35.000000,1668.000000,332.000000,807.000000,311.000000,4.512500,200300.000000 +-118.500000,34.200000,42.000000,1558.000000,322.000000,884.000000,334.000000,2.230400,203800.000000 +-118.500000,34.190000,35.000000,2720.000000,490.000000,1158.000000,445.000000,5.079600,228300.000000 +-118.500000,34.150000,33.000000,3104.000000,387.000000,1111.000000,376.000000,13.419600,500001.000000 +-118.500000,34.050000,36.000000,4152.000000,542.000000,1461.000000,550.000000,15.000100,500001.000000 +-118.500000,34.040000,52.000000,2233.000000,317.000000,769.000000,277.000000,8.383900,500001.000000 +-118.500000,34.030000,52.000000,1711.000000,245.000000,671.000000,242.000000,7.757200,500001.000000 +-118.500000,34.030000,52.000000,1506.000000,208.000000,547.000000,186.000000,7.870500,500001.000000 +-118.500000,34.030000,44.000000,2146.000000,394.000000,851.000000,355.000000,6.480000,500001.000000 +-118.500000,34.030000,32.000000,6365.000000,1784.000000,2767.000000,1698.000000,3.645100,383300.000000 +-118.500000,34.020000,35.000000,2914.000000,934.000000,1334.000000,870.000000,2.993400,350000.000000 +-118.500000,34.020000,24.000000,2924.000000,1013.000000,1492.000000,943.000000,2.775000,291700.000000 +-118.500000,33.990000,22.000000,3484.000000,975.000000,1268.000000,952.000000,3.260900,500001.000000 +-118.500000,33.970000,52.000000,709.000000,329.000000,388.000000,313.000000,2.264300,350000.000000 +-118.500000,33.970000,29.000000,2737.000000,808.000000,1157.000000,696.000000,5.128000,500001.000000 +-118.510000,35.160000,7.000000,4371.000000,727.000000,1932.000000,654.000000,4.625000,136800.000000 +-118.510000,34.430000,15.000000,8510.000000,1258.000000,3733.000000,1233.000000,6.108200,253700.000000 +-118.510000,34.300000,24.000000,6145.000000,868.000000,2710.000000,875.000000,7.507800,344000.000000 +-118.510000,34.290000,29.000000,1287.000000,194.000000,525.000000,187.000000,6.417100,319300.000000 +-118.510000,34.280000,34.000000,3580.000000,565.000000,1694.000000,524.000000,5.406500,243800.000000 +-118.510000,34.280000,29.000000,4239.000000,653.000000,1890.000000,631.000000,6.391100,301700.000000 +-118.510000,34.270000,36.000000,2276.000000,429.000000,1001.000000,419.000000,4.104200,252100.000000 +-118.510000,34.270000,34.000000,3787.000000,771.000000,1966.000000,738.000000,4.055000,222500.000000 +-118.510000,34.250000,24.000000,4338.000000,558.000000,1514.000000,549.000000,8.861200,500001.000000 
+-118.510000,34.240000,31.000000,5297.000000,664.000000,1986.000000,657.000000,8.645400,483500.000000 +-118.510000,34.230000,36.000000,3324.000000,448.000000,1190.000000,423.000000,7.277200,477200.000000 +-118.510000,34.220000,36.000000,2794.000000,523.000000,1334.000000,472.000000,4.346200,222100.000000 +-118.510000,34.220000,36.000000,1952.000000,387.000000,1156.000000,392.000000,4.185000,209200.000000 +-118.510000,34.210000,36.000000,2396.000000,421.000000,1064.000000,398.000000,4.700000,223600.000000 +-118.510000,34.200000,35.000000,1614.000000,308.000000,850.000000,330.000000,4.180600,209000.000000 +-118.510000,34.200000,33.000000,2327.000000,479.000000,1166.000000,472.000000,4.234400,262500.000000 +-118.510000,34.190000,35.000000,2537.000000,418.000000,1161.000000,421.000000,5.302800,229200.000000 +-118.510000,34.180000,37.000000,1893.000000,365.000000,911.000000,324.000000,4.803600,295300.000000 +-118.510000,34.170000,31.000000,3252.000000,834.000000,1411.000000,760.000000,3.188500,219000.000000 +-118.510000,34.160000,23.000000,11154.000000,1995.000000,4076.000000,1809.000000,5.460900,500001.000000 +-118.510000,34.140000,28.000000,6748.000000,904.000000,2431.000000,876.000000,12.887900,500001.000000 +-118.510000,34.110000,29.000000,9013.000000,1117.000000,2919.000000,1061.000000,13.947000,500001.000000 +-118.510000,34.040000,38.000000,4715.000000,691.000000,1660.000000,637.000000,10.188200,500001.000000 +-118.510000,34.030000,37.000000,4072.000000,905.000000,1468.000000,923.000000,3.857100,500001.000000 +-118.510000,34.000000,52.000000,1241.000000,502.000000,679.000000,459.000000,2.309800,500001.000000 +-118.510000,33.980000,40.000000,1901.000000,679.000000,865.000000,587.000000,2.341700,425000.000000 +-118.520000,34.460000,5.000000,15341.000000,2527.000000,7270.000000,2320.000000,6.128100,236200.000000 +-118.520000,34.440000,26.000000,934.000000,148.000000,519.000000,162.000000,5.320900,185000.000000 +-118.520000,34.400000,5.000000,7748.000000,1557.000000,4768.000000,1393.000000,5.305000,311200.000000 +-118.520000,34.390000,21.000000,5477.000000,1275.000000,3384.000000,1222.000000,3.662500,228100.000000 +-118.520000,34.360000,5.000000,4222.000000,712.000000,2024.000000,646.000000,5.870300,500001.000000 +-118.520000,34.320000,18.000000,7498.000000,976.000000,3189.000000,955.000000,8.124800,374000.000000 +-118.520000,34.300000,17.000000,4542.000000,621.000000,2144.000000,597.000000,8.846700,450700.000000 +-118.520000,34.290000,28.000000,2272.000000,320.000000,868.000000,312.000000,7.746400,474600.000000 +-118.520000,34.280000,33.000000,1975.000000,271.000000,801.000000,287.000000,7.819300,379600.000000 +-118.520000,34.270000,36.000000,3204.000000,538.000000,1499.000000,499.000000,5.564900,271200.000000 +-118.520000,34.260000,21.000000,8850.000000,2139.000000,4717.000000,1979.000000,3.781600,254200.000000 +-118.520000,34.250000,11.000000,7849.000000,1664.000000,3561.000000,1500.000000,4.662500,290900.000000 +-118.520000,34.240000,6.000000,3218.000000,949.000000,2295.000000,876.000000,3.092600,418500.000000 +-118.520000,34.230000,35.000000,1471.000000,210.000000,735.000000,219.000000,8.384100,472200.000000 +-118.520000,34.220000,35.000000,1620.000000,272.000000,1052.000000,248.000000,5.520900,203300.000000 +-118.520000,34.220000,35.000000,1275.000000,222.000000,959.000000,226.000000,5.028200,195400.000000 +-118.520000,34.210000,36.000000,2394.000000,424.000000,1490.000000,427.000000,4.326100,206700.000000 
+-118.520000,34.210000,36.000000,1328.000000,287.000000,823.000000,273.000000,4.564800,193700.000000 +-118.520000,34.210000,34.000000,1663.000000,299.000000,762.000000,282.000000,5.126500,211000.000000 +-118.520000,34.200000,37.000000,1795.000000,346.000000,1082.000000,354.000000,4.910200,207200.000000 +-118.520000,34.190000,42.000000,881.000000,170.000000,464.000000,163.000000,2.951100,203900.000000 +-118.520000,34.190000,37.000000,1560.000000,275.000000,763.000000,284.000000,3.851600,206900.000000 +-118.520000,34.180000,46.000000,2082.000000,438.000000,1047.000000,393.000000,3.653400,216000.000000 +-118.520000,34.180000,43.000000,1700.000000,380.000000,930.000000,349.000000,3.675000,213100.000000 +-118.520000,34.180000,42.000000,1611.000000,410.000000,879.000000,386.000000,3.192300,221800.000000 +-118.520000,34.180000,34.000000,2307.000000,388.000000,1168.000000,427.000000,4.214300,245400.000000 +-118.520000,34.170000,20.000000,17377.000000,4457.000000,7450.000000,4204.000000,3.215400,259600.000000 +-118.520000,34.050000,45.000000,1814.000000,325.000000,709.000000,311.000000,4.825000,500001.000000 +-118.520000,34.040000,47.000000,1985.000000,315.000000,819.000000,340.000000,6.514700,500001.000000 +-118.520000,34.040000,43.000000,2167.000000,254.000000,761.000000,256.000000,13.684200,500001.000000 +-118.520000,34.040000,42.000000,993.000000,130.000000,368.000000,134.000000,10.808200,500001.000000 +-118.520000,34.020000,24.000000,7418.000000,1755.000000,2713.000000,1577.000000,5.086700,500001.000000 +-118.520000,34.010000,25.000000,2757.000000,738.000000,1014.000000,633.000000,3.143300,500001.000000 +-118.530000,34.450000,10.000000,5509.000000,969.000000,3002.000000,959.000000,5.598100,220100.000000 +-118.530000,34.440000,19.000000,3013.000000,507.000000,1356.000000,484.000000,5.116300,233200.000000 +-118.530000,34.380000,18.000000,2288.000000,607.000000,2305.000000,597.000000,3.227000,136100.000000 +-118.530000,34.370000,8.000000,3839.000000,852.000000,1342.000000,593.000000,3.911800,333700.000000 +-118.530000,34.270000,33.000000,1927.000000,305.000000,896.000000,293.000000,5.634000,320500.000000 +-118.530000,34.260000,18.000000,3674.000000,577.000000,1590.000000,550.000000,8.176000,308400.000000 +-118.530000,34.250000,20.000000,6331.000000,1537.000000,2957.000000,1509.000000,3.389200,323100.000000 +-118.530000,34.230000,32.000000,4039.000000,984.000000,2675.000000,941.000000,3.032100,240000.000000 +-118.530000,34.230000,27.000000,2131.000000,543.000000,1065.000000,528.000000,3.240400,230400.000000 +-118.530000,34.220000,29.000000,4101.000000,849.000000,2630.000000,867.000000,4.660700,199800.000000 +-118.530000,34.210000,18.000000,3124.000000,796.000000,1855.000000,725.000000,2.938900,213200.000000 +-118.530000,34.200000,33.000000,3270.000000,818.000000,2118.000000,763.000000,3.225000,205300.000000 +-118.530000,34.200000,26.000000,2221.000000,662.000000,1998.000000,603.000000,2.870100,191100.000000 +-118.530000,34.190000,32.000000,2618.000000,692.000000,1961.000000,633.000000,2.625000,192300.000000 +-118.530000,34.180000,26.000000,4175.000000,885.000000,2118.000000,778.000000,4.208300,240300.000000 +-118.530000,34.180000,16.000000,7194.000000,1976.000000,3687.000000,1894.000000,3.188700,189300.000000 +-118.530000,34.170000,18.000000,6430.000000,1412.000000,2897.000000,1348.000000,3.855000,243800.000000 +-118.530000,34.160000,32.000000,3554.000000,762.000000,1623.000000,750.000000,3.614100,290600.000000 
+-118.530000,34.140000,28.000000,6920.000000,906.000000,2515.000000,860.000000,9.218900,500001.000000 +-118.530000,34.090000,37.000000,5477.000000,833.000000,1925.000000,757.000000,8.188800,500001.000000 +-118.530000,34.030000,40.000000,4350.000000,763.000000,1551.000000,665.000000,7.031800,500001.000000 +-118.540000,34.380000,18.000000,2096.000000,309.000000,1044.000000,328.000000,6.829900,262100.000000 +-118.540000,34.300000,22.000000,4423.000000,622.000000,1995.000000,582.000000,8.215900,376200.000000 +-118.540000,34.280000,18.000000,5481.000000,780.000000,2477.000000,764.000000,6.724800,377200.000000 +-118.540000,34.280000,10.000000,7665.000000,999.000000,3517.000000,998.000000,10.880500,500001.000000 +-118.540000,34.260000,23.000000,4960.000000,592.000000,1929.000000,586.000000,10.905200,500001.000000 +-118.540000,34.250000,26.000000,2639.000000,378.000000,1191.000000,401.000000,6.278800,322200.000000 +-118.540000,34.240000,24.000000,4631.000000,1164.000000,2360.000000,1083.000000,3.097700,264000.000000 +-118.540000,34.230000,35.000000,3422.000000,601.000000,1690.000000,574.000000,4.375000,232900.000000 +-118.540000,34.230000,29.000000,1753.000000,342.000000,1318.000000,333.000000,4.125000,241400.000000 +-118.540000,34.220000,35.000000,1664.000000,300.000000,1000.000000,309.000000,4.673100,224100.000000 +-118.540000,34.220000,34.000000,2193.000000,513.000000,1299.000000,497.000000,3.618700,211600.000000 +-118.540000,34.210000,32.000000,2593.000000,566.000000,1596.000000,547.000000,3.988600,199200.000000 +-118.540000,34.210000,22.000000,6064.000000,1826.000000,4876.000000,1697.000000,2.875000,227100.000000 +-118.540000,34.200000,37.000000,1600.000000,349.000000,1012.000000,366.000000,4.159700,201600.000000 +-118.540000,34.190000,33.000000,2205.000000,453.000000,1242.000000,419.000000,4.131900,203700.000000 +-118.540000,34.190000,22.000000,3380.000000,790.000000,2199.000000,737.000000,2.573900,239200.000000 +-118.540000,34.180000,25.000000,1938.000000,457.000000,1280.000000,425.000000,3.963200,240300.000000 +-118.540000,34.180000,17.000000,7214.000000,1994.000000,4100.000000,1823.000000,3.094300,174500.000000 +-118.540000,34.170000,34.000000,2458.000000,433.000000,1034.000000,373.000000,5.673800,443600.000000 +-118.540000,34.170000,25.000000,3352.000000,891.000000,1815.000000,860.000000,2.852800,425000.000000 +-118.540000,34.170000,11.000000,1080.000000,174.000000,386.000000,160.000000,6.127400,315900.000000 +-118.540000,34.060000,21.000000,3755.000000,525.000000,1493.000000,526.000000,11.423300,500001.000000 +-118.540000,34.050000,33.000000,6778.000000,1092.000000,2540.000000,1052.000000,8.565000,500001.000000 +-118.550000,34.440000,14.000000,15348.000000,2366.000000,7087.000000,2169.000000,6.327700,237700.000000 +-118.550000,34.410000,8.000000,21086.000000,3945.000000,9936.000000,3790.000000,5.860200,265100.000000 +-118.550000,34.390000,16.000000,8726.000000,1317.000000,3789.000000,1279.000000,6.841900,323300.000000 +-118.550000,34.370000,21.000000,7010.000000,1063.000000,3331.000000,1038.000000,6.776000,278100.000000 +-118.550000,34.280000,16.000000,8879.000000,1239.000000,3468.000000,1200.000000,8.112500,428600.000000 +-118.550000,34.270000,25.000000,4919.000000,661.000000,2183.000000,625.000000,8.135600,352800.000000 +-118.550000,34.260000,21.000000,4018.000000,536.000000,1508.000000,529.000000,8.203000,445400.000000 +-118.550000,34.240000,21.000000,5751.000000,1082.000000,2230.000000,1016.000000,4.345800,407500.000000 
+-118.550000,34.230000,25.000000,4409.000000,1018.000000,4579.000000,1010.000000,2.872700,245100.000000 +-118.550000,34.210000,35.000000,2592.000000,490.000000,1427.000000,434.000000,5.062300,246400.000000 +-118.550000,34.200000,21.000000,2549.000000,651.000000,1624.000000,628.000000,3.690500,179800.000000 +-118.550000,34.190000,36.000000,978.000000,170.000000,475.000000,192.000000,4.675000,222500.000000 +-118.550000,34.190000,31.000000,1856.000000,370.000000,990.000000,360.000000,4.365400,223800.000000 +-118.550000,34.190000,18.000000,5862.000000,1322.000000,3161.000000,1280.000000,3.110600,170600.000000 +-118.550000,34.180000,32.000000,3011.000000,529.000000,1287.000000,525.000000,5.060500,311000.000000 +-118.550000,34.040000,41.000000,1482.000000,239.000000,617.000000,242.000000,8.861900,500001.000000 +-118.550000,34.030000,35.000000,9075.000000,1858.000000,3646.000000,1724.000000,6.030700,500001.000000 +-118.550000,33.990000,39.000000,2603.000000,456.000000,928.000000,410.000000,7.909600,500001.000000 +-118.560000,34.420000,2.000000,966.000000,270.000000,233.000000,169.000000,1.966700,450000.000000 +-118.560000,34.410000,4.000000,17313.000000,3224.000000,6902.000000,2707.000000,5.679800,320900.000000 +-118.560000,34.370000,23.000000,3927.000000,728.000000,1984.000000,707.000000,4.853600,202600.000000 +-118.560000,34.250000,31.000000,1962.000000,243.000000,697.000000,242.000000,8.565000,500001.000000 +-118.560000,34.240000,23.000000,2980.000000,362.000000,1208.000000,378.000000,8.171400,500001.000000 +-118.560000,34.230000,36.000000,3215.000000,529.000000,1710.000000,539.000000,5.512600,248400.000000 +-118.560000,34.230000,36.000000,2406.000000,432.000000,1242.000000,454.000000,4.694400,221800.000000 +-118.560000,34.220000,35.000000,1843.000000,329.000000,1041.000000,317.000000,4.427100,205100.000000 +-118.560000,34.220000,34.000000,1599.000000,294.000000,819.000000,306.000000,4.319400,197000.000000 +-118.560000,34.210000,36.000000,1286.000000,242.000000,788.000000,248.000000,3.533300,196800.000000 +-118.560000,34.210000,13.000000,8327.000000,1849.000000,4126.000000,1773.000000,3.731300,189800.000000 +-118.560000,34.200000,36.000000,1544.000000,308.000000,891.000000,286.000000,4.175000,190900.000000 +-118.560000,34.200000,35.000000,2273.000000,410.000000,1431.000000,403.000000,4.078900,196700.000000 +-118.560000,34.200000,35.000000,1770.000000,362.000000,1083.000000,355.000000,5.048300,221000.000000 +-118.560000,34.190000,36.000000,2600.000000,441.000000,1246.000000,426.000000,4.111100,215600.000000 +-118.560000,34.190000,35.000000,782.000000,144.000000,425.000000,140.000000,5.454800,201400.000000 +-118.560000,34.190000,34.000000,2579.000000,561.000000,1237.000000,517.000000,4.433000,235100.000000 +-118.560000,34.190000,34.000000,2185.000000,372.000000,986.000000,347.000000,4.812500,266700.000000 +-118.560000,34.190000,34.000000,1237.000000,242.000000,671.000000,221.000000,3.961500,183600.000000 +-118.560000,34.180000,39.000000,1819.000000,291.000000,770.000000,278.000000,5.408800,457300.000000 +-118.560000,34.180000,36.000000,1366.000000,224.000000,719.000000,270.000000,4.826400,251000.000000 +-118.560000,34.170000,35.000000,2987.000000,391.000000,1244.000000,387.000000,7.132200,500001.000000 +-118.560000,34.140000,23.000000,9657.000000,1189.000000,3585.000000,1162.000000,10.439900,500001.000000 +-118.560000,34.060000,24.000000,2332.000000,349.000000,761.000000,325.000000,7.303100,500001.000000 
+-118.560000,34.030000,34.000000,2095.000000,343.000000,662.000000,299.000000,8.293400,500001.000000 +-118.570000,34.290000,4.000000,6995.000000,1151.000000,2907.000000,1089.000000,7.080800,341200.000000 +-118.570000,34.270000,20.000000,7384.000000,845.000000,2795.000000,872.000000,9.604700,500001.000000 +-118.570000,34.250000,34.000000,5098.000000,778.000000,2239.000000,778.000000,5.614900,273100.000000 +-118.570000,34.250000,20.000000,4679.000000,609.000000,1945.000000,609.000000,8.747100,419900.000000 +-118.570000,34.230000,22.000000,3275.000000,648.000000,1746.000000,585.000000,4.967600,221900.000000 +-118.570000,34.220000,27.000000,2795.000000,606.000000,1702.000000,586.000000,3.779800,258400.000000 +-118.570000,34.220000,17.000000,3262.000000,753.000000,1879.000000,708.000000,4.135900,255200.000000 +-118.570000,34.210000,36.000000,878.000000,167.000000,499.000000,179.000000,4.118100,190400.000000 +-118.570000,34.210000,23.000000,4891.000000,793.000000,2447.000000,765.000000,5.879800,270500.000000 +-118.570000,34.200000,36.000000,2559.000000,469.000000,1358.000000,445.000000,4.556800,201500.000000 +-118.570000,34.200000,33.000000,1759.000000,311.000000,943.000000,315.000000,5.223000,209200.000000 +-118.570000,34.200000,18.000000,7157.000000,1869.000000,4642.000000,1699.000000,3.181800,208000.000000 +-118.570000,34.180000,36.000000,2981.000000,441.000000,1243.000000,413.000000,6.530400,439800.000000 +-118.570000,34.170000,31.000000,1950.000000,383.000000,870.000000,357.000000,3.187500,500001.000000 +-118.570000,34.150000,22.000000,5791.000000,706.000000,2059.000000,673.000000,10.920100,500001.000000 +-118.570000,34.090000,14.000000,7970.000000,1142.000000,2926.000000,1096.000000,11.286600,500001.000000 +-118.580000,34.250000,23.000000,4883.000000,769.000000,2119.000000,725.000000,5.521000,280800.000000 +-118.580000,34.240000,26.000000,3239.000000,647.000000,1529.000000,590.000000,3.242600,236900.000000 +-118.580000,34.230000,35.000000,1917.000000,314.000000,1019.000000,340.000000,4.892900,234900.000000 +-118.580000,34.230000,29.000000,3907.000000,773.000000,2037.000000,727.000000,4.102300,230200.000000 +-118.580000,34.220000,35.000000,2560.000000,441.000000,1428.000000,468.000000,5.634500,228200.000000 +-118.580000,34.220000,35.000000,1969.000000,339.000000,950.000000,340.000000,4.875000,230400.000000 +-118.580000,34.210000,27.000000,2209.000000,353.000000,1034.000000,344.000000,4.712500,250900.000000 +-118.580000,34.210000,24.000000,2642.000000,696.000000,1649.000000,633.000000,3.018700,217700.000000 +-118.580000,34.200000,37.000000,1389.000000,252.000000,826.000000,249.000000,5.015000,220900.000000 +-118.580000,34.200000,35.000000,1558.000000,267.000000,793.000000,249.000000,5.146300,220200.000000 +-118.580000,34.200000,35.000000,1323.000000,228.000000,756.000000,216.000000,4.233000,221300.000000 +-118.580000,34.200000,21.000000,2979.000000,744.000000,1824.000000,692.000000,3.500000,223700.000000 +-118.580000,34.190000,35.000000,2329.000000,399.000000,966.000000,336.000000,3.883900,224900.000000 +-118.580000,34.190000,27.000000,4286.000000,1071.000000,2863.000000,1033.000000,3.312500,222800.000000 +-118.580000,34.190000,15.000000,3061.000000,1079.000000,2173.000000,1078.000000,2.850000,187500.000000 +-118.580000,34.180000,28.000000,908.000000,142.000000,368.000000,143.000000,5.615900,340500.000000 +-118.580000,34.170000,29.000000,3393.000000,574.000000,1471.000000,587.000000,6.206400,334900.000000 
+-118.580000,34.150000,21.000000,3856.000000,547.000000,1422.000000,535.000000,8.419600,450700.000000 +-118.580000,34.120000,42.000000,718.000000,140.000000,324.000000,131.000000,6.401800,500001.000000 +-118.580000,34.060000,25.000000,4440.000000,693.000000,1560.000000,636.000000,8.866600,500001.000000 +-118.590000,35.720000,28.000000,1491.000000,408.000000,98.000000,48.000000,1.420500,90000.000000 +-118.590000,34.470000,5.000000,538.000000,98.000000,8733.000000,105.000000,4.239100,154600.000000 +-118.590000,34.260000,20.000000,8146.000000,1131.000000,3562.000000,1054.000000,7.167000,357100.000000 +-118.590000,34.250000,15.000000,9716.000000,2387.000000,4992.000000,2225.000000,3.623100,193300.000000 +-118.590000,34.230000,17.000000,6592.000000,1525.000000,4459.000000,1463.000000,3.034700,254500.000000 +-118.590000,34.230000,14.000000,4407.000000,1209.000000,2676.000000,1128.000000,3.409100,168800.000000 +-118.590000,34.220000,17.000000,6015.000000,1464.000000,3056.000000,1347.000000,4.007700,229000.000000 +-118.590000,34.210000,34.000000,2389.000000,521.000000,1560.000000,514.000000,4.833300,225400.000000 +-118.590000,34.210000,34.000000,1943.000000,320.000000,895.000000,305.000000,5.046200,227700.000000 +-118.590000,34.210000,26.000000,2335.000000,669.000000,1986.000000,645.000000,2.997400,178800.000000 +-118.590000,34.200000,21.000000,1789.000000,679.000000,2300.000000,677.000000,2.754000,179800.000000 +-118.590000,34.180000,7.000000,11853.000000,2691.000000,4404.000000,2447.000000,4.200900,271300.000000 +-118.590000,34.170000,36.000000,1887.000000,359.000000,761.000000,329.000000,5.484700,296000.000000 +-118.590000,34.150000,29.000000,2023.000000,330.000000,747.000000,304.000000,6.769400,369700.000000 +-118.590000,34.140000,19.000000,1303.000000,155.000000,450.000000,145.000000,10.551100,483100.000000 +-118.590000,34.110000,35.000000,2396.000000,472.000000,1054.000000,457.000000,6.450400,407000.000000 +-118.600000,37.390000,19.000000,2682.000000,518.000000,1134.000000,399.000000,3.213200,166000.000000 +-118.600000,34.260000,18.000000,6154.000000,1070.000000,3010.000000,1034.000000,5.639200,271500.000000 +-118.600000,34.230000,19.000000,8866.000000,2355.000000,5005.000000,2194.000000,3.256400,230300.000000 +-118.600000,34.210000,21.000000,9512.000000,2560.000000,7282.000000,2387.000000,2.803900,227500.000000 +-118.600000,34.200000,10.000000,2869.000000,941.000000,2162.000000,829.000000,3.229700,150000.000000 +-118.600000,34.190000,16.000000,14912.000000,4183.000000,5105.000000,3302.000000,2.831200,213900.000000 +-118.600000,34.160000,32.000000,3999.000000,667.000000,1628.000000,631.000000,6.079400,338500.000000 +-118.600000,34.150000,28.000000,4570.000000,744.000000,1693.000000,695.000000,6.140000,361900.000000 +-118.600000,34.090000,43.000000,2228.000000,438.000000,960.000000,395.000000,7.609100,438500.000000 +-118.600000,34.080000,40.000000,866.000000,181.000000,399.000000,176.000000,6.910000,380000.000000 +-118.600000,34.070000,16.000000,319.000000,59.000000,149.000000,64.000000,4.625000,433300.000000 +-118.600000,34.020000,36.000000,2043.000000,467.000000,606.000000,326.000000,8.433100,500001.000000 +-118.610000,35.470000,13.000000,2267.000000,601.000000,756.000000,276.000000,2.547400,78400.000000 +-118.610000,35.080000,6.000000,3660.000000,646.000000,1243.000000,482.000000,3.491100,137200.000000 +-118.610000,34.990000,11.000000,4031.000000,766.000000,1539.000000,564.000000,3.891700,120800.000000 
+-118.610000,34.590000,5.000000,4028.000000,896.000000,2062.000000,826.000000,4.057900,167100.000000 +-118.610000,34.380000,2.000000,5989.000000,883.000000,1787.000000,613.000000,6.691600,329500.000000 +-118.610000,34.310000,4.000000,1949.000000,458.000000,868.000000,398.000000,5.015100,285200.000000 +-118.610000,34.250000,16.000000,8295.000000,1506.000000,3903.000000,1451.000000,5.511100,276600.000000 +-118.610000,34.240000,17.000000,5406.000000,895.000000,2337.000000,882.000000,6.013700,375900.000000 +-118.610000,34.230000,26.000000,3727.000000,572.000000,1724.000000,530.000000,6.141900,327300.000000 +-118.610000,34.220000,24.000000,5256.000000,758.000000,2474.000000,780.000000,7.325200,333700.000000 +-118.610000,34.210000,41.000000,1058.000000,228.000000,778.000000,245.000000,3.353400,180500.000000 +-118.610000,34.210000,34.000000,3494.000000,557.000000,1861.000000,576.000000,5.640700,251500.000000 +-118.610000,34.210000,33.000000,2609.000000,431.000000,1208.000000,406.000000,5.452700,227100.000000 +-118.610000,34.200000,16.000000,1718.000000,467.000000,896.000000,475.000000,3.629600,160900.000000 +-118.610000,34.190000,34.000000,703.000000,127.000000,369.000000,127.000000,4.312500,210100.000000 +-118.610000,34.190000,28.000000,3824.000000,749.000000,1790.000000,701.000000,4.115400,246400.000000 +-118.610000,34.170000,31.000000,1689.000000,362.000000,705.000000,360.000000,4.000000,278500.000000 +-118.610000,34.170000,19.000000,5944.000000,1345.000000,2372.000000,1250.000000,3.881900,328900.000000 +-118.610000,34.160000,29.000000,4364.000000,647.000000,1550.000000,624.000000,6.810700,367400.000000 +-118.620000,34.260000,15.000000,10860.000000,1653.000000,4178.000000,1581.000000,6.324900,262100.000000 +-118.620000,34.220000,34.000000,2633.000000,471.000000,1313.000000,428.000000,4.090900,232900.000000 +-118.620000,34.220000,33.000000,1636.000000,275.000000,866.000000,289.000000,5.635600,241300.000000 +-118.620000,34.210000,26.000000,3234.000000,517.000000,1597.000000,513.000000,6.107400,258600.000000 +-118.620000,34.200000,29.000000,2421.000000,402.000000,1120.000000,388.000000,5.030900,244800.000000 +-118.620000,34.200000,23.000000,3098.000000,542.000000,1486.000000,492.000000,5.761300,235800.000000 +-118.620000,34.190000,35.000000,1934.000000,307.000000,905.000000,315.000000,5.510100,267400.000000 +-118.620000,34.180000,25.000000,3124.000000,468.000000,1241.000000,439.000000,6.404400,333100.000000 +-118.620000,34.170000,34.000000,3268.000000,538.000000,1463.000000,519.000000,6.848200,308300.000000 +-118.620000,34.170000,32.000000,1491.000000,355.000000,756.000000,296.000000,3.040400,262800.000000 +-118.620000,34.150000,26.000000,5661.000000,791.000000,2493.000000,780.000000,7.981400,409900.000000 +-118.630000,34.210000,31.000000,3952.000000,647.000000,1762.000000,588.000000,5.570900,244800.000000 +-118.630000,34.200000,19.000000,7411.000000,1045.000000,2814.000000,950.000000,6.778500,336100.000000 +-118.630000,34.190000,32.000000,3568.000000,591.000000,1741.000000,563.000000,5.152900,259600.000000 +-118.630000,34.170000,33.000000,4769.000000,787.000000,2019.000000,743.000000,5.579800,338200.000000 +-118.630000,34.160000,33.000000,2896.000000,455.000000,1116.000000,411.000000,6.019200,347700.000000 +-118.630000,34.160000,30.000000,3346.000000,487.000000,1296.000000,495.000000,7.457000,392700.000000 +-118.630000,34.110000,35.000000,3795.000000,690.000000,1521.000000,653.000000,5.873500,448100.000000 
+-118.640000,34.250000,47.000000,1315.000000,290.000000,581.000000,268.000000,5.402400,253000.000000 +-118.640000,34.220000,16.000000,4312.000000,574.000000,1902.000000,574.000000,8.443800,390000.000000 +-118.640000,34.190000,33.000000,3017.000000,494.000000,1423.000000,470.000000,5.616300,248400.000000 +-118.640000,34.190000,30.000000,2399.000000,373.000000,1062.000000,377.000000,6.009400,245600.000000 +-118.640000,34.190000,28.000000,3274.000000,571.000000,1424.000000,521.000000,4.416700,247300.000000 +-118.640000,34.180000,33.000000,3808.000000,623.000000,1784.000000,615.000000,5.164100,263400.000000 +-118.640000,34.170000,26.000000,6767.000000,903.000000,2574.000000,883.000000,7.784600,409000.000000 +-118.650000,34.270000,23.000000,1724.000000,265.000000,934.000000,306.000000,6.078300,229200.000000 +-118.650000,34.210000,5.000000,5429.000000,665.000000,2315.000000,687.000000,9.646500,500001.000000 +-118.650000,34.190000,27.000000,2772.000000,511.000000,1346.000000,497.000000,5.201600,243000.000000 +-118.650000,34.180000,27.000000,1793.000000,339.000000,1016.000000,326.000000,4.925000,240300.000000 +-118.650000,34.180000,26.000000,4607.000000,656.000000,1769.000000,643.000000,7.491800,367600.000000 +-118.660000,35.200000,7.000000,9664.000000,1692.000000,3617.000000,1370.000000,4.058100,162900.000000 +-118.660000,34.430000,9.000000,2356.000000,469.000000,1556.000000,386.000000,3.775000,155000.000000 +-118.660000,34.230000,18.000000,897.000000,142.000000,263.000000,110.000000,6.128800,350000.000000 +-118.660000,34.190000,23.000000,7544.000000,1031.000000,3221.000000,1043.000000,7.642000,374900.000000 +-118.660000,34.180000,25.000000,6612.000000,857.000000,2519.000000,843.000000,8.391200,419000.000000 +-118.660000,34.100000,12.000000,2560.000000,365.000000,907.000000,366.000000,10.076000,500001.000000 +-118.660000,34.020000,23.000000,8798.000000,1465.000000,2750.000000,1208.000000,8.736400,500001.000000 +-118.670000,34.300000,5.000000,6123.000000,825.000000,2440.000000,736.000000,7.901300,393000.000000 +-118.670000,34.270000,15.000000,3221.000000,659.000000,1390.000000,607.000000,3.531300,191800.000000 +-118.670000,34.270000,10.000000,3753.000000,678.000000,1859.000000,660.000000,4.994600,204600.000000 +-118.670000,34.160000,17.000000,16544.000000,2206.000000,6214.000000,2118.000000,9.122800,500001.000000 +-118.680000,34.330000,45.000000,121.000000,25.000000,67.000000,27.000000,2.982100,325000.000000 +-118.680000,34.280000,5.000000,6150.000000,1265.000000,3188.000000,1266.000000,4.703400,223000.000000 +-118.680000,34.280000,17.000000,6488.000000,1102.000000,3199.000000,1070.000000,5.096200,238000.000000 +-118.680000,34.270000,26.000000,1561.000000,212.000000,817.000000,242.000000,5.477000,209100.000000 +-118.680000,34.270000,16.000000,4637.000000,941.000000,2476.000000,878.000000,4.056800,225200.000000 +-118.680000,34.080000,18.000000,102.000000,17.000000,55.000000,21.000000,3.993400,500001.000000 +-118.690000,34.210000,10.000000,3663.000000,409.000000,1179.000000,371.000000,12.542000,500001.000000 +-118.690000,34.180000,11.000000,1177.000000,138.000000,415.000000,119.000000,10.047200,500001.000000 +-118.690000,34.080000,23.000000,204.000000,40.000000,117.000000,41.000000,9.764600,500001.000000 +-118.700000,35.820000,20.000000,4642.000000,1300.000000,658.000000,247.000000,2.393700,82100.000000 +-118.700000,34.530000,5.000000,14275.000000,2474.000000,7158.000000,2311.000000,5.428400,236300.000000 
+-118.700000,34.300000,27.000000,1527.000000,220.000000,756.000000,226.000000,6.182500,227000.000000 +-118.700000,34.300000,23.000000,2831.000000,406.000000,1284.000000,393.000000,6.138300,244100.000000 +-118.700000,34.290000,25.000000,1678.000000,252.000000,862.000000,268.000000,6.183400,229800.000000 +-118.700000,34.280000,27.000000,727.000000,136.000000,467.000000,144.000000,3.718800,250000.000000 +-118.700000,34.280000,25.000000,2377.000000,491.000000,1200.000000,439.000000,4.708300,196100.000000 +-118.700000,34.240000,28.000000,2405.000000,462.000000,1011.000000,378.000000,4.504000,204300.000000 +-118.710000,34.300000,23.000000,1983.000000,280.000000,978.000000,287.000000,6.319900,236700.000000 +-118.710000,34.300000,20.000000,1586.000000,187.000000,699.000000,209.000000,6.548300,335000.000000 +-118.710000,34.290000,24.000000,2983.000000,406.000000,1203.000000,381.000000,6.323600,302000.000000 +-118.710000,34.290000,21.000000,2751.000000,493.000000,1432.000000,483.000000,5.206700,221200.000000 +-118.710000,34.280000,27.000000,2911.000000,562.000000,1773.000000,580.000000,4.652800,186600.000000 +-118.710000,34.270000,26.000000,990.000000,223.000000,719.000000,232.000000,3.163000,179400.000000 +-118.720000,34.280000,18.000000,2229.000000,371.000000,1283.000000,379.000000,5.595500,217700.000000 +-118.720000,34.280000,17.000000,3051.000000,505.000000,1705.000000,495.000000,5.737600,218600.000000 +-118.720000,34.280000,17.000000,2654.000000,478.000000,1392.000000,451.000000,5.445900,223900.000000 +-118.720000,34.140000,7.000000,23866.000000,4407.000000,9873.000000,4012.000000,5.403200,318500.000000 +-118.730000,36.010000,14.000000,3263.000000,651.000000,1910.000000,594.000000,2.860300,128900.000000 +-118.730000,34.290000,8.000000,4983.000000,754.000000,2510.000000,725.000000,6.945400,276500.000000 +-118.730000,34.290000,11.000000,5451.000000,736.000000,2526.000000,752.000000,7.355000,343900.000000 +-118.730000,34.270000,23.000000,4550.000000,762.000000,2301.000000,744.000000,4.556000,205300.000000 +-118.740000,37.580000,20.000000,3301.000000,779.000000,1085.000000,448.000000,3.731500,159300.000000 +-118.740000,34.270000,23.000000,2493.000000,522.000000,1488.000000,505.000000,4.180000,215000.000000 +-118.740000,34.260000,27.000000,3467.000000,545.000000,1798.000000,493.000000,4.871700,204100.000000 +-118.740000,34.260000,22.000000,4337.000000,673.000000,2347.000000,636.000000,5.409100,222400.000000 +-118.740000,34.250000,25.000000,1815.000000,281.000000,960.000000,284.000000,5.424300,214700.000000 +-118.740000,34.050000,19.000000,3487.000000,686.000000,2782.000000,584.000000,7.918400,500001.000000 +-118.750000,34.330000,27.000000,534.000000,85.000000,243.000000,77.000000,8.278700,330000.000000 +-118.750000,34.280000,27.000000,1452.000000,251.000000,928.000000,259.000000,4.690800,186600.000000 +-118.750000,34.280000,22.000000,3844.000000,537.000000,1665.000000,492.000000,6.205900,239900.000000 +-118.750000,34.270000,26.000000,966.000000,191.000000,690.000000,191.000000,5.169800,188000.000000 +-118.750000,34.270000,25.000000,3371.000000,502.000000,1717.000000,506.000000,6.125300,225000.000000 +-118.750000,34.270000,20.000000,3495.000000,449.000000,1629.000000,428.000000,5.809600,264400.000000 +-118.750000,34.260000,26.000000,1767.000000,265.000000,1040.000000,250.000000,5.478700,198100.000000 +-118.750000,34.260000,24.000000,2234.000000,373.000000,1325.000000,383.000000,5.460400,193400.000000 
+-118.750000,34.180000,4.000000,16704.000000,2704.000000,6187.000000,2207.000000,6.612200,357600.000000 +-118.750000,34.170000,16.000000,2950.000000,387.000000,1228.000000,379.000000,5.374900,346100.000000 +-118.750000,34.100000,34.000000,2255.000000,402.000000,857.000000,317.000000,4.533300,377300.000000 +-118.760000,34.280000,21.000000,2786.000000,342.000000,1114.000000,322.000000,5.857800,266300.000000 +-118.760000,34.260000,26.000000,1929.000000,293.000000,1067.000000,320.000000,5.403800,222100.000000 +-118.760000,34.260000,26.000000,1750.000000,284.000000,962.000000,278.000000,4.567300,190400.000000 +-118.760000,34.130000,10.000000,4355.000000,716.000000,2030.000000,674.000000,6.557100,500001.000000 +-118.770000,34.280000,6.000000,4685.000000,965.000000,2180.000000,909.000000,4.545800,208200.000000 +-118.770000,34.280000,27.000000,1416.000000,251.000000,1024.000000,268.000000,5.107400,185200.000000 +-118.770000,34.280000,26.000000,2873.000000,480.000000,1915.000000,475.000000,5.368100,187700.000000 +-118.770000,34.270000,10.000000,1658.000000,310.000000,1053.000000,333.000000,4.757400,209900.000000 +-118.770000,34.240000,6.000000,16222.000000,2309.000000,6700.000000,2080.000000,6.496300,308100.000000 +-118.780000,34.270000,20.000000,2743.000000,685.000000,1798.000000,613.000000,3.676100,170900.000000 +-118.780000,34.260000,24.000000,4072.000000,582.000000,1834.000000,565.000000,6.048700,254500.000000 +-118.780000,34.250000,13.000000,1841.000000,237.000000,833.000000,231.000000,7.778500,404700.000000 +-118.780000,34.160000,9.000000,30405.000000,4093.000000,12873.000000,3931.000000,8.013700,399200.000000 +-118.780000,34.050000,28.000000,1343.000000,215.000000,487.000000,199.000000,6.830000,500001.000000 +-118.790000,34.270000,27.000000,1146.000000,189.000000,595.000000,197.000000,4.583300,198500.000000 +-118.790000,34.260000,17.000000,1986.000000,249.000000,761.000000,241.000000,7.213700,401900.000000 +-118.800000,34.270000,12.000000,3330.000000,600.000000,1577.000000,584.000000,4.698500,264100.000000 +-118.800000,34.210000,16.000000,1466.000000,196.000000,661.000000,209.000000,6.289300,282700.000000 +-118.800000,34.190000,4.000000,15572.000000,2222.000000,5495.000000,2152.000000,8.649900,500001.000000 +-118.800000,34.150000,9.000000,1143.000000,179.000000,647.000000,180.000000,6.847400,356700.000000 +-118.810000,34.280000,20.000000,3678.000000,684.000000,1882.000000,694.000000,4.160700,196800.000000 +-118.810000,34.250000,4.000000,9147.000000,1827.000000,3950.000000,1661.000000,5.716000,320800.000000 +-118.820000,36.130000,43.000000,1281.000000,287.000000,534.000000,231.000000,2.890600,65700.000000 +-118.820000,35.230000,31.000000,2358.000000,580.000000,2302.000000,574.000000,1.968800,53900.000000 +-118.820000,35.200000,34.000000,2185.000000,469.000000,1910.000000,455.000000,2.113600,57300.000000 +-118.820000,34.150000,9.000000,655.000000,110.000000,222.000000,109.000000,7.852800,337500.000000 +-118.820000,34.140000,22.000000,11668.000000,1730.000000,4054.000000,1671.000000,6.993500,385500.000000 +-118.830000,35.270000,33.000000,1190.000000,217.000000,717.000000,196.000000,2.630200,81300.000000 +-118.830000,35.200000,17.000000,1959.000000,484.000000,1763.000000,453.000000,2.135700,53500.000000 +-118.830000,34.330000,6.000000,6679.000000,1164.000000,3196.000000,1157.000000,5.449300,242600.000000 +-118.830000,34.230000,6.000000,8803.000000,1114.000000,3385.000000,1010.000000,8.728800,425800.000000 
+-118.830000,34.180000,23.000000,5647.000000,786.000000,2050.000000,738.000000,6.358600,348300.000000 +-118.830000,34.170000,17.000000,4668.000000,628.000000,1917.000000,624.000000,8.139700,353900.000000 +-118.830000,34.150000,16.000000,3380.000000,731.000000,1227.000000,641.000000,4.285700,233200.000000 +-118.830000,34.140000,16.000000,1956.000000,312.000000,671.000000,319.000000,6.400100,321800.000000 +-118.830000,34.140000,16.000000,1316.000000,194.000000,450.000000,173.000000,10.159700,500001.000000 +-118.840000,34.220000,11.000000,3170.000000,420.000000,1418.000000,432.000000,7.511800,361900.000000 +-118.840000,34.210000,16.000000,4975.000000,949.000000,2537.000000,971.000000,5.236100,224700.000000 +-118.840000,34.170000,16.000000,3449.000000,820.000000,1877.000000,816.000000,3.217600,187500.000000 +-118.840000,34.160000,18.000000,6075.000000,1056.000000,2571.000000,1018.000000,5.220000,399400.000000 +-118.840000,34.150000,17.000000,3785.000000,494.000000,1527.000000,507.000000,8.444300,358500.000000 +-118.840000,34.110000,12.000000,7508.000000,1058.000000,2484.000000,965.000000,5.878800,500001.000000 +-118.850000,35.230000,26.000000,1639.000000,352.000000,1222.000000,395.000000,1.765600,68000.000000 +-118.850000,35.200000,17.000000,2783.000000,678.000000,2566.000000,641.000000,1.990700,51200.000000 +-118.850000,34.270000,50.000000,187.000000,33.000000,130.000000,35.000000,3.343800,500001.000000 +-118.850000,34.250000,17.000000,5593.000000,732.000000,1992.000000,660.000000,7.296500,342900.000000 +-118.850000,34.230000,13.000000,5094.000000,764.000000,2230.000000,737.000000,6.482300,290900.000000 +-118.850000,34.210000,29.000000,2195.000000,414.000000,1360.000000,401.000000,3.477300,206700.000000 +-118.850000,34.210000,25.000000,1328.000000,209.000000,691.000000,228.000000,4.923400,241400.000000 +-118.850000,34.190000,27.000000,2287.000000,320.000000,967.000000,321.000000,6.516200,349400.000000 +-118.850000,34.180000,11.000000,5873.000000,1455.000000,3089.000000,1365.000000,3.550400,173800.000000 +-118.850000,34.140000,24.000000,1999.000000,244.000000,759.000000,247.000000,8.765700,366300.000000 +-118.850000,34.140000,16.000000,4109.000000,543.000000,1409.000000,560.000000,8.106400,423400.000000 +-118.850000,34.040000,21.000000,3837.000000,578.000000,1509.000000,509.000000,8.447600,500001.000000 +-118.860000,36.410000,20.000000,2749.000000,575.000000,1195.000000,491.000000,3.039100,139700.000000 +-118.860000,35.900000,38.000000,298.000000,55.000000,161.000000,47.000000,4.125000,71300.000000 +-118.860000,34.220000,26.000000,1932.000000,280.000000,886.000000,289.000000,5.085500,232200.000000 +-118.860000,34.220000,26.000000,1775.000000,295.000000,1004.000000,323.000000,5.584500,251700.000000 +-118.860000,34.220000,22.000000,1230.000000,200.000000,673.000000,195.000000,6.270800,251400.000000 +-118.860000,34.210000,26.000000,3354.000000,659.000000,2020.000000,648.000000,4.157600,211800.000000 +-118.860000,34.200000,32.000000,2399.000000,384.000000,1199.000000,390.000000,4.125000,264600.000000 +-118.860000,34.190000,29.000000,1326.000000,185.000000,586.000000,187.000000,6.547400,422900.000000 +-118.860000,34.190000,26.000000,3135.000000,480.000000,1474.000000,458.000000,6.194900,243500.000000 +-118.860000,34.160000,16.000000,1509.000000,216.000000,578.000000,235.000000,10.261400,410800.000000 +-118.870000,35.650000,33.000000,1504.000000,325.000000,584.000000,223.000000,3.479200,94600.000000 +-118.870000,35.370000,14.000000,2458.000000,433.000000,1352.000000,411.000000,3.544100,87000.000000 
+-118.870000,34.230000,14.000000,4242.000000,746.000000,1858.000000,689.000000,6.014500,287100.000000 +-118.870000,34.210000,26.000000,4439.000000,616.000000,1881.000000,592.000000,6.293500,258000.000000 +-118.870000,34.200000,26.000000,1924.000000,245.000000,775.000000,244.000000,7.001000,286800.000000 +-118.870000,34.190000,23.000000,2179.000000,423.000000,1338.000000,406.000000,5.522400,240700.000000 +-118.870000,34.180000,21.000000,5661.000000,1369.000000,3188.000000,1308.000000,3.467600,212800.000000 +-118.880000,35.340000,20.000000,1351.000000,255.000000,762.000000,253.000000,2.111100,105300.000000 +-118.880000,34.420000,20.000000,728.000000,120.000000,360.000000,115.000000,6.124400,375000.000000 +-118.880000,34.280000,22.000000,3369.000000,771.000000,2751.000000,710.000000,4.047400,182100.000000 +-118.880000,34.220000,22.000000,3654.000000,517.000000,1565.000000,518.000000,6.274800,274800.000000 +-118.880000,34.220000,16.000000,2343.000000,393.000000,2007.000000,383.000000,5.756000,302700.000000 +-118.880000,34.200000,23.000000,4862.000000,597.000000,1938.000000,594.000000,7.340900,316000.000000 +-118.880000,34.190000,26.000000,2296.000000,275.000000,842.000000,263.000000,7.788900,309900.000000 +-118.880000,34.190000,16.000000,7268.000000,1729.000000,3232.000000,1653.000000,3.370300,228700.000000 +-118.880000,34.170000,15.000000,4260.000000,746.000000,1701.000000,669.000000,5.103300,410700.000000 +-118.880000,34.020000,19.000000,15990.000000,2611.000000,5175.000000,2173.000000,7.784800,500001.000000 +-118.890000,34.330000,23.000000,366.000000,62.000000,265.000000,66.000000,3.125000,375000.000000 +-118.890000,34.290000,28.000000,1545.000000,371.000000,1334.000000,318.000000,3.437500,194100.000000 +-118.890000,34.280000,30.000000,917.000000,157.000000,678.000000,171.000000,5.813300,195700.000000 +-118.890000,34.220000,20.000000,3878.000000,665.000000,1651.000000,591.000000,5.540200,264600.000000 +-118.900000,35.410000,6.000000,4656.000000,971.000000,2320.000000,935.000000,3.093800,100800.000000 +-118.900000,35.260000,31.000000,6145.000000,1492.000000,5666.000000,1457.000000,1.906600,54600.000000 +-118.900000,34.410000,35.000000,4431.000000,739.000000,2304.000000,720.000000,4.259900,209100.000000 +-118.900000,34.400000,16.000000,2614.000000,575.000000,1163.000000,524.000000,1.578100,134400.000000 +-118.900000,34.300000,13.000000,5591.000000,1013.000000,3188.000000,971.000000,5.592500,208600.000000 +-118.900000,34.260000,5.000000,25187.000000,3521.000000,11956.000000,3478.000000,6.971200,321300.000000 +-118.900000,34.190000,26.000000,1582.000000,196.000000,573.000000,182.000000,10.059500,500001.000000 +-118.900000,34.170000,14.000000,4719.000000,734.000000,1880.000000,731.000000,5.355800,313800.000000 +-118.910000,36.790000,19.000000,1616.000000,324.000000,187.000000,80.000000,3.785700,78600.000000 +-118.910000,35.400000,10.000000,3587.000000,774.000000,1398.000000,763.000000,2.569000,113000.000000 +-118.910000,35.370000,32.000000,4121.000000,755.000000,2590.000000,721.000000,3.346200,67600.000000 +-118.910000,35.270000,29.000000,1401.000000,317.000000,1344.000000,306.000000,2.092100,61400.000000 +-118.910000,35.240000,29.000000,2888.000000,753.000000,2949.000000,699.000000,1.771600,45500.000000 +-118.910000,34.400000,30.000000,2861.000000,613.000000,2065.000000,586.000000,3.202400,176100.000000 +-118.910000,34.280000,6.000000,6106.000000,1134.000000,3246.000000,1062.000000,5.220600,280200.000000 
+-118.910000,34.180000,17.000000,3220.000000,716.000000,1381.000000,733.000000,2.895800,176000.000000 +-118.920000,36.040000,28.000000,1148.000000,233.000000,521.000000,212.000000,2.920800,98500.000000 +-118.920000,35.470000,6.000000,1755.000000,280.000000,664.000000,254.000000,6.288500,216400.000000 +-118.920000,35.380000,33.000000,3122.000000,579.000000,1733.000000,545.000000,3.830700,70600.000000 +-118.920000,35.370000,17.000000,3589.000000,701.000000,1746.000000,640.000000,2.491900,75700.000000 +-118.920000,35.260000,20.000000,3815.000000,924.000000,3450.000000,920.000000,2.017400,63700.000000 +-118.920000,35.130000,29.000000,1297.000000,262.000000,909.000000,253.000000,1.923600,106300.000000 +-118.920000,34.410000,22.000000,2702.000000,655.000000,2664.000000,571.000000,3.089300,173400.000000 +-118.920000,34.400000,23.000000,1290.000000,283.000000,1060.000000,279.000000,3.315200,198000.000000 +-118.920000,34.190000,16.000000,3631.000000,974.000000,2585.000000,923.000000,3.069100,130400.000000 +-118.920000,34.180000,17.000000,2400.000000,352.000000,1067.000000,323.000000,6.352200,259300.000000 +-118.920000,34.170000,17.000000,1552.000000,246.000000,685.000000,244.000000,5.983600,294800.000000 +-118.930000,36.190000,30.000000,2685.000000,546.000000,951.000000,523.000000,2.618400,113900.000000 +-118.930000,35.440000,13.000000,1439.000000,237.000000,557.000000,227.000000,6.156300,204200.000000 +-118.930000,35.370000,34.000000,2412.000000,446.000000,1558.000000,421.000000,2.690300,62800.000000 +-118.930000,34.820000,8.000000,508.000000,111.000000,229.000000,84.000000,4.033200,128300.000000 +-118.930000,34.820000,24.000000,806.000000,168.000000,323.000000,136.000000,3.500000,113900.000000 +-118.930000,34.400000,17.000000,3275.000000,599.000000,2422.000000,637.000000,3.709200,190500.000000 +-118.930000,34.360000,33.000000,1775.000000,309.000000,1071.000000,296.000000,4.660700,187900.000000 +-118.930000,34.200000,17.000000,2619.000000,606.000000,1655.000000,557.000000,3.886000,281300.000000 +-118.930000,34.180000,18.000000,2730.000000,415.000000,1248.000000,412.000000,6.187000,287900.000000 +-118.940000,37.130000,12.000000,2255.000000,472.000000,1006.000000,334.000000,4.156300,94000.000000 +-118.940000,36.320000,10.000000,2271.000000,398.000000,986.000000,358.000000,4.070300,147100.000000 +-118.940000,35.410000,10.000000,3216.000000,526.000000,1539.000000,483.000000,6.363900,143000.000000 +-118.940000,35.400000,14.000000,5548.000000,941.000000,2815.000000,935.000000,4.221400,104600.000000 +-118.940000,35.390000,27.000000,3074.000000,452.000000,1223.000000,452.000000,5.459200,139100.000000 +-118.940000,35.390000,13.000000,3137.000000,417.000000,1318.000000,397.000000,7.775100,194100.000000 +-118.940000,35.370000,37.000000,1667.000000,362.000000,971.000000,335.000000,2.875000,57400.000000 +-118.940000,35.370000,33.000000,3372.000000,741.000000,2352.000000,704.000000,2.064300,57600.000000 +-118.940000,35.370000,23.000000,1106.000000,252.000000,790.000000,230.000000,1.852300,59700.000000 +-118.940000,35.360000,19.000000,2714.000000,512.000000,1823.000000,500.000000,3.128100,76200.000000 +-118.940000,34.240000,5.000000,10018.000000,1233.000000,4253.000000,1120.000000,8.906300,500001.000000 +-118.940000,34.180000,25.000000,3502.000000,508.000000,1713.000000,508.000000,5.518100,242100.000000 +-118.940000,34.180000,24.000000,3689.000000,585.000000,1898.000000,581.000000,5.922400,239400.000000 +-118.940000,34.170000,16.000000,3746.000000,508.000000,1556.000000,452.000000,6.330300,299400.000000 
+-118.940000,34.170000,15.000000,1679.000000,271.000000,928.000000,264.000000,5.568100,235600.000000 +-118.940000,34.160000,3.000000,1170.000000,148.000000,493.000000,142.000000,8.042800,500001.000000 +-118.950000,35.380000,35.000000,2220.000000,388.000000,906.000000,373.000000,3.593800,95200.000000 +-118.950000,35.380000,30.000000,2594.000000,478.000000,1419.000000,480.000000,3.725000,83100.000000 +-118.950000,35.370000,37.000000,1475.000000,327.000000,946.000000,295.000000,1.672800,55400.000000 +-118.950000,35.370000,34.000000,1672.000000,359.000000,1059.000000,349.000000,2.158800,61300.000000 +-118.950000,35.360000,30.000000,2294.000000,508.000000,1753.000000,482.000000,2.107800,54700.000000 +-118.950000,35.320000,29.000000,3480.000000,608.000000,2007.000000,541.000000,3.273800,78700.000000 +-118.950000,34.830000,18.000000,3278.000000,762.000000,1338.000000,550.000000,2.989100,116500.000000 +-118.950000,34.810000,30.000000,2817.000000,604.000000,1089.000000,412.000000,3.136400,123500.000000 +-118.950000,34.190000,24.000000,2719.000000,434.000000,1318.000000,424.000000,4.675000,228800.000000 +-118.950000,34.180000,25.000000,2237.000000,331.000000,1121.000000,365.000000,6.099400,254900.000000 +-118.950000,34.170000,9.000000,2372.000000,312.000000,1039.000000,321.000000,7.601600,344900.000000 +-118.950000,34.170000,23.000000,2630.000000,404.000000,1184.000000,385.000000,5.295500,247600.000000 +-118.950000,34.160000,21.000000,2953.000000,419.000000,1397.000000,410.000000,6.541000,291500.000000 +-118.960000,37.640000,11.000000,3934.000000,697.000000,901.000000,345.000000,4.238100,242700.000000 +-118.960000,36.660000,18.000000,1302.000000,424.000000,320.000000,133.000000,3.196400,80000.000000 +-118.960000,36.490000,24.000000,1268.000000,269.000000,636.000000,183.000000,1.742000,118800.000000 +-118.960000,35.410000,29.000000,3548.000000,729.000000,1542.000000,659.000000,2.947000,87100.000000 +-118.960000,35.400000,27.000000,2473.000000,400.000000,1271.000000,427.000000,3.552400,89100.000000 +-118.960000,35.390000,23.000000,5624.000000,1148.000000,2842.000000,1042.000000,3.129700,79000.000000 +-118.960000,35.380000,41.000000,2417.000000,435.000000,973.000000,406.000000,3.056800,85600.000000 +-118.960000,35.380000,34.000000,2047.000000,347.000000,888.000000,352.000000,3.673400,92900.000000 +-118.960000,35.370000,40.000000,1603.000000,374.000000,1026.000000,337.000000,1.365000,54300.000000 +-118.960000,35.370000,32.000000,1025.000000,259.000000,874.000000,236.000000,1.961200,53400.000000 +-118.960000,35.360000,35.000000,2285.000000,497.000000,1738.000000,480.000000,2.484800,54000.000000 +-118.960000,34.300000,16.000000,3103.000000,482.000000,1567.000000,467.000000,6.907000,500001.000000 +-118.960000,34.230000,14.000000,15207.000000,2924.000000,6301.000000,2829.000000,3.969900,217000.000000 +-118.960000,34.190000,16.000000,1807.000000,346.000000,587.000000,296.000000,1.981100,162500.000000 +-118.960000,34.180000,16.000000,3137.000000,462.000000,1384.000000,436.000000,6.130600,258200.000000 +-118.970000,37.640000,14.000000,2284.000000,622.000000,342.000000,137.000000,3.092100,87500.000000 +-118.970000,37.640000,14.000000,1847.000000,439.000000,238.000000,98.000000,3.604200,137500.000000 +-118.970000,36.060000,26.000000,1289.000000,262.000000,1100.000000,244.000000,1.975000,51400.000000 +-118.970000,35.410000,36.000000,1896.000000,315.000000,937.000000,303.000000,3.996000,85500.000000 +-118.970000,35.400000,34.000000,1859.000000,323.000000,854.000000,309.000000,3.190600,76200.000000 
+-118.970000,35.390000,38.000000,2121.000000,433.000000,1547.000000,441.000000,2.774000,59500.000000 +-118.970000,35.380000,42.000000,1185.000000,358.000000,1038.000000,299.000000,0.995100,48000.000000 +-118.970000,35.380000,35.000000,1673.000000,426.000000,1041.000000,413.000000,1.375000,57500.000000 +-118.970000,35.380000,32.000000,1361.000000,363.000000,1483.000000,297.000000,1.625000,46800.000000 +-118.970000,35.370000,52.000000,425.000000,119.000000,380.000000,97.000000,1.412500,42500.000000 +-118.970000,35.370000,41.000000,2396.000000,602.000000,1781.000000,543.000000,1.881900,58000.000000 +-118.970000,35.370000,34.000000,1379.000000,333.000000,1156.000000,315.000000,1.719700,48900.000000 +-118.970000,35.360000,31.000000,1418.000000,306.000000,1219.000000,312.000000,1.574300,46700.000000 +-118.970000,34.180000,18.000000,7338.000000,1020.000000,3419.000000,1058.000000,7.024200,293100.000000 +-118.980000,38.030000,15.000000,991.000000,277.000000,419.000000,170.000000,3.546900,82500.000000 +-118.980000,37.650000,18.000000,1795.000000,416.000000,483.000000,208.000000,4.537500,169800.000000 +-118.980000,37.640000,17.000000,3769.000000,908.000000,1160.000000,453.000000,3.050000,188900.000000 +-118.980000,36.060000,33.000000,2043.000000,443.000000,1497.000000,417.000000,2.343000,47400.000000 +-118.980000,35.400000,36.000000,1864.000000,331.000000,1052.000000,325.000000,3.420500,76600.000000 +-118.980000,35.400000,36.000000,1443.000000,273.000000,680.000000,259.000000,2.982100,73100.000000 +-118.980000,35.400000,34.000000,813.000000,171.000000,440.000000,170.000000,2.839300,69800.000000 +-118.980000,35.390000,32.000000,2620.000000,682.000000,2375.000000,684.000000,1.261800,46900.000000 +-118.980000,35.390000,29.000000,607.000000,177.000000,476.000000,143.000000,1.187500,50700.000000 +-118.980000,35.390000,22.000000,1812.000000,457.000000,1592.000000,420.000000,1.414600,49100.000000 +-118.980000,35.380000,39.000000,1497.000000,383.000000,1182.000000,355.000000,1.064800,50000.000000 +-118.980000,35.380000,34.000000,1020.000000,247.000000,795.000000,228.000000,1.625000,50800.000000 +-118.980000,35.380000,28.000000,1171.000000,299.000000,1193.000000,273.000000,0.863900,49400.000000 +-118.980000,35.380000,24.000000,1807.000000,465.000000,1460.000000,410.000000,1.478600,54800.000000 +-118.980000,35.370000,36.000000,1562.000000,398.000000,1223.000000,329.000000,0.967500,47100.000000 +-118.980000,35.370000,35.000000,825.000000,179.000000,670.000000,181.000000,1.163800,57900.000000 +-118.980000,35.360000,29.000000,1244.000000,266.000000,933.000000,227.000000,1.698100,49400.000000 +-118.980000,35.360000,15.000000,1482.000000,338.000000,1059.000000,279.000000,1.261700,42700.000000 +-118.980000,34.400000,34.000000,1328.000000,244.000000,795.000000,227.000000,4.421900,338100.000000 +-118.980000,34.160000,16.000000,2476.000000,402.000000,1251.000000,387.000000,5.767600,241300.000000 +-118.990000,37.650000,20.000000,2474.000000,625.000000,338.000000,141.000000,5.010000,195500.000000 +-118.990000,37.630000,10.000000,7744.000000,1573.000000,483.000000,224.000000,3.291700,231800.000000 +-118.990000,36.070000,21.000000,983.000000,165.000000,672.000000,169.000000,2.975000,63900.000000 +-118.990000,36.060000,19.000000,2153.000000,458.000000,1317.000000,386.000000,1.756400,42600.000000 +-118.990000,35.400000,48.000000,1908.000000,331.000000,789.000000,321.000000,3.571400,84600.000000 +-118.990000,35.400000,43.000000,2225.000000,392.000000,890.000000,374.000000,4.020800,90400.000000 
+-118.990000,35.390000,52.000000,2805.000000,573.000000,1325.000000,522.000000,2.508300,70100.000000 +-118.990000,35.390000,39.000000,2228.000000,542.000000,1516.000000,435.000000,1.600900,48800.000000 +-118.990000,35.380000,30.000000,1390.000000,361.000000,1116.000000,298.000000,1.345100,57500.000000 +-118.990000,35.380000,26.000000,1317.000000,374.000000,1025.000000,304.000000,1.402400,51000.000000 +-118.990000,35.370000,38.000000,918.000000,220.000000,743.000000,222.000000,1.729200,58100.000000 +-118.990000,35.370000,36.000000,832.000000,198.000000,814.000000,174.000000,1.477300,57400.000000 +-118.990000,35.360000,31.000000,1498.000000,359.000000,1168.000000,340.000000,1.223200,49300.000000 +-118.990000,35.360000,18.000000,1524.000000,354.000000,1210.000000,344.000000,1.113600,47800.000000 +-118.990000,35.350000,32.000000,1293.000000,317.000000,1109.000000,286.000000,1.178600,45600.000000 +-118.990000,35.350000,27.000000,1615.000000,355.000000,1380.000000,332.000000,1.663200,49800.000000 +-118.990000,35.330000,36.000000,1590.000000,367.000000,1311.000000,390.000000,1.678600,52900.000000 +-118.990000,35.320000,35.000000,1576.000000,405.000000,870.000000,282.000000,1.657500,59500.000000 +-118.990000,35.320000,26.000000,875.000000,199.000000,567.000000,204.000000,0.928800,36600.000000 +-118.990000,35.300000,33.000000,2248.000000,434.000000,1461.000000,405.000000,2.940200,56200.000000 +-118.990000,34.230000,9.000000,10618.000000,1617.000000,4830.000000,1606.000000,6.624600,284200.000000 +-119.000000,36.070000,20.000000,1042.000000,183.000000,509.000000,175.000000,2.981500,73000.000000 +-119.000000,36.050000,24.000000,3208.000000,691.000000,1986.000000,662.000000,1.550600,52300.000000 +-119.000000,35.400000,44.000000,2250.000000,378.000000,928.000000,379.000000,4.390600,93900.000000 +-119.000000,35.390000,42.000000,2839.000000,516.000000,1203.000000,487.000000,3.770800,79400.000000 +-119.000000,35.370000,41.000000,303.000000,78.000000,216.000000,80.000000,2.221200,55500.000000 +-119.000000,35.360000,40.000000,850.000000,227.000000,764.000000,186.000000,0.940700,43600.000000 +-119.000000,35.360000,39.000000,896.000000,217.000000,805.000000,197.000000,1.250000,42500.000000 +-119.000000,35.360000,35.000000,1021.000000,280.000000,1258.000000,239.000000,1.737500,48600.000000 +-119.000000,35.350000,35.000000,1164.000000,277.000000,992.000000,284.000000,1.401500,48700.000000 +-119.000000,35.350000,31.000000,2931.000000,716.000000,1969.000000,588.000000,2.215500,62100.000000 +-119.000000,35.330000,35.000000,991.000000,221.000000,620.000000,207.000000,1.941700,53800.000000 +-119.000000,35.310000,37.000000,1337.000000,275.000000,767.000000,273.000000,1.652200,53300.000000 +-119.000000,34.190000,5.000000,3634.000000,718.000000,1317.000000,743.000000,4.291700,227900.000000 +-119.000000,34.080000,17.000000,1822.000000,438.000000,578.000000,291.000000,5.434600,428600.000000 +-119.010000,36.080000,31.000000,1620.000000,366.000000,1154.000000,348.000000,1.885700,55500.000000 +-119.010000,36.070000,44.000000,2450.000000,575.000000,1330.000000,508.000000,1.610300,50900.000000 +-119.010000,36.060000,25.000000,1505.000000,367.000000,1392.000000,359.000000,1.681200,47700.000000 +-119.010000,36.050000,27.000000,1127.000000,294.000000,839.000000,276.000000,1.380700,53100.000000 +-119.010000,36.020000,17.000000,3915.000000,742.000000,1768.000000,688.000000,2.375000,79800.000000 +-119.010000,35.440000,20.000000,3458.000000,651.000000,1465.000000,621.000000,2.580600,82500.000000 
+-119.010000,35.380000,44.000000,434.000000,110.000000,274.000000,86.000000,1.194400,57500.000000 +-119.010000,35.380000,36.000000,790.000000,224.000000,426.000000,208.000000,1.442700,50600.000000 +-119.010000,35.370000,45.000000,629.000000,143.000000,568.000000,139.000000,1.732100,84400.000000 +-119.010000,35.370000,44.000000,593.000000,136.000000,364.000000,121.000000,1.477900,66000.000000 +-119.010000,35.370000,38.000000,1702.000000,380.000000,1191.000000,366.000000,1.880100,57800.000000 +-119.010000,35.370000,33.000000,821.000000,181.000000,579.000000,172.000000,1.246900,46700.000000 +-119.010000,35.360000,38.000000,1838.000000,388.000000,1203.000000,373.000000,1.679700,60700.000000 +-119.010000,35.360000,36.000000,2658.000000,626.000000,1490.000000,529.000000,1.215700,57000.000000 +-119.010000,35.360000,24.000000,1941.000000,484.000000,1277.000000,435.000000,1.056000,51600.000000 +-119.010000,35.350000,39.000000,598.000000,149.000000,366.000000,132.000000,1.912500,57900.000000 +-119.010000,35.350000,34.000000,1354.000000,325.000000,922.000000,304.000000,2.187500,58000.000000 +-119.010000,35.340000,44.000000,1730.000000,343.000000,782.000000,278.000000,3.020800,63700.000000 +-119.010000,35.340000,36.000000,973.000000,219.000000,613.000000,187.000000,1.562500,46700.000000 +-119.010000,35.330000,42.000000,1120.000000,255.000000,677.000000,213.000000,1.542900,39400.000000 +-119.010000,35.330000,32.000000,3068.000000,628.000000,1897.000000,607.000000,2.423400,63700.000000 +-119.010000,35.320000,23.000000,4870.000000,965.000000,2717.000000,928.000000,2.596000,70000.000000 +-119.010000,35.310000,19.000000,7092.000000,1517.000000,4101.000000,1436.000000,2.100600,74800.000000 +-119.010000,35.300000,7.000000,8596.000000,1597.000000,4893.000000,1520.000000,3.905400,80900.000000 +-119.010000,35.280000,10.000000,7011.000000,1453.000000,4163.000000,1307.000000,2.765900,77500.000000 +-119.010000,35.240000,6.000000,80.000000,16.000000,66.000000,21.000000,3.125000,65000.000000 +-119.020000,36.090000,15.000000,2234.000000,415.000000,1254.000000,420.000000,3.023400,88600.000000 +-119.020000,36.070000,39.000000,1173.000000,269.000000,702.000000,232.000000,1.614600,53100.000000 +-119.020000,36.070000,29.000000,2610.000000,597.000000,1659.000000,571.000000,1.591100,60800.000000 +-119.020000,36.050000,22.000000,2078.000000,431.000000,1336.000000,456.000000,2.220200,65200.000000 +-119.020000,35.440000,29.000000,3415.000000,631.000000,1527.000000,597.000000,4.012500,84400.000000 +-119.020000,35.430000,39.000000,2033.000000,370.000000,956.000000,379.000000,3.173600,70700.000000 +-119.020000,35.420000,40.000000,1089.000000,226.000000,520.000000,218.000000,2.272700,67200.000000 +-119.020000,35.420000,36.000000,2044.000000,447.000000,1021.000000,374.000000,1.847200,57400.000000 +-119.020000,35.410000,41.000000,2221.000000,516.000000,1106.000000,473.000000,1.970000,51900.000000 +-119.020000,35.410000,31.000000,2348.000000,701.000000,1413.000000,611.000000,1.322200,51400.000000 +-119.020000,35.390000,52.000000,191.000000,52.000000,106.000000,49.000000,2.045500,72500.000000 +-119.020000,35.390000,30.000000,227.000000,75.000000,169.000000,101.000000,1.352700,60000.000000 +-119.020000,35.380000,48.000000,346.000000,92.000000,129.000000,63.000000,1.187500,63800.000000 +-119.020000,35.370000,44.000000,2687.000000,620.000000,1521.000000,549.000000,1.721300,61600.000000 +-119.020000,35.360000,47.000000,1631.000000,340.000000,847.000000,315.000000,2.506200,73700.000000 
+-119.020000,35.350000,38.000000,1472.000000,305.000000,670.000000,282.000000,2.240700,76000.000000 +-119.020000,35.340000,38.000000,1463.000000,294.000000,692.000000,295.000000,2.312500,65800.000000 +-119.020000,35.340000,35.000000,1650.000000,390.000000,1145.000000,343.000000,1.535700,56500.000000 +-119.020000,35.340000,34.000000,2861.000000,510.000000,1375.000000,486.000000,3.428600,71400.000000 +-119.020000,35.330000,35.000000,2053.000000,412.000000,1193.000000,387.000000,2.750000,65800.000000 +-119.020000,35.330000,26.000000,3691.000000,826.000000,2072.000000,827.000000,2.155300,84700.000000 +-119.020000,35.320000,14.000000,2927.000000,588.000000,1821.000000,561.000000,3.352900,82600.000000 +-119.020000,35.300000,10.000000,7397.000000,1369.000000,4611.000000,1310.000000,3.636900,81600.000000 +-119.020000,34.260000,40.000000,1498.000000,292.000000,707.000000,249.000000,3.797400,228700.000000 +-119.020000,34.240000,24.000000,4650.000000,748.000000,2374.000000,702.000000,5.883800,232600.000000 +-119.030000,36.080000,19.000000,2736.000000,549.000000,1432.000000,503.000000,2.694400,67700.000000 +-119.030000,36.080000,19.000000,2471.000000,431.000000,1040.000000,426.000000,3.250000,80600.000000 +-119.030000,36.070000,26.000000,3210.000000,646.000000,1908.000000,642.000000,2.416700,77600.000000 +-119.030000,36.060000,36.000000,1925.000000,443.000000,1405.000000,422.000000,2.162000,51900.000000 +-119.030000,35.450000,14.000000,3520.000000,604.000000,1748.000000,582.000000,4.316200,87100.000000 +-119.030000,35.420000,42.000000,1705.000000,418.000000,905.000000,393.000000,1.628600,54600.000000 +-119.030000,35.420000,38.000000,2952.000000,598.000000,1491.000000,568.000000,2.609400,67900.000000 +-119.030000,35.410000,41.000000,1808.000000,435.000000,1005.000000,373.000000,1.785700,54300.000000 +-119.030000,35.410000,37.000000,1761.000000,443.000000,911.000000,365.000000,2.033100,53200.000000 +-119.030000,35.400000,35.000000,2608.000000,620.000000,1566.000000,583.000000,2.181800,63500.000000 +-119.030000,35.390000,28.000000,4513.000000,764.000000,1593.000000,763.000000,2.982100,118700.000000 +-119.030000,35.380000,38.000000,2122.000000,394.000000,843.000000,410.000000,3.000000,91800.000000 +-119.030000,35.370000,52.000000,1503.000000,367.000000,554.000000,277.000000,1.678600,126600.000000 +-119.030000,35.370000,42.000000,2508.000000,483.000000,1035.000000,442.000000,2.651300,72300.000000 +-119.030000,35.360000,41.000000,2551.000000,594.000000,1342.000000,595.000000,1.967100,76800.000000 +-119.030000,35.360000,41.000000,1944.000000,363.000000,977.000000,388.000000,3.909700,81300.000000 +-119.030000,35.350000,34.000000,1441.000000,294.000000,695.000000,275.000000,2.687500,73700.000000 +-119.030000,35.340000,34.000000,2221.000000,436.000000,1131.000000,408.000000,3.048600,68500.000000 +-119.030000,35.330000,21.000000,3057.000000,698.000000,1627.000000,680.000000,2.708300,84700.000000 +-119.030000,35.320000,12.000000,2721.000000,549.000000,1294.000000,523.000000,2.557500,100200.000000 +-119.030000,35.300000,10.000000,829.000000,146.000000,447.000000,173.000000,4.148400,102900.000000 +-119.030000,34.250000,25.000000,3344.000000,502.000000,1483.000000,496.000000,6.196000,340600.000000 +-119.030000,34.240000,25.000000,3655.000000,545.000000,1776.000000,544.000000,5.687000,238100.000000 +-119.030000,34.230000,21.000000,3284.000000,487.000000,1832.000000,521.000000,5.277300,250800.000000 +-119.030000,34.220000,24.000000,3421.000000,656.000000,2220.000000,645.000000,4.783100,214200.000000 
+-119.040000,36.090000,15.000000,2288.000000,401.000000,1238.000000,429.000000,3.027800,77400.000000 +-119.040000,36.070000,26.000000,2185.000000,435.000000,1108.000000,419.000000,2.227700,78000.000000 +-119.040000,36.070000,17.000000,2623.000000,659.000000,1912.000000,618.000000,1.589300,52000.000000 +-119.040000,35.950000,25.000000,1009.000000,246.000000,994.000000,222.000000,1.846200,55800.000000 +-119.040000,35.420000,47.000000,1691.000000,402.000000,913.000000,358.000000,1.840300,54700.000000 +-119.040000,35.410000,25.000000,1577.000000,310.000000,844.000000,309.000000,3.062500,69400.000000 +-119.040000,35.410000,20.000000,3268.000000,833.000000,1622.000000,758.000000,1.358700,67500.000000 +-119.040000,35.370000,46.000000,1637.000000,338.000000,714.000000,297.000000,2.181800,75300.000000 +-119.040000,35.370000,44.000000,1618.000000,310.000000,667.000000,300.000000,2.875000,82700.000000 +-119.040000,35.360000,40.000000,1533.000000,312.000000,771.000000,306.000000,3.043500,69500.000000 +-119.040000,35.350000,31.000000,1607.000000,336.000000,817.000000,307.000000,2.564400,73000.000000 +-119.040000,35.350000,27.000000,4590.000000,897.000000,2212.000000,894.000000,3.175300,85000.000000 +-119.040000,35.320000,20.000000,37.000000,11.000000,34.000000,8.000000,1.200000,50000.000000 +-119.040000,34.340000,35.000000,462.000000,90.000000,334.000000,96.000000,5.358200,281300.000000 +-119.040000,34.280000,21.000000,1856.000000,276.000000,863.000000,255.000000,4.583300,500001.000000 +-119.040000,34.240000,20.000000,7794.000000,1192.000000,4169.000000,1188.000000,5.931600,311900.000000 +-119.040000,34.230000,21.000000,9807.000000,1614.000000,4199.000000,1554.000000,5.014500,246600.000000 +-119.050000,36.090000,9.000000,3297.000000,568.000000,1749.000000,568.000000,4.021700,99200.000000 +-119.050000,36.070000,21.000000,2472.000000,523.000000,1238.000000,504.000000,1.775600,62900.000000 +-119.050000,35.420000,41.000000,1992.000000,421.000000,1006.000000,419.000000,2.839300,57000.000000 +-119.050000,35.420000,35.000000,2353.000000,483.000000,1368.000000,455.000000,2.325000,63200.000000 +-119.050000,35.400000,18.000000,1894.000000,319.000000,846.000000,317.000000,3.861100,126400.000000 +-119.050000,35.360000,30.000000,4635.000000,800.000000,2307.000000,754.000000,3.654800,84700.000000 +-119.050000,35.360000,16.000000,4507.000000,1049.000000,2261.000000,959.000000,3.326100,118400.000000 +-119.050000,35.330000,18.000000,12707.000000,2685.000000,7009.000000,2552.000000,2.943800,87200.000000 +-119.050000,35.300000,9.000000,10822.000000,1994.000000,6241.000000,1906.000000,4.063100,88200.000000 +-119.050000,34.400000,50.000000,1236.000000,282.000000,1079.000000,257.000000,2.699100,181300.000000 +-119.050000,34.360000,22.000000,1815.000000,506.000000,2428.000000,473.000000,2.841700,162500.000000 +-119.050000,34.210000,27.000000,4357.000000,926.000000,2110.000000,876.000000,3.011900,218200.000000 +-119.050000,34.190000,39.000000,143.000000,36.000000,113.000000,33.000000,2.894200,275000.000000 +-119.050000,34.130000,12.000000,57.000000,22.000000,69.000000,15.000000,5.006600,275000.000000 +-119.060000,36.150000,25.000000,2402.000000,478.000000,1527.000000,461.000000,2.319400,52900.000000 +-119.060000,36.150000,20.000000,1282.000000,273.000000,852.000000,247.000000,1.635400,49000.000000 +-119.060000,36.090000,11.000000,2572.000000,454.000000,1402.000000,415.000000,3.678600,72900.000000 +-119.060000,36.070000,20.000000,2683.000000,553.000000,1497.000000,548.000000,1.703100,64600.000000 
+-119.060000,35.940000,18.000000,3501.000000,721.000000,2009.000000,660.000000,2.657600,65700.000000 +-119.060000,35.360000,9.000000,1228.000000,234.000000,409.000000,212.000000,4.348200,95200.000000 +-119.060000,35.350000,20.000000,9351.000000,2139.000000,4584.000000,1953.000000,2.575000,69900.000000 +-119.060000,35.320000,15.000000,3944.000000,746.000000,2355.000000,757.000000,3.569000,70700.000000 +-119.060000,34.620000,10.000000,416.000000,110.000000,436.000000,70.000000,2.222200,262500.000000 +-119.060000,34.360000,52.000000,1409.000000,359.000000,981.000000,304.000000,2.795100,199300.000000 +-119.060000,34.360000,48.000000,1459.000000,324.000000,902.000000,350.000000,2.418500,189900.000000 +-119.060000,34.350000,34.000000,2426.000000,646.000000,2116.000000,631.000000,2.068200,158300.000000 +-119.060000,34.240000,21.000000,7436.000000,984.000000,2982.000000,988.000000,7.677500,391200.000000 +-119.060000,34.230000,29.000000,3511.000000,632.000000,2591.000000,596.000000,3.021900,221700.000000 +-119.060000,34.230000,23.000000,3471.000000,510.000000,2002.000000,555.000000,5.274200,257500.000000 +-119.060000,34.220000,13.000000,4175.000000,1321.000000,2257.000000,1271.000000,3.144600,177100.000000 +-119.070000,37.800000,12.000000,1736.000000,352.000000,330.000000,123.000000,3.529400,160700.000000 +-119.070000,36.080000,5.000000,2693.000000,508.000000,1785.000000,491.000000,3.000000,71000.000000 +-119.070000,36.070000,11.000000,2265.000000,382.000000,1285.000000,387.000000,3.204200,76200.000000 +-119.070000,35.420000,19.000000,3889.000000,832.000000,1872.000000,731.000000,2.681200,107600.000000 +-119.070000,35.360000,19.000000,5254.000000,894.000000,2155.000000,831.000000,4.670500,110700.000000 +-119.070000,35.350000,24.000000,4119.000000,865.000000,1294.000000,879.000000,2.412300,86200.000000 +-119.070000,35.340000,16.000000,4201.000000,786.000000,1667.000000,724.000000,4.883900,134100.000000 +-119.070000,35.330000,13.000000,9027.000000,1901.000000,4870.000000,1797.000000,3.406000,100700.000000 +-119.070000,35.270000,25.000000,3081.000000,635.000000,1830.000000,591.000000,2.580400,97900.000000 +-119.080000,37.780000,17.000000,1631.000000,335.000000,285.000000,128.000000,2.765600,130000.000000 +-119.080000,36.220000,28.000000,1606.000000,320.000000,1158.000000,317.000000,3.032400,55600.000000 +-119.080000,36.210000,20.000000,1911.000000,389.000000,1241.000000,348.000000,2.515600,59300.000000 +-119.080000,36.130000,21.000000,2271.000000,376.000000,1145.000000,372.000000,3.152800,113700.000000 +-119.080000,36.090000,25.000000,1880.000000,339.000000,1003.000000,315.000000,2.729800,103400.000000 +-119.080000,36.020000,26.000000,1748.000000,346.000000,891.000000,303.000000,1.943900,62100.000000 +-119.080000,35.420000,10.000000,4159.000000,608.000000,2089.000000,591.000000,5.526100,132000.000000 +-119.080000,35.390000,10.000000,6435.000000,1040.000000,3242.000000,1030.000000,5.575000,132200.000000 +-119.080000,35.360000,12.000000,6442.000000,1116.000000,2966.000000,1092.000000,4.579100,123400.000000 +-119.080000,35.350000,20.000000,892.000000,129.000000,331.000000,135.000000,7.183700,176300.000000 +-119.080000,35.340000,18.000000,4070.000000,512.000000,1580.000000,540.000000,10.594100,245800.000000 +-119.080000,35.340000,16.000000,1535.000000,238.000000,768.000000,236.000000,5.444900,118500.000000 +-119.080000,35.340000,15.000000,1474.000000,235.000000,768.000000,238.000000,4.152800,130100.000000 
+-119.080000,35.320000,8.000000,11609.000000,2141.000000,5696.000000,2100.000000,5.001200,106300.000000 +-119.080000,34.340000,23.000000,3065.000000,723.000000,2042.000000,698.000000,2.759300,194800.000000 +-119.090000,36.420000,17.000000,877.000000,219.000000,966.000000,218.000000,2.000000,52500.000000 +-119.090000,36.420000,15.000000,1517.000000,361.000000,1275.000000,343.000000,1.587500,55800.000000 +-119.090000,36.350000,21.000000,3146.000000,595.000000,1580.000000,513.000000,2.785700,92700.000000 +-119.090000,36.220000,34.000000,1715.000000,290.000000,780.000000,297.000000,3.430600,74300.000000 +-119.090000,36.210000,43.000000,1335.000000,280.000000,943.000000,288.000000,1.986100,47700.000000 +-119.090000,36.210000,38.000000,1901.000000,453.000000,1613.000000,400.000000,1.882800,44600.000000 +-119.090000,35.430000,28.000000,254.000000,35.000000,118.000000,37.000000,4.857100,237500.000000 +-119.090000,35.350000,14.000000,2113.000000,256.000000,842.000000,265.000000,8.532500,224100.000000 +-119.090000,35.330000,9.000000,7085.000000,1148.000000,3084.000000,1052.000000,4.997000,142900.000000 +-119.090000,34.350000,20.000000,4725.000000,881.000000,2823.000000,869.000000,4.012200,214800.000000 +-119.090000,34.220000,8.000000,40.000000,10.000000,309.000000,16.000000,4.020800,52500.000000 +-119.100000,36.420000,26.000000,1775.000000,416.000000,1217.000000,383.000000,1.880100,57600.000000 +-119.100000,36.400000,31.000000,1533.000000,361.000000,1518.000000,386.000000,1.560800,51700.000000 +-119.100000,36.400000,23.000000,1885.000000,363.000000,1056.000000,338.000000,3.215900,92800.000000 +-119.100000,36.210000,38.000000,727.000000,173.000000,559.000000,176.000000,2.465300,49500.000000 +-119.100000,36.190000,17.000000,1564.000000,396.000000,713.000000,362.000000,1.618600,77100.000000 +-119.100000,35.790000,19.000000,1809.000000,477.000000,2051.000000,416.000000,1.814400,49800.000000 +-119.100000,35.350000,5.000000,4597.000000,1071.000000,1916.000000,870.000000,4.032700,131000.000000 +-119.100000,35.330000,4.000000,6640.000000,898.000000,3121.000000,902.000000,6.759000,170300.000000 +-119.100000,34.310000,21.000000,2424.000000,527.000000,1379.000000,484.000000,2.678600,184000.000000 +-119.110000,36.290000,18.000000,1666.000000,294.000000,859.000000,301.000000,2.606500,93800.000000 +-119.110000,36.210000,10.000000,1972.000000,455.000000,1469.000000,442.000000,1.540700,58400.000000 +-119.110000,35.420000,52.000000,154.000000,28.000000,37.000000,16.000000,10.026300,200000.000000 +-119.110000,35.380000,37.000000,2044.000000,394.000000,894.000000,359.000000,2.945300,82800.000000 +-119.110000,34.330000,14.000000,4026.000000,769.000000,1825.000000,671.000000,3.554100,191800.000000 +-119.110000,34.170000,37.000000,470.000000,105.000000,522.000000,83.000000,2.036800,243800.000000 +-119.120000,36.690000,13.000000,3963.000000,812.000000,1905.000000,671.000000,2.227800,90500.000000 +-119.120000,36.540000,30.000000,2747.000000,515.000000,1368.000000,453.000000,2.982800,85200.000000 +-119.120000,36.290000,29.000000,1638.000000,323.000000,942.000000,322.000000,2.173100,66200.000000 +-119.120000,36.190000,21.000000,2645.000000,464.000000,1245.000000,407.000000,2.914500,114200.000000 +-119.120000,36.050000,27.000000,1575.000000,321.000000,1063.000000,317.000000,2.147700,53900.000000 +-119.120000,35.410000,12.000000,5589.000000,941.000000,3018.000000,917.000000,4.456100,96900.000000 +-119.120000,35.390000,13.000000,1264.000000,202.000000,552.000000,187.000000,4.590300,94300.000000 
+-119.120000,35.380000,18.000000,1521.000000,269.000000,706.000000,279.000000,4.419600,121000.000000 +-119.120000,35.370000,13.000000,4527.000000,713.000000,2170.000000,671.000000,4.826600,146200.000000 +-119.120000,34.380000,28.000000,7200.000000,1281.000000,3793.000000,1238.000000,4.075000,237900.000000 +-119.120000,34.250000,31.000000,737.000000,146.000000,1436.000000,168.000000,3.562500,194100.000000 +-119.120000,34.230000,35.000000,2028.000000,554.000000,2252.000000,521.000000,2.464300,182000.000000 +-119.130000,36.300000,33.000000,3379.000000,612.000000,1565.000000,618.000000,2.732100,76500.000000 +-119.130000,36.130000,28.000000,1673.000000,385.000000,1434.000000,371.000000,2.058600,40900.000000 +-119.130000,35.220000,5.000000,6268.000000,1003.000000,3269.000000,980.000000,5.145700,118200.000000 +-119.130000,34.190000,16.000000,6389.000000,1330.000000,6242.000000,1340.000000,4.022200,206800.000000 +-119.140000,36.290000,36.000000,788.000000,181.000000,405.000000,180.000000,1.470000,61900.000000 +-119.140000,36.290000,32.000000,2084.000000,482.000000,1410.000000,420.000000,1.532100,48300.000000 +-119.140000,35.760000,30.000000,735.000000,137.000000,421.000000,113.000000,2.562500,156300.000000 +-119.140000,34.490000,17.000000,321.000000,44.000000,92.000000,39.000000,7.750000,375000.000000 +-119.140000,34.290000,17.000000,2754.000000,577.000000,1349.000000,533.000000,3.161800,154200.000000 +-119.140000,34.280000,31.000000,790.000000,241.000000,1095.000000,222.000000,2.250000,75000.000000 +-119.140000,34.230000,8.000000,243.000000,75.000000,102.000000,80.000000,2.571400,500001.000000 +-119.140000,34.170000,16.000000,1593.000000,353.000000,836.000000,357.000000,2.726000,67500.000000 +-119.140000,34.150000,25.000000,2202.000000,390.000000,1415.000000,412.000000,4.430000,207700.000000 +-119.150000,36.290000,18.000000,1435.000000,257.000000,657.000000,254.000000,2.428100,72500.000000 +-119.150000,34.830000,6.000000,8733.000000,1600.000000,2006.000000,736.000000,4.572400,168400.000000 +-119.150000,34.440000,33.000000,2005.000000,392.000000,1043.000000,351.000000,5.308000,297900.000000 +-119.150000,34.300000,21.000000,2475.000000,502.000000,1269.000000,505.000000,2.980000,259200.000000 +-119.150000,34.250000,36.000000,3511.000000,664.000000,2965.000000,695.000000,4.087800,186800.000000 +-119.150000,34.200000,25.000000,3445.000000,898.000000,5558.000000,894.000000,3.097200,169300.000000 +-119.150000,34.170000,22.000000,1612.000000,334.000000,1431.000000,335.000000,4.812500,194400.000000 +-119.160000,36.310000,7.000000,2946.000000,664.000000,1608.000000,622.000000,1.682900,80200.000000 +-119.160000,36.280000,18.000000,2377.000000,414.000000,1359.000000,424.000000,4.400000,79300.000000 +-119.160000,34.280000,30.000000,413.000000,98.000000,400.000000,112.000000,4.000000,219200.000000 +-119.160000,34.280000,11.000000,5330.000000,1056.000000,2801.000000,1028.000000,4.763000,232700.000000 +-119.160000,34.270000,24.000000,1824.000000,331.000000,1049.000000,320.000000,5.918100,221100.000000 +-119.160000,34.230000,26.000000,5444.000000,1293.000000,3700.000000,1158.000000,2.755600,213200.000000 +-119.160000,34.170000,17.000000,7982.000000,1603.000000,6437.000000,1596.000000,4.127900,223900.000000 +-119.160000,34.120000,17.000000,224.000000,70.000000,147.000000,71.000000,3.616700,280000.000000 +-119.170000,34.310000,21.000000,259.000000,38.000000,142.000000,45.000000,5.268100,500001.000000 +-119.170000,34.270000,24.000000,4165.000000,646.000000,2194.000000,658.000000,6.066100,234800.000000 
+-119.170000,34.270000,18.000000,8010.000000,1539.000000,3982.000000,1483.000000,4.090500,236500.000000 +-119.170000,34.260000,10.000000,3654.000000,541.000000,1638.000000,551.000000,6.188500,267300.000000 +-119.170000,34.250000,15.000000,1329.000000,282.000000,1001.000000,284.000000,3.650000,189300.000000 +-119.170000,34.220000,29.000000,4188.000000,816.000000,2783.000000,790.000000,4.194900,197100.000000 +-119.170000,34.210000,33.000000,1039.000000,256.000000,1432.000000,272.000000,3.110300,143500.000000 +-119.170000,34.200000,40.000000,1083.000000,319.000000,1843.000000,349.000000,2.307700,106900.000000 +-119.170000,34.200000,36.000000,2028.000000,523.000000,2751.000000,496.000000,3.015000,149300.000000 +-119.170000,34.190000,28.000000,1444.000000,508.000000,2145.000000,437.000000,1.696400,175000.000000 +-119.170000,34.180000,38.000000,3221.000000,783.000000,2792.000000,736.000000,2.911800,172400.000000 +-119.170000,34.170000,42.000000,1411.000000,300.000000,1295.000000,339.000000,2.666700,164900.000000 +-119.170000,34.170000,34.000000,2749.000000,539.000000,2330.000000,559.000000,4.213700,185600.000000 +-119.170000,34.170000,32.000000,1567.000000,304.000000,1482.000000,308.000000,3.586700,182100.000000 +-119.170000,34.170000,25.000000,1596.000000,321.000000,1378.000000,308.000000,4.007400,188000.000000 +-119.170000,34.170000,21.000000,2361.000000,464.000000,1146.000000,396.000000,3.659700,195100.000000 +-119.170000,34.160000,17.000000,5276.000000,1020.000000,4066.000000,984.000000,4.582800,205400.000000 +-119.170000,34.150000,18.000000,2509.000000,688.000000,3129.000000,677.000000,2.609800,146100.000000 +-119.180000,36.400000,39.000000,1730.000000,310.000000,899.000000,309.000000,2.664800,129200.000000 +-119.180000,35.500000,36.000000,1253.000000,259.000000,932.000000,249.000000,2.163500,110400.000000 +-119.180000,34.280000,17.000000,4526.000000,717.000000,2088.000000,655.000000,5.688500,268200.000000 +-119.180000,34.260000,22.000000,2334.000000,359.000000,1298.000000,363.000000,5.527500,228900.000000 +-119.180000,34.240000,17.000000,629.000000,221.000000,514.000000,186.000000,3.284700,112500.000000 +-119.180000,34.230000,16.000000,4609.000000,1220.000000,2147.000000,1007.000000,3.375000,218800.000000 +-119.180000,34.210000,46.000000,2062.000000,484.000000,1522.000000,469.000000,3.087000,213900.000000 +-119.180000,34.210000,30.000000,1096.000000,231.000000,741.000000,229.000000,3.862500,234700.000000 +-119.180000,34.210000,29.000000,4039.000000,680.000000,1677.000000,644.000000,4.389700,257600.000000 +-119.180000,34.200000,21.000000,494.000000,127.000000,489.000000,106.000000,2.696400,170800.000000 +-119.180000,34.190000,5.000000,384.000000,131.000000,410.000000,149.000000,1.562500,87500.000000 +-119.180000,34.190000,19.000000,2393.000000,729.000000,1938.000000,762.000000,1.695300,167400.000000 +-119.180000,34.180000,31.000000,2636.000000,638.000000,2695.000000,614.000000,3.219600,175800.000000 +-119.180000,34.170000,32.000000,2388.000000,467.000000,1746.000000,483.000000,3.933100,187600.000000 +-119.180000,34.160000,30.000000,2053.000000,368.000000,1496.000000,391.000000,3.654600,186200.000000 +-119.180000,34.160000,27.000000,1832.000000,415.000000,1480.000000,414.000000,3.964300,186000.000000 +-119.180000,34.160000,12.000000,460.000000,101.000000,405.000000,103.000000,5.278300,167400.000000 +-119.180000,34.150000,22.000000,4769.000000,1366.000000,5534.000000,1318.000000,2.416700,192000.000000 
+-119.190000,36.340000,33.000000,2199.000000,403.000000,1245.000000,394.000000,2.730000,96900.000000 +-119.190000,35.960000,25.000000,2014.000000,402.000000,1160.000000,362.000000,1.881000,52500.000000 +-119.190000,35.640000,29.000000,1476.000000,220.000000,902.000000,205.000000,2.672600,83300.000000 +-119.190000,35.410000,12.000000,2835.000000,471.000000,1399.000000,413.000000,4.412500,149000.000000 +-119.190000,34.460000,39.000000,2056.000000,381.000000,939.000000,371.000000,6.625700,427600.000000 +-119.190000,34.300000,25.000000,2197.000000,320.000000,934.000000,330.000000,6.311000,283200.000000 +-119.190000,34.280000,28.000000,3231.000000,524.000000,1665.000000,540.000000,4.858300,224200.000000 +-119.190000,34.260000,16.000000,5018.000000,853.000000,2524.000000,830.000000,5.175200,218000.000000 +-119.190000,34.250000,12.000000,232.000000,37.000000,79.000000,35.000000,4.166700,214600.000000 +-119.190000,34.230000,17.000000,3889.000000,748.000000,2415.000000,739.000000,4.500000,234300.000000 +-119.190000,34.210000,34.000000,3413.000000,693.000000,2223.000000,651.000000,3.823900,208200.000000 +-119.190000,34.210000,28.000000,4194.000000,811.000000,2556.000000,856.000000,4.222700,235400.000000 +-119.190000,34.210000,27.000000,1887.000000,487.000000,1339.000000,428.000000,2.918500,224500.000000 +-119.190000,34.200000,36.000000,1293.000000,312.000000,1128.000000,335.000000,2.154200,253900.000000 +-119.190000,34.200000,18.000000,3620.000000,852.000000,3171.000000,779.000000,3.340900,220500.000000 +-119.190000,34.190000,35.000000,2599.000000,552.000000,2726.000000,543.000000,3.221200,180500.000000 +-119.190000,34.180000,32.000000,3366.000000,677.000000,2857.000000,669.000000,4.618600,181100.000000 +-119.190000,34.170000,35.000000,4276.000000,767.000000,3295.000000,708.000000,4.258300,187300.000000 +-119.190000,34.170000,31.000000,1872.000000,434.000000,1511.000000,405.000000,3.231400,186800.000000 +-119.190000,34.160000,35.000000,2733.000000,510.000000,1814.000000,511.000000,4.418700,183400.000000 +-119.190000,34.160000,34.000000,2610.000000,466.000000,1543.000000,433.000000,3.972200,189000.000000 +-119.190000,34.150000,31.000000,4175.000000,1004.000000,3310.000000,954.000000,3.198900,185400.000000 +-119.200000,36.300000,32.000000,1355.000000,363.000000,1427.000000,384.000000,1.344400,45600.000000 +-119.200000,36.300000,19.000000,1427.000000,311.000000,1026.000000,293.000000,2.625000,57000.000000 +-119.200000,36.280000,22.000000,2295.000000,508.000000,1654.000000,478.000000,1.684000,65900.000000 +-119.200000,35.370000,6.000000,7383.000000,1095.000000,3415.000000,1059.000000,5.311900,157300.000000 +-119.200000,34.280000,22.000000,2362.000000,601.000000,1127.000000,499.000000,3.400600,219400.000000 +-119.200000,34.270000,8.000000,4942.000000,1173.000000,3012.000000,1033.000000,3.445000,203400.000000 +-119.200000,34.260000,25.000000,2203.000000,367.000000,1194.000000,377.000000,5.408700,223200.000000 +-119.200000,34.260000,13.000000,3009.000000,588.000000,1439.000000,607.000000,4.184500,199500.000000 +-119.200000,34.250000,25.000000,195.000000,59.000000,140.000000,43.000000,3.888900,187500.000000 +-119.200000,34.250000,18.000000,3208.000000,643.000000,1973.000000,614.000000,3.816200,235000.000000 +-119.200000,34.190000,19.000000,9503.000000,1769.000000,6370.000000,1718.000000,5.001600,218500.000000 +-119.200000,34.180000,27.000000,1035.000000,229.000000,782.000000,222.000000,4.221200,185400.000000 
+-119.200000,34.180000,22.000000,6465.000000,1397.000000,2694.000000,1370.000000,2.983200,165600.000000 +-119.210000,37.250000,44.000000,3042.000000,697.000000,335.000000,115.000000,4.183800,85600.000000 +-119.210000,36.390000,31.000000,1465.000000,303.000000,1013.000000,297.000000,2.036300,53500.000000 +-119.210000,36.380000,18.000000,2158.000000,413.000000,1461.000000,395.000000,2.021600,58000.000000 +-119.210000,36.300000,23.000000,951.000000,235.000000,806.000000,222.000000,1.773400,41400.000000 +-119.210000,36.300000,18.000000,1433.000000,265.000000,1092.000000,276.000000,1.913500,49400.000000 +-119.210000,36.100000,30.000000,1471.000000,373.000000,1418.000000,357.000000,1.743200,42500.000000 +-119.210000,34.310000,22.000000,7548.000000,1038.000000,2855.000000,1008.000000,6.729000,409300.000000 +-119.210000,34.280000,27.000000,2219.000000,312.000000,937.000000,315.000000,5.760100,281100.000000 +-119.210000,34.260000,31.000000,224.000000,88.000000,326.000000,88.000000,2.375000,55000.000000 +-119.210000,34.260000,26.000000,2406.000000,411.000000,1313.000000,391.000000,4.907900,234100.000000 +-119.210000,34.260000,23.000000,2887.000000,540.000000,1508.000000,518.000000,3.345200,217600.000000 +-119.210000,34.260000,10.000000,3150.000000,781.000000,1582.000000,653.000000,4.244800,157300.000000 +-119.210000,34.190000,15.000000,5614.000000,989.000000,2754.000000,994.000000,5.035000,242900.000000 +-119.210000,34.190000,15.000000,3797.000000,692.000000,2216.000000,675.000000,4.744300,229500.000000 +-119.210000,34.180000,13.000000,6103.000000,1319.000000,2986.000000,1212.000000,3.971800,215200.000000 +-119.210000,34.120000,15.000000,5778.000000,1285.000000,1722.000000,829.000000,4.342700,305800.000000 +-119.220000,36.310000,17.000000,2079.000000,459.000000,2022.000000,462.000000,1.546400,54100.000000 +-119.220000,35.680000,16.000000,2874.000000,677.000000,3078.000000,651.000000,1.884300,55200.000000 +-119.220000,34.280000,33.000000,2467.000000,377.000000,1052.000000,363.000000,4.733300,257500.000000 +-119.220000,34.280000,24.000000,2212.000000,332.000000,899.000000,331.000000,5.533000,299700.000000 +-119.220000,34.270000,30.000000,1937.000000,295.000000,695.000000,313.000000,5.067900,234300.000000 +-119.220000,34.270000,11.000000,4695.000000,955.000000,2065.000000,982.000000,3.215800,223600.000000 +-119.220000,34.260000,16.000000,2596.000000,625.000000,1403.000000,562.000000,3.401800,145200.000000 +-119.220000,34.180000,17.000000,3332.000000,762.000000,1797.000000,673.000000,4.429200,231200.000000 +-119.220000,34.150000,32.000000,3152.000000,596.000000,3490.000000,526.000000,2.725000,450000.000000 +-119.230000,36.450000,36.000000,1508.000000,323.000000,1283.000000,312.000000,2.120500,60000.000000 +-119.230000,36.390000,39.000000,1660.000000,349.000000,1061.000000,306.000000,1.481200,53500.000000 +-119.230000,35.790000,31.000000,2862.000000,606.000000,2467.000000,600.000000,2.312500,62100.000000 +-119.230000,35.780000,8.000000,1612.000000,343.000000,1230.000000,330.000000,2.180600,67200.000000 +-119.230000,35.770000,36.000000,3225.000000,635.000000,2034.000000,593.000000,2.404400,72500.000000 +-119.230000,35.770000,26.000000,2636.000000,468.000000,1416.000000,485.000000,4.191700,84000.000000 +-119.230000,34.460000,34.000000,9280.000000,1765.000000,4514.000000,1693.000000,3.202600,227600.000000 +-119.230000,34.440000,34.000000,3193.000000,664.000000,1434.000000,627.000000,2.477700,260300.000000 +-119.230000,34.420000,16.000000,630.000000,117.000000,343.000000,100.000000,5.750000,325000.000000 
+-119.230000,34.300000,18.000000,1713.000000,244.000000,690.000000,239.000000,6.948300,404300.000000 +-119.230000,34.280000,24.000000,4260.000000,691.000000,1581.000000,607.000000,5.504800,303600.000000 +-119.230000,34.270000,29.000000,3298.000000,804.000000,1509.000000,711.000000,3.812500,244500.000000 +-119.230000,34.270000,22.000000,3536.000000,615.000000,1650.000000,612.000000,4.238100,229300.000000 +-119.230000,34.250000,28.000000,26.000000,3.000000,29.000000,9.000000,8.000000,275000.000000 +-119.230000,34.190000,16.000000,5297.000000,810.000000,1489.000000,667.000000,6.452200,500001.000000 +-119.230000,34.150000,18.000000,6213.000000,1188.000000,2679.000000,1000.000000,3.748000,380400.000000 +-119.240000,36.800000,17.000000,2052.000000,405.000000,975.000000,340.000000,2.690200,94400.000000 +-119.240000,35.770000,28.000000,1737.000000,521.000000,1764.000000,514.000000,1.781300,67800.000000 +-119.240000,35.680000,21.000000,1885.000000,398.000000,1539.000000,388.000000,2.520800,58500.000000 +-119.240000,35.670000,32.000000,3216.000000,750.000000,2639.000000,709.000000,2.002500,54700.000000 +-119.240000,34.280000,41.000000,1280.000000,240.000000,608.000000,252.000000,4.403800,229100.000000 +-119.240000,34.270000,32.000000,4071.000000,888.000000,1900.000000,874.000000,3.279200,220500.000000 +-119.250000,36.360000,16.000000,3245.000000,469.000000,1471.000000,450.000000,5.867300,154800.000000 +-119.250000,36.320000,32.000000,1821.000000,345.000000,812.000000,299.000000,2.750000,72200.000000 +-119.250000,36.270000,23.000000,1494.000000,275.000000,678.000000,235.000000,2.687500,69100.000000 +-119.250000,36.230000,24.000000,2015.000000,355.000000,1031.000000,351.000000,3.430600,139200.000000 +-119.250000,35.790000,8.000000,3271.000000,797.000000,2700.000000,688.000000,1.741800,62200.000000 +-119.250000,35.780000,35.000000,1927.000000,386.000000,1371.000000,414.000000,2.298100,69900.000000 +-119.250000,35.780000,27.000000,1513.000000,342.000000,1346.000000,323.000000,2.741100,59800.000000 +-119.250000,35.770000,35.000000,1618.000000,378.000000,1449.000000,398.000000,1.678600,56500.000000 +-119.250000,35.760000,36.000000,2332.000000,656.000000,2175.000000,610.000000,1.604500,57300.000000 +-119.250000,35.750000,36.000000,1598.000000,443.000000,1658.000000,417.000000,1.517000,52100.000000 +-119.250000,34.300000,34.000000,1189.000000,220.000000,445.000000,203.000000,4.882400,396400.000000 +-119.250000,34.280000,36.000000,2232.000000,373.000000,951.000000,368.000000,5.226100,303200.000000 +-119.250000,34.280000,36.000000,1530.000000,341.000000,703.000000,317.000000,3.581900,231900.000000 +-119.250000,34.270000,46.000000,679.000000,159.000000,382.000000,143.000000,3.500000,221200.000000 +-119.250000,34.270000,35.000000,2532.000000,407.000000,1338.000000,422.000000,4.772700,219000.000000 +-119.250000,34.260000,30.000000,2948.000000,827.000000,1635.000000,750.000000,2.670000,214900.000000 +-119.250000,34.210000,12.000000,15201.000000,2418.000000,7132.000000,2251.000000,5.675600,301800.000000 +-119.260000,36.610000,33.000000,560.000000,90.000000,310.000000,113.000000,2.541700,118800.000000 +-119.260000,36.500000,35.000000,1689.000000,371.000000,1475.000000,329.000000,2.571900,74300.000000 +-119.260000,36.300000,18.000000,3578.000000,720.000000,1540.000000,640.000000,2.425000,84600.000000 +-119.260000,35.870000,24.000000,1590.000000,390.000000,1686.000000,372.000000,1.646900,47600.000000 +-119.260000,34.460000,30.000000,3826.000000,691.000000,1656.000000,657.000000,4.007400,434700.000000 
+-119.260000,34.280000,41.000000,2822.000000,564.000000,1288.000000,541.000000,3.079900,254100.000000 +-119.260000,34.280000,41.000000,1835.000000,311.000000,683.000000,308.000000,4.897700,358200.000000 +-119.260000,34.270000,42.000000,918.000000,204.000000,394.000000,204.000000,4.006900,214300.000000 +-119.260000,34.270000,40.000000,2528.000000,572.000000,1318.000000,549.000000,3.641300,212700.000000 +-119.270000,36.390000,17.000000,2076.000000,350.000000,998.000000,340.000000,4.328100,145700.000000 +-119.270000,36.340000,7.000000,3433.000000,626.000000,1793.000000,626.000000,3.529600,83700.000000 +-119.270000,36.340000,26.000000,2057.000000,472.000000,1453.000000,439.000000,2.411300,58600.000000 +-119.270000,36.320000,9.000000,3631.000000,635.000000,1881.000000,628.000000,4.772300,113100.000000 +-119.270000,36.320000,6.000000,2881.000000,518.000000,1432.000000,504.000000,4.080600,110200.000000 +-119.270000,36.180000,23.000000,3180.000000,547.000000,1829.000000,498.000000,2.609800,66000.000000 +-119.270000,36.050000,29.000000,1016.000000,174.000000,481.000000,140.000000,2.291700,112500.000000 +-119.270000,35.890000,18.000000,1855.000000,424.000000,1839.000000,392.000000,1.757200,53300.000000 +-119.270000,35.880000,32.000000,1393.000000,343.000000,1282.000000,336.000000,1.506900,43700.000000 +-119.270000,35.500000,34.000000,1367.000000,329.000000,796.000000,319.000000,2.826900,61100.000000 +-119.270000,35.500000,21.000000,2171.000000,483.000000,1315.000000,450.000000,1.710500,52100.000000 +-119.270000,35.490000,39.000000,2649.000000,572.000000,1815.000000,547.000000,2.353300,65400.000000 +-119.270000,34.450000,15.000000,1659.000000,274.000000,679.000000,253.000000,5.000000,357900.000000 +-119.270000,34.440000,22.000000,3527.000000,711.000000,1483.000000,640.000000,2.701900,234700.000000 +-119.270000,34.290000,32.000000,2274.000000,406.000000,982.000000,393.000000,5.325400,385200.000000 +-119.270000,34.280000,52.000000,2239.000000,420.000000,941.000000,397.000000,4.125000,349000.000000 +-119.270000,34.280000,50.000000,1710.000000,412.000000,915.000000,380.000000,3.175700,206300.000000 +-119.270000,34.270000,52.000000,459.000000,112.000000,276.000000,107.000000,2.375000,198400.000000 +-119.270000,34.270000,44.000000,1312.000000,279.000000,668.000000,278.000000,4.090000,203800.000000 +-119.270000,34.260000,23.000000,3578.000000,753.000000,1455.000000,649.000000,4.189800,359100.000000 +-119.270000,34.170000,15.000000,11403.000000,2131.000000,3327.000000,1585.000000,4.369300,423300.000000 +-119.280000,37.110000,34.000000,1901.000000,394.000000,171.000000,73.000000,3.072900,144600.000000 +-119.280000,36.540000,33.000000,1470.000000,330.000000,1222.000000,301.000000,1.816300,57400.000000 +-119.280000,36.520000,19.000000,1402.000000,324.000000,1327.000000,316.000000,2.250000,53200.000000 +-119.280000,36.330000,10.000000,1051.000000,297.000000,927.000000,274.000000,0.780000,55500.000000 +-119.280000,36.320000,16.000000,2812.000000,514.000000,1620.000000,523.000000,3.740400,89200.000000 +-119.280000,35.520000,36.000000,786.000000,194.000000,573.000000,134.000000,2.232100,37500.000000 +-119.280000,35.500000,34.000000,1923.000000,379.000000,1101.000000,351.000000,2.404400,65800.000000 +-119.280000,35.500000,28.000000,3107.000000,782.000000,3260.000000,738.000000,1.694400,58600.000000 +-119.280000,34.450000,36.000000,2376.000000,541.000000,1505.000000,547.000000,2.459500,197600.000000 +-119.280000,34.420000,23.000000,4763.000000,828.000000,2198.000000,771.000000,4.810500,313000.000000 
+-119.280000,34.270000,44.000000,706.000000,176.000000,399.000000,149.000000,3.008900,166700.000000 +-119.280000,34.270000,43.000000,403.000000,77.000000,156.000000,85.000000,4.666700,384600.000000 +-119.290000,36.550000,21.000000,2467.000000,520.000000,1721.000000,515.000000,2.552100,65600.000000 +-119.290000,36.520000,39.000000,858.000000,228.000000,1222.000000,224.000000,1.571400,43000.000000 +-119.290000,36.350000,15.000000,1740.000000,319.000000,1332.000000,308.000000,2.574300,60200.000000 +-119.290000,36.340000,35.000000,1235.000000,369.000000,1246.000000,341.000000,1.474000,71000.000000 +-119.290000,36.340000,28.000000,1440.000000,431.000000,2178.000000,440.000000,1.263400,50600.000000 +-119.290000,36.340000,10.000000,1832.000000,455.000000,1664.000000,429.000000,2.022700,53300.000000 +-119.290000,36.330000,19.000000,792.000000,232.000000,641.000000,222.000000,0.744500,112500.000000 +-119.290000,36.320000,35.000000,1898.000000,481.000000,1123.000000,433.000000,1.141900,62900.000000 +-119.290000,36.320000,33.000000,2107.000000,451.000000,1364.000000,442.000000,2.202400,67200.000000 +-119.290000,36.310000,34.000000,1439.000000,253.000000,607.000000,223.000000,3.097200,82800.000000 +-119.290000,36.310000,14.000000,2382.000000,377.000000,1278.000000,386.000000,5.189600,101900.000000 +-119.290000,36.300000,20.000000,1157.000000,179.000000,572.000000,191.000000,5.349500,177300.000000 +-119.290000,36.280000,23.000000,1895.000000,340.000000,749.000000,313.000000,2.233300,120100.000000 +-119.290000,34.450000,26.000000,2849.000000,535.000000,1383.000000,532.000000,2.689300,230800.000000 +-119.290000,34.400000,22.000000,3891.000000,657.000000,1727.000000,581.000000,4.265600,241400.000000 +-119.290000,34.300000,24.000000,7637.000000,1705.000000,4647.000000,1623.000000,3.538500,186800.000000 +-119.290000,34.290000,33.000000,3854.000000,982.000000,1835.000000,894.000000,3.529400,323900.000000 +-119.290000,34.260000,32.000000,3295.000000,764.000000,1344.000000,600.000000,3.600700,395500.000000 +-119.300000,38.260000,19.000000,3325.000000,660.000000,750.000000,286.000000,2.950900,114800.000000 +-119.300000,36.570000,32.000000,728.000000,136.000000,461.000000,149.000000,3.015600,109100.000000 +-119.300000,36.350000,24.000000,1855.000000,416.000000,1520.000000,410.000000,2.330400,64900.000000 +-119.300000,36.340000,45.000000,3723.000000,831.000000,2256.000000,770.000000,1.829900,63100.000000 +-119.300000,36.340000,27.000000,1515.000000,358.000000,1178.000000,309.000000,1.443200,48100.000000 +-119.300000,36.330000,44.000000,2060.000000,414.000000,819.000000,355.000000,2.879500,77000.000000 +-119.300000,36.300000,14.000000,3023.000000,469.000000,1523.000000,492.000000,5.360200,118600.000000 +-119.300000,35.870000,20.000000,1934.000000,377.000000,1341.000000,336.000000,2.143400,62600.000000 +-119.300000,34.420000,18.000000,5591.000000,1042.000000,2860.000000,1026.000000,3.582200,219900.000000 +-119.300000,34.390000,35.000000,3079.000000,579.000000,1807.000000,589.000000,4.690000,199300.000000 +-119.300000,34.290000,50.000000,3128.000000,825.000000,2535.000000,783.000000,2.366900,165300.000000 +-119.300000,34.290000,41.000000,1445.000000,410.000000,1052.000000,388.000000,2.633300,170800.000000 +-119.300000,34.290000,26.000000,3665.000000,932.000000,2775.000000,870.000000,1.928600,160500.000000 +-119.300000,34.270000,17.000000,1527.000000,503.000000,688.000000,423.000000,1.600700,187500.000000 +-119.310000,36.620000,33.000000,1485.000000,374.000000,1544.000000,329.000000,1.729200,52000.000000 
+-119.310000,36.620000,25.000000,831.000000,230.000000,947.000000,244.000000,1.448100,51700.000000 +-119.310000,36.340000,32.000000,1893.000000,453.000000,1744.000000,425.000000,1.472900,54100.000000 +-119.310000,36.340000,14.000000,2985.000000,607.000000,2250.000000,607.000000,2.160200,65200.000000 +-119.310000,36.330000,46.000000,1636.000000,338.000000,772.000000,332.000000,2.425000,84900.000000 +-119.310000,36.320000,44.000000,2032.000000,308.000000,791.000000,336.000000,4.029800,109000.000000 +-119.310000,36.310000,18.000000,3860.000000,760.000000,1643.000000,664.000000,2.071400,92600.000000 +-119.310000,36.300000,20.000000,1256.000000,209.000000,566.000000,195.000000,4.022100,86300.000000 +-119.310000,36.200000,23.000000,1837.000000,332.000000,1064.000000,335.000000,3.145300,74500.000000 +-119.310000,36.060000,20.000000,2236.000000,434.000000,1405.000000,412.000000,1.882700,48700.000000 +-119.310000,35.990000,26.000000,1460.000000,316.000000,880.000000,286.000000,1.367600,47800.000000 +-119.310000,34.440000,5.000000,403.000000,48.000000,208.000000,54.000000,12.632000,500001.000000 +-119.310000,34.410000,22.000000,2612.000000,494.000000,1361.000000,439.000000,4.131900,245000.000000 +-119.310000,34.380000,23.000000,282.000000,69.000000,130.000000,57.000000,2.437500,225000.000000 +-119.320000,37.060000,15.000000,3111.000000,651.000000,276.000000,107.000000,5.131400,179200.000000 +-119.320000,36.620000,15.000000,1070.000000,256.000000,1070.000000,243.000000,1.564200,51500.000000 +-119.320000,36.360000,18.000000,2060.000000,383.000000,1348.000000,397.000000,3.431200,68400.000000 +-119.320000,36.330000,20.000000,1896.000000,266.000000,674.000000,277.000000,9.037600,239100.000000 +-119.320000,36.330000,18.000000,2603.000000,478.000000,1158.000000,423.000000,4.593800,150500.000000 +-119.320000,36.320000,35.000000,2316.000000,387.000000,849.000000,378.000000,4.381600,88600.000000 +-119.320000,36.320000,29.000000,2409.000000,436.000000,1142.000000,440.000000,3.689500,87700.000000 +-119.320000,36.310000,21.000000,2309.000000,424.000000,1047.000000,453.000000,2.988600,87500.000000 +-119.320000,36.300000,14.000000,1680.000000,343.000000,931.000000,350.000000,2.733600,89200.000000 +-119.320000,36.250000,21.000000,1231.000000,204.000000,609.000000,206.000000,2.836500,90000.000000 +-119.320000,36.220000,5.000000,2319.000000,438.000000,1283.000000,423.000000,3.634300,95400.000000 +-119.320000,36.210000,29.000000,1220.000000,232.000000,619.000000,246.000000,3.312500,78300.000000 +-119.320000,36.190000,11.000000,1281.000000,291.000000,861.000000,313.000000,1.096200,72300.000000 +-119.320000,34.350000,16.000000,52.000000,16.000000,51.000000,15.000000,2.475000,225000.000000 +-119.330000,36.890000,15.000000,1879.000000,411.000000,755.000000,294.000000,2.000000,83300.000000 +-119.330000,36.320000,23.000000,3137.000000,628.000000,1446.000000,548.000000,2.500000,85500.000000 +-119.330000,36.320000,20.000000,2025.000000,328.000000,1039.000000,346.000000,3.531300,82800.000000 +-119.330000,36.320000,19.000000,2778.000000,431.000000,1092.000000,451.000000,5.256100,121300.000000 +-119.330000,36.320000,16.000000,3331.000000,839.000000,1955.000000,763.000000,1.614800,86600.000000 +-119.330000,36.310000,17.000000,2401.000000,409.000000,1100.000000,409.000000,4.057700,107300.000000 +-119.330000,36.300000,12.000000,2172.000000,352.000000,1013.000000,354.000000,4.946400,115600.000000 +-119.330000,36.300000,11.000000,3045.000000,522.000000,1563.000000,516.000000,5.433700,133800.000000 
+-119.330000,36.280000,16.000000,2624.000000,527.000000,1077.000000,520.000000,2.125000,104200.000000 +-119.330000,36.220000,9.000000,3748.000000,644.000000,1955.000000,620.000000,4.201100,108100.000000 +-119.330000,36.220000,29.000000,1735.000000,323.000000,805.000000,293.000000,3.503900,89900.000000 +-119.330000,36.210000,38.000000,3115.000000,622.000000,1238.000000,606.000000,2.608300,67000.000000 +-119.330000,35.600000,32.000000,2703.000000,683.000000,2682.000000,675.000000,1.461900,60500.000000 +-119.330000,35.590000,20.000000,3085.000000,691.000000,2645.000000,676.000000,1.786800,54100.000000 +-119.340000,37.120000,23.000000,1881.000000,380.000000,64.000000,37.000000,3.875000,125000.000000 +-119.340000,36.320000,6.000000,3266.000000,604.000000,1769.000000,580.000000,3.157400,89200.000000 +-119.340000,36.320000,14.000000,1204.000000,227.000000,633.000000,247.000000,3.925000,83800.000000 +-119.340000,36.310000,14.000000,1635.000000,422.000000,870.000000,399.000000,2.700000,88900.000000 +-119.340000,36.300000,13.000000,2394.000000,458.000000,1177.000000,389.000000,2.687500,74400.000000 +-119.340000,36.230000,12.000000,4965.000000,872.000000,2191.000000,804.000000,3.561100,90200.000000 +-119.340000,36.210000,30.000000,749.000000,214.000000,537.000000,199.000000,0.822900,68400.000000 +-119.340000,36.210000,22.000000,3065.000000,726.000000,2165.000000,738.000000,1.479200,54400.000000 +-119.340000,36.200000,12.000000,1632.000000,378.000000,1303.000000,315.000000,2.033300,54400.000000 +-119.340000,35.600000,16.000000,1584.000000,309.000000,1011.000000,268.000000,2.496100,58800.000000 +-119.340000,35.590000,33.000000,3240.000000,654.000000,1809.000000,616.000000,2.393400,71900.000000 +-119.350000,36.520000,39.000000,3027.000000,608.000000,2199.000000,592.000000,2.644500,62000.000000 +-119.350000,36.420000,18.000000,1115.000000,193.000000,1742.000000,176.000000,2.796900,123800.000000 +-119.350000,36.320000,10.000000,3817.000000,719.000000,1686.000000,714.000000,3.823500,94600.000000 +-119.350000,36.220000,32.000000,1290.000000,304.000000,852.000000,309.000000,1.442900,54600.000000 +-119.350000,36.200000,31.000000,1783.000000,382.000000,1266.000000,358.000000,2.226400,50800.000000 +-119.350000,36.200000,29.000000,1938.000000,404.000000,1487.000000,414.000000,1.746200,51100.000000 +-119.350000,36.160000,21.000000,2751.000000,602.000000,1496.000000,489.000000,2.388200,49200.000000 +-119.350000,35.580000,13.000000,1657.000000,362.000000,1186.000000,376.000000,1.190300,63200.000000 +-119.360000,36.330000,11.000000,3221.000000,617.000000,1351.000000,565.000000,2.984400,132000.000000 +-119.360000,36.220000,10.000000,2445.000000,526.000000,1262.000000,476.000000,1.935500,68300.000000 +-119.360000,36.210000,25.000000,1170.000000,259.000000,804.000000,257.000000,1.388900,50200.000000 +-119.360000,36.210000,18.000000,1082.000000,202.000000,793.000000,213.000000,2.403200,60000.000000 +-119.360000,36.200000,33.000000,1955.000000,398.000000,1412.000000,397.000000,2.250000,51500.000000 +-119.360000,35.550000,29.000000,510.000000,84.000000,236.000000,73.000000,2.700000,125000.000000 +-119.370000,36.470000,26.000000,337.000000,69.000000,277.000000,73.000000,2.343800,100000.000000 +-119.370000,36.350000,20.000000,1132.000000,177.000000,518.000000,178.000000,5.376700,231300.000000 +-119.370000,36.220000,19.000000,1673.000000,318.000000,1298.000000,343.000000,2.706000,64800.000000 +-119.370000,36.210000,35.000000,2228.000000,476.000000,1567.000000,449.000000,1.445500,54100.000000 
+-119.380000,36.560000,25.000000,1180.000000,222.000000,611.000000,212.000000,2.072900,84700.000000 +-119.380000,36.550000,31.000000,2342.000000,439.000000,1411.000000,465.000000,3.017000,72000.000000 +-119.380000,36.540000,33.000000,2465.000000,536.000000,2030.000000,522.000000,1.522300,51800.000000 +-119.380000,36.530000,38.000000,1281.000000,297.000000,1423.000000,293.000000,1.960200,51400.000000 +-119.380000,36.300000,14.000000,1932.000000,330.000000,997.000000,291.000000,3.687500,93200.000000 +-119.390000,36.640000,38.000000,949.000000,190.000000,578.000000,187.000000,2.361800,80000.000000 +-119.390000,36.550000,30.000000,1669.000000,314.000000,837.000000,325.000000,3.386900,80400.000000 +-119.390000,36.540000,30.000000,1408.000000,326.000000,1184.000000,324.000000,1.716500,59100.000000 +-119.390000,34.320000,19.000000,3238.000000,629.000000,1195.000000,443.000000,4.847200,500001.000000 +-119.400000,37.090000,22.000000,2211.000000,477.000000,773.000000,288.000000,3.326900,102700.000000 +-119.400000,36.550000,19.000000,3000.000000,628.000000,2202.000000,590.000000,2.514100,67400.000000 +-119.400000,36.530000,28.000000,2201.000000,429.000000,1524.000000,412.000000,2.750000,65000.000000 +-119.400000,36.040000,39.000000,915.000000,199.000000,580.000000,175.000000,1.889400,112500.000000 +-119.400000,35.060000,21.000000,2213.000000,458.000000,1250.000000,440.000000,2.918700,52100.000000 +-119.410000,36.680000,18.000000,1802.000000,332.000000,945.000000,292.000000,3.404400,115300.000000 +-119.410000,36.350000,20.000000,1743.000000,340.000000,1390.000000,336.000000,2.222200,52900.000000 +-119.420000,36.350000,20.000000,1469.000000,303.000000,1031.000000,259.000000,1.664500,48000.000000 +-119.420000,35.400000,24.000000,2585.000000,480.000000,1442.000000,424.000000,2.845200,104700.000000 +-119.420000,35.190000,26.000000,890.000000,172.000000,483.000000,170.000000,4.150000,68200.000000 +-119.430000,36.630000,25.000000,1784.000000,312.000000,904.000000,303.000000,3.625000,107600.000000 +-119.430000,36.610000,19.000000,1484.000000,296.000000,1296.000000,298.000000,2.421900,65800.000000 +-119.430000,36.590000,15.000000,1371.000000,306.000000,1266.000000,309.000000,1.767000,63300.000000 +-119.430000,36.550000,27.000000,1621.000000,323.000000,882.000000,324.000000,2.750000,93500.000000 +-119.440000,38.530000,20.000000,1963.000000,434.000000,682.000000,273.000000,1.581700,97800.000000 +-119.440000,36.600000,5.000000,2353.000000,608.000000,2505.000000,573.000000,2.286300,69200.000000 +-119.440000,36.590000,32.000000,1153.000000,236.000000,761.000000,241.000000,2.825000,67600.000000 +-119.440000,36.590000,28.000000,1343.000000,330.000000,1331.000000,305.000000,1.516000,56700.000000 +-119.440000,36.580000,37.000000,1054.000000,239.000000,879.000000,257.000000,2.523400,63500.000000 +-119.440000,36.480000,27.000000,1546.000000,415.000000,1704.000000,395.000000,1.172800,41700.000000 +-119.450000,37.210000,17.000000,3538.000000,726.000000,1603.000000,629.000000,2.944900,95600.000000 +-119.450000,36.610000,24.000000,1302.000000,232.000000,693.000000,243.000000,3.791700,90500.000000 +-119.450000,36.600000,42.000000,510.000000,88.000000,247.000000,99.000000,2.500000,73000.000000 +-119.450000,36.600000,36.000000,2294.000000,489.000000,1430.000000,454.000000,1.897500,60900.000000 +-119.450000,36.590000,41.000000,1749.000000,342.000000,1171.000000,314.000000,1.687500,66100.000000 +-119.450000,36.590000,28.000000,1274.000000,215.000000,572.000000,202.000000,3.825000,84200.000000 
+-119.450000,36.580000,18.000000,1425.000000,280.000000,753.000000,266.000000,3.781300,87300.000000 +-119.450000,36.480000,38.000000,402.000000,86.000000,311.000000,87.000000,3.171900,106300.000000 +-119.450000,36.350000,22.000000,1824.000000,333.000000,1076.000000,282.000000,2.336500,69600.000000 +-119.450000,36.090000,18.000000,408.000000,82.000000,253.000000,75.000000,2.031300,112500.000000 +-119.450000,35.160000,34.000000,3437.000000,696.000000,1783.000000,608.000000,2.391200,52900.000000 +-119.450000,35.130000,34.000000,1440.000000,309.000000,808.000000,294.000000,2.301300,26600.000000 +-119.450000,35.070000,45.000000,973.000000,183.000000,500.000000,177.000000,2.638900,30000.000000 +-119.460000,36.610000,13.000000,1348.000000,258.000000,719.000000,246.000000,3.625000,108300.000000 +-119.460000,36.600000,18.000000,1404.000000,226.000000,754.000000,229.000000,3.984400,118100.000000 +-119.460000,36.250000,32.000000,1702.000000,348.000000,1016.000000,350.000000,2.500000,73600.000000 +-119.460000,35.130000,46.000000,2745.000000,543.000000,1423.000000,482.000000,2.195500,26900.000000 +-119.470000,36.690000,19.000000,3351.000000,589.000000,1578.000000,542.000000,3.291700,160100.000000 +-119.470000,35.130000,44.000000,4599.000000,877.000000,2140.000000,831.000000,2.995200,63800.000000 +-119.480000,37.000000,16.000000,2904.000000,551.000000,1467.000000,509.000000,3.173600,111800.000000 +-119.480000,36.500000,32.000000,3451.000000,625.000000,1968.000000,574.000000,2.955400,110300.000000 +-119.480000,36.440000,22.000000,1389.000000,290.000000,1185.000000,271.000000,2.085700,49200.000000 +-119.480000,35.170000,36.000000,116.000000,20.000000,39.000000,18.000000,3.125000,37500.000000 +-119.490000,37.100000,24.000000,2532.000000,555.000000,1564.000000,507.000000,2.335900,92400.000000 +-119.490000,36.580000,21.000000,2106.000000,410.000000,867.000000,380.000000,1.991300,95300.000000 +-119.490000,34.390000,17.000000,4617.000000,982.000000,2303.000000,923.000000,3.922400,230600.000000 +-119.500000,36.740000,20.000000,1089.000000,208.000000,531.000000,212.000000,4.593800,106900.000000 +-119.500000,36.620000,34.000000,1440.000000,267.000000,1018.000000,265.000000,2.220600,63400.000000 +-119.510000,37.320000,14.000000,362.000000,78.000000,88.000000,39.000000,3.589300,214300.000000 +-119.510000,34.460000,28.000000,3506.000000,563.000000,1362.000000,483.000000,6.091000,500001.000000 +-119.510000,34.400000,24.000000,3422.000000,596.000000,1763.000000,601.000000,5.203900,301300.000000 +-119.510000,34.400000,15.000000,1112.000000,256.000000,411.000000,245.000000,2.062500,314300.000000 +-119.510000,34.390000,32.000000,1921.000000,394.000000,951.000000,334.000000,3.233000,346000.000000 +-119.520000,36.710000,21.000000,1834.000000,321.000000,1120.000000,314.000000,2.590000,69300.000000 +-119.520000,36.610000,33.000000,1225.000000,275.000000,1065.000000,248.000000,1.895800,55100.000000 +-119.520000,34.410000,20.000000,4489.000000,800.000000,2867.000000,765.000000,4.806000,279700.000000 +-119.520000,34.400000,20.000000,1834.000000,477.000000,1305.000000,417.000000,3.212500,251000.000000 +-119.530000,37.340000,26.000000,4047.000000,702.000000,571.000000,199.000000,2.348200,179500.000000 +-119.530000,36.780000,20.000000,2822.000000,479.000000,1372.000000,455.000000,4.562500,136900.000000 +-119.530000,36.610000,33.000000,587.000000,170.000000,730.000000,162.000000,1.562500,55800.000000 +-119.530000,36.550000,34.000000,2065.000000,343.000000,1041.000000,313.000000,3.291700,111500.000000 
+-119.530000,34.410000,8.000000,1705.000000,400.000000,886.000000,391.000000,3.965900,297400.000000 +-119.530000,34.400000,14.000000,1671.000000,383.000000,1079.000000,365.000000,3.138900,248700.000000 +-119.530000,34.380000,22.000000,2323.000000,727.000000,1301.000000,478.000000,2.786400,300000.000000 +-119.540000,36.700000,20.000000,1815.000000,375.000000,1665.000000,357.000000,2.244800,58900.000000 +-119.540000,36.510000,36.000000,49.000000,7.000000,28.000000,2.000000,4.625000,162500.000000 +-119.550000,37.750000,30.000000,2165.000000,536.000000,1500.000000,414.000000,3.539100,55900.000000 +-119.550000,36.720000,6.000000,1186.000000,234.000000,1135.000000,218.000000,2.151500,63900.000000 +-119.550000,36.710000,32.000000,1963.000000,508.000000,2052.000000,518.000000,1.907600,55800.000000 +-119.550000,36.700000,31.000000,1671.000000,372.000000,1371.000000,347.000000,2.368700,63900.000000 +-119.550000,36.610000,14.000000,3004.000000,793.000000,3535.000000,735.000000,1.586000,56900.000000 +-119.550000,36.600000,18.000000,2379.000000,448.000000,1638.000000,436.000000,2.309000,57100.000000 +-119.550000,36.520000,31.000000,1986.000000,417.000000,1042.000000,422.000000,3.029400,70200.000000 +-119.550000,36.510000,46.000000,55.000000,11.000000,26.000000,5.000000,4.125000,67500.000000 +-119.550000,36.510000,46.000000,1889.000000,390.000000,971.000000,403.000000,2.213200,76600.000000 +-119.550000,36.370000,26.000000,1912.000000,339.000000,1002.000000,311.000000,3.037500,126300.000000 +-119.550000,34.380000,17.000000,1951.000000,368.000000,681.000000,350.000000,2.727500,500001.000000 +-119.560000,37.290000,14.000000,2391.000000,451.000000,798.000000,308.000000,3.092400,114600.000000 +-119.560000,36.710000,29.000000,1963.000000,392.000000,1208.000000,398.000000,2.574100,73000.000000 +-119.560000,36.700000,40.000000,1195.000000,326.000000,1135.000000,315.000000,2.118200,58900.000000 +-119.560000,36.100000,29.000000,424.000000,78.000000,284.000000,73.000000,1.531300,43800.000000 +-119.560000,36.100000,25.000000,1093.000000,262.000000,893.000000,252.000000,2.130000,50800.000000 +-119.560000,36.090000,35.000000,1648.000000,285.000000,792.000000,265.000000,3.284700,64700.000000 +-119.560000,36.090000,14.000000,1267.000000,290.000000,1077.000000,279.000000,1.850000,52300.000000 +-119.560000,36.080000,37.000000,766.000000,189.000000,639.000000,190.000000,1.660700,42100.000000 +-119.570000,37.940000,17.000000,346.000000,130.000000,51.000000,20.000000,3.486100,137500.000000 +-119.570000,37.020000,16.000000,4199.000000,794.000000,2140.000000,722.000000,3.332000,111800.000000 +-119.570000,36.720000,11.000000,2510.000000,460.000000,1248.000000,445.000000,3.616100,99500.000000 +-119.570000,36.710000,10.000000,1657.000000,359.000000,958.000000,380.000000,2.645800,84800.000000 +-119.570000,36.700000,7.000000,1761.000000,309.000000,974.000000,308.000000,3.726100,83900.000000 +-119.570000,36.700000,34.000000,1759.000000,354.000000,899.000000,337.000000,2.682300,72900.000000 +-119.570000,36.700000,30.000000,2370.000000,412.000000,1248.000000,410.000000,3.144200,72300.000000 +-119.570000,36.440000,30.000000,1860.000000,337.000000,1123.000000,347.000000,3.492600,94200.000000 +-119.570000,36.270000,20.000000,2673.000000,452.000000,1394.000000,449.000000,2.625000,97400.000000 +-119.570000,36.100000,36.000000,1729.000000,317.000000,737.000000,278.000000,3.531300,68800.000000 +-119.570000,36.100000,16.000000,1461.000000,400.000000,1201.000000,384.000000,1.572700,54800.000000 
+-119.580000,36.830000,13.000000,6135.000000,863.000000,2473.000000,774.000000,5.489500,156700.000000 +-119.580000,36.770000,19.000000,3225.000000,548.000000,1760.000000,542.000000,4.022700,126500.000000 +-119.580000,36.690000,42.000000,1032.000000,215.000000,812.000000,225.000000,1.976600,58100.000000 +-119.580000,36.110000,21.000000,2004.000000,385.000000,1397.000000,398.000000,2.216900,61600.000000 +-119.590000,37.390000,19.000000,3273.000000,611.000000,1164.000000,481.000000,3.544600,106500.000000 +-119.590000,36.720000,18.000000,1284.000000,193.000000,621.000000,190.000000,4.537500,130600.000000 +-119.590000,36.570000,19.000000,1733.000000,303.000000,911.000000,281.000000,3.598700,131700.000000 +-119.590000,36.520000,35.000000,990.000000,192.000000,674.000000,178.000000,3.321400,101600.000000 +-119.590000,36.110000,32.000000,752.000000,159.000000,524.000000,155.000000,2.250000,50000.000000 +-119.590000,34.430000,28.000000,2718.000000,542.000000,1066.000000,442.000000,4.205900,500001.000000 +-119.590000,34.390000,35.000000,622.000000,170.000000,278.000000,139.000000,3.696900,335000.000000 +-119.600000,37.290000,13.000000,1722.000000,325.000000,712.000000,269.000000,2.625000,137500.000000 +-119.600000,36.660000,27.000000,1388.000000,296.000000,1056.000000,284.000000,1.609400,55200.000000 +-119.600000,36.570000,42.000000,2311.000000,439.000000,1347.000000,436.000000,2.555600,69700.000000 +-119.600000,36.570000,33.000000,1923.000000,403.000000,1205.000000,389.000000,1.833300,68300.000000 +-119.600000,36.560000,36.000000,738.000000,168.000000,737.000000,186.000000,1.441500,54400.000000 +-119.610000,36.940000,14.000000,863.000000,151.000000,315.000000,135.000000,4.267900,151800.000000 +-119.610000,36.590000,10.000000,2842.000000,620.000000,1443.000000,576.000000,2.272700,92700.000000 +-119.610000,36.580000,29.000000,1312.000000,280.000000,788.000000,271.000000,2.697400,73000.000000 +-119.610000,36.570000,42.000000,2242.000000,521.000000,1359.000000,483.000000,1.583300,65100.000000 +-119.610000,36.560000,34.000000,1911.000000,497.000000,1886.000000,481.000000,1.625000,53000.000000 +-119.610000,36.310000,25.000000,1847.000000,371.000000,1460.000000,353.000000,1.883900,46300.000000 +-119.610000,34.450000,33.000000,3597.000000,519.000000,1207.000000,479.000000,5.396300,500001.000000 +-119.610000,34.430000,16.000000,2665.000000,391.000000,794.000000,311.000000,9.026700,500001.000000 +-119.620000,37.330000,7.000000,3389.000000,621.000000,1268.000000,474.000000,3.022400,147800.000000 +-119.620000,36.580000,13.000000,1788.000000,405.000000,1652.000000,411.000000,2.685800,62400.000000 +-119.630000,36.790000,19.000000,1317.000000,189.000000,517.000000,187.000000,4.526000,148700.000000 +-119.630000,36.700000,42.000000,1338.000000,215.000000,617.000000,222.000000,3.083300,133300.000000 +-119.630000,36.600000,33.000000,1589.000000,294.000000,1102.000000,307.000000,1.967600,62400.000000 +-119.630000,36.580000,22.000000,1794.000000,435.000000,1127.000000,359.000000,1.264700,55300.000000 +-119.630000,36.350000,4.000000,1684.000000,343.000000,920.000000,324.000000,4.239600,90600.000000 +-119.630000,36.340000,26.000000,1463.000000,261.000000,699.000000,219.000000,3.553600,71400.000000 +-119.630000,36.330000,14.000000,2928.000000,600.000000,1633.000000,559.000000,1.838500,67500.000000 +-119.630000,36.180000,23.000000,207.000000,45.000000,171.000000,50.000000,2.428600,100000.000000 +-119.630000,34.400000,29.000000,3865.000000,814.000000,1266.000000,613.000000,6.006900,500001.000000 
+-119.640000,37.610000,30.000000,2857.000000,661.000000,291.000000,135.000000,2.683800,164600.000000 +-119.640000,37.310000,15.000000,2654.000000,530.000000,1267.000000,489.000000,2.839300,104400.000000 +-119.640000,36.820000,14.000000,4872.000000,656.000000,2085.000000,617.000000,5.673900,173800.000000 +-119.640000,36.560000,34.000000,576.000000,117.000000,363.000000,97.000000,2.065800,92500.000000 +-119.640000,36.360000,13.000000,2360.000000,340.000000,1055.000000,312.000000,5.213400,97400.000000 +-119.640000,36.350000,23.000000,3182.000000,563.000000,1525.000000,585.000000,3.810800,90400.000000 +-119.640000,36.330000,41.000000,3095.000000,766.000000,1852.000000,721.000000,1.452400,51700.000000 +-119.640000,36.320000,32.000000,2205.000000,523.000000,1772.000000,479.000000,1.356900,43100.000000 +-119.640000,36.310000,27.000000,1513.000000,314.000000,1071.000000,284.000000,1.590900,50100.000000 +-119.640000,34.430000,32.000000,1872.000000,318.000000,749.000000,296.000000,4.625000,500001.000000 +-119.650000,37.320000,11.000000,2161.000000,448.000000,820.000000,405.000000,2.356500,122300.000000 +-119.650000,37.090000,17.000000,1280.000000,254.000000,707.000000,267.000000,3.550000,106300.000000 +-119.650000,36.620000,6.000000,1931.000000,422.000000,1344.000000,414.000000,1.660700,58000.000000 +-119.650000,36.510000,30.000000,1671.000000,319.000000,966.000000,282.000000,3.133300,100000.000000 +-119.650000,36.370000,4.000000,3725.000000,783.000000,1478.000000,600.000000,3.548600,148000.000000 +-119.650000,36.350000,38.000000,3148.000000,568.000000,1378.000000,537.000000,2.878800,85500.000000 +-119.650000,36.350000,21.000000,1745.000000,266.000000,837.000000,292.000000,4.391100,107900.000000 +-119.650000,36.340000,47.000000,1869.000000,357.000000,832.000000,315.000000,3.084600,76100.000000 +-119.650000,36.340000,46.000000,1730.000000,337.000000,752.000000,323.000000,1.852900,67200.000000 +-119.650000,36.330000,52.000000,1257.000000,257.000000,624.000000,243.000000,2.352300,59100.000000 +-119.650000,36.330000,47.000000,1059.000000,268.000000,693.000000,241.000000,1.388200,53800.000000 +-119.650000,36.320000,11.000000,1294.000000,314.000000,713.000000,290.000000,1.543300,50400.000000 +-119.650000,36.300000,28.000000,941.000000,175.000000,588.000000,180.000000,2.346600,53400.000000 +-119.660000,37.390000,10.000000,2106.000000,410.000000,1003.000000,397.000000,2.781300,124100.000000 +-119.660000,36.350000,15.000000,1724.000000,374.000000,947.000000,391.000000,3.109400,91900.000000 +-119.660000,36.330000,16.000000,2048.000000,373.000000,1052.000000,388.000000,4.090900,92800.000000 +-119.660000,36.330000,10.000000,1623.000000,409.000000,988.000000,395.000000,1.419400,58100.000000 +-119.660000,36.320000,24.000000,2652.000000,568.000000,1532.000000,445.000000,2.325600,56800.000000 +-119.660000,36.300000,18.000000,1147.000000,202.000000,717.000000,212.000000,3.368100,70500.000000 +-119.660000,34.440000,26.000000,2790.000000,413.000000,1014.000000,397.000000,6.563100,500001.000000 +-119.660000,34.430000,27.000000,5509.000000,1059.000000,2591.000000,979.000000,3.845600,500001.000000 +-119.670000,37.270000,13.000000,5087.000000,981.000000,2284.000000,913.000000,2.741300,123100.000000 +-119.670000,36.890000,15.000000,2373.000000,364.000000,1280.000000,386.000000,5.308000,167500.000000 +-119.670000,36.830000,4.000000,2145.000000,334.000000,1024.000000,308.000000,5.086400,113700.000000 +-119.670000,36.830000,3.000000,2029.000000,336.000000,1003.000000,340.000000,4.435600,111300.000000 
+-119.670000,36.810000,4.000000,1262.000000,216.000000,622.000000,199.000000,4.943200,114400.000000 +-119.670000,36.800000,9.000000,3712.000000,508.000000,1632.000000,474.000000,6.011000,163100.000000 +-119.670000,36.740000,19.000000,2788.000000,614.000000,1365.000000,525.000000,2.781300,120300.000000 +-119.670000,36.730000,27.000000,2845.000000,417.000000,1219.000000,460.000000,4.919600,117900.000000 +-119.670000,36.720000,31.000000,843.000000,140.000000,453.000000,149.000000,2.687500,153800.000000 +-119.670000,36.350000,10.000000,1090.000000,164.000000,470.000000,158.000000,4.943200,118800.000000 +-119.670000,34.440000,32.000000,3202.000000,537.000000,1316.000000,538.000000,5.288800,463800.000000 +-119.670000,34.430000,39.000000,1467.000000,381.000000,1404.000000,374.000000,2.368100,241400.000000 +-119.670000,34.420000,37.000000,1673.000000,444.000000,1477.000000,446.000000,2.064300,246700.000000 +-119.670000,34.420000,23.000000,1333.000000,393.000000,1369.000000,381.000000,2.594700,232600.000000 +-119.670000,34.380000,28.000000,1814.000000,526.000000,849.000000,420.000000,3.162500,364300.000000 +-119.680000,37.350000,13.000000,2307.000000,386.000000,925.000000,347.000000,3.132600,119800.000000 +-119.680000,37.190000,10.000000,3113.000000,589.000000,1508.000000,512.000000,2.816700,96100.000000 +-119.680000,36.830000,11.000000,2455.000000,344.000000,1110.000000,339.000000,6.113300,120000.000000 +-119.680000,36.810000,16.000000,2668.000000,454.000000,1536.000000,457.000000,3.979200,88900.000000 +-119.680000,36.810000,13.000000,2589.000000,413.000000,1356.000000,435.000000,5.025300,106200.000000 +-119.680000,36.800000,7.000000,2855.000000,518.000000,1748.000000,498.000000,4.206600,88400.000000 +-119.680000,36.790000,16.000000,1551.000000,271.000000,1010.000000,292.000000,3.541700,71300.000000 +-119.680000,36.770000,21.000000,1260.000000,182.000000,583.000000,205.000000,6.013200,150800.000000 +-119.680000,36.630000,39.000000,1237.000000,256.000000,638.000000,239.000000,3.013900,65300.000000 +-119.680000,36.620000,31.000000,834.000000,229.000000,616.000000,211.000000,1.660200,61200.000000 +-119.680000,36.320000,28.000000,1325.000000,276.000000,873.000000,240.000000,2.583300,54400.000000 +-119.680000,36.320000,26.000000,592.000000,121.000000,268.000000,116.000000,1.759600,120800.000000 +-119.680000,36.310000,12.000000,2739.000000,535.000000,1859.000000,498.000000,2.993600,60600.000000 +-119.680000,34.440000,23.000000,2600.000000,398.000000,917.000000,374.000000,8.739400,500001.000000 +-119.680000,34.430000,49.000000,1785.000000,386.000000,1267.000000,380.000000,3.520800,251200.000000 +-119.680000,34.430000,33.000000,1961.000000,462.000000,1693.000000,445.000000,2.989600,236000.000000 +-119.680000,34.420000,38.000000,1452.000000,354.000000,1139.000000,340.000000,2.707000,236800.000000 +-119.690000,36.860000,20.000000,1676.000000,263.000000,786.000000,240.000000,4.000000,164600.000000 +-119.690000,36.850000,20.000000,2655.000000,432.000000,1081.000000,379.000000,4.539800,143100.000000 +-119.690000,36.830000,8.000000,943.000000,189.000000,475.000000,155.000000,4.932700,89500.000000 +-119.690000,36.830000,7.000000,2075.000000,353.000000,1040.000000,362.000000,3.994300,100200.000000 +-119.690000,36.830000,32.000000,1098.000000,226.000000,726.000000,224.000000,1.491300,54600.000000 +-119.690000,36.830000,28.000000,1868.000000,350.000000,898.000000,329.000000,3.181400,78900.000000 +-119.690000,36.820000,17.000000,1897.000000,433.000000,1207.000000,384.000000,1.802100,55900.000000 
+-119.690000,36.810000,13.000000,1524.000000,366.000000,994.000000,370.000000,2.544600,93800.000000 +-119.690000,36.790000,15.000000,2524.000000,451.000000,1207.000000,424.000000,2.740400,76300.000000 +-119.690000,36.790000,13.000000,1736.000000,313.000000,993.000000,314.000000,3.769700,83600.000000 +-119.690000,36.770000,22.000000,2456.000000,496.000000,1720.000000,417.000000,2.687500,60600.000000 +-119.690000,36.750000,6.000000,1926.000000,303.000000,965.000000,316.000000,4.746300,93100.000000 +-119.690000,36.750000,13.000000,2343.000000,409.000000,1347.000000,405.000000,4.002700,93100.000000 +-119.690000,36.740000,23.000000,2097.000000,385.000000,911.000000,405.000000,3.512800,121600.000000 +-119.690000,36.740000,17.000000,2438.000000,598.000000,1563.000000,538.000000,1.544900,62500.000000 +-119.690000,36.710000,25.000000,556.000000,79.000000,249.000000,71.000000,4.458300,108300.000000 +-119.690000,36.690000,36.000000,1432.000000,269.000000,836.000000,237.000000,2.156300,88300.000000 +-119.690000,36.460000,29.000000,1702.000000,301.000000,914.000000,280.000000,2.812500,79200.000000 +-119.690000,36.430000,29.000000,1799.000000,356.000000,1278.000000,387.000000,1.781300,57900.000000 +-119.690000,36.410000,38.000000,1016.000000,202.000000,540.000000,187.000000,2.288500,75000.000000 +-119.690000,36.250000,35.000000,2011.000000,349.000000,970.000000,300.000000,2.395000,94100.000000 +-119.690000,35.620000,18.000000,820.000000,239.000000,1345.000000,207.000000,2.118600,47500.000000 +-119.690000,34.440000,41.000000,1989.000000,271.000000,666.000000,269.000000,6.840600,500001.000000 +-119.690000,34.430000,44.000000,2440.000000,485.000000,1011.000000,442.000000,4.149000,443600.000000 +-119.690000,34.430000,43.000000,1257.000000,311.000000,671.000000,263.000000,2.875000,280600.000000 +-119.690000,34.430000,37.000000,2801.000000,497.000000,1150.000000,476.000000,5.831100,387700.000000 +-119.690000,34.430000,30.000000,1273.000000,343.000000,1082.000000,325.000000,2.510400,228100.000000 +-119.690000,34.420000,52.000000,302.000000,112.000000,392.000000,114.000000,2.597800,258300.000000 +-119.690000,34.420000,17.000000,1826.000000,544.000000,1325.000000,532.000000,1.276200,253600.000000 +-119.690000,34.410000,44.000000,1208.000000,357.000000,603.000000,297.000000,2.610300,500000.000000 +-119.700000,36.940000,15.000000,1449.000000,277.000000,649.000000,265.000000,2.486100,86300.000000 +-119.700000,36.830000,23.000000,3532.000000,756.000000,1885.000000,758.000000,2.590400,71400.000000 +-119.700000,36.820000,25.000000,2379.000000,540.000000,1482.000000,484.000000,2.317300,68200.000000 +-119.700000,36.810000,32.000000,2623.000000,528.000000,1570.000000,492.000000,2.715900,68000.000000 +-119.700000,36.800000,31.000000,1746.000000,321.000000,1186.000000,360.000000,2.693200,66400.000000 +-119.700000,34.470000,32.000000,3725.000000,569.000000,1304.000000,527.000000,7.726100,500001.000000 +-119.700000,34.430000,52.000000,977.000000,289.000000,412.000000,272.000000,2.125000,300000.000000 +-119.700000,34.430000,52.000000,1364.000000,460.000000,804.000000,400.000000,2.375000,293800.000000 +-119.700000,34.430000,39.000000,1486.000000,467.000000,758.000000,409.000000,2.687500,320600.000000 +-119.700000,34.430000,37.000000,1462.000000,306.000000,678.000000,322.000000,5.154500,418400.000000 +-119.700000,34.430000,35.000000,1402.000000,369.000000,654.000000,385.000000,2.620500,318800.000000 +-119.700000,34.420000,52.000000,329.000000,109.000000,291.000000,102.000000,1.472200,350000.000000 
+-119.700000,34.410000,52.000000,1526.000000,458.000000,1633.000000,449.000000,2.206900,226500.000000 +-119.700000,34.410000,19.000000,2086.000000,575.000000,1701.000000,530.000000,2.804200,236100.000000 +-119.700000,34.410000,19.000000,1215.000000,360.000000,1349.000000,423.000000,2.660700,226500.000000 +-119.700000,34.360000,35.000000,1604.000000,334.000000,904.000000,337.000000,4.741100,336400.000000 +-119.710000,36.880000,17.000000,2236.000000,315.000000,992.000000,312.000000,6.940500,165200.000000 +-119.710000,36.830000,5.000000,1087.000000,338.000000,623.000000,362.000000,1.806100,113400.000000 +-119.710000,36.830000,15.000000,2727.000000,500.000000,1228.000000,436.000000,3.507800,109000.000000 +-119.710000,36.820000,12.000000,2144.000000,568.000000,1320.000000,566.000000,2.338100,112500.000000 +-119.710000,36.810000,9.000000,1122.000000,290.000000,662.000000,284.000000,2.053600,55000.000000 +-119.710000,36.810000,19.000000,1648.000000,368.000000,557.000000,354.000000,1.796900,72800.000000 +-119.710000,36.800000,25.000000,875.000000,156.000000,646.000000,166.000000,3.000000,72800.000000 +-119.710000,36.800000,17.000000,2056.000000,366.000000,1259.000000,367.000000,3.933800,84700.000000 +-119.710000,36.800000,17.000000,1415.000000,267.000000,861.000000,293.000000,3.250000,81400.000000 +-119.710000,36.790000,34.000000,1891.000000,323.000000,966.000000,355.000000,3.668100,82000.000000 +-119.710000,36.770000,11.000000,5112.000000,1384.000000,2487.000000,1243.000000,2.146100,75900.000000 +-119.710000,36.760000,28.000000,2675.000000,527.000000,1392.000000,521.000000,2.310800,72000.000000 +-119.710000,36.740000,18.000000,8099.000000,1670.000000,4476.000000,1514.000000,2.472800,88300.000000 +-119.710000,36.730000,19.000000,3972.000000,585.000000,1586.000000,560.000000,5.260800,151400.000000 +-119.710000,34.450000,35.000000,2183.000000,363.000000,988.000000,351.000000,5.592200,384400.000000 +-119.710000,34.440000,52.000000,1837.000000,343.000000,711.000000,355.000000,4.131600,443000.000000 +-119.710000,34.430000,48.000000,2408.000000,536.000000,1005.000000,497.000000,3.521300,458600.000000 +-119.710000,34.430000,47.000000,1572.000000,417.000000,790.000000,384.000000,2.642900,279200.000000 +-119.710000,34.430000,18.000000,1170.000000,372.000000,681.000000,346.000000,2.197400,255000.000000 +-119.710000,34.420000,52.000000,1838.000000,692.000000,851.000000,576.000000,1.485100,237500.000000 +-119.710000,34.420000,50.000000,840.000000,279.000000,488.000000,270.000000,2.209700,258300.000000 +-119.710000,34.420000,49.000000,1560.000000,436.000000,1041.000000,411.000000,2.925000,246900.000000 +-119.710000,34.420000,39.000000,1172.000000,322.000000,606.000000,316.000000,2.160000,259100.000000 +-119.710000,34.420000,31.000000,1643.000000,499.000000,1253.000000,499.000000,3.156300,267000.000000 +-119.710000,34.420000,23.000000,2068.000000,658.000000,1898.000000,570.000000,2.550600,230800.000000 +-119.710000,34.410000,18.000000,1225.000000,317.000000,694.000000,306.000000,3.682300,255000.000000 +-119.710000,34.400000,36.000000,1846.000000,358.000000,748.000000,329.000000,4.228300,326800.000000 +-119.720000,37.460000,13.000000,1999.000000,375.000000,750.000000,308.000000,2.875000,96000.000000 +-119.720000,37.380000,16.000000,2131.000000,424.000000,989.000000,369.000000,2.607100,103700.000000 +-119.720000,36.820000,16.000000,2627.000000,613.000000,1054.000000,623.000000,1.948300,112500.000000 +-119.720000,36.810000,28.000000,1651.000000,305.000000,780.000000,309.000000,2.945300,72200.000000 
+-119.720000,36.810000,15.000000,2175.000000,564.000000,1194.000000,482.000000,2.676700,87500.000000 +-119.720000,36.800000,23.000000,2128.000000,442.000000,1047.000000,450.000000,2.625000,71500.000000 +-119.720000,36.800000,16.000000,2396.000000,526.000000,1338.000000,518.000000,2.165300,78800.000000 +-119.720000,36.800000,15.000000,3045.000000,689.000000,1340.000000,588.000000,3.195300,85700.000000 +-119.720000,36.760000,23.000000,6403.000000,1370.000000,3573.000000,1260.000000,2.300600,69000.000000 +-119.720000,36.750000,27.000000,1691.000000,282.000000,869.000000,337.000000,3.951400,86900.000000 +-119.720000,36.750000,11.000000,4832.000000,993.000000,2190.000000,888.000000,2.661100,74700.000000 +-119.720000,36.730000,9.000000,1914.000000,491.000000,1116.000000,424.000000,1.464600,65900.000000 +-119.720000,36.720000,15.000000,1713.000000,246.000000,766.000000,232.000000,6.816200,127200.000000 +-119.720000,36.710000,7.000000,2456.000000,463.000000,1350.000000,424.000000,3.017900,91600.000000 +-119.720000,36.340000,33.000000,1287.000000,214.000000,580.000000,210.000000,3.201900,112500.000000 +-119.720000,36.320000,40.000000,1185.000000,221.000000,676.000000,256.000000,2.272100,52600.000000 +-119.720000,34.770000,35.000000,2469.000000,553.000000,1168.000000,427.000000,2.458300,62100.000000 +-119.720000,34.440000,50.000000,3265.000000,509.000000,1256.000000,443.000000,6.399700,500001.000000 +-119.720000,34.440000,43.000000,1781.000000,342.000000,663.000000,358.000000,4.700000,293800.000000 +-119.720000,34.440000,39.000000,1489.000000,304.000000,700.000000,268.000000,3.881900,289900.000000 +-119.720000,34.430000,36.000000,1156.000000,309.000000,521.000000,304.000000,2.601400,320600.000000 +-119.720000,34.430000,33.000000,1028.000000,377.000000,753.000000,356.000000,2.345400,243800.000000 +-119.720000,34.430000,27.000000,984.000000,299.000000,777.000000,313.000000,2.569400,275000.000000 +-119.720000,34.420000,49.000000,1610.000000,370.000000,961.000000,351.000000,2.698300,260100.000000 +-119.720000,34.420000,37.000000,1635.000000,427.000000,1027.000000,408.000000,3.590500,264700.000000 +-119.720000,34.420000,31.000000,1524.000000,383.000000,1257.000000,398.000000,2.601900,250000.000000 +-119.720000,34.410000,35.000000,871.000000,145.000000,354.000000,154.000000,4.321400,341800.000000 +-119.720000,34.410000,35.000000,1853.000000,375.000000,878.000000,338.000000,4.904400,335300.000000 +-119.720000,34.410000,26.000000,1648.000000,378.000000,954.000000,405.000000,3.289500,335000.000000 +-119.730000,36.830000,8.000000,3602.000000,547.000000,1959.000000,580.000000,5.347800,138800.000000 +-119.730000,36.830000,14.000000,3348.000000,491.000000,1584.000000,493.000000,5.082800,111400.000000 +-119.730000,36.810000,19.000000,1699.000000,356.000000,994.000000,368.000000,2.777800,79700.000000 +-119.730000,36.800000,15.000000,2376.000000,538.000000,1197.000000,510.000000,3.141700,74600.000000 +-119.730000,36.770000,24.000000,4410.000000,939.000000,2362.000000,862.000000,2.940600,73000.000000 +-119.730000,36.750000,39.000000,1745.000000,321.000000,901.000000,303.000000,3.171900,67900.000000 +-119.730000,36.740000,14.000000,6202.000000,1551.000000,5561.000000,1435.000000,1.607300,64700.000000 +-119.730000,36.730000,9.000000,1621.000000,428.000000,678.000000,394.000000,2.243700,54200.000000 +-119.730000,36.730000,7.000000,2461.000000,647.000000,1587.000000,551.000000,1.400700,225000.000000 +-119.730000,36.720000,26.000000,2645.000000,1005.000000,1660.000000,991.000000,0.699100,89500.000000 
+-119.730000,36.720000,15.000000,2246.000000,456.000000,1190.000000,403.000000,2.029400,70400.000000 +-119.730000,36.680000,32.000000,755.000000,205.000000,681.000000,207.000000,1.798600,49300.000000 +-119.730000,36.590000,31.000000,1551.000000,296.000000,1058.000000,287.000000,3.343800,92600.000000 +-119.730000,36.560000,32.000000,1513.000000,272.000000,1038.000000,272.000000,3.046900,82700.000000 +-119.730000,36.520000,20.000000,1741.000000,331.000000,1466.000000,289.000000,2.592100,94200.000000 +-119.730000,34.440000,48.000000,2114.000000,390.000000,973.000000,367.000000,4.802100,351100.000000 +-119.730000,34.440000,38.000000,1729.000000,394.000000,801.000000,395.000000,3.136400,357500.000000 +-119.730000,34.430000,27.000000,1448.000000,404.000000,978.000000,338.000000,2.303000,261000.000000 +-119.730000,34.420000,25.000000,2024.000000,312.000000,907.000000,335.000000,5.412700,392800.000000 +-119.730000,34.420000,23.000000,1364.000000,227.000000,638.000000,238.000000,5.327900,413900.000000 +-119.730000,34.410000,29.000000,1769.000000,297.000000,703.000000,269.000000,4.437500,350000.000000 +-119.730000,34.350000,20.000000,1648.000000,319.000000,905.000000,307.000000,4.375000,335200.000000 +-119.740000,36.850000,3.000000,10425.000000,2121.000000,4432.000000,1778.000000,3.903200,140800.000000 +-119.740000,36.830000,14.000000,4675.000000,829.000000,2235.000000,787.000000,4.109800,108200.000000 +-119.740000,36.800000,18.000000,10862.000000,2401.000000,5466.000000,2209.000000,2.467800,74300.000000 +-119.740000,36.780000,27.000000,4049.000000,947.000000,2254.000000,882.000000,2.246700,70700.000000 +-119.740000,36.770000,30.000000,2427.000000,482.000000,1375.000000,518.000000,2.573700,76900.000000 +-119.740000,36.760000,42.000000,2093.000000,470.000000,1621.000000,438.000000,1.799400,58700.000000 +-119.740000,36.750000,47.000000,2236.000000,418.000000,1042.000000,397.000000,2.954500,59600.000000 +-119.740000,36.740000,39.000000,4893.000000,1210.000000,4749.000000,1067.000000,1.206500,55600.000000 +-119.740000,36.730000,42.000000,1236.000000,272.000000,946.000000,261.000000,2.053600,50000.000000 +-119.740000,36.730000,34.000000,1254.000000,272.000000,1056.000000,279.000000,2.326900,50800.000000 +-119.740000,36.720000,25.000000,3972.000000,842.000000,2863.000000,729.000000,2.130400,58500.000000 +-119.740000,36.710000,17.000000,5872.000000,1250.000000,5034.000000,1224.000000,2.190500,61800.000000 +-119.740000,36.650000,19.000000,2546.000000,463.000000,1257.000000,418.000000,2.901300,89500.000000 +-119.740000,36.150000,21.000000,1548.000000,308.000000,1137.000000,306.000000,2.468800,61300.000000 +-119.740000,34.450000,29.000000,2526.000000,388.000000,1092.000000,409.000000,6.059700,383100.000000 +-119.740000,34.440000,27.000000,1251.000000,282.000000,503.000000,283.000000,2.800000,353000.000000 +-119.740000,34.440000,26.000000,4257.000000,1031.000000,1861.000000,950.000000,3.404700,294500.000000 +-119.740000,34.430000,26.000000,3119.000000,562.000000,1459.000000,562.000000,5.043400,340400.000000 +-119.740000,34.410000,30.000000,2365.000000,417.000000,1053.000000,409.000000,5.595900,346200.000000 +-119.740000,34.380000,32.000000,1479.000000,287.000000,830.000000,288.000000,5.345000,322600.000000 +-119.750000,36.830000,15.000000,2793.000000,436.000000,1411.000000,441.000000,4.929200,109400.000000 +-119.750000,36.800000,30.000000,3308.000000,662.000000,1894.000000,648.000000,2.197000,74500.000000 +-119.750000,36.800000,25.000000,2718.000000,504.000000,1257.000000,465.000000,2.333300,90600.000000 
+-119.750000,36.780000,35.000000,1129.000000,220.000000,474.000000,242.000000,2.440500,74300.000000 +-119.750000,36.770000,32.000000,1962.000000,399.000000,1005.000000,392.000000,2.672600,70400.000000 +-119.750000,36.760000,32.000000,2072.000000,497.000000,2002.000000,470.000000,1.327800,44500.000000 +-119.750000,36.750000,50.000000,1515.000000,294.000000,852.000000,297.000000,1.995500,54200.000000 +-119.750000,36.750000,49.000000,2331.000000,460.000000,1290.000000,477.000000,2.511100,55400.000000 +-119.750000,36.730000,39.000000,2290.000000,539.000000,1685.000000,536.000000,1.632500,52100.000000 +-119.750000,36.720000,22.000000,3247.000000,859.000000,4179.000000,881.000000,1.334300,60800.000000 +-119.750000,36.710000,38.000000,1481.000000,353.000000,1543.000000,372.000000,1.457700,49800.000000 +-119.750000,34.450000,6.000000,2864.000000,642.000000,1404.000000,603.000000,5.507300,263800.000000 +-119.750000,34.450000,26.000000,3578.000000,677.000000,1504.000000,618.000000,4.137500,395000.000000 +-119.750000,34.430000,23.000000,2982.000000,837.000000,1317.000000,787.000000,3.377600,283200.000000 +-119.760000,36.830000,22.000000,2803.000000,438.000000,1234.000000,457.000000,4.517900,99600.000000 +-119.760000,36.830000,20.000000,3214.000000,446.000000,1360.000000,463.000000,5.259500,110900.000000 +-119.760000,36.830000,17.000000,3690.000000,628.000000,1888.000000,601.000000,4.019600,84200.000000 +-119.760000,36.820000,17.000000,6932.000000,1486.000000,3056.000000,1453.000000,2.337500,99300.000000 +-119.760000,36.810000,19.000000,4643.000000,1429.000000,4638.000000,1335.000000,1.271600,69400.000000 +-119.760000,36.800000,29.000000,3494.000000,662.000000,1781.000000,616.000000,2.589300,70900.000000 +-119.760000,36.800000,20.000000,6257.000000,1346.000000,2795.000000,1267.000000,2.209400,83700.000000 +-119.760000,36.790000,26.000000,3654.000000,837.000000,1976.000000,830.000000,2.154400,72800.000000 +-119.760000,36.780000,30.000000,6117.000000,1330.000000,2768.000000,1224.000000,2.138300,78800.000000 +-119.760000,36.770000,38.000000,3804.000000,814.000000,2142.000000,816.000000,2.143900,60200.000000 +-119.760000,36.770000,36.000000,2507.000000,466.000000,1227.000000,474.000000,2.785000,72300.000000 +-119.760000,36.760000,23.000000,3800.000000,1003.000000,3786.000000,917.000000,1.476600,50600.000000 +-119.760000,36.750000,41.000000,1576.000000,417.000000,1567.000000,366.000000,1.254500,45500.000000 +-119.760000,36.750000,35.000000,2347.000000,526.000000,1676.000000,481.000000,1.654800,49400.000000 +-119.760000,36.750000,35.000000,1607.000000,383.000000,1407.000000,382.000000,2.190000,53400.000000 +-119.760000,36.740000,52.000000,2137.000000,448.000000,1194.000000,444.000000,1.302900,69100.000000 +-119.760000,36.730000,39.000000,1553.000000,363.000000,1449.000000,341.000000,1.441900,45500.000000 +-119.760000,36.720000,24.000000,1240.000000,265.000000,1035.000000,232.000000,2.875000,60600.000000 +-119.760000,36.710000,29.000000,1745.000000,441.000000,1530.000000,391.000000,1.561100,44400.000000 +-119.760000,36.680000,29.000000,1243.000000,312.000000,836.000000,277.000000,1.835500,74200.000000 +-119.760000,34.440000,28.000000,1985.000000,582.000000,1092.000000,548.000000,2.470100,290900.000000 +-119.770000,37.190000,8.000000,5212.000000,872.000000,2383.000000,857.000000,4.109900,113600.000000 +-119.770000,36.910000,3.000000,7520.000000,1143.000000,2878.000000,1077.000000,5.327200,174200.000000 +-119.770000,36.860000,7.000000,4139.000000,544.000000,1843.000000,562.000000,8.273700,193500.000000 
+-119.770000,36.850000,8.000000,1519.000000,234.000000,711.000000,248.000000,5.989700,123600.000000 +-119.770000,36.840000,15.000000,2058.000000,412.000000,891.000000,378.000000,3.256900,124400.000000 +-119.770000,36.840000,15.000000,1924.000000,262.000000,848.000000,277.000000,5.388600,125300.000000 +-119.770000,36.830000,19.000000,3237.000000,507.000000,1378.000000,510.000000,4.780400,101100.000000 +-119.770000,36.830000,16.000000,2360.000000,355.000000,1034.000000,359.000000,5.063500,108500.000000 +-119.770000,36.810000,28.000000,1713.000000,302.000000,663.000000,282.000000,3.567000,85500.000000 +-119.770000,36.800000,32.000000,3461.000000,665.000000,1507.000000,649.000000,2.924400,84600.000000 +-119.770000,36.790000,34.000000,2679.000000,460.000000,1141.000000,470.000000,3.264200,89600.000000 +-119.770000,36.790000,27.000000,2258.000000,427.000000,1076.000000,423.000000,2.993700,81100.000000 +-119.770000,36.780000,40.000000,1411.000000,284.000000,609.000000,296.000000,1.937500,67700.000000 +-119.770000,36.780000,36.000000,3616.000000,779.000000,1994.000000,786.000000,2.543400,67300.000000 +-119.770000,36.770000,29.000000,2554.000000,705.000000,2669.000000,655.000000,1.217600,61900.000000 +-119.770000,36.760000,43.000000,1945.000000,413.000000,1492.000000,422.000000,1.517400,54600.000000 +-119.770000,36.760000,43.000000,1623.000000,294.000000,781.000000,272.000000,1.869000,56000.000000 +-119.770000,36.750000,44.000000,1818.000000,412.000000,1680.000000,418.000000,1.708300,48300.000000 +-119.770000,36.750000,39.000000,1287.000000,332.000000,1386.000000,306.000000,1.522700,46900.000000 +-119.770000,36.740000,51.000000,1454.000000,235.000000,729.000000,252.000000,3.312500,70100.000000 +-119.770000,36.740000,50.000000,1325.000000,280.000000,811.000000,281.000000,1.866700,62800.000000 +-119.770000,36.730000,45.000000,1081.000000,241.000000,821.000000,230.000000,1.782900,52600.000000 +-119.770000,36.730000,44.000000,1960.000000,393.000000,1286.000000,381.000000,2.151800,53000.000000 +-119.770000,36.320000,14.000000,3400.000000,618.000000,1867.000000,612.000000,3.992600,92500.000000 +-119.770000,36.310000,14.000000,3677.000000,863.000000,2191.000000,785.000000,2.621800,69100.000000 +-119.770000,36.300000,24.000000,2202.000000,471.000000,1052.000000,439.000000,2.103800,62000.000000 +-119.770000,35.650000,21.000000,2403.000000,483.000000,1647.000000,415.000000,2.606600,80000.000000 +-119.770000,34.440000,24.000000,5652.000000,1313.000000,2312.000000,1294.000000,2.471700,295300.000000 +-119.770000,34.430000,28.000000,3318.000000,441.000000,1604.000000,404.000000,9.782100,500001.000000 +-119.770000,34.430000,22.000000,2552.000000,443.000000,1066.000000,424.000000,5.127100,342500.000000 +-119.780000,38.690000,17.000000,1364.000000,282.000000,338.000000,152.000000,2.450000,117600.000000 +-119.780000,36.860000,8.000000,3468.000000,675.000000,1604.000000,626.000000,4.207100,128300.000000 +-119.780000,36.860000,10.000000,2902.000000,363.000000,1200.000000,363.000000,8.360800,187300.000000 +-119.780000,36.850000,12.000000,782.000000,166.000000,292.000000,164.000000,2.827400,79500.000000 +-119.780000,36.840000,7.000000,4907.000000,1075.000000,2014.000000,909.000000,3.214700,111900.000000 +-119.780000,36.830000,18.000000,4164.000000,741.000000,1817.000000,681.000000,4.215300,95200.000000 +-119.780000,36.830000,11.000000,2754.000000,663.000000,1328.000000,604.000000,2.366700,69300.000000 +-119.780000,36.820000,25.000000,5016.000000,963.000000,2133.000000,928.000000,3.625000,89500.000000 
+-119.780000,36.820000,22.000000,4241.000000,1147.000000,1929.000000,971.000000,1.770800,53500.000000 +-119.780000,36.790000,33.000000,2260.000000,440.000000,966.000000,413.000000,2.930100,68300.000000 +-119.780000,36.780000,37.000000,2185.000000,455.000000,1143.000000,438.000000,1.978400,70700.000000 +-119.780000,36.780000,31.000000,2164.000000,456.000000,959.000000,463.000000,2.329300,73400.000000 +-119.780000,36.770000,45.000000,1315.000000,256.000000,666.000000,240.000000,2.356200,58100.000000 +-119.780000,36.760000,47.000000,1425.000000,323.000000,949.000000,325.000000,1.734400,51300.000000 +-119.780000,36.750000,49.000000,1175.000000,307.000000,982.000000,278.000000,1.293700,52000.000000 +-119.780000,36.750000,43.000000,2070.000000,512.000000,1925.000000,444.000000,1.463500,46600.000000 +-119.780000,36.750000,35.000000,2114.000000,506.000000,2050.000000,474.000000,1.237500,50000.000000 +-119.780000,36.740000,15.000000,1461.000000,415.000000,924.000000,356.000000,2.504500,90300.000000 +-119.780000,36.720000,22.000000,354.000000,121.000000,530.000000,115.000000,2.145800,34400.000000 +-119.780000,36.710000,35.000000,1987.000000,394.000000,1233.000000,383.000000,1.358700,45300.000000 +-119.780000,36.310000,14.000000,1287.000000,291.000000,737.000000,269.000000,3.166700,126400.000000 +-119.780000,36.300000,30.000000,1846.000000,391.000000,1255.000000,352.000000,2.168100,66600.000000 +-119.780000,36.270000,29.000000,1871.000000,315.000000,1066.000000,309.000000,4.571400,100800.000000 +-119.780000,34.480000,21.000000,2377.000000,322.000000,1007.000000,328.000000,7.924800,500001.000000 +-119.780000,34.450000,9.000000,1830.000000,353.000000,1515.000000,220.000000,4.210900,450000.000000 +-119.780000,34.450000,23.000000,2077.000000,306.000000,705.000000,256.000000,6.474400,500001.000000 +-119.780000,34.440000,28.000000,2864.000000,495.000000,1364.000000,482.000000,4.835000,353400.000000 +-119.790000,36.850000,11.000000,2596.000000,619.000000,1765.000000,539.000000,1.951100,54000.000000 +-119.790000,36.840000,22.000000,1529.000000,375.000000,1543.000000,395.000000,1.792600,51700.000000 +-119.790000,36.830000,15.000000,3356.000000,694.000000,1232.000000,627.000000,2.221500,72200.000000 +-119.790000,36.820000,25.000000,2330.000000,462.000000,1215.000000,467.000000,3.214300,93000.000000 +-119.790000,36.820000,23.000000,4358.000000,819.000000,1852.000000,802.000000,3.416700,105200.000000 +-119.790000,36.810000,35.000000,1877.000000,328.000000,1155.000000,353.000000,3.069000,69600.000000 +-119.790000,36.800000,27.000000,2462.000000,484.000000,852.000000,449.000000,3.320000,124700.000000 +-119.790000,36.790000,33.000000,3433.000000,785.000000,1806.000000,783.000000,1.938600,67500.000000 +-119.790000,36.790000,26.000000,1700.000000,423.000000,909.000000,386.000000,2.256000,64500.000000 +-119.790000,36.790000,19.000000,1524.000000,448.000000,960.000000,386.000000,1.512200,47500.000000 +-119.790000,36.780000,41.000000,2227.000000,462.000000,1129.000000,415.000000,2.319000,59100.000000 +-119.790000,36.780000,38.000000,1912.000000,456.000000,1131.000000,408.000000,2.030000,58800.000000 +-119.790000,36.770000,43.000000,2323.000000,502.000000,1144.000000,471.000000,2.396700,58700.000000 +-119.790000,36.760000,52.000000,2408.000000,498.000000,1361.000000,465.000000,2.105500,61300.000000 +-119.790000,36.760000,52.000000,1185.000000,260.000000,635.000000,239.000000,1.175000,56100.000000 +-119.790000,36.750000,52.000000,377.000000,97.000000,530.000000,96.000000,1.000000,45000.000000 
+-119.790000,36.750000,33.000000,3161.000000,934.000000,3530.000000,846.000000,1.123000,46700.000000 +-119.790000,36.740000,52.000000,173.000000,87.000000,401.000000,84.000000,2.109400,75000.000000 +-119.790000,36.740000,35.000000,853.000000,296.000000,1228.000000,289.000000,1.051300,39600.000000 +-119.790000,36.730000,52.000000,112.000000,28.000000,193.000000,40.000000,1.975000,47500.000000 +-119.790000,36.720000,41.000000,1562.000000,322.000000,927.000000,277.000000,1.304700,44100.000000 +-119.790000,36.720000,19.000000,1719.000000,391.000000,1369.000000,368.000000,1.250000,53000.000000 +-119.790000,36.700000,23.000000,1731.000000,363.000000,1210.000000,341.000000,1.392200,49500.000000 +-119.790000,36.550000,32.000000,1393.000000,276.000000,999.000000,245.000000,2.021600,76800.000000 +-119.790000,36.320000,19.000000,3252.000000,614.000000,1971.000000,607.000000,3.066700,75800.000000 +-119.790000,36.300000,16.000000,1717.000000,277.000000,903.000000,289.000000,4.343800,93100.000000 +-119.790000,36.290000,6.000000,1265.000000,227.000000,764.000000,246.000000,4.291700,104200.000000 +-119.790000,34.450000,24.000000,2746.000000,433.000000,1076.000000,380.000000,5.863500,348700.000000 +-119.790000,34.440000,25.000000,1479.000000,314.000000,977.000000,309.000000,4.179700,271800.000000 +-119.790000,34.430000,26.000000,3611.000000,563.000000,2089.000000,540.000000,5.161500,276200.000000 +-119.790000,34.400000,20.000000,3104.000000,415.000000,1061.000000,380.000000,9.688500,500001.000000 +-119.800000,37.500000,15.000000,989.000000,184.000000,406.000000,151.000000,3.177100,121900.000000 +-119.800000,36.800000,43.000000,1951.000000,288.000000,725.000000,308.000000,6.335900,169300.000000 +-119.800000,36.780000,43.000000,2382.000000,431.000000,874.000000,380.000000,3.554200,96500.000000 +-119.800000,36.770000,52.000000,2964.000000,512.000000,1114.000000,486.000000,3.810500,87600.000000 +-119.800000,36.760000,52.000000,2224.000000,418.000000,832.000000,406.000000,2.395200,78400.000000 +-119.800000,36.760000,52.000000,1853.000000,437.000000,764.000000,390.000000,1.642900,69200.000000 +-119.800000,36.750000,52.000000,1788.000000,449.000000,1156.000000,418.000000,1.729800,58400.000000 +-119.800000,36.750000,46.000000,2625.000000,593.000000,1368.000000,551.000000,1.527300,59000.000000 +-119.800000,36.740000,25.000000,1717.000000,542.000000,1343.000000,471.000000,0.799000,51800.000000 +-119.800000,36.720000,43.000000,1286.000000,360.000000,972.000000,345.000000,0.951300,50400.000000 +-119.800000,36.720000,19.000000,1334.000000,336.000000,1171.000000,319.000000,1.048100,48500.000000 +-119.800000,36.710000,29.000000,1541.000000,291.000000,1007.000000,313.000000,2.004300,53500.000000 +-119.800000,36.700000,28.000000,1592.000000,304.000000,962.000000,282.000000,1.330400,51300.000000 +-119.800000,36.680000,31.000000,2214.000000,432.000000,1326.000000,416.000000,2.169100,66700.000000 +-119.800000,36.650000,34.000000,2263.000000,423.000000,1184.000000,407.000000,1.769200,74200.000000 +-119.800000,36.290000,7.000000,479.000000,84.000000,327.000000,103.000000,5.172800,107500.000000 +-119.800000,36.020000,20.000000,156.000000,39.000000,171.000000,37.000000,3.050000,225000.000000 +-119.800000,34.440000,27.000000,2674.000000,419.000000,1176.000000,416.000000,5.029400,280200.000000 +-119.800000,34.430000,27.000000,3143.000000,537.000000,1760.000000,570.000000,4.695700,271500.000000 +-119.800000,34.430000,22.000000,2845.000000,500.000000,1456.000000,454.000000,5.660400,276400.000000 
+-119.810000,36.920000,14.000000,4795.000000,710.000000,2047.000000,640.000000,4.665000,121300.000000 +-119.810000,36.850000,17.000000,2340.000000,370.000000,1174.000000,396.000000,4.230400,94400.000000 +-119.810000,36.830000,19.000000,6789.000000,1200.000000,2325.000000,1109.000000,4.049000,126000.000000 +-119.810000,36.830000,10.000000,5780.000000,922.000000,2712.000000,883.000000,5.644500,135500.000000 +-119.810000,36.810000,33.000000,3972.000000,594.000000,1324.000000,561.000000,5.451300,143300.000000 +-119.810000,36.800000,38.000000,2252.000000,325.000000,777.000000,314.000000,6.157500,160100.000000 +-119.810000,36.800000,29.000000,2806.000000,552.000000,1242.000000,540.000000,3.595800,88800.000000 +-119.810000,36.790000,39.000000,2471.000000,460.000000,1118.000000,431.000000,2.416700,71900.000000 +-119.810000,36.790000,35.000000,2314.000000,443.000000,954.000000,457.000000,2.950600,73800.000000 +-119.810000,36.780000,52.000000,2281.000000,371.000000,839.000000,367.000000,3.597200,89900.000000 +-119.810000,36.780000,37.000000,1965.000000,364.000000,796.000000,335.000000,3.625000,83400.000000 +-119.810000,36.780000,35.000000,1012.000000,245.000000,633.000000,240.000000,2.032400,55500.000000 +-119.810000,36.770000,48.000000,1805.000000,329.000000,741.000000,331.000000,2.580400,78900.000000 +-119.810000,36.760000,52.000000,1792.000000,352.000000,1049.000000,357.000000,2.437500,57100.000000 +-119.810000,36.760000,48.000000,2059.000000,388.000000,834.000000,405.000000,2.930600,67900.000000 +-119.810000,36.750000,52.000000,1827.000000,356.000000,855.000000,353.000000,1.763600,55100.000000 +-119.810000,36.740000,36.000000,607.000000,155.000000,483.000000,146.000000,1.562500,47500.000000 +-119.810000,36.730000,50.000000,772.000000,194.000000,606.000000,167.000000,2.220600,59200.000000 +-119.810000,36.730000,47.000000,1314.000000,416.000000,1155.000000,326.000000,1.372000,49600.000000 +-119.810000,36.600000,24.000000,2246.000000,462.000000,1291.000000,394.000000,2.400600,76400.000000 +-119.810000,36.510000,31.000000,1241.000000,254.000000,767.000000,226.000000,2.732100,83600.000000 +-119.810000,36.280000,24.000000,544.000000,112.000000,442.000000,106.000000,3.107100,56100.000000 +-119.810000,34.460000,22.000000,3488.000000,452.000000,1479.000000,458.000000,7.168700,384400.000000 +-119.810000,34.450000,24.000000,3678.000000,567.000000,1554.000000,570.000000,6.517300,334000.000000 +-119.810000,34.440000,14.000000,961.000000,305.000000,662.000000,286.000000,3.211500,206300.000000 +-119.820000,37.570000,13.000000,1713.000000,340.000000,643.000000,241.000000,2.662000,92400.000000 +-119.820000,36.840000,9.000000,2340.000000,544.000000,860.000000,520.000000,3.322900,119300.000000 +-119.820000,36.840000,7.000000,2289.000000,342.000000,1077.000000,354.000000,5.486800,158800.000000 +-119.820000,36.830000,16.000000,2868.000000,376.000000,1016.000000,379.000000,6.117500,144700.000000 +-119.820000,36.830000,14.000000,2982.000000,412.000000,1408.000000,423.000000,5.324100,123000.000000 +-119.820000,36.820000,28.000000,2268.000000,336.000000,752.000000,330.000000,5.280900,151500.000000 +-119.820000,36.810000,25.000000,3305.000000,551.000000,1149.000000,500.000000,5.069800,150900.000000 +-119.820000,36.800000,33.000000,1670.000000,256.000000,528.000000,250.000000,2.947100,99500.000000 +-119.820000,36.790000,35.000000,1474.000000,291.000000,709.000000,294.000000,2.652200,65900.000000 +-119.820000,36.790000,18.000000,5822.000000,1439.000000,3415.000000,1224.000000,1.685400,64700.000000 
+-119.820000,36.780000,36.000000,1582.000000,313.000000,761.000000,318.000000,2.605500,69200.000000 +-119.820000,36.770000,36.000000,2252.000000,468.000000,1117.000000,442.000000,2.908100,65600.000000 +-119.820000,36.760000,46.000000,2194.000000,563.000000,924.000000,542.000000,1.402800,68500.000000 +-119.820000,36.760000,41.000000,1973.000000,399.000000,1107.000000,375.000000,1.897100,66900.000000 +-119.820000,36.740000,52.000000,610.000000,128.000000,406.000000,122.000000,1.896700,43800.000000 +-119.820000,36.720000,17.000000,1276.000000,242.000000,927.000000,238.000000,2.617600,54100.000000 +-119.820000,36.320000,18.000000,942.000000,193.000000,424.000000,174.000000,2.067300,87500.000000 +-119.820000,36.190000,33.000000,1293.000000,272.000000,694.000000,229.000000,2.022100,52200.000000 +-119.820000,34.450000,24.000000,3592.000000,533.000000,1683.000000,528.000000,6.724700,333800.000000 +-119.820000,34.440000,28.000000,1992.000000,531.000000,1622.000000,509.000000,2.768900,228200.000000 +-119.820000,34.440000,22.000000,2239.000000,475.000000,1016.000000,434.000000,4.875000,295400.000000 +-119.820000,34.440000,16.000000,1414.000000,463.000000,793.000000,439.000000,3.603400,150000.000000 +-119.830000,36.830000,14.000000,2351.000000,341.000000,1128.000000,363.000000,6.990300,141200.000000 +-119.830000,36.820000,14.000000,1087.000000,165.000000,365.000000,176.000000,7.290900,155600.000000 +-119.830000,36.800000,24.000000,3756.000000,681.000000,1586.000000,739.000000,3.857100,90100.000000 +-119.830000,36.800000,16.000000,6101.000000,1200.000000,3407.000000,1134.000000,3.125000,80800.000000 +-119.830000,36.790000,35.000000,1872.000000,363.000000,1054.000000,369.000000,3.327200,65600.000000 +-119.830000,36.790000,24.000000,3505.000000,819.000000,2098.000000,774.000000,1.957500,67000.000000 +-119.830000,36.780000,35.000000,1789.000000,357.000000,933.000000,357.000000,2.522300,66200.000000 +-119.830000,36.780000,30.000000,3162.000000,640.000000,1660.000000,639.000000,2.835900,80300.000000 +-119.830000,36.770000,32.000000,2867.000000,615.000000,1705.000000,570.000000,2.428600,68100.000000 +-119.830000,36.770000,23.000000,2168.000000,503.000000,1190.000000,425.000000,2.625000,71600.000000 +-119.830000,36.760000,15.000000,3291.000000,772.000000,1738.000000,634.000000,1.976000,67300.000000 +-119.830000,36.750000,33.000000,662.000000,183.000000,607.000000,181.000000,1.392900,55600.000000 +-119.830000,36.730000,21.000000,1702.000000,358.000000,1347.000000,316.000000,2.413700,62100.000000 +-119.830000,36.720000,28.000000,60.000000,10.000000,46.000000,13.000000,4.350000,67500.000000 +-119.830000,36.710000,43.000000,355.000000,81.000000,233.000000,75.000000,2.416700,73900.000000 +-119.830000,36.540000,31.000000,1732.000000,332.000000,979.000000,294.000000,2.520800,60000.000000 +-119.830000,36.370000,25.000000,1549.000000,269.000000,819.000000,272.000000,2.715900,101400.000000 +-119.830000,34.450000,24.000000,2168.000000,373.000000,934.000000,366.000000,5.419700,280900.000000 +-119.830000,34.440000,35.000000,796.000000,281.000000,567.000000,257.000000,2.138900,260000.000000 +-119.830000,34.440000,26.000000,1739.000000,402.000000,599.000000,368.000000,3.087500,198400.000000 +-119.830000,34.430000,31.000000,798.000000,346.000000,699.000000,301.000000,2.141700,205000.000000 +-119.840000,37.480000,17.000000,2582.000000,553.000000,1087.000000,423.000000,2.500000,104200.000000 +-119.840000,36.850000,8.000000,3791.000000,487.000000,1424.000000,475.000000,10.514400,345900.000000 
+-119.840000,36.840000,12.000000,2396.000000,290.000000,863.000000,258.000000,8.771600,229200.000000 +-119.840000,36.830000,17.000000,3012.000000,408.000000,987.000000,362.000000,7.420100,229700.000000 +-119.840000,36.820000,17.000000,2807.000000,376.000000,996.000000,353.000000,5.535700,167700.000000 +-119.840000,36.810000,18.000000,2789.000000,378.000000,937.000000,364.000000,7.706200,188300.000000 +-119.840000,36.800000,19.000000,3244.000000,776.000000,1463.000000,710.000000,2.046900,66900.000000 +-119.840000,36.800000,16.000000,2849.000000,506.000000,1508.000000,478.000000,3.407400,72700.000000 +-119.840000,36.790000,21.000000,3235.000000,648.000000,1820.000000,614.000000,3.344700,71400.000000 +-119.840000,36.780000,24.000000,3242.000000,795.000000,2764.000000,773.000000,1.338500,58800.000000 +-119.840000,36.770000,6.000000,1853.000000,473.000000,1397.000000,417.000000,1.481700,72000.000000 +-119.840000,36.540000,19.000000,1310.000000,241.000000,702.000000,217.000000,2.437500,78200.000000 +-119.840000,34.440000,28.000000,977.000000,162.000000,537.000000,159.000000,4.240400,274300.000000 +-119.850000,37.100000,8.000000,828.000000,168.000000,413.000000,146.000000,3.375000,80700.000000 +-119.850000,36.970000,13.000000,2872.000000,477.000000,1607.000000,481.000000,4.475000,102400.000000 +-119.850000,36.840000,12.000000,2272.000000,304.000000,840.000000,305.000000,8.966900,213900.000000 +-119.850000,36.830000,15.000000,2563.000000,335.000000,1080.000000,356.000000,6.718100,160300.000000 +-119.850000,36.830000,11.000000,2497.000000,427.000000,1101.000000,405.000000,4.803600,141600.000000 +-119.850000,36.820000,9.000000,3995.000000,778.000000,1691.000000,712.000000,3.323900,91300.000000 +-119.850000,36.820000,16.000000,1852.000000,274.000000,887.000000,286.000000,5.540500,119300.000000 +-119.850000,36.820000,15.000000,1387.000000,236.000000,638.000000,195.000000,5.584200,88900.000000 +-119.850000,36.810000,15.000000,1743.000000,310.000000,1011.000000,325.000000,3.755000,68000.000000 +-119.850000,36.800000,14.000000,4177.000000,914.000000,2300.000000,867.000000,2.956500,73000.000000 +-119.850000,36.800000,14.000000,1876.000000,324.000000,1031.000000,311.000000,3.656300,88800.000000 +-119.850000,36.780000,8.000000,3096.000000,684.000000,1454.000000,545.000000,2.785700,79700.000000 +-119.850000,36.770000,9.000000,1142.000000,314.000000,620.000000,283.000000,2.044600,81300.000000 +-119.850000,36.770000,27.000000,1510.000000,344.000000,847.000000,295.000000,2.931500,83200.000000 +-119.850000,36.760000,10.000000,2067.000000,450.000000,845.000000,354.000000,1.821400,80100.000000 +-119.850000,36.750000,24.000000,1143.000000,245.000000,608.000000,240.000000,2.819400,81100.000000 +-119.850000,36.430000,23.000000,1824.000000,354.000000,1146.000000,362.000000,2.891300,60900.000000 +-119.850000,34.480000,23.000000,1915.000000,277.000000,724.000000,267.000000,6.298700,348200.000000 +-119.850000,34.440000,28.000000,1765.000000,301.000000,1173.000000,297.000000,6.025600,276800.000000 +-119.850000,34.400000,14.000000,2307.000000,650.000000,5723.000000,615.000000,2.165200,37500.000000 +-119.860000,36.820000,12.000000,1488.000000,253.000000,675.000000,223.000000,4.762200,89300.000000 +-119.860000,36.810000,4.000000,4530.000000,1070.000000,1804.000000,837.000000,3.394200,72100.000000 +-119.860000,36.800000,18.000000,2536.000000,516.000000,1196.000000,466.000000,2.559500,67900.000000 +-119.860000,36.780000,7.000000,2232.000000,490.000000,1274.000000,499.000000,2.985300,74700.000000 
+-119.860000,36.450000,19.000000,2439.000000,462.000000,1416.000000,469.000000,2.447400,75600.000000 +-119.860000,36.430000,34.000000,1175.000000,251.000000,683.000000,261.000000,1.717600,58400.000000 +-119.860000,34.410000,24.000000,1576.000000,580.000000,1630.000000,531.000000,1.240000,325000.000000 +-119.860000,34.380000,28.000000,1062.000000,309.000000,1058.000000,305.000000,1.507100,316700.000000 +-119.860000,34.380000,26.000000,1626.000000,375.000000,1580.000000,359.000000,2.147100,187500.000000 +-119.870000,36.930000,13.000000,1429.000000,209.000000,702.000000,205.000000,4.362500,111800.000000 +-119.870000,36.830000,4.000000,4833.000000,784.000000,2088.000000,789.000000,5.178100,122500.000000 +-119.870000,36.810000,6.000000,1891.000000,341.000000,969.000000,330.000000,4.672600,107800.000000 +-119.870000,36.790000,8.000000,2875.000000,548.000000,1718.000000,551.000000,3.652200,80200.000000 +-119.870000,36.790000,7.000000,1932.000000,419.000000,1014.000000,389.000000,3.093800,76700.000000 +-119.870000,36.780000,4.000000,6102.000000,1114.000000,3406.000000,1115.000000,3.421300,84500.000000 +-119.870000,36.760000,34.000000,1649.000000,323.000000,919.000000,316.000000,2.875000,74500.000000 +-119.870000,36.720000,30.000000,1584.000000,316.000000,984.000000,300.000000,2.065800,67900.000000 +-119.870000,36.540000,34.000000,1370.000000,287.000000,818.000000,269.000000,2.404400,72500.000000 +-119.870000,36.340000,26.000000,1414.000000,265.000000,779.000000,249.000000,2.916700,83900.000000 +-119.880000,36.850000,8.000000,2580.000000,372.000000,1111.000000,393.000000,7.500000,256200.000000 +-119.880000,36.830000,2.000000,4055.000000,735.000000,1730.000000,654.000000,4.213200,96500.000000 +-119.880000,36.810000,30.000000,2288.000000,474.000000,1435.000000,425.000000,1.322100,61200.000000 +-119.880000,34.440000,27.000000,4724.000000,793.000000,2394.000000,738.000000,5.595400,261400.000000 +-119.880000,34.430000,16.000000,2206.000000,541.000000,1227.000000,554.000000,3.750000,223100.000000 +-119.880000,34.430000,16.000000,1734.000000,365.000000,962.000000,391.000000,4.477700,282500.000000 +-119.880000,34.430000,14.000000,2472.000000,685.000000,1292.000000,621.000000,3.302600,229500.000000 +-119.890000,36.790000,5.000000,3821.000000,705.000000,2179.000000,694.000000,3.782100,80400.000000 +-119.890000,36.760000,17.000000,1987.000000,335.000000,1152.000000,313.000000,4.171900,126400.000000 +-119.890000,36.730000,43.000000,524.000000,93.000000,302.000000,93.000000,2.614600,81300.000000 +-119.890000,36.700000,32.000000,1485.000000,269.000000,867.000000,271.000000,2.580900,78300.000000 +-119.890000,36.640000,34.000000,1422.000000,237.000000,716.000000,222.000000,2.975000,90000.000000 +-119.890000,34.440000,25.000000,3160.000000,507.000000,1514.000000,523.000000,5.076700,271200.000000 +-119.900000,37.490000,13.000000,2230.000000,443.000000,920.000000,361.000000,3.000000,112000.000000 +-119.900000,36.940000,11.000000,2513.000000,408.000000,1360.000000,415.000000,4.277000,98500.000000 +-119.900000,36.580000,20.000000,1935.000000,363.000000,1319.000000,359.000000,2.481400,74600.000000 +-119.900000,36.200000,43.000000,187.000000,38.000000,106.000000,40.000000,1.875000,137500.000000 +-119.910000,37.230000,17.000000,2171.000000,389.000000,1042.000000,375.000000,3.625000,94400.000000 +-119.910000,36.830000,29.000000,2205.000000,366.000000,1072.000000,345.000000,3.805600,165400.000000 +-119.910000,34.400000,24.000000,2001.000000,365.000000,1170.000000,330.000000,6.099200,268800.000000 
+-119.920000,38.910000,15.000000,3831.000000,625.000000,984.000000,328.000000,5.071800,162500.000000 +-119.920000,36.770000,18.000000,1422.000000,243.000000,702.000000,230.000000,3.620400,119800.000000 +-119.920000,34.440000,17.000000,2143.000000,324.000000,1073.000000,330.000000,6.032100,402600.000000 +-119.930000,38.940000,27.000000,1709.000000,408.000000,97.000000,44.000000,2.491700,200000.000000 +-119.930000,38.720000,15.000000,2061.000000,465.000000,573.000000,196.000000,2.241700,97900.000000 +-119.930000,37.850000,18.000000,473.000000,115.000000,88.000000,41.000000,4.083300,137500.000000 +-119.930000,36.320000,25.000000,8363.000000,1636.000000,7679.000000,1580.000000,2.028500,106300.000000 +-119.930000,35.200000,29.000000,1649.000000,342.000000,671.000000,264.000000,3.060200,69800.000000 +-119.940000,38.960000,20.000000,1451.000000,386.000000,467.000000,255.000000,1.553600,212500.000000 +-119.940000,38.950000,25.000000,1789.000000,536.000000,1134.000000,396.000000,2.320000,91300.000000 +-119.940000,38.920000,24.000000,1258.000000,216.000000,235.000000,96.000000,4.600000,136800.000000 +-119.940000,37.040000,14.000000,1636.000000,253.000000,766.000000,225.000000,3.125000,88500.000000 +-119.950000,38.950000,8.000000,430.000000,107.000000,36.000000,18.000000,2.625000,187500.000000 +-119.950000,38.950000,22.000000,1058.000000,352.000000,851.000000,269.000000,2.020000,87500.000000 +-119.950000,38.950000,21.000000,2046.000000,580.000000,952.000000,353.000000,1.724500,92200.000000 +-119.950000,38.940000,24.000000,2180.000000,517.000000,755.000000,223.000000,2.587500,173400.000000 +-119.950000,37.470000,32.000000,1312.000000,315.000000,600.000000,265.000000,1.500000,91500.000000 +-119.960000,38.940000,19.000000,1429.000000,292.000000,585.000000,188.000000,2.258900,131600.000000 +-119.960000,38.930000,22.000000,2731.000000,632.000000,1215.000000,483.000000,2.830000,110500.000000 +-119.960000,38.840000,17.000000,2722.000000,512.000000,828.000000,289.000000,3.571400,109700.000000 +-119.960000,35.990000,25.000000,1047.000000,270.000000,1505.000000,286.000000,2.097600,47700.000000 +-119.970000,38.930000,24.000000,856.000000,185.000000,388.000000,108.000000,3.180600,107200.000000 +-119.970000,36.570000,17.000000,1497.000000,308.000000,1425.000000,247.000000,2.031300,69400.000000 +-119.980000,38.930000,25.000000,1262.000000,293.000000,534.000000,226.000000,2.660700,90400.000000 +-119.980000,38.920000,27.000000,2682.000000,606.000000,1010.000000,399.000000,3.150000,86900.000000 +-119.980000,38.900000,16.000000,3109.000000,572.000000,885.000000,334.000000,3.500000,134700.000000 +-119.980000,36.900000,26.000000,1284.000000,239.000000,820.000000,254.000000,2.583300,62300.000000 +-119.980000,36.860000,31.000000,2366.000000,482.000000,1933.000000,433.000000,3.023400,65000.000000 +-119.980000,36.740000,26.000000,1453.000000,251.000000,896.000000,260.000000,3.486100,112500.000000 +-119.990000,38.940000,24.000000,1216.000000,289.000000,421.000000,185.000000,3.162500,103600.000000 +-119.990000,38.940000,22.000000,3119.000000,640.000000,786.000000,351.000000,3.080600,118500.000000 +-119.990000,38.930000,23.000000,1882.000000,414.000000,673.000000,277.000000,2.909100,141900.000000 +-119.990000,37.510000,14.000000,2878.000000,617.000000,1011.000000,509.000000,1.398000,103800.000000 +-119.990000,36.800000,45.000000,1270.000000,242.000000,598.000000,214.000000,3.281300,105400.000000 +-119.990000,36.090000,23.000000,333.000000,92.000000,198.000000,55.000000,0.499900,100000.000000 
+-120.000000,38.930000,17.000000,8005.000000,1382.000000,999.000000,383.000000,3.972200,313400.000000 +-120.000000,38.920000,26.000000,529.000000,116.000000,191.000000,83.000000,3.500000,103600.000000 +-120.000000,38.900000,21.000000,1653.000000,419.000000,737.000000,308.000000,1.972700,114100.000000 +-120.000000,38.870000,12.000000,1437.000000,268.000000,395.000000,144.000000,4.225000,127600.000000 +-120.000000,38.520000,16.000000,3045.000000,543.000000,202.000000,102.000000,3.150000,140600.000000 +-120.000000,36.700000,33.000000,1902.000000,370.000000,1168.000000,358.000000,2.685200,70800.000000 +-120.000000,35.910000,16.000000,259.000000,53.000000,131.000000,38.000000,3.125000,62500.000000 +-120.010000,39.260000,26.000000,1930.000000,391.000000,307.000000,138.000000,2.602300,139300.000000 +-120.010000,38.930000,22.000000,3080.000000,610.000000,1045.000000,425.000000,2.996000,126100.000000 +-120.010000,38.920000,23.000000,964.000000,246.000000,485.000000,198.000000,1.718800,96100.000000 +-120.010000,38.910000,27.000000,968.000000,191.000000,283.000000,143.000000,2.093800,94400.000000 +-120.010000,38.910000,17.000000,2732.000000,609.000000,1005.000000,499.000000,1.985100,86700.000000 +-120.010000,38.890000,24.000000,1669.000000,422.000000,589.000000,281.000000,3.008900,100800.000000 +-120.020000,39.240000,32.000000,1347.000000,444.000000,825.000000,303.000000,1.826900,225000.000000 +-120.020000,39.240000,24.000000,1602.000000,426.000000,751.000000,257.000000,1.760900,99300.000000 +-120.020000,38.910000,22.000000,2138.000000,493.000000,829.000000,330.000000,2.205600,107200.000000 +-120.020000,38.860000,19.000000,2429.000000,459.000000,883.000000,300.000000,3.017000,97600.000000 +-120.020000,38.760000,15.000000,3142.000000,618.000000,725.000000,285.000000,4.333300,121400.000000 +-120.020000,37.720000,17.000000,2806.000000,600.000000,990.000000,410.000000,2.381800,88100.000000 +-120.020000,37.570000,17.000000,2116.000000,425.000000,909.000000,319.000000,2.718800,113100.000000 +-120.020000,36.950000,25.000000,2115.000000,482.000000,1976.000000,474.000000,1.843100,53900.000000 +-120.020000,36.800000,25.000000,1270.000000,255.000000,1050.000000,245.000000,2.161800,55300.000000 +-120.030000,38.890000,15.000000,3042.000000,588.000000,918.000000,336.000000,3.833300,118800.000000 +-120.030000,38.190000,26.000000,7005.000000,1358.000000,416.000000,189.000000,2.125000,132500.000000 +-120.040000,36.970000,20.000000,2129.000000,526.000000,1845.000000,522.000000,1.897300,51600.000000 +-120.040000,36.960000,23.000000,2126.000000,506.000000,2091.000000,491.000000,1.371300,51800.000000 +-120.040000,36.950000,36.000000,1528.000000,347.000000,1334.000000,304.000000,1.359400,48300.000000 +-120.040000,36.930000,11.000000,3606.000000,699.000000,2074.000000,644.000000,2.694100,63300.000000 +-120.040000,36.790000,48.000000,1341.000000,239.000000,671.000000,208.000000,2.791700,82800.000000 +-120.040000,36.740000,14.000000,3182.000000,730.000000,2298.000000,721.000000,1.616800,71800.000000 +-120.040000,34.720000,13.000000,3942.000000,585.000000,1542.000000,515.000000,6.605400,500001.000000 +-120.050000,36.980000,16.000000,3705.000000,739.000000,2463.000000,697.000000,2.528800,61800.000000 +-120.050000,36.970000,20.000000,2029.000000,427.000000,983.000000,401.000000,1.844400,47100.000000 +-120.050000,36.960000,37.000000,1000.000000,261.000000,1092.000000,233.000000,1.426700,52300.000000 +-120.050000,36.950000,31.000000,696.000000,254.000000,913.000000,248.000000,1.400000,52500.000000 
+-120.050000,36.950000,18.000000,2287.000000,534.000000,1339.000000,505.000000,2.252700,65200.000000 +-120.050000,36.720000,24.000000,1961.000000,422.000000,1559.000000,374.000000,1.829900,57800.000000 +-120.060000,39.250000,21.000000,2459.000000,525.000000,584.000000,233.000000,3.010000,163500.000000 +-120.060000,39.150000,22.000000,2213.000000,372.000000,98.000000,42.000000,1.191200,170000.000000 +-120.060000,39.090000,30.000000,2979.000000,583.000000,316.000000,124.000000,2.198700,124000.000000 +-120.060000,39.010000,19.000000,2967.000000,528.000000,112.000000,48.000000,4.071400,437500.000000 +-120.060000,37.020000,13.000000,6301.000000,1080.000000,3840.000000,1033.000000,3.525800,84900.000000 +-120.060000,36.980000,12.000000,2710.000000,575.000000,1724.000000,516.000000,1.471200,60400.000000 +-120.060000,36.970000,38.000000,1542.000000,364.000000,1220.000000,334.000000,1.625000,52800.000000 +-120.060000,36.960000,44.000000,1288.000000,295.000000,723.000000,287.000000,1.653400,61400.000000 +-120.060000,36.940000,19.000000,901.000000,183.000000,700.000000,190.000000,2.237500,64300.000000 +-120.060000,36.720000,32.000000,981.000000,237.000000,736.000000,249.000000,1.800000,60400.000000 +-120.070000,39.240000,20.000000,3729.000000,614.000000,365.000000,152.000000,4.962000,169500.000000 +-120.070000,37.340000,16.000000,1667.000000,372.000000,762.000000,283.000000,1.750000,87500.000000 +-120.070000,36.980000,12.000000,1790.000000,379.000000,1399.000000,397.000000,2.538800,59600.000000 +-120.070000,36.970000,28.000000,1563.000000,403.000000,1564.000000,408.000000,1.566200,48000.000000 +-120.070000,36.970000,27.000000,968.000000,240.000000,587.000000,231.000000,1.607100,55000.000000 +-120.070000,36.960000,34.000000,1457.000000,239.000000,557.000000,226.000000,3.618100,96500.000000 +-120.070000,36.740000,19.000000,2627.000000,502.000000,1295.000000,441.000000,3.087000,88200.000000 +-120.080000,41.790000,34.000000,1355.000000,262.000000,434.000000,178.000000,2.090300,56100.000000 +-120.080000,39.230000,19.000000,1746.000000,306.000000,251.000000,104.000000,4.818200,146900.000000 +-120.080000,38.800000,34.000000,1988.000000,511.000000,36.000000,15.000000,4.625000,162500.000000 +-120.080000,37.060000,18.000000,402.000000,76.000000,213.000000,71.000000,1.906300,95800.000000 +-120.080000,36.970000,13.000000,3356.000000,589.000000,1458.000000,601.000000,3.825700,94200.000000 +-120.080000,36.960000,36.000000,2074.000000,349.000000,954.000000,363.000000,3.113600,73800.000000 +-120.080000,36.950000,41.000000,1164.000000,211.000000,476.000000,171.000000,2.419600,70700.000000 +-120.080000,36.790000,38.000000,1446.000000,285.000000,928.000000,255.000000,2.980800,89600.000000 +-120.080000,36.720000,22.000000,1339.000000,251.000000,820.000000,276.000000,3.600000,83200.000000 +-120.080000,36.340000,18.000000,1524.000000,414.000000,2030.000000,356.000000,2.115300,112500.000000 +-120.080000,34.640000,18.000000,2375.000000,429.000000,1048.000000,369.000000,4.222200,375000.000000 +-120.080000,34.620000,11.000000,3478.000000,588.000000,1693.000000,582.000000,4.655400,272300.000000 +-120.080000,34.590000,24.000000,1874.000000,319.000000,820.000000,315.000000,5.190900,390200.000000 +-120.090000,39.920000,19.000000,2335.000000,518.000000,1028.000000,383.000000,1.726700,60700.000000 +-120.090000,39.400000,17.000000,1076.000000,283.000000,171.000000,64.000000,2.125000,83900.000000 +-120.090000,37.020000,9.000000,1608.000000,297.000000,1057.000000,295.000000,3.714300,81600.000000 
+-120.090000,36.190000,12.000000,1923.000000,559.000000,2809.000000,535.000000,1.419100,55100.000000 +-120.090000,34.620000,18.000000,2708.000000,382.000000,988.000000,359.000000,5.519400,367000.000000 +-120.090000,34.610000,11.000000,586.000000,125.000000,317.000000,74.000000,2.890600,84400.000000 +-120.100000,39.370000,10.000000,2325.000000,410.000000,1016.000000,373.000000,4.520800,117300.000000 +-120.100000,39.330000,9.000000,2738.000000,510.000000,1193.000000,412.000000,4.395800,124800.000000 +-120.100000,39.200000,20.000000,1703.000000,294.000000,409.000000,174.000000,3.087000,196900.000000 +-120.100000,39.190000,18.000000,3824.000000,559.000000,241.000000,106.000000,5.545600,360000.000000 +-120.100000,39.170000,33.000000,1849.000000,384.000000,218.000000,92.000000,1.708300,143800.000000 +-120.100000,38.910000,33.000000,1561.000000,282.000000,30.000000,11.000000,1.875000,500001.000000 +-120.100000,36.960000,20.000000,2100.000000,317.000000,910.000000,274.000000,4.818700,90900.000000 +-120.100000,36.660000,19.000000,2020.000000,416.000000,1341.000000,360.000000,1.700000,69000.000000 +-120.100000,36.210000,12.000000,1462.000000,356.000000,1708.000000,367.000000,1.508600,64700.000000 +-120.100000,36.160000,17.000000,598.000000,160.000000,715.000000,146.000000,2.329500,55000.000000 +-120.110000,39.240000,21.000000,3005.000000,574.000000,385.000000,150.000000,3.119300,153300.000000 +-120.110000,39.210000,18.000000,2245.000000,392.000000,421.000000,162.000000,4.579500,158300.000000 +-120.110000,36.960000,17.000000,3344.000000,570.000000,1624.000000,536.000000,3.895200,95300.000000 +-120.110000,34.620000,16.000000,2943.000000,394.000000,959.000000,359.000000,6.209400,440000.000000 +-120.120000,41.400000,33.000000,2820.000000,515.000000,976.000000,403.000000,2.606200,52600.000000 +-120.120000,39.180000,17.000000,2839.000000,525.000000,390.000000,189.000000,3.566700,179200.000000 +-120.120000,36.010000,18.000000,1165.000000,334.000000,1119.000000,308.000000,2.216700,48500.000000 +-120.120000,35.990000,7.000000,2049.000000,482.000000,1387.000000,422.000000,2.250000,56200.000000 +-120.120000,34.600000,10.000000,2426.000000,426.000000,966.000000,419.000000,5.510600,290900.000000 +-120.130000,39.060000,22.000000,2465.000000,539.000000,381.000000,146.000000,2.875000,87500.000000 +-120.130000,37.930000,5.000000,111.000000,26.000000,58.000000,25.000000,1.675000,112500.000000 +-120.130000,36.870000,32.000000,2089.000000,468.000000,1765.000000,427.000000,2.234000,61700.000000 +-120.130000,35.870000,26.000000,48.000000,8.000000,13.000000,8.000000,2.375000,71300.000000 +-120.130000,34.630000,11.000000,2137.000000,339.000000,916.000000,338.000000,5.522100,394900.000000 +-120.140000,36.040000,27.000000,2533.000000,518.000000,1371.000000,461.000000,2.970800,60900.000000 +-120.140000,36.000000,33.000000,1726.000000,420.000000,1371.000000,388.000000,2.033500,43900.000000 +-120.140000,34.600000,22.000000,2136.000000,465.000000,1143.000000,409.000000,2.947900,243100.000000 +-120.140000,34.590000,9.000000,2536.000000,499.000000,832.000000,385.000000,2.574300,309800.000000 +-120.140000,34.590000,24.000000,1601.000000,282.000000,731.000000,285.000000,4.202600,259800.000000 +-120.150000,39.800000,19.000000,785.000000,151.000000,366.000000,140.000000,3.062500,82500.000000 +-120.150000,39.360000,9.000000,2254.000000,400.000000,694.000000,243.000000,5.685600,138100.000000 +-120.150000,39.200000,14.000000,1382.000000,242.000000,141.000000,66.000000,4.101600,283300.000000 
+-120.150000,39.150000,25.000000,1669.000000,348.000000,163.000000,78.000000,5.750000,176600.000000 +-120.150000,37.690000,13.000000,866.000000,252.000000,369.000000,165.000000,2.875000,70200.000000 +-120.160000,39.140000,21.000000,2484.000000,460.000000,309.000000,144.000000,3.972200,127800.000000 +-120.160000,39.040000,18.000000,2040.000000,402.000000,350.000000,129.000000,4.031300,126000.000000 +-120.160000,39.010000,16.000000,1463.000000,264.000000,54.000000,26.000000,4.975000,206300.000000 +-120.160000,37.120000,29.000000,1995.000000,392.000000,1261.000000,354.000000,1.907300,79200.000000 +-120.160000,36.960000,18.000000,508.000000,104.000000,393.000000,114.000000,3.000000,156300.000000 +-120.160000,34.610000,17.000000,921.000000,189.000000,434.000000,219.000000,3.018500,500001.000000 +-120.170000,39.330000,18.000000,1046.000000,204.000000,486.000000,179.000000,4.119000,110900.000000 +-120.170000,39.330000,10.000000,614.000000,141.000000,195.000000,95.000000,0.928300,116300.000000 +-120.170000,39.320000,14.000000,2421.000000,489.000000,1000.000000,354.000000,3.565200,119800.000000 +-120.180000,39.280000,14.000000,10098.000000,1545.000000,701.000000,254.000000,4.081900,141300.000000 +-120.180000,39.170000,18.000000,1703.000000,360.000000,354.000000,163.000000,3.656300,146900.000000 +-120.180000,36.590000,25.000000,948.000000,198.000000,613.000000,171.000000,2.302600,90600.000000 +-120.180000,34.750000,17.000000,2074.000000,382.000000,1035.000000,359.000000,3.795800,400000.000000 +-120.190000,39.350000,7.000000,2611.000000,395.000000,482.000000,159.000000,5.062200,174100.000000 +-120.190000,39.320000,16.000000,1536.000000,298.000000,646.000000,208.000000,2.359400,155700.000000 +-120.190000,38.420000,11.000000,1568.000000,369.000000,82.000000,33.000000,3.125000,77500.000000 +-120.190000,38.070000,43.000000,102.000000,19.000000,44.000000,13.000000,0.499900,162500.000000 +-120.190000,38.030000,17.000000,8651.000000,1579.000000,2071.000000,757.000000,3.107600,115800.000000 +-120.190000,36.610000,29.000000,1479.000000,338.000000,1408.000000,322.000000,2.293000,57200.000000 +-120.190000,36.600000,25.000000,875.000000,214.000000,931.000000,214.000000,1.553600,58300.000000 +-120.200000,40.260000,26.000000,2399.000000,518.000000,1037.000000,443.000000,2.676500,47600.000000 +-120.200000,39.330000,26.000000,1988.000000,379.000000,905.000000,321.000000,3.784100,109500.000000 +-120.200000,39.120000,15.000000,2146.000000,361.000000,197.000000,76.000000,4.131600,200000.000000 +-120.200000,37.840000,9.000000,13670.000000,2453.000000,2811.000000,1193.000000,3.258900,137900.000000 +-120.200000,37.800000,30.000000,1189.000000,255.000000,446.000000,165.000000,3.483800,112500.000000 +-120.200000,34.630000,14.000000,2647.000000,515.000000,1487.000000,488.000000,4.451900,227900.000000 +-120.200000,34.610000,15.000000,2958.000000,690.000000,1348.000000,617.000000,3.858200,215200.000000 +-120.210000,39.350000,7.000000,914.000000,159.000000,85.000000,34.000000,4.791700,187500.000000 +-120.210000,36.980000,21.000000,1667.000000,303.000000,861.000000,276.000000,2.601200,92200.000000 +-120.220000,39.350000,8.000000,1872.000000,281.000000,203.000000,71.000000,4.588200,198400.000000 +-120.220000,39.200000,22.000000,8259.000000,1409.000000,845.000000,353.000000,3.369900,244000.000000 +-120.220000,38.050000,14.000000,3803.000000,689.000000,1129.000000,477.000000,2.718800,137000.000000 +-120.220000,36.490000,14.000000,1508.000000,347.000000,1679.000000,345.000000,2.478600,56000.000000 
+-120.230000,39.560000,14.000000,1781.000000,346.000000,734.000000,287.000000,2.460000,93000.000000 +-120.230000,39.360000,7.000000,2045.000000,358.000000,245.000000,92.000000,4.048100,152300.000000 +-120.230000,37.980000,14.000000,1954.000000,368.000000,917.000000,316.000000,3.152300,93300.000000 +-120.240000,39.670000,52.000000,296.000000,63.000000,143.000000,56.000000,3.625000,68600.000000 +-120.240000,39.670000,40.000000,690.000000,129.000000,305.000000,110.000000,2.362500,62500.000000 +-120.240000,39.350000,8.000000,4195.000000,725.000000,291.000000,115.000000,3.479200,180800.000000 +-120.240000,38.010000,11.000000,1214.000000,228.000000,633.000000,199.000000,3.125000,148600.000000 +-120.240000,37.210000,31.000000,2447.000000,465.000000,1313.000000,352.000000,3.392900,93800.000000 +-120.250000,39.340000,9.000000,2739.000000,555.000000,294.000000,110.000000,3.184200,162500.000000 +-120.250000,38.040000,22.000000,4173.000000,763.000000,1086.000000,444.000000,2.556200,136200.000000 +-120.250000,38.030000,21.000000,4924.000000,966.000000,1175.000000,454.000000,2.945700,116500.000000 +-120.250000,37.230000,34.000000,1656.000000,328.000000,1110.000000,332.000000,2.184500,59900.000000 +-120.250000,37.040000,21.000000,1724.000000,317.000000,1006.000000,290.000000,3.286800,91700.000000 +-120.250000,36.650000,31.000000,1177.000000,221.000000,744.000000,223.000000,2.493700,66000.000000 +-120.260000,39.320000,24.000000,6012.000000,1227.000000,780.000000,358.000000,3.004300,122100.000000 +-120.260000,38.130000,17.000000,301.000000,94.000000,122.000000,47.000000,4.058300,87500.000000 +-120.260000,37.990000,12.000000,2726.000000,517.000000,1351.000000,474.000000,3.500000,107100.000000 +-120.260000,37.130000,33.000000,1239.000000,250.000000,648.000000,227.000000,2.027800,58800.000000 +-120.260000,37.110000,33.000000,1097.000000,254.000000,627.000000,253.000000,1.279400,50700.000000 +-120.270000,38.310000,13.000000,3297.000000,662.000000,267.000000,97.000000,3.075000,108300.000000 +-120.270000,38.290000,10.000000,3486.000000,695.000000,298.000000,124.000000,3.354200,103800.000000 +-120.270000,38.020000,13.000000,3839.000000,715.000000,1486.000000,532.000000,3.187500,99800.000000 +-120.270000,37.120000,42.000000,1142.000000,236.000000,597.000000,210.000000,1.727900,52300.000000 +-120.270000,37.120000,36.000000,1219.000000,258.000000,639.000000,245.000000,1.946400,57000.000000 +-120.270000,37.120000,17.000000,3328.000000,628.000000,1580.000000,619.000000,2.986100,81500.000000 +-120.270000,37.110000,18.000000,1277.000000,234.000000,674.000000,238.000000,2.669400,75900.000000 +-120.270000,36.290000,11.000000,1337.000000,412.000000,1376.000000,318.000000,2.439800,87500.000000 +-120.270000,34.720000,14.000000,1289.000000,277.000000,693.000000,237.000000,3.256900,230800.000000 +-120.280000,38.070000,13.000000,1996.000000,410.000000,618.000000,218.000000,2.908300,104600.000000 +-120.280000,38.030000,13.000000,2095.000000,391.000000,860.000000,331.000000,3.683800,145700.000000 +-120.280000,37.900000,17.000000,1047.000000,212.000000,530.000000,196.000000,2.153800,153300.000000 +-120.290000,38.010000,12.000000,3014.000000,560.000000,1424.000000,485.000000,3.072900,105100.000000 +-120.290000,36.880000,34.000000,1391.000000,297.000000,943.000000,281.000000,2.421900,83900.000000 +-120.290000,35.560000,15.000000,4760.000000,871.000000,2459.000000,734.000000,2.811000,142100.000000 +-120.300000,38.900000,11.000000,1961.000000,435.000000,113.000000,53.000000,0.922700,95500.000000 
+-120.300000,38.040000,6.000000,1281.000000,245.000000,422.000000,160.000000,3.287500,111300.000000 +-120.300000,37.990000,23.000000,1908.000000,383.000000,984.000000,374.000000,2.517000,153500.000000 +-120.300000,37.340000,33.000000,993.000000,186.000000,556.000000,175.000000,2.428600,103600.000000 +-120.300000,35.100000,16.000000,2819.000000,479.000000,1068.000000,365.000000,4.546100,270800.000000 +-120.310000,37.640000,11.000000,2403.000000,497.000000,890.000000,344.000000,3.000000,120800.000000 +-120.310000,37.290000,40.000000,1542.000000,341.000000,1283.000000,341.000000,1.692900,55900.000000 +-120.320000,38.710000,13.000000,1115.000000,255.000000,86.000000,32.000000,3.566700,115600.000000 +-120.320000,37.910000,16.000000,108.000000,18.000000,54.000000,22.000000,4.375000,100000.000000 +-120.320000,37.290000,9.000000,695.000000,188.000000,810.000000,190.000000,1.617200,56300.000000 +-120.330000,38.260000,13.000000,2962.000000,546.000000,252.000000,103.000000,4.406300,155800.000000 +-120.330000,37.970000,17.000000,2530.000000,526.000000,1024.000000,496.000000,2.005700,118900.000000 +-120.330000,34.870000,24.000000,2590.000000,404.000000,1093.000000,338.000000,3.937500,341200.000000 +-120.340000,38.250000,17.000000,5497.000000,1056.000000,997.000000,408.000000,2.982100,111500.000000 +-120.340000,38.230000,10.000000,3757.000000,722.000000,546.000000,223.000000,3.750000,121400.000000 +-120.350000,40.630000,33.000000,240.000000,49.000000,63.000000,22.000000,3.625000,200000.000000 +-120.350000,38.040000,16.000000,1499.000000,326.000000,733.000000,286.000000,2.572900,118800.000000 +-120.350000,37.990000,3.000000,1167.000000,306.000000,422.000000,186.000000,2.419100,217500.000000 +-120.350000,37.980000,4.000000,1658.000000,301.000000,676.000000,278.000000,3.571400,149500.000000 +-120.350000,37.950000,13.000000,2104.000000,407.000000,960.000000,401.000000,2.400000,177000.000000 +-120.350000,37.860000,25.000000,287.000000,57.000000,118.000000,50.000000,2.305600,162500.000000 +-120.350000,37.310000,17.000000,605.000000,159.000000,416.000000,83.000000,2.000000,87500.000000 +-120.350000,36.160000,18.000000,1519.000000,296.000000,846.000000,272.000000,2.779200,85300.000000 +-120.350000,36.140000,9.000000,2671.000000,647.000000,1484.000000,541.000000,1.707500,60400.000000 +-120.360000,36.140000,18.000000,1206.000000,274.000000,622.000000,217.000000,1.826400,62000.000000 +-120.360000,36.130000,29.000000,1938.000000,434.000000,1306.000000,415.000000,3.013400,55500.000000 +-120.370000,40.170000,21.000000,789.000000,141.000000,406.000000,146.000000,2.119800,73500.000000 +-120.370000,38.250000,13.000000,4495.000000,856.000000,1149.000000,459.000000,2.535200,113700.000000 +-120.370000,38.230000,13.000000,4401.000000,829.000000,924.000000,383.000000,2.694200,123500.000000 +-120.370000,37.980000,29.000000,2508.000000,591.000000,1112.000000,550.000000,1.602100,91400.000000 +-120.370000,36.160000,36.000000,613.000000,124.000000,310.000000,124.000000,3.065800,65000.000000 +-120.370000,36.150000,34.000000,2084.000000,339.000000,868.000000,347.000000,4.381000,86300.000000 +-120.370000,36.130000,10.000000,2522.000000,533.000000,1335.000000,493.000000,3.263900,86400.000000 +-120.370000,34.900000,17.000000,2649.000000,386.000000,1057.000000,362.000000,4.781300,326800.000000 +-120.370000,34.690000,18.000000,1868.000000,315.000000,747.000000,265.000000,4.794600,290600.000000 +-120.380000,40.980000,27.000000,777.000000,185.000000,318.000000,115.000000,1.683300,40000.000000 
+-120.380000,39.820000,10.000000,1262.000000,258.000000,510.000000,209.000000,2.166700,92800.000000 +-120.380000,37.990000,36.000000,2864.000000,603.000000,1155.000000,565.000000,2.357100,113400.000000 +-120.380000,37.970000,47.000000,1060.000000,219.000000,496.000000,205.000000,2.578100,104800.000000 +-120.380000,36.760000,25.000000,991.000000,272.000000,941.000000,262.000000,1.812500,58000.000000 +-120.380000,36.760000,12.000000,932.000000,244.000000,1043.000000,243.000000,1.403800,54300.000000 +-120.380000,36.750000,25.000000,1689.000000,495.000000,1745.000000,457.000000,1.905600,60000.000000 +-120.380000,36.150000,17.000000,2279.000000,448.000000,1200.000000,420.000000,2.746100,70000.000000 +-120.380000,34.960000,9.000000,2813.000000,492.000000,1144.000000,490.000000,4.043100,226800.000000 +-120.390000,38.030000,20.000000,1551.000000,309.000000,647.000000,228.000000,2.609400,139100.000000 +-120.390000,38.000000,33.000000,2177.000000,404.000000,891.000000,383.000000,3.212000,105200.000000 +-120.390000,37.960000,10.000000,2554.000000,501.000000,922.000000,439.000000,2.109400,164000.000000 +-120.390000,36.780000,11.000000,1947.000000,488.000000,2104.000000,486.000000,1.718400,55200.000000 +-120.390000,34.520000,40.000000,2162.000000,395.000000,1010.000000,332.000000,2.566700,239300.000000 +-120.400000,38.060000,12.000000,1430.000000,310.000000,517.000000,240.000000,2.654400,128100.000000 +-120.400000,38.000000,17.000000,2098.000000,370.000000,912.000000,354.000000,2.654400,112600.000000 +-120.400000,37.980000,19.000000,2010.000000,433.000000,910.000000,390.000000,2.669600,121200.000000 +-120.400000,37.920000,22.000000,1022.000000,194.000000,517.000000,198.000000,3.625000,99400.000000 +-120.400000,37.300000,28.000000,1401.000000,292.000000,967.000000,257.000000,1.591700,89400.000000 +-120.400000,34.950000,8.000000,1885.000000,286.000000,835.000000,290.000000,5.020600,261000.000000 +-120.400000,34.870000,10.000000,2197.000000,329.000000,1064.000000,319.000000,4.976600,199600.000000 +-120.410000,38.330000,17.000000,1463.000000,338.000000,529.000000,226.000000,3.024000,100900.000000 +-120.410000,38.030000,14.000000,2061.000000,465.000000,859.000000,462.000000,2.128900,115300.000000 +-120.410000,37.880000,16.000000,744.000000,141.000000,311.000000,122.000000,4.423100,87500.000000 +-120.410000,37.160000,21.000000,1684.000000,341.000000,1052.000000,312.000000,2.080900,95800.000000 +-120.410000,36.770000,24.000000,1335.000000,312.000000,1180.000000,267.000000,1.947000,68900.000000 +-120.410000,34.960000,9.000000,2712.000000,428.000000,1116.000000,415.000000,4.553600,190100.000000 +-120.410000,34.960000,21.000000,1774.000000,263.000000,724.000000,237.000000,4.650000,152500.000000 +-120.410000,34.960000,16.000000,2299.000000,403.000000,1245.000000,395.000000,4.212500,148300.000000 +-120.410000,34.880000,8.000000,3119.000000,620.000000,1159.000000,544.000000,3.528800,165500.000000 +-120.410000,34.880000,4.000000,3680.000000,559.000000,1678.000000,569.000000,5.063900,201700.000000 +-120.410000,34.870000,32.000000,1997.000000,317.000000,866.000000,281.000000,5.062000,158900.000000 +-120.410000,34.870000,15.000000,1534.000000,251.000000,761.000000,240.000000,4.902800,193600.000000 +-120.410000,34.860000,15.000000,978.000000,187.000000,407.000000,182.000000,4.375000,158000.000000 +-120.420000,40.380000,26.000000,1652.000000,313.000000,762.000000,280.000000,2.475700,85600.000000 +-120.420000,38.420000,18.000000,2912.000000,663.000000,999.000000,411.000000,2.734400,91900.000000 
+-120.420000,37.950000,19.000000,2787.000000,578.000000,1208.000000,532.000000,2.492200,98700.000000 +-120.420000,34.970000,18.000000,1932.000000,350.000000,1071.000000,346.000000,4.125000,139800.000000 +-120.420000,34.960000,31.000000,3518.000000,608.000000,1386.000000,572.000000,3.621200,151400.000000 +-120.420000,34.960000,20.000000,1678.000000,307.000000,840.000000,316.000000,4.434200,160700.000000 +-120.420000,34.960000,19.000000,2298.000000,511.000000,1246.000000,513.000000,2.212000,132000.000000 +-120.420000,34.960000,14.000000,2069.000000,343.000000,1240.000000,338.000000,4.506600,149800.000000 +-120.420000,34.950000,33.000000,3404.000000,711.000000,1579.000000,639.000000,3.107800,146700.000000 +-120.420000,34.940000,32.000000,2844.000000,551.000000,1337.000000,516.000000,2.718800,133700.000000 +-120.420000,34.890000,24.000000,2020.000000,307.000000,855.000000,283.000000,5.009900,162500.000000 +-120.420000,34.870000,18.000000,2505.000000,376.000000,1162.000000,382.000000,4.835900,195700.000000 +-120.430000,38.250000,13.000000,763.000000,161.000000,311.000000,125.000000,2.458300,112500.000000 +-120.430000,38.020000,15.000000,1613.000000,299.000000,655.000000,251.000000,3.687500,186000.000000 +-120.430000,37.320000,16.000000,1170.000000,178.000000,566.000000,181.000000,5.252200,125300.000000 +-120.430000,36.990000,16.000000,1027.000000,199.000000,673.000000,193.000000,2.968800,63800.000000 +-120.430000,36.180000,29.000000,579.000000,116.000000,218.000000,99.000000,2.145800,104200.000000 +-120.430000,35.170000,16.000000,947.000000,163.000000,477.000000,137.000000,3.851000,315000.000000 +-120.430000,34.980000,21.000000,2725.000000,514.000000,1466.000000,488.000000,3.663900,128600.000000 +-120.430000,34.970000,28.000000,1433.000000,270.000000,1001.000000,278.000000,4.012500,130100.000000 +-120.430000,34.960000,24.000000,2739.000000,414.000000,1171.000000,413.000000,4.815500,162900.000000 +-120.430000,34.960000,24.000000,1799.000000,470.000000,1416.000000,408.000000,2.067300,136900.000000 +-120.430000,34.960000,19.000000,2350.000000,631.000000,1291.000000,515.000000,1.034900,130800.000000 +-120.430000,34.950000,50.000000,1966.000000,413.000000,985.000000,403.000000,2.350600,136100.000000 +-120.430000,34.950000,43.000000,2020.000000,344.000000,692.000000,310.000000,3.681500,181800.000000 +-120.430000,34.930000,4.000000,2866.000000,648.000000,1311.000000,578.000000,2.864900,186500.000000 +-120.430000,34.930000,10.000000,2980.000000,585.000000,1593.000000,562.000000,3.285000,218300.000000 +-120.430000,34.900000,27.000000,2019.000000,354.000000,1029.000000,346.000000,3.539100,144700.000000 +-120.430000,34.890000,30.000000,1979.000000,342.000000,999.000000,320.000000,5.028600,158000.000000 +-120.430000,34.890000,28.000000,2862.000000,478.000000,1384.000000,463.000000,4.669400,158200.000000 +-120.430000,34.870000,21.000000,2131.000000,329.000000,1094.000000,353.000000,4.664800,193000.000000 +-120.430000,34.860000,17.000000,3172.000000,506.000000,1538.000000,473.000000,4.312500,168100.000000 +-120.430000,34.860000,17.000000,1932.000000,347.000000,874.000000,312.000000,3.820300,141500.000000 +-120.440000,37.310000,21.000000,6911.000000,1341.000000,3967.000000,1297.000000,3.051500,95200.000000 +-120.440000,37.290000,18.000000,1260.000000,268.000000,576.000000,263.000000,1.722200,101500.000000 +-120.440000,37.280000,12.000000,2855.000000,598.000000,1658.000000,586.000000,2.392900,81100.000000 +-120.440000,36.840000,29.000000,1563.000000,293.000000,883.000000,288.000000,2.818200,90500.000000 
+-120.440000,34.970000,26.000000,1705.000000,344.000000,1605.000000,307.000000,3.758900,113700.000000 +-120.440000,34.970000,22.000000,1619.000000,360.000000,1509.000000,384.000000,1.794100,110300.000000 +-120.440000,34.960000,39.000000,1228.000000,379.000000,851.000000,341.000000,1.899000,113300.000000 +-120.440000,34.960000,34.000000,1248.000000,284.000000,986.000000,272.000000,2.916700,104200.000000 +-120.440000,34.940000,29.000000,1877.000000,516.000000,1634.000000,492.000000,1.687500,122700.000000 +-120.440000,34.940000,24.000000,2481.000000,476.000000,1101.000000,474.000000,3.157600,147200.000000 +-120.440000,34.930000,16.000000,2098.000000,558.000000,1252.000000,492.000000,2.150900,67500.000000 +-120.440000,34.930000,15.000000,868.000000,244.000000,1133.000000,253.000000,2.099500,87500.000000 +-120.440000,34.910000,12.000000,3189.000000,463.000000,1200.000000,442.000000,5.299000,226800.000000 +-120.440000,34.880000,9.000000,3124.000000,415.000000,1169.000000,407.000000,6.769400,275100.000000 +-120.440000,34.870000,13.000000,2312.000000,352.000000,1084.000000,388.000000,5.038000,194000.000000 +-120.440000,34.680000,6.000000,2187.000000,277.000000,697.000000,273.000000,6.268500,307400.000000 +-120.440000,34.660000,22.000000,3231.000000,549.000000,1739.000000,581.000000,4.541700,142400.000000 +-120.440000,34.650000,30.000000,2265.000000,512.000000,1402.000000,471.000000,1.975000,134000.000000 +-120.440000,34.640000,8.000000,787.000000,126.000000,446.000000,133.000000,4.602300,163400.000000 +-120.450000,39.800000,47.000000,2149.000000,456.000000,965.000000,419.000000,1.782900,55900.000000 +-120.450000,37.790000,8.000000,2687.000000,495.000000,5087.000000,385.000000,3.171900,115400.000000 +-120.450000,37.320000,21.000000,1318.000000,202.000000,618.000000,197.000000,4.821400,117800.000000 +-120.450000,37.320000,19.000000,3136.000000,466.000000,1631.000000,484.000000,3.647100,101400.000000 +-120.450000,37.310000,20.000000,4379.000000,753.000000,2055.000000,716.000000,3.765200,133500.000000 +-120.450000,36.860000,34.000000,673.000000,173.000000,539.000000,182.000000,2.352300,66000.000000 +-120.450000,36.850000,20.000000,1519.000000,376.000000,1681.000000,370.000000,2.175900,58100.000000 +-120.450000,34.970000,25.000000,1920.000000,380.000000,1434.000000,388.000000,3.036800,129300.000000 +-120.450000,34.970000,10.000000,1897.000000,354.000000,1353.000000,357.000000,3.767900,131300.000000 +-120.450000,34.960000,26.000000,1949.000000,396.000000,1575.000000,377.000000,2.875000,121400.000000 +-120.450000,34.960000,21.000000,2121.000000,445.000000,2211.000000,463.000000,4.060300,117600.000000 +-120.450000,34.960000,11.000000,1299.000000,280.000000,1158.000000,223.000000,2.555600,129200.000000 +-120.450000,34.950000,32.000000,1574.000000,447.000000,1772.000000,463.000000,1.862500,90200.000000 +-120.450000,34.950000,10.000000,2207.000000,644.000000,2232.000000,543.000000,2.375000,98500.000000 +-120.450000,34.940000,26.000000,1058.000000,232.000000,891.000000,243.000000,3.642200,120600.000000 +-120.450000,34.940000,24.000000,1702.000000,447.000000,1240.000000,417.000000,2.409100,115500.000000 +-120.450000,34.910000,16.000000,712.000000,147.000000,355.000000,162.000000,2.560000,150000.000000 +-120.450000,34.880000,15.000000,2143.000000,286.000000,929.000000,315.000000,5.730600,269700.000000 +-120.450000,34.860000,23.000000,3415.000000,778.000000,1492.000000,633.000000,2.279100,114800.000000 +-120.450000,34.710000,21.000000,1868.000000,268.000000,522.000000,255.000000,6.467800,249300.000000 
+-120.450000,34.650000,27.000000,2253.000000,382.000000,1197.000000,384.000000,3.320300,134700.000000 +-120.450000,34.650000,27.000000,2215.000000,578.000000,1544.000000,527.000000,1.925700,135300.000000 +-120.450000,34.650000,25.000000,980.000000,276.000000,896.000000,245.000000,2.000000,87500.000000 +-120.450000,34.640000,40.000000,1051.000000,235.000000,574.000000,201.000000,2.086500,111500.000000 +-120.450000,34.640000,34.000000,2571.000000,499.000000,1105.000000,451.000000,3.777800,150000.000000 +-120.450000,34.640000,27.000000,2696.000000,622.000000,1322.000000,543.000000,3.035200,135400.000000 +-120.450000,34.640000,17.000000,1226.000000,277.000000,484.000000,224.000000,3.216700,112500.000000 +-120.450000,34.630000,32.000000,1840.000000,309.000000,828.000000,333.000000,4.548600,172400.000000 +-120.450000,34.630000,25.000000,2445.000000,368.000000,983.000000,363.000000,4.928600,180100.000000 +-120.460000,39.830000,18.000000,3406.000000,673.000000,1567.000000,617.000000,2.271700,75900.000000 +-120.460000,38.150000,16.000000,4221.000000,781.000000,1516.000000,697.000000,2.381600,116000.000000 +-120.460000,38.090000,16.000000,3758.000000,715.000000,1777.000000,615.000000,3.000000,122600.000000 +-120.460000,37.650000,17.000000,315.000000,89.000000,130.000000,58.000000,1.446400,79200.000000 +-120.460000,37.510000,22.000000,2704.000000,497.000000,1432.000000,399.000000,2.900000,83100.000000 +-120.460000,37.330000,4.000000,786.000000,116.000000,368.000000,109.000000,6.321500,138200.000000 +-120.460000,37.330000,17.000000,6111.000000,1171.000000,2950.000000,1104.000000,3.285200,98800.000000 +-120.460000,37.300000,36.000000,3346.000000,739.000000,2151.000000,713.000000,2.309500,68300.000000 +-120.460000,37.290000,30.000000,2972.000000,635.000000,1940.000000,590.000000,2.359400,72300.000000 +-120.460000,36.870000,20.000000,1287.000000,310.000000,954.000000,269.000000,1.338600,63000.000000 +-120.460000,34.740000,15.000000,2185.000000,386.000000,827.000000,336.000000,5.376500,223100.000000 +-120.460000,34.710000,17.000000,2830.000000,430.000000,1035.000000,416.000000,4.929200,207200.000000 +-120.460000,34.660000,5.000000,2904.000000,702.000000,1302.000000,618.000000,3.008100,135200.000000 +-120.460000,34.650000,14.000000,885.000000,223.000000,533.000000,224.000000,2.596600,109300.000000 +-120.460000,34.650000,10.000000,2143.000000,593.000000,1167.000000,548.000000,2.081900,103300.000000 +-120.460000,34.640000,37.000000,1697.000000,334.000000,740.000000,272.000000,2.380400,148000.000000 +-120.460000,34.640000,16.000000,686.000000,217.000000,614.000000,200.000000,0.810600,83300.000000 +-120.460000,34.640000,11.000000,562.000000,164.000000,504.000000,147.000000,2.016100,118800.000000 +-120.460000,34.630000,48.000000,1408.000000,301.000000,682.000000,279.000000,2.927100,146600.000000 +-120.470000,37.960000,25.000000,2505.000000,529.000000,1145.000000,483.000000,2.006000,103000.000000 +-120.470000,37.340000,9.000000,2934.000000,511.000000,1227.000000,501.000000,3.674200,117200.000000 +-120.470000,37.320000,15.000000,3952.000000,984.000000,2024.000000,1026.000000,2.558000,121600.000000 +-120.470000,37.300000,40.000000,3693.000000,771.000000,2102.000000,742.000000,2.183800,75000.000000 +-120.470000,37.290000,16.000000,749.000000,222.000000,1277.000000,224.000000,1.205400,60900.000000 +-120.470000,37.280000,19.000000,1548.000000,319.000000,1227.000000,309.000000,1.775600,73300.000000 +-120.470000,35.740000,9.000000,4267.000000,785.000000,2065.000000,691.000000,3.730300,162700.000000 
+-120.470000,35.040000,29.000000,1315.000000,279.000000,926.000000,249.000000,2.937500,144800.000000 +-120.470000,34.710000,21.000000,2535.000000,383.000000,1012.000000,368.000000,5.617700,183800.000000 +-120.470000,34.700000,24.000000,2387.000000,385.000000,1051.000000,382.000000,4.459500,152700.000000 +-120.470000,34.660000,4.000000,3376.000000,525.000000,1684.000000,535.000000,4.923700,166600.000000 +-120.470000,34.650000,31.000000,1438.000000,320.000000,816.000000,270.000000,2.458300,128100.000000 +-120.470000,34.650000,16.000000,2549.000000,428.000000,1486.000000,432.000000,4.287500,150700.000000 +-120.470000,34.640000,8.000000,2482.000000,586.000000,1427.000000,540.000000,3.071000,120400.000000 +-120.470000,34.640000,16.000000,1912.000000,406.000000,1009.000000,417.000000,3.406300,138000.000000 +-120.470000,34.630000,23.000000,2441.000000,463.000000,1392.000000,434.000000,3.791700,142200.000000 +-120.480000,41.820000,20.000000,1367.000000,284.000000,429.000000,181.000000,2.022700,47500.000000 +-120.480000,39.780000,11.000000,513.000000,104.000000,204.000000,86.000000,2.375000,100000.000000 +-120.480000,39.660000,32.000000,1516.000000,289.000000,304.000000,131.000000,1.883900,71000.000000 +-120.480000,37.340000,8.000000,6146.000000,1017.000000,2821.000000,987.000000,4.670000,127600.000000 +-120.480000,37.320000,13.000000,3641.000000,897.000000,1737.000000,788.000000,2.141800,130600.000000 +-120.480000,37.310000,42.000000,2361.000000,512.000000,1684.000000,511.000000,2.355000,75600.000000 +-120.480000,37.300000,49.000000,2919.000000,719.000000,1956.000000,679.000000,1.542700,88500.000000 +-120.480000,37.300000,39.000000,1015.000000,356.000000,875.000000,313.000000,1.500000,67000.000000 +-120.480000,37.290000,17.000000,2266.000000,693.000000,3200.000000,664.000000,1.563500,60400.000000 +-120.480000,35.050000,24.000000,2314.000000,468.000000,1549.000000,463.000000,2.820300,152600.000000 +-120.480000,34.900000,20.000000,3842.000000,630.000000,2490.000000,662.000000,3.055900,120100.000000 +-120.480000,34.700000,26.000000,3069.000000,518.000000,1524.000000,539.000000,4.316200,136400.000000 +-120.480000,34.660000,4.000000,1897.000000,331.000000,915.000000,336.000000,4.156300,172600.000000 +-120.480000,34.650000,26.000000,1933.000000,316.000000,1001.000000,319.000000,4.462800,134400.000000 +-120.490000,37.320000,13.000000,3474.000000,927.000000,2149.000000,821.000000,1.952800,85300.000000 +-120.490000,37.320000,10.000000,1275.000000,255.000000,620.000000,240.000000,3.026300,118300.000000 +-120.490000,37.310000,45.000000,1834.000000,421.000000,1405.000000,407.000000,2.052100,72400.000000 +-120.490000,37.300000,50.000000,985.000000,309.000000,621.000000,250.000000,1.312500,60900.000000 +-120.490000,37.300000,35.000000,1313.000000,324.000000,1350.000000,343.000000,1.750000,57600.000000 +-120.490000,37.290000,17.000000,2414.000000,594.000000,2487.000000,582.000000,1.095500,62700.000000 +-120.490000,37.280000,11.000000,1721.000000,381.000000,1708.000000,373.000000,1.953500,57100.000000 +-120.500000,38.870000,10.000000,81.000000,41.000000,55.000000,16.000000,4.958300,87500.000000 +-120.500000,37.340000,16.000000,1245.000000,231.000000,956.000000,219.000000,3.455900,108000.000000 +-120.500000,37.320000,13.000000,1936.000000,384.000000,1158.000000,367.000000,2.750000,83200.000000 +-120.500000,37.310000,36.000000,2162.000000,433.000000,1048.000000,451.000000,2.679700,81800.000000 +-120.500000,37.300000,29.000000,1572.000000,456.000000,1697.000000,429.000000,1.760000,63200.000000 
+-120.500000,35.030000,10.000000,10463.000000,1756.000000,4660.000000,1715.000000,3.568200,277300.000000 +-120.510000,36.860000,21.000000,1779.000000,399.000000,1446.000000,371.000000,2.441400,71900.000000 +-120.510000,36.550000,20.000000,1193.000000,263.000000,1274.000000,241.000000,1.941700,38800.000000 +-120.520000,35.060000,11.000000,1317.000000,234.000000,655.000000,243.000000,4.361100,329700.000000 +-120.530000,39.790000,18.000000,1234.000000,266.000000,543.000000,201.000000,2.515600,71900.000000 +-120.540000,40.290000,17.000000,3391.000000,623.000000,1529.000000,571.000000,3.402800,91000.000000 +-120.540000,38.750000,9.000000,3006.000000,540.000000,1102.000000,418.000000,3.981200,136600.000000 +-120.540000,38.410000,21.000000,1435.000000,294.000000,668.000000,267.000000,2.566700,77400.000000 +-120.540000,38.070000,37.000000,736.000000,148.000000,339.000000,140.000000,2.287500,79900.000000 +-120.540000,34.970000,23.000000,1353.000000,345.000000,1322.000000,336.000000,1.818500,97800.000000 +-120.550000,41.610000,22.000000,9047.000000,1831.000000,4276.000000,1622.000000,2.080200,47900.000000 +-120.550000,38.460000,16.000000,1443.000000,249.000000,435.000000,181.000000,3.203100,129200.000000 +-120.550000,38.450000,17.000000,2277.000000,474.000000,767.000000,356.000000,2.520800,99100.000000 +-120.550000,38.430000,18.000000,1564.000000,357.000000,618.000000,277.000000,2.354900,108900.000000 +-120.550000,38.310000,18.000000,1411.000000,312.000000,592.000000,230.000000,1.625000,94700.000000 +-120.550000,38.120000,10.000000,1566.000000,325.000000,785.000000,291.000000,2.500000,116100.000000 +-120.550000,38.070000,27.000000,1199.000000,224.000000,463.000000,199.000000,2.906300,92200.000000 +-120.550000,37.320000,21.000000,1410.000000,229.000000,590.000000,205.000000,3.319400,141400.000000 +-120.550000,36.970000,42.000000,1766.000000,344.000000,1084.000000,323.000000,2.329500,74400.000000 +-120.560000,38.480000,14.000000,3545.000000,702.000000,946.000000,411.000000,3.460900,120900.000000 +-120.560000,38.090000,34.000000,2745.000000,559.000000,1150.000000,491.000000,2.365400,94900.000000 +-120.560000,35.130000,15.000000,5818.000000,924.000000,2324.000000,845.000000,4.403300,267600.000000 +-120.560000,35.070000,14.000000,6788.000000,1216.000000,2866.000000,1036.000000,3.360300,280200.000000 +-120.570000,40.430000,15.000000,2045.000000,461.000000,1121.000000,402.000000,2.690200,71500.000000 +-120.570000,39.780000,15.000000,1291.000000,283.000000,582.000000,242.000000,2.121600,102000.000000 +-120.570000,38.350000,17.000000,1504.000000,358.000000,661.000000,250.000000,2.260400,84800.000000 +-120.570000,37.430000,39.000000,2235.000000,412.000000,1268.000000,402.000000,2.675800,74600.000000 +-120.570000,37.350000,18.000000,704.000000,176.000000,520.000000,154.000000,3.003000,101300.000000 +-120.570000,35.180000,16.000000,5209.000000,917.000000,2284.000000,809.000000,4.040300,346100.000000 +-120.570000,35.120000,39.000000,1656.000000,333.000000,866.000000,317.000000,2.882400,195200.000000 +-120.570000,35.110000,18.000000,2920.000000,556.000000,1068.000000,552.000000,3.524200,156800.000000 +-120.570000,34.960000,38.000000,1145.000000,297.000000,1107.000000,296.000000,2.177600,89100.000000 +-120.570000,34.960000,27.000000,1401.000000,294.000000,1306.000000,286.000000,2.580900,83200.000000 +-120.580000,40.370000,16.000000,3412.000000,667.000000,1873.000000,590.000000,2.266100,61800.000000 +-120.580000,39.270000,15.000000,4126.000000,903.000000,723.000000,266.000000,3.014700,118800.000000 
+-120.580000,38.770000,21.000000,1661.000000,406.000000,789.000000,319.000000,2.358300,108700.000000 +-120.580000,37.360000,33.000000,3564.000000,716.000000,2603.000000,696.000000,2.217900,67500.000000 +-120.580000,35.000000,37.000000,523.000000,119.000000,374.000000,95.000000,1.472600,106300.000000 +-120.590000,38.760000,21.000000,1728.000000,417.000000,731.000000,334.000000,1.726600,94700.000000 +-120.590000,37.590000,36.000000,291.000000,48.000000,124.000000,47.000000,5.694500,154200.000000 +-120.590000,37.390000,16.000000,4717.000000,1119.000000,3589.000000,1017.000000,2.106100,72800.000000 +-120.590000,37.350000,15.000000,3249.000000,613.000000,1569.000000,595.000000,3.539300,88000.000000 +-120.590000,35.130000,8.000000,6638.000000,1054.000000,2710.000000,966.000000,4.677600,295500.000000 +-120.590000,35.120000,27.000000,3055.000000,677.000000,1407.000000,610.000000,2.170200,179700.000000 +-120.590000,35.110000,25.000000,3642.000000,726.000000,1729.000000,673.000000,3.155000,205400.000000 +-120.590000,35.110000,20.000000,3098.000000,571.000000,1449.000000,611.000000,3.574400,197800.000000 +-120.590000,34.700000,29.000000,17738.000000,3114.000000,12427.000000,2826.000000,2.737700,28300.000000 +-120.600000,38.760000,22.000000,1236.000000,273.000000,615.000000,248.000000,3.021700,106900.000000 +-120.600000,37.350000,34.000000,1722.000000,316.000000,904.000000,315.000000,2.465300,66100.000000 +-120.600000,37.350000,19.000000,3874.000000,676.000000,2441.000000,707.000000,3.295500,88600.000000 +-120.600000,37.340000,34.000000,1830.000000,431.000000,1304.000000,415.000000,2.118200,68900.000000 +-120.600000,35.600000,13.000000,4461.000000,764.000000,1795.000000,640.000000,4.475000,206900.000000 +-120.600000,35.120000,22.000000,3342.000000,644.000000,1342.000000,593.000000,3.450900,217700.000000 +-120.600000,35.110000,17.000000,2495.000000,524.000000,1292.000000,501.000000,2.262500,153000.000000 +-120.600000,35.100000,16.000000,3290.000000,686.000000,1497.000000,655.000000,2.687500,178200.000000 +-120.600000,34.910000,44.000000,711.000000,140.000000,384.000000,116.000000,2.109400,73800.000000 +-120.610000,37.360000,16.000000,638.000000,147.000000,380.000000,132.000000,1.913500,87500.000000 +-120.610000,37.350000,34.000000,1900.000000,401.000000,1009.000000,385.000000,2.222200,75000.000000 +-120.610000,37.320000,18.000000,5009.000000,826.000000,2497.000000,805.000000,4.250000,146300.000000 +-120.610000,37.030000,34.000000,1841.000000,354.000000,1019.000000,356.000000,1.784100,67500.000000 +-120.610000,35.130000,16.000000,3431.000000,721.000000,1777.000000,701.000000,2.730100,190400.000000 +-120.610000,35.120000,31.000000,1486.000000,345.000000,823.000000,322.000000,2.697400,165400.000000 +-120.610000,35.110000,11.000000,3733.000000,831.000000,1839.000000,739.000000,3.306200,158500.000000 +-120.610000,35.100000,17.000000,2799.000000,637.000000,2015.000000,592.000000,3.053600,143600.000000 +-120.610000,35.100000,14.000000,2919.000000,691.000000,1896.000000,577.000000,2.400300,142100.000000 +-120.610000,35.060000,13.000000,2364.000000,421.000000,1257.000000,380.000000,4.616700,273100.000000 +-120.620000,38.710000,10.000000,6305.000000,1150.000000,2597.000000,921.000000,4.019700,132200.000000 +-120.620000,38.390000,15.000000,3750.000000,691.000000,1444.000000,603.000000,2.739900,134800.000000 +-120.620000,37.370000,8.000000,2608.000000,428.000000,1530.000000,435.000000,3.968000,102100.000000 +-120.620000,37.350000,20.000000,1457.000000,372.000000,1000.000000,346.000000,1.461500,69200.000000 
+-120.620000,37.350000,18.000000,874.000000,203.000000,572.000000,190.000000,1.683300,71000.000000 +-120.620000,36.990000,32.000000,2455.000000,508.000000,1344.000000,492.000000,1.973200,69400.000000 +-120.620000,35.280000,28.000000,3952.000000,592.000000,1469.000000,571.000000,6.314400,328800.000000 +-120.620000,35.110000,18.000000,2241.000000,544.000000,1521.000000,509.000000,2.029200,155800.000000 +-120.630000,37.410000,27.000000,2083.000000,444.000000,1462.000000,479.000000,2.643900,69100.000000 +-120.630000,37.360000,16.000000,1605.000000,282.000000,866.000000,284.000000,4.069400,110200.000000 +-120.630000,35.590000,9.000000,5782.000000,1184.000000,3026.000000,1130.000000,2.652800,113500.000000 +-120.630000,35.270000,23.000000,1630.000000,253.000000,704.000000,263.000000,5.156000,251300.000000 +-120.630000,35.130000,16.000000,2680.000000,704.000000,975.000000,619.000000,1.787800,55000.000000 +-120.640000,40.410000,50.000000,1741.000000,424.000000,987.000000,383.000000,1.506600,59300.000000 +-120.640000,37.700000,16.000000,284.000000,51.000000,239.000000,46.000000,1.895800,137500.000000 +-120.640000,37.380000,21.000000,3157.000000,637.000000,2268.000000,620.000000,2.567000,70400.000000 +-120.640000,37.380000,19.000000,2256.000000,449.000000,1469.000000,435.000000,2.512900,84600.000000 +-120.640000,37.200000,16.000000,2236.000000,438.000000,1361.000000,393.000000,2.006600,125000.000000 +-120.640000,36.990000,23.000000,2363.000000,449.000000,1168.000000,410.000000,2.279400,75700.000000 +-120.640000,35.650000,9.000000,3466.000000,673.000000,2356.000000,619.000000,2.992600,158200.000000 +-120.640000,35.470000,8.000000,416.000000,121.000000,936.000000,97.000000,2.115400,117200.000000 +-120.640000,35.150000,7.000000,7922.000000,1442.000000,2863.000000,1197.000000,4.849000,275000.000000 +-120.640000,34.970000,5.000000,2090.000000,469.000000,1911.000000,482.000000,2.431800,86100.000000 +-120.650000,40.420000,39.000000,3240.000000,652.000000,1467.000000,621.000000,2.187500,64300.000000 +-120.650000,38.500000,10.000000,1783.000000,337.000000,638.000000,262.000000,2.650000,116700.000000 +-120.650000,38.420000,23.000000,1538.000000,305.000000,730.000000,267.000000,2.607800,116700.000000 +-120.650000,38.280000,21.000000,3095.000000,681.000000,1341.000000,546.000000,2.138200,104000.000000 +-120.650000,37.330000,25.000000,1731.000000,311.000000,810.000000,266.000000,4.105800,107600.000000 +-120.650000,37.090000,22.000000,886.000000,173.000000,595.000000,161.000000,2.439800,150000.000000 +-120.650000,36.980000,26.000000,1787.000000,364.000000,1548.000000,362.000000,1.718800,49500.000000 +-120.650000,35.410000,15.000000,6725.000000,1111.000000,3139.000000,1029.000000,4.187500,261600.000000 +-120.650000,35.320000,20.000000,626.000000,212.000000,3574.000000,261.000000,1.029800,300000.000000 +-120.650000,35.290000,36.000000,1940.000000,315.000000,850.000000,298.000000,3.181800,249600.000000 +-120.650000,35.290000,29.000000,1785.000000,481.000000,1344.000000,472.000000,1.449200,222900.000000 +-120.650000,35.280000,32.000000,896.000000,240.000000,548.000000,231.000000,2.545500,165900.000000 +-120.650000,35.270000,52.000000,2254.000000,642.000000,1237.000000,590.000000,2.620800,227100.000000 +-120.650000,35.270000,27.000000,2034.000000,341.000000,768.000000,316.000000,4.241100,258900.000000 +-120.650000,35.270000,15.000000,2365.000000,538.000000,1446.000000,490.000000,2.512900,225900.000000 +-120.650000,35.120000,19.000000,2949.000000,662.000000,1425.000000,548.000000,2.961500,178100.000000 
+-120.660000,40.410000,52.000000,2081.000000,478.000000,1051.000000,419.000000,2.299200,70200.000000 +-120.660000,39.720000,15.000000,3763.000000,784.000000,717.000000,348.000000,2.201900,130500.000000 +-120.660000,38.610000,19.000000,2715.000000,596.000000,1301.000000,473.000000,2.504200,126400.000000 +-120.660000,38.400000,18.000000,2144.000000,420.000000,985.000000,381.000000,3.175000,118500.000000 +-120.660000,35.500000,19.000000,1861.000000,364.000000,1040.000000,363.000000,3.312500,163900.000000 +-120.660000,35.470000,18.000000,2474.000000,449.000000,1269.000000,431.000000,3.906300,184800.000000 +-120.660000,35.290000,39.000000,2163.000000,652.000000,1153.000000,599.000000,2.084000,233300.000000 +-120.660000,35.290000,23.000000,1932.000000,487.000000,1380.000000,472.000000,1.964700,253600.000000 +-120.660000,35.280000,46.000000,2054.000000,502.000000,1170.000000,494.000000,2.178600,206300.000000 +-120.660000,35.280000,31.000000,2773.000000,844.000000,1358.000000,794.000000,1.403600,209600.000000 +-120.660000,35.270000,46.000000,2217.000000,544.000000,1107.000000,527.000000,2.800900,192600.000000 +-120.660000,35.270000,33.000000,1664.000000,455.000000,1077.000000,461.000000,1.687500,174200.000000 +-120.660000,35.270000,17.000000,2719.000000,589.000000,1386.000000,570.000000,3.733700,208200.000000 +-120.660000,35.200000,13.000000,5138.000000,713.000000,1838.000000,645.000000,5.967600,380000.000000 +-120.660000,35.130000,41.000000,2666.000000,751.000000,940.000000,507.000000,1.965300,236100.000000 +-120.660000,35.100000,19.000000,1583.000000,392.000000,704.000000,269.000000,2.104200,268300.000000 +-120.670000,40.500000,15.000000,5343.000000,1002.000000,2503.000000,902.000000,3.596200,85900.000000 +-120.670000,38.760000,35.000000,2104.000000,403.000000,1060.000000,400.000000,2.168200,138100.000000 +-120.670000,38.190000,17.000000,2967.000000,611.000000,1387.000000,564.000000,2.041700,92600.000000 +-120.670000,37.970000,9.000000,7450.000000,1475.000000,2233.000000,930.000000,2.652800,133000.000000 +-120.670000,37.370000,18.000000,164.000000,30.000000,104.000000,32.000000,1.660700,87500.000000 +-120.670000,36.720000,18.000000,819.000000,198.000000,996.000000,198.000000,2.500000,112500.000000 +-120.670000,35.630000,8.000000,2690.000000,410.000000,1085.000000,381.000000,4.284100,256700.000000 +-120.670000,35.480000,18.000000,2339.000000,443.000000,1097.000000,416.000000,3.343800,176100.000000 +-120.670000,35.300000,19.000000,1540.000000,715.000000,1799.000000,635.000000,0.702500,500001.000000 +-120.670000,35.290000,44.000000,2236.000000,411.000000,1036.000000,437.000000,3.083300,219300.000000 +-120.680000,37.470000,33.000000,1028.000000,226.000000,658.000000,197.000000,2.304300,66300.000000 +-120.680000,35.500000,19.000000,3369.000000,673.000000,1834.000000,646.000000,3.767200,173800.000000 +-120.680000,35.480000,15.000000,2608.000000,525.000000,1351.000000,502.000000,2.779800,205800.000000 +-120.680000,35.260000,26.000000,1704.000000,315.000000,918.000000,310.000000,3.246400,208000.000000 +-120.680000,35.250000,16.000000,4208.000000,897.000000,1634.000000,806.000000,2.286800,233700.000000 +-120.690000,39.120000,19.000000,1048.000000,262.000000,493.000000,184.000000,2.291700,118200.000000 +-120.690000,38.360000,19.000000,3267.000000,614.000000,1252.000000,566.000000,2.723600,109900.000000 +-120.690000,37.770000,46.000000,431.000000,86.000000,239.000000,80.000000,3.318200,282100.000000 +-120.690000,37.590000,27.000000,1170.000000,227.000000,660.000000,222.000000,2.390600,81800.000000 
+-120.690000,36.840000,18.000000,902.000000,195.000000,771.000000,174.000000,2.208300,55000.000000 +-120.690000,35.650000,14.000000,3487.000000,889.000000,2352.000000,796.000000,1.630300,144900.000000 +-120.690000,35.620000,43.000000,3044.000000,652.000000,1456.000000,608.000000,2.456700,140000.000000 +-120.690000,35.620000,35.000000,3451.000000,713.000000,1550.000000,653.000000,2.916700,161700.000000 +-120.690000,35.520000,26.000000,2758.000000,571.000000,1291.000000,522.000000,2.925000,181400.000000 +-120.690000,35.340000,16.000000,164.000000,30.000000,542.000000,32.000000,1.656300,42500.000000 +-120.690000,35.280000,26.000000,4225.000000,886.000000,1795.000000,704.000000,2.284700,247000.000000 +-120.700000,38.750000,19.000000,2325.000000,430.000000,967.000000,376.000000,2.900000,158700.000000 +-120.700000,38.690000,13.000000,4492.000000,821.000000,2093.000000,734.000000,4.070900,151700.000000 +-120.700000,35.760000,15.000000,1914.000000,425.000000,1130.000000,421.000000,2.216500,90600.000000 +-120.700000,35.550000,10.000000,3979.000000,761.000000,1834.000000,671.000000,3.500000,172100.000000 +-120.700000,35.320000,46.000000,118.000000,17.000000,6532.000000,13.000000,4.263900,350000.000000 +-120.700000,35.310000,24.000000,3504.000000,521.000000,1490.000000,506.000000,4.671900,337000.000000 +-120.700000,35.280000,14.000000,3768.000000,682.000000,1884.000000,664.000000,4.607100,239900.000000 +-120.710000,40.360000,19.000000,4462.000000,828.000000,2229.000000,777.000000,3.553600,105700.000000 +-120.710000,40.130000,19.000000,897.000000,180.000000,276.000000,110.000000,2.955400,89400.000000 +-120.710000,38.850000,8.000000,1877.000000,479.000000,884.000000,323.000000,3.468800,120100.000000 +-120.710000,38.730000,17.000000,2146.000000,396.000000,862.000000,351.000000,2.921900,141300.000000 +-120.710000,38.340000,16.000000,1257.000000,231.000000,559.000000,213.000000,4.453100,144300.000000 +-120.710000,37.390000,11.000000,1479.000000,341.000000,1476.000000,327.000000,3.272100,73800.000000 +-120.710000,37.380000,14.000000,1979.000000,432.000000,1756.000000,382.000000,2.692300,71400.000000 +-120.710000,35.270000,9.000000,2568.000000,421.000000,1149.000000,398.000000,5.428700,331600.000000 +-120.720000,38.940000,10.000000,1604.000000,352.000000,540.000000,190.000000,3.762500,113200.000000 +-120.720000,38.570000,8.000000,892.000000,185.000000,427.000000,164.000000,2.683300,118800.000000 +-120.720000,38.420000,17.000000,5654.000000,1085.000000,2237.000000,953.000000,3.046500,144100.000000 +-120.720000,38.380000,9.000000,1787.000000,347.000000,806.000000,306.000000,2.525000,157200.000000 +-120.720000,37.540000,17.000000,729.000000,134.000000,431.000000,121.000000,4.218800,131300.000000 +-120.720000,37.380000,22.000000,1311.000000,319.000000,1455.000000,340.000000,2.281300,67300.000000 +-120.720000,35.630000,31.000000,3476.000000,644.000000,1476.000000,567.000000,3.347200,195200.000000 +-120.730000,37.380000,23.000000,1451.000000,292.000000,1052.000000,265.000000,2.869800,72900.000000 +-120.740000,39.900000,23.000000,1017.000000,218.000000,387.000000,152.000000,2.265600,88200.000000 +-120.740000,39.820000,9.000000,1955.000000,398.000000,294.000000,122.000000,3.958300,126500.000000 +-120.740000,39.390000,18.000000,453.000000,117.000000,152.000000,77.000000,1.352300,85700.000000 +-120.740000,37.330000,30.000000,2390.000000,470.000000,1409.000000,428.000000,2.148400,81300.000000 +-120.750000,37.690000,24.000000,2282.000000,423.000000,1167.000000,398.000000,3.821400,116100.000000 
+-120.750000,37.440000,27.000000,2295.000000,424.000000,1252.000000,350.000000,3.618200,123200.000000 +-120.750000,37.370000,32.000000,1656.000000,317.000000,1037.000000,286.000000,2.496400,88800.000000 +-120.760000,38.760000,21.000000,3509.000000,606.000000,1576.000000,564.000000,2.639200,148500.000000 +-120.760000,38.730000,17.000000,512.000000,129.000000,314.000000,140.000000,1.562500,108300.000000 +-120.760000,38.650000,17.000000,2319.000000,430.000000,1126.000000,372.000000,3.551100,155900.000000 +-120.760000,38.600000,14.000000,2925.000000,625.000000,1226.000000,437.000000,2.586500,133800.000000 +-120.760000,38.470000,17.000000,1521.000000,309.000000,607.000000,240.000000,3.500000,123800.000000 +-120.760000,38.120000,7.000000,7188.000000,1288.000000,3175.000000,1115.000000,3.848800,130600.000000 +-120.760000,37.730000,16.000000,1343.000000,241.000000,732.000000,195.000000,3.583300,187500.000000 +-120.760000,37.650000,25.000000,3214.000000,682.000000,2319.000000,640.000000,2.038500,84300.000000 +-120.760000,37.580000,35.000000,1395.000000,264.000000,756.000000,253.000000,3.618100,178600.000000 +-120.760000,37.440000,18.000000,2003.000000,398.000000,1333.000000,411.000000,2.756200,90500.000000 +-120.760000,35.520000,7.000000,9613.000000,1666.000000,4487.000000,1653.000000,3.666700,250600.000000 +-120.770000,40.650000,11.000000,2635.000000,667.000000,280.000000,132.000000,1.721400,118300.000000 +-120.770000,38.380000,15.000000,4221.000000,816.000000,1737.000000,743.000000,2.312500,128600.000000 +-120.770000,37.640000,8.000000,3294.000000,667.000000,2277.000000,652.000000,2.641700,96800.000000 +-120.770000,37.420000,27.000000,949.000000,224.000000,888.000000,241.000000,2.333300,72800.000000 +-120.780000,38.730000,31.000000,3117.000000,616.000000,1606.000000,588.000000,2.984400,127900.000000 +-120.790000,38.830000,15.000000,1374.000000,291.000000,709.000000,239.000000,1.722200,118500.000000 +-120.790000,38.700000,13.000000,5036.000000,1034.000000,2243.000000,923.000000,2.331900,138500.000000 +-120.790000,38.540000,34.000000,1133.000000,254.000000,495.000000,187.000000,2.050000,68900.000000 +-120.790000,38.240000,19.000000,1003.000000,235.000000,538.000000,190.000000,2.982100,90400.000000 +-120.790000,37.820000,17.000000,4227.000000,729.000000,1809.000000,679.000000,3.266700,269500.000000 +-120.790000,37.760000,14.000000,3531.000000,508.000000,1505.000000,497.000000,5.522800,275300.000000 +-120.790000,37.530000,20.000000,1417.000000,263.000000,853.000000,263.000000,3.308300,108300.000000 +-120.790000,37.410000,35.000000,2436.000000,466.000000,1730.000000,469.000000,2.207100,85900.000000 +-120.790000,36.060000,29.000000,1916.000000,386.000000,1019.000000,314.000000,2.488100,87500.000000 +-120.800000,38.510000,23.000000,1001.000000,195.000000,369.000000,157.000000,3.125000,96400.000000 +-120.800000,37.610000,30.000000,918.000000,154.000000,469.000000,139.000000,3.968800,175000.000000 +-120.800000,37.550000,18.000000,1802.000000,335.000000,1110.000000,329.000000,3.164100,96300.000000 +-120.800000,37.530000,29.000000,1162.000000,254.000000,726.000000,225.000000,2.193200,90600.000000 +-120.800000,37.520000,13.000000,2920.000000,481.000000,1602.000000,490.000000,3.928600,145800.000000 +-120.800000,35.330000,20.000000,2200.000000,393.000000,996.000000,365.000000,3.587000,330000.000000 +-120.810000,39.020000,30.000000,806.000000,189.000000,326.000000,146.000000,2.815500,101000.000000 +-120.810000,38.890000,17.000000,1438.000000,324.000000,675.000000,268.000000,2.944400,119300.000000 
+-120.810000,38.740000,29.000000,2259.000000,482.000000,1099.000000,463.000000,2.331400,121600.000000 +-120.810000,38.730000,42.000000,1276.000000,260.000000,799.000000,259.000000,2.727300,128600.000000 +-120.810000,38.730000,38.000000,2005.000000,385.000000,882.000000,353.000000,2.510400,120500.000000 +-120.810000,38.670000,14.000000,8396.000000,1578.000000,3952.000000,1474.000000,3.056500,118800.000000 +-120.810000,35.190000,14.000000,3414.000000,802.000000,1236.000000,632.000000,3.763500,336200.000000 +-120.820000,37.640000,20.000000,3375.000000,630.000000,1505.000000,598.000000,2.690000,201300.000000 +-120.820000,37.540000,20.000000,707.000000,114.000000,282.000000,86.000000,6.132400,164800.000000 +-120.820000,37.510000,17.000000,1664.000000,253.000000,736.000000,254.000000,4.408300,165800.000000 +-120.820000,37.500000,21.000000,2974.000000,495.000000,1313.000000,461.000000,4.488600,135400.000000 +-120.820000,37.490000,25.000000,1611.000000,285.000000,882.000000,261.000000,3.554700,122400.000000 +-120.820000,37.050000,15.000000,1385.000000,288.000000,775.000000,255.000000,1.933000,140600.000000 +-120.820000,35.320000,12.000000,3522.000000,683.000000,1780.000000,662.000000,3.395800,215800.000000 +-120.820000,35.310000,16.000000,3924.000000,699.000000,1325.000000,638.000000,2.517200,293900.000000 +-120.830000,39.270000,14.000000,3338.000000,608.000000,1373.000000,562.000000,3.670000,160100.000000 +-120.830000,39.020000,15.000000,1117.000000,242.000000,551.000000,229.000000,2.631900,97700.000000 +-120.830000,38.740000,17.000000,3685.000000,775.000000,1714.000000,734.000000,2.226900,128300.000000 +-120.830000,37.790000,16.000000,893.000000,164.000000,548.000000,155.000000,3.687500,121900.000000 +-120.830000,37.770000,20.000000,1717.000000,403.000000,1062.000000,401.000000,1.675900,116700.000000 +-120.830000,37.760000,21.000000,435.000000,96.000000,219.000000,83.000000,2.912500,112500.000000 +-120.830000,37.580000,30.000000,1527.000000,256.000000,757.000000,240.000000,3.662900,171400.000000 +-120.830000,37.510000,34.000000,3078.000000,477.000000,1226.000000,487.000000,4.601000,150000.000000 +-120.830000,37.510000,13.000000,3795.000000,604.000000,1639.000000,609.000000,4.663500,198400.000000 +-120.830000,37.500000,30.000000,1340.000000,244.000000,631.000000,231.000000,3.375000,118500.000000 +-120.830000,35.330000,14.000000,4155.000000,787.000000,2112.000000,755.000000,4.476600,192700.000000 +-120.830000,35.320000,11.000000,3252.000000,701.000000,1814.000000,660.000000,3.222600,183200.000000 +-120.840000,38.810000,11.000000,1280.000000,286.000000,609.000000,248.000000,3.163500,132600.000000 +-120.840000,38.770000,11.000000,1013.000000,188.000000,410.000000,158.000000,4.825000,184600.000000 +-120.840000,38.730000,17.000000,2616.000000,492.000000,1158.000000,457.000000,2.880700,142600.000000 +-120.840000,38.630000,12.000000,1313.000000,231.000000,731.000000,232.000000,5.737300,208300.000000 +-120.840000,37.920000,27.000000,471.000000,84.000000,195.000000,72.000000,3.333300,208300.000000 +-120.840000,37.520000,16.000000,4527.000000,887.000000,2531.000000,825.000000,3.706500,124800.000000 +-120.840000,37.510000,8.000000,1191.000000,242.000000,688.000000,260.000000,2.724300,138400.000000 +-120.840000,37.510000,20.000000,1901.000000,313.000000,1258.000000,320.000000,3.895800,126800.000000 +-120.840000,37.510000,14.000000,6337.000000,1593.000000,3909.000000,1480.000000,2.064300,106500.000000 +-120.840000,37.500000,47.000000,2310.000000,484.000000,1126.000000,447.000000,2.208300,97300.000000 
+-120.840000,37.490000,25.000000,2383.000000,576.000000,1234.000000,583.000000,1.452900,86100.000000 +-120.840000,37.480000,10.000000,2874.000000,612.000000,1960.000000,596.000000,2.738100,104600.000000 +-120.840000,37.470000,11.000000,2285.000000,499.000000,1468.000000,471.000000,2.785700,110300.000000 +-120.840000,37.430000,32.000000,2892.000000,521.000000,1580.000000,484.000000,3.778400,164500.000000 +-120.840000,37.400000,7.000000,2773.000000,530.000000,1374.000000,505.000000,2.621400,103800.000000 +-120.840000,37.070000,24.000000,1520.000000,335.000000,882.000000,306.000000,2.201900,100000.000000 +-120.840000,37.060000,14.000000,1506.000000,380.000000,1096.000000,352.000000,1.130100,78500.000000 +-120.840000,37.050000,8.000000,1944.000000,283.000000,814.000000,276.000000,5.398800,165500.000000 +-120.840000,35.370000,34.000000,3279.000000,714.000000,1397.000000,646.000000,2.555200,200000.000000 +-120.840000,35.350000,27.000000,2863.000000,711.000000,930.000000,533.000000,2.620500,221100.000000 +-120.840000,35.330000,15.000000,3276.000000,670.000000,1520.000000,613.000000,3.641200,207800.000000 +-120.840000,35.320000,17.000000,4197.000000,802.000000,1656.000000,732.000000,3.526000,183900.000000 +-120.840000,35.320000,15.000000,2419.000000,538.000000,1279.000000,522.000000,3.476200,189600.000000 +-120.840000,35.310000,23.000000,3100.000000,603.000000,1515.000000,609.000000,2.849300,196100.000000 +-120.840000,35.300000,15.000000,2062.000000,327.000000,781.000000,316.000000,4.935900,317700.000000 +-120.850000,37.780000,30.000000,1120.000000,248.000000,609.000000,237.000000,2.238600,87200.000000 +-120.850000,37.780000,25.000000,421.000000,102.000000,303.000000,106.000000,2.267900,71300.000000 +-120.850000,37.780000,15.000000,3553.000000,659.000000,1684.000000,611.000000,3.316900,131200.000000 +-120.850000,37.770000,52.000000,436.000000,81.000000,197.000000,68.000000,1.862500,85400.000000 +-120.850000,37.770000,37.000000,1738.000000,403.000000,936.000000,366.000000,2.471700,77100.000000 +-120.850000,37.750000,26.000000,28.000000,4.000000,9.000000,5.000000,1.625000,85000.000000 +-120.850000,37.570000,27.000000,819.000000,157.000000,451.000000,150.000000,3.493400,193800.000000 +-120.850000,37.550000,45.000000,350.000000,62.000000,187.000000,63.000000,2.593800,275000.000000 +-120.850000,37.510000,5.000000,2899.000000,745.000000,1593.000000,633.000000,2.229200,127500.000000 +-120.850000,37.500000,52.000000,1724.000000,352.000000,922.000000,348.000000,1.722700,85700.000000 +-120.850000,37.490000,42.000000,264.000000,72.000000,310.000000,70.000000,1.406300,61500.000000 +-120.850000,37.490000,39.000000,2840.000000,733.000000,2606.000000,737.000000,1.942900,76400.000000 +-120.850000,37.070000,16.000000,1795.000000,362.000000,1642.000000,340.000000,2.536300,86300.000000 +-120.850000,37.060000,31.000000,2609.000000,645.000000,1796.000000,629.000000,1.547900,82000.000000 +-120.850000,37.050000,32.000000,2893.000000,481.000000,1198.000000,466.000000,3.171900,140600.000000 +-120.850000,35.380000,27.000000,3493.000000,909.000000,1481.000000,666.000000,2.307500,184200.000000 +-120.850000,35.370000,21.000000,1033.000000,195.000000,588.000000,187.000000,2.817300,226900.000000 +-120.860000,38.750000,15.000000,1533.000000,300.000000,674.000000,287.000000,2.562500,146100.000000 +-120.860000,37.770000,45.000000,621.000000,129.000000,257.000000,124.000000,1.718800,109400.000000 +-120.860000,37.760000,32.000000,964.000000,198.000000,623.000000,201.000000,3.091700,88900.000000 
+-120.860000,37.730000,27.000000,508.000000,93.000000,263.000000,81.000000,3.113600,183300.000000 +-120.860000,37.600000,25.000000,1178.000000,206.000000,709.000000,214.000000,4.562500,133600.000000 +-120.860000,37.530000,18.000000,2829.000000,732.000000,1751.000000,712.000000,1.644500,156900.000000 +-120.860000,37.520000,9.000000,9885.000000,1871.000000,5372.000000,1843.000000,3.482100,127100.000000 +-120.860000,37.500000,34.000000,4272.000000,996.000000,2916.000000,962.000000,1.982900,82800.000000 +-120.860000,37.490000,37.000000,1084.000000,271.000000,893.000000,236.000000,1.621300,69500.000000 +-120.860000,37.490000,22.000000,2140.000000,445.000000,1441.000000,409.000000,2.470600,89400.000000 +-120.860000,37.400000,17.000000,3511.000000,636.000000,1904.000000,617.000000,3.111100,113900.000000 +-120.860000,35.390000,23.000000,1664.000000,355.000000,629.000000,279.000000,2.734400,188300.000000 +-120.870000,41.540000,21.000000,1091.000000,208.000000,660.000000,188.000000,2.232100,34600.000000 +-120.870000,39.180000,25.000000,2691.000000,598.000000,964.000000,373.000000,3.919600,142700.000000 +-120.870000,39.150000,17.000000,1819.000000,389.000000,736.000000,283.000000,2.860300,128900.000000 +-120.870000,38.830000,12.000000,2180.000000,423.000000,1070.000000,377.000000,2.856200,128200.000000 +-120.870000,38.710000,13.000000,2692.000000,470.000000,1302.000000,420.000000,4.000000,167400.000000 +-120.870000,38.370000,28.000000,3998.000000,765.000000,1614.000000,698.000000,2.812500,113400.000000 +-120.870000,37.770000,9.000000,4838.000000,920.000000,2460.000000,923.000000,3.595900,142700.000000 +-120.870000,37.640000,40.000000,1010.000000,155.000000,488.000000,157.000000,3.898400,170500.000000 +-120.870000,37.600000,32.000000,4579.000000,914.000000,2742.000000,856.000000,2.661900,86200.000000 +-120.870000,37.500000,7.000000,4966.000000,985.000000,2431.000000,904.000000,3.104200,122500.000000 +-120.870000,37.070000,26.000000,2036.000000,401.000000,1343.000000,414.000000,3.633100,88600.000000 +-120.870000,37.050000,29.000000,4176.000000,779.000000,2092.000000,741.000000,2.595000,104200.000000 +-120.880000,38.910000,15.000000,3876.000000,778.000000,1960.000000,691.000000,2.902000,127300.000000 +-120.880000,38.320000,18.000000,2791.000000,492.000000,1187.000000,438.000000,3.258900,103000.000000 +-120.880000,38.160000,8.000000,2029.000000,387.000000,1000.000000,364.000000,4.010900,125900.000000 +-120.880000,37.570000,22.000000,1440.000000,267.000000,774.000000,249.000000,3.982100,204300.000000 +-120.880000,37.530000,18.000000,239.000000,39.000000,92.000000,36.000000,5.316800,175000.000000 +-120.880000,37.520000,2.000000,1871.000000,409.000000,707.000000,256.000000,2.610300,133600.000000 +-120.880000,37.370000,24.000000,1294.000000,222.000000,684.000000,221.000000,2.690800,103100.000000 +-120.890000,39.300000,17.000000,2282.000000,431.000000,974.000000,371.000000,3.541700,155100.000000 +-120.890000,37.590000,33.000000,1016.000000,206.000000,617.000000,209.000000,2.151000,195800.000000 +-120.890000,37.540000,30.000000,509.000000,115.000000,275.000000,115.000000,2.267900,250000.000000 +-120.890000,37.450000,29.000000,1940.000000,337.000000,1070.000000,332.000000,3.659700,145600.000000 +-120.890000,37.330000,27.000000,2692.000000,481.000000,1518.000000,447.000000,2.041700,94200.000000 +-120.890000,37.210000,25.000000,3301.000000,678.000000,994.000000,306.000000,3.226200,97200.000000 +-120.890000,35.370000,29.000000,2046.000000,588.000000,846.000000,410.000000,1.650000,227300.000000 
+-120.900000,39.950000,20.000000,1349.000000,238.000000,601.000000,203.000000,3.541700,96600.000000 +-120.900000,39.930000,23.000000,2679.000000,546.000000,1424.000000,529.000000,2.881200,81900.000000 +-120.900000,38.200000,16.000000,3120.000000,641.000000,1319.000000,526.000000,2.047200,93200.000000 +-120.900000,37.810000,27.000000,4213.000000,750.000000,2142.000000,746.000000,3.703100,173300.000000 +-120.900000,37.760000,20.000000,570.000000,112.000000,304.000000,108.000000,2.202400,156300.000000 +-120.900000,37.660000,19.000000,3377.000000,669.000000,2426.000000,663.000000,2.978300,82500.000000 +-120.900000,37.640000,26.000000,1762.000000,418.000000,855.000000,308.000000,1.676700,81300.000000 +-120.900000,35.690000,14.000000,5020.000000,909.000000,2105.000000,796.000000,3.815800,248700.000000 +-120.910000,40.080000,24.000000,1629.000000,313.000000,641.000000,274.000000,2.206700,69600.000000 +-120.910000,39.390000,16.000000,352.000000,105.000000,226.000000,82.000000,1.609400,79500.000000 +-120.910000,38.730000,11.000000,5460.000000,859.000000,2645.000000,838.000000,4.835000,230600.000000 +-120.910000,38.620000,12.000000,4545.000000,748.000000,2033.000000,718.000000,4.184300,207600.000000 +-120.910000,38.110000,9.000000,3585.000000,680.000000,1800.000000,598.000000,3.636000,133100.000000 +-120.910000,37.660000,36.000000,1320.000000,255.000000,720.000000,232.000000,2.652300,76300.000000 +-120.910000,37.570000,26.000000,3396.000000,705.000000,2446.000000,694.000000,2.052100,65400.000000 +-120.920000,40.020000,35.000000,383.000000,92.000000,202.000000,72.000000,2.645800,102500.000000 +-120.920000,38.860000,11.000000,1720.000000,345.000000,850.000000,326.000000,3.202700,128600.000000 +-120.920000,37.700000,24.000000,527.000000,112.000000,270.000000,112.000000,1.617200,156300.000000 +-120.920000,37.650000,23.000000,505.000000,124.000000,163.000000,129.000000,1.369600,275000.000000 +-120.920000,37.600000,12.000000,4485.000000,805.000000,2445.000000,832.000000,3.761100,123100.000000 +-120.920000,37.590000,26.000000,1705.000000,279.000000,642.000000,236.000000,2.659100,180500.000000 +-120.920000,35.400000,23.000000,2059.000000,354.000000,636.000000,278.000000,3.690800,278800.000000 +-120.930000,39.960000,15.000000,1666.000000,351.000000,816.000000,316.000000,2.955900,118800.000000 +-120.930000,39.170000,13.000000,2331.000000,464.000000,1110.000000,419.000000,3.656300,164900.000000 +-120.930000,38.770000,9.000000,2229.000000,355.000000,788.000000,341.000000,5.511100,196300.000000 +-120.930000,38.650000,12.000000,2213.000000,384.000000,1097.000000,351.000000,4.556800,170100.000000 +-120.930000,38.500000,15.000000,1248.000000,234.000000,529.000000,216.000000,3.339300,107200.000000 +-120.930000,38.260000,13.000000,2084.000000,449.000000,834.000000,305.000000,3.293700,114200.000000 +-120.930000,37.740000,37.000000,1956.000000,402.000000,1265.000000,397.000000,2.302300,91900.000000 +-120.930000,37.720000,18.000000,391.000000,71.000000,247.000000,71.000000,4.386400,179500.000000 +-120.930000,37.670000,6.000000,3491.000000,657.000000,2075.000000,644.000000,3.384400,138500.000000 +-120.930000,37.660000,10.000000,7566.000000,1348.000000,3227.000000,1199.000000,4.744000,148100.000000 +-120.930000,37.650000,1.000000,2254.000000,328.000000,402.000000,112.000000,4.250000,189200.000000 +-120.930000,37.560000,17.000000,1812.000000,361.000000,672.000000,334.000000,1.550000,166100.000000 +-120.940000,40.170000,22.000000,1334.000000,261.000000,597.000000,222.000000,2.213200,89200.000000 
+-120.940000,40.140000,31.000000,3127.000000,664.000000,1345.000000,580.000000,1.577400,58000.000000 +-120.940000,39.220000,12.000000,1321.000000,268.000000,661.000000,232.000000,4.006200,153800.000000 +-120.940000,39.050000,8.000000,3758.000000,717.000000,1744.000000,661.000000,3.197200,151500.000000 +-120.940000,37.740000,35.000000,1166.000000,268.000000,515.000000,266.000000,2.346900,90200.000000 +-120.940000,37.700000,25.000000,1005.000000,159.000000,390.000000,139.000000,4.400000,174100.000000 +-120.940000,37.680000,4.000000,13315.000000,2424.000000,6420.000000,2289.000000,4.247100,162100.000000 +-120.940000,37.660000,17.000000,1147.000000,140.000000,327.000000,136.000000,6.865400,290500.000000 +-120.940000,37.650000,13.000000,5075.000000,978.000000,3033.000000,838.000000,3.057700,119000.000000 +-120.940000,37.630000,43.000000,244.000000,52.000000,176.000000,60.000000,1.425000,69400.000000 +-120.940000,37.610000,13.000000,3309.000000,603.000000,1796.000000,555.000000,3.837200,129300.000000 +-120.940000,37.600000,30.000000,3257.000000,574.000000,1804.000000,588.000000,3.533100,102900.000000 +-120.940000,37.590000,16.000000,3964.000000,824.000000,2622.000000,766.000000,2.315200,111300.000000 +-120.940000,37.580000,19.000000,1549.000000,369.000000,770.000000,370.000000,2.049300,99500.000000 +-120.940000,37.400000,32.000000,1175.000000,208.000000,774.000000,222.000000,3.000000,109400.000000 +-120.950000,39.930000,26.000000,2023.000000,385.000000,922.000000,365.000000,2.812500,83500.000000 +-120.950000,38.790000,12.000000,3247.000000,579.000000,1459.000000,517.000000,4.398100,202800.000000 +-120.950000,38.690000,10.000000,3421.000000,563.000000,1689.000000,545.000000,5.203200,217100.000000 +-120.950000,37.740000,18.000000,3453.000000,666.000000,1958.000000,601.000000,3.004300,156500.000000 +-120.950000,37.730000,12.000000,3609.000000,712.000000,2650.000000,742.000000,2.856500,92700.000000 +-120.950000,37.670000,15.000000,3062.000000,584.000000,1624.000000,538.000000,4.386400,137600.000000 +-120.950000,37.660000,16.000000,4478.000000,647.000000,1990.000000,672.000000,5.147300,188400.000000 +-120.950000,37.650000,37.000000,136.000000,20.000000,72.000000,22.000000,2.227900,225000.000000 +-120.950000,37.650000,14.000000,5200.000000,1119.000000,3221.000000,1102.000000,2.696400,107000.000000 +-120.950000,37.640000,32.000000,3487.000000,740.000000,1957.000000,685.000000,2.720900,88300.000000 +-120.950000,37.620000,11.000000,4981.000000,814.000000,1934.000000,686.000000,3.704100,174800.000000 +-120.950000,37.610000,17.000000,4054.000000,654.000000,2034.000000,667.000000,4.683300,142200.000000 +-120.950000,37.600000,35.000000,1493.000000,278.000000,729.000000,268.000000,2.982100,97400.000000 +-120.950000,37.590000,43.000000,1561.000000,354.000000,862.000000,332.000000,1.846600,81500.000000 +-120.950000,37.590000,29.000000,1727.000000,439.000000,1063.000000,386.000000,1.892900,63600.000000 +-120.950000,37.570000,29.000000,1179.000000,249.000000,672.000000,243.000000,3.112500,154800.000000 +-120.950000,37.090000,43.000000,1116.000000,222.000000,801.000000,207.000000,2.875000,97200.000000 +-120.950000,36.470000,52.000000,1691.000000,301.000000,618.000000,239.000000,3.229200,225000.000000 +-120.950000,35.440000,30.000000,6346.000000,1410.000000,1769.000000,887.000000,2.686400,283600.000000 +-120.960000,41.120000,29.000000,779.000000,136.000000,364.000000,123.000000,2.500000,59200.000000 +-120.960000,40.280000,19.000000,683.000000,139.000000,302.000000,111.000000,2.500000,64100.000000 
+-120.960000,39.120000,24.000000,2069.000000,436.000000,909.000000,374.000000,2.532600,139100.000000 +-120.960000,37.770000,32.000000,2262.000000,416.000000,1156.000000,404.000000,3.853400,157600.000000 +-120.960000,37.670000,18.000000,1442.000000,229.000000,538.000000,220.000000,4.296900,163200.000000 +-120.960000,37.660000,16.000000,4961.000000,902.000000,2654.000000,804.000000,4.282300,138300.000000 +-120.960000,37.650000,34.000000,1700.000000,325.000000,972.000000,326.000000,2.448500,95500.000000 +-120.960000,37.640000,41.000000,1467.000000,328.000000,673.000000,310.000000,2.791700,90700.000000 +-120.960000,37.640000,36.000000,60.000000,12.000000,51.000000,14.000000,3.625000,67500.000000 +-120.960000,37.610000,23.000000,3497.000000,887.000000,2467.000000,816.000000,1.944400,93400.000000 +-120.960000,37.540000,29.000000,1468.000000,245.000000,747.000000,231.000000,3.464300,125000.000000 +-120.960000,37.510000,30.000000,1288.000000,237.000000,720.000000,233.000000,2.386400,139100.000000 +-120.970000,37.840000,28.000000,2368.000000,430.000000,1231.000000,403.000000,2.883000,141900.000000 +-120.970000,37.730000,19.000000,3725.000000,543.000000,1412.000000,463.000000,5.747600,248600.000000 +-120.970000,37.690000,16.000000,2793.000000,476.000000,1279.000000,477.000000,3.466700,160900.000000 +-120.970000,37.690000,14.000000,5514.000000,909.000000,2819.000000,970.000000,3.859800,174400.000000 +-120.970000,37.680000,9.000000,1114.000000,172.000000,529.000000,174.000000,4.715900,163700.000000 +-120.970000,37.680000,16.000000,2349.000000,446.000000,1302.000000,392.000000,3.162500,130300.000000 +-120.970000,37.670000,31.000000,1648.000000,293.000000,792.000000,294.000000,2.400000,121500.000000 +-120.970000,37.660000,24.000000,2930.000000,588.000000,1448.000000,570.000000,3.539500,127900.000000 +-120.970000,37.660000,21.000000,2760.000000,632.000000,1260.000000,576.000000,2.022700,179800.000000 +-120.970000,37.650000,16.000000,3960.000000,716.000000,1776.000000,724.000000,3.988600,137500.000000 +-120.970000,37.640000,42.000000,2359.000000,504.000000,1131.000000,480.000000,2.083300,95500.000000 +-120.970000,37.630000,39.000000,2360.000000,607.000000,2047.000000,605.000000,1.705400,58800.000000 +-120.970000,37.620000,7.000000,8489.000000,1673.000000,5807.000000,1575.000000,2.945100,127800.000000 +-120.970000,37.610000,16.000000,1326.000000,375.000000,884.000000,375.000000,1.871000,103900.000000 +-120.970000,37.430000,27.000000,1380.000000,270.000000,810.000000,262.000000,2.187500,137500.000000 +-120.980000,39.930000,25.000000,2220.000000,511.000000,912.000000,449.000000,1.891400,87800.000000 +-120.980000,39.080000,20.000000,4570.000000,906.000000,2125.000000,815.000000,3.040300,148000.000000 +-120.980000,38.990000,17.000000,3403.000000,661.000000,1540.000000,622.000000,3.635400,162900.000000 +-120.980000,38.680000,5.000000,4810.000000,909.000000,2242.000000,900.000000,3.296400,176900.000000 +-120.980000,37.790000,20.000000,2458.000000,491.000000,1227.000000,481.000000,2.785700,110900.000000 +-120.980000,37.690000,18.000000,3176.000000,468.000000,1296.000000,471.000000,5.568400,185100.000000 +-120.980000,37.680000,27.000000,4006.000000,762.000000,1806.000000,718.000000,3.184800,112800.000000 +-120.980000,37.680000,24.000000,705.000000,114.000000,347.000000,141.000000,3.191200,149600.000000 +-120.980000,37.680000,18.000000,4197.000000,1006.000000,2203.000000,874.000000,2.166000,118600.000000 +-120.980000,37.670000,13.000000,1221.000000,260.000000,682.000000,275.000000,3.650000,155500.000000 
+-120.980000,37.660000,40.000000,3012.000000,616.000000,1423.000000,595.000000,2.634600,100600.000000 +-120.980000,37.660000,33.000000,1959.000000,342.000000,984.000000,356.000000,4.520800,114200.000000 +-120.980000,37.660000,10.000000,934.000000,260.000000,401.000000,255.000000,0.933600,127100.000000 +-120.980000,37.650000,40.000000,422.000000,63.000000,158.000000,63.000000,7.384100,172200.000000 +-120.980000,37.650000,36.000000,826.000000,167.000000,432.000000,150.000000,2.500000,103100.000000 +-120.980000,37.640000,45.000000,1913.000000,335.000000,839.000000,333.000000,3.139700,110700.000000 +-120.980000,37.640000,40.000000,1791.000000,359.000000,679.000000,322.000000,2.145800,130300.000000 +-120.980000,37.620000,26.000000,3819.000000,955.000000,3010.000000,932.000000,1.920600,81300.000000 +-120.980000,37.600000,36.000000,1437.000000,331.000000,1073.000000,320.000000,2.177900,58400.000000 +-120.980000,37.570000,27.000000,925.000000,176.000000,449.000000,168.000000,2.640600,129700.000000 +-120.990000,39.260000,16.000000,2616.000000,422.000000,1090.000000,425.000000,3.791700,179200.000000 +-120.990000,39.220000,16.000000,1497.000000,275.000000,737.000000,243.000000,2.894200,182500.000000 +-120.990000,39.200000,15.000000,2993.000000,562.000000,1296.000000,518.000000,3.300900,156800.000000 +-120.990000,39.180000,23.000000,2550.000000,457.000000,1016.000000,405.000000,3.660700,153000.000000 +-120.990000,39.130000,14.000000,770.000000,116.000000,285.000000,116.000000,3.643400,155400.000000 +-120.990000,39.040000,17.000000,2289.000000,450.000000,1182.000000,397.000000,2.369600,166800.000000 +-120.990000,38.690000,5.000000,5743.000000,1074.000000,2651.000000,962.000000,4.116300,172500.000000 +-120.990000,38.670000,8.000000,4913.000000,744.000000,2005.000000,723.000000,5.441300,187900.000000 +-120.990000,37.800000,32.000000,2564.000000,513.000000,1198.000000,459.000000,2.908300,113400.000000 +-120.990000,37.690000,25.000000,2773.000000,384.000000,1060.000000,381.000000,6.478800,199400.000000 +-120.990000,37.680000,30.000000,1975.000000,375.000000,732.000000,326.000000,2.693200,94900.000000 +-120.990000,37.680000,28.000000,3269.000000,647.000000,1595.000000,617.000000,2.233600,112700.000000 +-120.990000,37.670000,28.000000,1768.000000,423.000000,1066.000000,392.000000,1.831500,90500.000000 +-120.990000,37.670000,16.000000,568.000000,124.000000,307.000000,116.000000,2.151800,107400.000000 +-120.990000,37.660000,46.000000,1750.000000,347.000000,754.000000,356.000000,2.913700,106000.000000 +-120.990000,37.650000,44.000000,2848.000000,623.000000,1408.000000,576.000000,2.148700,86600.000000 +-120.990000,37.640000,50.000000,683.000000,189.000000,459.000000,195.000000,1.816200,70000.000000 +-120.990000,37.640000,41.000000,1580.000000,385.000000,881.000000,361.000000,2.753800,99600.000000 +-120.990000,37.630000,21.000000,319.000000,120.000000,276.000000,85.000000,2.479200,60000.000000 +-120.990000,37.620000,37.000000,2014.000000,505.000000,1787.000000,515.000000,1.551500,54100.000000 +-121.000000,39.750000,8.000000,1116.000000,214.000000,27.000000,39.000000,2.589300,83000.000000 +-121.000000,39.260000,14.000000,810.000000,151.000000,302.000000,138.000000,3.109400,136100.000000 +-121.000000,39.230000,15.000000,2809.000000,450.000000,1267.000000,408.000000,4.042600,191700.000000 +-121.000000,39.160000,10.000000,1170.000000,225.000000,537.000000,194.000000,3.281300,163200.000000 +-121.000000,39.090000,7.000000,439.000000,84.000000,246.000000,80.000000,3.078100,162500.000000 
+-121.000000,39.000000,4.000000,170.000000,23.000000,93.000000,27.000000,10.989100,312500.000000 +-121.000000,38.580000,12.000000,3425.000000,549.000000,1357.000000,451.000000,5.334400,217500.000000 +-121.000000,37.800000,13.000000,4030.000000,744.000000,2248.000000,766.000000,3.610700,141300.000000 +-121.000000,37.710000,52.000000,102.000000,23.000000,35.000000,33.000000,2.250000,175000.000000 +-121.000000,37.690000,18.000000,3469.000000,661.000000,1452.000000,628.000000,3.407900,147500.000000 +-121.000000,37.680000,29.000000,2911.000000,445.000000,1170.000000,460.000000,4.990400,158100.000000 +-121.000000,37.680000,15.000000,1232.000000,180.000000,408.000000,196.000000,6.968200,182400.000000 +-121.000000,37.670000,27.000000,2278.000000,479.000000,995.000000,449.000000,2.514800,110200.000000 +-121.000000,37.670000,26.000000,90.000000,18.000000,47.000000,18.000000,1.125000,87500.000000 +-121.000000,37.660000,43.000000,2039.000000,331.000000,875.000000,342.000000,3.984400,152000.000000 +-121.000000,37.650000,52.000000,3887.000000,803.000000,1768.000000,779.000000,2.508900,119000.000000 +-121.000000,37.650000,17.000000,484.000000,202.000000,198.000000,204.000000,0.682500,187500.000000 +-121.000000,37.640000,43.000000,311.000000,95.000000,293.000000,94.000000,1.290200,67500.000000 +-121.000000,37.640000,19.000000,121.000000,41.000000,658.000000,41.000000,0.957300,162500.000000 +-121.000000,37.630000,49.000000,2051.000000,500.000000,1525.000000,467.000000,1.590000,80900.000000 +-121.000000,37.630000,31.000000,215.000000,62.000000,192.000000,66.000000,1.750000,73800.000000 +-121.000000,37.620000,28.000000,1153.000000,420.000000,1043.000000,357.000000,1.080100,75000.000000 +-121.000000,37.260000,45.000000,1750.000000,371.000000,847.000000,354.000000,1.706200,77400.000000 +-121.000000,37.250000,31.000000,1923.000000,341.000000,806.000000,349.000000,3.173800,97600.000000 +-121.000000,37.250000,21.000000,1937.000000,389.000000,1002.000000,373.000000,2.608700,96200.000000 +-121.000000,35.940000,16.000000,3077.000000,628.000000,1479.000000,536.000000,3.372400,114600.000000 +-121.010000,38.730000,7.000000,6322.000000,1046.000000,2957.000000,1024.000000,4.727600,197500.000000 +-121.010000,37.740000,14.000000,2368.000000,297.000000,796.000000,301.000000,8.778300,435000.000000 +-121.010000,37.700000,12.000000,9148.000000,1906.000000,4656.000000,1853.000000,3.244700,142200.000000 +-121.010000,37.690000,20.000000,3275.000000,760.000000,1538.000000,705.000000,2.480000,135600.000000 +-121.010000,37.680000,33.000000,828.000000,123.000000,373.000000,133.000000,5.500000,146200.000000 +-121.010000,37.680000,33.000000,3230.000000,587.000000,1579.000000,560.000000,3.577500,109700.000000 +-121.010000,37.670000,37.000000,2483.000000,459.000000,1072.000000,445.000000,3.072100,108100.000000 +-121.010000,37.660000,36.000000,3679.000000,613.000000,1366.000000,581.000000,4.500000,151400.000000 +-121.010000,37.650000,52.000000,178.000000,53.000000,152.000000,62.000000,0.499900,82500.000000 +-121.010000,37.640000,52.000000,201.000000,35.000000,74.000000,22.000000,1.303600,75000.000000 +-121.010000,37.640000,36.000000,1981.000000,507.000000,1998.000000,468.000000,1.901300,69900.000000 +-121.010000,37.640000,33.000000,693.000000,207.000000,598.000000,192.000000,1.021700,81300.000000 +-121.010000,37.630000,41.000000,2764.000000,639.000000,2122.000000,600.000000,1.964300,74900.000000 +-121.010000,37.620000,35.000000,568.000000,150.000000,622.000000,145.000000,1.816700,79500.000000 
+-121.010000,37.620000,35.000000,2074.000000,477.000000,1687.000000,431.000000,2.088500,73700.000000 +-121.010000,37.610000,5.000000,3655.000000,696.000000,2316.000000,647.000000,3.470300,129300.000000 +-121.010000,37.370000,41.000000,1045.000000,233.000000,632.000000,230.000000,2.358300,95000.000000 +-121.010000,37.330000,17.000000,1926.000000,410.000000,1054.000000,321.000000,1.621400,71500.000000 +-121.020000,40.510000,17.000000,890.000000,167.000000,406.000000,154.000000,3.303600,78300.000000 +-121.020000,39.270000,52.000000,3720.000000,707.000000,1424.000000,609.000000,3.200000,155000.000000 +-121.020000,39.250000,52.000000,1549.000000,275.000000,604.000000,249.000000,2.227800,155400.000000 +-121.020000,39.230000,16.000000,1427.000000,319.000000,642.000000,333.000000,1.424100,125000.000000 +-121.020000,39.170000,17.000000,2277.000000,459.000000,1149.000000,476.000000,3.230300,149500.000000 +-121.020000,39.010000,17.000000,4786.000000,799.000000,2066.000000,770.000000,3.973400,185400.000000 +-121.020000,38.660000,4.000000,7392.000000,1155.000000,3096.000000,1065.000000,4.524600,198900.000000 +-121.020000,37.710000,25.000000,207.000000,41.000000,87.000000,43.000000,3.602300,131300.000000 +-121.020000,37.700000,16.000000,3476.000000,650.000000,2126.000000,665.000000,3.343800,125400.000000 +-121.020000,37.690000,19.000000,3814.000000,790.000000,2219.000000,804.000000,3.520800,145000.000000 +-121.020000,37.660000,28.000000,1437.000000,400.000000,806.000000,338.000000,1.607800,125000.000000 +-121.020000,37.650000,20.000000,2973.000000,620.000000,1996.000000,570.000000,3.064500,106000.000000 +-121.020000,37.640000,42.000000,1437.000000,307.000000,1035.000000,284.000000,2.103600,88300.000000 +-121.020000,37.630000,35.000000,1591.000000,364.000000,1290.000000,352.000000,1.564000,81800.000000 +-121.020000,37.620000,30.000000,1721.000000,399.000000,1878.000000,382.000000,2.536300,83900.000000 +-121.020000,37.620000,14.000000,5737.000000,1286.000000,4722.000000,1210.000000,1.673100,95800.000000 +-121.020000,37.610000,33.000000,1469.000000,370.000000,1318.000000,349.000000,1.710400,59000.000000 +-121.020000,37.600000,33.000000,1009.000000,238.000000,1027.000000,246.000000,2.599300,68000.000000 +-121.020000,37.580000,36.000000,1285.000000,270.000000,706.000000,273.000000,1.716900,121400.000000 +-121.020000,37.480000,26.000000,467.000000,93.000000,244.000000,83.000000,4.134600,187500.000000 +-121.020000,37.090000,17.000000,1118.000000,270.000000,560.000000,244.000000,2.021600,112500.000000 +-121.020000,36.940000,33.000000,1541.000000,313.000000,880.000000,272.000000,2.507400,117700.000000 +-121.020000,36.240000,12.000000,2198.000000,507.000000,1971.000000,502.000000,2.680100,100000.000000 +-121.030000,40.350000,52.000000,5486.000000,1044.000000,1977.000000,754.000000,2.183300,49500.000000 +-121.030000,39.370000,15.000000,1337.000000,326.000000,1172.000000,306.000000,2.634100,85000.000000 +-121.030000,39.260000,49.000000,3739.000000,759.000000,1422.000000,606.000000,2.428300,143100.000000 +-121.030000,39.210000,28.000000,2843.000000,535.000000,1310.000000,525.000000,3.233700,123100.000000 +-121.030000,39.140000,10.000000,3138.000000,524.000000,1275.000000,511.000000,4.077500,164500.000000 +-121.030000,39.050000,12.000000,1875.000000,307.000000,806.000000,283.000000,3.918500,195200.000000 +-121.030000,37.690000,6.000000,2607.000000,557.000000,1266.000000,475.000000,3.463200,137700.000000 +-121.030000,37.690000,5.000000,4034.000000,771.000000,1967.000000,742.000000,3.806500,146000.000000 
+-121.030000,37.680000,27.000000,1956.000000,327.000000,1004.000000,307.000000,3.785700,110500.000000 +-121.030000,37.680000,20.000000,3204.000000,625.000000,2016.000000,605.000000,2.656700,110400.000000 +-121.030000,37.660000,31.000000,887.000000,217.000000,614.000000,199.000000,2.152800,75500.000000 +-121.030000,37.650000,37.000000,375.000000,58.000000,120.000000,37.000000,3.984400,150000.000000 +-121.030000,37.640000,22.000000,2001.000000,387.000000,1520.000000,387.000000,3.148000,102300.000000 +-121.030000,37.630000,5.000000,2881.000000,584.000000,1490.000000,570.000000,3.039800,120000.000000 +-121.030000,37.620000,46.000000,2331.000000,508.000000,1210.000000,484.000000,2.531300,77700.000000 +-121.030000,37.620000,43.000000,1241.000000,240.000000,612.000000,266.000000,2.819400,81300.000000 +-121.030000,37.550000,32.000000,946.000000,198.000000,624.000000,173.000000,1.972800,97900.000000 +-121.030000,37.330000,27.000000,1333.000000,230.000000,730.000000,229.000000,3.060000,106000.000000 +-121.030000,37.320000,42.000000,2905.000000,561.000000,1457.000000,551.000000,2.256600,82100.000000 +-121.040000,39.220000,14.000000,1889.000000,471.000000,853.000000,399.000000,2.250000,112500.000000 +-121.040000,39.190000,17.000000,856.000000,167.000000,518.000000,170.000000,3.585900,144300.000000 +-121.040000,39.080000,8.000000,2870.000000,526.000000,1307.000000,451.000000,3.463000,201700.000000 +-121.040000,39.000000,21.000000,4059.000000,730.000000,1874.000000,693.000000,4.805100,174300.000000 +-121.040000,38.810000,11.000000,3522.000000,623.000000,1456.000000,544.000000,3.930000,163400.000000 +-121.040000,37.780000,32.000000,2916.000000,528.000000,1466.000000,473.000000,2.564300,200000.000000 +-121.040000,37.700000,52.000000,349.000000,59.000000,121.000000,40.000000,3.303600,197500.000000 +-121.040000,37.690000,9.000000,6333.000000,1355.000000,3265.000000,1265.000000,3.021700,160900.000000 +-121.040000,37.690000,5.000000,9601.000000,1639.000000,4449.000000,1575.000000,4.533200,195500.000000 +-121.040000,37.680000,28.000000,1909.000000,398.000000,1140.000000,380.000000,2.378300,81400.000000 +-121.040000,37.680000,18.000000,5129.000000,1171.000000,3622.000000,1128.000000,2.027200,92700.000000 +-121.040000,37.660000,11.000000,1658.000000,301.000000,913.000000,298.000000,4.170500,162800.000000 +-121.040000,37.650000,8.000000,1959.000000,379.000000,995.000000,365.000000,3.356700,129100.000000 +-121.040000,37.600000,27.000000,958.000000,184.000000,580.000000,177.000000,2.187500,82800.000000 +-121.040000,37.500000,33.000000,613.000000,123.000000,343.000000,116.000000,3.187500,129200.000000 +-121.040000,37.300000,6.000000,2657.000000,486.000000,1409.000000,392.000000,3.382400,115500.000000 +-121.050000,39.230000,20.000000,1634.000000,374.000000,1053.000000,390.000000,1.531300,154900.000000 +-121.050000,39.210000,43.000000,1264.000000,273.000000,611.000000,260.000000,2.535000,117100.000000 +-121.050000,39.200000,48.000000,1759.000000,389.000000,716.000000,350.000000,2.312500,108300.000000 +-121.050000,39.130000,10.000000,3063.000000,497.000000,1168.000000,507.000000,4.437500,185100.000000 +-121.050000,39.110000,7.000000,2767.000000,423.000000,1143.000000,382.000000,3.633300,170200.000000 +-121.050000,38.970000,12.000000,3676.000000,550.000000,1572.000000,510.000000,4.821400,201900.000000 +-121.050000,38.140000,19.000000,3326.000000,561.000000,1544.000000,511.000000,2.987500,166300.000000 +-121.050000,37.930000,17.000000,2474.000000,480.000000,1649.000000,453.000000,3.275000,156500.000000 
+-121.050000,37.640000,33.000000,1438.000000,237.000000,569.000000,208.000000,3.351600,150000.000000 +-121.050000,37.620000,37.000000,1043.000000,196.000000,555.000000,197.000000,3.412500,125000.000000 +-121.060000,40.230000,23.000000,1127.000000,225.000000,215.000000,85.000000,3.484400,143800.000000 +-121.060000,39.290000,14.000000,1864.000000,331.000000,894.000000,332.000000,3.402800,171800.000000 +-121.060000,39.250000,17.000000,3127.000000,539.000000,1390.000000,520.000000,3.953700,172800.000000 +-121.060000,39.230000,10.000000,2229.000000,537.000000,982.000000,512.000000,2.186000,132700.000000 +-121.060000,39.210000,52.000000,1452.000000,309.000000,637.000000,299.000000,2.208300,103900.000000 +-121.060000,39.040000,15.000000,1999.000000,287.000000,585.000000,246.000000,5.516100,361900.000000 +-121.060000,39.040000,14.000000,1651.000000,279.000000,633.000000,261.000000,4.280200,194800.000000 +-121.060000,39.030000,11.000000,1887.000000,303.000000,775.000000,283.000000,3.841700,187200.000000 +-121.060000,38.980000,14.000000,2267.000000,355.000000,1140.000000,369.000000,4.701900,212800.000000 +-121.060000,38.910000,18.000000,6501.000000,1416.000000,2954.000000,1373.000000,2.537300,143000.000000 +-121.060000,38.880000,17.000000,7635.000000,1284.000000,3096.000000,1227.000000,4.291700,184300.000000 +-121.060000,38.510000,6.000000,6873.000000,959.000000,2354.000000,931.000000,6.886900,263100.000000 +-121.060000,37.860000,24.000000,1713.000000,328.000000,1258.000000,324.000000,2.683000,169400.000000 +-121.060000,37.700000,7.000000,9374.000000,1847.000000,4827.000000,1722.000000,3.462000,151900.000000 +-121.060000,37.670000,31.000000,906.000000,146.000000,383.000000,129.000000,3.416700,196900.000000 +-121.060000,37.660000,6.000000,3655.000000,598.000000,1993.000000,596.000000,4.605300,150100.000000 +-121.060000,37.450000,33.000000,1401.000000,299.000000,915.000000,282.000000,3.446400,162500.000000 +-121.060000,37.420000,52.000000,504.000000,96.000000,295.000000,97.000000,3.500000,73500.000000 +-121.060000,37.180000,30.000000,2603.000000,507.000000,1491.000000,473.000000,3.090900,123400.000000 +-121.060000,36.720000,23.000000,395.000000,70.000000,166.000000,52.000000,2.213200,100000.000000 +-121.070000,40.850000,17.000000,976.000000,202.000000,511.000000,175.000000,3.664100,80800.000000 +-121.070000,39.230000,39.000000,2099.000000,433.000000,929.000000,423.000000,1.988600,113800.000000 +-121.070000,39.220000,52.000000,2432.000000,495.000000,928.000000,435.000000,2.425000,121100.000000 +-121.070000,39.150000,15.000000,6828.000000,1319.000000,3002.000000,1318.000000,2.472600,143400.000000 +-121.070000,39.130000,8.000000,4839.000000,832.000000,1977.000000,762.000000,4.084800,155900.000000 +-121.070000,39.090000,17.000000,1878.000000,345.000000,892.000000,299.000000,2.886400,143100.000000 +-121.070000,39.050000,10.000000,1813.000000,311.000000,827.000000,287.000000,3.608700,182100.000000 +-121.070000,39.040000,9.000000,2374.000000,372.000000,884.000000,333.000000,4.504200,206400.000000 +-121.070000,38.940000,14.000000,1710.000000,294.000000,839.000000,297.000000,4.714300,150700.000000 +-121.070000,38.920000,15.000000,5301.000000,884.000000,2335.000000,831.000000,4.515000,164000.000000 +-121.070000,37.710000,39.000000,223.000000,37.000000,92.000000,37.000000,3.375000,212500.000000 +-121.080000,40.190000,11.000000,919.000000,199.000000,69.000000,43.000000,1.694400,137500.000000 +-121.080000,39.220000,30.000000,2188.000000,455.000000,1033.000000,437.000000,2.141900,105200.000000 
+-121.080000,39.180000,19.000000,2323.000000,397.000000,963.000000,379.000000,3.742600,162700.000000 +-121.080000,39.020000,13.000000,1839.000000,275.000000,752.000000,270.000000,4.203100,209600.000000 +-121.080000,38.950000,18.000000,1931.000000,380.000000,1271.000000,377.000000,2.746300,156100.000000 +-121.080000,38.930000,14.000000,4239.000000,824.000000,1729.000000,794.000000,2.427800,167700.000000 +-121.080000,38.900000,27.000000,3436.000000,755.000000,1568.000000,709.000000,2.427300,138400.000000 +-121.080000,38.850000,10.000000,2509.000000,422.000000,1037.000000,389.000000,6.000000,220100.000000 +-121.080000,38.670000,10.000000,2499.000000,331.000000,1040.000000,333.000000,6.844000,239600.000000 +-121.090000,40.300000,15.000000,1717.000000,336.000000,501.000000,206.000000,3.647700,113400.000000 +-121.090000,39.480000,25.000000,1665.000000,374.000000,845.000000,330.000000,1.560300,78100.000000 +-121.090000,39.230000,35.000000,2637.000000,511.000000,1181.000000,480.000000,2.781300,109200.000000 +-121.090000,38.680000,15.000000,5218.000000,711.000000,1949.000000,659.000000,4.708300,213300.000000 +-121.090000,38.190000,23.000000,762.000000,140.000000,358.000000,141.000000,2.454500,105000.000000 +-121.090000,37.670000,30.000000,1653.000000,285.000000,800.000000,291.000000,3.348200,220000.000000 +-121.090000,37.560000,32.000000,1717.000000,325.000000,1356.000000,307.000000,2.670500,91900.000000 +-121.090000,37.330000,40.000000,524.000000,112.000000,329.000000,96.000000,1.718800,112500.000000 +-121.100000,39.150000,10.000000,680.000000,143.000000,354.000000,140.000000,4.033300,161500.000000 +-121.100000,39.080000,13.000000,1110.000000,216.000000,602.000000,209.000000,2.588700,144400.000000 +-121.100000,39.000000,16.000000,1106.000000,195.000000,505.000000,187.000000,5.012600,192300.000000 +-121.100000,38.920000,21.000000,4064.000000,871.000000,1847.000000,859.000000,3.032100,135500.000000 +-121.100000,38.330000,14.000000,1357.000000,247.000000,695.000000,224.000000,4.197400,157800.000000 +-121.100000,37.800000,35.000000,1853.000000,331.000000,958.000000,340.000000,3.357800,149000.000000 +-121.100000,35.600000,20.000000,3389.000000,704.000000,1309.000000,520.000000,3.211200,204500.000000 +-121.110000,39.090000,16.000000,1000.000000,197.000000,508.000000,190.000000,2.306200,138800.000000 +-121.110000,38.950000,14.000000,3888.000000,890.000000,1830.000000,844.000000,1.823800,158600.000000 +-121.110000,38.910000,24.000000,2558.000000,423.000000,1149.000000,403.000000,4.067900,190500.000000 +-121.110000,38.040000,32.000000,1083.000000,188.000000,471.000000,178.000000,2.924100,187500.000000 +-121.110000,37.760000,22.000000,2606.000000,411.000000,1252.000000,397.000000,4.183300,192100.000000 +-121.110000,37.740000,11.000000,3886.000000,599.000000,1605.000000,529.000000,4.421300,182700.000000 +-121.110000,37.470000,12.000000,2263.000000,410.000000,913.000000,330.000000,3.579500,145600.000000 +-121.110000,37.430000,42.000000,412.000000,75.000000,227.000000,75.000000,2.500000,74200.000000 +-121.110000,35.520000,9.000000,6044.000000,1222.000000,2239.000000,972.000000,3.240000,264600.000000 +-121.120000,39.200000,9.000000,1431.000000,254.000000,681.000000,221.000000,3.045000,170400.000000 +-121.120000,39.030000,17.000000,838.000000,161.000000,388.000000,142.000000,3.656300,163500.000000 +-121.120000,38.000000,36.000000,683.000000,159.000000,505.000000,141.000000,3.426500,158900.000000 +-121.120000,37.730000,35.000000,1107.000000,227.000000,573.000000,210.000000,2.392400,102200.000000 
+-121.120000,37.480000,5.000000,4109.000000,820.000000,3062.000000,713.000000,3.239600,125200.000000 +-121.120000,36.210000,16.000000,1720.000000,473.000000,1427.000000,291.000000,2.110700,76200.000000 +-121.120000,35.580000,16.000000,4109.000000,798.000000,1298.000000,626.000000,3.479900,320800.000000 +-121.130000,38.870000,48.000000,1127.000000,188.000000,530.000000,186.000000,3.091700,128100.000000 +-121.130000,38.660000,2.000000,12360.000000,1747.000000,4438.000000,1470.000000,6.250300,222500.000000 +-121.130000,38.470000,16.000000,2574.000000,441.000000,1041.000000,428.000000,3.664500,203400.000000 +-121.130000,38.370000,10.000000,1034.000000,153.000000,478.000000,155.000000,7.032600,241100.000000 +-121.130000,37.740000,31.000000,677.000000,144.000000,523.000000,159.000000,2.459800,97100.000000 +-121.130000,37.740000,21.000000,2376.000000,475.000000,1175.000000,441.000000,3.601600,134600.000000 +-121.130000,37.730000,40.000000,1126.000000,220.000000,667.000000,235.000000,3.315800,125900.000000 +-121.130000,37.470000,37.000000,1995.000000,448.000000,1559.000000,443.000000,2.183300,92700.000000 +-121.130000,36.210000,27.000000,1476.000000,352.000000,1156.000000,358.000000,3.192900,137900.000000 +-121.130000,36.200000,16.000000,1868.000000,443.000000,1323.000000,436.000000,2.955900,163200.000000 +-121.140000,40.290000,17.000000,1944.000000,394.000000,384.000000,172.000000,1.687500,111500.000000 +-121.140000,39.860000,16.000000,2534.000000,557.000000,638.000000,244.000000,2.210100,88800.000000 +-121.140000,39.100000,13.000000,1085.000000,227.000000,629.000000,214.000000,5.038900,171500.000000 +-121.140000,38.920000,16.000000,2069.000000,312.000000,889.000000,299.000000,4.677100,212000.000000 +-121.140000,38.840000,22.000000,2750.000000,433.000000,1161.000000,428.000000,4.214300,236500.000000 +-121.140000,38.820000,22.000000,1816.000000,278.000000,832.000000,278.000000,5.070000,233000.000000 +-121.140000,38.160000,14.000000,2591.000000,497.000000,1371.000000,479.000000,3.577400,113900.000000 +-121.140000,37.700000,29.000000,1343.000000,223.000000,751.000000,225.000000,3.239100,187500.000000 +-121.140000,37.470000,38.000000,2427.000000,450.000000,1272.000000,474.000000,2.883300,115200.000000 +-121.140000,37.460000,4.000000,2919.000000,503.000000,1592.000000,491.000000,5.245200,161900.000000 +-121.140000,35.550000,13.000000,5383.000000,1070.000000,1880.000000,796.000000,3.801900,271200.000000 +-121.150000,40.250000,14.000000,5156.000000,880.000000,616.000000,281.000000,3.346200,145200.000000 +-121.150000,39.230000,13.000000,3883.000000,763.000000,1816.000000,682.000000,2.810200,144400.000000 +-121.150000,39.000000,15.000000,4145.000000,691.000000,1872.000000,680.000000,4.355300,220600.000000 +-121.150000,38.890000,20.000000,2024.000000,313.000000,879.000000,309.000000,5.290300,239400.000000 +-121.150000,38.800000,20.000000,2104.000000,370.000000,745.000000,314.000000,4.168500,217500.000000 +-121.150000,38.210000,18.000000,4176.000000,700.000000,2164.000000,699.000000,4.036500,174200.000000 +-121.160000,41.780000,42.000000,2918.000000,576.000000,1182.000000,440.000000,2.143400,44000.000000 +-121.160000,39.180000,14.000000,1006.000000,187.000000,462.000000,185.000000,3.104200,152000.000000 +-121.160000,38.750000,27.000000,771.000000,108.000000,315.000000,111.000000,8.488200,276600.000000 +-121.160000,38.740000,17.000000,3353.000000,463.000000,1417.000000,447.000000,5.172100,237100.000000 +-121.160000,38.160000,31.000000,1953.000000,366.000000,999.000000,316.000000,2.490600,122500.000000 
+-121.160000,38.030000,28.000000,253.000000,50.000000,201.000000,51.000000,1.473200,156300.000000 +-121.160000,37.730000,7.000000,4956.000000,941.000000,3006.000000,915.000000,3.442600,139000.000000 +-121.170000,38.760000,14.000000,2028.000000,255.000000,781.000000,251.000000,6.532200,394000.000000 +-121.170000,38.710000,15.000000,3084.000000,557.000000,1040.000000,562.000000,2.518300,293300.000000 +-121.170000,38.690000,5.000000,7138.000000,1227.000000,2623.000000,1139.000000,5.690200,243200.000000 +-121.170000,38.680000,37.000000,1252.000000,267.000000,686.000000,256.000000,3.000000,121900.000000 +-121.170000,37.820000,35.000000,2506.000000,406.000000,1316.000000,398.000000,3.847200,197600.000000 +-121.180000,41.310000,22.000000,2124.000000,432.000000,829.000000,313.000000,2.451900,57500.000000 +-121.180000,39.260000,14.000000,811.000000,161.000000,352.000000,121.000000,3.593800,140300.000000 +-121.180000,39.250000,9.000000,3415.000000,562.000000,1208.000000,479.000000,4.364600,185900.000000 +-121.180000,39.230000,8.000000,2112.000000,360.000000,782.000000,344.000000,3.712500,175000.000000 +-121.180000,39.190000,16.000000,1528.000000,351.000000,729.000000,319.000000,2.468800,138800.000000 +-121.180000,38.800000,18.000000,2541.000000,414.000000,1276.000000,405.000000,5.185700,220100.000000 +-121.180000,38.750000,16.000000,2807.000000,459.000000,1201.000000,429.000000,4.794100,247600.000000 +-121.180000,38.730000,16.000000,1584.000000,264.000000,613.000000,226.000000,6.030200,273100.000000 +-121.180000,38.690000,7.000000,7104.000000,970.000000,2772.000000,920.000000,6.352800,274500.000000 +-121.180000,38.670000,42.000000,2101.000000,480.000000,945.000000,426.000000,2.333300,116000.000000 +-121.180000,38.070000,21.000000,2333.000000,377.000000,1073.000000,332.000000,4.812500,161100.000000 +-121.180000,37.990000,31.000000,2450.000000,559.000000,1459.000000,478.000000,2.467400,130900.000000 +-121.180000,37.960000,35.000000,411.000000,74.000000,193.000000,59.000000,2.562500,146900.000000 +-121.180000,37.790000,16.000000,1326.000000,286.000000,509.000000,297.000000,1.946400,112500.000000 +-121.180000,37.640000,43.000000,1244.000000,209.000000,611.000000,197.000000,2.875000,187500.000000 +-121.190000,40.230000,10.000000,1572.000000,232.000000,247.000000,104.000000,5.845300,193800.000000 +-121.190000,39.550000,17.000000,1483.000000,284.000000,481.000000,211.000000,1.489600,83300.000000 +-121.190000,39.050000,14.000000,1131.000000,193.000000,520.000000,178.000000,3.900000,180400.000000 +-121.190000,38.950000,16.000000,2544.000000,431.000000,1199.000000,412.000000,4.512900,196300.000000 +-121.190000,38.850000,8.000000,4114.000000,710.000000,2268.000000,716.000000,4.408500,139400.000000 +-121.190000,38.710000,11.000000,4415.000000,723.000000,1520.000000,627.000000,3.232100,390800.000000 +-121.190000,38.670000,16.000000,1754.000000,284.000000,773.000000,277.000000,4.817000,147000.000000 +-121.190000,38.660000,26.000000,1937.000000,286.000000,769.000000,274.000000,6.118500,179200.000000 +-121.190000,38.130000,27.000000,2400.000000,435.000000,1085.000000,444.000000,3.768700,165200.000000 +-121.190000,38.040000,35.000000,703.000000,117.000000,290.000000,107.000000,3.225000,177100.000000 +-121.190000,37.930000,27.000000,1621.000000,363.000000,909.000000,345.000000,2.151300,99700.000000 +-121.190000,37.810000,8.000000,4019.000000,857.000000,1959.000000,782.000000,2.732100,175000.000000 +-121.200000,39.250000,5.000000,906.000000,144.000000,376.000000,141.000000,4.352300,188200.000000 
+-121.200000,39.200000,16.000000,1039.000000,182.000000,554.000000,184.000000,2.968800,128300.000000 +-121.200000,38.730000,11.000000,4897.000000,636.000000,1931.000000,616.000000,7.749900,334800.000000 +-121.200000,38.700000,28.000000,2970.000000,471.000000,1379.000000,463.000000,4.321400,131700.000000 +-121.200000,38.680000,9.000000,2200.000000,422.000000,938.000000,369.000000,3.489600,143800.000000 +-121.200000,38.660000,17.000000,1605.000000,217.000000,732.000000,241.000000,5.470000,204800.000000 +-121.200000,38.360000,14.000000,2634.000000,463.000000,1402.000000,432.000000,3.889700,175700.000000 +-121.200000,38.280000,20.000000,1732.000000,307.000000,999.000000,305.000000,3.980800,160200.000000 +-121.200000,38.020000,44.000000,608.000000,108.000000,287.000000,83.000000,3.388200,125000.000000 +-121.200000,37.970000,39.000000,440.000000,83.000000,270.000000,97.000000,6.058200,157700.000000 +-121.200000,37.830000,18.000000,3415.000000,580.000000,1912.000000,562.000000,4.442300,161400.000000 +-121.200000,37.810000,26.000000,395.000000,74.000000,193.000000,72.000000,7.371800,212500.000000 +-121.200000,37.800000,37.000000,311.000000,61.000000,171.000000,54.000000,4.097200,101800.000000 +-121.200000,37.800000,28.000000,3264.000000,576.000000,1512.000000,567.000000,3.754600,135300.000000 +-121.200000,37.800000,24.000000,1698.000000,344.000000,927.000000,313.000000,3.562500,130800.000000 +-121.200000,37.780000,4.000000,58.000000,29.000000,79.000000,29.000000,3.375000,106300.000000 +-121.200000,37.600000,30.000000,2110.000000,406.000000,1301.000000,345.000000,2.317300,86500.000000 +-121.200000,36.140000,12.000000,3738.000000,710.000000,2337.000000,664.000000,3.964700,135000.000000 +-121.210000,39.490000,18.000000,697.000000,150.000000,356.000000,114.000000,2.556800,77100.000000 +-121.210000,38.830000,21.000000,3691.000000,640.000000,1758.000000,603.000000,3.560700,151900.000000 +-121.210000,38.760000,16.000000,1608.000000,296.000000,792.000000,286.000000,3.158300,239200.000000 +-121.210000,38.750000,11.000000,4552.000000,639.000000,2006.000000,623.000000,4.396200,264400.000000 +-121.210000,38.670000,19.000000,2987.000000,626.000000,1610.000000,605.000000,3.053300,112100.000000 +-121.210000,38.670000,11.000000,5500.000000,956.000000,2827.000000,946.000000,4.107100,145800.000000 +-121.210000,38.660000,15.000000,6940.000000,1019.000000,2829.000000,990.000000,5.488900,232300.000000 +-121.210000,38.650000,14.000000,3443.000000,510.000000,1413.000000,505.000000,5.652900,196000.000000 +-121.210000,37.810000,8.000000,1883.000000,298.000000,999.000000,301.000000,5.193000,172100.000000 +-121.210000,37.810000,12.000000,3667.000000,640.000000,2173.000000,652.000000,5.036900,163900.000000 +-121.210000,37.800000,45.000000,370.000000,84.000000,167.000000,70.000000,1.485300,101800.000000 +-121.210000,37.800000,44.000000,300.000000,72.000000,160.000000,73.000000,2.178600,120800.000000 +-121.210000,37.800000,33.000000,1862.000000,429.000000,971.000000,389.000000,2.605300,99200.000000 +-121.210000,37.800000,31.000000,699.000000,186.000000,460.000000,170.000000,2.744300,94200.000000 +-121.210000,37.790000,33.000000,811.000000,185.000000,446.000000,198.000000,1.672400,96900.000000 +-121.210000,37.500000,34.000000,294.000000,49.000000,147.000000,47.000000,3.000000,162500.000000 +-121.220000,39.430000,17.000000,2254.000000,485.000000,1007.000000,433.000000,1.700000,92300.000000 +-121.220000,39.110000,14.000000,1405.000000,269.000000,660.000000,228.000000,3.080400,156800.000000 
+-121.220000,38.920000,19.000000,2531.000000,461.000000,1206.000000,429.000000,4.495800,192600.000000 +-121.220000,38.800000,11.000000,2521.000000,521.000000,1390.000000,477.000000,3.526500,124800.000000 +-121.220000,38.780000,8.000000,3418.000000,514.000000,1312.000000,409.000000,6.391400,218000.000000 +-121.220000,38.710000,23.000000,1843.000000,273.000000,818.000000,276.000000,4.469500,214700.000000 +-121.220000,38.680000,10.000000,6262.000000,1278.000000,2954.000000,1169.000000,3.450600,139000.000000 +-121.220000,38.670000,20.000000,1412.000000,226.000000,700.000000,227.000000,4.050000,130700.000000 +-121.220000,38.580000,25.000000,394.000000,94.000000,155.000000,83.000000,2.233000,55000.000000 +-121.220000,38.430000,20.000000,2054.000000,339.000000,934.000000,336.000000,4.536800,219300.000000 +-121.220000,38.400000,14.000000,2655.000000,441.000000,1277.000000,422.000000,4.698900,213800.000000 +-121.220000,38.160000,24.000000,4411.000000,776.000000,2038.000000,732.000000,3.475000,151200.000000 +-121.220000,38.040000,42.000000,343.000000,50.000000,116.000000,49.000000,5.537600,212500.000000 +-121.220000,38.000000,35.000000,1841.000000,300.000000,783.000000,285.000000,2.816700,162100.000000 +-121.220000,37.960000,31.000000,1484.000000,314.000000,1163.000000,336.000000,2.625000,72100.000000 +-121.220000,37.960000,30.000000,1737.000000,381.000000,1177.000000,347.000000,1.987500,56400.000000 +-121.220000,37.950000,30.000000,1055.000000,211.000000,629.000000,170.000000,2.867600,76900.000000 +-121.220000,37.930000,21.000000,336.000000,68.000000,206.000000,73.000000,4.750000,121400.000000 +-121.220000,37.820000,13.000000,4452.000000,949.000000,2740.000000,937.000000,3.196400,141500.000000 +-121.220000,37.810000,20.000000,1811.000000,352.000000,1191.000000,327.000000,4.012500,121500.000000 +-121.220000,37.800000,37.000000,1038.000000,222.000000,521.000000,211.000000,2.125000,91900.000000 +-121.220000,37.800000,28.000000,2608.000000,576.000000,1719.000000,554.000000,2.118600,94400.000000 +-121.220000,37.800000,13.000000,335.000000,89.000000,247.000000,77.000000,1.611100,74100.000000 +-121.220000,37.790000,5.000000,3107.000000,477.000000,1549.000000,443.000000,4.476600,169400.000000 +-121.220000,37.790000,38.000000,2152.000000,451.000000,1320.000000,457.000000,2.502500,101900.000000 +-121.220000,37.790000,36.000000,1052.000000,221.000000,712.000000,212.000000,1.722800,105000.000000 +-121.220000,37.720000,34.000000,2123.000000,387.000000,1310.000000,368.000000,2.636800,165600.000000 +-121.230000,40.290000,21.000000,3229.000000,667.000000,1501.000000,582.000000,2.152400,77100.000000 +-121.230000,40.010000,38.000000,725.000000,190.000000,219.000000,115.000000,1.625000,75000.000000 +-121.230000,39.270000,11.000000,1265.000000,224.000000,573.000000,205.000000,3.360300,162500.000000 +-121.230000,38.710000,18.000000,4947.000000,714.000000,2227.000000,675.000000,4.854200,170500.000000 +-121.230000,38.690000,19.000000,5268.000000,849.000000,2357.000000,849.000000,3.922600,148700.000000 +-121.230000,38.670000,27.000000,5266.000000,971.000000,2432.000000,948.000000,3.895400,133000.000000 +-121.230000,38.660000,19.000000,3243.000000,546.000000,1334.000000,515.000000,4.808800,169500.000000 +-121.230000,38.120000,22.000000,393.000000,58.000000,134.000000,57.000000,3.950000,178100.000000 +-121.230000,38.090000,23.000000,633.000000,91.000000,236.000000,83.000000,6.456200,230000.000000 +-121.230000,38.040000,32.000000,1829.000000,262.000000,677.000000,243.000000,6.180500,247900.000000 
+-121.230000,37.990000,38.000000,523.000000,80.000000,226.000000,72.000000,5.569300,153100.000000 +-121.230000,37.980000,27.000000,849.000000,137.000000,373.000000,131.000000,5.036200,181300.000000 +-121.230000,37.950000,36.000000,811.000000,168.000000,514.000000,152.000000,2.625000,89200.000000 +-121.230000,37.950000,32.000000,2081.000000,472.000000,1342.000000,411.000000,2.795800,59000.000000 +-121.230000,37.940000,20.000000,268.000000,78.000000,77.000000,49.000000,1.125000,150000.000000 +-121.230000,37.920000,28.000000,590.000000,129.000000,315.000000,99.000000,1.895800,85700.000000 +-121.230000,37.870000,49.000000,98.000000,24.000000,59.000000,26.000000,3.650000,162500.000000 +-121.230000,37.820000,8.000000,1289.000000,235.000000,867.000000,239.000000,4.682100,138500.000000 +-121.230000,37.820000,14.000000,1847.000000,325.000000,1030.000000,309.000000,4.927100,155300.000000 +-121.230000,37.810000,16.000000,2085.000000,342.000000,1236.000000,345.000000,5.559100,149300.000000 +-121.230000,37.810000,15.000000,2906.000000,537.000000,1886.000000,557.000000,4.243100,137100.000000 +-121.230000,37.800000,11.000000,2451.000000,665.000000,1155.000000,533.000000,2.225400,130800.000000 +-121.230000,37.790000,23.000000,1985.000000,424.000000,1198.000000,389.000000,2.773400,116800.000000 +-121.230000,37.790000,21.000000,1922.000000,373.000000,1130.000000,372.000000,4.081500,117900.000000 +-121.230000,37.780000,20.000000,273.000000,49.000000,149.000000,49.000000,4.822900,158300.000000 +-121.230000,36.330000,23.000000,2095.000000,536.000000,1858.000000,457.000000,3.054300,92400.000000 +-121.240000,40.310000,36.000000,1597.000000,301.000000,632.000000,262.000000,3.596200,93600.000000 +-121.240000,39.650000,35.000000,632.000000,148.000000,221.000000,102.000000,2.368400,62500.000000 +-121.240000,39.370000,16.000000,2785.000000,616.000000,1387.000000,530.000000,2.388600,89400.000000 +-121.240000,39.220000,14.000000,983.000000,163.000000,399.000000,161.000000,2.291700,145100.000000 +-121.240000,38.820000,5.000000,12259.000000,1643.000000,4819.000000,1582.000000,5.449800,217300.000000 +-121.240000,38.790000,23.000000,1419.000000,261.000000,706.000000,269.000000,3.187500,110200.000000 +-121.240000,38.790000,15.000000,2615.000000,485.000000,1063.000000,428.000000,3.790400,173200.000000 +-121.240000,38.780000,18.000000,549.000000,143.000000,249.000000,136.000000,0.869100,136500.000000 +-121.240000,38.780000,11.000000,1851.000000,352.000000,1049.000000,369.000000,3.528800,141100.000000 +-121.240000,38.750000,5.000000,9137.000000,1368.000000,3667.000000,1294.000000,5.489600,229600.000000 +-121.240000,38.720000,12.000000,3605.000000,576.000000,1556.000000,549.000000,4.900000,203700.000000 +-121.240000,38.700000,13.000000,3243.000000,488.000000,1585.000000,480.000000,5.713300,166800.000000 +-121.240000,38.680000,20.000000,1402.000000,236.000000,676.000000,236.000000,3.742600,135500.000000 +-121.240000,38.670000,28.000000,3558.000000,589.000000,1742.000000,581.000000,4.018200,131700.000000 +-121.240000,38.660000,14.000000,3335.000000,440.000000,1329.000000,429.000000,6.208200,250300.000000 +-121.240000,38.640000,13.000000,4491.000000,689.000000,1657.000000,667.000000,5.259000,249400.000000 +-121.240000,38.220000,28.000000,2593.000000,487.000000,1365.000000,457.000000,3.392900,113000.000000 +-121.240000,38.010000,22.000000,1526.000000,299.000000,790.000000,300.000000,2.434200,125000.000000 +-121.240000,38.000000,25.000000,1471.000000,300.000000,721.000000,304.000000,2.468800,126800.000000 
+-121.240000,37.980000,33.000000,450.000000,123.000000,236.000000,103.000000,1.196400,80400.000000 +-121.240000,37.970000,47.000000,886.000000,196.000000,517.000000,188.000000,2.199100,67200.000000 +-121.240000,37.960000,37.000000,1175.000000,260.000000,951.000000,267.000000,2.875000,57700.000000 +-121.240000,37.960000,29.000000,874.000000,217.000000,788.000000,222.000000,1.918700,57700.000000 +-121.240000,37.950000,36.000000,361.000000,63.000000,169.000000,62.000000,3.773400,63800.000000 +-121.240000,37.940000,5.000000,2232.000000,488.000000,1857.000000,435.000000,2.870500,113600.000000 +-121.240000,37.930000,21.000000,1185.000000,237.000000,960.000000,245.000000,2.089300,65000.000000 +-121.240000,37.900000,16.000000,50.000000,10.000000,20.000000,6.000000,2.625000,137500.000000 +-121.240000,37.820000,9.000000,6169.000000,959.000000,3378.000000,945.000000,5.104700,157900.000000 +-121.240000,37.810000,6.000000,3883.000000,800.000000,2319.000000,787.000000,3.559500,161000.000000 +-121.240000,37.790000,7.000000,5151.000000,867.000000,2553.000000,805.000000,4.075000,195000.000000 +-121.240000,36.340000,33.000000,1691.000000,308.000000,792.000000,262.000000,2.664800,164600.000000 +-121.240000,36.330000,13.000000,1642.000000,418.000000,1534.000000,388.000000,3.122200,125500.000000 +-121.250000,40.270000,25.000000,958.000000,245.000000,28.000000,16.000000,2.625000,67500.000000 +-121.250000,39.170000,9.000000,999.000000,189.000000,411.000000,176.000000,2.125000,151800.000000 +-121.250000,38.800000,14.000000,5094.000000,729.000000,1974.000000,705.000000,5.520500,188700.000000 +-121.250000,38.720000,15.000000,6838.000000,941.000000,3166.000000,926.000000,5.217700,162700.000000 +-121.250000,38.720000,10.000000,7277.000000,1168.000000,3507.000000,1131.000000,4.485000,179400.000000 +-121.250000,38.710000,14.000000,3713.000000,637.000000,1845.000000,635.000000,4.300900,143400.000000 +-121.250000,38.690000,24.000000,1014.000000,185.000000,606.000000,194.000000,4.160700,112800.000000 +-121.250000,38.690000,17.000000,3050.000000,481.000000,1490.000000,489.000000,4.556200,134500.000000 +-121.250000,38.680000,15.000000,1497.000000,243.000000,730.000000,242.000000,4.968800,135600.000000 +-121.250000,38.680000,13.000000,503.000000,70.000000,267.000000,77.000000,6.194300,276100.000000 +-121.250000,38.670000,14.000000,6155.000000,1034.000000,2407.000000,941.000000,4.226200,244300.000000 +-121.250000,38.660000,26.000000,3670.000000,556.000000,1616.000000,550.000000,5.020000,169600.000000 +-121.250000,38.640000,21.000000,2764.000000,363.000000,902.000000,360.000000,5.686400,258700.000000 +-121.250000,38.140000,16.000000,1174.000000,242.000000,464.000000,261.000000,2.300000,133300.000000 +-121.250000,38.130000,25.000000,1305.000000,270.000000,789.000000,235.000000,3.299300,91100.000000 +-121.250000,38.070000,28.000000,2103.000000,422.000000,1167.000000,391.000000,3.059200,152800.000000 +-121.250000,38.050000,25.000000,1967.000000,362.000000,1035.000000,361.000000,3.573500,106800.000000 +-121.250000,38.040000,26.000000,3080.000000,473.000000,1257.000000,465.000000,4.986100,201800.000000 +-121.250000,38.030000,29.000000,2465.000000,327.000000,859.000000,315.000000,6.660500,220700.000000 +-121.250000,38.010000,16.000000,2397.000000,501.000000,1053.000000,557.000000,2.699400,112500.000000 +-121.250000,38.000000,21.000000,446.000000,73.000000,182.000000,57.000000,2.895800,135000.000000 +-121.250000,37.980000,39.000000,1765.000000,414.000000,1056.000000,414.000000,1.532900,48300.000000 
+-121.250000,37.970000,41.000000,855.000000,189.000000,716.000000,206.000000,2.037500,75000.000000 +-121.250000,37.970000,34.000000,1288.000000,344.000000,846.000000,293.000000,1.789500,63100.000000 +-121.250000,37.960000,26.000000,2205.000000,478.000000,1730.000000,472.000000,2.486600,68300.000000 +-121.250000,37.950000,46.000000,2001.000000,428.000000,1384.000000,401.000000,1.940200,62200.000000 +-121.250000,37.950000,40.000000,1703.000000,362.000000,1208.000000,373.000000,2.081700,55300.000000 +-121.250000,37.940000,30.000000,1509.000000,308.000000,967.000000,278.000000,1.779800,65900.000000 +-121.250000,37.940000,28.000000,964.000000,232.000000,782.000000,218.000000,2.326900,55900.000000 +-121.250000,37.930000,31.000000,1673.000000,382.000000,1734.000000,400.000000,2.083300,48300.000000 +-121.250000,37.920000,19.000000,2109.000000,427.000000,1742.000000,426.000000,2.409700,66000.000000 +-121.250000,37.760000,22.000000,2430.000000,417.000000,1292.000000,391.000000,3.400900,182400.000000 +-121.260000,38.740000,22.000000,7173.000000,1314.000000,3526.000000,1316.000000,3.394100,135900.000000 +-121.260000,38.730000,14.000000,3323.000000,499.000000,1527.000000,540.000000,5.345100,172100.000000 +-121.260000,38.700000,9.000000,7812.000000,1348.000000,3275.000000,1178.000000,4.382600,146600.000000 +-121.260000,38.690000,17.000000,3917.000000,638.000000,1809.000000,564.000000,5.258600,137000.000000 +-121.260000,38.680000,4.000000,3080.000000,827.000000,1195.000000,683.000000,2.747700,133000.000000 +-121.260000,38.680000,13.000000,4256.000000,619.000000,1948.000000,622.000000,5.205100,167400.000000 +-121.260000,38.670000,18.000000,1830.000000,313.000000,905.000000,361.000000,4.227300,141800.000000 +-121.260000,38.660000,8.000000,1145.000000,241.000000,447.000000,216.000000,4.078100,124300.000000 +-121.260000,38.660000,19.000000,3170.000000,444.000000,1344.000000,452.000000,6.118300,221600.000000 +-121.260000,38.650000,17.000000,2655.000000,421.000000,991.000000,384.000000,4.648400,270600.000000 +-121.260000,38.640000,40.000000,1098.000000,175.000000,415.000000,160.000000,4.837500,217400.000000 +-121.260000,38.140000,10.000000,3371.000000,665.000000,1823.000000,654.000000,3.533300,116800.000000 +-121.260000,38.130000,38.000000,1419.000000,411.000000,1226.000000,397.000000,2.218800,68800.000000 +-121.260000,38.130000,25.000000,2549.000000,675.000000,2053.000000,648.000000,2.087500,83100.000000 +-121.260000,38.120000,27.000000,1818.000000,459.000000,1182.000000,428.000000,1.857500,73800.000000 +-121.260000,38.110000,8.000000,2770.000000,642.000000,1611.000000,633.000000,3.128400,115100.000000 +-121.260000,38.110000,4.000000,2058.000000,366.000000,933.000000,316.000000,4.244800,150900.000000 +-121.260000,38.090000,35.000000,930.000000,186.000000,525.000000,201.000000,2.062500,155000.000000 +-121.260000,37.990000,27.000000,429.000000,102.000000,179.000000,90.000000,2.333300,87500.000000 +-121.260000,37.980000,41.000000,1633.000000,433.000000,885.000000,413.000000,0.978200,54200.000000 +-121.260000,37.980000,32.000000,3274.000000,820.000000,2050.000000,738.000000,2.126500,55700.000000 +-121.260000,37.970000,41.000000,2398.000000,448.000000,1143.000000,444.000000,3.035200,69800.000000 +-121.260000,37.970000,31.000000,1189.000000,295.000000,891.000000,292.000000,2.553600,50500.000000 +-121.260000,37.960000,43.000000,940.000000,208.000000,690.000000,181.000000,2.305600,62300.000000 +-121.260000,37.960000,43.000000,527.000000,133.000000,367.000000,152.000000,2.500000,63600.000000 
+-121.260000,37.960000,40.000000,535.000000,105.000000,335.000000,102.000000,2.523400,62800.000000 +-121.260000,37.960000,35.000000,1511.000000,316.000000,892.000000,304.000000,1.789800,63500.000000 +-121.260000,37.950000,44.000000,819.000000,184.000000,677.000000,183.000000,1.725000,59300.000000 +-121.260000,37.950000,39.000000,1841.000000,428.000000,1368.000000,390.000000,2.158300,62000.000000 +-121.260000,37.940000,43.000000,1610.000000,412.000000,1409.000000,365.000000,1.757400,51700.000000 +-121.260000,37.930000,33.000000,2109.000000,531.000000,2248.000000,588.000000,1.458300,53000.000000 +-121.260000,37.880000,42.000000,465.000000,93.000000,256.000000,93.000000,3.171900,158300.000000 +-121.260000,36.320000,30.000000,146.000000,41.000000,164.000000,40.000000,2.300000,206300.000000 +-121.270000,38.870000,16.000000,2094.000000,358.000000,1092.000000,357.000000,4.476900,191400.000000 +-121.270000,38.750000,43.000000,1292.000000,307.000000,647.000000,249.000000,2.718800,85300.000000 +-121.270000,38.750000,21.000000,4812.000000,1117.000000,1985.000000,1045.000000,2.508300,128500.000000 +-121.270000,38.740000,19.000000,3869.000000,887.000000,2086.000000,685.000000,2.606500,154900.000000 +-121.270000,38.720000,6.000000,4664.000000,644.000000,2105.000000,663.000000,6.080400,198700.000000 +-121.270000,38.710000,16.000000,4082.000000,666.000000,1912.000000,652.000000,4.460900,142900.000000 +-121.270000,38.700000,16.000000,3747.000000,586.000000,1817.000000,590.000000,4.648800,145300.000000 +-121.270000,38.690000,16.000000,3389.000000,597.000000,1674.000000,568.000000,4.448900,145600.000000 +-121.270000,38.670000,16.000000,3185.000000,886.000000,1550.000000,802.000000,2.519900,149000.000000 +-121.270000,38.670000,15.000000,2116.000000,524.000000,866.000000,519.000000,2.738800,111600.000000 +-121.270000,38.670000,15.000000,1701.000000,346.000000,723.000000,352.000000,3.890600,128700.000000 +-121.270000,38.660000,19.000000,1891.000000,266.000000,678.000000,255.000000,6.187200,188700.000000 +-121.270000,38.650000,33.000000,1984.000000,289.000000,842.000000,276.000000,5.294900,173300.000000 +-121.270000,38.440000,19.000000,2780.000000,414.000000,1320.000000,404.000000,5.883100,209900.000000 +-121.270000,38.310000,17.000000,1144.000000,202.000000,626.000000,178.000000,4.410700,151600.000000 +-121.270000,38.140000,40.000000,929.000000,257.000000,576.000000,229.000000,2.125000,137500.000000 +-121.270000,38.130000,52.000000,1081.000000,257.000000,437.000000,225.000000,2.197900,114100.000000 +-121.270000,38.130000,40.000000,2402.000000,509.000000,1197.000000,486.000000,2.177100,98200.000000 +-121.270000,38.130000,39.000000,2614.000000,634.000000,1862.000000,654.000000,1.923800,70700.000000 +-121.270000,38.130000,35.000000,2607.000000,685.000000,2016.000000,618.000000,1.750000,82900.000000 +-121.270000,38.120000,44.000000,2356.000000,482.000000,1043.000000,443.000000,2.494900,108000.000000 +-121.270000,38.120000,37.000000,2232.000000,504.000000,1455.000000,471.000000,2.558700,87800.000000 +-121.270000,38.110000,15.000000,2039.000000,384.000000,1178.000000,375.000000,3.867200,120100.000000 +-121.270000,38.110000,11.000000,3163.000000,794.000000,2106.000000,762.000000,2.448200,103000.000000 +-121.270000,38.050000,26.000000,378.000000,75.000000,164.000000,65.000000,3.410700,82800.000000 +-121.270000,38.020000,32.000000,342.000000,58.000000,138.000000,52.000000,2.982100,155000.000000 +-121.270000,37.980000,43.000000,1005.000000,200.000000,492.000000,172.000000,2.681200,72800.000000 
+-121.270000,37.970000,39.000000,1023.000000,243.000000,550.000000,224.000000,1.114100,54400.000000 +-121.270000,37.960000,52.000000,583.000000,114.000000,310.000000,93.000000,2.562500,54200.000000 +-121.270000,37.960000,43.000000,948.000000,221.000000,749.000000,208.000000,1.962000,52700.000000 +-121.270000,37.960000,43.000000,1624.000000,448.000000,1805.000000,440.000000,1.425000,61300.000000 +-121.270000,37.960000,41.000000,461.000000,101.000000,382.000000,79.000000,1.275000,54000.000000 +-121.270000,37.950000,52.000000,1318.000000,308.000000,1368.000000,310.000000,1.826100,54600.000000 +-121.270000,37.950000,43.000000,557.000000,165.000000,573.000000,144.000000,1.721200,59000.000000 +-121.270000,37.940000,38.000000,512.000000,133.000000,676.000000,124.000000,1.738600,52000.000000 +-121.270000,37.930000,24.000000,1451.000000,320.000000,1413.000000,283.000000,2.125000,61200.000000 +-121.270000,37.880000,43.000000,968.000000,249.000000,664.000000,240.000000,1.645800,83600.000000 +-121.270000,37.870000,34.000000,1010.000000,206.000000,678.000000,234.000000,2.953100,104000.000000 +-121.270000,37.820000,26.000000,1170.000000,238.000000,830.000000,216.000000,2.645800,127500.000000 +-121.270000,37.790000,16.000000,1853.000000,390.000000,1013.000000,362.000000,2.708300,173900.000000 +-121.270000,37.560000,31.000000,1223.000000,330.000000,1067.000000,245.000000,2.855800,100000.000000 +-121.280000,38.900000,31.000000,1297.000000,259.000000,765.000000,240.000000,2.765600,93600.000000 +-121.280000,38.800000,7.000000,9003.000000,1739.000000,4445.000000,1591.000000,3.816000,147900.000000 +-121.280000,38.750000,52.000000,493.000000,89.000000,189.000000,94.000000,2.108000,83800.000000 +-121.280000,38.730000,6.000000,4223.000000,672.000000,1747.000000,631.000000,5.419000,267400.000000 +-121.280000,38.710000,8.000000,4053.000000,912.000000,2033.000000,897.000000,2.897300,117100.000000 +-121.280000,38.710000,35.000000,3095.000000,594.000000,1550.000000,576.000000,3.575000,113500.000000 +-121.280000,38.700000,15.000000,5828.000000,1051.000000,2868.000000,1037.000000,3.781300,143200.000000 +-121.280000,38.700000,14.000000,5827.000000,1246.000000,2578.000000,1038.000000,3.021200,112900.000000 +-121.280000,38.680000,16.000000,3467.000000,615.000000,1478.000000,601.000000,3.750000,147300.000000 +-121.280000,38.680000,14.000000,11442.000000,2690.000000,6068.000000,2435.000000,2.601600,121200.000000 +-121.280000,38.670000,23.000000,1727.000000,264.000000,833.000000,258.000000,5.479700,160000.000000 +-121.280000,38.660000,17.000000,7741.000000,1401.000000,3153.000000,1331.000000,3.786900,216100.000000 +-121.280000,38.640000,24.000000,3459.000000,573.000000,1336.000000,544.000000,4.866100,186200.000000 +-121.280000,38.640000,19.000000,3574.000000,669.000000,1373.000000,643.000000,3.629800,242100.000000 +-121.280000,38.630000,36.000000,120.000000,16.000000,30.000000,14.000000,10.226400,350000.000000 +-121.280000,38.610000,23.000000,2547.000000,504.000000,1235.000000,469.000000,2.472200,103300.000000 +-121.280000,38.610000,22.000000,2938.000000,619.000000,1501.000000,561.000000,2.735600,96100.000000 +-121.280000,38.600000,25.000000,1122.000000,198.000000,564.000000,213.000000,3.165400,111600.000000 +-121.280000,38.600000,17.000000,1671.000000,378.000000,848.000000,351.000000,3.119400,112500.000000 +-121.280000,38.590000,3.000000,4188.000000,1136.000000,2081.000000,995.000000,3.048100,92500.000000 +-121.280000,38.550000,35.000000,7088.000000,1279.000000,4885.000000,1272.000000,2.698100,112500.000000 
+-121.280000,38.170000,19.000000,1337.000000,236.000000,744.000000,225.000000,4.092400,244200.000000 +-121.280000,38.140000,37.000000,3278.000000,623.000000,1431.000000,575.000000,3.398700,99500.000000 +-121.280000,38.130000,48.000000,1892.000000,333.000000,804.000000,352.000000,4.062500,143200.000000 +-121.280000,38.130000,32.000000,3366.000000,676.000000,1916.000000,697.000000,2.540100,125400.000000 +-121.280000,38.120000,34.000000,3268.000000,640.000000,1906.000000,628.000000,2.823700,110700.000000 +-121.280000,38.110000,10.000000,2974.000000,588.000000,1559.000000,568.000000,3.882500,136800.000000 +-121.280000,38.100000,13.000000,2432.000000,586.000000,1441.000000,606.000000,2.555600,133100.000000 +-121.280000,38.030000,11.000000,826.000000,150.000000,684.000000,166.000000,3.926500,107400.000000 +-121.280000,38.030000,11.000000,3585.000000,729.000000,2769.000000,715.000000,3.090700,94100.000000 +-121.280000,38.020000,8.000000,1868.000000,392.000000,1258.000000,389.000000,3.175000,95900.000000 +-121.280000,37.990000,42.000000,495.000000,116.000000,284.000000,97.000000,2.885400,55700.000000 +-121.280000,37.980000,52.000000,941.000000,184.000000,414.000000,171.000000,2.142900,69900.000000 +-121.280000,37.970000,47.000000,2348.000000,507.000000,1455.000000,479.000000,1.650000,66000.000000 +-121.280000,37.960000,28.000000,1942.000000,724.000000,1618.000000,638.000000,0.936500,52500.000000 +-121.280000,37.950000,46.000000,1026.000000,330.000000,1109.000000,333.000000,1.290400,63300.000000 +-121.280000,37.940000,48.000000,1766.000000,444.000000,1406.000000,421.000000,1.703900,52700.000000 +-121.280000,37.940000,44.000000,1406.000000,357.000000,1489.000000,386.000000,1.468800,56800.000000 +-121.280000,37.940000,40.000000,2806.000000,685.000000,2268.000000,635.000000,1.881400,57700.000000 +-121.280000,37.940000,35.000000,2680.000000,634.000000,2188.000000,611.000000,1.937500,56700.000000 +-121.280000,37.930000,23.000000,1491.000000,346.000000,1223.000000,343.000000,2.159100,67800.000000 +-121.280000,37.920000,36.000000,499.000000,115.000000,451.000000,124.000000,2.170500,60300.000000 +-121.280000,37.910000,31.000000,820.000000,179.000000,576.000000,155.000000,1.690000,65900.000000 +-121.280000,37.900000,28.000000,371.000000,71.000000,171.000000,70.000000,0.961400,55700.000000 +-121.280000,37.830000,32.000000,696.000000,151.000000,443.000000,144.000000,2.515600,86300.000000 +-121.280000,37.820000,10.000000,9205.000000,1774.000000,5935.000000,1673.000000,3.650000,119400.000000 +-121.290000,38.900000,45.000000,2019.000000,394.000000,1104.000000,407.000000,3.169100,108700.000000 +-121.290000,38.890000,10.000000,653.000000,120.000000,407.000000,146.000000,3.388900,110800.000000 +-121.290000,38.760000,12.000000,1198.000000,174.000000,443.000000,170.000000,6.009700,187500.000000 +-121.290000,38.710000,32.000000,1875.000000,361.000000,1027.000000,343.000000,3.576900,103800.000000 +-121.290000,38.680000,12.000000,5098.000000,1094.000000,2029.000000,1065.000000,3.544400,132500.000000 +-121.290000,38.670000,20.000000,1992.000000,363.000000,889.000000,346.000000,3.651600,130500.000000 +-121.290000,38.650000,27.000000,2744.000000,464.000000,1340.000000,452.000000,3.881600,147300.000000 +-121.290000,38.610000,26.000000,1814.000000,299.000000,963.000000,317.000000,4.451900,110500.000000 +-121.290000,38.610000,17.000000,13553.000000,2474.000000,6544.000000,2359.000000,3.972700,132700.000000 +-121.290000,38.600000,29.000000,1276.000000,225.000000,600.000000,223.000000,4.093800,109100.000000 
+-121.290000,38.590000,19.000000,2460.000000,470.000000,1346.000000,480.000000,3.656300,95600.000000 +-121.290000,38.360000,17.000000,2193.000000,386.000000,1148.000000,372.000000,4.527200,191700.000000 +-121.290000,38.280000,11.000000,1554.000000,260.000000,793.000000,233.000000,4.807300,156700.000000 +-121.290000,38.150000,23.000000,4183.000000,633.000000,1886.000000,628.000000,4.878700,175300.000000 +-121.290000,38.140000,34.000000,2770.000000,544.000000,1409.000000,535.000000,3.233800,101800.000000 +-121.290000,38.140000,34.000000,1500.000000,337.000000,674.000000,282.000000,2.515000,110800.000000 +-121.290000,38.140000,27.000000,836.000000,132.000000,303.000000,133.000000,3.875000,127400.000000 +-121.290000,38.130000,31.000000,1008.000000,212.000000,453.000000,195.000000,2.391700,113500.000000 +-121.290000,38.130000,20.000000,3168.000000,514.000000,1390.000000,490.000000,5.000000,154800.000000 +-121.290000,38.120000,18.000000,1534.000000,275.000000,741.000000,263.000000,3.960700,132500.000000 +-121.290000,38.070000,21.000000,1185.000000,207.000000,533.000000,213.000000,3.191700,204500.000000 +-121.290000,38.040000,16.000000,2128.000000,441.000000,1860.000000,459.000000,3.177900,97300.000000 +-121.290000,38.030000,7.000000,2021.000000,441.000000,1615.000000,406.000000,2.584200,111300.000000 +-121.290000,38.030000,16.000000,4356.000000,881.000000,1629.000000,818.000000,2.267200,91100.000000 +-121.290000,38.010000,2.000000,6403.000000,1116.000000,3327.000000,957.000000,4.487100,137900.000000 +-121.290000,38.010000,10.000000,69.000000,16.000000,50.000000,20.000000,3.750000,120800.000000 +-121.290000,38.000000,4.000000,1392.000000,322.000000,1784.000000,309.000000,2.375000,124500.000000 +-121.290000,38.000000,12.000000,4038.000000,1074.000000,3440.000000,942.000000,1.969800,112300.000000 +-121.290000,37.990000,41.000000,930.000000,191.000000,463.000000,185.000000,3.414100,90600.000000 +-121.290000,37.980000,49.000000,2501.000000,565.000000,1171.000000,550.000000,2.504300,76700.000000 +-121.290000,37.980000,42.000000,625.000000,143.000000,533.000000,159.000000,2.625000,65400.000000 +-121.290000,37.970000,52.000000,2995.000000,555.000000,1392.000000,503.000000,1.779400,98800.000000 +-121.290000,37.970000,52.000000,1610.000000,480.000000,1025.000000,440.000000,1.296200,110200.000000 +-121.290000,37.960000,52.000000,287.000000,119.000000,154.000000,85.000000,0.873800,75000.000000 +-121.290000,37.960000,50.000000,1669.000000,558.000000,1340.000000,484.000000,1.319100,92300.000000 +-121.290000,37.960000,48.000000,1778.000000,541.000000,1237.000000,462.000000,1.343800,103100.000000 +-121.290000,37.950000,52.000000,288.000000,86.000000,272.000000,54.000000,0.696000,42500.000000 +-121.290000,37.950000,52.000000,107.000000,79.000000,167.000000,53.000000,0.791700,22500.000000 +-121.290000,37.950000,16.000000,761.000000,306.000000,438.000000,282.000000,0.771400,87500.000000 +-121.290000,37.940000,40.000000,2827.000000,655.000000,2037.000000,574.000000,2.030300,63800.000000 +-121.290000,37.930000,37.000000,2488.000000,578.000000,1854.000000,514.000000,2.551000,59100.000000 +-121.290000,37.930000,24.000000,1438.000000,351.000000,1294.000000,342.000000,2.782900,61800.000000 +-121.290000,37.920000,12.000000,1096.000000,240.000000,1175.000000,278.000000,3.105300,73100.000000 +-121.290000,37.890000,26.000000,161.000000,27.000000,1542.000000,30.000000,5.748500,162500.000000 +-121.290000,37.870000,29.000000,488.000000,108.000000,308.000000,115.000000,2.656300,103100.000000 
+-121.290000,37.720000,22.000000,1630.000000,404.000000,4402.000000,358.000000,1.979200,63000.000000 +-121.290000,36.900000,17.000000,3610.000000,593.000000,1734.000000,559.000000,5.832400,374200.000000 +-121.300000,38.890000,45.000000,1529.000000,317.000000,793.000000,281.000000,2.986600,91300.000000 +-121.300000,38.750000,36.000000,3903.000000,885.000000,2313.000000,804.000000,2.655000,86300.000000 +-121.300000,38.740000,41.000000,4374.000000,1039.000000,2387.000000,959.000000,2.361100,87900.000000 +-121.300000,38.730000,9.000000,5558.000000,1099.000000,2717.000000,1043.000000,3.645500,139200.000000 +-121.300000,38.720000,15.000000,2514.000000,482.000000,1166.000000,503.000000,2.281300,131900.000000 +-121.300000,38.710000,17.000000,5434.000000,1106.000000,2755.000000,1047.000000,2.822600,99900.000000 +-121.300000,38.700000,18.000000,7334.000000,1332.000000,3339.000000,1271.000000,3.235000,124700.000000 +-121.300000,38.690000,21.000000,6575.000000,1105.000000,3358.000000,1098.000000,4.073900,115400.000000 +-121.300000,38.690000,13.000000,2135.000000,429.000000,779.000000,432.000000,3.699500,134900.000000 +-121.300000,38.680000,19.000000,2655.000000,438.000000,1253.000000,454.000000,5.281700,140600.000000 +-121.300000,38.670000,23.000000,2145.000000,340.000000,1022.000000,349.000000,4.203700,125400.000000 +-121.300000,38.670000,20.000000,1234.000000,208.000000,649.000000,211.000000,4.852300,143000.000000 +-121.300000,38.670000,15.000000,4018.000000,850.000000,2070.000000,814.000000,3.073300,119800.000000 +-121.300000,38.660000,32.000000,2915.000000,492.000000,1292.000000,454.000000,3.318800,117100.000000 +-121.300000,38.660000,28.000000,3391.000000,550.000000,1546.000000,553.000000,4.218800,139200.000000 +-121.300000,38.660000,21.000000,3824.000000,634.000000,1818.000000,600.000000,3.712000,139000.000000 +-121.300000,38.650000,36.000000,1665.000000,293.000000,846.000000,306.000000,3.585200,121600.000000 +-121.300000,38.650000,26.000000,3192.000000,447.000000,1132.000000,418.000000,4.527800,144300.000000 +-121.300000,38.640000,20.000000,5001.000000,830.000000,2330.000000,830.000000,4.083300,160000.000000 +-121.300000,38.630000,31.000000,1817.000000,372.000000,992.000000,339.000000,3.097200,150000.000000 +-121.300000,38.610000,25.000000,2707.000000,464.000000,1423.000000,490.000000,4.323500,116900.000000 +-121.300000,38.600000,32.000000,9534.000000,1819.000000,4951.000000,1710.000000,3.392600,103400.000000 +-121.300000,38.590000,25.000000,3002.000000,718.000000,1660.000000,613.000000,2.111600,89600.000000 +-121.300000,38.580000,29.000000,2748.000000,563.000000,1619.000000,525.000000,2.896600,92400.000000 +-121.300000,38.580000,19.000000,2653.000000,680.000000,1419.000000,579.000000,2.378700,91300.000000 +-121.300000,38.580000,16.000000,1537.000000,364.000000,1125.000000,375.000000,2.647100,90700.000000 +-121.300000,38.510000,19.000000,822.000000,134.000000,457.000000,133.000000,4.150000,157500.000000 +-121.300000,38.260000,19.000000,1403.000000,276.000000,901.000000,290.000000,3.215000,104600.000000 +-121.300000,38.250000,27.000000,2475.000000,548.000000,1703.000000,517.000000,2.572700,86100.000000 +-121.300000,38.140000,17.000000,3507.000000,696.000000,1867.000000,709.000000,3.212300,120700.000000 +-121.300000,38.130000,27.000000,1004.000000,192.000000,470.000000,192.000000,2.894200,116700.000000 +-121.300000,38.130000,26.000000,2256.000000,360.000000,937.000000,372.000000,5.052800,153700.000000 
+-121.300000,38.130000,23.000000,2864.000000,504.000000,1298.000000,499.000000,3.230300,131800.000000 +-121.300000,38.120000,11.000000,1792.000000,252.000000,767.000000,263.000000,7.688900,229300.000000 +-121.300000,38.110000,5.000000,5979.000000,1190.000000,2679.000000,1084.000000,4.196000,171700.000000 +-121.300000,38.090000,31.000000,335.000000,53.000000,154.000000,55.000000,2.069400,175000.000000 +-121.300000,38.050000,52.000000,122.000000,26.000000,62.000000,25.000000,1.150000,112500.000000 +-121.300000,38.040000,8.000000,2668.000000,447.000000,1713.000000,444.000000,4.015600,117600.000000 +-121.300000,38.030000,13.000000,1014.000000,200.000000,712.000000,197.000000,3.147100,102800.000000 +-121.300000,38.030000,11.000000,2866.000000,654.000000,1404.000000,525.000000,2.505000,95000.000000 +-121.300000,38.030000,10.000000,1409.000000,248.000000,782.000000,222.000000,4.022700,107700.000000 +-121.300000,38.020000,4.000000,1515.000000,384.000000,491.000000,348.000000,2.852300,87500.000000 +-121.300000,38.020000,16.000000,2717.000000,621.000000,3343.000000,643.000000,2.547300,106300.000000 +-121.300000,38.010000,30.000000,2547.000000,485.000000,1547.000000,501.000000,3.994000,95500.000000 +-121.300000,38.010000,29.000000,2289.000000,449.000000,1215.000000,435.000000,3.278800,100000.000000 +-121.300000,38.000000,27.000000,2918.000000,580.000000,1338.000000,544.000000,2.649500,116200.000000 +-121.300000,38.000000,23.000000,3706.000000,1106.000000,3785.000000,1019.000000,1.777400,100000.000000 +-121.300000,37.980000,47.000000,2373.000000,461.000000,990.000000,444.000000,4.175000,98300.000000 +-121.300000,37.970000,52.000000,2980.000000,537.000000,1128.000000,510.000000,4.061000,113600.000000 +-121.300000,37.960000,52.000000,1475.000000,238.000000,736.000000,260.000000,3.659100,105100.000000 +-121.300000,37.960000,31.000000,2668.000000,812.000000,1398.000000,721.000000,1.125000,110400.000000 +-121.300000,37.960000,24.000000,1212.000000,366.000000,1202.000000,343.000000,1.787500,76800.000000 +-121.300000,37.940000,52.000000,24.000000,6.000000,23.000000,5.000000,2.375000,67500.000000 +-121.300000,37.940000,40.000000,452.000000,109.000000,412.000000,97.000000,1.341700,60800.000000 +-121.300000,37.920000,28.000000,3308.000000,766.000000,3201.000000,720.000000,1.769400,73900.000000 +-121.300000,37.850000,35.000000,1034.000000,206.000000,604.000000,192.000000,2.239100,120000.000000 +-121.310000,38.970000,16.000000,1210.000000,228.000000,726.000000,222.000000,2.708300,82100.000000 +-121.310000,38.750000,7.000000,4185.000000,750.000000,2147.000000,706.000000,4.051900,129200.000000 +-121.310000,38.720000,11.000000,2306.000000,420.000000,1308.000000,418.000000,3.950600,122200.000000 +-121.310000,38.680000,22.000000,1194.000000,207.000000,545.000000,223.000000,3.860300,134300.000000 +-121.310000,38.680000,16.000000,5179.000000,1271.000000,2181.000000,1151.000000,2.100900,82500.000000 +-121.310000,38.670000,27.000000,1998.000000,353.000000,970.000000,343.000000,4.822400,115500.000000 +-121.310000,38.670000,26.000000,1387.000000,226.000000,807.000000,244.000000,4.156300,135700.000000 +-121.310000,38.660000,27.000000,1713.000000,282.000000,761.000000,295.000000,5.208100,136400.000000 +-121.310000,38.660000,26.000000,1604.000000,245.000000,751.000000,267.000000,4.738100,140500.000000 +-121.310000,38.650000,21.000000,2759.000000,409.000000,1053.000000,374.000000,5.500000,165700.000000 +-121.310000,38.640000,19.000000,5407.000000,838.000000,1927.000000,804.000000,4.630200,195400.000000 
+-121.310000,38.610000,17.000000,992.000000,151.000000,316.000000,159.000000,6.623800,326700.000000 +-121.310000,38.590000,35.000000,3295.000000,560.000000,1454.000000,536.000000,3.171100,101900.000000 +-121.310000,38.580000,10.000000,2421.000000,580.000000,962.000000,497.000000,2.503500,112500.000000 +-121.310000,38.570000,9.000000,2748.000000,521.000000,1663.000000,565.000000,3.519200,113300.000000 +-121.310000,38.570000,22.000000,1229.000000,253.000000,733.000000,250.000000,2.500000,101600.000000 +-121.310000,38.280000,16.000000,1708.000000,391.000000,687.000000,378.000000,1.948500,155400.000000 +-121.310000,38.260000,22.000000,1768.000000,396.000000,1005.000000,420.000000,1.884600,88300.000000 +-121.310000,38.030000,24.000000,3050.000000,568.000000,1743.000000,549.000000,3.741300,105300.000000 +-121.310000,38.030000,18.000000,4893.000000,1008.000000,3036.000000,997.000000,2.521200,110000.000000 +-121.310000,38.020000,24.000000,4157.000000,951.000000,2734.000000,879.000000,2.798100,92100.000000 +-121.310000,38.010000,22.000000,2575.000000,680.000000,1367.000000,645.000000,1.427400,90500.000000 +-121.310000,38.000000,35.000000,2097.000000,351.000000,977.000000,358.000000,4.395800,108400.000000 +-121.310000,38.000000,19.000000,908.000000,158.000000,306.000000,154.000000,3.979200,131900.000000 +-121.310000,37.990000,23.000000,3135.000000,707.000000,1650.000000,680.000000,1.886000,105300.000000 +-121.310000,37.990000,15.000000,3103.000000,965.000000,3061.000000,861.000000,1.372900,110300.000000 +-121.310000,37.980000,47.000000,3386.000000,663.000000,1228.000000,619.000000,3.062500,141500.000000 +-121.310000,37.970000,45.000000,2604.000000,454.000000,988.000000,442.000000,3.666700,123100.000000 +-121.310000,37.970000,42.000000,1824.000000,277.000000,720.000000,309.000000,5.191500,183700.000000 +-121.310000,37.960000,52.000000,2654.000000,468.000000,1157.000000,494.000000,3.226000,107600.000000 +-121.310000,37.960000,52.000000,1938.000000,332.000000,788.000000,320.000000,3.609400,118400.000000 +-121.310000,37.960000,52.000000,1829.000000,301.000000,694.000000,319.000000,3.346600,92600.000000 +-121.310000,37.960000,48.000000,1112.000000,227.000000,583.000000,216.000000,2.339300,77600.000000 +-121.310000,37.940000,41.000000,375.000000,108.000000,323.000000,98.000000,1.953100,45000.000000 +-121.310000,37.900000,38.000000,226.000000,44.000000,125.000000,38.000000,2.900000,125000.000000 +-121.310000,37.810000,36.000000,284.000000,53.000000,130.000000,47.000000,3.142900,179200.000000 +-121.310000,37.440000,33.000000,69.000000,28.000000,47.000000,14.000000,0.536000,112500.000000 +-121.310000,36.420000,21.000000,2740.000000,615.000000,2630.000000,564.000000,2.662900,102700.000000 +-121.320000,39.430000,18.000000,1860.000000,409.000000,741.000000,349.000000,1.867200,84700.000000 +-121.320000,39.290000,11.000000,2640.000000,505.000000,1257.000000,445.000000,3.567300,112000.000000 +-121.320000,39.130000,5.000000,358.000000,65.000000,169.000000,59.000000,3.000000,162500.000000 +-121.320000,38.890000,9.000000,5927.000000,1269.000000,3369.000000,1176.000000,2.819400,111300.000000 +-121.320000,38.740000,14.000000,1449.000000,228.000000,670.000000,232.000000,4.389700,186300.000000 +-121.320000,38.710000,14.000000,4594.000000,774.000000,2474.000000,782.000000,4.524500,127500.000000 +-121.320000,38.700000,16.000000,2966.000000,578.000000,1365.000000,480.000000,3.244400,118400.000000 +-121.320000,38.690000,11.000000,13796.000000,2372.000000,6000.000000,2250.000000,3.877600,124500.000000 
+-121.320000,38.680000,25.000000,1252.000000,207.000000,587.000000,217.000000,3.589300,146400.000000 +-121.320000,38.670000,31.000000,2532.000000,479.000000,1396.000000,467.000000,4.041700,114500.000000 +-121.320000,38.660000,21.000000,1276.000000,208.000000,501.000000,205.000000,3.950000,143600.000000 +-121.320000,38.640000,19.000000,8501.000000,1558.000000,3576.000000,1467.000000,3.652300,158500.000000 +-121.320000,38.630000,20.000000,7003.000000,1409.000000,3107.000000,1315.000000,3.034800,150500.000000 +-121.320000,38.620000,33.000000,898.000000,190.000000,470.000000,201.000000,2.689700,148300.000000 +-121.320000,38.610000,22.000000,3902.000000,845.000000,1870.000000,763.000000,2.774000,190200.000000 +-121.320000,38.590000,24.000000,4378.000000,910.000000,2149.000000,812.000000,2.503500,123700.000000 +-121.320000,38.590000,21.000000,9774.000000,1777.000000,4674.000000,1712.000000,3.681700,136100.000000 +-121.320000,38.570000,25.000000,692.000000,146.000000,504.000000,167.000000,3.689700,101100.000000 +-121.320000,38.560000,18.000000,1169.000000,186.000000,614.000000,192.000000,4.576600,108700.000000 +-121.320000,38.540000,13.000000,4715.000000,1090.000000,2420.000000,1059.000000,2.969300,104400.000000 +-121.320000,38.210000,27.000000,2643.000000,467.000000,1455.000000,444.000000,3.639800,146700.000000 +-121.320000,38.160000,14.000000,2049.000000,398.000000,1071.000000,369.000000,3.500000,240800.000000 +-121.320000,38.150000,5.000000,5428.000000,994.000000,2725.000000,902.000000,3.932300,130100.000000 +-121.320000,38.130000,5.000000,3136.000000,501.000000,1327.000000,467.000000,5.594200,186900.000000 +-121.320000,38.040000,30.000000,249.000000,44.000000,167.000000,45.000000,4.500000,92800.000000 +-121.320000,38.030000,25.000000,2474.000000,513.000000,1947.000000,524.000000,2.574200,98400.000000 +-121.320000,38.020000,26.000000,2851.000000,533.000000,1544.000000,499.000000,3.537900,99100.000000 +-121.320000,38.020000,23.000000,3251.000000,689.000000,1890.000000,668.000000,3.072900,104800.000000 +-121.320000,38.010000,36.000000,391.000000,74.000000,171.000000,79.000000,2.704500,102800.000000 +-121.320000,38.010000,20.000000,1903.000000,395.000000,919.000000,359.000000,2.676500,96400.000000 +-121.320000,38.000000,22.000000,2105.000000,521.000000,781.000000,483.000000,2.213000,87500.000000 +-121.320000,38.000000,21.000000,1795.000000,482.000000,1114.000000,472.000000,2.009100,101500.000000 +-121.320000,37.980000,37.000000,3247.000000,643.000000,1737.000000,665.000000,3.066000,94100.000000 +-121.320000,37.980000,20.000000,1591.000000,589.000000,1916.000000,536.000000,1.353100,94600.000000 +-121.320000,37.970000,46.000000,2270.000000,427.000000,1097.000000,453.000000,3.323500,87800.000000 +-121.320000,37.970000,43.000000,2453.000000,490.000000,1093.000000,438.000000,2.910700,88800.000000 +-121.320000,37.960000,5.000000,123.000000,21.000000,50.000000,20.000000,2.765600,50000.000000 +-121.320000,37.960000,47.000000,1700.000000,344.000000,922.000000,357.000000,3.184500,87200.000000 +-121.320000,37.960000,46.000000,1832.000000,365.000000,975.000000,373.000000,2.039800,88100.000000 +-121.320000,37.950000,40.000000,964.000000,230.000000,742.000000,209.000000,1.262500,43000.000000 +-121.320000,37.950000,36.000000,747.000000,189.000000,338.000000,145.000000,1.788500,62100.000000 +-121.320000,37.940000,36.000000,40.000000,10.000000,64.000000,14.000000,2.625000,55000.000000 +-121.320000,37.670000,21.000000,1494.000000,271.000000,781.000000,255.000000,4.301500,250000.000000 
+-121.320000,36.790000,30.000000,516.000000,90.000000,288.000000,95.000000,3.633300,202500.000000 +-121.320000,36.430000,22.000000,2927.000000,637.000000,2546.000000,618.000000,2.715300,114300.000000 +-121.320000,35.950000,31.000000,372.000000,68.000000,479.000000,67.000000,3.554700,200000.000000 +-121.330000,38.770000,3.000000,20214.000000,3559.000000,8361.000000,3112.000000,4.225900,169300.000000 +-121.330000,38.700000,15.000000,2226.000000,421.000000,1004.000000,417.000000,2.786800,117800.000000 +-121.330000,38.690000,15.000000,3137.000000,509.000000,1635.000000,544.000000,4.692300,122700.000000 +-121.330000,38.680000,13.000000,5826.000000,1411.000000,2244.000000,1219.000000,1.909300,142900.000000 +-121.330000,38.670000,17.000000,2683.000000,704.000000,1410.000000,659.000000,1.962000,130200.000000 +-121.330000,38.660000,15.000000,4371.000000,908.000000,1842.000000,818.000000,2.779700,105500.000000 +-121.330000,38.650000,24.000000,3533.000000,741.000000,1496.000000,723.000000,2.810600,183200.000000 +-121.330000,38.640000,27.000000,2203.000000,493.000000,1158.000000,492.000000,2.434200,119500.000000 +-121.330000,38.630000,23.000000,1947.000000,409.000000,866.000000,400.000000,2.718100,156800.000000 +-121.330000,38.620000,19.000000,1853.000000,415.000000,772.000000,397.000000,2.257400,135800.000000 +-121.330000,38.610000,21.000000,2453.000000,518.000000,1326.000000,505.000000,2.707900,148000.000000 +-121.330000,38.560000,17.000000,3608.000000,682.000000,1694.000000,666.000000,3.311000,109400.000000 +-121.330000,38.040000,15.000000,1933.000000,280.000000,965.000000,260.000000,4.647700,142700.000000 +-121.330000,38.040000,10.000000,1421.000000,204.000000,657.000000,209.000000,5.187800,153900.000000 +-121.330000,38.030000,19.000000,1708.000000,291.000000,906.000000,288.000000,4.918000,130600.000000 +-121.330000,38.030000,10.000000,629.000000,140.000000,635.000000,146.000000,2.296100,126700.000000 +-121.330000,38.020000,37.000000,1964.000000,315.000000,915.000000,335.000000,4.300800,119800.000000 +-121.330000,38.020000,33.000000,2854.000000,489.000000,1109.000000,452.000000,4.300800,136400.000000 +-121.330000,38.020000,31.000000,1466.000000,264.000000,608.000000,254.000000,3.182700,162100.000000 +-121.330000,38.010000,36.000000,1383.000000,207.000000,531.000000,203.000000,5.919100,151900.000000 +-121.330000,38.010000,27.000000,1612.000000,234.000000,630.000000,255.000000,5.318000,155100.000000 +-121.330000,38.000000,14.000000,3731.000000,772.000000,1679.000000,750.000000,3.136900,119600.000000 +-121.330000,37.980000,9.000000,2370.000000,424.000000,1129.000000,386.000000,5.143000,176500.000000 +-121.330000,37.980000,36.000000,3113.000000,576.000000,1746.000000,544.000000,3.462500,84600.000000 +-121.330000,37.980000,10.000000,1564.000000,397.000000,643.000000,347.000000,2.703100,150000.000000 +-121.330000,37.970000,43.000000,1511.000000,292.000000,721.000000,320.000000,3.570300,87400.000000 +-121.330000,37.970000,38.000000,3166.000000,575.000000,1351.000000,561.000000,3.540400,91600.000000 +-121.330000,37.960000,20.000000,1727.000000,386.000000,730.000000,342.000000,2.519500,92600.000000 +-121.330000,36.430000,40.000000,622.000000,194.000000,902.000000,196.000000,2.625000,109100.000000 +-121.340000,38.690000,17.000000,1968.000000,364.000000,996.000000,331.000000,3.703100,114300.000000 +-121.340000,38.690000,16.000000,2686.000000,516.000000,1553.000000,529.000000,3.785700,112700.000000 +-121.340000,38.680000,28.000000,3379.000000,552.000000,1543.000000,556.000000,4.274300,124000.000000 
+-121.340000,38.670000,35.000000,643.000000,117.000000,331.000000,134.000000,3.041700,120700.000000 +-121.340000,38.670000,34.000000,1503.000000,264.000000,731.000000,285.000000,4.035200,118500.000000 +-121.340000,38.660000,17.000000,1149.000000,257.000000,583.000000,243.000000,2.809200,137500.000000 +-121.340000,38.650000,27.000000,1595.000000,246.000000,610.000000,253.000000,4.600000,199000.000000 +-121.340000,38.640000,12.000000,2772.000000,578.000000,1335.000000,565.000000,3.806800,161000.000000 +-121.340000,38.630000,13.000000,3033.000000,540.000000,1363.000000,519.000000,4.003600,161700.000000 +-121.340000,38.610000,22.000000,1778.000000,408.000000,875.000000,375.000000,2.602300,142200.000000 +-121.340000,38.610000,20.000000,5801.000000,1148.000000,2586.000000,1063.000000,3.906300,162100.000000 +-121.340000,38.610000,11.000000,1716.000000,404.000000,722.000000,415.000000,2.092600,166100.000000 +-121.340000,38.590000,23.000000,2912.000000,421.000000,1132.000000,410.000000,5.917400,225900.000000 +-121.340000,38.590000,22.000000,3273.000000,480.000000,1151.000000,463.000000,8.050000,380000.000000 +-121.340000,38.580000,18.000000,1631.000000,228.000000,599.000000,228.000000,7.803100,267200.000000 +-121.340000,38.580000,17.000000,1605.000000,258.000000,748.000000,262.000000,5.091700,134100.000000 +-121.340000,38.570000,14.000000,5737.000000,1008.000000,2731.000000,983.000000,4.460200,134500.000000 +-121.340000,38.560000,12.000000,2975.000000,628.000000,1440.000000,593.000000,2.989600,118600.000000 +-121.340000,38.550000,11.000000,2838.000000,498.000000,1701.000000,504.000000,4.144700,114000.000000 +-121.340000,38.440000,14.000000,3205.000000,465.000000,1439.000000,456.000000,5.745200,240900.000000 +-121.340000,38.040000,16.000000,3295.000000,565.000000,2279.000000,576.000000,3.608300,146400.000000 +-121.340000,38.030000,20.000000,4213.000000,751.000000,2071.000000,714.000000,4.406300,130800.000000 +-121.340000,38.030000,12.000000,2707.000000,433.000000,1200.000000,380.000000,4.986100,133500.000000 +-121.340000,38.020000,30.000000,4375.000000,689.000000,2038.000000,709.000000,5.120200,133800.000000 +-121.340000,38.010000,17.000000,2033.000000,452.000000,1114.000000,446.000000,3.287200,175000.000000 +-121.340000,38.000000,32.000000,3877.000000,687.000000,1642.000000,647.000000,4.044400,129200.000000 +-121.340000,37.990000,14.000000,3111.000000,498.000000,1178.000000,525.000000,6.556000,234700.000000 +-121.340000,37.990000,11.000000,4487.000000,868.000000,2195.000000,780.000000,3.961500,194600.000000 +-121.340000,37.970000,33.000000,2493.000000,454.000000,1203.000000,436.000000,3.765000,94600.000000 +-121.340000,37.960000,27.000000,1839.000000,442.000000,2010.000000,416.000000,2.128400,59400.000000 +-121.340000,37.960000,23.000000,2830.000000,659.000000,1554.000000,654.000000,3.035400,113700.000000 +-121.340000,36.760000,15.000000,2638.000000,429.000000,1289.000000,357.000000,4.152800,336800.000000 +-121.350000,38.720000,2.000000,21897.000000,3513.000000,8652.000000,2873.000000,4.543200,151300.000000 +-121.350000,38.700000,5.000000,14414.000000,2979.000000,7608.000000,2832.000000,3.580200,129600.000000 +-121.350000,38.680000,18.000000,7923.000000,1558.000000,3789.000000,1473.000000,3.540300,98600.000000 +-121.350000,38.650000,20.000000,2498.000000,546.000000,1185.000000,506.000000,3.224300,107900.000000 +-121.350000,38.620000,28.000000,4175.000000,796.000000,2032.000000,830.000000,3.429900,164000.000000 
+-121.350000,38.600000,27.000000,4314.000000,611.000000,1662.000000,575.000000,5.099700,170100.000000 +-121.350000,38.580000,20.000000,2992.000000,378.000000,1105.000000,368.000000,8.657200,320200.000000 +-121.350000,38.560000,16.000000,2629.000000,491.000000,1265.000000,485.000000,4.506600,140200.000000 +-121.350000,38.560000,16.000000,2278.000000,370.000000,1203.000000,371.000000,5.062200,132400.000000 +-121.350000,38.550000,22.000000,2607.000000,411.000000,1216.000000,407.000000,5.042700,126900.000000 +-121.350000,38.550000,18.000000,4481.000000,780.000000,2211.000000,775.000000,3.993400,123300.000000 +-121.350000,38.540000,12.000000,16239.000000,3358.000000,8656.000000,3234.000000,3.569100,116300.000000 +-121.350000,38.460000,2.000000,6992.000000,1132.000000,2816.000000,984.000000,4.387900,144400.000000 +-121.350000,38.090000,32.000000,1706.000000,292.000000,923.000000,284.000000,5.505700,147200.000000 +-121.350000,38.040000,12.000000,6217.000000,1019.000000,3771.000000,961.000000,3.720600,146000.000000 +-121.350000,38.030000,8.000000,1904.000000,255.000000,895.000000,242.000000,5.720100,155700.000000 +-121.350000,38.030000,16.000000,3158.000000,515.000000,1596.000000,528.000000,4.173900,131300.000000 +-121.350000,38.020000,16.000000,1665.000000,311.000000,1301.000000,259.000000,2.840300,132300.000000 +-121.350000,38.020000,15.000000,3583.000000,644.000000,2183.000000,643.000000,3.428000,140700.000000 +-121.350000,38.010000,15.000000,2682.000000,599.000000,1520.000000,601.000000,3.598200,94400.000000 +-121.350000,38.000000,22.000000,3564.000000,730.000000,1539.000000,699.000000,3.675000,152400.000000 +-121.350000,37.970000,33.000000,3656.000000,681.000000,1698.000000,671.000000,3.140600,93900.000000 +-121.350000,37.960000,21.000000,1343.000000,183.000000,462.000000,193.000000,5.899500,189900.000000 +-121.360000,38.730000,21.000000,2253.000000,416.000000,1050.000000,411.000000,3.141000,220100.000000 +-121.360000,38.670000,5.000000,5819.000000,1507.000000,3237.000000,1356.000000,2.233900,116600.000000 +-121.360000,38.670000,17.000000,2770.000000,684.000000,1471.000000,624.000000,2.368300,82500.000000 +-121.360000,38.660000,22.000000,2878.000000,599.000000,1362.000000,541.000000,2.795500,96500.000000 +-121.360000,38.660000,14.000000,756.000000,141.000000,424.000000,155.000000,3.695300,116100.000000 +-121.360000,38.640000,24.000000,6540.000000,1008.000000,2667.000000,1031.000000,5.563200,175200.000000 +-121.360000,38.630000,30.000000,2619.000000,370.000000,940.000000,359.000000,4.728300,164500.000000 +-121.360000,38.630000,28.000000,6119.000000,985.000000,2631.000000,934.000000,4.875000,146400.000000 +-121.360000,38.620000,34.000000,2447.000000,503.000000,1077.000000,456.000000,3.058000,133000.000000 +-121.360000,38.610000,37.000000,2191.000000,394.000000,951.000000,362.000000,3.888200,159500.000000 +-121.360000,38.610000,35.000000,2355.000000,365.000000,993.000000,354.000000,5.049200,144100.000000 +-121.360000,38.600000,36.000000,1275.000000,227.000000,530.000000,245.000000,3.875000,133600.000000 +-121.360000,38.600000,35.000000,1930.000000,328.000000,805.000000,338.000000,4.464300,133000.000000 +-121.360000,38.580000,25.000000,3196.000000,406.000000,978.000000,419.000000,8.469900,344000.000000 +-121.360000,38.570000,26.000000,1793.000000,244.000000,653.000000,235.000000,5.648500,129500.000000 +-121.360000,38.560000,20.000000,1232.000000,332.000000,667.000000,288.000000,1.828800,32500.000000 
+-121.360000,38.550000,33.000000,1191.000000,198.000000,554.000000,191.000000,2.802100,118800.000000 +-121.360000,38.420000,6.000000,3254.000000,465.000000,1168.000000,345.000000,5.181100,188400.000000 +-121.360000,38.400000,18.000000,4813.000000,849.000000,2333.000000,843.000000,4.175000,144400.000000 +-121.360000,38.390000,10.000000,5121.000000,763.000000,2568.000000,758.000000,5.244700,148100.000000 +-121.360000,38.150000,42.000000,2051.000000,334.000000,878.000000,318.000000,4.355300,185700.000000 +-121.360000,38.040000,9.000000,2167.000000,370.000000,1290.000000,351.000000,5.028500,148200.000000 +-121.360000,38.040000,4.000000,2477.000000,359.000000,1234.000000,377.000000,5.542700,162100.000000 +-121.360000,38.030000,7.000000,3461.000000,859.000000,1518.000000,741.000000,3.568400,78700.000000 +-121.360000,38.030000,14.000000,2356.000000,438.000000,1378.000000,481.000000,3.737500,138800.000000 +-121.360000,38.020000,5.000000,2229.000000,543.000000,1010.000000,474.000000,4.171900,206100.000000 +-121.360000,38.010000,16.000000,926.000000,230.000000,451.000000,198.000000,4.022100,173300.000000 +-121.360000,38.010000,16.000000,2178.000000,667.000000,1192.000000,579.000000,2.333900,87100.000000 +-121.360000,38.010000,16.000000,1080.000000,166.000000,507.000000,182.000000,4.527800,166900.000000 +-121.360000,38.000000,17.000000,4535.000000,762.000000,1562.000000,743.000000,5.322400,225800.000000 +-121.360000,37.990000,8.000000,1801.000000,380.000000,684.000000,350.000000,4.258900,134900.000000 +-121.360000,36.810000,7.000000,4609.000000,741.000000,1660.000000,720.000000,5.087100,290500.000000 +-121.370000,39.030000,32.000000,1158.000000,244.000000,598.000000,227.000000,2.823500,65500.000000 +-121.370000,38.700000,26.000000,2230.000000,410.000000,1155.000000,377.000000,3.491100,88200.000000 +-121.370000,38.700000,18.000000,3938.000000,649.000000,1861.000000,606.000000,3.648400,95000.000000 +-121.370000,38.690000,35.000000,1851.000000,327.000000,1007.000000,286.000000,3.236100,84000.000000 +-121.370000,38.690000,35.000000,1093.000000,192.000000,590.000000,190.000000,2.700900,80200.000000 +-121.370000,38.690000,29.000000,2103.000000,380.000000,1124.000000,387.000000,3.083300,87000.000000 +-121.370000,38.680000,35.000000,1620.000000,276.000000,939.000000,277.000000,2.554200,72900.000000 +-121.370000,38.680000,34.000000,1086.000000,187.000000,663.000000,190.000000,3.307400,84200.000000 +-121.370000,38.680000,29.000000,3757.000000,646.000000,2022.000000,611.000000,3.542900,88200.000000 +-121.370000,38.670000,36.000000,1354.000000,258.000000,771.000000,267.000000,2.272300,78800.000000 +-121.370000,38.660000,9.000000,3184.000000,779.000000,1929.000000,769.000000,2.384400,86000.000000 +-121.370000,38.660000,17.000000,4866.000000,1056.000000,2371.000000,1030.000000,2.457400,103300.000000 +-121.370000,38.640000,36.000000,322.000000,48.000000,133.000000,59.000000,4.611100,139300.000000 +-121.370000,38.640000,27.000000,1672.000000,299.000000,757.000000,282.000000,3.678600,159700.000000 +-121.370000,38.630000,37.000000,494.000000,86.000000,253.000000,99.000000,4.819400,141100.000000 +-121.370000,38.630000,32.000000,3658.000000,797.000000,1452.000000,715.000000,2.662300,120700.000000 +-121.370000,38.630000,30.000000,5996.000000,1018.000000,2532.000000,1049.000000,4.612700,151800.000000 +-121.370000,38.620000,43.000000,1077.000000,199.000000,447.000000,182.000000,3.013900,115600.000000 +-121.370000,38.610000,42.000000,945.000000,193.000000,460.000000,193.000000,3.756900,127100.000000 
+-121.370000,38.610000,39.000000,823.000000,146.000000,329.000000,144.000000,3.083300,114100.000000 +-121.370000,38.600000,35.000000,3137.000000,544.000000,1312.000000,549.000000,3.788000,136800.000000 +-121.370000,38.590000,36.000000,2523.000000,401.000000,927.000000,398.000000,3.517900,207800.000000 +-121.370000,38.590000,36.000000,2388.000000,369.000000,838.000000,356.000000,4.775000,194100.000000 +-121.370000,38.580000,37.000000,2839.000000,390.000000,1006.000000,400.000000,7.334300,280400.000000 +-121.370000,38.570000,16.000000,3895.000000,896.000000,1762.000000,855.000000,2.663500,135800.000000 +-121.370000,38.560000,27.000000,1827.000000,509.000000,852.000000,450.000000,2.090100,52500.000000 +-121.370000,38.560000,18.000000,2129.000000,363.000000,815.000000,347.000000,2.767900,118000.000000 +-121.370000,38.550000,21.000000,2713.000000,432.000000,1287.000000,440.000000,4.581500,125500.000000 +-121.370000,38.420000,18.000000,2643.000000,502.000000,1755.000000,541.000000,3.328100,91200.000000 +-121.370000,38.390000,15.000000,1883.000000,254.000000,893.000000,256.000000,6.257500,143500.000000 +-121.370000,38.010000,15.000000,2430.000000,315.000000,1016.000000,314.000000,10.008800,242000.000000 +-121.370000,37.770000,19.000000,2610.000000,474.000000,1290.000000,452.000000,4.129800,222800.000000 +-121.370000,37.060000,25.000000,474.000000,92.000000,300.000000,104.000000,3.806200,340900.000000 +-121.370000,36.890000,21.000000,2471.000000,473.000000,1753.000000,451.000000,4.025000,293800.000000 +-121.370000,36.840000,11.000000,1996.000000,382.000000,1023.000000,358.000000,3.571400,243000.000000 +-121.380000,38.710000,7.000000,4842.000000,935.000000,2857.000000,907.000000,3.931800,133000.000000 +-121.380000,38.700000,25.000000,3919.000000,764.000000,2203.000000,783.000000,2.240200,89500.000000 +-121.380000,38.690000,35.000000,2943.000000,554.000000,1460.000000,510.000000,2.671300,84400.000000 +-121.380000,38.680000,40.000000,67.000000,17.000000,50.000000,32.000000,1.759600,93800.000000 +-121.380000,38.680000,35.000000,1643.000000,298.000000,831.000000,305.000000,4.067300,84200.000000 +-121.380000,38.680000,35.000000,1565.000000,290.000000,861.000000,277.000000,2.484400,77000.000000 +-121.380000,38.670000,38.000000,1001.000000,228.000000,597.000000,226.000000,2.278800,73400.000000 +-121.380000,38.670000,37.000000,2176.000000,460.000000,1067.000000,357.000000,2.395800,78400.000000 +-121.380000,38.660000,17.000000,3778.000000,939.000000,2393.000000,862.000000,1.897200,100500.000000 +-121.380000,38.650000,34.000000,825.000000,173.000000,355.000000,130.000000,3.185800,109500.000000 +-121.380000,38.620000,41.000000,774.000000,144.000000,356.000000,150.000000,3.562500,115300.000000 +-121.380000,38.620000,34.000000,2352.000000,610.000000,1127.000000,592.000000,2.200000,116500.000000 +-121.380000,38.610000,34.000000,2888.000000,496.000000,1168.000000,479.000000,3.605300,148600.000000 +-121.380000,38.610000,27.000000,2375.000000,537.000000,863.000000,452.000000,3.008600,126900.000000 +-121.380000,38.600000,36.000000,1249.000000,159.000000,362.000000,143.000000,6.846900,446400.000000 +-121.380000,38.590000,38.000000,1839.000000,287.000000,685.000000,276.000000,4.531300,189400.000000 +-121.380000,38.590000,36.000000,2253.000000,434.000000,1018.000000,426.000000,3.259600,98700.000000 +-121.380000,38.580000,38.000000,2968.000000,475.000000,1176.000000,454.000000,5.049700,191700.000000 +-121.380000,38.550000,26.000000,1532.000000,264.000000,781.000000,285.000000,4.694400,130900.000000 
+-121.380000,38.550000,23.000000,2790.000000,430.000000,1407.000000,460.000000,4.328800,133700.000000 +-121.380000,38.490000,11.000000,8537.000000,1643.000000,4224.000000,1648.000000,2.964700,108900.000000 +-121.380000,38.470000,4.000000,14418.000000,2282.000000,6578.000000,2140.000000,4.560400,145900.000000 +-121.380000,38.410000,7.000000,6091.000000,921.000000,2916.000000,886.000000,4.755700,150400.000000 +-121.380000,38.410000,10.000000,3425.000000,629.000000,1538.000000,587.000000,4.450000,138700.000000 +-121.380000,36.850000,13.000000,4115.000000,782.000000,2903.000000,747.000000,3.731600,192400.000000 +-121.380000,36.840000,6.000000,3769.000000,669.000000,2061.000000,648.000000,4.187500,217600.000000 +-121.380000,36.840000,17.000000,2625.000000,512.000000,1487.000000,481.000000,3.635400,221200.000000 +-121.390000,39.610000,22.000000,2828.000000,610.000000,986.000000,391.000000,2.887100,94700.000000 +-121.390000,39.390000,52.000000,189.000000,34.000000,121.000000,37.000000,3.020800,60000.000000 +-121.390000,39.120000,28.000000,10035.000000,1856.000000,6912.000000,1818.000000,2.094300,108300.000000 +-121.390000,38.850000,19.000000,3568.000000,646.000000,1714.000000,590.000000,4.086200,162700.000000 +-121.390000,38.690000,38.000000,300.000000,47.000000,154.000000,51.000000,4.090900,108300.000000 +-121.390000,38.690000,30.000000,2897.000000,506.000000,1508.000000,478.000000,3.865000,88400.000000 +-121.390000,38.670000,35.000000,562.000000,174.000000,240.000000,106.000000,0.933800,112500.000000 +-121.390000,38.640000,33.000000,1503.000000,282.000000,652.000000,229.000000,3.693700,99300.000000 +-121.390000,38.630000,34.000000,1226.000000,180.000000,359.000000,167.000000,3.806800,150400.000000 +-121.390000,38.630000,30.000000,2930.000000,739.000000,1661.000000,668.000000,2.781300,118900.000000 +-121.390000,38.620000,45.000000,2696.000000,624.000000,1059.000000,582.000000,1.817600,160900.000000 +-121.390000,38.620000,27.000000,5693.000000,1487.000000,2334.000000,1387.000000,2.284400,170500.000000 +-121.390000,38.610000,36.000000,2396.000000,536.000000,1225.000000,515.000000,2.955900,136600.000000 +-121.390000,38.590000,34.000000,1151.000000,234.000000,563.000000,251.000000,2.800000,113600.000000 +-121.390000,38.590000,33.000000,2091.000000,468.000000,1053.000000,470.000000,2.226400,108100.000000 +-121.390000,38.580000,41.000000,2577.000000,365.000000,913.000000,339.000000,6.340600,448300.000000 +-121.390000,38.580000,36.000000,2019.000000,369.000000,878.000000,356.000000,2.846200,93400.000000 +-121.390000,38.570000,33.000000,2648.000000,357.000000,863.000000,359.000000,8.401600,338700.000000 +-121.390000,38.560000,19.000000,8507.000000,1470.000000,3517.000000,1453.000000,4.364400,137400.000000 +-121.390000,38.550000,25.000000,2171.000000,431.000000,1053.000000,422.000000,3.527800,126200.000000 +-121.390000,38.550000,18.000000,1734.000000,467.000000,783.000000,447.000000,1.904400,154300.000000 +-121.390000,38.510000,19.000000,1808.000000,375.000000,758.000000,320.000000,2.006200,92000.000000 +-121.390000,38.430000,3.000000,2696.000000,384.000000,990.000000,316.000000,5.444500,237600.000000 +-121.390000,36.160000,28.000000,1057.000000,249.000000,288.000000,130.000000,3.052600,146900.000000 +-121.400000,39.330000,15.000000,2655.000000,493.000000,1200.000000,432.000000,3.517900,107200.000000 +-121.400000,38.710000,15.000000,4680.000000,758.000000,2626.000000,729.000000,3.835500,107000.000000 +-121.400000,38.630000,31.000000,1540.000000,452.000000,1079.000000,444.000000,1.857100,98700.000000 
+-121.400000,38.630000,30.000000,3626.000000,834.000000,1577.000000,806.000000,2.517000,130400.000000 +-121.400000,38.620000,28.000000,3671.000000,886.000000,1733.000000,820.000000,2.229200,113200.000000 +-121.400000,38.610000,33.000000,3512.000000,825.000000,1515.000000,782.000000,1.990800,118800.000000 +-121.400000,38.590000,18.000000,2614.000000,624.000000,1181.000000,616.000000,2.043200,156800.000000 +-121.400000,38.570000,25.000000,2022.000000,295.000000,639.000000,278.000000,5.841600,297600.000000 +-121.400000,38.560000,22.000000,2623.000000,357.000000,838.000000,368.000000,7.143000,327800.000000 +-121.400000,38.550000,26.000000,2697.000000,398.000000,1088.000000,389.000000,5.000000,142500.000000 +-121.400000,38.550000,19.000000,2497.000000,494.000000,748.000000,442.000000,2.925000,142400.000000 +-121.400000,38.530000,38.000000,152.000000,30.000000,65.000000,35.000000,0.927400,67500.000000 +-121.400000,38.490000,12.000000,7290.000000,1283.000000,3960.000000,1248.000000,3.596800,106300.000000 +-121.400000,38.470000,4.000000,20982.000000,3392.000000,10329.000000,3086.000000,4.365800,130600.000000 +-121.400000,37.740000,20.000000,2706.000000,477.000000,1236.000000,474.000000,4.150000,322400.000000 +-121.400000,36.860000,36.000000,1256.000000,270.000000,910.000000,255.000000,1.940500,145400.000000 +-121.400000,36.850000,50.000000,2666.000000,613.000000,1768.000000,555.000000,2.659800,157300.000000 +-121.400000,36.840000,52.000000,1860.000000,400.000000,1215.000000,367.000000,2.955400,136500.000000 +-121.400000,36.840000,40.000000,2352.000000,536.000000,1430.000000,535.000000,3.091200,155300.000000 +-121.400000,36.830000,11.000000,3701.000000,739.000000,1749.000000,654.000000,3.067000,207900.000000 +-121.410000,40.820000,16.000000,2668.000000,516.000000,915.000000,362.000000,2.339300,90300.000000 +-121.410000,39.720000,17.000000,1583.000000,331.000000,730.000000,306.000000,2.389500,87500.000000 +-121.410000,39.040000,16.000000,1698.000000,300.000000,731.000000,291.000000,3.073900,87200.000000 +-121.410000,38.690000,28.000000,1601.000000,308.000000,848.000000,305.000000,3.642900,105200.000000 +-121.410000,38.640000,41.000000,1578.000000,317.000000,897.000000,333.000000,2.321400,66800.000000 +-121.410000,38.640000,38.000000,1384.000000,287.000000,682.000000,280.000000,1.916700,64400.000000 +-121.410000,38.620000,21.000000,3260.000000,763.000000,1735.000000,736.000000,2.516200,97500.000000 +-121.410000,38.610000,36.000000,3099.000000,605.000000,1322.000000,623.000000,3.478400,105500.000000 +-121.410000,38.590000,18.000000,5527.000000,1446.000000,2883.000000,1305.000000,2.648500,114500.000000 +-121.410000,38.590000,17.000000,12355.000000,3630.000000,5692.000000,3073.000000,2.524500,99100.000000 +-121.410000,38.580000,18.000000,6955.000000,1882.000000,2803.000000,1740.000000,3.089000,141400.000000 +-121.410000,38.570000,16.000000,4429.000000,1124.000000,1538.000000,960.000000,3.244300,190700.000000 +-121.410000,38.560000,17.000000,7228.000000,1369.000000,2455.000000,1365.000000,5.138500,179500.000000 +-121.410000,38.550000,14.000000,2534.000000,705.000000,1495.000000,583.000000,1.916700,156300.000000 +-121.410000,38.530000,35.000000,2061.000000,371.000000,1110.000000,342.000000,3.194400,79000.000000 +-121.410000,38.520000,25.000000,3087.000000,720.000000,2529.000000,708.000000,1.868900,66800.000000 +-121.410000,38.340000,24.000000,1605.000000,277.000000,1966.000000,250.000000,3.083300,162500.000000 
+-121.410000,36.850000,11.000000,1708.000000,394.000000,1474.000000,372.000000,2.883900,145900.000000 +-121.410000,36.840000,23.000000,1771.000000,356.000000,1105.000000,338.000000,3.704900,192200.000000 +-121.420000,38.700000,10.000000,2562.000000,460.000000,1478.000000,433.000000,4.062500,96200.000000 +-121.420000,38.680000,32.000000,2118.000000,345.000000,1019.000000,338.000000,3.725000,112200.000000 +-121.420000,38.650000,21.000000,2274.000000,495.000000,1157.000000,445.000000,2.098000,49800.000000 +-121.420000,38.640000,44.000000,1728.000000,367.000000,1042.000000,349.000000,1.603300,58500.000000 +-121.420000,38.640000,42.000000,1720.000000,382.000000,1069.000000,362.000000,1.861100,60500.000000 +-121.420000,38.630000,42.000000,2217.000000,536.000000,1203.000000,507.000000,1.941200,73100.000000 +-121.420000,38.630000,42.000000,1385.000000,273.000000,740.000000,274.000000,2.605500,78000.000000 +-121.420000,38.620000,41.000000,1087.000000,272.000000,462.000000,219.000000,2.022400,64900.000000 +-121.420000,38.620000,33.000000,3171.000000,832.000000,1591.000000,695.000000,2.078600,88600.000000 +-121.420000,38.610000,34.000000,1126.000000,256.000000,589.000000,243.000000,2.177600,84400.000000 +-121.420000,38.600000,36.000000,1327.000000,209.000000,613.000000,230.000000,3.867200,111400.000000 +-121.420000,38.600000,35.000000,1166.000000,193.000000,574.000000,190.000000,2.245200,102800.000000 +-121.420000,38.600000,23.000000,3713.000000,1078.000000,2194.000000,1018.000000,1.745100,89600.000000 +-121.420000,38.570000,38.000000,1878.000000,338.000000,710.000000,342.000000,3.773100,161400.000000 +-121.420000,38.560000,21.000000,2066.000000,748.000000,2548.000000,734.000000,1.357100,55000.000000 +-121.420000,38.550000,35.000000,182.000000,39.000000,115.000000,43.000000,2.641700,98900.000000 +-121.420000,38.540000,29.000000,1407.000000,265.000000,556.000000,235.000000,3.052100,108000.000000 +-121.420000,38.540000,18.000000,2525.000000,501.000000,1726.000000,468.000000,2.398000,87600.000000 +-121.420000,38.530000,37.000000,1958.000000,367.000000,1171.000000,366.000000,2.829800,71200.000000 +-121.420000,38.530000,36.000000,1581.000000,288.000000,832.000000,291.000000,3.408300,71800.000000 +-121.420000,38.520000,32.000000,2828.000000,556.000000,1655.000000,485.000000,2.557400,72600.000000 +-121.420000,38.510000,21.000000,3249.000000,666.000000,2611.000000,663.000000,1.942300,87800.000000 +-121.420000,38.480000,13.000000,7880.000000,1992.000000,4749.000000,1882.000000,1.965700,116000.000000 +-121.420000,38.470000,11.000000,5665.000000,1507.000000,3422.000000,1299.000000,2.334300,97800.000000 +-121.420000,38.220000,35.000000,1507.000000,313.000000,868.000000,283.000000,2.028400,96300.000000 +-121.420000,37.760000,18.000000,5501.000000,1051.000000,2964.000000,1009.000000,4.185500,162100.000000 +-121.420000,37.750000,33.000000,1999.000000,368.000000,1061.000000,390.000000,3.524200,121400.000000 +-121.420000,37.750000,33.000000,1329.000000,266.000000,683.000000,233.000000,4.368700,128700.000000 +-121.420000,37.740000,45.000000,818.000000,144.000000,340.000000,138.000000,4.802100,133500.000000 +-121.420000,37.740000,38.000000,773.000000,147.000000,320.000000,134.000000,2.825000,152500.000000 +-121.420000,37.730000,2.000000,2682.000000,393.000000,883.000000,271.000000,5.993400,196700.000000 +-121.420000,37.710000,7.000000,8297.000000,1433.000000,4189.000000,1271.000000,4.369600,170700.000000 +-121.420000,36.850000,7.000000,1626.000000,325.000000,677.000000,304.000000,2.312500,170800.000000 
+-121.430000,39.180000,36.000000,1124.000000,184.000000,504.000000,171.000000,2.166700,93800.000000 +-121.430000,38.690000,28.000000,927.000000,165.000000,542.000000,148.000000,2.500000,96200.000000 +-121.430000,38.660000,35.000000,1814.000000,367.000000,1076.000000,372.000000,2.717900,81100.000000 +-121.430000,38.650000,18.000000,909.000000,198.000000,661.000000,176.000000,3.169600,77400.000000 +-121.430000,38.620000,36.000000,1765.000000,438.000000,1008.000000,382.000000,2.063900,73000.000000 +-121.430000,38.610000,40.000000,1134.000000,252.000000,675.000000,249.000000,1.369600,65200.000000 +-121.430000,38.610000,33.000000,2289.000000,576.000000,1100.000000,503.000000,2.169400,95700.000000 +-121.430000,38.570000,46.000000,2443.000000,476.000000,939.000000,457.000000,3.589300,142000.000000 +-121.430000,38.570000,38.000000,2507.000000,446.000000,888.000000,448.000000,4.097200,163700.000000 +-121.430000,38.560000,46.000000,1316.000000,244.000000,452.000000,245.000000,3.093800,137800.000000 +-121.430000,38.560000,41.000000,1105.000000,227.000000,443.000000,210.000000,3.182700,131700.000000 +-121.430000,38.550000,44.000000,3514.000000,714.000000,1509.000000,656.000000,2.733300,100100.000000 +-121.430000,38.540000,44.000000,1879.000000,359.000000,791.000000,345.000000,3.150000,101500.000000 +-121.430000,38.540000,42.000000,3321.000000,688.000000,1346.000000,658.000000,2.461800,101300.000000 +-121.430000,38.530000,36.000000,2430.000000,426.000000,1199.000000,437.000000,3.166700,81900.000000 +-121.430000,38.530000,36.000000,1488.000000,294.000000,846.000000,279.000000,3.120800,82700.000000 +-121.430000,38.520000,43.000000,2089.000000,399.000000,955.000000,385.000000,2.589800,72100.000000 +-121.430000,38.520000,30.000000,3657.000000,945.000000,2925.000000,899.000000,1.392700,78300.000000 +-121.430000,38.480000,12.000000,4602.000000,930.000000,2299.000000,860.000000,3.062500,90500.000000 +-121.430000,38.470000,21.000000,1787.000000,291.000000,988.000000,301.000000,4.350000,96200.000000 +-121.430000,38.460000,18.000000,1378.000000,235.000000,818.000000,262.000000,4.062500,100300.000000 +-121.430000,37.780000,24.000000,807.000000,174.000000,585.000000,166.000000,2.618100,163500.000000 +-121.430000,37.760000,7.000000,2125.000000,508.000000,1358.000000,464.000000,3.631200,147600.000000 +-121.430000,37.750000,42.000000,1207.000000,278.000000,699.000000,279.000000,3.361100,117600.000000 +-121.430000,37.750000,41.000000,1717.000000,325.000000,855.000000,303.000000,2.750000,127300.000000 +-121.430000,37.750000,34.000000,1280.000000,268.000000,754.000000,294.000000,3.133300,132000.000000 +-121.430000,37.750000,30.000000,1912.000000,451.000000,1065.000000,388.000000,2.142400,125000.000000 +-121.430000,37.740000,52.000000,994.000000,258.000000,623.000000,264.000000,1.725000,111500.000000 +-121.430000,37.740000,52.000000,966.000000,247.000000,589.000000,228.000000,1.693700,108300.000000 +-121.430000,37.740000,52.000000,876.000000,170.000000,426.000000,179.000000,3.086500,119800.000000 +-121.430000,37.740000,40.000000,859.000000,196.000000,427.000000,176.000000,3.578900,110400.000000 +-121.430000,37.730000,40.000000,1718.000000,391.000000,1312.000000,388.000000,2.995500,134700.000000 +-121.430000,36.500000,14.000000,1835.000000,468.000000,1867.000000,461.000000,2.387900,129800.000000 +-121.440000,39.500000,26.000000,1652.000000,325.000000,790.000000,292.000000,3.044600,90800.000000 +-121.440000,39.000000,20.000000,755.000000,147.000000,457.000000,157.000000,2.416700,67000.000000 
+-121.440000,38.710000,25.000000,2336.000000,406.000000,1172.000000,408.000000,3.512900,101200.000000 +-121.440000,38.690000,24.000000,3124.000000,556.000000,1512.000000,555.000000,3.194200,94900.000000 +-121.440000,38.650000,28.000000,1219.000000,240.000000,559.000000,212.000000,3.829500,122200.000000 +-121.440000,38.640000,25.000000,1678.000000,367.000000,971.000000,307.000000,1.039800,62100.000000 +-121.440000,38.640000,18.000000,1756.000000,442.000000,837.000000,320.000000,1.125000,70500.000000 +-121.440000,38.630000,38.000000,1673.000000,399.000000,1116.000000,382.000000,1.330200,62200.000000 +-121.440000,38.630000,38.000000,1402.000000,370.000000,970.000000,382.000000,1.634300,71000.000000 +-121.440000,38.630000,33.000000,1077.000000,271.000000,753.000000,236.000000,1.346200,55900.000000 +-121.440000,38.620000,37.000000,3009.000000,733.000000,1513.000000,588.000000,1.438700,61000.000000 +-121.440000,38.620000,37.000000,1607.000000,385.000000,972.000000,354.000000,1.910700,64700.000000 +-121.440000,38.610000,41.000000,1404.000000,313.000000,765.000000,330.000000,1.879200,63300.000000 +-121.440000,38.610000,33.000000,1591.000000,466.000000,1000.000000,418.000000,1.046700,70100.000000 +-121.440000,38.600000,16.000000,2987.000000,864.000000,1240.000000,755.000000,2.823100,137500.000000 +-121.440000,38.580000,43.000000,1806.000000,339.000000,764.000000,341.000000,3.927100,147100.000000 +-121.440000,38.580000,42.000000,2334.000000,435.000000,892.000000,446.000000,3.020800,148800.000000 +-121.440000,38.570000,52.000000,3080.000000,545.000000,975.000000,495.000000,3.776000,164500.000000 +-121.440000,38.560000,52.000000,906.000000,165.000000,257.000000,166.000000,2.854200,139400.000000 +-121.440000,38.560000,45.000000,2423.000000,466.000000,873.000000,438.000000,3.716700,131900.000000 +-121.440000,38.550000,46.000000,1698.000000,383.000000,726.000000,386.000000,2.982100,97000.000000 +-121.440000,38.540000,47.000000,2518.000000,501.000000,1308.000000,471.000000,2.538900,75700.000000 +-121.440000,38.540000,44.000000,2570.000000,509.000000,1145.000000,503.000000,2.569400,92400.000000 +-121.440000,38.530000,37.000000,1951.000000,432.000000,1089.000000,411.000000,2.327200,80600.000000 +-121.440000,38.520000,38.000000,2080.000000,388.000000,995.000000,380.000000,2.769700,76600.000000 +-121.440000,38.510000,27.000000,7212.000000,1606.000000,4828.000000,1549.000000,2.214000,82400.000000 +-121.440000,38.500000,27.000000,2527.000000,439.000000,1089.000000,415.000000,4.088000,96800.000000 +-121.440000,38.500000,20.000000,2033.000000,586.000000,1281.000000,521.000000,1.400700,97500.000000 +-121.440000,38.490000,31.000000,4297.000000,788.000000,2083.000000,771.000000,3.387800,109300.000000 +-121.440000,38.470000,16.000000,1215.000000,223.000000,787.000000,233.000000,4.159700,95900.000000 +-121.440000,38.460000,10.000000,4446.000000,897.000000,2499.000000,884.000000,3.546100,103600.000000 +-121.440000,37.750000,29.000000,918.000000,159.000000,417.000000,166.000000,4.276800,151300.000000 +-121.440000,37.740000,33.000000,1875.000000,363.000000,970.000000,381.000000,3.509600,141700.000000 +-121.440000,37.740000,25.000000,456.000000,116.000000,370.000000,106.000000,3.131900,112500.000000 +-121.440000,37.730000,7.000000,8363.000000,1314.000000,3907.000000,1068.000000,5.332100,208100.000000 +-121.440000,37.700000,5.000000,1365.000000,196.000000,591.000000,156.000000,6.038900,215100.000000 +-121.440000,36.840000,7.000000,1644.000000,338.000000,1143.000000,331.000000,4.005000,180400.000000 
+-121.440000,36.510000,31.000000,1636.000000,380.000000,1468.000000,339.000000,3.221900,114700.000000 +-121.450000,41.040000,33.000000,2029.000000,378.000000,936.000000,343.000000,2.670000,77500.000000 +-121.450000,39.260000,15.000000,2319.000000,416.000000,1047.000000,385.000000,3.125000,115600.000000 +-121.450000,38.700000,24.000000,2159.000000,369.000000,1141.000000,355.000000,3.985300,90400.000000 +-121.450000,38.690000,32.000000,2962.000000,526.000000,1542.000000,521.000000,2.224300,89200.000000 +-121.450000,38.650000,5.000000,2680.000000,502.000000,1885.000000,498.000000,2.636900,110000.000000 +-121.450000,38.640000,23.000000,1481.000000,343.000000,1079.000000,315.000000,1.867000,60600.000000 +-121.450000,38.630000,28.000000,1246.000000,295.000000,884.000000,258.000000,1.439700,51700.000000 +-121.450000,38.620000,38.000000,2419.000000,605.000000,1696.000000,503.000000,1.486100,63100.000000 +-121.450000,38.620000,37.000000,1534.000000,315.000000,1147.000000,322.000000,2.564300,59800.000000 +-121.450000,38.610000,46.000000,1758.000000,511.000000,1094.000000,484.000000,1.068500,70000.000000 +-121.450000,38.610000,34.000000,438.000000,116.000000,263.000000,100.000000,0.937900,67500.000000 +-121.450000,38.570000,52.000000,3994.000000,635.000000,1295.000000,625.000000,5.116900,232500.000000 +-121.450000,38.570000,52.000000,2006.000000,412.000000,825.000000,384.000000,3.296300,236100.000000 +-121.450000,38.570000,48.000000,1962.000000,356.000000,704.000000,362.000000,3.531300,147900.000000 +-121.450000,38.560000,52.000000,3420.000000,555.000000,1301.000000,530.000000,4.041700,173800.000000 +-121.450000,38.560000,52.000000,3170.000000,476.000000,1027.000000,457.000000,4.630000,233800.000000 +-121.450000,38.550000,19.000000,3374.000000,808.000000,1412.000000,753.000000,1.488900,77600.000000 +-121.450000,38.540000,48.000000,3421.000000,734.000000,1441.000000,727.000000,1.948500,86600.000000 +-121.450000,38.540000,47.000000,1159.000000,250.000000,810.000000,244.000000,2.778700,56000.000000 +-121.450000,38.540000,41.000000,1278.000000,308.000000,839.000000,280.000000,1.470200,58300.000000 +-121.450000,38.530000,38.000000,1746.000000,388.000000,1142.000000,315.000000,1.771400,69900.000000 +-121.450000,38.530000,34.000000,1893.000000,415.000000,884.000000,395.000000,2.167900,75400.000000 +-121.450000,38.530000,34.000000,1717.000000,354.000000,848.000000,306.000000,2.474100,87000.000000 +-121.450000,38.520000,37.000000,1705.000000,325.000000,827.000000,326.000000,2.628800,71200.000000 +-121.450000,38.520000,37.000000,1477.000000,321.000000,888.000000,312.000000,2.559200,70300.000000 +-121.450000,38.500000,25.000000,3033.000000,665.000000,1559.000000,627.000000,2.710100,99500.000000 +-121.450000,38.490000,34.000000,3573.000000,662.000000,1540.000000,620.000000,3.532300,109800.000000 +-121.450000,38.480000,28.000000,2780.000000,510.000000,1638.000000,533.000000,2.957100,103100.000000 +-121.450000,38.480000,24.000000,1766.000000,340.000000,1028.000000,372.000000,3.540200,98700.000000 +-121.450000,38.370000,32.000000,1441.000000,261.000000,629.000000,249.000000,4.451900,137500.000000 +-121.450000,37.750000,15.000000,3846.000000,677.000000,2360.000000,635.000000,4.617300,164800.000000 +-121.450000,37.720000,2.000000,2239.000000,321.000000,766.000000,219.000000,5.750000,240200.000000 +-121.450000,36.860000,11.000000,1613.000000,335.000000,1617.000000,342.000000,3.137500,146200.000000 +-121.450000,36.510000,29.000000,1045.000000,311.000000,1245.000000,273.000000,1.775000,112500.000000 
+-121.460000,39.400000,17.000000,3659.000000,735.000000,1970.000000,667.000000,2.425000,96200.000000 +-121.460000,38.690000,11.000000,3335.000000,658.000000,1963.000000,622.000000,3.312500,96800.000000 +-121.460000,38.680000,35.000000,1299.000000,254.000000,705.000000,245.000000,2.833300,103000.000000 +-121.460000,38.660000,3.000000,3438.000000,603.000000,1602.000000,554.000000,3.991400,120500.000000 +-121.460000,38.650000,8.000000,3746.000000,767.000000,2161.000000,744.000000,3.203900,103400.000000 +-121.460000,38.650000,14.000000,3167.000000,551.000000,1787.000000,533.000000,3.812500,92600.000000 +-121.460000,38.640000,20.000000,1517.000000,323.000000,1287.000000,328.000000,1.660700,67000.000000 +-121.460000,38.630000,26.000000,3185.000000,658.000000,2444.000000,626.000000,1.560000,67600.000000 +-121.460000,38.620000,35.000000,3326.000000,696.000000,2511.000000,649.000000,1.987100,60900.000000 +-121.460000,38.610000,43.000000,705.000000,178.000000,464.000000,159.000000,2.420500,60900.000000 +-121.460000,38.600000,29.000000,1978.000000,538.000000,823.000000,490.000000,1.968800,135600.000000 +-121.460000,38.580000,52.000000,4408.000000,807.000000,1604.000000,777.000000,3.891400,181600.000000 +-121.460000,38.580000,40.000000,1394.000000,397.000000,689.000000,353.000000,1.776500,109800.000000 +-121.460000,38.570000,52.000000,893.000000,159.000000,367.000000,160.000000,3.238600,213200.000000 +-121.460000,38.570000,52.000000,810.000000,172.000000,326.000000,151.000000,3.158300,140000.000000 +-121.460000,38.570000,52.000000,1917.000000,367.000000,722.000000,358.000000,3.148400,158900.000000 +-121.460000,38.560000,52.000000,1878.000000,393.000000,722.000000,381.000000,3.334800,122800.000000 +-121.460000,38.550000,52.000000,3126.000000,648.000000,1789.000000,558.000000,1.761600,84100.000000 +-121.460000,38.550000,52.000000,2094.000000,463.000000,1364.000000,407.000000,1.223500,68500.000000 +-121.460000,38.550000,40.000000,2077.000000,435.000000,1454.000000,385.000000,2.007400,57000.000000 +-121.460000,38.540000,39.000000,1453.000000,324.000000,843.000000,281.000000,1.769200,63900.000000 +-121.460000,38.540000,36.000000,1825.000000,411.000000,1226.000000,391.000000,1.529200,55700.000000 +-121.460000,38.530000,37.000000,2745.000000,588.000000,1607.000000,556.000000,1.800700,65400.000000 +-121.460000,38.520000,34.000000,1279.000000,285.000000,963.000000,268.000000,2.710000,65600.000000 +-121.460000,38.520000,29.000000,3873.000000,797.000000,2237.000000,706.000000,2.173600,72100.000000 +-121.460000,38.510000,32.000000,2437.000000,592.000000,1596.000000,557.000000,1.680000,84100.000000 +-121.460000,38.510000,18.000000,2123.000000,606.000000,1576.000000,599.000000,1.573500,110000.000000 +-121.460000,38.490000,15.000000,10211.000000,1995.000000,5656.000000,1752.000000,2.575000,107900.000000 +-121.460000,38.480000,8.000000,3593.000000,659.000000,1710.000000,530.000000,3.522700,93100.000000 +-121.460000,37.730000,20.000000,2039.000000,373.000000,862.000000,330.000000,5.162900,222900.000000 +-121.470000,41.120000,22.000000,2737.000000,512.000000,1168.000000,442.000000,2.830000,88700.000000 +-121.470000,39.510000,19.000000,3720.000000,636.000000,1304.000000,607.000000,2.692100,97500.000000 +-121.470000,39.010000,37.000000,1244.000000,247.000000,484.000000,157.000000,2.366100,77500.000000 +-121.470000,38.950000,34.000000,2129.000000,350.000000,969.000000,314.000000,2.703900,106300.000000 +-121.470000,38.720000,26.000000,1708.000000,299.000000,911.000000,290.000000,4.022700,99800.000000 
+-121.470000,38.680000,19.000000,946.000000,182.000000,474.000000,173.000000,5.015500,97300.000000 +-121.470000,38.630000,29.000000,2197.000000,520.000000,1374.000000,483.000000,2.188900,69300.000000 +-121.470000,38.610000,31.000000,1072.000000,279.000000,781.000000,281.000000,1.656300,65800.000000 +-121.470000,38.580000,52.000000,2035.000000,483.000000,904.000000,459.000000,2.697600,109300.000000 +-121.470000,38.580000,44.000000,2092.000000,555.000000,878.000000,528.000000,1.592200,115100.000000 +-121.470000,38.570000,52.000000,438.000000,103.000000,176.000000,99.000000,3.021700,200000.000000 +-121.470000,38.570000,50.000000,3233.000000,968.000000,1223.000000,837.000000,1.204100,168100.000000 +-121.470000,38.570000,39.000000,1360.000000,368.000000,589.000000,338.000000,2.169100,150000.000000 +-121.470000,38.560000,52.000000,889.000000,162.000000,273.000000,145.000000,3.125000,85600.000000 +-121.470000,38.560000,52.000000,1532.000000,408.000000,782.000000,369.000000,1.891100,85900.000000 +-121.470000,38.560000,44.000000,1986.000000,573.000000,1044.000000,490.000000,1.732800,88100.000000 +-121.470000,38.550000,52.000000,1384.000000,295.000000,561.000000,244.000000,2.024200,94600.000000 +-121.470000,38.550000,48.000000,968.000000,310.000000,706.000000,274.000000,0.994800,65400.000000 +-121.470000,38.550000,48.000000,1091.000000,403.000000,926.000000,336.000000,1.145800,65400.000000 +-121.470000,38.550000,29.000000,1303.000000,308.000000,861.000000,263.000000,1.020800,55800.000000 +-121.470000,38.550000,24.000000,979.000000,287.000000,546.000000,291.000000,1.186000,67000.000000 +-121.470000,38.540000,47.000000,2085.000000,464.000000,1346.000000,402.000000,1.267900,56700.000000 +-121.470000,38.540000,36.000000,2099.000000,510.000000,1845.000000,483.000000,1.413800,52500.000000 +-121.470000,38.530000,44.000000,543.000000,146.000000,506.000000,125.000000,1.364600,65400.000000 +-121.470000,38.530000,43.000000,3215.000000,725.000000,2400.000000,625.000000,1.462500,54400.000000 +-121.470000,38.520000,26.000000,2177.000000,638.000000,1971.000000,560.000000,1.257500,66800.000000 +-121.470000,38.510000,52.000000,20.000000,4.000000,74.000000,9.000000,3.625000,80000.000000 +-121.470000,38.500000,17.000000,1895.000000,424.000000,620.000000,417.000000,1.718800,137500.000000 +-121.470000,38.490000,17.000000,3595.000000,790.000000,2760.000000,770.000000,2.323300,78800.000000 +-121.470000,38.480000,24.000000,2359.000000,462.000000,2048.000000,476.000000,3.270200,67300.000000 +-121.470000,38.130000,13.000000,3192.000000,715.000000,1768.000000,626.000000,2.261900,123500.000000 +-121.470000,36.920000,27.000000,2049.000000,417.000000,1230.000000,336.000000,4.647700,265900.000000 +-121.480000,39.050000,40.000000,198.000000,41.000000,151.000000,48.000000,4.562500,100000.000000 +-121.480000,38.620000,23.000000,7709.000000,1279.000000,4147.000000,1262.000000,3.827200,96600.000000 +-121.480000,38.610000,18.000000,1511.000000,315.000000,1062.000000,304.000000,2.343800,89400.000000 +-121.480000,38.590000,52.000000,1186.000000,341.000000,1038.000000,320.000000,1.611600,70500.000000 +-121.480000,38.590000,43.000000,987.000000,240.000000,1253.000000,237.000000,0.920400,82100.000000 +-121.480000,38.580000,52.000000,576.000000,146.000000,273.000000,127.000000,2.010000,94300.000000 +-121.480000,38.580000,52.000000,2501.000000,757.000000,1081.000000,708.000000,1.587200,157500.000000 +-121.480000,38.580000,48.000000,2434.000000,744.000000,1281.000000,662.000000,1.627700,140600.000000 
+-121.480000,38.580000,42.000000,1823.000000,566.000000,761.000000,503.000000,1.245000,137500.000000 +-121.480000,38.570000,52.000000,567.000000,193.000000,272.000000,187.000000,1.625000,187500.000000 +-121.480000,38.570000,47.000000,2438.000000,804.000000,1148.000000,747.000000,1.430100,141700.000000 +-121.480000,38.570000,38.000000,2809.000000,805.000000,1243.000000,785.000000,1.851200,114100.000000 +-121.480000,38.570000,38.000000,1145.000000,324.000000,596.000000,288.000000,1.780000,114300.000000 +-121.480000,38.560000,52.000000,814.000000,216.000000,327.000000,181.000000,2.854200,125000.000000 +-121.480000,38.560000,50.000000,1587.000000,448.000000,877.000000,380.000000,2.083300,94300.000000 +-121.480000,38.560000,46.000000,1476.000000,344.000000,688.000000,353.000000,2.731600,134700.000000 +-121.480000,38.560000,44.000000,1151.000000,263.000000,518.000000,258.000000,2.008900,113600.000000 +-121.480000,38.550000,52.000000,2508.000000,360.000000,832.000000,345.000000,7.103500,228700.000000 +-121.480000,38.550000,52.000000,2216.000000,333.000000,714.000000,327.000000,4.860300,191900.000000 +-121.480000,38.550000,52.000000,2037.000000,358.000000,811.000000,375.000000,4.392900,162500.000000 +-121.480000,38.550000,52.000000,1684.000000,309.000000,675.000000,296.000000,4.146700,175000.000000 +-121.480000,38.540000,41.000000,3364.000000,685.000000,1841.000000,626.000000,2.197500,73500.000000 +-121.480000,38.530000,43.000000,1378.000000,280.000000,708.000000,280.000000,2.354200,103900.000000 +-121.480000,38.530000,38.000000,1451.000000,315.000000,786.000000,340.000000,2.348700,101600.000000 +-121.480000,38.530000,37.000000,1704.000000,361.000000,902.000000,356.000000,1.983700,62300.000000 +-121.480000,38.520000,36.000000,1824.000000,357.000000,906.000000,356.000000,2.991100,96400.000000 +-121.480000,38.520000,34.000000,2561.000000,497.000000,1583.000000,530.000000,3.158300,95800.000000 +-121.480000,38.510000,24.000000,979.000000,201.000000,723.000000,205.000000,2.592600,72300.000000 +-121.480000,38.500000,23.000000,2679.000000,792.000000,1740.000000,659.000000,1.367900,70300.000000 +-121.480000,38.490000,26.000000,3165.000000,806.000000,2447.000000,752.000000,1.590800,78600.000000 +-121.480000,38.460000,8.000000,10505.000000,1777.000000,6002.000000,1694.000000,4.051600,121200.000000 +-121.480000,37.770000,19.000000,2364.000000,373.000000,1264.000000,390.000000,5.017600,274200.000000 +-121.480000,36.490000,28.000000,1006.000000,228.000000,738.000000,193.000000,1.972200,210700.000000 +-121.490000,39.520000,25.000000,848.000000,153.000000,436.000000,155.000000,3.902800,93800.000000 +-121.490000,38.630000,6.000000,12197.000000,2617.000000,5634.000000,2329.000000,3.744900,129300.000000 +-121.490000,38.620000,8.000000,15309.000000,2996.000000,7463.000000,2885.000000,3.914300,129700.000000 +-121.490000,38.610000,6.000000,4391.000000,974.000000,1982.000000,914.000000,3.429100,105300.000000 +-121.490000,38.590000,20.000000,463.000000,180.000000,486.000000,190.000000,1.031300,85000.000000 +-121.490000,38.580000,52.000000,569.000000,405.000000,509.000000,367.000000,0.919600,137500.000000 +-121.490000,38.580000,52.000000,2151.000000,664.000000,1146.000000,603.000000,1.403400,90300.000000 +-121.490000,38.580000,52.000000,1000.000000,324.000000,456.000000,250.000000,1.437500,168800.000000 +-121.490000,38.570000,38.000000,2410.000000,967.000000,1091.000000,829.000000,1.220900,87900.000000 +-121.490000,38.560000,52.000000,1844.000000,392.000000,667.000000,353.000000,3.003300,103500.000000 
+-121.490000,38.560000,42.000000,900.000000,239.000000,506.000000,231.000000,1.281300,87500.000000 +-121.490000,38.560000,35.000000,1521.000000,457.000000,987.000000,455.000000,1.901300,86900.000000 +-121.490000,38.550000,52.000000,2515.000000,460.000000,836.000000,442.000000,3.384400,151100.000000 +-121.490000,38.550000,51.000000,4280.000000,632.000000,1486.000000,621.000000,5.035900,224100.000000 +-121.490000,38.540000,47.000000,2313.000000,536.000000,779.000000,442.000000,2.563900,123000.000000 +-121.490000,38.540000,37.000000,1655.000000,393.000000,841.000000,355.000000,1.693200,78400.000000 +-121.490000,38.530000,42.000000,1468.000000,281.000000,571.000000,271.000000,3.390600,124200.000000 +-121.490000,38.530000,40.000000,2966.000000,536.000000,1225.000000,505.000000,3.125000,130600.000000 +-121.490000,38.520000,37.000000,1902.000000,413.000000,955.000000,384.000000,3.101400,96800.000000 +-121.490000,38.510000,18.000000,700.000000,169.000000,260.000000,128.000000,2.921900,152900.000000 +-121.490000,38.500000,32.000000,2364.000000,439.000000,1331.000000,449.000000,3.319000,84500.000000 +-121.490000,38.500000,30.000000,1715.000000,271.000000,842.000000,263.000000,3.031300,87900.000000 +-121.490000,38.490000,26.000000,4629.000000,832.000000,2902.000000,816.000000,2.735000,74600.000000 +-121.490000,38.490000,26.000000,1557.000000,301.000000,986.000000,300.000000,2.661300,77700.000000 +-121.490000,38.470000,26.000000,6121.000000,1185.000000,4224.000000,1105.000000,2.349600,68000.000000 +-121.500000,39.830000,15.000000,1896.000000,408.000000,893.000000,334.000000,1.694800,87500.000000 +-121.500000,38.630000,6.000000,693.000000,143.000000,276.000000,151.000000,3.194400,117000.000000 +-121.500000,38.620000,8.000000,16679.000000,3457.000000,7919.000000,3329.000000,3.718800,134500.000000 +-121.500000,38.590000,43.000000,88.000000,21.000000,119.000000,19.000000,1.725000,67500.000000 +-121.500000,38.580000,5.000000,761.000000,306.000000,2031.000000,295.000000,0.752600,162500.000000 +-121.500000,38.580000,20.000000,4018.000000,1220.000000,1570.000000,1122.000000,2.582100,125000.000000 +-121.500000,38.570000,9.000000,745.000000,175.000000,297.000000,160.000000,3.358000,77500.000000 +-121.500000,38.570000,45.000000,858.000000,254.000000,510.000000,200.000000,1.011400,80000.000000 +-121.500000,38.570000,44.000000,1375.000000,351.000000,766.000000,321.000000,2.171900,87500.000000 +-121.500000,38.560000,46.000000,2646.000000,645.000000,1684.000000,616.000000,1.128000,123100.000000 +-121.500000,38.550000,52.000000,2784.000000,455.000000,957.000000,448.000000,5.640200,209800.000000 +-121.500000,38.550000,49.000000,4094.000000,634.000000,1363.000000,659.000000,5.236200,236800.000000 +-121.500000,38.540000,52.000000,1145.000000,133.000000,334.000000,138.000000,8.338000,405800.000000 +-121.500000,38.540000,44.000000,1167.000000,201.000000,452.000000,209.000000,3.734400,179800.000000 +-121.500000,38.530000,39.000000,3184.000000,593.000000,1188.000000,572.000000,4.692300,192000.000000 +-121.500000,38.530000,37.000000,3642.000000,684.000000,1508.000000,657.000000,3.523100,114300.000000 +-121.500000,38.490000,32.000000,4013.000000,725.000000,2032.000000,675.000000,3.368900,83400.000000 +-121.500000,38.490000,29.000000,3606.000000,690.000000,2317.000000,696.000000,2.736800,78200.000000 +-121.500000,38.340000,36.000000,1212.000000,255.000000,569.000000,256.000000,2.004800,72900.000000 +-121.510000,39.970000,22.000000,1468.000000,285.000000,611.000000,235.000000,2.303600,73000.000000 
+-121.510000,38.580000,42.000000,1822.000000,636.000000,1372.000000,560.000000,1.254200,76000.000000 +-121.510000,38.570000,36.000000,613.000000,166.000000,425.000000,147.000000,2.203100,93800.000000 +-121.510000,38.550000,45.000000,3032.000000,631.000000,1341.000000,597.000000,2.841700,137900.000000 +-121.510000,38.540000,34.000000,2815.000000,479.000000,1075.000000,471.000000,3.979200,164800.000000 +-121.510000,38.530000,36.000000,2603.000000,408.000000,966.000000,419.000000,5.313500,216600.000000 +-121.510000,38.530000,34.000000,1613.000000,265.000000,631.000000,266.000000,4.250000,191900.000000 +-121.510000,38.510000,33.000000,2918.000000,439.000000,1085.000000,427.000000,5.520800,171300.000000 +-121.510000,38.510000,31.000000,1595.000000,217.000000,542.000000,236.000000,6.611200,251600.000000 +-121.510000,38.500000,25.000000,4719.000000,745.000000,1857.000000,739.000000,5.037100,180200.000000 +-121.510000,38.490000,21.000000,4426.000000,790.000000,1856.000000,761.000000,4.100000,158300.000000 +-121.510000,38.260000,52.000000,910.000000,212.000000,429.000000,212.000000,1.645800,52800.000000 +-121.510000,37.020000,19.000000,2372.000000,394.000000,1142.000000,365.000000,4.023800,374600.000000 +-121.510000,36.860000,36.000000,1019.000000,168.000000,602.000000,169.000000,2.625000,210000.000000 +-121.520000,39.490000,30.000000,1217.000000,238.000000,677.000000,233.000000,2.656300,63600.000000 +-121.520000,39.480000,21.000000,2628.000000,494.000000,1364.000000,468.000000,2.045500,59400.000000 +-121.520000,39.430000,15.000000,2119.000000,389.000000,1079.000000,374.000000,2.356600,80400.000000 +-121.520000,39.120000,37.000000,102.000000,17.000000,29.000000,14.000000,4.125000,72000.000000 +-121.520000,38.900000,32.000000,1650.000000,313.000000,802.000000,284.000000,2.904800,98200.000000 +-121.520000,38.590000,35.000000,6418.000000,1545.000000,3814.000000,1496.000000,1.664700,69100.000000 +-121.520000,38.580000,24.000000,938.000000,275.000000,508.000000,253.000000,1.642000,32500.000000 +-121.520000,38.570000,43.000000,2360.000000,471.000000,1041.000000,452.000000,2.890000,86200.000000 +-121.520000,38.530000,31.000000,3089.000000,585.000000,1366.000000,561.000000,4.288500,160300.000000 +-121.520000,38.530000,30.000000,3377.000000,623.000000,1289.000000,594.000000,3.573700,171200.000000 +-121.520000,38.510000,23.000000,6876.000000,1456.000000,2942.000000,1386.000000,3.096300,156900.000000 +-121.520000,38.500000,19.000000,4900.000000,805.000000,2519.000000,855.000000,4.849700,184400.000000 +-121.520000,38.490000,5.000000,3344.000000,800.000000,1341.000000,670.000000,3.619600,152800.000000 +-121.520000,37.750000,18.000000,1544.000000,272.000000,825.000000,286.000000,4.322900,327300.000000 +-121.530000,39.530000,35.000000,1806.000000,293.000000,683.000000,295.000000,4.515600,91200.000000 +-121.530000,39.520000,24.000000,1028.000000,185.000000,471.000000,186.000000,2.968800,86400.000000 +-121.530000,39.490000,19.000000,1537.000000,329.000000,617.000000,274.000000,1.531300,50300.000000 +-121.530000,39.440000,26.000000,1340.000000,255.000000,662.000000,239.000000,2.607100,57100.000000 +-121.530000,39.190000,27.000000,2080.000000,412.000000,1082.000000,382.000000,2.549500,98300.000000 +-121.530000,39.080000,15.000000,1810.000000,441.000000,1157.000000,375.000000,2.046900,55100.000000 +-121.530000,38.610000,5.000000,8149.000000,1913.000000,2933.000000,1616.000000,3.678800,178800.000000 +-121.530000,38.580000,35.000000,1316.000000,321.000000,732.000000,336.000000,2.121300,79200.000000 
+-121.530000,38.580000,33.000000,4988.000000,1169.000000,2414.000000,1075.000000,1.972800,76400.000000 +-121.530000,38.570000,34.000000,3395.000000,592.000000,1518.000000,627.000000,4.083300,118500.000000 +-121.530000,38.560000,39.000000,2438.000000,483.000000,1103.000000,472.000000,2.937500,86600.000000 +-121.530000,38.510000,20.000000,6132.000000,1324.000000,2595.000000,1174.000000,3.160700,178900.000000 +-121.530000,36.850000,23.000000,3359.000000,725.000000,1862.000000,651.000000,2.671900,193600.000000 +-121.540000,40.060000,17.000000,858.000000,262.000000,47.000000,27.000000,2.402800,67500.000000 +-121.540000,39.600000,15.000000,886.000000,204.000000,576.000000,205.000000,2.146700,84100.000000 +-121.540000,39.500000,38.000000,1438.000000,310.000000,779.000000,275.000000,1.328900,39400.000000 +-121.540000,39.480000,29.000000,2896.000000,596.000000,1809.000000,617.000000,1.804700,53800.000000 +-121.540000,39.470000,14.000000,1724.000000,315.000000,939.000000,302.000000,2.495200,53900.000000 +-121.540000,39.330000,27.000000,720.000000,150.000000,359.000000,138.000000,2.531300,61300.000000 +-121.540000,39.130000,18.000000,4289.000000,1021.000000,2707.000000,939.000000,1.337500,59600.000000 +-121.540000,39.120000,17.000000,4251.000000,899.000000,3265.000000,934.000000,2.349600,65000.000000 +-121.540000,39.080000,26.000000,2276.000000,460.000000,1455.000000,474.000000,2.469500,58000.000000 +-121.540000,38.590000,40.000000,2120.000000,504.000000,1304.000000,464.000000,2.036800,67800.000000 +-121.540000,38.590000,29.000000,2242.000000,493.000000,1481.000000,478.000000,2.078100,74800.000000 +-121.540000,38.580000,30.000000,4648.000000,1252.000000,2524.000000,1089.000000,1.317700,74300.000000 +-121.540000,38.540000,36.000000,1672.000000,302.000000,969.000000,337.000000,3.053600,73100.000000 +-121.540000,38.510000,17.000000,8482.000000,1590.000000,3362.000000,1513.000000,4.221600,217900.000000 +-121.540000,38.490000,6.000000,9104.000000,1535.000000,3759.000000,1481.000000,5.144200,174500.000000 +-121.540000,38.290000,47.000000,1396.000000,254.000000,630.000000,218.000000,2.861600,92500.000000 +-121.540000,36.700000,12.000000,6758.000000,1241.000000,3918.000000,1100.000000,3.525000,201700.000000 +-121.550000,39.510000,48.000000,827.000000,198.000000,396.000000,161.000000,0.802400,58300.000000 +-121.550000,39.510000,39.000000,1551.000000,353.000000,684.000000,310.000000,2.035700,57600.000000 +-121.550000,39.500000,26.000000,3215.000000,827.000000,2041.000000,737.000000,1.058500,45100.000000 +-121.550000,39.480000,41.000000,461.000000,107.000000,284.000000,90.000000,2.204500,41800.000000 +-121.550000,39.450000,18.000000,2278.000000,523.000000,1185.000000,475.000000,1.361100,60600.000000 +-121.550000,39.440000,31.000000,1434.000000,283.000000,811.000000,289.000000,1.772700,49000.000000 +-121.550000,39.100000,27.000000,1783.000000,441.000000,1163.000000,409.000000,1.285700,47000.000000 +-121.550000,39.060000,25.000000,1332.000000,247.000000,726.000000,226.000000,2.250000,63400.000000 +-121.550000,38.590000,36.000000,435.000000,95.000000,285.000000,90.000000,1.229200,69600.000000 +-121.550000,38.550000,10.000000,6227.000000,1164.000000,2909.000000,1077.000000,4.106000,115900.000000 +-121.550000,38.510000,22.000000,2403.000000,431.000000,1088.000000,421.000000,3.900000,146900.000000 +-121.550000,38.510000,14.000000,5490.000000,851.000000,2415.000000,837.000000,6.525300,216800.000000 +-121.550000,38.500000,9.000000,4868.000000,738.000000,2036.000000,750.000000,5.762100,204600.000000 
+-121.550000,37.370000,39.000000,759.000000,141.000000,252.000000,106.000000,3.696400,262500.000000 +-121.560000,39.530000,12.000000,1733.000000,421.000000,1861.000000,415.000000,1.577100,65200.000000 +-121.560000,39.520000,9.000000,818.000000,197.000000,358.000000,197.000000,1.770800,79500.000000 +-121.560000,39.520000,26.000000,1957.000000,429.000000,945.000000,397.000000,1.730800,53600.000000 +-121.560000,39.510000,47.000000,1064.000000,245.000000,603.000000,190.000000,1.365400,57900.000000 +-121.560000,39.510000,46.000000,1885.000000,385.000000,871.000000,347.000000,1.635200,53100.000000 +-121.560000,39.270000,28.000000,2332.000000,395.000000,1041.000000,344.000000,3.712500,116800.000000 +-121.560000,39.160000,35.000000,2157.000000,441.000000,1009.000000,409.000000,1.582700,63000.000000 +-121.560000,39.160000,12.000000,3349.000000,642.000000,2029.000000,619.000000,2.964700,88800.000000 +-121.560000,39.130000,17.000000,2277.000000,608.000000,1607.000000,562.000000,1.508500,69700.000000 +-121.560000,39.110000,18.000000,2171.000000,480.000000,1527.000000,447.000000,2.301100,57500.000000 +-121.560000,39.100000,28.000000,2130.000000,484.000000,1195.000000,439.000000,1.363100,45500.000000 +-121.560000,39.080000,26.000000,1377.000000,289.000000,761.000000,267.000000,1.493400,48300.000000 +-121.560000,39.010000,22.000000,1891.000000,340.000000,1023.000000,296.000000,2.730300,99100.000000 +-121.560000,38.580000,32.000000,2070.000000,561.000000,2046.000000,523.000000,1.942600,82300.000000 +-121.560000,38.440000,43.000000,1485.000000,270.000000,653.000000,251.000000,3.000000,141700.000000 +-121.560000,37.080000,17.000000,6725.000000,1051.000000,3439.000000,1027.000000,6.431300,393100.000000 +-121.570000,39.800000,23.000000,790.000000,137.000000,365.000000,152.000000,2.191200,115200.000000 +-121.570000,39.780000,18.000000,2221.000000,459.000000,952.000000,440.000000,2.045800,105700.000000 +-121.570000,39.760000,20.000000,1384.000000,257.000000,557.000000,232.000000,2.088200,104900.000000 +-121.570000,39.740000,17.000000,1619.000000,292.000000,705.000000,285.000000,2.462300,126100.000000 +-121.570000,39.500000,31.000000,2023.000000,469.000000,1073.000000,436.000000,1.571400,56100.000000 +-121.570000,39.160000,21.000000,1872.000000,302.000000,870.000000,301.000000,3.725000,84700.000000 +-121.570000,39.160000,18.000000,1632.000000,367.000000,769.000000,330.000000,3.102900,71700.000000 +-121.570000,39.130000,30.000000,442.000000,103.000000,413.000000,88.000000,1.569400,57900.000000 +-121.570000,39.100000,28.000000,1442.000000,333.000000,832.000000,286.000000,1.841300,62300.000000 +-121.570000,38.190000,36.000000,1395.000000,264.000000,700.000000,244.000000,2.435300,162500.000000 +-121.570000,37.020000,17.000000,2889.000000,624.000000,2681.000000,608.000000,2.941700,178000.000000 +-121.570000,36.980000,14.000000,5231.000000,817.000000,2634.000000,799.000000,4.970200,279800.000000 +-121.580000,39.830000,16.000000,4591.000000,904.000000,1904.000000,812.000000,2.241900,93200.000000 +-121.580000,39.790000,19.000000,2636.000000,523.000000,1184.000000,465.000000,2.786300,108600.000000 +-121.580000,39.760000,19.000000,2487.000000,485.000000,1110.000000,453.000000,3.106100,110200.000000 +-121.580000,39.760000,18.000000,1676.000000,332.000000,733.000000,318.000000,1.787500,103800.000000 +-121.580000,39.520000,25.000000,2409.000000,490.000000,1384.000000,479.000000,1.995600,58000.000000 +-121.580000,39.510000,24.000000,1865.000000,372.000000,1087.000000,385.000000,1.638900,56700.000000 
+-121.580000,39.500000,29.000000,1947.000000,383.000000,925.000000,337.000000,2.165800,57600.000000 +-121.580000,39.160000,36.000000,1206.000000,197.000000,537.000000,204.000000,3.361100,79800.000000 +-121.580000,39.150000,34.000000,1376.000000,376.000000,702.000000,317.000000,1.494600,55500.000000 +-121.580000,39.140000,52.000000,662.000000,160.000000,520.000000,149.000000,0.892800,55000.000000 +-121.580000,39.120000,26.000000,2796.000000,629.000000,2017.000000,632.000000,1.835500,61200.000000 +-121.580000,38.810000,25.000000,778.000000,135.000000,340.000000,155.000000,1.785700,258300.000000 +-121.580000,37.030000,16.000000,3120.000000,685.000000,2383.000000,681.000000,3.555100,198600.000000 +-121.580000,37.020000,27.000000,2303.000000,471.000000,1447.000000,467.000000,3.201900,203600.000000 +-121.580000,37.010000,44.000000,3192.000000,565.000000,1439.000000,568.000000,4.369300,234000.000000 +-121.580000,37.010000,15.000000,2873.000000,547.000000,1582.000000,567.000000,5.151900,264700.000000 +-121.590000,39.860000,14.000000,1527.000000,269.000000,665.000000,261.000000,2.865700,119600.000000 +-121.590000,39.820000,12.000000,1958.000000,369.000000,875.000000,354.000000,2.350700,97600.000000 +-121.590000,39.790000,20.000000,743.000000,171.000000,395.000000,168.000000,1.625000,88300.000000 +-121.590000,39.780000,18.000000,945.000000,205.000000,385.000000,207.000000,2.183800,58000.000000 +-121.590000,39.780000,16.000000,2754.000000,570.000000,1063.000000,543.000000,1.404800,86500.000000 +-121.590000,39.390000,22.000000,2515.000000,482.000000,1284.000000,462.000000,2.177600,73800.000000 +-121.590000,39.150000,48.000000,1783.000000,399.000000,938.000000,374.000000,1.665200,58900.000000 +-121.590000,39.140000,41.000000,1492.000000,350.000000,804.000000,353.000000,1.684000,71300.000000 +-121.590000,39.100000,24.000000,1107.000000,261.000000,768.000000,205.000000,1.716700,48800.000000 +-121.590000,38.690000,32.000000,541.000000,82.000000,229.000000,98.000000,8.037900,383300.000000 +-121.590000,37.190000,52.000000,220.000000,32.000000,55.000000,26.000000,15.000100,131300.000000 +-121.590000,37.020000,14.000000,6355.000000,1279.000000,3704.000000,1224.000000,4.423300,228600.000000 +-121.590000,37.010000,16.000000,6637.000000,1171.000000,3575.000000,1162.000000,4.322700,251500.000000 +-121.590000,36.970000,16.000000,865.000000,123.000000,403.000000,130.000000,5.739600,308700.000000 +-121.590000,36.550000,34.000000,737.000000,140.000000,362.000000,138.000000,5.178800,270000.000000 +-121.600000,39.830000,12.000000,3744.000000,699.000000,1532.000000,660.000000,2.307900,95300.000000 +-121.600000,39.800000,10.000000,1742.000000,307.000000,721.000000,312.000000,2.453700,117900.000000 +-121.600000,39.790000,18.000000,2672.000000,533.000000,1151.000000,532.000000,2.567000,102900.000000 +-121.600000,39.770000,26.000000,1503.000000,343.000000,699.000000,296.000000,1.875000,84000.000000 +-121.600000,39.770000,23.000000,2263.000000,497.000000,1138.000000,455.000000,2.340300,87300.000000 +-121.600000,39.760000,22.000000,2447.000000,556.000000,1157.000000,556.000000,1.824500,85500.000000 +-121.600000,39.750000,19.000000,2888.000000,591.000000,984.000000,499.000000,1.976600,92600.000000 +-121.600000,39.680000,15.000000,1677.000000,345.000000,844.000000,330.000000,2.395800,111200.000000 +-121.600000,39.150000,19.000000,1396.000000,336.000000,940.000000,309.000000,1.520800,70300.000000 +-121.600000,39.120000,21.000000,1299.000000,338.000000,1494.000000,311.000000,1.334800,225000.000000 
+-121.600000,37.910000,13.000000,2479.000000,394.000000,1075.000000,350.000000,5.101700,241400.000000 +-121.600000,37.130000,14.000000,9483.000000,1361.000000,4108.000000,1281.000000,7.500000,344500.000000 +-121.600000,37.090000,24.000000,1511.000000,318.000000,1052.000000,292.000000,3.625000,350000.000000 +-121.600000,36.880000,21.000000,3416.000000,624.000000,1862.000000,595.000000,4.781300,241500.000000 +-121.600000,36.810000,18.000000,1575.000000,230.000000,751.000000,219.000000,5.220300,286500.000000 +-121.610000,39.770000,25.000000,1612.000000,313.000000,837.000000,303.000000,2.963000,89500.000000 +-121.610000,39.520000,24.000000,1610.000000,324.000000,909.000000,323.000000,1.866100,59800.000000 +-121.610000,39.140000,44.000000,2035.000000,476.000000,1030.000000,453.000000,1.466100,65200.000000 +-121.610000,39.130000,21.000000,1432.000000,328.000000,933.000000,336.000000,1.682300,83800.000000 +-121.610000,37.860000,30.000000,1428.000000,287.000000,989.000000,287.000000,3.691000,154400.000000 +-121.610000,37.770000,32.000000,404.000000,74.000000,144.000000,58.000000,4.208300,125000.000000 +-121.610000,37.060000,21.000000,5322.000000,908.000000,3011.000000,895.000000,5.593600,386800.000000 +-121.610000,37.030000,5.000000,6529.000000,1010.000000,3071.000000,977.000000,5.675400,298500.000000 +-121.610000,36.690000,19.000000,9899.000000,2617.000000,11272.000000,2528.000000,2.024400,118500.000000 +-121.610000,36.680000,37.000000,3149.000000,833.000000,3456.000000,788.000000,2.854200,127600.000000 +-121.610000,36.670000,39.000000,3260.000000,821.000000,3130.000000,793.000000,2.522400,119200.000000 +-121.620000,41.780000,40.000000,3272.000000,663.000000,1467.000000,553.000000,1.788500,43500.000000 +-121.620000,39.770000,23.000000,1759.000000,366.000000,788.000000,359.000000,1.812500,93500.000000 +-121.620000,39.750000,20.000000,1173.000000,261.000000,523.000000,258.000000,1.062500,92800.000000 +-121.620000,39.500000,18.000000,2105.000000,416.000000,974.000000,385.000000,1.634600,63300.000000 +-121.620000,39.160000,7.000000,4480.000000,776.000000,2271.000000,767.000000,3.809000,110700.000000 +-121.620000,39.160000,16.000000,2037.000000,464.000000,1267.000000,451.000000,2.455600,97100.000000 +-121.620000,39.150000,36.000000,2321.000000,455.000000,1168.000000,489.000000,3.096200,74000.000000 +-121.620000,39.150000,23.000000,1984.000000,528.000000,1043.000000,452.000000,1.937500,65300.000000 +-121.620000,39.140000,41.000000,2183.000000,559.000000,1202.000000,506.000000,1.690200,61500.000000 +-121.620000,39.130000,41.000000,1317.000000,309.000000,856.000000,337.000000,1.671900,64100.000000 +-121.620000,39.120000,35.000000,2787.000000,587.000000,1431.000000,601.000000,2.546900,65900.000000 +-121.620000,39.120000,26.000000,1405.000000,204.000000,627.000000,215.000000,4.218800,94200.000000 +-121.620000,39.110000,5.000000,2320.000000,502.000000,1245.000000,489.000000,3.246500,97200.000000 +-121.620000,39.110000,11.000000,3519.000000,577.000000,1459.000000,549.000000,4.279200,123800.000000 +-121.620000,39.090000,21.000000,2693.000000,481.000000,1337.000000,435.000000,3.853400,99700.000000 +-121.620000,38.960000,36.000000,1826.000000,329.000000,1068.000000,318.000000,1.979700,118800.000000 +-121.620000,37.610000,26.000000,1786.000000,306.000000,771.000000,279.000000,5.723900,430600.000000 +-121.620000,37.090000,37.000000,1593.000000,303.000000,1030.000000,287.000000,3.930600,260700.000000 +-121.620000,36.740000,30.000000,1337.000000,253.000000,838.000000,247.000000,5.037400,165400.000000 
+-121.620000,36.710000,24.000000,4195.000000,706.000000,2200.000000,647.000000,4.345100,177800.000000 +-121.620000,36.690000,12.000000,512.000000,144.000000,767.000000,149.000000,2.266700,72900.000000 +-121.620000,36.690000,11.000000,4712.000000,1098.000000,5982.000000,1105.000000,2.598600,135700.000000 +-121.620000,36.680000,43.000000,2534.000000,592.000000,2448.000000,603.000000,2.488400,130500.000000 +-121.620000,36.670000,45.000000,1827.000000,408.000000,1507.000000,410.000000,2.894200,129000.000000 +-121.620000,36.670000,31.000000,2697.000000,690.000000,2220.000000,665.000000,2.532900,135200.000000 +-121.620000,36.630000,52.000000,1437.000000,298.000000,836.000000,257.000000,3.628600,165500.000000 +-121.620000,36.430000,20.000000,1335.000000,290.000000,717.000000,243.000000,4.789100,230600.000000 +-121.620000,36.140000,25.000000,726.000000,274.000000,411.000000,214.000000,3.237500,450000.000000 +-121.630000,40.920000,23.000000,1922.000000,411.000000,872.000000,350.000000,2.233700,64500.000000 +-121.630000,39.780000,28.000000,1677.000000,327.000000,770.000000,309.000000,2.682300,93400.000000 +-121.630000,39.760000,22.000000,2598.000000,482.000000,1151.000000,490.000000,2.818200,109700.000000 +-121.630000,39.750000,37.000000,1296.000000,296.000000,569.000000,257.000000,1.861600,70500.000000 +-121.630000,39.180000,13.000000,1907.000000,347.000000,821.000000,367.000000,2.097800,134000.000000 +-121.630000,39.160000,7.000000,1879.000000,444.000000,1065.000000,410.000000,2.418300,103800.000000 +-121.630000,39.150000,27.000000,336.000000,60.000000,195.000000,68.000000,5.394600,71800.000000 +-121.630000,39.150000,27.000000,2991.000000,637.000000,1419.000000,606.000000,1.884900,73500.000000 +-121.630000,39.150000,16.000000,1547.000000,418.000000,940.000000,400.000000,1.561300,72500.000000 +-121.630000,39.140000,39.000000,1874.000000,411.000000,822.000000,377.000000,2.503800,68300.000000 +-121.630000,39.130000,26.000000,2355.000000,531.000000,1047.000000,497.000000,1.820800,79500.000000 +-121.630000,39.120000,34.000000,1991.000000,348.000000,804.000000,344.000000,3.449200,98800.000000 +-121.630000,38.670000,34.000000,431.000000,85.000000,391.000000,77.000000,2.625000,225000.000000 +-121.630000,38.040000,25.000000,2019.000000,411.000000,888.000000,326.000000,3.261900,183800.000000 +-121.630000,38.030000,17.000000,2549.000000,596.000000,1169.000000,500.000000,3.669400,209400.000000 +-121.630000,37.120000,17.000000,1830.000000,398.000000,1110.000000,388.000000,2.482100,248200.000000 +-121.630000,37.100000,14.000000,5034.000000,797.000000,2124.000000,790.000000,4.902800,335000.000000 +-121.630000,36.710000,19.000000,5015.000000,1013.000000,3251.000000,940.000000,3.981800,152900.000000 +-121.630000,36.670000,34.000000,2486.000000,560.000000,2443.000000,557.000000,2.526300,130400.000000 +-121.640000,40.900000,24.000000,2237.000000,434.000000,834.000000,318.000000,1.753800,90300.000000 +-121.640000,39.740000,20.000000,1808.000000,334.000000,763.000000,335.000000,2.371100,121800.000000 +-121.640000,39.220000,37.000000,1189.000000,248.000000,627.000000,219.000000,3.861100,100000.000000 +-121.640000,39.120000,13.000000,6408.000000,1087.000000,3294.000000,1106.000000,4.265600,110700.000000 +-121.640000,39.110000,18.000000,3212.000000,542.000000,1817.000000,508.000000,3.379300,92900.000000 +-121.640000,37.850000,22.000000,1999.000000,415.000000,967.000000,320.000000,4.458300,253900.000000 +-121.640000,37.150000,13.000000,4780.000000,798.000000,2795.000000,764.000000,6.168400,288100.000000 
+-121.640000,36.820000,18.000000,1819.000000,283.000000,919.000000,295.000000,4.169600,222500.000000 +-121.640000,36.740000,30.000000,2628.000000,444.000000,1372.000000,432.000000,4.169600,175000.000000 +-121.640000,36.720000,17.000000,4203.000000,816.000000,2900.000000,827.000000,4.174200,159900.000000 +-121.640000,36.700000,32.000000,4089.000000,735.000000,2927.000000,713.000000,4.167500,142500.000000 +-121.640000,36.680000,16.000000,6568.000000,1603.000000,6012.000000,1565.000000,2.346300,156100.000000 +-121.640000,36.670000,28.000000,256.000000,66.000000,214.000000,60.000000,3.019700,137500.000000 +-121.640000,36.660000,24.000000,3174.000000,506.000000,1466.000000,535.000000,5.228500,248100.000000 +-121.650000,40.880000,15.000000,2909.000000,549.000000,1537.000000,522.000000,3.017900,61300.000000 +-121.650000,39.760000,31.000000,1599.000000,318.000000,794.000000,303.000000,3.000000,96700.000000 +-121.650000,39.530000,23.000000,1387.000000,325.000000,640.000000,289.000000,1.483300,65200.000000 +-121.650000,39.350000,24.000000,1003.000000,251.000000,1098.000000,227.000000,1.755200,86400.000000 +-121.650000,39.320000,40.000000,812.000000,154.000000,374.000000,142.000000,2.789100,73500.000000 +-121.650000,39.160000,16.000000,5022.000000,1103.000000,2087.000000,956.000000,2.396300,114800.000000 +-121.650000,39.130000,11.000000,4833.000000,944.000000,2336.000000,841.000000,2.684200,89100.000000 +-121.650000,37.110000,14.000000,6006.000000,914.000000,2915.000000,898.000000,5.935600,321700.000000 +-121.650000,36.850000,20.000000,2606.000000,424.000000,1361.000000,426.000000,4.578700,245100.000000 +-121.650000,36.770000,15.000000,2191.000000,358.000000,1150.000000,330.000000,4.796900,227500.000000 +-121.650000,36.700000,29.000000,4964.000000,1056.000000,2773.000000,1036.000000,3.082700,148100.000000 +-121.650000,36.690000,21.000000,7884.000000,2011.000000,4907.000000,1919.000000,2.736700,160300.000000 +-121.650000,36.670000,52.000000,2351.000000,459.000000,1169.000000,439.000000,2.892400,169600.000000 +-121.650000,36.670000,28.000000,1926.000000,556.000000,1717.000000,535.000000,1.938500,123200.000000 +-121.650000,36.660000,42.000000,4261.000000,840.000000,2013.000000,801.000000,3.528800,221000.000000 +-121.650000,36.660000,30.000000,3745.000000,767.000000,1762.000000,748.000000,3.235500,214200.000000 +-121.660000,39.150000,22.000000,2144.000000,376.000000,1200.000000,370.000000,3.442600,102400.000000 +-121.660000,39.090000,27.000000,2098.000000,372.000000,1090.000000,333.000000,4.450000,96200.000000 +-121.660000,37.930000,19.000000,2055.000000,358.000000,1064.000000,350.000000,4.742600,263100.000000 +-121.660000,36.890000,15.000000,2608.000000,458.000000,1531.000000,457.000000,5.514800,253500.000000 +-121.660000,36.820000,17.000000,3921.000000,654.000000,1895.000000,641.000000,5.009200,238700.000000 +-121.660000,36.710000,27.000000,4131.000000,886.000000,2002.000000,815.000000,3.292900,157500.000000 +-121.660000,36.700000,33.000000,3252.000000,630.000000,2010.000000,641.000000,3.422200,158100.000000 +-121.660000,36.690000,6.000000,10613.000000,2485.000000,7249.000000,2375.000000,3.791200,168900.000000 +-121.660000,36.670000,40.000000,2878.000000,592.000000,1444.000000,564.000000,3.143900,192300.000000 +-121.660000,36.670000,40.000000,2497.000000,520.000000,1275.000000,508.000000,3.107100,193100.000000 +-121.660000,36.370000,9.000000,1580.000000,287.000000,465.000000,208.000000,6.166800,405800.000000 
+-121.670000,40.890000,17.000000,2548.000000,537.000000,1118.000000,461.000000,2.267000,57800.000000 +-121.670000,40.610000,8.000000,2411.000000,463.000000,786.000000,297.000000,2.151300,80400.000000 +-121.670000,39.370000,27.000000,2599.000000,502.000000,1241.000000,502.000000,1.994300,86300.000000 +-121.670000,39.340000,22.000000,1217.000000,224.000000,537.000000,187.000000,2.660700,84600.000000 +-121.670000,39.260000,29.000000,3041.000000,683.000000,2106.000000,687.000000,1.631500,58000.000000 +-121.670000,39.180000,26.000000,2121.000000,375.000000,1125.000000,366.000000,3.395800,94600.000000 +-121.670000,39.140000,22.000000,2264.000000,390.000000,1056.000000,403.000000,3.611100,112300.000000 +-121.670000,38.850000,46.000000,645.000000,131.000000,410.000000,122.000000,1.741700,110400.000000 +-121.670000,38.540000,13.000000,6141.000000,1019.000000,2553.000000,967.000000,4.243200,326500.000000 +-121.670000,37.990000,22.000000,1046.000000,195.000000,527.000000,164.000000,4.375000,213500.000000 +-121.670000,36.930000,22.000000,569.000000,132.000000,542.000000,125.000000,2.187500,187500.000000 +-121.670000,36.680000,38.000000,5561.000000,1292.000000,3523.000000,1253.000000,2.828900,168300.000000 +-121.670000,36.670000,24.000000,3071.000000,544.000000,1477.000000,560.000000,3.921900,222500.000000 +-121.670000,36.660000,19.000000,9371.000000,1980.000000,4259.000000,1882.000000,3.687500,189700.000000 +-121.680000,39.820000,15.000000,3996.000000,748.000000,1786.000000,728.000000,3.518900,141300.000000 +-121.680000,39.290000,29.000000,1860.000000,400.000000,1137.000000,365.000000,1.528100,61600.000000 +-121.680000,39.130000,17.000000,1969.000000,297.000000,717.000000,268.000000,3.469800,179700.000000 +-121.680000,39.110000,19.000000,1366.000000,220.000000,596.000000,203.000000,4.062500,141700.000000 +-121.680000,37.980000,19.000000,3388.000000,599.000000,1707.000000,575.000000,3.641100,162800.000000 +-121.680000,37.000000,19.000000,3754.000000,588.000000,1692.000000,550.000000,6.764400,412600.000000 +-121.680000,36.720000,12.000000,19234.000000,4492.000000,12153.000000,4372.000000,3.265200,152800.000000 +-121.690000,39.360000,29.000000,2220.000000,471.000000,1170.000000,428.000000,2.322400,56200.000000 +-121.690000,38.870000,38.000000,412.000000,93.000000,304.000000,95.000000,2.659700,86000.000000 +-121.690000,38.160000,46.000000,2292.000000,472.000000,970.000000,431.000000,2.288800,94900.000000 +-121.690000,37.950000,15.000000,1850.000000,441.000000,1348.000000,403.000000,3.812500,125400.000000 +-121.690000,37.140000,12.000000,4077.000000,590.000000,1618.000000,540.000000,5.295100,386200.000000 +-121.690000,36.960000,23.000000,1229.000000,254.000000,687.000000,232.000000,5.143300,305600.000000 +-121.690000,36.810000,18.000000,2837.000000,522.000000,1454.000000,458.000000,4.527200,221000.000000 +-121.690000,36.800000,19.000000,2164.000000,410.000000,1309.000000,426.000000,3.338000,185300.000000 +-121.690000,36.520000,15.000000,4037.000000,586.000000,1596.000000,557.000000,8.092200,390100.000000 +-121.700000,39.370000,32.000000,1852.000000,373.000000,911.000000,365.000000,1.788500,57000.000000 +-121.700000,39.360000,46.000000,1210.000000,243.000000,523.000000,242.000000,1.910000,63900.000000 +-121.700000,39.360000,37.000000,2330.000000,495.000000,1505.000000,470.000000,2.047400,56000.000000 +-121.700000,38.650000,22.000000,1360.000000,282.000000,808.000000,229.000000,2.416700,225000.000000 +-121.700000,38.600000,16.000000,2372.000000,588.000000,1400.000000,583.000000,2.892200,153600.000000 
+-121.700000,38.540000,13.000000,6819.000000,1158.000000,2828.000000,1115.000000,4.622500,226500.000000 +-121.700000,37.980000,9.000000,3079.000000,519.000000,1562.000000,512.000000,5.104100,172900.000000 +-121.700000,37.960000,33.000000,2396.000000,452.000000,1391.000000,465.000000,3.281300,151400.000000 +-121.700000,37.940000,36.000000,1710.000000,320.000000,861.000000,300.000000,2.882800,131100.000000 +-121.700000,37.930000,19.000000,2005.000000,405.000000,972.000000,403.000000,2.221600,156700.000000 +-121.700000,37.930000,10.000000,3258.000000,612.000000,1779.000000,558.000000,4.658700,152500.000000 +-121.700000,37.910000,17.000000,1962.000000,291.000000,825.000000,267.000000,4.895800,187100.000000 +-121.700000,37.200000,15.000000,531.000000,154.000000,469.000000,155.000000,4.650000,385700.000000 +-121.700000,36.840000,19.000000,2511.000000,465.000000,1551.000000,450.000000,4.910700,231900.000000 +-121.700000,36.770000,21.000000,2294.000000,478.000000,1306.000000,430.000000,3.034700,227200.000000 +-121.700000,36.670000,37.000000,641.000000,129.000000,458.000000,142.000000,3.345600,252600.000000 +-121.700000,36.600000,19.000000,3562.000000,530.000000,1607.000000,505.000000,5.016200,283100.000000 +-121.700000,36.480000,19.000000,2150.000000,479.000000,1052.000000,428.000000,3.503900,288400.000000 +-121.710000,39.420000,21.000000,1432.000000,284.000000,862.000000,275.000000,2.281300,57600.000000 +-121.710000,39.410000,22.000000,1814.000000,342.000000,941.000000,323.000000,2.172800,59400.000000 +-121.710000,38.720000,32.000000,710.000000,155.000000,550.000000,154.000000,2.888200,151400.000000 +-121.710000,38.560000,20.000000,8627.000000,1516.000000,4071.000000,1466.000000,4.219800,164100.000000 +-121.710000,37.990000,27.000000,3861.000000,718.000000,2085.000000,707.000000,3.355800,129700.000000 +-121.710000,36.900000,16.000000,1680.000000,285.000000,1103.000000,275.000000,4.612500,253800.000000 +-121.710000,36.880000,19.000000,2528.000000,554.000000,2332.000000,492.000000,3.776600,177000.000000 +-121.710000,36.780000,19.000000,2371.000000,324.000000,944.000000,332.000000,5.917500,240200.000000 +-121.720000,38.800000,36.000000,1069.000000,228.000000,567.000000,190.000000,1.955900,78400.000000 +-121.720000,38.540000,16.000000,2790.000000,624.000000,1386.000000,636.000000,3.190800,194300.000000 +-121.720000,38.000000,7.000000,7957.000000,1314.000000,4460.000000,1293.000000,4.961800,156500.000000 +-121.720000,37.980000,5.000000,7105.000000,1143.000000,3523.000000,1088.000000,5.046800,168800.000000 +-121.720000,37.700000,17.000000,1671.000000,352.000000,729.000000,252.000000,6.102300,450000.000000 +-121.720000,37.160000,21.000000,1062.000000,179.000000,631.000000,185.000000,4.738600,394100.000000 +-121.730000,38.560000,30.000000,3306.000000,629.000000,1623.000000,648.000000,2.861400,145200.000000 +-121.730000,38.540000,18.000000,974.000000,317.000000,521.000000,317.000000,1.063300,137500.000000 +-121.730000,38.130000,40.000000,1266.000000,257.000000,547.000000,247.000000,3.028800,164400.000000 +-121.730000,38.000000,3.000000,9217.000000,1522.000000,3578.000000,1272.000000,5.001600,189100.000000 +-121.730000,37.710000,12.000000,5608.000000,1049.000000,2595.000000,1067.000000,3.986400,200200.000000 +-121.730000,36.930000,29.000000,2931.000000,535.000000,1954.000000,506.000000,3.291700,224700.000000 +-121.730000,36.900000,23.000000,2392.000000,721.000000,3074.000000,718.000000,2.519500,136900.000000 +-121.730000,36.860000,28.000000,827.000000,178.000000,703.000000,144.000000,4.427100,175700.000000 
+-121.730000,36.500000,27.000000,3184.000000,520.000000,1121.000000,493.000000,5.638300,377000.000000 +-121.740000,39.590000,24.000000,1535.000000,279.000000,726.000000,272.000000,2.383300,95100.000000 +-121.740000,39.380000,27.000000,2596.000000,435.000000,1100.000000,409.000000,2.324300,85500.000000 +-121.740000,39.150000,20.000000,2302.000000,412.000000,1205.000000,399.000000,2.800000,71200.000000 +-121.740000,38.560000,18.000000,3960.000000,1151.000000,2248.000000,1144.000000,1.725700,179100.000000 +-121.740000,38.150000,22.000000,1910.000000,326.000000,1001.000000,345.000000,4.817300,115800.000000 +-121.740000,37.950000,5.000000,4980.000000,774.000000,2399.000000,763.000000,5.710400,186300.000000 +-121.740000,37.350000,34.000000,440.000000,90.000000,217.000000,93.000000,5.232700,500001.000000 +-121.740000,37.300000,12.000000,1961.000000,280.000000,985.000000,269.000000,6.715900,362700.000000 +-121.740000,37.290000,6.000000,7292.000000,1295.000000,2468.000000,1262.000000,5.641100,294700.000000 +-121.740000,36.920000,29.000000,1210.000000,281.000000,863.000000,262.000000,3.106200,156000.000000 +-121.740000,36.920000,17.000000,2648.000000,589.000000,1193.000000,540.000000,2.446100,151700.000000 +-121.740000,36.920000,14.000000,3355.000000,695.000000,1350.000000,697.000000,2.650600,164600.000000 +-121.740000,36.790000,16.000000,3841.000000,620.000000,1799.000000,611.000000,4.381400,245300.000000 +-121.740000,36.490000,33.000000,2952.000000,565.000000,1264.000000,517.000000,4.420900,274600.000000 +-121.740000,36.470000,28.000000,1973.000000,343.000000,970.000000,349.000000,4.250000,279100.000000 +-121.750000,39.880000,16.000000,2867.000000,559.000000,1203.000000,449.000000,2.714300,95300.000000 +-121.750000,39.400000,29.000000,1687.000000,327.000000,864.000000,334.000000,2.494300,81900.000000 +-121.750000,38.670000,9.000000,12139.000000,2640.000000,6837.000000,2358.000000,3.125000,132500.000000 +-121.750000,38.550000,33.000000,2479.000000,382.000000,979.000000,377.000000,4.730800,236200.000000 +-121.750000,38.550000,26.000000,4802.000000,950.000000,2199.000000,939.000000,3.745200,227700.000000 +-121.750000,37.710000,11.000000,12070.000000,2220.000000,5826.000000,2125.000000,4.862400,192400.000000 +-121.750000,37.680000,35.000000,1755.000000,299.000000,702.000000,263.000000,5.244300,183400.000000 +-121.750000,37.350000,18.000000,1947.000000,250.000000,605.000000,184.000000,8.187100,500001.000000 +-121.750000,37.300000,23.000000,1801.000000,415.000000,548.000000,393.000000,2.505200,133700.000000 +-121.750000,37.290000,15.000000,1486.000000,310.000000,455.000000,296.000000,4.336500,221000.000000 +-121.750000,37.110000,18.000000,3167.000000,478.000000,1414.000000,482.000000,6.877300,467700.000000 +-121.750000,36.960000,19.000000,3461.000000,634.000000,2790.000000,607.000000,4.756900,190800.000000 +-121.750000,36.950000,27.000000,1580.000000,303.000000,1066.000000,306.000000,4.707100,202700.000000 +-121.750000,36.930000,24.000000,4026.000000,881.000000,2264.000000,863.000000,3.132700,218100.000000 +-121.750000,36.920000,46.000000,1362.000000,321.000000,1068.000000,305.000000,2.461500,177800.000000 +-121.750000,36.910000,52.000000,1211.000000,447.000000,1102.000000,392.000000,1.687500,161400.000000 +-121.750000,36.910000,32.000000,1461.000000,422.000000,1494.000000,416.000000,2.805600,173200.000000 +-121.750000,36.770000,25.000000,1851.000000,418.000000,1678.000000,390.000000,3.293700,135300.000000 
+-121.750000,36.760000,32.000000,1740.000000,399.000000,1563.000000,389.000000,2.769400,132400.000000 +-121.760000,41.500000,31.000000,602.000000,153.000000,112.000000,47.000000,1.066700,34200.000000 +-121.760000,38.940000,48.000000,540.000000,110.000000,234.000000,74.000000,3.611100,67500.000000 +-121.760000,38.680000,38.000000,674.000000,178.000000,701.000000,189.000000,1.394200,69400.000000 +-121.760000,38.660000,17.000000,5320.000000,984.000000,2866.000000,928.000000,4.199700,133400.000000 +-121.760000,38.570000,11.000000,15018.000000,3008.000000,7984.000000,2962.000000,3.137100,201800.000000 +-121.760000,38.550000,23.000000,8800.000000,1857.000000,6330.000000,1832.000000,2.065000,219400.000000 +-121.760000,37.700000,9.000000,3980.000000,736.000000,1705.000000,679.000000,5.706800,256700.000000 +-121.760000,37.680000,52.000000,2157.000000,418.000000,929.000000,419.000000,3.730100,204400.000000 +-121.760000,37.680000,35.000000,1864.000000,357.000000,1189.000000,349.000000,4.236100,177500.000000 +-121.760000,37.680000,32.000000,1078.000000,207.000000,555.000000,197.000000,3.185600,186900.000000 +-121.760000,37.670000,6.000000,3023.000000,518.000000,1225.000000,468.000000,6.370500,350000.000000 +-121.760000,37.330000,5.000000,4153.000000,719.000000,2435.000000,697.000000,5.630600,286200.000000 +-121.760000,37.300000,6.000000,3526.000000,559.000000,1378.000000,491.000000,6.146300,335500.000000 +-121.760000,37.290000,15.000000,2267.000000,348.000000,1150.000000,327.000000,7.126700,277900.000000 +-121.760000,37.280000,17.000000,660.000000,129.000000,431.000000,123.000000,4.909700,241000.000000 +-121.760000,37.260000,17.000000,250.000000,52.000000,141.000000,51.000000,4.645800,500001.000000 +-121.760000,37.230000,16.000000,4274.000000,715.000000,2240.000000,704.000000,5.421800,233900.000000 +-121.760000,37.000000,21.000000,1416.000000,269.000000,779.000000,200.000000,3.198700,279800.000000 +-121.760000,36.920000,46.000000,947.000000,257.000000,1120.000000,264.000000,3.412500,160700.000000 +-121.760000,36.920000,36.000000,2096.000000,409.000000,1454.000000,394.000000,3.221600,238300.000000 +-121.760000,36.920000,20.000000,2687.000000,637.000000,2154.000000,610.000000,2.643400,169700.000000 +-121.760000,36.910000,23.000000,1276.000000,437.000000,1359.000000,376.000000,1.960900,155000.000000 +-121.760000,36.830000,28.000000,1445.000000,268.000000,1017.000000,284.000000,3.669300,211000.000000 +-121.760000,36.770000,27.000000,1608.000000,503.000000,2031.000000,498.000000,2.338400,121000.000000 +-121.770000,39.660000,20.000000,3759.000000,609.000000,1705.000000,600.000000,4.712000,158600.000000 +-121.770000,38.760000,32.000000,1950.000000,385.000000,1145.000000,363.000000,2.836500,87900.000000 +-121.770000,38.690000,47.000000,1697.000000,318.000000,775.000000,276.000000,3.455900,123100.000000 +-121.770000,38.680000,43.000000,2559.000000,598.000000,1820.000000,591.000000,2.192700,107900.000000 +-121.770000,38.670000,45.000000,2438.000000,462.000000,1415.000000,510.000000,2.835100,107200.000000 +-121.770000,38.670000,42.000000,2670.000000,518.000000,1548.000000,534.000000,2.279400,108900.000000 +-121.770000,38.010000,13.000000,2983.000000,534.000000,1417.000000,510.000000,3.986100,168100.000000 +-121.770000,37.990000,4.000000,5623.000000,780.000000,2429.000000,716.000000,5.440900,205100.000000 +-121.770000,37.740000,25.000000,494.000000,81.000000,254.000000,85.000000,9.153100,418800.000000 +-121.770000,37.680000,44.000000,495.000000,112.000000,277.000000,109.000000,2.666700,179200.000000 
+-121.770000,37.680000,36.000000,1687.000000,372.000000,950.000000,372.000000,3.553200,158400.000000 +-121.770000,37.670000,20.000000,8068.000000,1217.000000,3489.000000,1259.000000,5.790700,264200.000000 +-121.770000,37.330000,9.000000,3160.000000,468.000000,1675.000000,470.000000,7.544300,348400.000000 +-121.770000,37.330000,8.000000,3088.000000,474.000000,1799.000000,456.000000,7.270700,355300.000000 +-121.770000,37.240000,12.000000,10236.000000,1878.000000,5674.000000,1816.000000,4.747000,261100.000000 +-121.770000,37.230000,15.000000,4713.000000,769.000000,2519.000000,778.000000,5.695800,253800.000000 +-121.770000,37.220000,16.000000,3992.000000,540.000000,2097.000000,555.000000,6.728700,299300.000000 +-121.770000,37.220000,16.000000,1617.000000,306.000000,667.000000,264.000000,4.522100,191100.000000 +-121.770000,36.960000,20.000000,4228.000000,816.000000,2389.000000,844.000000,3.525000,229100.000000 +-121.770000,36.930000,33.000000,1406.000000,317.000000,1075.000000,301.000000,3.281300,190000.000000 +-121.770000,36.930000,24.000000,1943.000000,447.000000,1844.000000,461.000000,3.019200,184300.000000 +-121.770000,36.930000,20.000000,2587.000000,547.000000,1534.000000,540.000000,2.437500,190400.000000 +-121.770000,36.920000,9.000000,4934.000000,1112.000000,3198.000000,977.000000,3.500000,194800.000000 +-121.770000,36.910000,8.000000,2715.000000,750.000000,2580.000000,718.000000,2.834800,162000.000000 +-121.770000,36.870000,37.000000,424.000000,65.000000,266.000000,64.000000,3.347200,293800.000000 +-121.770000,36.710000,18.000000,6601.000000,1395.000000,3562.000000,1299.000000,3.512000,174800.000000 +-121.770000,36.530000,18.000000,2321.000000,358.000000,803.000000,341.000000,10.185400,426000.000000 +-121.780000,39.710000,8.000000,140.000000,28.000000,84.000000,29.000000,2.125000,179200.000000 +-121.780000,38.680000,43.000000,3766.000000,847.000000,1855.000000,817.000000,2.346800,119400.000000 +-121.780000,38.670000,38.000000,2948.000000,478.000000,1123.000000,460.000000,4.055600,146900.000000 +-121.780000,38.660000,18.000000,4224.000000,632.000000,1907.000000,641.000000,4.822600,139900.000000 +-121.780000,38.550000,12.000000,10509.000000,2186.000000,5633.000000,2138.000000,2.960500,204300.000000 +-121.780000,38.010000,19.000000,2688.000000,469.000000,1216.000000,422.000000,4.449100,133900.000000 +-121.780000,38.000000,8.000000,2371.000000,375.000000,1094.000000,396.000000,5.324500,174500.000000 +-121.780000,37.970000,4.000000,17032.000000,2546.000000,7653.000000,2359.000000,5.560100,213700.000000 +-121.780000,37.690000,35.000000,2853.000000,588.000000,1761.000000,572.000000,4.353300,168400.000000 +-121.780000,37.690000,34.000000,2358.000000,498.000000,1157.000000,461.000000,3.361800,174600.000000 +-121.780000,37.680000,17.000000,3112.000000,872.000000,1392.000000,680.000000,3.022200,172500.000000 +-121.780000,37.670000,28.000000,1773.000000,278.000000,804.000000,269.000000,4.857100,201100.000000 +-121.780000,37.670000,26.000000,2211.000000,344.000000,1024.000000,321.000000,5.264900,199800.000000 +-121.780000,37.660000,25.000000,1947.000000,418.000000,900.000000,354.000000,3.852300,193000.000000 +-121.780000,37.340000,21.000000,1959.000000,292.000000,891.000000,300.000000,7.375000,338400.000000 +-121.780000,37.340000,11.000000,3195.000000,410.000000,1774.000000,418.000000,7.067100,378200.000000 +-121.780000,37.330000,10.000000,2829.000000,394.000000,1510.000000,386.000000,6.680000,359500.000000 
+-121.780000,37.310000,25.000000,2093.000000,297.000000,983.000000,338.000000,6.466400,271500.000000 +-121.780000,37.240000,17.000000,2123.000000,341.000000,1067.000000,339.000000,6.006200,262700.000000 +-121.780000,37.220000,18.000000,2127.000000,387.000000,1547.000000,402.000000,5.095800,217100.000000 +-121.780000,36.930000,21.000000,2794.000000,662.000000,2236.000000,565.000000,2.405300,178400.000000 +-121.780000,36.920000,19.000000,1515.000000,253.000000,975.000000,266.000000,4.390600,241200.000000 +-121.790000,39.730000,8.000000,5690.000000,1189.000000,2887.000000,1077.000000,3.062500,116300.000000 +-121.790000,39.480000,39.000000,1105.000000,180.000000,408.000000,166.000000,3.392900,82100.000000 +-121.790000,38.690000,23.000000,1755.000000,321.000000,1061.000000,313.000000,2.886400,103100.000000 +-121.790000,38.680000,24.000000,3794.000000,848.000000,2225.000000,864.000000,2.806800,95300.000000 +-121.790000,38.670000,30.000000,2602.000000,401.000000,981.000000,405.000000,4.722200,167200.000000 +-121.790000,38.670000,17.000000,2875.000000,810.000000,1876.000000,749.000000,1.951000,152500.000000 +-121.790000,38.660000,15.000000,6809.000000,1052.000000,3060.000000,1060.000000,5.306400,165000.000000 +-121.790000,38.010000,17.000000,4032.000000,814.000000,1749.000000,618.000000,3.172800,146800.000000 +-121.790000,38.000000,36.000000,1141.000000,234.000000,562.000000,213.000000,2.589300,108500.000000 +-121.790000,38.000000,34.000000,3090.000000,593.000000,1588.000000,566.000000,3.611800,124700.000000 +-121.790000,37.990000,18.000000,3646.000000,534.000000,1651.000000,535.000000,5.732100,164700.000000 +-121.790000,37.990000,10.000000,4156.000000,609.000000,1878.000000,586.000000,5.650600,178600.000000 +-121.790000,37.690000,25.000000,6296.000000,1082.000000,3200.000000,1047.000000,4.535700,188400.000000 +-121.790000,37.670000,26.000000,2163.000000,339.000000,947.000000,346.000000,6.079700,211000.000000 +-121.790000,37.660000,22.000000,14701.000000,2210.000000,6693.000000,2232.000000,5.980000,245000.000000 +-121.790000,37.380000,22.000000,3650.000000,527.000000,1637.000000,520.000000,5.377400,325600.000000 +-121.790000,37.330000,13.000000,2978.000000,505.000000,1794.000000,485.000000,6.681300,277800.000000 +-121.790000,37.330000,10.000000,3283.000000,550.000000,2491.000000,522.000000,6.663300,283700.000000 +-121.790000,37.320000,6.000000,2850.000000,561.000000,2160.000000,581.000000,5.533600,241900.000000 +-121.790000,37.320000,20.000000,3034.000000,451.000000,1669.000000,430.000000,6.274200,241300.000000 +-121.790000,37.310000,22.000000,2199.000000,361.000000,1270.000000,386.000000,5.114900,235700.000000 +-121.790000,37.230000,17.000000,2281.000000,359.000000,1226.000000,394.000000,5.419300,259500.000000 +-121.790000,37.230000,16.000000,2240.000000,300.000000,1221.000000,305.000000,6.019800,289600.000000 +-121.790000,37.030000,18.000000,943.000000,213.000000,544.000000,179.000000,3.934000,228600.000000 +-121.790000,36.950000,34.000000,2152.000000,430.000000,1516.000000,386.000000,3.786300,192200.000000 +-121.790000,36.930000,19.000000,2512.000000,509.000000,1856.000000,537.000000,3.181500,189100.000000 +-121.790000,36.850000,28.000000,1049.000000,235.000000,705.000000,208.000000,2.732100,150000.000000 +-121.790000,36.680000,22.000000,6912.000000,1513.000000,3794.000000,1455.000000,3.060800,168300.000000 +-121.790000,36.640000,11.000000,32627.000000,6445.000000,28566.000000,6082.000000,2.308700,118800.000000 
+-121.800000,40.340000,26.000000,4815.000000,910.000000,1341.000000,539.000000,2.881000,79800.000000 +-121.800000,39.750000,28.000000,2551.000000,378.000000,1011.000000,374.000000,4.330900,125200.000000 +-121.800000,39.750000,11.000000,7212.000000,1355.000000,3264.000000,1264.000000,3.112500,122600.000000 +-121.800000,39.640000,25.000000,2202.000000,422.000000,1109.000000,403.000000,2.830600,87500.000000 +-121.800000,39.330000,30.000000,1019.000000,192.000000,501.000000,185.000000,2.525900,81300.000000 +-121.800000,38.690000,8.000000,3544.000000,691.000000,2118.000000,678.000000,3.747700,122200.000000 +-121.800000,38.680000,11.000000,3851.000000,892.000000,1847.000000,747.000000,3.433100,120600.000000 +-121.800000,38.670000,11.000000,3251.000000,623.000000,1700.000000,615.000000,3.187500,172000.000000 +-121.800000,38.670000,10.000000,2086.000000,380.000000,1073.000000,378.000000,4.552600,154400.000000 +-121.800000,38.010000,37.000000,3058.000000,567.000000,1351.000000,523.000000,3.517900,130800.000000 +-121.800000,38.000000,34.000000,2738.000000,475.000000,1316.000000,459.000000,3.536800,122500.000000 +-121.800000,37.690000,17.000000,3956.000000,639.000000,2222.000000,662.000000,5.432400,215500.000000 +-121.800000,37.350000,27.000000,2358.000000,415.000000,1562.000000,383.000000,5.229700,192800.000000 +-121.800000,37.350000,15.000000,2781.000000,498.000000,1389.000000,475.000000,5.614000,223300.000000 +-121.800000,37.340000,25.000000,1642.000000,297.000000,1146.000000,279.000000,5.208800,231400.000000 +-121.800000,37.320000,20.000000,2473.000000,476.000000,2228.000000,501.000000,5.681700,224200.000000 +-121.800000,37.320000,14.000000,4412.000000,924.000000,2698.000000,891.000000,4.702700,227600.000000 +-121.800000,37.310000,15.000000,1807.000000,378.000000,1277.000000,341.000000,4.504500,164500.000000 +-121.800000,37.300000,16.000000,906.000000,149.000000,605.000000,148.000000,4.817300,235600.000000 +-121.800000,37.270000,17.000000,3912.000000,737.000000,2693.000000,746.000000,5.077200,221500.000000 +-121.800000,37.260000,18.000000,3631.000000,947.000000,2357.000000,757.000000,2.875000,184400.000000 +-121.800000,37.260000,16.000000,1868.000000,285.000000,995.000000,284.000000,5.905300,260500.000000 +-121.800000,37.230000,18.000000,3179.000000,526.000000,1663.000000,507.000000,5.922500,265800.000000 +-121.800000,37.230000,18.000000,2581.000000,358.000000,1284.000000,377.000000,6.738500,272400.000000 +-121.800000,36.720000,14.000000,2493.000000,407.000000,1296.000000,418.000000,5.450800,190000.000000 +-121.800000,36.680000,18.000000,8581.000000,1957.000000,6071.000000,1889.000000,3.000000,162200.000000 +-121.810000,39.710000,18.000000,1222.000000,250.000000,708.000000,281.000000,2.028800,116700.000000 +-121.810000,38.840000,37.000000,352.000000,65.000000,238.000000,67.000000,2.854200,275000.000000 +-121.810000,38.490000,18.000000,4518.000000,827.000000,2230.000000,715.000000,3.930900,178500.000000 +-121.810000,38.450000,24.000000,1951.000000,341.000000,1140.000000,338.000000,3.706100,128500.000000 +-121.810000,38.430000,30.000000,1674.000000,297.000000,756.000000,292.000000,3.928600,133100.000000 +-121.810000,38.010000,52.000000,1124.000000,245.000000,528.000000,226.000000,2.263900,128500.000000 +-121.810000,38.010000,47.000000,1942.000000,430.000000,1074.000000,393.000000,2.236100,105100.000000 +-121.810000,38.000000,37.000000,2724.000000,579.000000,1400.000000,540.000000,2.905000,97300.000000 
+-121.810000,37.990000,18.000000,2807.000000,445.000000,1315.000000,437.000000,4.819400,170400.000000 +-121.810000,37.970000,8.000000,1584.000000,236.000000,615.000000,202.000000,6.475300,166800.000000 +-121.810000,37.410000,25.000000,2496.000000,351.000000,1034.000000,367.000000,7.054400,320700.000000 +-121.810000,37.390000,34.000000,2218.000000,286.000000,827.000000,299.000000,7.455900,456500.000000 +-121.810000,37.380000,29.000000,570.000000,76.000000,244.000000,72.000000,12.329200,416700.000000 +-121.810000,37.370000,24.000000,962.000000,146.000000,492.000000,155.000000,7.286100,328000.000000 +-121.810000,37.360000,20.000000,3189.000000,420.000000,1234.000000,389.000000,7.581300,374100.000000 +-121.810000,37.350000,29.000000,2396.000000,452.000000,2000.000000,481.000000,4.375000,185500.000000 +-121.810000,37.350000,28.000000,3477.000000,671.000000,2990.000000,648.000000,4.467100,172600.000000 +-121.810000,37.330000,4.000000,5532.000000,778.000000,3651.000000,770.000000,7.298200,343000.000000 +-121.810000,37.310000,15.000000,1898.000000,395.000000,1527.000000,381.000000,4.479200,212500.000000 +-121.810000,37.310000,15.000000,1794.000000,366.000000,1533.000000,371.000000,5.784300,209900.000000 +-121.810000,37.300000,15.000000,1929.000000,345.000000,1683.000000,347.000000,5.524800,235600.000000 +-121.810000,37.300000,14.000000,1870.000000,348.000000,1214.000000,347.000000,4.976900,186500.000000 +-121.810000,37.290000,15.000000,5957.000000,1037.000000,3838.000000,997.000000,6.290700,253100.000000 +-121.810000,37.280000,17.000000,2277.000000,428.000000,1887.000000,422.000000,5.707800,217000.000000 +-121.810000,37.270000,22.000000,2996.000000,695.000000,2169.000000,607.000000,4.343800,209700.000000 +-121.810000,37.270000,20.000000,3244.000000,520.000000,1769.000000,469.000000,5.921400,224000.000000 +-121.810000,37.260000,16.000000,1911.000000,327.000000,1158.000000,332.000000,5.935900,249500.000000 +-121.810000,37.260000,14.000000,3379.000000,683.000000,1465.000000,620.000000,4.054700,236200.000000 +-121.810000,37.250000,25.000000,4096.000000,623.000000,2128.000000,618.000000,6.295700,251800.000000 +-121.810000,37.250000,20.000000,3398.000000,771.000000,1231.000000,744.000000,2.028800,350000.000000 +-121.810000,37.250000,12.000000,2070.000000,587.000000,1216.000000,532.000000,4.192600,244500.000000 +-121.810000,37.230000,24.000000,2413.000000,369.000000,1237.000000,381.000000,6.432800,257200.000000 +-121.810000,37.230000,19.000000,2635.000000,427.000000,1497.000000,410.000000,6.317800,248000.000000 +-121.810000,37.230000,16.000000,1674.000000,281.000000,850.000000,254.000000,5.315700,253300.000000 +-121.820000,39.760000,23.000000,6010.000000,1116.000000,2710.000000,1149.000000,3.006800,107300.000000 +-121.820000,39.750000,37.000000,2236.000000,372.000000,974.000000,379.000000,3.201600,97000.000000 +-121.820000,39.750000,29.000000,7744.000000,1375.000000,3316.000000,1365.000000,3.025300,111400.000000 +-121.820000,39.730000,44.000000,2923.000000,659.000000,1371.000000,626.000000,2.292500,85800.000000 +-121.820000,39.730000,33.000000,2242.000000,517.000000,1160.000000,449.000000,1.742600,60300.000000 +-121.820000,39.720000,42.000000,2978.000000,694.000000,1879.000000,679.000000,1.506400,66300.000000 +-121.820000,38.460000,10.000000,6331.000000,1181.000000,3419.000000,1110.000000,3.708300,154800.000000 +-121.820000,38.360000,26.000000,1974.000000,364.000000,1002.000000,362.000000,3.303600,210000.000000 
+-121.820000,38.020000,46.000000,176.000000,43.000000,101.000000,40.000000,2.236100,93800.000000 +-121.820000,38.010000,50.000000,1120.000000,281.000000,625.000000,239.000000,1.598800,96400.000000 +-121.820000,38.010000,47.000000,1265.000000,254.000000,587.000000,247.000000,2.636400,93500.000000 +-121.820000,38.010000,42.000000,1017.000000,253.000000,798.000000,266.000000,2.171900,99100.000000 +-121.820000,38.010000,25.000000,3018.000000,606.000000,1614.000000,568.000000,3.472200,127000.000000 +-121.820000,38.000000,30.000000,3268.000000,567.000000,1714.000000,565.000000,4.458300,131000.000000 +-121.820000,38.000000,29.000000,2070.000000,452.000000,985.000000,420.000000,2.846600,113400.000000 +-121.820000,37.980000,13.000000,3995.000000,605.000000,1969.000000,607.000000,5.016400,165200.000000 +-121.820000,37.810000,12.000000,4711.000000,659.000000,2089.000000,621.000000,8.320900,485400.000000 +-121.820000,37.690000,12.000000,1906.000000,351.000000,802.000000,319.000000,4.937500,227700.000000 +-121.820000,37.390000,37.000000,4137.000000,636.000000,1569.000000,578.000000,6.100800,286200.000000 +-121.820000,37.380000,32.000000,3747.000000,665.000000,1687.000000,649.000000,5.494900,330800.000000 +-121.820000,37.380000,32.000000,1650.000000,246.000000,768.000000,263.000000,6.846200,320900.000000 +-121.820000,37.370000,42.000000,2913.000000,577.000000,1873.000000,580.000000,3.721400,167900.000000 +-121.820000,37.370000,41.000000,1558.000000,281.000000,970.000000,304.000000,4.416700,215200.000000 +-121.820000,37.370000,40.000000,802.000000,149.000000,445.000000,143.000000,4.044600,196300.000000 +-121.820000,37.360000,34.000000,1834.000000,377.000000,1450.000000,347.000000,3.718800,161500.000000 +-121.820000,37.360000,33.000000,1624.000000,337.000000,1412.000000,323.000000,4.038500,167600.000000 +-121.820000,37.350000,24.000000,2298.000000,575.000000,2409.000000,569.000000,3.450900,182400.000000 +-121.820000,37.340000,23.000000,7609.000000,1446.000000,6034.000000,1414.000000,4.842400,195300.000000 +-121.820000,37.330000,23.000000,3279.000000,647.000000,2582.000000,630.000000,4.378200,175800.000000 +-121.820000,37.320000,10.000000,2506.000000,623.000000,2634.000000,622.000000,3.135000,231400.000000 +-121.820000,37.310000,22.000000,2044.000000,402.000000,1925.000000,429.000000,3.710200,177500.000000 +-121.820000,37.310000,15.000000,1504.000000,294.000000,1267.000000,291.000000,5.514500,219400.000000 +-121.820000,37.290000,16.000000,2085.000000,394.000000,1705.000000,391.000000,5.022500,222800.000000 +-121.820000,37.280000,33.000000,2873.000000,489.000000,1946.000000,475.000000,5.070900,176400.000000 +-121.820000,37.280000,31.000000,1340.000000,235.000000,1336.000000,270.000000,4.236100,179500.000000 +-121.820000,37.270000,16.000000,2030.000000,321.000000,1343.000000,365.000000,6.356600,279100.000000 +-121.820000,37.260000,10.000000,3030.000000,574.000000,1623.000000,589.000000,5.135600,218700.000000 +-121.820000,37.250000,25.000000,4021.000000,634.000000,2178.000000,650.000000,5.166300,241200.000000 +-121.820000,37.250000,24.000000,3344.000000,531.000000,1768.000000,541.000000,5.830500,245600.000000 +-121.820000,37.240000,20.000000,3671.000000,567.000000,1944.000000,589.000000,6.053800,241000.000000 +-121.820000,37.230000,25.000000,2349.000000,394.000000,1266.000000,383.000000,4.968800,233100.000000 +-121.820000,37.230000,23.000000,4487.000000,584.000000,2024.000000,580.000000,7.521800,291500.000000 +-121.820000,36.950000,16.000000,2599.000000,430.000000,1417.000000,445.000000,4.661100,349300.000000 
+-121.820000,36.860000,17.000000,1573.000000,272.000000,142.000000,55.000000,2.171900,420000.000000 +-121.820000,36.610000,24.000000,2437.000000,438.000000,1430.000000,444.000000,3.801500,169100.000000 +-121.820000,36.540000,22.000000,1746.000000,363.000000,886.000000,364.000000,5.546900,378800.000000 +-121.830000,40.690000,14.000000,821.000000,170.000000,477.000000,129.000000,3.150000,87500.000000 +-121.830000,39.760000,12.000000,9831.000000,1921.000000,4644.000000,1775.000000,3.114200,112600.000000 +-121.830000,39.740000,34.000000,3263.000000,604.000000,1290.000000,594.000000,2.575000,130300.000000 +-121.830000,39.730000,52.000000,1741.000000,401.000000,753.000000,377.000000,2.006400,77900.000000 +-121.830000,39.720000,52.000000,1890.000000,420.000000,974.000000,383.000000,1.682700,78700.000000 +-121.830000,39.230000,25.000000,3819.000000,702.000000,1983.000000,658.000000,2.446400,72500.000000 +-121.830000,39.100000,42.000000,1282.000000,198.000000,451.000000,159.000000,3.291700,97900.000000 +-121.830000,38.450000,36.000000,839.000000,158.000000,446.000000,167.000000,2.343800,122700.000000 +-121.830000,38.450000,32.000000,2139.000000,440.000000,1154.000000,411.000000,3.267200,107500.000000 +-121.830000,38.450000,15.000000,5115.000000,776.000000,2540.000000,794.000000,4.861100,146400.000000 +-121.830000,38.000000,8.000000,2572.000000,738.000000,1384.000000,684.000000,1.716100,75800.000000 +-121.830000,38.000000,25.000000,1710.000000,288.000000,799.000000,259.000000,4.835900,145300.000000 +-121.830000,38.000000,15.000000,6365.000000,1646.000000,3838.000000,1458.000000,2.549500,103600.000000 +-121.830000,37.990000,23.000000,1970.000000,296.000000,935.000000,279.000000,4.485300,145900.000000 +-121.830000,37.990000,23.000000,1150.000000,174.000000,572.000000,174.000000,4.916700,152400.000000 +-121.830000,37.990000,16.000000,2919.000000,462.000000,1456.000000,453.000000,5.677900,164700.000000 +-121.830000,37.380000,31.000000,3633.000000,843.000000,2677.000000,797.000000,3.222200,184800.000000 +-121.830000,37.380000,15.000000,4430.000000,992.000000,3278.000000,1018.000000,4.553300,209900.000000 +-121.830000,37.370000,43.000000,821.000000,149.000000,370.000000,135.000000,4.250000,209100.000000 +-121.830000,37.360000,29.000000,4045.000000,885.000000,3036.000000,845.000000,3.198200,171700.000000 +-121.830000,37.360000,22.000000,3936.000000,860.000000,3508.000000,877.000000,4.231200,183800.000000 +-121.830000,37.350000,31.000000,2914.000000,715.000000,3547.000000,645.000000,3.714300,178600.000000 +-121.830000,37.340000,26.000000,1848.000000,339.000000,1952.000000,327.000000,4.087000,182500.000000 +-121.830000,37.340000,21.000000,6404.000000,1232.000000,6047.000000,1235.000000,4.209800,193400.000000 +-121.830000,37.330000,27.000000,3127.000000,610.000000,3257.000000,604.000000,4.633300,173600.000000 +-121.830000,37.320000,26.000000,1125.000000,210.000000,943.000000,214.000000,4.825000,181000.000000 +-121.830000,37.320000,21.000000,4559.000000,1163.000000,5124.000000,1124.000000,3.205200,179000.000000 +-121.830000,37.320000,17.000000,1887.000000,664.000000,1906.000000,597.000000,2.565200,165300.000000 +-121.830000,37.310000,19.000000,11181.000000,1895.000000,7817.000000,1853.000000,5.605800,232700.000000 +-121.830000,37.300000,16.000000,5684.000000,1386.000000,4203.000000,1318.000000,3.196400,185800.000000 +-121.830000,37.290000,20.000000,2308.000000,461.000000,2223.000000,456.000000,4.258900,191000.000000 
+-121.830000,37.290000,20.000000,1649.000000,408.000000,1729.000000,362.000000,3.383300,115200.000000 +-121.830000,37.290000,10.000000,1828.000000,453.000000,1356.000000,409.000000,4.594300,123500.000000 +-121.830000,37.280000,19.000000,2644.000000,833.000000,2317.000000,780.000000,3.104200,183100.000000 +-121.830000,37.280000,17.000000,3057.000000,606.000000,2030.000000,602.000000,5.216600,230900.000000 +-121.830000,37.270000,8.000000,4454.000000,1058.000000,2595.000000,1027.000000,4.561500,282600.000000 +-121.830000,37.260000,7.000000,3609.000000,751.000000,1739.000000,682.000000,4.503300,213100.000000 +-121.830000,37.260000,15.000000,3243.000000,551.000000,1752.000000,551.000000,5.584900,257400.000000 +-121.830000,37.260000,11.000000,2394.000000,403.000000,1393.000000,409.000000,5.587500,259300.000000 +-121.830000,37.250000,17.000000,2332.000000,637.000000,1636.000000,623.000000,3.193200,123400.000000 +-121.830000,37.240000,23.000000,2543.000000,388.000000,1297.000000,385.000000,5.916400,237400.000000 +-121.830000,37.230000,7.000000,5389.000000,903.000000,2232.000000,825.000000,6.665900,500001.000000 +-121.830000,37.230000,22.000000,5507.000000,841.000000,2785.000000,848.000000,6.088900,245200.000000 +-121.830000,37.210000,14.000000,2714.000000,361.000000,1259.000000,375.000000,7.773800,387500.000000 +-121.830000,37.020000,22.000000,1903.000000,350.000000,760.000000,322.000000,2.955900,288400.000000 +-121.830000,36.980000,19.000000,4431.000000,705.000000,1764.000000,679.000000,4.332100,298600.000000 +-121.830000,36.620000,33.000000,2938.000000,576.000000,1516.000000,568.000000,3.500000,162400.000000 +-121.830000,36.620000,33.000000,1771.000000,398.000000,1037.000000,388.000000,2.770800,161800.000000 +-121.830000,36.610000,27.000000,5665.000000,1281.000000,3612.000000,1191.000000,3.054200,142100.000000 +-121.830000,36.610000,27.000000,2248.000000,466.000000,1644.000000,453.000000,3.254500,131200.000000 +-121.830000,36.610000,26.000000,3723.000000,789.000000,2563.000000,747.000000,3.453100,133100.000000 +-121.830000,36.600000,30.000000,2748.000000,502.000000,1491.000000,535.000000,4.347200,185000.000000 +-121.840000,39.760000,14.000000,2351.000000,620.000000,1215.000000,548.000000,2.315500,102300.000000 +-121.840000,39.740000,43.000000,2976.000000,599.000000,1181.000000,560.000000,2.262100,85100.000000 +-121.840000,39.730000,52.000000,957.000000,263.000000,513.000000,223.000000,1.367200,55000.000000 +-121.840000,39.730000,52.000000,857.000000,232.000000,520.000000,198.000000,0.987000,112500.000000 +-121.840000,39.730000,52.000000,677.000000,152.000000,379.000000,154.000000,1.679700,94800.000000 +-121.840000,39.730000,52.000000,502.000000,100.000000,311.000000,100.000000,1.548100,200000.000000 +-121.840000,39.680000,38.000000,549.000000,105.000000,275.000000,94.000000,3.537500,153100.000000 +-121.840000,38.650000,29.000000,3167.000000,548.000000,1554.000000,534.000000,4.348700,200700.000000 +-121.840000,37.990000,15.000000,2380.000000,385.000000,1292.000000,388.000000,4.602900,142600.000000 +-121.840000,37.990000,13.000000,4545.000000,952.000000,2188.000000,901.000000,3.362500,126100.000000 +-121.840000,37.980000,8.000000,7505.000000,1089.000000,3325.000000,1016.000000,5.269900,204200.000000 +-121.840000,37.660000,13.000000,13182.000000,2074.000000,4847.000000,1950.000000,5.641700,352900.000000 +-121.840000,37.390000,31.000000,5524.000000,914.000000,2848.000000,879.000000,5.559200,229900.000000 
+-121.840000,37.380000,34.000000,762.000000,182.000000,611.000000,193.000000,3.562500,201800.000000 +-121.840000,37.380000,33.000000,835.000000,181.000000,781.000000,169.000000,5.108200,195800.000000 +-121.840000,37.370000,42.000000,1237.000000,232.000000,900.000000,241.000000,3.857100,187500.000000 +-121.840000,37.370000,28.000000,1579.000000,339.000000,1252.000000,353.000000,4.161500,214800.000000 +-121.840000,37.370000,15.000000,3315.000000,1042.000000,2749.000000,1059.000000,2.319900,140100.000000 +-121.840000,37.350000,20.000000,3375.000000,867.000000,4610.000000,860.000000,2.689400,182200.000000 +-121.840000,37.340000,27.000000,2512.000000,526.000000,3033.000000,526.000000,4.250000,162900.000000 +-121.840000,37.330000,28.000000,1535.000000,330.000000,1937.000000,317.000000,4.114600,160100.000000 +-121.840000,37.330000,26.000000,1934.000000,408.000000,2059.000000,416.000000,3.676500,163600.000000 +-121.840000,37.320000,22.000000,3015.000000,581.000000,2491.000000,530.000000,4.341900,176300.000000 +-121.840000,37.320000,14.000000,5762.000000,1538.000000,3979.000000,1389.000000,3.695300,192600.000000 +-121.840000,37.290000,24.000000,3403.000000,656.000000,2829.000000,612.000000,4.775000,191900.000000 +-121.840000,37.280000,18.000000,2749.000000,633.000000,1779.000000,561.000000,3.925000,166100.000000 +-121.840000,37.270000,9.000000,3624.000000,812.000000,1856.000000,721.000000,4.208300,198400.000000 +-121.840000,37.270000,17.000000,2795.000000,482.000000,1904.000000,506.000000,5.018600,250800.000000 +-121.840000,37.260000,5.000000,1808.000000,340.000000,825.000000,339.000000,5.050900,184800.000000 +-121.840000,37.250000,25.000000,5939.000000,989.000000,3275.000000,954.000000,5.648800,234600.000000 +-121.840000,37.250000,17.000000,2363.000000,473.000000,1369.000000,442.000000,4.835500,141600.000000 +-121.840000,37.240000,24.000000,7991.000000,1286.000000,4017.000000,1213.000000,5.474100,238800.000000 +-121.840000,37.240000,18.000000,3574.000000,504.000000,1803.000000,536.000000,6.783600,274100.000000 +-121.840000,37.180000,6.000000,9176.000000,1201.000000,3637.000000,1138.000000,8.383700,473400.000000 +-121.840000,36.940000,29.000000,4921.000000,967.000000,2319.000000,823.000000,4.951700,307900.000000 +-121.840000,36.610000,26.000000,2902.000000,761.000000,2258.000000,719.000000,2.566300,128900.000000 +-121.840000,36.610000,15.000000,2190.000000,586.000000,1570.000000,510.000000,1.875000,122300.000000 +-121.840000,36.600000,30.000000,2958.000000,691.000000,1616.000000,666.000000,3.464300,191800.000000 +-121.840000,36.590000,34.000000,3852.000000,733.000000,1661.000000,696.000000,4.326900,221300.000000 +-121.840000,36.250000,20.000000,958.000000,245.000000,590.000000,189.000000,2.609400,362500.000000 +-121.850000,39.770000,17.000000,5273.000000,1177.000000,2446.000000,1199.000000,1.936200,89900.000000 +-121.850000,39.750000,39.000000,568.000000,127.000000,267.000000,129.000000,1.809500,78100.000000 +-121.850000,39.740000,41.000000,2901.000000,689.000000,1426.000000,632.000000,1.563300,84500.000000 +-121.850000,39.730000,52.000000,444.000000,80.000000,1107.000000,98.000000,3.419100,137500.000000 +-121.850000,39.730000,17.000000,3425.000000,827.000000,2469.000000,758.000000,0.939300,88900.000000 +-121.850000,39.720000,18.000000,7272.000000,1559.000000,5022.000000,1524.000000,1.691100,98800.000000 +-121.850000,38.430000,2.000000,790.000000,135.000000,235.000000,87.000000,5.086200,166500.000000 
+-121.850000,38.000000,26.000000,3364.000000,570.000000,1806.000000,566.000000,4.264700,133400.000000 +-121.850000,38.000000,24.000000,2269.000000,584.000000,1239.000000,542.000000,2.041100,100000.000000 +-121.850000,37.720000,43.000000,228.000000,40.000000,83.000000,42.000000,10.320300,400000.000000 +-121.850000,37.680000,4.000000,4719.000000,741.000000,1895.000000,742.000000,6.813200,282500.000000 +-121.850000,37.660000,14.000000,4236.000000,701.000000,1833.000000,663.000000,5.639900,300600.000000 +-121.850000,37.440000,8.000000,426.000000,61.000000,241.000000,55.000000,7.309000,367900.000000 +-121.850000,37.410000,25.000000,1837.000000,278.000000,1006.000000,271.000000,6.684200,265300.000000 +-121.850000,37.410000,17.000000,2156.000000,435.000000,1400.000000,393.000000,5.609600,199100.000000 +-121.850000,37.400000,23.000000,1793.000000,319.000000,1145.000000,310.000000,5.596800,243200.000000 +-121.850000,37.390000,15.000000,8748.000000,1547.000000,4784.000000,1524.000000,5.832200,276600.000000 +-121.850000,37.380000,12.000000,12980.000000,2568.000000,8190.000000,2515.000000,5.241500,286500.000000 +-121.850000,37.360000,18.000000,1525.000000,485.000000,1705.000000,448.000000,3.719800,128600.000000 +-121.850000,37.360000,15.000000,3148.000000,1116.000000,3556.000000,1037.000000,3.046600,159600.000000 +-121.850000,37.360000,11.000000,2109.000000,592.000000,2744.000000,607.000000,4.045200,205900.000000 +-121.850000,37.340000,27.000000,1481.000000,409.000000,1505.000000,391.000000,2.576900,137500.000000 +-121.850000,37.330000,19.000000,735.000000,158.000000,597.000000,134.000000,4.520800,188100.000000 +-121.850000,37.330000,19.000000,2228.000000,559.000000,2845.000000,551.000000,2.600000,172800.000000 +-121.850000,37.330000,16.000000,2987.000000,874.000000,4241.000000,841.000000,2.802400,127900.000000 +-121.850000,37.300000,19.000000,6275.000000,1546.000000,4312.000000,1466.000000,2.776800,178600.000000 +-121.850000,37.280000,17.000000,4208.000000,954.000000,1476.000000,904.000000,2.397100,87500.000000 +-121.850000,37.270000,17.000000,1957.000000,261.000000,863.000000,269.000000,7.333900,294200.000000 +-121.850000,37.260000,16.000000,2312.000000,303.000000,1158.000000,295.000000,7.432300,311800.000000 +-121.850000,37.260000,16.000000,1982.000000,280.000000,1030.000000,297.000000,6.433900,289200.000000 +-121.850000,37.260000,16.000000,1816.000000,241.000000,793.000000,234.000000,6.819400,291200.000000 +-121.850000,37.250000,20.000000,3773.000000,624.000000,1965.000000,607.000000,5.493900,241200.000000 +-121.850000,37.240000,17.000000,6425.000000,1268.000000,3934.000000,1238.000000,5.122800,237600.000000 +-121.850000,36.610000,38.000000,238.000000,72.000000,191.000000,67.000000,1.389700,125000.000000 +-121.850000,36.600000,41.000000,3138.000000,717.000000,1890.000000,642.000000,2.486400,140400.000000 +-121.850000,36.600000,21.000000,2381.000000,701.000000,1264.000000,659.000000,2.537200,218000.000000 +-121.850000,36.590000,42.000000,891.000000,203.000000,525.000000,212.000000,3.315600,186300.000000 +-121.860000,40.770000,17.000000,2816.000000,639.000000,1027.000000,406.000000,2.503000,65600.000000 +-121.860000,39.780000,12.000000,7653.000000,1578.000000,3628.000000,1494.000000,3.090500,117800.000000 +-121.860000,39.760000,19.000000,7254.000000,1785.000000,4030.000000,1667.000000,2.009400,87300.000000 +-121.860000,38.040000,52.000000,242.000000,59.000000,188.000000,54.000000,1.395800,67500.000000 
+-121.860000,38.000000,16.000000,3216.000000,464.000000,1504.000000,453.000000,5.250000,161700.000000 +-121.860000,37.700000,13.000000,9621.000000,1344.000000,4389.000000,1391.000000,6.682700,313700.000000 +-121.860000,37.660000,22.000000,3634.000000,664.000000,1699.000000,640.000000,4.159700,293200.000000 +-121.860000,37.420000,20.000000,5032.000000,808.000000,2695.000000,801.000000,6.622700,264800.000000 +-121.860000,37.410000,16.000000,2938.000000,589.000000,1718.000000,568.000000,5.507300,178900.000000 +-121.860000,37.410000,16.000000,2411.000000,420.000000,1671.000000,442.000000,6.500400,263600.000000 +-121.860000,37.410000,16.000000,1489.000000,262.000000,945.000000,263.000000,7.386100,267000.000000 +-121.860000,37.400000,16.000000,2391.000000,369.000000,1419.000000,373.000000,5.872100,267800.000000 +-121.860000,37.390000,17.000000,1777.000000,328.000000,1235.000000,329.000000,5.422500,258100.000000 +-121.860000,37.380000,15.000000,2052.000000,405.000000,1380.000000,409.000000,5.868600,181100.000000 +-121.860000,37.370000,15.000000,8162.000000,2124.000000,8793.000000,2086.000000,3.330600,210300.000000 +-121.860000,37.360000,31.000000,1602.000000,358.000000,1179.000000,354.000000,4.489600,156800.000000 +-121.860000,37.350000,46.000000,1448.000000,330.000000,1094.000000,331.000000,2.496800,174100.000000 +-121.860000,37.350000,43.000000,1536.000000,371.000000,1256.000000,357.000000,2.800000,153300.000000 +-121.860000,37.350000,35.000000,2391.000000,605.000000,1886.000000,595.000000,2.555100,182100.000000 +-121.860000,37.340000,40.000000,2277.000000,508.000000,1718.000000,434.000000,3.008900,185200.000000 +-121.860000,37.340000,29.000000,5274.000000,1625.000000,6234.000000,1639.000000,2.594700,177300.000000 +-121.860000,37.320000,13.000000,2519.000000,581.000000,2094.000000,530.000000,4.362100,173400.000000 +-121.860000,37.310000,24.000000,1939.000000,652.000000,1808.000000,625.000000,2.225900,112500.000000 +-121.860000,37.290000,14.000000,6160.000000,1222.000000,2472.000000,1204.000000,4.144400,178400.000000 +-121.860000,37.270000,19.000000,1852.000000,268.000000,866.000000,272.000000,5.613900,279500.000000 +-121.860000,37.270000,17.000000,4393.000000,709.000000,2292.000000,692.000000,5.687600,246500.000000 +-121.860000,37.260000,16.000000,2814.000000,485.000000,1305.000000,465.000000,5.512100,224100.000000 +-121.860000,37.250000,16.000000,6958.000000,1300.000000,2965.000000,1217.000000,4.288500,262400.000000 +-121.860000,37.220000,18.000000,7245.000000,1029.000000,2893.000000,1049.000000,6.950800,361200.000000 +-121.860000,37.210000,23.000000,2552.000000,305.000000,916.000000,316.000000,9.197400,500001.000000 +-121.860000,37.000000,16.000000,8638.000000,1392.000000,3706.000000,1251.000000,5.503000,351800.000000 +-121.860000,36.630000,37.000000,338.000000,109.000000,231.000000,100.000000,2.531300,108300.000000 +-121.860000,36.600000,31.000000,1044.000000,285.000000,762.000000,301.000000,3.038000,195300.000000 +-121.860000,36.600000,21.000000,3634.000000,1011.000000,1985.000000,917.000000,2.908500,156300.000000 +-121.860000,36.580000,20.000000,6332.000000,991.000000,2668.000000,955.000000,5.757800,347700.000000 +-121.870000,39.820000,11.000000,5103.000000,825.000000,2456.000000,810.000000,4.503200,159700.000000 +-121.870000,39.750000,22.000000,1707.000000,296.000000,822.000000,297.000000,3.662500,126600.000000 +-121.870000,39.740000,7.000000,1737.000000,290.000000,747.000000,265.000000,3.900000,147000.000000 
+-121.870000,38.020000,52.000000,2264.000000,439.000000,1403.000000,476.000000,2.708300,99400.000000 +-121.870000,38.020000,31.000000,3644.000000,746.000000,2229.000000,678.000000,3.138900,117800.000000 +-121.870000,38.000000,17.000000,2713.000000,442.000000,1475.000000,415.000000,4.854200,144100.000000 +-121.870000,37.990000,15.000000,2203.000000,312.000000,1051.000000,311.000000,4.978300,163900.000000 +-121.870000,37.670000,28.000000,1812.000000,294.000000,853.000000,278.000000,4.987900,229400.000000 +-121.870000,37.670000,10.000000,4337.000000,800.000000,1813.000000,743.000000,5.500000,247200.000000 +-121.870000,37.660000,39.000000,522.000000,116.000000,161.000000,102.000000,2.489600,238500.000000 +-121.870000,37.660000,27.000000,1569.000000,242.000000,583.000000,214.000000,5.751900,278500.000000 +-121.870000,37.570000,13.000000,5519.000000,833.000000,2444.000000,825.000000,7.069100,393200.000000 +-121.870000,37.460000,43.000000,91.000000,12.000000,58.000000,16.000000,15.000100,500001.000000 +-121.870000,37.430000,22.000000,3805.000000,596.000000,2118.000000,621.000000,6.289200,254200.000000 +-121.870000,37.420000,25.000000,4430.000000,729.000000,2685.000000,721.000000,5.696500,261100.000000 +-121.870000,37.410000,24.000000,3308.000000,548.000000,1891.000000,544.000000,5.668300,248700.000000 +-121.870000,37.410000,17.000000,3719.000000,588.000000,2089.000000,561.000000,6.786700,273700.000000 +-121.870000,37.400000,16.000000,1767.000000,268.000000,1061.000000,280.000000,6.958400,351600.000000 +-121.870000,37.390000,16.000000,2655.000000,487.000000,1862.000000,448.000000,6.057000,246800.000000 +-121.870000,37.390000,16.000000,1334.000000,389.000000,1103.000000,415.000000,3.715300,229800.000000 +-121.870000,37.380000,16.000000,1050.000000,245.000000,722.000000,228.000000,4.518700,163500.000000 +-121.870000,37.380000,14.000000,3851.000000,534.000000,2052.000000,478.000000,7.073500,335600.000000 +-121.870000,37.360000,34.000000,938.000000,242.000000,769.000000,226.000000,3.562500,194500.000000 +-121.870000,37.350000,52.000000,1557.000000,424.000000,1580.000000,434.000000,2.327700,183700.000000 +-121.870000,37.350000,37.000000,1566.000000,375.000000,1223.000000,346.000000,3.279300,174500.000000 +-121.870000,37.350000,27.000000,3500.000000,1036.000000,3019.000000,955.000000,2.922200,153700.000000 +-121.870000,37.340000,52.000000,1170.000000,215.000000,604.000000,207.000000,2.666700,325900.000000 +-121.870000,37.340000,52.000000,1087.000000,166.000000,650.000000,194.000000,6.634500,309000.000000 +-121.870000,37.340000,39.000000,2479.000000,541.000000,1990.000000,506.000000,2.430600,289100.000000 +-121.870000,37.330000,37.000000,3137.000000,685.000000,2048.000000,651.000000,3.015600,270300.000000 +-121.870000,37.320000,39.000000,1839.000000,471.000000,1528.000000,456.000000,2.681800,184900.000000 +-121.870000,37.320000,36.000000,1471.000000,360.000000,1182.000000,326.000000,2.703100,175800.000000 +-121.870000,37.310000,6.000000,3797.000000,984.000000,2437.000000,904.000000,3.680200,152400.000000 +-121.870000,37.300000,28.000000,859.000000,199.000000,455.000000,211.000000,2.329300,215900.000000 +-121.870000,37.300000,14.000000,360.000000,124.000000,134.000000,84.000000,2.741100,112500.000000 +-121.870000,37.290000,18.000000,1892.000000,568.000000,974.000000,553.000000,2.371500,228000.000000 +-121.870000,37.270000,16.000000,3298.000000,451.000000,1542.000000,423.000000,6.706400,305600.000000 +-121.870000,37.260000,24.000000,2383.000000,343.000000,1146.000000,341.000000,5.622300,265700.000000 
+-121.870000,37.250000,4.000000,2498.000000,775.000000,1213.000000,631.000000,3.784400,183900.000000 +-121.870000,37.230000,19.000000,7357.000000,963.000000,3018.000000,981.000000,6.947300,361400.000000 +-121.870000,37.220000,26.000000,1921.000000,250.000000,725.000000,253.000000,7.693300,405900.000000 +-121.870000,37.220000,17.000000,2825.000000,365.000000,1052.000000,345.000000,8.059500,485000.000000 +-121.870000,37.210000,18.000000,1080.000000,122.000000,382.000000,121.000000,9.080000,500001.000000 +-121.870000,37.100000,20.000000,1918.000000,304.000000,798.000000,302.000000,7.575500,402300.000000 +-121.870000,36.950000,7.000000,3703.000000,679.000000,1375.000000,608.000000,4.921900,368400.000000 +-121.870000,36.610000,21.000000,1616.000000,400.000000,688.000000,384.000000,4.210900,278800.000000 +-121.870000,36.550000,20.000000,10053.000000,1768.000000,3083.000000,1621.000000,5.150600,387500.000000 +-121.880000,39.740000,12.000000,14631.000000,3298.000000,7517.000000,3262.000000,1.678500,153100.000000 +-121.880000,38.030000,52.000000,1225.000000,250.000000,725.000000,231.000000,2.000000,101400.000000 +-121.880000,38.030000,10.000000,2769.000000,619.000000,1045.000000,469.000000,4.111100,158600.000000 +-121.880000,38.020000,46.000000,2112.000000,466.000000,1249.000000,382.000000,2.573700,87000.000000 +-121.880000,38.010000,9.000000,5329.000000,1284.000000,2827.000000,1202.000000,2.737400,150000.000000 +-121.880000,38.000000,22.000000,721.000000,117.000000,367.000000,129.000000,5.309800,151900.000000 +-121.880000,38.000000,16.000000,2605.000000,440.000000,1352.000000,408.000000,4.194700,140300.000000 +-121.880000,37.670000,25.000000,2244.000000,301.000000,937.000000,324.000000,6.452400,296900.000000 +-121.880000,37.640000,20.000000,1309.000000,184.000000,514.000000,172.000000,10.950600,475800.000000 +-121.880000,37.440000,17.000000,1621.000000,299.000000,1028.000000,293.000000,5.272200,186900.000000 +-121.880000,37.440000,14.000000,2073.000000,343.000000,1107.000000,330.000000,6.709300,311200.000000 +-121.880000,37.430000,31.000000,2573.000000,474.000000,1898.000000,475.000000,5.665100,204100.000000 +-121.880000,37.410000,23.000000,3224.000000,652.000000,2183.000000,655.000000,4.380700,226900.000000 +-121.880000,37.400000,9.000000,6751.000000,1575.000000,4240.000000,1438.000000,5.340000,257400.000000 +-121.880000,37.390000,13.000000,3334.000000,565.000000,2240.000000,561.000000,7.105000,273900.000000 +-121.880000,37.370000,3.000000,4430.000000,841.000000,2559.000000,801.000000,6.095900,272700.000000 +-121.880000,37.360000,42.000000,2087.000000,402.000000,1342.000000,423.000000,4.214900,199000.000000 +-121.880000,37.360000,30.000000,2453.000000,573.000000,1845.000000,530.000000,3.739600,210700.000000 +-121.880000,37.350000,49.000000,1728.000000,350.000000,1146.000000,391.000000,3.578100,193000.000000 +-121.880000,37.350000,43.000000,1086.000000,219.000000,715.000000,226.000000,4.238100,193500.000000 +-121.880000,37.340000,52.000000,867.000000,232.000000,1264.000000,227.000000,2.631200,302900.000000 +-121.880000,37.340000,52.000000,1390.000000,365.000000,921.000000,352.000000,2.144200,188900.000000 +-121.880000,37.340000,44.000000,1267.000000,353.000000,1018.000000,327.000000,2.419600,194400.000000 +-121.880000,37.340000,40.000000,1547.000000,625.000000,1493.000000,543.000000,1.288700,212500.000000 +-121.880000,37.330000,45.000000,1192.000000,371.000000,1084.000000,345.000000,2.859400,205900.000000 
+-121.880000,37.330000,41.000000,395.000000,164.000000,549.000000,184.000000,2.375000,175000.000000 +-121.880000,37.330000,36.000000,1904.000000,689.000000,3561.000000,632.000000,2.097200,187500.000000 +-121.880000,37.330000,35.000000,3300.000000,1154.000000,3120.000000,1075.000000,1.473000,213600.000000 +-121.880000,37.320000,45.000000,2213.000000,564.000000,1920.000000,514.000000,3.280600,164200.000000 +-121.880000,37.320000,30.000000,1242.000000,338.000000,1438.000000,325.000000,2.660700,169300.000000 +-121.880000,37.310000,28.000000,3085.000000,552.000000,1277.000000,512.000000,4.579500,262800.000000 +-121.880000,37.300000,42.000000,1867.000000,398.000000,927.000000,389.000000,4.325000,247000.000000 +-121.880000,37.280000,33.000000,2951.000000,529.000000,1288.000000,521.000000,4.155400,313100.000000 +-121.880000,37.270000,27.000000,2019.000000,335.000000,1020.000000,351.000000,5.817800,267400.000000 +-121.880000,37.270000,24.000000,4567.000000,688.000000,2102.000000,695.000000,5.689500,289000.000000 +-121.880000,37.260000,25.000000,3025.000000,689.000000,1755.000000,661.000000,3.889300,218600.000000 +-121.880000,37.260000,13.000000,1893.000000,487.000000,1018.000000,464.000000,3.804700,204700.000000 +-121.880000,37.260000,13.000000,1676.000000,471.000000,710.000000,406.000000,3.893600,225900.000000 +-121.880000,37.250000,24.000000,968.000000,240.000000,631.000000,250.000000,2.863600,240300.000000 +-121.880000,37.240000,24.000000,4420.000000,996.000000,2981.000000,975.000000,3.506000,226400.000000 +-121.880000,37.240000,14.000000,7174.000000,950.000000,2782.000000,899.000000,8.306500,394200.000000 +-121.880000,36.980000,21.000000,4117.000000,752.000000,2001.000000,763.000000,4.895300,289500.000000 +-121.880000,36.960000,18.000000,6355.000000,1100.000000,2304.000000,972.000000,6.028100,321100.000000 +-121.880000,36.960000,18.000000,4910.000000,817.000000,1971.000000,773.000000,5.832500,308800.000000 +-121.880000,36.600000,30.000000,1671.000000,469.000000,760.000000,375.000000,2.516400,178100.000000 +-121.880000,36.590000,30.000000,1822.000000,505.000000,932.000000,496.000000,2.689400,179500.000000 +-121.880000,36.490000,28.000000,2830.000000,458.000000,898.000000,370.000000,5.814200,500001.000000 +-121.890000,40.970000,26.000000,1183.000000,276.000000,513.000000,206.000000,2.225000,52000.000000 +-121.890000,39.710000,26.000000,2741.000000,451.000000,1217.000000,437.000000,3.700700,139200.000000 +-121.890000,38.020000,36.000000,2707.000000,550.000000,1827.000000,545.000000,3.337100,94600.000000 +-121.890000,38.010000,30.000000,4114.000000,743.000000,1994.000000,722.000000,4.222700,134400.000000 +-121.890000,38.010000,28.000000,3639.000000,751.000000,2362.000000,641.000000,3.004200,103900.000000 +-121.890000,37.820000,4.000000,11444.000000,1355.000000,3898.000000,1257.000000,13.294900,500001.000000 +-121.890000,37.690000,4.000000,6159.000000,1510.000000,2649.000000,1241.000000,3.620000,139300.000000 +-121.890000,37.680000,12.000000,7490.000000,1207.000000,3329.000000,1136.000000,6.337300,339700.000000 +-121.890000,37.670000,20.000000,2948.000000,471.000000,1181.000000,474.000000,6.060400,247900.000000 +-121.890000,37.670000,19.000000,2034.000000,288.000000,852.000000,295.000000,6.528500,300400.000000 +-121.890000,37.660000,3.000000,1565.000000,464.000000,769.000000,461.000000,2.118700,231300.000000 +-121.890000,37.490000,9.000000,4909.000000,577.000000,1981.000000,591.000000,9.719400,500001.000000 
+-121.890000,37.450000,15.000000,2428.000000,513.000000,1687.000000,519.000000,4.750000,254400.000000 +-121.890000,37.440000,8.000000,2534.000000,380.000000,1527.000000,364.000000,7.853200,422800.000000 +-121.890000,37.390000,2.000000,1136.000000,365.000000,535.000000,257.000000,4.375000,425000.000000 +-121.890000,37.380000,3.000000,4778.000000,1047.000000,2522.000000,990.000000,5.769500,271400.000000 +-121.890000,37.360000,37.000000,1525.000000,363.000000,1075.000000,374.000000,2.897100,186100.000000 +-121.890000,37.350000,44.000000,2019.000000,615.000000,1243.000000,518.000000,2.054900,193800.000000 +-121.890000,37.350000,44.000000,1668.000000,380.000000,1143.000000,365.000000,3.208300,181900.000000 +-121.890000,37.350000,43.000000,1185.000000,296.000000,933.000000,244.000000,2.925000,170800.000000 +-121.890000,37.340000,46.000000,1197.000000,416.000000,898.000000,370.000000,2.171400,190600.000000 +-121.890000,37.340000,43.000000,1423.000000,467.000000,1013.000000,428.000000,1.670800,204200.000000 +-121.890000,37.340000,20.000000,1106.000000,494.000000,851.000000,448.000000,0.889400,350000.000000 +-121.890000,37.330000,6.000000,1495.000000,552.000000,1087.000000,557.000000,2.879800,225000.000000 +-121.890000,37.330000,49.000000,658.000000,318.000000,467.000000,316.000000,0.706800,200000.000000 +-121.890000,37.320000,43.000000,1105.000000,241.000000,982.000000,206.000000,2.114900,184900.000000 +-121.890000,37.320000,41.000000,977.000000,265.000000,865.000000,253.000000,3.231700,184800.000000 +-121.890000,37.320000,34.000000,1014.000000,246.000000,952.000000,215.000000,2.886400,172500.000000 +-121.890000,37.310000,52.000000,1994.000000,404.000000,1014.000000,389.000000,4.388200,223600.000000 +-121.890000,37.310000,47.000000,2986.000000,627.000000,1399.000000,613.000000,3.745500,247400.000000 +-121.890000,37.310000,40.000000,1844.000000,340.000000,719.000000,305.000000,3.368200,235200.000000 +-121.890000,37.300000,52.000000,2071.000000,362.000000,825.000000,364.000000,4.241400,284800.000000 +-121.890000,37.300000,47.000000,2355.000000,426.000000,961.000000,428.000000,5.395500,282300.000000 +-121.890000,37.300000,47.000000,1604.000000,284.000000,639.000000,278.000000,5.841500,283300.000000 +-121.890000,37.300000,46.000000,2639.000000,448.000000,938.000000,424.000000,5.066200,331600.000000 +-121.890000,37.290000,38.000000,1568.000000,351.000000,710.000000,339.000000,2.704200,286600.000000 +-121.890000,37.290000,36.000000,2959.000000,529.000000,1125.000000,520.000000,4.261400,268800.000000 +-121.890000,37.280000,32.000000,4308.000000,717.000000,2002.000000,695.000000,4.164500,281900.000000 +-121.890000,37.270000,28.000000,1481.000000,256.000000,688.000000,221.000000,5.208800,240900.000000 +-121.890000,37.260000,26.000000,1864.000000,331.000000,956.000000,325.000000,5.500000,231700.000000 +-121.890000,37.260000,25.000000,3319.000000,531.000000,1560.000000,502.000000,5.847900,263300.000000 +-121.890000,37.250000,26.000000,1741.000000,323.000000,1007.000000,339.000000,4.706900,234800.000000 +-121.890000,37.250000,23.000000,2705.000000,449.000000,1180.000000,442.000000,6.079100,316500.000000 +-121.890000,37.250000,21.000000,2080.000000,352.000000,1040.000000,325.000000,5.288700,264500.000000 +-121.890000,37.230000,20.000000,7754.000000,976.000000,3094.000000,941.000000,8.190000,361600.000000 +-121.890000,37.230000,16.000000,3574.000000,466.000000,1503.000000,487.000000,8.198800,355900.000000 +-121.890000,37.210000,14.000000,5636.000000,697.000000,2281.000000,680.000000,8.426200,459200.000000 
+-121.890000,36.630000,20.000000,1834.000000,554.000000,971.000000,514.000000,3.038300,217300.000000 +-121.890000,36.600000,19.000000,656.000000,200.000000,248.000000,173.000000,1.265600,500000.000000 +-121.890000,36.590000,32.000000,784.000000,112.000000,262.000000,114.000000,6.918000,500001.000000 +-121.890000,36.590000,18.000000,2700.000000,937.000000,1042.000000,744.000000,3.136400,150000.000000 +-121.900000,39.590000,20.000000,1465.000000,278.000000,745.000000,250.000000,3.062500,93800.000000 +-121.900000,38.720000,38.000000,575.000000,107.000000,259.000000,109.000000,3.750000,187500.000000 +-121.900000,38.040000,36.000000,1489.000000,331.000000,838.000000,259.000000,1.202400,90200.000000 +-121.900000,38.020000,5.000000,1560.000000,369.000000,1037.000000,372.000000,3.615400,181800.000000 +-121.900000,38.020000,12.000000,1497.000000,360.000000,943.000000,341.000000,2.141700,122200.000000 +-121.900000,38.010000,34.000000,3779.000000,766.000000,2356.000000,722.000000,3.512900,110600.000000 +-121.900000,38.010000,16.000000,2604.000000,454.000000,1696.000000,481.000000,4.662800,136000.000000 +-121.900000,38.000000,14.000000,2677.000000,368.000000,1288.000000,375.000000,6.049700,177500.000000 +-121.900000,38.000000,14.000000,1930.000000,363.000000,990.000000,322.000000,4.109400,162200.000000 +-121.900000,37.670000,7.000000,9540.000000,1294.000000,3926.000000,1229.000000,7.435300,389800.000000 +-121.900000,37.670000,15.000000,2130.000000,273.000000,876.000000,285.000000,7.263900,332400.000000 +-121.900000,37.660000,18.000000,7397.000000,1137.000000,3126.000000,1115.000000,6.499400,323000.000000 +-121.900000,37.450000,18.000000,4900.000000,814.000000,2984.000000,758.000000,6.617600,276200.000000 +-121.900000,37.450000,16.000000,2952.000000,446.000000,1525.000000,460.000000,5.606300,320500.000000 +-121.900000,37.440000,9.000000,957.000000,139.000000,532.000000,142.000000,8.667500,441000.000000 +-121.900000,37.440000,4.000000,1646.000000,408.000000,853.000000,410.000000,5.082100,265500.000000 +-121.900000,37.410000,24.000000,4759.000000,921.000000,3188.000000,902.000000,5.634400,228700.000000 +-121.900000,37.400000,16.000000,2998.000000,603.000000,1606.000000,615.000000,3.762200,150000.000000 +-121.900000,37.370000,20.000000,78.000000,72.000000,120.000000,69.000000,1.093800,187500.000000 +-121.900000,37.360000,47.000000,1007.000000,245.000000,581.000000,240.000000,2.954500,237500.000000 +-121.900000,37.360000,38.000000,1141.000000,333.000000,1028.000000,291.000000,2.733300,182500.000000 +-121.900000,37.350000,42.000000,2082.000000,626.000000,1396.000000,610.000000,3.250000,185300.000000 +-121.900000,37.340000,52.000000,241.000000,69.000000,385.000000,64.000000,2.619000,212500.000000 +-121.900000,37.340000,50.000000,1345.000000,287.000000,791.000000,254.000000,3.596600,245800.000000 +-121.900000,37.330000,52.000000,1009.000000,231.000000,929.000000,210.000000,2.500000,162500.000000 +-121.900000,37.330000,34.000000,197.000000,44.000000,152.000000,47.000000,4.050000,200000.000000 +-121.900000,37.330000,11.000000,1283.000000,390.000000,718.000000,345.000000,4.226000,166700.000000 +-121.900000,37.320000,48.000000,1274.000000,313.000000,971.000000,291.000000,3.773800,220600.000000 +-121.900000,37.310000,52.000000,2125.000000,431.000000,1014.000000,443.000000,5.818600,281100.000000 +-121.900000,37.300000,52.000000,1575.000000,284.000000,629.000000,284.000000,5.643700,312000.000000 +-121.900000,37.300000,52.000000,1456.000000,269.000000,582.000000,277.000000,5.036000,296600.000000 
+-121.900000,37.300000,39.000000,3627.000000,666.000000,1531.000000,635.000000,4.537000,345900.000000 +-121.900000,37.290000,36.000000,1389.000000,225.000000,623.000000,223.000000,6.633100,283300.000000 +-121.900000,37.290000,26.000000,1797.000000,244.000000,626.000000,244.000000,7.857500,424600.000000 +-121.900000,37.280000,34.000000,4613.000000,749.000000,2050.000000,725.000000,5.392200,302900.000000 +-121.900000,37.280000,26.000000,3756.000000,553.000000,1408.000000,535.000000,5.642700,320000.000000 +-121.900000,37.270000,33.000000,3410.000000,583.000000,1739.000000,588.000000,5.071400,255600.000000 +-121.900000,37.270000,28.000000,4538.000000,685.000000,1996.000000,667.000000,5.460900,263600.000000 +-121.900000,37.260000,20.000000,4447.000000,661.000000,2062.000000,660.000000,6.808800,283300.000000 +-121.900000,37.250000,28.000000,2714.000000,502.000000,1389.000000,490.000000,5.738500,240400.000000 +-121.900000,37.250000,20.000000,5483.000000,1079.000000,2892.000000,1057.000000,4.684500,250000.000000 +-121.900000,37.160000,43.000000,1529.000000,311.000000,570.000000,250.000000,5.236600,293300.000000 +-121.900000,37.100000,23.000000,1708.000000,287.000000,670.000000,238.000000,6.451700,356600.000000 +-121.900000,36.970000,21.000000,3707.000000,751.000000,1420.000000,608.000000,4.448500,295200.000000 +-121.900000,36.930000,22.000000,7281.000000,1233.000000,1849.000000,832.000000,5.327600,335500.000000 +-121.900000,36.600000,45.000000,2249.000000,412.000000,944.000000,429.000000,3.062500,260300.000000 +-121.900000,36.600000,39.000000,1629.000000,423.000000,804.000000,386.000000,2.466300,236500.000000 +-121.900000,36.600000,33.000000,2461.000000,649.000000,1234.000000,601.000000,2.872700,225000.000000 +-121.900000,36.590000,42.000000,2689.000000,510.000000,1023.000000,459.000000,4.618200,301000.000000 +-121.900000,36.580000,31.000000,1431.000000,370.000000,704.000000,393.000000,3.197700,289300.000000 +-121.910000,39.030000,48.000000,1096.000000,218.000000,657.000000,199.000000,2.784100,65800.000000 +-121.910000,38.020000,15.000000,2966.000000,558.000000,1687.000000,527.000000,3.481700,129800.000000 +-121.910000,37.930000,13.000000,1610.000000,198.000000,703.000000,217.000000,8.705900,329400.000000 +-121.910000,37.710000,25.000000,4377.000000,668.000000,2038.000000,671.000000,5.723300,231800.000000 +-121.910000,37.690000,23.000000,2179.000000,308.000000,926.000000,299.000000,5.934500,259600.000000 +-121.910000,37.690000,18.000000,2876.000000,423.000000,1395.000000,427.000000,6.313200,259200.000000 +-121.910000,37.680000,20.000000,1804.000000,254.000000,831.000000,260.000000,6.177000,262900.000000 +-121.910000,37.680000,18.000000,3631.000000,547.000000,1700.000000,520.000000,5.817000,257300.000000 +-121.910000,37.460000,26.000000,2762.000000,496.000000,1716.000000,459.000000,5.606200,226800.000000 +-121.910000,37.440000,26.000000,1669.000000,276.000000,951.000000,278.000000,4.779400,225800.000000 +-121.910000,37.440000,19.000000,2174.000000,484.000000,1645.000000,484.000000,5.036200,255100.000000 +-121.910000,37.420000,19.000000,1684.000000,387.000000,1224.000000,376.000000,4.138900,174100.000000 +-121.910000,37.360000,42.000000,3224.000000,708.000000,1940.000000,674.000000,3.215300,199700.000000 +-121.910000,37.340000,35.000000,2189.000000,607.000000,1193.000000,562.000000,2.804200,240900.000000 +-121.910000,37.330000,52.000000,2562.000000,410.000000,973.000000,398.000000,4.885400,330600.000000 
+-121.910000,37.330000,52.000000,2212.000000,563.000000,1195.000000,532.000000,2.894000,209500.000000 +-121.910000,37.330000,28.000000,454.000000,147.000000,366.000000,140.000000,2.985300,187500.000000 +-121.910000,37.320000,42.000000,1067.000000,256.000000,608.000000,280.000000,3.009600,180800.000000 +-121.910000,37.310000,46.000000,3052.000000,587.000000,1373.000000,590.000000,4.728700,340000.000000 +-121.910000,37.310000,28.000000,3104.000000,811.000000,1488.000000,754.000000,3.642900,332600.000000 +-121.910000,37.300000,43.000000,828.000000,151.000000,446.000000,145.000000,4.437500,327600.000000 +-121.910000,37.300000,31.000000,616.000000,131.000000,185.000000,107.000000,3.625000,265000.000000 +-121.910000,37.300000,31.000000,2095.000000,427.000000,829.000000,405.000000,3.656300,344700.000000 +-121.910000,37.290000,36.000000,945.000000,149.000000,371.000000,158.000000,5.626600,320500.000000 +-121.910000,37.290000,18.000000,3597.000000,664.000000,1321.000000,593.000000,5.307700,351400.000000 +-121.910000,37.280000,29.000000,5650.000000,817.000000,2098.000000,813.000000,6.428500,337300.000000 +-121.910000,37.270000,30.000000,4412.000000,862.000000,2168.000000,772.000000,5.006200,232000.000000 +-121.910000,37.260000,32.000000,3983.000000,876.000000,1989.000000,794.000000,3.562500,255200.000000 +-121.910000,37.260000,32.000000,3776.000000,620.000000,1790.000000,612.000000,5.467500,261100.000000 +-121.910000,37.260000,25.000000,4258.000000,719.000000,2290.000000,743.000000,5.146100,267200.000000 +-121.910000,37.240000,30.000000,2327.000000,419.000000,1114.000000,372.000000,4.727900,272000.000000 +-121.910000,37.240000,24.000000,5046.000000,1001.000000,2449.000000,968.000000,4.711800,274600.000000 +-121.910000,37.230000,27.000000,4866.000000,668.000000,1956.000000,659.000000,7.384300,405000.000000 +-121.910000,37.230000,22.000000,2614.000000,453.000000,1240.000000,462.000000,6.071200,271800.000000 +-121.910000,36.990000,23.000000,5675.000000,964.000000,2197.000000,880.000000,4.869300,322300.000000 +-121.910000,36.850000,22.000000,2442.000000,624.000000,1301.000000,290.000000,3.156300,300000.000000 +-121.910000,36.630000,42.000000,817.000000,194.000000,391.000000,193.000000,2.177600,279200.000000 +-121.910000,36.620000,52.000000,541.000000,157.000000,240.000000,145.000000,3.586500,290000.000000 +-121.910000,36.620000,52.000000,1431.000000,300.000000,657.000000,293.000000,3.286500,240100.000000 +-121.910000,36.620000,52.000000,1220.000000,267.000000,488.000000,265.000000,3.745400,243800.000000 +-121.910000,36.620000,30.000000,724.000000,167.000000,325.000000,155.000000,3.333300,247900.000000 +-121.910000,36.610000,30.000000,2755.000000,597.000000,1519.000000,554.000000,3.295200,234600.000000 +-121.910000,36.600000,35.000000,2605.000000,410.000000,1110.000000,406.000000,5.551900,329500.000000 +-121.910000,36.590000,31.000000,2034.000000,335.000000,966.000000,322.000000,4.696400,291300.000000 +-121.910000,36.590000,17.000000,5039.000000,833.000000,1678.000000,710.000000,6.232300,339100.000000 +-121.910000,36.550000,39.000000,5468.000000,834.000000,1782.000000,712.000000,5.724800,398800.000000 +-121.910000,36.420000,14.000000,1078.000000,261.000000,382.000000,171.000000,3.708300,210000.000000 +-121.920000,40.520000,13.000000,4581.000000,881.000000,1799.000000,734.000000,2.299300,99500.000000 +-121.920000,38.570000,10.000000,1320.000000,246.000000,898.000000,228.000000,1.932700,193800.000000 +-121.920000,38.370000,26.000000,2056.000000,413.000000,933.000000,367.000000,2.705100,193800.000000 
+-121.920000,38.340000,2.000000,7747.000000,1133.000000,3481.000000,1083.000000,6.111200,181000.000000 +-121.920000,38.030000,16.000000,2176.000000,464.000000,1410.000000,434.000000,3.543600,100200.000000 +-121.920000,38.020000,16.000000,1840.000000,355.000000,1288.000000,338.000000,4.206700,125000.000000 +-121.920000,38.010000,7.000000,1632.000000,248.000000,879.000000,262.000000,6.123700,166000.000000 +-121.920000,37.960000,14.000000,5332.000000,884.000000,2093.000000,839.000000,5.279800,237400.000000 +-121.920000,37.740000,8.000000,452.000000,51.000000,140.000000,43.000000,12.591500,432400.000000 +-121.920000,37.730000,24.000000,1407.000000,327.000000,501.000000,295.000000,2.482100,157200.000000 +-121.920000,37.720000,4.000000,7477.000000,1576.000000,2937.000000,1506.000000,5.143700,299400.000000 +-121.920000,37.690000,13.000000,3742.000000,555.000000,1590.000000,559.000000,7.316000,285400.000000 +-121.920000,37.680000,23.000000,1655.000000,223.000000,706.000000,219.000000,7.221100,291900.000000 +-121.920000,37.640000,46.000000,1280.000000,209.000000,512.000000,208.000000,5.140600,315600.000000 +-121.920000,37.530000,7.000000,28258.000000,3864.000000,12203.000000,3701.000000,8.404500,451100.000000 +-121.920000,37.490000,10.000000,7441.000000,1588.000000,3571.000000,1466.000000,5.164300,193100.000000 +-121.920000,37.480000,23.000000,4314.000000,676.000000,1972.000000,623.000000,5.381300,264400.000000 +-121.920000,37.470000,26.000000,2016.000000,322.000000,1105.000000,357.000000,6.087800,246900.000000 +-121.920000,37.450000,10.000000,3937.000000,1054.000000,2032.000000,1002.000000,3.261700,252200.000000 +-121.920000,37.360000,42.000000,198.000000,32.000000,158.000000,32.000000,3.156300,137500.000000 +-121.920000,37.340000,42.000000,2101.000000,524.000000,1212.000000,526.000000,3.638900,239200.000000 +-121.920000,37.340000,35.000000,357.000000,120.000000,377.000000,99.000000,3.013900,204200.000000 +-121.920000,37.330000,52.000000,2009.000000,338.000000,841.000000,338.000000,5.525900,295800.000000 +-121.920000,37.320000,39.000000,836.000000,254.000000,704.000000,272.000000,3.525600,192600.000000 +-121.920000,37.320000,31.000000,1902.000000,554.000000,1485.000000,494.000000,2.420700,165600.000000 +-121.920000,37.320000,28.000000,2089.000000,641.000000,1666.000000,587.000000,2.363300,198400.000000 +-121.920000,37.310000,34.000000,876.000000,150.000000,424.000000,163.000000,5.276900,241100.000000 +-121.920000,37.310000,13.000000,6035.000000,1551.000000,2946.000000,1481.000000,4.052400,213900.000000 +-121.920000,37.300000,35.000000,1335.000000,296.000000,635.000000,296.000000,3.605300,345800.000000 +-121.920000,37.290000,35.000000,2189.000000,307.000000,800.000000,320.000000,7.665900,426900.000000 +-121.920000,37.290000,34.000000,943.000000,135.000000,378.000000,139.000000,5.176500,344600.000000 +-121.920000,37.290000,32.000000,1260.000000,199.000000,560.000000,207.000000,6.585800,346700.000000 +-121.920000,37.280000,27.000000,3028.000000,486.000000,1284.000000,498.000000,4.583300,308800.000000 +-121.920000,37.280000,26.000000,6201.000000,783.000000,2381.000000,819.000000,7.981900,397000.000000 +-121.920000,37.270000,33.000000,3280.000000,569.000000,1583.000000,559.000000,4.562500,253500.000000 +-121.920000,37.270000,29.000000,5536.000000,862.000000,2651.000000,881.000000,5.635800,282100.000000 +-121.920000,37.260000,33.000000,1306.000000,259.000000,762.000000,237.000000,4.520800,230700.000000 +-121.920000,37.250000,36.000000,3874.000000,656.000000,1826.000000,639.000000,4.966200,258500.000000 
+-121.920000,37.250000,34.000000,2231.000000,360.000000,1035.000000,365.000000,4.791700,243200.000000 +-121.920000,37.240000,27.000000,1265.000000,216.000000,660.000000,232.000000,5.391100,281200.000000 +-121.920000,37.240000,26.000000,6777.000000,1051.000000,3319.000000,1061.000000,6.366300,279400.000000 +-121.920000,36.620000,52.000000,974.000000,190.000000,403.000000,181.000000,4.328100,236500.000000 +-121.920000,36.620000,52.000000,867.000000,199.000000,391.000000,187.000000,2.671300,234600.000000 +-121.920000,36.620000,52.000000,728.000000,161.000000,313.000000,142.000000,3.432700,254500.000000 +-121.920000,36.620000,52.000000,1410.000000,303.000000,578.000000,285.000000,2.562500,235400.000000 +-121.920000,36.620000,52.000000,1001.000000,191.000000,425.000000,184.000000,3.761400,241700.000000 +-121.920000,36.620000,47.000000,1811.000000,401.000000,948.000000,375.000000,3.037900,249300.000000 +-121.920000,36.610000,29.000000,3735.000000,808.000000,1873.000000,757.000000,3.154300,253800.000000 +-121.920000,36.610000,27.000000,3044.000000,661.000000,1229.000000,618.000000,3.135900,268000.000000 +-121.920000,36.610000,21.000000,1242.000000,340.000000,834.000000,362.000000,2.492200,243800.000000 +-121.920000,36.560000,40.000000,2124.000000,449.000000,643.000000,341.000000,5.516400,369100.000000 +-121.920000,36.560000,39.000000,2144.000000,538.000000,749.000000,419.000000,2.703900,364000.000000 +-121.920000,36.550000,52.000000,2616.000000,483.000000,582.000000,313.000000,3.275000,500001.000000 +-121.920000,36.550000,44.000000,3494.000000,635.000000,693.000000,415.000000,3.600000,452800.000000 +-121.920000,36.550000,37.000000,2190.000000,419.000000,490.000000,300.000000,3.785200,465800.000000 +-121.920000,36.540000,33.000000,5323.000000,887.000000,1670.000000,740.000000,3.979200,468000.000000 +-121.930000,41.860000,28.000000,4225.000000,835.000000,1908.000000,686.000000,1.740000,44000.000000 +-121.930000,38.020000,13.000000,1524.000000,286.000000,940.000000,308.000000,5.133700,154800.000000 +-121.930000,37.930000,16.000000,2169.000000,262.000000,877.000000,245.000000,6.604900,312600.000000 +-121.930000,37.890000,13.000000,2085.000000,292.000000,852.000000,264.000000,7.344500,366700.000000 +-121.930000,37.780000,2.000000,227.000000,35.000000,114.000000,49.000000,3.159100,434700.000000 +-121.930000,37.760000,5.000000,2255.000000,269.000000,876.000000,258.000000,10.334500,461400.000000 +-121.930000,37.740000,16.000000,3326.000000,419.000000,1272.000000,402.000000,6.880600,343500.000000 +-121.930000,37.730000,23.000000,2564.000000,347.000000,1043.000000,351.000000,6.204800,275000.000000 +-121.930000,37.710000,26.000000,4822.000000,845.000000,2288.000000,805.000000,4.228100,206000.000000 +-121.930000,37.700000,3.000000,2456.000000,582.000000,793.000000,456.000000,4.408700,225600.000000 +-121.930000,37.660000,24.000000,3166.000000,424.000000,1081.000000,400.000000,8.333700,500001.000000 +-121.930000,37.540000,25.000000,1354.000000,192.000000,596.000000,220.000000,6.629000,352400.000000 +-121.930000,37.530000,27.000000,5532.000000,973.000000,2855.000000,960.000000,4.747800,243500.000000 +-121.930000,37.490000,5.000000,1150.000000,311.000000,648.000000,245.000000,3.571400,300000.000000 +-121.930000,37.400000,34.000000,148.000000,28.000000,132.000000,13.000000,3.375000,67500.000000 +-121.930000,37.350000,36.000000,1823.000000,410.000000,1589.000000,387.000000,3.106500,234100.000000 +-121.930000,37.330000,44.000000,2142.000000,358.000000,846.000000,375.000000,5.427300,421000.000000 
+-121.930000,37.320000,52.000000,1460.000000,492.000000,1165.000000,455.000000,2.583300,167500.000000 +-121.930000,37.320000,50.000000,1135.000000,215.000000,500.000000,207.000000,4.261400,211300.000000 +-121.930000,37.310000,29.000000,1377.000000,430.000000,969.000000,399.000000,2.657300,252800.000000 +-121.930000,37.310000,26.000000,2182.000000,704.000000,1638.000000,704.000000,2.898100,229800.000000 +-121.930000,37.300000,16.000000,2111.000000,485.000000,1285.000000,499.000000,5.047700,292500.000000 +-121.930000,37.300000,16.000000,1111.000000,226.000000,317.000000,199.000000,2.715300,233300.000000 +-121.930000,37.300000,14.000000,6277.000000,1742.000000,3025.000000,1630.000000,4.065300,234200.000000 +-121.930000,37.290000,36.000000,2241.000000,437.000000,989.000000,442.000000,3.962500,288200.000000 +-121.930000,37.280000,34.000000,2422.000000,370.000000,1010.000000,395.000000,5.649400,376200.000000 +-121.930000,37.270000,35.000000,1855.000000,345.000000,985.000000,329.000000,6.019600,255100.000000 +-121.930000,37.270000,30.000000,2862.000000,544.000000,1387.000000,542.000000,5.110400,278100.000000 +-121.930000,37.260000,39.000000,1103.000000,175.000000,446.000000,163.000000,2.812500,291300.000000 +-121.930000,37.250000,32.000000,1555.000000,287.000000,779.000000,284.000000,6.035800,260100.000000 +-121.930000,37.250000,21.000000,1354.000000,289.000000,639.000000,273.000000,4.533300,234200.000000 +-121.930000,37.240000,26.000000,2574.000000,414.000000,1096.000000,428.000000,6.073800,335900.000000 +-121.930000,37.220000,21.000000,4872.000000,594.000000,1811.000000,560.000000,9.383400,500001.000000 +-121.930000,37.130000,37.000000,1150.000000,203.000000,511.000000,179.000000,5.741500,398500.000000 +-121.930000,37.050000,14.000000,679.000000,108.000000,306.000000,113.000000,6.421400,340600.000000 +-121.930000,37.040000,36.000000,1522.000000,230.000000,677.000000,206.000000,5.864200,363500.000000 +-121.930000,36.990000,19.000000,6356.000000,1100.000000,2954.000000,1070.000000,4.732500,283500.000000 +-121.930000,36.630000,28.000000,3983.000000,852.000000,1582.000000,778.000000,3.514700,313900.000000 +-121.930000,36.620000,39.000000,869.000000,173.000000,406.000000,165.000000,4.031300,253800.000000 +-121.930000,36.620000,34.000000,2351.000000,440.000000,1063.000000,428.000000,3.725000,278000.000000 +-121.930000,36.600000,33.000000,3455.000000,683.000000,1704.000000,663.000000,4.015400,225700.000000 +-121.930000,36.590000,25.000000,2201.000000,353.000000,622.000000,295.000000,5.062100,386500.000000 +-121.940000,39.450000,39.000000,844.000000,161.000000,535.000000,165.000000,1.832000,70500.000000 +-121.940000,38.890000,15.000000,1462.000000,314.000000,774.000000,271.000000,2.547800,91700.000000 +-121.940000,38.410000,15.000000,1263.000000,211.000000,665.000000,208.000000,4.500000,260900.000000 +-121.940000,38.380000,25.000000,182.000000,48.000000,71.000000,52.000000,1.020800,78600.000000 +-121.940000,38.370000,17.000000,7973.000000,1591.000000,2899.000000,1502.000000,2.835700,120100.000000 +-121.940000,38.270000,35.000000,10869.000000,2226.000000,9879.000000,2152.000000,2.568100,81300.000000 +-121.940000,38.030000,27.000000,1654.000000,478.000000,1141.000000,420.000000,1.487100,87100.000000 +-121.940000,38.020000,29.000000,5765.000000,1170.000000,3266.000000,1131.000000,2.790700,113900.000000 +-121.940000,37.950000,18.000000,2541.000000,355.000000,986.000000,346.000000,7.197800,288000.000000 +-121.940000,37.930000,16.000000,3421.000000,427.000000,1341.000000,428.000000,7.569500,320400.000000 
+-121.940000,37.830000,11.000000,2836.000000,373.000000,959.000000,335.000000,10.581500,500001.000000 +-121.940000,37.800000,8.000000,11336.000000,1657.000000,4089.000000,1555.000000,7.828700,369200.000000 +-121.940000,37.760000,4.000000,6875.000000,1439.000000,2889.000000,1307.000000,4.693200,356100.000000 +-121.940000,37.750000,17.000000,2559.000000,370.000000,1238.000000,377.000000,6.278100,269800.000000 +-121.940000,37.730000,22.000000,1980.000000,291.000000,861.000000,290.000000,6.272600,258200.000000 +-121.940000,37.710000,15.000000,6473.000000,1027.000000,2484.000000,970.000000,5.014300,271100.000000 +-121.940000,37.560000,15.000000,5674.000000,748.000000,2412.000000,714.000000,8.399600,442900.000000 +-121.940000,37.540000,31.000000,2537.000000,382.000000,1067.000000,410.000000,6.759900,356000.000000 +-121.940000,37.540000,27.000000,3715.000000,526.000000,1631.000000,538.000000,6.217900,305300.000000 +-121.940000,37.530000,33.000000,2095.000000,342.000000,941.000000,304.000000,5.761000,259600.000000 +-121.940000,37.420000,16.000000,3936.000000,788.000000,1910.000000,769.000000,4.704900,112500.000000 +-121.940000,37.350000,52.000000,906.000000,227.000000,1662.000000,219.000000,3.166700,231600.000000 +-121.940000,37.350000,18.000000,1922.000000,561.000000,1096.000000,545.000000,2.371300,244000.000000 +-121.940000,37.340000,36.000000,3142.000000,632.000000,1372.000000,560.000000,5.017500,246100.000000 +-121.940000,37.340000,29.000000,3377.000000,853.000000,1674.000000,792.000000,3.423300,229300.000000 +-121.940000,37.330000,37.000000,1822.000000,329.000000,845.000000,348.000000,4.750000,251100.000000 +-121.940000,37.330000,36.000000,1893.000000,359.000000,797.000000,360.000000,3.681800,257600.000000 +-121.940000,37.330000,34.000000,1809.000000,317.000000,863.000000,302.000000,4.300000,330500.000000 +-121.940000,37.320000,46.000000,2451.000000,472.000000,1163.000000,448.000000,4.851900,225800.000000 +-121.940000,37.310000,5.000000,2364.000000,578.000000,1102.000000,502.000000,5.264200,246400.000000 +-121.940000,37.310000,30.000000,4238.000000,1010.000000,1914.000000,972.000000,3.763200,307000.000000 +-121.940000,37.310000,30.000000,1680.000000,312.000000,858.000000,310.000000,4.047400,280500.000000 +-121.940000,37.300000,30.000000,1758.000000,248.000000,814.000000,256.000000,6.623000,332500.000000 +-121.940000,37.300000,26.000000,4348.000000,814.000000,2347.000000,810.000000,4.727500,293000.000000 +-121.940000,37.300000,25.000000,1455.000000,370.000000,734.000000,331.000000,3.272700,262500.000000 +-121.940000,37.290000,22.000000,2593.000000,637.000000,1249.000000,623.000000,3.750000,212500.000000 +-121.940000,37.290000,20.000000,710.000000,188.000000,363.000000,176.000000,4.096200,214100.000000 +-121.940000,37.280000,27.000000,2859.000000,464.000000,1144.000000,430.000000,5.082200,327500.000000 +-121.940000,37.270000,39.000000,1030.000000,191.000000,537.000000,175.000000,3.926500,236900.000000 +-121.940000,37.260000,43.000000,2104.000000,388.000000,1137.000000,403.000000,4.923600,238000.000000 +-121.940000,37.260000,21.000000,3843.000000,716.000000,1850.000000,705.000000,4.675800,264200.000000 +-121.940000,37.250000,16.000000,3942.000000,749.000000,1894.000000,737.000000,5.289400,332800.000000 +-121.940000,37.240000,35.000000,1484.000000,244.000000,664.000000,238.000000,4.675000,245300.000000 +-121.940000,37.240000,26.000000,2561.000000,388.000000,1165.000000,393.000000,7.352200,363800.000000 
+-121.940000,37.240000,19.000000,1741.000000,294.000000,632.000000,279.000000,5.594400,290500.000000 +-121.940000,37.000000,32.000000,2210.000000,426.000000,1082.000000,396.000000,4.158700,315200.000000 +-121.940000,36.990000,11.000000,4571.000000,924.000000,2004.000000,847.000000,4.289800,221700.000000 +-121.940000,36.980000,21.000000,3520.000000,831.000000,1486.000000,753.000000,3.090500,264300.000000 +-121.940000,36.970000,31.000000,1738.000000,422.000000,746.000000,355.000000,2.517200,330800.000000 +-121.940000,36.570000,28.000000,3153.000000,409.000000,569.000000,271.000000,14.411300,500001.000000 +-121.950000,38.650000,19.000000,1265.000000,228.000000,755.000000,218.000000,3.347200,69800.000000 +-121.950000,38.430000,19.000000,3011.000000,551.000000,1665.000000,535.000000,5.153400,232800.000000 +-121.950000,38.340000,9.000000,4999.000000,874.000000,2687.000000,817.000000,4.232400,142100.000000 +-121.950000,38.030000,5.000000,5526.000000,1102.000000,3207.000000,1012.000000,4.076700,143100.000000 +-121.950000,38.020000,9.000000,3360.000000,833.000000,2041.000000,810.000000,2.101300,100700.000000 +-121.950000,37.960000,18.000000,2739.000000,393.000000,1072.000000,374.000000,6.143600,259500.000000 +-121.950000,37.940000,27.000000,1469.000000,216.000000,578.000000,219.000000,5.934600,253600.000000 +-121.950000,37.940000,21.000000,3153.000000,411.000000,1318.000000,431.000000,6.864200,285400.000000 +-121.950000,37.810000,5.000000,7178.000000,898.000000,2823.000000,907.000000,9.077600,450400.000000 +-121.950000,37.780000,4.000000,14652.000000,2826.000000,5613.000000,2579.000000,6.394200,356700.000000 +-121.950000,37.740000,19.000000,5721.000000,837.000000,2653.000000,813.000000,6.263100,266000.000000 +-121.950000,37.740000,19.000000,1127.000000,170.000000,518.000000,167.000000,6.332500,250000.000000 +-121.950000,37.550000,21.000000,10687.000000,1540.000000,4552.000000,1520.000000,6.647800,333400.000000 +-121.950000,37.540000,29.000000,3517.000000,645.000000,1724.000000,585.000000,4.664100,248900.000000 +-121.950000,37.520000,33.000000,3994.000000,764.000000,2721.000000,763.000000,5.230800,196900.000000 +-121.950000,37.410000,13.000000,2164.000000,412.000000,1087.000000,411.000000,4.762500,137500.000000 +-121.950000,37.390000,24.000000,5230.000000,934.000000,3795.000000,970.000000,5.422800,264100.000000 +-121.950000,37.380000,22.000000,765.000000,198.000000,390.000000,176.000000,3.181200,87500.000000 +-121.950000,37.360000,27.000000,3236.000000,832.000000,2230.000000,798.000000,3.562500,208600.000000 +-121.950000,37.360000,25.000000,3472.000000,956.000000,2267.000000,944.000000,2.772700,235600.000000 +-121.950000,37.350000,48.000000,1246.000000,294.000000,697.000000,284.000000,3.611800,235500.000000 +-121.950000,37.350000,42.000000,1421.000000,330.000000,659.000000,303.000000,3.333300,237900.000000 +-121.950000,37.350000,36.000000,832.000000,211.000000,545.000000,211.000000,3.281300,244400.000000 +-121.950000,37.340000,25.000000,5236.000000,1320.000000,2529.000000,1213.000000,3.170200,256100.000000 +-121.950000,37.330000,36.000000,1683.000000,286.000000,740.000000,324.000000,4.760400,294700.000000 +-121.950000,37.330000,31.000000,1866.000000,465.000000,821.000000,447.000000,2.354700,275900.000000 +-121.950000,37.320000,39.000000,1164.000000,199.000000,619.000000,231.000000,4.630400,263200.000000 +-121.950000,37.320000,33.000000,726.000000,168.000000,351.000000,147.000000,3.145800,270500.000000 
+-121.950000,37.310000,27.000000,4140.000000,969.000000,2135.000000,893.000000,3.629200,264600.000000 +-121.950000,37.300000,25.000000,5641.000000,1517.000000,3786.000000,1410.000000,3.395800,267500.000000 +-121.950000,37.300000,21.000000,4193.000000,1068.000000,2487.000000,1011.000000,3.718800,293000.000000 +-121.950000,37.290000,9.000000,1503.000000,381.000000,715.000000,349.000000,4.637100,234300.000000 +-121.950000,37.290000,30.000000,3734.000000,813.000000,1834.000000,824.000000,3.450500,260000.000000 +-121.950000,37.280000,52.000000,777.000000,148.000000,362.000000,144.000000,4.020800,262500.000000 +-121.950000,37.280000,19.000000,7027.000000,1847.000000,3759.000000,1753.000000,3.150900,242900.000000 +-121.950000,37.270000,17.000000,1330.000000,271.000000,408.000000,258.000000,1.717100,181300.000000 +-121.950000,37.260000,10.000000,3611.000000,803.000000,1599.000000,716.000000,5.200000,248700.000000 +-121.950000,37.250000,34.000000,2906.000000,544.000000,1282.000000,522.000000,5.512700,268200.000000 +-121.950000,37.250000,30.000000,3298.000000,634.000000,1532.000000,602.000000,5.086300,332000.000000 +-121.950000,37.240000,37.000000,3109.000000,541.000000,1566.000000,544.000000,6.023500,413500.000000 +-121.950000,37.240000,32.000000,1382.000000,239.000000,705.000000,251.000000,6.095700,405400.000000 +-121.950000,37.210000,20.000000,2345.000000,322.000000,890.000000,276.000000,10.018700,500001.000000 +-121.950000,37.110000,21.000000,2387.000000,357.000000,913.000000,341.000000,7.736000,397700.000000 +-121.950000,36.610000,31.000000,1736.000000,250.000000,497.000000,170.000000,6.383500,407800.000000 +-121.950000,36.600000,32.000000,3152.000000,504.000000,793.000000,426.000000,7.119800,469900.000000 +-121.950000,36.590000,22.000000,3553.000000,530.000000,1108.000000,441.000000,5.850500,417100.000000 +-121.960000,39.300000,39.000000,701.000000,130.000000,271.000000,89.000000,2.184500,112500.000000 +-121.960000,38.540000,6.000000,1485.000000,318.000000,894.000000,308.000000,3.222200,139600.000000 +-121.960000,38.360000,11.000000,3208.000000,790.000000,1772.000000,694.000000,2.743400,218800.000000 +-121.960000,38.350000,20.000000,1415.000000,266.000000,667.000000,250.000000,4.093800,117300.000000 +-121.960000,38.340000,7.000000,3292.000000,698.000000,1911.000000,702.000000,3.890000,140300.000000 +-121.960000,38.340000,15.000000,2857.000000,373.000000,1325.000000,359.000000,6.025200,151700.000000 +-121.960000,38.340000,14.000000,3035.000000,680.000000,1597.000000,663.000000,3.603600,143500.000000 +-121.960000,38.330000,3.000000,7985.000000,1257.000000,3664.000000,1215.000000,4.976000,158300.000000 +-121.960000,38.320000,12.000000,5127.000000,998.000000,2749.000000,976.000000,4.045800,130600.000000 +-121.960000,38.020000,35.000000,2691.000000,542.000000,1409.000000,505.000000,3.016000,95300.000000 +-121.960000,37.990000,2.000000,3129.000000,707.000000,1606.000000,698.000000,2.959100,210100.000000 +-121.960000,37.960000,28.000000,1838.000000,273.000000,899.000000,270.000000,5.214500,229200.000000 +-121.960000,37.950000,7.000000,3418.000000,740.000000,1583.000000,676.000000,3.613300,196100.000000 +-121.960000,37.950000,25.000000,4026.000000,791.000000,1850.000000,709.000000,4.148300,181200.000000 +-121.960000,37.950000,13.000000,3216.000000,765.000000,1627.000000,715.000000,3.085900,167800.000000 +-121.960000,37.940000,26.000000,3084.000000,505.000000,1557.000000,501.000000,5.158200,194700.000000 
+-121.960000,37.850000,10.000000,3209.000000,379.000000,1199.000000,392.000000,12.247800,500001.000000 +-121.960000,37.840000,29.000000,7479.000000,977.000000,2744.000000,943.000000,7.513900,398200.000000 +-121.960000,37.810000,12.000000,6488.000000,778.000000,2404.000000,765.000000,8.318800,403400.000000 +-121.960000,37.760000,8.000000,3865.000000,463.000000,1548.000000,432.000000,9.703700,425100.000000 +-121.960000,37.710000,6.000000,8072.000000,1050.000000,3386.000000,1062.000000,7.249400,336500.000000 +-121.960000,37.540000,14.000000,5106.000000,1207.000000,2738.000000,1108.000000,3.990900,236000.000000 +-121.960000,37.530000,28.000000,2949.000000,529.000000,1538.000000,545.000000,4.961500,228000.000000 +-121.960000,37.530000,23.000000,2215.000000,475.000000,1278.000000,492.000000,4.295500,218800.000000 +-121.960000,37.530000,18.000000,2375.000000,652.000000,1252.000000,586.000000,2.619800,235900.000000 +-121.960000,37.520000,26.000000,4211.000000,741.000000,2352.000000,734.000000,5.239600,223900.000000 +-121.960000,37.510000,22.000000,5811.000000,1125.000000,3215.000000,1086.000000,4.410700,223500.000000 +-121.960000,37.430000,18.000000,2514.000000,578.000000,2205.000000,545.000000,3.385900,158000.000000 +-121.960000,37.410000,17.000000,3208.000000,617.000000,2286.000000,602.000000,5.293700,238000.000000 +-121.960000,37.390000,20.000000,1032.000000,229.000000,658.000000,238.000000,4.506200,219300.000000 +-121.960000,37.360000,33.000000,2581.000000,623.000000,1598.000000,628.000000,3.519900,261400.000000 +-121.960000,37.360000,16.000000,5040.000000,1325.000000,3150.000000,1196.000000,4.283700,264500.000000 +-121.960000,37.350000,37.000000,1755.000000,325.000000,699.000000,321.000000,3.925000,251300.000000 +-121.960000,37.350000,32.000000,1484.000000,274.000000,673.000000,272.000000,5.201900,279900.000000 +-121.960000,37.340000,42.000000,2001.000000,402.000000,942.000000,375.000000,4.445300,255400.000000 +-121.960000,37.340000,34.000000,1461.000000,299.000000,739.000000,276.000000,3.437500,252600.000000 +-121.960000,37.330000,26.000000,3269.000000,788.000000,1427.000000,696.000000,4.213600,288300.000000 +-121.960000,37.320000,11.000000,1711.000000,493.000000,1094.000000,543.000000,3.730000,227700.000000 +-121.960000,37.310000,31.000000,3890.000000,711.000000,1898.000000,717.000000,5.253400,290900.000000 +-121.960000,37.310000,26.000000,4310.000000,678.000000,1819.000000,686.000000,7.046900,365500.000000 +-121.960000,37.300000,23.000000,4040.000000,843.000000,2181.000000,843.000000,4.040300,303400.000000 +-121.960000,37.290000,24.000000,1240.000000,263.000000,690.000000,276.000000,5.000000,283000.000000 +-121.960000,37.280000,33.000000,1940.000000,327.000000,877.000000,314.000000,5.438600,280400.000000 +-121.960000,37.280000,28.000000,5018.000000,1066.000000,2846.000000,998.000000,4.017400,273900.000000 +-121.960000,37.270000,31.000000,3347.000000,589.000000,1566.000000,597.000000,5.515100,286800.000000 +-121.960000,37.270000,22.000000,6114.000000,1211.000000,2983.000000,1163.000000,5.253300,269100.000000 +-121.960000,37.260000,22.000000,1408.000000,351.000000,636.000000,294.000000,1.854200,333300.000000 +-121.960000,37.250000,35.000000,1018.000000,169.000000,484.000000,174.000000,6.164800,371900.000000 +-121.960000,37.250000,19.000000,1858.000000,359.000000,790.000000,347.000000,4.515600,339300.000000 +-121.960000,37.240000,26.000000,3032.000000,605.000000,1208.000000,562.000000,5.468300,430900.000000 
+-121.960000,37.230000,36.000000,4423.000000,632.000000,1719.000000,608.000000,7.840700,476400.000000 +-121.960000,37.220000,35.000000,4709.000000,723.000000,1866.000000,694.000000,8.492000,500001.000000 +-121.960000,37.130000,26.000000,50.000000,5.000000,17.000000,4.000000,15.000100,400000.000000 +-121.960000,37.100000,20.000000,922.000000,155.000000,361.000000,135.000000,6.361700,331500.000000 +-121.960000,37.060000,16.000000,1321.000000,224.000000,650.000000,206.000000,6.325800,390000.000000 +-121.960000,37.030000,17.000000,1343.000000,203.000000,511.000000,185.000000,4.625000,386400.000000 +-121.960000,36.980000,16.000000,4907.000000,1117.000000,2265.000000,1048.000000,2.675700,229200.000000 +-121.960000,36.970000,23.000000,4324.000000,1034.000000,1844.000000,875.000000,3.077700,263800.000000 +-121.960000,36.880000,37.000000,2846.000000,553.000000,939.000000,433.000000,4.746800,294400.000000 +-121.970000,39.790000,16.000000,1453.000000,299.000000,904.000000,286.000000,3.573500,89600.000000 +-121.970000,38.350000,17.000000,5678.000000,1116.000000,3182.000000,1135.000000,3.738800,122000.000000 +-121.970000,38.340000,16.000000,2331.000000,450.000000,1074.000000,400.000000,4.032900,126800.000000 +-121.970000,38.340000,11.000000,1500.000000,319.000000,899.000000,304.000000,4.556800,127200.000000 +-121.970000,38.030000,17.000000,3685.000000,685.000000,1939.000000,649.000000,3.704300,139800.000000 +-121.970000,37.970000,27.000000,1691.000000,289.000000,807.000000,296.000000,6.116800,210500.000000 +-121.970000,37.970000,24.000000,1330.000000,183.000000,656.000000,205.000000,5.009200,244100.000000 +-121.970000,37.960000,28.000000,1433.000000,290.000000,877.000000,313.000000,4.789100,184800.000000 +-121.970000,37.950000,8.000000,4253.000000,709.000000,1883.000000,662.000000,5.431000,246700.000000 +-121.970000,37.930000,4.000000,3241.000000,464.000000,1552.000000,494.000000,6.613400,307000.000000 +-121.970000,37.870000,4.000000,1029.000000,126.000000,416.000000,122.000000,13.488300,500001.000000 +-121.970000,37.800000,17.000000,3279.000000,418.000000,1222.000000,381.000000,7.916800,356000.000000 +-121.970000,37.790000,17.000000,5688.000000,824.000000,2111.000000,773.000000,6.613100,312500.000000 +-121.970000,37.790000,16.000000,3873.000000,484.000000,1451.000000,501.000000,6.785700,341300.000000 +-121.970000,37.770000,13.000000,7241.000000,1007.000000,3221.000000,947.000000,7.221600,324600.000000 +-121.970000,37.640000,32.000000,1283.000000,194.000000,485.000000,171.000000,6.057400,431000.000000 +-121.970000,37.570000,21.000000,4342.000000,783.000000,2172.000000,789.000000,4.614600,247600.000000 +-121.970000,37.560000,13.000000,8918.000000,1823.000000,4518.000000,1772.000000,4.805200,254000.000000 +-121.970000,37.550000,17.000000,4924.000000,1247.000000,3080.000000,1182.000000,3.168000,189400.000000 +-121.970000,37.540000,31.000000,1949.000000,344.000000,986.000000,322.000000,4.634900,196200.000000 +-121.970000,37.540000,28.000000,2312.000000,496.000000,1344.000000,467.000000,4.713500,203200.000000 +-121.970000,37.530000,35.000000,2277.000000,420.000000,1353.000000,413.000000,4.750000,197000.000000 +-121.970000,37.530000,26.000000,2506.000000,387.000000,1273.000000,406.000000,5.429900,236400.000000 +-121.970000,37.520000,26.000000,3761.000000,623.000000,1776.000000,613.000000,4.531700,232600.000000 +-121.970000,37.520000,23.000000,4925.000000,948.000000,2530.000000,894.000000,5.082400,230900.000000 +-121.970000,37.440000,17.000000,127.000000,28.000000,219.000000,22.000000,4.517900,112500.000000 
+-121.970000,37.400000,17.000000,2937.000000,558.000000,1662.000000,533.000000,5.879200,255500.000000 +-121.970000,37.360000,24.000000,4841.000000,894.000000,2656.000000,920.000000,6.057300,254500.000000 +-121.970000,37.350000,36.000000,815.000000,126.000000,353.000000,122.000000,6.319100,258300.000000 +-121.970000,37.350000,35.000000,1880.000000,370.000000,926.000000,321.000000,4.227300,269900.000000 +-121.970000,37.350000,35.000000,1249.000000,232.000000,556.000000,247.000000,3.925000,287100.000000 +-121.970000,37.350000,30.000000,1955.000000,388.000000,999.000000,386.000000,4.632800,287100.000000 +-121.970000,37.340000,33.000000,3162.000000,722.000000,1553.000000,686.000000,3.668200,266100.000000 +-121.970000,37.330000,21.000000,8275.000000,1566.000000,3636.000000,1524.000000,5.150600,302100.000000 +-121.970000,37.310000,25.000000,5775.000000,1225.000000,3580.000000,1138.000000,3.918700,314900.000000 +-121.970000,37.310000,21.000000,7628.000000,2166.000000,3637.000000,1749.000000,3.640100,267500.000000 +-121.970000,37.300000,31.000000,3340.000000,735.000000,1891.000000,686.000000,4.854200,275000.000000 +-121.970000,37.300000,25.000000,5463.000000,1351.000000,2758.000000,1310.000000,3.007900,277300.000000 +-121.970000,37.290000,29.000000,2721.000000,682.000000,1602.000000,646.000000,3.337000,265300.000000 +-121.970000,37.280000,27.000000,2427.000000,403.000000,1301.000000,438.000000,5.038500,277300.000000 +-121.970000,37.280000,25.000000,4707.000000,695.000000,1995.000000,642.000000,6.643700,296100.000000 +-121.970000,37.260000,19.000000,2174.000000,454.000000,998.000000,426.000000,4.682700,255100.000000 +-121.970000,37.250000,32.000000,2892.000000,496.000000,1193.000000,492.000000,6.131000,367800.000000 +-121.970000,37.250000,21.000000,2775.000000,389.000000,856.000000,350.000000,7.913500,496400.000000 +-121.970000,37.230000,22.000000,2781.000000,523.000000,1291.000000,516.000000,4.606500,445900.000000 +-121.970000,37.010000,21.000000,2073.000000,357.000000,1044.000000,351.000000,4.568200,371600.000000 +-121.970000,37.000000,25.000000,990.000000,166.000000,522.000000,185.000000,4.826900,272900.000000 +-121.970000,36.980000,17.000000,2813.000000,497.000000,1337.000000,477.000000,3.708300,252400.000000 +-121.970000,36.970000,24.000000,3665.000000,870.000000,1954.000000,833.000000,2.803600,228500.000000 +-121.970000,36.970000,15.000000,2849.000000,668.000000,1546.000000,582.000000,2.758700,228600.000000 +-121.970000,36.960000,27.000000,4001.000000,999.000000,1808.000000,945.000000,2.561000,234600.000000 +-121.980000,38.520000,27.000000,3044.000000,565.000000,1583.000000,514.000000,2.798900,126700.000000 +-121.980000,38.370000,21.000000,3027.000000,675.000000,2018.000000,642.000000,2.843800,111500.000000 +-121.980000,38.360000,33.000000,1083.000000,217.000000,562.000000,203.000000,2.433000,101700.000000 +-121.980000,38.360000,30.000000,140.000000,35.000000,103.000000,35.000000,4.163000,112500.000000 +-121.980000,38.360000,24.000000,2434.000000,630.000000,1538.000000,574.000000,2.106700,101100.000000 +-121.980000,38.350000,16.000000,1697.000000,267.000000,832.000000,277.000000,4.437500,132600.000000 +-121.980000,38.340000,18.000000,3876.000000,916.000000,2386.000000,867.000000,2.593800,129500.000000 +-121.980000,38.340000,13.000000,3616.000000,672.000000,2022.000000,652.000000,4.053600,134800.000000 +-121.980000,38.290000,4.000000,8778.000000,1291.000000,4010.000000,1188.000000,5.439900,187100.000000 
+-121.980000,38.050000,31.000000,2810.000000,518.000000,1640.000000,503.000000,3.366100,98500.000000 +-121.980000,37.970000,26.000000,2738.000000,428.000000,1316.000000,430.000000,5.244200,213200.000000 +-121.980000,37.960000,22.000000,2987.000000,563.000000,1420.000000,540.000000,3.650000,204100.000000 +-121.980000,37.960000,12.000000,5048.000000,1122.000000,2209.000000,1014.000000,3.157300,126700.000000 +-121.980000,37.950000,16.000000,2984.000000,406.000000,1317.000000,397.000000,6.782100,265900.000000 +-121.980000,37.950000,14.000000,6290.000000,854.000000,2724.000000,820.000000,6.737100,267400.000000 +-121.980000,37.820000,18.000000,9117.000000,1248.000000,3280.000000,1167.000000,8.003000,351300.000000 +-121.980000,37.810000,18.000000,2903.000000,387.000000,1127.000000,372.000000,5.592100,359100.000000 +-121.980000,37.800000,16.000000,2498.000000,330.000000,1027.000000,343.000000,8.155000,343700.000000 +-121.980000,37.740000,8.000000,2865.000000,389.000000,1376.000000,417.000000,7.939300,399300.000000 +-121.980000,37.580000,20.000000,4126.000000,1031.000000,2079.000000,975.000000,3.683200,216900.000000 +-121.980000,37.540000,17.000000,5133.000000,1375.000000,3386.000000,1339.000000,3.132600,220800.000000 +-121.980000,37.530000,28.000000,2829.000000,566.000000,1610.000000,540.000000,4.600000,223200.000000 +-121.980000,37.530000,26.000000,3179.000000,703.000000,2142.000000,639.000000,4.194700,222700.000000 +-121.980000,37.370000,36.000000,1651.000000,344.000000,1062.000000,331.000000,4.575000,215400.000000 +-121.980000,37.360000,35.000000,1440.000000,267.000000,743.000000,259.000000,5.086600,254600.000000 +-121.980000,37.360000,35.000000,1293.000000,223.000000,701.000000,216.000000,7.854300,281900.000000 +-121.980000,37.360000,33.000000,1582.000000,272.000000,809.000000,267.000000,5.705900,287200.000000 +-121.980000,37.360000,32.000000,1199.000000,229.000000,814.000000,238.000000,4.671900,252100.000000 +-121.980000,37.350000,41.000000,1150.000000,249.000000,729.000000,260.000000,3.549100,261100.000000 +-121.980000,37.350000,36.000000,1054.000000,193.000000,546.000000,187.000000,4.562500,240000.000000 +-121.980000,37.340000,33.000000,3570.000000,776.000000,1922.000000,761.000000,4.956200,238700.000000 +-121.980000,37.340000,18.000000,6649.000000,1712.000000,3604.000000,1651.000000,4.536800,307400.000000 +-121.980000,37.330000,35.000000,1907.000000,326.000000,912.000000,313.000000,5.956700,294300.000000 +-121.980000,37.330000,30.000000,3742.000000,633.000000,1721.000000,631.000000,6.138800,302400.000000 +-121.980000,37.330000,30.000000,2645.000000,462.000000,1506.000000,480.000000,6.371600,330500.000000 +-121.980000,37.320000,17.000000,9789.000000,2552.000000,4748.000000,2206.000000,4.253100,279800.000000 +-121.980000,37.310000,34.000000,2034.000000,359.000000,1016.000000,375.000000,5.812700,288300.000000 +-121.980000,37.310000,32.000000,2248.000000,460.000000,1191.000000,419.000000,5.606000,288900.000000 +-121.980000,37.310000,28.000000,3840.000000,629.000000,1883.000000,662.000000,6.409500,335900.000000 +-121.980000,37.300000,30.000000,3404.000000,693.000000,1794.000000,633.000000,4.631200,283200.000000 +-121.980000,37.290000,33.000000,2120.000000,349.000000,907.000000,336.000000,7.544300,283000.000000 +-121.980000,37.280000,28.000000,3688.000000,633.000000,1877.000000,620.000000,5.725100,272600.000000 +-121.980000,37.280000,26.000000,1182.000000,309.000000,620.000000,306.000000,3.392200,269100.000000 
+-121.980000,37.270000,29.000000,2658.000000,484.000000,1318.000000,498.000000,5.356100,298900.000000 +-121.980000,37.260000,27.000000,2331.000000,461.000000,1178.000000,447.000000,4.665400,340700.000000 +-121.980000,37.250000,19.000000,755.000000,93.000000,267.000000,99.000000,15.000000,500001.000000 +-121.980000,37.240000,35.000000,3574.000000,485.000000,1325.000000,476.000000,8.542500,500001.000000 +-121.980000,37.230000,33.000000,3585.000000,935.000000,1511.000000,835.000000,3.117600,396300.000000 +-121.980000,37.160000,42.000000,2533.000000,433.000000,957.000000,398.000000,5.346800,279900.000000 +-121.980000,36.990000,14.000000,6787.000000,1454.000000,3416.000000,1357.000000,3.594300,262400.000000 +-121.980000,36.980000,29.000000,2681.000000,632.000000,1652.000000,620.000000,3.075000,215800.000000 +-121.980000,36.970000,21.000000,3349.000000,737.000000,1952.000000,718.000000,3.727300,251900.000000 +-121.980000,36.960000,31.000000,3209.000000,723.000000,1489.000000,692.000000,3.661900,245100.000000 +-121.980000,36.960000,20.000000,3495.000000,818.000000,2186.000000,772.000000,3.116700,258300.000000 +-121.990000,39.150000,17.000000,6440.000000,1204.000000,3266.000000,1142.000000,2.713700,72000.000000 +-121.990000,38.480000,17.000000,1824.000000,348.000000,934.000000,305.000000,4.671900,250000.000000 +-121.990000,38.360000,35.000000,2728.000000,451.000000,1290.000000,452.000000,3.276800,117600.000000 +-121.990000,38.360000,33.000000,146.000000,31.000000,75.000000,31.000000,3.517900,84400.000000 +-121.990000,38.340000,16.000000,1470.000000,261.000000,748.000000,256.000000,4.043300,132200.000000 +-121.990000,38.340000,13.000000,3252.000000,610.000000,1915.000000,631.000000,4.213700,151700.000000 +-121.990000,38.270000,16.000000,4138.000000,758.000000,1762.000000,723.000000,3.197900,137500.000000 +-121.990000,38.150000,36.000000,263.000000,73.000000,88.000000,42.000000,2.531300,162500.000000 +-121.990000,37.980000,23.000000,2293.000000,411.000000,969.000000,399.000000,4.453600,184000.000000 +-121.990000,37.970000,30.000000,3320.000000,589.000000,1470.000000,543.000000,4.607100,184100.000000 +-121.990000,37.960000,17.000000,2756.000000,423.000000,1228.000000,426.000000,5.587200,200600.000000 +-121.990000,37.960000,16.000000,3324.000000,479.000000,1470.000000,461.000000,7.616600,260400.000000 +-121.990000,37.820000,22.000000,1248.000000,271.000000,579.000000,269.000000,3.375000,200000.000000 +-121.990000,37.810000,17.000000,465.000000,83.000000,146.000000,75.000000,4.901800,188500.000000 +-121.990000,37.770000,14.000000,8213.000000,1364.000000,3283.000000,1286.000000,5.175500,294800.000000 +-121.990000,37.610000,9.000000,3666.000000,711.000000,2341.000000,703.000000,4.645800,217000.000000 +-121.990000,37.580000,31.000000,2878.000000,478.000000,1276.000000,485.000000,6.207300,282500.000000 +-121.990000,37.560000,20.000000,6462.000000,1294.000000,3288.000000,1235.000000,4.339300,231200.000000 +-121.990000,37.560000,18.000000,5505.000000,1005.000000,2641.000000,971.000000,5.000000,269700.000000 +-121.990000,37.550000,28.000000,2414.000000,415.000000,1106.000000,453.000000,4.840300,268600.000000 +-121.990000,37.550000,16.000000,6647.000000,2098.000000,4649.000000,1903.000000,2.907400,213800.000000 +-121.990000,37.540000,28.000000,3046.000000,507.000000,1772.000000,516.000000,5.328300,227900.000000 +-121.990000,37.530000,25.000000,5405.000000,939.000000,2831.000000,923.000000,5.042300,222200.000000 
+-121.990000,37.400000,35.000000,1845.000000,325.000000,1343.000000,317.000000,5.391200,235300.000000 +-121.990000,37.400000,24.000000,3217.000000,689.000000,1196.000000,684.000000,3.489600,226700.000000 +-121.990000,37.390000,25.000000,3495.000000,834.000000,2484.000000,797.000000,4.814500,230700.000000 +-121.990000,37.370000,27.000000,1797.000000,538.000000,1610.000000,531.000000,4.242200,237500.000000 +-121.990000,37.360000,33.000000,2677.000000,644.000000,1469.000000,633.000000,3.204800,261800.000000 +-121.990000,37.360000,33.000000,2545.000000,467.000000,1287.000000,458.000000,5.500000,282200.000000 +-121.990000,37.360000,33.000000,2321.000000,480.000000,1230.000000,451.000000,4.909100,270300.000000 +-121.990000,37.360000,32.000000,1754.000000,324.000000,917.000000,330.000000,4.676100,298300.000000 +-121.990000,37.350000,25.000000,1527.000000,325.000000,707.000000,339.000000,4.375000,212200.000000 +-121.990000,37.350000,16.000000,3249.000000,947.000000,1637.000000,841.000000,4.542700,198400.000000 +-121.990000,37.340000,27.000000,3353.000000,653.000000,1571.000000,621.000000,5.273000,315600.000000 +-121.990000,37.340000,26.000000,3637.000000,933.000000,2249.000000,905.000000,3.962500,262900.000000 +-121.990000,37.330000,35.000000,1802.000000,291.000000,841.000000,315.000000,4.836500,313900.000000 +-121.990000,37.330000,33.000000,2023.000000,425.000000,1016.000000,405.000000,3.941700,285800.000000 +-121.990000,37.320000,20.000000,4461.000000,864.000000,2042.000000,808.000000,4.708300,217700.000000 +-121.990000,37.310000,26.000000,3285.000000,502.000000,1443.000000,530.000000,5.783300,339600.000000 +-121.990000,37.300000,28.000000,4863.000000,901.000000,2110.000000,868.000000,5.148300,342000.000000 +-121.990000,37.290000,32.000000,2930.000000,481.000000,1336.000000,481.000000,6.463100,344100.000000 +-121.990000,37.270000,27.000000,2937.000000,497.000000,1454.000000,511.000000,5.405100,273500.000000 +-121.990000,37.270000,21.000000,1214.000000,192.000000,500.000000,185.000000,7.598000,347800.000000 +-121.990000,37.270000,17.000000,1527.000000,267.000000,775.000000,260.000000,5.965800,278000.000000 +-121.990000,37.260000,29.000000,2718.000000,365.000000,982.000000,339.000000,7.923400,500001.000000 +-121.990000,37.260000,17.000000,4034.000000,611.000000,1158.000000,560.000000,8.206900,442500.000000 +-121.990000,37.250000,25.000000,1743.000000,212.000000,604.000000,200.000000,10.758200,500001.000000 +-121.990000,37.250000,22.000000,4240.000000,532.000000,1480.000000,514.000000,11.246300,500001.000000 +-121.990000,37.050000,19.000000,2023.000000,392.000000,955.000000,328.000000,5.248600,353000.000000 +-121.990000,36.990000,29.000000,3119.000000,507.000000,1476.000000,487.000000,5.812300,281500.000000 +-121.990000,36.990000,16.000000,1592.000000,369.000000,1039.000000,351.000000,3.636400,207000.000000 +-121.990000,36.980000,40.000000,1104.000000,224.000000,669.000000,215.000000,4.340900,256300.000000 +-121.990000,36.980000,25.000000,2113.000000,422.000000,1365.000000,439.000000,4.648400,234600.000000 +-121.990000,36.980000,19.000000,5613.000000,1321.000000,3018.000000,1268.000000,3.191400,215600.000000 +-121.990000,36.970000,15.000000,3044.000000,786.000000,1306.000000,693.000000,2.177100,213200.000000 +-121.990000,36.960000,42.000000,1275.000000,272.000000,451.000000,200.000000,4.732100,422400.000000 +-122.000000,38.990000,39.000000,1548.000000,323.000000,815.000000,286.000000,2.948900,67500.000000 
+-122.000000,38.830000,26.000000,272.000000,49.000000,194.000000,52.000000,3.418700,98400.000000 +-122.000000,38.410000,11.000000,2838.000000,429.000000,1331.000000,426.000000,4.945000,298400.000000 +-122.000000,38.380000,16.000000,2509.000000,366.000000,1043.000000,339.000000,6.070400,173400.000000 +-122.000000,38.360000,34.000000,2735.000000,539.000000,1390.000000,491.000000,2.726200,118800.000000 +-122.000000,38.360000,34.000000,1502.000000,282.000000,860.000000,297.000000,3.343800,135600.000000 +-122.000000,38.350000,34.000000,432.000000,65.000000,208.000000,71.000000,5.543500,136000.000000 +-122.000000,38.350000,34.000000,1084.000000,187.000000,561.000000,198.000000,4.211500,118900.000000 +-122.000000,38.250000,7.000000,11768.000000,1893.000000,6657.000000,1874.000000,4.922200,142900.000000 +-122.000000,38.230000,1.000000,2062.000000,343.000000,872.000000,268.000000,5.263600,191300.000000 +-122.000000,38.030000,4.000000,2341.000000,408.000000,1235.000000,431.000000,6.042400,165900.000000 +-122.000000,37.990000,28.000000,4035.000000,641.000000,1881.000000,659.000000,5.460700,192300.000000 +-122.000000,37.980000,36.000000,404.000000,77.000000,237.000000,88.000000,4.525000,161300.000000 +-122.000000,37.980000,35.000000,1192.000000,201.000000,535.000000,172.000000,4.921900,182000.000000 +-122.000000,37.980000,31.000000,2030.000000,337.000000,867.000000,341.000000,5.091500,193200.000000 +-122.000000,37.970000,27.000000,2491.000000,428.000000,1171.000000,431.000000,5.102100,202800.000000 +-122.000000,37.960000,32.000000,3364.000000,666.000000,1980.000000,678.000000,3.700000,179000.000000 +-122.000000,37.960000,28.000000,4071.000000,713.000000,2033.000000,647.000000,4.583300,190700.000000 +-122.000000,37.950000,9.000000,2214.000000,256.000000,848.000000,239.000000,6.814500,339200.000000 +-122.000000,37.840000,16.000000,7681.000000,946.000000,2777.000000,908.000000,9.527100,500001.000000 +-122.000000,37.820000,20.000000,2206.000000,458.000000,926.000000,432.000000,4.604200,256400.000000 +-122.000000,37.580000,6.000000,4405.000000,717.000000,2071.000000,688.000000,5.815100,295600.000000 +-122.000000,37.550000,27.000000,6103.000000,1249.000000,3026.000000,1134.000000,4.159100,332400.000000 +-122.000000,37.540000,29.000000,4133.000000,744.000000,2023.000000,749.000000,5.161600,275100.000000 +-122.000000,37.540000,26.000000,1910.000000,371.000000,852.000000,357.000000,5.832500,298900.000000 +-122.000000,37.510000,7.000000,6352.000000,1390.000000,3223.000000,1316.000000,4.986700,181700.000000 +-122.000000,37.400000,35.000000,1542.000000,298.000000,1164.000000,318.000000,5.914500,236900.000000 +-122.000000,37.400000,17.000000,5121.000000,1017.000000,1470.000000,968.000000,2.970600,81300.000000 +-122.000000,37.400000,17.000000,4324.000000,854.000000,1656.000000,885.000000,3.661900,232400.000000 +-122.000000,37.390000,36.000000,1236.000000,229.000000,880.000000,247.000000,5.791000,239400.000000 +-122.000000,37.390000,33.000000,2154.000000,405.000000,1655.000000,434.000000,5.796200,229800.000000 +-122.000000,37.370000,16.000000,1434.000000,372.000000,804.000000,361.000000,3.704500,178100.000000 +-122.000000,37.360000,25.000000,3534.000000,949.000000,1880.000000,849.000000,3.423800,337000.000000 +-122.000000,37.360000,19.000000,2237.000000,433.000000,1158.000000,426.000000,6.771800,368300.000000 +-122.000000,37.360000,17.000000,6012.000000,1737.000000,3539.000000,1625.000000,3.846400,239400.000000 +-122.000000,37.360000,17.000000,2070.000000,278.000000,797.000000,275.000000,8.615500,411200.000000 
+-122.000000,37.340000,31.000000,3344.000000,620.000000,1604.000000,572.000000,5.210800,351500.000000 +-122.000000,37.340000,27.000000,1716.000000,290.000000,817.000000,301.000000,5.915800,343100.000000 +-122.000000,37.330000,30.000000,4033.000000,794.000000,1788.000000,807.000000,5.693200,338700.000000 +-122.000000,37.320000,34.000000,3450.000000,731.000000,1915.000000,689.000000,4.740200,244500.000000 +-122.000000,37.310000,33.000000,4211.000000,918.000000,2389.000000,861.000000,4.723500,242200.000000 +-122.000000,37.280000,35.000000,3133.000000,541.000000,1449.000000,555.000000,5.729500,346100.000000 +-122.000000,37.280000,33.000000,2170.000000,311.000000,854.000000,303.000000,8.360500,500001.000000 +-122.000000,37.280000,32.000000,2782.000000,495.000000,1092.000000,455.000000,5.410300,335900.000000 +-122.000000,37.270000,33.000000,1664.000000,271.000000,759.000000,272.000000,5.787600,415800.000000 +-122.000000,37.230000,36.000000,3191.000000,430.000000,1234.000000,440.000000,9.070400,500001.000000 +-122.000000,37.080000,17.000000,4154.000000,739.000000,2149.000000,693.000000,5.591900,373400.000000 +-122.000000,37.060000,20.000000,2403.000000,376.000000,1149.000000,369.000000,6.062100,304400.000000 +-122.000000,37.000000,16.000000,32.000000,4.000000,36.000000,5.000000,2.625000,137500.000000 +-122.000000,36.980000,43.000000,1636.000000,324.000000,792.000000,325.000000,3.556200,239200.000000 +-122.000000,36.980000,20.000000,2502.000000,454.000000,981.000000,399.000000,4.300000,275000.000000 +-122.000000,36.930000,51.000000,1616.000000,374.000000,608.000000,302.000000,3.193200,400000.000000 +-122.010000,39.740000,20.000000,2332.000000,518.000000,1856.000000,495.000000,2.174600,58700.000000 +-122.010000,39.210000,52.000000,1989.000000,392.000000,985.000000,396.000000,2.555600,75800.000000 +-122.010000,39.210000,50.000000,1592.000000,372.000000,781.000000,307.000000,2.267900,69100.000000 +-122.010000,39.210000,39.000000,1214.000000,250.000000,660.000000,249.000000,2.455900,75000.000000 +-122.010000,38.440000,12.000000,2344.000000,354.000000,1035.000000,321.000000,4.977300,281200.000000 +-122.010000,38.370000,16.000000,3996.000000,550.000000,1673.000000,539.000000,5.778000,175700.000000 +-122.010000,38.360000,28.000000,1967.000000,315.000000,734.000000,291.000000,4.958300,146200.000000 +-122.010000,38.360000,15.000000,476.000000,67.000000,213.000000,73.000000,7.105300,315200.000000 +-122.010000,38.360000,15.000000,1176.000000,166.000000,485.000000,171.000000,5.944100,228200.000000 +-122.010000,38.350000,18.000000,4486.000000,723.000000,1600.000000,697.000000,3.865100,189700.000000 +-122.010000,38.270000,17.000000,9089.000000,1542.000000,4758.000000,1520.000000,4.061900,126600.000000 +-122.010000,38.260000,12.000000,4132.000000,710.000000,2087.000000,633.000000,4.598700,139700.000000 +-122.010000,38.250000,16.000000,1081.000000,181.000000,792.000000,184.000000,4.677900,131300.000000 +-122.010000,38.250000,11.000000,6550.000000,1149.000000,3570.000000,1123.000000,3.858300,137900.000000 +-122.010000,37.990000,28.000000,1900.000000,401.000000,918.000000,351.000000,3.784100,144900.000000 +-122.010000,37.980000,34.000000,1256.000000,267.000000,638.000000,252.000000,4.050700,161000.000000 +-122.010000,37.980000,29.000000,2001.000000,373.000000,956.000000,370.000000,4.317000,194000.000000 +-122.010000,37.980000,25.000000,1476.000000,336.000000,777.000000,297.000000,3.517900,165500.000000 +-122.010000,37.970000,34.000000,3259.000000,498.000000,1250.000000,478.000000,5.379400,206200.000000 
+-122.010000,37.970000,32.000000,3012.000000,527.000000,1288.000000,512.000000,3.644900,211500.000000 +-122.010000,37.950000,8.000000,3866.000000,539.000000,1555.000000,513.000000,6.090100,298200.000000 +-122.010000,37.940000,26.000000,1619.000000,224.000000,706.000000,220.000000,6.070400,268000.000000 +-122.010000,37.940000,23.000000,3741.000000,501.000000,1339.000000,499.000000,6.706100,322300.000000 +-122.010000,37.940000,18.000000,2077.000000,298.000000,937.000000,292.000000,6.380900,273600.000000 +-122.010000,37.930000,25.000000,2652.000000,335.000000,1062.000000,334.000000,7.589800,330200.000000 +-122.010000,37.920000,18.000000,2808.000000,337.000000,1038.000000,337.000000,8.395600,353600.000000 +-122.010000,37.920000,16.000000,2638.000000,345.000000,1055.000000,334.000000,8.116300,365800.000000 +-122.010000,37.910000,21.000000,10093.000000,1269.000000,3645.000000,1219.000000,7.687700,367700.000000 +-122.010000,37.830000,30.000000,3917.000000,549.000000,1330.000000,544.000000,6.561700,386600.000000 +-122.010000,37.590000,2.000000,838.000000,295.000000,240.000000,149.000000,2.875000,237500.000000 +-122.010000,37.570000,14.000000,16199.000000,2993.000000,8117.000000,2847.000000,5.832200,281800.000000 +-122.010000,37.560000,6.000000,3028.000000,778.000000,1531.000000,736.000000,4.425900,158000.000000 +-122.010000,37.560000,24.000000,2563.000000,485.000000,1174.000000,501.000000,3.817900,216100.000000 +-122.010000,37.550000,34.000000,2791.000000,495.000000,1276.000000,468.000000,4.916700,256300.000000 +-122.010000,37.550000,26.000000,2068.000000,532.000000,1434.000000,495.000000,3.300800,224200.000000 +-122.010000,37.540000,32.000000,2572.000000,406.000000,1128.000000,395.000000,5.000000,287600.000000 +-122.010000,37.530000,27.000000,1890.000000,303.000000,889.000000,314.000000,5.705700,287600.000000 +-122.010000,37.400000,14.000000,4841.000000,1130.000000,813.000000,517.000000,3.761400,137500.000000 +-122.010000,37.390000,36.000000,1976.000000,361.000000,1348.000000,371.000000,5.644700,252600.000000 +-122.010000,37.390000,26.000000,2500.000000,962.000000,2374.000000,879.000000,3.558600,222200.000000 +-122.010000,37.390000,16.000000,3015.000000,829.000000,1769.000000,807.000000,4.006800,249500.000000 +-122.010000,37.380000,32.000000,726.000000,204.000000,538.000000,203.000000,4.505000,230400.000000 +-122.010000,37.370000,25.000000,2213.000000,360.000000,1066.000000,390.000000,7.216500,360900.000000 +-122.010000,37.370000,11.000000,2559.000000,694.000000,1309.000000,668.000000,4.184700,167300.000000 +-122.010000,37.360000,25.000000,2796.000000,429.000000,1267.000000,426.000000,6.632900,349000.000000 +-122.010000,37.360000,21.000000,2483.000000,396.000000,1194.000000,424.000000,7.127300,346300.000000 +-122.010000,37.360000,16.000000,1105.000000,354.000000,499.000000,324.000000,4.206100,253600.000000 +-122.010000,37.350000,33.000000,2517.000000,496.000000,1158.000000,443.000000,5.078500,289500.000000 +-122.010000,37.350000,16.000000,3716.000000,916.000000,1551.000000,759.000000,4.500000,323600.000000 +-122.010000,37.340000,31.000000,3080.000000,526.000000,1493.000000,582.000000,6.305200,344200.000000 +-122.010000,37.320000,32.000000,3108.000000,613.000000,1577.000000,603.000000,4.661300,284000.000000 +-122.010000,37.310000,26.000000,1391.000000,241.000000,700.000000,236.000000,6.676600,332700.000000 +-122.010000,37.290000,31.000000,3136.000000,431.000000,1190.000000,412.000000,7.500000,500001.000000 
+-122.010000,37.280000,22.000000,2038.000000,260.000000,773.000000,281.000000,9.156900,500001.000000 +-122.010000,37.270000,28.000000,3825.000000,473.000000,1415.000000,480.000000,10.675000,500001.000000 +-122.010000,37.270000,27.000000,3340.000000,451.000000,1220.000000,447.000000,8.817800,500001.000000 +-122.010000,37.260000,14.000000,2561.000000,404.000000,1172.000000,378.000000,7.610700,500001.000000 +-122.010000,37.250000,31.000000,1574.000000,193.000000,551.000000,191.000000,10.231100,500001.000000 +-122.010000,37.180000,37.000000,3852.000000,652.000000,1534.000000,567.000000,5.859600,318700.000000 +-122.010000,37.060000,19.000000,4113.000000,767.000000,2006.000000,732.000000,5.112100,308100.000000 +-122.010000,37.030000,21.000000,5904.000000,956.000000,2616.000000,916.000000,5.903900,355300.000000 +-122.010000,36.990000,41.000000,2548.000000,508.000000,1290.000000,488.000000,3.690200,233000.000000 +-122.010000,36.990000,28.000000,1321.000000,240.000000,652.000000,239.000000,4.980800,263100.000000 +-122.010000,36.980000,47.000000,2403.000000,517.000000,1144.000000,455.000000,2.595400,229400.000000 +-122.010000,36.980000,47.000000,1250.000000,249.000000,607.000000,234.000000,4.041700,265300.000000 +-122.010000,36.970000,52.000000,920.000000,202.000000,525.000000,264.000000,2.944400,232800.000000 +-122.010000,36.970000,35.000000,1605.000000,392.000000,743.000000,382.000000,2.536800,240000.000000 +-122.010000,36.950000,52.000000,1217.000000,325.000000,508.000000,237.000000,2.054700,326700.000000 +-122.010000,36.910000,19.000000,691.000000,191.000000,324.000000,167.000000,3.131200,388500.000000 +-122.020000,38.380000,16.000000,808.000000,137.000000,371.000000,145.000000,6.076700,216400.000000 +-122.020000,38.370000,16.000000,2495.000000,331.000000,1118.000000,338.000000,6.489400,198000.000000 +-122.020000,38.270000,20.000000,2237.000000,464.000000,1169.000000,425.000000,3.211500,99100.000000 +-122.020000,38.260000,8.000000,2894.000000,602.000000,1566.000000,572.000000,3.633500,131600.000000 +-122.020000,38.260000,27.000000,3440.000000,787.000000,2085.000000,748.000000,2.589600,104700.000000 +-122.020000,38.250000,10.000000,2237.000000,454.000000,1255.000000,429.000000,3.117600,126500.000000 +-122.020000,38.020000,44.000000,1465.000000,247.000000,817.000000,237.000000,4.869300,156900.000000 +-122.020000,38.000000,28.000000,2965.000000,533.000000,1591.000000,472.000000,4.637500,178200.000000 +-122.020000,37.990000,37.000000,2247.000000,416.000000,1237.000000,397.000000,4.450000,161900.000000 +-122.020000,37.980000,40.000000,1797.000000,401.000000,756.000000,369.000000,2.845600,165500.000000 +-122.020000,37.980000,37.000000,1474.000000,343.000000,782.000000,331.000000,3.418700,161700.000000 +-122.020000,37.970000,36.000000,2342.000000,436.000000,1191.000000,416.000000,4.000000,171000.000000 +-122.020000,37.950000,25.000000,1205.000000,260.000000,608.000000,272.000000,2.451900,208300.000000 +-122.020000,37.950000,22.000000,3526.000000,510.000000,1660.000000,508.000000,5.664200,237000.000000 +-122.020000,37.940000,23.000000,3516.000000,661.000000,1465.000000,623.000000,4.256900,213100.000000 +-122.020000,37.940000,19.000000,3192.000000,612.000000,1317.000000,594.000000,4.125000,267100.000000 +-122.020000,37.920000,26.000000,5077.000000,640.000000,1872.000000,636.000000,7.471300,351200.000000 +-122.020000,37.890000,29.000000,6349.000000,858.000000,2450.000000,778.000000,7.500000,356200.000000 +-122.020000,37.880000,16.000000,3031.000000,438.000000,1087.000000,421.000000,7.373200,287300.000000 
+-122.020000,37.870000,14.000000,3056.000000,369.000000,1209.000000,377.000000,8.435200,441400.000000 +-122.020000,37.840000,34.000000,1879.000000,265.000000,729.000000,263.000000,7.707200,443800.000000 +-122.020000,37.800000,11.000000,6200.000000,907.000000,2286.000000,896.000000,7.651800,359300.000000 +-122.020000,37.630000,6.000000,2445.000000,590.000000,1189.000000,573.000000,3.895800,301100.000000 +-122.020000,37.600000,36.000000,1633.000000,345.000000,1382.000000,338.000000,3.694000,159600.000000 +-122.020000,37.600000,32.000000,1295.000000,280.000000,1156.000000,300.000000,3.500000,154300.000000 +-122.020000,37.600000,31.000000,2155.000000,522.000000,1858.000000,437.000000,2.652000,159800.000000 +-122.020000,37.580000,15.000000,3052.000000,760.000000,2097.000000,728.000000,3.361700,178100.000000 +-122.020000,37.560000,35.000000,1716.000000,312.000000,914.000000,316.000000,5.573700,214500.000000 +-122.020000,37.560000,23.000000,4332.000000,857.000000,2461.000000,829.000000,4.359400,223400.000000 +-122.020000,37.400000,33.000000,2015.000000,484.000000,1285.000000,419.000000,4.065500,226800.000000 +-122.020000,37.390000,35.000000,2297.000000,497.000000,1428.000000,497.000000,4.743100,239700.000000 +-122.020000,37.380000,43.000000,1261.000000,317.000000,836.000000,333.000000,4.091100,224600.000000 +-122.020000,37.380000,32.000000,1889.000000,487.000000,1321.000000,508.000000,3.257400,254400.000000 +-122.020000,37.370000,8.000000,5686.000000,1489.000000,3250.000000,1329.000000,4.278200,327700.000000 +-122.020000,37.360000,25.000000,2074.000000,387.000000,1273.000000,383.000000,4.760900,378000.000000 +-122.020000,37.360000,24.000000,1709.000000,437.000000,892.000000,408.000000,4.967100,335200.000000 +-122.020000,37.360000,21.000000,2471.000000,677.000000,1486.000000,689.000000,3.903800,243800.000000 +-122.020000,37.350000,26.000000,2785.000000,418.000000,1221.000000,422.000000,8.107800,365700.000000 +-122.020000,37.350000,22.000000,3219.000000,756.000000,1479.000000,667.000000,4.147300,354400.000000 +-122.020000,37.350000,18.000000,1221.000000,255.000000,507.000000,271.000000,5.367900,228400.000000 +-122.020000,37.340000,30.000000,1036.000000,151.000000,467.000000,156.000000,6.448000,360600.000000 +-122.020000,37.340000,28.000000,2488.000000,396.000000,1190.000000,410.000000,5.788100,344700.000000 +-122.020000,37.340000,26.000000,1992.000000,328.000000,980.000000,342.000000,6.247500,350000.000000 +-122.020000,37.310000,35.000000,2355.000000,384.000000,1248.000000,378.000000,5.971400,332500.000000 +-122.020000,37.310000,33.000000,2563.000000,434.000000,1230.000000,418.000000,6.319700,340100.000000 +-122.020000,37.300000,32.000000,2134.000000,328.000000,903.000000,322.000000,6.359000,341900.000000 +-122.020000,37.300000,26.000000,1983.000000,301.000000,924.000000,297.000000,6.712300,354600.000000 +-122.020000,37.290000,25.000000,3845.000000,492.000000,1461.000000,475.000000,10.397900,500001.000000 +-122.020000,37.290000,18.000000,2550.000000,312.000000,999.000000,320.000000,8.793900,500001.000000 +-122.020000,37.280000,25.000000,3437.000000,428.000000,1198.000000,411.000000,9.346400,500001.000000 +-122.020000,37.260000,34.000000,1764.000000,243.000000,692.000000,223.000000,8.033100,500001.000000 +-122.020000,37.260000,24.000000,2411.000000,299.000000,847.000000,299.000000,10.266600,500001.000000 +-122.020000,37.240000,28.000000,2796.000000,365.000000,1085.000000,363.000000,10.683400,500001.000000 
+-122.020000,37.110000,36.000000,2066.000000,401.000000,942.000000,344.000000,5.241700,196400.000000 +-122.020000,37.090000,35.000000,1818.000000,368.000000,682.000000,254.000000,4.861100,240000.000000 +-122.020000,36.990000,30.000000,2156.000000,487.000000,1023.000000,458.000000,2.787500,245000.000000 +-122.020000,36.980000,44.000000,1153.000000,238.000000,657.000000,219.000000,3.236800,212500.000000 +-122.020000,36.980000,35.000000,1053.000000,263.000000,552.000000,237.000000,2.712500,217500.000000 +-122.020000,36.980000,21.000000,607.000000,155.000000,226.000000,136.000000,1.906300,166700.000000 +-122.020000,36.970000,29.000000,2568.000000,747.000000,1743.000000,659.000000,1.928600,195300.000000 +-122.020000,36.960000,52.000000,775.000000,305.000000,1054.000000,305.000000,2.017200,112500.000000 +-122.030000,38.690000,23.000000,1796.000000,380.000000,939.000000,330.000000,2.795500,96300.000000 +-122.030000,38.300000,5.000000,1569.000000,199.000000,713.000000,209.000000,6.677900,223900.000000 +-122.030000,38.280000,15.000000,5114.000000,833.000000,2418.000000,778.000000,4.488200,144000.000000 +-122.030000,38.270000,24.000000,3580.000000,735.000000,1959.000000,731.000000,2.728400,118500.000000 +-122.030000,38.260000,25.000000,4617.000000,1046.000000,2685.000000,1011.000000,2.957600,108500.000000 +-122.030000,38.250000,35.000000,1940.000000,384.000000,1177.000000,403.000000,3.138900,101100.000000 +-122.030000,38.250000,13.000000,3334.000000,541.000000,1923.000000,538.000000,4.090500,134800.000000 +-122.030000,38.240000,16.000000,1104.000000,164.000000,495.000000,156.000000,5.407400,157700.000000 +-122.030000,38.000000,25.000000,3577.000000,581.000000,1753.000000,593.000000,5.729500,178300.000000 +-122.030000,37.990000,37.000000,1755.000000,327.000000,882.000000,350.000000,4.590000,166600.000000 +-122.030000,37.990000,35.000000,3103.000000,537.000000,1614.000000,566.000000,4.902200,169300.000000 +-122.030000,37.980000,45.000000,2842.000000,567.000000,1261.000000,535.000000,3.604200,138200.000000 +-122.030000,37.980000,44.000000,1254.000000,252.000000,498.000000,217.000000,3.453100,148900.000000 +-122.030000,37.980000,16.000000,1209.000000,477.000000,627.000000,482.000000,1.389400,156300.000000 +-122.030000,37.970000,45.000000,1613.000000,338.000000,865.000000,336.000000,3.250000,151100.000000 +-122.030000,37.970000,20.000000,3968.000000,931.000000,2629.000000,903.000000,2.991500,166700.000000 +-122.030000,37.960000,20.000000,2636.000000,691.000000,1142.000000,627.000000,2.108300,162500.000000 +-122.030000,37.950000,32.000000,1955.000000,313.000000,804.000000,317.000000,4.948500,202300.000000 +-122.030000,37.950000,14.000000,3287.000000,793.000000,1601.000000,716.000000,3.171900,220500.000000 +-122.030000,37.940000,21.000000,5541.000000,776.000000,2214.000000,737.000000,5.577700,279300.000000 +-122.030000,37.930000,21.000000,4712.000000,624.000000,1773.000000,615.000000,6.091800,344800.000000 +-122.030000,37.920000,23.000000,3318.000000,408.000000,1124.000000,393.000000,6.584700,358800.000000 +-122.030000,37.870000,21.000000,3521.000000,447.000000,1396.000000,467.000000,8.267300,358700.000000 +-122.030000,37.860000,29.000000,3025.000000,477.000000,1035.000000,452.000000,6.112000,390600.000000 +-122.030000,37.860000,25.000000,3004.000000,393.000000,1145.000000,376.000000,7.265500,494000.000000 +-122.030000,37.690000,20.000000,200.000000,25.000000,83.000000,31.000000,6.500000,340000.000000 +-122.030000,37.620000,35.000000,2072.000000,352.000000,1001.000000,350.000000,4.710900,198700.000000 
+-122.030000,37.620000,35.000000,1298.000000,236.000000,632.000000,204.000000,3.892900,209500.000000 +-122.030000,37.620000,32.000000,2964.000000,547.000000,1472.000000,527.000000,4.246800,221200.000000 +-122.030000,37.610000,37.000000,1383.000000,259.000000,808.000000,241.000000,4.012500,161400.000000 +-122.030000,37.610000,36.000000,1409.000000,271.000000,1002.000000,281.000000,3.726200,164900.000000 +-122.030000,37.590000,16.000000,4371.000000,889.000000,2530.000000,817.000000,4.678600,256000.000000 +-122.030000,37.560000,31.000000,4981.000000,964.000000,2841.000000,924.000000,4.896200,220200.000000 +-122.030000,37.560000,24.000000,8444.000000,1492.000000,4446.000000,1491.000000,4.697800,240300.000000 +-122.030000,37.550000,26.000000,3087.000000,532.000000,1597.000000,483.000000,4.911800,217300.000000 +-122.030000,37.550000,22.000000,9167.000000,1373.000000,4319.000000,1404.000000,6.992000,284800.000000 +-122.030000,37.540000,6.000000,2918.000000,672.000000,1911.000000,639.000000,4.140600,178200.000000 +-122.030000,37.540000,35.000000,1867.000000,343.000000,1213.000000,338.000000,4.821400,186000.000000 +-122.030000,37.540000,16.000000,4458.000000,856.000000,3038.000000,870.000000,5.073900,208000.000000 +-122.030000,37.530000,18.000000,1746.000000,437.000000,1268.000000,404.000000,3.256000,183300.000000 +-122.030000,37.390000,34.000000,2600.000000,650.000000,1994.000000,650.000000,4.022300,250200.000000 +-122.030000,37.380000,21.000000,2667.000000,798.000000,1433.000000,727.000000,3.873200,252400.000000 +-122.030000,37.370000,9.000000,2966.000000,770.000000,1430.000000,740.000000,3.004700,256000.000000 +-122.030000,37.370000,41.000000,2123.000000,425.000000,1032.000000,435.000000,4.695700,284800.000000 +-122.030000,37.370000,30.000000,1269.000000,290.000000,556.000000,266.000000,3.812500,325000.000000 +-122.030000,37.370000,16.000000,3402.000000,1193.000000,1479.000000,1043.000000,3.586100,500001.000000 +-122.030000,37.360000,28.000000,2490.000000,345.000000,948.000000,361.000000,6.491300,411900.000000 +-122.030000,37.360000,16.000000,2697.000000,803.000000,1369.000000,723.000000,4.469900,367400.000000 +-122.030000,37.350000,25.000000,3095.000000,514.000000,1251.000000,507.000000,5.538800,352100.000000 +-122.030000,37.350000,19.000000,3811.000000,1227.000000,1930.000000,1153.000000,3.515400,311400.000000 +-122.030000,37.340000,25.000000,5404.000000,906.000000,2338.000000,883.000000,6.057700,451800.000000 +-122.030000,37.340000,17.000000,1165.000000,278.000000,598.000000,287.000000,4.012900,342400.000000 +-122.030000,37.340000,16.000000,1755.000000,410.000000,674.000000,410.000000,5.160200,231200.000000 +-122.030000,37.320000,15.000000,5132.000000,1059.000000,2156.000000,982.000000,5.651100,404800.000000 +-122.030000,37.310000,25.000000,2131.000000,410.000000,1132.000000,395.000000,5.350800,409100.000000 +-122.030000,37.300000,30.000000,3007.000000,554.000000,1551.000000,616.000000,5.852100,326300.000000 +-122.030000,37.300000,22.000000,3583.000000,758.000000,1792.000000,695.000000,5.484200,335300.000000 +-122.030000,37.290000,22.000000,3118.000000,438.000000,1147.000000,425.000000,10.365300,500001.000000 +-122.030000,37.280000,29.000000,3752.000000,468.000000,1320.000000,471.000000,9.893700,500001.000000 +-122.030000,37.270000,32.000000,4350.000000,645.000000,1551.000000,609.000000,7.827900,500001.000000 +-122.030000,37.250000,34.000000,2892.000000,413.000000,903.000000,365.000000,7.871100,500001.000000 
+-122.030000,37.050000,12.000000,2010.000000,422.000000,784.000000,407.000000,3.972800,190900.000000 +-122.030000,37.030000,21.000000,4650.000000,733.000000,2014.000000,704.000000,5.623300,322000.000000 +-122.030000,37.000000,30.000000,2077.000000,342.000000,816.000000,328.000000,5.207800,440500.000000 +-122.030000,36.980000,37.000000,2817.000000,716.000000,1341.000000,662.000000,2.155300,255400.000000 +-122.030000,36.970000,52.000000,403.000000,72.000000,200.000000,73.000000,1.692300,262500.000000 +-122.030000,36.970000,51.000000,924.000000,232.000000,488.000000,228.000000,2.196400,234400.000000 +-122.030000,36.970000,36.000000,337.000000,69.000000,223.000000,68.000000,3.240400,225000.000000 +-122.030000,36.960000,28.000000,1607.000000,421.000000,926.000000,385.000000,2.425000,216100.000000 +-122.030000,36.960000,18.000000,2677.000000,785.000000,1391.000000,656.000000,2.506700,232600.000000 +-122.040000,39.720000,23.000000,2502.000000,481.000000,1443.000000,455.000000,2.562500,70000.000000 +-122.040000,39.220000,27.000000,1446.000000,295.000000,670.000000,281.000000,3.262500,92800.000000 +-122.040000,38.680000,26.000000,1113.000000,222.000000,689.000000,234.000000,3.048600,83600.000000 +-122.040000,38.280000,25.000000,3304.000000,493.000000,1464.000000,488.000000,5.252700,130600.000000 +-122.040000,38.270000,16.000000,8517.000000,1910.000000,4508.000000,1837.000000,3.185300,129600.000000 +-122.040000,38.260000,34.000000,3082.000000,702.000000,1795.000000,703.000000,2.788500,105900.000000 +-122.040000,38.250000,52.000000,582.000000,131.000000,241.000000,106.000000,2.400000,125000.000000 +-122.040000,38.250000,38.000000,1214.000000,244.000000,632.000000,254.000000,2.843800,94200.000000 +-122.040000,38.250000,37.000000,1176.000000,291.000000,648.000000,271.000000,2.716700,92200.000000 +-122.040000,38.250000,32.000000,1203.000000,287.000000,571.000000,255.000000,3.093800,110400.000000 +-122.040000,38.240000,30.000000,2081.000000,456.000000,1005.000000,438.000000,1.995400,92900.000000 +-122.040000,38.240000,22.000000,2761.000000,757.000000,2612.000000,641.000000,1.687500,87500.000000 +-122.040000,38.000000,16.000000,3077.000000,733.000000,1447.000000,709.000000,3.248400,91100.000000 +-122.040000,37.990000,38.000000,2675.000000,541.000000,1378.000000,480.000000,3.889700,139900.000000 +-122.040000,37.990000,36.000000,2765.000000,495.000000,1478.000000,441.000000,4.125000,136200.000000 +-122.040000,37.990000,32.000000,1504.000000,279.000000,749.000000,267.000000,3.200000,134500.000000 +-122.040000,37.970000,26.000000,2470.000000,626.000000,1174.000000,573.000000,2.986100,160900.000000 +-122.040000,37.960000,27.000000,2587.000000,729.000000,1500.000000,623.000000,1.837000,175000.000000 +-122.040000,37.960000,20.000000,1143.000000,346.000000,578.000000,298.000000,2.241100,151800.000000 +-122.040000,37.960000,16.000000,2913.000000,723.000000,1705.000000,693.000000,2.909700,106300.000000 +-122.040000,37.950000,33.000000,1653.000000,334.000000,814.000000,328.000000,3.140600,163100.000000 +-122.040000,37.950000,29.000000,866.000000,138.000000,341.000000,133.000000,4.718800,197100.000000 +-122.040000,37.940000,24.000000,5732.000000,873.000000,2444.000000,888.000000,5.629200,231400.000000 +-122.040000,37.900000,20.000000,5467.000000,1044.000000,2310.000000,963.000000,5.698600,275800.000000 +-122.040000,37.890000,33.000000,2423.000000,322.000000,998.000000,346.000000,7.534900,349100.000000 +-122.040000,37.670000,29.000000,1694.000000,251.000000,690.000000,242.000000,6.050100,254200.000000 
+-122.040000,37.670000,18.000000,3000.000000,419.000000,1155.000000,415.000000,6.823300,332600.000000 +-122.040000,37.660000,23.000000,2419.000000,348.000000,1066.000000,384.000000,6.350100,350000.000000 +-122.040000,37.660000,10.000000,2031.000000,357.000000,867.000000,352.000000,5.316900,299200.000000 +-122.040000,37.630000,33.000000,952.000000,172.000000,369.000000,159.000000,3.233100,226700.000000 +-122.040000,37.630000,21.000000,1307.000000,236.000000,586.000000,249.000000,4.781300,241900.000000 +-122.040000,37.620000,35.000000,899.000000,179.000000,455.000000,185.000000,4.285700,190400.000000 +-122.040000,37.620000,35.000000,657.000000,118.000000,328.000000,134.000000,3.812500,204200.000000 +-122.040000,37.620000,32.000000,1540.000000,324.000000,793.000000,302.000000,3.285700,193200.000000 +-122.040000,37.600000,17.000000,3314.000000,638.000000,1873.000000,602.000000,4.387500,238500.000000 +-122.040000,37.590000,14.000000,1727.000000,302.000000,1116.000000,273.000000,5.342800,243600.000000 +-122.040000,37.580000,14.000000,14917.000000,2708.000000,8012.000000,2606.000000,5.627700,269800.000000 +-122.040000,37.550000,23.000000,3170.000000,532.000000,1446.000000,515.000000,4.435700,291700.000000 +-122.040000,37.540000,26.000000,2145.000000,369.000000,1285.000000,377.000000,4.946400,223800.000000 +-122.040000,37.530000,34.000000,2316.000000,478.000000,1524.000000,467.000000,3.736400,190400.000000 +-122.040000,37.530000,25.000000,4458.000000,922.000000,2998.000000,890.000000,3.966700,218500.000000 +-122.040000,37.500000,17.000000,407.000000,97.000000,307.000000,100.000000,3.169600,156300.000000 +-122.040000,37.380000,38.000000,2850.000000,550.000000,1518.000000,514.000000,4.202800,273600.000000 +-122.040000,37.370000,42.000000,1125.000000,273.000000,616.000000,258.000000,3.676500,252800.000000 +-122.040000,37.370000,33.000000,2757.000000,489.000000,1201.000000,481.000000,5.045300,311600.000000 +-122.040000,37.370000,23.000000,5135.000000,911.000000,2351.000000,863.000000,5.231900,430100.000000 +-122.040000,37.350000,28.000000,3250.000000,485.000000,1328.000000,473.000000,7.472900,431600.000000 +-122.040000,37.350000,28.000000,1582.000000,264.000000,696.000000,270.000000,5.678000,370100.000000 +-122.040000,37.340000,28.000000,3081.000000,460.000000,1260.000000,461.000000,7.537200,432600.000000 +-122.040000,37.340000,25.000000,1994.000000,287.000000,704.000000,283.000000,7.779900,447300.000000 +-122.040000,37.340000,23.000000,2590.000000,725.000000,1795.000000,680.000000,3.160000,225000.000000 +-122.040000,37.340000,20.000000,4475.000000,1048.000000,2271.000000,1021.000000,4.883600,396200.000000 +-122.040000,37.340000,19.000000,3694.000000,1036.000000,2496.000000,986.000000,3.699100,271500.000000 +-122.040000,37.330000,26.000000,2690.000000,401.000000,1264.000000,429.000000,7.764300,474700.000000 +-122.040000,37.320000,27.000000,2826.000000,451.000000,1259.000000,439.000000,5.752800,431400.000000 +-122.040000,37.310000,29.000000,2476.000000,434.000000,1217.000000,416.000000,6.204500,393800.000000 +-122.040000,37.310000,24.000000,3388.000000,633.000000,1627.000000,585.000000,5.154000,355100.000000 +-122.040000,37.300000,25.000000,3807.000000,600.000000,1678.000000,600.000000,6.681800,411300.000000 +-122.040000,37.300000,25.000000,2366.000000,417.000000,1076.000000,398.000000,6.923800,345900.000000 +-122.040000,37.290000,19.000000,3625.000000,432.000000,1252.000000,409.000000,12.214500,500001.000000 
+-122.040000,37.260000,24.000000,4973.000000,709.000000,1692.000000,696.000000,7.862700,500001.000000 +-122.040000,37.240000,24.000000,1521.000000,209.000000,539.000000,192.000000,11.155700,500001.000000 +-122.040000,37.080000,20.000000,467.000000,95.000000,229.000000,86.000000,4.800000,261500.000000 +-122.040000,37.040000,17.000000,4977.000000,994.000000,1987.000000,947.000000,3.885400,312300.000000 +-122.040000,36.980000,33.000000,797.000000,125.000000,385.000000,133.000000,6.797400,367600.000000 +-122.040000,36.970000,52.000000,1901.000000,335.000000,955.000000,301.000000,3.825900,253100.000000 +-122.040000,36.970000,49.000000,792.000000,136.000000,331.000000,137.000000,5.212800,238600.000000 +-122.040000,36.970000,45.000000,1302.000000,245.000000,621.000000,258.000000,5.180600,266400.000000 +-122.040000,36.970000,40.000000,1193.000000,227.000000,570.000000,204.000000,4.465900,237500.000000 +-122.040000,36.970000,30.000000,2695.000000,424.000000,1098.000000,420.000000,5.397200,362300.000000 +-122.040000,36.960000,44.000000,1294.000000,269.000000,645.000000,259.000000,3.243700,223900.000000 +-122.040000,36.960000,42.000000,538.000000,107.000000,200.000000,104.000000,2.166700,196400.000000 +-122.040000,36.960000,42.000000,1149.000000,264.000000,703.000000,232.000000,2.586500,206400.000000 +-122.040000,36.960000,32.000000,1438.000000,306.000000,802.000000,293.000000,4.196400,202000.000000 +-122.040000,36.950000,36.000000,1862.000000,364.000000,1080.000000,364.000000,4.456700,263800.000000 +-122.040000,36.950000,27.000000,1987.000000,374.000000,961.000000,343.000000,3.966700,265800.000000 +-122.050000,39.340000,44.000000,1064.000000,230.000000,494.000000,175.000000,2.875000,61500.000000 +-122.050000,38.560000,20.000000,1005.000000,168.000000,457.000000,157.000000,5.679000,225000.000000 +-122.050000,38.260000,32.000000,1070.000000,199.000000,631.000000,195.000000,2.682700,98900.000000 +-122.050000,38.250000,39.000000,199.000000,36.000000,101.000000,38.000000,6.229900,105400.000000 +-122.050000,38.250000,37.000000,1336.000000,251.000000,680.000000,231.000000,3.815000,99000.000000 +-122.050000,38.000000,36.000000,2476.000000,472.000000,1213.000000,393.000000,3.733300,136400.000000 +-122.050000,38.000000,16.000000,1085.000000,217.000000,356.000000,232.000000,2.346200,75000.000000 +-122.050000,37.970000,16.000000,60.000000,10.000000,65.000000,19.000000,6.135900,250000.000000 +-122.050000,37.960000,35.000000,2190.000000,384.000000,1154.000000,401.000000,3.845600,159800.000000 +-122.050000,37.950000,34.000000,1408.000000,277.000000,738.000000,269.000000,4.175000,169400.000000 +-122.050000,37.950000,27.000000,3513.000000,791.000000,1875.000000,694.000000,3.183800,182000.000000 +-122.050000,37.950000,22.000000,5175.000000,1213.000000,2804.000000,1091.000000,2.850000,144600.000000 +-122.050000,37.950000,20.000000,563.000000,107.000000,246.000000,123.000000,5.448200,190800.000000 +-122.050000,37.940000,22.000000,4162.000000,1194.000000,1804.000000,1185.000000,2.545900,179300.000000 +-122.050000,37.940000,22.000000,2105.000000,354.000000,993.000000,365.000000,4.660200,227800.000000 +-122.050000,37.930000,5.000000,4274.000000,1153.000000,1503.000000,881.000000,4.047300,266500.000000 +-122.050000,37.920000,14.000000,12713.000000,2558.000000,4741.000000,2412.000000,4.709400,234700.000000 +-122.050000,37.900000,32.000000,4498.000000,862.000000,1818.000000,851.000000,4.808800,321200.000000 +-122.050000,37.900000,32.000000,2676.000000,484.000000,986.000000,473.000000,4.652800,335700.000000 
+-122.050000,37.900000,24.000000,4125.000000,1020.000000,1699.000000,873.000000,2.952600,271000.000000 +-122.050000,37.890000,37.000000,1677.000000,269.000000,689.000000,283.000000,4.262500,310600.000000 +-122.050000,37.870000,30.000000,2296.000000,329.000000,847.000000,322.000000,6.719200,397500.000000 +-122.050000,37.680000,32.000000,2015.000000,318.000000,1019.000000,340.000000,6.110400,240700.000000 +-122.050000,37.680000,23.000000,7518.000000,1279.000000,3827.000000,1294.000000,5.170100,216800.000000 +-122.050000,37.610000,16.000000,1642.000000,346.000000,705.000000,351.000000,2.897100,163900.000000 +-122.050000,37.590000,15.000000,6243.000000,1273.000000,3163.000000,1274.000000,3.746200,212500.000000 +-122.050000,37.570000,7.000000,10648.000000,1818.000000,6075.000000,1797.000000,6.104700,278200.000000 +-122.050000,37.550000,23.000000,4247.000000,835.000000,2357.000000,823.000000,5.132100,211300.000000 +-122.050000,37.540000,25.000000,4209.000000,731.000000,2568.000000,703.000000,5.288200,223100.000000 +-122.050000,37.390000,25.000000,347.000000,82.000000,148.000000,77.000000,4.453100,350000.000000 +-122.050000,37.380000,29.000000,1875.000000,340.000000,816.000000,350.000000,5.435100,336500.000000 +-122.050000,37.380000,23.000000,3200.000000,907.000000,2029.000000,866.000000,3.564900,450000.000000 +-122.050000,37.370000,35.000000,1365.000000,256.000000,662.000000,262.000000,5.653300,291400.000000 +-122.050000,37.370000,27.000000,2687.000000,768.000000,1362.000000,725.000000,3.402800,324200.000000 +-122.050000,37.360000,29.000000,1733.000000,255.000000,679.000000,278.000000,7.533700,406800.000000 +-122.050000,37.360000,27.000000,2621.000000,513.000000,1063.000000,523.000000,3.984800,409700.000000 +-122.050000,37.350000,34.000000,2494.000000,375.000000,1399.000000,382.000000,7.375300,388100.000000 +-122.050000,37.340000,34.000000,2515.000000,401.000000,1079.000000,399.000000,7.786500,423900.000000 +-122.050000,37.340000,31.000000,1443.000000,215.000000,627.000000,222.000000,6.608700,416500.000000 +-122.050000,37.330000,21.000000,2052.000000,346.000000,933.000000,351.000000,5.316700,416300.000000 +-122.050000,37.330000,17.000000,3674.000000,824.000000,1364.000000,694.000000,6.313100,436400.000000 +-122.050000,37.310000,25.000000,4111.000000,538.000000,1585.000000,568.000000,9.229800,500001.000000 +-122.050000,37.110000,39.000000,1065.000000,248.000000,497.000000,208.000000,4.597200,146300.000000 +-122.050000,36.970000,20.000000,2428.000000,473.000000,1145.000000,454.000000,3.679700,263800.000000 +-122.050000,36.970000,16.000000,3363.000000,611.000000,1603.000000,556.000000,4.254200,294100.000000 +-122.050000,36.960000,30.000000,971.000000,185.000000,644.000000,173.000000,4.204500,226500.000000 +-122.050000,36.870000,18.000000,2232.000000,440.000000,1091.000000,458.000000,3.826900,276000.000000 +-122.060000,40.550000,17.000000,3057.000000,577.000000,1497.000000,556.000000,3.518900,101000.000000 +-122.060000,40.020000,32.000000,1435.000000,277.000000,690.000000,254.000000,2.304300,68400.000000 +-122.060000,38.270000,14.000000,6920.000000,996.000000,3196.000000,978.000000,5.067200,171300.000000 +-122.060000,38.260000,36.000000,1248.000000,221.000000,672.000000,222.000000,3.383900,105900.000000 +-122.060000,38.250000,36.000000,1818.000000,323.000000,953.000000,298.000000,3.315300,99000.000000 +-122.060000,38.250000,34.000000,1562.000000,289.000000,898.000000,307.000000,3.359800,107200.000000 +-122.060000,37.990000,17.000000,1319.000000,316.000000,384.000000,269.000000,1.822900,137500.000000 
+-122.060000,37.990000,16.000000,2445.000000,469.000000,721.000000,474.000000,2.804300,87500.000000 +-122.060000,37.960000,37.000000,1784.000000,313.000000,788.000000,304.000000,4.291700,189600.000000 +-122.060000,37.960000,10.000000,7136.000000,1691.000000,2959.000000,1507.000000,3.981600,182000.000000 +-122.060000,37.950000,36.000000,2213.000000,386.000000,950.000000,370.000000,4.738600,186400.000000 +-122.060000,37.940000,19.000000,4005.000000,972.000000,1896.000000,893.000000,2.526800,235700.000000 +-122.060000,37.910000,15.000000,5393.000000,1422.000000,2133.000000,1288.000000,4.161200,232800.000000 +-122.060000,37.900000,25.000000,5869.000000,1685.000000,2669.000000,1554.000000,2.699800,216100.000000 +-122.060000,37.890000,21.000000,4985.000000,1590.000000,2575.000000,1458.000000,3.100200,114300.000000 +-122.060000,37.880000,34.000000,4781.000000,703.000000,1879.000000,714.000000,6.537800,340900.000000 +-122.060000,37.850000,17.000000,7475.000000,1556.000000,2092.000000,1449.000000,3.643700,186500.000000 +-122.060000,37.770000,12.000000,14316.000000,2045.000000,5781.000000,2007.000000,7.263400,341600.000000 +-122.060000,37.730000,5.000000,3596.000000,467.000000,1738.000000,512.000000,7.056800,412500.000000 +-122.060000,37.700000,37.000000,1893.000000,310.000000,821.000000,315.000000,4.600500,231600.000000 +-122.060000,37.670000,22.000000,3882.000000,816.000000,1830.000000,743.000000,4.273300,180700.000000 +-122.060000,37.650000,33.000000,1227.000000,286.000000,848.000000,291.000000,3.803600,158200.000000 +-122.060000,37.640000,37.000000,1468.000000,304.000000,1038.000000,282.000000,4.165200,158200.000000 +-122.060000,37.640000,33.000000,1160.000000,252.000000,729.000000,220.000000,3.825900,146100.000000 +-122.060000,37.640000,20.000000,1655.000000,450.000000,857.000000,430.000000,3.554100,350000.000000 +-122.060000,37.630000,23.000000,1939.000000,356.000000,841.000000,364.000000,3.361100,169200.000000 +-122.060000,37.630000,12.000000,6711.000000,1374.000000,3388.000000,1289.000000,3.862500,208900.000000 +-122.060000,37.600000,22.000000,3009.000000,497.000000,1640.000000,514.000000,4.625000,235300.000000 +-122.060000,37.600000,18.000000,1726.000000,276.000000,1186.000000,310.000000,5.322600,231700.000000 +-122.060000,37.600000,17.000000,5159.000000,832.000000,3174.000000,817.000000,5.870400,234400.000000 +-122.060000,37.580000,15.000000,8112.000000,1376.000000,4576.000000,1348.000000,5.675800,253400.000000 +-122.060000,37.390000,26.000000,18.000000,4.000000,8.000000,4.000000,3.750000,375000.000000 +-122.060000,37.380000,21.000000,1798.000000,399.000000,837.000000,410.000000,5.699900,470000.000000 +-122.060000,37.380000,20.000000,3401.000000,768.000000,1497.000000,747.000000,4.218800,500001.000000 +-122.060000,37.370000,32.000000,2510.000000,578.000000,1160.000000,581.000000,4.908700,322700.000000 +-122.060000,37.370000,18.000000,3058.000000,661.000000,1377.000000,675.000000,6.129900,500001.000000 +-122.060000,37.360000,35.000000,2693.000000,493.000000,1343.000000,455.000000,6.077700,327500.000000 +-122.060000,37.360000,34.000000,1747.000000,250.000000,662.000000,257.000000,6.826800,500001.000000 +-122.060000,37.350000,31.000000,1795.000000,281.000000,872.000000,282.000000,8.059900,381800.000000 +-122.060000,37.350000,30.000000,2040.000000,294.000000,787.000000,278.000000,8.758000,500001.000000 +-122.060000,37.340000,20.000000,3435.000000,593.000000,1293.000000,553.000000,6.757800,451400.000000 
+-122.060000,37.340000,13.000000,2057.000000,466.000000,790.000000,436.000000,5.008100,288300.000000 +-122.060000,37.330000,29.000000,1945.000000,269.000000,826.000000,275.000000,8.248000,498800.000000 +-122.060000,37.320000,30.000000,3033.000000,540.000000,1440.000000,507.000000,6.218200,380800.000000 +-122.060000,37.300000,11.000000,5488.000000,706.000000,1947.000000,641.000000,10.732600,500001.000000 +-122.060000,37.000000,14.000000,1547.000000,374.000000,4731.000000,348.000000,2.473200,131300.000000 +-122.060000,36.960000,52.000000,65.000000,17.000000,24.000000,10.000000,4.500000,258300.000000 +-122.070000,40.950000,14.000000,2721.000000,627.000000,1356.000000,468.000000,3.029900,73200.000000 +-122.070000,38.410000,17.000000,3053.000000,595.000000,1434.000000,557.000000,3.474100,245800.000000 +-122.070000,38.270000,8.000000,6761.000000,1234.000000,3237.000000,1177.000000,4.358600,173400.000000 +-122.070000,38.260000,15.000000,1173.000000,146.000000,450.000000,154.000000,6.048700,197700.000000 +-122.070000,38.240000,15.000000,7937.000000,1635.000000,4390.000000,1567.000000,3.546400,129800.000000 +-122.070000,37.990000,28.000000,3310.000000,574.000000,1811.000000,597.000000,4.540100,166900.000000 +-122.070000,37.980000,12.000000,6915.000000,1639.000000,2940.000000,1468.000000,4.015400,186100.000000 +-122.070000,37.970000,20.000000,1705.000000,353.000000,856.000000,341.000000,3.726200,211800.000000 +-122.070000,37.960000,34.000000,1692.000000,290.000000,836.000000,289.000000,5.017200,197100.000000 +-122.070000,37.950000,39.000000,2199.000000,388.000000,1025.000000,385.000000,4.589300,190000.000000 +-122.070000,37.940000,43.000000,1454.000000,234.000000,683.000000,258.000000,4.475000,265700.000000 +-122.070000,37.940000,36.000000,2639.000000,488.000000,1111.000000,476.000000,3.505700,205100.000000 +-122.070000,37.940000,30.000000,1260.000000,276.000000,707.000000,221.000000,2.892000,220800.000000 +-122.070000,37.930000,45.000000,1544.000000,244.000000,614.000000,238.000000,5.025500,226000.000000 +-122.070000,37.930000,25.000000,7201.000000,1521.000000,3264.000000,1433.000000,3.743300,252100.000000 +-122.070000,37.920000,26.000000,3872.000000,739.000000,1629.000000,684.000000,4.431200,225000.000000 +-122.070000,37.910000,33.000000,1550.000000,277.000000,638.000000,254.000000,3.683300,292500.000000 +-122.070000,37.910000,28.000000,1731.000000,295.000000,810.000000,295.000000,5.039100,259800.000000 +-122.070000,37.890000,38.000000,757.000000,124.000000,319.000000,123.000000,5.655800,263300.000000 +-122.070000,37.890000,28.000000,3410.000000,746.000000,1428.000000,670.000000,4.386400,266800.000000 +-122.070000,37.880000,11.000000,1077.000000,318.000000,590.000000,264.000000,3.553600,387200.000000 +-122.070000,37.860000,23.000000,1025.000000,205.000000,263.000000,191.000000,3.120000,155000.000000 +-122.070000,37.860000,17.000000,1102.000000,224.000000,317.000000,208.000000,3.589300,206300.000000 +-122.070000,37.720000,26.000000,3204.000000,477.000000,1411.000000,484.000000,5.483400,295200.000000 +-122.070000,37.710000,40.000000,1808.000000,302.000000,746.000000,270.000000,5.301500,254900.000000 +-122.070000,37.710000,36.000000,2879.000000,480.000000,1235.000000,455.000000,4.980100,241500.000000 +-122.070000,37.700000,39.000000,1420.000000,272.000000,645.000000,277.000000,4.125000,232500.000000 +-122.070000,37.700000,32.000000,3400.000000,736.000000,1487.000000,694.000000,3.000000,223200.000000 
+-122.070000,37.690000,31.000000,5914.000000,1309.000000,2999.000000,1295.000000,3.096400,190500.000000 +-122.070000,37.690000,29.000000,2304.000000,618.000000,1021.000000,552.000000,2.536200,203800.000000 +-122.070000,37.680000,36.000000,1815.000000,426.000000,1280.000000,431.000000,3.250000,218100.000000 +-122.070000,37.670000,38.000000,2104.000000,409.000000,1039.000000,394.000000,3.875000,165300.000000 +-122.070000,37.670000,28.000000,2932.000000,739.000000,1198.000000,624.000000,3.241700,210800.000000 +-122.070000,37.670000,27.000000,3239.000000,671.000000,1469.000000,616.000000,3.246500,230600.000000 +-122.070000,37.660000,21.000000,5031.000000,1168.000000,2461.000000,1042.000000,3.875000,179300.000000 +-122.070000,37.650000,31.000000,3300.000000,790.000000,2181.000000,740.000000,3.016000,161800.000000 +-122.070000,37.640000,25.000000,4524.000000,860.000000,2426.000000,862.000000,4.708300,190900.000000 +-122.070000,37.640000,22.000000,5861.000000,1516.000000,5436.000000,1463.000000,2.515800,134900.000000 +-122.070000,37.630000,35.000000,1931.000000,376.000000,1175.000000,337.000000,3.729200,168100.000000 +-122.070000,37.630000,27.000000,2784.000000,723.000000,2028.000000,693.000000,2.480800,157600.000000 +-122.070000,37.630000,24.000000,2329.000000,465.000000,1401.000000,453.000000,4.591300,177600.000000 +-122.070000,37.590000,13.000000,2578.000000,551.000000,1680.000000,528.000000,4.825000,222000.000000 +-122.070000,37.580000,16.000000,1893.000000,338.000000,1461.000000,344.000000,5.225000,213700.000000 +-122.070000,37.580000,16.000000,1644.000000,251.000000,1033.000000,267.000000,6.511600,244300.000000 +-122.070000,37.580000,16.000000,1606.000000,240.000000,1117.000000,268.000000,6.066100,247000.000000 +-122.070000,37.570000,8.000000,8647.000000,1407.000000,5019.000000,1379.000000,6.561500,318300.000000 +-122.070000,37.520000,3.000000,14014.000000,2861.000000,7205.000000,2753.000000,6.082400,273500.000000 +-122.070000,37.440000,21.000000,4599.000000,986.000000,2756.000000,943.000000,2.981700,225000.000000 +-122.070000,37.410000,26.000000,1184.000000,225.000000,815.000000,218.000000,5.765700,322300.000000 +-122.070000,37.400000,16.000000,3352.000000,813.000000,1440.000000,729.000000,3.735900,262500.000000 +-122.070000,37.400000,15.000000,2940.000000,910.000000,943.000000,711.000000,4.359000,192200.000000 +-122.070000,37.390000,37.000000,1169.000000,239.000000,589.000000,249.000000,5.013100,330300.000000 +-122.070000,37.390000,30.000000,1695.000000,480.000000,932.000000,447.000000,3.504500,352500.000000 +-122.070000,37.390000,19.000000,1465.000000,342.000000,646.000000,345.000000,4.712000,289300.000000 +-122.070000,37.380000,26.000000,1272.000000,306.000000,562.000000,284.000000,4.564400,280200.000000 +-122.070000,37.370000,30.000000,2937.000000,407.000000,1097.000000,407.000000,7.981300,473500.000000 +-122.070000,37.370000,22.000000,3770.000000,727.000000,1657.000000,762.000000,4.802100,457500.000000 +-122.070000,37.360000,28.000000,4612.000000,608.000000,1686.000000,567.000000,10.034600,500001.000000 +-122.070000,37.360000,21.000000,3244.000000,426.000000,1158.000000,415.000000,7.500000,500001.000000 +-122.070000,37.350000,35.000000,1579.000000,210.000000,570.000000,196.000000,8.588800,500001.000000 +-122.070000,37.350000,35.000000,1447.000000,205.000000,619.000000,206.000000,9.814400,500001.000000 +-122.070000,37.340000,35.000000,1172.000000,184.000000,512.000000,175.000000,7.356100,500001.000000 
+-122.070000,37.340000,33.000000,1208.000000,198.000000,495.000000,216.000000,5.465900,500001.000000 +-122.070000,37.330000,13.000000,2173.000000,349.000000,891.000000,345.000000,8.015800,420000.000000 +-122.070000,37.310000,24.000000,4401.000000,698.000000,1818.000000,685.000000,7.298600,500001.000000 +-122.070000,37.080000,21.000000,5639.000000,894.000000,2670.000000,871.000000,6.080900,270000.000000 +-122.070000,37.060000,31.000000,1634.000000,370.000000,939.000000,332.000000,3.862500,232300.000000 +-122.080000,40.090000,19.000000,2611.000000,503.000000,1185.000000,483.000000,2.365700,94000.000000 +-122.080000,38.300000,2.000000,6718.000000,858.000000,2012.000000,654.000000,6.887200,305200.000000 +-122.080000,37.990000,19.000000,4657.000000,739.000000,1914.000000,732.000000,5.050900,199900.000000 +-122.080000,37.960000,21.000000,9135.000000,1534.000000,3748.000000,1502.000000,6.085900,266000.000000 +-122.080000,37.950000,33.000000,1043.000000,157.000000,425.000000,148.000000,4.870200,235600.000000 +-122.080000,37.950000,24.000000,3173.000000,548.000000,1351.000000,536.000000,5.067200,243000.000000 +-122.080000,37.930000,35.000000,4043.000000,689.000000,1832.000000,662.000000,5.076100,233200.000000 +-122.080000,37.920000,28.000000,2377.000000,469.000000,1068.000000,435.000000,4.456100,250000.000000 +-122.080000,37.920000,26.000000,1733.000000,265.000000,796.000000,274.000000,6.195000,264900.000000 +-122.080000,37.890000,39.000000,3018.000000,501.000000,1223.000000,489.000000,6.292400,283900.000000 +-122.080000,37.870000,26.000000,2405.000000,564.000000,680.000000,531.000000,2.489600,73400.000000 +-122.080000,37.840000,17.000000,1320.000000,159.000000,1722.000000,141.000000,11.706400,500001.000000 +-122.080000,37.820000,4.000000,2045.000000,237.000000,830.000000,252.000000,11.342100,500001.000000 +-122.080000,37.720000,31.000000,3866.000000,531.000000,1368.000000,521.000000,6.187000,340400.000000 +-122.080000,37.710000,38.000000,3716.000000,657.000000,1784.000000,652.000000,4.823700,220900.000000 +-122.080000,37.710000,38.000000,1663.000000,295.000000,781.000000,301.000000,5.051900,227000.000000 +-122.080000,37.700000,32.000000,2718.000000,447.000000,1156.000000,410.000000,5.249700,259300.000000 +-122.080000,37.700000,25.000000,3402.000000,758.000000,1645.000000,710.000000,3.493400,209900.000000 +-122.080000,37.690000,43.000000,1575.000000,324.000000,740.000000,284.000000,2.851200,181000.000000 +-122.080000,37.690000,36.000000,2350.000000,499.000000,1105.000000,467.000000,3.302100,195700.000000 +-122.080000,37.680000,37.000000,848.000000,202.000000,314.000000,205.000000,2.395800,190800.000000 +-122.080000,37.680000,26.000000,2607.000000,682.000000,1401.000000,607.000000,2.656300,184100.000000 +-122.080000,37.680000,26.000000,1167.000000,370.000000,253.000000,137.000000,2.419600,275000.000000 +-122.080000,37.680000,15.000000,3051.000000,685.000000,1479.000000,668.000000,3.529500,242200.000000 +-122.080000,37.670000,29.000000,493.000000,168.000000,233.000000,152.000000,0.963700,160000.000000 +-122.080000,37.660000,37.000000,1997.000000,436.000000,1349.000000,437.000000,2.138200,166600.000000 +-122.080000,37.650000,17.000000,5018.000000,1439.000000,3069.000000,1299.000000,2.769400,161900.000000 +-122.080000,37.640000,36.000000,786.000000,133.000000,463.000000,160.000000,3.933800,182700.000000 +-122.080000,37.640000,36.000000,1340.000000,245.000000,789.000000,248.000000,3.800000,172000.000000 +-122.080000,37.640000,36.000000,1116.000000,199.000000,662.000000,226.000000,5.730900,177900.000000 
+-122.080000,37.630000,37.000000,1793.000000,364.000000,1534.000000,346.000000,3.645800,156600.000000 +-122.080000,37.630000,35.000000,517.000000,108.000000,391.000000,107.000000,4.068200,156900.000000 +-122.080000,37.630000,34.000000,1619.000000,293.000000,1148.000000,310.000000,4.032600,164700.000000 +-122.080000,37.630000,33.000000,691.000000,127.000000,431.000000,149.000000,4.250000,192600.000000 +-122.080000,37.630000,31.000000,767.000000,171.000000,548.000000,185.000000,3.761400,176000.000000 +-122.080000,37.620000,27.000000,1826.000000,309.000000,1016.000000,313.000000,5.640000,206500.000000 +-122.080000,37.620000,17.000000,2485.000000,518.000000,1139.000000,550.000000,2.687500,157300.000000 +-122.080000,37.610000,26.000000,2261.000000,443.000000,1039.000000,395.000000,3.793100,203900.000000 +-122.080000,37.600000,10.000000,3046.000000,678.000000,2056.000000,628.000000,3.902200,191700.000000 +-122.080000,37.580000,16.000000,3349.000000,544.000000,2003.000000,488.000000,6.007400,236500.000000 +-122.080000,37.580000,15.000000,2576.000000,418.000000,1657.000000,410.000000,5.521800,254400.000000 +-122.080000,37.410000,20.000000,1896.000000,456.000000,1069.000000,436.000000,4.687500,288900.000000 +-122.080000,37.400000,52.000000,766.000000,203.000000,448.000000,196.000000,2.520800,316700.000000 +-122.080000,37.400000,25.000000,1750.000000,341.000000,999.000000,319.000000,5.806000,308700.000000 +-122.080000,37.400000,19.000000,3565.000000,858.000000,1639.000000,744.000000,4.154400,277000.000000 +-122.080000,37.390000,46.000000,1115.000000,248.000000,543.000000,248.000000,3.208300,334300.000000 +-122.080000,37.390000,44.000000,1498.000000,430.000000,848.000000,400.000000,2.843800,307100.000000 +-122.080000,37.390000,4.000000,2292.000000,605.000000,1050.000000,584.000000,4.803600,340000.000000 +-122.080000,37.390000,39.000000,2210.000000,483.000000,1023.000000,450.000000,4.583300,342400.000000 +-122.080000,37.380000,36.000000,857.000000,156.000000,448.000000,168.000000,5.008600,366700.000000 +-122.080000,37.380000,36.000000,782.000000,130.000000,348.000000,128.000000,6.828000,383900.000000 +-122.080000,37.380000,36.000000,1199.000000,198.000000,485.000000,199.000000,5.079600,373400.000000 +-122.080000,37.380000,33.000000,2771.000000,659.000000,1496.000000,581.000000,3.404200,353600.000000 +-122.080000,37.380000,25.000000,830.000000,228.000000,368.000000,174.000000,3.391700,342900.000000 +-122.080000,37.370000,29.000000,1229.000000,192.000000,707.000000,194.000000,7.110800,465000.000000 +-122.080000,37.360000,31.000000,2717.000000,376.000000,1001.000000,381.000000,9.281000,500001.000000 +-122.080000,37.360000,28.000000,2181.000000,284.000000,728.000000,238.000000,8.226600,500001.000000 +-122.080000,37.350000,33.000000,2398.000000,317.000000,832.000000,314.000000,10.359100,500001.000000 +-122.080000,37.340000,28.000000,1643.000000,216.000000,594.000000,205.000000,12.367000,500001.000000 +-122.080000,37.340000,23.000000,2597.000000,335.000000,922.000000,338.000000,10.514200,500001.000000 +-122.080000,37.310000,17.000000,2560.000000,396.000000,959.000000,400.000000,7.852800,368900.000000 +-122.080000,37.300000,30.000000,2268.000000,404.000000,1197.000000,372.000000,7.081300,485300.000000 +-122.080000,37.240000,21.000000,427.000000,63.000000,182.000000,70.000000,11.328300,500001.000000 +-122.080000,37.150000,23.000000,506.000000,96.000000,264.000000,89.000000,7.136600,273900.000000 +-122.080000,37.080000,35.000000,1541.000000,297.000000,791.000000,277.000000,4.425000,204800.000000 
+-122.080000,37.040000,34.000000,2800.000000,577.000000,1353.000000,512.000000,4.116100,220900.000000 +-122.080000,37.030000,36.000000,4682.000000,899.000000,2143.000000,832.000000,4.509600,203700.000000 +-122.090000,41.320000,52.000000,4019.000000,824.000000,1728.000000,706.000000,2.246200,62900.000000 +-122.090000,39.130000,28.000000,4169.000000,895.000000,2587.000000,810.000000,2.331000,65500.000000 +-122.090000,38.000000,6.000000,10191.000000,1882.000000,4377.000000,1789.000000,5.201500,204200.000000 +-122.090000,37.990000,19.000000,3073.000000,506.000000,1773.000000,493.000000,5.449600,205400.000000 +-122.090000,37.980000,14.000000,5381.000000,871.000000,2296.000000,872.000000,5.687500,211000.000000 +-122.090000,37.970000,5.000000,5303.000000,779.000000,2017.000000,727.000000,6.996100,294100.000000 +-122.090000,37.940000,29.000000,6895.000000,1022.000000,2634.000000,1022.000000,6.192200,273200.000000 +-122.090000,37.910000,18.000000,9576.000000,1455.000000,3486.000000,1380.000000,7.089500,306900.000000 +-122.090000,37.890000,35.000000,880.000000,139.000000,352.000000,132.000000,6.868600,406500.000000 +-122.090000,37.860000,27.000000,5484.000000,760.000000,2212.000000,770.000000,7.620200,402600.000000 +-122.090000,37.710000,35.000000,2663.000000,387.000000,1086.000000,367.000000,5.149800,266400.000000 +-122.090000,37.710000,31.000000,1843.000000,282.000000,749.000000,269.000000,5.285500,253500.000000 +-122.090000,37.700000,31.000000,2053.000000,336.000000,867.000000,329.000000,4.337500,241800.000000 +-122.090000,37.700000,30.000000,1751.000000,269.000000,731.000000,263.000000,6.005000,263900.000000 +-122.090000,37.690000,20.000000,4296.000000,817.000000,1732.000000,800.000000,4.803600,188300.000000 +-122.090000,37.680000,43.000000,1415.000000,348.000000,569.000000,293.000000,2.515600,190900.000000 +-122.090000,37.680000,41.000000,1834.000000,463.000000,1105.000000,467.000000,2.832200,170300.000000 +-122.090000,37.680000,41.000000,1382.000000,353.000000,704.000000,314.000000,3.511400,197500.000000 +-122.090000,37.680000,29.000000,2333.000000,538.000000,1120.000000,540.000000,2.404200,205600.000000 +-122.090000,37.670000,48.000000,1252.000000,305.000000,673.000000,308.000000,2.335700,175000.000000 +-122.090000,37.670000,39.000000,2069.000000,500.000000,1408.000000,478.000000,3.111500,153500.000000 +-122.090000,37.670000,33.000000,2431.000000,655.000000,1854.000000,603.000000,2.701900,154000.000000 +-122.090000,37.660000,40.000000,1340.000000,313.000000,766.000000,271.000000,3.472200,135400.000000 +-122.090000,37.660000,39.000000,1160.000000,259.000000,725.000000,274.000000,2.222200,158300.000000 +-122.090000,37.650000,35.000000,1184.000000,200.000000,572.000000,194.000000,4.714300,193800.000000 +-122.090000,37.650000,27.000000,2630.000000,722.000000,1414.000000,634.000000,2.820300,195200.000000 +-122.090000,37.640000,36.000000,1885.000000,307.000000,853.000000,271.000000,4.114100,173100.000000 +-122.090000,37.640000,36.000000,1180.000000,212.000000,664.000000,200.000000,5.283800,172600.000000 +-122.090000,37.640000,32.000000,1578.000000,284.000000,836.000000,292.000000,3.906300,184200.000000 +-122.090000,37.630000,35.000000,1213.000000,221.000000,790.000000,243.000000,4.701900,174100.000000 +-122.090000,37.630000,34.000000,1457.000000,242.000000,735.000000,249.000000,3.916700,189500.000000 +-122.090000,37.600000,36.000000,385.000000,94.000000,295.000000,92.000000,2.970600,147900.000000 +-122.090000,37.420000,23.000000,4874.000000,1251.000000,2699.000000,1163.000000,3.800300,229800.000000 
+-122.090000,37.410000,8.000000,1480.000000,414.000000,856.000000,445.000000,2.820300,284100.000000 +-122.090000,37.410000,18.000000,1476.000000,473.000000,838.000000,415.000000,3.575000,274000.000000 +-122.090000,37.410000,14.000000,753.000000,193.000000,421.000000,153.000000,4.246300,266700.000000 +-122.090000,37.400000,36.000000,1575.000000,379.000000,1036.000000,382.000000,5.140800,264700.000000 +-122.090000,37.400000,26.000000,3218.000000,1021.000000,2087.000000,964.000000,3.287500,182700.000000 +-122.090000,37.400000,24.000000,3983.000000,1126.000000,2645.000000,1072.000000,3.674200,275000.000000 +-122.090000,37.400000,17.000000,748.000000,184.000000,412.000000,180.000000,3.437500,290600.000000 +-122.090000,37.390000,36.000000,1035.000000,196.000000,475.000000,205.000000,5.538500,359000.000000 +-122.090000,37.390000,34.000000,1508.000000,483.000000,774.000000,443.000000,2.727900,365600.000000 +-122.090000,37.390000,30.000000,1722.000000,490.000000,1057.000000,517.000000,3.725000,261300.000000 +-122.090000,37.370000,34.000000,2165.000000,355.000000,776.000000,339.000000,5.297100,442100.000000 +-122.090000,37.370000,27.000000,1269.000000,186.000000,464.000000,182.000000,6.837400,500001.000000 +-122.090000,37.360000,37.000000,2269.000000,325.000000,930.000000,321.000000,7.527400,500001.000000 +-122.090000,37.360000,37.000000,1550.000000,238.000000,805.000000,250.000000,5.022200,500001.000000 +-122.090000,37.350000,37.000000,1795.000000,285.000000,791.000000,261.000000,7.579400,500001.000000 +-122.090000,37.350000,30.000000,1502.000000,186.000000,501.000000,180.000000,10.025900,500001.000000 +-122.090000,37.110000,32.000000,2637.000000,489.000000,1031.000000,410.000000,3.647400,231600.000000 +-122.090000,37.090000,46.000000,695.000000,136.000000,408.000000,148.000000,3.940800,222600.000000 +-122.090000,37.070000,33.000000,3581.000000,734.000000,1780.000000,663.000000,4.342900,214300.000000 +-122.100000,40.050000,26.000000,633.000000,129.000000,305.000000,140.000000,2.182700,72700.000000 +-122.100000,40.030000,25.000000,2516.000000,543.000000,1266.000000,494.000000,1.756600,58400.000000 +-122.100000,39.470000,43.000000,1320.000000,215.000000,512.000000,197.000000,2.491700,77100.000000 +-122.100000,38.240000,13.000000,7367.000000,1042.000000,3193.000000,983.000000,5.310200,195000.000000 +-122.100000,38.020000,28.000000,4308.000000,824.000000,2086.000000,776.000000,3.652300,159700.000000 +-122.100000,37.970000,18.000000,4326.000000,655.000000,1753.000000,646.000000,5.693100,269600.000000 +-122.100000,37.960000,25.000000,1374.000000,206.000000,569.000000,235.000000,6.369900,235500.000000 +-122.100000,37.960000,20.000000,3796.000000,650.000000,1679.000000,611.000000,4.357100,228200.000000 +-122.100000,37.890000,21.000000,3282.000000,653.000000,1398.000000,601.000000,5.207900,310300.000000 +-122.100000,37.880000,35.000000,3701.000000,528.000000,1511.000000,517.000000,7.231500,367100.000000 +-122.100000,37.720000,30.000000,2599.000000,366.000000,922.000000,350.000000,5.838200,330200.000000 +-122.100000,37.710000,27.000000,6740.000000,1073.000000,2723.000000,1035.000000,5.213100,252500.000000 +-122.100000,37.700000,25.000000,2973.000000,622.000000,1413.000000,595.000000,4.381900,209200.000000 +-122.100000,37.690000,44.000000,2341.000000,500.000000,1256.000000,485.000000,2.950700,157100.000000 +-122.100000,37.690000,41.000000,746.000000,145.000000,387.000000,161.000000,3.906300,178400.000000 +-122.100000,37.690000,30.000000,3115.000000,625.000000,1444.000000,568.000000,3.722200,195800.000000 
+-122.100000,37.680000,38.000000,1779.000000,413.000000,1061.000000,400.000000,3.096200,180900.000000 +-122.100000,37.680000,37.000000,1352.000000,342.000000,691.000000,324.000000,3.403200,196900.000000 +-122.100000,37.670000,34.000000,3659.000000,897.000000,2479.000000,903.000000,2.956400,150500.000000 +-122.100000,37.660000,37.000000,901.000000,191.000000,599.000000,206.000000,3.730300,149700.000000 +-122.100000,37.660000,36.000000,1305.000000,225.000000,768.000000,234.000000,4.275000,185300.000000 +-122.100000,37.660000,35.000000,686.000000,142.000000,480.000000,149.000000,3.875000,162100.000000 +-122.100000,37.660000,34.000000,656.000000,115.000000,342.000000,112.000000,4.687500,200600.000000 +-122.100000,37.660000,33.000000,1954.000000,464.000000,1293.000000,448.000000,3.048900,152600.000000 +-122.100000,37.650000,25.000000,2538.000000,494.000000,1185.000000,501.000000,4.541700,194400.000000 +-122.100000,37.640000,28.000000,1784.000000,311.000000,735.000000,278.000000,4.663500,206700.000000 +-122.100000,37.630000,29.000000,2172.000000,435.000000,1377.000000,408.000000,3.789500,180900.000000 +-122.100000,37.630000,18.000000,9963.000000,2031.000000,5613.000000,1946.000000,3.817100,187200.000000 +-122.100000,37.410000,33.000000,6277.000000,1274.000000,3025.000000,1211.000000,5.472100,343300.000000 +-122.100000,37.400000,27.000000,3410.000000,1156.000000,2314.000000,1086.000000,3.486800,165600.000000 +-122.100000,37.400000,23.000000,514.000000,210.000000,367.000000,206.000000,3.173600,181300.000000 +-122.100000,37.400000,23.000000,1755.000000,508.000000,1374.000000,506.000000,4.307700,293500.000000 +-122.100000,37.400000,19.000000,1085.000000,288.000000,1009.000000,305.000000,3.909100,276000.000000 +-122.100000,37.390000,36.000000,1860.000000,367.000000,794.000000,366.000000,5.087100,354500.000000 +-122.100000,37.390000,35.000000,2471.000000,349.000000,881.000000,342.000000,7.622900,500001.000000 +-122.100000,37.390000,31.000000,1117.000000,304.000000,591.000000,302.000000,3.590900,353100.000000 +-122.100000,37.380000,37.000000,4167.000000,612.000000,1577.000000,597.000000,7.565500,500001.000000 +-122.100000,37.370000,40.000000,2224.000000,354.000000,929.000000,345.000000,8.106400,500001.000000 +-122.100000,37.370000,37.000000,2511.000000,354.000000,945.000000,348.000000,8.392400,500001.000000 +-122.100000,37.360000,32.000000,1433.000000,199.000000,498.000000,201.000000,9.358600,500001.000000 +-122.100000,37.190000,18.000000,808.000000,136.000000,420.000000,145.000000,7.183100,273300.000000 +-122.110000,39.820000,27.000000,1065.000000,214.000000,508.000000,198.000000,2.625000,91700.000000 +-122.110000,38.010000,41.000000,1345.000000,272.000000,718.000000,283.000000,3.383100,129400.000000 +-122.110000,38.010000,39.000000,1313.000000,306.000000,575.000000,231.000000,3.171100,116100.000000 +-122.110000,38.000000,9.000000,3424.000000,583.000000,1460.000000,543.000000,5.760000,212600.000000 +-122.110000,37.990000,16.000000,3913.000000,710.000000,1782.000000,676.000000,5.129700,206700.000000 +-122.110000,37.990000,10.000000,2864.000000,514.000000,1300.000000,507.000000,4.387500,287700.000000 +-122.110000,37.980000,11.000000,4371.000000,679.000000,1790.000000,660.000000,6.135000,297300.000000 +-122.110000,37.880000,37.000000,4005.000000,614.000000,1602.000000,606.000000,6.466600,348200.000000 +-122.110000,37.870000,33.000000,3398.000000,500.000000,1351.000000,457.000000,6.581400,314200.000000 +-122.110000,37.830000,19.000000,5130.000000,741.000000,1887.000000,712.000000,7.203000,369900.000000 
+-122.110000,37.710000,36.000000,4569.000000,824.000000,1950.000000,819.000000,4.650000,206800.000000 +-122.110000,37.700000,29.000000,1298.000000,306.000000,835.000000,338.000000,2.327400,170400.000000 +-122.110000,37.700000,19.000000,2693.000000,789.000000,1765.000000,724.000000,2.420600,137500.000000 +-122.110000,37.690000,42.000000,1472.000000,310.000000,768.000000,309.000000,3.464300,160900.000000 +-122.110000,37.690000,37.000000,2444.000000,651.000000,1562.000000,618.000000,2.646400,155200.000000 +-122.110000,37.680000,37.000000,1976.000000,481.000000,1197.000000,465.000000,2.577200,170200.000000 +-122.110000,37.670000,38.000000,1035.000000,247.000000,599.000000,224.000000,3.091700,167200.000000 +-122.110000,37.670000,36.000000,2110.000000,389.000000,952.000000,370.000000,3.800000,187500.000000 +-122.110000,37.670000,32.000000,3028.000000,811.000000,2037.000000,703.000000,3.064500,165400.000000 +-122.110000,37.660000,36.000000,1755.000000,316.000000,913.000000,299.000000,4.130200,172700.000000 +-122.110000,37.650000,18.000000,4335.000000,808.000000,2041.000000,734.000000,3.486100,331600.000000 +-122.110000,37.640000,8.000000,3592.000000,849.000000,1907.000000,746.000000,3.670800,197900.000000 +-122.110000,37.640000,31.000000,1487.000000,280.000000,854.000000,301.000000,5.231200,197600.000000 +-122.110000,37.440000,35.000000,2016.000000,349.000000,1023.000000,376.000000,5.641300,376600.000000 +-122.110000,37.430000,35.000000,3905.000000,565.000000,1562.000000,553.000000,7.313000,463700.000000 +-122.110000,37.430000,35.000000,3584.000000,535.000000,1405.000000,538.000000,7.302300,451300.000000 +-122.110000,37.420000,32.000000,3058.000000,595.000000,1267.000000,540.000000,6.494900,417800.000000 +-122.110000,37.410000,35.000000,2712.000000,428.000000,1084.000000,425.000000,7.138200,443800.000000 +-122.110000,37.410000,33.000000,1641.000000,284.000000,659.000000,282.000000,6.088400,432900.000000 +-122.110000,37.400000,16.000000,1994.000000,489.000000,1173.000000,472.000000,4.187500,266400.000000 +-122.110000,37.390000,36.000000,1660.000000,261.000000,655.000000,249.000000,6.396700,500001.000000 +-122.110000,37.380000,36.000000,3598.000000,500.000000,1296.000000,533.000000,7.817700,500001.000000 +-122.110000,37.380000,22.000000,3638.000000,719.000000,1329.000000,650.000000,5.080400,500001.000000 +-122.110000,37.370000,22.000000,1477.000000,195.000000,520.000000,187.000000,10.332900,500001.000000 +-122.110000,37.360000,34.000000,1575.000000,183.000000,511.000000,180.000000,13.186700,500001.000000 +-122.110000,37.310000,7.000000,189.000000,26.000000,84.000000,29.000000,13.809300,500001.000000 +-122.110000,37.050000,18.000000,3337.000000,549.000000,1449.000000,519.000000,5.141200,315800.000000 +-122.120000,40.140000,34.000000,1950.000000,407.000000,1029.000000,376.000000,2.519700,82300.000000 +-122.120000,39.910000,16.000000,4006.000000,797.000000,2028.000000,752.000000,2.392900,77200.000000 +-122.120000,38.010000,50.000000,1738.000000,355.000000,837.000000,363.000000,3.609000,135700.000000 +-122.120000,38.010000,50.000000,1300.000000,263.000000,691.000000,239.000000,3.951900,126500.000000 +-122.120000,38.010000,42.000000,2225.000000,367.000000,864.000000,381.000000,4.118900,172400.000000 +-122.120000,37.990000,33.000000,1660.000000,277.000000,741.000000,261.000000,4.675000,225400.000000 +-122.120000,37.940000,22.000000,4949.000000,626.000000,1850.000000,590.000000,10.454900,500001.000000 +-122.120000,37.890000,30.000000,3227.000000,733.000000,1260.000000,684.000000,4.125000,257100.000000 
+-122.120000,37.880000,35.000000,2785.000000,362.000000,1001.000000,363.000000,8.044800,433300.000000 +-122.120000,37.850000,18.000000,5252.000000,686.000000,1870.000000,657.000000,8.007400,454100.000000 +-122.120000,37.820000,26.000000,2269.000000,317.000000,918.000000,313.000000,6.665700,364500.000000 +-122.120000,37.810000,26.000000,4048.000000,513.000000,1486.000000,498.000000,7.671700,416500.000000 +-122.120000,37.750000,33.000000,1809.000000,261.000000,808.000000,219.000000,6.860000,250000.000000 +-122.120000,37.710000,38.000000,1164.000000,284.000000,632.000000,289.000000,3.034500,152100.000000 +-122.120000,37.710000,35.000000,1037.000000,207.000000,552.000000,210.000000,4.000000,167900.000000 +-122.120000,37.700000,41.000000,3495.000000,787.000000,1849.000000,750.000000,2.679000,144900.000000 +-122.120000,37.700000,19.000000,2495.000000,635.000000,1571.000000,579.000000,2.583300,159900.000000 +-122.120000,37.700000,17.000000,2488.000000,617.000000,1287.000000,538.000000,2.992200,179900.000000 +-122.120000,37.690000,35.000000,2681.000000,508.000000,1580.000000,536.000000,4.104200,179100.000000 +-122.120000,37.680000,45.000000,2179.000000,401.000000,1159.000000,399.000000,3.483900,180600.000000 +-122.120000,37.680000,40.000000,1553.000000,253.000000,724.000000,267.000000,4.380000,196400.000000 +-122.120000,37.680000,37.000000,2412.000000,394.000000,975.000000,375.000000,4.041700,191100.000000 +-122.120000,37.680000,35.000000,1958.000000,484.000000,1146.000000,448.000000,2.950000,148900.000000 +-122.120000,37.670000,33.000000,3429.000000,681.000000,1798.000000,694.000000,3.939500,184700.000000 +-122.120000,37.650000,26.000000,162.000000,27.000000,86.000000,25.000000,2.375000,137500.000000 +-122.120000,37.640000,40.000000,432.000000,102.000000,264.000000,77.000000,3.887500,228100.000000 +-122.120000,37.480000,36.000000,880.000000,177.000000,795.000000,188.000000,3.819400,159400.000000 +-122.120000,37.440000,33.000000,2974.000000,623.000000,1435.000000,588.000000,5.485000,406300.000000 +-122.120000,37.440000,33.000000,1509.000000,303.000000,748.000000,268.000000,4.875000,373400.000000 +-122.120000,37.430000,36.000000,3212.000000,553.000000,1455.000000,574.000000,6.460000,425500.000000 +-122.120000,37.430000,33.000000,3262.000000,668.000000,1411.000000,626.000000,5.316000,398100.000000 +-122.120000,37.420000,36.000000,2607.000000,551.000000,1165.000000,523.000000,5.152400,373100.000000 +-122.120000,37.420000,35.000000,2445.000000,533.000000,1187.000000,519.000000,5.280300,362100.000000 +-122.120000,37.410000,33.000000,2892.000000,617.000000,1250.000000,581.000000,5.372700,360900.000000 +-122.120000,37.400000,32.000000,3514.000000,473.000000,1583.000000,480.000000,10.389400,500001.000000 +-122.120000,37.400000,31.000000,2356.000000,405.000000,921.000000,358.000000,7.024500,500001.000000 +-122.120000,37.390000,34.000000,3561.000000,497.000000,1336.000000,501.000000,8.917200,500001.000000 +-122.120000,37.380000,34.000000,1443.000000,218.000000,504.000000,200.000000,8.470900,500001.000000 +-122.120000,37.370000,18.000000,1617.000000,231.000000,555.000000,222.000000,8.902100,500001.000000 +-122.120000,37.290000,11.000000,436.000000,70.000000,212.000000,75.000000,8.619600,500001.000000 +-122.120000,37.160000,32.000000,1602.000000,317.000000,752.000000,275.000000,5.166400,185100.000000 +-122.120000,37.120000,51.000000,2419.000000,485.000000,1078.000000,435.000000,2.793300,206900.000000 +-122.120000,37.090000,36.000000,1397.000000,289.000000,661.000000,243.000000,4.125000,239600.000000 
+-122.130000,39.740000,20.000000,1401.000000,280.000000,668.000000,250.000000,2.256900,94300.000000 +-122.130000,39.000000,23.000000,3832.000000,774.000000,2435.000000,747.000000,2.275400,59200.000000 +-122.130000,38.260000,40.000000,1538.000000,255.000000,669.000000,263.000000,3.328100,170200.000000 +-122.130000,38.020000,52.000000,2378.000000,508.000000,940.000000,451.000000,2.958300,166000.000000 +-122.130000,38.010000,48.000000,2123.000000,494.000000,859.000000,474.000000,1.852300,144800.000000 +-122.130000,38.000000,33.000000,2821.000000,652.000000,1206.000000,640.000000,2.548100,150800.000000 +-122.130000,37.890000,27.000000,744.000000,214.000000,295.000000,169.000000,2.741100,350000.000000 +-122.130000,37.870000,18.000000,1820.000000,220.000000,728.000000,229.000000,10.371300,426100.000000 +-122.130000,37.770000,24.000000,2459.000000,317.000000,916.000000,324.000000,7.071200,293000.000000 +-122.130000,37.760000,26.000000,3266.000000,491.000000,1222.000000,533.000000,5.370000,275400.000000 +-122.130000,37.750000,36.000000,768.000000,93.000000,229.000000,93.000000,5.360200,330000.000000 +-122.130000,37.750000,30.000000,414.000000,54.000000,137.000000,50.000000,4.975000,311100.000000 +-122.130000,37.740000,41.000000,4400.000000,666.000000,1476.000000,648.000000,5.000000,248900.000000 +-122.130000,37.730000,33.000000,1996.000000,268.000000,686.000000,270.000000,6.909600,341800.000000 +-122.130000,37.720000,45.000000,2315.000000,451.000000,1006.000000,444.000000,3.524000,186200.000000 +-122.130000,37.720000,35.000000,2183.000000,383.000000,976.000000,392.000000,3.839300,243500.000000 +-122.130000,37.720000,25.000000,1134.000000,153.000000,340.000000,171.000000,6.509500,371200.000000 +-122.130000,37.710000,44.000000,1613.000000,339.000000,776.000000,346.000000,3.110300,188900.000000 +-122.130000,37.710000,44.000000,1421.000000,298.000000,609.000000,270.000000,3.578100,180000.000000 +-122.130000,37.700000,43.000000,3046.000000,557.000000,1333.000000,544.000000,3.458300,183700.000000 +-122.130000,37.700000,19.000000,3516.000000,710.000000,1810.000000,703.000000,3.903200,218000.000000 +-122.130000,37.690000,34.000000,1131.000000,278.000000,560.000000,237.000000,2.875000,161700.000000 +-122.130000,37.690000,17.000000,2380.000000,769.000000,1216.000000,643.000000,3.395000,271300.000000 +-122.130000,37.680000,45.000000,2457.000000,445.000000,1129.000000,422.000000,4.058800,182800.000000 +-122.130000,37.680000,43.000000,1676.000000,340.000000,924.000000,328.000000,3.600000,179400.000000 +-122.130000,37.670000,42.000000,3592.000000,703.000000,1625.000000,665.000000,3.243400,179900.000000 +-122.130000,37.670000,40.000000,1748.000000,318.000000,914.000000,317.000000,3.867600,184000.000000 +-122.130000,37.670000,38.000000,2012.000000,347.000000,880.000000,332.000000,3.173400,181600.000000 +-122.130000,37.470000,30.000000,1480.000000,294.000000,1126.000000,301.000000,4.983000,166700.000000 +-122.130000,37.470000,25.000000,1630.000000,353.000000,1546.000000,371.000000,5.089300,173400.000000 +-122.130000,37.460000,37.000000,1576.000000,334.000000,1385.000000,323.000000,2.529400,159400.000000 +-122.130000,37.460000,35.000000,1321.000000,300.000000,1133.000000,287.000000,3.731200,159600.000000 +-122.130000,37.450000,41.000000,3233.000000,540.000000,1251.000000,506.000000,6.635400,500001.000000 +-122.130000,37.450000,37.000000,2295.000000,332.000000,933.000000,332.000000,6.725700,500001.000000 +-122.130000,37.450000,37.000000,1287.000000,197.000000,510.000000,206.000000,7.902900,500001.000000 
+-122.130000,37.440000,43.000000,3004.000000,440.000000,1088.000000,427.000000,9.150800,500001.000000 +-122.130000,37.440000,42.000000,2390.000000,462.000000,1146.000000,468.000000,6.311100,397400.000000 +-122.130000,37.440000,38.000000,2835.000000,447.000000,1148.000000,446.000000,5.927700,446600.000000 +-122.130000,37.430000,32.000000,4398.000000,878.000000,1799.000000,792.000000,4.737500,431900.000000 +-122.130000,37.420000,36.000000,3982.000000,1045.000000,2251.000000,995.000000,3.536400,314100.000000 +-122.130000,37.410000,36.000000,4787.000000,900.000000,2039.000000,890.000000,5.406300,440900.000000 +-122.130000,37.400000,29.000000,6027.000000,1195.000000,2687.000000,1171.000000,5.133500,461200.000000 +-122.130000,37.390000,27.000000,3385.000000,427.000000,1248.000000,409.000000,12.037200,500001.000000 +-122.130000,36.970000,27.000000,991.000000,194.000000,543.000000,155.000000,4.718800,350000.000000 +-122.140000,39.650000,33.000000,419.000000,77.000000,190.000000,67.000000,3.642900,87500.000000 +-122.140000,38.160000,4.000000,3273.000000,495.000000,1497.000000,454.000000,5.334500,176100.000000 +-122.140000,38.070000,31.000000,3401.000000,616.000000,1750.000000,602.000000,4.676100,143100.000000 +-122.140000,38.050000,27.000000,3794.000000,772.000000,1756.000000,724.000000,3.289100,150600.000000 +-122.140000,38.020000,44.000000,1625.000000,432.000000,825.000000,385.000000,2.052300,133900.000000 +-122.140000,38.010000,50.000000,1760.000000,341.000000,741.000000,316.000000,4.500000,178300.000000 +-122.140000,37.880000,34.000000,6986.000000,1096.000000,2865.000000,1124.000000,6.227500,394400.000000 +-122.140000,37.860000,20.000000,6201.000000,1182.000000,2415.000000,1141.000000,4.574400,314000.000000 +-122.140000,37.850000,27.000000,9147.000000,1276.000000,3371.000000,1269.000000,7.326700,389900.000000 +-122.140000,37.770000,27.000000,2229.000000,365.000000,1297.000000,355.000000,4.830400,279100.000000 +-122.140000,37.760000,34.000000,1513.000000,231.000000,545.000000,211.000000,5.570100,252800.000000 +-122.140000,37.750000,36.000000,690.000000,105.000000,299.000000,109.000000,4.031300,195500.000000 +-122.140000,37.740000,52.000000,1071.000000,201.000000,440.000000,192.000000,4.066200,204200.000000 +-122.140000,37.730000,52.000000,2024.000000,320.000000,823.000000,334.000000,5.000000,264700.000000 +-122.140000,37.730000,43.000000,2264.000000,390.000000,931.000000,368.000000,3.812500,235100.000000 +-122.140000,37.730000,38.000000,1723.000000,394.000000,711.000000,353.000000,3.067300,218400.000000 +-122.140000,37.720000,45.000000,1397.000000,253.000000,555.000000,248.000000,2.983000,202700.000000 +-122.140000,37.720000,39.000000,786.000000,132.000000,288.000000,132.000000,3.515600,218900.000000 +-122.140000,37.710000,27.000000,3094.000000,866.000000,1364.000000,789.000000,2.610100,181700.000000 +-122.140000,37.710000,18.000000,3905.000000,1007.000000,2197.000000,1044.000000,3.693200,166800.000000 +-122.140000,37.700000,36.000000,1266.000000,228.000000,606.000000,239.000000,3.970200,194100.000000 +-122.140000,37.700000,17.000000,1463.000000,292.000000,695.000000,330.000000,4.585900,187200.000000 +-122.140000,37.690000,38.000000,1571.000000,317.000000,874.000000,301.000000,4.465900,189100.000000 +-122.140000,37.690000,37.000000,2141.000000,535.000000,1093.000000,555.000000,2.995800,178400.000000 +-122.140000,37.680000,35.000000,2976.000000,518.000000,1424.000000,538.000000,4.267000,210300.000000 
+-122.140000,37.680000,27.000000,3337.000000,613.000000,1489.000000,607.000000,3.636400,219200.000000 +-122.140000,37.670000,37.000000,3342.000000,569.000000,1635.000000,557.000000,4.793300,186900.000000 +-122.140000,37.670000,37.000000,3156.000000,534.000000,1495.000000,543.000000,4.812500,188300.000000 +-122.140000,37.670000,36.000000,1487.000000,249.000000,641.000000,243.000000,4.068200,196200.000000 +-122.140000,37.500000,46.000000,30.000000,4.000000,13.000000,5.000000,15.000100,500001.000000 +-122.140000,37.480000,36.000000,1210.000000,236.000000,981.000000,239.000000,4.003900,148900.000000 +-122.140000,37.470000,37.000000,3373.000000,815.000000,2909.000000,705.000000,2.886800,156600.000000 +-122.140000,37.470000,36.000000,2081.000000,412.000000,1931.000000,373.000000,3.791700,160600.000000 +-122.140000,37.460000,27.000000,5580.000000,2009.000000,4165.000000,1763.000000,2.437500,189000.000000 +-122.140000,37.450000,52.000000,3841.000000,537.000000,1391.000000,540.000000,7.864700,500001.000000 +-122.140000,37.450000,48.000000,2074.000000,297.000000,700.000000,279.000000,8.705100,500001.000000 +-122.140000,37.440000,52.000000,3117.000000,468.000000,1114.000000,421.000000,6.675600,500001.000000 +-122.140000,37.430000,52.000000,1944.000000,308.000000,696.000000,293.000000,8.266400,500001.000000 +-122.140000,37.430000,52.000000,1383.000000,227.000000,551.000000,249.000000,6.582900,500001.000000 +-122.140000,37.430000,52.000000,1327.000000,190.000000,467.000000,189.000000,12.590200,500001.000000 +-122.140000,37.420000,46.000000,206.000000,44.000000,134.000000,51.000000,4.150000,265000.000000 +-122.140000,37.380000,26.000000,2859.000000,343.000000,951.000000,336.000000,10.427700,500001.000000 +-122.140000,37.360000,23.000000,11294.000000,1377.000000,3840.000000,1367.000000,12.138700,500001.000000 +-122.140000,37.080000,18.000000,2420.000000,439.000000,1278.000000,416.000000,5.210100,334000.000000 +-122.150000,38.290000,17.000000,1625.000000,239.000000,703.000000,224.000000,6.589100,328800.000000 +-122.150000,38.060000,10.000000,3008.000000,532.000000,1381.000000,522.000000,5.366100,195800.000000 +-122.150000,38.040000,14.000000,2804.000000,587.000000,1083.000000,573.000000,2.646600,168500.000000 +-122.150000,37.760000,39.000000,1823.000000,286.000000,763.000000,270.000000,6.074900,196900.000000 +-122.150000,37.740000,52.000000,2898.000000,557.000000,1338.000000,550.000000,3.851000,183500.000000 +-122.150000,37.740000,52.000000,1394.000000,223.000000,545.000000,230.000000,3.950000,219000.000000 +-122.150000,37.740000,49.000000,1494.000000,316.000000,611.000000,288.000000,2.200000,187500.000000 +-122.150000,37.740000,49.000000,1325.000000,277.000000,764.000000,282.000000,3.312500,118000.000000 +-122.150000,37.740000,43.000000,1383.000000,275.000000,853.000000,272.000000,3.508300,122000.000000 +-122.150000,37.740000,41.000000,856.000000,178.000000,571.000000,191.000000,3.145800,130600.000000 +-122.150000,37.730000,52.000000,1028.000000,129.000000,317.000000,143.000000,4.913500,275000.000000 +-122.150000,37.730000,45.000000,3758.000000,819.000000,1573.000000,736.000000,2.835500,245400.000000 +-122.150000,37.730000,28.000000,2215.000000,587.000000,830.000000,573.000000,2.189800,141700.000000 +-122.150000,37.720000,47.000000,1190.000000,251.000000,540.000000,266.000000,3.375000,198300.000000 +-122.150000,37.720000,31.000000,1616.000000,372.000000,739.000000,379.000000,2.909700,210900.000000 +-122.150000,37.720000,29.000000,4169.000000,1047.000000,2024.000000,962.000000,2.812500,157400.000000 
+-122.150000,37.710000,36.000000,998.000000,178.000000,531.000000,183.000000,4.020800,191500.000000 +-122.150000,37.710000,18.000000,5778.000000,1526.000000,2441.000000,1352.000000,3.168200,202700.000000 +-122.150000,37.700000,36.000000,1468.000000,252.000000,733.000000,229.000000,3.458300,192600.000000 +-122.150000,37.700000,36.000000,1464.000000,244.000000,672.000000,261.000000,3.554700,194700.000000 +-122.150000,37.690000,39.000000,1670.000000,308.000000,957.000000,335.000000,5.131200,183600.000000 +-122.150000,37.690000,38.000000,1246.000000,221.000000,637.000000,222.000000,3.662500,184600.000000 +-122.150000,37.690000,36.000000,1545.000000,273.000000,863.000000,267.000000,4.010900,192900.000000 +-122.150000,37.690000,36.000000,1501.000000,287.000000,703.000000,276.000000,3.886400,197300.000000 +-122.150000,37.680000,35.000000,2632.000000,447.000000,1349.000000,486.000000,4.386400,205200.000000 +-122.150000,37.680000,30.000000,2261.000000,443.000000,929.000000,383.000000,4.284100,213400.000000 +-122.150000,37.670000,35.000000,2472.000000,398.000000,1171.000000,390.000000,5.579700,198100.000000 +-122.150000,37.470000,39.000000,1295.000000,239.000000,566.000000,242.000000,5.640700,326400.000000 +-122.150000,37.460000,52.000000,1803.000000,257.000000,683.000000,259.000000,10.950800,500001.000000 +-122.150000,37.460000,42.000000,1995.000000,412.000000,794.000000,374.000000,5.623400,379600.000000 +-122.150000,37.460000,39.000000,906.000000,109.000000,353.000000,112.000000,10.394200,500001.000000 +-122.150000,37.460000,30.000000,4198.000000,1244.000000,2678.000000,1147.000000,3.671200,308600.000000 +-122.150000,37.450000,52.000000,568.000000,91.000000,219.000000,75.000000,6.157500,500001.000000 +-122.150000,37.450000,52.000000,2117.000000,353.000000,734.000000,328.000000,6.767000,500001.000000 +-122.150000,37.440000,52.000000,2063.000000,320.000000,774.000000,309.000000,7.254300,500001.000000 +-122.150000,37.440000,52.000000,1945.000000,293.000000,708.000000,275.000000,6.165500,500001.000000 +-122.150000,37.440000,52.000000,1400.000000,217.000000,522.000000,227.000000,4.986100,500001.000000 +-122.150000,37.430000,47.000000,2600.000000,490.000000,1149.000000,465.000000,5.020300,476300.000000 +-122.150000,37.430000,20.000000,11709.000000,3618.000000,7604.000000,3589.000000,1.904500,375000.000000 +-122.150000,37.420000,44.000000,3558.000000,839.000000,1779.000000,832.000000,3.924300,404800.000000 +-122.150000,37.410000,29.000000,473.000000,103.000000,359.000000,87.000000,7.030900,475000.000000 +-122.150000,37.350000,23.000000,3814.000000,485.000000,1344.000000,464.000000,12.979200,500001.000000 +-122.160000,39.780000,32.000000,1288.000000,221.000000,562.000000,203.000000,2.325000,69600.000000 +-122.160000,39.740000,20.000000,707.000000,126.000000,337.000000,125.000000,3.046900,85000.000000 +-122.160000,38.070000,14.000000,6360.000000,1236.000000,2876.000000,1127.000000,4.532100,190300.000000 +-122.160000,38.050000,52.000000,1888.000000,457.000000,830.000000,408.000000,3.137300,185100.000000 +-122.160000,38.050000,34.000000,2434.000000,428.000000,926.000000,423.000000,4.677600,208300.000000 +-122.160000,38.020000,40.000000,1800.000000,290.000000,761.000000,277.000000,5.126500,196100.000000 +-122.160000,37.890000,32.000000,1779.000000,241.000000,721.000000,258.000000,8.758900,434500.000000 +-122.160000,37.860000,36.000000,3359.000000,493.000000,1298.000000,483.000000,8.158600,404300.000000 +-122.160000,37.830000,16.000000,4596.000000,705.000000,1480.000000,650.000000,7.520000,370200.000000 
+-122.160000,37.790000,22.000000,12842.000000,2048.000000,4985.000000,1967.000000,5.984900,371000.000000 +-122.160000,37.770000,48.000000,977.000000,194.000000,446.000000,180.000000,4.770800,156300.000000 +-122.160000,37.770000,47.000000,1256.000000,217.000000,570.000000,218.000000,4.375000,161900.000000 +-122.160000,37.770000,45.000000,2324.000000,397.000000,968.000000,384.000000,3.573900,176000.000000 +-122.160000,37.770000,39.000000,1583.000000,349.000000,857.000000,316.000000,3.095800,145800.000000 +-122.160000,37.760000,46.000000,1827.000000,307.000000,881.000000,302.000000,4.669600,164300.000000 +-122.160000,37.760000,36.000000,2781.000000,574.000000,1438.000000,519.000000,2.459800,155500.000000 +-122.160000,37.750000,46.000000,954.000000,161.000000,429.000000,154.000000,2.925000,142900.000000 +-122.160000,37.750000,44.000000,617.000000,131.000000,378.000000,135.000000,2.556800,111100.000000 +-122.160000,37.750000,40.000000,1227.000000,294.000000,928.000000,261.000000,1.823500,95200.000000 +-122.160000,37.750000,38.000000,2457.000000,624.000000,1516.000000,482.000000,1.562500,91700.000000 +-122.160000,37.750000,24.000000,1790.000000,454.000000,1137.000000,386.000000,2.537000,107900.000000 +-122.160000,37.740000,52.000000,771.000000,147.000000,355.000000,144.000000,4.145800,143400.000000 +-122.160000,37.740000,47.000000,824.000000,223.000000,533.000000,166.000000,2.625000,98200.000000 +-122.160000,37.740000,46.000000,1029.000000,181.000000,567.000000,211.000000,3.484400,129500.000000 +-122.160000,37.740000,44.000000,1097.000000,239.000000,609.000000,215.000000,2.022700,103100.000000 +-122.160000,37.740000,43.000000,1534.000000,300.000000,826.000000,295.000000,4.041700,109400.000000 +-122.160000,37.730000,52.000000,2260.000000,416.000000,994.000000,412.000000,4.116400,198200.000000 +-122.160000,37.730000,52.000000,1114.000000,206.000000,425.000000,207.000000,2.562500,175000.000000 +-122.160000,37.730000,49.000000,1699.000000,408.000000,768.000000,385.000000,2.830100,171600.000000 +-122.160000,37.720000,10.000000,2229.000000,601.000000,877.000000,485.000000,3.343100,137500.000000 +-122.160000,37.710000,37.000000,1507.000000,242.000000,632.000000,253.000000,4.555300,191000.000000 +-122.160000,37.700000,36.000000,2239.000000,391.000000,1203.000000,379.000000,5.004300,190400.000000 +-122.160000,37.700000,36.000000,1719.000000,303.000000,836.000000,311.000000,4.437500,193500.000000 +-122.160000,37.700000,36.000000,1097.000000,208.000000,568.000000,225.000000,2.991700,194600.000000 +-122.160000,37.690000,36.000000,1480.000000,278.000000,796.000000,283.000000,4.397100,205700.000000 +-122.160000,37.470000,44.000000,2581.000000,437.000000,1006.000000,414.000000,5.397000,341700.000000 +-122.160000,37.470000,33.000000,3687.000000,852.000000,3091.000000,852.000000,2.650600,162600.000000 +-122.160000,37.460000,45.000000,2068.000000,348.000000,844.000000,366.000000,6.227000,417800.000000 +-122.160000,37.460000,32.000000,2663.000000,661.000000,1403.000000,733.000000,4.266700,410200.000000 +-122.160000,37.450000,50.000000,196.000000,41.000000,76.000000,42.000000,7.612900,412500.000000 +-122.160000,37.450000,47.000000,4234.000000,1156.000000,1808.000000,1093.000000,4.229700,425000.000000 +-122.160000,37.450000,37.000000,2926.000000,874.000000,1363.000000,815.000000,4.598700,356000.000000 +-122.160000,37.450000,19.000000,2207.000000,810.000000,1304.000000,775.000000,2.140600,402500.000000 +-122.160000,37.440000,34.000000,2199.000000,529.000000,1193.000000,532.000000,4.297200,405900.000000 
+-122.160000,37.420000,34.000000,4448.000000,610.000000,2571.000000,581.000000,11.049200,500001.000000 +-122.160000,37.170000,35.000000,6422.000000,1380.000000,2755.000000,1064.000000,5.016500,202300.000000 +-122.170000,40.200000,28.000000,1782.000000,334.000000,873.000000,311.000000,3.359400,79100.000000 +-122.170000,40.110000,24.000000,1631.000000,340.000000,1042.000000,333.000000,1.770800,59000.000000 +-122.170000,39.940000,32.000000,2352.000000,477.000000,1316.000000,447.000000,2.229200,57400.000000 +-122.170000,39.920000,16.000000,1566.000000,306.000000,652.000000,287.000000,1.903800,60800.000000 +-122.170000,38.080000,7.000000,18392.000000,2782.000000,8276.000000,2742.000000,6.423200,229200.000000 +-122.170000,38.070000,15.000000,2125.000000,278.000000,857.000000,272.000000,6.459900,219700.000000 +-122.170000,38.060000,16.000000,3515.000000,626.000000,1764.000000,626.000000,4.439700,187100.000000 +-122.170000,37.880000,33.000000,3626.000000,502.000000,1348.000000,480.000000,7.610700,423200.000000 +-122.170000,37.880000,32.000000,3633.000000,508.000000,1393.000000,506.000000,7.691700,401800.000000 +-122.170000,37.870000,38.000000,1261.000000,177.000000,472.000000,183.000000,6.917000,438000.000000 +-122.170000,37.780000,52.000000,653.000000,128.000000,296.000000,121.000000,4.175000,144000.000000 +-122.170000,37.780000,49.000000,893.000000,177.000000,468.000000,181.000000,3.875000,140600.000000 +-122.170000,37.780000,42.000000,1524.000000,260.000000,651.000000,267.000000,3.687500,157300.000000 +-122.170000,37.770000,39.000000,1612.000000,342.000000,912.000000,322.000000,3.395800,141900.000000 +-122.170000,37.770000,31.000000,2424.000000,533.000000,1360.000000,452.000000,1.871000,90700.000000 +-122.170000,37.770000,30.000000,3326.000000,746.000000,1704.000000,703.000000,2.875000,135300.000000 +-122.170000,37.760000,47.000000,2118.000000,413.000000,965.000000,382.000000,2.184200,107900.000000 +-122.170000,37.760000,41.000000,1594.000000,367.000000,1074.000000,355.000000,1.935600,90600.000000 +-122.170000,37.760000,40.000000,1685.000000,343.000000,949.000000,342.000000,1.842600,94800.000000 +-122.170000,37.760000,38.000000,1764.000000,397.000000,987.000000,354.000000,2.433300,98200.000000 +-122.170000,37.760000,33.000000,1280.000000,307.000000,999.000000,286.000000,2.562500,89300.000000 +-122.170000,37.750000,48.000000,1751.000000,390.000000,935.000000,349.000000,1.437500,90000.000000 +-122.170000,37.750000,47.000000,998.000000,211.000000,597.000000,185.000000,3.158700,100400.000000 +-122.170000,37.750000,44.000000,1218.000000,248.000000,763.000000,254.000000,2.328100,88800.000000 +-122.170000,37.750000,43.000000,1587.000000,320.000000,907.000000,306.000000,1.982100,98300.000000 +-122.170000,37.750000,41.000000,1257.000000,271.000000,828.000000,230.000000,2.504300,92300.000000 +-122.170000,37.750000,38.000000,992.000000,279.000000,732.000000,259.000000,1.619600,85100.000000 +-122.170000,37.750000,37.000000,1379.000000,287.000000,835.000000,259.000000,2.496200,91800.000000 +-122.170000,37.740000,47.000000,463.000000,134.000000,327.000000,137.000000,2.150000,97200.000000 +-122.170000,37.740000,46.000000,769.000000,183.000000,693.000000,178.000000,2.250000,84200.000000 +-122.170000,37.740000,46.000000,1026.000000,226.000000,749.000000,225.000000,3.029800,107600.000000 +-122.170000,37.740000,43.000000,818.000000,193.000000,494.000000,179.000000,2.477600,101600.000000 +-122.170000,37.740000,41.000000,1613.000000,445.000000,1481.000000,414.000000,2.402800,97700.000000 
+-122.170000,37.740000,34.000000,1223.000000,281.000000,824.000000,280.000000,2.291700,92500.000000 +-122.170000,37.730000,52.000000,1555.000000,289.000000,620.000000,292.000000,3.715900,183300.000000 +-122.170000,37.730000,43.000000,1473.000000,371.000000,1231.000000,341.000000,2.158700,86500.000000 +-122.170000,37.720000,5.000000,1692.000000,398.000000,814.000000,328.000000,3.663000,158300.000000 +-122.170000,37.720000,46.000000,1369.000000,284.000000,766.000000,289.000000,3.531300,159700.000000 +-122.170000,37.720000,43.000000,3783.000000,814.000000,2139.000000,789.000000,4.020200,166300.000000 +-122.170000,37.720000,42.000000,3008.000000,659.000000,1817.000000,664.000000,3.371000,165000.000000 +-122.170000,37.690000,24.000000,2262.000000,391.000000,1125.000000,366.000000,4.760900,212600.000000 +-122.170000,37.480000,39.000000,2427.000000,401.000000,1178.000000,408.000000,5.962900,352700.000000 +-122.170000,37.460000,47.000000,2312.000000,332.000000,1044.000000,282.000000,9.459000,500001.000000 +-122.170000,37.450000,35.000000,1025.000000,242.000000,388.000000,232.000000,5.199500,500001.000000 +-122.170000,37.450000,33.000000,1828.000000,396.000000,766.000000,378.000000,4.453100,500001.000000 +-122.170000,37.430000,24.000000,3924.000000,1142.000000,7174.000000,950.000000,4.097200,387500.000000 +-122.180000,40.020000,30.000000,1952.000000,397.000000,961.000000,333.000000,2.250000,68200.000000 +-122.180000,39.930000,35.000000,1387.000000,272.000000,610.000000,237.000000,2.175900,59500.000000 +-122.180000,39.750000,30.000000,4157.000000,834.000000,1885.000000,774.000000,1.694800,67500.000000 +-122.180000,39.700000,23.000000,1658.000000,307.000000,836.000000,297.000000,3.350000,85400.000000 +-122.180000,39.550000,28.000000,1471.000000,259.000000,673.000000,246.000000,3.250000,81600.000000 +-122.180000,38.490000,15.000000,1743.000000,366.000000,655.000000,264.000000,3.339300,146900.000000 +-122.180000,38.350000,24.000000,407.000000,68.000000,175.000000,61.000000,6.026600,216700.000000 +-122.180000,38.290000,18.000000,1953.000000,265.000000,658.000000,270.000000,8.011300,393000.000000 +-122.180000,38.170000,7.000000,4093.000000,651.000000,2228.000000,646.000000,5.252300,161300.000000 +-122.180000,38.070000,10.000000,4976.000000,849.000000,2089.000000,803.000000,5.328800,201800.000000 +-122.180000,37.910000,31.000000,7200.000000,876.000000,2428.000000,843.000000,10.940500,500001.000000 +-122.180000,37.900000,36.000000,4760.000000,610.000000,1511.000000,572.000000,9.006400,500001.000000 +-122.180000,37.890000,18.000000,4845.000000,735.000000,1634.000000,734.000000,8.148900,499000.000000 +-122.180000,37.880000,36.000000,542.000000,119.000000,231.000000,121.000000,4.900000,354200.000000 +-122.180000,37.860000,33.000000,4449.000000,636.000000,1684.000000,617.000000,8.957100,399700.000000 +-122.180000,37.810000,37.000000,1643.000000,262.000000,620.000000,266.000000,5.444600,336700.000000 +-122.180000,37.810000,30.000000,292.000000,38.000000,126.000000,52.000000,6.362400,483300.000000 +-122.180000,37.800000,34.000000,1355.000000,195.000000,442.000000,195.000000,6.283800,318200.000000 +-122.180000,37.800000,23.000000,2317.000000,336.000000,955.000000,328.000000,6.752700,285800.000000 +-122.180000,37.790000,46.000000,2109.000000,387.000000,922.000000,329.000000,3.971200,208100.000000 +-122.180000,37.790000,39.000000,617.000000,95.000000,236.000000,106.000000,5.257800,253000.000000 +-122.180000,37.780000,50.000000,1642.000000,322.000000,713.000000,284.000000,3.298400,160700.000000 
+-122.180000,37.780000,43.000000,1985.000000,440.000000,1085.000000,407.000000,3.420500,136700.000000 +-122.180000,37.770000,52.000000,2744.000000,547.000000,1479.000000,554.000000,2.276800,96200.000000 +-122.180000,37.770000,52.000000,1748.000000,362.000000,1029.000000,366.000000,2.055600,100000.000000 +-122.180000,37.770000,51.000000,2107.000000,471.000000,1173.000000,438.000000,3.255200,120100.000000 +-122.180000,37.770000,42.000000,1180.000000,257.000000,877.000000,268.000000,2.812500,97300.000000 +-122.180000,37.770000,27.000000,909.000000,236.000000,396.000000,157.000000,2.078600,97500.000000 +-122.180000,37.760000,52.000000,754.000000,175.000000,447.000000,165.000000,3.906300,93800.000000 +-122.180000,37.760000,50.000000,1187.000000,261.000000,907.000000,246.000000,1.947900,89500.000000 +-122.180000,37.760000,49.000000,2308.000000,452.000000,1299.000000,451.000000,1.840700,96700.000000 +-122.180000,37.760000,43.000000,2018.000000,408.000000,1111.000000,367.000000,1.891300,91200.000000 +-122.180000,37.760000,37.000000,1575.000000,358.000000,933.000000,320.000000,2.291700,107000.000000 +-122.180000,37.750000,46.000000,941.000000,218.000000,621.000000,195.000000,1.325000,87100.000000 +-122.180000,37.750000,45.000000,990.000000,261.000000,901.000000,260.000000,2.173100,82000.000000 +-122.180000,37.750000,45.000000,330.000000,76.000000,282.000000,80.000000,4.046900,80700.000000 +-122.180000,37.750000,43.000000,1036.000000,233.000000,652.000000,213.000000,2.069000,84600.000000 +-122.180000,37.750000,36.000000,1047.000000,214.000000,651.000000,166.000000,1.712000,82100.000000 +-122.180000,37.740000,46.000000,2103.000000,391.000000,1339.000000,354.000000,2.246700,88900.000000 +-122.180000,37.740000,42.000000,541.000000,154.000000,380.000000,123.000000,2.345600,83500.000000 +-122.180000,37.740000,35.000000,504.000000,126.000000,323.000000,109.000000,1.843800,90500.000000 +-122.180000,37.730000,44.000000,548.000000,119.000000,435.000000,136.000000,2.111100,79700.000000 +-122.180000,37.720000,45.000000,1498.000000,313.000000,1003.000000,305.000000,3.804700,156700.000000 +-122.180000,37.710000,45.000000,726.000000,147.000000,519.000000,135.000000,3.375000,157500.000000 +-122.180000,37.700000,35.000000,2562.000000,554.000000,1398.000000,525.000000,3.390600,178900.000000 +-122.180000,37.680000,5.000000,2087.000000,407.000000,840.000000,401.000000,5.485800,187800.000000 +-122.180000,37.470000,37.000000,2848.000000,328.000000,852.000000,327.000000,13.367000,500001.000000 +-122.180000,37.460000,40.000000,2529.000000,293.000000,831.000000,258.000000,15.000100,500001.000000 +-122.180000,37.450000,43.000000,2061.000000,437.000000,817.000000,385.000000,4.468800,460200.000000 +-122.180000,37.450000,37.000000,5257.000000,1360.000000,2128.000000,1264.000000,4.000000,394300.000000 +-122.180000,37.440000,44.000000,2237.000000,347.000000,948.000000,346.000000,8.243600,500001.000000 +-122.180000,37.410000,21.000000,1034.000000,117.000000,323.000000,117.000000,10.723700,500001.000000 +-122.190000,40.200000,30.000000,2750.000000,476.000000,1296.000000,464.000000,3.530500,73600.000000 +-122.190000,40.070000,21.000000,1548.000000,290.000000,744.000000,265.000000,1.977300,55000.000000 +-122.190000,39.910000,39.000000,2467.000000,529.000000,1433.000000,502.000000,1.857100,53500.000000 +-122.190000,39.740000,39.000000,4179.000000,814.000000,2111.000000,809.000000,2.350700,65600.000000 +-122.190000,39.530000,34.000000,2679.000000,533.000000,1287.000000,505.000000,2.165000,58700.000000 
+-122.190000,39.500000,23.000000,462.000000,97.000000,261.000000,90.000000,2.170500,53000.000000 +-122.190000,38.130000,5.000000,7854.000000,1446.000000,4361.000000,1395.000000,4.950400,214800.000000 +-122.190000,38.090000,8.000000,614.000000,118.000000,278.000000,115.000000,6.373500,166300.000000 +-122.190000,38.070000,20.000000,3000.000000,525.000000,1207.000000,491.000000,4.640600,217500.000000 +-122.190000,37.840000,18.000000,1617.000000,210.000000,533.000000,194.000000,11.601700,392600.000000 +-122.190000,37.830000,29.000000,1791.000000,271.000000,661.000000,269.000000,6.853800,368900.000000 +-122.190000,37.830000,28.000000,1326.000000,184.000000,463.000000,190.000000,8.204900,335200.000000 +-122.190000,37.820000,32.000000,1835.000000,264.000000,635.000000,263.000000,8.317000,365900.000000 +-122.190000,37.800000,52.000000,1813.000000,271.000000,637.000000,277.000000,4.011400,263400.000000 +-122.190000,37.800000,48.000000,1694.000000,259.000000,610.000000,238.000000,4.744000,257300.000000 +-122.190000,37.800000,46.000000,1938.000000,341.000000,768.000000,332.000000,4.272700,246900.000000 +-122.190000,37.790000,50.000000,968.000000,195.000000,462.000000,184.000000,2.984400,179900.000000 +-122.190000,37.790000,50.000000,954.000000,217.000000,546.000000,201.000000,2.666700,172800.000000 +-122.190000,37.790000,47.000000,1229.000000,243.000000,582.000000,256.000000,2.951400,198100.000000 +-122.190000,37.790000,45.000000,2718.000000,451.000000,1106.000000,454.000000,4.656300,231800.000000 +-122.190000,37.790000,28.000000,3144.000000,761.000000,1737.000000,669.000000,2.929700,140500.000000 +-122.190000,37.780000,52.000000,2492.000000,415.000000,1109.000000,375.000000,4.312500,164400.000000 +-122.190000,37.780000,52.000000,2198.000000,397.000000,984.000000,369.000000,3.220000,156500.000000 +-122.190000,37.780000,52.000000,1070.000000,193.000000,555.000000,190.000000,3.726200,166900.000000 +-122.190000,37.780000,52.000000,1026.000000,180.000000,469.000000,168.000000,2.875000,160000.000000 +-122.190000,37.780000,49.000000,1183.000000,205.000000,496.000000,209.000000,5.232800,174200.000000 +-122.190000,37.770000,52.000000,2329.000000,445.000000,1144.000000,417.000000,3.511400,151200.000000 +-122.190000,37.770000,52.000000,2170.000000,428.000000,1086.000000,425.000000,3.371500,143900.000000 +-122.190000,37.770000,45.000000,1852.000000,393.000000,1132.000000,349.000000,2.715900,101400.000000 +-122.190000,37.770000,42.000000,932.000000,254.000000,900.000000,263.000000,1.803900,92300.000000 +-122.190000,37.770000,41.000000,2036.000000,510.000000,1412.000000,454.000000,2.046900,89300.000000 +-122.190000,37.760000,52.000000,2024.000000,391.000000,1030.000000,350.000000,2.465900,94700.000000 +-122.190000,37.760000,49.000000,1368.000000,282.000000,790.000000,269.000000,1.705600,91400.000000 +-122.190000,37.760000,45.000000,995.000000,238.000000,630.000000,237.000000,1.925000,74100.000000 +-122.190000,37.760000,41.000000,921.000000,207.000000,522.000000,159.000000,1.208300,72500.000000 +-122.190000,37.760000,38.000000,1493.000000,370.000000,1144.000000,351.000000,0.768300,81800.000000 +-122.190000,37.760000,26.000000,1293.000000,297.000000,984.000000,303.000000,1.947900,85800.000000 +-122.190000,37.750000,36.000000,1126.000000,263.000000,482.000000,150.000000,1.916700,82800.000000 +-122.190000,37.750000,28.000000,856.000000,189.000000,435.000000,162.000000,0.801200,81800.000000 +-122.190000,37.750000,19.000000,2207.000000,565.000000,1481.000000,520.000000,1.319400,81400.000000 
+-122.190000,37.740000,43.000000,707.000000,147.000000,417.000000,155.000000,2.513900,83400.000000 +-122.190000,37.740000,36.000000,847.000000,212.000000,567.000000,159.000000,1.176500,87100.000000 +-122.190000,37.730000,45.000000,1528.000000,291.000000,801.000000,287.000000,1.262500,84700.000000 +-122.190000,37.480000,45.000000,886.000000,165.000000,492.000000,173.000000,4.270800,267000.000000 +-122.190000,37.460000,34.000000,5419.000000,1183.000000,2002.000000,1138.000000,4.198500,500001.000000 +-122.190000,37.450000,18.000000,1636.000000,414.000000,853.000000,439.000000,5.103200,464600.000000 +-122.190000,37.440000,39.000000,4402.000000,618.000000,1616.000000,631.000000,8.995500,500001.000000 +-122.190000,37.440000,38.000000,3383.000000,456.000000,1203.000000,465.000000,9.319800,500001.000000 +-122.190000,37.430000,39.000000,2392.000000,420.000000,937.000000,406.000000,6.613600,472800.000000 +-122.190000,37.420000,47.000000,932.000000,167.000000,295.000000,116.000000,8.437500,500001.000000 +-122.200000,39.530000,22.000000,3265.000000,658.000000,1647.000000,594.000000,2.356600,71000.000000 +-122.200000,39.520000,39.000000,2551.000000,482.000000,1181.000000,437.000000,2.062500,63400.000000 +-122.200000,39.150000,33.000000,1064.000000,174.000000,434.000000,147.000000,3.125000,108000.000000 +-122.200000,38.090000,18.000000,6860.000000,1079.000000,3205.000000,1058.000000,5.295700,171900.000000 +-122.200000,38.040000,31.000000,3029.000000,500.000000,1236.000000,487.000000,5.602200,197000.000000 +-122.200000,37.960000,9.000000,6306.000000,962.000000,2581.000000,911.000000,6.774100,310700.000000 +-122.200000,37.900000,36.000000,2107.000000,287.000000,740.000000,280.000000,10.341600,500001.000000 +-122.200000,37.890000,37.000000,3881.000000,560.000000,1315.000000,517.000000,7.319500,367500.000000 +-122.200000,37.880000,36.000000,1065.000000,160.000000,398.000000,155.000000,7.773600,378100.000000 +-122.200000,37.840000,35.000000,2865.000000,460.000000,1072.000000,443.000000,7.488200,319300.000000 +-122.200000,37.840000,30.000000,2211.000000,346.000000,844.000000,343.000000,6.066600,311500.000000 +-122.200000,37.830000,26.000000,1589.000000,223.000000,542.000000,211.000000,8.401000,351200.000000 +-122.200000,37.820000,39.000000,3770.000000,534.000000,1265.000000,500.000000,6.330200,362800.000000 +-122.200000,37.820000,37.000000,1229.000000,181.000000,420.000000,176.000000,7.017500,366700.000000 +-122.200000,37.810000,45.000000,2964.000000,436.000000,1067.000000,426.000000,6.785100,323500.000000 +-122.200000,37.800000,52.000000,1547.000000,293.000000,706.000000,268.000000,4.772100,217100.000000 +-122.200000,37.800000,46.000000,2041.000000,405.000000,1059.000000,399.000000,3.848700,203300.000000 +-122.200000,37.800000,43.000000,3045.000000,499.000000,1115.000000,455.000000,4.955900,273000.000000 +-122.200000,37.800000,41.000000,2070.000000,354.000000,804.000000,340.000000,5.118400,239600.000000 +-122.200000,37.790000,49.000000,882.000000,195.000000,737.000000,210.000000,2.666700,122000.000000 +-122.200000,37.790000,44.000000,1621.000000,452.000000,1354.000000,491.000000,2.619000,134700.000000 +-122.200000,37.790000,40.000000,1060.000000,256.000000,667.000000,235.000000,4.173900,169600.000000 +-122.200000,37.780000,52.000000,2300.000000,443.000000,1225.000000,423.000000,3.539800,158400.000000 +-122.200000,37.780000,50.000000,1867.000000,403.000000,1128.000000,378.000000,2.540100,129100.000000 +-122.200000,37.780000,45.000000,1766.000000,332.000000,869.000000,327.000000,4.589300,163500.000000 
+-122.200000,37.780000,39.000000,1752.000000,399.000000,1071.000000,376.000000,3.116700,121600.000000 +-122.200000,37.770000,49.000000,2272.000000,498.000000,1621.000000,483.000000,2.433800,102400.000000 +-122.200000,37.770000,43.000000,2430.000000,502.000000,1537.000000,484.000000,2.898000,121400.000000 +-122.200000,37.770000,39.000000,2689.000000,597.000000,1888.000000,537.000000,2.256200,94800.000000 +-122.200000,37.760000,47.000000,1116.000000,259.000000,826.000000,279.000000,1.750000,85700.000000 +-122.200000,37.760000,37.000000,2680.000000,736.000000,1925.000000,667.000000,1.409700,84600.000000 +-122.200000,37.750000,36.000000,606.000000,132.000000,531.000000,133.000000,1.580900,70000.000000 +-122.200000,37.480000,41.000000,733.000000,155.000000,652.000000,140.000000,5.165400,233600.000000 +-122.200000,37.480000,32.000000,640.000000,166.000000,991.000000,160.000000,1.984400,270000.000000 +-122.200000,37.480000,30.000000,1170.000000,258.000000,610.000000,243.000000,3.442700,263500.000000 +-122.200000,37.470000,40.000000,2959.000000,389.000000,985.000000,365.000000,9.902500,500001.000000 +-122.200000,37.470000,37.000000,1403.000000,369.000000,1587.000000,331.000000,2.825800,232800.000000 +-122.200000,37.470000,37.000000,1053.000000,266.000000,939.000000,267.000000,3.198900,320800.000000 +-122.200000,37.460000,40.000000,1723.000000,208.000000,976.000000,209.000000,9.889200,500001.000000 +-122.200000,37.440000,31.000000,2328.000000,270.000000,722.000000,247.000000,15.000100,500001.000000 +-122.200000,37.430000,40.000000,2223.000000,412.000000,1050.000000,417.000000,5.242100,444500.000000 +-122.200000,37.430000,38.000000,3626.000000,528.000000,1350.000000,532.000000,7.368100,500001.000000 +-122.200000,37.430000,22.000000,3294.000000,744.000000,1337.000000,655.000000,5.239100,500001.000000 +-122.200000,37.400000,37.000000,1296.000000,194.000000,540.000000,192.000000,8.278200,500001.000000 +-122.200000,37.400000,30.000000,2612.000000,338.000000,980.000000,324.000000,10.048100,500001.000000 +-122.200000,37.350000,17.000000,3095.000000,442.000000,1173.000000,424.000000,13.298600,500001.000000 +-122.210000,40.180000,30.000000,744.000000,156.000000,410.000000,165.000000,2.189800,63200.000000 +-122.210000,38.830000,20.000000,1138.000000,221.000000,459.000000,209.000000,3.153400,123400.000000 +-122.210000,38.410000,12.000000,4270.000000,654.000000,1624.000000,598.000000,5.526600,331300.000000 +-122.210000,38.280000,35.000000,1273.000000,210.000000,555.000000,181.000000,4.486100,269300.000000 +-122.210000,38.110000,35.000000,2122.000000,400.000000,1189.000000,408.000000,3.096200,124600.000000 +-122.210000,38.100000,36.000000,3018.000000,557.000000,1445.000000,556.000000,3.802900,129900.000000 +-122.210000,38.090000,37.000000,4368.000000,779.000000,2083.000000,741.000000,3.866700,127000.000000 +-122.210000,38.060000,52.000000,2735.000000,559.000000,1076.000000,487.000000,3.615400,155700.000000 +-122.210000,38.020000,15.000000,2150.000000,327.000000,1094.000000,324.000000,6.022400,198500.000000 +-122.210000,37.840000,34.000000,3038.000000,490.000000,1140.000000,496.000000,7.054800,325900.000000 +-122.210000,37.830000,34.000000,5065.000000,788.000000,1627.000000,766.000000,6.897600,333300.000000 +-122.210000,37.820000,52.000000,2375.000000,333.000000,813.000000,350.000000,7.054900,331400.000000 +-122.210000,37.810000,52.000000,1389.000000,212.000000,510.000000,224.000000,5.240200,296400.000000 +-122.210000,37.800000,52.000000,3519.000000,711.000000,1883.000000,706.000000,3.486100,187100.000000 
+-122.210000,37.800000,50.000000,2833.000000,605.000000,1260.000000,552.000000,2.892900,216700.000000 +-122.210000,37.800000,48.000000,1321.000000,263.000000,506.000000,252.000000,4.097700,229700.000000 +-122.210000,37.800000,39.000000,2018.000000,447.000000,1221.000000,446.000000,3.075700,151000.000000 +-122.210000,37.800000,38.000000,2254.000000,535.000000,951.000000,487.000000,3.081200,233100.000000 +-122.210000,37.790000,47.000000,1543.000000,307.000000,859.000000,292.000000,2.958300,138800.000000 +-122.210000,37.790000,45.000000,2115.000000,533.000000,1530.000000,474.000000,2.416700,139400.000000 +-122.210000,37.790000,35.000000,1745.000000,409.000000,1143.000000,386.000000,2.875000,143800.000000 +-122.210000,37.790000,34.000000,2364.000000,557.000000,1517.000000,516.000000,2.836500,139200.000000 +-122.210000,37.780000,52.000000,1477.000000,300.000000,1065.000000,269.000000,1.847200,137000.000000 +-122.210000,37.780000,52.000000,1056.000000,224.000000,792.000000,245.000000,2.658300,142600.000000 +-122.210000,37.780000,49.000000,898.000000,244.000000,779.000000,245.000000,3.053600,137500.000000 +-122.210000,37.780000,47.000000,881.000000,248.000000,753.000000,241.000000,2.625000,111300.000000 +-122.210000,37.780000,46.000000,2239.000000,508.000000,1390.000000,569.000000,2.735200,137300.000000 +-122.210000,37.780000,44.000000,1729.000000,414.000000,1240.000000,393.000000,2.312500,102800.000000 +-122.210000,37.780000,43.000000,1702.000000,460.000000,1227.000000,407.000000,1.718800,126800.000000 +-122.210000,37.770000,52.000000,591.000000,173.000000,353.000000,137.000000,4.090400,80600.000000 +-122.210000,37.770000,46.000000,1234.000000,375.000000,1183.000000,354.000000,2.330900,98700.000000 +-122.210000,37.770000,43.000000,1017.000000,328.000000,836.000000,277.000000,2.260400,100000.000000 +-122.210000,37.760000,52.000000,1420.000000,314.000000,1085.000000,300.000000,1.754600,80600.000000 +-122.210000,37.520000,18.000000,2962.000000,945.000000,1639.000000,851.000000,2.739900,87500.000000 +-122.210000,37.490000,24.000000,2528.000000,947.000000,2437.000000,861.000000,2.274600,225000.000000 +-122.210000,37.480000,39.000000,1535.000000,340.000000,1204.000000,370.000000,2.848200,247200.000000 +-122.210000,37.480000,37.000000,1326.000000,335.000000,1771.000000,335.000000,3.014700,218100.000000 +-122.210000,37.470000,43.000000,733.000000,162.000000,497.000000,175.000000,3.270800,255300.000000 +-122.210000,37.470000,26.000000,1777.000000,555.000000,1966.000000,497.000000,3.047200,211000.000000 +-122.210000,37.460000,48.000000,2560.000000,322.000000,921.000000,301.000000,10.875800,500001.000000 +-122.210000,37.460000,40.000000,1777.000000,207.000000,577.000000,207.000000,15.000100,500001.000000 +-122.210000,37.440000,35.000000,1140.000000,193.000000,486.000000,199.000000,4.690800,500001.000000 +-122.210000,37.430000,33.000000,1606.000000,254.000000,727.000000,271.000000,8.696300,500001.000000 +-122.210000,37.430000,23.000000,5741.000000,1012.000000,1843.000000,888.000000,5.721100,500001.000000 +-122.210000,37.430000,20.000000,975.000000,134.000000,324.000000,146.000000,9.779600,500001.000000 +-122.210000,37.420000,28.000000,564.000000,72.000000,191.000000,79.000000,11.966600,500001.000000 +-122.210000,37.380000,28.000000,4518.000000,578.000000,1489.000000,559.000000,11.317600,500001.000000 +-122.210000,37.370000,34.000000,1476.000000,217.000000,613.000000,223.000000,8.288300,500001.000000 +-122.220000,39.510000,17.000000,1201.000000,268.000000,555.000000,277.000000,2.100000,66900.000000 
+-122.220000,38.150000,7.000000,5129.000000,820.000000,2824.000000,738.000000,5.513800,171100.000000 +-122.220000,38.120000,15.000000,14125.000000,2344.000000,6456.000000,2147.000000,5.101400,179500.000000 +-122.220000,38.110000,43.000000,1939.000000,353.000000,968.000000,392.000000,3.184800,112700.000000 +-122.220000,38.100000,44.000000,3013.000000,563.000000,1353.000000,512.000000,3.455900,111900.000000 +-122.220000,38.100000,44.000000,2256.000000,451.000000,1057.000000,426.000000,3.120400,110800.000000 +-122.220000,38.100000,40.000000,2549.000000,478.000000,1275.000000,494.000000,2.946900,111600.000000 +-122.220000,38.090000,47.000000,2161.000000,440.000000,966.000000,360.000000,2.273400,88700.000000 +-122.220000,38.080000,37.000000,4590.000000,857.000000,2920.000000,832.000000,3.436000,94800.000000 +-122.220000,38.070000,4.000000,15654.000000,2394.000000,7025.000000,2168.000000,5.817100,225200.000000 +-122.220000,37.860000,21.000000,7099.000000,1106.000000,2401.000000,1138.000000,8.301400,358500.000000 +-122.220000,37.820000,42.000000,2991.000000,335.000000,1018.000000,335.000000,13.499000,500001.000000 +-122.220000,37.820000,39.000000,2492.000000,310.000000,808.000000,315.000000,11.860300,500001.000000 +-122.220000,37.810000,52.000000,2927.000000,402.000000,1021.000000,380.000000,8.156400,390100.000000 +-122.220000,37.810000,52.000000,2024.000000,339.000000,756.000000,340.000000,4.072000,270100.000000 +-122.220000,37.810000,52.000000,1971.000000,335.000000,765.000000,308.000000,6.521700,273700.000000 +-122.220000,37.800000,52.000000,2721.000000,541.000000,1185.000000,515.000000,4.542800,239800.000000 +-122.220000,37.800000,52.000000,2286.000000,464.000000,1073.000000,441.000000,3.029800,199600.000000 +-122.220000,37.800000,52.000000,2183.000000,465.000000,1129.000000,460.000000,3.263200,227700.000000 +-122.220000,37.790000,52.000000,3424.000000,690.000000,2273.000000,685.000000,3.904800,164700.000000 +-122.220000,37.790000,49.000000,1826.000000,450.000000,1201.000000,424.000000,2.500000,136700.000000 +-122.220000,37.790000,46.000000,2366.000000,575.000000,1647.000000,527.000000,2.604200,124700.000000 +-122.220000,37.790000,44.000000,1487.000000,314.000000,961.000000,272.000000,3.515600,109500.000000 +-122.220000,37.790000,38.000000,3049.000000,711.000000,2167.000000,659.000000,2.796900,141700.000000 +-122.220000,37.780000,51.000000,1637.000000,463.000000,1543.000000,393.000000,2.489000,119100.000000 +-122.220000,37.780000,50.000000,1920.000000,530.000000,1525.000000,477.000000,1.488600,128800.000000 +-122.220000,37.780000,44.000000,2968.000000,710.000000,2269.000000,610.000000,2.390600,111700.000000 +-122.220000,37.780000,44.000000,1678.000000,514.000000,1700.000000,495.000000,2.080100,131900.000000 +-122.220000,37.770000,52.000000,391.000000,128.000000,520.000000,138.000000,1.647100,95000.000000 +-122.220000,37.770000,52.000000,1137.000000,301.000000,866.000000,259.000000,2.590000,96400.000000 +-122.220000,37.770000,40.000000,494.000000,114.000000,547.000000,135.000000,2.801500,114800.000000 +-122.220000,37.480000,47.000000,2570.000000,783.000000,3107.000000,724.000000,2.805800,229500.000000 +-122.220000,37.480000,34.000000,1541.000000,584.000000,1564.000000,558.000000,2.560000,250000.000000 +-122.220000,37.470000,28.000000,5956.000000,1612.000000,3571.000000,1549.000000,3.186400,272800.000000 +-122.220000,37.470000,23.000000,7740.000000,1943.000000,4124.000000,1743.000000,3.326800,322800.000000 
+-122.220000,37.460000,37.000000,2586.000000,495.000000,1208.000000,502.000000,4.321400,342700.000000 +-122.220000,37.460000,13.000000,2888.000000,546.000000,1182.000000,504.000000,6.025500,409300.000000 +-122.220000,37.440000,32.000000,4281.000000,501.000000,1318.000000,484.000000,15.000100,500001.000000 +-122.220000,37.400000,32.000000,2297.000000,287.000000,814.000000,283.000000,15.000100,500001.000000 +-122.220000,37.370000,26.000000,440.000000,202.000000,322.000000,218.000000,5.183100,350000.000000 +-122.220000,37.360000,34.000000,1559.000000,243.000000,600.000000,242.000000,8.738200,500001.000000 +-122.230000,40.630000,16.000000,1141.000000,220.000000,563.000000,200.000000,2.328700,130700.000000 +-122.230000,40.400000,18.000000,2102.000000,377.000000,1059.000000,384.000000,3.055600,95500.000000 +-122.230000,40.320000,10.000000,2336.000000,426.000000,1003.000000,368.000000,3.083300,81300.000000 +-122.230000,40.200000,17.000000,762.000000,138.000000,322.000000,139.000000,4.291700,128800.000000 +-122.230000,39.950000,21.000000,2087.000000,382.000000,888.000000,361.000000,2.207000,86400.000000 +-122.230000,39.860000,21.000000,1730.000000,350.000000,982.000000,322.000000,1.837500,79800.000000 +-122.230000,39.750000,16.000000,2026.000000,396.000000,1031.000000,382.000000,1.937500,73100.000000 +-122.230000,39.530000,8.000000,1268.000000,336.000000,1237.000000,326.000000,1.370800,125000.000000 +-122.230000,38.330000,31.000000,3440.000000,574.000000,1538.000000,537.000000,5.536800,325900.000000 +-122.230000,38.170000,45.000000,350.000000,66.000000,225.000000,72.000000,1.894200,216700.000000 +-122.230000,38.150000,33.000000,1253.000000,238.000000,753.000000,236.000000,1.756000,86400.000000 +-122.230000,38.140000,36.000000,1412.000000,260.000000,792.000000,268.000000,2.305600,90400.000000 +-122.230000,38.130000,29.000000,5154.000000,1084.000000,2459.000000,1019.000000,3.266400,142900.000000 +-122.230000,38.120000,49.000000,2715.000000,435.000000,1006.000000,429.000000,4.230800,145800.000000 +-122.230000,38.110000,47.000000,3007.000000,524.000000,1152.000000,486.000000,4.000000,141500.000000 +-122.230000,38.100000,47.000000,1303.000000,278.000000,694.000000,269.000000,2.596900,92800.000000 +-122.230000,38.100000,46.000000,4143.000000,895.000000,2240.000000,847.000000,2.420100,92800.000000 +-122.230000,38.090000,26.000000,4397.000000,997.000000,2539.000000,965.000000,2.487500,90000.000000 +-122.230000,38.060000,52.000000,1350.000000,266.000000,490.000000,257.000000,3.125000,171100.000000 +-122.230000,38.050000,52.000000,1736.000000,358.000000,638.000000,297.000000,2.551700,147100.000000 +-122.230000,37.880000,41.000000,880.000000,129.000000,322.000000,126.000000,8.325200,452600.000000 +-122.230000,37.850000,52.000000,2800.000000,411.000000,1061.000000,403.000000,6.343400,373600.000000 +-122.230000,37.840000,50.000000,2515.000000,399.000000,970.000000,373.000000,5.859600,327600.000000 +-122.230000,37.840000,47.000000,3175.000000,454.000000,1098.000000,485.000000,5.286800,347600.000000 +-122.230000,37.830000,52.000000,2990.000000,379.000000,947.000000,361.000000,7.877200,500001.000000 +-122.230000,37.820000,52.000000,3494.000000,396.000000,1192.000000,383.000000,12.380400,500001.000000 +-122.230000,37.820000,52.000000,3242.000000,366.000000,1001.000000,352.000000,12.213800,500001.000000 +-122.230000,37.820000,52.000000,3051.000000,381.000000,1005.000000,369.000000,8.187200,466100.000000 +-122.230000,37.820000,52.000000,1611.000000,203.000000,556.000000,179.000000,8.747700,500001.000000 
+-122.230000,37.810000,52.000000,2814.000000,365.000000,878.000000,352.000000,7.508000,348700.000000 +-122.230000,37.810000,52.000000,2315.000000,292.000000,861.000000,258.000000,8.879300,410300.000000 +-122.230000,37.810000,52.000000,1433.000000,229.000000,612.000000,213.000000,4.770800,314700.000000 +-122.230000,37.800000,52.000000,783.000000,184.000000,488.000000,186.000000,1.937500,126600.000000 +-122.230000,37.800000,52.000000,2033.000000,486.000000,787.000000,459.000000,3.160300,269500.000000 +-122.230000,37.800000,52.000000,1252.000000,299.000000,844.000000,280.000000,2.392900,111900.000000 +-122.230000,37.800000,52.000000,1192.000000,289.000000,772.000000,257.000000,2.383300,146900.000000 +-122.230000,37.800000,51.000000,1590.000000,414.000000,949.000000,392.000000,1.902800,127900.000000 +-122.230000,37.800000,50.000000,1746.000000,480.000000,1149.000000,415.000000,2.250000,123500.000000 +-122.230000,37.790000,52.000000,1783.000000,395.000000,1659.000000,412.000000,2.935700,107900.000000 +-122.230000,37.790000,49.000000,1175.000000,217.000000,859.000000,219.000000,2.293000,106300.000000 +-122.230000,37.790000,48.000000,1696.000000,396.000000,1481.000000,343.000000,2.037500,122500.000000 +-122.230000,37.790000,40.000000,930.000000,199.000000,564.000000,184.000000,1.328100,113300.000000 +-122.230000,37.790000,30.000000,999.000000,264.000000,1011.000000,263.000000,1.885400,137500.000000 +-122.230000,37.790000,30.000000,610.000000,145.000000,425.000000,140.000000,1.619800,122700.000000 +-122.230000,37.780000,52.000000,986.000000,258.000000,1008.000000,255.000000,1.484400,119400.000000 +-122.230000,37.780000,44.000000,2340.000000,825.000000,2813.000000,751.000000,1.600900,118100.000000 +-122.230000,37.780000,43.000000,1420.000000,472.000000,1506.000000,438.000000,1.933800,112500.000000 +-122.230000,37.770000,52.000000,772.000000,179.000000,409.000000,160.000000,3.321400,189600.000000 +-122.230000,37.770000,52.000000,769.000000,206.000000,612.000000,183.000000,2.570000,72000.000000 +-122.230000,37.760000,52.000000,3037.000000,516.000000,1242.000000,518.000000,5.212800,289900.000000 +-122.230000,37.760000,52.000000,3011.000000,542.000000,1303.000000,535.000000,5.103900,273800.000000 +-122.230000,37.760000,52.000000,2269.000000,323.000000,805.000000,321.000000,4.718800,335300.000000 +-122.230000,37.760000,52.000000,1705.000000,246.000000,658.000000,253.000000,5.750000,306300.000000 +-122.230000,37.760000,52.000000,1316.000000,177.000000,378.000000,162.000000,5.291500,333000.000000 +-122.230000,37.490000,11.000000,840.000000,329.000000,1338.000000,345.000000,2.333300,241700.000000 +-122.230000,37.480000,33.000000,3108.000000,805.000000,1895.000000,717.000000,3.301500,267700.000000 +-122.230000,37.470000,39.000000,5264.000000,1259.000000,3057.000000,1265.000000,3.623000,276600.000000 +-122.230000,37.460000,36.000000,6090.000000,1057.000000,3081.000000,1075.000000,5.662900,343600.000000 +-122.230000,37.460000,33.000000,2643.000000,464.000000,1015.000000,427.000000,4.223200,363700.000000 +-122.230000,37.460000,26.000000,4670.000000,1039.000000,2103.000000,933.000000,4.416700,333800.000000 +-122.230000,37.450000,34.000000,4177.000000,723.000000,1586.000000,660.000000,5.045700,395100.000000 +-122.230000,37.450000,29.000000,1617.000000,235.000000,758.000000,246.000000,7.793200,469900.000000 +-122.230000,37.420000,16.000000,1945.000000,320.000000,512.000000,300.000000,7.454200,500001.000000 +-122.240000,40.510000,23.000000,2216.000000,378.000000,1006.000000,338.000000,4.559000,116800.000000 
+-122.240000,40.450000,27.000000,1804.000000,321.000000,782.000000,300.000000,3.597800,80600.000000 +-122.240000,40.190000,29.000000,1912.000000,336.000000,859.000000,325.000000,3.700000,70500.000000 +-122.240000,40.170000,51.000000,2378.000000,584.000000,1083.000000,494.000000,1.577000,51900.000000 +-122.240000,40.160000,19.000000,2500.000000,509.000000,1293.000000,494.000000,2.035000,55100.000000 +-122.240000,38.310000,38.000000,1938.000000,301.000000,823.000000,285.000000,6.108900,280800.000000 +-122.240000,38.250000,33.000000,213.000000,36.000000,91.000000,33.000000,4.916700,187500.000000 +-122.240000,38.140000,15.000000,8479.000000,1759.000000,5008.000000,1646.000000,3.724000,131600.000000 +-122.240000,38.130000,37.000000,3223.000000,564.000000,1325.000000,539.000000,4.093800,126900.000000 +-122.240000,38.120000,42.000000,1625.000000,255.000000,578.000000,243.000000,4.011400,166900.000000 +-122.240000,38.120000,39.000000,2967.000000,500.000000,1243.000000,523.000000,4.290200,152400.000000 +-122.240000,38.110000,52.000000,2111.000000,310.000000,772.000000,323.000000,4.775000,148200.000000 +-122.240000,38.110000,52.000000,2050.000000,492.000000,1277.000000,463.000000,3.050700,107300.000000 +-122.240000,38.100000,49.000000,1851.000000,356.000000,849.000000,307.000000,2.943200,103500.000000 +-122.240000,38.070000,13.000000,5451.000000,1194.000000,2957.000000,1081.000000,2.609800,162500.000000 +-122.240000,37.860000,52.000000,1668.000000,225.000000,517.000000,214.000000,7.852100,500001.000000 +-122.240000,37.850000,52.000000,3726.000000,474.000000,1366.000000,496.000000,9.395900,500001.000000 +-122.240000,37.850000,52.000000,1467.000000,190.000000,496.000000,177.000000,7.257400,352100.000000 +-122.240000,37.850000,37.000000,334.000000,54.000000,98.000000,47.000000,4.964300,335000.000000 +-122.240000,37.840000,52.000000,3529.000000,574.000000,1177.000000,555.000000,5.177300,389500.000000 +-122.240000,37.830000,52.000000,2449.000000,312.000000,916.000000,316.000000,8.119400,471600.000000 +-122.240000,37.830000,52.000000,1757.000000,246.000000,585.000000,227.000000,5.894800,457800.000000 +-122.240000,37.830000,41.000000,2576.000000,406.000000,794.000000,376.000000,5.956000,366100.000000 +-122.240000,37.820000,52.000000,3665.000000,517.000000,1470.000000,520.000000,6.155000,398600.000000 +-122.240000,37.820000,52.000000,3481.000000,751.000000,1444.000000,718.000000,3.900000,275700.000000 +-122.240000,37.820000,52.000000,1509.000000,225.000000,674.000000,244.000000,4.930600,313400.000000 +-122.240000,37.810000,52.000000,2513.000000,502.000000,1048.000000,518.000000,3.675000,269900.000000 +-122.240000,37.810000,52.000000,2485.000000,313.000000,953.000000,327.000000,6.859100,352400.000000 +-122.240000,37.810000,52.000000,1490.000000,238.000000,634.000000,256.000000,6.030200,287300.000000 +-122.240000,37.800000,52.000000,996.000000,228.000000,731.000000,228.000000,2.269700,127000.000000 +-122.240000,37.800000,52.000000,888.000000,168.000000,360.000000,175.000000,2.194400,211500.000000 +-122.240000,37.800000,52.000000,1803.000000,420.000000,1321.000000,401.000000,2.957000,122800.000000 +-122.240000,37.800000,52.000000,1591.000000,373.000000,1118.000000,347.000000,2.156300,128600.000000 +-122.240000,37.800000,52.000000,1586.000000,398.000000,1006.000000,335.000000,2.134800,140600.000000 +-122.240000,37.800000,49.000000,2838.000000,749.000000,1487.000000,677.000000,2.523800,169300.000000 +-122.240000,37.800000,47.000000,2046.000000,588.000000,1213.000000,554.000000,2.629200,182700.000000 
+-122.240000,37.790000,52.000000,674.000000,180.000000,647.000000,168.000000,3.375000,116100.000000 +-122.240000,37.790000,47.000000,1372.000000,395.000000,1237.000000,303.000000,2.125000,95500.000000 +-122.240000,37.790000,43.000000,1626.000000,376.000000,1284.000000,357.000000,2.254200,112200.000000 +-122.240000,37.790000,39.000000,1469.000000,431.000000,1464.000000,389.000000,2.163800,105500.000000 +-122.240000,37.790000,27.000000,1632.000000,492.000000,1171.000000,429.000000,2.317300,125000.000000 +-122.240000,37.770000,52.000000,1711.000000,386.000000,885.000000,373.000000,3.641700,206300.000000 +-122.240000,37.770000,52.000000,1153.000000,235.000000,481.000000,223.000000,2.641100,241000.000000 +-122.240000,37.770000,43.000000,955.000000,284.000000,585.000000,266.000000,2.388200,162500.000000 +-122.240000,37.760000,52.000000,2646.000000,581.000000,1128.000000,522.000000,3.071800,266700.000000 +-122.240000,37.760000,52.000000,2567.000000,436.000000,1119.000000,415.000000,4.609400,229300.000000 +-122.240000,37.760000,52.000000,2504.000000,516.000000,979.000000,472.000000,3.476200,244000.000000 +-122.240000,37.760000,52.000000,1846.000000,471.000000,827.000000,446.000000,2.683300,240900.000000 +-122.240000,37.760000,49.000000,2428.000000,525.000000,1110.000000,492.000000,3.671900,229800.000000 +-122.240000,37.750000,27.000000,4051.000000,753.000000,1499.000000,797.000000,4.871100,286600.000000 +-122.240000,37.730000,21.000000,7031.000000,1249.000000,2930.000000,1235.000000,4.521300,228400.000000 +-122.240000,37.720000,5.000000,18634.000000,2885.000000,7427.000000,2718.000000,7.611000,350700.000000 +-122.240000,37.490000,30.000000,2956.000000,590.000000,1191.000000,594.000000,3.746300,427600.000000 +-122.240000,37.490000,19.000000,322.000000,112.000000,191.000000,102.000000,2.583300,500001.000000 +-122.240000,37.480000,47.000000,2423.000000,407.000000,1010.000000,407.000000,6.215400,362700.000000 +-122.240000,37.480000,45.000000,4126.000000,696.000000,1722.000000,668.000000,4.896600,362100.000000 +-122.240000,37.480000,40.000000,4459.000000,1027.000000,2080.000000,982.000000,3.532200,361900.000000 +-122.240000,37.470000,41.000000,1183.000000,203.000000,455.000000,171.000000,5.107100,314100.000000 +-122.240000,37.470000,40.000000,1504.000000,270.000000,689.000000,287.000000,6.124400,308800.000000 +-122.240000,37.470000,36.000000,2021.000000,433.000000,1117.000000,432.000000,3.929000,303100.000000 +-122.240000,37.470000,35.000000,2283.000000,491.000000,1148.000000,436.000000,4.555600,318600.000000 +-122.240000,37.460000,36.000000,4686.000000,781.000000,2254.000000,845.000000,6.104300,343500.000000 +-122.240000,37.430000,36.000000,2410.000000,361.000000,934.000000,377.000000,7.652000,500001.000000 +-122.250000,40.660000,15.000000,2771.000000,546.000000,1423.000000,505.000000,3.641300,108500.000000 +-122.250000,40.600000,16.000000,2753.000000,494.000000,1414.000000,459.000000,3.832300,128300.000000 +-122.250000,40.420000,17.000000,1429.000000,265.000000,692.000000,245.000000,2.861100,98700.000000 +-122.250000,40.170000,47.000000,1554.000000,308.000000,846.000000,301.000000,1.807700,54100.000000 +-122.250000,40.170000,19.000000,3182.000000,630.000000,1741.000000,642.000000,1.972700,64900.000000 +-122.250000,39.790000,16.000000,2127.000000,412.000000,1104.000000,369.000000,3.046900,72200.000000 +-122.250000,38.170000,34.000000,778.000000,137.000000,406.000000,136.000000,4.295500,121300.000000 +-122.250000,38.150000,24.000000,2917.000000,543.000000,1878.000000,531.000000,3.701400,123600.000000 
+-122.250000,38.120000,47.000000,1339.000000,298.000000,794.000000,286.000000,2.586500,109800.000000 +-122.250000,38.110000,52.000000,2846.000000,705.000000,1519.000000,620.000000,2.197600,97900.000000 +-122.250000,38.110000,49.000000,2365.000000,504.000000,1131.000000,458.000000,2.613300,103100.000000 +-122.250000,38.100000,52.000000,248.000000,86.000000,173.000000,69.000000,2.300000,109400.000000 +-122.250000,38.100000,52.000000,2315.000000,556.000000,1113.000000,486.000000,2.504200,147900.000000 +-122.250000,38.100000,52.000000,1780.000000,373.000000,824.000000,317.000000,2.750000,109900.000000 +-122.250000,38.100000,52.000000,1591.000000,372.000000,817.000000,357.000000,2.141100,97200.000000 +-122.250000,38.090000,48.000000,833.000000,188.000000,652.000000,165.000000,2.241700,87900.000000 +-122.250000,38.050000,30.000000,1255.000000,297.000000,779.000000,307.000000,1.676700,147700.000000 +-122.250000,38.030000,15.000000,3338.000000,532.000000,1834.000000,520.000000,5.629300,197600.000000 +-122.250000,38.000000,16.000000,2978.000000,411.000000,1531.000000,400.000000,6.500600,237700.000000 +-122.250000,37.890000,42.000000,2863.000000,460.000000,1031.000000,448.000000,6.713800,368600.000000 +-122.250000,37.890000,37.000000,3000.000000,457.000000,987.000000,450.000000,7.538500,350000.000000 +-122.250000,37.870000,52.000000,609.000000,236.000000,1349.000000,250.000000,1.169600,500001.000000 +-122.250000,37.870000,42.000000,1756.000000,465.000000,2184.000000,422.000000,2.556200,371400.000000 +-122.250000,37.870000,41.000000,685.000000,141.000000,266.000000,123.000000,5.228900,384600.000000 +-122.250000,37.860000,52.000000,4048.000000,663.000000,1316.000000,590.000000,5.379400,376900.000000 +-122.250000,37.860000,52.000000,1709.000000,318.000000,719.000000,295.000000,5.046300,456300.000000 +-122.250000,37.860000,52.000000,1587.000000,444.000000,878.000000,449.000000,1.765200,336800.000000 +-122.250000,37.860000,52.000000,1389.000000,191.000000,514.000000,202.000000,7.089700,446200.000000 +-122.250000,37.860000,48.000000,2153.000000,517.000000,1656.000000,459.000000,3.041700,489600.000000 +-122.250000,37.850000,52.000000,919.000000,213.000000,413.000000,193.000000,4.036800,269700.000000 +-122.250000,37.850000,52.000000,1627.000000,280.000000,565.000000,259.000000,3.846200,342200.000000 +-122.250000,37.850000,52.000000,1274.000000,235.000000,558.000000,219.000000,5.643100,341300.000000 +-122.250000,37.840000,52.000000,3549.000000,707.000000,1551.000000,714.000000,3.691200,261100.000000 +-122.250000,37.840000,52.000000,3104.000000,687.000000,1157.000000,647.000000,3.120000,241400.000000 +-122.250000,37.840000,52.000000,2535.000000,489.000000,1094.000000,514.000000,3.659100,299200.000000 +-122.250000,37.830000,52.000000,4190.000000,1105.000000,1786.000000,1037.000000,3.089700,234100.000000 +-122.250000,37.830000,52.000000,2376.000000,559.000000,939.000000,519.000000,3.148400,224100.000000 +-122.250000,37.830000,52.000000,1279.000000,287.000000,534.000000,291.000000,3.142900,231600.000000 +-122.250000,37.830000,28.000000,5022.000000,1750.000000,2558.000000,1661.000000,2.423400,218500.000000 +-122.250000,37.820000,52.000000,1424.000000,289.000000,550.000000,253.000000,5.091700,262500.000000 +-122.250000,37.820000,32.000000,3809.000000,1098.000000,1806.000000,1022.000000,2.642900,218500.000000 +-122.250000,37.820000,28.000000,3337.000000,855.000000,1520.000000,802.000000,3.906300,225000.000000 +-122.250000,37.810000,52.000000,2155.000000,701.000000,895.000000,613.000000,2.579500,350000.000000 
+-122.250000,37.810000,52.000000,1758.000000,460.000000,686.000000,422.000000,3.169100,259400.000000 +-122.250000,37.810000,46.000000,3232.000000,835.000000,1373.000000,747.000000,3.225000,218800.000000 +-122.250000,37.810000,39.000000,854.000000,242.000000,389.000000,228.000000,3.125000,237500.000000 +-122.250000,37.810000,29.000000,4656.000000,1414.000000,2304.000000,1250.000000,2.491200,257800.000000 +-122.250000,37.810000,28.000000,5806.000000,1603.000000,2563.000000,1497.000000,3.217700,273400.000000 +-122.250000,37.800000,43.000000,2364.000000,792.000000,1359.000000,722.000000,2.142900,250000.000000 +-122.250000,37.800000,43.000000,2344.000000,647.000000,1710.000000,644.000000,1.650400,151800.000000 +-122.250000,37.800000,41.000000,1471.000000,469.000000,1062.000000,413.000000,1.612100,171400.000000 +-122.250000,37.790000,52.000000,902.000000,237.000000,846.000000,227.000000,3.625000,125000.000000 +-122.250000,37.790000,52.000000,1339.000000,391.000000,1086.000000,363.000000,2.181000,138800.000000 +-122.250000,37.790000,51.000000,175.000000,43.000000,228.000000,55.000000,2.100000,75000.000000 +-122.250000,37.790000,50.000000,629.000000,188.000000,742.000000,196.000000,2.645800,125000.000000 +-122.250000,37.790000,45.000000,1786.000000,526.000000,1475.000000,460.000000,1.777200,97500.000000 +-122.250000,37.790000,39.000000,461.000000,129.000000,381.000000,123.000000,1.600000,112500.000000 +-122.250000,37.770000,52.000000,859.000000,157.000000,429.000000,158.000000,4.309800,197900.000000 +-122.250000,37.770000,52.000000,2650.000000,566.000000,1468.000000,567.000000,3.016100,215700.000000 +-122.250000,37.770000,52.000000,2156.000000,458.000000,872.000000,445.000000,3.268500,254200.000000 +-122.250000,37.770000,52.000000,1038.000000,220.000000,482.000000,215.000000,3.177100,210200.000000 +-122.250000,37.770000,43.000000,4329.000000,1110.000000,2086.000000,1053.000000,2.975000,243400.000000 +-122.250000,37.760000,52.000000,2876.000000,648.000000,1340.000000,632.000000,3.567000,252900.000000 +-122.250000,37.740000,25.000000,1914.000000,365.000000,897.000000,390.000000,4.456200,206200.000000 +-122.250000,37.560000,19.000000,7976.000000,1406.000000,3437.000000,1338.000000,5.639600,430300.000000 +-122.250000,37.530000,16.000000,4428.000000,664.000000,1677.000000,623.000000,7.686400,422500.000000 +-122.250000,37.520000,14.000000,1472.000000,291.000000,876.000000,292.000000,4.359400,366000.000000 +-122.250000,37.510000,45.000000,989.000000,174.000000,504.000000,180.000000,4.838200,289400.000000 +-122.250000,37.500000,45.000000,1812.000000,336.000000,752.000000,329.000000,4.950000,345000.000000 +-122.250000,37.500000,44.000000,348.000000,79.000000,154.000000,73.000000,4.770800,253800.000000 +-122.250000,37.490000,44.000000,4420.000000,743.000000,1790.000000,735.000000,6.142000,394700.000000 +-122.250000,37.490000,43.000000,2607.000000,477.000000,1225.000000,461.000000,4.224000,349600.000000 +-122.250000,37.490000,40.000000,2709.000000,521.000000,1156.000000,510.000000,4.636600,395500.000000 +-122.250000,37.480000,45.000000,2743.000000,390.000000,974.000000,400.000000,7.162100,500001.000000 +-122.250000,37.480000,37.000000,3507.000000,569.000000,1663.000000,608.000000,5.086300,440300.000000 +-122.250000,37.460000,33.000000,6841.000000,950.000000,2681.000000,980.000000,7.108800,443300.000000 +-122.250000,37.390000,33.000000,370.000000,42.000000,153.000000,53.000000,10.651400,500001.000000 +-122.250000,37.080000,20.000000,1201.000000,282.000000,601.000000,234.000000,2.555600,177500.000000 
+-122.260000,41.660000,17.000000,1885.000000,350.000000,953.000000,328.000000,2.160700,61400.000000 +-122.260000,40.580000,14.000000,2539.000000,466.000000,1271.000000,438.000000,3.976200,138500.000000 +-122.260000,40.190000,35.000000,2467.000000,469.000000,1194.000000,444.000000,2.042500,63700.000000 +-122.260000,38.570000,22.000000,509.000000,103.000000,139.000000,73.000000,2.197900,152800.000000 +-122.260000,38.360000,25.000000,1821.000000,344.000000,349.000000,179.000000,6.993100,398800.000000 +-122.260000,38.310000,33.000000,4518.000000,704.000000,1776.000000,669.000000,5.244400,281100.000000 +-122.260000,38.290000,10.000000,969.000000,160.000000,482.000000,180.000000,6.579900,218100.000000 +-122.260000,38.160000,23.000000,2840.000000,491.000000,1586.000000,466.000000,4.033700,130400.000000 +-122.260000,38.150000,26.000000,3699.000000,671.000000,2388.000000,699.000000,4.051500,121900.000000 +-122.260000,38.150000,16.000000,3921.000000,727.000000,2830.000000,680.000000,4.505300,123700.000000 +-122.260000,38.140000,34.000000,963.000000,159.000000,392.000000,176.000000,4.015600,134700.000000 +-122.260000,38.130000,28.000000,3072.000000,790.000000,1375.000000,705.000000,1.636800,91200.000000 +-122.260000,38.120000,28.000000,3102.000000,734.000000,1623.000000,639.000000,3.102500,103700.000000 +-122.260000,38.110000,52.000000,2573.000000,639.000000,1238.000000,529.000000,2.670800,109700.000000 +-122.260000,38.110000,52.000000,1560.000000,353.000000,670.000000,287.000000,1.741100,98400.000000 +-122.260000,38.100000,30.000000,3317.000000,1058.000000,1794.000000,990.000000,1.183500,133300.000000 +-122.260000,38.100000,24.000000,1213.000000,395.000000,699.000000,386.000000,1.300700,94600.000000 +-122.260000,38.040000,41.000000,2512.000000,539.000000,1179.000000,480.000000,2.694000,123000.000000 +-122.260000,38.030000,41.000000,1631.000000,282.000000,752.000000,288.000000,3.934500,150200.000000 +-122.260000,38.030000,25.000000,2239.000000,361.000000,928.000000,353.000000,4.447400,203700.000000 +-122.260000,38.000000,6.000000,678.000000,104.000000,318.000000,91.000000,5.237500,246400.000000 +-122.260000,38.000000,5.000000,6265.000000,908.000000,3326.000000,872.000000,6.207300,272900.000000 +-122.260000,38.000000,14.000000,2338.000000,391.000000,1003.000000,398.000000,4.226900,170500.000000 +-122.260000,37.980000,28.000000,2038.000000,329.000000,947.000000,349.000000,5.117800,198000.000000 +-122.260000,37.900000,52.000000,1927.000000,279.000000,705.000000,288.000000,7.886400,357300.000000 +-122.260000,37.900000,37.000000,2220.000000,335.000000,903.000000,362.000000,7.833600,371300.000000 +-122.260000,37.890000,52.000000,3706.000000,531.000000,1205.000000,504.000000,6.682800,370900.000000 +-122.260000,37.890000,52.000000,3078.000000,494.000000,1005.000000,462.000000,6.381000,342200.000000 +-122.260000,37.880000,52.000000,2551.000000,417.000000,894.000000,404.000000,6.242500,391800.000000 +-122.260000,37.880000,52.000000,2363.000000,604.000000,1558.000000,573.000000,2.944000,338900.000000 +-122.260000,37.880000,52.000000,2255.000000,410.000000,823.000000,377.000000,5.797900,415300.000000 +-122.260000,37.880000,52.000000,1149.000000,255.000000,483.000000,249.000000,4.278800,332500.000000 +-122.260000,37.870000,52.000000,2773.000000,998.000000,1721.000000,949.000000,1.185900,241700.000000 +-122.260000,37.870000,52.000000,1087.000000,371.000000,3337.000000,350.000000,1.401200,175000.000000 +-122.260000,37.860000,52.000000,3497.000000,832.000000,1493.000000,794.000000,2.904400,257400.000000 
+-122.260000,37.860000,52.000000,2888.000000,604.000000,1253.000000,538.000000,3.389300,241700.000000 +-122.260000,37.860000,35.000000,5161.000000,1744.000000,3276.000000,1742.000000,1.630700,253600.000000 +-122.260000,37.850000,52.000000,3618.000000,768.000000,1508.000000,755.000000,3.261900,309600.000000 +-122.260000,37.850000,52.000000,2643.000000,626.000000,1212.000000,620.000000,1.916700,159200.000000 +-122.260000,37.850000,52.000000,2491.000000,474.000000,1098.000000,468.000000,3.075000,213500.000000 +-122.260000,37.840000,52.000000,950.000000,202.000000,467.000000,198.000000,3.964300,188800.000000 +-122.260000,37.840000,52.000000,696.000000,191.000000,345.000000,174.000000,2.673600,191300.000000 +-122.260000,37.840000,50.000000,2239.000000,455.000000,990.000000,419.000000,1.991100,158700.000000 +-122.260000,37.840000,42.000000,2555.000000,665.000000,1206.000000,595.000000,2.080400,226700.000000 +-122.260000,37.830000,52.000000,1665.000000,419.000000,946.000000,395.000000,2.097800,155400.000000 +-122.260000,37.830000,52.000000,1656.000000,420.000000,718.000000,382.000000,2.676800,182300.000000 +-122.260000,37.830000,52.000000,1470.000000,330.000000,689.000000,309.000000,3.480000,191400.000000 +-122.260000,37.830000,52.000000,1443.000000,311.000000,660.000000,292.000000,3.012500,184400.000000 +-122.260000,37.830000,51.000000,936.000000,311.000000,517.000000,249.000000,1.285200,150000.000000 +-122.260000,37.820000,40.000000,624.000000,195.000000,423.000000,160.000000,0.950600,187500.000000 +-122.260000,37.820000,37.000000,3633.000000,1085.000000,1838.000000,980.000000,2.610400,193100.000000 +-122.260000,37.820000,31.000000,4596.000000,1331.000000,2048.000000,1180.000000,2.834500,183800.000000 +-122.260000,37.810000,34.000000,5871.000000,1914.000000,2689.000000,1789.000000,2.840600,335700.000000 +-122.260000,37.810000,29.000000,335.000000,107.000000,202.000000,91.000000,2.006200,125000.000000 +-122.260000,37.800000,36.000000,5329.000000,2477.000000,3469.000000,2323.000000,2.009600,130000.000000 +-122.260000,37.800000,20.000000,2373.000000,779.000000,1659.000000,676.000000,1.692900,115000.000000 +-122.260000,37.780000,52.000000,970.000000,217.000000,528.000000,208.000000,3.343800,201300.000000 +-122.260000,37.780000,52.000000,1045.000000,239.000000,496.000000,216.000000,2.921300,190800.000000 +-122.260000,37.770000,52.000000,2097.000000,444.000000,915.000000,413.000000,2.989900,228100.000000 +-122.260000,37.770000,52.000000,1848.000000,479.000000,921.000000,477.000000,2.875000,234000.000000 +-122.260000,37.770000,52.000000,1704.000000,371.000000,663.000000,340.000000,4.226000,275000.000000 +-122.260000,37.770000,52.000000,1670.000000,350.000000,793.000000,299.000000,2.973200,282100.000000 +-122.260000,37.770000,52.000000,1565.000000,315.000000,637.000000,297.000000,4.777800,351800.000000 +-122.260000,37.770000,52.000000,1210.000000,168.000000,411.000000,172.000000,3.357100,405400.000000 +-122.260000,37.570000,23.000000,7995.000000,1254.000000,3484.000000,1198.000000,6.594800,404000.000000 +-122.260000,37.550000,17.000000,1321.000000,425.000000,683.000000,408.000000,4.704500,500001.000000 +-122.260000,37.540000,5.000000,3264.000000,442.000000,1607.000000,453.000000,9.141500,500001.000000 +-122.260000,37.540000,5.000000,1649.000000,388.000000,779.000000,376.000000,6.963500,417300.000000 +-122.260000,37.540000,16.000000,2118.000000,333.000000,770.000000,318.000000,7.247700,376000.000000 +-122.260000,37.540000,13.000000,1422.000000,295.000000,395.000000,195.000000,5.324700,327800.000000 
+-122.260000,37.530000,4.000000,5233.000000,1109.000000,1690.000000,907.000000,6.200700,311800.000000 +-122.260000,37.520000,34.000000,483.000000,131.000000,291.000000,157.000000,3.083300,256300.000000 +-122.260000,37.510000,46.000000,672.000000,149.000000,351.000000,136.000000,5.326400,258100.000000 +-122.260000,37.500000,52.000000,878.000000,186.000000,393.000000,186.000000,3.704500,360500.000000 +-122.260000,37.500000,44.000000,6983.000000,1131.000000,2818.000000,1115.000000,5.627100,374800.000000 +-122.260000,37.500000,24.000000,2307.000000,510.000000,842.000000,507.000000,3.611100,341500.000000 +-122.260000,37.480000,34.000000,4453.000000,682.000000,1805.000000,672.000000,5.603800,451300.000000 +-122.260000,37.460000,26.000000,5067.000000,750.000000,1996.000000,728.000000,7.000100,500001.000000 +-122.260000,37.450000,17.000000,2742.000000,441.000000,986.000000,421.000000,5.928500,496000.000000 +-122.260000,37.380000,28.000000,1103.000000,164.000000,415.000000,154.000000,7.863300,500001.000000 +-122.270000,40.530000,17.000000,2255.000000,416.000000,1171.000000,411.000000,2.875000,129800.000000 +-122.270000,40.460000,14.000000,2633.000000,530.000000,1324.000000,513.000000,2.276800,78600.000000 +-122.270000,38.680000,18.000000,742.000000,142.000000,343.000000,119.000000,3.156300,98400.000000 +-122.270000,38.530000,22.000000,678.000000,137.000000,336.000000,103.000000,4.400000,142500.000000 +-122.270000,38.320000,31.000000,1267.000000,319.000000,545.000000,297.000000,1.994600,206800.000000 +-122.270000,38.310000,44.000000,3030.000000,589.000000,1373.000000,582.000000,2.905400,155200.000000 +-122.270000,38.290000,36.000000,1446.000000,306.000000,678.000000,295.000000,2.840900,153000.000000 +-122.270000,38.290000,20.000000,3870.000000,795.000000,2088.000000,774.000000,3.302100,152700.000000 +-122.270000,38.280000,37.000000,1170.000000,303.000000,766.000000,302.000000,2.661800,136200.000000 +-122.270000,38.120000,42.000000,5266.000000,1167.000000,3124.000000,1025.000000,2.737500,120000.000000 +-122.270000,38.040000,47.000000,1685.000000,405.000000,835.000000,372.000000,2.310300,134500.000000 +-122.270000,38.000000,15.000000,1216.000000,166.000000,572.000000,178.000000,5.841800,240300.000000 +-122.270000,38.000000,12.000000,1592.000000,242.000000,969.000000,233.000000,6.157600,248700.000000 +-122.270000,37.990000,16.000000,4921.000000,737.000000,2312.000000,725.000000,5.889900,243200.000000 +-122.270000,37.980000,23.000000,3455.000000,479.000000,1375.000000,474.000000,6.028900,218600.000000 +-122.270000,37.970000,10.000000,15259.000000,2275.000000,7266.000000,2338.000000,6.066600,272400.000000 +-122.270000,37.910000,47.000000,1930.000000,315.000000,692.000000,296.000000,6.366900,315500.000000 +-122.270000,37.900000,52.000000,2079.000000,273.000000,684.000000,275.000000,7.955600,374400.000000 +-122.270000,37.900000,52.000000,2041.000000,270.000000,671.000000,253.000000,6.941400,417500.000000 +-122.270000,37.900000,52.000000,1803.000000,240.000000,572.000000,236.000000,6.174000,358800.000000 +-122.270000,37.900000,42.000000,1650.000000,274.000000,645.000000,256.000000,5.622800,375400.000000 +-122.270000,37.890000,52.000000,3046.000000,373.000000,975.000000,365.000000,8.834200,430500.000000 +-122.270000,37.890000,52.000000,2640.000000,366.000000,973.000000,355.000000,7.266000,371100.000000 +-122.270000,37.890000,52.000000,1978.000000,293.000000,723.000000,272.000000,5.398900,335600.000000 +-122.270000,37.880000,52.000000,2803.000000,930.000000,1372.000000,876.000000,2.190700,271400.000000 
+-122.270000,37.880000,52.000000,1693.000000,391.000000,669.000000,367.000000,3.541700,287500.000000 +-122.270000,37.880000,44.000000,2252.000000,592.000000,989.000000,550.000000,3.013200,272900.000000 +-122.270000,37.880000,37.000000,2619.000000,682.000000,1152.000000,616.000000,2.520000,277800.000000 +-122.270000,37.870000,52.000000,3084.000000,698.000000,1424.000000,694.000000,2.737200,210200.000000 +-122.270000,37.870000,49.000000,1350.000000,368.000000,707.000000,350.000000,2.884600,211300.000000 +-122.270000,37.870000,35.000000,3218.000000,1108.000000,1675.000000,1000.000000,1.746400,216700.000000 +-122.270000,37.870000,30.000000,1465.000000,439.000000,862.000000,425.000000,1.777800,268800.000000 +-122.270000,37.860000,52.000000,834.000000,186.000000,494.000000,175.000000,3.150000,206300.000000 +-122.270000,37.860000,52.000000,1769.000000,372.000000,849.000000,365.000000,2.691400,218800.000000 +-122.270000,37.860000,52.000000,1088.000000,305.000000,486.000000,267.000000,2.607100,250000.000000 +-122.270000,37.860000,49.000000,2052.000000,435.000000,924.000000,414.000000,2.541700,182700.000000 +-122.270000,37.850000,52.000000,4076.000000,920.000000,1800.000000,815.000000,2.705400,182300.000000 +-122.270000,37.850000,52.000000,335.000000,83.000000,152.000000,77.000000,2.284100,106300.000000 +-122.270000,37.850000,52.000000,1974.000000,426.000000,875.000000,363.000000,1.581700,153600.000000 +-122.270000,37.850000,52.000000,1228.000000,293.000000,648.000000,303.000000,2.120200,155500.000000 +-122.270000,37.850000,50.000000,1279.000000,300.000000,675.000000,255.000000,1.902800,150800.000000 +-122.270000,37.850000,47.000000,2077.000000,400.000000,719.000000,326.000000,2.243100,172700.000000 +-122.270000,37.850000,47.000000,1375.000000,307.000000,843.000000,319.000000,1.378500,142300.000000 +-122.270000,37.850000,42.000000,1639.000000,367.000000,929.000000,366.000000,1.713500,159800.000000 +-122.270000,37.850000,40.000000,751.000000,184.000000,409.000000,166.000000,1.357800,147500.000000 +-122.270000,37.840000,52.000000,2436.000000,541.000000,1015.000000,478.000000,1.725000,113900.000000 +-122.270000,37.840000,52.000000,2224.000000,437.000000,1006.000000,422.000000,2.600000,132600.000000 +-122.270000,37.840000,48.000000,1922.000000,409.000000,1026.000000,335.000000,1.796900,110400.000000 +-122.270000,37.830000,51.000000,2665.000000,574.000000,1258.000000,536.000000,2.730300,109700.000000 +-122.270000,37.830000,49.000000,1655.000000,366.000000,754.000000,329.000000,1.375000,104900.000000 +-122.270000,37.830000,49.000000,1215.000000,282.000000,570.000000,264.000000,1.486100,97200.000000 +-122.270000,37.830000,48.000000,1798.000000,432.000000,987.000000,374.000000,1.097200,104500.000000 +-122.270000,37.820000,43.000000,1007.000000,312.000000,558.000000,253.000000,1.734800,137500.000000 +-122.270000,37.820000,41.000000,3221.000000,853.000000,1959.000000,720.000000,1.110800,97500.000000 +-122.270000,37.820000,40.000000,946.000000,375.000000,700.000000,352.000000,1.775000,112500.000000 +-122.270000,37.820000,21.000000,896.000000,453.000000,735.000000,438.000000,0.921800,171900.000000 +-122.270000,37.810000,52.000000,210.000000,56.000000,183.000000,56.000000,1.166700,112500.000000 +-122.270000,37.810000,40.000000,880.000000,451.000000,582.000000,380.000000,0.977000,118800.000000 +-122.270000,37.810000,10.000000,875.000000,348.000000,546.000000,330.000000,0.760000,162500.000000 +-122.270000,37.790000,27.000000,1055.000000,347.000000,718.000000,302.000000,2.635400,187500.000000 
+-122.270000,37.780000,52.000000,1408.000000,280.000000,718.000000,265.000000,2.680600,207900.000000 +-122.270000,37.780000,52.000000,1222.000000,264.000000,630.000000,265.000000,3.770800,215300.000000 +-122.270000,37.780000,45.000000,1169.000000,263.000000,723.000000,286.000000,3.944400,212900.000000 +-122.270000,37.780000,13.000000,2020.000000,535.000000,959.000000,486.000000,5.260100,292700.000000 +-122.270000,37.770000,52.000000,2388.000000,559.000000,1121.000000,518.000000,3.326900,234500.000000 +-122.270000,37.770000,52.000000,2252.000000,388.000000,1033.000000,434.000000,5.533700,372000.000000 +-122.270000,37.770000,52.000000,1731.000000,377.000000,872.000000,363.000000,4.166700,225800.000000 +-122.270000,37.770000,52.000000,1710.000000,481.000000,849.000000,457.000000,2.711500,220800.000000 +-122.270000,37.770000,23.000000,5679.000000,1270.000000,2690.000000,1151.000000,4.769500,291700.000000 +-122.270000,37.740000,28.000000,6909.000000,1554.000000,2974.000000,1484.000000,3.687500,353900.000000 +-122.270000,37.730000,31.000000,5785.000000,1379.000000,2973.000000,1312.000000,3.268900,231000.000000 +-122.270000,37.560000,5.000000,4921.000000,1179.000000,1810.000000,1073.000000,5.693600,322200.000000 +-122.270000,37.560000,17.000000,3211.000000,847.000000,1553.000000,812.000000,4.943400,292100.000000 +-122.270000,37.550000,16.000000,4789.000000,816.000000,1840.000000,763.000000,6.747400,338200.000000 +-122.270000,37.550000,15.000000,1958.000000,282.000000,811.000000,284.000000,8.122100,483300.000000 +-122.270000,37.540000,5.000000,2140.000000,420.000000,990.000000,394.000000,6.035000,438800.000000 +-122.270000,37.540000,16.000000,3913.000000,565.000000,1752.000000,557.000000,7.364400,419700.000000 +-122.270000,37.530000,43.000000,1145.000000,230.000000,586.000000,254.000000,3.500000,267400.000000 +-122.270000,37.520000,35.000000,1051.000000,259.000000,517.000000,234.000000,3.700000,339700.000000 +-122.270000,37.510000,39.000000,3996.000000,793.000000,1744.000000,761.000000,4.507500,364900.000000 +-122.270000,37.470000,44.000000,3022.000000,473.000000,1235.000000,477.000000,6.705800,495900.000000 +-122.270000,37.430000,33.000000,1601.000000,223.000000,629.000000,215.000000,15.000100,500001.000000 +-122.270000,37.320000,37.000000,2607.000000,534.000000,1346.000000,507.000000,5.395100,277700.000000 +-122.270000,37.240000,30.000000,2762.000000,593.000000,1581.000000,502.000000,5.100200,319400.000000 +-122.280000,41.380000,15.000000,5266.000000,1031.000000,2147.000000,885.000000,2.803600,110100.000000 +-122.280000,40.500000,21.000000,2405.000000,476.000000,1197.000000,412.000000,2.648800,83100.000000 +-122.280000,38.320000,12.000000,4609.000000,1005.000000,2293.000000,960.000000,3.454300,194500.000000 +-122.280000,38.300000,23.000000,526.000000,152.000000,245.000000,130.000000,2.013400,142500.000000 +-122.280000,38.290000,23.000000,1398.000000,388.000000,1112.000000,406.000000,2.236600,140200.000000 +-122.280000,38.220000,42.000000,106.000000,18.000000,40.000000,25.000000,7.519700,275000.000000 +-122.280000,37.990000,28.000000,3801.000000,622.000000,1654.000000,571.000000,4.375000,193300.000000 +-122.280000,37.960000,35.000000,1579.000000,243.000000,734.000000,264.000000,5.500000,201000.000000 +-122.280000,37.910000,41.000000,3009.000000,482.000000,1053.000000,490.000000,5.828000,324400.000000 +-122.280000,37.910000,38.000000,2501.000000,348.000000,805.000000,329.000000,6.557600,358500.000000 
+-122.280000,37.900000,52.000000,2318.000000,328.000000,779.000000,312.000000,7.175400,362900.000000 +-122.280000,37.900000,52.000000,2261.000000,328.000000,819.000000,335.000000,4.908300,346800.000000 +-122.280000,37.900000,52.000000,1369.000000,249.000000,490.000000,248.000000,4.121200,287500.000000 +-122.280000,37.900000,49.000000,3191.000000,516.000000,1148.000000,507.000000,6.353800,333700.000000 +-122.280000,37.890000,52.000000,2616.000000,473.000000,1085.000000,487.000000,4.125000,270900.000000 +-122.280000,37.890000,52.000000,2070.000000,329.000000,722.000000,306.000000,5.417100,292000.000000 +-122.280000,37.890000,52.000000,1225.000000,169.000000,412.000000,168.000000,5.791200,327100.000000 +-122.280000,37.880000,52.000000,957.000000,188.000000,403.000000,172.000000,3.234400,245500.000000 +-122.280000,37.880000,52.000000,2495.000000,491.000000,1058.000000,464.000000,4.142900,259600.000000 +-122.280000,37.880000,52.000000,1909.000000,416.000000,811.000000,406.000000,3.006000,227900.000000 +-122.280000,37.880000,52.000000,1844.000000,332.000000,769.000000,334.000000,4.261400,261300.000000 +-122.280000,37.880000,52.000000,1193.000000,200.000000,506.000000,207.000000,4.191200,254500.000000 +-122.280000,37.880000,52.000000,1172.000000,215.000000,489.000000,218.000000,3.916700,235600.000000 +-122.280000,37.870000,52.000000,1813.000000,353.000000,828.000000,339.000000,3.562500,191700.000000 +-122.280000,37.870000,52.000000,1233.000000,300.000000,571.000000,292.000000,2.278800,182300.000000 +-122.280000,37.870000,46.000000,3022.000000,696.000000,1293.000000,675.000000,2.543000,220700.000000 +-122.280000,37.870000,46.000000,1777.000000,446.000000,805.000000,431.000000,2.867600,212000.000000 +-122.280000,37.860000,52.000000,938.000000,195.000000,393.000000,189.000000,3.859400,196400.000000 +-122.280000,37.860000,52.000000,2031.000000,450.000000,958.000000,445.000000,1.932700,169900.000000 +-122.280000,37.860000,52.000000,1659.000000,367.000000,788.000000,346.000000,2.821400,164300.000000 +-122.280000,37.860000,49.000000,2932.000000,668.000000,1361.000000,608.000000,1.979800,147400.000000 +-122.280000,37.860000,41.000000,2214.000000,550.000000,1213.000000,568.000000,2.284500,153100.000000 +-122.280000,37.850000,52.000000,610.000000,145.000000,281.000000,132.000000,2.901800,119400.000000 +-122.280000,37.850000,52.000000,2246.000000,472.000000,1005.000000,449.000000,2.416700,152700.000000 +-122.280000,37.850000,52.000000,1898.000000,421.000000,1102.000000,397.000000,1.808000,105500.000000 +-122.280000,37.850000,49.000000,1130.000000,244.000000,607.000000,239.000000,2.459700,93800.000000 +-122.280000,37.850000,44.000000,1025.000000,198.000000,506.000000,204.000000,1.730000,147900.000000 +-122.280000,37.840000,52.000000,729.000000,160.000000,395.000000,155.000000,1.687500,132000.000000 +-122.280000,37.840000,52.000000,2153.000000,481.000000,1168.000000,441.000000,1.961500,115200.000000 +-122.280000,37.840000,50.000000,2082.000000,492.000000,1131.000000,473.000000,1.642400,108900.000000 +-122.280000,37.840000,49.000000,1916.000000,447.000000,863.000000,378.000000,1.927400,122300.000000 +-122.280000,37.830000,52.000000,3108.000000,813.000000,1623.000000,765.000000,2.699700,126900.000000 +-122.280000,37.830000,52.000000,1511.000000,390.000000,901.000000,403.000000,1.410300,103900.000000 +-122.280000,37.820000,52.000000,945.000000,243.000000,576.000000,220.000000,1.411300,83100.000000 +-122.280000,37.820000,52.000000,1489.000000,335.000000,728.000000,244.000000,0.817200,85300.000000 
+-122.280000,37.820000,52.000000,1387.000000,341.000000,1074.000000,304.000000,1.217100,80300.000000 +-122.280000,37.820000,52.000000,1238.000000,288.000000,622.000000,259.000000,1.505700,87500.000000 +-122.280000,37.820000,52.000000,1170.000000,235.000000,701.000000,233.000000,1.609800,87500.000000 +-122.280000,37.810000,52.000000,773.000000,143.000000,377.000000,115.000000,2.408300,98200.000000 +-122.280000,37.810000,52.000000,386.000000,164.000000,346.000000,155.000000,0.807500,137500.000000 +-122.280000,37.810000,52.000000,340.000000,97.000000,200.000000,87.000000,1.520800,112500.000000 +-122.280000,37.810000,35.000000,948.000000,184.000000,467.000000,169.000000,1.808800,118800.000000 +-122.280000,37.810000,19.000000,1207.000000,243.000000,721.000000,207.000000,1.111100,108300.000000 +-122.280000,37.810000,17.000000,924.000000,289.000000,609.000000,289.000000,1.500000,162500.000000 +-122.280000,37.800000,38.000000,684.000000,176.000000,344.000000,155.000000,2.011400,131300.000000 +-122.280000,37.780000,50.000000,1487.000000,306.000000,730.000000,327.000000,2.513900,219000.000000 +-122.280000,37.780000,29.000000,5154.000000,1394.000000,3741.000000,1273.000000,2.576200,173400.000000 +-122.280000,37.750000,20.000000,1156.000000,365.000000,583.000000,326.000000,3.197200,100000.000000 +-122.280000,37.550000,17.000000,4199.000000,629.000000,2020.000000,630.000000,6.122800,375700.000000 +-122.280000,37.540000,37.000000,991.000000,180.000000,463.000000,177.000000,5.170100,294200.000000 +-122.280000,37.540000,24.000000,5114.000000,1357.000000,3169.000000,1268.000000,3.969900,293200.000000 +-122.280000,37.530000,34.000000,1980.000000,385.000000,970.000000,391.000000,5.120700,310900.000000 +-122.280000,37.530000,25.000000,3710.000000,1015.000000,2068.000000,958.000000,3.544500,286700.000000 +-122.280000,37.530000,15.000000,5417.000000,1199.000000,2593.000000,1098.000000,4.804700,438000.000000 +-122.280000,37.520000,38.000000,2197.000000,357.000000,1228.000000,373.000000,5.471900,397900.000000 +-122.280000,37.520000,29.000000,1526.000000,355.000000,724.000000,315.000000,4.031300,435200.000000 +-122.280000,37.510000,33.000000,4719.000000,758.000000,1980.000000,757.000000,6.106400,405000.000000 +-122.280000,37.500000,33.000000,6499.000000,998.000000,2694.000000,957.000000,7.478700,431300.000000 +-122.280000,37.490000,29.000000,4148.000000,635.000000,1638.000000,627.000000,6.912000,457200.000000 +-122.280000,37.490000,25.000000,7335.000000,1157.000000,2626.000000,1049.000000,6.547500,500001.000000 +-122.280000,37.470000,44.000000,863.000000,114.000000,281.000000,99.000000,6.887900,500001.000000 +-122.290000,40.470000,20.000000,2858.000000,612.000000,1422.000000,589.000000,1.965700,63000.000000 +-122.290000,40.440000,30.000000,1270.000000,365.000000,840.000000,324.000000,1.390400,48100.000000 +-122.290000,40.430000,21.000000,2842.000000,640.000000,1658.000000,608.000000,1.994300,59800.000000 +-122.290000,40.390000,17.000000,1682.000000,332.000000,887.000000,316.000000,1.843800,76400.000000 +-122.290000,38.400000,28.000000,2024.000000,340.000000,844.000000,309.000000,4.783300,361100.000000 +-122.290000,38.320000,23.000000,4312.000000,993.000000,2317.000000,934.000000,2.766700,153200.000000 +-122.290000,38.310000,45.000000,3075.000000,754.000000,1635.000000,723.000000,2.272100,139800.000000 +-122.290000,38.310000,25.000000,4927.000000,1005.000000,2756.000000,998.000000,2.732500,162900.000000 +-122.290000,38.300000,52.000000,144.000000,54.000000,89.000000,48.000000,1.009600,162500.000000 
+-122.290000,38.300000,52.000000,1219.000000,288.000000,847.000000,283.000000,1.669100,183300.000000 +-122.290000,38.280000,38.000000,2308.000000,425.000000,1272.000000,406.000000,3.608300,134200.000000 +-122.290000,38.190000,13.000000,7065.000000,1259.000000,3864.000000,1221.000000,4.747200,148600.000000 +-122.290000,38.000000,16.000000,4986.000000,1081.000000,2805.000000,1016.000000,4.025000,173200.000000 +-122.290000,37.980000,27.000000,2133.000000,347.000000,850.000000,350.000000,5.104600,209800.000000 +-122.290000,37.970000,20.000000,3426.000000,632.000000,1512.000000,580.000000,4.491100,227400.000000 +-122.290000,37.940000,20.000000,7578.000000,1426.000000,3637.000000,1362.000000,4.438700,190000.000000 +-122.290000,37.920000,36.000000,1450.000000,235.000000,568.000000,234.000000,6.000000,311400.000000 +-122.290000,37.920000,35.000000,583.000000,88.000000,235.000000,84.000000,5.943000,288200.000000 +-122.290000,37.920000,32.000000,1736.000000,234.000000,602.000000,231.000000,6.516000,401000.000000 +-122.290000,37.910000,38.000000,2591.000000,424.000000,905.000000,378.000000,5.169100,263200.000000 +-122.290000,37.900000,52.000000,1604.000000,263.000000,594.000000,286.000000,5.338000,270900.000000 +-122.290000,37.900000,49.000000,1283.000000,238.000000,576.000000,236.000000,3.333300,276800.000000 +-122.290000,37.890000,52.000000,3171.000000,698.000000,1498.000000,696.000000,3.179500,218200.000000 +-122.290000,37.890000,52.000000,2269.000000,380.000000,1004.000000,371.000000,5.169600,261400.000000 +-122.290000,37.890000,52.000000,2248.000000,422.000000,870.000000,377.000000,3.473200,246200.000000 +-122.290000,37.890000,52.000000,2178.000000,421.000000,940.000000,423.000000,5.055100,232200.000000 +-122.290000,37.890000,52.000000,1571.000000,349.000000,693.000000,326.000000,3.137500,229100.000000 +-122.290000,37.880000,52.000000,2159.000000,424.000000,824.000000,388.000000,3.889700,218400.000000 +-122.290000,37.880000,52.000000,1650.000000,395.000000,841.000000,380.000000,3.556000,179300.000000 +-122.290000,37.880000,50.000000,1211.000000,261.000000,523.000000,227.000000,3.867200,216700.000000 +-122.290000,37.880000,48.000000,2365.000000,490.000000,1034.000000,475.000000,3.106500,229200.000000 +-122.290000,37.880000,46.000000,1895.000000,442.000000,920.000000,425.000000,2.992600,192100.000000 +-122.290000,37.870000,52.000000,895.000000,198.000000,386.000000,204.000000,3.875000,182600.000000 +-122.290000,37.870000,50.000000,1829.000000,536.000000,1129.000000,516.000000,2.668400,185600.000000 +-122.290000,37.870000,46.000000,1267.000000,324.000000,792.000000,321.000000,2.525000,165900.000000 +-122.290000,37.870000,44.000000,2539.000000,755.000000,1382.000000,713.000000,2.537000,175000.000000 +-122.290000,37.860000,52.000000,1665.000000,404.000000,815.000000,372.000000,1.994600,156900.000000 +-122.290000,37.840000,35.000000,1872.000000,419.000000,1017.000000,414.000000,2.210600,132500.000000 +-122.290000,37.830000,52.000000,1121.000000,211.000000,554.000000,187.000000,3.392900,75700.000000 +-122.290000,37.820000,49.000000,135.000000,29.000000,86.000000,23.000000,6.118300,75000.000000 +-122.290000,37.810000,49.000000,844.000000,204.000000,560.000000,152.000000,1.750000,75000.000000 +-122.290000,37.810000,46.000000,935.000000,297.000000,582.000000,277.000000,0.728600,95200.000000 +-122.290000,37.810000,46.000000,2801.000000,644.000000,1823.000000,611.000000,0.980200,129200.000000 +-122.290000,37.810000,46.000000,12.000000,4.000000,18.000000,7.000000,0.499900,67500.000000 
+-122.290000,37.810000,26.000000,768.000000,152.000000,392.000000,127.000000,1.771900,82500.000000 +-122.290000,37.810000,23.000000,1745.000000,374.000000,1054.000000,325.000000,0.802600,112500.000000 +-122.290000,37.810000,20.000000,835.000000,161.000000,290.000000,133.000000,2.483000,137500.000000 +-122.290000,37.800000,52.000000,1027.000000,244.000000,492.000000,147.000000,2.609400,81300.000000 +-122.290000,37.780000,42.000000,1241.000000,309.000000,821.000000,300.000000,1.942700,102200.000000 +-122.290000,37.560000,34.000000,1693.000000,281.000000,846.000000,291.000000,5.368300,339400.000000 +-122.290000,37.550000,27.000000,3789.000000,874.000000,2243.000000,866.000000,4.390000,270100.000000 +-122.290000,37.540000,43.000000,2268.000000,438.000000,1151.000000,449.000000,4.909100,293200.000000 +-122.290000,37.540000,39.000000,1459.000000,285.000000,761.000000,291.000000,5.008100,298100.000000 +-122.290000,37.530000,41.000000,839.000000,190.000000,419.000000,215.000000,5.012000,368200.000000 +-122.290000,37.520000,38.000000,3767.000000,603.000000,1455.000000,615.000000,6.878700,386800.000000 +-122.290000,37.520000,33.000000,4104.000000,751.000000,1837.000000,771.000000,5.350600,388100.000000 +-122.290000,37.510000,35.000000,3040.000000,520.000000,1374.000000,518.000000,6.100400,426400.000000 +-122.290000,37.480000,15.000000,5480.000000,892.000000,2009.000000,831.000000,7.467800,500001.000000 +-122.290000,37.410000,30.000000,6373.000000,854.000000,2149.000000,798.000000,10.686800,500001.000000 +-122.300000,41.320000,13.000000,2300.000000,513.000000,1151.000000,488.000000,2.157100,81500.000000 +-122.300000,41.310000,29.000000,4059.000000,787.000000,1700.000000,702.000000,2.452600,97100.000000 +-122.300000,40.580000,19.000000,1043.000000,204.000000,505.000000,183.000000,1.603300,98800.000000 +-122.300000,40.450000,32.000000,1286.000000,271.000000,694.000000,236.000000,1.657900,68500.000000 +-122.300000,38.330000,15.000000,4741.000000,956.000000,2043.000000,856.000000,4.186200,183600.000000 +-122.300000,38.320000,20.000000,2063.000000,486.000000,1045.000000,460.000000,2.503500,153200.000000 +-122.300000,38.310000,34.000000,1797.000000,395.000000,1162.000000,407.000000,3.455000,137500.000000 +-122.300000,38.300000,44.000000,3690.000000,809.000000,1922.000000,736.000000,2.634600,139800.000000 +-122.300000,38.300000,21.000000,1108.000000,269.000000,524.000000,274.000000,2.761900,154600.000000 +-122.300000,38.290000,48.000000,2278.000000,477.000000,1219.000000,453.000000,2.964300,154000.000000 +-122.300000,38.290000,40.000000,1739.000000,318.000000,744.000000,312.000000,2.651800,156100.000000 +-122.300000,38.280000,31.000000,1633.000000,316.000000,944.000000,300.000000,3.397700,158700.000000 +-122.300000,38.270000,4.000000,1051.000000,263.000000,455.000000,248.000000,3.638900,130200.000000 +-122.300000,38.250000,18.000000,3548.000000,880.000000,1476.000000,699.000000,3.718800,163400.000000 +-122.300000,37.980000,25.000000,3807.000000,806.000000,1821.000000,792.000000,3.651800,164300.000000 +-122.300000,37.970000,35.000000,1811.000000,377.000000,911.000000,340.000000,3.375000,149700.000000 +-122.300000,37.970000,30.000000,4030.000000,772.000000,1777.000000,718.000000,3.639300,184000.000000 +-122.300000,37.930000,34.000000,2254.000000,357.000000,715.000000,306.000000,4.500000,304000.000000 +-122.300000,37.920000,33.000000,1615.000000,271.000000,710.000000,285.000000,4.080400,239000.000000 +-122.300000,37.910000,40.000000,2866.000000,617.000000,1305.000000,589.000000,3.632100,209100.000000 
+-122.300000,37.900000,41.000000,2053.000000,435.000000,873.000000,415.000000,3.409100,223000.000000 +-122.300000,37.900000,38.000000,2263.000000,522.000000,1027.000000,509.000000,3.512500,224200.000000 +-122.300000,37.900000,37.000000,2125.000000,489.000000,912.000000,462.000000,2.921900,217200.000000 +-122.300000,37.900000,35.000000,1102.000000,308.000000,688.000000,303.000000,2.394600,141700.000000 +-122.300000,37.900000,30.000000,1772.000000,471.000000,880.000000,437.000000,2.267200,162500.000000 +-122.300000,37.900000,15.000000,5083.000000,1212.000000,2420.000000,1146.000000,4.582400,256100.000000 +-122.300000,37.890000,36.000000,1077.000000,293.000000,518.000000,276.000000,3.020800,206300.000000 +-122.300000,37.880000,46.000000,1647.000000,376.000000,854.000000,355.000000,2.900000,144800.000000 +-122.300000,37.880000,45.000000,453.000000,146.000000,749.000000,137.000000,1.475000,187500.000000 +-122.300000,37.870000,52.000000,3123.000000,749.000000,1695.000000,684.000000,2.220800,144800.000000 +-122.300000,37.870000,10.000000,503.000000,118.000000,228.000000,100.000000,2.170500,150000.000000 +-122.300000,37.860000,50.000000,499.000000,127.000000,287.000000,128.000000,2.750000,140600.000000 +-122.300000,37.840000,14.000000,7355.000000,2408.000000,3100.000000,2051.000000,4.001800,143800.000000 +-122.300000,37.810000,52.000000,1224.000000,237.000000,521.000000,159.000000,1.191000,76100.000000 +-122.300000,37.810000,52.000000,1010.000000,209.000000,604.000000,187.000000,1.166700,78400.000000 +-122.300000,37.810000,48.000000,828.000000,182.000000,392.000000,133.000000,2.593800,73500.000000 +-122.300000,37.810000,48.000000,1455.000000,354.000000,788.000000,332.000000,0.805600,84400.000000 +-122.300000,37.770000,42.000000,2038.000000,368.000000,2037.000000,355.000000,2.644700,200000.000000 +-122.300000,37.570000,36.000000,1973.000000,352.000000,1169.000000,370.000000,5.033000,270900.000000 +-122.300000,37.560000,35.000000,1873.000000,351.000000,945.000000,333.000000,5.518400,274800.000000 +-122.300000,37.550000,35.000000,3675.000000,735.000000,1930.000000,715.000000,3.983300,342800.000000 +-122.300000,37.540000,39.000000,4292.000000,1097.000000,1758.000000,987.000000,2.940500,340500.000000 +-122.300000,37.530000,43.000000,1748.000000,366.000000,984.000000,371.000000,4.511600,337800.000000 +-122.300000,37.530000,40.000000,1833.000000,308.000000,751.000000,306.000000,6.000000,384200.000000 +-122.300000,37.530000,38.000000,984.000000,171.000000,429.000000,157.000000,5.326100,376800.000000 +-122.300000,37.530000,37.000000,1338.000000,215.000000,535.000000,221.000000,5.435100,376600.000000 +-122.300000,37.520000,38.000000,2769.000000,387.000000,994.000000,395.000000,5.590200,417000.000000 +-122.300000,37.520000,32.000000,2297.000000,347.000000,871.000000,342.000000,8.103900,382200.000000 +-122.300000,37.510000,35.000000,2789.000000,445.000000,1156.000000,404.000000,5.432200,391000.000000 +-122.310000,40.890000,18.000000,754.000000,161.000000,247.000000,107.000000,2.258300,78800.000000 +-122.310000,40.650000,11.000000,3664.000000,647.000000,1686.000000,613.000000,2.933800,141600.000000 +-122.310000,40.550000,11.000000,13714.000000,2302.000000,6511.000000,2267.000000,3.552200,100100.000000 +-122.310000,40.490000,18.000000,4026.000000,718.000000,1731.000000,705.000000,3.350000,118400.000000 +-122.310000,40.470000,26.000000,2723.000000,551.000000,1326.000000,547.000000,2.359400,66000.000000 +-122.310000,40.450000,25.000000,2596.000000,557.000000,1536.000000,549.000000,2.022100,60400.000000 
+-122.310000,40.450000,10.000000,1187.000000,236.000000,728.000000,248.000000,2.046900,66800.000000 +-122.310000,38.340000,19.000000,4187.000000,684.000000,1827.000000,605.000000,4.529300,210400.000000 +-122.310000,38.330000,26.000000,2155.000000,339.000000,956.000000,365.000000,4.013200,174700.000000 +-122.310000,38.330000,21.000000,1922.000000,344.000000,1051.000000,342.000000,3.604200,183300.000000 +-122.310000,38.320000,35.000000,3997.000000,762.000000,2074.000000,703.000000,3.285000,138100.000000 +-122.310000,38.320000,33.000000,2463.000000,421.000000,1235.000000,465.000000,3.704500,161500.000000 +-122.310000,38.310000,32.000000,2577.000000,458.000000,1172.000000,447.000000,3.879600,175500.000000 +-122.310000,38.300000,25.000000,3883.000000,740.000000,1641.000000,676.000000,3.900000,187300.000000 +-122.310000,38.270000,34.000000,1748.000000,284.000000,783.000000,303.000000,4.358500,194400.000000 +-122.310000,38.000000,26.000000,3735.000000,641.000000,1708.000000,633.000000,4.621000,191100.000000 +-122.310000,37.990000,25.000000,6508.000000,1137.000000,3259.000000,1081.000000,4.234800,157800.000000 +-122.310000,37.940000,38.000000,2172.000000,403.000000,945.000000,384.000000,4.395800,194200.000000 +-122.310000,37.940000,38.000000,1794.000000,349.000000,810.000000,335.000000,3.834300,191400.000000 +-122.310000,37.930000,39.000000,2505.000000,371.000000,872.000000,345.000000,5.343300,286500.000000 +-122.310000,37.930000,36.000000,2403.000000,408.000000,917.000000,404.000000,5.039900,253400.000000 +-122.310000,37.930000,36.000000,1526.000000,256.000000,696.000000,263.000000,3.508900,261900.000000 +-122.310000,37.920000,43.000000,2116.000000,407.000000,900.000000,361.000000,4.158700,212200.000000 +-122.310000,37.920000,30.000000,1014.000000,236.000000,537.000000,204.000000,2.845600,183300.000000 +-122.310000,37.910000,45.000000,3924.000000,834.000000,1992.000000,773.000000,4.114600,177800.000000 +-122.310000,37.910000,43.000000,2549.000000,511.000000,1060.000000,528.000000,3.641700,178400.000000 +-122.310000,37.910000,39.000000,2955.000000,696.000000,1417.000000,682.000000,2.762800,167800.000000 +-122.310000,37.910000,31.000000,1432.000000,348.000000,681.000000,348.000000,2.724300,218100.000000 +-122.310000,37.600000,34.000000,3225.000000,726.000000,1958.000000,656.000000,3.681100,273000.000000 +-122.310000,37.580000,44.000000,1990.000000,442.000000,1141.000000,424.000000,3.969600,258300.000000 +-122.310000,37.570000,45.000000,1165.000000,236.000000,845.000000,251.000000,4.187500,267300.000000 +-122.310000,37.570000,42.000000,3157.000000,676.000000,1603.000000,629.000000,3.742200,292600.000000 +-122.310000,37.570000,31.000000,2197.000000,477.000000,1193.000000,394.000000,4.637100,271100.000000 +-122.310000,37.560000,52.000000,2351.000000,494.000000,1126.000000,482.000000,3.968800,356900.000000 +-122.310000,37.560000,40.000000,1351.000000,330.000000,701.000000,297.000000,3.320000,292900.000000 +-122.310000,37.560000,36.000000,1727.000000,340.000000,952.000000,337.000000,4.791700,316000.000000 +-122.310000,37.550000,52.000000,900.000000,183.000000,371.000000,166.000000,3.250000,296400.000000 +-122.310000,37.550000,45.000000,507.000000,140.000000,305.000000,139.000000,2.615900,272900.000000 +-122.310000,37.550000,27.000000,3931.000000,933.000000,1877.000000,851.000000,3.972200,354100.000000 +-122.310000,37.540000,46.000000,2444.000000,397.000000,952.000000,402.000000,4.750000,388200.000000 +-122.310000,37.540000,45.000000,1222.000000,220.000000,492.000000,205.000000,5.539000,396900.000000 
+-122.310000,37.540000,42.000000,1159.000000,261.000000,465.000000,247.000000,3.184200,352800.000000 +-122.310000,37.530000,39.000000,1160.000000,191.000000,508.000000,185.000000,5.953900,379100.000000 +-122.310000,37.500000,22.000000,14034.000000,3020.000000,6266.000000,2952.000000,4.393900,491200.000000 +-122.320000,40.710000,18.000000,2879.000000,578.000000,1399.000000,586.000000,2.403600,105400.000000 +-122.320000,40.580000,2.000000,1937.000000,350.000000,756.000000,274.000000,3.000000,114200.000000 +-122.320000,40.570000,15.000000,2524.000000,449.000000,1374.000000,467.000000,3.381600,93800.000000 +-122.320000,38.350000,20.000000,3494.000000,549.000000,1673.000000,541.000000,5.571800,185200.000000 +-122.320000,38.320000,26.000000,2710.000000,498.000000,1439.000000,484.000000,5.000000,175200.000000 +-122.320000,38.320000,19.000000,2922.000000,417.000000,1221.000000,442.000000,5.800200,238700.000000 +-122.320000,38.290000,21.000000,1607.000000,356.000000,834.000000,352.000000,2.378700,177900.000000 +-122.320000,38.120000,12.000000,5382.000000,928.000000,3928.000000,921.000000,5.378500,150600.000000 +-122.320000,38.060000,4.000000,7999.000000,1611.000000,3596.000000,1396.000000,5.096900,174200.000000 +-122.320000,38.010000,26.000000,3054.000000,492.000000,1495.000000,496.000000,4.694400,171100.000000 +-122.320000,37.990000,24.000000,4865.000000,968.000000,2315.000000,893.000000,4.285200,173500.000000 +-122.320000,37.970000,33.000000,1156.000000,190.000000,643.000000,209.000000,4.500000,156600.000000 +-122.320000,37.970000,29.000000,2347.000000,464.000000,1135.000000,490.000000,3.972200,161000.000000 +-122.320000,37.960000,34.000000,2070.000000,357.000000,784.000000,294.000000,4.041700,182800.000000 +-122.320000,37.950000,37.000000,1887.000000,353.000000,895.000000,359.000000,4.450000,196600.000000 +-122.320000,37.950000,35.000000,1612.000000,354.000000,887.000000,331.000000,2.576900,146100.000000 +-122.320000,37.940000,47.000000,1911.000000,283.000000,697.000000,275.000000,6.271200,267700.000000 +-122.320000,37.940000,46.000000,1901.000000,295.000000,833.000000,352.000000,5.519600,210800.000000 +-122.320000,37.940000,38.000000,2751.000000,522.000000,1390.000000,489.000000,3.727700,165100.000000 +-122.320000,37.930000,40.000000,3056.000000,489.000000,1103.000000,481.000000,5.106700,247300.000000 +-122.320000,37.930000,40.000000,1141.000000,213.000000,434.000000,196.000000,3.946400,186900.000000 +-122.320000,37.930000,33.000000,296.000000,73.000000,216.000000,63.000000,2.675000,22500.000000 +-122.320000,37.920000,29.000000,2304.000000,399.000000,1377.000000,454.000000,5.018700,140600.000000 +-122.320000,37.920000,28.000000,4649.000000,977.000000,2606.000000,953.000000,3.267400,129100.000000 +-122.320000,37.920000,22.000000,1119.000000,220.000000,565.000000,199.000000,3.359400,186900.000000 +-122.320000,37.910000,34.000000,2669.000000,647.000000,1341.000000,555.000000,2.639900,119600.000000 +-122.320000,37.690000,48.000000,592.000000,122.000000,340.000000,143.000000,5.966000,315200.000000 +-122.320000,37.570000,52.000000,499.000000,148.000000,318.000000,145.000000,2.993400,256300.000000 +-122.320000,37.570000,42.000000,2574.000000,614.000000,2377.000000,588.000000,3.289100,237900.000000 +-122.320000,37.560000,9.000000,1150.000000,287.000000,377.000000,243.000000,3.831700,237500.000000 +-122.320000,37.560000,44.000000,537.000000,173.000000,355.000000,194.000000,2.857100,250000.000000 +-122.320000,37.560000,26.000000,2339.000000,704.000000,1283.000000,654.000000,3.162000,415000.000000 
+-122.320000,37.550000,50.000000,2501.000000,433.000000,1050.000000,410.000000,4.640600,500001.000000 +-122.320000,37.550000,46.000000,1437.000000,266.000000,607.000000,263.000000,4.806800,369700.000000 +-122.320000,37.550000,44.000000,2151.000000,411.000000,849.000000,370.000000,4.458300,397100.000000 +-122.320000,37.540000,34.000000,3661.000000,692.000000,1608.000000,656.000000,5.077400,407200.000000 +-122.320000,37.530000,39.000000,2795.000000,464.000000,1183.000000,443.000000,5.779000,387100.000000 +-122.320000,37.520000,17.000000,6645.000000,1034.000000,2557.000000,1032.000000,6.389200,480800.000000 +-122.330000,41.860000,19.000000,3599.000000,695.000000,1572.000000,601.000000,2.234000,58600.000000 +-122.330000,40.600000,5.000000,6383.000000,1206.000000,2965.000000,1141.000000,3.810300,111100.000000 +-122.330000,40.570000,16.000000,2777.000000,503.000000,1432.000000,500.000000,2.559200,75900.000000 +-122.330000,40.480000,26.000000,695.000000,126.000000,319.000000,124.000000,3.278800,101600.000000 +-122.330000,40.470000,30.000000,2502.000000,523.000000,1296.000000,481.000000,2.125000,66100.000000 +-122.330000,39.100000,10.000000,266.000000,62.000000,154.000000,49.000000,2.250000,75000.000000 +-122.330000,38.390000,36.000000,831.000000,122.000000,272.000000,109.000000,6.342700,304800.000000 +-122.330000,38.380000,28.000000,1020.000000,169.000000,504.000000,164.000000,4.569400,287500.000000 +-122.330000,38.330000,15.000000,3193.000000,468.000000,1303.000000,426.000000,5.301700,202600.000000 +-122.330000,38.310000,14.000000,6778.000000,947.000000,2768.000000,1014.000000,6.195300,258900.000000 +-122.330000,38.290000,14.000000,3541.000000,499.000000,1577.000000,459.000000,5.335100,269900.000000 +-122.330000,38.210000,33.000000,2017.000000,370.000000,949.000000,342.000000,4.625000,228600.000000 +-122.330000,38.000000,35.000000,3779.000000,711.000000,2493.000000,679.000000,2.978100,109000.000000 +-122.330000,37.990000,4.000000,3999.000000,1079.000000,1591.000000,887.000000,3.911000,112500.000000 +-122.330000,37.980000,3.000000,2850.000000,544.000000,1024.000000,515.000000,6.011500,175000.000000 +-122.330000,37.970000,45.000000,1982.000000,376.000000,1179.000000,398.000000,3.546300,130800.000000 +-122.330000,37.960000,46.000000,1222.000000,236.000000,819.000000,251.000000,3.911800,129400.000000 +-122.330000,37.950000,46.000000,1543.000000,339.000000,777.000000,322.000000,4.092700,142600.000000 +-122.330000,37.950000,45.000000,1585.000000,329.000000,981.000000,373.000000,3.031300,135800.000000 +-122.330000,37.950000,42.000000,1627.000000,336.000000,848.000000,316.000000,3.770800,144600.000000 +-122.330000,37.950000,22.000000,2099.000000,569.000000,1135.000000,509.000000,2.191500,120800.000000 +-122.330000,37.940000,47.000000,1882.000000,361.000000,797.000000,342.000000,3.584800,140800.000000 +-122.330000,37.940000,45.000000,1226.000000,279.000000,590.000000,260.000000,2.883300,140400.000000 +-122.330000,37.940000,44.000000,1769.000000,332.000000,828.000000,309.000000,4.052600,150800.000000 +-122.330000,37.940000,42.000000,1695.000000,345.000000,719.000000,334.000000,3.941700,139100.000000 +-122.330000,37.930000,27.000000,2158.000000,424.000000,1220.000000,442.000000,3.015600,111500.000000 +-122.330000,37.920000,26.000000,3887.000000,779.000000,2512.000000,740.000000,2.230100,122400.000000 +-122.330000,37.890000,42.000000,1342.000000,291.000000,551.000000,266.000000,4.526800,207400.000000 +-122.330000,37.580000,43.000000,1772.000000,422.000000,1573.000000,401.000000,2.747400,233100.000000 
+-122.330000,37.580000,40.000000,2362.000000,468.000000,992.000000,425.000000,4.791700,359900.000000 +-122.330000,37.570000,27.000000,3085.000000,876.000000,1453.000000,896.000000,3.433300,290000.000000 +-122.330000,37.570000,20.000000,2126.000000,643.000000,1112.000000,597.000000,3.625000,283300.000000 +-122.330000,37.560000,50.000000,1975.000000,245.000000,644.000000,251.000000,10.074300,500001.000000 +-122.330000,37.560000,34.000000,6394.000000,1619.000000,2400.000000,1496.000000,3.490200,500001.000000 +-122.330000,37.550000,51.000000,2565.000000,332.000000,870.000000,309.000000,9.369400,500001.000000 +-122.330000,37.550000,33.000000,2199.000000,312.000000,827.000000,319.000000,6.134900,500001.000000 +-122.330000,37.530000,18.000000,4493.000000,760.000000,1784.000000,725.000000,6.704200,413000.000000 +-122.340000,41.210000,26.000000,178.000000,40.000000,55.000000,25.000000,2.037500,57500.000000 +-122.340000,41.060000,33.000000,2149.000000,498.000000,631.000000,273.000000,1.881600,65800.000000 +-122.340000,40.630000,10.000000,2183.000000,369.000000,1061.000000,325.000000,3.685300,151600.000000 +-122.340000,40.580000,7.000000,4843.000000,992.000000,2223.000000,932.000000,3.054900,101700.000000 +-122.340000,40.570000,26.000000,2187.000000,472.000000,1339.000000,463.000000,2.039500,67900.000000 +-122.340000,40.570000,24.000000,1610.000000,307.000000,748.000000,307.000000,2.659100,82800.000000 +-122.340000,40.510000,16.000000,2247.000000,502.000000,1206.000000,463.000000,1.994600,119200.000000 +-122.340000,40.320000,12.000000,3848.000000,689.000000,2008.000000,683.000000,2.635200,92200.000000 +-122.340000,37.990000,42.000000,1531.000000,326.000000,1271.000000,377.000000,2.616700,85100.000000 +-122.340000,37.980000,33.000000,2014.000000,410.000000,1354.000000,427.000000,3.977300,131300.000000 +-122.340000,37.960000,33.000000,1817.000000,441.000000,1220.000000,389.000000,2.538200,103600.000000 +-122.340000,37.960000,15.000000,6487.000000,1717.000000,3408.000000,1560.000000,2.199100,133300.000000 +-122.340000,37.950000,44.000000,1788.000000,368.000000,933.000000,329.000000,2.875000,133400.000000 +-122.340000,37.950000,44.000000,1675.000000,317.000000,806.000000,311.000000,3.069400,135300.000000 +-122.340000,37.950000,39.000000,1986.000000,427.000000,1041.000000,385.000000,3.233300,135100.000000 +-122.340000,37.950000,38.000000,1340.000000,298.000000,766.000000,241.000000,3.283300,111700.000000 +-122.340000,37.940000,47.000000,2313.000000,433.000000,947.000000,430.000000,3.942000,143300.000000 +-122.340000,37.940000,44.000000,1917.000000,444.000000,936.000000,435.000000,2.739100,140300.000000 +-122.340000,37.940000,42.000000,2206.000000,451.000000,989.000000,444.000000,3.125000,143900.000000 +-122.340000,37.940000,31.000000,1611.000000,455.000000,786.000000,411.000000,1.681000,145500.000000 +-122.340000,37.930000,45.000000,2225.000000,486.000000,1304.000000,459.000000,2.640000,112100.000000 +-122.340000,37.930000,32.000000,2389.000000,652.000000,1672.000000,584.000000,1.442300,88300.000000 +-122.340000,37.930000,30.000000,2515.000000,481.000000,1327.000000,428.000000,2.128700,95000.000000 +-122.340000,37.880000,37.000000,3061.000000,930.000000,2556.000000,924.000000,1.737500,350000.000000 +-122.340000,37.590000,44.000000,1395.000000,269.000000,736.000000,288.000000,5.620600,386400.000000 +-122.340000,37.580000,50.000000,2784.000000,743.000000,1622.000000,698.000000,3.841300,372200.000000 +-122.340000,37.570000,52.000000,2635.000000,408.000000,967.000000,374.000000,7.042200,500001.000000 
+-122.340000,37.570000,52.000000,2547.000000,373.000000,876.000000,359.000000,8.259800,500001.000000 +-122.340000,37.570000,39.000000,2647.000000,616.000000,1254.000000,555.000000,4.240700,433800.000000 +-122.340000,37.570000,28.000000,3751.000000,949.000000,1691.000000,846.000000,3.972800,300000.000000 +-122.340000,37.550000,44.000000,2465.000000,328.000000,843.000000,324.000000,6.953300,500001.000000 +-122.340000,37.550000,25.000000,4470.000000,518.000000,1507.000000,504.000000,13.391300,500001.000000 +-122.340000,37.530000,27.000000,3339.000000,481.000000,1354.000000,458.000000,7.308100,464600.000000 +-122.340000,37.520000,34.000000,3559.000000,560.000000,1747.000000,550.000000,6.695900,411200.000000 +-122.340000,37.460000,21.000000,1799.000000,293.000000,576.000000,277.000000,7.439000,500001.000000 +-122.350000,40.680000,36.000000,1822.000000,449.000000,930.000000,399.000000,1.380100,56600.000000 +-122.350000,40.570000,22.000000,589.000000,97.000000,338.000000,107.000000,3.263900,87500.000000 +-122.350000,40.570000,18.000000,2226.000000,490.000000,859.000000,451.000000,1.682100,69400.000000 +-122.350000,40.540000,17.000000,2280.000000,453.000000,976.000000,434.000000,2.710000,97800.000000 +-122.350000,40.250000,10.000000,1621.000000,318.000000,866.000000,283.000000,3.500000,104300.000000 +-122.350000,38.300000,18.000000,3735.000000,557.000000,1504.000000,521.000000,5.630400,243100.000000 +-122.350000,37.980000,34.000000,3756.000000,726.000000,2237.000000,686.000000,3.756200,132900.000000 +-122.350000,37.970000,43.000000,2178.000000,482.000000,1545.000000,471.000000,2.586300,112200.000000 +-122.350000,37.960000,34.000000,1428.000000,335.000000,1272.000000,319.000000,2.546100,93900.000000 +-122.350000,37.960000,32.000000,1991.000000,504.000000,1139.000000,423.000000,2.035300,113600.000000 +-122.350000,37.960000,29.000000,1899.000000,524.000000,1357.000000,443.000000,1.875000,97200.000000 +-122.350000,37.950000,45.000000,2142.000000,431.000000,1318.000000,431.000000,3.073700,111600.000000 +-122.350000,37.950000,42.000000,1485.000000,290.000000,971.000000,303.000000,3.609400,114600.000000 +-122.350000,37.950000,31.000000,2449.000000,595.000000,1801.000000,548.000000,2.632800,110300.000000 +-122.350000,37.940000,34.000000,1880.000000,459.000000,1358.000000,422.000000,1.657100,105200.000000 +-122.350000,37.930000,41.000000,268.000000,75.000000,198.000000,82.000000,3.222200,156300.000000 +-122.350000,37.930000,39.000000,2002.000000,416.000000,1166.000000,395.000000,1.725700,91500.000000 +-122.350000,37.930000,28.000000,1995.000000,488.000000,1182.000000,439.000000,2.335200,84300.000000 +-122.350000,37.930000,19.000000,1334.000000,366.000000,1048.000000,316.000000,1.786500,88000.000000 +-122.350000,37.910000,4.000000,2851.000000,798.000000,1285.000000,712.000000,4.289500,186800.000000 +-122.350000,37.580000,52.000000,2495.000000,458.000000,1081.000000,471.000000,4.085500,410800.000000 +-122.350000,37.580000,30.000000,5039.000000,1564.000000,2129.000000,1536.000000,3.346900,345000.000000 +-122.350000,37.570000,52.000000,2170.000000,269.000000,784.000000,274.000000,10.428600,500001.000000 +-122.350000,37.570000,52.000000,2059.000000,345.000000,800.000000,308.000000,4.970000,500001.000000 +-122.350000,37.560000,52.000000,1659.000000,191.000000,519.000000,201.000000,14.421900,500001.000000 +-122.350000,37.530000,27.000000,2169.000000,305.000000,905.000000,319.000000,7.774300,453100.000000 +-122.360000,40.690000,32.000000,3611.000000,772.000000,2060.000000,759.000000,1.742700,60600.000000 
+-122.360000,40.680000,28.000000,1745.000000,379.000000,1011.000000,370.000000,2.039100,59800.000000 +-122.360000,40.660000,17.000000,2786.000000,559.000000,1528.000000,517.000000,2.011900,75800.000000 +-122.360000,40.620000,11.000000,3896.000000,886.000000,1902.000000,843.000000,2.290500,94200.000000 +-122.360000,40.580000,17.000000,1220.000000,275.000000,800.000000,261.000000,1.918100,118800.000000 +-122.360000,40.570000,31.000000,431.000000,90.000000,231.000000,78.000000,2.184000,77300.000000 +-122.360000,40.560000,20.000000,3592.000000,868.000000,1865.000000,781.000000,2.025800,64800.000000 +-122.360000,40.550000,21.000000,2500.000000,466.000000,1428.000000,502.000000,2.651300,113300.000000 +-122.360000,40.480000,21.000000,2333.000000,514.000000,1308.000000,509.000000,2.089900,74800.000000 +-122.360000,38.400000,16.000000,2716.000000,546.000000,898.000000,500.000000,2.253600,201200.000000 +-122.360000,38.030000,32.000000,2159.000000,393.000000,981.000000,369.000000,4.317300,175400.000000 +-122.360000,37.960000,39.000000,246.000000,57.000000,316.000000,52.000000,0.716000,104200.000000 +-122.360000,37.960000,31.000000,1157.000000,276.000000,956.000000,232.000000,1.534700,80400.000000 +-122.360000,37.960000,30.000000,950.000000,317.000000,1073.000000,280.000000,1.866400,107800.000000 +-122.360000,37.950000,40.000000,408.000000,102.000000,302.000000,81.000000,1.833300,69800.000000 +-122.360000,37.950000,38.000000,1066.000000,248.000000,729.000000,286.000000,1.513900,81700.000000 +-122.360000,37.950000,38.000000,1042.000000,289.000000,773.000000,248.000000,2.771400,104700.000000 +-122.360000,37.940000,45.000000,907.000000,188.000000,479.000000,161.000000,3.086200,79000.000000 +-122.360000,37.940000,43.000000,369.000000,107.000000,371.000000,111.000000,1.600000,79400.000000 +-122.360000,37.940000,41.000000,2591.000000,585.000000,1638.000000,462.000000,1.822000,79700.000000 +-122.360000,37.940000,26.000000,1540.000000,343.000000,1007.000000,338.000000,1.336500,72900.000000 +-122.360000,37.930000,44.000000,1891.000000,449.000000,1047.000000,432.000000,1.772700,86100.000000 +-122.360000,37.930000,42.000000,1796.000000,389.000000,1107.000000,372.000000,1.937500,87000.000000 +-122.360000,37.920000,52.000000,215.000000,41.000000,126.000000,43.000000,1.392900,104200.000000 +-122.360000,37.590000,20.000000,2638.000000,854.000000,1352.000000,718.000000,3.512500,350600.000000 +-122.360000,37.580000,52.000000,3084.000000,595.000000,1324.000000,571.000000,5.075600,374200.000000 +-122.360000,37.580000,37.000000,3325.000000,734.000000,1468.000000,692.000000,4.098700,434000.000000 +-122.360000,37.570000,35.000000,1774.000000,205.000000,588.000000,207.000000,10.733900,500001.000000 +-122.360000,37.560000,32.000000,4684.000000,540.000000,1512.000000,511.000000,15.000100,500001.000000 +-122.360000,37.540000,23.000000,6184.000000,747.000000,2165.000000,700.000000,10.167500,500001.000000 +-122.370000,41.410000,28.000000,1729.000000,419.000000,929.000000,370.000000,1.270000,53100.000000 +-122.370000,40.600000,7.000000,5178.000000,1336.000000,2557.000000,1283.000000,2.407900,111400.000000 +-122.370000,40.580000,25.000000,2054.000000,495.000000,835.000000,475.000000,2.153800,76900.000000 +-122.370000,40.540000,28.000000,2213.000000,390.000000,1096.000000,378.000000,3.692300,86900.000000 +-122.370000,40.450000,18.000000,1748.000000,337.000000,921.000000,327.000000,3.331500,85400.000000 +-122.370000,40.390000,12.000000,3783.000000,702.000000,1970.000000,639.000000,3.300500,98500.000000 
+-122.370000,37.950000,35.000000,215.000000,45.000000,100.000000,34.000000,1.602300,81300.000000 +-122.370000,37.950000,32.000000,1298.000000,363.000000,716.000000,268.000000,0.979700,76400.000000 +-122.370000,37.940000,49.000000,969.000000,229.000000,599.000000,195.000000,1.316700,71600.000000 +-122.370000,37.930000,45.000000,3150.000000,756.000000,1798.000000,749.000000,1.750000,37900.000000 +-122.370000,37.930000,37.000000,709.000000,190.000000,644.000000,174.000000,0.864100,84200.000000 +-122.370000,37.810000,26.000000,5416.000000,1045.000000,4531.000000,962.000000,2.790900,250000.000000 +-122.370000,37.600000,26.000000,15.000000,3.000000,11.000000,3.000000,5.048000,350000.000000 +-122.370000,37.590000,52.000000,2272.000000,403.000000,963.000000,376.000000,5.724500,500000.000000 +-122.370000,37.590000,39.000000,4645.000000,1196.000000,2156.000000,1113.000000,3.441200,353800.000000 +-122.370000,37.580000,52.000000,1900.000000,290.000000,665.000000,276.000000,4.548600,500001.000000 +-122.370000,37.580000,43.000000,2506.000000,432.000000,967.000000,428.000000,4.740400,500001.000000 +-122.370000,37.580000,34.000000,2697.000000,313.000000,810.000000,279.000000,12.429100,500001.000000 +-122.370000,37.560000,21.000000,7189.000000,874.000000,2440.000000,846.000000,11.683300,500001.000000 +-122.380000,41.540000,14.000000,4453.000000,797.000000,1817.000000,685.000000,2.746800,81100.000000 +-122.380000,41.430000,45.000000,2245.000000,448.000000,1155.000000,421.000000,1.650900,46200.000000 +-122.380000,40.690000,21.000000,1774.000000,370.000000,875.000000,354.000000,1.742200,61500.000000 +-122.380000,40.670000,10.000000,2281.000000,444.000000,1274.000000,438.000000,2.212000,65600.000000 +-122.380000,40.610000,14.000000,4773.000000,1133.000000,2101.000000,1072.000000,1.722700,105000.000000 +-122.380000,40.580000,36.000000,1808.000000,384.000000,807.000000,383.000000,1.837500,74800.000000 +-122.380000,40.580000,34.000000,1262.000000,267.000000,520.000000,259.000000,1.698300,72600.000000 +-122.380000,40.570000,43.000000,2251.000000,542.000000,1479.000000,512.000000,1.567600,58200.000000 +-122.380000,40.560000,23.000000,2281.000000,408.000000,1164.000000,420.000000,3.534700,101200.000000 +-122.380000,40.540000,36.000000,1216.000000,240.000000,647.000000,228.000000,2.694400,75300.000000 +-122.380000,39.680000,21.000000,1155.000000,210.000000,510.000000,175.000000,2.385100,67500.000000 +-122.380000,37.910000,18.000000,3507.000000,711.000000,1224.000000,676.000000,5.052400,269800.000000 +-122.380000,37.760000,52.000000,248.000000,68.000000,124.000000,51.000000,1.488600,450000.000000 +-122.380000,37.730000,40.000000,543.000000,101.000000,259.000000,89.000000,2.216700,193800.000000 +-122.380000,37.730000,38.000000,1388.000000,276.000000,871.000000,265.000000,2.166700,193800.000000 +-122.380000,37.730000,18.000000,4037.000000,990.000000,2722.000000,834.000000,1.428200,140400.000000 +-122.380000,37.710000,47.000000,1088.000000,190.000000,558.000000,166.000000,4.270800,207100.000000 +-122.380000,37.600000,33.000000,2577.000000,590.000000,1867.000000,566.000000,3.363200,265100.000000 +-122.380000,37.590000,49.000000,1657.000000,266.000000,613.000000,270.000000,5.783700,378100.000000 +-122.380000,37.590000,44.000000,2089.000000,348.000000,837.000000,317.000000,4.662800,459200.000000 +-122.380000,37.580000,52.000000,2039.000000,299.000000,772.000000,303.000000,6.471000,500001.000000 +-122.380000,37.580000,52.000000,1704.000000,226.000000,671.000000,243.000000,8.470400,500001.000000 
+-122.380000,37.340000,33.000000,1054.000000,209.000000,400.000000,161.000000,7.777300,456300.000000 +-122.380000,37.180000,52.000000,1746.000000,315.000000,941.000000,220.000000,3.304700,286100.000000 +-122.390000,41.410000,23.000000,910.000000,199.000000,370.000000,169.000000,1.744800,80100.000000 +-122.390000,40.640000,13.000000,3604.000000,704.000000,1598.000000,670.000000,2.414100,78700.000000 +-122.390000,40.590000,26.000000,1279.000000,438.000000,1276.000000,420.000000,1.240400,81300.000000 +-122.390000,40.580000,44.000000,1625.000000,392.000000,944.000000,347.000000,1.597200,68900.000000 +-122.390000,40.530000,28.000000,1427.000000,304.000000,692.000000,285.000000,2.125000,80800.000000 +-122.390000,40.520000,24.000000,2068.000000,346.000000,951.000000,332.000000,3.930600,85900.000000 +-122.390000,38.370000,33.000000,1066.000000,191.000000,403.000000,163.000000,6.800000,240800.000000 +-122.390000,38.000000,33.000000,44.000000,6.000000,23.000000,11.000000,4.125000,212500.000000 +-122.390000,37.800000,25.000000,4561.000000,1474.000000,1525.000000,1169.000000,4.558100,500001.000000 +-122.390000,37.790000,52.000000,94.000000,24.000000,113.000000,27.000000,4.656300,350000.000000 +-122.390000,37.780000,5.000000,1405.000000,515.000000,725.000000,392.000000,3.603700,187500.000000 +-122.390000,37.760000,52.000000,624.000000,170.000000,410.000000,148.000000,4.004200,208300.000000 +-122.390000,37.760000,52.000000,3390.000000,691.000000,1645.000000,596.000000,3.705100,253700.000000 +-122.390000,37.760000,52.000000,2316.000000,468.000000,1047.000000,476.000000,4.505700,321600.000000 +-122.390000,37.760000,52.000000,1877.000000,427.000000,712.000000,398.000000,3.972200,290900.000000 +-122.390000,37.760000,52.000000,157.000000,28.000000,88.000000,27.000000,3.675000,162500.000000 +-122.390000,37.740000,42.000000,4110.000000,846.000000,2147.000000,674.000000,2.569400,201000.000000 +-122.390000,37.730000,52.000000,1931.000000,329.000000,1025.000000,293.000000,2.906300,192000.000000 +-122.390000,37.730000,46.000000,1517.000000,299.000000,879.000000,309.000000,2.222200,195100.000000 +-122.390000,37.720000,52.000000,135.000000,34.000000,93.000000,26.000000,2.148400,181300.000000 +-122.390000,37.720000,45.000000,2893.000000,570.000000,1923.000000,535.000000,3.660700,192300.000000 +-122.390000,37.720000,28.000000,1609.000000,340.000000,1064.000000,290.000000,1.112500,206300.000000 +-122.390000,37.600000,44.000000,2304.000000,384.000000,986.000000,379.000000,4.652000,387100.000000 +-122.390000,37.600000,36.000000,1770.000000,499.000000,1225.000000,459.000000,2.560000,273100.000000 +-122.390000,37.600000,34.000000,707.000000,159.000000,381.000000,156.000000,4.375000,340900.000000 +-122.390000,37.590000,33.000000,2064.000000,299.000000,813.000000,303.000000,6.037400,500001.000000 +-122.390000,37.580000,36.000000,6026.000000,852.000000,2314.000000,892.000000,7.899700,500001.000000 +-122.390000,37.570000,35.000000,520.000000,83.000000,185.000000,76.000000,6.486500,450000.000000 +-122.400000,40.620000,9.000000,4794.000000,889.000000,2162.000000,865.000000,3.143900,103100.000000 +-122.400000,40.580000,43.000000,1455.000000,300.000000,747.000000,279.000000,2.785700,104200.000000 +-122.400000,40.580000,40.000000,3895.000000,929.000000,1782.000000,910.000000,1.332900,78200.000000 +-122.400000,40.570000,23.000000,1321.000000,259.000000,749.000000,222.000000,1.655000,90100.000000 +-122.400000,40.510000,20.000000,1750.000000,352.000000,834.000000,340.000000,2.485000,100600.000000 
+-122.400000,38.530000,24.000000,1741.000000,289.000000,564.000000,231.000000,3.611800,248400.000000 +-122.400000,38.460000,33.000000,2542.000000,466.000000,1099.000000,420.000000,4.635000,248500.000000 +-122.400000,38.410000,20.000000,4867.000000,1015.000000,1725.000000,1015.000000,2.568500,267600.000000 +-122.400000,38.340000,33.000000,1408.000000,273.000000,520.000000,212.000000,3.578100,242500.000000 +-122.400000,37.810000,12.000000,1349.000000,349.000000,536.000000,334.000000,7.785200,250000.000000 +-122.400000,37.800000,52.000000,2094.000000,568.000000,920.000000,503.000000,4.201500,412500.000000 +-122.400000,37.800000,52.000000,1642.000000,570.000000,1432.000000,513.000000,1.906300,300000.000000 +-122.400000,37.780000,52.000000,464.000000,202.000000,286.000000,148.000000,1.612500,112500.000000 +-122.400000,37.780000,32.000000,352.000000,132.000000,313.000000,105.000000,2.574200,350000.000000 +-122.400000,37.770000,52.000000,144.000000,63.000000,1061.000000,68.000000,4.395800,225000.000000 +-122.400000,37.760000,52.000000,4265.000000,912.000000,1555.000000,836.000000,4.119000,298300.000000 +-122.400000,37.760000,52.000000,1529.000000,385.000000,1347.000000,348.000000,2.931200,239100.000000 +-122.400000,37.760000,52.000000,1495.000000,311.000000,506.000000,275.000000,4.437500,320000.000000 +-122.400000,37.750000,52.000000,1182.000000,307.000000,1029.000000,306.000000,2.057700,214600.000000 +-122.400000,37.750000,44.000000,6848.000000,1584.000000,3269.000000,1383.000000,2.867900,243300.000000 +-122.400000,37.750000,26.000000,54.000000,9.000000,23.000000,9.000000,6.135900,225000.000000 +-122.400000,37.740000,45.000000,2462.000000,509.000000,1587.000000,450.000000,2.590000,211800.000000 +-122.400000,37.730000,50.000000,1947.000000,411.000000,1170.000000,384.000000,3.476900,238700.000000 +-122.400000,37.730000,45.000000,3490.000000,712.000000,2337.000000,781.000000,3.447200,225400.000000 +-122.400000,37.730000,42.000000,1413.000000,406.000000,1027.000000,362.000000,2.362500,233000.000000 +-122.400000,37.720000,47.000000,1167.000000,250.000000,953.000000,253.000000,4.272700,241900.000000 +-122.400000,37.720000,41.000000,1975.000000,440.000000,1528.000000,424.000000,3.862500,218300.000000 +-122.400000,37.720000,40.000000,1948.000000,413.000000,1434.000000,396.000000,3.031300,219100.000000 +-122.400000,37.720000,37.000000,971.000000,248.000000,647.000000,208.000000,2.118700,239300.000000 +-122.400000,37.710000,47.000000,1649.000000,328.000000,1183.000000,356.000000,3.362500,223700.000000 +-122.400000,37.710000,40.000000,1883.000000,397.000000,1411.000000,438.000000,3.046900,238000.000000 +-122.400000,37.680000,41.000000,2267.000000,486.000000,1045.000000,459.000000,4.114600,272200.000000 +-122.400000,37.680000,36.000000,3595.000000,815.000000,1649.000000,755.000000,3.381600,253400.000000 +-122.400000,37.620000,32.000000,3586.000000,921.000000,2249.000000,911.000000,3.105800,253000.000000 +-122.400000,37.610000,35.000000,2084.000000,549.000000,1077.000000,545.000000,3.162800,318400.000000 +-122.400000,37.600000,52.000000,1380.000000,203.000000,530.000000,210.000000,6.221000,420300.000000 +-122.400000,37.600000,30.000000,5351.000000,1134.000000,2558.000000,1074.000000,3.581700,369300.000000 +-122.410000,40.580000,35.000000,2072.000000,385.000000,1029.000000,375.000000,2.851200,75600.000000 +-122.410000,40.530000,28.000000,1127.000000,245.000000,538.000000,208.000000,2.037000,72000.000000 +-122.410000,38.070000,20.000000,4536.000000,708.000000,1812.000000,701.000000,6.043300,435900.000000 
+-122.410000,37.980000,36.000000,60.000000,15.000000,42.000000,25.000000,1.458300,67500.000000 +-122.410000,37.940000,52.000000,154.000000,33.000000,89.000000,38.000000,3.287500,275000.000000 +-122.410000,37.810000,31.000000,3991.000000,1311.000000,2305.000000,1201.000000,1.898100,500001.000000 +-122.410000,37.800000,52.000000,4088.000000,946.000000,1906.000000,863.000000,3.606500,433300.000000 +-122.410000,37.800000,52.000000,3697.000000,837.000000,1446.000000,711.000000,5.866000,500001.000000 +-122.410000,37.800000,52.000000,3278.000000,775.000000,1279.000000,709.000000,5.437800,500001.000000 +-122.410000,37.800000,52.000000,3260.000000,1535.000000,3260.000000,1457.000000,0.900000,500001.000000 +-122.410000,37.800000,52.000000,2892.000000,751.000000,1785.000000,733.000000,3.574600,350000.000000 +-122.410000,37.800000,52.000000,2618.000000,611.000000,1328.000000,559.000000,4.160700,350000.000000 +-122.410000,37.800000,52.000000,2583.000000,672.000000,1335.000000,613.000000,3.147700,500001.000000 +-122.410000,37.800000,52.000000,2450.000000,741.000000,1415.000000,664.000000,2.822900,375000.000000 +-122.410000,37.800000,52.000000,1999.000000,642.000000,1846.000000,620.000000,1.914500,225000.000000 +-122.410000,37.800000,52.000000,1866.000000,748.000000,2957.000000,710.000000,1.829500,243800.000000 +-122.410000,37.800000,52.000000,1724.000000,416.000000,1016.000000,395.000000,3.383900,400000.000000 +-122.410000,37.800000,52.000000,1394.000000,395.000000,1700.000000,400.000000,2.750000,168800.000000 +-122.410000,37.800000,52.000000,1288.000000,309.000000,437.000000,272.000000,6.324500,500001.000000 +-122.410000,37.800000,30.000000,1821.000000,738.000000,1648.000000,684.000000,0.883600,450000.000000 +-122.410000,37.790000,52.000000,6016.000000,2509.000000,3436.000000,2119.000000,2.516600,275000.000000 +-122.410000,37.790000,52.000000,5783.000000,2747.000000,4518.000000,2538.000000,1.724000,225000.000000 +-122.410000,37.790000,52.000000,3610.000000,1286.000000,1504.000000,1047.000000,3.205900,500001.000000 +-122.410000,37.790000,52.000000,3598.000000,1011.000000,2062.000000,966.000000,2.987100,380000.000000 +-122.410000,37.790000,52.000000,2909.000000,851.000000,1711.000000,830.000000,3.029600,500001.000000 +-122.410000,37.790000,52.000000,2302.000000,938.000000,1515.000000,861.000000,1.366800,55000.000000 +-122.410000,37.790000,52.000000,2161.000000,544.000000,904.000000,431.000000,3.506600,350000.000000 +-122.410000,37.790000,52.000000,1436.000000,738.000000,1688.000000,662.000000,1.515600,237500.000000 +-122.410000,37.780000,52.000000,1928.000000,836.000000,2124.000000,739.000000,1.118500,55000.000000 +-122.410000,37.780000,52.000000,1534.000000,763.000000,1520.000000,614.000000,1.455400,375000.000000 +-122.410000,37.770000,52.000000,849.000000,276.000000,582.000000,222.000000,3.467100,250000.000000 +-122.410000,37.770000,52.000000,361.000000,76.000000,168.000000,55.000000,3.229200,275000.000000 +-122.410000,37.770000,52.000000,1963.000000,565.000000,1628.000000,524.000000,2.608300,193800.000000 +-122.410000,37.760000,52.000000,351.000000,81.000000,308.000000,75.000000,2.666700,325000.000000 +-122.410000,37.760000,52.000000,3452.000000,784.000000,2987.000000,753.000000,2.813500,260300.000000 +-122.410000,37.760000,52.000000,2605.000000,678.000000,2071.000000,611.000000,3.296400,265000.000000 +-122.410000,37.760000,52.000000,2479.000000,515.000000,1816.000000,496.000000,3.077400,300000.000000 +-122.410000,37.760000,52.000000,2064.000000,496.000000,1726.000000,466.000000,3.402800,233300.000000 
+-122.410000,37.760000,52.000000,1427.000000,281.000000,620.000000,236.000000,1.994400,262500.000000 +-122.410000,37.750000,9.000000,1282.000000,334.000000,1176.000000,305.000000,2.653800,206300.000000 +-122.410000,37.750000,52.000000,3065.000000,622.000000,1405.000000,606.000000,3.781300,275900.000000 +-122.410000,37.750000,52.000000,2524.000000,559.000000,1430.000000,476.000000,3.407300,254700.000000 +-122.410000,37.750000,52.000000,2515.000000,576.000000,1209.000000,540.000000,3.591200,284900.000000 +-122.410000,37.750000,52.000000,2452.000000,623.000000,1932.000000,549.000000,2.390300,236100.000000 +-122.410000,37.750000,52.000000,2164.000000,606.000000,2034.000000,513.000000,2.032500,178100.000000 +-122.410000,37.750000,52.000000,1919.000000,404.000000,1483.000000,421.000000,3.406300,253900.000000 +-122.410000,37.750000,52.000000,1892.000000,415.000000,1442.000000,371.000000,4.289100,230000.000000 +-122.410000,37.750000,52.000000,1678.000000,386.000000,1220.000000,357.000000,2.580900,255300.000000 +-122.410000,37.740000,52.000000,831.000000,175.000000,415.000000,159.000000,1.946400,249000.000000 +-122.410000,37.740000,52.000000,2058.000000,399.000000,1208.000000,399.000000,3.642900,230000.000000 +-122.410000,37.740000,52.000000,1842.000000,339.000000,1032.000000,357.000000,5.556300,250800.000000 +-122.410000,37.740000,48.000000,409.000000,86.000000,148.000000,70.000000,3.668700,335000.000000 +-122.410000,37.740000,47.000000,1728.000000,398.000000,1178.000000,315.000000,3.281300,229600.000000 +-122.410000,37.740000,43.000000,1663.000000,330.000000,935.000000,335.000000,4.155200,240900.000000 +-122.410000,37.740000,38.000000,1754.000000,382.000000,928.000000,354.000000,4.141700,270800.000000 +-122.410000,37.730000,52.000000,1931.000000,358.000000,1092.000000,356.000000,3.783500,271300.000000 +-122.410000,37.730000,41.000000,2115.000000,378.000000,1168.000000,365.000000,4.064200,272500.000000 +-122.410000,37.730000,33.000000,2789.000000,567.000000,1682.000000,552.000000,3.864300,276200.000000 +-122.410000,37.720000,35.000000,2104.000000,434.000000,1225.000000,410.000000,4.821400,242900.000000 +-122.410000,37.720000,32.000000,1650.000000,316.000000,904.000000,295.000000,4.058300,236200.000000 +-122.410000,37.710000,47.000000,2289.000000,481.000000,1697.000000,465.000000,3.477300,226300.000000 +-122.410000,37.700000,23.000000,1817.000000,400.000000,1376.000000,382.000000,2.411300,214200.000000 +-122.410000,37.660000,44.000000,431.000000,195.000000,682.000000,212.000000,3.283300,233300.000000 +-122.410000,37.660000,40.000000,1294.000000,308.000000,1177.000000,301.000000,3.666700,218800.000000 +-122.410000,37.660000,37.000000,694.000000,188.000000,658.000000,225.000000,4.610300,237500.000000 +-122.410000,37.660000,37.000000,2155.000000,446.000000,1255.000000,428.000000,3.843800,250700.000000 +-122.410000,37.660000,32.000000,1385.000000,356.000000,1096.000000,353.000000,4.475000,246700.000000 +-122.410000,37.640000,38.000000,1204.000000,268.000000,921.000000,247.000000,4.446400,215400.000000 +-122.410000,37.630000,39.000000,4220.000000,1055.000000,2720.000000,1046.000000,2.639000,242500.000000 +-122.410000,37.630000,37.000000,1252.000000,275.000000,878.000000,287.000000,4.226200,228500.000000 +-122.410000,37.630000,35.000000,865.000000,226.000000,602.000000,217.000000,3.000000,229100.000000 +-122.410000,37.620000,49.000000,1464.000000,302.000000,636.000000,259.000000,4.250000,284100.000000 +-122.410000,37.620000,39.000000,3119.000000,758.000000,1807.000000,696.000000,3.221600,242700.000000 
+-122.410000,37.610000,43.000000,1934.000000,303.000000,847.000000,300.000000,4.738100,347400.000000 +-122.410000,37.610000,42.000000,1602.000000,262.000000,705.000000,255.000000,5.739800,336400.000000 +-122.410000,37.600000,26.000000,2754.000000,402.000000,1128.000000,395.000000,6.371900,466900.000000 +-122.420000,40.630000,23.000000,2248.000000,489.000000,1132.000000,444.000000,1.642900,80400.000000 +-122.420000,40.570000,10.000000,7949.000000,1309.000000,3176.000000,1163.000000,4.109900,120100.000000 +-122.420000,40.320000,16.000000,1978.000000,375.000000,961.000000,333.000000,2.682700,83900.000000 +-122.420000,38.310000,18.000000,1479.000000,246.000000,550.000000,217.000000,4.735600,333300.000000 +-122.420000,38.270000,25.000000,3282.000000,566.000000,1244.000000,483.000000,4.531300,308400.000000 +-122.420000,37.930000,47.000000,3453.000000,779.000000,1353.000000,728.000000,4.016000,274500.000000 +-122.420000,37.810000,52.000000,1314.000000,317.000000,473.000000,250.000000,4.347200,500001.000000 +-122.420000,37.800000,52.000000,741.000000,170.000000,277.000000,165.000000,4.471200,500001.000000 +-122.420000,37.800000,52.000000,4079.000000,1112.000000,1466.000000,1024.000000,4.591300,500001.000000 +-122.420000,37.800000,52.000000,3823.000000,1040.000000,1830.000000,977.000000,4.245800,450000.000000 +-122.420000,37.800000,52.000000,3321.000000,1115.000000,1576.000000,1034.000000,2.098700,458300.000000 +-122.420000,37.800000,52.000000,3067.000000,870.000000,2122.000000,850.000000,2.560300,287500.000000 +-122.420000,37.800000,52.000000,2852.000000,581.000000,838.000000,510.000000,8.075500,500001.000000 +-122.420000,37.800000,52.000000,2797.000000,685.000000,1156.000000,651.000000,4.347200,500001.000000 +-122.420000,37.800000,52.000000,2657.000000,772.000000,1014.000000,685.000000,4.038000,500001.000000 +-122.420000,37.800000,52.000000,1777.000000,486.000000,932.000000,427.000000,3.364300,420000.000000 +-122.420000,37.790000,52.000000,3511.000000,1232.000000,2452.000000,1131.000000,2.501300,275000.000000 +-122.420000,37.790000,52.000000,3457.000000,1021.000000,2286.000000,994.000000,2.565000,225000.000000 +-122.420000,37.790000,52.000000,2737.000000,1241.000000,1761.000000,1029.000000,1.806800,225000.000000 +-122.420000,37.790000,52.000000,2511.000000,895.000000,1202.000000,804.000000,2.660700,87500.000000 +-122.420000,37.790000,48.000000,4506.000000,1342.000000,1980.000000,1239.000000,4.015600,500001.000000 +-122.420000,37.780000,52.000000,989.000000,425.000000,634.000000,341.000000,2.441400,275000.000000 +-122.420000,37.780000,52.000000,1254.000000,469.000000,895.000000,456.000000,2.151600,187500.000000 +-122.420000,37.780000,27.000000,1728.000000,884.000000,1211.000000,752.000000,0.854300,500001.000000 +-122.420000,37.780000,26.000000,812.000000,507.000000,628.000000,445.000000,2.330400,500001.000000 +-122.420000,37.780000,17.000000,1257.000000,339.000000,1093.000000,384.000000,1.843800,72500.000000 +-122.420000,37.770000,52.000000,759.000000,323.000000,421.000000,255.000000,2.054800,162500.000000 +-122.420000,37.770000,52.000000,4226.000000,1315.000000,2619.000000,1242.000000,2.575500,325000.000000 +-122.420000,37.770000,52.000000,2185.000000,656.000000,1266.000000,626.000000,2.779400,350000.000000 +-122.420000,37.770000,52.000000,1925.000000,568.000000,867.000000,515.000000,2.879000,450000.000000 +-122.420000,37.770000,52.000000,1176.000000,493.000000,1136.000000,436.000000,1.375000,312500.000000 
+-122.420000,37.770000,52.000000,1086.000000,349.000000,589.000000,361.000000,2.518600,250000.000000 +-122.420000,37.760000,52.000000,4001.000000,1084.000000,2129.000000,1037.000000,3.505200,391200.000000 +-122.420000,37.760000,52.000000,2088.000000,487.000000,1082.000000,488.000000,2.680300,490000.000000 +-122.420000,37.760000,52.000000,1494.000000,610.000000,1630.000000,590.000000,1.650000,265000.000000 +-122.420000,37.760000,52.000000,1190.000000,400.000000,1270.000000,332.000000,2.032900,225000.000000 +-122.420000,37.760000,46.000000,2150.000000,817.000000,2075.000000,807.000000,1.382400,212500.000000 +-122.420000,37.760000,37.000000,1291.000000,588.000000,1846.000000,557.000000,1.336500,225000.000000 +-122.420000,37.750000,52.000000,801.000000,272.000000,639.000000,259.000000,2.197100,275000.000000 +-122.420000,37.750000,52.000000,2708.000000,762.000000,1460.000000,741.000000,2.905200,400000.000000 +-122.420000,37.750000,52.000000,2163.000000,607.000000,1447.000000,546.000000,3.355500,275000.000000 +-122.420000,37.750000,52.000000,2112.000000,528.000000,1227.000000,513.000000,3.553600,400000.000000 +-122.420000,37.750000,52.000000,1974.000000,525.000000,935.000000,465.000000,2.717300,300000.000000 +-122.420000,37.750000,52.000000,1855.000000,611.000000,1715.000000,614.000000,2.128900,250000.000000 +-122.420000,37.750000,52.000000,1609.000000,510.000000,1155.000000,439.000000,2.232800,250000.000000 +-122.420000,37.750000,52.000000,1564.000000,396.000000,1162.000000,374.000000,3.000000,275000.000000 +-122.420000,37.750000,52.000000,1207.000000,302.000000,1008.000000,269.000000,3.381600,262500.000000 +-122.420000,37.740000,52.000000,2713.000000,624.000000,1370.000000,594.000000,4.654700,325700.000000 +-122.420000,37.740000,52.000000,2084.000000,550.000000,1438.000000,516.000000,2.308700,258600.000000 +-122.420000,37.740000,52.000000,2019.000000,418.000000,999.000000,448.000000,4.221200,271300.000000 +-122.420000,37.740000,52.000000,1916.000000,432.000000,889.000000,424.000000,4.039100,279900.000000 +-122.420000,37.740000,52.000000,1786.000000,427.000000,856.000000,394.000000,3.083300,328100.000000 +-122.420000,37.740000,52.000000,1674.000000,346.000000,734.000000,335.000000,3.886400,281300.000000 +-122.420000,37.740000,52.000000,1651.000000,351.000000,973.000000,366.000000,3.458300,240900.000000 +-122.420000,37.740000,52.000000,1540.000000,370.000000,1136.000000,363.000000,4.312500,243000.000000 +-122.420000,37.740000,52.000000,1271.000000,353.000000,1076.000000,324.000000,2.991100,263900.000000 +-122.420000,37.730000,52.000000,3230.000000,654.000000,1765.000000,611.000000,3.333300,292300.000000 +-122.420000,37.730000,48.000000,1474.000000,308.000000,998.000000,330.000000,4.078100,250300.000000 +-122.420000,37.730000,46.000000,1819.000000,411.000000,1534.000000,406.000000,4.013200,229400.000000 +-122.420000,37.730000,35.000000,1871.000000,342.000000,1055.000000,310.000000,4.625000,279300.000000 +-122.420000,37.730000,35.000000,1791.000000,322.000000,988.000000,304.000000,4.576900,254500.000000 +-122.420000,37.720000,42.000000,4219.000000,1125.000000,3549.000000,993.000000,1.238700,212800.000000 +-122.420000,37.720000,37.000000,2638.000000,546.000000,1789.000000,521.000000,4.007100,244700.000000 +-122.420000,37.660000,41.000000,2189.000000,414.000000,1063.000000,409.000000,4.736100,302600.000000 +-122.420000,37.660000,36.000000,725.000000,121.000000,335.000000,140.000000,4.125000,327600.000000 
+-122.420000,37.660000,28.000000,3520.000000,672.000000,1746.000000,602.000000,4.923600,273500.000000 +-122.420000,37.660000,26.000000,3253.000000,932.000000,2246.000000,855.000000,2.663100,244000.000000 +-122.420000,37.640000,41.000000,98.000000,20.000000,68.000000,19.000000,2.225000,212500.000000 +-122.420000,37.630000,46.000000,66.000000,11.000000,30.000000,12.000000,2.375000,275000.000000 +-122.420000,37.630000,46.000000,1811.000000,337.000000,796.000000,333.000000,3.430000,292900.000000 +-122.420000,37.620000,43.000000,2367.000000,409.000000,1141.000000,400.000000,4.829500,319000.000000 +-122.420000,37.620000,40.000000,1545.000000,264.000000,756.000000,282.000000,4.464300,308100.000000 +-122.420000,37.620000,36.000000,1538.000000,256.000000,671.000000,247.000000,4.409100,317900.000000 +-122.420000,37.610000,37.000000,1866.000000,300.000000,822.000000,305.000000,4.700000,341300.000000 +-122.420000,37.610000,17.000000,1040.000000,432.000000,669.000000,405.000000,4.151300,137500.000000 +-122.430000,40.660000,15.000000,2532.000000,458.000000,1183.000000,450.000000,2.541700,92200.000000 +-122.430000,37.810000,52.000000,4309.000000,942.000000,1297.000000,798.000000,4.678100,500001.000000 +-122.430000,37.810000,39.000000,3275.000000,837.000000,1137.000000,725.000000,3.767900,500001.000000 +-122.430000,37.800000,52.000000,3172.000000,848.000000,1259.000000,806.000000,4.104700,466700.000000 +-122.430000,37.800000,52.000000,2994.000000,821.000000,1240.000000,779.000000,3.371500,500000.000000 +-122.430000,37.800000,52.000000,2802.000000,622.000000,954.000000,572.000000,4.539900,500001.000000 +-122.430000,37.800000,52.000000,2520.000000,649.000000,959.000000,607.000000,5.793400,500001.000000 +-122.430000,37.800000,52.000000,1976.000000,726.000000,1045.000000,669.000000,3.689300,475000.000000 +-122.430000,37.800000,52.000000,1380.000000,322.000000,553.000000,288.000000,4.041700,500001.000000 +-122.430000,37.790000,52.000000,3565.000000,892.000000,1377.000000,852.000000,3.806800,500001.000000 +-122.430000,37.790000,52.000000,3522.000000,938.000000,1319.000000,887.000000,4.398600,500001.000000 +-122.430000,37.790000,52.000000,3486.000000,847.000000,1248.000000,813.000000,7.262300,500001.000000 +-122.430000,37.790000,52.000000,3020.000000,842.000000,1294.000000,769.000000,3.437500,500001.000000 +-122.430000,37.790000,50.000000,3312.000000,1095.000000,1475.000000,997.000000,2.716500,500001.000000 +-122.430000,37.790000,25.000000,1637.000000,394.000000,649.000000,379.000000,5.004900,460000.000000 +-122.430000,37.790000,24.000000,2459.000000,1001.000000,1362.000000,957.000000,2.678200,450000.000000 +-122.430000,37.780000,52.000000,4014.000000,1069.000000,2070.000000,927.000000,2.820200,442900.000000 +-122.430000,37.780000,52.000000,1952.000000,628.000000,1284.000000,576.000000,2.105000,316700.000000 +-122.430000,37.780000,26.000000,3587.000000,1034.000000,1821.000000,936.000000,2.639200,287500.000000 +-122.430000,37.780000,24.000000,2037.000000,696.000000,1371.000000,585.000000,0.935500,112500.000000 +-122.430000,37.780000,2.000000,1205.000000,468.000000,577.000000,363.000000,3.643700,275000.000000 +-122.430000,37.780000,17.000000,2728.000000,908.000000,1670.000000,893.000000,1.077000,115000.000000 +-122.430000,37.780000,10.000000,2380.000000,843.000000,1245.000000,789.000000,1.306200,220000.000000 +-122.430000,37.770000,52.000000,4397.000000,1116.000000,1939.000000,1053.000000,2.758700,354500.000000 +-122.430000,37.770000,52.000000,3944.000000,1072.000000,1913.000000,973.000000,2.956700,425000.000000 
+-122.430000,37.770000,52.000000,3563.000000,832.000000,1712.000000,787.000000,3.370200,335700.000000 +-122.430000,37.770000,52.000000,2714.000000,779.000000,1438.000000,733.000000,3.603100,275000.000000 +-122.430000,37.770000,52.000000,2514.000000,729.000000,1428.000000,597.000000,2.397700,412500.000000 +-122.430000,37.770000,52.000000,1862.000000,472.000000,872.000000,471.000000,3.298100,222700.000000 +-122.430000,37.770000,52.000000,1760.000000,366.000000,742.000000,318.000000,4.445000,400000.000000 +-122.430000,37.770000,52.000000,1567.000000,482.000000,654.000000,425.000000,2.691400,366700.000000 +-122.430000,37.760000,52.000000,3771.000000,1017.000000,1575.000000,921.000000,3.565500,427300.000000 +-122.430000,37.760000,52.000000,3708.000000,849.000000,1531.000000,822.000000,3.356500,386400.000000 +-122.430000,37.760000,52.000000,2250.000000,566.000000,1051.000000,562.000000,2.845800,350000.000000 +-122.430000,37.760000,52.000000,1582.000000,353.000000,868.000000,329.000000,3.826100,250000.000000 +-122.430000,37.750000,52.000000,3521.000000,767.000000,1415.000000,687.000000,4.875000,362200.000000 +-122.430000,37.750000,52.000000,2960.000000,623.000000,1191.000000,589.000000,3.950000,347700.000000 +-122.430000,37.750000,52.000000,2721.000000,581.000000,1043.000000,519.000000,3.754500,383700.000000 +-122.430000,37.750000,52.000000,2700.000000,595.000000,1181.000000,575.000000,3.575000,396800.000000 +-122.430000,37.750000,52.000000,2459.000000,507.000000,1012.000000,475.000000,4.056800,387900.000000 +-122.430000,37.750000,52.000000,2285.000000,509.000000,839.000000,456.000000,4.794600,355600.000000 +-122.430000,37.750000,52.000000,2155.000000,468.000000,962.000000,490.000000,3.775000,325900.000000 +-122.430000,37.750000,52.000000,1970.000000,495.000000,871.000000,474.000000,4.062500,355600.000000 +-122.430000,37.750000,52.000000,1615.000000,393.000000,633.000000,378.000000,3.511400,347500.000000 +-122.430000,37.750000,40.000000,4850.000000,977.000000,1824.000000,952.000000,5.051900,356100.000000 +-122.430000,37.740000,52.000000,3328.000000,653.000000,1260.000000,614.000000,4.743700,331000.000000 +-122.430000,37.740000,52.000000,2637.000000,539.000000,1159.000000,497.000000,3.884600,333100.000000 +-122.430000,37.740000,52.000000,2229.000000,498.000000,1079.000000,472.000000,5.019600,324300.000000 +-122.430000,37.730000,52.000000,1985.000000,401.000000,1337.000000,424.000000,4.107100,240900.000000 +-122.430000,37.730000,52.000000,1494.000000,306.000000,1463.000000,360.000000,3.178600,222600.000000 +-122.430000,37.730000,52.000000,1386.000000,276.000000,729.000000,274.000000,3.669400,275500.000000 +-122.430000,37.730000,52.000000,1029.000000,205.000000,461.000000,212.000000,5.078200,310800.000000 +-122.430000,37.730000,49.000000,1435.000000,322.000000,1008.000000,329.000000,4.000000,264000.000000 +-122.430000,37.720000,52.000000,3351.000000,719.000000,2101.000000,706.000000,3.010700,242000.000000 +-122.430000,37.720000,52.000000,2206.000000,478.000000,1583.000000,456.000000,3.710500,250500.000000 +-122.430000,37.720000,50.000000,2912.000000,562.000000,1989.000000,537.000000,3.666700,252600.000000 +-122.430000,37.720000,49.000000,3427.000000,696.000000,2363.000000,661.000000,3.688500,254000.000000 +-122.430000,37.710000,52.000000,1508.000000,278.000000,1138.000000,304.000000,4.023400,266500.000000 +-122.430000,37.710000,52.000000,1410.000000,286.000000,879.000000,282.000000,3.190800,255600.000000 
+-122.430000,37.710000,35.000000,2878.000000,564.000000,1633.000000,528.000000,4.500000,266900.000000 +-122.430000,37.710000,24.000000,4299.000000,857.000000,2249.000000,788.000000,4.609800,290400.000000 +-122.430000,37.700000,19.000000,1733.000000,354.000000,959.000000,348.000000,4.770800,281700.000000 +-122.430000,37.660000,43.000000,1769.000000,387.000000,1102.000000,377.000000,4.549300,281500.000000 +-122.430000,37.660000,29.000000,3541.000000,786.000000,2259.000000,770.000000,4.303900,278400.000000 +-122.430000,37.640000,42.000000,4091.000000,757.000000,1861.000000,771.000000,4.207000,272700.000000 +-122.430000,37.640000,34.000000,8400.000000,1812.000000,4101.000000,1717.000000,4.103300,301000.000000 +-122.430000,37.630000,34.000000,4135.000000,687.000000,2154.000000,742.000000,4.973200,342300.000000 +-122.430000,37.610000,21.000000,10252.000000,2595.000000,4790.000000,2428.000000,4.169200,344500.000000 +-122.430000,37.430000,17.000000,11999.000000,2249.000000,5467.000000,1989.000000,4.840500,354300.000000 +-122.440000,38.570000,26.000000,2101.000000,390.000000,2171.000000,360.000000,3.642900,159700.000000 +-122.440000,38.030000,13.000000,4284.000000,1042.000000,2146.000000,937.000000,4.128900,179200.000000 +-122.440000,37.800000,52.000000,3830.000000,1142.000000,1310.000000,963.000000,3.480100,500001.000000 +-122.440000,37.800000,52.000000,3257.000000,735.000000,1045.000000,620.000000,4.552300,500001.000000 +-122.440000,37.800000,52.000000,3161.000000,472.000000,842.000000,410.000000,7.976100,500001.000000 +-122.440000,37.800000,52.000000,3149.000000,719.000000,1145.000000,658.000000,4.625000,500001.000000 +-122.440000,37.800000,52.000000,2865.000000,593.000000,1029.000000,577.000000,5.253900,500001.000000 +-122.440000,37.800000,52.000000,1724.000000,412.000000,540.000000,319.000000,4.285700,500001.000000 +-122.440000,37.800000,52.000000,1603.000000,487.000000,727.000000,464.000000,3.985600,500001.000000 +-122.440000,37.800000,52.000000,1580.000000,470.000000,714.000000,448.000000,3.244700,500001.000000 +-122.440000,37.800000,52.000000,1006.000000,291.000000,445.000000,257.000000,2.771700,500000.000000 +-122.440000,37.790000,52.000000,3785.000000,808.000000,1371.000000,799.000000,6.420900,500001.000000 +-122.440000,37.790000,52.000000,3640.000000,840.000000,1525.000000,796.000000,4.437500,500001.000000 +-122.440000,37.790000,52.000000,2083.000000,491.000000,1224.000000,483.000000,4.088200,468800.000000 +-122.440000,37.790000,52.000000,2045.000000,353.000000,722.000000,327.000000,8.075500,500001.000000 +-122.440000,37.790000,52.000000,1979.000000,359.000000,648.000000,370.000000,5.312400,500001.000000 +-122.440000,37.790000,52.000000,1903.000000,461.000000,831.000000,433.000000,4.446400,500001.000000 +-122.440000,37.790000,52.000000,1817.000000,535.000000,800.000000,487.000000,3.975000,500001.000000 +-122.440000,37.790000,52.000000,1726.000000,384.000000,614.000000,356.000000,3.681200,500000.000000 +-122.440000,37.790000,52.000000,1447.000000,186.000000,483.000000,181.000000,15.000100,500001.000000 +-122.440000,37.790000,52.000000,1335.000000,151.000000,402.000000,157.000000,10.878300,500001.000000 +-122.440000,37.780000,52.000000,3510.000000,791.000000,1703.000000,657.000000,2.865400,280000.000000 +-122.440000,37.780000,52.000000,3017.000000,851.000000,1588.000000,800.000000,3.388200,471400.000000 +-122.440000,37.780000,52.000000,2911.000000,753.000000,1696.000000,676.000000,2.572100,475000.000000 
+-122.440000,37.780000,52.000000,2747.000000,736.000000,1309.000000,653.000000,2.943000,341700.000000 +-122.440000,37.780000,52.000000,2695.000000,657.000000,1243.000000,573.000000,2.856900,372200.000000 +-122.440000,37.780000,52.000000,1118.000000,279.000000,514.000000,284.000000,2.419600,346200.000000 +-122.440000,37.780000,44.000000,1545.000000,334.000000,561.000000,326.000000,3.875000,412500.000000 +-122.440000,37.780000,39.000000,1181.000000,310.000000,901.000000,281.000000,1.486600,237500.000000 +-122.440000,37.780000,31.000000,1364.000000,386.000000,707.000000,379.000000,3.160700,293800.000000 +-122.440000,37.780000,16.000000,883.000000,236.000000,601.000000,219.000000,2.151000,146900.000000 +-122.440000,37.770000,52.000000,3505.000000,745.000000,1374.000000,714.000000,4.366700,500001.000000 +-122.440000,37.770000,52.000000,3475.000000,807.000000,1518.000000,777.000000,3.618600,500001.000000 +-122.440000,37.770000,52.000000,3225.000000,667.000000,1494.000000,619.000000,4.487500,500001.000000 +-122.440000,37.770000,52.000000,2994.000000,736.000000,1428.000000,700.000000,3.076600,438900.000000 +-122.440000,37.770000,52.000000,2705.000000,647.000000,1355.000000,628.000000,2.016100,364300.000000 +-122.440000,37.770000,52.000000,2537.000000,559.000000,849.000000,530.000000,5.178800,476900.000000 +-122.440000,37.770000,52.000000,2002.000000,520.000000,939.000000,501.000000,3.223900,488900.000000 +-122.440000,37.760000,52.000000,2959.000000,683.000000,1145.000000,666.000000,4.222200,361600.000000 +-122.440000,37.760000,52.000000,2509.000000,496.000000,855.000000,478.000000,5.073100,405400.000000 +-122.440000,37.760000,52.000000,2110.000000,454.000000,816.000000,438.000000,3.907900,370000.000000 +-122.440000,37.760000,50.000000,2589.000000,569.000000,945.000000,544.000000,5.251900,376600.000000 +-122.440000,37.760000,38.000000,2202.000000,452.000000,833.000000,435.000000,6.893900,455900.000000 +-122.440000,37.760000,35.000000,1581.000000,422.000000,580.000000,388.000000,4.050000,423100.000000 +-122.440000,37.760000,30.000000,5089.000000,1210.000000,1935.000000,1139.000000,4.605300,386100.000000 +-122.440000,37.750000,52.000000,2082.000000,425.000000,801.000000,411.000000,4.270800,368900.000000 +-122.440000,37.750000,52.000000,1573.000000,334.000000,725.000000,338.000000,5.050500,380400.000000 +-122.440000,37.750000,46.000000,1519.000000,291.000000,573.000000,289.000000,4.266700,338800.000000 +-122.440000,37.750000,28.000000,4930.000000,1381.000000,2232.000000,1321.000000,4.323200,316200.000000 +-122.440000,37.750000,21.000000,5457.000000,1247.000000,2304.000000,1180.000000,4.546900,409700.000000 +-122.440000,37.740000,52.000000,2074.000000,366.000000,909.000000,394.000000,4.838200,294900.000000 +-122.440000,37.740000,23.000000,6291.000000,1269.000000,2818.000000,1198.000000,4.267200,391900.000000 +-122.440000,37.740000,23.000000,184.000000,44.000000,118.000000,40.000000,4.537500,350000.000000 +-122.440000,37.730000,52.000000,866.000000,205.000000,587.000000,171.000000,5.022400,261900.000000 +-122.440000,37.730000,52.000000,2838.000000,567.000000,1411.000000,526.000000,3.830400,261400.000000 +-122.440000,37.730000,52.000000,2381.000000,492.000000,1485.000000,447.000000,4.389800,270000.000000 +-122.440000,37.730000,46.000000,3581.000000,758.000000,1670.000000,703.000000,4.193200,269200.000000 +-122.440000,37.730000,39.000000,1912.000000,418.000000,970.000000,406.000000,4.781300,275500.000000 +-122.440000,37.720000,52.000000,2890.000000,571.000000,1769.000000,541.000000,3.827400,252000.000000 
+-122.440000,37.720000,52.000000,1507.000000,282.000000,929.000000,281.000000,3.895800,247700.000000 +-122.440000,37.720000,52.000000,1380.000000,272.000000,847.000000,284.000000,3.714300,260000.000000 +-122.440000,37.720000,49.000000,1557.000000,405.000000,1173.000000,385.000000,3.460500,265000.000000 +-122.440000,37.720000,48.000000,2675.000000,585.000000,1773.000000,540.000000,3.956500,268500.000000 +-122.440000,37.710000,46.000000,1230.000000,247.000000,895.000000,257.000000,5.391300,248900.000000 +-122.440000,37.710000,31.000000,2370.000000,441.000000,1524.000000,470.000000,5.020100,264100.000000 +-122.440000,37.700000,6.000000,3523.000000,664.000000,1705.000000,608.000000,4.931800,258100.000000 +-122.440000,37.670000,35.000000,1814.000000,365.000000,1025.000000,384.000000,4.425000,268400.000000 +-122.440000,37.660000,36.000000,1447.000000,276.000000,799.000000,275.000000,4.763900,265600.000000 +-122.440000,37.650000,38.000000,5277.000000,1008.000000,2695.000000,997.000000,3.972200,276200.000000 +-122.440000,37.630000,35.000000,5113.000000,959.000000,3004.000000,964.000000,4.762500,281300.000000 +-122.440000,37.520000,16.000000,7077.000000,1179.000000,3502.000000,1148.000000,5.991900,345100.000000 +-122.450000,40.850000,20.000000,2701.000000,573.000000,892.000000,358.000000,2.773600,107800.000000 +-122.450000,40.560000,17.000000,1712.000000,307.000000,963.000000,329.000000,3.937500,148700.000000 +-122.450000,38.580000,34.000000,2517.000000,483.000000,1324.000000,464.000000,3.093800,189400.000000 +-122.450000,38.510000,18.000000,1297.000000,337.000000,610.000000,312.000000,1.944100,184400.000000 +-122.450000,38.300000,24.000000,1946.000000,400.000000,718.000000,380.000000,3.550700,257900.000000 +-122.450000,38.280000,20.000000,3306.000000,503.000000,1374.000000,460.000000,5.798400,297600.000000 +-122.450000,37.910000,27.000000,2682.000000,382.000000,935.000000,369.000000,10.079100,500001.000000 +-122.450000,37.810000,52.000000,1375.000000,322.000000,287.000000,184.000000,3.902800,500001.000000 +-122.450000,37.790000,52.000000,3069.000000,579.000000,1107.000000,536.000000,5.563400,500001.000000 +-122.450000,37.790000,52.000000,2196.000000,280.000000,668.000000,291.000000,10.091400,500001.000000 +-122.450000,37.790000,52.000000,1734.000000,482.000000,731.000000,429.000000,1.480400,425000.000000 +-122.450000,37.790000,46.000000,2009.000000,464.000000,761.000000,453.000000,3.718800,500001.000000 +-122.450000,37.780000,52.000000,3975.000000,716.000000,1515.000000,691.000000,5.015600,500001.000000 +-122.450000,37.780000,52.000000,2033.000000,438.000000,2198.000000,418.000000,3.666700,418400.000000 +-122.450000,37.780000,52.000000,1345.000000,291.000000,560.000000,294.000000,3.715900,494400.000000 +-122.450000,37.780000,48.000000,1013.000000,194.000000,464.000000,205.000000,3.201100,428300.000000 +-122.450000,37.780000,45.000000,2747.000000,699.000000,1320.000000,693.000000,3.157600,333300.000000 +-122.450000,37.780000,43.000000,1452.000000,397.000000,897.000000,393.000000,4.131900,322700.000000 +-122.450000,37.770000,52.000000,3939.000000,852.000000,1737.000000,797.000000,4.505200,500001.000000 +-122.450000,37.770000,52.000000,3188.000000,708.000000,1526.000000,664.000000,3.306800,500001.000000 +-122.450000,37.770000,52.000000,3095.000000,682.000000,1269.000000,639.000000,3.575000,500001.000000 +-122.450000,37.770000,52.000000,2645.000000,626.000000,1275.000000,553.000000,3.350000,492900.000000 
+-122.450000,37.770000,52.000000,2602.000000,714.000000,1330.000000,647.000000,3.543500,278600.000000 +-122.450000,37.770000,52.000000,2296.000000,509.000000,1039.000000,472.000000,4.141700,500000.000000 +-122.450000,37.770000,52.000000,2191.000000,627.000000,1100.000000,585.000000,3.040900,500000.000000 +-122.450000,37.760000,52.000000,1457.000000,292.000000,621.000000,315.000000,4.647700,450000.000000 +-122.450000,37.760000,51.000000,2564.000000,457.000000,810.000000,442.000000,5.623500,500001.000000 +-122.450000,37.760000,31.000000,5283.000000,1330.000000,2659.000000,1269.000000,3.574400,500000.000000 +-122.450000,37.750000,36.000000,2303.000000,381.000000,862.000000,371.000000,6.027400,349000.000000 +-122.450000,37.750000,36.000000,1997.000000,356.000000,772.000000,348.000000,4.950000,322600.000000 +-122.450000,37.750000,35.000000,1363.000000,302.000000,1786.000000,301.000000,3.080400,313400.000000 +-122.450000,37.740000,52.000000,1596.000000,276.000000,642.000000,273.000000,4.375000,349500.000000 +-122.450000,37.740000,46.000000,6429.000000,1093.000000,2535.000000,1109.000000,5.088700,335100.000000 +-122.450000,37.730000,52.000000,2510.000000,438.000000,1153.000000,407.000000,5.123800,335100.000000 +-122.450000,37.730000,52.000000,2035.000000,424.000000,1193.000000,430.000000,5.063400,264200.000000 +-122.450000,37.730000,52.000000,1350.000000,241.000000,752.000000,246.000000,3.244800,266200.000000 +-122.450000,37.720000,52.000000,982.000000,197.000000,653.000000,171.000000,4.216700,231900.000000 +-122.450000,37.720000,51.000000,2690.000000,554.000000,1795.000000,539.000000,3.658100,225000.000000 +-122.450000,37.720000,47.000000,1773.000000,345.000000,1083.000000,315.000000,4.475000,221200.000000 +-122.450000,37.720000,46.000000,1406.000000,235.000000,771.000000,239.000000,4.714300,219300.000000 +-122.450000,37.720000,45.000000,964.000000,173.000000,613.000000,201.000000,2.911900,228900.000000 +-122.450000,37.710000,52.000000,1658.000000,322.000000,1086.000000,326.000000,3.858300,261600.000000 +-122.450000,37.710000,50.000000,1441.000000,283.000000,1159.000000,286.000000,4.541700,233700.000000 +-122.450000,37.710000,49.000000,2244.000000,442.000000,1948.000000,423.000000,4.763900,251500.000000 +-122.450000,37.710000,46.000000,2559.000000,506.000000,1562.000000,498.000000,4.384600,270600.000000 +-122.450000,37.710000,41.000000,1578.000000,351.000000,1159.000000,299.000000,3.916700,243600.000000 +-122.450000,37.710000,34.000000,3131.000000,669.000000,2204.000000,600.000000,3.553600,251000.000000 +-122.450000,37.700000,16.000000,6457.000000,1336.000000,4375.000000,1231.000000,5.178800,267000.000000 +-122.450000,37.690000,17.000000,2359.000000,501.000000,884.000000,504.000000,3.062500,87500.000000 +-122.450000,37.670000,36.000000,1664.000000,326.000000,963.000000,322.000000,4.781300,246400.000000 +-122.450000,37.660000,36.000000,5456.000000,926.000000,2761.000000,916.000000,4.775500,280700.000000 +-122.450000,37.660000,35.000000,2738.000000,509.000000,1545.000000,493.000000,5.344600,263300.000000 +-122.450000,37.630000,28.000000,4946.000000,848.000000,2683.000000,824.000000,5.748000,302100.000000 +-122.450000,37.620000,26.000000,3507.000000,512.000000,1712.000000,509.000000,6.720600,344600.000000 +-122.460000,40.520000,13.000000,2085.000000,322.000000,1077.000000,333.000000,5.214900,146500.000000 +-122.460000,38.530000,32.000000,1735.000000,331.000000,785.000000,309.000000,3.664100,275800.000000 
+-122.460000,38.290000,35.000000,1762.000000,350.000000,686.000000,339.000000,3.598200,271700.000000 +-122.460000,37.980000,10.000000,1325.000000,189.000000,427.000000,162.000000,12.093300,500001.000000 +-122.460000,37.880000,35.000000,2492.000000,409.000000,812.000000,373.000000,8.838600,500001.000000 +-122.460000,37.790000,52.000000,899.000000,96.000000,304.000000,110.000000,14.295900,500001.000000 +-122.460000,37.790000,52.000000,2106.000000,373.000000,743.000000,348.000000,5.290900,500001.000000 +-122.460000,37.790000,52.000000,2005.000000,359.000000,847.000000,356.000000,4.102900,500001.000000 +-122.460000,37.780000,52.000000,4140.000000,984.000000,2030.000000,892.000000,3.423600,376800.000000 +-122.460000,37.780000,52.000000,3429.000000,773.000000,1584.000000,696.000000,3.788700,500001.000000 +-122.460000,37.780000,52.000000,3088.000000,727.000000,1636.000000,662.000000,2.855300,360700.000000 +-122.460000,37.780000,52.000000,2632.000000,542.000000,1364.000000,544.000000,3.460500,441700.000000 +-122.460000,37.780000,52.000000,2594.000000,622.000000,1421.000000,593.000000,3.026500,350000.000000 +-122.460000,37.780000,52.000000,2165.000000,580.000000,1067.000000,530.000000,2.929300,350000.000000 +-122.460000,37.780000,52.000000,2051.000000,552.000000,1400.000000,510.000000,3.239600,375000.000000 +-122.460000,37.780000,47.000000,1682.000000,379.000000,837.000000,375.000000,5.280600,400000.000000 +-122.460000,37.770000,52.000000,3193.000000,688.000000,2099.000000,681.000000,3.937500,402900.000000 +-122.460000,37.760000,52.000000,1817.000000,449.000000,948.000000,380.000000,3.930000,390000.000000 +-122.460000,37.760000,28.000000,1072.000000,165.000000,363.000000,168.000000,6.163600,367700.000000 +-122.460000,37.750000,52.000000,1849.000000,287.000000,695.000000,258.000000,6.537200,394000.000000 +-122.460000,37.750000,52.000000,1590.000000,236.000000,622.000000,232.000000,5.815100,500001.000000 +-122.460000,37.750000,52.000000,1207.000000,152.000000,465.000000,162.000000,10.756900,500001.000000 +-122.460000,37.750000,26.000000,2192.000000,438.000000,954.000000,456.000000,4.535200,374200.000000 +-122.460000,37.740000,52.000000,2180.000000,326.000000,856.000000,326.000000,5.396100,416900.000000 +-122.460000,37.740000,52.000000,2053.000000,281.000000,791.000000,287.000000,10.959000,500001.000000 +-122.460000,37.740000,51.000000,1905.000000,291.000000,707.000000,284.000000,6.256100,431000.000000 +-122.460000,37.730000,52.000000,3547.000000,506.000000,1276.000000,491.000000,8.006900,426800.000000 +-122.460000,37.730000,52.000000,2857.000000,469.000000,1431.000000,496.000000,5.208800,344200.000000 +-122.460000,37.730000,52.000000,2673.000000,349.000000,876.000000,338.000000,7.847600,500001.000000 +-122.460000,37.730000,52.000000,2401.000000,346.000000,812.000000,328.000000,6.832200,394100.000000 +-122.460000,37.720000,52.000000,2951.000000,406.000000,1115.000000,397.000000,6.722800,405200.000000 +-122.460000,37.720000,49.000000,1207.000000,255.000000,658.000000,220.000000,4.085900,228600.000000 +-122.460000,37.720000,47.000000,1723.000000,389.000000,1216.000000,399.000000,3.320800,238600.000000 +-122.460000,37.720000,45.000000,2399.000000,419.000000,1225.000000,399.000000,4.085500,244100.000000 +-122.460000,37.720000,39.000000,2254.000000,415.000000,1388.000000,404.000000,2.968800,232000.000000 +-122.460000,37.710000,52.000000,1580.000000,337.000000,1425.000000,330.000000,4.054700,246200.000000 +-122.460000,37.710000,49.000000,1711.000000,348.000000,1138.000000,325.000000,2.875000,225000.000000 
+-122.460000,37.710000,47.000000,1527.000000,283.000000,1102.000000,282.000000,4.000000,231600.000000 +-122.460000,37.710000,45.000000,1799.000000,394.000000,1436.000000,389.000000,3.650000,239900.000000 +-122.460000,37.710000,44.000000,364.000000,102.000000,339.000000,98.000000,2.483000,214300.000000 +-122.460000,37.710000,39.000000,2076.000000,482.000000,1738.000000,445.000000,3.195800,232100.000000 +-122.460000,37.700000,42.000000,876.000000,216.000000,713.000000,203.000000,3.840000,235900.000000 +-122.460000,37.700000,37.000000,3029.000000,738.000000,2436.000000,700.000000,3.321400,243200.000000 +-122.460000,37.690000,35.000000,1983.000000,385.000000,1577.000000,414.000000,4.083300,266700.000000 +-122.460000,37.660000,15.000000,6082.000000,1284.000000,3861.000000,1198.000000,5.422100,284700.000000 +-122.460000,37.650000,16.000000,8676.000000,1633.000000,5130.000000,1574.000000,4.809600,262000.000000 +-122.460000,37.640000,26.000000,2806.000000,375.000000,1617.000000,396.000000,5.392200,353700.000000 +-122.460000,37.640000,17.000000,3523.000000,669.000000,2150.000000,666.000000,4.593800,251200.000000 +-122.460000,37.630000,22.000000,6728.000000,1382.000000,3783.000000,1310.000000,5.047900,280400.000000 +-122.460000,37.590000,21.000000,12902.000000,2118.000000,6160.000000,2082.000000,5.765300,325800.000000 +-122.460000,37.510000,23.000000,949.000000,151.000000,399.000000,149.000000,5.628600,411300.000000 +-122.470000,38.600000,20.000000,1036.000000,202.000000,589.000000,194.000000,5.369800,303300.000000 +-122.470000,38.510000,18.000000,2487.000000,516.000000,980.000000,503.000000,3.550600,187500.000000 +-122.470000,38.340000,15.000000,2411.000000,446.000000,1144.000000,407.000000,4.347200,261000.000000 +-122.470000,38.300000,15.000000,4885.000000,988.000000,2175.000000,924.000000,3.403100,209500.000000 +-122.470000,37.950000,16.000000,3769.000000,839.000000,1986.000000,815.000000,3.971200,187500.000000 +-122.470000,37.890000,23.000000,10774.000000,1736.000000,3895.000000,1683.000000,7.290500,500001.000000 +-122.470000,37.810000,45.000000,6927.000000,1258.000000,4715.000000,1165.000000,3.405100,500001.000000 +-122.470000,37.790000,52.000000,437.000000,105.000000,194.000000,87.000000,2.812500,500001.000000 +-122.470000,37.790000,52.000000,2383.000000,477.000000,990.000000,464.000000,3.968800,483300.000000 +-122.470000,37.780000,52.000000,3021.000000,569.000000,1479.000000,514.000000,4.020800,414600.000000 +-122.470000,37.780000,52.000000,2635.000000,587.000000,1302.000000,577.000000,3.729200,416700.000000 +-122.470000,37.780000,52.000000,2388.000000,507.000000,1078.000000,494.000000,3.522100,443300.000000 +-122.470000,37.780000,52.000000,2275.000000,412.000000,1166.000000,424.000000,4.065200,421300.000000 +-122.470000,37.780000,52.000000,2169.000000,522.000000,1220.000000,505.000000,3.198900,446900.000000 +-122.470000,37.780000,51.000000,1485.000000,386.000000,880.000000,385.000000,2.743100,307100.000000 +-122.470000,37.770000,52.000000,3143.000000,635.000000,1350.000000,623.000000,3.857100,366700.000000 +-122.470000,37.760000,52.000000,4001.000000,809.000000,1886.000000,756.000000,3.323900,350000.000000 +-122.470000,37.760000,52.000000,2941.000000,783.000000,1545.000000,726.000000,2.989900,406500.000000 +-122.470000,37.760000,52.000000,2680.000000,740.000000,1587.000000,713.000000,2.593300,359600.000000 +-122.470000,37.760000,52.000000,2465.000000,489.000000,1170.000000,498.000000,4.079300,306700.000000 
+-122.470000,37.760000,49.000000,2842.000000,670.000000,1396.000000,648.000000,3.267900,345700.000000 +-122.470000,37.760000,48.000000,2464.000000,459.000000,1179.000000,458.000000,4.494600,358600.000000 +-122.470000,37.760000,40.000000,3525.000000,941.000000,1675.000000,857.000000,3.208300,330000.000000 +-122.470000,37.750000,51.000000,2713.000000,396.000000,1090.000000,401.000000,9.360300,500001.000000 +-122.470000,37.750000,51.000000,2413.000000,431.000000,1095.000000,437.000000,4.008900,357000.000000 +-122.470000,37.750000,49.000000,2747.000000,472.000000,1281.000000,448.000000,5.482000,366300.000000 +-122.470000,37.750000,46.000000,3238.000000,544.000000,1293.000000,470.000000,6.159200,381700.000000 +-122.470000,37.750000,45.000000,2399.000000,426.000000,911.000000,423.000000,4.431200,361000.000000 +-122.470000,37.740000,52.000000,3688.000000,640.000000,1605.000000,567.000000,4.953700,365600.000000 +-122.470000,37.740000,52.000000,2055.000000,265.000000,735.000000,252.000000,8.118900,500001.000000 +-122.470000,37.740000,52.000000,1538.000000,305.000000,819.000000,319.000000,4.084600,333600.000000 +-122.470000,37.730000,52.000000,2151.000000,280.000000,762.000000,274.000000,10.730900,500001.000000 +-122.470000,37.730000,52.000000,2134.000000,277.000000,936.000000,285.000000,5.924500,484600.000000 +-122.470000,37.730000,50.000000,1653.000000,252.000000,641.000000,224.000000,10.660500,500001.000000 +-122.470000,37.720000,49.000000,1690.000000,307.000000,770.000000,294.000000,4.591300,259700.000000 +-122.470000,37.720000,46.000000,1836.000000,319.000000,767.000000,302.000000,5.911400,399000.000000 +-122.470000,37.720000,46.000000,1453.000000,306.000000,817.000000,310.000000,3.000000,246700.000000 +-122.470000,37.710000,44.000000,2547.000000,511.000000,1577.000000,516.000000,4.193900,237900.000000 +-122.470000,37.710000,37.000000,1046.000000,251.000000,822.000000,239.000000,3.500000,224400.000000 +-122.470000,37.700000,47.000000,737.000000,126.000000,370.000000,136.000000,3.775000,281300.000000 +-122.470000,37.700000,45.000000,3290.000000,693.000000,2466.000000,666.000000,3.658800,238600.000000 +-122.470000,37.690000,35.000000,1720.000000,421.000000,1452.000000,425.000000,3.590900,256100.000000 +-122.470000,37.690000,34.000000,1954.000000,357.000000,1130.000000,367.000000,4.644700,304500.000000 +-122.470000,37.690000,27.000000,2447.000000,720.000000,2104.000000,657.000000,3.449000,239100.000000 +-122.470000,37.680000,31.000000,4077.000000,777.000000,2544.000000,738.000000,4.533700,306700.000000 +-122.470000,37.670000,20.000000,5689.000000,992.000000,3752.000000,1002.000000,5.584500,304300.000000 +-122.470000,37.660000,18.000000,4172.000000,806.000000,3226.000000,790.000000,5.753500,297900.000000 +-122.470000,37.650000,27.000000,8103.000000,1655.000000,5023.000000,1605.000000,4.645200,236200.000000 +-122.470000,37.500000,25.000000,950.000000,259.000000,404.000000,195.000000,3.193700,319200.000000 +-122.470000,37.500000,18.000000,2297.000000,416.000000,1086.000000,381.000000,4.875000,334600.000000 +-122.480000,38.900000,10.000000,304.000000,63.000000,161.000000,61.000000,2.196400,112500.000000 +-122.480000,38.540000,37.000000,1898.000000,359.000000,973.000000,340.000000,4.209600,256600.000000 +-122.480000,38.510000,49.000000,1977.000000,393.000000,741.000000,339.000000,3.131200,247600.000000 +-122.480000,38.500000,37.000000,3049.000000,508.000000,1287.000000,439.000000,4.312500,276500.000000 +-122.480000,38.480000,29.000000,2278.000000,397.000000,765.000000,322.000000,4.637900,348200.000000 
+-122.480000,38.320000,42.000000,2106.000000,533.000000,1141.000000,445.000000,3.112900,149300.000000 +-122.480000,38.320000,31.000000,1701.000000,363.000000,680.000000,324.000000,3.137500,192100.000000 +-122.480000,38.310000,19.000000,2398.000000,521.000000,1266.000000,471.000000,2.772700,186800.000000 +-122.480000,38.300000,17.000000,2703.000000,550.000000,1241.000000,515.000000,2.652000,171300.000000 +-122.480000,37.930000,16.000000,2947.000000,802.000000,1385.000000,743.000000,3.673100,318000.000000 +-122.480000,37.860000,52.000000,3914.000000,752.000000,1177.000000,670.000000,6.211300,500001.000000 +-122.480000,37.850000,42.000000,6297.000000,1307.000000,2096.000000,1205.000000,6.475200,500001.000000 +-122.480000,37.790000,52.000000,4683.000000,1055.000000,2246.000000,975.000000,4.114800,457800.000000 +-122.480000,37.790000,52.000000,1647.000000,236.000000,546.000000,227.000000,9.188100,500001.000000 +-122.480000,37.780000,52.000000,3047.000000,641.000000,1427.000000,620.000000,3.488300,337200.000000 +-122.480000,37.780000,52.000000,2910.000000,611.000000,1508.000000,515.000000,3.586500,311400.000000 +-122.480000,37.780000,52.000000,2666.000000,515.000000,1362.000000,494.000000,4.218000,393800.000000 +-122.480000,37.780000,50.000000,2159.000000,437.000000,1111.000000,417.000000,3.558800,346400.000000 +-122.480000,37.780000,48.000000,2835.000000,728.000000,1674.000000,684.000000,3.129000,375000.000000 +-122.480000,37.780000,44.000000,3371.000000,794.000000,1738.000000,753.000000,3.165300,335300.000000 +-122.480000,37.770000,52.000000,2556.000000,595.000000,1202.000000,568.000000,3.889900,348500.000000 +-122.480000,37.760000,52.000000,3260.000000,653.000000,1594.000000,632.000000,4.409400,336100.000000 +-122.480000,37.760000,52.000000,2684.000000,574.000000,1395.000000,549.000000,3.909700,323800.000000 +-122.480000,37.760000,52.000000,1845.000000,336.000000,1015.000000,337.000000,4.139700,331300.000000 +-122.480000,37.760000,50.000000,2236.000000,484.000000,1171.000000,467.000000,4.097700,322100.000000 +-122.480000,37.760000,48.000000,2660.000000,616.000000,1491.000000,602.000000,3.975800,348600.000000 +-122.480000,37.760000,48.000000,2304.000000,558.000000,1273.000000,512.000000,3.275000,332100.000000 +-122.480000,37.750000,52.000000,2515.000000,494.000000,1583.000000,477.000000,4.339300,317600.000000 +-122.480000,37.750000,51.000000,2095.000000,410.000000,1126.000000,429.000000,4.400000,318400.000000 +-122.480000,37.750000,49.000000,2203.000000,407.000000,1052.000000,405.000000,4.437500,329200.000000 +-122.480000,37.750000,48.000000,2555.000000,548.000000,1285.000000,482.000000,3.773400,314700.000000 +-122.480000,37.740000,52.000000,2841.000000,517.000000,1372.000000,517.000000,3.923600,335000.000000 +-122.480000,37.740000,52.000000,2453.000000,508.000000,1056.000000,453.000000,3.685900,311800.000000 +-122.480000,37.740000,52.000000,2166.000000,423.000000,1072.000000,370.000000,4.131000,314300.000000 +-122.480000,37.730000,52.000000,1597.000000,240.000000,566.000000,231.000000,5.168100,500001.000000 +-122.480000,37.730000,47.000000,2382.000000,392.000000,867.000000,376.000000,5.259800,371500.000000 +-122.480000,37.730000,38.000000,3195.000000,828.000000,2410.000000,778.000000,3.135900,350000.000000 +-122.480000,37.720000,46.000000,2403.000000,638.000000,1281.000000,603.000000,3.232100,112500.000000 +-122.480000,37.710000,43.000000,3850.000000,1018.000000,1497.000000,829.000000,3.529600,400000.000000 
+-122.480000,37.710000,39.000000,3615.000000,632.000000,1571.000000,615.000000,5.114900,314200.000000 +-122.480000,37.700000,33.000000,4492.000000,1583.000000,3477.000000,1537.000000,3.054600,297900.000000 +-122.480000,37.700000,33.000000,4167.000000,1398.000000,2923.000000,1314.000000,3.049000,307000.000000 +-122.480000,37.690000,43.000000,2661.000000,455.000000,1384.000000,456.000000,4.242100,257500.000000 +-122.480000,37.690000,42.000000,2993.000000,512.000000,1594.000000,546.000000,4.482100,252400.000000 +-122.480000,37.690000,33.000000,2347.000000,512.000000,1259.000000,481.000000,3.449200,264300.000000 +-122.480000,37.670000,31.000000,2609.000000,433.000000,1746.000000,464.000000,5.105400,294500.000000 +-122.480000,37.670000,15.000000,2897.000000,728.000000,2340.000000,720.000000,3.390600,303700.000000 +-122.480000,37.640000,7.000000,120.000000,21.000000,50.000000,27.000000,12.500000,281000.000000 +-122.480000,37.590000,29.000000,5889.000000,959.000000,2784.000000,923.000000,5.399100,273000.000000 +-122.480000,37.570000,34.000000,4648.000000,806.000000,2282.000000,814.000000,4.555600,249000.000000 +-122.490000,41.430000,19.000000,3689.000000,644.000000,1544.000000,566.000000,3.125000,76100.000000 +-122.490000,38.320000,17.000000,3308.000000,720.000000,1587.000000,632.000000,3.272700,176000.000000 +-122.490000,38.310000,27.000000,3078.000000,597.000000,1411.000000,586.000000,3.250000,195500.000000 +-122.490000,38.300000,14.000000,2844.000000,602.000000,1613.000000,544.000000,3.357100,193600.000000 +-122.490000,38.290000,26.000000,1726.000000,289.000000,672.000000,251.000000,3.800000,242100.000000 +-122.490000,38.270000,8.000000,5092.000000,988.000000,1657.000000,936.000000,3.562500,213200.000000 +-122.490000,38.100000,43.000000,1226.000000,244.000000,491.000000,205.000000,4.928600,307000.000000 +-122.490000,37.990000,27.000000,5470.000000,755.000000,1916.000000,764.000000,6.994000,420800.000000 +-122.490000,37.980000,34.000000,1256.000000,178.000000,460.000000,174.000000,6.427100,451700.000000 +-122.490000,37.920000,26.000000,2170.000000,347.000000,849.000000,318.000000,6.295300,386200.000000 +-122.490000,37.890000,23.000000,1650.000000,403.000000,541.000000,336.000000,6.023800,500001.000000 +-122.490000,37.860000,52.000000,2175.000000,510.000000,809.000000,503.000000,4.539800,442000.000000 +-122.490000,37.850000,38.000000,240.000000,29.000000,63.000000,34.000000,12.254700,500001.000000 +-122.490000,37.790000,52.000000,3146.000000,478.000000,1143.000000,455.000000,6.140700,500001.000000 +-122.490000,37.790000,52.000000,2488.000000,281.000000,805.000000,295.000000,10.705800,500001.000000 +-122.490000,37.780000,52.000000,3440.000000,722.000000,1663.000000,665.000000,3.027800,356300.000000 +-122.490000,37.780000,52.000000,2050.000000,439.000000,1109.000000,437.000000,2.671900,318500.000000 +-122.490000,37.780000,49.000000,2176.000000,441.000000,1040.000000,448.000000,4.241400,500001.000000 +-122.490000,37.780000,47.000000,2695.000000,643.000000,1505.000000,644.000000,3.087700,329100.000000 +-122.490000,37.780000,46.000000,3304.000000,792.000000,1783.000000,777.000000,3.614800,352200.000000 +-122.490000,37.780000,42.000000,2723.000000,579.000000,1419.000000,519.000000,3.642900,328400.000000 +-122.490000,37.780000,32.000000,3028.000000,815.000000,1704.000000,718.000000,3.202800,322900.000000 +-122.490000,37.770000,52.000000,2342.000000,458.000000,1170.000000,458.000000,3.703600,369200.000000 
+-122.490000,37.760000,52.000000,2564.000000,502.000000,1092.000000,459.000000,3.530200,329600.000000 +-122.490000,37.760000,52.000000,2245.000000,425.000000,1091.000000,409.000000,3.590900,331200.000000 +-122.490000,37.760000,52.000000,1792.000000,305.000000,782.000000,287.000000,4.039100,332700.000000 +-122.490000,37.760000,52.000000,1382.000000,230.000000,708.000000,279.000000,5.809600,339800.000000 +-122.490000,37.760000,49.000000,1724.000000,295.000000,795.000000,297.000000,4.397700,353600.000000 +-122.490000,37.760000,49.000000,1637.000000,304.000000,729.000000,281.000000,4.328100,323100.000000 +-122.490000,37.750000,52.000000,2226.000000,385.000000,1177.000000,416.000000,4.851600,323800.000000 +-122.490000,37.750000,48.000000,2181.000000,419.000000,1041.000000,379.000000,3.736100,320200.000000 +-122.490000,37.750000,47.000000,2140.000000,425.000000,1105.000000,401.000000,3.705400,308500.000000 +-122.490000,37.750000,45.000000,2341.000000,461.000000,1092.000000,438.000000,4.803600,297800.000000 +-122.490000,37.750000,43.000000,2044.000000,393.000000,979.000000,378.000000,3.920500,319100.000000 +-122.490000,37.740000,52.000000,2442.000000,449.000000,1188.000000,436.000000,4.390900,317700.000000 +-122.490000,37.740000,52.000000,2302.000000,457.000000,1154.000000,424.000000,4.574400,315200.000000 +-122.490000,37.740000,52.000000,2189.000000,433.000000,1147.000000,420.000000,3.458300,321300.000000 +-122.490000,37.740000,48.000000,1186.000000,213.000000,487.000000,207.000000,3.833300,340800.000000 +-122.490000,37.740000,44.000000,1472.000000,275.000000,820.000000,310.000000,5.682600,300000.000000 +-122.490000,37.730000,48.000000,1190.000000,182.000000,497.000000,199.000000,6.264200,438500.000000 +-122.490000,37.730000,39.000000,1937.000000,336.000000,742.000000,307.000000,5.199100,369400.000000 +-122.490000,37.730000,37.000000,1399.000000,224.000000,530.000000,235.000000,3.921900,433300.000000 +-122.490000,37.700000,36.000000,1946.000000,340.000000,828.000000,313.000000,5.281100,287700.000000 +-122.490000,37.690000,35.000000,2644.000000,456.000000,1465.000000,430.000000,4.937500,277000.000000 +-122.490000,37.670000,35.000000,5275.000000,903.000000,2892.000000,842.000000,4.677100,266400.000000 +-122.490000,37.670000,29.000000,3795.000000,675.000000,2494.000000,696.000000,5.284800,260300.000000 +-122.490000,37.630000,34.000000,696.000000,145.000000,398.000000,162.000000,3.525000,254100.000000 +-122.490000,37.630000,31.000000,1256.000000,328.000000,785.000000,297.000000,3.244600,234600.000000 +-122.490000,37.600000,33.000000,3507.000000,669.000000,1697.000000,660.000000,4.079500,270600.000000 +-122.490000,37.590000,35.000000,2683.000000,475.000000,1498.000000,484.000000,5.128200,262500.000000 +-122.490000,37.540000,15.000000,3456.000000,545.000000,1527.000000,535.000000,6.325600,368000.000000 +-122.490000,37.500000,21.000000,1209.000000,309.000000,801.000000,259.000000,4.562500,500000.000000 +-122.500000,38.820000,12.000000,2394.000000,443.000000,877.000000,341.000000,2.562500,109200.000000 +-122.500000,38.400000,36.000000,1860.000000,364.000000,777.000000,339.000000,4.130700,295700.000000 +-122.500000,38.350000,25.000000,1566.000000,352.000000,784.000000,362.000000,3.075000,165100.000000 +-122.500000,37.970000,25.000000,6526.000000,1902.000000,5917.000000,1812.000000,2.727300,187500.000000 +-122.500000,37.920000,32.000000,2639.000000,415.000000,1013.000000,408.000000,6.163200,349200.000000 +-122.500000,37.920000,30.000000,2270.000000,359.000000,974.000000,351.000000,5.592600,300900.000000 
+-122.500000,37.910000,31.000000,7001.000000,1282.000000,2755.000000,1267.000000,5.485100,441100.000000 +-122.500000,37.880000,28.000000,5448.000000,1089.000000,2100.000000,1023.000000,4.747500,474600.000000 +-122.500000,37.870000,17.000000,4333.000000,947.000000,1650.000000,919.000000,6.306600,346100.000000 +-122.500000,37.790000,52.000000,8.000000,1.000000,13.000000,1.000000,15.000100,500001.000000 +-122.500000,37.780000,50.000000,1922.000000,427.000000,1049.000000,443.000000,3.583300,348500.000000 +-122.500000,37.770000,52.000000,2739.000000,569.000000,1312.000000,531.000000,3.583300,322900.000000 +-122.500000,37.770000,52.000000,1769.000000,414.000000,1032.000000,380.000000,3.995400,324700.000000 +-122.500000,37.760000,52.000000,2018.000000,422.000000,1142.000000,463.000000,3.708300,307700.000000 +-122.500000,37.760000,50.000000,1993.000000,410.000000,1009.000000,374.000000,3.946400,295600.000000 +-122.500000,37.760000,48.000000,1408.000000,295.000000,891.000000,269.000000,3.833300,296300.000000 +-122.500000,37.760000,46.000000,1491.000000,285.000000,841.000000,306.000000,4.532900,278800.000000 +-122.500000,37.760000,45.000000,1673.000000,377.000000,1078.000000,393.000000,3.339300,272300.000000 +-122.500000,37.760000,43.000000,2108.000000,456.000000,1299.000000,447.000000,3.140600,316200.000000 +-122.500000,37.750000,45.000000,1672.000000,344.000000,838.000000,314.000000,4.141900,291500.000000 +-122.500000,37.750000,45.000000,1620.000000,344.000000,941.000000,328.000000,4.385900,270200.000000 +-122.500000,37.750000,44.000000,1819.000000,365.000000,1137.000000,354.000000,3.491900,271800.000000 +-122.500000,37.750000,44.000000,1739.000000,343.000000,872.000000,330.000000,2.963200,286300.000000 +-122.500000,37.740000,45.000000,1771.000000,349.000000,1098.000000,342.000000,3.755200,296600.000000 +-122.500000,37.740000,44.000000,2527.000000,518.000000,1434.000000,444.000000,3.875000,275700.000000 +-122.500000,37.740000,44.000000,2374.000000,496.000000,1087.000000,426.000000,3.500000,275700.000000 +-122.500000,37.740000,44.000000,2082.000000,470.000000,1154.000000,403.000000,4.361100,268100.000000 +-122.500000,37.740000,42.000000,1667.000000,395.000000,1041.000000,387.000000,3.958300,273700.000000 +-122.500000,37.740000,40.000000,2310.000000,445.000000,1266.000000,490.000000,3.796900,297800.000000 +-122.500000,37.590000,36.000000,1521.000000,253.000000,736.000000,241.000000,4.354200,237500.000000 +-122.500000,37.510000,11.000000,749.000000,137.000000,355.000000,124.000000,8.236400,371800.000000 +-122.510000,39.300000,19.000000,1629.000000,386.000000,551.000000,214.000000,1.746300,68800.000000 +-122.510000,38.170000,8.000000,5875.000000,1115.000000,2808.000000,1029.000000,3.639200,246300.000000 +-122.510000,38.060000,24.000000,9493.000000,1935.000000,5162.000000,1880.000000,3.074200,118800.000000 +-122.510000,38.000000,17.000000,2449.000000,536.000000,1157.000000,543.000000,3.951900,274200.000000 +-122.510000,37.990000,32.000000,4138.000000,632.000000,1541.000000,626.000000,5.579100,433300.000000 +-122.510000,37.980000,37.000000,4801.000000,699.000000,1830.000000,679.000000,6.076200,487800.000000 +-122.510000,37.970000,37.000000,4296.000000,1089.000000,2100.000000,1025.000000,3.246200,329400.000000 +-122.510000,37.960000,39.000000,3302.000000,684.000000,1574.000000,653.000000,3.686300,263800.000000 +-122.510000,37.910000,2.000000,647.000000,136.000000,203.000000,118.000000,6.641000,310000.000000 
+-122.510000,37.890000,27.000000,2674.000000,565.000000,1233.000000,547.000000,3.448500,458300.000000 +-122.510000,37.870000,21.000000,3904.000000,980.000000,1949.000000,919.000000,2.862000,258400.000000 +-122.510000,37.760000,43.000000,2527.000000,619.000000,1332.000000,558.000000,3.046500,274200.000000 +-122.510000,37.760000,40.000000,2320.000000,562.000000,1499.000000,521.000000,3.279200,260800.000000 +-122.510000,37.580000,20.000000,64.000000,21.000000,59.000000,21.000000,2.237500,450000.000000 +-122.510000,37.530000,17.000000,1574.000000,262.000000,672.000000,241.000000,7.292900,355800.000000 +-122.520000,38.990000,16.000000,975.000000,219.000000,337.000000,155.000000,1.660700,77800.000000 +-122.520000,38.700000,26.000000,102.000000,17.000000,43.000000,13.000000,0.536000,87500.000000 +-122.520000,38.670000,35.000000,1705.000000,321.000000,708.000000,253.000000,3.453900,300000.000000 +-122.520000,38.530000,35.000000,1227.000000,236.000000,548.000000,207.000000,4.875000,336700.000000 +-122.520000,38.270000,18.000000,2405.000000,390.000000,872.000000,367.000000,5.215500,248300.000000 +-122.520000,37.980000,31.000000,6555.000000,1571.000000,2962.000000,1464.000000,2.890300,324200.000000 +-122.520000,37.960000,35.000000,2012.000000,346.000000,818.000000,352.000000,5.281800,331000.000000 +-122.520000,37.950000,37.000000,350.000000,57.000000,179.000000,69.000000,6.286200,500001.000000 +-122.520000,37.950000,33.000000,4448.000000,631.000000,1675.000000,628.000000,7.890400,468800.000000 +-122.520000,37.940000,18.000000,1804.000000,284.000000,600.000000,241.000000,5.958200,500001.000000 +-122.520000,37.920000,47.000000,793.000000,163.000000,334.000000,151.000000,5.850900,317800.000000 +-122.520000,37.920000,24.000000,421.000000,64.000000,163.000000,75.000000,14.583300,500001.000000 +-122.520000,37.910000,30.000000,4174.000000,739.000000,1818.000000,705.000000,5.595100,402900.000000 +-122.520000,37.900000,16.000000,1704.000000,402.000000,689.000000,348.000000,4.423900,267100.000000 +-122.520000,37.890000,17.000000,4363.000000,1041.000000,1640.000000,989.000000,3.953100,417600.000000 +-122.530000,41.810000,21.000000,2400.000000,485.000000,1109.000000,443.000000,1.763900,55400.000000 +-122.530000,39.500000,25.000000,1231.000000,240.000000,658.000000,211.000000,2.486100,71900.000000 +-122.530000,38.320000,22.000000,3577.000000,553.000000,1371.000000,501.000000,5.795000,332300.000000 +-122.530000,38.010000,27.000000,3121.000000,531.000000,1318.000000,489.000000,5.478100,310900.000000 +-122.530000,38.010000,16.000000,1495.000000,292.000000,472.000000,284.000000,3.443200,67500.000000 +-122.530000,37.980000,32.000000,2390.000000,336.000000,810.000000,354.000000,8.575900,500001.000000 +-122.530000,37.970000,37.000000,1340.000000,322.000000,621.000000,314.000000,3.558800,268800.000000 +-122.530000,37.960000,36.000000,4385.000000,620.000000,1549.000000,626.000000,8.393500,470500.000000 +-122.530000,37.960000,35.000000,908.000000,194.000000,413.000000,197.000000,3.991700,290800.000000 +-122.530000,37.930000,42.000000,2171.000000,362.000000,887.000000,347.000000,6.612500,393200.000000 +-122.530000,37.930000,37.000000,1722.000000,352.000000,648.000000,337.000000,4.125000,310300.000000 +-122.530000,37.920000,45.000000,1530.000000,324.000000,608.000000,328.000000,3.875000,390800.000000 +-122.530000,37.920000,42.000000,1741.000000,301.000000,723.000000,306.000000,5.537900,410500.000000 +-122.530000,37.910000,37.000000,2524.000000,398.000000,999.000000,417.000000,7.989200,500001.000000 
+-122.530000,37.900000,44.000000,2846.000000,551.000000,1232.000000,537.000000,3.883900,327200.000000 +-122.530000,37.890000,35.000000,4127.000000,689.000000,1596.000000,707.000000,5.907300,400400.000000 +-122.530000,37.880000,25.000000,4921.000000,866.000000,1913.000000,834.000000,6.874200,413100.000000 +-122.530000,37.870000,20.000000,1814.000000,282.000000,658.000000,253.000000,7.997700,400000.000000 +-122.530000,37.660000,25.000000,7778.000000,1493.000000,4674.000000,1451.000000,5.469400,272400.000000 +-122.530000,37.500000,19.000000,4768.000000,807.000000,2199.000000,805.000000,6.189600,331100.000000 +-122.540000,38.360000,40.000000,2725.000000,531.000000,1167.000000,458.000000,3.796900,202800.000000 +-122.540000,38.000000,28.000000,3416.000000,826.000000,1694.000000,800.000000,3.180000,277000.000000 +-122.540000,37.990000,32.000000,2236.000000,348.000000,818.000000,330.000000,7.352100,444000.000000 +-122.540000,37.980000,52.000000,1758.000000,316.000000,607.000000,264.000000,5.508300,371900.000000 +-122.540000,37.970000,39.000000,4193.000000,762.000000,1833.000000,737.000000,5.626300,352100.000000 +-122.540000,37.960000,44.000000,1552.000000,204.000000,596.000000,208.000000,10.129000,500001.000000 +-122.540000,37.960000,33.000000,2534.000000,495.000000,996.000000,449.000000,4.308300,500001.000000 +-122.540000,37.950000,38.000000,2310.000000,400.000000,971.000000,386.000000,5.697000,435700.000000 +-122.540000,37.940000,39.000000,3670.000000,775.000000,1519.000000,788.000000,4.408100,435200.000000 +-122.540000,37.940000,26.000000,3990.000000,804.000000,1550.000000,792.000000,5.183400,405500.000000 +-122.540000,37.910000,48.000000,2924.000000,489.000000,1159.000000,505.000000,5.630200,489000.000000 +-122.540000,37.890000,33.000000,4971.000000,836.000000,1907.000000,795.000000,6.127500,424400.000000 +-122.540000,37.880000,30.000000,4382.000000,732.000000,1775.000000,745.000000,6.780900,414400.000000 +-122.540000,37.720000,17.000000,2975.000000,968.000000,1453.000000,828.000000,3.527000,318900.000000 +-122.540000,37.700000,36.000000,3988.000000,732.000000,1793.000000,708.000000,4.247200,292500.000000 +-122.540000,37.620000,35.000000,1481.000000,277.000000,747.000000,254.000000,4.428600,262100.000000 +-122.550000,38.810000,7.000000,3639.000000,637.000000,1027.000000,421.000000,3.883100,132100.000000 +-122.550000,38.420000,24.000000,2220.000000,411.000000,894.000000,365.000000,4.289100,211700.000000 +-122.550000,38.100000,26.000000,5188.000000,892.000000,2341.000000,906.000000,5.002900,255600.000000 +-122.550000,38.070000,5.000000,1495.000000,235.000000,555.000000,201.000000,6.723200,345000.000000 +-122.550000,38.070000,38.000000,3392.000000,709.000000,1894.000000,713.000000,3.057300,350800.000000 +-122.550000,38.030000,29.000000,7174.000000,1169.000000,3063.000000,1172.000000,6.090200,293200.000000 +-122.550000,38.020000,27.000000,4985.000000,711.000000,1928.000000,742.000000,6.497800,361500.000000 +-122.550000,38.010000,27.000000,3966.000000,577.000000,1657.000000,611.000000,6.331400,342200.000000 +-122.550000,37.990000,34.000000,3306.000000,555.000000,1398.000000,585.000000,4.899300,319900.000000 +-122.550000,37.970000,52.000000,2232.000000,291.000000,731.000000,253.000000,7.115500,500001.000000 +-122.550000,37.920000,52.000000,2303.000000,350.000000,859.000000,359.000000,6.108500,500001.000000 +-122.550000,37.910000,48.000000,1283.000000,278.000000,567.000000,255.000000,3.279400,460000.000000 
+-122.550000,37.900000,34.000000,1431.000000,224.000000,503.000000,220.000000,7.960600,453400.000000 +-122.550000,37.790000,32.000000,2131.000000,625.000000,1229.000000,572.000000,2.920100,322200.000000 +-122.550000,37.590000,31.000000,1331.000000,245.000000,598.000000,225.000000,4.182700,345500.000000 +-122.560000,41.690000,21.000000,2010.000000,360.000000,947.000000,306.000000,2.410700,70100.000000 +-122.560000,40.750000,20.000000,1182.000000,250.000000,512.000000,210.000000,1.793500,74500.000000 +-122.560000,38.090000,17.000000,9614.000000,2123.000000,4684.000000,2060.000000,4.170500,209800.000000 +-122.560000,38.030000,34.000000,1887.000000,290.000000,815.000000,283.000000,6.524900,324800.000000 +-122.560000,37.980000,36.000000,2649.000000,542.000000,1111.000000,557.000000,4.805600,345700.000000 +-122.560000,37.970000,52.000000,1833.000000,324.000000,735.000000,306.000000,4.694400,398900.000000 +-122.560000,37.950000,34.000000,2677.000000,411.000000,933.000000,410.000000,6.144400,500001.000000 +-122.560000,37.940000,36.000000,2023.000000,242.000000,653.000000,241.000000,10.627200,500001.000000 +-122.560000,37.920000,37.000000,1926.000000,290.000000,721.000000,298.000000,8.924800,500001.000000 +-122.560000,37.910000,52.000000,1972.000000,327.000000,755.000000,345.000000,7.192400,500001.000000 +-122.560000,37.900000,48.000000,1550.000000,253.000000,641.000000,276.000000,8.634000,463500.000000 +-122.560000,37.900000,24.000000,221.000000,41.000000,75.000000,38.000000,5.129200,362500.000000 +-122.570000,40.610000,27.000000,1540.000000,315.000000,883.000000,321.000000,2.803600,93400.000000 +-122.570000,39.900000,15.000000,3873.000000,810.000000,1697.000000,627.000000,2.455500,55600.000000 +-122.570000,38.580000,18.000000,2083.000000,506.000000,926.000000,487.000000,1.992500,225000.000000 +-122.570000,38.270000,7.000000,6508.000000,1028.000000,2902.000000,1010.000000,5.370700,250500.000000 +-122.570000,38.030000,24.000000,2330.000000,322.000000,911.000000,320.000000,6.525300,387700.000000 +-122.570000,38.020000,33.000000,9531.000000,1487.000000,3798.000000,1409.000000,5.651200,314000.000000 +-122.570000,37.990000,45.000000,2404.000000,425.000000,926.000000,400.000000,4.967400,320100.000000 +-122.570000,37.990000,38.000000,5587.000000,996.000000,2466.000000,1027.000000,4.171100,336900.000000 +-122.570000,37.980000,49.000000,2860.000000,552.000000,1178.000000,522.000000,4.625000,355000.000000 +-122.570000,37.970000,47.000000,5416.000000,1115.000000,2177.000000,1027.000000,3.505500,382100.000000 +-122.570000,37.960000,52.000000,3458.000000,468.000000,1449.000000,471.000000,9.183400,500001.000000 +-122.580000,38.590000,33.000000,1239.000000,262.000000,539.000000,246.000000,3.520800,195800.000000 +-122.580000,38.580000,32.000000,2723.000000,637.000000,1549.000000,556.000000,2.394200,183100.000000 +-122.580000,38.430000,10.000000,3597.000000,661.000000,1132.000000,639.000000,3.937500,269200.000000 +-122.580000,38.380000,27.000000,3800.000000,728.000000,1587.000000,605.000000,4.723700,306600.000000 +-122.580000,38.150000,9.000000,1302.000000,177.000000,682.000000,190.000000,7.500000,423200.000000 +-122.580000,38.120000,13.000000,5027.000000,871.000000,1912.000000,770.000000,4.928600,309500.000000 +-122.580000,38.100000,22.000000,11872.000000,2300.000000,5600.000000,2200.000000,4.646300,276300.000000 +-122.580000,38.080000,27.000000,10839.000000,1637.000000,4406.000000,1623.000000,5.615000,285600.000000 
+-122.590000,38.780000,15.000000,764.000000,145.000000,366.000000,143.000000,3.375000,103100.000000 +-122.590000,38.580000,18.000000,3753.000000,752.000000,1454.000000,668.000000,3.758500,185700.000000 +-122.590000,38.560000,43.000000,2088.000000,379.000000,721.000000,293.000000,4.650000,245000.000000 +-122.590000,38.440000,14.000000,1665.000000,390.000000,505.000000,348.000000,3.183000,201200.000000 +-122.590000,38.430000,20.000000,2791.000000,546.000000,785.000000,512.000000,3.456100,216700.000000 +-122.590000,38.130000,20.000000,1589.000000,231.000000,601.000000,224.000000,5.375500,290900.000000 +-122.590000,37.970000,46.000000,4036.000000,856.000000,1872.000000,833.000000,4.562500,275200.000000 +-122.600000,38.930000,16.000000,1657.000000,390.000000,572.000000,301.000000,1.476700,62000.000000 +-122.600000,38.900000,23.000000,292.000000,56.000000,92.000000,41.000000,2.958300,91700.000000 +-122.600000,38.480000,17.000000,1528.000000,264.000000,606.000000,251.000000,6.600400,341500.000000 +-122.600000,38.240000,16.000000,1410.000000,209.000000,741.000000,229.000000,4.725000,204500.000000 +-122.600000,38.110000,23.000000,8642.000000,1294.000000,3594.000000,1253.000000,5.396200,301500.000000 +-122.600000,38.110000,19.000000,1752.000000,328.000000,873.000000,336.000000,3.806800,201600.000000 +-122.600000,38.000000,21.000000,2198.000000,462.000000,1100.000000,449.000000,4.109800,246600.000000 +-122.610000,41.740000,15.000000,4206.000000,922.000000,1863.000000,869.000000,2.059100,55700.000000 +-122.610000,38.930000,14.000000,231.000000,36.000000,108.000000,31.000000,4.389700,71300.000000 +-122.610000,38.420000,13.000000,7731.000000,1360.000000,2543.000000,1249.000000,4.695700,259800.000000 +-122.610000,38.260000,17.000000,2864.000000,487.000000,1482.000000,547.000000,4.683300,215200.000000 +-122.610000,38.250000,18.000000,2915.000000,418.000000,1340.000000,421.000000,5.245200,204900.000000 +-122.610000,38.240000,25.000000,2990.000000,450.000000,1335.000000,434.000000,4.700000,190100.000000 +-122.610000,38.240000,18.000000,2933.000000,481.000000,1279.000000,443.000000,5.084900,188500.000000 +-122.610000,38.240000,17.000000,1728.000000,271.000000,897.000000,284.000000,3.489600,185900.000000 +-122.610000,38.230000,18.000000,2042.000000,420.000000,914.000000,400.000000,2.987100,193800.000000 +-122.610000,38.090000,18.000000,6205.000000,821.000000,2311.000000,756.000000,6.908100,368700.000000 +-122.610000,37.990000,40.000000,7737.000000,1488.000000,3108.000000,1349.000000,4.437500,289600.000000 +-122.620000,38.960000,16.000000,1914.000000,446.000000,828.000000,332.000000,2.057700,69000.000000 +-122.620000,38.950000,19.000000,2230.000000,538.000000,832.000000,359.000000,1.686500,58800.000000 +-122.620000,38.940000,14.000000,1731.000000,400.000000,638.000000,282.000000,2.317900,57500.000000 +-122.620000,38.940000,13.000000,524.000000,129.000000,215.000000,90.000000,1.545500,55000.000000 +-122.620000,38.730000,21.000000,1425.000000,323.000000,727.000000,287.000000,2.147400,85300.000000 +-122.620000,38.540000,24.000000,2409.000000,464.000000,1006.000000,403.000000,4.516700,265200.000000 +-122.620000,38.400000,10.000000,9772.000000,1308.000000,3741.000000,1242.000000,6.526100,324700.000000 +-122.620000,38.250000,33.000000,1453.000000,250.000000,677.000000,237.000000,4.096200,170200.000000 +-122.620000,38.250000,24.000000,2388.000000,358.000000,1187.000000,362.000000,4.653400,196500.000000 +-122.620000,38.250000,20.000000,1888.000000,411.000000,826.000000,396.000000,2.875000,189100.000000 
+-122.620000,38.240000,33.000000,1369.000000,280.000000,758.000000,246.000000,4.034100,156500.000000 +-122.620000,38.240000,19.000000,1687.000000,253.000000,893.000000,257.000000,6.204000,201800.000000 +-122.620000,38.150000,14.000000,2259.000000,341.000000,1127.000000,346.000000,6.409200,334900.000000 +-122.620000,37.970000,52.000000,370.000000,62.000000,150.000000,56.000000,7.700600,316700.000000 +-122.620000,37.850000,30.000000,833.000000,164.000000,358.000000,143.000000,6.819800,493800.000000 +-122.630000,38.960000,20.000000,2507.000000,577.000000,1072.000000,457.000000,2.308300,60200.000000 +-122.630000,38.960000,17.000000,1708.000000,459.000000,633.000000,312.000000,1.750000,64000.000000 +-122.630000,38.950000,11.000000,686.000000,127.000000,246.000000,86.000000,1.708300,77300.000000 +-122.630000,38.940000,25.000000,661.000000,144.000000,192.000000,93.000000,1.756600,49000.000000 +-122.630000,38.940000,18.000000,3844.000000,969.000000,1832.000000,845.000000,1.125000,81800.000000 +-122.630000,38.500000,19.000000,2107.000000,332.000000,874.000000,341.000000,5.781900,265600.000000 +-122.630000,38.340000,15.000000,2153.000000,345.000000,979.000000,335.000000,5.196600,325400.000000 +-122.630000,38.260000,7.000000,7808.000000,1390.000000,3551.000000,1392.000000,4.606900,202300.000000 +-122.630000,38.250000,20.000000,3460.000000,602.000000,1707.000000,568.000000,3.711500,181900.000000 +-122.630000,38.240000,45.000000,1615.000000,338.000000,823.000000,327.000000,2.517900,145500.000000 +-122.630000,38.230000,45.000000,2264.000000,504.000000,1076.000000,472.000000,3.013900,194100.000000 +-122.630000,38.220000,34.000000,878.000000,160.000000,372.000000,167.000000,4.041700,232100.000000 +-122.630000,38.220000,17.000000,2652.000000,342.000000,1199.000000,350.000000,5.565000,267100.000000 +-122.630000,38.210000,22.000000,2933.000000,461.000000,1283.000000,449.000000,6.203400,291100.000000 +-122.640000,41.950000,18.000000,1867.000000,424.000000,802.000000,314.000000,1.824200,53500.000000 +-122.640000,41.740000,33.000000,2644.000000,459.000000,1113.000000,483.000000,3.309500,81300.000000 +-122.640000,41.730000,50.000000,1525.000000,308.000000,661.000000,285.000000,2.220600,63200.000000 +-122.640000,38.960000,29.000000,883.000000,187.000000,326.000000,136.000000,1.727300,58200.000000 +-122.640000,38.950000,28.000000,1503.000000,370.000000,522.000000,268.000000,1.202900,68900.000000 +-122.640000,38.870000,16.000000,1177.000000,240.000000,519.000000,199.000000,1.573900,73500.000000 +-122.640000,38.710000,20.000000,531.000000,126.000000,231.000000,96.000000,2.625000,89600.000000 +-122.640000,38.480000,19.000000,3244.000000,449.000000,1174.000000,454.000000,5.836900,255700.000000 +-122.640000,38.250000,31.000000,2554.000000,515.000000,1507.000000,533.000000,3.800000,162600.000000 +-122.640000,38.240000,52.000000,1621.000000,393.000000,635.000000,349.000000,2.520200,244000.000000 +-122.640000,38.240000,40.000000,1974.000000,410.000000,1039.000000,398.000000,3.791700,151600.000000 +-122.640000,38.230000,52.000000,2156.000000,469.000000,1070.000000,467.000000,3.301100,252300.000000 +-122.640000,38.230000,52.000000,1075.000000,249.000000,519.000000,210.000000,3.076900,230900.000000 +-122.640000,38.230000,49.000000,2300.000000,463.000000,1061.000000,429.000000,4.075000,228800.000000 +-122.640000,38.010000,36.000000,1336.000000,258.000000,678.000000,249.000000,5.578900,292000.000000 +-122.640000,37.960000,29.000000,377.000000,58.000000,151.000000,67.000000,9.555100,500001.000000 
+-122.650000,41.720000,15.000000,3643.000000,801.000000,1784.000000,743.000000,1.853300,57500.000000 +-122.650000,38.990000,16.000000,4279.000000,951.000000,1596.000000,666.000000,1.857100,75900.000000 +-122.650000,38.970000,32.000000,1856.000000,472.000000,703.000000,292.000000,1.191200,60000.000000 +-122.650000,38.920000,30.000000,70.000000,38.000000,20.000000,13.000000,4.125000,112500.000000 +-122.650000,38.470000,24.000000,2268.000000,330.000000,847.000000,296.000000,3.858000,214400.000000 +-122.650000,38.460000,14.000000,2096.000000,420.000000,926.000000,397.000000,4.064700,187800.000000 +-122.650000,38.400000,21.000000,1059.000000,150.000000,400.000000,154.000000,6.858600,343100.000000 +-122.650000,38.370000,15.000000,1848.000000,280.000000,786.000000,282.000000,5.720400,344100.000000 +-122.650000,38.270000,9.000000,4764.000000,816.000000,2077.000000,755.000000,5.139100,234500.000000 +-122.650000,38.250000,23.000000,4030.000000,813.000000,1852.000000,778.000000,3.402000,193300.000000 +-122.650000,38.240000,49.000000,3273.000000,579.000000,1431.000000,539.000000,4.275000,227600.000000 +-122.650000,38.240000,24.000000,1948.000000,310.000000,922.000000,313.000000,4.950000,243600.000000 +-122.650000,38.230000,52.000000,1923.000000,393.000000,910.000000,345.000000,3.450000,200600.000000 +-122.650000,38.110000,21.000000,3891.000000,616.000000,1968.000000,632.000000,5.552400,279200.000000 +-122.660000,40.520000,13.000000,3013.000000,486.000000,1361.000000,515.000000,4.535700,171200.000000 +-122.660000,39.030000,27.000000,1446.000000,329.000000,594.000000,255.000000,1.165000,53300.000000 +-122.660000,39.020000,16.000000,3715.000000,810.000000,943.000000,510.000000,1.744600,109400.000000 +-122.660000,38.480000,21.000000,2066.000000,393.000000,919.000000,395.000000,3.267000,176200.000000 +-122.660000,38.480000,16.000000,2724.000000,593.000000,1124.000000,586.000000,2.825000,186200.000000 +-122.660000,38.480000,16.000000,2697.000000,490.000000,1462.000000,515.000000,4.205100,190300.000000 +-122.660000,38.470000,23.000000,2246.000000,437.000000,1035.000000,386.000000,3.761700,172600.000000 +-122.660000,38.460000,14.000000,2364.000000,631.000000,1300.000000,625.000000,2.602300,221100.000000 +-122.660000,38.450000,26.000000,2081.000000,339.000000,906.000000,323.000000,4.437500,293500.000000 +-122.660000,38.440000,17.000000,5815.000000,898.000000,2614.000000,887.000000,4.365700,215900.000000 +-122.660000,38.420000,14.000000,5315.000000,1037.000000,2228.000000,950.000000,4.023000,208400.000000 +-122.660000,38.270000,16.000000,1523.000000,308.000000,477.000000,315.000000,2.169600,75000.000000 +-122.660000,37.930000,42.000000,1505.000000,324.000000,553.000000,277.000000,4.179200,350000.000000 +-122.670000,38.470000,19.000000,1848.000000,428.000000,1130.000000,433.000000,3.056800,190300.000000 +-122.670000,38.470000,16.000000,3452.000000,791.000000,1567.000000,731.000000,2.472200,194300.000000 +-122.670000,38.450000,24.000000,2622.000000,525.000000,1027.000000,510.000000,2.922200,242600.000000 +-122.670000,38.440000,29.000000,2551.000000,448.000000,1165.000000,456.000000,4.358700,196400.000000 +-122.670000,38.430000,17.000000,2007.000000,400.000000,895.000000,403.000000,3.281300,202700.000000 +-122.670000,38.430000,17.000000,1804.000000,304.000000,750.000000,298.000000,4.558800,196400.000000 +-122.670000,38.330000,4.000000,8072.000000,1606.000000,4323.000000,1475.000000,3.951800,220300.000000 +-122.670000,38.310000,28.000000,1915.000000,419.000000,930.000000,342.000000,3.787500,292700.000000 
+-122.670000,38.250000,32.000000,1333.000000,235.000000,660.000000,206.000000,4.072900,288500.000000 +-122.680000,41.150000,32.000000,817.000000,206.000000,224.000000,89.000000,3.631000,90400.000000 +-122.680000,38.980000,27.000000,2300.000000,508.000000,526.000000,254.000000,2.183800,109700.000000 +-122.680000,38.760000,29.000000,994.000000,226.000000,302.000000,117.000000,2.312500,67900.000000 +-122.680000,38.480000,15.000000,1575.000000,262.000000,716.000000,259.000000,5.340900,244600.000000 +-122.680000,38.460000,19.000000,4976.000000,711.000000,1926.000000,625.000000,7.300300,381300.000000 +-122.680000,38.460000,17.000000,3201.000000,527.000000,1244.000000,495.000000,4.714300,202900.000000 +-122.680000,38.460000,15.000000,1811.000000,406.000000,718.000000,403.000000,2.392900,141300.000000 +-122.680000,38.440000,29.000000,2796.000000,588.000000,1346.000000,562.000000,2.910700,169700.000000 +-122.680000,38.430000,18.000000,2723.000000,529.000000,1150.000000,520.000000,3.588500,191900.000000 +-122.680000,38.400000,32.000000,2826.000000,627.000000,1767.000000,628.000000,3.104700,141400.000000 +-122.680000,38.250000,29.000000,1315.000000,240.000000,650.000000,228.000000,3.826900,306000.000000 +-122.680000,38.070000,26.000000,1445.000000,244.000000,510.000000,207.000000,5.630500,430000.000000 +-122.690000,39.040000,9.000000,254.000000,50.000000,66.000000,29.000000,2.763900,112500.000000 +-122.690000,39.020000,27.000000,2199.000000,527.000000,744.000000,316.000000,2.109400,72400.000000 +-122.690000,38.940000,9.000000,1245.000000,234.000000,517.000000,187.000000,3.125000,93400.000000 +-122.690000,38.510000,18.000000,3364.000000,501.000000,1442.000000,506.000000,6.685400,313000.000000 +-122.690000,38.450000,36.000000,1943.000000,337.000000,711.000000,318.000000,3.919100,183000.000000 +-122.690000,38.440000,40.000000,1449.000000,281.000000,636.000000,295.000000,2.722200,161200.000000 +-122.690000,38.440000,35.000000,1356.000000,241.000000,620.000000,216.000000,3.552100,168300.000000 +-122.690000,38.370000,8.000000,6322.000000,1001.000000,2969.000000,1043.000000,4.823300,214000.000000 +-122.690000,38.360000,6.000000,5496.000000,1374.000000,2502.000000,1189.000000,2.482700,177500.000000 +-122.690000,38.350000,16.000000,1689.000000,254.000000,921.000000,270.000000,4.444400,191800.000000 +-122.690000,38.340000,16.000000,1683.000000,341.000000,880.000000,327.000000,3.285700,160200.000000 +-122.690000,38.340000,15.000000,3091.000000,697.000000,1602.000000,682.000000,4.007100,135500.000000 +-122.690000,38.340000,12.000000,3876.000000,782.000000,2146.000000,764.000000,4.084400,165400.000000 +-122.690000,38.320000,15.000000,2536.000000,414.000000,1400.000000,426.000000,5.661300,172400.000000 +-122.690000,38.300000,30.000000,3919.000000,743.000000,1693.000000,693.000000,3.382700,292100.000000 +-122.690000,38.270000,32.000000,2344.000000,434.000000,1066.000000,384.000000,4.031300,285000.000000 +-122.700000,39.000000,18.000000,793.000000,148.000000,186.000000,59.000000,2.312500,162500.000000 +-122.700000,38.990000,18.000000,1177.000000,224.000000,181.000000,105.000000,2.355800,134700.000000 +-122.700000,38.660000,43.000000,1384.000000,284.000000,582.000000,224.000000,3.906300,210000.000000 +-122.700000,38.460000,29.000000,2891.000000,459.000000,1012.000000,441.000000,5.041500,240200.000000 +-122.700000,38.450000,47.000000,904.000000,154.000000,310.000000,144.000000,3.976600,190600.000000 +-122.700000,38.450000,39.000000,2015.000000,335.000000,640.000000,315.000000,4.173400,240500.000000 
+-122.700000,38.440000,45.000000,883.000000,202.000000,401.000000,194.000000,3.284500,178300.000000 +-122.700000,38.440000,42.000000,709.000000,182.000000,547.000000,172.000000,2.191200,165000.000000 +-122.700000,38.440000,35.000000,1304.000000,343.000000,822.000000,304.000000,3.293500,157800.000000 +-122.700000,38.430000,28.000000,1585.000000,412.000000,1362.000000,424.000000,1.668500,114100.000000 +-122.700000,38.390000,16.000000,4922.000000,1211.000000,2557.000000,1088.000000,2.091500,168100.000000 +-122.700000,38.360000,11.000000,5817.000000,878.000000,2538.000000,876.000000,4.221000,227100.000000 +-122.700000,38.350000,16.000000,1328.000000,187.000000,607.000000,197.000000,5.036600,257800.000000 +-122.700000,38.350000,14.000000,2313.000000,411.000000,954.000000,397.000000,3.781300,146500.000000 +-122.700000,38.340000,19.000000,2987.000000,676.000000,1782.000000,688.000000,2.826100,154500.000000 +-122.700000,38.330000,26.000000,1887.000000,381.000000,1060.000000,364.000000,3.007800,160400.000000 +-122.700000,38.330000,26.000000,1584.000000,295.000000,846.000000,295.000000,3.375000,156300.000000 +-122.700000,38.310000,14.000000,3155.000000,580.000000,1208.000000,501.000000,4.196400,258100.000000 +-122.700000,38.230000,47.000000,2090.000000,387.000000,1053.000000,377.000000,3.567300,310300.000000 +-122.700000,38.030000,42.000000,1410.000000,308.000000,624.000000,292.000000,4.137900,309100.000000 +-122.710000,38.910000,20.000000,41.000000,18.000000,94.000000,10.000000,1.375000,55000.000000 +-122.710000,38.500000,15.000000,5645.000000,830.000000,2324.000000,769.000000,6.610400,330900.000000 +-122.710000,38.460000,42.000000,1574.000000,376.000000,844.000000,369.000000,2.314000,169400.000000 +-122.710000,38.460000,41.000000,1974.000000,482.000000,965.000000,458.000000,2.905000,159300.000000 +-122.710000,38.460000,36.000000,2175.000000,516.000000,1087.000000,477.000000,3.044400,167200.000000 +-122.710000,38.460000,23.000000,3220.000000,603.000000,1299.000000,591.000000,3.926100,213300.000000 +-122.710000,38.450000,52.000000,2259.000000,537.000000,957.000000,520.000000,2.182700,188800.000000 +-122.710000,38.450000,48.000000,3118.000000,561.000000,1275.000000,530.000000,3.455000,222100.000000 +-122.710000,38.450000,39.000000,2739.000000,573.000000,1223.000000,569.000000,2.966300,185400.000000 +-122.710000,38.440000,52.000000,988.000000,283.000000,475.000000,242.000000,1.368400,258300.000000 +-122.710000,38.440000,27.000000,966.000000,251.000000,462.000000,230.000000,1.700000,350000.000000 +-122.710000,38.430000,52.000000,1439.000000,325.000000,738.000000,316.000000,2.226200,129900.000000 +-122.710000,38.430000,38.000000,1689.000000,526.000000,1071.000000,529.000000,1.502600,124000.000000 +-122.710000,38.420000,23.000000,1569.000000,414.000000,1031.000000,368.000000,1.626700,129200.000000 +-122.710000,38.400000,17.000000,1690.000000,464.000000,833.000000,445.000000,1.439000,140600.000000 +-122.710000,38.370000,16.000000,2355.000000,345.000000,1014.000000,348.000000,5.601800,253000.000000 +-122.710000,38.350000,11.000000,2242.000000,699.000000,1203.000000,642.000000,2.346400,104200.000000 +-122.710000,38.340000,23.000000,2744.000000,588.000000,1493.000000,557.000000,3.178100,162000.000000 +-122.710000,38.340000,22.000000,1249.000000,335.000000,699.000000,308.000000,2.603300,121600.000000 +-122.710000,38.330000,13.000000,4011.000000,936.000000,2064.000000,914.000000,3.695300,157600.000000 +-122.710000,37.900000,23.000000,1250.000000,257.000000,437.000000,188.000000,3.115000,242600.000000 
+-122.720000,38.880000,29.000000,2781.000000,617.000000,890.000000,310.000000,1.990600,96600.000000 +-122.720000,38.580000,4.000000,7042.000000,1100.000000,2936.000000,1043.000000,5.055500,240800.000000 +-122.720000,38.480000,23.000000,2296.000000,356.000000,902.000000,334.000000,6.029800,289100.000000 +-122.720000,38.470000,29.000000,1706.000000,415.000000,990.000000,394.000000,1.993200,164800.000000 +-122.720000,38.460000,35.000000,1445.000000,309.000000,795.000000,308.000000,2.907300,157000.000000 +-122.720000,38.440000,52.000000,188.000000,62.000000,301.000000,72.000000,0.943700,129200.000000 +-122.720000,38.440000,52.000000,1059.000000,281.000000,627.000000,273.000000,1.535700,137500.000000 +-122.720000,38.440000,48.000000,707.000000,166.000000,458.000000,172.000000,3.179700,140400.000000 +-122.720000,38.430000,31.000000,2020.000000,476.000000,1408.000000,437.000000,2.573500,131100.000000 +-122.720000,38.420000,30.000000,2099.000000,406.000000,1156.000000,401.000000,2.803600,152300.000000 +-122.720000,38.420000,26.000000,1168.000000,253.000000,937.000000,248.000000,1.945800,146000.000000 +-122.720000,38.350000,16.000000,3049.000000,609.000000,1675.000000,618.000000,2.411700,162500.000000 +-122.720000,38.310000,26.000000,1644.000000,294.000000,801.000000,291.000000,4.390600,248000.000000 +-122.730000,41.760000,19.000000,2200.000000,414.000000,950.000000,367.000000,2.535700,94200.000000 +-122.730000,39.040000,23.000000,1618.000000,395.000000,425.000000,244.000000,1.983300,111500.000000 +-122.730000,38.470000,16.000000,1834.000000,391.000000,994.000000,390.000000,3.726600,156500.000000 +-122.730000,38.460000,14.000000,2324.000000,754.000000,1026.000000,677.000000,1.722000,150000.000000 +-122.730000,38.440000,35.000000,1120.000000,297.000000,659.000000,274.000000,2.382400,145000.000000 +-122.730000,38.440000,28.000000,1073.000000,241.000000,652.000000,238.000000,2.400000,146200.000000 +-122.730000,38.440000,20.000000,2919.000000,508.000000,1711.000000,500.000000,3.875000,140300.000000 +-122.730000,38.430000,15.000000,3265.000000,690.000000,1629.000000,629.000000,3.713200,167600.000000 +-122.730000,38.420000,26.000000,1446.000000,296.000000,884.000000,295.000000,4.352300,150000.000000 +-122.730000,38.370000,40.000000,1389.000000,309.000000,841.000000,288.000000,3.109400,183300.000000 +-122.730000,38.340000,44.000000,743.000000,155.000000,434.000000,162.000000,2.581900,209600.000000 +-122.730000,38.260000,35.000000,3941.000000,645.000000,1668.000000,620.000000,4.385000,317700.000000 +-122.740000,39.710000,16.000000,255.000000,73.000000,85.000000,38.000000,1.660700,14999.000000 +-122.740000,38.830000,12.000000,4515.000000,909.000000,1554.000000,528.000000,3.353100,90800.000000 +-122.740000,38.470000,16.000000,1426.000000,287.000000,525.000000,260.000000,3.071400,161700.000000 +-122.740000,38.450000,25.000000,2696.000000,496.000000,1296.000000,514.000000,4.079800,179200.000000 +-122.740000,38.450000,17.000000,3064.000000,588.000000,1704.000000,590.000000,3.932900,170900.000000 +-122.740000,38.440000,23.000000,2819.000000,612.000000,1644.000000,546.000000,2.657600,147900.000000 +-122.740000,38.440000,17.000000,2287.000000,497.000000,1240.000000,493.000000,3.584500,164300.000000 +-122.740000,38.430000,11.000000,4670.000000,1007.000000,2430.000000,962.000000,3.034100,142300.000000 +-122.740000,38.420000,42.000000,2050.000000,434.000000,1073.000000,416.000000,2.375000,141000.000000 +-122.750000,38.540000,6.000000,6719.000000,1016.000000,2699.000000,997.000000,5.488600,254200.000000 
+-122.750000,38.500000,16.000000,4196.000000,638.000000,1713.000000,615.000000,5.449000,252100.000000 +-122.750000,38.460000,16.000000,2653.000000,606.000000,1693.000000,586.000000,2.638400,146900.000000 +-122.750000,38.460000,13.000000,4323.000000,1020.000000,2566.000000,728.000000,3.014700,142800.000000 +-122.750000,38.430000,36.000000,1599.000000,345.000000,1086.000000,314.000000,2.666700,149100.000000 +-122.750000,38.410000,17.000000,3150.000000,588.000000,1857.000000,610.000000,3.968800,165000.000000 +-122.760000,40.400000,22.000000,2153.000000,461.000000,903.000000,314.000000,2.125000,123200.000000 +-122.760000,38.520000,6.000000,2073.000000,388.000000,826.000000,375.000000,3.055000,224100.000000 +-122.760000,38.460000,14.000000,4794.000000,767.000000,2252.000000,768.000000,4.206100,213100.000000 +-122.760000,38.450000,8.000000,5823.000000,1104.000000,2864.000000,1041.000000,3.629200,183600.000000 +-122.760000,38.440000,14.000000,4376.000000,797.000000,1809.000000,746.000000,3.824400,180000.000000 +-122.760000,38.440000,11.000000,2895.000000,524.000000,1633.000000,534.000000,4.728300,170200.000000 +-122.760000,38.350000,30.000000,2260.000000,374.000000,958.000000,359.000000,5.032300,222400.000000 +-122.770000,38.920000,26.000000,712.000000,140.000000,293.000000,100.000000,4.011900,119400.000000 +-122.770000,38.390000,35.000000,2611.000000,475.000000,1293.000000,463.000000,2.750000,197500.000000 +-122.770000,38.330000,32.000000,2054.000000,324.000000,843.000000,306.000000,4.587500,290700.000000 +-122.770000,38.290000,32.000000,3201.000000,542.000000,1869.000000,519.000000,3.244200,268000.000000 +-122.780000,39.050000,15.000000,1601.000000,323.000000,661.000000,269.000000,2.618100,108900.000000 +-122.780000,38.530000,9.000000,3659.000000,652.000000,1889.000000,632.000000,4.271600,250800.000000 +-122.780000,38.520000,23.000000,2511.000000,549.000000,1052.000000,527.000000,2.492200,192000.000000 +-122.780000,38.440000,14.000000,4143.000000,656.000000,1569.000000,629.000000,3.976600,345300.000000 +-122.780000,38.410000,43.000000,1351.000000,277.000000,1011.000000,297.000000,2.591700,144000.000000 +-122.780000,38.370000,21.000000,795.000000,163.000000,414.000000,162.000000,3.799100,175000.000000 +-122.790000,40.750000,17.000000,3851.000000,818.000000,1352.000000,560.000000,2.125000,71700.000000 +-122.790000,39.090000,20.000000,1798.000000,395.000000,685.000000,331.000000,1.625000,66800.000000 +-122.790000,39.080000,23.000000,952.000000,200.000000,321.000000,128.000000,1.520800,89000.000000 +-122.790000,38.500000,18.000000,4839.000000,918.000000,2755.000000,841.000000,3.750000,248300.000000 +-122.790000,38.480000,7.000000,6837.000000,1417.000000,3468.000000,1405.000000,3.166200,191000.000000 +-122.790000,38.420000,9.000000,4967.000000,885.000000,2581.000000,915.000000,5.038000,185600.000000 +-122.800000,39.080000,17.000000,1880.000000,467.000000,798.000000,342.000000,1.467600,65000.000000 +-122.800000,38.390000,26.000000,2273.000000,474.000000,1124.000000,420.000000,2.945300,166700.000000 +-122.800000,38.370000,26.000000,1634.000000,315.000000,909.000000,317.000000,4.173100,257200.000000 +-122.800000,38.180000,36.000000,2378.000000,476.000000,957.000000,362.000000,3.625000,253100.000000 +-122.810000,40.930000,16.000000,2050.000000,471.000000,588.000000,195.000000,2.708300,88900.000000 +-122.810000,38.540000,12.000000,2289.000000,611.000000,919.000000,540.000000,1.155300,139300.000000 +-122.810000,38.460000,28.000000,3580.000000,611.000000,1634.000000,567.000000,4.745000,248600.000000 
+-122.810000,38.360000,18.000000,2399.000000,389.000000,1131.000000,391.000000,5.276900,293900.000000 +-122.810000,38.080000,19.000000,1615.000000,366.000000,815.000000,337.000000,3.460900,238800.000000 +-122.820000,38.640000,29.000000,2176.000000,385.000000,1117.000000,374.000000,3.868100,188600.000000 +-122.820000,38.610000,41.000000,2720.000000,501.000000,987.000000,364.000000,4.029400,201700.000000 +-122.820000,38.550000,8.000000,6190.000000,1088.000000,2967.000000,1000.000000,3.861600,195100.000000 +-122.820000,38.530000,27.000000,1823.000000,360.000000,907.000000,317.000000,3.276000,172900.000000 +-122.820000,38.440000,23.000000,1551.000000,236.000000,555.000000,243.000000,4.679200,304700.000000 +-122.820000,38.410000,32.000000,701.000000,182.000000,489.000000,168.000000,2.785000,169300.000000 +-122.820000,38.400000,40.000000,2406.000000,423.000000,1054.000000,426.000000,3.884600,215900.000000 +-122.820000,38.390000,32.000000,1437.000000,257.000000,752.000000,245.000000,4.742200,240900.000000 +-122.820000,38.390000,22.000000,1288.000000,243.000000,593.000000,220.000000,3.625000,233700.000000 +-122.820000,38.380000,27.000000,2565.000000,479.000000,1227.000000,467.000000,4.513200,259900.000000 +-122.820000,38.330000,25.000000,3067.000000,569.000000,1602.000000,550.000000,3.991700,244100.000000 +-122.830000,39.090000,26.000000,2191.000000,495.000000,679.000000,371.000000,1.467900,94700.000000 +-122.830000,38.990000,15.000000,289.000000,49.000000,191.000000,54.000000,1.683300,113900.000000 +-122.830000,38.980000,17.000000,1383.000000,347.000000,719.000000,296.000000,1.616400,77800.000000 +-122.830000,38.960000,15.000000,1318.000000,296.000000,567.000000,276.000000,1.869200,93800.000000 +-122.830000,38.890000,11.000000,640.000000,134.000000,268.000000,90.000000,3.451400,100000.000000 +-122.830000,38.580000,17.000000,5199.000000,1023.000000,2036.000000,890.000000,3.245200,168800.000000 +-122.830000,38.400000,37.000000,2217.000000,451.000000,1019.000000,428.000000,3.121700,178500.000000 +-122.830000,38.390000,19.000000,1765.000000,394.000000,868.000000,388.000000,2.462000,260300.000000 +-122.840000,38.420000,29.000000,2756.000000,551.000000,1381.000000,531.000000,2.962500,237300.000000 +-122.840000,38.410000,19.000000,2191.000000,391.000000,1065.000000,404.000000,4.125000,204600.000000 +-122.840000,38.390000,16.000000,1688.000000,292.000000,793.000000,280.000000,4.435700,216900.000000 +-122.840000,38.070000,31.000000,1858.000000,367.000000,701.000000,297.000000,3.826900,270700.000000 +-122.850000,39.000000,20.000000,1580.000000,318.000000,753.000000,252.000000,1.870400,88500.000000 +-122.850000,38.460000,22.000000,3328.000000,550.000000,1309.000000,512.000000,4.710500,266200.000000 +-122.850000,38.370000,16.000000,1762.000000,293.000000,810.000000,297.000000,4.443700,305000.000000 +-122.860000,39.080000,24.000000,3127.000000,674.000000,1015.000000,448.000000,2.041700,78800.000000 +-122.860000,39.050000,20.000000,1592.000000,327.000000,647.000000,253.000000,2.532600,136800.000000 +-122.860000,38.620000,35.000000,2597.000000,522.000000,1231.000000,499.000000,2.743200,174000.000000 +-122.860000,38.610000,52.000000,1753.000000,380.000000,982.000000,380.000000,3.401300,183300.000000 +-122.860000,38.440000,31.000000,1534.000000,292.000000,716.000000,288.000000,3.447100,209500.000000 +-122.860000,38.420000,38.000000,1166.000000,223.000000,584.000000,225.000000,3.666700,244400.000000 +-122.860000,38.100000,44.000000,2602.000000,509.000000,691.000000,343.000000,4.312500,261500.000000 
+-122.870000,39.130000,15.000000,1927.000000,427.000000,810.000000,321.000000,1.636900,86500.000000 +-122.870000,38.680000,32.000000,4073.000000,718.000000,2053.000000,629.000000,3.735200,228000.000000 +-122.870000,38.620000,52.000000,1514.000000,348.000000,767.000000,354.000000,2.190300,160000.000000 +-122.870000,38.610000,23.000000,2676.000000,521.000000,1456.000000,500.000000,3.736100,173700.000000 +-122.870000,38.480000,27.000000,3894.000000,776.000000,1832.000000,715.000000,3.508500,187800.000000 +-122.870000,38.430000,36.000000,1987.000000,387.000000,1065.000000,347.000000,4.044600,172200.000000 +-122.880000,39.140000,20.000000,1125.000000,231.000000,521.000000,196.000000,2.218800,106300.000000 +-122.880000,38.460000,25.000000,1563.000000,314.000000,737.000000,305.000000,2.568700,249200.000000 +-122.880000,38.340000,20.000000,3404.000000,628.000000,1641.000000,585.000000,5.057400,276200.000000 +-122.890000,40.760000,14.000000,712.000000,131.000000,270.000000,90.000000,2.395800,102100.000000 +-122.890000,39.420000,16.000000,411.000000,114.000000,26.000000,19.000000,0.499900,73500.000000 +-122.890000,38.930000,20.000000,1214.000000,247.000000,504.000000,223.000000,2.718800,105700.000000 +-122.890000,38.420000,28.000000,2388.000000,437.000000,1015.000000,381.000000,5.151200,268300.000000 +-122.890000,38.400000,22.000000,2900.000000,538.000000,1445.000000,515.000000,4.511000,296800.000000 +-122.890000,38.380000,16.000000,2017.000000,369.000000,931.000000,336.000000,5.766400,267500.000000 +-122.900000,41.460000,31.000000,1277.000000,263.000000,600.000000,241.000000,1.729200,61700.000000 +-122.900000,39.230000,39.000000,1295.000000,240.000000,534.000000,179.000000,3.951900,98900.000000 +-122.900000,39.170000,45.000000,1314.000000,277.000000,649.000000,232.000000,2.575000,73600.000000 +-122.900000,39.090000,15.000000,2483.000000,544.000000,835.000000,380.000000,1.914100,143200.000000 +-122.900000,38.280000,52.000000,1275.000000,218.000000,627.000000,185.000000,2.348200,163500.000000 +-122.910000,39.180000,43.000000,89.000000,18.000000,86.000000,27.000000,2.020800,72500.000000 +-122.910000,39.170000,44.000000,202.000000,42.000000,142.000000,39.000000,4.350000,68300.000000 +-122.910000,39.060000,21.000000,1236.000000,238.000000,601.000000,261.000000,1.939000,100300.000000 +-122.910000,39.050000,27.000000,789.000000,208.000000,295.000000,108.000000,3.766700,95000.000000 +-122.910000,39.050000,20.000000,1128.000000,229.000000,621.000000,210.000000,3.221600,93500.000000 +-122.910000,39.030000,14.000000,2374.000000,557.000000,723.000000,427.000000,1.353200,95800.000000 +-122.910000,38.490000,37.000000,2469.000000,519.000000,1137.000000,474.000000,3.634300,146500.000000 +-122.910000,38.460000,18.000000,2021.000000,353.000000,912.000000,329.000000,4.500000,251900.000000 +-122.910000,38.430000,19.000000,1968.000000,350.000000,852.000000,308.000000,4.670500,269800.000000 +-122.920000,41.700000,23.000000,4017.000000,792.000000,1634.000000,619.000000,2.357100,62000.000000 +-122.920000,39.080000,24.000000,341.000000,64.000000,146.000000,57.000000,4.000000,166300.000000 +-122.920000,39.050000,38.000000,3131.000000,624.000000,1591.000000,568.000000,2.545700,80700.000000 +-122.920000,39.050000,16.000000,1548.000000,295.000000,605.000000,250.000000,3.565200,119000.000000 +-122.920000,38.970000,20.000000,2067.000000,384.000000,904.000000,333.000000,2.993400,134200.000000 +-122.930000,41.480000,20.000000,4288.000000,789.000000,1800.000000,660.000000,2.723000,79600.000000 
+-122.930000,40.780000,20.000000,3758.000000,798.000000,1685.000000,757.000000,2.366700,91200.000000 +-122.930000,38.380000,18.000000,2562.000000,500.000000,1128.000000,414.000000,3.933600,262500.000000 +-122.930000,38.020000,28.000000,1284.000000,265.000000,628.000000,219.000000,3.546900,200000.000000 +-122.940000,38.640000,26.000000,4050.000000,712.000000,2072.000000,636.000000,4.078100,287800.000000 +-122.940000,38.570000,33.000000,1530.000000,266.000000,728.000000,250.000000,5.100500,266700.000000 +-122.940000,38.530000,49.000000,1141.000000,239.000000,505.000000,184.000000,3.714300,148800.000000 +-122.940000,38.500000,46.000000,2280.000000,492.000000,807.000000,366.000000,2.631600,117000.000000 +-122.940000,38.490000,37.000000,3169.000000,719.000000,777.000000,344.000000,2.707200,117100.000000 +-122.950000,40.670000,17.000000,1498.000000,331.000000,574.000000,242.000000,2.026800,94200.000000 +-122.950000,38.730000,37.000000,1548.000000,328.000000,863.000000,287.000000,2.979200,151300.000000 +-122.960000,40.770000,29.000000,1637.000000,297.000000,753.000000,270.000000,3.289100,93100.000000 +-122.960000,38.420000,50.000000,2530.000000,524.000000,940.000000,361.000000,2.937500,122900.000000 +-122.960000,38.260000,20.000000,1982.000000,358.000000,308.000000,132.000000,3.142900,240900.000000 +-122.970000,38.530000,48.000000,3939.000000,860.000000,1257.000000,571.000000,2.116500,98700.000000 +-122.970000,38.500000,44.000000,3234.000000,746.000000,1112.000000,470.000000,1.926500,132700.000000 +-122.980000,38.440000,29.000000,4450.000000,939.000000,1328.000000,590.000000,3.100000,162100.000000 +-122.990000,39.020000,14.000000,1582.000000,301.000000,851.000000,273.000000,3.450000,164100.000000 +-123.000000,38.510000,33.000000,1565.000000,390.000000,759.000000,311.000000,2.672600,153100.000000 +-123.000000,38.330000,8.000000,3223.000000,637.000000,851.000000,418.000000,5.644500,364800.000000 +-123.010000,38.800000,21.000000,360.000000,96.000000,131.000000,74.000000,3.515600,133300.000000 +-123.010000,38.790000,32.000000,2697.000000,529.000000,1417.000000,535.000000,3.254600,134100.000000 +-123.010000,38.670000,33.000000,914.000000,147.000000,394.000000,132.000000,4.687500,246200.000000 +-123.010000,38.480000,37.000000,1179.000000,282.000000,354.000000,176.000000,1.371200,118300.000000 +-123.020000,38.810000,45.000000,1717.000000,389.000000,916.000000,367.000000,3.242500,138800.000000 +-123.020000,38.540000,35.000000,2157.000000,487.000000,768.000000,322.000000,3.231500,136900.000000 +-123.020000,38.460000,52.000000,2154.000000,499.000000,524.000000,259.000000,2.055600,120000.000000 +-123.020000,38.360000,16.000000,1496.000000,298.000000,778.000000,284.000000,3.858900,268800.000000 +-123.030000,38.790000,16.000000,4047.000000,769.000000,1998.000000,673.000000,3.375000,171900.000000 +-123.040000,38.490000,30.000000,3977.000000,930.000000,1387.000000,582.000000,2.616100,132500.000000 +-123.070000,39.120000,24.000000,1098.000000,193.000000,353.000000,145.000000,3.833300,92600.000000 +-123.070000,38.460000,31.000000,855.000000,217.000000,280.000000,139.000000,2.361100,112500.000000 +-123.080000,41.260000,34.000000,2773.000000,679.000000,1066.000000,424.000000,1.675700,63300.000000 +-123.080000,38.380000,28.000000,3297.000000,676.000000,923.000000,373.000000,3.916700,232600.000000 +-123.100000,39.150000,32.000000,1143.000000,208.000000,454.000000,188.000000,3.833300,116100.000000 +-123.100000,38.970000,36.000000,1211.000000,247.000000,697.000000,251.000000,2.576100,94900.000000 
+-123.100000,38.790000,20.000000,3109.000000,712.000000,1643.000000,638.000000,2.834400,164400.000000 +-123.110000,39.320000,20.000000,2745.000000,504.000000,1421.000000,430.000000,3.343100,137500.000000 +-123.120000,40.540000,23.000000,1091.000000,217.000000,539.000000,201.000000,1.869600,61500.000000 +-123.130000,40.850000,18.000000,1650.000000,377.000000,675.000000,282.000000,1.893300,84700.000000 +-123.150000,39.740000,23.000000,608.000000,143.000000,281.000000,108.000000,2.930600,70000.000000 +-123.150000,39.310000,19.000000,1026.000000,205.000000,424.000000,152.000000,2.883300,154200.000000 +-123.150000,38.940000,22.000000,2163.000000,436.000000,1048.000000,358.000000,2.717100,95800.000000 +-123.160000,39.100000,31.000000,418.000000,82.000000,327.000000,81.000000,2.775000,120800.000000 +-123.170000,40.310000,36.000000,98.000000,28.000000,18.000000,8.000000,0.536000,14999.000000 +-123.170000,39.180000,14.000000,2240.000000,327.000000,1030.000000,308.000000,5.958500,214900.000000 +-123.170000,39.150000,30.000000,1904.000000,331.000000,816.000000,325.000000,4.425000,161900.000000 +-123.180000,40.580000,18.000000,1451.000000,278.000000,695.000000,254.000000,1.726200,73700.000000 +-123.180000,39.260000,25.000000,3066.000000,570.000000,1558.000000,535.000000,3.788000,134200.000000 +-123.180000,39.230000,18.000000,243.000000,55.000000,115.000000,54.000000,2.125000,175000.000000 +-123.190000,39.210000,22.000000,1542.000000,291.000000,821.000000,285.000000,3.591700,118800.000000 +-123.190000,39.150000,16.000000,2577.000000,495.000000,1232.000000,488.000000,2.601200,125600.000000 +-123.190000,39.120000,38.000000,267.000000,57.000000,196.000000,60.000000,2.312500,70000.000000 +-123.200000,39.160000,14.000000,1908.000000,484.000000,1195.000000,467.000000,1.792900,82300.000000 +-123.200000,39.150000,27.000000,990.000000,238.000000,592.000000,225.000000,2.007400,96200.000000 +-123.200000,39.140000,17.000000,1620.000000,396.000000,878.000000,399.000000,1.804200,109200.000000 +-123.200000,39.130000,26.000000,1474.000000,417.000000,1065.000000,401.000000,1.375000,84400.000000 +-123.210000,40.510000,16.000000,241.000000,84.000000,152.000000,61.000000,1.375000,48800.000000 +-123.210000,39.200000,17.000000,3145.000000,693.000000,1560.000000,647.000000,2.292600,149300.000000 +-123.210000,39.180000,17.000000,2772.000000,576.000000,1501.000000,584.000000,2.627500,142100.000000 +-123.210000,39.150000,52.000000,1370.000000,258.000000,617.000000,228.000000,2.550000,112900.000000 +-123.210000,39.150000,31.000000,2685.000000,675.000000,1367.000000,626.000000,1.657100,108900.000000 +-123.210000,39.140000,39.000000,1419.000000,262.000000,661.000000,278.000000,3.000000,114600.000000 +-123.210000,39.130000,27.000000,1531.000000,266.000000,822.000000,234.000000,4.046900,127400.000000 +-123.210000,39.070000,17.000000,1890.000000,342.000000,877.000000,312.000000,3.783300,159800.000000 +-123.220000,40.540000,27.000000,1573.000000,361.000000,847.000000,330.000000,1.903400,49600.000000 +-123.220000,40.160000,27.000000,1848.000000,449.000000,396.000000,150.000000,2.847200,41300.000000 +-123.220000,39.280000,16.000000,5569.000000,1106.000000,3148.000000,1088.000000,3.145500,142900.000000 +-123.220000,39.150000,45.000000,1348.000000,265.000000,639.000000,270.000000,3.366700,115200.000000 +-123.220000,39.150000,36.000000,1166.000000,216.000000,504.000000,203.000000,3.593800,122100.000000 +-123.230000,39.770000,25.000000,2075.000000,435.000000,991.000000,377.000000,1.228100,60300.000000 
+-123.230000,39.330000,20.000000,804.000000,121.000000,448.000000,140.000000,3.963200,147100.000000 +-123.230000,39.130000,33.000000,1176.000000,211.000000,529.000000,217.000000,3.895800,144000.000000 +-123.240000,39.810000,25.000000,1435.000000,304.000000,746.000000,259.000000,1.778800,57900.000000 +-123.240000,38.700000,38.000000,1460.000000,311.000000,569.000000,176.000000,2.717100,131300.000000 +-123.250000,38.540000,27.000000,3658.000000,764.000000,1278.000000,518.000000,3.353600,157500.000000 +-123.260000,41.860000,25.000000,2344.000000,532.000000,1117.000000,424.000000,2.722200,64600.000000 +-123.320000,40.430000,15.000000,661.000000,146.000000,131.000000,57.000000,0.499900,56700.000000 +-123.320000,39.420000,22.000000,2085.000000,432.000000,1133.000000,402.000000,2.390600,92600.000000 +-123.340000,39.500000,15.000000,2342.000000,535.000000,1064.000000,433.000000,1.896700,96600.000000 +-123.340000,39.390000,18.000000,2821.000000,628.000000,1636.000000,615.000000,2.333300,84000.000000 +-123.340000,39.100000,24.000000,5372.000000,1051.000000,3002.000000,992.000000,3.065200,131100.000000 +-123.350000,39.420000,18.000000,1619.000000,346.000000,904.000000,295.000000,2.162500,77200.000000 +-123.350000,39.400000,27.000000,1321.000000,338.000000,779.000000,327.000000,1.850000,71800.000000 +-123.360000,39.410000,46.000000,1748.000000,362.000000,808.000000,330.000000,2.918300,76900.000000 +-123.360000,39.400000,21.000000,1081.000000,254.000000,715.000000,275.000000,1.562500,71500.000000 +-123.360000,39.250000,17.000000,1087.000000,254.000000,522.000000,202.000000,2.587500,144500.000000 +-123.360000,39.010000,35.000000,1551.000000,321.000000,857.000000,288.000000,2.723200,115400.000000 +-123.370000,39.430000,32.000000,2780.000000,470.000000,1281.000000,479.000000,3.588000,96000.000000 +-123.380000,41.800000,25.000000,1941.000000,477.000000,1000.000000,390.000000,2.297600,54400.000000 +-123.380000,39.370000,18.000000,3946.000000,813.000000,1899.000000,730.000000,2.642400,124600.000000 +-123.400000,39.460000,10.000000,4086.000000,831.000000,2111.000000,758.000000,3.215600,104400.000000 +-123.410000,41.600000,23.000000,1654.000000,369.000000,669.000000,273.000000,1.965000,65400.000000 +-123.410000,40.070000,17.000000,449.000000,151.000000,141.000000,53.000000,0.836200,87500.000000 +-123.430000,40.220000,20.000000,133.000000,35.000000,87.000000,37.000000,3.625000,67500.000000 +-123.480000,40.790000,15.000000,619.000000,160.000000,287.000000,104.000000,1.910700,79200.000000 +-123.480000,40.340000,19.000000,518.000000,108.000000,216.000000,80.000000,2.708300,64500.000000 +-123.490000,38.700000,9.000000,5409.000000,1019.000000,594.000000,327.000000,3.312500,295400.000000 +-123.530000,40.880000,20.000000,2680.000000,599.000000,918.000000,345.000000,2.211500,75000.000000 +-123.530000,38.930000,38.000000,1706.000000,355.000000,506.000000,211.000000,2.562500,165600.000000 +-123.540000,39.170000,18.000000,2251.000000,510.000000,1032.000000,369.000000,2.294600,101000.000000 +-123.580000,39.660000,15.000000,1839.000000,489.000000,887.000000,332.000000,2.242900,100000.000000 +-123.590000,38.800000,17.000000,5202.000000,1037.000000,1742.000000,803.000000,3.120100,176100.000000 +-123.630000,41.110000,19.000000,1797.000000,384.000000,1033.000000,327.000000,1.491100,59200.000000 +-123.640000,39.450000,21.000000,3359.000000,677.000000,1908.000000,642.000000,3.043300,140700.000000 +-123.660000,41.300000,22.000000,1580.000000,372.000000,686.000000,264.000000,1.806500,62700.000000 
+-123.680000,40.240000,31.000000,1852.000000,452.000000,917.000000,359.000000,1.725000,54300.000000 +-123.690000,38.900000,17.000000,2206.000000,478.000000,1140.000000,428.000000,2.198500,95300.000000 +-123.700000,39.320000,18.000000,1652.000000,352.000000,711.000000,292.000000,3.107100,213200.000000 +-123.710000,39.880000,42.000000,1518.000000,383.000000,656.000000,303.000000,1.495200,69800.000000 +-123.720000,41.090000,19.000000,1970.000000,431.000000,1166.000000,363.000000,1.820800,50000.000000 +-123.730000,40.480000,25.000000,2015.000000,524.000000,746.000000,251.000000,1.715300,77100.000000 +-123.730000,39.440000,32.000000,790.000000,151.000000,380.000000,142.000000,2.700000,165000.000000 +-123.750000,40.110000,35.000000,2052.000000,477.000000,900.000000,402.000000,1.962500,101500.000000 +-123.750000,39.370000,16.000000,1377.000000,296.000000,830.000000,279.000000,3.250000,151400.000000 +-123.760000,41.030000,24.000000,2386.000000,565.000000,1058.000000,414.000000,2.064400,79800.000000 +-123.780000,40.050000,17.000000,2019.000000,496.000000,899.000000,347.000000,2.186400,101900.000000 +-123.790000,39.440000,49.000000,2290.000000,482.000000,1201.000000,479.000000,3.500000,113300.000000 +-123.790000,39.440000,36.000000,1330.000000,273.000000,761.000000,286.000000,2.781300,105800.000000 +-123.790000,39.440000,16.000000,2017.000000,423.000000,1177.000000,414.000000,3.217100,116200.000000 +-123.800000,39.470000,28.000000,2492.000000,507.000000,1202.000000,460.000000,2.785700,150300.000000 +-123.800000,39.440000,33.000000,2024.000000,459.000000,1019.000000,422.000000,1.920800,93600.000000 +-123.810000,39.340000,17.000000,1981.000000,371.000000,773.000000,325.000000,3.156300,277000.000000 +-123.810000,39.310000,23.000000,2754.000000,577.000000,887.000000,432.000000,3.365400,225000.000000 +-123.820000,40.160000,19.000000,2283.000000,634.000000,1184.000000,453.000000,1.222700,76800.000000 +-123.820000,40.120000,33.000000,2985.000000,591.000000,1221.000000,486.000000,2.087000,82400.000000 +-123.830000,41.880000,18.000000,1504.000000,357.000000,660.000000,258.000000,3.130000,116700.000000 +-123.840000,40.280000,28.000000,2809.000000,605.000000,1093.000000,438.000000,2.096200,74000.000000 +-123.840000,39.830000,19.000000,1461.000000,340.000000,515.000000,227.000000,1.527800,145800.000000 +-123.840000,39.460000,47.000000,1150.000000,244.000000,552.000000,201.000000,2.519200,110400.000000 +-123.850000,41.320000,31.000000,938.000000,238.000000,425.000000,157.000000,1.048600,36700.000000 +-123.850000,39.420000,11.000000,1804.000000,506.000000,895.000000,451.000000,1.757400,150000.000000 +-123.880000,40.930000,28.000000,1272.000000,259.000000,519.000000,220.000000,3.289100,106300.000000 +-123.910000,41.680000,22.000000,1880.000000,360.000000,743.000000,314.000000,2.968800,152700.000000 +-123.960000,40.570000,31.000000,1854.000000,365.000000,883.000000,310.000000,2.316700,92600.000000 +-123.980000,40.880000,41.000000,1719.000000,372.000000,844.000000,336.000000,2.692300,84200.000000 +-124.000000,40.920000,29.000000,1429.000000,268.000000,672.000000,266.000000,2.948500,98800.000000 +-124.000000,40.220000,16.000000,2088.000000,535.000000,816.000000,326.000000,1.319000,70700.000000 +-124.010000,40.890000,28.000000,1470.000000,336.000000,811.000000,314.000000,2.455900,75600.000000 +-124.020000,40.800000,22.000000,2588.000000,435.000000,1198.000000,442.000000,3.979200,133900.000000 +-124.020000,40.720000,28.000000,3513.000000,634.000000,1658.000000,598.000000,3.809500,119900.000000 
+-124.030000,40.450000,34.000000,1006.000000,213.000000,443.000000,158.000000,2.609400,71300.000000 +-124.050000,40.940000,14.000000,1452.000000,217.000000,516.000000,181.000000,5.032900,165600.000000 +-124.050000,40.850000,31.000000,2414.000000,428.000000,1005.000000,401.000000,3.515600,143000.000000 +-124.050000,40.590000,32.000000,1878.000000,340.000000,937.000000,353.000000,3.440800,95200.000000 +-124.060000,41.130000,22.000000,3263.000000,799.000000,1384.000000,578.000000,2.470800,119400.000000 +-124.060000,40.880000,12.000000,2087.000000,424.000000,1603.000000,438.000000,2.566700,139500.000000 +-124.060000,40.860000,34.000000,4183.000000,702.000000,1891.000000,669.000000,3.221600,98100.000000 +-124.070000,40.870000,47.000000,1765.000000,326.000000,796.000000,333.000000,2.213800,99200.000000 +-124.070000,40.870000,31.000000,334.000000,134.000000,780.000000,130.000000,0.768400,153100.000000 +-124.070000,40.810000,23.000000,2103.000000,411.000000,1019.000000,387.000000,2.991100,119700.000000 +-124.080000,41.360000,29.000000,1029.000000,239.000000,509.000000,196.000000,2.015600,62800.000000 +-124.080000,40.990000,18.000000,3297.000000,662.000000,1554.000000,578.000000,2.684700,111300.000000 +-124.080000,40.940000,18.000000,1550.000000,345.000000,941.000000,335.000000,2.314700,70100.000000 +-124.080000,40.910000,13.000000,2522.000000,719.000000,1381.000000,628.000000,1.666700,78800.000000 +-124.080000,40.860000,18.000000,1287.000000,484.000000,805.000000,502.000000,1.115700,150000.000000 +-124.080000,40.060000,17.000000,1319.000000,267.000000,393.000000,163.000000,2.625000,135600.000000 +-124.090000,40.920000,12.000000,2497.000000,491.000000,1153.000000,462.000000,2.818200,126900.000000 +-124.090000,40.880000,50.000000,921.000000,187.000000,420.000000,187.000000,2.218800,105800.000000 +-124.090000,40.880000,31.000000,1982.000000,495.000000,1052.000000,467.000000,1.532600,74100.000000 +-124.090000,40.880000,26.000000,2683.000000,555.000000,1353.000000,526.000000,2.432100,82100.000000 +-124.090000,40.870000,44.000000,692.000000,206.000000,398.000000,211.000000,1.157600,87500.000000 +-124.090000,40.860000,25.000000,1322.000000,387.000000,794.000000,379.000000,1.174200,75000.000000 +-124.090000,40.440000,38.000000,2220.000000,426.000000,1041.000000,401.000000,2.394700,70500.000000 +-124.100000,41.040000,26.000000,1633.000000,380.000000,890.000000,370.000000,1.974100,97900.000000 +-124.100000,40.900000,18.000000,4032.000000,798.000000,1948.000000,775.000000,2.732100,92600.000000 +-124.100000,40.880000,35.000000,2987.000000,578.000000,1581.000000,585.000000,2.065700,81100.000000 +-124.100000,40.730000,33.000000,644.000000,129.000000,334.000000,121.000000,3.965900,111800.000000 +-124.100000,40.500000,42.000000,2380.000000,553.000000,1300.000000,504.000000,1.757400,57500.000000 +-124.100000,40.500000,30.000000,1927.000000,393.000000,996.000000,374.000000,2.235700,72300.000000 +-124.100000,40.470000,52.000000,1196.000000,236.000000,965.000000,265.000000,3.534500,55000.000000 +-124.110000,40.950000,19.000000,1734.000000,365.000000,866.000000,342.000000,2.960000,81700.000000 +-124.110000,40.930000,25.000000,2392.000000,474.000000,1298.000000,461.000000,3.507600,73600.000000 +-124.110000,40.930000,17.000000,1661.000000,329.000000,948.000000,357.000000,2.763900,90200.000000 +-124.110000,40.810000,23.000000,959.000000,212.000000,425.000000,175.000000,2.553600,96100.000000 +-124.110000,40.570000,33.000000,1348.000000,234.000000,573.000000,236.000000,2.489600,74100.000000 
+-124.130000,40.800000,31.000000,2152.000000,462.000000,1259.000000,420.000000,2.247800,81100.000000 +-124.130000,40.790000,32.000000,2017.000000,359.000000,855.000000,346.000000,3.583300,92800.000000 +-124.130000,40.790000,29.000000,2474.000000,453.000000,1130.000000,427.000000,2.883300,83000.000000 +-124.130000,40.780000,34.000000,2142.000000,420.000000,1056.000000,382.000000,2.110100,86900.000000 +-124.130000,40.620000,43.000000,2131.000000,399.000000,910.000000,389.000000,2.580400,92100.000000 +-124.130000,40.550000,38.000000,544.000000,89.000000,240.000000,91.000000,3.250000,94800.000000 +-124.140000,41.950000,21.000000,2696.000000,578.000000,1208.000000,494.000000,2.275000,122400.000000 +-124.140000,41.060000,32.000000,1020.000000,215.000000,421.000000,198.000000,3.020800,143400.000000 +-124.140000,40.790000,38.000000,1552.000000,290.000000,873.000000,291.000000,2.489600,81000.000000 +-124.140000,40.780000,35.000000,2426.000000,423.000000,982.000000,432.000000,3.421900,92800.000000 +-124.140000,40.770000,27.000000,3046.000000,605.000000,1407.000000,571.000000,2.914300,99600.000000 +-124.140000,40.670000,23.000000,580.000000,117.000000,320.000000,109.000000,4.205400,130600.000000 +-124.140000,40.590000,22.000000,1665.000000,405.000000,826.000000,382.000000,1.562500,66800.000000 +-124.140000,40.590000,17.000000,2985.000000,610.000000,1544.000000,584.000000,2.178000,76800.000000 +-124.140000,40.580000,25.000000,1899.000000,357.000000,891.000000,355.000000,2.698700,92500.000000 +-124.140000,40.570000,29.000000,2864.000000,600.000000,1314.000000,562.000000,2.135400,75100.000000 +-124.150000,41.810000,17.000000,3276.000000,628.000000,3546.000000,585.000000,2.286800,103100.000000 +-124.150000,40.880000,33.000000,2235.000000,506.000000,1165.000000,441.000000,1.725000,57500.000000 +-124.150000,40.810000,50.000000,340.000000,74.000000,235.000000,83.000000,1.750000,67500.000000 +-124.150000,40.800000,47.000000,1486.000000,335.000000,765.000000,329.000000,1.755000,74100.000000 +-124.150000,40.790000,37.000000,2692.000000,488.000000,1263.000000,486.000000,3.021600,86400.000000 +-124.150000,40.780000,36.000000,2112.000000,374.000000,829.000000,368.000000,3.398400,90000.000000 +-124.150000,40.760000,24.000000,2858.000000,511.000000,1388.000000,512.000000,3.375000,100600.000000 +-124.150000,40.590000,39.000000,1186.000000,238.000000,539.000000,212.000000,2.093800,79600.000000 +-124.160000,41.740000,15.000000,2715.000000,569.000000,1532.000000,530.000000,2.182900,69500.000000 +-124.160000,41.020000,23.000000,1672.000000,385.000000,1060.000000,390.000000,2.172600,75500.000000 +-124.160000,40.950000,20.000000,1075.000000,214.000000,529.000000,196.000000,3.140600,96000.000000 +-124.160000,40.800000,52.000000,2416.000000,618.000000,1150.000000,571.000000,1.730800,80500.000000 +-124.160000,40.800000,52.000000,1703.000000,500.000000,952.000000,435.000000,1.138600,74100.000000 +-124.160000,40.790000,52.000000,2148.000000,421.000000,975.000000,430.000000,2.256600,92700.000000 +-124.160000,40.790000,52.000000,1264.000000,277.000000,591.000000,284.000000,1.777800,76900.000000 +-124.160000,40.780000,50.000000,2285.000000,403.000000,837.000000,353.000000,2.541700,85400.000000 +-124.160000,40.780000,46.000000,1975.000000,346.000000,791.000000,349.000000,3.800000,81800.000000 +-124.160000,40.780000,43.000000,2241.000000,446.000000,932.000000,395.000000,2.903800,82000.000000 +-124.160000,40.600000,39.000000,1322.000000,283.000000,642.000000,292.000000,2.451900,85100.000000 
+-124.170000,41.760000,20.000000,2673.000000,538.000000,1282.000000,514.000000,2.460500,105900.000000 +-124.170000,40.800000,52.000000,661.000000,316.000000,392.000000,244.000000,0.957000,60000.000000 +-124.170000,40.800000,52.000000,1606.000000,419.000000,891.000000,367.000000,1.585000,75500.000000 +-124.170000,40.800000,52.000000,1557.000000,344.000000,758.000000,319.000000,1.852900,62500.000000 +-124.170000,40.790000,43.000000,2285.000000,479.000000,1169.000000,482.000000,1.968800,70500.000000 +-124.170000,40.780000,39.000000,1606.000000,330.000000,731.000000,327.000000,1.636900,68300.000000 +-124.170000,40.770000,30.000000,1895.000000,366.000000,990.000000,359.000000,2.222700,81300.000000 +-124.170000,40.760000,26.000000,1776.000000,361.000000,992.000000,380.000000,2.805600,82800.000000 +-124.170000,40.750000,13.000000,2171.000000,339.000000,951.000000,353.000000,4.851600,116100.000000 +-124.170000,40.620000,32.000000,1595.000000,309.000000,706.000000,277.000000,2.895800,86400.000000 +-124.180000,40.790000,39.000000,1836.000000,352.000000,883.000000,337.000000,1.745000,70500.000000 +-124.180000,40.780000,37.000000,1453.000000,293.000000,867.000000,310.000000,2.553600,70200.000000 +-124.180000,40.780000,34.000000,1592.000000,364.000000,950.000000,317.000000,2.160700,67000.000000 +-124.180000,40.780000,33.000000,1076.000000,222.000000,656.000000,236.000000,2.509600,72200.000000 +-124.180000,40.620000,35.000000,952.000000,178.000000,480.000000,179.000000,3.053600,107000.000000 +-124.190000,41.780000,15.000000,3140.000000,714.000000,1645.000000,640.000000,1.665400,74600.000000 +-124.190000,40.780000,37.000000,1371.000000,319.000000,640.000000,260.000000,1.824200,70000.000000 +-124.190000,40.770000,30.000000,2975.000000,634.000000,1367.000000,583.000000,2.442000,69000.000000 +-124.190000,40.730000,21.000000,5694.000000,1056.000000,2907.000000,972.000000,3.536300,90100.000000 +-124.210000,41.770000,17.000000,3461.000000,722.000000,1947.000000,647.000000,2.579500,68400.000000 +-124.210000,41.750000,20.000000,3810.000000,787.000000,1993.000000,721.000000,2.007400,66900.000000 +-124.210000,40.750000,32.000000,1218.000000,331.000000,620.000000,268.000000,1.652800,58100.000000 +-124.220000,41.730000,28.000000,3003.000000,699.000000,1530.000000,653.000000,1.703800,78300.000000 +-124.230000,41.750000,11.000000,3159.000000,616.000000,1343.000000,479.000000,2.480500,73200.000000 +-124.230000,40.810000,52.000000,1112.000000,209.000000,544.000000,172.000000,3.346200,50800.000000 +-124.230000,40.540000,52.000000,2694.000000,453.000000,1152.000000,435.000000,3.080600,106700.000000 +-124.250000,40.280000,32.000000,1430.000000,419.000000,434.000000,187.000000,1.941700,76100.000000 +-124.260000,40.580000,52.000000,2217.000000,394.000000,907.000000,369.000000,2.357100,111400.000000 +-124.270000,40.690000,36.000000,2349.000000,528.000000,1194.000000,465.000000,2.517900,79000.000000 +-124.300000,41.840000,17.000000,2677.000000,531.000000,1244.000000,456.000000,3.031300,103600.000000 +-124.300000,41.800000,19.000000,2672.000000,552.000000,1298.000000,478.000000,1.979700,85800.000000 +-124.350000,40.540000,52.000000,1820.000000,300.000000,806.000000,270.000000,3.014700,94600.000000 diff --git a/sample_data/mnist_test.csv b/sample_data/mnist_test.csv new file mode 100644 index 0000000000000000000000000000000000000000..0bea413bf24149ab4bbc3e446c7984478ce371d4 --- /dev/null +++ b/sample_data/mnist_test.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:51c292478d94ec3a01461bdfa82eb0885d262eb09e615679b2d69dedb6ad09e7 +size 18289443 diff --git a/sample_data/mnist_train_small.csv b/sample_data/mnist_train_small.csv new file mode 100644 index 0000000000000000000000000000000000000000..7aa361d977d328126edf44b4dad536a83291408c --- /dev/null +++ b/sample_data/mnist_train_small.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ef64781aa03180f4f5ce504314f058f5d0227277df86060473d973cf43b033e +size 36523880 diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..a7a4556ebf3cf79f2887c3412068e8daea578538 --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,134 @@ +{ + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "eos_token": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "mask_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/spiece.model b/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..940a1b4a39fabbd3787e0239512599d758dab6e4 --- /dev/null +++ b/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0015189ef36359283fec8b93cf6d9ce51bca37eb1101defc68a53b394913b96c +size 1912529 diff --git a/tokenizer.json b/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..82fead4fcac736db203a33ffe6809317d3d8cda5 --- /dev/null +++ b/tokenizer.json @@ -0,0 +1,385478 @@ +{ + "version": "1.0", + "truncation": { + "direction": "Right", + "max_length": 512, + "strategy": "LongestFirst", + "stride": 0 + }, + "padding": { + "strategy": { + "Fixed": 512 + }, + "direction": "Right", + "pad_to_multiple_of": null, + "pad_id": 0, + "pad_type_id": 0, + "pad_token": "" + }, + "added_tokens": [ + { + "id": 0, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 1, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 2, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 3, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 4, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 5, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 6, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 7, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 8, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 9, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 10, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 11, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 12, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 13, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 14, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 15, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + 
"special": true + }, + { + "id": 16, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 17, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 18, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 19, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 20, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 21, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 22, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 23, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 24, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 25, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 26, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 27, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 28, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 29, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 30, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 31, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 33, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 34, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 35, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 36, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 37, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 38, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 39, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 40, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + 
"id": 41, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 42, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 43, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 44, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 45, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 46, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 47, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 48, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 49, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 50, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 51, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 52, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 53, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 54, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 55, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 56, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 57, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 58, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 59, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 60, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 61, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 62, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 63, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 64, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 65, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 66, + "content": "", + 
"single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 67, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 68, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 69, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 70, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 71, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 72, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 73, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 74, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 75, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 76, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 77, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 78, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 79, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 80, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 81, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 82, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 83, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 84, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 85, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 86, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 87, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 88, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 89, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 90, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 91, + "content": "", + "single_word": false, + 
"lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 92, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 93, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 94, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 95, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 96, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 97, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 98, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 99, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 100, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 101, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 102, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 103, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 104, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 105, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + } + ], + "normalizer": { + "type": "Sequence", + "normalizers": [ + { + "type": "Precompiled", + "precompiled_charsmap": 
"ALQCAACEAAAAAACAAQAAgMz8AgC4BQAAhyIAgMzkAgC4PQAAeyIAgMzsAgC4BQAAiyIAgMw8AADNvAAAmwkAgJ4JAIChCQCAgx0AAIAZAACBGQAAPR0AgDUdAIBNHQCARR0AgIAxAACBMQAApAkAgIkxAAA9WAMAPEgDAEAKAIA+aAMAAYUAAIQBAQADjQAAAokAAAWVAAAEkQAAB50AAAaZAAAJqQAACKEAAAutAAAKpQAADbkAAAy9AAAPvQAADrkAABHFAAAQwQAAE80AABLJAAAV1QAAFNEAABfdAAAW2QAAGeUAABjhAAAb7QAAGukAAB31AAAc8QAAH/0AAB75AABhOAkAZR0AgGNADgBi8AgAZSgPAGSADgBn2A8AZvAPAGlwDABoMAwAa/AMAGrYDABtSA0AbBwNAG8QEgBubA0ARgoAgHAMEwBzqBMAcuwTAHUoEAB0TBAAd9ARAHYUEAB50BYAePQQAF0dAIB69BYAdR0AgG0dAIB/fQEAhgwAgEGAAgDeCwCAQxgAAELAAABFSAAARGAAAEeQBgBGhAEASSgGAEhsAQBLOAcASvAHAE1wBwBMRAcAT/AEAE7MBACnCQCAUCwFAFOgCgBSEAUAVQAKAFRQCgBX0AgAVhALAFlICABYuAgAhBEAAFo8CACA9QAAgZ0AANgLAIAtHQCAg2kCAIJFAgCBNQIAgDUCAIdtAwCGVQMAgTkAAIRlAgAXDACAigEEAInVAwCI7QMAjwkAAKgLAIApDACAjAkAAC8MAICJMQMAkQkAAMzYAABVHQCAfR0AgL0aAIBMCgCAgGUDAIENAwCGPQAAgx0DAMwQAgDNhAEAgikAAMx0AwCjgQYAxRoAgICxAgCBsQIAzRoAgIEpAAClwQAA1RoAgMzoAwDNYAIAUgoAgKjxAABYCgCAXgoAgGQKAIDdGgCAgWkAAMzcBACCEQEA5RoAgGoKAIDtGgCA/RoAgAUbAID1GgCAswkAgMygBADN3AQAzAgBALYJAIClHQCAhhEBAOEAKwDgfCcA44hIAuIMOAKdHQCAh5EBALUdAICtHQCAgNkBAIE1AADMxAIA6kRkApUdAIANGwCA72hkAoERBwCC8QEA8NCLAolVAACB5QEAFRsAgIfhAQCAbQAAgQ0AAIN5AAB2CgCAgXkAAICVAQDMOAEAzRQBAIzBAQB8CgCAvAkAgKMVAQDDlBcAwpwUAMWEFwDEUBcAx+wXAMaAEgCNHQCAiAoAgMvQFgDK4BYAzRQWADUMAIDPvCAAzpwZANHMJADQ2CUA0+gkALFRAQA7DACAp90HAL0dAIDWvCQA2cgnANjUIgDb+CcALRsAgIftBwCCCgCAzPgEAB0bAIAlHQCAh8kGALAJAICR3QcAuQkAgCUbAIBwCgCANRsAgIUdAICMDACAjPkGAAsMAICA1QYAgcEGAMzEAgDNBAUAglEAAIN1BwCArQYAgbkGAIY1BwCHKQcAhEEAAI4KAICn7QAAPRsAgIjpBwCJzQcAlAoAgI/BBwCM3QcAmgoAgOoLAICnXQYAsJ0AAKAKAICmCgCAo0EGAEUbAIBVGwCAfQwAgE0bAIBdGwCArXEGAGUbAIC/CQCAzPgDAM0sAwDCCQCAo+UAAMUJAICMTQAAsgoAgKfxAAC4CgCAsT0GAIedAACGlQAAqB0HAISJAAC+CgCAgqkAAIHVAACtAQcAygoAgJE9AACCmQEAyAkAgM0MBQDMCAUAgT0AAIeFAQCIvQEAdRsAgMUdAICuCwCAjJEBAEEMAIBHDACAzR0AgID1AQCBhQEAgoEBAIOdAQCEiQEAxAoAgIapAQCHXQAAiG0AAIlNAABtGwCAzBACAIxdAACCDQAA0AoAgI9JAACw6QAAfRsAgPALAICjKQEAgCUBAIFVAQCFGwCApzUBAMykAQDNEAIA1goAgI0bAICBNQAA3AoAgK4JAQDoCgCAzOgBAM0oAgCVGwCAo/EAAIQFAACdGwCA4goAgK0bAICotQAApRsAgIFdAAC1GwCAzPwBAM3AAQC9GwCAxRsAgIGFAwARDACAgeUDAO4KAICH6QMAywkAgIylAwDNGwCA+goAgKoJAIDVGwCAgZkDAIHdAwCMvQMAzSQBAMwgAQDMEAIAzTACAIH5AACHUQAAgFUAAIFZAAD0CgCAg0kAAIxBAADlGwCA3RsAgM4JAICBfQAAgHEAAMwgAwDNsAMAo30DANEJAICjEQMA7R0AgIEtAQCx/QAApzEDAK1BAwDlHQCAo20DAP0dAID1HQCA7RsAgKdtAwCANQAAgR0AALFtAwCILQAAmAwAgKeVAACBcQAAgFkAAINxAACj9QAAgVEAAK2BAAD1GwCAsQkDAIldAACEPQAAzDgBAISdAQCBGQAAgAkAAIRlAAD9GwCAzNAHAMzwBwAFHACAkYkAAMxMBgDNBAYAzHAGAM10BgDMQAcAmy0PAMyoBwDNrAcAhg0AAIdVDwCEQQ8ACQsAgIIBDACDVQ8AgDUBAIHZAQCkDACAj+kAAIztAACSDACA3R0AgIv1AACIbQ8AiQ0AAA8LAIC0CwCAgiUAAE0MAICBQQAAUwwAgBUeAIANHgCAJR4AgB0eAIAtHgCABR4AgIApAACBKQAA/AsAgA0cAICEeQAAFRwAgIFNAQCAoQEAGAsAgKP9DwDMOAIAzUgDAB0cAICBWQAAzXwCAMykDQAkCwCAWQwAgKjJDwCHOQAA1wkAgImhDwADCwCAkREAAJ4MAIDaCQCAmQsAgF8MAICAuQ8AgbkPANUdAICDjQ8A9gsAgCUcAICEBQAALRwAgB4LAIA1HACAKgsAgIGdDwCHIQAAh7UPAMyoAgDN6AIAzLQMAM3cDACmzQAAp8UAAE0cAICPgQ8AjIkPAKPlAAAwCwCAPRwAgDwLAICxyQAAhwUAAFUcAIBFHACAhz0AAF0cAIBxDACANgsAgKMFDwCB+QAAzKgDAGUcAIBICwCAjEkAAKPxAABtHACAdwwAgEILAICnlQAAfRwAgHUcAIDMrAMAzcgAAN0JAICHaQAA4AkAgIG9AACCeQAA4wkAgIe5AQBOCwCAkaUAAIEdAACdHACAVAsAgIgFAAClHACAm5EAAFoLAIDmCQCAjJEBANILAIDGCwCAwAsAgMwLAICDRQAAgrkBAIG5AQCApQEAPR4AgIZxAABgCwCAhEkAAIsVAACKPQAAiTkAAIhFAACP+QAAZgsAgLoLAICMBQAAp1EBAKZJAQBlDACAsHkAAKNZAQCMqQAAgKkAAIGpAACBlQAAgJUAAK1xAQBrDACAogsAgISNAABNHgCARR4AgKMhAABdHgCAVR4AgGUeAICBbQAAgG0AALEFAQCkOQAANR4AgIUcAIBsCwCAqAUAAJUcAICNHACArQkAAMywAQCBvQMAgL0DAIPNAwCtHACAtRwAgL0cAIDMvAEAzYQBAInpAwDMHAEAgdkCAIDFAgDNOAEAzDwBAMxoAgDNRAIAg00AAMUcAICH2QAAhy0AAIBFAACBEQAAggUAAHILAIDVHACAzR
AQCw2QEAsaEBALKhAQCzoQEAtKEBALWpAQC2kQEAt5EBAKPRBQDybgCA9m4AgPpuAID+bgCApu0FAKXpBQACbwCAq2UCAKodAgAGbwCACm8AgK9tAgCuZQIArXUCAKx1AgAObwCAEm8AgBZvAIAabwCAHm8AgCJvAIAmbwCAKm8AgIA9AACBCQAAghkAAC5vAIAybwCAOm8AgL48AwA+bwCAhgAMAIcUAwBCbwCAs9UDAEZvAIC1PQMAtjUDAEpvAIBObwCAv4wKALoRAwC7EQMAvLUAAL29AAC+tQAAv60AAFJvAIDjdAEAVm8AgOG8AQBabwCAXm8AgGJvAIBmbwCAam8AgG5vAIBybwCAdm8AgHpvAIDvdAIAfm8AgIJvAICoTQIAqVECAKpRAgCrqQIArLkCAK25AgCuqQIAr6kCAIRsDQCGbwCAim8AgI5vAICSbwCAlm8AgJpvAIC+dA0AuG0BALkFAQC6DQEAuwUBALwdAQC9BQEAvg0BAL8FAQCw2QIAsdkCALJtAQCzZQEAtH0BALVlAQC2ZQEAt1UBAOG4AQDhUAcA47QAAON8BwCAqQAAgQkAAII5AACebwCAom8AgKpvAICubwCAsm8AgO4AAAC2bwCA7wAAAO9kBgCGYAwAh+QMAKORAgC6bwCApXkCAL5vAIDCbwCApnECAMZvAIDKbwCAq1UCAKpVAgCt+QEArPEBAK/pAQCu8QEApm8AgDZvAIDObwCA0m8AgNZvAIDabwCA3m8AgOJvAICoVQ4AqVkOAKqhDgCrvQ4ArK0OAK2VDgCu+Q4Ar/UOALCRDgCxkQ4AspEOALORDgC0sQ4AtbEOALaxDgC3sQ4AuJEOALmdDgC6lQ4Au0kPALxZDwC9WQ8AvkkPAL9JDwCzCQ4A5m8AgOpvAIDubwCA8m8AgLY1DgC1BQ4A9m8AgLt1DgC6dQ4A+m8AgP5vAIC/VQ4AvlUOAL1lDgC8ZQ4AAnAAgKNNDgAGcACACnAAgKZxDgAOcACAEnAAgKVBDgCqMQ4AqzEOAISkAwC+pAMArhEOAK8RDgCsIQ4ArSEOAKilDgCprQ4AqqUOAKu5DgCs3Q4ArcEOAK7BDgCv/Q4AgO0BAIHxAQCC8QEAFnAAgIaQAQCHtAEAGnAAgB5wAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALCFDgCxbQEAsmUBALN9AQC0ZQEAtW0BALZlAQC3+QEAsy0OACJwAIAmcACAKnAAgC5wAIC2QQ4AtVUOADJwAIC7qQEAukEOADZwAIA6cACAv6kBAL6hAQC9qQEAvLEBAD5wAICjaQ4AQnAAgEZwAICmBQ4ASnAAgE5wAIClEQ4AqgUOAKvtAQBScACAVnAAgK7lAQCv7QEArPUBAK3tAQCoOQMAqTkDAKqNAwCrhQMArJ0DAK2FAwCuhQMAr7UDAFpwAIBecACAYnAAgGZwAIBqcACAbnAAgHJwAIB2cACAuGEAALlhAAC6YQAAu2EAALxhAAC9YQAAvmEAAL9hAACwzQMAsaUDALKhAwCzoQMAtKUDALWtAwC2kQMAt5EDAIANAACBEQAAghEAAHpwAIDv9AIAfnAAgIJwAIC+HAMA4xQCAISIAgDhgAEAinAAgI5wAICScACAh8gDAIY8BAC7AQMAumkDAJZwAICacACAvwkDAL4BAwC9FQMAvBUDALNlAwCecACAonAAgKZwAICqcACAtmUDALV1AwCucACAsnAAgLZwAIC6cACAo4kCAL5wAIClmQIApokCAMJwAICELAIAxnAAgKqFAgCr7QIArPkCAK35AgCu7QIAr+UCAMpwAIDOcACAvkQFAIRMBQDScACA1nAAgNpwAIDecACA4nAAgOZwAIDqcACA7nAAgIAZAACBGQAAggUAAPJwAIDhGA8A4VwOAOO4DgDjdAEA+nAAgP5wAIACcQCABnEAgIYABACHZAUACnEAgA5xAIAScQCAFnEAgO98DgDvqAEAs3UBABpxAIAecQCAInEAgCZxAIC2MQEAtRUBACpxAIC7HQEAuhUBAC5xAIAycQCAv+EAAL79AAC9/QAAvP0AAPZwAIA2cQCAOnEAgD5xAICGcACAQnEAgEZxAIBKcQCAqI0GAKmVBgCqnQYAq+UGAKz9BgCt0QYArtEGAK/RBgCwsQYAsbkGALJJBwCzSQcAtFkHALVFBwC2RQcAt3kHALghBwC5IQcAujkHALs5BwC8KQcAvSkHAL4ZBwC/GQcAozUGAE5xAIBScQCAVnEAgFpxAICmcQYApVUGAF5xAICrXQYAqlUGAGJxAIC+oAMAr6EHAK69BwCtvQcArL0HAIBRAACBWQAAgmEAALNVBwCF9AAAtX0HALZ1BwBmcQCAhgAcAIfkAQC6LQcAuyUHALw9BwC9JQcAviUHAL8VBwCokQYAqZEGAKqRBgCrkQYArLkGAK25BgCuqQYAr6kGAGpxAIBucQCAcnEAgHZxAICiIQEAozUBAKA5BQChEQQAuEkBALlJAQC6XQEAu1UBALxNAQC90QEAvtEBAL/RAQCwpQYAsa0GALKlBgCzvQYAtK0GALWdBgC2lQYAt3kBAKMZBgCPnXkAenEAgH5xAICCcQCApjkGAKUxBgCGcQCAq2kGAKphBgCKcQCAjnEAgK9ZBgCuaQYArWkGAKxxBgCeiQgAn8EFAJzJCQCdyQkAmqENAJu9DACYsQ0AmbkNAJahcQCXRXEAlEV1AJWxcQCSoXUAk7V1AJDleQCRzXkAil1yAItFcgCScQCAvoAcAI51DgCPZQ4AjLlyAI11DgCCOXoAgzl6AJZxAICacQCAhnF2AIeZdgCECXoAhW12AJptBwCbVQIAnnEAgKJxAICmcQCA4ZAAAJxZAgDjCBoAkgkPAJNlCgCqcQCA7zgWAJZ1BgCXdQYAlH0KAJU1CwCpjRYAqIUWAKsBEACqMRYArXESAKy1EgCvuS4ArgEsAKF9AgCucQCAo6EeAKKpHgClsRoApPUfAKflGwCmsRoAhMwDAIRMHACycQCAtnEAgLpxAIC+cQCAwnEAgMZxAICxASgAsNkuALONKgCy6SoAtfUmALQBJACEcB0AynEAgID9AQCBFQAAgh0AAL6AHADOcQCA0nEAgIe4AgCGPB0A2nEAgN5xAIDicQCA5nEAgOpxAIDucQCA8nEAgPZxAID6cQCA/nEAgAJyAIAGcgCA44ADAApyAIDhoAEADnIAgO+UAwAScgCAFnIAgBpyAIAecgCAInIAgCZyAIAqcgCALnIAgOE8BgAycgCA49AGADZyAIDhMAcAOnIAgOOsBgCAOQAAgRUAAIIdAADvHAYAPnIAgEJyAIC+uB8A7+gBALPpAgBKcgCAh8QcAIbsHABOcgCAtlkCALVRAgBScgCAu00CALpNAgBWcgCAWnIAgL+5AQC+2QEAvdEBALz1AQCjKR0A1nEAgEZyAIBecgCAYnIAgKaZHQClkR0AZnIAgKuNHQCqjR0AanIAgG5yAICveR4ArhkeAK0RHgCsNR4AcnIAgLNtHwB
2cgCAenIAgLZlHwB+cgCAgnIAgLVtHwC6IR8AuyEfAIZyAICKcgCAviUfAL8pHwC8MR8AvTEfAKihHwCpoR8AqqEfAKuhHwCsoR8AraEfAK6hHwCvoR8AjnIAgJJyAICWcgCAmnIAgJ5yAICicgCApnIAgKpyAIC4rR8AubUfALq9HwC7tR8AvK0fAL1VHwC+UR8Av00fALChHwCxoR8AsqEfALOhHwC0pR8AtakfALadHwC3lR8AoykeAIIZAACBGQAAgLEBAK5yAICmIR4ApSkeALJyAICrZR4AqmUeAIaIAACH/AEAr20eAK5hHgCtdR4ArHUeALZyAICzmR4AunIAgL5yAIC2XQEAwnIAgMZyAIC1sR4AukkBALtJAQDKcgCAznIAgL49AQC/IQEAvDkBAL01AQCoRR4AqVUeAKpVHgCrZR4ArH0eAK2ZAQCuiQEAr4EBAISsAADScgCA1nIAgNpyAIDecgCA4nIAgOZyAIDqcgCAuK0BALllAQC6bQEAu2UBALx9AQC9ZQEAvm0BAL9lAQCwyQEAsckBALKpAQCzpQEAtL0BALWhAQC2oQEAt5UBALhpHAC5oRwAusEcALvBHAC8wRwAvcEcAL7BHAC/wRwAsIkfALGJHwCyIRwAswUcALQdHAC1fRwAtnUcALdtHACoYR8AqWEfAKphHwCrYR8ArNkfAK3ZHwCuyR8Ar8EfAO5yAIDycgCA9nIAgPpyAID+cgCAAnMAgAZzAIAKcwCADnMAgBJzAIC+AAQAo1EdABZzAICleR0AppUCABpzAIAecwCAInMAgKqBAgCrgQIArPECAK39AgCu9QIAr+kCACpzAIDh9AEALnMAgON8AQCATQAAgXUAAIJ9AAAycwCAhsAEAIekBAA2cwCAOnMAgD5zAIBCcwCARnMAgO+MAgCoSQIAqUkCAKpdAgCrVQIArHkCAK15AgCuvQIAr7UCAISgBQBKcwCATnMAgFJzAIC+vAQAVnMAgFpzAIBecwCAuC0BALk1AQC6PQEAuzUBALwtAQC91QEAvt0BAL/NAQCwzQIAsdUCALLdAgCz1QIAtM0CALUVAQC2HQEAtxUBAOGEHgDjbB8A41wfAOFYHgBicwCAZnMAgGpzAIBucwCAcnMAgHZzAIB6cwCAfnMAgOkAAADv9B4A70weAIJzAICzlQIAhnMAgIpzAICOcwCAknMAgLa5AgC1sQIAmnMAgLtRAgC6SQIAhsgEAIesBAC/kQEAvkkCAL1BAgC8SQIAJnMAgKNRBQCecwCAlnMAgKZ9BQCicwCApnMAgKV1BQCqjQUAq5UFAKpzAICucwCAro0FAK9VBgCsjQUArYUFAICJBwCBiQcAgpkHALORBgCycwCAtbkGALapBgC2cwCAunMAgL5zAIC6TQcAu0UHALxdBwC9QQcAvkEHAL9BBwCoQQYAqU0GAKpVBgCrZQYArH0GAK1lBgCubQYAr2UGAMJzAIDGcwCAynMAgM5zAIDScwCA1nMAgNpzAIDecwCAuFkHALlZBwC6aQcAu2kHALx5BwC9eQcAvmUHAL8ZBwCwxQcAsc0HALLFBwCz2QcAtMkHALXJBwC2aQcAt2kHAKPdBwDicwCA5nMAgOpzAIDucwCApuUHAKX1BwDycwCAqwkGAKoBBgD2cwCA+nMAgK8NBgCuDQYArQ0GAKwRBgCAbQAAgQkAAIIZAAD+cwCAAnQAgISYAQC+kAEABnQAgIbAAACH5AEACnQAgA50AIASdACAFnQAgBp0AIAedACAqF0GAKmNAQCqnQEAq5UBAKy5AQCtuQEArskBAK/BAQCEoAAAInQAgCZ0AIAqdACALnQAgDJ0AIA2dACAOnQAgLh5AQC5eQEAus0AALvFAAC83QAAvcUAAL7FAAC/9QAAsIEBALGBAQCySQEAs0kBALRZAQC1WQEAtkkBALdJAQCzFQIAPnQAgEJ0AIBGdACASnQAgLY5AgC1MQIATnQAgLtFAgC6RQIAUnQAgFZ0AIC/nQIAvp0CAL2dAgC8nQIAhXw+AKNRAgBadACAXnQAgKZ9AgBidACAZnQAgKV1AgCqAQIAqwECAGp0AIBudACArtkCAK/ZAgCs2QIArdkCAIDpAACB6QAAggUAAHJ0AIC+AAwAenQAgIeoAwCGvAwAfnQAgIJ0AICGdACAinQAgI50AICSdACAlnQAgJp0AICedACAonQAgKZ0AICqdACA42ABAK50AIDhoAEAsnQAgO+IAgC2dACAunQAgL50AIDCdACAxnQAgMp0AIDOdACAqGkCAKlpAgCqeQIAq3kCAKxpAgCtaQIArr0CAK+1AgC+rAwA0nQAgNZ0AIDadACAgB0AAIEJAACCqQAA3nQAgLhRAQC5WQEAumEBALthAQC8GQEAvRkBAL4NAQC/BQEAsM0CALHVAgCy3QIAs9UCALTNAgC1cQEAtnEBALdxAQDjxAAA4XwHAOF4BgDjvAYA4nQAgIQYDQCGuAwAhzwNAL4sDwDqdACA7nQAgPJ0AIDvEAAA9nQAgPp0AIDvdAYA/nQAgAJ1AIAGdQCAs70CAAp1AIC1rQIAtqUCAA51AIASdQCAFnUAgLpFAgC7XQIAvEUCAL1NAgC+RQIAv/kBAHZ0AIClfQ0ApnUNAOZ0AIAadQCAHnUAgCJ1AICjbQ0ArJUNAK2dDQCulQ0ArykOACZ1AIAqdQCAqpUNAKuNDQCz5Q4ALnUAgDJ1AIA2dQCAOnUAgLblDgC19Q4APnUAgLuhDgC62Q4AQnUAgEZ1AIC/pQ4AvrkOAL2xDgC8uQ4AqBUOAKklDgCqLQ4AqyUOAKw9DgCtJQ4Ari0OAK8lDgCADQAAgRUAAIIdAABKdQCATnUAgFJ1AICEMAMAVnUAgLgpDgC5KQ4AujkOALs5DgC8KQ4AvSkOAL79DwC/9Q8AsF0OALElDgCyLQ4AsyUOALQ9DgC1IQ4AtiUOALcZDgCjpQ8AWnUAgIYoAQCHTAEAXnUAgKalDwCltQ8AYnUAgKvhDwCqmQ8AZnUAgGp1AICv5Q8ArvkPAK3xDwCs+Q8AbnUAgLPpDgBydQCAdnUAgLaRDgB6dQCAfnUAgLXlDgC6sQ4Au7kOAIJ1AICGdQCAvmEBAL9hAQC8mQ4AvZkOAKglDgCpLQ4AqiUOAKs5DgCsKQ4ArVUOAK5dDgCvVQ4AinUAgI51AICSdQCAlnUAgJp1AICedQCAonUAgKZ1AIC49QEAuYEBALqBAQC7gQEAvIEBAL2JAQC+sQEAv7EBALAxDgCxOQ4AsgkOALMJDgC04QEAteEBALbhAQC3zQEAo60NAKp1AICudQCAsnUAgLZ1AICm1Q0ApaENALp1AICr/Q0AqvUNAL51AIDCdQCAryUCAK4lAgCt3Q0ArN0NAIBdAACBbQAAgmUAALNRAwC+nAMAtXkDALYZAwDKdQCAhOACAM51AIC6PQMAuzUDALwZAwC9GQMAvtkDAL/ZAwCohQMAqZUDAKqVAwCrpQMArL0DAK3VAwCu0QMAr9EDAIYABACHNAMAv6AzANJ1AIDWdQ
CA2nUAgN51AIDidQCAuHEDALlxAwC6cQMAu3EDALzVAAC93QAAvtUAAL/NAACwtQMAsb0DALKBAwCzgQMAtFEDALVRAwC2UQMAt1EDAO+oAwDmdQCA6nUAgO51AICEHAIA8nUAgPZ1AID6dQCAviwFAP51AIACdgCABnYAgONAAwAKdgCA4SgAAA52AICjXQIAEnYAgBZ2AIAadgCAHnYAgKYVAgCldQIAInYAgKs5AgCqMQIAJnYAgCp2AICv1QIArtUCAK0VAgCsFQIA4ygBAOEADwDhCA4A4wgOAID9AACBCQAAgjkAAC52AIAydgCAOnYAgD52AIBCdgCA7+gOAEZ2AIBKdgCA72QOALNtAQBOdgCAhugEAIcMBQBSdgCAtm0BALVtAQBWdgCAu+0AALrtAABadgCAXnYAgL/VAAC+6QAAveEAALzpAACoXQYAqWEGAKqlBgCrvQYArKUGAK2tBgCupQYArxkHADZ2AIBidgCAZnYAgGp2AIBudgCAcnYAgHZ2AIB6dgCAuHUHALl5BwC6DQcAuwUHALwdBwC9BQcAvgUHAL81BwCwaQcAsWkHALJ9BwCzdQcAtG0HALVRBwC2UQcAt1EHAKMtBgB+dgCAgnYAgIZ2AICKdgCApi0GAKUtBgCOdgCAq60HAKqtBwCSdgCAlnYAgK+VBwCuqQcAraEHAKypBwCADQAAgRUAAIIdAACadgCAnnYAgKJ2AICEVAMAvlwAAKZ2AICqdgCAhugAAIdMAwCudgCAsnYAgLZ2AIC6dgCAvnYAgOMEBADCdgCA4bQFAMZ2AIDKdgCAznYAgNJ2AIDWdgCA2nYAgN52AIDidgCA5nYAgO/sBADqdgCA7nYAgLPtBgDydgCA9nYAgPp2AID+dgCAtpEGALXhBgACdwCAu40GALqNBgAGdwCACncAgL9BAQC+WQEAvVEBALxZAQCoJQYAqS0GAKolBgCrOQYArCkGAK1RBgCuSQYAr0EGAIDNAACBCQAAghkAAA53AIASdwCAhCwBAL40AAAadwCAuP0BALlBAQC6QQEAu0EBALxBAQC9SQEAvnEBAL9xAQCwCQYAsQkGALLNAQCzxQEAtN0BALXFAQC2zQEAt8UBAIagPACHRAMAHncAgKOhBQAidwCApa0FAKbdBQAmdwCAKncAgL4oPACqwQUAq8EFAKwVAgCtHQIArhUCAK8NAgC2QQMALncAgDJ3AIC1sQIANncAgLOhAgA6dwCAPncAgL5FAwC/TQMAvHUDAL1NAwC6ZQMAu20DAEJ3AIBGdwCASncAgE53AIDGdQCAUncAgFZ3AIBadwCAXncAgGJ3AICoRQIAqVUCAKpdAgCrVQIArE0CAK21AwCusQMAr60DALDVAwCx3QMAstUDALPtAwC09QMAtf0DALb1AwC37QMAuNkDALnZAwC6rQMAu6UDALy9AwC9pQMAvqUDAL+VAwCj9QMAZncAgGp3AIBudwCAcncAgKYVAgCl5QMAdncAgKs5AgCqMQIAencAgH53AICvGQIArhECAK0ZAgCsIQIAgGkAAIFpAACCBQAAgncAgIp3AICOdwCAkncAgO8cAACEbAIA4ZQBAJZ3AIDjyAAAmncAgJ53AICGWDwAh1A9AKJ3AICmdwCAqncAgISEPQCudwCAsncAgLZ3AIDvuAEAvmw8AOF0BgC6dwCA42QBAL53AIDCdwCAxncAgMp3AICz0QEAzncAgNJ3AIDWdwCA2ncAgLaRAQC1+QEA3ncAgLu9AQC6vQEA4ncAgOZ3AIC/dQEAvnUBAL2FAQC8hQEAqL09AKkNPgCqGT4AqxE+AKwxPgCtUT4ArlE+AK9NPgCGdwCAgh0AAIEdAACAHQAA6ncAgO53AIDydwCA9ncAgLjVPgC53T4AutU+ALtJPwC8WT8AvVk/AL5JPwC/QT8AsDk+ALE5PgCyET4AsxE+ALTxPgC18T4AtvU+ALftPgCjkT4A+ncAgIYoAACHwAMA/ncAgKbRPgCluT4AAngAgKv9PgCq/T4ABngAgAp4AICvNT4ArjU+AK3FPgCsxT4ADngAgLOdPwASeACAFngAgLalPwAaeACAHngAgLWtPwC6aT8Au3U/ACJ4AIAmeACAvlk/AL9FPwC8bT8AvWU/ACp4AIAueACAMngAgDZ4AIDjYDwAOngAgOEAPQA+eACA7/w9AEJ4AIBGeACASngAgE54AIBSeACAVngAgFp4AICjGT4AghkAAIEZAACAcQAAXngAgKYhPgClKT4AYngAgKvxPgCq7T4AhCQBAL4kAQCvwT4Art0+AK3hPgCs6T4AqNE+AKnRPgCq0T4Aq+U+AKzhPgCt4T4Arhk+AK8ZPgCGAAAAh4QAAGp4AIBueACAcngAgHZ4AIB6eACAfngAgLh9PgC5AT4AugE+ALsBPgC8AT4AvQk+AL4xPgC/MT4AsGk+ALF1PgCyfT4As3U+ALRZPgC1RT4Atk0+ALdFPgCohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIJ4AICGeACAingAgL8k5gGOeACAkngAgJZ4AICaeACAuFUDALlZAwC6bQMAu2UDALx9AwC9ZQMAvm0DAL9lAwCwtQIAsb0CALKBAgCzgQIAtHEDALVxAwC2cQMAt3EDALMdAgCeeACAongAgKZ4AICEiAMAtlUCALU1AgAWdwCAu3kCALpxAgCqeACArngAgL+1AwC+tQMAvVUCALxVAgCyeACAo1kCALZ4AIC6eACAphECAL54AIDCeACApXECAKo1AgCrPQIAxngAgMp4AICu8QMAr/EDAKwRAgCtEQIAqKkCAKmpAgCquQIAq7kCAKypAgCtqQIArjkBAK85AQCAzQEAgQkAAIIZAADOeACA0ngAgL64BQDaeACA3ngAgLjpAQC56QEAuokBALuFAQC8nQEAvYEBAL6BAQC/tQEAsEkBALFVAQCyXQEAs1UBALRNAQC18QEAtvEBALfxAQDvFAAA4ngAgIaoBQCH3AUA5ngAgIRYBADqeACA78Q+AO54AIDhxD4A8ngAgOMwPgDjyAAA9ngAgOEoAQD6eACAtn0CAP54AIACeQCAtXUCAAZ5AICzZQIACnkAgA55AIC+3QEAv2EBALzdAQC91QEAutkBALvFAQASeQCAFnkAgKOxBQDWeACAGnkAgB55AIAieQCApqkFAKWhBQAmeQCAqxEGAKoNBgAqeQCALnkAgK+1BgCuCQYArQEGAKwJBgAyeQCANnkAgDp5AIA+eQCAgBkAAIEZAACCBQAAQnkAgL5sAwBGeQCAhsgAAIccAwBKeQCATnkAgFJ5AIBWeQCAqLkHAKm5BwCqDQcAqx0HAKwJBwCtNQcArjEHAK8pBwCEqAMAWnkAgF55AIBieQCAZnkAgGp5AIBueQCAcnkAgLjJAAC5yQAAutkAALvRAAC8+QAAvfkAAL6ZAAC/mQAAsF0HALEhBwCyIQcAsz0HALQpBwC1KQcAtgEHALcBBwCzhQYAdnkAgHp5AIB+eQCAgnkAgLa1BgC1gQYAh
nkAgLvlBgC6mQYAinkAgI55AIC/7QYAvu0GAL3pBgC89QYAknkAgJZ5AICaeQCAnnkAgKJ5AICmeQCAqnkAgO+QBACueQCA4dwGALJ5AIDj7AUAgCkAAIEVAACCEQAAvnwBAKMFBgC6eQCAhigAAIdMAQC+eQCApjUGAKUBBgDCeQCAq2UGAKoZBgDGeQCAynkAgK9tBgCubQYArWkGAKx1BgDOeQCAs70BANJ5AIDWeQCAtnkBANp5AIDeeQCAtXkBALpVAQC7XQEA4nkAgOZ5AIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgCE7AwA6nkAgO55AIDyeQCA9nkAgPp5AID+eQCAAnoAgLhpAwC5aQMAugkDALsJAwC8GQMAvRkDAL4JAwC/CQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwAGegCACnoAgA56AICj9QIAEnoAgKUxAgCmMQIAFnoAgBp6AIAeegCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMAgGEAAIFhAACCBQAAInoAgIbwDACHYAMAvhAMACp6AIBmeACALnoAgDJ6AIA2egCAOnoAgD56AIBCegCARnoAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIASnoAgE56AIBSegCAVnoAgFp6AIBeegCAYnoAgGZ6AIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4RAGAIRIDADjDAYAanoAgISYDABuegCAcnoAgHZ6AIB6egCAfnoAgIJ6AICGegCAgXUAAIB1AADvIAEAgnUAAIp6AICOegCAknoAgL7ADACFtA4A4RACAO9cAADjABYA4ZABAJp6AIDjWAEA7zwHAJ56AICiegCAhgAIAIe4DACznQ0AJnoAgKZ6AICqegCArnoAgLbVDQC1tQ0AsnoAgLv5DQC68Q0AtnoAgLp6AIC/GQ4AvhEOAL3VDQC81Q0AvnoAgKPZDQDCegCAxnoAgKaRDQDKegCAznoAgKXxDQCqtQ0Aq70NANJ6AIDWegCArlUOAK9dDgCskQ0ArZENAKhdDgCpYQ4AqmEOAKthDgCsYQ4ArWEOAK5hDgCvYQ4A2noAgN56AIDiegCA5noAgOp6AIDuegCA8noAgPZ6AIC4TQ8AuVEPALpRDwC7UQ8AvHEPAL1xDwC+cQ8Av3EPALDBDwCxwQ8AssEPALPBDwC0wQ8AtcEPALbBDwC3wQ8As+kPAPp6AIC+gAEA/noAgJZ6AIC24Q8AtekPAAJ7AIC7BQ4AugUOAAp7AIAGewCAvwUOAL4FDgC9FQ4AvBUOAIFNAACAQQAA72gNAIJRAACG8AcAh9QBAA57AIASewCAFnsAgIRwAQAaewCAHnsAgOHgDgAiewCA40gNACZ7AICjaQ8AKnsAgC57AIAyewCANnsAgKZhDwClaQ8AOnsAgKuFDgCqhQ4APnsAgEJ7AICvhQ4AroUOAK2VDgCslQ4ARnsAgLMxDgBKewCATnsAgLbBAQBSewCAVnsAgLXRAQC6zQEAu6UBAFp7AIBeewCAvqUBAL+tAQC8sQEAvbEBAI/dJgCj8Q0AYnsAgGZ7AICmAQIAansAgG57AIClEQIAqg0CAKtlAgByewCAviAEAK5lAgCvbQIArHECAK1xAgCfoQwAnnkKAJ1pCgCc0QgAm7E2AJp1NgCZ0TQAmOEyAJdtMgCWZTIAlTU/AJRhPgCTcT4AkjU7AJFxOgCQeToAgJUAAIGdAACCoQAAensAgO9EAgDhdA8AfnsAgOMcDwDj1AEAgnsAgOHgAQDvXAEAo7UCAKJBAACh3Q4AoLkOALWpAwCGewCAhMAEALahAwCG8AUAh+QEALOFAwCKewCAvXEDALxpAwC/QQMAvnEDAI57AIC2eQCAu3EDALp5AwCC3ScAgwE7AL6EBwC+wAYAhhE/AIcZPwCEETsAhV06AIp9PgCLJTMAknsAgJZ7AICOuTUAjxU3AIw1MwCNgTMAkqE3AJPZCQC+xBkAmnsAgJaxDQCXUQ8AlHkLAJVhCwCaBQ8Am5EBAJ57AICiewCApnsAgN0AAACcfQMAqnsAgOFIDwCuewCA4xwOALJ7AIC2ewCAunsAgL57AIDCewCAsUEXALChFwCzqesBsgHoAbUB7AG0EesB74wOAMZ7AICpxR8AqAEcAKsBEACqkR8ArdkTAKzREwCv2RcArgUTAKHxAgDKewCAo8kHAKLBAgClARgApGUHAKehGwCm+RsAqCkFAKldBQCqVQUAq20FAKx5BQCteQUArm0FAK9hBQB2ewCAznsAgNJ7AIDWewCAgA0AAIGxAACCsQAA2nsAgLiJBQC5iQUAup0FALuVBQC8uQUAvbkFAL5RBgC/UQYAsOUFALHtBQCy5QUAs/0FALTtBQC13QUAttUFALe9BQCj3QUA3nsAgOJ7AICEDAAA5nsAgKb5BQCl8QUA6nsAgKspBQCqIQUAhpgAAIegAACvGQUArikFAK0pBQCsMQUA7nsAgLNhBgDyewCA9nsAgLYhBgD6ewCA/nsAgLUBBgC6rQcAu40HAAJ8AIAGfACAvo0HAL9xBwC8lQcAvY0HAL65BQC/uQUAvLkFAL25BQC6uQUAu7kFALi5BQC5uQUAtkkFALdJBQC0fQUAtXUFALJ5BQCzeQUAsBUFALF9BQCuXQUAr20FAKxFBQCtXQUAqqUKAKtdBQCovQoAqa0KAAp8AIAOfACAEnwAgBZ8AIAafACAHnwAgCJ8AIAmfACAqA0HAKkdBwCqLQcAq0kHAKxNBwCtZQcArrEGAK+xBgAqfACALnwAgDJ8AIA2fACAOnwAgD58AIBCfACARnwAgLhVBgC5XQYAulUGALtxBgC8NQYAvfEBAL7xAQC/8QEAsK0GALGNBgCyhQYAs50GALSNBgC1cQYAtnUGALdtBgCjpQQAgi0AAIEVAACAHQAASnwAgKblBAClxQQATnwAgKtJBQCqaQUAUnwAgFp8AICvtQUArkkFAK1JBQCsUQUAhmAcAIcIAwBefACAs4UCAGJ8AIC1gQIAtoECAGZ8AIBqfACAbnwAgLoJAwC7CQMAvBkDAL0ZAwC+CQMAvwkDAKxVAgCtXQIArmECAK9hAgCoDQIAqVUCAKpRAgCrUQIAhKwDAHJ8AIB2fACAenwAgIT8HQB+fACAgnwAgIZ8AIC8cQMAvXEDAL5xAwC/cQMAuHEDALlxAwC6cQMAu3EDALSRAwC1kQMAtpEDALeRAwCwkQMAsZEDALKRAwCzkQMAinwAgI58AICSfACAlnwAgJp8AIDhpAEAnnwAgOOAAQC+aBwAonwAgKZ8AIDv2AYAqnwAgK58AICyfACAtnwAgKOJAwCCLQAAgRUAAIAdAAC6fACApo0DAKWNAwC+fACAqwUCAKoFAgDCfACAynwAgK8FAgCuBQIArRUCAKwVAgCGIBwAh8Qd
AM58AIDSfACA1nwAgNp8AIDefACA72wGAOJ8AIDhbAcA5nwAgON0BwDqfACA7nwAgPJ8AID2fACAs5EBAPp8AID+fACAAn0AgAZ9AIC2sQEAtbkBAAp9AIC7VQEAukkBAA59AIASfQCAv/UAAL71AAC9RQEAvEUBAKNRHgDGfACAFn0AgBp9AIAefQCApnEeAKV5HgAifQCAq5UeAKqJHgAmfQCAKn0AgK81HwCuNR8ArYUeAKyFHgCAbQAAgRUAAIIdAADv/BkALn0AgDJ9AIA2fQCAOn0AgIbAAACHrAMAPn0AgEJ9AIBGfQCA4SwcAEp9AIDjzBwAqK0eAKnNHgCq2R4Aq9EeAKzxHgCt8R4Arj0eAK81HgCE7AAATn0AgFJ9AIBWfQCAWn0AgF59AIBifQCAZn0AgLjRHwC53R8Auu0fALvlHwC84R8AveEfAL7hHwC/4R8AsE0eALFRHgCyUR4As1EeALTxHwC18R8AtvEfALfxHwCobR4AqY0eAKqFHgCrnR4ArIUeAK2NHgCuuR4Ar7UeAGp9AIBufQCAcn0AgHZ9AIB6fQCAfn0AgIJ9AICGfQCAuJ0eALmtHgC6pR4Au0UBALxdAQC9RQEAvkUBAL91AQCw0R4AsdEeALLRHgCz0R4AtLUeALW9HgC2tR4At60eALMNHgCKfQCAjn0AgJJ9AICWfQCAtg0eALUNHgCafQCAuxUeALoVHgCefQCAon0AgL95HgC+cR4AvQUeALwFHgCCbQAAo0keAIBVAACBZQAApkkeAL6cAQCqfQCApUkeAKpRHgCrUR4Ah3wAAIZMAACuNR4Arz0eAKxBHgCtQR4AqF0CAKltAgCqZQIAq30CAKxpAgCtsQIArrECAK+xAgCE7AQArn0AgLJ9AIC2fQCAun0AgL59AIDCfQCAxn0AgLhxAwC5cQMAunEDALtxAwC81QMAvd0DAL7VAwC/zQMAsNECALHRAgCy0QIAs9ECALRRAwC1UQMAtlEDALdRAwCz7QIAyn0AgM59AIC+gAQA0n0AgLYxAgC14QIA1n0AgLsVAgC6FQIA2n0AgN59AIC/lQMAvpUDAL0FAgC8BQIA4n0AgKOpAgDmfQCA6n0AgKZ1AgDufQCA8n0AgKWlAgCqUQIAq1ECAPZ9AID6fQCArtEDAK/RAwCsQQIArUECAKjZAgCpIQEAqiEBAKshAQCsIQEArSEBAK4hAQCvIQEA/n0AgAJ+AIAGfgCAviAEAAp+AIAOfgCAEn4AgBp+AIC4jQEAuZEBALqRAQC7pQEAvL0BAL11AAC+fQAAv3UAALDlAQCx7QEAsvkBALPxAQC02QEAtdkBALa5AQC3tQEA4RgeAB5+AIDjKB8AIn4AgIGlAACApQAAJn4AgIKlAACGAAQAh/QFACp+AIAufgCAMn4AgDZ+AIDvYB4AOn4AgD5+AIBCfgCAhfD0AUZ+AIBKfgCA42QBAE5+AIDhpAEAUn4AgO/IAABWfgCAWn4AgFZ8AICE/AUAXn4AgGJ+AICzKQYAFn4AgGZ+AIBqfgCAbn4AgLYhBgC1KQYAcn4AgLupBgC6oQYAdn4AgHp+AIC/nQYAvp0GAL2lBgC8rQYA4bQHAH5+AIDjeAQAgn4AgIB9AACBEQAAghUAAIZ+AICGwAAAh1gDAIp+AICOfgCAkn4AgJZ+AIDvDAQAmn4AgKOpBgCefgCAon4AgKZ+AICqfgCApqEGAKWpBgCufgCAqykGAKohBgCyfgCAtn4AgK8dBgCuHQYArSUGAKwtBgC6fgCAs0kHAL5+AIDCfgCAtn0HAMZ+AIDKfgCAtXUHALpdBwC7JQcAzn4AgNJ+AIC+IQcAvy0HALw9BwC9MQcAqD0GAKmBBgCqhQYAq5UGAKy5BgCtuQYArqkGAK+pBgDWfgCA2n4AgN5+AIDifgCA5n4AgIK5AACBsQAAgLkAALitBgC5vQYAurUGALtFAQC8XQEAvUUBAL5FAQC/dQEAsN0GALGlBgCyrQYAs6EGALShBgC1rQYAtpkGALeVBgCjDQYA6n4AgO5+AIDyfgCAhJgCAKY5BgClMQYAvpwBAKthBgCqGQYAhggAAId8AQCvaQYArmUGAK11BgCseQYA+n4AgLO1AQD+fgCAAn8AgLZVAQAGfwCACn8AgLWhAQC6cQEAu3kBAA5/AIASfwCAvjEBAL89AQC8UQEAvVEBAKhpAgCpaQIAqnkCAKt5AgCsbQIArZECAK6RAgCvkQIAFn8AgBp/AIAefwCAIn8AgCZ/AIAqfwCALn8AgDJ/AIC4mQIAua0CALqlAgC7bQMAvHUDAL19AwC+dQMAv20DALDxAgCx+QIAssECALPBAgC0sQIAtb0CALa1AgC3qQIANn8AgDp/AIA+fwCAo/0CAEJ/AICl6QIAph0CAEZ/AIBKfwCATn8AgKo5AgCrMQIArBkCAK0ZAgCueQIAr3UCAFJ/AIBWfwCAWn8AgIQADACAGQAAgQkAAII5AABefwCAYn8AgGp/AIBufwCAvuAMAHJ/AIB2fwCAhlgNAIcMAwCowQIAqc0CAKrFAgCr2QIArMkCAK39AgCu9QIArz0BAHp/AIB+fwCAgn8AgIZ/AICKfwCAjn8AgJJ/AIC+MAwAuMUBALnNAQC62QEAu9EBALzxAQC98QEAvpkBAL+ZAQCwRQEAsU0BALJFAQCzXQEAtEUBALVNAQC2RQEAt/0BAOE4BgCWfwCA42wGAJp/AICefwCAon8AgKZ/AICqfwCAhKgNAK5/AICyfwCAtn8AgL6wDwC6fwCA72wGAL5/AIDCfwCApn0AgMZ/AIDKfwCA41AAAM5/AIDhoAEA0n8AgO+EAADafwCAhyANAIZMDwCAPQAAgSEAAIIlAADefwCAs80NAGZ/AIDWfwCA4n8AgOZ/AIC2/Q0AtcENAOp/AIC7CQ4AugEOAO5/AIDyfwCAvwkOAL4BDgC9CQ4AvBEOAPZ/AIDjmAwA+n8AgOH8DwD+fwCAAoAAgAaAAIAKgACADoAAgBKAAIAWgACAGoAAgB6AAIDvYAwAIoAAgCaAAICjTQ0AKoAAgC6AAIAygACANoAAgKZ9DQClQQ0AOoAAgKuJDgCqgQ4APoAAgEKAAICviQ4AroEOAK2JDgCskQ4Agm0AALM1DgCAVQAAgWUAALb1DwCE3AMARoAAgLX9DwC60Q8Au9EPAIYABACH3AAAvn0PAL9lDwC8wQ8AvXkPAKjlDwCp7Q8AqvkPAKv5DwCsMQ4ArTEOAK4xDgCvMQ4ASoAAgE6AAIBSgACAVoAAgFqAAIBegACAYoAAgGaAAIC43Q4AueEOALrhDgC74Q4AvOUOAL3pDgC+mQ4Av5UOALBRDgCxUQ4AslEOALPpDgC0/Q4AteUOALbtDgC35Q4Ao3EPAGqAAIBugACAcoAAgHaAAICmsQ4ApbkOAHqAAICrlQ4AqpUOAH6AAICCgACAryEOAK45DgCtPQ4ArIUOAIaAAICzyQEAioAAgI6AAIC2+QEAkoAAgJaAAIC1wQEAuqkBALu1AQCagACAnoAAgL6
tAQC/lQEAvK0BAL2lAQCo5Q0AqfkNAKoFAgCrHQIArA0CAK09AgCuNQIAr10CAKKAAICmgACAqoAAgK6AAICAGQAAgRkAAIIFAACygACAuC0CALk1AgC6MQIAuzECALzVAgC93QIAvtUCAL/NAgCwKQIAsTUCALI9AgCzNQIAtC0CALUVAgC2HQIAtxUCALqAAICEnAIAvoAAgKOBAgDCgACApYkCAKaxAgDGgACAhiAEAIfUAwCq4QIAq/0CAKzlAgCt7QIAruUCAK/dAgC29QMAvkQDAIWM/QG1/QMAyoAAgLP9AwDOgACA0oAAgL59AwC/TQMAvGUDAL19AwC6dQMAu30DANaAAIDagACA3oAAgOKAAICEBAIAoyUCAOaAAIClJQIApi0CAOqAAIDugACA8oAAgKqtAgCrpQIArL0CAK2lAgCupQIAr5UCAPaAAID6gACA/oAAgAKBAIAGgQCA48ADAAqBAIDhrAEADoEAgO9YAwASgQCAFoEAgIANAACB5QAAgu0AABqBAIDhYA8A40ABAOM4DgDheA4AHoEAgCKBAIC+lAUAKoEAgIYABACHZAUALoEAgDKBAIA2gQCA7/wOAO98DgA6gQCAs1EBAD6BAID2fgCAQoEAgEaBAIC2DQEAtQkBAEqBAIC74QAAuhkBAE6BAIBSgQCAv9EAAL7pAAC96QAAvPkAALaAAIAmgQCAVoEAgFqBAIBegQCAYoEAgGaBAIBqgQCAqKEGAKmtBgCquQYAq7EGAKzhBgCt7QYAruUGAK/FBgCwvQYAsUUHALJNBwCzXQcAtE0HALV1BwC2fQcAtx0HALglBwC5LQcAuiUHALs9BwC8KQcAvRUHAL4RBwC/EQcAoxEGAG6BAIBygQCAdoEAgHqBAICmTQYApUkGAH6BAICroQcAqlkGAIKBAICGgQCAr5EHAK6pBwCtqQcArLkHAIANAACBFQAAgh0AAIqBAICOgQCAkoEAgISUAwC+lAMAloEAgJqBAICGyAAAh4wAAJ6BAICigQCApoEAgKqBAIConQYAqa0GAKqlBgCrvQYArK0GAK3RBgCu1QYAr80GAK6BAICygQCAtoEAgLqBAIC+gQCAwoEAgMaBAIDKgQCAuF0BALnBAQC6wQEAu8EBALzBAQC9yQEAvvEBAL/xAQCwvQYAsY0GALKFBgCzZQEAtH0BALVlAQC2bQEAt2UBALMtBgDOgQCA0oEAgNaBAIDagQCAtlEGALUlBgDegQCAu0kGALp5BgDigQCA5oEAgL+hAQC+uQEAvbEBALxRBgDqgQCAo2kGAO6BAIDygQCAphUGAPaBAID6gQCApWEGAKo9BgCrDQYA/oEAgAKCAICu/QEAr+UBAKwVBgCt9QEAutUHALvdBwC4wQcAucEHAL4xBAC/MQQAvPEHAL3xBwCyrQcAs7UHALCtBwCxpQcAtp0HALf1BwC0pQcAtZUHAKppBwCraQcAqGkHAKlpBwCuaQcAr2kHAKxpBwCtaQcAgLkDAIGNAwCChQMAhKgDAIZQ/AGHCAMAvjQDAAqCAICoZQIAqXUCAKp9AgCrdQIArG0CAK21AwCuvQMAr7UDAA6CAIASggCAFoIAgBqCAIAeggCAIoIAgCaCAIAqggCAuFEDALlZAwC6YQMAu2EDALwRAwC9HQMAvhUDAL8JAwCwzQMAsdUDALLdAwCz1QMAtM0DALVxAwC2cQMAt3EDAC6CAIAyggCAs/0DADaCAIC17QMAOoIAgD6CAIC2PQIAQoIAgEaCAIC7GQIAugECAL0JAgC8AQIAv70CAL4BAgBKggCAToIAgITE/QG+wPwBUoIAgFaCAIBaggCA79wDAF6CAIDhlAEAYoIAgOMQAwBmggCAgu0AAIHtAACA7QAA4TgGAOE8BwDjQAEA45QGAGqCAIBuggCAcoIAgHqCAICGgPwBh+j9AX6CAICCggCAhoIAgIqCAIDvnAEA79wGAKM1AwCOggCAkoIAgJaCAICaggCApvUCAKUlAwCeggCAq9ECAKrJAgCiggCApoIAgK91AgCuyQIArcECAKzJAgB2ggCAqoIAgK6CAICyggCA76T9AbaCAIC6ggCAvoIAgON4/QHCggCA4UD8AcaCAIDKggCAzoIAgNKCAIDWggCAs+X+AYItAACBFQAAgB0AANqCAIC25f4BtfX+Ad6CAIC7Yf8Butn+AeKCAICE5AMAv2n/Ab5h/wG9df8BvHn/Aaj9/gGpJf4Bqi3+Aasl/gGsPf4BrSX+Aa4t/gGvJf4BviwAAOaCAICGiAAAh+wAAOqCAIDuggCA8oIAgPaCAIC4gf8BuYH/AbqZ/wG7mf8BvIn/Ab21/wG+sf8Bv63/AbBd/gGx5f8Bsu3/AbPh/wG05f8Bte3/AbbZ/wG32f8Bo6X/AfqCAID+ggCAAoMAgAaDAICmpf8BpbX/AQqDAICrIf4Bqpn/AQ6DAIASgwCAryn+Aa4h/gGtNf4BrDn+ARaDAICz6f4BGoMAgB6DAIC2lf4BIoMAgCaDAIC16f4BurH+Abu5/gEqgwCALoMAgL51AQC/fQEAvJH+Ab2R/gGoHf4BqS3+Aaol/gGrPf4BrCX+Aa1R/gGuUf4Br1H+ATKDAIA2gwCAOoMAgD6DAIBCgwCARoMAgEqDAIBOgwCAuNkBALnZAQC67QEAu+EBALzhAQC94QEAvuEBAL/hAQCwMf4BsTn+AbIB/gGzAf4BtPUBALX9AQC29QEAt+kBAKOt/QFSgwCAvkwDAFqDAIBegwCAptH9AaWt/QFigwCAq/39Aar1/QFmgwCAaoMAgK85AgCuMQIArdX9AazV/QGA+QMAgfkDAIJNAACFdCAAboMAgITYAwCE1AQAcoMAgIZABACHVAMAdoMAgHqDAIB+gwCAgoMAgIaDAIC+8AUAqDECAKkxAgCqMQIAqzECAKyVAwCtnQMArpUDAK+NAwCKgwCAjoMAgJKDAICWgwCAhHwHAJqDAICegwCAooMAgLipAwC5qQMAumkDALtpAwC8eQMAvXkDAL5pAwC/aQMAsP0DALHNAwCyxQMAs60DALS5AwC1uQMAtq0DALelAwCmgwCAqoMAgK6DAICygwCAtoMAgLqDAIDv6AMAvoMAgOGQAQDCgwCA42wDAMqDAICAJQAAgSkAAIIdAADOgwCAs/kDANKDAICGaAcAh1wFANaDAIC2XQIAtV0CANqDAIC7SQIAunkCAN6DAIDigwCAvz0CAL49AgC9OQIAvFECAOaDAIDhPP4BvkAGAOPwAQDqgwCA7oMAgPKDAID2gwCA+oMAgP6DAIAChACABoIAgAaEAIAKhACADoQAgO/kAQAShACAFoQAgKNxAwAahACApdUCAB6EAIAihACAptUCACaEAIAqhACAq8ECAKrxAgCtsQIArNkCAK+1AgCutQIA4dz8AcaDAIDjUAQA74gEAID1BwCBCQAAgj0AAC6EAICEJAEAMoQAgDaEAIA6hACAPoQAgOFMBADv5BwA43QEALNdBgBChACAhgAMAIfgAwBGhACAtgUGALV1Bg
BKhACAuxEGALoJBgBOhACAUoQAgL/VBgC+1QYAvQEGALwJBgCojQYAqZUGAKqVBgCrpQYArL0GAK3FBgCuxQYAr/UGAFaEAIBahACAXoQAgGKEAIBmhACAaoQAgG6EAIByhACAuHUGALl9BgC6dQYAu80HALzVBwC93QcAvtUHAL/NBwCwjQYAsZUGALKdBgCzlQYAtFEGALVRBgC2UQYAt1EGAKMdBwCPFewBdoQAgHqEAIB+hACApkUHAKU1BwCChACAq1EHAKpJBwCGhACAioQAgK+VBwCulQcArUEHAKxJBwCeRfkBn6X5AZyR/QGdTfkBmlX9AZtd/QGYBfEBmZX+AZal8gGXYfEBlG31AZU19QGS4ekBk4X2AZBV7AGRXekBsbEdALClHQCziRkAskEcALUBJAC09RkAjoQAgJKEAICWhACAgqkDAIGhAwCAaQAAohUFAKMFAgCgFQYAob0FAKHFAQCahACAo80NAKLlAQClAQgApN0NAKfRCQCm2QkAqQEUAKilCACrxRQAqs0VAK3REQCsARAArwEcAK51EQCCEe8BgynvAZ6EAICihACAhuH1AYcR9gGEOeoBhY3qAYp59gGL4fEBvqQMAKqEAICO+f0BjzH+AYw98gGNYfIBkkn+AZOd/gGHCAwAhmwMAJax+gGX+QUAlFn6AZVZ+gGaYQYAm8EGAK6EAICyhACAtoQAgLqEAICcyQEAvoQAgKitBQCpuQUAqs0FAKvdBQCszQUArf0FAK71BQCvHQUAwoQAgMaEAIDKhACAzoQAgNKEAIDWhACA2oQAgN6EAIC4dQUAuX0FALoJBQC7CQUAvB0FAL0BBQC+AQUAvz0FALBxBQCxcQUAsnEFALNxBQC0UQUAtVEFALZRBQC3TQUAs0UEAOKEAIDmhACA6oQAgO6EAIC2fQQAtUUEAPKEAIC7tQQAurUEAPaEAID6hACAv5UEAL6VBAC9pQQAvKUEAP6EAICjAQQAAoUAgAaFAICmOQQACoUAgA6FAIClAQQAqvEEAKvxBAAShQCAhOwNAK7RBACv0QQArOEEAK3hBADh0AYAhAwMAOMoBwC+AAwAGoUAgO9EAwCGuAwAhywNAB6FAIDjlAEAIoUAgOH8AQBWgwCAJoUAgO/IBgAqhQCALoUAgDKFAICzjQMANoUAgLWNAwA6hQCAPoUAgLa1AwBChQCARoUAgLtBAwC6SQMAvUEDALxZAwC/QQMAvkkDAKNFDACmhACAFoUAgEqFAIBOhQCApn0MAKVFDABShQCAq4kMAKqBDABWhQCAWoUAgK+JDACugQwArYkMAKyRDACAFQ8AgR0PAIIhDwCzIQ4AXoUAgLUhDgC2JQ4AYoUAgGaFAIBqhQCAusEOALvBDgC8wQ4AvcEOAL7BDgC/wQ4AqK0OAKntDgCq5Q4Aq/0OAKzlDgCt6Q4ArjkOAK85DgBuhQCAcoUAgHaFAIB6hQCAgB0AAIEJAACCvQEAfoUAgLjNDwC51Q8AutUPALvlDwC8/Q8AvZUPAL6RDwC/kQ8AsEkOALFJDgCyWQ4As1kOALRJDgC1SQ4Atv0PALf1DwCjbQ8AgoUAgL6EAQCKhQCAjoUAgKZpDwClbQ8AkoUAgKuNDwCqjQ8AhogAAIdsAQCvjQ8Aro0PAK2NDwCsjQ8AloUAgLPtDgCahQCAnoUAgLaRDgCihQCApoUAgLXhDgC6tQ4Au70OAKqFAICuhQCAvn0BAL9lAQC8mQ4AvZkOAKgRDgCpJQ4AqiEOAKs5DgCsLQ4ArVUOAK5dDgCvUQ4AhKgAALKFAIC2hQCAuoUAgL6FAIDChQCAxoUAgMqFAIC47QEAuZUBALqVAQC7rQEAvLUBAL11AQC+fQEAv3UBALA1DgCxPQ4AsgkOALMJDgC0/QEAteUBALblAQC31QEAo6kNAM6FAIDShQCA1oUAgNqFAICm1Q0ApaUNAN6FAICr+Q0AqvENAOKFAIDmhQCAryECAK45AgCt3Q0ArN0NAIANAACBFQAAgh0AAOqFAIDuhQCA8oUAgIeQAwCGfAQAvuwEAPqFAID+hQCAAoYAgAaGAIAKhgCADoYAgBKGAICyLQ4AszUOALAtDgCxJQ4Ati0OALedDwC0LQ4AtSUOALq9DwC7jQ8AuKUPALm9DwC+LQ8AvxUPALyVDwC9JQ8AFoYAgBqGAIAehgCAIoYAgCaGAIAqhgCALoYAgDKGAICqpQ4Aq7UOAKjFDgCp3Q4Arp0OAK9VDgCspQ4ArZUOAKgNAgCpFQIAqhUCAKtNAgCsWQIArVkCAK5NAgCvRQIAhKgFADaGAIA6hgCAPoYAgIS4BABChgCARoYAgEqGAIC4/QIAuUEBALpBAQC7QQEAvEEBAL1JAQC+cQEAv3EBALAJAgCxCQIAss0CALPFAgC03QIAtcUCALbNAgC3xQIA4dQPAOMQDgDj9A4A4QwOAE6GAIBShgCAVoYAgFqGAIBehgCAYoYAgL4kBABqhgCA7AAAAO9EAADvzA4AboYAgIJlAACz2QIAgFUAAIFtAAC2nQIAcoYAgHaGAIC1lQIAuokCALuJAgCGqAQAh+AEAL5dAgC/RQIAvF0CAL1VAgCjHQUA9oUAgGaGAIB6hgCAfoYAgKZZBQClUQUAgoYAgKtNBQCqTQUAhoYAgIqGAICvgQUArpkFAK2RBQCsmQUAjoYAgLMpBgCShgCAloYAgLYpBgCahgCAnoYAgLUpBgC6pQYAu60GAKKGAICmhgCAvqUGAL+tBgC8tQYAva0GAKjlBgCp7QYAquUGAKv9BgCs5QYAre0GAK7lBgCvXQYAqoYAgK6GAICyhgCAtoYAgLqGAIC+hgCAwoYAgMaGAIC46QcAuekHALr9BwC79QcAvO0HAL1FBwC+TQcAv0UHALAlBgCxLQYAsiUGALM9BgC0JQYAtS0GALYlBgC32QcAo20HAIItAACBFQAAgB0AAMqGAICmbQcApW0HAM6GAICr6QcAquEHANKGAIC+oAEAr+kHAK7hBwCt6QcArPEHANaGAICzkQYAhugAAIcsAQC2QQEA2oYAgN6GAIC1UQEAuk0BALslAQDihgCA5oYAgL4lAQC/LQEAvDEBAL0xAQCwrQEAscUBALLBAQCzwQEAtMUBALXNAQC28QEAt/EBALgBAQC5AQEAugEBALsBAQC8AQEAvQEBAL4BAQC/AQEA6oYAgO6GAIDyhgCA9oYAgIaFAID6hgCA/oYAgAKHAICoTQYAqVkGAKo9BgCrNQYArP0BAK3lAQCu5QEAr9UBAKPVBQAGhwCACocAgA6HAIAShwCApgUCAKUVAgAWhwCAq2ECAKoJAgAahwCAHocAgK9pAgCuYQIArXUCAKx1AgAihwCAJocAgCqHAIAuhwCAMocAgOFkBQA2hwCA4+wFAIARAACBEQAAghEAAO/0BgA6hwCAPocAgEKHAIC+MAMAhMQCAEqHAICz4QMAhMAcALVRAwBOhwCAUocAgLZZAwBWhwCAWocAgLtxAwC6eQMAvbUAALxpAwC/tQAAvrUAAF6HAIDhl
AEAYocAgONcAgCGcBwAh0QDAGaHAIBqhwCAbocAgHKHAIB2hwCAeocAgH6HAICChwCAhocAgO94AgCoVQIAqV0CAKphAgCrYQIArNECAK3RAgCu0QIAr9ECAIqHAICOhwCAkocAgJaHAICahwCAnocAgKKHAICmhwCAuGkBALlpAQC6CQEAuwkBALwZAQC9GQEAvgkBAL8FAQCwtQIAsb0CALK1AgCzaQEAtHkBALV5AQC2aQEAt2EBAOHEBwDjpAYA47gGAOF8BgCADQAAgTUAAII9AACqhwCArocAgLKHAIC+4B0AuocAgL6HAIDvYAAA7+gGAMKHAICjqQIAxocAgMqHAIDOhwCA0ocAgKYRAgClGQIA1ocAgKs5AgCqMQIAhkgcAIfMHACv/QEArv0BAK39AQCsIQIAqIUeAKmRHgCqkR4Aq60eAKy1HgCt1R4ArtEeAK/FHgC2hwCA2ocAgN6HAIDihwCA5ocAgOqHAIDuhwCA8ocAgLhhHwC5YR8AumEfALthHwC8YR8AvWEfAL5hHwC/YR8AsL0eALGFHgCyjR4As4UeALSdHgC1hR4Ato0eALeFHgCzGR4A9ocAgPqHAID+hwCAAogAgLZVHgC1PR4ABogAgLtBHgC6eR4ACogAgA6IAIC/QR4AvlkeAL1RHgC8WR4AEogAgKNdHgAWiACAGogAgKYRHgAeiACAIogAgKV5HgCqPR4AqwUeAISkAwC+qAMArh0eAK8FHgCsHR4ArRUeAKitHgCptR4AqrUeAKvJHgCs2R4ArdkeAK7JHgCvwR4AgO0BAIHxAQCC8QEAJogAgIaQAACHdAEAKogAgC6IAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALBFAQCxTQEAskUBALNdAQC0RQEAtU0BALZFAQC3+QEAsz0eADKIAIA2iACAOogAgD6IAIC2WR4AtVEeAEKIAIC7iQEAuoEBAEaIAIBKiACAv4kBAL6BAQC9iQEAvJEBAE6IAIBSiACAo3UeAFaIAIClGR4AWogAgF6IAICmER4ARocAgGKIAICrwQEAqskBAK3BAQCs2QEAr8EBAK7JAQBmiACAaogAgG6IAIByiACAdogAgIQYAgB6iACAfogAgIKIAICGiACAiogAgI6IAICSiACAmogAgJ6IAIC+cAMAgGkAAIFpAACCeQAAhAAEAIbwBACHdAMAoogAgO8MHwCmiACA4aweAKqIAIDj8B4ArogAgLKIAIC2iACAuogAgL6IAIDCiACAxogAgMqIAIDvVAIAzogAgNKIAIDWiACA46QCANqIAIDhgAEA3ogAgOKIAIDmiACA6ogAgO6IAICzRQMA8ogAgPaIAID6iACA/ogAgLZFAwC1VQMAAokAgLshAwC6SQMAvqAEAAqJAIC/KQMAviEDAL01AwC8OQMAqDkCAKk5AgCqjQIAq4UCAKydAgCthQIAroUCAK+1AgCA7QEAgfUBAIL1AQAOiQCAhpAEAIcEBQASiQCAFokAgLhFAQC5TQEAukUBALtdAQC8SQEAvUkBAL55AQC/eQEAsM0CALGlAgCyrQIAs6ECALSlAgC1rQIAtp0CALd9AQAaiQCAHokAgCKJAIAmiQCAKokAgC6JAIAyiQCA74gBAITsBADhVB4ANokAgONUAQA6iQCAPokAgEKJAIBGiQCAo0UCAEqJAIBOiQCAUokAgFaJAICmRQIApVUCAFqJAICrIQIAqkkCAF6JAIBiiQCArykCAK4hAgCtNQIArDkCAKg1BgCpPQYAqlEGAKttBgCseQYArWUGAK5tBgCvZQYABokAgGaJAIBqiQCAbokAgIAZAACBGQAAggUAAHKJAIC45QYAuekGALr5BgC7+QYAvOkGAL3pBgC+nQYAv5UGALAdBgCx5QYAsu0GALPlBgC0/QYAteEGALbhBgC34QYAs9kGAL7QAwB2iQCAeokAgH6JAIC25QYAtfEGAIKJAIC7IQYAutkGAIaYAACHeAMAvyUGAL45BgC9MQYAvDkGAIaJAICjnQYAiokAgI6JAICmoQYAkokAgJaJAICltQYAqp0GAKtlBgCaiQCAnokAgK59BgCvYQYArH0GAK11BgCo7QcAqSkGAKoxBgCrMQYArJEGAK2RBgCukQYAr5EGAKKJAICmiQCAqokAgK6JAICyiQCAtokAgLqJAIC+iQCAuIUGALmNBgC6hQYAu50GALyNBgC9vQYAvrUGAL95AQCw8QYAsfEGALLxBgCzxQYAtMEGALXBBgC2wQYAt8EGALO5BgDCiQCAxokAgMqJAIDOiQCAthEGALUZBgDSiQCAuzUGALo1BgDWiQCA2okAgL8FBgC+BQYAvREGALwlBgClQQYA3okAgOKJAICmSQYAgRUAAIB5AACj4QYAghUAAK1JBgCsfQYAr10GAK5dBgCENAEAlogAgKttBgCqbQYAvswDAOqJAICzlQIA7okAgLXZAgDyiQCA9okAgLbRAgCGgAwAhzgDALvFAgC6xQIAvRUDALwVAwC/FQMAvhUDAPqJAID+iQCA71gGAIRAAwACigCABooAgAqKAIAOigCAEooAgBaKAIAaigCAHooAgOE4BgAiigCA4yQGAL5wDACsSQIArUkCAK5dAgCvVQIAqB0CAKkFAgCqBQIAq10CAISoDAAmigCAKooAgC6KAIC+vA0AMooAgDaKAIA6igCAvE0DAL1VAwC+VQMAv2UDALjpAwC56QMAul0DALtVAwC0yQMAtckDALbZAwC32QMAsBkCALEZAgCy2QMAs9kDAD6KAIDj5AAAQooAgOG8AQBGigCAgj0AAIE9AACAPQAASooAgE6KAIBSigCAWooAgF6KAIDvzAMAYooAgGaKAICj3QMAaooAgIboDACHYA0AbooAgKaZAwClkQMAcooAgKuNAwCqjQMAdooAgHqKAICvXQIArl0CAK1dAgCsXQIAfooAgIKKAICGigCAiooAgI6KAICSigCAlooAgO/gAQCEvAwA4YwGAJqKAIDjHAYAnooAgKKKAICmigCAqooAgLPVAQCuigCAsooAgLaKAIC6igCAtpEBALWZAQC+igCAu70BALq9AQDCigCAyooAgL+dAQC+nQEAvZ0BALydAQCoBQ4AqQkOAKodDgCrFQ4ArFEOAK1RDgCuSQ4Ar0kOAFaKAICCzQ8AgfUPAID9DwDGigCAzooAgIYcAACHsAMAuOkOALnpDgC6/Q4Au/UOALztDgC9VQ8AvlEPAL9NDwCwOQ4AsTkOALIJDgCzCQ4AtBkOALUZDgC2DQ4At9kOAKOVDgDSigCA1ooAgNqKAIDeigCAptEOAKXZDgDiigCAq/0OAKr9DgDmigCA6ooAgK/dDgCu3Q4Ard0OAKzdDgDuigCAs/0PAPKKAID2igCAtoEPAPqKAID+igCAtZkPALqNDwC7ZQ8AAosAgAaLAIC+fQ8Av2UPALx9DwC9dQ8AqC0OAKk1DgCqMQ4AqzEOAKxVDgCtRQ4ArkUOAK91DgAKiwCADosAgBKLAIAWiwCA
GosAgB6LAIAiiwCAJosAgLjpDgC59Q4Auv0OALv1DgC87Q4AvZEOAL6RDgC/kQ4AsA0OALHlDgCy7Q4As+UOALT9DgC15Q4Atu0OALflDgCjuQ4Agi0AAIEVAACAHQAAKosAgKbFDgCl3Q4ALosAgKshDgCqyQ4AMosAgL4sAQCvIQ4ArjkOAK0xDgCsOQ4AOosAgLZVAQC1RQEANosAgLNVAQA+iwCAhngAAIdcAAC/OQEAvjEBAL0lAQC8JQEAuzEBALpZAQDmiQCAQosAgEaLAIBKiwCAhAQDAKOJAgBOiwCApZkCAKaJAgBSiwCAvyg5AFaLAICqhQIAq+0CAKz5AgCt+QIAru0CAK/lAgDjWAIA78AOAOGIAQBaiwCAXosAgGKLAIBmiwCAaosAgG6LAIByiwCAdosAgHqLAIDvKAIA4ygOAH6LAIDhRA4AqbUCAKhpDQCrAQIAqgkCAK0BAgCsGQIArzECAK4BAgC+AAQAgosAgIaLAICKiwCAjosAgJKLAICWiwCAmosAgLnlAwC45QMAu+UDALrlAwC95QMAvOUDAL/lAwC+5QMAsSECALBJAgCzJQIAsiUCALUpAgC0IQIAtxUCALYVAgCowQIAqdECAKr1AgCrDQEArBUBAK0FAQCuBQEArzkBAJ6LAICiiwCAqosAgK6LAICyiwCAtosAgLqLAIC+iwCAuC0BALk9AQC67QEAu+UBALz9AQC95QEAvu0BAL/lAQCwLQEAsTUBALI9AQCzNQEAtC0BALUVAQC2HQEAtxUBAIA9AQCBpQAAgq0AAO/YAACGsAUAh9gFAMKLAIDv1A8AhGwEAOH0DgDGiwCA4xwPAMqLAIDhlAEAzosAgOMMDgCzPQIA0osAgNaLAIDaiwCA3osAgLbFAQC13QEA4osAgLuxAQC6qQEA5osAgOqLAIC/kQEAvqkBAL2hAQC8qQEAposAgO6LAICqRQYAq10GAKxFBgCtTQYArkUGAK99BgDyiwCA9osAgPqLAICj0QUA/osAgKUxBgCmKQYAAowAgAaMAICCHQAAgR0AAIAdAAAKjACADowAgBKMAIC+lAMAFowAgBqMAICGSAMAh8wDAB6MAIAijACAJowAgCqMAICoqQcAqakHAKq5BwCruQcArKkHAK2pBwCuAQcArzUHAC6MAIAyjACANowAgDqMAIA+jACAQowAgEaMAIBKjACAuC0HALnBAAC66QAAu+kAALz5AAC95QAAvuUAAL+dAACwUQcAsV0HALItBwCzJQcAtD0HALUlBwC2JQcAtxUHALMxBgBOjACAUowAgFaMAIBajACAtikGALUhBgBejACAu5kGALqVBgBijACAZowAgL/hBgC++QYAvfEGALz5BgBqjACAo3UGAG6MAIByjACApm0GAHaMAIB6jACApWUGAKrRBgCr3QYAfowAgIKMAICuvQYAr6UGAKy9BgCttQYAqOUBAKn1AQCq/QEAq/UBAKztAQCtNQEArj0BAK81AQCA+QAAgc0AAILFAACEYAEAvngBAIqMAICHrAAAhpABALjRAAC52QAAuuEAALvhAAC8kQAAvZ0AAL6VAAC/iQAAsE0BALFVAQCyXQEAs1UBALRNAQC18QAAtvEAALfxAACzdQIAjowAgJKMAICWjACAmowAgLa1AgC1ZQIAnowAgLuRAgC6iQIAoowAgKaMAIC/NQMAvokCAL2BAgC8iQIAqowAgKMxAgCujACAhMADAKbxAgCyjACAtowAgKUhAgCqzQIAq9UCALqMAIC+jACArs0CAK9xAwCszQIArcUCAKuNAACqjQAAqY0AAKg5AwCvvQAArr0AAK2FAACsjQAAqgAAAKsAAADCjACAxowAgMqMAIDOjACA0owAgNaMAIC7fQAAun0AALl9AAC4fQAAv90BAL7dAQC93QEAvN0BALO5AACysQAAsaEAALCtAAC3XQAAtl0AALWVAAC0lQAA2owAgN6MAIDijACA5owAgIE1AACADQAA6owAgII1AAC+rD0A7owAgPKMAICFaD0A+owAgP6MAICGODwAh8ACALNJAQACjQCA0AAAAAaNAIAKjQCAtkkBALVJAQAOjQCAuykBALolAQASjQCAFo0AgL8dAQC+HQEAvSEBALwpAQDjNDYA4QwGAOGwAgDjPAYAGo0AgB6NAIAijQCAJo0AgIQsPwC+oD8AKo0AgC6NAIDvfDcAMo0AgDaNAIDvGAEAOo0AgD6NAICGaD4Ah8w/AEKNAIBGjQCASo0AgO+UAABOjQCA4ZQBAFKNAIDjUAAAVo0AgILpPwCB6T8AgPE/AKMJPgCPASQA9owAgFqNAIBejQCApgk+AKUJPgBijQCAq2k+AKplPgBmjQCAao0AgK9dPgCuXT4ArWE+AKxpPgCeYTgAn3U4AJzBNACdtTkAmqU1AJt1NACYeTAAmXExAJYhLQCXhTEAlG0sAJVlLACSeSgAk6UtAJBRJACReSgAsQ0UALAFFACzARgAslUUALV5GAC0tRgAbo0AgHKNAIB2jQCAeo0AgH6NAICCjQCAotE8AKMlAQCgdTkAob08AKHJAACGjQCAowEEAKLlAAClHQQApPUEAKf5CACmAQgAqQEMAKhtCACrzQwAqs0MAK3REACsARAAr9URAK7ZEACCBSUAgy0lAIqNAICOjQCAhsEsAIcRLQCEHSkAhRUpAIopLQCLZSwAko0AgJaNAICOHTAAj8E0AIzZMACNHTEAkmE1AJPNNQCajQCAno0AgJZhOQCXmTgAlKE4AJV9OQCaYT0AmwU9AKKNAICmjQCAqo0AgK6NAICc6QAAso0AgLaNAIC6jQCAvo0AgMKNAICGjACAxo0AgMqNAIDOjQCAqJE+AKmRPgCq7T4Aq+E+AKzhPgCt6T4ArtE+AK/RPgCwUT4AsVE+ALJRPgCzUT4AtHk+ALV5PgC2bT4At2U+ALghPgC5IT4Aujk+ALs5PgC8KT4AvRU+AL4RPgC/DT4AgJkDAIGZAwCCBQAA0o0AgL5UAwDhsD0A2o0AgONAPgCEOAIA3o0AgOKNAIDv9D8A5o0AgOqNAICGmAQAhxwDALMFPQCECAQA7o0AgPKNAID2jQCAtgk9ALUJPQD6jQCAu/U9ALr1PQD+jQCAAo4AgL/dPQC+3T0AveU9ALzlPQAGjgCACo4AgKPNPQC+xAQApcE9AA6OAIASjgCApsE9ABaOAIAajgCAqz09AKo9PQCtLT0ArC09AK8VPQCuFT0AtmkCAB6OAIAijgCAtWkCACaOAICzSQIAKo4AgC6OAIC+qQMAv6kDALzBAwC9wQMAuvkDALv5AwAyjgCANo4AgKgtAwCpnQMAqpUDAKutAwCstQMArb0DAK61AwCv2QMAgA0AAIEVAACCHQAAOo4AgD6OAIBCjgCAh7QFAIacBAC4MQIAuTECALo1AgC7zQIAvNUCAL3dAgC+1QIAv8kCALBpAgCxaQIAskECALNBAgC0OQIAtTkCALYRAgC3EQIASo4AgOM0PgBOjgCA4aw+AFKOAIDvfAMAVo4
AgFqOAIBejgCA45QDAGKOAIDhfD4AZo4AgO/oPgBqjgCAbo4AgHKOAIB2jgCAo1UDAHqOAICldQMAfo4AgIKOAICmdQMAho4AgIqOAICr5QIAquUCAK3dAgCs3QIAr7UCAK61AgCoGQYAqSEGAKohBgCrPQYArCUGAK1dBgCuVQYAr00GAEaOAICOjgCAko4AgJaOAICajgCAno4AgKKOAICmjgCAuOUGALmBBgC6gQYAu50GALyJBgC9iQYAvqEGAL+hBgCwPQYAsQ0GALIFBgCz7QYAtPUGALXhBgC24QYAt90GALOpBgCCLQAAgRUAAIAdAACqjgCAtt0GALWtBgCujgCAu8kGALr5BgCyjgCAhOADAL8lBgC+MQYAvTkGALzRBgC+iAMAo+0GANaNAIC2jgCAppkGALqOAIC+jgCApekGAKq9BgCrjQYAhkgAAIdsAACudQYAr2EGAKyVBgCtfQYAqIEGAKmNBgCqmQYAq5UGAKyNBgCttQYArrEGAK+tBgDCjgCAxo4AgMqOAIDOjgCA0o4AgNaOAIDajgCA3o4AgLilBgC5YQEAumEBALthAQC8YQEAvWEBAL5hAQC/YQEAsNkGALHZBgCyqQYAs6kGALS9BgC1oQYAtqEGALedBgCzEQYA4o4AgOaOAIDqjgCA7o4AgLY1BgC1BQYA8o4AgLsdBgC6HQYA9o4AgPqOAIC/ZQYAvnkGAL19BgC8fQYA/o4AgKNVBgACjwCABo8AgKZxBgAKjwCADo8AgKVBBgCqWQYAq1kGABKPAIAWjwCArj0GAK8hBgCsOQYArTkGAKjVAgCp3QIAqikDAKspAwCsOQMArTkDAK4pAwCvKQMAGo8AgB6PAIAijwCAKo8AgC6PAIAyjwCAvrgDADaPAIC47QMAuYUDALqBAwC7gQMAvIUDAL2NAwC+sQMAv7EDALBZAwCxWQMAsu0DALPlAwC0/QMAteUDALblAwC31QMAgKEAAIGhAACCoQAAvoAMADqPAICEmAIAPo8AgEKPAICGAAwAh/QDAEaPAIBKjwCATo8AgFKPAIBWjwCAhLADALPhAwBajwCAXo8AgGKPAIBmjwCAtvkDALXxAwBqjwCAu90DALrdAwBujwCAco8AgL9hAwC+eQMAvXEDALx5AwB2jwCAeo8AgH6PAICjLQIAgo8AgKU9AgCmNQIAho8AgIqPAICOjwCAqhECAKsRAgCstQIArb0CAK61AgCvrQIA48QDAOMQBwDhuAEA4WwHAIBxAACBcQAAggUAAJKPAICGwAwAh1QNAJqPAICejwCA77ADAO8ABwCijwCApo8AgKqPAICujwCAso8AgLaPAIC6jwCAvo8AgMKPAIDvpAEAhKANAOGABgDGjwCA4xABAMqPAIDOjwCA0o8AgNaPAICz9QEA2o8AgN6PAIDijwCA5o8AgLZNAQC1SQEA6o8AgLtRAQC6SQEA7o8AgPKPAIC/OQEAvjEBAL1BAQC8SQEAqC0OAKk1DgCqPQ4AqzEOAKyBDgCtjQ4AroUOAK+1DgCWjwCA9o8AgPqPAID+jwCAgBkAAIEZAACCBQAAApAAgLidDgC5rQ4AuqUOALtNDwC8VQ8AvV0PAL5JDwC/QQ8AsM0OALHVDgCy3Q4As9UOALS1DgC1vQ4AtrUOALetDgCjtQ4AvogDAAaQAIAKkACADpAAgKYNDgClCQ4AEpAAgKsRDgCqCQ4AhggAAIdsAwCveQ4ArnEOAK0BDgCsCQ4AFpAAgBqQAIAekACAs7UPACKQAIC1VQ8Atl0PACaPAIAmkACAKpAAgLp5DwC7eQ8AvGkPAL1dDwC+SQ8Av0kPAKhpDgCpaQ4AqnEOAKtxDgCskQ4ArZEOAK6RDgCvkQ4ALpAAgDKQAIA2kACAOpAAgD6QAIBCkACARpAAgEqQAIC4hQ4AuY0OALqFDgC7nQ4AvI0OAL29DgC+tQ4Av3kBALDxDgCx8Q4AsvEOALPFDgC0wQ4AtcEOALbBDgC3wQ4Ao/kOAE6QAIBSkACAVpAAgFqQAICmEQ4ApRkOAF6QAICrNQ4AqjUOAGKQAIBmkACArwUOAK4FDgCtEQ4ArCUOAIANAACBFQAAgh0AAGqQAIBukACAcpAAgISUAQC+lAEAhkAHAIf0AAB6kACAfpAAgIKQAICGkACAipAAgI6QAICojQIAqZUCAKqVAgCrzQIArNUCAK3dAgCuyQIAr/0CAJKQAICWkACAmpAAgJ6QAIC/ABQAopAAgKaQAICqkACAuH0DALnBAwC6wQMAu8EDALzBAwC9yQMAvvEDAL/xAwCwhQIAsUUDALJNAwCzRQMAtF0DALVFAwC2TQMAt0UDALMdAgCukACAspAAgLaQAIC6kACAtl0CALVdAgC+kACAu4EDALpBAgDCkACAxpAAgL+BAwC+mQMAvZEDALyZAwDKkACAo1kCAM6QAIDSkACAphkCANaQAIDakACApRkCAKoFAgCrxQMA3pAAgOKQAICu3QMAr8UDAKzdAwCt1QMA6pAAgOPMAACEBAIA4bwBAIDJAQCB/QEAgvUBAL4QBQDukACAvigEAPKQAID2kACA+pAAgO8QAAD+kACAApEAgIbgBACH9AIABpEAgAqRAIDj/A8ADpEAgOHgDwASkQCA7xQPABaRAIAakQCAHpEAgCKRAIAmkQCAKpEAgC6RAIAykQCANpEAgDqRAIA+kQCAQpEAgEaRAIBKkQCA7+ABAIUEEgDh3A4ATpEAgOMcDgCAKQAAgR0AAIIFAABSkQCAszECAFqRAICEzAUAXpEAgGKRAIC2KQIAtSECAGaRAIC7zQEAus0BAGqRAIBukQCAv3UBAL7JAQC9wQEAvMkBAKjpBQCp6QUAqvkFAKv5BQCs6QUArekFAK45BgCvOQYA5pAAgFaRAICGiAAAhwADAHKRAIB2kQCAepEAgH6RAIC40QYAudkGALrhBgC74QYAvJEGAL2dBgC+lQYAv4kGALBJBgCxSQYAsl0GALNVBgC0TQYAtfEGALbxBgC38QYAo3EFAIKRAICGkQCAipEAgI6RAICmaQUApWEFAJKRAICrjQYAqo0GAJaRAICakQCArzUGAK6JBgCtgQYArIkGAJ6RAICikQCAs+EHAKaRAIC14QcAqpEAgK6RAIC25QcAdpAAgLKRAIC7vQcAuqEHAL2VBwC8qQcAv5UHAL6VBwCoAQYAqSUGAKohBgCrIQYArCEGAK0tBgCuJQYAr1UGALaRAICCHQAAgR0AAIAdAAC6kQCAvpEAgMKRAIC+MAEAuDkGALk5BgC6yQYAu8kGALzZBgC92QYAvskGAL/JBgCwLQYAsTEGALI1BgCzCQYAtBkGALUZBgC2CQYAtwkGAKOpBgCEjAIAhigfAIdEAQDKkQCApq0GAKWpBgDOkQCAq/UGAKrpBgDSkQCA1pEAgK/dBgCu3QYArd0GAKzhBgDakQCAsxUGAN6RAIDikQCAtj0GAOaRAIDqkQCAtTUGALrZAQC72QEA7pEAgPKRAIC+fQEAv2UBALx9AQC9dQEAqMUFAK
nJBQCq2QUAq9EFAKz5BQCt+QUArikCAK8pAgD2kQCA+pEAgP6RAIACkgCAjAAAAAaSAIAKkgCADpIAgLjtAgC5hQIAuo0CALuBAgC8hQIAvY0CAL69AgC/fQMAsFkCALFZAgCy7QIAs+UCALT9AgC15QIAtuUCALfVAgCjUQUAEpIAgBaSAIAakgCAHpIAgKZ5BQClcQUAIpIAgKudAgCqnQIAJpIAgCqSAICvIQIArjkCAK0xAgCsOQIAghEAAC6SAICAZQAAgQkAADKSAIC+mAMAOpIAgD6SAICEJAMAQpIAgIdoAwCGjBwARpIAgEqSAIBOkgCAUpIAgFaSAIBakgCAs6ECAITAHAC10QIAXpIAgGKSAIC21QIAZpIAgGqSAIC7wQIAuvUCAL0RAQC82QIAvxEBAL4ZAQBukgCAcpIAgHaSAIB6kgCAfpIAgIKSAICGkgCA77gGAIqSAIDhnAQAjpIAgON0BgCSkgCAlpIAgJqSAICekgCAgPkAAIH5AACCBQAAopIAgL5YHACEWB8A71wAAO9ABgDhkAEA4fwGAOM8AADjdAYAqpIAgK6SAICGmBwAh/QcAKNpAgC+DB8AspIAgLaSAIC6kgCAph0CAKUZAgC+kgCAqwkCAKo9AgDCkgCAxpIAgK/ZAQCu0QEArdkBAKwRAgCokR0AqZkdAKqhHQCroR0ArNEdAK3dHQCu1R0Ar8kdADaSAICmkgCAypIAgM6SAIDSkgCA1pIAgNqSAIDekgCAuHkeALl5HgC6zR4Au8UeALzdHgC9xR4AvsUeAL/1HgCwuR0AsY0dALKFHQCzTR4AtFUeALVdHgC2VR4At0keALjNHwC51R8Aut0fALvVHwC88R8Avf0fAL7pHwC/6R8AsKUfALGxHwCysR8As40fALSVHwC19R8Atv0fALf1HwCoGR4AqRkeAKotHgCrPR4ArCUeAK0tHgCuJR4Ar90fAOKSAIDmkgCA6pIAgO6SAIDykgCAxpEAgPaSAID6kgCAs+UfAP6SAIACkwCABpMAgAqTAIC27R8Ate0fAA6TAIC7NR4AuiEeABKTAIAWkwCAv3EeAL4RHgC9GR4AvCUeAIJpAACjoR8AgFkAAIFRAACmqR8AGpMAgB6TAIClqR8AqmUeAKtxHgCGAAQAh+wBAK5VHgCvNR4ArGEeAK1dHgCoMR4AqTEeAKpBHgCrQR4ArEEeAK1JHgCucR4Ar3EeACKTAIAmkwCAKpMAgC6TAIAykwCANpMAgDqTAIA+kwCAuCkBALkpAQC6OQEAuzUBALwtAQC90QAAvtEAAL/RAACwyQEAsckBALLZAQCz2QEAtMkBALXJAQC2GQEAtxkBALPJHQBCkwCARpMAgEqTAIBOkwCAtskdALXJHQBSkwCAuw0CALoNAgBWkwCAWpMAgL8NAgC+DQIAvQ0CALwNAgBekwCAo40dAGKTAIBmkwCApo0dAGqTAIBukwCApY0dAKpJAgCrSQIAcpMAgHaTAICuSQIAr0kCAKxJAgCtSQIAgA0AAIERAACCEQAAepMAgO/MAgB+kwCAgpMAgISQAgDjLAIAvigDAOHYAQCKkwCAhhAEAIfUAwCOkwCAkpMAgLNhAwCWkwCAmpMAgJ6TAICikwCAtnkDALVxAwCmkwCAu10DALpdAwCqkwCArpMAgL/hAAC++QAAvfEAALz5AACjoQIAspMAgLaTAIC6kwCAvpMAgKa5AgClsQIAwpMAgKudAgCqnQIAxpMAgMqTAICvIQEArjkBAK0xAQCsOQEAzpMAgNKTAIDvZB8A1pMAgNqTAIDekwCA4pMAgOaTAICADQAAgREAAIIVAADqkwCA4eAcAO6TAIDjiB8A8pMAgISAAgC+jAUAh0gFAIYsBAD6kwCA/pMAgO+kHgDv9B4A4QAeAOFQHwDjLB4A47AeAAKUAIAGlACACpQAgA6UAIASlACAFpQAgISEBACzcQEAGpQAgLUdAQC2FQEAHpQAgCKUAIAmlACAugEBALsBAQC89QAAvf0AAL71AAC/7QAAqK0GAKm9BgCqtQYAq8kGAKzZBgCt2QYArskGAK/BBgAqlACALpQAgDKUAIA2lACAOpQAgD6UAIBClACARpQAgLhtBwC5BQcAug0HALsBBwC8AQcAvQEHAL4BBwC/AQcAsIkGALGJBgCybQcAs2UHALR9BwC1ZQcAtmUHALdVBwCGkwCAozkGAEqUAID2kwCApl0GAE6UAIBSlACApVUGAKpJBgCrSQYAVpQAgFqUAICuvQcAr6UHAKy9BwCttQcAgG0AAIEJAACCGQAAXpQAgGKUAIC+nAMAZpQAgGqUAICGQAAAh2AAAG6UAIBylACAdpQAgHqUAIB+lACAgpQAgKiRBgCpkQYAqrkGAKu5BgCsqQYArakGAK7ZBgCv2QYAhpQAgIqUAICOlACAkpQAgJaUAICalACAnpQAgKKUAIC4cQEAuXEBALpxAQC7cQEAvNkBAL3BAQC+wQEAv/UBALCxBgCxuQYAsokGALOJBgC0UQEAtVEBALZRAQC3UQEAszEGAKaUAICqlACArpQAgLKUAIC2KQYAtSEGALaUAIC7fQYAunUGALqUAIC+lACAv5UBAL6VAQC9XQYAvF0GAMKUAICjdQYAxpQAgMqUAICmbQYAzpQAgNKUAIClZQYAqjEGAKs5BgCErAEAvqABAK7RAQCv0QEArBkGAK0ZBgCo3QIAqe0CAKrlAgCr/QIArOUCAK3tAgCu5QIArz0DANqUAIDelACA4pQAgL5kDADmlACA6pQAgO6UAIDylACAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+VAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDAIFVAwCASQMAs2UCAIJVAwC1ZQIA9pQAgPqUAIC2ZQIAhgAMAIfkAwC7gQMAuokDAL2BAwC8mQMAv4EDAL6JAwCjLQIA/pQAgAKVAIAGlQCACpUAgKYtAgClLQIADpUAgKvJAwCqwQMAEpUAgBaVAICvyQMArsEDAK3JAwCs0QMA49gGAOGsBwDhnAYA45wGABqVAICEWA0AHpUAgCKVAIAmlQCAKpUAgC6VAIAylQCA7xwBADaVAIA6lQCA70AGAIB5AACBFQAAghEAAIQADAA+lQCA46wAAEKVAIDhpAEASpUAgO9wAACGyAwAh6QNAE6VAIBSlQCAVpUAgFqVAIC6yQUAu8kFALilBQC5zQUAvvkFAL/5BQC8zQUAvcUFALKlBQCzrQUAsBEGALERBgC2rQUAt50FALS1BQC1rQUAqmEGAKthBgConQYAqZUGAK5hBgCvYQYArHEGAK1xBgBelQCAYpUAgGaVAIBqlQCAbpUAgHKVAIC+sAwAdpUAgKghDgCpIQ4AqiEOAKs9DgCsJQ4ArS0OAK4lDgCviQ4ARpUAgHqVAIB+lQCAgpUAgIaVAICKlQCAjpUAgJKVAIC4UQ8AuV0PALpVDwC7bQ8AvHUPAL19DwC+dQ8Av2kPALD5DgCxoQ4AsqEOALOhD
gC0oQ4AtakOALaRDgC3kQ4As6kOAJaVAIDWlACAmpUAgJ6VAIC2rQ4Ata0OAKKVAIC7ZQ4Auj0OAKaVAICqlQCAv20OAL5lDgC9dQ4AvHUOAIIZAACj7Q4AgGUAAIEZAACm6Q4ArpUAgLKVAICl6Q4AqnkOAKshDgC2lQCAupUAgK4hDgCvKQ4ArDEOAK0xDgCoYQ4AqXUOAKp9DgCrdQ4ArG0OAK31DgCu/Q4Ar/UOAIaAAQCHpAEAvpUAgMKVAIDGlQCAypUAgM6VAIDSlQCAuHUBALl9AQC6dQEAu8kBALzdAQC9xQEAvsUBAL/1AQCwjQ4AsZUOALKdDgCzkQ4AtFUBALVdAQC2VQEAt00BALP1DgDWlQCA2pUAgN6VAIDilQCAtnUOALXlDgDmlQCAu1EOALpJDgDqlQCA7pUAgL+ZAQC+kQEAvUUOALxJDgDylQCAo7EOAPaVAID6lQCApjEOAP6VAIAClgCApaEOAKoNDgCrFQ4ABpYAgAqWAICu1QEAr90BAKwNDgCtAQ4AqO0CAKktAwCqJQMAqz0DAKwlAwCtLQMAriUDAK+ZAwAOlgCAEpYAgBaWAIAalgCAHpYAgCKWAIC+dAIAKpYAgLiNAwC5kQMAupEDALulAwC8vQMAvXUAAL59AAC/dQAAsOkDALHpAwCy+QMAs/EDALTZAwC12QMAtrkDALe1AwCArQAAgbUAAIK9AACzoQMALpYAgLWhAwC2oQMAMpYAgITgAgA2lgCAuiEDALshAwC8IQMAvSkDAL4RAwC/EQMAo+0DAIXABACFtG8AOpYAgD6WAICm7QMApe0DAEKWAICrbQMAqm0DAIZIBQCHbAMAr10DAK5dAwCtZQMArG0DAEaWAIDjAA4A71hsAOG0DwBKlgCATpYAgFKWAIBWlgCAoakDAKD9DwCjwQMAog0DAOHgAwDv4A8A4+QDAFqWAIBelgCAYpYAgIQEBAC+BAQAZpYAgO+UAwBqlgCAbpYAgHKWAIDj1AMAdpYAgOFUAAB6lgCAfpYAgIKWAICGlgCAgA0AAIEVAACCHQAAipYAgI6WAICSlgCAj5EbAO+cDgCE4AcA4dQOAJqWAIDj8A4AnpYAgKKWAICGGAcAh5AEAJnlFwCY5RcAm+kLAJo5CwCd/QoAnPELAJ9VDwCeXQ8AkSkfAJDNGwCTJR8Aks0fAJXREwCUKRMAlxkXAJZ1EwCM4RAAjSUQAI4tEACP+QwAJpYAgJaWAICKORQAi5UUAITpGACFBRgAhuUYAIfxFACmlgCAqpYAgIIxHACDFRwAnKkEAK6WAICylgCAtpYAgLqWAIC+lgCAmtEEAJt9BACUTQ0AleUIAJblCACXtQgAwpYAgMaWAICSWQwAk1kMAKGRAADKlgCAowF8AKKZAACluXwApJF8AKeZeACm4X0AqYF5AKiheACriXQAqgF0AK0BcACsWXQAr4VwAK6dcACx4WwAsAFsALMBaACyHWwAtfVoALT1aADOlgCA0pYAgNaWAIDalgCA3pYAgOKWAIDmlgCA6pYAgO6WAIDylgCAqD0HAKmVBwCqlQcAq6kHAKzdBwCtxQcArsUHAK8dBgD2lgCAgh0AAIEdAACAHQAA+pYAgP6WAIAClwCAvmABALgZBgC5GQYAuikGALslBgC8IQYAvSEGAL4hBgC/IQYAsHEGALFxBgCycQYAs3EGALRNBgC1NQYAtj0GALctBgCzHQcACpcAgIYoAACHqAAADpcAgLZFBwC1VQcAEpcAgLu1BgC6tQYAFpcAgBqXAIC/8QYAvokGAL2lBgC8pQYAHpcAgKNZBwAilwCAJpcAgKYBBwAqlwCALpcAgKURBwCq8QYAq/EGADKXAIA2lwCArs0GAK+1BgCs4QYAreEGAKipBQCptQUAqr0FAKs9AgCsJQIArVECAK5RAgCvUQIAOpcAgD6XAIBClwCARpcAgIQ8AwBKlwCATpcAgFKXAIC4pQIAua0CALqlAgC7vQIAvKUCAL2tAgC+pQIAv30DALAxAgCxMQIAshkCALMZAgC09QIAta0CALalAgC3nQIAVpcAgFqXAIBelwCAszkFAGKXAIC1oQIAtt0CAGaXAIBqlwCAbpcAgLr5AgC7+QIAvMECAL3BAgC+PQIAv2UCAHKXAICmgQIApf0CAHqXAICjZQUAvlh8AIbYfACHnHwArzkCAK5hAgCtnQIArJ0CAKulAgCqpQIAfpcAgIKXAICohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIGFAQCAhQEAhpcAgILtAQCKlwCAjpcAgJKXAICWlwCAuHUBALl9AQC6dQEAu80BALzVAQC93QEAvskBAL/BAQCwtQIAsb0CALKBAgCzgQIAtFEBALVRAQC2UQEAt1EBAJqXAICelwCAopcAgKaXAIDhMAYA4WQHAOMoBgDjxAYAhCB9AKqXAIDvbAAA7xgGAK6XAICylwCAtpcAgLqXAICzXQIAvkh8AL6XAIDClwCAxpcAgLYVAgC1dQIAypcAgLs5AgC6MQIAzpcAgNKXAIC/1QEAvtUBAL0VAgC8FQIAo519AHaXAIDWlwCA2pcAgN6XAICm1X0ApbV9AOKXAICr+X0AqvF9AOaXAIDqlwCArxV+AK4VfgCt1X0ArNV9AIBNAACBVQAAglUAALOxfgDulwCAtWV/ALZtfwDylwCAhkADAIcEAwC66X8Au+l/ALz5fwC9+X8Avt1/AL/NfwD2lwCA+pcAgAaXAID+lwCAApgAgAaYAIAKmACADpgAgKhtfgCpXX4AqlV+AKuFfwCsgX8ArYF/AK6BfwCvgX8AsEF/ALFBfwCyQX8As0F/ALR1fwC1ZX8Atm1/ALdlfwC4XX8AuS1/ALolfwC7PX8AvC1/AL0dfwC+FX8Av/UAAKP9fwASmACAFpgAgBqYAIAemACApiF+AKUpfgAimACAq6V+AKqlfgAmmACAKpgAgK+BfgCukX4ArbV+AKy1fgAumACAMpgAgDaYAIA6mACAPpgAgEKYAIBGmACASpgAgIA9AACBCQAAghkAAE6YAIBSmACAhLgBAL6wAQBWmACAqK0BAKnVAQCq1QEAqw0BAKwVAQCtGQEArgkBAK8JAQCGAAQAhwQBAFqYAIBemACAYpgAgGaYAIBqmACAbpgAgLjtAAC5hQAAuo0AALuFAAC8nQAAvYUAAL6NAAC/hQAAsHkBALF5AQCy7QAAs+UAALT9AAC15QAAtuUAALfVAACzXQIAcpgAgHaYAIB6mACAfpgAgLaZAgC1nQIAgpgAgLu9AgC6vQIAhpgAgIqYAIC/IQMAvjkDAL0xAwC8OQMAvigDAKMZAgCOmACAkpgAgKbdAgCWmACAmpgAgKXZAgCq+QIAq/kCAJ6YAICimACArn0DAK9lAwCsfQMArXUDAL7IBACmmACAqpgAgL7EBQCumACAspgAgLaYAIC6mACAgD0AAIEJAACCGQAAvpgAgMKYAICEOAMAypgAgM6YAIDveAIA0pgAgIZIBACHVAMA1pgAgNqYAIDe
mACA4pgAgOaYAIDqmACA7pgAgPKYAIDjVAIA9pgAgOFAAQD6mACA/pgAgOMkfwACmQCA4Zx8AAaZAIAKmQCADpkAgBKZAICEbAUAFpkAgBqZAIAemQCAIpkAgO8YfwAmmQCAKpkAgLPxAgAumQCAMpkAgDqZAIA+mQCAtukCALXhAgBCmQCAu3EBALppAQCHoAUAhswEAL85AQC+WQEAvVEBALxhAQDhQH8ARpkAgOM4fgCEwAQAgtkAAO8UAACApQAAgdkAAEqZAIDjwAAATpkAgOHUAQBSmQCAVpkAgO+EfgBamQCAqs0BAKvVAQBemQCAYpkAgK79AQCvnQEArMUBAK31AQBmmQCAo1UCAGqZAIBumQCApk0CAHKZAIB2mQCApUUCAMaYAIA2mQCAepkAgH6ZAICCmQCAhpkAgIqZAICOmQCAqJkGAKmZBgCq7QYAq/0GAKzlBgCt7QYAruUGAK/dBgCwpQYAsa0GALKlBgCzuQYAtK0GALVVBwC2UQcAt00HALh1BwC5fQcAunUHALtJBwC8WQcAvVkHAL5JBwC/RQcAs0UGAJKZAICWmQCAmpkAgJ6ZAIC2TQYAtU0GAKKZAIC7SQYAukEGAIYIAACHjAAAv7EHAL5JBgC9TQYAvFEGAIJdAACjAQYAgEUAAIFdAACmCQYAqpkAgK6ZAIClCQYAqgUGAKsNBgCymQCAtpkAgK4NBgCv9QcArBUGAK0JBgCoTQYAqVUGAKpVBgCriQYArLEGAK29BgCuqQYAr6kGAKaZAIC6mQCAvpkAgMKZAIDGmQCAypkAgM6ZAIDSmQCAuEkBALlJAQC6WQEAu1kBALxJAQC9SQEAvt0BAL/VAQCw3QYAsa0GALKlBgCzjQYAtJkGALWZBgC2jQYAt4UGALPdBgDWmQCA2pkAgN6ZAIDimQCAtj0GALU5BgDmmQCAu2kGALoZBgDqmQCA7pkAgL9dBgC+XQYAvVkGALxxBgDymQCAo5kGAPaZAID6mQCApnkGAP6ZAIACmgCApX0GAKpdBgCrLQYABpoAgAqaAICuGQYArxkGAKw1BgCtHQYAqNUCAKndAgCq4QIAq+ECAKw1AwCtPQMArjUDAK8tAwCAzQMAgQkAAIIZAAAOmgCAEpoAgIQYAgC+dAMAGpoAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsFUDALFdAwCyVQMAs+kDALT5AwC1+QMAtukDALfhAwCGIAwAhxADAB6aAIAimgCAJpoAgCqaAIAumgCA71wCADKaAIDhFAAANpoAgOOIAgC++AwAOpoAgD6aAIBCmgCAu/kDALrxAwC+gA0ARpoAgL9dAwC+XQMAvV0DALzhAwCzCQIASpoAgE6aAIBSmgCAVpoAgLbdAwC13QMAWpoAgKipBgCpqQYAqrkGAKu5BgCsqQYArakGAK4dBQCvFQUAXpoAgGKaAIBmmgCAapoAgG6aAIBymgCAdpoAgHqaAIC4GQUAuS0FALolBQC7yQUAvNkFAL3FBQC+zQUAv8UFALBtBQCxdQUAsnUFALNFBQC0XQUAtT0FALY1BQC3KQUA4fQGAOFUBwDjFAYA47wGAIEJAACAqQAAfpoAgII5AACE7A0AgpoAgIeIDACGDAwAipoAgI6aAIDvzAcA78QHAKMpAwCSmgCAlpoAgJqaAICemgCApv0CAKX9AgCimgCAq9kCAKrRAgCmmgCAqpoAgK99AgCufQIArX0CAKzBAgCoPQ4AqY0OAKqFDgCrnQ4ArIUOAK2NDgCuuQ4Ar7UOAIaaAICumgCAspoAgLaaAIC6mgCAvpoAgMKaAIDGmgCAuL0OALllDwC6bQ8Au2UPALx9DwC9ZQ8Avm0PAL9lDwCw1Q4Asd0OALLVDgCzoQ4AtJUOALWdDgC2lQ4At40OALMNDgDKmgCAzpoAgNKaAIDWmgCAtg0OALUNDgDamgCAuxkOALoRDgDemgCAFpoAgL9ZDgC+UQ4AvXUOALwBDgDimgCAo0kOAOaaAIDqmgCApkkOAO6aAIDymgCApUkOAKpVDgCrXQ4AhKQDAPaaAICuFQ4Arx0OAKxFDgCtMQ4AqLEOAKmxDgCqzQ4Aq8UOAKzdDgCtxQ4ArsUOAK/1DgCA7QEAgfEBAILxAQD6mgCAhpABAIe0AQD+mgCAApsAgLjFAQC5zQEAusUBALvdAQC8zQEAvf0BAL6ZAQC/lQEAsI0OALFBAQCyQQEAs0EBALRBAQC1QQEAtkEBALdBAQCzRQ4ABpsAgAqbAIAOmwCAEpsAgLZFDgC1VQ4AFpsAgLuFAQC6SQ4AGpsAgB6bAIC/hQEAvoUBAL2VAQC8lQEAIpsAgKMBDgAmmwCAKpsAgKYBDgAumwCAMpsAgKURDgCqDQ4Aq8EBADabAIA6mwCArsEBAK/BAQCs0QEArdEBAKgtAwCpPQMAqjUDAKuJAwCsmQMArZkDAK6JAwCvgQMAPpsAgEKbAIBGmwCASpsAgE6bAIBSmwCAVpsAgFqbAIC4rQMAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALDJAwCxyQMAsqkDALOlAwC0vQMAtaEDALahAwC3lQMAgL0AAIEJAACCGQAAXpsAgGKbAIC+2AMAapsAgG6bAICErAIAcpsAgIfoAwCGDAQAdpsAgHqbAIB+mwCAgpsAgLP9AwCGmwCAipsAgI6bAICSmwCAtlkDALVRAwCWmwCAu00DALpNAwCamwCAnpsAgL8lAwC+OQMAvTEDALw9AwCimwCAppsAgKqbAICumwCA71gPALKbAIC2mwCAupsAgOOQDgC+mwCA4bAPAMKbAIDGmwCAypsAgM6bAIDSmwCAgHUAAIF9AACCdQAAhBgFAO88AwDamwCAvhQFAN6bAIDj0AMA4psAgOFAAADmmwCAhtAEAIdYBQDqmwCA7psAgPKbAID2mwCA+psAgP6bAIACnACABpwAgAqcAIDvrA8AhOwEAOEQDgAOnACA41QBABKcAIAWnACAGpwAgB6cAICj/QIAIpwAgCacAIAqnACALpwAgKZZAgClUQIAMpwAgKtNAgCqTQIANpwAgDqcAICvJQIArjkCAK0xAgCsPQIAqJkGAKmZBgCqrQYAq70GAKylBgCtrQYArqUGAK/ZBgDWmwCAghEAAIEZAACAwQcAPpwAgEKcAIC+cAMARpwAgLhJBwC5SQcAul0HALtVBwC8TQcAvXEHAL51BwC/bQcAsKkGALGpBgCyuQYAs7EGALSZBgC1mQYAtnkHALd5BwC1NQYASpwAgE6cAIC2NQYAhjAAAIdcAwCzPQYAUpwAgL19BgC8dQYAv0UGAL5FBgBmmwCAVpwAgLt1BgC6dQYAo2UGAFqcAIBenACAYpwAgGacAICmbQYApW0GAGqcAICrLQYAqi0GAG6cAIBynACArx0GAK4dBgCtJQYArC0GAKhVBgCpWQYAqm0GAKthBgCsaQYArWkGAK6ZBgCvmQYAdpwAgHqcAIB+nACAgpwAgIacAICKnACAjpwAgJKcAIC4+QY
AufkGALqNBgC7hQYAvJ0GAL2FBgC+hQYAv7UGALDpBgCx6QYAsvkGALP5BgC06QYAtd0GALbJBgC3yQYAs+UGAJacAICanACAnpwAgKKcAIC26QYAteEGAKacAIC7LQYAui0GAKqcAICunACAvxkGAL4tBgC9LQYAvC0GAIIVAACjoQYAgGEAAIFhAACmrQYAspwAgL6QAQClpQYAqmkGAKtpBgCEpAEAupwAgK5pBgCvXQYArGkGAK1pBgCohQIAqY0CAKqVAgCruQIArNUCAK3dAgCu1QIAr80CAIaAHACHZAMAvpwAgL5gAwDCnACAxpwAgMqcAIDOnACAuHUDALl9AwC6dQMAu8kDALzZAwC92QMAvskDAL/BAwCwvQIAsY0CALKFAgCzTQMAtFUDALVdAwC2VQMAt00DALMdAgDSnACAhAgDANacAIDanACAtl0CALVdAgDenACAu0kCALp5AgDinACA5pwAgL+ZAwC+kQMAvZkDALxRAgCwAAAAo1kCAOqcAIDunACAphkCAPKcAID2nACApRkCAKo9AgCrDQIA+pwAgP6cAICu1QMAr90DAKwVAgCt3QMAAp0AgAadAIAKnQCA76wGAA6dAIASnQCAFp0AgBqdAIC+6BwAHp0AgCKdAIAqnQCALp0AgOGABwAynQCA42AGAIBdAACBYQAAgmEAALN9AQA2nQCAtW0BALZlAQA6nQCAhiAdAIdYHQC6+QEAu/EBALzZAQC92QEAvrEBAL+xAQDvoAAAPp0AgEKdAIBGnQCASp0AgE6dAIBSnQCA71wBAIRsHADhzAYAVp0AgOMcBgDjSAAAWp0AgOEwAQBenQCAo/EBAGKdAICFABQAZp0AgGqdAICm6QEApeEBAG6dAICrfQEAqnUBAHKdAIB2nQCArz0BAK49AQCtVQEArFUBAKjtHQCpLR4AqjkeAKs5HgCsKR4ArSkeAK6dHgCvkR4AJp0AgHqdAIB+nQCAgp0AgIadAICC+QAAgfEAAID9AAC4qR4AuakeALpJHwC7SR8AvFkfAL1FHwC+TR8Av0UfALDxHgCx+R4AssEeALPBHgC0uR4AtbkeALatHgC3pR4AsBEfALERHwCyER8AsyUfALQlHwC1KR8Atl0fALdRHwC4cR8AuXkfALpBHwC7QR8AvJUAAL2dAAC+lQAAv40AAIqdAIC2nACAjp0AgJKdAICWnQCAmp0AgIb4AwCH0AAAqM0fAKnVHwCq0R8Aq70fAKytHwCtcR8ArnEfAK9xHwCzOR4Anp0AgKKdAICmnQCAqp0AgLaRHgC1RR4Arp0AgLu1HgC6tR4Asp0AgLadAIC/jR4AvoEeAL2RHgC8pR4Aup0AgKN9HgC+nQCAwp0AgKbVHgDGnQCAyp0AgKUBHgCq8R4Aq/EeAM6dAIDSnQCArsUeAK/JHgCs4R4ArdUeAKhVAQCpgQAAqoEAAKuBAACsgQAArYkAAK6xAACvsQAA1p0AgNqdAIDenQCA4p0AgOadAIDqnQCA7p0AgPKdAIC4ZQAAuW0AALplAAC7fQAAvGUAAL1tAAC+ZQAAv90DALChAACxrQAAsqUAALO5AAC0qQAAtZ0AALaVAAC3XQAA9p0AgIIdAACBHQAAgB0AAPqdAID+nQCAAp4AgL4UAgAKngCAhKgCAA6eAIASngCAFp4AgBqeAIAengCAjwAAALNJAwAingCAhugEAIesAgAmngCAtkkDALVJAwAqngCAuykDALolAwAungCAMp4AgL8ZAwC+LQMAvS0DALwxAwA2ngCAo40DADqeAIA+ngCApo0DAEKeAIBGngCApY0DAKrhAwCr7QMASp4AgE6eAICu6QMAr90DAKz1AwCt6QMAvoQDAFKeAIBWngCAWp4AgF6eAIBingCAZp4AgGqeAICAPQAAgQkAAIIZAABungCAcp4AgHqeAICENAMAfp4AgLMtAQCCngCAh8wCAIZMBQCGngCAti0BALUtAQCKngCAu0kBALp5AQCOngCAkp4AgL+9AQC+vQEAvbkBALxRAQDheB8Alp4AgOPQHwCangCAnp4AgOGUAQCingCA42gDAKaeAICqngCArp4AgO+IAwCyngCAtp4AgO+sHwC6ngCAvp4AgMKeAIDGngCAyp4AgM6eAIDSngCA1p4AgO9EHgDangCA4dweAN6eAIDjHB4A4p4AgOqeAIDungCA8p4AgIFpAACAZQAAo+UBAIJ9AACl5QEA9p4AgIQUBACm5QEAvigEAPqeAICrgQEAqrEBAK1xAQCsmQEAr3UBAK51AQCoIQYAqS0GAKolBgCrPQYArCUGAK0tBgCuXQYAr00GAHaeAIDmngCAhggDAIeMAwD+ngCAAp8AgAafAIAKnwCAuOkGALnpBgC6jQYAu4UGALydBgC9hQYAvo0GAL+FBgCwPQYAsQ0GALIFBgCz7QYAtPkGALX5BgC27QYAt+UGALDNBwCx1QcAstEHALPtBwC09QcAtf0HALbpBwC36QcAuN0HALklBwC6LQcAuyUHALw9BwC9JQcAvi0HAL8lBwAOnwCAEp8AgAaeAIAWnwCAGp8AgB6fAIAinwCAJp8AgKgVBgCpGQYAqu0HAKv9BwCs7QcArd0HAK7VBwCvuQcAswUGACqfAIAunwCAMp8AgDafAIC2PQYAtQUGADqfAIC7cQYAumkGAD6fAIBCnwCAv1kGAL5RBgC9WQYAvGUGAEafAICjQQYASp8AgE6fAICmeQYAUp8AgIS0AQClQQYAqi0GAKs1BgC+gAEAWp8AgK4VBgCvHQYArCEGAK0dBgCoNQYAqT0GAKo1BgCrWQYArHUGAK2lAQCurQEAr6UBAIDpAACB6QAAgv0AAL8kAQCGMA8Ah+QAAF6fAIBinwCAuMUAALnNAAC6xQAAu90AALzNAAC9/QAAvvUAAL+dAACw3QEAsSUBALItAQCzIQEAtCEBALUhAQC2IQEAtyEBALvBAgC6OQIAZp8AgGqfAIC/xQIAvsUCAL3VAgC82QIAs50FAG6fAIBynwCAdp8AgIwAAAC2BQIAtd0FAHqfAICqfQIAq4UCAH6fAICCnwCAroECAK+BAgCsnQIArZECAIafAICj2QUAip8AgI6fAICmQQIAkp8AgJafAIClmQUAgpFqAIORagCanwCAnp8AgIa5FgCH6RcAhBEWAIWZFgCKoRIAi6ESAKKfAICmnwCAjpEeAI9ZHgCMmRMAjREeAJJxGgCT5RoAqp8AgO/oJACW8QYAlwUGAJTlGgCVGQYAmikCAJvFAgCunwCAsp8AgLafAIDhKBsAnN0CAOMgDwCfIQcAnsEHAJ01GwCcLRsAm6EbAJr5HwCZOR8AmLEfAJcBEgCWIRMAlSkTAJRRFgCTGRcAkjEXAJGxFwCQKWsAj1FrAOOsBwCEBA0A4RwHAIANAACBNQAAgj0AALqfAIC+nwCAwp8AgL4gDQDKnwCAzp8AgO9MBwCGWAwAh2ANANKfAIDWnwCA2p8AgN6fAICEXA8A4p8AgO8IAADvhAYA4ZABAOGwBgDj4AAA42
QGAOafAIDqnwCA7p8AgPKfAID2nwCA+p8AgL4ADwCEQA4A/p8AgAKgAIAGoACACqAAgA6gAIASoACAFqAAgBqgAICj1QMAotUDAKExAwCgLQcAVp8AgMafAIAeoACAIqAAgCagAICCmQAAgZEAAICZAACoTQ0AqZ0NAKqVDQCrJQ4ArD0OAK0RDgCuEQ4ArxEOALB9DgCxDQ4AsgUOALMtDgC0OQ4AtTkOALYtDgC3JQ4AuOkOALnpDgC6wQ4Au8EOALy5DgC9nQ4AvpUOAL+NDgCzPQ0AKqAAgC6gAIAyoACANqAAgLaxDgC1lQ4AOqAAgLvpDgC6mQ4AhogAAIfkAAC/3Q4Avt0OAL3ZDgC88Q4APqAAgKN5DQC+hAEAhIAGAKb1DgBCoACARqAAgKXRDgCq3Q4Aq60OAEqgAIBOoACArpkOAK+ZDgCstQ4ArZ0OALIFNQCzGTQAsG0wALENNQBSoACAVqAAgLQBKAC1PSkAWqAAgF6gAIBioACAZqAAgGqgAIBuoACAcqAAgHagAICiRQEAo9UBAHqgAIChTQEAps0FAKcBOACkAQQApX0FAKoBPACrRT0AqEk5AKnlOQCudTEAr30xAKxdPQCtATAAqO0OAKn1DgCqCQ4AqwkOAKwZDgCtGQ4Arg0OAK8tDgB+oACAgqAAgIagAICKoACAjqAAgJKgAICWoACAmqAAgLgdDgC5JQ4Aui0OALslDgC8PQ4Avd0BAL7VAQC/zQEAsFUOALFdDgCyVQ4Asy0OALQ1DgC1JQ4Ati0OALclDgCzgQ0AnqAAgKKgAICqoACArqAAgLaZDQC1kQ0AvlQEALuZDQC6kQ0AhogEAIe8AwC/4Q0AvvENAL35DQC8gQ0AgkkAAKPFDQCA9QMAgUkAAKbdDQCyoACAtqAAgKXVDQCq1Q0Aq90NALqgAIC+oACArrUNAK+lDQCsxQ0Arb0NAKgdAgCpRQIAql0CAKtVAgCseQIArXkCAK6JAwCviQMAwqAAgMagAIDKoACAzqAAgIT8BQDSoACA1qAAgNqgAIC4iQMAuWUDALptAwC7ZQMAvH0DAL1lAwC+bQMAv2UDALDBAwCxwQMAssEDALPBAwC0wQMAtcEDALbBAwC3wQMA3qAAgOKgAIDmoACA6qAAgO6gAIDhpAEA8qAAgOPADgC+aAQA9qAAgPqgAIDvHAEA/qAAgAKhAIAGoQCACqEAgLOVAwAOoQCAEqEAgBqhAIAeoQCAtrkDALWxAwAioQCAu0UCALpFAgCGqAQAh6QFAL9FAgC+RQIAvVUCALxVAgDh4A4A4SwMAOMIDgDj1A4AgK0AAIHRAACC0QAAJqEAgCqhAIAuoQCAMqEAgDahAIA6oQCAPqEAgO+IDgDvLA4AoxUDAEKhAICFxCsARqEAgEqhAICmOQMApTEDAE6hAICrxQIAqsUCAFKhAIBWoQCAr8UCAK7FAgCt1QIArNUCAKgNBgCpFQYAql0GAKtVBgCseQYArXkGAK65BgCvuQYAFqEAgFqhAIBeoQCAYqEAgGahAIBqoQCAbqEAgHKhAIC4TQcAuVUHALpRBwC7aQcAvHkHAL1lBwC+bQcAv2UHALDJBgCxyQYAst0GALPVBgC0zQYAtXUHALZ9BwC3dQcAs9UGAHahAIB6oQCAfqEAgIKhAIC2+QYAtfEGAIahAIC7DQYAug0GAIYIAACHLAAAv7EHAL4JBgC9AQYAvAkGAIJRAACjkQYAgEEAAIFBAACmvQYAiqEAgI6hAICltQYAqkkGAKtJBgCSoQCAlqEAgK5NBgCv9QcArE0GAK1FBgCwsQYAsbEGALLNBgCzwQYAtMEGALXJBgC28QYAt/EGALgFAQC5DQEAugUBALsdAQC8BQEAvQ0BAL4FAQC/uQEAmqEAgJ6hAICioQCApqEAgKqhAICuoQCApqAAgLKhAICoLQYAqTUGAKo1BgCr8QYArNEGAK3RBgCu0QYAr9EGALPdBgC2oQCAuqEAgL6hAIDCoQCAtjEGALU5BgDGoQCAuxUGALoVBgDKoQCAzqEAgL9tBgC+ZQYAvXUGALx5BgDSoQCAo5kGANahAIDaoQCApnUGAN6hAIDioQCApX0GAKpRBgCrUQYA5qEAgOqhAICuIQYArykGAKw9BgCtMQYAqNUCAKndAgCq4QIAq+ECAKxRAwCtUQMArlEDAK9RAwDuoQCA8qEAgL7sAwD6oQCA/qEAgAKiAIAGogCACqIAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsDEDALExAwCyNQMAs+kDALT5AwC1+QMAtukDALfhAwCAbQMAgaUAAIKtAACzZQIADqIAgLXVAwC23QMAEqIAgITgAgAWogCAuvkDALv5AwC87QMAvTEDAL4xAwC/MQMAh+wDAIZkPACyAAAAGqIAgB6iAIDjCAQAIqIAgOHsBgAmogCA7wAGACqiAIAuogCAMqIAgDaiAIA6ogCAPqIAgEKiAIBGogCASqIAgE6iAIDjoAMAUqIAgOGoAQBWogCA7/ADAIIdAACBHQAAgB0AAFqiAIBeogCAYqIAgGqiAIC+TD0AbqIAgKOhAwC+QDwApRECAHKiAIB2ogCAphkCAIRsAgB6ogCAqz0CAKo9AgCt9QIArCkCAK/1AgCu9QIAhkA8AIe0PQB+ogCAgqIAgIaiAICKogCAjqIAgO9EBgCSogCA4dQGAJaiAIDjDAcAmqIAgJ6iAICiogCApqIAgLP1AQCqogCArqIAgLKiAIC2ogCAtkUBALXlAQC6ogCAuzEBALopAQC+ogCAwqIAgL8dAQC+HQEAvRkBALwlAQCoLT4AqTU+AKo9PgCrNT4ArC0+AK2FPgCuhT4Ar7k+AGaiAIDGogCAyqIAgM6iAICAGQAAgRkAAIIFAADSogCAuLk+ALm5PgC6ST8Au0k/ALxZPwC9WT8Avk0/AL9BPwCwrT4AsbU+ALKxPgCzjT4AtJk+ALWZPgC2iT4At4k+AKO1PgCEjAIA1qIAgNqiAIDeogCApgU+AKWlPgDiogCAq3E+AKppPgCGCAAAh2gDAK9dPgCuXT4ArVk+AKxlPgDmogCAs5E/AOqiAIDuogCAtlk/APKiAID2ogCAtbk/ALp1PwC7fT8A+qIAgP6iAIC+QT8Av0E/ALxZPwC9VT8AsJU+ALGdPgCyqT4As6U+ALShPgC1oT4AtqE+ALehPgC45T4Aue0+ALrlPgC7/T4AvO0+AL3dPgC+1T4AvxkBAAKjAIAGowCACqMAgA6jAIASowCA9qEAgBajAIAaowCAqF0+AKkhPgCqPT4AqzU+AKwVPgCt/T4ArvU+AK/tPgCj1T4AHqMAgCKjAIAmowCAKqMAgKYdPgCl/T4ALqMAgKs5PgCqMT4AMqMAgDajAICvBT4ArgU+AK0RPgCsHT4AgREAAIANAAA6owCAghkAAD6jAIBCowCAhJQBAL4QAACGQAcAhwABAEqjAIBOowCAUqMAgFajAIBaowCAXqMAgKiNAgCplQIAqpUCAKvNAgCs2QIArdkCA
K7NAgCvxQIAYqMAgGajAIBqowCAbqMAgIwAAAByowCAdqMAgHqjAIC4HQMAucEDALrBAwC7wQMAvMEDAL3JAwC+8QMAv/EDALCJAgCxiQIAsikDALMpAwC0OQMAtTkDALYpAwC3JQMAsx0CAH6jAICCowCAhqMAgIqjAIC2WQIAtVECAI6jAIC7TQIAuk0CAJKjAICWowCAv/0DAL79AwC9/QMAvP0DAJqjAICeowCAoqMAgKajAIDhDD4AqqMAgOOoPwCuowCAgT0AAIAxAADvUD8Agh0AALKjAIC++AQAhhgFAIdMAwCEDAIA48wAALqjAIDhvAEAvqMAgMKjAIDGowCAyqMAgM6jAICELAUA0qMAgNajAIDaowCA7xAAAN6jAIDiowCAo90DAOajAIDqowCA7qMAgPKjAICmmQMApZEDAPajAICrjQMAqo0DAPqjAID+owCArz0CAK49AgCtPQIArD0CAAKkAIAGpACACqQAgA6kAIASpACAFqQAgBqkAIDvKD4AHqQAgOE8PgAipACA4zgBAIApAACBFQAAghEAACqkAICzMQIAvsgEAITABAAupACAMqQAgLYpAgC1IQIANqQAgLvNAQC6zQEAOqQAgD6kAIC/dQEAvskBAL3BAQC8yQEAqOkFAKnpBQCq+QUAq/kFAKzpBQCt6QUArjkGAK85BgC2owCAJqQAgIaIAACHQAMAQqQAgEakAIBKpACATqQAgLjRBgC52QYAuuEGALvhBgC8kQYAvZEGAL6RBgC/kQYAsEkGALFJBgCyXQYAs1UGALRNBgC18QYAtvEGALfxBgCjcQUAUqQAgFakAIBapACAXqQAgKZpBQClYQUAYqQAgKuNBgCqjQYAZqQAgGqkAICvNQYArokGAK2BBgCsiQYAbqQAgLPRBwBypACAdqQAgLbxBwB6pACAfqQAgLXBBwC60QcAu90HAIKkAICGpACAvrkHAL+5BwC8xQcAvbkHALhpBgC5aQYAuokGALuJBgC8mQYAvZkGAL6JBgC/iQYAsBEGALEdBgCyFQYAs2kGALR5BgC1eQYAtmkGALdhBgCoSQYAqVUGAKpdBgCrVQYArE0GAK11BgCucQYAr3EGAEajAICCHQAAgR0AAIAdAACKpACAjqQAgJKkAIC+cAEAo5UGAJqkAICGKAAAh0gBAJ6kAICmtQYApYUGAKKkAICrmQYAqpUGAKakAICqpACAr/0GAK79BgCt/QYArIEGAK6kAICzFQYAsqQAgLakAIC2PQYAuqQAgL6kAIC1NQYAutkBALvZAQDCpACAxqQAgL59AQC/ZQEAvH0BAL11AQCovQUAqckFAKrZBQCr0QUArPkFAK35BQCuKQIArykCAMqkAIDOpACA0qQAgNakAICMAAAA2qQAgN6kAIDipACAuO0CALmFAgC6gQIAu4ECALyFAgC9jQIAvrECAL+xAgCwWQIAsVkCALLtAgCz5QIAtP0CALXlAgC25QIAt9UCAKNRBQDmpACA6qQAgO6kAIDypACApnkFAKVxBQD2pACAq50CAKqdAgD6pACA/qQAgK8hAgCuOQIArTECAKw5AgCBbQAAgG0AAAKlAICCBQAAvlwMAAqlAIAOpQCA79AGAITsAwDhHAUAEqUAgOP8BwAWpQCAGqUAgIbYDACHvAwAqIUCAKmVAgCqlQIAq6UCAKy9AgCt1QIArtECAK/RAgAepQCAIqUAgCalAIAqpQCALqUAgDKlAIA2pQCAOqUAgLh1AQC5fQEAunUBALvJAQC82QEAvdkBAL7JAQC/wQEAsLUCALG9AgCygQIAs4ECALRRAQC1UQEAtlEBALdRAQA+pQCAhAQNAEKlAIBGpQCAvhwMAEqlAIDvHAAA76AGAOGQAQDhRAcA43AGAOOYBgBOpQCAUqUAgFalAIBapQCAs10CAF6lAIBipQCAZqUAgGqlAIC2FQIAtXUCAG6lAIC7OQIAujECAHKlAIB6pQCAv9UBAL7VAQC9FQIAvBUCAKOdDQAGpQCAdqUAgH6lAICCpQCAptUNAKW1DQCGpQCAq/kNAKrxDQCGCAMAh2ADAK8VDgCuFQ4ArdUNAKzVDQCAkQ8AgZkPAIKhDwCzpQ4AiqUAgLWhDgC2eQ8AjqUAgJKlAICWpQCAukUPALtdDwC8RQ8AvU0PAL5FDwC//Q8AqFUOAKldDgCqYQ4Aq30OAKxlDgCttQ8Arr0PAK+1DwCapQCAnqUAgKKlAICmpQCAqqUAgK6lAICypQCAtqUAgLhVDwC5dQ8Aun0PALt1DwC8bQ8AvREPAL4RDwC/EQ8AsM0PALHVDwCy3Q8As9UPALTNDwC1dQ8AtnEPALdxDwCj6Q8AuqUAgL6lAIDCpQCAxqUAgKY1DgCl7Q8AyqUAgKsRDgCqCQ4AzqUAgNKlAICvsQ4ArgkOAK0BDgCsCQ4A1qUAgIIdAACBHQAAgB0AANqlAIDepQCA4qUAgL6UAQCErAEA5qUAgIfgAQCGzAAA6qUAgO6lAIDypQCAlqQAgKhtDgCpiQEAqpkBAKuRAQCswQEArckBAK75AQCv+QEAhKAAAPalAID6pQCA/qUAgAKmAIAGpgCACqYAgA6mAIC4xQAAuc0AALrFAAC73QAAvM0AAL39AAC+9QAAv50AALBBAQCxQQEAskEBALNBAQC0QQEAtUEBALZBAQC3QQEAsxECABKmAIAWpgCAGqYAgB6mAIC2SQIAtUkCACKmAIC7hQIAuoUCACamAIAqpgCAv4UCAL6FAgC9lQIAvJUCAIU8GgCjVQIALqYAgDKmAICmDQIANqYAgDqmAIClDQIAqsECAKvBAgA+pgCAQqYAgK7BAgCvwQIArNECAK3RAgCCGQAARqYAgIAZAACBGQAASqYAgE6mAIBSpgCAWqYAgL4ABABepgCAYqYAgGamAIBqpgCAbqYAgHKmAIB2pgCA7+gOAHqmAICG6AQAh1ADAH6mAICCpgCA74ACAIamAIDhlAEAiqYAgONYAQCOpgCA4wAOAJKmAIDhaA0AlqYAgKhxAgCpcQIAqnECAKupAgCsuQIArbkCAK6pAgCvqQIAhKwFAJqmAICepgCAoqYAgKamAICqpgCArqYAgLKmAIC4bQEAuQ0BALoFAQC7GQEAvAkBAL09AQC+NQEAv9kBALDZAgCx2QIAsm0BALNlAQC0fQEAtWUBALZlAQC3VQEA4WAPAOP0AADjHA4A4bwBALamAICCOQAAgTEAAIA9AAC6pgCAvigEAL6mAIDCpgCAvjwHAO8QAADv0A4AyqYAgIbgBACHyAQAzqYAgLO1AgDSpgCAtX0CALZ1AgDWpgCA2qYAgN6mAIC6UQIAu1ECALz1AQC9/QEAvvUBAL/tAQBWpgCAxqYAgKqxBQCrsQUArBUGAK0dBgCuFQYArw0GAOKmAIDmpgCA6qYAgKNVBQDupgCApZ0FAKaVBQDypgCAs+kGAPamAID6pgCA/qYAgAKnAIC24QYAtekGAAanAIC7sQYAuqEGAAqnAIAOpwCAv50GAL6RBgC9pQYAvKkGAKgd
BgCpIQYAqiEGAKshBgCsIQYArSEGAK4hBgCvIQYAEqcAgBanAIAapwCAHqcAgCKnAIAmpwCAKqcAgC6nAIC45QcAue0HALrlBwC7/QcAvOUHAL3tBwC+5QcAv00HALAlBgCxNQYAsj0GALMxBgC0FQYAtRkGALYNBgC3AQYAo6kHAIIVAACBtQEAgLUBADKnAICmoQcApakHADanAICr8QcAquEHAISgAgA6pwCAr90HAK7RBwCt5QcArOkHAD6nAICzlQYAhugAAIcYAQC2tQYAQqcAgEanAIC1vQYAukkBALtVAQBKpwCATqcAgL45AQC/OQEAvEUBAL05AQCoPQYAqU0GAKpZBgCrUQYArHEGAK1xBgCuuQEAr7kBAISsAQBSpwCAVqcAgFqnAIBepwCAYqcAgGanAIBqpwCAuKkBALmpAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9pAQCwyQEAsdUBALLVAQCzqQEAtLkBALW5AQC2qQEAt6EBAKPRBQBupwCAcqcAgHanAIB6pwCApvEFAKX5BQB+pwCAqxECAKoNAgCCpwCAhqcAgK99AgCufQIArX0CAKwBAgCKpwCAjqcAgJKnAICWpwCAgTEAAIANAACapwCAgjkAAJ6nAICipwCAviQDAKqnAICupwCAsqcAgIbYHACHTAMAtqcAgLqnAIC+pwCAhMAcAOMgAQDCpwCA4cgBAManAIDvMAIAyqcAgM6nAIDSpwCA1qcAgNqnAIDepwCA4qcAgLOVAwDmpwCA6qcAgO6nAIDypwCAtrkDALWxAwD2pwCAu1EDALpJAwD6pwCA/qcAgL/1AAC+SQMAvUEDALxJAwCoLQIAqUUCAKpdAgCrVQIArHkCAK15AgCuvQIAr7UCAL5oHQACqACABqgAgAqoAICAHQAAgQkAAIKpAAAOqACAuFEBALlZAQC6YQEAu2EBALwRAQC9EQEAvhEBAL8RAQCwzQIAsdUCALLdAgCz1QIAtM0CALVxAQC2cQEAt3EBAOFYBgDhVAcA47AAAOO8BgASqACAGqgAgIYYHACHVB0AHqgAgCKoAIAmqACAKqgAgL74HAAuqACA7/AGAO/gBgCjlQIAMqgAgDaoAIA6qACAPqgAgKa5AgClsQIAQqgAgKtRAgCqSQIARqgAgEqoAICv9QEArkkCAK1BAgCsSQIAqG0eAKl1HgCqfR4Aq40eAKyVHgCtnR4Aro0eAK+BHgAWqACATqgAgFKoAIBWqACAWqgAgF6oAIBiqACAZqgAgLiJHgC5iR4AupkeALuRHgC8uR4AvbkeAL59HwC/dR8AsMUeALHNHgCyxR4As90eALTFHgC1zR4AtsUeALe5HgCz9R4AaqgAgG6oAIByqACAdqgAgLYdHgC1HR4AeqgAgLsJHgC6AR4AfqgAgIKoAIC/CR4AvgEeAL0JHgC8ER4Agm0AAKOxHgCAVQAAgWUAAKZZHgCEmAMAv9ABAKVZHgCqRR4Aq00eAIYABACHmAEArkUeAK9NHgCsVR4ArU0eAIqoAICOqACAhCQAAJKoAICWqACAmqgAgKanAICGqACAqLUeAKmFHgCqjR4Aq4UeAKydHgCtgR4Arv0eAK/1HgCwjR4AsZUeALKVHgCzpR4AtL0eALVxAQC2cQEAt3EBALhRAQC5UQEAulEBALtRAQC89QEAvf0BAL71AQC/7QEAsyUeAL4IBwCeqACAoqgAgKaoAIC2IR4AtTUeAKqoAIC7cR4AumkeAK6oAICyqACAv5UBAL5ZHgC9UR4AvGEeALaoAICjYR4AuqgAgL6oAICmZR4AwqgAgMaoAIClcR4Aqi0eAKs1HgDKqACAzqgAgK4dHgCv0QEArCUeAK0VHgDhVBoA0qgAgONcCgDWqACA2qgAgN6oAIDiqACA5qgAgOqoAIC+qAUA7qgAgPKoAICPMSoA+qgAgO/E+wD+qACAk2EuAJIdLwCR2SoAkEkqAJfZEgCWdRIAlQ0TAJTBLgCbHRsAmkEWAJlJFgCYDRcAn3EeAJ4RGwCdcRoAnHkaAKOhAgCinQMAoZUfAKCJHgDjiAEA4wgeAOFoAADh/B4A79wBAO98HwC1if4AtAH8ALMB+gCylfoAsQH4ALAR9gCv4fYArgH0AK0l8gCs7fIAqwHwAKrpDwCp1Q4AqN0OAKcBDACmyQoApe0KAKQBCACj4QYAovEGAKHlAwACqQCAggErAIMBKwAGqQCACqkAgIYxLwCHiS8AhIkrAIVFLgCKdRIAiwUTAIYIBQCHbAUAjhEXAI8RFwCMsRMAjV0WAJI9GgCTQRsAhMgFAIQABwCWUR8Al1EfAJRRGwCVORoAmn0eAJt9AgAOqQCAEqkAgIFZAQCAVQEAnFkDAIJRAQC+yAcAFqkAgBqpAIAeqQCAIqkAgCapAIAqqQCA79QeAC6pAIDhJB4AMqkAgONoAQA2qQCAOqkAgD6pAIBCqQCAu2kCALpZAgBGqQCASqkAgL8dAgC+HQIAvRkCALxxAgCz7QIATqkAgFKpAIBWqQCAWqkAgLZ9AgC17QIAXqkAgKMNBQD2qACAYqkAgGqpAIBmqQCApp0FAKUNBQBuqQCAq4kFAKq5BQCGCAMAh3wDAK/9BQCu/QUArfkFAKyRBQCAsQcAgbkHAIJBAACzsQYAcqkAgLVZBwC2MQcAdqkAgHqpAIB+qQCAuuEHALvhBwC84QcAveEHAL7hBwC/3QcAqLUGAKm5BgCqdQYAq4UHAKydBwCt/QcArvUHAK8ZBwCCqQCAhqkAgIqpAICOqQCAkqkAgJapAICaqQCAnqkAgLh1BwC5fQcAunUHALsFBwC8HQcAvTEHAL4xBwC/MQcAsGkHALFpBwCyeQcAs3kHALRpBwC1VQcAtlEHALdNBwCj/QcAoqkAgKapAICqqQCArqkAgKZ9BgClFQYAsqkAgKutBgCqrQYAtqkAgLqpAICvkQYArq0GAK2tBgCsrQYAvqkAgMKpAIDGqQCAyqkAgIAdAACBCQAAgjkAAM6pAIDSqQCA2qkAgIbIAACHpAEA3qkAgOKpAIDmqQCA6qkAgKiNAQCpmQEAqtkBAKvRAQCs8QEArfEBAK45AQCvOQEAhKAAAO6pAIDyqQCA9qkAgPqpAID+qQCAAqoAgAaqAIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+nQAAv5UAALBJAQCxSQEAslkBALNZAQC0SQEAtUkBALb9AAC39QAAugUEALsJBAC44QcAueEHAL4JBAC/CQQAvAkEAL0JBACyjQcAs+UHALC1BwCxhQcAtuUHALftBwC08QcAtfEHAKpNBwCrVQcAqEkHAKlJBwCu3QcAr8UHAKxNBwCt1QcACqoAgA6qAIASqgCAFqoAgBqqAIAeqgCAIqoAgCaqAICz0QIAKqoAgC6qAIC+AAwAMqoAgLbxAgC1+QIANqoAgLsNAgC6DQIAOqoAgD6qAIC/DQIAvg0CAL0NAgC8DQIAghUAAKOVAgCAYQAAgWEAAKa1AgBCqgCASqoAgKW9AgCqSQIAq0kCAIbIDAC
HrAwArkkCAK9JAgCsSQIArUkCAKhlAgCpdQIAqn0CAKt1AgCsbQIArbECAK6xAgCvsQIAhKANAE6qAIBSqgCAVqoAgFqqAIBeqgCAYqoAgGaqAIC4MQEAuTEBALoxAQC7MQEAvNUBAL3dAQC+yQEAv8EBALDRAgCx0QIAstECALPRAgC0EQEAtREBALYRAQC3EQEA4bAGAGqqAIDj0AYAhEAPAG6qAIDhpAEAcqoAgOPABgB2qgCAeqoAgH6qAIDv1AYA7AAAAIKqAIDvZAcAhqoAgIqqAICOqgCAkqoAgLO5AgCWqgCAtakCALZ9AgCaqgCAnqoAgKKqAIC6WQIAu1kCALxJAgC9SQIAvpkBAL+ZAQCjdQ0ARqoAgKaqAICqqgCArqoAgKaxDQClZQ0AsqoAgKuVDQCqlQ0AvqQDALaqAICvVQ4ArlUOAK2FDQCshQ0AgE0AAIFVAACCVQAAs2UPALqqAIC1ZQ8Atm0PAL6qAICGQAMAhxQDALrtDwC7/Q8AvOkPAL3VDwC+3Q8Av9UPAKhZDgCpoQ8AqqEPAKuhDwCsoQ8AraEPAK6hDwCvoQ8AwqoAgMaqAIDKqgCAzqoAgNKqAIDWqgCA2qoAgN6qAIC4AQ8AuQEPALoBDwC7HQ8AvA0PAL01DwC+PQ8Av9UAALBlDwCxdQ8AsnEPALNNDwC0VQ8AtV0PALZNDwC3QQ8AoykOAOKqAIDmqgCA6qoAgO6qAICmIQ4ApSkOAPKqAICrsQ4AqqEOAPaqAID6qgCAr5kOAK6RDgCtmQ4ArKUOAP6qAIACqwCABqsAgAqrAIDvJA0ADqsAgBKrAIAWqwCA49AOABqrAIDhGA4AHqsAgIAVAACBGQAAggUAACKrAICo0QEAqdkBAKopAQCrKQEArDkBAK05AQCuKQEArykBAL5oAQAqqwCAhsgBAIesAAAuqwCAMqsAgDarAIA6qwCAuO0AALmFAAC6jQAAu4UAALydAAC9gQAAvoEAAL+BAACwWQEAsVkBALLtAACz5QAAtP0AALXlAAC25QAAt9UAALOhAgA+qwCAQqsAgEarAIBKqwCAtrkCALWxAgBOqwCAu50CALqdAgBSqwCAVqsAgL8hAwC+OQMAvTEDALw5AwCF+PUAo+UCAFqrAIBeqwCApv0CAGKrAIBmqwCApfUCAKrZAgCr2QIAaqsAgG6rAICufQMAr2UDAKx9AwCtdQMAuOkAALnpAAC6aQAAu2kAALx5AAC9ZQAAvm0AAL9lAACwsQAAsbkAALKBAACzgQAAtPkAALX5AAC27QAAt+UAAKhlAwCpdQMAqn0DAKt1AwCsbQMArdEAAK7RAACv0QAAcqsAgHarAIB6qwCA1qkAgH6rAICCqwCAhqsAgIqrAICA/QEAgQkAAIIZAACOqwCAkqsAgL5EAgCaqwCAnqsAgISsAgCiqwCAh/gCAIasBQCmqwCAqqsAgK6rAICyqwCAs/UCALarAIC6qwCAvqsAgMKrAIC2UQEAteUCAMarAIC7fQEAunUBAMqrAIDOqwCAvz0BAL49AQC9VQEAvFUBAOFwDwDSqwCA47gOAITABQDvyAAA1qsAgNqrAIDeqwCA4zwOAOKrAIDh0AEA5qsAgIR0BwDqqwCA72gBAO6rAIDyqwCApXkCAKbNAQD2qwCAgCEAAIEhAACC3QcAo2kCAKzJAQCtyQEArqEBAK+hAQD6qwCA/qsAgKrpAQCr4QEAlqsAgAKsAIC+QAIABqwAgIYwAwCHMAMACqwAgA6sAICoOQcAqTkHAKoNBwCrHQcArAUHAK0NBwCuBQcAr3kHALAJBwCxCQcAshkHALMRBwC0OQcAtTkHALbdBwC3yQcAuPkHALn5BwC6zQcAu8EHALzFBwC9yQcAvrkHAL+xBwCzpQcAEqwAgBasAIAarACAHqwAgLatBwC1rQcAIqwAgLvtBwC67QcAJqwAgCqsAIC/3QcAvt0HAL3lBwC87QcALqwAgKPhBwAyrACANqwAgKbpBwA6rACAPqwAgKXpBwCqqQcAq6kHAEKsAIBGrACArpkHAK+ZBwCsqQcAraEHAEqsAIBOrACAUqwAgFasAIBarACAXqwAgGKsAIBmrACAgREAAIANAABqrACAghkAAG6sAIByrACAvuQBAHasAICG4AAAhxgBAHqsAIB+rACAgqwAgIasAICKrACA77AEAI6sAIDh1AYAkqwAgONcBACWrACAmqwAgJ6sAICirACAqJkBAKmZAQCqDQEAqwUBAKwdAQCtBQEArgUBAK81AQCEiAEApqwAgKqsAICurACAsqwAgLasAIC6rACAvqwAgLjBAAC5wQAAusEAALvBAAC8wQAAvcEAAL7BAAC/wQAAsE0BALElAQCyIQEAsyEBALQlAQC1LQEAthEBALcRAQDCrACAxqwAgLONAgDKrACAtZ0CAM6sAIDSrACAto0CANasAIDarACAu+kCALqBAgC9/QIAvP0CAL/hAgC+6QIA3qwAgKbVAgClxQIAvggDAKPVAgCCLQAAgRkAAIB5AACvuQIArrECAK2lAgCspQIAq7ECAKrZAgDirACA6qwAgO80AgDurACAhxgDAIYs/ADyrACA9qwAgPqsAID+rACAAq0AgAatAIAKrQCADq0AgOMAAQASrQCA4eABABatAIC6tQMAu70DABqtAIAerQCAvnkDAL95AwC8pQMAvXkDACarAICztQMAIq0AgCatAIC2kQMAKq0AgC6tAIC1pQMAqEkCAKlJAgCqWQIAq1kCAKxJAgCtdQIArnECAK9tAgC+aP0AvqT/ADKtAIA2rQCAOq0AgD6tAIBCrQCARq0AgLj5AgC5+QIAukkBALtJAQC8XQEAvUEBAL5BAQC/fQEAsBUCALEdAgCyFQIAs8kCALTZAgC12QIAtskCALfJAgDjIAYA4bAGAOGAAQDjEAYAgA0AAIE1AACCPQAASq0AgE6tAIBSrQCAWq0AgF6tAIDvcAAAYq0AgGatAIDvTAEAhIz9AGqtAICjmQIAbq0AgKWJAgByrQCAdq0AgKa9AgCGwPwAh+T8AKuRAgCqmQIArVUCAKyJAgCvVQIArlUCAKh9/gCpgf4Aqpn+AKuZ/gCsif4ArYn+AK65/gCvuf4AVq0AgHqtAIB+rQCAgq0AgIatAICKrQCAjq0AgJKtAIC4tf4Aub3+ALph/wC7Yf8AvGH/AL1h/wC+Yf8Av2H/ALDJ/gCxyf4Ast3+ALPR/gC0uf4Atbn+ALaR/gC3kf4AsxH+AJatAICarQCAnq0AgKKtAIC2Cf4AtQH+AKatAIC7Df4Aug3+AKqtAICurQCAv33+AL59/gC9Bf4AvAn+ALKtAICjVf4Atq0AgLqtAICmTf4Avq0AgMKtAIClRf4Aqkn+AKtJ/gCEKAMAxq0AgK45/gCvOf4ArE3+AK1B/gCAzQEAgdEBAILRAQCzuf4Ayq0AgLXR/gC21f4Azq0AgIZgAQCHYAEAug0BALsFAQC8HQEAvQUBAL4NAQC/BQEA0q0AgNatAIDarQCA3q0AgOKtAIDhwP
0A5q0AgOOM/ADqrQCA7q0AgPKtAIDvtPwA9q0AgPqtAID+rQCAAq4AgKgp/gCpKf4Aqj3+AKs1/gCsVf4ArVn+AK5N/gCvRf4ABq4AgAquAIAOrgCAEq4AgBauAIAargCAHq4AgCKuAIC4SQEAuUkBALpZAQC7UQEAvHkBAL15AQC+GQEAvxUBALDFAQCxzQEAssUBALPdAQC0xQEAtc0BALbFAQC3eQEAJq4AgCquAIAurgCAo7n9ADKuAICl0f0AptX9AITQAwBBrgCAvuACAKoNAgCrBQIArB0CAK0FAgCuDQIArwUCAIFJAACAQQAAowkDAIJdAAClGQMARa4AgEmuAICmEQMAhsAEAIfkAwCrDQMAqg0DAK0BAwCsHQMArwEDAK4JAwCw4QMAseEDALLhAwCz/QMAtOUDALXtAwC25QMAtz0DALgFAwC5DQMAugUDALsdAwC8BQMAvQ0DAL4FAwC/vQAATa4AgFGuAIBVrgCAWa4AgOasAIBdrgCAYa4AgGWuAICo8QMAqfkDAKqpAwCrqQMArLkDAK25AwCuqQMAr6UDALNBAgBprgCAba4AgHGuAIB1rgCAtlkCALVRAgB5rgCAu0UCALpFAgB9rgCAga4AgL9JAgC+QQIAvUkCALxVAgCFrgCAia4AgI2uAICRrgCA74wDAJWuAICZrgCAna4AgONsAwChrgCA4VAAAKWuAICprgCAvngFALGuAICEcAIAgOUAAIHpAACC+QAAta4AgIawBACHVAUAua4AgO9A/gC9rgCA4Vz+AMGuAIDjVAEAxa4AgMmuAIDNrgCA0a4AgLOZAQDVrgCA2a4AgN2uAIDhrgCAth0BALUdAQDlrgCAuz0BALo9AQDprgCA7a4AgL/hAAC++QAAvfEAALz5AACoIQYAqVEGAKpRBgCrzQYArNUGAK3dBgCu1QYAr8kGAK2uAIDxrgCA9a4AgPmuAID9rgCAAa8AgAWvAIAJrwCAuG0HALkFBwC6DQcAuwUHALwdBwC9AQcAvgEHAL8BBwCwuQYAsbkGALJtBwCzZQcAtH0HALVlBwC2ZQcAt1UHAKPZBgANrwCAEa8AgBWvAIAZrwCApl0GAKVdBgCEnAIAq30GAKp9BgC+JAMAHa8AgK+hBwCuuQcArbEHAKy5BwCASQAAgUkAAIJZAACzVQcAIa8AgLV9BwC2aQcAJa8AgIZAAACHVAMAulUHALspBwC8OQcAvTkHAL4pBwC/IQcAo5kGACmvAIAtrwCAMa8AgDWvAICmpQYApbEGADmvAICr5QYAqpkGAD2vAIBBrwCAr+0GAK7lBgCt9QYArPUGAOE4BQBFrwCA4yQEAEmvAIBNrwCAUa8AgFWvAIBZrwCAXa8AgGGvAIBlrwCAaa8AgG2vAIBxrwCA7/QEAHWvAICo+QYAqQkGAKoRBgCrLQYArDkGAK0lBgCuLQYAryUGAHmvAIB9rwCAga8AgIWvAICAGQAAgRkAAIIFAACJrwCAuOUBALntAQC65QEAu/0BALzlAQC97QEAvuUBAL9ZAQCwXQYAsSEGALIhBgCzIQYAtCEGALUpBgC2EQYAtxEGAKjRAgCp2QIAqg0DAKsFAwCsHQMArQUDAK4FAwCvNQMAvmQCAJGvAICVrwCAma8AgJ2vAIChrwCApa8AgKmvAIC4JQMAuS0DALolAwC7PQMAvCUDAL0pAwC++QMAv/kDALBNAwCxIQMAsiUDALM9AwC0JQMAtS0DALYlAwC3HQMAs4UDAITIAgCtrwCAhAgDALGvAIC2hQMAtZUDALWvAIC75QMAuokDAIYIDACHnAMAv+kDAL7hAwC96QMAvPEDAIXsCgA2rgCAo80DALmvAICl3QMAva8AgMGvAICmzQMAxa8AgMmvAICrrQMAqsEDAK2hAwCsuQMAr6EDAK6pAwDNrwCA0a8AgNWvAIDZrwCA78gDAN2vAIDhrwCA5a8AgOO0AwDprwCA4dABAO2vAICADQAAgXUAAIJ9AADxrwCA9a8AgPmvAICzZQEAvgQCALVlAQABsACABbAAgLZlAQCGQA0Ah1gNALv1AQC6/QEAvaUBALy5AQC/mQEAvqUBAAmwAIANsACAEbAAgIQADAAVsACAGbAAgB2wAIDvzAEAIbAAgOEsBgAlsACA4yABAOwAAAApsACALbAAgDGwAIA1sACAo+kBADmwAIA9sACApukBAEGwAIBFsACApekBAKpxAQCreQEASbAAgE2wAICuKQEArxUBAKw1AQCtKQEAqCUOAKktDgCqJQ4Aqz0OAKwlDgCtLQ4AriUOAK+VDgD9rwCAUbAAgFWwAIBZsACAXbAAgIKdAACBnQAAgJ0AALhFDwC5TQ8AukUPALtZDwC8SQ8AvUkPAL59DwC/cQ8AsPEOALH5DgCypQ4As7kOALSpDgC1lQ4Atp0OALd9DwCo1Q8Aqd0PAKoJDwCrCQ8ArBkPAK0FDwCuDQ8ArwUPAGGwAIBlsACAabAAgL6gAwBtsACAcbAAgId4AwCGEAAAuBUPALkdDwC6IQ8AuyEPALz1AAC9/QAAvvUAAL/tAACwQQ8AsU0PALJdDwCzVQ8AtE0PALU1DwC2MQ8AtzEPAHWwAIDvsAwAebAAgH2wAICBsACAhbAAgImwAICNsACAkbAAgJWwAICZsACAnbAAgKGwAIDjqA0ApbAAgOGMDQCzwQ4AqbAAgK2wAICxsACAtbAAgLbFDgC10Q4AubAAgLvJDgC6xQ4AvbAAgMGwAIC/sQ4AvskOAL3BDgC8yQ4AowEOAMWwAIDJsACAzbAAgNGwAICmBQ4ApREOANWwAICrCQ4AqgUOANmwAICErAIAr3EOAK4JDgCtAQ4ArAkOAIBRAACBWQAAgmEAALPFAAC+zAEAtcUAALbNAADhsACAhkAHAIcUAQC6yQAAu8kAALzZAAC92QAAvskAAL/FAACrDQMAqg0DAKkJAwCouQIArw0DAK4NAwCtDQMArA0DAL5gAwDlsACA6bAAgO2wAIDxsACA9bAAgPmwAIC+MAUAuykDALoZAwC5GQMAuAEDAL/dAwC+3QMAvd0DALwxAwCzTQMAsk0DALFNAwCwTQMAtzkDALYxAwC1QQMAtE0DAP2wAICmkQMApZkDAAGxAICjmQMABbEAgAmxAIANsQCAr5kDAK6VAwCthQMArIUDAKuVAwCqlQMAja8AgBGxAIAVsQCAGbEAgB2xAIAhsQCAJbEAgCmxAIAtsQCAMbEAgDWxAIA5sQCAPbEAgEGxAICAHQAAgQkAAIL9AQBFsQCAvwgHAEmxAIBRsQCA7yQAAFWxAICElAIAWbEAgF2xAICH4AIAhgQFAL4AGABhsQCAZbEAgOGQAQBpsQCA44AAAG2xAIBxsQCAdbEAgLNlAQB5sQCAtWUBALZtAQB9sQCAgbEAgIWxAIC65QEAu/kBALzpAQC96QEAvsUBAL+9AQCJsQCAjbEAgJGxAIC+xBkAlbEAgJmxAICdsQCA78gBAKGxAIDh3A4ApbEAgOMwDgCpsQCArbEAgLGxAICEMAQAg
HkAAIEVAACCFQAAo+UBALWxAICl5QEApu0BALmxAICGQAYAh5AHAKplAQCreQEArGkBAK1pAQCuRQEArz0BAKjdBQCpIQYAqiEGAKshBgCsIQYArSEGAK4hBgCvnQYATbEAgL2xAIDBsQCAhDABAMWxAIDJsQCAzbEAgNGxAIC4jQYAuZUGALqdBgC7lQYAvI0GAL21BgC+vQYAv7UGALDtBgCx8QYAsvEGALPxBgC0zQYAtbUGALa9BgC3tQYAqIkHAKmVBwCqkQcAq5EHAKy9BwCtpQcArqEHAK/dBwDVsQCA2bEAgN2xAIDhsQCA5bEAgOmxAIDtsQCA8bEAgLhJBwC5VQcAul0HALtVBwC8cQcAvX0HAL5pBwC/aQcAsKUHALGtBwCyuQcAs7EHALSRBwC1kQcAtnkHALd5BwD1sQCA+bEAgP2xAIABsgCA78gFAOHACQAFsgCA48AZAOMkBAAJsgCA4dAGAO/cKACinQMAoxUBAKAZBQChjQUAs1kGAA2yAIARsgCAFbIAgBmyAIC2ZQYAtXUGAB2yAIC7KQYAuiEGACGyAIAlsgCAvxUGAL4VBgC9JQYAvC0GAKOZBgCPmfwAKbIAgDGyAIA1sgCApqUGAKW1BgA5sgCAq+kGAKrhBgCGKB8Ah5wAAK/VBgCu1QYAreUGAKztBgCebQkAn30HAJwNCwCd7QkAmvENAJs5DQCY5fAAmQ0PAJbh8QCX6fEAlMX1AJUN8wCSHfcAk/H1AJD9+QCR7fkAgh3/AIMB+gA9sgCAQbIAgIYV9gCHOfYAhAn6AIXx9ACKwfAAiyXyAEWyAIBJsgCAjuEMAI8VDgCMNfIAjQHzAJKtDgCTgQgATbIAgFGyAICW6QQAl3UGAJR5CgCV8QoAmtEGAJvJAABVsgCAWbIAgIEdAwCAHQMAnFkCAIL1AwCrARAAqpUWAKmNFgCojRYAr5UuAK4BLACt/RIArJkSAKOlHgCipR4AoY0CAN2wAICnGRoAppUaAKUBGACknR8AXbIAgGGyAIBlsgCAabIAgG2yAIBxsgCAdbIAgHmyAICz5SoAsuUqALGtLwCw5S4AfbIAgIGyAIC1ASQAtBEqAKgpAwCpNQMAqj0DAKs1AwCsLQMArbUDAK69AwCvtQMAhbIAgImyAICNsgCAkbIAgIAdAACBCQAAgrkAAJWyAIC4TQIAuV0CALptAgC7CQIAvBkCAL0ZAgC+CQIAvwECALDNAwCx1QMAst0DALPVAwC0zQMAtXUCALZ9AgC3dQIAmbIAgITIHQChsgCAvgwfAKWyAICpsgCA70gGAO9YBwDhWAYA4ZgGAOOUAQDjAAYAhhAcAId8HQC+9B4ArbIAgLGyAIC2ZQMAtfUDALWyAICz5QMAubIAgL2yAIDBsgCAv+ECAL5ZAwC9UQMAvFkDALtBAwC6WQMAxbIAgMmyAIAtsgCAnbIAgM2yAIDRsgCA1bIAgNmyAIDdsgCA4bIAgKitHQCptR0AqrUdAKslHgCsPR4ArR0eAK4VHgCvdR4AsA0eALEtHgCyJR4As40eALSVHgC1nR4AtpUeALeNHgC4tR4Aub0eALq1HgC7nR4AvIUeAL1VHwC+XR8Av1UfALMdHQDlsgCA6bIAgO2yAIDxsgCAtr0eALWVHgD1sgCAu8keALrpHgD5sgCA/bIAgL95HgC+cR4AvXkeALzRHgCCKQAAo1kdAIAdAACBFQAApvkeAAGzAIAFswCApdEeAKqtHgCrjR4ACbMAgITgAwCuNR4Arz0eAKyVHgCtPR4AqIkeAKmVHgCqnR4Aq7EeAKzRHgCt2R4Ars0eAK/FHgANswCAEbMAgIaIAACHbAEAFbMAgBmzAIAdswCAIbMAgLhdAQC5wQEAusEBALvBAQC8wQEAvckBAL7xAQC/8QEAsL0eALGdHgCylR4As2UBALR9AQC1ZQEAtm0BALdlAQCqLR0AqzUdACWzAIApswCAri0dAK+VHACsLR0ArSUdAISMAQCjkR0ALbMAgDGzAICmER0ANbMAgDmzAIClgR0As1UeAD2zAIBBswCARbMAgEmzAIC2GR4AtRkeAE2zAIC7GR4AujkeAFGzAIBVswCAv+EBAL75AQC98QEAvAEeAFmzAIBdswCAYbMAgKOZHQBlswCApdUdAKbVHQBpswCAbbMAgHGzAICq9R0Aq9UdAKzNHQCtPQIArjUCAK8tAgCAZQAAgRUAAIIdAACEAAQAdbMAgHmzAICHcAMAhvwEAIGzAICFswCAibMAgI2zAICRswCAlbMAgJmzAICdswCAvsgEAKGzAIClswCAqbMAgK2zAICxswCAtbMAgO/cHwC5swCA4ZQBAL2zAIDjHAEAwbMAgMWzAIDJswCAzbMAgLt1AwC6aQMAvkgGANGzAIC/HQMAvh0DAL0dAwC8ZQMAs9UDANWzAIDZswCA3bMAgOGzAIC2fQMAtcUDAIRwBQCoJQIAqTUCAKo9AgCrNQIArC0CAK2dAgCulQIAr7UCAIIVAADlswCAgNkBAIEJAADEAAAA6bMAgPGzAID1swCAuKkCALmpAgC6SQEAu0kBALxZAQC9RQEAvkUBAL99AQCwzQIAsdECALLRAgCzqQIAtLkCALW5AgC2qQIAt6ECAOEoHgDhNBwA43QBAOMYHgD5swCA/bMAgIa4BACHVAUAhDgHAAG0AIAFtACACbQAgL6sBwANtACA78weAO/IGgCj9QIAEbQAgBW0AIAZtACAHbQAgKZdAgCl5QIAIbQAgKtVAgCqSQIAJbQAgCm0AICvPQIArj0CAK09AgCsRQIAqGEGAKlhBgCqYQYAq2EGAKxhBgCtYQYArmEGAK9hBgDtswCALbQAgDG0AIA1tACAObQAgD20AIBBtACARbQAgLjxBgC58QYAuvEGALvxBgC8nQYAvbEGAL6xBgC/sQYAsOUGALHtBgCy5QYAs/0GALTlBgC17QYAttkGALfVBgCz6QYASbQAgE20AIBRtACAVbQAgLbhBgC16QYAWbQAgLspBgC6IQYAXbQAgGG0AIC/KQYAviEGAL0pBgC8MQYAgl0AAKOtBgCARQAAgV0AAKalBgBltACAabQAgKWtBgCqZQYAq20GAIYADACHQAMArmUGAK9tBgCsdQYArW0GAG20AIDvfAUAcbQAgHW0AIB5tACAfbQAgIG0AICFtACAibQAgI20AICRtACAlbQAgJm0AIDjaAUAnbQAgOF4BQCz0QYAobQAgKW0AICptACArbQAgLb9BgC1/QYAsbQAgLupBgC6oQYAtbQAgLm0AIC/mQYAvqkGAL2pBgC8sQYAqLkGAKm5BgCqGQYAqxkGAKw1BgCtPQYArjUGAK8pBgC9tACAgh0AAIEdAACAHQAAwbQAgMW0AIDJtACA0bQAgLjpAQC56QEAuvkBALv5AQC86QEAvekBAL5dAQC/VQEAsCUGALEtBgCyJQYAsz0GALQtBgC1HQYAthUGALfZAQCGgAwAh+QCANW0AICjnQUA2bQAgKWxBQCmsQUA3bQAgOG0AIDltACAqu0F
AKvlBQCs/QUAreUFAK7lBQCv1QUAtk0DAOm0AICExAMAtUUDAO20AICzjQIA8bQAgPW0AIC+SQMAv0kDALxJAwC9SQMAumkDALtpAwD5tACA/bQAgAG1AICmiQMApYEDAAW1AICjSQIACbUAgA21AIARtQCAr40DAK6NAwCtjQMArI0DAKutAwCqrQMAfbMAgBW1AIAZtQCAHbUAgIW0PQAhtQCAJbUAgCm1AIAttQCAMbUAgIA9AACBCQAAgh0AADW1AIC+sAMAObUAgIc4AwCG3AwAQbUAgEW1AIBJtQCATbUAgFG1AIDvXAYAVbUAgFm1AIC+6AwA45QGAF21AIDh3AEAYbUAgGW1AIBptQCAbbUAgLNRAQBxtQCAdbUAgHm1AIB9tQCAtnEBALV5AQCBtQCAuz0BALo9AQCFtQCAibUAgL/9AQC+9QEAvQUBALwFAQCNtQCAkbUAgJW1AICEQAwAmbUAgJ21AIChtQCA76wHAKW1AIDhJAYAqbUAgONABwCGkAwAh/wMALG1AIC1tQCAgFkAAIFlAACCYQAAo90BALm1AICl9QEApv0BAL21AIDBtQCAxbUAgKqxAQCrsQEArIkBAK2JAQCueQEAr3EBAM20AIA9tQCAybUAgM21AICttQCA0bUAgNW1AIDZtQCAqJ0NAKktDgCqOQ4AqzEOAKwRDgCtEQ4Arn0OAK9tDgCwGQ4AsRkOALIxDgCzMQ4AtNEOALXZDgC2zQ4At8UOALj9DgC52Q4AuqkOALupDgC8vQ4AvaUOAL6tDgC/pQ4AqIEPAKmBDwCqgQ8Aq4EPAKyBDwCtjQ8AroUPAK+1DwDdtQCA4bUAgOW1AIDptQCA7bUAgPG1AID1tQCA+bUAgLidDwC5rQ8AuqUPALtNDwC8VQ8AvV0PAL5JDwC/SQ8AsNEPALHRDwCy0Q8As9EPALS1DwC1vQ8AtrUPALetDwCzCQ4A/bUAgAG2AIAFtgCACbYAgLYNDgC1CQ4ADbYAgLsVDgC6FQ4AEbYAgBW2AIC/eQ4AvnEOAL0FDgC8BQ4AghUAAKNNDgCAYQAAgWEAAKZJDgAZtgCAvhABAKVNDgCqUQ4Aq1EOAIQkAQAhtgCArjUOAK89DgCsQQ4ArUEOAKg5DgCpOQ4AqlkOAKtRDgCscQ4ArXEOAK6RAQCvkQEAhgAAAIeEAAAltgCAKbYAgC22AIAxtgCANbYAgDm2AIC4dQEAuX0BALp1AQC7yQAAvNkAAL3ZAAC+yQAAv8EAALD1AQCx/QEAsvUBALNNAQC0VQEAtV0BALZVAQC3TQEAuk0PALtVDwC4TQ8AuUUPAL59DwC/tQ8AvEUPAL11DwCyAQ8AswEPALAxDwCxMQ8AtgEPALcNDwC0EQ8AtREPAKqZDgCrRQ8AqOUOAKmZDgCuQQ8Ar0EPAKxRDwCtUQ8APbYAgEG2AIBFtgCASbYAgE22AIBRtgCAVbYAgFm2AICzUQ0AXbYAgGG2AIBltgCAabYAgLZxDQC1eQ0AbbYAgLu5AgC6sQIAcbYAgHW2AIC/GQIAvhECAL0ZAgC8oQIAebYAgKMVDQB9tgCAgbYAgKY1DQCFtgCAibYAgKU9DQCq9QIAq/0CAIToAwCRtgCArlUCAK9dAgCs5QIArV0CAKhtAgCprQIAqqUCAKu9AgCspQIAra0CAK6lAgCvfQEAgO0BAIHxAQCC8QEAvqAFAJW2AICZtgCAh2gFAIYcBQC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALAFAQCxDQEAsgUBALMdAQC0BQEAtQ0BALYFAQC3+QEA4WQPAOGcDwDjFA4A49QPAJ22AIDhPA4AobYAgOPkAAC+rAQApbYAgKm2AIDvDAAArbYAgLG2AIDvYA4A77QPALW2AIC5tgCAhEQEALNhAgC9tgCAtWECALZhAgDBtgCAxbYAgMm2AIC6jQEAu4UBALydAQC9hQEAvo0BAL+FAQCjrQUAjbYAgM22AIDRtgCA1bYAgKatBQClrQUA2bYAgKtJBgCqQQYA3bYAgOG2AICvSQYArkEGAK1JBgCsUQYA5bYAgOm2AIDttgCA8bYAgIAdAACBCQAAgjkAAPW2AID5tgCA/bYAgIbIAACHIAMAAbcAgAW3AIAJtwCADbcAgKhtBgCptQcAqr0HAKsdBwCsCQcArTEHAK4xBwCvLQcAhKgDABG3AIAVtwCAGbcAgB23AIAhtwCAJbcAgCm3AIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+nQAAv5UAALBVBwCxJQcAsi0HALM9BwC0LQcAtRUHALYdBwC39QAALbcAgOG8BgAxtwCA4/QFADW3AIA5twCAPbcAgEG3AIBFtwCASbcAgE23AIBRtwCAVbcAgFm3AIBdtwCA7+gEALN1BgCCLQAAgRUAAIAdAABhtwCAtvEGALXBBgBltwCAu6EGALrRBgBptwCAvmwBAL+RBgC+qQYAvakGALy5BgCjtQYAcbcAgIYoAACHTAEAdbcAgKYxBgClAQYAebcAgKthBgCqEQYAfbcAgIG3AICvUQYArmkGAK1pBgCseQYAhbcAgLO9AQCJtwCAjbcAgLZ5AQCRtwCAlbcAgLV5AQC6VQEAu10BAJm3AICdtwCAvvkAAL/lAAC8RQEAvf0AAKhxAgCpcQIAqnECAKtxAgCstQIArb0CAK61AgCvrQIAhOw8AKG3AICltwCAqbcAgK23AICxtwCAtbcAgLm3AIC4XQMAuWUDALptAwC7ZQMAvH0DAL1lAwC+bQMAv2UDALDVAgCx3QIAstUCALNtAwC0eQMAtWUDALZtAwC3ZQMAHbYAgL23AIDBtwCAo/UCAMW3AIClMQIApjECAMm3AIDNtwCA0bcAgKodAgCrFQIArA0CAK21AwCusQMAr60DAIBlAACBCQAAghkAANW3AIDZtwCA4bcAgL4QPADltwCAhsA8AIcgAwDptwCA7bcAgPG3AID1twCA+bcAgP23AICohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAAG4AIAFuACACbgAgA24AIARuACAFbgAgBm4AIAduACAuHUBALl9AQC6dQEAu8kBALzZAQC9xQEAvsUBAL/9AQCwtQIAsb0CALKBAgCzgQIAtFUBALVdAQC2VQEAt00BAOGkBgAhuACA41AGAL6APACEHDwAvoA/ACW4AIApuACALbgAgDG4AIA1uACAObgAgD24AIBBuACA7+AGAEW4AICBfQAAgHEAAEm4AICCBQAAUbgAgFW4AIDvTAAAWbgAgOGQAQBduACA41gBAGG4AIBluACAabgAgIZYPwCH/DwAs509AN23AIBNuACAbbgAgHG4AIC21T0AtbU9AHW4AIC7+T0AuvE9AHm4AIB9uACAvxk+AL4RPgC91T0AvNU9AIG4AICj2T0AhbgAgIm4AICmkT0AjbgAgJG4AICl8T0AqrU9AKu9PQCVuACAmbgAgK5VPgCvXT4ArJE9AK2RPQCoVT4AqVk+AKphPgCrYT4ArGE+AK1
hPgCuYT4Ar2E+AISoAwCduACAobgAgKW4AICpuACArbgAgLG4AIC1uACAuEU/ALldPwC6VT8Au20/ALx1PwC9fT8AvnU/AL9tPwCwwT8AscE/ALLBPwCzwT8AtME/ALXBPwC2wT8At8E/AIC5AQCBuQEAggUAALm4AIDhgD4AwbgAgOMoPQDFuACAhoAAAIcEAQDvCD0AybgAgM24AIDRuACA1bgAgNm4AICzqT8AvbgAgN24AIDhuACA5bgAgLahPwC1qT8A6bgAgLtFPgC6RT4A7bgAgPG4AIC/RT4AvkU+AL1VPgC8VT4Ao2k/APW4AID5uACA/bgAgAG5AICmYT8ApWk/AAW5AICrhT4AqoU+AAm5AIANuQCAr4U+AK6FPgCtlT4ArJU+ABG5AICzGT4AFbkAgBm5AIC2IT4AHbkAgCG5AIC1MT4AuvEBALv5AQAluQCAKbkAgL6xAQC/vQEAvNEBAL3RAQCo0T0AqdE9AKrVPQCr6T0ArP09AK3lPQCu7T0ArxECAID5AwCBzQMAgsUDAIQkAwC+AAQAMbkAgIesAwCGvAQAuBkCALktAgC6JQIAu+kCALz5AgC9+QIAvukCAL/pAgCwcQIAsXkCALJBAgCzQQIAtDECALU9AgC2NQIAtykCAKVtPQA1uQCAObkAgKZ9PQA9uQCAbbcAgKNFPQBBuQCArY0CAKyNAgCv4QIAru0CAKwAAABFuQCAq6UCAKqtAgDh+AEASbkAgOP0AgCEwAQATbkAgFG5AIBVuQCAWbkAgF25AIBhuQCAZbkAgGm5AIBtuQCAcbkAgO8wAgB1uQCAqBUCAKkZAgCqJQIAqz0CAKwlAgCtLQIAriUCAK9VAgB5uQCAfbkAgIG5AICFuQCAibkAgI25AICEsAQAkbkAgLjRAgC52QIAuuECALvhAgC8kQIAvZ0CAL6VAgC/iQIAsC0CALE1AgCyNQIAswUCALQdAgC18QIAtvECALfxAgDheD8A4zQBAOMIPgDhbD4AgQkAAICpAACVuQCAgj0AAJm5AIChuQCApbkAgL4gBACpuQCA79g+AO/MPgCtuQCAsbkAgLPpAgCG6AQAh8AEALbpAgC1uQCAubkAgLXpAgC6rQIAu7UCAL25AIDBuQCAvp0CAL9xAgC8pQIAvZUCAC25AICduQCAxbkAgMm5AIDNuQCA0bkAgNW5AIDZuQCAqBUGAKmhBgCqoQYAq70GAKytBgCtgQYArv0GAK/tBgCwlQYAsZ0GALKVBgCzrQYAtLUGALW9BgC2tQYAt60GALiVBgC5mQYAukkHALtJBwC8WQcAvVkHAL5JBwC/SQcArN0FAK3tBQCu5QUArwkFAN25AIDhuQCAqtUFAKvNBQDluQCApZEFAKaRBQDpuQCA7bkAgPG5AID1uQCAo5EFALNJBgD5uQCA/bkAgAG6AIAFugCAtmEGALVFBgAJugCAuzkGALoxBgC+ZAAADboAgL8ZBgC+EQYAvRkGALwhBgCjiQcAgtkBAIHZAQCAwQEAEboAgKahBwClhQcAFboAgKv5BwCq8QcAhggBAId8AQCv2QcArtEHAK3ZBwCs4QcAGboAgLP1BgAdugCAIboAgLaFBgAlugCAKboAgLWdBgC6jQYAu20BAC26AIAxugCAvmUBAL9tAQC8dQEAvW0BAKglBgCpLQYAqjkGAKsxBgCsUQYArUEGAK5BBgCvdQYANboAgDm6AIA9ugCAQboAgEW6AIBJugCATboAgFG6AIC4VQEAuWUBALplAQC7fQEAvGUBAL1tAQC+HQEAvxUBALANBgCx7QEAsuUBALP9AQC05QEAte0BALblAQC3bQEAo7EFAFW6AIBZugCAvkgDAL5YDACmwQUApdkFAF26AICrKQIAqskFAGG6AIBlugCArykCAK4hAgCtKQIArDECAGm6AIBtugCAcboAgHW6AICAGQAAgRkAAIIFAAB5ugCAhKwDAIG6AICHGAMAhswMAIW6AICJugCAjboAgJG6AICokQMAqZkDAKrJAwCrxQMArN0DAK3BAwCuwQMAr/UDAJW6AICZugCAnboAgKG6AIClugCAqboAgK26AICxugCAuH0DALnBAAC6wQAAu9EAALz5AAC9+QAAvpkAAL+ZAACwjQMAsUUDALJNAwCzRQMAtF0DALVFAwC2TQMAt0UDALNBAgC1ugCAuboAgL8EDwC9ugCAtkECALVVAgDBugCAu4ECALpJAgDFugCAyboAgL+BAgC+mQIAvZECALyZAgDNugCA0boAgNW6AIDZugCA76QDAN26AIDhugCA5boAgOMQAwDpugCA4VgAAIQgDQCAKQAAgSkAAIIdAADxugCA4VAGAOGgBwDjoAYA41AHAIWUDAD1ugCA70gbAPm6AIDhJAIA/boAgONwGgABuwCABbsAgAm7AIDvqAEA7+gGAIagDwCHDA0Ao4kCAA27AIClnQIAEbsAgBW7AICmiQIAGbsAgB27AICrSQIAqoECAK1ZAgCsUQIAr0kCAK5RAgCoZQ4AqXUOAKp9DgCrdQ4ArG0OAK21DgCuvQ4Ar7UOAO26AIAhuwCAJbsAgCm7AIAtuwCAOLsAgDy7AIBAuwCAuF0PALltDwC6ZQ8Auw0PALwVDwC9HQ8AvhUPAL8JDwCwzQ4AsdUOALLdDgCz1Q4AtM0OALVxDwC2cQ8At20PALP1DgBEuwCASLsAgEy7AIBQuwCAtjUOALXlDgBUuwCAuxEOALoJDgBYuwCAXLsAgL+1DwC+CQ4AvQEOALwJDgCCFQAAo7EOAIBhAACBYQAApnEOAGC7AIC+EAEApaEOAKpNDgCrVQ4AaLsAgIQgAQCuTQ4Ar/EPAKxNDgCtRQ4An0UIAJ4NCQCdDQkAnJkLAJt1NQCaETUAmZk3AJgNMQCXJTEAliUxAJWBPQCUDT0Ak4k/AJIVOACRPTkAkD05AI9lJQDvrA0AhgAEAIegAQBsuwCAcLsAgHS7AIDv6AEAeLsAgOE0AgB8uwCA4zQBAIC7AIDjCAwAhLsAgOEIDQChoQEAiLsAgKMJBQCibQMApc0EAKQRBQCnHRkAph0ZAKmhHQCoORkAq+kcAKqpHQCtkREArAEQAK8BFACuUREAsfkVALDlFQCz6WkAsgFoALUBbAC0eWkAjLsAgJC7AICUuwCAmLsAgJy7AICguwCAowkDAKIZDQCh/Q0AoP0NAIIlJgCDBToApLsAgKi7AICGqTwAhzU+AIQdOgCFPTsAiok+AIslMgCsuwCAsLsAgI6xNACPMTYAjD0yAI0tMgCSJTYAk9EIAIREAwC+wAQAlhULAJdVDgCUXQoAlVUKAJplDgCbiQ4AtLsAgLi7AIC8uwCAwLsAgJyBAADEuwCAuLUCALm9AgC6tQIAuwkCALwZAgC9GQIAvgkCAL8BAgCwdQ0AsX0NALJJDQCzSQ0AtJUCALWdAgC2lQIAt40CAKi9DQCpUQ0AqlUNAKtpDQCsfQ0ArWUNAK5tDQCvEQ0AZLsAgILtAQCBHQAAgB0AAMi7AIDMuwCAfboAgL5wBQ
CznQwAhIwFANC7AIDYuwCA3LsAgLalDAC1tQwA4LsAgLv5DAC68QwAhigFAIcgBQC/GQMAvhEDAL3dDAC83QwA5LsAgKPZDADouwCA7LsAgKbhDADwuwCA9LsAgKXxDACqtQwAq70MAPi7AID8uwCArlUDAK9dAwCsmQwArZkMAAC8AIAEvACACLwAgAy8AIAQvACAFLwAgBi8AIDvvAEAHLwAgOF8DgAgvACA41ABACS8AIAovACALLwAgDC8AICzlQIANLwAgDi8AIA8vACAQLwAgLa9AgC1uQIASLwAgLs5AgC6YQIAhsgEAIesBAC/GQIAvhECAL0ZAgC8IQIAo1UFAILVBwCBxQcAgMUHAEy8AICmfQUApXkFAFC8AICr+QUAqqEFAFS8AIBYvACAr9kFAK7RBQCt2QUArOEFAFy8AICzWQcAYLwAgGS8AIC2HQcAaLwAgGy8AIC1FQcAugkHALsJBwBwvACAdLwAgL75BwC/+QcAvPkHAL35BwDUuwCARLwAgHi8AIB8vACAgLwAgIS8AICIvACAjLwAgKitBwCptQcAqrUHAKvtBwCs+QcArfkHAK7tBwCv5QcAsKkHALGpBwCySQcAs0kHALRZBwC1WQcAtkkHALdJBwC4eQcAuUUHALpBBwC7XQcAvEUHAL1NBwC+RQcAvzkHAKMdBgCQvACAlLwAgJi8AICcvACAplkGAKVRBgCgvACAq00GAKpNBgCkvACAqLwAgK+9BgCuvQYArb0GAKy9BgCAbQAAgQkAAIIZAACsvACAsLwAgISYAQC+kAEAtLwAgIYAHACHxAEAuLwAgLy8AIDAvACAxLwAgMi8AIDMvACAqF0GAKmVAQCqlQEAq6UBAKy9AQCt1QEArtEBAK/RAQDQvACA1LwAgNi8AIDcvACA4LwAgOS8AIDovACA7LwAgLhZAQC5WQEAus0AALvFAAC83QAAvcUAAL7FAAC/9QAAsLUBALG9AQCygQEAs4EBALR5AQC1eQEAtmkBALdpAQCzHQIA8LwAgPS8AIC+gBwA+LwAgLZVAgC1NQIA/LwAgLt5AgC6cQIAAL0AgAS9AIC/vQIAvr0CAL1VAgC8VQIACL0AgKNZAgAMvQCAEL0AgKYRAgAUvQCAGL0AgKVxAgCqNQIAqz0CABy9AIAgvQCArvkCAK/5AgCsEQIArRECACi9AIAsvQCAvgQdAL4AHgAwvQCANL0AgDi9AIA8vQCAgPkAAIHNAACCxQAAhCADAIawHACHlAMAQL0AgES9AIBIvQCATL0AgFC9AIBUvQCA42wCAFi9AIDhoAEAXL0AgO8UAgBgvQCAZL0AgGi9AIBsvQCAcL0AgHS9AIB4vQCA4fAGAOE0BgDjTAAA4xgGAHy9AICAvQCAhL0AgIi9AICAPQAAgQkAAIIZAACMvQCAkL0AgIS8HQDvmAAA7zgHALMxAgDRAAAAh9gdAIZsHACYvQCAtikCALUhAgCcvQCAu80CALrNAgCgvQCApL0AgL/NAgC+zQIAvc0CALzNAgCyXQYAs2UGALANBgCxVQYAtn0GALedBQC0fQYAtXUGALqNBQC7zQUAuKUFALmFBQC+xQUAv8kFALzVBQC9zQUAqL0AgKy9AICwvQCAtL0AgLi9AIC8vQCAwL0AgMS9AICqtQYAq70GAKgBBwCpvQYAroEGAK+NBgCsmQYArZUGAKNxHQDIvQCAzL0AgNC9AIDUvQCApmkdAKVhHQDYvQCAq40dAKqNHQDcvQCA4L0AgK+NHQCujR0ArY0dAKyNHQDkvQCAs9UeAOi9AIDsvQCAts0eAPC9AID0vQCAtcUeALqhHgC7oR4A+L0AgPy9AIC+pR4Av6keALyxHgC9sR4AJL0AgJS9AIAAvgCAhAQDAID5AACB+QAAghEAAAS+AICoIR4AqSEeAKo5HgCrOR4ArCkeAK0pHgCuAR4ArwEeALABHgCxAR4AsgEeALMBHgC0BR4AtQkeALY9HgC3NR4AuA0eALkVHgC6HR4AuxUeALwNHgC95R8Avu0fAL/lHwCjkR8ACL4AgIYoAQCHSAEADL4AgKaJHwClgR8AEL4AgKvlHwCq5R8AFL4AgBi+AICv7R8AruEfAK31HwCs9R8AHL4AgLMtHgAgvgCAJL4AgLaVHgAovgCALL4AgLWdHgC6sR4Au7EeADC+AIA0vgCAvnUBAL99AQC8oR4AvaEeAKjRHgCp2R4AquEeAKvhHgCsUR4ArVEeAK5RHgCvUR4AOL4AgDy+AIBAvgCARL4AgEi+AIBMvgCAUL4AgFS+AIC43QEAue0BALrlAQC7jQEAvJkBAL2ZAQC+jQEAv4UBALAxHgCxMR4AsjEeALMxHgC09QEAtf0BALb1AQC37QEAo2kdAFi+AIBcvgCAYL4AgGS+AICm0R0ApdkdAGi+AICr9R0AqvUdAGy+AIBwvgCArzkCAK4xAgCt5R0ArOUdAIFpAACAWQAAvgAEAIJhAAB4vgCAfL4AgIC+AICEvgCAhOwDAIi+AICHiAMAhuwEAIy+AICQvgCAlL4AgJi+AICohQMAqZUDAKqVAwCrpQMArL0DAK3VAwCu0QMAr9EDAJy+AICgvgCApL4AgKi+AICsvgCAsL4AgLS+AIC4vgCAuHEDALlxAwC6cQMAu3EDALzVAAC93QAAvtUAAL/NAACwtQMAsb0DALKBAwCzgQMAtFEDALVRAwC2UQMAt1EDAOFUHgDhrB8A45QBAOMoHgDjYAMAvL4AgOEIAADAvgCA75ADAMS+AIDIvgCAzL4AgNC+AIDUvgCA70wfAO9MHwCzXQIA2L4AgNy+AIDgvgCA6L4AgLYVAgC1dQIA7L4AgLs5AgC6MQIAhCQFAL7gBAC/1QIAvtUCAL0VAgC8FQIAuJEdALmZHQC6oR0Au6EdALzRHQC93R0AvtUdAL/JHQCwCR4AsQkeALIZHgCzGR4AtAkeALUJHgC2vR0At7UdAKipHgCpqR4AqrkeAKu5HgCsqR4ArakeAK55HgCveR4AgKUAAIGtAACCpQAA8L4AgIbQBACH+AQA9L4AgPi+AIB0vgCA5L4AgPy+AIAAvwCABL8AgAi/AIAMvwCAEL8AgKhxBgCpcQYAqnEGAKtxBgCsVQYArUUGAK5NBgCvRQYAsD0GALHlBgCy7QYAs+UGALT9BgC15QYAtu0GALflBgC43QYAuXEHALp1BwC7SQcAvFkHAL1ZBwC+SQcAv0kHALPZBgAUvwCAGL8AgBy/AIAgvwCAtuUGALX9BgAkvwCAuwEGALrZBgAovwCALL8AgL8BBgC+GQYAvREGALwZBgAwvwCAo9kFADS/AIA4vwCAppEFADy/AIBAvwCApfEFAKq1BQCrvQUARL8AgEi/AICuUQUAr1EFAKyRBQCtkQUAo1kHAIIZAACBGQAAgOEBAEy/AICmZQcApX0HAFC/AICrgQcAqlkHAISgAgC+rAEAr4EHAK6ZBwCtkQcArJkHAFS/AICzqQYAhugAAIcsAQC2WQEAWL8AgFy/AIC1oQYAunUBALt9AQBgv
wCAZL8AgL75AQC/+QEAvGUBAL35AQCo0QYAqdkGAKplBgCrdQYArG0GAK2dAQCulQEAr40BAITsAQBovwCAbL8AgHC/AIB0vwCAeL8AgHy/AICAvwCAuGkBALlpAQC6CQEAuwUBALwdAQC9AQEAvgEBAL81AQCw9QEAsf0BALL1AQCzaQEAtHkBALV5AQC2aQEAt2EBAIS/AICIvwCAjL8AgKPhBQCQvwCApekFAKYRAgCUvwCAmL8AgJy/AICqPQIAqzUCAKwtAgCtsQIArrECAK+xAgCgvwCApL8AgL4EAwCEAAwAqL8AgKy/AICwvwCAtL8AgIANAACBFQAAgh0AALi/AIC8vwCAwL8AgIdEAwCG3AwAs+kDAMi/AIDMvwCA0L8AgNS/AIC2PQMAtT0DANi/AIC7GQMAuhEDANy/AIDgvwCAv7kAAL6xAAC9uQAAvAEDAOS/AIDhlAEA6L8AgON8AQDsvwCA8L8AgPS/AID4vwCA/L8AgADAAIAEwACACMAAgAzAAIAQwACAFMAAgO9MAgCoVQIAqV0CAKphAgCrYQIArLUCAK29AgCutQIAr60CAL5oDQAYwACAHMAAgCDAAIAkwACAgq0AAIGtAACArQAAuGEBALlhAQC6CQEAuwkBALwBAQC9AQEAvgEBAL8BAQCw1QIAsd0CALLVAgCzbQEAtHUBALV9AQC2aQEAt2EBAOFoBgDh8AcA47AAAOP0BgAowACALMAAgDDAAIA4wACAPMAAgEDAAIBEwACASMAAgL78DABMwACA72wAAO8oBgCjqQIAUMAAgIZoDACHBA0AVMAAgKZ9AgClfQIAWMAAgKtZAgCqUQIAXMAAgGDAAICv+QEArvEBAK35AQCsQQIAqIUOAKmNDgCqhQ4Aq50OAKyNDgCtvQ4ArrUOAK/dDgA0wACAZMAAgGjAAIBswACAcMAAgHTAAIB4wACAfMAAgLitDgC5tQ4Aur0OALu1DgC8dQ8AvX0PAL51DwC/bQ8AsKkOALG1DgCyvQ4As7UOALStDgC1lQ4Atp0OALeVDgCzDQ4AgMAAgITAAICIwACAjMAAgLY9DgC1BQ4AkMAAgLtxDgC6bQ4AlMAAgJjAAIC/UQ4AvmkOAL1hDgC8aQ4AghkAAKNJDgCAZQAAgRkAAKZ5DgCcwACAoMAAgKVBDgCqKQ4AqzUOAIS8AwCkwACAri0OAK8VDgCsLQ4ArSUOAKidDgCppQ4Aqq0OAKulDgCsvQ4AraEOAK7dDgCvzQ4AhiABAIdkAQCowACArMAAgLDAAIC0wACAuMAAgLzAAIC4eQEAuXkBALrNAQC7xQEAvN0BAL3FAQC+xQEAv/UBALC9DgCxjQ4AsoUOALNJAQC0WQEAtVkBALZJAQC3SQEAtS0OAMDAAIDEwACAtjkOAMjAAIDMwACAsz0OANDAAIC9hQEAvEkOAL+FAQC+hQEA1MAAgMS/AIC7UQ4AumEOAKNlDgDYwACA3MAAgODAAIDkwACApmEOAKV1DgDowACAqwkOAKo5DgDswACA8MAAgK/dAQCu3QEArd0BAKwRDgD0wACA+MAAgO/QDwD8wACAAMEAgATBAIAIwQCADMEAgBDBAIC+aAMAGMEAgBzBAIDhVA4AIMEAgONkDgAkwQCAgFkAAIFZAACCaQAAhIwDAIbwBACHFAMAKMEAgCzBAIAwwQCANMEAgDjBAIA8wQCAQMEAgETBAIBIwQCATMEAgFDBAIBUwQCAWMEAgFzBAIBgwQCAZMEAgGjBAIBswQCAqIkDAKmJAwCqmQMAq5kDAKyJAwCtiQMArj0DAK81AwCwUQMAsVEDALJVAwCzfQMAtBUDALUdAwC2FQMAtw0DALg9AwC5DQMAugUDALvtAAC89QAAvfkAAL7pAAC/6QAAcMEAgHTBAIB4wQCAsz0CAHzBAIC1LQIAtiUCAIDBAIC+aAUAiMEAgLq5AgC7uQIAvK0CAL2FAgC+/QIAv/UCAIBJAACBVQAAglUAAIQABQDvjAMAvhgEAId0BQCG/AQA4zwDAIzBAIDhUAAAkMEAgJTBAICYwQCAnMEAgKDBAICkwQCAqMEAgKzBAICwwQCAtMEAgLjBAIC8wQCA79QOAL4oBgDhdA4AwMEAgONUAQDEwQCAyMEAgMzBAIDQwQCAo/ECANTBAIDYwQCA3MEAgODBAICm6QIApeECAOTBAICrdQIAqnUCAOjBAIDswQCArzkCAK4xAgCtSQIArGECAKgpBgCpKQYAqj0GAKsxBgCsSQYArUkGAK55BgCveQYAhMEAgIIVAACBxQcAgMUHAPDBAICEaAMA9MEAgPjBAIC4yQYAuckGALrZBgC72QYAvMkGAL3JBgC+WQcAv1kHALAJBgCxCQYAshkGALMZBgC0CQYAtQkGALb5BgC3+QYAs7UGAPzBAICGrAAAh0ADAADCAIC2yQYAtcEGAATCAIC7zQYAus0GAAjCAIAMwgCAv80GAL7NBgC9zQYAvM0GABDCAICj8QYAFMIAgBjCAICmjQYAHMIAgCDCAIClhQYAqokGAKuJBgAkwgCAKMIAgK6JBgCviQYArIkGAK2JBgCoJQYAqWEGAKplBgCrfQYArGUGAK1tBgCuZQYAr50GACzCAIAwwgCANMIAgDjCAIA8wgCAQMIAgETCAIBIwgCAuPUGALn9BgC69QYAu4kGALyZBgC9mQYAvokGAL+BBgCw5QYAse0GALLlBgCz/QYAtOUGALXtBgC20QYAt80GAEzCAIC2/QYAtf0GAFDCAICz/QYAVMIAgFjCAIBcwgCAvzkGAL4xBgC9OQYAvCEGALs5BgC6MQYAFMEAgGDCAICjrQYAgnkAAIFVAACAVQAAhFwBAKatBgClrQYAaMIAgKtpBgCqYQYAhkh/AIfkAACvaQYArmEGAK1pBgCscQYAbMIAgO/cBwBwwgCAdMIAgHjCAIB8wgCAgMIAgITCAICIwgCAhKADAIzCAIC/JHkAkMIAgONoBwCUwgCA4XQGALPRAgCYwgCAvgQDAISAfQCcwgCAtvkCALXxAgCgwgCAu7UCALqpAgCkwgCAqMIAgL9RAwC+mQIAvZECALylAgCpBQIAqLkCAKsVAgCqHQIArT0CAKw9AgCvUQIArl0CAL5ofQCswgCAsMIAgLTCAIC4wgCAvMIAgMDCAIDEwgCAufEDALjpAwC78QMAuvkDAL1RAwC86QMAv00DAL5RAwCxNQIAsCkCALMBAgCyNQIAtdEDALQZAgC30QMAttkDAIIpAACjlQMAgB0AAIEVAACmvQMAyMIAgMzCAICltQMAqu0DAKvxAwDQwgCA2MIAgK7dAwCvFQIArOEDAK3VAwCGYH0Ah3h9ALNBAQCEAH8AtUEBANzCAIDgwgCAtkkBAOTCAIDowgCAu0EBALpNAQC9SQEAvEUBAL8pAQC+OQEA7MIAgO/cBgDwwgCA9MIAgPjCAID8wgCAAMMAgO8wBgCELH4A4eAGAATDAIDjiAEACMMAgON0AAAMwwCA4SwBAKPJAQAQwwCAFMMAgIVweQAYwwCA
psEBAKXJAQAcwwCAq8kBAKrFAQAgwwCAJMMAgK+hAQCusQEArcEBAKzNAQCo3X0AqQV+AKoBfgCrAX4ArAF+AK0BfgCuAX4ArwF+ANTCAIAowwCALMMAgDDDAIA0wwCAgp0AAIGdAACAnQAAuC1+ALnhfgC64X4Au+F+ALzhfgC94X4AvuF+AL/hfgCwQX4AsU1+ALJZfgCzVX4AtDV+ALUlfgC2JX4AtxV+AKitfwCp0X8AqtF/AKvtfwCs9X8ArRV/AK4RfwCvEX8AOMMAgDzDAIBAwwCARMMAgIbwAwCHuAAASMMAgEzDAIC4EX8AuRl/ALohfwC7IX8AvPUAAL39AAC+9QAAv+0AALBxfwCxcX8AsnF/ALNFfwC0QX8AtU1/ALY9fwC3NX8As1l+AFDDAIBUwwCAWMMAgFzDAIC2lX4AtX1+AGDDAIC7tX4AurV+AGTDAIBowwCAv4l+AL6FfgC9kX4AvKV+AGzDAICjHX4AcMMAgHTDAICm0X4AeMMAgHzDAIClOX4AqvF+AKvxfgCAwwCAhMMAgK7BfgCvzX4ArOF+AK3VfgCwrQAAscUAALLBAACzwQAAtMUAALXNAAC28QAAt/EAALhhAAC5YQAAumEAALt9AAC8ZQAAvW0AAL5lAAC/vQMAiMMAgIzDAICQwwCAZMIAgJTDAICYwwCAnMMAgKDDAICoWQEAqVkBAKrtAACr5QAArP0AAK3lAACu5QAAr9UAAKTDAICCHQAAgR0AAIAdAACowwCArMMAgLDDAIC+VAIAhoAEAIfsAgC4wwCAvMMAgMDDAIDEwwCAyMMAgL54AwDjdH4AzMMAgOG4fQDQwwCA1MMAgNjDAIDcwwCA4MMAgOTDAIDowwCA7MMAgPDDAIDvwH4A9MMAgPjDAID8wwCAs4UDAADEAIAExACACMQAgAzEAIC2hQMAtZUDABDEAIC74QMAuokDAL4kBgAUxACAv+kDAL7hAwC99QMAvPUDAIIpAACjwQMAgB0AAIEVAACmwQMAGMQAgBzEAICl0QMAqs0DAKulAwAgxACAheAFAK6lAwCvrQMArLEDAK2xAwDh+AMAKMQAgONcHwAsxACA7/QDADDEAICGPAcAh6wCAON8fgA0xACA4YABADjEAIA8xACAQMQAgO/kEwBExACAs3EBAEjEAIBMxACAUMQAgFTEAIC2EQEAtWEBAFjEAIC7OQEAujEBAFzEAIBgxACAvxkBAL4RAQC9GQEAvCEBAGTEAIBoxACAbMQAgHDEAIB0xACAeMQAgHzEAIDvxH8AgMQAgOH8fgCExACA4/B/AIANAACBdQAAgn0AAIjEAICMxACAkMQAgKP5AQC+AAgApekBAJjEAICcxACAppkBAISoBQCgxACAq7EBAKq5AQCtkQEArKkBAK+RAQCumQEAqCkGAKkpBgCqOQYAqzkGAKwpBgCtUQYArlUGAK9NBgAkxACAhCABAKTEAICUxACAo+EBAKKZBAChGQQAoPEFALg5BgC5OQYAus0GALvFBgC83QYAvcUGAL7FBgC/8QYAsDUGALE9BgCyNQYAsw0GALQVBgC1HQYAthUGALcJBgCPoWwAs5EHAIYoAQCHfAMAtqEHAKjEAICsxACAtbEHALrlBwC77QcAsMQAgLTEAIC+7QcAv90HALz1BwC97QcAn/l4AJ7leACdcXkAnCF8AJvxfACaYX0AmZlxAJjZcACX4XAAlnl0AJVtdACUbXQAk61pAJJxaACReWgAkB1uAIIhbQCD5W8AuMQAgLzEAICGTWgAh5V1AISZaQCFmWkAiqV1AIu5dQDAxACAxMQAgI5xcACPgXwAjDlxAI05cQCSYX0Ak6l9AMjEAIDMxACAlml5AJeZBACU4XgAlX15AJpBBQCbyQUA0MQAgNTEAIDYxACA3MQAgJypAADgxACAo4ENAKKpAQChqQEA5MQAgKexCQCmAQgApU0NAKSZDQCrkRUAqoUVAKkBFACocQkArx0QAK7pEQCtvREArAEQALMBGACy8RwAscEdALDJHQC0wwCA6MQAgLXhGAC0/RkA7MQAgPDEAID0xACA+MQAgIAdAACBCQAAgv0DAPzEAICjFQUAAMUAgIaIDACHPAMACMUAgKYlBQClNQUADMUAgKtpBQCqYQUAEMUAgBTFAICvWQUArmkFAK1pBQCscQUAGMUAgBzFAICEBAwAIMUAgCTFAIDhbAYAKMUAgOPsewAsxQCAMMUAgDTFAIDvqAYAOMUAgDzFAIBAxQCARMUAgKmNBQCogQUAq60FAKqZBQCtoQUArLkFAK+lBQCuqQUAhGgNAEjFAIBMxQCAUMUAgFTFAIBYxQCAXMUAgL70DAC5SQUAuEEFALtZBQC6QQUAvUkFALxBBQC/cQUAvn0FALGpBQCwoQUAs7kFALKhBQC1mQUAtKkFALd5BQC2kQUAqNUEAKndBACq7QQAqyUDAKyFAwCtjQMArrEDAK+xAwBgxQCAZMUAgGjFAIBsxQCAgBkAAIEZAACCBQAAcMUAgLgxAgC5MQIAujUCALvBAgC8hQIAvbUCAL69AgC/tQIAsGkCALFpAgCyQQIAs0ECALQ5AgC1OQIAthECALcRAgCGoAwAh0wNAHjFAIB8xQCA76QGAIDFAICExQCA78wHAOOUAQDhpAYA4TgBAONcBgCIxQCAjMUAgJDFAICUxQCAmMUAgJzFAICzLQQAoMUAgLVFAwCkxQCAqMUAgLZFAwCsxQCAsMUAgLvlAgC65QIAvd0CALzdAgC/tQIAvrUCAATFAIB0xQCAtMUAgLjFAIC8xQCAwMUAgMTFAIDIxQCAqDEOAKk5DgCqAQ4AqwEOAKxxDgCtcQ4ArnUOAK9tDgCwGQ4AsSUOALItDgCzJQ4AtCEOALUhDgC2IQ4AtyEOALjFDgC5zQ4AusUOALvdDgC8xQ4Avc0OAL5ZDwC/WQ8As6kOAMzFAIDQxQCA1MUAgNjFAIC20Q4AtdkOANzFAIC7wQ4Auv0OAODFAIC+LAAAv8UOAL7FDgC90Q4AvNkOAIJpAACj7Q4AgFkAAIFRAACmlQ4A5MUAgOjFAIClnQ4AqrkOAKuFDgCGyAAAh6wAAK6BDgCvgQ4ArJ0OAK2VDgDsxQCAs5EOAPDFAID0xQCAtqUOAPjFAID8xQCAta0OALrhDgC74Q4AAMYAgATGAIC+6Q4Av9UOALz1DgC96Q4Ao6UKAAjGAIAMxgCAEMYAgBTGAICmzQ0Apc0NABjGAICrbQwAqm0MABzGAIAgxgCArz0MAK49DACtVQwArFUMAKgJDgCpCQ4Aqh0OAKsVDgCsIQ4ArSEOAK4hDgCvIQ4AJMYAgCjGAIAsxgCAMMYAgDTGAIA4xgCAPMYAgEDGAIC4zQEAudUBALrdAQC71QEAvM0BAL1RAQC+UQEAv1EBALAhDgCxIQ4AsiUOALM5DgC0KQ4AtRUOALYdDgC39QEARMYAgEjGAIBMxgCAo5kNAFDGAIClpQ0Apq0NAL7cAgCE7AMAWMYAgKrpDQCr6Q0ArP0NAK3hDQCu4Q0Ar90
NAIBFAACBTQAAglkAAKNFAwBcxgCApUEDAKZBAwBgxgCAhsAEAIcAAwCqLQMAqyUDAKw9AwCtJQMAriUDAK8VAwCoWQIAqYUDAKqBAwCrgQMArIUDAK2NAwCusQMAr7EDAGTGAIBoxgCAbMYAgHDGAIB0xgCAeMYAgHzGAICAxgCAuGUDALltAwC6ZQMAu30DALxlAwC9bQMAvmUDAL/dAACwpQMAsa0DALKlAwCzvQMAtK0DALWdAwC2lQMAt10DALMJAgCExgCAiMYAgIzGAICQxgCAtg0CALUNAgCUxgCAu2kCALphAgCYxgCAnMYAgL9ZAgC+aQIAvWkCALxxAgCgxgCApMYAgKjGAICsxgCA4aABALDGAIDjaAMAtMYAgIEVAACAFQAA74wDAIIVAAC4xgCAvMYAgMDGAIC+cAUA4RgOAOGUDwDjOA8A49QPAISUAgDIxgCAzMYAgNDGAIDUxgCA2MYAgNzGAIDgxgCA5MYAgOjGAIDv7AEA7/gPAIZgBACHBAUAs5UBAITMBQC1dQEA7MYAgPDGAIC2dQEA9MYAgPjGAIC7UQEAulkBAL31AAC8SQEAv/UAAL71AACoJQYAqVUGAKpVBgCrrQYArLUGAK29BgCutQYAr60GAMTGAID8xgCAAMcAgATHAIAIxwCADMcAgBDHAIAUxwCAuGkHALlpBwC6CQcAuwkHALwZBwC9GQcAvg0HAL8BBwCw1QYAsd0GALLVBgCzaQcAtHkHALV5BwC2aQcAt2EHAKPdBgAYxwCAHMcAgCDHAIAkxwCApj0GAKU9BgAoxwCAqxkGAKoRBgAsxwCAMMcAgK+9BwCuvQcArb0HAKwBBgCAXQAAgW0AAIJlAACzUQcAvtgDALVxBwC2cQcANMcAgIbgAACHFAMAul0HALs5BwC8KQcAvRUHAL4dBwC/2QAAqJUGAKmdBgCqlQYAq60GAKy1BgCtvQYArrUGAK+tBgA4xwCAPMcAgEDHAIBExwCASMcAgEzHAIBQxwCAVMcAgLhxAQC5cQEAunEBALtxAQC81QEAvd0BAL7VAQC/zQEAsNUGALGxBgCysQYAs40GALSVBgC1UQEAtlEBALdRAQBYxwCAoxkGAFzHAIBgxwCApjkGAFTGAIBkxwCApTkGAKoVBgCrcQYAaMcAgGzHAICuVQYAr5EBAKxhBgCtXQYAcMcAgHTHAIB4xwCAfMcAgIDHAICExwCAiMcAgIzHAICQxwCAlMcAgJjHAICcxwCAgBkAAIEZAACCBQAAoMcAgISAAgC+gAMAhwwDAIasHADhaAYAqMcAgOOYBwCsxwCAsMcAgLTHAIDvrAcAuMcAgLzHAIDAxwCAxMcAgMjHAIDMxwCA0McAgNTHAICzZQMA2McAgLVlAwC2bQMA3McAgODHAIDkxwCAuukDALvlAwC8/QMAve0DAL7RAwC/0QMA6McAgOzHAIDwxwCA9McAgPjHAID8xwCAAMgAgATIAICogQMAqYEDAKqBAwCrgQMArIEDAK2BAwCugQMAr4EDALBBAwCxTQMAskUDALNVAwC0eQMAtXkDALYZAwC3GQMAuCkDALkpAwC6OQMAuzkDALwpAwC9KQMAvhkDAL8ZAwCBGQAAgBEAAKMhAgCCLQAApSECAAjIAIAMyACApikCABDIAIAYyACAq6ECAKqtAgCtqQIArLkCAK+VAgCulQIAhEwCAL5IHQCHZB0AhuwcAONAAwAcyACA4aABACDIAIDvnAMAJMgAgCjIAIAsyACAMMgAgDTIAIA4yACAPMgAgEDIAIBEyACASMgAgEzIAIBQyACAVMgAgFjIAIDvtAEAhKgdAOF8BgBcyACA43AGAGDIAIBkyACAaMgAgGzIAICz4QEAcMgAgHTIAIB4yACAfMgAgLblAQC19QEAgMgAgLuhAQC62QEAvuQcAIjIAIC/rQEAvqUBAL2xAQC8uQEAqBUeAKkZHgCqKR4AqykeAKw9HgCtJR4Ari0eAK8lHgAUyACAgvkfAIH5HwCA4R8AhMgAgIzIAICGHAAAh7ADALjBHgC5wR4AusEeALvBHgC8wR4AvcEeAL7BHgC/wR4AsF0eALElHgCyLR4AsyUeALQhHgC1KR4AthkeALcZHgCjoR4AkMgAgJTIAICYyACAnMgAgKalHgCltR4AoMgAgKvhHgCqmR4ApMgAgKjIAICv7R4AruUeAK3xHgCs+R4ArMgAgLOZHwCwyACAtMgAgLa9HwC4yACAvMgAgLW1HwC6mR8Au5kfAMDIAIDEyACAvnkfAL95HwC8eR8AvXkfAKglHgCpUR4AqlUeAKtpHgCseR4ArXkeAK5pHgCvaR4AyMgAgMzIAIDQyACA1MgAgNjIAIDcyACA4MgAgOTIAIC42R4Aue0eALr5HgC7+R4AvOkeAL3pHgC+nR4Av5UeALAZHgCxGR4AsukeALPpHgC0+R4AtfkeALbpHgC36R4Ao90eAIIpAACBFQAAgB0AAOjIAICm+R4ApfEeAOzIAICr3R4Aqt0eAKTHAIDwyACArz0eAK49HgCtPR4ArD0eAITIAgCzQQEAvgwBAPjIAIC2QQEA/MgAgADJAIC1UQEAuk0BALslAQCGSAAAh1ABAL4lAQC/LQEAvDEBAL0xAQAEyQCACMkAgIQEAwC+gAQADMkAgO+oHwAQyQCAFMkAgL8oMQDjdB8AGMkAgOE4HgAcyQCAIMkAgCTJAIAoyQCALMkAgDDJAICjzQIANMkAgKXdAgA4yQCAPMkAgKbNAgBAyQCARMkAgKupAgCqwQIArb0CAKy9AgCvoQIArqkCAKm1AgCoaR0AqwECAKoJAgCtAQIArBkCAK8xAgCuAQIAhGwFAEjJAIBMyQCAUMkAgFTJAICCnQEAgZ0BAICdAQC55QMAuOUDALvlAwC65QMAveUDALzlAwC/5QMAvuUDALEhAgCwSQIAsyUCALIlAgC1KQIAtCECALcVAgC2FQIAqM0CAKnRAgCq0QIAqw0BAKwVAQCtBQEArgEBAK8BAQBYyQCAXMkAgGDJAIBoyQCAvvgEAGzJAIBwyQCAdMkAgLgVAQC5HQEAuikBALspAQC89QEAvf0BAL71AQC/7QEAsEkBALFVAQCyXQEAs1UBALRNAQC1NQEAtj0BALcxAQCGoAUAh8gFAHjJAIDvvAAAfMkAgIDJAICEyQCA74weAIQsBwDh8B4AiMkAgOMcHgCMyQCA4ZQBAJDJAIDjbAAAsxkCAJTJAICYyQCAnMkAgIQACAC2xQEAtd0BAKDJAIC70QEAus0BAKTJAICoyQCAv7EBAL7JAQC9wQEAvMkBAKPZBQBkyQCArMkAgLDJAIC0yQCApgUGAKUdBgC4yQCAqxEGAKoNBgC8yQCAwMkAgK9xBgCuCQYArQEGAKwJBgDEyQCAgh0AAIEdAACAHQAAyMkAgMzJAIDQyQCA1MkAgIZAAwCHxAMA2MkAgNzJAIDgyQCA5MkAgOjJAIDsyQCAqK0HAKmxBwCqsQcAq7EHAKwZBwCtBQcArg0HAK8FBwDwyQCA9MkAgP
jJAID8yQCAAMoAgATKAIAIygCADMoAgLgtBwC5zQAAusUAALvdAAC8zQAAvf0AAL71AAC/nQAAsEkHALFVBwCyUQcAsykHALQ5BwC1OQcAtiUHALcVBwCzOQYAEMoAgBTKAIAYygCAHMoAgLaFBgC1kQYAIMoAgLuRBgC6jQYAJMoAgCjKAIC//QYAvv0GAL39BgC8hQYALMoAgKN9BgAwygCANMoAgKbBBgA4ygCAPMoAgKXVBgCqyQYAq9UGAEDKAIC+bAEArrkGAK+5BgCswQYArbkGAKjpAQCp6QEAqvkBAKv5AQCs6QEArekBAK45AQCvOQEAgPUAAIH9AACCwQAARMoAgIYQAACHdAEASMoAgPTIAIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+kQAAv5EAALBJAQCxSQEAslkBALNZAQC0SQEAtUkBALb9AAC39QAA7/QGAEzKAIBQygCAVMoAgO8wAgBYygCAXMoAgGDKAIDj4AcAZMoAgOGAAQBoygCA4ygGAGzKAIDhyAUAcMoAgLMxAgB0ygCAeMoAgJYAAAB8ygCAtikCALUhAgCAygCAu80CALrNAgCEygCAiMoAgL/NAgC+zQIAvc0CALzNAgCMygCAkMoAgJTKAICj/QIAmMoAgKXtAgCm5QIAnMoAgKDKAICkygCAqgECAKsBAgCsAQIArQECAK4BAgCvAQIAgA0AAIEVAACCHQAAqMoAgKzKAICwygCAvlQMALjKAICGwAwAhyQDALzKAIDAygCAxMoAgMjKAIDMygCA0MoAgKi5AgCpAQEAqgEBAKsBAQCsBQEArQ0BAK4FAQCvOQEAhKgNANTKAIDYygCA3MoAgODKAIDkygCA6MoAgOzKAIC4LQEAucUBALrNAQC7xQEAvMEBAL3JAQC++QEAv/kBALBNAQCxUQEAslUBALMpAQC0OQEAtSUBALYlAQC3FQEA4RgGAPDKAIDjOAcA9MoAgPjKAIC+WAwA/MoAgADLAICEbA8ABMsAgL5gDwAIywCADMsAgBDLAIDvcAYAFMsAgIAVAACBGQAAgi0AAITMDwDjYAYAGMsAgOGgAQAcywCA73QAACDLAICGyAwAh/wMACjLAIAsywCAMMsAgDTLAICjCQ4AtMoAgCTLAIA4ywCAPMsAgKYNDgClDQ4AQMsAgKsVDgCqCQ4ARMsAgEjLAICvYQ4Arn0OAK19DgCsAQ4ATMsAgLOpDgBQywCAVMsAgLapDgBYywCAXMsAgLWpDgC6SQ8Au0kPAGDLAIBkywCAvkkPAL9JDwC8SQ8AvUkPAKhdDgCpbQ4AqmUOAKt9DgCsZQ4ArW0OAK5lDgCvuQ8AaMsAgGzLAIBwywCAdMsAgHjLAIB8ywCAgMsAgITLAIC4UQ8AuV0PALpVDwC7aQ8AvH0PAL1lDwC+bQ8Av2EPALDJDwCxyQ8AstkPALPZDwC0yQ8AtckPALZ9DwC3cQ8AiMsAgLURDwC2EQ8AjMsAgIARAACBGQAAgikAALMVDwC8HQ8AvWEPAL5hDwC/fQ8AkMsAgJTLAIC6FQ8AuwkPAKOtDwCYywCAhugAAIfIAQCcywCApq0PAKWtDwCgywCAq00OAKpNDgCkywCAqMsAgK9NDgCuTQ4ArU0OAKxNDgCocQ4AqXEOAKpxDgCrcQ4ArJ0BAK2FAQCuhQEAr7UBAL7sAACsywCAsMsAgLTLAIC4ywCAvMsAgMDLAIDEywCAuGEBALlhAQC6YQEAu2EBALxhAQC9YQEAvmEBAL9hAQCwzQEAsaUBALKhAQCzoQEAtKUBALWtAQC2kQEAt5EBALP5DQDIywCAzMsAgNDLAIDUywCAtgUCALUVAgDYywCAu2ECALoJAgDcywCA4MsAgL9pAgC+YQIAvXUCALx1AgDkywCAo70NAOjLAIDsywCApkECAPDLAID0ywCApVECAKpNAgCrJQIA+MsAgPzLAICuJQIAry0CAKwxAgCtMQIAge0AAIDtAADv0AEAgh0AAADMAIAIzACAhjgEAIdQAwAMzACAEMwAgBTMAIAYzACA4eABABzMAIDjZA8AIMwAgCTMAIAozACALMwAgLORAwAwzACAtbkDALZ9AwA0zACAOMwAgDzMAIC6WQMAu1kDALxJAwC9SQMAvv0AAL/1AACoRQIAqVUCAKpVAgCrZQIArH0CAK2xAgCusQIAr7ECAL5oBQBAzACARMwAgEjMAIBMzACAUMwAgFTMAIBYzACAuF0BALltAQC6ZQEAuw0BALwZAQC9GQEAvg0BAL8FAQCw0QIAsdECALLRAgCz0QIAtHUBALV9AQC2dQEAt20BAOF4DwDjNA4A47gOAOF8DgBczACAYMwAgGTMAIBozACAbMwAgHDMAIB4zACAfMwAgIDMAIDv5A4A79QOAITMAICjnQIAgmEAAIFpAACAUQAAhJwFAKZxAgCltQIAiMwAgKtVAgCqVQIAhkgEAIfMBACv+QEArvEBAK1FAgCsRQIAqJUGAKmlBgCqrQYAq6UGAKy9BgCtoQYArqUGAK/dBgB0zACAjMwAgJDMAICUzACAmMwAgJzMAICgzACApMwAgLhtBwC5dQcAun0HALt1BwC8bQcAvcUHAL7NBwC/xQcAsKUGALGtBgCyuQYAs7EGALSRBgC1kQYAtl0HALdVBwCzJQYAqMwAgKzMAICwzACAtMwAgLYhBgC1NQYAuMwAgLtpBgC6YQYAvMwAgMDMAIC/VQYAvlUGAL1lBgC8bQYAxMwAgKNhBgDIzACAzMwAgKZlBgDQzACA1MwAgKVxBgCqJQYAqy0GANjMAIDczACArhEGAK8RBgCsKQYArSEGAKipBgCpqQYAqrkGAKuxBgCszQYArTEBAK4xAQCvMQEAgMkBAIHJAQCCBQAA4MwAgL54AgCEeAIA5MwAgOjMAIC43QEAue0BALrlAQC7jQEAvJkBAL2ZAQC+jQEAv4UBALBRAQCxUQEAslEBALNRAQC09QEAtf0BALb1AQC37QEAszEGAOzMAICGKAAAh9wBAPDMAIC2sQEAtUUGAPTMAIC7lQEAupUBAPjMAID8zACAvzkBAL4xAQC9hQEAvIUBAATMAICjdQYAAM0AgATNAICm9QEACM0AgAzNAIClAQYAqtEBAKvRAQAQzQCAFM0AgK51AQCvfQEArMEBAK3BAQAYzQCAHM0AgCDNAIAkzQCAKM0AgCzNAIAwzQCANM0AgDjNAIA8zQCAQM0AgETNAIBIzQCATM0AgFDNAIC+cAMAhQA8AOHEBgCERAIA44wHAIBhAACBYQAAgmEAAO9oAwCFRDwA4RACAFjNAIDj2CsAhlA9AIf0AwBczQCA76QHAGDNAIDvQAIAZM0AgGjNAIBszQCAcM0AgHTNAIB4zQCAhDw8AHzNAICAzQCAhM0AgIjNAIDj7AIAjM0AgOEsAQCzUQMAkM0AgJTNAICYzQCAnM0AgLZ5AwC1cQMAoM0AgLs5AwC6MQMApM0AgKjNAIC/9QAAvvUAAL0VAwC8FQMAqD0CAKmBAgCqmQIAq5ECAKy5A
gCtuQIArtECAK/RAgCEqD8Avqg/AKzNAICwzQCAtM0AgLjNAIC8zQCAwM0AgLhRAQC5UQEAulEBALtRAQC8cQEAvXEBAL5xAQC/cQEAsLUCALG9AgCygQIAs4ECALRxAQC1cQEAtnEBALdxAQCAtQAAgb0AAIK1AADIzQCAhrA/AIfgPADMzQCA71QAAL4sPgDhVAYA0M0AgOOIAADUzQCA2M0AgNzNAIDgzQCAo1ECAOTNAIC/2CYA6M0AgOzNAICmeQIApXECAPDNAICrOQIAqjECAPTNAID4zQCAr/UBAK71AQCtFQIArBUCAJAtJACRBSgAkg0oAJPZKACUhS0AlTUsAJbFLACXtTEAmAEwAJkVMACalTUAmyk0AJxtNACdmTUAnj04AJ81OABUzQCAttU+ALXFPgDEzQCAs9E+APzNAIAAzgCABM4AgL/ZPgC+1T4AvcU+ALzFPgC71T4Auuk+AAjOAICPXSQAqeUJAKgVCACrBQwAqg0MAK0BEACsAQwAr0EQAK69EACh4QAADM4AgKMBBACi4QAApZ0EAKSVBACnuQgApgEIAKD1OQChBT0Aouk8AKP1PQAQzgCAFM4AgBjOAIAczgCAscEUALABFACzARgAsn0UALXVGAC01RgAIM4AgCTOAICCISUAgyklACjOAIAszgCAhsUpAIeBLACEGSkAhRkpAIoBLQCL+S0AMM4AgDjOAICOATEAj4k0AIyRMACNHTEAkkU1AJMZNQCG6AcAh+wBAJZZOQCXYTgAlPU0AJVZOQCaoTwAm0U9ADzOAIBAzgCAgX0AAIB9AACcQTwAglUAAKjpPwCp/T8Aqgk/AKsFPwCsHT8ArQU/AK4NPwCvBT8ARM4AgEjOAIBMzgCAUM4AgFTOAIBYzgCAXM4AgGDOAIC4DT8AuRU/ALoVPwC7JT8AvD0/AL39PgC+9T4Av+0+ALB9PwCxQT8AskE/ALNBPwC0QT8AtU0/ALY9PwC3NT8Ao4E8AGTOAIBozgCAbM4AgHDOAICmhTwApZU8AHTOAICrhTwAqrk8AHjOAIB8zgCAr4k8AK6FPACtlTwArJU8AITIAwCz7T0AgM4AgITOAIC26T0AiM4AgIzOAIC16T0Auq09ALu1PQCQzgCAlM4AgL6dPQC/IQIAvKU9AL2VPQCoDT0AqR09AKohPQCrPT0ArCU9AK0tPQCuJT0Ar1k9AIANAACBFQAAgh0AAJjOAICczgCAoM4AgKjOAIC+uAMAuLkCALlhAgC6GQIAuxkCALwJAgC9CQIAviECAL8hAgCwLT0AsTU9ALI1PQCzBT0AtB09ALWhAgC2oQIAt6ECAKOpPACszgCAhigFAIfsAgCwzgCApq08AKWtPAC0zgCAq/E8AKrpPAC4zgCAvM4AgK9lAwCu2TwArdE8AKzhPADAzgCAsykCAMTOAIDIzgCAtvkCAMzOAIDQzgCAtfkCALrVAgC73QIA1M4AgNjOAIC+eQEAv3kBALzFAgC9eQEA3M4AgODOAICj5QIA5M4AgKU1AgDozgCA7M4AgKY1AgDwzgCA9M4AgKsRAgCqGQIArbUBAKwJAgCvtQEArrUBAOPwPgDhrD8A4UA+AON8PwD4zgCA/M4AgADPAIAEzwCAgA0AAIERAACCEQAACM8AgO+oPgAMzwCAEM8AgO8gPgCoLQUAqW0FAKplBQCrrQUArLUFAK29BQCutQUAr60FAKTOAICE6AMAvuADABTPAICGEAMAh5gDABjPAIAczwCAuGkGALlpBgC6AQYAuwEGALwFBgC9DQYAvjEGAL8xBgCw1QUAsd0FALLVBQCzaQYAtHkGALV5BgC2aQYAt2EGAKg5BgCpgQcAqpkHAKuRBwCsuQcArbkHAK7ZBwCv1QcAIM8AgCTPAIA0zgCAKM8AgCzPAIAwzwCANM8AgDjPAIC4VQcAuV0HALppBwC7aQcAvAEHAL0BBwC+AQcAvwEHALCtBwCxsQcAsrEHALOFBwC0nQcAtXUHALZ9BwC3cQcAsxEGADzPAIBAzwCARM8AgEjPAIC2OQYAtTEGAEzPAIC7dQYAumkGAFDPAIBUzwCAv7EGAL5ZBgC9UQYAvGUGAFjPAICjVQYAXM8AgGDPAICmfQYAZM8AgGjPAICldQYAqi0GAKsxBgBszwCAcM8AgK4dBgCv9QYArCEGAK0VBgCouQEAqbkBAKopAQCrKQEArD0BAK0lAQCuLQEAryUBAHTPAICCHQAAgR0AAIAdAAB4zwCAfM8AgIDPAIC+cAEAuIEAALmNAAC6hQAAu5kAALyJAAC9vQAAvrUAAL99AACwXQEAseEAALLhAACz4QAAtOEAALXpAAC20QAAt9EAAITIAgCzpQIAhzgDAIYoAgC2oQIAiM8AgIzPAIC1sQIAup0CALshAwC+bAMAkM8AgL4hAwC/KQMAvDEDAL0xAwCj4QIAlM8AgJjPAICczwCAoM8AgKblAgCl9QIApM8AgKtlAwCq2QIAqM8AgKzPAICvbQMArmUDAK11AwCsdQMAqZkAAKiRAACrzQAAqqEAAK3dAACs3QAAr8UAAK7NAAC+LA0AsM8AgLTPAIC4zwCAvM8AgMDPAIDEzwCAyM8AgLnBAQC4eQAAu8EBALrJAQC9wQEAvNkBAL/FAQC+xQEAsY0AALCNAACzQQAAskkAALVBAAC0WQAAt0EAALZJAADMzwCA0M8AgNTPAIDYzwCA3M8AgO9QBwDgzwCA5M8AgL74DwDjdAcA6M8AgOF8BACAGQAAgQkAAIJ5AADszwCA8M8AgLNpAQD4zwCAhMQCALYdAQD8zwCAANAAgLUVAQC6CQEAuwkBAIboDQCH6A0Avt0BAL/FAQC83QEAvdUBAATQAIAI0ACADNAAgBDQAIDv1AAAFNAAgBjQAIDvTAEA47ADAOG0BgDhgAEA45gBABzQAIAg0ACAJNAAgCjQAIAs0ACAMNAAgKPlAQCEwA0ApZkBADTQAIA40ACAppEBADzQAIBA0ACAq4UBAKqFAQCtWQEArFEBAK9JAQCuUQEA9M8AgETQAIBI0ACATNAAgFDQAIBU0ACAWNAAgFzQAICoaQ8AqXEPAKpxDwCrrQ8ArLUPAK29DwCutQ8Ar6kPALDZDwCx9Q8Asv0PALP1DwC07Q8AtZUPALadDwC3iQ8AuLkPALmFDwC6jQ8Au2kAALx5AAC9eQAAvmkAAL9pAACBnQAAgJ0AAGDQAICCBQAAZNAAgGjQAIBs0ACAcNAAgIaAAwCH9AMAdNAAgHjQAIB80ACAgNAAgITQAICEzwCAs5kPAIjQAICM0ACAkNAAgJTQAIC2XQ8AtV0PAJjQAIC7UQ8Aun0PAJzQAICg0ACAvzEPAL5JDwC9QQ8AvEkPAKNZDgCk0ACAqNAAgKzQAICw0ACApp0OAKWdDgC00ACAq5EOAKq9DgC40ACAvNAAgK/xDgCuiQ4ArYEOAKyJDgDA0ACAxNAAgMjQAIDM0ACAgBkAAIEZAACCBQAA0NAAgISgAQDU0ACAh+gBAIYABADY0ACA3NAAgODQAIDk
0ACAqBUBAKkdAQCqFQEAqyUBAKw9AQCtJQEAri0BAK8lAQDo0ACA7NAAgPDQAID00ACA+NAAgPzQAIAA0QCABNEAgLjJAAC5yQAAutkAALvRAAC8+QAAvfkAAL6ZAAC/mQAAsCUBALEtAQCyJQEAsz0BALQtAQC1HQEAthUBALf5AAAI0QCADNEAgBDRAICzkQIAFNEAgLW5AgC2qQIAGNEAgBzRAIAg0QCAuu0CALvlAgC8/QIAveUCAL7lAgC/1QIApvECACTRAIAo0QCApeECACzRAICjyQIAMNEAgDTRAICuvQIAr40CAKylAgCtvQIAqrUCAKu9AgA40QCAPNEAgID5AACB+QAAggUAAEDRAIC+yAMAhBgDAEjRAIBM0QCAUNEAgFTRAIBY0QCAXNEAgGDRAIBk0QCAhhgEAIecAwBo0QCAbNEAgHDRAIB00QCAeNEAgHzRAIDvsAIAgNEAgOGUAQCE0QCA42wCAIjRAICM0QCAkNEAgJTRAICY0QCA79APAJzRAICg0QCApNEAgKjRAIDhrAEArNEAgONsAACAMQAAgT0AAIIdAADv9A4A42wOALDRAIDhLA8AvnAFALM5AgCEDAUAhugEAIdgBQDcAAAAtvECALX5AgC40QCAu9UCALrVAgC80QCAwNEAgL91AQC+dQEAvcUCALzFAgDE0QCA4fQOAMjRAIDjUA4AzNEAgNDRAIDU0QCA2NEAgNzRAIDg0QCA5NEAgOjRAIDs0QCA8NEAgPTRAIDv5A8ApmUCAPjRAID80QCApW0CAADSAICjrQIABNIAgAjSAICu4QEAr+EBAKxRAgCtUQIAqkECAKtBAgAM0gCAENIAgKiZBgCpmQYAqqkGAKupBgCsuQYArbkGAK6pBgCvqQYAFNIAgIIdAACBHQAAgB0AABjSAIAc0gCAINIAgL50AwC4rQYAubUGALq9BgC7tQYAvK0GAL1RBwC+UQcAv1EHALChBgCxoQYAsqEGALOhBgC0oQYAtaEGALalBgC3mQYARNEAgLMlBgCExAMAtNEAgLY9BgAk0gCAKNIAgLU1BgC6YQYAu2EGAIYIAACHiAAAvmEGAL9hBgC8cQYAvXEGAKNhBgAs0gCAMNIAgDTSAIA40gCApnkGAKVxBgA80gCAqyUGAKolBgBA0gCARNIAgK8lBgCuJQYArTUGAKw1BgCoXQYAqW0GAKplBgCrjQYArJkGAK2FBgCujQYAr4UGAEjSAIBM0gCAUNIAgFTSAIBY0gCAXNIAgGDSAIBk0gCAuIUGALmNBgC6mQYAu5UGALyNBgC9rQYAvqUGAL99AQCw/QYAscUGALLNBgCzxQYAtN0GALXFBgC2zQYAt8UGALPtBgBo0gCAbNIAgHDSAIB00gCAtgUGALURBgB40gCAuwEGALo5BgB80gCAgNIAgL8BBgC+GQYAvREGALwZBgCE0gCAo6kGAIjSAICM0gCApkEGAJDSAICElAEApVUGAKp9BgCrRQYAvqABAJjSAICuXQYAr0UGAKxdBgCtVQYAqJkCAKnBAgCqwQIAq8ECAKzBAgCtyQIArvECAK/xAgCB7QMAgO0DAJzSAICC+QMAhpAcAId0AwCg0gCApNIAgLjFAwC5zQMAusUDALvdAwC8zQMAvf0DAL71AwC/nQMAsEEDALFBAwCyQQMAs0EDALRBAwC1QQMAtkEDALdBAwCzSQIAqNIAgKzSAICw0gCAtNIAgLZJAgC1SQIAuNIAgLuFAwC6hQMAvNIAgMDSAIC/hQMAvoUDAL2VAwC8lQMAxNIAgKMNAgDI0gCAzNIAgKYNAgDQ0gCA1NIAgKUNAgCqwQMAq8EDANjSAIDc0gCArsEDAK/BAwCs0QMArdEDAOOYAQDhpAcA4VgGAONYBgDhoAEA4NIAgOPQAADk0gCA6NIAgOzSAIDvOAAA8NIAgO/0AQD00gCA+NIAgO/4BgCAeQAAgRUAAIIdAACEAB0A/NIAgADTAIC+EB0ACNMAgIbAHACHrB0ADNMAgBDTAIAU0wCAGNMAgBzTAIAg0wCAu8UFALqhBQC5qQUAuJEFAL/NBQC+zQUAvckFALzVBQCzHQYAsh0GALEdBgCwHQYAt6EFALa9BQC1vQUAtL0FAKu9BgCqvQYAqb0GAKi9BgCvfQYArn0GAK19BgCsfQYAJNMAgCjTAIAs0wCAMNMAgDTTAIA40wCAPNMAgEDTAICo7R0AqS0eAKoxHgCrMR4ArJUeAK2dHgCulR4Ar40eAATTAIBE0wCASNMAgEzTAIBQ0wCAVNMAgFjTAIBc0wCAuKkeALmpHgC6XR8Au1EfALxxHwC9cR8AvnUfAL9pHwCw/R4Asc0eALLFHgCzrR4AtLkeALW5HgC2rR4At6UeALO5HgBg0wCAZNMAgGjTAICU0gCAth0eALUdHgBs0wCAuwkeALo5HgBw0wCAhOADAL99HgC+fR4AvXkeALwRHgCCaQAAo/0eAIBFAACBUQAAplkeAL6cAwB00wCApVkeAKp9HgCrTR4AhkgAAIdsAACuOR4ArzkeAKxVHgCtPR4AqF0eAKltHgCqZR4Aq30eAKxlHgCtbR4ArmUeAK/9HgB40wCAfNMAgIDTAICE0wCAiNMAgIzTAICQ0wCAlNMAgLhpAQC5aQEAunkBALt5AQC8aQEAvWkBAL7dAQC/1QEAsIUeALGNHgCyhR4As50eALSFHgC1jR4AtoUeALdZAQCz7R4AmNMAgJzTAICg0wCApNMAgLbtHgC17R4AqNMAgLtJHgC6QR4ArNMAgLDTAIC/SR4AvkEeAL1JHgC8UR4AtNMAgKOpHgC40wCAvNMAgKapHgDA0wCAxNMAgKWpHgCqBR4Aqw0eAMjTAIDM0wCArgUeAK8NHgCsFR4ArQ0eAKghAwCpIQMAqiEDAKshAwCsIQMArSEDAK4hAwCvIQMA0NMAgNTTAIDY0wCAvmACANzTAIDg0wCA6NMAgOzTAIC4iQMAuYkDALqdAwC7lQMAvLkDAL25AwC+eQAAv3kAALDlAwCx7QMAsuUDALP9AwC07QMAtd0DALbVAwC3vQMAgKkAAIG1AACCvQAAs6UDAPDTAIC1pQMAtq0DAPTTAICE4AIA+NMAgLotAwC7JQMAvD0DAL0lAwC+JQMAvxUDAKPpAwD80wCAhmgEAIeAAwAA1ACApuEDAKXpAwAE1ACAq2kDAKphAwAI1ACADNQAgK9ZAwCuaQMArWkDAKxxAwAQ1ACAFNQAgBjUAIAc1ACAINQAgOE8HwAk1ACA40AeACjUAIAs1ACAMNQAgO+MHgA01ACAONQAgDzUAIBA1ACARNQAgIIlAACBEQAAgB0AAEjUAIDj5AMATNQAgOGsAQBQ1ACA77ADAIRkAgC+YAUAhtAEAIdEBQBY1ACAXNQAgGDUAIBk1ACAaNQAgGzUAIBw1ACAdNQAgHjUAIDvsAEAhKQFAOHcHgB81ACA4xABAIDUAICE1ACAiNQAgIzUAICzUQEAkNQAgJTUAICY1ACAnNQAgLYRAQC1fQEAoNQAgLsNAQC6DQE
ApNQAgKjUAIC//QAAvv0AAL39AAC8/QAAqDkGAKk5BgCqmQYAq5EGAKy1BgCt0QYArskGAK/BBgBU1ACArNQAgLDUAIC01ACAgA0AAIGxAACCsQAAuNQAgLhhBwC5YQcAumEHALt9BwC8ZQcAvW0HAL5lBwC/HQcAsIkGALGJBgCyaQcAs2kHALR5BwC1eQcAtmkHALdlBwCjEQYAvNQAgMDUAIC+gAMAxNQAgKZRBgClPQYAyNQAgKtNBgCqTQYAhggAAId8AwCvvQcArr0HAK29BwCsvQcAzNQAgNDUAICzSQcA1NQAgLVZBwDY1ACA3NQAgLZRBwDg1ACA5NMAgLtBBwC6dQcAvUUHALxFBwC/RQcAvkUHAKh5BgCpeQYAqokGAKuJBgCsmQYArZkGAK6JBgCviQYA5NQAgOjUAIDs1ACA8NQAgPTUAID41ACA/NQAgADVAIC4jQYAuZUGALqVBgC7pQYAvL0GAL1xAQC+cQEAv3EBALD5BgCxzQYAstkGALPZBgC0yQYAtckGALa9BgC3tQYAowEGAATVAIAI1QCADNUAgBDVAICmGQYApREGABTVAICrCQYAqj0GABjVAIAc1QCArw0GAK4NBgCtDQYArA0GACDVAIAk1QCAKNUAgCzVAICAGQAAgRkAAIIFAAAw1QCAhKwBAL6sAQCH6AAAhkwPADjVAIA81QCAQNUAgETVAIConQIAqcUCAKrNAgCrwQIArMUCAK3NAgCu+QIArz0DAEjVAIBM1QCAUNUAgFTVAIC+PAwAWNUAgFzVAIBg1QCAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+ZAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDALNFAgBk1QCAaNUAgGzVAIBw1QCAtk0CALVNAgB01QCAu4kDALqBAwB41QCAfNUAgL+JAwC+gQMAvYkDALyRAwCA1QCAowECAITVAICI1QCApgkCAIzVAICQ1QCApQkCAKrFAwCrzQMAlNUAgJjVAICuxQMAr80DAKzVAwCtzQMAgO0BAIEVAACCEQAAhAACAJzVAIDhpAEAoNUAgOPsAACo1QCArNUAgLDVAIDvMAAAtNUAgLjVAIC81QCAwNUAgIbgDACH9AIAxNUAgMjVAIDM1QCA0NUAgO/MBgDU1QCA4bAHANjVAIDjEAYA3NUAgODVAIDk1QCA6NUAgOzVAIDw1QCA9NUAgPjVAID81QCAANYAgATWAIAI1gCA7+gBAIUYDwDhzAYADNYAgOMcBgCAKQAAgR0AAIIFAAAQ1gCAszkCAITMDQCGaA8Ah/wMAOHQ0gO28QEAtfkBABjWAIC72QEAutEBAL7kDAAc1gCAv30BAL59AQC9fQEAvMEBAKjxDQCp8Q0AqvENAKvxDQCsMQ4ArTEOAK4xDgCvMQ4ApNUAgBTWAIAg1gCAJNYAgCjWAIAs1gCAMNYAgDTWAIC46Q4AuekOALqJDgC7hQ4AvJ0OAL2BDgC+gQ4Av7UOALBVDgCxXQ4AslUOALPpDgC0+Q4AtfkOALbpDgC34Q4Ao3kNADjWAIA81gCAQNYAgETWAICmsQ4ApbkOAEjWAICrmQ4AqpEOAEzWAIBQ1gCArz0OAK49DgCtPQ4ArIEOAFTWAICz7Q8AWNYAgFzWAIC26Q8AYNYAgGTWAIC16Q8Auq0PALu1DwA01QCAaNYAgL6VDwC/mQ8AvK0PAL2hDwCoIQ4AqSEOAKohDgCrPQ4ArCUOAK0tDgCuJQ4Ar1UOAGzWAIBw1gCAdNYAgHjWAICAHQAAgQkAAIK9AAB81gCAuDkOALk5DgC6yQ4Au8kOALzZDgC92Q4AvskOAL/JDgCwLQ4AsTUOALI9DgCzMQ4AtBUOALUZDgC2CQ4AtwkOAKOpDgCA1gCAhIACAL6AAQCFAAQApq0OAKWtDgCI1gCAq/EOAKrpDgCGKAcAhxgAAK/dDgCu0Q4AreUOAKzpDgCM1gCAs+0BAJDWAICU1gCAtuUBAJjWAICc1gCAte0BALplAQC7bQEAoNYAgKTWAIC+bQEAv10BALx1AQC9bQEAqN0NAKnpDQCqIQIAqyECAKwhAgCtIQIAriECAK8hAgCo1gCArNYAgLDWAIC01gCAohECAKMRAgCgqQ4AodUCALiJAgC5iQIAup0CALuVAgC8vQIAvXUDAL59AwC/dQMAsOUCALHtAgCy5QIAs/0CALTtAgC13QIAttUCALe9AgCjqQIAj8UaALjWAIC81gCAwNYAgKahAgClqQIAxNYAgKspAgCqIQIAyNYAgMzWAICvGQIArikCAK0pAgCsMQIAniUOAJ/lDgCc6QoAnRUKAJpFFgCbRQoAmFkWAJlRFgCWcRIAl4ETAJRVEgCV7RIAktEeAJPZHgCQtRoAkVUeAISpHwCFJR8AhiUfAIexEwDQ1gCA1NYAgIJZGwCDURsAjEUSAI2lFwCOpRcAj7kXAIA5+wHY1gCAijkTAIutEwCUmQsAlaEPAJZpDwCX3Q8A3NYAgO+cDwCSyQsAk30LAJxFAwDjeA4A4NYAgOGYDADk1gCAhHgCAJqRAwCbXQMA4QQAAL6IBQDj3OoD6NYAgOzWAIDw1gCA7+wAAO+MDgDhcA4A4fwOAOMwAADjeA4AgSEAAIA5AADvtO0DgikAALMJAgD41gCAhmgEAIcsBQD81gCAtg0CALUNAgAA1wCAu8UBALrFAQAE1wCACNcAgL99AQC+fQEAvdUBALzVAQCE1gCA9NYAgAzXAIAQ1wCAFNcAgBjXAIAc1wCAINcAgKi9BQCp5QUAquEFAKvhBQCs5QUAre0FAK7RBQCv0QUAsGEGALFhBgCyYQYAs2EGALTZBgC12QYAtskGALfBBgC4yQYAuckGALp5BwC7eQcAvEUHAL0lBwC+EQcAvw0HAKNJBQAk1wCAKNcAgCzXAIAw1wCApk0FAKVNBQA01wCAq4UGAKqFBgA41wCAPNcAgK89BgCuPQYArZUGAKyVBgBA1wCARNcAgEjXAIBM1wCAUNcAgFTXAIBY1wCAXNcAgIA5AACBOQAAggUAAGDXAIC+uAMAhLgDAGjXAIBs1wCAqMUGAKnVBgCq1QYAq+UGAKz9BgCtHQEArhUBAK8NAQBk1wCAcNcAgIaIAQCHHAEAdNcAgHjXAIB81wCAgNcAgLjpAQC56QEAuokBALuJAQC8mQEAvZkBAL6JAQC/iQEAsHUBALF9AQCydQEAs+kBALT5AQC1+QEAtukBALfhAQCzXQYAhNcAgIjXAICM1wCAhLwBALadAQC1dQYAkNcAgLu5AQC6sQEAlNcAgJjXAIC/PQEAvj0BAL09AQC8oQEAnNcAgKMZBgCg1wCApNcAgKbZAQCo1wCArNcAgKUxBgCq9QEAq/0BALDXAIC01wCArnkBAK95AQCs5QEArXkBAKj5AgCp+QIAqi0DAKs9AwCsJQMArS0DAK4lAwCvmQMAuNcAgLzXAIDA1wCAxNcAgIANAACBsQAAgrEAAMjXAIC4lQMAuZ0DALqhAwC7oQMAvH
EAAL1xAAC+cQAAv3EAALDpAwCx6QMAsvUDALPFAwC03QMAtbUDALaxAwC3sQMAvswDAMzXAIDQ1wCA2NcAgNzXAIDg1wCA5NcAgO/kAgDo1wCA4ZQBAOzXAIDjLAEA8NcAgPTXAICHGAMAhhz8A7tNAwC6TQMA+NcAgPzXAIC/EQMAvnkDAL1xAwC8QQMAs8UDAITo/AMA2ACABNgAgAjYAIC2zQMAtc0DAAzYAICkAfwDpSX/A6bZ/wOnAfgDENgAgKEVAwCiHQMAoz0CAKwR9wOtAfADri3zA68B8wOoEfsDqZn7A6oB9AOrHfcDtAHoA7Vl6wO+xPwDhMT8A7AB7AOxVe8Dsk3vA7Nx7gMU2ACAGNgAgBzYAIAg2ACAJNgAgCjYAIAs2ACAMNgAgOFQBgDhNAQA42wBAOPoBgA02ACAONgAgDzYAIBA2ACAgDUAAIE9AACCNQAASNgAgEzYAIBQ2ACA77ABAO/ABgCj5QIAVNgAgIbo/AOHfP0DWNgAgKbtAgCl7QIAXNgAgKttAgCqbQIAYNgAgGTYAICvMQIArlkCAK1RAgCsYQIAqI3+A6mV/gOqnf4Dq5X+A6yx/gOtvf4Drqn+A6+p/gNE2ACAaNgAgGzYAIBw2ACAdNgAgHjYAIB82ACAgNgAgLgl/wO5Lf8DuiX/A7s9/wO8Jf8DvS3/A74l/wO/zf8DsKn+A7Gp/gOygf4Ds4H+A7SB/gO1if4Dtmn/A7cd/wOE2ACA4SD8A4jYAIDjePwDjNgAgJDYAICU2ACAmNgAgJzYAICg2ACApNgAgKjYAICAHQAAgXEAAIJxAADvDP0Ds1X+A6zYAICw2ACAvkAAALTYAIC2ff4DtXn+A7jYAIC7Lf4Dui3+A4boAACHrAAAvw3+A74F/gO9Ff4DvBX+A6OV/wO82ACAwNgAgMTYAIDI2ACApr3/A6W5/wPM2ACAq+3/A6rt/wPQ2ACA1NgAgK/N/wOuxf8DrdX/A6zV/wPY2ACAs/H+A9zYAIDg2ACAto3+A+TYAIDo2ACAtY3+A7pFAQC7TQEA7NgAgPDYAIC+RQEAv00BALxVAQC9TQEAqC3+A6k1/gOqPf4Dq0n+A6xB/gOtSf4DrnH+A69x/gP02ACA+NgAgPzYAIAA2QCABNkAgAjZAIAM2QCAENkAgLhJAQC5VQEAul0BALtVAQC8TQEAvXUBAL59AQC/dQEAsMUBALHNAQCyxQEAs90BALTFAQC1zQEAtsUBALd9AQCjtf0DFNkAgBjZAICExAMAHNkAgKbJ/QOlyf0DINkAgKsJAgCqAQIAKNkAgL7sAgCvCQIArgECAK0JAgCsEQIAgEkAAIFVAACCVQAAo0UDACzZAIClRQMApkUDADDZAICGwAQAhxQDAKopAwCrJQMArD0DAK0hAwCuIQMArxUDADTZAIA42QCAPNkAgEDZAIBE2QCASNkAgEzZAIBQ2QCAqH0CAKmhAwCqoQMAq6EDAKyhAwCtqQMArpEDAK+RAwCwgQMAsY0DALKFAwCzmQMAtIkDALW9AwC2tQMAt30DALhFAwC5TQMAukUDALtdAwC8RQMAvU0DAL5FAwC/+QAA1NcAgLMNAgBU2QCAWNkAgLYNAgBc2QCAYNkAgLUNAgC6YQIAu20CAGTZAIBo2QCAvmkCAL9dAgC8dQIAvWkCAGzZAIBw2QCAdNkAgHjZAIB82QCA4aQBAIDZAIDjQAMAhNkAgIjZAICM2QCA77gDAIAVAACBHQAAggUAAJDZAICEgAIAvsgFAIcYBQCGLAQAmNkAgJzZAICg2QCA76gBAKTZAIDhdP4DqNkAgOPw/gOs2QCAsNkAgLTZAIC42QCAvNkAgMDZAIDE2QCAs5EBAMjZAIC1UQEAtlEBAMzZAIDQ2QCA1NkAgLp9AQC7dQEAvG0BAL39AAC+9QAAv+kAAKgpBgCpVQYAqlUGAKuNBgCslQYArZ0GAK6VBgCvjQYAlNkAgNjZAIDc2QCA4NkAgOTZAIDo2QCA7NkAgPDZAIC4bQcAuQUHALoNBwC7BQcAvB0HAL0FBwC+AQcAvz0HALD1BgCx/QYAsvUGALNlBwC0fQcAtWEHALZhBwC3VQcA4xAFAPTZAIDh8AQA+NkAgIAdAACBCQAAgjkAAPzZAIAA2gCAhOgDAL7gAwAE2gCA78wFAAjaAICHOAAAhhgAAKOdBgAM2gCAENoAgBTaAIAY2gCApl0GAKVdBgAc2gCAq3kGAKpxBgAg2gCAJNoAgK/lBwCu+QcArfEHAKxhBgCokQYAqZEGAKqRBgCrrQYArLkGAK2lBgCurQYAr6UGACjaAIAs2gCAMNoAgDTaAIA42gCAPNoAgEDaAIBE2gCAuGUBALltAQC6ZQEAu30BALxlAQC9bQEAvmUBAL/ZAQCw3QYAsaUGALKtBgCzpQYAtKEGALWpBgC2mQYAt5kGALMZBgBI2gCATNoAgFDaAIBU2gCAtiUGALUxBgBY2gCAu2EGALoZBgBc2gCAYNoAgL9tBgC+ZQYAvXEGALx5BgBk2gCAo10GAGjaAIBs2gCApmEGAHDaAICEmAEApXUGAKpdBgCrJQYAvqQBAHjaAICuIQYArykGAKw9BgCtNQYAqcUCAKixAgCrxQIAqsUCAK3NAgCsxQIAr/UCAK71AgB82gCAgNoAgITaAICI2gCAjNoAgJDaAICU2gCAmNoAgLnJAwC4wQMAu9kDALrBAwC9+QMAvMkDAL+ZAwC+8QMAsUUDALBFAwCzRQMAskUDALVFAwC0RQMAt0UDALZFAwCASQMAgUkDAIJdAwCzRQIAvtwMALVFAgC2RQIAnNoAgIYADACH5AMAuokDALuJAwC8mQMAvZkDAL6JAwC/iQMAowkCAKDaAICk2gCAqNoAgKzaAICmCQIApQkCALDaAICrxQMAqsUDALTaAIC42gCAr8UDAK7FAwCt1QMArNUDALzaAIDA2gCAxNoAgCTZAIDvAAAAyNoAgMzaAIDQ2gCA4+gAANTaAIDhjAEA2NoAgNzaAIDg2gCA6NoAgOzaAICAbQAAgXUAAIJ9AACEQAIAhvAMAId4DQDw2gCA9NoAgPjaAID82gCAANsAgATbAIAI2wCADNsAgBDbAIAU2wCAGNsAgBzbAIAg2wCAJNsAgCjbAIAs2wCAMNsAgO/MAQCE7AwA4TAGADTbAIDjGAEAONsAgDzbAIBA2wCARNsAgLPlAQBI2wCAhIQPAEzbAIBQ2wCAtuUBALX1AQBY2wCAu30BALrZAQC+oAwAXNsAgL8hAQC+OQEAvTEBALw5AQCo7Q0AqSUOAKotDgCrJQ4ArD0OAK0lDgCuLQ4AryUOAOTaAICC9Q8AgeUPAIDpDwBU2wCAYNsAgIaYAACHDAMAuK0OALlFDwC6TQ8Au0UPALxFDwC9TQ8AvkUPAL95DwCwXQ4AsfkOALKtDgCzpQ4AtL0OALWlDgC2pQ4At5UOAGTbAIDv7AwAaNsAgGzbAIBw2wCAdNsAgHjbAIB82wCAvugAAIDbAICE2wCAiNsAgIzbAIDj6A0AkNsAg
OEEDACj5Q4AlNsAgJjbAICc2wCAoNsAgKblDgCl9Q4ApNsAgKt9DgCq2Q4AqNsAgKzbAICvIQ4ArjkOAK0xDgCsOQ4AqDkOAKk5DgCqUQ4Aq1EOAKxxDgCtcQ4ArnEOAK9xDgCw2wCAtNsAgLjbAIC82wCAgBkAAIEZAACCBQAAwNsAgLjRDgC50Q4AutEOALvlDgC84Q4AveEOAL7hDgC/4Q4AsBEOALERDgCyEQ4AsxEOALTxDgC18Q4AtvEOALfxDgCz2Q4AyNsAgIYoAACHuAAAzNsAgLbxDgC1+Q4A0NsAgLvVDgC61Q4A1NsAgNjbAIC/NQ4AvjUOAL3FDgC8xQ4A3NsAgKOdDgDg2wCA5NsAgKa1DgDo2wCA7NsAgKW9DgCqkQ4Aq5EOAPDbAID02wCArnEOAK9xDgCsgQ4ArYEOAKjdDQCp6Q0Aqj0CAKuNAgCsmQIArZkCAK6JAgCviQIAvqwEAPjbAID82wCAhCADAADcAIAE3ACACNwAgAzcAIC4iQIAuYkCALqZAgC7kQIAvLkCAL25AgC+eQMAv3kDALD5AgCx+QIAss0CALPFAgC03QIAtcUCALbBAgC3uQIAs7UCABDcAIAU3ACAGNwAgBzcAIC2GQIAtRECACDcAIC7PQIAuj0CACTcAIAo3ACAvwECAL4ZAgC9EQIAvBkCACzcAICj8QIAMNwAgDjcAICmXQIAPNwAgEDcAIClVQIAqnkCAKt5AgCGSAUAh6wEAK5dAgCvRQIArF0CAK1VAgCohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAETcAIBI3ACATNwAgFDcAICB8QEAgJkBAHTaAICC9QEAuHkBALl5AQC6zQEAu8UBALzdAQC9xQEAvsUBAL/1AQCwtQIAsb0CALKBAgCzgQIAtFUBALVdAQC2SQEAt0kBAFTcAIBY3ACAXNwAgO/UAQCEEAUAYNwAgGTcAIDvjA4AvuwFAOHsDgBo3ACA4xwOAGzcAIDhlAEAcNwAgONkDgCzXQIAdNwAgHjcAIB83ACAgNwAgLYVAgC1dQIAhNwAgLs5AgC6MQIAiNwAgIzcAIC/2QEAvtEBAL0VAgC8FQIAo50FADTcAICQ3ACAlNwAgJjcAICm1QUApbUFAJzcAICr+QUAqvEFAKDcAICk3ACArxkGAK4RBgCt1QUArNUFAIBRAACBWQAAgmEAALOVBgCo3ACAtXEHALZxBwCs3ACAhkADAIdUAwC67QcAu+UHALzlBwC97QcAvtEHAL/NBwCw3ACAtNwAgLjcAIC83ACAwNwAgMTcAIDvQAQAyNwAgOEwBwDM3ACA45QEANDcAIDU3ACA2NwAgNzcAIDg3ACAoxkGAOTcAIDo3ACA7NwAgPDcAICm/QcApf0HAPTcAICraQcAqmEHAPjcAID83ACAr0EHAK5dBwCtYQcArGkHAKjNBwCp0QcAqtEHAKstBgCsNQYArT0GAK41BgCvnQYAAN0AgATdAIAI3QCADN0AgIAZAACBGQAAggUAABDdAIC4iQYAuYkGALqZBgC7kQYAvLkGAL25BgC+UQEAv1EBALDlBgCx7QYAsv0GALP1BgC02QYAtcUGALbBBgC3uQYAqNEBAKnZAQCqCQEAqwkBAKwZAQCtGQEArgkBAK8JAQCEYAEAvnwBAIeoAACGjAEAGN0AgBzdAIAg3QCAJN0AgLgJAQC5CQEAuhkBALsRAQC8OQEAvTkBAL75AAC/+QAAsH0BALFBAQCyRQEAs10BALRFAQC1TQEAtkUBALc5AQAo3QCALN0AgDDdAICzjQIANN0AgLWdAgC2lQIAON0AgDzdAIBA3QCAurUCALuJAgC8nQIAvYUCAL6NAgC/hQIAps0CAETdAIBI3QCApcUCAEzdAICj1QIAUN0AgFTdAICu1QIAr90CAKzFAgCt3QIAqu0CAKvRAgCE9AMAWN0AgKgxAwCpMQMAqjEDAKsxAwCskQAArZEAAK6RAACvjQAAXN0AgGDdAIBk3QCAaN0AgGzdAIBw3QCAdN0AgHjdAIC4vQAAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALD9AACxxQAAss0AALOpAAC0uQAAtaUAALahAAC3oQAAgL0BAIEJAACCGQAAfN0AgIDdAIC+WAIAhxQdAIacHQCEbB0AxNsAgIjdAICM3QCAvrwcAJDdAICU3QCAmN0AgLP5AgCc3QCAoN0AgKTdAICo3QCAtlEBALVZAQC+3B8Au0EBALp5AQCs3QCAsN0AgL8hAQC+PQEAvT0BALxZAQDhcAcAtN0AgOMIBgC43QCA78wAALzdAIDA3QCAxN0AgOMQAADI3QCA4dABAMzdAICGkBwAh/QcAO/gBgDQ3QCAo3kCANTdAIDY3QCA3N0AgODdAICm0QEApdkBAOTdAICrwQEAqvkBAOjdAIDs3QCAr6EBAK69AQCtvQEArNkBAITdAICCFQAAgeUfAIDlHwDw3QCA9N0AgPjdAID83QCAqAkfAKkJHwCqHR8AqxUfAKwNHwCtcR8ArnEfAK9xHwCwER8AsS0fALIlHwCzyR8AtN0fALXBHwC2wR8At8EfALjFHwC5yR8AutUfALupHwC8uR8AvbkfAL6pHwC/oR8As7UfAADeAIAE3gCACN4AgAzeAIC20R8AtaUfABDeAIC7yR8AuvUfABTeAIAY3gCAvyUfAL45HwC9PR8AvNEfABzeAIAg3gCAJN4AgCjeAIAs3gCA4WAfADDeAIDjtBwANN4AgDjeAIA83gCA7wAdAEDeAIBE3gCASN4AgEzeAICjNR4AUN4AgFTeAIBY3gCAXN4AgKZRHgClJR4AYN4AgKtJHgCqdR4AhKgCAGTeAICvpR4ArrkeAK29HgCsUR4AgE0AAIFVAACCVQAAs8kBAGjeAIC12QEAtskBAGzeAICGoAAAhwQBALrFAQC7rQEAvLUBAL29AQC+tQEAv60BAKiZAQCpmQEAqg0BAKsFAQCsHQEArQUBAK4FAQCvNQEAcN4AgHTeAIB43gCAfN4AgIDeAICE3gCAiN4AgIzeAIC4JQEAuS0BALo5AQC7OQEAvCkBAL0pAQC+3QAAv9UAALBNAQCxJQEAsi0BALMlAQC0PQEAtSUBALYhAQC3HQEAkN4AgJTeAICY3gCAo4kCAJzeAIClmQIApokCAKDeAICk3gCAqN4AgKqFAgCr7QIArPUCAK39AgCu9QIAr+0CAKzeAICw3gCAtN4AgIRAAgC43gCAvN4AgMDeAIDE3gCAgA0AAIEVAACCHQAAyN4AgMzeAIDQ3gCAh7QDAIbcBAC+zAMA2N4AgNzeAIDg3gCA7+gCAOTeAIDo3gCA7N4AgOP8AgDw3gCA4dABAPTeAID43gCA/N4AgADfAIAE3wCAs2EDAAjfAIAM3wCAEN8AgBTfAIC2eQMAtXEDABjfAIC7XQMAul0DABzfAIAg3wCAv+EAAL79AAC9/QAAvP0AALC5AgCxuQIAsgkBALMJAQC0GQEAtQUBALYFAQC3PQEAuAUBALll
AQC6bQEAu2UBALxhAQC9YQEAvmEBAL9hAQCFXAcAJN8AgCjfAIAs3wCAFN0AgDDfAIA03wCAON8AgKgxAgCpOQIAqskCAKvJAgCs2QIArdkCAK7JAgCvyQIAhMwFAOGAHgA83wCA47weAOE4HgBA3wCA46AAAL4QBABI3wCATN8AgO8MHgBQ3wCAVN8AgFjfAIBc3wCA73QeAKNhAgCCUQAAgUEAAICRAABg3wCApnkCAKVxAgBk3wCAq10CAKpdAgCGyAQAhzwFAK/hAQCu/QEArf0BAKz9AQCohQYAqY0GAKqFBgCrmQYArIkGAK2JBgCuvQYAr7EGAETfAIBo3wCAbN8AgHDfAIB03wCAeN8AgHzfAICA3wCAuJ0GALmtBgC6pQYAuwkHALwZBwC9GQcAvg0HAL8FBwCw0QYAsdEGALLRBgCz0QYAtLUGALW9BgC2tQYAt60GALMNBgCE3wCAiN8AgIzfAICQ3wCAtgkGALUBBgCU3wCAuxUGALoVBgCY3wCAnN8AgL95BgC+cQYAvQUGALwFBgCg3wCA4aAEAKTfAIDjXAUAgA0AAIE1AACCPQAAqN8AgKzfAICw3wCAhGADAL5sAAC/8AEAhZAAALTfAIDvmAUAo40HAIQIAACGAAwAh4wAALjfAICmiQcApYEHALzfAICrlQcAqpUHAMDfAIDE3wCAr/kHAK7xBwCthQcArIUHAMjfAICz6QYAzN8AgNDfAIC26QYA1N8AgNjfAIC16QYAukUBALtNAQDc3wCA4N8AgL5FAQC/TQEAvFUBAL1NAQCoIQYAqSEGAKolBgCrPQYArCUGAK0tBgCuSQYAr0EGAOTfAIDo3wCA7N8AgPDfAID03wCA+N8AgPzfAIAA4ACAuEkBALlJAQC6WQEAu1EBALx5AQC9eQEAvhkBAL8VAQCwxQEAsc0BALLFAQCz3QEAtMUBALXNAQC2xQEAt3kBAATgAIAI4ACADOAAgKOhBQAQ4ACApaEFAKahBQAU4ACAjyHqAxjgAICqDQIAqwUCAKwdAgCtBQIArg0CAK8FAgCX7RIAlmUSAJVFEQCUnRYAk3EWAJJVFQCReesDkFnqA59hBgCeNQUAnUUaAJxpGgCbVRkAmkUeAJlZHgCYRR0A4WAAABzgAIDjTD4AIOAAgKOxAgCi1QEAobUHAKCJBgCxATgAsAk+ALOVOgCyjToAtbUmALQBJADvaDoAvjAMAKnJNgCowTYAqwEwAKrhNwCtzTMArPUyAK/5PgCuATwAoRkCACjgAICjbQ4Aom0OAKX1CgCkAQgAp4ULAKaZCgCGAA0Ah0QNAIIJ6wODCesDhDHqA4UVFACGORcAh80XAISgDQAs4ACAiiUQAIsNEwCMnRMAjQ0cAI4ZHwCPDR8A1N4AgO8AAwCSbRgAk0kbAJR9GwCVBQQAllkHAJdJBwAw4ACANOAAgJpFBgCbLQAAnFEDAONgAAA44ACA4WwAAIClAQCBAQEAggUBAL4ADAA84ACAQOAAgETgAIDviAEASOAAgOFUBgBM4ACA41QBAFDgAIBU4ACAWOAAgFzgAICz6QIAYOAAgGTgAIBo4ACAbOAAgLadAgC1mQIAcOAAgLuJAgC6vQIAdOAAgHjgAIC/WQIAvlECAL1ZAgC8kQIAoykNAHzgAICA4ACAhOAAgIjgAICmXQ0ApVkNAIzgAICrSQ0Aqn0NAJDgAICY4ACAr5kNAK6RDQCtmQ0ArFENAIBRAACBWQAAgmEAALMtDwCc4ACAtS0PALbJDwCg4ACAhkADAIcIAwC6yQ8Au8UPALzBDwC9wQ8AvsEPAL/BDwAk4ACAlOAAgKTgAICo4ACArOAAgLDgAIC04ACAuOAAgKhFDgCpgQ8AqskPAKvJDwCsyQ8ArSUPAK4tDwCvJQ8AsGEPALFtDwCyeQ8As3kPALRpDwC1aQ8Ath0PALcVDwC4LQ8AuTUPALo1DwC7BQ8AvB0PAL3xAAC+8QAAv/EAAKNhDgC84ACAhMQBAMDgAIDE4ACApoUOAKVhDgDI4ACAq4kOAKqFDgDM4ACA0OAAgK+NDgCujQ4ArY0OAKyNDgDU4ACA2OAAgNzgAIDg4ACA5OAAgOjgAIDs4ACA8OAAgPTgAICCHQAAgR0AAIAdAAD44ACA/OAAgADhAIC+tAEAqK0BAKnVAQCq1QEAqwUBAKwdAQCtBQEArg0BAK8FAQCGgAEAhxgBAAjhAIAM4QCAEOEAgBThAIAY4QCAHOEAgLiFAAC5jQAAuoUAALudAAC8hQAAvY0AAL6FAAC/vQAAsH0BALHhAACy5QAAs/0AALTtAAC13QAAttUAALe9AACzXQIAIOEAgCThAIAo4QCALOEAgLaFAgC1lQIAMOEAgLslAwC6uQIANOEAgDjhAIC/GQMAvikDAL0pAwC8MQMAvswEAKMZAgA84QCAQOEAgKbBAgBE4QCASOEAgKXRAgCq/QIAq2EDAEzhAIBQ4QCArm0DAK9dAwCsdQMArW0DAKgpAwCpKQMAqjkDAKs5AwCsKQMArSkDAK6dAACvlQAAVOEAgFjhAIBc4QCAYOEAgGThAICCqQEAga0BAICtAQC4mQAAua0AALqlAAC7bQAAvHUAAL19AAC+dQAAv20AALDtAACx9QAAsvUAALPFAAC03QAAtb0AALa1AAC3qQAA4XgBAOEcDgDjEAAA4zwOAGjhAIBs4QCAvhQEAHDhAICErAIAeOEAgId4BQCGDAUAfOEAgIDhAIDvvAAA70gOALPxAgCE4QCAiOEAgIzhAICQ4QCAtukCALXhAgCU4QCAu3EBALppAQCY4QCAhKAEAL85AQC+WQEAvVEBALxhAQCc4QCAhIwEAKDhAICEADgApOEAgKjhAICs4QCAsOEAgKqJDgCriQ4AqLkOAKmxDgCu/Q4Ar+EOAKz5DgCt9Q4Asq0OALNlDgCwkQ4AsaUOALZ9DgC3ZQ4AtH0OALV1DgC6XQ4Au+UNALhdDgC5VQ4AvuENAL/pDQC8/Q0AvfUNAKOxBQB04QCAtOEAgLjhAIC84QCApqkFAKWhBQDA4QCAqzEGAKopBgDE4QCAyOEAgK95BgCuGQYArREGAKwhBgDM4QCA0OEAgNThAIDY4QCAgB0AAIEJAACCOQAA3OEAgODhAIDk4QCAhsgAAIcMAwDo4QCA7OEAgPDhAID04QCAqKUHAKm1BwCqvQcAq8kHAKzZBwCt2QcArskHAK/BBwC+oAAA+OEAgPzhAIAA4gCABOIAgAjiAIAM4gCAEOIAgLjNAAC51QAAutUAALvlAAC8/QAAvZUAAL6dAAC/lQAAsIkHALFlBwCyYQcAs30HALRlBwC1bQcAtmUHALf1AACzNQYAFOIAgBjiAIAc4gCAIOIAgLZZBgC1UQYAJOIAgLuhBgC6TQYAKOIAgCziAIC/qQYAvqEGAL2pBgC8tQYAMOIAgDTiAIDv8AUAOOIAgDziAIBA4gCAROIAgEjiAICAPQAAgQkAAIIdAABM4gCA4cgGAFDiAIDjSAQAVOIAgKO1BgBY4gCAhigAAIdAAQB
c4gCAptkGAKXRBgBg4gCAqyEGAKrNBgBk4gCAaOIAgK8pBgCuIQYArSkGAKw1BgBs4gCAs70BAHDiAIB04gCAtnkBAHjiAIB84gCAtXkBALpVAQC7XQEAgOIAgITiAIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgC+rDwAiOIAgIziAICQ4gCAlOIAgJjiAICc4gCAoOIAgLhpAwC5aQMAugkDALsJAwC8HQMAvQUDAL4NAwC/BQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwCk4gCAqOIAgKziAICj9QIAsOIAgKUxAgCmMQIAtOIAgLjiAIC84gCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMA7xgCAIIVAACBbQAAgG0AAMDiAIDI4gCAhvg8AIcYAwDM4gCA0OIAgNTiAIDY4gCA42wHAAThAIDhaAEA3OIAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIA4OIAgOTiAIDo4gCA7OIAgPDiAID04gCA+OIAgPziAIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4bQGAADjAIDj9AYABOMAgIQYPQAI4wCADOMAgBDjAIAU4wCAGOMAgBzjAIAg4wCAJOMAgCjjAIDvWAYALOMAgIF9AACAcQAAMOMAgIIFAAA44wCAPOMAgO+AAQC+VDwA4ZABAEDjAIDjfAYAROMAgEjjAIBM4wCAhtg8AIf0PACjnT0AxOIAgDTjAIBQ4wCAVOMAgKbVPQCltT0AWOMAgKv5PQCq8T0AXOMAgGDjAICvGT4ArhE+AK3VPQCs1T0AZOMAgLOhPgBo4wCAbOMAgLatPgBw4wCAdOMAgLWxPgC6ST8Au0k/AHjjAIB84wCAvkk/AL9JPwC8ST8AvUk/AKhVPgCpZT4Aqm0+AKtlPgCsfT4ArWk+AK65PwCvuT8AgOMAgITjAICI4wCAjOMAgJDjAICU4wCAmOMAgJzjAIC4VT8AuV0/ALpVPwC7bT8AvHU/AL19PwC+dT8Av20/ALDJPwCxyT8Astk/ALPZPwC0yT8Atck/ALZ9PwC3cT8AghUAAKPhPwCAsQEAgbEBAKbtPwCg4wCAvtABAKXxPwCqCT4Aqwk+AITkAQCk4wCArgk+AK8JPgCsCT4ArQk+ALPdPACo4wCAhugAAIfMAQCs4wCAtpU8ALX1PACw4wCAu7k8ALqxPAC04wCAuOMAgL9ZPwC+UT8AvZU8ALyVPACoUT4AqVE+AKptPgCrYT4ArGE+AK1hPgCulQEAr40BAISgAQC84wCAwOMAgMTjAIDI4wCAzOMAgNDjAIDU4wCAuKkBALmpAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9pAQCw/QEAsc0BALLFAQCzrQEAtLkBALW5AQC2rQEAt6UBALPlPQDY4wCA3OMAgODjAIDk4wCAtuE9ALXpPQDo4wCAuwkCALo5AgDs4wCA8OMAgL99AgC+fQIAvXkCALwRAgD04wCAo6E9APjjAID84wCApqU9AADkAIAE5ACApa09AKp9AgCrTQIACOQAgAzkAICuOQIArzkCAKxVAgCtPQIAgOkAAIHpAACCHQAAvsADAO/kAgAQ5ACAh1QDAIY8BADjEAEAGOQAgOH4AQAc5ACAIOQAgCTkAIAo5ACALOQAgDDkAIA05ACAOOQAgLORAwA85ACAtbkDALZ9AwBA5ACAROQAgEjkAIC6WQMAu1kDALxJAwC9SQMAvv0AAL/1AACoRQIAqVUCAKpVAgCrZQIArH0CAK2xAgCusQIAr7ECAIRsBQBM5ACAUOQAgFTkAIBY5ACAXOQAgL5wBQBg5ACAuF0BALltAQC6ZQEAuw0BALwZAQC9GQEAvg0BAL8FAQCw0QIAsdECALLRAgCz0QIAtHUBALV9AQC2dQEAt20BAOFAPwDjvAAA4wg+AOFsPgBk5ACAaOQAgGzkAIBw5ACAdOQAgHjkAIB85ACAgOQAgL5sBwDvVAAA75w+AIjkAICjnQIAgmkAAIFhAACAaQAAjOQAgKZxAgCltQIAkOQAgKtVAgCqVQIAhsgEAIfsBACv+QEArvEBAK1FAgCsRQIAqKUGAKmpBgCquQYAq7kGAKypBgCtqQYArtkGAK/ZBgCE5ACAlOQAgJjkAICc5ACAoOQAgKTkAICo5ACArOQAgLhxBwC5cQcAunUHALvdBwC8xQcAvc0HAL7FBwC//QcAsKkGALG1BgCytQYAs40GALSVBgC1UQcAtlEHALdRBwCzMQYAsOQAgLTkAIC45ACAvOQAgLYpBgC1IQYAwOQAgLtxBgC6bQYAxOQAgMjkAIC/lQcAvlEGAL1ZBgC8YQYAzOQAgKN1BgDQ5ACA1OQAgKZtBgDY5ACA3OQAgKVlBgCqKQYAqzUGAODkAIDk5ACArhUGAK/RBwCsJQYArR0GAIANAACBFQAAgh0AAOjkAIDs5ACA8OQAgITcAQD05ACAhoAAAIcgAQD45ACA/OQAgADlAIAE5QCACOUAgAzlAIAQ5QCA43QEABTlAIDhyAUAGOUAgBzlAIAg5QCAJOUAgCjlAIAs5QCAMOUAgDTlAIA45QCA77QEADzlAIBA5QCAqD0GAKlVBgCqVQYAq6kBAKy5AQCtuQEArqkBAK+pAQCErAEAROUAgEjlAIBM5QCAUOUAgFTlAIBY5QCAXOUAgLhtAQC5BQEAugEBALsBAQC8BQEAvQ0BAL4xAQC/MQEAsNkBALHZAQCybQEAs2UBALR9AQC1ZQEAtmUBALdVAQCBvQMAgL0DALPVBQCCGQAAtTkCAGDlAIC+VAMAtjECAGjlAIBs5QCAuxUCALoVAgC9uQIAvLECAL+pAgC+sQIAcOUAgKZpAgClYQIAhAAMAKONBQB05QCAhvgMAId8AwCv8QIArukCAK3hAgCs6QIAq00CAKpNAgB45QCAfOUAgIDlAICE5QCAiOUAgIzlAIDjIAEAkOUAgOGgAQCU5QCA70ACAJjlAICc5QCAoOUAgKTlAICo5QCArOUAgLDlAICz8QMAtOUAgBTkAIC45QCAvOUAgLbpAwC14QMAwOUAgLu1AwC6tQMAxOUAgMjlAIC/lQMAvpUDAL2lAwC8pQMAqCkCAKkpAgCqOQIAqzkCAKwpAgCtKQIArlkCAK9VAgCAzQEAgQkAAIIZAADM5QCA0OUAgL58DQCHtA0AhhwMALgxAgC5PQIAujUCALvpAgC8+QIAvfkCAL7pAgC/6QIAsDECALExAgCyMQIAszECALQRAgC1EQIAthECALcRAgDY5QCA3OUAgODlAIDk5QCA6OUAgOzlAIDw5QCA79QGAPTlAIDhVAYA+OUAgOOkAACsDBUA/OUAgADmAIAE5gCAo/ECAAjmAIAM5gCAEOYAgBTmAICm6QIApeECABjmAICrtQIAqrUCABzmAIAg5gCAr5UCAK6VAgCtpQ
IArKUCAKghDgCpIQ4AqkkOAKtZDgCsaQ4ArWkOAK6ZDgCvmQ4A1OUAgCTmAIAo5gCALOYAgDDmAIA05gCAOOYAgDzmAIC49Q4Auf0OALr1DgC7iQ4AvJ0OAL2FDgC+hQ4Av7UOALDpDgCx6Q4Asv0OALPxDgC01Q4Atd0OALbVDgC3zQ4As8EOAIIVAACBtQAAgLUAAEDmAIC26Q4AteEOAL4QAAC7LQ4Aui0OAIRkAwBE5gCAvxkOAL4RDgC9JQ4AvCkOAEjmAICjhQ4AhogAAIdsAwCmrQ4ATOYAgFDmAIClpQ4AqmkOAKtpDgBU5gCAWOYAgK5VDgCvXQ4ArG0OAK1hDgCziQ4AXOYAgGDmAIBk5gCAaOYAgLaBDgC1iQ4AbOYAgLuVDgC6jQ4AcOYAgHTmAIC/+Q4AvvEOAL2FDgC8hQ4AeOYAgHzmAICA5gCAhOYAgOMMDQCI5gCA4RgNAIzmAIDvrAwAkOYAgJTmAICY5gCAnOYAgKDmAICk5gCAqOYAgKgBDgCpAQ4AqgEOAKsBDgCsAQ4ArQEOAK4BDgCvPQ4AgN0AAIEJAACCGQAArOYAgLDmAICEPAEAvnQAALjmAIC4HQ4AuS0OALolDgC76QEAvPkBAL35AQC+6QEAv+kBALBJDgCxUQ4AslEOALNRDgC0NQ4AtT0OALY1DgC3LQ4Ao4kNALzmAICGrAQAhzwDAMDmAICmgQ0ApYkNAMTmAICrlQ0Aqo0NAMjmAIDM5gCAr/kNAK7xDQCthQ0ArIUNANDmAICznQIAhEgDAL5ABAC2VQMA1OYAgNjmAIC1sQIAunEDALt5AwDc5gCA4OYAgL4xAwC/MQMAvFEDAL1RAwCwkQMAsZkDALKhAwCzoQMAtNEDALXRAwC20QMAt9EDALj1AwC5+QMAus0DALvFAwC83QMAvcUDAL7NAwC/xQMA5OYAgOjmAIDs5gCA8OYAgIV8GQD05gCA+OYAgGTlAICoIQIAqTECAKoxAgCrBQIArB0CAK3xAwCu8QMAr/EDAPzmAIAA5wCABOcAgAjnAIDvUAAADOcAgBDnAIAU5wCA44QAABjnAIDh+AEAHOcAgIAVAACBGQAAggUAACDnAICjmQMAKOcAgIZoBACHYAUALOcAgKZRAgCltQMAMOcAgKt9AgCqdQIANOcAgDjnAICvNQIArjUCAK1VAgCsVQIAPOcAgEDnAIBE5wCASOcAgEznAIBQ5wCAVOcAgO/4AQC+bAQA4YAOAFjnAIDjFAEAXOcAgGDnAIBk5wCAaOcAgGznAIBw5wCAdOcAgLPdAQB45wCAtf0BALb1AQB85wCAgOcAgITnAIC6sQEAu4UBALydAQC9NQEAvj0BAL81AQCpBQYAqLkFAKsVBgCqHQYArT0GAKw9BgCvTQYArl0GACTnAICCHQAAgR0AAIAdAACI5wCAjOcAgJDnAICU5wCAuUEHALidBgC7QQcAukkHAL1FBwC8WQcAv0UHAL5FBwCxCQYAsD0GALOpBgCyAQYAtbkGALSxBgC3rQYAtrEGAKORBgCEjAIAhigAAIfAAwCY5wCAprkGAKWxBgCc5wCAq8kGAKr9BgCg5wCApOcAgK95BgCucQYArXkGAKzRBgCo5wCAs5kHAKznAICw5wCAtlEHALTnAIC45wCAtbEHALptBwC7dQcAvOcAgMDnAIC+WQcAv0UHALxtBwC9ZQcAxOcAgMjnAIDM5wCA0OcAgNTnAIDY5wCA3OcAgO+oBQDg5wCA4TQFAOTnAIDjdAUA6OcAgOznAIDw5wCA9OcAgKMdBgCCLQAAgRUAAIAdAAD45wCAptUGAKU1BgD85wCAq/EGAKrpBgAA6ACAhCgBAK/BBgCu3QYAreEGAKzpBgCoxQYAqdUGAKrVBgCr5QYArP0GAK0VBgCuHQYArxUGAL7sAQAI6ACAhggAAIcgAAAM6ACAEOgAgBToAIAY6ACAuH0GALkFBgC6DQYAuwUGALwBBgC9CQYAvjkGAL85BgCwbQYAsXUGALJ9BgCzdQYAtFkGALVFBgC2TQYAt0UGAKiRAgCpmQIAqqECAKuhAgCs0QIArd0CAK7VAgCvyQIAHOgAgCDoAIAk6ACAvyweACjoAIAs6ACAMOgAgDToAIC4VQMAuV0DALppAwC7ZQMAvGEDAL1hAwC+YQMAv2EDALC5AgCxjQIAsoUCALNtAwC0dQMAtX0DALZ1AwC3bQMAOOgAgDzoAICzIQIAQOgAgLVRAgCEiAMAROgAgLZVAgC05gCAvigcALtBAgC6dQIAvbEDALxZAgC/sQMAvrkDAKNpAgBI6ACATOgAgFDoAIBU6ACAph0CAKUZAgBY6ACAqwkCAKo9AgBc6ACAYOgAgK/5AwCu8QMArfkDAKwRAgCopQIAqbUCAKq9AgCrtQIArK0CAK01AQCuPQEArzUBAL4sHABk6ACAaOgAgGzoAIBw6ACAeOgAgIdoHQCGHB0AuIUBALmNAQC6hQEAu50BALyNAQC9vQEAvrUBAL95AACwUQEAsVEBALJRAQCzUQEAtPEBALXxAQC29QEAt+UBAO/YAACCtQAAgaUAAIClAAB86ACAgOgAgIToAIDvxAYAiOgAgOH0BgCM6ACA4zgBAOPMAACQ6ACA4SgBAJToAICY6ACAtuUBALV1AgCEQBwAs2UCAJzoAICg6ACApOgAgL9lAQC+ZQEAvdUBALzVAQC7xQEAusUBAKjoAICs6ACAo7UdAHToAICw6ACAtOgAgLjoAICmNR4ApaUdALzoAICrFR4AqhUeAMDoAIDE6ACAr7UeAK61HgCtBR4ArAUeAMjoAIDM6ACA0OgAgNToAICADQAAgTUAAII9AADY6ACA3OgAgODoAIC1BQAAcRoAgOG0AgCs2AIAtQUAAHUaAICotR8AqRUfAKodHwCrFR8ArDEfAK09HwCuLR8AryEfAOG0AgCs2AIAtQUAAHkaAIDhtAIArNgCALUFAAB9GgCAuNEAALnZAAC64QAAu+EAALyRAAC9kQAAvpEAAL+RAACwIR8AsTEfALIxHwCzMR8AtAkfALUJHwC28QAAt/EAAOG0AgCs3AIA71QdALUdAACBGgCA4bwCAKzQAgC1KQAAoyUBAKKRAwChFR0AoA0dAOGAHgCFGgCA47wdAOHEAgCz1R4AtQkAAKzYAgCJGgCA4bwCALb9HgC1+R4ArOACALu1HgC6pR4AtQUAAI0aAIC/jR4Avo0eAL2lHgC8pR4AoxUeAOG8AgCs0AIAtREAAI9pJQCmPR4ApTkeAJEaAICrdR4AqmUeAOG0AgCseAEAr00eAK5NHgCtZR4ArGUeAJvdFACa5RUAmQEXAJjhEACfcR8AnnkZAJ35GQCcARsAk+UtAJIRLwCRbSkAkG0pAJf5EQCW8REAlYUsAJSZLQC1JQAA4ZQCAILxJgCDjSoAhJUqAIXhLACGHS4Ah3kuAKy0AgCVGgCAilUvAIspEgCMORIAjRkTAI7xFACPHRYAtQUAAJkaAICSVRcAk5EYAJRxGgCV+RoAlvkcAJd9HgCC4AMAk
wsAgJpVHgCb2QAAnHUCAIMMAICzDACAuIkKAKwBBACthQYAroEGAMwQAgDMfAMAtgwAgJ0aAIDCDACAxQwAgMgMAIAACwCAgaUyArwMAIAE6ACAmpUGAJtVIwK8kQYAvbEAAL6RBgC/rQYAuOkGALmVBgC6kQYAoRoAgLTBBgC1zQYAts0GALfdBgCw/QYAseUGALKdAACz5QYAhVTHA6UaAICH/AAAuAEKAK0aAIDpDACAsRoAgIyRcwCNpAEAzPACAL4NAIDBDQCAiRQAALgZCgCLDAAAGg4AgFMOAIC5DACAvwwAgBkKAICRwAEAywwAgLhtCgDODACA1AwAgNoMAIDdDACA4AwAgLUaAIAoDQCA5gwAgLkaAIDhpB4AKw0AgONUHgCvIXMAzCgCAO8MAIDsDACA8gwAgPUMAID4DACAzIACAJS4AwD7DACAkhQCAO9gHgCQAAIA/gwAgAoNAIC48QoADQ0AgJ8LAIAQDQCAiSkLABMNAICpGgCAvDABAL/EAQC+7AEAFg0AgMzsAgC4xQoAukQBAK0JAIAZDQCAygYAgN8GAIDyBgCAHA0AgPoGAIAfDQCACgcAgC0HAIAYBwCA9gcAgC8HAICpDQCAOgcAgK8NAIBKBwCAtXkAAGcHAIC3cSoCcgcAgLFhAAB0BwCAsw0pAo0HAIC96QAAoAcAgPoHAICtBwCAuRkrAsMHAIC7WRQCHwgAgFoJAIA8CACALw4AgFsIAIA5AACAgQgAgHEAAIDHCACAKwAAgCAJAIA9AACAXAkAgEMAAIBeCQCARQgAgGoIAIBJAACAAAgAgFMAAIB5CQCAWQAAgCINAIBfAACAuw0iAtANAIDMFDYCHwAAgL9lAAC+EQAAvW0AAOUHAICAaQEAgXUBAIJxAQCD3SEChGkHAIWBBwCGgQcAh3EBAIihAQCJrQEAirUHAIuNBwCMlQcAjaUBAE8AAICPpQEAkOEBAJHtBwCSsSECk/0HAJSNBwCVUQYAlvEBAJfZAQCY0QEAmXUGAJp9BgCb1QEAnGkGAJ2ZFAKeUQYAn1EGAKB1FAKhuQYAokkBAKOFLQKkIQEApS0BAKZ1FAKntQYAqKERAqlRFAKqlQYAsSEAgMy8NQLNPDUCbQAAgKoDAICsAwCArwMAgL0hAIDEIQCA2yEAgOIhAIDJAACADwAAgLihBgC6BgCAtwYAgMwAAIDOIQCAtQMAgN0FAIAYBgCAugUCALvVAgC46QUAuf0FAL7JAgC/5RcCvA0CAL0BAgCy4QUAs+EFALCNBQCxnQUAtuUFALfpBQC09QUAte0FAKo9BQCrwQUAqD0FAKk1BQCuzQUAr/UFAKzNBQCtxQUAoj0FAKMFBQCg1QIAoTkFAKYdBQCnBQUApB0FAKUVBQC/BgCAm8EFAD4GAIBVBgCAnt0FAJ8xBACcUQIAndUFAHIGAICJBgCApAMAgDAiAIDbAACAoAMAgI8HAIDuBwCA8gcAgJAJAIACCACABggAgJYLAICUCQCArwoAgG8HAICLBwCAlwcAgKIHAICqBwCAqgkAgPsOAIASDwCAHw8AgMwEMwLNsDACzCAzAs3gMALMEDACzGgwAsxYMALNjDACzGgxAs0UMQLM1DECzRQ2AsxwIALN0CcCzDA2AswkMQLMDDwCzWg/AswYPwLNND8CzBg9As3AMgLMRDwCzBg5Asw4MgLNqDICzIgyAs34MwLMfDMCzUAzAswoMwLNCDMCzMghAs0kJgLMrCYCzEA4AsyYJQLNyDoCzBwkAs0QJALMhDsCzag7AsysJQLNvDoCzKw4Asz4JwLM4DgCzXQ4AicPAID2BgCAYQ0AgIgNAIDNICoCzBwrAqoGAIAsIgCAzKQgAs2gJwLMOCYCygQAgMw4OgLNPDsCzBA5As1gPgLMoAMAvj0NAL3tLALWBACAu1UjAgQJAIC5PSICzwYAgNkHAIClBACAoA0AgLIEAIBvBQCA9AYAgL4EAIB1BQCAr70MAK6ZLgKtpQwAwgUAgKvFIgIDBgCAxAQAgCMGAIDQBACAyAUAgCkGAIBdBgCAowEYAqAEAIAaBwCAHQcAgJ9dDACeUQwAnUUMACcHAICbWSECrwcAgLEHAIC0BwCAuAcAgCoHAIDOBwCA0AcAgJMtJgLTBwCAbAgAgG8IAICPBQwAjnEMAI1lDAB5CACAi0UgAmAJAICJNS8CYwkAgGcJAIB8CACAcAkAgHMJAIC9AwCAACIAgIFdDACAYQwAgAABAIEYAACCAAQABCIAgIQQBwCFFAYAhuQIAIc8AgCILAUAiaQFAIoAeAAIIgCAjCQAAAwiAIAUIgCAECIAgLgRAACRxHsAkkh6AJNMeQAcIgCAzOgCAJbwCQC4OQAAkMAJACQiAICS8AkAzPgCAJS0CQC4DQAAKCIAgMwcAgC4BQAANCIAgMzkAgC4HQAAOCIAgDwiAIBDIgCAWiIAgKiMCACp5HsAYSIAgKvUBgDM5AIAuA0AAGsiAIDMlAIAbyIAgLGAewC4CQAAuBUAAMz8AgC15AgAcyIAgMzYAgB3IgCAuAUAALqcBQC7XAUAvAB8AL30fwC++H0Av/xyAIAJOgKBDToCggE6AoMFOgKEGToChR06AoYROgKHFToCiCk6AoktOgKKIToCiyU6Aow5OgKNPToCjjE6Ao81OgLM8AIAkekPAIMiAIDMzAIAuBkAAH8iAIDM3AIAl+UPALg1AAC4DQAAjyIAgMz8AgC4BQAAkyIAgMwwAgCXIgCAzNACAJsiAICfIgCAzIgCAKQtDwClVQ8Apl0PAMyUAgCoqToCqa06ArjVAACjIgCAuDUAAKciAIDMUAMAr7U6AswsAwCrIgCAzBgDALMFDwC0HQ8AzyIAgLYJDwC3CQ8Avmh9ALhtAAC4RQAAzDgDALwpDwDTIgCAviUPAMxYAwCH5Q4AzOg6Ari9AQC4yQEAzPA1As2kMwLMgCICzXwlAs2UNgLMBCkCzew7AsxkOgK45QEAuMEBAInVDgCI1Q4Al7EOALgNAACvIgCAsyIAgLciAIC4GQAAuyIAgNciAICfaTsC2yIAgL8iAIC4PQAAzMQCAMz4AgDDIgCAxyIAgLjZAADLIgCA3yIAgLjRAADjIgCAuPEAAMzMMwLnIgCAuMkAAMzoMwLrIgCAuNUAAKllAAC4yQAAzNgCAKq5BgC3TQ0Atk0NALU1DgC0NQ4AuFUAABUjAICxGQ8AsCkOAL/1AwC+UQ0AvVkNALw1DAC7XQ0Aul0NALldDQC4XQ0AgL0KAIHFCgCCFQQAg8kKAMx8BQCF3QoAhtUKAIfNCgDMVAUAifEKAIq5CACLDQgAjBEIAI0VCACOtScCj+UKAJBpCACRbQgAknEIAJNtJALMEAUAlR0IAJaFCgDMEAUAzDQFAJk9CACaiQoAmw0IAJwRCACdFQgAzEgFAMwQAgCgZQoAoW0KAKJlCgC4BQcApLEEAMzoAgCmsQQAuA0HAKiBBADM/AIAqpkIAKtdCgCsuQgArakEALglBwCvNQgAsNEIALHxBADMwAIAs40I
ALQpKAK1IQoAtiEKALchCgC4IQsAuSUIALhBBwC7KQsAvA0dAr3dDwC+MQsAvzELAIDdCgAZIwCAnKF9ANADAIDpAwCAhRkJAIaZCQCHlQkAiOEJAIklJQICBACAGwQAgC4EAIBBBACAVAQAgGcEAICQrQoAkUkFAJJtBQCTYQUAlGEFAJVtBQCWZQUAlxEFAJg1BQCZPQUAmjUFAJsNBQCcFQUAnR0FAJ4VBQCfCQUAoKkJAKH9BQCi9QUAowEFAKQFBQClDQUApgUFAKc9BQCoBQUAqQ0FAKoFBQCrGQUArIkJAK2pBQCutQkAr/0JALABCQCxfQUAsnUFALMBBQC0aQkAtQEFALYFBQC3PQUAuAUFALnhJQK6AQUAuwEFALzRJQK9PQkAvnkJAL9dCQCDMAUAoXgHAJ+xfgB6BACApHgHAKVIBwCNBACA8wQAgIt8BADdAACAEwEAgIhIBAAcAQCAIAEAgCQBAIAoAQCALAEAgDABAICyAAcAs/wHADQBAIDhAACAtuQHALfwBwDmAACA6wAAgLrgBwC7nAcAvIgHAL2oBwDwAACAs8F+AKPMBAD1AACA+gAAgIMABAD/AACAhXQEAKUgBAAEAQCAiEwEAAkBAIAOAQCAFwEAgK8tBwCNxAcArSEHAKwpBwDNAwCA8AQAgI8FAICwZQcA4gUAgB0GAIBDBgCAWgYAgHcGAICOBgCA0wMAgOwDAIAFBACAHgQAgDEEAIC8fAQAgt0rAoPlKwKA/QoAgfkrAoaZCQCHmQkAhOEKAIXhCgCKiQkAi4kJAIiJCQCJiQkAjoUJAEQEAICM4QgAjY0JAJK5KwKTQScCkJkrApHFCwCWyQsAl3UnApTFDQCV0SQCmskLAJvZKgKYyQsAmXkHAFcEAIBqBACAnP0LAH0EAICQBACA9gQAgKABAICkAQCAqAEAgONkAgCsAQCAsAEAgLQBAIDvvAcAqBEJALgBAIC8AQCAwAEAgMQBAIDIAQCAzAEAgNABAIDUAQCA2AEAgNwBAIDgAQCA5AEAgOgBAIDsAQCA8AEAgPQBAID4AQCA/AEAgAACAICCnH4ABAIAgKD1VAKh2VQCoulUAqP1dQCk7XUApZ12AKaVdgCnvXYAqIV2AKkpfQCqOX0AqwV9AKwdfQCtBX0Arg19AK8FfQCwfX0AsUl+ALJRfgCzUX4AtHV+ALV9fgC2aX4At2l+ALhZfgC5WX4Auil+ALspfgC8IX4AvSF+AL4ZfgC/GX4AkgcAgDkJAIDXBwCATSIAgLQNAAC1NQAAtj0AAKIGAICsBgCArwYAgAMjAIAJIwCAvSV4ALy1WALGMQCALjoAgJkqAIC9KgCAySoAgNkqAIDhKgCA7SoAgPUqAID9KgCACSsAgF0rAIB1KwCAhSsAgJUrAIClKwCAtSsAgNUrAICAeX8AgYF/AIKBfwCDnX8AhI1/AIWxfwCGsX8Ah7F/AIjhfwCJ4X8AiuF/AIv9fwCM5X8Aje1/AI7lfwCP3X8AkKV/AJGtfwCSpX8Ak71/AJSlfwCVrX8Alm1+AJctfgCYFX4AmRl+AJrpfgCb6X4AnPl+AJ35fgCe6X4An+V+AKAdfgChJX4AoiV+AKM9fgCkJX4ApS1+AKYlfgCnXX4AqGV+AKltfgCqZX4Aq31+AKxlfgCtbX4ArmV+AK9dfgCwJX4AsS1+ALIlfgCzPX4AtCV+ALUpfgC2WXcAt9V1ALj9eQC56XUAuvl1ALvZeQC86XUAvdV1AL7RdQC/2XUAgDF2AIE9dgCCSXYAg0V2AIRBdgCFTXYAhvl0AId9dgCIoQIAiU12AIpZdgCLuXoAjEl2AI2degCOsQIAjx16AJCRVgKRKXYAkoF2AJPNdgCU2XYAlel2AJbJdgCX0VkCmKF2AJllWgKa8XYAm01aApzRdgCdYXoAnoFWAp/VdgCgBQIAoY1aAqI1VwKjCXYApCF2AKUtdgCmiVoCp5laAqi5WgKpdXYAql13ANkrAIDdKwCAESwAgDksAIBJLACAUSwAgFUsAIBhLACAfSwAgIEsAICZLACAnSwAgKUsAIC1LACAUS0AgGUtAIClLQCAuS0AgMEtAIDFLQCA1S0AgJl1CgD4LQCAJC4AgDAuAIBQLgCAXC4AgGAuAIBkLgCAgux6AINkewB8LgCAgC4AgIZ0ewCHvHsArC4AgLguAIDALgCAyC4AgNguAIDnLgCA7y4AgBsvAIAfLwCAJy8AgJJwfAArLwCAMy8AgJFMfAA7LwCASy8AgGcvAIDfLwCA8y8AgKvMfACo5HwAqdx8APcvAIB3MACAezAAgI8wAICiwHwAkzAAgJswAICjMACAzEBJAs0ASQLM/EoCzWhLAqswAIC3MACA7TAAgP0wAIARMQCAjjEAgJoxAICqMQCAsqx8ALNAfAC2MQCAwjEAgMoxAIDOMQCAtGx8ALUEfACAlQcAgZ0HAIKVBwCDqQcAhLkHAIW5BwCG2QcAh9kHAIjpBwCJ6QcAivkHAIv5BwCM6QcAjekHAI7RBwCP0QcAkLEHAJGxBwCSSQEAk0kBAJRZAQCVWQEAlkkBAJdJAQCYeQEAmXkBAJpJAQCbSQEAnFkBAJ1ZAQCeSQEAn0kBAKC5AQChuQEAoskBAKPJAQCk2QEApdkBAKbJAQCnyQEAqPkBAKn5AQCqyQEAq8kBAKzZAQCt2QEArskBAK/JAQCwuQEAsbkBALJJAQCzSQEAtFkBALVZAQC2SQEAt0kBALh5AQC5eQEAukkBALtJAQC8WQEAvVkBAL5JAQC/SQEA0jEAgNYxAIDaMQCAkjIAgNoyAIDmMgCA6jIAgO4yAIDyMgCA+jIAgP4yAIASMwCALjMAgDYzAIB2MwCAejMAgIIzAICGMwCAjjMAgJIzAIC2MwCAujMAgNYzAIDaMwCA3jMAgOIzAID2MwCAGjQAgB40AIAiNACARjQAgIY0AICKNACAqjQAgLo0AIDCNACA4jQAgAY1AIBKNQCAUjUAgGY1AIByNQCAejUAgII1AICGNQCAijUAgKI1AICmNQCAwjUAgMo1AIDSNQCA1jUAgOI1AIDqNQCA7jUAgPI1AID6NQCA/jUAgJ42AICyNgCAnoUMAOY2AIDqNgCA8jYAgIC5AwCBuQMAgskDAIPJAwCE2QMAhdkDAIbJAwCHyQMAiPkDAIn5AwCKyQMAi8kDAIzZAwCN2QMAjs0DAI/FAwCQvQMAkQEMAJJJDgCTSQ4AlFkOAJVZDgCWSQ4Al0kOAJh5DgCZeQ4AmkkOAJtJDgCcWQ4AnVkOAJ5JDgCfSQ4AoLkOAKG5DgCiyQ4Ao8kOAKTZDgCl2Q4ApskOAKfJDgCo+Q4AqfkOAKrJDgCryQ4ArNkOAK3ZDgCuyQ4Ar8kOALC5DgCxuQ4AskkOALNJDgC0WQ4AtVkOALZJDgC3SQ4AuHkOALl5DgC6SQ4Au0kOALxZDgC9WQ4AvkkOAL9JDgC8eQQAvXkEAL6JBAC/nQQAuHUEALl9BAC6aQQAu2kEALRxBAC1cQQAtnEEALdxBACwcQQAsXEEALJ
xBACzcQQArGkEAK1pBACucQQAr3EEAKhBBACpQQQAqkEEAKtBBACknQUApWEEAKZhBACnYQQAoJ0FAKGFBQCijQUAo4UFAJxdBQCdZQUAnm0FAJ9lBQCYXQUAmUUFAJpNBQCbRQUAlB0FAJVlBQCWbQUAl2UFAJAdBQCRBQUAkg0FAJMFBQCMMQcAjTEHAI4xBwCPMQcAiDEHAIkxBwCKMQcAizEHAIQxBwCFMQcAhjEHAIcxBwCAMQcAgTEHAIIxBwCDMQcAJjcAgC43AIA2NwCAcjcAgHY3AIB+NwCAgjcAgIY3AICyNwCAtjcAgL43AIDSNwCA1jcAgPI3AID6NwCA/jcAgCI4AIBCOACAUjgAgFY4AIBeOACAijgAgI44AICeOACAwjgAgM44AIDeOACA9jgAgP44AIACOQCABjkAgAo5AIAWOQCAGjkAgCI5AIA+OQCAQjkAgEY5AIBeOQCAYjkAgGo5AIB+OQCAgjkAgIY5AICOOQCAkjkAgJY5AICaOQCAnjkAgK45AIDGOQCAyjkAgNY5AIDaOQCA3jkAgOI5AIDqOQCA7jkAgPI5AID+OQCABjoAgA46AIASOgCAGjoAgIC5AQCBuQEAgskBAIPJAQCE2QEAhdkBAIbJAQCHyQEAiPkBAIn5AQCKyQEAi8kBAIzZAQCN2QEAjskBAI/JAQCQuQEAkbkBAJIRAACTEQAAlDEAAJUxAAAeOgCAIjoAgCo6AIAyOgCAPSMAgGUsAIBpLACAJSQAgIJgAgCZ4QAAgIAAAIGYAACC5AYAg4gEAITUGwCFlBoAhhgfALMjAICIxB4AiQAQAIqoEwCLrBEAjAAoAI20KwCOuCoAj7wpAOOwAgC+dAIAnlUAAOMUAgCCbAIAtyMAgJkNAAC+RAIAnjUAAIJoAgCZBQAAuyMAgO/MAgC+oAAAgoQAAO/YAgDj7AEA4/QBAL8jAIDjCAMAwyMAgOM4AwDHIwCA44gDAMsjAIDv4AMAzyMAgO+IAwDvPAEA78QDANMjAIDv1AMA4+wDAB43AIDXIwCA4+wDAOPsAwDj5AMA2yMAgOO4AwDvXAMA70wDAN8jAIDvSAMA7/QDAOMjAIDnIwCA7zQDAON8AwDjlAQA6yMAgO8jAIDzIwCA47QEAPcjAID7IwCA/yMAgO9sBAADJACAByQAgO9YBADvUAQACyQAgBYkAIAaJACAvQAAgOP4BADCAACAMSQAgB4kAIBtKQCA45wEAAglAIBrJQCAriUAgO9QBADaJQCABCYAgO88BAApJgCAgAlLAoYcdwC+RAIAgnQCAL5QAgA+JgCAmREBAJkNAQCPrAIAggQCAI1oAQCewQIAi3wBAJ49AQCeKQEAvggCAJfQAgCZXQEAldACAJ5VAQCT0AIAmXUBAJHQAgC+SAIAn7gCAEYmAICdtAIAnk0BAJuwAgCZXQEAmbQCAL6EAgCeqQEApowCAGImAICkgAIAmakBAGomAIChSAIAgqwCAK/kAgCCtAIAglwCAJnlAQC+CAIAgnwCAIIABACopAIAnvkBAL5wAgC1HAQAnoUBAL6oBQCyhAIAtrECAL6sBQC4KQkAuYkCALqZAgCCjAUAu+gEAIKcBQByJgCAuPAEAJ5ZBgCZbQYAnmEGAJl5BgC+fAIAnmEGAIJcAgC+QAIAmVkGAJ5dBgCCYAIAmaUGAL58AgCevQYAghwCAL4UAgCZzQYAvkwCAIJMAgCa3QYAnt0GAJ/FBgDjDAIAgrwCAJn5BgC+ZAIA7/QCAJrxBgCe6QYAn+kGAJ7ZBgCf1QYA4wQCAJklBgCaIQYAgngCAJk9BgDjBAIAgkQCAJolBgC+cAIA75wCAJ4FBgCfFQYA7+gCAJp1BgCZBQYAggQCAL5wAgDjcAIAnnUGAJ8NBgCeAQYAvnwCAOM0AgCZDQYAvmACAIJsAgDv8AIAmTUGAIKQAwDv2AIAniEGAIQmAICbxQcAmeUHAL58AgCe7QcAn8UHAOPsAwCdUAIAnNEHAIJsAgDv1AIAmc0HAIJ8AgC+cAIAmd0HAJ7dBwC+AAIA42gCAJ6tBwCZuQcA42gCAIJ8AgDjDAIAvkgCAJmpBwCCWAIA78QCAJ6ZBwC+bAIA77gCAIKUAgCejQcA77gCALsAAACZeQcAuQwAAJ5xBwC/AAAAglQCAL0EAAC+aAIAs9QDAJmxBgCxcAMAggQCALc4AACeoQYAtTQAAL5wAgCrWAMAnqEGAO9cAgCZqQYArxADAIJQAgCtFAMAmYUHAJlpBgC+WAIAnmEGAL58AgCCaAIApqACAOOQAgCZaQYA43wBAOOYAQDjrAEA49ABAOPoAQC+dAIAno0FAOMwAgDvzAIAgmgCAJnRBQDvlAIA71QBAO9wAQDvJAEA7ygBAL58AgCevQUA4wwCAIJ4AgCZrQIAvnQCAJ6lAgDjNAIAgmACAJkZAAC+YAIA7/wCAJ4NAACClAIA79QCAJAmAIDj/AIAmQkAAL5gAgCYJgCAnh0AAOMAAgCwJSoAglgCAJkNAADv9AIAvmQCAK4mAIDvwAIAnhkAAIIYAgCCOAIA43ACAJkRAACaNQAAmSkBAL50AgDsJgCAnyUAAJ4JAACZ6QEAvrQDAL7gAwCazQEA79gCAJ4RAQCC2AMA/SYAgIHEAgDjsAMAHycAgOP8AwC+/AIAhMQCAIIoAgCGEAIAKicAgIg8AgCeIQAAnw0AAHonAIDvKAMAj3QCAO8sAwCCiAIAmXUAAJoVAACSxAMAldADAJktAACa0QAAjicAgL7IAgCYaAMAm3wDAILEAwCeQQAAnykAALAnAICChAIA45ACAL4IAwC+JwCABigAgJ8ZAACe7QAA49ACAJlxAACaFQAAvhQCAO8wAgCZIQAA71gCABQoAICv7AMAggQCALFMHACwABwAniUAALJMHACeXQAAn2EAAOO8AgCZIQAA+QAAAHEpAIDvlAIAdSkAgL08HACCgB0Av8EfAHkpAIDjtB0AvnQCAJ71HwDj8B0AmQUAAH0pAIC+fAIAngkAAIJgAgCZDQAAiSkAgL5gAgDvzAIAnh0AAOklAIDv3AIA42gCAPkYAIDjPB0AIRoAgP0YAIABGQCAJRoAgCkaAIAtGgCAMRoAgDUaAIA5GgCA76QCAD0aAIDvJB0AQRoAgLHFAAAFGQCAs8UAALLdAAC1yQAAtMEAALcdAAC2wQAAuWUAALhlAAC7zQAAus0AAL3dAAC83QAAv8UAAL7JAAAJGQCADRkAgE0ZAIBhGQCAERkAgBUZAIDvFHgD7wBIA+HYTQPhOKgC41x5A+O0UAOtGQCAsRkAgLUZAIC5GQCAgMkBAIHVAQCC3QEAg20CAITdAQCFcQIAhgEEAIcdBQCIJQUAiTUFAIo9BQCLbQUAjHUFAI1lBQCObQUAj80BAJC1AQCRvQEAkrUBAJNNAwCUVQMAlV0DAJZVAwCXTQMAmHUDAJl9AwCadQMAm00DAJxVAwCdWQMAnkkDAJ9JAwCguQMAobkDAKLBAwCj3QMApMUDAKXNAwCmxQMAp/0DAKjJAw
CpyQMAqtEDAKvRAwCsMQMArTEDAK4xAwCvMQMAsFEDALFRAwCyUQMAs1EDALRxAwC1cQMAtnEDALdxAwC4UQMAuVEDALpRAwC7UQMAvDEDAL0xAwC+MQMAvzEDAL0ZAIDBGQCAxRkAgMkZAIDNGQCA0RkAgNUZAIDZGQCA3RkAgOEZAIDwIAIA5RkAgOkZAIDtGQCA8RkAgPUZAICc9TYAnf02APkZAICRkAIA/RkAgKkZAIBFGQCASRkAgEUaAIC6adgASRoAgE0aAIC4sTYAubE2AFEaAIBVGgCAWRoAgF0aAIBRGQCAYRoAgGUaAIBVGQCAWRkAgF0ZAIBlGQCAaRkAgG0ZAIBxGQCAdRkAgHkZAIB9GQCAgRkAgIUZAICJGQCAjRkAgJEZAICVGQCAglgCAJkZAIBpGgCA8FgCAG0aAICdGQCAoRkAgKUZAIABGgCABRoAgJF0AwDhtDsCCRoAgOPYIgINGgCAERoAgBUaAIAZGgCAHRoAgKUqAIBVLQCAqSoAgMEqAICtKgCAljMAgO/IPwK1KgCA4ZTzAuGY0gLjlPcC4xDGAuGUtgLhkJ0C44SiAuMIhwIZGQCAHRkAgO+4swLvOIsCnSoAgOAtAIDvIJcC7+DgAoLkAgBpLQCACAIAgLrF2QAOAgCAFAIAgBoCAIAgAgCAJgIAgCwCAIAyAgCAOAIAgD4CAIBEAgCASgIAgFACAIDhgHgC8OQGAOMUagKCgAgA4aAPAuEIEwLjhA4C4xgeAlYCAIA0AwCA7zQ7Au8wHwI6AwCAQAMAgO8MEgJGAwCAJRkAgCkZAIBMAwCAUgMAgC0ZAIAxGQCAWAMAgF4DAIB2AwCAggMAgIgDAICOAwCAlAMAgJoDAIB8AwCAZAMAgDUZAIA5GQCAbQMAgFwCAIA9GQCAQRkAgHQCAIBoAgCAvAIAgHoCAICYAgCAYgIAgJICAIBuAgCApAIAgNQCAICAUQYAgV0GAIJVBgCDaQYAhHkGAIV5BgCGaQYAh2kGAIhZBgCJoQcAiqUHAIu9BwCMpQcAja0HAI6lBwDyAgCA7AIAgOACAICSCRQAkxUUAJTxBwCV8QcAlvEHAJfxBwCY0QcAmdEHAJo5FACb0QcAnIEHAJ2BBwCefQcAnx0UAJktAQCYLQEAmz0BAJo9AQCdLQEAnC0BACEZAICeVQEAkd0GAJDRBgCTJQEAkiUBAJUtAQCULQEAlx0BAJYdAQCJ8QYAiOkGAIvxBgCK+QYAjbEGAIzpBgCPqQYAjrkGAIHxBgCA7QYAg/EGAIL5BgCF0QYAhOkGAIfRBgCG2QYAua0DALitAwC7vQMAur0DAL2tAwC8rQMAv90DAL7dAwCxrQMAsK0DALO9AwCyvQMAta0DALStAwC3nQMAtp0DAKm5AQCosQEAq3UBAKqxAQCtFQEArBUBAK/dAwCu3QMAobkBAKCpAQCjiQEAorEBAKWZAQCkkQEAp4kBAKaRAQAuAwCAwgIAgM4CAIDmAgCA2gIAgAQDAICwAgCA+AIAgCIDAIAKAwCAngIAgIACAIC2AgCAyAIAgP4CAICGAgCAKAMAgKoCAIAQAwCAjAIAgBYDAIAcAwCACS0AgOsuAIDKNACAhAcAgAYFAIAVBQCAJAUAgDMFAIBCBQCASwUAgPAsOABUBQCAXQUAgGYFAICSBQCA40huA5sFAIDhTG4DpAUAgO/0AQOnBQCAqgUAgK0FAIBGOgCApkwAgNZVAIA2aACAZnEAgJZ6AID2jACAVp8AgIaoAIDtugCAJMQAgFTNAICE1gCAtN8AgDG7AIA6rgCABqUAgPkqAICJKwCAoSoAgOUqAIBBMQCAATEAgE40AIDVLACABjMAgIo3AIBiNACAHSwAgJI0AICeMwCAEjgAgFkrAICFLACA+jEAgCY5AIAdKwCArSsAgJ4xAIC8LgCAySwAgFksAIA4LgCALC4AgJGgBgDuMwCAGSsAgJ43AIB1LACAzS0AgLAFAIDh1D8D4VgaA+PcLwPjUA4D4RTyA+FA0wPjQOoD40DDA7MFAIC2BQCA73jrA+9c8gO5BQCA5QUAgO9E3gPvmCUD4bSLA+E8lwPjfKID45iLA+EwQQDhUKwD4xx/AOOIRgDoBQCA6wUAgO84ewDv4EEA7gUAgPEFAIDvzIoD7yCHA4DBGACB3RgAgikLAIMpCwCE6Q4AhekOAIYZDwCH8RgAiCUPAIntGgCK5RsAiyEdAIw5HQCN5RsAjmkQAI/VGgCQhRsAkU0PAJJFDwCTXQ8AlEUPAJVNDwCWRQ8Al30PAJhFDwCZTQ8AmkUPAJtpGwCcQQ8AnUEPAJ5BDwCfQQ8AoMEPAKHBDwCiwQ8Ao8EPAKS5CwCluQsApqkLAKfNDwCo9Q8Aqf0PAKr1DwCrzQ8ArNkPAK3ZDwCuyQ8Ar8kPALC5DwCxuQ8AsmkPALNpDwC0YQ8AtWEPALY5DwC3OQ8AuBEPALkRDwC66QEAu+kBALz5AQC9+QEAvukBAL/pAQD0BQCA9wUAgPoFAID9BQCAAAYAgCAGAIDhBACAgAUAgNMFAIAOBgCANAYAgEsGAIBoBgCAfwYAgJYGAIDdAwCA9gMAgA8EAIASBwCAQQgAgD4IAIA/BwCAOSQAgHIkAICjJACAyCQAgLkmAIDEJgCAyCYAgMwmAIDQJgCALygAgG4oAICWKACAmigAgL8oAIDHKACA4ygAgPUoAID5KACA/SgAgLrp0wAVKQCAMCkAgEspAIA9JACASiQAgFckAIBkJACAdiQAgIMkAICVJACApyQAgLckAIDMJACA1iQAgOQkAIDuJACA+yQAgAwlAIAWJQCAbyUAgHYlAIAkJQCAgBkDAIEZAwCCKQMAgykDAIQ5AwCFOQMAhikDAIcpAwCIGQMAiRkDAIppAwCLaQMAjHkDAI15AwCOaQMAj2kDAJAZAwCRGQMAkgEEAJMtAwCUNQMAlVUGAJZdBgCXVQYAmG0GAJl1BgCafQYAm3UGAJxtBgCdNQYAnj0GAJ81BgCgzQYAodUGAKLdBgCj1QYApPkDAKX5AwCm6QMAp+kDAKjZAwCp+QYAqikGAKspBgCsOQYArTkGAK7FAwCvPQMAsEUDALFNAwCyRQMAs10DALRFAwC1TQMAtkUDALd9AwC4SQMAuUkDALpZAwC7fQYAvGUGAL1tBgC+ZQYAgCUAgKkVDwCoAQ8Aq00PAKpNDwCtRQ8ArEUPAK+hDQCuqQ0AoXULAKBhCwCj7QsAoqkLAKXlCwCk5QsApzkPAKZZCAC5oQ0AuJkNALuhDQC6qQ0AvaENALy5DQAxJQCAvqkNALGhDQCw2Q0As6ENALKpDQC1oQ0AtLkNALehDQC2qQ0AOCUAgEglAIBbJQCAsiUAgLwlAICRJQCAoSUAgNAlAICB7Q0AgO0NAIP9DQCC/Q0Ahe0NAITtDQCH2Q0AhiEYAJlNDQCYTQ0Am1ENAJpdDQCdeQ0AnHUNAJ9pDQCecQ0AkYkNAJCBDQCTmQ0AkoENAJWJDQCUgQ0Al30NAJaBDQDgJACAICUAgI0lAIDMJ
QCA3iUAgAgmAIAtJgCAQiYAgPAlAID6JQCADCYAgBkmAIAxJgCATiYAgFgmAIB2JgCASiYAgGYmAIBuJgCAgCYAgIwmAICUJgCAoyYAgN4mAICcJgCAsiYAgKcmAIC9JgCA1CYAgOImAIABJwCAEScAgBsnAIBPJwCAkicAgOcnAIBPKQCAXSkAgGEpAIBlKQCA8CYAgC4nAIA+JwCASCcAgCMnAIBTJwCAYycAgH4nAIBwJwCAlicAgMInAIDJJwCApicAgNMnAIDdJwCAtCcAgBgoAIAKKACA6ycAgCUoAIDyJwCA/CcAgDMoAIBAKACASigAgFQoAIBeKACAcigAgH8oAICGKACAnigAgKUoAICyKACAyygAgNUoAIDnKACAASkAgA4pAIAZKQCAIykAgDQpAIA7KQCAUykAgMMDAIDmBACAhQUAgNgFAIATBgCAOQYAgFAGAIBtBgCAhAYAgJsGAIDjAwCA/AMAgBUEAIAoBACAOwQAgE4EAIBhBACAdAQAgIcEAICaBACAAAUAgA8FAIAeBQCALQUAgDwFAIBjCACAJAgAgMEGAID8BwCAHQkAgOMoEwAzCQCAKggAgC0IAIAxCACAJAcAgNwuAIDKMACA2S0AgLswAIBFMQCAJwkAgO/sEwAGCQCA3A0AgM8IAICDCACAMQcAgEwHAID8BgCACggAgJQIAIAqCQCACQkAgOANAIDsDQCA2wgAgJkIAIAVBwCAhggAgFUHAID/BgCApgcAgJEkAIDwDQCA4ggAgCcIAICcCACAWAgAgBUJAID0DQCA5QgAgBQIAICfCACA6AgAgBcIAIDJCACAoggAgOwIAIAbCACAzAgAgKYIAID3CACA/QgAgIgHAICKCACAWQcAgAMHAIA9CQCAQQkAgEkJAIA2CQCAGAkAgPgNAID0CACALQkAgAwJAIDkDQCA0ggAgI4IAIBdBwCAMAkAgA8JAIDoDQCA1QgAgJEIAIBgBwCArQgAgGMHAIDjSBIA4xQSAOP4EwDjuBMA4+wSAOOgEgDjbBIA43gSAO/ADQDv2A0A73QSAO9QEgDvqBIA79wSAO8oEwDvIBMA6QcAgMwGAIAOCACAEQgAgNgGAIDUBgCAIQgAgAcHAIBnCACADAcAgHYIAIA0BwCANwcAgKoIAIC2CACAuQgAgOPYEADjoBAA46AQAON0EQDjNBAA4wgQAOPkEADj9BAA77wQAO/gEADvzBAA7zgQAO8QEADvcBAA73AQAO9MEADjhBMA4+gTAOMwEADjEBAA42ATAONAEwDjpBMA47QTAO/IEwDvtBMA75gTAO98EwDvXBMA70wTAO8UEwDv6BAAgO08AIH1PACC/TwAg/U8AITtPACFFT0Ahh09AIcVPQCILT0AiTU9AIo9PQCLNT0AjC09AI0VPQCOHT0AjxU9AJBtPQCRdT0Akn09AJN1PQCUbT0AlRU9AJYdPQCXFT0AmC09AJk1PQCaPT0AmzU9AJwtPQCdFT0Anh09AJ8VPQCg7T0AofU9AKL9PQCj9T0ApO09AKUVPQCmHT0ApxU9AKgtPQCpNT0Aqj09AKs1PQCsLT0ArRU9AK4dPQCvFT0AsG09ALF1PQCyfT0As3U9ALRtPQC1FT0AthE9ALcRPQC4MT0AuTE9ALoxPQC7MT0AvBE9AL0RPQC+ET0AvxE9AIDxPACB/TwAgvU8AIMNPwCEFT8AhR0/AIYVPwCHDT8AiDU/AIk9PwCKNT8Aiw0/AIwVPwCNHT8AjhU/AI8NPwCQdT8AkX0/AJJ1PwCTDT8AlBU/AJUZPwCWCT8Alwk/AJg5PwCZOT8Amgk/AJsJPwCcGT8AnRk/AJ4JPwCfCT8AoPk/AKH5PwCiCT8Aowk/AKQZPwClGT8Apgk/AKcJPwCoOT8AqTk/AKoJPwCrCT8ArBk/AK0ZPwCuCT8Arwk/ALB5PwCxeT8Asgk/ALMJPwC0GT8AtRk/ALYJPwC3CT8AuDk/ALk5PwC6CT8Auwk/ALwZPwC9GT8Avgk/AL8JPwCA+TwAgfk8AIJJPQCDST0AhFk9AIVZPQCGST0Ah0k9AIh5PQCJeT0Aikk9AItJPQCMWT0AjVk9AI5JPQCPST0AkDk9AJE5PQCSAQQAk00GAJRVBgCVXQYAllUGAJdNBgCYdQYAmX0GAJp1BgCbTQYAnFUGAJ1dBgCeVQYAn00GAKC1BgChvQYAorUGAKPNBgCk1QYApd0GAKbVBgCnzQYAqPUGAKn9BgCq9QYAq80GAKzVBgCt3QYArtUGAK/NBgCwtQYAsb0GALK1BgCzTQYAtFUGALVdBgC2VQYAt00GALh1BgC5fQYAunUGALtNBgC8VQYAvV0GAL5VBgC/TQYArH0/AK2lPwCurT8Ar6U/AKh9PwCpZT8Aqm0/AKtlPwCkHT8ApUU/AKZNPwCnRT8AoB0/AKEFPwCiDT8AowU/ALydPwC9pT8Avq0/AL+lPwC4nT8AuYU/ALqNPwC7hT8AtN0/ALWlPwC2rT8At6U/ALDdPwCxxT8Ass0/ALPFPwCMZToAjW06AI5lOgCPfToAiEU6AIlNOgCKRToAi306AIRlOgCFbToAhmU6AId9OgCABToAgQ06AIIFOgCDfToAnF04AJ3lPwCe7T8An+U/AJhdOACZRTgAmk04AJtFOACUuTgAlWU4AJZtOACXZTgAkAU6AJENOgCSBToAkwE5AMAIAIDYCACA3ggAgPAIAIB2BwCAIgkAgHkHAICBBwCAVAkAgJ0HAIDLBwCAvQcAgMQGAIDcBACAewUAgM4FAIAJBgCALwYAgEYGAIBjBgCAegYAgJEGAIDXAwCA8AMAgAkEAIAiBACANQQAgEgEAIBbBACAbgQAgIEEAICUBACA+gQAgAkFAIAYBQCAJwUAgDYFAIBFBQCATgUAgFcFAIBgBQCAaQUAgJUFAICeBQCAXQgAgFYOAIBZDgCAOjoAgKwKAIAVCwCANjoAgD46AICcGQAAnRkAAJ45AACfOQAA4wwAgEI6AIB6NwCA8TAAgKI3AIBaMgCAxSoAgLksAICaMDUA7C0AgB0tAIDoLQCA1y8AgJ+ENQDSMwCAnUQpAGI1AICaNgCA1jYAgAo3AIAeOACAdjEAgAIyAICuMgCARjMAgGI2AIBGOACAcjkAgOkqAICNLACAijEAgNIyAICWNgCAwjkAgJQuAIB6MgCAhjYAgBo3AIALMACAvjUAgLSAGgC1hBkAtojmALeM5ACwABwAsZQeALIAGACznBsAvADsAL2k7wC+qO4Av6TtALgA4AC5tOMAurjiALu84QCkwAAApQAMAKbIDgCnAAgA4jYAgAcvAIAFMQCArXwDAKwAEACt5BMArugSAK9gEQCo8AoAqRwJAKr4FgCr/BQAGjIAgB4zAIAqOACAKSsAgMErAIAtLACAczAAgIIxAIDOMgCA8jMAgI42AICmNgCAyjcAgO44AICiOQCAvjkAgC40AIBuNACAvAgAgCY1AIBGNgCAejgAgE43AIChLQCAIy8AgN40AICeNQCAAjMAgDY0AICaNwCA
5jgAgJ0tAIBwLgCAejEAgC4yAIBiMgCAFjUAgD41AICmOACAKSwAgJwAAACqNQCAzSsAgMkrAICaNACAKjUAgF42AICuOACAajcAgA8wAIBaNwCA0SoAgEQuAIB7LwCAMjMAgLIzAIBNLACAPjQAgDkrAIBfLwCAsSoAgO4xAICLMACAEjUAgIDpAwCB6QMAgjkvAIP9AwCE5QMAhe0DAIblAwCHfS4AiEEuAIkhAgCKeS8AiyUCAIw9AgCNJQIAjiECAI8dAgCQZQIAkW0CAJJlAgCTfQIAlGUCAJVtAgCWZQIAlx0CAJglAgCZLQIAmiUCAJs9AgCcJQIAnS0CAJ4lAgCfHQIAoOUCAKHtAgCi5QIAo/0CAKTlAgCl7QIApuUCAKdNAgCodQIAqX0CAKqpAQCrqQEArLkBAK25AQCuqQEAr6kBALDZAQCx2QEAsukBALPpAQC0eSIAtf0BALb1AQC37QEAuNUBALndAQC61QEAu60BALy1AQC9uQEAvqkBAL+pAQChLACAjS0AgP4zAIBmNgCAPjcAgLoxAIDmMQCAHzAAgB42AIA/MACArjMAgAUrAICBKwCAxSsAgFYxAID+NACA9jUAgEo3AIBaOACANSwAgOksAIAXLwCApzAAgH4yAIBCNACAljgAgHo5AIDOOQCA5jkAgOkwAICmMQCA7jcAgOMuAIC/LwCA2y8AgGswAIBuMgCAujIAgGozAICONACAMjUAgJY1AIDeNwCAbjYAgAY4AIB+OACA6SsAgBUsAID9LACAqjIAgPY2AIADLwCAcy8AgDcwAICyMQCA2jQAgCYzAIAVKwCAWS0AgKguAIB/LwCAQjMAgF4zAIBuNQCAgFEBAIEBKgCCXQEAg1UBAIRNAQCFdQEAhn0BAId1AQCITQEAiVUBAIqdKwCLWQEAjEkBAI1JAQCOuQEAj7kBAJDJAQCRyQEAktkBAJPZAQCUyQEAlckBAJb5AQCX+QEAmMkBAJnJAQCa2QEAm9kBAJzJAQCdyQEAnrkBAJ+5AQCgSQEAoZUBAKJFAQCjXQEApEUBAKVNAQCmRQEAp30BAKhFAQCpTQEAqnkPAKtBAQCsQQEArUEBAK5BAQCvQQEAsMEDALHBAwCywQMAs8EDALTBAwC1wQMAtsEDALfBAwC4wQMAucEDALrBAwC7wQMAvMEDAL3BAwC+wQMAv8kMAI41AIBiOACA4jgAgPI4AIAuOQCALSsAgII0AIBOOACAyjgAgJcvAIDxKgCAUSsAgEguAIBoLgCAlzAAgMYyAIDOMwCAejYAgBo4AIDZMACAojgAgA0sAIAlMQCAMTEAgBIyAIBKMgCATjMAgKozAIAqNACADjUAgDo5AIDrLwCAsjgAgEErAICMLgCAMjIAgOI3AIBPLwCAny8AgDkxAIC6OACA8SsAgNksAIB4LgCAwjAAgBUxAIBiMQCA9jEAgEozAIC+MwCAWjUAgPo2AIAGNwCA1jgAgF0sAIBOMgCA3SwAgMoyAIBuMwCAijYAgL44AICqOQCA0jkAgC0xAICxOSMAsBEDALMVAwCyFQMAtTUDALQ1AwC3NQMAtjUDALkVAwC4FQMAuxUDALoVAwC9dQMAvHUDAL91AwC+dQMAoZkNAKCRDQCjqQ0AopENAKW5DQCksQ0Ap6kNAKaxDQCpmQ0AqJENAKtpAwCqkQ0ArXkDAKxxAwCvaQMArnEDAJEZDQCQEQ0Aky0NAJIRDQCVPQ0AlD0NAJctDQCWLQ0AmR0NAJgdDQCbbQ0Amm0NAJ15DQCcgQ4An2kNAJ5xDQCBmQ0AgAkjAIOpDQCCkQ0AhbkNAISxDQCHqQ0AhrENAImZDQCIkQ0Ai2kNAIqRDQCNeQ0AjHENAI9pDQCOcQ0AKjIAgMY1AIDGNACA6jQAgBozAICiMgCAZjcAgA0rAIAuNgCA9SsAgOUrAIDzLgCAEzAAgPY0AIA0LgCABjIAgOUwAIDqNwCAqjgAgA8vAIBhKwCANS0AgIktAIDVMACA0SsAgCIzAIDmMwCASjQAgGY0AIBqNACAfjQAgPo4AIDuNACAkjYAgFY3AIAKOACANjgAgE45AIBSOQCAVjkAgLo5AIAuOACAxjgAgDErAIBVKwCAaSsAgCUsAIAxLACAcSwAgCUtAIBBLQCASS0AgIUtAICRLQCAdC4AgIsvAICzLwCAuy8AgJH4EADTLwCAfzAAgK8wAIDdMACAWjEAgIApAQCBKQEAgjkBAIM5AQCEKQEAhSkBAIZZAQCHWQEAiNkoAIltAQCKKSUAi2EBAIxhAQCNYQEAHjIAgDoyAICQGQEAajIAgJIVAQC+MgCA3jIAgJU1AQCWPQEAlzUBAJgNAQCZFQEAmh0BAJsVAQCcDQEAnfUBAJ7dKABSMwCAoAUBADI0AICiAQEAVjQAgFI0AIClGQEApgkBAFo0AIBeNACAdjQAgKo9AQCrNQEArC0BAK0VAQCuHQEArxUBALBtAQCxdQEAsn0BALN1AQC0bQEAtRUBALYdAQC3FQEAuC0BALk1AQC6PQEAuzUBALzZLgC9KQEAvhkBAL8ZAQC6eR4Au3keALjNAgC5eR4AvpUeAL+dHgC8QQIAvZ0eALJ9HgCzRR4AsH0eALF1HgC2XR4At0UeALRdHgC1VR4AqgUeAKsNHgCodR4AqQ0eAHo0AICeNACArBUeAK0NHgCiSR4Ao0keAKBJHgChSR4ApkkeAKf5AgCkSR4ApUkeAJqNHgCblR4AmI0eAJmFHgCeiR4An4keAJyNHgCdhR4AkgUDAJP1AACQCQMAkY05AJaxHgCXFQYAlO0AAJUBHACKvQMAi0EDAIiFAwCJnQMAjkEDAI9JAwCMyTkAjVEDAIIVAgCDHQIAgAUCAIEdAgCGzQMAh7EDAIQFAgCFxQMAs/kFALLxBQCx+QUAsOEFALeZKgC2EQMAtRkDALThBQC7NQMAujUDALklAwC4JQMAvxUDAL4VAwC9JQMAvCUDAKP9BQCi/QUAof0FAKD9BQCnnQUApp0FAKWdBQCknQUAq7kFAKqxBQCpJScAqL0FAK+ZBQCukQUArZkFAKyhBQCTAQUAkvkFAJF1OQCQ9QUAlwEFAJYZBQCVEQUAlBkFAJt5CQCaOQUAmTEFAJg5BQCfHQUAnh0FAJ0dBQCcHQUAg4kFAIKBBQCBiQUAgPEFAIeFBQCGhQUAhZUFAISBJgCLhQUAioUFAIm1BQCItQUAj4UFAI6FBQCNlQUAjJUFAM40AIA6NQCAQjUAgFY1AIB+NQCAzjUAgAI2AIBqNgCAEjcAgCo3AIBeNwCAYjcAgKY3AICqNwCAAjgAgNo4AIAeOQCANjkAgIMvAICQ6gCA5jUAgLkqAIC9KwCAfSsAgCUrAIBlKwCAkSsAgCEsAIA9LACAES0AgCEtAIA9LQCAmS0AgOQtAIDwLQCADC4AgBwuAIALLwCAEy8AgEMvAIBjLwCAky8AgKsvAICbLwCAry8AgO8vAIBHMACAUzAAgFswAICDMACACTEAgB0xAIBeMgCAVjIAgIYyAIAWNACA4jI
AgBYzAIBiMwCAfjMAgKIzAIDGMwCAyjMAgOozAICAjQEAgZUBAIKdAQCDlQEAhI0BAIW1AQCGvQEAh7UBAIiNAQCJwR0AipkBAIvBHQCMhQEAjY0BAI6FAQCP/QEAkIUBAJEZHQCSkRQAk4UBAJSdAQCViTIAlk0ZAJc9GwCYsQEAmbEBAJotHACbtQEAnD0cAJ2pAQCemQEAn5kBAKDlHQChbQEAomUBAKN9AQCkZQEApW0BAKbxHQCnYQEAqKEDAKmhAwCqoQMAq6EDAKyhAwCttQEArq0DAK+lAwCwYRkAsdkDALLZAQCz7QMAtPUDALX9AwC29QMAt+0DALjFAQC50QMAumEdALvVAwC82QEAvT0XAL7FAwC/0QEA+jMAgA40AIAKNACAOjQAgLY0AIDmNACAHjUAgE41AIAyNgCAWjYAgM42AIAWNwCAIjcAgEI3AIBGNwCAUjcAgG43AIDmNwCAFjgAgEo4AIBqOACAtjgAgA45AIAqOQCAijkAgCfqAIAi6gCAVOoAgOEpAIAJKgCADSoAgNbqAIAD6wCAe+sAgBY6AIAmOgCARwgAgFIIAIBVCACASggAgE4IAIBXCQCA8Q4AgOIOAIDnDgCA9g4AgOwOAICyNACASw8AgMoPAICBDwCALw8AgFoPAIBnDwCAbw8AgJ0PAIDCDwCAuA8AgL0PAICqDwCAsQ8AgP4OAIADDwCACA8AgIBBAQCBMQMAgk0BAINFAQCEXQEAhUUBAIZNAQCHIQMAiF0fAIl9AQCKaQMAi3EBAIx1AwCNVQEAjlk6AI9ZAQCQKQEAkSkBAJI5AQCTOQEAlCkBAJUpAQCW2QEAl9kBAJjpAQCZ6QEAFQ8AgCIPAIAqDwCAMg8AgDwPAIBBDwCARg8AgFAPAIBVDwCAXQ8AgGoPAIByDwCAdw8AgHwPAICEDwCAiQ8AgJMPAICYDwCAoA8AgKUPAIDFDwCANw8AgBoPAIBiDwCAjg8AgA0PAIDdFgCA5hYAgOkWAIDvFgCA4xYAgOwWAIDgFgCAExcAgBYXAID1FgCA8hYAgPgWAICAmQcAgZkHAPsWAICDrQcAhLUHAAQXAICGsQcAh7EHAIiRBwCJkQcAipEHAIuRBwCM8QcAjfEHAI7xBwCP8QcAkJEHAJGVBwCSnQcAk5kHAJSFBwCVgQcAloEHAJeFBwCYuQcAmb0HAJq1BwCbsQcAnK0HAJ2pBwCemQcAn50HAKBhBwChZQcAom0HAKNpBwCkdQcApXEHAKZxBwCndQcAqEkHAKlNBwCqRQcAq0EHAKxdBwCtWQcArkkHAK9NBwCwMQcAsTUHALI9BwCzOQcAtCUHALUhBwC2IQcAtyUHALgZBwC5HQcAuhUHALsRBwC8DQcAvQkHAL7xAAC/9QAAgAkBAIENAQCCHQEAgxkBAITZAACF3QAAhtUAAIfRAACI8QAAifUAAIr9AACL+QAAjOkAAI3tAACO5QAAj+EAAJCdAACRmQAAkq0AAJOpAACUtQAAlbEAAJaxAACXtQAAmIkAAJmNAACahQAAm4EAAJydAACdmQAAnokAAJ+NAACgdQAAoXEAAKJ9AACjeQAApGlQAqVtUAKmYQAAp2UAAKhZAACpXQAAqlUAAKtRAACsTQAArUkAAK49AwCvOQMAsClQArEtUAIBFwCABxcAgP4WAIANFwCAChcAgBkXAIDZXFICHxcAgCUXAIAiFwCAKBcAgCsXAIA0FwCALhcAgKOhAACipQAAoZEAAKCVAACntQAAprEAAKW9AACkuQAAq40AAKqJAACpgQAAqIUAAK+FAACugQAArYkAAKyNAACz/QAAsvkAALHxAACw9QAAt5kAALadAAC1nQAAtJkAALutAAC6qQAAuaUAALilAAC/ZQEAvmEBAL1tAQC8aQEAHBcAgFcXAIBAFwCAPRcAgEgXAIBOFwCAOhcAgNksUQJLFwCAVBcAgHkWAIDhDwCAMRAAgA4QAIAiEACAHRAAgJNBAAAnEACALBAAgBMQAICXWQAAllUAAJVZAACUXQAAm3EAAJppAACZZQAAmGUAAJ9lAACeYQAAnTFTApxtAAC4gQQAuYEEALqBBAC7gQQAvIEEAFEXAIC+jQQA5g8AgLDdBQCxTQQAskUEALNdBAC0RQQAtU0EALZFBADrDwCAqKEFAKntQQCqrQUAq6UFAKy9BQCtpQUArq0FAK+lBQCgqQUAoZFBAKKpQACjoQUApKEFAKWhBQCmoQUAp6EFAP8PAIAYEACAWBAAgF0QAIBpEACAnVUFAH8QAICfWQUAjhAAgJMQAICeEACAkwUFAJQdBQCVBQUAlg0FAJcFBQC4EACAyxAAgO8QAIAhEQCAJhEAgC4RAIA9EQCATBEAgIBxBQCBcQUAgnEFAINxBQCEUQUAhVEFAIZdBQBREQCAWREAgHwRAICjEQCArxEAgM8RAIDUEQCA2REAgBMSAIAmEgCAMhIAgEoSAIDEEgCAGhMAgDMTAIA4EwCASxMAgFwTAIBuEwCAcxMAgJoTAICiEwCAtxMAgN4TAIDjEwCAPRQAgEIUAIBHFACAUxQAgF8UAIBkFACAbBQAgHgUAICSFACAlxQAgJ8UAICkFACAqRQAgK4UAICzFACAuBQAgMsUAIDQFACA7BQAgAYVAIAgFQCALBUAgEQVAIBJFQCAVhUAgHcVAICaFQCAtBUAgMAVAIDFFQCAzRUAgO4VAIAIFgCAFxYAgDQWAIA5FgCAQRYAgEYWAIBZFgCAXhYAgICtAQCBtQEAgr0BAIO1AQCErQEAhdUBAIbdAQCH1QEAiO0BAIn1AQCK/QEAi/UBAIztAQCN1QEAjt0BAI/VAQCQrQEAkbUBAJK9AQCTtQEAlK0BAJVVAwCWXQMAl1UDAJhtAwCZdQMAmn0DAJt1AwCcbQMAnVUDAJ5dAwCfVQMAoK0DAKG1AwCivQMAo7UDAKStAwCl1QMAphkOAKfZAwCobQ8AqSEOAKrhAwCr4QMArCkOAK3lAwCuGQ4ArxkOALCVAwCxnQMAsgEOALORAwC0HQ4AtQUOALa5AwC3uQMAuDkOALmNAwC6NQ4AuxEOALyBAQC9gQEAvnkBAL95AQCEFgCAkBYAgJwWAICrFgCAyBYAgM0WAIDuEQCA/xEAgHwWAICBAACAiwAAgJUAAICfAACAqQAAgLMAAID1DwCA+g8AgAQQAIB1EACAehAAgIQQAIDlEACA6hAAgBcRAIAzEQCAOBEAgEIRAIBRFQCADRYAgBIWAIAqFgCAoRYAgKYWAIC+FgCA8A8AgAkQAICJEACAHBEAgNcSAIA/FQCALxYAgGMWAIDDFgCARxEAgGQSAICfEgCAshIAgBEUAIAdFACAKRQAgI0TAICSEwCA0RMAgNYTAID9EwCAAhQAgGkSAIBuEgCAtxIAgLwSAIDCEQCAxxEAgJYRAICbEQCApD0DAKVFAwCmTQMAp0UDAKA9AwChJQMAoi0DAKMlAwCsfQMArUUDAK5NAwCvRQMAqH0DAKllAwCqbQMAq2UDALQ9AwC1xQMAts0DAL
fFAwCwPQMAsSUDALItAwCzJQMAvP0DAL3FAwC+zQMAv8UDALj9AwC55QMAuu0DALvlAwCEBQwAhQ0MAIYFDACHHQwAgI0MAIGpDACCGQwAg1ENAIxhDACNYQwAjmEMAI9hDACIKQwAiRUMAIodDACLFQwAlD0MAJXFAwCWzQMAl8UDAJABDACRAQwAkgEMAJMBDACc/QMAncUDAJ7NAwCfxQMAmP0DAJnlAwCa7QMAm+UDAIBpBACBaQQAgnEEAINxBACEnQQAhYUEAIaNBACHhQQAiL0EAImNBACKhQQAi50EAIyFBACNqQYAjvkEAI/5BACQiQQAkYkEAJKRBACTkQQAlLEEAJWxBACW+QYAl60EAJiVBACZwQYAmmkGAJtpBgCceQYAnXkGAJ7RBgCf/QsAoA0GAKEdCwCiGQYAo0ULAKQFBgClTQsApjUGAKe1BACoEQYAqREGAKoRBgCrNQQArC0EAK0BBACuXQQArx0GALDNBgCxbQYAsnUGALMNBgC0FQYAtR0GALYVBgC3DQYAuDUGALk9BgC6NQYAuw0GALwVBgC9HQYAvhUGAL8NBgCA9QcAgf0HAIL1BwCD9QAAhO0AAIURAwCGEQMAhxEDAIgxAwCJMQMAijEDAIsxAwCMhQcAjRUDAI4dAwCPFQMAkG0DAJGNBwCShQcAk50HAJSFBwCVjQcAloUHAJe9BwCYhQcAmY0HAJqFBwCbnQcAnIUHAJ2NBwCehQcAn4UAAKB9AAChgQMAooEDAKOBAwCkgQMApYEDAKaBAwCngQMAqBUHAKmFAwCqjQMAq4UDAKydAwCtoQMArqEDAK+hAwCwdQcAsXUHALJxBwCzhQUAtM0FALX1BQC2/QUAt8kDALj5AwC5+QMAuqEFALuhBQC8wQMAvcUDAN4RAIDjEQCAhJz7ACYTAIArEwCAYRMAgGYTAIB2EgCAghIAgJUSAICaEgCARRIAgNwSAIBXEwCASxAAgKMQAIC9EACAxBAAgJB1AACRfQAAknEAAJNxAACUAfwAlVX+AJZd/gCXVf4AmG3+AJlp/gCaef4Am3n+AJxp/gCdaf4Anln+AJ9Z/gCgpf4Aoa3+AKKl/gCjof4ApKH+AKWl/gCmrf4Ap6X+AKiZ/gCpmf4Aqun+AKvt/gCs9f4ArfH+AK7x/gCv8f4AsI3+ALGV/gCymf4As5n+ALSJ/gC1if4Atrn+ALe9/gC4hf4AuY3+ALqF/gC7nf4AvIX+AL2B/gC+gf4Av4H+AKbZCACnBQcApMEIAKWZBQCi0QgAo9EIAKCJBQChtQgArgEHAK8BBwCsMQcArTEHAKo9BwCrJQcAqD0HAKk1BwC2fQcAtwUHALR9BwC1dQcAsskFALNlBwCwcQcAsXEHAL4BBwC/AQcAvDEHAL0xBwC6IQcAuyEHALg9BwC5MQcAhjkHAIc5BwCELQcAhTkHAIINBwCDNQcAgBEHAIEFBwCOSQcAj0kHAIxNBwCN1QUAisEFAIvBBQCI1QUAiXEHAJbVBQCX2QgAlE0FAJXdBQCSUQUAk9kFAJD5BQCRoQUAnnEIAJ99CACcYQgAnWEIAJpxCACbeQUAmMUIAJl1BQD0EACA+xAAgAIRAICBEQCAuxEAgLQRAIArEgCAGBIAgB8SAIBWEgCATxIAgF0SAIDJEgCAHxMAgIcSAIB7EgCApBIAgKsSAIA9EwCAUBMAgHgTAIB/EwCAhhMAgKcTAIC8EwCAwxMAgOgTAID2EwCA7xMAgEwUAIB9FACAhBQAgAsVAIAZFQCAEhUAgPEUAIAlFQCAMRUAgHwVAICDFQCAkxUAgFsVAIBpFQCAnxUAgKYVAIBiFQCASxYAgFIWAIDzFQCA+hUAgNkVAIDgFQCAIxYAgBwWAICwFgCAbhAAgLEQAICqEACA3hAAgNcQAIAQEQCACREAgI8RAIBeEQCAgIEBAIGBAQCCgQEAg4EBAISdAQCFhQEAhokBAIeJAQCItQEAib0BAIq1AQCLjQEAjJUBAI2dAQCOlQEAj40BAIgRAIA3EgCAkv0BAJP1AQCU7QEAlZUBAJadAQCXlQEAmKkBAJmpAQCauQEAm7kBAJypAQCdrQEAnqUBAJ+dAQCgZQEAoW0BAKJlAQCjfQEApGUBAKVtAQCmZQEAp90AAKjlAACppQMAqq0DAKulAwCsvQMAraUDAK6tAwCvpQMAsN0DALHlAwCy7QMAs+UDALSpAQC1VQEAtvUDALftAwC41QMAud0DALrVAwC7rQMAvM0DAL3BAwC+vQMAv7UDANASAICOEgCARBMAgP8UAIA4FQCAlRYAgIkWAIC3FgCAuRUAgIsUAIABFgCAyhMAgMQUAIDSFQCArRUAgPgUAIC9FACAZREAgKgRAIBwFQCA0BAAgFgUAIBiEACAPhIAgOcVAIATEwCAcRQAgEIQAIA5EACAihUAgOESAID2EQCArhMAgGsWAIDqEgCA8RIAgGwRAIAEEgCApgMAgA0jAIARIwCAoAYAgMcAAIC1BgCAqyMAgK8jAIC5IQCAtSEAgOMHAIB7CQCAfwkAgEEjAICnIwCANSMAgDkjAIAdIwCAISMAgCUjAIApIwCALSMAgDEjAIDbBwCA3wcAgNEAAICATQEAgVEBAIJRAQCDTQEAhE0DAIUhAwCGRQEAh30BANcAAICiAwCAqAMAgN0HAIDTAACA1QAAgL0GAIB5AACABxQAgH0AAICHAACAkQAAgAwUAICbAACAGBQAgKUAAIAkFACArwAAgDAUAIC5AACANRQAgM8PAIBVEACAmBAAgJsQAIArEQCAVhEAgKARAIDMEQCA6BEAgOsRAIDzEQCADRIAgBASAIBzEgCAwRIAgDATAIBrEwCAlxMAgJ8TAICwpQEAsa0BALKlAQCzvQEAtKUBALWtAQC2pQEAt10BALhlAQC5bQEAumUBALt9AQC8ZQEA2xMAgDoUAIBpFACAgAW5AIHhBgCC4QYAg+EGAIThBgCoBgCAswYAgIfpBgCI2QYAifmxAIr1sQCL8bEAjO2xAI31BgCO+QYAj/0GAJDZBgCR2QYAkvWxAJwUAICUiZIClfEGAJb1BgCX9QYAmNkGAJnVsgCa3bIAm6kGAJy5BgCduQYAnqkGAJ+BBgCgoQcAoaEHAKIhsgCjpQcApIUAAKWNAACmQbMA1RQAgKiNBwCplQcAqp0HAKuVBwBOFQCAyhUAgDYQAIA+FgCAsP0HALGFBwCyjQcAaBYAgLSZBwCBFgCAtpUHALeNBwC4tQcAub0HALq1BwC7jQcAvJUHAL2dBwC+lQcAv40HAIB1BgCBlaACgpmgAoOZoAKEhaAChb2gAoaxoAKHhaACiLmgAomRoAKKnaACi5mgAoyFoAKNjQEAjoEBAI9FBgCQOQYAkT0GAJIxBgCTMQYAlC0GAJXVBgCW2QYAl90GAJjhBgCZ4QYAmu0GAJvpBgCc9QYAnf0GAJ7xBgCf9QYAoAkGAKEJBgCiBQYAowEGAKQdBgClBQYApgkGAKcNBgCoMQYAqTEGAKo9BgCrNQYArCkGAK0pB
gCuJQYArx0GALBhBgCxYQYAsm0GALNpBgC0dQYAtX0GALZxBgC3dQYAuEkGALlJBgC6RQYAu0EGALxdBgC9RQYAvkkGAL9NBgCAsQUAgbEFAIK9BQCDuQUAhKUFAIWtBQCGoQUAh6UFAIiZBQCJmQUAipUFAIuRBQCMjQUAjcEFAI7NBQCPyQUAkLUFAJG9BQCSsQUAk7UFAJSpBQCVqQUAlqUFAJehBQCYnQUAmSkCAJolAgCbIQIAnD0CAJ3pAgCe5QIAn+ECAKAdAgChNQIAojkCAKM9AgCkIQIApSECAKYtAgCnKQIAqBUCAKkZAgCqFQIAqxECAKwNAgCteQIArnUCAK8V8ACwafAAsRECALIdAgCzGQIAtAUCALUhAAC2LQAAtyUAALgZAAC54QEAuu0BALvlAQC8+QEA2BQAgN0UAIC/9YYCp2kNAOIUAIDnFACAzwAAgNkAAICzAwCA4QcAgH0JAID7IgCAzNSFAszghQL/IgCAgSkAgDUkAIBuJACAjSQAgLyZBQC9mQUAvqkFAL+ZvAC4mQUAuZkFALqJBQC7iQUAtKEFALXVsQC23bEAt6kFALCxsgCxzQUAssUFALO9BQCfJACAxCQAgMMoAIDfKACA8SgAgIgmAICFKQCAaSkAgCkkAIAtJACA2WSgAoEJAIDZUKAChAkAgI0JAICKCQCAhwkAgOwhAIDvIgCA9CEAgJhlBQCZEbIA/CEAgNkwoAKUOZEClU0FAJZFBQCXXQUAkGkFAJFpBQCSWQUAk1kFAID9vACB1ZwCgmW8AIPFvACEkbwAhZ28AIalvACHjbwAiK2TAonlvACKKZACi7W8AIwRkAKNlbwAji2wAI/FnAKQ6bwAkcHIAJJBkAKT8Z0ClNW8AJXlvACW4bwAl02QAphlkAKZfZACmrm8AJupCgCcbQ8Anb0KAPMiAICfXQ8AoK0PAKElCgCibQoAo2UKAKQNCgClpQ8ApgXUAKepDwComQ8AqZkPAKopDwCrKQ8ArDkPAK05DwCuKQ8ArykPALBZDwCxndEAspXRALOF1gC0sdEAtbHRALbZ1AC32dQAuOnUALnp1AC6+dQAu/nUALzp1AC96dQAvrnUAL+51ACASdUAgUnVAIJZ1QCDWdUAhEnVAIV90ACGddAAh23QAIhV0ACJXdAAinXVAIut1QCMtdUAjb3VAI611QCPQdAAkMHQAJHB0ACSwdAAk8HQAJTB0ACVwdAAlsHQAJfB0ACYwdAAmc3QAJrF0ACb3dAAnOHVAJ3pDgCe2Q4An9kOAKDV2wChwdkAotnZAKPB2QCkxdkApc3ZAKbF2QCnGdkAqGHZAKlh2QCqydkAq8nZAKzZ2QCt2dkArs3ZAK/B2QCwCdkAsRXZALId2QCzrdoAtB3ZALWx2gC2wdwAt93dALjl3QC59d0Auv3dALut3QC8td0AvaXdAL6t3QDwIQCAgvHaAIPx2gD3IgCA5OgAgIYR2ACHEdgAhOHaAIXh2gCKKdgAiynYAK9AEwClKNoAjinYAI8p2ACMKdgAjSnYAJJh2ACTYdgA6egAgO7oAICWZdgAl23YAJR12ACVbdgAml3YAJst2ADz6ACA8FwCALEw3wCR8AIAnCnYALLQAwCiOQ0Ao1GeAqAlDQChOQ0AplUNAIS8AgCkJQ0ApV0NAKptDQCrAQQAqGENAKlRAwCuuQAAp3UAAKxhDQCtxQIA+OgAgIfMAwDwVAIAzFC6AJHYBACb9NsAkRgCAJk02wCddAQAvh0AAJ9gBQCejAUAjOwCAI2sBAD96ACAvfWKAqghvwCpLb8Aqi2/AKs9vwCsKb8ArVW/AK5RvwCvTb8AoBkIAKGlvQCiIb8AozGzAKQ9vwClJb8Apg2zAKclvwC46bMAuc3LALppswC7uQkAvH0IAL2tCQC+QQwAv50JALA5vwCxhb0Asgm/ALPtywC0Gb8AtQW/ALbtswC3Bb8AiDG9AIkxvQCKrQgAiyW9AIwJCQCNvQgAjiW+AI+JDAAC6QCAgQ0JAIKlDACDUQkAhIEIAIWBCACGmQgAh60MAJhhvQCZYb0Amm0JAJsVnQKcxQ8AnQ28AJ7BDwCfcQkAkBW+AJERnwKSNZ8Ckw2fApQJvgCVCb4AlnG9AJdxvQCCuAQAl6UHALnEAwDwWAIAkUwCAJLIAgCErAQAsD0AAAzpAIAH6QCAvQUAABHpAIDwTAIAuhEAAJEkAgCN5AQAkqwCAJasAgC4uAMAudADAJb4AgCvDQAAFukAgPB4AgCRXAIAlrACAK8FAAAb6QCAIOkAgCnpAIAy6QCAP+kAgIX4AwBM6QCAh4ADAIbAAgBZ6QCAZukAgHPpAICW6QCAuzkAAHzpAICf6QCAiekAgL8dAAC+HQAAvR0AALwhAACVwB0AlMQfAJfIGgCWABgAkSAAAJDUAQCT2B4AkgAcAJ3gEgCcABAAn+gRAJ7sEwCZ8BkAmPQbAJv4FwCaABQAnnEBAJ9xAQCABQAArOkAgM0KAICwDACAXg0AgGQNAIBqDQCAdg0AgHkNAIB8DQCAfw0AgIINAICRDQCAlw0AgJoNAICdDQCAICIAgMcNAIDWDQCA/A0AgP8NAIAODgCAEQ4AgB0OAIAYIgCAMg4AgDUOAIDXFgCAEBcAgNoWAIC4ACwAuYwvALqILgC6AwCAhpwXAMx4vACEmC0AhVwXALcDAIDKAwCAiAAoAIksFADtBACAjAUAgN8FAIAaBgCAQAYAgFcGAIB0BgCAiwYAgDgBAIA8AQCAQAEAgEQBAIBIAQCATAEAgKR9AQBQAQCAonUBAKNlAQCggQEAoYEBALxxugC9kbYAvnG6AL+ltgC48bgAuXW6ALqZzgC7dboAtGG6ALVtugC2eboAt3W6ALAZugCxEboAsgm6ALMFugCsUboArXG2AK5RugCvbboAqNG4AKldugCqRbYAq1G6AKRxlgKlYZYCpnGWAqe9ugCgzZsCofG6AKLJugCjxboAnHmaAp0tugCeDc4An4WWApgJugCZtZYCmjm6AJuJtgCUMboA+CEAgJZpugCXrZYCkHm6AJE1ugCSMboAkwG6AIxJzgCN5bYAjhmaAo+hugCIoboAiUG2AIqhugCLdbYAhAG4AIWFugCGac4Ah4W6AICxugCBvboAgqm6AIOlugCAgbkAgQ27AIIVtwCDAbsAhAG7AIUhtwCGAbsAhz27AIgJuwCJAbsAihm7AIsVuwCMcbsAjX27AI5puwCPZbsAkKG5AJEluwCSyc8AkyW7AJQhuwCVwbcAliG7AJf1twCY6c8AmUW3AJq5mwKbAbsAnLm7AJ31uwCe8bsAn8G7AKARuwChCZQCokm7AKONlwKkCbsApbWXAqY5uwCnibcAqFmbAqkNuwCqLc8Aq6WXAqwNmgKtMbsArgm7AK8FuwCw0ZcCscGXArLRlwKzHbsAtFG5ALXduwC2xbcAt9G7ALjxuwC50bcAuvG7ALvNuwC82bsAvdG7AL7JuwC/xbsAgJmkAIEliAKCqaQAgxmoAFsNAICFvaQAhp3QAIcViAKI
nYUCiaGkAIqZpACLlaQAjCGIAo0xiAKOIYgCj+2kAJDBpgCRTaQAklWoAJNBpACUQaQAlWGoAJZBpACXfaQAmEmkAJlBpACaWaQAm1WkAJwxpACdPaQAnimkAJ8lpACgYaYAoeWkAKIJ0ACj5aQApOGkAKUBqACm4aQApzWoAKgp0ACphagAqnmEAqvBpACseaQArTWkAK4xpACvAaQAsFGkALFJiwKyCaQAs82IArRJpAC19YgCtnmkALfJqAC4GYQCuU2kALpt0AC75YgCvE2FAr1xpAC+SaQAv0WkAIARiQKBAYkCghGJAoPdpQCEkacAhR2lAFQBAICHEaUAiDGlAIkRqQCKMaUAWAEAgFwBAICNEaUAjgmlAI8FpQCQAaUAkQ2lAJIZpQCTFaUAlLGnAGABAICW2dEAlzWlAJgRpQCZ8akAmhGlAJvFqQCc+dEAZAEAgJ6phQKfEaUAoEmlAKEFpQCiAaUAozGlAKQBpQClGYoCplmlAKediQKoOaUAqYWJAqoJpQCruakArEmFAq0dpQCuPdEAr7WJArB9hAKxQaUAsnmlALN1pQC0wYkCtdGJArbBiQK3DaUAuGGnALntpQBoAQCAu+GlALzhpQC9wakAvuGlAGwBAIC3baYAttWGArUpqgC0hdIAs7mqALJtpgCxjaoAsG2mAL8higK+5aYAvaWJAnABAIC7jaYAdAEAgLm5pgC49aYAeAEAgKZ1pgClbaYAfAEAgIABAICiTaYAhAEAgIgBAICvCaYAruXSAIwBAICsjaQAqymmAKolpgCpMaYAkAEAgJc5pgCWNaYAlQ2mAJQxhwKTmYoCkhHSAJExpgCQZYYCn62mAJ65qgCUAQCAnC2kAJthpgCarYoCmb2KApitigKHfaYAhk2mAIVJpgCEBaYAg72mAIIFhgKB+aoAgFXSAI/1qgCORaYAjcmKAox1pgCL8YoCijWmAIl1iQKIbaYAgCmnAIEhpwCCOacAgzWnAIRRpwCYAQCAhkmnAJwBAIDMSIkCzYiJAoqp0wCLRacAjEGnAI2hqwCOQacAj5WrAJDJ0wBFIwCAkpmHApMhpwCUmacAldWnAJbRpwCX4acAmPGnAJnpiAKaqacAm22LApzppwCdVYsCntmnAJ9pqwCgeYcCoS2nAKIN0wCjhYsCpC2GAqURpwCmKacApyWnAKixiwKpoYsCqrGLAqt9pwCsMaUArb2nAK6lqwCvsacAsNGnALHxqwCy0acAs+2nALT5pwC18acAtumnALflpwC4oacAua2nALq5pwC7tacAvBGlAL2VpwC+edMAv5WnAICRoACBiY8CgsmgAIMNjAKEiaAAhTWMAoa5oACHCawAiNmAAomNoACKrdQAiyWMAoyNgQKNsaAAjomgAI+FoACQUYwCkUGMApJRjAKTnaAAlNGiAJVdoACWRawAl1GgAJhxoACZUawAmnGgAJtNoACcWaAAnVGgAJ5JoACfRaAAoMGgAKHNoACi2aAAo9WgAKRxogCl9aAAphnUAKf1oACo0aAAqTGsAKrRoACrBawArDnUAK2VrACuaYACr9GgALAJoACxRaAAskGgALNxoAC0QaAAtVmPArYZoAC33YwCuHmgALnFjAK6SaAAu/msALwJgAK9XaAAvn3UAL/1jAKAvYACgYGhAIK5oQCDtaEAhAGNAoURjQKGAY0Ch82hAIihowCJLaEAijWtAIshoQCMIaEAjQGtAI4hoQCPHaEAkGmhAJFhoQCSeaEAk3WhAJQRoQCVHaEAlgmhAJcFoQCYgaMAmQWhAJrp1QCbBaEAnAGhAJ3hrQCeAaEAn9WtAKAJ1QChpa0AolmBAqPhoQCkWaEApRWhAKYRoQCnIaEAqDGhAKkpjgKqaaEAq62NAqwpoQCtlY0CrhmhAK+prQCwOYECsW2hALJN1QCzxY0CtG2AArVRoQC2aaEAt2WhALjxjQK54Y0CuvGNArs9oQC8caMAvf2hAL7lrQC/8aEAs2miALKF1gCxaaIAsO2gALe5rgC2baIAtY2uALRtogC7TaIAuvWCArkJrgC4pdYAv42iAL69ogC9uaIAvPWiAKNNogCiWa4AoUGiAKDNoACncaIApk2iAKVtrgCkTaIAq1miAKpVogCpTaIAqEWiAK8pogCuJaIArTGiAKw9ogCTla4AkiWiAJGpjgKQFaIAl5mOApYR1gCVMaIAlGWCApsZogCaFaIAmS2iAJgRgwKfYaIAnq2OAp29jgKcrY4Cg2muAIK9ogCBXa4AgL2iAIe9ogCGBYIChfmuAIRV1gCLXaIAim2iAIlpogCIJaIAj/GOAo41ogCNdY0CjG2iAIARowCBMa8AghGjAIMtowCEOaMAhTGjAIYpowCHJaMAiGGjAIltowCKeaMAi3WjAIzRoQCNVaMAjrnXAI9VowCQMaMAkdGvAJIxowCT5a8AlNnXAJV1rwCWiYMClzGjAJipowCZ5aMAmuGjAJvRowCc4aMAnfmMAp65owCffY8CoBmjAKGljwKiKaMAo5mvAKRpgwKlPaMAph3XAKeVjwKoHYICqSGjAKoZowCrFaMArKGPAq2xjwKuoY8Cr22jALBBoQCxzaMAstWvALPBowC0waMAteGvALbBowC3/aMAuMmjALnBowC62aMAu9WjALyxowC9vaMAvqmjAL+lowBnDQCA0QYAgG0NAIDIBwCAcw0AgA8HAICFDQCAlAcAgIsNAICaBwCAuA0AgH0HAIDKDQCAxQcAgAIOAIBPBwCAFA4AgFIHAIAgDgCAkB0AAOEGAIAPJACA4iUAgCguAICtLACAyS0AgKpVAACrKQAAMjcAgAErAIDGMACAsjIAgAEsAIBTLwCAmSsAgJ8wAIDtKwCAGjUAgI43AICtLQCA5SwAgGYyAIADMACALzAAgA44AIAjMACA+y8AgHI0AICAIa4AgaWsAIJJ2ACDpawAhKGsAIVBoACGoawAh3WgAIhp2ACJxaAAiv0AAIsxxgCM7QAAjdEAAI7VAACPyQAAgCmhAIFNFACCIQEAg+G4AoQ5qgCFOaoAhhG9AodRFACIEQEAidW4AorNrQCLLbsCjGEUAI3ZjQKObRQAj2UUAJB5AQCRubgCkkm9ApNFuwKUDRQAlTUUAJYZAQCXqbgCmF2qAJkBFACaIQEAmwUUAJx5vQKdhbgCnnm7Ap+JuAKggb0CoXm4AqKZCQCjlRQApFmuAKWJFACmmQEAp70UAKipAQCpvbsCqrkBAKuJFACsmRQArZkUAK6JFACviRQAsNkBALEJrgCy6QEAs9W7ArTNuwK17RQAtpW8ArfhFAC4oRQAuaEUALrBoQC7pRQAvNkBAL0ZuAK+0aoAv9GqAL9FFwC+RRcAvTUXALxBvwK7KRcAugm4ArkBuAK4PQIAt+2tALY9AgC1HRcAtB0XALMdFwCyHRcAsR0XALAtAgCvWbgCrk0CAK1pFwCsTQIAq00XAKqdrQCpQRcAqE0KAK40AIDRLACApX0XAKR9FwCjoa4Aom2CAqF9ggKgbYICnzmuAJ41rgCdDa4
AnDGPApuZggKaEdoAmTGuAJhljgKXtaIAlgWuAJWJggKUNa4Ak7GCApJ1rgCRNYECkC2uAI99rgCOTa4AjUmuAIwFrgCLva4AigWOAon5ogCIVdoAh0miAIadrgCFfaIAhJ2uAIOZrgCCddoAgZmuAIAdrADMqIQCzUyGAswguQLNTLkCzECOAkYyAIDMmIUCzTyEAswQgwLNUIMCzKCDAs2MgwLMMIACzSSAAswYgALNhIACmjMAgAUsAIAxLQCAiSMAgE0jAIBXIwCAayMAgJMjAIB1IwCAnSMAgGEjAIB/IwCAzPC5As2EuQLMULgCzay7AoDNAACB1QAAgt0AAIPVAACEzQAAhfUAAIb9AACH9QAAiM0AAFcvAIDBLACA1SoAgM0qAIDdKgCAuekAgCErAICQZQAAkW0AAKiIKgA1KwCAPSsAgEUrAIBJKwCATSsAgKIAMACjzDMAoOg9AKHsPACm8DYAp/QoAKQANACl/DUAgFERAIHpiAKCXREAg1URAIQpBACF6b0Chhm4AocVvgKIfREAiUURAIppBACL2b0CjA2vAI1REQCOcQQAj1URAJBJuAKRtb0Ckkm+ApO5vQKUUbgClam9ApZJDACXRREAmKmrAJl5EQCaaQQAm00RAJx5BACdbb4CnmkEAJ9ZEQCgqREAoakRAKK5EQCjuREApIkEAKVZqwCmuQQAp4W+Aqi9vgKpnREAquW5AquREQCs8REArfERAK6RpACv9REAsOkEALEpvQKy4a8As+GvALTZuAK1mREAtukEALctvQK4BagAueW+Arq5EQC7AYgCvKURAL2tEQC+wQQAvwG9AoABuQKBDb8CglUQAINtEACEUQUAheG8AoYlrgCHeRAAiGkFAIlNEACKIbkCi928AowxvwKNwbwCjjm5Ao/BvAKQUQ0AkV0QAJKBqgCTURAAlFEFAJV1EACWUQUAl0W/AphxBQCZQRAAmkEQAJtBEACcQRAAnUEQAJ5hBQCfsaoAoKEFAKGdvwKilb8Co7UQAKTduAKlqRAAptkQAKfZEACoiaUAqe0QAKqBBQCrQbwCrJmuAK2ZrgCusbkCr/EQALDxBQCxNbwCsi2pALPNvwK0gRAAtTmJAraNEAC3hRAAuNkFALkZvAK66bkCu+W/ArytEAC9lRAAvrkFAL8JvAK5La0AuC2tALtFEwC6BboCveG/ArwlBgC/GbwCvvmqALEdEwCwabsCs20TALJtEwC1eRMAtB2mALfVvwK2FQYAqXUTAKh1EwCrhakAqlUGAK1JvAKsdQYAr2ETAK5BvAKhQRMAoGUGAKNxvAKiZQYApVUTAKRlBgCnVRMAplUTAJl1vwKYhbwCm3W/ApqNugKdiRMAnIUOAJ+FEwCeVakAkVW/ApDlBgCTzRMAkpGtAJXZEwCU/QYAl0m/Apa1ugKJmRMAiJETAIs1vwKK9QYAjdm8AozVugKPuRMAjoETAIGtEwCA7boCgxm/AoLdBgCF8bwChBGqAIcVigKGrRMAgD2sAIFhEgCCQQcAg2USAIQZuwKF5b4Chhm9AofpvgKIIbsCidm+AopFEgCLXRIAjSkAgM3pAICOzaoAj8mLApCdiwKRpYsCkrGqAJOxqgCU2akAldmpAJb5qQCX+akAmJWqAJmRiwKatYsCm42LApyJqgCdiaoAnvGpAJ/xqQCgIakAoSGpAKJ9qgCjeYsCpE2LAqV1iwKmYaoAp2GqAKgpqQCpKakAqgmpAKsJqQCsRaoArUGLAq5liwKvXYsCsDmqALE5qgCyQakAs0GpALRxqQC1cakAti2qALcpiwK4PYsCuQWLAroRqgC7EaoAvHmpAL15qQC+WakAv1mpAIKJIwBtKwCAcSsAgI0rAIC+6QCAh5kjAJEpAIB5KwCAyOkAgIu5JACpKwCAifkkAI6VIwCPiSMAsSsAgI2JJACSvSMAESsAgLkrAICR4SMAo+sAgJfFIwCU8SMA4SsAgJkpAICbkSMA+SsAgJndIwD9KwCAnwktAAksAICdjdUAogkjAJ0pAIBBLACAofUjAEUsAICnGSMApCUkAG0sAICq7SQAeSwAgKgdIwCpeSQArhUjAK8JIwCsCSQArQkkALI9IwCJLACAsDEjALFhIwC2VSMAt0UjALRxIwC1XSMAulkjALsRIwCRLACAuV0jAL6JLQCVLACAvI0tANzpAICAuSUAgX0iAIKBIgCDmSIAhK0lAIXZJQCGuSIAh5EiAIiVIgCJ8SUAljIAgIuxJQCMgSUAjYElAI6dIgCPgSIAkLkiAJHpIgCStSIAk9EiAJT5IgCV1SIAlt0iAJfNIgCY+SIAmdUiAJrRIgCbmSIAqSwAgLEsAIDh6QCAvSwAgGUAAACh/SIAogEiAKMZIgDFLACApVklAKY5IgCnESIAqBUiAKlxJQDNLACAqzElAKwBJQCtASUArh0iAK8BIgCwOSIAsWkiALI1IgCzUSIAtHkiALVVIgC2XSIAt00iALh5IgC5VSIAulEiALsZIgD1LACA4SwAgO0sAIDxLACAgI0vAIGlLwCCrS8Ag70vAISlLwCFrS8AhqUvAIfdLwCI5S8Aie0vAIrlLwD5LACAAS0AgAUtAIANLQCAFS0AgJCRLwCRkS8AkpEvAJORLwCUsS8AlbEvAJa1LwCXRTMAmE0zAJlVMwCaPTMAmxkzAJyZMwCdiTMAnlUwAJ9JMACgwTAAockwAKLZMACj1TAApM0wAKX9MACm5TAApzUwAKi1MQCpuTEAqu0xAKuxmgCs0ZYArbE6AK61OgAZLQCAsEGUALHNlgCy1ZoAs8GWALTBlgC14ZoAtsGWALf9lgC4yZYAucGWALrZlgC71ZYAvLGWAL29lgC+qZYAv6WWAMUAAAChfSAAooEgACktAICkrScALS0AgDktAICnkSAAXS0AgKnxJwCqZScAq7EnAKyBJwCtgScArp0gAK+BIACwuSAAsekgALK1IABhLQCAtPkgALXVIAC23SAAt80gAEUtAIC51SAATS0AgLuZIACpLQCAcS0AgHUtAIB5LQCAgDknAIH9IACCASAAgxkgAG0tAICFWScAhjkgAIcRIACIFSAAiXEnAIrlJwCLMScAjAEnAI0BJwCOHSAAjwEgAJA5IACRaSAAkjUgAJNRIACUeSAAlVUgAJZdIACXTSAAmHkgAJlVIACaUSAAmxkgAJyFLgCdBdYAnoEuAJ+BLgCArT8AgbU/AIK9PwCDtT8AhK0/AIW5yACG1T8Ah80/AIj1PwCJ/T8AipnIAIvxPwCMATsAjQE7AI6NyACPOQQAkEkEAJFJBACSWQQAk1UEAJRNBACV3TwAlnkEAJd1BACYWQQAmSEEAJohBACbNdQAnCEEAJ3Z5gCeJQQAnx0EAKDpBACh9QQAos0/AKP1BACkFQQApfnUAKYhyACnIcgAqNHUAKktBACqOQQAq03CAKwtBACtdcgArh0EAK95BACwKQQAsTEEALI9BACzOQQAtC0EALX9BQC2qQUAt6kFALiZBQC5mQUAunkFALtFBQC8AQUAvQ
EFAL4BBQC/AQUAgC0HAIE1BwCCPQcAgzUHAIQtBwCFqQcAhqUHAIdl1QCILQYAiTEGAIoxBgCLDQYAjPnJAI15BgCOWQYAj1UGAJBpyQCRNQYAkj0GAJM1BgCULQYAlcUGAJZdAwCXVQMAmG0DAJl1AwCafQMAm3UDAJxtAwCdET0AnlkDAJ9ZAwCgqQMAoakDAKK5AwCjuQMApKkDAKWpAwCm2QMAp9kDAKjpAwCp6QMAqvkDAKv9AwCs5QMAre0DAK7lAwCvbcMAsKEDALGhAwCyoQMAs6EDALShAwC1zeYAtq0DALelAwC4yeYAuZkDALppAwC7aQMAvHkDAL15AwC+aQMAv2kDAIAAAACBLQCAfS0AgJUtAIDm6QCAsS0AgLUtAIC9LQCA0S0AgPQtAIDr6QCA8OkAgAAuAIAELgCACC4AgPwtAIAQLgCAoSkAgKUpAIAYLgCAIC4AgPXpAIA8LgCAQC4AgEwuAID66QCAVC4AgFguAIA3LwCAqSkAgGwuAICILgCAhC4AgATqAICQLgCACeoAgJwuAICYLgCAoC4AgLAuAIC0LgCArSkAgMQuAIDMLgCA0C4AgNQuAICxKQCADuoAgLUpAID3LgCA+y4AgP8uAIDV6wCAGOoAgNo1AIAvLwCAuSkAgDvqAIAN6wCAPy8AgEcvAIC9KQCAWy8AgGsvAICqIfQAq7U/AKilPwCpzecArkXwAK+hPwCsSfAArTH0AKJl4gCjvT8AoLk/AKG5PwCmlT8Ap50/AKSlPwClnT8Augk8AG8vAIC4CTwAuQk8AHcvAICHLwCAxSkAgMEpAICy3T8AswU9ALBN7wCx1T8Atn3wALe55AC0HT0AtWk8AB3qAICPLwCAoy8AgKcvAIC3LwCAyy8AgMMvAIDHLwCAgrX7AM8vAICA/T8AgfU/AOMvAIDnLwCA/y8AgAcwAICavT8Am/3NAJi9PwCZtT8Anlk/AJ9ZPwCcWT8AnVk/AJKBPwCTaekAkHnkAJGxPwCWgT8Al4H0AJQh5wCVmT8AFzAAgCswAIAs6gCAJzAAgBswAIAzMACAOzAAgE8wAIAx6gCAVzAAgEoAAABLMACAQzAAgMkpAIBfMACAZzAAgG8wAIBjMACAzSkAgIcwAIA26gCAszAAgPUwAIDRMACA2SkAgNUpAIDRKQCAnSsAgKErAID5MACA4TAAgK41AIA9KgCADTEAgCExAIAZMQCAT+oAgN0pAIA1MQCAKTEAgFIxAIBZ6gCAXjEAgD0xAIBmMQCAajEAgG4xAIByMQCAfjEAgF7qAICGMQCA5SkAgJIxAIBj6gCAljEAgOkpAICiMQCArjEAgL4xAIBo6gCA/+kAgG3qAIDeMQCAcuoAgLgJAQC5CQEAuhkBALsZAQC8CQEAvQkBAL45AQC/OQEAsM3FALE1zACymQ4As5kOALSJDgC1iQ4AtjkBALc5AQCo6dkAqckOAKrZDgCrqcUArMUOAK3NDgCuxQ4Ar/kOAKA1DgChPQ4AojUOAKOxxQCk8Q4ApfEOAKbxDgCn8Q4AmGkPAJlpDwCaeQ8Am3kPAJxpDwCdaQ8Ant0OAJ/NDgCQ+eoAkXEPAJJ9DwCTdQ8AlG0PAJVpDwCWWQ8Al1kPAIh5DwCJeQ8AigkPAIsJDwCMGQ8AjRkPAI4NzACPDQ8AgHkPAIF5DwCCSQ8Ag0kPAIRZDwCFWQ8AhkkPAIdJDwCKUQIAi1ECAIj5xgCJQQIAjnECAI/txgCMQQIAjUECAIIVAgCDHQIAgAUCAIEdAgCGdQIAh30CAIQFAgCFfQIAmsUCAJvNAgCYkc8AmYXaAJ7FAgCfzQIAnNUCAJ3NAgCSDQIAkxUCAJANAgCRBQIAlg0CAJf1AgCUDQIAlQUCAKo9AgCrRQIAqD0CAKk1AgCuXQIAr0UCAKxdAgCtVQIAol3GAKMBAgCgNQIAoQ0CAKYBAgCnxdgApBECAKURAgC6OQIAuzkCALg5AgC5OQIAvtkBAL/ZAQC82QEAvdkBALI9AgCzBQIAsD0CALE1AgC2GQIAtxkCALQdAgC16cIA6jEAgPIxAIDiMQCA/jEAgA4yAIAWMgCAIjIAgCYyAIB36gCACjIAgD4yAIBCMgCA7SkAgFIyAIB86gCANjIAgHIyAICB6gCAhuoAgHYyAICKMgCAgjIAgPEpAICOMgCAnjIAgJoyAICmMgCAw+kAgLYyAICL6gCAwjIAgJXqAIDWMgCA9jIAgJrqAIAKMwCADjMAgJ/qAICk6gCAKjMAgDozAID1KQCAPjMAgPkpAIBWMwCAWjMAgGYzAIByMwCA/SkAgIozAICp6gCApjMAgK7qAIAT6gCAwjMAgLPqAIC4AAAAuOoAgL3qAIABKgCABSoAgMfqAIDC6gCAzOoAgIAB3gCB8QcAgvEHAIPxBwCEFQIAhR0CAIYVAgCHEQIAiCXeAIld3gCKOQIAizkCAIwpAgCNKQIAjhkCAI99ygCQTd4AkWECAJJhAgCT7cEAlH0CAJVlAgCWIcAAl2kCAJhZAgCZMcIAmlUCAJstAgCcNQIAnT0CAJ4xAgCfMQIAoNECAKHRAgCi0QIAo9ECAKTxAgCl8QIApvECAKfxAgCo0QIAqdECAKrRAgCr0QIArDECAK0xAgCuMQIArzECALBRAgCxUQIAslECALNRAgC0cQIAtXECALZxAgC3cQIAuFECALlRAgC6+dwAu1UCALxNAgC9NQIAvj0CAL81AgC+7QYAv/UGALztBgC95QYAuskGALvJBgC4xcsAuckGALbtBgC39QYAtO0GALXlBgCyjQYAs/UGALDR3QCxhQYArvEGAK/xBgCs5QYAreEGAKr1BgCr/QYAqMUGAKn9BgCm9QYAp/0GAKTlBgCl/QYAovUGAKP9BgCg+QYAoZ3dAJ75BgCf+QYAnPkGAJ35BgCa+QYAm/kGAJj5BgCZ+QYAlvkGAJf5BgCUcd0AlfkGAJL9BgCT5QYAkP0GAJH1BgCO/QYAj4UGAIz9BgCN9QYAiuEGAIsB3QCI8QYAifEGAIbBBgCHwQYAhPEGAIXxBgCCkccAg+EGAIDpBgCBxcAAgAAAANHqAIACNACABjQAgBI0AIARKgCAFSoAgNvqAIAmNACAGSoAgODqAIDl6gCA6uoAgJY0AIAdKgCAojQAgKY0AIDv6gCA9OoAgL40AIAhKgCA+eoAgNI0AIDWNACAJSoAgP7qAIDyNACAKSoAgAI1AID6NACACjUAgAjrAIAiNQCALSoAgC41AIA2NQCARjUAgDEqAIAS6wCAF+sAgDUqAIAc6wCAXjUAgCHrAIBqNQCAdjUAgCbrAIAr6wCAkjUAgDDrAICaNQCAQOoAgDkqAICyNQCAtjUAgEEqAIC6NQCAFC4AgDXrAIA66wCAReoAgErqAIDeNQCA9jcAgIDNAQCB1QEAgt0BAIPVAQCEzQEAhfUBAIb9AQCH9QEAiM0BAInVAQCK3QEAi/UJAIzJAQCNyQEAjgEcAI89HwCQRR8AkU0fAJJFHwCTXR8AlEUfAJVNHwCWRR8Al30fA
JhBxwCZQR8AmkEfAJtBHwCcQR8AnUEfAJ5BHwCfYd8AoL0fAKHFHwCizR8Ao8UfAKTdHwClxR8Aps0fAKfFHwCo/R8AqcUfAKrNHwCrxR8ArN0fAK3FHwCuzR8Ar8UfALC9HwCxRR8Ask0fALNFHwC0/ckAtVkfALZJHwC3SR8AuHkfALl5HwC6SR8Au8XdALxVHwC9XR8AvlUfAL9NHwAKNgCABjYAgA42AIAZLACAEjYAgBY2AIAaNgCAIjYAgD/rAIAmNgCAOjYAgD42AIAqNgCAQjYAgFY2AIA2NgCASjYAgE42AIBSNgCAROsAgE7rAIBJ6wCASSoAgHI2AIB2NgCAfjYAgGLrAICCNgCAU+sAgE0qAIBRKgCAWOsAgF3rAIBVKgCAojYAgKo2AICuNgCAujYAgLY2AIDCNgCAvjYAgMY2AIDKNgCA0jYAgFkqAIDaNgCA3jYAgF0qAIDuNgCAZ+sAgP42AIACNwCAYSoAgA43AICVKQCAbOsAgHHrAIBlKgCAaSoAgDo3AIB26wCAkjcAgJY3AICuNwCAgLUBAIG9AQCCtQEAg80BAITt9ACF0QEAhtEBAIfRAQCI8QEAifEBAIrxAQCL8QEAjNEBAI3RAQCO0QEAj9EBAJB9wwCRBcMAkl35AJO9AQCUpQEAla0BAJalAQCXXQMAmGUDAJltAwCaZQMAm30DAJxlAwCdbQMAnmUDAJ85wwCgoQMAoaEDAKKhAwCjoQMApKEDAKWhAwCmoQMAp6EDAKjhAwCp4QMAquEDAKvhAwCs4QMAreEDAK7hAwCv4QMAsKEDALGhAwCyoQMAs6EDALShAwC1oQMAtqEDALehAwC4YQMAuWEDALphAwC7YQMAvGEDAL1hAwC+pcMAv6HDALo3AICA6wCA0ukAgMY3AIDCNwCAzjcAgNfpAIDaNwCAhesAgIrrAIAmOACAMjgAgDo4AICP6wCAPjgAgGY4AIByOACAdjgAgG44AICCOACAhjgAgJTrAICSOACAbSoAgJo4AICZ6wCAcSoAgNI4AICkLgCA6jgAgJ7rAICo6wCAdSoAgHkqAIASOQCAresAgH0qAICy6wCAMjkAgLfrAIBKOQCAgSoAgFo5AIBmOQCAbjkAgHY5AICFKgCAvOsAgKY5AICyOQCAiSoAgI0qAIC2OQCAwesAgJEqAIDG6wCAy+sAgNDrAICVKgCA9jkAgPo5AIACOgCACjoAgNrrAICQ1QEAkd0BAJLVAQCT7QEAlPUBAJXB+wCW8QEAl/n7AJjNAQCZ1QEAmt0BAJvVAQCcyfsAnckBAEUqAICPAAAAgNkBAIHZAQCC6QEAg+kBAIT5AQCF+QEAhukBAIfpAQCI2QEAidkBAIoJwQCLrQEAjLUBAI29AQCOtQEAj60BAKAAAAChAAAAogAAAKMAAACkAAAApQAAAKYAAACnAAAAqAAAAKkAAACqAAAAqwAAAKwAAACtAAAArgAAAK8AAACwAAAAsQAAALIAAACzAAAAtAAAALUAAAC2AAAAtwAAALgAAAC5AAAAugAAALsAAAC8AAAAvQAAAL4AAAC/AAAAACAAIMyBACDMgwAgzIQAIMyFACDMhgAgzIcAIMyIACDMiMyAACDMiMyBACDMiM2CACDMigAgzIsAIMyTACDMk8yAACDMk8yBACDMk82CACDMlAAgzJTMgAAgzJTMgQAgzJTNggAgzKcAIMyoACDMswAgzYIAIM2FACDZiwAg2YwAINmM2ZEAINmNACDZjdmRACDZjgAg2Y7ZkQAg2Y8AINmP2ZEAINmQACDZkNmRACDZkQAg2ZHZsAAg2ZIAIOOCmQAg44KaACEAISEAIT8AIgAjACQAJQAmACcAKAAoMSkAKDEwKQAoMTEpACgxMikAKDEzKQAoMTQpACgxNSkAKDE2KQAoMTcpACgxOCkAKDE5KQAoMikAKDIwKQAoMykAKDQpACg1KQAoNikAKDcpACg4KQAoOSkAKEEpAChCKQAoQykAKEQpAChFKQAoRikAKEcpAChIKQAoSSkAKEopAChLKQAoTCkAKE0pAChOKQAoTykAKFApAChRKQAoUikAKFMpAChUKQAoVSkAKFYpAChXKQAoWCkAKFkpAChaKQAoYSkAKGIpAChjKQAoZCkAKGUpAChmKQAoZykAKGgpAChpKQAoaikAKGspAChsKQAobSkAKG4pAChvKQAocCkAKHEpAChyKQAocykAKHQpACh1KQAodikAKHcpACh4KQAoeSkAKHopACjhhIApACjhhIIpACjhhIMpACjhhIUpACjhhIYpACjhhIcpACjhhIkpACjhhIspACjhhIwpACjhhI4pACjhhI8pACjhhJApACjhhJEpACjhhJIpACjkuIApACjkuIMpACjkuIkpACjkuZ0pACjkuowpACjkupQpACjku6MpACjkvIEpACjkvJEpACjlhaspACjlha0pACjlirQpACjljYEpACjljZQpACjlkI0pACjlkbwpACjlm5spACjlnJ8pACjlraYpACjml6UpACjmnIgpACjmnIkpACjmnKgpACjmoKopACjmsLQpACjngaspACjnibkpACjnm6MpACjnpL4pACjnpZ0pACjnpa0pACjoh6opACjoh7MpACjosqEpACjos4cpACjph5EpACjqsIApACjrgpgpACjri6QpACjrnbwpACjrp4gpACjrsJQpACjsgqwpACjslYQpACjsmKTsoIQpACjsmKTtm4QpACjsnpApACjso7wpACjssKgpACjsubQpACjtg4ApACjtjIwpACjtlZgpACkAKgArACwALQAuAC4uAC4uLgAvADAAMCwAMC4AMOKBhDMAMOeCuQAxADEsADEuADEwADEwLgAxMOaXpQAxMOaciAAxMOeCuQAxMQAxMS4AMTHml6UAMTHmnIgAMTHngrkAMTIAMTIuADEy5pelADEy5pyIADEy54K5ADEzADEzLgAxM+aXpQAxM+eCuQAxNAAxNC4AMTTml6UAMTTngrkAMTUAMTUuADE15pelADE154K5ADE2ADE2LgAxNuaXpQAxNueCuQAxNwAxNy4AMTfml6UAMTfngrkAMTgAMTguADE45pelADE454K5ADE5ADE5LgAxOeaXpQAxOeeCuQAx4oGEADHigYQxMAAx4oGEMgAx4oGEMwAx4oGENAAx4oGENQAx4oGENgAx4oGENwAx4oGEOAAx4oGEOQAx5pelADHmnIgAMeeCuQAyADIsADIuADIwADIwLgAyMOaXpQAyMOeCuQAyMQAyMeaXpQAyMeeCuQAyMgAyMuaXpQAyMueCuQAyMwAyM+aXpQAyM+eCuQAyNAAyNOaXpQAyNOeCuQAyNQAyNeaXpQAyNgAyNuaXpQAyNwAyN+aXpQAyOAAyOOaXpQAyOQAyOeaXpQAy4oGEMwAy4oGENQAy5pelADLmnIgAMueCuQAzADMsADMuADMwADMw5pelADMxADMx5pelADMyADMzADM0ADM1ADM2ADM3ADM4ADM5ADPigYQ0ADPigYQ1ADPi
gYQ4ADPml6UAM+aciAAz54K5ADQANCwANC4ANDAANDEANDIANDMANDQANDUANDYANDcANDgANDkANOKBhDUANOaXpQA05pyIADTngrkANQA1LAA1LgA1MAA14oGENgA14oGEOAA15pelADXmnIgANeeCuQA2ADYsADYuADbml6UANuaciAA254K5ADcANywANy4AN+KBhDgAN+aXpQA35pyIADfngrkAOAA4LAA4LgA45pelADjmnIgAOOeCuQA5ADksADkuADnml6UAOeaciAA554K5ADoAOjo9ADsAPAA9AD09AD09PQA+AD8APyEAPz8AQABBAEFVAEHiiJVtAEIAQnEAQwBDRABDby4AQ+KIlWtnAEQAREoARFoARHoARMW9AETFvgBFAEYARkFYAEcAR0IAR0h6AEdQYQBHeQBIAEhQAEhWAEhnAEh6AEkASUkASUlJAElKAElVAElWAElYAEoASwBLQgBLSwBLTQBMAExKAExURABMagBMwrcATQBNQgBNQwBNRABNSHoATVBhAE1WAE1XAE3OqQBOAE5KAE5qAE5vAE8AUABQSABQUE0AUFBWAFBSAFBURQBQYQBRAFIAUnMAUwBTRABTTQBTUwBTdgBUAFRFTABUSHoAVE0AVQBWAFZJAFZJSQBWSUlJAFbiiJVtAFcAV0MAV1oAV2IAWABYSQBYSUkAWQBaAFsAXABdAF4AXwBgAGEAYS5tLgBhL2MAYS9zAGHKvgBiAGJhcgBjAGMvbwBjL3UAY2FsAGNjAGNkAGNtAGNtMgBjbTMAZABkQgBkYQBkbABkbQBkbTIAZG0zAGR6AGTFvgBlAGVWAGVyZwBmAGZmAGZmaQBmZmwAZmkAZmwAZm0AZwBnYWwAaABoUGEAaGEAaQBpaQBpaWkAaWoAaW4AaXYAaXgAagBrAGtBAGtIegBrUGEAa1YAa1cAa2NhbABrZwBrbABrbQBrbTIAa20zAGt0AGvOqQBsAGxqAGxtAGxuAGxvZwBseABswrcAbQBtMgBtMwBtQQBtVgBtVwBtYgBtZwBtaWwAbWwAbW0AbW0yAG1tMwBtb2wAbXMAbeKIlXMAbeKIlXMyAG4AbkEAbkYAblYAblcAbmoAbm0AbnMAbwBvVgBwAHAubS4AcEEAcEYAcFYAcFcAcGMAcHMAcQByAHJhZAByYWTiiJVzAHJhZOKIlXMyAHMAc3IAc3QAdAB1AHYAdmkAdmlpAHZpaWkAdwB4AHhpAHhpaQB5AHoAewB8AH0AwqIAwqMAwqUAwqYAwqwAwrBDAMKwRgDCtwDDgADDgQDDggDDgwDDhADDhQDDhgDDhwDDiADDiQDDigDDiwDDjADDjQDDjgDDjwDDkQDDkgDDkwDDlADDlQDDlgDDmQDDmgDDmwDDnADDnQDDoADDoQDDogDDowDDpADDpQDDpwDDqADDqQDDqgDDqwDDrADDrQDDrgDDrwDDsADDsQDDsgDDswDDtADDtQDDtgDDuQDDugDDuwDDvADDvQDDvwDEgADEgQDEggDEgwDEhADEhQDEhgDEhwDEiADEiQDEigDEiwDEjADEjQDEjgDEjwDEkgDEkwDElADElQDElgDElwDEmADEmQDEmgDEmwDEnADEnQDEngDEnwDEoADEoQDEogDEowDEpADEpQDEpgDEpwDEqADEqQDEqgDEqwDErADErQDErgDErwDEsADEsQDEtADEtQDEtgDEtwDEuQDEugDEuwDEvADEvQDEvgDFgwDFhADFhQDFhgDFhwDFiADFiwDFjADFjQDFjgDFjwDFkADFkQDFkwDFlADFlQDFlgDFlwDFmADFmQDFmgDFmwDFnADFnQDFngDFnwDFoADFoQDFogDFowDFpADFpQDFqADFqQDFqgDFqwDFrADFrQDFrgDFrwDFsADFsQDFsgDFswDFtADFtQDFtgDFtwDFuADFuQDFugDFuwDFvADFvQDFvgDGjgDGkADGoADGoQDGqwDGrwDGsADHjQDHjgDHjwDHkADHkQDHkgDHkwDHlADHlQDHlgDHlwDHmADHmQDHmgDHmwDHnADHngDHnwDHoADHoQDHogDHowDHpgDHpwDHqADHqQDHqgDHqwDHrADHrQDHrgDHrwDHsADHtADHtQDHuADHuQDHugDHuwDHvADHvQDHvgDHvwDIgADIgQDIggDIgwDIhADIhQDIhgDIhwDIiADIiQDIigDIiwDIjADIjQDIjgDIjwDIkADIkQDIkgDIkwDIlADIlQDIlgDIlwDImADImQDImgDImwDIngDInwDIogDIpgDIpwDIqADIqQDIqgDIqwDIrADIrQDIrgDIrwDIsADIsQDIsgDIswDItwDJkADJkQDJkgDJlADJlQDJmQDJmwDJnADJnwDJoQDJowDJpQDJpgDJqADJqQDJqgDJqwDJrQDJrwDJsADJsQDJsgDJswDJtADJtQDJuADJuQDJuwDKgQDKggDKgwDKiQDKigDKiwDKjADKkADKkQDKkgDKlQDKnQDKnwDKuQDKvG4AzIAAzIEAzIjMgQDMkwDOhgDOiADOiQDOigDOjADOjgDOjwDOkADOkQDOkgDOkwDOlADOlQDOlgDOlwDOmADOmQDOmgDOmwDOnADOnQDOngDOnwDOoADOoQDOowDOpADOpQDOpgDOpwDOqADOqQDOqgDOqwDOrADOrQDOrgDOrwDOsADOsQDOsgDOswDOtADOtQDOtgDOtwDOuADOuQDOugDOuwDOvADOvEEAzrxGAM68VgDOvFcAzrxnAM68bADOvG0AzrxzAM69AM6+AM6/AM+AAM+BAM+CAM+DAM+EAM+FAM+GAM+HAM+IAM+JAM+KAM+LAM+MAM+NAM+OAM+cAM+dANCAANCBANCDANCHANCMANCNANCOANCZANC5ANC9ANGKANGMANGQANGRANGTANGXANGcANGdANGeANG2ANG3ANOBANOCANOQANORANOSANOTANOWANOXANOaANObANOcANOdANOeANOfANOiANOjANOkANOlANOmANOnANOqANOrANOsANOtANOuANOvANOwANOxANOyANOzANO0ANO1ANO4ANO5ANWl1oIA1bTVpQDVtNWrANW01a0A1bTVtgDVvtW2ANeQANeQ1rcA15DWuADXkNa8ANeQ15wA15EA15HWvADXkda/ANeSANeS1rwA15MA15PWvADXlADXlNa8ANeV1rkA15XWvADXlta8ANeY1rwA15nWtADXmda8ANea1rwA15sA15vWvADXm9a/ANecANec1rwA150A157WvADXoNa8ANeh1rwA16IA16PWvADXpNa8ANek1r8A16bWvADXp9a8ANeoANeo1rwA16nWvADXqda814EA16nWvNeCANep14EA16nXggDXqgDXqta8ANey1rcA2KEA2KIA2KMA2KQA2KUA2KYA2KbYpwDYptisANim2K0A2KbYrgDYptixANim2LIA2KbZhQDYptmGANim2YcA2KbZiADYptmJANim2YoA2KbbhgDYptuHANim24gA2KbbkADYptu
VANinANin2YPYqNixANin2YTZhNmHANin2YsA2KfZtADYqADYqNisANio2K0A2KjYrdmKANio2K4A2KjYrtmKANio2LEA2KjYsgDYqNmFANio2YYA2KjZhwDYqNmJANio2YoA2KkA2KoA2KrYrADYqtis2YUA2KrYrNmJANiq2KzZigDYqtitANiq2K3YrADYqtit2YUA2KrYrgDYqtiu2YUA2KrYrtmJANiq2K7ZigDYqtixANiq2LIA2KrZhQDYqtmF2KwA2KrZhditANiq2YXYrgDYqtmF2YkA2KrZhdmKANiq2YYA2KrZhwDYqtmJANiq2YoA2KsA2KvYrADYq9ixANir2LIA2KvZhQDYq9mGANir2YcA2KvZiQDYq9mKANisANis2K0A2KzYrdmJANis2K3ZigDYrNmEINis2YTYp9mE2YcA2KzZhQDYrNmF2K0A2KzZhdmJANis2YXZigDYrNmJANis2YoA2K0A2K3YrADYrdis2YoA2K3ZhQDYrdmF2YkA2K3ZhdmKANit2YkA2K3ZigDYrgDYrtisANiu2K0A2K7ZhQDYrtmJANiu2YoA2K8A2LAA2LDZsADYsQDYsdiz2YjZhADYsdmwANix24zYp9mEANiyANizANiz2KwA2LPYrNitANiz2KzZiQDYs9itANiz2K3YrADYs9iuANiz2K7ZiQDYs9iu2YoA2LPYsQDYs9mFANiz2YXYrADYs9mF2K0A2LPZhdmFANiz2YcA2LPZiQDYs9mKANi0ANi02KwA2LTYrNmKANi02K0A2LTYrdmFANi02K3ZigDYtNiuANi02LEA2LTZhQDYtNmF2K4A2LTZhdmFANi02YcA2LTZiQDYtNmKANi1ANi12K0A2LXYrditANi12K3ZigDYtdiuANi12LEA2LXZhNi52YUA2LXZhNmJANi12YTZiSDYp9mE2YTZhyDYudmE2YrZhyDZiNiz2YTZhQDYtdmE25IA2LXZhQDYtdmF2YUA2LXZiQDYtdmKANi2ANi22KwA2LbYrQDYttit2YkA2LbYrdmKANi22K4A2LbYrtmFANi22LEA2LbZhQDYttmJANi22YoA2LcA2LfYrQDYt9mFANi32YXYrQDYt9mF2YUA2LfZhdmKANi32YkA2LfZigDYuADYuNmFANi5ANi52KwA2LnYrNmFANi52YTZitmHANi52YUA2LnZhdmFANi52YXZiQDYudmF2YoA2LnZiQDYudmKANi6ANi62KwA2LrZhQDYutmF2YUA2LrZhdmJANi62YXZigDYutmJANi62YoA2YDZiwDZgNmOANmA2Y7ZkQDZgNmPANmA2Y/ZkQDZgNmQANmA2ZDZkQDZgNmRANmA2ZIA2YEA2YHYrADZgditANmB2K4A2YHYrtmFANmB2YUA2YHZhdmKANmB2YkA2YHZigDZggDZgtitANmC2YTbkgDZgtmFANmC2YXYrQDZgtmF2YUA2YLZhdmKANmC2YkA2YLZigDZgwDZg9inANmD2KwA2YPYrQDZg9iuANmD2YQA2YPZhQDZg9mF2YUA2YPZhdmKANmD2YkA2YPZigDZhADZhNiiANmE2KMA2YTYpQDZhNinANmE2KwA2YTYrNisANmE2KzZhQDZhNis2YoA2YTYrQDZhNit2YUA2YTYrdmJANmE2K3ZigDZhNiuANmE2K7ZhQDZhNmFANmE2YXYrQDZhNmF2YoA2YTZhwDZhNmJANmE2YoA2YUA2YXYpwDZhdisANmF2KzYrQDZhdis2K4A2YXYrNmFANmF2KzZigDZhditANmF2K3YrADZhdit2YUA2YXYrdmF2K8A2YXYrdmKANmF2K4A2YXYrtisANmF2K7ZhQDZhdiu2YoA2YXZhQDZhdmF2YoA2YXZiQDZhdmKANmGANmG2KwA2YbYrNitANmG2KzZhQDZhtis2YkA2YbYrNmKANmG2K0A2YbYrdmFANmG2K3ZiQDZhtit2YoA2YbYrgDZhtixANmG2LIA2YbZhQDZhtmF2YkA2YbZhdmKANmG2YYA2YbZhwDZhtmJANmG2YoA2YcA2YfYrADZh9mFANmH2YXYrADZh9mF2YUA2YfZiQDZh9mKANmH2bAA2YgA2YjYs9mE2YUA2YjZtADZiQDZidmwANmKANmK2KwA2YrYrNmKANmK2K0A2YrYrdmKANmK2K4A2YrYsQDZitiyANmK2YUA2YrZhdmFANmK2YXZigDZitmGANmK2YcA2YrZiQDZitmKANmK2bQA2a4A2a8A2bEA2bkA2boA2bsA2b4A2b8A2oAA2oMA2oQA2oYA2ocA2ogA2owA2o0A2o4A2pEA2pgA2qEA2qQA2qYA2qkA2q0A2q8A2rEA2rMA2roA2rsA2r4A24AA24EA24IA24UA24YA24cA24fZtADbiADbiQDbiwDbjADbkADbkgDbkwDgpJXgpLwA4KSW4KS8AOCkl+CkvADgpJzgpLwA4KSh4KS8AOCkouCkvADgpKkA4KSr4KS8AOCkr+CkvADgpLEA4KS0AOCmoeCmvADgpqLgprwA4Kav4Ka8AOCniwDgp4wA4KiW4Ki8AOCol+CovADgqJzgqLwA4Kir4Ki8AOCosuCovADgqLjgqLwA4Kyh4Ky8AOCsouCsvADgrYgA4K2LAOCtjADgrpQA4K+KAOCviwDgr4wA4LGIAOCzgADgs4cA4LOIAOCzigDgs4sA4LWKAOC1iwDgtYwA4LeaAOC3nADgt50A4LeeAOC5jeC4sgDguqvgupkA4Lqr4LqhAOC7jeC6sgDgvIsA4L2A4L61AOC9guC+twDgvYzgvrcA4L2R4L63AOC9luC+twDgvZvgvrcA4L2x4L2yAOC9seC9tADgvbHgvoAA4L6Q4L61AOC+kuC+twDgvpzgvrcA4L6h4L63AOC+puC+twDgvqvgvrcA4L6y4L2x4L6AAOC+suC+gADgvrPgvbHgvoAA4L6z4L6AAOGApgDhg5wA4YSAAOGEgQDhhIIA4YSDAOGEhADhhIUA4YSGAOGEhwDhhIgA4YSJAOGEigDhhIsA4YSMAOGEjQDhhI4A4YSPAOGEkADhhJEA4YSSAOGElADhhJUA4YSaAOGEnADhhJ0A4YSeAOGEoADhhKEA4YSiAOGEowDhhKcA4YSpAOGEqwDhhKwA4YStAOGErgDhhK8A4YSyAOGEtgDhhYAA4YWHAOGFjADhhZcA4YWYAOGFmQDhhaAA4YWhAOGFogDhhaMA4YWkAOGFpQDhhaYA4YWnAOGFqADhhakA4YWqAOGFqwDhhawA4YWtAOGFrgDhha8A4YWwAOGFsQDhhbIA4YWzAOGFtADhhbUA4YaEAOGGhQDhhogA4YaRAOGGkgDhhpQA4YaeAOGGoQDhhqoA4YasAOGGrQDhhrAA4YaxAOGGsgDhhrMA4Ya0AOGGtQDhh4cA4YeIAOGHjADhh44A4YeTAOGHlwDhh5kA4YedAOGHnwDhh7EA4YeyAOGshgDhrIgA4ayKAOGsjADhrI4A4aySAOGsuwDhrL0A4a2AAOGtgQDhrYMA4b
SCAOG0lgDhtJcA4bScAOG0nQDhtKUA4bW7AOG2hQDhuIAA4biBAOG4ggDhuIMA4biEAOG4hQDhuIYA4biHAOG4iADhuIkA4biKAOG4iwDhuIwA4biNAOG4jgDhuI8A4biQAOG4kQDhuJIA4biTAOG4lADhuJUA4biWAOG4lwDhuJgA4biZAOG4mgDhuJsA4bicAOG4nQDhuJ4A4bifAOG4oADhuKEA4biiAOG4owDhuKQA4bilAOG4pgDhuKcA4bioAOG4qQDhuKoA4birAOG4rADhuK0A4biuAOG4rwDhuLAA4bixAOG4sgDhuLMA4bi0AOG4tQDhuLYA4bi3AOG4uADhuLkA4bi6AOG4uwDhuLwA4bi9AOG4vgDhuL8A4bmAAOG5gQDhuYIA4bmDAOG5hADhuYUA4bmGAOG5hwDhuYgA4bmJAOG5igDhuYsA4bmMAOG5jQDhuY4A4bmPAOG5kADhuZEA4bmSAOG5kwDhuZQA4bmVAOG5lgDhuZcA4bmYAOG5mQDhuZoA4bmbAOG5nADhuZ0A4bmeAOG5nwDhuaAA4bmhAOG5ogDhuaMA4bmkAOG5pQDhuaYA4bmnAOG5qADhuakA4bmqAOG5qwDhuawA4bmtAOG5rgDhua8A4bmwAOG5sQDhubIA4bmzAOG5tADhubUA4bm2AOG5twDhubgA4bm5AOG5ugDhubsA4bm8AOG5vQDhub4A4bm/AOG6gADhuoEA4bqCAOG6gwDhuoQA4bqFAOG6hgDhuocA4bqIAOG6iQDhuooA4bqLAOG6jADhuo0A4bqOAOG6jwDhupAA4bqRAOG6kgDhupMA4bqUAOG6lQDhupYA4bqXAOG6mADhupkA4bqgAOG6oQDhuqIA4bqjAOG6pADhuqUA4bqmAOG6pwDhuqgA4bqpAOG6qgDhuqsA4bqsAOG6rQDhuq4A4bqvAOG6sADhurEA4bqyAOG6swDhurQA4bq1AOG6tgDhurcA4bq4AOG6uQDhuroA4bq7AOG6vADhur0A4bq+AOG6vwDhu4AA4buBAOG7ggDhu4MA4buEAOG7hQDhu4YA4buHAOG7iADhu4kA4buKAOG7iwDhu4wA4buNAOG7jgDhu48A4buQAOG7kQDhu5IA4buTAOG7lADhu5UA4buWAOG7lwDhu5gA4buZAOG7mgDhu5sA4bucAOG7nQDhu54A4bufAOG7oADhu6EA4buiAOG7owDhu6QA4bulAOG7pgDhu6cA4buoAOG7qQDhu6oA4burAOG7rADhu60A4buuAOG7rwDhu7AA4buxAOG7sgDhu7MA4bu0AOG7tQDhu7YA4bu3AOG7uADhu7kA4byAAOG8gQDhvIIA4byDAOG8hADhvIUA4byGAOG8hwDhvIgA4byJAOG8igDhvIsA4byMAOG8jQDhvI4A4byPAOG8kADhvJEA4bySAOG8kwDhvJQA4byVAOG8mADhvJkA4byaAOG8mwDhvJwA4bydAOG8oADhvKEA4byiAOG8owDhvKQA4bylAOG8pgDhvKcA4byoAOG8qQDhvKoA4byrAOG8rADhvK0A4byuAOG8rwDhvLAA4byxAOG8sgDhvLMA4by0AOG8tQDhvLYA4by3AOG8uADhvLkA4by6AOG8uwDhvLwA4by9AOG8vgDhvL8A4b2AAOG9gQDhvYIA4b2DAOG9hADhvYUA4b2IAOG9iQDhvYoA4b2LAOG9jADhvY0A4b2QAOG9kQDhvZIA4b2TAOG9lADhvZUA4b2WAOG9lwDhvZkA4b2bAOG9nQDhvZ8A4b2gAOG9oQDhvaIA4b2jAOG9pADhvaUA4b2mAOG9pwDhvagA4b2pAOG9qgDhvasA4b2sAOG9rQDhva4A4b2vAOG9sADhvbIA4b20AOG9tgDhvbgA4b26AOG9vADhvoAA4b6BAOG+ggDhvoMA4b6EAOG+hQDhvoYA4b6HAOG+iADhvokA4b6KAOG+iwDhvowA4b6NAOG+jgDhvo8A4b6QAOG+kQDhvpIA4b6TAOG+lADhvpUA4b6WAOG+lwDhvpgA4b6ZAOG+mgDhvpsA4b6cAOG+nQDhvp4A4b6fAOG+oADhvqEA4b6iAOG+owDhvqQA4b6lAOG+pgDhvqcA4b6oAOG+qQDhvqoA4b6rAOG+rADhvq0A4b6uAOG+rwDhvrAA4b6xAOG+sgDhvrMA4b60AOG+tgDhvrcA4b64AOG+uQDhvroA4b68AOG/ggDhv4MA4b+EAOG/hgDhv4cA4b+IAOG/igDhv4wA4b+QAOG/kQDhv5IA4b+WAOG/lwDhv5gA4b+ZAOG/mgDhv6AA4b+hAOG/ogDhv6QA4b+lAOG/pgDhv6cA4b+oAOG/qQDhv6oA4b+sAOG/sgDhv7MA4b+0AOG/tgDhv7cA4b+4AOG/ugDhv7wA4oCQAOKAkwDigJQA4oCy4oCyAOKAsuKAsuKAsgDigLLigLLigLLigLIA4oC14oC1AOKAteKAteKAtQDigqkA4oaQAOKGkQDihpIA4oaTAOKGmgDihpsA4oauAOKHjQDih44A4oePAOKIggDiiIQA4oiHAOKIiQDiiIwA4oiRAOKIkgDiiKQA4oimAOKIq+KIqwDiiKviiKviiKsA4oir4oir4oir4oirAOKIruKIrgDiiK7iiK7iiK4A4omBAOKJhADiiYcA4omJAOKJoADiiaIA4omtAOKJrgDiia8A4omwAOKJsQDiibQA4om1AOKJuADiibkA4oqAAOKKgQDiioQA4oqFAOKKiADiiokA4oqsAOKKrQDiiq4A4oqvAOKLoADii6EA4ouiAOKLowDii6oA4ourAOKLrADii60A4pSCAOKWoADil4sA4qaFAOKmhgDiq53MuADitaEA44CBAOOAggDjgIgA44CJAOOAigDjgIsA44CMAOOAjQDjgI4A44CPAOOAkADjgJEA44CSAOOAlADjgJRT44CVAOOAlOS4ieOAlQDjgJTkuozjgJUA44CU5Yud44CVAOOAlOWuieOAlQDjgJTmiZPjgJUA44CU5pWX44CVAOOAlOacrOOAlQDjgJTngrnjgJUA44CU55uX44CVAOOAlQDjgJYA44CXAOOBjADjgY4A44GQAOOBkgDjgZQA44GWAOOBmADjgZoA44GcAOOBngDjgaAA44GiAOOBpQDjgacA44GpAOOBsADjgbEA44GzAOOBtADjgbYA44G3AOOBuQDjgboA44G744GLAOOBvADjgb0A44KI44KKAOOClADjgpkA44KaAOOCngDjgqEA44KiAOOCouODkeODvOODiADjgqLjg6vjg5XjgqEA44Ki44Oz44Oa44KiAOOCouODvOODqwDjgqMA44KkAOOCpOODi+ODs+OCsADjgqTjg7Pjg4EA44KlAOOCpgDjgqbjgqnjg7MA44KnAOOCqADjgqjjgrnjgq/jg7zjg4kA44Ko44O844Kr44O8AOOCqQDjgqoA44Kq44Oz44K5AOOCquODvOODoADjgqsA44Kr44Kk44OqAOOCq+ODqeODg+ODiADjgqvjg63jg6rjg7wA4
4KsAOOCrOODreODswDjgqzjg7Pjg54A44KtAOOCreODpeODquODvADjgq3jg60A44Kt44Ot44Kw44Op44OgAOOCreODreODoeODvOODiOODqwDjgq3jg63jg6/jg4Pjg4gA44KuAOOCruOCrADjgq7jg4vjg7wA44Ku44Or44OA44O8AOOCrwDjgq/jg6vjgrzjgqTjg60A44Kv44Ot44O844ONAOOCsADjgrDjg6njg6AA44Kw44Op44Og44OI44OzAOOCsQDjgrHjg7zjgrkA44KyAOOCswDjgrPjgrMA44Kz44OIAOOCs+ODq+ODigDjgrPjg7zjg50A44K0AOOCtQDjgrXjgqTjgq/jg6sA44K144Oz44OB44O844OgAOOCtgDjgrcA44K344Oq44Oz44KwAOOCuADjgrkA44K6AOOCuwDjgrvjg7Pjg4EA44K744Oz44OIAOOCvADjgr0A44K+AOOCvwDjg4AA44OA44O844K5AOODgQDjg4IA44ODAOODhADjg4UA44OGAOODhwDjg4fjgrcA44OIAOODiOODswDjg4kA44OJ44OrAOODigDjg4rjg44A44OLAOODjADjg40A44OOAOODjuODg+ODiADjg48A44OP44Kk44OEAOODkADjg5Djg7zjg6zjg6sA44ORAOODkeODvOOCu+ODs+ODiADjg5Hjg7zjg4QA44OSAOODkwDjg5Pjg6sA44OUAOODlOOCouOCueODiOODqwDjg5Tjgq/jg6sA44OU44KzAOODlQDjg5XjgqHjg6njg4Pjg4kA44OV44Kj44O844OIAOODleODqeODswDjg5YA44OW44OD44K344Kn44OrAOODlwDjg5gA44OY44Kv44K/44O844OrAOODmOODq+ODhADjg5kA44OZ44O844K/AOODmgDjg5rjgr0A44Oa44OL44OSAOODmuODs+OCuQDjg5rjg7zjgrgA44ObAOODm+ODswDjg5vjg7zjg6sA44Ob44O844OzAOODnADjg5zjg6vjg4gA44OdAOODneOCpOODs+ODiADjg53jg7Pjg4kA44OeAOODnuOCpOOCr+ODrQDjg57jgqTjg6sA44Oe44OD44OPAOODnuODq+OCrwDjg57jg7Pjgrfjg6fjg7MA44OfAOODn+OCr+ODreODswDjg5/jg6oA44Of44Oq44OQ44O844OrAOODoADjg6EA44Oh44KsAOODoeOCrOODiOODswDjg6Hjg7zjg4jjg6sA44OiAOODowDjg6QA44Ok44O844OJAOODpOODvOODqwDjg6UA44OmAOODpuOCouODswDjg6cA44OoAOODqQDjg6oA44Oq44OD44OI44OrAOODquODqQDjg6sA44Or44OU44O8AOODq+ODvOODluODqwDjg6wA44Os44OgAOODrOODs+ODiOOCsuODswDjg60A44OvAOODr+ODg+ODiADjg7AA44OxAOODsgDjg7MA44O0AOODtwDjg7gA44O5AOODugDjg7sA44O8AOODvgDjkp4A45K5AOOSuwDjk58A45SVAOObrgDjm7wA456BAOOgrwDjoaIA46G8AOOjhwDjo6MA46ScAOOkugDjqK4A46msAOOrpADjrIgA46yZAOOtiQDjrp0A47CYAOOxjgDjtLMA47aWAOO6rADjurgA47ybAOO/vADkgIgA5ICYAOSAuQDkgYYA5IKWAOSDowDkhK8A5IiCAOSIpwDkiqAA5IyBAOSMtADkjZkA5I+VAOSPmQDkkIsA5JGrAOSUqwDklZ0A5JWhAOSVqwDkl5cA5Je5AOSYtQDkmr4A5JuHAOSmlQDkp6YA5KmuAOSptgDkqrIA5KyzAOSvjgDks44A5LOtAOSzuADktZYA5LiAAOS4gQDkuIMA5LiJAOS4igDkuIsA5LiNAOS4mQDkuKYA5LioAOS4rQDkuLIA5Li2AOS4uADkuLkA5Li9AOS4vwDkuYEA5LmZAOS5nQDkuoIA5LqFAOS6hgDkuowA5LqUAOS6oADkuqQA5LquAOS6ugDku4AA5LuMAOS7pADkvIEA5LyRAOS9oADkvoAA5L6GAOS+iwDkvq4A5L67AOS+vwDlgIIA5YCrAOWBugDlgpkA5YOPAOWDmgDlg6cA5YSqAOWEvwDlhYAA5YWFAOWFjQDlhZQA5YWkAOWFpQDlhacA5YWoAOWFqQDlhasA5YWtAOWFtwDlhoAA5YaCAOWGjQDlhpIA5YaVAOWGlgDlhpcA5YaZAOWGpADlhqsA5YasAOWGtQDlhrcA5YeJAOWHjADlh5wA5YeeAOWHoADlh7UA5YiAAOWIgwDliIcA5YiXAOWInQDliKkA5Yi6AOWIuwDliYYA5YmNAOWJsgDlibcA5YqJAOWKmwDliqMA5YqzAOWKtADli4cA5YuJAOWLkgDli54A5YukAOWLtQDli7kA5Yu6AOWMhQDljIYA5YyVAOWMlwDljJoA5Yy4AOWMuwDljL8A5Y2BAOWNhADljYUA5Y2JAOWNkQDljZQA5Y2aAOWNnADljakA5Y2wAOWNswDljbUA5Y29AOWNvwDljoIA5Y62AOWPgwDlj4gA5Y+KAOWPjADlj58A5Y+jAOWPpQDlj6sA5Y+vAOWPsQDlj7MA5ZCGAOWQiADlkI0A5ZCPAOWQnQDlkLgA5ZC5AOWRggDlkYgA5ZGoAOWSngDlkqIA5ZK9AOWTtgDllJAA5ZWPAOWVkwDllZUA5ZWjAOWWhADllocA5ZaZAOWWnQDllqsA5ZazAOWWtgDll4AA5ZeCAOWXogDlmIYA5ZmRAOWZqADlmbQA5ZuXAOWbmwDlm7kA5ZyWAOWclwDlnJ8A5ZywAOWeiwDln44A5Z+0AOWgjQDloLEA5aCyAOWhgADloZoA5aGeAOWiqADloqwA5aKzAOWjmADlo58A5aOrAOWjrgDlo7AA5aOyAOWjtwDlpIIA5aSGAOWkigDlpJUA5aSaAOWknADlpKIA5aSnAOWkp+atowDlpKkA5aWEAOWliADlpZEA5aWUAOWlogDlpbMA5aeYAOWnrADlqJsA5ainAOWpogDlqaYA5aq1AOWsiADlrKgA5ay+AOWtkADlrZcA5a2mAOWugADlroUA5a6XAOWvgwDlr5gA5a+nAOWvrgDlr7MA5a+4AOWvvwDlsIYA5bCPAOWwogDlsLgA5bC/AOWxoADlsaIA5bGkAOWxpQDlsa4A5bGxAOWyjQDls4AA5bSZAOW1gwDltZAA5bWrAOW1rgDltbwA5bayAOW2ugDlt5sA5behAOW3ogDlt6UA5bemAOW3sQDlt70A5be+AOW4qADluL0A5bmpAOW5sgDlubPmiJAA5bm0AOW5ugDlubwA5bm/AOW6pgDlurAA5bqzAOW6tgDlu4kA5buKAOW7kgDlu5MA5buZAOW7rADlu7QA5bu+AOW8hADlvIsA5byTAOW8ogDlvZAA5b2TAOW9oQDlvaIA5b2pAOW9qwDlvbMA5b6LAOW+jADlvpcA5b6aAOW+qQDlvq0A5b+DAOW/jQDlv5cA
5b+1AOW/uQDmgJIA5oCcAOaBtQDmgoEA5oKUAOaDhwDmg5gA5oOhAOaEiADmhYQA5oWIAOaFjADmhY4A5oWgAOaFqADmhboA5oaOAOaGkADmhqQA5oavAOaGsgDmh54A5oeyAOaHtgDmiIAA5oiIAOaIkADmiJsA5oiuAOaItADmiLYA5omLAOaJkwDmiZ0A5oqVAOaKsQDmi4kA5ouPAOaLkwDmi5QA5ou8AOaLvgDmjIcA5oy9AOaNkADmjZUA5o2oAOaNuwDmjoMA5o6gAOaOqQDmj4QA5o+FAOaPpADmkJwA5pCiAOaRkgDmkakA5pG3AOaRvgDmkpoA5pKdAOaThADmlK8A5pS0AOaVjwDmlZYA5pWsAOaVuADmlocA5paXAOaWmQDmlqQA5pawAOaWuQDml4UA5pegAOaXogDml6MA5pelAOaYjuayuwDmmJMA5pigAOaYreWSjADmmYkA5pm0AOaaiADmmpEA5pqcAOaatADmm4YA5puwAOabtADmm7gA5pyAAOaciADmnIkA5pyXAOacmwDmnKEA5pyoAOadjgDmnZMA5p2WAOadngDmnbsA5p6FAOaelwDmn7MA5p+6AOaglwDmoJ8A5qCqAOagquW8j+S8muekvgDmoZIA5qKBAOaihQDmoo4A5qKoAOaklADmpYIA5qajAOanqgDmqIIA5qiTAOaqqADmq5MA5qubAOashADmrKAA5qyhAOatlADmraIA5q2jAOatsgDmrbcA5q25AOaunwDmrq4A5q6zAOauugDmrrsA5q+LAOavjQDmr5QA5q+bAOawjwDmsJQA5rC0AOaxjgDmsacA5rKIAOayvwDms4wA5rONAOazpQDms6gA5rSWAOa0mwDmtJ4A5rS0AOa0vgDmtYEA5rWpAOa1qgDmtbcA5rW4AOa2hQDmt4sA5reaAOa3qgDmt7kA5riaAOa4rwDmua4A5rqAAOa6nADmuroA5ruHAOa7iwDmu5EA5rubAOa8jwDmvJQA5ryiAOa8owDmva4A5r+GAOa/qwDmv74A54CbAOeAngDngLkA54GKAOeBqwDngbAA54G3AOeBvQDngpkA54KtAOeDiADng5kA54ShAOeFhQDnhYkA54WuAOeGnADnh44A54eQAOeIkADniJsA54ioAOeIqgDniKsA54i1AOeItgDniLsA54i/AOeJhwDniZAA54mZAOeJmwDniaIA54m5AOeKgADnipUA54qsAOeKrwDni4AA54u8AOeMqgDnjbUA5426AOeOhADnjocA546JAOeOiwDnjqUA546yAOePngDnkIYA55CJAOeQogDnkYcA55GcAOeRqQDnkbEA55KFAOeSiQDnkpgA55OKAOeTnADnk6YA55SGAOeUmADnlJ8A55SkAOeUqADnlLAA55SyAOeUswDnlLcA55S7AOeUvgDnlZkA55WlAOeVsADnlosA55aSAOeXogDnmJAA55idAOeYnwDnmYIA55mpAOeZtgDnmb0A55quAOeavwDnm4oA55ubAOebowDnm6cA55uuAOebtADnnIEA55yeAOecnwDnnYAA552KAOeeiwDnnqcA55+bAOefogDnn7MA56GOAOehqwDnoowA56KRAOejigDno4wA56O7AOekqgDnpLoA56S8AOekvgDnpYgA56WJAOelkADnpZYA56WdAOelngDnpaUA56W/AOemgQDnpo0A56aOAOemjwDnpq4A56a4AOemvgDnp4oA56eYAOenqwDnqJwA56mAAOepigDnqY8A56m0AOepugDnqoEA56qxAOeriwDnq64A56u5AOesoADnro8A56+AAOevhgDnr4kA57C+AOexoADnsbMA57G7AOeykgDnsr4A57OSAOezlgDns6MA57OnAOezqADns7gA57SAAOe0kADntKIA57SvAOe1ggDntZsA57WjAOe2oADntr4A57eHAOe3tADnuIIA57iJAOe4twDnuYEA57mFAOe8tgDnvL4A572RAOe9sgDnvbkA5726AOe+hQDnvooA576VAOe+mgDnvr0A57+6AOiAgQDogIUA6ICMAOiAkgDogLMA6IGGAOiBoADoga8A6IGwAOiBvgDogb8A6IKJAOiCiwDogq0A6IKyAOiEgwDohL4A6IeYAOiHowDoh6gA6IeqAOiHrQDoh7MA6Ie8AOiIgQDoiIQA6IiMAOiImADoiJsA6IifAOiJrgDoia8A6ImyAOiJuADoibkA6IqLAOiKkQDoip0A6IqxAOiKswDoir0A6IulAOiLpgDojJ0A6IyjAOiMtgDojZIA6I2TAOiNowDojq0A6I69AOiPiQDoj4oA6I+MAOiPnADoj6cA6I+vAOiPsQDokL0A6JGJAOiRlwDok64A6JOxAOiTswDok7wA6JSWAOiVpADol40A6Je6AOiYhgDomJIA6JitAOiYvwDomY0A6JmQAOiZnADomacA6JmpAOiZqwDomogA6JqpAOibogDonI4A6JyoAOidqwDonbkA6J6GAOieugDon6EA6KCBAOignwDooYAA6KGMAOihoADooaMA6KOCAOijjwDoo5cA6KOeAOijoQDoo7gA6KO6AOikkADopYEA6KWkAOilvgDopoYA6KaLAOimlgDop5IA6KejAOiogADoqqAA6KqqAOiqvwDoq4sA6KuSAOirlgDoq60A6Ku4AOirvgDorIEA6Ky5AOitmADoroAA6K6KAOiwtwDosYYA6LGIAOixlQDosbgA6LKdAOiyoQDosqkA6LKrAOizgQDos4IA6LOHAOiziADos5MA6LSIAOi0mwDotaQA6LWwAOi1twDotrMA6La8AOi3iwDot68A6LewAOi6qwDou4oA6LuUAOi8pgDovKoA6Ly4AOi8uwDovaIA6L6bAOi+ngDovrAA6L61AOi+tgDpgKMA6YC4AOmBigDpgakA6YGyAOmBvADpgo8A6YKRAOmClADpg44A6YOeAOmDsQDpg70A6YSRAOmEmwDphYkA6YWqAOmGmQDphrQA6YeGAOmHjADph48A6YeRAOmItADpiLgA6Ym2AOmJvADpi5cA6YuYAOmMhADpjYoA6Y+5AOmQlQDplbcA6ZaAAOmWiwDplq0A6Za3AOmYnADpmK4A6ZmLAOmZjQDpmbUA6Zm4AOmZvADpmoYA6ZqjAOmatgDpmrcA6Zq4AOmauQDpm4MA6ZuiAOmbowDpm6gA6Zu2AOmbtwDpnKMA6ZyyAOmdiADpnZEA6Z2WAOmdngDpnaIA6Z2pAOmfiwDpn5sA6Z+gAOmfrQDpn7MA6Z+/AOmggQDpoIUA6aCLAOmgmADpoKkA6aC7AOmhngDpoqgA6aObAOmjnwDpo6IA6aOvAOmjvADppKgA6aSpAOmmlgDpppkA6aanAOmmrADpp4IA6aexAOmnvgDpqaoA6aqoAOmrmADpq58A6aySAOmspQDprK8A6ayyAOmsvADprZoA6a2vAOmxgADpsZcA6bOlAOmzvQDptacA6ba0AOm3ugDpuJ4A6bm1AOm5vwDpupcA6bqfAOm6pQDpurs
A6buDAOm7jQDpu44A6buRAOm7uQDpu70A6bu+AOm8hQDpvI4A6byPAOm8kwDpvJYA6bygAOm8uwDpvYMA6b2KAOm9kgDpvo0A6b6OAOm+nADpvp8A6b6gAOqcpwDqna8A6qy3AOqtkgDqsIAA6rCBAOqwggDqsIMA6rCEAOqwhQDqsIYA6rCHAOqwiADqsIkA6rCKAOqwiwDqsIwA6rCNAOqwjgDqsI8A6rCQAOqwkQDqsJIA6rCTAOqwlADqsJUA6rCWAOqwlwDqsJgA6rCZAOqwmgDqsJsA6rCcAOqwnQDqsJ4A6rCfAOqwoADqsKEA6rCiAOqwowDqsKQA6rClAOqwpgDqsKcA6rCoAOqwqQDqsKoA6rCrAOqwrADqsK0A6rCuAOqwrwDqsLAA6rCxAOqwsgDqsLMA6rC0AOqwtQDqsLYA6rC3AOqwuADqsLkA6rC6AOqwuwDqsLwA6rC9AOqwvgDqsL8A6rGAAOqxgQDqsYIA6rGDAOqxhADqsYUA6rGGAOqxhwDqsYgA6rGJAOqxigDqsYsA6rGMAOqxjQDqsY4A6rGPAOqxkADqsZEA6rGSAOqxkwDqsZQA6rGVAOqxlgDqsZcA6rGYAOqxmQDqsZoA6rGbAOqxnADqsZ0A6rGeAOqxnwDqsaAA6rGhAOqxogDqsaMA6rGkAOqxpQDqsaYA6rGnAOqxqADqsakA6rGqAOqxqwDqsawA6rGtAOqxrgDqsa8A6rGwAOqxsQDqsbIA6rGzAOqxtADqsbUA6rG2AOqxtwDqsbgA6rG5AOqxugDqsbsA6rG8AOqxvQDqsb4A6rG/AOqygADqsoEA6rKCAOqygwDqsoQA6rKFAOqyhgDqsocA6rKIAOqyiQDqsooA6rKLAOqyjADqso0A6rKOAOqyjwDqspAA6rKRAOqykgDqspMA6rKUAOqylQDqspYA6rKXAOqymADqspkA6rKaAOqymwDqspwA6rKdAOqyngDqsp8A6rKgAOqyoQDqsqIA6rKjAOqypADqsqUA6rKmAOqypwDqsqgA6rKpAOqyqgDqsqsA6rKsAOqyrQDqsq4A6rKvAOqysADqsrEA6rKyAOqyswDqsrQA6rK1AOqytgDqsrcA6rK4AOqyuQDqsroA6rK7AOqyvADqsr0A6rK+AOqyvwDqs4AA6rOBAOqzggDqs4MA6rOEAOqzhQDqs4YA6rOHAOqziADqs4kA6rOKAOqziwDqs4wA6rONAOqzjgDqs48A6rOQAOqzkQDqs5IA6rOTAOqzlADqs5UA6rOWAOqzlwDqs5gA6rOZAOqzmgDqs5sA6rOcAOqznQDqs54A6rOfAOqzoADqs6EA6rOiAOqzowDqs6QA6rOlAOqzpgDqs6cA6rOoAOqzqQDqs6oA6rOrAOqzrADqs60A6rOuAOqzrwDqs7AA6rOxAOqzsgDqs7MA6rO0AOqztQDqs7YA6rO3AOqzuADqs7kA6rO6AOqzuwDqs7wA6rO9AOqzvgDqs78A6rSAAOq0gQDqtIIA6rSDAOq0hADqtIUA6rSGAOq0hwDqtIgA6rSJAOq0igDqtIsA6rSMAOq0jQDqtI4A6rSPAOq0kADqtJEA6rSSAOq0kwDqtJQA6rSVAOq0lgDqtJcA6rSYAOq0mQDqtJoA6rSbAOq0nADqtJ0A6rSeAOq0nwDqtKAA6rShAOq0ogDqtKMA6rSkAOq0pQDqtKYA6rSnAOq0qADqtKkA6rSqAOq0qwDqtKwA6rStAOq0rgDqtK8A6rSwAOq0sQDqtLIA6rSzAOq0tADqtLUA6rS2AOq0twDqtLgA6rS5AOq0ugDqtLsA6rS8AOq0vQDqtL4A6rS/AOq1gADqtYEA6rWCAOq1gwDqtYQA6rWFAOq1hgDqtYcA6rWIAOq1iQDqtYoA6rWLAOq1jADqtY0A6rWOAOq1jwDqtZAA6rWRAOq1kgDqtZMA6rWUAOq1lQDqtZYA6rWXAOq1mADqtZkA6rWaAOq1mwDqtZwA6rWdAOq1ngDqtZ8A6rWgAOq1oQDqtaIA6rWjAOq1pADqtaUA6rWmAOq1pwDqtagA6rWpAOq1qgDqtasA6rWsAOq1rQDqta4A6rWvAOq1sADqtbEA6rWyAOq1swDqtbQA6rW1AOq1tgDqtbcA6rW4AOq1uQDqtboA6rW7AOq1vADqtb0A6rW+AOq1vwDqtoAA6raBAOq2ggDqtoMA6raEAOq2hQDqtoYA6raHAOq2iADqtokA6raKAOq2iwDqtowA6raNAOq2jgDqto8A6raQAOq2kQDqtpIA6raTAOq2lADqtpUA6raWAOq2lwDqtpgA6raZAOq2mgDqtpsA6racAOq2nQDqtp4A6rafAOq2oADqtqEA6raiAOq2owDqtqQA6ralAOq2pgDqtqcA6raoAOq2qQDqtqoA6rarAOq2rADqtq0A6rauAOq2rwDqtrAA6raxAOq2sgDqtrMA6ra0AOq2tQDqtrYA6ra3AOq2uADqtrkA6ra6AOq2uwDqtrwA6ra9AOq2vgDqtr8A6reAAOq3gQDqt4IA6reDAOq3hADqt4UA6reGAOq3hwDqt4gA6reJAOq3igDqt4sA6reMAOq3jQDqt44A6rePAOq3kADqt5EA6reSAOq3kwDqt5QA6reVAOq3lgDqt5cA6reYAOq3mQDqt5oA6rebAOq3nADqt50A6reeAOq3nwDqt6AA6rehAOq3ogDqt6MA6rekAOq3pQDqt6YA6renAOq3qADqt6kA6reqAOq3qwDqt6wA6retAOq3rgDqt68A6rewAOq3sQDqt7IA6rezAOq3tADqt7UA6re2AOq3twDqt7gA6re5AOq3ugDqt7sA6re8AOq3vQDqt74A6re/AOq4gADquIEA6riCAOq4gwDquIQA6riFAOq4hgDquIcA6riIAOq4iQDquIoA6riLAOq4jADquI0A6riOAOq4jwDquJAA6riRAOq4kgDquJMA6riUAOq4lQDquJYA6riXAOq4mADquJkA6riaAOq4mwDquJwA6ridAOq4ngDquJ8A6rigAOq4oQDquKIA6rijAOq4pADquKUA6rimAOq4pwDquKgA6ripAOq4qgDquKsA6risAOq4rQDquK4A6rivAOq4sADquLEA6riyAOq4swDquLQA6ri1AOq4tgDquLcA6ri4AOq4uQDquLoA6ri7AOq4vADquL0A6ri+AOq4vwDquYAA6rmBAOq5ggDquYMA6rmEAOq5hQDquYYA6rmHAOq5iADquYkA6rmKAOq5iwDquYwA6rmNAOq5jgDquY8A6rmQAOq5kQDquZIA6rmTAOq5lADquZUA6rmWAOq5lwDquZgA6rmZAOq5mgDquZsA6rmcAOq5nQDquZ4A6rmfAOq5oADquaEA6rmiAOq5owDquaQA6rmlAOq5pgDquacA6rmoAOq5qQDquaoA6rmrAOq5rADqua0A6rmuAOq5rwDqubAA6rmxAOq5sgDqubMA6rm0AOq5tQDqubYA6rm3AOq5uADqubkA6rm6AOq5uwDqubwA6rm9AOq5vgDqub8A6r
qAAOq6gQDquoIA6rqDAOq6hADquoUA6rqGAOq6hwDquogA6rqJAOq6igDquosA6rqMAOq6jQDquo4A6rqPAOq6kADqupEA6rqSAOq6kwDqupQA6rqVAOq6lgDqupcA6rqYAOq6mQDqupoA6rqbAOq6nADqup0A6rqeAOq6nwDquqAA6rqhAOq6ogDquqMA6rqkAOq6pQDquqYA6rqnAOq6qADquqkA6rqqAOq6qwDquqwA6rqtAOq6rgDquq8A6rqwAOq6sQDqurIA6rqzAOq6tADqurUA6rq2AOq6twDqurgA6rq5AOq6ugDqursA6rq8AOq6vQDqur4A6rq/AOq7gADqu4EA6ruCAOq7gwDqu4QA6ruFAOq7hgDqu4cA6ruIAOq7iQDqu4oA6ruLAOq7jADqu40A6ruOAOq7jwDqu5AA6ruRAOq7kgDqu5MA6ruUAOq7lQDqu5YA6ruXAOq7mADqu5kA6ruaAOq7mwDqu5wA6rudAOq7ngDqu58A6rugAOq7oQDqu6IA6rujAOq7pADqu6UA6rumAOq7pwDqu6gA6rupAOq7qgDqu6sA6rusAOq7rQDqu64A6ruvAOq7sADqu7EA6ruyAOq7swDqu7QA6ru1AOq7tgDqu7cA6ru4AOq7uQDqu7oA6ru7AOq7vADqu70A6ru+AOq7vwDqvIAA6ryBAOq8ggDqvIMA6ryEAOq8hQDqvIYA6ryHAOq8iADqvIkA6ryKAOq8iwDqvIwA6ryNAOq8jgDqvI8A6ryQAOq8kQDqvJIA6ryTAOq8lADqvJUA6ryWAOq8lwDqvJgA6ryZAOq8mgDqvJsA6rycAOq8nQDqvJ4A6ryfAOq8oADqvKEA6ryiAOq8owDqvKQA6rylAOq8pgDqvKcA6ryoAOq8qQDqvKoA6ryrAOq8rADqvK0A6ryuAOq8rwDqvLAA6ryxAOq8sgDqvLMA6ry0AOq8tQDqvLYA6ry3AOq8uADqvLkA6ry6AOq8uwDqvLwA6ry9AOq8vgDqvL8A6r2AAOq9gQDqvYIA6r2DAOq9hADqvYUA6r2GAOq9hwDqvYgA6r2JAOq9igDqvYsA6r2MAOq9jQDqvY4A6r2PAOq9kADqvZEA6r2SAOq9kwDqvZQA6r2VAOq9lgDqvZcA6r2YAOq9mQDqvZoA6r2bAOq9nADqvZ0A6r2eAOq9nwDqvaAA6r2hAOq9ogDqvaMA6r2kAOq9pQDqvaYA6r2nAOq9qADqvakA6r2qAOq9qwDqvawA6r2tAOq9rgDqva8A6r2wAOq9sQDqvbIA6r2zAOq9tADqvbUA6r22AOq9twDqvbgA6r25AOq9ugDqvbsA6r28AOq9vQDqvb4A6r2/AOq+gADqvoEA6r6CAOq+gwDqvoQA6r6FAOq+hgDqvocA6r6IAOq+iQDqvooA6r6LAOq+jADqvo0A6r6OAOq+jwDqvpAA6r6RAOq+kgDqvpMA6r6UAOq+lQDqvpYA6r6XAOq+mADqvpkA6r6aAOq+mwDqvpwA6r6dAOq+ngDqvp8A6r6gAOq+oQDqvqIA6r6jAOq+pADqvqUA6r6mAOq+pwDqvqgA6r6pAOq+qgDqvqsA6r6sAOq+rQDqvq4A6r6vAOq+sADqvrEA6r6yAOq+swDqvrQA6r61AOq+tgDqvrcA6r64AOq+uQDqvroA6r67AOq+vADqvr0A6r6+AOq+vwDqv4AA6r+BAOq/ggDqv4MA6r+EAOq/hQDqv4YA6r+HAOq/iADqv4kA6r+KAOq/iwDqv4wA6r+NAOq/jgDqv48A6r+QAOq/kQDqv5IA6r+TAOq/lADqv5UA6r+WAOq/lwDqv5gA6r+ZAOq/mgDqv5sA6r+cAOq/nQDqv54A6r+fAOq/oADqv6EA6r+iAOq/owDqv6QA6r+lAOq/pgDqv6cA6r+oAOq/qQDqv6oA6r+rAOq/rADqv60A6r+uAOq/rwDqv7AA6r+xAOq/sgDqv7MA6r+0AOq/tQDqv7YA6r+3AOq/uADqv7kA6r+6AOq/uwDqv7wA6r+9AOq/vgDqv78A64CAAOuAgQDrgIIA64CDAOuAhADrgIUA64CGAOuAhwDrgIgA64CJAOuAigDrgIsA64CMAOuAjQDrgI4A64CPAOuAkADrgJEA64CSAOuAkwDrgJQA64CVAOuAlgDrgJcA64CYAOuAmQDrgJoA64CbAOuAnADrgJ0A64CeAOuAnwDrgKAA64ChAOuAogDrgKMA64CkAOuApQDrgKYA64CnAOuAqADrgKkA64CqAOuAqwDrgKwA64CtAOuArgDrgK8A64CwAOuAsQDrgLIA64CzAOuAtADrgLUA64C2AOuAtwDrgLgA64C5AOuAugDrgLsA64C8AOuAvQDrgL4A64C/AOuBgADrgYEA64GCAOuBgwDrgYQA64GFAOuBhgDrgYcA64GIAOuBiQDrgYoA64GLAOuBjADrgY0A64GOAOuBjwDrgZAA64GRAOuBkgDrgZMA64GUAOuBlQDrgZYA64GXAOuBmADrgZkA64GaAOuBmwDrgZwA64GdAOuBngDrgZ8A64GgAOuBoQDrgaIA64GjAOuBpADrgaUA64GmAOuBpwDrgagA64GpAOuBqgDrgasA64GsAOuBrQDrga4A64GvAOuBsADrgbEA64GyAOuBswDrgbQA64G1AOuBtgDrgbcA64G4AOuBuQDrgboA64G7AOuBvADrgb0A64G+AOuBvwDrgoAA64KBAOuCggDrgoMA64KEAOuChQDrgoYA64KHAOuCiADrgokA64KKAOuCiwDrgowA64KNAOuCjgDrgo8A64KQAOuCkQDrgpIA64KTAOuClADrgpUA64KWAOuClwDrgpgA64KZAOuCmgDrgpsA64KcAOuCnQDrgp4A64KfAOuCoADrgqEA64KiAOuCowDrgqQA64KlAOuCpgDrgqcA64KoAOuCqQDrgqoA64KrAOuCrADrgq0A64KuAOuCrwDrgrAA64KxAOuCsgDrgrMA64K0AOuCtQDrgrYA64K3AOuCuADrgrkA64K6AOuCuwDrgrwA64K9AOuCvgDrgr8A64OAAOuDgQDrg4IA64ODAOuDhADrg4UA64OGAOuDhwDrg4gA64OJAOuDigDrg4sA64OMAOuDjQDrg44A64OPAOuDkADrg5EA64OSAOuDkwDrg5QA64OVAOuDlgDrg5cA64OYAOuDmQDrg5oA64ObAOuDnADrg50A64OeAOuDnwDrg6AA64OhAOuDogDrg6MA64OkAOuDpQDrg6YA64OnAOuDqADrg6kA64OqAOuDqwDrg6wA64OtAOuDrgDrg68A64OwAOuDsQDrg7IA64OzAOuDtADrg7UA64O2AOuDtwDrg7gA64O5AOuDugDrg7sA64O8AOuDvQDrg74A64O/AOuEgADrhIEA64SCAOuEgwDrhIQA64SFAOuEhgDrhIcA64SIAOuEiQDrhIoA64SLAOuEjADrhI0A64SOAOuEjwDrhJAA64SRAOuEkgDrhJMA64SUAOuElQDrhJYA64SXAOuEmADrhJkA64SaA
OuEmwDrhJwA64SdAOuEngDrhJ8A64SgAOuEoQDrhKIA64SjAOuEpADrhKUA64SmAOuEpwDrhKgA64SpAOuEqgDrhKsA64SsAOuErQDrhK4A64SvAOuEsADrhLEA64SyAOuEswDrhLQA64S1AOuEtgDrhLcA64S4AOuEuQDrhLoA64S7AOuEvADrhL0A64S+AOuEvwDrhYAA64WBAOuFggDrhYMA64WEAOuFhQDrhYYA64WHAOuFiADrhYkA64WKAOuFiwDrhYwA64WNAOuFjgDrhY8A64WQAOuFkQDrhZIA64WTAOuFlADrhZUA64WWAOuFlwDrhZgA64WZAOuFmgDrhZsA64WcAOuFnQDrhZ4A64WfAOuFoADrhaEA64WiAOuFowDrhaQA64WlAOuFpgDrhacA64WoAOuFqQDrhaoA64WrAOuFrADrha0A64WuAOuFrwDrhbAA64WxAOuFsgDrhbMA64W0AOuFtQDrhbYA64W3AOuFuADrhbkA64W6AOuFuwDrhbwA64W9AOuFvgDrhb8A64aAAOuGgQDrhoIA64aDAOuGhADrhoUA64aGAOuGhwDrhogA64aJAOuGigDrhosA64aMAOuGjQDrho4A64aPAOuGkADrhpEA64aSAOuGkwDrhpQA64aVAOuGlgDrhpcA64aYAOuGmQDrhpoA64abAOuGnADrhp0A64aeAOuGnwDrhqAA64ahAOuGogDrhqMA64akAOuGpQDrhqYA64anAOuGqADrhqkA64aqAOuGqwDrhqwA64atAOuGrgDrhq8A64awAOuGsQDrhrIA64azAOuGtADrhrUA64a2AOuGtwDrhrgA64a5AOuGugDrhrsA64a8AOuGvQDrhr4A64a/AOuHgADrh4EA64eCAOuHgwDrh4QA64eFAOuHhgDrh4cA64eIAOuHiQDrh4oA64eLAOuHjADrh40A64eOAOuHjwDrh5AA64eRAOuHkgDrh5MA64eUAOuHlQDrh5YA64eXAOuHmADrh5kA64eaAOuHmwDrh5wA64edAOuHngDrh58A64egAOuHoQDrh6IA64ejAOuHpADrh6UA64emAOuHpwDrh6gA64epAOuHqgDrh6sA64esAOuHrQDrh64A64evAOuHsADrh7EA64eyAOuHswDrh7QA64e1AOuHtgDrh7cA64e4AOuHuQDrh7oA64e7AOuHvADrh70A64e+AOuHvwDriIAA64iBAOuIggDriIMA64iEAOuIhQDriIYA64iHAOuIiADriIkA64iKAOuIiwDriIwA64iNAOuIjgDriI8A64iQAOuIkQDriJIA64iTAOuIlADriJUA64iWAOuIlwDriJgA64iZAOuImgDriJsA64icAOuInQDriJ4A64ifAOuIoADriKEA64iiAOuIowDriKQA64ilAOuIpgDriKcA64ioAOuIqQDriKoA64irAOuIrADriK0A64iuAOuIrwDriLAA64ixAOuIsgDriLMA64i0AOuItQDriLYA64i3AOuIuADriLkA64i6AOuIuwDriLwA64i9AOuIvgDriL8A64mAAOuJgQDriYIA64mDAOuJhADriYUA64mGAOuJhwDriYgA64mJAOuJigDriYsA64mMAOuJjQDriY4A64mPAOuJkADriZEA64mSAOuJkwDriZQA64mVAOuJlgDriZcA64mYAOuJmQDriZoA64mbAOuJnADriZ0A64meAOuJnwDriaAA64mhAOuJogDriaMA64mkAOuJpQDriaYA64mnAOuJqADriakA64mqAOuJqwDriawA64mtAOuJrgDria8A64mwAOuJsQDribIA64mzAOuJtADribUA64m2AOuJtwDribgA64m5AOuJugDribsA64m8AOuJvQDrib4A64m/AOuKgADrioEA64qCAOuKgwDrioQA64qFAOuKhgDriocA64qIAOuKiQDriooA64qLAOuKjADrio0A64qOAOuKjwDripAA64qRAOuKkgDripMA64qUAOuKlQDripYA64qXAOuKmADripkA64qaAOuKmwDripwA64qdAOuKngDrip8A64qgAOuKoQDriqIA64qjAOuKpADriqUA64qmAOuKpwDriqgA64qpAOuKqgDriqsA64qsAOuKrQDriq4A64qvAOuKsADrirEA64qyAOuKswDrirQA64q1AOuKtgDrircA64q4AOuKuQDriroA64q7AOuKvADrir0A64q+AOuKvwDri4AA64uBAOuLggDri4MA64uEAOuLhQDri4YA64uHAOuLiADri4kA64uKAOuLiwDri4wA64uNAOuLjgDri48A64uQAOuLkQDri5IA64uTAOuLlADri5UA64uWAOuLlwDri5gA64uZAOuLmgDri5sA64ucAOuLnQDri54A64ufAOuLoADri6EA64uiAOuLowDri6QA64ulAOuLpgDri6cA64uoAOuLqQDri6oA64urAOuLrADri60A64uuAOuLrwDri7AA64uxAOuLsgDri7MA64u0AOuLtQDri7YA64u3AOuLuADri7kA64u6AOuLuwDri7wA64u9AOuLvgDri78A64yAAOuMgQDrjIIA64yDAOuMhADrjIUA64yGAOuMhwDrjIgA64yJAOuMigDrjIsA64yMAOuMjQDrjI4A64yPAOuMkADrjJEA64ySAOuMkwDrjJQA64yVAOuMlgDrjJcA64yYAOuMmQDrjJoA64ybAOuMnADrjJ0A64yeAOuMnwDrjKAA64yhAOuMogDrjKMA64ykAOuMpQDrjKYA64ynAOuMqADrjKkA64yqAOuMqwDrjKwA64ytAOuMrgDrjK8A64ywAOuMsQDrjLIA64yzAOuMtADrjLUA64y2AOuMtwDrjLgA64y5AOuMugDrjLsA64y8AOuMvQDrjL4A64y/AOuNgADrjYEA642CAOuNgwDrjYQA642FAOuNhgDrjYcA642IAOuNiQDrjYoA642LAOuNjADrjY0A642OAOuNjwDrjZAA642RAOuNkgDrjZMA642UAOuNlQDrjZYA642XAOuNmADrjZkA642aAOuNmwDrjZwA642dAOuNngDrjZ8A642gAOuNoQDrjaIA642jAOuNpADrjaUA642mAOuNpwDrjagA642pAOuNqgDrjasA642sAOuNrQDrja4A642vAOuNsADrjbEA642yAOuNswDrjbQA6421AOuNtgDrjbcA6424AOuNuQDrjboA6427AOuNvADrjb0A642+AOuNvwDrjoAA646BAOuOggDrjoMA646EAOuOhQDrjoYA646HAOuOiADrjokA646KAOuOiwDrjowA646NAOuOjgDrjo8A646QAOuOkQDrjpIA646TAOuOlADrjpUA646WAOuOlwDrjpgA646ZAOuOmgDrjpsA646cAOuOnQDrjp4A646fAOuOoADrjqEA646iAOuOowDrjqQA646lAOuOpgDrjqcA646oAOuOqQDrjqoA646rAOuOrADrjq0A646uAOuOrwDrjrAA646xAOuOsgDrjrMA6460AOuO
tQDrjrYA6463AOuOuADrjrkA6466AOuOuwDrjrwA6469AOuOvgDrjr8A64+AAOuPgQDrj4IA64+DAOuPhADrj4UA64+GAOuPhwDrj4gA64+JAOuPigDrj4sA64+MAOuPjQDrj44A64+PAOuPkADrj5EA64+SAOuPkwDrj5QA64+VAOuPlgDrj5cA64+YAOuPmQDrj5oA64+bAOuPnADrj50A64+eAOuPnwDrj6AA64+hAOuPogDrj6MA64+kAOuPpQDrj6YA64+nAOuPqADrj6kA64+qAOuPqwDrj6wA64+tAOuPrgDrj68A64+wAOuPsQDrj7IA64+zAOuPtADrj7UA64+2AOuPtwDrj7gA64+5AOuPugDrj7sA64+8AOuPvQDrj74A64+/AOuQgADrkIEA65CCAOuQgwDrkIQA65CFAOuQhgDrkIcA65CIAOuQiQDrkIoA65CLAOuQjADrkI0A65COAOuQjwDrkJAA65CRAOuQkgDrkJMA65CUAOuQlQDrkJYA65CXAOuQmADrkJkA65CaAOuQmwDrkJwA65CdAOuQngDrkJ8A65CgAOuQoQDrkKIA65CjAOuQpADrkKUA65CmAOuQpwDrkKgA65CpAOuQqgDrkKsA65CsAOuQrQDrkK4A65CvAOuQsADrkLEA65CyAOuQswDrkLQA65C1AOuQtgDrkLcA65C4AOuQuQDrkLoA65C7AOuQvADrkL0A65C+AOuQvwDrkYAA65GBAOuRggDrkYMA65GEAOuRhQDrkYYA65GHAOuRiADrkYkA65GKAOuRiwDrkYwA65GNAOuRjgDrkY8A65GQAOuRkQDrkZIA65GTAOuRlADrkZUA65GWAOuRlwDrkZgA65GZAOuRmgDrkZsA65GcAOuRnQDrkZ4A65GfAOuRoADrkaEA65GiAOuRowDrkaQA65GlAOuRpgDrkacA65GoAOuRqQDrkaoA65GrAOuRrADrka0A65GuAOuRrwDrkbAA65GxAOuRsgDrkbMA65G0AOuRtQDrkbYA65G3AOuRuADrkbkA65G6AOuRuwDrkbwA65G9AOuRvgDrkb8A65KAAOuSgQDrkoIA65KDAOuShADrkoUA65KGAOuShwDrkogA65KJAOuSigDrkosA65KMAOuSjQDrko4A65KPAOuSkADrkpEA65KSAOuSkwDrkpQA65KVAOuSlgDrkpcA65KYAOuSmQDrkpoA65KbAOuSnADrkp0A65KeAOuSnwDrkqAA65KhAOuSogDrkqMA65KkAOuSpQDrkqYA65KnAOuSqADrkqkA65KqAOuSqwDrkqwA65KtAOuSrgDrkq8A65KwAOuSsQDrkrIA65KzAOuStADrkrUA65K2AOuStwDrkrgA65K5AOuSugDrkrsA65K8AOuSvQDrkr4A65K/AOuTgADrk4EA65OCAOuTgwDrk4QA65OFAOuThgDrk4cA65OIAOuTiQDrk4oA65OLAOuTjADrk40A65OOAOuTjwDrk5AA65ORAOuTkgDrk5MA65OUAOuTlQDrk5YA65OXAOuTmADrk5kA65OaAOuTmwDrk5wA65OdAOuTngDrk58A65OgAOuToQDrk6IA65OjAOuTpADrk6UA65OmAOuTpwDrk6gA65OpAOuTqgDrk6sA65OsAOuTrQDrk64A65OvAOuTsADrk7EA65OyAOuTswDrk7QA65O1AOuTtgDrk7cA65O4AOuTuQDrk7oA65O7AOuTvADrk70A65O+AOuTvwDrlIAA65SBAOuUggDrlIMA65SEAOuUhQDrlIYA65SHAOuUiADrlIkA65SKAOuUiwDrlIwA65SNAOuUjgDrlI8A65SQAOuUkQDrlJIA65STAOuUlADrlJUA65SWAOuUlwDrlJgA65SZAOuUmgDrlJsA65ScAOuUnQDrlJ4A65SfAOuUoADrlKEA65SiAOuUowDrlKQA65SlAOuUpgDrlKcA65SoAOuUqQDrlKoA65SrAOuUrADrlK0A65SuAOuUrwDrlLAA65SxAOuUsgDrlLMA65S0AOuUtQDrlLYA65S3AOuUuADrlLkA65S6AOuUuwDrlLwA65S9AOuUvgDrlL8A65WAAOuVgQDrlYIA65WDAOuVhADrlYUA65WGAOuVhwDrlYgA65WJAOuVigDrlYsA65WMAOuVjQDrlY4A65WPAOuVkADrlZEA65WSAOuVkwDrlZQA65WVAOuVlgDrlZcA65WYAOuVmQDrlZoA65WbAOuVnADrlZ0A65WeAOuVnwDrlaAA65WhAOuVogDrlaMA65WkAOuVpQDrlaYA65WnAOuVqADrlakA65WqAOuVqwDrlawA65WtAOuVrgDrla8A65WwAOuVsQDrlbIA65WzAOuVtADrlbUA65W2AOuVtwDrlbgA65W5AOuVugDrlbsA65W8AOuVvQDrlb4A65W/AOuWgADrloEA65aCAOuWgwDrloQA65aFAOuWhgDrlocA65aIAOuWiQDrlooA65aLAOuWjADrlo0A65aOAOuWjwDrlpAA65aRAOuWkgDrlpMA65aUAOuWlQDrlpYA65aXAOuWmADrlpkA65aaAOuWmwDrlpwA65adAOuWngDrlp8A65agAOuWoQDrlqIA65ajAOuWpADrlqUA65amAOuWpwDrlqgA65apAOuWqgDrlqsA65asAOuWrQDrlq4A65avAOuWsADrlrEA65ayAOuWswDrlrQA65a1AOuWtgDrlrcA65a4AOuWuQDrlroA65a7AOuWvADrlr0A65a+AOuWvwDrl4AA65eBAOuXggDrl4MA65eEAOuXhQDrl4YA65eHAOuXiADrl4kA65eKAOuXiwDrl4wA65eNAOuXjgDrl48A65eQAOuXkQDrl5IA65eTAOuXlADrl5UA65eWAOuXlwDrl5gA65eZAOuXmgDrl5sA65ecAOuXnQDrl54A65efAOuXoADrl6EA65eiAOuXowDrl6QA65elAOuXpgDrl6cA65eoAOuXqQDrl6oA65erAOuXrADrl60A65euAOuXrwDrl7AA65exAOuXsgDrl7MA65e0AOuXtQDrl7YA65e3AOuXuADrl7kA65e6AOuXuwDrl7wA65e9AOuXvgDrl78A65iAAOuYgQDrmIIA65iDAOuYhADrmIUA65iGAOuYhwDrmIgA65iJAOuYigDrmIsA65iMAOuYjQDrmI4A65iPAOuYkADrmJEA65iSAOuYkwDrmJQA65iVAOuYlgDrmJcA65iYAOuYmQDrmJoA65ibAOuYnADrmJ0A65ieAOuYnwDrmKAA65ihAOuYogDrmKMA65ikAOuYpQDrmKYA65inAOuYqADrmKkA65iqAOuYqwDrmKwA65itAOuYrgDrmK8A65iwAOuYsQDrmLIA65izAOuYtADrmLUA65i2AOuYtwDrmLgA65i5AOuYugDrmLsA65i8AOuYvQDrmL4A65i/AOuZgADrmYEA65mCAOuZgwDrmYQA65mFAOuZhgDrmYcA65mIAOuZiQDrmYoA65mLAOuZjADrmY0A65mOAOuZjwD
rmZAA65mRAOuZkgDrmZMA65mUAOuZlQDrmZYA65mXAOuZmADrmZkA65maAOuZmwDrmZwA65mdAOuZngDrmZ8A65mgAOuZoQDrmaIA65mjAOuZpADrmaUA65mmAOuZpwDrmagA65mpAOuZqgDrmasA65msAOuZrQDrma4A65mvAOuZsADrmbEA65myAOuZswDrmbQA65m1AOuZtgDrmbcA65m4AOuZuQDrmboA65m7AOuZvADrmb0A65m+AOuZvwDrmoAA65qBAOuaggDrmoMA65qEAOuahQDrmoYA65qHAOuaiADrmokA65qKAOuaiwDrmowA65qNAOuajgDrmo8A65qQAOuakQDrmpIA65qTAOualADrmpUA65qWAOualwDrmpgA65qZAOuamgDrmpsA65qcAOuanQDrmp4A65qfAOuaoADrmqEA65qiAOuaowDrmqQA65qlAOuapgDrmqcA65qoAOuaqQDrmqoA65qrAOuarADrmq0A65quAOuarwDrmrAA65qxAOuasgDrmrMA65q0AOuatQDrmrYA65q3AOuauADrmrkA65q6AOuauwDrmrwA65q9AOuavgDrmr8A65uAAOubgQDrm4IA65uDAOubhADrm4UA65uGAOubhwDrm4gA65uJAOubigDrm4sA65uMAOubjQDrm44A65uPAOubkADrm5EA65uSAOubkwDrm5QA65uVAOublgDrm5cA65uYAOubmQDrm5oA65ubAOubnADrm50A65ueAOubnwDrm6AA65uhAOubogDrm6MA65ukAOubpQDrm6YA65unAOubqADrm6kA65uqAOubqwDrm6wA65utAOubrgDrm68A65uwAOubsQDrm7IA65uzAOubtADrm7UA65u2AOubtwDrm7gA65u5AOubugDrm7sA65u8AOubvQDrm74A65u/AOucgADrnIEA65yCAOucgwDrnIQA65yFAOuchgDrnIcA65yIAOuciQDrnIoA65yLAOucjADrnI0A65yOAOucjwDrnJAA65yRAOuckgDrnJMA65yUAOuclQDrnJYA65yXAOucmADrnJkA65yaAOucmwDrnJwA65ydAOucngDrnJ8A65ygAOucoQDrnKIA65yjAOucpADrnKUA65ymAOucpwDrnKgA65ypAOucqgDrnKsA65ysAOucrQDrnK4A65yvAOucsADrnLEA65yyAOucswDrnLQA65y1AOuctgDrnLcA65y4AOucuQDrnLoA65y7AOucvADrnL0A65y+AOucvwDrnYAA652BAOudggDrnYMA652EAOudhQDrnYYA652HAOudiADrnYkA652KAOudiwDrnYwA652NAOudjgDrnY8A652QAOudkQDrnZIA652TAOudlADrnZUA652WAOudlwDrnZgA652ZAOudmgDrnZsA652cAOudnQDrnZ4A652fAOudoADrnaEA652iAOudowDrnaQA652lAOudpgDrnacA652oAOudqQDrnaoA652rAOudrADrna0A652uAOudrwDrnbAA652xAOudsgDrnbMA6520AOudtQDrnbYA6523AOuduADrnbkA6526AOuduwDrnbwA6529AOudvgDrnb8A656AAOuegQDrnoIA656DAOuehADrnoUA656GAOuehwDrnogA656JAOueigDrnosA656MAOuejQDrno4A656PAOuekADrnpEA656SAOuekwDrnpQA656VAOuelgDrnpcA656YAOuemQDrnpoA656bAOuenADrnp0A656eAOuenwDrnqAA656hAOueogDrnqMA656kAOuepQDrnqYA656nAOueqADrnqkA656qAOueqwDrnqwA656tAOuergDrnq8A656wAOuesQDrnrIA656zAOuetADrnrUA6562AOuetwDrnrgA6565AOueugDrnrsA6568AOuevQDrnr4A656/AOufgADrn4EA65+CAOufgwDrn4QA65+FAOufhgDrn4cA65+IAOufiQDrn4oA65+LAOufjADrn40A65+OAOufjwDrn5AA65+RAOufkgDrn5MA65+UAOuflQDrn5YA65+XAOufmADrn5kA65+aAOufmwDrn5wA65+dAOufngDrn58A65+gAOufoQDrn6IA65+jAOufpADrn6UA65+mAOufpwDrn6gA65+pAOufqgDrn6sA65+sAOufrQDrn64A65+vAOufsADrn7EA65+yAOufswDrn7QA65+1AOuftgDrn7cA65+4AOufuQDrn7oA65+7AOufvADrn70A65++AOufvwDroIAA66CBAOugggDroIMA66CEAOughQDroIYA66CHAOugiADroIkA66CKAOugiwDroIwA66CNAOugjgDroI8A66CQAOugkQDroJIA66CTAOuglADroJUA66CWAOuglwDroJgA66CZAOugmgDroJsA66CcAOugnQDroJ4A66CfAOugoADroKEA66CiAOugowDroKQA66ClAOugpgDroKcA66CoAOugqQDroKoA66CrAOugrADroK0A66CuAOugrwDroLAA66CxAOugsgDroLMA66C0AOugtQDroLYA66C3AOuguADroLkA66C6AOuguwDroLwA66C9AOugvgDroL8A66GAAOuhgQDroYIA66GDAOuhhADroYUA66GGAOuhhwDroYgA66GJAOuhigDroYsA66GMAOuhjQDroY4A66GPAOuhkADroZEA66GSAOuhkwDroZQA66GVAOuhlgDroZcA66GYAOuhmQDroZoA66GbAOuhnADroZ0A66GeAOuhnwDroaAA66GhAOuhogDroaMA66GkAOuhpQDroaYA66GnAOuhqADroakA66GqAOuhqwDroawA66GtAOuhrgDroa8A66GwAOuhsQDrobIA66GzAOuhtADrobUA66G2AOuhtwDrobgA66G5AOuhugDrobsA66G8AOuhvQDrob4A66G/AOuigADrooEA66KCAOuigwDrooQA66KFAOuihgDroocA66KIAOuiiQDroooA66KLAOuijADroo0A66KOAOuijwDropAA66KRAOuikgDropMA66KUAOuilQDropYA66KXAOuimADropkA66KaAOuimwDropwA66KdAOuingDrop8A66KgAOuioQDroqIA66KjAOuipADroqUA66KmAOuipwDroqgA66KpAOuiqgDroqsA66KsAOuirQDroq4A66KvAOuisADrorEA66KyAOuiswDrorQA66K1AOuitgDrorcA66K4AOuiuQDroroA66K7AOuivADror0A66K+AOuivwDro4AA66OBAOujggDro4MA66OEAOujhQDro4YA66OHAOujiADro4kA66OKAOujiwDro4wA66ONAOujjgDro48A66OQAOujkQDro5IA66OTAOujlADro5UA66OWAOujlwDro5gA66OZAOujmgDro5sA66OcAOujnQDro54A66OfAOujoADro6EA66OiAOujowDro6QA66OlAOujpgDro6cA66OoAOujqQDro6
oA66OrAOujrADro60A66OuAOujrwDro7AA66OxAOujsgDro7MA66O0AOujtQDro7YA66O3AOujuADro7kA66O6AOujuwDro7wA66O9AOujvgDro78A66SAAOukgQDrpIIA66SDAOukhADrpIUA66SGAOukhwDrpIgA66SJAOukigDrpIsA66SMAOukjQDrpI4A66SPAOukkADrpJEA66SSAOukkwDrpJQA66SVAOuklgDrpJcA66SYAOukmQDrpJoA66SbAOuknADrpJ0A66SeAOuknwDrpKAA66ShAOukogDrpKMA66SkAOukpQDrpKYA66SnAOukqADrpKkA66SqAOukqwDrpKwA66StAOukrgDrpK8A66SwAOuksQDrpLIA66SzAOuktADrpLUA66S2AOuktwDrpLgA66S5AOukugDrpLsA66S8AOukvQDrpL4A66S/AOulgADrpYEA66WCAOulgwDrpYQA66WFAOulhgDrpYcA66WIAOuliQDrpYoA66WLAOuljADrpY0A66WOAOuljwDrpZAA66WRAOulkgDrpZMA66WUAOullQDrpZYA66WXAOulmADrpZkA66WaAOulmwDrpZwA66WdAOulngDrpZ8A66WgAOuloQDrpaIA66WjAOulpADrpaUA66WmAOulpwDrpagA66WpAOulqgDrpasA66WsAOulrQDrpa4A66WvAOulsADrpbEA66WyAOulswDrpbQA66W1AOultgDrpbcA66W4AOuluQDrpboA66W7AOulvADrpb0A66W+AOulvwDrpoAA66aBAOumggDrpoMA66aEAOumhQDrpoYA66aHAOumiADrpokA66aKAOumiwDrpowA66aNAOumjgDrpo8A66aQAOumkQDrppIA66aTAOumlADrppUA66aWAOumlwDrppgA66aZAOummgDrppsA66acAOumnQDrpp4A66afAOumoADrpqEA66aiAOumowDrpqQA66alAOumpgDrpqcA66aoAOumqQDrpqoA66arAOumrADrpq0A66auAOumrwDrprAA66axAOumsgDrprMA66a0AOumtQDrprYA66a3AOumuADrprkA66a6AOumuwDrprwA66a9AOumvgDrpr8A66eAAOungQDrp4IA66eDAOunhADrp4UA66eGAOunhwDrp4gA66eJAOunigDrp4sA66eMAOunjQDrp44A66ePAOunkADrp5EA66eSAOunkwDrp5QA66eVAOunlgDrp5cA66eYAOunmQDrp5oA66ebAOunnADrp50A66eeAOunnwDrp6AA66ehAOunogDrp6MA66ekAOunpQDrp6YA66enAOunqADrp6kA66eqAOunqwDrp6wA66etAOunrgDrp68A66ewAOunsQDrp7IA66ezAOuntADrp7UA66e2AOuntwDrp7gA66e5AOunugDrp7sA66e8AOunvQDrp74A66e/AOuogADrqIEA66iCAOuogwDrqIQA66iFAOuohgDrqIcA66iIAOuoiQDrqIoA66iLAOuojADrqI0A66iOAOuojwDrqJAA66iRAOuokgDrqJMA66iUAOuolQDrqJYA66iXAOuomADrqJkA66iaAOuomwDrqJwA66idAOuongDrqJ8A66igAOuooQDrqKIA66ijAOuopADrqKUA66imAOuopwDrqKgA66ipAOuoqgDrqKsA66isAOuorQDrqK4A66ivAOuosADrqLEA66iyAOuoswDrqLQA66i1AOuotgDrqLcA66i4AOuouQDrqLoA66i7AOuovADrqL0A66i+AOuovwDrqYAA66mBAOupggDrqYMA66mEAOuphQDrqYYA66mHAOupiADrqYkA66mKAOupiwDrqYwA66mNAOupjgDrqY8A66mQAOupkQDrqZIA66mTAOuplADrqZUA66mWAOuplwDrqZgA66mZAOupmgDrqZsA66mcAOupnQDrqZ4A66mfAOupoADrqaEA66miAOupowDrqaQA66mlAOuppgDrqacA66moAOupqQDrqaoA66mrAOuprADrqa0A66muAOuprwDrqbAA66mxAOupsgDrqbMA66m0AOuptQDrqbYA66m3AOupuADrqbkA66m6AOupuwDrqbwA66m9AOupvgDrqb8A66qAAOuqgQDrqoIA66qDAOuqhADrqoUA66qGAOuqhwDrqogA66qJAOuqigDrqosA66qMAOuqjQDrqo4A66qPAOuqkADrqpEA66qSAOuqkwDrqpQA66qVAOuqlgDrqpcA66qYAOuqmQDrqpoA66qbAOuqnADrqp0A66qeAOuqnwDrqqAA66qhAOuqogDrqqMA66qkAOuqpQDrqqYA66qnAOuqqADrqqkA66qqAOuqqwDrqqwA66qtAOuqrgDrqq8A66qwAOuqsQDrqrIA66qzAOuqtADrqrUA66q2AOuqtwDrqrgA66q5AOuqugDrqrsA66q8AOuqvQDrqr4A66q/AOurgADrq4EA66uCAOurgwDrq4QA66uFAOurhgDrq4cA66uIAOuriQDrq4oA66uLAOurjADrq40A66uOAOurjwDrq5AA66uRAOurkgDrq5MA66uUAOurlQDrq5YA66uXAOurmADrq5kA66uaAOurmwDrq5wA66udAOurngDrq58A66ugAOuroQDrq6IA66ujAOurpADrq6UA66umAOurpwDrq6gA66upAOurqgDrq6sA66usAOurrQDrq64A66uvAOursADrq7EA66uyAOurswDrq7QA66u1AOurtgDrq7cA66u4AOuruQDrq7oA66u7AOurvADrq70A66u+AOurvwDrrIAA66yBAOusggDrrIMA66yEAOushQDrrIYA66yHAOusiADrrIkA66yKAOusiwDrrIwA66yNAOusjgDrrI8A66yQAOuskQDrrJIA66yTAOuslADrrJUA66yWAOuslwDrrJgA66yZAOusmgDrrJsA66ycAOusnQDrrJ4A66yfAOusoADrrKEA66yiAOusowDrrKQA66ylAOuspgDrrKcA66yoAOusqQDrrKoA66yrAOusrADrrK0A66yuAOusrwDrrLAA66yxAOussgDrrLMA66y0AOustQDrrLYA66y3AOusuADrrLkA66y6AOusuwDrrLwA66y9AOusvgDrrL8A662AAOutgQDrrYIA662DAOuthADrrYUA662GAOuthwDrrYgA662JAOutigDrrYsA662MAOutjQDrrY4A662PAOutkADrrZEA662SAOutkwDrrZQA662VAOutlgDrrZcA662YAOutmQDrrZoA662bAOutnADrrZ0A662eAOutnwDrraAA662hAOutogDrraMA662kAOutpQDrraYA662nAOutqADrrakA662qAOutqwDrrawA662tAOutrgDrra8A662wAOutsQDrrbIA662zAOuttADrrbUA6622AOuttwDrrbgA6625AOutugDrrbsA6628AOutvQDrrb4A662/AOuugADrroEA666CAOuugwDrroQA6
66FAOuuhgDrrocA666IAOuuiQDrrooA666LAOuujADrro0A666OAOuujwDrrpAA666RAOuukgDrrpMA666UAOuulQDrrpYA666XAOuumADrrpkA666aAOuumwDrrpwA666dAOuungDrrp8A666gAOuuoQDrrqIA666jAOuupADrrqUA666mAOuupwDrrqgA666pAOuuqgDrrqsA666sAOuurQDrrq4A666vAOuusADrrrEA666yAOuuswDrrrQA6661AOuutgDrrrcA6664AOuuuQDrrroA6667AOuuvADrrr0A666+AOuuvwDrr4AA66+BAOuvggDrr4MA66+EAOuvhQDrr4YA66+HAOuviADrr4kA66+KAOuviwDrr4wA66+NAOuvjgDrr48A66+QAOuvkQDrr5IA66+TAOuvlADrr5UA66+WAOuvlwDrr5gA66+ZAOuvmgDrr5sA66+cAOuvnQDrr54A66+fAOuvoADrr6EA66+iAOuvowDrr6QA66+lAOuvpgDrr6cA66+oAOuvqQDrr6oA66+rAOuvrADrr60A66+uAOuvrwDrr7AA66+xAOuvsgDrr7MA66+0AOuvtQDrr7YA66+3AOuvuADrr7kA66+6AOuvuwDrr7wA66+9AOuvvgDrr78A67CAAOuwgQDrsIIA67CDAOuwhADrsIUA67CGAOuwhwDrsIgA67CJAOuwigDrsIsA67CMAOuwjQDrsI4A67CPAOuwkADrsJEA67CSAOuwkwDrsJQA67CVAOuwlgDrsJcA67CYAOuwmQDrsJoA67CbAOuwnADrsJ0A67CeAOuwnwDrsKAA67ChAOuwogDrsKMA67CkAOuwpQDrsKYA67CnAOuwqADrsKkA67CqAOuwqwDrsKwA67CtAOuwrgDrsK8A67CwAOuwsQDrsLIA67CzAOuwtADrsLUA67C2AOuwtwDrsLgA67C5AOuwugDrsLsA67C8AOuwvQDrsL4A67C/AOuxgADrsYEA67GCAOuxgwDrsYQA67GFAOuxhgDrsYcA67GIAOuxiQDrsYoA67GLAOuxjADrsY0A67GOAOuxjwDrsZAA67GRAOuxkgDrsZMA67GUAOuxlQDrsZYA67GXAOuxmADrsZkA67GaAOuxmwDrsZwA67GdAOuxngDrsZ8A67GgAOuxoQDrsaIA67GjAOuxpADrsaUA67GmAOuxpwDrsagA67GpAOuxqgDrsasA67GsAOuxrQDrsa4A67GvAOuxsADrsbEA67GyAOuxswDrsbQA67G1AOuxtgDrsbcA67G4AOuxuQDrsboA67G7AOuxvADrsb0A67G+AOuxvwDrsoAA67KBAOuyggDrsoMA67KEAOuyhQDrsoYA67KHAOuyiADrsokA67KKAOuyiwDrsowA67KNAOuyjgDrso8A67KQAOuykQDrspIA67KTAOuylADrspUA67KWAOuylwDrspgA67KZAOuymgDrspsA67KcAOuynQDrsp4A67KfAOuyoADrsqEA67KiAOuyowDrsqQA67KlAOuypgDrsqcA67KoAOuyqQDrsqoA67KrAOuyrADrsq0A67KuAOuyrwDrsrAA67KxAOuysgDrsrMA67K0AOuytQDrsrYA67K3AOuyuADrsrkA67K6AOuyuwDrsrwA67K9AOuyvgDrsr8A67OAAOuzgQDrs4IA67ODAOuzhADrs4UA67OGAOuzhwDrs4gA67OJAOuzigDrs4sA67OMAOuzjQDrs44A67OPAOuzkADrs5EA67OSAOuzkwDrs5QA67OVAOuzlgDrs5cA67OYAOuzmQDrs5oA67ObAOuznADrs50A67OeAOuznwDrs6AA67OhAOuzogDrs6MA67OkAOuzpQDrs6YA67OnAOuzqADrs6kA67OqAOuzqwDrs6wA67OtAOuzrgDrs68A67OwAOuzsQDrs7IA67OzAOuztADrs7UA67O2AOuztwDrs7gA67O5AOuzugDrs7sA67O8AOuzvQDrs74A67O/AOu0gADrtIEA67SCAOu0gwDrtIQA67SFAOu0hgDrtIcA67SIAOu0iQDrtIoA67SLAOu0jADrtI0A67SOAOu0jwDrtJAA67SRAOu0kgDrtJMA67SUAOu0lQDrtJYA67SXAOu0mADrtJkA67SaAOu0mwDrtJwA67SdAOu0ngDrtJ8A67SgAOu0oQDrtKIA67SjAOu0pADrtKUA67SmAOu0pwDrtKgA67SpAOu0qgDrtKsA67SsAOu0rQDrtK4A67SvAOu0sADrtLEA67SyAOu0swDrtLQA67S1AOu0tgDrtLcA67S4AOu0uQDrtLoA67S7AOu0vADrtL0A67S+AOu0vwDrtYAA67WBAOu1ggDrtYMA67WEAOu1hQDrtYYA67WHAOu1iADrtYkA67WKAOu1iwDrtYwA67WNAOu1jgDrtY8A67WQAOu1kQDrtZIA67WTAOu1lADrtZUA67WWAOu1lwDrtZgA67WZAOu1mgDrtZsA67WcAOu1nQDrtZ4A67WfAOu1oADrtaEA67WiAOu1owDrtaQA67WlAOu1pgDrtacA67WoAOu1qQDrtaoA67WrAOu1rADrta0A67WuAOu1rwDrtbAA67WxAOu1sgDrtbMA67W0AOu1tQDrtbYA67W3AOu1uADrtbkA67W6AOu1uwDrtbwA67W9AOu1vgDrtb8A67aAAOu2gQDrtoIA67aDAOu2hADrtoUA67aGAOu2hwDrtogA67aJAOu2igDrtosA67aMAOu2jQDrto4A67aPAOu2kADrtpEA67aSAOu2kwDrtpQA67aVAOu2lgDrtpcA67aYAOu2mQDrtpoA67abAOu2nADrtp0A67aeAOu2nwDrtqAA67ahAOu2ogDrtqMA67akAOu2pQDrtqYA67anAOu2qADrtqkA67aqAOu2qwDrtqwA67atAOu2rgDrtq8A67awAOu2sQDrtrIA67azAOu2tADrtrUA67a2AOu2twDrtrgA67a5AOu2ugDrtrsA67a8AOu2vQDrtr4A67a/AOu3gADrt4EA67eCAOu3gwDrt4QA67eFAOu3hgDrt4cA67eIAOu3iQDrt4oA67eLAOu3jADrt40A67eOAOu3jwDrt5AA67eRAOu3kgDrt5MA67eUAOu3lQDrt5YA67eXAOu3mADrt5kA67eaAOu3mwDrt5wA67edAOu3ngDrt58A67egAOu3oQDrt6IA67ejAOu3pADrt6UA67emAOu3pwDrt6gA67epAOu3qgDrt6sA67esAOu3rQDrt64A67evAOu3sADrt7EA67eyAOu3swDrt7QA67e1AOu3tgDrt7cA67e4AOu3uQDrt7oA67e7AOu3vADrt70A67e+AOu3vwDruIAA67iBAOu4ggDruIMA67iEAOu4hQDruIYA67iHAOu4iADruIkA67iKAOu4iwDruIwA67iNAOu4jgDruI8A67iQAOu4kQDruJIA67iTAOu4lADruJUA67iWAOu4lwDruJgA67iZAOu4mgDruJsA67icAOu4nQDruJ4A67if
AOu4oADruKEA67iiAOu4owDruKQA67ilAOu4pgDruKcA67ioAOu4qQDruKoA67irAOu4rADruK0A67iuAOu4rwDruLAA67ixAOu4sgDruLMA67i0AOu4tQDruLYA67i3AOu4uADruLkA67i6AOu4uwDruLwA67i9AOu4vgDruL8A67mAAOu5gQDruYIA67mDAOu5hADruYUA67mGAOu5hwDruYgA67mJAOu5igDruYsA67mMAOu5jQDruY4A67mPAOu5kADruZEA67mSAOu5kwDruZQA67mVAOu5lgDruZcA67mYAOu5mQDruZoA67mbAOu5nADruZ0A67meAOu5nwDruaAA67mhAOu5ogDruaMA67mkAOu5pQDruaYA67mnAOu5qADruakA67mqAOu5qwDruawA67mtAOu5rgDrua8A67mwAOu5sQDrubIA67mzAOu5tADrubUA67m2AOu5twDrubgA67m5AOu5ugDrubsA67m8AOu5vQDrub4A67m/AOu6gADruoEA67qCAOu6gwDruoQA67qFAOu6hgDruocA67qIAOu6iQDruooA67qLAOu6jADruo0A67qOAOu6jwDrupAA67qRAOu6kgDrupMA67qUAOu6lQDrupYA67qXAOu6mADrupkA67qaAOu6mwDrupwA67qdAOu6ngDrup8A67qgAOu6oQDruqIA67qjAOu6pADruqUA67qmAOu6pwDruqgA67qpAOu6qgDruqsA67qsAOu6rQDruq4A67qvAOu6sADrurEA67qyAOu6swDrurQA67q1AOu6tgDrurcA67q4AOu6uQDruroA67q7AOu6vADrur0A67q+AOu6vwDru4AA67uBAOu7ggDru4MA67uEAOu7hQDru4YA67uHAOu7iADru4kA67uKAOu7iwDru4wA67uNAOu7jgDru48A67uQAOu7kQDru5IA67uTAOu7lADru5UA67uWAOu7lwDru5gA67uZAOu7mgDru5sA67ucAOu7nQDru54A67ufAOu7oADru6EA67uiAOu7owDru6QA67ulAOu7pgDru6cA67uoAOu7qQDru6oA67urAOu7rADru60A67uuAOu7rwDru7AA67uxAOu7sgDru7MA67u0AOu7tQDru7YA67u3AOu7uADru7kA67u6AOu7uwDru7wA67u9AOu7vgDru78A67yAAOu8gQDrvIIA67yDAOu8hADrvIUA67yGAOu8hwDrvIgA67yJAOu8igDrvIsA67yMAOu8jQDrvI4A67yPAOu8kADrvJEA67ySAOu8kwDrvJQA67yVAOu8lgDrvJcA67yYAOu8mQDrvJoA67ybAOu8nADrvJ0A67yeAOu8nwDrvKAA67yhAOu8ogDrvKMA67ykAOu8pQDrvKYA67ynAOu8qADrvKkA67yqAOu8qwDrvKwA67ytAOu8rgDrvK8A67ywAOu8sQDrvLIA67yzAOu8tADrvLUA67y2AOu8twDrvLgA67y5AOu8ugDrvLsA67y8AOu8vQDrvL4A67y/AOu9gADrvYEA672CAOu9gwDrvYQA672FAOu9hgDrvYcA672IAOu9iQDrvYoA672LAOu9jADrvY0A672OAOu9jwDrvZAA672RAOu9kgDrvZMA672UAOu9lQDrvZYA672XAOu9mADrvZkA672aAOu9mwDrvZwA672dAOu9ngDrvZ8A672gAOu9oQDrvaIA672jAOu9pADrvaUA672mAOu9pwDrvagA672pAOu9qgDrvasA672sAOu9rQDrva4A672vAOu9sADrvbEA672yAOu9swDrvbQA6721AOu9tgDrvbcA6724AOu9uQDrvboA6727AOu9vADrvb0A672+AOu9vwDrvoAA676BAOu+ggDrvoMA676EAOu+hQDrvoYA676HAOu+iADrvokA676KAOu+iwDrvowA676NAOu+jgDrvo8A676QAOu+kQDrvpIA676TAOu+lADrvpUA676WAOu+lwDrvpgA676ZAOu+mgDrvpsA676cAOu+nQDrvp4A676fAOu+oADrvqEA676iAOu+owDrvqQA676lAOu+pgDrvqcA676oAOu+qQDrvqoA676rAOu+rADrvq0A676uAOu+rwDrvrAA676xAOu+sgDrvrMA6760AOu+tQDrvrYA6763AOu+uADrvrkA6766AOu+uwDrvrwA6769AOu+vgDrvr8A67+AAOu/gQDrv4IA67+DAOu/hADrv4UA67+GAOu/hwDrv4gA67+JAOu/igDrv4sA67+MAOu/jQDrv44A67+PAOu/kADrv5EA67+SAOu/kwDrv5QA67+VAOu/lgDrv5cA67+YAOu/mQDrv5oA67+bAOu/nADrv50A67+eAOu/nwDrv6AA67+hAOu/ogDrv6MA67+kAOu/pQDrv6YA67+nAOu/qADrv6kA67+qAOu/qwDrv6wA67+tAOu/rgDrv68A67+wAOu/sQDrv7IA67+zAOu/tADrv7UA67+2AOu/twDrv7gA67+5AOu/ugDrv7sA67+8AOu/vQDrv74A67+/AOyAgADsgIEA7ICCAOyAgwDsgIQA7ICFAOyAhgDsgIcA7ICIAOyAiQDsgIoA7ICLAOyAjADsgI0A7ICOAOyAjwDsgJAA7ICRAOyAkgDsgJMA7ICUAOyAlQDsgJYA7ICXAOyAmADsgJkA7ICaAOyAmwDsgJwA7ICdAOyAngDsgJ8A7ICgAOyAoQDsgKIA7ICjAOyApADsgKUA7ICmAOyApwDsgKgA7ICpAOyAqgDsgKsA7ICsAOyArQDsgK4A7ICvAOyAsADsgLEA7ICyAOyAswDsgLQA7IC1AOyAtgDsgLcA7IC4AOyAuQDsgLoA7IC7AOyAvADsgL0A7IC+AOyAvwDsgYAA7IGBAOyBggDsgYMA7IGEAOyBhQDsgYYA7IGHAOyBiADsgYkA7IGKAOyBiwDsgYwA7IGNAOyBjgDsgY8A7IGQAOyBkQDsgZIA7IGTAOyBlADsgZUA7IGWAOyBlwDsgZgA7IGZAOyBmgDsgZsA7IGcAOyBnQDsgZ4A7IGfAOyBoADsgaEA7IGiAOyBowDsgaQA7IGlAOyBpgDsgacA7IGoAOyBqQDsgaoA7IGrAOyBrADsga0A7IGuAOyBrwDsgbAA7IGxAOyBsgDsgbMA7IG0AOyBtQDsgbYA7IG3AOyBuADsgbkA7IG6AOyBuwDsgbwA7IG9AOyBvgDsgb8A7IKAAOyCgQDsgoIA7IKDAOyChADsgoUA7IKGAOyChwDsgogA7IKJAOyCigDsgosA7IKMAOyCjQDsgo4A7IKPAOyCkADsgpEA7IKSAOyCkwDsgpQA7IKVAOyClgDsgpcA7IKYAOyCmQDsgpoA7IKbAOyCnADsgp0A7IKeAOyCnwDsgqAA7IKhAOyCogDsgqMA7IKkAOyCpQDsgqYA7IKnAOyCqADsgqkA7IKqAOyCqwDsgqwA7IKtAOyCrgDsgq8A7IKwAOyCsQDsgrIA7IKzAOyCtADsgrUA7IK2AOyCtwDsgrgA7IK5AOy
CugDsgrsA7IK8AOyCvQDsgr4A7IK/AOyDgADsg4EA7IOCAOyDgwDsg4QA7IOFAOyDhgDsg4cA7IOIAOyDiQDsg4oA7IOLAOyDjADsg40A7IOOAOyDjwDsg5AA7IORAOyDkgDsg5MA7IOUAOyDlQDsg5YA7IOXAOyDmADsg5kA7IOaAOyDmwDsg5wA7IOdAOyDngDsg58A7IOgAOyDoQDsg6IA7IOjAOyDpADsg6UA7IOmAOyDpwDsg6gA7IOpAOyDqgDsg6sA7IOsAOyDrQDsg64A7IOvAOyDsADsg7EA7IOyAOyDswDsg7QA7IO1AOyDtgDsg7cA7IO4AOyDuQDsg7oA7IO7AOyDvADsg70A7IO+AOyDvwDshIAA7ISBAOyEggDshIMA7ISEAOyEhQDshIYA7ISHAOyEiADshIkA7ISKAOyEiwDshIwA7ISNAOyEjgDshI8A7ISQAOyEkQDshJIA7ISTAOyElADshJUA7ISWAOyElwDshJgA7ISZAOyEmgDshJsA7IScAOyEnQDshJ4A7ISfAOyEoADshKEA7ISiAOyEowDshKQA7ISlAOyEpgDshKcA7ISoAOyEqQDshKoA7ISrAOyErADshK0A7ISuAOyErwDshLAA7ISxAOyEsgDshLMA7IS0AOyEtQDshLYA7IS3AOyEuADshLkA7IS6AOyEuwDshLwA7IS9AOyEvgDshL8A7IWAAOyFgQDshYIA7IWDAOyFhADshYUA7IWGAOyFhwDshYgA7IWJAOyFigDshYsA7IWMAOyFjQDshY4A7IWPAOyFkADshZEA7IWSAOyFkwDshZQA7IWVAOyFlgDshZcA7IWYAOyFmQDshZoA7IWbAOyFnADshZ0A7IWeAOyFnwDshaAA7IWhAOyFogDshaMA7IWkAOyFpQDshaYA7IWnAOyFqADshakA7IWqAOyFqwDshawA7IWtAOyFrgDsha8A7IWwAOyFsQDshbIA7IWzAOyFtADshbUA7IW2AOyFtwDshbgA7IW5AOyFugDshbsA7IW8AOyFvQDshb4A7IW/AOyGgADshoEA7IaCAOyGgwDshoQA7IaFAOyGhgDshocA7IaIAOyGiQDshooA7IaLAOyGjADsho0A7IaOAOyGjwDshpAA7IaRAOyGkgDshpMA7IaUAOyGlQDshpYA7IaXAOyGmADshpkA7IaaAOyGmwDshpwA7IadAOyGngDshp8A7IagAOyGoQDshqIA7IajAOyGpADshqUA7IamAOyGpwDshqgA7IapAOyGqgDshqsA7IasAOyGrQDshq4A7IavAOyGsADshrEA7IayAOyGswDshrQA7Ia1AOyGtgDshrcA7Ia4AOyGuQDshroA7Ia7AOyGvADshr0A7Ia+AOyGvwDsh4AA7IeBAOyHggDsh4MA7IeEAOyHhQDsh4YA7IeHAOyHiADsh4kA7IeKAOyHiwDsh4wA7IeNAOyHjgDsh48A7IeQAOyHkQDsh5IA7IeTAOyHlADsh5UA7IeWAOyHlwDsh5gA7IeZAOyHmgDsh5sA7IecAOyHnQDsh54A7IefAOyHoADsh6EA7IeiAOyHowDsh6QA7IelAOyHpgDsh6cA7IeoAOyHqQDsh6oA7IerAOyHrADsh60A7IeuAOyHrwDsh7AA7IexAOyHsgDsh7MA7Ie0AOyHtQDsh7YA7Ie3AOyHuADsh7kA7Ie6AOyHuwDsh7wA7Ie9AOyHvgDsh78A7IiAAOyIgQDsiIIA7IiDAOyIhADsiIUA7IiGAOyIhwDsiIgA7IiJAOyIigDsiIsA7IiMAOyIjQDsiI4A7IiPAOyIkADsiJEA7IiSAOyIkwDsiJQA7IiVAOyIlgDsiJcA7IiYAOyImQDsiJoA7IibAOyInADsiJ0A7IieAOyInwDsiKAA7IihAOyIogDsiKMA7IikAOyIpQDsiKYA7IinAOyIqADsiKkA7IiqAOyIqwDsiKwA7IitAOyIrgDsiK8A7IiwAOyIsQDsiLIA7IizAOyItADsiLUA7Ii2AOyItwDsiLgA7Ii5AOyIugDsiLsA7Ii8AOyIvQDsiL4A7Ii/AOyJgADsiYEA7ImCAOyJgwDsiYQA7ImFAOyJhgDsiYcA7ImIAOyJiQDsiYoA7ImLAOyJjADsiY0A7ImOAOyJjwDsiZAA7ImRAOyJkgDsiZMA7ImUAOyJlQDsiZYA7ImXAOyJmADsiZkA7ImaAOyJmwDsiZwA7ImdAOyJngDsiZ8A7ImgAOyJoQDsiaIA7ImjAOyJpADsiaUA7ImmAOyJpwDsiagA7ImpAOyJqgDsiasA7ImsAOyJrQDsia4A7ImvAOyJsADsibEA7ImyAOyJswDsibQA7Im1AOyJtgDsibcA7Im4AOyJuQDsiboA7Im7AOyJvADsib0A7Im+AOyJvwDsioAA7IqBAOyKggDsioMA7IqEAOyKhQDsioYA7IqHAOyKiADsiokA7IqKAOyKiwDsiowA7IqNAOyKjgDsio8A7IqQAOyKkQDsipIA7IqTAOyKlADsipUA7IqWAOyKlwDsipgA7IqZAOyKmgDsipsA7IqcAOyKnQDsip4A7IqfAOyKoADsiqEA7IqiAOyKowDsiqQA7IqlAOyKpgDsiqcA7IqoAOyKqQDsiqoA7IqrAOyKrADsiq0A7IquAOyKrwDsirAA7IqxAOyKsgDsirMA7Iq0AOyKtQDsirYA7Iq3AOyKuADsirkA7Iq6AOyKuwDsirwA7Iq9AOyKvgDsir8A7IuAAOyLgQDsi4IA7IuDAOyLhADsi4UA7IuGAOyLhwDsi4gA7IuJAOyLigDsi4sA7IuMAOyLjQDsi44A7IuPAOyLkADsi5EA7IuSAOyLkwDsi5QA7IuVAOyLlgDsi5cA7IuYAOyLmQDsi5oA7IubAOyLnADsi50A7IueAOyLnwDsi6AA7IuhAOyLogDsi6MA7IukAOyLpQDsi6YA7IunAOyLqADsi6kA7IuqAOyLqwDsi6wA7IutAOyLrgDsi68A7IuwAOyLsQDsi7IA7IuzAOyLtADsi7UA7Iu2AOyLtwDsi7gA7Iu5AOyLugDsi7sA7Iu8AOyLvQDsi74A7Iu/AOyMgADsjIEA7IyCAOyMgwDsjIQA7IyFAOyMhgDsjIcA7IyIAOyMiQDsjIoA7IyLAOyMjADsjI0A7IyOAOyMjwDsjJAA7IyRAOyMkgDsjJMA7IyUAOyMlQDsjJYA7IyXAOyMmADsjJkA7IyaAOyMmwDsjJwA7IydAOyMngDsjJ8A7IygAOyMoQDsjKIA7IyjAOyMpADsjKUA7IymAOyMpwDsjKgA7IypAOyMqgDsjKsA7IysAOyMrQDsjK4A7IyvAOyMsADsjLEA7IyyAOyMswDsjLQA7Iy1AOyMtgDsjLcA7Iy4AOyMuQDsjLoA7Iy7AOyMvADsjL0A7Iy+AOyMvwDsjYAA7I2BAOyNggDsjYMA7I2EAOyNhQDsjYYA7I2HAOyNiADsjYkA7I2KAOyNiwDsjYwA7I2NAOyNjgDsjY8A7I2QAOyNkQDsjZIA7I2TAOyNlA
DsjZUA7I2WAOyNlwDsjZgA7I2ZAOyNmgDsjZsA7I2cAOyNnQDsjZ4A7I2fAOyNoADsjaEA7I2iAOyNowDsjaQA7I2lAOyNpgDsjacA7I2oAOyNqQDsjaoA7I2rAOyNrADsja0A7I2uAOyNrwDsjbAA7I2xAOyNsgDsjbMA7I20AOyNtQDsjbYA7I23AOyNuADsjbkA7I26AOyNuwDsjbwA7I29AOyNvgDsjb8A7I6AAOyOgQDsjoIA7I6DAOyOhADsjoUA7I6GAOyOhwDsjogA7I6JAOyOigDsjosA7I6MAOyOjQDsjo4A7I6PAOyOkADsjpEA7I6SAOyOkwDsjpQA7I6VAOyOlgDsjpcA7I6YAOyOmQDsjpoA7I6bAOyOnADsjp0A7I6eAOyOnwDsjqAA7I6hAOyOogDsjqMA7I6kAOyOpQDsjqYA7I6nAOyOqADsjqkA7I6qAOyOqwDsjqwA7I6tAOyOrgDsjq8A7I6wAOyOsQDsjrIA7I6zAOyOtADsjrUA7I62AOyOtwDsjrgA7I65AOyOugDsjrsA7I68AOyOvQDsjr4A7I6/AOyPgADsj4EA7I+CAOyPgwDsj4QA7I+FAOyPhgDsj4cA7I+IAOyPiQDsj4oA7I+LAOyPjADsj40A7I+OAOyPjwDsj5AA7I+RAOyPkgDsj5MA7I+UAOyPlQDsj5YA7I+XAOyPmADsj5kA7I+aAOyPmwDsj5wA7I+dAOyPngDsj58A7I+gAOyPoQDsj6IA7I+jAOyPpADsj6UA7I+mAOyPpwDsj6gA7I+pAOyPqgDsj6sA7I+sAOyPrQDsj64A7I+vAOyPsADsj7EA7I+yAOyPswDsj7QA7I+1AOyPtgDsj7cA7I+4AOyPuQDsj7oA7I+7AOyPvADsj70A7I++AOyPvwDskIAA7JCBAOyQggDskIMA7JCEAOyQhQDskIYA7JCHAOyQiADskIkA7JCKAOyQiwDskIwA7JCNAOyQjgDskI8A7JCQAOyQkQDskJIA7JCTAOyQlADskJUA7JCWAOyQlwDskJgA7JCZAOyQmgDskJsA7JCcAOyQnQDskJ4A7JCfAOyQoADskKEA7JCiAOyQowDskKQA7JClAOyQpgDskKcA7JCoAOyQqQDskKoA7JCrAOyQrADskK0A7JCuAOyQrwDskLAA7JCxAOyQsgDskLMA7JC0AOyQtQDskLYA7JC3AOyQuADskLkA7JC6AOyQuwDskLwA7JC9AOyQvgDskL8A7JGAAOyRgQDskYIA7JGDAOyRhADskYUA7JGGAOyRhwDskYgA7JGJAOyRigDskYsA7JGMAOyRjQDskY4A7JGPAOyRkADskZEA7JGSAOyRkwDskZQA7JGVAOyRlgDskZcA7JGYAOyRmQDskZoA7JGbAOyRnADskZ0A7JGeAOyRnwDskaAA7JGhAOyRogDskaMA7JGkAOyRpQDskaYA7JGnAOyRqADskakA7JGqAOyRqwDskawA7JGtAOyRrgDska8A7JGwAOyRsQDskbIA7JGzAOyRtADskbUA7JG2AOyRtwDskbgA7JG5AOyRugDskbsA7JG8AOyRvQDskb4A7JG/AOySgADskoEA7JKCAOySgwDskoQA7JKFAOyShgDskocA7JKIAOySiQDskooA7JKLAOySjADsko0A7JKOAOySjwDskpAA7JKRAOySkgDskpMA7JKUAOySlQDskpYA7JKXAOySmADskpkA7JKaAOySmwDskpwA7JKdAOySngDskp8A7JKgAOySoQDskqIA7JKjAOySpADskqUA7JKmAOySpwDskqgA7JKpAOySqgDskqsA7JKsAOySrQDskq4A7JKvAOySsADskrEA7JKyAOySswDskrQA7JK1AOyStgDskrcA7JK4AOySuQDskroA7JK7AOySvADskr0A7JK+AOySvwDsk4AA7JOBAOyTggDsk4MA7JOEAOyThQDsk4YA7JOHAOyTiADsk4kA7JOKAOyTiwDsk4wA7JONAOyTjgDsk48A7JOQAOyTkQDsk5IA7JOTAOyTlADsk5UA7JOWAOyTlwDsk5gA7JOZAOyTmgDsk5sA7JOcAOyTnQDsk54A7JOfAOyToADsk6EA7JOiAOyTowDsk6QA7JOlAOyTpgDsk6cA7JOoAOyTqQDsk6oA7JOrAOyTrADsk60A7JOuAOyTrwDsk7AA7JOxAOyTsgDsk7MA7JO0AOyTtQDsk7YA7JO3AOyTuADsk7kA7JO6AOyTuwDsk7wA7JO9AOyTvgDsk78A7JSAAOyUgQDslIIA7JSDAOyUhADslIUA7JSGAOyUhwDslIgA7JSJAOyUigDslIsA7JSMAOyUjQDslI4A7JSPAOyUkADslJEA7JSSAOyUkwDslJQA7JSVAOyUlgDslJcA7JSYAOyUmQDslJoA7JSbAOyUnADslJ0A7JSeAOyUnwDslKAA7JShAOyUogDslKMA7JSkAOyUpQDslKYA7JSnAOyUqADslKkA7JSqAOyUqwDslKwA7JStAOyUrgDslK8A7JSwAOyUsQDslLIA7JSzAOyUtADslLUA7JS2AOyUtwDslLgA7JS5AOyUugDslLsA7JS8AOyUvQDslL4A7JS/AOyVgADslYEA7JWCAOyVgwDslYQA7JWFAOyVhgDslYcA7JWIAOyViQDslYoA7JWLAOyVjADslY0A7JWOAOyVjwDslZAA7JWRAOyVkgDslZMA7JWUAOyVlQDslZYA7JWXAOyVmADslZkA7JWaAOyVmwDslZwA7JWdAOyVngDslZ8A7JWgAOyVoQDslaIA7JWjAOyVpADslaUA7JWmAOyVpwDslagA7JWpAOyVqgDslasA7JWsAOyVrQDsla4A7JWvAOyVsADslbEA7JWyAOyVswDslbQA7JW1AOyVtgDslbcA7JW4AOyVuQDslboA7JW7AOyVvADslb0A7JW+AOyVvwDsloAA7JaBAOyWggDsloMA7JaEAOyWhQDsloYA7JaHAOyWiADslokA7JaKAOyWiwDslowA7JaNAOyWjgDslo8A7JaQAOyWkQDslpIA7JaTAOyWlADslpUA7JaWAOyWlwDslpgA7JaZAOyWmgDslpsA7JacAOyWnQDslp4A7JafAOyWoADslqEA7JaiAOyWowDslqQA7JalAOyWpgDslqcA7JaoAOyWqQDslqoA7JarAOyWrADslq0A7JauAOyWrwDslrAA7JaxAOyWsgDslrMA7Ja0AOyWtQDslrYA7Ja3AOyWuADslrkA7Ja6AOyWuwDslrwA7Ja9AOyWvgDslr8A7JeAAOyXgQDsl4IA7JeDAOyXhADsl4UA7JeGAOyXhwDsl4gA7JeJAOyXigDsl4sA7JeMAOyXjQDsl44A7JePAOyXkADsl5EA7JeSAOyXkwDsl5QA7JeVAOyXlgDsl5cA7JeYAOyXmQDsl5oA7JebAOyXnADsl50A7JeeAOyXnwDsl6AA7JehAOyXogDsl6MA7JekAOyXpQDsl6YA7JenAOyXqADsl6kA7JeqAOyXqwDsl6wA7JetAOyXrgDsl
68A7JewAOyXsQDsl7IA7JezAOyXtADsl7UA7Je2AOyXtwDsl7gA7Je5AOyXugDsl7sA7Je8AOyXvQDsl74A7Je/AOyYgADsmIEA7JiCAOyYgwDsmIQA7JiFAOyYhgDsmIcA7JiIAOyYiQDsmIoA7JiLAOyYjADsmI0A7JiOAOyYjwDsmJAA7JiRAOyYkgDsmJMA7JiUAOyYlQDsmJYA7JiXAOyYmADsmJkA7JiaAOyYmwDsmJwA7JidAOyYngDsmJ8A7JigAOyYoQDsmKIA7JijAOyYpADsmKUA7JimAOyYpwDsmKgA7JipAOyYqgDsmKsA7JisAOyYrQDsmK4A7JivAOyYsADsmLEA7JiyAOyYswDsmLQA7Ji1AOyYtgDsmLcA7Ji4AOyYuQDsmLoA7Ji7AOyYvADsmL0A7Ji+AOyYvwDsmYAA7JmBAOyZggDsmYMA7JmEAOyZhQDsmYYA7JmHAOyZiADsmYkA7JmKAOyZiwDsmYwA7JmNAOyZjgDsmY8A7JmQAOyZkQDsmZIA7JmTAOyZlADsmZUA7JmWAOyZlwDsmZgA7JmZAOyZmgDsmZsA7JmcAOyZnQDsmZ4A7JmfAOyZoADsmaEA7JmiAOyZowDsmaQA7JmlAOyZpgDsmacA7JmoAOyZqQDsmaoA7JmrAOyZrADsma0A7JmuAOyZrwDsmbAA7JmxAOyZsgDsmbMA7Jm0AOyZtQDsmbYA7Jm3AOyZuADsmbkA7Jm6AOyZuwDsmbwA7Jm9AOyZvgDsmb8A7JqAAOyagQDsmoIA7JqDAOyahADsmoUA7JqGAOyahwDsmogA7JqJAOyaigDsmosA7JqMAOyajQDsmo4A7JqPAOyakADsmpEA7JqSAOyakwDsmpQA7JqVAOyalgDsmpcA7JqYAOyamQDsmpoA7JqbAOyanADsmp0A7JqeAOyanwDsmqAA7JqhAOyaogDsmqMA7JqkAOyapQDsmqYA7JqnAOyaqADsmqkA7JqqAOyaqwDsmqwA7JqtAOyargDsmq8A7JqwAOyasQDsmrIA7JqzAOyatADsmrUA7Jq2AOyatwDsmrgA7Jq5AOyaugDsmrsA7Jq8AOyavQDsmr4A7Jq/AOybgADsm4EA7JuCAOybgwDsm4QA7JuFAOybhgDsm4cA7JuIAOybiQDsm4oA7JuLAOybjADsm40A7JuOAOybjwDsm5AA7JuRAOybkgDsm5MA7JuUAOyblQDsm5YA7JuXAOybmADsm5kA7JuaAOybmwDsm5wA7JudAOybngDsm58A7JugAOyboQDsm6IA7JujAOybpADsm6UA7JumAOybpwDsm6gA7JupAOybqgDsm6sA7JusAOybrQDsm64A7JuvAOybsADsm7EA7JuyAOybswDsm7QA7Ju1AOybtgDsm7cA7Ju4AOybuQDsm7oA7Ju7AOybvADsm70A7Ju+AOybvwDsnIAA7JyBAOycggDsnIMA7JyEAOychQDsnIYA7JyHAOyciADsnIkA7JyKAOyciwDsnIwA7JyNAOycjgDsnI8A7JyQAOyckQDsnJIA7JyTAOyclADsnJUA7JyWAOyclwDsnJgA7JyZAOycmgDsnJsA7JycAOycnQDsnJ4A7JyfAOycoADsnKEA7JyiAOycowDsnKQA7JylAOycpgDsnKcA7JyoAOycqQDsnKoA7JyrAOycrADsnK0A7JyuAOycrwDsnLAA7JyxAOycsgDsnLMA7Jy0AOyctQDsnLYA7Jy3AOycuADsnLkA7Jy6AOycuwDsnLwA7Jy9AOycvgDsnL8A7J2AAOydgQDsnYIA7J2DAOydhADsnYUA7J2GAOydhwDsnYgA7J2JAOydigDsnYsA7J2MAOydjQDsnY4A7J2PAOydkADsnZEA7J2SAOydkwDsnZQA7J2VAOydlgDsnZcA7J2YAOydmQDsnZoA7J2bAOydnADsnZ0A7J2eAOydnwDsnaAA7J2hAOydogDsnaMA7J2kAOydpQDsnaYA7J2nAOydqADsnakA7J2qAOydqwDsnawA7J2tAOydrgDsna8A7J2wAOydsQDsnbIA7J2zAOydtADsnbUA7J22AOydtwDsnbgA7J25AOydugDsnbsA7J28AOydvQDsnb4A7J2/AOyegADsnoEA7J6CAOyegwDsnoQA7J6FAOyehgDsnocA7J6IAOyeiQDsnooA7J6LAOyejADsno0A7J6OAOyejwDsnpAA7J6RAOyekgDsnpMA7J6UAOyelQDsnpYA7J6XAOyemADsnpkA7J6aAOyemwDsnpwA7J6dAOyengDsnp8A7J6gAOyeoQDsnqIA7J6jAOyepADsnqUA7J6mAOyepwDsnqgA7J6pAOyeqgDsnqsA7J6sAOyerQDsnq4A7J6vAOyesADsnrEA7J6yAOyeswDsnrQA7J61AOyetgDsnrcA7J64AOyeuQDsnroA7J67AOyevADsnr0A7J6+AOyevwDsn4AA7J+BAOyfggDsn4MA7J+EAOyfhQDsn4YA7J+HAOyfiADsn4kA7J+KAOyfiwDsn4wA7J+NAOyfjgDsn48A7J+QAOyfkQDsn5IA7J+TAOyflADsn5UA7J+WAOyflwDsn5gA7J+ZAOyfmgDsn5sA7J+cAOyfnQDsn54A7J+fAOyfoADsn6EA7J+iAOyfowDsn6QA7J+lAOyfpgDsn6cA7J+oAOyfqQDsn6oA7J+rAOyfrADsn60A7J+uAOyfrwDsn7AA7J+xAOyfsgDsn7MA7J+0AOyftQDsn7YA7J+3AOyfuADsn7kA7J+6AOyfuwDsn7wA7J+9AOyfvgDsn78A7KCAAOyggQDsoIIA7KCDAOyghADsoIUA7KCGAOyghwDsoIgA7KCJAOygigDsoIsA7KCMAOygjQDsoI4A7KCPAOygkADsoJEA7KCSAOygkwDsoJQA7KCVAOyglgDsoJcA7KCYAOygmQDsoJoA7KCbAOygnADsoJ0A7KCeAOygnwDsoKAA7KChAOygogDsoKMA7KCkAOygpQDsoKYA7KCnAOygqADsoKkA7KCqAOygqwDsoKwA7KCtAOygrgDsoK8A7KCwAOygsQDsoLIA7KCzAOygtADsoLUA7KC2AOygtwDsoLgA7KC5AOygugDsoLsA7KC8AOygvQDsoL4A7KC/AOyhgADsoYEA7KGCAOyhgwDsoYQA7KGFAOyhhgDsoYcA7KGIAOyhiQDsoYoA7KGLAOyhjADsoY0A7KGOAOyhjwDsoZAA7KGRAOyhkgDsoZMA7KGUAOyhlQDsoZYA7KGXAOyhmADsoZkA7KGaAOyhmwDsoZwA7KGdAOyhngDsoZ8A7KGgAOyhoQDsoaIA7KGjAOyhpADsoaUA7KGmAOyhpwDsoagA7KGpAOyhqgDsoasA7KGsAOyhrQDsoa4A7KGvAOyhsADsobEA7KGyAOyhswDsobQA7KG1AOyhtgDsobcA7KG4AOyhuQDsoboA7KG7AOyhvADsob0A7KG+AOyhvwDsooAA7KKBAOyiggDsooMA7KKEAOyihQDsooYA7KKHAOyiiADsookA
7KKKAOyiiwDsoowA7KKNAOyijgDsoo8A7KKQAOyikQDsopIA7KKTAOyilADsopUA7KKWAOyilwDsopgA7KKZAOyimgDsopsA7KKcAOyinQDsop4A7KKfAOyioADsoqEA7KKiAOyiowDsoqQA7KKlAOyipgDsoqcA7KKoAOyiqQDsoqoA7KKrAOyirADsoq0A7KKuAOyirwDsorAA7KKxAOyisgDsorMA7KK0AOyitQDsorYA7KK3AOyiuADsorkA7KK6AOyiuwDsorwA7KK9AOyivgDsor8A7KOAAOyjgQDso4IA7KODAOyjhADso4UA7KOGAOyjhwDso4gA7KOJAOyjigDso4sA7KOMAOyjjQDso44A7KOPAOyjkADso5EA7KOSAOyjkwDso5QA7KOVAOyjlgDso5cA7KOYAOyjmQDso5oA7KObAOyjnADso50A7KOeAOyjnwDso6AA7KOhAOyjogDso6MA7KOkAOyjpQDso6YA7KOnAOyjqADso6kA7KOqAOyjqwDso6wA7KOtAOyjrgDso68A7KOwAOyjsQDso7IA7KOzAOyjtADso7UA7KO2AOyjtwDso7gA7KO5AOyjugDso7sA7KO8AOyjvOydmADso70A7KO+AOyjvwDspIAA7KSBAOykggDspIMA7KSEAOykhQDspIYA7KSHAOykiADspIkA7KSKAOykiwDspIwA7KSNAOykjgDspI8A7KSQAOykkQDspJIA7KSTAOyklADspJUA7KSWAOyklwDspJgA7KSZAOykmgDspJsA7KScAOyknQDspJ4A7KSfAOykoADspKEA7KSiAOykowDspKQA7KSlAOykpgDspKcA7KSoAOykqQDspKoA7KSrAOykrADspK0A7KSuAOykrwDspLAA7KSxAOyksgDspLMA7KS0AOyktQDspLYA7KS3AOykuADspLkA7KS6AOykuwDspLwA7KS9AOykvgDspL8A7KWAAOylgQDspYIA7KWDAOylhADspYUA7KWGAOylhwDspYgA7KWJAOyligDspYsA7KWMAOyljQDspY4A7KWPAOylkADspZEA7KWSAOylkwDspZQA7KWVAOyllgDspZcA7KWYAOylmQDspZoA7KWbAOylnADspZ0A7KWeAOylnwDspaAA7KWhAOylogDspaMA7KWkAOylpQDspaYA7KWnAOylqADspakA7KWqAOylqwDspawA7KWtAOylrgDspa8A7KWwAOylsQDspbIA7KWzAOyltADspbUA7KW2AOyltwDspbgA7KW5AOylugDspbsA7KW8AOylvQDspb4A7KW/AOymgADspoEA7KaCAOymgwDspoQA7KaFAOymhgDspocA7KaIAOymiQDspooA7KaLAOymjADspo0A7KaOAOymjwDsppAA7KaRAOymkgDsppMA7KaUAOymlQDsppYA7KaXAOymmADsppkA7KaaAOymmwDsppwA7KadAOymngDspp8A7KagAOymoQDspqIA7KajAOympADspqUA7KamAOympwDspqgA7KapAOymqgDspqsA7KasAOymrQDspq4A7KavAOymsADsprEA7KayAOymswDsprQA7Ka1AOymtgDsprcA7Ka4AOymuQDsproA7Ka7AOymvADspr0A7Ka+AOymvwDsp4AA7KeBAOynggDsp4MA7KeEAOynhQDsp4YA7KeHAOyniADsp4kA7KeKAOyniwDsp4wA7KeNAOynjgDsp48A7KeQAOynkQDsp5IA7KeTAOynlADsp5UA7KeWAOynlwDsp5gA7KeZAOynmgDsp5sA7KecAOynnQDsp54A7KefAOynoADsp6EA7KeiAOynowDsp6QA7KelAOynpgDsp6cA7KeoAOynqQDsp6oA7KerAOynrADsp60A7KeuAOynrwDsp7AA7KexAOynsgDsp7MA7Ke0AOyntQDsp7YA7Ke3AOynuADsp7kA7Ke6AOynuwDsp7wA7Ke9AOynvgDsp78A7KiAAOyogQDsqIIA7KiDAOyohADsqIUA7KiGAOyohwDsqIgA7KiJAOyoigDsqIsA7KiMAOyojQDsqI4A7KiPAOyokADsqJEA7KiSAOyokwDsqJQA7KiVAOyolgDsqJcA7KiYAOyomQDsqJoA7KibAOyonADsqJ0A7KieAOyonwDsqKAA7KihAOyoogDsqKMA7KikAOyopQDsqKYA7KinAOyoqADsqKkA7KiqAOyoqwDsqKwA7KitAOyorgDsqK8A7KiwAOyosQDsqLIA7KizAOyotADsqLUA7Ki2AOyotwDsqLgA7Ki5AOyougDsqLsA7Ki8AOyovQDsqL4A7Ki/AOypgADsqYEA7KmCAOypgwDsqYQA7KmFAOyphgDsqYcA7KmIAOypiQDsqYoA7KmLAOypjADsqY0A7KmOAOypjwDsqZAA7KmRAOypkgDsqZMA7KmUAOyplQDsqZYA7KmXAOypmADsqZkA7KmaAOypmwDsqZwA7KmdAOypngDsqZ8A7KmgAOypoQDsqaIA7KmjAOyppADsqaUA7KmmAOyppwDsqagA7KmpAOypqgDsqasA7KmsAOyprQDsqa4A7KmvAOypsADsqbEA7KmyAOypswDsqbQA7Km1AOyptgDsqbcA7Km4AOypuQDsqboA7Km7AOypvADsqb0A7Km+AOypvwDsqoAA7KqBAOyqggDsqoMA7KqEAOyqhQDsqoYA7KqHAOyqiADsqokA7KqKAOyqiwDsqowA7KqNAOyqjgDsqo8A7KqQAOyqkQDsqpIA7KqTAOyqlADsqpUA7KqWAOyqlwDsqpgA7KqZAOyqmgDsqpsA7KqcAOyqnQDsqp4A7KqfAOyqoADsqqEA7KqiAOyqowDsqqQA7KqlAOyqpgDsqqcA7KqoAOyqqQDsqqoA7KqrAOyqrADsqq0A7KquAOyqrwDsqrAA7KqxAOyqsgDsqrMA7Kq0AOyqtQDsqrYA7Kq3AOyquADsqrkA7Kq6AOyquwDsqrwA7Kq9AOyqvgDsqr8A7KuAAOyrgQDsq4IA7KuDAOyrhADsq4UA7KuGAOyrhwDsq4gA7KuJAOyrigDsq4sA7KuMAOyrjQDsq44A7KuPAOyrkADsq5EA7KuSAOyrkwDsq5QA7KuVAOyrlgDsq5cA7KuYAOyrmQDsq5oA7KubAOyrnADsq50A7KueAOyrnwDsq6AA7KuhAOyrogDsq6MA7KukAOyrpQDsq6YA7KunAOyrqADsq6kA7KuqAOyrqwDsq6wA7KutAOyrrgDsq68A7KuwAOyrsQDsq7IA7KuzAOyrtADsq7UA7Ku2AOyrtwDsq7gA7Ku5AOyrugDsq7sA7Ku8AOyrvQDsq74A7Ku/AOysgADsrIEA7KyCAOysgwDsrIQA7KyFAOyshgDsrIcA7KyIAOysiQDsrIoA7KyLAOysjADsrI0A7KyOAOysjwDsrJAA7KyRAOyskgDsrJMA7KyUAOyslQDsrJYA7KyXAOysmADsrJkA7KyaAOysmwDsrJwA7KydAOysngDsrJ8A7KygAOysoQDsrKI
A7KyjAOyspADsrKUA7KymAOyspwDsrKgA7KypAOysqgDsrKsA7KysAOysrQDsrK4A7KyvAOyssADsrLEA7KyyAOysswDsrLQA7Ky1AOystgDsrLcA7Ky4AOysuQDsrLoA7Ky7AOysvADsrL0A7Ky+AOysvwDsrYAA7K2BAOytggDsrYMA7K2EAOythQDsrYYA7K2HAOytiADsrYkA7K2KAOytiwDsrYwA7K2NAOytjgDsrY8A7K2QAOytkQDsrZIA7K2TAOytlADsrZUA7K2WAOytlwDsrZgA7K2ZAOytmgDsrZsA7K2cAOytnQDsrZ4A7K2fAOytoADsraEA7K2iAOytowDsraQA7K2lAOytpgDsracA7K2oAOytqQDsraoA7K2rAOytrADsra0A7K2uAOytrwDsrbAA7K2xAOytsgDsrbMA7K20AOyttQDsrbYA7K23AOytuADsrbkA7K26AOytuwDsrbwA7K29AOytvgDsrb8A7K6AAOyugQDsroIA7K6DAOyuhADsroUA7K6GAOyuhwDsrogA7K6JAOyuigDsrosA7K6MAOyujQDsro4A7K6PAOyukADsrpEA7K6SAOyukwDsrpQA7K6VAOyulgDsrpcA7K6YAOyumQDsrpoA7K6bAOyunADsrp0A7K6eAOyunwDsrqAA7K6hAOyuogDsrqMA7K6kAOyupQDsrqYA7K6nAOyuqADsrqkA7K6qAOyuqwDsrqwA7K6tAOyurgDsrq8A7K6wAOyusQDsrrIA7K6zAOyutADsrrUA7K62AOyutwDsrrgA7K65AOyuugDsrrsA7K68AOyuvQDsrr4A7K6/AOyvgADsr4EA7K+CAOyvgwDsr4QA7K+FAOyvhgDsr4cA7K+IAOyviQDsr4oA7K+LAOyvjADsr40A7K+OAOyvjwDsr5AA7K+RAOyvkgDsr5MA7K+UAOyvlQDsr5YA7K+XAOyvmADsr5kA7K+aAOyvmwDsr5wA7K+dAOyvngDsr58A7K+gAOyvoQDsr6IA7K+jAOyvpADsr6UA7K+mAOyvpwDsr6gA7K+pAOyvqgDsr6sA7K+sAOyvrQDsr64A7K+vAOyvsADsr7EA7K+yAOyvswDsr7QA7K+1AOyvtgDsr7cA7K+4AOyvuQDsr7oA7K+7AOyvvADsr70A7K++AOyvvwDssIAA7LCBAOywggDssIMA7LCEAOywhQDssIYA7LCHAOywiADssIkA7LCKAOywiwDssIwA7LCNAOywjgDssI8A7LCQAOywkQDssJIA7LCTAOywlADssJUA7LCWAOywlwDssJgA7LCZAOywmgDssJsA7LCcAOywnQDssJ4A7LCfAOywoADssKEA7LCiAOywowDssKQA7LClAOywpgDssKcA7LCoAOywqQDssKoA7LCrAOywrADssK0A7LCuAOywrwDssLAA7LCxAOywsgDssLMA7LC0AOywtQDssLYA7LC3AOywuADssLjqs6AA7LC5AOywugDssLsA7LC8AOywvQDssL4A7LC/AOyxgADssYEA7LGCAOyxgwDssYQA7LGFAOyxhgDssYcA7LGIAOyxiQDssYoA7LGLAOyxjADssY0A7LGOAOyxjwDssZAA7LGRAOyxkgDssZMA7LGUAOyxlQDssZYA7LGXAOyxmADssZkA7LGaAOyxmwDssZwA7LGdAOyxngDssZ8A7LGgAOyxoQDssaIA7LGjAOyxpADssaUA7LGmAOyxpwDssagA7LGpAOyxqgDssasA7LGsAOyxrQDssa4A7LGvAOyxsADssbEA7LGyAOyxswDssbQA7LG1AOyxtgDssbcA7LG4AOyxuQDssboA7LG7AOyxvADssb0A7LG+AOyxvwDssoAA7LKBAOyyggDssoMA7LKEAOyyhQDssoYA7LKHAOyyiADssokA7LKKAOyyiwDssowA7LKNAOyyjgDsso8A7LKQAOyykQDsspIA7LKTAOyylADsspUA7LKWAOyylwDsspgA7LKZAOyymgDsspsA7LKcAOyynQDssp4A7LKfAOyyoADssqEA7LKiAOyyowDssqQA7LKlAOyypgDssqcA7LKoAOyyqQDssqoA7LKrAOyyrADssq0A7LKuAOyyrwDssrAA7LKxAOyysgDssrMA7LK0AOyytQDssrYA7LK3AOyyuADssrkA7LK6AOyyuwDssrwA7LK9AOyyvgDssr8A7LOAAOyzgQDss4IA7LODAOyzhADss4UA7LOGAOyzhwDss4gA7LOJAOyzigDss4sA7LOMAOyzjQDss44A7LOPAOyzkADss5EA7LOSAOyzkwDss5QA7LOVAOyzlgDss5cA7LOYAOyzmQDss5oA7LObAOyznADss50A7LOeAOyznwDss6AA7LOhAOyzogDss6MA7LOkAOyzpQDss6YA7LOnAOyzqADss6kA7LOqAOyzqwDss6wA7LOtAOyzrgDss68A7LOwAOyzsQDss7IA7LOzAOyztADss7UA7LO2AOyztwDss7gA7LO5AOyzugDss7sA7LO8AOyzvQDss74A7LO/AOy0gADstIEA7LSCAOy0gwDstIQA7LSFAOy0hgDstIcA7LSIAOy0iQDstIoA7LSLAOy0jADstI0A7LSOAOy0jwDstJAA7LSRAOy0kgDstJMA7LSUAOy0lQDstJYA7LSXAOy0mADstJkA7LSaAOy0mwDstJwA7LSdAOy0ngDstJ8A7LSgAOy0oQDstKIA7LSjAOy0pADstKUA7LSmAOy0pwDstKgA7LSpAOy0qgDstKsA7LSsAOy0rQDstK4A7LSvAOy0sADstLEA7LSyAOy0swDstLQA7LS1AOy0tgDstLcA7LS4AOy0uQDstLoA7LS7AOy0vADstL0A7LS+AOy0vwDstYAA7LWBAOy1ggDstYMA7LWEAOy1hQDstYYA7LWHAOy1iADstYkA7LWKAOy1iwDstYwA7LWNAOy1jgDstY8A7LWQAOy1kQDstZIA7LWTAOy1lADstZUA7LWWAOy1lwDstZgA7LWZAOy1mgDstZsA7LWcAOy1nQDstZ4A7LWfAOy1oADstaEA7LWiAOy1owDstaQA7LWlAOy1pgDstacA7LWoAOy1qQDstaoA7LWrAOy1rADsta0A7LWuAOy1rwDstbAA7LWxAOy1sgDstbMA7LW0AOy1tQDstbYA7LW3AOy1uADstbkA7LW6AOy1uwDstbwA7LW9AOy1vgDstb8A7LaAAOy2gQDstoIA7LaDAOy2hADstoUA7LaGAOy2hwDstogA7LaJAOy2igDstosA7LaMAOy2jQDsto4A7LaPAOy2kADstpEA7LaSAOy2kwDstpQA7LaVAOy2lgDstpcA7LaYAOy2mQDstpoA7LabAOy2nADstp0A7LaeAOy2nwDstqAA7LahAOy2ogDstqMA7LakAOy2pQDstqYA7LanAOy2qADstqkA7LaqAOy2qwDstqwA7LatAOy2rgDstq8A7LawAOy2sQDstrIA7LazAOy2tADstrUA7La2AOy2twDstrgA7La5AOy2ugDstr
sA7La8AOy2vQDstr4A7La/AOy3gADst4EA7LeCAOy3gwDst4QA7LeFAOy3hgDst4cA7LeIAOy3iQDst4oA7LeLAOy3jADst40A7LeOAOy3jwDst5AA7LeRAOy3kgDst5MA7LeUAOy3lQDst5YA7LeXAOy3mADst5kA7LeaAOy3mwDst5wA7LedAOy3ngDst58A7LegAOy3oQDst6IA7LejAOy3pADst6UA7LemAOy3pwDst6gA7LepAOy3qgDst6sA7LesAOy3rQDst64A7LevAOy3sADst7EA7LeyAOy3swDst7QA7Le1AOy3tgDst7cA7Le4AOy3uQDst7oA7Le7AOy3vADst70A7Le+AOy3vwDsuIAA7LiBAOy4ggDsuIMA7LiEAOy4hQDsuIYA7LiHAOy4iADsuIkA7LiKAOy4iwDsuIwA7LiNAOy4jgDsuI8A7LiQAOy4kQDsuJIA7LiTAOy4lADsuJUA7LiWAOy4lwDsuJgA7LiZAOy4mgDsuJsA7LicAOy4nQDsuJ4A7LifAOy4oADsuKEA7LiiAOy4owDsuKQA7LilAOy4pgDsuKcA7LioAOy4qQDsuKoA7LirAOy4rADsuK0A7LiuAOy4rwDsuLAA7LixAOy4sgDsuLMA7Li0AOy4tQDsuLYA7Li3AOy4uADsuLkA7Li6AOy4uwDsuLwA7Li9AOy4vgDsuL8A7LmAAOy5gQDsuYIA7LmDAOy5hADsuYUA7LmGAOy5hwDsuYgA7LmJAOy5igDsuYsA7LmMAOy5jQDsuY4A7LmPAOy5kADsuZEA7LmSAOy5kwDsuZQA7LmVAOy5lgDsuZcA7LmYAOy5mQDsuZoA7LmbAOy5nADsuZ0A7LmeAOy5nwDsuaAA7LmhAOy5ogDsuaMA7LmkAOy5pQDsuaYA7LmnAOy5qADsuakA7LmqAOy5qwDsuawA7LmtAOy5rgDsua8A7LmwAOy5sQDsubIA7LmzAOy5tADsubUA7Lm2AOy5twDsubgA7Lm5AOy5ugDsubsA7Lm8AOy5vQDsub4A7Lm/AOy6gADsuoEA7LqCAOy6gwDsuoQA7LqFAOy6hgDsuocA7LqIAOy6iQDsuooA7LqLAOy6jADsuo0A7LqOAOy6jwDsupAA7LqRAOy6kgDsupMA7LqUAOy6lQDsupYA7LqXAOy6mADsupkA7LqaAOy6mwDsupwA7LqdAOy6ngDsup8A7LqgAOy6oQDsuqIA7LqjAOy6pADsuqUA7LqmAOy6pwDsuqgA7LqpAOy6qgDsuqsA7LqsAOy6rQDsuq4A7LqvAOy6sADsurEA7LqyAOy6swDsurQA7Lq1AOy6tgDsurcA7Lq4AOy6uQDsuroA7Lq7AOy6vADsur0A7Lq+AOy6vwDsu4AA7LuBAOy7ggDsu4MA7LuEAOy7hQDsu4YA7LuHAOy7iADsu4kA7LuKAOy7iwDsu4wA7LuNAOy7jgDsu48A7LuQAOy7kQDsu5IA7LuTAOy7lADsu5UA7LuWAOy7lwDsu5gA7LuZAOy7mgDsu5sA7LucAOy7nQDsu54A7LufAOy7oADsu6EA7LuiAOy7owDsu6QA7LulAOy7pgDsu6cA7LuoAOy7qQDsu6oA7LurAOy7rADsu60A7LuuAOy7rwDsu7AA7LuxAOy7sgDsu7MA7Lu0AOy7tQDsu7YA7Lu3AOy7uADsu7kA7Lu6AOy7uwDsu7wA7Lu9AOy7vgDsu78A7LyAAOy8gQDsvIIA7LyDAOy8hADsvIUA7LyGAOy8hwDsvIgA7LyJAOy8igDsvIsA7LyMAOy8jQDsvI4A7LyPAOy8kADsvJEA7LySAOy8kwDsvJQA7LyVAOy8lgDsvJcA7LyYAOy8mQDsvJoA7LybAOy8nADsvJ0A7LyeAOy8nwDsvKAA7LyhAOy8ogDsvKMA7LykAOy8pQDsvKYA7LynAOy8qADsvKkA7LyqAOy8qwDsvKwA7LytAOy8rgDsvK8A7LywAOy8sQDsvLIA7LyzAOy8tADsvLUA7Ly2AOy8twDsvLgA7Ly5AOy8ugDsvLsA7Ly8AOy8vQDsvL4A7Ly/AOy9gADsvYEA7L2CAOy9gwDsvYQA7L2FAOy9hgDsvYcA7L2IAOy9iQDsvYoA7L2LAOy9jADsvY0A7L2OAOy9jwDsvZAA7L2RAOy9kgDsvZMA7L2UAOy9lQDsvZYA7L2XAOy9mADsvZkA7L2aAOy9mwDsvZwA7L2dAOy9ngDsvZ8A7L2gAOy9oQDsvaIA7L2jAOy9pADsvaUA7L2mAOy9pwDsvagA7L2pAOy9qgDsvasA7L2sAOy9rQDsva4A7L2vAOy9sADsvbEA7L2yAOy9swDsvbQA7L21AOy9tgDsvbcA7L24AOy9uQDsvboA7L27AOy9vADsvb0A7L2+AOy9vwDsvoAA7L6BAOy+ggDsvoMA7L6EAOy+hQDsvoYA7L6HAOy+iADsvokA7L6KAOy+iwDsvowA7L6NAOy+jgDsvo8A7L6QAOy+kQDsvpIA7L6TAOy+lADsvpUA7L6WAOy+lwDsvpgA7L6ZAOy+mgDsvpsA7L6cAOy+nQDsvp4A7L6fAOy+oADsvqEA7L6iAOy+owDsvqQA7L6lAOy+pgDsvqcA7L6oAOy+qQDsvqoA7L6rAOy+rADsvq0A7L6uAOy+rwDsvrAA7L6xAOy+sgDsvrMA7L60AOy+tQDsvrYA7L63AOy+uADsvrkA7L66AOy+uwDsvrwA7L69AOy+vgDsvr8A7L+AAOy/gQDsv4IA7L+DAOy/hADsv4UA7L+GAOy/hwDsv4gA7L+JAOy/igDsv4sA7L+MAOy/jQDsv44A7L+PAOy/kADsv5EA7L+SAOy/kwDsv5QA7L+VAOy/lgDsv5cA7L+YAOy/mQDsv5oA7L+bAOy/nADsv50A7L+eAOy/nwDsv6AA7L+hAOy/ogDsv6MA7L+kAOy/pQDsv6YA7L+nAOy/qADsv6kA7L+qAOy/qwDsv6wA7L+tAOy/rgDsv68A7L+wAOy/sQDsv7IA7L+zAOy/tADsv7UA7L+2AOy/twDsv7gA7L+5AOy/ugDsv7sA7L+8AOy/vQDsv74A7L+/AO2AgADtgIEA7YCCAO2AgwDtgIQA7YCFAO2AhgDtgIcA7YCIAO2AiQDtgIoA7YCLAO2AjADtgI0A7YCOAO2AjwDtgJAA7YCRAO2AkgDtgJMA7YCUAO2AlQDtgJYA7YCXAO2AmADtgJkA7YCaAO2AmwDtgJwA7YCdAO2AngDtgJ8A7YCgAO2AoQDtgKIA7YCjAO2ApADtgKUA7YCmAO2ApwDtgKgA7YCpAO2AqgDtgKsA7YCsAO2ArQDtgK4A7YCvAO2AsADtgLEA7YCyAO2AswDtgLQA7YC1AO2AtgDtgLcA7YC4AO2AuQDtgLoA7YC7AO2AvADtgL0A7YC+AO2AvwDtgYAA7YGBAO2BggDtgYMA7YGEAO2BhQDtgYYA7YGHAO2BiADtgYkA7YGKAO2BiwDtgYwA7YGNAO2BjgDtgY8A7YGQAO2BkQDtgZIA7YGTAO2BlADtgZUA7
YGWAO2BlwDtgZgA7YGZAO2BmgDtgZsA7YGcAO2BnQDtgZ4A7YGfAO2BoADtgaEA7YGiAO2BowDtgaQA7YGlAO2BpgDtgacA7YGoAO2BqQDtgaoA7YGrAO2BrADtga0A7YGuAO2BrwDtgbAA7YGxAO2BsgDtgbMA7YG0AO2BtQDtgbYA7YG3AO2BuADtgbkA7YG6AO2BuwDtgbwA7YG9AO2BvgDtgb8A7YKAAO2CgQDtgoIA7YKDAO2ChADtgoUA7YKGAO2ChwDtgogA7YKJAO2CigDtgosA7YKMAO2CjQDtgo4A7YKPAO2CkADtgpEA7YKSAO2CkwDtgpQA7YKVAO2ClgDtgpcA7YKYAO2CmQDtgpoA7YKbAO2CnADtgp0A7YKeAO2CnwDtgqAA7YKhAO2CogDtgqMA7YKkAO2CpQDtgqYA7YKnAO2CqADtgqkA7YKqAO2CqwDtgqwA7YKtAO2CrgDtgq8A7YKwAO2CsQDtgrIA7YKzAO2CtADtgrUA7YK2AO2CtwDtgrgA7YK5AO2CugDtgrsA7YK8AO2CvQDtgr4A7YK/AO2DgADtg4EA7YOCAO2DgwDtg4QA7YOFAO2DhgDtg4cA7YOIAO2DiQDtg4oA7YOLAO2DjADtg40A7YOOAO2DjwDtg5AA7YORAO2DkgDtg5MA7YOUAO2DlQDtg5YA7YOXAO2DmADtg5kA7YOaAO2DmwDtg5wA7YOdAO2DngDtg58A7YOgAO2DoQDtg6IA7YOjAO2DpADtg6UA7YOmAO2DpwDtg6gA7YOpAO2DqgDtg6sA7YOsAO2DrQDtg64A7YOvAO2DsADtg7EA7YOyAO2DswDtg7QA7YO1AO2DtgDtg7cA7YO4AO2DuQDtg7oA7YO7AO2DvADtg70A7YO+AO2DvwDthIAA7YSBAO2EggDthIMA7YSEAO2EhQDthIYA7YSHAO2EiADthIkA7YSKAO2EiwDthIwA7YSNAO2EjgDthI8A7YSQAO2EkQDthJIA7YSTAO2ElADthJUA7YSWAO2ElwDthJgA7YSZAO2EmgDthJsA7YScAO2EnQDthJ4A7YSfAO2EoADthKEA7YSiAO2EowDthKQA7YSlAO2EpgDthKcA7YSoAO2EqQDthKoA7YSrAO2ErADthK0A7YSuAO2ErwDthLAA7YSxAO2EsgDthLMA7YS0AO2EtQDthLYA7YS3AO2EuADthLkA7YS6AO2EuwDthLwA7YS9AO2EvgDthL8A7YWAAO2FgQDthYIA7YWDAO2FhADthYUA7YWGAO2FhwDthYgA7YWJAO2FigDthYsA7YWMAO2FjQDthY4A7YWPAO2FkADthZEA7YWSAO2FkwDthZQA7YWVAO2FlgDthZcA7YWYAO2FmQDthZoA7YWbAO2FnADthZ0A7YWeAO2FnwDthaAA7YWhAO2FogDthaMA7YWkAO2FpQDthaYA7YWnAO2FqADthakA7YWqAO2FqwDthawA7YWtAO2FrgDtha8A7YWwAO2FsQDthbIA7YWzAO2FtADthbUA7YW2AO2FtwDthbgA7YW5AO2FugDthbsA7YW8AO2FvQDthb4A7YW/AO2GgADthoEA7YaCAO2GgwDthoQA7YaFAO2GhgDthocA7YaIAO2GiQDthooA7YaLAO2GjADtho0A7YaOAO2GjwDthpAA7YaRAO2GkgDthpMA7YaUAO2GlQDthpYA7YaXAO2GmADthpkA7YaaAO2GmwDthpwA7YadAO2GngDthp8A7YagAO2GoQDthqIA7YajAO2GpADthqUA7YamAO2GpwDthqgA7YapAO2GqgDthqsA7YasAO2GrQDthq4A7YavAO2GsADthrEA7YayAO2GswDthrQA7Ya1AO2GtgDthrcA7Ya4AO2GuQDthroA7Ya7AO2GvADthr0A7Ya+AO2GvwDth4AA7YeBAO2HggDth4MA7YeEAO2HhQDth4YA7YeHAO2HiADth4kA7YeKAO2HiwDth4wA7YeNAO2HjgDth48A7YeQAO2HkQDth5IA7YeTAO2HlADth5UA7YeWAO2HlwDth5gA7YeZAO2HmgDth5sA7YecAO2HnQDth54A7YefAO2HoADth6EA7YeiAO2HowDth6QA7YelAO2HpgDth6cA7YeoAO2HqQDth6oA7YerAO2HrADth60A7YeuAO2HrwDth7AA7YexAO2HsgDth7MA7Ye0AO2HtQDth7YA7Ye3AO2HuADth7kA7Ye6AO2HuwDth7wA7Ye9AO2HvgDth78A7YiAAO2IgQDtiIIA7YiDAO2IhADtiIUA7YiGAO2IhwDtiIgA7YiJAO2IigDtiIsA7YiMAO2IjQDtiI4A7YiPAO2IkADtiJEA7YiSAO2IkwDtiJQA7YiVAO2IlgDtiJcA7YiYAO2ImQDtiJoA7YibAO2InADtiJ0A7YieAO2InwDtiKAA7YihAO2IogDtiKMA7YikAO2IpQDtiKYA7YinAO2IqADtiKkA7YiqAO2IqwDtiKwA7YitAO2IrgDtiK8A7YiwAO2IsQDtiLIA7YizAO2ItADtiLUA7Yi2AO2ItwDtiLgA7Yi5AO2IugDtiLsA7Yi8AO2IvQDtiL4A7Yi/AO2JgADtiYEA7YmCAO2JgwDtiYQA7YmFAO2JhgDtiYcA7YmIAO2JiQDtiYoA7YmLAO2JjADtiY0A7YmOAO2JjwDtiZAA7YmRAO2JkgDtiZMA7YmUAO2JlQDtiZYA7YmXAO2JmADtiZkA7YmaAO2JmwDtiZwA7YmdAO2JngDtiZ8A7YmgAO2JoQDtiaIA7YmjAO2JpADtiaUA7YmmAO2JpwDtiagA7YmpAO2JqgDtiasA7YmsAO2JrQDtia4A7YmvAO2JsADtibEA7YmyAO2JswDtibQA7Ym1AO2JtgDtibcA7Ym4AO2JuQDtiboA7Ym7AO2JvADtib0A7Ym+AO2JvwDtioAA7YqBAO2KggDtioMA7YqEAO2KhQDtioYA7YqHAO2KiADtiokA7YqKAO2KiwDtiowA7YqNAO2KjgDtio8A7YqQAO2KkQDtipIA7YqTAO2KlADtipUA7YqWAO2KlwDtipgA7YqZAO2KmgDtipsA7YqcAO2KnQDtip4A7YqfAO2KoADtiqEA7YqiAO2KowDtiqQA7YqlAO2KpgDtiqcA7YqoAO2KqQDtiqoA7YqrAO2KrADtiq0A7YquAO2KrwDtirAA7YqxAO2KsgDtirMA7Yq0AO2KtQDtirYA7Yq3AO2KuADtirkA7Yq6AO2KuwDtirwA7Yq9AO2KvgDtir8A7YuAAO2LgQDti4IA7YuDAO2LhADti4UA7YuGAO2LhwDti4gA7YuJAO2LigDti4sA7YuMAO2LjQDti44A7YuPAO2LkADti5EA7YuSAO2LkwDti5QA7YuVAO2LlgDti5cA7YuYAO2LmQDti5oA7YubAO2LnADti50A7YueAO2LnwDti6AA7YuhAO2LogDti6MA7YukAO2LpQDti6YA7YunAO2LqADti6kA7YuqAO2LqwDti6wA7YutAO2LrgDti68A7Yuw
AO2LsQDti7IA7YuzAO2LtADti7UA7Yu2AO2LtwDti7gA7Yu5AO2LugDti7sA7Yu8AO2LvQDti74A7Yu/AO2MgADtjIEA7YyCAO2MgwDtjIQA7YyFAO2MhgDtjIcA7YyIAO2MiQDtjIoA7YyLAO2MjADtjI0A7YyOAO2MjwDtjJAA7YyRAO2MkgDtjJMA7YyUAO2MlQDtjJYA7YyXAO2MmADtjJkA7YyaAO2MmwDtjJwA7YydAO2MngDtjJ8A7YygAO2MoQDtjKIA7YyjAO2MpADtjKUA7YymAO2MpwDtjKgA7YypAO2MqgDtjKsA7YysAO2MrQDtjK4A7YyvAO2MsADtjLEA7YyyAO2MswDtjLQA7Yy1AO2MtgDtjLcA7Yy4AO2MuQDtjLoA7Yy7AO2MvADtjL0A7Yy+AO2MvwDtjYAA7Y2BAO2NggDtjYMA7Y2EAO2NhQDtjYYA7Y2HAO2NiADtjYkA7Y2KAO2NiwDtjYwA7Y2NAO2NjgDtjY8A7Y2QAO2NkQDtjZIA7Y2TAO2NlADtjZUA7Y2WAO2NlwDtjZgA7Y2ZAO2NmgDtjZsA7Y2cAO2NnQDtjZ4A7Y2fAO2NoADtjaEA7Y2iAO2NowDtjaQA7Y2lAO2NpgDtjacA7Y2oAO2NqQDtjaoA7Y2rAO2NrADtja0A7Y2uAO2NrwDtjbAA7Y2xAO2NsgDtjbMA7Y20AO2NtQDtjbYA7Y23AO2NuADtjbkA7Y26AO2NuwDtjbwA7Y29AO2NvgDtjb8A7Y6AAO2OgQDtjoIA7Y6DAO2OhADtjoUA7Y6GAO2OhwDtjogA7Y6JAO2OigDtjosA7Y6MAO2OjQDtjo4A7Y6PAO2OkADtjpEA7Y6SAO2OkwDtjpQA7Y6VAO2OlgDtjpcA7Y6YAO2OmQDtjpoA7Y6bAO2OnADtjp0A7Y6eAO2OnwDtjqAA7Y6hAO2OogDtjqMA7Y6kAO2OpQDtjqYA7Y6nAO2OqADtjqkA7Y6qAO2OqwDtjqwA7Y6tAO2OrgDtjq8A7Y6wAO2OsQDtjrIA7Y6zAO2OtADtjrUA7Y62AO2OtwDtjrgA7Y65AO2OugDtjrsA7Y68AO2OvQDtjr4A7Y6/AO2PgADtj4EA7Y+CAO2PgwDtj4QA7Y+FAO2PhgDtj4cA7Y+IAO2PiQDtj4oA7Y+LAO2PjADtj40A7Y+OAO2PjwDtj5AA7Y+RAO2PkgDtj5MA7Y+UAO2PlQDtj5YA7Y+XAO2PmADtj5kA7Y+aAO2PmwDtj5wA7Y+dAO2PngDtj58A7Y+gAO2PoQDtj6IA7Y+jAO2PpADtj6UA7Y+mAO2PpwDtj6gA7Y+pAO2PqgDtj6sA7Y+sAO2PrQDtj64A7Y+vAO2PsADtj7EA7Y+yAO2PswDtj7QA7Y+1AO2PtgDtj7cA7Y+4AO2PuQDtj7oA7Y+7AO2PvADtj70A7Y++AO2PvwDtkIAA7ZCBAO2QggDtkIMA7ZCEAO2QhQDtkIYA7ZCHAO2QiADtkIkA7ZCKAO2QiwDtkIwA7ZCNAO2QjgDtkI8A7ZCQAO2QkQDtkJIA7ZCTAO2QlADtkJUA7ZCWAO2QlwDtkJgA7ZCZAO2QmgDtkJsA7ZCcAO2QnQDtkJ4A7ZCfAO2QoADtkKEA7ZCiAO2QowDtkKQA7ZClAO2QpgDtkKcA7ZCoAO2QqQDtkKoA7ZCrAO2QrADtkK0A7ZCuAO2QrwDtkLAA7ZCxAO2QsgDtkLMA7ZC0AO2QtQDtkLYA7ZC3AO2QuADtkLkA7ZC6AO2QuwDtkLwA7ZC9AO2QvgDtkL8A7ZGAAO2RgQDtkYIA7ZGDAO2RhADtkYUA7ZGGAO2RhwDtkYgA7ZGJAO2RigDtkYsA7ZGMAO2RjQDtkY4A7ZGPAO2RkADtkZEA7ZGSAO2RkwDtkZQA7ZGVAO2RlgDtkZcA7ZGYAO2RmQDtkZoA7ZGbAO2RnADtkZ0A7ZGeAO2RnwDtkaAA7ZGhAO2RogDtkaMA7ZGkAO2RpQDtkaYA7ZGnAO2RqADtkakA7ZGqAO2RqwDtkawA7ZGtAO2RrgDtka8A7ZGwAO2RsQDtkbIA7ZGzAO2RtADtkbUA7ZG2AO2RtwDtkbgA7ZG5AO2RugDtkbsA7ZG8AO2RvQDtkb4A7ZG/AO2SgADtkoEA7ZKCAO2SgwDtkoQA7ZKFAO2ShgDtkocA7ZKIAO2SiQDtkooA7ZKLAO2SjADtko0A7ZKOAO2SjwDtkpAA7ZKRAO2SkgDtkpMA7ZKUAO2SlQDtkpYA7ZKXAO2SmADtkpkA7ZKaAO2SmwDtkpwA7ZKdAO2SngDtkp8A7ZKgAO2SoQDtkqIA7ZKjAO2SpADtkqUA7ZKmAO2SpwDtkqgA7ZKpAO2SqgDtkqsA7ZKsAO2SrQDtkq4A7ZKvAO2SsADtkrEA7ZKyAO2SswDtkrQA7ZK1AO2StgDtkrcA7ZK4AO2SuQDtkroA7ZK7AO2SvADtkr0A7ZK+AO2SvwDtk4AA7ZOBAO2TggDtk4MA7ZOEAO2ThQDtk4YA7ZOHAO2TiADtk4kA7ZOKAO2TiwDtk4wA7ZONAO2TjgDtk48A7ZOQAO2TkQDtk5IA7ZOTAO2TlADtk5UA7ZOWAO2TlwDtk5gA7ZOZAO2TmgDtk5sA7ZOcAO2TnQDtk54A7ZOfAO2ToADtk6EA7ZOiAO2TowDtk6QA7ZOlAO2TpgDtk6cA7ZOoAO2TqQDtk6oA7ZOrAO2TrADtk60A7ZOuAO2TrwDtk7AA7ZOxAO2TsgDtk7MA7ZO0AO2TtQDtk7YA7ZO3AO2TuADtk7kA7ZO6AO2TuwDtk7wA7ZO9AO2TvgDtk78A7ZSAAO2UgQDtlIIA7ZSDAO2UhADtlIUA7ZSGAO2UhwDtlIgA7ZSJAO2UigDtlIsA7ZSMAO2UjQDtlI4A7ZSPAO2UkADtlJEA7ZSSAO2UkwDtlJQA7ZSVAO2UlgDtlJcA7ZSYAO2UmQDtlJoA7ZSbAO2UnADtlJ0A7ZSeAO2UnwDtlKAA7ZShAO2UogDtlKMA7ZSkAO2UpQDtlKYA7ZSnAO2UqADtlKkA7ZSqAO2UqwDtlKwA7ZStAO2UrgDtlK8A7ZSwAO2UsQDtlLIA7ZSzAO2UtADtlLUA7ZS2AO2UtwDtlLgA7ZS5AO2UugDtlLsA7ZS8AO2UvQDtlL4A7ZS/AO2VgADtlYEA7ZWCAO2VgwDtlYQA7ZWFAO2VhgDtlYcA7ZWIAO2ViQDtlYoA7ZWLAO2VjADtlY0A7ZWOAO2VjwDtlZAA7ZWRAO2VkgDtlZMA7ZWUAO2VlQDtlZYA7ZWXAO2VmADtlZkA7ZWaAO2VmwDtlZwA7ZWdAO2VngDtlZ8A7ZWgAO2VoQDtlaIA7ZWjAO2VpADtlaUA7ZWmAO2VpwDtlagA7ZWpAO2VqgDtlasA7ZWsAO2VrQDtla4A7ZWvAO2VsADtlbEA7ZWyAO2VswDtlbQA7ZW1AO2VtgDtlbcA7ZW4AO2VuQDtlboA7ZW7AO2VvADtlb0A7ZW+AO2VvwDtloAA7ZaBAO2WggDtloMA7ZaEAO2WhQDtloYA7ZaHAO2WiADtlokA7ZaKAO2
WiwDtlowA7ZaNAO2WjgDtlo8A7ZaQAO2WkQDtlpIA7ZaTAO2WlADtlpUA7ZaWAO2WlwDtlpgA7ZaZAO2WmgDtlpsA7ZacAO2WnQDtlp4A7ZafAO2WoADtlqEA7ZaiAO2WowDtlqQA7ZalAO2WpgDtlqcA7ZaoAO2WqQDtlqoA7ZarAO2WrADtlq0A7ZauAO2WrwDtlrAA7ZaxAO2WsgDtlrMA7Za0AO2WtQDtlrYA7Za3AO2WuADtlrkA7Za6AO2WuwDtlrwA7Za9AO2WvgDtlr8A7ZeAAO2XgQDtl4IA7ZeDAO2XhADtl4UA7ZeGAO2XhwDtl4gA7ZeJAO2XigDtl4sA7ZeMAO2XjQDtl44A7ZePAO2XkADtl5EA7ZeSAO2XkwDtl5QA7ZeVAO2XlgDtl5cA7ZeYAO2XmQDtl5oA7ZebAO2XnADtl50A7ZeeAO2XnwDtl6AA7ZehAO2XogDtl6MA7ZekAO2XpQDtl6YA7ZenAO2XqADtl6kA7ZeqAO2XqwDtl6wA7ZetAO2XrgDtl68A7ZewAO2XsQDtl7IA7ZezAO2XtADtl7UA7Ze2AO2XtwDtl7gA7Ze5AO2XugDtl7sA7Ze8AO2XvQDtl74A7Ze/AO2YgADtmIEA7ZiCAO2YgwDtmIQA7ZiFAO2YhgDtmIcA7ZiIAO2YiQDtmIoA7ZiLAO2YjADtmI0A7ZiOAO2YjwDtmJAA7ZiRAO2YkgDtmJMA7ZiUAO2YlQDtmJYA7ZiXAO2YmADtmJkA7ZiaAO2YmwDtmJwA7ZidAO2YngDtmJ8A7ZigAO2YoQDtmKIA7ZijAO2YpADtmKUA7ZimAO2YpwDtmKgA7ZipAO2YqgDtmKsA7ZisAO2YrQDtmK4A7ZivAO2YsADtmLEA7ZiyAO2YswDtmLQA7Zi1AO2YtgDtmLcA7Zi4AO2YuQDtmLoA7Zi7AO2YvADtmL0A7Zi+AO2YvwDtmYAA7ZmBAO2ZggDtmYMA7ZmEAO2ZhQDtmYYA7ZmHAO2ZiADtmYkA7ZmKAO2ZiwDtmYwA7ZmNAO2ZjgDtmY8A7ZmQAO2ZkQDtmZIA7ZmTAO2ZlADtmZUA7ZmWAO2ZlwDtmZgA7ZmZAO2ZmgDtmZsA7ZmcAO2ZnQDtmZ4A7ZmfAO2ZoADtmaEA7ZmiAO2ZowDtmaQA7ZmlAO2ZpgDtmacA7ZmoAO2ZqQDtmaoA7ZmrAO2ZrADtma0A7ZmuAO2ZrwDtmbAA7ZmxAO2ZsgDtmbMA7Zm0AO2ZtQDtmbYA7Zm3AO2ZuADtmbkA7Zm6AO2ZuwDtmbwA7Zm9AO2ZvgDtmb8A7ZqAAO2agQDtmoIA7ZqDAO2ahADtmoUA7ZqGAO2ahwDtmogA7ZqJAO2aigDtmosA7ZqMAO2ajQDtmo4A7ZqPAO2akADtmpEA7ZqSAO2akwDtmpQA7ZqVAO2algDtmpcA7ZqYAO2amQDtmpoA7ZqbAO2anADtmp0A7ZqeAO2anwDtmqAA7ZqhAO2aogDtmqMA7ZqkAO2apQDtmqYA7ZqnAO2aqADtmqkA7ZqqAO2aqwDtmqwA7ZqtAO2argDtmq8A7ZqwAO2asQDtmrIA7ZqzAO2atADtmrUA7Zq2AO2atwDtmrgA7Zq5AO2augDtmrsA7Zq8AO2avQDtmr4A7Zq/AO2bgADtm4EA7ZuCAO2bgwDtm4QA7ZuFAO2bhgDtm4cA7ZuIAO2biQDtm4oA7ZuLAO2bjADtm40A7ZuOAO2bjwDtm5AA7ZuRAO2bkgDtm5MA7ZuUAO2blQDtm5YA7ZuXAO2bmADtm5kA7ZuaAO2bmwDtm5wA7ZudAO2bngDtm58A7ZugAO2boQDtm6IA7ZujAO2bpADtm6UA7ZumAO2bpwDtm6gA7ZupAO2bqgDtm6sA7ZusAO2brQDtm64A7ZuvAO2bsADtm7EA7ZuyAO2bswDtm7QA7Zu1AO2btgDtm7cA7Zu4AO2buQDtm7oA7Zu7AO2bvADtm70A7Zu+AO2bvwDtnIAA7ZyBAO2cggDtnIMA7ZyEAO2chQDtnIYA7ZyHAO2ciADtnIkA7ZyKAO2ciwDtnIwA7ZyNAO2cjgDtnI8A7ZyQAO2ckQDtnJIA7ZyTAO2clADtnJUA7ZyWAO2clwDtnJgA7ZyZAO2cmgDtnJsA7ZycAO2cnQDtnJ4A7ZyfAO2coADtnKEA7ZyiAO2cowDtnKQA7ZylAO2cpgDtnKcA7ZyoAO2cqQDtnKoA7ZyrAO2crADtnK0A7ZyuAO2crwDtnLAA7ZyxAO2csgDtnLMA7Zy0AO2ctQDtnLYA7Zy3AO2cuADtnLkA7Zy6AO2cuwDtnLwA7Zy9AO2cvgDtnL8A7Z2AAO2dgQDtnYIA7Z2DAO2dhADtnYUA7Z2GAO2dhwDtnYgA7Z2JAO2digDtnYsA7Z2MAO2djQDtnY4A7Z2PAO2dkADtnZEA7Z2SAO2dkwDtnZQA7Z2VAO2dlgDtnZcA7Z2YAO2dmQDtnZoA7Z2bAO2dnADtnZ0A7Z2eAO2dnwDtnaAA7Z2hAO2dogDtnaMA7Z2kAO2dpQDtnaYA7Z2nAO2dqADtnakA7Z2qAO2dqwDtnawA7Z2tAO2drgDtna8A7Z2wAO2dsQDtnbIA7Z2zAO2dtADtnbUA7Z22AO2dtwDtnbgA7Z25AO2dugDtnbsA7Z28AO2dvQDtnb4A7Z2/AO2egADtnoEA7Z6CAO2egwDtnoQA7Z6FAO2ehgDtnocA7Z6IAO2eiQDtnooA7Z6LAO2ejADtno0A7Z6OAO2ejwDtnpAA7Z6RAO2ekgDtnpMA7Z6UAO2elQDtnpYA7Z6XAO2emADtnpkA7Z6aAO2emwDtnpwA7Z6dAO2engDtnp8A7Z6gAO2eoQDtnqIA7Z6jAPCRgpoA8JGCnADwkYKrAPCRhK4A8JGErwDwkY2LAPCRjYwA8JGSuwDwkZK8APCRkr4A8JGWugDwkZa7APCdhZfwnYWlAPCdhZjwnYWlAPCdhZjwnYWl8J2FrgDwnYWY8J2FpfCdha8A8J2FmPCdhaXwnYWwAPCdhZjwnYWl8J2FsQDwnYWY8J2FpfCdhbIA8J2GufCdhaUA8J2GufCdhaXwnYWuAPCdhrnwnYWl8J2FrwDwnYa68J2FpQDwnYa68J2FpfCdha4A8J2GuvCdhaXwnYWvAPCghKIA8KCUnADwoJSlAPCglYsA8KCYugDwoKCEAPCgo54A8KCorADwoK2jAPChk6QA8KGaqADwoZuqAPChp4gA8KGsmADwobSLAPCht6QA8KG3pgDwooaDAPCihp8A8KKMsQDwopuUAPCioYQA8KKhigDwoqyMAPCir7EA8KOAigDwo4q4APCjjZ8A8KOOkwDwo46cAPCjj4MA8KOPlQDwo5GtAPCjmqMA8KOipwDwo6qNAPCjq7oA8KOyvADwo7SeAPCju5EA8KO9ngDwo76OAPCkiaMA8KSLrgDwpI6rAPCkmIgA8KSctQDwpKCUAPCksLYA8KSykgDwpL6hAPCkvrgA8KWBhADwpYOyAPClg7MA8KWEmQDwpYSzAPCliYkA8KWQnQDwpZimAPClmpoA8K
WbhQDwpaW8APClqqcA8KWuqwDwpbKAAPCls5AA8KW+hgDwpoeaAPCmiKgA8KaJhwDwpouZAPCmjL4A8KaTmgDwppSjAPCmlqgA8KaepwDwpp61APCmrLwA8KawtgDwprOVAPCmtasA8Ka8rADwpr6xAPCng5IA8KePigDwp5mnAPCnoq4A8KelpgDwp7KoAPCnu5MA8Ke8rwDwqJeSAPCol60A8KicrgDwqK+6APCotbcA8KmFhQDwqYefAPCpiJoA8KmQigDwqZKWAPCplrYA8KmssADwqoOOAPCqhIUA8KqIjgDwqoqRAPCqjpIA8KqYgAA=" + }, + { + "type": "Replace", + "pattern": { + "Regex": " {2,}" + }, + "content": " " + } + ] + }, + "pre_tokenizer": { + "type": "Sequence", + "pretokenizers": [ + { + "type": "WhitespaceSplit" + }, + { + "type": "Metaspace", + "replacement": "▁", + "prepend_scheme": "always", + "split": true + } + ] + }, + "post_processor": { + "type": "TemplateProcessing", + "single": [ + { + "Sequence": { + "id": "A", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "", + "type_id": 0 + } + } + ], + "pair": [ + { + "Sequence": { + "id": "A", + "type_id": 0 + } + }, + { + "Sequence": { + "id": "B", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "", + "type_id": 0 + } + } + ], + "special_tokens": { + "": { + "id": "", + "ids": [ + 1 + ], + "tokens": [ + "" + ] + } + } + }, + "decoder": { + "type": "Metaspace", + "replacement": "▁", + "prepend_scheme": "always", + "split": true + }, + "model": { + "type": "Unigram", + "unk_id": 105, + "vocab": [ + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + 
], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + -100.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + ".", + -3.1310248374938965 + ], + [ + ",", + -3.3096930980682373 + ], + [ + "▁the", + -3.329261541366577 + ], + [ + "▁", + -3.71622633934021 + ], + [ + "▁and", + -3.7503232955932617 + ], + [ + "▁to", + -3.8084170818328857 + ], + [ + "▁of", + -3.947807788848877 + ], + [ + "▁a", + -4.054682731628418 + ], + [ + "▁in", + -4.340486526489258 + ], + [ + "s", + -4.450276851654053 + ], + [ + "▁is", + -4.6409502029418945 + ], + [ + "▁for", + -4.738903999328613 + ], + [ + "▁you", + -4.900545597076416 + ], + [ + "▁that", + -4.957766532897949 + ], + [ + "-", + -4.9944891929626465 + ], + [ + "▁with", + -5.030882358551025 + ], + [ + "’", + -5.101049900054932 + ], + [ + "▁on", + -5.133164405822754 + ], + [ + "▁I", + -5.223630905151367 + ], + [ + "▁it", + -5.293469429016113 + ], + [ + "▁are", + -5.301150321960449 + ], + [ + "▁your", + -5.400738716125488 + ], + [ + "▁be", + -5.4154863357543945 + ], + [ + "▁as", + -5.503668785095215 + ], + [ + "'", + -5.531991481781006 + ], + [ + "▁or", + -5.606969833374023 + ], + [ + "▁have", + -5.638710975646973 + ], + [ + "▁at", + -5.668367385864258 + ], + [ + "▁from", + -5.690201282501221 + ], + [ + "▁this", + -5.707475185394287 + ], + [ + "▁can", + -5.715296268463135 + ], + [ + "▁will", + -5.736892223358154 + ], + [ + "▁The", + -5.760174751281738 + ], + [ + "▁was", + -5.787508964538574 + ], + [ + "▁by", + -5.7888922691345215 + ], + [ + "▁an", + -5.899631500244141 + ], + [ + "▁(", + -5.9247822761535645 + ], + [ + "t", + -5.987702369689941 + ], + [ + "▁we", + -6.008418560028076 + ], + [ + "▁not", + -6.010701656341553 + ], + [ + "!", + -6.073370933532715 + ], + [ + "▁has", + -6.112986087799072 + ], + [ + "▁all", + -6.145323276519775 + ], + [ + "▁our", + -6.172176361083984 + ], + [ + ":", + -6.200052261352539 + ], + [ + "?", + -6.233884811401367 + ], + [ + "▁their", + -6.249642372131348 + ], + [ + "▁more", + -6.250922203063965 + ], + [ + "▁but", + -6.291007995605469 + ], + [ + "▁one", + -6.3344645500183105 + ], + [ + "▁they", + -6.338380336761475 + ], + [ + ")", + -6.369032382965088 + ], + [ + "The", + -6.370370864868164 + ], + [ + "▁about", + -6.416199684143066 + ], + [ + "▁my", + -6.441829681396484 + ], + [ + "▁which", + -6.478437900543213 + ], + [ + "▁also", + -6.492162227630615 + ], + [ + "▁up", + -6.499144077301025 + ], + [ + "▁out", + -6.5046491622924805 + ], + [ + "▁time", + -6.52347993850708 + ], + [ + "▁so", + -6.537102699279785 + ], + [ + "▁It", + -6.704108238220215 + ], + [ + "▁his", + -6.705111503601074 + ], + [ + "▁who", + -6.707167148590088 + ], + [ + "▁do", + -6.717360019683838 + ], + [ + "▁like", + -6.723618507385254 + ], + [ + "▁when", + -6.729841232299805 + ], + [ + "▁been", + -6.74003791809082 + ], + [ + "▁if", + -6.7455220222473145 + ], + [ + "▁other", + -6.759890556335449 + ], + [ + "▁new", + -6.765444755554199 + ], + [ + "▁he", + -6.7865681648254395 + ], + [ + "▁get", + -6.794158458709717 + ], + [ + "▁what", + -6.802104949951172 + ], + [ + "▁some", + -6.826183795928955 + ], + [ + "▁This", + -6.83860445022583 + ], + [ + "▁them", + -6.8505167961120605 + ], + [ + "▁We", + -6.85291862487793 + ], + [ + "▁“", + -6.858825206756592 + ], + [ + "▁there", + -6.865011215209961 + ], + [ + "I", + -6.8684492111206055 + ], + [ + "▁just", + -6.874081134796143 + ], + [ + "▁any", + 
-6.879689693450928 + ], + [ + "▁into", + -6.887314319610596 + ], + [ + "/", + -6.896563529968262 + ], + [ + "▁would", + -6.8974289894104 + ], + [ + "▁make", + -6.906951904296875 + ], + [ + "\"", + -6.912654876708984 + ], + [ + "▁were", + -6.934229373931885 + ], + [ + "▁had", + -6.952000141143799 + ], + [ + "▁than", + -6.96122407913208 + ], + [ + "▁\"", + -6.969889163970947 + ], + [ + "▁how", + -6.981604099273682 + ], + [ + "▁people", + -6.992509841918945 + ], + [ + "▁work", + -7.00425910949707 + ], + [ + "▁A", + -7.005988597869873 + ], + [ + "▁its", + -7.008430480957031 + ], + [ + "▁over", + -7.009399890899658 + ], + [ + "▁most", + -7.033555507659912 + ], + [ + ";", + -7.038615703582764 + ], + [ + "▁use", + -7.047380447387695 + ], + [ + "m", + -7.050930500030518 + ], + [ + "▁only", + -7.051677703857422 + ], + [ + "▁well", + -7.055868148803711 + ], + [ + "▁first", + -7.056195259094238 + ], + [ + "▁–", + -7.0657758712768555 + ], + [ + "▁me", + -7.068888187408447 + ], + [ + "▁us", + -7.0706787109375 + ], + [ + "▁her", + -7.072997093200684 + ], + [ + "re", + -7.0779924392700195 + ], + [ + "▁need", + -7.094850540161133 + ], + [ + "▁may", + -7.1019368171691895 + ], + [ + "▁these", + -7.131898880004883 + ], + [ + "▁no", + -7.1435546875 + ], + [ + "▁very", + -7.169593334197998 + ], + [ + "▁In", + -7.17523193359375 + ], + [ + "▁many", + -7.184239864349365 + ], + [ + "▁through", + -7.192859649658203 + ], + [ + "▁help", + -7.199311256408691 + ], + [ + "▁You", + -7.202390193939209 + ], + [ + "”", + -7.216441631317139 + ], + [ + "▁two", + -7.224501132965088 + ], + [ + "▁best", + -7.22633695602417 + ], + [ + "▁way", + -7.228824615478516 + ], + [ + "▁years", + -7.249144554138184 + ], + [ + "▁year", + -7.277033805847168 + ], + [ + "▁-", + -7.302977085113525 + ], + [ + "▁good", + -7.311716079711914 + ], + [ + "▁know", + -7.319109916687012 + ], + [ + "▁see", + -7.327856063842773 + ], + [ + "▁then", + -7.33629846572876 + ], + [ + "▁home", + -7.347983360290527 + ], + [ + "▁now", + -7.351974964141846 + ], + [ + "▁If", + -7.352500915527344 + ], + [ + "▁where", + -7.359532833099365 + ], + [ + "▁day", + -7.368444442749023 + ], + [ + "▁said", + -7.372145175933838 + ], + [ + "▁after", + -7.37817907333374 + ], + [ + "▁want", + -7.381801605224609 + ], + [ + "▁should", + -7.388603687286377 + ], + [ + "▁back", + -7.389666080474853 + ], + [ + "▁take", + -7.396261215209961 + ], + [ + "▁much", + -7.399851322174072 + ], + [ + ").", + -7.404089450836182 + ], + [ + "A", + -7.411473751068115 + ], + [ + "d", + -7.413090705871582 + ], + [ + "▁such", + -7.415567874908447 + ], + [ + "▁even", + -7.4157514572143555 + ], + [ + "▁great", + -7.419434070587158 + ], + [ + "▁could", + -7.421172142028809 + ], + [ + "▁information", + -7.432606220245361 + ], + [ + "▁find", + -7.43942928314209 + ], + [ + "▁&", + -7.440021514892578 + ], + [ + "▁business", + -7.483234882354736 + ], + [ + "ve", + -7.488013744354248 + ], + [ + "▁because", + -7.488485336303711 + ], + [ + "▁used", + -7.493058204650879 + ], + [ + "▁here", + -7.495416641235352 + ], + [ + "▁she", + -7.49976921081543 + ], + [ + "▁made", + -7.50059175491333 + ], + [ + "ll", + -7.515738487243652 + ], + [ + "▁right", + -7.532498359680176 + ], + [ + "▁before", + -7.539369583129883 + ], + [ + "▁being", + -7.5465850830078125 + ], + [ + "▁life", + -7.563371181488037 + ], + [ + "▁don", + -7.567350387573242 + ], + [ + "ing", + -7.568863391876221 + ], + [ + "▁those", + -7.5763349533081055 + ], + [ + "▁go", + -7.581284999847412 + ], + [ + "▁each", + -7.583556652069092 + ], + [ + "▁while", + 
-7.619012832641602 + ], + [ + "▁world", + -7.623234272003174 + ], + [ + "▁around", + -7.625234603881836 + ], + [ + "▁2", + -7.626479625701904 + ], + [ + "▁high", + -7.633934020996094 + ], + [ + "▁own", + -7.635804176330566 + ], + [ + "S", + -7.648947715759277 + ], + [ + "We", + -7.664306163787842 + ], + [ + "▁He", + -7.6673583984375 + ], + [ + "▁look", + -7.691705703735352 + ], + [ + "This", + -7.700477123260498 + ], + [ + "▁really", + -7.702106952667236 + ], + [ + "▁last", + -7.706178665161133 + ], + [ + "▁every", + -7.709034442901611 + ], + [ + "▁different", + -7.710362911224365 + ], + [ + "▁service", + -7.710605621337891 + ], + [ + "▁available", + -7.715913772583008 + ], + [ + "▁free", + -7.718165874481201 + ], + [ + "▁place", + -7.72369384765625 + ], + [ + "▁3", + -7.724313735961914 + ], + [ + "▁part", + -7.724812507629395 + ], + [ + "▁love", + -7.725207328796387 + ], + [ + "▁off", + -7.72635555267334 + ], + [ + "▁long", + -7.733267307281494 + ], + [ + "▁company", + -7.735748767852783 + ], + [ + "▁both", + -7.737212657928467 + ], + [ + "▁using", + -7.742841243743896 + ], + [ + "a", + -7.744221210479736 + ], + [ + "▁1", + -7.749725341796875 + ], + [ + "▁experience", + -7.750367164611816 + ], + [ + "th", + -7.757729053497315 + ], + [ + "▁down", + -7.760585784912109 + ], + [ + "▁still", + -7.777933597564697 + ], + [ + "▁same", + -7.777952194213867 + ], + [ + "▁think", + -7.787189960479736 + ], + [ + "),", + -7.793578624725342 + ], + [ + "▁going", + -7.799764156341553 + ], + [ + "▁too", + -7.803317070007324 + ], + [ + "In", + -7.805799961090088 + ], + [ + "ed", + -7.810286998748779 + ], + [ + "▁between", + -7.821091175079346 + ], + [ + "▁services", + -7.8221845626831055 + ], + [ + "▁provide", + -7.822526931762695 + ], + [ + "▁team", + -7.823488712310791 + ], + [ + "▁For", + -7.824619770050049 + ], + [ + "▁They", + -7.829378128051758 + ], + [ + "▁set", + -7.834484100341797 + ], + [ + "▁few", + -7.837801456451416 + ], + [ + "▁And", + -7.840814113616943 + ], + [ + "e", + -7.841697692871094 + ], + [ + "▁system", + -7.852316856384277 + ], + [ + "▁family", + -7.855905532836914 + ], + [ + "▁always", + -7.857081413269043 + ], + [ + "▁including", + -7.867188930511475 + ], + [ + "▁come", + -7.899563312530518 + ], + [ + "▁little", + -7.9062042236328125 + ], + [ + "▁during", + -7.910624027252197 + ], + [ + "▁sure", + -7.911512851715088 + ], + [ + "▁data", + -7.914026260375977 + ], + [ + "▁water", + -7.917076110839844 + ], + [ + "▁support", + -7.917360305786133 + ], + [ + "▁online", + -7.920180797576904 + ], + [ + "▁three", + -7.921014785766602 + ], + [ + "▁better", + -7.923113822937012 + ], + [ + "▁things", + -7.926352500915527 + ], + [ + "▁him", + -7.943122863769531 + ], + [ + "▁But", + -7.94437313079834 + ], + [ + "▁number", + -7.948531150817871 + ], + [ + "▁area", + -7.949511528015137 + ], + [ + "▁am", + -7.950676441192627 + ], + [ + "▁without", + -7.954408168792725 + ], + [ + "▁quality", + -7.956590175628662 + ], + [ + "▁top", + -7.962404251098633 + ], + [ + "▁able", + -7.965260505676269 + ], + [ + "▁New", + -7.966537952423096 + ], + [ + "▁next", + -7.971044540405273 + ], + [ + "▁There", + -7.971127033233643 + ], + [ + "▁design", + -7.974674224853516 + ], + [ + "▁must", + -7.984644412994385 + ], + [ + "▁important", + -7.987626075744629 + ], + [ + "▁full", + -7.989880084991455 + ], + [ + "▁does", + -7.998542308807373 + ], + [ + "If", + -8.000590324401855 + ], + [ + "▁small", + -8.004314422607422 + ], + [ + "▁give", + -8.005414962768555 + ], + [ + "It", + -8.009657859802246 + ], + [ + "com", + 
-8.01388931274414 + ], + [ + "▁something", + -8.015584945678711 + ], + [ + "▁under", + -8.020474433898926 + ], + [ + "▁process", + -8.024904251098633 + ], + [ + "▁lot", + -8.031489372253418 + ], + [ + "▁did", + -8.03785514831543 + ], + [ + "▁offer", + -8.044572830200195 + ], + [ + "▁end", + -8.04595947265625 + ], + [ + "▁5", + -8.050633430480957 + ], + [ + "▁another", + -8.054558753967285 + ], + [ + "▁within", + -8.062564849853516 + ], + [ + "▁found", + -8.065214157104492 + ], + [ + "▁working", + -8.066254615783691 + ], + [ + "▁keep", + -8.07094955444336 + ], + [ + "▁10", + -8.071211814880371 + ], + [ + "▁site", + -8.07218074798584 + ], + [ + "▁products", + -8.074247360229492 + ], + [ + "▁today", + -8.074755668640137 + ], + [ + "▁since", + -8.078740119934082 + ], + [ + "▁might", + -8.08227825164795 + ], + [ + "▁looking", + -8.086539268493652 + ], + [ + "▁4", + -8.092743873596191 + ], + [ + "▁order", + -8.096660614013672 + ], + [ + "in", + -8.09752368927002 + ], + [ + "▁website", + -8.098860740661621 + ], + [ + "▁start", + -8.099067687988281 + ], + [ + "▁game", + -8.110054969787598 + ], + [ + "▁days", + -8.110576629638672 + ], + [ + "▁local", + -8.12161636352539 + ], + [ + "▁students", + -8.129379272460938 + ], + [ + "▁feel", + -8.130245208740234 + ], + [ + "▁never", + -8.141186714172363 + ], + [ + "▁making", + -8.142308235168457 + ], + [ + "▁week", + -8.149123191833496 + ], + [ + "▁needs", + -8.151945114135742 + ], + [ + "▁As", + -8.155682563781738 + ], + [ + "▁school", + -8.159008026123047 + ], + [ + "▁easy", + -8.169631958007812 + ], + [ + "...", + -8.169824600219727 + ], + [ + "▁‘", + -8.174454689025879 + ], + [ + "▁show", + -8.183157920837402 + ], + [ + "▁children", + -8.183238983154297 + ], + [ + "▁project", + -8.188772201538086 + ], + [ + "▁care", + -8.189933776855469 + ], + [ + "▁market", + -8.196416854858398 + ], + [ + "▁money", + -8.199808120727539 + ], + [ + "▁Our", + -8.199830055236816 + ], + [ + "▁book", + -8.203202247619629 + ], + [ + "▁change", + -8.21865177154541 + ], + [ + "▁So", + -8.221673965454102 + ], + [ + "▁To", + -8.223464965820312 + ], + [ + "▁put", + -8.226777076721191 + ], + [ + "y", + -8.230263710021973 + ], + [ + "▁say", + -8.230818748474121 + ], + [ + "You", + -8.235950469970703 + ], + [ + "▁room", + -8.236367225646973 + ], + [ + "▁got", + -8.236542701721191 + ], + [ + "er", + -8.238554000854492 + ], + [ + "▁create", + -8.238569259643555 + ], + [ + "▁course", + -8.245872497558594 + ], + [ + "▁large", + -8.246334075927734 + ], + [ + "▁together", + -8.247201919555664 + ], + [ + "▁food", + -8.258245468139648 + ], + [ + "▁health", + -8.259581565856934 + ], + [ + "▁community", + -8.26923656463623 + ], + [ + "▁open", + -8.270891189575195 + ], + [ + "▁away", + -8.27224063873291 + ], + [ + "▁until", + -8.275766372680664 + ], + [ + "▁program", + -8.276126861572266 + ], + [ + "▁often", + -8.284207344055176 + ], + [ + "▁possible", + -8.287511825561523 + ], + [ + "▁When", + -8.290070533752441 + ], + [ + "▁again", + -8.290444374084473 + ], + [ + "▁All", + -8.290567398071289 + ], + [ + "▁case", + -8.2905912399292 + ], + [ + "▁page", + -8.298100471496582 + ], + [ + "▁car", + -8.30151653289795 + ], + [ + "▁real", + -8.30361270904541 + ], + [ + "▁With", + -8.30585765838623 + ], + [ + "▁name", + -8.306732177734375 + ], + [ + "▁call", + -8.310717582702637 + ], + [ + "▁include", + -8.31357479095459 + ], + [ + "ly", + -8.3138427734375 + ], + [ + "▁per", + -8.318714141845703 + ], + [ + "▁why", + -8.322693824768066 + ], + [ + "▁product", + -8.32348918914795 + ], + [ + "▁state", + 
-8.325530052185059 + ], + [ + "▁post", + -8.32573127746582 + ], + [ + "▁based", + -8.327101707458496 + ], + [ + "▁She", + -8.335611343383789 + ], + [ + "▁second", + -8.340707778930664 + ], + [ + "n", + -8.348139762878418 + ], + [ + "▁event", + -8.349672317504883 + ], + [ + "▁group", + -8.355369567871094 + ], + [ + "i", + -8.356607437133789 + ], + [ + "▁having", + -8.358033180236816 + ], + [ + "▁old", + -8.366349220275879 + ], + [ + "▁become", + -8.367206573486328 + ], + [ + "▁big", + -8.36790657043457 + ], + [ + "▁play", + -8.368231773376465 + ], + [ + "▁What", + -8.368507385253906 + ], + [ + "▁against", + -8.373008728027344 + ], + [ + "▁person", + -8.377306938171387 + ], + [ + "▁along", + -8.377911567687988 + ], + [ + "▁list", + -8.37870979309082 + ], + [ + "“", + -8.379605293273926 + ], + [ + "▁price", + -8.380281448364258 + ], + [ + "D", + -8.381325721740723 + ], + [ + "▁contact", + -8.382407188415527 + ], + [ + "▁comes", + -8.382603645324707 + ], + [ + "▁research", + -8.382624626159668 + ], + [ + "▁thing", + -8.383493423461914 + ], + [ + "▁U", + -8.388687133789062 + ], + [ + "▁level", + -8.390752792358398 + ], + [ + "▁side", + -8.391456604003906 + ], + [ + "▁less", + -8.392041206359863 + ], + [ + "▁done", + -8.392279624938965 + ], + [ + "▁house", + -8.395618438720703 + ], + [ + "▁public", + -8.399389266967773 + ], + [ + "▁across", + -8.401331901550293 + ], + [ + ",”", + -8.401877403259277 + ], + [ + "▁power", + -8.404821395874023 + ], + [ + "▁That", + -8.406052589416504 + ], + [ + "▁development", + -8.406698226928711 + ], + [ + "▁below", + -8.408527374267578 + ], + [ + "▁times", + -8.41074275970459 + ], + [ + "▁access", + -8.411165237426758 + ], + [ + "or", + -8.41163158416748 + ], + [ + "▁point", + -8.412212371826172 + ], + [ + "▁—", + -8.41431713104248 + ], + [ + "▁makes", + -8.41592025756836 + ], + [ + "▁job", + -8.416211128234863 + ], + [ + "▁means", + -8.417153358459473 + ], + [ + ".\"", + -8.417852401733398 + ], + [ + "to", + -8.4223051071167 + ], + [ + "▁live", + -8.42379379272461 + ], + [ + "▁range", + -8.424534797668457 + ], + [ + "▁several", + -8.426468849182129 + ], + [ + "▁space", + -8.427942276000977 + ], + [ + "▁University", + -8.433019638061523 + ], + [ + "▁industry", + -8.435643196105957 + ], + [ + "▁offers", + -8.43648624420166 + ], + [ + "▁enough", + -8.437776565551758 + ], + [ + "▁already", + -8.441006660461426 + ], + [ + "▁These", + -8.441560745239258 + ], + [ + "▁try", + -8.441743850708008 + ], + [ + "▁getting", + -8.44401741027832 + ], + [ + "▁personal", + -8.446427345275879 + ], + [ + "▁plan", + -8.448685646057129 + ], + [ + "▁perfect", + -8.451140403747559 + ], + [ + "▁body", + -8.454229354858398 + ], + [ + "▁hard", + -8.455275535583496 + ], + [ + "▁form", + -8.46046257019043 + ], + [ + "▁read", + -8.461481094360352 + ], + [ + "▁city", + -8.470338821411133 + ], + [ + "▁left", + -8.472179412841797 + ], + [ + "▁cost", + -8.472900390625 + ], + [ + "▁S", + -8.473122596740723 + ], + [ + "▁ever", + -8.473933219909668 + ], + [ + "2", + -8.475369453430176 + ], + [ + "▁light", + -8.477889060974121 + ], + [ + "▁companies", + -8.480267524719238 + ], + [ + "▁social", + -8.482386589050293 + ], + [ + "and", + -8.483469009399414 + ], + [ + "▁customers", + -8.485298156738281 + ], + [ + "▁please", + -8.486449241638184 + ], + [ + "C", + -8.490277290344238 + ], + [ + "▁6", + -8.4917573928833 + ], + [ + "▁country", + -8.49443531036377 + ], + [ + "▁i", + -8.495920181274414 + ], + [ + "▁future", + -8.497748374938965 + ], + [ + "▁music", + -8.498245239257812 + ], + [ + "▁add", + 
-8.498602867126465 + ], + [ + "▁others", + -8.499848365783691 + ], + [ + "▁share", + -8.50073528289795 + ], + [ + "▁let", + -8.501670837402344 + ], + [ + "▁hours", + -8.501974105834961 + ], + [ + "▁line", + -8.502188682556152 + ], + [ + "▁four", + -8.51021671295166 + ], + [ + "▁minutes", + -8.511435508728027 + ], + [ + "▁learn", + -8.51347541809082 + ], + [ + "the", + -8.515169143676758 + ], + [ + "▁video", + -8.518858909606934 + ], + [ + "▁fun", + -8.522723197937012 + ], + [ + "▁started", + -8.523714065551758 + ], + [ + "▁special", + -8.52413558959961 + ], + [ + "▁members", + -8.525784492492676 + ], + [ + "▁run", + -8.526727676391602 + ], + [ + "r", + -8.529671669006348 + ], + [ + "▁technology", + -8.532273292541504 + ], + [ + "▁check", + -8.532812118530273 + ], + [ + "o", + -8.532858848571777 + ], + [ + "▁past", + -8.536928176879883 + ], + [ + "▁features", + -8.539444923400879 + ], + [ + "▁doing", + -8.539698600769043 + ], + [ + "▁visit", + -8.54074478149414 + ], + [ + "▁once", + -8.542430877685547 + ], + [ + "—", + -8.547511100769043 + ], + [ + "▁hand", + -8.553688049316406 + ], + [ + "▁control", + -8.55370044708252 + ], + [ + "▁building", + -8.555005073547363 + ], + [ + "▁value", + -8.555305480957031 + ], + [ + "▁night", + -8.557099342346191 + ], + [ + "▁No", + -8.558977127075195 + ], + [ + "What", + -8.560403823852539 + ], + [ + "▁called", + -8.566706657409668 + ], + [ + "▁training", + -8.570247650146484 + ], + [ + "▁professional", + -8.570328712463379 + ], + [ + "▁far", + -8.572444915771484 + ], + [ + "▁God", + -8.573610305786133 + ], + [ + "▁complete", + -8.581742286682129 + ], + [ + "▁questions", + -8.590280532836914 + ], + [ + "▁problem", + -8.591117858886719 + ], + [ + "▁won", + -8.593725204467773 + ], + [ + "▁though", + -8.594154357910156 + ], + [ + "▁season", + -8.594377517700195 + ], + [ + "▁everything", + -8.594948768615723 + ], + [ + "▁low", + -8.595890998840332 + ], + [ + "For", + -8.597628593444824 + ], + [ + "▁current", + -8.59793758392334 + ], + [ + "▁least", + -8.598852157592773 + ], + [ + "▁story", + -8.599130630493164 + ], + [ + "▁beautiful", + -8.599903106689453 + ], + [ + "▁simple", + -8.601421356201172 + ], + [ + "▁example", + -8.601912498474121 + ], + [ + "▁bit", + -8.602218627929688 + ], + [ + "▁actually", + -8.60239028930664 + ], + [ + "▁months", + -8.60717487335205 + ], + [ + "▁doesn", + -8.607365608215332 + ], + [ + "▁provides", + -8.609465599060059 + ], + [ + "▁property", + -8.612061500549316 + ], + [ + "▁friends", + -8.613401412963867 + ], + [ + "▁didn", + -8.616750717163086 + ], + [ + "▁B", + -8.617932319641113 + ], + [ + "▁C", + -8.621614456176758 + ], + [ + "▁enjoy", + -8.623465538024902 + ], + [ + "▁20", + -8.6243314743042 + ], + [ + "▁My", + -8.625374794006348 + ], + [ + "▁content", + -8.631168365478516 + ], + [ + "▁results", + -8.637327194213867 + ], + [ + "▁management", + -8.637876510620117 + ], + [ + "▁million", + -8.63830280303955 + ], + [ + "As", + -8.638511657714844 + ], + [ + "▁known", + -8.640451431274414 + ], + [ + "▁above", + -8.640816688537598 + ], + [ + "▁8", + -8.643412590026855 + ], + [ + "▁non", + -8.643502235412598 + ], + [ + "▁yet", + -8.64928913116455 + ], + [ + "▁However", + -8.650386810302734 + ], + [ + "▁single", + -8.650899887084961 + ], + [ + "▁short", + -8.653928756713867 + ], + [ + "▁One", + -8.654900550842285 + ], + [ + "▁ensure", + -8.655611038208008 + ], + [ + "▁early", + -8.655922889709473 + ], + [ + "▁fact", + -8.657227516174316 + ], + [ + "▁issues", + -8.657938957214355 + ], + [ + "▁type", + -8.658736228942871 + ], + [ + 
"▁unique", + -8.658904075622559 + ], + [ + "There", + -8.662323951721191 + ], + [ + "▁living", + -8.663537979125977 + ], + [ + "▁various", + -8.665393829345703 + ], + [ + "▁7", + -8.666431427001953 + ], + [ + "▁month", + -8.667030334472656 + ], + [ + "▁pay", + -8.667912483215332 + ], + [ + "▁M", + -8.668265342712402 + ], + [ + "▁size", + -8.668610572814941 + ], + [ + "▁office", + -8.668834686279297 + ], + [ + "▁understand", + -8.670031547546387 + ], + [ + "▁buy", + -8.671445846557617 + ], + [ + "▁choose", + -8.673722267150879 + ], + [ + "▁areas", + -8.674372673034668 + ], + [ + "▁given", + -8.681473731994629 + ], + [ + "▁took", + -8.681658744812012 + ], + [ + "▁media", + -8.684259414672852 + ], + [ + "▁performance", + -8.685129165649414 + ], + [ + "▁designed", + -8.685369491577148 + ], + [ + "▁customer", + -8.690478324890137 + ], + [ + "▁due", + -8.691640853881836 + ], + [ + "▁idea", + -8.69194507598877 + ], + [ + "▁came", + -8.692439079284668 + ], + [ + "▁email", + -8.692550659179688 + ], + [ + "▁energy", + -8.692963600158691 + ], + [ + "▁following", + -8.69970989227295 + ], + [ + "▁taking", + -8.700371742248535 + ], + [ + "▁someone", + -8.70041561126709 + ], + [ + "▁staff", + -8.704477310180664 + ], + [ + "▁says", + -8.704545021057129 + ], + [ + "▁bring", + -8.704769134521484 + ], + [ + "▁On", + -8.706055641174316 + ], + [ + "▁women", + -8.707350730895996 + ], + [ + "▁mind", + -8.708806037902832 + ], + [ + "▁At", + -8.710515975952148 + ], + [ + "▁American", + -8.71088695526123 + ], + [ + "▁required", + -8.712048530578613 + ], + [ + "▁government", + -8.714275360107422 + ], + [ + "▁clients", + -8.717752456665039 + ], + [ + "▁works", + -8.724774360656738 + ], + [ + "▁kind", + -8.724949836730957 + ], + [ + "on", + -8.725707054138184 + ], + [ + "▁key", + -8.726762771606445 + ], + [ + "▁addition", + -8.729994773864746 + ], + [ + "▁whole", + -8.73404312133789 + ], + [ + "▁12", + -8.735607147216797 + ], + [ + "▁thought", + -8.737224578857422 + ], + [ + "▁child", + -8.73930835723877 + ], + [ + "▁five", + -8.741908073425293 + ], + [ + "▁style", + -8.742426872253418 + ], + [ + "▁meet", + -8.7449312210083 + ], + [ + "▁options", + -8.744941711425781 + ], + [ + "▁City", + -8.745487213134766 + ], + [ + "▁Your", + -8.747692108154297 + ], + [ + "▁main", + -8.748741149902344 + ], + [ + "▁ideas", + -8.750164031982422 + ], + [ + "When", + -8.751725196838379 + ], + [ + "▁30", + -8.755098342895508 + ], + [ + "▁later", + -8.755724906921387 + ], + [ + "▁series", + -8.756606101989746 + ], + [ + "▁software", + -8.756613731384277 + ], + [ + "▁yourself", + -8.758259773254395 + ], + [ + "▁whether", + -8.75875186920166 + ], + [ + "▁front", + -8.760543823242188 + ], + [ + "▁seen", + -8.761364936828613 + ], + [ + "▁phone", + -8.761407852172852 + ], + [ + "▁close", + -8.767147064208984 + ], + [ + "▁went", + -8.773189544677734 + ], + [ + "▁everyone", + -8.773709297180176 + ], + [ + "▁history", + -8.776589393615723 + ], + [ + "▁needed", + -8.77776050567627 + ], + [ + "▁art", + -8.778430938720703 + ], + [ + "▁study", + -8.779533386230469 + ], + [ + "▁head", + -8.78136157989502 + ], + [ + "▁air", + -8.782264709472656 + ], + [ + "▁white", + -8.782979965209961 + ], + [ + "▁move", + -8.783723831176758 + ], + [ + "▁believe", + -8.783974647521973 + ], + [ + "▁major", + -8.784621238708496 + ], + [ + "▁E", + -8.787566184997559 + ], + [ + "▁view", + -8.788816452026367 + ], + [ + "▁further", + -8.789299964904785 + ], + [ + "▁events", + -8.790008544921875 + ], + [ + "▁details", + -8.79029655456543 + ], + [ + "▁especially", + 
-8.79155445098877 + ], + [ + "▁simply", + -8.792705535888672 + ], + [ + "▁United", + -8.792872428894043 + ], + [ + "▁either", + -8.795005798339844 + ], + [ + "▁quite", + -8.795449256896973 + ], + [ + "▁includes", + -8.796135902404785 + ], + [ + "▁natural", + -8.797107696533203 + ], + [ + "▁result", + -8.799500465393066 + ], + [ + "▁web", + -8.800081253051758 + ], + [ + "▁amount", + -8.801153182983398 + ], + [ + "▁D", + -8.801957130432129 + ], + [ + "▁hope", + -8.808331489562988 + ], + [ + "▁insurance", + -8.811552047729492 + ], + [ + "▁added", + -8.813457489013672 + ], + [ + "▁de", + -8.81393814086914 + ], + [ + "▁receive", + -8.8145170211792 + ], + [ + "▁throughout", + -8.817765235900879 + ], + [ + "▁After", + -8.81965160369873 + ], + [ + "▁How", + -8.820748329162598 + ], + [ + "▁application", + -8.820927619934082 + ], + [ + "▁writing", + -8.821414947509766 + ], + [ + "▁search", + -8.823315620422363 + ], + [ + "3", + -8.823563575744629 + ], + [ + "▁games", + -8.824029922485352 + ], + [ + "▁account", + -8.824139595031738 + ], + [ + "▁man", + -8.824563026428223 + ], + [ + "▁opportunity", + -8.82780933380127 + ], + [ + "▁report", + -8.829124450683594 + ], + [ + "▁created", + -8.829667091369629 + ], + [ + "▁etc", + -8.82994556427002 + ], + [ + "▁ready", + -8.830066680908203 + ], + [ + "▁provided", + -8.83117389678955 + ], + [ + "▁build", + -8.835588455200195 + ], + [ + "To", + -8.840737342834473 + ], + [ + "▁15", + -8.841056823730469 + ], + [ + "▁specific", + -8.842687606811523 + ], + [ + "1", + -8.846052169799805 + ], + [ + "(", + -8.847412109375 + ], + [ + "▁anything", + -8.84786319732666 + ], + [ + "▁problems", + -8.848278999328613 + ], + [ + "▁almost", + -8.848941802978516 + ], + [ + ",\"", + -8.848979949951172 + ], + [ + "▁located", + -8.849791526794434 + ], + [ + "▁systems", + -8.85130786895752 + ], + [ + "▁financial", + -8.854772567749023 + ], + [ + "▁face", + -8.855401992797852 + ], + [ + "▁security", + -8.855586051940918 + ], + [ + "▁half", + -8.858562469482422 + ], + [ + "▁card", + -8.860523223876953 + ], + [ + "▁stay", + -8.860800743103027 + ], + [ + "▁ago", + -8.86223316192627 + ], + [ + "▁class", + -8.863239288330078 + ], + [ + "▁lead", + -8.865066528320312 + ], + [ + "x", + -8.866487503051758 + ], + [ + "▁young", + -8.867810249328613 + ], + [ + "&", + -8.869599342346191 + ], + [ + "▁School", + -8.871227264404297 + ], + [ + "▁learning", + -8.872227668762207 + ], + [ + "▁oil", + -8.87280559539795 + ], + [ + "▁allow", + -8.873347282409668 + ], + [ + "▁field", + -8.875032424926758 + ], + [ + "▁National", + -8.875052452087402 + ], + [ + "▁skills", + -8.877705574035645 + ], + [ + "▁currently", + -8.877791404724121 + ], + [ + "up", + -8.879448890686035 + ], + [ + "▁skin", + -8.883238792419434 + ], + [ + "▁likely", + -8.886331558227539 + ], + [ + "▁ways", + -8.886456489562988 + ], + [ + "es", + -8.886765480041504 + ], + [ + "▁heart", + -8.887955665588379 + ], + [ + "▁happy", + -8.888766288757324 + ], + [ + "▁law", + -8.889339447021484 + ], + [ + "▁third", + -8.889482498168945 + ], + [ + "▁focus", + -8.890401840209961 + ], + [ + "▁color", + -8.890971183776855 + ], + [ + "▁age", + -8.890972137451172 + ], + [ + "▁users", + -8.895500183105469 + ], + [ + "▁T", + -8.896495819091797 + ], + [ + "▁forward", + -8.896827697753906 + ], + [ + "▁soon", + -8.897915840148926 + ], + [ + "▁taken", + -8.899215698242188 + ], + [ + "▁popular", + -8.89972972869873 + ], + [ + "▁clear", + -8.899882316589355 + ], + [ + "▁US", + -8.900382995605469 + ], + [ + "B", + -8.901542663574219 + ], + [ + "▁brand", + 
-8.902568817138672 + ], + [ + "▁among", + -8.903935432434082 + ], + [ + "▁treatment", + -8.904376029968262 + ], + [ + "▁coming", + -8.905299186706543 + ], + [ + "▁South", + -8.905774116516113 + ], + [ + "▁store", + -8.907940864562988 + ], + [ + "▁turn", + -8.90888500213623 + ], + [ + "▁date", + -8.909137725830078 + ], + [ + "▁issue", + -8.910526275634766 + ], + [ + "▁education", + -8.910872459411621 + ], + [ + "▁present", + -8.912420272827148 + ], + [ + "▁paper", + -8.913366317749023 + ], + [ + "▁continue", + -8.913426399230957 + ], + [ + "▁however", + -8.913522720336914 + ], + [ + "▁save", + -8.914637565612793 + ], + [ + "▁test", + -8.91720962524414 + ], + [ + "▁image", + -8.91730785369873 + ], + [ + "▁strong", + -8.918907165527344 + ], + [ + "▁latest", + -8.920565605163574 + ], + [ + "▁private", + -8.920843124389648 + ], + [ + "▁variety", + -8.921234130859375 + ], + [ + "▁black", + -8.922979354858398 + ], + [ + "▁kids", + -8.92323112487793 + ], + [ + "So", + -8.923332214355469 + ], + [ + "▁self", + -8.92359733581543 + ], + [ + "▁choice", + -8.92402458190918 + ], + [ + "▁increase", + -8.924145698547363 + ], + [ + "▁State", + -8.924439430236816 + ], + [ + "▁activities", + -8.92678165435791 + ], + [ + "▁deal", + -8.927753448486328 + ], + [ + "▁individual", + -8.92797565460205 + ], + [ + "▁interest", + -8.929634094238281 + ], + [ + "▁blog", + -8.9298095703125 + ], + [ + "Our", + -8.929862022399902 + ], + [ + "▁tell", + -8.931259155273438 + ], + [ + "▁version", + -8.931310653686523 + ], + [ + "▁knowledge", + -8.932472229003906 + ], + [ + "▁table", + -8.932650566101074 + ], + [ + "▁wide", + -8.932659149169922 + ], + [ + "▁near", + -8.934755325317383 + ], + [ + "▁party", + -8.935462951660156 + ], + [ + "▁common", + -8.935980796813965 + ], + [ + "▁easily", + -8.938100814819336 + ], + [ + "▁usually", + -8.939117431640625 + ], + [ + "▁outside", + -8.939471244812012 + ], + [ + "▁marketing", + -8.93984317779541 + ], + [ + "▁sales", + -8.941242218017578 + ], + [ + "▁built", + -8.941800117492676 + ], + [ + "▁percent", + -8.942456245422363 + ], + [ + "g", + -8.943798065185547 + ], + [ + "▁takes", + -8.944527626037598 + ], + [ + "▁R", + -8.944595336914062 + ], + [ + "▁matter", + -8.944963455200195 + ], + [ + "▁Do", + -8.945013046264648 + ], + [ + "▁items", + -8.945489883422852 + ], + [ + "▁projects", + -8.945586204528809 + ], + [ + "▁address", + -8.945998191833496 + ], + [ + "▁practice", + -8.946027755737305 + ], + [ + "▁trying", + -8.946415901184082 + ], + [ + "▁pretty", + -8.9465970993042 + ], + [ + "▁environment", + -8.947281837463379 + ], + [ + "▁running", + -8.949472427368164 + ], + [ + "▁file", + -8.950657844543457 + ], + [ + "▁changes", + -8.95258903503418 + ], + [ + "▁additional", + -8.953200340270996 + ], + [ + "▁ask", + -8.95376968383789 + ], + [ + "▁member", + -8.95561695098877 + ], + [ + "▁original", + -8.956013679504395 + ], + [ + "▁follow", + -8.957947731018066 + ], + [ + "▁leave", + -8.958440780639648 + ], + [ + "▁York", + -8.959409713745117 + ], + [ + "▁e", + -8.95975399017334 + ], + [ + "▁model", + -8.960509300231934 + ], + [ + "▁user", + -8.965814590454102 + ], + [ + "▁step", + -8.965836524963379 + ], + [ + "▁probably", + -8.967220306396484 + ], + [ + "▁inside", + -8.967582702636719 + ], + [ + "▁potential", + -8.969286918640137 + ], + [ + "▁download", + -8.969303131103516 + ], + [ + "▁role", + -8.970100402832031 + ], + [ + "▁wanted", + -8.970849990844727 + ], + [ + "▁reason", + -8.97148609161377 + ], + [ + "▁allows", + -8.971899032592773 + ], + [ + "▁rate", + -8.97201156616211 + ], + [ 
+ "▁St", + -8.974040985107422 + ], + [ + "▁growth", + -8.975920677185059 + ], + [ + "▁isn", + -8.97617244720459 + ], + [ + "▁providing", + -8.976178169250488 + ], + [ + "▁amazing", + -8.977059364318848 + ], + [ + "▁certain", + -8.977330207824707 + ], + [ + "▁via", + -8.977396965026855 + ], + [ + "▁rather", + -8.979070663452148 + ], + [ + "▁P", + -8.981762886047363 + ], + [ + "▁equipment", + -8.982707977294922 + ], + [ + "▁human", + -8.9829683303833 + ], + [ + "▁points", + -8.983405113220215 + ], + [ + "▁cover", + -8.983735084533691 + ], + [ + "▁held", + -8.986278533935547 + ], + [ + "▁risk", + -8.98660659790039 + ], + [ + "▁option", + -8.989980697631836 + ], + [ + "▁production", + -8.990348815917969 + ], + [ + "▁quickly", + -8.991655349731445 + ], + [ + "▁p", + -8.991721153259277 + ], + [ + "▁North", + -8.992506980895996 + ], + [ + "▁behind", + -8.994646072387695 + ], + [ + "▁World", + -8.99557876586914 + ], + [ + "▁longer", + -8.997027397155762 + ], + [ + "▁film", + -8.999271392822266 + ], + [ + "M", + -9.000236511230469 + ], + [ + "▁told", + -9.001049995422363 + ], + [ + "▁weeks", + -9.001425743103027 + ], + [ + "▁Center", + -9.00243854522705 + ], + [ + "▁click", + -9.003296852111816 + ], + [ + "▁higher", + -9.003859519958496 + ], + [ + "▁Please", + -9.005452156066895 + ], + [ + "▁location", + -9.005966186523438 + ], + [ + "▁travel", + -9.006172180175781 + ], + [ + "▁question", + -9.006987571716309 + ], + [ + "▁J", + -9.007912635803224 + ], + [ + "▁period", + -9.008030891418455 + ], + [ + "▁recent", + -9.009512901306152 + ], + [ + "▁credit", + -9.012911796569824 + ], + [ + "▁extra", + -9.014187812805176 + ], + [ + "▁purchase", + -9.014617919921877 + ], + [ + "▁May", + -9.014866828918455 + ], + [ + "4", + -9.0150728225708 + ], + [ + "▁received", + -9.016094207763672 + ], + [ + "▁total", + -9.018630981445312 + ], + [ + "▁materials", + -9.01911449432373 + ], + [ + "▁action", + -9.019588470458984 + ], + [ + "▁drive", + -9.020418167114258 + ], + [ + "▁re", + -9.0209321975708 + ], + [ + "▁true", + -9.022054672241213 + ], + [ + "▁summer", + -9.025042533874512 + ], + [ + "▁stop", + -9.025452613830566 + ], + [ + "▁success", + -9.026233673095703 + ], + [ + "▁sale", + -9.026857376098633 + ], + [ + "▁UK", + -9.027408599853516 + ], + [ + "▁improve", + -9.02920150756836 + ], + [ + "▁favorite", + -9.029309272766112 + ], + [ + "▁code", + -9.029489517211914 + ], + [ + "T", + -9.031166076660156 + ], + [ + "▁2018", + -9.032273292541504 + ], + [ + "▁couple", + -9.034724235534668 + ], + [ + "▁review", + -9.035015106201172 + ], + [ + "▁County", + -9.036612510681152 + ], + [ + "▁return", + -9.037851333618164 + ], + [ + "based", + -9.03791618347168 + ], + [ + "How", + -9.039952278137209 + ], + [ + "▁recently", + -9.040114402770996 + ], + [ + "▁shows", + -9.040854454040527 + ], + [ + "_", + -9.040928840637209 + ], + [ + "▁medical", + -9.04203987121582 + ], + [ + "▁international", + -9.042813301086426 + ], + [ + "▁/", + -9.043288230895996 + ], + [ + "▁mobile", + -9.043302536010742 + ], + [ + "▁solutions", + -9.043858528137209 + ], + [ + "▁modern", + -9.045321464538574 + ], + [ + "▁worked", + -9.047015190124512 + ], + [ + "▁nice", + -9.047977447509766 + ], + [ + "▁collection", + -9.04828643798828 + ], + [ + "▁9", + -9.048710823059082 + ], + [ + "▁solution", + -9.048749923706056 + ], + [ + "▁network", + -9.04929256439209 + ], + [ + "▁included", + -9.050390243530272 + ], + [ + "▁entire", + -9.050850868225098 + ], + [ + "But", + -9.051969528198242 + ], + [ + "▁general", + -9.052306175231934 + ], + [ + "▁effective", + 
-9.0543794631958 + ], + [ + "▁computer", + -9.05479907989502 + ], + [ + "▁G", + -9.055571556091309 + ], + [ + "▁April", + -9.057588577270508 + ], + [ + "▁reading", + -9.057947158813477 + ], + [ + "▁programs", + -9.058039665222168 + ], + [ + "▁safe", + -9.058367729187012 + ], + [ + "▁leading", + -9.058565139770508 + ], + [ + "▁lives", + -9.05942726135254 + ], + [ + "▁anyone", + -9.06020736694336 + ], + [ + "▁cannot", + -9.060442924499512 + ], + [ + "▁kitchen", + -9.060599327087402 + ], + [ + "P", + -9.062978744506836 + ], + [ + "▁particular", + -9.06344509124756 + ], + [ + "▁standard", + -9.063654899597168 + ], + [ + "▁parts", + -9.066191673278809 + ], + [ + "▁costs", + -9.069206237792969 + ], + [ + "▁article", + -9.069786071777344 + ], + [ + "▁position", + -9.071045875549316 + ], + [ + "▁final", + -9.072073936462402 + ], + [ + "▁tools", + -9.072898864746094 + ], + [ + "▁looks", + -9.074307441711426 + ], + [ + "▁impact", + -9.076008796691896 + ], + [ + "▁student", + -9.077422142028809 + ], + [ + "▁Park", + -9.078987121582031 + ], + [ + "▁Dr", + -9.07900047302246 + ], + [ + "▁An", + -9.079806327819824 + ], + [ + "▁similar", + -9.080866813659668 + ], + [ + "▁related", + -9.081357955932615 + ], + [ + "▁ability", + -9.081427574157717 + ], + [ + "▁highly", + -9.081758499145508 + ], + [ + "▁meeting", + -9.082496643066406 + ], + [ + "▁words", + -9.083226203918455 + ], + [ + "▁news", + -9.08547019958496 + ], + [ + "▁fast", + -9.086585998535156 + ], + [ + "▁according", + -9.08803653717041 + ], + [ + "▁necessary", + -9.08924961090088 + ], + [ + "▁excellent", + -9.091692924499512 + ], + [ + "▁benefits", + -9.09267520904541 + ], + [ + "And", + -9.093510627746582 + ], + [ + "▁else", + -9.094104766845703 + ], + [ + "▁app", + -9.094550132751465 + ], + [ + "▁cut", + -9.095458030700684 + ], + [ + "▁goal", + -9.096965789794922 + ], + [ + "▁material", + -9.098736763000488 + ], + [ + "▁talk", + -9.100778579711914 + ], + [ + "▁clean", + -9.100994110107422 + ], + [ + "▁rest", + -9.102072715759276 + ], + [ + "E", + -9.102229118347168 + ], + [ + "▁His", + -9.103106498718262 + ], + [ + "▁cause", + -9.103181838989258 + ], + [ + "▁safety", + -9.103330612182615 + ], + [ + "▁hair", + -9.103680610656738 + ], + [ + "▁career", + -9.10439109802246 + ], + [ + "▁difficult", + -9.10509204864502 + ], + [ + "▁chance", + -9.1076078414917 + ], + [ + "▁States", + -9.109415054321287 + ], + [ + "▁approach", + -9.1095609664917 + ], + [ + "▁nothing", + -9.10962963104248 + ], + [ + "▁digital", + -9.109781265258787 + ], + [ + "▁plans", + -9.10995388031006 + ], + [ + "▁road", + -9.11147117614746 + ], + [ + "year", + -9.112391471862791 + ], + [ + "▁types", + -9.11311149597168 + ], + [ + "▁mean", + -9.11530590057373 + ], + [ + "▁fit", + -9.116071701049805 + ], + [ + "▁win", + -9.118023872375488 + ], + [ + "▁men", + -9.121715545654297 + ], + [ + "▁bad", + -9.122437477111816 + ], + [ + "▁x", + -9.122769355773926 + ], + [ + "▁Some", + -9.124670028686523 + ], + [ + "K", + -9.125080108642578 + ], + [ + "▁six", + -9.125518798828123 + ], + [ + "▁floor", + -9.125666618347168 + ], + [ + "▁books", + -9.126091957092283 + ], + [ + "▁Now", + -9.126760482788086 + ], + [ + "▁link", + -9.126943588256836 + ], + [ + "▁'", + -9.127501487731934 + ], + [ + "▁tax", + -9.128698348999023 + ], + [ + "▁daily", + -9.12881565093994 + ], + [ + "▁consider", + -9.129351615905762 + ], + [ + "▁piece", + -9.131295204162598 + ], + [ + "▁morning", + -9.133667945861816 + ], + [ + "▁resources", + -9.133819580078123 + ], + [ + "▁While", + -9.134456634521484 + ], + [ + "▁board", 
+ -9.134913444519045 + ], + [ + "▁Home", + -9.13818645477295 + ], + [ + "▁patients", + -9.139657020568848 + ], + [ + "▁seems", + -9.14179801940918 + ], + [ + "▁K", + -9.142311096191406 + ], + [ + "▁conditions", + -9.14273738861084 + ], + [ + "▁feature", + -9.14331340789795 + ], + [ + "▁asked", + -9.144075393676758 + ], + [ + "▁weight", + -9.14465045928955 + ], + [ + "▁March", + -9.145047187805176 + ], + [ + "k", + -9.14742374420166 + ], + [ + "▁send", + -9.150863647460938 + ], + [ + "▁L", + -9.151713371276855 + ], + [ + "▁images", + -9.153392791748049 + ], + [ + "▁sound", + -9.153644561767578 + ], + [ + "▁decision", + -9.15371799468994 + ], + [ + "▁Google", + -9.154807090759276 + ], + [ + "▁door", + -9.155550956726074 + ], + [ + "▁By", + -9.15566349029541 + ], + [ + "▁100", + -9.156330108642578 + ], + [ + "▁planning", + -9.156681060791016 + ], + [ + "▁Here", + -9.1573486328125 + ], + [ + "G", + -9.15839958190918 + ], + [ + "▁involved", + -9.15932559967041 + ], + [ + "▁Day", + -9.159483909606934 + ], + [ + "▁policy", + -9.159859657287598 + ], + [ + "▁prices", + -9.160503387451172 + ], + [ + "▁fully", + -9.160601615905762 + ], + [ + "▁develop", + -9.161020278930664 + ], + [ + "▁upon", + -9.161510467529297 + ], + [ + "▁directly", + -9.161568641662598 + ], + [ + "▁11", + -9.162260055541992 + ], + [ + "▁lower", + -9.162874221801758 + ], + [ + "▁wall", + -9.16359519958496 + ], + [ + "▁box", + -9.168008804321287 + ], + [ + "▁average", + -9.16814422607422 + ], + [ + "▁From", + -9.169859886169434 + ], + [ + "▁multiple", + -9.169919967651367 + ], + [ + "▁players", + -9.170605659484863 + ], + [ + "▁photos", + -9.170828819274902 + ], + [ + "▁picture", + -9.171344757080078 + ], + [ + "▁sense", + -9.17337131500244 + ], + [ + "▁John", + -9.175950050354004 + ], + [ + "▁require", + -9.176244735717772 + ], + [ + "▁America", + -9.177779197692873 + ], + [ + "▁House", + -9.178332328796388 + ], + [ + "▁instead", + -9.178505897521973 + ], + [ + "▁Not", + -9.179887771606444 + ], + [ + "▁attention", + -9.180248260498049 + ], + [ + "▁F", + -9.1802978515625 + ], + [ + "▁worth", + -9.182486534118652 + ], + [ + "▁record", + -9.182689666748049 + ], + [ + "▁write", + -9.184625625610352 + ], + [ + "▁late", + -9.185016632080078 + ], + [ + "▁requirements", + -9.185047149658203 + ], + [ + "He", + -9.185209274291992 + ], + [ + "▁businesses", + -9.18594455718994 + ], + [ + "▁levels", + -9.186119079589844 + ], + [ + "]", + -9.188353538513184 + ], + [ + "▁N", + -9.188691139221191 + ], + [ + "▁walk", + -9.191417694091797 + ], + [ + "▁track", + -9.192421913146973 + ], + [ + "▁center", + -9.192838668823242 + ], + [ + "▁countries", + -9.193312644958496 + ], + [ + "▁gives", + -9.193424224853516 + ], + [ + "▁helps", + -9.193753242492676 + ], + [ + "▁red", + -9.194440841674805 + ], + [ + "*", + -9.194546699523926 + ], + [ + "▁itself", + -9.196451187133787 + ], + [ + "▁reach", + -9.196941375732422 + ], + [ + "▁storage", + -9.1973295211792 + ], + [ + "▁traditional", + -9.197717666625977 + ], + [ + "al", + -9.199090003967283 + ], + [ + "With", + -9.199440002441406 + ], + [ + "▁source", + -9.199467658996582 + ], + [ + "▁trip", + -9.199599266052246 + ], + [ + "▁themselves", + -9.200769424438477 + ], + [ + "▁parents", + -9.200770378112791 + ], + [ + "▁town", + -9.200998306274414 + ], + [ + "man", + -9.201421737670898 + ], + [ + "▁global", + -9.204296112060549 + ], + [ + "▁playing", + -9.204689979553224 + ], + [ + "▁huge", + -9.206341743469238 + ], + [ + "▁creating", + -9.208115577697754 + ], + [ + "▁[", + -9.208853721618652 + ], + [ + 
"▁completely", + -9.208928108215332 + ], + [ + "▁device", + -9.21014404296875 + ], + [ + "ers", + -9.211604118347168 + ], + [ + "▁terms", + -9.21205997467041 + ], + [ + "▁release", + -9.212292671203612 + ], + [ + "▁giving", + -9.213119506835938 + ], + [ + "▁pre", + -9.214103698730469 + ], + [ + "▁organization", + -9.215381622314451 + ], + [ + "▁loss", + -9.215450286865234 + ], + [ + "▁hot", + -9.21657371520996 + ], + [ + "▁hold", + -9.21741008758545 + ], + [ + "▁apply", + -9.219775199890137 + ], + [ + "time", + -9.222823143005373 + ], + [ + "▁answer", + -9.223997116088867 + ], + [ + "▁O", + -9.225193977355955 + ], + [ + "▁International", + -9.225231170654297 + ], + [ + "▁vehicle", + -9.225537300109863 + ], + [ + "▁India", + -9.22681999206543 + ], + [ + "▁cases", + -9.228256225585938 + ], + [ + "▁national", + -9.228574752807615 + ], + [ + "▁successful", + -9.228918075561523 + ], + [ + "▁saw", + -9.230380058288574 + ], + [ + "▁50", + -9.231193542480469 + ], + [ + "▁myself", + -9.23149871826172 + ], + [ + "▁friend", + -9.231599807739258 + ], + [ + "c", + -9.232640266418455 + ], + [ + "▁photo", + -9.233963966369627 + ], + [ + "▁interesting", + -9.234264373779297 + ], + [ + "▁posted", + -9.234309196472168 + ], + [ + "R", + -9.235191345214844 + ], + [ + "▁machine", + -9.23708152770996 + ], + [ + "▁written", + -9.237899780273438 + ], + [ + "▁decided", + -9.23883819580078 + ], + [ + "▁applications", + -9.23983097076416 + ], + [ + "▁interested", + -9.2401762008667 + ], + [ + "▁commercial", + -9.240805625915527 + ], + [ + "▁fresh", + -9.240854263305664 + ], + [ + "▁employees", + -9.241742134094238 + ], + [ + "▁legal", + -9.243992805480955 + ], + [ + "▁lost", + -9.244497299194336 + ], + [ + "▁West", + -9.24544906616211 + ], + [ + "▁goes", + -9.24565315246582 + ], + [ + "▁London", + -9.246331214904783 + ], + [ + "▁opportunities", + -9.247490882873535 + ], + [ + "▁moment", + -9.25163745880127 + ], + [ + "▁word", + -9.252359390258787 + ], + [ + "▁remember", + -9.254194259643556 + ], + [ + "▁Christmas", + -9.254562377929688 + ], + [ + "▁client", + -9.25473403930664 + ], + [ + "▁H", + -9.25647258758545 + ], + [ + "▁tool", + -9.257564544677734 + ], + [ + "▁spend", + -9.258164405822754 + ], + [ + "▁Of", + -9.258246421813965 + ], + [ + "▁TV", + -9.25828456878662 + ], + [ + ".”", + -9.262378692626951 + ], + [ + "▁25", + -9.262384414672852 + ], + [ + "▁watch", + -9.263062477111816 + ], + [ + "▁developed", + -9.263222694396973 + ], + [ + "▁June", + -9.263778686523438 + ], + [ + "▁High", + -9.26442813873291 + ], + [ + "▁construction", + -9.26449966430664 + ], + [ + "▁English", + -9.266347885131836 + ], + [ + "On", + -9.26873779296875 + ], + [ + "▁green", + -9.26896858215332 + ], + [ + "l", + -9.269753456115724 + ], + [ + "▁offering", + -9.27163028717041 + ], + [ + "▁wonderful", + -9.272048950195312 + ], + [ + "▁hit", + -9.272587776184082 + ], + [ + "▁16", + -9.273627281188965 + ], + [ + "▁sites", + -9.27378749847412 + ], + [ + "▁Friday", + -9.27441120147705 + ], + [ + "–", + -9.276825904846191 + ], + [ + "All", + -9.278800010681152 + ], + [ + "▁healthy", + -9.279529571533203 + ], + [ + "▁section", + -9.279763221740724 + ], + [ + "▁24", + -9.281908988952637 + ], + [ + "▁goals", + -9.282225608825684 + ], + [ + "▁18", + -9.284729957580566 + ], + [ + "▁Just", + -9.285815238952637 + ], + [ + "▁heat", + -9.28666877746582 + ], + [ + "My", + -9.28768253326416 + ], + [ + "▁Facebook", + -9.289240837097168 + ], + [ + "▁regular", + -9.289523124694824 + ], + [ + "At", + -9.294232368469238 + ], + [ + "▁groups", + 
-9.29566478729248 + ], + [ + "▁San", + -9.297327995300291 + ], + [ + "▁begin", + -9.298177719116213 + ], + [ + "▁expected", + -9.298547744750977 + ], + [ + "▁starting", + -9.298710823059082 + ], + [ + "▁wish", + -9.300169944763184 + ], + [ + "▁base", + -9.301396369934082 + ], + [ + "▁moving", + -9.301743507385254 + ], + [ + "▁began", + -9.301767349243164 + ], + [ + "▁subject", + -9.301849365234377 + ], + [ + "▁hotel", + -9.302014350891112 + ], + [ + "▁growing", + -9.302616119384766 + ], + [ + "F", + -9.302973747253418 + ], + [ + "▁China", + -9.303224563598633 + ], + [ + "▁significant", + -9.304999351501465 + ], + [ + "▁fine", + -9.305570602416992 + ], + [ + "en", + -9.305706977844238 + ], + [ + "://", + -9.305923461914062 + ], + [ + "▁co", + -9.306883811950684 + ], + [ + "▁ground", + -9.30872631072998 + ], + [ + "▁2019", + -9.30994987487793 + ], + [ + "▁hear", + -9.31001091003418 + ], + [ + "▁hands", + -9.310657501220703 + ], + [ + "▁thinking", + -9.310911178588867 + ], + [ + "▁quick", + -9.310988426208496 + ], + [ + "▁screen", + -9.311211585998535 + ], + [ + "▁investment", + -9.311466217041016 + ], + [ + "▁Inc", + -9.313434600830078 + ], + [ + "▁towards", + -9.313884735107422 + ], + [ + "L", + -9.313899993896484 + ], + [ + "▁land", + -9.31560516357422 + ], + [ + "!!", + -9.315702438354492 + ], + [ + "▁sign", + -9.319091796875 + ], + [ + "▁entry", + -9.3204984664917 + ], + [ + "▁guide", + -9.32083511352539 + ], + [ + "▁pain", + -9.321303367614746 + ], + [ + "▁nature", + -9.321732521057127 + ], + [ + "▁grow", + -9.321905136108398 + ], + [ + "▁Then", + -9.32273292541504 + ], + [ + "▁limited", + -9.323657989501951 + ], + [ + "▁fall", + -9.324983596801758 + ], + [ + "▁families", + -9.325305938720703 + ], + [ + "▁recommend", + -9.325501441955566 + ], + [ + "▁sometimes", + -9.325647354125977 + ], + [ + "▁Service", + -9.325807571411133 + ], + [ + "▁...", + -9.326738357543944 + ], + [ + "▁became", + -9.326862335205078 + ], + [ + "▁platform", + -9.32953643798828 + ], + [ + "▁positive", + -9.330193519592283 + ], + [ + "▁Group", + -9.331836700439451 + ], + [ + "▁language", + -9.333261489868164 + ], + [ + "▁places", + -9.334182739257812 + ], + [ + "▁Mr", + -9.33748722076416 + ], + [ + "▁definitely", + -9.338895797729492 + ], + [ + "▁14", + -9.339701652526855 + ], + [ + "▁touch", + -9.341796875 + ], + [ + "▁experienced", + -9.34355926513672 + ], + [ + "▁January", + -9.345343589782717 + ], + [ + "▁hour", + -9.346231460571287 + ], + [ + "▁exactly", + -9.346713066101074 + ], + [ + "b", + -9.34726619720459 + ], + [ + "▁bed", + -9.347460746765137 + ], + [ + "▁individuals", + -9.348721504211426 + ], + [ + "▁cool", + -9.34878635406494 + ], + [ + "▁wedding", + -9.3493070602417 + ], + [ + "▁President", + -9.34974479675293 + ], + [ + "▁wood", + -9.350672721862791 + ], + [ + "▁serve", + -9.35133934020996 + ], + [ + "▁stock", + -9.352561950683594 + ], + [ + "▁benefit", + -9.352648735046388 + ], + [ + "▁stand", + -9.35302734375 + ], + [ + "▁shop", + -9.353446006774902 + ], + [ + "▁gift", + -9.35414981842041 + ], + [ + "▁internet", + -9.35609245300293 + ], + [ + "▁message", + -9.357561111450195 + ], + [ + "▁term", + -9.357789039611816 + ], + [ + "One", + -9.357958793640137 + ], + [ + "▁situation", + -9.357965469360352 + ], + [ + "▁relationship", + -9.358084678649902 + ], + [ + "▁bar", + -9.358241081237791 + ], + [ + "▁released", + -9.358471870422363 + ], + [ + "▁Free", + -9.358901023864746 + ], + [ + "▁pick", + -9.359139442443848 + ], + [ + "▁easier", + -9.359533309936523 + ], + [ + "▁break", + -9.359766006469728 + 
], + [ + "▁dog", + -9.360187530517578 + ], + [ + "▁pictures", + -9.360305786132812 + ], + [ + "▁effect", + -9.361112594604492 + ], + [ + "▁published", + -9.36138153076172 + ], + [ + "▁Health", + -9.363022804260254 + ], + [ + "▁understanding", + -9.363285064697266 + ], + [ + "▁advice", + -9.363608360290527 + ], + [ + "▁damage", + -9.364266395568848 + ], + [ + "▁ones", + -9.364673614501951 + ], + [ + "▁join", + -9.368640899658203 + ], + [ + "▁plant", + -9.368759155273438 + ], + [ + "▁July", + -9.370817184448242 + ], + [ + "▁Also", + -9.37125015258789 + ], + [ + "▁speed", + -9.3717041015625 + ], + [ + "▁California", + -9.37186336517334 + ], + [ + "▁match", + -9.37197208404541 + ], + [ + "▁physical", + -9.371978759765623 + ], + [ + "of", + -9.372115135192873 + ], + [ + "▁loved", + -9.372647285461426 + ], + [ + "▁--", + -9.373116493225098 + ], + [ + "▁fire", + -9.374103546142578 + ], + [ + "▁custom", + -9.374317169189451 + ], + [ + "▁Department", + -9.376538276672363 + ], + [ + "▁former", + -9.377802848815918 + ], + [ + "▁request", + -9.377985000610352 + ], + [ + "an", + -9.37818717956543 + ], + [ + "▁region", + -9.38011360168457 + ], + [ + "▁death", + -9.381324768066406 + ], + [ + "▁eye", + -9.381951332092283 + ], + [ + "▁cash", + -9.382421493530272 + ], + [ + "▁2017", + -9.382437705993652 + ], + [ + "▁Saturday", + -9.383275985717772 + ], + [ + "▁V", + -9.383785247802734 + ], + [ + "▁reduce", + -9.3840913772583 + ], + [ + "▁highest", + -9.38424301147461 + ], + [ + "▁previous", + -9.38568115234375 + ], + [ + "st", + -9.385714530944824 + ], + [ + "▁furniture", + -9.385736465454102 + ], + [ + "▁seem", + -9.385844230651855 + ], + [ + "▁wait", + -9.38609218597412 + ], + [ + "After", + -9.388423919677734 + ], + [ + "▁expect", + -9.389175415039062 + ], + [ + "▁September", + -9.38943099975586 + ], + [ + "▁weekend", + -9.389897346496582 + ], + [ + "▁offered", + -9.39202117919922 + ], + [ + "▁considered", + -9.39202880859375 + ], + [ + "▁Sunday", + -9.392271995544434 + ], + [ + "5", + -9.392281532287598 + ], + [ + "▁round", + -9.392657279968262 + ], + [ + "▁glass", + -9.39278507232666 + ], + [ + "▁X", + -9.395626068115234 + ], + [ + "▁difference", + -9.396002769470217 + ], + [ + "▁stage", + -9.396950721740724 + ], + [ + "▁budget", + -9.39728832244873 + ], + [ + "▁October", + -9.39906406402588 + ], + [ + "▁note", + -9.39993667602539 + ], + [ + "▁text", + -9.401155471801758 + ], + [ + "▁feeling", + -9.403990745544434 + ], + [ + "▁W", + -9.407418251037598 + ], + [ + "▁deep", + -9.40750789642334 + ], + [ + "▁method", + -9.411478996276855 + ], + [ + "▁stories", + -9.413996696472168 + ], + [ + "▁led", + -9.41649055480957 + ], + [ + "▁comfortable", + -9.417373657226562 + ], + [ + "▁helping", + -9.4175386428833 + ], + [ + "▁pages", + -9.418639183044434 + ], + [ + "▁baby", + -9.41962718963623 + ], + [ + "▁spent", + -9.42013168334961 + ], + [ + "▁sell", + -9.421378135681152 + ], + [ + "▁finish", + -9.422001838684082 + ], + [ + "▁delivery", + -9.422094345092772 + ], + [ + "▁pressure", + -9.42214012145996 + ], + [ + "▁largest", + -9.422673225402832 + ], + [ + "▁College", + -9.422943115234377 + ], + [ + "▁schools", + -9.42410659790039 + ], + [ + "▁active", + -9.424616813659668 + ], + [ + "▁rates", + -9.424646377563477 + ], + [ + "▁felt", + -9.425179481506348 + ], + [ + "▁setting", + -9.425440788269045 + ], + [ + "▁devices", + -9.42546272277832 + ], + [ + "▁supply", + -9.427017211914062 + ], + [ + "▁feet", + -9.42717170715332 + ], + [ + "▁culture", + -9.427889823913574 + ], + [ + "p", + -9.428995132446287 + ], + [ + 
"▁overall", + -9.429547309875488 + ], + [ + "▁display", + -9.429656982421877 + ], + [ + "▁analysis", + -9.42977809906006 + ], + [ + "▁activity", + -9.431283950805664 + ], + [ + "@", + -9.433197021484377 + ], + [ + "▁existing", + -9.433402061462402 + ], + [ + "▁Most", + -9.433568954467772 + ], + [ + "▁ideal", + -9.433656692504885 + ], + [ + "▁truly", + -9.436214447021484 + ], + [ + "▁paid", + -9.436792373657228 + ], + [ + "▁Services", + -9.437063217163086 + ], + [ + "▁happen", + -9.43779754638672 + ], + [ + "▁creative", + -9.43816375732422 + ], + [ + "▁degree", + -9.438589096069336 + ], + [ + "▁double", + -9.43886375427246 + ], + [ + "▁charge", + -9.438940048217772 + ], + [ + "▁purpose", + -9.438961029052734 + ], + [ + "▁movie", + -9.440091133117676 + ], + [ + "▁Co", + -9.440963745117188 + ], + [ + "▁repair", + -9.442566871643066 + ], + [ + "N", + -9.443626403808594 + ], + [ + "▁wrong", + -9.444265365600586 + ], + [ + "▁selection", + -9.444687843322754 + ], + [ + "▁weather", + -9.444908142089844 + ], + [ + "▁Windows", + -9.444998741149902 + ], + [ + "▁avoid", + -9.445609092712402 + ], + [ + "▁sent", + -9.447586059570312 + ], + [ + "▁response", + -9.448476791381836 + ], + [ + "▁December", + -9.448850631713867 + ], + [ + "▁regarding", + -9.449363708496094 + ], + [ + "▁November", + -9.450337409973145 + ], + [ + "▁Street", + -9.450349807739258 + ], + [ + "▁basic", + -9.451077461242676 + ], + [ + "▁completed", + -9.45240306854248 + ], + [ + "▁Don", + -9.452458381652832 + ], + [ + "O", + -9.454659461975098 + ], + [ + "▁college", + -9.455010414123535 + ], + [ + "▁wasn", + -9.45542335510254 + ], + [ + "it", + -9.455877304077148 + ], + [ + "▁firm", + -9.456120491027832 + ], + [ + "▁rights", + -9.456374168395996 + ], + [ + "▁played", + -9.45673656463623 + ], + [ + "▁gave", + -9.457432746887209 + ], + [ + "▁Business", + -9.460956573486328 + ], + [ + "▁tried", + -9.461268424987791 + ], + [ + "▁lots", + -9.463912010192873 + ], + [ + "▁|", + -9.46494483947754 + ], + [ + "▁beginning", + -9.465808868408203 + ], + [ + "▁13", + -9.46591091156006 + ], + [ + "▁files", + -9.467326164245604 + ], + [ + "▁coffee", + -9.467455863952637 + ], + [ + "able", + -9.46786880493164 + ], + [ + "▁estate", + -9.467877388000488 + ], + [ + "▁achieve", + -9.470016479492188 + ], + [ + "▁function", + -9.470524787902832 + ], + [ + "That", + -9.471174240112305 + ], + [ + "▁condition", + -9.471182823181152 + ], + [ + "▁window", + -9.47199535369873 + ], + [ + "▁Best", + -9.472371101379396 + ], + [ + "▁More", + -9.472900390625 + ], + [ + "▁Black", + -9.47372817993164 + ], + [ + "▁effort", + -9.47416877746582 + ], + [ + "▁essential", + -9.474902153015137 + ], + [ + "▁direct", + -9.47547435760498 + ], + [ + "▁basis", + -9.476029396057127 + ], + [ + "▁East", + -9.476155281066896 + ], + [ + "▁player", + -9.477067947387695 + ], + [ + "®", + -9.477227210998535 + ], + [ + "▁2016", + -9.478067398071287 + ], + [ + "▁produce", + -9.478964805603027 + ], + [ + "▁annual", + -9.48001480102539 + ], + [ + "▁unit", + -9.480341911315918 + ], + [ + "▁select", + -9.481358528137209 + ], + [ + "▁Or", + -9.481724739074709 + ], + [ + "▁beyond", + -9.482691764831545 + ], + [ + "▁heard", + -9.483516693115234 + ], + [ + "▁trade", + -9.483901977539062 + ], + [ + "▁brought", + -9.487058639526367 + ], + [ + "▁blood", + -9.48835563659668 + ], + [ + "▁challenge", + -9.488667488098145 + ], + [ + "▁gold", + -9.488919258117676 + ], + [ + "▁eat", + -9.490545272827148 + ], + [ + "▁court", + -9.492839813232422 + ], + [ + "▁homes", + -9.493577003479004 + ], + [ + "▁Be", + 
-9.493727684020996 + ], + [ + "▁Europe", + -9.494354248046877 + ], + [ + "▁40", + -9.49464225769043 + ], + [ + "h", + -9.495165824890137 + ], + [ + "▁certainly", + -9.495409965515137 + ], + [ + "▁Internet", + -9.495996475219728 + ], + [ + "▁responsible", + -9.49778938293457 + ], + [ + "▁protection", + -9.49825954437256 + ], + [ + "▁bottom", + -9.49835968017578 + ], + [ + "▁tour", + -9.498982429504396 + ], + [ + "▁payment", + -9.499096870422363 + ], + [ + "▁button", + -9.499442100524902 + ], + [ + "▁gets", + -9.500204086303713 + ], + [ + "▁finally", + -9.500773429870604 + ], + [ + "▁Many", + -9.500823974609377 + ], + [ + "▁pieces", + -9.502817153930664 + ], + [ + "▁protect", + -9.502911567687988 + ], + [ + "▁uses", + -9.506143569946287 + ], + [ + "▁complex", + -9.509228706359863 + ], + [ + "as", + -9.51053524017334 + ], + [ + "▁garden", + -9.511312484741213 + ], + [ + "▁First", + -9.513148307800291 + ], + [ + "▁journey", + -9.5132474899292 + ], + [ + "▁announced", + -9.51373291015625 + ], + [ + "▁political", + -9.514009475708008 + ], + [ + "▁European", + -9.51457405090332 + ], + [ + "▁blue", + -9.515680313110352 + ], + [ + "▁Monday", + -9.516149520874023 + ], + [ + "▁Australia", + -9.516664505004885 + ], + [ + "▁Canada", + -9.516871452331545 + ], + [ + "▁surface", + -9.517046928405762 + ], + [ + "▁plus", + -9.520615577697754 + ], + [ + "▁cleaning", + -9.523825645446776 + ], + [ + "▁experts", + -9.523940086364746 + ], + [ + "▁useful", + -9.524060249328612 + ], + [ + "▁mother", + -9.524256706237791 + ], + [ + "▁economic", + -9.525444984436035 + ], + [ + "▁immediately", + -9.52652645111084 + ], + [ + "▁served", + -9.527725219726562 + ], + [ + "▁gas", + -9.528145790100098 + ], + [ + "▁Get", + -9.528911590576172 + ], + [ + "▁cold", + -9.529155731201172 + ], + [ + "▁extremely", + -9.52927589416504 + ], + [ + "▁engine", + -9.530349731445312 + ], + [ + "▁August", + -9.53062629699707 + ], + [ + "▁Great", + -9.530994415283203 + ], + [ + "▁character", + -9.531364440917969 + ], + [ + "▁deliver", + -9.532485008239746 + ], + [ + "▁powerful", + -9.533278465270996 + ], + [ + "▁Even", + -9.534539222717283 + ], + [ + "▁dry", + -9.534649848937988 + ], + [ + "▁warm", + -9.535271644592283 + ], + [ + "▁thanks", + -9.536304473876951 + ], + [ + "▁nearly", + -9.536701202392578 + ], + [ + "▁teams", + -9.537108421325684 + ], + [ + "▁cards", + -9.537829399108888 + ], + [ + "▁strategy", + -9.539118766784668 + ], + [ + "▁effects", + -9.539161682128906 + ], + [ + "▁schedule", + -9.539876937866213 + ], + [ + "▁reasons", + -9.541766166687012 + ], + [ + "am", + -9.542279243469238 + ], + [ + "▁eyes", + -9.542311668395996 + ], + [ + "▁Can", + -9.542736053466797 + ], + [ + "▁tips", + -9.542884826660156 + ], + [ + "▁driving", + -9.543195724487305 + ], + [ + "▁camera", + -9.543204307556152 + ], + [ + "▁appropriate", + -9.546771049499512 + ], + [ + "▁item", + -9.547248840332031 + ], + [ + "▁patient", + -9.547982215881348 + ], + [ + "▁particularly", + -9.549318313598633 + ], + [ + "▁Association", + -9.550564765930176 + ], + [ + "▁holiday", + -9.55191135406494 + ], + [ + "▁pool", + -9.552292823791504 + ], + [ + "▁sports", + -9.554478645324709 + ], + [ + "▁February", + -9.555649757385254 + ], + [ + "▁professionals", + -9.55609130859375 + ], + [ + "▁steel", + -9.55723476409912 + ], + [ + "▁campaign", + -9.560546875 + ], + [ + "▁sharing", + -9.56116008758545 + ], + [ + "▁helped", + -9.561368942260742 + ], + [ + "▁wine", + -9.561784744262695 + ], + [ + "▁changed", + -9.56346607208252 + ], + [ + "▁multi", + -9.563505172729492 + ], + [ + 
"▁Each", + -9.56357192993164 + ], + [ + "▁dedicated", + -9.565587043762209 + ], + [ + "▁stuff", + -9.565796852111816 + ], + [ + "▁steps", + -9.566490173339844 + ], + [ + "▁White", + -9.568072319030762 + ], + [ + "▁turned", + -9.568376541137695 + ], + [ + "▁shopping", + -9.57016658782959 + ], + [ + "▁finished", + -9.570771217346191 + ], + [ + "▁Company", + -9.572437286376951 + ], + [ + "▁maybe", + -9.572532653808594 + ], + [ + "▁structure", + -9.574026107788086 + ], + [ + "▁reviews", + -9.574248313903809 + ], + [ + "Do", + -9.57544231414795 + ], + [ + "▁title", + -9.576057434082031 + ], + [ + "▁super", + -9.577320098876951 + ], + [ + "▁increased", + -9.57791519165039 + ], + [ + "▁Art", + -9.578374862670898 + ], + [ + "▁police", + -9.578409194946287 + ], + [ + "▁wear", + -9.578807830810549 + ], + [ + "▁pass", + -9.579103469848633 + ], + [ + "▁miles", + -9.57939910888672 + ], + [ + "▁disease", + -9.57973861694336 + ], + [ + "▁lines", + -9.579766273498535 + ], + [ + "▁Design", + -9.580902099609377 + ], + [ + "▁official", + -9.581975936889648 + ], + [ + "co", + -9.582801818847656 + ], + [ + "▁ahead", + -9.583975791931152 + ], + [ + "▁reports", + -9.584073066711426 + ], + [ + "▁sold", + -9.58436107635498 + ], + [ + "▁welcome", + -9.584776878356934 + ], + [ + "”.", + -9.584975242614746 + ], + [ + "▁science", + -9.585053443908691 + ], + [ + "us", + -9.58577823638916 + ], + [ + "▁race", + -9.586462020874023 + ], + [ + "▁models", + -9.586918830871582 + ], + [ + "▁winter", + -9.586928367614746 + ], + [ + "▁Council", + -9.587838172912598 + ], + [ + "▁Office", + -9.58836841583252 + ], + [ + "▁prevent", + -9.5900297164917 + ], + [ + "▁numbers", + -9.590102195739746 + ], + [ + "H", + -9.590798377990724 + ], + [ + "▁church", + -9.591985702514648 + ], + [ + "▁associated", + -9.592263221740724 + ], + [ + "▁woman", + -9.593303680419922 + ], + [ + "▁print", + -9.593550682067873 + ], + [ + "mm", + -9.59454345703125 + ], + [ + "▁Let", + -9.59486961364746 + ], + [ + "V", + -9.596312522888184 + ], + [ + "▁assist", + -9.5963716506958 + ], + [ + "▁un", + -9.596570014953612 + ], + [ + "▁update", + -9.59666633605957 + ], + [ + "▁owner", + -9.596931457519531 + ], + [ + "▁larger", + -9.59840202331543 + ], + [ + "▁alone", + -9.599892616271973 + ], + [ + "▁son", + -9.600626945495604 + ], + [ + "These", + -9.600630760192873 + ], + [ + "▁Once", + -9.600937843322754 + ], + [ + "▁band", + -9.60177516937256 + ], + [ + "▁Air", + -9.602371215820312 + ], + [ + "▁knew", + -9.604732513427734 + ], + [ + "▁soft", + -9.604869842529297 + ], + [ + "▁allowed", + -9.605019569396973 + ], + [ + "▁saying", + -9.606547355651855 + ], + [ + "▁views", + -9.60719108581543 + ], + [ + "▁technical", + -9.607715606689451 + ], + [ + "▁secure", + -9.608376502990724 + ], + [ + "▁colors", + -9.609798431396484 + ], + [ + "▁established", + -9.61026096343994 + ], + [ + "▁friendly", + -9.610455513000488 + ], + [ + "▁21", + -9.611369132995604 + ], + [ + "▁session", + -9.611504554748535 + ], + [ + "▁manage", + -9.611647605895996 + ], + [ + "▁traffic", + -9.611897468566896 + ], + [ + "▁prior", + -9.612889289855955 + ], + [ + "▁guests", + -9.613263130187988 + ], + [ + "▁covered", + -9.61362361907959 + ], + [ + "old", + -9.61380386352539 + ], + [ + "▁middle", + -9.614612579345703 + ], + [ + "▁methods", + -9.61493682861328 + ], + [ + "▁greater", + -9.61565113067627 + ], + [ + "▁partner", + -9.616110801696776 + ], + [ + "▁challenges", + -9.616345405578612 + ], + [ + "W", + -9.616400718688965 + ], + [ + "▁advanced", + -9.618142127990724 + ], + [ + "▁facilities", + 
-9.618330955505373 + ], + [ + "▁requires", + -9.619723320007324 + ], + [ + "▁Thanks", + -9.620017051696776 + ], + [ + "▁maintain", + -9.620308876037598 + ], + [ + "▁carry", + -9.620874404907228 + ], + [ + "▁organizations", + -9.621511459350586 + ], + [ + "▁rooms", + -9.623456001281738 + ], + [ + "pm", + -9.623662948608398 + ], + [ + "▁remove", + -9.623964309692385 + ], + [ + "▁spot", + -9.624571800231934 + ], + [ + "While", + -9.625238418579102 + ], + [ + "▁handle", + -9.625520706176758 + ], + [ + "▁income", + -9.625614166259766 + ], + [ + "▁normal", + -9.625862121582031 + ], + [ + "▁efforts", + -9.626192092895508 + ], + [ + "▁enter", + -9.628043174743652 + ], + [ + "▁standards", + -9.628570556640623 + ], + [ + "▁advantage", + -9.628853797912598 + ], + [ + "▁song", + -9.629796028137209 + ], + [ + "▁trust", + -9.629867553710938 + ], + [ + "▁serious", + -9.6300048828125 + ], + [ + "▁moved", + -9.630642890930176 + ], + [ + "▁states", + -9.63071060180664 + ], + [ + "▁connection", + -9.631019592285156 + ], + [ + "day", + -9.631065368652344 + ], + [ + "▁onto", + -9.631653785705566 + ], + [ + "▁maintenance", + -9.63178253173828 + ], + [ + "▁installation", + -9.63440990447998 + ], + [ + "is", + -9.635242462158203 + ], + [ + "▁communication", + -9.635458946228027 + ], + [ + "▁listed", + -9.637130737304688 + ], + [ + "nd", + -9.637298583984377 + ], + [ + "▁finding", + -9.63781452178955 + ], + [ + "Please", + -9.63844871520996 + ], + [ + "▁jobs", + -9.639135360717772 + ], + [ + "▁club", + -9.639554023742676 + ], + [ + "No", + -9.640379905700684 + ], + [ + "▁reported", + -9.64065933227539 + ], + [ + "▁park", + -9.641616821289062 + ], + [ + "▁although", + -9.642623901367188 + ], + [ + "▁opening", + -9.6437406539917 + ], + [ + "▁outdoor", + -9.643854141235352 + ], + [ + "▁shown", + -9.64437198638916 + ], + [ + "▁shape", + -9.64523696899414 + ], + [ + "▁aren", + -9.645596504211426 + ], + [ + "▁met", + -9.646209716796877 + ], + [ + "▁bathroom", + -9.646653175354004 + ], + [ + "▁earlier", + -9.648459434509276 + ], + [ + "▁bank", + -9.649700164794922 + ], + [ + "▁2015", + -9.650338172912598 + ], + [ + "▁tree", + -9.651007652282717 + ], + [ + "▁metal", + -9.651537895202637 + ], + [ + "▁studies", + -9.652143478393556 + ], + [ + "▁audience", + -9.652231216430664 + ], + [ + "▁Since", + -9.65353775024414 + ], + [ + "▁remain", + -9.653741836547852 + ], + [ + "▁plenty", + -9.654547691345217 + ], + [ + "▁background", + -9.654820442199709 + ], + [ + "▁17", + -9.655872344970703 + ], + [ + "▁developing", + -9.656001091003418 + ], + [ + "▁dining", + -9.65670394897461 + ], + [ + "▁shared", + -9.657160758972168 + ], + [ + "▁discuss", + -9.657547950744627 + ], + [ + "▁Jesus", + -9.661703109741213 + ], + [ + "▁cancer", + -9.662184715270996 + ], + [ + "▁rules", + -9.66236686706543 + ], + [ + "▁explore", + -9.66242504119873 + ], + [ + "▁shall", + -9.663179397583008 + ], + [ + "▁appear", + -9.6633939743042 + ], + [ + "▁dark", + -9.663690567016602 + ], + [ + "▁loan", + -9.664766311645508 + ], + [ + "▁competition", + -9.6648588180542 + ], + [ + "▁teaching", + -9.666093826293944 + ], + [ + "term", + -9.668041229248049 + ], + [ + "▁restaurant", + -9.668500900268556 + ], + [ + "▁Are", + -9.670561790466309 + ], + [ + "▁mix", + -9.67104721069336 + ], + [ + "▁primary", + -9.67132568359375 + ], + [ + "▁evening", + -9.673425674438477 + ], + [ + "▁critical", + -9.673566818237305 + ], + [ + "▁compared", + -9.674169540405272 + ], + [ + "▁driver", + -9.67489242553711 + ], + [ + "▁host", + -9.675336837768556 + ], + [ + "▁Red", + 
-9.675418853759766 + ], + [ + "▁biggest", + -9.67615032196045 + ], + [ + "▁balance", + -9.676202774047852 + ], + [ + "6", + -9.67655086517334 + ], + [ + "▁Club", + -9.67687702178955 + ], + [ + "▁visitors", + -9.67752742767334 + ], + [ + "They", + -9.678492546081545 + ], + [ + "▁owners", + -9.679210662841797 + ], + [ + "▁billion", + -9.679349899291992 + ], + [ + "▁born", + -9.679458618164062 + ], + [ + "▁communities", + -9.680331230163574 + ], + [ + "▁leaders", + -9.681350708007812 + ], + [ + "▁Board", + -9.682011604309082 + ], + [ + "▁keeping", + -9.68267822265625 + ], + [ + "▁wants", + -9.683038711547852 + ], + [ + "▁named", + -9.683388710021973 + ], + [ + "▁fans", + -9.684121131896973 + ], + [ + "▁fish", + -9.684432983398438 + ], + [ + "▁cars", + -9.684661865234377 + ], + [ + "▁contains", + -9.684975624084473 + ], + [ + "▁followed", + -9.685798645019531 + ], + [ + "▁determine", + -9.687294960021973 + ], + [ + "▁statement", + -9.687559127807615 + ], + [ + "▁plants", + -9.688888549804688 + ], + [ + "▁fill", + -9.689062118530272 + ], + [ + "▁techniques", + -9.689952850341797 + ], + [ + "▁looked", + -9.690873146057127 + ], + [ + "▁Washington", + -9.691383361816406 + ], + [ + "Now", + -9.69294548034668 + ], + [ + "▁stress", + -9.6936674118042 + ], + [ + "▁Pro", + -9.694134712219238 + ], + [ + "▁classes", + -9.694393157958984 + ], + [ + "▁properties", + -9.695011138916016 + ], + [ + "▁experiences", + -9.695246696472168 + ], + [ + "▁notice", + -9.695459365844728 + ], + [ + "▁seeing", + -9.695502281188965 + ], + [ + "▁wife", + -9.6957368850708 + ], + [ + "▁heavy", + -9.695806503295898 + ], + [ + "▁beauty", + -9.69584846496582 + ], + [ + "▁drop", + -9.696200370788574 + ], + [ + "+", + -9.697113990783691 + ], + [ + "▁mission", + -9.697712898254396 + ], + [ + "▁sleep", + -9.697850227355955 + ], + [ + "▁comfort", + -9.698175430297852 + ], + [ + "▁director", + -9.6996431350708 + ], + [ + "ness", + -9.701571464538574 + ], + [ + "▁ball", + -9.701859474182127 + ], + [ + "▁seven", + -9.702041625976562 + ], + [ + "▁target", + -9.702289581298828 + ], + [ + "▁David", + -9.70233917236328 + ], + [ + "▁Management", + -9.702488899230955 + ], + [ + "free", + -9.70263957977295 + ], + [ + "▁expert", + -9.702898979187012 + ], + [ + "▁talking", + -9.702922821044922 + ], + [ + "▁#", + -9.703628540039062 + ], + [ + "▁Road", + -9.704426765441896 + ], + [ + "▁comments", + -9.705025672912598 + ], + [ + "▁coverage", + -9.705472946166992 + ], + [ + "z", + -9.705754280090332 + ], + [ + "▁voice", + -9.70586395263672 + ], + [ + "▁classic", + -9.7064790725708 + ], + [ + "▁French", + -9.706490516662598 + ], + [ + "▁testing", + -9.707233428955078 + ], + [ + "▁beach", + -9.707477569580078 + ], + [ + "▁task", + -9.707611083984377 + ], + [ + "▁adding", + -9.707649230957031 + ], + [ + "▁0", + -9.70770263671875 + ], + [ + "▁fan", + -9.707709312438965 + ], + [ + "▁author", + -9.707965850830078 + ], + [ + "▁buying", + -9.708484649658203 + ], + [ + "▁links", + -9.709518432617188 + ], + [ + "▁shot", + -9.709891319274902 + ], + [ + "▁haven", + -9.71022605895996 + ], + [ + "▁act", + -9.710594177246094 + ], + [ + "▁produced", + -9.711122512817385 + ], + [ + "▁Thursday", + -9.711284637451172 + ], + [ + "▁60", + -9.7116117477417 + ], + [ + "▁package", + -9.711685180664062 + ], + [ + "▁conference", + -9.712011337280272 + ], + [ + "org", + -9.7123441696167 + ], + [ + "▁correct", + -9.713985443115234 + ], + [ + "▁war", + -9.714902877807615 + ], + [ + "▁sun", + -9.71562957763672 + ], + [ + "▁IT", + -9.717309951782228 + ], + [ + "▁perform", + 
-9.717642784118652 + ], + [ + "▁speak", + -9.721288681030272 + ], + [ + "▁learned", + -9.72146987915039 + ], + [ + "▁letter", + -9.72204303741455 + ], + [ + "▁father", + -9.722103118896484 + ], + [ + "line", + -9.723265647888184 + ], + [ + "no", + -9.723795890808104 + ], + [ + "▁fee", + -9.724028587341309 + ], + [ + "▁demand", + -9.724392890930176 + ], + [ + "▁Why", + -9.725290298461914 + ], + [ + "▁famous", + -9.725381851196287 + ], + [ + "▁copy", + -9.725383758544922 + ], + [ + "▁Research", + -9.726136207580566 + ], + [ + "▁elements", + -9.726508140563965 + ], + [ + "▁evidence", + -9.726765632629396 + ], + [ + "▁generally", + -9.72691249847412 + ], + [ + "▁Apple", + -9.72790241241455 + ], + [ + "▁movement", + -9.72825050354004 + ], + [ + "▁British", + -9.728570938110352 + ], + [ + "▁spring", + -9.7289400100708 + ], + [ + "▁award", + -9.729060173034668 + ], + [ + "▁Make", + -9.729447364807127 + ], + [ + "▁connect", + -9.730429649353027 + ], + [ + "▁Institute", + -9.730724334716797 + ], + [ + "▁sweet", + -9.731188774108888 + ], + [ + "▁artist", + -9.731243133544922 + ], + [ + "▁Texas", + -9.732037544250488 + ], + [ + "▁De", + -9.733283996582031 + ], + [ + "▁couldn", + -9.735137939453123 + ], + [ + "▁sector", + -9.735323905944824 + ], + [ + "▁placed", + -9.737306594848633 + ], + [ + "▁parties", + -9.738622665405272 + ], + [ + "▁gain", + -9.7393798828125 + ], + [ + "▁prepared", + -9.73943328857422 + ], + [ + "▁selected", + -9.739680290222168 + ], + [ + "▁press", + -9.741368293762209 + ], + [ + "▁map", + -9.741464614868164 + ], + [ + "▁THE", + -9.742254257202148 + ], + [ + "▁residents", + -9.742650032043455 + ], + [ + "▁funds", + -9.742850303649902 + ], + [ + "▁waiting", + -9.743049621582031 + ], + [ + "▁designs", + -9.74375820159912 + ], + [ + "▁server", + -9.744004249572754 + ], + [ + "▁workers", + -9.744333267211914 + ], + [ + "▁Tuesday", + -9.74527072906494 + ], + [ + "▁whose", + -9.746014595031738 + ], + [ + "▁claim", + -9.746628761291504 + ], + [ + "▁exciting", + -9.747182846069336 + ], + [ + "by", + -9.747695922851562 + ], + [ + "▁himself", + -9.747797012329102 + ], + [ + "▁presented", + -9.7479829788208 + ], + [ + "An", + -9.748499870300291 + ], + [ + "▁perhaps", + -9.74867343902588 + ], + [ + "▁sort", + -9.75228786468506 + ], + [ + "▁combination", + -9.753460884094238 + ], + [ + "•", + -9.753462791442873 + ], + [ + "▁decide", + -9.75477695465088 + ], + [ + "▁exercise", + -9.755488395690918 + ], + [ + "▁bag", + -9.755518913269045 + ], + [ + "▁anti", + -9.755555152893066 + ], + [ + "u", + -9.755844116210938 + ], + [ + "▁Go", + -9.756141662597656 + ], + [ + "▁Thank", + -9.756390571594238 + ], + [ + "▁forget", + -9.757062911987305 + ], + [ + "▁relevant", + -9.7572603225708 + ], + [ + "▁capital", + -9.757465362548828 + ], + [ + "▁album", + -9.757500648498535 + ], + [ + "▁capacity", + -9.758272171020508 + ], + [ + "▁numerous", + -9.758685111999512 + ], + [ + "▁kept", + -9.75889778137207 + ], + [ + "▁figure", + -9.759246826171877 + ], + [ + "▁recipe", + -9.759810447692873 + ], + [ + "▁straight", + -9.760791778564451 + ], + [ + "▁gone", + -9.760801315307615 + ], + [ + "▁changing", + -9.762125015258787 + ], + [ + "▁thousands", + -9.762141227722168 + ], + [ + "▁affordable", + -9.762221336364746 + ], + [ + "▁operations", + -9.763029098510742 + ], + [ + "▁Green", + -9.76308250427246 + ], + [ + "▁+", + -9.764981269836426 + ], + [ + "▁forms", + -9.765085220336914 + ], + [ + "ie", + -9.765289306640623 + ], + [ + "X", + -9.765482902526855 + ], + [ + "▁strength", + -9.765612602233888 + ], + [ + 
"▁efficient", + -9.765913009643556 + ], + [ + "▁See", + -9.76671028137207 + ], + [ + "▁excited", + -9.769336700439451 + ], + [ + "Q", + -9.76966381072998 + ], + [ + "▁install", + -9.769927024841309 + ], + [ + "▁Church", + -9.76994514465332 + ], + [ + "▁bedroom", + -9.77025032043457 + ], + [ + "▁rich", + -9.770559310913086 + ], + [ + "▁essay", + -9.770681381225586 + ], + [ + "▁cross", + -9.771037101745604 + ], + [ + "▁Web", + -9.771636962890623 + ], + [ + "▁concept", + -9.771726608276367 + ], + [ + "▁Well", + -9.772619247436523 + ], + [ + "▁practices", + -9.77320384979248 + ], + [ + "▁husband", + -9.773404121398926 + ], + [ + "for", + -9.77342128753662 + ], + [ + "­", + -9.774024963378906 + ], + [ + "▁contract", + -9.774650573730469 + ], + [ + "▁Wednesday", + -9.77471923828125 + ], + [ + "▁operating", + -9.77505874633789 + ], + [ + "▁century", + -9.7753324508667 + ], + [ + "▁videos", + -9.775354385375977 + ], + [ + "▁Lake", + -9.775564193725586 + ], + [ + "▁lack", + -9.775681495666504 + ], + [ + "▁aware", + -9.776291847229004 + ], + [ + "▁solid", + -9.77642059326172 + ], + [ + "▁helpful", + -9.77679443359375 + ], + [ + "▁stone", + -9.777379989624023 + ], + [ + "▁separate", + -9.779024124145508 + ], + [ + "▁Her", + -9.779025077819824 + ], + [ + "▁General", + -9.779204368591309 + ], + [ + "▁street", + -9.77955722808838 + ], + [ + "▁older", + -9.779828071594238 + ], + [ + "▁taste", + -9.78081226348877 + ], + [ + "▁actual", + -9.780831336975098 + ], + [ + "▁sea", + -9.782020568847656 + ], + [ + "▁characters", + -9.78297233581543 + ], + [ + "▁leader", + -9.784523010253906 + ], + [ + "▁ingredients", + -9.786253929138184 + ], + [ + "▁dinner", + -9.787384033203123 + ], + [ + "▁Africa", + -9.78745460510254 + ], + [ + "▁therefore", + -9.787524223327637 + ], + [ + "▁selling", + -9.787714958190918 + ], + [ + "▁19", + -9.787808418273926 + ], + [ + "▁Check", + -9.789351463317873 + ], + [ + "▁proper", + -9.78954792022705 + ], + [ + "▁values", + -9.790157318115234 + ], + [ + "▁comment", + -9.790238380432127 + ], + [ + "▁Life", + -9.790535926818848 + ], + [ + "▁expensive", + -9.790797233581545 + ], + [ + "▁status", + -9.791051864624023 + ], + [ + "▁corporate", + -9.79209804534912 + ], + [ + "▁smaller", + -9.7924222946167 + ], + [ + "▁becoming", + -9.79245662689209 + ], + [ + "▁path", + -9.792954444885254 + ], + [ + "▁force", + -9.79338550567627 + ], + [ + "▁plastic", + -9.794053077697754 + ], + [ + "▁installed", + -9.794219970703123 + ], + [ + "▁artists", + -9.794279098510742 + ], + [ + "www", + -9.79526138305664 + ], + [ + "▁vision", + -9.795483589172363 + ], + [ + "▁menu", + -9.7962007522583 + ], + [ + "▁facility", + -9.796375274658203 + ], + [ + "▁expertise", + -9.797107696533203 + ], + [ + "▁La", + -9.79763126373291 + ], + [ + "▁fashion", + -9.797795295715332 + ], + [ + "▁population", + -9.797832489013672 + ], + [ + "▁fantastic", + -9.798367500305176 + ], + [ + "▁Chinese", + -9.798453330993652 + ], + [ + "▁River", + -9.799756050109863 + ], + [ + "▁Good", + -9.799918174743652 + ], + [ + "▁2014", + -9.800718307495115 + ], + [ + "7", + -9.801284790039062 + ], + [ + "▁transfer", + -9.801340103149414 + ], + [ + "▁identify", + -9.80187702178955 + ], + [ + "▁focused", + -9.802200317382812 + ], + [ + "▁factors", + -9.802252769470217 + ], + [ + "▁memory", + -9.802552223205566 + ], + [ + "▁minute", + -9.80327033996582 + ], + [ + "▁medium", + -9.803653717041016 + ], + [ + "▁core", + -9.805671691894531 + ], + [ + "▁opened", + -9.806583404541016 + ], + [ + "▁names", + -9.806706428527832 + ], + [ + "▁eight", + 
-9.80671501159668 + ], + [ + "Here", + -9.807106971740724 + ], + [ + "▁Florida", + -9.807785987854004 + ], + [ + "▁economy", + -9.807857513427734 + ], + [ + "▁Click", + -9.808161735534668 + ], + [ + "▁Director", + -9.808168411254885 + ], + [ + "▁flow", + -9.808924674987791 + ], + [ + "▁temperature", + -9.809189796447754 + ], + [ + "▁passed", + -9.809526443481444 + ], + [ + "▁progress", + -9.813366889953612 + ], + [ + "▁Court", + -9.814471244812012 + ], + [ + "▁train", + -9.815282821655272 + ], + [ + "▁president", + -9.81638240814209 + ], + [ + "▁ride", + -9.816941261291504 + ], + [ + "2.", + -9.817376136779783 + ], + [ + "▁reality", + -9.818071365356444 + ], + [ + "▁War", + -9.819402694702148 + ], + [ + "▁sugar", + -9.822135925292969 + ], + [ + "▁theme", + -9.822306632995604 + ], + [ + "▁Today", + -9.822609901428224 + ], + [ + "▁100%", + -9.82300090789795 + ], + [ + "▁showing", + -9.823929786682127 + ], + [ + "▁interior", + -9.82452392578125 + ], + [ + "▁Education", + -9.824673652648926 + ], + [ + "▁agree", + -9.824695587158203 + ], + [ + "▁awesome", + -9.826555252075195 + ], + [ + "▁independent", + -9.827253341674805 + ], + [ + "out", + -9.82758617401123 + ], + [ + "▁decisions", + -9.827872276306152 + ], + [ + "▁processes", + -9.828213691711426 + ], + [ + "▁lovely", + -9.828411102294922 + ], + [ + "▁society", + -9.82988166809082 + ], + [ + "J", + -9.83016586303711 + ], + [ + "▁Use", + -9.83072566986084 + ], + [ + "▁paint", + -9.83072566986084 + ], + [ + "8", + -9.83115005493164 + ], + [ + "▁Trump", + -9.83145809173584 + ], + [ + "▁military", + -9.831894874572754 + ], + [ + "▁ice", + -9.832497596740724 + ], + [ + "▁maximum", + -9.83267307281494 + ], + [ + "▁frame", + -9.832806587219238 + ], + [ + "▁Foundation", + -9.83436679840088 + ], + [ + "rd", + -9.83533763885498 + ], + [ + "▁windows", + -9.835516929626465 + ], + [ + "▁direction", + -9.83636474609375 + ], + [ + "▁documents", + -9.836482048034668 + ], + [ + "▁detail", + -9.838356971740724 + ], + [ + "▁runs", + -9.838677406311035 + ], + [ + "▁happens", + -9.838861465454102 + ], + [ + "▁Re", + -9.839279174804688 + ], + [ + "▁injury", + -9.839621543884276 + ], + [ + "▁Bay", + -9.840411186218262 + ], + [ + "▁courses", + -9.841236114501951 + ], + [ + "one", + -9.842029571533203 + ], + [ + "▁lose", + -9.842893600463867 + ], + [ + "▁bought", + -9.843059539794922 + ], + [ + "▁wrote", + -9.844070434570312 + ], + [ + "▁units", + -9.84422206878662 + ], + [ + "▁properly", + -9.844473838806152 + ], + [ + "▁Year", + -9.84484577178955 + ], + [ + "▁Online", + -9.845321655273438 + ], + [ + "▁enjoyed", + -9.845691680908203 + ], + [ + "▁spending", + -9.846301078796388 + ], + [ + "▁distance", + -9.846444129943848 + ], + [ + "▁treat", + -9.847392082214355 + ], + [ + "▁innovative", + -9.847457885742188 + ], + [ + "▁manager", + -9.848349571228027 + ], + [ + "▁happened", + -9.848566055297852 + ], + [ + "▁updated", + -9.849329948425291 + ], + [ + "▁foot", + -9.849364280700684 + ], + [ + "ton", + -9.849528312683104 + ], + [ + "▁serving", + -9.84958553314209 + ], + [ + "▁brain", + -9.850089073181152 + ], + [ + "▁proud", + -9.850723266601562 + ], + [ + "▁Bank", + -9.850964546203612 + ], + [ + "▁Paul", + -9.850987434387209 + ], + [ + "▁thank", + -9.85161304473877 + ], + [ + "▁www", + -9.853279113769531 + ], + [ + "▁federal", + -9.853561401367188 + ], + [ + "▁walking", + -9.853767395019531 + ], + [ + "▁daughter", + -9.854098320007324 + ], + [ + "▁Act", + -9.854238510131836 + ], + [ + "▁sounds", + -9.854536056518556 + ], + [ + "▁King", + -9.855706214904783 + ], + [ + 
"▁alternative", + -9.856255531311035 + ], + [ + "▁meaning", + -9.856751441955566 + ], + [ + "▁sit", + -9.857479095458984 + ], + [ + "▁becomes", + -9.857515335083008 + ], + [ + "▁brings", + -9.858118057250977 + ], + [ + "▁rock", + -9.858406066894531 + ], + [ + "▁flat", + -9.858877182006836 + ], + [ + "▁central", + -9.859800338745115 + ], + [ + "▁assistance", + -9.86008644104004 + ], + [ + "▁discover", + -9.86044979095459 + ], + [ + "▁continued", + -9.861607551574709 + ], + [ + "▁centre", + -9.861738204956056 + ], + [ + "▁initial", + -9.86229419708252 + ], + [ + "▁stores", + -9.862792015075684 + ], + [ + "▁allowing", + -9.863147735595703 + ], + [ + "▁connected", + -9.863417625427246 + ], + [ + "▁dream", + -9.864299774169922 + ], + [ + "▁format", + -9.864495277404783 + ], + [ + "▁detailed", + -9.864834785461426 + ], + [ + "▁absolutely", + -9.865620613098145 + ], + [ + "▁ten", + -9.865674018859863 + ], + [ + "▁vehicles", + -9.86582851409912 + ], + [ + "▁leadership", + -9.86620807647705 + ], + [ + "▁length", + -9.86751651763916 + ], + [ + "f", + -9.867721557617188 + ], + [ + "▁relationships", + -9.868123054504396 + ], + [ + "v", + -9.868294715881348 + ], + [ + "▁whatever", + -9.868916511535645 + ], + [ + "Your", + -9.869111061096191 + ], + [ + "▁tickets", + -9.86923122406006 + ], + [ + "▁managed", + -9.871294975280762 + ], + [ + "▁master", + -9.8714599609375 + ], + [ + "▁locations", + -9.872097969055176 + ], + [ + "▁teachers", + -9.872284889221191 + ], + [ + "▁fees", + -9.872455596923828 + ], + [ + "▁concerns", + -9.872714042663574 + ], + [ + "▁remains", + -9.873414039611816 + ], + [ + "▁cheap", + -9.87351417541504 + ], + [ + "▁recommended", + -9.87381362915039 + ], + [ + "▁station", + -9.874034881591797 + ], + [ + "▁provider", + -9.87459945678711 + ], + [ + "▁reliable", + -9.875452995300291 + ], + [ + "▁partners", + -9.876503944396973 + ], + [ + "▁girl", + -9.876877784729004 + ], + [ + "▁automatically", + -9.878005981445312 + ], + [ + "▁fight", + -9.879064559936523 + ], + [ + "▁websites", + -9.879168510437012 + ], + [ + "▁leaving", + -9.879322052001951 + ], + [ + "less", + -9.879517555236816 + ], + [ + "▁Island", + -9.87967300415039 + ], + [ + "▁markets", + -9.88006591796875 + ], + [ + "▁suitable", + -9.880075454711914 + ], + [ + "▁calls", + -9.880178451538086 + ], + [ + "▁agency", + -9.881035804748535 + ], + [ + "▁smart", + -9.881049156188965 + ], + [ + "▁mid", + -9.88122844696045 + ], + [ + "▁block", + -9.881354331970217 + ], + [ + "▁USA", + -9.881806373596191 + ], + [ + "▁Amazon", + -9.88182544708252 + ], + [ + "▁drug", + -9.88184642791748 + ], + [ + "!!!", + -9.88218593597412 + ], + [ + "▁Because", + -9.882245063781738 + ], + [ + "▁poor", + -9.882274627685549 + ], + [ + "▁Beach", + -9.882418632507324 + ], + [ + "▁Although", + -9.88254165649414 + ], + [ + "▁cities", + -9.883902549743652 + ], + [ + "▁trees", + -9.883910179138184 + ], + [ + "▁scale", + -9.884228706359863 + ], + [ + "▁busy", + -9.88425064086914 + ], + [ + "▁teacher", + -9.884454727172852 + ], + [ + "▁minimum", + -9.884882926940918 + ], + [ + "▁sets", + -9.88543701171875 + ], + [ + "▁Whether", + -9.885499954223633 + ], + [ + "▁filled", + -9.885517120361328 + ], + [ + "▁senior", + -9.886075019836426 + ], + [ + "▁parking", + -9.88625144958496 + ], + [ + "▁records", + -9.886652946472168 + ], + [ + "▁Science", + -9.88690185546875 + ], + [ + "▁closed", + -9.887356758117676 + ], + [ + "▁Indian", + -9.888056755065918 + ], + [ + "▁shipping", + -9.88817024230957 + ], + [ + "at", + -9.888579368591309 + ], + [ + "▁posts", + 
-9.888633728027344 + ], + [ + "▁respect", + -9.889056205749512 + ], + [ + "▁James", + -9.889129638671877 + ], + [ + "like", + -9.890249252319336 + ], + [ + "▁score", + -9.890374183654783 + ], + [ + "▁corner", + -9.891843795776367 + ], + [ + "▁mentioned", + -9.892739295959473 + ], + [ + "▁continues", + -9.893671989440918 + ], + [ + "▁sources", + -9.894118309020996 + ], + [ + "▁applied", + -9.894641876220703 + ], + [ + "▁panel", + -9.894715309143066 + ], + [ + "▁colour", + -9.895240783691406 + ], + [ + "▁edge", + -9.895943641662598 + ], + [ + "▁Law", + -9.89617156982422 + ], + [ + "▁caused", + -9.89625072479248 + ], + [ + "▁encourage", + -9.89682674407959 + ], + [ + "▁cream", + -9.89690399169922 + ], + [ + "▁operation", + -9.896931648254396 + ], + [ + "▁funding", + -9.899097442626951 + ], + [ + "▁technologies", + -9.89941692352295 + ], + [ + "▁square", + -9.899911880493164 + ], + [ + "▁category", + -9.900338172912598 + ], + [ + "▁toward", + -9.901463508605955 + ], + [ + "▁policies", + -9.901703834533691 + ], + [ + "▁info", + -9.902100563049316 + ], + [ + "▁tea", + -9.903270721435549 + ], + [ + "▁delicious", + -9.90354824066162 + ], + [ + "▁agreement", + -9.903608322143556 + ], + [ + "▁England", + -9.904478073120115 + ], + [ + "▁waste", + -9.905412673950195 + ], + [ + "▁eating", + -9.905545234680176 + ], + [ + "▁topic", + -9.905771255493164 + ], + [ + "▁leaves", + -9.905887603759766 + ], + [ + "▁brands", + -9.905922889709473 + ], + [ + "▁animals", + -9.906571388244627 + ], + [ + "▁scene", + -9.907354354858398 + ], + [ + "▁Microsoft", + -9.90738296508789 + ], + [ + "▁22", + -9.907774925231934 + ], + [ + "▁Valley", + -9.908906936645508 + ], + [ + "▁launch", + -9.9095458984375 + ], + [ + "▁starts", + -9.909819602966309 + ], + [ + "▁Community", + -9.910063743591309 + ], + [ + "▁Program", + -9.910962104797363 + ], + [ + "▁department", + -9.91107940673828 + ], + [ + "▁strategies", + -9.91202163696289 + ], + [ + "▁shower", + -9.91203498840332 + ], + [ + "▁wouldn", + -9.912579536437988 + ], + [ + "▁Central", + -9.912836074829102 + ], + [ + "▁prepare", + -9.91297721862793 + ], + [ + "▁Michael", + -9.9132080078125 + ], + [ + "▁System", + -9.913317680358888 + ], + [ + "▁joined", + -9.913326263427734 + ], + [ + "▁doors", + -9.913444519042969 + ], + [ + "▁People", + -9.91379737854004 + ], + [ + "▁Music", + -9.914129257202148 + ], + [ + "▁increasing", + -9.915572166442873 + ], + [ + "▁star", + -9.915777206420898 + ], + [ + "▁pair", + -9.916510581970217 + ], + [ + "▁Have", + -9.916580200195312 + ], + [ + "▁watching", + -9.917221069335938 + ], + [ + "▁2013", + -9.918241500854492 + ], + [ + "1.", + -9.918349266052246 + ], + [ + "▁putting", + -9.918972969055176 + ], + [ + "▁deals", + -9.921978950500488 + ], + [ + "▁songs", + -9.922675132751465 + ], + [ + "▁processing", + -9.923733711242676 + ], + [ + "▁Android", + -9.92388153076172 + ], + [ + "▁majority", + -9.9238862991333 + ], + [ + "▁document", + -9.924271583557127 + ], + [ + "▁committed", + -9.924432754516602 + ], + [ + "▁cell", + -9.925066947937012 + ], + [ + "▁miss", + -9.92587184906006 + ], + [ + "▁practical", + -9.925885200500488 + ], + [ + "3.", + -9.926000595092772 + ], + [ + "▁advance", + -9.926000595092772 + ], + [ + "▁peace", + -9.92607879638672 + ], + [ + "▁Open", + -9.926237106323242 + ], + [ + "▁PC", + -9.926549911499023 + ], + [ + "▁dress", + -9.926766395568848 + ], + [ + "▁presence", + -9.92756462097168 + ], + [ + "▁combined", + -9.928105354309082 + ], + [ + "▁depending", + -9.928960800170898 + ], + [ + "▁Call", + -9.929381370544434 + ], + [ 
+ "▁doctor", + -9.929874420166016 + ], + [ + "▁flowers", + -9.931280136108398 + ], + [ + "▁approximately", + -9.933385848999023 + ], + [ + "▁accept", + -9.933449745178224 + ], + [ + "▁Top", + -9.933836936950684 + ], + [ + "▁visiting", + -9.933987617492676 + ], + [ + "▁girls", + -9.934155464172363 + ], + [ + "▁battery", + -9.934258460998535 + ], + [ + "▁typically", + -9.93434238433838 + ], + [ + "▁Love", + -9.935359954833984 + ], + [ + "▁Blue", + -9.93549919128418 + ], + [ + "▁Development", + -9.936376571655272 + ], + [ + "▁promote", + -9.937639236450195 + ], + [ + "▁importance", + -9.938420295715332 + ], + [ + "▁Add", + -9.93854808807373 + ], + [ + "Thanks", + -9.939058303833008 + ], + [ + "▁feedback", + -9.93992042541504 + ], + [ + "▁http", + -9.940061569213867 + ], + [ + "▁academic", + -9.94192123413086 + ], + [ + "▁generation", + -9.942660331726074 + ], + [ + "▁lighting", + -9.943795204162598 + ], + [ + "apos", + -9.94389820098877 + ], + [ + "▁seat", + -9.944262504577637 + ], + [ + "▁slightly", + -9.944439888000488 + ], + [ + "▁bus", + -9.944854736328123 + ], + [ + "She", + -9.945093154907228 + ], + [ + "▁anywhere", + -9.945578575134276 + ], + [ + "▁hospital", + -9.945799827575684 + ], + [ + "▁claims", + -9.945978164672852 + ], + [ + "▁roof", + -9.946496963500977 + ], + [ + "▁raised", + -9.946929931640623 + ], + [ + "▁otherwise", + -9.947709083557127 + ], + [ + "▁2012", + -9.947955131530762 + ], + [ + "▁Time", + -9.948941230773926 + ], + [ + "▁extensive", + -9.95094394683838 + ], + [ + "▁environmental", + -9.951449394226074 + ], + [ + "▁comprehensive", + -9.953011512756348 + ], + [ + "▁profile", + -9.953046798706056 + ], + [ + "U", + -9.95395565032959 + ], + [ + "▁auto", + -9.954054832458496 + ], + [ + "▁Will", + -9.95487117767334 + ], + [ + "▁previously", + -9.955159187316896 + ], + [ + "▁drink", + -9.955164909362791 + ], + [ + "▁choices", + -9.956608772277832 + ], + [ + "▁lunch", + -9.95722198486328 + ], + [ + "TM", + -9.957270622253418 + ], + [ + "▁ourselves", + -9.958561897277832 + ], + [ + "▁inspired", + -9.958951950073242 + ], + [ + "▁worry", + -9.959007263183594 + ], + [ + "▁aspects", + -9.959623336791992 + ], + [ + "▁exchange", + -9.960165023803713 + ], + [ + "▁attend", + -9.96062183380127 + ], + [ + "▁guys", + -9.96085262298584 + ], + [ + "▁Take", + -9.960984230041504 + ], + [ + "▁industrial", + -9.96141529083252 + ], + [ + "▁winning", + -9.96248722076416 + ], + [ + "▁nation", + -9.96311378479004 + ], + [ + "▁debt", + -9.963823318481444 + ], + [ + "▁bill", + -9.963915824890137 + ], + [ + "▁island", + -9.96403694152832 + ], + [ + "▁components", + -9.96435260772705 + ], + [ + "▁spread", + -9.965349197387695 + ], + [ + "▁meal", + -9.96599006652832 + ], + [ + "▁Twitter", + -9.966486930847168 + ], + [ + "le", + -9.96712875366211 + ], + [ + "▁thoughts", + -9.967724800109863 + ], + [ + "▁emergency", + -9.968147277832031 + ], + [ + "▁attack", + -9.968290328979492 + ], + [ + "▁Hall", + -9.968964576721191 + ], + [ + "▁slow", + -9.969502449035645 + ], + [ + "▁confidence", + -9.970702171325684 + ], + [ + "▁internal", + -9.971637725830078 + ], + [ + "▁seconds", + -9.971735000610352 + ], + [ + "▁mental", + -9.972108840942385 + ], + [ + "ic", + -9.974671363830566 + ], + [ + "▁Power", + -9.976271629333496 + ], + [ + "▁competitive", + -9.97674560546875 + ], + [ + "▁smooth", + -9.977224349975586 + ], + [ + "▁interview", + -9.977725982666016 + ], + [ + "▁pattern", + -9.978431701660156 + ], + [ + "▁v", + -9.978816986083984 + ], + [ + "▁error", + -9.97927474975586 + ], + [ + "▁fabric", + 
-9.98100757598877 + ], + [ + "▁thus", + -9.982867240905762 + ], + [ + "▁suggest", + -9.983001708984377 + ], + [ + "▁fruit", + -9.983144760131836 + ], + [ + "▁iPhone", + -9.983171463012695 + ], + [ + "▁responsibility", + -9.983278274536133 + ], + [ + "\".", + -9.983988761901855 + ], + [ + "▁performed", + -9.984291076660156 + ], + [ + "▁specifically", + -9.985572814941406 + ], + [ + "▁Centre", + -9.98557949066162 + ], + [ + "▁guarantee", + -9.98702907562256 + ], + [ + "▁employee", + -9.987611770629885 + ], + [ + "▁Team", + -9.9877290725708 + ], + [ + "▁Find", + -9.987736701965332 + ], + [ + "▁Their", + -9.988134384155272 + ], + [ + "▁appointment", + -9.98965835571289 + ], + [ + "▁clearly", + -9.98975944519043 + ], + [ + "▁District", + -9.99005889892578 + ], + [ + ">", + -9.990177154541016 + ], + [ + "▁flight", + -9.990618705749512 + ], + [ + "▁wind", + -9.992668151855469 + ], + [ + "▁Every", + -9.993839263916016 + ], + [ + "▁restaurants", + -9.994308471679688 + ], + [ + "▁adults", + -9.994367599487305 + ], + [ + "▁mostly", + -9.99458122253418 + ], + [ + "▁dating", + -9.994657516479492 + ], + [ + "▁fuel", + -9.994850158691406 + ], + [ + "▁suit", + -9.995461463928224 + ], + [ + "▁valuable", + -9.996038436889648 + ], + [ + "▁dance", + -9.996280670166016 + ], + [ + "▁leads", + -9.99693775177002 + ], + [ + "▁audio", + -9.997665405273438 + ], + [ + "▁foreign", + -9.997797012329102 + ], + [ + "▁cloud", + -9.99864387512207 + ], + [ + "▁diet", + -9.998736381530762 + ], + [ + "▁Data", + -9.998971939086914 + ], + [ + "▁actions", + -9.9998197555542 + ], + [ + "▁Christ", + -10.000214576721191 + ], + [ + "▁reference", + -10.000495910644531 + ], + [ + "▁fair", + -10.000941276550291 + ], + [ + "▁delivered", + -10.001517295837402 + ], + [ + "▁register", + -10.002480506896973 + ], + [ + "▁Big", + -10.002821922302246 + ], + [ + "▁Over", + -10.003525733947754 + ], + [ + "▁updates", + -10.004611015319824 + ], + [ + "▁discussion", + -10.00545597076416 + ], + [ + "▁died", + -10.006202697753906 + ], + [ + "▁breakfast", + -10.006255149841309 + ], + [ + "▁saving", + -10.006270408630373 + ], + [ + "▁cultural", + -10.006834030151367 + ], + [ + "▁Lord", + -10.007421493530272 + ], + [ + "▁faster", + -10.007454872131348 + ], + [ + "▁During", + -10.00853443145752 + ], + [ + "▁quarter", + -10.009472846984863 + ], + [ + "▁providers", + -10.009499549865724 + ], + [ + "▁Download", + -10.011237144470217 + ], + [ + "▁bike", + -10.01131534576416 + ], + [ + "▁attempt", + -10.011436462402344 + ], + [ + "▁climate", + -10.012340545654297 + ], + [ + "▁Post", + -10.013080596923828 + ], + [ + "▁participants", + -10.013394355773926 + ], + [ + "▁monthly", + -10.013568878173828 + ], + [ + "▁housing", + -10.015334129333496 + ], + [ + "▁consumers", + -10.015392303466797 + ], + [ + "▁fat", + -10.015668869018556 + ], + [ + "▁cells", + -10.0162353515625 + ], + [ + "▁missing", + -10.016312599182127 + ], + [ + "▁south", + -10.0164213180542 + ], + [ + "▁prefer", + -10.016566276550291 + ], + [ + "▁launched", + -10.01657009124756 + ], + [ + "▁retail", + -10.017992973327637 + ], + [ + "▁database", + -10.018536567687988 + ], + [ + "▁knows", + -10.0191068649292 + ], + [ + "▁Star", + -10.020464897155762 + ], + [ + "▁covers", + -10.020913124084473 + ], + [ + "▁ended", + -10.020970344543455 + ], + [ + "▁appears", + -10.02125072479248 + ], + [ + "▁supplies", + -10.021546363830566 + ], + [ + "▁fix", + -10.022014617919922 + ], + [ + "▁showed", + -10.022156715393066 + ], + [ + "▁Gold", + -10.02370262145996 + ], + [ + "▁Society", + -10.023746490478516 + ], + [ 
+ "From", + -10.023975372314451 + ], + [ + "▁truth", + -10.023992538452148 + ], + [ + "▁News", + -10.024067878723145 + ], + [ + "▁raise", + -10.0255126953125 + ], + [ + "▁apps", + -10.025771141052246 + ], + [ + "▁Project", + -10.026798248291016 + ], + [ + "▁affect", + -10.026992797851562 + ], + [ + "▁Care", + -10.026995658874512 + ], + [ + "▁commitment", + -10.027379035949709 + ], + [ + "▁educational", + -10.027555465698242 + ], + [ + "▁draw", + -10.028457641601562 + ], + [ + "▁surrounding", + -10.028502464294434 + ], + [ + "▁calling", + -10.029361724853516 + ], + [ + "▁articles", + -10.030084609985352 + ], + [ + "▁enable", + -10.030896186828612 + ], + [ + "Why", + -10.03104305267334 + ], + [ + "▁supported", + -10.031131744384766 + ], + [ + "▁seek", + -10.031245231628418 + ], + [ + "ia", + -10.031865119934082 + ], + [ + "▁Public", + -10.032513618469238 + ], + [ + "▁species", + -10.033082008361816 + ], + [ + "▁Christian", + -10.034185409545898 + ], + [ + "▁library", + -10.034249305725098 + ], + [ + "▁notes", + -10.03438663482666 + ], + [ + "▁totally", + -10.03515625 + ], + [ + "▁manner", + -10.036504745483398 + ], + [ + "▁negative", + -10.03752326965332 + ], + [ + "▁asking", + -10.03752899169922 + ], + [ + "▁resolution", + -10.037824630737305 + ], + [ + "▁lights", + -10.03821086883545 + ], + [ + "▁replacement", + -10.038565635681152 + ], + [ + "▁despite", + -10.039886474609377 + ], + [ + "▁volume", + -10.039982795715332 + ], + [ + "▁cooking", + -10.041970252990724 + ], + [ + "▁23", + -10.04224681854248 + ], + [ + "▁radio", + -10.04333782196045 + ], + [ + "▁boat", + -10.043757438659668 + ], + [ + "▁bringing", + -10.044167518615724 + ], + [ + "▁200", + -10.04496955871582 + ], + [ + "▁fear", + -10.045656204223633 + ], + [ + "▁football", + -10.046165466308594 + ], + [ + "By", + -10.046310424804688 + ], + [ + "Some", + -10.04652500152588 + ], + [ + "▁passion", + -10.046775817871094 + ], + [ + "▁Super", + -10.047940254211426 + ], + [ + "▁rise", + -10.048874855041504 + ], + [ + "▁unless", + -10.048938751220703 + ], + [ + "▁electric", + -10.049324035644531 + ], + [ + "▁obtain", + -10.049407958984377 + ], + [ + "▁listen", + -10.04995059967041 + ], + [ + "▁Other", + -10.049992561340332 + ], + [ + "▁advertising", + -10.050622940063477 + ], + [ + "▁trading", + -10.050800323486328 + ], + [ + "▁Water", + -10.050804138183594 + ], + [ + "▁cheese", + -10.051080703735352 + ], + [ + "▁faith", + -10.051767349243164 + ], + [ + "▁hearing", + -10.05242156982422 + ], + [ + "▁eventually", + -10.052705764770508 + ], + [ + "▁appearance", + -10.052818298339844 + ], + [ + "▁chocolate", + -10.052937507629396 + ], + [ + "▁manufacturing", + -10.053228378295898 + ], + [ + "▁guest", + -10.053314208984377 + ], + [ + "▁regularly", + -10.053335189819336 + ], + [ + "▁rental", + -10.05338191986084 + ], + [ + "▁signed", + -10.053671836853027 + ], + [ + "▁meant", + -10.053685188293455 + ], + [ + "▁approved", + -10.053930282592772 + ], + [ + "However", + -10.054681777954102 + ], + [ + "▁surgery", + -10.055160522460938 + ], + [ + "▁Museum", + -10.055322647094728 + ], + [ + "▁effectively", + -10.056170463562012 + ], + [ + "10", + -10.05628776550293 + ], + [ + "▁2018.", + -10.056478500366213 + ], + [ + "▁registered", + -10.05653190612793 + ], + [ + "▁accounts", + -10.05679416656494 + ], + [ + "▁birthday", + -10.056830406188965 + ], + [ + "▁introduced", + -10.057329177856444 + ], + [ + "▁reached", + -10.058134078979492 + ], + [ + "▁drivers", + -10.058389663696287 + ], + [ + "▁dogs", + -10.058431625366213 + ], + [ + "▁pet", + 
-10.058526039123535 + ], + [ + "▁Book", + -10.058871269226074 + ], + [ + "▁Committee", + -10.059215545654297 + ], + [ + "▁AND", + -10.059783935546877 + ], + [ + "▁incredible", + -10.05983543395996 + ], + [ + "▁chicken", + -10.060203552246094 + ], + [ + "▁Z", + -10.060593605041504 + ], + [ + "о", + -10.061664581298828 + ], + [ + "▁Japan", + -10.062061309814451 + ], + [ + "off", + -10.062872886657717 + ], + [ + "▁instance", + -10.063899993896484 + ], + [ + "▁hire", + -10.064194679260254 + ], + [ + "▁Technology", + -10.064264297485352 + ], + [ + "▁Family", + -10.064529418945312 + ], + [ + "▁crusher", + -10.064620018005373 + ], + [ + "е", + -10.065031051635742 + ], + [ + "▁Al", + -10.065264701843262 + ], + [ + "▁perfectly", + -10.065300941467283 + ], + [ + "▁grade", + -10.065631866455078 + ], + [ + "▁cutting", + -10.066462516784668 + ], + [ + "▁resource", + -10.066864967346191 + ], + [ + "▁salt", + -10.066975593566896 + ], + [ + "▁Social", + -10.067481994628906 + ], + [ + "▁France", + -10.067948341369627 + ], + [ + "▁28", + -10.069005012512209 + ], + [ + "▁purchased", + -10.069025993347168 + ], + [ + "▁Bar", + -10.07061004638672 + ], + [ + "▁afternoon", + -10.070965766906738 + ], + [ + "▁seeking", + -10.071110725402832 + ], + [ + "▁engineering", + -10.071728706359863 + ], + [ + "▁measure", + -10.07229709625244 + ], + [ + "▁functions", + -10.072490692138672 + ], + [ + "▁hundreds", + -10.07277774810791 + ], + [ + "▁returned", + -10.073928833007812 + ], + [ + "▁trained", + -10.07553482055664 + ], + [ + "▁League", + -10.075666427612305 + ], + [ + "▁bright", + -10.076157569885254 + ], + [ + "▁therapy", + -10.076581954956056 + ], + [ + "▁vote", + -10.076693534851074 + ], + [ + "te", + -10.078518867492676 + ], + [ + "▁trial", + -10.079205513000488 + ], + [ + "▁settings", + -10.079512596130373 + ], + [ + "▁opinion", + -10.079763412475586 + ], + [ + "▁Chicago", + -10.080160140991213 + ], + [ + "▁greatest", + -10.0805025100708 + ], + [ + "▁Information", + -10.081169128417969 + ], + [ + "▁appreciate", + -10.081201553344728 + ], + [ + "▁snow", + -10.081345558166504 + ], + [ + "▁Government", + -10.081555366516112 + ], + [ + "9", + -10.081579208374023 + ], + [ + "▁Two", + -10.081586837768556 + ], + [ + "▁tend", + -10.08189582824707 + ], + [ + "▁consumer", + -10.081926345825195 + ], + [ + "▁buildings", + -10.083858489990234 + ], + [ + "▁contain", + -10.084099769592283 + ], + [ + "▁Hotel", + -10.084112167358398 + ], + [ + "▁switch", + -10.084147453308104 + ], + [ + "▁removed", + -10.084528923034668 + ], + [ + "▁limit", + -10.08510971069336 + ], + [ + "▁animal", + -10.085458755493164 + ], + [ + "▁supporting", + -10.085877418518066 + ], + [ + "▁chair", + -10.086613655090332 + ], + [ + "▁inspiration", + -10.086820602416992 + ], + [ + "▁improved", + -10.086958885192873 + ], + [ + "▁teeth", + -10.087181091308594 + ], + [ + "▁north", + -10.087721824645996 + ], + [ + "▁desire", + -10.087728500366213 + ], + [ + "▁participate", + -10.087865829467772 + ], + [ + "▁leather", + -10.088014602661133 + ], + [ + "▁500", + -10.088287353515623 + ], + [ + "▁painting", + -10.088994026184082 + ], + [ + "▁feed", + -10.089587211608888 + ], + [ + "▁ship", + -10.08987808227539 + ], + [ + "▁mode", + -10.090303421020508 + ], + [ + "which", + -10.090394973754885 + ], + [ + "▁Any", + -10.090628623962402 + ], + [ + "de", + -10.09080696105957 + ], + [ + "▁Award", + -10.09103012084961 + ], + [ + "▁paying", + -10.092652320861816 + ], + [ + "▁sitting", + -10.092710494995115 + ], + [ + "▁Mark", + -10.09384059906006 + ], + [ + "▁Union", + 
-10.094319343566896 + ], + [ + "▁described", + -10.094589233398438 + ], + [ + "▁enhance", + -10.095059394836426 + ], + [ + "▁efficiency", + -10.095254898071287 + ], + [ + "▁fixed", + -10.095767974853516 + ], + [ + "▁coach", + -10.096078872680664 + ], + [ + "▁Hill", + -10.096537590026855 + ], + [ + "▁German", + -10.097325325012209 + ], + [ + "▁pop", + -10.097390174865724 + ], + [ + "▁measures", + -10.097733497619627 + ], + [ + "▁studio", + -10.097816467285156 + ], + [ + "▁Security", + -10.098244667053224 + ], + [ + "▁sessions", + -10.098268508911133 + ], + [ + "▁chain", + -10.099356651306152 + ], + [ + "▁Contact", + -10.099809646606444 + ], + [ + "▁organic", + -10.099967956542969 + ], + [ + "▁replace", + -10.099997520446776 + ], + [ + "Thank", + -10.10021686553955 + ], + [ + "▁readers", + -10.101820945739746 + ], + [ + "na", + -10.102449417114258 + ], + [ + "Once", + -10.103385925292969 + ], + [ + "▁aim", + -10.104549407958984 + ], + [ + "▁luxury", + -10.104578971862791 + ], + [ + "▁realize", + -10.104636192321776 + ], + [ + "▁revenue", + -10.10512351989746 + ], + [ + "▁laws", + -10.105422973632812 + ], + [ + "▁Ltd", + -10.106328964233398 + ], + [ + "▁signs", + -10.107617378234863 + ], + [ + "land", + -10.108089447021484 + ], + [ + "▁sizes", + -10.108685493469238 + ], + [ + "▁procedure", + -10.109963417053224 + ], + [ + "▁possibly", + -10.110025405883787 + ], + [ + "▁26", + -10.110074996948242 + ], + [ + "▁premium", + -10.110478401184082 + ], + [ + "▁contemporary", + -10.110724449157717 + ], + [ + "▁fields", + -10.110774040222168 + ], + [ + "▁agent", + -10.111336708068848 + ], + [ + "▁solar", + -10.111517906188965 + ], + [ + "▁Man", + -10.111553192138672 + ], + [ + "▁purposes", + -10.11164093017578 + ], + [ + "▁Germany", + -10.111664772033691 + ], + [ + "▁visual", + -10.113408088684082 + ], + [ + "▁influence", + -10.113883972167969 + ], + [ + "▁infrastructure", + -10.113996505737305 + ], + [ + "▁presentation", + -10.115033149719238 + ], + [ + "▁George", + -10.11545181274414 + ], + [ + "▁village", + -10.116880416870115 + ], + [ + "▁worldwide", + -10.117605209350586 + ], + [ + "▁Me", + -10.117805480957031 + ], + [ + "▁teach", + -10.117892265319824 + ], + [ + "▁push", + -10.118261337280272 + ], + [ + "▁chosen", + -10.118524551391602 + ], + [ + "▁topics", + -10.118535995483398 + ], + [ + "▁bowl", + -10.118559837341309 + ], + [ + "▁guy", + -10.118680953979492 + ], + [ + "▁conversation", + -10.118873596191406 + ], + [ + "▁Both", + -10.119327545166016 + ], + [ + "▁Plus", + -10.119376182556152 + ], + [ + "▁recovery", + -10.119874000549316 + ], + [ + "▁accurate", + -10.120550155639648 + ], + [ + "▁explain", + -10.121499061584473 + ], + [ + "▁sides", + -10.122017860412598 + ], + [ + "▁pull", + -10.122053146362305 + ], + [ + "▁catch", + -10.1224946975708 + ], + [ + "▁Mac", + -10.12263011932373 + ], + [ + "▁Y", + -10.122679710388184 + ], + [ + "▁holding", + -10.12277889251709 + ], + [ + "▁electronic", + -10.123618125915527 + ], + [ + "▁district", + -10.123666763305664 + ], + [ + "▁registration", + -10.12367057800293 + ], + [ + "▁dead", + -10.123778343200684 + ], + [ + "▁route", + -10.12401294708252 + ], + [ + "▁familiar", + -10.125225067138672 + ], + [ + "▁milk", + -10.125697135925291 + ], + [ + "▁rule", + -10.12714672088623 + ], + [ + "▁sheet", + -10.127497673034668 + ], + [ + "▁creation", + -10.128067016601562 + ], + [ + "▁exam", + -10.128809928894045 + ], + [ + "▁fund", + -10.128828048706056 + ], + [ + "▁feels", + -10.128886222839355 + ], + [ + "if", + -10.1294527053833 + ], + [ + "ville", + 
-10.12947940826416 + ], + [ + "▁instructions", + -10.129741668701172 + ], + [ + "▁walls", + -10.130541801452637 + ], + [ + "▁Like", + -10.13197135925293 + ], + [ + "▁Car", + -10.131994247436523 + ], + [ + "we", + -10.13323974609375 + ], + [ + "▁residential", + -10.1345796585083 + ], + [ + "▁owned", + -10.136058807373049 + ], + [ + "▁1,", + -10.13616180419922 + ], + [ + "▁survey", + -10.136370658874512 + ], + [ + "▁sample", + -10.136611938476562 + ], + [ + "▁university", + -10.138202667236328 + ], + [ + "▁Who", + -10.139093399047852 + ], + [ + "▁administration", + -10.139102935791016 + ], + [ + "▁factor", + -10.139159202575684 + ], + [ + "▁considering", + -10.139732360839844 + ], + [ + "▁African", + -10.140181541442873 + ], + [ + "▁aid", + -10.140572547912598 + ], + [ + "▁load", + -10.140579223632812 + ], + [ + "▁cup", + -10.140618324279783 + ], + [ + "▁successfully", + -10.141016960144045 + ], + [ + "▁domain", + -10.141139030456545 + ], + [ + "▁discovered", + -10.142212867736816 + ], + [ + "▁mine", + -10.143038749694824 + ], + [ + "▁cake", + -10.143054008483888 + ], + [ + "rs", + -10.143563270568848 + ], + [ + "▁landscape", + -10.144329071044922 + ], + [ + "▁Medical", + -10.14447784423828 + ], + [ + "▁accident", + -10.145681381225586 + ], + [ + "▁discount", + -10.146611213684082 + ], + [ + "▁@", + -10.14737606048584 + ], + [ + "▁2011", + -10.147771835327148 + ], + [ + "▁2010", + -10.14849090576172 + ], + [ + "▁m", + -10.15024471282959 + ], + [ + "▁hardware", + -10.15068531036377 + ], + [ + "▁featured", + -10.15208339691162 + ], + [ + "▁interests", + -10.152145385742188 + ], + [ + "▁Yes", + -10.152341842651367 + ], + [ + "▁tough", + -10.1527681350708 + ], + [ + "▁constantly", + -10.15359878540039 + ], + [ + "▁bigger", + -10.153712272644045 + ], + [ + "▁Asia", + -10.153724670410156 + ], + [ + "▁officials", + -10.154193878173828 + ], + [ + "▁submit", + -10.154239654541016 + ], + [ + "▁count", + -10.154378890991213 + ], + [ + "▁guess", + -10.154559135437012 + ], + [ + "▁pack", + -10.154842376708984 + ], + [ + "▁Arts", + -10.154967308044434 + ], + [ + "▁exclusive", + -10.15530776977539 + ], + [ + "▁seemed", + -10.156166076660156 + ], + [ + "▁Manager", + -10.156874656677246 + ], + [ + "▁houses", + -10.157387733459473 + ], + [ + "▁sub", + -10.158536911010742 + ], + [ + "▁kinds", + -10.158647537231444 + ], + [ + "▁candidates", + -10.15914249420166 + ], + [ + "ism", + -10.15939235687256 + ], + [ + "▁Australian", + -10.160025596618652 + ], + [ + "▁net", + -10.16061782836914 + ], + [ + "▁writer", + -10.161787033081056 + ], + [ + "▁carefully", + -10.161991119384766 + ], + [ + "Z", + -10.162172317504885 + ], + [ + "▁foods", + -10.162192344665527 + ], + [ + "▁cast", + -10.162609100341797 + ], + [ + "do", + -10.162675857543944 + ], + [ + "▁27", + -10.162744522094728 + ], + [ + "▁intended", + -10.1633939743042 + ], + [ + "▁wonder", + -10.163578987121582 + ], + [ + "ist", + -10.163722038269045 + ], + [ + "▁shoes", + -10.163921356201172 + ], + [ + "▁mark", + -10.164396286010742 + ], + [ + "▁collect", + -10.16470718383789 + ], + [ + "▁silver", + -10.16493320465088 + ], + [ + "▁90", + -10.165334701538086 + ], + [ + "▁kit", + -10.165484428405762 + ], + [ + "▁Food", + -10.16618824005127 + ], + [ + "Can", + -10.166840553283691 + ], + [ + "▁die", + -10.16700553894043 + ], + [ + "▁lifestyle", + -10.167444229125977 + ], + [ + "▁dental", + -10.169441223144531 + ], + [ + "▁charges", + -10.169678688049316 + ], + [ + "▁al", + -10.16997241973877 + ], + [ + "▁decades", + -10.17016887664795 + ], + [ + "▁ring", + 
-10.170649528503418 + ], + [ + "▁lived", + -10.171053886413574 + ], + [ + "▁Media", + -10.172164916992188 + ], + [ + "▁destination", + -10.17240047454834 + ], + [ + "▁Q", + -10.172561645507812 + ], + [ + "▁roll", + -10.172866821289062 + ], + [ + "▁perspective", + -10.17290496826172 + ], + [ + "▁entertainment", + -10.17306423187256 + ], + [ + "▁>", + -10.173095703125 + ], + [ + "▁favourite", + -10.17398166656494 + ], + [ + "▁pan", + -10.174335479736328 + ], + [ + "▁upcoming", + -10.174678802490234 + ], + [ + "▁investors", + -10.17514991760254 + ], + [ + "▁privacy", + -10.176314353942873 + ], + [ + "▁standing", + -10.178592681884766 + ], + [ + "▁pro", + -10.179214477539062 + ], + [ + "▁assets", + -10.179271697998049 + ], + [ + "▁innovation", + -10.17954444885254 + ], + [ + "▁sauce", + -10.179767608642578 + ], + [ + "▁mention", + -10.180171966552734 + ], + [ + "▁tasks", + -10.180415153503418 + ], + [ + "uk", + -10.180469512939451 + ], + [ + "▁examples", + -10.181307792663574 + ], + [ + "▁transport", + -10.18133544921875 + ], + [ + "▁Los", + -10.182098388671877 + ], + [ + "▁stunning", + -10.182153701782228 + ], + [ + "▁Western", + -10.182657241821287 + ], + [ + "▁dollars", + -10.18284034729004 + ], + [ + "▁agencies", + -10.182853698730469 + ], + [ + "▁Women", + -10.183056831359863 + ], + [ + "▁awareness", + -10.183509826660156 + ], + [ + "▁garage", + -10.185775756835938 + ], + [ + "el", + -10.186037063598633 + ], + [ + "▁executive", + -10.186186790466309 + ], + [ + "▁styles", + -10.18638801574707 + ], + [ + "▁beat", + -10.186558723449709 + ], + [ + "▁broken", + -10.187005996704102 + ], + [ + "▁Mary", + -10.187368392944336 + ], + [ + "▁programme", + -10.187772750854492 + ], + [ + "▁farm", + -10.18777847290039 + ], + [ + "▁stick", + -10.18832015991211 + ], + [ + "▁answers", + -10.18839168548584 + ], + [ + "▁accessible", + -10.18857479095459 + ], + [ + "▁impressive", + -10.189115524291992 + ], + [ + "Yes", + -10.18934726715088 + ], + [ + "▁willing", + -10.189760208129885 + ], + [ + "▁spirit", + -10.190211296081545 + ], + [ + "▁tests", + -10.190223693847656 + ], + [ + "▁rain", + -10.190738677978516 + ], + [ + "▁II", + -10.190828323364258 + ], + [ + "▁machines", + -10.190906524658203 + ], + [ + "▁knowing", + -10.191556930541992 + ], + [ + "▁featuring", + -10.191716194152832 + ], + [ + "▁birth", + -10.19200325012207 + ], + [ + "▁remaining", + -10.192445755004885 + ], + [ + "▁improvement", + -10.19248867034912 + ], + [ + "▁holds", + -10.192495346069336 + ], + [ + "▁license", + -10.19276237487793 + ], + [ + "▁apartment", + -10.192906379699709 + ], + [ + "▁manual", + -10.193254470825195 + ], + [ + "▁everyday", + -10.193652153015137 + ], + [ + "▁grown", + -10.193830490112305 + ], + [ + "▁behavior", + -10.194318771362305 + ], + [ + "▁adult", + -10.194406509399414 + ], + [ + "▁diverse", + -10.194491386413574 + ], + [ + "▁doubt", + -10.19544506072998 + ], + [ + "▁Up", + -10.19568157196045 + ], + [ + "▁Japanese", + -10.195761680603027 + ], + [ + "so", + -10.195987701416016 + ], + [ + "▁planned", + -10.196246147155762 + ], + [ + "▁expectations", + -10.196945190429688 + ], + [ + "▁qualified", + -10.19751262664795 + ], + [ + "▁Earth", + -10.197839736938477 + ], + [ + "▁symptoms", + -10.198298454284668 + ], + [ + "▁earth", + -10.198419570922852 + ], + [ + "▁carried", + -10.199399948120115 + ], + [ + "4.", + -10.199862480163574 + ], + [ + "▁begins", + -10.200575828552246 + ], + [ + "▁loans", + -10.20122528076172 + ], + [ + "▁Festival", + -10.202183723449709 + ], + [ + "▁challenging", + -10.202616691589355 + ], + 
[ + "▁arrived", + -10.202848434448242 + ], + [ + "▁payments", + -10.203007698059082 + ], + [ + "▁reduced", + -10.203176498413086 + ], + [ + "▁Only", + -10.20343780517578 + ], + [ + "▁ticket", + -10.20358943939209 + ], + [ + "son", + -10.204273223876951 + ], + [ + "▁remote", + -10.20456314086914 + ], + [ + "▁affected", + -10.204639434814451 + ], + [ + "▁causes", + -10.20496940612793 + ], + [ + "▁CEO", + -10.205058097839355 + ], + [ + "▁OF", + -10.206192016601562 + ], + [ + "▁novel", + -10.20623779296875 + ], + [ + "w", + -10.207433700561523 + ], + [ + "▁outstanding", + -10.20787239074707 + ], + [ + "end", + -10.20811653137207 + ], + [ + "▁scheduled", + -10.208281517028809 + ], + [ + "▁brown", + -10.20928192138672 + ], + [ + "▁foundation", + -10.209368705749512 + ], + [ + "▁Commission", + -10.209458351135254 + ], + [ + "▁Master", + -10.209895133972168 + ], + [ + "Just", + -10.210792541503906 + ], + [ + "▁meat", + -10.211125373840332 + ], + [ + "▁designer", + -10.21118450164795 + ], + [ + "▁situations", + -10.211359024047852 + ], + [ + "▁distribution", + -10.211499214172363 + ], + [ + "▁gallery", + -10.213994979858398 + ], + [ + "▁celebrate", + -10.214268684387209 + ], + [ + "....", + -10.214401245117188 + ], + [ + "▁procedures", + -10.21479320526123 + ], + [ + "▁twice", + -10.216609001159668 + ], + [ + "▁rent", + -10.216991424560549 + ], + [ + "▁hosting", + -10.217021942138672 + ], + [ + "▁youth", + -10.217066764831545 + ], + [ + "▁yes", + -10.217511177062988 + ], + [ + "▁Real", + -10.218342781066896 + ], + [ + "▁truck", + -10.218631744384766 + ], + [ + "but", + -10.219318389892578 + ], + [ + "▁airport", + -10.21998691558838 + ], + [ + "Not", + -10.22027587890625 + ], + [ + "▁rare", + -10.221094131469728 + ], + [ + "▁searching", + -10.221454620361328 + ], + [ + "▁TO", + -10.221460342407228 + ], + [ + "▁rear", + -10.22251319885254 + ], + [ + "▁married", + -10.222620010375977 + ], + [ + "▁clinical", + -10.222753524780272 + ], + [ + "▁certified", + -10.222906112670898 + ], + [ + "net", + -10.223076820373535 + ], + [ + "▁cook", + -10.223177909851074 + ], + [ + "▁Room", + -10.223370552062988 + ], + [ + "▁choosing", + -10.22434425354004 + ], + [ + "▁integrated", + -10.224414825439451 + ], + [ + "▁*", + -10.224885940551758 + ], + [ + "▁bath", + -10.22495937347412 + ], + [ + "▁2019.", + -10.225974082946776 + ], + [ + "Don", + -10.226113319396973 + ], + [ + "▁significantly", + -10.228734970092772 + ], + [ + "▁treated", + -10.229238510131836 + ], + [ + "▁plays", + -10.229443550109863 + ], + [ + "▁serves", + -10.22946834564209 + ], + [ + "▁accepted", + -10.22995662689209 + ], + [ + "▁assessment", + -10.23010540008545 + ], + [ + "▁channel", + -10.230167388916016 + ], + [ + "▁manufacturer", + -10.231425285339355 + ], + [ + "▁Read", + -10.231460571289062 + ], + [ + "▁respond", + -10.231468200683594 + ], + [ + "▁goods", + -10.231879234313965 + ], + [ + "▁spaces", + -10.231964111328123 + ], + [ + "▁vital", + -10.23335075378418 + ], + [ + "▁closely", + -10.233445167541504 + ], + [ + "▁equipped", + -10.23348903656006 + ], + [ + "▁shares", + -10.23366355895996 + ], + [ + "▁except", + -10.233696937561035 + ], + [ + "▁concrete", + -10.233699798583984 + ], + [ + "▁employment", + -10.233933448791504 + ], + [ + "▁glad", + -10.234071731567385 + ], + [ + "ley", + -10.234541893005373 + ], + [ + "▁Long", + -10.234898567199709 + ], + [ + "▁cute", + -10.23507308959961 + ], + [ + "▁earn", + -10.23520278930664 + ], + [ + "▁Smith", + -10.235321044921877 + ], + [ + "▁Market", + -10.23532485961914 + ], + [ + "▁emotional", + 
-10.23581886291504 + ], + [ + "▁layer", + -10.236398696899414 + ], + [ + "▁adventure", + -10.23680591583252 + ], + [ + "▁sport", + -10.237735748291016 + ], + [ + "▁fourth", + -10.2378511428833 + ], + [ + "▁orders", + -10.238627433776855 + ], + [ + "▁80", + -10.239492416381836 + ], + [ + "▁Global", + -10.239995002746582 + ], + [ + "▁winner", + -10.240228652954102 + ], + [ + "mail", + -10.240315437316896 + ], + [ + "▁pm", + -10.242036819458008 + ], + [ + "▁vs", + -10.242417335510254 + ], + [ + "▁historical", + -10.242592811584473 + ], + [ + "▁Conference", + -10.242772102355955 + ], + [ + "▁partnership", + -10.24303913116455 + ], + [ + "▁Series", + -10.24315357208252 + ], + [ + "▁yellow", + -10.24334716796875 + ], + [ + "▁regional", + -10.243431091308594 + ], + [ + "▁Last", + -10.243481636047363 + ], + [ + "▁template", + -10.243666648864746 + ], + [ + "all", + -10.244900703430176 + ], + [ + "▁represent", + -10.245034217834473 + ], + [ + "▁receiving", + -10.24520492553711 + ], + [ + "▁matters", + -10.245962142944336 + ], + [ + "▁Academy", + -10.245994567871094 + ], + [ + "▁iron", + -10.24649429321289 + ], + [ + "▁patterns", + -10.246724128723145 + ], + [ + "▁cycle", + -10.247138023376465 + ], + [ + "▁collaboration", + -10.248055458068848 + ], + [ + "▁appeared", + -10.248739242553713 + ], + [ + "”,", + -10.2490816116333 + ], + [ + "▁c", + -10.249143600463867 + ], + [ + "▁healthcare", + -10.250314712524414 + ], + [ + "▁savings", + -10.25048828125 + ], + [ + "▁cable", + -10.250554084777832 + ], + [ + "▁nine", + -10.250741004943848 + ], + [ + "▁external", + -10.25080108642578 + ], + [ + "▁whom", + -10.251007080078123 + ], + [ + "▁occur", + -10.251323699951172 + ], + [ + "▁logo", + -10.25135898590088 + ], + [ + "▁convenient", + -10.251367568969728 + ], + [ + "▁Party", + -10.251498222351074 + ], + [ + "▁nearby", + -10.251629829406738 + ], + [ + "house", + -10.251699447631836 + ], + [ + "▁caught", + -10.25173282623291 + ], + [ + "▁upper", + -10.252270698547363 + ], + [ + "▁messages", + -10.253149032592772 + ], + [ + "▁quote", + -10.254229545593262 + ], + [ + "▁female", + -10.254243850708008 + ], + [ + "▁Grand", + -10.254321098327637 + ], + [ + "▁engagement", + -10.254388809204102 + ], + [ + "ar", + -10.254424095153809 + ], + [ + "Y", + -10.25460720062256 + ], + [ + "▁frequently", + -10.254843711853027 + ], + [ + "▁atmosphere", + -10.255321502685549 + ], + [ + "▁lessons", + -10.256369590759276 + ], + [ + "▁EU", + -10.256413459777832 + ], + [ + "▁fitness", + -10.25650119781494 + ], + [ + "ity", + -10.2567777633667 + ], + [ + "▁talent", + -10.257187843322754 + ], + [ + "▁mountain", + -10.25816535949707 + ], + [ + "▁imagine", + -10.258293151855469 + ], + [ + "▁massive", + -10.25833797454834 + ], + [ + "▁meetings", + -10.258413314819336 + ], + [ + "ion", + -10.258482933044434 + ], + [ + "▁parent", + -10.258526802062988 + ], + [ + "▁taught", + -10.258569717407228 + ], + [ + "▁facing", + -10.258695602416992 + ], + [ + "▁ease", + -10.259100914001465 + ], + [ + "▁flexible", + -10.259315490722656 + ], + [ + "Are", + -10.26026439666748 + ], + [ + "me", + -10.260265350341797 + ], + [ + "▁brother", + -10.260765075683594 + ], + [ + "▁gifts", + -10.260828971862791 + ], + [ + "▁degrees", + -10.261449813842772 + ], + [ + "▁failure", + -10.261585235595703 + ], + [ + "▁output", + -10.261608123779297 + ], + [ + "▁wearing", + -10.26242446899414 + ], + [ + "▁Italian", + -10.262578010559082 + ], + [ + "▁closer", + -10.262724876403809 + ], + [ + "▁plate", + -10.263404846191406 + ], + [ + "▁Show", + -10.263487815856934 + ], 
+ [ + "▁edition", + -10.263697624206545 + ], + [ + "▁Network", + -10.264625549316406 + ], + [ + "▁Wall", + -10.26475715637207 + ], + [ + "▁battle", + -10.265185356140137 + ], + [ + "а", + -10.26522731781006 + ], + [ + "▁object", + -10.265439987182615 + ], + [ + "▁accessories", + -10.26563549041748 + ], + [ + "▁typical", + -10.266158103942873 + ], + [ + "▁trouble", + -10.266473770141602 + ], + [ + "▁boy", + -10.26751708984375 + ], + [ + "▁context", + -10.26755428314209 + ], + [ + "not", + -10.267715454101562 + ], + [ + "▁mini", + -10.267871856689451 + ], + [ + "▁movies", + -10.270328521728516 + ], + [ + "▁Old", + -10.271700859069824 + ], + [ + "▁35", + -10.27182674407959 + ], + [ + "▁proposed", + -10.271912574768066 + ], + [ + "▁HD", + -10.27216339111328 + ], + [ + "According", + -10.272329330444336 + ], + [ + "▁Minister", + -10.273048400878906 + ], + [ + "▁operate", + -10.273963928222656 + ], + [ + "go", + -10.27429485321045 + ], + [ + "▁speaking", + -10.274429321289062 + ], + [ + "Is", + -10.275009155273438 + ], + [ + "▁domestic", + -10.275286674499512 + ], + [ + "▁apart", + -10.276881217956545 + ], + [ + "▁cat", + -10.277543067932127 + ], + [ + "▁According", + -10.277727127075195 + ], + [ + "▁election", + -10.2783842086792 + ], + [ + "▁recorded", + -10.279027938842772 + ], + [ + "▁informed", + -10.27984619140625 + ], + [ + "▁mass", + -10.27999496459961 + ], + [ + "▁cent", + -10.280112266540527 + ], + [ + "ne", + -10.280149459838867 + ], + [ + "Since", + -10.280418395996094 + ], + [ + "▁clothes", + -10.280455589294434 + ], + [ + "▁recognized", + -10.2819185256958 + ], + [ + "▁oven", + -10.282108306884766 + ], + [ + "▁river", + -10.282334327697754 + ], + [ + "▁usual", + -10.28238582611084 + ], + [ + "▁manufacturers", + -10.282702445983888 + ], + [ + "▁missed", + -10.28285026550293 + ], + [ + "▁Sun", + -10.283241271972656 + ], + [ + "▁surprise", + -10.283339500427246 + ], + [ + "▁browser", + -10.283758163452148 + ], + [ + "▁concern", + -10.284046173095703 + ], + [ + "Many", + -10.284300804138184 + ], + [ + "▁concerned", + -10.284525871276855 + ], + [ + "▁trends", + -10.284679412841797 + ], + [ + "▁researchers", + -10.28543758392334 + ], + [ + "▁electrical", + -10.285661697387695 + ], + [ + "▁en", + -10.28679370880127 + ], + [ + "▁protected", + -10.28705596923828 + ], + [ + "▁quiet", + -10.287156105041504 + ], + [ + "▁supports", + -10.287452697753906 + ], + [ + "▁nor", + -10.288151741027832 + ], + [ + "▁freedom", + -10.28835678100586 + ], + [ + "la", + -10.289048194885254 + ], + [ + "▁failed", + -10.289616584777832 + ], + [ + "ra", + -10.289617538452148 + ], + [ + "▁joint", + -10.29017448425293 + ], + [ + "▁Press", + -10.290641784667969 + ], + [ + "▁moments", + -10.29112720489502 + ], + [ + "▁IN", + -10.291173934936523 + ], + [ + "▁vacation", + -10.291364669799805 + ], + [ + "▁photography", + -10.29171085357666 + ], + [ + "▁trend", + -10.292031288146973 + ], + [ + "▁Energy", + -10.292317390441896 + ], + [ + "▁Bill", + -10.292534828186035 + ], + [ + "▁exact", + -10.29265308380127 + ], + [ + "▁skill", + -10.292832374572754 + ], + [ + "▁campus", + -10.293399810791016 + ], + [ + "way", + -10.29362678527832 + ], + [ + "▁Israel", + -10.293628692626951 + ], + [ + "▁issued", + -10.29363250732422 + ], + [ + "▁butter", + -10.293890953063965 + ], + [ + "▁attorney", + -10.294651985168455 + ], + [ + "▁native", + -10.295208930969238 + ], + [ + "▁improving", + -10.29550552368164 + ], + [ + "▁medicine", + -10.295879364013672 + ], + [ + "▁mail", + -10.29605484008789 + ], + [ + "▁b", + -10.296622276306152 + 
], + [ + "▁monitor", + -10.29698085784912 + ], + [ + "▁counter", + -10.298542022705078 + ], + [ + "with", + -10.29886531829834 + ], + [ + "▁PM", + -10.298924446105955 + ], + [ + "▁earned", + -10.298957824707031 + ], + [ + "▁w", + -10.299273490905762 + ], + [ + "▁stands", + -10.300116539001465 + ], + [ + "▁determined", + -10.300745964050291 + ], + [ + "un", + -10.300823211669922 + ], + [ + "▁transportation", + -10.301414489746094 + ], + [ + "▁extended", + -10.301681518554688 + ], + [ + "▁2017.", + -10.3018159866333 + ], + [ + "▁killed", + -10.301894187927246 + ], + [ + "▁arrive", + -10.302328109741213 + ], + [ + "▁pleased", + -10.303166389465332 + ], + [ + "▁turns", + -10.303587913513184 + ], + [ + "▁mixed", + -10.303890228271484 + ], + [ + "▁dish", + -10.3038969039917 + ], + [ + "be", + -10.304057121276855 + ], + [ + "▁conducted", + -10.304309844970703 + ], + [ + "▁reasonable", + -10.304731369018556 + ], + [ + "▁crew", + -10.305014610290527 + ], + [ + "▁workshop", + -10.305279731750488 + ], + [ + "▁sister", + -10.305291175842283 + ], + [ + "▁inches", + -10.305935859680176 + ], + [ + "▁beer", + -10.306065559387209 + ], + [ + "▁Instead", + -10.306129455566406 + ], + [ + "▁CA", + -10.306314468383787 + ], + [ + "New", + -10.307036399841309 + ], + [ + "▁wild", + -10.307044982910156 + ], + [ + "▁Russian", + -10.30794620513916 + ], + [ + "▁monitoring", + -10.308491706848145 + ], + [ + "▁dates", + -10.308992385864258 + ], + [ + "▁stars", + -10.30915641784668 + ], + [ + "▁forces", + -10.310317993164062 + ], + [ + "▁golf", + -10.310324668884276 + ], + [ + "▁Mexico", + -10.310402870178224 + ], + [ + "▁Brown", + -10.311151504516602 + ], + [ + "▁Canadian", + -10.311159133911133 + ], + [ + "▁shops", + -10.311283111572266 + ], + [ + "▁f", + -10.311393737792969 + ], + [ + "▁television", + -10.31179141998291 + ], + [ + "▁fishing", + -10.312403678894045 + ], + [ + "▁institutions", + -10.312485694885254 + ], + [ + "▁Angeles", + -10.312525749206545 + ], + [ + "▁agents", + -10.312627792358398 + ], + [ + "▁relatively", + -10.312893867492676 + ], + [ + "you", + -10.313034057617188 + ], + [ + "Another", + -10.3131742477417 + ], + [ + "▁reputation", + -10.313185691833496 + ], + [ + "▁gorgeous", + -10.31319808959961 + ], + [ + "▁sustainable", + -10.313865661621094 + ], + [ + "▁casino", + -10.31389045715332 + ], + [ + "▁soil", + -10.31495189666748 + ], + [ + "▁camp", + -10.3150053024292 + ], + [ + "▁removal", + -10.315321922302246 + ], + [ + "▁Royal", + -10.315346717834473 + ], + [ + "▁protein", + -10.315645217895508 + ], + [ + "▁expand", + -10.31569766998291 + ], + [ + "▁identified", + -10.316365242004396 + ], + [ + "▁Plan", + -10.316400527954102 + ], + [ + "▁Set", + -10.31678581237793 + ], + [ + "▁guidance", + -10.317577362060549 + ], + [ + "▁Children", + -10.318115234375 + ], + [ + "▁=", + -10.31856632232666 + ], + [ + "▁picked", + -10.319306373596191 + ], + [ + "▁Sea", + -10.319332122802734 + ], + [ + "▁engage", + -10.319355010986328 + ], + [ + "▁Its", + -10.319561958312988 + ], + [ + "▁mouth", + -10.319564819335938 + ], + [ + "▁height", + -10.31958293914795 + ], + [ + "▁chat", + -10.319917678833008 + ], + [ + "▁circumstances", + -10.320040702819824 + ], + [ + "▁interface", + -10.320195198059082 + ], + [ + "▁Robert", + -10.320196151733398 + ], + [ + "▁Instagram", + -10.320209503173828 + ], + [ + "▁Peter", + -10.320723533630373 + ], + [ + "▁architecture", + -10.32076644897461 + ], + [ + "os", + -10.321782112121582 + ], + [ + "▁bags", + -10.322359085083008 + ], + [ + "▁Congress", + -10.32338047027588 + ], + [ + 
"▁70", + -10.323448181152344 + ], + [ + "▁ages", + -10.324333190917969 + ], + [ + "▁technique", + -10.32436180114746 + ], + [ + "▁strategic", + -10.324615478515623 + ], + [ + "▁mom", + -10.32520580291748 + ], + [ + "▁happening", + -10.325218200683594 + ], + [ + "▁returns", + -10.32590675354004 + ], + [ + "▁organized", + -10.32614803314209 + ], + [ + "▁ends", + -10.326176643371582 + ], + [ + "▁Journal", + -10.326864242553713 + ], + [ + "▁la", + -10.328099250793455 + ], + [ + "▁ran", + -10.328146934509276 + ], + [ + "▁ongoing", + -10.328202247619627 + ], + [ + "▁Light", + -10.32828426361084 + ], + [ + "▁aspect", + -10.329447746276855 + ], + [ + "ling", + -10.330002784729004 + ], + [ + "▁45", + -10.330188751220703 + ], + [ + "quality", + -10.330265998840332 + ], + [ + "▁keeps", + -10.330317497253418 + ], + [ + "▁Full", + -10.330631256103516 + ], + [ + "▁bottle", + -10.330950736999512 + ], + [ + "▁El", + -10.33187484741211 + ], + [ + "▁charged", + -10.33224868774414 + ], + [ + "▁noticed", + -10.332544326782228 + ], + [ + "▁joy", + -10.33261013031006 + ], + [ + "▁yesterday", + -10.332884788513184 + ], + [ + "▁stopped", + -10.333337783813477 + ], + [ + "▁managing", + -10.334205627441406 + ], + [ + "▁implementation", + -10.33444595336914 + ], + [ + "▁d", + -10.33450984954834 + ], + [ + "▁grew", + -10.334779739379885 + ], + [ + "▁Rock", + -10.335332870483398 + ], + [ + "▁fly", + -10.335423469543455 + ], + [ + "0", + -10.335874557495115 + ], + [ + "▁entirely", + -10.336189270019531 + ], + [ + "▁festival", + -10.336270332336426 + ], + [ + "▁Town", + -10.33790683746338 + ], + [ + "▁arts", + -10.337955474853516 + ], + [ + "▁Those", + -10.338300704956056 + ], + [ + "▁musical", + -10.338624000549316 + ], + [ + "▁identity", + -10.338677406311035 + ], + [ + "▁worse", + -10.33891773223877 + ], + [ + "▁noted", + -10.339218139648438 + ], + [ + "▁collected", + -10.339930534362791 + ], + [ + "▁neighborhood", + -10.340527534484863 + ], + [ + "▁port", + -10.340645790100098 + ], + [ + "▁capture", + -10.340821266174316 + ], + [ + "▁agreed", + -10.341989517211914 + ], + [ + "▁positions", + -10.342028617858888 + ], + [ + "▁regulations", + -10.342060089111328 + ], + [ + "▁capable", + -10.342496871948242 + ], + [ + "▁element", + -10.342766761779783 + ], + [ + "▁motor", + -10.343476295471191 + ], + [ + "▁Game", + -10.34412956237793 + ], + [ + "▁stated", + -10.344151496887209 + ], + [ + "▁Cup", + -10.34512424468994 + ], + [ + "▁Paris", + -10.345287322998049 + ], + [ + "▁printed", + -10.345572471618652 + ], + [ + "ry", + -10.346055030822754 + ], + [ + "▁NOT", + -10.34736442565918 + ], + [ + "▁turning", + -10.348283767700195 + ], + [ + "▁alcohol", + -10.34844207763672 + ], + [ + "Let", + -10.349237442016602 + ], + [ + "▁Code", + -10.349369049072266 + ], + [ + "▁mixture", + -10.349373817443848 + ], + [ + "▁tested", + -10.349465370178224 + ], + [ + "▁fell", + -10.349870681762695 + ], + [ + "Most", + -10.350138664245604 + ], + [ + "▁graduate", + -10.352840423583984 + ], + [ + "▁n", + -10.353327751159668 + ], + [ + "▁Ideas", + -10.353919982910156 + ], + [ + "▁description", + -10.354044914245604 + ], + [ + "▁phase", + -10.354104042053224 + ], + [ + "▁attached", + -10.354269981384276 + ], + [ + "▁Maybe", + -10.35473918914795 + ], + [ + "▁31", + -10.355009078979492 + ], + [ + "▁boys", + -10.35521125793457 + ], + [ + "ch", + -10.355504989624023 + ], + [ + "▁mainly", + -10.356173515319824 + ], + [ + "▁defense", + -10.356258392333984 + ], + [ + "▁scientific", + -10.356285095214844 + ], + [ + "▁personally", + -10.35702419281006 + ], 
+ [ + "▁Keep", + -10.357524871826172 + ], + [ + "▁specialist", + -10.358139991760254 + ], + [ + "▁solve", + -10.358965873718262 + ], + [ + "▁candidate", + -10.359549522399902 + ], + [ + "▁objects", + -10.359939575195312 + ], + [ + "▁input", + -10.360441207885742 + ], + [ + "▁deck", + -10.360628128051758 + ], + [ + "▁risks", + -10.361251831054688 + ], + [ + "▁compare", + -10.361526489257812 + ], + [ + "▁honest", + -10.361699104309082 + ], + [ + "▁exist", + -10.361896514892578 + ], + [ + "▁whenever", + -10.362237930297852 + ], + [ + "▁Thomas", + -10.362554550170898 + ], + [ + "▁wire", + -10.362560272216797 + ], + [ + "▁vary", + -10.362762451171877 + ], + [ + "▁membership", + -10.362871170043944 + ], + [ + "▁injuries", + -10.363316535949709 + ], + [ + "▁Spring", + -10.363818168640137 + ], + [ + "▁normally", + -10.364482879638672 + ], + [ + "▁generate", + -10.36471939086914 + ], + [ + "▁29", + -10.365556716918944 + ], + [ + "Get", + -10.367162704467772 + ], + [ + "▁performing", + -10.3673734664917 + ], + [ + "20", + -10.367894172668455 + ], + [ + "▁meals", + -10.368160247802734 + ], + [ + "▁banks", + -10.368345260620115 + ], + [ + "▁industries", + -10.368537902832031 + ], + [ + "▁inch", + -10.368592262268066 + ], + [ + "▁clothing", + -10.368882179260254 + ], + [ + "ta", + -10.36894702911377 + ], + [ + "▁boost", + -10.36921501159668 + ], + [ + "▁contribute", + -10.36953353881836 + ], + [ + "▁Jan", + -10.369717597961426 + ], + [ + "▁Small", + -10.36976146697998 + ], + [ + "ma", + -10.369918823242188 + ], + [ + "▁Santa", + -10.3699951171875 + ], + [ + "▁Tom", + -10.370909690856934 + ], + [ + "▁extension", + -10.371849060058594 + ], + [ + "▁gear", + -10.37197971343994 + ], + [ + "▁Buy", + -10.371991157531738 + ], + [ + "▁unable", + -10.372963905334473 + ], + [ + "▁platforms", + -10.373065948486328 + ], + [ + "▁ultimate", + -10.37331771850586 + ], + [ + "▁reporting", + -10.374348640441896 + ], + [ + "▁secret", + -10.374699592590332 + ], + [ + "▁crucial", + -10.374853134155272 + ], + [ + "▁creates", + -10.374913215637209 + ], + [ + "▁citizens", + -10.37508773803711 + ], + [ + "▁drugs", + -10.375381469726562 + ], + [ + "▁preparation", + -10.375630378723145 + ], + [ + "▁motion", + -10.37569808959961 + ], + [ + "▁bonus", + -10.37658405303955 + ], + [ + "▁attended", + -10.377038955688477 + ], + [ + "▁investigation", + -10.377347946166992 + ], + [ + "▁Control", + -10.37766456604004 + ], + [ + "▁historic", + -10.37818717956543 + ], + [ + "Also", + -10.378347396850586 + ], + [ + "▁dynamic", + -10.379666328430176 + ], + [ + "▁Live", + -10.379987716674805 + ], + [ + "▁Fire", + -10.380390167236328 + ], + [ + "▁300", + -10.380976676940918 + ], + [ + "▁filed", + -10.381773948669434 + ], + [ + "▁wheel", + -10.382136344909668 + ], + [ + "friendly", + -10.382246017456056 + ], + [ + "▁Before", + -10.382573127746582 + ], + [ + "▁et", + -10.382840156555176 + ], + [ + "▁episode", + -10.383271217346191 + ], + [ + "▁Spanish", + -10.383289337158203 + ], + [ + "▁powder", + -10.383622169494627 + ], + [ + "▁Russia", + -10.383726119995115 + ], + [ + "▁Un", + -10.384778022766112 + ], + [ + "ian", + -10.384906768798828 + ], + [ + "▁virtual", + -10.38497543334961 + ], + [ + "▁revealed", + -10.385398864746094 + ], + [ + "▁films", + -10.38563346862793 + ], + [ + "▁ancient", + -10.386109352111816 + ], + [ + "▁mining", + -10.386222839355469 + ], + [ + "▁craft", + -10.386327743530272 + ], + [ + "▁attractive", + -10.38668727874756 + ], + [ + "▁Using", + -10.387113571166992 + ], + [ + "▁founded", + -10.38784122467041 + ], + [ + 
"▁Therefore", + -10.388400077819824 + ], + [ + "▁originally", + -10.388470649719238 + ], + [ + "▁invited", + -10.388690948486328 + ], + [ + "ca", + -10.388753890991213 + ], + [ + "▁IP", + -10.389203071594238 + ], + [ + "▁theory", + -10.389274597167969 + ], + [ + "▁tech", + -10.391098976135254 + ], + [ + "▁expenses", + -10.39112663269043 + ], + [ + "▁tiny", + -10.391403198242188 + ], + [ + "▁relief", + -10.39206886291504 + ], + [ + "▁filter", + -10.39283847808838 + ], + [ + "▁log", + -10.39284896850586 + ], + [ + "▁pricing", + -10.393638610839844 + ], + [ + "▁Library", + -10.394240379333496 + ], + [ + "▁Carolina", + -10.394370079040527 + ], + [ + "▁magazine", + -10.394525527954102 + ], + [ + "▁proven", + -10.395397186279297 + ], + [ + "▁committee", + -10.395482063293455 + ], + [ + "▁urban", + -10.395586967468262 + ], + [ + "▁reflect", + -10.396045684814451 + ], + [ + "▁Chris", + -10.396448135375977 + ], + [ + "▁Sometimes", + -10.396536827087402 + ], + [ + "▁equal", + -10.39676284790039 + ], + [ + "▁entered", + -10.39676570892334 + ], + [ + "▁networks", + -10.397180557250977 + ], + [ + "▁dealing", + -10.39767074584961 + ], + [ + "▁default", + -10.399163246154783 + ], + [ + "▁heating", + -10.399264335632324 + ], + [ + "▁offices", + -10.399269104003906 + ], + [ + "▁ensuring", + -10.399354934692385 + ], + [ + "▁consistent", + -10.399701118469238 + ], + [ + "▁satisfaction", + -10.400209426879885 + ], + [ + "First", + -10.400460243225098 + ], + [ + "▁availability", + -10.40047836303711 + ], + [ + "Re", + -10.400542259216309 + ], + [ + "▁ceiling", + -10.40058135986328 + ], + [ + "▁developers", + -10.400660514831545 + ], + [ + "▁weekly", + -10.40085506439209 + ], + [ + "▁achieved", + -10.40086841583252 + ], + [ + "5.", + -10.401134490966797 + ], + [ + "▁obvious", + -10.401471138000488 + ], + [ + "▁booking", + -10.401799201965332 + ], + [ + "▁Another", + -10.402174949645996 + ], + [ + "▁Mobile", + -10.402783393859863 + ], + [ + "related", + -10.403189659118652 + ], + [ + "ies", + -10.403326034545898 + ], + [ + "Click", + -10.403560638427734 + ], + [ + "▁officer", + -10.403770446777344 + ], + [ + "▁Little", + -10.403912544250488 + ], + [ + "▁immediate", + -10.404885292053224 + ], + [ + "▁Digital", + -10.405306816101074 + ], + [ + "▁SEO", + -10.406455039978027 + ], + [ + "▁finance", + -10.407018661499023 + ], + [ + "▁recipes", + -10.407533645629885 + ], + [ + "▁recognize", + -10.408095359802246 + ], + [ + "▁hotels", + -10.408487319946287 + ], + [ + "▁versions", + -10.408753395080566 + ], + [ + "▁Special", + -10.40903091430664 + ], + [ + "▁fairly", + -10.409724235534668 + ], + [ + "▁pink", + -10.409805297851562 + ], + [ + "▁treatments", + -10.40990161895752 + ], + [ + "▁visited", + -10.41065788269043 + ], + [ + "▁trail", + -10.410792350769045 + ], + [ + "▁linked", + -10.410812377929688 + ], + [ + "▁viewed", + -10.411025047302246 + ], + [ + "▁routine", + -10.41208839416504 + ], + [ + "▁tables", + -10.41209602355957 + ], + [ + "▁Auto", + -10.41211986541748 + ], + [ + "▁bread", + -10.412223815917969 + ], + [ + "▁enables", + -10.41250228881836 + ], + [ + "▁Insurance", + -10.412525177001951 + ], + [ + "▁listening", + -10.412527084350586 + ], + [ + "▁smile", + -10.412999153137209 + ], + [ + "▁fellow", + -10.413472175598145 + ], + [ + "▁dangerous", + -10.413883209228516 + ], + [ + "da", + -10.414109230041504 + ], + [ + "▁Hospital", + -10.414382934570312 + ], + [ + "▁desk", + -10.414457321166992 + ], + [ + "▁Where", + -10.414770126342772 + ], + [ + "▁county", + -10.4150972366333 + ], + [ + "▁eligible", + 
-10.415549278259276 + ], + [ + "▁Virginia", + -10.415820121765137 + ], + [ + "▁forced", + -10.416372299194336 + ], + [ + "▁CD", + -10.416570663452148 + ], + [ + "▁label", + -10.416854858398438 + ], + [ + "▁constant", + -10.416963577270508 + ], + [ + "▁firms", + -10.41697597503662 + ], + [ + "▁carbon", + -10.416976928710938 + ], + [ + "▁Ireland", + -10.416987419128418 + ], + [ + "▁Americans", + -10.41706371307373 + ], + [ + "▁desired", + -10.417715072631836 + ], + [ + "▁mortgage", + -10.418290138244627 + ], + [ + "-1", + -10.419398307800291 + ], + [ + "▁defined", + -10.419670104980469 + ], + [ + "▁awarded", + -10.419845581054688 + ], + [ + "back", + -10.420092582702637 + ], + [ + "▁hidden", + -10.420554161071776 + ], + [ + "▁consultation", + -10.42087459564209 + ], + [ + "▁depth", + -10.420875549316406 + ], + [ + "Well", + -10.421449661254885 + ], + [ + "ment", + -10.421576499938965 + ], + [ + "▁formed", + -10.422406196594238 + ], + [ + "▁Martin", + -10.423251152038574 + ], + [ + "▁confident", + -10.423372268676758 + ], + [ + "▁portfolio", + -10.423577308654783 + ], + [ + "▁throw", + -10.423813819885254 + ], + [ + "▁Head", + -10.424400329589844 + ], + [ + "▁conduct", + -10.424612045288086 + ], + [ + "▁Store", + -10.42520236968994 + ], + [ + "▁west", + -10.425457954406738 + ], + [ + "▁losing", + -10.426027297973633 + ], + [ + "▁luck", + -10.426159858703612 + ], + [ + "▁drawing", + -10.427599906921388 + ], + [ + "▁Police", + -10.428150177001951 + ], + [ + "▁portion", + -10.428297996520996 + ], + [ + "▁tells", + -10.428525924682615 + ], + [ + "▁exhibition", + -10.42939281463623 + ], + [ + "▁shooting", + -10.429657936096191 + ], + [ + "▁2008", + -10.429950714111328 + ], + [ + "▁sand", + -10.430459022521973 + ], + [ + "▁papers", + -10.430492401123049 + ], + [ + "level", + -10.431056022644045 + ], + [ + "▁impossible", + -10.432293891906738 + ], + [ + "▁implement", + -10.432334899902344 + ], + [ + "▁Middle", + -10.432969093322754 + ], + [ + "▁Italy", + -10.433351516723633 + ], + [ + "wood", + -10.43345069885254 + ], + [ + "see", + -10.433878898620604 + ], + [ + "▁communicate", + -10.434317588806152 + ], + [ + "▁represents", + -10.434529304504396 + ], + [ + "▁alongside", + -10.436155319213867 + ], + [ + "▁telling", + -10.436219215393066 + ], + [ + "▁vast", + -10.436464309692385 + ], + [ + "▁bridge", + -10.4366455078125 + ], + [ + "▁Systems", + -10.436774253845217 + ], + [ + "▁rating", + -10.437170028686523 + ], + [ + "▁awards", + -10.43738079071045 + ], + [ + "▁Work", + -10.438258171081545 + ], + [ + "▁Mike", + -10.43894863128662 + ], + [ + "▁exposure", + -10.439261436462402 + ], + [ + "▁codes", + -10.4403076171875 + ], + [ + "▁blend", + -10.440335273742676 + ], + [ + "▁Pre", + -10.44041919708252 + ], + [ + "▁seriously", + -10.440574645996094 + ], + [ + "▁criminal", + -10.440862655639648 + ], + [ + "▁seats", + -10.441434860229492 + ], + [ + "▁2009", + -10.441854476928713 + ], + [ + "▁Play", + -10.442049026489258 + ], + [ + "▁wooden", + -10.442621231079102 + ], + [ + "▁Times", + -10.442635536193848 + ], + [ + "well", + -10.443589210510254 + ], + [ + "▁boxes", + -10.44374179840088 + ], + [ + "▁cabinet", + -10.44426155090332 + ], + [ + "▁spiritual", + -10.444729804992676 + ], + [ + "▁broad", + -10.44483757019043 + ], + [ + "▁crowd", + -10.444872856140137 + ], + [ + "▁folks", + -10.444893836975098 + ], + [ + "▁recording", + -10.4454927444458 + ], + [ + "▁Place", + -10.445765495300291 + ], + [ + "▁purchasing", + -10.445907592773438 + ], + [ + "▁figures", + -10.446020126342772 + ], + [ + "▁Guys", + 
-10.44689655303955 + ], + [ + "▁lucky", + -10.446989059448242 + ], + [ + "▁printing", + -10.447525024414062 + ], + [ + "▁explained", + -10.44770336151123 + ], + [ + "▁Shop", + -10.448293685913086 + ], + [ + "▁swimming", + -10.448844909667969 + ], + [ + "▁letters", + -10.449353218078612 + ], + [ + "▁About", + -10.449493408203123 + ], + [ + "▁capabilities", + -10.449673652648926 + ], + [ + "▁speech", + -10.450937271118164 + ], + [ + "▁Marketing", + -10.45155906677246 + ], + [ + "▁marriage", + -10.451797485351562 + ], + [ + "▁increases", + -10.451979637145996 + ], + [ + "▁exceptional", + -10.452011108398438 + ], + [ + "▁widely", + -10.452674865722656 + ], + [ + "Have", + -10.452680587768556 + ], + [ + "▁establish", + -10.4531888961792 + ], + [ + "▁upgrade", + -10.453288078308104 + ], + [ + "▁buyers", + -10.45352268218994 + ], + [ + "▁loves", + -10.45400619506836 + ], + [ + "▁component", + -10.454293251037598 + ], + [ + "▁superior", + -10.455004692077637 + ], + [ + "▁appeal", + -10.455020904541016 + ], + [ + "▁Learn", + -10.455127716064451 + ], + [ + "▁Boston", + -10.455184936523438 + ], + [ + "▁handling", + -10.455224990844728 + ], + [ + "▁violence", + -10.456661224365234 + ], + [ + "▁shift", + -10.457253456115724 + ], + [ + "▁View", + -10.457371711730955 + ], + [ + "▁enjoying", + -10.457813262939451 + ], + [ + "▁enterprise", + -10.459486961364746 + ], + [ + "▁neck", + -10.459534645080566 + ], + [ + "▁Review", + -10.459678649902344 + ], + [ + "▁resulting", + -10.460411071777344 + ], + [ + "▁taxes", + -10.460424423217772 + ], + [ + "▁William", + -10.460562705993652 + ], + [ + "▁tomorrow", + -10.460652351379396 + ], + [ + "▁horse", + -10.460694313049316 + ], + [ + "▁consists", + -10.461071014404297 + ], + [ + "▁PDF", + -10.461329460144045 + ], + [ + "▁Lee", + -10.46178150177002 + ], + [ + "▁magic", + -10.461962699890137 + ], + [ + "▁Back", + -10.462060928344728 + ], + [ + "▁tank", + -10.462775230407717 + ], + [ + "▁Jersey", + -10.463306427001951 + ], + [ + "▁ordered", + -10.463878631591797 + ], + [ + "▁eggs", + -10.464224815368652 + ], + [ + "▁possibility", + -10.46430206298828 + ], + [ + "▁carpet", + -10.464387893676758 + ], + [ + "▁indeed", + -10.46466827392578 + ], + [ + "field", + -10.464852333068848 + ], + [ + "▁rice", + -10.465250968933104 + ], + [ + "▁Training", + -10.46550464630127 + ], + [ + "▁prove", + -10.466075897216797 + ], + [ + "▁Engineering", + -10.4661865234375 + ], + [ + "▁adds", + -10.466297149658203 + ], + [ + "▁memories", + -10.466842651367188 + ], + [ + "▁soul", + -10.467011451721191 + ], + [ + "▁command", + -10.467100143432615 + ], + [ + "▁raw", + -10.4672212600708 + ], + [ + "▁Le", + -10.467591285705566 + ], + [ + "▁communications", + -10.467625617980955 + ], + [ + "▁liked", + -10.46772289276123 + ], + [ + "▁decade", + -10.467931747436523 + ], + [ + "▁resistance", + -10.467939376831056 + ], + [ + "▁suggested", + -10.468929290771484 + ], + [ + "▁2016.", + -10.469097137451172 + ], + [ + "▁Coast", + -10.469308853149414 + ], + [ + "▁Wood", + -10.469525337219238 + ], + [ + "▁minor", + -10.469632148742676 + ], + [ + "▁religious", + -10.47003936767578 + ], + [ + "▁invest", + -10.470675468444824 + ], + [ + "▁Area", + -10.470970153808594 + ], + [ + "▁32", + -10.47102165222168 + ], + [ + "▁scheme", + -10.471095085144045 + ], + [ + "▁profit", + -10.471273422241213 + ], + [ + "he", + -10.471394538879396 + ], + [ + "▁categories", + -10.471431732177734 + ], + [ + "▁FREE", + -10.47190284729004 + ], + [ + "▁Garden", + -10.472174644470217 + ], + [ + "▁Enjoy", + -10.472293853759766 + ], + 
[ + "▁submitted", + -10.472404479980469 + ], + [ + "▁Southern", + -10.472511291503906 + ], + [ + "▁Next", + -10.472660064697266 + ], + [ + "▁thick", + -10.472704887390137 + ], + [ + "▁password", + -10.473129272460938 + ], + [ + "▁tagged", + -10.47372341156006 + ], + [ + "▁express", + -10.473782539367676 + ], + [ + "▁km", + -10.473905563354492 + ], + [ + "▁chief", + -10.474321365356444 + ], + [ + "▁scored", + -10.474431037902832 + ], + [ + "▁flavor", + -10.474665641784668 + ], + [ + "▁permanent", + -10.474903106689451 + ], + [ + "▁muscle", + -10.474905967712402 + ], + [ + "▁managers", + -10.474952697753906 + ], + [ + "age", + -10.47543239593506 + ], + [ + "▁compliance", + -10.475481986999512 + ], + [ + "▁replaced", + -10.475573539733888 + ], + [ + "▁Obama", + -10.476557731628418 + ], + [ + "▁YouTube", + -10.476818084716797 + ], + [ + "▁crisis", + -10.477149963378906 + ], + [ + "▁venue", + -10.477171897888184 + ], + [ + "▁Federal", + -10.477490425109863 + ], + [ + "▁healing", + -10.47775650024414 + ], + [ + "▁Airport", + -10.478014945983888 + ], + [ + "▁valid", + -10.478317260742188 + ], + [ + "▁elegant", + -10.478965759277344 + ], + [ + "▁jump", + -10.479252815246582 + ], + [ + "▁civil", + -10.4793701171875 + ], + [ + "▁confirmed", + -10.47962474822998 + ], + [ + "▁noise", + -10.479902267456056 + ], + [ + "▁Price", + -10.479914665222168 + ], + [ + "▁whilst", + -10.480304718017578 + ], + [ + "▁none", + -10.48033618927002 + ], + [ + "▁dishes", + -10.480571746826172 + ], + [ + "▁Students", + -10.480999946594238 + ], + [ + "▁Kitchen", + -10.481101989746094 + ], + [ + "▁channels", + -10.482008934020996 + ], + [ + "▁depends", + -10.482011795043944 + ], + [ + "▁discussed", + -10.482373237609863 + ], + [ + "▁ID", + -10.48261547088623 + ], + [ + "▁labor", + -10.482851028442385 + ], + [ + "▁2000", + -10.48293685913086 + ], + [ + "▁promise", + -10.483346939086914 + ], + [ + "▁surprised", + -10.483600616455078 + ], + [ + "▁ultimately", + -10.483969688415527 + ], + [ + "▁priority", + -10.48524570465088 + ], + [ + "▁incredibly", + -10.486169815063477 + ], + [ + "▁array", + -10.486350059509276 + ], + [ + "▁chose", + -10.487264633178713 + ], + [ + "▁reducing", + -10.487265586853027 + ], + [ + "▁household", + -10.48758316040039 + ], + [ + "▁duty", + -10.488027572631836 + ], + [ + "j", + -10.488030433654783 + ], + [ + "▁Games", + -10.488380432128906 + ], + [ + "▁chemical", + -10.488420486450195 + ], + [ + "▁joining", + -10.488802909851074 + ], + [ + "▁zone", + -10.489500045776367 + ], + [ + "▁Zealand", + -10.490099906921388 + ], + [ + "▁somewhere", + -10.490214347839355 + ], + [ + "▁bodies", + -10.490710258483888 + ], + [ + "▁hoping", + -10.49137020111084 + ], + [ + "side", + -10.491765022277832 + ], + [ + "▁involves", + -10.492148399353027 + ], + [ + "▁realized", + -10.49217414855957 + ], + [ + "▁holidays", + -10.492507934570312 + ], + [ + "▁chairs", + -10.492792129516602 + ], + [ + "ation", + -10.492840766906738 + ], + [ + "▁Does", + -10.493185997009276 + ], + [ + "▁Scott", + -10.493456840515137 + ], + [ + "▁Class", + -10.494010925292969 + ], + [ + "▁officers", + -10.494074821472168 + ], + [ + "▁streets", + -10.494133949279783 + ], + [ + "▁sending", + -10.494292259216309 + ], + [ + "Be", + -10.49553108215332 + ], + [ + "▁vintage", + -10.495678901672363 + ], + [ + "▁tip", + -10.496169090270996 + ], + [ + "▁crime", + -10.496365547180176 + ], + [ + "▁Through", + -10.497061729431152 + ], + [ + "▁colours", + -10.497130393981934 + ], + [ + "During", + -10.497184753417969 + ], + [ + "▁vegetables", + 
-10.497721672058104 + ], + [ + "Although", + -10.497912406921388 + ], + [ + "▁transition", + -10.498244285583496 + ], + [ + "▁displayed", + -10.498272895812988 + ], + [ + "▁brief", + -10.4985933303833 + ], + [ + "▁producing", + -10.499526023864746 + ], + [ + "▁stored", + -10.499693870544434 + ], + [ + "▁Mar", + -10.499980926513672 + ], + [ + "12", + -10.500020027160645 + ], + [ + "Today", + -10.500420570373535 + ], + [ + "▁Michigan", + -10.50064182281494 + ], + [ + "▁Bible", + -10.501279830932615 + ], + [ + "▁hosted", + -10.501423835754396 + ], + [ + "▁combine", + -10.501981735229492 + ], + [ + "Last", + -10.5021333694458 + ], + [ + "▁pride", + -10.502873420715332 + ], + [ + "▁dreams", + -10.502973556518556 + ], + [ + "▁arms", + -10.50337028503418 + ], + [ + "▁deposit", + -10.503409385681152 + ], + [ + "▁attacks", + -10.504490852355955 + ], + [ + "▁root", + -10.504569053649902 + ], + [ + "▁saved", + -10.504741668701172 + ], + [ + "▁usage", + -10.505040168762209 + ], + [ + "▁finest", + -10.505108833312988 + ], + [ + "▁passing", + -10.505779266357422 + ], + [ + "▁approval", + -10.505908012390137 + ], + [ + "▁suggestions", + -10.50613784790039 + ], + [ + "?”", + -10.506314277648926 + ], + [ + "▁percentage", + -10.506327629089355 + ], + [ + "Hi", + -10.50695514678955 + ], + [ + "▁requests", + -10.507220268249512 + ], + [ + "▁guidelines", + -10.507315635681152 + ], + [ + "▁describe", + -10.507341384887695 + ], + [ + "▁estimated", + -10.507500648498535 + ], + [ + "▁crazy", + -10.507542610168455 + ], + [ + "▁younger", + -10.507978439331056 + ], + [ + "▁recommendations", + -10.508584976196287 + ], + [ + "▁aside", + -10.508586883544922 + ], + [ + "▁increasingly", + -10.509403228759766 + ], + [ + "▁packages", + -10.509647369384766 + ], + [ + "▁licensed", + -10.509718894958496 + ], + [ + "men", + -10.509828567504885 + ], + [ + "▁tradition", + -10.510140419006348 + ], + [ + "▁matches", + -10.510159492492676 + ], + [ + "▁Site", + -10.51180934906006 + ], + [ + "▁occasion", + -10.51183795928955 + ], + [ + "▁egg", + -10.512040138244627 + ], + [ + "▁Jack", + -10.512351989746094 + ], + [ + "▁slowly", + -10.512521743774414 + ], + [ + "▁Part", + -10.512531280517578 + ], + [ + "▁pace", + -10.512614250183104 + ], + [ + "▁Trust", + -10.51268196105957 + ], + [ + "▁honor", + -10.512906074523926 + ], + [ + "▁graphics", + -10.513141632080078 + ], + [ + "▁inner", + -10.513160705566406 + ], + [ + "▁connections", + -10.513304710388184 + ], + [ + "▁Financial", + -10.513513565063477 + ], + [ + "▁listing", + -10.513737678527832 + ], + [ + "▁Note", + -10.51509952545166 + ], + [ + "▁authority", + -10.515207290649414 + ], + [ + "▁primarily", + -10.515303611755373 + ], + [ + "▁writers", + -10.515966415405272 + ], + [ + "▁fail", + -10.51621913909912 + ], + [ + "▁engaged", + -10.516546249389648 + ], + [ + "▁supposed", + -10.516722679138184 + ], + [ + "▁cookies", + -10.518071174621582 + ], + [ + "▁border", + -10.51821231842041 + ], + [ + "▁programming", + -10.518381118774414 + ], + [ + "▁$", + -10.519742012023926 + ], + [ + "her", + -10.520416259765623 + ], + [ + "▁pleasure", + -10.520503997802734 + ], + [ + "▁Executive", + -10.520870208740234 + ], + [ + "▁Out", + -10.521576881408691 + ], + [ + "▁classroom", + -10.521974563598633 + ], + [ + "▁roles", + -10.522103309631348 + ], + [ + "▁victory", + -10.52235984802246 + ], + [ + "▁Chief", + -10.523032188415527 + ], + [ + "▁Building", + -10.523103713989258 + ], + [ + "▁calendar", + -10.52316951751709 + ], + [ + "▁chart", + -10.523207664489746 + ], + [ + "▁Human", + 
-10.523530006408691 + ], + [ + "▁LED", + -10.523921012878418 + ], + [ + "▁yard", + -10.523934364318848 + ], + [ + "▁Guide", + -10.524405479431152 + ], + [ + "Next", + -10.524645805358888 + ], + [ + "▁reader", + -10.525155067443848 + ], + [ + "▁App", + -10.52535629272461 + ], + [ + "▁volunteers", + -10.525952339172363 + ], + [ + "▁coast", + -10.526121139526367 + ], + [ + "▁setup", + -10.526165962219238 + ], + [ + "▁Video", + -10.526880264282228 + ], + [ + "▁factory", + -10.527050971984863 + ], + [ + "▁obtained", + -10.527447700500488 + ], + [ + "▁Colorado", + -10.528412818908691 + ], + [ + "▁behalf", + -10.52900505065918 + ], + [ + "▁forever", + -10.529727935791016 + ], + [ + "▁maintaining", + -10.529854774475098 + ], + [ + "▁mirror", + -10.529878616333008 + ], + [ + "▁relax", + -10.529987335205078 + ], + [ + "▁covering", + -10.530454635620115 + ], + [ + "▁Having", + -10.530893325805664 + ], + [ + "▁Very", + -10.531418800354004 + ], + [ + "▁Welcome", + -10.53150749206543 + ], + [ + "▁chapter", + -10.531960487365724 + ], + [ + "▁Ohio", + -10.532126426696776 + ], + [ + "▁hole", + -10.532328605651855 + ], + [ + "▁skilled", + -10.532328605651855 + ], + [ + "▁Northern", + -10.532487869262695 + ], + [ + "▁presents", + -10.532662391662598 + ], + [ + "▁Ben", + -10.533360481262209 + ], + [ + "▁editor", + -10.533923149108888 + ], + [ + "▁lock", + -10.53395175933838 + ], + [ + "▁accommodation", + -10.534356117248535 + ], + [ + "um", + -10.534942626953123 + ], + [ + "▁Sports", + -10.535213470458984 + ], + [ + "▁introduction", + -10.535300254821776 + ], + [ + "▁DC", + -10.535375595092772 + ], + [ + "▁Watch", + -10.535541534423828 + ], + [ + "▁continuing", + -10.53586483001709 + ], + [ + "that", + -10.536417007446287 + ], + [ + "▁arm", + -10.536898612976074 + ], + [ + "▁Card", + -10.537341117858888 + ], + [ + "▁suffering", + -10.537473678588867 + ], + [ + "▁yoga", + -10.537572860717772 + ], + [ + "▁struggle", + -10.537603378295898 + ], + [ + "▁male", + -10.537757873535156 + ], + [ + "▁bars", + -10.538463592529297 + ], + [ + "▁kid", + -10.53880500793457 + ], + [ + "▁formal", + -10.538933753967283 + ], + [ + "▁grant", + -10.539299011230469 + ], + [ + "▁tile", + -10.539974212646484 + ], + [ + "▁consideration", + -10.540401458740234 + ], + [ + "▁severe", + -10.541016578674316 + ], + [ + "▁interactive", + -10.54165744781494 + ], + [ + "=", + -10.541930198669434 + ], + [ + "▁faculty", + -10.542394638061523 + ], + [ + "▁Francisco", + -10.542617797851562 + ], + [ + "▁Under", + -10.543609619140623 + ], + [ + "▁commonly", + -10.543617248535156 + ], + [ + "▁principles", + -10.543874740600586 + ], + [ + "▁millions", + -10.544498443603516 + ], + [ + "▁History", + -10.545069694519045 + ], + [ + "▁justice", + -10.545400619506836 + ], + [ + "▁naturally", + -10.54565143585205 + ], + [ + "▁suppliers", + -10.545960426330566 + ], + [ + "▁rely", + -10.546429634094238 + ], + [ + "▁authorities", + -10.547215461730955 + ], + [ + "▁worst", + -10.547298431396484 + ], + [ + "▁entrance", + -10.547462463378906 + ], + [ + "▁Living", + -10.547772407531738 + ], + [ + "▁potentially", + -10.548431396484377 + ], + [ + "▁legs", + -10.548514366149902 + ], + [ + "▁reduction", + -10.548736572265623 + ], + [ + "▁herself", + -10.54919719696045 + ], + [ + "▁Table", + -10.549392700195312 + ], + [ + "▁flower", + -10.549583435058594 + ], + [ + "▁acid", + -10.549726486206056 + ], + [ + "Then", + -10.550288200378418 + ], + [ + "per", + -10.55029582977295 + ], + [ + "▁applying", + -10.551332473754885 + ], + [ + "▁lawyer", + -10.551332473754885 + ], + 
[ + "▁Pacific", + -10.55178165435791 + ], + [ + "▁shouldn", + -10.55199909210205 + ], + [ + "▁integration", + -10.553210258483888 + ], + [ + "▁east", + -10.554000854492188 + ], + [ + "▁museum", + -10.554012298583984 + ], + [ + ");", + -10.554181098937988 + ], + [ + "use", + -10.554679870605469 + ], + [ + "▁beneficial", + -10.555072784423828 + ], + [ + "▁pure", + -10.555301666259766 + ], + [ + "Even", + -10.555435180664062 + ], + [ + "▁funny", + -10.555546760559082 + ], + [ + "▁micro", + -10.555797576904297 + ], + [ + "▁Which", + -10.55581760406494 + ], + [ + "▁regardless", + -10.555954933166504 + ], + [ + "▁electricity", + -10.556541442871094 + ], + [ + "▁Men", + -10.55697536468506 + ], + [ + "▁recognition", + -10.557475090026855 + ], + [ + "▁framework", + -10.558332443237305 + ], + [ + "▁layout", + -10.558512687683104 + ], + [ + "▁repairs", + -10.559019088745115 + ], + [ + "▁conflict", + -10.559584617614746 + ], + [ + "▁baking", + -10.560349464416504 + ], + [ + "▁juice", + -10.56035327911377 + ], + [ + "▁split", + -10.560392379760742 + ], + [ + "Dr", + -10.561012268066406 + ], + [ + "▁fighting", + -10.56131362915039 + ], + [ + "▁Support", + -10.56214427947998 + ], + [ + "▁checking", + -10.562228202819824 + ], + [ + "▁blocks", + -10.562871932983398 + ], + [ + "▁drinks", + -10.563203811645508 + ], + [ + "▁gun", + -10.564248085021973 + ], + [ + "More", + -10.564386367797852 + ], + [ + "▁sensitive", + -10.565197944641112 + ], + [ + "▁Search", + -10.565495491027832 + ], + [ + "▁decor", + -10.565621376037598 + ], + [ + "▁visible", + -10.565720558166504 + ], + [ + "▁certificate", + -10.56596565246582 + ], + [ + "ized", + -10.56682300567627 + ], + [ + "▁causing", + -10.56682586669922 + ], + [ + "▁careful", + -10.566850662231444 + ], + [ + "▁referred", + -10.566895484924316 + ], + [ + "▁filling", + -10.566974639892578 + ], + [ + "▁traveling", + -10.567830085754396 + ], + [ + "▁newly", + -10.567911148071287 + ], + [ + "▁requirement", + -10.56853199005127 + ], + [ + "▁designers", + -10.568832397460938 + ], + [ + "?\"", + -10.56900405883789 + ], + [ + "▁authors", + -10.569122314453123 + ], + [ + "Of", + -10.569600105285645 + ], + [ + "ally", + -10.570072174072266 + ], + [ + "▁Ford", + -10.570079803466797 + ], + [ + "▁birds", + -10.570477485656738 + ], + [ + "▁staying", + -10.570658683776855 + ], + [ + "▁panels", + -10.570839881896973 + ], + [ + "my", + -10.571053504943848 + ], + [ + "▁Mountain", + -10.571145057678224 + ], + [ + "▁lift", + -10.57143497467041 + ], + [ + "▁believed", + -10.571611404418944 + ], + [ + "▁laptop", + -10.572026252746582 + ], + [ + "▁errors", + -10.57248306274414 + ], + [ + "▁tracks", + -10.572602272033691 + ], + [ + "▁proof", + -10.572978019714355 + ], + [ + "▁consumption", + -10.57313060760498 + ], + [ + "▁drinking", + -10.573177337646484 + ], + [ + "▁functional", + -10.573297500610352 + ], + [ + "▁tight", + -10.5742769241333 + ], + [ + "Each", + -10.574277877807615 + ], + [ + "▁guitar", + -10.574458122253418 + ], + [ + "▁stainless", + -10.574470520019531 + ], + [ + "▁structures", + -10.574502944946287 + ], + [ + "▁encouraged", + -10.574645042419434 + ], + [ + "▁utility", + -10.57550048828125 + ], + [ + "▁delivering", + -10.575838088989258 + ], + [ + "▁forum", + -10.575960159301758 + ], + [ + "▁Johnson", + -10.57608127593994 + ], + [ + "▁driven", + -10.576308250427246 + ], + [ + "▁resort", + -10.576665878295898 + ], + [ + "▁shoot", + -10.577192306518556 + ], + [ + "▁Young", + -10.577763557434082 + ], + [ + "▁l", + -10.577964782714844 + ], + [ + "▁Summer", + 
-10.578784942626951 + ], + [ + "▁stuck", + -10.579275131225586 + ], + [ + "▁extreme", + -10.580615043640137 + ], + [ + "▁Machine", + -10.581388473510742 + ], + [ + "▁Professional", + -10.581443786621094 + ], + [ + "▁hopes", + -10.581583976745604 + ], + [ + "▁organisation", + -10.5818510055542 + ], + [ + "▁Look", + -10.582173347473145 + ], + [ + "▁row", + -10.58255672454834 + ], + [ + "▁lesson", + -10.582581520080566 + ], + [ + "▁tracking", + -10.582789421081545 + ], + [ + "▁signal", + -10.582964897155762 + ], + [ + "▁expansion", + -10.58336353302002 + ], + [ + "▁flexibility", + -10.583403587341309 + ], + [ + "▁convenience", + -10.58436107635498 + ], + [ + "▁comparison", + -10.58485221862793 + ], + [ + "▁USB", + -10.584903717041016 + ], + [ + "▁Jones", + -10.585563659667969 + ], + [ + "▁centers", + -10.585683822631836 + ], + [ + "▁dust", + -10.586395263671877 + ], + [ + "ish", + -10.586445808410645 + ], + [ + "Check", + -10.587061882019045 + ], + [ + "▁certification", + -10.58709716796875 + ], + [ + "▁2015.", + -10.587100982666016 + ], + [ + "▁initiative", + -10.587255477905272 + ], + [ + "▁everywhere", + -10.587395668029783 + ], + [ + "▁warranty", + -10.587610244750977 + ], + [ + "▁photographs", + -10.587613105773926 + ], + [ + "▁abuse", + -10.587862014770508 + ], + [ + "▁https", + -10.588105201721191 + ], + [ + "▁threat", + -10.588374137878418 + ], + [ + "▁resume", + -10.588401794433594 + ], + [ + "▁league", + -10.589154243469238 + ], + [ + "▁wealth", + -10.589256286621094 + ], + [ + "▁pump", + -10.589597702026367 + ], + [ + "▁mill", + -10.589625358581545 + ], + [ + "▁stream", + -10.590093612670898 + ], + [ + "▁Page", + -10.59025764465332 + ], + [ + "▁wireless", + -10.590514183044434 + ], + [ + "▁forest", + -10.590720176696776 + ], + [ + "▁Local", + -10.591097831726074 + ], + [ + "▁dropped", + -10.591179847717283 + ], + [ + "▁downtown", + -10.591333389282228 + ], + [ + "▁pot", + -10.591707229614258 + ], + [ + "▁feelings", + -10.592161178588867 + ], + [ + "▁doctors", + -10.592575073242188 + ], + [ + "▁preferred", + -10.592829704284668 + ], + [ + "ro", + -10.593070030212402 + ], + [ + "▁Joe", + -10.593141555786133 + ], + [ + "ization", + -10.593222618103027 + ], + [ + "▁ads", + -10.59323501586914 + ], + [ + "▁spoke", + -10.593432426452637 + ], + [ + "▁AM", + -10.593449592590332 + ], + [ + "▁differences", + -10.5944185256958 + ], + [ + "▁cap", + -10.594871520996094 + ], + [ + "▁personality", + -10.595422744750977 + ], + [ + "▁guaranteed", + -10.595913887023926 + ], + [ + "▁Sale", + -10.596078872680664 + ], + [ + "▁Oil", + -10.596235275268556 + ], + [ + "cm", + -10.596407890319824 + ], + [ + "▁functionality", + -10.596925735473633 + ], + [ + "▁Quality", + -10.597648620605469 + ], + [ + "▁Richard", + -10.597943305969238 + ], + [ + "▁1.", + -10.59820556640625 + ], + [ + "▁transaction", + -10.598431587219238 + ], + [ + "▁thin", + -10.599172592163086 + ], + [ + "▁shots", + -10.599284172058104 + ], + [ + "▁fits", + -10.59943675994873 + ], + [ + "▁moves", + -10.59986686706543 + ], + [ + "art", + -10.600133895874023 + ], + [ + "▁anxiety", + -10.600785255432127 + ], + [ + "▁posting", + -10.60114574432373 + ], + [ + "▁phones", + -10.601231575012209 + ], + [ + "▁stable", + -10.60137939453125 + ], + [ + "▁planet", + -10.60176944732666 + ], + [ + "▁trips", + -10.601903915405272 + ], + [ + "▁Fund", + -10.602025985717772 + ], + [ + "▁amounts", + -10.602228164672852 + ], + [ + "▁lifetime", + -10.602869987487791 + ], + [ + "Great", + -10.602922439575195 + ], + [ + "▁OR", + -10.60295867919922 + ], + [ + 
"▁compensation", + -10.603103637695312 + ], + [ + "▁retirement", + -10.603333473205566 + ], + [ + "▁spots", + -10.603386878967283 + ], + [ + "▁liquid", + -10.603554725646973 + ], + [ + "▁basically", + -10.604416847229004 + ], + [ + "▁aims", + -10.60569953918457 + ], + [ + "▁controlled", + -10.605842590332031 + ], + [ + "Make", + -10.606295585632324 + ], + [ + "▁concepts", + -10.606468200683594 + ], + [ + "▁48", + -10.606489181518556 + ], + [ + "▁thread", + -10.606782913208008 + ], + [ + "▁clicking", + -10.607245445251465 + ], + [ + "▁leg", + -10.607894897460938 + ], + [ + "▁chances", + -10.608224868774414 + ], + [ + "▁tall", + -10.608994483947754 + ], + [ + "▁visitor", + -10.609146118164062 + ], + [ + "▁flooring", + -10.609163284301758 + ], + [ + "▁rural", + -10.60927963256836 + ], + [ + "ka", + -10.609773635864258 + ], + [ + "▁talks", + -10.610803604125977 + ], + [ + "▁LLC", + -10.610886573791504 + ], + [ + "▁marks", + -10.610922813415527 + ], + [ + "▁automatic", + -10.611680030822754 + ], + [ + "▁2018,", + -10.61257266998291 + ], + [ + "▁tag", + -10.613088607788086 + ], + [ + "▁intelligence", + -10.614239692687988 + ], + [ + "▁NY", + -10.614486694335938 + ], + [ + "▁generated", + -10.61463451385498 + ], + [ + "▁directed", + -10.61472511291504 + ], + [ + "▁contrast", + -10.61486530303955 + ], + [ + "▁preparing", + -10.614980697631836 + ], + [ + "▁rose", + -10.61510181427002 + ], + [ + "▁Steve", + -10.615326881408691 + ], + [ + "▁seeds", + -10.615863800048828 + ], + [ + "ING", + -10.61635684967041 + ], + [ + "▁kick", + -10.616599082946776 + ], + [ + "‘", + -10.616826057434082 + ], + [ + "ner", + -10.617989540100098 + ], + [ + "▁damaged", + -10.618398666381836 + ], + [ + "▁definition", + -10.618727684020996 + ], + [ + "▁YOU", + -10.618853569030762 + ], + [ + "▁g", + -10.619126319885254 + ], + [ + "▁Photo", + -10.619366645812988 + ], + [ + "▁outcomes", + -10.620128631591797 + ], + [ + "▁Start", + -10.620267868041992 + ], + [ + "▁brilliant", + -10.621370315551758 + ], + [ + "▁celebration", + -10.62137508392334 + ], + [ + "▁insight", + -10.62138843536377 + ], + [ + "▁semi", + -10.621689796447754 + ], + [ + "▁employer", + -10.621834754943848 + ], + [ + "▁orange", + -10.622139930725098 + ], + [ + "▁durable", + -10.62289047241211 + ], + [ + "▁150", + -10.6240816116333 + ], + [ + "▁correctly", + -10.624429702758787 + ], + [ + "▁literally", + -10.624905586242676 + ], + [ + "▁lake", + -10.625596046447754 + ], + [ + "▁focuses", + -10.625600814819336 + ], + [ + "▁Professor", + -10.626383781433104 + ], + [ + "▁Singapore", + -10.626713752746582 + ], + [ + "▁empty", + -10.626720428466797 + ], + [ + "▁exploring", + -10.626888275146484 + ], + [ + "▁closing", + -10.627373695373535 + ], + [ + "▁till", + -10.627538681030272 + ], + [ + "▁wash", + -10.627715110778809 + ], + [ + "▁expression", + -10.628057479858398 + ], + [ + "▁workshops", + -10.628067016601562 + ], + [ + "▁gaming", + -10.628779411315918 + ], + [ + "▁volunteer", + -10.628883361816406 + ], + [ + "▁refer", + -10.628893852233888 + ], + [ + "▁Word", + -10.628972053527832 + ], + [ + "▁statements", + -10.628997802734377 + ], + [ + "▁judge", + -10.62913703918457 + ], + [ + "▁patio", + -10.629243850708008 + ], + [ + "▁ceremony", + -10.630022048950195 + ], + [ + "▁concerning", + -10.630178451538086 + ], + [ + "▁Queen", + -10.63080883026123 + ], + [ + "▁aircraft", + -10.631428718566896 + ], + [ + "▁proposal", + -10.632378578186035 + ], + [ + "▁opposite", + -10.633404731750488 + ], + [ + "▁Awards", + -10.6334810256958 + ], + [ + "▁Try", + 
-10.633597373962402 + ], + [ + "class", + -10.633963584899902 + ], + [ + "▁walked", + -10.6339693069458 + ], + [ + "▁believes", + -10.635361671447754 + ], + [ + "▁regions", + -10.635456085205078 + ], + [ + "▁2007", + -10.635494232177734 + ], + [ + "▁Right", + -10.635661125183104 + ], + [ + "▁Land", + -10.635719299316406 + ], + [ + "Oh", + -10.635971069335938 + ], + [ + "▁hurt", + -10.636409759521484 + ], + [ + "▁sky", + -10.637126922607422 + ], + [ + "▁smartphone", + -10.637287139892578 + ], + [ + "▁facts", + -10.637638092041016 + ], + [ + "#", + -10.637701988220217 + ], + [ + "▁Drive", + -10.638263702392578 + ], + [ + "ting", + -10.638301849365234 + ], + [ + "▁Week", + -10.63855266571045 + ], + [ + "▁somewhat", + -10.639137268066406 + ], + [ + "▁Modern", + -10.639145851135254 + ], + [ + "▁Tour", + -10.639167785644531 + ], + [ + "over", + -10.639930725097656 + ], + [ + "▁kill", + -10.640216827392578 + ], + [ + "▁engaging", + -10.640395164489746 + ], + [ + "▁outcome", + -10.640826225280762 + ], + [ + "▁improvements", + -10.64121913909912 + ], + [ + "▁complicated", + -10.64172077178955 + ], + [ + "▁lens", + -10.64195728302002 + ], + [ + "▁packed", + -10.641983032226562 + ], + [ + "▁legislation", + -10.643993377685549 + ], + [ + "including", + -10.644725799560549 + ], + [ + "▁ex", + -10.644729614257812 + ], + [ + "▁Spain", + -10.644926071166992 + ], + [ + "▁Night", + -10.645011901855469 + ], + [ + "▁Software", + -10.645480155944824 + ], + [ + "▁Medicine", + -10.64584255218506 + ], + [ + "▁emails", + -10.645869255065918 + ], + [ + "▁satisfied", + -10.64603042602539 + ], + [ + "▁samples", + -10.646187782287598 + ], + [ + "▁Division", + -10.646336555480955 + ], + [ + "!\"", + -10.646635055541992 + ], + [ + "▁Point", + -10.6467924118042 + ], + [ + "▁Sam", + -10.648560523986816 + ], + [ + "!)", + -10.648606300354004 + ], + [ + "▁creativity", + -10.648886680603027 + ], + [ + "▁Stone", + -10.649114608764648 + ], + [ + "▁speaker", + -10.649785041809082 + ], + [ + "▁debut", + -10.650286674499512 + ], + [ + "▁container", + -10.650298118591309 + ], + [ + "▁bedrooms", + -10.650370597839355 + ], + [ + "▁ocean", + -10.650514602661133 + ], + [ + "▁lets", + -10.65114688873291 + ], + [ + "Note", + -10.651203155517578 + ], + [ + "▁upload", + -10.651323318481444 + ], + [ + "▁Asian", + -10.651461601257324 + ], + [ + "▁Avenue", + -10.65195083618164 + ], + [ + "▁evaluation", + -10.652297973632812 + ], + [ + "▁completion", + -10.652463912963867 + ], + [ + "▁$1", + -10.65275764465332 + ], + [ + "▁participation", + -10.652826309204102 + ], + [ + "▁giant", + -10.653018951416016 + ], + [ + "▁applicable", + -10.653634071350098 + ], + [ + "▁tab", + -10.65378475189209 + ], + [ + "▁asset", + -10.65419578552246 + ], + [ + "▁anyway", + -10.654824256896973 + ], + [ + "▁studying", + -10.655221939086914 + ], + [ + "▁permission", + -10.655282974243164 + ], + [ + "▁hundred", + -10.655719757080078 + ], + [ + "ier", + -10.65581512451172 + ], + [ + "▁involving", + -10.656649589538574 + ], + [ + "▁era", + -10.656657218933104 + ], + [ + "▁division", + -10.656835556030272 + ], + [ + "▁flights", + -10.656941413879396 + ], + [ + "li", + -10.657340049743652 + ], + [ + "▁carrying", + -10.657438278198242 + ], + [ + "▁afford", + -10.657477378845217 + ], + [ + "▁quotes", + -10.65794563293457 + ], + [ + "▁limits", + -10.658544540405272 + ], + [ + "▁Georgia", + -10.65860080718994 + ], + [ + "▁bills", + -10.659366607666016 + ], + [ + "▁residence", + -10.659894943237305 + ], + [ + "▁PA", + -10.660440444946287 + ], + [ + "▁incident", + 
-10.66086769104004 + ], + [ + "▁cotton", + -10.661608695983888 + ], + [ + "▁suggests", + -10.661630630493164 + ], + [ + "▁2017,", + -10.662229537963867 + ], + [ + "▁loving", + -10.66265869140625 + ], + [ + "▁heads", + -10.662839889526367 + ], + [ + "▁follows", + -10.663047790527344 + ], + [ + "down", + -10.663116455078123 + ], + [ + "▁stronger", + -10.6631441116333 + ], + [ + "▁prize", + -10.663562774658203 + ], + [ + "▁400", + -10.663569450378418 + ], + [ + "▁texture", + -10.663857460021973 + ], + [ + "▁specialists", + -10.664311408996582 + ], + [ + "▁focusing", + -10.664499282836914 + ], + [ + "▁acts", + -10.664515495300291 + ], + [ + "▁Gallery", + -10.665125846862791 + ], + [ + "▁Hope", + -10.66536808013916 + ], + [ + "▁introduce", + -10.66635513305664 + ], + [ + "▁humans", + -10.666481971740724 + ], + [ + "▁Kingdom", + -10.667165756225586 + ], + [ + "▁association", + -10.667208671569824 + ], + [ + "▁Happy", + -10.667256355285645 + ], + [ + "▁maintained", + -10.667256355285645 + ], + [ + "▁HP", + -10.667304039001465 + ], + [ + "[", + -10.667322158813477 + ], + [ + "▁pocket", + -10.667638778686523 + ], + [ + "ni", + -10.66782283782959 + ], + [ + "▁tone", + -10.667916297912598 + ], + [ + "led", + -10.668039321899414 + ], + [ + "inch", + -10.668110847473145 + ], + [ + "▁attending", + -10.66855525970459 + ], + [ + "▁Space", + -10.669286727905272 + ], + [ + "made", + -10.670013427734377 + ], + [ + "▁demands", + -10.670193672180176 + ], + [ + "▁meets", + -10.670223236083984 + ], + [ + "▁zero", + -10.67048168182373 + ], + [ + "▁invite", + -10.670572280883787 + ], + [ + "▁Smart", + -10.670660972595217 + ], + [ + "▁riding", + -10.670987129211426 + ], + [ + "▁arrival", + -10.671014785766602 + ], + [ + "▁indoor", + -10.671311378479004 + ], + [ + "▁pounds", + -10.67210292816162 + ], + [ + "▁gained", + -10.672361373901367 + ], + [ + "▁checked", + -10.67276382446289 + ], + [ + "▁faces", + -10.673048973083496 + ], + [ + "▁Samsung", + -10.673422813415527 + ], + [ + "▁Three", + -10.673471450805664 + ], + [ + "style", + -10.673666954040527 + ], + [ + "▁greatly", + -10.673772811889648 + ], + [ + "▁stretch", + -10.673794746398926 + ], + [ + "▁controls", + -10.674091339111328 + ], + [ + "▁packaging", + -10.674156188964844 + ], + [ + "▁objective", + -10.674701690673828 + ], + [ + "▁organisations", + -10.675152778625488 + ], + [ + "▁extend", + -10.675329208374023 + ], + [ + "▁liability", + -10.67566204071045 + ], + [ + "▁graphic", + -10.675798416137695 + ], + [ + "▁colleagues", + -10.675853729248049 + ], + [ + "▁highlight", + -10.676369667053224 + ], + [ + "▁Army", + -10.677103996276855 + ], + [ + "ED", + -10.677179336547852 + ], + [ + "▁king", + -10.677638053894045 + ], + [ + "▁Dan", + -10.67774486541748 + ], + [ + "▁Line", + -10.678696632385254 + ], + [ + "▁Studio", + -10.679057121276855 + ], + [ + "▁wiring", + -10.67924976348877 + ], + [ + "▁explains", + -10.679742813110352 + ], + [ + "▁transactions", + -10.679986953735352 + ], + [ + "▁MP", + -10.680339813232422 + ], + [ + "▁hasn", + -10.680401802062988 + ], + [ + "▁concert", + -10.680466651916504 + ], + [ + "▁Disney", + -10.680575370788574 + ], + [ + "▁promotion", + -10.680981636047363 + ], + [ + "▁lowest", + -10.681042671203612 + ], + [ + "▁arrange", + -10.681452751159668 + ], + [ + "▁tournament", + -10.681825637817385 + ], + [ + "▁Steel", + -10.681979179382324 + ], + [ + "▁Charles", + -10.682185173034668 + ], + [ + "▁speakers", + -10.68246841430664 + ], + [ + "▁extent", + -10.682626724243164 + ], + [ + "▁publication", + -10.682787895202637 + ], + [ + 
"▁Country", + -10.683152198791504 + ], + [ + "▁floors", + -10.684103965759276 + ], + [ + "▁unexpected", + -10.684185028076172 + ], + [ + "▁instant", + -10.684367179870604 + ], + [ + "▁Did", + -10.684475898742676 + ], + [ + "▁Travel", + -10.684568405151367 + ], + [ + "Use", + -10.68493938446045 + ], + [ + "▁inventory", + -10.685199737548828 + ], + [ + "▁approaches", + -10.68570327758789 + ], + [ + "▁Village", + -10.68575382232666 + ], + [ + "▁grand", + -10.686052322387695 + ], + [ + "▁stylish", + -10.686386108398438 + ], + [ + "▁debate", + -10.686400413513184 + ], + [ + "▁Image", + -10.686717987060549 + ], + [ + "▁pulled", + -10.687066078186035 + ], + [ + "▁Additionally", + -10.687715530395508 + ], + [ + "site", + -10.688447952270508 + ], + [ + "▁safely", + -10.688804626464844 + ], + [ + "▁Rights", + -10.688858032226562 + ], + [ + "▁passionate", + -10.689047813415527 + ], + [ + "ter", + -10.689189910888672 + ], + [ + "▁trusted", + -10.689668655395508 + ], + [ + "▁hate", + -10.689672470092772 + ], + [ + "▁sight", + -10.68992519378662 + ], + [ + "▁scientists", + -10.689949035644531 + ], + [ + "▁addresses", + -10.69002628326416 + ], + [ + "▁Station", + -10.690162658691406 + ], + [ + "▁flying", + -10.69087028503418 + ], + [ + "▁mile", + -10.691554069519045 + ], + [ + "new", + -10.692220687866213 + ], + [ + "▁drawn", + -10.69282054901123 + ], + [ + "▁diseases", + -10.692840576171877 + ], + [ + "▁deeper", + -10.692867279052734 + ], + [ + "ine", + -10.693968772888184 + ], + [ + "em", + -10.694890022277832 + ], + [ + "▁politics", + -10.694966316223145 + ], + [ + "▁roads", + -10.695066452026367 + ], + [ + "▁Square", + -10.69516658782959 + ], + [ + "hour", + -10.69527816772461 + ], + [ + "▁iPad", + -10.695478439331056 + ], + [ + "▁scores", + -10.695552825927734 + ], + [ + "▁sat", + -10.695908546447754 + ], + [ + "▁exposed", + -10.695935249328612 + ], + [ + "▁sections", + -10.696622848510742 + ], + [ + "▁2014.", + -10.696694374084473 + ], + [ + "▁Cross", + -10.696918487548828 + ], + [ + "▁Unfortunately", + -10.697247505187988 + ], + [ + "ur", + -10.69732666015625 + ], + [ + "▁contractors", + -10.697406768798828 + ], + [ + "▁painted", + -10.697420120239258 + ], + [ + "▁stages", + -10.697545051574709 + ], + [ + "▁bird", + -10.697635650634766 + ], + [ + "▁suite", + -10.69791030883789 + ], + [ + "▁string", + -10.698087692260742 + ], + [ + "Take", + -10.69808864593506 + ], + [ + "▁necessarily", + -10.698519706726074 + ], + [ + "▁Safety", + -10.69882106781006 + ], + [ + "▁gap", + -10.699419021606444 + ], + [ + "▁acting", + -10.699989318847656 + ], + [ + "▁icon", + -10.700318336486816 + ], + [ + "▁di", + -10.70058250427246 + ], + [ + "▁entering", + -10.701278686523438 + ], + [ + "▁rising", + -10.70233917236328 + ], + [ + "▁talented", + -10.702398300170898 + ], + [ + "▁illness", + -10.70273494720459 + ], + [ + "▁signature", + -10.702889442443848 + ], + [ + "▁index", + -10.703362464904783 + ], + [ + "▁Williams", + -10.70355987548828 + ], + [ + "▁jewelry", + -10.70359992980957 + ], + [ + "▁brush", + -10.70361328125 + ], + [ + "▁engines", + -10.703656196594238 + ], + [ + "▁hanging", + -10.703770637512209 + ], + [ + "▁Prime", + -10.703839302062988 + ], + [ + "▁Solutions", + -10.704187393188477 + ], + [ + "▁faced", + -10.704211235046388 + ], + [ + "▁storm", + -10.704489707946776 + ], + [ + "▁exterior", + -10.705028533935549 + ], + [ + "▁massage", + -10.70506191253662 + ], + [ + "▁harder", + -10.705745697021484 + ], + [ + "▁Silver", + -10.70620059967041 + ], + [ + "▁Come", + -10.706262588500977 + ], + [ + 
"▁supplier", + -10.707603454589844 + ], + [ + "▁lists", + -10.708943367004396 + ], + [ + "▁injured", + -10.709942817687988 + ], + [ + "how", + -10.71046543121338 + ], + [ + "▁coloring", + -10.7106351852417 + ], + [ + "▁Senior", + -10.710648536682127 + ], + [ + "▁participating", + -10.710790634155272 + ], + [ + "▁marked", + -10.711299896240234 + ], + [ + "▁Van", + -10.71144676208496 + ], + [ + "▁twenty", + -10.71152687072754 + ], + [ + "▁criteria", + -10.711776733398438 + ], + [ + "▁fabulous", + -10.71196460723877 + ], + [ + "▁DVD", + -10.71214771270752 + ], + [ + "▁founder", + -10.712217330932615 + ], + [ + "▁heading", + -10.712515830993652 + ], + [ + "long", + -10.712602615356444 + ], + [ + "▁falling", + -10.712980270385742 + ], + [ + "▁Being", + -10.713264465332031 + ], + [ + "▁Order", + -10.713577270507812 + ], + [ + "▁OS", + -10.714224815368652 + ], + [ + "▁ad", + -10.714431762695312 + ], + [ + "▁Agency", + -10.714437484741213 + ], + [ + "▁accounting", + -10.714740753173828 + ], + [ + "15", + -10.71489429473877 + ], + [ + "▁Report", + -10.71553897857666 + ], + [ + "▁telephone", + -10.715951919555664 + ], + [ + "▁Port", + -10.715965270996094 + ], + [ + "▁grab", + -10.716500282287598 + ], + [ + "▁responses", + -10.716601371765137 + ], + [ + "▁Toronto", + -10.71675968170166 + ], + [ + "▁lies", + -10.717087745666504 + ], + [ + "▁dual", + -10.717123031616213 + ], + [ + "▁accommodate", + -10.717124938964844 + ], + [ + "▁productivity", + -10.717279434204102 + ], + [ + "▁Creek", + -10.71731948852539 + ], + [ + "▁temporary", + -10.717555046081545 + ], + [ + "▁insights", + -10.717928886413574 + ], + [ + "▁bear", + -10.718358993530272 + ], + [ + "Before", + -10.71865940093994 + ], + [ + "ri", + -10.718764305114746 + ], + [ + "▁operated", + -10.71932888031006 + ], + [ + "▁yards", + -10.720011711120604 + ], + [ + "▁alive", + -10.72002410888672 + ], + [ + "▁breaking", + -10.720067024230955 + ], + [ + "▁requested", + -10.721075057983398 + ], + [ + "▁Natural", + -10.721936225891112 + ], + [ + "▁environments", + -10.722494125366213 + ], + [ + "▁contest", + -10.722559928894045 + ], + [ + "▁Fort", + -10.722860336303713 + ], + [ + "▁anymore", + -10.72304916381836 + ], + [ + "▁viewing", + -10.723069190979004 + ], + [ + "▁stations", + -10.72311782836914 + ], + [ + "▁utilize", + -10.72311782836914 + ], + [ + "▁rubber", + -10.723246574401855 + ], + [ + "Over", + -10.723394393920898 + ], + [ + "▁nights", + -10.723640441894531 + ], + [ + "▁headed", + -10.72374153137207 + ], + [ + "▁hiring", + -10.72485065460205 + ], + [ + "▁rid", + -10.724936485290527 + ], + [ + "▁indicate", + -10.725193977355955 + ], + [ + "▁FOR", + -10.725237846374512 + ], + [ + "▁Way", + -10.725564002990724 + ], + [ + "▁mood", + -10.72586154937744 + ], + [ + "▁council", + -10.725879669189451 + ], + [ + "▁Rose", + -10.726195335388184 + ], + [ + "▁afraid", + -10.726713180541992 + ], + [ + "▁desktop", + -10.728060722351074 + ], + [ + "ty", + -10.728540420532228 + ], + [ + "▁Limited", + -10.72863483428955 + ], + [ + "▁Repair", + -10.728673934936523 + ], + [ + "Add", + -10.72881031036377 + ], + [ + "▁assume", + -10.729053497314451 + ], + [ + "30", + -10.729430198669434 + ], + [ + "▁personnel", + -10.729562759399414 + ], + [ + "▁investments", + -10.73012638092041 + ], + [ + "▁gym", + -10.730127334594728 + ], + [ + "▁Mill", + -10.732006072998049 + ], + [ + "▁coat", + -10.732152938842772 + ], + [ + "▁Second", + -10.73224925994873 + ], + [ + "▁largely", + -10.732428550720217 + ], + [ + "▁returning", + -10.732911109924316 + ], + [ + "▁Key", + 
-10.733010292053224 + ], + [ + "▁coaching", + -10.73301124572754 + ], + [ + "▁dollar", + -10.733107566833496 + ], + [ + "▁inspection", + -10.733163833618164 + ], + [ + "▁estimate", + -10.733386993408203 + ], + [ + "▁Studies", + -10.733470916748049 + ], + [ + "▁venture", + -10.733770370483398 + ], + [ + "▁Yet", + -10.734051704406738 + ], + [ + "▁computers", + -10.73415470123291 + ], + [ + "▁Britain", + -10.735527992248535 + ], + [ + "▁hang", + -10.735552787780762 + ], + [ + "▁strongly", + -10.73603057861328 + ], + [ + "▁contribution", + -10.736742973327637 + ], + [ + "▁workplace", + -10.736902236938477 + ], + [ + "Every", + -10.737006187438965 + ], + [ + "▁36", + -10.737141609191896 + ], + [ + "▁gender", + -10.737433433532717 + ], + [ + "▁talked", + -10.737834930419922 + ], + [ + "known", + -10.737957000732422 + ], + [ + "▁Spirit", + -10.739662170410156 + ], + [ + "▁representative", + -10.739786148071287 + ], + [ + "▁hits", + -10.739832878112791 + ], + [ + "Where", + -10.739890098571776 + ], + [ + "▁holes", + -10.741108894348145 + ], + [ + "▁laser", + -10.741477012634276 + ], + [ + "6.", + -10.74150848388672 + ], + [ + "▁employers", + -10.74237823486328 + ], + [ + "▁golden", + -10.742393493652344 + ], + [ + "▁networking", + -10.74258804321289 + ], + [ + "▁abroad", + -10.742648124694824 + ], + [ + "▁attract", + -10.742894172668455 + ], + [ + "▁farmers", + -10.743115425109863 + ], + [ + "▁module", + -10.7432279586792 + ], + [ + "▁Made", + -10.743271827697754 + ], + [ + "▁Vegas", + -10.743288040161133 + ], + [ + "▁beds", + -10.743539810180664 + ], + [ + "▁Corporation", + -10.74356174468994 + ], + [ + "▁diversity", + -10.74382495880127 + ], + [ + "▁objectives", + -10.743867874145508 + ], + [ + "▁wondering", + -10.744139671325684 + ], + [ + "▁promoting", + -10.744181632995604 + ], + [ + "▁portable", + -10.744373321533203 + ], + [ + "▁relations", + -10.745162010192873 + ], + [ + "11", + -10.746006965637209 + ], + [ + "▁adjust", + -10.746058464050291 + ], + [ + "▁importantly", + -10.74624252319336 + ], + [ + "▁Should", + -10.746384620666504 + ], + [ + "▁secondary", + -10.746530532836914 + ], + [ + "▁obviously", + -10.746634483337402 + ], + [ + "▁developer", + -10.746724128723145 + ], + [ + "▁subjects", + -10.746807098388672 + ], + [ + "▁bunch", + -10.746930122375488 + ], + [ + "▁diagram", + -10.747359275817873 + ], + [ + "▁advantages", + -10.747811317443848 + ], + [ + "▁temperatures", + -10.747892379760742 + ], + [ + "▁sofa", + -10.747901916503906 + ], + [ + "▁servers", + -10.74799633026123 + ], + [ + "▁grass", + -10.748601913452148 + ], + [ + "more", + -10.748614311218262 + ], + [ + "▁reaching", + -10.748719215393066 + ], + [ + "au", + -10.748769760131836 + ], + [ + "▁Rs", + -10.74973201751709 + ], + [ + "▁anniversary", + -10.749737739562988 + ], + [ + "▁illegal", + -10.75010585784912 + ], + [ + "▁Oh", + -10.750643730163574 + ], + [ + "▁Such", + -10.750751495361328 + ], + [ + "▁prayer", + -10.751029014587402 + ], + [ + "▁globe", + -10.751100540161133 + ], + [ + "▁Jewish", + -10.75149917602539 + ], + [ + "▁buyer", + -10.751628875732422 + ], + [ + "▁documentation", + -10.751691818237305 + ], + [ + "Whether", + -10.75169849395752 + ], + [ + "▁Holy", + -10.751842498779297 + ], + [ + "▁Private", + -10.751901626586914 + ], + [ + "▁flour", + -10.752375602722168 + ], + [ + "▁Looking", + -10.75244140625 + ], + [ + "▁Administration", + -10.752635955810549 + ], + [ + "▁visits", + -10.75271987915039 + ], + [ + "▁reserved", + -10.752869606018066 + ], + [ + "▁assigned", + -10.75316333770752 + ], + [ + 
"▁contained", + -10.753254890441896 + ], + [ + "▁wanting", + -10.753559112548828 + ], + [ + "▁charity", + -10.753568649291992 + ], + [ + "▁occurs", + -10.753761291503906 + ], + [ + "▁seed", + -10.753929138183594 + ], + [ + "See", + -10.754009246826172 + ], + [ + "▁van", + -10.75413990020752 + ], + [ + "▁scope", + -10.754376411437988 + ], + [ + "▁ensures", + -10.75444793701172 + ], + [ + "▁consistently", + -10.754569053649902 + ], + [ + "▁tooth", + -10.75462245941162 + ], + [ + "▁Estate", + -10.755144119262695 + ], + [ + "▁statistics", + -10.755220413208008 + ], + [ + "▁literature", + -10.75527000427246 + ], + [ + "▁sufficient", + -10.755293846130373 + ], + [ + "▁2016,", + -10.75530242919922 + ], + [ + "▁ranging", + -10.755362510681152 + ], + [ + "▁initiatives", + -10.755374908447266 + ], + [ + "▁sick", + -10.755382537841797 + ], + [ + "▁Wi", + -10.755518913269045 + ], + [ + "▁rapidly", + -10.75637435913086 + ], + [ + "▁random", + -10.75651741027832 + ], + [ + "▁equally", + -10.7566556930542 + ], + [ + "▁excess", + -10.757844924926758 + ], + [ + "▁Eastern", + -10.758016586303713 + ], + [ + "▁Capital", + -10.75808048248291 + ], + [ + "▁accuracy", + -10.758085250854492 + ], + [ + "▁layers", + -10.758382797241213 + ], + [ + "▁sink", + -10.758596420288086 + ], + [ + "▁o", + -10.758715629577637 + ], + [ + "io", + -10.758767127990724 + ], + [ + "):", + -10.758957862854004 + ], + [ + "▁announce", + -10.759089469909668 + ], + [ + "▁Thus", + -10.759392738342283 + ], + [ + "▁wet", + -10.759576797485352 + ], + [ + "▁script", + -10.759716987609863 + ], + [ + "▁themes", + -10.75992202758789 + ], + [ + "▁pepper", + -10.760601043701172 + ], + [ + "▁persons", + -10.76100730895996 + ], + [ + "▁ending", + -10.761249542236328 + ], + [ + "▁implemented", + -10.761585235595703 + ], + [ + "▁childhood", + -10.761667251586914 + ], + [ + "50", + -10.76181697845459 + ], + [ + "▁Collection", + -10.761982917785645 + ], + [ + "▁branch", + -10.761987686157228 + ], + [ + "▁Sydney", + -10.762084007263184 + ], + [ + "▁racing", + -10.762110710144045 + ], + [ + "▁Test", + -10.762325286865234 + ], + [ + "▁absolute", + -10.762373924255373 + ], + [ + "▁Main", + -10.762580871582031 + ], + [ + "▁Though", + -10.762625694274902 + ], + [ + "▁Hi", + -10.763046264648438 + ], + [ + "▁cameras", + -10.763407707214355 + ], + [ + "▁Forest", + -10.763453483581545 + ], + [ + "▁demonstrate", + -10.763826370239258 + ], + [ + "▁Jim", + -10.764293670654297 + ], + [ + "▁characteristics", + -10.76513957977295 + ], + [ + "▁dad", + -10.7652587890625 + ], + [ + "▁Sales", + -10.765839576721191 + ], + [ + "GB", + -10.76595687866211 + ], + [ + "▁configuration", + -10.766098976135254 + ], + [ + "▁highlights", + -10.766199111938477 + ], + [ + "▁pursue", + -10.766366958618164 + ], + [ + "▁designing", + -10.766786575317385 + ], + [ + "se", + -10.76684284210205 + ], + [ + "▁Justice", + -10.766843795776367 + ], + [ + "▁define", + -10.76719570159912 + ], + [ + "▁edit", + -10.767349243164062 + ], + [ + "making", + -10.768001556396484 + ], + [ + "▁findings", + -10.768072128295898 + ], + [ + "▁expressed", + -10.768463134765623 + ], + [ + "▁initially", + -10.768702507019045 + ], + [ + "▁discounts", + -10.768966674804688 + ], + [ + "▁Learning", + -10.768982887268066 + ], + [ + "▁Radio", + -10.769279479980469 + ], + [ + "ft", + -10.769309043884276 + ], + [ + "▁genuine", + -10.769545555114746 + ], + [ + "▁Cloud", + -10.769644737243652 + ], + [ + "▁inform", + -10.769890785217283 + ], + [ + "▁boards", + -10.770068168640137 + ], + [ + "▁confirm", + -10.770925521850586 + 
], + [ + "▁tape", + -10.771148681640623 + ], + [ + "▁languages", + -10.771234512329102 + ], + [ + "▁inspire", + -10.771315574645996 + ], + [ + "▁Irish", + -10.771522521972656 + ], + [ + "▁coupon", + -10.771930694580078 + ], + [ + "▁appointed", + -10.771957397460938 + ], + [ + "▁Crusher", + -10.772443771362305 + ], + [ + "▁Remember", + -10.772945404052734 + ], + [ + "▁breath", + -10.774706840515137 + ], + [ + "▁lay", + -10.775242805480955 + ], + [ + "▁muscles", + -10.775636672973633 + ], + [ + "▁smoke", + -10.775882720947266 + ], + [ + "Mr", + -10.776022911071776 + ], + [ + "▁reveal", + -10.7760648727417 + ], + [ + "▁wallpaper", + -10.776177406311035 + ], + [ + "Fi", + -10.776543617248535 + ], + [ + "▁laid", + -10.776715278625488 + ], + [ + "▁Republic", + -10.776774406433104 + ], + [ + "▁bone", + -10.776799201965332 + ], + [ + "▁clock", + -10.77708339691162 + ], + [ + "▁fence", + -10.777134895324709 + ], + [ + "▁compatible", + -10.777229309082031 + ], + [ + "и", + -10.777236938476562 + ], + [ + "▁unknown", + -10.77725601196289 + ], + [ + "▁transmission", + -10.777437210083008 + ], + [ + "▁interaction", + -10.77748680114746 + ], + [ + "Find", + -10.777777671813965 + ], + [ + "▁sad", + -10.777936935424805 + ], + [ + "▁spray", + -10.778529167175291 + ], + [ + "▁containing", + -10.778823852539062 + ], + [ + "▁hall", + -10.77882480621338 + ], + [ + "▁suffer", + -10.778928756713867 + ], + [ + "▁minimal", + -10.778970718383787 + ], + [ + "▁escape", + -10.779601097106934 + ], + [ + "▁campaigns", + -10.779783248901367 + ], + [ + "▁relaxing", + -10.779874801635742 + ], + [ + "▁bond", + -10.779898643493652 + ], + [ + "ya", + -10.78053092956543 + ], + [ + "▁iOS", + -10.780726432800291 + ], + [ + "▁customized", + -10.780863761901855 + ], + [ + "▁deeply", + -10.780996322631836 + ], + [ + "▁Him", + -10.781039237976074 + ], + [ + "▁tissue", + -10.78111457824707 + ], + [ + "▁watched", + -10.781349182128906 + ], + [ + "▁studied", + -10.78141975402832 + ], + [ + "▁equivalent", + -10.782426834106444 + ], + [ + "life", + -10.782732009887695 + ], + [ + "▁stability", + -10.78290557861328 + ], + [ + "▁Kong", + -10.783475875854492 + ], + [ + "▁depression", + -10.783824920654297 + ], + [ + "▁narrow", + -10.784018516540527 + ], + [ + "▁contributions", + -10.784384727478027 + ], + [ + "▁Senate", + -10.78484344482422 + ], + [ + "▁authentic", + -10.785234451293944 + ], + [ + "▁rolling", + -10.785468101501465 + ], + [ + "▁Force", + -10.785674095153809 + ], + [ + "▁directory", + -10.785731315612791 + ], + [ + "▁mm", + -10.785748481750488 + ], + [ + "▁tap", + -10.78582763671875 + ], + [ + "▁acquired", + -10.785849571228027 + ], + [ + "▁Website", + -10.786210060119627 + ], + [ + "▁tie", + -10.78637981414795 + ], + [ + "▁represented", + -10.786625862121582 + ], + [ + "▁mistakes", + -10.786905288696287 + ], + [ + "▁amongst", + -10.787223815917969 + ], + [ + "▁LA", + -10.787871360778809 + ], + [ + "▁nursing", + -10.788275718688965 + ], + [ + "▁continuous", + -10.788939476013184 + ], + [ + "▁Annual", + -10.788941383361816 + ], + [ + "▁templates", + -10.789116859436035 + ], + [ + "▁officially", + -10.78925609588623 + ], + [ + "▁salad", + -10.789468765258787 + ], + [ + "▁performances", + -10.789777755737305 + ], + [ + "▁maps", + -10.78997039794922 + ], + [ + "▁matching", + -10.790207862854004 + ], + [ + "▁emotions", + -10.7907133102417 + ], + [ + "▁belt", + -10.791053771972656 + ], + [ + "▁spectacular", + -10.791644096374512 + ], + [ + "▁MA", + -10.79167938232422 + ], + [ + "▁cooked", + -10.791714668273926 + ], + [ + "▁prime", + 
-10.791787147521973 + ], + [ + "▁grateful", + -10.792244911193848 + ], + [ + "▁folder", + -10.792275428771973 + ], + [ + "▁WordPress", + -10.792293548583984 + ], + [ + "▁sharp", + -10.792476654052734 + ], + [ + "▁peak", + -10.7925386428833 + ], + [ + "▁50%", + -10.792993545532228 + ], + [ + "▁resolve", + -10.793241500854492 + ], + [ + "▁Catholic", + -10.79417610168457 + ], + [ + "▁possibilities", + -10.794286727905272 + ], + [ + "▁granted", + -10.794466018676758 + ], + [ + "▁beautifully", + -10.794791221618652 + ], + [ + "▁nuclear", + -10.794989585876465 + ], + [ + "▁slot", + -10.795073509216309 + ], + [ + "▁fifth", + -10.795217514038086 + ], + [ + "▁southern", + -10.79534149169922 + ], + [ + "▁attitude", + -10.7957124710083 + ], + [ + "▁directions", + -10.795713424682615 + ], + [ + "▁Box", + -10.796185493469238 + ], + [ + "▁locally", + -10.796457290649414 + ], + [ + "home", + -10.796745300292969 + ], + [ + "▁draft", + -10.796910285949709 + ], + [ + "▁mountains", + -10.796920776367188 + ], + [ + "▁Finally", + -10.796927452087402 + ], + [ + "▁CO", + -10.79706859588623 + ], + [ + "▁2.0", + -10.797269821166992 + ], + [ + "▁Still", + -10.797465324401855 + ], + [ + "▁Houston", + -10.797468185424805 + ], + [ + "▁keys", + -10.797618865966797 + ], + [ + "▁currency", + -10.798040390014648 + ], + [ + "ey", + -10.798066139221191 + ], + [ + "▁Matt", + -10.798324584960938 + ], + [ + "▁tours", + -10.798418998718262 + ], + [ + "▁operational", + -10.79843521118164 + ], + [ + "▁reception", + -10.79843807220459 + ], + [ + "▁Help", + -10.798639297485352 + ], + [ + "▁contracts", + -10.798771858215332 + ], + [ + "Go", + -10.79893970489502 + ], + [ + "▁Arizona", + -10.799083709716797 + ], + [ + "▁Las", + -10.79914665222168 + ], + [ + "▁3-", + -10.79936695098877 + ], + [ + "▁IS", + -10.799567222595217 + ], + [ + "▁Film", + -10.800073623657228 + ], + [ + "▁List", + -10.80020523071289 + ], + [ + "▁Bob", + -10.800264358520508 + ], + [ + "▁occurred", + -10.80067253112793 + ], + [ + "▁sought", + -10.800830841064451 + ], + [ + "▁Server", + -10.800832748413086 + ], + [ + "▁professor", + -10.800877571105955 + ], + [ + "▁emerging", + -10.80093765258789 + ], + [ + "▁Korea", + -10.800943374633787 + ], + [ + "▁situated", + -10.80106258392334 + ], + [ + "▁calm", + -10.801481246948242 + ], + [ + "▁garlic", + -10.801895141601562 + ], + [ + "▁remained", + -10.80189609527588 + ], + [ + "▁personalized", + -10.802593231201172 + ], + [ + "▁expanded", + -10.802803993225098 + ], + [ + "den", + -10.802833557128906 + ], + [ + "▁wake", + -10.802947998046877 + ], + [ + "▁claimed", + -10.802963256835938 + ], + [ + "▁curriculum", + -10.803338050842283 + ], + [ + "▁breast", + -10.80355167388916 + ], + [ + "ko", + -10.804247856140137 + ], + [ + "▁abilities", + -10.804376602172852 + ], + [ + "▁resident", + -10.804763793945312 + ], + [ + "▁displays", + -10.804964065551758 + ], + [ + "▁assistant", + -10.80534839630127 + ], + [ + "▁Theatre", + -10.805438041687012 + ], + [ + "▁amenities", + -10.805697441101074 + ], + [ + "▁plane", + -10.805941581726074 + ], + [ + "▁Scotland", + -10.806941032409668 + ], + [ + "▁relative", + -10.807082176208496 + ], + [ + "▁tired", + -10.807321548461914 + ], + [ + "▁pitch", + -10.807865142822266 + ], + [ + "▁targeted", + -10.807988166809082 + ], + [ + "▁Mount", + -10.808037757873535 + ], + [ + "don", + -10.808053970336914 + ], + [ + "▁eliminate", + -10.808600425720217 + ], + [ + "▁investing", + -10.808605194091797 + ], + [ + "▁shade", + -10.809415817260742 + ], + [ + "▁ratio", + -10.809927940368652 + ], + [ + 
"▁pets", + -10.810216903686523 + ], + [ + "▁spacious", + -10.810373306274414 + ], + [ + "▁decent", + -10.81042766571045 + ], + [ + "▁End", + -10.810661315917969 + ], + [ + "▁Save", + -10.810970306396484 + ], + [ + "▁Farm", + -10.81103801727295 + ], + [ + "▁Andrew", + -10.811461448669434 + ], + [ + "Who", + -10.81151008605957 + ], + [ + "▁regard", + -10.81162452697754 + ], + [ + "▁sheets", + -10.811847686767578 + ], + [ + "ng", + -10.812345504760742 + ], + [ + "▁(1", + -10.812637329101562 + ], + [ + "▁Products", + -10.812646865844728 + ], + [ + "▁depend", + -10.812782287597656 + ], + [ + "profit", + -10.813133239746094 + ], + [ + "▁existence", + -10.81340503692627 + ], + [ + "▁reaction", + -10.81385612487793 + ], + [ + "▁representatives", + -10.814026832580566 + ], + [ + "▁wave", + -10.814653396606444 + ], + [ + "▁struggling", + -10.814682006835938 + ], + [ + "▁Policy", + -10.814836502075195 + ], + [ + "▁roots", + -10.815207481384276 + ], + [ + "▁shapes", + -10.81610107421875 + ], + [ + "tech", + -10.81626796722412 + ], + [ + "▁Golden", + -10.816509246826172 + ], + [ + "▁loose", + -10.816716194152832 + ], + [ + "point", + -10.816967964172363 + ], + [ + "▁Easy", + -10.817758560180664 + ], + [ + "Good", + -10.817837715148926 + ], + [ + "▁Illinois", + -10.817856788635254 + ], + [ + "▁opens", + -10.81794548034668 + ], + [ + "▁raising", + -10.817973136901855 + ], + [ + "▁Ministry", + -10.81832790374756 + ], + [ + "▁Kids", + -10.818727493286133 + ], + [ + "▁assess", + -10.818987846374512 + ], + [ + "▁printer", + -10.819188117980955 + ], + [ + "▁compact", + -10.819290161132812 + ], + [ + "▁philosophy", + -10.819321632385254 + ], + [ + "▁efficiently", + -10.819644927978516 + ], + [ + "▁compete", + -10.819808959960938 + ], + [ + "▁grey", + -10.819926261901855 + ], + [ + "▁UN", + -10.821037292480469 + ], + [ + "▁finishing", + -10.821263313293455 + ], + [ + "▁medication", + -10.821738243103027 + ], + [ + "▁rapid", + -10.821927070617676 + ], + [ + "▁Hot", + -10.822247505187988 + ], + [ + "▁vibrant", + -10.822517395019531 + ], + [ + "▁convert", + -10.822614669799805 + ], + [ + "▁titles", + -10.823226928710938 + ], + [ + "▁Golf", + -10.823284149169922 + ], + [ + "▁tons", + -10.823463439941406 + ], + [ + "▁delivers", + -10.823610305786133 + ], + [ + "▁unusual", + -10.823680877685549 + ], + [ + "Because", + -10.823711395263672 + ], + [ + "▁lab", + -10.823869705200195 + ], + [ + "▁Officer", + -10.8240327835083 + ], + [ + "from", + -10.824376106262209 + ], + [ + "▁gather", + -10.824895858764648 + ], + [ + "▁wheels", + -10.825181007385254 + ], + [ + "▁fundamental", + -10.825274467468262 + ], + [ + "star", + -10.82571506500244 + ], + [ + "▁institution", + -10.826245307922363 + ], + [ + "▁toilet", + -10.826406478881836 + ], + [ + "▁enforcement", + -10.826695442199709 + ], + [ + "▁weren", + -10.826786994934082 + ], + [ + "▁Glass", + -10.82690143585205 + ], + [ + "▁formula", + -10.827661514282228 + ], + [ + "▁Access", + -10.828120231628418 + ], + [ + "▁scenes", + -10.828182220458984 + ], + [ + "▁Product", + -10.828368186950684 + ], + [ + "▁examination", + -10.828661918640137 + ], + [ + "▁pin", + -10.828779220581056 + ], + [ + "▁equity", + -10.828871726989746 + ], + [ + "▁plot", + -10.828882217407228 + ], + [ + "▁AC", + -10.82940673828125 + ], + [ + "ful", + -10.829447746276855 + ], + [ + "winning", + -10.82957649230957 + ], + [ + "▁conversion", + -10.83026123046875 + ], + [ + "▁sophisticated", + -10.831164360046388 + ], + [ + "▁involve", + -10.831231117248535 + ], + [ + "▁reserve", + -10.831392288208008 + ], + [ 
+ "▁Joseph", + -10.83152675628662 + ], + [ + "▁Join", + -10.83211898803711 + ], + [ + "▁experiencing", + -10.832136154174805 + ], + [ + "▁consequences", + -10.832341194152832 + ], + [ + "▁Hong", + -10.832542419433594 + ], + [ + "▁contractor", + -10.83268928527832 + ], + [ + "▁shoulder", + -10.83289909362793 + ], + [ + "▁Credit", + -10.832974433898926 + ], + [ + "▁banking", + -10.833065032958984 + ], + [ + "▁collections", + -10.8330717086792 + ], + [ + "▁Without", + -10.833429336547852 + ], + [ + "▁Camp", + -10.833441734313965 + ], + [ + "▁mistake", + -10.833758354187012 + ], + [ + "▁Dec", + -10.833970069885254 + ], + [ + "▁aluminum", + -10.834062576293944 + ], + [ + "▁wider", + -10.834239959716797 + ], + [ + "▁plain", + -10.834291458129885 + ], + [ + "▁Winter", + -10.834470748901367 + ], + [ + "▁cheaper", + -10.83461570739746 + ], + [ + "▁suffered", + -10.834704399108888 + ], + [ + "▁bench", + -10.83477783203125 + ], + [ + "▁Visit", + -10.834904670715332 + ], + [ + "▁Fair", + -10.83498191833496 + ], + [ + "▁entitled", + -10.835002899169922 + ], + [ + "▁Nov", + -10.83513641357422 + ], + [ + "▁intense", + -10.835457801818848 + ], + [ + "▁flash", + -10.835785865783691 + ], + [ + "US", + -10.835920333862305 + ], + [ + "▁clubs", + -10.835987091064451 + ], + [ + "▁warning", + -10.836018562316896 + ], + [ + "▁specialized", + -10.836931228637695 + ], + [ + "▁Secretary", + -10.837224006652832 + ], + [ + "▁vendors", + -10.837421417236328 + ], + [ + "▁rated", + -10.837451934814451 + ], + [ + "▁restore", + -10.83747673034668 + ], + [ + "▁beaches", + -10.83786392211914 + ], + [ + "ford", + -10.837928771972656 + ], + [ + "▁solo", + -10.83800220489502 + ], + [ + "▁cm", + -10.838126182556152 + ], + [ + "▁column", + -10.838242530822754 + ], + [ + "▁2015,", + -10.839003562927246 + ], + [ + "▁exercises", + -10.839138984680176 + ], + [ + "▁appliances", + -10.839571952819824 + ], + [ + "▁Jackson", + -10.839831352233888 + ], + [ + "▁Daniel", + -10.840617179870604 + ], + [ + "▁strange", + -10.840618133544922 + ], + [ + "▁coal", + -10.84066104888916 + ], + [ + "▁Feb", + -10.840699195861816 + ], + [ + "▁editing", + -10.8408203125 + ], + [ + "▁instantly", + -10.840840339660645 + ], + [ + "▁meaningful", + -10.840866088867188 + ], + [ + "▁ministry", + -10.841103553771973 + ], + [ + "this", + -10.841803550720217 + ], + [ + "▁Tim", + -10.842226028442385 + ], + [ + "▁wins", + -10.842799186706545 + ], + [ + "▁2013.", + -10.84368896484375 + ], + [ + "▁Greek", + -10.8438081741333 + ], + [ + "▁transform", + -10.843844413757324 + ], + [ + "▁75", + -10.844202995300291 + ], + [ + "▁cuts", + -10.844438552856444 + ], + [ + "▁evaluate", + -10.844626426696776 + ], + [ + "▁newest", + -10.8450288772583 + ], + [ + "▁actively", + -10.845348358154297 + ], + [ + "▁falls", + -10.845383644104004 + ], + [ + "▁photographer", + -10.84548568725586 + ], + [ + "▁artwork", + -10.845917701721191 + ], + [ + "▁1000", + -10.846112251281738 + ], + [ + "▁financing", + -10.846294403076172 + ], + [ + "▁heritage", + -10.846528053283691 + ], + [ + "▁Tech", + -10.847090721130373 + ], + [ + "▁Perhaps", + -10.84719944000244 + ], + [ + "▁overnight", + -10.847259521484377 + ], + [ + "▁aimed", + -10.847357749938965 + ], + [ + "▁worn", + -10.84743881225586 + ], + [ + "▁false", + -10.847639083862305 + ], + [ + "▁overcome", + -10.84765625 + ], + [ + "▁specified", + -10.848002433776855 + ], + [ + "▁plug", + -10.84817886352539 + ], + [ + "head", + -10.8482027053833 + ], + [ + "▁respectively", + -10.848713874816896 + ], + [ + "▁opt", + -10.848899841308594 + ], + [ + 
"town", + -10.849040031433104 + ], + [ + "▁newspaper", + -10.849903106689451 + ], + [ + "▁tablet", + -10.850212097167969 + ], + [ + "▁victims", + -10.850536346435549 + ], + [ + "▁Choose", + -10.850552558898926 + ], + [ + "▁wildlife", + -10.85061740875244 + ], + [ + "▁OK", + -10.850682258605955 + ], + [ + "$", + -10.851385116577148 + ], + [ + "lin", + -10.85140895843506 + ], + [ + "▁Iran", + -10.851469039916992 + ], + [ + "▁cooling", + -10.852250099182127 + ], + [ + "▁connecting", + -10.852455139160156 + ], + [ + "▁adopted", + -10.854178428649902 + ], + [ + "▁k", + -10.855243682861328 + ], + [ + "▁acquire", + -10.855393409729004 + ], + [ + "▁journal", + -10.855470657348633 + ], + [ + "▁broke", + -10.855831146240234 + ], + [ + "▁shirt", + -10.85593032836914 + ], + [ + "▁2006", + -10.856414794921877 + ], + [ + "▁makeup", + -10.856578826904297 + ], + [ + "▁pipe", + -10.85663604736328 + ], + [ + "▁Simply", + -10.856870651245115 + ], + [ + "▁edited", + -10.857097625732422 + ], + [ + "▁manufactured", + -10.857248306274414 + ], + [ + "▁passengers", + -10.85733699798584 + ], + [ + "▁sensor", + -10.857653617858888 + ], + [ + "est", + -10.858139038085938 + ], + [ + "▁circuit", + -10.858481407165527 + ], + [ + "▁pizza", + -10.85897445678711 + ], + [ + "▁Louis", + -10.85903549194336 + ], + [ + "▁Bridge", + -10.859084129333496 + ], + [ + "box", + -10.859286308288574 + ], + [ + "▁letting", + -10.859454154968262 + ], + [ + "▁cooperation", + -10.859806060791016 + ], + [ + "▁universities", + -10.859966278076172 + ], + [ + "bar", + -10.859975814819336 + ], + [ + "▁conventional", + -10.860182762145996 + ], + [ + "▁weapons", + -10.860311508178713 + ], + [ + "▁nose", + -10.86093521118164 + ], + [ + "▁aged", + -10.861180305480955 + ], + [ + "▁substantial", + -10.861661911010742 + ], + [ + "▁foam", + -10.861992835998535 + ], + [ + "▁circle", + -10.862579345703123 + ], + [ + "▁Standard", + -10.8628568649292 + ], + [ + "▁copper", + -10.863133430480955 + ], + [ + "▁instruments", + -10.863558769226074 + ], + [ + "▁Diego", + -10.863571166992188 + ], + [ + "▁happiness", + -10.863679885864258 + ], + [ + "▁washing", + -10.864076614379885 + ], + [ + "▁completing", + -10.864452362060549 + ], + [ + "▁delay", + -10.864699363708496 + ], + [ + "▁harm", + -10.86479663848877 + ], + [ + "▁Pakistan", + -10.865274429321287 + ], + [ + "▁pleasant", + -10.86532497406006 + ], + [ + "▁toys", + -10.86539363861084 + ], + [ + "▁overview", + -10.865845680236816 + ], + [ + "minute", + -10.865906715393066 + ], + [ + "▁opinions", + -10.867013931274414 + ], + [ + "▁copies", + -10.867206573486328 + ], + [ + "▁elected", + -10.86794376373291 + ], + [ + "▁basketball", + -10.868402481079102 + ], + [ + "▁ownership", + -10.868670463562012 + ], + [ + "-3", + -10.86924648284912 + ], + [ + "▁Magazine", + -10.869436264038086 + ], + [ + "▁oral", + -10.869589805603027 + ], + [ + "▁gathering", + -10.869658470153809 + ], + [ + "▁Model", + -10.869833946228027 + ], + [ + "▁parks", + -10.869967460632324 + ], + [ + "▁Protection", + -10.870811462402344 + ], + [ + "▁advise", + -10.871068000793455 + ], + [ + "▁western", + -10.87126922607422 + ], + [ + "▁instrument", + -10.87191677093506 + ], + [ + "18", + -10.87193202972412 + ], + [ + "▁workout", + -10.87195110321045 + ], + [ + "▁permit", + -10.87220859527588 + ], + [ + "▁encounter", + -10.872532844543455 + ], + [ + "▁Bathroom", + -10.872538566589355 + ], + [ + "▁Student", + -10.872879981994627 + ], + [ + "▁mold", + -10.87307357788086 + ], + [ + "ate", + -10.874221801757812 + ], + [ + "▁admit", + -10.8743314743042 + 
], + [ + "▁engineers", + -10.87442111968994 + ], + [ + "▁constructed", + -10.874573707580566 + ], + [ + "▁graduated", + -10.874791145324709 + ], + [ + "▁precious", + -10.87480354309082 + ], + [ + "▁Furniture", + -10.875114440917969 + ], + [ + "▁moisture", + -10.87527561187744 + ], + [ + "▁neither", + -10.875837326049805 + ], + [ + "▁surrounded", + -10.876106262207031 + ], + [ + "▁cruise", + -10.876840591430664 + ], + [ + "▁lasting", + -10.877241134643556 + ], + [ + "▁divorce", + -10.877348899841309 + ], + [ + "▁trailer", + -10.878000259399414 + ], + [ + "▁crash", + -10.878026962280272 + ], + [ + "▁consent", + -10.879009246826172 + ], + [ + "▁thoroughly", + -10.879061698913574 + ], + [ + "▁Pennsylvania", + -10.879136085510254 + ], + [ + "▁cats", + -10.879611015319824 + ], + [ + "▁acquisition", + -10.879621505737305 + ], + [ + "▁arrangements", + -10.879640579223633 + ], + [ + "▁removing", + -10.879972457885742 + ], + [ + "▁northern", + -10.879980087280272 + ], + [ + "▁chronic", + -10.880404472351074 + ], + [ + "▁Computer", + -10.880497932434082 + ], + [ + "▁Everyone", + -10.880579948425291 + ], + [ + "▁Max", + -10.88097095489502 + ], + [ + "▁habits", + -10.881101608276367 + ], + [ + "▁u", + -10.881726264953612 + ], + [ + "ant", + -10.881845474243164 + ], + [ + "▁Columbia", + -10.88200569152832 + ], + [ + "▁exception", + -10.882122039794922 + ], + [ + "id", + -10.88349437713623 + ], + [ + "▁Off", + -10.883655548095703 + ], + [ + "▁infection", + -10.883859634399414 + ], + [ + "▁distributed", + -10.88415813446045 + ], + [ + "▁installing", + -10.884278297424316 + ], + [ + "▁worship", + -10.884650230407717 + ], + [ + "▁picking", + -10.885018348693848 + ], + [ + "▁seller", + -10.885069847106934 + ], + [ + "▁Heart", + -10.885088920593262 + ], + [ + "▁singing", + -10.885139465332031 + ], + [ + "▁Give", + -10.885834693908691 + ], + [ + "▁tube", + -10.88586139678955 + ], + [ + "▁thousand", + -10.886910438537598 + ], + [ + "▁artistic", + -10.887252807617188 + ], + [ + "▁competitors", + -10.887782096862791 + ], + [ + "▁drives", + -10.887885093688965 + ], + [ + "▁•", + -10.888218879699709 + ], + [ + "work", + -10.888482093811035 + ], + [ + "▁Taylor", + -10.888508796691896 + ], + [ + "▁enhanced", + -10.888748168945312 + ], + [ + "il", + -10.888787269592283 + ], + [ + "▁belief", + -10.888795852661133 + ], + [ + "▁Jr", + -10.889019012451172 + ], + [ + "▁gardens", + -10.889323234558104 + ], + [ + "▁Personal", + -10.889845848083496 + ], + [ + "▁alumni", + -10.88990592956543 + ], + [ + "▁tonight", + -10.89139461517334 + ], + [ + "▁surely", + -10.891995429992676 + ], + [ + "▁applies", + -10.892037391662598 + ], + [ + "izing", + -10.892227172851562 + ], + [ + "▁fewer", + -10.893270492553713 + ], + [ + "▁NO", + -10.893620491027832 + ], + [ + "▁pilot", + -10.893710136413574 + ], + [ + "▁wrap", + -10.893891334533691 + ], + [ + "▁producer", + -10.894200325012209 + ], + [ + "▁Halloween", + -10.894476890563965 + ], + [ + "▁sectors", + -10.894501686096191 + ], + [ + "▁Blog", + -10.895004272460938 + ], + [ + "▁succeed", + -10.895537376403809 + ], + [ + "▁ft", + -10.895830154418944 + ], + [ + "▁impression", + -10.896087646484377 + ], + [ + "▁33", + -10.896127700805664 + ], + [ + "▁Per", + -10.896145820617676 + ], + [ + "▁artificial", + -10.896356582641602 + ], + [ + "▁integrity", + -10.896520614624023 + ], + [ + "▁winners", + -10.896567344665527 + ], + [ + "▁rarely", + -10.896727561950684 + ], + [ + "▁angle", + -10.896913528442385 + ], + [ + "▁instruction", + -10.897224426269531 + ], + [ + "▁bold", + -10.897561073303224 
+ ], + [ + "▁hat", + -10.897602081298828 + ], + [ + "▁stayed", + -10.897623062133787 + ], + [ + "▁exists", + -10.897699356079102 + ], + [ + "▁rough", + -10.897838592529297 + ], + [ + "▁Saint", + -10.89855670928955 + ], + [ + "▁drama", + -10.898676872253418 + ], + [ + "▁hosts", + -10.898809432983398 + ], + [ + "▁processed", + -10.89892578125 + ], + [ + "▁examine", + -10.898938179016112 + ], + [ + "▁union", + -10.899179458618164 + ], + [ + "▁Father", + -10.899231910705566 + ], + [ + "▁Easter", + -10.899588584899902 + ], + [ + "▁Miami", + -10.89995002746582 + ], + [ + "▁Exchange", + -10.900274276733398 + ], + [ + "▁Express", + -10.900626182556152 + ], + [ + "▁1/2", + -10.901238441467283 + ], + [ + "Like", + -10.901549339294434 + ], + [ + "▁heavily", + -10.901619911193848 + ], + [ + "▁bands", + -10.901671409606934 + ], + [ + "▁sorry", + -10.90167999267578 + ], + [ + "▁consulting", + -10.90174388885498 + ], + [ + "▁Casino", + -10.901938438415527 + ], + [ + "▁Despite", + -10.902206420898438 + ], + [ + "▁latter", + -10.902207374572754 + ], + [ + "▁Frank", + -10.902728080749512 + ], + [ + "▁math", + -10.904130935668944 + ], + [ + "▁seasons", + -10.904139518737791 + ], + [ + "hand", + -10.904492378234863 + ], + [ + "▁sleeping", + -10.904574394226074 + ], + [ + "▁mattress", + -10.904644012451172 + ], + [ + "Other", + -10.904708862304688 + ], + [ + "▁strive", + -10.905332565307615 + ], + [ + "▁Customer", + -10.905755996704102 + ], + [ + "▁balanced", + -10.905876159667969 + ], + [ + "▁fascinating", + -10.905957221984863 + ], + [ + "▁gotten", + -10.905959129333496 + ], + [ + "▁assembly", + -10.906463623046877 + ], + [ + "▁describes", + -10.906693458557127 + ], + [ + "▁Form", + -10.906734466552734 + ], + [ + "▁romantic", + -10.90773582458496 + ], + [ + "▁fiber", + -10.907753944396973 + ], + [ + "▁ratings", + -10.907822608947754 + ], + [ + "▁athletes", + -10.908032417297363 + ], + [ + "▁suddenly", + -10.90806770324707 + ], + [ + "et", + -10.908384323120115 + ], + [ + "▁backup", + -10.909320831298828 + ], + [ + "▁fruits", + -10.909784317016602 + ], + [ + "▁interviews", + -10.910310745239258 + ], + [ + "▁Would", + -10.91041660308838 + ], + [ + "▁mechanical", + -10.910428047180176 + ], + [ + "▁corn", + -10.9105806350708 + ], + [ + "▁handy", + -10.910597801208496 + ], + [ + "▁reply", + -10.910615921020508 + ], + [ + "▁Sky", + -10.910689353942873 + ], + [ + "▁inspiring", + -10.910823822021484 + ], + [ + "▁meters", + -10.911026000976562 + ], + [ + "date", + -10.911287307739258 + ], + [ + "▁spa", + -10.911352157592772 + ], + [ + "▁cabinets", + -10.911578178405762 + ], + [ + "▁Ryan", + -10.911659240722656 + ], + [ + "▁250", + -10.911909103393556 + ], + [ + "▁piano", + -10.912577629089355 + ], + [ + "board", + -10.913911819458008 + ], + [ + "▁smell", + -10.914443016052246 + ], + [ + "▁10%", + -10.91530418395996 + ], + [ + "▁frequent", + -10.915326118469238 + ], + [ + "▁hearts", + -10.916037559509276 + ], + [ + "▁2012.", + -10.91604709625244 + ], + [ + "▁breaks", + -10.91624641418457 + ], + [ + "▁essentially", + -10.91645336151123 + ], + [ + "▁guides", + -10.916481018066406 + ], + [ + "▁Wedding", + -10.916718482971191 + ], + [ + "▁excellence", + -10.916973114013672 + ], + [ + "▁impressed", + -10.917492866516112 + ], + [ + "▁employed", + -10.917661666870115 + ], + [ + "▁diabetes", + -10.917683601379396 + ], + [ + "▁donation", + -10.917719841003418 + ], + [ + "▁Color", + -10.91832447052002 + ], + [ + "▁Type", + -10.918477058410645 + ], + [ + "▁vinyl", + -10.91849136352539 + ], + [ + "▁understood", + 
-10.918584823608398 + ], + [ + "car", + -10.918618202209473 + ], + [ + "▁versatile", + -10.918672561645508 + ], + [ + "▁casual", + -10.918692588806152 + ], + [ + "▁regulatory", + -10.91894245147705 + ], + [ + "▁Tax", + -10.919161796569824 + ], + [ + "▁discussions", + -10.920260429382324 + ], + [ + "▁restoration", + -10.920487403869627 + ], + [ + "▁Cover", + -10.920686721801758 + ], + [ + "▁losses", + -10.9209623336792 + ], + [ + "▁bacteria", + -10.921198844909668 + ], + [ + "▁ear", + -10.921359062194824 + ], + [ + "▁distinct", + -10.921814918518066 + ], + [ + "▁outfit", + -10.922490119934082 + ], + [ + "▁periods", + -10.92282485961914 + ], + [ + "▁Seattle", + -10.922832489013672 + ], + [ + "▁Perfect", + -10.923006057739258 + ], + [ + "▁lemon", + -10.923294067382812 + ], + [ + "▁dirt", + -10.923593521118164 + ], + [ + "▁religion", + -10.92372226715088 + ], + [ + "▁puts", + -10.924025535583496 + ], + [ + "▁Construction", + -10.92404556274414 + ], + [ + "▁purchases", + -10.924217224121094 + ], + [ + "▁guard", + -10.924464225769045 + ], + [ + "▁Custom", + -10.924674987792969 + ], + [ + "▁vice", + -10.925177574157717 + ], + [ + "bit", + -10.92562770843506 + ], + [ + "▁Trade", + -10.925943374633787 + ], + [ + "▁tiles", + -10.926432609558104 + ], + [ + "▁interact", + -10.926531791687012 + ], + [ + "▁singer", + -10.926705360412598 + ], + [ + "▁robust", + -10.927149772644045 + ], + [ + "▁2014,", + -10.927154541015623 + ], + [ + "Two", + -10.92717742919922 + ], + [ + "▁enjoyable", + -10.927785873413086 + ], + [ + "▁API", + -10.927873611450195 + ], + [ + "▁Baby", + -10.927897453308104 + ], + [ + "▁knowledgeable", + -10.92796516418457 + ], + [ + "▁experiment", + -10.928173065185549 + ], + [ + "▁chemicals", + -10.928220748901367 + ], + [ + "▁Low", + -10.928248405456545 + ], + [ + "▁attempts", + -10.928421974182127 + ], + [ + "▁symbol", + -10.92881202697754 + ], + [ + "▁decline", + -10.928845405578612 + ], + [ + "▁favor", + -10.929113388061523 + ], + [ + "▁Hollywood", + -10.929276466369627 + ], + [ + "▁protecting", + -10.929769515991213 + ], + [ + "▁beans", + -10.930129051208496 + ], + [ + "▁babies", + -10.930201530456545 + ], + [ + "▁Plant", + -10.930305480957031 + ], + [ + "▁browse", + -10.930715560913086 + ], + [ + "▁olive", + -10.930968284606934 + ], + [ + "▁recover", + -10.931158065795898 + ], + [ + "▁Austin", + -10.931302070617676 + ], + [ + "▁frequency", + -10.931374549865724 + ], + [ + "▁strike", + -10.93168830871582 + ], + [ + "7.", + -10.932065963745115 + ], + [ + "▁actor", + -10.932188987731934 + ], + [ + "▁relating", + -10.93218994140625 + ], + [ + "▁Ball", + -10.93264865875244 + ], + [ + "▁lease", + -10.932682991027832 + ], + [ + "di", + -10.932903289794922 + ], + [ + "▁Four", + -10.933317184448242 + ], + [ + "▁supplied", + -10.933340072631836 + ], + [ + "▁2005", + -10.933401107788086 + ], + [ + "▁dramatic", + -10.933448791503906 + ], + [ + "▁mg", + -10.933876037597656 + ], + [ + "▁technicians", + -10.93454647064209 + ], + [ + "▁lawyers", + -10.934929847717283 + ], + [ + "▁representation", + -10.93502712249756 + ], + [ + "▁Minnesota", + -10.935291290283203 + ], + [ + "▁accomplish", + -10.935319900512695 + ], + [ + "▁Mini", + -10.935327529907228 + ], + [ + "▁ALL", + -10.93558120727539 + ], + [ + "▁surfaces", + -10.935640335083008 + ], + [ + "▁apartments", + -10.935702323913574 + ], + [ + "▁stood", + -10.935995101928713 + ], + [ + "▁Fox", + -10.936320304870604 + ], + [ + "▁everybody", + -10.937088966369627 + ], + [ + "▁h", + -10.937119483947754 + ], + [ + "▁segment", + -10.937172889709473 + 
], + [ + "▁65", + -10.938011169433594 + ], + [ + "▁merely", + -10.938657760620115 + ], + [ + "▁targets", + -10.938942909240724 + ], + [ + "scale", + -10.939363479614258 + ], + [ + "▁signing", + -10.93956184387207 + ], + [ + "foot", + -10.939632415771484 + ], + [ + "▁respective", + -10.93967342376709 + ], + [ + "▁Level", + -10.939818382263184 + ], + [ + "▁produces", + -10.940553665161133 + ], + [ + "▁disaster", + -10.940634727478027 + ], + [ + "▁victim", + -10.941041946411133 + ], + [ + "▁Field", + -10.941048622131348 + ], + [ + "fi", + -10.9414644241333 + ], + [ + "ir", + -10.94167709350586 + ], + [ + "▁deserve", + -10.941731452941896 + ], + [ + "▁prison", + -10.942063331604004 + ], + [ + "Any", + -10.942344665527344 + ], + [ + "▁beef", + -10.942461013793944 + ], + [ + "Step", + -10.942574501037598 + ], + [ + "▁addressed", + -10.942666053771973 + ], + [ + "▁shelf", + -10.942852973937988 + ], + [ + "▁steam", + -10.943066596984863 + ], + [ + "▁decrease", + -10.943577766418455 + ], + [ + "▁subscription", + -10.943857192993164 + ], + [ + "world", + -10.94443130493164 + ], + [ + "ups", + -10.944591522216797 + ], + [ + "▁assured", + -10.944745063781738 + ], + [ + "can", + -10.94520378112793 + ], + [ + "▁gentle", + -10.94536590576172 + ], + [ + "Finally", + -10.945708274841309 + ], + [ + "▁Victoria", + -10.94587516784668 + ], + [ + "▁remarkable", + -10.945878028869627 + ], + [ + "▁Mother", + -10.946468353271484 + ], + [ + "▁Think", + -10.946475982666016 + ], + [ + "▁celebrated", + -10.946541786193848 + ], + [ + "▁burn", + -10.946720123291016 + ], + [ + "▁chips", + -10.94693374633789 + ], + [ + "▁suspect", + -10.947248458862305 + ], + [ + "▁listings", + -10.947270393371582 + ], + [ + "▁HR", + -10.947383880615234 + ], + [ + "▁coconut", + -10.947491645812988 + ], + [ + "▁fitted", + -10.947654724121094 + ], + [ + "▁FL", + -10.947710990905762 + ], + [ + "▁contents", + -10.94813346862793 + ], + [ + "▁spare", + -10.949023246765137 + ], + [ + "▁quantity", + -10.949066162109377 + ], + [ + "▁administrative", + -10.949141502380373 + ], + [ + "▁nutrition", + -10.949529647827148 + ], + [ + "▁soup", + -10.94960594177246 + ], + [ + "▁apparently", + -10.950042724609377 + ], + [ + "▁announcement", + -10.950397491455078 + ], + [ + "▁Oct", + -10.95048713684082 + ], + [ + "▁generations", + -10.951252937316896 + ], + [ + "40", + -10.952189445495604 + ], + [ + "▁Property", + -10.952573776245115 + ], + [ + "ana", + -10.952604293823242 + ], + [ + "▁passes", + -10.953455924987791 + ], + [ + "▁settlement", + -10.953472137451172 + ], + [ + "▁representing", + -10.953763961791992 + ], + [ + "▁damages", + -10.953810691833496 + ], + [ + "▁facilitate", + -10.954224586486816 + ], + [ + "▁scenario", + -10.954380989074709 + ], + [ + "▁drain", + -10.954462051391602 + ], + [ + "▁hopefully", + -10.954835891723633 + ], + [ + "▁appreciated", + -10.954842567443848 + ], + [ + "▁principal", + -10.95492172241211 + ], + [ + "▁releases", + -10.955077171325684 + ], + [ + "▁repeat", + -10.955132484436035 + ], + [ + "▁alike", + -10.955160140991213 + ], + [ + "▁modified", + -10.955310821533203 + ], + [ + "mo", + -10.955941200256348 + ], + [ + "▁publishing", + -10.95595645904541 + ], + [ + "▁edges", + -10.956045150756836 + ], + [ + "▁transformation", + -10.956476211547852 + ], + [ + "▁difficulty", + -10.95670223236084 + ], + [ + "book", + -10.956748008728027 + ], + [ + "ha", + -10.956796646118164 + ], + [ + "▁discovery", + -10.95705223083496 + ], + [ + "▁jacket", + -10.95709991455078 + ], + [ + "▁paintings", + -10.957250595092772 + ], + [ + 
"▁honey", + -10.957303047180176 + ], + [ + "▁mouse", + -10.957634925842283 + ], + [ + "▁Action", + -10.957703590393066 + ], + [ + "▁y", + -10.95779037475586 + ], + [ + "▁virtually", + -10.958212852478027 + ], + [ + "▁Industry", + -10.958306312561035 + ], + [ + "▁flag", + -10.959117889404297 + ], + [ + "▁MS", + -10.959219932556152 + ], + [ + "▁apple", + -10.95944118499756 + ], + [ + "▁divided", + -10.959473609924316 + ], + [ + "▁reviewed", + -10.959490776062012 + ], + [ + "▁extraordinary", + -10.959548950195312 + ], + [ + "▁refund", + -10.95969009399414 + ], + [ + "▁salary", + -10.959752082824709 + ], + [ + "▁diagnosis", + -10.959794044494627 + ], + [ + "▁observed", + -10.959808349609377 + ], + [ + "▁emphasis", + -10.959843635559082 + ], + [ + "▁Due", + -10.960431098937988 + ], + [ + "▁Multi", + -10.960512161254885 + ], + [ + "▁behaviour", + -10.96052074432373 + ], + [ + "▁vertical", + -10.96070384979248 + ], + [ + "▁Finance", + -10.960854530334473 + ], + [ + "▁operators", + -10.96117115020752 + ], + [ + "▁survive", + -10.96207046508789 + ], + [ + "▁Republican", + -10.962129592895508 + ], + [ + "▁Working", + -10.962199211120604 + ], + [ + "▁gathered", + -10.962371826171877 + ], + [ + "▁conversations", + -10.962702751159668 + ], + [ + "▁selecting", + -10.962777137756348 + ], + [ + "▁timely", + -10.963027000427246 + ], + [ + "▁ON", + -10.963102340698242 + ], + [ + "▁Oregon", + -10.963106155395508 + ], + [ + "▁worried", + -10.96369457244873 + ], + [ + "▁relation", + -10.963702201843262 + ], + [ + "Free", + -10.964314460754396 + ], + [ + "▁argument", + -10.964327812194824 + ], + [ + "▁relaxed", + -10.964503288269045 + ], + [ + "▁shut", + -10.964515686035156 + ], + [ + "▁Henry", + -10.96460247039795 + ], + [ + "▁Non", + -10.964802742004396 + ], + [ + "▁qualify", + -10.965097427368164 + ], + [ + "25", + -10.965450286865234 + ], + [ + "▁pregnancy", + -10.965594291687012 + ], + [ + "▁Latin", + -10.966097831726074 + ], + [ + "▁Supreme", + -10.967416763305664 + ], + [ + "▁operator", + -10.967490196228027 + ], + [ + "▁Bookmark", + -10.967524528503418 + ], + [ + "▁mounted", + -10.96757698059082 + ], + [ + "▁retailers", + -10.968029022216797 + ], + [ + "sized", + -10.968286514282228 + ], + [ + "▁translation", + -10.968669891357422 + ], + [ + "▁$5", + -10.969141960144045 + ], + [ + "▁finds", + -10.969152450561523 + ], + [ + "▁Gas", + -10.969338417053224 + ], + [ + "▁admission", + -10.969923973083496 + ], + [ + "▁fraud", + -10.97024154663086 + ], + [ + "▁Adam", + -10.970295906066896 + ], + [ + "▁generous", + -10.970477104187012 + ], + [ + "▁earnings", + -10.970520973205566 + ], + [ + "▁Am", + -10.970840454101562 + ], + [ + "▁fastest", + -10.970952033996582 + ], + [ + "▁expanding", + -10.971179962158203 + ], + [ + "▁commission", + -10.97119140625 + ], + [ + "▁Fall", + -10.971275329589844 + ], + [ + "▁precise", + -10.971373558044434 + ], + [ + "▁pray", + -10.971403121948242 + ], + [ + "▁intellectual", + -10.97171401977539 + ], + [ + "▁wisdom", + -10.971837043762209 + ], + [ + "▁Edition", + -10.972055435180664 + ], + [ + "▁Moon", + -10.97206211090088 + ], + [ + "ize", + -10.972246170043944 + ], + [ + "His", + -10.97230052947998 + ], + [ + "▁55", + -10.972457885742188 + ], + [ + "▁rack", + -10.972536087036133 + ], + [ + "Love", + -10.97319793701172 + ], + [ + "▁encouraging", + -10.973198890686035 + ], + [ + "ac", + -10.973362922668455 + ], + [ + "▁arrested", + -10.973499298095703 + ], + [ + "▁productive", + -10.973894119262695 + ], + [ + "▁Brian", + -10.974153518676758 + ], + [ + "▁dough", + 
-10.974382400512695 + ], + [ + "▁Dallas", + -10.974627494812012 + ], + [ + "▁ski", + -10.974665641784668 + ], + [ + "▁incorporate", + -10.97476577758789 + ], + [ + "▁Beautiful", + -10.974843978881836 + ], + [ + "▁rings", + -10.97496223449707 + ], + [ + "▁wines", + -10.975059509277344 + ], + [ + "▁bio", + -10.97507095336914 + ], + [ + "▁Chair", + -10.975682258605955 + ], + [ + "▁conclusion", + -10.975686073303224 + ], + [ + "▁Alex", + -10.97590160369873 + ], + [ + "▁Change", + -10.97612190246582 + ], + [ + "▁$10", + -10.97624397277832 + ], + [ + "▁minister", + -10.976713180541992 + ], + [ + "▁Turkey", + -10.977399826049805 + ], + [ + "▁accordance", + -10.977428436279297 + ], + [ + "Looking", + -10.977510452270508 + ], + [ + "▁Available", + -10.977596282958984 + ], + [ + "▁entries", + -10.977627754211426 + ], + [ + "▁evil", + -10.977865219116213 + ], + [ + "▁inter", + -10.978038787841797 + ], + [ + "▁oils", + -10.978073120117188 + ], + [ + "▁Experience", + -10.978087425231934 + ], + [ + "▁Based", + -10.978179931640623 + ], + [ + "ah", + -10.978424072265623 + ], + [ + "▁combat", + -10.978800773620604 + ], + [ + "▁popularity", + -10.978909492492676 + ], + [ + "▁ill", + -10.978999137878418 + ], + [ + "▁blogs", + -10.979012489318848 + ], + [ + "▁tourist", + -10.979154586791992 + ], + [ + "▁lightweight", + -10.979185104370115 + ], + [ + "▁exit", + -10.97937297821045 + ], + [ + "▁bug", + -10.97972583770752 + ], + [ + "▁chest", + -10.979890823364258 + ], + [ + "▁attractions", + -10.980042457580566 + ], + [ + "▁Delhi", + -10.980831146240234 + ], + [ + "▁placing", + -10.98090934753418 + ], + [ + "▁Daily", + -10.981081008911133 + ], + [ + "▁Tree", + -10.981511116027832 + ], + [ + "▁Certified", + -10.981680870056152 + ], + [ + "▁adoption", + -10.981793403625488 + ], + [ + "▁entertaining", + -10.982260704040527 + ], + [ + "▁grounds", + -10.982651710510254 + ], + [ + "▁workforce", + -10.982741355895996 + ], + [ + "▁sole", + -10.982869148254396 + ], + [ + "▁elsewhere", + -10.983526229858398 + ], + [ + "ham", + -10.983742713928224 + ], + [ + "▁lady", + -10.984437942504885 + ], + [ + "Read", + -10.9844388961792 + ], + [ + "▁powered", + -10.984683990478516 + ], + [ + "▁opposed", + -10.98520278930664 + ], + [ + "▁achieving", + -10.985523223876951 + ], + [ + "▁Everything", + -10.985600471496582 + ], + [ + "American", + -10.98578643798828 + ], + [ + "▁captured", + -10.985810279846191 + ], + [ + "▁specifications", + -10.986180305480955 + ], + [ + "ce", + -10.986324310302734 + ], + [ + "▁proved", + -10.98671054840088 + ], + [ + "▁AI", + -10.98688793182373 + ], + [ + "▁acres", + -10.986947059631348 + ], + [ + "▁AT", + -10.987314224243164 + ], + [ + "▁estimates", + -10.987421989440918 + ], + [ + "▁indicated", + -10.987872123718262 + ], + [ + "▁composition", + -10.987920761108398 + ], + [ + "▁Civil", + -10.988062858581545 + ], + [ + "▁rail", + -10.98811149597168 + ], + [ + "▁navigate", + -10.98843002319336 + ], + [ + "▁Ocean", + -10.988463401794434 + ], + [ + "▁Memorial", + -10.98853588104248 + ], + [ + "▁con", + -10.988643646240234 + ], + [ + "▁Ontario", + -10.98878288269043 + ], + [ + "16", + -10.988831520080566 + ], + [ + "▁Large", + -10.98886775970459 + ], + [ + "▁slide", + -10.989051818847656 + ], + [ + "▁poverty", + -10.989419937133787 + ], + [ + "▁advised", + -10.989455223083496 + ], + [ + "▁theatre", + -10.989580154418944 + ], + [ + "▁prescription", + -10.989697456359863 + ], + [ + "▁Band", + -10.989733695983888 + ], + [ + "▁Cape", + -10.989873886108398 + ], + [ + "▁priced", + -10.989916801452637 + ], + [ + 
"▁pen", + -10.990041732788086 + ], + [ + "▁copyright", + -10.990079879760742 + ], + [ + "▁Authority", + -10.990114212036133 + ], + [ + "▁grace", + -10.990124702453612 + ], + [ + "é", + -10.990221977233888 + ], + [ + "▁lie", + -10.990325927734377 + ], + [ + "▁reverse", + -10.99107265472412 + ], + [ + "▁capability", + -10.991170883178713 + ], + [ + "▁caring", + -10.991639137268066 + ], + [ + "▁Vice", + -10.991695404052734 + ], + [ + "▁Kim", + -10.991804122924805 + ], + [ + "▁Con", + -10.991926193237305 + ], + [ + "ge", + -10.99307632446289 + ], + [ + "▁Need", + -10.993172645568848 + ], + [ + "▁forth", + -10.993351936340332 + ], + [ + "▁Youth", + -10.99339485168457 + ], + [ + "▁streaming", + -10.993406295776367 + ], + [ + "▁parameters", + -10.99348258972168 + ], + [ + "▁Roman", + -10.994100570678713 + ], + [ + "▁locate", + -10.994292259216309 + ], + [ + "▁vulnerable", + -10.994874954223633 + ], + [ + "▁boss", + -10.994975090026855 + ], + [ + "▁screening", + -10.994986534118652 + ], + [ + "▁Mrs", + -10.996150016784668 + ], + [ + "100", + -10.996222496032717 + ], + [ + "▁Melbourne", + -10.996599197387695 + ], + [ + "▁iconic", + -10.996880531311035 + ], + [ + "▁fiction", + -10.997015953063965 + ], + [ + "▁ranked", + -10.99708652496338 + ], + [ + "▁somehow", + -10.997480392456056 + ], + [ + "▁Cook", + -10.997486114501951 + ], + [ + "▁prints", + -10.998080253601074 + ], + [ + "month", + -10.998455047607422 + ], + [ + "▁duration", + -10.998784065246582 + ], + [ + "▁Orange", + -10.998790740966797 + ], + [ + "▁provision", + -10.999244689941406 + ], + [ + "set", + -10.999302864074709 + ], + [ + "▁weak", + -10.999329566955566 + ], + [ + "▁cleaner", + -10.99977207183838 + ], + [ + "▁walks", + -10.999951362609863 + ], + [ + "▁boot", + -11.00063705444336 + ], + [ + "▁courts", + -11.000943183898926 + ], + [ + "con", + -11.000946044921877 + ], + [ + "▁danger", + -11.001104354858398 + ], + [ + "▁Industrial", + -11.001957893371582 + ], + [ + "▁mount", + -11.002117156982422 + ], + [ + "▁arranged", + -11.002156257629396 + ], + [ + "▁exclusively", + -11.002331733703612 + ], + [ + "▁wishes", + -11.002548217773438 + ], + [ + "▁Cleaning", + -11.00287628173828 + ], + [ + "▁Atlanta", + -11.003257751464844 + ], + [ + "▁endless", + -11.004283905029297 + ], + [ + "▁memorable", + -11.004438400268556 + ], + [ + "▁Manchester", + -11.00446319580078 + ], + [ + "▁baseball", + -11.004494667053224 + ], + [ + "▁hunting", + -11.004498481750488 + ], + [ + "▁immune", + -11.004566192626951 + ], + [ + "▁crushing", + -11.005051612854004 + ], + [ + "▁hip", + -11.0050687789917 + ], + [ + "▁Environmental", + -11.005090713500977 + ], + [ + "▁Classic", + -11.005395889282228 + ], + [ + "▁export", + -11.005526542663574 + ], + [ + "▁Pay", + -11.00562858581543 + ], + [ + "▁occasions", + -11.005643844604492 + ], + [ + "].", + -11.005959510803224 + ], + [ + "▁Assistant", + -11.006132125854492 + ], + [ + "People", + -11.006564140319824 + ], + [ + "▁restrictions", + -11.007036209106444 + ], + [ + "▁Advanced", + -11.007161140441896 + ], + [ + "▁reward", + -11.007257461547852 + ], + [ + "▁photograph", + -11.007277488708496 + ], + [ + "▁operates", + -11.007434844970703 + ], + [ + "▁oldest", + -11.008010864257812 + ], + [ + "▁ultra", + -11.00851058959961 + ], + [ + "▁Forum", + -11.008535385131836 + ], + [ + "▁publish", + -11.008801460266112 + ], + [ + "▁enabled", + -11.008955955505373 + ], + [ + "▁III", + -11.00916862487793 + ], + [ + "▁trails", + -11.009459495544434 + ], + [ + "▁stocks", + -11.009465217590332 + ], + [ + "▁universe", + 
-11.009810447692873 + ], + [ + "▁bound", + -11.009870529174805 + ], + [ + "▁Ms", + -11.010072708129885 + ], + [ + "▁tennis", + -11.010127067565918 + ], + [ + "▁NFL", + -11.010332107543944 + ], + [ + "▁fireplace", + -11.010454177856444 + ], + [ + "▁donations", + -11.01082992553711 + ], + [ + "▁twin", + -11.01101303100586 + ], + [ + "▁resulted", + -11.01113986968994 + ], + [ + "▁Section", + -11.011199951171877 + ], + [ + "▁120", + -11.011366844177246 + ], + [ + "▁Number", + -11.012102127075195 + ], + [ + "▁pointed", + -11.012234687805176 + ], + [ + "ists", + -11.012239456176758 + ], + [ + "▁burning", + -11.012289047241213 + ], + [ + "▁keen", + -11.012346267700195 + ], + [ + "▁Size", + -11.012455940246582 + ], + [ + "▁Ray", + -11.01250457763672 + ], + [ + "▁closet", + -11.012545585632324 + ], + [ + "responsibilities", + -11.013330459594728 + ], + [ + "▁travelling", + -11.013778686523438 + ], + [ + "what", + -11.014068603515623 + ], + [ + "▁bid", + -11.014152526855469 + ], + [ + "▁Ed", + -11.015019416809082 + ], + [ + "▁profits", + -11.015192031860352 + ], + [ + "▁Brazil", + -11.015310287475586 + ], + [ + "▁showcase", + -11.016085624694824 + ], + [ + "▁colorful", + -11.016311645507812 + ], + [ + "▁legacy", + -11.01634120941162 + ], + [ + "▁ordinary", + -11.01646900177002 + ], + [ + "▁Linux", + -11.016802787780762 + ], + [ + "▁waves", + -11.016940116882324 + ], + [ + "▁Resume", + -11.01700496673584 + ], + [ + "▁34", + -11.017033576965332 + ], + [ + "▁permalink", + -11.017284393310549 + ], + [ + "▁excitement", + -11.017287254333496 + ], + [ + "▁tied", + -11.017447471618652 + ], + [ + "q", + -11.01759433746338 + ], + [ + "▁Mo", + -11.01766300201416 + ], + [ + "run", + -11.017724990844728 + ], + [ + "▁jaw", + -11.018365859985352 + ], + [ + "▁DNA", + -11.018486976623535 + ], + [ + "▁stops", + -11.019089698791504 + ], + [ + "▁hide", + -11.019224166870115 + ], + [ + "▁Miller", + -11.019323348999023 + ], + [ + "ability", + -11.01949405670166 + ], + [ + "▁hesitate", + -11.019608497619627 + ], + [ + "▁bulk", + -11.019700050354004 + ], + [ + "▁thorough", + -11.01974868774414 + ], + [ + "▁trucks", + -11.019891738891602 + ], + [ + "▁Run", + -11.02032470703125 + ], + [ + "▁prevention", + -11.02066421508789 + ], + [ + "▁scan", + -11.02093505859375 + ], + [ + "▁musicians", + -11.02116584777832 + ], + [ + "▁crystal", + -11.021469116210938 + ], + [ + "▁Donald", + -11.021650314331056 + ], + [ + "▁Davis", + -11.021836280822754 + ], + [ + "▁swing", + -11.021939277648926 + ], + [ + "law", + -11.022489547729492 + ], + [ + "▁conservation", + -11.0227632522583 + ], + [ + "▁rank", + -11.022855758666992 + ], + [ + "▁Cat", + -11.022930145263672 + ], + [ + "▁wise", + -11.023062705993652 + ], + [ + "Does", + -11.02312469482422 + ], + [ + "▁generic", + -11.023422241210938 + ], + [ + "▁seating", + -11.023507118225098 + ], + [ + "▁Door", + -11.023512840270996 + ], + [ + "▁shaped", + -11.023565292358398 + ], + [ + "▁retain", + -11.02366828918457 + ], + [ + "▁tailored", + -11.023802757263184 + ], + [ + "13", + -11.023838996887209 + ], + [ + "▁hospitals", + -11.023907661437988 + ], + [ + "▁unlike", + -11.024063110351562 + ], + [ + "▁optimal", + -11.02439022064209 + ], + [ + "km", + -11.024971961975098 + ], + [ + "▁En", + -11.025174140930176 + ], + [ + "▁buttons", + -11.025639533996582 + ], + [ + "▁expense", + -11.026180267333984 + ], + [ + "▁horses", + -11.026193618774414 + ], + [ + "▁glasses", + -11.026695251464844 + ], + [ + "▁Nature", + -11.026893615722656 + ], + [ + "!”", + -11.026935577392578 + ], + [ + "▁Jordan", + 
-11.027084350585938 + ], + [ + "▁600", + -11.027193069458008 + ], + [ + "▁MD", + -11.027392387390137 + ], + [ + "▁strengthen", + -11.027578353881836 + ], + [ + "▁assignment", + -11.027767181396484 + ], + [ + "▁mess", + -11.02785587310791 + ], + [ + "▁Select", + -11.027860641479492 + ], + [ + "▁Bush", + -11.027891159057615 + ], + [ + "Best", + -11.02808666229248 + ], + [ + "▁NC", + -11.02818202972412 + ], + [ + "▁2013,", + -11.028276443481444 + ], + [ + "▁Kevin", + -11.028411865234377 + ], + [ + "▁frames", + -11.028443336486816 + ], + [ + "▁conditioning", + -11.02855396270752 + ], + [ + "▁Money", + -11.028654098510742 + ], + [ + "▁Paper", + -11.029204368591309 + ], + [ + "▁batteries", + -11.029255867004396 + ], + [ + "▁Philadelphia", + -11.02932357788086 + ], + [ + "▁sexual", + -11.02940845489502 + ], + [ + "▁Creative", + -11.030434608459473 + ], + [ + "▁VA", + -11.030467987060549 + ], + [ + "▁Dog", + -11.030753135681152 + ], + [ + "ive", + -11.030779838562012 + ], + [ + "ex", + -11.031038284301758 + ], + [ + "lo", + -11.031452178955078 + ], + [ + "▁laundry", + -11.031453132629396 + ], + [ + "▁sized", + -11.031492233276367 + ], + [ + "▁ought", + -11.031808853149414 + ], + [ + "▁okay", + -11.031850814819336 + ], + [ + "▁satellite", + -11.032134056091309 + ], + [ + "▁tackle", + -11.03227996826172 + ], + [ + "▁physically", + -11.032629013061523 + ], + [ + "▁keyboard", + -11.032642364501951 + ], + [ + "▁Resources", + -11.032670021057127 + ], + [ + "▁Again", + -11.033076286315918 + ], + [ + "▁dancing", + -11.03313446044922 + ], + [ + "▁preserve", + -11.033288955688477 + ], + [ + "▁stomach", + -11.033324241638184 + ], + [ + "▁voters", + -11.033397674560549 + ], + [ + "▁loop", + -11.033495903015137 + ], + [ + "▁tourism", + -11.033921241760254 + ], + [ + "▁fingers", + -11.035216331481934 + ], + [ + "▁resistant", + -11.035468101501465 + ], + [ + "▁boots", + -11.03594207763672 + ], + [ + "▁movements", + -11.03603458404541 + ], + [ + "▁landing", + -11.036091804504396 + ], + [ + "▁tutorial", + -11.036173820495604 + ], + [ + "▁Chapter", + -11.03639316558838 + ], + [ + "▁sentence", + -11.03653335571289 + ], + [ + "▁Iron", + -11.036663055419922 + ], + [ + "▁Regional", + -11.036730766296388 + ], + [ + "▁Double", + -11.036846160888672 + ], + [ + "▁Wisconsin", + -11.037057876586914 + ], + [ + "▁Planning", + -11.038171768188477 + ], + [ + "▁TX", + -11.038338661193848 + ], + [ + "ite", + -11.038426399230955 + ], + [ + "▁plumbing", + -11.038442611694336 + ], + [ + "▁answered", + -11.038447380065918 + ], + [ + "ler", + -11.03844928741455 + ], + [ + "▁spin", + -11.038455963134766 + ], + [ + "▁placement", + -11.038777351379396 + ], + [ + "▁shed", + -11.039298057556152 + ], + [ + "▁deadline", + -11.03932762145996 + ], + [ + "▁formation", + -11.039411544799805 + ], + [ + "▁sorts", + -11.039687156677246 + ], + [ + "▁1.5", + -11.039868354797363 + ], + [ + "▁knee", + -11.039895057678224 + ], + [ + "▁departments", + -11.039928436279297 + ], + [ + "▁involvement", + -11.040289878845217 + ], + [ + "▁pushing", + -11.040863037109377 + ], + [ + "▁Unit", + -11.04104995727539 + ], + [ + "ke", + -11.041178703308104 + ], + [ + "▁finger", + -11.041397094726562 + ], + [ + "▁clinic", + -11.041430473327637 + ], + [ + "▁reliability", + -11.04177474975586 + ], + [ + "▁anytime", + -11.042344093322754 + ], + [ + "▁army", + -11.042974472045898 + ], + [ + "▁likes", + -11.043102264404297 + ], + [ + "▁references", + -11.043296813964844 + ], + [ + "▁2011.", + -11.043463706970217 + ], + [ + "▁Sound", + -11.04348373413086 + ], + [ + "▁stir", + 
-11.043487548828123 + ], + [ + "▁lawn", + -11.043628692626951 + ], + [ + "▁Moreover", + -11.043755531311035 + ], + [ + "▁Date", + -11.044411659240724 + ], + [ + "▁elections", + -11.04460620880127 + ], + [ + "▁Sarah", + -11.04500675201416 + ], + [ + "▁Bed", + -11.04515266418457 + ], + [ + "▁Member", + -11.045307159423828 + ], + [ + "▁presentations", + -11.045342445373535 + ], + [ + "▁opposition", + -11.045412063598633 + ], + [ + "▁Wales", + -11.04542350769043 + ], + [ + "▁luxurious", + -11.045597076416016 + ], + [ + "▁struck", + -11.047088623046877 + ], + [ + "▁Lady", + -11.047106742858888 + ], + [ + "▁diamond", + -11.047194480895996 + ], + [ + "stone", + -11.047338485717772 + ], + [ + "▁substance", + -11.04761791229248 + ], + [ + "▁summary", + -11.047688484191896 + ], + [ + "▁outdoors", + -11.04782772064209 + ], + [ + "▁Maryland", + -11.04807949066162 + ], + [ + "▁adequate", + -11.04818630218506 + ], + [ + "▁worker", + -11.048247337341309 + ], + [ + "▁followers", + -11.04834270477295 + ], + [ + "2)", + -11.048683166503906 + ], + [ + "▁Interior", + -11.048717498779297 + ], + [ + "▁#1", + -11.048922538757324 + ], + [ + "▁renowned", + -11.049420356750488 + ], + [ + "▁Name", + -11.049842834472656 + ], + [ + "▁dealer", + -11.050044059753418 + ], + [ + "▁magical", + -11.05014991760254 + ], + [ + "▁automated", + -11.05035400390625 + ], + [ + "▁suits", + -11.050538063049316 + ], + [ + "▁nail", + -11.050682067871094 + ], + [ + "▁superb", + -11.0507230758667 + ], + [ + "▁prominent", + -11.050761222839355 + ], + [ + "▁fancy", + -11.050850868225098 + ], + [ + "▁coaches", + -11.050938606262209 + ], + [ + "▁grinding", + -11.051026344299316 + ], + [ + "▁Li", + -11.051095962524414 + ], + [ + "▁Application", + -11.051605224609377 + ], + [ + "wide", + -11.05212688446045 + ], + [ + "▁fluid", + -11.052227020263672 + ], + [ + "▁principle", + -11.05268669128418 + ], + [ + "▁flavors", + -11.05317211151123 + ], + [ + "ray", + -11.05351734161377 + ], + [ + "being", + -11.053894996643066 + ], + [ + "▁forecast", + -11.05398178100586 + ], + [ + "▁delighted", + -11.054118156433104 + ], + [ + "▁20%", + -11.054986953735352 + ], + [ + "▁brick", + -11.055179595947266 + ], + [ + "▁User", + -11.055327415466309 + ], + [ + "▁mechanism", + -11.055912017822266 + ], + [ + "▁emissions", + -11.055926322937012 + ], + [ + "▁38", + -11.056224822998049 + ], + [ + "▁duties", + -11.056632995605469 + ], + [ + "▁shoe", + -11.056668281555176 + ], + [ + "▁loaded", + -11.057127952575684 + ], + [ + "▁1990", + -11.05726432800293 + ], + [ + "▁tension", + -11.05759620666504 + ], + [ + "▁Castle", + -11.057741165161133 + ], + [ + "▁Kansas", + -11.057751655578612 + ], + [ + "▁wherever", + -11.057894706726074 + ], + [ + "▁labels", + -11.058138847351074 + ], + [ + "▁calories", + -11.05903434753418 + ], + [ + "▁contributed", + -11.059122085571287 + ], + [ + "▁fitting", + -11.059252738952637 + ], + [ + "▁cart", + -11.059293746948242 + ], + [ + "▁Mc", + -11.059444427490234 + ], + [ + "im", + -11.059523582458496 + ], + [ + "▁origin", + -11.060018539428713 + ], + [ + "▁64", + -11.060052871704102 + ], + [ + "▁Fast", + -11.060310363769531 + ], + [ + "▁shipped", + -11.060355186462402 + ], + [ + "▁shortly", + -11.060829162597656 + ], + [ + "▁42", + -11.060887336730955 + ], + [ + "▁overseas", + -11.0609769821167 + ], + [ + "she", + -11.061016082763672 + ], + [ + "▁Within", + -11.061145782470703 + ], + [ + "▁Bedroom", + -11.061232566833496 + ], + [ + "▁Furthermore", + -11.061277389526367 + ], + [ + "▁survival", + -11.06166648864746 + ], + [ + "▁downloaded", + 
-11.0620756149292 + ], + [ + "▁vitamin", + -11.063050270080566 + ], + [ + "▁Equipment", + -11.063051223754885 + ], + [ + "▁Bio", + -11.06312370300293 + ], + [ + "▁fake", + -11.063359260559082 + ], + [ + "▁secured", + -11.063490867614746 + ], + [ + "Or", + -11.063677787780762 + ], + [ + "▁delete", + -11.06382942199707 + ], + [ + "▁2004", + -11.064552307128906 + ], + [ + "14", + -11.064720153808594 + ], + [ + "▁supplement", + -11.06480598449707 + ], + [ + "▁shock", + -11.064924240112305 + ], + [ + "▁extract", + -11.065260887145996 + ], + [ + "▁lamp", + -11.065403938293455 + ], + [ + "▁consultant", + -11.065570831298828 + ], + [ + "▁Bell", + -11.066137313842772 + ], + [ + "View", + -11.066208839416504 + ], + [ + "▁developments", + -11.066840171813965 + ], + [ + "▁fault", + -11.067411422729492 + ], + [ + "▁complaint", + -11.067560195922852 + ], + [ + "▁rescue", + -11.067825317382812 + ], + [ + "light", + -11.068426132202148 + ], + [ + "▁fridge", + -11.068758010864258 + ], + [ + "▁regulation", + -11.068971633911133 + ], + [ + "▁Dark", + -11.069071769714355 + ], + [ + "▁fulfill", + -11.06924057006836 + ], + [ + "▁treats", + -11.070001602172852 + ], + [ + "▁carrier", + -11.07014274597168 + ], + [ + "▁venues", + -11.070192337036133 + ], + [ + "▁combines", + -11.070330619812012 + ], + [ + "▁bottles", + -11.070427894592283 + ], + [ + "▁entrepreneurs", + -11.070621490478516 + ], + [ + "▁checks", + -11.071619987487791 + ], + [ + "▁annually", + -11.071700096130373 + ], + [ + "▁pregnant", + -11.071755409240724 + ], + [ + "▁enabling", + -11.071942329406738 + ], + [ + "▁Ph", + -11.071988105773926 + ], + [ + "▁variable", + -11.072317123413086 + ], + [ + "-2", + -11.0726318359375 + ], + [ + "▁killing", + -11.072674751281738 + ], + [ + "▁investigate", + -11.072869300842283 + ], + [ + "▁Tri", + -11.07316780090332 + ], + [ + "▁newsletter", + -11.073232650756836 + ], + [ + "▁dose", + -11.07328987121582 + ], + [ + "▁Sciences", + -11.073375701904297 + ], + [ + "▁processor", + -11.073801040649414 + ], + [ + "▁marine", + -11.074077606201172 + ], + [ + "water", + -11.074884414672852 + ], + [ + "▁funded", + -11.07504940032959 + ], + [ + "▁Indiana", + -11.075121879577637 + ], + [ + "▁Mass", + -11.07524299621582 + ], + [ + "▁arrangement", + -11.07535457611084 + ], + [ + "▁bay", + -11.07544231414795 + ], + [ + "▁hiking", + -11.07545566558838 + ], + [ + "▁implementing", + -11.076641082763672 + ], + [ + "▁occasionally", + -11.077219009399414 + ], + [ + "ee", + -11.077594757080078 + ], + [ + "▁cookie", + -11.077710151672363 + ], + [ + "▁passenger", + -11.07783031463623 + ], + [ + "▁charging", + -11.078354835510254 + ], + [ + "▁subsequent", + -11.07852268218994 + ], + [ + "▁2-", + -11.07861328125 + ], + [ + "▁minds", + -11.078882217407228 + ], + [ + "▁optional", + -11.079642295837402 + ], + [ + "▁gate", + -11.079822540283203 + ], + [ + "▁churches", + -11.07984447479248 + ], + [ + "▁routes", + -11.080065727233888 + ], + [ + "▁producers", + -11.080706596374512 + ], + [ + "▁Stephen", + -11.080899238586426 + ], + [ + "▁handled", + -11.081116676330566 + ], + [ + "▁Commercial", + -11.081158638000488 + ], + [ + "▁dialogue", + -11.081193923950195 + ], + [ + "▁organize", + -11.081546783447266 + ], + [ + "▁participated", + -11.08173370361328 + ], + [ + "▁sits", + -11.082076072692873 + ], + [ + "▁transferred", + -11.082159996032717 + ], + [ + "▁indicates", + -11.082165718078612 + ], + [ + "▁DJ", + -11.082232475280762 + ], + [ + "▁gently", + -11.082568168640137 + ], + [ + "▁agenda", + -11.083311080932615 + ], + [ + "▁profiles", + 
-11.083324432373049 + ], + [ + "▁Hills", + -11.083805084228516 + ], + [ + "▁Map", + -11.083834648132324 + ], + [ + "▁retired", + -11.083970069885254 + ], + [ + "▁Resort", + -11.084110260009766 + ], + [ + "▁Prince", + -11.08417797088623 + ], + [ + "▁subtle", + -11.084269523620604 + ], + [ + "▁nations", + -11.08436107635498 + ], + [ + "▁brothers", + -11.08446216583252 + ], + [ + "▁roofing", + -11.08452320098877 + ], + [ + "▁Kit", + -11.0850248336792 + ], + [ + "▁spectrum", + -11.085407257080078 + ], + [ + "▁Things", + -11.085466384887695 + ], + [ + "▁bass", + -11.0855073928833 + ], + [ + "▁Performance", + -11.08553981781006 + ], + [ + "▁physician", + -11.085843086242676 + ], + [ + "Learn", + -11.086023330688477 + ], + [ + "▁Wild", + -11.087263107299805 + ], + [ + "▁pdf", + -11.087369918823242 + ], + [ + "▁curious", + -11.087451934814451 + ], + [ + "▁wellness", + -11.087496757507324 + ], + [ + "Will", + -11.087592124938965 + ], + [ + "▁Democratic", + -11.08803653717041 + ], + [ + "▁threats", + -11.088144302368164 + ], + [ + "▁potatoes", + -11.08823299407959 + ], + [ + "such", + -11.088335990905762 + ], + [ + "ary", + -11.088557243347168 + ], + [ + "▁Elizabeth", + -11.088753700256348 + ], + [ + "▁costly", + -11.088757514953612 + ], + [ + "▁effectiveness", + -11.08893585205078 + ], + [ + "▁Massachusetts", + -11.089225769042969 + ], + [ + "▁Di", + -11.08994483947754 + ], + [ + "▁loads", + -11.089970588684082 + ], + [ + "▁regards", + -11.090167045593262 + ], + [ + "▁differently", + -11.090301513671877 + ], + [ + "▁immigration", + -11.090805053710938 + ], + [ + "▁virus", + -11.091078758239746 + ], + [ + "▁Body", + -11.091272354125977 + ], + [ + "▁filing", + -11.091365814208984 + ], + [ + "▁Child", + -11.09139919281006 + ], + [ + "▁odd", + -11.09143352508545 + ], + [ + "▁franchise", + -11.09171199798584 + ], + [ + "▁Side", + -11.092111587524414 + ], + [ + "▁evolution", + -11.09256076812744 + ], + [ + "▁homeowners", + -11.092695236206056 + ], + [ + "▁Championship", + -11.09300136566162 + ], + [ + "▁pushed", + -11.093238830566406 + ], + [ + "▁essays", + -11.093608856201172 + ], + [ + "▁tender", + -11.093621253967283 + ], + [ + "▁camping", + -11.093754768371582 + ], + [ + "▁realistic", + -11.093783378601074 + ], + [ + "Having", + -11.09425163269043 + ], + [ + "▁Economic", + -11.094756126403809 + ], + [ + "▁Atlantic", + -11.094768524169922 + ], + [ + "▁cabin", + -11.095293045043944 + ], + [ + "▁grid", + -11.095372200012209 + ], + [ + "▁Win", + -11.09563446044922 + ], + [ + "▁hired", + -11.095706939697266 + ], + [ + "▁Korean", + -11.095746040344238 + ], + [ + "ise", + -11.095866203308104 + ], + [ + "▁patch", + -11.096165657043455 + ], + [ + "▁analytics", + -11.09632396697998 + ], + [ + "▁Trail", + -11.096793174743652 + ], + [ + "party", + -11.096809387207031 + ], + [ + "▁settle", + -11.096832275390623 + ], + [ + "▁frozen", + -11.096929550170898 + ], + [ + "▁hill", + -11.097490310668944 + ], + [ + "▁plates", + -11.097498893737791 + ], + [ + "va", + -11.097692489624023 + ], + [ + "▁bearing", + -11.097844123840332 + ], + [ + "▁Has", + -11.097856521606444 + ], + [ + "▁thesis", + -11.098381996154783 + ], + [ + "nt", + -11.09850025177002 + ], + [ + "▁measured", + -11.098581314086914 + ], + [ + "▁Season", + -11.09868049621582 + ], + [ + "▁Olympic", + -11.099294662475586 + ], + [ + "▁moon", + -11.099583625793455 + ], + [ + "▁trigger", + -11.09975528717041 + ], + [ + "▁Harry", + -11.099820137023926 + ], + [ + "▁basement", + -11.100093841552734 + ], + [ + "▁2003", + -11.100131034851074 + ], + [ + "▁lounge", + 
-11.100190162658691 + ], + [ + "▁Inter", + -11.100411415100098 + ], + [ + "▁Us", + -11.100439071655272 + ], + [ + "▁Create", + -11.100515365600586 + ], + [ + "▁shell", + -11.100759506225586 + ], + [ + "▁attorneys", + -11.100987434387209 + ], + [ + "▁actors", + -11.101577758789062 + ], + [ + "▁trick", + -11.101731300354004 + ], + [ + "▁login", + -11.101761817932127 + ], + [ + "▁Dave", + -11.101935386657717 + ], + [ + "▁settled", + -11.102031707763672 + ], + [ + "▁airline", + -11.102185249328612 + ], + [ + "▁roughly", + -11.10261344909668 + ], + [ + "▁disk", + -11.102636337280272 + ], + [ + "60", + -11.10267448425293 + ], + [ + "▁Wilson", + -11.103388786315918 + ], + [ + "▁automation", + -11.103477478027344 + ], + [ + "▁backyard", + -11.10378646850586 + ], + [ + "▁Below", + -11.103843688964844 + ], + [ + "▁poetry", + -11.103941917419434 + ], + [ + "▁Galaxy", + -11.103947639465332 + ], + [ + "▁mixing", + -11.104347229003906 + ], + [ + "▁specialty", + -11.104537010192873 + ], + [ + "▁Story", + -11.104724884033203 + ], + [ + "▁hitting", + -11.104948043823242 + ], + [ + "ti", + -11.105085372924805 + ], + [ + "ten", + -11.105148315429688 + ], + [ + "▁DIY", + -11.105386734008787 + ], + [ + "▁unlikely", + -11.105557441711426 + ], + [ + "▁(2", + -11.10560703277588 + ], + [ + "▁governments", + -11.106086730957031 + ], + [ + "▁(“", + -11.106167793273926 + ], + [ + "ised", + -11.10618019104004 + ], + [ + "▁stopping", + -11.10623264312744 + ], + [ + "▁$2", + -11.106423377990724 + ], + [ + "▁affects", + -11.10642910003662 + ], + [ + "ul", + -11.106582641601562 + ], + [ + "▁Books", + -11.10683250427246 + ], + [ + "▁ISO", + -11.106986045837402 + ], + [ + "top", + -11.107182502746582 + ], + [ + "▁measurement", + -11.107466697692873 + ], + [ + "Both", + -11.10751724243164 + ], + [ + "▁Girl", + -11.107743263244627 + ], + [ + "▁reduces", + -11.107787132263184 + ], + [ + "▁Enterprise", + -11.107794761657717 + ], + [ + "▁scoring", + -11.107806205749512 + ], + [ + "▁Nations", + -11.10782241821289 + ], + [ + "▁filters", + -11.107913970947266 + ], + [ + "▁Springs", + -11.107956886291504 + ], + [ + "high", + -11.10819149017334 + ], + [ + "▁territory", + -11.108484268188477 + ], + [ + "▁stakeholders", + -11.108625411987305 + ], + [ + "just", + -11.108725547790527 + ], + [ + "▁dimensions", + -11.108832359313965 + ], + [ + "▁sum", + -11.108868598937988 + ], + [ + "▁disability", + -11.108953475952148 + ], + [ + "▁guided", + -11.10909938812256 + ], + [ + "▁difficulties", + -11.109400749206545 + ], + [ + "▁underlying", + -11.109893798828123 + ], + [ + "▁sustainability", + -11.109940528869627 + ], + [ + "▁fleet", + -11.109970092773438 + ], + [ + "▁zip", + -11.10998249053955 + ], + [ + "service", + -11.11000633239746 + ], + [ + "▁races", + -11.110121726989746 + ], + [ + "▁leaf", + -11.11026096343994 + ], + [ + "sen", + -11.110271453857422 + ], + [ + "▁Cancer", + -11.110437393188477 + ], + [ + "▁import", + -11.110465049743652 + ], + [ + "▁tourists", + -11.110535621643066 + ], + [ + "▁maker", + -11.11054515838623 + ], + [ + "▁hook", + -11.110703468322754 + ], + [ + "▁collective", + -11.110788345336914 + ], + [ + "▁composed", + -11.110799789428713 + ], + [ + "▁achievement", + -11.11091423034668 + ], + [ + "▁associate", + -11.111041069030762 + ], + [ + "▁invitation", + -11.111260414123535 + ], + [ + "▁ladies", + -11.111696243286133 + ], + [ + "▁alarm", + -11.11172580718994 + ], + [ + "▁Nick", + -11.111918449401855 + ], + [ + "▁Job", + -11.113000869750977 + ], + [ + "▁scratch", + -11.113202095031738 + ], + [ + "▁engineer", + 
-11.11324977874756 + ], + [ + "▁accomplished", + -11.113314628601074 + ], + [ + "▁analyze", + -11.11346435546875 + ], + [ + "▁sequence", + -11.113505363464355 + ], + [ + "▁Ka", + -11.113850593566896 + ], + [ + "▁Down", + -11.11388111114502 + ], + [ + "▁murder", + -11.114405632019045 + ], + [ + "▁1980", + -11.114407539367676 + ], + [ + "sh", + -11.114538192749023 + ], + [ + "▁offerings", + -11.11471176147461 + ], + [ + "▁suited", + -11.115344047546388 + ], + [ + "▁climb", + -11.115781784057615 + ], + [ + "▁witness", + -11.115979194641112 + ], + [ + "▁surprising", + -11.116040229797363 + ], + [ + "▁voted", + -11.116290092468262 + ], + [ + "▁profession", + -11.116579055786133 + ], + [ + "8.", + -11.116673469543455 + ], + [ + "▁gray", + -11.11676025390625 + ], + [ + "▁2012,", + -11.116947174072266 + ], + [ + "▁slip", + -11.11704158782959 + ], + [ + "▁alternatives", + -11.117141723632812 + ], + [ + "▁SO", + -11.117538452148438 + ], + [ + "▁drops", + -11.117626190185549 + ], + [ + "▁knife", + -11.118088722229004 + ], + [ + "▁designated", + -11.118107795715332 + ], + [ + "▁diameter", + -11.11816692352295 + ], + [ + "▁sing", + -11.118203163146973 + ], + [ + "▁Self", + -11.118328094482422 + ], + [ + "▁structural", + -11.118596076965332 + ], + [ + "▁disorders", + -11.11877155303955 + ], + [ + "▁min", + -11.119064331054688 + ], + [ + "▁aggressive", + -11.119065284729004 + ], + [ + "▁accompanied", + -11.119359970092772 + ], + [ + "let", + -11.119467735290527 + ], + [ + "▁concentration", + -11.119477272033691 + ], + [ + "▁imagination", + -11.119491577148438 + ], + [ + "▁Wars", + -11.119510650634766 + ], + [ + "▁Jeff", + -11.119580268859863 + ], + [ + "▁decorative", + -11.119619369506836 + ], + [ + "▁auction", + -11.11971950531006 + ], + [ + "▁pie", + -11.120132446289062 + ], + [ + "http", + -11.12027072906494 + ], + [ + "▁nicely", + -11.120278358459473 + ], + [ + "▁directors", + -11.120530128479004 + ], + [ + "▁tub", + -11.120564460754396 + ], + [ + "ons", + -11.120667457580566 + ], + [ + "▁37", + -11.120814323425291 + ], + [ + "▁narrative", + -11.121297836303713 + ], + [ + "▁Pan", + -11.12197208404541 + ], + [ + "▁solely", + -11.122103691101074 + ], + [ + "▁enjoys", + -11.122109413146973 + ], + [ + "▁GPS", + -11.122178077697754 + ], + [ + "▁breathing", + -11.122197151184082 + ], + [ + "▁feeding", + -11.122262954711914 + ], + [ + "▁identifying", + -11.122286796569824 + ], + [ + "Pro", + -11.122321128845217 + ], + [ + "▁Ask", + -11.122668266296388 + ], + [ + "▁whereas", + -11.122706413269045 + ], + [ + "▁Berlin", + -11.122718811035156 + ], + [ + "▁celebrating", + -11.122762680053713 + ], + [ + "▁Study", + -11.122919082641602 + ], + [ + "▁demonstrated", + -11.12314510345459 + ], + [ + "op", + -11.123242378234863 + ], + [ + "▁replacing", + -11.123465538024902 + ], + [ + "▁BC", + -11.123649597167969 + ], + [ + "▁tower", + -11.123920440673828 + ], + [ + "▁canvas", + -11.12428379058838 + ], + [ + "▁grocery", + -11.124370574951172 + ], + [ + "▁backed", + -11.12445831298828 + ], + [ + "▁timing", + -11.124560356140137 + ], + [ + "▁smoking", + -11.124716758728027 + ], + [ + "▁Course", + -11.12502670288086 + ], + [ + "▁varied", + -11.125093460083008 + ], + [ + "▁disorder", + -11.125580787658691 + ], + [ + "▁Net", + -11.125689506530762 + ], + [ + "ba", + -11.12578010559082 + ], + [ + "▁loud", + -11.126102447509766 + ], + [ + "our", + -11.126426696777344 + ], + [ + "air", + -11.126439094543455 + ], + [ + "▁Storage", + -11.126461029052734 + ], + [ + "▁decorating", + -11.12662410736084 + ], + [ + "▁preferences", + 
-11.12667179107666 + ], + [ + "▁Dutch", + -11.126821517944336 + ], + [ + "▁trim", + -11.126898765563965 + ], + [ + "▁broadcast", + -11.127206802368164 + ], + [ + "▁Stock", + -11.127650260925291 + ], + [ + "▁internationally", + -11.128177642822266 + ], + [ + "▁guilty", + -11.12821102142334 + ], + [ + "▁laugh", + -11.128808975219728 + ], + [ + "▁01", + -11.128921508789062 + ], + [ + "▁tablets", + -11.129424095153809 + ], + [ + "▁explanation", + -11.12958526611328 + ], + [ + "▁weird", + -11.129966735839844 + ], + [ + "only", + -11.12996768951416 + ], + [ + "▁comply", + -11.13001823425293 + ], + [ + "▁tasty", + -11.130194664001465 + ], + [ + "▁destinations", + -11.130399703979492 + ], + [ + "▁addiction", + -11.13054370880127 + ], + [ + "▁exploration", + -11.13077449798584 + ], + [ + "▁Ave", + -11.131095886230469 + ], + [ + "▁ban", + -11.131295204162598 + ], + [ + "▁alert", + -11.131878852844238 + ], + [ + "pa", + -11.132132530212402 + ], + [ + "▁grain", + -11.13233470916748 + ], + [ + "▁poker", + -11.13257122039795 + ], + [ + "▁contacts", + -11.133031845092772 + ], + [ + "▁shades", + -11.133075714111328 + ], + [ + "▁utilizing", + -11.133231163024902 + ], + [ + "▁coupons", + -11.133583068847656 + ], + [ + "▁exhibit", + -11.133769035339355 + ], + [ + "▁architectural", + -11.133916854858398 + ], + [ + "▁Follow", + -11.134041786193848 + ], + [ + "ol", + -11.134270668029783 + ], + [ + "ki", + -11.134742736816406 + ], + [ + "▁dressing", + -11.134861946105955 + ], + [ + "▁$100", + -11.13494873046875 + ], + [ + "▁blow", + -11.135087013244627 + ], + [ + "▁execution", + -11.135169982910156 + ], + [ + "▁qualities", + -11.13534164428711 + ], + [ + "▁Iraq", + -11.135435104370115 + ], + [ + "▁NEW", + -11.135998725891112 + ], + [ + "-12", + -11.13622760772705 + ], + [ + "▁confirmation", + -11.136451721191406 + ], + [ + "▁betting", + -11.136850357055664 + ], + [ + "▁promises", + -11.13689136505127 + ], + [ + "ER", + -11.136937141418455 + ], + [ + "ny", + -11.137093544006348 + ], + [ + "▁Homes", + -11.137096405029297 + ], + [ + "▁Tony", + -11.137537956237791 + ], + [ + "who", + -11.137885093688965 + ], + [ + "▁unlimited", + -11.138010025024414 + ], + [ + "▁purple", + -11.138014793395996 + ], + [ + "▁Jason", + -11.138038635253906 + ], + [ + "Contact", + -11.138118743896484 + ], + [ + "hi", + -11.138527870178224 + ], + [ + "▁soccer", + -11.138594627380373 + ], + [ + "▁climbing", + -11.138872146606444 + ], + [ + "▁nervous", + -11.139034271240234 + ], + [ + "▁intention", + -11.139092445373535 + ], + [ + "▁PhD", + -11.139345169067385 + ], + [ + "▁collecting", + -11.139528274536133 + ], + [ + "▁colored", + -11.139616012573242 + ], + [ + "▁versus", + -11.139755249023438 + ], + [ + "▁marketplace", + -11.13976764678955 + ], + [ + "▁dependent", + -11.139902114868164 + ], + [ + "▁commit", + -11.140286445617676 + ], + [ + "ous", + -11.14034366607666 + ], + [ + "▁enemy", + -11.140361785888672 + ], + [ + "▁2010.", + -11.140512466430664 + ], + [ + "▁promotional", + -11.140705108642578 + ], + [ + "▁charm", + -11.140741348266602 + ], + [ + "▁bite", + -11.140926361083984 + ], + [ + "▁mystery", + -11.141013145446776 + ], + [ + "▁Chi", + -11.14106273651123 + ], + [ + "▁carries", + -11.141151428222656 + ], + [ + "▁vacuum", + -11.141486167907717 + ], + [ + "▁Football", + -11.141637802124023 + ], + [ + "▁automotive", + -11.14254379272461 + ], + [ + "▁swim", + -11.142751693725586 + ], + [ + "wa", + -11.142999649047852 + ], + [ + "▁consult", + -11.143068313598633 + ], + [ + "▁nuts", + -11.14332389831543 + ], + [ + "▁blind", + 
-11.144067764282228 + ], + [ + "▁Hand", + -11.144125938415527 + ], + [ + "▁audiences", + -11.144192695617676 + ], + [ + "▁medications", + -11.144390106201172 + ], + [ + "▁hadn", + -11.14463996887207 + ], + [ + "▁theater", + -11.144856452941896 + ], + [ + "▁seasonal", + -11.14492893218994 + ], + [ + "May", + -11.14543628692627 + ], + [ + "TV", + -11.145663261413574 + ], + [ + "▁tune", + -11.145825386047363 + ], + [ + "▁balls", + -11.145829200744627 + ], + [ + "▁branches", + -11.14592742919922 + ], + [ + "▁satisfy", + -11.146400451660156 + ], + [ + "▁treating", + -11.146479606628418 + ], + [ + "Using", + -11.146958351135254 + ], + [ + "owned", + -11.146960258483888 + ], + [ + "▁motivation", + -11.147393226623535 + ], + [ + "Despite", + -11.147825241088867 + ], + [ + "17", + -11.148120880126951 + ], + [ + "▁r", + -11.148178100585938 + ], + [ + "▁intervention", + -11.14842128753662 + ], + [ + "▁Ad", + -11.149191856384276 + ], + [ + "▁yield", + -11.149829864501951 + ], + [ + "▁width", + -11.149850845336914 + ], + [ + "▁neighbors", + -11.149868965148926 + ], + [ + "mi", + -11.150032997131348 + ], + [ + "ad", + -11.150168418884276 + ], + [ + "▁embrace", + -11.15025806427002 + ], + [ + "der", + -11.150548934936523 + ], + [ + "▁Style", + -11.150716781616213 + ], + [ + "▁harmful", + -11.15084457397461 + ], + [ + "▁Never", + -11.150960922241213 + ], + [ + "▁scholarship", + -11.151339530944824 + ], + [ + "sa", + -11.15159797668457 + ], + [ + "▁easiest", + -11.151665687561035 + ], + [ + "▁declared", + -11.151670455932615 + ], + [ + "▁Was", + -11.15170669555664 + ], + [ + "▁seal", + -11.151729583740234 + ], + [ + "Home", + -11.15184211730957 + ], + [ + "▁Sport", + -11.151932716369627 + ], + [ + "▁powers", + -11.15230655670166 + ], + [ + "▁disc", + -11.152502059936523 + ], + [ + "▁motivated", + -11.152737617492676 + ], + [ + "▁Electric", + -11.153246879577637 + ], + [ + "▁twist", + -11.153437614440918 + ], + [ + "▁mild", + -11.153482437133787 + ], + [ + "room", + -11.153569221496582 + ], + [ + "▁collaborative", + -11.153692245483398 + ], + [ + "▁interactions", + -11.153921127319336 + ], + [ + "▁rewards", + -11.154071807861328 + ], + [ + "▁agricultural", + -11.154114723205566 + ], + [ + "▁Core", + -11.15420913696289 + ], + [ + "▁integrate", + -11.15507698059082 + ], + [ + "▁penalty", + -11.15525245666504 + ], + [ + "▁Patrick", + -11.155384063720703 + ], + [ + "▁countless", + -11.15539264678955 + ], + [ + "▁ink", + -11.15554141998291 + ], + [ + "▁PR", + -11.155625343322754 + ], + [ + "▁remind", + -11.155889511108398 + ], + [ + "▁Pages", + -11.156050682067873 + ], + [ + "▁1,000", + -11.156280517578123 + ], + [ + "round", + -11.156331062316896 + ], + [ + "depth", + -11.156641006469728 + ], + [ + "▁Letter", + -11.156689643859863 + ], + [ + "▁Vancouver", + -11.15701389312744 + ], + [ + "▁Phone", + -11.157218933105469 + ], + [ + "▁universal", + -11.15732765197754 + ], + [ + "▁sponsored", + -11.158177375793455 + ], + [ + "▁technological", + -11.158531188964844 + ], + [ + "▁credits", + -11.15877914428711 + ], + [ + "▁Walk", + -11.15889835357666 + ], + [ + "▁precision", + -11.159249305725098 + ], + [ + "▁meditation", + -11.15937042236328 + ], + [ + "▁Want", + -11.159382820129396 + ], + [ + "”)", + -11.159404754638672 + ], + [ + "▁steady", + -11.159422874450684 + ], + [ + "▁portal", + -11.159424781799316 + ], + [ + "▁flood", + -11.159430503845217 + ], + [ + "▁Template", + -11.159504890441896 + ], + [ + "▁Case", + -11.159785270690918 + ], + [ + "▁Practice", + -11.160066604614258 + ], + [ + "▁accidents", + 
-11.160375595092772 + ], + [ + "▁containers", + -11.16043758392334 + ], + [ + "▁Challenge", + -11.16048812866211 + ], + [ + "▁Simple", + -11.160918235778809 + ], + [ + "▁Following", + -11.161035537719728 + ], + [ + "za", + -11.161327362060549 + ], + [ + "▁shine", + -11.161328315734863 + ], + [ + "▁vendor", + -11.161433219909668 + ], + [ + "▁jazz", + -11.161558151245115 + ], + [ + "▁publications", + -11.16240692138672 + ], + [ + "ja", + -11.162492752075195 + ], + [ + "▁Short", + -11.162662506103516 + ], + [ + "▁hardly", + -11.162710189819336 + ], + [ + "ak", + -11.16285800933838 + ], + [ + "▁Toyota", + -11.162900924682615 + ], + [ + "▁specially", + -11.163193702697754 + ], + [ + "▁drove", + -11.163517951965332 + ], + [ + "▁proceed", + -11.16362190246582 + ], + [ + "▁800", + -11.163652420043944 + ], + [ + "▁admin", + -11.163805961608888 + ], + [ + "▁Bring", + -11.163867950439451 + ], + [ + "▁adapt", + -11.163911819458008 + ], + [ + "▁plugin", + -11.163930892944336 + ], + [ + "▁Vietnam", + -11.164365768432615 + ], + [ + "Me", + -11.164977073669434 + ], + [ + "▁clip", + -11.16539192199707 + ], + [ + "▁Metal", + -11.165496826171877 + ], + [ + "Those", + -11.165515899658203 + ], + [ + "▁supporters", + -11.16578483581543 + ], + [ + "▁enormous", + -11.1660795211792 + ], + [ + "▁makers", + -11.166430473327637 + ], + [ + "▁tries", + -11.166728019714355 + ], + [ + "size", + -11.166921615600586 + ], + [ + "▁terrible", + -11.1669282913208 + ], + [ + "▁formats", + -11.16720485687256 + ], + [ + "▁towns", + -11.167390823364258 + ], + [ + "▁verify", + -11.168328285217283 + ], + [ + "▁recall", + -11.16849422454834 + ], + [ + "▁laboratory", + -11.168810844421388 + ], + [ + "called", + -11.168902397155762 + ], + [ + "Call", + -11.169148445129396 + ], + [ + "▁pp", + -11.169228553771973 + ], + [ + "▁pour", + -11.169482231140137 + ], + [ + "▁hybrid", + -11.169629096984863 + ], + [ + "▁4-", + -11.169722557067873 + ], + [ + "▁tags", + -11.169751167297363 + ], + [ + "▁junior", + -11.16983699798584 + ], + [ + "▁cultures", + -11.16996955871582 + ], + [ + "▁responded", + -11.170001029968262 + ], + [ + "▁expecting", + -11.170048713684082 + ], + [ + "▁heater", + -11.170557022094728 + ], + [ + "▁grants", + -11.170790672302246 + ], + [ + "▁voting", + -11.170825958251951 + ], + [ + "▁10-", + -11.171539306640623 + ], + [ + "▁SC", + -11.171557426452637 + ], + [ + "▁Alliance", + -11.171568870544434 + ], + [ + "▁champion", + -11.171698570251465 + ], + [ + "Very", + -11.17184829711914 + ], + [ + "▁screens", + -11.171897888183594 + ], + [ + "▁Premier", + -11.172121047973633 + ], + [ + "▁Early", + -11.17217254638672 + ], + [ + "▁affiliate", + -11.172245979309082 + ], + [ + "▁podcast", + -11.172332763671877 + ], + [ + "▁tomatoes", + -11.172510147094728 + ], + [ + "▁aging", + -11.172870635986328 + ], + [ + "▁Kelly", + -11.173073768615724 + ], + [ + "▁couples", + -11.173279762268066 + ], + [ + "▁reform", + -11.173601150512695 + ], + [ + "▁limitations", + -11.173785209655762 + ], + [ + "▁placeholder", + -11.17408275604248 + ], + [ + "Download", + -11.17447280883789 + ], + [ + "▁identification", + -11.174670219421388 + ], + [ + "▁Iowa", + -11.17474365234375 + ], + [ + "▁Dental", + -11.17492961883545 + ], + [ + "▁Lincoln", + -11.175527572631836 + ], + [ + "▁welcoming", + -11.175561904907228 + ], + [ + "care", + -11.175628662109377 + ], + [ + "▁tropical", + -11.175713539123535 + ], + [ + "▁detection", + -11.175743103027344 + ], + [ + "▁hence", + -11.17595672607422 + ], + [ + "▁shelter", + -11.176246643066406 + ], + [ + "Co", + 
-11.176321029663086 + ], + [ + "▁dentist", + -11.17720890045166 + ], + [ + "▁promised", + -11.177255630493164 + ], + [ + "▁writes", + -11.177286148071287 + ], + [ + "▁strip", + -11.177398681640623 + ], + [ + "▁Aug", + -11.177416801452637 + ], + [ + "▁chip", + -11.177470207214355 + ], + [ + "▁exceed", + -11.177626609802246 + ], + [ + "▁peaceful", + -11.177706718444824 + ], + [ + "▁disabled", + -11.178109169006348 + ], + [ + "▁foster", + -11.178112030029297 + ], + [ + "ney", + -11.178204536437988 + ], + [ + "▁Heritage", + -11.17823314666748 + ], + [ + "▁discipline", + -11.178391456604004 + ], + [ + "▁Age", + -11.17861557006836 + ], + [ + "ted", + -11.178940773010254 + ], + [ + "▁Anti", + -11.179061889648438 + ], + [ + "▁Share", + -11.179119110107422 + ], + [ + "▁Front", + -11.179206848144531 + ], + [ + "▁cloth", + -11.17920970916748 + ], + [ + "Which", + -11.179231643676758 + ], + [ + "▁candy", + -11.179299354553224 + ], + [ + "▁requiring", + -11.179482460021973 + ], + [ + "▁hike", + -11.179548263549805 + ], + [ + "▁Miss", + -11.179558753967283 + ], + [ + "▁Islamic", + -11.179896354675291 + ], + [ + "▁Much", + -11.180218696594238 + ], + [ + "ina", + -11.180238723754885 + ], + [ + "▁disappointed", + -11.180341720581056 + ], + [ + "▁Members", + -11.18039321899414 + ], + [ + "▁beloved", + -11.180691719055176 + ], + [ + "▁Coffee", + -11.18156623840332 + ], + [ + "▁Channel", + -11.182929039001465 + ], + [ + "▁dozen", + -11.182931900024414 + ], + [ + "▁cuisine", + -11.183127403259276 + ], + [ + "even", + -11.183310508728027 + ], + [ + "▁ships", + -11.183469772338867 + ], + [ + "▁Sign", + -11.183576583862305 + ], + [ + "step", + -11.183738708496094 + ], + [ + "▁Dream", + -11.183746337890623 + ], + [ + "▁begun", + -11.183963775634766 + ], + [ + "▁peer", + -11.184074401855469 + ], + [ + "▁Thanksgiving", + -11.184279441833496 + ], + [ + "▁demo", + -11.184425354003906 + ], + [ + "▁Dance", + -11.184435844421388 + ], + [ + "▁rolled", + -11.184496879577637 + ], + [ + "▁Eric", + -11.184582710266112 + ], + [ + "24", + -11.184664726257324 + ], + [ + "▁admitted", + -11.184669494628906 + ], + [ + "▁shorter", + -11.184690475463867 + ], + [ + "▁nationwide", + -11.18469524383545 + ], + [ + "▁Event", + -11.184816360473633 + ], + [ + "▁relate", + -11.18516445159912 + ], + [ + "game", + -11.185409545898438 + ], + [ + "ber", + -11.18554973602295 + ], + [ + "▁careers", + -11.185663223266602 + ], + [ + "▁NJ", + -11.186025619506836 + ], + [ + "▁acceptable", + -11.186241149902344 + ], + [ + "▁temple", + -11.186616897583008 + ], + [ + "▁trials", + -11.18675708770752 + ], + [ + "gen", + -11.186882019042969 + ], + [ + "Did", + -11.186952590942385 + ], + [ + "ance", + -11.187004089355469 + ], + [ + "▁Sony", + -11.187175750732422 + ], + [ + "▁absence", + -11.187262535095217 + ], + [ + "▁applicants", + -11.187732696533203 + ], + [ + "▁impacts", + -11.188007354736328 + ], + [ + "▁strict", + -11.188557624816896 + ], + [ + "▁Egypt", + -11.188801765441896 + ], + [ + "▁Email", + -11.188849449157717 + ], + [ + "▁Spa", + -11.18907070159912 + ], + [ + "▁distinctive", + -11.189858436584473 + ], + [ + "▁drag", + -11.190053939819336 + ], + [ + "▁entity", + -11.190174102783203 + ], + [ + "▁Simon", + -11.190436363220217 + ], + [ + "▁comedy", + -11.19065761566162 + ], + [ + "▁Always", + -11.190855979919434 + ], + [ + "▁Ha", + -11.190916061401367 + ], + [ + "▁refers", + -11.191580772399902 + ], + [ + "▁Pinterest", + -11.192030906677246 + ], + [ + "▁complaints", + -11.192254066467283 + ], + [ + "▁healthier", + -11.19225788116455 + ], + [ + 
"La", + -11.192623138427734 + ], + [ + "▁Dubai", + -11.19262409210205 + ], + [ + "▁39", + -11.192879676818848 + ], + [ + "▁Direct", + -11.193077087402344 + ], + [ + "▁soap", + -11.194035530090332 + ], + [ + "▁discussing", + -11.194666862487791 + ], + [ + "▁wrapped", + -11.195063591003418 + ], + [ + "▁Clean", + -11.195184707641602 + ], + [ + "▁adjustable", + -11.1953706741333 + ], + [ + "▁somebody", + -11.195887565612791 + ], + [ + "▁Stop", + -11.195958137512209 + ], + [ + "▁Future", + -11.196261405944824 + ], + [ + "▁neutral", + -11.196470260620115 + ], + [ + "-10", + -11.196795463562012 + ], + [ + "▁Nigeria", + -11.197211265563965 + ], + [ + "▁establishment", + -11.197349548339844 + ], + [ + "▁patent", + -11.19752311706543 + ], + [ + "▁continuously", + -11.197657585144045 + ], + [ + "▁Missouri", + -11.197922706604004 + ], + [ + "▁2002", + -11.19830322265625 + ], + [ + "PM", + -11.198318481445312 + ], + [ + "▁2019,", + -11.19862461090088 + ], + [ + "▁barely", + -11.198967933654783 + ], + [ + "▁earning", + -11.199528694152832 + ], + [ + "▁Stay", + -11.199817657470703 + ], + [ + "▁modules", + -11.199894905090332 + ], + [ + "▁Grant", + -11.199907302856444 + ], + [ + "les", + -11.200228691101074 + ], + [ + "▁individually", + -11.200638771057127 + ], + [ + "▁recycling", + -11.200806617736816 + ], + [ + "▁Oklahoma", + -11.200812339782717 + ], + [ + "▁Oak", + -11.200923919677734 + ], + [ + "▁recruitment", + -11.201245307922363 + ], + [ + "LY", + -11.20130443572998 + ], + [ + "▁slots", + -11.20150375366211 + ], + [ + "▁mask", + -11.201505661010742 + ], + [ + "▁boats", + -11.201549530029297 + ], + [ + "▁blessed", + -11.201552391052246 + ], + [ + "▁44", + -11.201619148254396 + ], + [ + "▁Lane", + -11.201622009277344 + ], + [ + "▁Clinton", + -11.201882362365724 + ], + [ + "▁Marine", + -11.201902389526367 + ], + [ + "▁Sir", + -11.201923370361328 + ], + [ + "▁Da", + -11.202054977416992 + ], + [ + "▁Round", + -11.202095985412598 + ], + [ + "▁instructor", + -11.20213508605957 + ], + [ + "▁Bureau", + -11.202192306518556 + ], + [ + "speed", + -11.202654838562012 + ], + [ + "▁documentary", + -11.20276927947998 + ], + [ + "red", + -11.202977180480955 + ], + [ + "▁reflects", + -11.203325271606444 + ], + [ + "▁Kentucky", + -11.203472137451172 + ], + [ + "▁invested", + -11.203622817993164 + ], + [ + "▁authorized", + -11.203749656677246 + ], + [ + "▁phrase", + -11.203773498535156 + ], + [ + "▁Affairs", + -11.204022407531738 + ], + [ + "▁liver", + -11.204030990600586 + ], + [ + "▁charming", + -11.204086303710938 + ], + [ + "▁overwhelming", + -11.204425811767578 + ], + [ + "▁Thailand", + -11.20443058013916 + ], + [ + "▁dedication", + -11.204560279846191 + ], + [ + "▁nurse", + -11.20462703704834 + ], + [ + "▁Writing", + -11.204682350158691 + ], + [ + "▁Reading", + -11.20485496520996 + ], + [ + "1)", + -11.2048921585083 + ], + [ + "▁eco", + -11.205092430114746 + ], + [ + "▁Tea", + -11.205340385437012 + ], + [ + "▁snack", + -11.205357551574709 + ], + [ + "▁attendance", + -11.205368041992188 + ], + [ + "▁maximize", + -11.205562591552734 + ], + [ + "▁painful", + -11.205747604370115 + ], + [ + "▁Highway", + -11.20582675933838 + ], + [ + "▁Mid", + -11.206015586853027 + ], + [ + "▁rocks", + -11.20630168914795 + ], + [ + "▁permitted", + -11.206361770629885 + ], + [ + "▁blogging", + -11.206683158874512 + ], + [ + "▁49", + -11.206697463989258 + ], + [ + "▁veteran", + -11.206747055053713 + ], + [ + "they", + -11.206816673278809 + ], + [ + "▁Five", + -11.207213401794434 + ], + [ + "▁Hair", + -11.207382202148438 + ], + [ + 
"▁Gift", + -11.207440376281738 + ], + [ + "▁Outdoor", + -11.207612991333008 + ], + [ + "▁Friends", + -11.207743644714355 + ], + [ + "▁Lewis", + -11.208074569702148 + ], + [ + "▁odds", + -11.20814323425293 + ], + [ + "??", + -11.208145141601562 + ], + [ + "▁varieties", + -11.20833969116211 + ], + [ + "▁Agreement", + -11.208352088928224 + ], + [ + "▁Ma", + -11.20848274230957 + ], + [ + "▁Currently", + -11.209248542785645 + ], + [ + "▁URL", + -11.209293365478516 + ], + [ + "▁Bo", + -11.20939826965332 + ], + [ + "▁converted", + -11.209430694580078 + ], + [ + "▁onion", + -11.209705352783203 + ], + [ + "▁startup", + -11.210280418395996 + ], + [ + "▁floating", + -11.210434913635254 + ], + [ + "▁confused", + -11.210490226745604 + ], + [ + "▁boasts", + -11.210718154907228 + ], + [ + "ga", + -11.211024284362791 + ], + [ + "▁hell", + -11.21105670928955 + ], + [ + "▁2.5", + -11.21108341217041 + ], + [ + "ji", + -11.211129188537598 + ], + [ + "▁BBC", + -11.211504936218262 + ], + [ + "▁ore", + -11.21194839477539 + ], + [ + "▁mature", + -11.21200180053711 + ], + [ + "▁paste", + -11.212044715881348 + ], + [ + "▁dried", + -11.21226692199707 + ], + [ + "▁lovers", + -11.212323188781738 + ], + [ + "▁delicate", + -11.21290397644043 + ], + [ + "▁Sub", + -11.21290683746338 + ], + [ + "▁adventures", + -11.212919235229492 + ], + [ + "▁hub", + -11.21318244934082 + ], + [ + "▁Muslim", + -11.21336269378662 + ], + [ + "▁Inn", + -11.213624954223633 + ], + [ + "▁premier", + -11.213655471801758 + ], + [ + "▁Complete", + -11.213659286499023 + ], + [ + "Man", + -11.213708877563477 + ], + [ + "▁du", + -11.213759422302246 + ], + [ + "▁VPN", + -11.21381378173828 + ], + [ + "▁Ho", + -11.213889122009276 + ], + [ + "▁forgotten", + -11.21403694152832 + ], + [ + "▁ranking", + -11.21418571472168 + ], + [ + "▁accused", + -11.214725494384766 + ], + [ + "Keep", + -11.21474552154541 + ], + [ + "▁oak", + -11.214826583862305 + ], + [ + "▁possess", + -11.214966773986816 + ], + [ + "▁signals", + -11.214988708496094 + ], + [ + "▁handful", + -11.215168952941896 + ], + [ + "▁centuries", + -11.21517848968506 + ], + [ + "▁burden", + -11.215354919433594 + ], + [ + "▁mineral", + -11.21544361114502 + ], + [ + "▁USD", + -11.215624809265137 + ], + [ + "▁teaches", + -11.216386795043944 + ], + [ + "▁prestigious", + -11.216450691223145 + ], + [ + "▁ordering", + -11.216464042663574 + ], + [ + "▁agreements", + -11.216604232788086 + ], + [ + "▁2011,", + -11.21725368499756 + ], + [ + "▁ignore", + -11.217326164245604 + ], + [ + "▁electronics", + -11.217569351196287 + ], + [ + "week", + -11.217761993408203 + ], + [ + "▁spouse", + -11.217818260192873 + ], + [ + "▁controller", + -11.217915534973145 + ], + [ + "▁considerable", + -11.218087196350098 + ], + [ + "▁Put", + -11.21886157989502 + ], + [ + "▁couch", + -11.218924522399902 + ], + [ + "▁customize", + -11.219013214111328 + ], + [ + "▁titled", + -11.219024658203123 + ], + [ + "▁destroyed", + -11.219088554382324 + ], + [ + "▁bet", + -11.219382286071776 + ], + [ + "▁console", + -11.220240592956545 + ], + [ + "▁Nothing", + -11.220401763916016 + ], + [ + "min", + -11.220552444458008 + ], + [ + "▁habit", + -11.220766067504885 + ], + [ + "▁soldiers", + -11.220787048339844 + ], + [ + "▁Motor", + -11.221284866333008 + ], + [ + "▁appreciation", + -11.221808433532717 + ], + [ + "▁cleaned", + -11.222044944763184 + ], + [ + "▁kits", + -11.222208976745604 + ], + [ + "▁Making", + -11.2222261428833 + ], + [ + "are", + -11.222679138183594 + ], + [ + "▁waters", + -11.222816467285156 + ], + [ + "▁continually", + 
-11.223153114318848 + ], + [ + "▁spoken", + -11.22354221343994 + ], + [ + "▁heated", + -11.223633766174316 + ], + [ + "▁demanding", + -11.223834991455078 + ], + [ + "▁navigation", + -11.224165916442873 + ], + [ + "▁computing", + -11.224188804626465 + ], + [ + "▁floral", + -11.224210739135742 + ], + [ + "▁Netherlands", + -11.224434852600098 + ], + [ + "▁rush", + -11.224791526794434 + ], + [ + "▁lighter", + -11.224937438964844 + ], + [ + "▁blame", + -11.225013732910156 + ], + [ + "▁favorites", + -11.2251615524292 + ], + [ + "▁Jo", + -11.225268363952637 + ], + [ + "▁ok", + -11.225302696228027 + ], + [ + "looking", + -11.22548484802246 + ], + [ + "ship", + -11.22591781616211 + ], + [ + "▁Mexican", + -11.226101875305176 + ], + [ + "▁Temple", + -11.226435661315918 + ], + [ + "▁tail", + -11.226560592651367 + ], + [ + "mar", + -11.226574897766112 + ], + [ + "▁additionally", + -11.226774215698242 + ], + [ + "▁slight", + -11.226780891418455 + ], + [ + "▁dresses", + -11.226859092712402 + ], + [ + "▁Cash", + -11.226938247680664 + ], + [ + "▁finishes", + -11.227025032043455 + ], + [ + "▁Records", + -11.227391242980955 + ], + [ + "▁Ice", + -11.22750473022461 + ], + [ + "▁fantasy", + -11.227788925170898 + ], + [ + "▁parallel", + -11.227933883666992 + ], + [ + "▁Besides", + -11.228553771972656 + ], + [ + "▁Source", + -11.228650093078612 + ], + [ + "▁visa", + -11.228687286376951 + ], + [ + "▁Floor", + -11.22869110107422 + ], + [ + "▁recommendation", + -11.2287015914917 + ], + [ + "▁separately", + -11.228741645812988 + ], + [ + "▁Restaurant", + -11.22883415222168 + ], + [ + "ns", + -11.228921890258787 + ], + [ + "▁holder", + -11.22895622253418 + ], + [ + "▁Lab", + -11.229021072387695 + ], + [ + "▁vegetable", + -11.229238510131836 + ], + [ + "▁intimate", + -11.22927474975586 + ], + [ + "▁measuring", + -11.229398727416992 + ], + [ + "▁lenses", + -11.229510307312012 + ], + [ + "▁speeds", + -11.229838371276855 + ], + [ + "▁pasta", + -11.229839324951172 + ], + [ + "▁stays", + -11.230119705200195 + ], + [ + "▁valued", + -11.23037815093994 + ], + [ + "▁Ten", + -11.23060703277588 + ], + [ + "▁Ann", + -11.23092269897461 + ], + [ + "▁tremendous", + -11.23105525970459 + ], + [ + "▁colleges", + -11.231485366821287 + ], + [ + "▁Bowl", + -11.231528282165527 + ], + [ + "▁4,", + -11.231799125671388 + ], + [ + "▁Morgan", + -11.23234748840332 + ], + [ + "▁hero", + -11.233009338378906 + ], + [ + "Up", + -11.233685493469238 + ], + [ + "▁Know", + -11.234282493591309 + ], + [ + "▁suspension", + -11.23428440093994 + ], + [ + "ard", + -11.23450756072998 + ], + [ + "▁facial", + -11.23454761505127 + ], + [ + "▁Islands", + -11.234872817993164 + ], + [ + "▁locked", + -11.23489475250244 + ], + [ + "bo", + -11.235177993774414 + ], + [ + "▁HERE", + -11.23521614074707 + ], + [ + "▁adorable", + -11.235981941223145 + ], + [ + "ster", + -11.236037254333496 + ], + [ + "▁transparent", + -11.236113548278809 + ], + [ + "▁moderate", + -11.236153602600098 + ], + [ + "▁incorporated", + -11.236276626586914 + ], + [ + "▁Navy", + -11.236312866210938 + ], + [ + "▁utilized", + -11.236748695373535 + ], + [ + "▁Description", + -11.236918449401855 + ], + [ + "▁Anderson", + -11.237592697143556 + ], + [ + "▁responsive", + -11.237767219543455 + ], + [ + "▁crafted", + -11.23790168762207 + ], + [ + "▁Content", + -11.238015174865724 + ], + [ + "▁terminal", + -11.238091468811035 + ], + [ + "▁crop", + -11.238327026367188 + ], + [ + "▁tear", + -11.238821029663086 + ], + [ + "▁cancel", + -11.23887538909912 + ], + [ + "▁del", + -11.2389554977417 + ], + [ + "▁stem", + 
-11.23898220062256 + ], + [ + "▁merchandise", + -11.239017486572266 + ], + [ + "▁trash", + -11.23948860168457 + ], + [ + "▁acceptance", + -11.239498138427734 + ], + [ + "▁Micro", + -11.23953628540039 + ], + [ + "▁prospects", + -11.239806175231934 + ], + [ + "▁Fresh", + -11.239855766296388 + ], + [ + "▁chef", + -11.23995304107666 + ], + [ + "▁grill", + -11.239986419677734 + ], + [ + "▁stroke", + -11.24022102355957 + ], + [ + "▁Manual", + -11.24049949645996 + ], + [ + "Her", + -11.240694046020508 + ], + [ + "▁investor", + -11.240951538085938 + ], + [ + "▁detect", + -11.240964889526367 + ], + [ + "▁intent", + -11.241569519042969 + ], + [ + "▁Single", + -11.24164581298828 + ], + [ + "▁stones", + -11.241662979125977 + ], + [ + "▁Sept", + -11.241727828979492 + ], + [ + "▁islands", + -11.241774559020996 + ], + [ + "19", + -11.241809844970703 + ], + [ + "▁gains", + -11.241912841796877 + ], + [ + "▁Scottish", + -11.241928100585938 + ], + [ + "▁5-", + -11.242545127868652 + ], + [ + "mg", + -11.242704391479492 + ], + [ + "▁mutual", + -11.24275016784668 + ], + [ + "▁observe", + -11.243139266967772 + ], + [ + "▁visibility", + -11.243348121643066 + ], + [ + "page", + -11.243363380432127 + ], + [ + "▁Fine", + -11.243438720703123 + ], + [ + "▁Allen", + -11.243685722351074 + ], + [ + "John", + -11.2438383102417 + ], + [ + "▁essence", + -11.24436092376709 + ], + [ + "▁measurements", + -11.2445068359375 + ], + [ + "▁complexity", + -11.244779586791992 + ], + [ + "▁donated", + -11.245012283325195 + ], + [ + "▁dessert", + -11.245230674743652 + ], + [ + "▁Chairman", + -11.245441436767578 + ], + [ + "▁taxi", + -11.24563217163086 + ], + [ + "▁desert", + -11.245633125305176 + ], + [ + "▁beliefs", + -11.245915412902832 + ], + [ + "▁province", + -11.245988845825195 + ], + [ + "▁marijuana", + -11.246193885803224 + ], + [ + "▁Step", + -11.246429443359377 + ], + [ + "▁Holiday", + -11.246440887451172 + ], + [ + "▁programmes", + -11.24650764465332 + ], + [ + "▁snacks", + -11.246651649475098 + ], + [ + "▁preventing", + -11.246971130371094 + ], + [ + "▁simultaneously", + -11.246992111206056 + ], + [ + "▁grows", + -11.24710178375244 + ], + [ + "▁Cambridge", + -11.247108459472656 + ], + [ + "▁presenting", + -11.24718189239502 + ], + [ + "▁RE", + -11.24722957611084 + ], + [ + "▁bathrooms", + -11.247427940368652 + ], + [ + "▁rug", + -11.247594833374023 + ], + [ + "▁instances", + -11.247798919677734 + ], + [ + "era", + -11.248164176940918 + ], + [ + "▁Malaysia", + -11.24826717376709 + ], + [ + "▁360", + -11.248337745666504 + ], + [ + "▁ears", + -11.24847412109375 + ], + [ + "▁Reserve", + -11.248619079589844 + ], + [ + "▁YOUR", + -11.24907684326172 + ], + [ + "▁nutrients", + -11.249505043029783 + ], + [ + "▁Companies", + -11.249663352966309 + ], + [ + "▁mobility", + -11.249737739562988 + ], + [ + "▁sensors", + -11.24984359741211 + ], + [ + "▁handles", + -11.25023365020752 + ], + [ + "▁possession", + -11.25034236907959 + ], + [ + "▁Denver", + -11.250351905822754 + ], + [ + "▁Brand", + -11.250370979309082 + ], + [ + "▁Falls", + -11.250712394714355 + ], + [ + "▁Metro", + -11.251017570495604 + ], + [ + "▁homemade", + -11.251507759094238 + ], + [ + "▁skip", + -11.251514434814451 + ], + [ + "▁comic", + -11.251765251159668 + ], + [ + "▁striking", + -11.25222396850586 + ], + [ + "good", + -11.252254486083984 + ], + [ + "▁Democrats", + -11.252418518066406 + ], + [ + "▁sees", + -11.252544403076172 + ], + [ + "▁Better", + -11.252649307250977 + ], + [ + "▁Prize", + -11.252663612365724 + ], + [ + "▁hunt", + -11.252673149108888 + ], + [ + 
"▁Bitcoin", + -11.252816200256348 + ], + [ + "▁par", + -11.252994537353516 + ], + [ + "▁crown", + -11.253190994262695 + ], + [ + "▁Fish", + -11.253304481506348 + ], + [ + "▁prospective", + -11.253365516662598 + ], + [ + "▁contacted", + -11.253377914428713 + ], + [ + "▁cups", + -11.253457069396973 + ], + [ + "ci", + -11.253463745117188 + ], + [ + "ology", + -11.253620147705078 + ], + [ + "▁manually", + -11.254755973815918 + ], + [ + "▁Jay", + -11.25482177734375 + ], + [ + "▁competing", + -11.254860877990724 + ], + [ + "▁semester", + -11.2551908493042 + ], + [ + "▁solving", + -11.255345344543455 + ], + [ + "▁Euro", + -11.25607204437256 + ], + [ + "▁Cut", + -11.256304740905762 + ], + [ + "▁BE", + -11.256564140319824 + ], + [ + "▁anticipated", + -11.256654739379885 + ], + [ + "▁modify", + -11.256668090820312 + ], + [ + "▁closest", + -11.25670337677002 + ], + [ + "▁supplements", + -11.256921768188477 + ], + [ + "▁Title", + -11.257071495056152 + ], + [ + "▁outer", + -11.257116317749023 + ], + [ + "▁LOVE", + -11.257265090942385 + ], + [ + "▁2001", + -11.257351875305176 + ], + [ + "▁shelves", + -11.257445335388184 + ], + [ + "▁cord", + -11.257649421691896 + ], + [ + "▁Dating", + -11.257841110229492 + ], + [ + "▁7,", + -11.25801944732666 + ], + [ + "▁boundaries", + -11.258027076721191 + ], + [ + "▁Coloring", + -11.258100509643556 + ], + [ + "▁crack", + -11.258172988891602 + ], + [ + "32", + -11.258460998535156 + ], + [ + "▁Cal", + -11.258512496948242 + ], + [ + "▁deciding", + -11.258551597595217 + ], + [ + "9.", + -11.25904941558838 + ], + [ + "▁notification", + -11.259764671325684 + ], + [ + "edu", + -11.259912490844728 + ], + [ + "▁Portland", + -11.260010719299316 + ], + [ + "▁350", + -11.260049819946287 + ], + [ + "▁booked", + -11.2601318359375 + ], + [ + "200", + -11.260149955749512 + ], + [ + "▁Tennessee", + -11.26101303100586 + ], + [ + "▁dive", + -11.261065483093262 + ], + [ + "▁lately", + -11.26128101348877 + ], + [ + "▁prizes", + -11.261454582214355 + ], + [ + "▁refreshing", + -11.261744499206545 + ], + [ + "kin", + -11.261746406555176 + ], + [ + "▁precisely", + -11.261797904968262 + ], + [ + "▁prompt", + -11.26181411743164 + ], + [ + "▁Son", + -11.261953353881836 + ], + [ + "▁defeat", + -11.26199722290039 + ], + [ + "▁herbs", + -11.262300491333008 + ], + [ + "▁pockets", + -11.262365341186523 + ], + [ + "▁agriculture", + -11.262408256530762 + ], + [ + "▁reveals", + -11.262411117553713 + ], + [ + "MP", + -11.262420654296877 + ], + [ + "▁promo", + -11.26345157623291 + ], + [ + "▁Schools", + -11.26351261138916 + ], + [ + "rich", + -11.263633728027344 + ], + [ + "▁thrown", + -11.263888359069824 + ], + [ + "▁Along", + -11.264140129089355 + ], + [ + "▁packing", + -11.264638900756836 + ], + [ + "▁Technical", + -11.264695167541504 + ], + [ + "▁homework", + -11.265316009521484 + ], + [ + "▁oxygen", + -11.265373229980469 + ], + [ + "▁libraries", + -11.265463829040527 + ], + [ + "▁Hard", + -11.265508651733398 + ], + [ + "▁intake", + -11.265650749206545 + ], + [ + "▁Among", + -11.265928268432615 + ], + [ + "▁submission", + -11.2667236328125 + ], + [ + "▁basket", + -11.266735076904297 + ], + [ + "▁Assembly", + -11.26705265045166 + ], + [ + "▁moral", + -11.26706600189209 + ], + [ + "▁ceramic", + -11.267884254455566 + ], + [ + "▁HTML", + -11.267935752868652 + ], + [ + "▁File", + -11.268205642700195 + ], + [ + "▁Palm", + -11.268220901489258 + ], + [ + "▁pants", + -11.26830005645752 + ], + [ + "▁Com", + -11.268412590026855 + ], + [ + "▁reminder", + -11.268428802490234 + ], + [ + "▁genre", + 
-11.268465042114258 + ], + [ + "▁aesthetic", + -11.268498420715332 + ], + [ + "▁Sri", + -11.268527030944824 + ], + [ + "▁Vol", + -11.2687349319458 + ], + [ + "▁Alabama", + -11.268900871276855 + ], + [ + "▁dirty", + -11.269441604614258 + ], + [ + "▁loading", + -11.269627571105955 + ], + [ + "▁premises", + -11.270187377929688 + ], + [ + "▁gaining", + -11.270223617553713 + ], + [ + "▁hassle", + -11.270346641540527 + ], + [ + "▁Detroit", + -11.270516395568848 + ], + [ + "▁Ken", + -11.27053928375244 + ], + [ + "▁intelligent", + -11.270706176757812 + ], + [ + "▁grades", + -11.270916938781738 + ], + [ + "▁appealing", + -11.271011352539062 + ], + [ + "▁accessed", + -11.271269798278809 + ], + [ + "▁traditions", + -11.271631240844728 + ], + [ + "▁genetic", + -11.271974563598633 + ], + [ + "▁dozens", + -11.272191047668455 + ], + [ + "▁supportive", + -11.27230167388916 + ], + [ + "▁votes", + -11.272377014160156 + ], + [ + "▁Girls", + -11.27239227294922 + ], + [ + "▁Peace", + -11.273226737976074 + ], + [ + "▁Attorney", + -11.27331256866455 + ], + [ + "▁crisp", + -11.273354530334473 + ], + [ + "▁pulling", + -11.27378273010254 + ], + [ + "po", + -11.274194717407228 + ], + [ + "▁machinery", + -11.27425479888916 + ], + [ + "▁publicly", + -11.274310111999512 + ], + [ + "▁CT", + -11.274443626403809 + ], + [ + "▁devoted", + -11.27480697631836 + ], + [ + "▁Utah", + -11.27511978149414 + ], + [ + "▁Te", + -11.275177001953123 + ], + [ + "▁proposals", + -11.27524471282959 + ], + [ + "▁Pass", + -11.27573299407959 + ], + [ + "▁poster", + -11.275936126708984 + ], + [ + "▁reflection", + -11.276187896728516 + ], + [ + "▁Mayor", + -11.276253700256348 + ], + [ + "▁Yoga", + -11.276611328125 + ], + [ + "▁tires", + -11.27665901184082 + ], + [ + "▁Communications", + -11.276729583740234 + ], + [ + "▁manufacture", + -11.276782035827637 + ], + [ + "▁Anthony", + -11.276869773864746 + ], + [ + "▁footage", + -11.276901245117188 + ], + [ + "▁sin", + -11.277612686157228 + ], + [ + "▁1970", + -11.277788162231444 + ], + [ + "▁addressing", + -11.278191566467283 + ], + [ + "▁Oxford", + -11.278745651245115 + ], + [ + "▁attendees", + -11.278759002685549 + ], + [ + "▁Turn", + -11.27903938293457 + ], + [ + "▁Saudi", + -11.27934741973877 + ], + [ + "▁Pet", + -11.279393196105955 + ], + [ + "▁determination", + -11.280098915100098 + ], + [ + "▁Legal", + -11.280213356018066 + ], + [ + "▁legendary", + -11.280296325683594 + ], + [ + "▁judgment", + -11.280298233032228 + ], + [ + "school", + -11.280717849731444 + ], + [ + "▁Account", + -11.280865669250488 + ], + [ + "▁Philippines", + -11.28100872039795 + ], + [ + "▁google", + -11.281048774719238 + ], + [ + "▁psychological", + -11.281240463256836 + ], + [ + "▁coastal", + -11.281317710876465 + ], + [ + "▁(1)", + -11.281627655029297 + ], + [ + "▁Rome", + -11.28203582763672 + ], + [ + "▁unfortunately", + -11.282201766967772 + ], + [ + "▁tire", + -11.282268524169922 + ], + [ + "▁protective", + -11.282617568969728 + ], + [ + "▁decoration", + -11.282649993896484 + ], + [ + "▁assisted", + -11.282992362976074 + ], + [ + "▁speaks", + -11.283041954040527 + ], + [ + "De", + -11.283276557922363 + ], + [ + "▁Works", + -11.283495903015137 + ], + [ + "▁donate", + -11.284010887145996 + ], + [ + "▁Tower", + -11.284043312072754 + ], + [ + "▁Meeting", + -11.284356117248535 + ], + [ + "▁corporations", + -11.28443717956543 + ], + [ + "▁41", + -11.284646034240724 + ], + [ + "▁Wine", + -11.285303115844728 + ], + [ + "view", + -11.285337448120115 + ], + [ + "▁Solar", + -11.285378456115724 + ], + [ + "right", + 
-11.285487174987791 + ], + [ + "▁Far", + -11.285545349121094 + ], + [ + "▁globally", + -11.285758018493652 + ], + [ + "▁Palace", + -11.285772323608398 + ], + [ + "ring", + -11.28618335723877 + ], + [ + "▁Engine", + -11.286269187927246 + ], + [ + "▁Java", + -11.28634548187256 + ], + [ + "▁Magic", + -11.286417007446287 + ], + [ + "▁durability", + -11.287091255187988 + ], + [ + "▁und", + -11.28718090057373 + ], + [ + "▁dressed", + -11.287796020507812 + ], + [ + "%", + -11.287930488586426 + ], + [ + "▁grip", + -11.288331031799316 + ], + [ + "▁denied", + -11.288732528686523 + ], + [ + "▁apparent", + -11.288761138916016 + ], + [ + "▁combining", + -11.288808822631836 + ], + [ + "▁coins", + -11.288898468017578 + ], + [ + "▁straightforward", + -11.289022445678713 + ], + [ + "▁progressive", + -11.289032936096191 + ], + [ + "▁Bad", + -11.289538383483888 + ], + [ + "▁printable", + -11.290011405944824 + ], + [ + "▁decorated", + -11.290021896362305 + ], + [ + "▁independence", + -11.290058135986328 + ], + [ + "▁AR", + -11.29006576538086 + ], + [ + "▁Talk", + -11.290728569030762 + ], + [ + "▁clarity", + -11.29092025756836 + ], + [ + "Time", + -11.291125297546388 + ], + [ + "▁complimentary", + -11.291237831115724 + ], + [ + "▁fallen", + -11.291486740112305 + ], + [ + "▁welcomed", + -11.291753768920898 + ], + [ + "▁alleged", + -11.292048454284668 + ], + [ + "▁batch", + -11.292451858520508 + ], + [ + "▁Na", + -11.292534828186035 + ], + [ + "▁Unlike", + -11.292764663696287 + ], + [ + "▁singles", + -11.29287338256836 + ], + [ + "▁Caribbean", + -11.293081283569336 + ], + [ + "▁arise", + -11.29372787475586 + ], + [ + "▁gradually", + -11.293733596801758 + ], + [ + "▁Link", + -11.293745994567873 + ], + [ + "▁recipient", + -11.293764114379885 + ], + [ + "▁PS", + -11.29385757446289 + ], + [ + "▁professionally", + -11.294082641601562 + ], + [ + "▁viewers", + -11.294214248657228 + ], + [ + "ger", + -11.294509887695312 + ], + [ + "▁2010,", + -11.29584503173828 + ], + [ + "▁Matthew", + -11.296656608581545 + ], + [ + "ins", + -11.296957015991213 + ], + [ + "▁variations", + -11.297208786010742 + ], + [ + "▁heaven", + -11.297388076782228 + ], + [ + "▁employ", + -11.29741096496582 + ], + [ + "▁Fashion", + -11.29742431640625 + ], + [ + "3)", + -11.297492027282717 + ], + [ + "▁lined", + -11.298070907592772 + ], + [ + "win", + -11.29808521270752 + ], + [ + "▁diagnosed", + -11.29809284210205 + ], + [ + "▁magnificent", + -11.298107147216797 + ], + [ + "▁boring", + -11.29872703552246 + ], + [ + "▁Syria", + -11.298788070678713 + ], + [ + "▁Honda", + -11.298911094665527 + ], + [ + "▁pollution", + -11.299015998840332 + ], + [ + "▁Quick", + -11.299205780029297 + ], + [ + "▁conferences", + -11.29949951171875 + ], + [ + "▁Israeli", + -11.299516677856444 + ], + [ + "▁pit", + -11.299525260925291 + ], + [ + "▁Gen", + -11.299707412719728 + ], + [ + "driven", + -11.299750328063965 + ], + [ + "▁Located", + -11.299817085266112 + ], + [ + "▁funeral", + -11.299851417541504 + ], + [ + "▁queries", + -11.300292015075684 + ], + [ + "▁True", + -11.300375938415527 + ], + [ + "isation", + -11.300821304321287 + ], + [ + "▁Christians", + -11.30100154876709 + ], + [ + "▁outlet", + -11.30128288269043 + ], + [ + "gov", + -11.301493644714355 + ], + [ + "▁Urban", + -11.301709175109863 + ], + [ + "▁onions", + -11.301714897155762 + ], + [ + "▁dear", + -11.301838874816896 + ], + [ + "▁Plans", + -11.30191707611084 + ], + [ + "▁episodes", + -11.302063941955566 + ], + [ + "▁Excel", + -11.30219268798828 + ], + [ + "▁contributing", + -11.302226066589355 + ], + [ + 
"▁figured", + -11.302268981933594 + ], + [ + "▁championship", + -11.302346229553224 + ], + [ + "▁weapon", + -11.302728652954102 + ], + [ + "▁blank", + -11.302967071533203 + ], + [ + "▁beneath", + -11.30361270904541 + ], + [ + "-4", + -11.303854942321776 + ], + [ + "▁Ali", + -11.303854942321776 + ], + [ + "▁fixtures", + -11.303863525390623 + ], + [ + "▁refrigerator", + -11.304655075073242 + ], + [ + "▁refused", + -11.304673194885254 + ], + [ + "▁Maine", + -11.304984092712402 + ], + [ + "▁Mission", + -11.305538177490234 + ], + [ + "▁nobody", + -11.30556869506836 + ], + [ + "▁influenced", + -11.305704116821287 + ], + [ + "▁tricks", + -11.306035041809082 + ], + [ + "▁tears", + -11.306193351745604 + ], + [ + "▁keywords", + -11.306207656860352 + ], + [ + "▁granite", + -11.306228637695312 + ], + [ + "▁density", + -11.306306838989258 + ], + [ + "ini", + -11.306512832641602 + ], + [ + "▁classical", + -11.3067045211792 + ], + [ + "▁43", + -11.30710792541504 + ], + [ + "▁argue", + -11.308453559875488 + ], + [ + "▁defensive", + -11.308799743652344 + ], + [ + "Star", + -11.308819770812988 + ], + [ + "▁litigation", + -11.308952331542969 + ], + [ + "▁Medicare", + -11.309112548828123 + ], + [ + "▁sixth", + -11.309216499328612 + ], + [ + "▁reminded", + -11.309746742248535 + ], + [ + "▁sunny", + -11.310380935668944 + ], + [ + "▁offense", + -11.310611724853516 + ], + [ + "▁logic", + -11.310833930969238 + ], + [ + "▁asks", + -11.311403274536133 + ], + [ + "▁partnerships", + -11.311448097229004 + ], + [ + "▁1960", + -11.311640739440918 + ], + [ + "▁Print", + -11.311762809753418 + ], + [ + "▁IL", + -11.311965942382812 + ], + [ + "▁excessive", + -11.311965942382812 + ], + [ + "▁adopt", + -11.312098503112791 + ], + [ + "▁Used", + -11.31248664855957 + ], + [ + "▁unnecessary", + -11.312541961669922 + ], + [ + "▁Greece", + -11.312844276428224 + ], + [ + "▁Foreign", + -11.312963485717772 + ], + [ + "pro", + -11.31302261352539 + ], + [ + "▁peers", + -11.31316089630127 + ], + [ + "▁intend", + -11.31322956085205 + ], + [ + "▁tale", + -11.313252449035645 + ], + [ + "▁nearest", + -11.313305854797363 + ], + [ + "▁valve", + -11.31350803375244 + ], + [ + "▁uk", + -11.313833236694336 + ], + [ + "▁ties", + -11.31424045562744 + ], + [ + "▁Exam", + -11.31425666809082 + ], + [ + "▁launching", + -11.314600944519045 + ], + [ + "▁lyrics", + -11.314682960510254 + ], + [ + "▁wing", + -11.314988136291504 + ], + [ + "▁pad", + -11.315055847167969 + ], + [ + "▁safer", + -11.315471649169922 + ], + [ + "▁Too", + -11.315515518188477 + ], + [ + "▁labour", + -11.315654754638672 + ], + [ + "▁compelling", + -11.315909385681152 + ], + [ + "lan", + -11.316303253173828 + ], + [ + "▁thermal", + -11.316476821899414 + ], + [ + "▁Panel", + -11.316526412963867 + ], + [ + "▁stack", + -11.31657886505127 + ], + [ + "cha", + -11.316579818725586 + ], + [ + "▁gambling", + -11.31667423248291 + ], + [ + "▁provisions", + -11.316868782043455 + ], + [ + "▁Certificate", + -11.316932678222656 + ], + [ + "Only", + -11.317108154296877 + ], + [ + "▁voices", + -11.317112922668455 + ], + [ + "▁transit", + -11.31726360321045 + ], + [ + "▁minimize", + -11.317296028137209 + ], + [ + "▁paths", + -11.317774772644045 + ], + [ + "▁disabilities", + -11.317874908447266 + ], + [ + "▁elderly", + -11.317962646484377 + ], + [ + "▁Analysis", + -11.31806468963623 + ], + [ + "▁Pop", + -11.318120002746582 + ], + [ + "▁pork", + -11.318130493164062 + ], + [ + "▁consistency", + -11.318157196044922 + ], + [ + "▁educate", + -11.318178176879885 + ], + [ + "▁protocol", + -11.31820297241211 + ], 
+ [ + "▁72", + -11.318307876586914 + ], + [ + "▁Speed", + -11.318408966064451 + ], + [ + "▁readily", + -11.318549156188965 + ], + [ + "▁bi", + -11.318671226501465 + ], + [ + "▁basics", + -11.318887710571287 + ], + [ + "▁vanilla", + -11.319012641906738 + ], + [ + "▁nonprofit", + -11.3190336227417 + ], + [ + "▁broker", + -11.31907558441162 + ], + [ + "▁bones", + -11.319180488586426 + ], + [ + "▁lawsuit", + -11.319339752197266 + ], + [ + "▁deaths", + -11.319376945495604 + ], + [ + "▁Innovation", + -11.319422721862791 + ], + [ + "▁Flash", + -11.31976318359375 + ], + [ + "▁Valentine", + -11.320432662963867 + ], + [ + "▁audit", + -11.320460319519045 + ], + [ + "▁priorities", + -11.320473670959473 + ], + [ + "▁optimization", + -11.320703506469728 + ], + [ + "▁tastes", + -11.321005821228027 + ], + [ + "▁Together", + -11.321145057678224 + ], + [ + "▁preference", + -11.321348190307615 + ], + [ + "▁obligation", + -11.321438789367676 + ], + [ + "▁Governor", + -11.321460723876951 + ], + [ + "▁cycling", + -11.32154655456543 + ], + [ + "▁districts", + -11.322760581970217 + ], + [ + "▁blocked", + -11.322938919067385 + ], + [ + "person", + -11.323153495788574 + ], + [ + "▁animation", + -11.323296546936035 + ], + [ + "▁accepting", + -11.323585510253906 + ], + [ + "▁veterans", + -11.323811531066896 + ], + [ + "Under", + -11.324108123779297 + ], + [ + "▁dairy", + -11.32411003112793 + ], + [ + "▁seemingly", + -11.324174880981444 + ], + [ + "▁delight", + -11.324261665344238 + ], + [ + "▁advocate", + -11.324530601501465 + ], + [ + "▁highlighted", + -11.324588775634766 + ], + [ + "▁builds", + -11.324674606323242 + ], + [ + "▁attempting", + -11.325121879577637 + ], + [ + "▁establishing", + -11.326001167297363 + ], + [ + "▁embedded", + -11.326239585876465 + ], + [ + "▁Beauty", + -11.326440811157228 + ], + [ + "▁Jean", + -11.326630592346191 + ], + [ + "▁quest", + -11.326709747314451 + ], + [ + "berg", + -11.327146530151367 + ], + [ + "▁integral", + -11.327513694763184 + ], + [ + "▁defend", + -11.32809066772461 + ], + [ + "▁relaxation", + -11.328112602233888 + ], + [ + "▁uploaded", + -11.328208923339844 + ], + [ + "▁corners", + -11.328267097473145 + ], + [ + "▁insulation", + -11.328396797180176 + ], + [ + "▁Thai", + -11.328500747680664 + ], + [ + "▁2009.", + -11.32860279083252 + ], + [ + "▁fiscal", + -11.328693389892578 + ], + [ + "▁Apr", + -11.32880687713623 + ], + [ + "list", + -11.328852653503418 + ], + [ + "▁seeks", + -11.329100608825684 + ], + [ + "▁Swiss", + -11.329413414001465 + ], + [ + "▁charts", + -11.329452514648438 + ], + [ + "▁leverage", + -11.329654693603516 + ], + [ + "▁affairs", + -11.329763412475586 + ], + [ + "▁Phoenix", + -11.329896926879885 + ], + [ + "▁chairman", + -11.329996109008787 + ], + [ + "▁disposal", + -11.330059051513672 + ], + [ + "▁reaches", + -11.330113410949709 + ], + [ + "▁identical", + -11.330430030822754 + ], + [ + "▁insert", + -11.331931114196776 + ], + [ + "▁honored", + -11.332136154174805 + ], + [ + "▁46", + -11.332139015197754 + ], + [ + "▁screw", + -11.332575798034668 + ], + [ + "▁highway", + -11.332608222961426 + ], + [ + "▁brass", + -11.33290958404541 + ], + [ + "ek", + -11.33319091796875 + ], + [ + "yard", + -11.33327865600586 + ], + [ + "▁Bachelor", + -11.333348274230955 + ], + [ + "ang", + -11.333403587341309 + ], + [ + "▁Stadium", + -11.333486557006836 + ], + [ + "▁surgical", + -11.33371353149414 + ], + [ + "▁graduation", + -11.33381462097168 + ], + [ + "▁talents", + -11.333887100219728 + ], + [ + "▁Ultra", + -11.333897590637209 + ], + [ + "▁pics", + -11.334101676940918 
+ ], + [ + "▁Update", + -11.33447551727295 + ], + [ + "▁Summit", + -11.334678649902344 + ], + [ + "▁antique", + -11.33517360687256 + ], + [ + "▁Mom", + -11.335206985473633 + ], + [ + "▁Anne", + -11.335209846496582 + ], + [ + "state", + -11.33525562286377 + ], + [ + "▁corresponding", + -11.335626602172852 + ], + [ + "▁bankruptcy", + -11.335801124572754 + ], + [ + "▁underground", + -11.33597183227539 + ], + [ + "▁consecutive", + -11.336345672607422 + ], + [ + "▁fed", + -11.336402893066406 + ], + [ + "▁worthy", + -11.336636543273926 + ], + [ + "ola", + -11.336661338806152 + ], + [ + "▁Coach", + -11.33696174621582 + ], + [ + "Con", + -11.336976051330566 + ], + [ + "▁graduates", + -11.337130546569824 + ], + [ + "▁glory", + -11.337501525878906 + ], + [ + "▁warehouse", + -11.337506294250488 + ], + [ + "▁surprises", + -11.337599754333496 + ], + [ + "▁conjunction", + -11.337788581848145 + ], + [ + "▁sudden", + -11.338119506835938 + ], + [ + "▁Major", + -11.338149070739746 + ], + [ + "ker", + -11.338245391845703 + ], + [ + "▁strengths", + -11.338482856750488 + ], + [ + "21", + -11.338528633117676 + ], + [ + "▁baked", + -11.339032173156738 + ], + [ + "▁voltage", + -11.339141845703123 + ], + [ + "▁abstract", + -11.339263916015623 + ], + [ + "▁WA", + -11.339277267456056 + ], + [ + "▁bonds", + -11.339599609375 + ], + [ + "▁informative", + -11.339828491210938 + ], + [ + "port", + -11.339845657348633 + ], + [ + "▁Often", + -11.340004920959473 + ], + [ + "▁Total", + -11.34018898010254 + ], + [ + "▁exams", + -11.340271949768066 + ], + [ + "▁costume", + -11.340357780456545 + ], + [ + "▁DO", + -11.340444564819336 + ], + [ + "▁Moore", + -11.340673446655272 + ], + [ + "▁52", + -11.3407564163208 + ], + [ + "▁cheapest", + -11.340777397155762 + ], + [ + "▁reserves", + -11.340875625610352 + ], + [ + "▁cocktail", + -11.341078758239746 + ], + [ + "▁Eye", + -11.341385841369627 + ], + [ + "▁fabrics", + -11.341424942016602 + ], + [ + "▁serial", + -11.34162425994873 + ], + [ + "▁concluded", + -11.341880798339844 + ], + [ + "▁cozy", + -11.341880798339844 + ], + [ + "▁Staff", + -11.342122077941896 + ], + [ + "▁ingredient", + -11.34213638305664 + ], + [ + "specific", + -11.342143058776855 + ], + [ + "▁tomato", + -11.34215259552002 + ], + [ + "▁queen", + -11.342166900634766 + ], + [ + "▁Mining", + -11.34229564666748 + ], + [ + "-5", + -11.342361450195312 + ], + [ + "▁connectivity", + -11.34241008758545 + ], + [ + "▁Leadership", + -11.342527389526367 + ], + [ + "▁Fun", + -11.342716217041016 + ], + [ + "▁calculated", + -11.342796325683594 + ], + [ + "▁lid", + -11.342893600463867 + ], + [ + "▁cannabis", + -11.34290885925293 + ], + [ + "▁entities", + -11.342973709106444 + ], + [ + "▁accurately", + -11.343215942382812 + ], + [ + "▁Arab", + -11.343267440795898 + ], + [ + "▁Pool", + -11.343358039855955 + ], + [ + "▁Jane", + -11.343497276306152 + ], + [ + "▁Mon", + -11.343635559082031 + ], + [ + "Join", + -11.343820571899414 + ], + [ + "▁Sweden", + -11.343838691711426 + ], + [ + "▁receipt", + -11.343950271606444 + ], + [ + "▁departure", + -11.344008445739746 + ], + [ + "▁Clark", + -11.344046592712402 + ], + [ + "▁gluten", + -11.34421730041504 + ], + [ + "▁Mix", + -11.344366073608398 + ], + [ + "▁Audio", + -11.3446044921875 + ], + [ + "▁Chamber", + -11.344830513000488 + ], + [ + "▁mesh", + -11.344844818115234 + ], + [ + "▁Brooklyn", + -11.345108032226562 + ], + [ + "▁strain", + -11.345258712768556 + ], + [ + "▁GA", + -11.345284461975098 + ], + [ + "▁sponsor", + -11.34532356262207 + ], + [ + "▁vegan", + -11.345399856567385 + ], + [ + 
"▁Common", + -11.34599494934082 + ], + [ + "▁Language", + -11.346209526062012 + ], + [ + "▁2020", + -11.346352577209473 + ], + [ + "mer", + -11.346652030944824 + ], + [ + "▁executives", + -11.34677505493164 + ], + [ + "▁$3", + -11.347271919250488 + ], + [ + "▁needing", + -11.347290992736816 + ], + [ + "▁Hawaii", + -11.347414016723633 + ], + [ + "type", + -11.347442626953123 + ], + [ + "▁glue", + -11.347478866577148 + ], + [ + "▁Pat", + -11.34750747680664 + ], + [ + "▁throwing", + -11.347808837890623 + ], + [ + "▁smoothly", + -11.347976684570312 + ], + [ + "▁Anna", + -11.34805393218994 + ], + [ + "▁retreat", + -11.348092079162598 + ], + [ + "▁restricted", + -11.34827995300293 + ], + [ + "▁conducting", + -11.348299026489258 + ], + [ + "▁charter", + -11.34836196899414 + ], + [ + "▁honestly", + -11.348833084106444 + ], + [ + "▁coin", + -11.34900188446045 + ], + [ + "▁Doctor", + -11.349106788635254 + ], + [ + "▁encourages", + -11.349224090576172 + ], + [ + "▁8,", + -11.349303245544434 + ], + [ + "▁arrest", + -11.349327087402344 + ], + [ + "▁Boy", + -11.349932670593262 + ], + [ + "▁tasting", + -11.350016593933104 + ], + [ + "ze", + -11.350043296813965 + ], + [ + "▁j", + -11.350287437438965 + ], + [ + "▁Gulf", + -11.350370407104492 + ], + [ + "▁fired", + -11.350407600402832 + ], + [ + "▁patience", + -11.350509643554688 + ], + [ + "▁font", + -11.350749015808104 + ], + [ + "▁permits", + -11.350820541381836 + ], + [ + "▁renewable", + -11.351506233215332 + ], + [ + "▁Maria", + -11.351776123046877 + ], + [ + "▁convinced", + -11.35185432434082 + ], + [ + "▁receives", + -11.352113723754885 + ], + [ + "link", + -11.352250099182127 + ], + [ + "▁stolen", + -11.352821350097656 + ], + [ + "▁Index", + -11.353177070617676 + ], + [ + "▁Shipping", + -11.35319709777832 + ], + [ + "▁magazines", + -11.35321044921875 + ], + [ + "▁le", + -11.353275299072266 + ], + [ + "▁Ever", + -11.353364944458008 + ], + [ + "▁hospitality", + -11.35385799407959 + ], + [ + "▁drum", + -11.35387134552002 + ], + [ + "▁friendship", + -11.35399055480957 + ], + [ + "▁jail", + -11.354037284851074 + ], + [ + "▁barrier", + -11.354135513305664 + ], + [ + "▁achievements", + -11.354181289672852 + ], + [ + "▁stamp", + -11.35451602935791 + ], + [ + "▁complement", + -11.354702949523926 + ], + [ + "▁10,000", + -11.354778289794922 + ], + [ + "▁Culture", + -11.355002403259276 + ], + [ + "▁MI", + -11.355025291442873 + ], + [ + "▁bits", + -11.35520076751709 + ], + [ + "!).", + -11.355364799499512 + ], + [ + "▁Corporate", + -11.355695724487305 + ], + [ + "▁symbols", + -11.355783462524414 + ], + [ + "Sometimes", + -11.355905532836914 + ], + [ + "▁thrilled", + -11.355934143066406 + ], + [ + "▁destroy", + -11.356585502624512 + ], + [ + "▁quit", + -11.356974601745604 + ], + [ + "▁tray", + -11.357073783874512 + ], + [ + "screen", + -11.357176780700684 + ], + [ + "▁Feel", + -11.357186317443848 + ], + [ + "▁winds", + -11.357412338256836 + ], + [ + "▁magnetic", + -11.3575439453125 + ], + [ + "que", + -11.357755661010742 + ], + [ + "Want", + -11.357892990112305 + ], + [ + "▁Dean", + -11.35811996459961 + ], + [ + "▁Whatever", + -11.358219146728516 + ], + [ + "▁organised", + -11.358263969421388 + ], + [ + "▁Bath", + -11.358448028564451 + ], + [ + "▁Visual", + -11.358527183532717 + ], + [ + "▁folk", + -11.358808517456056 + ], + [ + "▁(2)", + -11.358867645263672 + ], + [ + "tion", + -11.359169960021973 + ], + [ + "▁curtain", + -11.359683990478516 + ], + [ + "▁Indonesia", + -11.359827041625977 + ], + [ + "ov", + -11.359976768493652 + ], + [ + "▁tuned", + 
-11.360061645507812 + ], + [ + "▁weekends", + -11.360528945922852 + ], + [ + "▁loyalty", + -11.360767364501951 + ], + [ + "▁synthetic", + -11.3607816696167 + ], + [ + "▁47", + -11.360845565795898 + ], + [ + "▁functioning", + -11.360872268676758 + ], + [ + "▁thinks", + -11.360877990722656 + ], + [ + "▁Organization", + -11.361120223999023 + ], + [ + "▁Further", + -11.36115264892578 + ], + [ + "▁blade", + -11.361294746398926 + ], + [ + "▁$50", + -11.361517906188965 + ], + [ + "|", + -11.36152172088623 + ], + [ + "▁Diamond", + -11.361820220947266 + ], + [ + "▁fails", + -11.362066268920898 + ], + [ + "▁regime", + -11.362247467041016 + ], + [ + "▁headquarters", + -11.36234188079834 + ], + [ + "low", + -11.3626070022583 + ], + [ + "▁DE", + -11.362619400024414 + ], + [ + "▁interpretation", + -11.362852096557615 + ], + [ + "▁counsel", + -11.36293601989746 + ], + [ + "▁des", + -11.362953186035156 + ], + [ + "▁der", + -11.362997055053713 + ], + [ + "▁Del", + -11.363147735595703 + ], + [ + "▁fundraising", + -11.363521575927734 + ], + [ + "▁passage", + -11.363627433776855 + ], + [ + "▁adjacent", + -11.363802909851074 + ], + [ + "▁reservation", + -11.363957405090332 + ], + [ + "▁niche", + -11.36404037475586 + ], + [ + "▁NYC", + -11.36429500579834 + ], + [ + "▁eastern", + -11.364548683166504 + ], + [ + "▁se", + -11.364675521850586 + ], + [ + "bed", + -11.36482048034668 + ], + [ + "▁bugs", + -11.364830017089844 + ], + [ + "▁beta", + -11.364856719970703 + ], + [ + "▁Cool", + -11.365280151367188 + ], + [ + "▁belong", + -11.36585807800293 + ], + [ + "▁poll", + -11.36586570739746 + ], + [ + "▁Intel", + -11.365888595581056 + ], + [ + "▁practicing", + -11.36606502532959 + ], + [ + "▁transformed", + -11.366130828857422 + ], + [ + "chi", + -11.366147994995115 + ], + [ + "ising", + -11.366470336914062 + ], + [ + "▁microwave", + -11.366488456726074 + ], + [ + "▁affecting", + -11.36652946472168 + ], + [ + "▁rounds", + -11.366539001464844 + ], + [ + "▁Junior", + -11.367070198059082 + ], + [ + "▁cyber", + -11.367402076721191 + ], + [ + "▁branding", + -11.367427825927734 + ], + [ + "▁tent", + -11.36757755279541 + ], + [ + "▁epic", + -11.367985725402832 + ], + [ + "▁undergraduate", + -11.369107246398926 + ], + [ + "▁Bluetooth", + -11.369256019592283 + ], + [ + "▁compromise", + -11.369345664978027 + ], + [ + "▁hungry", + -11.369705200195312 + ], + [ + "dy", + -11.369861602783203 + ], + [ + "▁drawings", + -11.36989974975586 + ], + [ + "▁vessel", + -11.369924545288086 + ], + [ + "▁Entertainment", + -11.370039939880373 + ], + [ + "▁significance", + -11.370043754577637 + ], + [ + "Back", + -11.3701810836792 + ], + [ + "▁generating", + -11.370190620422363 + ], + [ + "▁underneath", + -11.370198249816896 + ], + [ + "▁surroundings", + -11.370406150817873 + ], + [ + "▁bake", + -11.37045192718506 + ], + [ + "▁Empire", + -11.370627403259276 + ], + [ + "▁outlets", + -11.370728492736816 + ], + [ + "war", + -11.37083625793457 + ], + [ + "▁gel", + -11.370980262756348 + ], + [ + "▁Ridge", + -11.371061325073242 + ], + [ + "▁horror", + -11.371223449707031 + ], + [ + "▁minerals", + -11.37123966217041 + ], + [ + "▁thirty", + -11.371297836303713 + ], + [ + "▁trademark", + -11.371715545654297 + ], + [ + "▁ethical", + -11.371777534484863 + ], + [ + "effective", + -11.372156143188477 + ], + [ + "▁Sweet", + -11.372312545776367 + ], + [ + "▁toxic", + -11.372421264648438 + ], + [ + "▁Dead", + -11.37263011932373 + ], + [ + "70", + -11.373276710510254 + ], + [ + "▁obtaining", + -11.37328815460205 + ], + [ + "▁%", + -11.373905181884766 + ], + [ + 
"▁dramatically", + -11.374105453491213 + ], + [ + "▁pose", + -11.374164581298828 + ], + [ + "▁Labor", + -11.374411582946776 + ], + [ + "Meanwhile", + -11.37453556060791 + ], + [ + "▁IBM", + -11.37455940246582 + ], + [ + "▁structured", + -11.374740600585938 + ], + [ + "▁failing", + -11.375096321105955 + ], + [ + "▁trauma", + -11.375277519226074 + ], + [ + "▁Switzerland", + -11.375462532043455 + ], + [ + "▁Images", + -11.375621795654297 + ], + [ + "▁Corp", + -11.375630378723145 + ], + [ + "▁11,", + -11.376246452331545 + ], + [ + "▁heal", + -11.376423835754396 + ], + [ + "Students", + -11.376470565795898 + ], + [ + "non", + -11.376716613769531 + ], + [ + "▁Pack", + -11.376748085021973 + ], + [ + "▁dropping", + -11.37689971923828 + ], + [ + "▁Dad", + -11.37741470336914 + ], + [ + "▁derived", + -11.377676010131836 + ], + [ + "▁dial", + -11.37787628173828 + ], + [ + "▁ranks", + -11.378094673156738 + ], + [ + "▁teens", + -11.378393173217772 + ], + [ + "▁Grade", + -11.378552436828612 + ], + [ + "▁barriers", + -11.378717422485352 + ], + [ + "▁outline", + -11.378782272338867 + ], + [ + "▁rewarding", + -11.37899684906006 + ], + [ + "rate", + -11.379043579101562 + ], + [ + "▁Events", + -11.379083633422852 + ], + [ + "now", + -11.379213333129885 + ], + [ + "▁ruling", + -11.379315376281738 + ], + [ + "▁Alaska", + -11.37941074371338 + ], + [ + "▁newer", + -11.379971504211426 + ], + [ + "About", + -11.380096435546877 + ], + [ + "▁Ko", + -11.380194664001465 + ], + [ + "▁THIS", + -11.380261421203612 + ], + [ + "▁specializes", + -11.38047981262207 + ], + [ + "▁Salt", + -11.3804931640625 + ], + [ + "▁differ", + -11.380844116210938 + ], + [ + "▁accent", + -11.380889892578123 + ], + [ + "▁mounting", + -11.381084442138672 + ], + [ + "▁sq", + -11.38150691986084 + ], + [ + "Day", + -11.381806373596191 + ], + [ + "▁handmade", + -11.38193416595459 + ], + [ + "▁dimension", + -11.382120132446287 + ], + [ + "▁attempted", + -11.38219928741455 + ], + [ + "▁Lo", + -11.38284397125244 + ], + [ + "▁armed", + -11.383325576782228 + ], + [ + "▁Dining", + -11.383392333984377 + ], + [ + "▁concentrate", + -11.383414268493652 + ], + [ + "▁dig", + -11.383480072021484 + ], + [ + "▁marble", + -11.383563995361328 + ], + [ + "▁offensive", + -11.38387966156006 + ], + [ + "▁6,", + -11.383997917175291 + ], + [ + "▁organizing", + -11.384078979492188 + ], + [ + "▁Getting", + -11.384114265441896 + ], + [ + "▁Harvard", + -11.384190559387209 + ], + [ + "▁gene", + -11.384384155273438 + ], + [ + "▁Ross", + -11.384417533874512 + ], + [ + "▁promising", + -11.384489059448242 + ], + [ + "▁RSS", + -11.384556770324709 + ], + [ + "▁roller", + -11.384605407714844 + ], + [ + "▁bin", + -11.385062217712402 + ], + [ + "▁Costa", + -11.38514232635498 + ], + [ + "get", + -11.385287284851074 + ], + [ + "▁rides", + -11.385318756103516 + ], + [ + "▁Rob", + -11.38540744781494 + ], + [ + "▁appliance", + -11.385462760925291 + ], + [ + "▁Partners", + -11.38551139831543 + ], + [ + "▁Designs", + -11.38552951812744 + ], + [ + "▁determining", + -11.38635540008545 + ], + [ + "▁SE", + -11.386356353759766 + ], + [ + "▁substitute", + -11.386395454406738 + ], + [ + "▁pipes", + -11.3864164352417 + ], + [ + "▁Du", + -11.386981010437012 + ], + [ + "ping", + -11.387206077575684 + ], + [ + "▁Mumbai", + -11.387675285339355 + ], + [ + "▁upset", + -11.387929916381836 + ], + [ + "▁travelers", + -11.388011932373049 + ], + [ + "▁bikes", + -11.388035774230955 + ], + [ + "▁farming", + -11.388042449951172 + ], + [ + "▁viagra", + -11.388160705566406 + ], + [ + "▁buses", + -11.38816261291504 
+ ], + [ + "▁Edge", + -11.388290405273438 + ], + [ + "▁courtesy", + -11.388384819030762 + ], + [ + "35", + -11.388888359069824 + ], + [ + "▁conscious", + -11.388981819152832 + ], + [ + "▁Inside", + -11.389093399047852 + ], + [ + "▁Rev", + -11.389251708984377 + ], + [ + "▁assignments", + -11.3893404006958 + ], + [ + "ities", + -11.389486312866213 + ], + [ + "▁Investment", + -11.38952350616455 + ], + [ + "▁Phil", + -11.389589309692385 + ], + [ + "store", + -11.38962459564209 + ], + [ + "▁rolls", + -11.38991928100586 + ], + [ + "▁attributes", + -11.390019416809082 + ], + [ + "▁Really", + -11.39033031463623 + ], + [ + "▁vocal", + -11.390351295471191 + ], + [ + "ware", + -11.390458106994627 + ], + [ + "▁towel", + -11.390767097473145 + ], + [ + "▁separation", + -11.390775680541992 + ], + [ + "▁clay", + -11.390851974487305 + ], + [ + "▁Su", + -11.390891075134276 + ], + [ + "▁sporting", + -11.391021728515623 + ], + [ + "ball", + -11.391053199768066 + ], + [ + "▁Kate", + -11.391053199768066 + ], + [ + "▁Alexander", + -11.391310691833496 + ], + [ + "▁Player", + -11.391433715820312 + ], + [ + "▁smartphones", + -11.391799926757812 + ], + [ + "▁Convention", + -11.391868591308594 + ], + [ + "▁implications", + -11.392274856567385 + ], + [ + "▁Chrome", + -11.392301559448242 + ], + [ + "▁riders", + -11.39259910583496 + ], + [ + "▁Window", + -11.39266586303711 + ], + [ + "Being", + -11.393357276916504 + ], + [ + "▁respected", + -11.393729209899902 + ], + [ + "▁wage", + -11.393770217895508 + ], + [ + "▁harsh", + -11.39392375946045 + ], + [ + "▁inviting", + -11.393924713134766 + ], + [ + "▁praise", + -11.393933296203612 + ], + [ + "▁Base", + -11.39397144317627 + ], + [ + "▁Remove", + -11.394159317016602 + ], + [ + "▁Rather", + -11.394186973571776 + ], + [ + "▁Driver", + -11.394197463989258 + ], + [ + "▁angry", + -11.394229888916016 + ], + [ + "▁motorcycle", + -11.394250869750977 + ], + [ + "ial", + -11.39426326751709 + ], + [ + "▁Sure", + -11.394427299499512 + ], + [ + "▁anybody", + -11.39490795135498 + ], + [ + "▁fold", + -11.395112037658691 + ], + [ + "▁restored", + -11.395271301269531 + ], + [ + "▁perception", + -11.395567893981934 + ], + [ + "stop", + -11.396004676818848 + ], + [ + "▁Andy", + -11.396262168884276 + ], + [ + "▁RV", + -11.39659309387207 + ], + [ + "▁Technologies", + -11.396899223327637 + ], + [ + "▁intensity", + -11.396933555603027 + ], + [ + "▁coating", + -11.397042274475098 + ], + [ + "▁ME", + -11.397089004516602 + ], + [ + "▁da", + -11.397616386413574 + ], + [ + "▁mainstream", + -11.39793300628662 + ], + [ + "▁freely", + -11.398035049438477 + ], + [ + "▁reflected", + -11.398045539855955 + ], + [ + "▁Birthday", + -11.398228645324709 + ], + [ + "▁crossing", + -11.398269653320312 + ], + [ + "▁consist", + -11.398276329040527 + ], + [ + "▁Captain", + -11.39831256866455 + ], + [ + "▁handed", + -11.398499488830566 + ], + [ + "Un", + -11.398536682128906 + ], + [ + "cut", + -11.39921760559082 + ], + [ + "shirt", + -11.399219512939451 + ], + [ + "▁triple", + -11.399250030517578 + ], + [ + "▁adjusted", + -11.39937973022461 + ], + [ + "▁Premium", + -11.3994779586792 + ], + [ + "ho", + -11.399807929992676 + ], + [ + "▁Luke", + -11.39996337890625 + ], + [ + "▁cure", + -11.399977684020996 + ], + [ + "ron", + -11.40003490447998 + ], + [ + "▁flip", + -11.40016746520996 + ], + [ + "▁Battle", + -11.400333404541016 + ], + [ + "▁separated", + -11.400443077087402 + ], + [ + "Based", + -11.400445938110352 + ], + [ + "▁thankful", + -11.400634765625 + ], + [ + "▁gonna", + -11.40070629119873 + ], + [ + "▁uniform", 
+ -11.400965690612791 + ], + [ + "▁Several", + -11.401083946228027 + ], + [ + "iness", + -11.401142120361328 + ], + [ + "▁tends", + -11.40126609802246 + ], + [ + "▁inclusion", + -11.401389122009276 + ], + [ + "▁cosmetic", + -11.401732444763184 + ], + [ + "▁assists", + -11.401863098144531 + ], + [ + "white", + -11.402022361755373 + ], + [ + "▁dying", + -11.402185440063477 + ], + [ + "▁debris", + -11.40246868133545 + ], + [ + "▁180", + -11.402511596679688 + ], + [ + "▁draws", + -11.40268898010254 + ], + [ + "▁drill", + -11.40268898010254 + ], + [ + "▁leisure", + -11.40276336669922 + ], + [ + "▁transparency", + -11.402785301208496 + ], + [ + "▁13,", + -11.402955055236816 + ], + [ + "1,", + -11.40298843383789 + ], + [ + "▁bow", + -11.40300464630127 + ], + [ + "▁governance", + -11.403087615966797 + ], + [ + "▁ports", + -11.403298377990724 + ], + [ + "▁wings", + -11.403427124023438 + ], + [ + "▁Safe", + -11.40351676940918 + ], + [ + "▁wardrobe", + -11.403524398803713 + ], + [ + "▁longest", + -11.403536796569824 + ], + [ + "▁Deep", + -11.403609275817873 + ], + [ + "▁wool", + -11.404306411743164 + ], + [ + "▁cement", + -11.404777526855469 + ], + [ + "▁repeated", + -11.40574836730957 + ], + [ + "▁query", + -11.40596866607666 + ], + [ + "▁presidential", + -11.406620025634766 + ], + [ + "▁terrace", + -11.406811714172363 + ], + [ + "▁suicide", + -11.406869888305664 + ], + [ + "▁advisor", + -11.406901359558104 + ], + [ + "commerce", + -11.406903266906738 + ], + [ + "Tech", + -11.406914710998535 + ], + [ + "▁legitimate", + -11.407135009765623 + ], + [ + "▁gross", + -11.407151222229004 + ], + [ + "▁inexpensive", + -11.407443046569824 + ], + [ + "▁Mi", + -11.40770435333252 + ], + [ + "▁span", + -11.407719612121582 + ], + [ + "about", + -11.40774631500244 + ], + [ + "▁Roll", + -11.407829284667969 + ], + [ + "▁hills", + -11.407880783081056 + ], + [ + "▁confusion", + -11.408004760742188 + ], + [ + "▁appointments", + -11.408188819885254 + ], + [ + "▁centres", + -11.408235549926758 + ], + [ + "Welcome", + -11.408400535583496 + ], + [ + "▁palm", + -11.408485412597656 + ], + [ + "▁accordingly", + -11.408616065979004 + ], + [ + "▁Commerce", + -11.408825874328612 + ], + [ + "Watch", + -11.408931732177734 + ], + [ + "play", + -11.4091215133667 + ], + [ + "-20", + -11.409186363220217 + ], + [ + "▁acknowledge", + -11.409366607666016 + ], + [ + "▁recycled", + -11.40969944000244 + ], + [ + "▁renovation", + -11.409734725952148 + ], + [ + "▁cooler", + -11.41000747680664 + ], + [ + "▁steering", + -11.41013240814209 + ], + [ + "▁partial", + -11.410158157348633 + ], + [ + "▁mat", + -11.410528182983398 + ], + [ + "▁dont", + -11.410635948181152 + ], + [ + "▁Parliament", + -11.410669326782228 + ], + [ + "▁Cost", + -11.411059379577637 + ], + [ + "▁Edward", + -11.411170959472656 + ], + [ + "▁consultants", + -11.411656379699709 + ], + [ + "▁neat", + -11.411853790283203 + ], + [ + "▁lying", + -11.412005424499512 + ], + [ + "▁independently", + -11.412270545959473 + ], + [ + "▁Pi", + -11.412368774414062 + ], + [ + "▁sooner", + -11.412749290466309 + ], + [ + "IT", + -11.41288948059082 + ], + [ + "▁Mail", + -11.413199424743652 + ], + [ + "▁warmth", + -11.413265228271484 + ], + [ + "▁meter", + -11.413291931152344 + ], + [ + "▁loyal", + -11.41353988647461 + ], + [ + "▁Third", + -11.413616180419922 + ], + [ + "▁alter", + -11.413860321044922 + ], + [ + "▁Grace", + -11.414525032043455 + ], + [ + "Open", + -11.41463565826416 + ], + [ + "▁creatures", + -11.414714813232422 + ], + [ + "▁Pa", + -11.415120124816896 + ], + [ + "▁Movie", + 
-11.415242195129396 + ], + [ + "▁sisters", + -11.415353775024414 + ], + [ + "▁trains", + -11.41553783416748 + ], + [ + "▁Region", + -11.415672302246094 + ], + [ + "▁reactions", + -11.415717124938965 + ], + [ + "ating", + -11.415837287902832 + ], + [ + "▁Adobe", + -11.41590976715088 + ], + [ + "▁pupils", + -11.415972709655762 + ], + [ + "▁2008.", + -11.41614818572998 + ], + [ + "▁locks", + -11.416175842285156 + ], + [ + "Previous", + -11.416407585144045 + ], + [ + "▁poem", + -11.416640281677246 + ], + [ + "▁Barcelona", + -11.416675567626951 + ], + [ + "▁seniors", + -11.41711711883545 + ], + [ + "▁(3", + -11.41715145111084 + ], + [ + "!!!!", + -11.417356491088867 + ], + [ + "▁AS", + -11.417552947998049 + ], + [ + "▁trainer", + -11.417852401733398 + ], + [ + "▁slice", + -11.417994499206545 + ], + [ + "▁shadow", + -11.418539047241213 + ], + [ + "pe", + -11.41861629486084 + ], + [ + "some", + -11.41889476776123 + ], + [ + "▁browsing", + -11.418935775756836 + ], + [ + "300", + -11.4190092086792 + ], + [ + "▁pine", + -11.419049263000488 + ], + [ + "▁picks", + -11.41908359527588 + ], + [ + "Hey", + -11.419097900390623 + ], + [ + "▁Century", + -11.41910457611084 + ], + [ + "▁dispute", + -11.41959285736084 + ], + [ + "▁predict", + -11.420000076293944 + ], + [ + "▁Original", + -11.42017650604248 + ], + [ + "▁Charlotte", + -11.42021656036377 + ], + [ + "▁nails", + -11.420308113098145 + ], + [ + "▁Associates", + -11.420329093933104 + ], + [ + "Black", + -11.420527458190918 + ], + [ + "▁Bus", + -11.420564651489258 + ], + [ + "▁Gray", + -11.420998573303224 + ], + [ + "▁architect", + -11.421001434326172 + ], + [ + "▁searches", + -11.421052932739258 + ], + [ + "▁inclusive", + -11.42128849029541 + ], + [ + "▁lean", + -11.4213228225708 + ], + [ + "▁silent", + -11.421379089355469 + ], + [ + "▁deemed", + -11.42141056060791 + ], + [ + "▁pressing", + -11.421675682067873 + ], + [ + "▁enemies", + -11.421882629394531 + ], + [ + "▁deployment", + -11.421889305114746 + ], + [ + "▁forming", + -11.421918869018556 + ], + [ + "▁obligations", + -11.421963691711426 + ], + [ + "▁destruction", + -11.422274589538574 + ], + [ + "▁introducing", + -11.422370910644531 + ], + [ + "▁sacrifice", + -11.423595428466797 + ], + [ + "▁19,", + -11.42367458343506 + ], + [ + "▁promotions", + -11.423711776733398 + ], + [ + "key", + -11.42398166656494 + ], + [ + "▁Antonio", + -11.42430019378662 + ], + [ + "▁Final", + -11.424361228942873 + ], + [ + "▁Oracle", + -11.424463272094728 + ], + [ + "AP", + -11.424519538879396 + ], + [ + "▁thereby", + -11.424899101257324 + ], + [ + "▁profitable", + -11.424986839294434 + ], + [ + "▁jeans", + -11.42498779296875 + ], + [ + "Net", + -11.425278663635254 + ], + [ + "▁inflation", + -11.425374984741213 + ], + [ + "▁Discover", + -11.42567253112793 + ], + [ + "▁15,", + -11.425909042358398 + ], + [ + "▁schemes", + -11.42591381072998 + ], + [ + "▁SD", + -11.425941467285156 + ], + [ + "▁flows", + -11.42597198486328 + ], + [ + "▁Prior", + -11.426013946533203 + ], + [ + "▁uncomfortable", + -11.42635726928711 + ], + [ + "born", + -11.427041053771973 + ], + [ + "▁logistics", + -11.427131652832031 + ], + [ + "▁curtains", + -11.427369117736816 + ], + [ + "▁BMW", + -11.427449226379396 + ], + [ + "▁evolved", + -11.427563667297363 + ], + [ + "▁booth", + -11.427803993225098 + ], + [ + "▁lenders", + -11.427855491638184 + ], + [ + "▁anger", + -11.42801570892334 + ], + [ + "▁unwanted", + -11.428242683410645 + ], + [ + "▁lightly", + -11.428464889526367 + ], + [ + "▁humanity", + -11.42864227294922 + ], + [ + "▁welfare", + 
-11.428711891174316 + ], + [ + "▁occasional", + -11.42890739440918 + ], + [ + "▁targeting", + -11.428933143615724 + ], + [ + "▁pumpkin", + -11.42898941040039 + ], + [ + "▁pays", + -11.42912769317627 + ], + [ + "si", + -11.429177284240724 + ], + [ + "▁eager", + -11.429234504699709 + ], + [ + "▁metres", + -11.429706573486328 + ], + [ + "▁potato", + -11.429737091064451 + ], + [ + "▁RAM", + -11.429805755615234 + ], + [ + "▁24/7", + -11.4298734664917 + ], + [ + "▁topped", + -11.430011749267578 + ], + [ + "▁survived", + -11.430139541625977 + ], + [ + "▁Tokyo", + -11.430170059204102 + ], + [ + "▁Nice", + -11.430185317993164 + ], + [ + "▁shoulders", + -11.430253982543944 + ], + [ + "▁22,", + -11.43026065826416 + ], + [ + "▁Six", + -11.430828094482422 + ], + [ + "▁Environment", + -11.431015014648438 + ], + [ + "▁tuition", + -11.431256294250488 + ], + [ + "▁updating", + -11.431391716003418 + ], + [ + "ella", + -11.431702613830566 + ], + [ + "▁pools", + -11.431750297546388 + ], + [ + "▁clever", + -11.431781768798828 + ], + [ + "▁Years", + -11.43186092376709 + ], + [ + "▁momentum", + -11.431939125061035 + ], + [ + "▁Defense", + -11.432506561279297 + ], + [ + "▁rustic", + -11.432610511779783 + ], + [ + "▁trans", + -11.43265151977539 + ], + [ + "▁85", + -11.432785034179688 + ], + [ + "▁lining", + -11.432890892028809 + ], + [ + "▁Associate", + -11.433035850524902 + ], + [ + "▁inflammation", + -11.433144569396973 + ], + [ + "▁forums", + -11.433923721313477 + ], + [ + "▁valley", + -11.433929443359377 + ], + [ + "▁Gardens", + -11.434181213378906 + ], + [ + "▁lender", + -11.43420124053955 + ], + [ + "▁Bi", + -11.434226036071776 + ], + [ + "▁mandatory", + -11.434614181518556 + ], + [ + "▁elite", + -11.434714317321776 + ], + [ + "Wow", + -11.435187339782717 + ], + [ + "▁albums", + -11.43577480316162 + ], + [ + "▁bacon", + -11.435795783996582 + ], + [ + "▁emerged", + -11.436243057250977 + ], + [ + "▁Days", + -11.436444282531738 + ], + [ + "▁diesel", + -11.436448097229004 + ], + [ + "▁algorithm", + -11.436481475830078 + ], + [ + "▁execute", + -11.436514854431152 + ], + [ + "▁ranges", + -11.43671417236328 + ], + [ + "▁Pink", + -11.436738967895508 + ], + [ + "▁pipeline", + -11.43715763092041 + ], + [ + "▁Yellow", + -11.43765926361084 + ], + [ + "▁31,", + -11.437908172607422 + ], + [ + "▁avoiding", + -11.438255310058594 + ], + [ + "ik", + -11.438316345214844 + ], + [ + "▁fur", + -11.43845272064209 + ], + [ + "▁judges", + -11.438512802124023 + ], + [ + "▁Song", + -11.43910312652588 + ], + [ + "▁besides", + -11.439189910888672 + ], + [ + "▁Republicans", + -11.439208030700684 + ], + [ + "▁surveys", + -11.43922233581543 + ], + [ + "▁stressed", + -11.439315795898438 + ], + [ + "▁vessels", + -11.43960666656494 + ], + [ + "▁Netflix", + -11.440082550048828 + ], + [ + "▁wheat", + -11.440108299255373 + ], + [ + "Unfortunately", + -11.440185546875 + ], + [ + "▁practitioners", + -11.440186500549316 + ], + [ + "▁im", + -11.440519332885742 + ], + [ + "▁promptly", + -11.440550804138184 + ], + [ + "▁paired", + -11.440670013427734 + ], + [ + "your", + -11.441021919250488 + ], + [ + "▁imaging", + -11.44131851196289 + ], + [ + "▁companion", + -11.441431045532228 + ], + [ + "▁experimental", + -11.441452980041504 + ], + [ + "have", + -11.441646575927734 + ], + [ + "▁installations", + -11.442304611206056 + ], + [ + "ck", + -11.44273853302002 + ], + [ + "▁margin", + -11.44296169281006 + ], + [ + "▁dip", + -11.44322395324707 + ], + [ + "▁Death", + -11.44322681427002 + ], + [ + "▁Animal", + -11.443347930908203 + ], + [ + "▁silk", + 
-11.443367958068848 + ], + [ + "▁Khan", + -11.443376541137695 + ], + [ + "▁romance", + -11.443424224853516 + ], + [ + "▁resist", + -11.443427085876465 + ], + [ + "▁fears", + -11.44351577758789 + ], + [ + "▁Howard", + -11.443540573120115 + ], + [ + "▁texts", + -11.443575859069824 + ], + [ + "▁subscribe", + -11.44399070739746 + ], + [ + "▁attach", + -11.444021224975586 + ], + [ + "▁ecosystem", + -11.44426441192627 + ], + [ + "Following", + -11.444445610046388 + ], + [ + "best", + -11.444450378417969 + ], + [ + "▁resolved", + -11.444604873657228 + ], + [ + "▁conservative", + -11.444711685180664 + ], + [ + "dale", + -11.44478702545166 + ], + [ + "▁Grey", + -11.44490909576416 + ], + [ + "there", + -11.4451265335083 + ], + [ + "▁30%", + -11.44544506072998 + ], + [ + "Remember", + -11.445456504821776 + ], + [ + "▁Hamilton", + -11.445842742919922 + ], + [ + "▁Bear", + -11.445985794067385 + ], + [ + "▁troops", + -11.446368217468262 + ], + [ + "▁sellers", + -11.4465913772583 + ], + [ + "▁politicians", + -11.44677448272705 + ], + [ + "▁18,", + -11.447075843811035 + ], + [ + "▁surprisingly", + -11.447755813598633 + ], + [ + "▁pursuing", + -11.447931289672852 + ], + [ + "▁Ukraine", + -11.4482421875 + ], + [ + "▁scenarios", + -11.44825267791748 + ], + [ + "▁Crystal", + -11.4486665725708 + ], + [ + "▁planted", + -11.448833465576172 + ], + [ + "▁spine", + -11.44886302947998 + ], + [ + "where", + -11.448888778686523 + ], + [ + "▁attraction", + -11.448945045471191 + ], + [ + "kg", + -11.449125289916992 + ], + [ + "▁migration", + -11.44921588897705 + ], + [ + "▁counts", + -11.449297904968262 + ], + [ + "▁rejected", + -11.449435234069824 + ], + [ + "▁lip", + -11.449560165405272 + ], + [ + "▁backing", + -11.449875831604004 + ], + [ + "▁radiation", + -11.45040512084961 + ], + [ + "▁Francis", + -11.450628280639648 + ], + [ + "▁wholesale", + -11.451006889343262 + ], + [ + "▁chemistry", + -11.45126247406006 + ], + [ + "▁enthusiasm", + -11.451465606689451 + ], + [ + "yl", + -11.451583862304688 + ], + [ + "▁preview", + -11.451683044433594 + ], + [ + "edge", + -11.451729774475098 + ], + [ + "▁theft", + -11.451754570007324 + ], + [ + "10.", + -11.451807975769045 + ], + [ + "▁1950", + -11.45187759399414 + ], + [ + "▁NBA", + -11.451952934265137 + ], + [ + "▁wallet", + -11.451992988586426 + ], + [ + "CO", + -11.452347755432127 + ], + [ + "▁jet", + -11.452356338500977 + ], + [ + "vi", + -11.452367782592772 + ], + [ + "▁abandoned", + -11.452486038208008 + ], + [ + "▁adjustments", + -11.452582359313965 + ], + [ + "cost", + -11.452604293823242 + ], + [ + "▁28,", + -11.452787399291992 + ], + [ + "▁varying", + -11.4530029296875 + ], + [ + "▁citizen", + -11.453081130981444 + ], + [ + "▁biological", + -11.453152656555176 + ], + [ + "▁twelve", + -11.4531831741333 + ], + [ + "▁spell", + -11.45350170135498 + ], + [ + "▁organizational", + -11.453516006469728 + ], + [ + "▁21,", + -11.453640937805176 + ], + [ + "▁curve", + -11.453783988952637 + ], + [ + "▁thrive", + -11.453871726989746 + ], + [ + "▁squad", + -11.453902244567873 + ], + [ + "▁stairs", + -11.453969955444336 + ], + [ + "▁indicator", + -11.454209327697754 + ], + [ + "▁Terms", + -11.454296112060549 + ], + [ + "▁villages", + -11.4544095993042 + ], + [ + "▁reportedly", + -11.45456314086914 + ], + [ + "▁EP", + -11.454854011535645 + ], + [ + "▁Photography", + -11.45496940612793 + ], + [ + "▁broader", + -11.455079078674316 + ], + [ + "2,", + -11.45529842376709 + ], + [ + "▁billing", + -11.455328941345217 + ], + [ + "▁17,", + -11.455480575561523 + ], + [ + "▁24-", + 
-11.455609321594238 + ], + [ + "▁vitamins", + -11.455665588378906 + ], + [ + "▁Poland", + -11.455775260925291 + ], + [ + "▁packs", + -11.45584201812744 + ], + [ + "▁lane", + -11.455940246582031 + ], + [ + "▁23,", + -11.456428527832031 + ], + [ + "▁Therapy", + -11.456515312194824 + ], + [ + "Through", + -11.456908226013184 + ], + [ + "▁snap", + -11.456948280334473 + ], + [ + "▁Architecture", + -11.456953048706056 + ], + [ + "▁Process", + -11.456968307495115 + ], + [ + "▁fought", + -11.456971168518066 + ], + [ + "St", + -11.457049369812012 + ], + [ + "▁checkout", + -11.457110404968262 + ], + [ + "▁2009,", + -11.457134246826172 + ], + [ + "▁UV", + -11.457223892211914 + ], + [ + "▁prospect", + -11.457268714904783 + ], + [ + "▁30-", + -11.457337379455566 + ], + [ + "▁attracted", + -11.457440376281738 + ], + [ + "▁Rich", + -11.45751667022705 + ], + [ + "ara", + -11.457682609558104 + ], + [ + "▁Jon", + -11.457756042480469 + ], + [ + "▁icons", + -11.457962989807127 + ], + [ + "▁Gary", + -11.458202362060549 + ], + [ + "▁shame", + -11.45847988128662 + ], + [ + "▁pub", + -11.458486557006836 + ], + [ + "▁static", + -11.45850944519043 + ], + [ + "▁strictly", + -11.458566665649414 + ], + [ + "▁Orleans", + -11.458573341369627 + ], + [ + "▁unlock", + -11.458623886108398 + ], + [ + "▁violation", + -11.458695411682127 + ], + [ + "▁Connect", + -11.458740234375 + ], + [ + "▁breach", + -11.458837509155272 + ], + [ + "▁legally", + -11.458905220031738 + ], + [ + "▁2008,", + -11.45903491973877 + ], + [ + "▁scary", + -11.45919132232666 + ], + [ + "▁suggestion", + -11.45949649810791 + ], + [ + "▁dumpster", + -11.459714889526367 + ], + [ + "▁recreational", + -11.459994316101074 + ], + [ + "AM", + -11.460021018981934 + ], + [ + "▁Ron", + -11.460021018981934 + ], + [ + "▁bucket", + -11.460065841674805 + ], + [ + "▁tongue", + -11.460124015808104 + ], + [ + "▁Liverpool", + -11.460142135620115 + ], + [ + "▁rehabilitation", + -11.46022129058838 + ], + [ + "▁Dublin", + -11.460335731506348 + ], + [ + "▁cognitive", + -11.460429191589355 + ], + [ + "▁counseling", + -11.460429191589355 + ], + [ + "▁exotic", + -11.460526466369627 + ], + [ + "▁Jose", + -11.460541725158691 + ], + [ + "▁SA", + -11.460665702819824 + ], + [ + "▁Fan", + -11.460689544677734 + ], + [ + "▁upgrades", + -11.460969924926758 + ], + [ + "▁distribute", + -11.461098670959473 + ], + [ + "▁prayers", + -11.46111011505127 + ], + [ + "▁variables", + -11.461332321166992 + ], + [ + "▁bronze", + -11.461374282836914 + ], + [ + "▁oz", + -11.46155834197998 + ], + [ + "▁travels", + -11.461706161499023 + ], + [ + "▁retailer", + -11.461833000183104 + ], + [ + "High", + -11.461894989013672 + ], + [ + "▁offline", + -11.461923599243164 + ], + [ + "▁festivals", + -11.46196174621582 + ], + [ + "▁physicians", + -11.462063789367676 + ], + [ + "▁$20", + -11.462215423583984 + ], + [ + "▁decorations", + -11.46297836303711 + ], + [ + "▁founding", + -11.46303653717041 + ], + [ + "▁Va", + -11.463116645812988 + ], + [ + "▁binding", + -11.463457107543944 + ], + [ + "▁drawer", + -11.463509559631348 + ], + [ + "▁Touch", + -11.463825225830078 + ], + [ + "▁Harris", + -11.463972091674805 + ], + [ + "Look", + -11.46402072906494 + ], + [ + "▁mechanisms", + -11.464195251464844 + ], + [ + "▁steep", + -11.464244842529297 + ], + [ + "▁WiFi", + -11.4646635055542 + ], + [ + "▁timber", + -11.464740753173828 + ], + [ + "▁Suite", + -11.465059280395508 + ], + [ + "▁Sand", + -11.465227127075195 + ], + [ + "90", + -11.465303421020508 + ], + [ + "▁publisher", + -11.465435981750488 + ], + [ + "▁Masters", + 
-11.465547561645508 + ], + [ + "▁catering", + -11.465707778930664 + ], + [ + "▁closure", + -11.465946197509766 + ], + [ + "▁BBQ", + -11.4664306640625 + ], + [ + "▁criticism", + -11.466644287109377 + ], + [ + "ru", + -11.466724395751951 + ], + [ + "▁54", + -11.466806411743164 + ], + [ + "▁CBD", + -11.46685791015625 + ], + [ + "▁HVAC", + -11.467039108276367 + ], + [ + "▁SQL", + -11.467265129089355 + ], + [ + "▁Native", + -11.46746826171875 + ], + [ + "▁Rick", + -11.467988014221191 + ], + [ + "▁reset", + -11.46799373626709 + ], + [ + "▁FDA", + -11.46847438812256 + ], + [ + "▁courage", + -11.46852207183838 + ], + [ + "king", + -11.469124794006348 + ], + [ + "▁adapted", + -11.46920394897461 + ], + [ + "▁Cleveland", + -11.469294548034668 + ], + [ + "▁Throughout", + -11.46948528289795 + ], + [ + "▁chamber", + -11.469768524169922 + ], + [ + "▁corporation", + -11.469768524169922 + ], + [ + "▁literary", + -11.470015525817873 + ], + [ + "▁infections", + -11.470112800598145 + ], + [ + "▁chic", + -11.47019386291504 + ], + [ + "▁recognised", + -11.470257759094238 + ], + [ + "▁tv", + -11.470656394958496 + ], + [ + "▁suspended", + -11.47078800201416 + ], + [ + "▁cry", + -11.471051216125488 + ], + [ + "▁Enter", + -11.471158027648926 + ], + [ + "▁stressful", + -11.471189498901367 + ], + [ + "▁consume", + -11.471545219421388 + ], + [ + "▁Jazz", + -11.471624374389648 + ], + [ + "▁defence", + -11.47169589996338 + ], + [ + "\")", + -11.471699714660645 + ], + [ + "▁seamless", + -11.471776962280272 + ], + [ + "▁volumes", + -11.471924781799316 + ], + [ + "▁notable", + -11.47195529937744 + ], + [ + "▁14,", + -11.472431182861328 + ], + [ + "▁Louisiana", + -11.47284698486328 + ], + [ + "▁Ram", + -11.473041534423828 + ], + [ + "▁liable", + -11.473296165466309 + ], + [ + "▁lime", + -11.473824501037598 + ], + [ + "▁MO", + -11.474206924438477 + ], + [ + "ino", + -11.474530220031738 + ], + [ + "▁silence", + -11.47461986541748 + ], + [ + "▁Independent", + -11.47472095489502 + ], + [ + "▁complications", + -11.474796295166016 + ], + [ + "▁harvest", + -11.475675582885742 + ], + [ + "▁Federation", + -11.475687980651855 + ], + [ + "▁Could", + -11.475756645202637 + ], + [ + "▁construct", + -11.475945472717283 + ], + [ + "▁shake", + -11.475977897644045 + ], + [ + "▁sons", + -11.476598739624023 + ], + [ + "▁executed", + -11.476808547973633 + ], + [ + "▁secrets", + -11.47699737548828 + ], + [ + "gate", + -11.477011680603027 + ], + [ + "ette", + -11.477055549621582 + ], + [ + "▁intensive", + -11.477067947387695 + ], + [ + "▁Duration", + -11.477134704589844 + ], + [ + "▁na", + -11.47714900970459 + ], + [ + "▁catalog", + -11.477272033691406 + ], + [ + "▁specify", + -11.477402687072754 + ], + [ + "▁calculate", + -11.477875709533691 + ], + [ + "▁royal", + -11.478311538696287 + ], + [ + "▁Cheap", + -11.478463172912598 + ], + [ + "▁diving", + -11.478580474853516 + ], + [ + "▁Clinic", + -11.478729248046877 + ], + [ + "▁reminds", + -11.479942321777344 + ], + [ + "▁particles", + -11.480100631713867 + ], + [ + "▁explaining", + -11.480185508728027 + ], + [ + "▁hardwood", + -11.480224609375 + ], + [ + "▁terrain", + -11.480263710021973 + ], + [ + "aka", + -11.480292320251465 + ], + [ + "▁touches", + -11.480446815490724 + ], + [ + "▁variation", + -11.480592727661133 + ], + [ + "most", + -11.48102569580078 + ], + [ + "▁arriving", + -11.481860160827637 + ], + [ + "▁psychology", + -11.481966972351074 + ], + [ + "▁declined", + -11.482239723205566 + ], + [ + "▁manages", + -11.482333183288574 + ], + [ + "▁pros", + -11.48249340057373 + ], + [ + 
"▁contacting", + -11.482791900634766 + ], + [ + "▁Guard", + -11.482854843139648 + ], + [ + "▁Housing", + -11.482933044433594 + ], + [ + "▁Wind", + -11.483786582946776 + ], + [ + "ata", + -11.483861923217772 + ], + [ + "▁blanket", + -11.48391342163086 + ], + [ + "▁Anyone", + -11.484065055847168 + ], + [ + "▁Clear", + -11.48451042175293 + ], + [ + "▁Kings", + -11.484712600708008 + ], + [ + "▁Heat", + -11.485004425048828 + ], + [ + "▁backgrounds", + -11.485113143920898 + ], + [ + "▁blockchain", + -11.485212326049805 + ], + [ + "▁Bird", + -11.485239028930664 + ], + [ + "▁insured", + -11.48535442352295 + ], + [ + "▁planting", + -11.485464096069336 + ], + [ + "▁Alan", + -11.485841751098633 + ], + [ + "▁Additional", + -11.485873222351074 + ], + [ + "▁duo", + -11.48601531982422 + ], + [ + "▁95", + -11.48608112335205 + ], + [ + "ut", + -11.486169815063477 + ], + [ + "▁Par", + -11.486465454101562 + ], + [ + "▁rentals", + -11.486650466918944 + ], + [ + "▁Justin", + -11.48684024810791 + ], + [ + "▁matched", + -11.486992835998535 + ], + [ + "name", + -11.487385749816896 + ], + [ + "tel", + -11.487403869628906 + ], + [ + "read", + -11.487421989440918 + ], + [ + "OS", + -11.487700462341309 + ], + [ + "Posted", + -11.48790454864502 + ], + [ + "▁consisting", + -11.488070487976074 + ], + [ + "Copyright", + -11.488299369812012 + ], + [ + "▁Active", + -11.488521575927734 + ], + [ + "▁regarded", + -11.48869514465332 + ], + [ + "▁skirt", + -11.488751411437988 + ], + [ + "▁enrolled", + -11.488767623901367 + ], + [ + "▁refuse", + -11.489110946655272 + ], + [ + "▁mis", + -11.48913860321045 + ], + [ + "▁transfers", + -11.48935317993164 + ], + [ + "▁lbs", + -11.489374160766602 + ], + [ + "▁improves", + -11.489611625671388 + ], + [ + "face", + -11.48972511291504 + ], + [ + "▁abundance", + -11.490518569946287 + ], + [ + "▁puzzle", + -11.490745544433594 + ], + [ + "▁Snow", + -11.491024017333984 + ], + [ + "▁syrup", + -11.491195678710938 + ], + [ + "▁51", + -11.491451263427734 + ], + [ + "▁Dragon", + -11.49148178100586 + ], + [ + "▁Ke", + -11.491483688354492 + ], + [ + "▁remembered", + -11.491890907287598 + ], + [ + "▁referring", + -11.491908073425291 + ], + [ + "▁Upon", + -11.493179321289062 + ], + [ + "▁stove", + -11.493659019470217 + ], + [ + "▁widespread", + -11.493892669677734 + ], + [ + "▁athletic", + -11.493913650512695 + ], + [ + "▁fried", + -11.494048118591309 + ], + [ + "▁adjustment", + -11.49415111541748 + ], + [ + "▁Leather", + -11.494184494018556 + ], + [ + "▁classified", + -11.49457836151123 + ], + [ + "ig", + -11.494701385498049 + ], + [ + "▁treasure", + -11.494903564453123 + ], + [ + "▁suppose", + -11.494954109191896 + ], + [ + "▁Jennifer", + -11.494987487792969 + ], + [ + "▁actress", + -11.495057106018066 + ], + [ + "▁lifting", + -11.495110511779783 + ], + [ + "▁Race", + -11.495390892028809 + ], + [ + "▁sells", + -11.49587631225586 + ], + [ + "▁joints", + -11.495965003967283 + ], + [ + "ai", + -11.496267318725586 + ], + [ + "▁experiments", + -11.496429443359377 + ], + [ + "▁16,", + -11.49652099609375 + ], + [ + "▁dealers", + -11.496790885925291 + ], + [ + "▁humor", + -11.496989250183104 + ], + [ + "▁specialize", + -11.497211456298828 + ], + [ + "▁cakes", + -11.497352600097656 + ], + [ + "ran", + -11.497386932373049 + ], + [ + "▁junk", + -11.497981071472168 + ], + [ + "fish", + -11.49799633026123 + ], + [ + "▁linear", + -11.498254776000977 + ], + [ + "hr", + -11.498384475708008 + ], + [ + "▁Garage", + -11.498528480529783 + ], + [ + "▁chapters", + -11.498908996582031 + ], + [ + "▁Universal", + 
-11.498966217041016 + ], + [ + "▁brake", + -11.498973846435549 + ], + [ + "▁spice", + -11.498992919921877 + ], + [ + "▁Tool", + -11.499324798583984 + ], + [ + "▁challenged", + -11.499377250671388 + ], + [ + "▁AP", + -11.499442100524902 + ], + [ + "▁consuming", + -11.499627113342283 + ], + [ + "▁Walker", + -11.499632835388184 + ], + [ + "▁polished", + -11.499839782714844 + ], + [ + "▁casinos", + -11.499896049499512 + ], + [ + "▁vanity", + -11.500356674194336 + ], + [ + "▁cottage", + -11.500630378723145 + ], + [ + "▁Leave", + -11.500636100769045 + ], + [ + "▁pairs", + -11.500758171081545 + ], + [ + "▁3.5", + -11.500993728637695 + ], + [ + "▁catching", + -11.50111961364746 + ], + [ + "▁proprietary", + -11.501253128051758 + ], + [ + "▁buried", + -11.501296997070312 + ], + [ + "ent", + -11.501410484313965 + ], + [ + "▁Fe", + -11.501419067382812 + ], + [ + "Life", + -11.501734733581545 + ], + [ + "▁fortunate", + -11.501819610595703 + ], + [ + "▁Canon", + -11.502156257629396 + ], + [ + "▁iTunes", + -11.502293586730955 + ], + [ + "Though", + -11.50238037109375 + ], + [ + "▁sunlight", + -11.50240993499756 + ], + [ + "▁upgraded", + -11.502410888671877 + ], + [ + "▁scenery", + -11.50246524810791 + ], + [ + "▁analysts", + -11.50259780883789 + ], + [ + "▁deserves", + -11.50289535522461 + ], + [ + "▁balcony", + -11.502914428710938 + ], + [ + "▁scent", + -11.502954483032228 + ], + [ + "▁holders", + -11.503050804138184 + ], + [ + "MB", + -11.503190994262695 + ], + [ + "▁flavour", + -11.503303527832031 + ], + [ + "▁Bruce", + -11.503464698791504 + ], + [ + "▁democracy", + -11.50352954864502 + ], + [ + "▁saves", + -11.503629684448242 + ], + [ + "Mo", + -11.50461483001709 + ], + [ + "▁jumping", + -11.504677772521973 + ], + [ + "00", + -11.50507926940918 + ], + [ + "▁Amy", + -11.50508975982666 + ], + [ + "▁Delta", + -11.505173683166504 + ], + [ + "▁eBay", + -11.505367279052734 + ], + [ + "Start", + -11.505440711975098 + ], + [ + "▁generator", + -11.505475997924805 + ], + [ + "▁claiming", + -11.505586624145508 + ], + [ + "yes", + -11.505606651306152 + ], + [ + "▁Hello", + -11.50562858581543 + ], + [ + "▁Editor", + -11.505681037902832 + ], + [ + "▁nationally", + -11.505805015563965 + ], + [ + "▁Lodge", + -11.506043434143066 + ], + [ + "▁optical", + -11.50614070892334 + ], + [ + "-8", + -11.506369590759276 + ], + [ + "▁realise", + -11.506440162658691 + ], + [ + "▁1999", + -11.506796836853027 + ], + [ + "▁submitting", + -11.506823539733888 + ], + [ + "▁GM", + -11.506936073303224 + ], + [ + "▁Chicken", + -11.50702667236328 + ], + [ + "▁enterprises", + -11.507052421569824 + ], + [ + "▁sliding", + -11.507234573364258 + ], + [ + "▁cope", + -11.507341384887695 + ], + [ + "22", + -11.507479667663574 + ], + [ + "▁negotiations", + -11.507723808288574 + ], + [ + "▁therapeutic", + -11.508642196655272 + ], + [ + "▁Degree", + -11.509021759033203 + ], + [ + "▁revolution", + -11.509031295776367 + ], + [ + "▁Villa", + -11.509147644042969 + ], + [ + ".....", + -11.509211540222168 + ], + [ + "▁Jobs", + -11.509291648864746 + ], + [ + "after", + -11.509459495544434 + ], + [ + "Place", + -11.509897232055664 + ], + [ + "▁Mediterranean", + -11.509917259216309 + ], + [ + "▁warned", + -11.510015487670898 + ], + [ + "▁Organic", + -11.51008129119873 + ], + [ + "▁Usually", + -11.510252952575684 + ], + [ + "▁educated", + -11.510299682617188 + ], + [ + "▁sponsors", + -11.510313034057615 + ], + [ + "▁accredited", + -11.51036262512207 + ], + [ + "▁hack", + -11.51073169708252 + ], + [ + "▁circulation", + -11.510818481445312 + ], + [ + "▁Duke", + 
-11.510870933532717 + ], + [ + "pin", + -11.51100254058838 + ], + [ + "▁Rep", + -11.511063575744627 + ], + [ + "▁sustained", + -11.51115894317627 + ], + [ + "PS", + -11.511369705200195 + ], + [ + "▁creamy", + -11.511445999145508 + ], + [ + "▁Soft", + -11.511476516723633 + ], + [ + "▁Ch", + -11.511486053466797 + ], + [ + "ez", + -11.51149845123291 + ], + [ + "▁depot", + -11.511560440063477 + ], + [ + "▁portrait", + -11.511685371398926 + ], + [ + "▁Rd", + -11.51175308227539 + ], + [ + "▁Tell", + -11.511820793151855 + ], + [ + "▁satisfying", + -11.51188850402832 + ], + [ + "▁crimes", + -11.512184143066406 + ], + [ + "▁mailing", + -11.51234245300293 + ], + [ + "▁GB", + -11.512406349182127 + ], + [ + "▁Range", + -11.51243495941162 + ], + [ + "▁gauge", + -11.512499809265137 + ], + [ + "▁Tickets", + -11.512584686279297 + ], + [ + "?!", + -11.513129234313965 + ], + [ + "▁zones", + -11.513182640075684 + ], + [ + "click", + -11.513202667236328 + ], + [ + "▁traveled", + -11.51327419281006 + ], + [ + "▁associations", + -11.513389587402344 + ], + [ + "▁Charlie", + -11.51345157623291 + ], + [ + "▁confidential", + -11.513509750366213 + ], + [ + "▁nurses", + -11.51353359222412 + ], + [ + "▁violent", + -11.51357364654541 + ], + [ + "▁populations", + -11.51362419128418 + ], + [ + "▁jam", + -11.514826774597168 + ], + [ + "▁Laura", + -11.514912605285645 + ], + [ + "▁Ni", + -11.515098571777344 + ], + [ + "▁Build", + -11.51525592803955 + ], + [ + "▁Birmingham", + -11.515501976013184 + ], + [ + "▁Hunter", + -11.515634536743164 + ], + [ + "▁porch", + -11.516321182250977 + ], + [ + "▁convention", + -11.51653003692627 + ], + [ + "vo", + -11.516897201538086 + ], + [ + "▁Lisa", + -11.517045974731444 + ], + [ + "▁combinations", + -11.517047882080078 + ], + [ + "also", + -11.517202377319336 + ], + [ + "▁translate", + -11.51721477508545 + ], + [ + "▁5%", + -11.517326354980469 + ], + [ + "▁retention", + -11.51733684539795 + ], + [ + "▁nerve", + -11.517500877380373 + ], + [ + "▁goodness", + -11.5176420211792 + ], + [ + "▁boil", + -11.517826080322266 + ], + [ + "OR", + -11.518061637878418 + ], + [ + "▁Rio", + -11.518096923828123 + ], + [ + "▁promoted", + -11.518190383911133 + ], + [ + "▁assault", + -11.518228530883787 + ], + [ + "▁ABC", + -11.518312454223633 + ], + [ + "▁privilege", + -11.5184965133667 + ], + [ + "▁Nelson", + -11.51860809326172 + ], + [ + "Book", + -11.518637657165527 + ], + [ + "▁Parts", + -11.518640518188477 + ], + [ + "▁modeling", + -11.51864528656006 + ], + [ + "▁lineup", + -11.518712997436523 + ], + [ + "▁emotion", + -11.5189208984375 + ], + [ + "▁circles", + -11.518959045410156 + ], + [ + "▁sunset", + -11.519195556640623 + ], + [ + "▁enhancing", + -11.519399642944336 + ], + [ + "▁explores", + -11.519445419311523 + ], + [ + "▁tanks", + -11.51954746246338 + ], + [ + "Maybe", + -11.51961898803711 + ], + [ + "▁leak", + -11.51984405517578 + ], + [ + "▁CV", + -11.519916534423828 + ], + [ + "()", + -11.520228385925291 + ], + [ + "▁Factory", + -11.520557403564451 + ], + [ + "▁modes", + -11.52064037322998 + ], + [ + "▁compound", + -11.520746231079102 + ], + [ + "▁Especially", + -11.52085018157959 + ], + [ + "▁cargo", + -11.52090835571289 + ], + [ + "▁mi", + -11.521004676818848 + ], + [ + "▁max", + -11.521360397338867 + ], + [ + "html", + -11.521377563476562 + ], + [ + "▁Eve", + -11.5214262008667 + ], + [ + "ab", + -11.52149772644043 + ], + [ + "▁Meanwhile", + -11.52160358428955 + ], + [ + "▁Emergency", + -11.521791458129885 + ], + [ + "▁Depending", + -11.521970748901367 + ], + [ + "▁downloading", + 
-11.522160530090332 + ], + [ + "▁mentor", + -11.522398948669434 + ], + [ + "▁isolated", + -11.522414207458496 + ], + [ + "▁Opera", + -11.523016929626465 + ], + [ + "▁Zone", + -11.523061752319336 + ], + [ + "tic", + -11.523229598999023 + ], + [ + "▁ample", + -11.523277282714844 + ], + [ + "▁guns", + -11.52362060546875 + ], + [ + "▁alternate", + -11.523758888244627 + ], + [ + "▁summit", + -11.524027824401855 + ], + [ + "▁coupled", + -11.52408218383789 + ], + [ + "ung", + -11.524092674255373 + ], + [ + "▁god", + -11.524405479431152 + ], + [ + "▁Connecticut", + -11.52441692352295 + ], + [ + "▁Value", + -11.524768829345703 + ], + [ + "▁reasonably", + -11.525131225585938 + ], + [ + "▁Ring", + -11.525136947631836 + ], + [ + "▁Nike", + -11.525171279907228 + ], + [ + "▁mirrors", + -11.52532196044922 + ], + [ + "▁arguments", + -11.525762557983398 + ], + [ + "▁tenants", + -11.525903701782228 + ], + [ + "▁connects", + -11.526044845581056 + ], + [ + "ii", + -11.526272773742676 + ], + [ + "▁elementary", + -11.526700973510742 + ], + [ + "▁700", + -11.52687931060791 + ], + [ + "▁Nevada", + -11.52706527709961 + ], + [ + "▁Shower", + -11.52716827392578 + ], + [ + "▁pumps", + -11.52727508544922 + ], + [ + "▁Manhattan", + -11.527363777160645 + ], + [ + "▁beating", + -11.527543067932127 + ], + [ + "▁rod", + -11.527837753295898 + ], + [ + "▁Register", + -11.52797031402588 + ], + [ + "▁clearance", + -11.528143882751465 + ], + [ + "▁scheduling", + -11.528546333312988 + ], + [ + "▁optimize", + -11.528583526611328 + ], + [ + "▁annoying", + -11.528831481933594 + ], + [ + "Enjoy", + -11.529322624206545 + ], + [ + "▁keyword", + -11.529399871826172 + ], + [ + "▁dis", + -11.52944278717041 + ], + [ + "▁tailor", + -11.529542922973633 + ], + [ + "500", + -11.52956199645996 + ], + [ + "▁mothers", + -11.52978515625 + ], + [ + "▁Primary", + -11.529828071594238 + ], + [ + "▁tricky", + -11.52983570098877 + ], + [ + "cc", + -11.530065536499023 + ], + [ + "▁Details", + -11.530179977416992 + ], + [ + "tz", + -11.530200004577637 + ], + [ + "▁sturdy", + -11.530477523803713 + ], + [ + "God", + -11.530577659606934 + ], + [ + "▁notified", + -11.530694007873535 + ], + [ + "▁Screen", + -11.531089782714844 + ], + [ + "▁stake", + -11.53116226196289 + ], + [ + "80", + -11.531302452087402 + ], + [ + "▁protest", + -11.531362533569336 + ], + [ + "▁blast", + -11.531509399414062 + ], + [ + "▁borders", + -11.531888961791992 + ], + [ + "▁ruled", + -11.532224655151367 + ], + [ + "▁obstacles", + -11.532581329345703 + ], + [ + "▁Judge", + -11.532633781433104 + ], + [ + "▁(5", + -11.532880783081056 + ], + [ + "▁25%", + -11.53333568572998 + ], + [ + "▁Grove", + -11.53335666656494 + ], + [ + "Save", + -11.533767700195312 + ], + [ + "ff", + -11.533934593200684 + ], + [ + "▁robot", + -11.534318923950195 + ], + [ + "▁therapist", + -11.534594535827637 + ], + [ + "▁pharmacy", + -11.534652709960938 + ], + [ + "ver", + -11.53528881072998 + ], + [ + "▁approached", + -11.535295486450195 + ], + [ + "▁Sen", + -11.535324096679688 + ], + [ + "▁Dakota", + -11.53536605834961 + ], + [ + "▁maintains", + -11.53538703918457 + ], + [ + "▁hop", + -11.535432815551758 + ], + [ + "▁sensitivity", + -11.535606384277344 + ], + [ + "▁365", + -11.535696029663086 + ], + [ + "▁extensions", + -11.535916328430176 + ], + [ + "▁wore", + -11.53593635559082 + ], + [ + "▁promotes", + -11.53599452972412 + ], + [ + "▁Later", + -11.536202430725098 + ], + [ + "▁Ex", + -11.536459922790527 + ], + [ + "▁56", + -11.536487579345703 + ], + [ + "▁9,", + -11.5365571975708 + ], + [ + "▁26,", + 
-11.53656005859375 + ], + [ + "▁tribute", + -11.537192344665527 + ], + [ + "▁shore", + -11.537205696105955 + ], + [ + "▁mysterious", + -11.537237167358398 + ], + [ + "▁diagnostic", + -11.537298202514648 + ], + [ + "▁Afghanistan", + -11.537301063537598 + ], + [ + "▁Pick", + -11.53730583190918 + ], + [ + "zi", + -11.537506103515623 + ], + [ + "ning", + -11.53757381439209 + ], + [ + "▁struggles", + -11.537619590759276 + ], + [ + "▁collar", + -11.53771686553955 + ], + [ + "▁repeatedly", + -11.538095474243164 + ], + [ + "▁80%", + -11.53822135925293 + ], + [ + "▁receiver", + -11.538329124450684 + ], + [ + "oh", + -11.538533210754396 + ], + [ + "▁pest", + -11.538718223571776 + ], + [ + "▁controlling", + -11.53885555267334 + ], + [ + "▁forgot", + -11.53902816772461 + ], + [ + "▁autumn", + -11.539030075073242 + ], + [ + "▁Greg", + -11.539192199707031 + ], + [ + "▁picnic", + -11.539320945739746 + ], + [ + "ml", + -11.539331436157228 + ], + [ + "▁Amazing", + -11.539344787597656 + ], + [ + "▁schedules", + -11.53955364227295 + ], + [ + "▁museums", + -11.539603233337402 + ], + [ + "▁warming", + -11.53985595703125 + ], + [ + "▁exhaust", + -11.53989601135254 + ], + [ + "▁daughters", + -11.53994369506836 + ], + [ + "▁Installation", + -11.540205001831056 + ], + [ + "▁homeless", + -11.540778160095217 + ], + [ + "▁darkness", + -11.541030883789062 + ], + [ + "▁Decor", + -11.54106903076172 + ], + [ + "part", + -11.54144287109375 + ], + [ + "▁sailing", + -11.541796684265137 + ], + [ + "Set", + -11.54195499420166 + ], + [ + "▁acids", + -11.54224967956543 + ], + [ + "▁releasing", + -11.542250633239746 + ], + [ + "oriented", + -11.542471885681152 + ], + [ + "▁53", + -11.542702674865724 + ], + [ + "▁assure", + -11.543246269226074 + ], + [ + "▁Bag", + -11.543342590332031 + ], + [ + "▁jumped", + -11.543540954589844 + ], + [ + "**", + -11.54355525970459 + ], + [ + "▁tactics", + -11.543662071228027 + ], + [ + "▁Trans", + -11.54377555847168 + ], + [ + "▁deleted", + -11.543880462646484 + ], + [ + "ria", + -11.54388427734375 + ], + [ + "▁delightful", + -11.544024467468262 + ], + [ + "▁Weight", + -11.54417324066162 + ], + [ + "▁Results", + -11.544179916381836 + ], + [ + "▁ambitious", + -11.5443696975708 + ], + [ + "▁Freedom", + -11.54447078704834 + ], + [ + "▁drew", + -11.544517517089844 + ], + [ + "▁outlook", + -11.544595718383787 + ], + [ + "▁woods", + -11.544612884521484 + ], + [ + "▁1-", + -11.544672966003418 + ], + [ + "▁protects", + -11.544816970825195 + ], + [ + "▁intuitive", + -11.544899940490724 + ], + [ + "▁Lead", + -11.545063018798828 + ], + [ + "▁physics", + -11.545183181762695 + ], + [ + "▁gaps", + -11.545295715332031 + ], + [ + "▁crossed", + -11.54545783996582 + ], + [ + "▁watches", + -11.54550075531006 + ], + [ + "▁Directors", + -11.545897483825684 + ], + [ + "▁pile", + -11.546030044555664 + ], + [ + "▁counties", + -11.54640293121338 + ], + [ + "▁arrives", + -11.54666233062744 + ], + [ + "▁sake", + -11.546737670898438 + ], + [ + "▁Jonathan", + -11.546832084655762 + ], + [ + "▁guarantees", + -11.546923637390137 + ], + [ + "▁Baltimore", + -11.546937942504885 + ], + [ + "▁secretary", + -11.546977996826172 + ], + [ + "▁lips", + -11.547011375427246 + ], + [ + "▁para", + -11.547045707702637 + ], + [ + "▁27,", + -11.547468185424805 + ], + [ + "▁desires", + -11.54755401611328 + ], + [ + "▁seafood", + -11.54773235321045 + ], + [ + "▁Consider", + -11.547769546508787 + ], + [ + "▁Greater", + -11.548069953918455 + ], + [ + "▁Xbox", + -11.54812240600586 + ], + [ + "▁syndrome", + -11.548253059387209 + ], + [ + 
"▁corruption", + -11.54830265045166 + ], + [ + "▁editorial", + -11.548434257507324 + ], + [ + "▁Madison", + -11.54853057861328 + ], + [ + "▁gloves", + -11.548603057861328 + ], + [ + "▁Half", + -11.548822402954102 + ], + [ + "▁Mall", + -11.549002647399902 + ], + [ + "▁pond", + -11.54903793334961 + ], + [ + "▁Tools", + -11.549124717712402 + ], + [ + "Come", + -11.549189567565918 + ], + [ + "▁Vision", + -11.549308776855469 + ], + [ + "▁Sa", + -11.549325942993164 + ], + [ + "▁jury", + -11.549481391906738 + ], + [ + "▁WITH", + -11.54958438873291 + ], + [ + "▁touched", + -11.549887657165527 + ], + [ + "▁opponent", + -11.549991607666016 + ], + [ + "▁firmly", + -11.55001163482666 + ], + [ + "▁freezer", + -11.550198554992676 + ], + [ + "▁styling", + -11.550294876098633 + ], + [ + "▁barrel", + -11.550336837768556 + ], + [ + "▁supervision", + -11.55056095123291 + ], + [ + "Visit", + -11.550673484802246 + ], + [ + "gar", + -11.550766944885254 + ], + [ + "▁applicant", + -11.550848960876465 + ], + [ + "▁phenomenon", + -11.550889015197754 + ], + [ + "ated", + -11.550891876220703 + ], + [ + "▁afterwards", + -11.550951957702637 + ], + [ + "▁switching", + -11.551076889038086 + ], + [ + "▁Orlando", + -11.551243782043455 + ], + [ + "▁Version", + -11.55126667022705 + ], + [ + "real", + -11.551365852355955 + ], + [ + "▁Overall", + -11.551401138305664 + ], + [ + "▁quilt", + -11.55142879486084 + ], + [ + "▁drying", + -11.55154800415039 + ], + [ + "▁dissertation", + -11.551606178283691 + ], + [ + "▁prevents", + -11.55190658569336 + ], + [ + "▁Mt", + -11.55219554901123 + ], + [ + "grade", + -11.55224323272705 + ], + [ + "▁IV", + -11.55236530303955 + ], + [ + "▁1:", + -11.552444458007812 + ], + [ + "▁Moscow", + -11.552490234375 + ], + [ + "▁stepped", + -11.5530424118042 + ], + [ + "▁entrepreneur", + -11.55305290222168 + ], + [ + "▁Kent", + -11.553058624267578 + ], + [ + "▁decreased", + -11.553058624267578 + ], + [ + "▁weddings", + -11.5533447265625 + ], + [ + "▁Choice", + -11.553564071655272 + ], + [ + "▁legend", + -11.55360221862793 + ], + [ + "▁jewellery", + -11.55374813079834 + ], + [ + "▁understands", + -11.553874969482422 + ], + [ + "▁Resource", + -11.55411434173584 + ], + [ + "▁reviewing", + -11.554314613342283 + ], + [ + "▁cables", + -11.554532051086426 + ], + [ + "▁killer", + -11.554760932922363 + ], + [ + "▁rally", + -11.555070877075195 + ], + [ + "▁confusing", + -11.55516529083252 + ], + [ + "▁methodology", + -11.55527400970459 + ], + [ + "▁frustrating", + -11.555757522583008 + ], + [ + "bi", + -11.555840492248535 + ], + [ + "▁Susan", + -11.555940628051758 + ], + [ + "▁pension", + -11.555970191955566 + ], + [ + "▁Target", + -11.556549072265623 + ], + [ + "▁finances", + -11.556574821472168 + ], + [ + "mann", + -11.556660652160645 + ], + [ + "▁alignment", + -11.556904792785645 + ], + [ + "▁educators", + -11.557129859924316 + ], + [ + "▁oh", + -11.557236671447754 + ], + [ + "▁teen", + -11.55725383758545 + ], + [ + "▁jar", + -11.557708740234377 + ], + [ + "▁99", + -11.55784034729004 + ], + [ + "▁Analytics", + -11.557908058166504 + ], + [ + "▁revenues", + -11.557926177978516 + ], + [ + "▁bride", + -11.558027267456056 + ], + [ + "▁uncertainty", + -11.558039665222168 + ], + [ + "▁consolidation", + -11.558382987976074 + ], + [ + "▁mod", + -11.55871868133545 + ], + [ + "will", + -11.558918952941896 + ], + [ + "ano", + -11.55903434753418 + ], + [ + "▁boiler", + -11.5592041015625 + ], + [ + "mon", + -11.559316635131836 + ], + [ + "▁disciplines", + -11.559584617614746 + ], + [ + "ani", + -11.559747695922852 + ], + [ + 
"▁relatives", + -11.559950828552246 + ], + [ + "▁photographers", + -11.560275077819824 + ], + [ + "▁governor", + -11.560479164123535 + ], + [ + "▁66", + -11.561037063598633 + ], + [ + "▁Mind", + -11.561236381530762 + ], + [ + "▁gardening", + -11.561236381530762 + ], + [ + "▁occupied", + -11.56155490875244 + ], + [ + "...\"", + -11.56165599822998 + ], + [ + "01", + -11.561717987060549 + ], + [ + "▁Med", + -11.561718940734863 + ], + [ + "▁musician", + -11.56238842010498 + ], + [ + "han", + -11.56242847442627 + ], + [ + "▁mapping", + -11.562551498413086 + ], + [ + "▁assumed", + -11.56263256072998 + ], + [ + "▁Tan", + -11.562654495239258 + ], + [ + "▁animated", + -11.562753677368164 + ], + [ + "▁Send", + -11.562880516052246 + ], + [ + "▁flowing", + -11.562921524047852 + ], + [ + "▁journalist", + -11.563106536865234 + ], + [ + "▁facebook", + -11.563204765319824 + ], + [ + "▁fatigue", + -11.56326389312744 + ], + [ + "▁proceeds", + -11.563360214233398 + ], + [ + "▁lover", + -11.563458442687988 + ], + [ + "▁cherry", + -11.563708305358888 + ], + [ + "▁Cream", + -11.563942909240724 + ], + [ + "▁concerts", + -11.563977241516112 + ], + [ + "ries", + -11.564061164855955 + ], + [ + "▁traders", + -11.564108848571776 + ], + [ + "▁Yu", + -11.564172744750977 + ], + [ + "▁households", + -11.56445598602295 + ], + [ + "▁Operations", + -11.56470775604248 + ], + [ + "▁approaching", + -11.564981460571287 + ], + [ + "▁Clinical", + -11.56535530090332 + ], + [ + "▁influences", + -11.565404891967772 + ], + [ + "▁runner", + -11.565435409545898 + ], + [ + "▁Communication", + -11.56569004058838 + ], + [ + "▁mere", + -11.56592845916748 + ], + [ + "▁tunnel", + -11.566452980041504 + ], + [ + "▁behaviors", + -11.567012786865234 + ], + [ + "▁struggled", + -11.56725025177002 + ], + [ + "-6", + -11.567742347717283 + ], + [ + "▁Glen", + -11.56783962249756 + ], + [ + "▁delays", + -11.56793212890625 + ], + [ + "▁Given", + -11.56804084777832 + ], + [ + "▁sweat", + -11.568098068237305 + ], + [ + "▁Craig", + -11.568219184875488 + ], + [ + "under", + -11.568225860595703 + ], + [ + "▁IRS", + -11.568378448486328 + ], + [ + "▁Transportation", + -11.56853199005127 + ], + [ + "▁pixels", + -11.568597793579102 + ], + [ + "▁Barbara", + -11.56865119934082 + ], + [ + "▁Beijing", + -11.56885051727295 + ], + [ + "Should", + -11.569204330444336 + ], + [ + "ura", + -11.569214820861816 + ], + [ + "▁crafts", + -11.569353103637695 + ], + [ + "▁Ga", + -11.56992530822754 + ], + [ + "▁streams", + -11.57021713256836 + ], + [ + "▁galleries", + -11.57027530670166 + ], + [ + "NET", + -11.570314407348633 + ], + [ + "▁12-", + -11.57040309906006 + ], + [ + "▁SP", + -11.570473670959473 + ], + [ + "▁quicker", + -11.57058334350586 + ], + [ + "▁turkey", + -11.570634841918944 + ], + [ + "ach", + -11.570686340332031 + ], + [ + "▁Lighting", + -11.570855140686035 + ], + [ + "▁automobile", + -11.571085929870604 + ], + [ + "▁farms", + -11.57140064239502 + ], + [ + "▁freeze", + -11.5714111328125 + ], + [ + "▁lit", + -11.571637153625488 + ], + [ + "▁Face", + -11.571863174438477 + ], + [ + "▁forests", + -11.571903228759766 + ], + [ + "▁poly", + -11.571940422058104 + ], + [ + "▁Swedish", + -11.572113037109377 + ], + [ + "▁excel", + -11.572145462036133 + ], + [ + "▁101", + -11.572224617004396 + ], + [ + "▁Baker", + -11.572256088256836 + ], + [ + "Would", + -11.57226848602295 + ], + [ + "▁positioned", + -11.572653770446776 + ], + [ + "▁subsequently", + -11.572850227355955 + ], + [ + "▁learners", + -11.572918891906738 + ], + [ + "▁1998", + -11.57315444946289 + ], + [ + 
"▁Chef", + -11.573331832885742 + ], + [ + "▁spicy", + -11.57347297668457 + ], + [ + "▁Josh", + -11.573482513427734 + ], + [ + "▁dynamics", + -11.573516845703123 + ], + [ + "▁Jews", + -11.57366180419922 + ], + [ + "▁simpler", + -11.573750495910645 + ], + [ + "▁Thompson", + -11.573882102966309 + ], + [ + "▁proceedings", + -11.573896408081056 + ], + [ + "▁landscaping", + -11.5739164352417 + ], + [ + "▁illustrations", + -11.573997497558594 + ], + [ + "▁:)", + -11.574575424194336 + ], + [ + "▁Ca", + -11.574752807617188 + ], + [ + "▁breed", + -11.574979782104492 + ], + [ + "▁crops", + -11.575161933898926 + ], + [ + "green", + -11.575185775756836 + ], + [ + "▁2-3", + -11.575244903564451 + ], + [ + "▁Text", + -11.57532024383545 + ], + [ + "space", + -11.575353622436523 + ], + [ + "▁deposits", + -11.57557487487793 + ], + [ + "▁Production", + -11.5758056640625 + ], + [ + "▁Reserved", + -11.575843811035156 + ], + [ + "▁controversial", + -11.576085090637209 + ], + [ + "▁hoped", + -11.576207160949709 + ], + [ + "▁Hence", + -11.576374053955078 + ], + [ + "▁lending", + -11.576458930969238 + ], + [ + "▁neighborhoods", + -11.576777458190918 + ], + [ + "▁cluster", + -11.576809883117676 + ], + [ + "04", + -11.577096939086914 + ], + [ + "▁Impact", + -11.577141761779783 + ], + [ + "▁Drug", + -11.577764511108398 + ], + [ + "▁hood", + -11.57796859741211 + ], + [ + "▁110", + -11.578032493591309 + ], + [ + "▁licensing", + -11.57806396484375 + ], + [ + "▁surveillance", + -11.57818603515625 + ], + [ + "▁spreading", + -11.57826805114746 + ], + [ + "▁perspectives", + -11.578536987304688 + ], + [ + "▁prep", + -11.578782081604004 + ], + [ + "info", + -11.579216003417969 + ], + [ + "▁likewise", + -11.579231262207031 + ], + [ + "▁horizontal", + -11.579463005065918 + ], + [ + "▁custody", + -11.579784393310549 + ], + [ + "▁gay", + -11.579885482788086 + ], + [ + "▁Pittsburgh", + -11.580137252807615 + ], + [ + "▁Pictures", + -11.58016872406006 + ], + [ + "▁captain", + -11.580202102661133 + ], + [ + "▁notify", + -11.580276489257812 + ], + [ + "▁lace", + -11.58030605316162 + ], + [ + "▁Ar", + -11.58071994781494 + ], + [ + "▁comparable", + -11.580747604370115 + ], + [ + "▁Blood", + -11.580926895141602 + ], + [ + "▁Registration", + -11.58106517791748 + ], + [ + "▁circular", + -11.58115005493164 + ], + [ + "▁nutritional", + -11.581523895263672 + ], + [ + "▁opponents", + -11.581768989562988 + ], + [ + "▁considerably", + -11.582037925720217 + ], + [ + "▁Survey", + -11.582098960876465 + ], + [ + "▁evident", + -11.58212661743164 + ], + [ + "▁strap", + -11.58214282989502 + ], + [ + "▁30,", + -11.582480430603027 + ], + [ + "ren", + -11.58272933959961 + ], + [ + "when", + -11.582743644714355 + ], + [ + "▁carriers", + -11.582898139953612 + ], + [ + "▁Chocolate", + -11.583066940307615 + ], + [ + "▁Victorian", + -11.583244323730469 + ], + [ + "▁metrics", + -11.583429336547852 + ], + [ + "▁spices", + -11.583516120910645 + ], + [ + "▁mad", + -11.58356475830078 + ], + [ + "▁Transfer", + -11.583874702453612 + ], + [ + "▁Canyon", + -11.584537506103516 + ], + [ + "▁clips", + -11.584538459777832 + ], + [ + "dis", + -11.58480739593506 + ], + [ + "▁undertaking", + -11.58507251739502 + ], + [ + "▁castle", + -11.585123062133787 + ], + [ + "▁Nursing", + -11.585156440734863 + ], + [ + "▁innovations", + -11.585216522216797 + ], + [ + "▁react", + -11.585259437561035 + ], + [ + "▁runners", + -11.585445404052734 + ], + [ + "▁sticks", + -11.58555030822754 + ], + [ + "▁monetary", + -11.58557415008545 + ], + [ + "▁supplying", + -11.585588455200195 + ], + [ + 
"▁Kenya", + -11.585591316223145 + ], + [ + "▁Hopefully", + -11.585646629333496 + ], + [ + "▁12,", + -11.585820198059082 + ], + [ + "▁assessments", + -11.585877418518066 + ], + [ + "▁mud", + -11.58595371246338 + ], + [ + "ST", + -11.585968017578123 + ], + [ + "▁Block", + -11.58624267578125 + ], + [ + "▁evolving", + -11.58627223968506 + ], + [ + "Top", + -11.586316108703612 + ], + [ + "▁clouds", + -11.586963653564451 + ], + [ + "▁thickness", + -11.586990356445312 + ], + [ + "▁25,", + -11.587080955505373 + ], + [ + "▁overhead", + -11.587298393249512 + ], + [ + "▁ribbon", + -11.587327003479004 + ], + [ + "▁partially", + -11.587382316589355 + ], + [ + "▁pole", + -11.587668418884276 + ], + [ + "▁Contemporary", + -11.587676048278809 + ], + [ + "▁assisting", + -11.587849617004396 + ], + [ + "▁Mississippi", + -11.588170051574709 + ], + [ + "▁dealt", + -11.588237762451172 + ], + [ + "▁lamps", + -11.588369369506836 + ], + [ + "▁advances", + -11.588431358337402 + ], + [ + "▁dialing", + -11.589208602905272 + ], + [ + "▁Intelligence", + -11.589441299438477 + ], + [ + "▁ignored", + -11.589486122131348 + ], + [ + "▁glow", + -11.589500427246094 + ], + [ + "▁comparing", + -11.589702606201172 + ], + [ + "▁convince", + -11.589816093444824 + ], + [ + "▁urgent", + -11.589829444885254 + ], + [ + "▁comfortably", + -11.590100288391112 + ], + [ + "▁proudly", + -11.590314865112305 + ], + [ + "▁cater", + -11.590405464172363 + ], + [ + "▁Albert", + -11.590838432312012 + ], + [ + "▁Ver", + -11.590932846069336 + ], + [ + "▁environmentally", + -11.591282844543455 + ], + [ + "▁notion", + -11.591611862182615 + ], + [ + "▁(4", + -11.59162139892578 + ], + [ + "▁institutional", + -11.591629981994627 + ], + [ + "▁pickup", + -11.591646194458008 + ], + [ + "▁descriptions", + -11.59170150756836 + ], + [ + "ile", + -11.591727256774902 + ], + [ + "▁coding", + -11.591853141784668 + ], + [ + "▁Eagle", + -11.592171669006348 + ], + [ + "master", + -11.592206001281738 + ], + [ + "▁Delivery", + -11.592257499694824 + ], + [ + "▁sewing", + -11.592527389526367 + ], + [ + "▁Se", + -11.59256649017334 + ], + [ + "▁wax", + -11.592689514160156 + ], + [ + "▁holy", + -11.592742919921877 + ], + [ + "▁Boys", + -11.592968940734863 + ], + [ + "▁varies", + -11.593059539794922 + ], + [ + "Buy", + -11.593263626098633 + ], + [ + "▁Gordon", + -11.593269348144531 + ], + [ + "▁Studios", + -11.593295097351074 + ], + [ + "▁participant", + -11.59347915649414 + ], + [ + "▁segments", + -11.593777656555176 + ], + [ + "▁hockey", + -11.593999862670898 + ], + [ + "▁Cold", + -11.59409999847412 + ], + [ + "▁cave", + -11.594103813171388 + ], + [ + "▁spotted", + -11.594172477722168 + ], + [ + "▁15%", + -11.594399452209473 + ], + [ + "▁compiled", + -11.594764709472656 + ], + [ + "▁databases", + -11.594926834106444 + ], + [ + "▁bicycle", + -11.595218658447266 + ], + [ + "▁Cabinet", + -11.59523868560791 + ], + [ + "▁Awesome", + -11.595248222351074 + ], + [ + "▁VR", + -11.59527587890625 + ], + [ + "▁encountered", + -11.595309257507324 + ], + [ + "oz", + -11.595504760742188 + ], + [ + "▁AL", + -11.59559726715088 + ], + [ + "▁threw", + -11.59561252593994 + ], + [ + "▁explored", + -11.595659255981444 + ], + [ + "▁simulation", + -11.595674514770508 + ], + [ + "▁excuse", + -11.59570026397705 + ], + [ + "▁Wayne", + -11.595711708068848 + ], + [ + "▁Skin", + -11.59581470489502 + ], + [ + "▁rivers", + -11.595902442932127 + ], + [ + "▁beverage", + -11.596232414245604 + ], + [ + "▁mp", + -11.596349716186523 + ], + [ + "▁lecture", + -11.596406936645508 + ], + [ + "MS", + 
-11.596427917480469 + ], + [ + "DS", + -11.59672737121582 + ], + [ + "▁advocacy", + -11.596809387207031 + ], + [ + "▁comprised", + -11.597396850585938 + ], + [ + "lock", + -11.597813606262209 + ], + [ + "▁simplicity", + -11.597840309143066 + ], + [ + "▁ethnic", + -11.598278045654297 + ], + [ + "▁Sample", + -11.598382949829102 + ], + [ + "▁radical", + -11.598392486572266 + ], + [ + "▁composite", + -11.598599433898926 + ], + [ + "▁festive", + -11.598608016967772 + ], + [ + "fire", + -11.598660469055176 + ], + [ + "▁Pen", + -11.598787307739258 + ], + [ + "▁chains", + -11.598821640014648 + ], + [ + "▁tones", + -11.598862648010254 + ], + [ + "▁seminar", + -11.599151611328123 + ], + [ + "▁examined", + -11.59918975830078 + ], + [ + "▁sheer", + -11.599234580993652 + ], + [ + "▁cone", + -11.59940242767334 + ], + [ + "▁condo", + -11.59958839416504 + ], + [ + "▁perceived", + -11.599641799926758 + ], + [ + "▁Current", + -11.599952697753906 + ], + [ + "▁Russell", + -11.599980354309082 + ], + [ + "▁29,", + -11.600015640258787 + ], + [ + "▁Franklin", + -11.600048065185549 + ], + [ + "▁brave", + -11.600127220153809 + ], + [ + "▁collaborate", + -11.60028839111328 + ], + [ + "▁sealed", + -11.600343704223633 + ], + [ + "▁Crown", + -11.600393295288086 + ], + [ + "▁Write", + -11.60056209564209 + ], + [ + "▁Consumer", + -11.600601196289062 + ], + [ + "▁Po", + -11.600720405578612 + ], + [ + "▁2007.", + -11.600805282592772 + ], + [ + "▁Circle", + -11.600811004638672 + ], + [ + "▁sleek", + -11.6009521484375 + ], + [ + "▁flu", + -11.601178169250488 + ], + [ + "ery", + -11.601309776306152 + ], + [ + "▁Something", + -11.601340293884276 + ], + [ + "▁Horse", + -11.601360321044922 + ], + [ + "▁sleeves", + -11.601519584655762 + ], + [ + "▁develops", + -11.601734161376951 + ], + [ + "▁nominated", + -11.601736068725586 + ], + [ + "▁Eco", + -11.601972579956056 + ], + [ + "▁bare", + -11.602014541625977 + ], + [ + "▁proteins", + -11.602224349975586 + ], + [ + "▁Lu", + -11.602227210998535 + ], + [ + "▁deer", + -11.602252960205078 + ], + [ + "▁Nation", + -11.602531433105469 + ], + [ + "▁refined", + -11.602556228637695 + ], + [ + "▁deployed", + -11.602693557739258 + ], + [ + "▁sights", + -11.602813720703123 + ], + [ + "▁Pin", + -11.602863311767578 + ], + [ + "▁Treatment", + -11.602875709533691 + ], + [ + "▁archive", + -11.602972030639648 + ], + [ + "▁appearing", + -11.603222846984863 + ], + [ + "▁counting", + -11.603767395019531 + ], + [ + "▁Mercedes", + -11.603875160217283 + ], + [ + "▁enthusiasts", + -11.60404109954834 + ], + [ + "▁Article", + -11.604164123535156 + ], + [ + "shaped", + -11.604302406311035 + ], + [ + "▁Graham", + -11.604447364807127 + ], + [ + "▁salmon", + -11.604561805725098 + ], + [ + "▁administrator", + -11.604631423950195 + ], + [ + "Hello", + -11.604644775390623 + ], + [ + "▁Christopher", + -11.604985237121582 + ], + [ + "▁prescribed", + -11.605182647705078 + ], + [ + "pur", + -11.60525131225586 + ], + [ + "▁Tips", + -11.605341911315918 + ], + [ + "▁journalists", + -11.605402946472168 + ], + [ + "▁pound", + -11.605786323547363 + ], + [ + "ative", + -11.605814933776855 + ], + [ + "▁pharmaceutical", + -11.605839729309082 + ], + [ + "▁belongs", + -11.605862617492676 + ], + [ + "▁Marvel", + -11.605901718139648 + ], + [ + "▁Vista", + -11.606019020080566 + ], + [ + "▁permanently", + -11.606071472167969 + ], + [ + "▁affiliated", + -11.606523513793944 + ], + [ + "▁polish", + -11.606536865234377 + ], + [ + "▁strips", + -11.606575965881348 + ], + [ + "▁dryer", + -11.606609344482422 + ], + [ + "▁hose", + 
-11.606700897216797 + ], + [ + "▁Douglas", + -11.60688591003418 + ], + [ + "minded", + -11.60694408416748 + ], + [ + "▁Virtual", + -11.607144355773926 + ], + [ + "▁athlete", + -11.607154846191406 + ], + [ + "ju", + -11.607525825500488 + ], + [ + "▁coordination", + -11.60759449005127 + ], + [ + "▁builder", + -11.60808563232422 + ], + [ + "▁mindset", + -11.608296394348145 + ], + [ + "Create", + -11.608680725097656 + ], + [ + "▁pillow", + -11.608699798583984 + ], + [ + "kind", + -11.608712196350098 + ], + [ + "▁Otherwise", + -11.608830451965332 + ], + [ + "▁Healthcare", + -11.609192848205566 + ], + [ + "▁bubble", + -11.609201431274414 + ], + [ + "▁Lower", + -11.60922908782959 + ], + [ + "▁AD", + -11.60928440093994 + ], + [ + "▁Si", + -11.609651565551758 + ], + [ + "▁offset", + -11.609692573547363 + ], + [ + "▁undertake", + -11.609896659851074 + ], + [ + "▁verified", + -11.610068321228027 + ], + [ + "Three", + -11.610090255737305 + ], + [ + "▁logical", + -11.610153198242188 + ], + [ + "▁terrific", + -11.610214233398438 + ], + [ + "23", + -11.610461235046388 + ], + [ + "▁rankings", + -11.610462188720703 + ], + [ + "▁Racing", + -11.61064624786377 + ], + [ + "▁PRO", + -11.610671997070312 + ], + [ + "ski", + -11.610730171203612 + ], + [ + "▁ST", + -11.610739707946776 + ], + [ + "▁performs", + -11.61081314086914 + ], + [ + "▁meantime", + -11.611043930053713 + ], + [ + "▁inquiry", + -11.61112117767334 + ], + [ + "▁railway", + -11.61127471923828 + ], + [ + "▁Indeed", + -11.61130428314209 + ], + [ + "▁shirts", + -11.611612319946287 + ], + [ + "▁breathe", + -11.611727714538574 + ], + [ + "▁Published", + -11.612037658691406 + ], + [ + "worth", + -11.612048149108888 + ], + [ + "▁Supply", + -11.612613677978516 + ], + [ + "▁combo", + -11.612804412841797 + ], + [ + "▁regret", + -11.613018989562988 + ], + [ + "▁landed", + -11.61306381225586 + ], + [ + "▁novels", + -11.613192558288574 + ], + [ + "▁disclosure", + -11.61340045928955 + ], + [ + "▁median", + -11.613587379455566 + ], + [ + "wall", + -11.613752365112305 + ], + [ + "▁acrylic", + -11.613986015319824 + ], + [ + "▁scared", + -11.614546775817873 + ], + [ + "▁conveniently", + -11.614940643310549 + ], + [ + "plus", + -11.614947319030762 + ], + [ + "▁Partner", + -11.614990234375 + ], + [ + "▁creator", + -11.615104675292969 + ], + [ + "▁router", + -11.615450859069824 + ], + [ + "▁1920", + -11.615514755249023 + ], + [ + "▁messaging", + -11.615561485290527 + ], + [ + "▁preliminary", + -11.615703582763672 + ], + [ + "first", + -11.615934371948242 + ], + [ + "▁shifting", + -11.616175651550291 + ], + [ + "▁neighbor", + -11.616293907165527 + ], + [ + "sky", + -11.616411209106444 + ], + [ + "▁discomfort", + -11.616653442382812 + ], + [ + "▁Brexit", + -11.61666774749756 + ], + [ + "▁wondered", + -11.61682415008545 + ], + [ + "▁waist", + -11.61688232421875 + ], + [ + "▁starter", + -11.617047309875488 + ], + [ + "▁rhythm", + -11.617287635803224 + ], + [ + "▁habitat", + -11.617576599121094 + ], + [ + "IN", + -11.617677688598633 + ], + [ + "▁UI", + -11.617846488952637 + ], + [ + "▁HIV", + -11.617950439453123 + ], + [ + "▁WE", + -11.617950439453123 + ], + [ + "▁translated", + -11.618115425109863 + ], + [ + "▁cinema", + -11.618328094482422 + ], + [ + "▁Baptist", + -11.618592262268066 + ], + [ + "▁Explorer", + -11.618873596191406 + ], + [ + "▁incidents", + -11.61896800994873 + ], + [ + "▁technician", + -11.619174003601074 + ], + [ + "▁Norway", + -11.619547843933104 + ], + [ + "▁discusses", + -11.619933128356934 + ], + [ + "▁Mars", + -11.61994457244873 + ], + [ + "▁socks", 
+ -11.620084762573242 + ], + [ + "▁ginger", + -11.620099067687988 + ], + [ + "▁seventh", + -11.620701789855955 + ], + [ + "▁Users", + -11.620793342590332 + ], + [ + "▁eliminating", + -11.620834350585938 + ], + [ + "▁vinegar", + -11.620842933654783 + ], + [ + "▁Transport", + -11.621103286743164 + ], + [ + "▁Includes", + -11.621148109436035 + ], + [ + "▁luggage", + -11.6212158203125 + ], + [ + "▁financially", + -11.62125015258789 + ], + [ + "▁Others", + -11.621464729309082 + ], + [ + "▁countryside", + -11.621466636657717 + ], + [ + "▁2:", + -11.621761322021484 + ], + [ + "▁palette", + -11.622299194335938 + ], + [ + "▁Specialist", + -11.622331619262695 + ], + [ + "▁competitions", + -11.622580528259276 + ], + [ + "▁Trading", + -11.622676849365234 + ], + [ + "▁ARE", + -11.622881889343262 + ], + [ + "▁discretion", + -11.622994422912598 + ], + [ + "▁badly", + -11.623022079467772 + ], + [ + "▁paragraph", + -11.623173713684082 + ], + [ + "▁informal", + -11.623255729675291 + ], + [ + "▁practically", + -11.623300552368164 + ], + [ + "ID", + -11.62339687347412 + ], + [ + "▁lands", + -11.62375259399414 + ], + [ + "▁stadium", + -11.623833656311035 + ], + [ + "▁z", + -11.62384033203125 + ], + [ + "▁ANY", + -11.623847007751465 + ], + [ + "▁worries", + -11.623889923095703 + ], + [ + "▁ethics", + -11.623929023742676 + ], + [ + "▁Belgium", + -11.624011039733888 + ], + [ + "▁arena", + -11.624117851257324 + ], + [ + "▁cleared", + -11.624191284179688 + ], + [ + "▁immigrants", + -11.624216079711914 + ], + [ + "▁Theater", + -11.624290466308594 + ], + [ + "▁argued", + -11.624330520629885 + ], + [ + "ble", + -11.624448776245115 + ], + [ + "▁qualifications", + -11.624833106994627 + ], + [ + "▁ha", + -11.624950408935549 + ], + [ + "▁soda", + -11.625184059143066 + ], + [ + "▁conditioner", + -11.625185012817385 + ], + [ + "Below", + -11.625319480895996 + ], + [ + "▁organ", + -11.62541961669922 + ], + [ + "▁panic", + -11.625457763671877 + ], + [ + "▁orientation", + -11.625699043273926 + ], + [ + "▁pic", + -11.625972747802734 + ], + [ + "▁substances", + -11.62608242034912 + ], + [ + "▁90%", + -11.626343727111816 + ], + [ + "▁Competition", + -11.62658977508545 + ], + [ + "▁competent", + -11.626717567443848 + ], + [ + "▁40%", + -11.626775741577148 + ], + [ + "▁paperwork", + -11.627238273620604 + ], + [ + "▁Bristol", + -11.627239227294922 + ], + [ + "▁newspapers", + -11.627530097961426 + ], + [ + "▁sheep", + -11.627765655517578 + ], + [ + "▁OH", + -11.62779426574707 + ], + [ + "▁crust", + -11.62786102294922 + ], + [ + "▁evaluated", + -11.627883911132812 + ], + [ + "▁Retrieved", + -11.62791347503662 + ], + [ + "▁proximity", + -11.628106117248535 + ], + [ + "▁lap", + -11.62856388092041 + ], + [ + "▁den", + -11.62876033782959 + ], + [ + "▁amazed", + -11.62899112701416 + ], + [ + "▁Risk", + -11.62912654876709 + ], + [ + "▁Cooper", + -11.629195213317873 + ], + [ + "▁celebrity", + -11.629231452941896 + ], + [ + "▁injection", + -11.62956714630127 + ], + [ + "▁commands", + -11.630029678344728 + ], + [ + "▁Features", + -11.630500793457031 + ], + [ + "▁responding", + -11.63054084777832 + ], + [ + "▁delayed", + -11.63063144683838 + ], + [ + "▁cited", + -11.630690574645996 + ], + [ + "▁airlines", + -11.630859375 + ], + [ + "▁Basic", + -11.631104469299316 + ], + [ + "▁dated", + -11.631220817565918 + ], + [ + "▁fixing", + -11.631307601928713 + ], + [ + "▁strikes", + -11.63135051727295 + ], + [ + "▁Edinburgh", + -11.63135814666748 + ], + [ + "▁threatened", + -11.631360054016112 + ], + [ + "ium", + -11.63137435913086 + ], + [ + "▁Ku", + 
-11.631385803222656 + ], + [ + "market", + -11.63151741027832 + ], + [ + "▁gut", + -11.631675720214844 + ], + [ + "changing", + -11.63167667388916 + ], + [ + "ct", + -11.63173484802246 + ], + [ + "▁indicators", + -11.631884574890137 + ], + [ + "▁kidney", + -11.632020950317385 + ], + [ + "▁GT", + -11.632627487182615 + ], + [ + "▁midnight", + -11.63266944885254 + ], + [ + "▁Designer", + -11.632896423339844 + ], + [ + "▁Month", + -11.63315486907959 + ], + [ + "▁trace", + -11.633217811584473 + ], + [ + "▁blessing", + -11.633241653442385 + ], + [ + "▁petition", + -11.63337230682373 + ], + [ + "▁breakdown", + -11.63338851928711 + ], + [ + "bank", + -11.633599281311035 + ], + [ + "▁instructors", + -11.633907318115234 + ], + [ + "▁hyper", + -11.63393783569336 + ], + [ + "▁poems", + -11.633955001831056 + ], + [ + "▁pc", + -11.63400936126709 + ], + [ + "▁binary", + -11.634111404418944 + ], + [ + "▁Singh", + -11.63442325592041 + ], + [ + "▁Steven", + -11.634604454040527 + ], + [ + "▁VIP", + -11.634620666503906 + ], + [ + "yo", + -11.634641647338867 + ], + [ + "▁barn", + -11.63486671447754 + ], + [ + "▁solved", + -11.634982109069824 + ], + [ + "▁Apply", + -11.635014533996582 + ], + [ + "▁puppy", + -11.63504695892334 + ], + [ + "▁scenic", + -11.635430335998535 + ], + [ + "century", + -11.635490417480469 + ], + [ + "jo", + -11.635784149169922 + ], + [ + "▁reputable", + -11.635817527770996 + ], + [ + "RE", + -11.635830879211426 + ], + [ + "▁analyst", + -11.635876655578612 + ], + [ + "proof", + -11.63609504699707 + ], + [ + "▁trap", + -11.636109352111816 + ], + [ + "▁economics", + -11.636126518249512 + ], + [ + "▁Heights", + -11.636223793029783 + ], + [ + "▁waterproof", + -11.63644790649414 + ], + [ + "▁LCD", + -11.636463165283203 + ], + [ + "▁Chelsea", + -11.63690948486328 + ], + [ + "▁algorithms", + -11.637531280517578 + ], + [ + "▁metals", + -11.637627601623535 + ], + [ + "▁acne", + -11.637791633605955 + ], + [ + "▁progression", + -11.637815475463867 + ], + [ + "▁assembled", + -11.637900352478027 + ], + [ + "▁Stage", + -11.63804817199707 + ], + [ + "▁sleeve", + -11.638132095336914 + ], + [ + "▁foil", + -11.63815689086914 + ], + [ + "▁Mountains", + -11.638423919677734 + ], + [ + "▁punch", + -11.638670921325684 + ], + [ + "▁suggesting", + -11.638853073120115 + ], + [ + "IP", + -11.638882637023926 + ], + [ + "▁consciousness", + -11.639168739318848 + ], + [ + "▁outreach", + -11.639243125915527 + ], + [ + "HD", + -11.639299392700195 + ], + [ + "▁happier", + -11.639695167541504 + ], + [ + "▁CE", + -11.640121459960938 + ], + [ + "▁Reviews", + -11.640137672424316 + ], + [ + "▁traditionally", + -11.64022731781006 + ], + [ + "▁$25", + -11.64048957824707 + ], + [ + "che", + -11.640552520751951 + ], + [ + "▁Lawrence", + -11.640631675720217 + ], + [ + "▁attachment", + -11.640666961669922 + ], + [ + "▁recreation", + -11.640746116638184 + ], + [ + "▁LG", + -11.640908241271973 + ], + [ + "▁57", + -11.641186714172363 + ], + [ + "two", + -11.641277313232422 + ], + [ + "▁sculpture", + -11.641554832458496 + ], + [ + "▁extends", + -11.641626358032228 + ], + [ + "▁24,", + -11.641669273376465 + ], + [ + "▁acute", + -11.641786575317385 + ], + [ + "▁yarn", + -11.641796112060549 + ], + [ + "▁discounted", + -11.641850471496582 + ], + [ + "▁certificates", + -11.642007827758787 + ], + [ + "▁happily", + -11.642060279846191 + ], + [ + "▁toy", + -11.642191886901855 + ], + [ + "▁tenant", + -11.642250061035156 + ], + [ + "▁Harbor", + -11.64256191253662 + ], + [ + "▁Designed", + -11.642626762390137 + ], + [ + "▁$15", + 
-11.642762184143066 + ], + [ + "▁subscribers", + -11.64299488067627 + ], + [ + "▁nowhere", + -11.643375396728516 + ], + [ + "▁lobby", + -11.643424034118652 + ], + [ + "▁FC", + -11.64362335205078 + ], + [ + "▁Princess", + -11.64374542236328 + ], + [ + "▁LinkedIn", + -11.643876075744627 + ], + [ + "▁Electronic", + -11.643936157226562 + ], + [ + "▁pencil", + -11.644003868103027 + ], + [ + "▁tops", + -11.644183158874512 + ], + [ + "▁observation", + -11.644227027893066 + ], + [ + "ps", + -11.64433765411377 + ], + [ + "▁Wright", + -11.644457817077637 + ], + [ + "ita", + -11.644521713256836 + ], + [ + "▁researching", + -11.644689559936523 + ], + [ + "▁blogger", + -11.64470100402832 + ], + [ + "▁culinary", + -11.644760131835938 + ], + [ + "▁recruiting", + -11.644765853881836 + ], + [ + "▁Info", + -11.644983291625977 + ], + [ + "64", + -11.64504337310791 + ], + [ + "▁scroll", + -11.645112991333008 + ], + [ + "▁pointing", + -11.645216941833496 + ], + [ + "▁Imagine", + -11.645511627197266 + ], + [ + "▁rope", + -11.645639419555664 + ], + [ + "▁hiding", + -11.645669937133787 + ], + [ + "▁compassion", + -11.6459379196167 + ], + [ + "mark", + -11.646003723144531 + ], + [ + "▁hint", + -11.646079063415527 + ], + [ + "▁comprises", + -11.646160125732422 + ], + [ + "▁58", + -11.646349906921388 + ], + [ + "▁chrome", + -11.646421432495115 + ], + [ + "▁Leader", + -11.646441459655762 + ], + [ + "▁garbage", + -11.64647388458252 + ], + [ + "▁fare", + -11.646502494812012 + ], + [ + "▁scholars", + -11.646538734436035 + ], + [ + "▁perfection", + -11.646745681762695 + ], + [ + "▁refugees", + -11.646747589111328 + ], + [ + "▁heroes", + -11.64694118499756 + ], + [ + "▁wound", + -11.647054672241213 + ], + [ + "▁96", + -11.64725399017334 + ], + [ + "▁stupid", + -11.647397994995115 + ], + [ + "▁incentives", + -11.6474027633667 + ], + [ + "▁investigations", + -11.64747142791748 + ], + [ + "▁Teacher", + -11.647698402404783 + ], + [ + "▁instrumental", + -11.647780418395996 + ], + [ + "very", + -11.648334503173828 + ], + [ + "▁invoice", + -11.64838695526123 + ], + [ + "▁Economics", + -11.648519515991213 + ], + [ + "▁Category", + -11.648598670959473 + ], + [ + "▁Upper", + -11.648721694946287 + ], + [ + "▁sciences", + -11.648869514465332 + ], + [ + "▁Route", + -11.64915943145752 + ], + [ + "▁warmer", + -11.649240493774414 + ], + [ + "▁PayPal", + -11.64931297302246 + ], + [ + "▁disclose", + -11.649514198303224 + ], + [ + "▁iPod", + -11.649535179138184 + ], + [ + "▁WI", + -11.649651527404783 + ], + [ + "▁boyfriend", + -11.649946212768556 + ], + [ + "▁bundle", + -11.65017032623291 + ], + [ + "▁assessed", + -11.65036964416504 + ], + [ + "▁sustain", + -11.650430679321287 + ], + [ + "▁stain", + -11.650602340698242 + ], + [ + "▁indication", + -11.650636672973633 + ], + [ + "▁assurance", + -11.650753021240234 + ], + [ + "▁ca", + -11.650839805603027 + ], + [ + "▁BA", + -11.650869369506836 + ], + [ + "▁necessity", + -11.651029586791992 + ], + [ + "Work", + -11.651061058044434 + ], + [ + "▁Plastic", + -11.65113925933838 + ], + [ + "▁locals", + -11.651203155517578 + ], + [ + "▁demonstration", + -11.651269912719728 + ], + [ + "▁bears", + -11.651307106018066 + ], + [ + "▁bloggers", + -11.651326179504396 + ], + [ + "▁Certification", + -11.651461601257324 + ], + [ + "▁Labour", + -11.65147590637207 + ], + [ + "▁Luxury", + -11.651609420776367 + ], + [ + "▁gum", + -11.651920318603516 + ], + [ + "▁columns", + -11.651926040649414 + ], + [ + "▁urge", + -11.651983261108398 + ], + [ + "▁viable", + -11.652098655700684 + ], + [ + "45", + 
-11.652255058288574 + ], + [ + "AL", + -11.652280807495115 + ], + [ + "order", + -11.652456283569336 + ], + [ + "▁profound", + -11.652583122253418 + ], + [ + "▁investigating", + -11.65260124206543 + ], + [ + "ada", + -11.652639389038086 + ], + [ + "▁spirits", + -11.653210639953612 + ], + [ + "log", + -11.653225898742676 + ], + [ + "▁Richmond", + -11.65329360961914 + ], + [ + "▁apples", + -11.653313636779783 + ], + [ + "▁administrators", + -11.65331745147705 + ], + [ + "▁tri", + -11.65343189239502 + ], + [ + "▁overlooking", + -11.653874397277832 + ], + [ + "▁occurring", + -11.653968811035156 + ], + [ + "▁desirable", + -11.654040336608888 + ], + [ + "▁UP", + -11.6541109085083 + ], + [ + "▁noon", + -11.654134750366213 + ], + [ + "▁posters", + -11.65419101715088 + ], + [ + "▁tin", + -11.65471649169922 + ], + [ + "▁banner", + -11.654900550842283 + ], + [ + "▁villa", + -11.65536880493164 + ], + [ + "▁specification", + -11.655725479125977 + ], + [ + "▁cinnamon", + -11.655755043029783 + ], + [ + "▁influential", + -11.655755043029783 + ], + [ + "▁lively", + -11.655965805053713 + ], + [ + "pre", + -11.655985832214355 + ], + [ + "▁SW", + -11.656264305114746 + ], + [ + "▁activate", + -11.656368255615234 + ], + [ + "CA", + -11.656643867492676 + ], + [ + "core", + -11.656795501708984 + ], + [ + "▁researcher", + -11.65727710723877 + ], + [ + "▁Airlines", + -11.657608032226562 + ], + [ + "▁Emily", + -11.657622337341309 + ], + [ + "▁verification", + -11.65773868560791 + ], + [ + "▁bat", + -11.65776252746582 + ], + [ + "▁Aaron", + -11.6580171585083 + ], + [ + "▁wages", + -11.658188819885254 + ], + [ + "▁demonstrates", + -11.658265113830566 + ], + [ + "▁strings", + -11.658269882202148 + ], + [ + "▁Madrid", + -11.658288955688477 + ], + [ + "▁steal", + -11.658353805541992 + ], + [ + "Still", + -11.6585111618042 + ], + [ + "▁Format", + -11.658530235290527 + ], + [ + "▁projected", + -11.65855312347412 + ], + [ + "rock", + -11.658636093139648 + ], + [ + "▁offshore", + -11.658695220947266 + ], + [ + "▁Physical", + -11.658738136291504 + ], + [ + "▁describing", + -11.658944129943848 + ], + [ + "▁1996", + -11.658960342407228 + ], + [ + "Right", + -11.659225463867188 + ], + [ + "▁inspirational", + -11.659863471984863 + ], + [ + "▁bespoke", + -11.659878730773926 + ], + [ + "▁sacred", + -11.659903526306152 + ], + [ + "▁Recovery", + -11.660000801086426 + ], + [ + "▁6-", + -11.66003131866455 + ], + [ + "mile", + -11.660219192504885 + ], + [ + "▁necklace", + -11.66047191619873 + ], + [ + "▁XP", + -11.660883903503418 + ], + [ + "3,", + -11.660988807678224 + ], + [ + "▁missions", + -11.661176681518556 + ], + [ + "▁breeze", + -11.661320686340332 + ], + [ + "▁Stand", + -11.661584854125977 + ], + [ + "▁Strategy", + -11.661920547485352 + ], + [ + "▁Constitution", + -11.662002563476562 + ], + [ + "TH", + -11.662007331848145 + ], + [ + "▁unemployment", + -11.662107467651367 + ], + [ + "▁Soul", + -11.662158012390137 + ], + [ + "▁imported", + -11.662385940551758 + ], + [ + "▁fate", + -11.662466049194336 + ], + [ + "lar", + -11.662592887878418 + ], + [ + "form", + -11.66270637512207 + ], + [ + "▁commissioned", + -11.662991523742676 + ], + [ + "▁Agriculture", + -11.663037300109863 + ], + [ + "iya", + -11.663225173950195 + ], + [ + "▁silly", + -11.66340446472168 + ], + [ + "ok", + -11.66352367401123 + ], + [ + "▁reservations", + -11.66377067565918 + ], + [ + "▁biology", + -11.663870811462402 + ], + [ + "▁Artist", + -11.664273262023926 + ], + [ + "▁spam", + -11.664859771728516 + ], + [ + "28", + -11.664969444274902 + ], + [ + 
"▁Northwest", + -11.665292739868164 + ], + [ + "▁Amsterdam", + -11.66547679901123 + ], + [ + "▁NASA", + -11.665828704833984 + ], + [ + "▁Ready", + -11.66585922241211 + ], + [ + "▁Taking", + -11.66591453552246 + ], + [ + "▁legislative", + -11.665948867797852 + ], + [ + "▁sandwich", + -11.666044235229492 + ], + [ + "▁Elementary", + -11.666359901428224 + ], + [ + "▁Carter", + -11.666434288024902 + ], + [ + "▁3.0", + -11.666488647460938 + ], + [ + "Mark", + -11.666626930236816 + ], + [ + "▁nut", + -11.666728019714355 + ], + [ + "▁Nu", + -11.666762351989746 + ], + [ + "▁Lights", + -11.66697883605957 + ], + [ + "▁Track", + -11.66713047027588 + ], + [ + "▁avoided", + -11.667330741882324 + ], + [ + "▁Workshop", + -11.667373657226562 + ], + [ + "▁economical", + -11.667482376098633 + ], + [ + "▁exhibitions", + -11.667534828186035 + ], + [ + "▁wires", + -11.667646408081056 + ], + [ + "▁textures", + -11.668024063110352 + ], + [ + "▁Joint", + -11.668025970458984 + ], + [ + "▁Doors", + -11.668061256408691 + ], + [ + "▁medicines", + -11.66806983947754 + ], + [ + "▁bother", + -11.668095588684082 + ], + [ + "through", + -11.668143272399902 + ], + [ + "▁Meet", + -11.668387413024902 + ], + [ + "▁Robin", + -11.668424606323242 + ], + [ + "▁predicted", + -11.668580055236816 + ], + [ + "▁Rachel", + -11.668813705444336 + ], + [ + "▁appetite", + -11.668871879577637 + ], + [ + "▁deploy", + -11.669225692749023 + ], + [ + "▁negotiate", + -11.669363975524902 + ], + [ + "▁Columbus", + -11.669621467590332 + ], + [ + "▁Voice", + -11.669809341430664 + ], + [ + "▁glimpse", + -11.670158386230469 + ], + [ + "▁incorrect", + -11.670158386230469 + ], + [ + "West", + -11.67026138305664 + ], + [ + "je", + -11.670575141906738 + ], + [ + "▁pads", + -11.670796394348145 + ], + [ + "▁ch", + -11.670856475830078 + ], + [ + "▁smallest", + -11.671231269836426 + ], + [ + "▁quantities", + -11.671277046203612 + ], + [ + "▁eaten", + -11.67128086090088 + ], + [ + "▁timeless", + -11.671360969543455 + ], + [ + "▁pursuit", + -11.671364784240724 + ], + [ + "▁traded", + -11.67140007019043 + ], + [ + "▁hung", + -11.67156982421875 + ], + [ + "55", + -11.671611785888672 + ], + [ + "▁Close", + -11.671655654907228 + ], + [ + "Al", + -11.672225952148438 + ], + [ + "▁Beyond", + -11.67224407196045 + ], + [ + "World", + -11.672249794006348 + ], + [ + "▁Arabia", + -11.672258377075195 + ], + [ + "▁NHS", + -11.672327041625977 + ], + [ + "▁beside", + -11.672389030456545 + ], + [ + "▁dishwasher", + -11.672613143920898 + ], + [ + "▁Angel", + -11.672701835632324 + ], + [ + "▁securely", + -11.672780990600586 + ], + [ + "▁quoted", + -11.672783851623535 + ], + [ + "▁Bond", + -11.672913551330566 + ], + [ + "▁outlined", + -11.67311668395996 + ], + [ + "▁yogurt", + -11.673282623291016 + ], + [ + "▁Questions", + -11.673304557800291 + ], + [ + "▁distant", + -11.67331600189209 + ], + [ + "▁Parker", + -11.673531532287598 + ], + [ + "▁Sheet", + -11.673765182495115 + ], + [ + "▁MBA", + -11.674067497253418 + ], + [ + "hill", + -11.67408847808838 + ], + [ + "▁Paint", + -11.67409896850586 + ], + [ + "▁landscapes", + -11.67426586151123 + ], + [ + "pp", + -11.674323081970217 + ], + [ + "▁securities", + -11.674375534057615 + ], + [ + "base", + -11.674580574035645 + ], + [ + "▁Kennedy", + -11.674627304077148 + ], + [ + "▁camps", + -11.674814224243164 + ], + [ + "▁Stewart", + -11.674870491027832 + ], + [ + "▁revised", + -11.67494773864746 + ], + [ + "▁2007,", + -11.67502498626709 + ], + [ + "▁Murray", + -11.67520523071289 + ], + [ + "▁Focus", + -11.675310134887695 + ], + [ + "Green", 
+ -11.675450325012209 + ], + [ + "▁statistical", + -11.6755952835083 + ], + [ + "Select", + -11.675970077514648 + ], + [ + "▁Programme", + -11.67601490020752 + ], + [ + "▁selections", + -11.676070213317873 + ], + [ + "▁recovered", + -11.676177024841309 + ], + [ + "▁accommodations", + -11.676366806030272 + ], + [ + "▁Nashville", + -11.676444053649902 + ], + [ + "ade", + -11.676506996154783 + ], + [ + "ence", + -11.676660537719728 + ], + [ + "▁washer", + -11.676886558532717 + ], + [ + "▁Marie", + -11.676932334899902 + ], + [ + "▁reads", + -11.677553176879885 + ], + [ + "▁candle", + -11.677574157714844 + ], + [ + "Happy", + -11.677664756774902 + ], + [ + "▁Cards", + -11.67775058746338 + ], + [ + "▁Say", + -11.677926063537598 + ], + [ + "▁enthusiastic", + -11.67814826965332 + ], + [ + "▁greens", + -11.678340911865234 + ], + [ + "▁holistic", + -11.678375244140623 + ], + [ + "▁lakes", + -11.678389549255373 + ], + [ + "▁Copyright", + -11.67839241027832 + ], + [ + "▁appearances", + -11.67855739593506 + ], + [ + "▁sends", + -11.678664207458496 + ], + [ + "night", + -11.678740501403809 + ], + [ + "▁kingdom", + -11.67896842956543 + ], + [ + "▁nursery", + -11.678974151611328 + ], + [ + "▁mph", + -11.678997039794922 + ], + [ + "▁shy", + -11.67900562286377 + ], + [ + "▁enrollment", + -11.679166793823242 + ], + [ + "▁Ash", + -11.679569244384766 + ], + [ + "▁proportion", + -11.679765701293944 + ], + [ + "▁Author", + -11.679924964904783 + ], + [ + "▁Denmark", + -11.680002212524414 + ], + [ + "▁Visa", + -11.68001651763916 + ], + [ + "▁skiing", + -11.680143356323242 + ], + [ + "▁roster", + -11.680152893066406 + ], + [ + "▁Until", + -11.68033504486084 + ], + [ + "▁enjoyment", + -11.680415153503418 + ], + [ + "▁Forex", + -11.68064022064209 + ], + [ + "▁costumes", + -11.681093215942385 + ], + [ + "▁Payment", + -11.681170463562012 + ], + [ + "▁que", + -11.68125057220459 + ], + [ + "▁phrases", + -11.681361198425291 + ], + [ + "▁conflicts", + -11.681557655334473 + ], + [ + "▁frustration", + -11.681560516357422 + ], + [ + "▁Vintage", + -11.681988716125488 + ], + [ + "▁ceilings", + -11.682028770446776 + ], + [ + "▁anticipate", + -11.682122230529783 + ], + [ + "▁Fitness", + -11.682153701782228 + ], + [ + "▁CC", + -11.682164192199709 + ], + [ + "▁mechanics", + -11.682201385498049 + ], + [ + "▁gospel", + -11.68231964111328 + ], + [ + "▁Either", + -11.682392120361328 + ], + [ + "▁empower", + -11.682669639587402 + ], + [ + "▁Recently", + -11.682870864868164 + ], + [ + "▁CPU", + -11.683270454406738 + ], + [ + "▁consumed", + -11.683308601379396 + ], + [ + "▁Arkansas", + -11.68345069885254 + ], + [ + "▁referral", + -11.683481216430664 + ], + [ + "filled", + -11.6835355758667 + ], + [ + "▁gem", + -11.68362045288086 + ], + [ + "▁remedy", + -11.683785438537598 + ], + [ + "▁Dis", + -11.683868408203123 + ], + [ + "▁Official", + -11.68392562866211 + ], + [ + "▁Maintenance", + -11.684123992919922 + ], + [ + "▁penalties", + -11.684192657470703 + ], + [ + "▁Har", + -11.684355735778809 + ], + [ + "▁62", + -11.684504508972168 + ], + [ + "▁Alberta", + -11.684630393981934 + ], + [ + "▁listened", + -11.684678077697754 + ], + [ + "▁Hart", + -11.684730529785156 + ], + [ + "▁Virgin", + -11.684828758239746 + ], + [ + "▁Fortunately", + -11.684942245483398 + ], + [ + "▁absorb", + -11.68523406982422 + ], + [ + "▁recordings", + -11.685518264770508 + ], + [ + "▁Chat", + -11.68553352355957 + ], + [ + "Data", + -11.68561840057373 + ], + [ + "▁Standards", + -11.68576431274414 + ], + [ + "▁Extra", + -11.68585205078125 + ], + [ + "▁Liberty", + 
-11.686071395874023 + ], + [ + "▁witnessed", + -11.686112403869627 + ], + [ + "▁swap", + -11.686214447021484 + ], + [ + "▁Rice", + -11.686325073242188 + ], + [ + "▁Aid", + -11.68632698059082 + ], + [ + "▁rival", + -11.686713218688965 + ], + [ + "▁joke", + -11.686803817749023 + ], + [ + "▁screws", + -11.686824798583984 + ], + [ + "▁Warren", + -11.687036514282228 + ], + [ + "Try", + -11.687207221984863 + ], + [ + "▁stirring", + -11.687210083007812 + ], + [ + "ph", + -11.687237739562988 + ], + [ + "▁Collins", + -11.687396049499512 + ], + [ + "fer", + -11.68756103515625 + ], + [ + "▁el", + -11.68769073486328 + ], + [ + "▁Budget", + -11.687742233276367 + ], + [ + "UK", + -11.687798500061035 + ], + [ + "▁Islam", + -11.68797779083252 + ], + [ + "▁Browse", + -11.688013076782228 + ], + [ + "▁NE", + -11.688240051269531 + ], + [ + "▁SMS", + -11.68830108642578 + ], + [ + "▁implants", + -11.688310623168944 + ], + [ + "▁thumb", + -11.688511848449709 + ], + [ + "▁Brothers", + -11.688775062561035 + ], + [ + "▁documented", + -11.68896198272705 + ], + [ + "▁pending", + -11.68909740447998 + ], + [ + "▁Must", + -11.689374923706056 + ], + [ + "▁slices", + -11.689530372619627 + ], + [ + "▁vector", + -11.689584732055664 + ], + [ + "Car", + -11.689630508422852 + ], + [ + "▁adverse", + -11.68967342376709 + ], + [ + "▁Karen", + -11.68967628479004 + ], + [ + "▁header", + -11.68978214263916 + ], + [ + "▁20,", + -11.69031810760498 + ], + [ + "▁locksmith", + -11.690384864807127 + ], + [ + "▁journals", + -11.69042682647705 + ], + [ + "▁Revolution", + -11.690431594848633 + ], + [ + "36", + -11.690460205078123 + ], + [ + "▁Ian", + -11.69071102142334 + ], + [ + "▁seasoned", + -11.690930366516112 + ], + [ + "▁Had", + -11.691320419311523 + ], + [ + "▁Cafe", + -11.691362380981444 + ], + [ + "▁activated", + -11.691472053527832 + ], + [ + "▁Shopping", + -11.691481590270996 + ], + [ + "▁flesh", + -11.691481590270996 + ], + [ + "▁lush", + -11.69155502319336 + ], + [ + "▁illustration", + -11.69167709350586 + ], + [ + "Part", + -11.691713333129885 + ], + [ + "▁Turkish", + -11.69184684753418 + ], + [ + "▁washed", + -11.691876411437988 + ], + [ + "▁Mad", + -11.691981315612791 + ], + [ + "▁backdrop", + -11.692065238952637 + ], + [ + "▁Gate", + -11.69206714630127 + ], + [ + "▁Between", + -11.69211196899414 + ], + [ + "▁Ham", + -11.692279815673828 + ], + [ + "▁elevated", + -11.692524909973145 + ], + [ + "▁Twin", + -11.692621231079102 + ], + [ + "▁bedding", + -11.693227767944336 + ], + [ + "▁slower", + -11.69344425201416 + ], + [ + "▁banned", + -11.69347858428955 + ], + [ + "▁defeated", + -11.6934814453125 + ], + [ + "▁mint", + -11.693689346313477 + ], + [ + "▁advisory", + -11.693928718566896 + ], + [ + "built", + -11.694029808044434 + ], + [ + "▁Platform", + -11.694090843200684 + ], + [ + "▁Cell", + -11.694347381591797 + ], + [ + "▁briefly", + -11.694379806518556 + ], + [ + "▁liberal", + -11.69438648223877 + ], + [ + "▁Alice", + -11.695068359375 + ], + [ + "▁regulated", + -11.695188522338867 + ], + [ + "▁surf", + -11.69536018371582 + ], + [ + "▁decorate", + -11.695534706115724 + ], + [ + "▁ebook", + -11.695557594299316 + ], + [ + "▁Southeast", + -11.695764541625977 + ], + [ + "4)", + -11.69618797302246 + ], + [ + "▁bases", + -11.696617126464844 + ], + [ + "▁000", + -11.69672393798828 + ], + [ + "▁Graduate", + -11.69680118560791 + ], + [ + "▁linking", + -11.696907043457031 + ], + [ + "▁Rules", + -11.69694709777832 + ], + [ + "▁divine", + -11.69697380065918 + ], + [ + "▁Ge", + -11.69699478149414 + ], + [ + "▁aggregate", + 
-11.697028160095217 + ], + [ + "▁hats", + -11.697155952453612 + ], + [ + "▁Commissioner", + -11.697242736816406 + ], + [ + "app", + -11.69730281829834 + ], + [ + "▁partly", + -11.69735050201416 + ], + [ + "▁dietary", + -11.698189735412598 + ], + [ + "▁modest", + -11.698326110839844 + ], + [ + "▁59", + -11.69834041595459 + ], + [ + "▁answering", + -11.698342323303224 + ], + [ + "▁Options", + -11.698490142822266 + ], + [ + "du", + -11.698492050170898 + ], + [ + "▁Circuit", + -11.698548316955566 + ], + [ + "▁farmer", + -11.69865608215332 + ], + [ + "▁MB", + -11.698710441589355 + ], + [ + "▁Soviet", + -11.698847770690918 + ], + [ + "▁shifts", + -11.698978424072266 + ], + [ + "▁Arthur", + -11.699121475219728 + ], + [ + "▁elegance", + -11.699337005615234 + ], + [ + "▁coordinate", + -11.699406623840332 + ], + [ + "▁brokers", + -11.699469566345217 + ], + [ + "ever", + -11.699495315551758 + ], + [ + "▁heels", + -11.69971752166748 + ], + [ + "▁invites", + -11.699874877929688 + ], + [ + "▁rounded", + -11.700008392333984 + ], + [ + "sville", + -11.700231552124023 + ], + [ + "▁Campus", + -11.70037078857422 + ], + [ + "▁notifications", + -11.700395584106444 + ], + [ + "▁grandmother", + -11.700519561767578 + ], + [ + "▁earliest", + -11.70057773590088 + ], + [ + "▁Storm", + -11.70085334777832 + ], + [ + "▁rugs", + -11.70125675201416 + ], + [ + "▁utilities", + -11.701401710510254 + ], + [ + "▁harmony", + -11.701458930969238 + ], + [ + "▁tobacco", + -11.701528549194336 + ], + [ + "▁Applications", + -11.701942443847656 + ], + [ + "▁mall", + -11.702021598815918 + ], + [ + "▁warrant", + -11.702034950256348 + ], + [ + "▁chopped", + -11.702235221862791 + ], + [ + "▁surgeon", + -11.702375411987305 + ], + [ + "▁compounds", + -11.702427864074709 + ], + [ + "▁switches", + -11.702526092529297 + ], + [ + "▁flame", + -11.70256519317627 + ], + [ + "▁sometime", + -11.702642440795898 + ], + [ + "▁Flat", + -11.702655792236328 + ], + [ + "▁Campbell", + -11.702999114990234 + ], + [ + "ical", + -11.70334815979004 + ], + [ + "▁ONE", + -11.703486442565918 + ], + [ + "▁donors", + -11.703500747680664 + ], + [ + "▁scholarships", + -11.703661918640137 + ], + [ + "▁Orchestra", + -11.70405387878418 + ], + [ + "▁extraction", + -11.704056739807127 + ], + [ + "▁Austria", + -11.704206466674805 + ], + [ + "▁beads", + -11.704241752624512 + ], + [ + "▁Nintendo", + -11.70447063446045 + ], + [ + "▁Picture", + -11.704538345336914 + ], + [ + "▁mushrooms", + -11.704869270324709 + ], + [ + "▁Faculty", + -11.70501708984375 + ], + [ + "▁Ground", + -11.7051362991333 + ], + [ + "▁Outlook", + -11.705307960510254 + ], + [ + "▁gratitude", + -11.705543518066406 + ], + [ + "▁qualifying", + -11.70582103729248 + ], + [ + "▁lazy", + -11.705873489379885 + ], + [ + "ong", + -11.706005096435549 + ], + [ + "▁clinics", + -11.706011772155762 + ], + [ + "▁casting", + -11.706032752990724 + ], + [ + "▁passport", + -11.706063270568848 + ], + [ + "▁Carl", + -11.706320762634276 + ], + [ + "▁minority", + -11.706494331359863 + ], + [ + "Red", + -11.706496238708496 + ], + [ + "▁PHP", + -11.706536293029783 + ], + [ + "▁cafe", + -11.706619262695312 + ], + [ + "▁spoon", + -11.706905364990234 + ], + [ + "▁bitter", + -11.706914901733398 + ], + [ + "▁convey", + -11.706988334655762 + ], + [ + "▁accessing", + -11.707143783569336 + ], + [ + "▁remodeling", + -11.70716953277588 + ], + [ + "founder", + -11.707199096679688 + ], + [ + "▁fortune", + -11.707230567932127 + ], + [ + "▁Camera", + -11.70726490020752 + ], + [ + "▁yearly", + -11.707581520080566 + ], + [ + "▁Sean", + 
-11.70767593383789 + ], + [ + "ix", + -11.707797050476074 + ], + [ + "▁68", + -11.70780086517334 + ], + [ + "▁Jerusalem", + -11.707996368408203 + ], + [ + "▁sunshine", + -11.708043098449709 + ], + [ + "▁observations", + -11.708292007446287 + ], + [ + "DC", + -11.708345413208008 + ], + [ + "▁android", + -11.708351135253906 + ], + [ + "▁incentive", + -11.7085542678833 + ], + [ + "▁Ranch", + -11.708579063415527 + ], + [ + "▁Polish", + -11.708669662475586 + ], + [ + "kan", + -11.70868682861328 + ], + [ + "▁Pearl", + -11.708822250366213 + ], + [ + "▁characteristic", + -11.708829879760742 + ], + [ + "▁cents", + -11.70897102355957 + ], + [ + "▁evenly", + -11.709059715270996 + ], + [ + "▁shallow", + -11.709100723266602 + ], + [ + "▁grasp", + -11.709186553955078 + ], + [ + "▁performers", + -11.709239959716797 + ], + [ + "▁Military", + -11.709257125854492 + ], + [ + "Overall", + -11.709269523620604 + ], + [ + "▁Penn", + -11.709427833557127 + ], + [ + "Their", + -11.709477424621582 + ], + [ + "▁harness", + -11.709512710571287 + ], + [ + "▁bomb", + -11.709869384765623 + ], + [ + "▁Adams", + -11.709917068481444 + ], + [ + "ken", + -11.710267066955566 + ], + [ + "ward", + -11.71031379699707 + ], + [ + "▁bulbs", + -11.710351943969728 + ], + [ + "▁Electrical", + -11.710412979125977 + ], + [ + "▁visually", + -11.710481643676758 + ], + [ + "▁Czech", + -11.710586547851562 + ], + [ + "▁suites", + -11.71074390411377 + ], + [ + "▁belly", + -11.711039543151855 + ], + [ + "▁hobby", + -11.711111068725586 + ], + [ + "▁boutique", + -11.711132049560549 + ], + [ + "▁detected", + -11.71117115020752 + ], + [ + "ett", + -11.71140193939209 + ], + [ + "▁Jacob", + -11.71141242980957 + ], + [ + "▁impacted", + -11.711467742919922 + ], + [ + "▁Buffalo", + -11.711636543273926 + ], + [ + "source", + -11.71175765991211 + ], + [ + "Google", + -11.712169647216797 + ], + [ + "▁Hotels", + -11.712265014648438 + ], + [ + "▁node", + -11.71243667602539 + ], + [ + "▁slides", + -11.71262550354004 + ], + [ + "▁prone", + -11.712728500366213 + ], + [ + "▁gates", + -11.71304702758789 + ], + [ + "▁Seven", + -11.713068962097168 + ], + [ + "wing", + -11.713394165039062 + ], + [ + "▁accessory", + -11.713571548461914 + ], + [ + "▁jurisdiction", + -11.71381950378418 + ], + [ + "▁cancelled", + -11.713916778564451 + ], + [ + "▁Montana", + -11.7141752243042 + ], + [ + "▁roasted", + -11.714282989501951 + ], + [ + "especially", + -11.714316368103027 + ], + [ + "yan", + -11.714381217956545 + ], + [ + "tal", + -11.714605331420898 + ], + [ + "▁Carpet", + -11.714741706848145 + ], + [ + "▁dense", + -11.71480941772461 + ], + [ + "open", + -11.714899063110352 + ], + [ + "▁Olympics", + -11.714908599853516 + ], + [ + "story", + -11.715020179748535 + ], + [ + "▁namely", + -11.715124130249023 + ], + [ + "▁1997", + -11.715250015258787 + ], + [ + "▁breathtaking", + -11.715325355529783 + ], + [ + "▁20-", + -11.715547561645508 + ], + [ + "Perhaps", + -11.715600967407228 + ], + [ + "▁credentials", + -11.716084480285645 + ], + [ + "nes", + -11.716118812561035 + ], + [ + "▁thoughtful", + -11.716178894042969 + ], + [ + "▁discharge", + -11.716423034667969 + ], + [ + "▁touring", + -11.716523170471191 + ], + [ + "▁energetic", + -11.716534614562988 + ], + [ + "-9", + -11.716592788696287 + ], + [ + "▁salon", + -11.716714859008787 + ], + [ + "▁showers", + -11.716880798339844 + ], + [ + "band", + -11.716888427734377 + ], + [ + "▁calcium", + -11.717013359069824 + ], + [ + "▁approve", + -11.717287063598633 + ], + [ + "▁themed", + -11.717344284057615 + ], + [ + "▁dominant", + 
-11.717435836791992 + ], + [ + "▁Success", + -11.717596054077148 + ], + [ + "▁touching", + -11.717658996582031 + ], + [ + "▁lung", + -11.71766757965088 + ], + [ + "▁bulb", + -11.71780490875244 + ], + [ + "ably", + -11.71802806854248 + ], + [ + "▁Cultural", + -11.71806526184082 + ], + [ + "tan", + -11.718269348144531 + ], + [ + "▁lectures", + -11.718503952026367 + ], + [ + "▁hammer", + -11.718603134155272 + ], + [ + "▁Broadway", + -11.718708992004396 + ], + [ + "▁grams", + -11.71894359588623 + ], + [ + "▁Rod", + -11.718948364257812 + ], + [ + "▁bridges", + -11.71895980834961 + ], + [ + "▁Tile", + -11.71920394897461 + ], + [ + "▁Moving", + -11.71924114227295 + ], + [ + "▁cellular", + -11.7193021774292 + ], + [ + "▁Lots", + -11.719528198242188 + ], + [ + "▁allergies", + -11.719754219055176 + ], + [ + "▁configure", + -11.719974517822266 + ], + [ + "▁marketers", + -11.720122337341309 + ], + [ + "▁fragrance", + -11.720425605773926 + ], + [ + "▁acoustic", + -11.720523834228516 + ], + [ + "▁eligibility", + -11.720663070678713 + ], + [ + "▁coated", + -11.720694541931152 + ], + [ + "▁qualification", + -11.72087287902832 + ], + [ + "▁drilling", + -11.720873832702637 + ], + [ + "Pre", + -11.721047401428224 + ], + [ + "▁160", + -11.721152305603027 + ], + [ + "▁Advisory", + -11.72118854522705 + ], + [ + "▁humble", + -11.721280097961426 + ], + [ + "▁terrorist", + -11.72160816192627 + ], + [ + "▁shrimp", + -11.721861839294434 + ], + [ + "▁Champions", + -11.721894264221191 + ], + [ + "▁Steam", + -11.721956253051758 + ], + [ + "▁veggies", + -11.722016334533691 + ], + [ + "▁FM", + -11.722087860107422 + ], + [ + "▁anonymous", + -11.72222137451172 + ], + [ + "▁exquisite", + -11.72246265411377 + ], + [ + "27", + -11.722612380981444 + ], + [ + "▁allegedly", + -11.722657203674316 + ], + [ + "▁discrimination", + -11.722658157348633 + ], + [ + "▁Location", + -11.722956657409668 + ], + [ + "▁grandchildren", + -11.723352432250977 + ], + [ + "COM", + -11.723411560058594 + ], + [ + "▁spokesman", + -11.723562240600586 + ], + [ + "▁premiere", + -11.723569869995115 + ], + [ + "post", + -11.723587036132812 + ], + [ + "ES", + -11.724056243896484 + ], + [ + "???", + -11.724095344543455 + ], + [ + "▁favour", + -11.72410488128662 + ], + [ + "▁Linda", + -11.72417163848877 + ], + [ + "▁folding", + -11.724441528320312 + ], + [ + "▁rotation", + -11.72481632232666 + ], + [ + "sch", + -11.724888801574709 + ], + [ + "▁intersection", + -11.724961280822754 + ], + [ + "SA", + -11.72500228881836 + ], + [ + "▁Log", + -11.725016593933104 + ], + [ + "▁lengthy", + -11.725231170654297 + ], + [ + "away", + -11.725452423095703 + ], + [ + "▁footprint", + -11.725664138793944 + ], + [ + "▁Arena", + -11.726240158081056 + ], + [ + "▁shiny", + -11.726306915283203 + ], + [ + "▁theoretical", + -11.726346015930176 + ], + [ + "▁Unique", + -11.726360321044922 + ], + [ + "▁altogether", + -11.726948738098145 + ], + [ + "Search", + -11.727154731750488 + ], + [ + "▁£1", + -11.727164268493652 + ], + [ + "▁Wolf", + -11.72722339630127 + ], + [ + "▁accountability", + -11.727232933044434 + ], + [ + "▁withdrawal", + -11.727316856384276 + ], + [ + "▁builders", + -11.727725982666016 + ], + [ + "▁tendency", + -11.727798461914062 + ], + [ + "▁Crew", + -11.72797966003418 + ], + [ + "▁Die", + -11.728068351745604 + ], + [ + "▁capturing", + -11.728132247924805 + ], + [ + "▁Portugal", + -11.72844123840332 + ], + [ + "▁dull", + -11.72856616973877 + ], + [ + "▁acknowledged", + -11.728609085083008 + ], + [ + "▁Mitchell", + -11.728684425354004 + ], + [ + "▁playground", + 
-11.728719711303713 + ], + [ + "card", + -11.728760719299316 + ], + [ + "▁attacked", + -11.729240417480469 + ], + [ + "▁innocent", + -11.729484558105469 + ], + [ + "▁Secret", + -11.729619026184082 + ], + [ + "building", + -11.73007869720459 + ], + [ + "ell", + -11.73019313812256 + ], + [ + "▁disputes", + -11.730360984802246 + ], + [ + "py", + -11.730436325073242 + ], + [ + "income", + -11.73045539855957 + ], + [ + "▁Managing", + -11.730491638183594 + ], + [ + "China", + -11.730603218078612 + ], + [ + "▁chaos", + -11.730799674987791 + ], + [ + "▁cholesterol", + -11.730877876281738 + ], + [ + "▁pressed", + -11.730884552001951 + ], + [ + "▁coordinator", + -11.731175422668455 + ], + [ + "▁LP", + -11.731659889221191 + ], + [ + "800", + -11.731866836547852 + ], + [ + "▁Weather", + -11.731985092163086 + ], + [ + "pan", + -11.7319974899292 + ], + [ + "▁Photos", + -11.732190132141112 + ], + [ + "▁towels", + -11.732192039489746 + ], + [ + "▁compliant", + -11.73226261138916 + ], + [ + "ula", + -11.73228931427002 + ], + [ + "▁caps", + -11.732366561889648 + ], + [ + "▁melt", + -11.73243236541748 + ], + [ + "▁wellbeing", + -11.732465744018556 + ], + [ + "▁appropriately", + -11.732487678527832 + ], + [ + "▁celebrations", + -11.732569694519045 + ], + [ + "▁Retail", + -11.732589721679688 + ], + [ + "cast", + -11.732656478881836 + ], + [ + "▁spark", + -11.732800483703612 + ], + [ + "▁cylinder", + -11.732903480529783 + ], + [ + "▁1%", + -11.733074188232422 + ], + [ + "▁GP", + -11.733181953430176 + ], + [ + "▁extending", + -11.733924865722656 + ], + [ + "▁Cisco", + -11.73405647277832 + ], + [ + "▁mixer", + -11.734060287475586 + ], + [ + "▁Left", + -11.73406219482422 + ], + [ + "▁exhibits", + -11.73430061340332 + ], + [ + "web", + -11.734654426574709 + ], + [ + "Everyone", + -11.734829902648926 + ], + [ + "▁homeowner", + -11.73486328125 + ], + [ + "▁Taiwan", + -11.73495864868164 + ], + [ + "▁grief", + -11.735026359558104 + ], + [ + "family", + -11.735077857971191 + ], + [ + "ena", + -11.735078811645508 + ], + [ + "Due", + -11.735114097595217 + ], + [ + "▁crowded", + -11.735336303710938 + ], + [ + "ay", + -11.735389709472656 + ], + [ + "▁reporter", + -11.735414505004885 + ], + [ + "▁behavioral", + -11.73556900024414 + ], + [ + "▁encouragement", + -11.735672950744627 + ], + [ + "▁wishing", + -11.735681533813477 + ], + [ + "▁$500", + -11.735753059387209 + ], + [ + "▁THAT", + -11.735939979553224 + ], + [ + "▁memorial", + -11.73594093322754 + ], + [ + "▁startups", + -11.736021995544434 + ], + [ + "▁Tampa", + -11.736166954040527 + ], + [ + "▁publishers", + -11.73627471923828 + ], + [ + "▁evolve", + -11.73659610748291 + ], + [ + "media", + -11.736791610717772 + ], + [ + "▁bonuses", + -11.736883163452148 + ], + [ + "▁2,000", + -11.737253189086914 + ], + [ + "▁pins", + -11.73770236968994 + ], + [ + "▁Return", + -11.737716674804688 + ], + [ + "▁glance", + -11.737919807434082 + ], + [ + "▁wonders", + -11.73809051513672 + ], + [ + "lee", + -11.738329887390137 + ], + [ + "▁affair", + -11.738375663757324 + ], + [ + "PA", + -11.738584518432615 + ], + [ + "600", + -11.738693237304688 + ], + [ + "▁fraction", + -11.738791465759276 + ], + [ + "▁arising", + -11.73880100250244 + ], + [ + "▁insects", + -11.739093780517578 + ], + [ + "▁genuinely", + -11.739948272705078 + ], + [ + "▁extensively", + -11.740327835083008 + ], + [ + "▁realised", + -11.740534782409668 + ], + [ + "▁scientist", + -11.740554809570312 + ], + [ + "▁dominated", + -11.74070930480957 + ], + [ + "▁Shanghai", + -11.7407865524292 + ], + [ + "▁Sep", + 
-11.74096393585205 + ], + [ + "▁frustrated", + -11.74109935760498 + ], + [ + "▁alerts", + -11.74121379852295 + ], + [ + "▁hydraulic", + -11.741374969482422 + ], + [ + "▁professionalism", + -11.741519927978516 + ], + [ + "▁assortment", + -11.741888046264648 + ], + [ + "ika", + -11.741971015930176 + ], + [ + "▁revolutionary", + -11.742466926574709 + ], + [ + "▁beginner", + -11.742511749267578 + ], + [ + "▁laying", + -11.742545127868652 + ], + [ + "ator", + -11.742632865905762 + ], + [ + "▁Worth", + -11.742961883544922 + ], + [ + "PC", + -11.743041038513184 + ], + [ + "▁diamonds", + -11.74309539794922 + ], + [ + "tor", + -11.743247985839844 + ], + [ + "▁sentences", + -11.743342399597168 + ], + [ + "▁Phase", + -11.743558883666992 + ], + [ + "▁Sorry", + -11.744032859802246 + ], + [ + "▁cattle", + -11.744063377380373 + ], + [ + "▁asleep", + -11.74428367614746 + ], + [ + "▁payday", + -11.744291305541992 + ], + [ + "season", + -11.744340896606444 + ], + [ + "iv", + -11.744388580322266 + ], + [ + "▁prohibited", + -11.744592666625977 + ], + [ + "love", + -11.744625091552734 + ], + [ + "▁divide", + -11.744647979736328 + ], + [ + "▁Client", + -11.744648933410645 + ], + [ + "▁shoppers", + -11.744755744934082 + ], + [ + "▁flush", + -11.74520206451416 + ], + [ + "▁float", + -11.745448112487791 + ], + [ + "▁linen", + -11.745635986328123 + ], + [ + "▁Traditional", + -11.745683670043944 + ], + [ + "▁License", + -11.745713233947754 + ], + [ + "front", + -11.745834350585938 + ], + [ + "▁1)", + -11.745845794677734 + ], + [ + "▁respondents", + -11.74587059020996 + ], + [ + "▁Got", + -11.745948791503906 + ], + [ + "Online", + -11.746410369873049 + ], + [ + "33", + -11.74714183807373 + ], + [ + "▁midst", + -11.747207641601562 + ], + [ + "▁connector", + -11.747565269470217 + ], + [ + "▁ankle", + -11.747939109802246 + ], + [ + "cy", + -11.74805736541748 + ], + [ + "▁editors", + -11.748419761657717 + ], + [ + "▁bachelor", + -11.748534202575684 + ], + [ + "▁Desk", + -11.74865436553955 + ], + [ + "▁throat", + -11.748786926269531 + ], + [ + "var", + -11.748921394348145 + ], + [ + "▁knock", + -11.7489595413208 + ], + [ + "▁timeline", + -11.749120712280272 + ], + [ + "▁slim", + -11.749250411987305 + ], + [ + "▁NOW", + -11.749329566955566 + ], + [ + "▁theories", + -11.74942684173584 + ], + [ + "▁patches", + -11.749505996704102 + ], + [ + "▁illustrated", + -11.749591827392578 + ], + [ + "▁Teaching", + -11.74960231781006 + ], + [ + "▁graph", + -11.749804496765137 + ], + [ + "▁Climate", + -11.749822616577148 + ], + [ + "▁gameplay", + -11.749900817871094 + ], + [ + "▁foundations", + -11.749907493591309 + ], + [ + "▁undoubtedly", + -11.75010585784912 + ], + [ + "▁Match", + -11.750210762023926 + ], + [ + "▁distinction", + -11.750362396240234 + ], + [ + "▁Margaret", + -11.750394821166992 + ], + [ + "▁seminars", + -11.75052547454834 + ], + [ + "▁hi", + -11.750648498535156 + ], + [ + "lu", + -11.750848770141602 + ], + [ + "David", + -11.750983238220217 + ], + [ + "Full", + -11.751005172729492 + ], + [ + "▁overwhelmed", + -11.751070976257324 + ], + [ + "▁scanning", + -11.751260757446287 + ], + [ + "code", + -11.751522064208984 + ], + [ + "▁occupation", + -11.751558303833008 + ], + [ + "▁accessibility", + -11.751696586608888 + ], + [ + "▁Almost", + -11.751712799072266 + ], + [ + "▁approx", + -11.75229549407959 + ], + [ + "▁fires", + -11.752361297607422 + ], + [ + "▁Disease", + -11.75253200531006 + ], + [ + "▁cycles", + -11.752622604370115 + ], + [ + "▁positively", + -11.752824783325195 + ], + [ + "▁technically", + 
-11.753034591674805 + ], + [ + "▁Industries", + -11.753153800964355 + ], + [ + "▁fever", + -11.753171920776367 + ], + [ + "▁badge", + -11.753504753112791 + ], + [ + "▁Morris", + -11.753539085388184 + ], + [ + "▁stats", + -11.75365161895752 + ], + [ + "▁lacking", + -11.753759384155272 + ], + [ + "▁Engineer", + -11.75383758544922 + ], + [ + "▁collapse", + -11.753998756408691 + ], + [ + "▁anchor", + -11.754000663757324 + ], + [ + "▁communicating", + -11.75401210784912 + ], + [ + "▁Cyber", + -11.754228591918944 + ], + [ + "▁(3)", + -11.754243850708008 + ], + [ + "data", + -11.754249572753906 + ], + [ + "pen", + -11.75428581237793 + ], + [ + "▁Stainless", + -11.754624366760254 + ], + [ + "▁Cable", + -11.754645347595217 + ], + [ + "▁monster", + -11.754924774169922 + ], + [ + "▁renewed", + -11.75519561767578 + ], + [ + "▁marking", + -11.755300521850586 + ], + [ + "▁weigh", + -11.755417823791504 + ], + [ + "▁Brazilian", + -11.755535125732422 + ], + [ + "▁clue", + -11.75558853149414 + ], + [ + "Instead", + -11.75561237335205 + ], + [ + "▁rows", + -11.75580883026123 + ], + [ + "▁parameter", + -11.756032943725586 + ], + [ + "ding", + -11.756107330322266 + ], + [ + "▁pale", + -11.756364822387695 + ], + [ + "▁shareholders", + -11.756528854370115 + ], + [ + "▁1930", + -11.756613731384276 + ], + [ + "▁Robinson", + -11.756670951843262 + ], + [ + "▁Morning", + -11.75670337677002 + ], + [ + "uri", + -11.756816864013672 + ], + [ + "cher", + -11.756871223449709 + ], + [ + "▁vibe", + -11.757306098937988 + ], + [ + "▁optimized", + -11.757357597351074 + ], + [ + "▁Dry", + -11.757488250732422 + ], + [ + "▁CAN", + -11.757587432861328 + ], + [ + "▁licence", + -11.757608413696287 + ], + [ + "▁existed", + -11.75779151916504 + ], + [ + "▁architects", + -11.757953643798828 + ], + [ + "▁burned", + -11.757969856262209 + ], + [ + "▁needle", + -11.758106231689451 + ], + [ + "▁Prices", + -11.75834846496582 + ], + [ + "▁Emma", + -11.7583589553833 + ], + [ + "▁63", + -11.758416175842283 + ], + [ + "▁associates", + -11.758537292480469 + ], + [ + "▁boom", + -11.758618354797363 + ], + [ + "▁Knowledge", + -11.758707046508787 + ], + [ + "▁intentions", + -11.75885009765625 + ], + [ + "▁hunger", + -11.758893966674805 + ], + [ + "▁discovering", + -11.759132385253906 + ], + [ + "▁OEM", + -11.759320259094238 + ], + [ + "▁withstand", + -11.759387969970703 + ], + [ + "amp", + -11.759599685668944 + ], + [ + "▁branded", + -11.759950637817385 + ], + [ + "▁76", + -11.760091781616213 + ], + [ + "▁Offer", + -11.760233879089355 + ], + [ + "▁accountable", + -11.760257720947266 + ], + [ + "▁Sugar", + -11.76071071624756 + ], + [ + "▁Math", + -11.760991096496582 + ], + [ + "▁Ultimate", + -11.761533737182615 + ], + [ + "▁reporters", + -11.761609077453612 + ], + [ + "▁abundant", + -11.761611938476562 + ], + [ + "▁logged", + -11.761651992797852 + ], + [ + "SE", + -11.762113571166992 + ], + [ + "▁damaging", + -11.762131690979004 + ], + [ + "▁sync", + -11.762303352355955 + ], + [ + "▁Downtown", + -11.762310981750488 + ], + [ + "▁Col", + -11.762316703796388 + ], + [ + "par", + -11.762495994567873 + ], + [ + "▁sour", + -11.76255989074707 + ], + [ + "▁Wireless", + -11.762578964233398 + ], + [ + "▁Knight", + -11.762580871582031 + ], + [ + "▁undertaken", + -11.762630462646484 + ], + [ + "▁2006.", + -11.76267147064209 + ], + [ + "▁initiated", + -11.762819290161133 + ], + [ + "▁Puerto", + -11.76296043395996 + ], + [ + "▁stains", + -11.762984275817873 + ], + [ + "▁Mel", + -11.76303768157959 + ], + [ + "THE", + -11.763096809387209 + ], + [ + "▁modifications", + 
-11.763336181640623 + ], + [ + "▁Nevertheless", + -11.763457298278809 + ], + [ + "▁meta", + -11.763510704040527 + ], + [ + "▁cricket", + -11.76353645324707 + ], + [ + "▁Academic", + -11.763586044311523 + ], + [ + "▁workflow", + -11.763839721679688 + ], + [ + "▁advocates", + -11.764120101928713 + ], + [ + "▁Michelle", + -11.764375686645508 + ], + [ + "▁$4", + -11.764740943908691 + ], + [ + "▁introduces", + -11.764798164367676 + ], + [ + "▁tattoo", + -11.764826774597168 + ], + [ + "Big", + -11.764976501464844 + ], + [ + "▁chase", + -11.764981269836426 + ], + [ + "▁violations", + -11.765239715576172 + ], + [ + "▁awful", + -11.76526927947998 + ], + [ + "▁assign", + -11.765398025512695 + ], + [ + "Shop", + -11.765419006347656 + ], + [ + "▁Agent", + -11.765423774719238 + ], + [ + "▁viral", + -11.765737533569336 + ], + [ + "▁Larry", + -11.765816688537598 + ], + [ + "▁beverages", + -11.765875816345217 + ], + [ + "▁giveaway", + -11.766101837158203 + ], + [ + "400", + -11.7661714553833 + ], + [ + "▁positioning", + -11.766725540161133 + ], + [ + "▁compression", + -11.766820907592772 + ], + [ + "▁Wheel", + -11.767047882080078 + ], + [ + "▁unforgettable", + -11.767167091369627 + ], + [ + "▁Schedule", + -11.767220497131348 + ], + [ + "▁decides", + -11.76724910736084 + ], + [ + "▁Dell", + -11.76732063293457 + ], + [ + "▁Marc", + -11.767386436462402 + ], + [ + "▁partnered", + -11.76748275756836 + ], + [ + "▁cocktails", + -11.76764678955078 + ], + [ + "Currently", + -11.767742156982422 + ], + [ + "▁Fred", + -11.767996788024902 + ], + [ + "▁140", + -11.768051147460938 + ], + [ + "med", + -11.768211364746094 + ], + [ + "▁retro", + -11.768572807312012 + ], + [ + "▁Queensland", + -11.76866340637207 + ], + [ + "General", + -11.76874542236328 + ], + [ + "▁Historical", + -11.76881217956543 + ], + [ + "▁switched", + -11.768861770629885 + ], + [ + "▁horrible", + -11.768881797790527 + ], + [ + "▁nowadays", + -11.768935203552246 + ], + [ + "▁storing", + -11.769381523132324 + ], + [ + "▁gravity", + -11.769412994384766 + ], + [ + "Choose", + -11.76944065093994 + ], + [ + "ari", + -11.769536018371582 + ], + [ + "zo", + -11.769537925720217 + ], + [ + "maker", + -11.769841194152832 + ], + [ + "▁Higher", + -11.769936561584473 + ], + [ + "▁Sat", + -11.77001667022705 + ], + [ + "▁renewal", + -11.7701997756958 + ], + [ + "▁cancellation", + -11.770235061645508 + ], + [ + "▁1-2", + -11.77045440673828 + ], + [ + "▁municipal", + -11.770537376403809 + ], + [ + "▁allegations", + -11.77070426940918 + ], + [ + "▁Plaza", + -11.770709037780762 + ], + [ + "▁driveway", + -11.770759582519531 + ], + [ + "▁poorly", + -11.770977020263672 + ], + [ + "▁Vermont", + -11.771245002746582 + ], + [ + "▁kicked", + -11.77152156829834 + ], + [ + "▁hygiene", + -11.771591186523438 + ], + [ + "▁Prof", + -11.771953582763672 + ], + [ + "▁Calendar", + -11.772034645080566 + ], + [ + "▁Fat", + -11.772037506103516 + ], + [ + "▁malware", + -11.772627830505373 + ], + [ + "▁Corps", + -11.772738456726074 + ], + [ + "▁literacy", + -11.77275848388672 + ], + [ + "26", + -11.772982597351074 + ], + [ + "▁hardest", + -11.7730131149292 + ], + [ + "▁67", + -11.773018836975098 + ], + [ + "▁Gun", + -11.773036003112791 + ], + [ + "▁Hat", + -11.773070335388184 + ], + [ + "▁Breakfast", + -11.773305892944336 + ], + [ + "▁strongest", + -11.77370262145996 + ], + [ + "▁sentiment", + -11.773781776428224 + ], + [ + "▁Built", + -11.773893356323242 + ], + [ + "▁desperate", + -11.77394962310791 + ], + [ + "▁invitations", + -11.774434089660645 + ], + [ + "▁Han", + -11.774782180786133 + 
], + [ + "million", + -11.774928092956545 + ], + [ + "▁Pure", + -11.774935722351074 + ], + [ + "▁emerge", + -11.775042533874512 + ], + [ + "▁Privacy", + -11.775067329406738 + ], + [ + "▁adhere", + -11.77549171447754 + ], + [ + "▁domains", + -11.77586269378662 + ], + [ + "▁bias", + -11.776374816894531 + ], + [ + "▁Volume", + -11.77639389038086 + ], + [ + "▁incorporating", + -11.776394844055176 + ], + [ + "▁engineered", + -11.776450157165527 + ], + [ + "▁suspected", + -11.77694320678711 + ], + [ + "▁Deputy", + -11.777196884155272 + ], + [ + "▁securing", + -11.777372360229492 + ], + [ + "▁critics", + -11.777647972106934 + ], + [ + "▁cleaners", + -11.777681350708008 + ], + [ + "▁breeding", + -11.777769088745115 + ], + [ + "▁anxious", + -11.777996063232422 + ], + [ + "▁ka", + -11.778180122375488 + ], + [ + "▁activation", + -11.778247833251951 + ], + [ + "▁mum", + -11.778321266174316 + ], + [ + "▁fixture", + -11.778335571289062 + ], + [ + "▁lesser", + -11.77846908569336 + ], + [ + "Need", + -11.778544425964355 + ], + [ + "▁Notice", + -11.778605461120604 + ], + [ + "▁cab", + -11.7786283493042 + ], + [ + "Plus", + -11.77872085571289 + ], + [ + "▁Brisbane", + -11.778888702392578 + ], + [ + "▁$6", + -11.778989791870115 + ], + [ + "makers", + -11.779038429260254 + ], + [ + "▁blocking", + -11.779182434082031 + ], + [ + "▁Materials", + -11.779500007629396 + ], + [ + "▁Different", + -11.779532432556152 + ], + [ + "burg", + -11.779626846313477 + ], + [ + "risk", + -11.779706954956056 + ], + [ + "▁deadly", + -11.779979705810549 + ], + [ + "▁curiosity", + -11.78005027770996 + ], + [ + "▁Customers", + -11.780179977416992 + ], + [ + "▁Excellent", + -11.780315399169922 + ], + [ + "▁raises", + -11.780508041381836 + ], + [ + "place", + -11.780517578125 + ], + [ + "▁1/4", + -11.780630111694336 + ], + [ + "▁tan", + -11.780679702758787 + ], + [ + "elle", + -11.780698776245115 + ], + [ + "▁interventions", + -11.780763626098633 + ], + [ + "soft", + -11.78085994720459 + ], + [ + "▁fame", + -11.78138256072998 + ], + [ + "▁VAT", + -11.781461715698242 + ], + [ + "▁#2", + -11.781527519226074 + ], + [ + "▁Yahoo", + -11.781625747680664 + ], + [ + "Description", + -11.781795501708984 + ], + [ + "▁beginners", + -11.781800270080566 + ], + [ + "▁130", + -11.781867027282717 + ], + [ + "▁Switch", + -11.782023429870604 + ], + [ + "▁indicating", + -11.782024383544922 + ], + [ + "▁strengthening", + -11.782061576843262 + ], + [ + "▁youngest", + -11.782198905944824 + ], + [ + "▁Hub", + -11.7822265625 + ], + [ + "▁optimum", + -11.782289505004885 + ], + [ + "CD", + -11.78240966796875 + ], + [ + "▁yeah", + -11.782554626464844 + ], + [ + "▁framed", + -11.78297519683838 + ], + [ + "piece", + -11.78299617767334 + ], + [ + "▁coloured", + -11.78313446044922 + ], + [ + "range", + -11.78316593170166 + ], + [ + "works", + -11.783166885375977 + ], + [ + "▁Palestinian", + -11.78325080871582 + ], + [ + "▁formerly", + -11.783270835876465 + ], + [ + "▁TN", + -11.783312797546388 + ], + [ + "▁mathematics", + -11.783634185791016 + ], + [ + "▁marathon", + -11.783649444580078 + ], + [ + "▁genes", + -11.783674240112305 + ], + [ + "▁batter", + -11.783930778503418 + ], + [ + "ala", + -11.783949851989746 + ], + [ + "▁inevitable", + -11.78404140472412 + ], + [ + "▁Excellence", + -11.78410530090332 + ], + [ + "▁Hudson", + -11.784205436706545 + ], + [ + "▁drainage", + -11.78460693359375 + ], + [ + "▁fatty", + -11.78466510772705 + ], + [ + "▁helmet", + -11.784671783447266 + ], + [ + "▁vocals", + -11.7849760055542 + ], + [ + "▁pastor", + -11.784979820251465 + ], 
+ [ + "▁servicing", + -11.78498077392578 + ], + [ + "▁Adventure", + -11.78513240814209 + ], + [ + "▁recognise", + -11.78542709350586 + ], + [ + "▁devastating", + -11.785429954528809 + ], + [ + "▁vegetarian", + -11.78562831878662 + ], + [ + "sha", + -11.78585720062256 + ], + [ + "▁eighth", + -11.786092758178713 + ], + [ + "▁Adult", + -11.7861909866333 + ], + [ + "▁humidity", + -11.78634548187256 + ], + [ + "▁forex", + -11.786524772644045 + ], + [ + "▁Starting", + -11.786548614501951 + ], + [ + "▁kg", + -11.786635398864746 + ], + [ + "▁Samuel", + -11.787043571472168 + ], + [ + "▁emotionally", + -11.78705596923828 + ], + [ + "five", + -11.787153244018556 + ], + [ + "▁Package", + -11.787221908569336 + ], + [ + "▁beats", + -11.787242889404297 + ], + [ + "▁Pot", + -11.787328720092772 + ], + [ + "▁Glasgow", + -11.787872314453123 + ], + [ + "▁bamboo", + -11.788009643554688 + ], + [ + "▁prompted", + -11.78835105895996 + ], + [ + "ide", + -11.788424491882324 + ], + [ + "eg", + -11.788814544677734 + ], + [ + "▁8-", + -11.788838386535645 + ], + [ + "▁analyzed", + -11.788858413696287 + ], + [ + "iest", + -11.788878440856934 + ], + [ + "▁Vitamin", + -11.788965225219728 + ], + [ + "stage", + -11.789116859436035 + ], + [ + "▁fifty", + -11.789294242858888 + ], + [ + "75", + -11.78947639465332 + ], + [ + "▁licenses", + -11.789548873901367 + ], + [ + "▁debit", + -11.789616584777832 + ], + [ + "max", + -11.789639472961426 + ], + [ + "tro", + -11.789640426635742 + ], + [ + "▁acclaimed", + -11.789833068847656 + ], + [ + "ane", + -11.789834022521973 + ], + [ + "▁beam", + -11.789922714233398 + ], + [ + "▁recommends", + -11.789956092834473 + ], + [ + "▁candles", + -11.78996467590332 + ], + [ + "▁imagined", + -11.789984703063965 + ], + [ + "▁commentary", + -11.790048599243164 + ], + [ + "▁girlfriend", + -11.790154457092283 + ], + [ + "▁relieve", + -11.7902193069458 + ], + [ + "▁Ja", + -11.79024887084961 + ], + [ + "▁waited", + -11.79025650024414 + ], + [ + "▁repaired", + -11.790472984313965 + ], + [ + "▁Roger", + -11.790587425231934 + ], + [ + "▁MN", + -11.790666580200195 + ], + [ + "▁apparel", + -11.79084014892578 + ], + [ + "▁Hampshire", + -11.790864944458008 + ], + [ + "▁splash", + -11.791003227233888 + ], + [ + "▁Oscar", + -11.791024208068848 + ], + [ + "▁invaluable", + -11.791093826293944 + ], + [ + "▁concentrated", + -11.791289329528809 + ], + [ + "Out", + -11.79183864593506 + ], + [ + "▁defects", + -11.792228698730469 + ], + [ + "▁failures", + -11.792552947998049 + ], + [ + "▁tee", + -11.792604446411133 + ], + [ + "▁furnished", + -11.792759895324709 + ], + [ + "▁internship", + -11.79288387298584 + ], + [ + "▁expects", + -11.792925834655762 + ], + [ + "wan", + -11.792941093444824 + ], + [ + "▁Val", + -11.792988777160645 + ], + [ + "-15", + -11.793374061584473 + ], + [ + "▁glorious", + -11.79369831085205 + ], + [ + "▁Partnership", + -11.793899536132812 + ], + [ + "tin", + -11.793974876403809 + ], + [ + "▁imperative", + -11.794008255004885 + ], + [ + "▁burst", + -11.794323921203612 + ], + [ + "▁Bright", + -11.794404029846191 + ], + [ + "▁stickers", + -11.794754981994627 + ], + [ + "▁monitored", + -11.794873237609863 + ], + [ + "▁flagship", + -11.794964790344238 + ], + [ + "▁unprecedented", + -11.79500961303711 + ], + [ + "▁Parks", + -11.795110702514648 + ], + [ + "▁threads", + -11.795132637023926 + ], + [ + "▁buffet", + -11.795207977294922 + ], + [ + "▁collision", + -11.795221328735352 + ], + [ + "▁freshly", + -11.795307159423828 + ], + [ + "▁contrary", + -11.795376777648926 + ], + [ + "▁melted", + 
-11.795382499694824 + ], + [ + "т", + -11.79544448852539 + ], + [ + "was", + -11.796259880065918 + ], + [ + "▁parish", + -11.796269416809082 + ], + [ + "BC", + -11.796469688415527 + ], + [ + "▁Ban", + -11.796521186828612 + ], + [ + "▁Jun", + -11.79660701751709 + ], + [ + "▁Fr", + -11.796974182128906 + ], + [ + "▁Stars", + -11.797019004821776 + ], + [ + "▁SAP", + -11.797099113464355 + ], + [ + "▁FBI", + -11.79720973968506 + ], + [ + "RS", + -11.797375679016112 + ], + [ + "▁Bike", + -11.797595024108888 + ], + [ + "▁viewer", + -11.79771900177002 + ], + [ + "yn", + -11.797737121582031 + ], + [ + "▁Montreal", + -11.797755241394045 + ], + [ + "AC", + -11.79814624786377 + ], + [ + "▁knees", + -11.798184394836426 + ], + [ + "▁continent", + -11.79818630218506 + ], + [ + "▁tissues", + -11.798234939575195 + ], + [ + "▁CRM", + -11.798501014709473 + ], + [ + "▁Tar", + -11.798565864562988 + ], + [ + "▁Gear", + -11.798605918884276 + ], + [ + "▁china", + -11.79863452911377 + ], + [ + "▁activists", + -11.79885482788086 + ], + [ + "▁uniquely", + -11.799131393432615 + ], + [ + "▁configured", + -11.799174308776855 + ], + [ + "▁77", + -11.799384117126465 + ], + [ + "▁SUV", + -11.79946517944336 + ], + [ + "▁ladder", + -11.799625396728516 + ], + [ + "▁bail", + -11.799631118774414 + ], + [ + "ue", + -11.799816131591797 + ], + [ + "▁scrap", + -11.799861907958984 + ], + [ + "▁gang", + -11.79991054534912 + ], + [ + "▁Zoo", + -11.799991607666016 + ], + [ + "▁Potter", + -11.80018711090088 + ], + [ + "▁analytical", + -11.800248146057127 + ], + [ + "▁Sch", + -11.800301551818848 + ], + [ + "har", + -11.800318717956545 + ], + [ + "▁Nepal", + -11.800423622131348 + ], + [ + "▁yummy", + -11.80043601989746 + ], + [ + "▁Loan", + -11.800498008728027 + ], + [ + "▁peppers", + -11.80056381225586 + ], + [ + "▁highlighting", + -11.80068588256836 + ], + [ + "▁ya", + -11.800714492797852 + ], + [ + "▁parade", + -11.800787925720217 + ], + [ + "▁SF", + -11.800914764404297 + ], + [ + "▁doubled", + -11.801029205322266 + ], + [ + "▁Wide", + -11.801072120666504 + ], + [ + "bridge", + -11.80138874053955 + ], + [ + "▁commented", + -11.801508903503418 + ], + [ + "gan", + -11.801718711853027 + ], + [ + "▁Audi", + -11.801862716674805 + ], + [ + "▁zoom", + -11.801942825317385 + ], + [ + "▁predictions", + -11.801946640014648 + ], + [ + "▁modification", + -11.801961898803713 + ], + [ + "▁Flower", + -11.802008628845217 + ], + [ + "▁correction", + -11.802068710327148 + ], + [ + "▁Planet", + -11.8021879196167 + ], + [ + "▁Sara", + -11.802310943603516 + ], + [ + "▁CS", + -11.802464485168455 + ], + [ + "▁Marshall", + -11.802478790283203 + ], + [ + "▁Assessment", + -11.802536010742188 + ], + [ + "▁pills", + -11.802608489990234 + ], + [ + "Water", + -11.802638053894045 + ], + [ + "Located", + -11.80270290374756 + ], + [ + "▁shortage", + -11.803200721740724 + ], + [ + "▁intriguing", + -11.80357551574707 + ], + [ + "Com", + -11.803688049316406 + ], + [ + "▁Conservation", + -11.803759574890137 + ], + [ + "▁medal", + -11.803777694702148 + ], + [ + "color", + -11.803869247436523 + ], + [ + "▁soak", + -11.803898811340332 + ], + [ + "▁metallic", + -11.80393886566162 + ], + [ + "▁blended", + -11.804014205932615 + ], + [ + "▁detailing", + -11.804443359375 + ], + [ + "▁searched", + -11.804510116577148 + ], + [ + "▁sketch", + -11.804743766784668 + ], + [ + "▁rider", + -11.804780960083008 + ], + [ + "▁cow", + -11.804821968078612 + ], + [ + "▁blinds", + -11.804999351501465 + ], + [ + "▁Drop", + -11.805136680603027 + ], + [ + "▁Bull", + -11.805413246154783 + ], + [ + 
"▁Solution", + -11.805538177490234 + ], + [ + "▁beaten", + -11.805550575256348 + ], + [ + "▁2005.", + -11.805570602416992 + ], + [ + "▁realm", + -11.80570125579834 + ], + [ + "wheel", + -11.805848121643066 + ], + [ + "br", + -11.80599308013916 + ], + [ + "▁Material", + -11.80629062652588 + ], + [ + "▁Purchase", + -11.806467056274414 + ], + [ + "az", + -11.806546211242676 + ], + [ + "▁portions", + -11.806689262390137 + ], + [ + "▁worrying", + -11.807148933410645 + ], + [ + "White", + -11.807280540466309 + ], + [ + "▁flooding", + -11.807293891906738 + ], + [ + "body", + -11.807337760925291 + ], + [ + "▁2006,", + -11.80737590789795 + ], + [ + "▁tutorials", + -11.80741024017334 + ], + [ + "▁aligned", + -11.80749225616455 + ], + [ + "▁threshold", + -11.80752182006836 + ], + [ + "▁kindness", + -11.807661056518556 + ], + [ + "▁modular", + -11.807672500610352 + ], + [ + "▁temporarily", + -11.807820320129396 + ], + [ + "ties", + -11.80808162689209 + ], + [ + "▁(6", + -11.808135032653809 + ], + [ + "▁Continue", + -11.808371543884276 + ], + [ + "▁spinal", + -11.80842399597168 + ], + [ + "▁adapter", + -11.808612823486328 + ], + [ + "▁Actually", + -11.808720588684082 + ], + [ + "check", + -11.80874729156494 + ], + [ + "▁Consulting", + -11.80881118774414 + ], + [ + "▁Roy", + -11.80894374847412 + ], + [ + "▁infected", + -11.809016227722168 + ], + [ + "▁testimony", + -11.809083938598633 + ], + [ + "▁imposed", + -11.80948257446289 + ], + [ + "▁attribute", + -11.809526443481444 + ], + [ + "▁preservation", + -11.80954933166504 + ], + [ + "▁freelance", + -11.810302734375 + ], + [ + "▁Sleep", + -11.810869216918944 + ], + [ + "▁clearing", + -11.810962677001951 + ], + [ + "▁Episode", + -11.810985565185549 + ], + [ + "af", + -11.811034202575684 + ], + [ + "▁Finding", + -11.811213493347168 + ], + [ + "▁deny", + -11.811513900756836 + ], + [ + "ene", + -11.811837196350098 + ], + [ + "▁celebrities", + -11.811849594116213 + ], + [ + "▁Alpha", + -11.81185817718506 + ], + [ + "▁imagery", + -11.81196117401123 + ], + [ + "▁mega", + -11.811996459960938 + ], + [ + "▁shield", + -11.812055587768556 + ], + [ + "▁angles", + -11.812496185302734 + ], + [ + "State", + -11.812687873840332 + ], + [ + "▁cushion", + -11.813033103942873 + ], + [ + "Mobile", + -11.813040733337402 + ], + [ + "▁furnishings", + -11.813151359558104 + ], + [ + "▁encryption", + -11.813241958618164 + ], + [ + "▁Release", + -11.8133544921875 + ], + [ + "▁Championships", + -11.813394546508787 + ], + [ + "▁Cap", + -11.81384563446045 + ], + [ + "▁Around", + -11.81398582458496 + ], + [ + "▁payroll", + -11.814000129699709 + ], + [ + "▁freezing", + -11.81406307220459 + ], + [ + "Give", + -11.814064025878906 + ], + [ + "▁greenhouse", + -11.81418514251709 + ], + [ + "▁Carol", + -11.814348220825195 + ], + [ + "▁trades", + -11.81435775756836 + ], + [ + "▁stretching", + -11.814358711242676 + ], + [ + "▁Nissan", + -11.814425468444824 + ], + [ + "▁propose", + -11.814599990844728 + ], + [ + "▁Jessica", + -11.81461238861084 + ], + [ + "▁Branch", + -11.81491756439209 + ], + [ + "▁seamlessly", + -11.81496524810791 + ], + [ + "▁competitor", + -11.815000534057615 + ], + [ + "▁renting", + -11.815279960632324 + ], + [ + "▁monitors", + -11.81531810760498 + ], + [ + "▁Ward", + -11.815418243408203 + ], + [ + "tra", + -11.81565284729004 + ], + [ + "table", + -11.815692901611328 + ], + [ + "▁Record", + -11.815910339355469 + ], + [ + "▁Syrian", + -11.81591510772705 + ], + [ + "▁insulin", + -11.816048622131348 + ], + [ + "▁Initiative", + -11.81607723236084 + ], + [ + "wise", + 
-11.816213607788086 + ], + [ + "▁rigorous", + -11.81623649597168 + ], + [ + "▁preserved", + -11.816290855407717 + ], + [ + "▁honour", + -11.816476821899414 + ], + [ + "▁unsure", + -11.816513061523438 + ], + [ + "than", + -11.816643714904783 + ], + [ + "▁Stir", + -11.816696166992188 + ], + [ + "▁Cars", + -11.816705703735352 + ], + [ + "▁fellowship", + -11.816766738891602 + ], + [ + "kar", + -11.81682586669922 + ], + [ + "▁consensus", + -11.816939353942873 + ], + [ + "▁renovated", + -11.817105293273926 + ], + [ + "van", + -11.81721019744873 + ], + [ + "▁traits", + -11.817214965820312 + ], + [ + "▁Fed", + -11.81725025177002 + ], + [ + "▁faithful", + -11.817410469055176 + ], + [ + "▁IoT", + -11.817713737487791 + ], + [ + "▁MY", + -11.817764282226562 + ], + [ + "▁Born", + -11.817801475524902 + ], + [ + "▁earrings", + -11.817971229553224 + ], + [ + "▁Truck", + -11.81831169128418 + ], + [ + "power", + -11.8184814453125 + ], + [ + "▁recipients", + -11.818808555603027 + ], + [ + "ped", + -11.820087432861328 + ], + [ + "MA", + -11.820151329040527 + ], + [ + "▁Kar", + -11.820257186889648 + ], + [ + "ado", + -11.820404052734377 + ], + [ + "▁Firm", + -11.820453643798828 + ], + [ + "▁Explore", + -11.820477485656738 + ], + [ + "▁Corner", + -11.82056999206543 + ], + [ + "▁Gospel", + -11.820780754089355 + ], + [ + "▁voluntary", + -11.82079029083252 + ], + [ + "▁surround", + -11.820915222167969 + ], + [ + "tu", + -11.821075439453123 + ], + [ + "11.", + -11.821165084838867 + ], + [ + "▁aiming", + -11.821173667907717 + ], + [ + "▁quarterly", + -11.821675300598145 + ], + [ + "bound", + -11.821681022644045 + ], + [ + "▁Woman", + -11.82175350189209 + ], + [ + "29", + -11.82179355621338 + ], + [ + "OK", + -11.821871757507324 + ], + [ + "86", + -11.82188892364502 + ], + [ + "▁withdraw", + -11.821928977966309 + ], + [ + "len", + -11.8219633102417 + ], + [ + "▁Mega", + -11.82224941253662 + ], + [ + "selling", + -11.82237148284912 + ], + [ + "▁wealthy", + -11.822491645812988 + ], + [ + "▁broadband", + -11.822554588317873 + ], + [ + "▁1995", + -11.82271671295166 + ], + [ + "▁opera", + -11.823139190673828 + ], + [ + "▁Concrete", + -11.823226928710938 + ], + [ + "▁composer", + -11.823356628417969 + ], + [ + "▁weakness", + -11.82345485687256 + ], + [ + "▁SS", + -11.823748588562012 + ], + [ + "▁defendant", + -11.824094772338867 + ], + [ + "▁medieval", + -11.82414722442627 + ], + [ + "▁61", + -11.824226379394531 + ], + [ + "Web", + -11.824501037597656 + ], + [ + "▁Perry", + -11.82453441619873 + ], + [ + "▁2)", + -11.82479190826416 + ], + [ + "▁displaying", + -11.824804306030272 + ], + [ + "▁Keith", + -11.825213432312012 + ], + [ + "▁admire", + -11.82542896270752 + ], + [ + "▁Woods", + -11.82559871673584 + ], + [ + "cap", + -11.825725555419922 + ], + [ + "▁Heavy", + -11.825870513916016 + ], + [ + "▁underway", + -11.825901985168455 + ], + [ + "fa", + -11.826079368591309 + ], + [ + "▁organizer", + -11.826102256774902 + ], + [ + "▁evenings", + -11.826159477233888 + ], + [ + "▁Brad", + -11.826260566711426 + ], + [ + "▁Jimmy", + -11.826326370239258 + ], + [ + "▁3:", + -11.82633113861084 + ], + [ + "▁Tu", + -11.826360702514648 + ], + [ + "wear", + -11.826425552368164 + ], + [ + "06", + -11.826525688171388 + ], + [ + "▁Za", + -11.826644897460938 + ], + [ + "▁mice", + -11.826650619506836 + ], + [ + "ky", + -11.826693534851074 + ], + [ + "rie", + -11.82671070098877 + ], + [ + "▁united", + -11.826762199401855 + ], + [ + "▁coalition", + -11.826833724975586 + ], + [ + "▁interviewed", + -11.826905250549316 + ], + [ + "ap", + 
-11.826913833618164 + ], + [ + "▁Bishop", + -11.827102661132812 + ], + [ + "▁cushions", + -11.82723617553711 + ], + [ + "▁Senator", + -11.8273344039917 + ], + [ + "road", + -11.827574729919434 + ], + [ + "▁donor", + -11.827754020690918 + ], + [ + "▁Photoshop", + -11.827805519104004 + ], + [ + "shirts", + -11.827911376953123 + ], + [ + "▁limitation", + -11.827959060668944 + ], + [ + "▁Cruz", + -11.828289985656738 + ], + [ + "▁substantially", + -11.828304290771484 + ], + [ + "▁Pine", + -11.828370094299316 + ], + [ + "▁likelihood", + -11.828605651855469 + ], + [ + "▁drone", + -11.828670501708984 + ], + [ + "▁align", + -11.82878303527832 + ], + [ + "▁peanut", + -11.82879638671875 + ], + [ + "▁maple", + -11.828974723815918 + ], + [ + "▁shuttle", + -11.828990936279297 + ], + [ + "ification", + -11.82915210723877 + ], + [ + "▁molecular", + -11.82938289642334 + ], + [ + "▁Joy", + -11.829390525817873 + ], + [ + "▁complain", + -11.829476356506348 + ], + [ + "▁trapped", + -11.829510688781738 + ], + [ + "▁Guy", + -11.829517364501951 + ], + [ + "▁Champion", + -11.829716682434082 + ], + [ + "test", + -11.82995891571045 + ], + [ + "▁toddler", + -11.830048561096191 + ], + [ + "▁ver", + -11.830060005187988 + ], + [ + "▁phases", + -11.83029079437256 + ], + [ + "▁airports", + -11.830365180969238 + ], + [ + "▁multitude", + -11.830491065979004 + ], + [ + "ure", + -11.830851554870604 + ], + [ + "▁trainers", + -11.830857276916504 + ], + [ + "▁cracks", + -11.830975532531738 + ], + [ + "▁1500", + -11.831094741821287 + ], + [ + "▁Chain", + -11.831178665161133 + ], + [ + "rin", + -11.83119773864746 + ], + [ + "▁mentally", + -11.831573486328123 + ], + [ + "▁horizon", + -11.831609725952148 + ], + [ + "▁lengths", + -11.831645965576172 + ], + [ + "▁heel", + -11.831647872924805 + ], + [ + "▁borrow", + -11.831697463989258 + ], + [ + "craft", + -11.831785202026367 + ], + [ + "▁charitable", + -11.831896781921388 + ], + [ + "▁poet", + -11.832009315490724 + ], + [ + "▁incorporates", + -11.83227825164795 + ], + [ + "▁viruses", + -11.832594871520996 + ], + [ + "▁stating", + -11.832629203796388 + ], + [ + ")?", + -11.832879066467283 + ], + [ + "▁Career", + -11.832982063293455 + ], + [ + "▁Mills", + -11.833078384399414 + ], + [ + "▁prototype", + -11.833090782165527 + ], + [ + "ston", + -11.833151817321776 + ], + [ + "▁radar", + -11.833236694335938 + ], + [ + "▁exchanges", + -11.83335018157959 + ], + [ + "▁peel", + -11.833370208740234 + ], + [ + "▁canada", + -11.833450317382812 + ], + [ + "▁shocked", + -11.834168434143066 + ], + [ + "▁logs", + -11.834218978881836 + ], + [ + "▁Whenever", + -11.834284782409668 + ], + [ + "meter", + -11.834436416625977 + ], + [ + "▁Jam", + -11.834561347961426 + ], + [ + "▁sa", + -11.83465576171875 + ], + [ + "▁ghost", + -11.83469295501709 + ], + [ + "▁Going", + -11.834753036499023 + ], + [ + "▁Tourism", + -11.834823608398438 + ], + [ + "▁Belt", + -11.834920883178713 + ], + [ + "▁laughing", + -11.835023880004885 + ], + [ + "▁Surgery", + -11.83506202697754 + ], + [ + "eye", + -11.8351411819458 + ], + [ + "▁Er", + -11.83514404296875 + ], + [ + "ice", + -11.835199356079102 + ], + [ + "▁Vi", + -11.835371971130373 + ], + [ + "▁contributes", + -11.835546493530272 + ], + [ + "▁budgets", + -11.83590030670166 + ], + [ + "▁cons", + -11.836012840270996 + ], + [ + "▁sourced", + -11.836053848266602 + ], + [ + "inspired", + -11.836406707763672 + ], + [ + "▁Mine", + -11.836548805236816 + ], + [ + "▁Oliver", + -11.83665657043457 + ], + [ + "▁Display", + -11.836695671081545 + ], + [ + "▁teenagers", + 
-11.837028503417969 + ], + [ + "▁aids", + -11.837030410766602 + ], + [ + "Blue", + -11.837480545043944 + ], + [ + "▁78", + -11.837891578674316 + ], + [ + "▁Medicaid", + -11.837942123413086 + ], + [ + "Post", + -11.83808708190918 + ], + [ + "▁submissions", + -11.838123321533203 + ], + [ + "▁WILL", + -11.838160514831545 + ], + [ + "▁duck", + -11.83877944946289 + ], + [ + "because", + -11.838915824890137 + ], + [ + "▁learnt", + -11.838934898376465 + ], + [ + "oli", + -11.838967323303224 + ], + [ + "▁Growth", + -11.839052200317385 + ], + [ + "▁Oakland", + -11.83909511566162 + ], + [ + "▁Mario", + -11.839096069335938 + ], + [ + "▁grilled", + -11.83958911895752 + ], + [ + "▁wrist", + -11.839740753173828 + ], + [ + "▁07", + -11.839802742004396 + ], + [ + "▁Replacement", + -11.840075492858888 + ], + [ + "▁Mu", + -11.840083122253418 + ], + [ + "▁$30", + -11.840167999267578 + ], + [ + "▁tubes", + -11.840180397033691 + ], + [ + "▁trendy", + -11.840217590332031 + ], + [ + "▁torque", + -11.840229988098145 + ], + [ + "▁landlord", + -11.840316772460938 + ], + [ + "Among", + -11.840742111206056 + ], + [ + "2018", + -11.84075927734375 + ], + [ + "▁metro", + -11.840936660766602 + ], + [ + "▁deficit", + -11.8409423828125 + ], + [ + "▁siding", + -11.8409423828125 + ], + [ + "▁Psychology", + -11.841064453125 + ], + [ + "▁defining", + -11.84117031097412 + ], + [ + "▁tumor", + -11.841254234313965 + ], + [ + "▁customs", + -11.841376304626465 + ], + [ + "▁exceptionally", + -11.84144687652588 + ], + [ + "▁RM", + -11.84156608581543 + ], + [ + "▁debts", + -11.841581344604492 + ], + [ + "▁Roof", + -11.84170150756836 + ], + [ + "▁rugged", + -11.841843605041504 + ], + [ + "▁Cor", + -11.841853141784668 + ], + [ + "▁Muslims", + -11.84205722808838 + ], + [ + "▁Ana", + -11.842148780822754 + ], + [ + "ros", + -11.842562675476074 + ], + [ + "▁Jerry", + -11.84267234802246 + ], + [ + "▁Nancy", + -11.842798233032228 + ], + [ + "Mar", + -11.842918395996094 + ], + [ + "▁900", + -11.843152046203612 + ], + [ + "▁ugly", + -11.843185424804688 + ], + [ + "▁Manufacturing", + -11.843283653259276 + ], + [ + "bu", + -11.843737602233888 + ], + [ + "▁twitter", + -11.843871116638184 + ], + [ + "Air", + -11.844162940979004 + ], + [ + "▁grandfather", + -11.844277381896973 + ], + [ + "▁equality", + -11.84451961517334 + ], + [ + "▁darker", + -11.844606399536133 + ], + [ + "▁blown", + -11.84469223022461 + ], + [ + "▁comfy", + -11.844696044921877 + ], + [ + "▁fencing", + -11.844697952270508 + ], + [ + "▁upholstery", + -11.844704627990724 + ], + [ + "▁lifelong", + -11.844870567321776 + ], + [ + "▁Soon", + -11.844923973083496 + ], + [ + "▁classification", + -11.845169067382812 + ], + [ + "▁thriving", + -11.845170974731444 + ], + [ + "▁(\"", + -11.845233917236328 + ], + [ + "▁diy", + -11.845473289489746 + ], + [ + "▁isolation", + -11.84564971923828 + ], + [ + "▁roast", + -11.845656394958496 + ], + [ + "▁Hey", + -11.845947265625 + ], + [ + "▁creations", + -11.84596824645996 + ], + [ + "▁outfits", + -11.846030235290527 + ], + [ + "lon", + -11.846187591552734 + ], + [ + "AS", + -11.84619140625 + ], + [ + "▁70%", + -11.846288681030272 + ], + [ + "ati", + -11.84634017944336 + ], + [ + "bel", + -11.846622467041016 + ], + [ + "Follow", + -11.846692085266112 + ], + [ + "▁resin", + -11.846704483032228 + ], + [ + "going", + -11.846773147583008 + ], + [ + "▁indigenous", + -11.846787452697754 + ], + [ + "▁aroma", + -11.846887588500977 + ], + [ + "▁intermediate", + -11.846963882446287 + ], + [ + "▁Deck", + -11.847049713134766 + ], + [ + "▁Holland", + 
-11.847323417663574 + ], + [ + "▁drawers", + -11.847455024719238 + ], + [ + "▁leaks", + -11.847501754760742 + ], + [ + "▁Alzheimer", + -11.84754753112793 + ], + [ + "▁Terry", + -11.847759246826172 + ], + [ + "▁upside", + -11.847823143005373 + ], + [ + "ME", + -11.84786319732666 + ], + [ + "▁Bit", + -11.847930908203123 + ], + [ + "vin", + -11.84793758392334 + ], + [ + "▁scripts", + -11.848076820373535 + ], + [ + "▁125", + -11.848122596740724 + ], + [ + "live", + -11.848259925842283 + ], + [ + "ku", + -11.848283767700195 + ], + [ + "GS", + -11.848438262939451 + ], + [ + "▁brakes", + -11.84844207763672 + ], + [ + "▁dashboard", + -11.848491668701172 + ], + [ + "▁sampling", + -11.848503112792969 + ], + [ + "▁registry", + -11.848587989807127 + ], + [ + "▁Vehicle", + -11.848923683166504 + ], + [ + "▁Les", + -11.848958015441896 + ], + [ + "▁ridiculous", + -11.848958969116213 + ], + [ + "▁jack", + -11.849088668823242 + ], + [ + "▁Coordinator", + -11.849489212036133 + ], + [ + "▁worlds", + -11.849491119384766 + ], + [ + "▁fulfilling", + -11.849613189697266 + ], + [ + "black", + -11.849760055541992 + ], + [ + "▁toast", + -11.849909782409668 + ], + [ + "▁limiting", + -11.849918365478516 + ], + [ + "NO", + -11.849930763244627 + ], + [ + "▁acquiring", + -11.849953651428224 + ], + [ + "▁staple", + -11.849969863891602 + ], + [ + "▁Murphy", + -11.850526809692385 + ], + [ + "▁Stanley", + -11.85053539276123 + ], + [ + "▁altered", + -11.850576400756836 + ], + [ + "▁sticking", + -11.850714683532717 + ], + [ + "▁crushed", + -11.850741386413574 + ], + [ + "▁feeds", + -11.850763320922852 + ], + [ + "▁sensation", + -11.85083293914795 + ], + [ + "▁Rate", + -11.850882530212402 + ], + [ + "▁adaptation", + -11.850954055786133 + ], + [ + "Help", + -11.850956916809082 + ], + [ + "▁advertise", + -11.851012229919434 + ], + [ + "▁racial", + -11.851015090942385 + ], + [ + "▁Zero", + -11.851106643676758 + ], + [ + "▁vent", + -11.8511323928833 + ], + [ + "▁characterized", + -11.851332664489746 + ], + [ + "▁revealing", + -11.85150909423828 + ], + [ + "▁eliminated", + -11.851654052734377 + ], + [ + "▁sensible", + -11.851873397827148 + ], + [ + "▁forcing", + -11.851996421813965 + ], + [ + "▁rebuild", + -11.852240562438965 + ], + [ + "▁lifted", + -11.85234546661377 + ], + [ + "▁dealership", + -11.852375030517578 + ], + [ + "▁dies", + -11.85243034362793 + ], + [ + "▁mayor", + -11.85254192352295 + ], + [ + "▁downloads", + -11.852596282958984 + ], + [ + "▁workouts", + -11.85262680053711 + ], + [ + "▁98", + -11.852690696716309 + ], + [ + "▁Stories", + -11.852753639221191 + ], + [ + "▁Ore", + -11.852758407592772 + ], + [ + "▁protocols", + -11.852849960327148 + ], + [ + "▁opener", + -11.85286808013916 + ], + [ + "▁centered", + -11.85290241241455 + ], + [ + "▁elaborate", + -11.852953910827637 + ], + [ + "▁Laboratory", + -11.853045463562012 + ], + [ + "▁logging", + -11.85307502746582 + ], + [ + "▁Ashley", + -11.853084564208984 + ], + [ + "▁economies", + -11.853316307067873 + ], + [ + "▁100,000", + -11.853801727294922 + ], + [ + "▁exports", + -11.853857040405272 + ], + [ + "Business", + -11.853946685791016 + ], + [ + "▁faucet", + -11.854007720947266 + ], + [ + "▁freight", + -11.854058265686035 + ], + [ + "▁Prevention", + -11.854461669921877 + ], + [ + "nie", + -11.854578971862791 + ], + [ + "▁von", + -11.854608535766602 + ], + [ + "▁Regardless", + -11.854612350463867 + ], + [ + "▁bored", + -11.85476303100586 + ], + [ + "▁void", + -11.854902267456056 + ], + [ + "▁constraints", + -11.85496711730957 + ], + [ + "Play", + -11.855010032653809 + 
], + [ + "resistant", + -11.855073928833008 + ], + [ + "iz", + -11.855147361755373 + ], + [ + "▁coats", + -11.855292320251465 + ], + [ + "▁Bangladesh", + -11.855311393737791 + ], + [ + "TO", + -11.855359077453612 + ], + [ + "▁Luckily", + -11.85545253753662 + ], + [ + "▁graduating", + -11.855724334716797 + ], + [ + "▁equation", + -11.855748176574709 + ], + [ + "▁eternal", + -11.856085777282717 + ], + [ + "▁tier", + -11.856297492980955 + ], + [ + "▁Programs", + -11.856467247009276 + ], + [ + "▁metabolism", + -11.856525421142578 + ], + [ + "▁parenting", + -11.856876373291016 + ], + [ + "▁subsidiary", + -11.857172012329102 + ], + [ + "used", + -11.857282638549805 + ], + [ + "▁purely", + -11.85758113861084 + ], + [ + "city", + -11.857746124267578 + ], + [ + "▁fifteen", + -11.85810089111328 + ], + [ + "zer", + -11.858222007751465 + ], + [ + "▁Discovery", + -11.858294486999512 + ], + [ + "▁therapies", + -11.858358383178713 + ], + [ + "▁surfing", + -11.85866641998291 + ], + [ + "mont", + -11.858702659606934 + ], + [ + "▁stuffed", + -11.858856201171877 + ], + [ + "▁Same", + -11.858932495117188 + ], + [ + "saving", + -11.859051704406738 + ], + [ + "half", + -11.859136581420898 + ], + [ + "▁bracket", + -11.85927391052246 + ], + [ + "▁mins", + -11.859330177307127 + ], + [ + "▁spotlight", + -11.859403610229492 + ], + [ + "▁Apart", + -11.85973834991455 + ], + [ + "▁Comments", + -11.859981536865234 + ], + [ + "08", + -11.8603515625 + ], + [ + "▁hints", + -11.86036205291748 + ], + [ + "▁markers", + -11.860858917236328 + ], + [ + "▁distinguished", + -11.860891342163086 + ], + [ + "has", + -11.86094093322754 + ], + [ + "▁Parents", + -11.861019134521484 + ], + [ + "▁studios", + -11.861034393310549 + ], + [ + "via", + -11.861044883728027 + ], + [ + "▁sandwiches", + -11.861215591430664 + ], + [ + "▁merchant", + -11.861660957336426 + ], + [ + "▁liner", + -11.861668586730955 + ], + [ + "▁GDP", + -11.861778259277344 + ], + [ + "▁Tiger", + -11.861797332763672 + ], + [ + "▁bell", + -11.86183738708496 + ], + [ + "▁grains", + -11.86188793182373 + ], + [ + "▁tabs", + -11.862059593200684 + ], + [ + "del", + -11.86208438873291 + ], + [ + "▁reflecting", + -11.86226749420166 + ], + [ + "▁Tab", + -11.862276077270508 + ], + [ + "stock", + -11.862332344055176 + ], + [ + "▁resorts", + -11.862497329711914 + ], + [ + "▁Fu", + -11.862512588500977 + ], + [ + "▁specializing", + -11.86255168914795 + ], + [ + "▁terror", + -11.86257553100586 + ], + [ + "▁EC", + -11.862606048583984 + ], + [ + "▁furnace", + -11.862615585327148 + ], + [ + "▁teenager", + -11.862781524658203 + ], + [ + "▁consequence", + -11.863088607788086 + ], + [ + "▁Mode", + -11.86309051513672 + ], + [ + "▁menus", + -11.863222122192385 + ], + [ + "▁BY", + -11.863430976867676 + ], + [ + "▁cousin", + -11.86358642578125 + ], + [ + "▁loft", + -11.863701820373535 + ], + [ + "▁immense", + -11.863715171813965 + ], + [ + "▁Wildlife", + -11.86400032043457 + ], + [ + "▁webinar", + -11.86409854888916 + ], + [ + "▁Compare", + -11.864250183105469 + ], + [ + "▁Elite", + -11.864316940307615 + ], + [ + "▁premiums", + -11.864402770996094 + ], + [ + "▁practitioner", + -11.864405632019045 + ], + [ + "▁forty", + -11.864524841308594 + ], + [ + "▁avail", + -11.864543914794922 + ], + [ + "▁spelling", + -11.864766120910645 + ], + [ + "▁obesity", + -11.864930152893066 + ], + [ + "▁Walter", + -11.864980697631836 + ], + [ + "▁heights", + -11.86507511138916 + ], + [ + "▁india", + -11.865094184875488 + ], + [ + "itis", + -11.86520767211914 + ], + [ + "▁Expert", + -11.865633010864258 + ], + [ + 
"▁Mason", + -11.866086959838867 + ], + [ + "▁resolutions", + -11.866129875183104 + ], + [ + "42", + -11.866172790527344 + ], + [ + "▁ET", + -11.86623764038086 + ], + [ + "▁60%", + -11.86635971069336 + ], + [ + "▁Veterans", + -11.866392135620115 + ], + [ + "▁Flight", + -11.866430282592772 + ], + [ + "▁Tap", + -11.866578102111816 + ], + [ + "▁headphones", + -11.866860389709473 + ], + [ + "▁Guest", + -11.866989135742188 + ], + [ + "▁tweet", + -11.867000579833984 + ], + [ + "▁aviation", + -11.867368698120115 + ], + [ + "▁considerations", + -11.86750316619873 + ], + [ + "▁privately", + -11.867586135864258 + ], + [ + "▁disagree", + -11.867709159851074 + ], + [ + "▁EPA", + -11.86778163909912 + ], + [ + "▁tragedy", + -11.867855072021484 + ], + [ + "burn", + -11.86791706085205 + ], + [ + "▁Barry", + -11.867964744567873 + ], + [ + "▁infant", + -11.868023872375488 + ], + [ + "▁Jet", + -11.86823558807373 + ], + [ + "▁Lock", + -11.868401527404783 + ], + [ + "▁Finland", + -11.868486404418944 + ], + [ + "▁stitch", + -11.868562698364258 + ], + [ + "▁vaccine", + -11.868586540222168 + ], + [ + "▁stems", + -11.868606567382812 + ], + [ + "Yeah", + -11.86864948272705 + ], + [ + "▁soy", + -11.868927001953123 + ], + [ + "▁pillows", + -11.869073867797852 + ], + [ + "▁Hell", + -11.869534492492676 + ], + [ + "▁Brain", + -11.869678497314451 + ], + [ + "▁pots", + -11.869950294494627 + ], + [ + "▁attitudes", + -11.870346069335938 + ], + [ + "online", + -11.870455741882324 + ], + [ + "▁joins", + -11.870509147644045 + ], + [ + "▁Eagles", + -11.870709419250488 + ], + [ + "▁Players", + -11.870779037475586 + ], + [ + "▁foremost", + -11.870800018310549 + ], + [ + "▁Billy", + -11.871020317077637 + ], + [ + "Yet", + -11.871430397033691 + ], + [ + "▁Benjamin", + -11.87157917022705 + ], + [ + "Speaking", + -11.87159824371338 + ], + [ + "▁guards", + -11.871899604797363 + ], + [ + "bus", + -11.872136116027832 + ], + [ + "powered", + -11.872340202331545 + ], + [ + "▁amid", + -11.872357368469238 + ], + [ + "▁rust", + -11.872368812561035 + ], + [ + "▁stimulate", + -11.872369766235352 + ], + [ + "▁Laser", + -11.87240982055664 + ], + [ + "▁Argentina", + -11.872577667236328 + ], + [ + "▁Sets", + -11.872601509094238 + ], + [ + "▁Discussion", + -11.872751235961914 + ], + [ + "▁chill", + -11.872790336608888 + ], + [ + "▁Ok", + -11.87285327911377 + ], + [ + "▁rocket", + -11.873088836669922 + ], + [ + "sey", + -11.873124122619627 + ], + [ + "▁passive", + -11.87314224243164 + ], + [ + "rated", + -11.873231887817385 + ], + [ + "▁unclear", + -11.87333869934082 + ], + [ + "▁packaged", + -11.873390197753906 + ], + [ + "▁Ted", + -11.87342357635498 + ], + [ + "▁boarding", + -11.87366008758545 + ], + [ + "both", + -11.873993873596191 + ], + [ + "▁$8", + -11.874029159545898 + ], + [ + "▁Lanka", + -11.874090194702148 + ], + [ + "▁proactive", + -11.874496459960938 + ], + [ + "▁incoming", + -11.874499320983888 + ], + [ + "▁commitments", + -11.874571800231934 + ], + [ + "su", + -11.87465000152588 + ], + [ + "▁pendant", + -11.875176429748535 + ], + [ + "▁expectation", + -11.875343322753906 + ], + [ + "▁evaluating", + -11.875351905822754 + ], + [ + "▁Nebraska", + -11.875486373901367 + ], + [ + "▁Haven", + -11.875703811645508 + ], + [ + "pay", + -11.875732421875 + ], + [ + "▁bandwidth", + -11.875829696655272 + ], + [ + "▁1800", + -11.876151084899902 + ], + [ + "▁Pizza", + -11.87630844116211 + ], + [ + "▁Flooring", + -11.876666069030762 + ], + [ + "▁inquiries", + -11.876680374145508 + ], + [ + "▁Wholesale", + -11.87677764892578 + ], + [ + "▁headaches", + 
-11.876837730407717 + ], + [ + "Ex", + -11.876908302307127 + ], + [ + "▁Idaho", + -11.876928329467772 + ], + [ + "▁sticky", + -11.87704086303711 + ], + [ + "▁dash", + -11.8771390914917 + ], + [ + "▁Rain", + -11.87714958190918 + ], + [ + "▁conclude", + -11.877342224121094 + ], + [ + "▁74", + -11.877729415893556 + ], + [ + "▁UC", + -11.878085136413574 + ], + [ + "▁pill", + -11.878140449523926 + ], + [ + "▁Relations", + -11.878474235534668 + ], + [ + "▁colourful", + -11.878477096557615 + ], + [ + "▁04", + -11.878626823425291 + ], + [ + "▁flavours", + -11.878681182861328 + ], + [ + "▁Mortgage", + -11.87883758544922 + ], + [ + "▁Para", + -11.87897777557373 + ], + [ + "▁colleague", + -11.879358291625977 + ], + [ + "Working", + -11.879469871520996 + ], + [ + "▁similarly", + -11.879664421081545 + ], + [ + "▁arch", + -11.879877090454102 + ], + [ + "Bar", + -11.879913330078123 + ], + [ + "Think", + -11.880115509033203 + ], + [ + "▁Lin", + -11.880123138427734 + ], + [ + "▁Blu", + -11.880196571350098 + ], + [ + "▁Mor", + -11.880301475524902 + ], + [ + "▁voucher", + -11.880303382873535 + ], + [ + "tec", + -11.880417823791504 + ], + [ + "▁Honor", + -11.8804292678833 + ], + [ + "▁clicks", + -11.880477905273438 + ], + [ + "▁envelope", + -11.88056755065918 + ], + [ + "▁Tournament", + -11.880593299865724 + ], + [ + "▁macro", + -11.880624771118164 + ], + [ + "31", + -11.880828857421877 + ], + [ + "oc", + -11.88092803955078 + ], + [ + "▁Geo", + -11.881009101867676 + ], + [ + "▁Juan", + -11.881189346313477 + ], + [ + "▁Julie", + -11.881210327148438 + ], + [ + "user", + -11.881234169006348 + ], + [ + "▁threatening", + -11.881369590759276 + ], + [ + "▁Ideal", + -11.881393432617188 + ], + [ + "cho", + -11.881404876708984 + ], + [ + "group", + -11.881576538085938 + ], + [ + "▁MT", + -11.881665229797363 + ], + [ + "▁Des", + -11.881950378417969 + ], + [ + "▁Teachers", + -11.88217067718506 + ], + [ + "▁Wash", + -11.882283210754396 + ], + [ + "▁Lauren", + -11.882306098937988 + ], + [ + "pi", + -11.88234043121338 + ], + [ + "▁defending", + -11.882508277893066 + ], + [ + "▁yeast", + -11.882672309875488 + ], + [ + "Anyone", + -11.882752418518066 + ], + [ + "02", + -11.883017539978027 + ], + [ + "▁utmost", + -11.883108139038086 + ], + [ + "door", + -11.88319969177246 + ], + [ + "▁token", + -11.883220672607422 + ], + [ + "Super", + -11.883380889892578 + ], + [ + "HP", + -11.883434295654297 + ], + [ + "▁worthwhile", + -11.883649826049805 + ], + [ + "ens", + -11.883816719055176 + ], + [ + "▁accelerate", + -11.883865356445312 + ], + [ + "▁analyzing", + -11.884024620056152 + ], + [ + "▁Yorkshire", + -11.884181022644045 + ], + [ + "▁demographic", + -11.88442325592041 + ], + [ + "▁id", + -11.884686470031738 + ], + [ + "▁5,000", + -11.884831428527832 + ], + [ + "istic", + -11.88497543334961 + ], + [ + "▁cardiovascular", + -11.885088920593262 + ], + [ + "▁belonging", + -11.885310173034668 + ], + [ + "▁genius", + -11.885324478149414 + ], + [ + "▁1.2", + -11.88555145263672 + ], + [ + "▁Desktop", + -11.885716438293455 + ], + [ + "▁Philip", + -11.885751724243164 + ], + [ + "▁compatibility", + -11.885812759399414 + ], + [ + "ground", + -11.88597011566162 + ], + [ + "▁Turner", + -11.886013984680176 + ], + [ + "ori", + -11.886220932006836 + ], + [ + "South", + -11.886404991149902 + ], + [ + "▁Testing", + -11.886409759521484 + ], + [ + "▁persistent", + -11.886666297912598 + ], + [ + "▁Perth", + -11.886848449707031 + ], + [ + "▁blues", + -11.886850357055664 + ], + [ + "▁NSW", + -11.887056350708008 + ], + [ + "▁nonetheless", + 
-11.887076377868652 + ], + [ + "▁GO", + -11.887097358703612 + ], + [ + "▁Hire", + -11.887255668640137 + ], + [ + "▁Expo", + -11.887556076049805 + ], + [ + "▁Solid", + -11.887666702270508 + ], + [ + "▁overly", + -11.887672424316406 + ], + [ + "▁Milan", + -11.887904167175291 + ], + [ + "▁Generally", + -11.887933731079102 + ], + [ + "▁charities", + -11.88807773590088 + ], + [ + "▁classrooms", + -11.888381004333496 + ], + [ + "▁honors", + -11.888445854187012 + ], + [ + "▁firmware", + -11.88869857788086 + ], + [ + "▁Rental", + -11.888769149780272 + ], + [ + "ris", + -11.888930320739746 + ], + [ + "▁beers", + -11.889019012451172 + ], + [ + "▁Profile", + -11.889052391052246 + ], + [ + "▁ballot", + -11.88906955718994 + ], + [ + "▁distributor", + -11.889154434204102 + ], + [ + "▁Nor", + -11.889410018920898 + ], + [ + "▁ONLY", + -11.889763832092283 + ], + [ + "▁Beer", + -11.889925003051758 + ], + [ + "▁Faith", + -11.889997482299805 + ], + [ + "▁Internal", + -11.89004898071289 + ], + [ + "ua", + -11.890067100524902 + ], + [ + "▁trillion", + -11.89006805419922 + ], + [ + "Long", + -11.890325546264648 + ], + [ + "▁lol", + -11.890542984008787 + ], + [ + "▁nevertheless", + -11.890565872192385 + ], + [ + "▁Maps", + -11.89065170288086 + ], + [ + "▁picturesque", + -11.890711784362791 + ], + [ + "ova", + -11.89090061187744 + ], + [ + "▁defines", + -11.891127586364746 + ], + [ + "▁elevation", + -11.891237258911133 + ], + [ + "▁ventilation", + -11.891281127929688 + ], + [ + "▁Swift", + -11.891302108764648 + ], + [ + "▁urged", + -11.891335487365724 + ], + [ + "April", + -11.89186191558838 + ], + [ + "▁Walmart", + -11.892016410827637 + ], + [ + "▁democratic", + -11.892026901245115 + ], + [ + "▁opted", + -11.892087936401367 + ], + [ + "▁overlooked", + -11.89209270477295 + ], + [ + "NA", + -11.89217758178711 + ], + [ + "▁triggered", + -11.892455101013184 + ], + [ + "▁rendering", + -11.89254379272461 + ], + [ + "▁Fame", + -11.89260196685791 + ], + [ + "▁recruit", + -11.892618179321287 + ], + [ + "▁authentication", + -11.892658233642578 + ], + [ + "▁remainder", + -11.892727851867676 + ], + [ + "▁investigated", + -11.892925262451172 + ], + [ + "▁flew", + -11.893075942993164 + ], + [ + "▁implant", + -11.89307689666748 + ], + [ + "▁crude", + -11.893143653869627 + ], + [ + "▁Deal", + -11.893515586853027 + ], + [ + "▁regulate", + -11.89357566833496 + ], + [ + "▁stepping", + -11.893675804138184 + ], + [ + "▁expose", + -11.894048690795898 + ], + [ + "ich", + -11.894108772277832 + ], + [ + "▁squash", + -11.894186973571776 + ], + [ + "▁Railway", + -11.894217491149902 + ], + [ + "▁battles", + -11.894336700439451 + ], + [ + "▁dosage", + -11.89463710784912 + ], + [ + "▁ferry", + -11.894672393798828 + ], + [ + "▁relies", + -11.89516544342041 + ], + [ + "Art", + -11.89517879486084 + ], + [ + "▁sanctions", + -11.89519214630127 + ], + [ + "▁robots", + -11.895462989807127 + ], + [ + "▁angel", + -11.895512580871582 + ], + [ + "▁storytelling", + -11.89560890197754 + ], + [ + "▁journalism", + -11.895682334899902 + ], + [ + "▁Bel", + -11.895705223083496 + ], + [ + "▁herbal", + -11.895709991455078 + ], + [ + "▁Tel", + -11.895716667175291 + ], + [ + "▁Sierra", + -11.89596462249756 + ], + [ + "▁Membership", + -11.896039962768556 + ], + [ + "stein", + -11.896044731140137 + ], + [ + "▁grammar", + -11.896081924438477 + ], + [ + "▁Hunt", + -11.896098136901855 + ], + [ + "▁Settings", + -11.896184921264648 + ], + [ + "▁aluminium", + -11.896240234375 + ], + [ + "san", + -11.89626693725586 + ], + [ + "https", + -11.896272659301758 + ], + [ + 
"▁residency", + -11.896416664123535 + ], + [ + "Link", + -11.896458625793455 + ], + [ + "Pay", + -11.896852493286133 + ], + [ + "fe", + -11.896875381469728 + ], + [ + "▁assuming", + -11.896992683410645 + ], + [ + "▁conviction", + -11.897029876708984 + ], + [ + "▁origins", + -11.897141456604004 + ], + [ + "although", + -11.89724063873291 + ], + [ + "▁mate", + -11.897321701049805 + ], + [ + "▁buzz", + -11.897347450256348 + ], + [ + "▁trio", + -11.89746379852295 + ], + [ + "▁senses", + -11.897486686706545 + ], + [ + "▁quarters", + -11.897496223449709 + ], + [ + "▁disappear", + -11.897528648376465 + ], + [ + "▁litter", + -11.897696495056152 + ], + [ + "▁Campaign", + -11.897719383239746 + ], + [ + "dimensional", + -11.897749900817873 + ], + [ + "bn", + -11.897765159606934 + ], + [ + "▁sponsorship", + -11.898148536682127 + ], + [ + "▁Cha", + -11.898173332214355 + ], + [ + "▁stroll", + -11.898333549499512 + ], + [ + "▁AB", + -11.898386001586914 + ], + [ + "▁Publishing", + -11.898499488830566 + ], + [ + "▁steak", + -11.898514747619627 + ], + [ + "▁dancers", + -11.898815155029297 + ], + [ + "▁kindly", + -11.898837089538574 + ], + [ + "▁Grinding", + -11.898845672607422 + ], + [ + "any", + -11.8988618850708 + ], + [ + "▁spite", + -11.899078369140623 + ], + [ + "USA", + -11.899395942687988 + ], + [ + "▁sail", + -11.899420738220217 + ], + [ + "▁thru", + -11.899746894836426 + ], + [ + "▁springs", + -11.899785995483398 + ], + [ + "▁corrosion", + -11.899922370910645 + ], + [ + "▁vocabulary", + -11.900099754333496 + ], + [ + "▁dock", + -11.900238990783691 + ], + [ + "▁tales", + -11.90037441253662 + ], + [ + "▁poses", + -11.900551795959473 + ], + [ + "▁comics", + -11.900588989257812 + ], + [ + "▁Foods", + -11.901290893554688 + ], + [ + "tes", + -11.901698112487791 + ], + [ + "▁Commonwealth", + -11.901844024658203 + ], + [ + "▁specs", + -11.901902198791504 + ], + [ + "▁Healthy", + -11.902016639709473 + ], + [ + "ase", + -11.902132987976074 + ], + [ + "▁advertisements", + -11.902139663696287 + ], + [ + "▁jersey", + -11.902151107788086 + ], + [ + "▁render", + -11.902323722839355 + ], + [ + "▁flies", + -11.90244960784912 + ], + [ + "▁exceptions", + -11.903356552124023 + ], + [ + "▁Ra", + -11.90354347229004 + ], + [ + "▁Historic", + -11.903565406799316 + ], + [ + "▁Delaware", + -11.903627395629885 + ], + [ + "President", + -11.903671264648438 + ], + [ + "▁critically", + -11.90388011932373 + ], + [ + "▁3,000", + -11.904030799865724 + ], + [ + "around", + -11.904041290283203 + ], + [ + "oni", + -11.90418815612793 + ], + [ + "▁messy", + -11.904396057128906 + ], + [ + "▁kitchens", + -11.904440879821776 + ], + [ + "▁turnover", + -11.90471649169922 + ], + [ + "▁Command", + -11.90483856201172 + ], + [ + "▁Listen", + -11.905405044555664 + ], + [ + "degree", + -11.905508995056152 + ], + [ + "▁carved", + -11.90553092956543 + ], + [ + "▁conserve", + -11.905609130859377 + ], + [ + "▁analyses", + -11.90568733215332 + ], + [ + "Sorry", + -11.905750274658203 + ], + [ + "ians", + -11.90615940093994 + ], + [ + "▁ideally", + -11.906205177307127 + ], + [ + "▁landmark", + -11.906323432922363 + ], + [ + "▁printers", + -11.906439781188965 + ], + [ + "focused", + -11.90646266937256 + ], + [ + "ric", + -11.906481742858888 + ], + [ + "▁Bat", + -11.906855583190918 + ], + [ + "▁inspect", + -11.906885147094728 + ], + [ + "▁Dinner", + -11.907516479492188 + ], + [ + "dar", + -11.90766143798828 + ], + [ + "▁2005,", + -11.907825469970703 + ], + [ + "▁Nokia", + -11.90809726715088 + ], + [ + "85", + -11.908232688903809 + ], + [ + "▁calculation", 
+ -11.90833568572998 + ], + [ + "▁readings", + -11.908407211303713 + ], + [ + "65", + -11.908451080322266 + ], + [ + "▁Bonus", + -11.908673286437988 + ], + [ + "▁allocated", + -11.908761978149414 + ], + [ + "▁Neil", + -11.909042358398438 + ], + [ + "▁gig", + -11.90923309326172 + ], + [ + "▁Someone", + -11.90931510925293 + ], + [ + "▁yields", + -11.909493446350098 + ], + [ + "▁Abu", + -11.909527778625488 + ], + [ + "▁drums", + -11.909595489501951 + ], + [ + "▁Ba", + -11.909675598144531 + ], + [ + "▁AA", + -11.909680366516112 + ], + [ + "▁forefront", + -11.909728050231934 + ], + [ + "▁wasted", + -11.909735679626465 + ], + [ + "▁illustrate", + -11.909790992736816 + ], + [ + "generation", + -11.909832000732422 + ], + [ + "▁Chapel", + -11.90984058380127 + ], + [ + "▁fats", + -11.90994644165039 + ], + [ + "▁crowds", + -11.910033226013184 + ], + [ + "▁Nova", + -11.910197257995604 + ], + [ + "ow", + -11.910234451293944 + ], + [ + "▁spinach", + -11.910256385803224 + ], + [ + "▁Notes", + -11.910539627075195 + ], + [ + "ify", + -11.910622596740724 + ], + [ + "hole", + -11.910655975341797 + ], + [ + "▁Shore", + -11.910687446594238 + ], + [ + "▁shaft", + -11.910689353942873 + ], + [ + "cat", + -11.910691261291504 + ], + [ + "▁Cake", + -11.910844802856444 + ], + [ + "▁Truth", + -11.91100788116455 + ], + [ + "▁Fill", + -11.911185264587402 + ], + [ + "▁NCAA", + -11.911283493041992 + ], + [ + "▁acted", + -11.91128921508789 + ], + [ + "▁Tyler", + -11.911297798156738 + ], + [ + "▁amateur", + -11.91149616241455 + ], + [ + "▁Copper", + -11.911566734313965 + ], + [ + "▁innings", + -11.911602020263672 + ], + [ + "▁focal", + -11.91172218322754 + ], + [ + "shop", + -11.912005424499512 + ], + [ + "tone", + -11.912137985229492 + ], + [ + "Box", + -11.912250518798828 + ], + [ + "▁notably", + -11.912469863891602 + ], + [ + "ona", + -11.912487983703612 + ], + [ + "▁fax", + -11.912725448608398 + ], + [ + "▁retained", + -11.912741661071776 + ], + [ + "▁dementia", + -11.9127836227417 + ], + [ + "▁endeavor", + -11.91297721862793 + ], + [ + "?)", + -11.913101196289062 + ], + [ + "▁Poker", + -11.913362503051758 + ], + [ + "▁Directory", + -11.913444519042969 + ], + [ + "▁Path", + -11.91348648071289 + ], + [ + "▁entertain", + -11.913511276245115 + ], + [ + "▁None", + -11.91359043121338 + ], + [ + "▁crying", + -11.913653373718262 + ], + [ + "12.", + -11.913663864135742 + ], + [ + "▁Skills", + -11.913726806640623 + ], + [ + "▁logos", + -11.913768768310549 + ], + [ + "▁Political", + -11.913774490356444 + ], + [ + "▁Owner", + -11.913838386535645 + ], + [ + "▁Conditions", + -11.913946151733398 + ], + [ + "▁unions", + -11.91395092010498 + ], + [ + "▁transported", + -11.914003372192385 + ], + [ + "▁calculator", + -11.914111137390137 + ], + [ + "▁PE", + -11.91451358795166 + ], + [ + "▁Wikipedia", + -11.914545059204102 + ], + [ + "share", + -11.91457462310791 + ], + [ + "▁Ki", + -11.914589881896973 + ], + [ + "▁streak", + -11.914673805236816 + ], + [ + "▁Statistics", + -11.91469383239746 + ], + [ + "▁firing", + -11.91481590270996 + ], + [ + "▁Roofing", + -11.91486358642578 + ], + [ + "▁nest", + -11.91494846343994 + ], + [ + "▁remotely", + -11.91514492034912 + ], + [ + "▁enhances", + -11.915349960327148 + ], + [ + "▁guiding", + -11.915853500366213 + ], + [ + "▁accompany", + -11.91585922241211 + ], + [ + "case", + -11.91600227355957 + ], + [ + "▁praying", + -11.916120529174805 + ], + [ + "▁paradise", + -11.916327476501465 + ], + [ + "Sign", + -11.91697883605957 + ], + [ + "▁unveiled", + -11.916985511779783 + ], + [ + "▁procurement", + 
-11.917094230651855 + ], + [ + "▁Fly", + -11.91721248626709 + ], + [ + "▁tolerance", + -11.917243003845217 + ], + [ + "▁Hero", + -11.917277336120604 + ], + [ + "▁expressions", + -11.91730785369873 + ], + [ + "▁BB", + -11.917335510253906 + ], + [ + "▁coil", + -11.917449951171877 + ], + [ + "▁favorable", + -11.917595863342283 + ], + [ + "▁lodge", + -11.917646408081056 + ], + [ + "▁Fit", + -11.917946815490724 + ], + [ + "▁fatal", + -11.917986869812012 + ], + [ + "▁Various", + -11.918160438537598 + ], + [ + "▁governing", + -11.918220520019531 + ], + [ + "▁intact", + -11.918441772460938 + ], + [ + "AD", + -11.918496131896973 + ], + [ + "▁invisible", + -11.91860294342041 + ], + [ + "▁Fri", + -11.918741226196287 + ], + [ + "Additionally", + -11.91896629333496 + ], + [ + "▁probability", + -11.918974876403809 + ], + [ + "MM", + -11.91928482055664 + ], + [ + "▁Nutrition", + -11.919376373291016 + ], + [ + "▁Dual", + -11.919448852539062 + ], + [ + "▁Unless", + -11.919462203979492 + ], + [ + "Made", + -11.91983127593994 + ], + [ + "▁Mal", + -11.919917106628418 + ], + [ + "▁Running", + -11.919928550720217 + ], + [ + "Such", + -11.919974327087402 + ], + [ + "▁Uber", + -11.920291900634766 + ], + [ + "▁wars", + -11.92037868499756 + ], + [ + "▁portraits", + -11.920416831970217 + ], + [ + "▁noticeable", + -11.920666694641112 + ], + [ + "▁fuse", + -11.920682907104492 + ], + [ + "▁marker", + -11.920839309692385 + ], + [ + "▁Flow", + -11.92096710205078 + ], + [ + "▁deadlines", + -11.921102523803713 + ], + [ + "▁asthma", + -11.921184539794922 + ], + [ + "▁terrorism", + -11.92158317565918 + ], + [ + "▁plugins", + -11.921686172485352 + ], + [ + "▁Depot", + -11.92187786102295 + ], + [ + "SO", + -11.922026634216309 + ], + [ + "▁trustworthy", + -11.9220609664917 + ], + [ + "▁1994", + -11.922256469726562 + ], + [ + "▁Medium", + -11.922772407531738 + ], + [ + "El", + -11.92277717590332 + ], + [ + "▁guilt", + -11.922786712646484 + ], + [ + "07", + -11.922813415527344 + ], + [ + "▁gifted", + -11.92306137084961 + ], + [ + "▁sparkling", + -11.923087120056152 + ], + [ + "▁2004.", + -11.923283576965332 + ], + [ + "▁knit", + -11.923300743103027 + ], + [ + "▁ranch", + -11.92331886291504 + ], + [ + "▁Pretty", + -11.923439979553224 + ], + [ + "▁aboard", + -11.923469543457031 + ], + [ + "▁scales", + -11.92349624633789 + ], + [ + "Am", + -11.92353630065918 + ], + [ + "Getting", + -11.923675537109377 + ], + [ + "▁Flowers", + -11.923785209655762 + ], + [ + "▁blades", + -11.923797607421877 + ], + [ + "Paul", + -11.923910140991213 + ], + [ + "▁administered", + -11.92398166656494 + ], + [ + "▁Anniversary", + -11.924083709716797 + ], + [ + "Second", + -11.924333572387695 + ], + [ + "▁92", + -11.924402236938477 + ], + [ + "48", + -11.924428939819336 + ], + [ + "▁verse", + -11.924429893493652 + ], + [ + "▁rays", + -11.924562454223633 + ], + [ + "performance", + -11.924684524536133 + ], + [ + "gmail", + -11.92483901977539 + ], + [ + "▁bolt", + -11.924861907958984 + ], + [ + "▁compliment", + -11.925049781799316 + ], + [ + "▁scalp", + -11.925088882446287 + ], + [ + "hard", + -11.925395965576172 + ], + [ + "▁squeeze", + -11.925626754760742 + ], + [ + "shi", + -11.925719261169434 + ], + [ + "▁constitute", + -11.925895690917969 + ], + [ + "▁organs", + -11.926027297973633 + ], + [ + "pad", + -11.926063537597656 + ], + [ + "acre", + -11.92608642578125 + ], + [ + "▁Cole", + -11.926207542419434 + ], + [ + "▁stereo", + -11.926225662231444 + ], + [ + "▁15-", + -11.92623805999756 + ], + [ + "▁gravel", + -11.926239013671877 + ], + [ + "▁Chevrolet", + 
-11.92629051208496 + ], + [ + "▁arthritis", + -11.92727756500244 + ], + [ + "▁fork", + -11.927397727966309 + ], + [ + "▁Chen", + -11.927657127380373 + ], + [ + "▁hottest", + -11.927812576293944 + ], + [ + "▁gadgets", + -11.92806339263916 + ], + [ + "▁transforming", + -11.928167343139648 + ], + [ + "▁03", + -11.928400039672852 + ], + [ + "▁honesty", + -11.928438186645508 + ], + [ + "▁vulnerability", + -11.928793907165527 + ], + [ + "small", + -11.928970336914062 + ], + [ + "▁enters", + -11.928980827331545 + ], + [ + "▁retaining", + -11.928982734680176 + ], + [ + "▁absorbed", + -11.929036140441896 + ], + [ + "▁Poly", + -11.92908763885498 + ], + [ + "▁undergo", + -11.929274559020996 + ], + [ + "▁bullet", + -11.929287910461426 + ], + [ + "▁fashionable", + -11.929383277893066 + ], + [ + "▁owns", + -11.929429054260254 + ], + [ + "▁oriented", + -11.92976188659668 + ], + [ + "hu", + -11.929876327514648 + ], + [ + "▁bargain", + -11.930180549621582 + ], + [ + "▁cache", + -11.930295944213867 + ], + [ + "▁0.", + -11.930327415466309 + ], + [ + "▁crushers", + -11.930651664733888 + ], + [ + "▁Cotton", + -11.930672645568848 + ], + [ + "FM", + -11.930776596069336 + ], + [ + "▁french", + -11.930809020996094 + ], + [ + "▁Address", + -11.930825233459473 + ], + [ + "▁feasible", + -11.930856704711914 + ], + [ + "▁indoors", + -11.930953979492188 + ], + [ + "▁woven", + -11.931048393249512 + ], + [ + "▁masters", + -11.931151390075684 + ], + [ + "▁Mat", + -11.931197166442873 + ], + [ + "▁AN", + -11.931451797485352 + ], + [ + "▁Todd", + -11.931618690490724 + ], + [ + "▁Blues", + -11.931729316711426 + ], + [ + "▁Warner", + -11.93185806274414 + ], + [ + "▁implies", + -11.93185806274414 + ], + [ + "▁Duty", + -11.932045936584473 + ], + [ + "City", + -11.932816505432127 + ], + [ + "▁cryptocurrency", + -11.93289852142334 + ], + [ + "▁Cathedral", + -11.933226585388184 + ], + [ + "▁notebook", + -11.933462142944336 + ], + [ + "▁quietly", + -11.933632850646973 + ], + [ + "▁siblings", + -11.933643341064451 + ], + [ + "▁Berkeley", + -11.933770179748535 + ], + [ + "▁Theory", + -11.933856010437012 + ], + [ + "▁recession", + -11.934082984924316 + ], + [ + "▁Powder", + -11.934149742126465 + ], + [ + "▁planes", + -11.934162139892578 + ], + [ + "▁Pour", + -11.934409141540527 + ], + [ + "▁fog", + -11.934524536132812 + ], + [ + "▁84", + -11.934539794921877 + ], + [ + "▁laptops", + -11.934672355651855 + ], + [ + "▁fonts", + -11.934906005859377 + ], + [ + "▁Calgary", + -11.935012817382812 + ], + [ + "▁Jamie", + -11.93506908416748 + ], + [ + "▁champions", + -11.935235977172852 + ], + [ + "▁SEC", + -11.935264587402344 + ], + [ + "▁valuation", + -11.935381889343262 + ], + [ + "▁survivors", + -11.935436248779297 + ], + [ + "▁smiling", + -11.935506820678713 + ], + [ + "▁roses", + -11.935744285583496 + ], + [ + "▁shipment", + -11.935786247253418 + ], + [ + "▁Educational", + -11.93580436706543 + ], + [ + "though", + -11.935833930969238 + ], + [ + "▁hormone", + -11.93593406677246 + ], + [ + "▁notch", + -11.936050415039062 + ], + [ + "▁planner", + -11.936086654663086 + ], + [ + "▁canal", + -11.936135292053224 + ], + [ + "▁proxy", + -11.93614101409912 + ], + [ + "▁whatsoever", + -11.936161994934082 + ], + [ + "▁glucose", + -11.936326026916504 + ], + [ + "▁Carbon", + -11.936477661132812 + ], + [ + "▁genres", + -11.93663215637207 + ], + [ + "Enter", + -11.936686515808104 + ], + [ + "▁Automatic", + -11.936689376831056 + ], + [ + "▁Jefferson", + -11.936765670776367 + ], + [ + "▁hazards", + -11.936859130859377 + ], + [ + "mp", + -11.93715763092041 + ], 
+ [ + "▁captures", + -11.9375 + ], + [ + "▁Dam", + -11.93752670288086 + ], + [ + "▁illnesses", + -11.937559127807615 + ], + [ + "▁hazardous", + -11.937631607055664 + ], + [ + "▁Doug", + -11.937806129455566 + ], + [ + "▁Latest", + -11.93785285949707 + ], + [ + "▁Stanford", + -11.93812656402588 + ], + [ + "▁Tai", + -11.93812656402588 + ], + [ + "▁accompanying", + -11.938284873962402 + ], + [ + "▁tunes", + -11.938302040100098 + ], + [ + "▁unfortunate", + -11.938409805297852 + ], + [ + "▁71", + -11.938419342041016 + ], + [ + "▁crafting", + -11.938430786132812 + ], + [ + "▁Egyptian", + -11.938684463500977 + ], + [ + "Le", + -11.938687324523926 + ], + [ + "▁kilometers", + -11.938712120056152 + ], + [ + "▁Brandon", + -11.93911838531494 + ], + [ + "▁HS", + -11.939332008361816 + ], + [ + "▁balancing", + -11.939334869384766 + ], + [ + "▁Yo", + -11.939398765563965 + ], + [ + "▁abortion", + -11.93950653076172 + ], + [ + "▁bleeding", + -11.9395112991333 + ], + [ + "53", + -11.939592361450195 + ], + [ + "▁toilets", + -11.939696311950684 + ], + [ + "▁advertisement", + -11.939709663391112 + ], + [ + "▁irrigation", + -11.940062522888184 + ], + [ + "▁Lakes", + -11.940306663513184 + ], + [ + "lie", + -11.940338134765623 + ], + [ + "▁Bee", + -11.940503120422363 + ], + [ + "▁Jaw", + -11.940754890441896 + ], + [ + "▁Avoid", + -11.94077205657959 + ], + [ + "▁wipe", + -11.940826416015623 + ], + [ + "▁Watson", + -11.94091510772705 + ], + [ + "▁grapes", + -11.941080093383787 + ], + [ + "▁disappointment", + -11.941313743591309 + ], + [ + "▁Foot", + -11.941368103027344 + ], + [ + "▁scattered", + -11.941433906555176 + ], + [ + "▁inherent", + -11.94155216217041 + ], + [ + "▁coordinated", + -11.941610336303713 + ], + [ + "▁historically", + -11.941665649414062 + ], + [ + "▁Commons", + -11.941808700561523 + ], + [ + "▁soothing", + -11.94189453125 + ], + [ + "▁$200", + -11.942068099975586 + ], + [ + "▁DS", + -11.942076683044434 + ], + [ + "▁Barn", + -11.942096710205078 + ], + [ + "▁1940", + -11.942110061645508 + ], + [ + "▁mono", + -11.942148208618164 + ], + [ + "bin", + -11.942234992980955 + ], + [ + "pop", + -11.942279815673828 + ], + [ + "▁moist", + -11.942300796508787 + ], + [ + "▁bearings", + -11.942480087280272 + ], + [ + "▁Multiple", + -11.94248104095459 + ], + [ + "▁Anyway", + -11.942575454711914 + ], + [ + "▁Reports", + -11.94279956817627 + ], + [ + "▁Sterling", + -11.942828178405762 + ], + [ + "▁sword", + -11.94287109375 + ], + [ + "▁explosion", + -11.942875862121582 + ], + [ + "ington", + -11.942962646484377 + ], + [ + "email", + -11.9431734085083 + ], + [ + "▁calculations", + -11.943230628967283 + ], + [ + "Michael", + -11.943303108215332 + ], + [ + "▁daunting", + -11.943371772766112 + ], + [ + "▁gateway", + -11.943408966064451 + ], + [ + "▁Memory", + -11.943753242492676 + ], + [ + "▁relates", + -11.943818092346191 + ], + [ + "▁1.0", + -11.943937301635742 + ], + [ + "▁justify", + -11.943937301635742 + ], + [ + "▁bean", + -11.944079399108888 + ], + [ + "flow", + -11.94416332244873 + ], + [ + "09", + -11.944365501403809 + ], + [ + "39", + -11.944424629211426 + ], + [ + "Power", + -11.94455909729004 + ], + [ + "▁Guardian", + -11.94473361968994 + ], + [ + "▁laminate", + -11.944978713989258 + ], + [ + "▁skillet", + -11.945093154907228 + ], + [ + "▁creature", + -11.94528102874756 + ], + [ + "▁caution", + -11.945429801940918 + ], + [ + "▁impress", + -11.9454345703125 + ], + [ + "▁AG", + -11.945497512817385 + ], + [ + "▁proves", + -11.945539474487305 + ], + [ + "▁protests", + -11.945550918579102 + ], + [ + "▁fairy", + 
-11.945734024047852 + ], + [ + "▁quiz", + -11.945751190185549 + ], + [ + "▁Norman", + -11.94590950012207 + ], + [ + "▁berries", + -11.945992469787598 + ], + [ + "▁02", + -11.946258544921877 + ], + [ + "March", + -11.9462890625 + ], + [ + "▁laughter", + -11.946313858032228 + ], + [ + "BA", + -11.946368217468262 + ], + [ + "▁shaping", + -11.94643211364746 + ], + [ + "▁banana", + -11.94665813446045 + ], + [ + "▁helicopter", + -11.947007179260254 + ], + [ + "▁tens", + -11.947023391723633 + ], + [ + "▁integrating", + -11.947065353393556 + ], + [ + "▁Walking", + -11.947135925292969 + ], + [ + "▁baths", + -11.947175979614258 + ], + [ + "▁amp", + -11.947430610656738 + ], + [ + "▁Shot", + -11.947494506835938 + ], + [ + "▁MLS", + -11.94768524169922 + ], + [ + "▁Sar", + -11.947713851928713 + ], + [ + "▁Cuba", + -11.947943687438965 + ], + [ + "▁Projects", + -11.948047637939451 + ], + [ + "▁Kindle", + -11.948084831237791 + ], + [ + "▁Ti", + -11.948124885559082 + ], + [ + "National", + -11.948546409606934 + ], + [ + "sk", + -11.948562622070312 + ], + [ + "▁lend", + -11.948648452758787 + ], + [ + "▁removable", + -11.948675155639648 + ], + [ + "▁Helen", + -11.948749542236328 + ], + [ + "four", + -11.948988914489746 + ], + [ + "▁hooked", + -11.949047088623049 + ], + [ + "▁Mental", + -11.94912815093994 + ], + [ + "phone", + -11.949281692504885 + ], + [ + "▁Evans", + -11.949299812316896 + ], + [ + "Ha", + -11.949422836303713 + ], + [ + "Nice", + -11.949448585510254 + ], + [ + "▁Jeremy", + -11.94948673248291 + ], + [ + "show", + -11.94961166381836 + ], + [ + "▁travelled", + -11.949625015258787 + ], + [ + "▁EN", + -11.949628829956056 + ], + [ + "▁Peru", + -11.94965648651123 + ], + [ + "▁personalised", + -11.949762344360352 + ], + [ + "▁awkward", + -11.949913024902344 + ], + [ + "ible", + -11.949941635131836 + ], + [ + "illa", + -11.95019817352295 + ], + [ + "▁elastic", + -11.950335502624512 + ], + [ + "▁bounce", + -11.95037078857422 + ], + [ + "▁nodes", + -11.950461387634276 + ], + [ + "▁blends", + -11.950493812561035 + ], + [ + "▁leap", + -11.950687408447266 + ], + [ + "▁Strong", + -11.95069980621338 + ], + [ + "250", + -11.950806617736816 + ], + [ + "▁Jeep", + -11.950815200805664 + ], + [ + "length", + -11.950901985168455 + ], + [ + "▁69", + -11.950953483581545 + ], + [ + "▁relay", + -11.951119422912598 + ], + [ + "▁replied", + -11.951300621032717 + ], + [ + "Special", + -11.951339721679688 + ], + [ + "ud", + -11.95136833190918 + ], + [ + "▁relocation", + -11.95136833190918 + ], + [ + "▁Cameron", + -11.951445579528809 + ], + [ + "▁Roberts", + -11.952130317687988 + ], + [ + "▁Cruise", + -11.952259063720703 + ], + [ + "▁employs", + -11.952277183532717 + ], + [ + "▁Norwegian", + -11.95232105255127 + ], + [ + "▁Properties", + -11.952347755432127 + ], + [ + "▁Ghana", + -11.9524564743042 + ], + [ + "▁Wallpaper", + -11.952523231506348 + ], + [ + "ified", + -11.952677726745604 + ], + [ + "▁Zen", + -11.952800750732422 + ], + [ + "▁rigid", + -11.952848434448242 + ], + [ + "▁Processing", + -11.952860832214355 + ], + [ + "▁Remote", + -11.95289134979248 + ], + [ + "working", + -11.9532470703125 + ], + [ + "▁willingness", + -11.953269004821776 + ], + [ + "▁mo", + -11.953411102294922 + ], + [ + "▁advisors", + -11.953694343566896 + ], + [ + "▁Request", + -11.953702926635742 + ], + [ + "▁Framework", + -11.954004287719728 + ], + [ + "▁acre", + -11.954026222229004 + ], + [ + "▁attain", + -11.954050064086914 + ], + [ + "▁rub", + -11.954092979431152 + ], + [ + "▁collectors", + -11.954157829284668 + ], + [ + "▁Discount", + 
-11.954231262207031 + ], + [ + "▁XL", + -11.954380989074709 + ], + [ + "▁Whole", + -11.954479217529297 + ], + [ + "▁convicted", + -11.95457363128662 + ], + [ + "▁whisk", + -11.954609870910645 + ], + [ + "▁Lost", + -11.954663276672363 + ], + [ + "business", + -11.954682350158691 + ], + [ + "▁Bali", + -11.954806327819824 + ], + [ + "west", + -11.955144882202148 + ], + [ + "▁interpret", + -11.955224990844728 + ], + [ + "▁Peak", + -11.95533847808838 + ], + [ + "▁Ottawa", + -11.955410957336426 + ], + [ + "▁Bass", + -11.955451011657717 + ], + [ + "▁Fellow", + -11.955506324768066 + ], + [ + "▁Mirror", + -11.955601692199709 + ], + [ + "▁generates", + -11.955673217773438 + ], + [ + "▁dialog", + -11.955812454223633 + ], + [ + "▁straps", + -11.955855369567873 + ], + [ + "▁82", + -11.95586109161377 + ], + [ + "▁Shell", + -11.95595359802246 + ], + [ + "▁mug", + -11.955994606018066 + ], + [ + "▁arrow", + -11.956214904785156 + ], + [ + "▁webpage", + -11.956584930419922 + ], + [ + "▁validation", + -11.956796646118164 + ], + [ + "▁bracelet", + -11.95681381225586 + ], + [ + "▁uncommon", + -11.956835746765137 + ], + [ + "▁assemble", + -11.95707893371582 + ], + [ + "▁Resolution", + -11.957086563110352 + ], + [ + "▁Serve", + -11.957354545593262 + ], + [ + "▁Ski", + -11.957490921020508 + ], + [ + "▁Jake", + -11.957517623901367 + ], + [ + "▁teenage", + -11.957622528076172 + ], + [ + "▁Vienna", + -11.957648277282717 + ], + [ + "▁asphalt", + -11.957693099975586 + ], + [ + "▁ritual", + -11.95797634124756 + ], + [ + "▁Obviously", + -11.95813274383545 + ], + [ + "▁supervisor", + -11.958569526672363 + ], + [ + "ram", + -11.95865249633789 + ], + [ + "▁spinning", + -11.958687782287598 + ], + [ + "▁Pope", + -11.958824157714844 + ], + [ + "▁Stan", + -11.958900451660156 + ], + [ + "▁Lovely", + -11.95895004272461 + ], + [ + "▁CMS", + -11.959118843078612 + ], + [ + "ban", + -11.959229469299316 + ], + [ + "▁Task", + -11.959253311157228 + ], + [ + "▁credibility", + -11.959291458129885 + ], + [ + "▁Wire", + -11.959301948547363 + ], + [ + "▁tournaments", + -11.959508895874023 + ], + [ + "▁McDonald", + -11.959527015686035 + ], + [ + "Local", + -11.95953369140625 + ], + [ + "▁mentoring", + -11.95954132080078 + ], + [ + "▁disable", + -11.959582328796388 + ], + [ + "▁vet", + -11.96003246307373 + ], + [ + "▁ma", + -11.960075378417969 + ], + [ + "▁adhesive", + -11.960259437561035 + ], + [ + "▁grease", + -11.960297584533691 + ], + [ + "▁Rule", + -11.960482597351074 + ], + [ + "▁Hack", + -11.960658073425291 + ], + [ + "▁Lounge", + -11.96066188812256 + ], + [ + "▁deputy", + -11.960897445678713 + ], + [ + "Beautiful", + -11.961040496826172 + ], + [ + "▁Featuring", + -11.961055755615234 + ], + [ + "▁Min", + -11.961517333984377 + ], + [ + "▁Wave", + -11.96163558959961 + ], + [ + "▁Movement", + -11.961636543273926 + ], + [ + "▁stranger", + -11.96173858642578 + ], + [ + "olo", + -11.96174144744873 + ], + [ + "▁vivid", + -11.961771965026855 + ], + [ + "▁milestone", + -11.961809158325195 + ], + [ + "▁Parish", + -11.961864471435549 + ], + [ + "gy", + -11.961909294128418 + ], + [ + "full", + -11.961965560913086 + ], + [ + "value", + -11.9619722366333 + ], + [ + "cent", + -11.962352752685549 + ], + [ + "▁gourmet", + -11.962483406066896 + ], + [ + "▁accepts", + -11.962613105773926 + ], + [ + "▁requesting", + -11.962776184082031 + ], + [ + "▁Trinity", + -11.962861061096191 + ], + [ + "▁believing", + -11.962961196899414 + ], + [ + "ye", + -11.96299934387207 + ], + [ + "gun", + -11.963138580322266 + ], + [ + "▁christmas", + -11.96343231201172 + ], + [ 
+ "▁UAE", + -11.963488578796388 + ], + [ + "▁Principal", + -11.963493347167969 + ], + [ + "▁severely", + -11.963510513305664 + ], + [ + "uff", + -11.963700294494627 + ], + [ + "▁Revenue", + -11.963809967041016 + ], + [ + "lasting", + -11.963863372802734 + ], + [ + "▁Venice", + -11.963933944702148 + ], + [ + "▁73", + -11.963948249816896 + ], + [ + "▁Cu", + -11.964016914367676 + ], + [ + "▁Essay", + -11.964109420776367 + ], + [ + "▁Harrison", + -11.964205741882324 + ], + [ + "▁declare", + -11.964359283447266 + ], + [ + "▁£", + -11.964437484741213 + ], + [ + "▁grave", + -11.96480655670166 + ], + [ + "▁Scholarship", + -11.964927673339844 + ], + [ + "▁dump", + -11.964947700500488 + ], + [ + "▁gallon", + -11.964964866638184 + ], + [ + "▁traction", + -11.964985847473145 + ], + [ + "▁username", + -11.964999198913574 + ], + [ + "▁cardboard", + -11.965088844299316 + ], + [ + "▁dismissed", + -11.965394020080566 + ], + [ + "-19", + -11.965405464172363 + ], + [ + "▁inspections", + -11.965494155883787 + ], + [ + "▁boundary", + -11.965906143188477 + ], + [ + "▁Universe", + -11.966069221496582 + ], + [ + "▁Hour", + -11.966198921203612 + ], + [ + "▁Hop", + -11.966217041015623 + ], + [ + "▁advancement", + -11.966252326965332 + ], + [ + "start", + -11.966391563415527 + ], + [ + "▁invented", + -11.966403007507324 + ], + [ + "▁chefs", + -11.966490745544434 + ], + [ + "lia", + -11.96663761138916 + ], + [ + "Ask", + -11.966717720031738 + ], + [ + "fold", + -11.966958999633787 + ], + [ + "▁dangers", + -11.96696662902832 + ], + [ + "▁labeled", + -11.967243194580078 + ], + [ + "▁Templates", + -11.96741771697998 + ], + [ + "anti", + -11.967572212219238 + ], + [ + "▁Ro", + -11.967622756958008 + ], + [ + "▁Typically", + -11.967652320861816 + ], + [ + "TB", + -11.967743873596191 + ], + [ + "▁Rogers", + -11.96784496307373 + ], + [ + "▁Dollar", + -11.967877388000488 + ], + [ + "▁4.5", + -11.967958450317385 + ], + [ + "▁Lion", + -11.96800136566162 + ], + [ + "/10", + -11.968093872070312 + ], + [ + "standing", + -11.96813678741455 + ], + [ + "▁randomly", + -11.968161582946776 + ], + [ + "▁Battery", + -11.968360900878906 + ], + [ + "▁polyester", + -11.968485832214355 + ], + [ + "▁enforce", + -11.968523979187012 + ], + [ + "▁adore", + -11.969016075134276 + ], + [ + "Social", + -11.969080924987791 + ], + [ + "ica", + -11.969098091125488 + ], + [ + "Health", + -11.969159126281738 + ], + [ + "▁cheer", + -11.96932601928711 + ], + [ + "ib", + -11.96939182281494 + ], + [ + "-7", + -11.969564437866213 + ], + [ + "▁Portable", + -11.96956729888916 + ], + [ + "▁Iranian", + -11.969572067260742 + ], + [ + "▁Alternatively", + -11.969677925109863 + ], + [ + "▁reside", + -11.969841003417969 + ], + [ + "▁accidentally", + -11.970126152038574 + ], + [ + "▁payable", + -11.970621109008787 + ], + [ + "▁posture", + -11.970663070678713 + ], + [ + "▁attributed", + -11.971293449401855 + ], + [ + "ging", + -11.971654891967772 + ], + [ + "▁blender", + -11.971757888793944 + ], + [ + "▁stiff", + -11.972040176391602 + ], + [ + "▁wheelchair", + -11.972064018249512 + ], + [ + "▁disappointing", + -11.97206974029541 + ], + [ + "▁Cherry", + -11.972156524658203 + ], + [ + "▁cleansing", + -11.972315788269045 + ], + [ + "▁Marina", + -11.972346305847168 + ], + [ + "▁slate", + -11.972382545471191 + ], + [ + "▁rivals", + -11.97243309020996 + ], + [ + "▁headache", + -11.972671508789062 + ], + [ + "▁headlines", + -11.972885131835938 + ], + [ + "▁PVC", + -11.972966194152832 + ], + [ + "▁Ger", + -11.973000526428224 + ], + [ + "▁Funeral", + -11.97317886352539 + ], + [ + 
"lay", + -11.973226547241213 + ], + [ + "▁Township", + -11.973410606384276 + ], + [ + "▁st", + -11.973466873168944 + ], + [ + "▁Grab", + -11.973928451538086 + ], + [ + "▁cartoon", + -11.97400951385498 + ], + [ + "▁investigators", + -11.974027633666992 + ], + [ + "▁7:30", + -11.974095344543455 + ], + [ + "5)", + -11.974160194396973 + ], + [ + "square", + -11.974210739135742 + ], + [ + "▁dwelling", + -11.97423267364502 + ], + [ + "boy", + -11.974403381347656 + ], + [ + "nan", + -11.97447109222412 + ], + [ + "mel", + -11.974477767944336 + ], + [ + "MD", + -11.97459602355957 + ], + [ + "Never", + -11.974696159362791 + ], + [ + "▁Chart", + -11.974817276000977 + ], + [ + "▁Rail", + -11.975279808044434 + ], + [ + "▁bend", + -11.975306510925291 + ], + [ + "▁disappeared", + -11.975353240966797 + ], + [ + "▁systematic", + -11.975378036499023 + ], + [ + "▁crypto", + -11.975552558898926 + ], + [ + "▁Ta", + -11.975855827331545 + ], + [ + "▁archives", + -11.976052284240724 + ], + [ + "▁clarify", + -11.97659397125244 + ], + [ + "▁spiral", + -11.97661590576172 + ], + [ + "ily", + -11.976770401000977 + ], + [ + "▁trek", + -11.976791381835938 + ], + [ + "▁ancestors", + -11.976953506469728 + ], + [ + "-30", + -11.97719669342041 + ], + [ + "▁Bur", + -11.977252960205078 + ], + [ + "▁Tamil", + -11.977492332458496 + ], + [ + "▁cartridge", + -11.977544784545898 + ], + [ + "▁SSL", + -11.97783374786377 + ], + [ + "▁swelling", + -11.977834701538086 + ], + [ + "▁owning", + -11.977859497070312 + ], + [ + "▁folders", + -11.97794246673584 + ], + [ + "ius", + -11.978132247924805 + ], + [ + "Anyway", + -11.978190422058104 + ], + [ + "▁tragic", + -11.978318214416504 + ], + [ + "▁locking", + -11.978401184082031 + ], + [ + "▁Figure", + -11.97846794128418 + ], + [ + "▁respiratory", + -11.978753089904783 + ], + [ + "Besides", + -11.978776931762695 + ], + [ + "▁typing", + -11.978867530822754 + ], + [ + "▁hilarious", + -11.97903823852539 + ], + [ + "▁Waste", + -11.979092597961426 + ], + [ + "▁shorts", + -11.979119300842283 + ], + [ + "03", + -11.979191780090332 + ], + [ + "▁attracting", + -11.979408264160156 + ], + [ + "52", + -11.979950904846191 + ], + [ + "▁AZ", + -11.97995376586914 + ], + [ + "lib", + -11.979971885681152 + ], + [ + "▁Southwest", + -11.979976654052734 + ], + [ + "▁outdated", + -11.980236053466797 + ], + [ + "▁tightly", + -11.980269432067873 + ], + [ + "▁sodium", + -11.980294227600098 + ], + [ + "ora", + -11.980767250061035 + ], + [ + "nis", + -11.98078155517578 + ], + [ + "▁88", + -11.980853080749512 + ], + [ + "food", + -11.980968475341797 + ], + [ + "▁damp", + -11.9811429977417 + ], + [ + "bad", + -11.981274604797363 + ], + [ + "▁converting", + -11.981444358825684 + ], + [ + "▁enroll", + -11.98145866394043 + ], + [ + "▁stance", + -11.98170280456543 + ], + [ + "▁punishment", + -11.981914520263672 + ], + [ + "▁trademarks", + -11.981947898864746 + ], + [ + "▁Craft", + -11.98202419281006 + ], + [ + "▁pantry", + -11.982054710388184 + ], + [ + "val", + -11.982484817504885 + ], + [ + "▁Coalition", + -11.982563018798828 + ], + [ + "▁aerial", + -11.982617378234863 + ], + [ + "▁Lamp", + -11.982857704162598 + ], + [ + "▁controversy", + -11.983108520507812 + ], + [ + "DB", + -11.983197212219238 + ], + [ + "Share", + -11.983266830444336 + ], + [ + "▁Christianity", + -11.983354568481444 + ], + [ + "▁personalities", + -11.98349666595459 + ], + [ + "▁ensemble", + -11.983619689941406 + ], + [ + "▁avocado", + -11.983627319335938 + ], + [ + "▁fixes", + -11.983742713928224 + ], + [ + "plan", + -11.983824729919434 + ], + [ + 
"▁bets", + -11.98392391204834 + ], + [ + "▁Database", + -11.984115600585938 + ], + [ + "▁Arabic", + -11.984230995178224 + ], + [ + "▁Move", + -11.984285354614258 + ], + [ + "▁margins", + -11.984371185302734 + ], + [ + "▁starring", + -11.98442840576172 + ], + [ + "▁Sheriff", + -11.984504699707031 + ], + [ + "▁Scientific", + -11.984556198120115 + ], + [ + "▁tenure", + -11.984785079956056 + ], + [ + "▁rendered", + -11.985185623168944 + ], + [ + "design", + -11.985212326049805 + ], + [ + "▁warranties", + -11.985342979431152 + ], + [ + "▁refresh", + -11.985432624816896 + ], + [ + "▁Chase", + -11.985434532165527 + ], + [ + "stra", + -11.98552703857422 + ], + [ + "▁broth", + -11.98561191558838 + ], + [ + "▁patented", + -11.985625267028809 + ], + [ + "▁wrinkles", + -11.985709190368652 + ], + [ + "▁Row", + -11.986016273498535 + ], + [ + "05", + -11.986048698425291 + ], + [ + "▁accents", + -11.986157417297363 + ], + [ + "▁Guests", + -11.98626708984375 + ], + [ + "make", + -11.986373901367188 + ], + [ + "Care", + -11.986419677734377 + ], + [ + "▁biking", + -11.986422538757324 + ], + [ + "Bo", + -11.98655605316162 + ], + [ + "▁referrals", + -11.98666286468506 + ], + [ + "▁Chile", + -11.986735343933104 + ], + [ + "North", + -11.986923217773438 + ], + [ + "▁Heaven", + -11.987088203430176 + ], + [ + "▁compassionate", + -11.98719882965088 + ], + [ + "▁bookings", + -11.98747730255127 + ], + [ + "▁cared", + -11.987502098083496 + ], + [ + "▁NBC", + -11.987554550170898 + ], + [ + "▁cage", + -11.987613677978516 + ], + [ + "▁mileage", + -11.987614631652832 + ], + [ + "▁rooted", + -11.987663269042969 + ], + [ + "▁Bal", + -11.987815856933594 + ], + [ + "▁recognizes", + -11.987874984741213 + ], + [ + "▁catalogue", + -11.988184928894045 + ], + [ + "▁neglect", + -11.988204002380373 + ], + [ + "▁pulse", + -11.988341331481934 + ], + [ + "▁volunteering", + -11.98837184906006 + ], + [ + "▁quarterback", + -11.988561630249023 + ], + [ + "Always", + -11.988574981689451 + ], + [ + "safe", + -11.98861026763916 + ], + [ + "▁feast", + -11.989270210266112 + ], + [ + "▁Kyle", + -11.989676475524902 + ], + [ + "▁0.5", + -11.989691734313965 + ], + [ + "▁desserts", + -11.989754676818848 + ], + [ + "▁Boat", + -11.9898099899292 + ], + [ + "▁sneak", + -11.990345001220703 + ], + [ + "▁sorted", + -11.990449905395508 + ], + [ + "track", + -11.990482330322266 + ], + [ + "▁fusion", + -11.990561485290527 + ], + [ + "▁practiced", + -11.990705490112305 + ], + [ + "▁vol", + -11.990711212158203 + ], + [ + "▁Amendment", + -11.990814208984377 + ], + [ + "▁digging", + -11.990824699401855 + ], + [ + "▁umbrella", + -11.990854263305664 + ], + [ + "▁intricate", + -11.991119384765623 + ], + [ + "▁suspicious", + -11.991203308105469 + ], + [ + "▁Wear", + -11.991331100463867 + ], + [ + "▁Kid", + -11.991427421569824 + ], + [ + "▁reforms", + -11.991456985473633 + ], + [ + "▁canopy", + -11.991765975952148 + ], + [ + "blue", + -11.991790771484377 + ], + [ + "▁drunk", + -11.991989135742188 + ], + [ + "▁autism", + -11.992164611816406 + ], + [ + "weight", + -11.992321014404297 + ], + [ + "▁backpack", + -11.99234390258789 + ], + [ + "51", + -11.992494583129885 + ], + [ + "▁grabbed", + -11.992506980895996 + ], + [ + "▁lamb", + -11.992559432983398 + ], + [ + "▁upgrading", + -11.992621421813965 + ], + [ + "▁curb", + -11.992728233337402 + ], + [ + "▁relying", + -11.992996215820312 + ], + [ + "4,", + -11.993054389953612 + ], + [ + "▁lb", + -11.993098258972168 + ], + [ + "▁Weekend", + -11.993118286132812 + ], + [ + "▁carpets", + -11.993151664733888 + ], + [ + "FA", + 
-11.993157386779783 + ], + [ + "▁Dennis", + -11.993173599243164 + ], + [ + "▁Uni", + -11.99325942993164 + ], + [ + "mat", + -11.993263244628906 + ], + [ + "▁Generation", + -11.99356746673584 + ], + [ + "▁societies", + -11.993571281433104 + ], + [ + "▁94", + -11.993721961975098 + ], + [ + "Unlike", + -11.993776321411133 + ], + [ + "gi", + -11.993918418884276 + ], + [ + "▁pinch", + -11.993999481201172 + ], + [ + "▁bitcoin", + -11.994050979614258 + ], + [ + "▁benchmark", + -11.994267463684082 + ], + [ + "▁Dress", + -11.994388580322266 + ], + [ + "▁witnesses", + -11.994466781616213 + ], + [ + "ip", + -11.994611740112305 + ], + [ + "Live", + -11.995055198669434 + ], + [ + "-16", + -11.99516487121582 + ], + [ + "▁renew", + -11.995315551757812 + ], + [ + "▁smells", + -11.995418548583984 + ], + [ + "ob", + -11.99545192718506 + ], + [ + "▁$7", + -11.995553970336914 + ], + [ + "▁Apps", + -11.995614051818848 + ], + [ + "Remove", + -11.995731353759766 + ], + [ + "▁weaknesses", + -11.99595546722412 + ], + [ + "▁CSS", + -11.996127128601074 + ], + [ + "▁uncover", + -11.996150016784668 + ], + [ + "▁wrapping", + -11.996323585510254 + ], + [ + "▁Portuguese", + -11.996427536010742 + ], + [ + "▁woke", + -11.996501922607422 + ], + [ + "▁scam", + -11.996679306030272 + ], + [ + "▁Katie", + -11.99685001373291 + ], + [ + "▁competence", + -11.996915817260742 + ], + [ + "▁Ruth", + -11.996940612792969 + ], + [ + "▁unity", + -11.996996879577637 + ], + [ + "▁2004,", + -11.997492790222168 + ], + [ + "Put", + -11.997556686401367 + ], + [ + "Men", + -11.997641563415527 + ], + [ + "▁PO", + -11.99765396118164 + ], + [ + "berry", + -11.997900009155272 + ], + [ + "▁retire", + -11.9979829788208 + ], + [ + "▁demonstrating", + -11.99800968170166 + ], + [ + "2-", + -11.998052597045898 + ], + [ + "▁Statement", + -11.998083114624023 + ], + [ + "▁(10", + -11.99816608428955 + ], + [ + "▁fibre", + -11.998262405395508 + ], + [ + "▁porcelain", + -11.99828815460205 + ], + [ + "▁alloy", + -11.998418807983398 + ], + [ + "▁assessing", + -11.998547554016112 + ], + [ + "▁Hal", + -11.998705863952637 + ], + [ + "▁mandate", + -11.999105453491213 + ], + [ + "php", + -11.99912166595459 + ], + [ + "PO", + -11.99912929534912 + ], + [ + "▁rehab", + -11.999199867248535 + ], + [ + "▁registering", + -11.99951457977295 + ], + [ + "▁wilderness", + -11.999678611755373 + ], + [ + "June", + -11.999715805053713 + ], + [ + "▁worksheets", + -11.999795913696287 + ], + [ + "▁amendment", + -11.999839782714844 + ], + [ + "▁inserted", + -11.999897003173828 + ], + [ + "▁revision", + -11.999919891357422 + ], + [ + "▁FA", + -11.99995231628418 + ], + [ + "▁Dodge", + -11.999957084655762 + ], + [ + "▁formally", + -11.99997329711914 + ], + [ + "▁wonderfully", + -12.000067710876465 + ], + [ + "▁Employment", + -12.000235557556152 + ], + [ + "▁2003.", + -12.000323295593262 + ], + [ + "▁affiliates", + -12.00049114227295 + ], + [ + "▁weights", + -12.000494003295898 + ], + [ + "▁mac", + -12.000580787658691 + ], + [ + "▁noting", + -12.00062084197998 + ], + [ + "▁humanitarian", + -12.00081729888916 + ], + [ + "▁launches", + -12.000821113586426 + ], + [ + "▁browsers", + -12.001036643981934 + ], + [ + "▁kicks", + -12.001258850097656 + ], + [ + "▁tick", + -12.001277923583984 + ], + [ + "▁Aqua", + -12.001386642456056 + ], + [ + "▁earthquake", + -12.001522064208984 + ], + [ + "▁nomination", + -12.001710891723633 + ], + [ + "▁allies", + -12.001718521118164 + ], + [ + "▁Brussels", + -12.001729011535645 + ], + [ + "▁carrots", + -12.00177764892578 + ], + [ + "▁Cedar", + -12.00198459625244 
+ ], + [ + "▁trunk", + -12.002242088317873 + ], + [ + "▁showcasing", + -12.002467155456545 + ], + [ + "▁simplify", + -12.002518653869627 + ], + [ + "▁rented", + -12.002601623535156 + ], + [ + "Several", + -12.00278091430664 + ], + [ + "▁negligence", + -12.003026008605955 + ], + [ + "▁versatility", + -12.003026008605955 + ], + [ + "▁Basically", + -12.003090858459473 + ], + [ + "▁Symphony", + -12.00319766998291 + ], + [ + "AR", + -12.003609657287598 + ], + [ + "▁showroom", + -12.003987312316896 + ], + [ + "▁Dar", + -12.004141807556152 + ], + [ + "ERS", + -12.004196166992188 + ], + [ + "▁dentistry", + -12.004217147827148 + ], + [ + "▁Pastor", + -12.004236221313477 + ], + [ + "▁optimistic", + -12.004476547241213 + ], + [ + "▁Newcastle", + -12.00450611114502 + ], + [ + "▁freshman", + -12.004619598388672 + ], + [ + "▁Sim", + -12.004651069641112 + ], + [ + "▁CNN", + -12.004742622375488 + ], + [ + "▁Firefox", + -12.00481128692627 + ], + [ + "▁Ceiling", + -12.004863739013672 + ], + [ + "▁activist", + -12.004890441894531 + ], + [ + "▁4.0", + -12.004898071289062 + ], + [ + "”).", + -12.00490379333496 + ], + [ + "▁INC", + -12.004962921142578 + ], + [ + "gas", + -12.005179405212402 + ], + [ + "▁Question", + -12.005449295043944 + ], + [ + "▁potent", + -12.00556182861328 + ], + [ + "▁450", + -12.005818367004396 + ], + [ + "▁baskets", + -12.005936622619627 + ], + [ + "▁fossil", + -12.005955696105955 + ], + [ + "▁curved", + -12.006285667419434 + ], + [ + "▁Shakespeare", + -12.006514549255373 + ], + [ + "▁Rent", + -12.006537437438965 + ], + [ + "▁celebrates", + -12.006590843200684 + ], + [ + "▁Gov", + -12.006604194641112 + ], + [ + "▁Abraham", + -12.006608963012695 + ], + [ + "▁Indians", + -12.006608963012695 + ], + [ + "▁Similarly", + -12.006728172302246 + ], + [ + "▁prevented", + -12.006897926330566 + ], + [ + "▁sandy", + -12.007243156433104 + ], + [ + "▁enclosed", + -12.00725555419922 + ], + [ + "▁useless", + -12.007380485534668 + ], + [ + "▁customizable", + -12.007526397705078 + ], + [ + "▁lawmakers", + -12.007582664489746 + ], + [ + "other", + -12.007850646972656 + ], + [ + "88", + -12.008124351501465 + ], + [ + "▁cocoa", + -12.008203506469728 + ], + [ + "▁uncertain", + -12.008207321166992 + ], + [ + "CC", + -12.008209228515623 + ], + [ + "▁analog", + -12.008326530456545 + ], + [ + "▁simplest", + -12.008688926696776 + ], + [ + "Hope", + -12.00876808166504 + ], + [ + "▁Motion", + -12.008803367614746 + ], + [ + "ag", + -12.008810997009276 + ], + [ + "▁invasive", + -12.00884246826172 + ], + [ + "▁disclosed", + -12.008930206298828 + ], + [ + "▁Near", + -12.009020805358888 + ], + [ + "▁youtube", + -12.009161949157717 + ], + [ + "▁remodel", + -12.00949001312256 + ], + [ + "▁simmer", + -12.009521484375 + ], + [ + "▁Train", + -12.009624481201172 + ], + [ + "▁storms", + -12.009723663330078 + ], + [ + "▁tangible", + -12.00975513458252 + ], + [ + "▁upstairs", + -12.009881019592283 + ], + [ + "then", + -12.00995922088623 + ], + [ + "▁determines", + -12.010004043579102 + ], + [ + "lands", + -12.010027885437012 + ], + [ + "▁Fellowship", + -12.010031700134276 + ], + [ + "▁Cam", + -12.010168075561523 + ], + [ + "▁Electronics", + -12.010173797607422 + ], + [ + "▁pause", + -12.01020622253418 + ], + [ + "ese", + -12.010483741760254 + ], + [ + "tri", + -12.010709762573242 + ], + [ + "▁flags", + -12.01079559326172 + ], + [ + "▁steer", + -12.010872840881348 + ], + [ + "▁inaugural", + -12.01088047027588 + ], + [ + "▁Securities", + -12.011080741882324 + ], + [ + "House", + -12.011115074157717 + ], + [ + "▁recovering", + 
-12.011260032653809 + ], + [ + "▁bowls", + -12.011314392089844 + ], + [ + "▁inhabitants", + -12.011462211608888 + ], + [ + "ev", + -12.01172161102295 + ], + [ + "▁Grill", + -12.011808395385742 + ], + [ + "▁Less", + -12.01185703277588 + ], + [ + "Line", + -12.012301445007324 + ], + [ + "▁supermarket", + -12.01240062713623 + ], + [ + "▁pastry", + -12.01303482055664 + ], + [ + "▁Conditioning", + -12.01341438293457 + ], + [ + "▁Bet", + -12.013561248779297 + ], + [ + "▁norm", + -12.013627052307127 + ], + [ + "▁civic", + -12.013701438903809 + ], + [ + "▁MC", + -12.013813972473145 + ], + [ + "cal", + -12.01402187347412 + ], + [ + "▁drought", + -12.014030456542969 + ], + [ + "▁committees", + -12.014273643493652 + ], + [ + "▁collector", + -12.014430046081545 + ], + [ + "▁ruin", + -12.014623641967772 + ], + [ + "▁matrix", + -12.014732360839844 + ], + [ + "▁Cities", + -12.01474666595459 + ], + [ + "▁resilience", + -12.014830589294434 + ], + [ + "▁implied", + -12.014838218688965 + ], + [ + "Stay", + -12.015247344970703 + ], + [ + "▁sadly", + -12.015653610229492 + ], + [ + "▁prediction", + -12.015788078308104 + ], + [ + "▁hooks", + -12.015871047973633 + ], + [ + "▁probe", + -12.015953063964844 + ], + [ + "▁Lou", + -12.016053199768066 + ], + [ + "▁heavier", + -12.01605987548828 + ], + [ + "▁crystals", + -12.016077041625977 + ], + [ + "▁Mod", + -12.016236305236816 + ], + [ + "rt", + -12.0166015625 + ], + [ + "▁mathematical", + -12.016645431518556 + ], + [ + "workers", + -12.016767501831056 + ], + [ + "demand", + -12.016772270202637 + ], + [ + "▁packet", + -12.016801834106444 + ], + [ + "Congratulations", + -12.017087936401367 + ], + [ + "▁sang", + -12.01710033416748 + ], + [ + "Key", + -12.017146110534668 + ], + [ + "▁countertops", + -12.017617225646973 + ], + [ + "▁Den", + -12.017661094665527 + ], + [ + "▁Jen", + -12.01772689819336 + ], + [ + "LE", + -12.017813682556152 + ], + [ + "ida", + -12.017966270446776 + ], + [ + "▁initiate", + -12.0180082321167 + ], + [ + "▁statue", + -12.018019676208496 + ], + [ + "▁pedestrian", + -12.01806354522705 + ], + [ + "matic", + -12.01808738708496 + ], + [ + "▁homepage", + -12.018143653869627 + ], + [ + "▁Archives", + -12.018357276916504 + ], + [ + "▁Harvey", + -12.018359184265137 + ], + [ + "▁variant", + -12.01840877532959 + ], + [ + "PE", + -12.01878261566162 + ], + [ + "▁martial", + -12.018800735473633 + ], + [ + "▁chili", + -12.01901149749756 + ], + [ + "▁interiors", + -12.01909351348877 + ], + [ + "▁Heating", + -12.01921844482422 + ], + [ + "43", + -12.019381523132324 + ], + [ + "▁Experts", + -12.019598007202148 + ], + [ + "▁resting", + -12.019652366638184 + ], + [ + "▁Congratulations", + -12.019845962524414 + ], + [ + "▁bloom", + -12.019880294799805 + ], + [ + "▁socket", + -12.019888877868652 + ], + [ + "▁Python", + -12.01996898651123 + ], + [ + "▁unto", + -12.020286560058594 + ], + [ + "▁Stream", + -12.020304679870604 + ], + [ + "▁Tesla", + -12.02043914794922 + ], + [ + "ela", + -12.020594596862791 + ], + [ + "▁Formula", + -12.021034240722656 + ], + [ + "▁assumptions", + -12.02104949951172 + ], + [ + "▁absent", + -12.021103858947754 + ], + [ + "▁molecules", + -12.021142959594728 + ], + [ + "tier", + -12.021143913269045 + ], + [ + "▁Johnny", + -12.021294593811035 + ], + [ + "ou", + -12.021321296691896 + ], + [ + "▁Ya", + -12.02134895324707 + ], + [ + "▁Residential", + -12.021550178527832 + ], + [ + "om", + -12.021562576293944 + ], + [ + "▁comp", + -12.021583557128906 + ], + [ + "▁Frame", + -12.021584510803224 + ], + [ + "-25", + -12.021608352661133 + ], + [ + 
"▁ham", + -12.021772384643556 + ], + [ + "▁pulls", + -12.02183723449707 + ], + [ + "file", + -12.021844863891602 + ], + [ + "▁passwords", + -12.021868705749512 + ], + [ + "▁remarks", + -12.021961212158203 + ], + [ + "ot", + -12.022173881530762 + ], + [ + "▁shadows", + -12.02218532562256 + ], + [ + "▁lanes", + -12.022252082824709 + ], + [ + "▁(8", + -12.022503852844238 + ], + [ + "▁Journey", + -12.022516250610352 + ], + [ + "▁CBS", + -12.022655487060549 + ], + [ + "▁Ne", + -12.022762298583984 + ], + [ + "▁GE", + -12.023009300231934 + ], + [ + "▁mills", + -12.02308750152588 + ], + [ + "▁Popular", + -12.02309799194336 + ], + [ + "lb", + -12.0232515335083 + ], + [ + "▁relevance", + -12.023348808288574 + ], + [ + "Research", + -12.02367877960205 + ], + [ + "▁Azure", + -12.023710250854492 + ], + [ + "▁motivate", + -12.02375316619873 + ], + [ + "Author", + -12.023951530456545 + ], + [ + "rum", + -12.024211883544922 + ], + [ + "▁barrels", + -12.024251937866213 + ], + [ + "▁Desert", + -12.024279594421388 + ], + [ + "ima", + -12.02434539794922 + ], + [ + "▁wounded", + -12.024497985839844 + ], + [ + "▁vibration", + -12.024563789367676 + ], + [ + "▁fireworks", + -12.024588584899902 + ], + [ + "▁owe", + -12.024624824523926 + ], + [ + "▁91", + -12.024688720703123 + ], + [ + "ans", + -12.025032997131348 + ], + [ + "▁proving", + -12.0252046585083 + ], + [ + "San", + -12.02520751953125 + ], + [ + "ante", + -12.025290489196776 + ], + [ + "▁pixel", + -12.0255708694458 + ], + [ + "oma", + -12.025634765625 + ], + [ + "▁craftsmanship", + -12.025640487670898 + ], + [ + "▁Diagram", + -12.02566909790039 + ], + [ + "▁invention", + -12.025690078735352 + ], + [ + "Ma", + -12.025726318359377 + ], + [ + "cell", + -12.025797843933104 + ], + [ + "▁Skype", + -12.02585506439209 + ], + [ + "150", + -12.026155471801758 + ], + [ + "▁barbecue", + -12.026162147521973 + ], + [ + "▁alien", + -12.026166915893556 + ], + [ + "▁1993", + -12.026594161987305 + ], + [ + "▁salaries", + -12.026779174804688 + ], + [ + "▁Late", + -12.02680206298828 + ], + [ + "▁photographic", + -12.027240753173828 + ], + [ + "57", + -12.027247428894045 + ], + [ + "▁profitability", + -12.027426719665527 + ], + [ + "▁Ab", + -12.027482986450195 + ], + [ + "▁Items", + -12.027490615844728 + ], + [ + "▁DNS", + -12.028386116027832 + ], + [ + "CM", + -12.028390884399414 + ], + [ + "▁Rocky", + -12.028411865234377 + ], + [ + "▁Fisher", + -12.028634071350098 + ], + [ + "▁Biology", + -12.028883934020996 + ], + [ + "▁considers", + -12.028935432434082 + ], + [ + "▁disruption", + -12.029013633728027 + ], + [ + "▁Em", + -12.029070854187012 + ], + [ + "tax", + -12.02915859222412 + ], + [ + "▁belongings", + -12.029231071472168 + ], + [ + "▁successes", + -12.029614448547363 + ], + [ + "▁appeals", + -12.029662132263184 + ], + [ + "▁overtime", + -12.029862403869627 + ], + [ + "ele", + -12.02992057800293 + ], + [ + "▁Beat", + -12.030120849609377 + ], + [ + "lite", + -12.030195236206056 + ], + [ + "Show", + -12.030214309692385 + ], + [ + "▁bees", + -12.030404090881348 + ], + [ + "▁pleasing", + -12.030421257019045 + ], + [ + "designed", + -12.030477523803713 + ], + [ + "▁grandparents", + -12.030573844909668 + ], + [ + "▁1991", + -12.030600547790527 + ], + [ + "▁checklist", + -12.030680656433104 + ], + [ + "▁Surface", + -12.030716896057127 + ], + [ + "▁sophomore", + -12.030790328979492 + ], + [ + "▁rises", + -12.030841827392578 + ], + [ + "▁rainbow", + -12.030903816223145 + ], + [ + ".00", + -12.031044960021973 + ], + [ + "using", + -12.03105640411377 + ], + [ + "shot", + 
-12.031190872192385 + ], + [ + "▁Cemetery", + -12.031364440917969 + ], + [ + "▁identifies", + -12.031364440917969 + ], + [ + "▁attacking", + -12.031373023986816 + ], + [ + "▁Yeah", + -12.031386375427246 + ], + [ + "gel", + -12.031396865844728 + ], + [ + "▁grind", + -12.031548500061035 + ], + [ + "▁constitutional", + -12.031620025634766 + ], + [ + "▁advertisers", + -12.03214168548584 + ], + [ + "▁je", + -12.032341003417969 + ], + [ + "▁geographic", + -12.032384872436523 + ], + [ + "uma", + -12.032410621643066 + ], + [ + "▁dots", + -12.032464981079102 + ], + [ + "▁diagnose", + -12.032567977905272 + ], + [ + "▁bush", + -12.032585144042969 + ], + [ + "▁citrus", + -12.032829284667969 + ], + [ + "▁gems", + -12.032862663269045 + ], + [ + "nik", + -12.03287124633789 + ], + [ + "▁developmental", + -12.032934188842772 + ], + [ + "▁performer", + -12.032949447631836 + ], + [ + "▁£2", + -12.033116340637209 + ], + [ + "▁multimedia", + -12.033185005187988 + ], + [ + "Without", + -12.03333568572998 + ], + [ + "▁distress", + -12.03360652923584 + ], + [ + "▁Pradesh", + -12.03366756439209 + ], + [ + "▁Workers", + -12.03376007080078 + ], + [ + "▁balloon", + -12.034065246582031 + ], + [ + "▁priest", + -12.034205436706545 + ], + [ + "▁Sandy", + -12.03432846069336 + ], + [ + "▁Allow", + -12.034353256225586 + ], + [ + "▁Previous", + -12.034616470336914 + ], + [ + "▁Florence", + -12.034663200378418 + ], + [ + "▁absorption", + -12.034802436828612 + ], + [ + "▁18-", + -12.035006523132324 + ], + [ + "▁boiling", + -12.035094261169434 + ], + [ + "▁pathway", + -12.03511905670166 + ], + [ + "▁Gar", + -12.035148620605469 + ], + [ + "Bring", + -12.03529930114746 + ], + [ + "▁indie", + -12.035404205322266 + ], + [ + "▁Parking", + -12.035436630249023 + ], + [ + "fit", + -12.03582000732422 + ], + [ + "tar", + -12.035935401916504 + ], + [ + "▁sunglasses", + -12.035978317260742 + ], + [ + "▁Batman", + -12.03599452972412 + ], + [ + "growing", + -12.036120414733888 + ], + [ + "▁reject", + -12.03623867034912 + ], + [ + "▁cracked", + -12.036261558532717 + ], + [ + "▁throws", + -12.036665916442873 + ], + [ + "▁Wing", + -12.036724090576172 + ], + [ + "ew", + -12.036808967590332 + ], + [ + "▁underwater", + -12.036931991577148 + ], + [ + "▁distinguish", + -12.036972999572754 + ], + [ + "▁kinda", + -12.03704071044922 + ], + [ + "brook", + -12.037280082702637 + ], + [ + "▁shifted", + -12.037323951721191 + ], + [ + "▁Tip", + -12.03742218017578 + ], + [ + "▁89", + -12.037495613098145 + ], + [ + "▁lasted", + -12.037603378295898 + ], + [ + "Police", + -12.03774070739746 + ], + [ + "dan", + -12.038097381591797 + ], + [ + "▁fighter", + -12.038113594055176 + ], + [ + "▁restart", + -12.038331031799316 + ], + [ + "▁Shaw", + -12.03834342956543 + ], + [ + "▁CI", + -12.038496971130373 + ], + [ + "▁sweater", + -12.038729667663574 + ], + [ + "▁textile", + -12.03880214691162 + ], + [ + "▁Strategic", + -12.038931846618652 + ], + [ + "▁Baseball", + -12.03916072845459 + ], + [ + "▁ramp", + -12.039164543151855 + ], + [ + "▁rinse", + -12.03950023651123 + ], + [ + "=\"", + -12.03952407836914 + ], + [ + "▁3-4", + -12.039558410644531 + ], + [ + "38", + -12.039589881896973 + ], + [ + "▁Wang", + -12.039876937866213 + ], + [ + "▁Uncategorized", + -12.039972305297852 + ], + [ + "▁bull", + -12.039987564086914 + ], + [ + "▁SIM", + -12.040009498596191 + ], + [ + "▁décor", + -12.040026664733888 + ], + [ + "▁negotiation", + -12.040064811706545 + ], + [ + "▁Own", + -12.040385246276855 + ], + [ + "▁Lucy", + -12.04057788848877 + ], + [ + "▁Welsh", + -12.04068088531494 + 
], + [ + "bury", + -12.04068660736084 + ], + [ + "▁hometown", + -12.04099178314209 + ], + [ + "Real", + -12.041126251220703 + ], + [ + "▁soldier", + -12.04121208190918 + ], + [ + "▁Rescue", + -12.041236877441406 + ], + [ + "▁neighbourhood", + -12.041253089904783 + ], + [ + "▁decay", + -12.041464805603027 + ], + [ + "▁Secure", + -12.041702270507812 + ], + [ + "▁transmitted", + -12.041780471801758 + ], + [ + "▁sanctuary", + -12.04178524017334 + ], + [ + "▁malicious", + -12.042110443115234 + ], + [ + "▁weed", + -12.042253494262695 + ], + [ + "▁inception", + -12.042527198791504 + ], + [ + "▁institute", + -12.04274559020996 + ], + [ + "▁myriad", + -12.04293155670166 + ], + [ + "Ed", + -12.04302215576172 + ], + [ + "▁clutch", + -12.043136596679688 + ], + [ + "help", + -12.043142318725586 + ], + [ + "▁puzzles", + -12.043231964111328 + ], + [ + "▁Amanda", + -12.043395042419434 + ], + [ + "die", + -12.043427467346191 + ], + [ + "▁NHL", + -12.043641090393066 + ], + [ + "▁utilizes", + -12.043651580810549 + ], + [ + "▁dare", + -12.044325828552246 + ], + [ + "▁peek", + -12.044501304626465 + ], + [ + "▁masses", + -12.044507026672363 + ], + [ + "▁Blake", + -12.0447359085083 + ], + [ + "▁charger", + -12.044888496398926 + ], + [ + "▁civilian", + -12.044918060302734 + ], + [ + "▁zinc", + -12.045015335083008 + ], + [ + "row", + -12.045082092285156 + ], + [ + "three", + -12.045402526855469 + ], + [ + "▁moms", + -12.04544734954834 + ], + [ + "▁Sustainable", + -12.04563045501709 + ], + [ + "mate", + -12.045696258544922 + ], + [ + "▁skies", + -12.045734405517578 + ], + [ + "▁inputs", + -12.045806884765623 + ], + [ + "Master", + -12.045933723449709 + ], + [ + "▁pertaining", + -12.04601764678955 + ], + [ + "▁RF", + -12.046080589294434 + ], + [ + "▁lightning", + -12.046189308166504 + ], + [ + "Per", + -12.046192169189451 + ], + [ + "Point", + -12.04625415802002 + ], + [ + "hood", + -12.046260833740234 + ], + [ + "▁tide", + -12.046438217163086 + ], + [ + "▁Rest", + -12.04647159576416 + ], + [ + "▁Entry", + -12.04653549194336 + ], + [ + "▁Hold", + -12.046597480773926 + ], + [ + "▁definitions", + -12.046749114990234 + ], + [ + "▁coral", + -12.046792984008787 + ], + [ + "NY", + -12.046940803527832 + ], + [ + "▁Province", + -12.046996116638184 + ], + [ + "▁almond", + -12.047161102294922 + ], + [ + "▁Gaming", + -12.047430038452148 + ], + [ + "▁strangers", + -12.047467231750488 + ], + [ + "▁08", + -12.047490119934082 + ], + [ + "left", + -12.047493934631348 + ], + [ + "▁routines", + -12.047762870788574 + ], + [ + "Children", + -12.047873497009276 + ], + [ + "pdf", + -12.047990798950195 + ], + [ + "▁lottery", + -12.048177719116213 + ], + [ + "▁surge", + -12.048295974731444 + ], + [ + "▁mats", + -12.04860782623291 + ], + [ + "ridge", + -12.048684120178224 + ], + [ + "▁CR", + -12.04891872406006 + ], + [ + "disciplinary", + -12.048925399780272 + ], + [ + "action", + -12.048944473266602 + ], + [ + "▁RC", + -12.048986434936523 + ], + [ + "▁contamination", + -12.049095153808594 + ], + [ + "▁4:", + -12.049232482910156 + ], + [ + "▁Maker", + -12.049365043640137 + ], + [ + "▁rotating", + -12.049372673034668 + ], + [ + "▁polls", + -12.049485206604004 + ], + [ + "too", + -12.049567222595217 + ], + [ + "▁creators", + -12.049673080444336 + ], + [ + "▁digestive", + -12.049957275390623 + ], + [ + "▁pathways", + -12.049999237060549 + ], + [ + "▁hello", + -12.050005912780762 + ], + [ + "▁(7", + -12.05014419555664 + ], + [ + "▁dragon", + -12.050164222717283 + ], + [ + "▁signage", + -12.050387382507324 + ], + [ + "▁footwear", + 
-12.050583839416504 + ], + [ + "▁cartridges", + -12.050642013549805 + ], + [ + "▁Cincinnati", + -12.050694465637209 + ], + [ + "▁Loans", + -12.05079174041748 + ], + [ + "▁examining", + -12.05146598815918 + ], + [ + "▁noble", + -12.051543235778809 + ], + [ + "▁neuro", + -12.05163288116455 + ], + [ + "ae", + -12.051891326904297 + ], + [ + "▁slopes", + -12.052048683166504 + ], + [ + "▁Elegant", + -12.052129745483398 + ], + [ + "leading", + -12.05219268798828 + ], + [ + "▁avid", + -12.052236557006836 + ], + [ + "sp", + -12.05226230621338 + ], + [ + "▁duct", + -12.053059577941896 + ], + [ + "▁obsessed", + -12.05322265625 + ], + [ + "▁Pete", + -12.053382873535156 + ], + [ + "▁prevalent", + -12.053470611572266 + ], + [ + "▁Sofa", + -12.053519248962402 + ], + [ + "▁interference", + -12.053550720214844 + ], + [ + "▁Portal", + -12.05360221862793 + ], + [ + "▁internally", + -12.053648948669434 + ], + [ + "▁Leeds", + -12.053716659545898 + ], + [ + "▁dependable", + -12.053717613220217 + ], + [ + "▁organizers", + -12.053979873657228 + ], + [ + "▁shampoo", + -12.054009437561035 + ], + [ + "▁Wal", + -12.054073333740234 + ], + [ + "▁periodically", + -12.054146766662598 + ], + [ + "Type", + -12.05423355102539 + ], + [ + "▁Theme", + -12.054241180419922 + ], + [ + "▁Regular", + -12.054335594177246 + ], + [ + "▁salads", + -12.054343223571776 + ], + [ + "▁tracked", + -12.054383277893066 + ], + [ + "hop", + -12.054597854614258 + ], + [ + "▁Leo", + -12.054694175720217 + ], + [ + "▁Scout", + -12.054777145385742 + ], + [ + "▁burger", + -12.05482292175293 + ], + [ + "▁reflective", + -12.054828643798828 + ], + [ + "++", + -12.054859161376951 + ], + [ + "IS", + -12.054911613464355 + ], + [ + "▁op", + -12.054932594299316 + ], + [ + "▁repairing", + -12.0550537109375 + ], + [ + "▁Posted", + -12.055139541625977 + ], + [ + "▁Clay", + -12.05515480041504 + ], + [ + "▁bridal", + -12.055218696594238 + ], + [ + "▁retrieve", + -12.055219650268556 + ], + [ + "▁3000", + -12.05557632446289 + ], + [ + "▁Response", + -12.055584907531738 + ], + [ + "▁provincial", + -12.055614471435549 + ], + [ + "▁caregivers", + -12.055663108825684 + ], + [ + "▁Bon", + -12.055699348449709 + ], + [ + "▁fulfilled", + -12.055708885192873 + ], + [ + "▁realizing", + -12.056011199951172 + ], + [ + "▁halfway", + -12.056058883666992 + ], + [ + "▁RBI", + -12.056156158447266 + ], + [ + "▁socially", + -12.056181907653809 + ], + [ + "▁sculptures", + -12.056194305419922 + ], + [ + "▁Television", + -12.056207656860352 + ], + [ + "▁conclusions", + -12.056219100952148 + ], + [ + "▁Income", + -12.056227684020996 + ], + [ + "▁ROI", + -12.056398391723633 + ], + [ + "▁yacht", + -12.056466102600098 + ], + [ + "▁Foster", + -12.056623458862305 + ], + [ + "▁interval", + -12.05688190460205 + ], + [ + "▁merchants", + -12.056885719299316 + ], + [ + "SC", + -12.057013511657717 + ], + [ + "▁Pain", + -12.05705738067627 + ], + [ + "ned", + -12.057123184204102 + ], + [ + "▁headboard", + -12.057130813598633 + ], + [ + "▁Past", + -12.057202339172363 + ], + [ + "aw", + -12.05726432800293 + ], + [ + "▁motors", + -12.05737590789795 + ], + [ + "99", + -12.057538986206056 + ], + [ + "lit", + -12.057539939880373 + ], + [ + "▁consultancy", + -12.057580947875977 + ], + [ + "▁Sol", + -12.057788848876951 + ], + [ + "▁troubles", + -12.057838439941406 + ], + [ + "▁euro", + -12.057843208312988 + ], + [ + "▁damn", + -12.057875633239746 + ], + [ + "▁travellers", + -12.057994842529297 + ], + [ + "▁remedies", + -12.058165550231934 + ], + [ + "▁spins", + -12.058207511901855 + ], + [ + "▁soundtrack", + 
-12.058353424072266 + ], + [ + "▁stamps", + -12.058481216430664 + ], + [ + "▁Peninsula", + -12.058599472045898 + ], + [ + "▁Sha", + -12.058601379394531 + ], + [ + "stream", + -12.058727264404297 + ], + [ + "▁BS", + -12.05876636505127 + ], + [ + "▁counted", + -12.059056282043455 + ], + [ + "▁announcements", + -12.05948257446289 + ], + [ + "▁unfair", + -12.05967903137207 + ], + [ + "▁fade", + -12.059806823730469 + ], + [ + "▁Honey", + -12.05981159210205 + ], + [ + "-11", + -12.05984115600586 + ], + [ + "Max", + -12.059906959533691 + ], + [ + "bee", + -12.060050964355469 + ], + [ + "▁fittings", + -12.060099601745604 + ], + [ + "▁1992", + -12.060134887695312 + ], + [ + "▁sticker", + -12.060528755187988 + ], + [ + "watch", + -12.060601234436035 + ], + [ + "▁6:30", + -12.060694694519045 + ], + [ + "big", + -12.06070327758789 + ], + [ + "wick", + -12.060827255249023 + ], + [ + "▁wow", + -12.060951232910156 + ], + [ + "rn", + -12.06114101409912 + ], + [ + "▁Machinery", + -12.061262130737305 + ], + [ + "▁advancing", + -12.06171417236328 + ], + [ + "▁Prepare", + -12.06192398071289 + ], + [ + "load", + -12.06214427947998 + ], + [ + "34", + -12.062255859375 + ], + [ + "▁Speaker", + -12.062265396118164 + ], + [ + "AA", + -12.062360763549805 + ], + [ + "take", + -12.06254768371582 + ], + [ + "eth", + -12.062721252441406 + ], + [ + "▁Phillips", + -12.062735557556152 + ], + [ + "▁antenna", + -12.062803268432615 + ], + [ + "▁hopeful", + -12.062865257263184 + ], + [ + "▁hurry", + -12.06290054321289 + ], + [ + "DE", + -12.062981605529783 + ], + [ + "LA", + -12.063024520874023 + ], + [ + "ions", + -12.063084602355955 + ], + [ + "▁rushed", + -12.063226699829102 + ], + [ + "▁wounds", + -12.063234329223633 + ], + [ + "▁Danny", + -12.06337070465088 + ], + [ + "▁bless", + -12.063406944274902 + ], + [ + "▁Physics", + -12.063448905944824 + ], + [ + "▁getaway", + -12.063591003417969 + ], + [ + "▁Buddhist", + -12.063721656799316 + ], + [ + "▁Dogs", + -12.063820838928224 + ], + [ + "▁Chemical", + -12.064056396484377 + ], + [ + "▁Probably", + -12.06406307220459 + ], + [ + "▁Fortune", + -12.064080238342283 + ], + [ + "▁Milwaukee", + -12.06410026550293 + ], + [ + "▁Vincent", + -12.064167022705078 + ], + [ + "▁Centers", + -12.064214706420898 + ], + [ + "▁herb", + -12.064265251159668 + ], + [ + "▁adjusting", + -12.064288139343262 + ], + [ + "▁7-", + -12.064322471618652 + ], + [ + "tis", + -12.0643892288208 + ], + [ + "▁adopting", + -12.064569473266602 + ], + [ + "▁unparalleled", + -12.064682006835938 + ], + [ + "its", + -12.064736366271973 + ], + [ + "▁toxins", + -12.064772605895996 + ], + [ + "▁Landscape", + -12.064783096313477 + ], + [ + "▁Ghost", + -12.0648193359375 + ], + [ + "▁harassment", + -12.064839363098145 + ], + [ + "self", + -12.06487274169922 + ], + [ + "▁faux", + -12.06496810913086 + ], + [ + "▁Fence", + -12.065275192260742 + ], + [ + "▁Sh", + -12.065275192260742 + ], + [ + "▁STEM", + -12.065577507019045 + ], + [ + "▁welcomes", + -12.065759658813477 + ], + [ + "▁chassis", + -12.065871238708496 + ], + [ + "▁compromised", + -12.06588363647461 + ], + [ + "▁Gene", + -12.06590461730957 + ], + [ + "▁curated", + -12.066014289855955 + ], + [ + "▁tribe", + -12.066018104553224 + ], + [ + "▁towers", + -12.066067695617676 + ], + [ + "▁examines", + -12.06609344482422 + ], + [ + "▁metric", + -12.066107749938965 + ], + [ + "▁wasting", + -12.06618881225586 + ], + [ + "▁repository", + -12.06623077392578 + ], + [ + "▁commerce", + -12.06631851196289 + ], + [ + "▁distances", + -12.066362380981444 + ], + [ + "▁congregation", + 
-12.06659984588623 + ], + [ + "▁Guild", + -12.06662368774414 + ], + [ + "▁curry", + -12.066709518432615 + ], + [ + "▁crews", + -12.066728591918944 + ], + [ + "▁lowering", + -12.066766738891602 + ], + [ + "▁wished", + -12.066819190979004 + ], + [ + "▁pedal", + -12.066929817199709 + ], + [ + "▁Hours", + -12.066957473754885 + ], + [ + "▁Adelaide", + -12.066974639892578 + ], + [ + "▁dignity", + -12.06706428527832 + ], + [ + "▁Understanding", + -12.067317962646484 + ], + [ + "lyn", + -12.067343711853027 + ], + [ + "▁Patients", + -12.067367553710938 + ], + [ + "▁Hillary", + -12.067476272583008 + ], + [ + "MT", + -12.067526817321776 + ], + [ + "EU", + -12.067791938781738 + ], + [ + "▁Milk", + -12.068060874938965 + ], + [ + "▁keynote", + -12.068135261535645 + ], + [ + "▁steadily", + -12.068255424499512 + ], + [ + "▁mortality", + -12.068265914916992 + ], + [ + "▁Removal", + -12.0682954788208 + ], + [ + "cia", + -12.068775177001951 + ], + [ + "▁brighter", + -12.068963050842283 + ], + [ + "stick", + -12.069050788879396 + ], + [ + "Prior", + -12.06906795501709 + ], + [ + "▁20,000", + -12.069098472595217 + ], + [ + "oo", + -12.069114685058594 + ], + [ + "▁Brick", + -12.069258689880373 + ], + [ + "▁Accounting", + -12.069308280944824 + ], + [ + "▁flex", + -12.069320678710938 + ], + [ + "▁cooker", + -12.069488525390623 + ], + [ + "▁Marathon", + -12.069499969482422 + ], + [ + "▁distributors", + -12.069513320922852 + ], + [ + "▁Jul", + -12.06972885131836 + ], + [ + "▁urine", + -12.069730758666992 + ], + [ + "▁Newton", + -12.069870948791504 + ], + [ + "▁accomplishments", + -12.069881439208984 + ], + [ + "▁additions", + -12.070239067077637 + ], + [ + "▁onsite", + -12.070481300354004 + ], + [ + "entrepreneurship", + -12.070616722106934 + ], + [ + "▁Claire", + -12.070640563964844 + ], + [ + "OP", + -12.070653915405272 + ], + [ + "▁Lot", + -12.07065486907959 + ], + [ + "Everything", + -12.070905685424805 + ], + [ + "▁mustard", + -12.07113265991211 + ], + [ + "▁Sacramento", + -12.071174621582031 + ], + [ + "▁GOP", + -12.071263313293455 + ], + [ + "▁petrol", + -12.07127571105957 + ], + [ + "▁Coming", + -12.071298599243164 + ], + [ + "▁chin", + -12.071415901184082 + ], + [ + "▁Julia", + -12.07142448425293 + ], + [ + "▁allocation", + -12.071690559387209 + ], + [ + "▁employing", + -12.071741104125977 + ], + [ + "path", + -12.07177448272705 + ], + [ + "▁1.1", + -12.071775436401367 + ], + [ + "▁mercy", + -12.0718412399292 + ], + [ + "▁Dale", + -12.071871757507324 + ], + [ + "▁Complex", + -12.0719633102417 + ], + [ + "▁march", + -12.072036743164062 + ], + [ + "▁membrane", + -12.072360038757324 + ], + [ + "▁forthcoming", + -12.072458267211914 + ], + [ + "▁reinforced", + -12.072731018066406 + ], + [ + "▁bent", + -12.072802543640137 + ], + [ + "▁geographical", + -12.072898864746094 + ], + [ + "▁Wa", + -12.072904586791992 + ], + [ + "▁citizenship", + -12.073076248168944 + ], + [ + "pot", + -12.073110580444336 + ], + [ + "Small", + -12.073431015014648 + ], + [ + "▁arguably", + -12.073772430419922 + ], + [ + "▁excluded", + -12.074074745178224 + ], + [ + "▁reasoning", + -12.074090003967283 + ], + [ + "▁stained", + -12.074104309082031 + ], + [ + "▁Rico", + -12.074111938476562 + ], + [ + "▁cider", + -12.074283599853516 + ], + [ + "sburg", + -12.074398040771484 + ], + [ + "▁pH", + -12.074508666992188 + ], + [ + "▁Networks", + -12.074556350708008 + ], + [ + "▁attracts", + -12.07474136352539 + ], + [ + "▁Keeping", + -12.074743270874023 + ], + [ + "▁Hindi", + -12.07494831085205 + ], + [ + "▁smiles", + -12.07497501373291 + ], + [ + 
"▁Wonder", + -12.075064659118652 + ], + [ + "ali", + -12.075078964233398 + ], + [ + "47", + -12.075258255004885 + ], + [ + "▁stretches", + -12.075409889221191 + ], + [ + "▁Examples", + -12.075458526611328 + ], + [ + "▁oversight", + -12.075803756713867 + ], + [ + "▁Fireplace", + -12.0761079788208 + ], + [ + "Has", + -12.076214790344238 + ], + [ + "lab", + -12.076271057128906 + ], + [ + "▁vpn", + -12.07639503479004 + ], + [ + "▁females", + -12.076401710510254 + ], + [ + "▁2003,", + -12.076512336730955 + ], + [ + "▁Imperial", + -12.07667636871338 + ], + [ + "▁rainy", + -12.076797485351562 + ], + [ + "▁Ag", + -12.077008247375488 + ], + [ + "human", + -12.07714557647705 + ], + [ + "▁NZ", + -12.077259063720703 + ], + [ + "▁taxpayers", + -12.077292442321776 + ], + [ + "▁weighing", + -12.077340126037598 + ], + [ + "▁Painting", + -12.077372550964355 + ], + [ + "▁blowing", + -12.077445030212402 + ], + [ + "▁hedge", + -12.07773780822754 + ], + [ + "▁maturity", + -12.077754974365234 + ], + [ + "▁prosperity", + -12.07785701751709 + ], + [ + "▁Cabinets", + -12.07791519165039 + ], + [ + "▁expressing", + -12.077959060668944 + ], + [ + "GO", + -12.078059196472168 + ], + [ + "▁diagrams", + -12.078083038330078 + ], + [ + "▁impressions", + -12.078116416931152 + ], + [ + "fest", + -12.078118324279783 + ], + [ + "▁preserving", + -12.07823371887207 + ], + [ + "▁tweets", + -12.07826042175293 + ], + [ + "▁parcel", + -12.07843017578125 + ], + [ + "▁Wellness", + -12.07847023010254 + ], + [ + "▁exhausted", + -12.07852268218994 + ], + [ + "▁Outside", + -12.078587532043455 + ], + [ + "▁RS", + -12.078740119934082 + ], + [ + "▁layouts", + -12.07878875732422 + ], + [ + "▁Monitor", + -12.078812599182127 + ], + [ + "▁Danish", + -12.078855514526367 + ], + [ + "▁exhibited", + -12.078863143920898 + ], + [ + "▁Recruitment", + -12.078949928283691 + ], + [ + "▁tasted", + -12.078987121582031 + ], + [ + "▁te", + -12.07906436920166 + ], + [ + "▁Colombia", + -12.079115867614746 + ], + [ + "▁Arctic", + -12.079120635986328 + ], + [ + "▁Banking", + -12.07922077178955 + ], + [ + "▁layered", + -12.07928466796875 + ], + [ + "▁Bor", + -12.0792875289917 + ], + [ + "▁Gr", + -12.079326629638672 + ], + [ + "▁Sounds", + -12.079551696777344 + ], + [ + "▁brushes", + -12.079609870910645 + ], + [ + "900", + -12.079617500305176 + ], + [ + "SS", + -12.079669952392578 + ], + [ + "tr", + -12.079716682434082 + ], + [ + "Question", + -12.079719543457031 + ], + [ + "▁spends", + -12.079751014709473 + ], + [ + "▁surviving", + -12.080255508422852 + ], + [ + "NOTE", + -12.08033561706543 + ], + [ + "▁Seat", + -12.080357551574709 + ], + [ + "Design", + -12.080501556396484 + ], + [ + "▁nasty", + -12.080866813659668 + ], + [ + "▁therapists", + -12.080891609191896 + ], + [ + "▁sounded", + -12.08100414276123 + ], + [ + "▁variants", + -12.081138610839844 + ], + [ + "▁Plate", + -12.081274032592772 + ], + [ + "▁incurred", + -12.081374168395996 + ], + [ + "▁Applied", + -12.081418991088867 + ], + [ + "▁smarter", + -12.08145523071289 + ], + [ + "▁correspondence", + -12.081470489501951 + ], + [ + "▁Ticket", + -12.08155918121338 + ], + [ + "▁Highly", + -12.081646919250488 + ], + [ + "▁2020.", + -12.081656455993652 + ], + [ + "▁scoop", + -12.081759452819824 + ], + [ + "▁Pad", + -12.082104682922363 + ], + [ + "▁Sci", + -12.082113265991213 + ], + [ + "▁Boss", + -12.08227252960205 + ], + [ + "▁Pressure", + -12.08257293701172 + ], + [ + "▁Nathan", + -12.082630157470703 + ], + [ + "Given", + -12.08265209197998 + ], + [ + "▁panoramic", + -12.08267307281494 + ], + [ + "ming", + 
-12.082730293273926 + ], + [ + "▁filming", + -12.08282470703125 + ], + [ + "▁quantum", + -12.082923889160156 + ], + [ + "▁93", + -12.082942008972168 + ], + [ + "▁Ber", + -12.08298397064209 + ], + [ + "▁nano", + -12.082999229431152 + ], + [ + "▁caramel", + -12.083184242248535 + ], + [ + "Food", + -12.083240509033203 + ], + [ + "▁denim", + -12.083330154418944 + ], + [ + "▁Rebecca", + -12.083358764648438 + ], + [ + "▁Autumn", + -12.083389282226562 + ], + [ + "▁128", + -12.083418846130373 + ], + [ + "▁NH", + -12.083491325378418 + ], + [ + "▁cooperative", + -12.083683967590332 + ], + [ + "▁Treasury", + -12.083765983581545 + ], + [ + "▁Catherine", + -12.083831787109377 + ], + [ + "Light", + -12.084002494812012 + ], + [ + "▁Trek", + -12.08477020263672 + ], + [ + "Ra", + -12.084774017333984 + ], + [ + "HR", + -12.084961891174316 + ], + [ + "▁choir", + -12.085089683532717 + ], + [ + "▁fond", + -12.08533763885498 + ], + [ + "Service", + -12.08534812927246 + ], + [ + "▁Install", + -12.085433959960938 + ], + [ + "▁Device", + -12.08547019958496 + ], + [ + "▁Mag", + -12.085787773132324 + ], + [ + "▁Operating", + -12.085810661315918 + ], + [ + "▁Cinema", + -12.086050987243652 + ], + [ + "▁crashes", + -12.086175918579102 + ], + [ + "▁forgive", + -12.086198806762695 + ], + [ + "▁Soccer", + -12.08620548248291 + ], + [ + "▁movers", + -12.086277961730955 + ], + [ + "International", + -12.086338996887209 + ], + [ + "▁Metropolitan", + -12.086527824401855 + ], + [ + "▁$9", + -12.086565971374512 + ], + [ + "▁inbox", + -12.086824417114258 + ], + [ + "▁Spider", + -12.08701229095459 + ], + [ + "ks", + -12.087043762207031 + ], + [ + "▁deceased", + -12.087071418762209 + ], + [ + "▁Introduction", + -12.087198257446287 + ], + [ + "▁1.3", + -12.087390899658203 + ], + [ + "▁pests", + -12.087462425231934 + ], + [ + "▁Monster", + -12.087531089782717 + ], + [ + "▁clause", + -12.087844848632812 + ], + [ + "▁Related", + -12.088027000427246 + ], + [ + "▁Lebanon", + -12.088075637817385 + ], + [ + "▁Bryan", + -12.088133811950684 + ], + [ + "▁Ruby", + -12.088187217712402 + ], + [ + "▁awe", + -12.088290214538574 + ], + [ + "▁demonstrations", + -12.088369369506836 + ], + [ + "DO", + -12.08851718902588 + ], + [ + "▁Driving", + -12.08853244781494 + ], + [ + "▁microphone", + -12.088567733764648 + ], + [ + "▁Eat", + -12.088640213012695 + ], + [ + "▁Hannah", + -12.08891773223877 + ], + [ + "una", + -12.088979721069336 + ], + [ + "▁rep", + -12.089018821716309 + ], + [ + "▁Chemistry", + -12.089125633239746 + ], + [ + "▁geo", + -12.089198112487791 + ], + [ + "ending", + -12.089287757873535 + ], + [ + "▁$1,000", + -12.089591979980469 + ], + [ + "▁Iceland", + -12.089597702026367 + ], + [ + "▁mindful", + -12.089627265930176 + ], + [ + "▁stamped", + -12.089643478393556 + ], + [ + "paced", + -12.08966064453125 + ], + [ + "Whatever", + -12.08973503112793 + ], + [ + "▁synthesis", + -12.089778900146484 + ], + [ + "▁pioneer", + -12.089942932128906 + ], + [ + "▁valves", + -12.08994483947754 + ], + [ + "profile", + -12.089961051940918 + ], + [ + "▁liking", + -12.08997631072998 + ], + [ + "▁noodles", + -12.08998203277588 + ], + [ + "▁autonomous", + -12.09005355834961 + ], + [ + "▁Activity", + -12.090413093566896 + ], + [ + "▁83", + -12.090460777282717 + ], + [ + "▁folded", + -12.090588569641112 + ], + [ + "▁configurations", + -12.09067440032959 + ], + [ + "▁authenticity", + -12.09070110321045 + ], + [ + "▁Cast", + -12.090944290161133 + ], + [ + "▁Ride", + -12.091025352478027 + ], + [ + "▁Ty", + -12.091132164001465 + ], + [ + "▁nickel", + 
-12.091261863708496 + ], + [ + "▁Ci", + -12.091307640075684 + ], + [ + "zone", + -12.091326713562012 + ], + [ + "▁heaters", + -12.09133243560791 + ], + [ + "▁Fuel", + -12.091334342956545 + ], + [ + "▁alliance", + -12.091480255126951 + ], + [ + "▁counterparts", + -12.091645240783691 + ], + [ + "▁Method", + -12.091766357421877 + ], + [ + "▁fa", + -12.091824531555176 + ], + [ + "▁Diet", + -12.091853141784668 + ], + [ + "mes", + -12.09213161468506 + ], + [ + "▁Wiring", + -12.092337608337402 + ], + [ + "▁Eventually", + -12.092397689819336 + ], + [ + "▁Mint", + -12.092470169067385 + ], + [ + "▁sequel", + -12.092479705810549 + ], + [ + "sell", + -12.092482566833496 + ], + [ + "▁arc", + -12.092533111572266 + ], + [ + "▁Independence", + -12.092726707458496 + ], + [ + "▁fry", + -12.092855453491213 + ], + [ + "▁Capitol", + -12.09318733215332 + ], + [ + "▁Minneapolis", + -12.093289375305176 + ], + [ + "▁NW", + -12.093318939208984 + ], + [ + "DR", + -12.093334197998049 + ], + [ + "pack", + -12.093482971191406 + ], + [ + "efficient", + -12.093530654907228 + ], + [ + "ION", + -12.09353733062744 + ], + [ + "▁questioned", + -12.09372615814209 + ], + [ + "▁DB", + -12.093816757202148 + ], + [ + "▁asbestos", + -12.094236373901367 + ], + [ + "▁termination", + -12.09425926208496 + ], + [ + "▁verbal", + -12.09429168701172 + ], + [ + "esh", + -12.09434413909912 + ], + [ + "▁enhancement", + -12.09453296661377 + ], + [ + "▁intervals", + -12.094611167907717 + ], + [ + "▁glitter", + -12.094622611999512 + ], + [ + "▁merit", + -12.094650268554688 + ], + [ + "▁removes", + -12.094820976257324 + ], + [ + "▁Brother", + -12.094868659973145 + ], + [ + "▁Volkswagen", + -12.095171928405762 + ], + [ + "▁Vinyl", + -12.095196723937988 + ], + [ + "Rated", + -12.095503807067873 + ], + [ + "▁rejection", + -12.09553337097168 + ], + [ + "▁Purple", + -12.09559440612793 + ], + [ + "▁spy", + -12.095629692077637 + ], + [ + "▁divisions", + -12.09570026397705 + ], + [ + "Excellent", + -12.09571647644043 + ], + [ + "▁restrict", + -12.095807075500488 + ], + [ + "▁exceeded", + -12.095843315124512 + ], + [ + "▁Menu", + -12.095865249633787 + ], + [ + "▁Tours", + -12.09597110748291 + ], + [ + "▁knocked", + -12.096023559570312 + ], + [ + "▁hugely", + -12.09613800048828 + ], + [ + "▁plaque", + -12.096273422241213 + ], + [ + "▁Contractors", + -12.09644603729248 + ], + [ + "▁97", + -12.096575736999512 + ], + [ + "▁rebounds", + -12.096643447875977 + ], + [ + "▁zoo", + -12.09669017791748 + ], + [ + "▁inability", + -12.09708023071289 + ], + [ + "▁installment", + -12.097105979919434 + ], + [ + "▁Hungary", + -12.097148895263672 + ], + [ + "second", + -12.097210884094238 + ], + [ + "▁embroidery", + -12.097331047058104 + ], + [ + "▁VI", + -12.097349166870115 + ], + [ + "▁problematic", + -12.097397804260254 + ], + [ + "▁welding", + -12.097464561462402 + ], + [ + "▁rational", + -12.097526550292969 + ], + [ + "▁Acid", + -12.097558975219728 + ], + [ + "▁Bake", + -12.097588539123535 + ], + [ + "▁emphasize", + -12.097768783569336 + ], + [ + "▁SL", + -12.097796440124512 + ], + [ + "▁rewarded", + -12.097869873046877 + ], + [ + "▁ambition", + -12.098037719726562 + ], + [ + "AT", + -12.098050117492676 + ], + [ + "▁empowering", + -12.09807014465332 + ], + [ + "News", + -12.09808349609375 + ], + [ + "▁Res", + -12.098310470581056 + ], + [ + "think", + -12.098331451416016 + ], + [ + "aj", + -12.09842300415039 + ], + [ + "▁awhile", + -12.098536491394045 + ], + [ + "▁Hood", + -12.098546981811523 + ], + [ + "▁pretend", + -12.098625183105469 + ], + [ + "ero", + 
-12.098702430725098 + ], + [ + "tree", + -12.098847389221191 + ], + [ + "▁Qatar", + -12.098983764648438 + ], + [ + "▁KS", + -12.099663734436035 + ], + [ + "▁blessings", + -12.099729537963867 + ], + [ + "▁Warriors", + -12.09981632232666 + ], + [ + "bb", + -12.100004196166992 + ], + [ + "-18", + -12.10020923614502 + ], + [ + "des", + -12.100347518920898 + ], + [ + "▁Quebec", + -12.10037326812744 + ], + [ + "▁Safari", + -12.100393295288086 + ], + [ + "▁delegates", + -12.100506782531738 + ], + [ + "pers", + -12.100591659545898 + ], + [ + "▁Traffic", + -12.100645065307615 + ], + [ + "▁Weekly", + -12.10067081451416 + ], + [ + "▁livestock", + -12.10067081451416 + ], + [ + "▁fighters", + -12.100738525390623 + ], + [ + "▁Decorating", + -12.100752830505373 + ], + [ + "▁Led", + -12.100831031799316 + ], + [ + "Recently", + -12.100865364074709 + ], + [ + "▁copied", + -12.100996971130373 + ], + [ + "▁Automotive", + -12.101043701171877 + ], + [ + "▁Moses", + -12.101079940795898 + ], + [ + "▁advertised", + -12.101154327392578 + ], + [ + "▁fights", + -12.101177215576172 + ], + [ + "▁magnet", + -12.101276397705078 + ], + [ + "▁Lions", + -12.101287841796877 + ], + [ + "▁housed", + -12.101336479187012 + ], + [ + "46", + -12.1013765335083 + ], + [ + "▁Manufacturer", + -12.101651191711426 + ], + [ + "▁Hurricane", + -12.101694107055664 + ], + [ + "▁Rooms", + -12.101733207702637 + ], + [ + "▁Sue", + -12.101826667785645 + ], + [ + "▁Bangalore", + -12.101861000061035 + ], + [ + "▁Crime", + -12.102554321289062 + ], + [ + "TA", + -12.10263442993164 + ], + [ + "▁merger", + -12.102653503417969 + ], + [ + "jpg", + -12.102702140808104 + ], + [ + "▁Individual", + -12.102725982666016 + ], + [ + "▁demanded", + -12.102806091308594 + ], + [ + "▁Superior", + -12.102826118469238 + ], + [ + "▁premise", + -12.103139877319336 + ], + [ + "Fire", + -12.103215217590332 + ], + [ + "▁souls", + -12.10329818725586 + ], + [ + "ants", + -12.103384971618652 + ], + [ + "▁Wells", + -12.103615760803224 + ], + [ + "▁TD", + -12.103754997253418 + ], + [ + "Mac", + -12.103806495666504 + ], + [ + "▁Aviation", + -12.103846549987791 + ], + [ + "▁compose", + -12.103873252868652 + ], + [ + "▁palate", + -12.104131698608398 + ], + [ + "▁Doing", + -12.104134559631348 + ], + [ + "▁playoff", + -12.10418701171875 + ], + [ + "▁paddle", + -12.104365348815918 + ], + [ + "▁grounded", + -12.104400634765623 + ], + [ + "▁nutrient", + -12.1044340133667 + ], + [ + "Earlier", + -12.104549407958984 + ], + [ + "▁slope", + -12.104558944702148 + ], + [ + "▁disasters", + -12.104565620422363 + ], + [ + "▁Participants", + -12.104612350463867 + ], + [ + "nu", + -12.104646682739258 + ], + [ + "▁pinterest", + -12.104706764221191 + ], + [ + "▁Bangkok", + -12.1051664352417 + ], + [ + "hen", + -12.105463027954102 + ], + [ + "67", + -12.105649948120115 + ], + [ + "MO", + -12.105751037597656 + ], + [ + "▁Score", + -12.105770111083984 + ], + [ + "▁Pra", + -12.105823516845703 + ], + [ + "▁partnering", + -12.105849266052246 + ], + [ + "Video", + -12.105865478515623 + ], + [ + "▁Kan", + -12.105928421020508 + ], + [ + "▁negotiating", + -12.106172561645508 + ], + [ + "▁crush", + -12.106283187866213 + ], + [ + "▁questioning", + -12.106306076049805 + ], + [ + "force", + -12.106521606445312 + ], + [ + "▁risky", + -12.10690975189209 + ], + [ + "▁exercising", + -12.107176780700684 + ], + [ + "Address", + -12.107287406921388 + ], + [ + "▁imports", + -12.107495307922363 + ], + [ + "-21", + -12.107603073120115 + ], + [ + "enberg", + -12.107641220092772 + ], + [ + "▁Rangers", + -12.107935905456545 
+ ], + [ + "▁Contest", + -12.108101844787598 + ], + [ + "▁Geneva", + -12.10832977294922 + ], + [ + "▁Gates", + -12.108490943908691 + ], + [ + "▁Maple", + -12.108712196350098 + ], + [ + "▁pilots", + -12.108722686767578 + ], + [ + "PL", + -12.10874366760254 + ], + [ + "▁poured", + -12.10881519317627 + ], + [ + "▁credited", + -12.10909366607666 + ], + [ + "rk", + -12.109106063842772 + ], + [ + "▁miracle", + -12.109132766723633 + ], + [ + "▁Victor", + -12.109160423278809 + ], + [ + "▁accumulated", + -12.109190940856934 + ], + [ + "▁bucks", + -12.109272956848145 + ], + [ + "▁Trip", + -12.10927677154541 + ], + [ + "▁altitude", + -12.10929012298584 + ], + [ + "▁rap", + -12.109298706054688 + ], + [ + "▁factories", + -12.109305381774902 + ], + [ + "▁Exhibition", + -12.109334945678713 + ], + [ + "▁fibers", + -12.10942840576172 + ], + [ + "Easy", + -12.109638214111328 + ], + [ + "▁advisable", + -12.109660148620604 + ], + [ + "▁salvation", + -12.109660148620604 + ], + [ + "▁sterling", + -12.109662055969238 + ], + [ + "▁Wow", + -12.109713554382324 + ], + [ + "▁Fans", + -12.109797477722168 + ], + [ + "▁greeted", + -12.10999584197998 + ], + [ + "▁CON", + -12.110007286071776 + ], + [ + "▁Lucas", + -12.110106468200684 + ], + [ + "▁limestone", + -12.110177993774414 + ], + [ + "zy", + -12.110334396362305 + ], + [ + "▁regulators", + -12.110390663146973 + ], + [ + "▁undergoing", + -12.110698699951172 + ], + [ + "▁lo", + -12.110782623291016 + ], + [ + "▁Engineers", + -12.110838890075684 + ], + [ + "▁adventurous", + -12.110904693603516 + ], + [ + "▁Leaders", + -12.111001014709473 + ], + [ + "▁Shadow", + -12.111113548278809 + ], + [ + "▁neighboring", + -12.111113548278809 + ], + [ + "▁sausage", + -12.111186981201172 + ], + [ + "▁desperately", + -12.111244201660156 + ], + [ + "gr", + -12.11146640777588 + ], + [ + "▁hormones", + -12.111610412597656 + ], + [ + "▁directing", + -12.111745834350586 + ], + [ + "▁seated", + -12.111868858337402 + ], + [ + "▁trailers", + -12.111883163452148 + ], + [ + "CT", + -12.111939430236816 + ], + [ + "▁upward", + -12.112122535705566 + ], + [ + "▁crispy", + -12.112133026123049 + ], + [ + "▁EUR", + -12.112494468688965 + ], + [ + "▁convincing", + -12.112530708312988 + ], + [ + "zen", + -12.112590789794922 + ], + [ + "▁Jar", + -12.11263942718506 + ], + [ + "▁notices", + -12.112646102905272 + ], + [ + "▁Above", + -12.112662315368652 + ], + [ + "▁sliced", + -12.112727165222168 + ], + [ + "▁encounters", + -12.112738609313965 + ], + [ + "▁anime", + -12.112784385681152 + ], + [ + "н", + -12.112800598144531 + ], + [ + "▁sweeping", + -12.113287925720217 + ], + [ + "mouth", + -12.113325119018556 + ], + [ + "▁paints", + -12.113593101501465 + ], + [ + "▁playful", + -12.113607406616213 + ], + [ + "▁comprehend", + -12.113862037658691 + ], + [ + "▁dye", + -12.11429500579834 + ], + [ + "▁ecological", + -12.114339828491213 + ], + [ + "▁Ink", + -12.11440086364746 + ], + [ + "▁Assistance", + -12.114419937133787 + ], + [ + "▁brightness", + -12.114492416381836 + ], + [ + "▁adviser", + -12.11455535888672 + ], + [ + "▁nurture", + -12.114673614501951 + ], + [ + "▁collectively", + -12.11469554901123 + ], + [ + "▁Broad", + -12.114818572998049 + ], + [ + "▁colonial", + -12.114984512329102 + ], + [ + "▁Filter", + -12.115066528320312 + ], + [ + "▁1989", + -12.11512851715088 + ], + [ + "▁jackets", + -12.11518383026123 + ], + [ + "▁Butter", + -12.115310668945312 + ], + [ + "bs", + -12.115670204162598 + ], + [ + "▁geared", + -12.115670204162598 + ], + [ + "Talk", + -12.11573314666748 + ], + [ + "▁commodity", + 
-12.115808486938477 + ], + [ + "▁interim", + -12.115949630737305 + ], + [ + "▁transferring", + -12.116028785705566 + ], + [ + "▁allergy", + -12.116232872009276 + ], + [ + "▁decks", + -12.116238594055176 + ], + [ + "▁Operation", + -12.116250991821287 + ], + [ + "▁crap", + -12.116292953491213 + ], + [ + "▁Democrat", + -12.116421699523926 + ], + [ + "▁Harbour", + -12.11652660369873 + ], + [ + "▁Lang", + -12.11652660369873 + ], + [ + "▁Wait", + -12.116562843322754 + ], + [ + "▁succeeded", + -12.11661434173584 + ], + [ + "▁Nicholas", + -12.116669654846191 + ], + [ + "▁brunch", + -12.1170072555542 + ], + [ + "▁declaration", + -12.117444038391112 + ], + [ + "▁deliveries", + -12.117561340332031 + ], + [ + "▁Instant", + -12.117565155029297 + ], + [ + "],", + -12.117587089538574 + ], + [ + "▁OUT", + -12.117645263671877 + ], + [ + "▁Athletic", + -12.1177396774292 + ], + [ + "▁manuals", + -12.117767333984377 + ], + [ + "▁Harper", + -12.117841720581056 + ], + [ + "hot", + -12.117850303649902 + ], + [ + "▁constitution", + -12.11790370941162 + ], + [ + "GA", + -12.117950439453123 + ], + [ + "▁Ambassador", + -12.118002891540527 + ], + [ + "▁peaks", + -12.118020057678224 + ], + [ + "66", + -12.11806297302246 + ], + [ + "▁recurring", + -12.118078231811523 + ], + [ + "▁contributor", + -12.11818790435791 + ], + [ + "ments", + -12.118229866027832 + ], + [ + "▁**", + -12.118243217468262 + ], + [ + "▁Chairs", + -12.118247032165527 + ], + [ + "▁census", + -12.118402481079102 + ], + [ + "▁plasma", + -12.11854648590088 + ], + [ + "Apple", + -12.118621826171877 + ], + [ + "▁Fruit", + -12.118972778320312 + ], + [ + "PT", + -12.11899185180664 + ], + [ + "▁entrepreneurial", + -12.119049072265623 + ], + [ + "▁installer", + -12.119174003601074 + ], + [ + "44", + -12.11933135986328 + ], + [ + "pl", + -12.119400024414062 + ], + [ + "centric", + -12.119640350341797 + ], + [ + "CE", + -12.119672775268556 + ], + [ + "▁infinite", + -12.119731903076172 + ], + [ + "Discover", + -12.119998931884766 + ], + [ + "▁bouquet", + -12.120044708251951 + ], + [ + "▁dam", + -12.120135307312012 + ], + [ + "▁temples", + -12.12015438079834 + ], + [ + "▁pa", + -12.12031078338623 + ], + [ + "▁Tag", + -12.12035846710205 + ], + [ + "▁converter", + -12.120366096496582 + ], + [ + "▁Provider", + -12.12084674835205 + ], + [ + "▁gutter", + -12.120857238769531 + ], + [ + "▁orchestra", + -12.12122917175293 + ], + [ + "▁Bailey", + -12.121582984924316 + ], + [ + "▁Kon", + -12.121853828430176 + ], + [ + "▁Heather", + -12.12208080291748 + ], + [ + "II", + -12.122169494628906 + ], + [ + "▁compilation", + -12.122172355651855 + ], + [ + "▁severity", + -12.12217903137207 + ], + [ + "wire", + -12.122288703918455 + ], + [ + "▁1080", + -12.12247371673584 + ], + [ + "Sun", + -12.12250804901123 + ], + [ + "▁gallons", + -12.122546195983888 + ], + [ + "▁tract", + -12.122601509094238 + ], + [ + "▁jungle", + -12.12260627746582 + ], + [ + "▁а", + -12.1226224899292 + ], + [ + "▁Apparently", + -12.122795104980469 + ], + [ + "▁Massage", + -12.122800827026367 + ], + [ + "▁unauthorized", + -12.122812271118164 + ], + [ + "▁Ship", + -12.122845649719238 + ], + [ + "▁Banks", + -12.12299919128418 + ], + [ + "▁elevator", + -12.12314796447754 + ], + [ + "▁torn", + -12.123196601867676 + ], + [ + "▁feminine", + -12.123213768005373 + ], + [ + "aging", + -12.123300552368164 + ], + [ + "▁males", + -12.123456001281738 + ], + [ + "wi", + -12.123696327209473 + ], + [ + "▁1900", + -12.123726844787598 + ], + [ + "▁Pe", + -12.123734474182127 + ], + [ + "▁ge", + -12.123976707458496 + ], + [ + 
"▁itinerary", + -12.124062538146973 + ], + [ + "▁comprising", + -12.124064445495604 + ], + [ + "▁alleviate", + -12.12409210205078 + ], + [ + "▁75%", + -12.12416172027588 + ], + [ + "▁Isle", + -12.124261856079102 + ], + [ + "Information", + -12.124472618103027 + ], + [ + "▁reps", + -12.12449550628662 + ], + [ + "▁Silicon", + -12.124688148498535 + ], + [ + "▁openly", + -12.124735832214355 + ], + [ + "▁3/4", + -12.12503433227539 + ], + [ + "▁Barack", + -12.125161170959473 + ], + [ + "▁goodbye", + -12.125267028808594 + ], + [ + "▁emergencies", + -12.125327110290527 + ], + [ + "▁Reader", + -12.125358581542969 + ], + [ + "2017", + -12.125458717346191 + ], + [ + "▁shining", + -12.12553882598877 + ], + [ + "Li", + -12.125566482543944 + ], + [ + "▁prisoners", + -12.12576389312744 + ], + [ + "▁em", + -12.125884056091309 + ], + [ + "▁Literature", + -12.126052856445312 + ], + [ + "▁Affordable", + -12.126121520996094 + ], + [ + "▁clues", + -12.126191139221191 + ], + [ + "▁bolts", + -12.126245498657228 + ], + [ + "▁lifts", + -12.126346588134766 + ], + [ + "▁rim", + -12.12644863128662 + ], + [ + "▁expansive", + -12.126588821411133 + ], + [ + "▁Publisher", + -12.126596450805664 + ], + [ + "▁PowerPoint", + -12.126652717590332 + ], + [ + "▁Document", + -12.126718521118164 + ], + [ + "▁Tank", + -12.126761436462402 + ], + [ + "▁insurers", + -12.126790046691896 + ], + [ + "▁Reed", + -12.126898765563965 + ], + [ + "▁Manufacturers", + -12.126914978027344 + ], + [ + "taking", + -12.126983642578123 + ], + [ + "▁DA", + -12.127012252807615 + ], + [ + "▁queue", + -12.12737274169922 + ], + [ + "▁showcases", + -12.127527236938477 + ], + [ + "▁170", + -12.12753963470459 + ], + [ + "▁86", + -12.127571105957031 + ], + [ + "▁SM", + -12.127739906311035 + ], + [ + "▁indirect", + -12.127745628356934 + ], + [ + "▁proposition", + -12.127764701843262 + ], + [ + "▁outlines", + -12.127781867980955 + ], + [ + "▁fragile", + -12.127854347229004 + ], + [ + "▁veterinary", + -12.127854347229004 + ], + [ + "!),", + -12.12796115875244 + ], + [ + "▁nap", + -12.128063201904297 + ], + [ + "▁su", + -12.128203392028809 + ], + [ + "▁Session", + -12.128512382507324 + ], + [ + "En", + -12.1286039352417 + ], + [ + "▁Auckland", + -12.128744125366213 + ], + [ + "▁jointly", + -12.12877368927002 + ], + [ + "Therefore", + -12.128993034362791 + ], + [ + "eh", + -12.129321098327637 + ], + [ + "▁81", + -12.129551887512209 + ], + [ + "▁Midwest", + -12.129586219787598 + ], + [ + "▁Criminal", + -12.129655838012695 + ], + [ + "▁vegetation", + -12.129755020141602 + ], + [ + "▁Kay", + -12.129865646362305 + ], + [ + "▁Accessories", + -12.129981994628906 + ], + [ + "▁Hebrew", + -12.13007640838623 + ], + [ + "learning", + -12.130082130432127 + ], + [ + "▁textured", + -12.130256652832031 + ], + [ + "duty", + -12.130271911621094 + ], + [ + "▁nightmare", + -12.130290031433104 + ], + [ + "content", + -12.13029956817627 + ], + [ + "▁accreditation", + -12.130326271057127 + ], + [ + "▁explosive", + -12.130359649658203 + ], + [ + "SP", + -12.130378723144531 + ], + [ + "▁lettuce", + -12.130390167236328 + ], + [ + "▁diets", + -12.13054084777832 + ], + [ + "▁predictable", + -12.13054656982422 + ], + [ + "sta", + -12.130759239196776 + ], + [ + "Making", + -12.130949020385742 + ], + [ + "his", + -12.131460189819336 + ], + [ + "natural", + -12.131542205810549 + ], + [ + "James", + -12.13160228729248 + ], + [ + "▁gamers", + -12.131797790527344 + ], + [ + "▁eleven", + -12.131906509399414 + ], + [ + "▁scanner", + -12.131935119628906 + ], + [ + "▁Plumbing", + -12.131965637207031 + 
], + [ + "▁squares", + -12.132275581359863 + ], + [ + "▁Medal", + -12.132322311401367 + ], + [ + "elli", + -12.132633209228516 + ], + [ + "suite", + -12.132802963256836 + ], + [ + "▁pictured", + -12.132802963256836 + ], + [ + "rat", + -12.132885932922363 + ], + [ + "▁ko", + -12.133182525634766 + ], + [ + "▁240", + -12.13323974609375 + ], + [ + "▁Indianapolis", + -12.133243560791016 + ], + [ + "▁Sal", + -12.133370399475098 + ], + [ + "▁FROM", + -12.13353443145752 + ], + [ + "▁Testament", + -12.133606910705566 + ], + [ + "▁shutdown", + -12.133666038513184 + ], + [ + "Head", + -12.133817672729492 + ], + [ + "▁Salon", + -12.133866310119627 + ], + [ + "▁blankets", + -12.134057998657228 + ], + [ + "▁Porsche", + -12.134071350097656 + ], + [ + "Ok", + -12.134113311767578 + ], + [ + "▁Monte", + -12.134191513061523 + ], + [ + "▁Sr", + -12.134202003479004 + ], + [ + "▁sprinkle", + -12.134220123291016 + ], + [ + "▁Saints", + -12.134228706359863 + ], + [ + "56", + -12.134295463562012 + ], + [ + "▁Ju", + -12.134334564208984 + ], + [ + "▁CM", + -12.13440990447998 + ], + [ + "▁79", + -12.134503364562988 + ], + [ + "▁Hockey", + -12.134560585021973 + ], + [ + "▁velvet", + -12.134580612182615 + ], + [ + "▁quartz", + -12.134634017944336 + ], + [ + "▁Break", + -12.134773254394531 + ], + [ + "exe", + -12.134889602661133 + ], + [ + "▁meats", + -12.134934425354004 + ], + [ + "▁Domain", + -12.134970664978027 + ], + [ + "▁founders", + -12.135130882263184 + ], + [ + "▁kicking", + -12.135247230529783 + ], + [ + "▁Knowing", + -12.135647773742676 + ], + [ + "▁voter", + -12.135741233825684 + ], + [ + "writing", + -12.13588809967041 + ], + [ + "fast", + -12.136011123657228 + ], + [ + "books", + -12.1361665725708 + ], + [ + "▁butterfly", + -12.13619327545166 + ], + [ + "▁masks", + -12.136242866516112 + ], + [ + "▁Hilton", + -12.13624668121338 + ], + [ + "▁kilometres", + -12.13630485534668 + ], + [ + "EN", + -12.136425971984863 + ], + [ + "▁recognizing", + -12.136756896972656 + ], + [ + "▁Raw", + -12.13681697845459 + ], + [ + "▁Lynn", + -12.13720417022705 + ], + [ + "▁upright", + -12.137330055236816 + ], + [ + "TR", + -12.137343406677246 + ], + [ + "Class", + -12.137392044067385 + ], + [ + "bedroom", + -12.13744831085205 + ], + [ + "▁Charter", + -12.137511253356934 + ], + [ + "▁Oz", + -12.137557983398438 + ], + [ + "▁Interest", + -12.13758659362793 + ], + [ + "▁scarf", + -12.137765884399414 + ], + [ + "▁opposing", + -12.13803768157959 + ], + [ + "▁tempted", + -12.138154983520508 + ], + [ + "▁HAVE", + -12.138394355773926 + ], + [ + "▁Athens", + -12.138405799865724 + ], + [ + "▁Ethernet", + -12.138431549072266 + ], + [ + "▁formulated", + -12.138471603393556 + ], + [ + "lic", + -12.138555526733398 + ], + [ + "▁Representative", + -12.138915061950684 + ], + [ + "▁Mas", + -12.13898468017578 + ], + [ + "▁CAD", + -12.139046669006348 + ], + [ + "▁lungs", + -12.139111518859863 + ], + [ + "ira", + -12.139184951782228 + ], + [ + "rick", + -12.139246940612791 + ], + [ + "▁dividend", + -12.139299392700195 + ], + [ + "▁Wyoming", + -12.139702796936035 + ], + [ + "tek", + -12.13978099822998 + ], + [ + "▁ambitions", + -12.139830589294434 + ], + [ + "▁2001.", + -12.139850616455078 + ], + [ + "▁Issue", + -12.139902114868164 + ], + [ + "▁Songs", + -12.139906883239746 + ], + [ + "▁rushing", + -12.139944076538086 + ], + [ + "▁interfaces", + -12.139952659606934 + ], + [ + "▁Builder", + -12.14006805419922 + ], + [ + "▁OFF", + -12.140103340148926 + ], + [ + "▁onboard", + -12.14023208618164 + ], + [ + "▁Mai", + -12.140284538269045 + ], + [ + 
"▁chasing", + -12.140297889709473 + ], + [ + "▁bubbles", + -12.140416145324709 + ], + [ + "но", + -12.140435218811035 + ], + [ + "vis", + -12.1404447555542 + ], + [ + "▁$12", + -12.141020774841309 + ], + [ + "▁behave", + -12.141145706176758 + ], + [ + "Source", + -12.14128589630127 + ], + [ + "▁Colin", + -12.14138889312744 + ], + [ + "▁headline", + -12.141460418701172 + ], + [ + "▁ba", + -12.141502380371094 + ], + [ + "▁Tommy", + -12.141581535339355 + ], + [ + "▁220", + -12.14187240600586 + ], + [ + "▁cows", + -12.141971588134766 + ], + [ + "▁Sell", + -12.141996383666992 + ], + [ + "▁Diploma", + -12.142239570617676 + ], + [ + "▁cult", + -12.1422700881958 + ], + [ + "▁inappropriate", + -12.142318725585938 + ], + [ + "▁Regulation", + -12.142342567443848 + ], + [ + "▁Advertising", + -12.142451286315918 + ], + [ + "▁Arc", + -12.142516136169434 + ], + [ + "▁Fantasy", + -12.142556190490724 + ], + [ + "IC", + -12.142690658569336 + ], + [ + "▁hackers", + -12.142690658569336 + ], + [ + "▁agrees", + -12.14279079437256 + ], + [ + "▁10:00", + -12.14299201965332 + ], + [ + "/1", + -12.143052101135254 + ], + [ + "▁Gel", + -12.143360137939451 + ], + [ + "ago", + -12.143397331237791 + ], + [ + "▁Nicole", + -12.143479347229004 + ], + [ + "▁classics", + -12.143548965454102 + ], + [ + "▁1987", + -12.14365577697754 + ], + [ + "▁statutory", + -12.143829345703123 + ], + [ + "▁assumption", + -12.143832206726074 + ], + [ + "pet", + -12.144097328186035 + ], + [ + "37", + -12.144371032714844 + ], + [ + "▁Million", + -12.14443016052246 + ], + [ + "▁reconstruction", + -12.144514083862305 + ], + [ + "▁Ralph", + -12.14459228515625 + ], + [ + "▁pairing", + -12.144628524780272 + ], + [ + "49", + -12.14503002166748 + ], + [ + "▁enquiry", + -12.145095825195312 + ], + [ + "▁authorization", + -12.145315170288086 + ], + [ + "▁explicitly", + -12.145316123962402 + ], + [ + "Friday", + -12.14532470703125 + ], + [ + "▁essentials", + -12.145328521728516 + ], + [ + "▁Benefits", + -12.145587921142578 + ], + [ + "▁unhappy", + -12.145769119262695 + ], + [ + "Photo", + -12.145981788635254 + ], + [ + "▁Gran", + -12.146041870117188 + ], + [ + "▁2000,", + -12.14604949951172 + ], + [ + "▁silicone", + -12.1461181640625 + ], + [ + "▁Patio", + -12.146275520324709 + ], + [ + "drive", + -12.146324157714844 + ], + [ + "▁serum", + -12.14641284942627 + ], + [ + "▁friction", + -12.146612167358398 + ], + [ + "▁skincare", + -12.14675235748291 + ], + [ + "lam", + -12.146854400634766 + ], + [ + "▁pristine", + -12.147029876708984 + ], + [ + "▁utilization", + -12.147029876708984 + ], + [ + "▁restoring", + -12.147032737731934 + ], + [ + "▁listeners", + -12.14711570739746 + ], + [ + "Simply", + -12.147292137145996 + ], + [ + "▁refugee", + -12.147366523742676 + ], + [ + "▁Points", + -12.14742374420166 + ], + [ + "▁favourites", + -12.14750862121582 + ], + [ + "Access", + -12.147589683532717 + ], + [ + "▁admissions", + -12.147720336914062 + ], + [ + "▁Olive", + -12.147809982299805 + ], + [ + "▁haul", + -12.147873878479004 + ], + [ + "▁conceptual", + -12.148149490356444 + ], + [ + "▁costing", + -12.148241996765137 + ], + [ + "▁hydro", + -12.1482515335083 + ], + [ + "▁longevity", + -12.148322105407717 + ], + [ + "▁spokesperson", + -12.148323059082031 + ], + [ + "▁marry", + -12.148493766784668 + ], + [ + "▁nerves", + -12.148701667785645 + ], + [ + "▁wraps", + -12.14877986907959 + ], + [ + "great", + -12.148881912231444 + ], + [ + "▁penny", + -12.148951530456545 + ], + [ + "ico", + -12.148953437805176 + ], + [ + "eu", + -12.149106979370115 + ], + [ + "▁Idea", + 
-12.149121284484863 + ], + [ + "▁Pie", + -12.14959716796875 + ], + [ + "▁lodging", + -12.149616241455078 + ], + [ + "▁coordinates", + -12.149630546569824 + ], + [ + "▁oval", + -12.149645805358888 + ], + [ + "buy", + -12.149749755859377 + ], + [ + "▁chatting", + -12.149773597717283 + ], + [ + "▁pressures", + -12.149819374084473 + ], + [ + "▁Cheese", + -12.149857521057127 + ], + [ + "▁Mont", + -12.14986801147461 + ], + [ + "▁Integrated", + -12.149889945983888 + ], + [ + "fat", + -12.149893760681152 + ], + [ + "▁subway", + -12.150099754333496 + ], + [ + "▁Melissa", + -12.15036964416504 + ], + [ + "word", + -12.150408744812012 + ], + [ + "▁Corn", + -12.150577545166016 + ], + [ + "▁spatial", + -12.150607109069824 + ], + [ + "bert", + -12.150729179382324 + ], + [ + "▁Mer", + -12.150769233703612 + ], + [ + "▁thrilling", + -12.150922775268556 + ], + [ + "Turn", + -12.151030540466309 + ], + [ + "▁Recreation", + -12.15147876739502 + ], + [ + "▁sensory", + -12.151508331298828 + ], + [ + "▁sourcing", + -12.151705741882324 + ], + [ + "▁lonely", + -12.151726722717283 + ], + [ + "▁Gil", + -12.151762962341309 + ], + [ + "▁Looks", + -12.151819229125977 + ], + [ + "▁Sites", + -12.151922225952148 + ], + [ + "▁DUI", + -12.152117729187012 + ], + [ + "▁stretched", + -12.152325630187988 + ], + [ + "▁pledge", + -12.152448654174805 + ], + [ + "▁corrupt", + -12.152502059936523 + ], + [ + "▁1.6", + -12.15260887145996 + ], + [ + "▁MORE", + -12.152666091918944 + ], + [ + "▁handsome", + -12.15268325805664 + ], + [ + "print", + -12.152697563171388 + ], + [ + "▁strives", + -12.152782440185549 + ], + [ + "brand", + -12.152840614318848 + ], + [ + "▁openings", + -12.152999877929688 + ], + [ + "▁deductible", + -12.153043746948242 + ], + [ + "▁belts", + -12.153178215026855 + ], + [ + "▁Hughes", + -12.153191566467283 + ], + [ + "clock", + -12.153249740600586 + ], + [ + "▁IF", + -12.153322219848633 + ], + [ + "▁Term", + -12.153324127197266 + ], + [ + "▁funnel", + -12.153327941894531 + ], + [ + "▁Ukrainian", + -12.153427124023438 + ], + [ + "▁blending", + -12.153718948364258 + ], + [ + "Saturday", + -12.153918266296388 + ], + [ + "team", + -12.154013633728027 + ], + [ + "ick", + -12.154154777526855 + ], + [ + "▁aesthetics", + -12.154196739196776 + ], + [ + "UP", + -12.154363632202148 + ], + [ + "Smart", + -12.154616355895996 + ], + [ + "▁fountain", + -12.154845237731934 + ], + [ + "▁gods", + -12.154921531677246 + ], + [ + "izer", + -12.154991149902344 + ], + [ + "List", + -12.155111312866213 + ], + [ + "▁allergic", + -12.155160903930664 + ], + [ + "ami", + -12.155271530151367 + ], + [ + "▁Advisor", + -12.1552734375 + ], + [ + "▁replica", + -12.155338287353516 + ], + [ + "▁Lunch", + -12.15559196472168 + ], + [ + "Name", + -12.15566349029541 + ], + [ + "▁16-", + -12.155742645263672 + ], + [ + "Lo", + -12.155773162841797 + ], + [ + "▁Brook", + -12.155779838562012 + ], + [ + "coming", + -12.155860900878906 + ], + [ + "control", + -12.155861854553224 + ], + [ + "▁XML", + -12.155901908874512 + ], + [ + "▁Creating", + -12.155976295471191 + ], + [ + "▁Chennai", + -12.156143188476562 + ], + [ + "▁flaws", + -12.156244277954102 + ], + [ + "▁Dun", + -12.156378746032717 + ], + [ + "Jan", + -12.156400680541992 + ], + [ + "▁Cove", + -12.156442642211914 + ], + [ + "▁stool", + -12.156478881835938 + ], + [ + "▁inclined", + -12.156644821166992 + ], + [ + "▁Shi", + -12.15665340423584 + ], + [ + "▁prosecution", + -12.156758308410645 + ], + [ + "▁Reference", + -12.156766891479492 + ], + [ + "iva", + -12.156810760498049 + ], + [ + "▁unused", + 
-12.156850814819336 + ], + [ + "▁Economy", + -12.156906127929688 + ], + [ + "2019", + -12.157084465026855 + ], + [ + "▁extracted", + -12.157288551330566 + ], + [ + "ea", + -12.157825469970703 + ], + [ + "▁amino", + -12.157928466796877 + ], + [ + "▁bins", + -12.158302307128906 + ], + [ + "Within", + -12.158365249633787 + ], + [ + "▁bookmark", + -12.158422470092772 + ], + [ + "▁Canal", + -12.158706665039062 + ], + [ + "▁Che", + -12.158817291259766 + ], + [ + "▁contributors", + -12.158985137939451 + ], + [ + "▁Against", + -12.159168243408203 + ], + [ + "▁trophy", + -12.159192085266112 + ], + [ + "▁euros", + -12.159228324890137 + ], + [ + "▁Butler", + -12.159259796142578 + ], + [ + "nia", + -12.159382820129396 + ], + [ + "Again", + -12.159383773803713 + ], + [ + "▁brochure", + -12.15948486328125 + ], + [ + "▁consisted", + -12.159563064575195 + ], + [ + "▁shocking", + -12.159989356994627 + ], + [ + "▁Hu", + -12.159991264343262 + ], + [ + "▁Gateway", + -12.160076141357422 + ], + [ + "▁hourly", + -12.160157203674316 + ], + [ + "▁bump", + -12.16025161743164 + ], + [ + "▁Bros", + -12.160292625427246 + ], + [ + "country", + -12.16061305999756 + ], + [ + "Review", + -12.160755157470703 + ], + [ + "▁patrons", + -12.160945892333984 + ], + [ + "▁collects", + -12.161028861999512 + ], + [ + "except", + -12.161062240600586 + ], + [ + "Update", + -12.161402702331545 + ], + [ + "▁MUST", + -12.161443710327148 + ], + [ + "▁buddy", + -12.16160011291504 + ], + [ + "▁KY", + -12.1618070602417 + ], + [ + "▁BR", + -12.161812782287598 + ], + [ + "▁quantitative", + -12.161892890930176 + ], + [ + "▁$40", + -12.161983489990234 + ], + [ + "uc", + -12.162071228027344 + ], + [ + "▁billions", + -12.162131309509276 + ], + [ + "▁regain", + -12.162282943725586 + ], + [ + "Along", + -12.162498474121094 + ], + [ + "Figure", + -12.16262149810791 + ], + [ + "700", + -12.162663459777832 + ], + [ + "▁remembering", + -12.162737846374512 + ], + [ + "▁occurrence", + -12.162839889526367 + ], + [ + "▁courtyard", + -12.163078308105469 + ], + [ + "-22", + -12.163115501403809 + ], + [ + "▁Pixel", + -12.163163185119627 + ], + [ + "/2", + -12.163289070129396 + ], + [ + "▁arranging", + -12.163297653198242 + ], + [ + "▁teamed", + -12.163407325744627 + ], + [ + "▁Tennis", + -12.163490295410156 + ], + [ + "▁illusion", + -12.163578987121582 + ], + [ + "tron", + -12.164030075073242 + ], + [ + "▁750", + -12.164079666137695 + ], + [ + "Ms", + -12.164246559143066 + ], + [ + "▁Pump", + -12.164369583129885 + ], + [ + "▁particle", + -12.164481163024902 + ], + [ + "▁doll", + -12.164536476135254 + ], + [ + "▁Tub", + -12.16454792022705 + ], + [ + "▁attic", + -12.164628028869627 + ], + [ + "▁tutor", + -12.164671897888184 + ], + [ + "ila", + -12.164731979370115 + ], + [ + "▁forecasts", + -12.164855003356934 + ], + [ + "▁argues", + -12.164899826049805 + ], + [ + "▁reproduction", + -12.165011405944824 + ], + [ + "▁Brooks", + -12.165181159973145 + ], + [ + "▁seals", + -12.165249824523926 + ], + [ + "▁VP", + -12.165275573730469 + ], + [ + "▁duplicate", + -12.165288925170898 + ], + [ + "▁researched", + -12.165383338928224 + ], + [ + "▁endure", + -12.165398597717283 + ], + [ + "▁Brighton", + -12.16543674468994 + ], + [ + "▁distracted", + -12.165507316589355 + ], + [ + "▁Platinum", + -12.16567611694336 + ], + [ + "▁HA", + -12.165864944458008 + ], + [ + "▁Extension", + -12.165903091430664 + ], + [ + "▁sins", + -12.166013717651367 + ], + [ + "▁pig", + -12.166033744812012 + ], + [ + "▁Eden", + -12.166288375854492 + ], + [ + "▁mentors", + -12.16629409790039 + ], + [ + 
"▁sinks", + -12.166425704956056 + ], + [ + "▁Posts", + -12.166692733764648 + ], + [ + "sub", + -12.166715621948242 + ], + [ + "▁weighs", + -12.166935920715332 + ], + [ + "▁susceptible", + -12.16706371307373 + ], + [ + "▁electro", + -12.167165756225586 + ], + [ + "▁uncle", + -12.1671781539917 + ], + [ + "▁commissions", + -12.167232513427734 + ], + [ + "▁nylon", + -12.167243003845217 + ], + [ + "▁dioxide", + -12.167243957519531 + ], + [ + "CH", + -12.16732692718506 + ], + [ + "▁Sp", + -12.16733741760254 + ], + [ + "▁eliminates", + -12.167349815368652 + ], + [ + "▁endurance", + -12.167391777038574 + ], + [ + "▁9:00", + -12.16745376586914 + ], + [ + "office", + -12.167524337768556 + ], + [ + "uck", + -12.167808532714844 + ], + [ + "roll", + -12.167948722839355 + ], + [ + "▁hazard", + -12.168004989624023 + ], + [ + "map", + -12.168257713317873 + ], + [ + "▁Rush", + -12.168292999267578 + ], + [ + "▁ABS", + -12.168354988098145 + ], + [ + "▁empire", + -12.168472290039062 + ], + [ + "▁09", + -12.168756484985352 + ], + [ + "▁911", + -12.168774604797363 + ], + [ + "▁Printable", + -12.168997764587402 + ], + [ + "Press", + -12.16904640197754 + ], + [ + "▁Mile", + -12.169060707092283 + ], + [ + "▁smoked", + -12.169075965881348 + ], + [ + "▁Bend", + -12.169157028198242 + ], + [ + "osa", + -12.16938018798828 + ], + [ + "▁Basketball", + -12.169387817382812 + ], + [ + "▁Forces", + -12.16945743560791 + ], + [ + "Fast", + -12.169509887695312 + ], + [ + "▁Derby", + -12.169774055480955 + ], + [ + "▁Tin", + -12.16981601715088 + ], + [ + "350", + -12.170173645019531 + ], + [ + "▁creditors", + -12.170228958129885 + ], + [ + "▁Regarding", + -12.170402526855469 + ], + [ + "▁contests", + -12.170740127563477 + ], + [ + "▁effortless", + -12.170782089233398 + ], + [ + "▁Marcus", + -12.170899391174316 + ], + [ + "▁turf", + -12.170941352844238 + ], + [ + "▁educator", + -12.17102336883545 + ], + [ + "▁waterfront", + -12.171093940734863 + ], + [ + "▁Kumar", + -12.17129611968994 + ], + [ + "▁bathtub", + -12.171314239501951 + ], + [ + "▁gutters", + -12.171456336975098 + ], + [ + "▁originated", + -12.171457290649414 + ], + [ + "here", + -12.1715669631958 + ], + [ + "▁Sin", + -12.171870231628418 + ], + [ + "tte", + -12.171874046325684 + ], + [ + "▁antibiotics", + -12.171897888183594 + ], + [ + "▁Coupon", + -12.17197322845459 + ], + [ + "▁café", + -12.172080993652344 + ], + [ + "▁halls", + -12.172231674194336 + ], + [ + "▁commence", + -12.172255516052246 + ], + [ + "▁Tele", + -12.17231273651123 + ], + [ + "met", + -12.17239475250244 + ], + [ + "▁kills", + -12.172462463378906 + ], + [ + "▁gloss", + -12.172497749328612 + ], + [ + "▁exploit", + -12.172505378723145 + ], + [ + "▁risen", + -12.172527313232422 + ], + [ + "▁Whilst", + -12.1726655960083 + ], + [ + "▁Martha", + -12.172712326049805 + ], + [ + "▁pricey", + -12.172861099243164 + ], + [ + "▁governed", + -12.172910690307615 + ], + [ + "▁Kirk", + -12.173012733459473 + ], + [ + "▁Blvd", + -12.173184394836426 + ], + [ + "▁denial", + -12.173211097717283 + ], + [ + "▁corrected", + -12.173285484313965 + ], + [ + "▁hay", + -12.173319816589355 + ], + [ + "▁commute", + -12.173361778259276 + ], + [ + "▁juices", + -12.173434257507324 + ], + [ + "▁hype", + -12.173437118530272 + ], + [ + "▁wears", + -12.173462867736816 + ], + [ + "▁tonnes", + -12.173517227172852 + ], + [ + "▁Bradley", + -12.173548698425291 + ], + [ + "▁jars", + -12.17372226715088 + ], + [ + "▁snake", + -12.173749923706056 + ], + [ + "▁reproduce", + -12.173754692077637 + ], + [ + "▁BEST", + -12.17388916015625 + ], + [ + 
"▁Lloyd", + -12.173946380615234 + ], + [ + "▁goodies", + -12.17412281036377 + ], + [ + "▁judicial", + -12.174209594726562 + ], + [ + "▁triggers", + -12.174534797668455 + ], + [ + "FC", + -12.174572944641112 + ], + [ + "▁Logan", + -12.174600601196287 + ], + [ + "eur", + -12.174674987792969 + ], + [ + "ska", + -12.1747407913208 + ], + [ + "lis", + -12.174830436706545 + ], + [ + "▁ninth", + -12.175180435180664 + ], + [ + "▁prolonged", + -12.175378799438477 + ], + [ + "▁equip", + -12.175396919250488 + ], + [ + "call", + -12.175573348999023 + ], + [ + "July", + -12.175651550292969 + ], + [ + "▁fries", + -12.17581558227539 + ], + [ + "▁generosity", + -12.175837516784668 + ], + [ + "ema", + -12.176061630249023 + ], + [ + "ро", + -12.176112174987791 + ], + [ + "Gold", + -12.17617130279541 + ], + [ + "please", + -12.176218032836914 + ], + [ + "▁Immigration", + -12.176435470581056 + ], + [ + "▁settling", + -12.176508903503418 + ], + [ + "▁Boeing", + -12.176605224609377 + ], + [ + "▁announcing", + -12.176724433898926 + ], + [ + "▁straw", + -12.176799774169922 + ], + [ + "Level", + -12.176856994628906 + ], + [ + "▁Shoes", + -12.176891326904297 + ], + [ + "▁Beth", + -12.177083969116213 + ], + [ + "▁patents", + -12.177324295043944 + ], + [ + "▁incomplete", + -12.177678108215332 + ], + [ + "▁entertained", + -12.177694320678713 + ], + [ + "▁Beta", + -12.178162574768066 + ], + [ + "▁Pit", + -12.178248405456545 + ], + [ + "Mix", + -12.178299903869627 + ], + [ + "▁caffeine", + -12.17849826812744 + ], + [ + "▁mitigate", + -12.178563117980955 + ], + [ + "▁invasion", + -12.178702354431152 + ], + [ + "▁princess", + -12.178820610046388 + ], + [ + "▁PT", + -12.17885684967041 + ], + [ + "ening", + -12.178900718688965 + ], + [ + "rise", + -12.178949356079102 + ], + [ + "▁forgiveness", + -12.17898654937744 + ], + [ + "▁purse", + -12.179141998291016 + ], + [ + "ec", + -12.179436683654783 + ], + [ + "▁embraced", + -12.179508209228516 + ], + [ + "▁stellar", + -12.17951488494873 + ], + [ + "ways", + -12.179726600646973 + ], + [ + "▁Engagement", + -12.179808616638184 + ], + [ + "▁leasing", + -12.179835319519045 + ], + [ + "▁Joshua", + -12.179991722106934 + ], + [ + "▁english", + -12.180005073547363 + ], + [ + "▁ambient", + -12.180024147033691 + ], + [ + "CS", + -12.180100440979004 + ], + [ + "▁thrill", + -12.180204391479492 + ], + [ + "bet", + -12.18027400970459 + ], + [ + "▁Error", + -12.180475234985352 + ], + [ + "▁praised", + -12.180517196655272 + ], + [ + "▁hydrogen", + -12.180572509765623 + ], + [ + "▁Paradise", + -12.180724143981934 + ], + [ + "▁brutal", + -12.180890083312988 + ], + [ + "▁lion", + -12.18118953704834 + ], + [ + "written", + -12.181304931640623 + ], + [ + "▁Uploaded", + -12.181351661682127 + ], + [ + "▁organise", + -12.18136501312256 + ], + [ + "▁Gala", + -12.181431770324709 + ], + [ + "▁interacting", + -12.181612968444824 + ], + [ + "eo", + -12.18161964416504 + ], + [ + "something", + -12.181650161743164 + ], + [ + "▁airplane", + -12.181721687316896 + ], + [ + "▁agile", + -12.181854248046877 + ], + [ + "▁Census", + -12.181934356689451 + ], + [ + "Could", + -12.18197536468506 + ], + [ + "IL", + -12.182086944580078 + ], + [ + "▁magnitude", + -12.182170867919922 + ], + [ + "▁ribs", + -12.18222999572754 + ], + [ + "▁delegation", + -12.18250846862793 + ], + [ + "▁coordinating", + -12.182515144348145 + ], + [ + "▁transplant", + -12.182520866394045 + ], + [ + "▁95%", + -12.183218955993652 + ], + [ + "RT", + -12.183221817016602 + ], + [ + "▁bark", + -12.183245658874512 + ], + [ + "▁upwards", + 
-12.183245658874512 + ], + [ + "▁treasures", + -12.183429718017578 + ], + [ + "say", + -12.183443069458008 + ], + [ + "bird", + -12.18348503112793 + ], + [ + "▁awake", + -12.183660507202148 + ], + [ + "Taking", + -12.183679580688477 + ], + [ + "▁dominate", + -12.183743476867676 + ], + [ + "FS", + -12.183826446533203 + ], + [ + "▁drastically", + -12.18385410308838 + ], + [ + "▁stimulating", + -12.183975219726562 + ], + [ + "▁vein", + -12.184004783630373 + ], + [ + "▁Motors", + -12.18415641784668 + ], + [ + "▁Romania", + -12.184165000915527 + ], + [ + "Its", + -12.18429183959961 + ], + [ + "▁tractor", + -12.184508323669434 + ], + [ + "▁Pokemon", + -12.18458080291748 + ], + [ + "▁Pier", + -12.18466567993164 + ], + [ + "▁Lift", + -12.184701919555664 + ], + [ + "▁cardiac", + -12.184805870056152 + ], + [ + "▁Triple", + -12.184890747070312 + ], + [ + "▁miniature", + -12.185041427612305 + ], + [ + "▁Ce", + -12.18513011932373 + ], + [ + "▁inevitably", + -12.185179710388184 + ], + [ + "try", + -12.185189247131348 + ], + [ + "▁mighty", + -12.185230255126951 + ], + [ + "▁unified", + -12.18526840209961 + ], + [ + "SM", + -12.185276985168455 + ], + [ + "had", + -12.185296058654783 + ], + [ + "▁downside", + -12.185314178466797 + ], + [ + "▁Edwards", + -12.18545627593994 + ], + [ + "▁Loss", + -12.1857328414917 + ], + [ + "▁Met", + -12.185970306396484 + ], + [ + "▁longtime", + -12.186281204223633 + ], + [ + "▁catches", + -12.186321258544922 + ], + [ + "▁Joan", + -12.186378479003906 + ], + [ + "▁Loop", + -12.186675071716309 + ], + [ + "▁rifle", + -12.187105178833008 + ], + [ + "▁empathy", + -12.187215805053713 + ], + [ + "41", + -12.187286376953123 + ], + [ + "▁fierce", + -12.187509536743164 + ], + [ + "▁Rosa", + -12.187782287597656 + ], + [ + "▁Hay", + -12.187821388244627 + ], + [ + "GE", + -12.187996864318848 + ], + [ + "▁1.4", + -12.188115119934082 + ], + [ + "▁newborn", + -12.188192367553713 + ], + [ + "▁Shah", + -12.188220024108888 + ], + [ + "▁radius", + -12.18826389312744 + ], + [ + "▁ego", + -12.188365936279297 + ], + [ + "▁artifacts", + -12.188435554504396 + ], + [ + "▁educating", + -12.188538551330566 + ], + [ + "▁Abstract", + -12.188718795776367 + ], + [ + "▁disciples", + -12.18874454498291 + ], + [ + "▁2%", + -12.188819885253906 + ], + [ + "▁greeting", + -12.188820838928224 + ], + [ + "cor", + -12.188883781433104 + ], + [ + "▁manuscript", + -12.189101219177246 + ], + [ + "▁Grande", + -12.189186096191406 + ], + [ + "▁champagne", + -12.189210891723633 + ], + [ + "▁doubts", + -12.189640045166016 + ], + [ + "▁Recent", + -12.18966007232666 + ], + [ + "▁rumors", + -12.189751625061035 + ], + [ + "▁curves", + -12.189774513244627 + ], + [ + "▁decreasing", + -12.189884185791016 + ], + [ + "ole", + -12.189937591552734 + ], + [ + "▁parliament", + -12.189939498901367 + ], + [ + "bro", + -12.190135955810549 + ], + [ + "▁Dynamics", + -12.190322875976562 + ], + [ + "▁stakes", + -12.190422058105469 + ], + [ + "▁antioxidants", + -12.190427780151367 + ], + [ + "▁Markets", + -12.19056797027588 + ], + [ + "▁bang", + -12.190608024597168 + ], + [ + "▁cialis", + -12.19061279296875 + ], + [ + "▁и", + -12.190624237060549 + ], + [ + "▁Methodist", + -12.190679550170898 + ], + [ + "▁tidy", + -12.19074535369873 + ], + [ + "▁1200", + -12.190860748291016 + ], + [ + "▁Carlos", + -12.190865516662598 + ], + [ + "Upon", + -12.190999031066896 + ], + [ + "▁Lan", + -12.19120979309082 + ], + [ + "▁Hip", + -12.191214561462402 + ], + [ + "▁painter", + -12.191329002380373 + ], + [ + "▁basin", + -12.191620826721191 + ], + [ + "▁Answer", + 
-12.19166088104248 + ], + [ + "▁stripes", + -12.191683769226074 + ], + [ + "▁Andrea", + -12.19172191619873 + ], + [ + "▁referendum", + -12.191906929016112 + ], + [ + "▁Hydro", + -12.192011833190918 + ], + [ + "▁navy", + -12.19202995300293 + ], + [ + "▁rocking", + -12.192042350769045 + ], + [ + "▁arises", + -12.192121505737305 + ], + [ + "54", + -12.192204475402832 + ], + [ + "97", + -12.192339897155762 + ], + [ + "▁tyres", + -12.19236183166504 + ], + [ + "▁Ellen", + -12.192477226257324 + ], + [ + "▁cam", + -12.192679405212402 + ], + [ + "▁feat", + -12.192777633666992 + ], + [ + "▁Wu", + -12.19279670715332 + ], + [ + "▁clash", + -12.192903518676758 + ], + [ + "▁Bin", + -12.193017959594728 + ], + [ + "▁oversized", + -12.193120956420898 + ], + [ + "▁Item", + -12.193346977233888 + ], + [ + "▁Rochester", + -12.193352699279783 + ], + [ + "put", + -12.193406105041504 + ], + [ + "▁backwards", + -12.193439483642578 + ], + [ + "Po", + -12.193498611450195 + ], + [ + "▁£3", + -12.19351863861084 + ], + [ + "▁defect", + -12.193743705749512 + ], + [ + "▁brains", + -12.19379425048828 + ], + [ + "▁whip", + -12.19392204284668 + ], + [ + "▁acquisitions", + -12.194283485412598 + ], + [ + "tag", + -12.19430160522461 + ], + [ + "′", + -12.19527816772461 + ], + [ + "fall", + -12.195302963256836 + ], + [ + "▁rugby", + -12.19545841217041 + ], + [ + "▁warnings", + -12.195547103881836 + ], + [ + "economic", + -12.195589065551758 + ], + [ + "▁dawn", + -12.195642471313477 + ], + [ + "▁framing", + -12.195711135864258 + ], + [ + "▁Spot", + -12.19577980041504 + ], + [ + "▁currencies", + -12.195937156677246 + ], + [ + "▁complementary", + -12.196001052856444 + ], + [ + "▁Basement", + -12.196137428283691 + ], + [ + "▁espresso", + -12.19615650177002 + ], + [ + "▁shooter", + -12.196160316467283 + ], + [ + "ven", + -12.196537971496582 + ], + [ + "▁ultrasound", + -12.196672439575195 + ], + [ + "▁Huawei", + -12.196693420410156 + ], + [ + "▁Balance", + -12.196831703186035 + ], + [ + "▁mar", + -12.19700050354004 + ], + [ + "▁processors", + -12.197099685668944 + ], + [ + "▁spec", + -12.197259902954102 + ], + [ + "▁Manor", + -12.19727611541748 + ], + [ + "▁Chuck", + -12.19728183746338 + ], + [ + "rio", + -12.197383880615234 + ], + [ + "▁Pride", + -12.197402000427246 + ], + [ + "▁cooperate", + -12.19742202758789 + ], + [ + "ls", + -12.197579383850098 + ], + [ + "▁Artists", + -12.197635650634766 + ], + [ + "▁explanations", + -12.197650909423828 + ], + [ + "▁Renaissance", + -12.197681427001951 + ], + [ + "▁topping", + -12.197749137878418 + ], + [ + "▁Kin", + -12.197877883911133 + ], + [ + "Na", + -12.19804573059082 + ], + [ + "Stop", + -12.198195457458496 + ], + [ + "▁inning", + -12.198271751403809 + ], + [ + "▁Pros", + -12.198357582092283 + ], + [ + "▁Mercury", + -12.198405265808104 + ], + [ + "▁wired", + -12.198668479919434 + ], + [ + "▁kiss", + -12.198811531066896 + ], + [ + "▁Administrator", + -12.198847770690918 + ], + [ + "▁Chan", + -12.199023246765137 + ], + [ + "tie", + -12.199204444885254 + ], + [ + "▁matte", + -12.19922924041748 + ], + [ + "▁safeguard", + -12.19924545288086 + ], + [ + "▁thermostat", + -12.199270248413086 + ], + [ + "▁2002.", + -12.199285507202148 + ], + [ + "aries", + -12.199585914611816 + ], + [ + "ral", + -12.199599266052246 + ], + [ + "▁groom", + -12.199766159057615 + ], + [ + "▁informational", + -12.200074195861816 + ], + [ + "▁Concert", + -12.200173377990724 + ], + [ + "▁judged", + -12.200203895568848 + ], + [ + "▁Gifts", + -12.200238227844238 + ], + [ + "▁Codes", + -12.200276374816896 + ], + [ + 
"▁1986", + -12.200286865234377 + ], + [ + "▁Charleston", + -12.200332641601562 + ], + [ + "Cut", + -12.200419425964355 + ], + [ + "▁classy", + -12.200624465942385 + ], + [ + "▁Dawn", + -12.200627326965332 + ], + [ + "▁fulfillment", + -12.200674057006836 + ], + [ + "▁negatively", + -12.20078468322754 + ], + [ + "▁adequately", + -12.200807571411133 + ], + [ + "▁pronounced", + -12.200888633728027 + ], + [ + "▁breakthrough", + -12.200895309448242 + ], + [ + "▁insisted", + -12.201074600219728 + ], + [ + "▁approximate", + -12.201162338256836 + ], + [ + "▁depressed", + -12.201510429382324 + ], + [ + "▁doubles", + -12.20155906677246 + ], + [ + "▁Realty", + -12.201580047607422 + ], + [ + "▁Leon", + -12.20167350769043 + ], + [ + "▁Problem", + -12.201722145080566 + ], + [ + "▁cautious", + -12.201896667480469 + ], + [ + "▁identifiable", + -12.202048301696776 + ], + [ + "▁unaware", + -12.202109336853027 + ], + [ + "PDF", + -12.202261924743652 + ], + [ + "RA", + -12.202455520629885 + ], + [ + "▁definite", + -12.202512741088867 + ], + [ + "IF", + -12.20253086090088 + ], + [ + "BR", + -12.202916145324709 + ], + [ + "▁presently", + -12.203120231628418 + ], + [ + "▁Fabric", + -12.203132629394531 + ], + [ + "▁labs", + -12.203218460083008 + ], + [ + "▁boosting", + -12.2032470703125 + ], + [ + "▁garments", + -12.20339012145996 + ], + [ + "▁BUT", + -12.203423500061035 + ], + [ + "▁geometry", + -12.203452110290527 + ], + [ + "▁tactical", + -12.20364475250244 + ], + [ + "▁ripe", + -12.2039213180542 + ], + [ + "▁baseline", + -12.203950881958008 + ], + [ + "▁Nano", + -12.204068183898926 + ], + [ + "▁garment", + -12.204201698303224 + ], + [ + "people", + -12.204336166381836 + ], + [ + "ras", + -12.204410552978516 + ], + [ + "▁parked", + -12.204497337341309 + ], + [ + "▁customization", + -12.204631805419922 + ], + [ + "▁citing", + -12.20473289489746 + ], + [ + "blog", + -12.204751968383787 + ], + [ + "▁sphere", + -12.204789161682127 + ], + [ + "center", + -12.20495891571045 + ], + [ + "▁Quotes", + -12.204965591430664 + ], + [ + "▁Starbucks", + -12.205214500427246 + ], + [ + "▁mornings", + -12.205241203308104 + ], + [ + "▁ballet", + -12.205318450927734 + ], + [ + "▁amended", + -12.205367088317873 + ], + [ + "▁rig", + -12.205387115478516 + ], + [ + "▁Movies", + -12.20539379119873 + ], + [ + "▁resilient", + -12.205470085144045 + ], + [ + "▁Neo", + -12.205474853515623 + ], + [ + "▁50,000", + -12.205618858337402 + ], + [ + "sea", + -12.205631256103516 + ], + [ + "▁Thankfully", + -12.205671310424805 + ], + [ + "▁downtime", + -12.20579433441162 + ], + [ + "far", + -12.20582389831543 + ], + [ + "Okay", + -12.205902099609377 + ], + [ + "▁realization", + -12.206055641174316 + ], + [ + "▁87", + -12.20607852935791 + ], + [ + "▁unhealthy", + -12.206212997436523 + ], + [ + "▁CH", + -12.206225395202637 + ], + [ + "69", + -12.206345558166504 + ], + [ + "▁Poor", + -12.2064208984375 + ], + [ + "▁diploma", + -12.20647430419922 + ], + [ + "▁sp", + -12.206475257873535 + ], + [ + "nar", + -12.206483840942385 + ], + [ + "▁Cab", + -12.206717491149902 + ], + [ + "▁ke", + -12.206896781921388 + ], + [ + "▁gesture", + -12.206963539123535 + ], + [ + "0%", + -12.207040786743164 + ], + [ + "▁lowered", + -12.207067489624023 + ], + [ + "MC", + -12.207087516784668 + ], + [ + "aro", + -12.207098960876465 + ], + [ + "▁7:00", + -12.20726203918457 + ], + [ + "▁HE", + -12.207262992858888 + ], + [ + "▁friendships", + -12.20728874206543 + ], + [ + "breaking", + -12.207324981689451 + ], + [ + "▁Bro", + -12.20737648010254 + ], + [ + "▁merge", + 
-12.207395553588867 + ], + [ + "▁Wish", + -12.207429885864258 + ], + [ + "▁strawberries", + -12.207555770874023 + ], + [ + "▁2000.", + -12.207578659057615 + ], + [ + "▁routinely", + -12.207640647888184 + ], + [ + "▁expenditure", + -12.207656860351562 + ], + [ + "▁masterpiece", + -12.207738876342772 + ], + [ + "▁Edmonton", + -12.207883834838867 + ], + [ + "▁encrypted", + -12.207889556884766 + ], + [ + "▁Fest", + -12.20805549621582 + ], + [ + "▁Divine", + -12.20806121826172 + ], + [ + "▁roofs", + -12.208126068115234 + ], + [ + "course", + -12.208224296569824 + ], + [ + "▁lavender", + -12.208239555358888 + ], + [ + "▁Spencer", + -12.20824146270752 + ], + [ + "▁renovations", + -12.208516120910645 + ], + [ + "98", + -12.20915985107422 + ], + [ + "▁disturbing", + -12.209181785583496 + ], + [ + "::", + -12.209234237670898 + ], + [ + "▁rectangular", + -12.20923900604248 + ], + [ + "▁Gro", + -12.209271430969238 + ], + [ + "▁Nurse", + -12.20928192138672 + ], + [ + "▁Fix", + -12.209287643432615 + ], + [ + "▁ho", + -12.209391593933104 + ], + [ + "▁allowance", + -12.209428787231444 + ], + [ + "▁Election", + -12.209566116333008 + ], + [ + "text", + -12.209593772888184 + ], + [ + "▁Originally", + -12.210147857666016 + ], + [ + "▁Border", + -12.210176467895508 + ], + [ + "▁Host", + -12.210412979125977 + ], + [ + "▁Giants", + -12.210654258728027 + ], + [ + "▁prop", + -12.210704803466797 + ], + [ + "▁2001,", + -12.210722923278809 + ], + [ + "-40", + -12.210735321044922 + ], + [ + "FL", + -12.21101188659668 + ], + [ + "▁relied", + -12.21101188659668 + ], + [ + "▁figuring", + -12.211024284362791 + ], + [ + "rah", + -12.21108055114746 + ], + [ + "▁refine", + -12.211145401000977 + ], + [ + "▁Finish", + -12.211231231689451 + ], + [ + "▁winding", + -12.211448669433594 + ], + [ + "▁aspiring", + -12.211496353149414 + ], + [ + "▁anticipation", + -12.211676597595217 + ], + [ + "▁saucepan", + -12.211737632751465 + ], + [ + "▁Horn", + -12.211862564086914 + ], + [ + "▁immigrant", + -12.21217918395996 + ], + [ + "Nothing", + -12.212263107299805 + ], + [ + "NYSE", + -12.212281227111816 + ], + [ + "▁Egg", + -12.212287902832031 + ], + [ + "▁territories", + -12.21236515045166 + ], + [ + "▁unfamiliar", + -12.21237087249756 + ], + [ + "▁compartment", + -12.212518692016602 + ], + [ + "sia", + -12.21254825592041 + ], + [ + "▁endorsement", + -12.212549209594728 + ], + [ + "mas", + -12.2125883102417 + ], + [ + "▁posed", + -12.212949752807615 + ], + [ + "handed", + -12.212972640991213 + ], + [ + "▁Memphis", + -12.213072776794434 + ], + [ + "▁pens", + -12.21334743499756 + ], + [ + "▁Conditioner", + -12.213397026062012 + ], + [ + "▁Miles", + -12.21351146697998 + ], + [ + "▁1988", + -12.213520050048828 + ], + [ + "▁Counter", + -12.213677406311035 + ], + [ + "▁BIG", + -12.213873863220217 + ], + [ + "▁starters", + -12.213884353637695 + ], + [ + "▁ward", + -12.213951110839844 + ], + [ + "▁Bobby", + -12.213956832885742 + ], + [ + "▁velocity", + -12.214415550231934 + ], + [ + "▁representations", + -12.214526176452637 + ], + [ + "author", + -12.21460247039795 + ], + [ + "▁Anything", + -12.214640617370604 + ], + [ + "▁exempt", + -12.214877128601074 + ], + [ + "▁slave", + -12.214942932128906 + ], + [ + "▁concentrations", + -12.215123176574709 + ], + [ + "▁designation", + -12.215126991271973 + ], + [ + "Ka", + -12.215171813964844 + ], + [ + "▁Berry", + -12.215269088745115 + ], + [ + "▁Verizon", + -12.21527862548828 + ], + [ + "▁strategically", + -12.215293884277344 + ], + [ + "▁outsourcing", + -12.215346336364746 + ], + [ + "-13", + 
-12.215703010559082 + ], + [ + "ux", + -12.215744018554688 + ], + [ + "▁playoffs", + -12.215785026550291 + ], + [ + "▁compost", + -12.215802192687988 + ], + [ + "▁Nigerian", + -12.215835571289062 + ], + [ + "▁compensate", + -12.215907096862791 + ], + [ + "ATION", + -12.216153144836426 + ], + [ + "▁mechanic", + -12.216312408447266 + ], + [ + "ON", + -12.216338157653809 + ], + [ + "▁Fourth", + -12.216666221618652 + ], + [ + "▁brew", + -12.216732025146484 + ], + [ + "▁glazed", + -12.21682357788086 + ], + [ + "▁Char", + -12.2168607711792 + ], + [ + "-14", + -12.217018127441406 + ], + [ + "▁tribal", + -12.217058181762695 + ], + [ + "▁burns", + -12.217085838317873 + ], + [ + "Matt", + -12.21709156036377 + ], + [ + "AI", + -12.217236518859863 + ], + [ + "▁defender", + -12.217302322387695 + ], + [ + "▁Ancient", + -12.217374801635742 + ], + [ + "▁translates", + -12.21762466430664 + ], + [ + "▁clutter", + -12.217745780944824 + ], + [ + "▁IRA", + -12.217805862426758 + ], + [ + "▁Improvement", + -12.21794891357422 + ], + [ + "fin", + -12.217965126037598 + ], + [ + "▁seam", + -12.218023300170898 + ], + [ + "▁SR", + -12.21811294555664 + ], + [ + "▁reinforce", + -12.218202590942385 + ], + [ + "Ho", + -12.21827793121338 + ], + [ + "▁brackets", + -12.218504905700684 + ], + [ + "▁tense", + -12.218817710876465 + ], + [ + "▁palace", + -12.218831062316896 + ], + [ + "▁timer", + -12.219006538391112 + ], + [ + "BE", + -12.219039916992188 + ], + [ + "▁Gal", + -12.21916675567627 + ], + [ + "▁Zip", + -12.219442367553713 + ], + [ + "terdisciplinary", + -12.219443321228027 + ], + [ + "▁sadness", + -12.219536781311035 + ], + [ + "▁fuels", + -12.219627380371094 + ], + [ + "▁Hook", + -12.219679832458496 + ], + [ + "▁$10,000", + -12.21990203857422 + ], + [ + "late", + -12.2199068069458 + ], + [ + "▁screened", + -12.21993637084961 + ], + [ + "▁Montgomery", + -12.220111846923828 + ], + [ + "wo", + -12.220219612121582 + ], + [ + "touch", + -12.220449447631836 + ], + [ + "▁Repeat", + -12.220600128173828 + ], + [ + "▁ne", + -12.220701217651367 + ], + [ + "▁Karl", + -12.220708847045898 + ], + [ + "Experience", + -12.220810890197754 + ], + [ + "ei", + -12.220844268798828 + ], + [ + "av", + -12.220908164978027 + ], + [ + "▁rage", + -12.22100830078125 + ], + [ + "▁1984", + -12.221012115478516 + ], + [ + "active", + -12.221047401428224 + ], + [ + "▁05", + -12.221138954162598 + ], + [ + "▁ACT", + -12.221301078796388 + ], + [ + "▁eclectic", + -12.22136116027832 + ], + [ + "▁unpleasant", + -12.221367835998535 + ], + [ + "▁vacant", + -12.221379280090332 + ], + [ + "▁crawl", + -12.221721649169922 + ], + [ + "▁differential", + -12.221806526184082 + ], + [ + "▁uncovered", + -12.221898078918455 + ], + [ + "▁Sage", + -12.222023963928224 + ], + [ + "▁suburbs", + -12.222036361694336 + ], + [ + "▁sweep", + -12.222394943237305 + ], + [ + ".99", + -12.222596168518066 + ], + [ + "▁temp", + -12.222700119018556 + ], + [ + "▁Calif", + -12.223146438598633 + ], + [ + "▁5000", + -12.223223686218262 + ], + [ + "▁neglected", + -12.22325325012207 + ], + [ + "▁Sur", + -12.22340488433838 + ], + [ + "▁Fla", + -12.223509788513184 + ], + [ + "▁exceeds", + -12.223563194274902 + ], + [ + "Available", + -12.223565101623535 + ], + [ + "▁Legacy", + -12.223569869995115 + ], + [ + "▁Newport", + -12.223660469055176 + ], + [ + "▁effortlessly", + -12.223732948303224 + ], + [ + "▁tuning", + -12.223933219909668 + ], + [ + "▁cleanup", + -12.2239408493042 + ], + [ + "▁Professionals", + -12.22401237487793 + ], + [ + "▁Visitors", + -12.224164962768556 + ], + [ + "▁Devon", + 
-12.224417686462402 + ], + [ + "▁quarry", + -12.224445343017578 + ], + [ + "anna", + -12.22446346282959 + ], + [ + "▁enthusiast", + -12.22447109222412 + ], + [ + "▁Northeast", + -12.224539756774902 + ], + [ + "▁Kai", + -12.224609375 + ], + [ + "▁exceeding", + -12.224649429321287 + ], + [ + "▁cafes", + -12.224660873413086 + ], + [ + "▁coastline", + -12.224698066711426 + ], + [ + "▁sewer", + -12.22469997406006 + ], + [ + "Chris", + -12.224886894226074 + ], + [ + "mine", + -12.225083351135254 + ], + [ + "▁emailed", + -12.225143432617188 + ], + [ + "▁Cy", + -12.225262641906738 + ], + [ + "▁//", + -12.225308418273926 + ], + [ + "▁guessing", + -12.22537326812744 + ], + [ + "▁correlation", + -12.225427627563477 + ], + [ + "▁ye", + -12.225522994995115 + ], + [ + "▁ottoman", + -12.225542068481444 + ], + [ + "▁Fee", + -12.225627899169922 + ], + [ + "▁Guitar", + -12.22565746307373 + ], + [ + "ison", + -12.225679397583008 + ], + [ + "▁Deluxe", + -12.225753784179688 + ], + [ + "▁chiropractic", + -12.225966453552246 + ], + [ + "▁bites", + -12.225998878479004 + ], + [ + "▁irritation", + -12.226239204406738 + ], + [ + "▁waking", + -12.226248741149902 + ], + [ + "▁filmed", + -12.226434707641602 + ], + [ + "▁bait", + -12.226485252380373 + ], + [ + "tail", + -12.22663688659668 + ], + [ + "▁phenomenal", + -12.226703643798828 + ], + [ + "▁Holly", + -12.226739883422852 + ], + [ + "▁Logo", + -12.226740837097168 + ], + [ + "▁popped", + -12.227001190185549 + ], + [ + "▁counselor", + -12.227092742919922 + ], + [ + "▁buffer", + -12.227177619934082 + ], + [ + "▁beast", + -12.227185249328612 + ], + [ + "-17", + -12.227187156677246 + ], + [ + "eta", + -12.22721004486084 + ], + [ + "▁Status", + -12.227231979370115 + ], + [ + "▁neighbours", + -12.227295875549316 + ], + [ + "nic", + -12.22736644744873 + ], + [ + "▁Joel", + -12.227434158325195 + ], + [ + "▁academics", + -12.227445602416992 + ], + [ + "▁Mathematics", + -12.22751808166504 + ], + [ + "▁Gabriel", + -12.227842330932615 + ], + [ + "▁sequences", + -12.22785758972168 + ], + [ + "▁cares", + -12.22797393798828 + ], + [ + "▁stocked", + -12.227993965148926 + ], + [ + "▁hurricane", + -12.22801399230957 + ], + [ + "Price", + -12.228019714355469 + ], + [ + "themed", + -12.228130340576172 + ], + [ + "social", + -12.22818660736084 + ], + [ + "▁judging", + -12.228341102600098 + ], + [ + "▁elephant", + -12.228360176086426 + ], + [ + "▁confidentiality", + -12.228408813476562 + ], + [ + "▁Sadly", + -12.228597640991213 + ], + [ + "▁Essential", + -12.228636741638184 + ], + [ + "▁stall", + -12.228710174560549 + ], + [ + "▁DR", + -12.22878360748291 + ], + [ + "▁climbed", + -12.22884464263916 + ], + [ + "▁Au", + -12.228962898254396 + ], + [ + "▁minus", + -12.229036331176758 + ], + [ + "Support", + -12.229074478149414 + ], + [ + "▁insect", + -12.229111671447754 + ], + [ + "▁knot", + -12.22919464111328 + ], + [ + "Non", + -12.229595184326172 + ], + [ + "▁circuits", + -12.229660987854004 + ], + [ + "▁amazingly", + -12.229687690734863 + ], + [ + "▁preschool", + -12.22972011566162 + ], + [ + "▁VM", + -12.229798316955566 + ], + [ + "▁Ste", + -12.229849815368652 + ], + [ + "▁saddle", + -12.229955673217772 + ], + [ + "▁CNC", + -12.229958534240724 + ], + [ + "▁elimination", + -12.230127334594728 + ], + [ + "▁burgers", + -12.230269432067873 + ], + [ + "▁Walt", + -12.230435371398926 + ], + [ + "▁Rural", + -12.230608940124512 + ], + [ + "▁Rapid", + -12.230643272399902 + ], + [ + "▁Lamb", + -12.23067569732666 + ], + [ + "▁dice", + -12.23081398010254 + ], + [ + "▁monsters", + -12.230815887451172 
+ ], + [ + "▁judgments", + -12.230847358703612 + ], + [ + "▁prepares", + -12.230865478515623 + ], + [ + "▁basil", + -12.231101989746094 + ], + [ + "▁Silk", + -12.231148719787598 + ], + [ + "▁postal", + -12.231173515319824 + ], + [ + "▁armor", + -12.231216430664062 + ], + [ + "Jo", + -12.2314453125 + ], + [ + "▁professors", + -12.231629371643066 + ], + [ + "▁decking", + -12.231663703918455 + ], + [ + "▁devil", + -12.231751441955566 + ], + [ + "▁Angela", + -12.231867790222168 + ], + [ + "▁mines", + -12.232324600219728 + ], + [ + "SU", + -12.23250961303711 + ], + [ + "▁tents", + -12.232687950134276 + ], + [ + "▁recalled", + -12.232695579528809 + ], + [ + "▁Standing", + -12.23293113708496 + ], + [ + "▁wholly", + -12.232962608337402 + ], + [ + "catching", + -12.233033180236816 + ], + [ + "▁Hammer", + -12.233072280883787 + ], + [ + "▁theaters", + -12.23315143585205 + ], + [ + "NE", + -12.23318099975586 + ], + [ + "▁inadequate", + -12.23337459564209 + ], + [ + "▁Movers", + -12.233386039733888 + ], + [ + "▁paved", + -12.2335205078125 + ], + [ + "channel", + -12.23354148864746 + ], + [ + "▁geography", + -12.233783721923828 + ], + [ + "▁traces", + -12.233945846557615 + ], + [ + "October", + -12.234334945678713 + ], + [ + "EC", + -12.234454154968262 + ], + [ + "▁Coal", + -12.234488487243652 + ], + [ + "add", + -12.234622955322266 + ], + [ + "▁foul", + -12.23469352722168 + ], + [ + "India", + -12.23470401763916 + ], + [ + "▁amazon", + -12.234710693359377 + ], + [ + "▁Eddie", + -12.234732627868652 + ], + [ + "▁pores", + -12.234797477722168 + ], + [ + "▁ruins", + -12.234956741333008 + ], + [ + "▁Th", + -12.235100746154783 + ], + [ + "▁Gu", + -12.235114097595217 + ], + [ + "▁8:30", + -12.235130310058594 + ], + [ + "▁Cuban", + -12.23523998260498 + ], + [ + "▁bust", + -12.235433578491213 + ], + [ + "▁bullying", + -12.23543643951416 + ], + [ + "why", + -12.235544204711914 + ], + [ + "▁Representatives", + -12.235568046569824 + ], + [ + "Four", + -12.235685348510742 + ], + [ + "▁elect", + -12.23569679260254 + ], + [ + "▁daylight", + -12.235756874084473 + ], + [ + "▁upfront", + -12.236405372619627 + ], + [ + "▁spider", + -12.236413955688477 + ], + [ + "▁Prague", + -12.236464500427246 + ], + [ + "▁Ladies", + -12.236520767211914 + ], + [ + "13.", + -12.236542701721191 + ], + [ + "▁laughed", + -12.236562728881836 + ], + [ + "▁diligence", + -12.23658561706543 + ], + [ + "▁Apartment", + -12.236648559570312 + ], + [ + "▁Queens", + -12.23668098449707 + ], + [ + "6)", + -12.237114906311035 + ], + [ + "BB", + -12.237153053283691 + ], + [ + "▁cubic", + -12.237180709838867 + ], + [ + "▁transmit", + -12.237199783325195 + ], + [ + "-24", + -12.237370491027832 + ], + [ + "outs", + -12.237507820129396 + ], + [ + "▁Belle", + -12.237922668457031 + ], + [ + "Little", + -12.237945556640623 + ], + [ + "▁о", + -12.23796272277832 + ], + [ + "▁Candidates", + -12.23803997039795 + ], + [ + "▁Registered", + -12.238081932067873 + ], + [ + "VA", + -12.238187789916992 + ], + [ + "▁brokerage", + -12.238337516784668 + ], + [ + "▁Jewelry", + -12.23859977722168 + ], + [ + "360", + -12.238627433776855 + ], + [ + "▁Munich", + -12.238669395446776 + ], + [ + "din", + -12.238872528076172 + ], + [ + "Heat", + -12.23940086364746 + ], + [ + "▁UT", + -12.23952865600586 + ], + [ + "▁photographed", + -12.239544868469238 + ], + [ + "▁unpredictable", + -12.239601135253906 + ], + [ + "▁PlayStation", + -12.23961067199707 + ], + [ + "14.", + -12.239886283874512 + ], + [ + "▁Tigers", + -12.239978790283203 + ], + [ + "▁Casa", + -12.240026473999023 + ], + [ + 
"▁Fiber", + -12.240065574645996 + ], + [ + "▁crore", + -12.240110397338867 + ], + [ + "Trump", + -12.2401704788208 + ], + [ + "▁WP", + -12.240283966064451 + ], + [ + "▁nutritious", + -12.240309715270996 + ], + [ + "pc", + -12.240455627441406 + ], + [ + "▁Cohen", + -12.24046802520752 + ], + [ + "▁(9", + -12.240490913391112 + ], + [ + "▁Copy", + -12.24049186706543 + ], + [ + "▁Male", + -12.240565299987791 + ], + [ + "▁Talent", + -12.240629196166992 + ], + [ + "▁#3", + -12.240697860717772 + ], + [ + "▁1.8", + -12.240782737731934 + ], + [ + "▁Suppliers", + -12.240857124328612 + ], + [ + "▁volatility", + -12.241018295288086 + ], + [ + "▁Contractor", + -12.241026878356934 + ], + [ + "▁rip", + -12.24103832244873 + ], + [ + "▁warn", + -12.241044998168944 + ], + [ + "▁serviced", + -12.24109172821045 + ], + [ + "▁jokes", + -12.241106033325195 + ], + [ + "▁Sanders", + -12.241113662719728 + ], + [ + "▁Holidays", + -12.241128921508787 + ], + [ + "▁invalid", + -12.241299629211426 + ], + [ + "▁Message", + -12.24134922027588 + ], + [ + "▁leaking", + -12.241353034973145 + ], + [ + "▁Cyprus", + -12.241375923156738 + ], + [ + "▁Developer", + -12.241390228271484 + ], + [ + "English", + -12.241578102111816 + ], + [ + "HC", + -12.241579055786133 + ], + [ + "▁indulge", + -12.241622924804688 + ], + [ + "▁chickens", + -12.24162769317627 + ], + [ + "▁knitting", + -12.2417631149292 + ], + [ + "▁infectious", + -12.241836547851562 + ], + [ + "▁2.4", + -12.24185848236084 + ], + [ + "▁doi", + -12.241944313049316 + ], + [ + "▁Gi", + -12.242061614990234 + ], + [ + "▁FX", + -12.24246597290039 + ], + [ + "▁faulty", + -12.242576599121094 + ], + [ + "▁thicker", + -12.242576599121094 + ], + [ + "▁scrub", + -12.24258804321289 + ], + [ + "▁destructive", + -12.242618560791016 + ], + [ + "▁instructional", + -12.242635726928713 + ], + [ + "▁Mechanical", + -12.242748260498049 + ], + [ + "▁intentionally", + -12.242751121520996 + ], + [ + "▁explicit", + -12.24278163909912 + ], + [ + "▁$300", + -12.242786407470703 + ], + [ + "Ar", + -12.242902755737305 + ], + [ + "Women", + -12.243024826049805 + ], + [ + "▁weeds", + -12.243167877197266 + ], + [ + "▁intends", + -12.24332332611084 + ], + [ + "▁waterfall", + -12.24342918395996 + ], + [ + "wind", + -12.243578910827637 + ], + [ + "▁Kill", + -12.243730545043944 + ], + [ + "▁pouring", + -12.243926048278809 + ], + [ + "▁BT", + -12.244175910949709 + ], + [ + "▁Consultant", + -12.244185447692873 + ], + [ + "▁Shield", + -12.244240760803224 + ], + [ + "▁Stuart", + -12.244254112243652 + ], + [ + "▁Uganda", + -12.244288444519045 + ], + [ + "bach", + -12.244304656982422 + ], + [ + "▁Feed", + -12.24434757232666 + ], + [ + "nce", + -12.244351387023926 + ], + [ + "▁Donna", + -12.24436855316162 + ], + [ + "▁Dial", + -12.244413375854492 + ], + [ + "▁cutter", + -12.24454116821289 + ], + [ + "▁descent", + -12.24460792541504 + ], + [ + "▁sweetness", + -12.244613647460938 + ], + [ + "▁rainfall", + -12.24461841583252 + ], + [ + "▁projection", + -12.244661331176758 + ], + [ + "▁minimalist", + -12.244664192199709 + ], + [ + "▁Sector", + -12.244678497314451 + ], + [ + "▁Allah", + -12.24471950531006 + ], + [ + "▁Ah", + -12.244766235351562 + ], + [ + "▁stacked", + -12.244866371154783 + ], + [ + "▁5.5", + -12.244930267333984 + ], + [ + "▁Volunteer", + -12.24510097503662 + ], + [ + "yr", + -12.245208740234377 + ], + [ + "▁sunrise", + -12.245347023010254 + ], + [ + "notch", + -12.245561599731444 + ], + [ + "▁awaiting", + -12.245633125305176 + ], + [ + "▁Packers", + -12.24576473236084 + ], + [ + "press", + 
-12.246238708496094 + ], + [ + "▁editions", + -12.24635410308838 + ], + [ + "▁triangle", + -12.24656867980957 + ], + [ + "▁Lines", + -12.246574401855469 + ], + [ + "▁speculation", + -12.246575355529783 + ], + [ + "▁Brass", + -12.24659538269043 + ], + [ + "▁Spray", + -12.246634483337402 + ], + [ + "health", + -12.24675464630127 + ], + [ + "▁Person", + -12.246776580810549 + ], + [ + "▁needles", + -12.246871948242188 + ], + [ + "▁Rainbow", + -12.246882438659668 + ], + [ + "sel", + -12.246919631958008 + ], + [ + "▁Thunder", + -12.246931076049805 + ], + [ + "AB", + -12.247007369995115 + ], + [ + "▁UPS", + -12.247024536132812 + ], + [ + "▁Cottage", + -12.247142791748049 + ], + [ + "▁detached", + -12.247269630432127 + ], + [ + "▁beforehand", + -12.247315406799316 + ], + [ + "▁continuity", + -12.24731731414795 + ], + [ + "Sunday", + -12.247486114501951 + ], + [ + "▁validate", + -12.247519493103027 + ], + [ + "▁fluffy", + -12.247523307800291 + ], + [ + "▁Selection", + -12.247535705566406 + ], + [ + "▁BI", + -12.247708320617676 + ], + [ + "rod", + -12.247710227966309 + ], + [ + "▁Jamaica", + -12.247835159301758 + ], + [ + "▁infants", + -12.247910499572754 + ], + [ + "▁coping", + -12.248017311096191 + ], + [ + "system", + -12.248087882995604 + ], + [ + "RF", + -12.248098373413086 + ], + [ + "▁buck", + -12.248506546020508 + ], + [ + "▁youngsters", + -12.248652458190918 + ], + [ + "▁wanna", + -12.248662948608398 + ], + [ + "▁Lucky", + -12.248839378356934 + ], + [ + "▁Ze", + -12.248957633972168 + ], + [ + "▁dictionary", + -12.248961448669434 + ], + [ + "▁replicate", + -12.249082565307615 + ], + [ + "▁crab", + -12.24948787689209 + ], + [ + "▁Tro", + -12.249515533447266 + ], + [ + "▁foliage", + -12.24956226348877 + ], + [ + "▁interfere", + -12.249573707580566 + ], + [ + "▁nitrogen", + -12.249741554260254 + ], + [ + "▁Fixed", + -12.249992370605469 + ], + [ + "▁kernel", + -12.250187873840332 + ], + [ + "▁gasoline", + -12.25029182434082 + ], + [ + "▁kindergarten", + -12.25032901763916 + ], + [ + "▁troubled", + -12.25034236907959 + ], + [ + "▁statewide", + -12.250364303588867 + ], + [ + "▁Remodel", + -12.250422477722168 + ], + [ + "▁AMD", + -12.250515937805176 + ], + [ + "▁Brush", + -12.250901222229004 + ], + [ + "2016", + -12.25092601776123 + ], + [ + "▁grape", + -12.250943183898926 + ], + [ + "mini", + -12.251096725463867 + ], + [ + "GP", + -12.251120567321776 + ], + [ + "▁Managers", + -12.251466751098633 + ], + [ + "▁popping", + -12.25148105621338 + ], + [ + "▁8:00", + -12.25148868560791 + ], + [ + "▁manipulation", + -12.25148868560791 + ], + [ + "-01", + -12.251612663269045 + ], + [ + "▁compares", + -12.2516508102417 + ], + [ + "▁quirky", + -12.251747131347656 + ], + [ + "▁sincere", + -12.25200080871582 + ], + [ + "▁cloudy", + -12.252053260803224 + ], + [ + "▁Leaf", + -12.252074241638184 + ], + [ + "▁shutter", + -12.252087593078612 + ], + [ + "▁Indoor", + -12.252137184143066 + ], + [ + "▁advising", + -12.25242805480957 + ], + [ + "▁strawberry", + -12.252432823181152 + ], + [ + "▁Bang", + -12.25246524810791 + ], + [ + "▁impose", + -12.252486228942873 + ], + [ + "▁ash", + -12.252513885498049 + ], + [ + "▁Malta", + -12.252653121948242 + ], + [ + "▁sorting", + -12.252863883972168 + ], + [ + "▁Cart", + -12.252873420715332 + ], + [ + "▁Hindu", + -12.252918243408203 + ], + [ + "▁Founder", + -12.2529296875 + ], + [ + "▁cemetery", + -12.253144264221191 + ], + [ + "controlled", + -12.25323486328125 + ], + [ + "▁turnaround", + -12.253335952758787 + ], + [ + "▁Pipe", + -12.2534761428833 + ], + [ + "▁staffing", + 
-12.253496170043944 + ], + [ + "▁Regard", + -12.2535400390625 + ], + [ + "▁Labs", + -12.253690719604492 + ], + [ + "▁£5", + -12.253913879394531 + ], + [ + "walk", + -12.253918647766112 + ], + [ + "▁icing", + -12.254350662231444 + ], + [ + "▁shoots", + -12.254565238952637 + ], + [ + "▁shells", + -12.254745483398438 + ], + [ + "▁Dev", + -12.254841804504396 + ], + [ + "▁axis", + -12.255044937133787 + ], + [ + "▁Hardware", + -12.25509262084961 + ], + [ + "ious", + -12.255115509033203 + ], + [ + "▁inconvenience", + -12.255258560180664 + ], + [ + "▁stools", + -12.25526523590088 + ], + [ + "▁Sharon", + -12.2553129196167 + ], + [ + "cu", + -12.255362510681152 + ], + [ + "▁Gilbert", + -12.25539779663086 + ], + [ + "▁racism", + -12.255444526672363 + ], + [ + "▁Contract", + -12.255477905273438 + ], + [ + "▁carbs", + -12.255496978759766 + ], + [ + "▁clearer", + -12.255667686462402 + ], + [ + "dr", + -12.255739212036133 + ], + [ + "ug", + -12.255742073059082 + ], + [ + "▁overlook", + -12.255935668945312 + ], + [ + "▁HTC", + -12.25609016418457 + ], + [ + "▁wander", + -12.256244659423828 + ], + [ + "▁singled", + -12.25629711151123 + ], + [ + "aged", + -12.25650119781494 + ], + [ + "▁rocky", + -12.256505966186523 + ], + [ + "▁cosmetics", + -12.256550788879396 + ], + [ + "▁pineapple", + -12.256744384765623 + ], + [ + "6,", + -12.256980895996094 + ], + [ + "▁Apartments", + -12.257067680358888 + ], + [ + "▁believers", + -12.257176399230955 + ], + [ + "park", + -12.257205963134766 + ], + [ + "▁Papers", + -12.257339477539062 + ], + [ + "sale", + -12.257380485534668 + ], + [ + "▁obstacle", + -12.257431983947754 + ], + [ + "zu", + -12.257494926452637 + ], + [ + "Music", + -12.257684707641602 + ], + [ + "▁Pub", + -12.257702827453612 + ], + [ + "0-", + -12.258011817932127 + ], + [ + "▁Chester", + -12.25812530517578 + ], + [ + "▁harbor", + -12.25821018218994 + ], + [ + "▁bakery", + -12.258360862731934 + ], + [ + "BO", + -12.25840950012207 + ], + [ + "tha", + -12.258524894714355 + ], + [ + "▁smoother", + -12.258934020996094 + ], + [ + "▁Arm", + -12.259018898010254 + ], + [ + "▁rails", + -12.259033203125 + ], + [ + "▁cease", + -12.25906753540039 + ], + [ + "HA", + -12.259077072143556 + ], + [ + "▁Scale", + -12.259098052978516 + ], + [ + "▁myth", + -12.259170532226562 + ], + [ + "Family", + -12.259180068969728 + ], + [ + "▁cliff", + -12.259395599365234 + ], + [ + "CR", + -12.259876251220703 + ], + [ + "▁Modi", + -12.25997257232666 + ], + [ + "▁Politics", + -12.260019302368164 + ], + [ + "▁Words", + -12.260183334350586 + ], + [ + "▁devotion", + -12.260271072387695 + ], + [ + "without", + -12.26030731201172 + ], + [ + "▁bizarre", + -12.260345458984377 + ], + [ + "▁perceive", + -12.260400772094728 + ], + [ + "▁rotate", + -12.260427474975586 + ], + [ + "▁racks", + -12.260469436645508 + ], + [ + "▁loses", + -12.260790824890137 + ], + [ + "▁Wonderful", + -12.2609224319458 + ], + [ + "▁Genesis", + -12.26101016998291 + ], + [ + "▁??", + -12.261415481567385 + ], + [ + "▁06", + -12.2614164352417 + ], + [ + "▁Fran", + -12.261479377746582 + ], + [ + "sley", + -12.261494636535645 + ], + [ + "▁charcoal", + -12.26179313659668 + ], + [ + "▁grinder", + -12.261968612670898 + ], + [ + "▁FIFA", + -12.261988639831545 + ], + [ + "▁Rear", + -12.262042045593262 + ], + [ + "▁Norfolk", + -12.262287139892578 + ], + [ + "▁Worldwide", + -12.262314796447754 + ], + [ + "▁silhouette", + -12.262556076049805 + ], + [ + "▁peas", + -12.262635231018066 + ], + [ + "▁Hyper", + -12.262715339660645 + ], + [ + "▁Provide", + -12.262774467468262 + ], + [ + 
"▁visualization", + -12.262774467468262 + ], + [ + "▁efficacy", + -12.26278591156006 + ], + [ + "▁Forward", + -12.26279354095459 + ], + [ + "▁Dame", + -12.262812614440918 + ], + [ + "▁Giving", + -12.262917518615724 + ], + [ + "▁Ellis", + -12.26297664642334 + ], + [ + "▁hey", + -12.263011932373049 + ], + [ + "▁pumping", + -12.26309585571289 + ], + [ + "▁finale", + -12.26313018798828 + ], + [ + "▁Erin", + -12.26318073272705 + ], + [ + "▁NATO", + -12.263227462768556 + ], + [ + "▁Boulevard", + -12.2632417678833 + ], + [ + "▁atmospheric", + -12.2632417678833 + ], + [ + "▁Crushing", + -12.263333320617676 + ], + [ + "▁proceeding", + -12.263335227966309 + ], + [ + "▁Roth", + -12.263574600219728 + ], + [ + "▁watering", + -12.26360034942627 + ], + [ + "▁TB", + -12.26379108428955 + ], + [ + "▁Ly", + -12.263998985290527 + ], + [ + "63", + -12.26406478881836 + ], + [ + "▁glowing", + -12.26454734802246 + ], + [ + "▁hollow", + -12.26455307006836 + ], + [ + "▁Quarter", + -12.264762878417969 + ], + [ + "▁Rubber", + -12.264808654785156 + ], + [ + "(2)", + -12.264811515808104 + ], + [ + "▁booklet", + -12.264869689941406 + ], + [ + "72", + -12.265034675598145 + ], + [ + "Global", + -12.265158653259276 + ], + [ + "Tell", + -12.265182495117188 + ], + [ + "▁flawless", + -12.265182495117188 + ], + [ + "DP", + -12.265195846557615 + ], + [ + "▁youthful", + -12.265283584594728 + ], + [ + "▁Globe", + -12.265336990356444 + ], + [ + "▁KB", + -12.265643119812012 + ], + [ + "▁empowered", + -12.266138076782228 + ], + [ + "▁trader", + -12.266340255737305 + ], + [ + "sis", + -12.266493797302246 + ], + [ + "▁tart", + -12.266775131225586 + ], + [ + "▁slowing", + -12.26681900024414 + ], + [ + "▁fabrication", + -12.266834259033203 + ], + [ + "▁Edit", + -12.266845703125 + ], + [ + "▁observing", + -12.266874313354492 + ], + [ + "▁Stephanie", + -12.26689910888672 + ], + [ + "▁fascinated", + -12.26693344116211 + ], + [ + "▁2002,", + -12.26707363128662 + ], + [ + "▁confronted", + -12.267242431640623 + ], + [ + "▁cabbage", + -12.267600059509276 + ], + [ + "▁immersive", + -12.267600059509276 + ], + [ + "▁Buddha", + -12.267629623413086 + ], + [ + "▁fundraiser", + -12.267748832702637 + ], + [ + "▁Infrastructure", + -12.267783164978027 + ], + [ + "▁tokens", + -12.267800331115724 + ], + [ + "▁partition", + -12.267922401428224 + ], + [ + "▁recycle", + -12.26811981201172 + ], + [ + "▁AWS", + -12.26819133758545 + ], + [ + "eng", + -12.268318176269531 + ], + [ + "▁arguing", + -12.268328666687012 + ], + [ + "▁analyse", + -12.268383979797363 + ], + [ + "▁softer", + -12.268414497375488 + ], + [ + "▁fertility", + -12.268468856811523 + ], + [ + "▁cabins", + -12.268528938293455 + ], + [ + "▁testimonials", + -12.268528938293455 + ], + [ + "▁reimbursement", + -12.268654823303224 + ], + [ + "▁Arsenal", + -12.268792152404783 + ], + [ + "▁appraisal", + -12.26883602142334 + ], + [ + "ili", + -12.268869400024414 + ], + [ + "▁dreaming", + -12.268908500671388 + ], + [ + "▁Cox", + -12.269009590148926 + ], + [ + "▁credible", + -12.26905918121338 + ], + [ + "▁scans", + -12.269161224365234 + ], + [ + "Di", + -12.269164085388184 + ], + [ + "▁embracing", + -12.269244194030762 + ], + [ + "▁publicity", + -12.269280433654783 + ], + [ + "NC", + -12.269579887390137 + ], + [ + "▁AV", + -12.269747734069824 + ], + [ + "▁fundamentals", + -12.269784927368164 + ], + [ + "▁manifest", + -12.269864082336426 + ], + [ + "▁traveler", + -12.27001953125 + ], + [ + "ello", + -12.270025253295898 + ], + [ + "▁intentional", + -12.270042419433594 + ], + [ + "▁sur", + -12.27044677734375 
+ ], + [ + "▁reminiscent", + -12.270520210266112 + ], + [ + "▁Hon", + -12.270626068115234 + ], + [ + "colored", + -12.270627975463867 + ], + [ + "▁gown", + -12.270642280578612 + ], + [ + "▁Louisville", + -12.270719528198242 + ], + [ + "73", + -12.2708101272583 + ], + [ + "▁bricks", + -12.271259307861328 + ], + [ + "nal", + -12.271361351013184 + ], + [ + "▁sealing", + -12.271750450134276 + ], + [ + "▁fines", + -12.271832466125488 + ], + [ + "▁ambassador", + -12.271879196166992 + ], + [ + "2.5", + -12.271909713745115 + ], + [ + "lines", + -12.271937370300291 + ], + [ + "▁mit", + -12.272014617919922 + ], + [ + "▁Aside", + -12.27207374572754 + ], + [ + "▁bypass", + -12.27244472503662 + ], + [ + "WD", + -12.272594451904297 + ], + [ + "▁Seed", + -12.272603034973145 + ], + [ + "▁Offering", + -12.272621154785156 + ], + [ + "▁saturated", + -12.273012161254885 + ], + [ + "▁Connection", + -12.273123741149902 + ], + [ + "▁AAA", + -12.27333927154541 + ], + [ + "dom", + -12.27337646484375 + ], + [ + "▁respectful", + -12.273609161376951 + ], + [ + "▁Indigenous", + -12.273698806762695 + ], + [ + "▁flyer", + -12.27382755279541 + ], + [ + "▁deserved", + -12.273950576782228 + ], + [ + "▁5:", + -12.27405834197998 + ], + [ + "▁insane", + -12.274185180664062 + ], + [ + "▁doses", + -12.27423858642578 + ], + [ + "may", + -12.27425765991211 + ], + [ + "ological", + -12.274271965026855 + ], + [ + "/4", + -12.27464485168457 + ], + [ + "▁Secondary", + -12.274754524230955 + ], + [ + "▁Effects", + -12.27480125427246 + ], + [ + "▁volleyball", + -12.274909973144531 + ], + [ + "▁Inspection", + -12.275057792663574 + ], + [ + "▁Deer", + -12.27517795562744 + ], + [ + "Connect", + -12.275257110595703 + ], + [ + "▁donating", + -12.275382041931152 + ], + [ + "▁Tor", + -12.27542495727539 + ], + [ + "▁ornaments", + -12.275430679321287 + ], + [ + "▁ERP", + -12.275435447692873 + ], + [ + "consuming", + -12.275590896606444 + ], + [ + "act", + -12.27560329437256 + ], + [ + "▁WC", + -12.275630950927734 + ], + [ + "▁Neither", + -12.275646209716797 + ], + [ + "cl", + -12.275694847106934 + ], + [ + "▁railroad", + -12.275866508483888 + ], + [ + "▁staring", + -12.275993347167969 + ], + [ + "gram", + -12.276137351989746 + ], + [ + "▁abandon", + -12.276180267333984 + ], + [ + "▁greet", + -12.276180267333984 + ], + [ + "iel", + -12.276239395141602 + ], + [ + "asi", + -12.276247024536133 + ], + [ + "▁Westminster", + -12.276399612426758 + ], + [ + "Throughout", + -12.27646541595459 + ], + [ + "▁economically", + -12.2764892578125 + ], + [ + "▁Nat", + -12.276602745056152 + ], + [ + "▁tucked", + -12.27664852142334 + ], + [ + "PR", + -12.276716232299805 + ], + [ + "oil", + -12.276724815368652 + ], + [ + "▁beard", + -12.276766777038574 + ], + [ + "▁amendments", + -12.276925086975098 + ], + [ + "▁Stick", + -12.276930809020996 + ], + [ + "▁Bella", + -12.276969909667969 + ], + [ + "▁inspires", + -12.277000427246094 + ], + [ + "▁magnesium", + -12.277006149291992 + ], + [ + "▁rats", + -12.277012825012209 + ], + [ + "▁Belgian", + -12.27712059020996 + ], + [ + "August", + -12.277129173278809 + ], + [ + "▁finals", + -12.277416229248049 + ], + [ + "local", + -12.27742862701416 + ], + [ + "▁loops", + -12.27743911743164 + ], + [ + "▁bore", + -12.277461051940918 + ], + [ + "▁Tables", + -12.277558326721191 + ], + [ + "▁cheat", + -12.277745246887209 + ], + [ + "▁productions", + -12.277830123901367 + ], + [ + "▁Café", + -12.277849197387695 + ], + [ + "▁humour", + -12.277854919433594 + ], + [ + "▁shout", + -12.27787971496582 + ], + [ + "▁naming", + 
-12.277948379516602 + ], + [ + "▁burial", + -12.278080940246582 + ], + [ + "▁albeit", + -12.27823257446289 + ], + [ + "▁sickness", + -12.2783203125 + ], + [ + "▁overlap", + -12.278353691101074 + ], + [ + "Project", + -12.2785005569458 + ], + [ + "▁surplus", + -12.278590202331545 + ], + [ + "▁obsession", + -12.278606414794922 + ], + [ + "▁complaining", + -12.278651237487791 + ], + [ + "TS", + -12.278759956359863 + ], + [ + "▁accomplishment", + -12.27891731262207 + ], + [ + "▁satin", + -12.279020309448242 + ], + [ + "▁mixes", + -12.279026985168455 + ], + [ + "▁buds", + -12.27919101715088 + ], + [ + "news", + -12.27926254272461 + ], + [ + "▁prey", + -12.27936553955078 + ], + [ + "▁Rhode", + -12.279394149780272 + ], + [ + "Cloud", + -12.27940273284912 + ], + [ + "Digital", + -12.280120849609377 + ], + [ + "▁gadget", + -12.280224800109863 + ], + [ + "▁Than", + -12.280268669128418 + ], + [ + "▁privileged", + -12.280299186706545 + ], + [ + "▁expired", + -12.28057098388672 + ], + [ + "▁composing", + -12.280795097351074 + ], + [ + "▁Regulations", + -12.280816078186035 + ], + [ + "▁widget", + -12.280817031860352 + ], + [ + "▁Bears", + -12.28111457824707 + ], + [ + "Much", + -12.281227111816406 + ], + [ + "▁diary", + -12.281571388244627 + ], + [ + "▁Period", + -12.281610488891602 + ], + [ + "▁spill", + -12.281673431396484 + ], + [ + "▁staging", + -12.281673431396484 + ], + [ + "▁lasts", + -12.281877517700195 + ], + [ + "▁scandal", + -12.282124519348145 + ], + [ + "▁standardized", + -12.282265663146973 + ], + [ + "▁Lawyers", + -12.282307624816896 + ], + [ + "▁Pharmacy", + -12.28232765197754 + ], + [ + "▁seekers", + -12.28236198425293 + ], + [ + "▁123", + -12.282403945922852 + ], + [ + "server", + -12.282434463500977 + ], + [ + "▁modelling", + -12.28243923187256 + ], + [ + "▁sh", + -12.282583236694336 + ], + [ + "▁connectors", + -12.28284740447998 + ], + [ + "▁teammates", + -12.28297233581543 + ], + [ + "▁odor", + -12.283072471618652 + ], + [ + "▁Buying", + -12.283080101013184 + ], + [ + "▁politically", + -12.28317928314209 + ], + [ + "east", + -12.283354759216309 + ], + [ + "▁00", + -12.283379554748535 + ], + [ + "▁workmanship", + -12.283403396606444 + ], + [ + "▁mentions", + -12.283440589904783 + ], + [ + "▁Families", + -12.283528327941896 + ], + [ + "▁firewall", + -12.283581733703612 + ], + [ + "▁realities", + -12.283613204956056 + ], + [ + "62", + -12.283641815185549 + ], + [ + "▁Granite", + -12.283785820007324 + ], + [ + "▁Inspired", + -12.283862113952637 + ], + [ + "▁Vanity", + -12.283927917480469 + ], + [ + "▁los", + -12.28404140472412 + ], + [ + "▁swept", + -12.28404426574707 + ], + [ + "▁mediation", + -12.28404712677002 + ], + [ + "▁props", + -12.28420352935791 + ], + [ + "▁Croatia", + -12.284319877624512 + ], + [ + "▁introductory", + -12.2844877243042 + ], + [ + "▁endorsed", + -12.284503936767578 + ], + [ + "▁utterly", + -12.284649848937988 + ], + [ + "▁validity", + -12.284701347351074 + ], + [ + "ready", + -12.284830093383787 + ], + [ + "▁ATM", + -12.284870147705078 + ], + [ + "▁Printer", + -12.284998893737791 + ], + [ + "▁attachments", + -12.285042762756348 + ], + [ + "PD", + -12.285086631774902 + ], + [ + "▁Emirates", + -12.285344123840332 + ], + [ + "▁manipulate", + -12.28536891937256 + ], + [ + "▁Architects", + -12.285530090332031 + ], + [ + "▁Ace", + -12.285707473754885 + ], + [ + "▁hairstyles", + -12.28573226928711 + ], + [ + "▁Civic", + -12.285772323608398 + ], + [ + "▁Admin", + -12.285799980163574 + ], + [ + "▁journeys", + -12.28580379486084 + ], + [ + "public", + -12.285848617553713 
+ ], + [ + "▁disposable", + -12.285970687866213 + ], + [ + "▁cracking", + -12.286051750183104 + ], + [ + "▁singers", + -12.286065101623535 + ], + [ + "purpose", + -12.286144256591797 + ], + [ + "stand", + -12.28632354736328 + ], + [ + "58", + -12.286396980285645 + ], + [ + "▁rods", + -12.286514282226562 + ], + [ + "▁Equity", + -12.286580085754396 + ], + [ + "shire", + -12.28665542602539 + ], + [ + "▁toss", + -12.286665916442873 + ], + [ + "▁cr", + -12.286928176879885 + ], + [ + "Robert", + -12.2869291305542 + ], + [ + "▁filtering", + -12.287113189697266 + ], + [ + "▁comprise", + -12.2871732711792 + ], + [ + "▁braces", + -12.28721523284912 + ], + [ + "▁(4)", + -12.287249565124512 + ], + [ + "▁occupational", + -12.28727912902832 + ], + [ + "pal", + -12.28735637664795 + ], + [ + "▁Chad", + -12.28748893737793 + ], + [ + "▁Defence", + -12.287506103515623 + ], + [ + "hal", + -12.287579536437988 + ], + [ + "▁establishments", + -12.287699699401855 + ], + [ + "▁alpha", + -12.287755966186523 + ], + [ + "▁Nine", + -12.287970542907717 + ], + [ + "▁Dynamic", + -12.28798007965088 + ], + [ + "▁Adding", + -12.288066864013672 + ], + [ + "dal", + -12.28809928894043 + ], + [ + "▁lawsuits", + -12.288171768188477 + ], + [ + "Meet", + -12.28818130493164 + ], + [ + "iti", + -12.288331031799316 + ], + [ + "▁VERY", + -12.288379669189451 + ], + [ + "▁Hybrid", + -12.288455963134766 + ], + [ + "Microsoft", + -12.28846549987793 + ], + [ + "▁newsletters", + -12.288514137268066 + ], + [ + "▁cough", + -12.288578033447266 + ], + [ + "▁hemp", + -12.288652420043944 + ], + [ + "▁Ser", + -12.288735389709473 + ], + [ + "yu", + -12.288783073425291 + ], + [ + "▁Interactive", + -12.288819313049316 + ], + [ + "▁Saw", + -12.288922309875488 + ], + [ + "▁Echo", + -12.288997650146484 + ], + [ + "▁spells", + -12.28906536102295 + ], + [ + "thanks", + -12.28911590576172 + ], + [ + "▁tempting", + -12.289151191711426 + ], + [ + "▁teaspoon", + -12.28932285308838 + ], + [ + "▁Mum", + -12.289542198181152 + ], + [ + "▁sore", + -12.289946556091309 + ], + [ + "▁Fear", + -12.2901029586792 + ], + [ + "▁Tags", + -12.290203094482422 + ], + [ + "Send", + -12.290484428405762 + ], + [ + "▁Panama", + -12.290559768676758 + ], + [ + "▁enduring", + -12.290647506713867 + ], + [ + "▁Kerry", + -12.290727615356444 + ], + [ + "▁Youtube", + -12.290779113769531 + ], + [ + "▁terrorists", + -12.290786743164062 + ], + [ + "▁reuse", + -12.291014671325684 + ], + [ + "▁goat", + -12.291236877441406 + ], + [ + "cycle", + -12.291314125061035 + ], + [ + "inflammatory", + -12.291507720947266 + ], + [ + "▁enamel", + -12.291522979736328 + ], + [ + "▁specialised", + -12.291617393493652 + ], + [ + "▁biblical", + -12.291645050048828 + ], + [ + "▁consequently", + -12.29196548461914 + ], + [ + "WE", + -12.29197120666504 + ], + [ + "▁Ve", + -12.29197120666504 + ], + [ + "▁NT", + -12.29201316833496 + ], + [ + "▁automate", + -12.292128562927246 + ], + [ + "▁constitutes", + -12.292166709899902 + ], + [ + "(1)", + -12.29234504699707 + ], + [ + "▁Dal", + -12.292388916015623 + ], + [ + "▁telecommunicati", + -12.292487144470217 + ], + [ + "▁strains", + -12.292573928833008 + ], + [ + "▁Op", + -12.29257583618164 + ], + [ + "look", + -12.292598724365234 + ], + [ + "▁cube", + -12.292670249938965 + ], + [ + "Starting", + -12.292695999145508 + ], + [ + "▁conspiracy", + -12.292840003967283 + ], + [ + "▁cubes", + -12.2929048538208 + ], + [ + "talk", + -12.29293155670166 + ], + [ + "▁reunion", + -12.293128967285156 + ], + [ + "ituated", + -12.293290138244627 + ], + [ + "▁flashing", + 
-12.293353080749512 + ], + [ + "▁Bennett", + -12.293749809265137 + ], + [ + "▁lacks", + -12.293987274169922 + ], + [ + "▁inspected", + -12.293996810913086 + ], + [ + "▁Aluminum", + -12.29400634765625 + ], + [ + "▁academy", + -12.294133186340332 + ], + [ + "▁FAQ", + -12.294139862060549 + ], + [ + "▁Comfort", + -12.29420280456543 + ], + [ + "▁cosy", + -12.29431438446045 + ], + [ + "Mc", + -12.29433536529541 + ], + [ + "▁Vietnamese", + -12.294556617736816 + ], + [ + "▁concise", + -12.294672012329102 + ], + [ + "▁clusters", + -12.294687271118164 + ], + [ + "packed", + -12.294705390930176 + ], + [ + "▁brewing", + -12.294715881347656 + ], + [ + "▁insure", + -12.29497241973877 + ], + [ + "Cal", + -12.29516887664795 + ], + [ + "▁Sullivan", + -12.295208930969238 + ], + [ + "standard", + -12.29531192779541 + ], + [ + "▁Rica", + -12.295333862304688 + ], + [ + "▁decreases", + -12.295340538024902 + ], + [ + "▁Hyundai", + -12.29537582397461 + ], + [ + "▁experimenting", + -12.295400619506836 + ], + [ + "▁Neuro", + -12.295404434204102 + ], + [ + "▁Colour", + -12.295445442199709 + ], + [ + "▁scalable", + -12.29565715789795 + ], + [ + "▁Chem", + -12.295698165893556 + ], + [ + "▁crashed", + -12.295796394348145 + ], + [ + "▁Liz", + -12.295930862426758 + ], + [ + "smart", + -12.295936584472656 + ], + [ + "▁reign", + -12.296045303344728 + ], + [ + "▁Thinking", + -12.296072959899902 + ], + [ + "▁Chip", + -12.296088218688965 + ], + [ + "▁ounces", + -12.29629611968994 + ], + [ + "▁judgement", + -12.296308517456056 + ], + [ + "▁enhancements", + -12.296360969543455 + ], + [ + "Ta", + -12.296385765075684 + ], + [ + "▁vacancies", + -12.296405792236328 + ], + [ + "▁harbour", + -12.296406745910645 + ], + [ + "▁Clients", + -12.296488761901855 + ], + [ + "▁Turbo", + -12.296590805053713 + ], + [ + "▁Earlier", + -12.296610832214355 + ], + [ + "▁Bug", + -12.296629905700684 + ], + [ + "▁boast", + -12.29670238494873 + ], + [ + "▁snapshot", + -12.296866416931152 + ], + [ + "▁Kerala", + -12.29690170288086 + ], + [ + "▁!!", + -12.296974182128906 + ], + [ + "▁Durham", + -12.296979904174805 + ], + [ + "▁communal", + -12.2969970703125 + ], + [ + "▁Ft", + -12.297029495239258 + ], + [ + "▁Omega", + -12.297096252441406 + ], + [ + "▁po", + -12.297155380249023 + ], + [ + "▁Bu", + -12.29726791381836 + ], + [ + "▁Mae", + -12.297364234924316 + ], + [ + "third", + -12.297560691833496 + ], + [ + "mph", + -12.297684669494627 + ], + [ + "▁counters", + -12.297709465026855 + ], + [ + "▁Rover", + -12.29780387878418 + ], + [ + "▁committing", + -12.297987937927246 + ], + [ + "▁mindfulness", + -12.29803466796875 + ], + [ + "▁medicinal", + -12.298099517822266 + ], + [ + "▁suspects", + -12.298140525817873 + ], + [ + "▁boxing", + -12.298274040222168 + ], + [ + "92", + -12.298293113708496 + ], + [ + "▁sew", + -12.298327445983888 + ], + [ + "▁->", + -12.298378944396973 + ], + [ + "Furthermore", + -12.298433303833008 + ], + [ + "▁Terminal", + -12.298508644104004 + ], + [ + "Off", + -12.298568725585938 + ], + [ + "▁Abbey", + -12.298661231994627 + ], + [ + "Perfect", + -12.298697471618652 + ], + [ + "▁fills", + -12.298698425292969 + ], + [ + "▁calming", + -12.2987060546875 + ], + [ + "hat", + -12.298941612243652 + ], + [ + "▁heck", + -12.29896640777588 + ], + [ + "▁reel", + -12.298995018005373 + ], + [ + "▁Christine", + -12.299066543579102 + ], + [ + "▁Sharp", + -12.299217224121094 + ], + [ + "▁EA", + -12.29922103881836 + ], + [ + "▁usable", + -12.299277305603027 + ], + [ + "Large", + -12.299281120300291 + ], + [ + "▁Noah", + -12.299344062805176 + ], + [ + 
"59", + -12.299463272094728 + ], + [ + "▁thereafter", + -12.29952621459961 + ], + [ + "▁Zo", + -12.299554824829102 + ], + [ + "▁Warm", + -12.300141334533691 + ], + [ + "▁cooled", + -12.300206184387209 + ], + [ + "▁Venezuela", + -12.300317764282228 + ], + [ + "trained", + -12.300437927246094 + ], + [ + "▁drones", + -12.300582885742188 + ], + [ + "▁thereof", + -12.30070972442627 + ], + [ + "▁sponge", + -12.300713539123535 + ], + [ + "▁skating", + -12.30087661743164 + ], + [ + "Sure", + -12.301021575927734 + ], + [ + "▁Tube", + -12.301084518432615 + ], + [ + "▁Marco", + -12.30121898651123 + ], + [ + "dog", + -12.301219940185549 + ], + [ + "▁EV", + -12.301480293273926 + ], + [ + "▁wont", + -12.301609992980955 + ], + [ + "▁Drama", + -12.301668167114258 + ], + [ + "▁bidding", + -12.301698684692385 + ], + [ + "▁organisms", + -12.30178451538086 + ], + [ + "worthy", + -12.302093505859377 + ], + [ + "▁30,000", + -12.30216121673584 + ], + [ + "▁hips", + -12.30217170715332 + ], + [ + "▁orbit", + -12.302322387695312 + ], + [ + "▁Porter", + -12.30234432220459 + ], + [ + "▁aspirations", + -12.302701950073242 + ], + [ + "▁resides", + -12.302712440490724 + ], + [ + "▁Arch", + -12.302953720092772 + ], + [ + "▁sc", + -12.30306911468506 + ], + [ + "▁possessions", + -12.303119659423828 + ], + [ + "▁Kat", + -12.303146362304688 + ], + [ + "▁spanning", + -12.303277969360352 + ], + [ + "▁instructed", + -12.303363800048828 + ], + [ + "DM", + -12.30345344543457 + ], + [ + "▁controllers", + -12.30348300933838 + ], + [ + "▁Aurora", + -12.303499221801758 + ], + [ + "▁psychic", + -12.303695678710938 + ], + [ + "▁mattresses", + -12.30374813079834 + ], + [ + "▁Drew", + -12.303788185119627 + ], + [ + "▁virgin", + -12.303797721862791 + ], + [ + "▁Leonard", + -12.303813934326172 + ], + [ + "▁meanwhile", + -12.30400848388672 + ], + [ + "▁ICT", + -12.304152488708496 + ], + [ + "▁Deals", + -12.30434799194336 + ], + [ + "Personal", + -12.304563522338867 + ], + [ + "ute", + -12.304582595825195 + ], + [ + "▁25-", + -12.3046236038208 + ], + [ + "▁est", + -12.304712295532228 + ], + [ + "▁daytime", + -12.304800033569336 + ], + [ + "▁cultivate", + -12.304839134216309 + ], + [ + "▁adaptive", + -12.304913520812988 + ], + [ + "▁Legend", + -12.304960250854492 + ], + [ + "▁Unlimited", + -12.305176734924316 + ], + [ + "Team", + -12.305200576782228 + ], + [ + "▁evaluations", + -12.305241584777832 + ], + [ + "sar", + -12.305254936218262 + ], + [ + "▁turquoise", + -12.305327415466309 + ], + [ + "▁Myanmar", + -12.305328369140623 + ], + [ + "▁pearl", + -12.305498123168944 + ], + [ + "▁confined", + -12.305508613586426 + ], + [ + "pass", + -12.306034088134766 + ], + [ + "▁MIT", + -12.306175231933594 + ], + [ + "gold", + -12.30622673034668 + ], + [ + "▁Ob", + -12.306229591369627 + ], + [ + "▁Videos", + -12.306333541870115 + ], + [ + "▁geometric", + -12.306529998779297 + ], + [ + "▁discoveries", + -12.306554794311523 + ], + [ + "ites", + -12.306689262390137 + ], + [ + "▁neural", + -12.306719779968262 + ], + [ + "▁Bre", + -12.306782722473145 + ], + [ + "Run", + -12.306957244873049 + ], + [ + "▁Alcohol", + -12.307011604309082 + ], + [ + "▁supporter", + -12.307109832763672 + ], + [ + "▁electronically", + -12.307183265686035 + ], + [ + "▁abide", + -12.307195663452148 + ], + [ + "▁bonded", + -12.307266235351562 + ], + [ + "▁Domestic", + -12.307571411132812 + ], + [ + "7)", + -12.307663917541504 + ], + [ + "▁accumulation", + -12.307709693908691 + ], + [ + "▁collaborating", + -12.307709693908691 + ], + [ + "▁exchanged", + -12.307920455932615 + ], + [ + 
"▁Lawn", + -12.307938575744627 + ], + [ + "▁criminals", + -12.30795669555664 + ], + [ + "▁Protect", + -12.308109283447266 + ], + [ + "▁Length", + -12.308116912841797 + ], + [ + "last", + -12.308231353759766 + ], + [ + "▁Comment", + -12.30831527709961 + ], + [ + "production", + -12.308320999145508 + ], + [ + "▁maritime", + -12.30848503112793 + ], + [ + "IO", + -12.308551788330078 + ], + [ + "▁pans", + -12.308573722839355 + ], + [ + "Was", + -12.308602333068848 + ], + [ + "▁surveyed", + -12.308911323547363 + ], + [ + "▁insurer", + -12.308923721313477 + ], + [ + "▁definitive", + -12.30898380279541 + ], + [ + "▁fool", + -12.30913543701172 + ], + [ + "▁bumper", + -12.309232711791992 + ], + [ + "▁Griffin", + -12.309237480163574 + ], + [ + "▁Angels", + -12.309395790100098 + ], + [ + "lt", + -12.30945873260498 + ], + [ + "Windows", + -12.30950164794922 + ], + [ + "horn", + -12.309514999389648 + ], + [ + "sson", + -12.309677124023438 + ], + [ + "▁naked", + -12.309694290161133 + ], + [ + "▁Cookies", + -12.309825897216797 + ], + [ + "▁Laptop", + -12.310197830200195 + ], + [ + "▁PG", + -12.310298919677734 + ], + [ + "▁capsule", + -12.310314178466797 + ], + [ + "▁cupcakes", + -12.31039047241211 + ], + [ + "▁brushed", + -12.310447692871094 + ], + [ + "▁Guarantee", + -12.310643196105955 + ], + [ + "▁Extract", + -12.31064510345459 + ], + [ + "▁victories", + -12.310751914978027 + ], + [ + "▁Mama", + -12.310813903808594 + ], + [ + "▁Kil", + -12.310845375061035 + ], + [ + "▁(12", + -12.310949325561523 + ], + [ + "▁Barnes", + -12.311050415039062 + ], + [ + "▁Average", + -12.311161994934082 + ], + [ + "▁Productions", + -12.31136989593506 + ], + [ + "▁broadly", + -12.311551094055176 + ], + [ + "etc", + -12.311639785766602 + ], + [ + "▁Friend", + -12.311639785766602 + ], + [ + "▁Offers", + -12.311708450317385 + ], + [ + "▁poles", + -12.311739921569824 + ], + [ + "ui", + -12.311841011047363 + ], + [ + "▁Quest", + -12.312013626098633 + ], + [ + "▁BO", + -12.312078475952148 + ], + [ + "000", + -12.31209945678711 + ], + [ + "▁Scan", + -12.312166213989258 + ], + [ + "▁Flex", + -12.3121919631958 + ], + [ + "▁scare", + -12.312268257141112 + ], + [ + "▁gases", + -12.312275886535645 + ], + [ + "normal", + -12.31227970123291 + ], + [ + "Site", + -12.31247615814209 + ], + [ + "▁Luis", + -12.312554359436035 + ], + [ + "▁spans", + -12.312576293945312 + ], + [ + "▁Dylan", + -12.312614440917969 + ], + [ + "▁strokes", + -12.312623023986816 + ], + [ + "▁regulator", + -12.31263542175293 + ], + [ + "▁infamous", + -12.312644004821776 + ], + [ + "▁sparkle", + -12.312654495239258 + ], + [ + "energy", + -12.312832832336426 + ], + [ + "▁105", + -12.312949180603027 + ], + [ + "Consider", + -12.312981605529783 + ], + [ + "84", + -12.312987327575684 + ], + [ + "▁Ensure", + -12.313074111938477 + ], + [ + "▁Archive", + -12.313121795654297 + ], + [ + "Ah", + -12.31314468383789 + ], + [ + "▁styled", + -12.313369750976562 + ], + [ + "▁tribes", + -12.31344985961914 + ], + [ + "▁hunters", + -12.31350326538086 + ], + [ + "aga", + -12.31350803375244 + ], + [ + "▁insist", + -12.313714027404783 + ], + [ + "▁liquor", + -12.313783645629885 + ], + [ + "▁declining", + -12.31379222869873 + ], + [ + "▁Ale", + -12.314162254333496 + ], + [ + "▁australia", + -12.314349174499512 + ], + [ + "▁benches", + -12.31442928314209 + ], + [ + "East", + -12.314440727233888 + ], + [ + "▁violin", + -12.314480781555176 + ], + [ + "▁4,000", + -12.314640045166016 + ], + [ + "▁inherited", + -12.314719200134276 + ], + [ + "Bio", + -12.314833641052246 + ], + [ + 
"▁similarities", + -12.314841270446776 + ], + [ + "▁Ru", + -12.314916610717772 + ], + [ + "▁imply", + -12.31499481201172 + ], + [ + "▁plush", + -12.315150260925291 + ], + [ + "▁Annie", + -12.315156936645508 + ], + [ + "▁sounding", + -12.3153076171875 + ], + [ + "▁rubbish", + -12.315317153930664 + ], + [ + "fully", + -12.315342903137209 + ], + [ + "▁defective", + -12.31544017791748 + ], + [ + "▁RA", + -12.315492630004885 + ], + [ + "ep", + -12.31554126739502 + ], + [ + "▁Windsor", + -12.31555461883545 + ], + [ + "▁EST", + -12.315601348876951 + ], + [ + "▁visuals", + -12.315624237060549 + ], + [ + "usually", + -12.31563663482666 + ], + [ + "Old", + -12.315666198730469 + ], + [ + "▁circumstance", + -12.315984725952148 + ], + [ + "▁southwest", + -12.316020011901855 + ], + [ + "▁wandering", + -12.316620826721191 + ], + [ + "▁commercially", + -12.316739082336426 + ], + [ + "▁Critical", + -12.316936492919922 + ], + [ + "▁wifi", + -12.316973686218262 + ], + [ + "▁Rad", + -12.316981315612791 + ], + [ + "▁symbolic", + -12.317155838012695 + ], + [ + "▁ensured", + -12.317173957824709 + ], + [ + "▁6:00", + -12.317254066467283 + ], + [ + "▁scratches", + -12.317317008972168 + ], + [ + "▁Awareness", + -12.31735134124756 + ], + [ + "▁Agricultural", + -12.3173828125 + ], + [ + "▁Administrative", + -12.317419052124023 + ], + [ + "▁understandable", + -12.317486763000488 + ], + [ + "▁Graphic", + -12.317516326904297 + ], + [ + "▁migrants", + -12.317516326904297 + ], + [ + "▁Glenn", + -12.317586898803713 + ], + [ + "▁pudding", + -12.317611694335938 + ], + [ + "▁motto", + -12.317686080932615 + ], + [ + "▁chunks", + -12.317815780639648 + ], + [ + "▁deliberately", + -12.317828178405762 + ], + [ + "▁Fully", + -12.317917823791504 + ], + [ + "fu", + -12.31815242767334 + ], + [ + "Test", + -12.318331718444824 + ], + [ + "▁facilitating", + -12.318376541137695 + ], + [ + "Additional", + -12.318382263183594 + ], + [ + "▁Essex", + -12.318488121032717 + ], + [ + "jan", + -12.31853199005127 + ], + [ + "▁someday", + -12.318689346313477 + ], + [ + "▁Chevy", + -12.31871223449707 + ], + [ + "▁1999.", + -12.318753242492676 + ], + [ + "▁balances", + -12.31876277923584 + ], + [ + "iki", + -12.31881046295166 + ], + [ + "area", + -12.319012641906738 + ], + [ + "Works", + -12.319025993347168 + ], + [ + "▁corridor", + -12.319087982177734 + ], + [ + "▁striving", + -12.319143295288086 + ], + [ + "▁Sussex", + -12.319186210632324 + ], + [ + "▁generously", + -12.31930923461914 + ], + [ + "▁Farmers", + -12.319355010986328 + ], + [ + "▁populated", + -12.319408416748049 + ], + [ + "ben", + -12.319679260253906 + ], + [ + "▁Earl", + -12.31984519958496 + ], + [ + "▁wrestling", + -12.319917678833008 + ], + [ + "ations", + -12.320120811462402 + ], + [ + "video", + -12.320448875427246 + ], + [ + "ash", + -12.3204927444458 + ], + [ + "▁affection", + -12.320910453796388 + ], + [ + "▁Toy", + -12.321038246154783 + ], + [ + "App", + -12.321175575256348 + ], + [ + "▁stealing", + -12.321218490600586 + ], + [ + "▁Scheme", + -12.321234703063965 + ], + [ + "▁ceremonies", + -12.32144546508789 + ], + [ + "▁cried", + -12.321462631225586 + ], + [ + "▁Gaza", + -12.321527481079102 + ], + [ + "▁Messenger", + -12.321645736694336 + ], + [ + "▁Employees", + -12.321659088134766 + ], + [ + "lies", + -12.321691513061523 + ], + [ + "LP", + -12.321693420410156 + ], + [ + "Page", + -12.322163581848145 + ], + [ + "▁Pest", + -12.322373390197754 + ], + [ + "▁atop", + -12.322378158569336 + ], + [ + "▁Bol", + -12.32256031036377 + ], + [ + "▁assistants", + -12.322588920593262 + ], 
+ [ + "▁pitcher", + -12.32261848449707 + ], + [ + "▁contracted", + -12.322661399841309 + ], + [ + "▁sofas", + -12.322867393493652 + ], + [ + "▁Kingston", + -12.322876930236816 + ], + [ + "▁pigs", + -12.322980880737305 + ], + [ + "▁puppies", + -12.322983741760254 + ], + [ + "▁despair", + -12.323053359985352 + ], + [ + "cs", + -12.323174476623535 + ], + [ + "▁elder", + -12.323223114013672 + ], + [ + "▁platinum", + -12.323482513427734 + ], + [ + "MAN", + -12.323494911193848 + ], + [ + "▁Activities", + -12.323554039001465 + ], + [ + "▁Opening", + -12.323664665222168 + ], + [ + "ava", + -12.3236722946167 + ], + [ + "▁Files", + -12.323681831359863 + ], + [ + "▁proficient", + -12.323755264282228 + ], + [ + "▁quotation", + -12.323760032653809 + ], + [ + "▁LORD", + -12.323781967163086 + ], + [ + "▁certifications", + -12.323785781860352 + ], + [ + "▁predominantly", + -12.323832511901855 + ], + [ + "▁melting", + -12.323866844177246 + ], + [ + "▁Hit", + -12.323870658874512 + ], + [ + "▁NV", + -12.323885917663574 + ], + [ + "▁Wellington", + -12.323914527893066 + ], + [ + "Rock", + -12.323941230773926 + ], + [ + "etti", + -12.324214935302734 + ], + [ + "rus", + -12.324370384216309 + ], + [ + "▁pulp", + -12.324420928955078 + ], + [ + "qu", + -12.32443618774414 + ], + [ + "▁compositions", + -12.324528694152832 + ], + [ + "▁calorie", + -12.324551582336426 + ], + [ + "▁bonding", + -12.324745178222656 + ], + [ + "January", + -12.324929237365724 + ], + [ + "▁Female", + -12.3250150680542 + ], + [ + "▁signatures", + -12.325121879577637 + ], + [ + "▁Wool", + -12.325155258178713 + ], + [ + "▁handcrafted", + -12.32546043395996 + ], + [ + "nn", + -12.325480461120604 + ], + [ + "▁Likewise", + -12.325544357299805 + ], + [ + "▁mortar", + -12.325632095336914 + ], + [ + "▁metabolic", + -12.325666427612305 + ], + [ + "Further", + -12.325695991516112 + ], + [ + "mal", + -12.325749397277832 + ], + [ + "▁Mis", + -12.325850486755373 + ], + [ + ">>", + -12.325860023498535 + ], + [ + "Depending", + -12.325904846191406 + ], + [ + "1000", + -12.325998306274414 + ], + [ + "▁Restoration", + -12.326048851013184 + ], + [ + "member", + -12.32607650756836 + ], + [ + "bra", + -12.3261079788208 + ], + [ + "▁bloody", + -12.326128005981444 + ], + [ + "mb", + -12.326380729675291 + ], + [ + "▁depths", + -12.326422691345217 + ], + [ + "▁Reuters", + -12.326568603515623 + ], + [ + "▁administer", + -12.326759338378906 + ], + [ + "Product", + -12.326760292053224 + ], + [ + "▁vicinity", + -12.326837539672852 + ], + [ + "▁shelters", + -12.327022552490234 + ], + [ + "▁hash", + -12.327176094055176 + ], + [ + "▁Draw", + -12.32746696472168 + ], + [ + "68", + -12.32761573791504 + ], + [ + "frame", + -12.327622413635254 + ], + [ + "▁staircase", + -12.327666282653809 + ], + [ + "Mike", + -12.327730178833008 + ], + [ + "tex", + -12.32813835144043 + ], + [ + "Thus", + -12.328231811523438 + ], + [ + "...”", + -12.32831573486328 + ], + [ + "▁aforementioned", + -12.328384399414062 + ], + [ + "▁sauna", + -12.328461647033691 + ], + [ + "nor", + -12.328649520874023 + ], + [ + "▁savvy", + -12.328779220581056 + ], + [ + "centered", + -12.32882308959961 + ], + [ + "Complete", + -12.328835487365724 + ], + [ + "▁cigarette", + -12.32888412475586 + ], + [ + "gal", + -12.32903003692627 + ], + [ + "ois", + -12.32903003692627 + ], + [ + "▁Bow", + -12.329111099243164 + ], + [ + "▁missile", + -12.329323768615724 + ], + [ + "▁twins", + -12.32943630218506 + ], + [ + "mobile", + -12.329466819763184 + ], + [ + "funded", + -12.329602241516112 + ], + [ + "September", + 
-12.32968044281006 + ], + [ + "▁conversions", + -12.329731941223145 + ], + [ + "▁encompasses", + -12.3300142288208 + ], + [ + "gle", + -12.330063819885254 + ], + [ + "▁Lemon", + -12.33009910583496 + ], + [ + "▁incidence", + -12.330181121826172 + ], + [ + "▁Definitely", + -12.330286979675291 + ], + [ + "ches", + -12.330326080322266 + ], + [ + "▁screaming", + -12.33035945892334 + ], + [ + "▁pat", + -12.330367088317873 + ], + [ + "▁Tall", + -12.330389022827148 + ], + [ + "▁monument", + -12.330488204956056 + ], + [ + "Tom", + -12.330548286437988 + ], + [ + "▁Frederick", + -12.330743789672852 + ], + [ + "▁Scripture", + -12.330793380737305 + ], + [ + "▁Isaac", + -12.33090114593506 + ], + [ + "Tip", + -12.33095932006836 + ], + [ + "jet", + -12.33095932006836 + ], + [ + "▁traps", + -12.330986976623535 + ], + [ + "▁Links", + -12.3310546875 + ], + [ + "▁tilt", + -12.33110523223877 + ], + [ + "▁Brewing", + -12.331171035766602 + ], + [ + "▁dancer", + -12.331255912780762 + ], + [ + "▁vague", + -12.33135223388672 + ], + [ + "▁popcorn", + -12.331513404846191 + ], + [ + "▁plaintiff", + -12.331571578979492 + ], + [ + "▁exemption", + -12.331610679626465 + ], + [ + "▁integrates", + -12.33165454864502 + ], + [ + "▁coatings", + -12.33166217803955 + ], + [ + "▁SI", + -12.33173656463623 + ], + [ + "Apply", + -12.331802368164062 + ], + [ + "▁glove", + -12.331829071044922 + ], + [ + "pr", + -12.331892013549805 + ], + [ + "▁Letters", + -12.331896781921388 + ], + [ + "▁Cho", + -12.331920623779297 + ], + [ + "▁tuna", + -12.332002639770508 + ], + [ + "▁arbitration", + -12.332260131835938 + ], + [ + "▁Guinea", + -12.332305908203123 + ], + [ + "TP", + -12.332314491271973 + ], + [ + "▁5:30", + -12.332347869873049 + ], + [ + "▁vineyards", + -12.332387924194336 + ], + [ + "▁hated", + -12.33245086669922 + ], + [ + "▁comeback", + -12.33261013031006 + ], + [ + "77", + -12.332613945007324 + ], + [ + "▁Putin", + -12.332637786865234 + ], + [ + "▁hashtag", + -12.332722663879396 + ], + [ + "▁Spin", + -12.332834243774414 + ], + [ + "▁Similar", + -12.3329439163208 + ], + [ + "▁supposedly", + -12.332972526550291 + ], + [ + "▁worksheet", + -12.33301830291748 + ], + [ + "▁biodiversity", + -12.333040237426758 + ], + [ + "▁bra", + -12.33342170715332 + ], + [ + "▁discs", + -12.333443641662598 + ], + [ + "▁privileges", + -12.33347225189209 + ], + [ + "▁copying", + -12.333697319030762 + ], + [ + "▁royalty", + -12.333868026733398 + ], + [ + "▁Jump", + -12.333884239196776 + ], + [ + "▁CPA", + -12.33393669128418 + ], + [ + "▁veins", + -12.334052085876465 + ], + [ + "EX", + -12.334290504455566 + ], + [ + "▁Hampton", + -12.334455490112305 + ], + [ + "▁nurturing", + -12.334592819213867 + ], + [ + "▁municipalities", + -12.334612846374512 + ], + [ + "▁polar", + -12.334630012512209 + ], + [ + "End", + -12.334641456604004 + ], + [ + "▁acceleration", + -12.334675788879396 + ], + [ + "▁Tur", + -12.334721565246582 + ], + [ + "▁periodic", + -12.334800720214844 + ], + [ + "▁Atlas", + -12.334941864013672 + ], + [ + "Really", + -12.334959983825684 + ], + [ + "▁Signature", + -12.33502197265625 + ], + [ + "fo", + -12.335055351257324 + ], + [ + "15.", + -12.335296630859377 + ], + [ + "▁preparations", + -12.335321426391602 + ], + [ + "SL", + -12.335418701171877 + ], + [ + "▁Officers", + -12.33559226989746 + ], + [ + "▁redesign", + -12.335740089416504 + ], + [ + "▁stalls", + -12.335847854614258 + ], + [ + "▁streamline", + -12.335944175720217 + ], + [ + "▁bunk", + -12.335948944091797 + ], + [ + "▁interpreted", + -12.335951805114746 + ], + [ + "▁galaxy", + 
-12.336152076721191 + ], + [ + "TY", + -12.336318969726562 + ], + [ + "Store", + -12.336411476135254 + ], + [ + "▁farmhouse", + -12.336539268493652 + ], + [ + "mu", + -12.336543083190918 + ], + [ + "▁Consequently", + -12.33686351776123 + ], + [ + "▁accounted", + -12.336926460266112 + ], + [ + "▁gatherings", + -12.337090492248535 + ], + [ + "mod", + -12.337153434753418 + ], + [ + "▁Reality", + -12.33718490600586 + ], + [ + "▁disappoint", + -12.337238311767578 + ], + [ + "▁sci", + -12.337247848510742 + ], + [ + "Building", + -12.337302207946776 + ], + [ + "▁Admission", + -12.337368965148926 + ], + [ + "▁Yale", + -12.337479591369627 + ], + [ + "▁debates", + -12.3377046585083 + ], + [ + "tv", + -12.33775806427002 + ], + [ + "▁traumatic", + -12.33778476715088 + ], + [ + "▁doorstep", + -12.337848663330078 + ], + [ + "▁antioxidant", + -12.33788013458252 + ], + [ + "▁deduction", + -12.337937355041504 + ], + [ + "aba", + -12.337955474853516 + ], + [ + "▁induction", + -12.337993621826172 + ], + [ + "▁Yesterday", + -12.338057518005373 + ], + [ + "guard", + -12.338094711303713 + ], + [ + "▁reels", + -12.33817195892334 + ], + [ + "char", + -12.3383207321167 + ], + [ + "dia", + -12.338321685791016 + ], + [ + "▁Satellite", + -12.338394165039062 + ], + [ + "▁Patient", + -12.338529586791992 + ], + [ + "▁civilization", + -12.338827133178713 + ], + [ + "▁Grid", + -12.338890075683594 + ], + [ + "osis", + -12.338895797729492 + ], + [ + "▁manufactures", + -12.339055061340332 + ], + [ + "▁1985", + -12.339076042175291 + ], + [ + "▁horn", + -12.339086532592772 + ], + [ + "tle", + -12.339120864868164 + ], + [ + "▁oversee", + -12.33919906616211 + ], + [ + "▁surgeons", + -12.339235305786133 + ], + [ + "▁confront", + -12.339278221130373 + ], + [ + "University", + -12.33928394317627 + ], + [ + "▁GST", + -12.33931827545166 + ], + [ + "▁grout", + -12.339341163635254 + ], + [ + "▁SB", + -12.339454650878906 + ], + [ + "▁tolerate", + -12.339466094970703 + ], + [ + "▁dinners", + -12.339468002319336 + ], + [ + "р", + -12.339470863342283 + ], + [ + "▁stumbled", + -12.339592933654783 + ], + [ + "▁leftover", + -12.339802742004396 + ], + [ + "▁Reform", + -12.339838027954102 + ], + [ + "TE", + -12.340045928955078 + ], + [ + "▁penetration", + -12.340060234069824 + ], + [ + "▁Celtic", + -12.340171813964844 + ], + [ + "▁runway", + -12.340194702148438 + ], + [ + "Email", + -12.340465545654297 + ], + [ + "CU", + -12.34063720703125 + ], + [ + "▁soils", + -12.34063720703125 + ], + [ + "▁stole", + -12.34067153930664 + ], + [ + "FR", + -12.34069538116455 + ], + [ + "▁Volvo", + -12.340761184692385 + ], + [ + "▁Vo", + -12.34080410003662 + ], + [ + "▁capacities", + -12.340840339660645 + ], + [ + "▁philosophical", + -12.340840339660645 + ], + [ + "▁detention", + -12.340869903564451 + ], + [ + "▁Fi", + -12.340912818908691 + ], + [ + "▁chess", + -12.34132957458496 + ], + [ + "▁Carr", + -12.34146785736084 + ], + [ + "▁Troy", + -12.3414888381958 + ], + [ + "▁fictional", + -12.341614723205566 + ], + [ + "▁petroleum", + -12.341626167297363 + ], + [ + "▁ridge", + -12.34168529510498 + ], + [ + "▁Via", + -12.341693878173828 + ], + [ + "▁Shin", + -12.341878890991213 + ], + [ + "▁MAC", + -12.341984748840332 + ], + [ + "▁reminders", + -12.342005729675291 + ], + [ + "96", + -12.342019081115724 + ], + [ + "ied", + -12.34202480316162 + ], + [ + "▁reluctant", + -12.342035293579102 + ], + [ + "▁Fishing", + -12.342045783996582 + ], + [ + "rel", + -12.342111587524414 + ], + [ + "▁115", + -12.342124938964844 + ], + [ + "▁Expect", + -12.342247009277344 + ], + [ + 
"▁energies", + -12.34241008758545 + ], + [ + "▁sightseeing", + -12.342414855957031 + ], + [ + "▁trout", + -12.342466354370115 + ], + [ + "▁governmental", + -12.342493057250977 + ], + [ + "▁depart", + -12.342514991760254 + ], + [ + "▁Supplier", + -12.342605590820312 + ], + [ + "▁guitarist", + -12.342718124389648 + ], + [ + "▁Clarke", + -12.342971801757812 + ], + [ + "▁Nuclear", + -12.343061447143556 + ], + [ + "▁borrowed", + -12.34307098388672 + ], + [ + "OH", + -12.343092918395996 + ], + [ + "▁Distribution", + -12.343132972717283 + ], + [ + "▁plethora", + -12.34319305419922 + ], + [ + "▁Hopkins", + -12.343198776245115 + ], + [ + "▁trackback", + -12.343389511108398 + ], + [ + "▁collateral", + -12.343396186828612 + ], + [ + "facing", + -12.34340476989746 + ], + [ + "▁detox", + -12.343432426452637 + ], + [ + "▁Morocco", + -12.343436241149902 + ], + [ + "▁ministers", + -12.34344482421875 + ], + [ + "CON", + -12.343534469604492 + ], + [ + "▁Mouse", + -12.343589782714844 + ], + [ + "▁emission", + -12.34359073638916 + ], + [ + "▁Alumni", + -12.343634605407717 + ], + [ + "▁sentenced", + -12.343838691711426 + ], + [ + "▁flair", + -12.343992233276367 + ], + [ + "Media", + -12.344144821166992 + ], + [ + "▁Hy", + -12.344167709350586 + ], + [ + "▁expedition", + -12.344226837158203 + ], + [ + "sided", + -12.344388008117676 + ], + [ + "▁routing", + -12.34455680847168 + ], + [ + "▁streamlined", + -12.3446044921875 + ], + [ + "▁Mono", + -12.344647407531738 + ], + [ + "lly", + -12.344676971435549 + ], + [ + "hp", + -12.34469985961914 + ], + [ + "▁juicy", + -12.344764709472656 + ], + [ + "▁Alexa", + -12.34488296508789 + ], + [ + "▁hp", + -12.344970703125 + ], + [ + "▁fled", + -12.345052719116213 + ], + [ + "Image", + -12.345358848571776 + ], + [ + "▁milling", + -12.345376014709473 + ], + [ + "▁civilians", + -12.345462799072266 + ], + [ + "▁insightful", + -12.345492362976074 + ], + [ + "▁battling", + -12.345551490783691 + ], + [ + "▁Inch", + -12.3455810546875 + ], + [ + "▁clicked", + -12.345736503601074 + ], + [ + "▁Units", + -12.34579372406006 + ], + [ + "sie", + -12.345807075500488 + ], + [ + "▁tablespoon", + -12.345840454101562 + ], + [ + "▁Accounts", + -12.346073150634766 + ], + [ + "▁cu", + -12.346077919006348 + ], + [ + "▁Okay", + -12.346323013305664 + ], + [ + "▁jelly", + -12.346335411071776 + ], + [ + "ser", + -12.346436500549316 + ], + [ + "CL", + -12.346479415893556 + ], + [ + "Related", + -12.346497535705566 + ], + [ + "▁lookout", + -12.346516609191896 + ], + [ + "▁1998.", + -12.346532821655272 + ], + [ + "download", + -12.346607208251951 + ], + [ + "▁substitution", + -12.34661102294922 + ], + [ + "verse", + -12.346720695495604 + ], + [ + "▁hug", + -12.346720695495604 + ], + [ + "▁Diane", + -12.346763610839844 + ], + [ + "▁GDPR", + -12.34676742553711 + ], + [ + "▁mentioning", + -12.346818923950195 + ], + [ + "▁Roller", + -12.346842765808104 + ], + [ + "▁dreamed", + -12.347143173217772 + ], + [ + "▁Bone", + -12.347272872924805 + ], + [ + "2000", + -12.347310066223145 + ], + [ + "▁landlords", + -12.347312927246094 + ], + [ + "▁Piano", + -12.347419738769531 + ], + [ + "▁revelation", + -12.347426414489746 + ], + [ + "▁Philosophy", + -12.347495079040527 + ], + [ + "▁invoices", + -12.347575187683104 + ], + [ + "▁differentiate", + -12.347783088684082 + ], + [ + "loss", + -12.347869873046877 + ], + [ + "▁repetitive", + -12.347915649414062 + ], + [ + "▁Jesse", + -12.347925186157228 + ], + [ + "▁Forbes", + -12.347994804382324 + ], + [ + "▁stitching", + -12.34804630279541 + ], + [ + "▁aquatic", + 
-12.348061561584473 + ], + [ + "▁Liberal", + -12.348150253295898 + ], + [ + "▁elbow", + -12.348204612731934 + ], + [ + "ech", + -12.34826946258545 + ], + [ + "▁Tier", + -12.348464012145996 + ], + [ + "▁polite", + -12.34848403930664 + ], + [ + "▁projections", + -12.348498344421388 + ], + [ + "▁bind", + -12.348550796508787 + ], + [ + "▁ignition", + -12.348705291748049 + ], + [ + "▁tackling", + -12.34873104095459 + ], + [ + "▁apologize", + -12.348895072937012 + ], + [ + "▁Compliance", + -12.349018096923828 + ], + [ + "rm", + -12.349282264709473 + ], + [ + "operative", + -12.34929370880127 + ], + [ + "▁Asset", + -12.349514961242676 + ], + [ + "▁settlements", + -12.34953784942627 + ], + [ + "ink", + -12.349679946899414 + ], + [ + "▁Albany", + -12.349721908569336 + ], + [ + "Five", + -12.349756240844728 + ], + [ + "▁referenced", + -12.34976863861084 + ], + [ + "▁Pull", + -12.349857330322266 + ], + [ + "▁TIME", + -12.349942207336426 + ], + [ + "▁rooftop", + -12.349968910217283 + ], + [ + "▁mainland", + -12.350020408630373 + ], + [ + "▁crunch", + -12.350049018859863 + ], + [ + "▁Raymond", + -12.350194931030272 + ], + [ + "▁retains", + -12.350264549255373 + ], + [ + "▁bacterial", + -12.350502014160156 + ], + [ + "LC", + -12.350508689880373 + ], + [ + "EM", + -12.350677490234377 + ], + [ + "▁Chu", + -12.350688934326172 + ], + [ + "▁complained", + -12.3509521484375 + ], + [ + "▁DM", + -12.351057052612305 + ], + [ + "▁Springfield", + -12.351207733154297 + ], + [ + "▁rum", + -12.351275444030762 + ], + [ + "▁Included", + -12.351296424865724 + ], + [ + "▁Ros", + -12.351312637329102 + ], + [ + "Low", + -12.351398468017578 + ], + [ + "nick", + -12.351449966430664 + ], + [ + "▁simplified", + -12.351452827453612 + ], + [ + "▁constructive", + -12.351524353027344 + ], + [ + "▁flooded", + -12.351591110229492 + ], + [ + "▁Fo", + -12.351794242858888 + ], + [ + "FT", + -12.351868629455566 + ], + [ + "▁Clara", + -12.351895332336426 + ], + [ + "▁Kal", + -12.351958274841309 + ], + [ + "oy", + -12.352046012878418 + ], + [ + "▁Farms", + -12.352128982543944 + ], + [ + "Spring", + -12.352195739746094 + ], + [ + "ido", + -12.35220718383789 + ], + [ + "▁Count", + -12.352217674255373 + ], + [ + "▁compressor", + -12.352296829223633 + ], + [ + "▁subjected", + -12.35230827331543 + ], + [ + "Ready", + -12.352387428283691 + ], + [ + "▁Drink", + -12.352399826049805 + ], + [ + "▁ISBN", + -12.352428436279297 + ], + [ + "▁GPU", + -12.35244846343994 + ], + [ + "▁edible", + -12.35269832611084 + ], + [ + "▁Employee", + -12.352758407592772 + ], + [ + "▁LLP", + -12.352766036987305 + ], + [ + "USD", + -12.35279369354248 + ], + [ + "▁sincerely", + -12.352943420410156 + ], + [ + "▁Drawing", + -12.353070259094238 + ], + [ + "▁outright", + -12.353109359741213 + ], + [ + "ien", + -12.353135108947754 + ], + [ + "▁Moto", + -12.353189468383787 + ], + [ + "AF", + -12.35325527191162 + ], + [ + "▁Cave", + -12.353346824645996 + ], + [ + "LS", + -12.353353500366213 + ], + [ + "▁3%", + -12.35338306427002 + ], + [ + "▁bead", + -12.353401184082031 + ], + [ + "▁swear", + -12.35342502593994 + ], + [ + "Wood", + -12.353459358215332 + ], + [ + "mus", + -12.353487014770508 + ], + [ + "▁slipped", + -12.353532791137695 + ], + [ + "Benz", + -12.353713989257812 + ], + [ + "▁Charity", + -12.353919982910156 + ], + [ + "▁Former", + -12.353940963745115 + ], + [ + "▁Ny", + -12.353973388671877 + ], + [ + "▁Evening", + -12.354012489318848 + ], + [ + "▁doctoral", + -12.354037284851074 + ], + [ + "▁insufficient", + -12.354093551635742 + ], + [ + "▁Poetry", + 
-12.354138374328612 + ], + [ + "▁Bernard", + -12.354222297668455 + ], + [ + "▁Drivers", + -12.354225158691406 + ], + [ + "vy", + -12.354413986206056 + ], + [ + "▁compressed", + -12.35442066192627 + ], + [ + "▁misleading", + -12.35443878173828 + ], + [ + "▁legends", + -12.354602813720703 + ], + [ + "▁Orders", + -12.354846954345703 + ], + [ + "▁Sheffield", + -12.354866027832031 + ], + [ + "▁Rates", + -12.35500144958496 + ], + [ + "▁majestic", + -12.355015754699709 + ], + [ + "▁Mur", + -12.355034828186035 + ], + [ + "▁whipped", + -12.355057716369627 + ], + [ + "uka", + -12.355061531066896 + ], + [ + "▁Lancaster", + -12.355125427246094 + ], + [ + "▁Grow", + -12.35512638092041 + ], + [ + "▁handheld", + -12.355210304260254 + ], + [ + "DL", + -12.355291366577148 + ], + [ + "Game", + -12.355301856994627 + ], + [ + "Early", + -12.355485916137695 + ], + [ + "▁constructing", + -12.355538368225098 + ], + [ + "▁Julian", + -12.355605125427246 + ], + [ + "▁Taste", + -12.355647087097168 + ], + [ + "along", + -12.355672836303713 + ], + [ + "▁whereby", + -12.355742454528809 + ], + [ + "2,000", + -12.355839729309082 + ], + [ + "▁Strength", + -12.355913162231444 + ], + [ + "▁Ferrari", + -12.355977058410645 + ], + [ + "▁Henderson", + -12.355996131896973 + ], + [ + "▁obscure", + -12.35603141784668 + ], + [ + "▁scholar", + -12.35615348815918 + ], + [ + "IR", + -12.356268882751465 + ], + [ + "▁competed", + -12.356284141540527 + ], + [ + "▁repayment", + -12.356327056884766 + ], + [ + "▁pitched", + -12.35634994506836 + ], + [ + "▁twisted", + -12.356363296508787 + ], + [ + "▁learner", + -12.356365203857422 + ], + [ + "▁chop", + -12.35655689239502 + ], + [ + "▁Ant", + -12.356595993041992 + ], + [ + "▁Cone", + -12.356754302978516 + ], + [ + "▁farther", + -12.356785774230955 + ], + [ + "▁Background", + -12.35680103302002 + ], + [ + "▁glossy", + -12.356948852539062 + ], + [ + "▁insulated", + -12.356961250305176 + ], + [ + "▁restriction", + -12.356963157653809 + ], + [ + "▁Flag", + -12.357195854187012 + ], + [ + "▁Arnold", + -12.357254028320312 + ], + [ + "▁Strip", + -12.357282638549805 + ], + [ + "▁leveraging", + -12.357434272766112 + ], + [ + "95", + -12.35745906829834 + ], + [ + "iah", + -12.35756778717041 + ], + [ + "▁Hyderabad", + -12.357572555541992 + ], + [ + "▁fingertips", + -12.35782527923584 + ], + [ + "js", + -12.35783576965332 + ], + [ + "bot", + -12.357847213745115 + ], + [ + "November", + -12.357863426208496 + ], + [ + "▁outputs", + -12.357951164245604 + ], + [ + "▁ventures", + -12.35824966430664 + ], + [ + "▁por", + -12.358293533325195 + ], + [ + "▁touchdown", + -12.358302116394045 + ], + [ + "▁Monroe", + -12.358650207519531 + ], + [ + "▁IEEE", + -12.35873031616211 + ], + [ + "▁racist", + -12.358834266662598 + ], + [ + "▁comforting", + -12.358860969543455 + ], + [ + "▁Boot", + -12.359127044677734 + ], + [ + "▁groove", + -12.359132766723633 + ], + [ + "▁Ren", + -12.359156608581545 + ], + [ + "0.00", + -12.359228134155272 + ], + [ + "Tickets", + -12.359235763549805 + ], + [ + "▁Writers", + -12.359498023986816 + ], + [ + "▁Draft", + -12.35949993133545 + ], + [ + "0000", + -12.35954761505127 + ], + [ + "▁Suites", + -12.359561920166016 + ], + [ + "▁Luther", + -12.359766006469728 + ], + [ + "hy", + -12.35981273651123 + ], + [ + "▁Diana", + -12.35988712310791 + ], + [ + "▁northeast", + -12.359901428222656 + ], + [ + "▁planets", + -12.359914779663086 + ], + [ + "▁planners", + -12.360033988952637 + ], + [ + "▁cereal", + -12.360259056091309 + ], + [ + "▁spreadsheet", + -12.360284805297852 + ], + [ + "120", + 
-12.360312461853027 + ], + [ + "EST", + -12.36040496826172 + ], + [ + "lot", + -12.360482215881348 + ], + [ + "▁jerseys", + -12.36056137084961 + ], + [ + "▁Parent", + -12.360602378845217 + ], + [ + "Pa", + -12.360702514648438 + ], + [ + "▁plots", + -12.360763549804688 + ], + [ + "▁advent", + -12.36081600189209 + ], + [ + "▁formatting", + -12.360925674438477 + ], + [ + "▁weave", + -12.360989570617676 + ], + [ + "▁hacking", + -12.361019134521484 + ], + [ + "King", + -12.361047744750977 + ], + [ + "▁Basin", + -12.361054420471191 + ], + [ + "▁dot", + -12.361083984375 + ], + [ + "▁Savings", + -12.36124324798584 + ], + [ + "▁Eli", + -12.361254692077637 + ], + [ + "▁Chrysler", + -12.361281394958496 + ], + [ + "Sa", + -12.361284255981444 + ], + [ + "▁beams", + -12.361297607421877 + ], + [ + "▁Os", + -12.361361503601074 + ], + [ + "▁Deposit", + -12.361448287963867 + ], + [ + "▁foreclosure", + -12.36145305633545 + ], + [ + "▁bowling", + -12.36147689819336 + ], + [ + "▁Jerseys", + -12.361542701721191 + ], + [ + "▁Municipal", + -12.361634254455566 + ], + [ + "▁Glad", + -12.3618745803833 + ], + [ + "▁Randy", + -12.361886978149414 + ], + [ + "mill", + -12.361968040466309 + ], + [ + "▁Bud", + -12.36201000213623 + ], + [ + "▁HTTP", + -12.362072944641112 + ], + [ + "section", + -12.36211395263672 + ], + [ + "▁halt", + -12.362171173095703 + ], + [ + "▁Giant", + -12.362218856811523 + ], + [ + "▁knives", + -12.362218856811523 + ], + [ + "▁1979", + -12.362247467041016 + ], + [ + "▁ESPN", + -12.36230754852295 + ], + [ + "lor", + -12.362449645996094 + ], + [ + "rc", + -12.362489700317385 + ], + [ + "▁northwest", + -12.362504959106444 + ], + [ + "▁illustrates", + -12.362773895263672 + ], + [ + "▁diplomatic", + -12.362995147705078 + ], + [ + "▁verdict", + -12.363021850585938 + ], + [ + "▁piping", + -12.363152503967283 + ], + [ + "▁14-", + -12.363179206848145 + ], + [ + "▁Wake", + -12.36326026916504 + ], + [ + "alone", + -12.363279342651367 + ], + [ + "▁Curtis", + -12.363299369812012 + ], + [ + "▁dubbed", + -12.363409042358398 + ], + [ + "▁Founded", + -12.363496780395508 + ], + [ + "▁supervised", + -12.36358642578125 + ], + [ + "▁verses", + -12.363703727722168 + ], + [ + "▁reap", + -12.3638334274292 + ], + [ + "▁Sundays", + -12.363868713378906 + ], + [ + "pool", + -12.364062309265137 + ], + [ + "▁Knights", + -12.364418029785156 + ], + [ + "▁Summary", + -12.364561080932615 + ], + [ + "▁lobster", + -12.364629745483398 + ], + [ + "▁guitars", + -12.364712715148926 + ], + [ + "▁Mazda", + -12.364823341369627 + ], + [ + "▁Wallace", + -12.36495590209961 + ], + [ + "▁unbelievable", + -12.36523151397705 + ], + [ + "▁Spread", + -12.365315437316896 + ], + [ + "▁Monica", + -12.365370750427246 + ], + [ + "▁Nobody", + -12.36544132232666 + ], + [ + "eau", + -12.365477561950684 + ], + [ + "living", + -12.365511894226074 + ], + [ + "▁suburban", + -12.365523338317873 + ], + [ + "▁NM", + -12.365668296813965 + ], + [ + "▁Combine", + -12.365674018859863 + ], + [ + "▁Associated", + -12.365699768066406 + ], + [ + "▁plastics", + -12.365704536437988 + ], + [ + "RO", + -12.36571979522705 + ], + [ + "▁Spark", + -12.365811347961426 + ], + [ + "▁plumber", + -12.36596393585205 + ], + [ + "Summer", + -12.36615753173828 + ], + [ + "▁speeches", + -12.366170883178713 + ], + [ + "▁executing", + -12.366229057312012 + ], + [ + "▁reboot", + -12.366238594055176 + ], + [ + "perfect", + -12.366283416748049 + ], + [ + "▁fuss", + -12.36634922027588 + ], + [ + "▁shark", + -12.366397857666016 + ], + [ + "▁q", + -12.36643123626709 + ], + [ + "RP", + 
-12.36645221710205 + ], + [ + "▁banners", + -12.366456985473633 + ], + [ + "▁fences", + -12.366517066955566 + ], + [ + "▁listener", + -12.366705894470217 + ], + [ + "▁servant", + -12.366726875305176 + ], + [ + "▁laboratories", + -12.367033004760742 + ], + [ + "▁herein", + -12.367125511169434 + ], + [ + "▁bladder", + -12.367219924926758 + ], + [ + "▁troubleshooting", + -12.367276191711426 + ], + [ + "uf", + -12.367472648620604 + ], + [ + "▁mushroom", + -12.367483139038086 + ], + [ + "▁Marriage", + -12.367640495300291 + ], + [ + "▁safest", + -12.367722511291504 + ], + [ + "mount", + -12.367783546447754 + ], + [ + "▁whoever", + -12.367806434631348 + ], + [ + "▁issuing", + -12.367852210998535 + ], + [ + "▁CDs", + -12.368040084838867 + ], + [ + "▁drift", + -12.368050575256348 + ], + [ + "bio", + -12.368138313293455 + ], + [ + "▁infrared", + -12.368459701538086 + ], + [ + "▁rib", + -12.368651390075684 + ], + [ + "▁Surrey", + -12.36866569519043 + ], + [ + "▁chord", + -12.368812561035156 + ], + [ + "Peter", + -12.36887264251709 + ], + [ + "▁contracting", + -12.368974685668944 + ], + [ + "sign", + -12.369033813476562 + ], + [ + "neck", + -12.369051933288574 + ], + [ + "▁Trial", + -12.36910343170166 + ], + [ + "2013", + -12.369133949279783 + ], + [ + "esteem", + -12.369168281555176 + ], + [ + "▁dysfunction", + -12.36953067779541 + ], + [ + "beat", + -12.369535446166992 + ], + [ + "School", + -12.369603157043455 + ], + [ + "▁Pune", + -12.36970329284668 + ], + [ + "▁Classes", + -12.369810104370115 + ], + [ + "▁OUR", + -12.369826316833496 + ], + [ + "▁questionnaire", + -12.370015144348145 + ], + [ + "▁flats", + -12.37013053894043 + ], + [ + "sport", + -12.370224952697754 + ], + [ + "scape", + -12.37027645111084 + ], + [ + "▁collaborations", + -12.37035846710205 + ], + [ + "▁shrink", + -12.370497703552246 + ], + [ + "▁digest", + -12.370509147644045 + ], + [ + "▁Hawaiian", + -12.370831489562988 + ], + [ + "▁Jin", + -12.37097454071045 + ], + [ + "▁digestion", + -12.371102333068848 + ], + [ + "▁borrowing", + -12.37112522125244 + ], + [ + "▁Travis", + -12.371254920959473 + ], + [ + "▁deficiency", + -12.371268272399902 + ], + [ + "▁medals", + -12.371280670166016 + ], + [ + "▁teachings", + -12.371441841125488 + ], + [ + "▁craving", + -12.371625900268556 + ], + [ + "▁qualitative", + -12.371625900268556 + ], + [ + "sharing", + -12.371649742126465 + ], + [ + "▁assumes", + -12.371685981750488 + ], + [ + "▁lever", + -12.37170124053955 + ], + [ + "dot", + -12.371726989746094 + ], + [ + "▁Orthodox", + -12.37173080444336 + ], + [ + "Bill", + -12.371862411499023 + ], + [ + "▁Jeffrey", + -12.37187671661377 + ], + [ + "Until", + -12.3719482421875 + ], + [ + "▁Wizard", + -12.372072219848633 + ], + [ + "▁inquire", + -12.37208652496338 + ], + [ + "Born", + -12.372105598449709 + ], + [ + "▁Parade", + -12.372283935546877 + ], + [ + "Auto", + -12.372285842895508 + ], + [ + "Da", + -12.372466087341309 + ], + [ + "▁SharePoint", + -12.372697830200195 + ], + [ + "Pacific", + -12.372732162475586 + ], + [ + "▁burnt", + -12.372732162475586 + ], + [ + "ters", + -12.372764587402344 + ], + [ + "▁polymer", + -12.37285041809082 + ], + [ + "▁parental", + -12.37297534942627 + ], + [ + "▁Musical", + -12.372997283935549 + ], + [ + "dd", + -12.373058319091797 + ], + [ + "Fill", + -12.373153686523438 + ], + [ + "▁mock", + -12.373170852661133 + ], + [ + "▁Disc", + -12.373241424560549 + ], + [ + "WP", + -12.373326301574709 + ], + [ + "▁shaking", + -12.373528480529783 + ], + [ + "▁commander", + -12.373598098754885 + ], + [ + "▁aka", + 
-12.373661994934082 + ], + [ + "▁Pakistani", + -12.373703956604004 + ], + [ + "▁Hans", + -12.373740196228027 + ], + [ + "▁Hispanic", + -12.373754501342772 + ], + [ + "▁rookie", + -12.37397003173828 + ], + [ + "▁escaped", + -12.374011993408203 + ], + [ + "url", + -12.374046325683594 + ], + [ + "▁Colors", + -12.374153137207031 + ], + [ + "▁pitches", + -12.374176025390623 + ], + [ + "▁slept", + -12.374297142028809 + ], + [ + "UN", + -12.374326705932615 + ], + [ + "▁sip", + -12.374427795410156 + ], + [ + "▁dedicate", + -12.374506950378418 + ], + [ + "▁Yamaha", + -12.374592781066896 + ], + [ + "▁pops", + -12.374646186828612 + ], + [ + "DA", + -12.37466526031494 + ], + [ + "▁joyful", + -12.374818801879885 + ], + [ + "▁Arlington", + -12.37494945526123 + ], + [ + "▁hallway", + -12.375067710876465 + ], + [ + "▁enquiries", + -12.375106811523438 + ], + [ + "▁colony", + -12.375337600708008 + ], + [ + "▁Berg", + -12.375411033630373 + ], + [ + "▁campuses", + -12.37543773651123 + ], + [ + "”),", + -12.375448226928713 + ], + [ + "▁deepest", + -12.375484466552734 + ], + [ + "▁Dana", + -12.375680923461914 + ], + [ + "▁fluids", + -12.375772476196287 + ], + [ + ",000", + -12.375784873962402 + ], + [ + "▁nausea", + -12.375933647155762 + ], + [ + "▁prostate", + -12.375995635986328 + ], + [ + "▁Growing", + -12.37611484527588 + ], + [ + "▁enzymes", + -12.376131057739258 + ], + [ + "▁Label", + -12.376158714294434 + ], + [ + "▁manageable", + -12.37632179260254 + ], + [ + "▁Riverside", + -12.376352310180664 + ], + [ + "fly", + -12.376476287841797 + ], + [ + "▁Reg", + -12.376501083374023 + ], + [ + "▁Integration", + -12.376667976379396 + ], + [ + "▁overweight", + -12.376687049865724 + ], + [ + "▁Subaru", + -12.376734733581545 + ], + [ + "RC", + -12.376768112182615 + ], + [ + "▁Cycle", + -12.376785278320312 + ], + [ + "▁UNESCO", + -12.376875877380373 + ], + [ + "aya", + -12.376896858215332 + ], + [ + "▁collagen", + -12.376935958862305 + ], + [ + "eb", + -12.37710952758789 + ], + [ + "▁FB", + -12.37725830078125 + ], + [ + "-35", + -12.377354621887209 + ], + [ + "▁Comes", + -12.377354621887209 + ], + [ + "▁1,500", + -12.377389907836914 + ], + [ + "▁digitally", + -12.37742042541504 + ], + [ + "▁mortgages", + -12.377610206604004 + ], + [ + "▁alcoholic", + -12.37761402130127 + ], + [ + "lle", + -12.377673149108888 + ], + [ + "▁Copenhagen", + -12.377689361572266 + ], + [ + "▁harvested", + -12.377708435058594 + ], + [ + "▁junction", + -12.37779140472412 + ], + [ + "▁Highland", + -12.377921104431152 + ], + [ + "▁Sally", + -12.378029823303224 + ], + [ + "▁Ballet", + -12.378416061401367 + ], + [ + "▁suffers", + -12.37848663330078 + ], + [ + "AH", + -12.378560066223145 + ], + [ + "▁slick", + -12.378681182861328 + ], + [ + "▁Believe", + -12.378717422485352 + ], + [ + "▁pursued", + -12.378921508789062 + ], + [ + "ography", + -12.379020690917969 + ], + [ + "chan", + -12.379199981689451 + ], + [ + "▁Bloomberg", + -12.379318237304688 + ], + [ + "SF", + -12.379385948181152 + ], + [ + "▁LAN", + -12.379443168640137 + ], + [ + "▁Progress", + -12.37946891784668 + ], + [ + "MI", + -12.379487991333008 + ], + [ + "▁Advance", + -12.379554748535156 + ], + [ + "▁Into", + -12.379620552062988 + ], + [ + "▁successor", + -12.379655838012695 + ], + [ + "▁tablespoons", + -12.379711151123049 + ], + [ + "▁Tomorrow", + -12.37981414794922 + ], + [ + "▁tweeted", + -12.379833221435549 + ], + [ + "modern", + -12.379940032958984 + ], + [ + "Ever", + -12.380084991455078 + ], + [ + "▁Stress", + -12.380085945129396 + ], + [ + "▁Maya", + -12.38022518157959 + ], 
+ [ + "▁liberty", + -12.380295753479004 + ], + [ + "▁Drain", + -12.380311012268066 + ], + [ + "▁feathers", + -12.380334854125977 + ], + [ + "▁Adrian", + -12.380423545837402 + ], + [ + "▁endeavors", + -12.380489349365234 + ], + [ + "Young", + -12.380559921264648 + ], + [ + "▁downward", + -12.380651473999023 + ], + [ + "▁infused", + -12.380703926086426 + ], + [ + "▁Ry", + -12.380837440490724 + ], + [ + "▁Ubuntu", + -12.381023406982422 + ], + [ + "▁detector", + -12.381355285644531 + ], + [ + "▁Sigma", + -12.381403923034668 + ], + [ + "LL", + -12.38143825531006 + ], + [ + "▁vineyard", + -12.381487846374512 + ], + [ + "▁impaired", + -12.381508827209473 + ], + [ + "▁Facility", + -12.381613731384276 + ], + [ + "▁addictive", + -12.381658554077148 + ], + [ + "▁Inspector", + -12.381684303283691 + ], + [ + "uch", + -12.381696701049805 + ], + [ + "rr", + -12.38172435760498 + ], + [ + "▁$150", + -12.381744384765623 + ], + [ + "between", + -12.381834030151367 + ], + [ + "▁carbohydrates", + -12.381890296936035 + ], + [ + "Ko", + -12.38200855255127 + ], + [ + "▁immensely", + -12.382019996643066 + ], + [ + "▁pod", + -12.382071495056152 + ], + [ + "mates", + -12.382100105285645 + ], + [ + "▁Jacksonville", + -12.38227367401123 + ], + [ + "▁identities", + -12.382431030273438 + ], + [ + "▁Transit", + -12.38245964050293 + ], + [ + "▁socio", + -12.38246726989746 + ], + [ + "76", + -12.382516860961914 + ], + [ + "▁Podcast", + -12.38255214691162 + ], + [ + "▁Prayer", + -12.38265609741211 + ], + [ + "▁cancers", + -12.382787704467772 + ], + [ + "▁rabbit", + -12.382790565490724 + ], + [ + "▁frequencies", + -12.382866859436035 + ], + [ + "▁Drake", + -12.382874488830566 + ], + [ + "▁conceived", + -12.3828763961792 + ], + [ + "2015", + -12.382889747619627 + ], + [ + "▁Adventures", + -12.382928848266602 + ], + [ + "▁Rugby", + -12.383030891418455 + ], + [ + "▁Derek", + -12.383069038391112 + ], + [ + "▁si", + -12.38309383392334 + ], + [ + "▁satisfactory", + -12.38319206237793 + ], + [ + "▁beneficiaries", + -12.383246421813965 + ], + [ + "/5", + -12.38353443145752 + ], + [ + "maybe", + -12.38357162475586 + ], + [ + "▁occupy", + -12.383726119995115 + ], + [ + "Clean", + -12.383992195129396 + ], + [ + "defined", + -12.384135246276855 + ], + [ + "▁theatrical", + -12.384143829345703 + ], + [ + "▁standalone", + -12.384153366088867 + ], + [ + "▁Inspiration", + -12.384307861328123 + ], + [ + "▁hearty", + -12.384352684020996 + ], + [ + "▁Brands", + -12.384483337402344 + ], + [ + "▁passions", + -12.384614944458008 + ], + [ + "▁Powered", + -12.384645462036133 + ], + [ + "▁breeds", + -12.384714126586914 + ], + [ + "▁accidental", + -12.38482666015625 + ], + [ + "▁justified", + -12.384840965270996 + ], + [ + "▁lucrative", + -12.384882926940918 + ], + [ + "▁vacations", + -12.38497543334961 + ], + [ + "Jesus", + -12.38518238067627 + ], + [ + "▁concludes", + -12.38524341583252 + ], + [ + "▁Epic", + -12.385247230529783 + ], + [ + "▁Groups", + -12.38526725769043 + ], + [ + "▁Ultimately", + -12.38530445098877 + ], + [ + "▁1999,", + -12.38533878326416 + ], + [ + "▁calendars", + -12.385369300842283 + ], + [ + "▁Fusion", + -12.385478973388672 + ], + [ + "▁Indonesian", + -12.385658264160156 + ], + [ + "▁metropolitan", + -12.385703086853027 + ], + [ + "▁Wooden", + -12.38572883605957 + ], + [ + "▁endangered", + -12.385807991027832 + ], + [ + "▁dentists", + -12.385846138000488 + ], + [ + "▁LS", + -12.385891914367676 + ], + [ + "fan", + -12.385951042175291 + ], + [ + "sum", + -12.385968208312988 + ], + [ + "▁1968", + -12.386181831359863 + ], + [ + 
"▁ram", + -12.386277198791504 + ], + [ + "▁Flying", + -12.386302947998049 + ], + [ + "lets", + -12.386375427246094 + ], + [ + "▁Kor", + -12.386439323425291 + ], + [ + "▁radiant", + -12.38652229309082 + ], + [ + "▁mag", + -12.386551856994627 + ], + [ + "▁Bench", + -12.386553764343262 + ], + [ + "PI", + -12.386590003967283 + ], + [ + "▁WhatsApp", + -12.386648178100586 + ], + [ + "▁IR", + -12.386690139770508 + ], + [ + "▁lifespan", + -12.38683795928955 + ], + [ + "▁Buck", + -12.38690948486328 + ], + [ + "▁Nar", + -12.387086868286133 + ], + [ + "▁Controller", + -12.387113571166992 + ], + [ + "▁gums", + -12.387114524841309 + ], + [ + "▁grooming", + -12.38713264465332 + ], + [ + "GC", + -12.387136459350586 + ], + [ + "▁Amber", + -12.387142181396484 + ], + [ + "▁Casey", + -12.387214660644531 + ], + [ + "▁taxpayer", + -12.387290000915527 + ], + [ + "▁lumber", + -12.387346267700195 + ], + [ + "▁Etsy", + -12.387373924255373 + ], + [ + "▁Sunshine", + -12.387429237365724 + ], + [ + "▁Sox", + -12.387495994567873 + ], + [ + "▁Protein", + -12.387548446655272 + ], + [ + "▁consultations", + -12.387581825256348 + ], + [ + "▁Sunset", + -12.387676239013672 + ], + [ + "▁compile", + -12.38771915435791 + ], + [ + "▁digits", + -12.387810707092283 + ], + [ + "ory", + -12.387986183166504 + ], + [ + "note", + -12.388092041015623 + ], + [ + "▁festivities", + -12.38816261291504 + ], + [ + "▁cedar", + -12.388214111328123 + ], + [ + "approved", + -12.388264656066896 + ], + [ + "RY", + -12.388472557067873 + ], + [ + "▁bothered", + -12.388477325439451 + ], + [ + "▁Owen", + -12.38849925994873 + ], + [ + "▁tossed", + -12.38854694366455 + ], + [ + "Facebook", + -12.388554573059082 + ], + [ + "▁Solo", + -12.38861846923828 + ], + [ + "▁Timothy", + -12.388859748840332 + ], + [ + "▁cathedral", + -12.388989448547363 + ], + [ + "Designed", + -12.38909912109375 + ], + [ + "sec", + -12.389317512512209 + ], + [ + "▁whitening", + -12.389348030090332 + ], + [ + "▁Vale", + -12.389450073242188 + ], + [ + "▁Garcia", + -12.389639854431152 + ], + [ + "▁Birth", + -12.3897705078125 + ], + [ + "▁repertoire", + -12.389806747436523 + ], + [ + "▁contrasting", + -12.389840126037598 + ], + [ + "cam", + -12.38987922668457 + ], + [ + "▁MacBook", + -12.389994621276855 + ], + [ + "▁Aero", + -12.390005111694336 + ], + [ + "▁wallpapers", + -12.390144348144531 + ], + [ + "▁Booth", + -12.390151977539062 + ], + [ + "▁refurbished", + -12.390166282653809 + ], + [ + "Imagine", + -12.39023208618164 + ], + [ + "▁Album", + -12.390447616577148 + ], + [ + "SB", + -12.390460968017578 + ], + [ + "▁transitions", + -12.39051342010498 + ], + [ + "▁seasoning", + -12.390573501586914 + ], + [ + "clean", + -12.390759468078612 + ], + [ + "▁PI", + -12.390851020812988 + ], + [ + "▁CAR", + -12.39090633392334 + ], + [ + "▁shores", + -12.391014099121094 + ], + [ + "Soft", + -12.391023635864258 + ], + [ + "Ray", + -12.391075134277344 + ], + [ + "EP", + -12.391159057617188 + ], + [ + "▁devote", + -12.391196250915527 + ], + [ + "▁pediatric", + -12.39125156402588 + ], + [ + "xi", + -12.39137077331543 + ], + [ + "▁remarkably", + -12.39145851135254 + ], + [ + "▁wisely", + -12.391592025756836 + ], + [ + "▁aftermath", + -12.391813278198242 + ], + [ + "▁Symposium", + -12.392017364501951 + ], + [ + "▁nominations", + -12.39203643798828 + ], + [ + "”?", + -12.392051696777344 + ], + [ + "▁chew", + -12.392061233520508 + ], + [ + "▁destroying", + -12.392142295837402 + ], + [ + "▁Serving", + -12.392148971557615 + ], + [ + "▁versa", + -12.392213821411133 + ], + [ + "▁Pavilion", + 
-12.392277717590332 + ], + [ + "▁Sauce", + -12.392332077026367 + ], + [ + "image", + -12.392544746398926 + ], + [ + "▁Alternative", + -12.392573356628418 + ], + [ + "▁emergence", + -12.39263916015625 + ], + [ + "▁Plain", + -12.392657279968262 + ], + [ + "▁seized", + -12.39281177520752 + ], + [ + "▁exclusion", + -12.39283561706543 + ], + [ + "1%", + -12.39309024810791 + ], + [ + "▁Makes", + -12.393110275268556 + ], + [ + "Dec", + -12.39328670501709 + ], + [ + "ML", + -12.393351554870604 + ], + [ + "▁Added", + -12.393387794494627 + ], + [ + "▁fraudulent", + -12.39369010925293 + ], + [ + "▁owed", + -12.39389419555664 + ], + [ + "NS", + -12.393912315368652 + ], + [ + "▁NOTE", + -12.393953323364258 + ], + [ + "▁tapping", + -12.394060134887695 + ], + [ + "lane", + -12.39415454864502 + ], + [ + "▁compliments", + -12.394214630126951 + ], + [ + "▁dumb", + -12.394506454467772 + ], + [ + "▁teamwork", + -12.394610404968262 + ], + [ + "▁congestion", + -12.394777297973633 + ], + [ + "iri", + -12.39478588104248 + ], + [ + "▁Leading", + -12.394878387451172 + ], + [ + "▁Palmer", + -12.39497184753418 + ], + [ + "court", + -12.395034790039062 + ], + [ + "▁Antique", + -12.395042419433594 + ], + [ + "qui", + -12.395181655883787 + ], + [ + "▁frameworks", + -12.39523983001709 + ], + [ + "▁conception", + -12.395411491394045 + ], + [ + "cultural", + -12.395512580871582 + ], + [ + "▁shipments", + -12.395756721496582 + ], + [ + "▁wolf", + -12.396180152893066 + ], + [ + "▁Ark", + -12.396188735961914 + ], + [ + "bell", + -12.39633560180664 + ], + [ + "▁Patriots", + -12.39633846282959 + ], + [ + "▁clerk", + -12.39638328552246 + ], + [ + "▁preferably", + -12.396402359008787 + ], + [ + "▁Fifth", + -12.396405220031738 + ], + [ + "▁Computing", + -12.39641571044922 + ], + [ + "▁6:", + -12.396434783935549 + ], + [ + "▁Dee", + -12.39655303955078 + ], + [ + "ees", + -12.3966064453125 + ], + [ + "▁tar", + -12.39671230316162 + ], + [ + "▁Fur", + -12.396747589111328 + ], + [ + "▁Pilot", + -12.396759033203123 + ], + [ + "▁40-", + -12.396831512451172 + ], + [ + "▁stresses", + -12.39692497253418 + ], + [ + "cus", + -12.396980285644531 + ], + [ + "December", + -12.397003173828123 + ], + [ + "▁1/2\"", + -12.397090911865234 + ], + [ + "▁exclude", + -12.397103309631348 + ], + [ + "▁Pocket", + -12.397106170654297 + ], + [ + "cer", + -12.39723777770996 + ], + [ + "▁scanned", + -12.39729118347168 + ], + [ + "▁chunk", + -12.397320747375488 + ], + [ + "▁perceptions", + -12.398031234741213 + ], + [ + "▁MRI", + -12.398096084594728 + ], + [ + "▁selective", + -12.398104667663574 + ], + [ + "▁predecessor", + -12.398127555847168 + ], + [ + "▁padded", + -12.39820671081543 + ], + [ + "▁brushing", + -12.398344993591309 + ], + [ + "▁american", + -12.398364067077637 + ], + [ + "super", + -12.398431777954102 + ], + [ + "▁Bread", + -12.39857578277588 + ], + [ + "▁specialise", + -12.398639678955078 + ], + [ + "▁(0", + -12.39864730834961 + ], + [ + "61", + -12.398852348327637 + ], + [ + "▁hurts", + -12.398853302001951 + ], + [ + "▁makeover", + -12.39887809753418 + ], + [ + "▁Papa", + -12.398902893066406 + ], + [ + "▁statute", + -12.398969650268556 + ], + [ + "Brien", + -12.399075508117676 + ], + [ + "ale", + -12.399075508117676 + ], + [ + "Jack", + -12.399080276489258 + ], + [ + "▁mu", + -12.399107933044434 + ], + [ + "▁Es", + -12.399147987365724 + ], + [ + "▁Persian", + -12.399188041687012 + ], + [ + "▁formulation", + -12.399219512939451 + ], + [ + "▁mankind", + -12.3992280960083 + ], + [ + "▁Candy", + -12.399236679077148 + ], + [ + "VR", + 
-12.39937973022461 + ], + [ + "▁marketed", + -12.399528503417969 + ], + [ + "private", + -12.39955711364746 + ], + [ + "▁ZIP", + -12.39961051940918 + ], + [ + "▁ignoring", + -12.399728775024414 + ], + [ + "▁unmatched", + -12.399746894836426 + ], + [ + "▁angels", + -12.399867057800291 + ], + [ + "▁commenting", + -12.399962425231934 + ], + [ + "▁overhaul", + -12.40011501312256 + ], + [ + "▁Away", + -12.400136947631836 + ], + [ + "▁locker", + -12.400179862976074 + ], + [ + "▁les", + -12.40021514892578 + ], + [ + "▁Gibson", + -12.40025806427002 + ], + [ + "▁LOL", + -12.40035915374756 + ], + [ + "▁9:30", + -12.40037441253662 + ], + [ + "▁outbreak", + -12.40038013458252 + ], + [ + "▁norms", + -12.400392532348633 + ], + [ + "▁Armstrong", + -12.40040111541748 + ], + [ + "▁accustomed", + -12.400559425354004 + ], + [ + "▁navigating", + -12.400559425354004 + ], + [ + "▁chemotherapy", + -12.400561332702637 + ], + [ + "▁Der", + -12.400616645812988 + ], + [ + "▁oxide", + -12.400625228881836 + ], + [ + "▁Holdings", + -12.40063190460205 + ], + [ + "cin", + -12.400745391845703 + ], + [ + "▁$35", + -12.400877952575684 + ], + [ + "▁Powell", + -12.401000022888184 + ], + [ + "▁3-5", + -12.401007652282717 + ], + [ + "▁unsafe", + -12.401127815246582 + ], + [ + "▁splendid", + -12.401148796081545 + ], + [ + "iko", + -12.401150703430176 + ], + [ + "▁perks", + -12.40115451812744 + ], + [ + "▁exported", + -12.40133285522461 + ], + [ + "vel", + -12.401368141174316 + ], + [ + "▁baggage", + -12.40140438079834 + ], + [ + "▁purity", + -12.401408195495604 + ], + [ + "▁BP", + -12.401411056518556 + ], + [ + "host", + -12.401496887207031 + ], + [ + "▁Fiction", + -12.401605606079102 + ], + [ + "Dan", + -12.401643753051758 + ], + [ + "▁countertop", + -12.401676177978516 + ], + [ + "turn", + -12.401731491088867 + ], + [ + "▁Guru", + -12.40204620361328 + ], + [ + "WA", + -12.402069091796877 + ], + [ + "▁hd", + -12.40220069885254 + ], + [ + "▁captivating", + -12.40222454071045 + ], + [ + "▁lateral", + -12.402228355407717 + ], + [ + "▁blew", + -12.402238845825195 + ], + [ + "▁Merry", + -12.402338027954102 + ], + [ + "▁zu", + -12.402402877807615 + ], + [ + "▁thinner", + -12.402454376220703 + ], + [ + "▁Residence", + -12.402544021606444 + ], + [ + "▁Hosting", + -12.40255355834961 + ], + [ + "Sc", + -12.40269947052002 + ], + [ + "▁biography", + -12.402703285217283 + ], + [ + "▁Bronze", + -12.402853965759276 + ], + [ + "▁Snap", + -12.402867317199709 + ], + [ + "▁zipper", + -12.402878761291504 + ], + [ + "▁Root", + -12.402959823608398 + ], + [ + "▁Cornwall", + -12.403069496154783 + ], + [ + "▁burner", + -12.403079986572266 + ], + [ + "▁Monitoring", + -12.403085708618164 + ], + [ + "lum", + -12.403125762939451 + ], + [ + "▁virtue", + -12.40317726135254 + ], + [ + "unit", + -12.403190612792969 + ], + [ + "▁uploading", + -12.403247833251951 + ], + [ + "▁alphabet", + -12.403433799743652 + ], + [ + "▁sufficiently", + -12.403524398803713 + ], + [ + "▁Sie", + -12.403642654418944 + ], + [ + "chen", + -12.403706550598145 + ], + [ + "▁quad", + -12.40375518798828 + ], + [ + "▁broadcasting", + -12.404048919677734 + ], + [ + "Ba", + -12.40422534942627 + ], + [ + "▁famed", + -12.404303550720217 + ], + [ + "wash", + -12.404545783996582 + ], + [ + "▁Rise", + -12.404744148254396 + ], + [ + "▁permitting", + -12.40489387512207 + ], + [ + "▁Cooking", + -12.404972076416016 + ], + [ + "▁playlist", + -12.405007362365724 + ], + [ + "oon", + -12.405083656311035 + ], + [ + "moving", + -12.405190467834473 + ], + [ + "▁refunds", + -12.405224800109863 + ], + [ + 
"Color", + -12.405251502990724 + ], + [ + "▁wiki", + -12.40532398223877 + ], + [ + "▁cigarettes", + -12.40540599822998 + ], + [ + "▁Dash", + -12.405407905578612 + ], + [ + "▁accountant", + -12.40552806854248 + ], + [ + "▁epidemic", + -12.405561447143556 + ], + [ + "pel", + -12.405808448791504 + ], + [ + "▁GI", + -12.405843734741213 + ], + [ + "▁closes", + -12.405887603759766 + ], + [ + "nz", + -12.405942916870115 + ], + [ + "▁Creator", + -12.405965805053713 + ], + [ + "▁Holmes", + -12.406055450439451 + ], + [ + "▁pallet", + -12.406071662902832 + ], + [ + "▁canned", + -12.406155586242676 + ], + [ + "-23", + -12.406203269958496 + ], + [ + "paid", + -12.406216621398926 + ], + [ + "Travel", + -12.406312942504885 + ], + [ + "▁Isn", + -12.406400680541992 + ], + [ + "▁AF", + -12.406566619873049 + ], + [ + "▁WHO", + -12.406618118286133 + ], + [ + "▁Few", + -12.406625747680664 + ], + [ + "▁lag", + -12.406749725341797 + ], + [ + "▁Builders", + -12.406776428222656 + ], + [ + "floor", + -12.406848907470703 + ], + [ + "▁fashioned", + -12.406874656677246 + ], + [ + "search", + -12.406928062438965 + ], + [ + "wave", + -12.407175064086914 + ], + [ + "▁leagues", + -12.407217979431152 + ], + [ + "▁Example", + -12.407527923583984 + ], + [ + "▁bathing", + -12.407551765441896 + ], + [ + "tur", + -12.407560348510742 + ], + [ + "▁stripped", + -12.407574653625488 + ], + [ + "/11", + -12.407632827758787 + ], + [ + "▁Minimum", + -12.407722473144531 + ], + [ + "fill", + -12.407766342163086 + ], + [ + "▁Raj", + -12.407835006713867 + ], + [ + "▁murdered", + -12.407842636108398 + ], + [ + "Change", + -12.40799045562744 + ], + [ + "▁Coin", + -12.407994270324709 + ], + [ + "▁onset", + -12.408117294311523 + ], + [ + "▁frankly", + -12.40820598602295 + ], + [ + "▁Bean", + -12.40843391418457 + ], + [ + "▁certainty", + -12.408766746520996 + ], + [ + "six", + -12.408767700195312 + ], + [ + "▁embarrassing", + -12.408828735351562 + ], + [ + "▁3)", + -12.40894889831543 + ], + [ + "▁Noble", + -12.408949851989746 + ], + [ + "▁Lite", + -12.408978462219238 + ], + [ + "Ben", + -12.409083366394045 + ], + [ + "▁Coral", + -12.409097671508787 + ], + [ + "▁Pierre", + -12.40915870666504 + ], + [ + "4,000", + -12.409159660339355 + ], + [ + "▁crunchy", + -12.409189224243164 + ], + [ + "▁screenshot", + -12.40924835205078 + ], + [ + "▁Tru", + -12.40931797027588 + ], + [ + "CI", + -12.409411430358888 + ], + [ + "sur", + -12.409467697143556 + ], + [ + "▁Brady", + -12.409711837768556 + ], + [ + "▁perimeter", + -12.409811973571776 + ], + [ + "▁Blair", + -12.409974098205566 + ], + [ + "WC", + -12.409993171691896 + ], + [ + "▁Option", + -12.41016960144043 + ], + [ + "▁Effect", + -12.41028118133545 + ], + [ + "▁perfume", + -12.41028118133545 + ], + [ + "▁sedan", + -12.410326957702637 + ], + [ + "▁1982", + -12.410517692565918 + ], + [ + "mid", + -12.410632133483888 + ], + [ + "BP", + -12.411029815673828 + ], + [ + "▁Petersburg", + -12.411033630371094 + ], + [ + "▁jurisdictions", + -12.411190032958984 + ], + [ + "▁Duncan", + -12.411270141601562 + ], + [ + "America", + -12.41128635406494 + ], + [ + "▁bids", + -12.411299705505373 + ], + [ + "▁facilitates", + -12.411375045776367 + ], + [ + "▁broccoli", + -12.411429405212402 + ], + [ + "▁Speaking", + -12.41144847869873 + ], + [ + "▁accelerated", + -12.41160488128662 + ], + [ + "▁Doc", + -12.411714553833008 + ], + [ + "Browse", + -12.411910057067873 + ], + [ + "89", + -12.411911010742188 + ], + [ + "▁repeating", + -12.411925315856934 + ], + [ + "ama", + -12.411965370178224 + ], + [ + "▁recharge", + 
-12.41204071044922 + ], + [ + "▁Communities", + -12.4120454788208 + ], + [ + "▁guardian", + -12.412062644958496 + ], + [ + "▁superhero", + -12.412164688110352 + ], + [ + "▁1983", + -12.412321090698242 + ], + [ + "▁POS", + -12.412330627441406 + ], + [ + "▁Lynch", + -12.412382125854492 + ], + [ + "before", + -12.412421226501465 + ], + [ + "▁Slot", + -12.412456512451172 + ], + [ + "▁detective", + -12.41254997253418 + ], + [ + "▁Intended", + -12.412696838378906 + ], + [ + "▁Converter", + -12.41271686553955 + ], + [ + "pt", + -12.412841796875 + ], + [ + "▁Alfred", + -12.412923812866213 + ], + [ + "▁Crack", + -12.412994384765623 + ], + [ + "▁replies", + -12.413119316101074 + ], + [ + "▁ecosystems", + -12.41324234008789 + ], + [ + "glass", + -12.413270950317385 + ], + [ + "▁NS", + -12.413348197937012 + ], + [ + "eva", + -12.413358688354492 + ], + [ + "▁Gym", + -12.413412094116213 + ], + [ + "▁Button", + -12.413419723510742 + ], + [ + "▁Tara", + -12.413562774658203 + ], + [ + "▁molding", + -12.413594245910645 + ], + [ + "▁giants", + -12.41364574432373 + ], + [ + "Gen", + -12.413666725158691 + ], + [ + "▁examinations", + -12.413829803466797 + ], + [ + "bon", + -12.413897514343262 + ], + [ + "▁vapor", + -12.413926124572754 + ], + [ + "etta", + -12.413969039916992 + ], + [ + "PRO", + -12.413999557495115 + ], + [ + "▁0.1", + -12.414034843444824 + ], + [ + "▁Prix", + -12.414118766784668 + ], + [ + "▁wherein", + -12.414165496826172 + ], + [ + "▁Apollo", + -12.414166450500488 + ], + [ + "▁Xi", + -12.414204597473145 + ], + [ + "gu", + -12.414323806762695 + ], + [ + "hit", + -12.41432762145996 + ], + [ + "▁Cricket", + -12.414335250854492 + ], + [ + "▁Places", + -12.414575576782228 + ], + [ + "▁Hands", + -12.41461181640625 + ], + [ + "▁Mask", + -12.41468334197998 + ], + [ + "Group", + -12.414855003356934 + ], + [ + "▁translations", + -12.414907455444336 + ], + [ + "▁noisy", + -12.41498851776123 + ], + [ + "shell", + -12.414996147155762 + ], + [ + "London", + -12.414998054504396 + ], + [ + "hold", + -12.41504192352295 + ], + [ + "▁finely", + -12.4151611328125 + ], + [ + "▁Caroline", + -12.415166854858398 + ], + [ + "jar", + -12.41518497467041 + ], + [ + "▁Quite", + -12.415224075317385 + ], + [ + "▁maximise", + -12.415311813354492 + ], + [ + "▁oily", + -12.415447235107422 + ], + [ + "▁CP", + -12.415576934814451 + ], + [ + "▁resembles", + -12.41561508178711 + ], + [ + "▁Bulgaria", + -12.415643692016602 + ], + [ + "build", + -12.41565227508545 + ], + [ + "▁scrutiny", + -12.415655136108398 + ], + [ + "▁Lily", + -12.415674209594728 + ], + [ + "001", + -12.415693283081056 + ], + [ + "Win", + -12.415698051452637 + ], + [ + "▁responds", + -12.41574764251709 + ], + [ + "▁glaze", + -12.415915489196776 + ], + [ + "▁Patrol", + -12.415925979614258 + ], + [ + "Moreover", + -12.416013717651367 + ], + [ + "▁possesses", + -12.41612720489502 + ], + [ + "avi", + -12.4161376953125 + ], + [ + "▁Diesel", + -12.416253089904783 + ], + [ + "▁poultry", + -12.416486740112305 + ], + [ + "▁cybersecurity", + -12.416494369506836 + ], + [ + "▁Continental", + -12.416596412658691 + ], + [ + "▁outgoing", + -12.416611671447754 + ], + [ + "ota", + -12.416763305664062 + ], + [ + "▁abused", + -12.416857719421388 + ], + [ + "▁pouch", + -12.41723346710205 + ], + [ + "▁numbered", + -12.417262077331545 + ], + [ + "▁Winner", + -12.41730785369873 + ], + [ + "ock", + -12.417497634887695 + ], + [ + "▁enclosure", + -12.41762351989746 + ], + [ + "▁crosses", + -12.417633056640623 + ], + [ + "trip", + -12.417704582214355 + ], + [ + "▁salsa", + 
-12.417709350585938 + ], + [ + "▁tyre", + -12.41774082183838 + ], + [ + "▁announces", + -12.417805671691896 + ], + [ + "Custom", + -12.417893409729004 + ], + [ + "▁supreme", + -12.417935371398926 + ], + [ + "▁whichever", + -12.41794490814209 + ], + [ + "▁Piece", + -12.41806411743164 + ], + [ + "▁PD", + -12.418075561523438 + ], + [ + "▁surgeries", + -12.418190956115724 + ], + [ + "▁Patent", + -12.418205261230469 + ], + [ + "▁unavailable", + -12.418224334716797 + ], + [ + "▁GREAT", + -12.41847038269043 + ], + [ + "▁drafted", + -12.41849422454834 + ], + [ + "▁thriller", + -12.418697357177734 + ], + [ + "▁Yang", + -12.41882610321045 + ], + [ + "▁9-", + -12.419000625610352 + ], + [ + "▁carb", + -12.41903591156006 + ], + [ + "▁GS", + -12.41921043395996 + ], + [ + "▁Cr", + -12.419307708740234 + ], + [ + "aria", + -12.41930866241455 + ], + [ + "▁Zimbabwe", + -12.419377326965332 + ], + [ + "▁Princeton", + -12.419458389282228 + ], + [ + "▁Plymouth", + -12.419622421264648 + ], + [ + "▁NA", + -12.419742584228516 + ], + [ + "▁imaginative", + -12.419872283935549 + ], + [ + "▁methodologies", + -12.419894218444824 + ], + [ + "▁inequality", + -12.419918060302734 + ], + [ + "▁Rams", + -12.42002773284912 + ], + [ + "forward", + -12.420278549194336 + ], + [ + "▁robotic", + -12.420326232910156 + ], + [ + "▁scaling", + -12.420380592346191 + ], + [ + "lik", + -12.420524597167969 + ], + [ + "▁Coaching", + -12.420730590820312 + ], + [ + "Human", + -12.420771598815918 + ], + [ + "▁Presidential", + -12.420791625976562 + ], + [ + "▁potassium", + -12.420853614807127 + ], + [ + "▁permissions", + -12.421055793762209 + ], + [ + "▁Emperor", + -12.421064376831056 + ], + [ + "Smith", + -12.42106819152832 + ], + [ + "Quality", + -12.421074867248535 + ], + [ + "▁bombs", + -12.42108154296875 + ], + [ + "▁Kia", + -12.421342849731444 + ], + [ + "▁rescued", + -12.421344757080078 + ], + [ + "▁650", + -12.421350479125977 + ], + [ + "awa", + -12.421517372131348 + ], + [ + "▁Harley", + -12.421533584594728 + ], + [ + "▁Carnival", + -12.421740531921388 + ], + [ + "▁Monthly", + -12.421813011169434 + ], + [ + "▁1600", + -12.42192840576172 + ], + [ + "▁Prairie", + -12.421935081481934 + ], + [ + "▁ebay", + -12.422048568725586 + ], + [ + "▁Evolution", + -12.42215633392334 + ], + [ + "▁Boulder", + -12.422168731689451 + ], + [ + "ucci", + -12.422185897827148 + ], + [ + "▁Austrian", + -12.422201156616213 + ], + [ + "▁chilled", + -12.422258377075195 + ], + [ + "▁melody", + -12.422418594360352 + ], + [ + "▁Compact", + -12.422422409057615 + ], + [ + "▁Sad", + -12.422423362731934 + ], + [ + "▁amusement", + -12.42243194580078 + ], + [ + "▁Kick", + -12.42246150970459 + ], + [ + "▁Manila", + -12.422505378723145 + ], + [ + "▁Clerk", + -12.4227294921875 + ], + [ + "DD", + -12.42274570465088 + ], + [ + "fix", + -12.422880172729492 + ], + [ + "▁shareholder", + -12.422910690307615 + ], + [ + "▁skinny", + -12.422914505004885 + ], + [ + "▁caves", + -12.42296028137207 + ], + [ + "▁Embassy", + -12.42302131652832 + ], + [ + "▁kayak", + -12.4230318069458 + ], + [ + "▁Printing", + -12.423068046569824 + ], + [ + "▁brighten", + -12.423091888427734 + ], + [ + "▁monuments", + -12.423198699951172 + ], + [ + "▁Plants", + -12.423212051391602 + ], + [ + "▁ounce", + -12.423234939575195 + ], + [ + "▁southeast", + -12.423330307006836 + ], + [ + "8,000", + -12.423343658447266 + ], + [ + "BT", + -12.42340850830078 + ], + [ + "▁drip", + -12.423425674438477 + ], + [ + "▁companions", + -12.423469543457031 + ], + [ + "Build", + -12.423484802246094 + ], + [ + "▁phenomena", + 
-12.423555374145508 + ], + [ + "▁5:00", + -12.423617362976074 + ], + [ + "▁reservoir", + -12.423628807067873 + ], + [ + "▁Found", + -12.423664093017578 + ], + [ + "▁thyroid", + -12.423687934875488 + ], + [ + "▁acupuncture", + -12.423788070678713 + ], + [ + "▁Patricia", + -12.423802375793455 + ], + [ + "▁Apache", + -12.423892974853516 + ], + [ + "▁Ronald", + -12.423930168151855 + ], + [ + "▁extracts", + -12.423935890197754 + ], + [ + "▁Birds", + -12.42404842376709 + ], + [ + "kel", + -12.42416286468506 + ], + [ + "oid", + -12.424205780029297 + ], + [ + "running", + -12.42424201965332 + ], + [ + "▁Models", + -12.424361228942873 + ], + [ + "▁ml", + -12.42438793182373 + ], + [ + "▁pup", + -12.42444133758545 + ], + [ + "hl", + -12.424530982971191 + ], + [ + "▁hurting", + -12.424593925476074 + ], + [ + "▁Brett", + -12.424813270568848 + ], + [ + "▁Por", + -12.424917221069336 + ], + [ + "▁pains", + -12.42495346069336 + ], + [ + "lation", + -12.425007820129396 + ], + [ + "▁Minor", + -12.425201416015623 + ], + [ + "February", + -12.42542839050293 + ], + [ + "block", + -12.425487518310549 + ], + [ + "▁Iraqi", + -12.425517082214355 + ], + [ + "▁Issues", + -12.425601959228516 + ], + [ + "leg", + -12.425758361816406 + ], + [ + "▁pi", + -12.425775527954102 + ], + [ + "▁prayed", + -12.425816535949709 + ], + [ + "▁replaces", + -12.425877571105955 + ], + [ + "OC", + -12.42591667175293 + ], + [ + "▁IA", + -12.425965309143066 + ], + [ + "▁deliberate", + -12.426012992858888 + ], + [ + "▁ac", + -12.426044464111328 + ], + [ + "usa", + -12.426092147827148 + ], + [ + "mos", + -12.42609691619873 + ], + [ + "▁harmless", + -12.426240921020508 + ], + [ + "▁CIA", + -12.426350593566896 + ], + [ + "▁Interview", + -12.426403999328612 + ], + [ + "▁Ads", + -12.42661476135254 + ], + [ + "▁Lego", + -12.426643371582031 + ], + [ + "Ro", + -12.426651000976562 + ], + [ + "▁Kathy", + -12.426765441894531 + ], + [ + "▁soaking", + -12.426773071289062 + ], + [ + "▁cheerful", + -12.426825523376465 + ], + [ + "▁stimulation", + -12.426828384399414 + ], + [ + "▁continental", + -12.426838874816896 + ], + [ + "▁FR", + -12.42703342437744 + ], + [ + "▁salty", + -12.427046775817873 + ], + [ + "▁HDMI", + -12.427213668823242 + ], + [ + "▁MX", + -12.427224159240724 + ], + [ + "▁secular", + -12.427289009094238 + ], + [ + "▁winery", + -12.427488327026367 + ], + [ + "▁unchanged", + -12.427541732788086 + ], + [ + "▁scars", + -12.42757797241211 + ], + [ + "▁slavery", + -12.427637100219728 + ], + [ + "ра", + -12.427741050720217 + ], + [ + "▁Burton", + -12.427766799926758 + ], + [ + "▁LIVE", + -12.42778205871582 + ], + [ + "▁Twenty", + -12.42778491973877 + ], + [ + "▁critic", + -12.427889823913574 + ], + [ + "▁1972", + -12.428010940551758 + ], + [ + "▁Nam", + -12.428030014038086 + ], + [ + "▁2-1", + -12.428106307983398 + ], + [ + "▁Dow", + -12.42813205718994 + ], + [ + "▁2.2", + -12.428141593933104 + ], + [ + "▁emphasized", + -12.42836856842041 + ], + [ + "▁pianist", + -12.428399085998535 + ], + [ + "▁expands", + -12.428619384765623 + ], + [ + "▁legislature", + -12.428813934326172 + ], + [ + "▁induced", + -12.428845405578612 + ], + [ + "▁Uncle", + -12.42885684967041 + ], + [ + "BS", + -12.42900562286377 + ], + [ + "kit", + -12.429224967956545 + ], + [ + "▁mitigation", + -12.429242134094238 + ], + [ + "▁tu", + -12.429290771484377 + ], + [ + "▁confirming", + -12.429333686828612 + ], + [ + "▁intern", + -12.429359436035156 + ], + [ + "aire", + -12.429685592651367 + ], + [ + "established", + -12.429723739624023 + ], + [ + "Cover", + -12.429731369018556 + ], 
+ [ + "▁Kara", + -12.429791450500488 + ], + [ + "▁pushes", + -12.429808616638184 + ], + [ + "▁astonishing", + -12.429877281188965 + ], + [ + "▁Conduct", + -12.430065155029297 + ], + [ + "▁Pur", + -12.430109977722168 + ], + [ + "Code", + -12.430119514465332 + ], + [ + "▁drafting", + -12.43012809753418 + ], + [ + "▁generators", + -12.430176734924316 + ], + [ + "▁Mil", + -12.430203437805176 + ], + [ + "▁Toys", + -12.430418014526367 + ], + [ + "▁contaminated", + -12.430642127990724 + ], + [ + "smith", + -12.430824279785156 + ], + [ + "▁haha", + -12.43086051940918 + ], + [ + "▁succession", + -12.43099880218506 + ], + [ + "▁municipality", + -12.431022644042969 + ], + [ + "▁expire", + -12.43120288848877 + ], + [ + "▁sketches", + -12.431219100952148 + ], + [ + "▁expertly", + -12.431229591369627 + ], + [ + "▁Healing", + -12.431259155273438 + ], + [ + "▁Jenny", + -12.43127727508545 + ], + [ + "▁propane", + -12.431297302246094 + ], + [ + "▁PV", + -12.431340217590332 + ], + [ + "▁Tre", + -12.431344032287598 + ], + [ + "▁negotiated", + -12.43138313293457 + ], + [ + "▁volatile", + -12.431405067443848 + ], + [ + "▁primer", + -12.431406021118164 + ], + [ + "5,", + -12.431438446044922 + ], + [ + "bay", + -12.431496620178224 + ], + [ + "▁gotta", + -12.431526184082031 + ], + [ + "▁upscale", + -12.431649208068848 + ], + [ + "▁Rotary", + -12.431812286376951 + ], + [ + "▁cruising", + -12.431815147399902 + ], + [ + "▁Lean", + -12.431840896606444 + ], + [ + "▁brewery", + -12.431899070739746 + ], + [ + "▁visualize", + -12.431951522827148 + ], + [ + "▁fulfil", + -12.432243347167969 + ], + [ + "▁Guidelines", + -12.432323455810549 + ], + [ + "▁dan", + -12.43244743347168 + ], + [ + "▁verb", + -12.43249797821045 + ], + [ + "cross", + -12.432558059692385 + ], + [ + "▁Updated", + -12.432642936706545 + ], + [ + "ture", + -12.43264865875244 + ], + [ + "▁pitching", + -12.432682991027832 + ], + [ + "▁Outstanding", + -12.43269157409668 + ], + [ + "▁staged", + -12.432933807373049 + ], + [ + "▁filtration", + -12.43300724029541 + ], + [ + "▁mic", + -12.433042526245115 + ], + [ + "▁empowerment", + -12.43308448791504 + ], + [ + "▁Protocol", + -12.433133125305176 + ], + [ + "▁Friendly", + -12.433144569396973 + ], + [ + "tta", + -12.433191299438477 + ], + [ + "▁refreshed", + -12.4332914352417 + ], + [ + "Professional", + -12.433306694030762 + ], + [ + "▁surrounds", + -12.433377265930176 + ], + [ + "▁contexts", + -12.433384895324709 + ], + [ + "▁pioneering", + -12.433385848999023 + ], + [ + "▁groceries", + -12.433537483215332 + ], + [ + "wp", + -12.433845520019531 + ], + [ + "wer", + -12.433853149414062 + ], + [ + "▁Thread", + -12.4338960647583 + ], + [ + "▁vest", + -12.43398094177246 + ], + [ + "▁topical", + -12.434024810791016 + ], + [ + "▁skull", + -12.43409538269043 + ], + [ + "▁Yard", + -12.434334754943848 + ], + [ + "▁vulnerabilities", + -12.434389114379885 + ], + [ + "▁gal", + -12.434402465820312 + ], + [ + "▁refuge", + -12.434460639953612 + ], + [ + "▁Fin", + -12.43459701538086 + ], + [ + "▁whale", + -12.434626579284668 + ], + [ + "▁triumph", + -12.43472957611084 + ], + [ + "▁differs", + -12.434853553771973 + ], + [ + "▁Fraser", + -12.4348783493042 + ], + [ + "▁waiver", + -12.435002326965332 + ], + [ + "71", + -12.435190200805664 + ], + [ + "▁asylum", + -12.435251235961914 + ], + [ + "▁destiny", + -12.43526840209961 + ], + [ + "Space", + -12.435285568237305 + ], + [ + "▁Nan", + -12.43533420562744 + ], + [ + "▁hostel", + -12.43540096282959 + ], + [ + "▁reef", + -12.43563461303711 + ], + [ + "SR", + -12.435641288757324 + ], + [ 
+ "▁formulas", + -12.435955047607422 + ], + [ + "▁Thor", + -12.435978889465332 + ], + [ + "▁firearms", + -12.435990333557127 + ], + [ + "▁headquartered", + -12.43600368499756 + ], + [ + "fashioned", + -12.436159133911133 + ], + [ + "▁5.0", + -12.436294555664062 + ], + [ + "▁rhetoric", + -12.43637752532959 + ], + [ + "Later", + -12.436402320861816 + ], + [ + "▁discourse", + -12.436524391174316 + ], + [ + "▁Gill", + -12.436690330505373 + ], + [ + "▁Bru", + -12.436702728271484 + ], + [ + "▁Nearly", + -12.436824798583984 + ], + [ + "▁swivel", + -12.436973571777344 + ], + [ + "child", + -12.43698024749756 + ], + [ + "▁Ky", + -12.437005996704102 + ], + [ + "▁collapsed", + -12.437326431274414 + ], + [ + "/3", + -12.43745231628418 + ], + [ + "▁ripped", + -12.437699317932127 + ], + [ + "Finding", + -12.437734603881836 + ], + [ + "▁precautions", + -12.437796592712402 + ], + [ + "▁1/3", + -12.437873840332031 + ], + [ + "▁upholstered", + -12.437881469726562 + ], + [ + "gon", + -12.43795394897461 + ], + [ + "▁rude", + -12.43797206878662 + ], + [ + "▁Split", + -12.438060760498049 + ], + [ + "▁villas", + -12.43814468383789 + ], + [ + "▁recreate", + -12.438212394714355 + ], + [ + "▁1973", + -12.438230514526367 + ], + [ + "▁Yi", + -12.438244819641112 + ], + [ + "▁torture", + -12.438982009887695 + ], + [ + "▁fundamentally", + -12.439050674438477 + ], + [ + "▁vouchers", + -12.43906307220459 + ], + [ + "▁pesticides", + -12.439067840576172 + ], + [ + "▁Pac", + -12.439117431640623 + ], + [ + "▁gladly", + -12.439167976379396 + ], + [ + "▁String", + -12.439227104187012 + ], + [ + "▁abnormal", + -12.439278602600098 + ], + [ + "Learning", + -12.439594268798828 + ], + [ + "▁Pu", + -12.439667701721191 + ], + [ + "▁preventative", + -12.439720153808594 + ], + [ + "▁thy", + -12.439764976501465 + ], + [ + "▁averaged", + -12.439781188964844 + ], + [ + "▁rebuilt", + -12.439860343933104 + ], + [ + "▁dome", + -12.439905166625977 + ], + [ + "▁boating", + -12.439952850341797 + ], + [ + "kov", + -12.43996238708496 + ], + [ + "▁2.1", + -12.44012451171875 + ], + [ + "▁demolition", + -12.44013500213623 + ], + [ + "▁Sit", + -12.44015884399414 + ], + [ + "▁optimism", + -12.440177917480469 + ], + [ + "▁Tire", + -12.4402494430542 + ], + [ + "▁honeymoon", + -12.440438270568848 + ], + [ + "Interior", + -12.44045639038086 + ], + [ + "▁MM", + -12.440534591674805 + ], + [ + "NP", + -12.440566062927246 + ], + [ + "▁outpatient", + -12.44068431854248 + ], + [ + "▁grandson", + -12.440778732299805 + ], + [ + "▁Clock", + -12.440834045410156 + ], + [ + "▁PCs", + -12.440851211547852 + ], + [ + "▁Written", + -12.441105842590332 + ], + [ + "78", + -12.441192626953123 + ], + [ + "▁chimney", + -12.44123077392578 + ], + [ + "▁surrender", + -12.441258430480955 + ], + [ + "▁attire", + -12.441283226013184 + ], + [ + "DI", + -12.441293716430664 + ], + [ + "▁Enforcement", + -12.44129467010498 + ], + [ + "▁windshield", + -12.441309928894045 + ], + [ + "▁Veterinary", + -12.441366195678713 + ], + [ + "▁Subject", + -12.441393852233888 + ], + [ + "▁Hearing", + -12.44200611114502 + ], + [ + "▁Audit", + -12.44201946258545 + ], + [ + "▁cupboard", + -12.442052841186523 + ], + [ + "▁enrich", + -12.442062377929688 + ], + [ + "▁Alt", + -12.44236183166504 + ], + [ + "▁investigator", + -12.442429542541504 + ], + [ + "▁Nobel", + -12.44266128540039 + ], + [ + "RR", + -12.442668914794922 + ], + [ + "dle", + -12.442741394042969 + ], + [ + "▁symptom", + -12.442900657653809 + ], + [ + "▁regimen", + -12.442911148071287 + ], + [ + "▁Liquid", + -12.442959785461426 + ], + [ + 
"▁Shannon", + -12.443032264709473 + ], + [ + "Provide", + -12.443058013916016 + ], + [ + "▁$100,000", + -12.443108558654783 + ], + [ + "▁plated", + -12.44313907623291 + ], + [ + "▁Writer", + -12.44320297241211 + ], + [ + "▁patrol", + -12.443208694458008 + ], + [ + "metal", + -12.44330883026123 + ], + [ + "▁rat", + -12.443385124206545 + ], + [ + "▁specialties", + -12.44340705871582 + ], + [ + "▁collage", + -12.443411827087402 + ], + [ + "16.", + -12.44347095489502 + ], + [ + "▁violated", + -12.443563461303713 + ], + [ + "Looks", + -12.44357681274414 + ], + [ + "▁titanium", + -12.443840026855469 + ], + [ + "▁LE", + -12.443865776062012 + ], + [ + "▁proficiency", + -12.443897247314451 + ], + [ + "▁organising", + -12.443923950195312 + ], + [ + "▁owing", + -12.4439697265625 + ], + [ + "▁Fantastic", + -12.44398307800293 + ], + [ + "▁Beginning", + -12.444045066833496 + ], + [ + "▁deposited", + -12.444051742553713 + ], + [ + "▁distressed", + -12.444215774536133 + ], + [ + "▁Exercise", + -12.444233894348145 + ], + [ + "▁vaccines", + -12.444470405578612 + ], + [ + "▁cardio", + -12.444531440734863 + ], + [ + "▁Handbook", + -12.444540023803713 + ], + [ + "-2019", + -12.444564819335938 + ], + [ + "▁Blend", + -12.444570541381836 + ], + [ + "▁Suzuki", + -12.444704055786133 + ], + [ + "Explore", + -12.445161819458008 + ], + [ + "short", + -12.44516944885254 + ], + [ + "Apart", + -12.445219039916992 + ], + [ + "▁toddlers", + -12.445282936096191 + ], + [ + "▁entirety", + -12.445435523986816 + ], + [ + "▁Reid", + -12.445479393005373 + ], + [ + "▁spreads", + -12.44550323486328 + ], + [ + "▁Eugene", + -12.445528030395508 + ], + [ + "Feel", + -12.445656776428224 + ], + [ + "▁conform", + -12.445762634277344 + ], + [ + "▁capsules", + -12.445836067199709 + ], + [ + "▁Bottom", + -12.445988655090332 + ], + [ + "▁LTD", + -12.446019172668455 + ], + [ + "▁Cow", + -12.446139335632324 + ], + [ + "fiction", + -12.446233749389648 + ], + [ + "▁fertilizer", + -12.44626808166504 + ], + [ + "▁slab", + -12.446380615234377 + ], + [ + "▁720", + -12.446430206298828 + ], + [ + "▁hunter", + -12.44655990600586 + ], + [ + "▁chilly", + -12.446983337402344 + ], + [ + "▁Sold", + -12.44717502593994 + ], + [ + "▁Doctors", + -12.447182655334473 + ], + [ + "▁borrowers", + -12.447211265563965 + ], + [ + "▁dolls", + -12.447242736816406 + ], + [ + "▁protector", + -12.447432518005373 + ], + [ + "▁coveted", + -12.447460174560549 + ], + [ + "▁estimation", + -12.44760799407959 + ], + [ + "fr", + -12.447665214538574 + ], + [ + "▁Important", + -12.447978019714355 + ], + [ + "index", + -12.448140144348145 + ], + [ + "▁Reach", + -12.448143005371094 + ], + [ + "▁incur", + -12.448145866394045 + ], + [ + "▁weighed", + -12.44817066192627 + ], + [ + "▁unfinished", + -12.448172569274902 + ], + [ + "▁Cork", + -12.448238372802734 + ], + [ + "▁retiring", + -12.448248863220217 + ], + [ + "Public", + -12.44827938079834 + ], + [ + "▁resignation", + -12.448284149169922 + ], + [ + "▁testament", + -12.448286056518556 + ], + [ + "▁Airways", + -12.44834327697754 + ], + [ + "▁dispose", + -12.448467254638672 + ], + [ + "▁VC", + -12.448567390441896 + ], + [ + "▁Articles", + -12.448628425598145 + ], + [ + "▁Wheels", + -12.44864273071289 + ], + [ + "▁believer", + -12.448655128479004 + ], + [ + "▁Stein", + -12.448740005493164 + ], + [ + "▁Prep", + -12.448840141296388 + ], + [ + "▁ob", + -12.44886302947998 + ], + [ + "101", + -12.448907852172852 + ], + [ + "▁GC", + -12.448981285095217 + ], + [ + "▁Behind", + -12.449278831481934 + ], + [ + "▁Ji", + -12.449433326721191 + ], + [ 
+ "▁Sessions", + -12.449457168579102 + ], + [ + "▁terminals", + -12.449518203735352 + ], + [ + "▁sim", + -12.449705123901367 + ], + [ + "illo", + -12.449795722961426 + ], + [ + "lim", + -12.449950218200684 + ], + [ + "▁Harvest", + -12.44998550415039 + ], + [ + "change", + -12.45000457763672 + ], + [ + "▁Evaluation", + -12.450005531311035 + ], + [ + "▁metadata", + -12.450011253356934 + ], + [ + "▁Canvas", + -12.450045585632324 + ], + [ + "▁shines", + -12.450170516967772 + ], + [ + "▁Afghan", + -12.450173377990724 + ], + [ + "▁toughest", + -12.450187683105469 + ], + [ + "▁1-800-", + -12.450189590454102 + ], + [ + "▁Warranty", + -12.450249671936035 + ], + [ + "▁chapel", + -12.450363159179688 + ], + [ + "▁echo", + -12.450443267822266 + ], + [ + "▁Judy", + -12.4504976272583 + ], + [ + "▁MLB", + -12.450522422790527 + ], + [ + "▁collaborated", + -12.450522422790527 + ], + [ + "▁systemic", + -12.450546264648438 + ], + [ + "strong", + -12.45072078704834 + ], + [ + "▁butterflies", + -12.450878143310549 + ], + [ + "▁faint", + -12.451183319091797 + ], + [ + "▁Duck", + -12.451412200927734 + ], + [ + "HS", + -12.451465606689451 + ], + [ + "▁Previously", + -12.451499938964844 + ], + [ + "Write", + -12.451608657836914 + ], + [ + "▁receipts", + -12.451695442199709 + ], + [ + "▁Precision", + -12.45169734954834 + ], + [ + "▁Devil", + -12.451836585998535 + ], + [ + "▁Push", + -12.451913833618164 + ], + [ + "pound", + -12.452013969421388 + ], + [ + "▁myths", + -12.452025413513184 + ], + [ + "▁tighten", + -12.452200889587402 + ], + [ + "▁11:00", + -12.45233154296875 + ], + [ + "▁conductor", + -12.452407836914062 + ], + [ + "▁radiator", + -12.452452659606934 + ], + [ + "▁flourish", + -12.452570915222168 + ], + [ + "och", + -12.45270824432373 + ], + [ + "▁reliance", + -12.452805519104004 + ], + [ + "▁Bing", + -12.452875137329102 + ], + [ + "▁mailbox", + -12.452885627746582 + ], + [ + "▁slowed", + -12.45298671722412 + ], + [ + "▁Mari", + -12.453086853027344 + ], + [ + "▁aunt", + -12.453147888183594 + ], + [ + "cup", + -12.45317268371582 + ], + [ + "Hot", + -12.45323085784912 + ], + [ + "Ni", + -12.45331573486328 + ], + [ + "▁cyclists", + -12.453362464904783 + ], + [ + "▁wizard", + -12.453489303588867 + ], + [ + "▁sunscreen", + -12.453566551208496 + ], + [ + "▁crossover", + -12.453690528869627 + ], + [ + "▁growers", + -12.45370101928711 + ], + [ + "▁conveyor", + -12.453810691833496 + ], + [ + "▁Terra", + -12.453914642333984 + ], + [ + "▁scar", + -12.453943252563477 + ], + [ + "▁hostile", + -12.454084396362305 + ], + [ + "tus", + -12.454320907592772 + ], + [ + "2014", + -12.454383850097656 + ], + [ + "▁FY", + -12.454487800598145 + ], + [ + "▁EL", + -12.454567909240724 + ], + [ + "▁plea", + -12.454670906066896 + ], + [ + "▁inspector", + -12.454697608947754 + ], + [ + "Monday", + -12.454737663269045 + ], + [ + "▁redeem", + -12.454804420471191 + ], + [ + "▁trays", + -12.454863548278809 + ], + [ + "Cool", + -12.454875946044922 + ], + [ + "▁distraction", + -12.4551420211792 + ], + [ + "▁USE", + -12.455157279968262 + ], + [ + "▁glamorous", + -12.455262184143066 + ], + [ + "IE", + -12.455414772033691 + ], + [ + "Bi", + -12.455439567565918 + ], + [ + "▁stitches", + -12.45550537109375 + ], + [ + "▁Salesforce", + -12.455523490905762 + ], + [ + "Regardless", + -12.455581665039062 + ], + [ + "▁Moss", + -12.455620765686035 + ], + [ + "▁1969", + -12.455818176269531 + ], + [ + "▁Spotify", + -12.45583438873291 + ], + [ + "▁embroidered", + -12.455978393554688 + ], + [ + "▁residue", + -12.456002235412598 + ], + [ + "dry", + 
-12.45608901977539 + ], + [ + "▁fathers", + -12.456136703491213 + ], + [ + "▁Specifically", + -12.45627784729004 + ], + [ + "▁Gay", + -12.456311225891112 + ], + [ + "ET", + -12.456506729125977 + ], + [ + "▁neon", + -12.456549644470217 + ], + [ + "▁throne", + -12.456592559814451 + ], + [ + "▁Nazi", + -12.456738471984863 + ], + [ + "thank", + -12.456745147705078 + ], + [ + "\").", + -12.45687484741211 + ], + [ + "▁tow", + -12.456938743591309 + ], + [ + "▁Stack", + -12.456964492797852 + ], + [ + "▁Ferguson", + -12.456995010375977 + ], + [ + "▁condos", + -12.456995964050291 + ], + [ + "▁averaging", + -12.457009315490724 + ], + [ + "▁Ol", + -12.457025527954102 + ], + [ + "▁Finnish", + -12.457033157348633 + ], + [ + "▁breastfeeding", + -12.457111358642578 + ], + [ + "▁#4", + -12.457155227661133 + ], + [ + "▁Comic", + -12.45720672607422 + ], + [ + "▁Effective", + -12.457408905029297 + ], + [ + "break", + -12.457475662231444 + ], + [ + "▁warrior", + -12.457487106323242 + ], + [ + "File", + -12.457571029663086 + ], + [ + "national", + -12.457579612731934 + ], + [ + "▁Buyer", + -12.457707405090332 + ], + [ + "▁reversed", + -12.4578857421875 + ], + [ + "▁benefited", + -12.457913398742676 + ], + [ + "▁6.5", + -12.457927703857422 + ], + [ + "▁hem", + -12.457928657531738 + ], + [ + "▁50-", + -12.458032608032228 + ], + [ + "▁distractions", + -12.458035469055176 + ], + [ + "▁WHAT", + -12.458202362060549 + ], + [ + "▁SAT", + -12.458218574523926 + ], + [ + "▁preventive", + -12.458391189575195 + ], + [ + "▁Ikea", + -12.458420753479004 + ], + [ + "▁Megan", + -12.45842742919922 + ], + [ + "▁catalyst", + -12.458504676818848 + ], + [ + "aki", + -12.45852756500244 + ], + [ + "pu", + -12.458707809448242 + ], + [ + "▁Marble", + -12.45875358581543 + ], + [ + "▁erosion", + -12.45877170562744 + ], + [ + "▁Promo", + -12.459150314331056 + ], + [ + "▁Pioneer", + -12.459478378295898 + ], + [ + "tive", + -12.459498405456545 + ], + [ + "▁embark", + -12.459553718566896 + ], + [ + "▁pancakes", + -12.45957851409912 + ], + [ + "large", + -12.459638595581056 + ], + [ + "▁balloons", + -12.459680557250977 + ], + [ + "▁bells", + -12.459697723388672 + ], + [ + "hall", + -12.46038818359375 + ], + [ + "▁reclaimed", + -12.460404396057127 + ], + [ + "▁residing", + -12.460536003112791 + ], + [ + "sc", + -12.460745811462402 + ], + [ + "▁Lip", + -12.460817337036133 + ], + [ + "▁criticized", + -12.460866928100586 + ], + [ + "▁Gregory", + -12.461540222167969 + ], + [ + "wal", + -12.461546897888184 + ], + [ + "▁Depression", + -12.461689949035645 + ], + [ + "Quick", + -12.46173095703125 + ], + [ + "▁Cardiff", + -12.461739540100098 + ], + [ + "▁podcasts", + -12.461791038513184 + ], + [ + "▁marvelous", + -12.461797714233398 + ], + [ + "▁Meta", + -12.461853981018066 + ], + [ + "language", + -12.461883544921877 + ], + [ + "Private", + -12.461915969848633 + ], + [ + "▁directories", + -12.461936950683594 + ], + [ + "▁fl", + -12.461949348449709 + ], + [ + "▁sneakers", + -12.462005615234377 + ], + [ + "▁Graphics", + -12.46205234527588 + ], + [ + "▁seams", + -12.46207046508789 + ], + [ + "▁speeding", + -12.462128639221191 + ], + [ + "▁sushi", + -12.46245002746582 + ], + [ + "▁ce", + -12.462637901306152 + ], + [ + "▁lessen", + -12.46269416809082 + ], + [ + "▁Lightning", + -12.46271800994873 + ], + [ + "Port", + -12.463006019592283 + ], + [ + "▁Frost", + -12.463144302368164 + ], + [ + "▁Mp", + -12.46324348449707 + ], + [ + "▁recommending", + -12.463334083557127 + ], + [ + "▁Sisters", + -12.46341037750244 + ], + [ + "▁tariffs", + -12.463557243347168 + ], 
+ [ + "▁gaze", + -12.463781356811523 + ], + [ + "▁jealous", + -12.463810920715332 + ], + [ + "▁Muhammad", + -12.463841438293455 + ], + [ + "CP", + -12.463859558105469 + ], + [ + "▁graphs", + -12.463873863220217 + ], + [ + "▁downloadable", + -12.46395778656006 + ], + [ + "▁Fer", + -12.463985443115234 + ], + [ + "▁Grammy", + -12.464062690734863 + ], + [ + "▁expiration", + -12.464073181152344 + ], + [ + "▁facilitated", + -12.464128494262695 + ], + [ + "anne", + -12.464241981506348 + ], + [ + "Cat", + -12.464280128479004 + ], + [ + "▁Ethics", + -12.46431827545166 + ], + [ + "▁Tra", + -12.46438694000244 + ], + [ + "▁vitality", + -12.464399337768556 + ], + [ + "▁Answers", + -12.464410781860352 + ], + [ + "▁abdominal", + -12.464554786682127 + ], + [ + "▁Janet", + -12.464699745178224 + ], + [ + "▁Bra", + -12.464702606201172 + ], + [ + "▁closets", + -12.46489143371582 + ], + [ + "▁Syndrome", + -12.464961051940918 + ], + [ + "▁''", + -12.464995384216309 + ], + [ + "▁downstairs", + -12.465015411376951 + ], + [ + "▁eventual", + -12.465024948120115 + ], + [ + "▁NI", + -12.465143203735352 + ], + [ + "▁classmates", + -12.465149879455566 + ], + [ + "▁filtered", + -12.465266227722168 + ], + [ + "▁Patch", + -12.465373039245604 + ], + [ + "▁Minecraft", + -12.465483665466309 + ], + [ + "▁attentive", + -12.465542793273926 + ], + [ + "▁Mate", + -12.465593338012695 + ], + [ + "ato", + -12.465713500976562 + ], + [ + "▁Quote", + -12.465758323669434 + ], + [ + "▁sparked", + -12.465786933898926 + ], + [ + "thirds", + -12.4658203125 + ], + [ + "▁crochet", + -12.465829849243164 + ], + [ + "▁cultivation", + -12.46584701538086 + ], + [ + "▁aromatic", + -12.465848922729492 + ], + [ + "▁portfolios", + -12.465864181518556 + ], + [ + "▁resolving", + -12.465954780578612 + ], + [ + "▁Translation", + -12.465996742248535 + ], + [ + "▁nominee", + -12.46604824066162 + ], + [ + "▁quo", + -12.466104507446287 + ], + [ + "▁Owners", + -12.466214179992676 + ], + [ + "science", + -12.46623706817627 + ], + [ + "▁Launch", + -12.46630859375 + ], + [ + "▁conventions", + -12.466389656066896 + ], + [ + "▁keeper", + -12.466394424438477 + ], + [ + "zing", + -12.466497421264648 + ], + [ + "onia", + -12.466622352600098 + ], + [ + "▁Individuals", + -12.466644287109377 + ], + [ + "▁Performing", + -12.46670913696289 + ], + [ + "▁installers", + -12.4668550491333 + ], + [ + "▁Automation", + -12.466917037963867 + ], + [ + "▁tutors", + -12.467011451721191 + ], + [ + "▁13.", + -12.467019081115724 + ], + [ + "▁Specialists", + -12.467120170593262 + ], + [ + "ito", + -12.467177391052246 + ], + [ + "lf", + -12.467275619506836 + ], + [ + "▁Eight", + -12.46729850769043 + ], + [ + "hai", + -12.467302322387695 + ], + [ + "▁CCTV", + -12.467327117919922 + ], + [ + "layer", + -12.467388153076172 + ], + [ + "▁breadth", + -12.467412948608398 + ], + [ + "▁Semi", + -12.46742820739746 + ], + [ + "▁minded", + -12.467440605163574 + ], + [ + "▁Artificial", + -12.467499732971191 + ], + [ + "street", + -12.46751880645752 + ], + [ + "▁Nordic", + -12.467529296875 + ], + [ + "▁tensions", + -12.467812538146973 + ], + [ + "▁Brunswick", + -12.46800708770752 + ], + [ + "▁knots", + -12.468084335327148 + ], + [ + "een", + -12.468181610107422 + ], + [ + "▁dresser", + -12.468236923217772 + ], + [ + "▁MW", + -12.468313217163086 + ], + [ + "▁Seeing", + -12.46834659576416 + ], + [ + "▁Kind", + -12.468381881713867 + ], + [ + "▁230", + -12.468384742736816 + ], + [ + "▁textiles", + -12.468493461608888 + ], + [ + "▁7.5", + -12.468721389770508 + ], + [ + "▁Relief", + -12.46891975402832 + ], + 
[ + "▁walnut", + -12.468924522399902 + ], + [ + "▁Junction", + -12.469056129455566 + ], + [ + "▁horsepower", + -12.469120979309082 + ], + [ + "▁spike", + -12.469176292419434 + ], + [ + "▁impacting", + -12.46930980682373 + ], + [ + "▁aerospace", + -12.46940803527832 + ], + [ + "uda", + -12.469502449035645 + ], + [ + "▁respects", + -12.469520568847656 + ], + [ + "SD", + -12.469610214233398 + ], + [ + "▁slated", + -12.469632148742676 + ], + [ + "▁suburb", + -12.469647407531738 + ], + [ + "▁Comics", + -12.46967315673828 + ], + [ + "▁resemble", + -12.469724655151367 + ], + [ + "▁continents", + -12.469769477844238 + ], + [ + "▁courteous", + -12.470311164855955 + ], + [ + "▁briefing", + -12.470362663269045 + ], + [ + "ome", + -12.470470428466797 + ], + [ + "▁tubing", + -12.47047519683838 + ], + [ + "▁swift", + -12.470588684082031 + ], + [ + "▁accumulate", + -12.470633506774902 + ], + [ + "▁dwell", + -12.470945358276367 + ], + [ + "▁PL", + -12.470961570739746 + ], + [ + "▁neatly", + -12.470993041992188 + ], + [ + "Christian", + -12.471226692199709 + ], + [ + "▁9/11", + -12.47127628326416 + ], + [ + "▁preceded", + -12.471308708190918 + ], + [ + "▁Leicester", + -12.471331596374512 + ], + [ + "▁whites", + -12.471452713012695 + ], + [ + "▁hectic", + -12.471495628356934 + ], + [ + "dra", + -12.47152328491211 + ], + [ + "od", + -12.471524238586426 + ], + [ + "▁(18", + -12.471868515014648 + ], + [ + "▁recruited", + -12.471894264221191 + ], + [ + "▁Dear", + -12.472034454345703 + ], + [ + "▁HOME", + -12.472107887268066 + ], + [ + "TON", + -12.47211742401123 + ], + [ + "shan", + -12.472192764282228 + ], + [ + "▁Myers", + -12.472346305847168 + ], + [ + "▁migrate", + -12.47237777709961 + ], + [ + "▁premature", + -12.472752571105955 + ], + [ + "▁Liu", + -12.472822189331056 + ], + [ + "▁merged", + -12.472865104675291 + ], + [ + "▁Falcon", + -12.472882270812988 + ], + [ + "Avoid", + -12.472914695739746 + ], + [ + "Leave", + -12.473036766052246 + ], + [ + "83", + -12.47325325012207 + ], + [ + "▁consolidate", + -12.473268508911133 + ], + [ + "▁sung", + -12.473361015319824 + ], + [ + "▁Celebration", + -12.473432540893556 + ], + [ + "▁cellar", + -12.473505020141602 + ], + [ + "▁Exclusive", + -12.473529815673828 + ], + [ + "▁religions", + -12.473661422729492 + ], + [ + "▁bodily", + -12.473869323730469 + ], + [ + "▁tasked", + -12.47391128540039 + ], + [ + "nah", + -12.473934173583984 + ], + [ + "▁elevate", + -12.474249839782717 + ], + [ + "▁Dia", + -12.474273681640623 + ], + [ + "▁Height", + -12.474360466003418 + ], + [ + "▁Mold", + -12.47452449798584 + ], + [ + "▁uniforms", + -12.474541664123535 + ], + [ + "▁fullest", + -12.474691390991213 + ], + [ + "▁Ethiopia", + -12.474700927734377 + ], + [ + "▁Pendant", + -12.474763870239258 + ], + [ + "▁Pertaining", + -12.474763870239258 + ], + [ + "▁quaint", + -12.47476577758789 + ], + [ + "▁paramount", + -12.474776268005373 + ], + [ + "▁alot", + -12.474970817565918 + ], + [ + "▁10:30", + -12.475013732910156 + ], + [ + "vision", + -12.475037574768066 + ], + [ + "▁Agents", + -12.475069046020508 + ], + [ + "sun", + -12.475159645080566 + ], + [ + "▁Teams", + -12.475175857543944 + ], + [ + "▁terminate", + -12.475218772888184 + ], + [ + "▁unwind", + -12.475236892700195 + ], + [ + "3-", + -12.475263595581056 + ], + [ + "pole", + -12.4753999710083 + ], + [ + "▁midfielder", + -12.475438117980955 + ], + [ + "▁Il", + -12.47562313079834 + ], + [ + "NB", + -12.475829124450684 + ], + [ + "▁turbo", + -12.47595500946045 + ], + [ + "Plan", + -12.475992202758787 + ], + [ + "▁sidewalk", + 
-12.476184844970703 + ], + [ + "▁rot", + -12.476433753967283 + ], + [ + "▁Salad", + -12.476523399353027 + ], + [ + "GT", + -12.47652530670166 + ], + [ + "▁cucumber", + -12.476666450500488 + ], + [ + "▁Residents", + -12.476813316345217 + ], + [ + "▁TC", + -12.47685432434082 + ], + [ + "▁Choosing", + -12.476969718933104 + ], + [ + "450", + -12.476977348327637 + ], + [ + "▁funky", + -12.477118492126465 + ], + [ + "▁tranquil", + -12.47736644744873 + ], + [ + "▁Considering", + -12.47738552093506 + ], + [ + "iro", + -12.477394104003906 + ], + [ + "-50", + -12.477411270141602 + ], + [ + "Actually", + -12.477463722229004 + ], + [ + "(),", + -12.477519989013672 + ], + [ + "▁sensing", + -12.477519989013672 + ], + [ + "▁hikes", + -12.477546691894531 + ], + [ + "▁sage", + -12.47759246826172 + ], + [ + "▁Customs", + -12.477675437927246 + ], + [ + "▁CF", + -12.477713584899902 + ], + [ + "EA", + -12.477834701538086 + ], + [ + "lbs", + -12.478034019470217 + ], + [ + "▁Rec", + -12.478094100952148 + ], + [ + "▁presumably", + -12.478145599365234 + ], + [ + "▁animations", + -12.478181838989258 + ], + [ + "▁maternity", + -12.478353500366213 + ], + [ + "▁doctrine", + -12.478487014770508 + ], + [ + "▁desks", + -12.478632926940918 + ], + [ + "▁Normally", + -12.478765487670898 + ], + [ + "▁harvesting", + -12.478838920593262 + ], + [ + "▁peach", + -12.478886604309082 + ], + [ + "▁Playing", + -12.47892951965332 + ], + [ + "▁Bach", + -12.479063034057615 + ], + [ + "bag", + -12.47923183441162 + ], + [ + "▁convertible", + -12.47928524017334 + ], + [ + "▁100-", + -12.479394912719728 + ], + [ + "▁Andrews", + -12.479414939880373 + ], + [ + "mic", + -12.47946548461914 + ], + [ + "ose", + -12.480090141296388 + ], + [ + "▁RO", + -12.480107307434082 + ], + [ + "▁Wii", + -12.480131149291992 + ], + [ + "▁commodities", + -12.480152130126951 + ], + [ + "▁immunity", + -12.48015308380127 + ], + [ + "▁Belfast", + -12.480161666870115 + ], + [ + "▁USDA", + -12.480192184448242 + ], + [ + "▁denying", + -12.480247497558594 + ], + [ + "MLS", + -12.480265617370604 + ], + [ + "▁Hai", + -12.480273246765137 + ], + [ + "▁disrupt", + -12.480281829833984 + ], + [ + "▁Mostly", + -12.48036003112793 + ], + [ + "▁Become", + -12.480422019958496 + ], + [ + "sales", + -12.480438232421877 + ], + [ + "▁ga", + -12.480518341064451 + ], + [ + "▁vocational", + -12.480581283569336 + ], + [ + "▁TM", + -12.480610847473145 + ], + [ + "drop", + -12.48063850402832 + ], + [ + "▁Istanbul", + -12.480843544006348 + ], + [ + "▁taxation", + -12.480975151062012 + ], + [ + "▁Nonetheless", + -12.481006622314451 + ], + [ + "▁aired", + -12.48102855682373 + ], + [ + "▁aisle", + -12.48104190826416 + ], + [ + "▁hobbies", + -12.481054306030272 + ], + [ + "▁Seriously", + -12.481294631958008 + ], + [ + "▁stationary", + -12.481369972229004 + ], + [ + "▁habitats", + -12.481388092041016 + ], + [ + "▁Oaks", + -12.48139190673828 + ], + [ + "Got", + -12.481410026550291 + ], + [ + "▁Venus", + -12.48145866394043 + ], + [ + "▁Rising", + -12.48150634765625 + ], + [ + "▁plugs", + -12.481568336486816 + ], + [ + "▁iphone", + -12.481600761413574 + ], + [ + "▁gr", + -12.481715202331545 + ], + [ + "▁Legislature", + -12.48195457458496 + ], + [ + "▁Coastal", + -12.481993675231934 + ], + [ + "▁Klein", + -12.482019424438477 + ], + [ + "▁antivirus", + -12.48216438293457 + ], + [ + "mit", + -12.48222541809082 + ], + [ + "▁Meat", + -12.48222541809082 + ], + [ + "▁$250", + -12.48223114013672 + ], + [ + "▁rebuilding", + -12.482301712036133 + ], + [ + "▁inserts", + -12.482447624206545 + ], + [ + 
"▁Benefit", + -12.482462882995604 + ], + [ + "▁forged", + -12.482518196105955 + ], + [ + "▁plunge", + -12.482671737670898 + ], + [ + "sure", + -12.482705116271973 + ], + [ + "▁weaving", + -12.482979774475098 + ], + [ + "▁transcription", + -12.483038902282717 + ], + [ + "▁Dimension", + -12.48328685760498 + ], + [ + "▁advancements", + -12.48333740234375 + ], + [ + "▁frost", + -12.48336696624756 + ], + [ + "▁kale", + -12.483535766601562 + ], + [ + "▁chorus", + -12.483623504638672 + ], + [ + "Steve", + -12.483682632446287 + ], + [ + "▁Canadians", + -12.4837064743042 + ], + [ + "▁rains", + -12.483757019042969 + ], + [ + "▁compromising", + -12.48376178741455 + ], + [ + "▁PPC", + -12.483933448791504 + ], + [ + "atory", + -12.483953475952148 + ], + [ + "▁progressed", + -12.484064102172852 + ], + [ + "▁MK", + -12.484146118164062 + ], + [ + "▁aquarium", + -12.484170913696287 + ], + [ + "▁DI", + -12.48420524597168 + ], + [ + "Fun", + -12.48448371887207 + ], + [ + "sensitive", + -12.484501838684082 + ], + [ + "isi", + -12.484574317932127 + ], + [ + "▁ikea", + -12.48467254638672 + ], + [ + "▁parsley", + -12.4846830368042 + ], + [ + "fair", + -12.484755516052246 + ], + [ + "▁Carroll", + -12.484770774841309 + ], + [ + "91", + -12.48499584197998 + ], + [ + "speaking", + -12.485036849975586 + ], + [ + "▁Ak", + -12.485182762145996 + ], + [ + "▁Armed", + -12.485223770141602 + ], + [ + "▁cheating", + -12.48531436920166 + ], + [ + "▁Selling", + -12.48547649383545 + ], + [ + "▁Mayo", + -12.485492706298828 + ], + [ + "▁cleanse", + -12.485559463500977 + ], + [ + "▁advises", + -12.48558521270752 + ], + [ + "▁Shu", + -12.485607147216797 + ], + [ + "ef", + -12.485681533813477 + ], + [ + "▁forensic", + -12.485830307006836 + ], + [ + "▁Je", + -12.485844612121582 + ], + [ + "▁resigned", + -12.485876083374023 + ], + [ + "▁carrot", + -12.485923767089844 + ], + [ + "▁ruler", + -12.485991477966309 + ], + [ + "▁mentality", + -12.486003875732422 + ], + [ + "▁trafficking", + -12.486190795898438 + ], + [ + "fil", + -12.4862060546875 + ], + [ + "tia", + -12.486214637756348 + ], + [ + "▁Var", + -12.486441612243652 + ], + [ + "▁1971", + -12.486492156982422 + ], + [ + "▁Salem", + -12.486498832702637 + ], + [ + "rest", + -12.486509323120115 + ], + [ + "EE", + -12.486560821533203 + ], + [ + "Map", + -12.486763954162598 + ], + [ + "▁Panels", + -12.48688793182373 + ], + [ + "▁packets", + -12.48688793182373 + ], + [ + "▁additives", + -12.486893653869627 + ], + [ + "ged", + -12.486919403076172 + ], + [ + "rays", + -12.48692512512207 + ], + [ + "▁mounts", + -12.486964225769045 + ], + [ + "▁inflammatory", + -12.486977577209473 + ], + [ + "▁subjective", + -12.48710823059082 + ], + [ + "Size", + -12.487144470214844 + ], + [ + "▁Carson", + -12.487200736999512 + ], + [ + "▁liquids", + -12.487298011779783 + ], + [ + "▁dine", + -12.487324714660645 + ], + [ + "▁sympathy", + -12.487381935119627 + ], + [ + "▁pupil", + -12.48763942718506 + ], + [ + "▁rode", + -12.487662315368652 + ], + [ + "▁disruptive", + -12.487671852111816 + ], + [ + "▁Ker", + -12.487686157226562 + ], + [ + "▁flora", + -12.48774528503418 + ], + [ + "Lab", + -12.487759590148926 + ], + [ + "▁seaside", + -12.48780632019043 + ], + [ + "▁calibration", + -12.487876892089844 + ], + [ + "▁Tam", + -12.488107681274414 + ], + [ + "▁grading", + -12.488253593444824 + ], + [ + "▁liabilities", + -12.488290786743164 + ], + [ + "▁podium", + -12.48831272125244 + ], + [ + "▁residences", + -12.4883394241333 + ], + [ + "election", + -12.488363265991213 + ], + [ + "▁Niagara", + -12.488409996032717 + 
], + [ + "▁paradigm", + -12.488568305969238 + ], + [ + "▁ecommerce", + -12.488698959350586 + ], + [ + "TER", + -12.488741874694824 + ], + [ + "ache", + -12.488795280456545 + ], + [ + "▁Bollywood", + -12.48879623413086 + ], + [ + "▁Viagra", + -12.488911628723145 + ], + [ + "-2018", + -12.488978385925291 + ], + [ + "▁Venture", + -12.489039421081545 + ], + [ + "▁threaten", + -12.489093780517578 + ], + [ + "▁GmbH", + -12.489197731018066 + ], + [ + "▁hypothesis", + -12.489198684692385 + ], + [ + "▁Beverly", + -12.48922348022461 + ], + [ + "▁pursuant", + -12.48922348022461 + ], + [ + "▁Crazy", + -12.489316940307615 + ], + [ + "▁Blade", + -12.48932647705078 + ], + [ + "▁Hidden", + -12.4894437789917 + ], + [ + "▁dilemma", + -12.489460945129396 + ], + [ + "▁safari", + -12.48949146270752 + ], + [ + "▁Ranger", + -12.48970890045166 + ], + [ + "▁Simpson", + -12.489710807800291 + ], + [ + "▁recessed", + -12.489803314208984 + ], + [ + "▁Jill", + -12.489845275878906 + ], + [ + "▁Investors", + -12.489919662475586 + ], + [ + "▁Pedro", + -12.49009609222412 + ], + [ + "▁solidarity", + -12.49012851715088 + ], + [ + "▁LEGO", + -12.490164756774902 + ], + [ + "held", + -12.490224838256836 + ], + [ + "▁clientele", + -12.490249633789062 + ], + [ + "esque", + -12.490331649780272 + ], + [ + "▁Stockholm", + -12.49036979675293 + ], + [ + "▁comparative", + -12.49077606201172 + ], + [ + "▁Fields", + -12.490787506103516 + ], + [ + "Sub", + -12.491008758544922 + ], + [ + "▁histories", + -12.49102020263672 + ], + [ + "▁schooling", + -12.491114616394045 + ], + [ + "//", + -12.491226196289062 + ], + [ + "▁Supplies", + -12.491753578186035 + ], + [ + "▁tougher", + -12.491887092590332 + ], + [ + "▁Grass", + -12.491912841796877 + ], + [ + "▁parchment", + -12.49192714691162 + ], + [ + "▁reminding", + -12.49203109741211 + ], + [ + "▁curse", + -12.492070198059082 + ], + [ + "▁Layer", + -12.49207592010498 + ], + [ + "▁1958", + -12.492177963256836 + ], + [ + "▁concealed", + -12.492180824279783 + ], + [ + "▁lump", + -12.492196083068848 + ], + [ + "▁blamed", + -12.49223804473877 + ], + [ + "imo", + -12.492298126220703 + ], + [ + "▁trusting", + -12.492376327514648 + ], + [ + "▁crane", + -12.49238109588623 + ], + [ + "▁Advantage", + -12.492399215698242 + ], + [ + "▁milestones", + -12.492453575134276 + ], + [ + "▁oppose", + -12.492544174194336 + ], + [ + "▁prioritize", + -12.492560386657717 + ], + [ + "▁cousins", + -12.492624282836914 + ], + [ + "cock", + -12.492721557617188 + ], + [ + "▁overflow", + -12.492753028869627 + ], + [ + "▁disconnect", + -12.492754936218262 + ], + [ + "▁anatomy", + -12.492838859558104 + ], + [ + "lac", + -12.492859840393066 + ], + [ + "▁facets", + -12.492878913879396 + ], + [ + "▁hairstyle", + -12.492905616760254 + ], + [ + "trade", + -12.4929780960083 + ], + [ + "▁LGBT", + -12.492999076843262 + ], + [ + "eri", + -12.493014335632324 + ], + [ + "▁Levi", + -12.493206977844238 + ], + [ + "▁intrigued", + -12.493257522583008 + ], + [ + "▁vase", + -12.49329662322998 + ], + [ + "▁TR", + -12.493595123291016 + ], + [ + "▁dependence", + -12.493597984313965 + ], + [ + "▁volcano", + -12.493614196777344 + ], + [ + "RM", + -12.493706703186035 + ], + [ + "Safe", + -12.493724822998049 + ], + [ + "▁tedious", + -12.493756294250488 + ], + [ + "▁uneven", + -12.493766784667969 + ], + [ + "▁Pattern", + -12.493794441223145 + ], + [ + "dor", + -12.49384880065918 + ], + [ + "LM", + -12.493894577026367 + ], + [ + "▁manga", + -12.494003295898438 + ], + [ + "▁hrs", + -12.49406909942627 + ], + [ + "▁Export", + -12.494110107421877 + ], + [ + 
"Modern", + -12.494152069091797 + ], + [ + "FF", + -12.494199752807615 + ], + [ + "▁carving", + -12.494327545166016 + ], + [ + "United", + -12.49435329437256 + ], + [ + "▁Oral", + -12.494452476501465 + ], + [ + "Print", + -12.494547843933104 + ], + [ + "lich", + -12.494644165039062 + ], + [ + "▁Courses", + -12.494834899902344 + ], + [ + "Sea", + -12.494858741760254 + ], + [ + "▁playback", + -12.494927406311035 + ], + [ + "3,000", + -12.494938850402832 + ], + [ + "War", + -12.495036125183104 + ], + [ + "▁relocated", + -12.495295524597168 + ], + [ + "DF", + -12.49539566040039 + ], + [ + "▁descriptive", + -12.49557876586914 + ], + [ + "▁terribly", + -12.49557876586914 + ], + [ + "▁agility", + -12.495579719543455 + ], + [ + "Park", + -12.49560546875 + ], + [ + "▁Rat", + -12.495685577392578 + ], + [ + "▁Speech", + -12.495762825012209 + ], + [ + "▁reductions", + -12.495903968811035 + ], + [ + "▁17.", + -12.496017456054688 + ], + [ + "▁1956", + -12.496182441711426 + ], + [ + "1-", + -12.49623966217041 + ], + [ + "▁Commander", + -12.496371269226074 + ], + [ + "▁mosaic", + -12.496430397033691 + ], + [ + "▁Viking", + -12.496451377868652 + ], + [ + "▁congressional", + -12.49649429321289 + ], + [ + "▁postage", + -12.496505737304688 + ], + [ + "▁Comedy", + -12.496562004089355 + ], + [ + "▁Sai", + -12.496569633483888 + ], + [ + "▁Teen", + -12.496710777282717 + ], + [ + "Ti", + -12.496719360351562 + ], + [ + "▁Haiti", + -12.49673843383789 + ], + [ + "▁depicted", + -12.496744155883787 + ], + [ + "uli", + -12.496800422668455 + ], + [ + "▁(20", + -12.496809005737305 + ], + [ + "▁Highlands", + -12.496889114379885 + ], + [ + "UC", + -12.49691104888916 + ], + [ + "2012", + -12.496981620788574 + ], + [ + "▁Rolling", + -12.497024536132812 + ], + [ + "▁computational", + -12.497241973876951 + ], + [ + "▁£4", + -12.497297286987305 + ], + [ + "▁floods", + -12.49731159210205 + ], + [ + "Table", + -12.497357368469238 + ], + [ + "▁revival", + -12.497410774230955 + ], + [ + "Mail", + -12.497679710388184 + ], + [ + "Almost", + -12.497909545898438 + ], + [ + "▁Jupiter", + -12.498027801513672 + ], + [ + "▁raid", + -12.498112678527832 + ], + [ + "▁pubs", + -12.498248100280762 + ], + [ + "/16", + -12.49843978881836 + ], + [ + "ios", + -12.498544692993164 + ], + [ + "▁misuse", + -12.498714447021484 + ], + [ + "▁fingerprint", + -12.49874210357666 + ], + [ + "▁colder", + -12.498806953430176 + ], + [ + "▁recalls", + -12.498872756958008 + ], + [ + "▁EX", + -12.498906135559082 + ], + [ + "▁Spice", + -12.498927116394045 + ], + [ + "▁knob", + -12.499171257019045 + ], + [ + "Whenever", + -12.499177932739258 + ], + [ + "▁Chang", + -12.499195098876951 + ], + [ + "▁borrower", + -12.499199867248535 + ], + [ + "▁infringement", + -12.499199867248535 + ], + [ + "▁Landing", + -12.499364852905272 + ], + [ + "▁communicated", + -12.499381065368652 + ], + [ + "adi", + -12.499435424804688 + ], + [ + "owner", + -12.49950885772705 + ], + [ + "▁addicted", + -12.49950885772705 + ], + [ + "▁pivotal", + -12.49982452392578 + ], + [ + "hurst", + -12.500003814697266 + ], + [ + "▁drains", + -12.50001335144043 + ], + [ + "▁textbook", + -12.500046730041504 + ], + [ + "▁showcased", + -12.500049591064451 + ], + [ + "Lu", + -12.5000638961792 + ], + [ + "▁1981", + -12.500067710876465 + ], + [ + "▁Somerset", + -12.500125885009766 + ], + [ + "▁redemption", + -12.500162124633787 + ], + [ + "▁Begin", + -12.500174522399902 + ], + [ + "Members", + -12.500283241271973 + ], + [ + "▁headset", + -12.500449180603027 + ], + [ + "skin", + -12.50046730041504 + ], + [ + 
"sion", + -12.50048542022705 + ], + [ + "▁rectangle", + -12.50055694580078 + ], + [ + "ching", + -12.500590324401855 + ], + [ + "▁refuses", + -12.50065803527832 + ], + [ + "▁proportions", + -12.500667572021484 + ], + [ + "1,000", + -12.500887870788574 + ], + [ + "▁Evan", + -12.50090503692627 + ], + [ + "6,000", + -12.500983238220217 + ], + [ + "Drive", + -12.501015663146973 + ], + [ + "▁urgency", + -12.501084327697754 + ], + [ + "▁batting", + -12.501141548156738 + ], + [ + "▁glazing", + -12.501203536987305 + ], + [ + "▁bot", + -12.501226425170898 + ], + [ + "▁VT", + -12.501277923583984 + ], + [ + "SN", + -12.50134563446045 + ], + [ + "▁Keys", + -12.50134563446045 + ], + [ + "NEW", + -12.501428604125977 + ], + [ + "▁programmed", + -12.50153350830078 + ], + [ + "▁receptor", + -12.50156021118164 + ], + [ + "▁Sudan", + -12.501561164855955 + ], + [ + "tool", + -12.501707077026367 + ], + [ + "▁irregular", + -12.501710891723633 + ], + [ + "▁numerical", + -12.50179958343506 + ], + [ + "▁reflections", + -12.501860618591309 + ], + [ + "▁flames", + -12.501907348632812 + ], + [ + "▁genetics", + -12.502007484436035 + ], + [ + "▁Sons", + -12.502013206481934 + ], + [ + "▁Swan", + -12.5020170211792 + ], + [ + "▁RT", + -12.502099990844728 + ], + [ + "elo", + -12.502180099487305 + ], + [ + "▁Gmail", + -12.502229690551758 + ], + [ + "750", + -12.502370834350586 + ], + [ + "nen", + -12.502395629882812 + ], + [ + "▁Steps", + -12.502635955810549 + ], + [ + "▁Interface", + -12.502800941467283 + ], + [ + "ulation", + -12.502848625183104 + ], + [ + "▁revenge", + -12.502927780151367 + ], + [ + "▁reproductive", + -12.5029878616333 + ], + [ + "▁Norton", + -12.50304889678955 + ], + [ + "▁$60", + -12.5031156539917 + ], + [ + "▁SAS", + -12.503130912780762 + ], + [ + "▁Pokémon", + -12.503201484680176 + ], + [ + "87", + -12.50334930419922 + ], + [ + "▁dug", + -12.50336456298828 + ], + [ + "lift", + -12.503525733947754 + ], + [ + "▁confirms", + -12.503557205200195 + ], + [ + "▁novice", + -12.503623008728027 + ], + [ + "▁Et", + -12.503796577453612 + ], + [ + "▁ambiance", + -12.503843307495115 + ], + [ + "▁pr", + -12.503866195678713 + ], + [ + "▁trending", + -12.50387477874756 + ], + [ + "▁Seller", + -12.504087448120115 + ], + [ + "▁Blanc", + -12.504100799560549 + ], + [ + "▁waterfalls", + -12.504167556762695 + ], + [ + "Current", + -12.504244804382324 + ], + [ + "▁Conservative", + -12.504278182983398 + ], + [ + "▁mercury", + -12.50428581237793 + ], + [ + "▁Inner", + -12.50439167022705 + ], + [ + "liter", + -12.504497528076172 + ], + [ + "▁256", + -12.50463581085205 + ], + [ + "▁Opportunity", + -12.504694938659668 + ], + [ + "▁4:30", + -12.504725456237791 + ], + [ + "▁11-", + -12.504730224609377 + ], + [ + "▁Lexington", + -12.504768371582031 + ], + [ + "▁Milton", + -12.504884719848633 + ], + [ + "▁Diabetes", + -12.504901885986328 + ], + [ + "▁DVDs", + -12.504923820495604 + ], + [ + "▁Appliance", + -12.50498104095459 + ], + [ + "▁gentleman", + -12.5052490234375 + ], + [ + "bone", + -12.505365371704102 + ], + [ + "▁Fa", + -12.50537109375 + ], + [ + "▁UX", + -12.505440711975098 + ], + [ + "▁um", + -12.505504608154297 + ], + [ + "▁screenshots", + -12.505511283874512 + ], + [ + "▁Betty", + -12.5056791305542 + ], + [ + "▁Absolutely", + -12.50585651397705 + ], + [ + "▁Citizens", + -12.50623607635498 + ], + [ + "▁175", + -12.506282806396484 + ], + [ + "▁erase", + -12.506529808044434 + ], + [ + "▁revisit", + -12.506718635559082 + ], + [ + "▁backbone", + -12.50672721862793 + ], + [ + "Serve", + -12.506744384765623 + ], + [ + 
"▁comparisons", + -12.506749153137209 + ], + [ + "▁mailed", + -12.506796836853027 + ], + [ + "▁weaker", + -12.506914138793944 + ], + [ + "▁Breast", + -12.507037162780762 + ], + [ + "▁speedy", + -12.507100105285645 + ], + [ + "▁jewel", + -12.50711727142334 + ], + [ + "Bank", + -12.507471084594728 + ], + [ + "▁Bou", + -12.507493019104004 + ], + [ + "Control", + -12.507498741149902 + ], + [ + "▁flowering", + -12.507500648498535 + ], + [ + "▁hydrated", + -12.507672309875488 + ], + [ + "▁Eating", + -12.507691383361816 + ], + [ + "price", + -12.50788402557373 + ], + [ + "▁Hull", + -12.507904052734377 + ], + [ + "▁ASAP", + -12.508124351501465 + ], + [ + "▁deepen", + -12.5081787109375 + ], + [ + "▁GMT", + -12.508305549621582 + ], + [ + "▁Naturally", + -12.508413314819336 + ], + [ + "▁shelving", + -12.508464813232422 + ], + [ + "▁fiberglass", + -12.50848388671875 + ], + [ + "▁reusable", + -12.508511543273926 + ], + [ + "▁Rack", + -12.508647918701172 + ], + [ + "▁dim", + -12.508761405944824 + ], + [ + "▁mango", + -12.508829116821287 + ], + [ + "▁skeptical", + -12.508883476257324 + ], + [ + "▁Meaning", + -12.508949279785156 + ], + [ + "▁320", + -12.509065628051758 + ], + [ + "▁Kra", + -12.509183883666992 + ], + [ + "til", + -12.509230613708496 + ], + [ + "db", + -12.509347915649414 + ], + [ + "▁colon", + -12.509385108947754 + ], + [ + "▁Flip", + -12.50938892364502 + ], + [ + "Central", + -12.509411811828612 + ], + [ + "services", + -12.50944995880127 + ], + [ + "▁Seal", + -12.509466171264648 + ], + [ + "product", + -12.509533882141112 + ], + [ + "▁undergone", + -12.50954818725586 + ], + [ + "LT", + -12.50955295562744 + ], + [ + "▁Forever", + -12.509625434875488 + ], + [ + "▁Skip", + -12.509700775146484 + ], + [ + "▁reconciliation", + -12.509841918945312 + ], + [ + "▁ambulance", + -12.509899139404297 + ], + [ + "▁jackpot", + -12.509909629821776 + ], + [ + "▁standout", + -12.510007858276367 + ], + [ + "▁Lopez", + -12.510581970214844 + ], + [ + "arch", + -12.51125144958496 + ], + [ + "▁shutters", + -12.511279106140137 + ], + [ + "▁Ap", + -12.511566162109377 + ], + [ + "▁Mineral", + -12.511662483215332 + ], + [ + "▁wi", + -12.511892318725586 + ], + [ + "▁LOT", + -12.51190185546875 + ], + [ + "▁leaning", + -12.511972427368164 + ], + [ + "▁rash", + -12.512102127075195 + ], + [ + "string", + -12.51210594177246 + ], + [ + "▁Dentistry", + -12.51219081878662 + ], + [ + "ld", + -12.512269973754885 + ], + [ + "▁counselling", + -12.512279510498049 + ], + [ + "▁stocking", + -12.512630462646484 + ], + [ + "▁Pol", + -12.512645721435549 + ], + [ + "▁canceled", + -12.512666702270508 + ], + [ + "▁Personally", + -12.512673377990724 + ], + [ + "▁flock", + -12.512843132019045 + ], + [ + "▁19.", + -12.512961387634276 + ], + [ + "▁TA", + -12.512983322143556 + ], + [ + "▁prolific", + -12.513107299804688 + ], + [ + "▁distributing", + -12.513110160827637 + ], + [ + "▁embed", + -12.513132095336914 + ], + [ + "▁terrifying", + -12.513161659240724 + ], + [ + "▁zombie", + -12.513174057006836 + ], + [ + "Brand", + -12.51333713531494 + ], + [ + "▁Pair", + -12.513385772705078 + ], + [ + "Oct", + -12.513395309448242 + ], + [ + "▁alarms", + -12.51344394683838 + ], + [ + "▁impairment", + -12.513669967651367 + ], + [ + "▁championships", + -12.513693809509276 + ], + [ + "EL", + -12.513757705688477 + ], + [ + "▁thou", + -12.51380443572998 + ], + [ + "ering", + -12.513938903808594 + ], + [ + "▁strengthened", + -12.513973236083984 + ], + [ + "▁equations", + -12.513992309570312 + ], + [ + "Kids", + -12.514010429382324 + ], + [ + "▁cruel", + 
-12.514026641845703 + ], + [ + "▁Gandhi", + -12.514349937438965 + ], + [ + "▁Sink", + -12.514373779296877 + ], + [ + "Inside", + -12.514381408691406 + ], + [ + "▁Wendy", + -12.514418601989746 + ], + [ + "▁Heath", + -12.514559745788574 + ], + [ + "▁transforms", + -12.51480484008789 + ], + [ + "▁hydration", + -12.514969825744627 + ], + [ + "▁Debt", + -12.514992713928224 + ], + [ + "▁personalize", + -12.51504898071289 + ], + [ + "counter", + -12.515089988708496 + ], + [ + "▁groundbreaking", + -12.515093803405762 + ], + [ + "▁glued", + -12.51518726348877 + ], + [ + "▁outing", + -12.515310287475586 + ], + [ + "▁breaches", + -12.515376091003418 + ], + [ + "▁OC", + -12.515432357788086 + ], + [ + "Sounds", + -12.515450477600098 + ], + [ + "▁Era", + -12.515646934509276 + ], + [ + "▁gears", + -12.51597023010254 + ], + [ + "▁Burns", + -12.516032218933104 + ], + [ + "▁Result", + -12.516050338745115 + ], + [ + "▁twists", + -12.516209602355955 + ], + [ + "quot", + -12.516244888305664 + ], + [ + "still", + -12.516267776489258 + ], + [ + "74", + -12.516460418701172 + ], + [ + "heart", + -12.516541481018066 + ], + [ + "hn", + -12.516826629638672 + ], + [ + "▁oneself", + -12.516850471496582 + ], + [ + "▁politician", + -12.516956329345703 + ], + [ + "▁£10", + -12.517012596130373 + ], + [ + "toxic", + -12.517030715942385 + ], + [ + "dos", + -12.517148971557615 + ], + [ + "▁Morrison", + -12.517512321472168 + ], + [ + "▁landmarks", + -12.517585754394531 + ], + [ + "UT", + -12.517587661743164 + ], + [ + "▁GET", + -12.517587661743164 + ], + [ + "▁Alert", + -12.517657279968262 + ], + [ + "quarter", + -12.517683029174805 + ], + [ + "▁ministries", + -12.517770767211914 + ], + [ + "▁stringent", + -12.517818450927734 + ], + [ + "aa", + -12.517865180969238 + ], + [ + "▁amidst", + -12.51799201965332 + ], + [ + "▁omega", + -12.518040657043455 + ], + [ + "ple", + -12.51805019378662 + ], + [ + "▁await", + -12.51810359954834 + ], + [ + "▁lone", + -12.518108367919922 + ], + [ + "▁Ran", + -12.518144607543944 + ], + [ + "Edit", + -12.518230438232422 + ], + [ + "▁Providers", + -12.518234252929688 + ], + [ + "▁biscuits", + -12.518328666687012 + ], + [ + "▁psychologist", + -12.518378257751465 + ], + [ + "girl", + -12.518497467041016 + ], + [ + "▁mural", + -12.51857089996338 + ], + [ + "▁Drum", + -12.518659591674805 + ], + [ + "▁Gor", + -12.518790245056152 + ], + [ + "▁Lutheran", + -12.518831253051758 + ], + [ + "support", + -12.518930435180664 + ], + [ + "▁subscriber", + -12.518956184387209 + ], + [ + "▁headlights", + -12.519179344177246 + ], + [ + "▁dumps", + -12.519356727600098 + ], + [ + "easy", + -12.519412994384766 + ], + [ + "▁Disability", + -12.519776344299316 + ], + [ + "nk", + -12.519929885864258 + ], + [ + "▁Deb", + -12.520041465759276 + ], + [ + "▁Straight", + -12.520041465759276 + ], + [ + "▁Buddy", + -12.520071983337402 + ], + [ + "▁Riley", + -12.520100593566896 + ], + [ + "PF", + -12.520275115966797 + ], + [ + "▁singular", + -12.52031421661377 + ], + [ + "▁Investigator", + -12.520330429077148 + ], + [ + "nell", + -12.520341873168944 + ], + [ + "▁tutoring", + -12.520366668701172 + ], + [ + "got", + -12.5204439163208 + ], + [ + "▁Except", + -12.5204439163208 + ], + [ + "▁Louise", + -12.520480155944824 + ], + [ + "AGE", + -12.520493507385254 + ], + [ + "Indeed", + -12.520694732666016 + ], + [ + "▁Stevens", + -12.520745277404783 + ], + [ + "▁???", + -12.520747184753418 + ], + [ + "▁pal", + -12.52087688446045 + ], + [ + "▁HQ", + -12.520983695983888 + ], + [ + "▁Pharma", + -12.521135330200195 + ], + [ + "▁bedtime", + 
-12.521321296691896 + ], + [ + "▁Pilates", + -12.521716117858888 + ], + [ + "ille", + -12.521748542785645 + ], + [ + "▁creek", + -12.52177619934082 + ], + [ + "come", + -12.521852493286133 + ], + [ + "▁MR", + -12.522335052490234 + ], + [ + "▁Sharing", + -12.522366523742676 + ], + [ + "▁Rings", + -12.522435188293455 + ], + [ + "ique", + -12.522540092468262 + ], + [ + "▁wearable", + -12.522682189941406 + ], + [ + "▁Wiki", + -12.522790908813477 + ], + [ + "▁pies", + -12.522920608520508 + ], + [ + "mis", + -12.522944450378418 + ], + [ + "▁hues", + -12.52297306060791 + ], + [ + "125", + -12.523089408874512 + ], + [ + "▁excluding", + -12.523112297058104 + ], + [ + "▁volcanic", + -12.523193359375 + ], + [ + "▁Romans", + -12.523335456848145 + ], + [ + "Ten", + -12.52334213256836 + ], + [ + "▁filmmaker", + -12.523381233215332 + ], + [ + "Obviously", + -12.523577690124512 + ], + [ + "▁passages", + -12.52361297607422 + ], + [ + "▁800-", + -12.523638725280762 + ], + [ + "▁tumors", + -12.523723602294922 + ], + [ + "▁neo", + -12.523788452148438 + ], + [ + "▁bun", + -12.524080276489258 + ], + [ + "▁repay", + -12.524083137512209 + ], + [ + "▁almonds", + -12.524133682250977 + ], + [ + "▁>>", + -12.524210929870604 + ], + [ + "▁electoral", + -12.52433967590332 + ], + [ + "▁mastered", + -12.524340629577637 + ], + [ + "▁booster", + -12.524418830871582 + ], + [ + "▁sums", + -12.524459838867188 + ], + [ + "▁Eng", + -12.524629592895508 + ], + [ + "▁prompts", + -12.524678230285645 + ], + [ + "▁Sebastian", + -12.52488136291504 + ], + [ + "digit", + -12.524968147277832 + ], + [ + "▁Elder", + -12.525152206420898 + ], + [ + "ets", + -12.525235176086426 + ], + [ + "▁polling", + -12.525296211242676 + ], + [ + "▁Pal", + -12.525365829467772 + ], + [ + "▁unlocked", + -12.525508880615234 + ], + [ + "certified", + -12.52554416656494 + ], + [ + "1.1", + -12.525554656982422 + ], + [ + "▁Goods", + -12.52561855316162 + ], + [ + "Interested", + -12.52562427520752 + ], + [ + "▁audits", + -12.525686264038086 + ], + [ + "▁specials", + -12.525863647460938 + ], + [ + "▁Comp", + -12.525873184204102 + ], + [ + "▁fin", + -12.525925636291504 + ], + [ + "▁cd", + -12.526061058044434 + ], + [ + "strom", + -12.526130676269531 + ], + [ + "-26", + -12.526168823242188 + ], + [ + "▁dividends", + -12.526284217834473 + ], + [ + "▁Incredible", + -12.52633285522461 + ], + [ + "▁Terrace", + -12.526571273803713 + ], + [ + "▁indirectly", + -12.526618003845217 + ], + [ + "▁Machines", + -12.526659965515137 + ], + [ + "▁Mueller", + -12.526937484741213 + ], + [ + "▁temptation", + -12.52695083618164 + ], + [ + "▁Mesa", + -12.526978492736816 + ], + [ + "▁1998,", + -12.527013778686523 + ], + [ + "▁propaganda", + -12.527165412902832 + ], + [ + "▁conserving", + -12.527176856994627 + ], + [ + "▁Thing", + -12.527207374572754 + ], + [ + "commercial", + -12.527247428894045 + ], + [ + "▁critique", + -12.527281761169434 + ], + [ + "▁Certainly", + -12.527416229248049 + ], + [ + "▁Heroes", + -12.52744960784912 + ], + [ + "▁Motorola", + -12.527470588684082 + ], + [ + "▁hue", + -12.527475357055664 + ], + [ + "▁Wrap", + -12.527618408203123 + ], + [ + "own", + -12.527681350708008 + ], + [ + "▁1997.", + -12.527709007263184 + ], + [ + "▁Unity", + -12.52776336669922 + ], + [ + "▁Tv", + -12.527788162231444 + ], + [ + "ée", + -12.527844429016112 + ], + [ + "▁sponsoring", + -12.527876853942873 + ], + [ + "▁toggle", + -12.527905464172363 + ], + [ + "Year", + -12.527935028076172 + ], + [ + "▁digit", + -12.527976036071776 + ], + [ + "18.", + -12.528016090393066 + ], + [ + "▁Sprint", 
+ -12.528019905090332 + ], + [ + "oka", + -12.528058052062988 + ], + [ + "▁hustle", + -12.528120994567873 + ], + [ + "▁excursions", + -12.52814483642578 + ], + [ + "▁usability", + -12.528154373168944 + ], + [ + "▁felony", + -12.528169631958008 + ], + [ + "▁bingo", + -12.52822971343994 + ], + [ + "......", + -12.52866554260254 + ], + [ + "▁Nowadays", + -12.528706550598145 + ], + [ + "▁Devices", + -12.528776168823242 + ], + [ + "▁Wy", + -12.528810501098633 + ], + [ + "▁Slow", + -12.52884292602539 + ], + [ + "▁tad", + -12.52888298034668 + ], + [ + "▁VMware", + -12.529069900512695 + ], + [ + "dev", + -12.529094696044922 + ], + [ + "▁Escape", + -12.52910041809082 + ], + [ + "Short", + -12.529108047485352 + ], + [ + "▁shredded", + -12.5291109085083 + ], + [ + "▁Surf", + -12.529156684875488 + ], + [ + "▁vines", + -12.529181480407717 + ], + [ + "▁fab", + -12.52924919128418 + ], + [ + "dollar", + -12.529382705688477 + ], + [ + "▁Willow", + -12.52945041656494 + ], + [ + "▁Beast", + -12.52950382232666 + ], + [ + "***", + -12.529898643493652 + ], + [ + "▁Console", + -12.529994010925291 + ], + [ + "▁Newsletter", + -12.530014991760254 + ], + [ + "▁feared", + -12.530033111572266 + ], + [ + "▁nestled", + -12.530112266540527 + ], + [ + "▁idle", + -12.530168533325195 + ], + [ + "!!!!!", + -12.53021240234375 + ], + [ + "▁striped", + -12.53022289276123 + ], + [ + "▁oceans", + -12.53028964996338 + ], + [ + "Brown", + -12.530376434326172 + ], + [ + "▁Winnipeg", + -12.53038501739502 + ], + [ + "▁Failure", + -12.53062629699707 + ], + [ + "▁Certain", + -12.530665397644045 + ], + [ + "▁bottled", + -12.530674934387209 + ], + [ + "inter", + -12.530710220336914 + ], + [ + "▁Fuji", + -12.53073024749756 + ], + [ + "Pass", + -12.530735969543455 + ], + [ + "▁comprehension", + -12.530767440795898 + ], + [ + "▁caravan", + -12.530896186828612 + ], + [ + "▁Load", + -12.531036376953123 + ], + [ + "▁Brilliant", + -12.5310697555542 + ], + [ + "▁nightlife", + -12.531105041503906 + ], + [ + "mind", + -12.53114128112793 + ], + [ + "ended", + -12.53115177154541 + ], + [ + "▁stark", + -12.531182289123535 + ], + [ + "tter", + -12.531396865844728 + ], + [ + "▁Initially", + -12.53139877319336 + ], + [ + "▁Peterson", + -12.531538963317873 + ], + [ + "▁blur", + -12.531846046447754 + ], + [ + "▁consolidated", + -12.53187370300293 + ], + [ + "▁woodland", + -12.531885147094728 + ], + [ + "▁HO", + -12.531886100769045 + ], + [ + "▁poison", + -12.53190803527832 + ], + [ + "▁DL", + -12.53194522857666 + ], + [ + "▁Reynolds", + -12.53196907043457 + ], + [ + "▁bible", + -12.532018661499023 + ], + [ + "▁Matter", + -12.532096862792969 + ], + [ + "▁bending", + -12.532129287719728 + ], + [ + "SW", + -12.532179832458496 + ], + [ + "▁illegally", + -12.532387733459473 + ], + [ + "ural", + -12.532430648803713 + ], + [ + "▁thrust", + -12.532470703125 + ], + [ + "zel", + -12.532482147216797 + ], + [ + "▁towing", + -12.532657623291016 + ], + [ + "▁arcade", + -12.532697677612305 + ], + [ + "▁Sister", + -12.532745361328123 + ], + [ + "zip", + -12.532797813415527 + ], + [ + "▁mysteries", + -12.532844543457031 + ], + [ + "▁Rehabilitation", + -12.532999038696287 + ], + [ + "▁Rivers", + -12.533170700073242 + ], + [ + "Answer", + -12.533211708068848 + ], + [ + "▁torrent", + -12.533411979675291 + ], + [ + "▁Snapchat", + -12.533422470092772 + ], + [ + "▁crave", + -12.53353214263916 + ], + [ + "▁NP", + -12.533647537231444 + ], + [ + "eno", + -12.533769607543944 + ], + [ + "▁humid", + -12.53404712677002 + ], + [ + "▁touchscreen", + -12.534079551696776 + ], + [ + 
"▁zest", + -12.534345626831056 + ], + [ + "▁modem", + -12.53444766998291 + ], + [ + "▁revisions", + -12.534481048583984 + ], + [ + "▁appoint", + -12.53476333618164 + ], + [ + "▁!!!", + -12.5347900390625 + ], + [ + "▁unpaid", + -12.534823417663574 + ], + [ + "▁portrayed", + -12.535079002380373 + ], + [ + "▁Savannah", + -12.535183906555176 + ], + [ + "▁dissolved", + -12.535210609436035 + ], + [ + "▁poetic", + -12.535277366638184 + ], + [ + "▁Marion", + -12.53531265258789 + ], + [ + "FX", + -12.535438537597656 + ], + [ + "dh", + -12.535562515258787 + ], + [ + "▁protagonist", + -12.535562515258787 + ], + [ + "▁ROM", + -12.535682678222656 + ], + [ + "▁expressly", + -12.53569793701172 + ], + [ + "▁precipitation", + -12.535698890686035 + ], + [ + "▁preceding", + -12.535714149475098 + ], + [ + "▁unstable", + -12.535725593566896 + ], + [ + "▁fading", + -12.535937309265137 + ], + [ + "▁workload", + -12.53604793548584 + ], + [ + "Must", + -12.53605842590332 + ], + [ + "▁pe", + -12.536103248596191 + ], + [ + "▁frosting", + -12.536231994628906 + ], + [ + "▁wit", + -12.53627586364746 + ], + [ + "▁enzyme", + -12.536298751831056 + ], + [ + "▁WAS", + -12.536601066589355 + ], + [ + "cio", + -12.53666877746582 + ], + [ + "IM", + -12.536746978759766 + ], + [ + "▁savory", + -12.536776542663574 + ], + [ + "▁punk", + -12.536930084228516 + ], + [ + "▁Exit", + -12.536989212036133 + ], + [ + "bal", + -12.53707218170166 + ], + [ + "NT", + -12.537124633789062 + ], + [ + "equipped", + -12.537174224853516 + ], + [ + "▁Davidson", + -12.537175178527832 + ], + [ + "▁WW", + -12.537187576293944 + ], + [ + "▁shrubs", + -12.537220001220703 + ], + [ + "▁scooter", + -12.537229537963867 + ], + [ + "▁ruined", + -12.53731632232666 + ], + [ + "▁bilateral", + -12.537606239318848 + ], + [ + "▁trekking", + -12.537619590759276 + ], + [ + "▁Chiropractic", + -12.537652015686035 + ], + [ + "Cap", + -12.537672996520996 + ], + [ + "spring", + -12.53768539428711 + ], + [ + "2010", + -12.53779125213623 + ], + [ + "▁regeneration", + -12.537864685058594 + ], + [ + "▁pillars", + -12.538174629211426 + ], + [ + "▁Boost", + -12.538262367248535 + ], + [ + "▁intra", + -12.53839111328125 + ], + [ + "sal", + -12.538470268249512 + ], + [ + "▁mould", + -12.538501739501951 + ], + [ + "▁irrelevant", + -12.538556098937988 + ], + [ + "▁auctions", + -12.538562774658203 + ], + [ + "▁102", + -12.538793563842772 + ], + [ + "▁persuade", + -12.538910865783691 + ], + [ + "▁insider", + -12.538994789123535 + ], + [ + "▁Needs", + -12.53908920288086 + ], + [ + "Going", + -12.539091110229492 + ], + [ + "▁Sta", + -12.539111137390137 + ], + [ + "series", + -12.539278030395508 + ], + [ + "IV", + -12.53928565979004 + ], + [ + "▁probation", + -12.539508819580078 + ], + [ + "▁plugged", + -12.539546966552734 + ], + [ + "▁displaced", + -12.539644241333008 + ], + [ + "▁marvel", + -12.539667129516602 + ], + [ + "▁jumps", + -12.539752006530762 + ], + [ + "▁Budapest", + -12.539756774902344 + ], + [ + "▁fr", + -12.539841651916504 + ], + [ + "▁CW", + -12.539999961853027 + ], + [ + "▁subscriptions", + -12.54003620147705 + ], + [ + "yama", + -12.54014492034912 + ], + [ + "▁Lay", + -12.540220260620115 + ], + [ + "quin", + -12.540225982666016 + ], + [ + "▁validated", + -12.54038906097412 + ], + [ + "▁autonomy", + -12.54047679901123 + ], + [ + "▁sec", + -12.540477752685549 + ], + [ + "MU", + -12.540508270263672 + ], + [ + "▁LC", + -12.540545463562012 + ], + [ + "▁servants", + -12.540834426879885 + ], + [ + "▁Composite", + -12.5408353805542 + ], + [ + "▁amend", + -12.540848731994627 + ], + 
[ + "itz", + -12.5408935546875 + ], + [ + "▁Christie", + -12.540918350219728 + ], + [ + "gl", + -12.540971755981444 + ], + [ + "17.", + -12.541091918945312 + ], + [ + "▁questionable", + -12.54117488861084 + ], + [ + "MR", + -12.541224479675291 + ], + [ + "pat", + -12.541370391845703 + ], + [ + "▁TOP", + -12.541495323181152 + ], + [ + "▁multiplayer", + -12.541496276855469 + ], + [ + "Sam", + -12.541540145874023 + ], + [ + "▁poised", + -12.541579246520996 + ], + [ + "hearted", + -12.54164218902588 + ], + [ + "▁Cambodia", + -12.541725158691406 + ], + [ + "▁Bearing", + -12.541813850402832 + ], + [ + "ax", + -12.54197120666504 + ], + [ + "▁Jackie", + -12.542119026184082 + ], + [ + "▁spontaneous", + -12.542140007019045 + ], + [ + "▁Universities", + -12.542276382446287 + ], + [ + "▁inflatable", + -12.542383193969728 + ], + [ + "Dear", + -12.542579650878906 + ], + [ + "yne", + -12.542648315429688 + ], + [ + "▁Raleigh", + -12.54282569885254 + ], + [ + "Fresh", + -12.542912483215332 + ], + [ + "George", + -12.543066024780272 + ], + [ + "Often", + -12.543148040771484 + ], + [ + "lined", + -12.54321575164795 + ], + [ + "▁WWE", + -12.543246269226074 + ], + [ + "▁peripheral", + -12.543296813964844 + ], + [ + "▁catastrophic", + -12.543340682983398 + ], + [ + "▁miserable", + -12.543340682983398 + ], + [ + "▁separating", + -12.543340682983398 + ], + [ + "▁sophistication", + -12.543340682983398 + ], + [ + "ded", + -12.543359756469728 + ], + [ + "Living", + -12.543364524841309 + ], + [ + "▁lashes", + -12.54342269897461 + ], + [ + "▁COM", + -12.543582916259766 + ], + [ + "▁Proceedings", + -12.543607711791992 + ], + [ + "▁Flood", + -12.543620109558104 + ], + [ + "▁vans", + -12.543696403503418 + ], + [ + "▁excursion", + -12.544266700744627 + ], + [ + "▁Interstate", + -12.544363975524902 + ], + [ + "▁soften", + -12.544373512268066 + ], + [ + "▁rec", + -12.544374465942385 + ], + [ + "▁Rewards", + -12.54448699951172 + ], + [ + "▁flare", + -12.544525146484377 + ], + [ + "▁stray", + -12.545114517211914 + ], + [ + "▁Squad", + -12.54522705078125 + ], + [ + "▁historian", + -12.54523754119873 + ], + [ + "▁refrain", + -12.545263290405272 + ], + [ + "▁serene", + -12.545269966125488 + ], + [ + "▁Alexandria", + -12.54527187347412 + ], + [ + "Multi", + -12.545458793640137 + ], + [ + "▁Cornell", + -12.545494079589844 + ], + [ + "▁eclipse", + -12.545502662658691 + ], + [ + "▁Seth", + -12.545570373535156 + ], + [ + "▁doc", + -12.545594215393066 + ], + [ + "▁gin", + -12.545610427856444 + ], + [ + "▁2-0", + -12.545696258544922 + ], + [ + "▁graphical", + -12.545697212219238 + ], + [ + "▁neighbouring", + -12.545703887939451 + ], + [ + "▁immersed", + -12.545742988586426 + ], + [ + "▁Prophet", + -12.545827865600586 + ], + [ + "project", + -12.54599380493164 + ], + [ + "European", + -12.546062469482422 + ], + [ + "▁slider", + -12.546066284179688 + ], + [ + "▁Winston", + -12.546091079711914 + ], + [ + "▁interviewing", + -12.546093940734863 + ], + [ + "▁VS", + -12.546277046203612 + ], + [ + "▁Clare", + -12.546283721923828 + ], + [ + "tas", + -12.546311378479004 + ], + [ + "▁staggering", + -12.54632568359375 + ], + [ + "Ad", + -12.546380996704102 + ], + [ + "▁backward", + -12.54645824432373 + ], + [ + "▁clinicians", + -12.546479225158691 + ], + [ + "▁malt", + -12.546479225158691 + ], + [ + "▁swiftly", + -12.546483993530272 + ], + [ + "▁Pond", + -12.546639442443848 + ], + [ + "▁Lau", + -12.54673671722412 + ], + [ + "Em", + -12.546992301940918 + ], + [ + "▁abusive", + -12.54718780517578 + ], + [ + "▁homage", + -12.54718780517578 + ], + [ + 
"needed", + -12.54725170135498 + ], + [ + "▁excuses", + -12.547255516052246 + ], + [ + "▁highways", + -12.54738712310791 + ], + [ + "player", + -12.547475814819336 + ], + [ + "▁Swimming", + -12.547542572021484 + ], + [ + "ign", + -12.547560691833496 + ], + [ + "▁cherish", + -12.547616958618164 + ], + [ + "▁Nottingham", + -12.547674179077148 + ], + [ + "▁Cute", + -12.54784107208252 + ], + [ + "▁chandelier", + -12.547964096069336 + ], + [ + "▁Advisors", + -12.547990798950195 + ], + [ + "▁Ivan", + -12.548065185546877 + ], + [ + "aid", + -12.548096656799316 + ], + [ + "▁feasibility", + -12.548108100891112 + ], + [ + "▁archaeological", + -12.548148155212402 + ], + [ + "▁innovate", + -12.548361778259276 + ], + [ + "▁Pike", + -12.548429489135742 + ], + [ + "els", + -12.548439025878906 + ], + [ + "▁Hole", + -12.548460960388184 + ], + [ + "▁withdrawn", + -12.548469543457031 + ], + [ + "▁Kris", + -12.548677444458008 + ], + [ + "▁clocks", + -12.548834800720217 + ], + [ + "▁dire", + -12.548954010009766 + ], + [ + "▁Demand", + -12.549007415771484 + ], + [ + "▁adolescents", + -12.549055099487305 + ], + [ + "▁favored", + -12.549187660217283 + ], + [ + "▁LTE", + -12.549219131469728 + ], + [ + "▁Shepherd", + -12.54926586151123 + ], + [ + "▁eats", + -12.549310684204102 + ], + [ + "▁spouses", + -12.54942512512207 + ], + [ + "▁Funny", + -12.549488067626951 + ], + [ + "▁1.7", + -12.549588203430176 + ], + [ + "▁skirts", + -12.549670219421388 + ], + [ + "▁sixty", + -12.549802780151367 + ], + [ + "▁135", + -12.54981803894043 + ], + [ + "▁strands", + -12.54990005493164 + ], + [ + "▁Farmer", + -12.549981117248535 + ], + [ + "▁ignorance", + -12.550078392028809 + ], + [ + "▁destined", + -12.55010223388672 + ], + [ + "▁taller", + -12.550105094909668 + ], + [ + "▁rapper", + -12.550158500671388 + ], + [ + "▁1975", + -12.550248146057127 + ], + [ + "▁Setting", + -12.550328254699709 + ], + [ + "ration", + -12.550355911254885 + ], + [ + "▁discontinued", + -12.5503568649292 + ], + [ + "▁Exterior", + -12.55046272277832 + ], + [ + "▁stud", + -12.550576210021973 + ], + [ + "▁Blind", + -12.550640106201172 + ], + [ + "▁proceeded", + -12.550698280334473 + ], + [ + "▁Hugh", + -12.550700187683104 + ], + [ + "▁Dot", + -12.550724029541016 + ], + [ + "GM", + -12.550832748413086 + ], + [ + "▁Bottle", + -12.550987243652344 + ], + [ + "▁terminology", + -12.551044464111328 + ], + [ + "▁leaked", + -12.551191329956056 + ], + [ + "▁Stores", + -12.55150032043457 + ], + [ + "▁Ur", + -12.551621437072754 + ], + [ + "▁cop", + -12.552006721496582 + ], + [ + "▁lithium", + -12.552011489868164 + ], + [ + "▁MU", + -12.552077293395996 + ], + [ + "▁Shift", + -12.552152633666992 + ], + [ + "▁Rapids", + -12.552380561828612 + ], + [ + "Si", + -12.552388191223145 + ], + [ + "beck", + -12.552449226379396 + ], + [ + "▁afterward", + -12.55250358581543 + ], + [ + "unless", + -12.55260944366455 + ], + [ + "ALL", + -12.552675247192385 + ], + [ + "▁continuation", + -12.552726745605469 + ], + [ + "backed", + -12.552732467651367 + ], + [ + "coat", + -12.55277156829834 + ], + [ + "▁ER", + -12.552846908569336 + ], + [ + "▁beneficiary", + -12.552979469299316 + ], + [ + "▁sharks", + -12.552983283996582 + ], + [ + "▁exposing", + -12.55299186706543 + ], + [ + "flower", + -12.55300998687744 + ], + [ + "PP", + -12.553049087524414 + ], + [ + "▁Businesses", + -12.553117752075195 + ], + [ + "▁cavity", + -12.553420066833496 + ], + [ + "▁disadvantage", + -12.553421020507812 + ], + [ + "▁SK", + -12.553434371948242 + ], + [ + "▁Appeals", + -12.553447723388672 + ], + [ + 
"▁protesters", + -12.553492546081545 + ], + [ + "▁Closet", + -12.553582191467283 + ], + [ + "ert", + -12.553589820861816 + ], + [ + "▁Providence", + -12.553741455078123 + ], + [ + "AG", + -12.553902626037598 + ], + [ + "Awesome", + -12.553914070129396 + ], + [ + "▁Ahmed", + -12.553962707519531 + ], + [ + "▁Dhabi", + -12.553990364074709 + ], + [ + "▁relieved", + -12.554265022277832 + ], + [ + "▁Positive", + -12.554281234741213 + ], + [ + "▁handing", + -12.554400444030762 + ], + [ + "sil", + -12.554442405700684 + ], + [ + "Ca", + -12.554454803466797 + ], + [ + "▁campers", + -12.55449390411377 + ], + [ + "bis", + -12.554535865783691 + ], + [ + "▁1978", + -12.554543495178224 + ], + [ + "Simple", + -12.554600715637209 + ], + [ + "▁Tee", + -12.554662704467772 + ], + [ + "▁improper", + -12.554680824279783 + ], + [ + "ABLE", + -12.554707527160645 + ], + [ + "▁SH", + -12.554713249206545 + ], + [ + "▁Valencia", + -12.554734230041504 + ], + [ + "▁Juice", + -12.554746627807615 + ], + [ + "▁Jets", + -12.554752349853516 + ], + [ + "▁MG", + -12.554876327514648 + ], + [ + "▁ISIS", + -12.554884910583496 + ], + [ + "▁sue", + -12.554895401000977 + ], + [ + "▁celery", + -12.554920196533203 + ], + [ + "▁Sheets", + -12.554969787597656 + ], + [ + "▁Licensed", + -12.555017471313477 + ], + [ + "-0", + -12.555072784423828 + ], + [ + "▁Nexus", + -12.555150032043455 + ], + [ + "▁Rocket", + -12.555195808410645 + ], + [ + "▁TS", + -12.555213928222656 + ], + [ + "▁Retirement", + -12.555526733398438 + ], + [ + "▁refreshments", + -12.555530548095703 + ], + [ + "auto", + -12.555609703063965 + ], + [ + "▁APIs", + -12.555641174316406 + ], + [ + "bing", + -12.555904388427734 + ], + [ + "TF", + -12.556221961975098 + ], + [ + "▁thanked", + -12.556501388549805 + ], + [ + "▁bracelets", + -12.556525230407717 + ], + [ + "shaw", + -12.556541442871094 + ], + [ + "▁notorious", + -12.55661964416504 + ], + [ + "▁trousers", + -12.556869506835938 + ], + [ + "▁plaster", + -12.556913375854492 + ], + [ + "▁Meyer", + -12.556982040405272 + ], + [ + "▁Evil", + -12.557013511657717 + ], + [ + "training", + -12.55720329284668 + ], + [ + "deep", + -12.557223320007324 + ], + [ + "root", + -12.55734157562256 + ], + [ + "▁expires", + -12.55734157562256 + ], + [ + "agh", + -12.557379722595217 + ], + [ + "▁priests", + -12.557501792907717 + ], + [ + "▁Southampton", + -12.557613372802734 + ], + [ + "RI", + -12.557644844055176 + ], + [ + "▁lure", + -12.557659149169922 + ], + [ + "▁smoothie", + -12.557730674743652 + ], + [ + "▁Solomon", + -12.557809829711914 + ], + [ + "▁presidency", + -12.557842254638672 + ], + [ + "▁Been", + -12.558063507080078 + ], + [ + "personal", + -12.558130264282228 + ], + [ + "▁mansion", + -12.558133125305176 + ], + [ + "▁Gross", + -12.55824089050293 + ], + [ + "▁Gan", + -12.558282852172852 + ], + [ + "▁Napa", + -12.558284759521484 + ], + [ + "▁Identity", + -12.558320045471191 + ], + [ + "▁Boutique", + -12.55832862854004 + ], + [ + "▁enriched", + -12.558330535888672 + ], + [ + "▁Polo", + -12.558371543884276 + ], + [ + "▁SU", + -12.558384895324709 + ], + [ + "▁Wan", + -12.558599472045898 + ], + [ + "▁downstream", + -12.558682441711426 + ], + [ + "▁Kre", + -12.55872631072998 + ], + [ + "▁lunches", + -12.558761596679688 + ], + [ + "▁inconsistent", + -12.55876636505127 + ], + [ + "▁onwards", + -12.558801651000977 + ], + [ + "▁calculating", + -12.558807373046877 + ], + [ + "▁residual", + -12.558807373046877 + ], + [ + "▁viewpoint", + -12.558826446533203 + ], + [ + "▁Olivia", + -12.558828353881836 + ], + [ + "▁Molly", + 
-12.558844566345217 + ], + [ + "▁aggressively", + -12.558856964111328 + ], + [ + "▁sprint", + -12.558857917785645 + ], + [ + "8)", + -12.55892848968506 + ], + [ + "▁dia", + -12.558929443359377 + ], + [ + "release", + -12.559070587158203 + ], + [ + "nine", + -12.559085845947266 + ], + [ + "▁UCLA", + -12.559152603149414 + ], + [ + "mond", + -12.559164047241213 + ], + [ + "▁Mickey", + -12.559246063232422 + ], + [ + "▁influencers", + -12.559290885925291 + ], + [ + "▁replay", + -12.559406280517578 + ], + [ + "▁filler", + -12.559449195861816 + ], + [ + "▁centrally", + -12.559488296508787 + ], + [ + "▁WV", + -12.559499740600586 + ], + [ + "▁cheeses", + -12.55958366394043 + ], + [ + "▁Sacred", + -12.55966854095459 + ], + [ + "▁affordability", + -12.559890747070312 + ], + [ + "heads", + -12.560032844543455 + ], + [ + "Self", + -12.560107231140137 + ], + [ + "nel", + -12.560131072998049 + ], + [ + "▁lotion", + -12.560208320617676 + ], + [ + "many", + -12.560352325439451 + ], + [ + "▁CB", + -12.560375213623049 + ], + [ + "▁Tanzania", + -12.560412406921388 + ], + [ + "▁1955", + -12.56047534942627 + ], + [ + "Inter", + -12.560522079467772 + ], + [ + "▁unacceptable", + -12.560783386230469 + ], + [ + "▁unemployed", + -12.560824394226074 + ], + [ + "▁ergonomic", + -12.560891151428224 + ], + [ + "▁creepy", + -12.560942649841309 + ], + [ + "▁Spend", + -12.561005592346191 + ], + [ + "▁memoir", + -12.561015129089355 + ], + [ + "▁statues", + -12.561117172241213 + ], + [ + "▁(19", + -12.56117343902588 + ], + [ + "tile", + -12.561396598815918 + ], + [ + "▁transporting", + -12.56144905090332 + ], + [ + "▁uplifting", + -12.561482429504396 + ], + [ + "bat", + -12.561527252197266 + ], + [ + "▁Damage", + -12.56156063079834 + ], + [ + "setting", + -12.561759948730469 + ], + [ + "threatening", + -12.561895370483398 + ], + [ + "CK", + -12.561927795410156 + ], + [ + "▁transcript", + -12.561935424804688 + ], + [ + "iron", + -12.561991691589355 + ], + [ + "▁Richardson", + -12.56206512451172 + ], + [ + "▁deed", + -12.562079429626465 + ], + [ + "ann", + -12.562135696411133 + ], + [ + "▁mantra", + -12.5621919631958 + ], + [ + "▁ED", + -12.562275886535645 + ], + [ + "virus", + -12.562285423278809 + ], + [ + "Action", + -12.562331199645996 + ], + [ + "▁raffle", + -12.562339782714844 + ], + [ + "▁rituals", + -12.56234645843506 + ], + [ + "▁Lotus", + -12.56242847442627 + ], + [ + "▁gradual", + -12.562472343444824 + ], + [ + "▁intimidating", + -12.562475204467772 + ], + [ + "race", + -12.56259059906006 + ], + [ + "▁pledged", + -12.562639236450195 + ], + [ + "operation", + -12.562642097473145 + ], + [ + "▁Kashmir", + -12.562695503234863 + ], + [ + "▁mist", + -12.562763214111328 + ], + [ + "▁bee", + -12.563042640686035 + ], + [ + "▁govern", + -12.563226699829102 + ], + [ + "Aside", + -12.56324291229248 + ], + [ + "▁filmmakers", + -12.563270568847656 + ], + [ + "▁$11", + -12.563292503356934 + ], + [ + "MENT", + -12.563346862792969 + ], + [ + "since", + -12.563361167907717 + ], + [ + "▁Neu", + -12.563411712646484 + ], + [ + "match", + -12.563433647155762 + ], + [ + "rating", + -12.563606262207031 + ], + [ + "▁plank", + -12.563673973083496 + ], + [ + "▁pottery", + -12.56379222869873 + ], + [ + "▁apt", + -12.563850402832031 + ], + [ + "▁documenting", + -12.56392765045166 + ], + [ + "thinking", + -12.56393051147461 + ], + [ + "▁spirituality", + -12.563982009887695 + ], + [ + "▁Marriott", + -12.563990592956545 + ], + [ + "▁richer", + -12.564029693603516 + ], + [ + "▁Films", + -12.564040184020996 + ], + [ + "▁storyline", + 
-12.564069747924805 + ], + [ + "▁Handle", + -12.564203262329102 + ], + [ + "▁Numbers", + -12.564496040344238 + ], + [ + "▁Alto", + -12.564544677734377 + ], + [ + "▁voyage", + -12.564550399780272 + ], + [ + "▁mates", + -12.56460952758789 + ], + [ + "▁tiled", + -12.564635276794434 + ], + [ + "▁(5)", + -12.5646390914917 + ], + [ + "▁lifestyles", + -12.564687728881836 + ], + [ + "то", + -12.564911842346191 + ], + [ + "▁Rodriguez", + -12.564940452575684 + ], + [ + "81", + -12.565074920654297 + ], + [ + "▁sir", + -12.565235137939451 + ], + [ + "conscious", + -12.56553554534912 + ], + [ + "travel", + -12.56565761566162 + ], + [ + "Pop", + -12.565808296203612 + ], + [ + "Hand", + -12.56584930419922 + ], + [ + "▁striker", + -12.56589126586914 + ], + [ + "▁$13", + -12.566025733947754 + ], + [ + "baby", + -12.566123008728027 + ], + [ + "▁Gin", + -12.566123962402344 + ], + [ + "▁batches", + -12.566165924072266 + ], + [ + "double", + -12.566279411315918 + ], + [ + "4-", + -12.566304206848145 + ], + [ + "▁helm", + -12.566372871398926 + ], + [ + "▁wagon", + -12.566444396972656 + ], + [ + "▁RPG", + -12.56659984588623 + ], + [ + "▁shortest", + -12.566620826721191 + ], + [ + "▁Minutes", + -12.566630363464355 + ], + [ + "▁pertinent", + -12.566631317138672 + ], + [ + "▁Kickstarter", + -12.566632270812988 + ], + [ + "▁deluxe", + -12.566652297973633 + ], + [ + "▁eCommerce", + -12.56671905517578 + ], + [ + "hua", + -12.566747665405272 + ], + [ + "▁Nest", + -12.56675148010254 + ], + [ + "Lastly", + -12.56680965423584 + ], + [ + "▁Tonight", + -12.566818237304688 + ], + [ + "▁Hayes", + -12.566884994506836 + ], + [ + "-02", + -12.56694793701172 + ], + [ + "▁whisky", + -12.56711769104004 + ], + [ + "▁counterpart", + -12.567193984985352 + ], + [ + "▁PH", + -12.567264556884766 + ], + [ + "▁lavish", + -12.567383766174316 + ], + [ + "▁Coconut", + -12.567429542541504 + ], + [ + "▁Sec", + -12.567438125610352 + ], + [ + "▁conducts", + -12.567523956298828 + ], + [ + "▁childcare", + -12.567550659179688 + ], + [ + "▁razor", + -12.567617416381836 + ], + [ + "▁Borough", + -12.567620277404783 + ], + [ + "▁Reagan", + -12.567652702331545 + ], + [ + "▁MySQL", + -12.567788124084473 + ], + [ + "▁4000", + -12.567963600158691 + ], + [ + "▁unfold", + -12.568060874938965 + ], + [ + "Pick", + -12.568065643310549 + ], + [ + "▁Poll", + -12.568171501159668 + ], + [ + "VM", + -12.568182945251465 + ], + [ + "▁drained", + -12.568363189697266 + ], + [ + "▁Latino", + -12.568470001220703 + ], + [ + "Scott", + -12.568501472473145 + ], + [ + "▁bustling", + -12.568643569946287 + ], + [ + "▁Titan", + -12.568753242492676 + ], + [ + "▁haunted", + -12.568815231323242 + ], + [ + "▁feather", + -12.56884479522705 + ], + [ + "▁scholarly", + -12.568896293640137 + ], + [ + "▁Joyce", + -12.568971633911133 + ], + [ + "▁renamed", + -12.568989753723145 + ], + [ + "▁engraved", + -12.569046974182127 + ], + [ + "▁sizing", + -12.569121360778809 + ], + [ + "▁inaccurate", + -12.5691556930542 + ], + [ + "write", + -12.569293022155762 + ], + [ + "User", + -12.569303512573242 + ], + [ + "▁piles", + -12.569328308105469 + ], + [ + "Way", + -12.569358825683594 + ], + [ + "▁unconscious", + -12.569363594055176 + ], + [ + "Mary", + -12.569396018981934 + ], + [ + "▁witch", + -12.569498062133787 + ], + [ + "▁presenter", + -12.569586753845217 + ], + [ + "▁zoning", + -12.569595336914062 + ], + [ + "Pour", + -12.569795608520508 + ], + [ + "▁feeder", + -12.569985389709473 + ], + [ + "▁Signs", + -12.570029258728027 + ], + [ + "▁Hungarian", + -12.570046424865724 + ], + [ + "▁Logistics", 
+ -12.570220947265623 + ], + [ + "▁Facilities", + -12.570245742797852 + ], + [ + "▁violate", + -12.57025909423828 + ], + [ + "▁Kenneth", + -12.57029628753662 + ], + [ + "▁debuted", + -12.570313453674316 + ], + [ + "▁QR", + -12.570446968078612 + ], + [ + "▁opting", + -12.570496559143066 + ], + [ + ".95", + -12.570501327514648 + ], + [ + "50,000", + -12.570562362670898 + ], + [ + "▁maternal", + -12.570568084716797 + ], + [ + "▁diversified", + -12.570571899414062 + ], + [ + "▁caregiver", + -12.57075023651123 + ], + [ + "Card", + -12.570796012878418 + ], + [ + "▁defended", + -12.570955276489258 + ], + [ + "▁corrections", + -12.571189880371094 + ], + [ + "▁Ways", + -12.5712308883667 + ], + [ + "▁redesigned", + -12.571309089660645 + ], + [ + "▁Lauderdale", + -12.57155418395996 + ], + [ + "▁buddies", + -12.57155418395996 + ], + [ + "▁sesame", + -12.571560859680176 + ], + [ + "▁probable", + -12.571578979492188 + ], + [ + "Land", + -12.571633338928224 + ], + [ + "Total", + -12.571732521057127 + ], + [ + "▁ornament", + -12.57176399230957 + ], + [ + "▁pavement", + -12.571863174438477 + ], + [ + "▁Chance", + -12.57188320159912 + ], + [ + "vic", + -12.572036743164062 + ], + [ + "▁Mir", + -12.572245597839355 + ], + [ + "▁obsolete", + -12.57232666015625 + ], + [ + "▁Normal", + -12.57246208190918 + ], + [ + "▁urging", + -12.572551727294922 + ], + [ + "▁Capacity", + -12.572612762451172 + ], + [ + "▁suspicion", + -12.572839736938477 + ], + [ + "▁Martinez", + -12.572949409484863 + ], + [ + "▁Zach", + -12.573028564453123 + ], + [ + "▁IC", + -12.57310390472412 + ], + [ + "▁1967", + -12.573135375976562 + ], + [ + "▁unexpectedly", + -12.573152542114258 + ], + [ + "isa", + -12.573211669921877 + ], + [ + "▁blonde", + -12.57346534729004 + ], + [ + "▁subsidies", + -12.573528289794922 + ], + [ + "▁Disaster", + -12.5736083984375 + ], + [ + "Ga", + -12.573688507080078 + ], + [ + "▁legislators", + -12.573748588562012 + ], + [ + "▁courier", + -12.57384204864502 + ], + [ + "▁Sonic", + -12.573851585388184 + ], + [ + "/18", + -12.573929786682127 + ], + [ + "OT", + -12.57395076751709 + ], + [ + "Canada", + -12.574119567871094 + ], + [ + "▁Purpose", + -12.574148178100586 + ], + [ + "▁sued", + -12.57418155670166 + ], + [ + "▁32-", + -12.574214935302734 + ], + [ + "club", + -12.574373245239258 + ], + [ + "oe", + -12.574403762817385 + ], + [ + "▁caliber", + -12.574413299560549 + ], + [ + "▁Omaha", + -12.5745210647583 + ], + [ + "▁artisan", + -12.574767112731934 + ], + [ + "cra", + -12.574849128723145 + ], + [ + "cz", + -12.574880599975586 + ], + [ + "▁drummer", + -12.574883460998535 + ], + [ + "▁carriage", + -12.574959754943848 + ], + [ + "▁yo", + -12.575197219848633 + ], + [ + "▁electrician", + -12.57521915435791 + ], + [ + "electric", + -12.575284004211426 + ], + [ + "▁continual", + -12.575295448303224 + ], + [ + "▁Notre", + -12.57547092437744 + ], + [ + "▁dispatch", + -12.575587272644045 + ], + [ + "▁$5,000", + -12.575923919677734 + ], + [ + "▁Auburn", + -12.575944900512695 + ], + [ + "▁defendants", + -12.57626247406006 + ], + [ + "▁futures", + -12.576340675354004 + ], + [ + "▁toasted", + -12.576343536376951 + ], + [ + "▁Clearly", + -12.576349258422852 + ], + [ + "▁Collective", + -12.57646656036377 + ], + [ + "▁ambience", + -12.576498031616213 + ], + [ + "▁circus", + -12.576499938964844 + ], + [ + "▁chaotic", + -12.57650375366211 + ], + [ + "▁tread", + -12.576539993286133 + ], + [ + "▁ethic", + -12.576692581176758 + ], + [ + "320", + -12.576800346374512 + ], + [ + "▁Mun", + -12.577369689941406 + ], + [ + "yi", + 
-12.577381134033203 + ], + [ + "uro", + -12.577500343322754 + ], + [ + "▁Parkinson", + -12.57752799987793 + ], + [ + "▁provinces", + -12.577529907226562 + ], + [ + "hara", + -12.577589988708496 + ], + [ + "▁Elliott", + -12.57768440246582 + ], + [ + "▁fox", + -12.57769775390625 + ], + [ + "▁torch", + -12.577715873718262 + ], + [ + "▁silicon", + -12.57776927947998 + ], + [ + "▁Seoul", + -12.577801704406738 + ], + [ + "ving", + -12.578039169311523 + ], + [ + "Yesterday", + -12.578043937683104 + ], + [ + "▁Rating", + -12.57815647125244 + ], + [ + "kt", + -12.57823085784912 + ], + [ + "▁Rand", + -12.578242301940918 + ], + [ + "▁conscience", + -12.578502655029297 + ], + [ + "▁flute", + -12.57855224609375 + ], + [ + "▁Trailer", + -12.578646659851074 + ], + [ + "close", + -12.57866668701172 + ], + [ + "▁Yankees", + -12.578676223754885 + ], + [ + "▁wastewater", + -12.578707695007324 + ], + [ + "▁HBO", + -12.57876682281494 + ], + [ + "ige", + -12.578768730163574 + ], + [ + "▁Gap", + -12.578824043273926 + ], + [ + "▁Congressional", + -12.578831672668455 + ], + [ + "▁Horizon", + -12.57889175415039 + ], + [ + "▁merits", + -12.578973770141602 + ], + [ + "▁hereby", + -12.57900047302246 + ], + [ + "Between", + -12.579065322875977 + ], + [ + "Beyond", + -12.579065322875977 + ], + [ + "▁Philippine", + -12.5791015625 + ], + [ + "▁stash", + -12.579214096069336 + ], + [ + "▁specimens", + -12.579225540161133 + ], + [ + "...)", + -12.579286575317385 + ], + [ + "Mart", + -12.579304695129396 + ], + [ + "seat", + -12.579442024230955 + ], + [ + "▁algae", + -12.579445838928224 + ], + [ + "kai", + -12.57945442199707 + ], + [ + "-03", + -12.579476356506348 + ], + [ + "▁artisans", + -12.57948875427246 + ], + [ + "▁Debbie", + -12.57958984375 + ], + [ + "mac", + -12.579720497131348 + ], + [ + "▁liquidity", + -12.579730033874512 + ], + [ + "charge", + -12.579747200012209 + ], + [ + "▁Wealth", + -12.579798698425291 + ], + [ + "▁spacecraft", + -12.579938888549805 + ], + [ + "▁multinational", + -12.579940795898438 + ], + [ + "VC", + -12.580129623413086 + ], + [ + "▁Malaysian", + -12.580172538757324 + ], + [ + "▁Lifetime", + -12.580192565917969 + ], + [ + "▁Treat", + -12.580194473266602 + ], + [ + "▁breakout", + -12.580205917358398 + ], + [ + "▁sheds", + -12.580256462097168 + ], + [ + "▁Palestinians", + -12.58029079437256 + ], + [ + "▁Kane", + -12.58031940460205 + ], + [ + "▁ropes", + -12.58035945892334 + ], + [ + "/8", + -12.5803861618042 + ], + [ + "▁SSD", + -12.580448150634766 + ], + [ + "▁agreeing", + -12.580473899841309 + ], + [ + "▁Paypal", + -12.58049488067627 + ], + [ + "▁vastly", + -12.580549240112305 + ], + [ + "▁monkey", + -12.580552101135254 + ], + [ + "▁Cardinals", + -12.580628395080566 + ], + [ + "▁Andre", + -12.580720901489258 + ], + [ + "▁Comprehensive", + -12.580829620361328 + ], + [ + "▁drills", + -12.581026077270508 + ], + [ + "▁flavorful", + -12.581121444702148 + ], + [ + "▁Filipino", + -12.581164360046388 + ], + [ + "▁Manitoba", + -12.58135223388672 + ], + [ + "▁silky", + -12.581446647644045 + ], + [ + "▁whiskey", + -12.581507682800291 + ], + [ + "resolution", + -12.581509590148926 + ], + [ + "▁Cry", + -12.581534385681152 + ], + [ + "▁limo", + -12.581660270690918 + ], + [ + "▁Definition", + -12.581921577453612 + ], + [ + "▁breathable", + -12.58193588256836 + ], + [ + "▁CL", + -12.582000732421877 + ], + [ + "79", + -12.582085609436035 + ], + [ + "▁terminated", + -12.582188606262209 + ], + [ + "▁pastel", + -12.582279205322266 + ], + [ + "anta", + -12.582284927368164 + ], + [ + "▁necessities", + 
-12.58246612548828 + ], + [ + "▁Compared", + -12.582600593566896 + ], + [ + "▁Pleasant", + -12.58271598815918 + ], + [ + "▁prescribe", + -12.58290672302246 + ], + [ + "▁relocate", + -12.58290672302246 + ], + [ + "▁Hen", + -12.582935333251951 + ], + [ + "▁Ventures", + -12.582999229431152 + ], + [ + "▁coarse", + -12.583005905151367 + ], + [ + "▁leash", + -12.583060264587402 + ], + [ + "▁replacements", + -12.583137512207031 + ], + [ + "sic", + -12.58335304260254 + ], + [ + "▁Including", + -12.583366394042969 + ], + [ + "▁commenced", + -12.583369255065918 + ], + [ + "▁paving", + -12.58346939086914 + ], + [ + "▁imposing", + -12.583470344543455 + ], + [ + "▁Goals", + -12.583595275878906 + ], + [ + "▁Sprinkle", + -12.583645820617676 + ], + [ + "chu", + -12.583699226379396 + ], + [ + "▁Vera", + -12.583704948425291 + ], + [ + "▁tenth", + -12.583734512329102 + ], + [ + "▁fracture", + -12.58383083343506 + ], + [ + "▁Injury", + -12.583844184875488 + ], + [ + "▁detergent", + -12.58389663696289 + ], + [ + "▁1-0", + -12.584083557128906 + ], + [ + "BL", + -12.584136009216309 + ], + [ + "▁Beef", + -12.58427619934082 + ], + [ + "▁emphasizes", + -12.584327697753906 + ], + [ + "▁Concept", + -12.58434772491455 + ], + [ + "▁Bis", + -12.584362030029297 + ], + [ + "▁achievable", + -12.58446216583252 + ], + [ + "▁Hor", + -12.584518432617188 + ], + [ + "▁Mercy", + -12.584566116333008 + ], + [ + "▁uphold", + -12.58461856842041 + ], + [ + "▁exhibiting", + -12.584624290466309 + ], + [ + "digital", + -12.584643363952637 + ], + [ + "▁Plains", + -12.584660530090332 + ], + [ + "▁downhill", + -12.584779739379885 + ], + [ + "▁converts", + -12.584782600402832 + ], + [ + "OD", + -12.584939002990724 + ], + [ + "LD", + -12.584948539733888 + ], + [ + "Dis", + -12.585079193115234 + ], + [ + "ando", + -12.585091590881348 + ], + [ + "▁advisers", + -12.585187911987305 + ], + [ + "▁motherboard", + -12.585293769836426 + ], + [ + "▁theology", + -12.58530330657959 + ], + [ + "leigh", + -12.585355758666992 + ], + [ + "▁Dai", + -12.585371017456056 + ], + [ + "▁Clothing", + -12.585373878479004 + ], + [ + "▁depicts", + -12.58542823791504 + ], + [ + "▁Tucson", + -12.58546257019043 + ], + [ + "▁fermentation", + -12.58547592163086 + ], + [ + "▁Achievement", + -12.585488319396973 + ], + [ + "▁Trends", + -12.585506439208984 + ], + [ + "▁fostering", + -12.585531234741213 + ], + [ + "offs", + -12.585667610168455 + ], + [ + "▁tactic", + -12.585874557495115 + ], + [ + "▁equals", + -12.58597183227539 + ], + [ + "▁accommodating", + -12.586122512817385 + ], + [ + "▁boiled", + -12.586135864257812 + ], + [ + "▁slaves", + -12.5862455368042 + ], + [ + "Something", + -12.586271286010742 + ], + [ + "▁pivot", + -12.586271286010742 + ], + [ + "▁workspace", + -12.58642864227295 + ], + [ + "▁dislike", + -12.586475372314451 + ], + [ + "▁Dealer", + -12.5866117477417 + ], + [ + "▁17-", + -12.586719512939451 + ], + [ + "▁MVP", + -12.586777687072754 + ], + [ + "▁Password", + -12.586814880371094 + ], + [ + "▁freshness", + -12.587200164794922 + ], + [ + "employed", + -12.58721923828125 + ], + [ + "Whilst", + -12.587247848510742 + ], + [ + "▁automobiles", + -12.587289810180664 + ], + [ + "qua", + -12.587309837341309 + ], + [ + "▁nickname", + -12.587346076965332 + ], + [ + "Stone", + -12.587360382080078 + ], + [ + "▁societal", + -12.587465286254885 + ], + [ + "▁Naval", + -12.587486267089844 + ], + [ + "tab", + -12.587498664855955 + ], + [ + "té", + -12.587586402893066 + ], + [ + "▁ES", + -12.587678909301758 + ], + [ + "Content", + -12.58768081665039 + ], + [ + "▁Guides", 
+ -12.587682723999023 + ], + [ + "ual", + -12.5877103805542 + ], + [ + "▁frightening", + -12.58771514892578 + ], + [ + "▁Include", + -12.587718963623049 + ], + [ + "▁char", + -12.587718963623049 + ], + [ + "▁Legends", + -12.587810516357422 + ], + [ + "▁sock", + -12.587944984436035 + ], + [ + "▁rotary", + -12.58808135986328 + ], + [ + "whether", + -12.58828067779541 + ], + [ + "▁walmart", + -12.588313102722168 + ], + [ + "▁mutually", + -12.588396072387695 + ], + [ + "enabled", + -12.588451385498049 + ], + [ + "boot", + -12.588497161865234 + ], + [ + "▁15,000", + -12.588530540466309 + ], + [ + "▁Utility", + -12.58864688873291 + ], + [ + "▁Moor", + -12.588668823242188 + ], + [ + "▁$16", + -12.58884048461914 + ], + [ + "▁sauces", + -12.588866233825684 + ], + [ + "▁Mann", + -12.588868141174316 + ], + [ + "critical", + -12.58888339996338 + ], + [ + "▁Dreams", + -12.589146614074709 + ], + [ + "▁textbooks", + -12.589187622070312 + ], + [ + "▁congratulations", + -12.589190483093262 + ], + [ + "these", + -12.589296340942385 + ], + [ + "▁Factor", + -12.589314460754396 + ], + [ + "▁Panasonic", + -12.589322090148926 + ], + [ + "▁crashing", + -12.589322090148926 + ], + [ + "RB", + -12.58932876586914 + ], + [ + "▁occupancy", + -12.589576721191406 + ], + [ + "▁Advice", + -12.589595794677734 + ], + [ + "▁Timber", + -12.589677810668944 + ], + [ + "3.5", + -12.589733123779297 + ], + [ + "ement", + -12.589825630187988 + ], + [ + "Know", + -12.589900970458984 + ], + [ + "▁juvenile", + -12.58997917175293 + ], + [ + "Center", + -12.590161323547363 + ], + [ + "▁admits", + -12.590253829956056 + ], + [ + "NASDAQ", + -12.590299606323242 + ], + [ + "Standard", + -12.590326309204102 + ], + [ + "Location", + -12.590346336364746 + ], + [ + "formerly", + -12.59047794342041 + ], + [ + "▁simulations", + -12.590557098388672 + ], + [ + "▁Promotion", + -12.590583801269531 + ], + [ + "▁Hearts", + -12.590611457824709 + ], + [ + "▁atomic", + -12.590727806091309 + ], + [ + "lier", + -12.590743064880373 + ], + [ + "▁Attack", + -12.590827941894531 + ], + [ + "▁Interested", + -12.590858459472656 + ], + [ + "▁Lime", + -12.590920448303224 + ], + [ + "▁articulate", + -12.59092140197754 + ], + [ + "UM", + -12.590944290161133 + ], + [ + "▁prom", + -12.591042518615724 + ], + [ + "▁Bot", + -12.59104347229004 + ], + [ + "▁Tracy", + -12.591058731079102 + ], + [ + "Bake", + -12.59106159210205 + ], + [ + "▁Pole", + -12.591306686401367 + ], + [ + "grown", + -12.591327667236328 + ], + [ + "Richard", + -12.59132957458496 + ], + [ + "▁resumes", + -12.591338157653809 + ], + [ + "▁AU", + -12.59134578704834 + ], + [ + "▁Listing", + -12.591489791870115 + ], + [ + "▁professions", + -12.591572761535645 + ], + [ + "▁Kolkata", + -12.591667175292969 + ], + [ + "▁Methods", + -12.591679573059082 + ], + [ + "▁Spectrum", + -12.591736793518066 + ], + [ + "▁Providing", + -12.591910362243652 + ], + [ + "TIP", + -12.59196662902832 + ], + [ + "▁supernatural", + -12.591992378234863 + ], + [ + "▁Pepper", + -12.59201717376709 + ], + [ + "▁mulch", + -12.59202766418457 + ], + [ + "▁fruity", + -12.592087745666504 + ], + [ + "▁Booking", + -12.59210968017578 + ], + [ + "▁creatively", + -12.592118263244627 + ], + [ + "HL", + -12.592148780822754 + ], + [ + "▁Leg", + -12.592157363891602 + ], + [ + "ub", + -12.592191696166992 + ], + [ + "®,", + -12.592257499694824 + ], + [ + "rose", + -12.59231948852539 + ], + [ + "▁Wade", + -12.592436790466309 + ], + [ + "▁0.2", + -12.592514038085938 + ], + [ + "▁whopping", + -12.592554092407228 + ], + [ + "▁ecology", + -12.59263038635254 + 
], + [ + "▁intro", + -12.593000411987305 + ], + [ + "ste", + -12.593161582946776 + ], + [ + "▁coursework", + -12.593213081359863 + ], + [ + "▁bananas", + -12.593393325805664 + ], + [ + "▁grit", + -12.5936861038208 + ], + [ + "▁Natalie", + -12.593690872192385 + ], + [ + "▁Finn", + -12.593701362609863 + ], + [ + "Listen", + -12.59373378753662 + ], + [ + "▁oats", + -12.593743324279783 + ], + [ + "▁chalk", + -12.59378433227539 + ], + [ + "▁blooms", + -12.594001770019531 + ], + [ + "82", + -12.594096183776855 + ], + [ + "▁Appeal", + -12.59438133239746 + ], + [ + "▁heavenly", + -12.594385147094728 + ], + [ + "▁Preston", + -12.594441413879396 + ], + [ + "stown", + -12.59445571899414 + ], + [ + "▁Choir", + -12.59453582763672 + ], + [ + "again", + -12.594539642333984 + ], + [ + "▁spiritually", + -12.594575881958008 + ], + [ + "▁informing", + -12.594599723815918 + ], + [ + "▁Herald", + -12.594619750976562 + ], + [ + "▁Carrie", + -12.594630241394045 + ], + [ + "▁Insert", + -12.594653129577637 + ], + [ + "▁vertically", + -12.59474277496338 + ], + [ + "▁2.3", + -12.594902992248535 + ], + [ + "▁discharged", + -12.594947814941406 + ], + [ + "▁forgetting", + -12.595000267028809 + ], + [ + "▁Shoe", + -12.595115661621094 + ], + [ + "▁Changing", + -12.595192909240724 + ], + [ + "▁hail", + -12.595223426818848 + ], + [ + "▁patron", + -12.59523105621338 + ], + [ + "▁Lifestyle", + -12.595295906066896 + ], + [ + "▁cultivated", + -12.595309257507324 + ], + [ + "▁Burger", + -12.595355987548828 + ], + [ + "induced", + -12.595499038696287 + ], + [ + "▁cauliflower", + -12.595518112182615 + ], + [ + "▁synonymous", + -12.595575332641602 + ], + [ + "1.5", + -12.595614433288574 + ], + [ + "▁Extreme", + -12.595614433288574 + ], + [ + "▁cliffs", + -12.595752716064451 + ], + [ + "▁conquer", + -12.595778465270996 + ], + [ + "▁Beck", + -12.596055030822754 + ], + [ + "▁resurrection", + -12.596073150634766 + ], + [ + "txt", + -12.596078872680664 + ], + [ + "▁affiliation", + -12.59609603881836 + ], + [ + "▁tiger", + -12.59619426727295 + ], + [ + "▁readiness", + -12.596219062805176 + ], + [ + "Law", + -12.596220016479492 + ], + [ + "▁Jacket", + -12.596224784851074 + ], + [ + "▁chores", + -12.596308708190918 + ], + [ + "▁sugars", + -12.596323013305664 + ], + [ + "▁eBook", + -12.596529006958008 + ], + [ + "▁exploitation", + -12.596576690673828 + ], + [ + "photo", + -12.596701622009276 + ], + [ + "▁roadmap", + -12.596745491027832 + ], + [ + "▁hefty", + -12.596750259399414 + ], + [ + "▁belonged", + -12.596885681152344 + ], + [ + "▁banquet", + -12.596891403198242 + ], + [ + "▁jets", + -12.596940994262695 + ], + [ + "▁Pam", + -12.59698486328125 + ], + [ + "▁ace", + -12.597140312194824 + ], + [ + "Direct", + -12.597173690795898 + ], + [ + "▁aggression", + -12.597368240356444 + ], + [ + "Too", + -12.597442626953123 + ], + [ + "▁finalists", + -12.597453117370604 + ], + [ + "▁Arms", + -12.597464561462402 + ], + [ + "Third", + -12.597482681274414 + ], + [ + "▁seldom", + -12.597540855407717 + ], + [ + "▁$14", + -12.597541809082031 + ], + [ + "▁aperture", + -12.597543716430664 + ], + [ + "8,", + -12.597715377807615 + ], + [ + "▁humorous", + -12.597726821899414 + ], + [ + "▁budgeting", + -12.597807884216309 + ], + [ + "iana", + -12.597967147827148 + ], + [ + "▁Character", + -12.598014831542969 + ], + [ + "▁grabbing", + -12.598028182983398 + ], + [ + "▁Coupons", + -12.598076820373535 + ], + [ + "▁vi", + -12.598093032836914 + ], + [ + "▁Adults", + -12.59810733795166 + ], + [ + "Tri", + -12.598112106323242 + ], + [ + "▁Dictionary", + 
-12.598118782043455 + ], + [ + "▁arbitrary", + -12.59816074371338 + ], + [ + "▁Tiffany", + -12.598326683044434 + ], + [ + "▁tofu", + -12.598443031311035 + ], + [ + "▁impeccable", + -12.598554611206056 + ], + [ + "▁restructuring", + -12.598563194274902 + ], + [ + "▁ratios", + -12.598607063293455 + ], + [ + "Amazon", + -12.598613739013672 + ], + [ + "Christmas", + -12.598617553710938 + ], + [ + "▁tripod", + -12.598699569702148 + ], + [ + "▁tariff", + -12.59872341156006 + ], + [ + "eck", + -12.598889350891112 + ], + [ + "▁Folk", + -12.598918914794922 + ], + [ + "▁bumps", + -12.598926544189451 + ], + [ + "▁':", + -12.598943710327148 + ], + [ + "hair", + -12.599016189575195 + ], + [ + "▁purchaser", + -12.599085807800291 + ], + [ + "▁111", + -12.599387168884276 + ], + [ + "boat", + -12.599417686462402 + ], + [ + "▁Everybody", + -12.599469184875488 + ], + [ + "▁uninstall", + -12.59957790374756 + ], + [ + "money", + -12.59958267211914 + ], + [ + "Grand", + -12.599603652954102 + ], + [ + "▁endorse", + -12.59960651397705 + ], + [ + "▁Pdf", + -12.59966278076172 + ], + [ + "▁Vernon", + -12.5997314453125 + ], + [ + "▁Fleet", + -12.599764823913574 + ], + [ + "▁worm", + -12.599790573120115 + ], + [ + "▁RSVP", + -12.599797248840332 + ], + [ + "▁Strike", + -12.599854469299316 + ], + [ + "▁altar", + -12.599855422973633 + ], + [ + "▁cravings", + -12.599876403808594 + ], + [ + "▁1995.", + -12.599923133850098 + ], + [ + "▁Soup", + -12.600035667419434 + ], + [ + "▁awaits", + -12.600051879882812 + ], + [ + "ista", + -12.600184440612791 + ], + [ + "▁airy", + -12.60019302368164 + ], + [ + "▁sweets", + -12.600201606750488 + ], + [ + "▁literal", + -12.600204467773438 + ], + [ + "▁Cowboys", + -12.600337028503418 + ], + [ + "ore", + -12.600339889526367 + ], + [ + "▁las", + -12.600397109985352 + ], + [ + "▁Das", + -12.600564002990724 + ], + [ + "▁Rally", + -12.600591659545898 + ], + [ + "Double", + -12.600709915161133 + ], + [ + "▁predictive", + -12.600785255432127 + ], + [ + "▁Satan", + -12.600979804992676 + ], + [ + "▁extras", + -12.601070404052734 + ], + [ + "▁businessman", + -12.60107135772705 + ], + [ + "▁Hospice", + -12.601226806640623 + ], + [ + "▁PCB", + -12.601243019104004 + ], + [ + "▁nod", + -12.601551055908203 + ], + [ + "▁Aboriginal", + -12.601603507995604 + ], + [ + "▁Champagne", + -12.601603507995604 + ], + [ + "▁auf", + -12.601644515991213 + ], + [ + "jack", + -12.601703643798828 + ], + [ + "▁Architect", + -12.601704597473145 + ], + [ + "▁sensational", + -12.601715087890623 + ], + [ + "▁Ri", + -12.601736068725586 + ], + [ + "▁Brent", + -12.601777076721191 + ], + [ + "sheet", + -12.601780891418455 + ], + [ + "ario", + -12.601837158203123 + ], + [ + "▁Examination", + -12.60186004638672 + ], + [ + "▁muscular", + -12.602057456970217 + ], + [ + "▁Rug", + -12.602115631103516 + ], + [ + "▁Analyst", + -12.602253913879396 + ], + [ + "▁citation", + -12.602381706237791 + ], + [ + "▁individualized", + -12.602609634399414 + ], + [ + "▁Compensation", + -12.602622985839844 + ], + [ + "▁Beatles", + -12.602642059326172 + ], + [ + "Order", + -12.602906227111816 + ], + [ + "▁miners", + -12.603158950805664 + ], + [ + "▁teas", + -12.603282928466797 + ], + [ + "▁perennial", + -12.603316307067873 + ], + [ + "▁bosses", + -12.603322982788086 + ], + [ + "Bob", + -12.603371620178224 + ], + [ + "▁meanings", + -12.603556632995604 + ], + [ + "▁Sel", + -12.60367202758789 + ], + [ + "▁Brock", + -12.603779792785645 + ], + [ + "110", + -12.603800773620604 + ], + [ + "Above", + -12.603819847106934 + ], + [ + "Company", + 
-12.603829383850098 + ], + [ + "shore", + -12.603843688964844 + ], + [ + "▁vents", + -12.603894233703612 + ], + [ + "sol", + -12.603978157043455 + ], + [ + "▁minimise", + -12.604053497314451 + ], + [ + "RD", + -12.604069709777832 + ], + [ + "▁Dur", + -12.604340553283691 + ], + [ + "▁Scouts", + -12.604385375976562 + ], + [ + "▁incorrectly", + -12.60439395904541 + ], + [ + "▁Robertson", + -12.604485511779783 + ], + [ + "▁Dimensions", + -12.604486465454102 + ], + [ + "▁Reporting", + -12.60450267791748 + ], + [ + "▁4%", + -12.604626655578612 + ], + [ + "while", + -12.604646682739258 + ], + [ + "▁compelled", + -12.60466480255127 + ], + [ + "▁offence", + -12.604691505432127 + ], + [ + "▁Proof", + -12.604772567749023 + ], + [ + "inger", + -12.604801177978516 + ], + [ + "▁Maritime", + -12.604819297790527 + ], + [ + "conditioned", + -12.60497760772705 + ], + [ + "7,000", + -12.604987144470217 + ], + [ + "▁guessed", + -12.605170249938965 + ], + [ + "▁sandals", + -12.60520362854004 + ], + [ + "▁tattoos", + -12.60532569885254 + ], + [ + "▁Sho", + -12.605456352233888 + ], + [ + "▁Feature", + -12.605487823486328 + ], + [ + "model", + -12.6055269241333 + ], + [ + "▁***", + -12.605528831481934 + ], + [ + "▁firefighters", + -12.605557441711426 + ], + [ + "▁scream", + -12.605643272399902 + ], + [ + "▁subsidiaries", + -12.60567569732666 + ], + [ + "▁gazebo", + -12.60571002960205 + ], + [ + "▁Retreat", + -12.60573959350586 + ], + [ + "▁Gain", + -12.60574436187744 + ], + [ + "storm", + -12.605774879455566 + ], + [ + "▁backups", + -12.605817794799805 + ], + [ + "▁softball", + -12.605957984924316 + ], + [ + "▁che", + -12.605965614318848 + ], + [ + "Mi", + -12.606038093566896 + ], + [ + "▁Frankfurt", + -12.60629940032959 + ], + [ + "mode", + -12.606613159179688 + ], + [ + "▁prince", + -12.606667518615724 + ], + [ + "▁optimizing", + -12.606697082519531 + ], + [ + "fed", + -12.606942176818848 + ], + [ + "▁Midlands", + -12.606987953186035 + ], + [ + "▁tempo", + -12.607013702392578 + ], + [ + "▁Ven", + -12.607017517089844 + ], + [ + "▁Agile", + -12.60707664489746 + ], + [ + "chy", + -12.60709285736084 + ], + [ + "▁caller", + -12.607148170471191 + ], + [ + "Vi", + -12.607192993164062 + ], + [ + "▁ARM", + -12.607281684875488 + ], + [ + "chat", + -12.607354164123535 + ], + [ + "yer", + -12.607439994812012 + ], + [ + "▁bats", + -12.607568740844728 + ], + [ + "▁decals", + -12.607593536376951 + ], + [ + "▁Sanctuary", + -12.60777759552002 + ], + [ + "▁hatred", + -12.60779857635498 + ], + [ + "▁Pierce", + -12.607953071594238 + ], + [ + "▁UFC", + -12.608030319213867 + ], + [ + "▁BJP", + -12.608049392700195 + ], + [ + "Cor", + -12.608084678649902 + ], + [ + "▁alterations", + -12.608195304870604 + ], + [ + "▁(15", + -12.608216285705566 + ], + [ + "▁pinpoint", + -12.608254432678224 + ], + [ + "▁assorted", + -12.608375549316406 + ], + [ + "▁customised", + -12.608430862426758 + ], + [ + "▁fairness", + -12.608550071716309 + ], + [ + "▁Curry", + -12.608564376831056 + ], + [ + "▁oversees", + -12.608609199523926 + ], + [ + "inc", + -12.60865306854248 + ], + [ + "▁4-5", + -12.608665466308594 + ], + [ + "▁consortium", + -12.60874366760254 + ], + [ + "▁forehead", + -12.60878562927246 + ], + [ + "plate", + -12.608799934387209 + ], + [ + "▁metaphor", + -12.6088285446167 + ], + [ + "▁faded", + -12.608956336975098 + ], + [ + "vale", + -12.60899829864502 + ], + [ + "ces", + -12.609067916870115 + ], + [ + "Log", + -12.609074592590332 + ], + [ + "▁flap", + -12.609347343444824 + ], + [ + "ologist", + -12.609402656555176 + ], + [ + "▁rehearsal", 
+ -12.60948371887207 + ], + [ + "vey", + -12.609517097473145 + ], + [ + "▁Organisation", + -12.609527587890623 + ], + [ + "▁recognizable", + -12.609768867492676 + ], + [ + "▁tubs", + -12.609795570373535 + ], + [ + "▁snapped", + -12.609833717346191 + ], + [ + "▁opioid", + -12.609837532043455 + ], + [ + "▁Zhang", + -12.609877586364746 + ], + [ + "slip", + -12.609889030456545 + ], + [ + "▁brides", + -12.610098838806152 + ], + [ + "▁prosecutors", + -12.610108375549316 + ], + [ + "ther", + -12.610191345214844 + ], + [ + "▁stubborn", + -12.610492706298828 + ], + [ + "▁Lenovo", + -12.610565185546877 + ], + [ + "igan", + -12.610578536987305 + ], + [ + "Cost", + -12.610580444335938 + ], + [ + "▁laughs", + -12.610600471496582 + ], + [ + "music", + -12.610602378845217 + ], + [ + "▁dazzling", + -12.610793113708496 + ], + [ + "▁40,000", + -12.610795974731444 + ], + [ + "▁Bars", + -12.610815048217772 + ], + [ + "▁imp", + -12.610841751098633 + ], + [ + "▁Coleman", + -12.610852241516112 + ], + [ + "▁underwent", + -12.610854148864746 + ], + [ + "▁Yemen", + -12.610902786254885 + ], + [ + "▁cottages", + -12.61093807220459 + ], + [ + "▁percussion", + -12.610944747924805 + ], + [ + "▁Boo", + -12.610952377319336 + ], + [ + "▁deaf", + -12.610952377319336 + ], + [ + "▁cabinetry", + -12.611069679260254 + ], + [ + "▁nominal", + -12.611148834228516 + ], + [ + "▁Eu", + -12.611358642578123 + ], + [ + "▁Fight", + -12.611413955688477 + ], + [ + "▁Celebrate", + -12.611462593078612 + ], + [ + "▁veterinarian", + -12.611559867858888 + ], + [ + "▁Strategies", + -12.61172103881836 + ], + [ + "▁cookbook", + -12.611749649047852 + ], + [ + "▁locating", + -12.611799240112305 + ], + [ + "True", + -12.611873626708984 + ], + [ + "▁Hwy", + -12.611909866333008 + ], + [ + "▁hull", + -12.611912727355955 + ], + [ + "▁ISP", + -12.611945152282717 + ], + [ + "▁Punjab", + -12.611985206604004 + ], + [ + "▁knocking", + -12.611995697021484 + ], + [ + "▁wording", + -12.6119966506958 + ], + [ + "▁Lastly", + -12.61212158203125 + ], + [ + "SG", + -12.612228393554688 + ], + [ + "ages", + -12.612229347229004 + ], + [ + "▁Arbor", + -12.61223030090332 + ], + [ + "▁Kam", + -12.612327575683594 + ], + [ + "▁coolest", + -12.612327575683594 + ], + [ + "▁loaf", + -12.612387657165527 + ], + [ + "rant", + -12.612406730651855 + ], + [ + "vit", + -12.612530708312988 + ], + [ + "▁faults", + -12.612563133239746 + ], + [ + "▁limbs", + -12.612627983093262 + ], + [ + "▁clubhouse", + -12.612640380859377 + ], + [ + "iff", + -12.612801551818848 + ], + [ + "▁bombing", + -12.61281394958496 + ], + [ + "▁Trophy", + -12.6128511428833 + ], + [ + "▁GOOD", + -12.612874984741213 + ], + [ + "▁homeland", + -12.61302661895752 + ], + [ + "▁primitive", + -12.61311149597168 + ], + [ + "▁Accent", + -12.613202095031738 + ], + [ + "▁Burke", + -12.61327838897705 + ], + [ + "▁Topics", + -12.61329174041748 + ], + [ + "▁Omni", + -12.61329746246338 + ], + [ + "▁beige", + -12.613346099853516 + ], + [ + "▁mosquito", + -12.61373519897461 + ], + [ + "arm", + -12.613761901855469 + ], + [ + "▁fragrant", + -12.613877296447754 + ], + [ + "220", + -12.613903045654297 + ], + [ + "Things", + -12.614014625549316 + ], + [ + "▁culturally", + -12.614078521728516 + ], + [ + "▁vomiting", + -12.614084243774414 + ], + [ + "▁Beaver", + -12.614105224609377 + ], + [ + "▁Throw", + -12.614178657531738 + ], + [ + "must", + -12.614237785339355 + ], + [ + "▁1977", + -12.61428451538086 + ], + [ + "cur", + -12.61431884765625 + ], + [ + "▁Palestine", + -12.614468574523926 + ], + [ + "ND", + -12.614484786987305 + ], + [ + 
"5,000", + -12.61467170715332 + ], + [ + "▁SAN", + -12.614686965942385 + ], + [ + "graph", + -12.614712715148926 + ], + [ + "▁measurable", + -12.614837646484377 + ], + [ + "▁reputed", + -12.614991188049316 + ], + [ + "▁uni", + -12.615035057067873 + ], + [ + "▁Chancellor", + -12.615232467651367 + ], + [ + "chair", + -12.615256309509276 + ], + [ + "▁wicked", + -12.615269660949709 + ], + [ + "▁wildly", + -12.61528205871582 + ], + [ + "▁Signal", + -12.61541748046875 + ], + [ + "ITY", + -12.615514755249023 + ], + [ + "mounted", + -12.615571022033691 + ], + [ + "▁drilled", + -12.615592002868652 + ], + [ + "▁Diversity", + -12.615715980529783 + ], + [ + "▁Ages", + -12.615772247314451 + ], + [ + "mix", + -12.615806579589844 + ], + [ + "FE", + -12.61583423614502 + ], + [ + "▁Neighborhood", + -12.615889549255373 + ], + [ + "▁zucchini", + -12.615937232971191 + ], + [ + "▁Registry", + -12.615941047668455 + ], + [ + "▁systematically", + -12.615985870361328 + ], + [ + "▁Slim", + -12.616100311279297 + ], + [ + "▁epoxy", + -12.616280555725098 + ], + [ + "▁intuition", + -12.616333961486816 + ], + [ + "▁unrelated", + -12.616565704345703 + ], + [ + "▁dances", + -12.616609573364258 + ], + [ + "▁robotics", + -12.61661434173584 + ], + [ + "▁1947", + -12.61673641204834 + ], + [ + "▁Religion", + -12.616761207580566 + ], + [ + "AN", + -12.616893768310549 + ], + [ + "▁Classmates", + -12.617003440856934 + ], + [ + "▁astounding", + -12.617055892944336 + ], + [ + "▁Done", + -12.617155075073242 + ], + [ + "▁Object", + -12.61728858947754 + ], + [ + "▁Catch", + -12.617300987243652 + ], + [ + "▁Creation", + -12.617401123046877 + ], + [ + "▁broaden", + -12.617419242858888 + ], + [ + "▁commissioner", + -12.617461204528809 + ], + [ + "▁Programming", + -12.61750030517578 + ], + [ + "▁poisoning", + -12.61752223968506 + ], + [ + "▁counselors", + -12.617523193359377 + ], + [ + "chain", + -12.617565155029297 + ], + [ + "▁Quartz", + -12.617573738098145 + ], + [ + "▁Stuff", + -12.61762523651123 + ], + [ + "▁hierarchy", + -12.618001937866213 + ], + [ + "▁Forums", + -12.618003845214844 + ], + [ + "▁swings", + -12.618003845214844 + ], + [ + "▁Gre", + -12.618040084838867 + ], + [ + "▁Shan", + -12.618098258972168 + ], + [ + "▁Origin", + -12.6182279586792 + ], + [ + "7,", + -12.61827564239502 + ], + [ + "19.", + -12.618309020996094 + ], + [ + "▁Adidas", + -12.618368148803713 + ], + [ + "▁TVs", + -12.618486404418944 + ], + [ + "▁Present", + -12.618680000305176 + ], + [ + "▁follower", + -12.618820190429688 + ], + [ + "chin", + -12.618983268737791 + ], + [ + "▁confidently", + -12.619144439697266 + ], + [ + "▁catchy", + -12.619149208068848 + ], + [ + "Ver", + -12.61965560913086 + ], + [ + "▁1976", + -12.619668960571287 + ], + [ + "▁Module", + -12.619766235351562 + ], + [ + "▁Davies", + -12.619799613952637 + ], + [ + "▁Mara", + -12.619848251342772 + ], + [ + "▁wreath", + -12.619906425476074 + ], + [ + "▁Estates", + -12.619972229003906 + ], + [ + "▁minimizing", + -12.620071411132812 + ], + [ + "▁dragged", + -12.620088577270508 + ], + [ + "camp", + -12.620121955871582 + ], + [ + "▁kidding", + -12.62013816833496 + ], + [ + "▁mirrored", + -12.62017059326172 + ], + [ + "▁4:00", + -12.62027645111084 + ], + [ + "ics", + -12.620298385620115 + ], + [ + "▁billed", + -12.620359420776367 + ], + [ + "▁athletics", + -12.620465278625488 + ], + [ + "▁ponds", + -12.62049961090088 + ], + [ + "▁Libya", + -12.620525360107422 + ], + [ + "▁cooks", + -12.620609283447266 + ], + [ + "lla", + -12.620611190795898 + ], + [ + "sports", + -12.620619773864746 + ], + [ + 
"Cook", + -12.620716094970703 + ], + [ + "PER", + -12.620882987976074 + ], + [ + "▁lipstick", + -12.620977401733398 + ], + [ + "▁birthdays", + -12.621074676513672 + ], + [ + "▁muffins", + -12.621078491210938 + ], + [ + "▁YA", + -12.62110710144043 + ], + [ + "▁peninsula", + -12.621108055114746 + ], + [ + "FI", + -12.62112045288086 + ], + [ + "▁roasting", + -12.621172904968262 + ], + [ + "▁lakh", + -12.62119960784912 + ], + [ + "▁maze", + -12.621209144592283 + ], + [ + "confidence", + -12.621739387512209 + ], + [ + "▁Smooth", + -12.621780395507812 + ], + [ + "▁soaked", + -12.621837615966797 + ], + [ + "▁Funds", + -12.621849060058594 + ], + [ + "▁Trainer", + -12.621851921081545 + ], + [ + "▁APK", + -12.621914863586426 + ], + [ + "▁injected", + -12.62221908569336 + ], + [ + "▁crib", + -12.622662544250488 + ], + [ + "20.", + -12.622675895690918 + ], + [ + "▁Trees", + -12.62281322479248 + ], + [ + "▁actionable", + -12.62288761138916 + ], + [ + "▁Retro", + -12.622899055480955 + ], + [ + "▁Condition", + -12.622958183288574 + ], + [ + "▁magnets", + -12.622970581054688 + ], + [ + "▁Sustainability", + -12.623052597045898 + ], + [ + "▁cohort", + -12.623054504394531 + ], + [ + "650", + -12.623058319091797 + ], + [ + "▁apk", + -12.623167037963867 + ], + [ + "▁dipping", + -12.623242378234863 + ], + [ + "▁trumpet", + -12.623285293579102 + ], + [ + "GL", + -12.623290061950684 + ], + [ + "cool", + -12.62335968017578 + ], + [ + "▁JD", + -12.62339210510254 + ], + [ + "▁enacted", + -12.623398780822754 + ], + [ + "▁Operator", + -12.623580932617188 + ], + [ + "▁Panda", + -12.623640060424805 + ], + [ + "▁lieu", + -12.623743057250977 + ], + [ + "▁revise", + -12.62375831604004 + ], + [ + "▁predicting", + -12.623835563659668 + ], + [ + "▁Estonia", + -12.623916625976562 + ], + [ + "▁fasting", + -12.623941421508787 + ], + [ + "kins", + -12.6239595413208 + ], + [ + "▁empirical", + -12.623988151550291 + ], + [ + "▁108", + -12.624015808105469 + ], + [ + "▁motorcycles", + -12.624187469482422 + ], + [ + "▁voluntarily", + -12.624222755432127 + ], + [ + "▁gravy", + -12.624226570129396 + ], + [ + "▁refusing", + -12.624227523803713 + ], + [ + "▁enchanting", + -12.624363899230955 + ], + [ + "▁Sc", + -12.624452590942385 + ], + [ + "▁hubby", + -12.62446403503418 + ], + [ + "▁progresses", + -12.624488830566406 + ], + [ + "▁Organizations", + -12.62451457977295 + ], + [ + "VI", + -12.624553680419922 + ], + [ + "▁africa", + -12.624570846557615 + ], + [ + "▁Evidence", + -12.624574661254885 + ], + [ + "▁Speak", + -12.624688148498535 + ], + [ + "iser", + -12.624710083007812 + ], + [ + "▁septic", + -12.624764442443848 + ], + [ + "▁elephants", + -12.624768257141112 + ], + [ + "uh", + -12.624874114990234 + ], + [ + "Text", + -12.624919891357422 + ], + [ + "INE", + -12.625022888183594 + ], + [ + "▁Flickr", + -12.625064849853516 + ], + [ + "94", + -12.625086784362791 + ], + [ + "▁Novel", + -12.625317573547363 + ], + [ + "▁timeframe", + -12.625361442565918 + ], + [ + "▁adapting", + -12.625368118286133 + ], + [ + "▁Panthers", + -12.625672340393066 + ], + [ + "▁programmers", + -12.625675201416016 + ], + [ + "▁Bosch", + -12.625680923461914 + ], + [ + "▁Collections", + -12.62574863433838 + ], + [ + "functional", + -12.626014709472656 + ], + [ + "▁Bradford", + -12.62611961364746 + ], + [ + "driving", + -12.626136779785156 + ], + [ + "▁Scotia", + -12.626477241516112 + ], + [ + "TMs", + -12.626480102539062 + ], + [ + "▁Locksmith", + -12.6265287399292 + ], + [ + "▁handbook", + -12.626535415649414 + ], + [ + "▁Volunteers", + -12.62661838531494 + ], + 
[ + "▁Leslie", + -12.62668800354004 + ], + [ + "▁illuminated", + -12.626715660095217 + ], + [ + "▁Reduce", + -12.626747131347656 + ], + [ + "borne", + -12.626770973205566 + ], + [ + "▁AIDS", + -12.626785278320312 + ], + [ + "▁PIN", + -12.626836776733398 + ], + [ + "Definition", + -12.626873970031738 + ], + [ + "rey", + -12.626877784729004 + ], + [ + "▁lacked", + -12.626911163330078 + ], + [ + "▁Sing", + -12.627087593078612 + ], + [ + "Ensure", + -12.627124786376951 + ], + [ + "▁unite", + -12.62727165222168 + ], + [ + "BER", + -12.62738037109375 + ], + [ + "▁Researchers", + -12.6273832321167 + ], + [ + "eon", + -12.627495765686035 + ], + [ + "listed", + -12.627591133117676 + ], + [ + "▁admired", + -12.627642631530762 + ], + [ + "lea", + -12.627655029296877 + ], + [ + "ifying", + -12.62779426574707 + ], + [ + "▁Michel", + -12.62781047821045 + ], + [ + "sys", + -12.62789821624756 + ], + [ + "▁Created", + -12.627955436706545 + ], + [ + "▁scrolling", + -12.627973556518556 + ], + [ + "▁stair", + -12.628158569335938 + ], + [ + "traditional", + -12.628198623657228 + ], + [ + "▁Counsel", + -12.628254890441896 + ], + [ + "tour", + -12.628371238708496 + ], + [ + "fun", + -12.628419876098633 + ], + [ + "BY", + -12.628446578979492 + ], + [ + "▁ju", + -12.62855339050293 + ], + [ + "▁demographics", + -12.628700256347656 + ], + [ + "▁External", + -12.628761291503906 + ], + [ + "▁whales", + -12.62878704071045 + ], + [ + "▁practise", + -12.628835678100586 + ], + [ + "ern", + -12.628902435302734 + ], + [ + "▁Foam", + -12.629097938537598 + ], + [ + "▁Lit", + -12.629207611083984 + ], + [ + "CF", + -12.629244804382324 + ], + [ + "▁1996.", + -12.629263877868652 + ], + [ + "▁apparatus", + -12.629295349121094 + ], + [ + "▁antibiotic", + -12.629405975341797 + ], + [ + "▁centerpiece", + -12.629615783691406 + ], + [ + "▁justification", + -12.629640579223633 + ], + [ + "▁Mixed", + -12.629700660705566 + ], + [ + "▁stripe", + -12.630054473876951 + ], + [ + "▁lifecycle", + -12.630165100097656 + ], + [ + "Thursday", + -12.630269050598145 + ], + [ + "▁Costume", + -12.630362510681152 + ], + [ + "▁academia", + -12.63048267364502 + ], + [ + "▁Dominican", + -12.63049030303955 + ], + [ + "▁Penny", + -12.630556106567385 + ], + [ + "ller", + -12.63056182861328 + ], + [ + "headed", + -12.63058090209961 + ], + [ + "/2019", + -12.630642890930176 + ], + [ + "Includes", + -12.63070011138916 + ], + [ + "▁Banner", + -12.630738258361816 + ], + [ + "job", + -12.63085651397705 + ], + [ + "▁nephew", + -12.630902290344238 + ], + [ + "ators", + -12.631053924560549 + ], + [ + "▁HI", + -12.631147384643556 + ], + [ + "▁tapes", + -12.63115692138672 + ], + [ + "▁Blogger", + -12.631180763244627 + ], + [ + "▁volunteered", + -12.63132381439209 + ], + [ + "▁powerhouse", + -12.631386756896973 + ], + [ + "▁DD", + -12.631470680236816 + ], + [ + "▁Br", + -12.631752014160156 + ], + [ + "▁paragraphs", + -12.631793022155762 + ], + [ + "mag", + -12.631796836853027 + ], + [ + "▁tele", + -12.63197898864746 + ], + [ + "BI", + -12.631996154785156 + ], + [ + "▁Thu", + -12.63230037689209 + ], + [ + "▁decisive", + -12.632336616516112 + ], + [ + "little", + -12.632471084594728 + ], + [ + "▁Shane", + -12.632660865783691 + ], + [ + "Winter", + -12.632668495178224 + ], + [ + "▁Tiny", + -12.632678031921388 + ], + [ + "LED", + -12.632821083068848 + ], + [ + "present", + -12.633042335510254 + ], + [ + "▁prose", + -12.633152961730955 + ], + [ + "▁amusing", + -12.633288383483888 + ], + [ + "▁Forms", + -12.63329029083252 + ], + [ + "▁Ninja", + -12.63337516784668 + ], + [ + 
"9,000", + -12.63347625732422 + ], + [ + "▁compulsory", + -12.63353157043457 + ], + [ + "Clear", + -12.633609771728516 + ], + [ + "▁Practices", + -12.63372039794922 + ], + [ + "▁Pvt", + -12.633740425109863 + ], + [ + "ace", + -12.633828163146973 + ], + [ + "▁consoles", + -12.633840560913086 + ], + [ + "should", + -12.633907318115234 + ], + [ + "kal", + -12.634078025817873 + ], + [ + "▁inmates", + -12.634249687194824 + ], + [ + "▁spoil", + -12.634369850158691 + ], + [ + "▁prevalence", + -12.63437271118164 + ], + [ + "▁Monkey", + -12.634384155273438 + ], + [ + "▁detrimental", + -12.63446044921875 + ], + [ + "▁booths", + -12.634464263916016 + ], + [ + "▁Catalog", + -12.63460636138916 + ], + [ + "▁1-3", + -12.634672164916992 + ], + [ + "▁Petroleum", + -12.63467788696289 + ], + [ + "▁plywood", + -12.634693145751951 + ], + [ + "factor", + -12.634709358215332 + ], + [ + "▁secretly", + -12.634899139404297 + ], + [ + "▁DAY", + -12.634928703308104 + ], + [ + "TC", + -12.635038375854492 + ], + [ + "▁Phi", + -12.635225296020508 + ], + [ + "▁nonsense", + -12.63539695739746 + ], + [ + "flight", + -12.635536193847656 + ], + [ + "uni", + -12.635600090026855 + ], + [ + "▁tremendously", + -12.635664939880373 + ], + [ + "▁Submit", + -12.635702133178713 + ], + [ + "▁dimensional", + -12.635722160339355 + ], + [ + "▁Suitable", + -12.635819435119627 + ], + [ + "▁Sil", + -12.63587760925293 + ], + [ + "founded", + -12.635905265808104 + ], + [ + "▁curl", + -12.63595962524414 + ], + [ + "▁clan", + -12.635969161987305 + ], + [ + "-04", + -12.636102676391602 + ], + [ + "▁SITE", + -12.636173248291016 + ], + [ + "Eye", + -12.63621997833252 + ], + [ + "▁Slots", + -12.636226654052734 + ], + [ + "Ke", + -12.63625717163086 + ], + [ + "▁flown", + -12.636301040649414 + ], + [ + "▁3.1", + -12.636311531066896 + ], + [ + "▁vaccination", + -12.636474609375 + ], + [ + "▁unlawful", + -12.636539459228516 + ], + [ + "▁Were", + -12.636621475219728 + ], + [ + "▁Citizen", + -12.636639595031738 + ], + [ + "▁pumped", + -12.636693954467772 + ], + [ + "▁disadvantages", + -12.63670539855957 + ], + [ + "Hopefully", + -12.636741638183594 + ], + [ + "▁dependency", + -12.636795043945312 + ], + [ + "▁malls", + -12.636818885803224 + ], + [ + "▁Georgetown", + -12.637024879455566 + ], + [ + "▁$75", + -12.637090682983398 + ], + [ + "leaf", + -12.637126922607422 + ], + [ + "▁authorised", + -12.63714599609375 + ], + [ + "III", + -12.637166976928713 + ], + [ + "Ch", + -12.63735008239746 + ], + [ + "los", + -12.637431144714355 + ], + [ + "▁Krishna", + -12.637548446655272 + ], + [ + "▁frying", + -12.637665748596191 + ], + [ + "CN", + -12.637710571289062 + ], + [ + "▁Maggie", + -12.63779640197754 + ], + [ + "Request", + -12.637819290161133 + ], + [ + "▁coincidence", + -12.637845993041992 + ], + [ + "▁motif", + -12.63794994354248 + ], + [ + "▁fleece", + -12.637954711914062 + ], + [ + "▁tying", + -12.637964248657228 + ], + [ + "▁payout", + -12.638117790222168 + ], + [ + "Customer", + -12.638240814208984 + ], + [ + "▁flee", + -12.638250350952148 + ], + [ + "▁Saturn", + -12.638283729553224 + ], + [ + "▁Mit", + -12.638310432434082 + ], + [ + "▁6,000", + -12.63832664489746 + ], + [ + "une", + -12.63844871520996 + ], + [ + "▁conditioned", + -12.638534545898438 + ], + [ + "▁muffin", + -12.638554573059082 + ], + [ + "direct", + -12.638751983642578 + ], + [ + "▁3.2", + -12.638809204101562 + ], + [ + "would", + -12.638894081115724 + ], + [ + "▁SG", + -12.6389741897583 + ], + [ + "▁yourselves", + -12.639152526855469 + ], + [ + "▁Mah", + -12.639179229736328 + ], + [ + 
"▁99%", + -12.63924503326416 + ], + [ + "▁starred", + -12.639280319213867 + ], + [ + "▁puck", + -12.63933277130127 + ], + [ + "▁olives", + -12.639480590820312 + ], + [ + "corn", + -12.63952350616455 + ], + [ + "▁Zi", + -12.63969898223877 + ], + [ + "▁Shawn", + -12.639886856079102 + ], + [ + "▁immersion", + -12.639946937561035 + ], + [ + "▁canine", + -12.639978408813477 + ], + [ + "▁CDC", + -12.640018463134766 + ], + [ + "▁Mustang", + -12.640045166015623 + ], + [ + "▁reinforcement", + -12.640057563781738 + ], + [ + "▁12:00", + -12.64012336730957 + ], + [ + "▁Vector", + -12.640298843383787 + ], + [ + "bri", + -12.64035701751709 + ], + [ + "▁noticing", + -12.640446662902832 + ], + [ + "▁anesthesia", + -12.640542030334473 + ], + [ + "Lots", + -12.64061450958252 + ], + [ + "DT", + -12.640618324279783 + ], + [ + "▁whistle", + -12.640663146972656 + ], + [ + "▁Animals", + -12.640721321105955 + ], + [ + "inn", + -12.640951156616213 + ], + [ + "▁redundant", + -12.641003608703612 + ], + [ + "▁112", + -12.6410493850708 + ], + [ + "▁Americas", + -12.641095161437988 + ], + [ + "▁amplifier", + -12.64134120941162 + ], + [ + "▁FT", + -12.641343116760254 + ], + [ + "▁CG", + -12.641395568847656 + ], + [ + "therapy", + -12.641407012939451 + ], + [ + "▁на", + -12.641407012939451 + ], + [ + "▁daring", + -12.64141845703125 + ], + [ + "▁Burn", + -12.641424179077148 + ], + [ + "▁Consultants", + -12.641438484191896 + ], + [ + "▁rebound", + -12.641555786132812 + ], + [ + "▁mash", + -12.641573905944824 + ], + [ + "▁expenditures", + -12.6419038772583 + ], + [ + "▁ol", + -12.641925811767578 + ], + [ + "pit", + -12.641972541809082 + ], + [ + "▁Warehouse", + -12.642034530639648 + ], + [ + "▁YES", + -12.642407417297363 + ], + [ + "▁Smoke", + -12.642436981201172 + ], + [ + "▁Bloom", + -12.642499923706056 + ], + [ + "▁overcoming", + -12.642515182495115 + ], + [ + "ular", + -12.642560958862305 + ], + [ + "▁ND", + -12.642613410949709 + ], + [ + "▁handset", + -12.642630577087402 + ], + [ + "▁dominance", + -12.643121719360352 + ], + [ + "▁Libraries", + -12.643125534057615 + ], + [ + "▁ni", + -12.643298149108888 + ], + [ + "TX", + -12.64339828491211 + ], + [ + "nya", + -12.643425941467283 + ], + [ + "▁endeavour", + -12.643589973449709 + ], + [ + "▁13-", + -12.643610000610352 + ], + [ + "umi", + -12.643678665161133 + ], + [ + "▁Robot", + -12.643706321716309 + ], + [ + "▁fenced", + -12.643871307373049 + ], + [ + "▁Known", + -12.643898963928224 + ], + [ + "▁absurd", + -12.643903732299805 + ], + [ + "▁cal", + -12.643924713134766 + ], + [ + "▁trustee", + -12.643933296203612 + ], + [ + "▁Script", + -12.64414405822754 + ], + [ + "▁tunnels", + -12.644159317016602 + ], + [ + "Cha", + -12.644210815429688 + ], + [ + "▁Peel", + -12.644288063049316 + ], + [ + "▁mower", + -12.644393920898438 + ], + [ + "▁4-6", + -12.644404411315918 + ], + [ + "stan", + -12.64507007598877 + ], + [ + "▁GIS", + -12.64516258239746 + ], + [ + "▁drank", + -12.645273208618164 + ], + [ + "▁Optimization", + -12.645277976989746 + ], + [ + "Fortunately", + -12.645405769348145 + ], + [ + "▁snakes", + -12.645484924316406 + ], + [ + "sponsored", + -12.645516395568848 + ], + [ + "▁104", + -12.645551681518556 + ], + [ + "▁interrupted", + -12.645569801330566 + ], + [ + "Tap", + -12.645703315734863 + ], + [ + "▁Georgian", + -12.64575481414795 + ], + [ + "▁flakes", + -12.645986557006836 + ], + [ + "▁visions", + -12.646100044250488 + ], + [ + "▁raspberry", + -12.646129608154297 + ], + [ + "▁PLEASE", + -12.646286964416504 + ], + [ + "▁clarification", + -12.646306037902832 + ], + 
[ + "▁Roosevelt", + -12.646367073059082 + ], + [ + "8%", + -12.646377563476562 + ], + [ + "gli", + -12.646628379821776 + ], + [ + "▁Walsh", + -12.646668434143066 + ], + [ + "▁Parkway", + -12.646679878234863 + ], + [ + "▁centralized", + -12.64681911468506 + ], + [ + "▁dismiss", + -12.646820068359377 + ], + [ + "▁Decoration", + -12.646844863891602 + ], + [ + "▁toured", + -12.64702320098877 + ], + [ + "▁Holder", + -12.64706325531006 + ], + [ + "▁Spiritual", + -12.64709186553955 + ], + [ + "▁Demo", + -12.647167205810549 + ], + [ + "▁watercolor", + -12.647330284118652 + ], + [ + "▁warfare", + -12.64739227294922 + ], + [ + "▁suspend", + -12.647430419921877 + ], + [ + "Market", + -12.647480010986328 + ], + [ + "▁Santiago", + -12.647591590881348 + ], + [ + "9)", + -12.64763641357422 + ], + [ + "▁Kel", + -12.64768886566162 + ], + [ + "▁Mystery", + -12.647744178771973 + ], + [ + "▁supermarkets", + -12.647882461547852 + ], + [ + "▁Wo", + -12.64791202545166 + ], + [ + "nate", + -12.647923469543455 + ], + [ + "▁survivor", + -12.648115158081056 + ], + [ + "▁painters", + -12.648274421691896 + ], + [ + "▁Mart", + -12.64829158782959 + ], + [ + "Around", + -12.648515701293944 + ], + [ + "yet", + -12.648564338684082 + ], + [ + "▁kings", + -12.64857578277588 + ], + [ + "▁disco", + -12.648577690124512 + ], + [ + "▁Naples", + -12.648712158203123 + ], + [ + "▁WWII", + -12.648785591125488 + ], + [ + "▁Portfolio", + -12.648887634277344 + ], + [ + "▁Dust", + -12.648943901062012 + ], + [ + "▁190", + -12.648951530456545 + ], + [ + "▁GL", + -12.64895725250244 + ], + [ + "ager", + -12.649155616760254 + ], + [ + "Base", + -12.649276733398438 + ], + [ + "▁melodies", + -12.649500846862791 + ], + [ + "▁grassroots", + -12.649505615234377 + ], + [ + "▁Tibetan", + -12.649614334106444 + ], + [ + "▁prepaid", + -12.649638175964355 + ], + [ + "▁Syn", + -12.649654388427734 + ], + [ + "▁padding", + -12.649724006652832 + ], + [ + "▁firearm", + -12.649767875671388 + ], + [ + "▁Billboard", + -12.64987850189209 + ], + [ + "▁comedian", + -12.64995002746582 + ], + [ + "▁turtle", + -12.650022506713867 + ], + [ + "▁Gem", + -12.65009593963623 + ], + [ + "▁rhythms", + -12.650096893310549 + ], + [ + "Internet", + -12.650201797485352 + ], + [ + "▁1974", + -12.650324821472168 + ], + [ + "▁Aberdeen", + -12.650434494018556 + ], + [ + "▁Covers", + -12.650522232055664 + ], + [ + "▁Yan", + -12.650579452514648 + ], + [ + "▁Clip", + -12.65059757232666 + ], + [ + "▁Gorgeous", + -12.65065574645996 + ], + [ + "▁stacks", + -12.65072250366211 + ], + [ + "▁Fl", + -12.650800704956056 + ], + [ + "▁smiled", + -12.65084457397461 + ], + [ + "▁fragments", + -12.650863647460938 + ], + [ + "▁reviewers", + -12.650978088378906 + ], + [ + "hours", + -12.65138339996338 + ], + [ + "▁il", + -12.65141773223877 + ], + [ + "Generally", + -12.651453971862791 + ], + [ + "▁Offices", + -12.651460647583008 + ], + [ + "▁selfie", + -12.651484489440918 + ], + [ + "network", + -12.65157985687256 + ], + [ + "▁artworks", + -12.65158462524414 + ], + [ + "▁saint", + -12.651633262634276 + ], + [ + "▁envy", + -12.651688575744627 + ], + [ + "▁fares", + -12.651906967163086 + ], + [ + "Speed", + -12.6519136428833 + ], + [ + "management", + -12.651962280273438 + ], + [ + "▁polishing", + -12.652010917663574 + ], + [ + "▁landfill", + -12.652297019958496 + ], + [ + "▁Eva", + -12.652342796325684 + ], + [ + "int", + -12.652451515197754 + ], + [ + "growth", + -12.652544021606444 + ], + [ + "Former", + -12.65263557434082 + ], + [ + "▁trimmed", + -12.652670860290527 + ], + [ + "▁braking", + 
-12.65272045135498 + ], + [ + "▁captive", + -12.652737617492676 + ], + [ + "▁treaty", + -12.65274143218994 + ], + [ + "clear", + -12.652770042419434 + ], + [ + "▁Phillip", + -12.652775764465332 + ], + [ + "gio", + -12.652791976928713 + ], + [ + "▁busiest", + -12.652822494506836 + ], + [ + "grass", + -12.65293312072754 + ], + [ + "Moving", + -12.653066635131836 + ], + [ + "▁soaring", + -12.653128623962402 + ], + [ + "▁audition", + -12.653226852416992 + ], + [ + "tl", + -12.653301239013672 + ], + [ + "▁pencils", + -12.65336799621582 + ], + [ + "▁LEDs", + -12.653450965881348 + ], + [ + "coin", + -12.653485298156738 + ], + [ + "▁Invest", + -12.653550148010254 + ], + [ + "Im", + -12.65366268157959 + ], + [ + "▁Backup", + -12.653718948364258 + ], + [ + "▁2500", + -12.653745651245115 + ], + [ + "▁Established", + -12.65376091003418 + ], + [ + "▁reliably", + -12.65377712249756 + ], + [ + "▁separator", + -12.65377712249756 + ], + [ + "▁regression", + -12.653806686401367 + ], + [ + "▁villain", + -12.653846740722656 + ], + [ + "Mon", + -12.653944969177246 + ], + [ + "▁Tang", + -12.653961181640623 + ], + [ + "▁bargains", + -12.654043197631836 + ], + [ + "▁Sharma", + -12.654141426086426 + ], + [ + "▁Elements", + -12.65431308746338 + ], + [ + "▁KA", + -12.654492378234863 + ], + [ + "▁Auction", + -12.654706001281738 + ], + [ + "▁stigma", + -12.654850959777832 + ], + [ + "▁cuff", + -12.655001640319824 + ], + [ + "▁$18", + -12.655007362365724 + ], + [ + "▁telecom", + -12.655014038085938 + ], + [ + "▁OP", + -12.655080795288086 + ], + [ + "▁Roma", + -12.65510368347168 + ], + [ + "Community", + -12.655256271362305 + ], + [ + "▁Congo", + -12.655282020568848 + ], + [ + "Ja", + -12.65528678894043 + ], + [ + "▁Baldwin", + -12.655322074890137 + ], + [ + "▁balm", + -12.65537166595459 + ], + [ + "▁103", + -12.65544891357422 + ], + [ + "true", + -12.65546417236328 + ], + [ + "▁Miguel", + -12.655653953552246 + ], + [ + "▁Harold", + -12.65566349029541 + ], + [ + "▁cite", + -12.655694961547852 + ], + [ + "▁programmer", + -12.65570831298828 + ], + [ + "▁Saturdays", + -12.655729293823242 + ], + [ + "▁proposes", + -12.655871391296388 + ], + [ + "keeping", + -12.655908584594728 + ], + [ + "▁refusal", + -12.655921936035156 + ], + [ + "▁deeds", + -12.655962944030762 + ], + [ + "▁smash", + -12.656055450439451 + ], + [ + "▁NEVER", + -12.656118392944336 + ], + [ + "final", + -12.65616226196289 + ], + [ + "▁Types", + -12.656234741210938 + ], + [ + "▁Elk", + -12.656268119812012 + ], + [ + "Together", + -12.656353950500488 + ], + [ + "ONE", + -12.656495094299316 + ], + [ + "▁Returns", + -12.656672477722168 + ], + [ + "2.0", + -12.65700626373291 + ], + [ + "▁Shows", + -12.657039642333984 + ], + [ + "▁Fountain", + -12.657061576843262 + ], + [ + "▁roadside", + -12.65706729888916 + ], + [ + "▁wiped", + -12.657090187072754 + ], + [ + "▁progressing", + -12.657098770141602 + ], + [ + "93", + -12.657147407531738 + ], + [ + "▁experimentation", + -12.657224655151367 + ], + [ + "▁mistaken", + -12.65726089477539 + ], + [ + "▁Carpenter", + -12.657279014587402 + ], + [ + "▁null", + -12.657312393188477 + ], + [ + "▁Rim", + -12.657368659973145 + ], + [ + "▁healed", + -12.657371520996094 + ], + [ + "SON", + -12.657413482666016 + ], + [ + "client", + -12.657563209533691 + ], + [ + "Unless", + -12.657632827758787 + ], + [ + "▁cues", + -12.657782554626465 + ], + [ + "▁toner", + -12.658020973205566 + ], + [ + "▁slippery", + -12.658126831054688 + ], + [ + "▁Rouge", + -12.658127784729004 + ], + [ + "▁lowes", + -12.658232688903809 + ], + [ + "NR", + 
-12.658260345458984 + ], + [ + "▁Ear", + -12.658303260803224 + ], + [ + "▁aliens", + -12.658401489257812 + ], + [ + "▁Darren", + -12.658449172973633 + ], + [ + "▁VW", + -12.658488273620604 + ], + [ + "▁Cass", + -12.658604621887209 + ], + [ + "kh", + -12.65866470336914 + ], + [ + "Se", + -12.658878326416016 + ], + [ + "tum", + -12.658890724182127 + ], + [ + "▁Across", + -12.658961296081545 + ], + [ + "▁Rom", + -12.658963203430176 + ], + [ + "▁cumulative", + -12.659122467041016 + ], + [ + "▁Shape", + -12.659138679504396 + ], + [ + "▁flotation", + -12.659149169921877 + ], + [ + "▁Nikon", + -12.65920639038086 + ], + [ + "mil", + -12.659294128417969 + ], + [ + "hack", + -12.65934944152832 + ], + [ + "▁appealed", + -12.659507751464844 + ], + [ + "▁lava", + -12.659693717956545 + ], + [ + "▁ATV", + -12.660147666931152 + ], + [ + "▁injections", + -12.660283088684082 + ], + [ + "▁Triangle", + -12.660296440124512 + ], + [ + "▁tracker", + -12.6603364944458 + ], + [ + "ill", + -12.660375595092772 + ], + [ + "ao", + -12.660418510437012 + ], + [ + "▁Calvin", + -12.660568237304688 + ], + [ + "/19", + -12.66059684753418 + ], + [ + "Wall", + -12.660602569580078 + ], + [ + "▁markings", + -12.660896301269531 + ], + [ + "▁alley", + -12.66090965270996 + ], + [ + "MAX", + -12.660966873168944 + ], + [ + "▁signaling", + -12.661038398742676 + ], + [ + "365", + -12.661090850830078 + ], + [ + "▁trusts", + -12.661100387573242 + ], + [ + "▁Pod", + -12.661249160766602 + ], + [ + "▁brightly", + -12.661280632019045 + ], + [ + "position", + -12.661309242248535 + ], + [ + "worker", + -12.661324501037598 + ], + [ + "▁RNA", + -12.661370277404783 + ], + [ + "▁Hardwood", + -12.661502838134766 + ], + [ + "▁Lawyer", + -12.661544799804688 + ], + [ + "done", + -12.661636352539062 + ], + [ + "▁1997,", + -12.661638259887695 + ], + [ + "▁trimming", + -12.661657333374023 + ], + [ + "ivity", + -12.661699295043944 + ], + [ + "▁specialization", + -12.661792755126951 + ], + [ + "▁satellites", + -12.661901473999023 + ], + [ + "▁consulted", + -12.66203784942627 + ], + [ + "create", + -12.662120819091797 + ], + [ + "▁Pumpkin", + -12.66230297088623 + ], + [ + "tics", + -12.662361145019531 + ], + [ + "▁stimulus", + -12.662392616271973 + ], + [ + "▁Cru", + -12.662470817565918 + ], + [ + "▁Structure", + -12.662470817565918 + ], + [ + "▁distract", + -12.662723541259766 + ], + [ + "▁soups", + -12.662809371948242 + ], + [ + "▁breasts", + -12.66283130645752 + ], + [ + "▁sharply", + -12.662878036499023 + ], + [ + "▁Vacation", + -12.662992477416992 + ], + [ + "▁Networking", + -12.663086891174316 + ], + [ + "each", + -12.663199424743652 + ], + [ + "▁wreck", + -12.663257598876951 + ], + [ + "▁influencing", + -12.663464546203612 + ], + [ + "▁trajectory", + -12.663464546203612 + ], + [ + "▁kettle", + -12.663530349731444 + ], + [ + "px", + -12.663774490356444 + ], + [ + "MF", + -12.663800239562988 + ], + [ + "▁contemplate", + -12.663951873779297 + ], + [ + "▁swipe", + -12.664045333862305 + ], + [ + "▁seize", + -12.664068222045898 + ], + [ + "▁redirect", + -12.66409397125244 + ], + [ + "▁Fold", + -12.664138793945312 + ], + [ + "Baby", + -12.664166450500488 + ], + [ + "▁210", + -12.664176940917969 + ], + [ + "▁gestures", + -12.66439723968506 + ], + [ + "▁engages", + -12.664401054382324 + ], + [ + "RL", + -12.664426803588867 + ], + [ + "▁Raspberry", + -12.66455078125 + ], + [ + "Series", + -12.664593696594238 + ], + [ + "▁ageing", + -12.664660453796388 + ], + [ + "▁Stamp", + -12.664722442626951 + ], + [ + "gg", + -12.664852142333984 + ], + [ + "---", + 
-12.664867401123049 + ], + [ + "▁forecasting", + -12.664918899536133 + ], + [ + "▁drizzle", + -12.665030479431152 + ], + [ + "▁niece", + -12.665083885192873 + ], + [ + "▁poets", + -12.665130615234377 + ], + [ + "▁ailments", + -12.665257453918455 + ], + [ + "▁authored", + -12.665390014648438 + ], + [ + "▁Approach", + -12.665406227111816 + ], + [ + "▁deductions", + -12.665411949157717 + ], + [ + "▁Cord", + -12.665468215942385 + ], + [ + "guide", + -12.66555404663086 + ], + [ + "ride", + -12.665661811828612 + ], + [ + "▁dragging", + -12.665663719177246 + ], + [ + "▁prejudice", + -12.66605281829834 + ], + [ + "▁indications", + -12.6660737991333 + ], + [ + "pes", + -12.666077613830566 + ], + [ + "▁Quad", + -12.66610622406006 + ], + [ + "▁CU", + -12.666157722473145 + ], + [ + "▁standpoint", + -12.666356086730955 + ], + [ + "▁PGA", + -12.666396141052246 + ], + [ + "▁berry", + -12.666421890258787 + ], + [ + "▁Colonial", + -12.666605949401855 + ], + [ + "▁Gore", + -12.666666984558104 + ], + [ + "▁evacuation", + -12.666714668273926 + ], + [ + "▁forbidden", + -12.666725158691406 + ], + [ + "GR", + -12.666733741760254 + ], + [ + "▁Kuwait", + -12.666892051696776 + ], + [ + "giving", + -12.666903495788574 + ], + [ + "▁Rum", + -12.667397499084473 + ], + [ + "their", + -12.667428970336914 + ], + [ + "▁hinder", + -12.667509078979492 + ], + [ + "GHz", + -12.667552947998049 + ], + [ + "▁sectional", + -12.667591094970703 + ], + [ + "▁contractual", + -12.667619705200195 + ], + [ + "those", + -12.667622566223145 + ], + [ + "$1", + -12.66763973236084 + ], + [ + "dll", + -12.66781520843506 + ], + [ + "▁reproduced", + -12.667824745178224 + ], + [ + "▁moss", + -12.667856216430664 + ], + [ + "▁Medi", + -12.667881965637209 + ], + [ + "▁enforced", + -12.667930603027344 + ], + [ + "▁bolster", + -12.667990684509276 + ], + [ + "access", + -12.668031692504885 + ], + [ + "▁brightest", + -12.66805934906006 + ], + [ + "▁Goa", + -12.668087005615234 + ], + [ + "Hard", + -12.66810703277588 + ], + [ + "igh", + -12.668184280395508 + ], + [ + "▁prevailing", + -12.668214797973633 + ], + [ + "▁beginnings", + -12.668278694152832 + ], + [ + "▁Assist", + -12.668328285217283 + ], + [ + "▁Grinder", + -12.66859531402588 + ], + [ + "▁cured", + -12.668692588806152 + ], + [ + "▁Cas", + -12.668915748596191 + ], + [ + "▁Belize", + -12.668943405151367 + ], + [ + "▁fungus", + -12.669084548950195 + ], + [ + "▁expressive", + -12.66923999786377 + ], + [ + "▁neurological", + -12.669357299804688 + ], + [ + "members", + -12.669401168823242 + ], + [ + "Natural", + -12.66940212249756 + ], + [ + "180", + -12.669415473937988 + ], + [ + "ingham", + -12.66946506500244 + ], + [ + "▁Guatemala", + -12.669580459594728 + ], + [ + "▁1995,", + -12.669685363769531 + ], + [ + "VE", + -12.669692993164062 + ], + [ + "KE", + -12.669912338256836 + ], + [ + "▁Erik", + -12.669940948486328 + ], + [ + "▁ammunition", + -12.670005798339844 + ], + [ + "wow", + -12.670069694519045 + ], + [ + "▁#5", + -12.67014217376709 + ], + [ + "▁ACC", + -12.670180320739746 + ], + [ + "vie", + -12.670339584350586 + ], + [ + "▁fe", + -12.670498847961426 + ], + [ + "▁Combined", + -12.670547485351562 + ], + [ + "▁Vin", + -12.670554161071776 + ], + [ + "▁Recommended", + -12.670632362365724 + ], + [ + "▁Coca", + -12.670650482177734 + ], + [ + "▁Pitt", + -12.6707763671875 + ], + [ + "▁arrows", + -12.670852661132812 + ], + [ + "▁QC", + -12.670897483825684 + ], + [ + "refundable", + -12.67090892791748 + ], + [ + "▁bro", + -12.670950889587402 + ], + [ + "▁nasal", + -12.670987129211426 + ], + [ + 
"cel", + -12.671051025390623 + ], + [ + "▁1300", + -12.671091079711914 + ], + [ + "▁Superman", + -12.671196937561035 + ], + [ + "▁Trustees", + -12.671797752380373 + ], + [ + "▁overload", + -12.67181396484375 + ], + [ + "▁dictate", + -12.671998977661133 + ], + [ + "▁refining", + -12.67215633392334 + ], + [ + "550", + -12.672210693359377 + ], + [ + "▁prosperous", + -12.672211647033691 + ], + [ + "▁Paula", + -12.672239303588867 + ], + [ + "cheap", + -12.672285079956056 + ], + [ + "▁GUI", + -12.672285079956056 + ], + [ + "-80", + -12.672337532043455 + ], + [ + "-27", + -12.672365188598633 + ], + [ + "▁Junk", + -12.672452926635742 + ], + [ + "Model", + -12.672468185424805 + ], + [ + "▁syntax", + -12.672476768493652 + ], + [ + "existing", + -12.67251968383789 + ], + [ + "▁pleasantly", + -12.672540664672852 + ], + [ + "credit", + -12.67258071899414 + ], + [ + "-31", + -12.672614097595217 + ], + [ + "▁Luck", + -12.672774314880373 + ], + [ + "▁battlefield", + -12.672795295715332 + ], + [ + "▁longing", + -12.672810554504396 + ], + [ + "▁tweak", + -12.67281723022461 + ], + [ + "▁cape", + -12.672927856445312 + ], + [ + "▁Crossing", + -12.672995567321776 + ], + [ + "▁Crisis", + -12.673062324523926 + ], + [ + "▁HB", + -12.673090934753418 + ], + [ + "▁wig", + -12.673187255859377 + ], + [ + "▁Alpine", + -12.673202514648438 + ], + [ + "▁backsplash", + -12.673234939575195 + ], + [ + "▁hesitation", + -12.673253059387209 + ], + [ + "▁Darwin", + -12.67327117919922 + ], + [ + "rian", + -12.673358917236328 + ], + [ + "▁Hol", + -12.67339324951172 + ], + [ + "▁sovereign", + -12.673397064208984 + ], + [ + "▁avenues", + -12.673535346984863 + ], + [ + "▁shoreline", + -12.673612594604492 + ], + [ + "0.0", + -12.673628807067873 + ], + [ + "Had", + -12.673660278320312 + ], + [ + "▁Lindsay", + -12.67374038696289 + ], + [ + "▁Charge", + -12.673741340637209 + ], + [ + "▁acquainted", + -12.673919677734377 + ], + [ + "/12", + -12.674006462097168 + ], + [ + "loop", + -12.674150466918944 + ], + [ + "▁superstar", + -12.674156188964844 + ], + [ + "▁skate", + -12.674158096313477 + ], + [ + "▁botanical", + -12.674181938171388 + ], + [ + "▁Shake", + -12.67421817779541 + ], + [ + "▁Katherine", + -12.674257278442385 + ], + [ + "▁Malcolm", + -12.674339294433594 + ], + [ + "▁mundane", + -12.67434024810791 + ], + [ + "▁competencies", + -12.67434787750244 + ], + [ + "▁referee", + -12.674365043640137 + ], + [ + "▁Nile", + -12.674434661865234 + ], + [ + "▁detained", + -12.674463272094728 + ], + [ + "▁Pulse", + -12.674490928649902 + ], + [ + "▁Vote", + -12.674681663513184 + ], + [ + "bl", + -12.674735069274902 + ], + [ + "▁quilts", + -12.67480754852295 + ], + [ + "▁1996,", + -12.674969673156738 + ], + [ + "Six", + -12.675249099731444 + ], + [ + "▁ideology", + -12.675435066223145 + ], + [ + "▁lib", + -12.67552375793457 + ], + [ + "▁Anglo", + -12.675620079040527 + ], + [ + "▁$50,000", + -12.675664901733398 + ], + [ + "▁widest", + -12.675716400146484 + ], + [ + "▁Bengal", + -12.675874710083008 + ], + [ + "▁Crawford", + -12.675933837890623 + ], + [ + "▁Mol", + -12.675933837890623 + ], + [ + "PG", + -12.6762056350708 + ], + [ + "▁campsite", + -12.676212310791016 + ], + [ + "▁chilli", + -12.676263809204102 + ], + [ + "haus", + -12.676344871520996 + ], + [ + "▁Frontier", + -12.676511764526367 + ], + [ + "▁8.1", + -12.67656707763672 + ], + [ + "misunderstanding", + -12.676594734191896 + ], + [ + "▁trait", + -12.67661952972412 + ], + [ + "▁absorbing", + -12.676629066467283 + ], + [ + "ener", + -12.67670440673828 + ], + [ + "▁transitional", + 
-12.67681884765625 + ], + [ + "▁Hospitality", + -12.67686653137207 + ], + [ + "pie", + -12.67693042755127 + ], + [ + "▁FS", + -12.677083015441896 + ], + [ + "▁timetable", + -12.677297592163086 + ], + [ + "▁brochures", + -12.677356719970703 + ], + [ + "▁Function", + -12.677447319030762 + ], + [ + "▁commemorate", + -12.677536010742188 + ], + [ + "▁HUGE", + -12.67762851715088 + ], + [ + "tty", + -12.677647590637209 + ], + [ + "▁warriors", + -12.677689552307127 + ], + [ + "Founded", + -12.67771816253662 + ], + [ + "location", + -12.677721977233888 + ], + [ + "▁selfish", + -12.677722930908203 + ], + [ + "▁Rare", + -12.67783260345459 + ], + [ + "▁li", + -12.677833557128906 + ], + [ + "▁hypertension", + -12.677855491638184 + ], + [ + "▁Hamburg", + -12.677882194519045 + ], + [ + "▁advantageous", + -12.677937507629396 + ], + [ + "▁cheeks", + -12.677964210510254 + ], + [ + "▁songwriter", + -12.678018569946287 + ], + [ + "▁discard", + -12.6780366897583 + ], + [ + "main", + -12.678094863891602 + ], + [ + "▁bundles", + -12.678115844726562 + ], + [ + "▁Mesh", + -12.67814826965332 + ], + [ + "▁NGOs", + -12.678155899047852 + ], + [ + "▁Nail", + -12.678236961364746 + ], + [ + "▁Zu", + -12.678240776062012 + ], + [ + "Main", + -12.67849349975586 + ], + [ + "▁leftovers", + -12.678608894348145 + ], + [ + "▁Restaurants", + -12.67862606048584 + ], + [ + "▁spokeswoman", + -12.678723335266112 + ], + [ + "▁Vimeo", + -12.678725242614746 + ], + [ + "▁elusive", + -12.678726196289062 + ], + [ + "▁colonies", + -12.67876148223877 + ], + [ + "Down", + -12.678792953491213 + ], + [ + "▁Parties", + -12.678832054138184 + ], + [ + "fresh", + -12.678913116455078 + ], + [ + "did", + -12.678927421569824 + ], + [ + "og", + -12.679001808166504 + ], + [ + "▁Smartphone", + -12.679116249084473 + ], + [ + "▁cupcake", + -12.679272651672363 + ], + [ + "▁Lam", + -12.679304122924805 + ], + [ + "emi", + -12.679388999938965 + ], + [ + "ean", + -12.679407119750977 + ], + [ + "▁Greene", + -12.679439544677734 + ], + [ + "▁eagerly", + -12.679569244384766 + ], + [ + "▁oasis", + -12.67982292175293 + ], + [ + "▁fi", + -12.679880142211914 + ], + [ + "wy", + -12.679893493652344 + ], + [ + "▁Changes", + -12.679938316345217 + ], + [ + "Jim", + -12.679944038391112 + ], + [ + "▁interpretations", + -12.6800537109375 + ], + [ + "▁thighs", + -12.68009090423584 + ], + [ + "ates", + -12.680197715759276 + ], + [ + "▁preaching", + -12.680357933044434 + ], + [ + "▁ordinance", + -12.680365562438965 + ], + [ + "Du", + -12.680386543273926 + ], + [ + "RES", + -12.680418968200684 + ], + [ + "▁Swing", + -12.680493354797363 + ], + [ + "▁chewing", + -12.680509567260742 + ], + [ + "▁paced", + -12.68053150177002 + ], + [ + "▁swallow", + -12.680549621582031 + ], + [ + "/2018", + -12.680575370788574 + ], + [ + "▁sporty", + -12.680642127990724 + ], + [ + "▁motivational", + -12.68064785003662 + ], + [ + "▁fourteen", + -12.68067455291748 + ], + [ + "▁truths", + -12.680691719055176 + ], + [ + "▁relentless", + -12.680727005004885 + ], + [ + "▁Broken", + -12.680749893188477 + ], + [ + "▁neighbour", + -12.680791854858398 + ], + [ + "▁shave", + -12.68081760406494 + ], + [ + "know", + -12.680871963500977 + ], + [ + "Absolutely", + -12.680895805358888 + ], + [ + "▁das", + -12.680895805358888 + ], + [ + "▁hurdles", + -12.680902481079102 + ], + [ + "▁sacrificing", + -12.680922508239746 + ], + [ + "▁dentures", + -12.68093490600586 + ], + [ + "▁Imaging", + -12.680950164794922 + ], + [ + "Hu", + -12.68097686767578 + ], + [ + "days", + -12.680981636047363 + ], + [ + "▁upkeep", + 
-12.681025505065918 + ], + [ + "▁PCI", + -12.68110179901123 + ], + [ + "▁mob", + -12.681310653686523 + ], + [ + "ICE", + -12.681570053100586 + ], + [ + "▁Enterprises", + -12.68160629272461 + ], + [ + "▁Wings", + -12.681638717651367 + ], + [ + "tine", + -12.681682586669922 + ], + [ + "▁SPF", + -12.681700706481934 + ], + [ + "▁Crafts", + -12.681715965270996 + ], + [ + "sive", + -12.681755065917969 + ], + [ + "▁Ac", + -12.68177318572998 + ], + [ + "Money", + -12.681805610656738 + ], + [ + "▁competitiveness", + -12.681838989257812 + ], + [ + "▁cohesive", + -12.681962966918944 + ], + [ + "bur", + -12.681994438171388 + ], + [ + "▁monastery", + -12.682024002075195 + ], + [ + "▁augmented", + -12.682137489318848 + ], + [ + "▁compensated", + -12.682183265686035 + ], + [ + "▁favors", + -12.682316780090332 + ], + [ + "▁WR", + -12.682470321655272 + ], + [ + "▁colouring", + -12.68252944946289 + ], + [ + "▁discouraged", + -12.682575225830078 + ], + [ + "▁Kenny", + -12.682604789733888 + ], + [ + "tation", + -12.682735443115234 + ], + [ + "medical", + -12.682903289794922 + ], + [ + "(4)", + -12.682964324951172 + ], + [ + "▁veg", + -12.6829833984375 + ], + [ + "▁Classical", + -12.683176040649414 + ], + [ + "alo", + -12.683448791503906 + ], + [ + "▁NL", + -12.683500289916992 + ], + [ + "▁exploded", + -12.683538436889648 + ], + [ + "▁darling", + -12.68358039855957 + ], + [ + "▁Biblical", + -12.683656692504885 + ], + [ + "▁Allison", + -12.68368434906006 + ], + [ + "▁techno", + -12.68372631072998 + ], + [ + "▁traced", + -12.683746337890623 + ], + [ + "▁($", + -12.683915138244627 + ], + [ + "▁amber", + -12.683959007263184 + ], + [ + "lang", + -12.683971405029297 + ], + [ + "▁tuck", + -12.684004783630373 + ], + [ + "▁Repairs", + -12.684009552001951 + ], + [ + "▁inbound", + -12.684026718139648 + ], + [ + "▁creams", + -12.684042930603027 + ], + [ + "▁cork", + -12.68405055999756 + ], + [ + "▁Legion", + -12.684131622314451 + ], + [ + "▁regulating", + -12.684147834777832 + ], + [ + "find", + -12.68415355682373 + ], + [ + "▁institutes", + -12.684161186218262 + ], + [ + "▁insomnia", + -12.68423080444336 + ], + [ + "▁combustion", + -12.684234619140623 + ], + [ + "▁rubbing", + -12.684243202209473 + ], + [ + "▁Bid", + -12.684256553649902 + ], + [ + "▁misses", + -12.684266090393066 + ], + [ + "▁exemplary", + -12.684280395507812 + ], + [ + "Single", + -12.684350967407228 + ], + [ + "Originally", + -12.684473037719728 + ], + [ + "▁Role", + -12.684488296508787 + ], + [ + "CAD", + -12.684560775756836 + ], + [ + "▁divorced", + -12.684572219848633 + ], + [ + "▁carts", + -12.684610366821287 + ], + [ + "▁(11", + -12.684630393981934 + ], + [ + "▁motorists", + -12.684651374816896 + ], + [ + "para", + -12.684717178344728 + ], + [ + "▁MPs", + -12.684772491455078 + ], + [ + "▁mama", + -12.68481731414795 + ], + [ + "angle", + -12.684829711914062 + ], + [ + "▁sixteen", + -12.685004234313965 + ], + [ + "▁scaled", + -12.685338973999023 + ], + [ + "▁Nursery", + -12.685386657714844 + ], + [ + "▁Investigation", + -12.685495376586914 + ], + [ + "▁lecturer", + -12.685501098632812 + ], + [ + "heim", + -12.685724258422852 + ], + [ + "▁EB", + -12.685733795166016 + ], + [ + "▁delta", + -12.685735702514648 + ], + [ + "▁(30", + -12.685784339904783 + ], + [ + "▁CAT", + -12.685786247253418 + ], + [ + "1.2", + -12.686079025268556 + ], + [ + "▁SDK", + -12.686230659484863 + ], + [ + "hir", + -12.68632698059082 + ], + [ + "▁550", + -12.686338424682615 + ], + [ + "str", + -12.686365127563477 + ], + [ + "XL", + -12.686396598815918 + ], + [ + "Track", + 
-12.686466217041016 + ], + [ + "▁Cairo", + -12.686474800109863 + ], + [ + "Thomas", + -12.686519622802734 + ], + [ + "▁EM", + -12.686530113220217 + ], + [ + "plastic", + -12.686670303344728 + ], + [ + "gh", + -12.6867036819458 + ], + [ + "▁complexities", + -12.686712265014648 + ], + [ + "chemical", + -12.686874389648438 + ], + [ + "culture", + -12.686908721923828 + ], + [ + "▁pollen", + -12.686931610107422 + ], + [ + "▁Vertical", + -12.687002182006836 + ], + [ + "▁millennials", + -12.687295913696287 + ], + [ + "▁Salvador", + -12.687329292297363 + ], + [ + "▁McCarthy", + -12.687359809875488 + ], + [ + "gs", + -12.687400817871094 + ], + [ + "hart", + -12.68763542175293 + ], + [ + "ASH", + -12.687700271606444 + ], + [ + "▁Sensor", + -12.68770694732666 + ], + [ + "▁crate", + -12.687823295593262 + ], + [ + "next", + -12.68788242340088 + ], + [ + "▁Suffolk", + -12.687888145446776 + ], + [ + "nagar", + -12.688018798828123 + ], + [ + "▁vibes", + -12.688050270080566 + ], + [ + "aq", + -12.688384056091309 + ], + [ + "▁san", + -12.68842601776123 + ], + [ + "▁Recipe", + -12.68844509124756 + ], + [ + "Office", + -12.688509941101074 + ], + [ + "▁Ivy", + -12.688552856445312 + ], + [ + "▁refurbishment", + -12.688677787780762 + ], + [ + "▁eternity", + -12.688690185546877 + ], + [ + "-90", + -12.688907623291016 + ], + [ + "ister", + -12.68894100189209 + ], + [ + "▁jpg", + -12.689020156860352 + ], + [ + "▁Bryant", + -12.689058303833008 + ], + [ + "▁complexion", + -12.689064025878906 + ], + [ + "▁Lessons", + -12.689201354980469 + ], + [ + "▁Oriental", + -12.689234733581545 + ], + [ + "▁touchdowns", + -12.689249992370604 + ], + [ + "Nov", + -12.6892671585083 + ], + [ + "rag", + -12.689410209655762 + ], + [ + "▁councils", + -12.68972110748291 + ], + [ + "▁horrific", + -12.689767837524414 + ], + [ + "▁hospice", + -12.68976879119873 + ], + [ + "▁FULL", + -12.68985080718994 + ], + [ + "▁categorized", + -12.690065383911133 + ], + [ + "▁flawed", + -12.69012451171875 + ], + [ + "▁crackers", + -12.69025421142578 + ], + [ + "▁entails", + -12.69033432006836 + ], + [ + "▁Walnut", + -12.69034194946289 + ], + [ + "▁grandma", + -12.690510749816896 + ], + [ + "▁wary", + -12.69076919555664 + ], + [ + "▁Declaration", + -12.690878868103027 + ], + [ + "▁cockpit", + -12.690878868103027 + ], + [ + "priced", + -12.690882682800291 + ], + [ + "▁106", + -12.691021919250488 + ], + [ + "▁Detective", + -12.691044807434082 + ], + [ + "▁obey", + -12.69112777709961 + ], + [ + "▁pits", + -12.691147804260254 + ], + [ + "campus", + -12.691167831420898 + ], + [ + "▁upstream", + -12.691302299499512 + ], + [ + "▁hubs", + -12.69154167175293 + ], + [ + "▁resale", + -12.69161891937256 + ], + [ + "▁IM", + -12.691620826721191 + ], + [ + "▁Matters", + -12.691779136657717 + ], + [ + "MG", + -12.69180393218994 + ], + [ + "▁efficiencies", + -12.691990852355955 + ], + [ + "▁mozzarella", + -12.691990852355955 + ], + [ + "▁booming", + -12.69217586517334 + ], + [ + "▁sings", + -12.692184448242188 + ], + [ + "▁STAR", + -12.692193031311035 + ], + [ + "weather", + -12.692378044128418 + ], + [ + "Ab", + -12.692461013793944 + ], + [ + "▁publishes", + -12.69275188446045 + ], + [ + "▁dipped", + -12.692758560180664 + ], + [ + "LF", + -12.69283962249756 + ], + [ + "▁unsuccessful", + -12.692880630493164 + ], + [ + "▁ADHD", + -12.69290256500244 + ], + [ + "phase", + -12.692943572998049 + ], + [ + "▁Element", + -12.692978858947754 + ], + [ + "special", + -12.69304656982422 + ], + [ + "▁crank", + -12.693065643310549 + ], + [ + "▁motive", + -12.69313144683838 + ], + [ + 
"▁Avengers", + -12.693140983581545 + ], + [ + "▁suction", + -12.693167686462402 + ], + [ + "▁embarrassed", + -12.693191528320312 + ], + [ + "▁WOW", + -12.69322395324707 + ], + [ + "▁Categories", + -12.693241119384766 + ], + [ + "thing", + -12.69325065612793 + ], + [ + "▁establishes", + -12.693316459655762 + ], + [ + "aci", + -12.693340301513672 + ], + [ + "Creating", + -12.693350791931152 + ], + [ + "▁Connor", + -12.6934232711792 + ], + [ + "▁discarded", + -12.693507194519045 + ], + [ + "▁Jess", + -12.693592071533203 + ], + [ + "▁tackles", + -12.693641662597656 + ], + [ + "▁quickest", + -12.69367504119873 + ], + [ + "▁innate", + -12.693700790405272 + ], + [ + "▁Thrones", + -12.693731307983398 + ], + [ + "▁thirteen", + -12.693790435791016 + ], + [ + "▁peanuts", + -12.693801879882812 + ], + [ + "▁veggie", + -12.693818092346191 + ], + [ + "version", + -12.693922996520996 + ], + [ + "gus", + -12.694067001342772 + ], + [ + "spec", + -12.69412612915039 + ], + [ + "▁laps", + -12.69423007965088 + ], + [ + "▁AK", + -12.694262504577637 + ], + [ + "▁noises", + -12.694326400756836 + ], + [ + "▁inferior", + -12.694332122802734 + ], + [ + "pping", + -12.694339752197266 + ], + [ + "▁Requirements", + -12.694341659545898 + ], + [ + "▁blossom", + -12.69438362121582 + ], + [ + "▁guideline", + -12.694422721862791 + ], + [ + "▁vascular", + -12.69448947906494 + ], + [ + "▁Koh", + -12.694494247436523 + ], + [ + "Security", + -12.694501876831056 + ], + [ + "sometimes", + -12.694512367248535 + ], + [ + "▁Australians", + -12.694575309753418 + ], + [ + "▁Cats", + -12.69461441040039 + ], + [ + "ANT", + -12.694695472717283 + ], + [ + "cht", + -12.69483470916748 + ], + [ + "▁Occupational", + -12.695012092590332 + ], + [ + "▁turtles", + -12.695162773132324 + ], + [ + "▁secluded", + -12.695338249206545 + ], + [ + "▁fishermen", + -12.69534397125244 + ], + [ + "▁degradation", + -12.695362091064451 + ], + [ + "▁defenders", + -12.695469856262209 + ], + [ + "▁heroic", + -12.695672035217283 + ], + [ + "▁herd", + -12.695680618286133 + ], + [ + "▁flattering", + -12.695686340332031 + ], + [ + "▁breaker", + -12.695844650268556 + ], + [ + "▁capped", + -12.695856094360352 + ], + [ + "often", + -12.69591236114502 + ], + [ + "▁pirate", + -12.695942878723145 + ], + [ + "▁diaper", + -12.695958137512209 + ], + [ + "▁exhausting", + -12.695974349975586 + ], + [ + "▁eagle", + -12.696075439453123 + ], + [ + "▁prosecutor", + -12.696081161499023 + ], + [ + "▁FedEx", + -12.696084022521973 + ], + [ + "▁Laurel", + -12.696094512939451 + ], + [ + "▁Thought", + -12.696155548095703 + ], + [ + "cro", + -12.696192741394045 + ], + [ + "▁RGB", + -12.696382522583008 + ], + [ + "woman", + -12.69660472869873 + ], + [ + "▁heightened", + -12.69674015045166 + ], + [ + "▁deploying", + -12.696752548217772 + ], + [ + "▁Plug", + -12.696822166442873 + ], + [ + "LES", + -12.696919441223145 + ], + [ + "▁Instructor", + -12.69693374633789 + ], + [ + "▁Pricing", + -12.697002410888672 + ], + [ + "▁cornerstone", + -12.697160720825195 + ], + [ + "▁simulator", + -12.69720458984375 + ], + [ + "▁homelessness", + -12.697205543518066 + ], + [ + "▁taps", + -12.697242736816406 + ], + [ + "▁discovers", + -12.697338104248049 + ], + [ + "▁Subscribe", + -12.697417259216309 + ], + [ + "wen", + -12.697546005249023 + ], + [ + "▁honoring", + -12.697596549987791 + ], + [ + "ire", + -12.69761848449707 + ], + [ + "▁spectators", + -12.697628021240234 + ], + [ + "▁Tina", + -12.6976318359375 + ], + [ + "▁obliged", + -12.697754859924316 + ], + [ + "▁Plenty", + -12.697772979736328 + ], + [ + 
"▁Om", + -12.697933197021484 + ], + [ + "▁Alibaba", + -12.69794750213623 + ], + [ + "nc", + -12.69817066192627 + ], + [ + "▁Johannesburg", + -12.698394775390623 + ], + [ + "▁gigs", + -12.6984281539917 + ], + [ + "▁Theresa", + -12.698563575744627 + ], + [ + "▁Random", + -12.698582649230955 + ], + [ + "9.99", + -12.698668479919434 + ], + [ + "▁bo", + -12.698691368103027 + ], + [ + "▁illumination", + -12.698691368103027 + ], + [ + "▁resonance", + -12.698691368103027 + ], + [ + "Details", + -12.698702812194824 + ], + [ + "▁Railroad", + -12.698713302612305 + ], + [ + "▁altering", + -12.698744773864746 + ], + [ + "▁10-15", + -12.698775291442873 + ], + [ + "▁Az", + -12.698880195617676 + ], + [ + "▁acidity", + -12.698992729187012 + ], + [ + "▁stitched", + -12.699003219604492 + ], + [ + "pressure", + -12.69916820526123 + ], + [ + "▁rebel", + -12.699211120605469 + ], + [ + "▁utter", + -12.699264526367188 + ], + [ + "▁Provides", + -12.69947624206543 + ], + [ + "▁Shirley", + -12.69951629638672 + ], + [ + "▁lends", + -12.69963264465332 + ], + [ + "▁Bingo", + -12.699698448181152 + ], + [ + "▁divers", + -12.69983959197998 + ], + [ + "gro", + -12.699889183044434 + ], + [ + "▁Sherman", + -12.700081825256348 + ], + [ + "NZ", + -12.700127601623535 + ], + [ + "▁recap", + -12.700127601623535 + ], + [ + "mur", + -12.700145721435549 + ], + [ + "▁Scandinavian", + -12.700240135192873 + ], + [ + "▁dispatched", + -12.700263023376465 + ], + [ + "▁thermo", + -12.700277328491213 + ], + [ + "tti", + -12.700291633605955 + ], + [ + "▁cinematic", + -12.70044231414795 + ], + [ + "▁writings", + -12.700597763061523 + ], + [ + "▁Underground", + -12.700634956359863 + ], + [ + "▁ETF", + -12.700773239135742 + ], + [ + "▁Matrix", + -12.700796127319336 + ], + [ + "▁tighter", + -12.700864791870115 + ], + [ + "▁footsteps", + -12.700950622558594 + ], + [ + "stad", + -12.7010498046875 + ], + [ + "▁SaaS", + -12.70119857788086 + ], + [ + "▁imprint", + -12.701216697692873 + ], + [ + "▁turbine", + -12.701242446899414 + ], + [ + "▁contaminants", + -12.701263427734377 + ], + [ + "▁DT", + -12.701278686523438 + ], + [ + "Registration", + -12.701387405395508 + ], + [ + "▁bout", + -12.701390266418455 + ], + [ + "NG", + -12.701462745666504 + ], + [ + "Feb", + -12.701492309570312 + ], + [ + "▁Waters", + -12.70150661468506 + ], + [ + "ere", + -12.701666831970217 + ], + [ + "▁Emerald", + -12.701748847961426 + ], + [ + "▁Firstly", + -12.701762199401855 + ], + [ + "▁cheque", + -12.701804161071776 + ], + [ + "▁Baron", + -12.701860427856444 + ], + [ + "▁Brake", + -12.701937675476074 + ], + [ + "▁Sunny", + -12.702064514160156 + ], + [ + "▁goats", + -12.7021484375 + ], + [ + "▁tr", + -12.702160835266112 + ], + [ + "▁Rag", + -12.70223903656006 + ], + [ + "script", + -12.702407836914062 + ], + [ + "▁60-", + -12.702471733093262 + ], + [ + "serve", + -12.702485084533691 + ], + [ + "▁Preview", + -12.702502250671388 + ], + [ + "▁throttle", + -12.702515602111816 + ], + [ + "▁economists", + -12.702569007873535 + ], + [ + "▁icy", + -12.702695846557615 + ], + [ + "▁pier", + -12.70284652709961 + ], + [ + "Core", + -12.702855110168455 + ], + [ + "▁translator", + -12.702880859375 + ], + [ + "eyed", + -12.703030586242676 + ], + [ + "lake", + -12.703031539916992 + ], + [ + "▁displacement", + -12.703204154968262 + ], + [ + "▁Hitler", + -12.703229904174805 + ], + [ + "▁Limit", + -12.703344345092772 + ], + [ + "▁akin", + -12.703414916992188 + ], + [ + "(3)", + -12.70350456237793 + ], + [ + "▁Quinn", + -12.70358943939209 + ], + [ + "▁Jacobs", + -12.70360279083252 + ], + [ + 
"Application", + -12.703669548034668 + ], + [ + "▁labeling", + -12.7036714553833 + ], + [ + "contact", + -12.703742027282717 + ], + [ + "platform", + -12.703782081604004 + ], + [ + "▁supervisors", + -12.70389175415039 + ], + [ + "▁Hyde", + -12.703924179077148 + ], + [ + "▁Mattress", + -12.704005241394045 + ], + [ + "▁1951", + -12.704055786132812 + ], + [ + "▁Institution", + -12.704083442687988 + ], + [ + "▁Eclipse", + -12.70408821105957 + ], + [ + "▁Won", + -12.704140663146973 + ], + [ + "▁Tribune", + -12.70417308807373 + ], + [ + "Frank", + -12.70419216156006 + ], + [ + "▁Distance", + -12.704302787780762 + ], + [ + "▁historians", + -12.704337120056152 + ], + [ + "▁Christina", + -12.7044095993042 + ], + [ + "rent", + -12.704476356506348 + ], + [ + "▁turnout", + -12.70461654663086 + ], + [ + "Ne", + -12.704636573791504 + ], + [ + "▁Johns", + -12.704798698425291 + ], + [ + "TN", + -12.704805374145508 + ], + [ + "loved", + -12.704960823059082 + ], + [ + "▁Holocaust", + -12.705052375793455 + ], + [ + "train", + -12.705082893371582 + ], + [ + "▁thirst", + -12.705175399780272 + ], + [ + "▁Amp", + -12.705193519592283 + ], + [ + "▁REAL", + -12.70520305633545 + ], + [ + "▁portray", + -12.70541763305664 + ], + [ + "▁cherries", + -12.705437660217283 + ], + [ + "▁Maximum", + -12.7054443359375 + ], + [ + "▁plagiarism", + -12.705445289611816 + ], + [ + "▁spacing", + -12.705520629882812 + ], + [ + "▁attainable", + -12.705598831176758 + ], + [ + "hang", + -12.705613136291504 + ], + [ + "▁145", + -12.705613136291504 + ], + [ + "▁Silva", + -12.705618858337402 + ], + [ + "▁165", + -12.70571231842041 + ], + [ + "▁Blockchain", + -12.705723762512209 + ], + [ + "▁Ideally", + -12.705820083618164 + ], + [ + "▁artery", + -12.70590877532959 + ], + [ + "▁inherently", + -12.70595645904541 + ], + [ + "▁shingles", + -12.70595932006836 + ], + [ + "▁url", + -12.706133842468262 + ], + [ + "▁PB", + -12.706146240234377 + ], + [ + "rist", + -12.70617389678955 + ], + [ + "▁Palo", + -12.706254005432127 + ], + [ + "aux", + -12.706429481506348 + ], + [ + "▁Import", + -12.706440925598145 + ], + [ + "nl", + -12.706454277038574 + ], + [ + "▁parliamentary", + -12.70659065246582 + ], + [ + "▁optic", + -12.70666790008545 + ], + [ + "▁Dunn", + -12.706698417663574 + ], + [ + "Martin", + -12.707079887390137 + ], + [ + "▁Houses", + -12.707097053527832 + ], + [ + "System", + -12.707106590270996 + ], + [ + "▁allocate", + -12.707109451293944 + ], + [ + "ovic", + -12.70718765258789 + ], + [ + "▁Boots", + -12.707220077514648 + ], + [ + "▁cheek", + -12.70723819732666 + ], + [ + "▁coup", + -12.70724868774414 + ], + [ + "hana", + -12.707280158996582 + ], + [ + "▁RN", + -12.70743179321289 + ], + [ + "▁diabetic", + -12.707484245300291 + ], + [ + "▁missiles", + -12.707608222961426 + ], + [ + "▁Sense", + -12.707623481750488 + ], + [ + "▁psychiatric", + -12.70769500732422 + ], + [ + "▁coherent", + -12.707695960998535 + ], + [ + "▁tandem", + -12.707701683044434 + ], + [ + "▁inheritance", + -12.7077054977417 + ], + [ + "▁hearings", + -12.707716941833496 + ], + [ + "▁adrenaline", + -12.707756996154783 + ], + [ + "▁Cherokee", + -12.707759857177734 + ], + [ + "▁Py", + -12.707769393920898 + ], + [ + "▁wedge", + -12.707772254943848 + ], + [ + "▁canoe", + -12.707788467407228 + ], + [ + "feld", + -12.707996368408203 + ], + [ + "▁guild", + -12.708056449890137 + ], + [ + "uta", + -12.7081880569458 + ], + [ + "▁1953", + -12.708209037780762 + ], + [ + "dio", + -12.708416938781738 + ], + [ + "▁foreigners", + -12.708425521850586 + ], + [ + "▁Dip", + 
-12.70843505859375 + ], + [ + "▁Eyes", + -12.708723068237305 + ], + [ + "▁overseeing", + -12.7087984085083 + ], + [ + "Su", + -12.708805084228516 + ], + [ + "▁pedestal", + -12.708826065063477 + ], + [ + "▁sanitation", + -12.70882797241211 + ], + [ + "▁Lexus", + -12.708869934082031 + ], + [ + "▁Huntington", + -12.708887100219728 + ], + [ + "▁Alison", + -12.70893096923828 + ], + [ + "LAND", + -12.709041595458984 + ], + [ + "▁Problems", + -12.709047317504885 + ], + [ + ",000,000", + -12.709054946899414 + ], + [ + "▁Bahrain", + -12.709065437316896 + ], + [ + "▁Daddy", + -12.709089279174805 + ], + [ + "loving", + -12.709094047546388 + ], + [ + "▁Transition", + -12.70917797088623 + ], + [ + "▁depicting", + -12.709220886230469 + ], + [ + "▁Whereas", + -12.709453582763672 + ], + [ + "▁avenue", + -12.709651947021484 + ], + [ + "▁heroin", + -12.709668159484863 + ], + [ + "▁emailing", + -12.709691047668455 + ], + [ + "▁6%", + -12.709769248962402 + ], + [ + "▁pyramid", + -12.709951400756836 + ], + [ + "▁dividing", + -12.709959983825684 + ], + [ + "▁robbery", + -12.710006713867188 + ], + [ + "▁outward", + -12.710027694702148 + ], + [ + "hm", + -12.710135459899902 + ], + [ + "▁steroids", + -12.710222244262695 + ], + [ + "▁fringe", + -12.710399627685549 + ], + [ + "▁Bentley", + -12.710420608520508 + ], + [ + "▁splitting", + -12.710432052612305 + ], + [ + "▁attained", + -12.710441589355469 + ], + [ + "▁mods", + -12.710528373718262 + ], + [ + "▁Mud", + -12.710615158081056 + ], + [ + "spot", + -12.710702896118164 + ], + [ + "tica", + -12.71090602874756 + ], + [ + "▁Kurt", + -12.71091079711914 + ], + [ + "▁Neck", + -12.71096897125244 + ], + [ + "Comments", + -12.710981369018556 + ], + [ + "ij", + -12.7110595703125 + ], + [ + "staff", + -12.711060523986816 + ], + [ + "▁carnival", + -12.711092948913574 + ], + [ + "▁fluorescent", + -12.71109390258789 + ], + [ + "▁Zoom", + -12.711153984069824 + ], + [ + "▁bicycles", + -12.711162567138672 + ], + [ + "▁delegate", + -12.711166381835938 + ], + [ + "▁minorities", + -12.711199760437012 + ], + [ + "▁noteworthy", + -12.71121883392334 + ], + [ + "▁Pri", + -12.711276054382324 + ], + [ + "▁0.7", + -12.711362838745115 + ], + [ + "▁Karnataka", + -12.711383819580078 + ], + [ + "SEO", + -12.711400985717772 + ], + [ + "▁behaviours", + -12.711709976196287 + ], + [ + "uz", + -12.711773872375488 + ], + [ + "▁swinging", + -12.711885452270508 + ], + [ + "sten", + -12.712047576904297 + ], + [ + "▁windy", + -12.712093353271484 + ], + [ + "▁modifying", + -12.71214199066162 + ], + [ + "▁granting", + -12.712148666381836 + ], + [ + "ego", + -12.712186813354492 + ], + [ + "▁RB", + -12.712217330932615 + ], + [ + "▁uniqueness", + -12.712343215942385 + ], + [ + "▁Generator", + -12.712355613708496 + ], + [ + "▁teaser", + -12.712445259094238 + ], + [ + "haven", + -12.712454795837402 + ], + [ + "▁6-8", + -12.712501525878906 + ], + [ + "▁Maui", + -12.712506294250488 + ], + [ + "▁adaptable", + -12.7125883102417 + ], + [ + "-60", + -12.712611198425291 + ], + [ + "VO", + -12.712762832641602 + ], + [ + "▁UTC", + -12.712772369384766 + ], + [ + "▁Infinity", + -12.712833404541016 + ], + [ + "▁ERA", + -12.712890625 + ], + [ + "rad", + -12.712929725646973 + ], + [ + "▁ki", + -12.712944984436035 + ], + [ + "▁Religious", + -12.713221549987791 + ], + [ + "▁Marsh", + -12.713228225708008 + ], + [ + "▁stylist", + -12.713316917419434 + ], + [ + "▁Talking", + -12.71334743499756 + ], + [ + "▁ashamed", + -12.71338176727295 + ], + [ + "▁leakage", + -12.713574409484863 + ], + [ + "owski", + -12.71363639831543 + ], + 
[ + "▁condemned", + -12.71364688873291 + ], + [ + "▁Mean", + -12.71365451812744 + ], + [ + "▁Ay", + -12.713845252990724 + ], + [ + "▁Luxembourg", + -12.71385383605957 + ], + [ + "▁scarce", + -12.713857650756836 + ], + [ + "▁woodworking", + -12.713859558105469 + ], + [ + "▁blows", + -12.713907241821287 + ], + [ + "Va", + -12.713934898376465 + ], + [ + "▁molded", + -12.714120864868164 + ], + [ + "Usually", + -12.714126586914062 + ], + [ + "▁Musk", + -12.714126586914062 + ], + [ + "▁Homeland", + -12.714139938354492 + ], + [ + "▁Rai", + -12.714211463928224 + ], + [ + "▁persona", + -12.714335441589355 + ], + [ + "lived", + -12.714472770690918 + ], + [ + "▁uninterrupted", + -12.71450138092041 + ], + [ + "▁imminent", + -12.714502334594728 + ], + [ + "▁HOW", + -12.71461582183838 + ], + [ + "▁1994,", + -12.714692115783691 + ], + [ + "▁meticulously", + -12.71474266052246 + ], + [ + "▁1917", + -12.714814186096191 + ], + [ + "▁projector", + -12.714838027954102 + ], + [ + "▁stabilize", + -12.714862823486328 + ], + [ + "▁Discuss", + -12.714982986450195 + ], + [ + "▁FCC", + -12.715001106262209 + ], + [ + "▁Reef", + -12.715020179748535 + ], + [ + "▁Bomb", + -12.715091705322266 + ], + [ + "▁Bun", + -12.71511173248291 + ], + [ + "▁Strange", + -12.71511459350586 + ], + [ + "▁TP", + -12.715274810791016 + ], + [ + "ici", + -12.71552562713623 + ], + [ + "current", + -12.715605735778809 + ], + [ + "▁sheriff", + -12.715816497802734 + ], + [ + "▁Permanent", + -12.715818405151367 + ], + [ + "▁WH", + -12.715888023376465 + ], + [ + "▁Forget", + -12.715909957885742 + ], + [ + "▁ein", + -12.715991020202637 + ], + [ + "▁pearls", + -12.71601104736328 + ], + [ + "▁chords", + -12.71605110168457 + ], + [ + "WS", + -12.716087341308594 + ], + [ + "▁buckle", + -12.716142654418944 + ], + [ + "oxy", + -12.716214179992676 + ], + [ + "▁instagram", + -12.716238021850586 + ], + [ + "▁Hut", + -12.71653938293457 + ], + [ + "▁lays", + -12.716585159301758 + ], + [ + "▁pas", + -12.716612815856934 + ], + [ + "▁richness", + -12.716811180114746 + ], + [ + "▁Tropical", + -12.71683406829834 + ], + [ + "▁200,000", + -12.71685028076172 + ], + [ + "tric", + -12.71723175048828 + ], + [ + "▁480", + -12.717401504516602 + ], + [ + "▁Cycling", + -12.71740436553955 + ], + [ + "Fit", + -12.717455863952637 + ], + [ + "▁mandated", + -12.717517852783203 + ], + [ + "▁Postal", + -12.71751880645752 + ], + [ + "▁mapped", + -12.717608451843262 + ], + [ + "▁delights", + -12.717610359191896 + ], + [ + "French", + -12.717634201049805 + ], + [ + "Professor", + -12.717634201049805 + ], + [ + "▁transmitter", + -12.717705726623535 + ], + [ + "▁protections", + -12.717792510986328 + ], + [ + "▁Cardinal", + -12.7178373336792 + ], + [ + "▁RI", + -12.717891693115234 + ], + [ + "bid", + -12.717913627624512 + ], + [ + "▁indispensable", + -12.71792221069336 + ], + [ + "▁remediation", + -12.71794319152832 + ], + [ + "▁Lottery", + -12.718109130859377 + ], + [ + "▁Wise", + -12.718119621276855 + ], + [ + "▁roulette", + -12.718414306640623 + ], + [ + "aff", + -12.718520164489746 + ], + [ + "▁Sophie", + -12.718674659729004 + ], + [ + "▁Warning", + -12.71871566772461 + ], + [ + "▁Thi", + -12.718785285949709 + ], + [ + "▁Cause", + -12.71882438659668 + ], + [ + "▁0.3", + -12.718830108642578 + ], + [ + "▁Manuel", + -12.718879699707031 + ], + [ + "▁sermon", + -12.718950271606444 + ], + [ + "flex", + -12.718992233276367 + ], + [ + "tam", + -12.719019889831545 + ], + [ + "ox", + -12.719039916992188 + ], + [ + "▁solitary", + -12.71906566619873 + ], + [ + "▁heartfelt", + 
-12.719123840332031 + ], + [ + "▁interpersonal", + -12.719164848327637 + ], + [ + "▁Passion", + -12.719249725341797 + ], + [ + "▁1949", + -12.719511032104492 + ], + [ + "▁badges", + -12.719797134399414 + ], + [ + "pol", + -12.71985149383545 + ], + [ + "ranked", + -12.719865798950195 + ], + [ + "professional", + -12.71992015838623 + ], + [ + "nation", + -12.719924926757812 + ], + [ + "▁Bart", + -12.719953536987305 + ], + [ + "▁repetition", + -12.72011375427246 + ], + [ + "▁aromas", + -12.720120429992676 + ], + [ + "▁trilogy", + -12.72021484375 + ], + [ + "▁spur", + -12.720221519470217 + ], + [ + "▁Areas", + -12.720245361328123 + ], + [ + "▁Kits", + -12.72025203704834 + ], + [ + "▁slipping", + -12.720267295837402 + ], + [ + "▁Adjustable", + -12.720314979553224 + ], + [ + "laws", + -12.720422744750977 + ], + [ + "▁monumental", + -12.720525741577148 + ], + [ + "▁FIRST", + -12.720559120178224 + ], + [ + "▁Floyd", + -12.720763206481934 + ], + [ + "patient", + -12.720795631408691 + ], + [ + "▁wholesome", + -12.720805168151855 + ], + [ + "▁widow", + -12.720855712890623 + ], + [ + "▁overdose", + -12.720914840698242 + ], + [ + "▁cruises", + -12.720916748046877 + ], + [ + "▁NAS", + -12.720918655395508 + ], + [ + "▁gram", + -12.720925331115724 + ], + [ + "▁discrete", + -12.721117973327637 + ], + [ + "▁Thames", + -12.721364974975586 + ], + [ + "▁endured", + -12.721370697021484 + ], + [ + "▁sewage", + -12.72140407562256 + ], + [ + "▁Gone", + -12.721412658691406 + ], + [ + "▁VoIP", + -12.72148609161377 + ], + [ + "reading", + -12.721517562866213 + ], + [ + "-28", + -12.721531867980955 + ], + [ + "▁Millennium", + -12.721534729003906 + ], + [ + "▁+1", + -12.721549987792969 + ], + [ + "▁attacker", + -12.721842765808104 + ], + [ + "▁Literary", + -12.721968650817873 + ], + [ + "▁openness", + -12.722009658813477 + ], + [ + "▁1941", + -12.722054481506348 + ], + [ + "▁pigment", + -12.72206687927246 + ], + [ + "▁latex", + -12.722107887268066 + ], + [ + "▁temporal", + -12.7222318649292 + ], + [ + "▁haircut", + -12.722250938415527 + ], + [ + "▁meticulous", + -12.722259521484377 + ], + [ + "▁hardship", + -12.72231101989746 + ], + [ + "gie", + -12.722408294677734 + ], + [ + "▁hectares", + -12.72240924835205 + ], + [ + "▁$400", + -12.722441673278809 + ], + [ + "Stream", + -12.722448348999023 + ], + [ + "▁Elephant", + -12.722478866577148 + ], + [ + "▁symposium", + -12.722539901733398 + ], + [ + "▁blueberries", + -12.72255802154541 + ], + [ + "▁Swim", + -12.722599983215332 + ], + [ + "▁ASP", + -12.722616195678713 + ], + [ + "▁gated", + -12.722675323486328 + ], + [ + "▁Readers", + -12.722685813903809 + ], + [ + "▁Burlington", + -12.722715377807615 + ], + [ + "IVE", + -12.722780227661133 + ], + [ + "▁Elm", + -12.722888946533203 + ], + [ + "▁Setup", + -12.722888946533203 + ], + [ + "Date", + -12.722986221313477 + ], + [ + "▁Serial", + -12.722997665405272 + ], + [ + "▁Tie", + -12.723115921020508 + ], + [ + "uring", + -12.723176956176758 + ], + [ + "▁Sandra", + -12.723257064819336 + ], + [ + "Tuesday", + -12.723482131958008 + ], + [ + "▁narratives", + -12.723501205444336 + ], + [ + "▁Cabin", + -12.72350788116455 + ], + [ + "PH", + -12.723544120788574 + ], + [ + "▁persist", + -12.723610877990724 + ], + [ + "▁bargaining", + -12.72362232208252 + ], + [ + "▁Pirates", + -12.72364616394043 + ], + [ + "▁disposition", + -12.723660469055176 + ], + [ + "▁Liability", + -12.723736763000488 + ], + [ + "▁correspond", + -12.723774909973145 + ], + [ + "▁Kazakhstan", + -12.723831176757812 + ], + [ + "▁usa", + -12.723957061767578 + ], + [ + 
"▁KO", + -12.72405242919922 + ], + [ + "▁Scholar", + -12.724105834960938 + ], + [ + "▁disconnected", + -12.72415828704834 + ], + [ + "▁Mus", + -12.724198341369627 + ], + [ + "▁Cre", + -12.72433376312256 + ], + [ + "▁McG", + -12.72443675994873 + ], + [ + "▁differing", + -12.724444389343262 + ], + [ + "ZA", + -12.724566459655762 + ], + [ + "▁SEE", + -12.72473430633545 + ], + [ + "dine", + -12.724796295166016 + ], + [ + "▁ubiquitous", + -12.724799156188965 + ], + [ + "▁Territory", + -12.72480010986328 + ], + [ + "▁misconduct", + -12.724802017211914 + ], + [ + "▁marginal", + -12.72482204437256 + ], + [ + "130", + -12.72482681274414 + ], + [ + "▁precedent", + -12.72482967376709 + ], + [ + "nice", + -12.724849700927734 + ], + [ + "=1", + -12.725083351135254 + ], + [ + "▁Maharashtra", + -12.725123405456545 + ], + [ + "isse", + -12.725278854370115 + ], + [ + "▁weighted", + -12.725321769714355 + ], + [ + "▁Thin", + -12.725441932678224 + ], + [ + "-70", + -12.725605964660645 + ], + [ + "▁Aunt", + -12.725665092468262 + ], + [ + "▁Extended", + -12.725678443908691 + ], + [ + "▁Warrior", + -12.725702285766602 + ], + [ + "▁Sanchez", + -12.72584056854248 + ], + [ + "▁mascara", + -12.725951194763184 + ], + [ + "▁vacancy", + -12.725951194763184 + ], + [ + "▁Jaguar", + -12.726057052612305 + ], + [ + "Mrs", + -12.726137161254885 + ], + [ + "▁giveaways", + -12.72618007659912 + ], + [ + "▁chambers", + -12.726197242736816 + ], + [ + "▁Bacon", + -12.72625732421875 + ], + [ + "▁republic", + -12.726292610168455 + ], + [ + "▁metre", + -12.72650146484375 + ], + [ + "▁Allied", + -12.726536750793455 + ], + [ + "▁5-10", + -12.726558685302734 + ], + [ + "▁deter", + -12.72659683227539 + ], + [ + "▁Wagner", + -12.726658821105955 + ], + [ + "▁Bas", + -12.72668170928955 + ], + [ + "▁Bla", + -12.726749420166016 + ], + [ + "▁Mia", + -12.726816177368164 + ], + [ + "Andrew", + -12.727129936218262 + ], + [ + "▁Emmy", + -12.727155685424805 + ], + [ + "▁mimic", + -12.72718334197998 + ], + [ + "▁Ginger", + -12.727198600769045 + ], + [ + "LR", + -12.72724723815918 + ], + [ + "Style", + -12.72744369506836 + ], + [ + "▁cartoons", + -12.72744846343994 + ], + [ + "Trust", + -12.727910041809082 + ], + [ + "Amazing", + -12.72797679901123 + ], + [ + "▁Gum", + -12.72801399230957 + ], + [ + "reach", + -12.728144645690918 + ], + [ + "▁carers", + -12.728172302246094 + ], + [ + "▁Gardner", + -12.728206634521484 + ], + [ + "▁Superintendent", + -12.7282075881958 + ], + [ + "▁plentiful", + -12.72825527191162 + ], + [ + "▁wives", + -12.728265762329102 + ], + [ + "▁unconventional", + -12.728271484375 + ], + [ + "▁evolutionary", + -12.728333473205566 + ], + [ + "▁gala", + -12.728339195251465 + ], + [ + "▁Rajasthan", + -12.72834300994873 + ], + [ + "▁Savior", + -12.728347778320312 + ], + [ + "gard", + -12.728376388549805 + ], + [ + "▁1994.", + -12.728464126586914 + ], + [ + "▁Seems", + -12.728558540344238 + ], + [ + "▁lingering", + -12.728622436523438 + ], + [ + "▁Counseling", + -12.728822708129885 + ], + [ + "▁1964", + -12.72895622253418 + ], + [ + "▁scissors", + -12.729181289672852 + ], + [ + "▁Manage", + -12.729208946228027 + ], + [ + "▁Qu", + -12.72923183441162 + ], + [ + "cover", + -12.729251861572266 + ], + [ + "▁Resident", + -12.729432106018066 + ], + [ + "rail", + -12.72962474822998 + ], + [ + "▁Holding", + -12.729801177978516 + ], + [ + "▁pedestrians", + -12.729844093322754 + ], + [ + "Anti", + -12.72994613647461 + ], + [ + "▁commercials", + -12.729948043823242 + ], + [ + "123", + -12.730006217956545 + ], + [ + "▁vaulted", + 
-12.730055809020996 + ], + [ + "▁Fab", + -12.730090141296388 + ], + [ + "▁Authentic", + -12.730134963989258 + ], + [ + "▁memo", + -12.730183601379396 + ], + [ + "Chi", + -12.730199813842772 + ], + [ + "▁FF", + -12.730253219604492 + ], + [ + "▁screenings", + -12.73027515411377 + ], + [ + "▁Geographic", + -12.730292320251465 + ], + [ + "▁Animation", + -12.730443000793455 + ], + [ + "▁Approximately", + -12.730554580688477 + ], + [ + "▁motivating", + -12.730566024780272 + ], + [ + "▁comforter", + -12.730693817138672 + ], + [ + "▁clone", + -12.730758666992188 + ], + [ + "▁Favorite", + -12.730853080749512 + ], + [ + "▁35%", + -12.730886459350586 + ], + [ + "▁Ras", + -12.730945587158203 + ], + [ + "zero", + -12.73099422454834 + ], + [ + "▁obese", + -12.73102569580078 + ], + [ + "▁cherished", + -12.731042861938477 + ], + [ + "▁Lagos", + -12.731075286865234 + ], + [ + "▁1:1", + -12.731170654296877 + ], + [ + "▁Keller", + -12.731266975402832 + ], + [ + "las", + -12.731403350830078 + ], + [ + "holders", + -12.731417655944824 + ], + [ + "▁Uk", + -12.731475830078123 + ], + [ + "▁CPR", + -12.731557846069336 + ], + [ + "KA", + -12.731644630432127 + ], + [ + "▁cilantro", + -12.73172378540039 + ], + [ + "▁Inspirational", + -12.731754302978516 + ], + [ + "▁Trying", + -12.731757164001465 + ], + [ + "▁finalized", + -12.731924057006836 + ], + [ + "▁camper", + -12.73195457458496 + ], + [ + "▁texting", + -12.731976509094238 + ], + [ + "▁novelty", + -12.73200511932373 + ], + [ + "▁workflows", + -12.732036590576172 + ], + [ + "▁dinosaur", + -12.73220443725586 + ], + [ + "▁109", + -12.732314109802246 + ], + [ + "▁blink", + -12.73238468170166 + ], + [ + "▁Ter", + -12.732386589050291 + ], + [ + "▁calf", + -12.73249053955078 + ], + [ + "▁neckline", + -12.732549667358398 + ], + [ + "▁PLC", + -12.732710838317873 + ], + [ + "▁Practical", + -12.732715606689451 + ], + [ + "▁illustrator", + -12.732781410217283 + ], + [ + "▁Vatican", + -12.732805252075195 + ], + [ + "▁thankfully", + -12.732810020446776 + ], + [ + "▁fueled", + -12.73281478881836 + ], + [ + "▁sleeper", + -12.732948303222656 + ], + [ + "CB", + -12.732986450195312 + ], + [ + "-29", + -12.733026504516602 + ], + [ + "▁blush", + -12.733080863952637 + ], + [ + "▁hatch", + -12.733125686645508 + ], + [ + "▁Ceramic", + -12.733155250549316 + ], + [ + "vid", + -12.73322296142578 + ], + [ + "▁Goddess", + -12.733230590820312 + ], + [ + "OB", + -12.733242988586426 + ], + [ + "oa", + -12.733270645141602 + ], + [ + "▁Fra", + -12.733314514160156 + ], + [ + "▁greenery", + -12.73335075378418 + ], + [ + "▁Ole", + -12.733501434326172 + ], + [ + "TL", + -12.733580589294434 + ], + [ + "▁Started", + -12.733661651611328 + ], + [ + "▁Harmony", + -12.733726501464844 + ], + [ + "▁Opportunities", + -12.733752250671388 + ], + [ + "due", + -12.733996391296388 + ], + [ + "▁faucets", + -12.73403549194336 + ], + [ + "oi", + -12.734054565429688 + ], + [ + "▁mammals", + -12.73405933380127 + ], + [ + "▁Singer", + -12.734126091003418 + ], + [ + "▁Ul", + -12.734127044677734 + ], + [ + "▁Monument", + -12.734249114990234 + ], + [ + "£", + -12.734381675720217 + ], + [ + "▁airflow", + -12.734381675720217 + ], + [ + "▁Sims", + -12.734492301940918 + ], + [ + "solving", + -12.73453426361084 + ], + [ + "▁1966", + -12.734539031982422 + ], + [ + "▁Yacht", + -12.734560012817385 + ], + [ + "4.5", + -12.734763145446776 + ], + [ + "▁Chartered", + -12.734773635864258 + ], + [ + "▁Already", + -12.734861373901367 + ], + [ + "▁Logic", + -12.73501205444336 + ], + [ + "▁landscaped", + -12.735065460205078 + ], + [ + 
"▁Trend", + -12.735095977783203 + ], + [ + "▁concentrating", + -12.735203742980955 + ], + [ + "▁rendition", + -12.735203742980955 + ], + [ + "▁undermine", + -12.73524284362793 + ], + [ + "\"...", + -12.735416412353516 + ], + [ + "▁12,000", + -12.735458374023438 + ], + [ + "▁TE", + -12.735459327697754 + ], + [ + "games", + -12.735492706298828 + ], + [ + "▁clamp", + -12.735711097717283 + ], + [ + "▁starch", + -12.7357177734375 + ], + [ + "▁veneers", + -12.73582363128662 + ], + [ + "▁coached", + -12.73611068725586 + ], + [ + "▁scams", + -12.736183166503906 + ], + [ + "▁composers", + -12.73619270324707 + ], + [ + "▁1942", + -12.736214637756348 + ], + [ + "▁diarrhea", + -12.736358642578123 + ], + [ + "▁arsenal", + -12.736367225646973 + ], + [ + "▁pruning", + -12.736367225646973 + ], + [ + "▁gossip", + -12.736368179321287 + ], + [ + "240", + -12.73637580871582 + ], + [ + "▁thermometer", + -12.736393928527832 + ], + [ + "▁confess", + -12.736444473266602 + ], + [ + "▁Brewery", + -12.736519813537598 + ], + [ + "▁Bald", + -12.736522674560549 + ], + [ + "▁Alarm", + -12.736605644226074 + ], + [ + "▁bushes", + -12.736660957336426 + ], + [ + "flat", + -12.736719131469728 + ], + [ + "▁Worker", + -12.736771583557127 + ], + [ + "▁Shark", + -12.736862182617188 + ], + [ + "▁dotted", + -12.736943244934082 + ], + [ + "▁Recycling", + -12.736998558044434 + ], + [ + "▁remake", + -12.737030029296877 + ], + [ + "ounce", + -12.73703384399414 + ], + [ + "Combine", + -12.737131118774414 + ], + [ + "1-2", + -12.737188339233398 + ], + [ + "▁induce", + -12.737521171569824 + ], + [ + "GH", + -12.737557411193848 + ], + [ + "TOR", + -12.737663269042969 + ], + [ + "Item", + -12.737679481506348 + ], + [ + "▁PST", + -12.737688064575195 + ], + [ + "fl", + -12.737715721130373 + ], + [ + "Wednesday", + -12.737717628479004 + ], + [ + "▁Keyboard", + -12.737934112548828 + ], + [ + "Director", + -12.738151550292969 + ], + [ + "▁skyline", + -12.738158226013184 + ], + [ + "▁Tablet", + -12.738160133361816 + ], + [ + "▁multiply", + -12.738245964050291 + ], + [ + "▁ray", + -12.738332748413086 + ], + [ + "▁Ye", + -12.73840045928955 + ], + [ + "apa", + -12.738432884216309 + ], + [ + "▁Login", + -12.73850154876709 + ], + [ + "▁dodge", + -12.738672256469728 + ], + [ + "▁malpractice", + -12.738697052001951 + ], + [ + "▁maximizing", + -12.73870086669922 + ], + [ + "▁Vladimir", + -12.738763809204102 + ], + [ + "▁Overview", + -12.73877239227295 + ], + [ + "tune", + -12.738794326782228 + ], + [ + "▁fret", + -12.73888874053955 + ], + [ + "▁Transaction", + -12.73889446258545 + ], + [ + "▁assemblies", + -12.738908767700195 + ], + [ + "▁config", + -12.738924980163574 + ], + [ + "covered", + -12.73893928527832 + ], + [ + "▁Steering", + -12.73894214630127 + ], + [ + "Credit", + -12.73908519744873 + ], + [ + "▁Veteran", + -12.73936367034912 + ], + [ + "▁conditional", + -12.739370346069336 + ], + [ + "▁Churchill", + -12.73959732055664 + ], + [ + "▁vine", + -12.73962688446045 + ], + [ + "▁fo", + -12.73963737487793 + ], + [ + "▁microscope", + -12.739886283874512 + ], + [ + "-05", + -12.739967346191406 + ], + [ + "▁Bamboo", + -12.739986419677734 + ], + [ + "tors", + -12.740044593811035 + ], + [ + "▁1957", + -12.74011516571045 + ], + [ + "inclusive", + -12.740153312683104 + ], + [ + "zie", + -12.740182876586914 + ], + [ + "2011", + -12.740206718444824 + ], + [ + "▁Jenkins", + -12.740222930908203 + ], + [ + "▁sewn", + -12.740241050720217 + ], + [ + "▁imbalance", + -12.740251541137695 + ], + [ + "▁envision", + -12.740299224853516 + ], + [ + "▁compute", + 
-12.740401268005373 + ], + [ + "Tags", + -12.740484237670898 + ], + [ + "▁dries", + -12.740485191345217 + ], + [ + "Continue", + -12.74071216583252 + ], + [ + "▁flyers", + -12.740772247314451 + ], + [ + "ush", + -12.740961074829102 + ], + [ + "▁SV", + -12.740983963012695 + ], + [ + "▁contingent", + -12.741031646728516 + ], + [ + "▁har", + -12.741081237792969 + ], + [ + "▁Stunning", + -12.741098403930664 + ], + [ + "Med", + -12.74116039276123 + ], + [ + "▁Fargo", + -12.74121379852295 + ], + [ + "▁pleaded", + -12.741259574890137 + ], + [ + "Lovely", + -12.741537094116213 + ], + [ + "▁Featured", + -12.741596221923828 + ], + [ + "vor", + -12.741698265075684 + ], + [ + "▁kayaking", + -12.741727828979492 + ], + [ + "▁Hugo", + -12.741859436035156 + ], + [ + "▁Saving", + -12.741968154907228 + ], + [ + "▁Brew", + -12.74199104309082 + ], + [ + "ranging", + -12.74203109741211 + ], + [ + "▁Crowd", + -12.742061614990234 + ], + [ + "▁bliss", + -12.742170333862305 + ], + [ + "▁lethal", + -12.742188453674316 + ], + [ + "▁Wai", + -12.742201805114746 + ], + [ + "▁flavored", + -12.74227523803711 + ], + [ + "▁JP", + -12.742327690124512 + ], + [ + "▁Castro", + -12.742344856262209 + ], + [ + "rp", + -12.742402076721191 + ], + [ + "▁Rentals", + -12.742469787597656 + ], + [ + "▁bulky", + -12.742501258850098 + ], + [ + "California", + -12.742517471313477 + ], + [ + "▁Salmon", + -12.742677688598633 + ], + [ + "▁Oven", + -12.74277114868164 + ], + [ + "▁Tate", + -12.74278450012207 + ], + [ + "IES", + -12.742801666259766 + ], + [ + "▁TODAY", + -12.742815017700195 + ], + [ + "▁valleys", + -12.74283218383789 + ], + [ + "▁330", + -12.742871284484863 + ], + [ + "▁capitalism", + -12.742966651916504 + ], + [ + "nin", + -12.743033409118652 + ], + [ + "gin", + -12.743067741394045 + ], + [ + "▁Cement", + -12.743246078491213 + ], + [ + "▁cosmic", + -12.743383407592772 + ], + [ + "▁Bahamas", + -12.743440628051758 + ], + [ + "▁Famous", + -12.743556022644045 + ], + [ + "▁gu", + -12.743646621704102 + ], + [ + "▁Syracuse", + -12.743672370910645 + ], + [ + "▁Bold", + -12.743818283081056 + ], + [ + "holder", + -12.74393081665039 + ], + [ + "▁compass", + -12.744038581848145 + ], + [ + "entry", + -12.744091987609863 + ], + [ + "▁disadvantaged", + -12.744097709655762 + ], + [ + "Lock", + -12.744333267211914 + ], + [ + "▁turmeric", + -12.744544982910156 + ], + [ + "▁competency", + -12.744552612304688 + ], + [ + "▁grouped", + -12.744620323181152 + ], + [ + "▁covenant", + -12.744718551635742 + ], + [ + "Matthew", + -12.744902610778809 + ], + [ + "▁turbines", + -12.744906425476074 + ], + [ + "▁UW", + -12.744986534118652 + ], + [ + "▁chocolates", + -12.744993209838867 + ], + [ + "▁Cleaner", + -12.745047569274902 + ], + [ + "▁Tulsa", + -12.745548248291016 + ], + [ + "▁obedience", + -12.745718002319336 + ], + [ + "▁spaghetti", + -12.745718002319336 + ], + [ + "▁crowns", + -12.745820045471191 + ], + [ + "iga", + -12.745866775512695 + ], + [ + "eight", + -12.74588680267334 + ], + [ + "▁lord", + -12.745899200439451 + ], + [ + "▁garnered", + -12.746026992797852 + ], + [ + "▁Crow", + -12.746135711669922 + ], + [ + "rik", + -12.746185302734377 + ], + [ + "eman", + -12.746191024780272 + ], + [ + "▁infusion", + -12.746221542358398 + ], + [ + "▁Nord", + -12.746437072753906 + ], + [ + "▁greatness", + -12.746535301208496 + ], + [ + "▁Clubs", + -12.74659538269043 + ], + [ + "▁sidebar", + -12.746611595153809 + ], + [ + "MW", + -12.746682167053224 + ], + [ + "▁Wil", + -12.746723175048828 + ], + [ + "Hill", + -12.746963500976562 + ], + [ + "▁Lens", + 
-12.74705410003662 + ], + [ + "▁madness", + -12.747090339660645 + ], + [ + "dir", + -12.747125625610352 + ], + [ + "▁Wilmington", + -12.747137069702148 + ], + [ + "▁reactive", + -12.74717140197754 + ], + [ + "▁behold", + -12.747185707092283 + ], + [ + "▁petals", + -12.747258186340332 + ], + [ + "▁Guaranteed", + -12.747288703918455 + ], + [ + "▁GR", + -12.747300148010254 + ], + [ + "▁MAY", + -12.747300148010254 + ], + [ + "▁Slide", + -12.7474365234375 + ], + [ + "▁Allan", + -12.74748706817627 + ], + [ + "▁Pig", + -12.747499465942385 + ], + [ + "▁NFC", + -12.74750518798828 + ], + [ + "▁Sears", + -12.747529983520508 + ], + [ + "years", + -12.74754524230957 + ], + [ + "▁dude", + -12.747642517089844 + ], + [ + "▁Cutting", + -12.747669219970703 + ], + [ + "▁bred", + -12.747793197631836 + ], + [ + "Os", + -12.74790096282959 + ], + [ + "▁Serbia", + -12.747913360595703 + ], + [ + "Age", + -12.747973442077637 + ], + [ + "▁Technician", + -12.747974395751951 + ], + [ + "▁mastery", + -12.74803638458252 + ], + [ + "▁Pearson", + -12.748037338256836 + ], + [ + "cryptocurrencies", + -12.74808120727539 + ], + [ + "▁uncertainties", + -12.748085021972656 + ], + [ + "▁honoured", + -12.74815273284912 + ], + [ + "▁liners", + -12.748316764831545 + ], + [ + "▁Rolex", + -12.748323440551758 + ], + [ + "iii", + -12.748343467712402 + ], + [ + "cle", + -12.74836540222168 + ], + [ + "▁Broker", + -12.748406410217283 + ], + [ + "▁prefers", + -12.748578071594238 + ], + [ + "▁demons", + -12.7485990524292 + ], + [ + "▁Vita", + -12.748629570007324 + ], + [ + "▁shortcut", + -12.748802185058594 + ], + [ + "Tim", + -12.748826026916504 + ], + [ + "▁diminished", + -12.748845100402832 + ], + [ + "▁una", + -12.74888515472412 + ], + [ + "1:", + -12.748895645141602 + ], + [ + "▁Publications", + -12.749013900756836 + ], + [ + "▁renal", + -12.74908447265625 + ], + [ + "▁priceless", + -12.749164581298828 + ], + [ + "▁Orchard", + -12.749271392822266 + ], + [ + "▁pinned", + -12.74928855895996 + ], + [ + "▁chooses", + -12.749322891235352 + ], + [ + "▁Chandler", + -12.749324798583984 + ], + [ + "▁supplemental", + -12.749469757080078 + ], + [ + "▁adorned", + -12.749515533447266 + ], + [ + "fort", + -12.749645233154297 + ], + [ + "▁Rustic", + -12.749717712402344 + ], + [ + "▁tacos", + -12.750236511230469 + ], + [ + "▁elders", + -12.75032901763916 + ], + [ + "▁casserole", + -12.75042724609375 + ], + [ + "▁Romney", + -12.750442504882812 + ], + [ + "▁pronunciation", + -12.750442504882812 + ], + [ + "ESS", + -12.750479698181152 + ], + [ + "▁Ferry", + -12.750618934631348 + ], + [ + "▁cylinders", + -12.750734329223633 + ], + [ + "▁Behavior", + -12.750844955444336 + ], + [ + "▁Treasure", + -12.750846862792969 + ], + [ + "▁toppings", + -12.750910758972168 + ], + [ + "vil", + -12.750947952270508 + ], + [ + "▁revive", + -12.751025199890137 + ], + [ + "▁Lithuania", + -12.75107192993164 + ], + [ + "▁1910", + -12.75111484527588 + ], + [ + "▁ditch", + -12.751205444335938 + ], + [ + "▁Remodeling", + -12.751269340515137 + ], + [ + "▁Needless", + -12.751319885253906 + ], + [ + "▁scented", + -12.751346588134766 + ], + [ + "ened", + -12.75136375427246 + ], + [ + "▁Walls", + -12.751453399658203 + ], + [ + "legged", + -12.751458168029783 + ], + [ + "Sports", + -12.751465797424316 + ], + [ + "▁sacrifices", + -12.751470565795898 + ], + [ + "nica", + -12.751631736755373 + ], + [ + "amba", + -12.75163459777832 + ], + [ + "▁Deborah", + -12.751665115356444 + ], + [ + "▁Genuine", + -12.751683235168455 + ], + [ + "▁ART", + -12.751741409301758 + ], + [ + "▁4.1", + 
-12.751869201660156 + ], + [ + "▁messenger", + -12.751870155334473 + ], + [ + "▁Gothic", + -12.751914024353027 + ], + [ + "▁biased", + -12.751982688903809 + ], + [ + "▁Byron", + -12.752002716064451 + ], + [ + "government", + -12.752047538757324 + ], + [ + "▁hangs", + -12.752151489257812 + ], + [ + "▁scents", + -12.752164840698242 + ], + [ + "dl", + -12.75221061706543 + ], + [ + "▁rightly", + -12.75232219696045 + ], + [ + "▁specifics", + -12.752338409423828 + ], + [ + "▁simulate", + -12.75246524810791 + ], + [ + "▁fetch", + -12.752642631530762 + ], + [ + "▁Continuing", + -12.752789497375488 + ], + [ + "▁irrespective", + -12.752789497375488 + ], + [ + "▁vodka", + -12.752789497375488 + ], + [ + "▁unbiased", + -12.752798080444336 + ], + [ + "zon", + -12.752997398376465 + ], + [ + "ered", + -12.753029823303224 + ], + [ + "sound", + -12.75306224822998 + ], + [ + "▁Washing", + -12.75322151184082 + ], + [ + "▁Manning", + -12.753352165222168 + ], + [ + "conference", + -12.75338363647461 + ], + [ + "▁107", + -12.753398895263672 + ], + [ + "▁ranged", + -12.753494262695312 + ], + [ + "▁(16", + -12.753572463989258 + ], + [ + "▁Payments", + -12.753652572631836 + ], + [ + "▁Insider", + -12.753780364990234 + ], + [ + "▁maneuver", + -12.753808975219728 + ], + [ + "▁Accessed", + -12.753824234008787 + ], + [ + "dell", + -12.753861427307127 + ], + [ + "▁hypo", + -12.753910064697266 + ], + [ + "rite", + -12.753944396972656 + ], + [ + "▁nostalgic", + -12.753973007202148 + ], + [ + "Lee", + -12.754110336303713 + ], + [ + "▁limb", + -12.754127502441406 + ], + [ + "▁worms", + -12.754246711730955 + ], + [ + "▁Disk", + -12.75425624847412 + ], + [ + "▁Chapman", + -12.754287719726562 + ], + [ + "▁Mack", + -12.754298210144045 + ], + [ + "▁Phantom", + -12.754302024841309 + ], + [ + "▁creep", + -12.754351615905762 + ], + [ + "nam", + -12.754414558410645 + ], + [ + "▁dr", + -12.754587173461914 + ], + [ + "cott", + -12.75472640991211 + ], + [ + "▁boosted", + -12.75472640991211 + ], + [ + "▁Carnegie", + -12.754873275756836 + ], + [ + "▁eve", + -12.75504207611084 + ], + [ + "▁glamour", + -12.75505828857422 + ], + [ + "brown", + -12.755086898803713 + ], + [ + "LG", + -12.755242347717283 + ], + [ + "▁LGBTQ", + -12.75525188446045 + ], + [ + "▁LI", + -12.755289077758787 + ], + [ + "bie", + -12.75531005859375 + ], + [ + "▁wooded", + -12.75532054901123 + ], + [ + "▁Rib", + -12.755341529846191 + ], + [ + "▁Microwave", + -12.75535774230957 + ], + [ + "Allow", + -12.755370140075684 + ], + [ + "▁caters", + -12.75543975830078 + ], + [ + "▁pistol", + -12.75545883178711 + ], + [ + "▁specimen", + -12.75545883178711 + ], + [ + "▁instability", + -12.755480766296388 + ], + [ + "▁Carrier", + -12.755495071411133 + ], + [ + "▁pasture", + -12.755518913269045 + ], + [ + "▁Armenian", + -12.755558967590332 + ], + [ + "▁Shri", + -12.75558376312256 + ], + [ + "▁Affiliate", + -12.755642890930176 + ], + [ + "▁Guess", + -12.755699157714844 + ], + [ + "kha", + -12.755751609802246 + ], + [ + "▁BlackBerry", + -12.755802154541016 + ], + [ + "machine", + -12.755983352661133 + ], + [ + "▁fillers", + -12.756030082702637 + ], + [ + "▁rave", + -12.756070137023926 + ], + [ + "▁$1000", + -12.75610065460205 + ], + [ + "▁Augustine", + -12.756109237670898 + ], + [ + "▁exhibitors", + -12.756230354309082 + ], + [ + "▁12\"", + -12.756260871887209 + ], + [ + "▁Meadows", + -12.756270408630373 + ], + [ + "▁Katy", + -12.756389617919922 + ], + [ + "▁dataset", + -12.7564115524292 + ], + [ + "▁visionary", + -12.756637573242188 + ], + [ + "HE", + -12.756712913513184 + ], + [ + 
"▁constituents", + -12.756830215454102 + ], + [ + "rg", + -12.756847381591797 + ], + [ + "▁geek", + -12.756850242614746 + ], + [ + "▁ext", + -12.756851196289062 + ], + [ + "ias", + -12.756969451904297 + ], + [ + "▁hum", + -12.757001876831056 + ], + [ + "Writing", + -12.757018089294434 + ], + [ + "▁Crushers", + -12.757161140441896 + ], + [ + "▁penetrate", + -12.757254600524902 + ], + [ + "hey", + -12.757262229919434 + ], + [ + "▁Alta", + -12.757280349731444 + ], + [ + "rid", + -12.757442474365234 + ], + [ + "rine", + -12.7574462890625 + ], + [ + "▁Autism", + -12.757457733154297 + ], + [ + "/6", + -12.757513046264648 + ], + [ + "▁clogged", + -12.757546424865724 + ], + [ + "▁nonprofits", + -12.75755786895752 + ], + [ + "bc", + -12.75759983062744 + ], + [ + "▁PLUS", + -12.757684707641602 + ], + [ + "▁Samantha", + -12.757686614990234 + ], + [ + "backs", + -12.757708549499512 + ], + [ + "▁loosely", + -12.758030891418455 + ], + [ + "▁ro", + -12.758034706115724 + ], + [ + "▁LIFE", + -12.758049964904783 + ], + [ + "▁alt", + -12.758075714111328 + ], + [ + "140", + -12.758092880249023 + ], + [ + "film", + -12.758098602294922 + ], + [ + "▁snaps", + -12.758099555969238 + ], + [ + "security", + -12.75823211669922 + ], + [ + "▁Transmission", + -12.758339881896973 + ], + [ + "Heart", + -12.758386611938477 + ], + [ + "▁2\"", + -12.758389472961426 + ], + [ + "▁Cliff", + -12.758399963378906 + ], + [ + "Results", + -12.758519172668455 + ], + [ + "▁upbeat", + -12.758546829223633 + ], + [ + "▁Rabbit", + -12.758567810058594 + ], + [ + "Ball", + -12.758748054504396 + ], + [ + "col", + -12.758767127990724 + ], + [ + "▁Browser", + -12.758848190307615 + ], + [ + "▁Runner", + -12.758893013000488 + ], + [ + "anga", + -12.75894546508789 + ], + [ + "conditioning", + -12.758994102478027 + ], + [ + "▁mashed", + -12.75917625427246 + ], + [ + "▁Pinot", + -12.759221076965332 + ], + [ + "Return", + -12.75922679901123 + ], + [ + "▁Electro", + -12.759262084960938 + ], + [ + "▁Advent", + -12.759406089782717 + ], + [ + "jpeg", + -12.759441375732422 + ], + [ + "▁2:00", + -12.759532928466797 + ], + [ + "▁Romanian", + -12.75966453552246 + ], + [ + "▁Daisy", + -12.759681701660156 + ], + [ + "▁Presentation", + -12.759698867797852 + ], + [ + "▁fabricated", + -12.75973129272461 + ], + [ + "Pi", + -12.759766578674316 + ], + [ + "▁observers", + -12.759790420532228 + ], + [ + "▁darn", + -12.75979232788086 + ], + [ + "▁med", + -12.759819984436035 + ], + [ + "kon", + -12.759825706481934 + ], + [ + "▁mythology", + -12.759934425354004 + ], + [ + "▁investigative", + -12.759971618652344 + ], + [ + "▁specialises", + -12.759984970092772 + ], + [ + "▁demise", + -12.760040283203123 + ], + [ + "NI", + -12.760087013244627 + ], + [ + "▁Halo", + -12.76020050048828 + ], + [ + "izes", + -12.760217666625977 + ], + [ + "2/", + -12.760221481323242 + ], + [ + "▁Rey", + -12.760333061218262 + ], + [ + "▁Bolton", + -12.760416984558104 + ], + [ + "▁strand", + -12.760424613952637 + ], + [ + "▁FUN", + -12.760485649108888 + ], + [ + "Close", + -12.760488510131836 + ], + [ + "Far", + -12.760546684265137 + ], + [ + "▁Barrel", + -12.7605562210083 + ], + [ + "Prepare", + -12.760677337646484 + ], + [ + "Gi", + -12.760680198669434 + ], + [ + "▁coils", + -12.760693550109863 + ], + [ + "▁Kos", + -12.760747909545898 + ], + [ + "Senior", + -12.760757446289062 + ], + [ + "▁Updates", + -12.760783195495604 + ], + [ + "▁retreats", + -12.760787963867188 + ], + [ + "▁Frog", + -12.760933876037598 + ], + [ + "▁toxicity", + -12.76097011566162 + ], + [ + "▁Lori", + -12.761046409606934 
+ ], + [ + "topic", + -12.761099815368652 + ], + [ + "▁5.1", + -12.76112174987793 + ], + [ + "▁antibodies", + -12.761122703552246 + ], + [ + "▁wrongful", + -12.761176109313965 + ], + [ + "▁Curriculum", + -12.761200904846191 + ], + [ + "▁furry", + -12.761228561401367 + ], + [ + "▁Cuisine", + -12.761344909667969 + ], + [ + "▁ants", + -12.761345863342283 + ], + [ + "▁bookstore", + -12.7613525390625 + ], + [ + "▁scratching", + -12.76135540008545 + ], + [ + "▁Navigation", + -12.761374473571776 + ], + [ + "▁Imp", + -12.76142406463623 + ], + [ + "▁browned", + -12.76144313812256 + ], + [ + "▁Reno", + -12.761451721191406 + ], + [ + "▁gradient", + -12.761476516723633 + ], + [ + "▁1,200", + -12.761516571044922 + ], + [ + "▁stint", + -12.7615327835083 + ], + [ + "▁Webb", + -12.761537551879885 + ], + [ + "ically", + -12.761618614196776 + ], + [ + "▁Interesting", + -12.761811256408691 + ], + [ + "▁Vikings", + -12.761885643005373 + ], + [ + "Energy", + -12.761900901794434 + ], + [ + "▁advocating", + -12.761935234069824 + ], + [ + "▁nicer", + -12.76205825805664 + ], + [ + "▁Grain", + -12.76206398010254 + ], + [ + "▁confuse", + -12.762088775634766 + ], + [ + "gb", + -12.762140274047852 + ], + [ + "pic", + -12.762228965759276 + ], + [ + "▁copyrighted", + -12.76224136352539 + ], + [ + "▁fauna", + -12.762307167053224 + ], + [ + "▁brow", + -12.76247787475586 + ], + [ + "▁Devils", + -12.762595176696776 + ], + [ + "▁slogan", + -12.762700080871582 + ], + [ + "▁Von", + -12.762749671936035 + ], + [ + "▁Nash", + -12.762788772583008 + ], + [ + "SQL", + -12.762876510620115 + ], + [ + "UL", + -12.762953758239746 + ], + [ + "dic", + -12.76304817199707 + ], + [ + "▁Shirt", + -12.763062477111816 + ], + [ + "yon", + -12.763063430786133 + ], + [ + "▁Relationship", + -12.763080596923828 + ], + [ + "▁Progressive", + -12.763181686401367 + ], + [ + "▁Funding", + -12.763238906860352 + ], + [ + "▁DP", + -12.76329517364502 + ], + [ + "▁visas", + -12.76337432861328 + ], + [ + "▁adulthood", + -12.763567924499512 + ], + [ + "▁maths", + -12.763639450073242 + ], + [ + "earth", + -12.763689041137695 + ], + [ + "▁Elvis", + -12.76369285583496 + ], + [ + "▁Vehicles", + -12.763734817504885 + ], + [ + "▁grips", + -12.763797760009766 + ], + [ + "▁accord", + -12.763861656188965 + ], + [ + "▁immerse", + -12.76408863067627 + ], + [ + "▁Communist", + -12.76410961151123 + ], + [ + "metric", + -12.764162063598633 + ], + [ + "axis", + -12.76425075531006 + ], + [ + "gia", + -12.764366149902344 + ], + [ + "▁predators", + -12.764535903930664 + ], + [ + "▁marketer", + -12.76453685760498 + ], + [ + "▁insists", + -12.764559745788574 + ], + [ + "aku", + -12.764585494995115 + ], + [ + "▁Vic", + -12.764630317687988 + ], + [ + "▁syllabus", + -12.764687538146973 + ], + [ + "▁havoc", + -12.764691352844238 + ], + [ + "▁vault", + -12.764694213867188 + ], + [ + "▁alright", + -12.764875411987305 + ], + [ + "▁tomb", + -12.764939308166504 + ], + [ + "Pack", + -12.765020370483398 + ], + [ + "▁spit", + -12.765034675598145 + ], + [ + "▁Tape", + -12.76508331298828 + ], + [ + "▁Feng", + -12.765094757080078 + ], + [ + "▁blueprint", + -12.765271186828612 + ], + [ + "▁fluctuations", + -12.765296936035156 + ], + [ + "amine", + -12.765301704406738 + ], + [ + "▁3.6", + -12.76530933380127 + ], + [ + "▁Joey", + -12.765372276306152 + ], + [ + "dependent", + -12.765417098999023 + ], + [ + "▁Initial", + -12.76548194885254 + ], + [ + "▁pioneers", + -12.76576328277588 + ], + [ + "▁Position", + -12.765851020812988 + ], + [ + "▁stunned", + -12.765886306762695 + ], + [ + "▁translating", 
+ -12.76589012145996 + ], + [ + "▁Avon", + -12.765957832336426 + ], + [ + "▁Garmin", + -12.76602268218994 + ], + [ + "▁Barr", + -12.766140937805176 + ], + [ + "▁erected", + -12.766142845153809 + ], + [ + "▁Investments", + -12.766148567199709 + ], + [ + "▁Successful", + -12.766148567199709 + ], + [ + "▁raft", + -12.766167640686035 + ], + [ + "hum", + -12.766216278076172 + ], + [ + "▁7:", + -12.766324996948242 + ], + [ + "▁distortion", + -12.766327857971191 + ], + [ + "▁Huge", + -12.76634120941162 + ], + [ + "▁taxable", + -12.766400337219238 + ], + [ + "▁discourage", + -12.76642608642578 + ], + [ + "▁Fig", + -12.766497611999512 + ], + [ + "▁90-", + -12.766820907592772 + ], + [ + "produced", + -12.766836166381836 + ], + [ + "sweet", + -12.766864776611328 + ], + [ + "▁Scroll", + -12.76689338684082 + ], + [ + "▁abdomen", + -12.767084121704102 + ], + [ + "paper", + -12.767107009887695 + ], + [ + "▁vanities", + -12.767169952392578 + ], + [ + "▁golfers", + -12.76718044281006 + ], + [ + "AU", + -12.767205238342283 + ], + [ + "▁Lil", + -12.767364501953123 + ], + [ + "▁Ethereum", + -12.767393112182615 + ], + [ + "▁Acer", + -12.7675199508667 + ], + [ + "▁Dock", + -12.767580032348633 + ], + [ + "▁bowel", + -12.76767921447754 + ], + [ + "doc", + -12.767733573913574 + ], + [ + "Host", + -12.767769813537598 + ], + [ + "mails", + -12.767873764038086 + ], + [ + "▁gen", + -12.76819133758545 + ], + [ + "▁270", + -12.768243789672852 + ], + [ + "▁derive", + -12.768260955810549 + ], + [ + "GI", + -12.768263816833496 + ], + [ + "▁correspondent", + -12.768324851989746 + ], + [ + "▁Aspen", + -12.768362045288086 + ], + [ + "▁Bai", + -12.76840877532959 + ], + [ + "▁lions", + -12.768428802490234 + ], + [ + "▁Cooperative", + -12.768434524536133 + ], + [ + "▁Trim", + -12.768518447875977 + ], + [ + "▁Gram", + -12.768678665161133 + ], + [ + "▁Teresa", + -12.768714904785156 + ], + [ + "▁pastoral", + -12.768881797790527 + ], + [ + "▁pastries", + -12.768935203552246 + ], + [ + "▁thrift", + -12.768969535827637 + ], + [ + "ija", + -12.769014358520508 + ], + [ + "Similar", + -12.769017219543455 + ], + [ + "▁brace", + -12.769149780273438 + ], + [ + "▁Mick", + -12.769186973571776 + ], + [ + "▁Omar", + -12.769211769104004 + ], + [ + "Built", + -12.769262313842772 + ], + [ + "rai", + -12.769437789916992 + ], + [ + "outdoor", + -12.769445419311523 + ], + [ + "Jet", + -12.769476890563965 + ], + [ + "▁finite", + -12.76951026916504 + ], + [ + "Fa", + -12.769533157348633 + ], + [ + "▁enrichment", + -12.769542694091797 + ], + [ + "▁injustice", + -12.769630432128906 + ], + [ + "nee", + -12.769704818725586 + ], + [ + "▁refunded", + -12.769720077514648 + ], + [ + "▁genome", + -12.76973819732666 + ], + [ + "▁4.2", + -12.769832611083984 + ], + [ + "▁chunky", + -12.769866943359377 + ], + [ + "▁knobs", + -12.76991367340088 + ], + [ + "▁grazing", + -12.77006721496582 + ], + [ + "▁1937", + -12.770078659057615 + ], + [ + "▁APP", + -12.770122528076172 + ], + [ + "▁Nas", + -12.770143508911133 + ], + [ + "▁contested", + -12.770167350769045 + ], + [ + "▁overlay", + -12.770245552062988 + ], + [ + "▁gracious", + -12.7704496383667 + ], + [ + "Science", + -12.770492553710938 + ], + [ + "▁Replace", + -12.770567893981934 + ], + [ + "▁twentieth", + -12.770639419555664 + ], + [ + "▁DSL", + -12.770675659179688 + ], + [ + "▁Bronx", + -12.770796775817873 + ], + [ + "▁SERVICE", + -12.770849227905272 + ], + [ + "▁proliferation", + -12.7708740234375 + ], + [ + "▁CX", + -12.770874977111816 + ], + [ + "▁hacked", + -12.770898818969728 + ], + [ + "itch", + 
-12.77091121673584 + ], + [ + "sham", + -12.770967483520508 + ], + [ + "-06", + -12.77131175994873 + ], + [ + "▁Principles", + -12.771331787109377 + ], + [ + "▁Lt", + -12.771360397338867 + ], + [ + "▁Prison", + -12.771388053894045 + ], + [ + "lett", + -12.771510124206545 + ], + [ + "▁0.6", + -12.771533012390137 + ], + [ + "▁Binary", + -12.77153491973877 + ], + [ + "▁Rosen", + -12.771624565124512 + ], + [ + "grid", + -12.7716646194458 + ], + [ + "▁hone", + -12.771757125854492 + ], + [ + "▁simultaneous", + -12.771780967712402 + ], + [ + "▁utilise", + -12.77179718017578 + ], + [ + "-100", + -12.771800994873049 + ], + [ + "▁rosemary", + -12.771900177001951 + ], + [ + "▁ineffective", + -12.77202606201172 + ], + [ + "Dark", + -12.772037506103516 + ], + [ + "▁stereotypes", + -12.772161483764648 + ], + [ + "▁Cara", + -12.772232055664062 + ], + [ + "ude", + -12.772239685058594 + ], + [ + "▁Dong", + -12.772287368774414 + ], + [ + "▁stranded", + -12.772299766540527 + ], + [ + "das", + -12.772343635559082 + ], + [ + "▁exporting", + -12.772452354431152 + ], + [ + "▁Est", + -12.77254867553711 + ], + [ + "Deep", + -12.772612571716309 + ], + [ + "▁CFO", + -12.772668838500977 + ], + [ + "▁WS", + -12.772705078125 + ], + [ + "▁futuristic", + -12.772804260253906 + ], + [ + "gna", + -12.772912979125977 + ], + [ + "▁225", + -12.77292823791504 + ], + [ + "▁orthopedic", + -12.773100852966309 + ], + [ + "▁Meditation", + -12.773162841796877 + ], + [ + "▁Canberra", + -12.773170471191406 + ], + [ + "▁Berkshire", + -12.77317237854004 + ], + [ + "▁ethos", + -12.77317714691162 + ], + [ + "▁comb", + -12.77319049835205 + ], + [ + "▁Sto", + -12.773265838623049 + ], + [ + "▁Tracking", + -12.773266792297363 + ], + [ + "alis", + -12.77327823638916 + ], + [ + "▁FE", + -12.77336597442627 + ], + [ + "▁Developing", + -12.773399353027344 + ], + [ + "170", + -12.773406982421877 + ], + [ + "▁trucking", + -12.773589134216309 + ], + [ + "▁Views", + -12.77359676361084 + ], + [ + "sim", + -12.773612022399902 + ], + [ + "▁HAS", + -12.773653984069824 + ], + [ + "▁atoms", + -12.77376937866211 + ], + [ + "▁miracles", + -12.773855209350586 + ], + [ + "products", + -12.773886680603027 + ], + [ + "▁Stylish", + -12.773910522460938 + ], + [ + "▁helmets", + -12.773959159851074 + ], + [ + "rous", + -12.773968696594238 + ], + [ + "▁Barber", + -12.773977279663086 + ], + [ + "iq", + -12.7741060256958 + ], + [ + "Keeping", + -12.77413845062256 + ], + [ + "▁stakeholder", + -12.774216651916504 + ], + [ + "▁buffalo", + -12.774311065673828 + ], + [ + "Job", + -12.774335861206056 + ], + [ + "▁oranges", + -12.77441120147705 + ], + [ + "▁postgraduate", + -12.774468421936035 + ], + [ + "▁Mobility", + -12.77451992034912 + ], + [ + "▁280", + -12.77452278137207 + ], + [ + "Stock", + -12.77453327178955 + ], + [ + "▁Hawk", + -12.77453899383545 + ], + [ + "▁Baltic", + -12.774694442749023 + ], + [ + "▁Spy", + -12.774761199951172 + ], + [ + "▁(+", + -12.774826049804688 + ], + [ + "▁photographing", + -12.774857521057127 + ], + [ + "▁draining", + -12.77502155303955 + ], + [ + "▁Dom", + -12.77507209777832 + ], + [ + "songwriter", + -12.775432586669922 + ], + [ + "▁dusty", + -12.775442123413086 + ], + [ + "▁plum", + -12.775497436523438 + ], + [ + "▁declaring", + -12.775517463684082 + ], + [ + "▁pillar", + -12.775532722473145 + ], + [ + "▁imperfections", + -12.775700569152832 + ], + [ + "▁eyebrows", + -12.775731086730955 + ], + [ + "cr", + -12.775741577148438 + ], + [ + "▁HK", + -12.775887489318848 + ], + [ + "▁TT", + -12.775906562805176 + ], + [ + "▁Cher", + 
-12.776058197021484 + ], + [ + "▁treadmill", + -12.776067733764648 + ], + [ + "▁GW", + -12.776084899902344 + ], + [ + "▁unusually", + -12.776122093200684 + ], + [ + "▁64-", + -12.776260375976562 + ], + [ + "ree", + -12.77628231048584 + ], + [ + "▁Decision", + -12.77636432647705 + ], + [ + "Users", + -12.776412963867188 + ], + [ + "▁GitHub", + -12.776558876037598 + ], + [ + "eria", + -12.776616096496582 + ], + [ + "▁detecting", + -12.776665687561035 + ], + [ + "▁nostalgia", + -12.776728630065918 + ], + [ + "▁persuasive", + -12.776728630065918 + ], + [ + "▁REALLY", + -12.77674961090088 + ], + [ + "▁vicious", + -12.776755332946776 + ], + [ + "dining", + -12.77676010131836 + ], + [ + "▁bursting", + -12.776873588562012 + ], + [ + "enko", + -12.776917457580566 + ], + [ + "IST", + -12.776937484741213 + ], + [ + "ense", + -12.777045249938965 + ], + [ + "▁Maj", + -12.77713394165039 + ], + [ + "ander", + -12.777148246765137 + ], + [ + "▁ducks", + -12.777186393737791 + ], + [ + "▁quilting", + -12.77724266052246 + ], + [ + "▁Comforter", + -12.77724838256836 + ], + [ + "▁2-4", + -12.777270317077637 + ], + [ + "▁empowers", + -12.77728271484375 + ], + [ + "SH", + -12.777318000793455 + ], + [ + "▁internships", + -12.777334213256836 + ], + [ + "kat", + -12.777347564697266 + ], + [ + "▁Improve", + -12.777420997619627 + ], + [ + "lead", + -12.777541160583496 + ], + [ + "->", + -12.77757740020752 + ], + [ + "ré", + -12.777685165405272 + ], + [ + "▁benefiting", + -12.777706146240234 + ], + [ + "▁Presbyterian", + -12.777739524841309 + ], + [ + "▁OSHA", + -12.777748107910156 + ], + [ + "need", + -12.77781867980957 + ], + [ + "▁EPS", + -12.777872085571287 + ], + [ + "▁cursor", + -12.777924537658691 + ], + [ + "▁Freeman", + -12.778002738952637 + ], + [ + "Coming", + -12.77805519104004 + ], + [ + "▁marina", + -12.77819538116455 + ], + [ + "cas", + -12.778244018554688 + ], + [ + "▁frog", + -12.778291702270508 + ], + [ + "-09", + -12.778346061706545 + ], + [ + "▁(25", + -12.778359413146973 + ], + [ + "▁Vegan", + -12.778399467468262 + ], + [ + "▁Scientists", + -12.77841091156006 + ], + [ + "▁patterned", + -12.778474807739258 + ], + [ + "▁4\"", + -12.778491020202637 + ], + [ + "▁commissioning", + -12.77860164642334 + ], + [ + "▁tester", + -12.778618812561035 + ], + [ + "mol", + -12.77878189086914 + ], + [ + "border", + -12.779007911682127 + ], + [ + "▁ser", + -12.77903175354004 + ], + [ + "▁drastic", + -12.779129028320312 + ], + [ + "▁narrator", + -12.77915382385254 + ], + [ + "furniture", + -12.779173851013184 + ], + [ + "dad", + -12.779178619384766 + ], + [ + "▁Claus", + -12.779397010803224 + ], + [ + "Sch", + -12.779434204101562 + ], + [ + "▁1-1", + -12.779437065124512 + ], + [ + "▁Condo", + -12.779577255249023 + ], + [ + "▁pollutants", + -12.779595375061035 + ], + [ + "▁motions", + -12.779879570007324 + ], + [ + "▁shaving", + -12.779900550842283 + ], + [ + "▁rationale", + -12.779948234558104 + ], + [ + "better", + -12.780072212219238 + ], + [ + "▁Tales", + -12.780110359191896 + ], + [ + "▁Whitney", + -12.780112266540527 + ], + [ + "▁Increase", + -12.780223846435549 + ], + [ + "▁cocaine", + -12.780373573303224 + ], + [ + "Ultimately", + -12.780470848083496 + ], + [ + "9,", + -12.780482292175291 + ], + [ + "▁deserving", + -12.78049373626709 + ], + [ + "▁shopper", + -12.780658721923828 + ], + [ + "▁Magento", + -12.780743598937988 + ], + [ + "▁complemented", + -12.780801773071287 + ], + [ + "▁0%", + -12.780806541442873 + ], + [ + "NL", + -12.780844688415527 + ], + [ + "dies", + -12.780845642089844 + ], + [ + 
"▁override", + -12.780902862548828 + ], + [ + "▁bundled", + -12.781012535095217 + ], + [ + "▁scripture", + -12.78102970123291 + ], + [ + "▁mil", + -12.781132698059082 + ], + [ + "loc", + -12.781269073486328 + ], + [ + "gue", + -12.781421661376951 + ], + [ + "▁ping", + -12.781468391418455 + ], + [ + "▁PER", + -12.781484603881836 + ], + [ + "▁$17", + -12.781917572021484 + ], + [ + "▁($1", + -12.781981468200684 + ], + [ + "▁Packaging", + -12.782061576843262 + ], + [ + "▁granddaughter", + -12.7821044921875 + ], + [ + "▁facade", + -12.782209396362305 + ], + [ + "▁electron", + -12.782301902770996 + ], + [ + "▁Prop", + -12.782444953918455 + ], + [ + "▁surname", + -12.782474517822266 + ], + [ + "▁richest", + -12.782488822937012 + ], + [ + "▁Lyn", + -12.78251838684082 + ], + [ + "▁Valid", + -12.782678604125977 + ], + [ + "▁peculiar", + -12.782792091369627 + ], + [ + "▁Might", + -12.78280258178711 + ], + [ + "▁Marketplace", + -12.782901763916016 + ], + [ + "▁catered", + -12.782931327819824 + ], + [ + "▁stationery", + -12.78297233581543 + ], + [ + "NN", + -12.783041954040527 + ], + [ + "▁Alloy", + -12.783195495605469 + ], + [ + "Section", + -12.78325080871582 + ], + [ + "▁Mitch", + -12.78328800201416 + ], + [ + "▁3.3", + -12.78329372406006 + ], + [ + "▁motives", + -12.783294677734377 + ], + [ + "CAN", + -12.783379554748535 + ], + [ + "lc", + -12.783401489257812 + ], + [ + "▁Liam", + -12.783479690551758 + ], + [ + "otic", + -12.783506393432615 + ], + [ + "▁1965", + -12.783699989318848 + ], + [ + "▁Mens", + -12.78381633758545 + ], + [ + "▁Shelter", + -12.783857345581056 + ], + [ + "agi", + -12.784011840820312 + ], + [ + "▁enticing", + -12.784024238586426 + ], + [ + "▁archival", + -12.784027099609377 + ], + [ + "▁Doesn", + -12.78406047821045 + ], + [ + "Board", + -12.784208297729492 + ], + [ + "Night", + -12.784377098083496 + ], + [ + "▁Interestingly", + -12.784520149230955 + ], + [ + "▁boosts", + -12.784603118896484 + ], + [ + "▁solicitor", + -12.784960746765137 + ], + [ + "▁dolphins", + -12.785009384155272 + ], + [ + "▁mutations", + -12.785110473632812 + ], + [ + "▁Halifax", + -12.78524398803711 + ], + [ + "▁influenza", + -12.78524398803711 + ], + [ + "▁transformative", + -12.785306930541992 + ], + [ + "▁8%", + -12.78551197052002 + ], + [ + "▁teammate", + -12.785516738891602 + ], + [ + "▁Maha", + -12.785521507263184 + ], + [ + "▁heartbeat", + -12.78554344177246 + ], + [ + "▁Taxi", + -12.785680770874023 + ], + [ + "IG", + -12.78574562072754 + ], + [ + "▁economist", + -12.785911560058594 + ], + [ + "▁Trader", + -12.785972595214844 + ], + [ + "▁0.8", + -12.78600025177002 + ], + [ + "▁Minute", + -12.786093711853027 + ], + [ + "▁Hang", + -12.786276817321776 + ], + [ + "▁Lives", + -12.786365509033203 + ], + [ + "▁bounty", + -12.786466598510742 + ], + [ + "▁experiential", + -12.786466598510742 + ], + [ + "▁bytes", + -12.78651237487793 + ], + [ + "shift", + -12.786520957946776 + ], + [ + "▁Hartford", + -12.78652572631836 + ], + [ + "▁necklaces", + -12.786590576171877 + ], + [ + "▁hinges", + -12.786606788635254 + ], + [ + "UV", + -12.78662109375 + ], + [ + "▁seas", + -12.786650657653809 + ], + [ + "▁expresses", + -12.786669731140137 + ], + [ + "British", + -12.78667449951172 + ], + [ + "▁engagements", + -12.786736488342283 + ], + [ + "▁wineries", + -12.78676414489746 + ], + [ + "eli", + -12.78677463531494 + ], + [ + "▁Dairy", + -12.786914825439451 + ], + [ + "▁bestselling", + -12.787156105041504 + ], + [ + "▁Habitat", + -12.787209510803224 + ], + [ + "▁Assisted", + -12.78724765777588 + ], + [ + "▁Seasons", + 
-12.787251472473145 + ], + [ + "▁Secondly", + -12.787270545959473 + ], + [ + "bill", + -12.787328720092772 + ], + [ + "Mid", + -12.787420272827148 + ], + [ + "▁forwarded", + -12.787449836730955 + ], + [ + "▁Dancing", + -12.787458419799805 + ], + [ + "▁tapped", + -12.787490844726562 + ], + [ + "▁Acrylic", + -12.787506103515623 + ], + [ + "▁116", + -12.787628173828123 + ], + [ + "▁mal", + -12.787686347961426 + ], + [ + "▁translucent", + -12.787692070007324 + ], + [ + "▁cleanliness", + -12.787762641906738 + ], + [ + "▁viability", + -12.787771224975586 + ], + [ + "▁documentaries", + -12.787779808044434 + ], + [ + "▁detectors", + -12.7879056930542 + ], + [ + "▁Mala", + -12.788290023803713 + ], + [ + "▁congratulate", + -12.788314819335938 + ], + [ + "borough", + -12.788331985473633 + ], + [ + "▁intrinsic", + -12.78859519958496 + ], + [ + "▁85%", + -12.788599014282228 + ], + [ + "cloud", + -12.788796424865724 + ], + [ + "▁dissolve", + -12.78902530670166 + ], + [ + "▁LT", + -12.789050102233888 + ], + [ + "▁ceramics", + -12.789057731628418 + ], + [ + "▁iteration", + -12.7891206741333 + ], + [ + "category", + -12.789140701293944 + ], + [ + "▁restrictive", + -12.789162635803224 + ], + [ + "▁fuzzy", + -12.789287567138672 + ], + [ + "▁yrs", + -12.789321899414062 + ], + [ + "100%", + -12.789673805236816 + ], + [ + "▁florist", + -12.78968906402588 + ], + [ + "▁Organ", + -12.789780616760254 + ], + [ + "once", + -12.789856910705566 + ], + [ + "▁Dorothy", + -12.789886474609377 + ], + [ + "▁Chin", + -12.790064811706545 + ], + [ + "▁tended", + -12.790091514587402 + ], + [ + "▁holdings", + -12.790103912353516 + ], + [ + "▁engraving", + -12.790143013000488 + ], + [ + "mun", + -12.79017734527588 + ], + [ + "▁sibling", + -12.790205955505373 + ], + [ + "▁machining", + -12.79022979736328 + ], + [ + "▁sustaining", + -12.790239334106444 + ], + [ + "▁modeled", + -12.790270805358888 + ], + [ + "▁yang", + -12.790360450744627 + ], + [ + "Say", + -12.790382385253906 + ], + [ + "painted", + -12.790452003479004 + ], + [ + "▁pods", + -12.790641784667969 + ], + [ + "cream", + -12.790715217590332 + ], + [ + "▁Arrow", + -12.790746688842772 + ], + [ + "▁Specialty", + -12.790756225585938 + ], + [ + "▁Nina", + -12.790767669677734 + ], + [ + "▁Asus", + -12.79082202911377 + ], + [ + "▁(@", + -12.790913581848145 + ], + [ + "▁impactful", + -12.79095458984375 + ], + [ + "▁NGO", + -12.79112148284912 + ], + [ + "Server", + -12.791215896606444 + ], + [ + "▁Tunnel", + -12.791391372680664 + ], + [ + "▁Trent", + -12.791489601135254 + ], + [ + "published", + -12.791631698608398 + ], + [ + "▁placebo", + -12.791641235351562 + ], + [ + "▁disturbed", + -12.7916841506958 + ], + [ + "▁bureau", + -12.79168701171875 + ], + [ + "▁Abbott", + -12.791844367980955 + ], + [ + "▁Kiss", + -12.791885375976562 + ], + [ + "▁Stones", + -12.791889190673828 + ], + [ + "▁wa", + -12.791977882385254 + ], + [ + "▁demon", + -12.79206085205078 + ], + [ + "▁Epson", + -12.79209327697754 + ], + [ + "stroke", + -12.792096138000488 + ], + [ + "▁allied", + -12.7923583984375 + ], + [ + "Roll", + -12.79240894317627 + ], + [ + "999", + -12.79244327545166 + ], + [ + "▁ja", + -12.792466163635254 + ], + [ + "▁slack", + -12.792552947998049 + ], + [ + "▁persistence", + -12.792610168457031 + ], + [ + "▁Substitution", + -12.792691230773926 + ], + [ + "▁genetically", + -12.792723655700684 + ], + [ + "▁Fry", + -12.792800903320312 + ], + [ + "asa", + -12.79286766052246 + ], + [ + "▁guru", + -12.792899131774902 + ], + [ + "▁Vent", + -12.79294204711914 + ], + [ + "ancy", + 
-12.792963027954102 + ], + [ + "Rather", + -12.79297161102295 + ], + [ + "▁Worship", + -12.792975425720217 + ], + [ + "lio", + -12.793015480041504 + ], + [ + "sman", + -12.79334831237793 + ], + [ + "▁Refrigerator", + -12.79335594177246 + ], + [ + "DVD", + -12.793371200561523 + ], + [ + "-07", + -12.79340648651123 + ], + [ + "▁Newark", + -12.793461799621582 + ], + [ + "▁loosen", + -12.793474197387695 + ], + [ + "▁puree", + -12.793498039245604 + ], + [ + "▁moderately", + -12.793752670288086 + ], + [ + "▁Cookie", + -12.7938232421875 + ], + [ + "▁doubling", + -12.79383373260498 + ], + [ + "▁Qui", + -12.793973922729492 + ], + [ + "▁Banana", + -12.794031143188477 + ], + [ + "▁sch", + -12.79409122467041 + ], + [ + "William", + -12.794150352478027 + ], + [ + "▁Buzz", + -12.794230461120604 + ], + [ + "Published", + -12.79429817199707 + ], + [ + "teen", + -12.794407844543455 + ], + [ + "▁Generic", + -12.79442310333252 + ], + [ + "▁1\"", + -12.794437408447266 + ], + [ + "▁Conversion", + -12.794499397277832 + ], + [ + "▁curator", + -12.794512748718262 + ], + [ + "stay", + -12.79473114013672 + ], + [ + "▁molds", + -12.794891357421877 + ], + [ + "▁Violence", + -12.794917106628418 + ], + [ + "▁accusations", + -12.79495906829834 + ], + [ + "tions", + -12.794960975646973 + ], + [ + "▁authoritative", + -12.795084953308104 + ], + [ + "▁tummy", + -12.795101165771484 + ], + [ + "▁Cadillac", + -12.795138359069824 + ], + [ + "▁Sunrise", + -12.795207023620604 + ], + [ + "▁seizures", + -12.795217514038086 + ], + [ + "▁Trevor", + -12.795268058776855 + ], + [ + "phi", + -12.795512199401855 + ], + [ + "▁caption", + -12.79556941986084 + ], + [ + "▁Deco", + -12.795698165893556 + ], + [ + "▁bud", + -12.79570198059082 + ], + [ + "▁Horror", + -12.795751571655272 + ], + [ + "▁flaw", + -12.795876502990724 + ], + [ + "▁brilliantly", + -12.7959623336792 + ], + [ + "▁dean", + -12.796001434326172 + ], + [ + "▁prescriptions", + -12.79615879058838 + ], + [ + "▁conflicting", + -12.796212196350098 + ], + [ + "▁Lyon", + -12.796304702758787 + ], + [ + "▁crises", + -12.796305656433104 + ], + [ + "▁violating", + -12.79631805419922 + ], + [ + "▁steamed", + -12.796363830566406 + ], + [ + "▁warehouses", + -12.796538352966309 + ], + [ + "/20", + -12.796563148498535 + ], + [ + "▁palms", + -12.79690647125244 + ], + [ + "France", + -12.796984672546388 + ], + [ + "▁sober", + -12.796993255615234 + ], + [ + "▁nightly", + -12.797004699707031 + ], + [ + "▁Sleeve", + -12.79704475402832 + ], + [ + "▁hex", + -12.797063827514648 + ], + [ + "▁Promise", + -12.797249794006348 + ], + [ + "▁3:00", + -12.797271728515623 + ], + [ + "▁stew", + -12.797334671020508 + ], + [ + "▁Chambers", + -12.797454833984377 + ], + [ + "▁Easily", + -12.79746437072754 + ], + [ + "▁contour", + -12.797472953796388 + ], + [ + "▁Valve", + -12.797503471374512 + ], + [ + "▁pint", + -12.797837257385254 + ], + [ + "▁Bags", + -12.797887802124023 + ], + [ + "Walk", + -12.797934532165527 + ], + [ + "▁newcomers", + -12.79806423187256 + ], + [ + "▁supper", + -12.798065185546877 + ], + [ + "▁Mohammed", + -12.798069953918455 + ], + [ + "dark", + -12.798084259033203 + ], + [ + "▁2,500", + -12.798093795776367 + ], + [ + "Har", + -12.798118591308594 + ], + [ + "Craft", + -12.798126220703123 + ], + [ + "▁canyon", + -12.79813003540039 + ], + [ + "▁Insulation", + -12.79820442199707 + ], + [ + "▁Fern", + -12.7982177734375 + ], + [ + "dam", + -12.79825210571289 + ], + [ + "▁radically", + -12.798343658447266 + ], + [ + "▁Punch", + -12.798377990722656 + ], + [ + "▁shouting", + -12.79839038848877 + ], 
+ [ + "almost", + -12.798443794250488 + ], + [ + "▁0.9", + -12.798518180847168 + ], + [ + "▁Innovative", + -12.798669815063477 + ], + [ + "▁diagnostics", + -12.798694610595703 + ], + [ + "▁Developers", + -12.79872703552246 + ], + [ + "▁vape", + -12.79872703552246 + ], + [ + "▁disastrous", + -12.79877471923828 + ], + [ + "▁impending", + -12.798786163330078 + ], + [ + "▁Siri", + -12.798799514770508 + ], + [ + "▁refill", + -12.798828125 + ], + [ + "▁Rocks", + -12.798898696899414 + ], + [ + "plane", + -12.798994064331056 + ], + [ + "▁programmable", + -12.799036026000977 + ], + [ + "▁Peer", + -12.799102783203123 + ], + [ + "Apr", + -12.799137115478516 + ], + [ + "Better", + -12.799398422241213 + ], + [ + "▁Magnetic", + -12.799445152282717 + ], + [ + "ifies", + -12.799479484558104 + ], + [ + "▁Colonel", + -12.799527168273926 + ], + [ + "▁Mich", + -12.799643516540527 + ], + [ + "▁Bride", + -12.799692153930664 + ], + [ + "▁forge", + -12.799903869628906 + ], + [ + "▁Beau", + -12.799907684326172 + ], + [ + "▁Intelligent", + -12.799945831298828 + ], + [ + "▁asparagus", + -12.800013542175291 + ], + [ + "▁stewardship", + -12.800021171569824 + ], + [ + "▁QB", + -12.80002784729004 + ], + [ + "▁expectancy", + -12.800097465515137 + ], + [ + "▁Rama", + -12.800298690795898 + ], + [ + "▁ensuite", + -12.800302505493164 + ], + [ + "▁Beds", + -12.800374031066896 + ], + [ + "▁separates", + -12.80038356781006 + ], + [ + "▁GF", + -12.800447463989258 + ], + [ + "figure", + -12.800466537475586 + ], + [ + "▁backend", + -12.800623893737791 + ], + [ + "▁Northwestern", + -12.800721168518066 + ], + [ + "▁Simmons", + -12.800795555114746 + ], + [ + "▁stencil", + -12.800901412963867 + ], + [ + "▁Newman", + -12.800958633422852 + ], + [ + "▁Turtle", + -12.80100440979004 + ], + [ + "▁Cindy", + -12.801034927368164 + ], + [ + "▁sympathetic", + -12.801254272460938 + ], + [ + "▁finalist", + -12.801384925842283 + ], + [ + "▁Slovakia", + -12.801572799682615 + ], + [ + "▁layering", + -12.801678657531738 + ], + [ + "Transfer", + -12.8016996383667 + ], + [ + "mc", + -12.801733016967772 + ], + [ + "rol", + -12.80174732208252 + ], + [ + "▁goalkeeper", + -12.801756858825684 + ], + [ + "▁Mul", + -12.80176830291748 + ], + [ + "Preheat", + -12.801827430725098 + ], + [ + "▁Acts", + -12.801895141601562 + ], + [ + "Final", + -12.801918029785156 + ], + [ + "▁Kir", + -12.801924705505373 + ], + [ + "▁observer", + -12.801931381225586 + ], + [ + "▁Ep", + -12.802050590515137 + ], + [ + "impact", + -12.802088737487791 + ], + [ + "▁Carmel", + -12.802122116088867 + ], + [ + "itt", + -12.802361488342283 + ], + [ + "▁demos", + -12.802380561828612 + ], + [ + "▁1:00", + -12.802385330200195 + ], + [ + "dc", + -12.802414894104004 + ], + [ + "vers", + -12.802492141723633 + ], + [ + "▁DJs", + -12.802549362182615 + ], + [ + "RN", + -12.802660942077637 + ], + [ + "▁Broncos", + -12.802694320678713 + ], + [ + "Hub", + -12.802720069885254 + ], + [ + "▁Outlet", + -12.80286693572998 + ], + [ + "▁instinct", + -12.802933692932127 + ], + [ + "▁responders", + -12.80296516418457 + ], + [ + "▁envelopes", + -12.802989959716797 + ], + [ + "ete", + -12.803014755249023 + ], + [ + "▁freshwater", + -12.803041458129885 + ], + [ + "▁warmed", + -12.803128242492676 + ], + [ + "Neill", + -12.803324699401855 + ], + [ + "Pan", + -12.803345680236816 + ], + [ + "▁clinically", + -12.80335807800293 + ], + [ + "▁paycheck", + -12.803370475769045 + ], + [ + "▁accelerating", + -12.80341339111328 + ], + [ + "FREE", + -12.803424835205078 + ], + [ + "▁Pic", + -12.80344295501709 + ], + [ + "▁Snake", 
+ -12.803478240966797 + ], + [ + "▁Coat", + -12.803511619567873 + ], + [ + "▁Fried", + -12.803518295288086 + ], + [ + "▁Stella", + -12.803547859191896 + ], + [ + "▁nicotine", + -12.803740501403809 + ], + [ + "famous", + -12.803751945495604 + ], + [ + "▁interstate", + -12.80386734008789 + ], + [ + "▁tearing", + -12.803911209106444 + ], + [ + "grain", + -12.804037094116213 + ], + [ + "▁KM", + -12.804052352905272 + ], + [ + "▁utensils", + -12.804068565368652 + ], + [ + "▁Oslo", + -12.804226875305176 + ], + [ + "▁folds", + -12.804268836975098 + ], + [ + "▁Torah", + -12.80429458618164 + ], + [ + "ear", + -12.804312705993652 + ], + [ + "available", + -12.804344177246094 + ], + [ + "▁Seminar", + -12.804367065429688 + ], + [ + "thy", + -12.804404258728027 + ], + [ + "▁raining", + -12.804434776306152 + ], + [ + "▁powdered", + -12.804466247558594 + ], + [ + "gra", + -12.804475784301758 + ], + [ + "▁$45", + -12.804498672485352 + ], + [ + "▁isolate", + -12.804569244384766 + ], + [ + "▁260", + -12.804611206054688 + ], + [ + "erson", + -12.804654121398926 + ], + [ + "ess", + -12.804658889770508 + ], + [ + "▁inks", + -12.804659843444824 + ], + [ + "Used", + -12.804693222045898 + ], + [ + "sti", + -12.804807662963867 + ], + [ + "cutting", + -12.804828643798828 + ], + [ + "▁Startup", + -12.80486297607422 + ], + [ + "▁Necklace", + -12.804987907409668 + ], + [ + "▁clasp", + -12.804987907409668 + ], + [ + "▁naval", + -12.805063247680664 + ], + [ + "added", + -12.80517292022705 + ], + [ + "▁Ecuador", + -12.805188179016112 + ], + [ + "▁cling", + -12.805293083190918 + ], + [ + "▁pear", + -12.805313110351562 + ], + [ + "▁Victory", + -12.805418968200684 + ], + [ + "▁Vas", + -12.805524826049805 + ], + [ + "IB", + -12.805584907531738 + ], + [ + "▁Bergen", + -12.805662155151367 + ], + [ + "▁1880", + -12.80573844909668 + ], + [ + "▁cue", + -12.805770874023438 + ], + [ + "5-", + -12.805872917175291 + ], + [ + "[/", + -12.805932998657228 + ], + [ + "▁Mitsubishi", + -12.805956840515137 + ], + [ + "▁grad", + -12.80599308013916 + ], + [ + "▁2.7", + -12.80605125427246 + ], + [ + "Kim", + -12.806110382080078 + ], + [ + "▁chi", + -12.80613899230957 + ], + [ + "▁cheering", + -12.80616569519043 + ], + [ + "▁incremental", + -12.806222915649414 + ], + [ + "▁graffiti", + -12.806233406066896 + ], + [ + "▁NASCAR", + -12.806235313415527 + ], + [ + "▁Gujarat", + -12.806471824645996 + ], + [ + "▁organisational", + -12.806478500366213 + ], + [ + "▁consciously", + -12.80673122406006 + ], + [ + "ilo", + -12.806800842285156 + ], + [ + "▁1/4\"", + -12.8069486618042 + ], + [ + "▁mugs", + -12.807196617126465 + ], + [ + "▁aqua", + -12.80731201171875 + ], + [ + "gui", + -12.80741024017334 + ], + [ + "280", + -12.807466506958008 + ], + [ + "▁turmoil", + -12.807480812072754 + ], + [ + "▁activism", + -12.807503700256348 + ], + [ + "▁2.8", + -12.80751895904541 + ], + [ + "▁Kol", + -12.807538032531738 + ], + [ + "▁annoyed", + -12.807644844055176 + ], + [ + "▁MAN", + -12.80768585205078 + ], + [ + "▁shook", + -12.807751655578612 + ], + [ + "/2017", + -12.807777404785156 + ], + [ + "▁Noir", + -12.807859420776367 + ], + [ + "Pen", + -12.807927131652832 + ], + [ + "▁11:30", + -12.807933807373049 + ], + [ + "▁Wheat", + -12.80807876586914 + ], + [ + "aan", + -12.808090209960938 + ], + [ + "▁hover", + -12.808122634887695 + ], + [ + "▁yr", + -12.80823802947998 + ], + [ + "▁Cure", + -12.808518409729004 + ], + [ + "▁Jaipur", + -12.808752059936523 + ], + [ + "▁thigh", + -12.80878734588623 + ], + [ + "▁recruiters", + -12.80880355834961 + ], + [ + "▁settlers", + 
-12.808813095092772 + ], + [ + "▁neurons", + -12.80890941619873 + ], + [ + "DOT", + -12.809089660644531 + ], + [ + "Register", + -12.809096336364746 + ], + [ + "zar", + -12.80911350250244 + ], + [ + "lk", + -12.809160232543944 + ], + [ + "bike", + -12.809226036071776 + ], + [ + "▁disks", + -12.80947971343994 + ], + [ + "phy", + -12.809885025024414 + ], + [ + "▁Bundle", + -12.80989933013916 + ], + [ + "▁readable", + -12.810012817382812 + ], + [ + "▁sam", + -12.81003189086914 + ], + [ + "▁Singles", + -12.810091972351074 + ], + [ + "KO", + -12.810104370117188 + ], + [ + "▁Av", + -12.810166358947754 + ], + [ + "▁diligently", + -12.810199737548828 + ], + [ + "mor", + -12.810221672058104 + ], + [ + "▁Wat", + -12.810260772705078 + ], + [ + "lem", + -12.810335159301758 + ], + [ + "▁skins", + -12.810359001159668 + ], + [ + "▁Intermediate", + -12.810419082641602 + ], + [ + "▁plumbers", + -12.8104887008667 + ], + [ + "▁Nate", + -12.8107271194458 + ], + [ + "Title", + -12.810894966125488 + ], + [ + "▁webinars", + -12.810918807983398 + ], + [ + "▁offenders", + -12.811052322387695 + ], + [ + "▁SMEs", + -12.811095237731934 + ], + [ + "ines", + -12.811254501342772 + ], + [ + "▁MasterCard", + -12.811287879943848 + ], + [ + "▁Visitor", + -12.811321258544922 + ], + [ + "Rev", + -12.811365127563477 + ], + [ + "▁redevelopment", + -12.81136703491211 + ], + [ + "▁surreal", + -12.811397552490234 + ], + [ + "▁welded", + -12.811495780944824 + ], + [ + "▁bleach", + -12.811503410339355 + ], + [ + "▁var", + -12.811588287353516 + ], + [ + "URL", + -12.811625480651855 + ], + [ + "▁complements", + -12.8118314743042 + ], + [ + "▁attaching", + -12.811972618103027 + ], + [ + "Joe", + -12.812005996704102 + ], + [ + "▁PNG", + -12.812009811401367 + ], + [ + "Strong", + -12.812030792236328 + ], + [ + "▁eighteen", + -12.812142372131348 + ], + [ + "song", + -12.812192916870115 + ], + [ + "▁pavilion", + -12.812226295471191 + ], + [ + "▁Transformation", + -12.812426567077637 + ], + [ + "▁Claims", + -12.812427520751951 + ], + [ + "▁Ministries", + -12.812490463256836 + ], + [ + "▁thieves", + -12.812490463256836 + ], + [ + "▁prudent", + -12.812503814697266 + ], + [ + "▁labelled", + -12.812813758850098 + ], + [ + "Pet", + -12.81302261352539 + ], + [ + "bow", + -12.813057899475098 + ], + [ + "Install", + -12.813066482543944 + ], + [ + "ook", + -12.813076972961426 + ], + [ + "▁Grandma", + -12.81312084197998 + ], + [ + "▁ashes", + -12.813199996948242 + ], + [ + "cash", + -12.813237190246582 + ], + [ + "▁aftermarket", + -12.813265800476074 + ], + [ + "▁sorrow", + -12.813307762145996 + ], + [ + "▁ascertain", + -12.813345909118652 + ], + [ + "▁recorder", + -12.813387870788574 + ], + [ + "▁0.4", + -12.813408851623535 + ], + [ + "▁hitch", + -12.813522338867188 + ], + [ + "▁JS", + -12.813558578491213 + ], + [ + "▁courageous", + -12.813558578491213 + ], + [ + "▁outlining", + -12.813634872436523 + ], + [ + "▁placements", + -12.8136625289917 + ], + [ + "Parents", + -12.81370449066162 + ], + [ + "▁peacefully", + -12.813814163208008 + ], + [ + "▁RD", + -12.813905715942385 + ], + [ + "▁Jared", + -12.813992500305176 + ], + [ + "▁importing", + -12.814087867736816 + ], + [ + "▁confrontation", + -12.814132690429688 + ], + [ + "▁2020,", + -12.814133644104004 + ], + [ + "AW", + -12.814197540283203 + ], + [ + "▁Mutual", + -12.814268112182615 + ], + [ + "▁handler", + -12.814412117004396 + ], + [ + "▁occupants", + -12.81441593170166 + ], + [ + "fc", + -12.814424514770508 + ], + [ + "▁formations", + -12.814499855041504 + ], + [ + "▁spills", + 
-12.814533233642578 + ], + [ + "▁litre", + -12.81471347808838 + ], + [ + "Pad", + -12.814715385437012 + ], + [ + "▁threatens", + -12.814723014831545 + ], + [ + "▁Hydraulic", + -12.814884185791016 + ], + [ + "Camp", + -12.814970970153809 + ], + [ + "▁tram", + -12.81500244140625 + ], + [ + "processing", + -12.81508731842041 + ], + [ + "▁pony", + -12.815089225769045 + ], + [ + "▁whimsical", + -12.815089225769045 + ], + [ + "▁Humanities", + -12.815117835998535 + ], + [ + "▁respectable", + -12.815174102783203 + ], + [ + "▁physiological", + -12.815192222595217 + ], + [ + "▁FAA", + -12.815220832824709 + ], + [ + "▁jo", + -12.815361976623535 + ], + [ + "▁nourishing", + -12.815481185913086 + ], + [ + "nier", + -12.815587043762209 + ], + [ + "▁Combat", + -12.815630912780762 + ], + [ + "Dream", + -12.815807342529297 + ], + [ + "Sweet", + -12.815921783447266 + ], + [ + "▁Tibet", + -12.816164016723633 + ], + [ + "vir", + -12.816184997558594 + ], + [ + "▁Worcester", + -12.81626319885254 + ], + [ + "▁ivory", + -12.816267013549805 + ], + [ + "▁Pentagon", + -12.81627082824707 + ], + [ + "▁Zurich", + -12.81628131866455 + ], + [ + "playing", + -12.81630516052246 + ], + [ + "▁Boards", + -12.816398620605469 + ], + [ + "rez", + -12.81642246246338 + ], + [ + "nig", + -12.816434860229492 + ], + [ + "lessness", + -12.81644344329834 + ], + [ + "HI", + -12.81650733947754 + ], + [ + "▁Rap", + -12.81651210784912 + ], + [ + "▁spun", + -12.816548347473145 + ], + [ + "▁Abuse", + -12.81669807434082 + ], + [ + "▁Vince", + -12.8167724609375 + ], + [ + "▁intervene", + -12.816936492919922 + ], + [ + "▁scatter", + -12.81706714630127 + ], + [ + "▁descendants", + -12.81711769104004 + ], + [ + "▁sidewalks", + -12.817384719848633 + ], + [ + "▁Temp", + -12.817404747009276 + ], + [ + "▁Welfare", + -12.81740951538086 + ], + [ + "above", + -12.817421913146973 + ], + [ + "▁Cod", + -12.817514419555664 + ], + [ + "▁acknowledging", + -12.817530632019045 + ], + [ + "▁Served", + -12.81755256652832 + ], + [ + "▁patiently", + -12.817585945129396 + ], + [ + "▁Chic", + -12.81762409210205 + ], + [ + "▁stiffness", + -12.817667007446287 + ], + [ + "▁summers", + -12.817684173583984 + ], + [ + "▁sequencing", + -12.817723274230955 + ], + [ + "lah", + -12.817774772644045 + ], + [ + "farm", + -12.817805290222168 + ], + [ + "▁bullets", + -12.81790828704834 + ], + [ + "▁Shelf", + -12.81792449951172 + ], + [ + "▁remix", + -12.817930221557615 + ], + [ + "ike", + -12.81808853149414 + ], + [ + "▁wrought", + -12.818158149719238 + ], + [ + "▁Consumers", + -12.818161964416504 + ], + [ + "Technology", + -12.81851863861084 + ], + [ + "Chinese", + -12.818520545959473 + ], + [ + "▁Rank", + -12.81859016418457 + ], + [ + "▁incumbent", + -12.818594932556152 + ], + [ + "▁didnt", + -12.818693161010742 + ], + [ + "▁ur", + -12.818713188171388 + ], + [ + "▁inject", + -12.818735122680664 + ], + [ + "▁humility", + -12.8187894821167 + ], + [ + "▁teak", + -12.818793296813965 + ], + [ + "▁levy", + -12.818818092346191 + ], + [ + "▁stare", + -12.818939208984377 + ], + [ + "WR", + -12.819071769714355 + ], + [ + "▁Kali", + -12.819072723388672 + ], + [ + "▁Recipes", + -12.819398880004885 + ], + [ + "▁sprouts", + -12.81949520111084 + ], + [ + "▁Tr", + -12.819561004638672 + ], + [ + "Daniel", + -12.819748878479004 + ], + [ + "▁BD", + -12.819806098937988 + ], + [ + "abi", + -12.819843292236328 + ], + [ + "▁trench", + -12.819989204406738 + ], + [ + "▁tho", + -12.820024490356444 + ], + [ + "▁£6", + -12.820093154907228 + ], + [ + "Original", + -12.82010269165039 + ], + [ + "▁UL", + 
-12.82021427154541 + ], + [ + "▁Woo", + -12.82027816772461 + ], + [ + "▁plateau", + -12.820307731628418 + ], + [ + "▁witnessing", + -12.820310592651367 + ], + [ + "▁Yourself", + -12.82044792175293 + ], + [ + "▁disciplined", + -12.820631980895996 + ], + [ + "▁rebate", + -12.820649147033691 + ], + [ + "▁Oval", + -12.820743560791016 + ], + [ + "▁Bermuda", + -12.82091236114502 + ], + [ + "▁Thousands", + -12.820990562438965 + ], + [ + "▁Visiting", + -12.821036338806152 + ], + [ + "▁pharmacies", + -12.821318626403809 + ], + [ + "▁veil", + -12.82137393951416 + ], + [ + "uga", + -12.821438789367676 + ], + [ + "▁soothe", + -12.821468353271484 + ], + [ + "▁Alps", + -12.821477890014648 + ], + [ + "▁propel", + -12.821524620056152 + ], + [ + "follow", + -12.821603775024414 + ], + [ + "▁lobbying", + -12.821697235107422 + ], + [ + "▁envisioned", + -12.821746826171877 + ], + [ + "UR", + -12.821828842163086 + ], + [ + "▁NB", + -12.821942329406738 + ], + [ + "Vision", + -12.821945190429688 + ], + [ + "▁Alien", + -12.821969985961914 + ], + [ + "▁Dryer", + -12.821975708007812 + ], + [ + "▁washable", + -12.822016716003418 + ], + [ + "trans", + -12.822111129760742 + ], + [ + "▁Flint", + -12.822120666503906 + ], + [ + "▁Saskatchewan", + -12.822216987609863 + ], + [ + "▁Loch", + -12.82228183746338 + ], + [ + "▁bezel", + -12.82230281829834 + ], + [ + "▁Skill", + -12.822345733642578 + ], + [ + "▁standby", + -12.822386741638184 + ], + [ + "▁mosque", + -12.82244110107422 + ], + [ + "▁interpreting", + -12.822525024414062 + ], + [ + "▁furthermore", + -12.822529792785645 + ], + [ + "▁transgender", + -12.822588920593262 + ], + [ + "▁Slovenia", + -12.82261848449707 + ], + [ + "▁Himself", + -12.82262897491455 + ], + [ + "▁scuba", + -12.822629928588867 + ], + [ + "▁Dig", + -12.82270336151123 + ], + [ + "▁Coco", + -12.82280158996582 + ], + [ + "▁Fees", + -12.822842597961426 + ], + [ + "Case", + -12.822868347167969 + ], + [ + "▁Pets", + -12.822890281677246 + ], + [ + "▁Disneyland", + -12.8229341506958 + ], + [ + "▁Topic", + -12.82300090789795 + ], + [ + "▁stuffing", + -12.823046684265137 + ], + [ + "102", + -12.823187828063965 + ], + [ + "Kit", + -12.823288917541504 + ], + [ + "▁Um", + -12.823319435119627 + ], + [ + "▁arrests", + -12.823491096496582 + ], + [ + "▁Dome", + -12.823503494262695 + ], + [ + "▁PP", + -12.823674201965332 + ], + [ + "central", + -12.823675155639648 + ], + [ + "▁impulse", + -12.823724746704102 + ], + [ + "▁apprenticeship", + -12.823734283447266 + ], + [ + "▁ICE", + -12.823738098144531 + ], + [ + "▁infestation", + -12.823844909667969 + ], + [ + "ти", + -12.82388401031494 + ], + [ + "▁casing", + -12.82400894165039 + ], + [ + "German", + -12.824031829833984 + ], + [ + "▁Motorcycle", + -12.824067115783691 + ], + [ + "▁instrumentation", + -12.824088096618652 + ], + [ + "▁Lind", + -12.824098587036133 + ], + [ + "▁Tiles", + -12.824179649353027 + ], + [ + "▁APR", + -12.824182510375977 + ], + [ + "▁IPO", + -12.824209213256836 + ], + [ + "▁molecule", + -12.824298858642578 + ], + [ + "Medical", + -12.82433795928955 + ], + [ + "▁Specific", + -12.824342727661133 + ], + [ + "▁Hoffman", + -12.824407577514648 + ], + [ + "▁Smile", + -12.824629783630373 + ], + [ + "▁Fairy", + -12.82468032836914 + ], + [ + "▁Washer", + -12.824747085571287 + ], + [ + "▁Paso", + -12.824751853942873 + ], + [ + "-08", + -12.824827194213867 + ], + [ + "▁Barrett", + -12.824960708618164 + ], + [ + "▁cigar", + -12.825010299682615 + ], + [ + "▁Xiaomi", + -12.825091361999512 + ], + [ + "▁Wong", + -12.825105667114258 + ], + [ + "▁untreated", + 
-12.82517433166504 + ], + [ + "▁Crypto", + -12.825220108032228 + ], + [ + "boards", + -12.825228691101074 + ], + [ + "nder", + -12.825417518615724 + ], + [ + "▁polo", + -12.825469017028809 + ], + [ + "▁fluent", + -12.825483322143556 + ], + [ + "▁1938", + -12.825693130493164 + ], + [ + "▁fist", + -12.825827598571776 + ], + [ + "Pat", + -12.825831413269045 + ], + [ + "kie", + -12.825963973999023 + ], + [ + "▁Measure", + -12.825965881347656 + ], + [ + "rap", + -12.82614517211914 + ], + [ + "Guide", + -12.82622241973877 + ], + [ + "▁liaison", + -12.826396942138672 + ], + [ + "▁trivial", + -12.826440811157228 + ], + [ + "▁casualties", + -12.82644748687744 + ], + [ + "▁ghosts", + -12.8265380859375 + ], + [ + "▁drafts", + -12.826539039611816 + ], + [ + "▁Quantum", + -12.826788902282717 + ], + [ + "save", + -12.826951026916504 + ], + [ + "▁unsubscribe", + -12.82710075378418 + ], + [ + "▁Sq", + -12.827102661132812 + ], + [ + "▁Goal", + -12.827258110046388 + ], + [ + "▁grabs", + -12.827265739440918 + ], + [ + "▁irony", + -12.827314376831056 + ], + [ + "▁rag", + -12.82738971710205 + ], + [ + "▁Hollow", + -12.827439308166504 + ], + [ + "▁ethanol", + -12.82746124267578 + ], + [ + "▁greed", + -12.827566146850586 + ], + [ + "▁airbags", + -12.827698707580566 + ], + [ + "▁Irving", + -12.827699661254885 + ], + [ + "▁posing", + -12.827707290649414 + ], + [ + "▁inventive", + -12.827709197998049 + ], + [ + "deck", + -12.827746391296388 + ], + [ + "DN", + -12.82778263092041 + ], + [ + "▁Cheers", + -12.827784538269045 + ], + [ + "▁LNG", + -12.82783031463623 + ], + [ + "▁feminist", + -12.827997207641602 + ], + [ + "▁suede", + -12.828110694885254 + ], + [ + "▁Insights", + -12.828216552734377 + ], + [ + "kids", + -12.828312873840332 + ], + [ + "▁Values", + -12.828380584716797 + ], + [ + "▁bleed", + -12.828483581542969 + ], + [ + "iu", + -12.828524589538574 + ], + [ + "▁Willie", + -12.828689575195312 + ], + [ + "▁Odyssey", + -12.82876682281494 + ], + [ + "▁lymph", + -12.828939437866213 + ], + [ + "evo", + -12.828946113586426 + ], + [ + "▁consolidating", + -12.828947067260742 + ], + [ + "▁procedural", + -12.828947067260742 + ], + [ + "fy", + -12.828981399536133 + ], + [ + "▁Provincial", + -12.828989028930664 + ], + [ + "Par", + -12.829110145568848 + ], + [ + "amide", + -12.829124450683594 + ], + [ + "▁probate", + -12.829379081726074 + ], + [ + "▁trailing", + -12.829530715942385 + ], + [ + "▁substrate", + -12.829574584960938 + ], + [ + "▁Norwich", + -12.829628944396973 + ], + [ + "▁Raja", + -12.82966423034668 + ], + [ + "▁Loft", + -12.82973289489746 + ], + [ + "▁excerpt", + -12.829745292663574 + ], + [ + "Australia", + -12.829751014709473 + ], + [ + "▁Chiefs", + -12.829758644104004 + ], + [ + "▁Cannon", + -12.829829216003418 + ], + [ + "▁Amen", + -12.82988452911377 + ], + [ + "▁Thomson", + -12.829901695251465 + ], + [ + "▁Democracy", + -12.829911231994627 + ], + [ + "Op", + -12.829995155334473 + ], + [ + "▁dc", + -12.830044746398926 + ], + [ + "▁financed", + -12.830220222473145 + ], + [ + "▁circulating", + -12.83022403717041 + ], + [ + "▁leggings", + -12.830224990844728 + ], + [ + "Chief", + -12.830227851867676 + ], + [ + "▁moisturizer", + -12.830240249633787 + ], + [ + "▁Difference", + -12.83024787902832 + ], + [ + "Outside", + -12.830310821533203 + ], + [ + "Value", + -12.830355644226074 + ], + [ + "▁Lisbon", + -12.83038330078125 + ], + [ + "▁ADA", + -12.830425262451172 + ], + [ + "plant", + -12.8304443359375 + ], + [ + "▁Aussie", + -12.83047103881836 + ], + [ + "▁arose", + -12.830500602722168 + ], + [ + "▁candid", 
+ -12.83064079284668 + ], + [ + "▁Walton", + -12.83071231842041 + ], + [ + "▁Rhodes", + -12.830727577209473 + ], + [ + "▁GOD", + -12.830793380737305 + ], + [ + "sync", + -12.830900192260742 + ], + [ + "▁wetlands", + -12.831035614013672 + ], + [ + "▁Butterfly", + -12.831196784973145 + ], + [ + "▁Beam", + -12.831417083740234 + ], + [ + "▁disturbance", + -12.831470489501951 + ], + [ + "▁Gonzalez", + -12.831475257873535 + ], + [ + "▁gigantic", + -12.831502914428713 + ], + [ + "▁Counties", + -12.83155345916748 + ], + [ + "yang", + -12.831788063049316 + ], + [ + "▁fermented", + -12.831788063049316 + ], + [ + "2.1", + -12.83206558227539 + ], + [ + "▁baptism", + -12.8320894241333 + ], + [ + "5%", + -12.832114219665527 + ], + [ + "ars", + -12.83219051361084 + ], + [ + "pick", + -12.832280158996582 + ], + [ + "▁informs", + -12.83232307434082 + ], + [ + "▁Vir", + -12.832606315612791 + ], + [ + "▁disappears", + -12.832759857177734 + ], + [ + "▁ornamental", + -12.832876205444336 + ], + [ + "▁KC", + -12.832938194274902 + ], + [ + "▁formulate", + -12.8329439163208 + ], + [ + "▁leased", + -12.832955360412598 + ], + [ + "▁tightening", + -12.833069801330566 + ], + [ + "mir", + -12.833141326904297 + ], + [ + "▁1915", + -12.833148002624512 + ], + [ + "▁Taliban", + -12.83317756652832 + ], + [ + "▁Starter", + -12.83322525024414 + ], + [ + "▁declines", + -12.83324909210205 + ], + [ + "/17", + -12.833366394042969 + ], + [ + "▁dorm", + -12.833483695983888 + ], + [ + "hem", + -12.833488464355469 + ], + [ + "▁Teach", + -12.833691596984863 + ], + [ + "▁pharmacist", + -12.8340482711792 + ], + [ + "▁restorative", + -12.834065437316896 + ], + [ + "Summary", + -12.83416748046875 + ], + [ + "▁toolkit", + -12.834210395812988 + ], + [ + "custom", + -12.83430290222168 + ], + [ + "▁Portsmouth", + -12.834428787231444 + ], + [ + "▁organisers", + -12.834452629089355 + ], + [ + "2).", + -12.834528923034668 + ], + [ + "carbon", + -12.834579467773438 + ], + [ + "▁Sei", + -12.834650039672852 + ], + [ + "corp", + -12.8346586227417 + ], + [ + "▁Carlo", + -12.834687232971191 + ], + [ + "▁PAC", + -12.834783554077148 + ], + [ + "moor", + -12.834784507751465 + ], + [ + "▁::", + -12.834875106811523 + ], + [ + "▁Frozen", + -12.834961891174316 + ], + [ + "▁cervical", + -12.834975242614746 + ], + [ + "▁Kur", + -12.835091590881348 + ], + [ + "reviewed", + -12.835095405578612 + ], + [ + "ер", + -12.83515167236328 + ], + [ + "▁preach", + -12.83517837524414 + ], + [ + "▁prolong", + -12.835271835327148 + ], + [ + "▁8\"", + -12.835286140441896 + ], + [ + "▁jumper", + -12.83531379699707 + ], + [ + "▁emerald", + -12.83534812927246 + ], + [ + "▁formidable", + -12.835349082946776 + ], + [ + "▁CAP", + -12.835352897644045 + ], + [ + "▁infographic", + -12.835389137268066 + ], + [ + "▁Potential", + -12.835429191589355 + ], + [ + "▁haunting", + -12.835471153259276 + ], + [ + "▁ML", + -12.835668563842772 + ], + [ + "MY", + -12.83568286895752 + ], + [ + "▁PET", + -12.835737228393556 + ], + [ + "▁Shiva", + -12.835954666137695 + ], + [ + "▁temper", + -12.83603858947754 + ], + [ + "▁stimulates", + -12.836053848266602 + ], + [ + "▁1962", + -12.836100578308104 + ], + [ + "▁1933", + -12.83612060546875 + ], + [ + "▁Luna", + -12.836161613464355 + ], + [ + "▁DON", + -12.836219787597656 + ], + [ + "pine", + -12.83623218536377 + ], + [ + "Indian", + -12.83627700805664 + ], + [ + "▁Lac", + -12.836296081542969 + ], + [ + "▁delve", + -12.836423873901367 + ], + [ + "▁morale", + -12.836445808410645 + ], + [ + "▁144", + -12.836525917053224 + ], + [ + "▁Everyday", + 
-12.83659839630127 + ], + [ + "▁unforeseen", + -12.836633682250977 + ], + [ + "▁oatmeal", + -12.836634635925291 + ], + [ + "▁adherence", + -12.836647987365724 + ], + [ + "▁hacks", + -12.83668613433838 + ], + [ + "▁muddy", + -12.836697578430176 + ], + [ + "Silver", + -12.836819648742676 + ], + [ + "▁Maxwell", + -12.836841583251951 + ], + [ + "▁vocalist", + -12.836852073669434 + ], + [ + "sets", + -12.836913108825684 + ], + [ + "▁spiders", + -12.837111473083496 + ], + [ + "Focus", + -12.83713436126709 + ], + [ + "▁Governance", + -12.83718967437744 + ], + [ + "▁Dragons", + -12.83724594116211 + ], + [ + "▁Gender", + -12.837278366088867 + ], + [ + "▁firsthand", + -12.837440490722656 + ], + [ + "▁Vel", + -12.83758544921875 + ], + [ + "▁Dawson", + -12.837616920471191 + ], + [ + "tom", + -12.837750434875488 + ], + [ + "▁****", + -12.838006019592283 + ], + [ + "characterization", + -12.838078498840332 + ], + [ + "▁Roland", + -12.838237762451172 + ], + [ + "▁slew", + -12.83828067779541 + ], + [ + "▁rebels", + -12.838309288024902 + ], + [ + "▁Wed", + -12.838385581970217 + ], + [ + "▁reconnect", + -12.838385581970217 + ], + [ + "▁closures", + -12.83839511871338 + ], + [ + "▁hormonal", + -12.838834762573242 + ], + [ + "▁Aviv", + -12.838929176330566 + ], + [ + "▁Camping", + -12.839056015014648 + ], + [ + "▁linens", + -12.839146614074709 + ], + [ + "▁3-0", + -12.839167594909668 + ], + [ + "ides", + -12.839221954345703 + ], + [ + ".00.", + -12.839337348937988 + ], + [ + "▁GPA", + -12.839351654052734 + ], + [ + "▁mon", + -12.83945369720459 + ], + [ + "▁blockbuster", + -12.839469909667969 + ], + [ + "▁Watts", + -12.839587211608888 + ], + [ + "ATE", + -12.839749336242676 + ], + [ + "▁fruitful", + -12.840006828308104 + ], + [ + "ddle", + -12.840063095092772 + ], + [ + "▁interns", + -12.840076446533203 + ], + [ + "▁1914", + -12.840123176574709 + ], + [ + "crafted", + -12.840276718139648 + ], + [ + "mmer", + -12.840323448181152 + ], + [ + "Fe", + -12.840394020080566 + ], + [ + "▁Giveaway", + -12.84052848815918 + ], + [ + "motion", + -12.840548515319824 + ], + [ + "miss", + -12.84058952331543 + ], + [ + "▁unsecured", + -12.840595245361328 + ], + [ + "▁sinking", + -12.840743064880373 + ], + [ + "▁differentiation", + -12.840837478637695 + ], + [ + "▁coaster", + -12.841009140014648 + ], + [ + "▁6.0", + -12.841041564941406 + ], + [ + "Sha", + -12.841081619262695 + ], + [ + "▁Soil", + -12.84109115600586 + ], + [ + "▁Headquarters", + -12.841191291809082 + ], + [ + "TI", + -12.84120273590088 + ], + [ + "▁coupling", + -12.841459274291992 + ], + [ + "Wi", + -12.841604232788086 + ], + [ + "ente", + -12.841614723205566 + ], + [ + "▁thyme", + -12.841650009155272 + ], + [ + "▁campground", + -12.84195327758789 + ], + [ + "▁sided", + -12.842005729675291 + ], + [ + "Gu", + -12.84200668334961 + ], + [ + "Alex", + -12.8421049118042 + ], + [ + "▁Adapter", + -12.84222412109375 + ], + [ + "Cross", + -12.84225368499756 + ], + [ + "▁unrealistic", + -12.842262268066406 + ], + [ + "▁successive", + -12.84227466583252 + ], + [ + "▁Wax", + -12.842279434204102 + ], + [ + "anda", + -12.84243106842041 + ], + [ + "▁fungi", + -12.842434883117676 + ], + [ + "▁flea", + -12.84251308441162 + ], + [ + "Te", + -12.842516899108888 + ], + [ + "▁possessed", + -12.842521667480469 + ], + [ + "DY", + -12.842535018920898 + ], + [ + "lap", + -12.842556953430176 + ], + [ + "cf", + -12.842586517333984 + ], + [ + "▁interrupt", + -12.84265422821045 + ], + [ + "▁Floral", + -12.842671394348145 + ], + [ + "1/", + -12.842758178710938 + ], + [ + "▁Winds", + 
-12.842824935913086 + ], + [ + "gor", + -12.842905044555664 + ], + [ + "▁salvage", + -12.842928886413574 + ], + [ + "Sat", + -12.842938423156738 + ], + [ + "chip", + -12.843022346496582 + ], + [ + "▁bouncing", + -12.843085289001465 + ], + [ + "▁contemplating", + -12.843085289001465 + ], + [ + "▁fu", + -12.843154907226562 + ], + [ + "▁Kee", + -12.84322738647461 + ], + [ + "▁Bucks", + -12.843286514282228 + ], + [ + "▁Coaches", + -12.843335151672363 + ], + [ + "bil", + -12.84338092803955 + ], + [ + "▁POST", + -12.843448638916016 + ], + [ + "connected", + -12.84349536895752 + ], + [ + "fired", + -12.843512535095217 + ], + [ + "▁crossword", + -12.843539237976074 + ], + [ + "▁ASTM", + -12.843562126159668 + ], + [ + "▁adolescent", + -12.84362506866455 + ], + [ + "▁infinity", + -12.843658447265623 + ], + [ + "tested", + -12.843711853027344 + ], + [ + "▁finer", + -12.84377098083496 + ], + [ + "bath", + -12.843807220458984 + ], + [ + "never", + -12.843854904174805 + ], + [ + "▁Nadu", + -12.84394073486328 + ], + [ + "▁imperial", + -12.844314575195312 + ], + [ + "rak", + -12.844327926635742 + ], + [ + "▁Notification", + -12.844383239746094 + ], + [ + "Cash", + -12.844412803649902 + ], + [ + "▁identifier", + -12.844488143920898 + ], + [ + "▁eggplant", + -12.844501495361328 + ], + [ + "2.2", + -12.844565391540527 + ], + [ + "▁notions", + -12.8446626663208 + ], + [ + "▁realizes", + -12.844671249389648 + ], + [ + "▁monkeys", + -12.84471321105957 + ], + [ + "▁Irvine", + -12.844772338867188 + ], + [ + "▁Drupal", + -12.844781875610352 + ], + [ + "▁Ensemble", + -12.84481143951416 + ], + [ + "▁latency", + -12.844921112060549 + ], + [ + "▁rpm", + -12.844922065734863 + ], + [ + "▁inexperienced", + -12.8450345993042 + ], + [ + "▁postcard", + -12.845123291015623 + ], + [ + "▁crest", + -12.845209121704102 + ], + [ + "▁trustees", + -12.84524631500244 + ], + [ + "HT", + -12.845253944396973 + ], + [ + "▁linguistic", + -12.845314979553224 + ], + [ + "▁cops", + -12.845401763916016 + ], + [ + "-45", + -12.845613479614258 + ], + [ + "Glad", + -12.845622062683104 + ], + [ + "▁platter", + -12.845693588256836 + ], + [ + "▁tallest", + -12.84585952758789 + ], + [ + "Wait", + -12.845991134643556 + ], + [ + "▁sanitary", + -12.846043586730955 + ], + [ + "▁saga", + -12.846080780029297 + ], + [ + "▁overdue", + -12.84616470336914 + ], + [ + "▁headphone", + -12.846176147460938 + ], + [ + "▁1918", + -12.846264839172363 + ], + [ + "Town", + -12.84629726409912 + ], + [ + "▁massively", + -12.846324920654297 + ], + [ + "iso", + -12.846383094787598 + ], + [ + "▁DAYS", + -12.84650421142578 + ], + [ + "nut", + -12.846521377563477 + ], + [ + "found", + -12.846572875976562 + ], + [ + "▁McCain", + -12.846604347229004 + ], + [ + "Added", + -12.84671688079834 + ], + [ + "▁helper", + -12.846918106079102 + ], + [ + "▁complexes", + -12.847049713134766 + ], + [ + "mie", + -12.847122192382812 + ], + [ + "▁unanimously", + -12.847164154052734 + ], + [ + "▁Gra", + -12.847186088562012 + ], + [ + "▁organism", + -12.847247123718262 + ], + [ + "▁sigh", + -12.847288131713867 + ], + [ + "▁Documents", + -12.847302436828612 + ], + [ + "▁plantation", + -12.847314834594728 + ], + [ + "▁Rolls", + -12.847481727600098 + ], + [ + "▁groundwater", + -12.847509384155272 + ], + [ + "▁IQ", + -12.847545623779297 + ], + [ + "mani", + -12.847715377807615 + ], + [ + "Ann", + -12.847816467285156 + ], + [ + "▁bulletin", + -12.84787654876709 + ], + [ + "/15", + -12.848087310791016 + ], + [ + "▁thumbs", + -12.848230361938477 + ], + [ + "▁8:", + -12.848260879516602 + ], + [ + 
"▁nesting", + -12.848273277282717 + ], + [ + "▁hesitant", + -12.84827709197998 + ], + [ + "▁negligent", + -12.84827709197998 + ], + [ + "▁convictions", + -12.84827995300293 + ], + [ + "▁proposing", + -12.848299980163574 + ], + [ + "▁Backyard", + -12.848407745361328 + ], + [ + "▁Delicious", + -12.848435401916504 + ], + [ + "▁freak", + -12.84847354888916 + ], + [ + "▁Flu", + -12.848577499389648 + ], + [ + "▁biting", + -12.848698616027832 + ], + [ + "▁Kitty", + -12.848745346069336 + ], + [ + "▁Silent", + -12.848777770996094 + ], + [ + "ej", + -12.848790168762209 + ], + [ + "▁Rabbi", + -12.848816871643066 + ], + [ + "▁receptors", + -12.8490571975708 + ], + [ + "▁archived", + -12.84906768798828 + ], + [ + "▁Ella", + -12.849081039428713 + ], + [ + "▁Vet", + -12.849193572998049 + ], + [ + "▁aura", + -12.849279403686523 + ], + [ + "▁Sitting", + -12.849331855773926 + ], + [ + "▁gamble", + -12.849356651306152 + ], + [ + ".......", + -12.849365234375 + ], + [ + "Sales", + -12.849559783935549 + ], + [ + "▁Moroccan", + -12.849621772766112 + ], + [ + "▁coded", + -12.8497314453125 + ], + [ + "▁Goldman", + -12.849814414978027 + ], + [ + "▁Polar", + -12.849828720092772 + ], + [ + "▁IE", + -12.849836349487305 + ], + [ + "▁Pandora", + -12.850016593933104 + ], + [ + "Washington", + -12.850090980529783 + ], + [ + "Network", + -12.850120544433594 + ], + [ + "▁8.5", + -12.850286483764648 + ], + [ + "▁Reception", + -12.850464820861816 + ], + [ + "▁Bran", + -12.850500106811523 + ], + [ + "lton", + -12.850598335266112 + ], + [ + "▁4.3", + -12.85059928894043 + ], + [ + "▁Aging", + -12.85060214996338 + ], + [ + "▁Childhood", + -12.850916862487791 + ], + [ + "▁dismissal", + -12.851028442382812 + ], + [ + "AV", + -12.851103782653809 + ], + [ + "▁Bonnie", + -12.851136207580566 + ], + [ + "▁Bulk", + -12.851237297058104 + ], + [ + "▁Hale", + -12.85130786895752 + ], + [ + "▁ARC", + -12.851348876953123 + ], + [ + "dha", + -12.85137176513672 + ], + [ + "▁swimmers", + -12.851421356201172 + ], + [ + "▁subset", + -12.851428985595703 + ], + [ + "▁outset", + -12.851448059082031 + ], + [ + "▁br", + -12.85156536102295 + ], + [ + "raj", + -12.851577758789062 + ], + [ + "payment", + -12.851740837097168 + ], + [ + "lip", + -12.851778984069824 + ], + [ + "′′", + -12.851826667785645 + ], + [ + "▁stamping", + -12.851846694946287 + ], + [ + "▁lighthouse", + -12.851861000061035 + ], + [ + "▁implements", + -12.852117538452148 + ], + [ + "▁chlorine", + -12.852188110351562 + ], + [ + "▁alarming", + -12.852221488952637 + ], + [ + "Van", + -12.852334022521973 + ], + [ + "▁senator", + -12.852380752563477 + ], + [ + "▁ponder", + -12.852635383605955 + ], + [ + "▁Lowe", + -12.852668762207031 + ], + [ + "▁Waterloo", + -12.852747917175291 + ], + [ + "▁Hawks", + -12.852845191955566 + ], + [ + "▁3-1", + -12.852888107299805 + ], + [ + "▁inspirations", + -12.852985382080078 + ], + [ + "▁Calling", + -12.853070259094238 + ], + [ + "hra", + -12.85320281982422 + ], + [ + "study", + -12.85325813293457 + ], + [ + "▁acidic", + -12.853330612182615 + ], + [ + "▁Baba", + -12.853336334228516 + ], + [ + "▁fuller", + -12.853392601013184 + ], + [ + "▁Kas", + -12.853455543518066 + ], + [ + "▁Monterey", + -12.85350227355957 + ], + [ + "▁Tehran", + -12.853522300720217 + ], + [ + "▁flipped", + -12.85353660583496 + ], + [ + "▁512", + -12.853586196899414 + ], + [ + "▁reviewer", + -12.853609085083008 + ], + [ + "▁Hd", + -12.853681564331056 + ], + [ + "▁snowy", + -12.853761672973633 + ], + [ + "▁vibrations", + -12.853829383850098 + ], + [ + "5.00", + -12.853907585144045 + ], 
+ [ + "▁Cla", + -12.853952407836914 + ], + [ + "▁familiarity", + -12.854033470153809 + ], + [ + "Mu", + -12.854056358337402 + ], + [ + "▁Boom", + -12.854164123535156 + ], + [ + "▁intrigue", + -12.85421085357666 + ], + [ + "single", + -12.85426139831543 + ], + [ + "Computer", + -12.854315757751465 + ], + [ + "ANCE", + -12.854389190673828 + ], + [ + "▁courtroom", + -12.85440444946289 + ], + [ + "▁quartet", + -12.854535102844238 + ], + [ + "▁Anchor", + -12.854660034179688 + ], + [ + "▁alternating", + -12.85480499267578 + ], + [ + "▁fertile", + -12.85480785369873 + ], + [ + "▁handicap", + -12.854808807373049 + ], + [ + "▁Sid", + -12.85487461090088 + ], + [ + "logic", + -12.854912757873535 + ], + [ + "▁germs", + -12.85497760772705 + ], + [ + "Hotel", + -12.854999542236328 + ], + [ + "tools", + -12.855005264282228 + ], + [ + "Bus", + -12.85508918762207 + ], + [ + "▁RP", + -12.855107307434082 + ], + [ + "Side", + -12.855219841003418 + ], + [ + "▁suppress", + -12.855231285095217 + ], + [ + "▁shorten", + -12.855340003967283 + ], + [ + "▁Schmidt", + -12.855381965637209 + ], + [ + "▁equitable", + -12.855470657348633 + ], + [ + "▁Ax", + -12.85552978515625 + ], + [ + "▁sleeps", + -12.855573654174805 + ], + [ + "▁grille", + -12.855598449707031 + ], + [ + "jin", + -12.855609893798828 + ], + [ + "EV", + -12.855786323547363 + ], + [ + "rata", + -12.855795860290527 + ], + [ + "genic", + -12.855964660644531 + ], + [ + "Compare", + -12.856082916259766 + ], + [ + "cing", + -12.856098175048828 + ], + [ + "▁chap", + -12.856114387512209 + ], + [ + "▁Dover", + -12.856121063232422 + ], + [ + "▁lunar", + -12.85612678527832 + ], + [ + "▁1911", + -12.856173515319824 + ], + [ + "▁Wei", + -12.856186866760254 + ], + [ + "375", + -12.856204986572266 + ], + [ + "▁capitalize", + -12.85627555847168 + ], + [ + "▁é", + -12.856283187866213 + ], + [ + "▁diminish", + -12.856361389160156 + ], + [ + "oko", + -12.856417655944824 + ], + [ + "▁antiques", + -12.856431007385254 + ], + [ + "▁rogue", + -12.856465339660645 + ], + [ + "▁punished", + -12.856488227844238 + ], + [ + "▁staining", + -12.856501579284668 + ], + [ + "dem", + -12.856609344482422 + ], + [ + "▁acknowledges", + -12.85661506652832 + ], + [ + "node", + -12.856648445129396 + ], + [ + "▁Coverage", + -12.856654167175291 + ], + [ + "▁enlarged", + -12.856700897216797 + ], + [ + "▁utilised", + -12.856767654418944 + ], + [ + "combe", + -12.857043266296388 + ], + [ + "▁Showcase", + -12.8570556640625 + ], + [ + "▁roaming", + -12.857078552246094 + ], + [ + "▁Preparation", + -12.857080459594728 + ], + [ + "▁Laws", + -12.85708236694336 + ], + [ + "▁superficial", + -12.857196807861328 + ], + [ + "▁toothbrush", + -12.857300758361816 + ], + [ + "▁YORK", + -12.857429504394531 + ], + [ + "▁1890", + -12.857439041137695 + ], + [ + "▁MTV", + -12.857483863830566 + ], + [ + "▁Bedford", + -12.8575439453125 + ], + [ + "▁pane", + -12.857619285583496 + ], + [ + "▁wicker", + -12.857625007629396 + ], + [ + "▁rollers", + -12.857667922973633 + ], + [ + "▁6\"", + -12.85768222808838 + ], + [ + "▁Kathleen", + -12.85769271850586 + ], + [ + "▁scientifically", + -12.857741355895996 + ], + [ + "▁takeover", + -12.857770919799805 + ], + [ + "eti", + -12.857778549194336 + ], + [ + "▁smoothies", + -12.857821464538574 + ], + [ + "7%", + -12.857955932617188 + ], + [ + "▁Coins", + -12.858263969421388 + ], + [ + "Firstly", + -12.858264923095703 + ], + [ + "volume", + -12.858335494995115 + ], + [ + "▁Cube", + -12.858345985412598 + ], + [ + "Advanced", + -12.858372688293455 + ], + [ + "▁Overnight", + 
-12.85842227935791 + ], + [ + "▁pointer", + -12.85865306854248 + ], + [ + "▁spikes", + -12.858710289001465 + ], + [ + "▁concierge", + -12.858741760253906 + ], + [ + "▁leopard", + -12.858741760253906 + ], + [ + "▁painless", + -12.85893726348877 + ], + [ + "▁hacker", + -12.859048843383787 + ], + [ + "▁Marin", + -12.859063148498535 + ], + [ + "▁Meadow", + -12.8590669631958 + ], + [ + "▁Shine", + -12.859219551086426 + ], + [ + "▁fragrances", + -12.859275817871094 + ], + [ + "▁departed", + -12.859405517578123 + ], + [ + "▁dumped", + -12.859716415405272 + ], + [ + "▁Matthews", + -12.859838485717772 + ], + [ + "does", + -12.85985279083252 + ], + [ + "scope", + -12.859990119934082 + ], + [ + "▁polyurethane", + -12.86005687713623 + ], + [ + "▁auditorium", + -12.860068321228027 + ], + [ + "etz", + -12.86017894744873 + ], + [ + "▁Joomla", + -12.860198974609377 + ], + [ + "▁Reiki", + -12.860260009765623 + ], + [ + "230", + -12.860280990600586 + ], + [ + "▁Chess", + -12.860370635986328 + ], + [ + "Round", + -12.86053466796875 + ], + [ + "▁chore", + -12.860735893249512 + ], + [ + "Providing", + -12.86081600189209 + ], + [ + "▁Fal", + -12.860902786254885 + ], + [ + "▁Unified", + -12.860902786254885 + ], + [ + "▁transformer", + -12.860974311828612 + ], + [ + "▁legit", + -12.861024856567385 + ], + [ + "▁stroller", + -12.861056327819824 + ], + [ + "▁cha", + -12.861166000366213 + ], + [ + "▁Managed", + -12.861200332641602 + ], + [ + "▁raced", + -12.86129379272461 + ], + [ + "URE", + -12.861397743225098 + ], + [ + "atic", + -12.861442565917969 + ], + [ + "▁corresponds", + -12.861452102661133 + ], + [ + "▁Gaga", + -12.861492156982422 + ], + [ + "▁antibody", + -12.861541748046877 + ], + [ + "connect", + -12.86163330078125 + ], + [ + "▁Kha", + -12.861701011657717 + ], + [ + "▁sonic", + -12.861760139465332 + ], + [ + "images", + -12.861777305603027 + ], + [ + "every", + -12.861988067626951 + ], + [ + "▁disciplinary", + -12.862010955810549 + ], + [ + "▁villagers", + -12.86202907562256 + ], + [ + "▁terraces", + -12.862030029296877 + ], + [ + "brain", + -12.86209487915039 + ], + [ + "▁Leisure", + -12.862241744995115 + ], + [ + "▁boasting", + -12.862266540527344 + ], + [ + "▁specialising", + -12.862266540527344 + ], + [ + "CV", + -12.862396240234377 + ], + [ + "▁Upgrade", + -12.862406730651855 + ], + [ + "▁Applicants", + -12.86244010925293 + ], + [ + "▁Decorations", + -12.862554550170898 + ], + [ + "▁1963", + -12.862629890441896 + ], + [ + "▁Richards", + -12.86269187927246 + ], + [ + "▁Kuala", + -12.86271858215332 + ], + [ + "▁indices", + -12.86275577545166 + ], + [ + "▁buzzing", + -12.862780570983888 + ], + [ + "▁Nixon", + -12.862796783447266 + ], + [ + "▁Surely", + -12.862838745117188 + ], + [ + "largest", + -12.862868309020996 + ], + [ + "▁contention", + -12.862923622131348 + ], + [ + "vol", + -12.862926483154297 + ], + [ + "▁cupboards", + -12.863176345825195 + ], + [ + "▁TCP", + -12.863241195678713 + ], + [ + "▁USC", + -12.86324977874756 + ], + [ + "▁Numerous", + -12.863327026367188 + ], + [ + "enter", + -12.86334228515625 + ], + [ + "▁trainees", + -12.863435745239258 + ], + [ + "▁Roast", + -12.863460540771484 + ], + [ + "Companies", + -12.863527297973633 + ], + [ + "▁Themes", + -12.863544464111328 + ], + [ + "▁quieter", + -12.86359405517578 + ], + [ + "▁Receive", + -12.863600730895996 + ], + [ + "▁Clayton", + -12.863612174987791 + ], + [ + "▁volt", + -12.8636474609375 + ], + [ + "▁hoop", + -12.863718032836914 + ], + [ + "▁Equal", + -12.86377239227295 + ], + [ + "▁Germans", + -12.863914489746094 + ], + [ + 
"▁accompaniment", + -12.864015579223633 + ], + [ + "▁consequential", + -12.864020347595217 + ], + [ + "▁sovereignty", + -12.864042282104492 + ], + [ + "▁extinction", + -12.864060401916504 + ], + [ + "▁Sutton", + -12.864062309265137 + ], + [ + "erman", + -12.86411190032959 + ], + [ + "▁shedding", + -12.864160537719728 + ], + [ + "seed", + -12.864161491394045 + ], + [ + "ат", + -12.864335060119627 + ], + [ + "▁appetizer", + -12.86435604095459 + ], + [ + "▁missionary", + -12.864376068115234 + ], + [ + "▁lawful", + -12.864401817321776 + ], + [ + "▁mast", + -12.864420890808104 + ], + [ + "▁nineteenth", + -12.864432334899902 + ], + [ + "▁abound", + -12.864513397216797 + ], + [ + "▁Gui", + -12.86473274230957 + ], + [ + ")\"", + -12.864848136901855 + ], + [ + "▁AIR", + -12.864940643310549 + ], + [ + "▁dread", + -12.864957809448242 + ], + [ + "▁URLs", + -12.86516571044922 + ], + [ + "▁Couch", + -12.865182876586914 + ], + [ + "▁grilling", + -12.865266799926758 + ], + [ + "▁Loved", + -12.8652925491333 + ], + [ + "▁staples", + -12.86533546447754 + ], + [ + "▁untouched", + -12.865338325500488 + ], + [ + "▁indicative", + -12.865339279174805 + ], + [ + "▁cremation", + -12.865345001220703 + ], + [ + "▁Greenwich", + -12.86538028717041 + ], + [ + "rl", + -12.86548137664795 + ], + [ + "UI", + -12.865486145019531 + ], + [ + "Trans", + -12.865537643432615 + ], + [ + "cept", + -12.865571975708008 + ], + [ + "▁recyclable", + -12.865586280822754 + ], + [ + "tip", + -12.865608215332031 + ], + [ + "mostly", + -12.86569118499756 + ], + [ + "▁kitty", + -12.865885734558104 + ], + [ + "Info", + -12.865954399108888 + ], + [ + "▁Bernie", + -12.86596965789795 + ], + [ + "▁Acting", + -12.866121292114258 + ], + [ + "▁Grants", + -12.866169929504396 + ], + [ + "▁Shelby", + -12.866230964660645 + ], + [ + "heavy", + -12.866249084472656 + ], + [ + "Rose", + -12.866326332092283 + ], + [ + "▁Population", + -12.866551399230955 + ], + [ + "▁1929", + -12.866586685180664 + ], + [ + "▁Cubs", + -12.86660861968994 + ], + [ + "▁outskirts", + -12.86666774749756 + ], + [ + "▁1959", + -12.866808891296388 + ], + [ + "▁verge", + -12.866812705993652 + ], + [ + "▁Mentor", + -12.866843223571776 + ], + [ + "▁outsource", + -12.866844177246094 + ], + [ + "▁graceful", + -12.866877555847168 + ], + [ + "▁flick", + -12.8668794631958 + ], + [ + "▁potty", + -12.866888999938965 + ], + [ + "▁cassette", + -12.86691188812256 + ], + [ + "BF", + -12.866951942443848 + ], + [ + "▁Peters", + -12.86705207824707 + ], + [ + "▁Europa", + -12.86705493927002 + ], + [ + "▁12%", + -12.867056846618652 + ], + [ + "▁Ala", + -12.867342948913574 + ], + [ + "▁shocks", + -12.8674898147583 + ], + [ + "▁testosterone", + -12.867497444152832 + ], + [ + "ency", + -12.867531776428224 + ], + [ + "Featuring", + -12.867595672607422 + ], + [ + "▁Resorts", + -12.867639541625977 + ], + [ + "roy", + -12.867648124694824 + ], + [ + "▁BACK", + -12.867713928222656 + ], + [ + "▁ringing", + -12.867782592773438 + ], + [ + "▁Tattoo", + -12.867788314819336 + ], + [ + "▁OD", + -12.867867469787598 + ], + [ + "▁poke", + -12.867884635925291 + ], + [ + "pf", + -12.867918968200684 + ], + [ + "▁cheesy", + -12.86798858642578 + ], + [ + "▁varsity", + -12.86798858642578 + ], + [ + "▁horns", + -12.868170738220217 + ], + [ + "▁nuances", + -12.86829662322998 + ], + [ + "▁influx", + -12.868443489074709 + ], + [ + "▁optics", + -12.868474960327148 + ], + [ + "▁TWO", + -12.868491172790527 + ], + [ + "rar", + -12.868563652038574 + ], + [ + "▁innovators", + -12.86864185333252 + ], + [ + "▁Decorative", + 
-12.868755340576172 + ], + [ + "▁underestimate", + -12.868807792663574 + ], + [ + "Form", + -12.868856430053713 + ], + [ + "▁Sy", + -12.868878364562988 + ], + [ + "▁og", + -12.868937492370604 + ], + [ + "▁java", + -12.868963241577148 + ], + [ + "▁Wallpapers", + -12.869024276733398 + ], + [ + "▁diversion", + -12.869077682495115 + ], + [ + "▁kite", + -12.86913013458252 + ], + [ + "▁portals", + -12.869197845458984 + ], + [ + "lick", + -12.869214057922363 + ], + [ + "▁lettering", + -12.869256019592283 + ], + [ + "▁relocating", + -12.869338989257812 + ], + [ + "▁Webster", + -12.869356155395508 + ], + [ + "▁NPR", + -12.86936092376709 + ], + [ + "Cam", + -12.86937141418457 + ], + [ + "▁Ang", + -12.869406700134276 + ], + [ + "zin", + -12.869433403015137 + ], + [ + "▁ref", + -12.869543075561523 + ], + [ + "▁inpatient", + -12.869636535644531 + ], + [ + "▁interruption", + -12.869757652282717 + ], + [ + "▁scraps", + -12.869759559631348 + ], + [ + "Club", + -12.869830131530762 + ], + [ + "▁litres", + -12.869882583618164 + ], + [ + "▁Hazel", + -12.869894981384276 + ], + [ + "▁stat", + -12.869905471801758 + ], + [ + "▁Jungle", + -12.86997127532959 + ], + [ + "bottom", + -12.870020866394045 + ], + [ + "▁goalie", + -12.870129585266112 + ], + [ + "▁horizontally", + -12.870197296142578 + ], + [ + "unt", + -12.870256423950195 + ], + [ + "▁Shade", + -12.870305061340332 + ], + [ + "▁cleanser", + -12.870333671569824 + ], + [ + "▁Sk", + -12.870410919189451 + ], + [ + "political", + -12.870479583740234 + ], + [ + "1.3", + -12.87048053741455 + ], + [ + "▁Midnight", + -12.870556831359863 + ], + [ + "▁Johnston", + -12.870561599731444 + ], + [ + "users", + -12.87059211730957 + ], + [ + "▁Cody", + -12.87063694000244 + ], + [ + "▁skeleton", + -12.871014595031738 + ], + [ + "▁blouse", + -12.871066093444824 + ], + [ + "▁Buyers", + -12.871100425720217 + ], + [ + "▁Forge", + -12.87112808227539 + ], + [ + "Fair", + -12.871355056762695 + ], + [ + "▁Disorder", + -12.871386528015137 + ], + [ + "▁1992,", + -12.871430397033691 + ], + [ + "▁pastors", + -12.87168025970459 + ], + [ + "Col", + -12.87170124053955 + ], + [ + "Sarah", + -12.87186336517334 + ], + [ + "JA", + -12.871871948242188 + ], + [ + "director", + -12.871919631958008 + ], + [ + "▁Kensington", + -12.871977806091309 + ], + [ + "▁exhilarating", + -12.871977806091309 + ], + [ + "▁meditate", + -12.871981620788574 + ], + [ + "Cola", + -12.872053146362305 + ], + [ + "lex", + -12.872102737426758 + ], + [ + "▁tirelessly", + -12.872102737426758 + ], + [ + "▁FTP", + -12.872121810913086 + ], + [ + "▁Lib", + -12.872132301330566 + ], + [ + "▁Priority", + -12.872191429138184 + ], + [ + "inate", + -12.872200965881348 + ], + [ + "▁PTSD", + -12.872251510620115 + ], + [ + "nov", + -12.872519493103027 + ], + [ + "▁Ribbon", + -12.87252426147461 + ], + [ + "▁slips", + -12.872538566589355 + ], + [ + "ographic", + -12.872688293457031 + ], + [ + "▁Explain", + -12.872710227966309 + ], + [ + "▁Soap", + -12.8727388381958 + ], + [ + "▁Develop", + -12.872814178466797 + ], + [ + "▁Hunting", + -12.872833251953123 + ], + [ + "▁bland", + -12.87283992767334 + ], + [ + "▁assembling", + -12.872875213623049 + ], + [ + "▁Screening", + -12.872884750366213 + ], + [ + "Brian", + -12.872913360595703 + ], + [ + "▁paperback", + -12.872989654541016 + ], + [ + "▁Mug", + -12.873040199279783 + ], + [ + "lé", + -12.873085021972656 + ], + [ + "▁Actor", + -12.873172760009766 + ], + [ + "▁TH", + -12.873209953308104 + ], + [ + "▁eccentric", + -12.873275756835938 + ], + [ + "▁affinity", + -12.873311042785645 + ], + [ + 
"▁loneliness", + -12.873313903808594 + ], + [ + "EW", + -12.873320579528809 + ], + [ + "▁biomass", + -12.87332534790039 + ], + [ + "▁Entrance", + -12.873470306396484 + ], + [ + "▁refinance", + -12.873491287231444 + ], + [ + "eke", + -12.873516082763672 + ], + [ + "▁incomes", + -12.87358856201172 + ], + [ + "▁drywall", + -12.873895645141602 + ], + [ + "▁Pas", + -12.873943328857422 + ], + [ + "▁70-", + -12.87399959564209 + ], + [ + "▁Marks", + -12.874035835266112 + ], + [ + "▁sack", + -12.874095916748049 + ], + [ + "▁MHz", + -12.874162673950195 + ], + [ + "▁prophet", + -12.87418270111084 + ], + [ + "▁saturation", + -12.87419891357422 + ], + [ + "▁explorer", + -12.874290466308594 + ], + [ + "Ci", + -12.874353408813477 + ], + [ + "OF", + -12.874354362487791 + ], + [ + "▁bedside", + -12.874384880065918 + ], + [ + "▁flashes", + -12.874469757080078 + ], + [ + "ske", + -12.874558448791504 + ], + [ + "▁intimacy", + -12.874646186828612 + ], + [ + "▁accountants", + -12.874744415283203 + ], + [ + "▁Conflict", + -12.874777793884276 + ], + [ + "▁signify", + -12.87480640411377 + ], + [ + "▁liter", + -12.87480926513672 + ], + [ + "Sky", + -12.874810218811035 + ], + [ + "▁Cosmetic", + -12.874850273132324 + ], + [ + "▁slideshow", + -12.874855041503906 + ], + [ + "oku", + -12.874863624572754 + ], + [ + "▁1916", + -12.874917030334473 + ], + [ + "▁dynamically", + -12.875 + ], + [ + "▁geological", + -12.875285148620604 + ], + [ + "▁treasured", + -12.875368118286133 + ], + [ + "▁derivatives", + -12.87540340423584 + ], + [ + "wu", + -12.875423431396484 + ], + [ + "▁dev", + -12.87571144104004 + ], + [ + "ovich", + -12.875760078430176 + ], + [ + "bah", + -12.875773429870604 + ], + [ + "Financial", + -12.8757905960083 + ], + [ + "▁Einstein", + -12.87579345703125 + ], + [ + "▁resonate", + -12.875863075256348 + ], + [ + "▁Wilderness", + -12.875894546508787 + ], + [ + "▁foyer", + -12.87599277496338 + ], + [ + "▁tack", + -12.876006126403809 + ], + [ + "cn", + -12.876323699951172 + ], + [ + "shu", + -12.876442909240724 + ], + [ + "▁sunflower", + -12.876548767089844 + ], + [ + "▁Paulo", + -12.87655258178711 + ], + [ + "▁funk", + -12.876651763916016 + ], + [ + "▁downturn", + -12.876717567443848 + ], + [ + "SEC", + -12.876729011535645 + ], + [ + "▁subconscious", + -12.876803398132324 + ], + [ + "▁Raiders", + -12.876949310302734 + ], + [ + "▁Bunny", + -12.877001762390137 + ], + [ + "▁25,000", + -12.87717056274414 + ], + [ + "▁shaded", + -12.877171516418455 + ], + [ + "nite", + -12.877174377441406 + ], + [ + "Researchers", + -12.877405166625977 + ], + [ + "▁Martial", + -12.877513885498049 + ], + [ + "▁leafy", + -12.877524375915527 + ], + [ + "▁tsp", + -12.87753963470459 + ], + [ + "▁Fork", + -12.877628326416016 + ], + [ + "▁Edgar", + -12.877897262573242 + ], + [ + "3/", + -12.878040313720703 + ], + [ + "Israel", + -12.87814712524414 + ], + [ + "▁Wrong", + -12.878175735473633 + ], + [ + "▁375", + -12.878217697143556 + ], + [ + "▁restroom", + -12.87844944000244 + ], + [ + "station", + -12.87848663330078 + ], + [ + "▁Exeter", + -12.878676414489746 + ], + [ + "▁crowdfunding", + -12.878680229187012 + ], + [ + "▁nominees", + -12.878703117370604 + ], + [ + "▁fisheries", + -12.878711700439451 + ], + [ + "▁KNOW", + -12.878747940063477 + ], + [ + "▁Roe", + -12.878756523132324 + ], + [ + "▁transitioning", + -12.878771781921388 + ], + [ + "▁replication", + -12.878799438476562 + ], + [ + "kick", + -12.878939628601074 + ], + [ + "▁Costco", + -12.878968238830566 + ], + [ + "ре", + -12.879063606262209 + ], + [ + "▁OVER", + 
-12.879064559936523 + ], + [ + "bug", + -12.879090309143066 + ], + [ + "▁Cathy", + -12.879227638244627 + ], + [ + "▁cellphone", + -12.879302978515623 + ], + [ + "▁garnish", + -12.879323959350586 + ], + [ + "Phil", + -12.879354476928713 + ], + [ + "▁circa", + -12.879356384277344 + ], + [ + "▁Herman", + -12.87939453125 + ], + [ + "▁Winners", + -12.879405975341797 + ], + [ + "▁enlarge", + -12.879671096801758 + ], + [ + "▁PBS", + -12.879762649536133 + ], + [ + "▁Armenia", + -12.879815101623535 + ], + [ + "▁Cali", + -12.879842758178713 + ], + [ + "▁emerges", + -12.880001068115234 + ], + [ + "▁Voices", + -12.880083084106444 + ], + [ + "201", + -12.880375862121582 + ], + [ + "Cl", + -12.880419731140137 + ], + [ + "▁disregard", + -12.880444526672363 + ], + [ + "▁wh", + -12.880444526672363 + ], + [ + "▁preservatives", + -12.880619049072266 + ], + [ + "lig", + -12.880651473999023 + ], + [ + "▁handbag", + -12.880829811096191 + ], + [ + "▁Paleo", + -12.880849838256836 + ], + [ + "▁fireplaces", + -12.88090991973877 + ], + [ + "▁Watt", + -12.880971908569336 + ], + [ + "▁Cro", + -12.881011009216309 + ], + [ + "▁Trace", + -12.881024360656738 + ], + [ + "▁shortcuts", + -12.881178855895996 + ], + [ + "Basically", + -12.881307601928713 + ], + [ + "Bur", + -12.88133430480957 + ], + [ + "▁Flexible", + -12.88133716583252 + ], + [ + "▁Capture", + -12.88134479522705 + ], + [ + "positive", + -12.881440162658691 + ], + [ + "ched", + -12.881477355957031 + ], + [ + "fine", + -12.881488800048828 + ], + [ + "▁hut", + -12.881669044494627 + ], + [ + "▁columnist", + -12.881718635559082 + ], + [ + "▁commanded", + -12.881747245788574 + ], + [ + "tube", + -12.881901741027832 + ], + [ + "None", + -12.881939888000488 + ], + [ + "▁Holt", + -12.881980895996094 + ], + [ + "▁Opt", + -12.882057189941406 + ], + [ + "ako", + -12.8821382522583 + ], + [ + "▁Secrets", + -12.882145881652832 + ], + [ + "▁Lok", + -12.882159233093262 + ], + [ + "Education", + -12.882219314575195 + ], + [ + "official", + -12.882411003112791 + ], + [ + "▁Weber", + -12.882513046264648 + ], + [ + "▁Marilyn", + -12.882686614990234 + ], + [ + "▁budding", + -12.882705688476562 + ], + [ + "Alternatively", + -12.882756233215332 + ], + [ + "boro", + -12.882789611816406 + ], + [ + "▁fairs", + -12.88289737701416 + ], + [ + "▁compiler", + -12.883014678955078 + ], + [ + "Fixed", + -12.883024215698242 + ], + [ + "either", + -12.88305950164795 + ], + [ + "▁bustle", + -12.883124351501465 + ], + [ + "▁dyed", + -12.883158683776855 + ], + [ + "▁Mounted", + -12.883164405822754 + ], + [ + "▁Lone", + -12.88330364227295 + ], + [ + "▁NEED", + -12.883315086364746 + ], + [ + "▁Pr", + -12.883501052856444 + ], + [ + "1.0", + -12.88365077972412 + ], + [ + "ADA", + -12.883745193481444 + ], + [ + "▁aloud", + -12.883753776550291 + ], + [ + "bor", + -12.88376235961914 + ], + [ + "Micro", + -12.883792877197266 + ], + [ + "▁mop", + -12.883882522583008 + ], + [ + "▁Arabian", + -12.88389015197754 + ], + [ + "▁begging", + -12.883917808532717 + ], + [ + "▁Tweet", + -12.883947372436523 + ], + [ + "NH", + -12.88396167755127 + ], + [ + "▁cheddar", + -12.884042739868164 + ], + [ + "▁LL", + -12.88425636291504 + ], + [ + "▁adjoining", + -12.884306907653809 + ], + [ + "▁weakened", + -12.884403228759766 + ], + [ + "hou", + -12.88454246520996 + ], + [ + "▁Helping", + -12.88462257385254 + ], + [ + "hotel", + -12.884623527526855 + ], + [ + "▁condominium", + -12.884626388549805 + ], + [ + "Drop", + -12.884883880615234 + ], + [ + "▁Aim", + -12.884957313537598 + ], + [ + "▁graded", + -12.88497543334961 + ], + 
[ + "▁regiment", + -12.884976387023926 + ], + [ + "▁8,000", + -12.885039329528809 + ], + [ + "▁Riding", + -12.885064125061035 + ], + [ + "▁pitfalls", + -12.885112762451172 + ], + [ + "▁Kiwi", + -12.885164260864258 + ], + [ + "▁Mandarin", + -12.885194778442385 + ], + [ + "▁warrants", + -12.88527011871338 + ], + [ + "▁Fiji", + -12.885309219360352 + ], + [ + "▁nu", + -12.885323524475098 + ], + [ + "▁optimisation", + -12.88539218902588 + ], + [ + "▁bilingual", + -12.885398864746094 + ], + [ + "▁rubbed", + -12.885421752929688 + ], + [ + "▁DIS", + -12.885425567626951 + ], + [ + "▁adept", + -12.88542938232422 + ], + [ + "▁soaps", + -12.885486602783203 + ], + [ + "▁BAR", + -12.885627746582031 + ], + [ + "plex", + -12.885631561279297 + ], + [ + "▁Breaking", + -12.886004447937012 + ], + [ + "▁subdivision", + -12.886004447937012 + ], + [ + "▁derivative", + -12.886157035827637 + ], + [ + "▁Airbnb", + -12.88620948791504 + ], + [ + "RED", + -12.886292457580566 + ], + [ + "fen", + -12.886309623718262 + ], + [ + "▁graders", + -12.886311531066896 + ], + [ + "Phone", + -12.886404037475586 + ], + [ + "▁Finals", + -12.886412620544434 + ], + [ + "▁Rex", + -12.886451721191406 + ], + [ + "Scan", + -12.886481285095217 + ], + [ + "▁Molecular", + -12.886682510375977 + ], + [ + "▁balconies", + -12.886743545532228 + ], + [ + "▁skipping", + -12.886821746826172 + ], + [ + "Daily", + -12.886876106262209 + ], + [ + "▁MMA", + -12.886906623840332 + ], + [ + "▁Cub", + -12.88701629638672 + ], + [ + "▁Treaty", + -12.887103080749512 + ], + [ + "TIC", + -12.887227058410645 + ], + [ + "▁Efficiency", + -12.887421607971191 + ], + [ + "▁Charm", + -12.887565612792969 + ], + [ + "OUT", + -12.88761043548584 + ], + [ + "▁Glory", + -12.887727737426758 + ], + [ + "cause", + -12.887742042541504 + ], + [ + "▁cords", + -12.88776683807373 + ], + [ + "▁coworkers", + -12.887868881225586 + ], + [ + "▁bunny", + -12.887927055358888 + ], + [ + "▁preferable", + -12.888094902038574 + ], + [ + "▁apology", + -12.888104438781738 + ], + [ + "▁Helsinki", + -12.888242721557615 + ], + [ + "Recent", + -12.888301849365234 + ], + [ + "▁reversal", + -12.888318061828612 + ], + [ + "▁encompass", + -12.888405799865724 + ], + [ + "OG", + -12.88844108581543 + ], + [ + "▁Wisdom", + -12.888442993164062 + ], + [ + "▁nets", + -12.888535499572754 + ], + [ + "▁hardened", + -12.8886079788208 + ], + [ + "▁puff", + -12.888716697692873 + ], + [ + "▁veneer", + -12.888758659362791 + ], + [ + "▁Nav", + -12.88887882232666 + ], + [ + "▁Chill", + -12.888957977294922 + ], + [ + "▁departing", + -12.888971328735352 + ], + [ + "▁exhaustive", + -12.889131546020508 + ], + [ + "▁Tent", + -12.889230728149414 + ], + [ + "▁rethink", + -12.889252662658691 + ], + [ + "▁Abdul", + -12.88942050933838 + ], + [ + "▁fascination", + -12.88945198059082 + ], + [ + "▁poignant", + -12.88945198059082 + ], + [ + "▁factual", + -12.889458656311035 + ], + [ + "▁Anton", + -12.889482498168944 + ], + [ + "▁farewell", + -12.889554023742676 + ], + [ + "▁climates", + -12.8897066116333 + ], + [ + "▁1954", + -12.88987159729004 + ], + [ + "▁devised", + -12.88991641998291 + ], + [ + "sense", + -12.889997482299805 + ], + [ + "hon", + -12.890029907226562 + ], + [ + "▁1943", + -12.890151023864746 + ], + [ + "▁(2015)", + -12.890257835388184 + ], + [ + "▁Classroom", + -12.890368461608888 + ], + [ + "850", + -12.890443801879885 + ], + [ + "▁Philly", + -12.890563011169434 + ], + [ + "▁Totally", + -12.890613555908203 + ], + [ + "▁unbeatable", + -12.89080810546875 + ], + [ + "▁Bruno", + -12.890827178955078 + ], + [ + 
"▁blueberry", + -12.89098834991455 + ], + [ + "generated", + -12.891141891479492 + ], + [ + "▁limitless", + -12.891157150268556 + ], + [ + "▁JUST", + -12.89129638671875 + ], + [ + "▁repeal", + -12.89130401611328 + ], + [ + "▁nominate", + -12.891568183898926 + ], + [ + "▁doctorate", + -12.89161491394043 + ], + [ + "▁Hansen", + -12.891632080078123 + ], + [ + "▁TW", + -12.89163303375244 + ], + [ + "igo", + -12.891767501831056 + ], + [ + "config", + -12.891838073730469 + ], + [ + "та", + -12.89188289642334 + ], + [ + "▁Dept", + -12.891904830932615 + ], + [ + "Manager", + -12.891937255859377 + ], + [ + "▁unofficial", + -12.891993522644045 + ], + [ + "▁prompting", + -12.892035484313965 + ], + [ + "duct", + -12.892119407653809 + ], + [ + "▁extravagant", + -12.892167091369627 + ], + [ + "▁LB", + -12.892168998718262 + ], + [ + "▁wallets", + -12.892183303833008 + ], + [ + "CG", + -12.892289161682127 + ], + [ + "▁massacre", + -12.892306327819824 + ], + [ + "▁Treasurer", + -12.89250373840332 + ], + [ + "▁Tale", + -12.8925142288208 + ], + [ + "▁Inventory", + -12.8925199508667 + ], + [ + "▁Relax", + -12.892574310302734 + ], + [ + "ih", + -12.8926420211792 + ], + [ + "▁embarked", + -12.892777442932127 + ], + [ + "▁Tata", + -12.892809867858888 + ], + [ + "▁prototypes", + -12.892852783203123 + ], + [ + "▁HT", + -12.892918586730955 + ], + [ + "▁runtime", + -12.893020629882812 + ], + [ + "▁Bain", + -12.893196105957031 + ], + [ + "omo", + -12.893218040466309 + ], + [ + "▁EVER", + -12.89322280883789 + ], + [ + "AND", + -12.893243789672852 + ], + [ + "▁vigorous", + -12.893291473388672 + ], + [ + "ingly", + -12.893362998962402 + ], + [ + "▁bald", + -12.89346694946289 + ], + [ + "▁Buddhism", + -12.89352798461914 + ], + [ + "▁watermelon", + -12.89352798461914 + ], + [ + "▁opaque", + -12.89353084564209 + ], + [ + "▁1992.", + -12.893678665161133 + ], + [ + "▁Yum", + -12.89373779296875 + ], + [ + "▁Understand", + -12.89378547668457 + ], + [ + "▁latitude", + -12.893866539001465 + ], + [ + "▁inland", + -12.8939208984375 + ], + [ + "opening", + -12.893957138061523 + ], + [ + "billion", + -12.894068717956545 + ], + [ + "▁7%", + -12.894155502319336 + ], + [ + "▁offspring", + -12.894156455993652 + ], + [ + "▁GHz", + -12.894166946411133 + ], + [ + "▁Kindergarten", + -12.894244194030762 + ], + [ + "▁crowned", + -12.894311904907228 + ], + [ + "jun", + -12.894511222839355 + ], + [ + "▁113", + -12.89451503753662 + ], + [ + "▁Techniques", + -12.894619941711426 + ], + [ + "bang", + -12.894640922546388 + ], + [ + "▁Camden", + -12.894673347473145 + ], + [ + "▁township", + -12.894774436950684 + ], + [ + "▁Till", + -12.894792556762695 + ], + [ + "▁Refer", + -12.894801139831545 + ], + [ + "Number", + -12.894879341125488 + ], + [ + "▁cerebral", + -12.894889831542969 + ], + [ + "▁melodic", + -12.894890785217283 + ], + [ + "cis", + -12.89504337310791 + ], + [ + "technical", + -12.895123481750488 + ], + [ + "him", + -12.895275115966797 + ], + [ + "▁Caesar", + -12.895341873168944 + ], + [ + "Road", + -12.895434379577637 + ], + [ + "▁adidas", + -12.895514488220217 + ], + [ + "▁deposition", + -12.895554542541504 + ], + [ + "▁inhabit", + -12.895556449890137 + ], + [ + "Reading", + -12.895750045776367 + ], + [ + "mul", + -12.895773887634276 + ], + [ + "▁Violet", + -12.895896911621094 + ], + [ + "▁Moody", + -12.895992279052734 + ], + [ + "▁designate", + -12.89610195159912 + ], + [ + "▁Ethan", + -12.896159172058104 + ], + [ + "▁innocence", + -12.896254539489746 + ], + [ + "▁perseverance", + -12.896254539489746 + ], + [ + "▁approvals", + 
-12.89627742767334 + ], + [ + "VER", + -12.896512031555176 + ], + [ + "▁888", + -12.89660358428955 + ], + [ + "▁overlapping", + -12.896613121032717 + ], + [ + "▁Robotics", + -12.896655082702637 + ], + [ + "▁Meg", + -12.896756172180176 + ], + [ + "▁Enable", + -12.896815299987791 + ], + [ + "▁aches", + -12.896927833557127 + ], + [ + "▁outrageous", + -12.896994590759276 + ], + [ + "▁unlocking", + -12.89704132080078 + ], + [ + "▁additive", + -12.897119522094728 + ], + [ + "▁Regulatory", + -12.897346496582031 + ], + [ + "▁JA", + -12.897401809692385 + ], + [ + "odo", + -12.89742946624756 + ], + [ + "▁Listed", + -12.897489547729492 + ], + [ + "arra", + -12.897497177124023 + ], + [ + "▁quinoa", + -12.89762020111084 + ], + [ + "▁href", + -12.897640228271484 + ], + [ + "▁predicts", + -12.897686004638672 + ], + [ + "▁Collaboration", + -12.897789001464844 + ], + [ + "▁Duo", + -12.897859573364258 + ], + [ + "gn", + -12.897932052612305 + ], + [ + "▁bounds", + -12.897937774658203 + ], + [ + "▁notebooks", + -12.89802360534668 + ], + [ + "▁Lafayette", + -12.898141860961914 + ], + [ + "▁citations", + -12.898162841796877 + ], + [ + "▁Instructions", + -12.898240089416504 + ], + [ + "▁attackers", + -12.898269653320312 + ], + [ + "ient", + -12.898282051086426 + ], + [ + "▁Supervisor", + -12.89836597442627 + ], + [ + "▁Winning", + -12.89836883544922 + ], + [ + "▁Meetings", + -12.89840316772461 + ], + [ + "▁buckets", + -12.89849090576172 + ], + [ + "▁alteration", + -12.8984956741333 + ], + [ + "▁remembers", + -12.898496627807615 + ], + [ + "▁alternatively", + -12.898587226867676 + ], + [ + "▁ta", + -12.898629188537598 + ], + [ + "111", + -12.898972511291504 + ], + [ + "▁Abe", + -12.898978233337402 + ], + [ + "▁Nairobi", + -12.898988723754885 + ], + [ + "▁Witch", + -12.899009704589844 + ], + [ + "▁Perez", + -12.899112701416016 + ], + [ + "▁Moo", + -12.899137496948242 + ], + [ + "▁ACA", + -12.899149894714355 + ], + [ + "▁cheesecake", + -12.899209022521973 + ], + [ + "ен", + -12.899249076843262 + ], + [ + "▁contender", + -12.899282455444336 + ], + [ + "▁Linen", + -12.899321556091309 + ], + [ + "Fantastic", + -12.89936351776123 + ], + [ + "▁1993.", + -12.899458885192873 + ], + [ + "▁discerning", + -12.899497985839844 + ], + [ + "kle", + -12.899662971496582 + ], + [ + "▁incidental", + -12.899840354919434 + ], + [ + "als", + -12.899968147277832 + ], + [ + "▁Profit", + -12.900023460388184 + ], + [ + "▁monks", + -12.90007495880127 + ], + [ + "▁Payday", + -12.900181770324709 + ], + [ + "IX", + -12.900223731994627 + ], + [ + "NF", + -12.900253295898438 + ], + [ + "▁Producer", + -12.900257110595703 + ], + [ + "▁Essentially", + -12.900321006774902 + ], + [ + "▁genealogy", + -12.900358200073242 + ], + [ + "▁Calculator", + -12.900360107421877 + ], + [ + "▁sworn", + -12.900537490844728 + ], + [ + "▁Herbert", + -12.900548934936523 + ], + [ + "▁????", + -12.900551795959473 + ], + [ + "▁analyzes", + -12.900681495666504 + ], + [ + "▁Dove", + -12.90068817138672 + ], + [ + "▁Azerbaijan", + -12.900701522827148 + ], + [ + "▁louder", + -12.900701522827148 + ], + [ + "10)", + -12.90071964263916 + ], + [ + "ote", + -12.90075397491455 + ], + [ + "storage", + -12.90081787109375 + ], + [ + "▁Names", + -12.90084171295166 + ], + [ + "lol", + -12.900843620300291 + ], + [ + "▁Sonoma", + -12.900861740112305 + ], + [ + "▁Erie", + -12.90086841583252 + ], + [ + "▁Crest", + -12.90087604522705 + ], + [ + "▁corrective", + -12.900935173034668 + ], + [ + "Lord", + -12.900997161865234 + ], + [ + "▁TI", + -12.901020050048828 + ], + [ + "▁Bolt", + 
-12.9011869430542 + ], + [ + "▁itching", + -12.901205062866213 + ], + [ + "▁2.6", + -12.901250839233398 + ], + [ + "▁Leigh", + -12.901272773742676 + ], + [ + "▁receivers", + -12.901296615600586 + ], + [ + "▁aide", + -12.901359558105469 + ], + [ + "▁Dixon", + -12.90153980255127 + ], + [ + "▁headers", + -12.901570320129396 + ], + [ + "▁goddess", + -12.901588439941406 + ], + [ + "▁territorial", + -12.901598930358888 + ], + [ + "▁Stud", + -12.901619911193848 + ], + [ + "keeper", + -12.9016695022583 + ], + [ + "▁dinosaurs", + -12.90172004699707 + ], + [ + "▁borough", + -12.901728630065918 + ], + [ + "▁devastated", + -12.901827812194824 + ], + [ + "apple", + -12.901939392089844 + ], + [ + "▁nourish", + -12.902000427246094 + ], + [ + "▁concession", + -12.902021408081056 + ], + [ + "▁Stretch", + -12.902040481567385 + ], + [ + "▁Dayton", + -12.902140617370604 + ], + [ + "pac", + -12.902196884155272 + ], + [ + "▁Shock", + -12.902198791503906 + ], + [ + "▁jams", + -12.9022216796875 + ], + [ + "▁Strawberry", + -12.902251243591309 + ], + [ + "▁presses", + -12.902321815490724 + ], + [ + "▁bots", + -12.902337074279783 + ], + [ + "3%", + -12.902528762817385 + ], + [ + "▁Fridays", + -12.902542114257812 + ], + [ + "▁(17", + -12.902554512023926 + ], + [ + "▁inactive", + -12.90258502960205 + ], + [ + "▁WIN", + -12.902626037597656 + ], + [ + "▁OT", + -12.902668952941896 + ], + [ + "alia", + -12.902752876281738 + ], + [ + "hol", + -12.902755737304688 + ], + [ + "210", + -12.902896881103516 + ], + [ + "▁Container", + -12.902897834777832 + ], + [ + "▁Bern", + -12.902909278869627 + ], + [ + "ril", + -12.902981758117676 + ], + [ + "▁hy", + -12.903005599975586 + ], + [ + "▁collaborators", + -12.903040885925291 + ], + [ + "▁stationed", + -12.903051376342772 + ], + [ + "▁cod", + -12.903169631958008 + ], + [ + "▁commencement", + -12.90318202972412 + ], + [ + "▁shading", + -12.903191566467283 + ], + [ + "▁sexually", + -12.903310775756836 + ], + [ + "▁pulmonary", + -12.903326034545898 + ], + [ + "▁outrage", + -12.903359413146973 + ], + [ + "▁entryway", + -12.903538703918455 + ], + [ + "▁Didn", + -12.903641700744627 + ], + [ + "▁Bicycle", + -12.903763771057127 + ], + [ + "▁Quiet", + -12.903901100158691 + ], + [ + "▁spectacle", + -12.903922080993652 + ], + [ + "placed", + -12.903987884521484 + ], + [ + "▁mastering", + -12.904023170471191 + ], + [ + "▁boutiques", + -12.904064178466797 + ], + [ + "▁dent", + -12.904094696044922 + ], + [ + "Boy", + -12.904180526733398 + ], + [ + "named", + -12.90422821044922 + ], + [ + "▁Murder", + -12.904263496398926 + ], + [ + "▁Carmen", + -12.904295921325684 + ], + [ + "▁learns", + -12.904356002807615 + ], + [ + "▁luckily", + -12.904385566711426 + ], + [ + "Nick", + -12.90443515777588 + ], + [ + "▁persecution", + -12.904479026794434 + ], + [ + "▁indictment", + -12.90447998046875 + ], + [ + "▁Winchester", + -12.90448760986328 + ], + [ + "▁Recognition", + -12.904552459716797 + ], + [ + "▁Tune", + -12.90458869934082 + ], + [ + "phil", + -12.904593467712402 + ], + [ + "▁catastrophe", + -12.904611587524414 + ], + [ + "trick", + -12.904633522033691 + ], + [ + "▁Nicola", + -12.90467643737793 + ], + [ + "ais", + -12.904691696166992 + ], + [ + "▁uphill", + -12.904743194580078 + ], + [ + "480", + -12.904793739318848 + ], + [ + "▁Philips", + -12.904827117919922 + ], + [ + "▁Mau", + -12.905035972595217 + ], + [ + "▁Noise", + -12.905141830444336 + ], + [ + "▁proactively", + -12.905165672302246 + ], + [ + "▁Desire", + -12.905192375183104 + ], + [ + "▁registrations", + -12.905226707458496 + ], + [ + 
"▁rejoice", + -12.905254364013672 + ], + [ + "Develop", + -12.905305862426758 + ], + [ + "▁contend", + -12.905317306518556 + ], + [ + "agan", + -12.9054594039917 + ], + [ + "▁Bala", + -12.905471801757812 + ], + [ + "▁solvent", + -12.905517578125 + ], + [ + "▁buff", + -12.905526161193848 + ], + [ + "▁righteous", + -12.90556526184082 + ], + [ + "button", + -12.905704498291016 + ], + [ + "DU", + -12.905722618103027 + ], + [ + "▁Maintain", + -12.905746459960938 + ], + [ + "dance", + -12.90578556060791 + ], + [ + "reaching", + -12.905811309814451 + ], + [ + "▁tote", + -12.905828475952148 + ], + [ + "▁marital", + -12.905831336975098 + ], + [ + "▁Russ", + -12.905837059020996 + ], + [ + "▁york", + -12.905858993530272 + ], + [ + "ectomy", + -12.905887603759766 + ], + [ + "▁assassination", + -12.905893325805664 + ], + [ + "▁reflux", + -12.905893325805664 + ], + [ + "▁moderation", + -12.905923843383787 + ], + [ + "▁inspectors", + -12.905959129333496 + ], + [ + "▁soar", + -12.905965805053713 + ], + [ + "lov", + -12.905975341796877 + ], + [ + "▁eu", + -12.906126022338867 + ], + [ + "▁Nickel", + -12.906166076660156 + ], + [ + "▁addict", + -12.906246185302734 + ], + [ + "KS", + -12.90633487701416 + ], + [ + "▁Hide", + -12.906471252441406 + ], + [ + "▁Insight", + -12.906550407409668 + ], + [ + "▁Epi", + -12.906611442565918 + ], + [ + "▁Tucker", + -12.906673431396484 + ], + [ + "▁Actual", + -12.906689643859863 + ], + [ + "mbre", + -12.906939506530762 + ], + [ + "▁wager", + -12.907299041748049 + ], + [ + "TU", + -12.907408714294434 + ], + [ + "▁detects", + -12.907703399658203 + ], + [ + "▁preserves", + -12.907770156860352 + ], + [ + "▁Priest", + -12.90782070159912 + ], + [ + "▁vain", + -12.907835006713867 + ], + [ + "▁shortened", + -12.907861709594728 + ], + [ + "litre", + -12.907955169677734 + ], + [ + "▁lawns", + -12.907995223999023 + ], + [ + "llo", + -12.90806484222412 + ], + [ + "▁excavation", + -12.90810775756836 + ], + [ + "▁Hamas", + -12.908184051513672 + ], + [ + "▁suitcase", + -12.908230781555176 + ], + [ + "▁hotspot", + -12.908231735229492 + ], + [ + "▁Iris", + -12.908310890197754 + ], + [ + "Ya", + -12.908535957336426 + ], + [ + "▁deterioration", + -12.90861701965332 + ], + [ + "▁CSR", + -12.908660888671877 + ], + [ + "▁sperm", + -12.908809661865234 + ], + [ + "▁Survival", + -12.908931732177734 + ], + [ + "▁Whit", + -12.908987045288086 + ], + [ + "▁Graph", + -12.909080505371094 + ], + [ + "▁Augusta", + -12.909125328063965 + ], + [ + "event", + -12.90915298461914 + ], + [ + "uti", + -12.909172058105469 + ], + [ + "dex", + -12.909263610839844 + ], + [ + "▁rocker", + -12.909330368041992 + ], + [ + "renowned", + -12.909337997436523 + ], + [ + "▁sufferers", + -12.90937042236328 + ], + [ + "▁Essentials", + -12.909423828125 + ], + [ + "▁chipset", + -12.909462928771973 + ], + [ + "▁profoundly", + -12.909468650817873 + ], + [ + "▁Aston", + -12.90947151184082 + ], + [ + "▁Carry", + -12.909505844116213 + ], + [ + "osh", + -12.90961456298828 + ], + [ + "▁Structural", + -12.909635543823242 + ], + [ + "Editor", + -12.909662246704102 + ], + [ + "▁watt", + -12.909666061401367 + ], + [ + "Staff", + -12.90969467163086 + ], + [ + "▁winters", + -12.90972137451172 + ], + [ + "▁cl", + -12.909737586975098 + ], + [ + "ivo", + -12.90982151031494 + ], + [ + "▁Torres", + -12.90982151031494 + ], + [ + "▁premiered", + -12.909975051879885 + ], + [ + "▁responsibly", + -12.90999984741211 + ], + [ + "▁anthology", + -12.910005569458008 + ], + [ + "▁undesirable", + -12.910006523132324 + ], + [ + "▁rested", + -12.91005039215088 + 
], + [ + "▁Birch", + -12.910176277160645 + ], + [ + "imi", + -12.910222053527832 + ], + [ + "▁PF", + -12.910293579101562 + ], + [ + "▁1939", + -12.910358428955078 + ], + [ + "stead", + -12.910411834716797 + ], + [ + "▁Rita", + -12.910412788391112 + ], + [ + "▁Architectural", + -12.910457611083984 + ], + [ + "▁Finder", + -12.910479545593262 + ], + [ + "▁iced", + -12.910601615905762 + ], + [ + "▁Investor", + -12.91070556640625 + ], + [ + "ENT", + -12.9107084274292 + ], + [ + "BD", + -12.910747528076172 + ], + [ + "▁exploited", + -12.910832405090332 + ], + [ + "▁mystical", + -12.910943984985352 + ], + [ + "▁4.4", + -12.91102123260498 + ], + [ + "▁Sheikh", + -12.911104202270508 + ], + [ + "CAP", + -12.911115646362305 + ], + [ + "▁Zambia", + -12.911115646362305 + ], + [ + "TT", + -12.911120414733888 + ], + [ + "▁buildup", + -12.911126136779783 + ], + [ + "LI", + -12.911163330078123 + ], + [ + "▁blunt", + -12.91117000579834 + ], + [ + "▁IPA", + -12.911189079284668 + ], + [ + "▁coronary", + -12.911395072937012 + ], + [ + "▁electromagnetic", + -12.911506652832031 + ], + [ + "vas", + -12.911514282226562 + ], + [ + "▁idyllic", + -12.911558151245115 + ], + [ + "▁til", + -12.911887168884276 + ], + [ + "/30", + -12.912208557128906 + ], + [ + ",700", + -12.912271499633787 + ], + [ + "▁levitra", + -12.91239070892334 + ], + [ + "Charles", + -12.91257667541504 + ], + [ + "▁Verde", + -12.91259765625 + ], + [ + "▁Printed", + -12.912615776062012 + ], + [ + "▁chilling", + -12.912748336791992 + ], + [ + "▁Crescent", + -12.912806510925291 + ], + [ + "▁BL", + -12.912814140319824 + ], + [ + "▁exporters", + -12.91281509399414 + ], + [ + "▁tally", + -12.912854194641112 + ], + [ + "▁blackjack", + -12.91295337677002 + ], + [ + "▁termed", + -12.912982940673828 + ], + [ + "▁Zoe", + -12.913384437561035 + ], + [ + "▁Honestly", + -12.913533210754396 + ], + [ + "▁Encyclopedia", + -12.913688659667969 + ], + [ + "▁DH", + -12.913780212402344 + ], + [ + "ICA", + -12.913784980773926 + ], + [ + "give", + -12.913810729980469 + ], + [ + ":12", + -12.913969993591309 + ], + [ + "▁anonymity", + -12.914161682128906 + ], + [ + "▁occupies", + -12.914161682128906 + ], + [ + "▁studs", + -12.91417121887207 + ], + [ + "▁Lumpur", + -12.914172172546388 + ], + [ + "▁19-", + -12.914332389831545 + ], + [ + "▁Attractive", + -12.914389610290527 + ], + [ + "▁doesnt", + -12.914437294006348 + ], + [ + "Someone", + -12.914440155029297 + ], + [ + "▁tapered", + -12.914552688598633 + ], + [ + "onic", + -12.91461181640625 + ], + [ + "▁apprentice", + -12.914697647094728 + ], + [ + "ARD", + -12.9147310256958 + ], + [ + "▁simulated", + -12.914871215820312 + ], + [ + "▁Qi", + -12.91500473022461 + ], + [ + "22.", + -12.915037155151367 + ], + [ + "Notice", + -12.91506004333496 + ], + [ + "▁Mira", + -12.91511058807373 + ], + [ + "▁Rwanda", + -12.915183067321776 + ], + [ + "▁Ada", + -12.915202140808104 + ], + [ + "▁fountains", + -12.91523265838623 + ], + [ + "▁strengthens", + -12.915297508239746 + ], + [ + "▁harmonious", + -12.915308952331545 + ], + [ + "▁Approved", + -12.915328979492188 + ], + [ + "▁purchasers", + -12.915376663208008 + ], + [ + "eni", + -12.915377616882324 + ], + [ + "▁118", + -12.915664672851562 + ], + [ + "▁Piper", + -12.915678024291992 + ], + [ + "▁aesthetically", + -12.915685653686523 + ], + [ + "▁peeling", + -12.915690422058104 + ], + [ + "span", + -12.915724754333496 + ], + [ + "▁gardeners", + -12.915736198425291 + ], + [ + "▁walkers", + -12.915749549865724 + ], + [ + "Ken", + -12.91576099395752 + ], + [ + "▁Dor", + -12.916098594665527 + ], 
+ [ + "arian", + -12.916099548339844 + ], + [ + "won", + -12.91610622406006 + ], + [ + "ops", + -12.91616153717041 + ], + [ + "▁OTHER", + -12.91631031036377 + ], + [ + "▁HIGH", + -12.916329383850098 + ], + [ + "ses", + -12.916366577148438 + ], + [ + "▁fractures", + -12.916370391845703 + ], + [ + "▁lovingly", + -12.91647243499756 + ], + [ + "ensis", + -12.916728973388672 + ], + [ + "▁imaginary", + -12.91694450378418 + ], + [ + "▁cradle", + -12.916945457458496 + ], + [ + "▁bipolar", + -12.916952133178713 + ], + [ + "▁Landscaping", + -12.916954040527344 + ], + [ + "▁7,000", + -12.917120933532717 + ], + [ + "▁1934", + -12.917213439941406 + ], + [ + "▁au", + -12.91728687286377 + ], + [ + "▁tipped", + -12.917405128479004 + ], + [ + "▁Nut", + -12.917479515075684 + ], + [ + "▁diapers", + -12.917609214782717 + ], + [ + "polis", + -12.917656898498535 + ], + [ + "tea", + -12.917696952819824 + ], + [ + "Front", + -12.917744636535645 + ], + [ + "▁Resistance", + -12.91786289215088 + ], + [ + "▁Pont", + -12.917895317077637 + ], + [ + "▁disposed", + -12.91790771484375 + ], + [ + "▁Katrina", + -12.917943954467772 + ], + [ + "zan", + -12.917946815490724 + ], + [ + "keep", + -12.91798973083496 + ], + [ + "▁diligent", + -12.918097496032717 + ], + [ + "ador", + -12.918105125427246 + ], + [ + "▁Laguna", + -12.918193817138672 + ], + [ + "▁chops", + -12.918231964111328 + ], + [ + "ega", + -12.918330192565918 + ], + [ + "Tu", + -12.918333053588867 + ], + [ + "▁embassy", + -12.918339729309082 + ], + [ + "▁scarves", + -12.918349266052246 + ], + [ + "interest", + -12.918357849121094 + ], + [ + "▁suppression", + -12.918380737304688 + ], + [ + "▁Mak", + -12.918429374694824 + ], + [ + "/14", + -12.91845989227295 + ], + [ + "Services", + -12.918599128723145 + ], + [ + "▁famously", + -12.918661117553713 + ], + [ + "aud", + -12.918706893920898 + ], + [ + "▁Chloe", + -12.918766975402832 + ], + [ + "▁Pak", + -12.918842315673828 + ], + [ + "▁Personalized", + -12.918846130371094 + ], + [ + "▁rob", + -12.918883323669434 + ], + [ + "▁zombies", + -12.918946266174316 + ], + [ + "static", + -12.918957710266112 + ], + [ + "▁Athletics", + -12.918980598449709 + ], + [ + "mia", + -12.91901683807373 + ], + [ + "invasive", + -12.919028282165527 + ], + [ + "edit", + -12.919075012207031 + ], + [ + "▁Diamonds", + -12.91913604736328 + ], + [ + "▁violet", + -12.919194221496582 + ], + [ + "▁acclaim", + -12.919344902038574 + ], + [ + "Sound", + -12.91940689086914 + ], + [ + "▁vis", + -12.91948699951172 + ], + [ + "▁Conditioners", + -12.91952896118164 + ], + [ + "▁Def", + -12.919593811035156 + ], + [ + "▁progressively", + -12.9196138381958 + ], + [ + "▁POWER", + -12.919689178466797 + ], + [ + "▁auxiliary", + -12.9197359085083 + ], + [ + "▁mildew", + -12.919769287109377 + ], + [ + "▁flipping", + -12.919788360595703 + ], + [ + "Gar", + -12.91983699798584 + ], + [ + "▁Mormon", + -12.91985034942627 + ], + [ + "▁Sands", + -12.919901847839355 + ], + [ + "egg", + -12.919936180114746 + ], + [ + "▁Curtain", + -12.919955253601074 + ], + [ + "kara", + -12.92002010345459 + ], + [ + "▁Slip", + -12.920125961303713 + ], + [ + "▁Botanical", + -12.920190811157228 + ], + [ + "▁Lent", + -12.920190811157228 + ], + [ + "▁grim", + -12.920248031616213 + ], + [ + "▁punish", + -12.920262336730955 + ], + [ + "▁plague", + -12.92029857635498 + ], + [ + "▁GMC", + -12.920373916625977 + ], + [ + "104", + -12.920454978942873 + ], + [ + "▁anthem", + -12.920523643493652 + ], + [ + "▁usb", + -12.920612335205078 + ], + [ + "yam", + -12.920693397521973 + ], + [ + "▁Bir", + 
-12.920821189880373 + ], + [ + "grad", + -12.920848846435549 + ], + [ + "dress", + -12.920855522155762 + ], + [ + "▁Planner", + -12.920886993408203 + ], + [ + "Paper", + -12.9209623336792 + ], + [ + "cart", + -12.921026229858398 + ], + [ + "▁pave", + -12.921109199523926 + ], + [ + "▁skipped", + -12.921187400817873 + ], + [ + "▁escaping", + -12.921192169189451 + ], + [ + "▁exchanging", + -12.921223640441896 + ], + [ + "▁DU", + -12.92123317718506 + ], + [ + "▁Bog", + -12.9213285446167 + ], + [ + "Books", + -12.92142391204834 + ], + [ + "fried", + -12.921597480773926 + ], + [ + "▁entice", + -12.921772003173828 + ], + [ + "▁Avery", + -12.921842575073242 + ], + [ + "▁WB", + -12.921995162963867 + ], + [ + "▁Romeo", + -12.92214012145996 + ], + [ + "▁dunes", + -12.922245979309082 + ], + [ + "▁footing", + -12.922348022460938 + ], + [ + "▁aided", + -12.922358512878418 + ], + [ + "▁Biz", + -12.922481536865234 + ], + [ + "▁Bihar", + -12.922523498535156 + ], + [ + "106", + -12.922526359558104 + ], + [ + "▁defeating", + -12.922576904296877 + ], + [ + "▁Warsaw", + -12.922624588012695 + ], + [ + "▁maiden", + -12.922680854797363 + ], + [ + "▁portrayal", + -12.92270565032959 + ], + [ + "▁postings", + -12.922751426696776 + ], + [ + "kas", + -12.92275333404541 + ], + [ + "▁tabletop", + -12.922791481018066 + ], + [ + "▁Bulletin", + -12.922794342041016 + ], + [ + "performing", + -12.922966957092283 + ], + [ + "trust", + -12.923087120056152 + ], + [ + "Bit", + -12.92310619354248 + ], + [ + "▁contextual", + -12.923187255859377 + ], + [ + "▁Gur", + -12.92323398590088 + ], + [ + "▁Emerging", + -12.92330265045166 + ], + [ + "▁Candle", + -12.923316955566406 + ], + [ + "▁ornate", + -12.923336029052734 + ], + [ + "▁occupations", + -12.923375129699709 + ], + [ + "▁Stark", + -12.923491477966309 + ], + [ + "choice", + -12.923529624938965 + ], + [ + "▁Packages", + -12.923605918884276 + ], + [ + "▁(50", + -12.923664093017578 + ], + [ + "▁Hunger", + -12.92369270324707 + ], + [ + "▁PSA", + -12.923713684082031 + ], + [ + "pper", + -12.923747062683104 + ], + [ + "Similarly", + -12.92379665374756 + ], + [ + "▁Exp", + -12.923802375793455 + ], + [ + "▁Invitation", + -12.923857688903809 + ], + [ + "panel", + -12.92393684387207 + ], + [ + "▁Consortium", + -12.923938751220703 + ], + [ + "▁Jer", + -12.92393970489502 + ], + [ + "▁Danielle", + -12.92400360107422 + ], + [ + "▁Directive", + -12.924028396606444 + ], + [ + "▁managerial", + -12.924224853515623 + ], + [ + "▁Lim", + -12.924267768859863 + ], + [ + "▁Vine", + -12.92435073852539 + ], + [ + "▁Node", + -12.924356460571287 + ], + [ + "▁Hairstyles", + -12.924546241760254 + ], + [ + "▁Currency", + -12.924642562866213 + ], + [ + "▁Lieutenant", + -12.9246826171875 + ], + [ + "▁Older", + -12.924827575683594 + ], + [ + "▁Peach", + -12.924875259399414 + ], + [ + "dig", + -12.924922943115234 + ], + [ + "▁Steelers", + -12.92492961883545 + ], + [ + "distance", + -12.92495346069336 + ], + [ + "▁Ava", + -12.92503833770752 + ], + [ + "▁earthy", + -12.925158500671388 + ], + [ + "▁prestige", + -12.925342559814451 + ], + [ + "▁Patterson", + -12.92534637451172 + ], + [ + "▁Perkins", + -12.925369262695312 + ], + [ + "▁EVERY", + -12.925416946411133 + ], + [ + "Wild", + -12.925491333007812 + ], + [ + "▁twisting", + -12.92551612854004 + ], + [ + "▁wipes", + -12.92555332183838 + ], + [ + "▁manuscripts", + -12.925572395324709 + ], + [ + "▁intensely", + -12.925894737243652 + ], + [ + "▁Len", + -12.92591953277588 + ], + [ + "Fest", + -12.926138877868652 + ], + [ + "TW", + -12.926154136657717 + ], + [ + 
"Bal", + -12.926155090332031 + ], + [ + "hub", + -12.9262056350708 + ], + [ + "▁unravel", + -12.926262855529783 + ], + [ + "▁restraint", + -12.926267623901367 + ], + [ + "▁AW", + -12.926414489746094 + ], + [ + "ule", + -12.926480293273926 + ], + [ + "▁thinkers", + -12.926512718200684 + ], + [ + "▁flux", + -12.92655086517334 + ], + [ + "Hold", + -12.926651000976562 + ], + [ + "▁motifs", + -12.926694869995115 + ], + [ + "▁binge", + -12.926712036132812 + ], + [ + "▁retrieval", + -12.926749229431152 + ], + [ + "▁Glacier", + -12.926751136779783 + ], + [ + "▁Roses", + -12.92676067352295 + ], + [ + "directed", + -12.926776885986328 + ], + [ + "▁Tavern", + -12.926809310913086 + ], + [ + "▁tint", + -12.926857948303224 + ], + [ + "▁staffed", + -12.926915168762209 + ], + [ + "WT", + -12.926996231079102 + ], + [ + "uba", + -12.927081108093262 + ], + [ + "▁Penguin", + -12.92719841003418 + ], + [ + "▁ribbons", + -12.927201271057127 + ], + [ + "▁215", + -12.92722988128662 + ], + [ + "▁reclaim", + -12.927362442016602 + ], + [ + "▁1936", + -12.927376747131348 + ], + [ + "▁averages", + -12.927376747131348 + ], + [ + "▁QA", + -12.927431106567385 + ], + [ + "▁Laundry", + -12.927495956420898 + ], + [ + "▁Layout", + -12.927611351013184 + ], + [ + "▁toaster", + -12.92768669128418 + ], + [ + "▁Tail", + -12.927764892578123 + ], + [ + "VS", + -12.927786827087402 + ], + [ + "▁Cooling", + -12.927796363830566 + ], + [ + "▁smokers", + -12.927803993225098 + ], + [ + "▁Kardashian", + -12.927923202514648 + ], + [ + "▁ovens", + -12.92792510986328 + ], + [ + "▁theatres", + -12.927988052368164 + ], + [ + "▁ku", + -12.92802619934082 + ], + [ + "▁barcode", + -12.928139686584473 + ], + [ + "▁Coventry", + -12.928142547607422 + ], + [ + "▁superintendent", + -12.928156852722168 + ], + [ + "▁Catering", + -12.928271293640137 + ], + [ + "hd", + -12.928519248962402 + ], + [ + "Coin", + -12.928543090820312 + ], + [ + "cip", + -12.928597450256348 + ], + [ + "Yo", + -12.928624153137209 + ], + [ + "▁MOD", + -12.92872428894043 + ], + [ + "▁sweating", + -12.928725242614746 + ], + [ + "160", + -12.928730010986328 + ], + [ + "FO", + -12.92882251739502 + ], + [ + "▁sailors", + -12.928834915161133 + ], + [ + "▁Jung", + -12.928890228271484 + ], + [ + "Sir", + -12.928918838500977 + ], + [ + "▁Garrett", + -12.928930282592772 + ], + [ + "▁evergreen", + -12.928967475891112 + ], + [ + "▁Sketch", + -12.929193496704102 + ], + [ + "▁toolbar", + -12.929329872131348 + ], + [ + "▁Ari", + -12.92934799194336 + ], + [ + "▁Enough", + -12.929357528686523 + ], + [ + "mé", + -12.929621696472168 + ], + [ + "▁luncheon", + -12.929635047912598 + ], + [ + "▁abrasive", + -12.92974853515625 + ], + [ + "▁slap", + -12.929940223693848 + ], + [ + "▁Velvet", + -12.929950714111328 + ], + [ + "stack", + -12.929965019226074 + ], + [ + "▁organically", + -12.930014610290527 + ], + [ + "▁disguise", + -12.930379867553713 + ], + [ + "▁emulate", + -12.930381774902344 + ], + [ + "▁daycare", + -12.930413246154783 + ], + [ + "▁goose", + -12.930435180664062 + ], + [ + "▁Lance", + -12.930588722229004 + ], + [ + "▁22-", + -12.930597305297852 + ], + [ + "▁leisurely", + -12.930684089660645 + ], + [ + "▁auditing", + -12.930700302124023 + ], + [ + "probably", + -12.93073558807373 + ], + [ + "▁Utilities", + -12.93073844909668 + ], + [ + "▁aspire", + -12.930761337280272 + ], + [ + "▁decimal", + -12.930790901184082 + ], + [ + "▁divert", + -12.930801391601562 + ], + [ + "pul", + -12.930803298950195 + ], + [ + "▁debug", + -12.930828094482422 + ], + [ + "▁114", + -12.930862426757812 + ], + [ + 
"▁(13", + -12.930931091308594 + ], + [ + "▁danced", + -12.930963516235352 + ], + [ + "▁Tribunal", + -12.93097972869873 + ], + [ + "▁emperor", + -12.93097972869873 + ], + [ + "▁nuisance", + -12.93097972869873 + ], + [ + "▁terrified", + -12.93097972869873 + ], + [ + "▁Shared", + -12.931021690368652 + ], + [ + "pod", + -12.93110179901123 + ], + [ + "▁Doyle", + -12.93111801147461 + ], + [ + "▁morality", + -12.931166648864746 + ], + [ + "Personally", + -12.931198120117188 + ], + [ + "▁glare", + -12.931416511535645 + ], + [ + "▁sanding", + -12.931519508361816 + ], + [ + "▁blooming", + -12.931526184082031 + ], + [ + "FP", + -12.931774139404297 + ], + [ + "▁indexed", + -12.931852340698242 + ], + [ + "▁Stefan", + -12.931867599487305 + ], + [ + "times", + -12.931915283203123 + ], + [ + "▁Owens", + -12.931931495666504 + ], + [ + "▁afforded", + -12.932007789611816 + ], + [ + "▁reckless", + -12.932089805603027 + ], + [ + "▁concurrent", + -12.932174682617188 + ], + [ + "meaning", + -12.932272911071776 + ], + [ + "▁Hardy", + -12.932284355163574 + ], + [ + "▁inventor", + -12.93232536315918 + ], + [ + "▁urinary", + -12.93239402770996 + ], + [ + "▁imprisonment", + -12.932398796081545 + ], + [ + "▁10.1", + -12.932435035705566 + ], + [ + "another", + -12.932451248168944 + ], + [ + "gers", + -12.932515144348145 + ], + [ + "RU", + -12.932599067687988 + ], + [ + "▁prisoner", + -12.932619094848633 + ], + [ + "cone", + -12.932695388793944 + ], + [ + "Same", + -12.932701110839844 + ], + [ + "▁franchises", + -12.93271541595459 + ], + [ + "▁hides", + -12.932771682739258 + ], + [ + "hook", + -12.932921409606934 + ], + [ + "▁Input", + -12.93295669555664 + ], + [ + "▁oxidation", + -12.932963371276855 + ], + [ + "knit", + -12.93303108215332 + ], + [ + "▁analogy", + -12.93305778503418 + ], + [ + "▁Meal", + -12.93315887451172 + ], + [ + "▁sunsets", + -12.93317413330078 + ], + [ + "▁Tide", + -12.933247566223145 + ], + [ + "ders", + -12.933266639709473 + ], + [ + "▁EZ", + -12.933323860168455 + ], + [ + "hee", + -12.933324813842772 + ], + [ + "peer", + -12.933364868164062 + ], + [ + "▁fearful", + -12.933554649353027 + ], + [ + "▁plaintiffs", + -12.933650970458984 + ], + [ + "contained", + -12.933710098266602 + ], + [ + "▁nook", + -12.93372917175293 + ], + [ + "▁Guidance", + -12.933783531188965 + ], + [ + "vik", + -12.933804512023926 + ], + [ + "▁physique", + -12.93381118774414 + ], + [ + "▁stamina", + -12.933812141418455 + ], + [ + "▁universally", + -12.933825492858888 + ], + [ + "▁ingenious", + -12.933836936950684 + ], + [ + "▁dealerships", + -12.93385887145996 + ], + [ + "▁arteries", + -12.933908462524414 + ], + [ + "▁Lima", + -12.933932304382324 + ], + [ + "▁easing", + -12.933942794799805 + ], + [ + "iza", + -12.93394660949707 + ], + [ + "▁Lev", + -12.933961868286133 + ], + [ + "kes", + -12.933985710144045 + ], + [ + "Government", + -12.934029579162598 + ], + [ + "▁Cir", + -12.934080123901367 + ], + [ + "seven", + -12.934112548828123 + ], + [ + "▁minimally", + -12.934484481811523 + ], + [ + "▁21-", + -12.934493064880373 + ], + [ + "▁fig", + -12.934535026550291 + ], + [ + "▁ammo", + -12.934582710266112 + ], + [ + "▁Comparison", + -12.934585571289062 + ], + [ + "isha", + -12.934686660766602 + ], + [ + "▁RX", + -12.934797286987305 + ], + [ + "▁intrusion", + -12.934900283813477 + ], + [ + "ometer", + -12.934908866882324 + ], + [ + "▁Tasmania", + -12.934948921203612 + ], + [ + "▁Rogue", + -12.935063362121582 + ], + [ + "heat", + -12.935099601745604 + ], + [ + "▁Courts", + -12.935144424438477 + ], + [ + "▁throughput", + 
-12.935229301452637 + ], + [ + "▁INFORMATION", + -12.93523120880127 + ], + [ + "▁325", + -12.935239791870115 + ], + [ + "▁specifies", + -12.935245513916016 + ], + [ + "▁Cluster", + -12.935287475585938 + ], + [ + "▁doom", + -12.935309410095217 + ], + [ + "▁drowning", + -12.935545921325684 + ], + [ + "▁sprayed", + -12.93581771850586 + ], + [ + "/2016", + -12.935820579528809 + ], + [ + "eco", + -12.935867309570312 + ], + [ + "▁Plum", + -12.935962677001951 + ], + [ + "▁Bridal", + -12.9360933303833 + ], + [ + "▁majors", + -12.936116218566896 + ], + [ + "▁Fabulous", + -12.936277389526367 + ], + [ + "▁Countries", + -12.936382293701172 + ], + [ + "▁benchmarks", + -12.93638515472412 + ], + [ + "115", + -12.936437606811523 + ], + [ + "▁glacier", + -12.936461448669434 + ], + [ + "▁Watching", + -12.936495780944824 + ], + [ + "Mom", + -12.936620712280272 + ], + [ + "▁registers", + -12.936635971069336 + ], + [ + "▁1991,", + -12.936670303344728 + ], + [ + "▁biomedical", + -12.936671257019045 + ], + [ + "▁piston", + -12.936819076538086 + ], + [ + "▁melts", + -12.93684196472168 + ], + [ + "▁sucker", + -12.936849594116213 + ], + [ + "▁ticks", + -12.936866760253906 + ], + [ + "▁hanger", + -12.936922073364258 + ], + [ + "Bad", + -12.936929702758787 + ], + [ + "eer", + -12.936993598937988 + ], + [ + "▁esteemed", + -12.93699550628662 + ], + [ + "▁Messiah", + -12.937028884887695 + ], + [ + "▁sinus", + -12.93708324432373 + ], + [ + "▁Feeling", + -12.93727970123291 + ], + [ + "linger", + -12.937355041503906 + ], + [ + "Active", + -12.937418937683104 + ], + [ + "▁Sync", + -12.937557220458984 + ], + [ + "▁2/3", + -12.937592506408691 + ], + [ + "380", + -12.937637329101562 + ], + [ + "PAC", + -12.937647819519045 + ], + [ + "▁Geek", + -12.937906265258787 + ], + [ + "Different", + -12.937932014465332 + ], + [ + "▁tees", + -12.93796157836914 + ], + [ + "▁Sponsor", + -12.93797779083252 + ], + [ + "▁irritating", + -12.93808364868164 + ], + [ + "▁YMCA", + -12.93808650970459 + ], + [ + "▁embossed", + -12.93813705444336 + ], + [ + "▁marriages", + -12.938172340393066 + ], + [ + "▁117", + -12.9382963180542 + ], + [ + "▁pseudo", + -12.93840503692627 + ], + [ + "chal", + -12.938563346862791 + ], + [ + "tell", + -12.93859577178955 + ], + [ + "▁conceal", + -12.938636779785156 + ], + [ + "▁Divorce", + -12.938709259033203 + ], + [ + "▁ACE", + -12.938756942749023 + ], + [ + "Trade", + -12.93889045715332 + ], + [ + "▁Styles", + -12.938905715942385 + ], + [ + "emia", + -12.939050674438477 + ], + [ + "▁hinge", + -12.939057350158691 + ], + [ + "▁widgets", + -12.939058303833008 + ], + [ + "▁seizure", + -12.939085960388184 + ], + [ + "▁resign", + -12.93915557861328 + ], + [ + "▁popup", + -12.93917465209961 + ], + [ + "▁Arduino", + -12.939224243164062 + ], + [ + "▁Crane", + -12.939360618591309 + ], + [ + "▁Stern", + -12.93938446044922 + ], + [ + "▁irresistible", + -12.939496994018556 + ], + [ + "Texas", + -12.939498901367188 + ], + [ + "▁Distinguished", + -12.939498901367188 + ], + [ + "Fall", + -12.939520835876465 + ], + [ + "Draw", + -12.939529418945312 + ], + [ + "▁Carey", + -12.939541816711426 + ], + [ + "▁genus", + -12.939599990844728 + ], + [ + "▁craftsmen", + -12.939610481262209 + ], + [ + "▁demolished", + -12.93962287902832 + ], + [ + "▁Spoon", + -12.939645767211914 + ], + [ + "▁shattered", + -12.939727783203123 + ], + [ + "▁steals", + -12.939756393432615 + ], + [ + "▁CITY", + -12.939774513244627 + ], + [ + "▁enlisted", + -12.939831733703612 + ], + [ + "cke", + -12.940021514892578 + ], + [ + "▁assay", + -12.940110206604004 + ], + [ 
+ "▁USPS", + -12.940128326416016 + ], + [ + "▁Franco", + -12.940133094787598 + ], + [ + "▁rotor", + -12.940193176269531 + ], + [ + "▁Configuration", + -12.94027614593506 + ], + [ + "▁Scene", + -12.940314292907717 + ], + [ + "ér", + -12.940316200256348 + ], + [ + "▁anonymously", + -12.940316200256348 + ], + [ + "▁bob", + -12.940409660339355 + ], + [ + "ply", + -12.94052505493164 + ], + [ + "▁Components", + -12.940558433532717 + ], + [ + "development", + -12.940619468688965 + ], + [ + "▁Unknown", + -12.940784454345703 + ], + [ + "Connor", + -12.940811157226562 + ], + [ + "nm", + -12.94081974029541 + ], + [ + "storey", + -12.940853118896484 + ], + [ + "Interesting", + -12.940903663635254 + ], + [ + "▁Confederate", + -12.940923690795898 + ], + [ + "▁foundational", + -12.940925598144531 + ], + [ + "▁pilgrimage", + -12.940942764282228 + ], + [ + "▁cruelty", + -12.940958023071287 + ], + [ + "▁119", + -12.940975189208984 + ], + [ + "▁insertion", + -12.941075325012209 + ], + [ + "▁hyperlink", + -12.9410982131958 + ], + [ + "▁laminated", + -12.941125869750977 + ], + [ + "▁vowed", + -12.941150665283203 + ], + [ + "bia", + -12.941158294677734 + ], + [ + "Understanding", + -12.941296577453612 + ], + [ + "nh", + -12.941302299499512 + ], + [ + "▁Shops", + -12.941314697265623 + ], + [ + "Fox", + -12.94140911102295 + ], + [ + "▁idol", + -12.941466331481934 + ], + [ + "▁evidenced", + -12.94155979156494 + ], + [ + "fact", + -12.941571235656738 + ], + [ + "▁Fletcher", + -12.941574096679688 + ], + [ + "▁novelist", + -12.941679954528809 + ], + [ + "▁$25,000", + -12.941813468933104 + ], + [ + "▁OB", + -12.941814422607422 + ], + [ + "▁lookup", + -12.941829681396484 + ], + [ + "▁Dashboard", + -12.941858291625977 + ], + [ + "▁earthquakes", + -12.941922187805176 + ], + [ + "▁Mets", + -12.941930770874023 + ], + [ + "▁Breath", + -12.94198989868164 + ], + [ + "▁401", + -12.94204330444336 + ], + [ + "▁lesions", + -12.942069053649902 + ], + [ + "▁RFID", + -12.942098617553713 + ], + [ + "▁rainforest", + -12.942253112792969 + ], + [ + "Us", + -12.942368507385254 + ], + [ + "▁shady", + -12.942419052124023 + ], + [ + "▁(14", + -12.942532539367676 + ], + [ + "kill", + -12.94260025024414 + ], + [ + "▁crumbs", + -12.942652702331545 + ], + [ + "appropriate", + -12.942709922790527 + ], + [ + "▁Telecom", + -12.942715644836426 + ], + [ + "▁youths", + -12.942745208740234 + ], + [ + "Training", + -12.942767143249512 + ], + [ + "domain", + -12.942951202392578 + ], + [ + "▁Collaborative", + -12.942996978759766 + ], + [ + "wd", + -12.94304084777832 + ], + [ + "▁Boca", + -12.943167686462402 + ], + [ + "▁Dive", + -12.943170547485352 + ], + [ + "▁2030", + -12.943172454833984 + ], + [ + "OA", + -12.943187713623049 + ], + [ + "▁tort", + -12.943195343017578 + ], + [ + "▁Ivory", + -12.943220138549805 + ], + [ + "math", + -12.943267822265623 + ], + [ + "achi", + -12.943456649780272 + ], + [ + "▁axle", + -12.94347858428955 + ], + [ + "▁Colleges", + -12.943538665771484 + ], + [ + "▁Ara", + -12.943643569946287 + ], + [ + "▁orthodontic", + -12.943670272827148 + ], + [ + "▁ignorant", + -12.943782806396484 + ], + [ + "▁issuance", + -12.943782806396484 + ], + [ + "▁cafeteria", + -12.9437837600708 + ], + [ + "Kevin", + -12.943902015686035 + ], + [ + "▁marinade", + -12.943923950195312 + ], + [ + "▁pom", + -12.944043159484863 + ], + [ + "▁housekeeping", + -12.944084167480469 + ], + [ + "▁pensions", + -12.944170951843262 + ], + [ + "▁spawn", + -12.944172859191896 + ], + [ + "▁REST", + -12.944208145141602 + ], + [ + "▁carve", + -12.944278717041016 + ], + [ 
+ "▁EMC", + -12.94437026977539 + ], + [ + "2004", + -12.944393157958984 + ], + [ + "▁3:30", + -12.944419860839844 + ], + [ + "▁craftsman", + -12.944544792175291 + ], + [ + "▁dwarf", + -12.944639205932615 + ], + [ + "▁pursuits", + -12.944901466369627 + ], + [ + "▁defenses", + -12.94498062133789 + ], + [ + "▁qualifies", + -12.945215225219728 + ], + [ + "ENCE", + -12.94522762298584 + ], + [ + "SI", + -12.945338249206545 + ], + [ + "▁Willis", + -12.945422172546388 + ], + [ + "cd", + -12.945430755615234 + ], + [ + "▁suitability", + -12.945526123046877 + ], + [ + "▁butcher", + -12.945545196533203 + ], + [ + "▁Lea", + -12.94561767578125 + ], + [ + "▁wrench", + -12.94573211669922 + ], + [ + "▁hallmark", + -12.945761680603027 + ], + [ + "▁Hear", + -12.946149826049805 + ], + [ + "lp", + -12.946240425109863 + ], + [ + "tens", + -12.94626808166504 + ], + [ + "▁sailed", + -12.946351051330566 + ], + [ + "▁redness", + -12.94639778137207 + ], + [ + "▁cuffs", + -12.94640064239502 + ], + [ + "▁Aires", + -12.946441650390623 + ], + [ + "▁partitions", + -12.946495056152344 + ], + [ + "▁lu", + -12.946514129638672 + ], + [ + "▁barley", + -12.946526527404783 + ], + [ + "▁kiln", + -12.946673393249512 + ], + [ + "▁mentorship", + -12.94674301147461 + ], + [ + "▁canals", + -12.94676113128662 + ], + [ + "▁Lucia", + -12.946821212768556 + ], + [ + "▁Isaiah", + -12.946894645690918 + ], + [ + "▁Weston", + -12.94697380065918 + ], + [ + "▁surpassed", + -12.947027206420898 + ], + [ + "▁CES", + -12.947179794311523 + ], + [ + ":16", + -12.947209358215332 + ], + [ + "▁Suddenly", + -12.94725227355957 + ], + [ + "▁shortages", + -12.947253227233888 + ], + [ + "▁roam", + -12.947311401367188 + ], + [ + "▁5-6", + -12.947439193725586 + ], + [ + "▁Optical", + -12.947474479675291 + ], + [ + "▁sealer", + -12.947489738464355 + ], + [ + "▁Que", + -12.947518348693848 + ], + [ + "21.", + -12.94752025604248 + ], + [ + "▁succulent", + -12.947558403015137 + ], + [ + "VT", + -12.947628021240234 + ], + [ + "▁Schwartz", + -12.947698593139648 + ], + [ + "▁floss", + -12.947771072387695 + ], + [ + "▁Bedding", + -12.947787284851074 + ], + [ + "TEC", + -12.947845458984377 + ], + [ + "▁850", + -12.947932243347168 + ], + [ + "▁ramps", + -12.947932243347168 + ], + [ + "sac", + -12.947938919067385 + ], + [ + "▁hammock", + -12.948026657104492 + ], + [ + "▁drapes", + -12.94807243347168 + ], + [ + "▁trophies", + -12.948086738586426 + ], + [ + "▁assert", + -12.948118209838867 + ], + [ + "▁Alexandra", + -12.948137283325195 + ], + [ + "▁righteousness", + -12.94815158843994 + ], + [ + "rb", + -12.948163032531738 + ], + [ + "▁horseback", + -12.948445320129396 + ], + [ + "▁sender", + -12.948521614074709 + ], + [ + "Website", + -12.948535919189451 + ], + [ + "▁relish", + -12.948575973510742 + ], + [ + "cul", + -12.948626518249512 + ], + [ + "▁Sole", + -12.948802947998049 + ], + [ + "really", + -12.948917388916016 + ], + [ + "▁tweaks", + -12.94892692565918 + ], + [ + "▁snug", + -12.948948860168455 + ], + [ + "Wonderful", + -12.948975563049316 + ], + [ + "▁sap", + -12.949013710021973 + ], + [ + "▁ter", + -12.949257850646973 + ], + [ + "▁wolves", + -12.949380874633787 + ], + [ + "▁Ceremony", + -12.949525833129885 + ], + [ + "▁bipartisan", + -12.94953155517578 + ], + [ + "▁encoding", + -12.949546813964844 + ], + [ + "▁respite", + -12.949556350708008 + ], + [ + "▁debated", + -12.94955825805664 + ], + [ + "▁Carolyn", + -12.949614524841309 + ], + [ + "▁Courtney", + -12.949646949768066 + ], + [ + "▁troublesome", + -12.94969654083252 + ], + [ + "Congrats", + 
-12.949729919433594 + ], + [ + "Basic", + -12.949749946594238 + ], + [ + "▁nightstand", + -12.949857711791992 + ], + [ + "existent", + -12.949939727783203 + ], + [ + "▁Hiring", + -12.949957847595217 + ], + [ + "Germany", + -12.950039863586426 + ], + [ + "▁15-20", + -12.950093269348145 + ], + [ + "tner", + -12.95018196105957 + ], + [ + "▁Immediately", + -12.950186729431152 + ], + [ + "▁Villas", + -12.950311660766602 + ], + [ + "▁Killer", + -12.95037078857422 + ], + [ + "▁BIOS", + -12.950409889221191 + ], + [ + "Touch", + -12.950521469116213 + ], + [ + "▁RPM", + -12.950584411621094 + ], + [ + "▁Vital", + -12.950616836547852 + ], + [ + "▁Concord", + -12.950620651245115 + ], + [ + "▁$80", + -12.950682640075684 + ], + [ + "▁RED", + -12.950724601745604 + ], + [ + "▁Bulls", + -12.950743675231934 + ], + [ + "developed", + -12.95075511932373 + ], + [ + "▁Dub", + -12.950787544250488 + ], + [ + "▁testify", + -12.95083999633789 + ], + [ + "▁breweries", + -12.950984001159668 + ], + [ + "▁Ottoman", + -12.951074600219728 + ], + [ + "▁prohibit", + -12.95108413696289 + ], + [ + "▁1919", + -12.951111793518066 + ], + [ + "▁Kyoto", + -12.951120376586914 + ], + [ + "tina", + -12.95126724243164 + ], + [ + "▁typed", + -12.95129680633545 + ], + [ + "▁Anonymous", + -12.951316833496094 + ], + [ + "OL", + -12.951536178588867 + ], + [ + "Royal", + -12.951576232910156 + ], + [ + "▁claw", + -12.951611518859863 + ], + [ + "▁softened", + -12.95164680480957 + ], + [ + "▁Sum", + -12.951648712158203 + ], + [ + "Rob", + -12.9517240524292 + ], + [ + "▁XS", + -12.951730728149414 + ], + [ + "]]", + -12.951913833618164 + ], + [ + "▁Nie", + -12.95194149017334 + ], + [ + "▁comforts", + -12.952065467834473 + ], + [ + "▁Identification", + -12.9520845413208 + ], + [ + "▁cashback", + -12.95212459564209 + ], + [ + "▁Beacon", + -12.952381134033203 + ], + [ + "▁Miracle", + -12.952398300170898 + ], + [ + "▁deficiencies", + -12.952409744262695 + ], + [ + "▁Brief", + -12.95256805419922 + ], + [ + "agent", + -12.952631950378418 + ], + [ + "▁similarity", + -12.95265007019043 + ], + [ + "▁Springer", + -12.952651023864746 + ], + [ + "women", + -12.952765464782717 + ], + [ + "wn", + -12.952771186828612 + ], + [ + "PV", + -12.952823638916016 + ], + [ + "▁dreaded", + -12.952842712402344 + ], + [ + "▁influencer", + -12.952848434448242 + ], + [ + "▁300,000", + -12.952946662902832 + ], + [ + "▁lame", + -12.95310878753662 + ], + [ + "▁Geography", + -12.953160285949709 + ], + [ + "▁Beard", + -12.953206062316896 + ], + [ + "▁Fact", + -12.95322322845459 + ], + [ + "WM", + -12.953269004821776 + ], + [ + "▁1961", + -12.953272819519045 + ], + [ + "▁Connector", + -12.95341682434082 + ], + [ + "▁underwear", + -12.95341968536377 + ], + [ + "▁temps", + -12.953423500061035 + ], + [ + "▁Goose", + -12.953476905822754 + ], + [ + "▁Circus", + -12.953481674194336 + ], + [ + "uan", + -12.95351505279541 + ], + [ + "▁HDR", + -12.953553199768066 + ], + [ + "▁dime", + -12.95361042022705 + ], + [ + "▁Mos", + -12.953697204589844 + ], + [ + "▁Qualcomm", + -12.95383071899414 + ], + [ + "▁Preservation", + -12.95387840270996 + ], + [ + "▁witty", + -12.95390796661377 + ], + [ + "GU", + -12.953920364379885 + ], + [ + "▁recruits", + -12.953988075256348 + ], + [ + "▁forwarding", + -12.95411205291748 + ], + [ + "summer", + -12.954115867614746 + ], + [ + "pocket", + -12.95412254333496 + ], + [ + "biz", + -12.954188346862791 + ], + [ + "Studio", + -12.954257011413574 + ], + [ + "▁1952", + -12.954258918762209 + ], + [ + "▁partake", + -12.954264640808104 + ], + [ + "▁Gang", + 
-12.954272270202637 + ], + [ + "▁murals", + -12.954362869262695 + ], + [ + "River", + -12.954407691955566 + ], + [ + "eller", + -12.954513549804688 + ], + [ + "▁Scar", + -12.954584121704102 + ], + [ + "▁Meghan", + -12.954607963562012 + ], + [ + "▁ironic", + -12.954636573791504 + ], + [ + "▁personalization", + -12.95468044281006 + ], + [ + "▁warns", + -12.954691886901855 + ], + [ + "▁alpine", + -12.95474624633789 + ], + [ + "▁Jade", + -12.954812049865724 + ], + [ + "▁selves", + -12.954834938049316 + ], + [ + "▁Controls", + -12.955031394958496 + ], + [ + "▁Tal", + -12.95505428314209 + ], + [ + "Path", + -12.955102920532228 + ], + [ + "▁Pir", + -12.955134391784668 + ], + [ + "▁Nolan", + -12.95518684387207 + ], + [ + "▁Fitzgerald", + -12.955204963684082 + ], + [ + "▁overwhelmingly", + -12.95525074005127 + ], + [ + "▁Cheshire", + -12.955312728881836 + ], + [ + "▁hurdle", + -12.95533561706543 + ], + [ + "▁CLICK", + -12.955363273620604 + ], + [ + "▁Facts", + -12.955389976501465 + ], + [ + "▁Dropbox", + -12.95549774169922 + ], + [ + "tch", + -12.955639839172363 + ], + [ + "▁policymakers", + -12.955647468566896 + ], + [ + "Event", + -12.955758094787598 + ], + [ + "▁Biological", + -12.955791473388672 + ], + [ + "▁shortlisted", + -12.955804824829102 + ], + [ + "▁humanities", + -12.955903053283691 + ], + [ + "▁communist", + -12.955941200256348 + ], + [ + "2009", + -12.95596694946289 + ], + [ + "FB", + -12.955971717834473 + ], + [ + "▁Accident", + -12.955978393554688 + ], + [ + "▁Mach", + -12.956177711486816 + ], + [ + "BG", + -12.956198692321776 + ], + [ + "▁pu", + -12.956218719482422 + ], + [ + "▁Greenville", + -12.956275939941406 + ], + [ + "▁Artistic", + -12.956279754638672 + ], + [ + "▁suspense", + -12.956356048583984 + ], + [ + "▁socialist", + -12.956406593322754 + ], + [ + "NU", + -12.956419944763184 + ], + [ + "▁Bakery", + -12.95643138885498 + ], + [ + "tale", + -12.956449508666992 + ], + [ + "▁hops", + -12.95650863647461 + ], + [ + "lings", + -12.956721305847168 + ], + [ + "▁deli", + -12.956883430480955 + ], + [ + "▁ip", + -12.956947326660156 + ], + [ + "▁Blank", + -12.957037925720217 + ], + [ + "▁rabbits", + -12.957053184509276 + ], + [ + "▁farmland", + -12.957091331481934 + ], + [ + "▁glider", + -12.957111358642578 + ], + [ + "weed", + -12.957131385803224 + ], + [ + "Japan", + -12.957173347473145 + ], + [ + "▁depict", + -12.95720100402832 + ], + [ + "▁Ability", + -12.957204818725586 + ], + [ + "▁Porto", + -12.957275390625 + ], + [ + "▁Journalism", + -12.957289695739746 + ], + [ + "▁thunder", + -12.957359313964844 + ], + [ + "▁Towers", + -12.957406997680664 + ], + [ + "Soon", + -12.957515716552734 + ], + [ + "▁rink", + -12.957944869995115 + ], + [ + "▁Ravens", + -12.957987785339355 + ], + [ + "▁inward", + -12.958051681518556 + ], + [ + "▁anchored", + -12.958074569702148 + ], + [ + "itter", + -12.958086013793944 + ], + [ + "▁Said", + -12.958159446716309 + ], + [ + "▁Wife", + -12.958178520202637 + ], + [ + "steel", + -12.958208084106444 + ], + [ + "▁Anaheim", + -12.958211898803713 + ], + [ + "ket", + -12.95828628540039 + ], + [ + "▁Illustrator", + -12.95828628540039 + ], + [ + "▁Quiz", + -12.958295822143556 + ], + [ + "eka", + -12.958359718322754 + ], + [ + "▁distributions", + -12.958460807800291 + ], + [ + "▁depressing", + -12.958477020263672 + ], + [ + "suit", + -12.95852756500244 + ], + [ + "▁Handmade", + -12.9585542678833 + ], + [ + "▁washes", + -12.95864963531494 + ], + [ + "▁towering", + -12.958687782287598 + ], + [ + "Common", + -12.958706855773926 + ], + [ + "▁MFA", + -12.95871925354004 
+ ], + [ + "▁Household", + -12.9588041305542 + ], + [ + "▁ph", + -12.958806037902832 + ], + [ + "▁vows", + -12.95882797241211 + ], + [ + "▁ICC", + -12.958894729614258 + ], + [ + "rain", + -12.958901405334473 + ], + [ + "▁Makeup", + -12.958904266357422 + ], + [ + "▁boilers", + -12.958916664123535 + ], + [ + "▁Thermal", + -12.959007263183594 + ], + [ + "chel", + -12.959166526794434 + ], + [ + "▁midfield", + -12.959188461303713 + ], + [ + "▁Behavioral", + -12.959482192993164 + ], + [ + "▁Geoff", + -12.959514617919922 + ], + [ + "thin", + -12.959587097167969 + ], + [ + "multi", + -12.959598541259766 + ], + [ + "▁Pty", + -12.9596529006958 + ], + [ + "▁freed", + -12.959688186645508 + ], + [ + "▁datasets", + -12.95971965789795 + ], + [ + "▁Canton", + -12.95975112915039 + ], + [ + "▁122", + -12.959908485412598 + ], + [ + "▁501(", + -12.959918975830078 + ], + [ + "▁AMAZING", + -12.959957122802734 + ], + [ + "▁Tests", + -12.960052490234377 + ], + [ + "▁sway", + -12.960053443908691 + ], + [ + "▁frontier", + -12.960126876831056 + ], + [ + "surgical", + -12.960171699523926 + ], + [ + "▁Titanium", + -12.960217475891112 + ], + [ + "▁solids", + -12.96025848388672 + ], + [ + "▁Renewable", + -12.960309028625488 + ], + [ + "▁Alma", + -12.960322380065918 + ], + [ + "▁Bert", + -12.960338592529297 + ], + [ + "neu", + -12.96037483215332 + ], + [ + "▁ridiculously", + -12.960392951965332 + ], + [ + "▁Toshiba", + -12.960467338562012 + ], + [ + "▁Dy", + -12.960493087768556 + ], + [ + "▁exile", + -12.960604667663574 + ], + [ + "▁Slam", + -12.960630416870115 + ], + [ + "▁swell", + -12.960813522338867 + ], + [ + "HO", + -12.960857391357422 + ], + [ + "▁standings", + -12.96100902557373 + ], + [ + "▁dripping", + -12.961141586303713 + ], + [ + "▁Filename", + -12.96116828918457 + ], + [ + "▁pellet", + -12.961237907409668 + ], + [ + "▁Warwick", + -12.961238861083984 + ], + [ + "hel", + -12.96125316619873 + ], + [ + "▁eg", + -12.961352348327637 + ], + [ + "loaded", + -12.961371421813965 + ], + [ + "pak", + -12.961442947387695 + ], + [ + "▁rests", + -12.96151351928711 + ], + [ + "rac", + -12.96152687072754 + ], + [ + "▁awakening", + -12.961532592773438 + ], + [ + "▁LM", + -12.961615562438965 + ], + [ + "BU", + -12.961629867553713 + ], + [ + "nus", + -12.96168327331543 + ], + [ + "▁Muscle", + -12.961711883544922 + ], + [ + "▁psycho", + -12.961780548095703 + ], + [ + "▁3\"", + -12.961816787719728 + ], + [ + "community", + -12.96186065673828 + ], + [ + "ulo", + -12.961898803710938 + ], + [ + "▁gardener", + -12.961986541748049 + ], + [ + "▁usefulness", + -12.962115287780762 + ], + [ + "Dry", + -12.962313652038574 + ], + [ + "rib", + -12.962397575378418 + ], + [ + "▁toothpaste", + -12.9625825881958 + ], + [ + "LINE", + -12.962616920471191 + ], + [ + "Patients", + -12.96263313293457 + ], + [ + "▁sediment", + -12.962635040283203 + ], + [ + "▁ethnicity", + -12.962706565856934 + ], + [ + "▁DV", + -12.962899208068848 + ], + [ + "▁synth", + -12.962964057922363 + ], + [ + "▁Jude", + -12.963010787963867 + ], + [ + "▁MV", + -12.963101387023926 + ], + [ + "▁halves", + -12.963116645812988 + ], + [ + "HB", + -12.96313762664795 + ], + [ + "computer", + -12.963152885437012 + ], + [ + "▁reefs", + -12.963173866271973 + ], + [ + "UX", + -12.96326732635498 + ], + [ + "▁Feedback", + -12.963268280029297 + ], + [ + "▁Albuquerque", + -12.963276863098145 + ], + [ + "WI", + -12.963308334350586 + ], + [ + "Gas", + -12.963470458984377 + ], + [ + "cum", + -12.963496208190918 + ], + [ + "pictured", + -12.96349811553955 + ], + [ + "lecommunications", + 
-12.963501930236816 + ], + [ + "▁evoke", + -12.963576316833496 + ], + [ + "pipe", + -12.963645935058594 + ], + [ + "▁Gy", + -12.963711738586426 + ], + [ + "LB", + -12.963717460632324 + ], + [ + "▁Telegraph", + -12.963863372802734 + ], + [ + "omi", + -12.963869094848633 + ], + [ + "▁Preserve", + -12.963933944702148 + ], + [ + "▁Sarasota", + -12.96396827697754 + ], + [ + "mad", + -12.96408748626709 + ], + [ + "EB", + -12.964167594909668 + ], + [ + "▁oath", + -12.964173316955566 + ], + [ + "▁slabs", + -12.96417999267578 + ], + [ + "▁exponentially", + -12.964204788208008 + ], + [ + "alla", + -12.964280128479004 + ], + [ + "▁decadent", + -12.964296340942385 + ], + [ + "▁triggering", + -12.96437931060791 + ], + [ + "▁looming", + -12.964391708374023 + ], + [ + "▁dem", + -12.964590072631836 + ], + [ + "▁Gluten", + -12.96462059020996 + ], + [ + "Customers", + -12.964651107788086 + ], + [ + "▁silica", + -12.964698791503906 + ], + [ + "History", + -12.9647216796875 + ], + [ + "iter", + -12.964752197265623 + ], + [ + "erie", + -12.964768409729004 + ], + [ + "▁embody", + -12.965001106262209 + ], + [ + "▁bu", + -12.965049743652344 + ], + [ + "▁$22", + -12.965088844299316 + ], + [ + "Mini", + -12.965134620666504 + ], + [ + "▁tracing", + -12.965264320373535 + ], + [ + "▁Kun", + -12.965360641479492 + ], + [ + "▁Sophia", + -12.965435028076172 + ], + [ + "▁skid", + -12.965456008911133 + ], + [ + "▁dissemination", + -12.96549129486084 + ], + [ + "▁nautical", + -12.965494155883787 + ], + [ + "▁bikini", + -12.965495109558104 + ], + [ + "▁Deutsche", + -12.96550464630127 + ], + [ + "▁analogue", + -12.965545654296877 + ], + [ + "▁elasticity", + -12.965588569641112 + ], + [ + "▁GCSE", + -12.965635299682615 + ], + [ + "▁telescope", + -12.965638160705566 + ], + [ + "330", + -12.965852737426758 + ], + [ + "▁cowboy", + -12.965886116027832 + ], + [ + "▁grated", + -12.96600341796875 + ], + [ + "▁concentrates", + -12.966036796569824 + ], + [ + "▁Latvia", + -12.966053009033203 + ], + [ + "▁camel", + -12.966063499450684 + ], + [ + "▁PK", + -12.96606731414795 + ], + [ + "Nevertheless", + -12.966106414794922 + ], + [ + "GF", + -12.966109275817873 + ], + [ + "Adding", + -12.96615505218506 + ], + [ + "facebook", + -12.966180801391602 + ], + [ + "▁Lux", + -12.966182708740234 + ], + [ + "feel", + -12.96621036529541 + ], + [ + "OW", + -12.966225624084473 + ], + [ + "▁304", + -12.966323852539062 + ], + [ + "▁optimise", + -12.966355323791504 + ], + [ + "▁Kenyan", + -12.966482162475586 + ], + [ + "▁Bharat", + -12.966636657714844 + ], + [ + "▁Tue", + -12.966696739196776 + ], + [ + "▁intermittent", + -12.96671199798584 + ], + [ + "▁heroine", + -12.966726303100586 + ], + [ + "▁Koch", + -12.966803550720217 + ], + [ + "▁Sodium", + -12.966870307922363 + ], + [ + "Sport", + -12.966872215270996 + ], + [ + "▁bourbon", + -12.966955184936523 + ], + [ + "▁vaping", + -12.966970443725586 + ], + [ + "▁Peanut", + -12.967036247253418 + ], + [ + "kowski", + -12.967236518859863 + ], + [ + "▁forestry", + -12.967302322387695 + ], + [ + "▁overlooks", + -12.967519760131836 + ], + [ + "▁maid", + -12.967573165893556 + ], + [ + "▁Chick", + -12.967615127563477 + ], + [ + "Applications", + -12.96764087677002 + ], + [ + "Safety", + -12.967645645141602 + ], + [ + "▁Trap", + -12.967880249023438 + ], + [ + "azi", + -12.967887878417969 + ], + [ + "▁Obamacare", + -12.967974662780762 + ], + [ + "▁rents", + -12.968072891235352 + ], + [ + "▁Websites", + -12.968173027038574 + ], + [ + "▁ADD", + -12.968216896057127 + ], + [ + "▁Aus", + -12.968221664428713 + ], + [ + 
"▁retrieved", + -12.968449592590332 + ], + [ + "▁postcards", + -12.968491554260254 + ], + [ + "▁heap", + -12.968533515930176 + ], + [ + "▁Finished", + -12.96855354309082 + ], + [ + "▁Ek", + -12.96855640411377 + ], + [ + "▁DSLR", + -12.968600273132324 + ], + [ + "▁Sachs", + -12.968732833862305 + ], + [ + "Lake", + -12.968767166137695 + ], + [ + "▁Saul", + -12.968894958496094 + ], + [ + "ichi", + -12.969160079956056 + ], + [ + "▁Missing", + -12.969263076782228 + ], + [ + "▁Simulation", + -12.969319343566896 + ], + [ + "▁deprived", + -12.969366073608398 + ], + [ + "nas", + -12.969468116760254 + ], + [ + "▁floats", + -12.96950340270996 + ], + [ + "▁Garlic", + -12.969568252563477 + ], + [ + "▁Merchant", + -12.969674110412598 + ], + [ + "CAR", + -12.969719886779783 + ], + [ + "▁Whale", + -12.969741821289062 + ], + [ + "▁walnuts", + -12.969882011413574 + ], + [ + "▁merging", + -12.969894409179688 + ], + [ + "▁misdemeanor", + -12.96989917755127 + ], + [ + "▁Reduction", + -12.96993923187256 + ], + [ + "▁renters", + -12.969969749450684 + ], + [ + "▁augment", + -12.970098495483398 + ], + [ + "▁Neal", + -12.97017765045166 + ], + [ + "▁malaria", + -12.970181465148926 + ], + [ + "Chat", + -12.970210075378418 + ], + [ + "▁Default", + -12.970345497131348 + ], + [ + "bre", + -12.970403671264648 + ], + [ + "▁dumping", + -12.970499992370604 + ], + [ + "▁disrupted", + -12.970538139343262 + ], + [ + "▁ducts", + -12.970663070678713 + ], + [ + "▁brewed", + -12.970742225646973 + ], + [ + "Jeff", + -12.97087860107422 + ], + [ + "▁confession", + -12.971004486083984 + ], + [ + "▁condolences", + -12.97100830078125 + ], + [ + "▁Celebrity", + -12.97106647491455 + ], + [ + "▁analysed", + -12.971101760864258 + ], + [ + "Material", + -12.971117973327637 + ], + [ + "▁embarrassment", + -12.9713716506958 + ], + [ + "Fix", + -12.97140884399414 + ], + [ + "DJ", + -12.971478462219238 + ], + [ + "▁Tone", + -12.971613883972168 + ], + [ + "▁bishop", + -12.971614837646484 + ], + [ + "▁Tacoma", + -12.971616744995115 + ], + [ + "▁ballots", + -12.971637725830078 + ], + [ + "▁121", + -12.971647262573242 + ], + [ + "▁jerk", + -12.971654891967772 + ], + [ + "▁redirected", + -12.972046852111816 + ], + [ + "▁TU", + -12.972162246704102 + ], + [ + "dock", + -12.972169876098633 + ], + [ + "research", + -12.97228717803955 + ], + [ + "Doc", + -12.972387313842772 + ], + [ + "serving", + -12.972417831420898 + ], + [ + "ни", + -12.972418785095217 + ], + [ + "▁Dj", + -12.972472190856934 + ], + [ + "▁stab", + -12.972481727600098 + ], + [ + "▁hugs", + -12.972484588623049 + ], + [ + "▁etched", + -12.972504615783691 + ], + [ + "▁migrant", + -12.972537994384766 + ], + [ + "103", + -12.972559928894045 + ], + [ + "▁Tau", + -12.972658157348633 + ], + [ + "▁annum", + -12.972681999206545 + ], + [ + "▁subscribed", + -12.97268295288086 + ], + [ + "▁decal", + -12.972696304321287 + ], + [ + "▁kidneys", + -12.972733497619627 + ], + [ + "ania", + -12.97275161743164 + ], + [ + "▁interconnected", + -12.972811698913574 + ], + [ + "▁brilliance", + -12.972832679748535 + ], + [ + "▁pneumonia", + -12.972832679748535 + ], + [ + "▁suffice", + -12.972833633422852 + ], + [ + "▁disappearance", + -12.972925186157228 + ], + [ + "saw", + -12.973020553588867 + ], + [ + "▁va", + -12.973020553588867 + ], + [ + "▁Revelation", + -12.97307300567627 + ], + [ + "fs", + -12.973098754882812 + ], + [ + "▁favourable", + -12.973099708557127 + ], + [ + "cate", + -12.973109245300291 + ], + [ + "▁Wis", + -12.973119735717772 + ], + [ + "▁sweaters", + -12.973164558410645 + ], + [ + "▁inventions", 
+ -12.973224639892578 + ], + [ + "global", + -12.973299026489258 + ], + [ + "▁dominating", + -12.97334098815918 + ], + [ + "▁blossoms", + -12.973384857177734 + ], + [ + "▁Functional", + -12.97341251373291 + ], + [ + "▁Reverse", + -12.973621368408203 + ], + [ + "Choosing", + -12.973631858825684 + ], + [ + "Provides", + -12.973639488220217 + ], + [ + "▁realism", + -12.973657608032228 + ], + [ + "▁Comm", + -12.973679542541504 + ], + [ + "▁Freddie", + -12.973709106445312 + ], + [ + "▁militants", + -12.973800659179688 + ], + [ + "▁shear", + -12.973881721496582 + ], + [ + "▁taco", + -12.973922729492188 + ], + [ + "▁NU", + -12.974090576171877 + ], + [ + "▁Wallet", + -12.974105834960938 + ], + [ + "fur", + -12.974241256713867 + ], + [ + "▁unwilling", + -12.97426414489746 + ], + [ + "▁smoky", + -12.974308013916016 + ], + [ + "▁Garcinia", + -12.974312782287598 + ], + [ + "Bot", + -12.974343299865724 + ], + [ + "Less", + -12.97434902191162 + ], + [ + "▁gripping", + -12.97437572479248 + ], + [ + "idi", + -12.974409103393556 + ], + [ + "ifer", + -12.974410057067873 + ], + [ + "▁Sofia", + -12.974431037902832 + ], + [ + "▁understated", + -12.974444389343262 + ], + [ + "▁spraying", + -12.974468231201172 + ], + [ + "▁warmly", + -12.974472045898438 + ], + [ + "▁shutting", + -12.974495887756348 + ], + [ + "trial", + -12.974499702453612 + ], + [ + "▁BU", + -12.974555015563965 + ], + [ + "▁Yep", + -12.974564552307127 + ], + [ + "▁Digi", + -12.974569320678713 + ], + [ + "▁leases", + -12.97464084625244 + ], + [ + "▁Churches", + -12.974835395812988 + ], + [ + "▁smashed", + -12.974853515625 + ], + [ + "▁reckon", + -12.974888801574709 + ], + [ + "▁slowdown", + -12.974966049194336 + ], + [ + "▁9:", + -12.974987983703612 + ], + [ + "coated", + -12.97506618499756 + ], + [ + "▁Heads", + -12.975110054016112 + ], + [ + "Luckily", + -12.975149154663086 + ], + [ + "▁locksmiths", + -12.975178718566896 + ], + [ + "Typically", + -12.975232124328612 + ], + [ + "▁snag", + -12.975311279296877 + ], + [ + "▁stumble", + -12.975482940673828 + ], + [ + "▁helicopters", + -12.975586891174316 + ], + [ + "▁Icon", + -12.97563362121582 + ], + [ + "▁Jakarta", + -12.975786209106444 + ], + [ + "▁commuting", + -12.975786209106444 + ], + [ + "▁Physician", + -12.97583293914795 + ], + [ + "▁unseen", + -12.975838661193848 + ], + [ + "ht", + -12.975884437561035 + ], + [ + "▁admitting", + -12.976003646850586 + ], + [ + "▁Raven", + -12.976014137268066 + ], + [ + "▁randomized", + -12.976021766662598 + ], + [ + "▁Scrum", + -12.976120948791504 + ], + [ + "▁vocation", + -12.976432800292969 + ], + [ + "▁gi", + -12.976442337036133 + ], + [ + "igen", + -12.976508140563965 + ], + [ + "▁Jew", + -12.976529121398926 + ], + [ + "▁facet", + -12.976591110229492 + ], + [ + "Property", + -12.976675033569336 + ], + [ + "dian", + -12.976678848266602 + ], + [ + "5000", + -12.976699829101562 + ], + [ + "▁discern", + -12.97671127319336 + ], + [ + "Field", + -12.976723670959473 + ], + [ + "▁pesto", + -12.976743698120115 + ], + [ + "▁Rub", + -12.97689723968506 + ], + [ + "▁trivia", + -12.976945877075195 + ], + [ + "▁mergers", + -12.976963996887209 + ], + [ + "▁Accordingly", + -12.97698974609375 + ], + [ + "▁sprinkled", + -12.977178573608398 + ], + [ + "▁Fernando", + -12.97726821899414 + ], + [ + "▁Spare", + -12.977286338806152 + ], + [ + "lus", + -12.977340698242188 + ], + [ + "▁taxis", + -12.977346420288086 + ], + [ + "install", + -12.977375984191896 + ], + [ + "esa", + -12.977545738220217 + ], + [ + "▁obstruction", + -12.97756004333496 + ], + [ + "▁refinement", + 
-12.977606773376465 + ], + [ + "▁indefinitely", + -12.977645874023438 + ], + [ + "▁yelling", + -12.977667808532717 + ], + [ + "/24", + -12.977712631225586 + ], + [ + "HM", + -12.977760314941406 + ], + [ + "▁spoiled", + -12.977824211120604 + ], + [ + "▁dyes", + -12.977828025817873 + ], + [ + "▁Mali", + -12.977951049804688 + ], + [ + "▁Ox", + -12.977957725524902 + ], + [ + "hun", + -12.97797393798828 + ], + [ + "pha", + -12.978132247924805 + ], + [ + "▁Crop", + -12.978257179260254 + ], + [ + "▁principals", + -12.978261947631836 + ], + [ + "marketing", + -12.978399276733398 + ], + [ + "▁EMS", + -12.978453636169434 + ], + [ + "▁qui", + -12.97857666015625 + ], + [ + "▁Pirate", + -12.978580474853516 + ], + [ + "▁indexes", + -12.978644371032717 + ], + [ + "▁Borders", + -12.978665351867676 + ], + [ + "▁Hundreds", + -12.978858947753906 + ], + [ + "created", + -12.978877067565918 + ], + [ + "Cast", + -12.978904724121094 + ], + [ + "chem", + -12.979018211364746 + ], + [ + "▁1989,", + -12.979049682617188 + ], + [ + "▁pity", + -12.97919750213623 + ], + [ + "stress", + -12.979202270507812 + ], + [ + "▁lineage", + -12.979235649108888 + ], + [ + "END", + -12.979276657104492 + ], + [ + "▁Seats", + -12.979281425476074 + ], + [ + "▁Oasis", + -12.979350090026855 + ], + [ + "▁allergens", + -12.979379653930664 + ], + [ + "▁Fighting", + -12.979435920715332 + ], + [ + "▁1989.", + -12.979490280151367 + ], + [ + "▁externally", + -12.979491233825684 + ], + [ + "Ri", + -12.979640007019045 + ], + [ + "▁statistically", + -12.97966194152832 + ], + [ + "▁pointers", + -12.979686737060549 + ], + [ + "Council", + -12.979745864868164 + ], + [ + "▁sleepy", + -12.979777336120604 + ], + [ + "▁Edison", + -12.97983741760254 + ], + [ + "below", + -12.97988224029541 + ], + [ + "▁Mango", + -12.979939460754396 + ], + [ + "▁col", + -12.979997634887695 + ], + [ + "▁stride", + -12.979999542236328 + ], + [ + "RG", + -12.980002403259276 + ], + [ + "hin", + -12.98000717163086 + ], + [ + "lian", + -12.980026245117188 + ], + [ + "▁Portrait", + -12.980134963989258 + ], + [ + "▁owl", + -12.98015594482422 + ], + [ + "▁prized", + -12.980191230773926 + ], + [ + "▁tendencies", + -12.980254173278809 + ], + [ + "▁Christchurch", + -12.98027229309082 + ], + [ + "▁conservatives", + -12.980430603027344 + ], + [ + "▁Proper", + -12.980462074279783 + ], + [ + "▁Winery", + -12.980463981628418 + ], + [ + "tap", + -12.980486869812012 + ], + [ + "Ph", + -12.980676651000977 + ], + [ + "▁epi", + -12.980812072753906 + ], + [ + "▁Lap", + -12.98086643218994 + ], + [ + "▁WAY", + -12.980904579162598 + ], + [ + "Aug", + -12.980916976928713 + ], + [ + "▁outweigh", + -12.98093605041504 + ], + [ + "▁Compass", + -12.98104190826416 + ], + [ + "▁Pablo", + -12.981131553649902 + ], + [ + "▁remodeled", + -12.981278419494627 + ], + [ + "Nearly", + -12.981431007385254 + ], + [ + "▁Continuous", + -12.98147964477539 + ], + [ + "▁Barton", + -12.981500625610352 + ], + [ + "▁Congrats", + -12.981507301330566 + ], + [ + "▁Physicians", + -12.981555938720703 + ], + [ + "raw", + -12.981576919555664 + ], + [ + "▁scrape", + -12.981654167175291 + ], + [ + "\"?", + -12.981680870056152 + ], + [ + "▁admiration", + -12.981715202331545 + ], + [ + "▁cultivating", + -12.981715202331545 + ], + [ + "▁unnoticed", + -12.981715202331545 + ], + [ + "Beat", + -12.981822967529297 + ], + [ + "Example", + -12.981849670410156 + ], + [ + "▁climbs", + -12.98188018798828 + ], + [ + "▁bash", + -12.98188304901123 + ], + [ + "▁(#", + -12.981982231140137 + ], + [ + "▁testified", + -12.982091903686523 + ], + [ + 
"▁Quilt", + -12.982098579406738 + ], + [ + "▁outings", + -12.982257843017578 + ], + [ + "editor", + -12.98232650756836 + ], + [ + "▁Bri", + -12.98249626159668 + ], + [ + "▁Responsible", + -12.982508659362791 + ], + [ + "▁tumble", + -12.982588768005373 + ], + [ + "▁prevail", + -12.982660293579102 + ], + [ + "▁Trinidad", + -12.98280906677246 + ], + [ + "▁Instruments", + -12.982833862304688 + ], + [ + "ibility", + -12.98286247253418 + ], + [ + "▁DG", + -12.982894897460938 + ], + [ + "▁marching", + -12.983002662658691 + ], + [ + "▁12:30", + -12.983006477355955 + ], + [ + "WHAT", + -12.98301124572754 + ], + [ + "▁mutation", + -12.983017921447754 + ], + [ + "▁KL", + -12.983040809631348 + ], + [ + "mah", + -12.983124732971191 + ], + [ + "▁insecurity", + -12.983162879943848 + ], + [ + "▁chuck", + -12.983192443847656 + ], + [ + "▁decentralized", + -12.983203887939451 + ], + [ + "▁purification", + -12.983209609985352 + ], + [ + "▁Tahoe", + -12.983210563659668 + ], + [ + "▁Thermo", + -12.98334503173828 + ], + [ + "▁distorted", + -12.983405113220217 + ], + [ + "▁XI", + -12.983421325683594 + ], + [ + "▁sugary", + -12.983427047729492 + ], + [ + "▁Quartet", + -12.983428955078123 + ], + [ + "▁Required", + -12.983494758605955 + ], + [ + "▁boxed", + -12.983548164367676 + ], + [ + "▁weekdays", + -12.983601570129396 + ], + [ + "▁insult", + -12.98365879058838 + ], + [ + "appointed", + -12.983802795410156 + ], + [ + "▁concessions", + -12.983823776245115 + ], + [ + "▁1946", + -12.98387622833252 + ], + [ + "▁shakes", + -12.983891487121582 + ], + [ + "LO", + -12.984012603759766 + ], + [ + "could", + -12.984046936035156 + ], + [ + "▁Rated", + -12.984216690063477 + ], + [ + "Miss", + -12.984238624572754 + ], + [ + "cylinder", + -12.984260559082031 + ], + [ + "etto", + -12.98426914215088 + ], + [ + "studio", + -12.984271049499512 + ], + [ + "▁SHA", + -12.984289169311523 + ], + [ + "bla", + -12.98430061340332 + ], + [ + "▁lyrical", + -12.984352111816406 + ], + [ + "▁Dil", + -12.984404563903809 + ], + [ + "▁foolish", + -12.9844388961792 + ], + [ + "▁Lease", + -12.98446559906006 + ], + [ + "RW", + -12.984466552734377 + ], + [ + "▁1935", + -12.984477043151855 + ], + [ + "▁radicals", + -12.984596252441406 + ], + [ + "▁Sm", + -12.984618186950684 + ], + [ + "▁Brooke", + -12.984686851501465 + ], + [ + "▁embodies", + -12.984696388244627 + ], + [ + "▁profiling", + -12.984705924987791 + ], + [ + "eat", + -12.984728813171388 + ], + [ + "▁knitted", + -12.984736442565918 + ], + [ + "▁Boxing", + -12.984784126281738 + ], + [ + "▁Edwin", + -12.984786033630373 + ], + [ + "tai", + -12.984795570373535 + ], + [ + "Mad", + -12.984837532043455 + ], + [ + "▁prominently", + -12.984877586364746 + ], + [ + "▁revolves", + -12.984883308410645 + ], + [ + "Mind", + -12.984893798828123 + ], + [ + "▁MAX", + -12.984926223754885 + ], + [ + "▁Cooperation", + -12.98493194580078 + ], + [ + "▁runoff", + -12.984986305236816 + ], + [ + "▁Hank", + -12.985161781311035 + ], + [ + "▁SSC", + -12.985240936279297 + ], + [ + "▁stern", + -12.985485076904297 + ], + [ + "▁quota", + -12.985820770263672 + ], + [ + "▁odors", + -12.985934257507324 + ], + [ + "▁Lebanese", + -12.986013412475586 + ], + [ + "▁1:30", + -12.986026763916016 + ], + [ + "▁collars", + -12.98615550994873 + ], + [ + "▁Sauvignon", + -12.986186027526855 + ], + [ + "▁concussion", + -12.986188888549805 + ], + [ + "▁lesbian", + -12.98618984222412 + ], + [ + "▁undocumented", + -12.986215591430664 + ], + [ + "▁Pension", + -12.98625946044922 + ], + [ + "▁originating", + -12.986260414123535 + ], + [ + 
"Bridge", + -12.986261367797852 + ], + [ + "▁1991.", + -12.986312866210938 + ], + [ + "▁Lambert", + -12.98641586303711 + ], + [ + "▁vending", + -12.986437797546388 + ], + [ + "▁parcels", + -12.986438751220703 + ], + [ + "▁gemstone", + -12.986656188964844 + ], + [ + "▁nozzle", + -12.986883163452148 + ], + [ + "reg", + -12.9869966506958 + ], + [ + "2:", + -12.986998558044434 + ], + [ + "▁freshest", + -12.987008094787598 + ], + [ + "Radio", + -12.987059593200684 + ], + [ + "▁Workplace", + -12.987069129943848 + ], + [ + "near", + -12.987147331237791 + ], + [ + "num", + -12.987165451049805 + ], + [ + "▁nm", + -12.98717212677002 + ], + [ + "Bay", + -12.987239837646484 + ], + [ + "unda", + -12.987276077270508 + ], + [ + "secret", + -12.987380027770996 + ], + [ + "▁pragmatic", + -12.98768138885498 + ], + [ + "▁spruce", + -12.98768138885498 + ], + [ + "▁apnea", + -12.987682342529297 + ], + [ + "▁piercing", + -12.987685203552246 + ], + [ + "▁Assurance", + -12.987714767456056 + ], + [ + "▁misery", + -12.987722396850586 + ], + [ + "▁Designers", + -12.98780632019043 + ], + [ + "▁fined", + -12.987984657287598 + ], + [ + "Dave", + -12.988004684448242 + ], + [ + "▁SME", + -12.988035202026367 + ], + [ + "▁Mona", + -12.988133430480955 + ], + [ + "▁Cannabis", + -12.988245964050291 + ], + [ + "▁pipelines", + -12.988253593444824 + ], + [ + "legal", + -12.98830223083496 + ], + [ + "▁Pitch", + -12.988381385803224 + ], + [ + "▁Mei", + -12.988442420959473 + ], + [ + "5\"", + -12.98850154876709 + ], + [ + "▁Slice", + -12.988512992858888 + ], + [ + "-34", + -12.988574981689451 + ], + [ + "▁directive", + -12.988628387451172 + ], + [ + "▁Trio", + -12.988643646240234 + ], + [ + "nda", + -12.988771438598633 + ], + [ + "▁Pharmaceutical", + -12.98879623413086 + ], + [ + "▁Purdue", + -12.988815307617188 + ], + [ + "▁descend", + -12.988840103149414 + ], + [ + "Street", + -12.988863945007324 + ], + [ + "▁Drag", + -12.988863945007324 + ], + [ + "function", + -12.98900318145752 + ], + [ + "▁abs", + -12.989090919494627 + ], + [ + "▁DevOps", + -12.989178657531738 + ], + [ + "vert", + -12.98928451538086 + ], + [ + "ibo", + -12.989531517028809 + ], + [ + "▁ally", + -12.98957061767578 + ], + [ + "0.", + -12.98960781097412 + ], + [ + "▁AED", + -12.98969268798828 + ], + [ + "-2017", + -12.989821434020996 + ], + [ + "▁Roots", + -12.989832878112791 + ], + [ + "▁1993,", + -12.98984432220459 + ], + [ + "oda", + -12.989922523498535 + ], + [ + "▁Cynthia", + -12.989967346191406 + ], + [ + "Lead", + -12.989984512329102 + ], + [ + "Snow", + -12.990005493164062 + ], + [ + "▁Collect", + -12.990045547485352 + ], + [ + "▁Encourage", + -12.99008560180664 + ], + [ + "▁sash", + -12.990166664123535 + ], + [ + "▁Michelin", + -12.990178108215332 + ], + [ + "▁nieces", + -12.990219116210938 + ], + [ + "Journal", + -12.990397453308104 + ], + [ + "▁Premiere", + -12.990497589111328 + ], + [ + "▁2:30", + -12.99059772491455 + ], + [ + "▁Lie", + -12.990639686584473 + ], + [ + "▁spooky", + -12.990763664245604 + ], + [ + "▁3.4", + -12.990767478942873 + ], + [ + "▁bullish", + -12.99077033996582 + ], + [ + "▁Catholics", + -12.99081039428711 + ], + [ + "▁shovel", + -12.990840911865234 + ], + [ + "▁webcam", + -12.99086093902588 + ], + [ + "▁Royale", + -12.990882873535156 + ], + [ + "▁Wesley", + -12.99096965789795 + ], + [ + "▁BTC", + -12.990973472595217 + ], + [ + "▁Seeds", + -12.990991592407228 + ], + [ + "▁Fen", + -12.99104118347168 + ], + [ + "▁stunt", + -12.991076469421388 + ], + [ + "▁Authorities", + -12.991094589233398 + ], + [ + "▁souvenir", + 
-12.991125106811523 + ], + [ + "▁Lace", + -12.991155624389648 + ], + [ + "▁filings", + -12.991238594055176 + ], + [ + "mother", + -12.99124526977539 + ], + [ + "▁Honors", + -12.99126434326172 + ], + [ + "▁respecting", + -12.99142837524414 + ], + [ + "scenes", + -12.991472244262695 + ], + [ + "ор", + -12.991501808166504 + ], + [ + "▁abstraction", + -12.99155330657959 + ], + [ + "aja", + -12.99167251586914 + ], + [ + "▁takeaway", + -12.991766929626465 + ], + [ + "▁Becky", + -12.99177074432373 + ], + [ + "▁presenters", + -12.991783142089844 + ], + [ + "▁GU", + -12.99184226989746 + ], + [ + "▁discreet", + -12.991936683654783 + ], + [ + "▁perpetual", + -12.991936683654783 + ], + [ + "▁Batch", + -12.991949081420898 + ], + [ + "Jones", + -12.992149353027344 + ], + [ + "▁refrigeration", + -12.992178916931152 + ], + [ + "▁trampoline", + -12.992178916931152 + ], + [ + "▁deletion", + -12.992179870605469 + ], + [ + "hor", + -12.992199897766112 + ], + [ + "Force", + -12.992205619812012 + ], + [ + "▁Savage", + -12.99229621887207 + ], + [ + "▁tipping", + -12.99232578277588 + ], + [ + "▁airborne", + -12.99240016937256 + ], + [ + "Pe", + -12.992412567138672 + ], + [ + "▁peaches", + -12.992419242858888 + ], + [ + "▁diluted", + -12.992524147033691 + ], + [ + "▁Dresses", + -12.992598533630373 + ], + [ + "▁dragons", + -12.992630004882812 + ], + [ + "Ge", + -12.992691040039062 + ], + [ + "blind", + -12.99271011352539 + ], + [ + "▁diners", + -12.992753028869627 + ], + [ + "▁Photographer", + -12.992779731750488 + ], + [ + "▁Nico", + -12.992890357971191 + ], + [ + "quest", + -12.992979049682615 + ], + [ + "▁500,000", + -12.993115425109863 + ], + [ + "vent", + -12.993121147155762 + ], + [ + "▁fillings", + -12.993226051330566 + ], + [ + "pir", + -12.993300437927246 + ], + [ + "▁hoses", + -12.993328094482422 + ], + [ + "▁revel", + -12.993435859680176 + ], + [ + "▁Kro", + -12.9934720993042 + ], + [ + "▁Cumberland", + -12.993507385253906 + ], + [ + "▁serenity", + -12.993682861328123 + ], + [ + "▁tsunami", + -12.993682861328123 + ], + [ + "▁Havana", + -12.99370002746582 + ], + [ + "▁sucked", + -12.993754386901855 + ], + [ + "ition", + -12.99389362335205 + ], + [ + "▁diagonal", + -12.993902206420898 + ], + [ + "▁(‘", + -12.993925094604492 + ], + [ + "bly", + -12.99396514892578 + ], + [ + "▁Gerald", + -12.994081497192385 + ], + [ + "▁cores", + -12.99410915374756 + ], + [ + "▁slam", + -12.994114875793455 + ], + [ + "nai", + -12.994136810302734 + ], + [ + "▁Denise", + -12.994174003601074 + ], + [ + "▁tortilla", + -12.994174003601074 + ], + [ + "eda", + -12.994233131408691 + ], + [ + "▁advert", + -12.994333267211914 + ], + [ + "▁referencing", + -12.99472427368164 + ], + [ + "▁Fancy", + -12.99484920501709 + ], + [ + "▁guaranteeing", + -12.99488353729248 + ], + [ + "MX", + -12.994902610778809 + ], + [ + "▁pretending", + -12.99492645263672 + ], + [ + "▁summarize", + -12.995083808898926 + ], + [ + "▁troubling", + -12.99518871307373 + ], + [ + "▁Buenos", + -12.995189666748049 + ], + [ + "▁IKEA", + -12.995194435119627 + ], + [ + "▁sipping", + -12.99521541595459 + ], + [ + "▁$600", + -12.995222091674805 + ], + [ + "▁Ebola", + -12.99527359008789 + ], + [ + "▁edits", + -12.995293617248535 + ], + [ + "berries", + -12.99539279937744 + ], + [ + "ein", + -12.995561599731444 + ], + [ + "much", + -12.995596885681152 + ], + [ + "dating", + -12.995735168457031 + ], + [ + "▁1700", + -12.995741844177246 + ], + [ + "▁garner", + -12.995823860168455 + ], + [ + "▁Dollars", + -12.99587345123291 + ], + [ + ":1", + -12.99599552154541 + ], + [ + 
"bol", + -12.996021270751951 + ], + [ + "▁Felix", + -12.99604034423828 + ], + [ + "▁catcher", + -12.996044158935549 + ], + [ + "Questions", + -12.996057510375977 + ], + [ + "▁Irrigation", + -12.996334075927734 + ], + [ + "wife", + -12.996356010437012 + ], + [ + "▁Kristen", + -12.996445655822754 + ], + [ + "▁bookcase", + -12.99649715423584 + ], + [ + "▁Trails", + -12.996505737304688 + ], + [ + "points", + -12.996589660644531 + ], + [ + "▁Pl", + -12.996668815612791 + ], + [ + "Sadly", + -12.996682167053224 + ], + [ + "▁1944", + -12.996689796447754 + ], + [ + "▁Brigade", + -12.996707916259766 + ], + [ + "hay", + -12.99679946899414 + ], + [ + "▁Migration", + -12.996855735778809 + ], + [ + "6%", + -12.996866226196287 + ], + [ + "▁intestinal", + -12.996896743774414 + ], + [ + "▁Ai", + -12.996906280517578 + ], + [ + "ads", + -12.996912956237791 + ], + [ + "▁1932", + -12.996928215026855 + ], + [ + "ulating", + -12.99695873260498 + ], + [ + "▁Bulldogs", + -12.997036933898926 + ], + [ + "▁hive", + -12.99708080291748 + ], + [ + "▁iCloud", + -12.997127532958984 + ], + [ + "▁CN", + -12.997154235839844 + ], + [ + "▁customary", + -12.997267723083496 + ], + [ + "▁embraces", + -12.99728012084961 + ], + [ + "▁OnePlus", + -12.99729347229004 + ], + [ + "▁Rafael", + -12.997307777404783 + ], + [ + "▁contestants", + -12.99736213684082 + ], + [ + "Shirt", + -12.997394561767578 + ], + [ + "▁MAR", + -12.997565269470217 + ], + [ + "▁FI", + -12.997586250305176 + ], + [ + "▁skis", + -12.997587203979492 + ], + [ + "▁$2,000", + -12.997653007507324 + ], + [ + "▁plagued", + -12.9978666305542 + ], + [ + "▁lunchtime", + -12.99787425994873 + ], + [ + "rri", + -12.997878074645996 + ], + [ + "gee", + -12.997881889343262 + ], + [ + "▁nailed", + -12.997969627380373 + ], + [ + "▁Dish", + -12.997980117797852 + ], + [ + "▁reconsider", + -12.99806785583496 + ], + [ + "▁clergy", + -12.99810791015625 + ], + [ + "▁Franchise", + -12.998161315917969 + ], + [ + "▁contingency", + -12.998208045959473 + ], + [ + "▁initiation", + -12.998209953308104 + ], + [ + "▁oppression", + -12.9982271194458 + ], + [ + "▁unreasonable", + -12.9982328414917 + ], + [ + "▁(6)", + -12.998250961303713 + ], + [ + "▁Kemp", + -12.998251914978027 + ], + [ + "▁Hatch", + -12.99842929840088 + ], + [ + "/2014", + -12.998520851135254 + ], + [ + "zz", + -12.998528480529783 + ], + [ + "▁anglers", + -12.998565673828123 + ], + [ + "▁Nagar", + -12.99860668182373 + ], + [ + "▁gland", + -12.998666763305664 + ], + [ + "▁5-7", + -12.998672485351562 + ], + [ + "▁fastening", + -12.998781204223633 + ], + [ + "▁manifestation", + -12.99880313873291 + ], + [ + "2%", + -12.99881649017334 + ], + [ + "▁Saga", + -12.998988151550291 + ], + [ + "▁Nicaragua", + -12.999009132385254 + ], + [ + "▁downright", + -12.999061584472656 + ], + [ + "Americans", + -12.999093055725098 + ], + [ + "▁absorbs", + -12.999163627624512 + ], + [ + "learn", + -12.999229431152344 + ], + [ + "▁Sort", + -12.999320030212402 + ], + [ + "▁appetizers", + -12.999330520629885 + ], + [ + "▁Shortly", + -12.999372482299805 + ], + [ + "▁daddy", + -12.999536514282228 + ], + [ + "▁hog", + -12.999540328979492 + ], + [ + "950", + -12.999706268310549 + ], + [ + "▁abrasion", + -12.99972152709961 + ], + [ + "lights", + -12.999736785888672 + ], + [ + "friend", + -12.999750137329102 + ], + [ + "▁Domino", + -12.999837875366213 + ], + [ + "▁bounced", + -12.999921798706056 + ], + [ + "▁surpass", + -12.999955177307127 + ], + [ + "/9", + -12.999980926513672 + ], + [ + "▁Gorge", + -13.000021934509276 + ], + [ + ".000", + -13.000052452087402 
+ ], + [ + "▁Oman", + -13.00009059906006 + ], + [ + "▁awaited", + -13.00017547607422 + ], + [ + "▁Sight", + -13.000226020812988 + ], + [ + "copy", + -13.000279426574709 + ], + [ + "▁Payne", + -13.000300407409668 + ], + [ + "▁dispenser", + -13.000404357910156 + ], + [ + "▁shimmer", + -13.000466346740724 + ], + [ + "together", + -13.000489234924316 + ], + [ + "▁Smash", + -13.000564575195312 + ], + [ + "▁rang", + -13.00082778930664 + ], + [ + "▁Regina", + -13.001042366027832 + ], + [ + "▁ICO", + -13.00107765197754 + ], + [ + "▁Cargo", + -13.001133918762209 + ], + [ + "▁memberships", + -13.001157760620115 + ], + [ + "▁traveller", + -13.001164436340332 + ], + [ + "▁fluctuate", + -13.001235961914062 + ], + [ + "▁impurities", + -13.001235961914062 + ], + [ + "iers", + -13.001264572143556 + ], + [ + "▁Comcast", + -13.00128173828125 + ], + [ + "▁Vogue", + -13.001296997070312 + ], + [ + "▁canon", + -13.001367568969728 + ], + [ + "▁THC", + -13.001439094543455 + ], + [ + "▁Airbus", + -13.001449584960938 + ], + [ + "▁Turns", + -13.001463890075684 + ], + [ + "▁remover", + -13.001484870910645 + ], + [ + "▁Md", + -13.001605033874512 + ], + [ + "▁inefficient", + -13.001739501953123 + ], + [ + "24.", + -13.00174617767334 + ], + [ + "▁Agencies", + -13.001788139343262 + ], + [ + "▁Sandwich", + -13.001907348632812 + ], + [ + "/13", + -13.001913070678713 + ], + [ + "▁anticipating", + -13.002753257751465 + ], + [ + "▁grapefruit", + -13.002758026123049 + ], + [ + "Chapter", + -13.002768516540527 + ], + [ + "yellow", + -13.002779006958008 + ], + [ + "▁edging", + -13.002836227416992 + ], + [ + "UD", + -13.002862930297852 + ], + [ + "▁spatula", + -13.002975463867188 + ], + [ + "▁Paid", + -13.00300121307373 + ], + [ + "▁pathology", + -13.00301742553711 + ], + [ + "▁matchup", + -13.003068923950195 + ], + [ + "▁theological", + -13.00308322906494 + ], + [ + "▁Vanilla", + -13.003087997436523 + ], + [ + "▁postponed", + -13.003185272216797 + ], + [ + "▁originality", + -13.003256797790527 + ], + [ + "▁facilitator", + -13.003266334533691 + ], + [ + "▁migraine", + -13.003347396850586 + ], + [ + "▁huh", + -13.0033540725708 + ], + [ + "▁Yun", + -13.00356388092041 + ], + [ + "Knowing", + -13.003621101379396 + ], + [ + "Candidates", + -13.003728866577148 + ], + [ + "▁yielded", + -13.003766059875488 + ], + [ + "ester", + -13.003806114196776 + ], + [ + "▁estates", + -13.004161834716797 + ], + [ + "▁Covenant", + -13.004250526428224 + ], + [ + "▁whirlpool", + -13.004274368286133 + ], + [ + "▁Taco", + -13.004290580749512 + ], + [ + "kul", + -13.004311561584473 + ], + [ + "defense", + -13.004328727722168 + ], + [ + "Probably", + -13.0043363571167 + ], + [ + "▁obligated", + -13.0043363571167 + ], + [ + "▁slightest", + -13.004387855529783 + ], + [ + "▁Shades", + -13.004400253295898 + ], + [ + "▁planetary", + -13.004454612731934 + ], + [ + "motor", + -13.004484176635742 + ], + [ + "▁Telephone", + -13.004523277282717 + ], + [ + "▁fibres", + -13.004526138305664 + ], + [ + "TRO", + -13.004533767700195 + ], + [ + "▁virtualization", + -13.00461769104004 + ], + [ + "▁instincts", + -13.004722595214844 + ], + [ + "▁earns", + -13.004849433898926 + ], + [ + "sbury", + -13.004862785339355 + ], + [ + "▁campaigning", + -13.004884719848633 + ], + [ + "▁biscuit", + -13.004887580871582 + ], + [ + "▁bays", + -13.005027770996094 + ], + [ + "▁watts", + -13.005107879638672 + ], + [ + "rack", + -13.005145072937012 + ], + [ + "▁Mixer", + -13.00515365600586 + ], + [ + "▁Kap", + -13.00517749786377 + ], + [ + "▁proclaimed", + -13.005290031433104 + ], + [ + 
"▁installs", + -13.005480766296388 + ], + [ + "fund", + -13.005549430847168 + ], + [ + "▁chatter", + -13.005617141723633 + ], + [ + "▁BEFORE", + -13.005692481994627 + ], + [ + "▁conducive", + -13.005794525146484 + ], + [ + "▁Bordeaux", + -13.005807876586914 + ], + [ + "▁biotechnology", + -13.005809783935549 + ], + [ + "▁enquire", + -13.005815505981444 + ], + [ + "ges", + -13.005818367004396 + ], + [ + "▁illuminate", + -13.005818367004396 + ], + [ + "▁Tough", + -13.005837440490724 + ], + [ + "particularly", + -13.005929946899414 + ], + [ + "-32", + -13.005931854248049 + ], + [ + "Submit", + -13.00602912902832 + ], + [ + "Subscribe", + -13.006053924560549 + ], + [ + "▁($2", + -13.006105422973633 + ], + [ + "▁joyous", + -13.006152153015137 + ], + [ + "▁presidents", + -13.00616455078125 + ], + [ + "▁curly", + -13.00627899169922 + ], + [ + "▁SIP", + -13.006285667419434 + ], + [ + "▁def", + -13.00631046295166 + ], + [ + "▁Designing", + -13.006356239318848 + ], + [ + "▁samsung", + -13.00644588470459 + ], + [ + "▁remark", + -13.006580352783203 + ], + [ + "▁Aircraft", + -13.006664276123049 + ], + [ + "▁Diy", + -13.00670337677002 + ], + [ + "▁18\"", + -13.00688934326172 + ], + [ + "▁vigilant", + -13.007060050964355 + ], + [ + "▁Contains", + -13.007184982299805 + ], + [ + "Regular", + -13.00721836090088 + ], + [ + "▁mediums", + -13.007245063781738 + ], + [ + "▁3.7", + -13.007301330566406 + ], + [ + "▁inclination", + -13.007320404052734 + ], + [ + "▁quizzes", + -13.007322311401367 + ], + [ + "▁Variable", + -13.007326126098633 + ], + [ + "▁toiletries", + -13.007403373718262 + ], + [ + "▁Hammond", + -13.007410049438477 + ], + [ + "rounded", + -13.007454872131348 + ], + [ + "▁Blackberry", + -13.007530212402344 + ], + [ + "lers", + -13.007704734802246 + ], + [ + "▁Tracker", + -13.007761001586914 + ], + [ + "▁Integrity", + -13.007782936096191 + ], + [ + "▁frank", + -13.007824897766112 + ], + [ + "▁completeness", + -13.007852554321287 + ], + [ + "Ryan", + -13.007862091064451 + ], + [ + "Participants", + -13.008257865905762 + ], + [ + "▁Toby", + -13.008308410644531 + ], + [ + "▁pizzas", + -13.008405685424805 + ], + [ + "▁bankers", + -13.008407592773438 + ], + [ + "▁DOT", + -13.008463859558104 + ], + [ + "▁tails", + -13.008539199829102 + ], + [ + "visual", + -13.00856113433838 + ], + [ + "average", + -13.00864315032959 + ], + [ + "▁Tha", + -13.00873851776123 + ], + [ + "▁Truly", + -13.008769035339355 + ], + [ + "▁expo", + -13.008798599243164 + ], + [ + "▁tornado", + -13.008808135986328 + ], + [ + "▁sociology", + -13.00885009765625 + ], + [ + "▁collaboratively", + -13.008867263793944 + ], + [ + "▁hates", + -13.008877754211426 + ], + [ + "▁quoting", + -13.008910179138184 + ], + [ + "cie", + -13.008918762207031 + ], + [ + "Child", + -13.009078979492188 + ], + [ + "▁stump", + -13.009121894836426 + ], + [ + "wiki", + -13.00913906097412 + ], + [ + "Term", + -13.009215354919434 + ], + [ + "ische", + -13.009294509887695 + ], + [ + "▁muted", + -13.00934886932373 + ], + [ + "▁prides", + -13.009401321411133 + ], + [ + "▁Greatest", + -13.009478569030762 + ], + [ + "▁livelihood", + -13.009515762329102 + ], + [ + "tian", + -13.009561538696287 + ], + [ + "bodied", + -13.009613990783691 + ], + [ + "visit", + -13.009662628173828 + ], + [ + "▁Putting", + -13.00968074798584 + ], + [ + "bush", + -13.009695053100586 + ], + [ + "▁elective", + -13.009703636169434 + ], + [ + "▁1990,", + -13.009705543518066 + ], + [ + "▁Speakers", + -13.009724617004396 + ], + [ + "▁Chanel", + -13.009882926940918 + ], + [ + "▁Hospitals", + 
-13.00995922088623 + ], + [ + "▁div", + -13.010026931762695 + ], + [ + "▁surfaced", + -13.010188102722168 + ], + [ + "▁SET", + -13.010293006896973 + ], + [ + "▁sentiments", + -13.010347366333008 + ], + [ + "▁thief", + -13.010381698608398 + ], + [ + "▁Bathtub", + -13.010384559631348 + ], + [ + "▁Outreach", + -13.010411262512209 + ], + [ + "▁interchangeable", + -13.010442733764648 + ], + [ + "▁1928", + -13.010457038879396 + ], + [ + "▁mellow", + -13.01046371459961 + ], + [ + "▁rewrite", + -13.010478973388672 + ], + [ + "▁Ramsey", + -13.010512351989746 + ], + [ + "Tax", + -13.010584831237791 + ], + [ + "Chicago", + -13.010594367980955 + ], + [ + "Running", + -13.010642051696776 + ], + [ + "▁visitation", + -13.010765075683594 + ], + [ + "onym", + -13.01079559326172 + ], + [ + "▁RIGHT", + -13.010804176330566 + ], + [ + "▁medically", + -13.01095199584961 + ], + [ + "ships", + -13.011022567749023 + ], + [ + "3.1", + -13.011059761047363 + ], + [ + "▁Hua", + -13.01120376586914 + ], + [ + "Ban", + -13.011220932006836 + ], + [ + "▁Agenda", + -13.011239051818848 + ], + [ + "garden", + -13.011262893676758 + ], + [ + "▁Passenger", + -13.01129150390625 + ], + [ + "▁Cheat", + -13.011387825012209 + ], + [ + "▁PRE", + -13.011419296264648 + ], + [ + "▁str", + -13.011475563049316 + ], + [ + "▁sharpen", + -13.011569023132324 + ], + [ + "▁blazer", + -13.01164722442627 + ], + [ + "▁saints", + -13.011726379394531 + ], + [ + "liner", + -13.011817932128906 + ], + [ + "▁mascot", + -13.011911392211914 + ], + [ + "▁cor", + -13.011945724487305 + ], + [ + "tler", + -13.011957168579102 + ], + [ + "▁Byrne", + -13.011978149414062 + ], + [ + "▁quitting", + -13.012041091918944 + ], + [ + "▁entrusted", + -13.012114524841309 + ], + [ + "Residential", + -13.012164115905762 + ], + [ + "▁Bliss", + -13.012396812438965 + ], + [ + "DH", + -13.0125732421875 + ], + [ + "▁formatted", + -13.012591361999512 + ], + [ + "▁Tractor", + -13.01265811920166 + ], + [ + "▁broadcasts", + -13.01274585723877 + ], + [ + "▁keyboards", + -13.01280117034912 + ], + [ + "▁Feet", + -13.01285457611084 + ], + [ + "▁TF", + -13.012907028198242 + ], + [ + "▁collegiate", + -13.012957572937012 + ], + [ + "▁procession", + -13.012975692749023 + ], + [ + "▁exporter", + -13.01299285888672 + ], + [ + "▁angular", + -13.013018608093262 + ], + [ + "▁threaded", + -13.01302719116211 + ], + [ + "▁Emmanuel", + -13.01305866241455 + ], + [ + "▁hardness", + -13.013140678405762 + ], + [ + "▁Lyme", + -13.013291358947754 + ], + [ + "dro", + -13.013307571411133 + ], + [ + "▁Gourmet", + -13.013338088989258 + ], + [ + "▁simplistic", + -13.013442039489746 + ], + [ + "▁ovarian", + -13.013445854187012 + ], + [ + "▁Aerospace", + -13.013457298278809 + ], + [ + "▁marrow", + -13.013505935668944 + ], + [ + "Mag", + -13.01356601715088 + ], + [ + "▁reacted", + -13.013585090637209 + ], + [ + "qualified", + -13.01366138458252 + ], + [ + "paying", + -13.013681411743164 + ], + [ + "▁phosphate", + -13.013686180114746 + ], + [ + "▁Fairfield", + -13.013701438903809 + ], + [ + "▁Fringe", + -13.01371955871582 + ], + [ + "▁Legislative", + -13.013795852661133 + ], + [ + "▁Leonardo", + -13.013853073120115 + ], + [ + "▁searchable", + -13.013894081115724 + ], + [ + "▁Cad", + -13.01393222808838 + ], + [ + "ises", + -13.013993263244627 + ], + [ + "▁Nigel", + -13.014280319213867 + ], + [ + "▁shrinking", + -13.01443576812744 + ], + [ + "▁screenplay", + -13.01444149017334 + ], + [ + "▁Combining", + -13.014453887939451 + ], + [ + "▁HM", + -13.014544486999512 + ], + [ + "▁Laurie", + -13.014549255371094 + ], + [ + 
"▁gemstones", + -13.014580726623535 + ], + [ + "solid", + -13.014646530151367 + ], + [ + "▁Cra", + -13.014660835266112 + ], + [ + "▁AMC", + -13.01473617553711 + ], + [ + "▁Authors", + -13.014771461486816 + ], + [ + "▁grasses", + -13.01480770111084 + ], + [ + "▁Connections", + -13.014822006225586 + ], + [ + "▁Patel", + -13.014887809753418 + ], + [ + "/7", + -13.014899253845217 + ], + [ + "▁arrivals", + -13.014968872070312 + ], + [ + "▁moisturizing", + -13.014976501464844 + ], + [ + "▁Ful", + -13.015055656433104 + ], + [ + "▁earnest", + -13.015057563781738 + ], + [ + "▁RH", + -13.015073776245115 + ], + [ + "▁HER", + -13.01508331298828 + ], + [ + "▁cancellations", + -13.01508903503418 + ], + [ + "uel", + -13.015177726745604 + ], + [ + "▁Hastings", + -13.015219688415527 + ], + [ + "▁correlated", + -13.015381813049316 + ], + [ + "▁AFC", + -13.015393257141112 + ], + [ + "▁borne", + -13.015425682067873 + ], + [ + "▁garages", + -13.015461921691896 + ], + [ + "▁Finger", + -13.015510559082031 + ], + [ + "2005", + -13.015557289123535 + ], + [ + "▁screwed", + -13.01557445526123 + ], + [ + "XP", + -13.015607833862305 + ], + [ + "▁python", + -13.015621185302734 + ], + [ + "EK", + -13.015680313110352 + ], + [ + "tent", + -13.01571559906006 + ], + [ + "▁Vineyard", + -13.015734672546388 + ], + [ + "▁waffle", + -13.015745162963867 + ], + [ + "Cell", + -13.015748977661133 + ], + [ + "metry", + -13.015838623046877 + ], + [ + "▁SAM", + -13.015853881835938 + ], + [ + "▁schematic", + -13.015854835510254 + ], + [ + "▁distracting", + -13.01585578918457 + ], + [ + "▁buttery", + -13.015911102294922 + ], + [ + "▁underside", + -13.016032218933104 + ], + [ + "▁souvenirs", + -13.016056060791016 + ], + [ + "RIS", + -13.01611042022705 + ], + [ + "▁Ming", + -13.016141891479492 + ], + [ + "▁physiology", + -13.016215324401855 + ], + [ + "▁schema", + -13.016236305236816 + ], + [ + "▁scramble", + -13.016387939453123 + ], + [ + "GN", + -13.016397476196287 + ], + [ + "▁Pasadena", + -13.016514778137209 + ], + [ + "happy", + -13.01654052734375 + ], + [ + "▁Dorset", + -13.016584396362305 + ], + [ + "▁Hogan", + -13.016728401184082 + ], + [ + "▁spas", + -13.016873359680176 + ], + [ + "width", + -13.016958236694336 + ], + [ + "▁HELP", + -13.016969680786133 + ], + [ + "▁Weaver", + -13.017070770263672 + ], + [ + "▁refrigerated", + -13.017097473144531 + ], + [ + "▁moreover", + -13.017168998718262 + ], + [ + "▁DX", + -13.01719570159912 + ], + [ + "anza", + -13.017244338989258 + ], + [ + "▁fleeing", + -13.017255783081056 + ], + [ + "otto", + -13.017374038696287 + ], + [ + "FN", + -13.017376899719238 + ], + [ + "▁encompassing", + -13.017382621765137 + ], + [ + "▁Xavier", + -13.017390251159668 + ], + [ + "▁freeway", + -13.017589569091797 + ], + [ + "▁OPEN", + -13.01778793334961 + ], + [ + "▁raisins", + -13.0178861618042 + ], + [ + "▁routers", + -13.017956733703612 + ], + [ + "▁legitimacy", + -13.018057823181152 + ], + [ + "▁vibrating", + -13.018058776855469 + ], + [ + "▁plaza", + -13.018065452575684 + ], + [ + "▁accolades", + -13.018074989318848 + ], + [ + "▁panorama", + -13.018074989318848 + ], + [ + "▁Pray", + -13.0182523727417 + ], + [ + "▁$99", + -13.018320083618164 + ], + [ + "▁Romance", + -13.018392562866213 + ], + [ + "Commercial", + -13.018404960632324 + ], + [ + "Pretty", + -13.018476486206056 + ], + [ + "▁unloading", + -13.018477439880373 + ], + [ + "Western", + -13.018488883972168 + ], + [ + "▁avoidance", + -13.018545150756836 + ], + [ + "▁ignite", + -13.018669128417969 + ], + [ + "▁incorporation", + -13.018678665161133 + ], + [ + 
"▁Kirby", + -13.018754959106444 + ], + [ + "▁Rh", + -13.018758773803713 + ], + [ + "▁EOS", + -13.018780708312988 + ], + [ + "▁Tak", + -13.018818855285645 + ], + [ + "▁jewels", + -13.01887035369873 + ], + [ + "▁Ng", + -13.019072532653809 + ], + [ + "trend", + -13.019092559814451 + ], + [ + "▁quantify", + -13.019094467163086 + ], + [ + "ICS", + -13.019119262695312 + ], + [ + "▁charms", + -13.019250869750977 + ], + [ + "▁FHA", + -13.019290924072266 + ], + [ + "JC", + -13.019299507141112 + ], + [ + "▁wo", + -13.019373893737791 + ], + [ + "▁yen", + -13.019535064697266 + ], + [ + "▁equilibrium", + -13.019566535949709 + ], + [ + "HU", + -13.0195951461792 + ], + [ + "▁Episcopal", + -13.019598960876465 + ], + [ + "▁memorabilia", + -13.019598960876465 + ], + [ + "▁subsidy", + -13.01959991455078 + ], + [ + "▁Consult", + -13.019631385803224 + ], + [ + "▁Happiness", + -13.019676208496094 + ], + [ + "▁Maison", + -13.01972484588623 + ], + [ + "▁missionaries", + -13.019740104675291 + ], + [ + "▁Practitioner", + -13.019780158996582 + ], + [ + "▁crater", + -13.019789695739746 + ], + [ + "▁Investing", + -13.019808769226074 + ], + [ + "▁mosquitoes", + -13.019810676574709 + ], + [ + "▁Barb", + -13.019920349121094 + ], + [ + "▁$70", + -13.019939422607422 + ], + [ + "▁womb", + -13.019949913024902 + ], + [ + "▁Sail", + -13.019954681396484 + ], + [ + "Commerce", + -13.019962310791016 + ], + [ + "important", + -13.020039558410645 + ], + [ + "▁individuality", + -13.02015209197998 + ], + [ + "▁congress", + -13.020343780517578 + ], + [ + "▁Relay", + -13.020429611206056 + ], + [ + "▁FAQs", + -13.02046012878418 + ], + [ + "▁broadcaster", + -13.020575523376465 + ], + [ + "▁FP", + -13.020586013793944 + ], + [ + "Russian", + -13.020596504211426 + ], + [ + "▁Prevent", + -13.020663261413574 + ], + [ + "▁john", + -13.020670890808104 + ], + [ + "▁yachts", + -13.02068328857422 + ], + [ + "\">", + -13.020722389221191 + ], + [ + "▁angled", + -13.02072525024414 + ], + [ + "▁compartments", + -13.020800590515137 + ], + [ + "▁assures", + -13.020804405212402 + ], + [ + "▁pallets", + -13.020873069763184 + ], + [ + "▁shooters", + -13.020933151245115 + ], + [ + "▁exemptions", + -13.020981788635254 + ], + [ + "Receive", + -13.021032333374023 + ], + [ + "▁renders", + -13.02104949951172 + ], + [ + "EF", + -13.021078109741213 + ], + [ + "▁Puzzle", + -13.021121978759766 + ], + [ + "▁coriander", + -13.02114486694336 + ], + [ + "▁epilepsy", + -13.02114486694336 + ], + [ + "▁seismic", + -13.021145820617676 + ], + [ + "ode", + -13.021224975585938 + ], + [ + "▁prohibition", + -13.021272659301758 + ], + [ + "▁Intellectual", + -13.02138900756836 + ], + [ + "▁sexuality", + -13.021402359008787 + ], + [ + "▁Ling", + -13.021414756774902 + ], + [ + "▁pathogens", + -13.02147388458252 + ], + [ + "▁algebra", + -13.021513938903809 + ], + [ + "▁changer", + -13.021799087524414 + ], + [ + "▁Scholars", + -13.021800994873049 + ], + [ + "▁exploits", + -13.021927833557127 + ], + [ + "▁Mist", + -13.022031784057615 + ], + [ + "▁JC", + -13.022134780883787 + ], + [ + "▁kitten", + -13.02224063873291 + ], + [ + "▁gauges", + -13.02238941192627 + ], + [ + "Block", + -13.022470474243164 + ], + [ + "util", + -13.022547721862791 + ], + [ + "▁correcting", + -13.02259349822998 + ], + [ + "Russia", + -13.022686004638672 + ], + [ + "▁disappearing", + -13.022706031799316 + ], + [ + "▁exp", + -13.022706031799316 + ], + [ + "▁DLC", + -13.022720336914062 + ], + [ + "ABC", + -13.022747039794922 + ], + [ + "Student", + -13.022747039794922 + ], + [ + "▁CEOs", + -13.02276611328125 + ], + 
[ + "▁Flynn", + -13.022804260253906 + ], + [ + "▁Scratch", + -13.022852897644045 + ], + [ + "▁harmed", + -13.023053169250488 + ], + [ + "ack", + -13.023200988769531 + ], + [ + "▁Pork", + -13.023218154907228 + ], + [ + "Japanese", + -13.023231506347656 + ], + [ + "Queen", + -13.02333641052246 + ], + [ + "▁approachable", + -13.02342700958252 + ], + [ + "ying", + -13.023486137390137 + ], + [ + "▁waterways", + -13.023635864257812 + ], + [ + "▁Rhino", + -13.02371311187744 + ], + [ + "Score", + -13.023771286010742 + ], + [ + "▁requisite", + -13.02379035949707 + ], + [ + "▁WD", + -13.02389430999756 + ], + [ + "ulu", + -13.024003982543944 + ], + [ + "▁Barrier", + -13.024092674255373 + ], + [ + "▁objections", + -13.024092674255373 + ], + [ + "writer", + -13.024093627929688 + ], + [ + "▁Tribe", + -13.024127960205078 + ], + [ + "▁invariably", + -13.024242401123049 + ], + [ + "▁Gravity", + -13.024247169494627 + ], + [ + "▁Rifle", + -13.024247169494627 + ], + [ + "▁dependencies", + -13.02425479888916 + ], + [ + "▁foreground", + -13.024255752563477 + ], + [ + "▁curls", + -13.02425765991211 + ], + [ + "▁CJ", + -13.024310111999512 + ], + [ + "▁multicultural", + -13.024374008178713 + ], + [ + "voice", + -13.02444839477539 + ], + [ + "winter", + -13.024582862854004 + ], + [ + "▁Detailed", + -13.0245943069458 + ], + [ + "▁Jong", + -13.024598121643066 + ], + [ + "▁Waiting", + -13.024656295776367 + ], + [ + "cons", + -13.024666786193848 + ], + [ + "▁cones", + -13.024855613708496 + ], + [ + "▁remnants", + -13.024879455566406 + ], + [ + "Major", + -13.024884223937988 + ], + [ + "▁Convert", + -13.024907112121582 + ], + [ + "fitting", + -13.024953842163086 + ], + [ + "▁Roberto", + -13.024953842163086 + ], + [ + "Dar", + -13.024967193603516 + ], + [ + "hrs", + -13.02498722076416 + ], + [ + "▁spotting", + -13.024999618530272 + ], + [ + "ZE", + -13.02502155303955 + ], + [ + "▁pleasures", + -13.025124549865724 + ], + [ + "medicine", + -13.025288581848145 + ], + [ + "▁Macro", + -13.02546215057373 + ], + [ + "▁Mitt", + -13.025464057922363 + ], + [ + "▁strides", + -13.025471687316896 + ], + [ + "worm", + -13.025473594665527 + ], + [ + "garde", + -13.025555610656738 + ], + [ + "▁watershed", + -13.02565097808838 + ], + [ + "company", + -13.02572536468506 + ], + [ + "▁cartilage", + -13.025795936584473 + ], + [ + "▁contagious", + -13.025795936584473 + ], + [ + "pati", + -13.025879859924316 + ], + [ + "▁nephews", + -13.02588176727295 + ], + [ + "dial", + -13.025919914245604 + ], + [ + "ugh", + -13.025946617126465 + ], + [ + "▁snorkeling", + -13.02615451812744 + ], + [ + "▁distinctly", + -13.02622890472412 + ], + [ + "uer", + -13.026250839233398 + ], + [ + "▁Boise", + -13.026254653930664 + ], + [ + "GAR", + -13.026284217834473 + ], + [ + "▁capita", + -13.026418685913086 + ], + [ + "SK", + -13.026456832885742 + ], + [ + "▁shale", + -13.026528358459473 + ], + [ + "▁coop", + -13.026554107666016 + ], + [ + "▁chats", + -13.026599884033203 + ], + [ + "▁exiting", + -13.026628494262695 + ], + [ + "tee", + -13.02669906616211 + ], + [ + "▁Hose", + -13.026735305786133 + ], + [ + "▁Challenges", + -13.02676010131836 + ], + [ + "▁sands", + -13.026796340942385 + ], + [ + "▁Rust", + -13.02682113647461 + ], + [ + "▁Honolulu", + -13.026898384094238 + ], + [ + "▁Mirrors", + -13.026927947998049 + ], + [ + "▁ideals", + -13.026979446411133 + ], + [ + "▁castles", + -13.027033805847168 + ], + [ + "▁Downs", + -13.027052879333496 + ], + [ + "▁stacking", + -13.027057647705078 + ], + [ + "oke", + -13.027069091796877 + ], + [ + "▁ceased", + 
-13.02708625793457 + ], + [ + "▁ideological", + -13.027107238769531 + ], + [ + "▁Supplement", + -13.027132034301758 + ], + [ + "▁Recording", + -13.02721118927002 + ], + [ + "▁Peri", + -13.0272216796875 + ], + [ + "endo", + -13.027307510375977 + ], + [ + "▁possessing", + -13.027351379394531 + ], + [ + "▁Thornton", + -13.02739429473877 + ], + [ + "▁Corinthians", + -13.02757167816162 + ], + [ + "▁CAS", + -13.027628898620604 + ], + [ + "▁riot", + -13.027663230895996 + ], + [ + "-1)", + -13.027812004089355 + ], + [ + "▁rulers", + -13.027826309204102 + ], + [ + "▁Salvation", + -13.027872085571287 + ], + [ + "▁psychologists", + -13.02790069580078 + ], + [ + "▁MIDI", + -13.028059005737305 + ], + [ + "church", + -13.02809238433838 + ], + [ + "▁frustrations", + -13.028106689453123 + ], + [ + "▁Fraud", + -13.028136253356934 + ], + [ + "▁Cognitive", + -13.02824878692627 + ], + [ + "▁corrupted", + -13.02830410003662 + ], + [ + "▁circulate", + -13.028308868408203 + ], + [ + "▁501", + -13.028360366821287 + ], + [ + "▁hepatitis", + -13.0283784866333 + ], + [ + "Blog", + -13.028392791748049 + ], + [ + "105", + -13.028509140014648 + ], + [ + "▁shin", + -13.028544425964355 + ], + [ + "Simon", + -13.028650283813477 + ], + [ + "▁imagining", + -13.028675079345703 + ], + [ + "▁Modeling", + -13.028742790222168 + ], + [ + "▁alliances", + -13.02884578704834 + ], + [ + "▁cavities", + -13.028908729553224 + ], + [ + "▁mediocre", + -13.028908729553224 + ], + [ + "▁Canterbury", + -13.02890968322754 + ], + [ + "▁diversification", + -13.028919219970703 + ], + [ + "▁loudly", + -13.028977394104004 + ], + [ + "▁ambassadors", + -13.02901554107666 + ], + [ + "▁participates", + -13.029090881347656 + ], + [ + "▁Fiat", + -13.029096603393556 + ], + [ + "Eat", + -13.02912425994873 + ], + [ + "▁Russians", + -13.029167175292969 + ], + [ + "▁Reason", + -13.02917194366455 + ], + [ + "▁stripping", + -13.029272079467772 + ], + [ + "▁Formal", + -13.029309272766112 + ], + [ + "horse", + -13.029386520385742 + ], + [ + "▁Benson", + -13.029474258422852 + ], + [ + "▁vampire", + -13.029474258422852 + ], + [ + "▁Nielsen", + -13.029525756835938 + ], + [ + "▁$19", + -13.029755592346191 + ], + [ + "mAh", + -13.029767036437988 + ], + [ + "▁motivations", + -13.02978515625 + ], + [ + "▁Twist", + -13.029807090759276 + ], + [ + "served", + -13.029874801635742 + ], + [ + "▁handbags", + -13.029943466186523 + ], + [ + "format", + -13.02997589111328 + ], + [ + "boxes", + -13.030028343200684 + ], + [ + "▁$1.5", + -13.030038833618164 + ], + [ + "▁Schneider", + -13.030062675476074 + ], + [ + "▁(2016)", + -13.030068397521973 + ], + [ + "▁reopen", + -13.030089378356934 + ], + [ + "Sand", + -13.030137062072754 + ], + [ + "▁Gavin", + -13.030190467834473 + ], + [ + "▁Cheryl", + -13.030202865600586 + ], + [ + "▁Admissions", + -13.03031063079834 + ], + [ + "▁MAKE", + -13.030412673950195 + ], + [ + "▁Cabernet", + -13.030468940734863 + ], + [ + "▁prominence", + -13.030468940734863 + ], + [ + "fusion", + -13.03049373626709 + ], + [ + "▁Brit", + -13.030557632446287 + ], + [ + "▁sweetheart", + -13.030567169189451 + ], + [ + "▁variance", + -13.030613899230955 + ], + [ + "▁sprawling", + -13.030678749084473 + ], + [ + "▁estimating", + -13.0306978225708 + ], + [ + "▁squeezed", + -13.030720710754396 + ], + [ + "ICAL", + -13.03073787689209 + ], + [ + "▁softness", + -13.030794143676758 + ], + [ + "▁greasy", + -13.03083038330078 + ], + [ + "▁Judith", + -13.030969619750977 + ], + [ + "▁hailed", + -13.031103134155272 + ], + [ + "Rich", + -13.031126976013184 + ], + [ + "within", + 
-13.03116512298584 + ], + [ + "▁INR", + -13.031291007995604 + ], + [ + "▁shrub", + -13.031435012817385 + ], + [ + "▁airfare", + -13.031492233276367 + ], + [ + "▁155", + -13.03175449371338 + ], + [ + "hong", + -13.03185749053955 + ], + [ + "▁biodegradable", + -13.032034873962402 + ], + [ + "illy", + -13.032099723815918 + ], + [ + "▁Monaco", + -13.032132148742676 + ], + [ + "▁disturb", + -13.032261848449709 + ], + [ + "▁CSA", + -13.032442092895508 + ], + [ + "▁Lands", + -13.032511711120604 + ], + [ + "▁solder", + -13.032525062561035 + ], + [ + "▁Mina", + -13.032591819763184 + ], + [ + "▁Pound", + -13.032593727111816 + ], + [ + "▁conditioners", + -13.032719612121582 + ], + [ + "▁dusk", + -13.032732009887695 + ], + [ + "▁127", + -13.032750129699709 + ], + [ + "6-", + -13.03276824951172 + ], + [ + "inal", + -13.032896995544434 + ], + [ + "▁binder", + -13.032994270324709 + ], + [ + "▁Crash", + -13.033123016357422 + ], + [ + "tun", + -13.033164978027344 + ], + [ + "▁noun", + -13.033221244812012 + ], + [ + "▁authorize", + -13.033236503601074 + ], + [ + "▁inserting", + -13.033251762390137 + ], + [ + "▁lids", + -13.03325366973877 + ], + [ + "Yep", + -13.033278465270996 + ], + [ + "AZ", + -13.03328800201416 + ], + [ + "▁shaker", + -13.033310890197754 + ], + [ + "etic", + -13.033487319946287 + ], + [ + "label", + -13.033503532409668 + ], + [ + "nos", + -13.0335054397583 + ], + [ + "▁Honduras", + -13.03359603881836 + ], + [ + "▁negativity", + -13.033598899841309 + ], + [ + "▁duvet", + -13.033610343933104 + ], + [ + "▁fiery", + -13.033610343933104 + ], + [ + "runner", + -13.033647537231444 + ], + [ + "▁Gloria", + -13.033738136291504 + ], + [ + "▁weaken", + -13.033793449401855 + ], + [ + "▁voiced", + -13.033839225769045 + ], + [ + "8.5", + -13.034013748168944 + ], + [ + "▁Liv", + -13.03421688079834 + ], + [ + "ional", + -13.034295082092283 + ], + [ + "▁wedges", + -13.034334182739258 + ], + [ + "Notes", + -13.034439086914062 + ], + [ + "ender", + -13.034465789794922 + ], + [ + "Adam", + -13.034470558166504 + ], + [ + "▁disclosures", + -13.034472465515137 + ], + [ + "▁Herb", + -13.034639358520508 + ], + [ + "▁Bau", + -13.034761428833008 + ], + [ + "▁succeeding", + -13.034761428833008 + ], + [ + "pub", + -13.034870147705078 + ], + [ + "▁transcripts", + -13.034904479980469 + ], + [ + "teau", + -13.03491497039795 + ], + [ + "▁freestyle", + -13.03498649597168 + ], + [ + "hab", + -13.034987449645996 + ], + [ + "▁salted", + -13.03504467010498 + ], + [ + "Label", + -13.035078048706056 + ], + [ + "▁enrolling", + -13.035083770751951 + ], + [ + "▁Celsius", + -13.035163879394531 + ], + [ + "▁nectar", + -13.035181999206545 + ], + [ + "▁Jonas", + -13.03525447845459 + ], + [ + "▁Aquarium", + -13.035270690917969 + ], + [ + "▁Mata", + -13.035377502441406 + ], + [ + "TG", + -13.035426139831545 + ], + [ + "▁PMID", + -13.035449981689451 + ], + [ + "▁supervise", + -13.035552978515623 + ], + [ + "hope", + -13.035577774047852 + ], + [ + "5.5", + -13.035636901855469 + ], + [ + "-2016", + -13.035755157470703 + ], + [ + "▁Bowling", + -13.035898208618164 + ], + [ + "▁endpoint", + -13.035934448242188 + ], + [ + "Reuters", + -13.036006927490234 + ], + [ + "▁Nights", + -13.03604221343994 + ], + [ + "▁commanding", + -13.036060333251951 + ], + [ + "▁Temperature", + -13.036133766174316 + ], + [ + "▁138", + -13.03615665435791 + ], + [ + "▁gearing", + -13.03615951538086 + ], + [ + "hide", + -13.036163330078123 + ], + [ + "Extra", + -13.036206245422363 + ], + [ + "urban", + -13.036280632019045 + ], + [ + "letter", + -13.036284446716309 + ], 
+ [ + "▁Surgical", + -13.036299705505373 + ], + [ + "iche", + -13.036367416381836 + ], + [ + "▁gathers", + -13.036494255065918 + ], + [ + "▁Bates", + -13.036580085754396 + ], + [ + "▁Hanging", + -13.036625862121582 + ], + [ + "▁Siemens", + -13.036666870117188 + ], + [ + "▁elegantly", + -13.036718368530272 + ], + [ + "▁narration", + -13.036733627319336 + ], + [ + "▁scalability", + -13.036733627319336 + ], + [ + "▁Observatory", + -13.036734580993652 + ], + [ + "▁MLA", + -13.036754608154297 + ], + [ + "croft", + -13.03675651550293 + ], + [ + "▁Basket", + -13.03677463531494 + ], + [ + "som", + -13.03695297241211 + ], + [ + "Purchase", + -13.036956787109377 + ], + [ + "▁Maths", + -13.036956787109377 + ], + [ + "cker", + -13.037004470825195 + ], + [ + "wyn", + -13.037012100219728 + ], + [ + "▁1927", + -13.037075996398926 + ], + [ + "▁offenses", + -13.037116050720217 + ], + [ + "fuel", + -13.037139892578123 + ], + [ + "▁doomed", + -13.03717803955078 + ], + [ + "faced", + -13.03730010986328 + ], + [ + "serv", + -13.037310600280762 + ], + [ + "▁om", + -13.03732681274414 + ], + [ + "▁Publishers", + -13.037344932556152 + ], + [ + "▁workstation", + -13.037484169006348 + ], + [ + "▁brownies", + -13.037583351135254 + ], + [ + "▁safeguarding", + -13.0379056930542 + ], + [ + "png", + -13.037959098815918 + ], + [ + "▁Bil", + -13.03799819946289 + ], + [ + "▁Occasionally", + -13.038022994995115 + ], + [ + "▁behavioural", + -13.038137435913086 + ], + [ + "vc", + -13.038262367248535 + ], + [ + "▁Af", + -13.038284301757812 + ], + [ + "▁sublime", + -13.038325309753418 + ], + [ + "▁uninsured", + -13.03834342956543 + ], + [ + "Nu", + -13.038352012634276 + ], + [ + "▁Fencing", + -13.038352012634276 + ], + [ + "▁Classics", + -13.038352966308594 + ], + [ + "▁CHA", + -13.03838348388672 + ], + [ + "▁songwriting", + -13.038467407226562 + ], + [ + "▁condensed", + -13.038501739501951 + ], + [ + "oto", + -13.03859806060791 + ], + [ + "▁Seminary", + -13.038640022277832 + ], + [ + "▁NEC", + -13.038651466369627 + ], + [ + "▁inhabited", + -13.038766860961914 + ], + [ + "▁310", + -13.03886604309082 + ], + [ + "ref", + -13.0388822555542 + ], + [ + "▁Renault", + -13.038917541503906 + ], + [ + "▁1990.", + -13.038928031921388 + ], + [ + "▁Mora", + -13.039057731628418 + ], + [ + "aar", + -13.039109230041504 + ], + [ + "▁weekday", + -13.039166450500488 + ], + [ + "▁Forecast", + -13.039179801940918 + ], + [ + "title", + -13.039236068725586 + ], + [ + "▁urgently", + -13.03933811187744 + ], + [ + "▁acronym", + -13.03934097290039 + ], + [ + "▁assertion", + -13.039380073547363 + ], + [ + "▁hr", + -13.039392471313477 + ], + [ + "Report", + -13.039395332336426 + ], + [ + "wed", + -13.039451599121094 + ], + [ + "▁Titans", + -13.039477348327637 + ], + [ + "flu", + -13.039627075195312 + ], + [ + "▁masculine", + -13.039774894714355 + ], + [ + "▁60,000", + -13.03977870941162 + ], + [ + "▁Brendan", + -13.03978443145752 + ], + [ + "▁fortress", + -13.03980541229248 + ], + [ + "▁errands", + -13.039854049682615 + ], + [ + "▁swollen", + -13.039881706237791 + ], + [ + "▁Decking", + -13.040105819702148 + ], + [ + "▁Assad", + -13.040115356445312 + ], + [ + "und", + -13.040295600891112 + ], + [ + "▁disclaimer", + -13.040337562561035 + ], + [ + "▁SALE", + -13.040382385253906 + ], + [ + "▁pumpkins", + -13.040576934814451 + ], + [ + "▁Computers", + -13.040637969970703 + ], + [ + "▁Pony", + -13.040684700012209 + ], + [ + "▁Kaiser", + -13.04087734222412 + ], + [ + "▁Bracelet", + -13.040938377380373 + ], + [ + "▁Patterns", + -13.040966033935549 + ], + [ + 
"▁Shea", + -13.041074752807615 + ], + [ + "INS", + -13.041172981262209 + ], + [ + "▁gowns", + -13.041176795959473 + ], + [ + "▁Coil", + -13.041193962097168 + ], + [ + "physical", + -13.041252136230469 + ], + [ + "▁LIMITED", + -13.041277885437012 + ], + [ + "chief", + -13.04128360748291 + ], + [ + "2-3", + -13.041287422180176 + ], + [ + "▁glitch", + -13.0413179397583 + ], + [ + "Classic", + -13.04141902923584 + ], + [ + "▁shred", + -13.041424751281738 + ], + [ + "▁steaks", + -13.041452407836914 + ], + [ + "▁Salisbury", + -13.041459083557127 + ], + [ + "rou", + -13.041475296020508 + ], + [ + "▁exhaustion", + -13.041614532470703 + ], + [ + "▁patriotic", + -13.04173755645752 + ], + [ + "▁deduct", + -13.0418119430542 + ], + [ + "▁acknowledgement", + -13.04185962677002 + ], + [ + "▁comparatively", + -13.041877746582031 + ], + [ + "▁airplanes", + -13.04191780090332 + ], + [ + "▁pups", + -13.04192352294922 + ], + [ + "▁reg", + -13.041934967041016 + ], + [ + "▁1948", + -13.042152404785156 + ], + [ + "▁administering", + -13.042222023010254 + ], + [ + "▁cranes", + -13.042302131652832 + ], + [ + "league", + -13.042312622070312 + ], + [ + "▁Acoustic", + -13.042314529418944 + ], + [ + "▁carbohydrate", + -13.042352676391602 + ], + [ + "Oil", + -13.042447090148926 + ], + [ + "▁cutest", + -13.04278564453125 + ], + [ + "▁Boxes", + -13.042808532714844 + ], + [ + "▁eh", + -13.042999267578123 + ], + [ + "▁substituted", + -13.04301929473877 + ], + [ + "▁Myrtle", + -13.04303741455078 + ], + [ + "ction", + -13.04310131072998 + ], + [ + "▁Procedure", + -13.043137550354004 + ], + [ + "healthy", + -13.043173789978027 + ], + [ + "common", + -13.043387413024902 + ], + [ + "▁Zion", + -13.04344654083252 + ], + [ + "ROM", + -13.043485641479492 + ], + [ + "▁SUVs", + -13.043553352355955 + ], + [ + "▁Belmont", + -13.043561935424805 + ], + [ + "goers", + -13.043563842773438 + ], + [ + "tig", + -13.043572425842283 + ], + [ + "▁ETFs", + -13.043581008911133 + ], + [ + "▁VII", + -13.043648719787598 + ], + [ + "bir", + -13.043667793273926 + ], + [ + "▁Qua", + -13.043763160705566 + ], + [ + "pra", + -13.043804168701172 + ], + [ + "▁rampant", + -13.043952941894531 + ], + [ + "▁Corey", + -13.044018745422363 + ], + [ + "▁Tex", + -13.044023513793944 + ], + [ + "Jason", + -13.044144630432127 + ], + [ + "▁Zinc", + -13.04422092437744 + ], + [ + "▁matt", + -13.044289588928224 + ], + [ + "Protect", + -13.044351577758787 + ], + [ + "▁Fil", + -13.044390678405762 + ], + [ + "▁imperfect", + -13.044602394104004 + ], + [ + "▁Blessed", + -13.044615745544434 + ], + [ + "▁sumptuous", + -13.0446195602417 + ], + [ + "▁Disabilities", + -13.04462432861328 + ], + [ + "▁variability", + -13.044626235961914 + ], + [ + "▁OM", + -13.04463005065918 + ], + [ + "▁Pratt", + -13.04464626312256 + ], + [ + "located", + -13.044672966003418 + ], + [ + "▁construed", + -13.044684410095217 + ], + [ + "▁Panther", + -13.044856071472168 + ], + [ + "▁roommate", + -13.044861793518066 + ], + [ + "▁constrained", + -13.04501724243164 + ], + [ + "▁intolerance", + -13.045068740844728 + ], + [ + "-95", + -13.04514217376709 + ], + [ + "▁smoker", + -13.04519748687744 + ], + [ + "▁Levy", + -13.045211791992188 + ], + [ + "TD", + -13.045280456542969 + ], + [ + "▁tug", + -13.045357704162598 + ], + [ + "▁Defender", + -13.04544734954834 + ], + [ + "▁Woodland", + -13.045476913452148 + ], + [ + "▁Responsibility", + -13.045538902282717 + ], + [ + "kitchen", + -13.045573234558104 + ], + [ + "▁Bistro", + -13.045584678649902 + ], + [ + "focus", + -13.045615196228027 + ], + [ + "▁Costs", + 
-13.04563331604004 + ], + [ + "▁lust", + -13.045681953430176 + ], + [ + "▁culprit", + -13.045737266540527 + ], + [ + "................", + -13.045778274536133 + ], + [ + "▁Ent", + -13.045940399169922 + ], + [ + "▁Ton", + -13.045994758605955 + ], + [ + "▁Battalion", + -13.046204566955566 + ], + [ + "▁philanthropic", + -13.046204566955566 + ], + [ + "▁masonry", + -13.046234130859377 + ], + [ + "▁Bark", + -13.04629611968994 + ], + [ + "2\"", + -13.0463228225708 + ], + [ + "NER", + -13.04635524749756 + ], + [ + "▁401(", + -13.046399116516112 + ], + [ + "▁Futures", + -13.046552658081056 + ], + [ + "▁Stereo", + -13.046557426452637 + ], + [ + "▁roundabout", + -13.046634674072266 + ], + [ + "▁Released", + -13.04668426513672 + ], + [ + "▁Benedict", + -13.046744346618652 + ], + [ + "▁Otto", + -13.046805381774902 + ], + [ + "ACT", + -13.046895027160645 + ], + [ + "▁pun", + -13.046913146972656 + ], + [ + "▁eminent", + -13.046927452087402 + ], + [ + "▁loveseat", + -13.046951293945312 + ], + [ + "stat", + -13.046954154968262 + ], + [ + "gur", + -13.046988487243652 + ], + [ + "▁focussed", + -13.04702377319336 + ], + [ + "Los", + -13.047072410583496 + ], + [ + "Traditional", + -13.047174453735352 + ], + [ + "▁refrigerate", + -13.047191619873049 + ], + [ + "▁Passport", + -13.047233581542969 + ], + [ + "▁Hernandez", + -13.047401428222656 + ], + [ + "conf", + -13.047457695007324 + ], + [ + "▁Shuttle", + -13.047612190246582 + ], + [ + "▁Archived", + -13.047618865966797 + ], + [ + "uku", + -13.047622680664062 + ], + [ + "▁£100", + -13.047714233398438 + ], + [ + "▁Fighter", + -13.047748565673828 + ], + [ + "▁debilitating", + -13.047791481018066 + ], + [ + "▁utilising", + -13.047791481018066 + ], + [ + "▁nationality", + -13.047845840454102 + ], + [ + "count", + -13.047921180725098 + ], + [ + "berger", + -13.048043251037598 + ], + [ + "▁instruct", + -13.048078536987305 + ], + [ + "▁coupe", + -13.048160552978516 + ], + [ + "atha", + -13.048248291015623 + ], + [ + "▁Lighthouse", + -13.048413276672363 + ], + [ + "▁smelling", + -13.048489570617676 + ], + [ + "▁Brenda", + -13.048542976379396 + ], + [ + "3).", + -13.04869556427002 + ], + [ + "▁Cypress", + -13.048725128173828 + ], + [ + "▁emphasizing", + -13.048761367797852 + ], + [ + "▁elbows", + -13.048810958862305 + ], + [ + "intensive", + -13.04896640777588 + ], + [ + "library", + -13.048970222473145 + ], + [ + "Schedule", + -13.048990249633787 + ], + [ + "hari", + -13.049073219299316 + ], + [ + "▁buys", + -13.049078941345217 + ], + [ + "▁mouths", + -13.049192428588867 + ], + [ + "▁Lara", + -13.049202919006348 + ], + [ + "▁Combo", + -13.049262046813965 + ], + [ + "▁Nurses", + -13.049285888671877 + ], + [ + "▁deleting", + -13.049382209777832 + ], + [ + "▁accompanies", + -13.049383163452148 + ], + [ + "▁winnings", + -13.049509048461914 + ], + [ + "▁Lavender", + -13.04953956604004 + ], + [ + "rea", + -13.049585342407228 + ], + [ + "420", + -13.04960823059082 + ], + [ + "▁portability", + -13.049628257751465 + ], + [ + "▁outbound", + -13.049701690673828 + ], + [ + "▁Kh", + -13.049723625183104 + ], + [ + "Ru", + -13.049779891967772 + ], + [ + "▁Shares", + -13.04981803894043 + ], + [ + "__", + -13.049847602844238 + ], + [ + "▁blackout", + -13.049934387207031 + ], + [ + "▁Gut", + -13.050030708312988 + ], + [ + "▁Napoleon", + -13.050048828125 + ], + [ + "▁Prospect", + -13.050056457519531 + ], + [ + "▁rims", + -13.050092697143556 + ], + [ + "▁Plot", + -13.05023956298828 + ], + [ + "▁Irene", + -13.050304412841797 + ], + [ + "lop", + -13.050331115722656 + ], + [ + "▁entrants", 
+ -13.050352096557615 + ], + [ + "maid", + -13.05035400390625 + ], + [ + "▁132", + -13.050504684448242 + ], + [ + "fields", + -13.050573348999023 + ], + [ + "7-2", + -13.050639152526855 + ], + [ + "LU", + -13.05064582824707 + ], + [ + "Flow", + -13.05066204071045 + ], + [ + "▁longstanding", + -13.050676345825195 + ], + [ + "▁roadway", + -13.05068016052246 + ], + [ + "DW", + -13.050726890563965 + ], + [ + "▁senators", + -13.05073356628418 + ], + [ + "atta", + -13.050763130187988 + ], + [ + "▁insecure", + -13.05087947845459 + ], + [ + "depending", + -13.05088710784912 + ], + [ + "▁Shooting", + -13.050898551940918 + ], + [ + "▁Wage", + -13.050979614257812 + ], + [ + "▁technologically", + -13.050989151000977 + ], + [ + "▁Everest", + -13.05117416381836 + ], + [ + "▁saliva", + -13.0512056350708 + ], + [ + "informed", + -13.051228523254396 + ], + [ + "Week", + -13.051246643066406 + ], + [ + "▁Lunar", + -13.05145263671875 + ], + [ + "▁Bash", + -13.051483154296877 + ], + [ + "▁Upload", + -13.051547050476074 + ], + [ + "native", + -13.051579475402832 + ], + [ + "ART", + -13.051701545715332 + ], + [ + "sent", + -13.051782608032228 + ], + [ + "▁pointless", + -13.051786422729492 + ], + [ + "3:", + -13.051791191101074 + ], + [ + "▁200-", + -13.05191135406494 + ], + [ + "▁False", + -13.051931381225586 + ], + [ + "▁powerpoint", + -13.051947593688965 + ], + [ + "ject", + -13.052102088928224 + ], + [ + "industrial", + -13.052135467529297 + ], + [ + "▁forgiven", + -13.052168846130373 + ], + [ + "▁thumbnail", + -13.052179336547852 + ], + [ + "▁trolley", + -13.052188873291016 + ], + [ + "▁manipulated", + -13.052265167236328 + ], + [ + "until", + -13.05231475830078 + ], + [ + "▁unconditional", + -13.052342414855955 + ], + [ + "▁Objective", + -13.052396774291992 + ], + [ + "▁delectable", + -13.052568435668944 + ], + [ + "▁Ahmedabad", + -13.052572250366213 + ], + [ + "played", + -13.052693367004396 + ], + [ + "▁Scha", + -13.052706718444824 + ], + [ + "Val", + -13.052739143371582 + ], + [ + "▁darkest", + -13.053144454956056 + ], + [ + "▁condemn", + -13.05320930480957 + ], + [ + "mass", + -13.053303718566896 + ], + [ + "ume", + -13.0534086227417 + ], + [ + "▁heartbreaking", + -13.053532600402832 + ], + [ + "▁INTER", + -13.053600311279297 + ], + [ + "▁Wheeler", + -13.053604125976562 + ], + [ + "▁inflated", + -13.053811073303224 + ], + [ + "▁Elaine", + -13.053855895996094 + ], + [ + "▁Powerful", + -13.053927421569824 + ], + [ + "▁retrospective", + -13.053932189941406 + ], + [ + "▁Column", + -13.053979873657228 + ], + [ + "▁Cer", + -13.054025650024414 + ], + [ + "▁diced", + -13.054035186767578 + ], + [ + "Bu", + -13.054072380065918 + ], + [ + "▁misunderstood", + -13.054168701171877 + ], + [ + "▁Sioux", + -13.054169654846191 + ], + [ + "▁equities", + -13.054169654846191 + ], + [ + "▁increments", + -13.054171562194824 + ], + [ + "▁sustainably", + -13.05419921875 + ], + [ + "▁grin", + -13.05432415008545 + ], + [ + "lei", + -13.054398536682127 + ], + [ + "▁Burning", + -13.054409980773926 + ], + [ + "003", + -13.05454444885254 + ], + [ + "eil", + -13.054553031921388 + ], + [ + "-75", + -13.054713249206545 + ], + [ + "▁Leah", + -13.054803848266602 + ], + [ + "▁Policies", + -13.054856300354004 + ], + [ + "▁Fairfax", + -13.054871559143066 + ], + [ + "▁qu", + -13.05490016937256 + ], + [ + "▁Width", + -13.054911613464355 + ], + [ + "MH", + -13.05495834350586 + ], + [ + "▁gritty", + -13.054965019226074 + ], + [ + "▁meatballs", + -13.054974555969238 + ], + [ + "Grab", + -13.054988861083984 + ], + [ + "430", + -13.055102348327637 
+ ], + [ + "▁GSM", + -13.055124282836914 + ], + [ + "▁Gallagher", + -13.055221557617188 + ], + [ + "▁Marines", + -13.055242538452148 + ], + [ + "▁coincide", + -13.05527114868164 + ], + [ + "USB", + -13.055327415466309 + ], + [ + "▁squirrel", + -13.05540943145752 + ], + [ + "▁HC", + -13.05541706085205 + ], + [ + "Wash", + -13.05545139312744 + ], + [ + "escent", + -13.055455207824709 + ], + [ + "▁atom", + -13.055548667907717 + ], + [ + "▁Realtor", + -13.055551528930664 + ], + [ + "quick", + -13.055665016174316 + ], + [ + "▁conveyed", + -13.055692672729492 + ], + [ + "▁radios", + -13.055739402770996 + ], + [ + "▁reassuring", + -13.055766105651855 + ], + [ + "▁rebellion", + -13.055768966674805 + ], + [ + "▁Quarry", + -13.055819511413574 + ], + [ + "▁tenancy", + -13.055834770202637 + ], + [ + "▁deferred", + -13.0558443069458 + ], + [ + "▁corridors", + -13.055882453918455 + ], + [ + "AIDS", + -13.055940628051758 + ], + [ + "▁LV", + -13.05605411529541 + ], + [ + "▁bully", + -13.05612564086914 + ], + [ + "▁Millions", + -13.056182861328123 + ], + [ + "ony", + -13.05620574951172 + ], + [ + "▁FO", + -13.056222915649414 + ], + [ + "▁countdown", + -13.056256294250488 + ], + [ + "▁ATP", + -13.056297302246094 + ], + [ + "▁punches", + -13.056354522705078 + ], + [ + "▁furnish", + -13.056473731994627 + ], + [ + "▁homestead", + -13.056510925292969 + ], + [ + "bearing", + -13.05652141571045 + ], + [ + "▁plains", + -13.05659008026123 + ], + [ + "▁Boyd", + -13.056593894958496 + ], + [ + "▁hindi", + -13.056676864624023 + ], + [ + "▁roman", + -13.056736946105955 + ], + [ + "asia", + -13.056743621826172 + ], + [ + "▁AGO", + -13.056760787963867 + ], + [ + "Move", + -13.056797981262209 + ], + [ + "▁Evo", + -13.056836128234863 + ], + [ + "drug", + -13.056869506835938 + ], + [ + "▁glands", + -13.056883811950684 + ], + [ + "window", + -13.056913375854492 + ], + [ + "▁crawling", + -13.05693817138672 + ], + [ + "▁Amateur", + -13.05698013305664 + ], + [ + "Program", + -13.056981086730955 + ], + [ + "CEO", + -13.057076454162598 + ], + [ + "▁Kerr", + -13.057100296020508 + ], + [ + "▁prosper", + -13.057122230529783 + ], + [ + "Canadian", + -13.05714988708496 + ], + [ + "perhaps", + -13.057153701782228 + ], + [ + "▁alloys", + -13.057171821594238 + ], + [ + "▁Adjust", + -13.057195663452148 + ], + [ + "▁mommy", + -13.05721664428711 + ], + [ + "▁Permit", + -13.057239532470703 + ], + [ + "Department", + -13.0572509765625 + ], + [ + "▁Basil", + -13.057343482971191 + ], + [ + "▁Registrar", + -13.057369232177734 + ], + [ + "▁prowess", + -13.057369232177734 + ], + [ + "▁undeniable", + -13.057369232177734 + ], + [ + "▁dubious", + -13.057421684265137 + ], + [ + "▁hopeless", + -13.057429313659668 + ], + [ + "▁Duct", + -13.057446479797363 + ], + [ + "▁Wonderland", + -13.057480812072754 + ], + [ + "▁Eggs", + -13.057604789733888 + ], + [ + "▁signup", + -13.057613372802734 + ], + [ + "▁Hue", + -13.057721138000488 + ], + [ + "Angel", + -13.05772590637207 + ], + [ + "25.", + -13.057788848876951 + ], + [ + "▁SMART", + -13.057809829711914 + ], + [ + "▁9.5", + -13.057816505432127 + ], + [ + "▁Chow", + -13.057916641235352 + ], + [ + "▁shortcomings", + -13.057990074157717 + ], + [ + "▁Rugs", + -13.058062553405762 + ], + [ + "▁lien", + -13.05829620361328 + ], + [ + "treatment", + -13.058313369750977 + ], + [ + "olds", + -13.058399200439451 + ], + [ + "▁Almond", + -13.058443069458008 + ], + [ + "▁Bubble", + -13.058443069458008 + ], + [ + "▁firewood", + -13.05855655670166 + ], + [ + "Ideal", + -13.058646202087402 + ], + [ + "complete", + 
-13.05866527557373 + ], + [ + "▁Dumpster", + -13.05881118774414 + ], + [ + "truck", + -13.05882167816162 + ], + [ + "▁Potato", + -13.058873176574709 + ], + [ + "lai", + -13.058899879455566 + ], + [ + "▁harmonic", + -13.05896282196045 + ], + [ + "▁hemisphere", + -13.058974266052246 + ], + [ + "▁sluggish", + -13.058974266052246 + ], + [ + "▁synagogue", + -13.058974266052246 + ], + [ + "▁Regiment", + -13.05898380279541 + ], + [ + "▁Ahmad", + -13.058990478515623 + ], + [ + "▁autoimmune", + -13.058992385864258 + ], + [ + "▁boycott", + -13.059022903442385 + ], + [ + "▁nfl", + -13.059029579162598 + ], + [ + "ils", + -13.05903434753418 + ], + [ + "▁firstly", + -13.059087753295898 + ], + [ + "▁rivalry", + -13.059099197387695 + ], + [ + "▁Costumes", + -13.059118270874023 + ], + [ + "▁Output", + -13.059120178222656 + ], + [ + "▁Gale", + -13.059144973754885 + ], + [ + "essay", + -13.05925178527832 + ], + [ + "▁Assam", + -13.059328079223633 + ], + [ + "managed", + -13.059337615966797 + ], + [ + "0.5", + -13.059393882751465 + ], + [ + "▁radioactive", + -13.05947208404541 + ], + [ + "▁chemo", + -13.059488296508787 + ], + [ + "Secondly", + -13.059548377990724 + ], + [ + "▁grounding", + -13.05960464477539 + ], + [ + "▁turnkey", + -13.059675216674805 + ], + [ + "▁summertime", + -13.059749603271484 + ], + [ + "▁Aloe", + -13.059877395629885 + ], + [ + "▁Poster", + -13.059903144836426 + ], + [ + "hawk", + -13.059921264648438 + ], + [ + "▁1-5", + -13.059953689575195 + ], + [ + "▁Sis", + -13.060056686401367 + ], + [ + "▁reload", + -13.06014633178711 + ], + [ + "▁Advocate", + -13.06018352508545 + ], + [ + "Development", + -13.060247421264648 + ], + [ + "radio", + -13.06033992767334 + ], + [ + "▁Variety", + -13.060341835021973 + ], + [ + "▁peeled", + -13.060351371765137 + ], + [ + "College", + -13.060433387756348 + ], + [ + "▁Drill", + -13.060439109802246 + ], + [ + "▁swirl", + -13.060501098632812 + ], + [ + "▁unrest", + -13.060559272766112 + ], + [ + "▁eldest", + -13.060582160949709 + ], + [ + "▁quotations", + -13.060589790344238 + ], + [ + "▁jeep", + -13.060632705688477 + ], + [ + "Hz", + -13.060712814331056 + ], + [ + "▁birthplace", + -13.06071662902832 + ], + [ + "hip", + -13.060749053955078 + ], + [ + "▁Adv", + -13.060768127441406 + ], + [ + "▁10:", + -13.06077480316162 + ], + [ + "▁rifles", + -13.060812950134276 + ], + [ + "▁tooling", + -13.0608549118042 + ], + [ + "stuff", + -13.06088638305664 + ], + [ + "▁Rider", + -13.060911178588867 + ], + [ + "▁Pediatric", + -13.06100082397461 + ], + [ + "▁Acura", + -13.061002731323242 + ], + [ + "AK", + -13.061149597167969 + ], + [ + "▁SERVICES", + -13.061223030090332 + ], + [ + "▁seventy", + -13.06122875213623 + ], + [ + "▁explode", + -13.061299324035645 + ], + [ + "▁paw", + -13.06130599975586 + ], + [ + "▁DOES", + -13.061440467834473 + ], + [ + "▁Triumph", + -13.061559677124023 + ], + [ + "early", + -13.061577796936035 + ], + [ + "▁Porch", + -13.0615816116333 + ], + [ + "blood", + -13.061636924743652 + ], + [ + "▁640", + -13.061758995056152 + ], + [ + "▁Hil", + -13.061798095703123 + ], + [ + "immer", + -13.061918258666992 + ], + [ + "▁Roulette", + -13.061941146850586 + ], + [ + "TUR", + -13.061957359313965 + ], + [ + "▁Limo", + -13.06212043762207 + ], + [ + "▁Bron", + -13.06217098236084 + ], + [ + "▁philanthropy", + -13.0621919631958 + ], + [ + "▁stabilization", + -13.0621919631958 + ], + [ + "▁transient", + -13.062206268310549 + ], + [ + "▁sensations", + -13.062278747558594 + ], + [ + "ANG", + -13.06235694885254 + ], + [ + "▁Knit", + -13.062390327453612 + ], + [ + 
"▁45-", + -13.06255340576172 + ], + [ + "atti", + -13.062640190124512 + ], + [ + "▁monk", + -13.062763214111328 + ], + [ + "▁£20", + -13.06281566619873 + ], + [ + "▁fiddle", + -13.062865257263184 + ], + [ + "fighting", + -13.063150405883787 + ], + [ + "KB", + -13.063233375549316 + ], + [ + "▁infertility", + -13.063346862792969 + ], + [ + "▁Sher", + -13.063359260559082 + ], + [ + "▁Heidi", + -13.063379287719728 + ], + [ + "▁Sw", + -13.063416481018066 + ], + [ + "▁gymnastics", + -13.063444137573242 + ], + [ + "Br", + -13.063475608825684 + ], + [ + "▁Tourist", + -13.06349277496338 + ], + [ + "▁weary", + -13.063508987426758 + ], + [ + "▁tackled", + -13.063563346862791 + ], + [ + "▁Elections", + -13.063580513000488 + ], + [ + "▁meaningless", + -13.063627243041992 + ], + [ + "▁pedals", + -13.06364631652832 + ], + [ + "Planning", + -13.063766479492188 + ], + [ + "▁HY", + -13.063796043395996 + ], + [ + "▁distillery", + -13.063840866088867 + ], + [ + "▁wearer", + -13.063859939575195 + ], + [ + "▁playwright", + -13.063912391662598 + ], + [ + "into", + -13.06394386291504 + ], + [ + "▁honorary", + -13.06399917602539 + ], + [ + "micro", + -13.064061164855955 + ], + [ + "▁Slack", + -13.064081192016602 + ], + [ + "Count", + -13.064188957214355 + ], + [ + "▁Yay", + -13.06427001953125 + ], + [ + "toe", + -13.064309120178224 + ], + [ + "▁Belarus", + -13.064321517944336 + ], + [ + "mart", + -13.064386367797852 + ], + [ + "▁massages", + -13.064483642578123 + ], + [ + "▁optimised", + -13.064519882202148 + ], + [ + "lag", + -13.06454849243164 + ], + [ + "▁boo", + -13.064557075500488 + ], + [ + "▁Cage", + -13.064663887023926 + ], + [ + "▁upto", + -13.064667701721191 + ], + [ + "▁horizons", + -13.06468677520752 + ], + [ + "▁Sword", + -13.064704895019531 + ], + [ + "▁Jerome", + -13.064827919006348 + ], + [ + "Sal", + -13.064923286437988 + ], + [ + "▁Employers", + -13.064949035644531 + ], + [ + "Jun", + -13.065074920654297 + ], + [ + "▁3/4\"", + -13.065093040466309 + ], + [ + "▁Daniels", + -13.065129280090332 + ], + [ + "▁Scientist", + -13.065147399902344 + ], + [ + "watt", + -13.065299034118652 + ], + [ + "▁Jelly", + -13.065342903137209 + ], + [ + "▁Possible", + -13.065384864807127 + ], + [ + "▁Polyester", + -13.065394401550291 + ], + [ + "▁Bedrooms", + -13.065448760986328 + ], + [ + "▁fisherman", + -13.065545082092283 + ], + [ + "▁gearbox", + -13.06556224822998 + ], + [ + "▁leveraged", + -13.065600395202637 + ], + [ + "▁IDE", + -13.06562614440918 + ], + [ + "▁#6", + -13.065866470336914 + ], + [ + "NES", + -13.065897941589355 + ], + [ + "▁Replica", + -13.065902709960938 + ], + [ + "nnen", + -13.06591796875 + ], + [ + "▁interpreter", + -13.065959930419922 + ], + [ + "▁Sixth", + -13.06601619720459 + ], + [ + "23.", + -13.06606101989746 + ], + [ + "TRA", + -13.06607151031494 + ], + [ + "▁nearing", + -13.066083908081056 + ], + [ + "▁Sig", + -13.06611442565918 + ], + [ + "Comp", + -13.06621551513672 + ], + [ + "ango", + -13.06628704071045 + ], + [ + "▁needy", + -13.066301345825195 + ], + [ + "▁Drugs", + -13.066411018371582 + ], + [ + "▁traverse", + -13.066475868225098 + ], + [ + "▁battled", + -13.066479682922363 + ], + [ + "6.5", + -13.066506385803224 + ], + [ + "▁wand", + -13.066583633422852 + ], + [ + "▁JR", + -13.066591262817385 + ], + [ + "▁grouping", + -13.066874504089355 + ], + [ + "▁dehydration", + -13.067038536071776 + ], + [ + "Seven", + -13.067049026489258 + ], + [ + "Apparently", + -13.067057609558104 + ], + [ + "Eventually", + -13.067057609558104 + ], + [ + "▁sculptor", + -13.067194938659668 + ], + [ + 
"▁Constitutional", + -13.067203521728516 + ], + [ + "ffer", + -13.067285537719728 + ], + [ + "▁murders", + -13.067301750183104 + ], + [ + "▁sandstone", + -13.067323684692385 + ], + [ + "▁translators", + -13.067480087280272 + ], + [ + "gam", + -13.067625045776367 + ], + [ + "▁inhibit", + -13.067630767822266 + ], + [ + "▁phishing", + -13.067697525024414 + ], + [ + "Min", + -13.06772232055664 + ], + [ + "▁Flo", + -13.067813873291016 + ], + [ + "inski", + -13.067831993103027 + ], + [ + "▁Bravo", + -13.067896842956545 + ], + [ + "▁midway", + -13.06793975830078 + ], + [ + "▁(2013)", + -13.067981719970703 + ], + [ + "▁Ned", + -13.068004608154297 + ], + [ + "lev", + -13.068026542663574 + ], + [ + "▁inhibitors", + -13.068245887756348 + ], + [ + "▁retirees", + -13.068302154541016 + ], + [ + "▁ny", + -13.068307876586914 + ], + [ + "▁dams", + -13.068415641784668 + ], + [ + "▁Spotlight", + -13.068434715270996 + ], + [ + "▁mound", + -13.068475723266602 + ], + [ + "Guard", + -13.068501472473145 + ], + [ + "▁Valentines", + -13.068514823913574 + ], + [ + "▁NF", + -13.068591117858888 + ], + [ + "▁Gee", + -13.068601608276367 + ], + [ + "▁duly", + -13.068623542785645 + ], + [ + "yar", + -13.068638801574709 + ], + [ + "▁Barbie", + -13.068642616271973 + ], + [ + "▁bubbly", + -13.068659782409668 + ], + [ + "▁geothermal", + -13.068662643432615 + ], + [ + "▁benign", + -13.068674087524414 + ], + [ + "▁Doll", + -13.06869888305664 + ], + [ + "eating", + -13.06870937347412 + ], + [ + "Guests", + -13.068775177001951 + ], + [ + "▁MUCH", + -13.0687894821167 + ], + [ + "▁radial", + -13.068814277648926 + ], + [ + "▁Brittany", + -13.068842887878418 + ], + [ + "▁energized", + -13.069012641906738 + ], + [ + "▁201", + -13.069028854370115 + ], + [ + "▁Lyrics", + -13.069028854370115 + ], + [ + "▁Realtors", + -13.06907081604004 + ], + [ + "Ability", + -13.069084167480469 + ], + [ + "▁Glow", + -13.069086074829102 + ], + [ + "▁Humanity", + -13.069108963012695 + ], + [ + "▁ABOUT", + -13.069116592407228 + ], + [ + "▁undo", + -13.069268226623535 + ], + [ + "▁resumed", + -13.069334983825684 + ], + [ + "▁Exploration", + -13.06941032409668 + ], + [ + "▁Mbps", + -13.069442749023438 + ], + [ + "Mother", + -13.069658279418944 + ], + [ + "▁1850", + -13.06982135772705 + ], + [ + "▁harp", + -13.069890022277832 + ], + [ + "▁accelerator", + -13.070087432861328 + ], + [ + "pee", + -13.070101737976074 + ], + [ + "▁sidelines", + -13.070148468017578 + ], + [ + "Important", + -13.070164680480955 + ], + [ + "▁craze", + -13.070323944091797 + ], + [ + "▁foreseeable", + -13.070340156555176 + ], + [ + "▁pawn", + -13.070425987243652 + ], + [ + "beam", + -13.07049560546875 + ], + [ + "▁deco", + -13.070508003234863 + ], + [ + "▁dads", + -13.070511817932127 + ], + [ + "▁scanners", + -13.070605278015137 + ], + [ + "▁Perform", + -13.07071018218994 + ], + [ + "▁GTX", + -13.070711135864258 + ], + [ + "▁Ricky", + -13.070817947387695 + ], + [ + "▁directional", + -13.070823669433594 + ], + [ + "▁Chronicle", + -13.070874214172363 + ], + [ + "▁inlet", + -13.070874214172363 + ], + [ + "▁ACM", + -13.070890426635742 + ], + [ + "dent", + -13.070995330810549 + ], + [ + "720", + -13.07105827331543 + ], + [ + "▁Cobb", + -13.071072578430176 + ], + [ + "-150", + -13.07109832763672 + ], + [ + "PU", + -13.071099281311035 + ], + [ + "▁Mohammad", + -13.07114601135254 + ], + [ + "▁breads", + -13.071208000183104 + ], + [ + "RON", + -13.071236610412598 + ], + [ + "PN", + -13.071277618408203 + ], + [ + "▁librarian", + -13.0714750289917 + ], + [ + "▁Alexis", + -13.071524620056152 + ], 
+ [ + "▁£7", + -13.07158374786377 + ], + [ + "JS", + -13.071635246276855 + ], + [ + "NJ", + -13.071714401245115 + ], + [ + "▁crumble", + -13.07178783416748 + ], + [ + "▁analysing", + -13.07190990447998 + ], + [ + "▁Pea", + -13.0719633102417 + ], + [ + "▁slender", + -13.072023391723633 + ], + [ + "feed", + -13.072066307067873 + ], + [ + "PLE", + -13.07234001159668 + ], + [ + "printed", + -13.072376251220703 + ], + [ + "▁202", + -13.072500228881836 + ], + [ + "▁sideways", + -13.072502136230469 + ], + [ + "ssa", + -13.07261848449707 + ], + [ + "▁Intervention", + -13.072643280029297 + ], + [ + "MN", + -13.07265567779541 + ], + [ + "▁[[", + -13.072725296020508 + ], + [ + "blade", + -13.072765350341797 + ], + [ + "▁Fahrenheit", + -13.072901725769045 + ], + [ + "att", + -13.072917938232422 + ], + [ + "▁poop", + -13.073050498962402 + ], + [ + "▁halloween", + -13.073063850402832 + ], + [ + "▁6000", + -13.073101997375488 + ], + [ + "rz", + -13.07314682006836 + ], + [ + "tain", + -13.07320499420166 + ], + [ + "▁relational", + -13.073241233825684 + ], + [ + "▁restricting", + -13.073254585266112 + ], + [ + "rica", + -13.073267936706545 + ], + [ + "▁chaired", + -13.073287010192873 + ], + [ + "▁Exhibit", + -13.073339462280272 + ], + [ + "▁christian", + -13.07336139678955 + ], + [ + "▁Thousand", + -13.073387145996094 + ], + [ + "▁Feather", + -13.073393821716309 + ], + [ + "▁Fresno", + -13.073538780212402 + ], + [ + "▁calves", + -13.073551177978516 + ], + [ + "▁rechargeable", + -13.073687553405762 + ], + [ + "▁plump", + -13.073702812194824 + ], + [ + "gang", + -13.073732376098633 + ], + [ + "Script", + -13.073742866516112 + ], + [ + "▁Bridges", + -13.073800086975098 + ], + [ + "▁iPads", + -13.073810577392578 + ], + [ + "dah", + -13.07387638092041 + ], + [ + "engine", + -13.073908805847168 + ], + [ + "▁handwriting", + -13.074029922485352 + ], + [ + "▁skipper", + -13.074068069458008 + ], + [ + "▁28-", + -13.074126243591309 + ], + [ + "▁$20,000", + -13.074172973632812 + ], + [ + "▁textual", + -13.074177742004396 + ], + [ + "▁golfer", + -13.07419204711914 + ], + [ + "number", + -13.074211120605469 + ], + [ + "▁cropped", + -13.074244499206545 + ], + [ + "▁Rot", + -13.074292182922363 + ], + [ + "▁$500,000", + -13.074400901794434 + ], + [ + "▁Stra", + -13.074498176574709 + ], + [ + "Fly", + -13.074654579162598 + ], + [ + "▁Bolivia", + -13.075032234191896 + ], + [ + "haul", + -13.075088500976562 + ], + [ + "▁Bethlehem", + -13.07516860961914 + ], + [ + "▁clarified", + -13.07516860961914 + ], + [ + "▁Browns", + -13.07529354095459 + ], + [ + "▁Wiley", + -13.075369834899902 + ], + [ + "▁rebates", + -13.075387001037598 + ], + [ + "▁Odd", + -13.075421333312988 + ], + [ + "▁methane", + -13.075434684753418 + ], + [ + "▁4)", + -13.075457572937012 + ], + [ + "▁curing", + -13.075483322143556 + ], + [ + "▁withdrawals", + -13.075546264648438 + ], + [ + "Established", + -13.075568199157717 + ], + [ + "▁deducted", + -13.075578689575195 + ], + [ + "▁bookkeeping", + -13.07558536529541 + ], + [ + "president", + -13.075640678405762 + ], + [ + "▁Leaving", + -13.075751304626465 + ], + [ + "▁Prom", + -13.075761795043944 + ], + [ + "▁Fits", + -13.07579517364502 + ], + [ + "▁Sor", + -13.076172828674316 + ], + [ + "wel", + -13.07630729675293 + ], + [ + "▁SAVE", + -13.076321601867676 + ], + [ + "▁Clan", + -13.076335906982422 + ], + [ + "jen", + -13.076440811157228 + ], + [ + "▁Scrap", + -13.076571464538574 + ], + [ + "▁deprivation", + -13.076802253723145 + ], + [ + "▁Species", + -13.076830863952637 + ], + [ + "▁tranquility", + 
-13.076866149902344 + ], + [ + "▁subtitles", + -13.076903343200684 + ], + [ + "▁Lei", + -13.076991081237791 + ], + [ + "Disclaimer", + -13.07704734802246 + ], + [ + "▁Accreditation", + -13.077141761779783 + ], + [ + "▁LIKE", + -13.07714557647705 + ], + [ + "▁carte", + -13.077146530151367 + ], + [ + "1.4", + -13.077193260192873 + ], + [ + "175", + -13.077223777770996 + ], + [ + "▁backlash", + -13.07737159729004 + ], + [ + "balanced", + -13.077418327331545 + ], + [ + "▁Juliet", + -13.077466011047363 + ], + [ + "▁Needle", + -13.07751178741455 + ], + [ + "▁collisions", + -13.07754611968994 + ], + [ + "enda", + -13.077614784240724 + ], + [ + "▁lasers", + -13.077675819396973 + ], + [ + "▁Vacuum", + -13.077826499938965 + ], + [ + "RV", + -13.077899932861328 + ], + [ + "▁gamer", + -13.07790470123291 + ], + [ + "▁Andreas", + -13.0781888961792 + ], + [ + "▁vitro", + -13.078378677368164 + ], + [ + "▁Magnet", + -13.07840061187744 + ], + [ + "▁safeguards", + -13.07841968536377 + ], + [ + "▁bankrupt", + -13.078466415405272 + ], + [ + "▁dormant", + -13.078466415405272 + ], + [ + "▁Jio", + -13.078493118286133 + ], + [ + "▁inadvertently", + -13.078495025634766 + ], + [ + "▁outage", + -13.078495979309082 + ], + [ + "▁graphite", + -13.07850742340088 + ], + [ + "▁Stat", + -13.078600883483888 + ], + [ + "▁Lindsey", + -13.078658103942873 + ], + [ + "▁contenders", + -13.078692436218262 + ], + [ + "AO", + -13.078750610351562 + ], + [ + "▁LO", + -13.07877254486084 + ], + [ + ":11", + -13.078826904296877 + ], + [ + "USE", + -13.078903198242188 + ], + [ + "▁Angle", + -13.078985214233398 + ], + [ + "Body", + -13.078994750976562 + ], + [ + "▁freaking", + -13.079111099243164 + ], + [ + "wig", + -13.079143524169922 + ], + [ + "Ju", + -13.0791654586792 + ], + [ + "▁Exactly", + -13.079233169555664 + ], + [ + "▁unfolding", + -13.079322814941406 + ], + [ + "Friends", + -13.079378128051758 + ], + [ + "▁Tottenham", + -13.079390525817873 + ], + [ + "▁45%", + -13.079442024230955 + ], + [ + "▁Idol", + -13.079483032226562 + ], + [ + "Season", + -13.07949447631836 + ], + [ + "▁taxed", + -13.07969093322754 + ], + [ + "sorry", + -13.079829216003418 + ], + [ + "▁completes", + -13.080011367797852 + ], + [ + "▁Stratford", + -13.080092430114746 + ], + [ + "▁Guinness", + -13.080093383789062 + ], + [ + "▁fatalities", + -13.080114364624023 + ], + [ + "▁Surprise", + -13.080144882202148 + ], + [ + "▁Billion", + -13.080282211303713 + ], + [ + "clip", + -13.08029079437256 + ], + [ + "▁axe", + -13.080304145812988 + ], + [ + "▁TLC", + -13.080367088317873 + ], + [ + "▁Kot", + -13.080416679382324 + ], + [ + "▁thematic", + -13.080421447753906 + ], + [ + "▁Mercer", + -13.080472946166992 + ], + [ + "▁Buhari", + -13.080628395080566 + ], + [ + "Sync", + -13.080632209777832 + ], + [ + "▁Implementation", + -13.08066463470459 + ], + [ + "Dog", + -13.080790519714355 + ], + [ + "cold", + -13.08084487915039 + ], + [ + "340", + -13.080920219421388 + ], + [ + "▁revert", + -13.080972671508787 + ], + [ + "▁Alicia", + -13.081006050109863 + ], + [ + "▁sheltered", + -13.081100463867188 + ], + [ + "▁sauté", + -13.081324577331545 + ], + [ + "▁Tory", + -13.081356048583984 + ], + [ + "▁preseason", + -13.08137321472168 + ], + [ + "▁Toilet", + -13.081426620483398 + ], + [ + "Spot", + -13.08145523071289 + ], + [ + "things", + -13.081510543823242 + ], + [ + "▁Jedi", + -13.081536293029783 + ], + [ + "▁Outer", + -13.081604957580566 + ], + [ + "▁puppet", + -13.081670761108398 + ], + [ + "▁noodle", + -13.08168888092041 + ], + [ + "▁happiest", + -13.081720352172852 + ], + [ + 
"▁revolving", + -13.081731796264648 + ], + [ + "centre", + -13.081744194030762 + ], + [ + "▁paddling", + -13.081767082214355 + ], + [ + "▁Casual", + -13.081891059875488 + ], + [ + "secondary", + -13.081904411315918 + ], + [ + "fruit", + -13.081985473632812 + ], + [ + "▁Turning", + -13.08199405670166 + ], + [ + "▁iceberg", + -13.082030296325684 + ], + [ + "subject", + -13.082144737243652 + ], + [ + "▁Trucks", + -13.082165718078612 + ], + [ + "analysis", + -13.082183837890623 + ], + [ + "Legal", + -13.082186698913574 + ], + [ + "▁WORK", + -13.08219051361084 + ], + [ + "ulator", + -13.08228874206543 + ], + [ + "▁Tsu", + -13.082358360290527 + ], + [ + "▁redeemed", + -13.082465171813965 + ], + [ + "uploads", + -13.08250331878662 + ], + [ + "▁Reply", + -13.08250331878662 + ], + [ + "▁Identify", + -13.082529067993164 + ], + [ + "▁hires", + -13.082545280456545 + ], + [ + "LIC", + -13.08259105682373 + ], + [ + "▁Wouldn", + -13.082660675048828 + ], + [ + "▁Macy", + -13.082687377929688 + ], + [ + "▁Stem", + -13.082716941833496 + ], + [ + "▁Destination", + -13.083029747009276 + ], + [ + "▁AJ", + -13.083230018615724 + ], + [ + "30.", + -13.083335876464844 + ], + [ + "▁policing", + -13.083366394042969 + ], + [ + "▁Hyatt", + -13.083369255065918 + ], + [ + "▁QuickBooks", + -13.083380699157717 + ], + [ + "▁Tomb", + -13.083414077758787 + ], + [ + "▁Certificates", + -13.083451271057127 + ], + [ + "▁uplift", + -13.083488464355469 + ], + [ + "▁signifies", + -13.08350658416748 + ], + [ + "Jay", + -13.083521842956545 + ], + [ + "▁saute", + -13.083608627319336 + ], + [ + "▁Tutorial", + -13.08370876312256 + ], + [ + "▁UNC", + -13.083714485168455 + ], + [ + "▁joys", + -13.08380126953125 + ], + [ + "▁Newspaper", + -13.083806991577148 + ], + [ + "mut", + -13.083888053894045 + ], + [ + "▁Miranda", + -13.083922386169434 + ], + [ + "idae", + -13.083984375 + ], + [ + "Earth", + -13.08425235748291 + ], + [ + "Features", + -13.084263801574709 + ], + [ + "▁flourishing", + -13.08436107635498 + ], + [ + "▁timed", + -13.084404945373535 + ], + [ + "▁Selected", + -13.084559440612791 + ], + [ + "▁Companion", + -13.084670066833496 + ], + [ + "▁promoter", + -13.084720611572266 + ], + [ + "▁deployments", + -13.08494758605957 + ], + [ + "▁impartial", + -13.084993362426758 + ], + [ + "AE", + -13.085047721862791 + ], + [ + "▁Instruction", + -13.085068702697754 + ], + [ + "▁Intern", + -13.085070610046388 + ], + [ + "▁exposes", + -13.0850830078125 + ], + [ + "▁tanning", + -13.085089683532717 + ], + [ + "▁Knox", + -13.085113525390623 + ], + [ + "fledged", + -13.085115432739258 + ], + [ + "▁EF", + -13.08511734008789 + ], + [ + "-49", + -13.085187911987305 + ], + [ + "UA", + -13.08521556854248 + ], + [ + "▁cumin", + -13.085335731506348 + ], + [ + "▁woo", + -13.085360527038574 + ], + [ + "/100", + -13.08536148071289 + ], + [ + "▁pea", + -13.085443496704102 + ], + [ + "▁commonplace", + -13.085497856140137 + ], + [ + "▁partisan", + -13.08549976348877 + ], + [ + "Entrepreneurship", + -13.085556030273438 + ], + [ + "▁CLI", + -13.085633277893066 + ], + [ + "▁creditor", + -13.085687637329102 + ], + [ + "liquid", + -13.085711479187012 + ], + [ + "HOW", + -13.085731506347656 + ], + [ + "▁microsoft", + -13.0858736038208 + ], + [ + "feet", + -13.085895538330078 + ], + [ + "▁teal", + -13.085933685302734 + ], + [ + "▁Photograph", + -13.085978507995604 + ], + [ + "▁Stoke", + -13.086050987243652 + ], + [ + "▁inconvenient", + -13.086057662963867 + ], + [ + "▁(40", + -13.086177825927734 + ], + [ + "▁Somehow", + -13.086254119873049 + ], + [ + "▁1400", + 
-13.086288452148438 + ], + [ + "▁messed", + -13.086492538452148 + ], + [ + "▁BLACK", + -13.086496353149414 + ], + [ + "hero", + -13.086503028869627 + ], + [ + "▁1912", + -13.086503982543944 + ], + [ + "▁Owl", + -13.08652400970459 + ], + [ + "▁contraction", + -13.086536407470703 + ], + [ + "cation", + -13.086556434631348 + ], + [ + "▁newbie", + -13.086584091186523 + ], + [ + "Stir", + -13.086606979370115 + ], + [ + "▁1931", + -13.086609840393066 + ], + [ + "testing", + -13.086642265319824 + ], + [ + "▁Chronic", + -13.086668968200684 + ], + [ + "▁mildly", + -13.08668613433838 + ], + [ + "HF", + -13.086687088012695 + ], + [ + "▁Galway", + -13.086743354797363 + ], + [ + "▁Hazard", + -13.086753845214844 + ], + [ + "▁Planned", + -13.086762428283691 + ], + [ + "▁functionalities", + -13.086803436279297 + ], + [ + "▁halftime", + -13.086907386779783 + ], + [ + "▁Anatomy", + -13.086966514587402 + ], + [ + "▁Polaris", + -13.087035179138184 + ], + [ + "▁localized", + -13.087173461914062 + ], + [ + "▁1/8", + -13.087313652038574 + ], + [ + "▁420", + -13.087358474731444 + ], + [ + "creation", + -13.08737850189209 + ], + [ + "▁meme", + -13.08745288848877 + ], + [ + "-33", + -13.087516784667969 + ], + [ + "/04", + -13.087536811828612 + ], + [ + "▁Compatible", + -13.087559700012209 + ], + [ + "▁numb", + -13.08760929107666 + ], + [ + "▁occupying", + -13.087689399719238 + ], + [ + "dynamic", + -13.087764739990234 + ], + [ + "▁decree", + -13.087779998779297 + ], + [ + "▁hotter", + -13.087835311889648 + ], + [ + "▁Agri", + -13.087836265563965 + ], + [ + "▁Helena", + -13.087921142578123 + ], + [ + "lining", + -13.087973594665527 + ], + [ + "▁Lama", + -13.088107109069824 + ], + [ + "▁Font", + -13.088111877441406 + ], + [ + "▁Ard", + -13.088136672973633 + ], + [ + "▁Falcons", + -13.088187217712402 + ], + [ + "sters", + -13.088297843933104 + ], + [ + "▁convergence", + -13.088319778442385 + ], + [ + "▁bluetooth", + -13.088380813598633 + ], + [ + "▁payouts", + -13.08857536315918 + ], + [ + "▁Searching", + -13.088623046875 + ], + [ + "▁lighted", + -13.08863925933838 + ], + [ + "▁MAP", + -13.088719367980955 + ], + [ + "▁Ellie", + -13.088743209838867 + ], + [ + "▁Sultan", + -13.088778495788574 + ], + [ + "▁1945", + -13.088814735412598 + ], + [ + "▁Grip", + -13.088891983032228 + ], + [ + "▁Trusted", + -13.088911056518556 + ], + [ + "gly", + -13.088948249816896 + ], + [ + "+1", + -13.089052200317385 + ], + [ + "▁overwhelm", + -13.089065551757812 + ], + [ + "▁;)", + -13.089312553405762 + ], + [ + "blo", + -13.089315414428713 + ], + [ + "same", + -13.089369773864746 + ], + [ + "WG", + -13.08937931060791 + ], + [ + "▁Purchasing", + -13.08946132659912 + ], + [ + "▁Merit", + -13.089556694030762 + ], + [ + "esco", + -13.089618682861328 + ], + [ + "stars", + -13.089653968811035 + ], + [ + "gri", + -13.089704513549805 + ], + [ + "▁Lecture", + -13.089764595031738 + ], + [ + "▁1.9", + -13.08981227874756 + ], + [ + "total", + -13.089889526367188 + ], + [ + "▁obtainable", + -13.089916229248049 + ], + [ + "▁steroid", + -13.089919090270996 + ], + [ + "▁Lancashire", + -13.089971542358398 + ], + [ + "▁affluent", + -13.089971542358398 + ], + [ + "▁nutmeg", + -13.089971542358398 + ], + [ + "▁culmination", + -13.089972496032717 + ], + [ + "▁Cologne", + -13.089974403381348 + ], + [ + "▁charismatic", + -13.090014457702637 + ], + [ + "▁chipped", + -13.090039253234863 + ], + [ + "▁din", + -13.09018611907959 + ], + [ + "▁bri", + -13.090205192565918 + ], + [ + "▁ATS", + -13.09027099609375 + ], + [ + "▁Weddings", + -13.090280532836914 + ], + [ + 
"camera", + -13.090336799621582 + ], + [ + "▁sump", + -13.09038257598877 + ], + [ + "▁minors", + -13.090476036071776 + ], + [ + "Creative", + -13.090572357177734 + ], + [ + "▁Cel", + -13.09061336517334 + ], + [ + "310", + -13.090631484985352 + ], + [ + "target", + -13.090651512145996 + ], + [ + "▁disney", + -13.090656280517578 + ], + [ + "▁DES", + -13.090764999389648 + ], + [ + "▁Strand", + -13.09079360961914 + ], + [ + "Bee", + -13.090825080871582 + ], + [ + "iman", + -13.090896606445312 + ], + [ + "▁Kelley", + -13.090914726257324 + ], + [ + ".#", + -13.090967178344728 + ], + [ + "Ki", + -13.09106159210205 + ], + [ + "Multiple", + -13.091062545776367 + ], + [ + "▁sal", + -13.091106414794922 + ], + [ + "▁invent", + -13.09111785888672 + ], + [ + "▁IG", + -13.09117031097412 + ], + [ + "▁attest", + -13.09127140045166 + ], + [ + "▁Wimbledon", + -13.091312408447266 + ], + [ + "Marketing", + -13.091362953186035 + ], + [ + "inated", + -13.091520309448242 + ], + [ + "▁liberation", + -13.091639518737791 + ], + [ + "14)", + -13.091656684875488 + ], + [ + "▁locality", + -13.09169864654541 + ], + [ + "▁mantle", + -13.091839790344238 + ], + [ + "▁Hum", + -13.091867446899414 + ], + [ + "lace", + -13.091911315917969 + ], + [ + "pages", + -13.091980934143066 + ], + [ + "Bear", + -13.092114448547363 + ], + [ + "Tour", + -13.092377662658691 + ], + [ + "▁Automobile", + -13.09238624572754 + ], + [ + "▁spreadsheets", + -13.092429161071776 + ], + [ + "▁pronounce", + -13.092430114746094 + ], + [ + "lli", + -13.09245491027832 + ], + [ + "▁Lis", + -13.092536926269531 + ], + [ + "▁(2014)", + -13.092540740966797 + ], + [ + "▁couches", + -13.09255027770996 + ], + [ + "▁spaced", + -13.092598915100098 + ], + [ + "CW", + -13.092742919921877 + ], + [ + "▁spirited", + -13.092778205871582 + ], + [ + "▁scratched", + -13.092844009399414 + ], + [ + "▁Attention", + -13.092950820922852 + ], + [ + "▁Ade", + -13.092981338500977 + ], + [ + "▁Hate", + -13.092998504638672 + ], + [ + "▁Settlement", + -13.092998504638672 + ], + [ + "▁helpless", + -13.093143463134766 + ], + [ + "▁POP", + -13.093234062194824 + ], + [ + "▁Dura", + -13.093241691589355 + ], + [ + "▁Directions", + -13.093244552612305 + ], + [ + "▁incubator", + -13.093291282653809 + ], + [ + "▁Mansfield", + -13.09331512451172 + ], + [ + "▁Floors", + -13.09332275390625 + ], + [ + "▁tossing", + -13.093344688415527 + ], + [ + "▁1024", + -13.093382835388184 + ], + [ + "▁doorway", + -13.09341526031494 + ], + [ + "▁Fluid", + -13.093513488769531 + ], + [ + "▁Angus", + -13.093557357788086 + ], + [ + "▁tolerant", + -13.093635559082031 + ], + [ + "▁Lamps", + -13.093802452087402 + ], + [ + "▁payload", + -13.093839645385742 + ], + [ + ":0", + -13.09390926361084 + ], + [ + "▁8-10", + -13.09397792816162 + ], + [ + "7.5", + -13.094064712524414 + ], + [ + "▁testers", + -13.094149589538574 + ], + [ + "▁Factors", + -13.094233512878418 + ], + [ + "▁ou", + -13.09425163269043 + ], + [ + "Growing", + -13.094261169433594 + ], + [ + "▁Archbishop", + -13.094298362731934 + ], + [ + "▁Brokers", + -13.09476089477539 + ], + [ + "CMS", + -13.094804763793944 + ], + [ + "▁dreamy", + -13.094916343688965 + ], + [ + "▁prairie", + -13.094955444335938 + ], + [ + "▁federation", + -13.094965934753418 + ], + [ + "▁Kapoor", + -13.094969749450684 + ], + [ + "▁regulates", + -13.095102310180664 + ], + [ + "organic", + -13.095155715942385 + ], + [ + "thi", + -13.095155715942385 + ], + [ + "education", + -13.095195770263672 + ], + [ + "▁heavyweight", + -13.09522819519043 + ], + [ + "▁poorest", + -13.095439910888672 + ], 
+ [ + "▁Spurs", + -13.095508575439451 + ], + [ + "collection", + -13.095608711242676 + ], + [ + "Either", + -13.095681190490724 + ], + [ + "▁revived", + -13.09580135345459 + ], + [ + "▁gyms", + -13.095865249633787 + ], + [ + "▁Rie", + -13.095869064331056 + ], + [ + "▁infuse", + -13.095952033996582 + ], + [ + "xx", + -13.095956802368164 + ], + [ + "oso", + -13.095958709716797 + ], + [ + "▁receptions", + -13.096062660217283 + ], + [ + "▁Tran", + -13.096064567565918 + ], + [ + "▁Jail", + -13.09611988067627 + ], + [ + "appa", + -13.096132278442385 + ], + [ + "▁.....", + -13.096336364746094 + ], + [ + "▁Astro", + -13.096357345581056 + ], + [ + "▁cloves", + -13.096357345581056 + ], + [ + "▁fungal", + -13.096593856811523 + ], + [ + "▁linebacker", + -13.096627235412598 + ], + [ + "▁etiquette", + -13.09663200378418 + ], + [ + "▁emblem", + -13.096633911132812 + ], + [ + "▁Emerson", + -13.096644401550291 + ], + [ + "▁Rebel", + -13.09665298461914 + ], + [ + "▁backpacks", + -13.09673023223877 + ], + [ + "▁beacon", + -13.096734046936035 + ], + [ + "▁Indy", + -13.09674072265625 + ], + [ + "▁IPv", + -13.096762657165527 + ], + [ + "lux", + -13.096796989440918 + ], + [ + "Tony", + -13.09681510925293 + ], + [ + "▁Gina", + -13.096848487854004 + ], + [ + "▁Stable", + -13.096953392028809 + ], + [ + "▁membranes", + -13.097137451171877 + ], + [ + "zine", + -13.09714412689209 + ], + [ + "rig", + -13.097206115722656 + ], + [ + "▁Introducing", + -13.097206115722656 + ], + [ + "IDE", + -13.09726619720459 + ], + [ + "▁hardworking", + -13.097291946411133 + ], + [ + "▁blaze", + -13.097302436828612 + ], + [ + "Subject", + -13.097331047058104 + ], + [ + "▁galvanized", + -13.097360610961914 + ], + [ + "Android", + -13.097498893737791 + ], + [ + "121", + -13.097525596618652 + ], + [ + "▁Loving", + -13.097657203674316 + ], + [ + "ulate", + -13.097663879394531 + ], + [ + "▁substantive", + -13.097681045532228 + ], + [ + "▁accomplishing", + -13.097685813903809 + ], + [ + "▁Buildings", + -13.097725868225098 + ], + [ + "Games", + -13.09776782989502 + ], + [ + "▁edged", + -13.09780502319336 + ], + [ + "Film", + -13.097859382629396 + ], + [ + "Cr", + -13.09788990020752 + ], + [ + "▁silently", + -13.097917556762695 + ], + [ + "▁Cons", + -13.098007202148438 + ], + [ + "▁TJ", + -13.09803295135498 + ], + [ + "▁midday", + -13.098284721374512 + ], + [ + "▁granola", + -13.098297119140623 + ], + [ + "▁workplaces", + -13.098329544067385 + ], + [ + "▁freedoms", + -13.09835147857666 + ], + [ + "▁Jag", + -13.09835433959961 + ], + [ + "▁McLaren", + -13.098390579223633 + ], + [ + "▁ITS", + -13.098422050476074 + ], + [ + "▁Measures", + -13.098508834838867 + ], + [ + "▁Vault", + -13.098618507385254 + ], + [ + "▁Notebook", + -13.098645210266112 + ], + [ + "▁carpeting", + -13.098663330078123 + ], + [ + "▁rhythmic", + -13.098709106445312 + ], + [ + "▁patios", + -13.098783493041992 + ], + [ + "▁Sabha", + -13.098844528198242 + ], + [ + "▁Chances", + -13.09885025024414 + ], + [ + "admin", + -13.098881721496582 + ], + [ + "▁Stool", + -13.098917961120604 + ], + [ + "485", + -13.098998069763184 + ], + [ + "▁offences", + -13.099098205566406 + ], + [ + "▁NVIDIA", + -13.099178314208984 + ], + [ + "50%", + -13.099184036254885 + ], + [ + "▁anchors", + -13.099209785461426 + ], + [ + "rom", + -13.099275588989258 + ], + [ + "Anne", + -13.09934139251709 + ], + [ + "2.3", + -13.099367141723633 + ], + [ + "▁1986.", + -13.099371910095217 + ], + [ + "▁renovate", + -13.099420547485352 + ], + [ + "▁transformations", + -13.099435806274414 + ], + [ + "Load", + 
-13.099472045898438 + ], + [ + "▁tempered", + -13.09952163696289 + ], + [ + "▁ledger", + -13.099814414978027 + ], + [ + "▁resourceful", + -13.099847793579102 + ], + [ + "▁Kei", + -13.099860191345217 + ], + [ + "Wire", + -13.099863052368164 + ], + [ + "▁GBP", + -13.099925994873049 + ], + [ + "▁jogging", + -13.099969863891602 + ], + [ + "▁Hemp", + -13.099998474121094 + ], + [ + "▁Ecology", + -13.100008964538574 + ], + [ + "▁pancake", + -13.100152969360352 + ], + [ + "▁clashes", + -13.100312232971191 + ], + [ + "AIN", + -13.100403785705566 + ], + [ + "▁Xiao", + -13.100403785705566 + ], + [ + "register", + -13.100412368774414 + ], + [ + "▁manners", + -13.10047435760498 + ], + [ + "▁html", + -13.100491523742676 + ], + [ + "obi", + -13.100521087646484 + ], + [ + "pressed", + -13.100578308105469 + ], + [ + "extra", + -13.100638389587402 + ], + [ + "▁bows", + -13.10075569152832 + ], + [ + "▁Clin", + -13.100838661193848 + ], + [ + "▁lighten", + -13.100845336914062 + ], + [ + "▁cello", + -13.100863456726074 + ], + [ + "▁hid", + -13.100934982299805 + ], + [ + "▁warp", + -13.100955963134766 + ], + [ + "MHz", + -13.10099983215332 + ], + [ + "▁Chandra", + -13.101114273071287 + ], + [ + "▁Somalia", + -13.10117244720459 + ], + [ + "▁tendon", + -13.101192474365234 + ], + [ + "▁Olympia", + -13.101211547851562 + ], + [ + "▁commuters", + -13.101223945617676 + ], + [ + "psy", + -13.101242065429688 + ], + [ + "▁ascent", + -13.101253509521484 + ], + [ + "Iron", + -13.10133934020996 + ], + [ + "1).", + -13.101390838623049 + ], + [ + "▁porous", + -13.10141944885254 + ], + [ + "▁Thirty", + -13.101449012756348 + ], + [ + "sma", + -13.101500511169434 + ], + [ + "SOL", + -13.101508140563965 + ], + [ + "▁Shoot", + -13.10157871246338 + ], + [ + "▁apologies", + -13.101638793945312 + ], + [ + "▁counterfeit", + -13.101638793945312 + ], + [ + "▁$0", + -13.101640701293944 + ], + [ + "▁clauses", + -13.10166072845459 + ], + [ + "▁Covered", + -13.10185432434082 + ], + [ + "▁sheath", + -13.101893424987791 + ], + [ + "Considering", + -13.1019287109375 + ], + [ + "▁Marty", + -13.101966857910156 + ], + [ + "lene", + -13.10204029083252 + ], + [ + "▁Coke", + -13.102055549621582 + ], + [ + "▁Pines", + -13.102084159851074 + ], + [ + "logy", + -13.10214900970459 + ], + [ + "▁descending", + -13.10220718383789 + ], + [ + "▁Ethiopian", + -13.102261543273926 + ], + [ + "▁narrowed", + -13.102312088012695 + ], + [ + "▁0.00", + -13.102397918701172 + ], + [ + "dou", + -13.102408409118652 + ], + [ + "▁narrowly", + -13.102420806884766 + ], + [ + "▁Kendall", + -13.102530479431152 + ], + [ + "Saving", + -13.102737426757812 + ], + [ + "/2015", + -13.102863311767578 + ], + [ + "▁Vendor", + -13.10300636291504 + ], + [ + "▁Nationwide", + -13.103010177612305 + ], + [ + "▁statistic", + -13.103028297424316 + ], + [ + "▁drawback", + -13.10322093963623 + ], + [ + "Skin", + -13.103279113769531 + ], + [ + "▁grieving", + -13.103316307067873 + ], + [ + "▁Judicial", + -13.103317260742188 + ], + [ + "▁buns", + -13.103402137756348 + ], + [ + "▁Felt", + -13.103416442871094 + ], + [ + "▁campfire", + -13.10346508026123 + ], + [ + "-55", + -13.103527069091797 + ], + [ + "▁py", + -13.1035737991333 + ], + [ + "▁Cannes", + -13.103631973266602 + ], + [ + "▁Mines", + -13.10364818572998 + ], + [ + "SAT", + -13.103650093078612 + ], + [ + "hil", + -13.103800773620604 + ], + [ + "manship", + -13.103875160217283 + ], + [ + "▁solicitors", + -13.103918075561523 + ], + [ + "fel", + -13.10403060913086 + ], + [ + "▁Pillow", + -13.104103088378906 + ], + [ + "▁drawbacks", + 
-13.104118347167969 + ], + [ + "▁Anime", + -13.10417366027832 + ], + [ + "▁hurricanes", + -13.1041898727417 + ], + [ + "▁Oreo", + -13.104238510131836 + ], + [ + "wine", + -13.104337692260742 + ], + [ + "assi", + -13.104416847229004 + ], + [ + "▁ISA", + -13.104421615600586 + ], + [ + "▁splits", + -13.104605674743652 + ], + [ + "▁Torrent", + -13.104707717895508 + ], + [ + "Cho", + -13.104713439941406 + ], + [ + "daughter", + -13.104721069335938 + ], + [ + "rer", + -13.104723930358888 + ], + [ + "▁Maurice", + -13.104726791381836 + ], + [ + "▁surveying", + -13.104756355285645 + ], + [ + "Buying", + -13.104836463928224 + ], + [ + "▁Insta", + -13.10484218597412 + ], + [ + "▁Rodgers", + -13.104846954345703 + ], + [ + "▁disparate", + -13.10499668121338 + ], + [ + "▁imaginable", + -13.10499668121338 + ], + [ + "▁migrating", + -13.104997634887695 + ], + [ + "▁enforcing", + -13.105005264282228 + ], + [ + "▁communion", + -13.105013847351074 + ], + [ + "▁Cent", + -13.10502815246582 + ], + [ + "Tree", + -13.105064392089844 + ], + [ + "Wind", + -13.105177879333496 + ], + [ + "rim", + -13.105219841003418 + ], + [ + "hof", + -13.105314254760742 + ], + [ + "▁irritated", + -13.105527877807615 + ], + [ + "nies", + -13.105621337890623 + ], + [ + "▁Conn", + -13.105664253234863 + ], + [ + "▁owes", + -13.105727195739746 + ], + [ + "▁Rip", + -13.10572910308838 + ], + [ + "▁sac", + -13.105743408203123 + ], + [ + "▁Frances", + -13.105759620666504 + ], + [ + "▁Gucci", + -13.105878829956056 + ], + [ + "Stephen", + -13.106016159057615 + ], + [ + "▁rejuvenate", + -13.106080055236816 + ], + [ + "▁logistical", + -13.106149673461914 + ], + [ + "Anna", + -13.106208801269531 + ], + [ + "▁Feast", + -13.106264114379885 + ], + [ + "▁wee", + -13.106268882751465 + ], + [ + "Zero", + -13.106304168701172 + ], + [ + "128", + -13.10630989074707 + ], + [ + "▁Twilight", + -13.106316566467283 + ], + [ + "▁unanimous", + -13.106438636779783 + ], + [ + "▁131", + -13.106460571289062 + ], + [ + "▁alluring", + -13.10646915435791 + ], + [ + "▁Womens", + -13.106518745422363 + ], + [ + "▁scrapbook", + -13.10662841796875 + ], + [ + "▁Medieval", + -13.106681823730469 + ], + [ + "▁rocked", + -13.106801986694336 + ], + [ + "▁revered", + -13.1068115234375 + ], + [ + "▁berth", + -13.106815338134766 + ], + [ + "Wave", + -13.106840133666992 + ], + [ + "eus", + -13.106955528259276 + ], + [ + "▁CBC", + -13.106998443603516 + ], + [ + "▁billionaire", + -13.107032775878906 + ], + [ + "▁descended", + -13.107128143310549 + ], + [ + "▁Melanie", + -13.10723876953125 + ], + [ + "Ku", + -13.107447624206545 + ], + [ + "sse", + -13.107464790344238 + ], + [ + "▁omissions", + -13.107521057128906 + ], + [ + "EPA", + -13.107704162597656 + ], + [ + "Performance", + -13.107748031616213 + ], + [ + "▁recruiter", + -13.107789993286133 + ], + [ + "hoo", + -13.107797622680664 + ], + [ + "▁redefine", + -13.107815742492676 + ], + [ + "▁Afro", + -13.107833862304688 + ], + [ + "▁Cartoon", + -13.10785675048828 + ], + [ + "fits", + -13.107885360717772 + ], + [ + "▁Amar", + -13.10793685913086 + ], + [ + "were", + -13.108049392700195 + ], + [ + "▁stealth", + -13.108341217041016 + ], + [ + "▁blindly", + -13.10841178894043 + ], + [ + "▁Shopify", + -13.108464241027832 + ], + [ + "▁1923", + -13.108555793762209 + ], + [ + "▁Colony", + -13.108572006225586 + ], + [ + "▁aloe", + -13.108619689941406 + ], + [ + "▁semiconductor", + -13.108624458312988 + ], + [ + "301", + -13.108692169189451 + ], + [ + "▁packers", + -13.108695030212402 + ], + [ + "▁brainstorming", + -13.108793258666992 + ], + [ + 
"return", + -13.108799934387209 + ], + [ + "▁Bowie", + -13.10890007019043 + ], + [ + "▁Claim", + -13.10899543762207 + ], + [ + "▁HDD", + -13.109000205993652 + ], + [ + "▁Facial", + -13.109086990356444 + ], + [ + "▁Benz", + -13.109098434448242 + ], + [ + "▁slaughter", + -13.10910415649414 + ], + [ + "▁echoed", + -13.109139442443848 + ], + [ + "▁Indie", + -13.109230995178224 + ], + [ + "▁Admit", + -13.10925006866455 + ], + [ + "▁integer", + -13.109315872192385 + ], + [ + "▁prisons", + -13.109329223632812 + ], + [ + "▁Audrey", + -13.109349250793455 + ], + [ + "Error", + -13.109458923339844 + ], + [ + "▁Creed", + -13.109498977661133 + ], + [ + "Target", + -13.10949993133545 + ], + [ + "▁Ravi", + -13.109533309936523 + ], + [ + "▁WHY", + -13.109556198120115 + ], + [ + "▁Blast", + -13.109798431396484 + ], + [ + "▁Workshops", + -13.10995864868164 + ], + [ + "▁objection", + -13.11000156402588 + ], + [ + "metre", + -13.110018730163574 + ], + [ + "▁prophecy", + -13.110055923461914 + ], + [ + "▁concluding", + -13.110057830810549 + ], + [ + "▁Peruvian", + -13.110058784484863 + ], + [ + "▁adversity", + -13.110061645507812 + ], + [ + "▁Illustrated", + -13.110069274902344 + ], + [ + "▁complies", + -13.110069274902344 + ], + [ + "hus", + -13.110072135925291 + ], + [ + "▁Rutgers", + -13.110090255737305 + ], + [ + "▁sanity", + -13.110185623168944 + ], + [ + "▁itchy", + -13.110210418701172 + ], + [ + "▁chiropractor", + -13.11032772064209 + ], + [ + "▁Chest", + -13.110380172729492 + ], + [ + "▁cordless", + -13.110382080078123 + ], + [ + "▁ozone", + -13.110414505004885 + ], + [ + "▁monopoly", + -13.110483169555664 + ], + [ + "Aid", + -13.110533714294434 + ], + [ + "▁Zombie", + -13.110742568969728 + ], + [ + "Meeting", + -13.110751152038574 + ], + [ + "▁sting", + -13.110755920410156 + ], + [ + "▁attendant", + -13.110762596130373 + ], + [ + "▁Williamson", + -13.110841751098633 + ], + [ + "▁Netanyahu", + -13.11091136932373 + ], + [ + "pens", + -13.110950469970703 + ], + [ + "▁inc", + -13.111007690429688 + ], + [ + "▁Pace", + -13.111056327819824 + ], + [ + "grove", + -13.111058235168455 + ], + [ + "▁instill", + -13.11114501953125 + ], + [ + "▁worthless", + -13.111160278320312 + ], + [ + "▁Arcade", + -13.111199378967283 + ], + [ + "Construction", + -13.111204147338867 + ], + [ + "▁Painted", + -13.111266136169434 + ], + [ + "inen", + -13.111394882202148 + ], + [ + "uw", + -13.111427307128906 + ], + [ + "▁embellished", + -13.111440658569336 + ], + [ + "▁heavens", + -13.111455917358398 + ], + [ + "mere", + -13.111499786376951 + ], + [ + "▁Cran", + -13.111541748046877 + ], + [ + "▁Stroke", + -13.111584663391112 + ], + [ + "▁Sn", + -13.111746788024902 + ], + [ + "▁CALL", + -13.111749649047852 + ], + [ + "▁bleak", + -13.111797332763672 + ], + [ + "south", + -13.11180019378662 + ], + [ + "▁multiplication", + -13.111801147460938 + ], + [ + "▁316", + -13.111824989318848 + ], + [ + "▁Harlem", + -13.111879348754885 + ], + [ + "▁Ernest", + -13.111910820007324 + ], + [ + "▁mason", + -13.11195468902588 + ], + [ + "▁802.11", + -13.112070083618164 + ], + [ + "ISE", + -13.11215114593506 + ], + [ + "▁1924", + -13.112226486206056 + ], + [ + "▁purposeful", + -13.112369537353516 + ], + [ + "▁Preschool", + -13.112394332885742 + ], + [ + "acting", + -13.112466812133787 + ], + [ + "▁Mommy", + -13.112568855285645 + ], + [ + "discipline", + -13.112618446350098 + ], + [ + "hereinafter", + -13.112683296203612 + ], + [ + "▁enriching", + -13.112738609313965 + ], + [ + "olin", + -13.112743377685549 + ], + [ + "lund", + -13.112786293029783 + ], + 
[ + "▁Shed", + -13.112863540649414 + ], + [ + "Mat", + -13.112866401672363 + ], + [ + "review", + -13.112902641296388 + ], + [ + "Software", + -13.112934112548828 + ], + [ + "▁naive", + -13.113158226013184 + ], + [ + "▁statutes", + -13.113301277160645 + ], + [ + "▁disputed", + -13.113313674926758 + ], + [ + "▁slows", + -13.113373756408691 + ], + [ + "▁FOX", + -13.113399505615234 + ], + [ + "▁Judaism", + -13.113442420959473 + ], + [ + "▁irrational", + -13.113443374633787 + ], + [ + "▁allotted", + -13.113451957702637 + ], + [ + "▁choreography", + -13.113508224487305 + ], + [ + "▁scorer", + -13.113564491271973 + ], + [ + "▁exercised", + -13.113686561584473 + ], + [ + "▁Reich", + -13.1137113571167 + ], + [ + "▁Subway", + -13.113739967346191 + ], + [ + "▁Ebay", + -13.11375331878662 + ], + [ + "▁coffin", + -13.113804817199709 + ], + [ + "WB", + -13.113816261291504 + ], + [ + "IER", + -13.113874435424805 + ], + [ + "icing", + -13.11389446258545 + ], + [ + "erator", + -13.113896369934082 + ], + [ + "▁Institutions", + -13.113926887512209 + ], + [ + "▁Pla", + -13.114065170288086 + ], + [ + "Hop", + -13.114079475402832 + ], + [ + "turned", + -13.11411190032959 + ], + [ + "▁ALWAYS", + -13.114177703857422 + ], + [ + "wara", + -13.11430549621582 + ], + [ + "▁grooves", + -13.114319801330566 + ], + [ + "▁DEC", + -13.114344596862791 + ], + [ + "▁185", + -13.114347457885742 + ], + [ + "▁Graves", + -13.11441421508789 + ], + [ + "▁Buffet", + -13.11443328857422 + ], + [ + "▁cyclist", + -13.114521026611328 + ], + [ + "ifier", + -13.114553451538086 + ], + [ + "▁migrated", + -13.114583969116213 + ], + [ + "parent", + -13.11465549468994 + ], + [ + "Physical", + -13.11469268798828 + ], + [ + "▁Rancho", + -13.114822387695312 + ], + [ + "▁reactor", + -13.11489486694336 + ], + [ + "wish", + -13.11500358581543 + ], + [ + "▁lied", + -13.115057945251465 + ], + [ + "▁blower", + -13.115094184875488 + ], + [ + "▁Parmesan", + -13.115140914916992 + ], + [ + "▁eczema", + -13.115140914916992 + ], + [ + "▁tertiary", + -13.115140914916992 + ], + [ + "▁Wharf", + -13.115141868591309 + ], + [ + "gastrointestinal", + -13.115144729614258 + ], + [ + "▁Telugu", + -13.115147590637209 + ], + [ + "▁blurred", + -13.115166664123535 + ], + [ + "▁screwdriver", + -13.115219116210938 + ], + [ + "▁Naomi", + -13.115357398986816 + ], + [ + "▁bitterness", + -13.11539077758789 + ], + [ + "▁emulator", + -13.115474700927734 + ], + [ + "thought", + -13.115524291992188 + ], + [ + "▁Pep", + -13.11552619934082 + ], + [ + "▁Highlights", + -13.115550994873049 + ], + [ + "▁furnishing", + -13.115578651428224 + ], + [ + "▁Kathryn", + -13.115654945373535 + ], + [ + "▁flashlight", + -13.115681648254396 + ], + [ + "▁rumor", + -13.115684509277344 + ], + [ + "▁tractors", + -13.115694046020508 + ], + [ + "▁magically", + -13.115724563598633 + ], + [ + "▁Lightweight", + -13.115756034851074 + ], + [ + "wei", + -13.115774154663086 + ], + [ + "▁Psycho", + -13.11579418182373 + ], + [ + "nez", + -13.115883827209473 + ], + [ + "xa", + -13.115989685058594 + ], + [ + "api", + -13.116007804870604 + ], + [ + "enza", + -13.11607265472412 + ], + [ + "stic", + -13.11616325378418 + ], + [ + "▁commuter", + -13.116168975830078 + ], + [ + "MAC", + -13.11618423461914 + ], + [ + "▁Disorders", + -13.116303443908691 + ], + [ + "▁UFO", + -13.116387367248535 + ], + [ + "▁timelines", + -13.116409301757812 + ], + [ + "▁AGM", + -13.116535186767578 + ], + [ + "Left", + -13.116539001464844 + ], + [ + "tical", + -13.116690635681152 + ], + [ + "tim", + -13.116719245910645 + ], + [ + "files", + 
-13.116765975952148 + ], + [ + "▁lacrosse", + -13.116841316223145 + ], + [ + "▁tribunal", + -13.116841316223145 + ], + [ + "▁Brotherhood", + -13.116857528686523 + ], + [ + "▁endowment", + -13.116868019104004 + ], + [ + "▁Kristin", + -13.116888999938965 + ], + [ + "hoff", + -13.11693286895752 + ], + [ + "Act", + -13.116938591003418 + ], + [ + "▁foray", + -13.116968154907228 + ], + [ + "3-4", + -13.117097854614258 + ], + [ + "ovi", + -13.117246627807615 + ], + [ + "▁Lesson", + -13.11725902557373 + ], + [ + "▁1870", + -13.117326736450195 + ], + [ + "Sep", + -13.117329597473145 + ], + [ + "▁fasteners", + -13.11733055114746 + ], + [ + "bate", + -13.117409706115724 + ], + [ + "▁adhered", + -13.117432594299316 + ], + [ + "▁oyster", + -13.11750030517578 + ], + [ + "wah", + -13.117507934570312 + ], + [ + "▁peri", + -13.117511749267578 + ], + [ + "▁р", + -13.117521286010742 + ], + [ + "▁Cop", + -13.117524147033691 + ], + [ + "▁emit", + -13.117602348327637 + ], + [ + "▁glide", + -13.117624282836914 + ], + [ + "▁curling", + -13.11773681640625 + ], + [ + "▁postseason", + -13.1177978515625 + ], + [ + "▁1988.", + -13.117825508117676 + ], + [ + "▁slash", + -13.117849349975586 + ], + [ + "▁Chau", + -13.117927551269531 + ], + [ + "▁Candidate", + -13.117947578430176 + ], + [ + "Jean", + -13.117949485778809 + ], + [ + "5/", + -13.118016242980955 + ], + [ + "▁diversify", + -13.118046760559082 + ], + [ + "▁HF", + -13.11807918548584 + ], + [ + "NAS", + -13.118289947509766 + ], + [ + "▁Boiler", + -13.118304252624512 + ], + [ + "▁Blossom", + -13.118313789367676 + ], + [ + "▁340", + -13.11833381652832 + ], + [ + "▁affidavit", + -13.118545532226562 + ], + [ + "▁cascade", + -13.118547439575195 + ], + [ + "aver", + -13.118556022644045 + ], + [ + "▁unreliable", + -13.11855697631836 + ], + [ + "▁Euros", + -13.11864948272705 + ], + [ + "2008", + -13.118685722351074 + ], + [ + "▁banker", + -13.118714332580566 + ], + [ + "▁chinese", + -13.118866920471191 + ], + [ + "▁shrine", + -13.118925094604492 + ], + [ + "ARY", + -13.119098663330078 + ], + [ + "▁fest", + -13.119108200073242 + ], + [ + "cen", + -13.119176864624023 + ], + [ + "▁Sharks", + -13.119206428527832 + ], + [ + "▁slump", + -13.119380950927734 + ], + [ + "▁exits", + -13.119394302368164 + ], + [ + "▁blasting", + -13.119443893432615 + ], + [ + "process", + -13.11948013305664 + ], + [ + "oro", + -13.119497299194336 + ], + [ + "nch", + -13.119589805603027 + ], + [ + "▁Randall", + -13.119726181030272 + ], + [ + "birds", + -13.119837760925291 + ], + [ + "▁Teddy", + -13.119839668273926 + ], + [ + "drawn", + -13.119853019714355 + ], + [ + "▁rut", + -13.119855880737305 + ], + [ + "colour", + -13.119858741760254 + ], + [ + "▁Sham", + -13.119860649108888 + ], + [ + "▁0800", + -13.119887351989746 + ], + [ + "▁jig", + -13.120015144348145 + ], + [ + "▁£50", + -13.120020866394045 + ], + [ + "OM", + -13.120125770568848 + ], + [ + "▁kiddos", + -13.12021255493164 + ], + [ + "▁Jubilee", + -13.120251655578612 + ], + [ + "▁Ventura", + -13.120268821716309 + ], + [ + "▁subscribing", + -13.120279312133787 + ], + [ + "▁rapport", + -13.120285034179688 + ], + [ + "▁AVAILABLE", + -13.12039279937744 + ], + [ + "▁sizable", + -13.120455741882324 + ], + [ + "▁ra", + -13.120502471923828 + ], + [ + "▁knowingly", + -13.120546340942385 + ], + [ + "ws", + -13.120664596557615 + ], + [ + "rone", + -13.120715141296388 + ], + [ + "quel", + -13.120756149291992 + ], + [ + "ved", + -13.12091827392578 + ], + [ + "rf", + -13.12093448638916 + ], + [ + "▁Rehab", + -13.12109661102295 + ], + [ + "▁Appliances", + 
-13.121214866638184 + ], + [ + "punk", + -13.12121868133545 + ], + [ + "▁ballroom", + -13.121315956115724 + ], + [ + "tory", + -13.12139129638672 + ], + [ + "▁Fellows", + -13.121420860290527 + ], + [ + "▁washers", + -13.121426582336426 + ], + [ + "▁braid", + -13.121489524841309 + ], + [ + "01.", + -13.121552467346191 + ], + [ + "▁rant", + -13.121662139892578 + ], + [ + "▁onward", + -13.121749877929688 + ], + [ + "▁hikers", + -13.121826171875 + ], + [ + "▁evolves", + -13.1218900680542 + ], + [ + "▁constituency", + -13.121960639953612 + ], + [ + "▁multiplied", + -13.121960639953612 + ], + [ + "▁Snapdragon", + -13.121962547302246 + ], + [ + "▁typography", + -13.121962547302246 + ], + [ + "▁Tat", + -13.122000694274902 + ], + [ + "▁MPG", + -13.122015953063965 + ], + [ + "▁loot", + -13.122129440307615 + ], + [ + "BM", + -13.12217617034912 + ], + [ + "▁Carlisle", + -13.1221923828125 + ], + [ + "▁Basics", + -13.12221908569336 + ], + [ + "▁apex", + -13.122270584106444 + ], + [ + "issa", + -13.122346878051758 + ], + [ + "▁Toledo", + -13.122403144836426 + ], + [ + "▁212", + -13.122515678405762 + ], + [ + "information", + -13.12264347076416 + ], + [ + "▁kittens", + -13.122809410095217 + ], + [ + "▁SJ", + -13.122840881347656 + ], + [ + "SV", + -13.122922897338867 + ], + [ + "▁Mani", + -13.122976303100586 + ], + [ + "9.5", + -13.12299633026123 + ], + [ + "▁126", + -13.123016357421877 + ], + [ + "▁......", + -13.12309455871582 + ], + [ + "▁unfolds", + -13.123250961303713 + ], + [ + "Para", + -13.123435974121094 + ], + [ + "linked", + -13.12346363067627 + ], + [ + "▁BG", + -13.123472213745115 + ], + [ + "-2015", + -13.123573303222656 + ], + [ + "▁salts", + -13.123645782470703 + ], + [ + "▁Kathmandu", + -13.12367820739746 + ], + [ + "▁compiling", + -13.123679161071776 + ], + [ + "▁peppermint", + -13.123686790466309 + ], + [ + "▁Appalachian", + -13.123801231384276 + ], + [ + "▁Shaun", + -13.123841285705566 + ], + [ + "▁Gul", + -13.123845100402832 + ], + [ + "▁Tuscany", + -13.123878479003906 + ], + [ + "▁complying", + -13.123971939086914 + ], + [ + "▁Vanessa", + -13.124038696289062 + ], + [ + "eq", + -13.124105453491213 + ], + [ + "▁semantic", + -13.124144554138184 + ], + [ + "bear", + -13.12425136566162 + ], + [ + "▁contrasts", + -13.124428749084473 + ], + [ + "▁ste", + -13.124489784240724 + ], + [ + "▁26-", + -13.124556541442873 + ], + [ + "▁dealings", + -13.124584197998049 + ], + [ + "▁Memo", + -13.124615669250488 + ], + [ + "ulated", + -13.124627113342283 + ], + [ + "▁RJ", + -13.124648094177246 + ], + [ + "▁Comfortable", + -13.124650955200195 + ], + [ + "Tools", + -13.124655723571776 + ], + [ + "▁cosplay", + -13.124703407287598 + ], + [ + "▁Observer", + -13.124725341796877 + ], + [ + "▁Toni", + -13.124736785888672 + ], + [ + "▁Holden", + -13.124774932861328 + ], + [ + "oud", + -13.124791145324709 + ], + [ + "▁rev", + -13.124813079833984 + ], + [ + "mers", + -13.124933242797852 + ], + [ + "holes", + -13.124978065490724 + ], + [ + "pas", + -13.125042915344238 + ], + [ + "rit", + -13.125102996826172 + ], + [ + "rank", + -13.125158309936523 + ], + [ + "GG", + -13.1251859664917 + ], + [ + "▁railing", + -13.125228881835938 + ], + [ + "▁esteem", + -13.125232696533203 + ], + [ + "▁startling", + -13.12525463104248 + ], + [ + "▁BH", + -13.125255584716797 + ], + [ + "pon", + -13.12537956237793 + ], + [ + "▁Filters", + -13.125402450561523 + ], + [ + "▁nanny", + -13.125409126281738 + ], + [ + "▁4.7", + -13.12541961669922 + ], + [ + "▁sanctioned", + -13.125422477722168 + ], + [ + "▁choral", + -13.125428199768066 + ], 
+ [ + "▁offseason", + -13.12543773651123 + ], + [ + "▁Lawson", + -13.125513076782228 + ], + [ + "▁Thick", + -13.125571250915527 + ], + [ + "▁JSON", + -13.125592231750488 + ], + [ + "▁Separate", + -13.125598907470703 + ], + [ + "▁inhibitor", + -13.125645637512209 + ], + [ + "▁Gathering", + -13.125764846801758 + ], + [ + "▁Faucet", + -13.125845909118652 + ], + [ + "kb", + -13.125877380371094 + ], + [ + "▁ancestor", + -13.125914573669434 + ], + [ + "▁merry", + -13.12594985961914 + ], + [ + ":25", + -13.126113891601562 + ], + [ + "▁Meter", + -13.126132011413574 + ], + [ + "▁artistry", + -13.126334190368652 + ], + [ + "cion", + -13.126581192016602 + ], + [ + "▁Goodman", + -13.126595497131348 + ], + [ + "▁Listening", + -13.12667465209961 + ], + [ + "▁nests", + -13.126683235168455 + ], + [ + "▁1926", + -13.126699447631836 + ], + [ + "▁Navi", + -13.126803398132324 + ], + [ + "▁diff", + -13.126882553100586 + ], + [ + "▁troubleshoot", + -13.126893997192385 + ], + [ + "chon", + -13.126906394958496 + ], + [ + "graphic", + -13.126953125 + ], + [ + "▁Correct", + -13.127044677734377 + ], + [ + "capacity", + -13.127071380615234 + ], + [ + "▁monoxide", + -13.127108573913574 + ], + [ + "shed", + -13.127110481262209 + ], + [ + "▁subtly", + -13.12714385986328 + ], + [ + "▁Strait", + -13.127168655395508 + ], + [ + "107", + -13.127253532409668 + ], + [ + "Clearly", + -13.127301216125488 + ], + [ + "▁Kimberly", + -13.12741470336914 + ], + [ + "▁1925", + -13.127431869506836 + ], + [ + "▁sparse", + -13.12747287750244 + ], + [ + "▁Seating", + -13.127544403076172 + ], + [ + "supported", + -13.127582550048828 + ], + [ + "Word", + -13.127705574035645 + ], + [ + "▁arches", + -13.127725601196287 + ], + [ + "▁Antarctica", + -13.127821922302246 + ], + [ + "▁Pel", + -13.127832412719728 + ], + [ + "▁regrets", + -13.127962112426758 + ], + [ + "▁cashier", + -13.12797737121582 + ], + [ + "▁specifying", + -13.128036499023438 + ], + [ + "▁humane", + -13.12803840637207 + ], + [ + "coloured", + -13.1281156539917 + ], + [ + "▁offended", + -13.128213882446287 + ], + [ + "sche", + -13.128229141235352 + ], + [ + "▁Hag", + -13.12824535369873 + ], + [ + "és", + -13.12827205657959 + ], + [ + "▁restrooms", + -13.12829303741455 + ], + [ + "Wa", + -13.128325462341309 + ], + [ + "▁pals", + -13.128403663635254 + ], + [ + "Europe", + -13.128548622131348 + ], + [ + "▁Adirondack", + -13.128594398498535 + ], + [ + "▁realtor", + -13.128621101379396 + ], + [ + "Regarding", + -13.128670692443848 + ], + [ + "icon", + -13.128674507141112 + ], + [ + "▁HMRC", + -13.128786087036133 + ], + [ + "▁debating", + -13.128828048706056 + ], + [ + "▁hypothetical", + -13.128828048706056 + ], + [ + "▁provocative", + -13.128828048706056 + ], + [ + "▁swoop", + -13.128839492797852 + ], + [ + "▁amps", + -13.12885856628418 + ], + [ + "▁sentencing", + -13.128864288330078 + ], + [ + "▁originate", + -13.128870010375977 + ], + [ + "▁imprisoned", + -13.12889575958252 + ], + [ + "▁exert", + -13.12894344329834 + ], + [ + "union", + -13.128944396972656 + ], + [ + "▁THANK", + -13.129079818725586 + ], + [ + "▁planks", + -13.129091262817385 + ], + [ + "Edge", + -13.129135131835938 + ], + [ + "▁reunited", + -13.129281997680664 + ], + [ + "▁$24", + -13.12936305999756 + ], + [ + "▁1921", + -13.129388809204102 + ], + [ + "▁disagreement", + -13.129434585571287 + ], + [ + "Payment", + -13.12956714630127 + ], + [ + "PB", + -13.129616737365724 + ], + [ + "▁PSU", + -13.129640579223633 + ], + [ + "lix", + -13.12967300415039 + ], + [ + "▁catheter", + -13.129674911499023 + ], + [ + "Audio", + 
-13.129753112792969 + ], + [ + "lash", + -13.129754066467283 + ], + [ + "▁bomber", + -13.129915237426758 + ], + [ + "▁APA", + -13.12999153137207 + ], + [ + "▁Electricity", + -13.13011646270752 + ], + [ + "NW", + -13.13015842437744 + ], + [ + "▁Kart", + -13.130309104919434 + ], + [ + "▁implementations", + -13.130426406860352 + ], + [ + "elect", + -13.130456924438477 + ], + [ + ",''", + -13.130510330200195 + ], + [ + "▁Chesapeake", + -13.130522727966309 + ], + [ + "▁contempt", + -13.130552291870115 + ], + [ + "▁mermaid", + -13.13055419921875 + ], + [ + "▁kidnapped", + -13.13056182861328 + ], + [ + "▁wrath", + -13.130573272705078 + ], + [ + "▁alkaline", + -13.130576133728027 + ], + [ + "▁Scottsdale", + -13.130579948425291 + ], + [ + "▁edgy", + -13.13058376312256 + ], + [ + "▁RAW", + -13.130819320678713 + ], + [ + "▁Editorial", + -13.130958557128906 + ], + [ + "▁COP", + -13.131006240844728 + ], + [ + "256", + -13.131026268005373 + ], + [ + "ult", + -13.13110065460205 + ], + [ + "▁biometric", + -13.131105422973633 + ], + [ + "▁Daytona", + -13.13111972808838 + ], + [ + "▁Cory", + -13.131153106689451 + ], + [ + "▁retina", + -13.1311616897583 + ], + [ + "▁interchange", + -13.131223678588867 + ], + [ + "▁Lamar", + -13.131227493286133 + ], + [ + "▁Placement", + -13.13122844696045 + ], + [ + "▁Romantic", + -13.131352424621582 + ], + [ + "ndi", + -13.131412506103516 + ], + [ + "Mill", + -13.131423950195312 + ], + [ + "iling", + -13.131467819213867 + ], + [ + "item", + -13.131507873535156 + ], + [ + "▁Narrow", + -13.131638526916504 + ], + [ + "▁pioneered", + -13.13167667388916 + ], + [ + "▁chloride", + -13.131684303283691 + ], + [ + "▁Lose", + -13.131718635559082 + ], + [ + "▁Disclosure", + -13.131768226623535 + ], + [ + "▁Huang", + -13.131786346435549 + ], + [ + "▁knight", + -13.131952285766602 + ], + [ + "▁Playa", + -13.13209056854248 + ], + [ + "grave", + -13.132230758666992 + ], + [ + "yourself", + -13.132243156433104 + ], + [ + "▁Months", + -13.132257461547852 + ], + [ + "▁Swarovski", + -13.132279396057127 + ], + [ + "▁cumbersome", + -13.132279396057127 + ], + [ + "▁disabling", + -13.132279396057127 + ], + [ + "▁tbsp", + -13.13228988647461 + ], + [ + "▁waving", + -13.132290840148926 + ], + [ + "▁Hancock", + -13.13229274749756 + ], + [ + "????", + -13.13231086730957 + ], + [ + "route", + -13.132319450378418 + ], + [ + "▁stirred", + -13.132394790649414 + ], + [ + "Engine", + -13.13239860534668 + ], + [ + "nothing", + -13.132412910461426 + ], + [ + "▁NSF", + -13.132440567016602 + ], + [ + "▁Entrepreneur", + -13.132492065429688 + ], + [ + "▁Uniform", + -13.132505416870115 + ], + [ + "▁Spacious", + -13.13254165649414 + ], + [ + "lau", + -13.132549285888672 + ], + [ + "▁Tray", + -13.132569313049316 + ], + [ + "▁dives", + -13.132576942443848 + ], + [ + "▁playable", + -13.132609367370604 + ], + [ + "▁Satisfaction", + -13.132680892944336 + ], + [ + "FD", + -13.13277530670166 + ], + [ + "▁deficits", + -13.132816314697266 + ], + [ + "vita", + -13.132858276367188 + ], + [ + "update", + -13.133079528808594 + ], + [ + "▁Stu", + -13.133244514465332 + ], + [ + "▁nighttime", + -13.133257865905762 + ], + [ + "iation", + -13.133271217346191 + ], + [ + "umba", + -13.13332462310791 + ], + [ + "▁provoke", + -13.133416175842283 + ], + [ + "▁Moran", + -13.133452415466309 + ], + [ + "?).", + -13.13346004486084 + ], + [ + "▁sweeter", + -13.133485794067385 + ], + [ + "zia", + -13.133527755737305 + ], + [ + "▁scout", + -13.13353157043457 + ], + [ + "▁BMI", + -13.133554458618164 + ], + [ + "▁gentlemen", + -13.13363265991211 + 
], + [ + "▁indian", + -13.133708953857422 + ], + [ + "▁TL", + -13.133796691894531 + ], + [ + "▁landowners", + -13.133909225463867 + ], + [ + "iger", + -13.133923530578612 + ], + [ + "▁Direction", + -13.133987426757812 + ], + [ + "▁fluoride", + -13.13400936126709 + ], + [ + "▁luscious", + -13.13400936126709 + ], + [ + "▁Gupta", + -13.13401222229004 + ], + [ + "ceiling", + -13.134041786193848 + ], + [ + "▁Ramadan", + -13.13405990600586 + ], + [ + "▁SUPER", + -13.134061813354492 + ], + [ + "▁UEFA", + -13.134085655212402 + ], + [ + "▁Angelo", + -13.134137153625488 + ], + [ + "▁Elena", + -13.134248733520508 + ], + [ + "▁Thoughts", + -13.13427448272705 + ], + [ + "goal", + -13.134355545043944 + ], + [ + "▁sentimental", + -13.13438892364502 + ], + [ + "▁squirrels", + -13.134394645690918 + ], + [ + "Pin", + -13.13441562652588 + ], + [ + "fight", + -13.13448429107666 + ], + [ + "▁Connected", + -13.134530067443848 + ], + [ + "▁accommodates", + -13.134769439697266 + ], + [ + "guided", + -13.13486099243164 + ], + [ + "▁marquee", + -13.134870529174805 + ], + [ + "▁168", + -13.134968757629396 + ], + [ + "▁Dishwasher", + -13.135051727294922 + ], + [ + "▁Abby", + -13.135108947753906 + ], + [ + "boarding", + -13.135169982910156 + ], + [ + "systems", + -13.135248184204102 + ], + [ + "▁Lakers", + -13.13531494140625 + ], + [ + "▁criticize", + -13.135358810424805 + ], + [ + "▁(2017)", + -13.135366439819336 + ], + [ + "▁2050", + -13.135397911071776 + ], + [ + "▁selfies", + -13.135401725769045 + ], + [ + "▁Lets", + -13.13540744781494 + ], + [ + "▁Statistical", + -13.13547420501709 + ], + [ + "lat", + -13.13569450378418 + ], + [ + "▁bog", + -13.135725021362305 + ], + [ + "▁Diocese", + -13.1357421875 + ], + [ + "▁resembling", + -13.1357421875 + ], + [ + "▁modalities", + -13.135747909545898 + ], + [ + "▁plight", + -13.135797500610352 + ], + [ + "▁registrar", + -13.135823249816896 + ], + [ + "▁Clearance", + -13.135836601257324 + ], + [ + "▁airtight", + -13.135882377624512 + ], + [ + "FG", + -13.135936737060549 + ], + [ + "▁scraping", + -13.135940551757812 + ], + [ + "▁outsourced", + -13.135986328125 + ], + [ + "getting", + -13.136027336120604 + ], + [ + "Joseph", + -13.136054039001465 + ], + [ + "▁incense", + -13.136067390441896 + ], + [ + "▁Bonds", + -13.136173248291016 + ], + [ + "▁Yelp", + -13.136286735534668 + ], + [ + "▁65%", + -13.136502265930176 + ], + [ + "▁bursts", + -13.136621475219728 + ], + [ + "▁electrons", + -13.13669776916504 + ], + [ + "▁wandered", + -13.136701583862305 + ], + [ + "Girl", + -13.136794090270996 + ], + [ + "cci", + -13.136797904968262 + ], + [ + "▁Concepts", + -13.13681983947754 + ], + [ + "135", + -13.137028694152832 + ], + [ + "hear", + -13.137282371520996 + ], + [ + "TED", + -13.137301445007324 + ], + [ + "▁Supporting", + -13.137325286865234 + ], + [ + "ality", + -13.137409210205078 + ], + [ + "▁reinforcing", + -13.137478828430176 + ], + [ + "▁rejuvenation", + -13.137478828430176 + ], + [ + "▁lumbar", + -13.137479782104492 + ], + [ + "▁positivity", + -13.137483596801758 + ], + [ + "▁caste", + -13.13748550415039 + ], + [ + "▁Fro", + -13.137495994567873 + ], + [ + "▁richly", + -13.13758373260498 + ], + [ + "▁Tyson", + -13.137592315673828 + ], + [ + "▁ruby", + -13.13762378692627 + ], + [ + "▁exposures", + -13.137764930725098 + ], + [ + "▁124", + -13.137821197509766 + ], + [ + "ime", + -13.137887001037598 + ], + [ + "▁tangy", + -13.138016700744627 + ], + [ + "▁Unlock", + -13.13822078704834 + ], + [ + "▁Publication", + -13.138237953186035 + ], + [ + "bourne", + -13.13826847076416 + ], + 
[ + "▁customizing", + -13.13828182220459 + ], + [ + "▁ii", + -13.138401985168455 + ], + [ + "lak", + -13.138422966003418 + ], + [ + "▁faithfully", + -13.138483047485352 + ], + [ + "▁Trademark", + -13.138592720031738 + ], + [ + "cca", + -13.13860321044922 + ], + [ + "obo", + -13.13860321044922 + ], + [ + "▁attends", + -13.138626098632812 + ], + [ + "▁Docker", + -13.138713836669922 + ], + [ + "▁Mondays", + -13.138750076293944 + ], + [ + "▁ransomware", + -13.138808250427246 + ], + [ + "▁sacks", + -13.138808250427246 + ], + [ + "▁Prescription", + -13.138840675354004 + ], + [ + "uru", + -13.138864517211914 + ], + [ + "winner", + -13.138882637023926 + ], + [ + "▁Folding", + -13.138912200927734 + ], + [ + "▁interviewer", + -13.138920783996582 + ], + [ + "▁Sym", + -13.139022827148438 + ], + [ + "▁philosopher", + -13.139103889465332 + ], + [ + "▁distilled", + -13.13917636871338 + ], + [ + "▁Cunningham", + -13.139217376708984 + ], + [ + "▁ordained", + -13.1392183303833 + ], + [ + "▁soulful", + -13.139235496520996 + ], + [ + "▁geology", + -13.13926124572754 + ], + [ + "▁prohibits", + -13.139280319213867 + ], + [ + "▁motherhood", + -13.139434814453123 + ], + [ + "▁£8", + -13.139533042907717 + ], + [ + "spire", + -13.139551162719728 + ], + [ + "Gal", + -13.13963794708252 + ], + [ + "rather", + -13.139762878417969 + ], + [ + "OFF", + -13.13984489440918 + ], + [ + "▁complication", + -13.139857292175291 + ], + [ + "gt", + -13.13985824584961 + ], + [ + "▁Eg", + -13.139883041381836 + ], + [ + "▁BUY", + -13.14003849029541 + ], + [ + "▁bassist", + -13.140161514282228 + ], + [ + "▁Wick", + -13.140262603759766 + ], + [ + "SET", + -13.140298843383787 + ], + [ + "htm", + -13.140379905700684 + ], + [ + "Reduce", + -13.140451431274414 + ], + [ + "▁mingle", + -13.140464782714844 + ], + [ + "▁adorn", + -13.14047622680664 + ], + [ + "▁choke", + -13.140485763549805 + ], + [ + "▁strained", + -13.140510559082031 + ], + [ + "▁replicated", + -13.14072036743164 + ], + [ + "▁hashtags", + -13.14073085784912 + ], + [ + "▁youngster", + -13.140807151794434 + ], + [ + "▁pee", + -13.140825271606444 + ], + [ + "▁spheres", + -13.141020774841309 + ], + [ + "▁burglary", + -13.141124725341797 + ], + [ + "izz", + -13.141175270080566 + ], + [ + "▁disgusting", + -13.141197204589844 + ], + [ + "▁27-", + -13.141314506530762 + ], + [ + "▁Rough", + -13.141339302062988 + ], + [ + "▁Heck", + -13.141420364379885 + ], + [ + "Unit", + -13.141444206237791 + ], + [ + "odi", + -13.141462326049805 + ], + [ + "▁Luc", + -13.141480445861816 + ], + [ + "TING", + -13.141530990600586 + ], + [ + "▁Quarterly", + -13.1415433883667 + ], + [ + "▁flawlessly", + -13.141544342041016 + ], + [ + "▁...\"", + -13.141545295715332 + ], + [ + "1200", + -13.14166259765625 + ], + [ + "▁practicality", + -13.141834259033203 + ], + [ + "dead", + -13.14186954498291 + ], + [ + "▁fooled", + -13.141874313354492 + ], + [ + "▁dans", + -13.141897201538086 + ], + [ + "▁postcode", + -13.14200210571289 + ], + [ + "”—", + -13.142047882080078 + ], + [ + "titled", + -13.142072677612305 + ], + [ + "▁Venezuelan", + -13.142114639282228 + ], + [ + "▁barred", + -13.142114639282228 + ], + [ + "▁fused", + -13.14224624633789 + ], + [ + "▁DOC", + -13.14228630065918 + ], + [ + "Magic", + -13.142362594604492 + ], + [ + "▁4.6", + -13.142379760742188 + ], + [ + "ific", + -13.142416000366213 + ], + [ + "rooms", + -13.142446517944336 + ], + [ + "▁topper", + -13.142533302307127 + ], + [ + "▁za", + -13.142614364624023 + ], + [ + "▁catalogs", + -13.14266586303711 + ], + [ + "32.", + -13.14266872406006 + ], 
+ [ + "▁rollout", + -13.142669677734377 + ], + [ + "▁Texans", + -13.142714500427246 + ], + [ + "▁Sabbath", + -13.142719268798828 + ], + [ + "▁unbeaten", + -13.142723083496094 + ], + [ + "▁Gir", + -13.142729759216309 + ], + [ + "▁brink", + -13.142730712890623 + ], + [ + "▁wordpress", + -13.142763137817385 + ], + [ + "▁Litigation", + -13.1427640914917 + ], + [ + "▁Evergreen", + -13.142766952514648 + ], + [ + "Abstract", + -13.142770767211914 + ], + [ + "ect", + -13.142786979675291 + ], + [ + "Florida", + -13.142895698547363 + ], + [ + "▁stressing", + -13.142914772033691 + ], + [ + "Prime", + -13.143024444580078 + ], + [ + "filter", + -13.143242835998535 + ], + [ + "▁Franz", + -13.143296241760254 + ], + [ + "▁moderated", + -13.143412590026855 + ], + [ + "▁rye", + -13.143445014953612 + ], + [ + "harm", + -13.143461227416992 + ], + [ + "▁£9", + -13.14349365234375 + ], + [ + "▁Noel", + -13.14357566833496 + ], + [ + "▁urges", + -13.14362621307373 + ], + [ + "▁Cascade", + -13.14365291595459 + ], + [ + "▁speculate", + -13.143857955932615 + ], + [ + "▁globalization", + -13.14399242401123 + ], + [ + "oids", + -13.144160270690918 + ], + [ + "pun", + -13.144232749938965 + ], + [ + "▁recurrent", + -13.144258499145508 + ], + [ + "Solar", + -13.144339561462402 + ], + [ + "▁pellets", + -13.144343376159668 + ], + [ + "▁24\"", + -13.144362449645996 + ], + [ + "▁improvisation", + -13.144362449645996 + ], + [ + "▁tame", + -13.144417762756348 + ], + [ + "▁sterile", + -13.144455909729004 + ], + [ + "▁extraordinarily", + -13.14445972442627 + ], + [ + "▁brittle", + -13.144460678100586 + ], + [ + "▁Jacques", + -13.144476890563965 + ], + [ + "▁penned", + -13.144503593444824 + ], + [ + "▁birch", + -13.14450454711914 + ], + [ + "opt", + -13.144550323486328 + ], + [ + "▁Avi", + -13.144583702087402 + ], + [ + "▁Seek", + -13.14458465576172 + ], + [ + "▁Clothes", + -13.144622802734377 + ], + [ + "440", + -13.144675254821776 + ], + [ + "▁lash", + -13.144675254821776 + ], + [ + "awaited", + -13.144676208496094 + ], + [ + "▁Boris", + -13.1447114944458 + ], + [ + "▁Fool", + -13.144781112670898 + ], + [ + "▁Afterwards", + -13.144794464111328 + ], + [ + "▁chewy", + -13.145015716552734 + ], + [ + "▁Conrad", + -13.14511489868164 + ], + [ + "▁congregations", + -13.145139694213867 + ], + [ + "▁truffle", + -13.145148277282717 + ], + [ + "▁forklift", + -13.145182609558104 + ], + [ + "tern", + -13.145183563232422 + ], + [ + "3.3", + -13.14524269104004 + ], + [ + "CAM", + -13.145292282104492 + ], + [ + "▁7.0", + -13.145305633544922 + ], + [ + "▁Petro", + -13.145350456237791 + ], + [ + "▁Rae", + -13.145612716674805 + ], + [ + "Band", + -13.145636558532717 + ], + [ + "▁tre", + -13.145638465881348 + ], + [ + "▁Cur", + -13.145684242248535 + ], + [ + "108", + -13.145713806152344 + ], + [ + "▁extinct", + -13.145766258239746 + ], + [ + "▁workaround", + -13.14585018157959 + ], + [ + "Account", + -13.146005630493164 + ], + [ + "▁Senators", + -13.146050453186035 + ], + [ + "▁Languages", + -13.146061897277832 + ], + [ + "▁pneumatic", + -13.146204948425291 + ], + [ + "▁beauties", + -13.14620876312256 + ], + [ + "shelf", + -13.146220207214355 + ], + [ + "▁genocide", + -13.146349906921388 + ], + [ + "▁Barclays", + -13.146373748779297 + ], + [ + "▁Brun", + -13.146403312683104 + ], + [ + "▁proportional", + -13.146408081054688 + ], + [ + "▁Ashton", + -13.146434783935549 + ], + [ + "udi", + -13.146842956542969 + ], + [ + "▁entitlement", + -13.14692497253418 + ], + [ + "-44", + -13.146953582763672 + ], + [ + "▁Experiment", + -13.14713191986084 + ], + [ + 
"▁raids", + -13.147252082824709 + ], + [ + "▁Archer", + -13.147266387939451 + ], + [ + "▁conceive", + -13.14736557006836 + ], + [ + "▁Badge", + -13.147400856018066 + ], + [ + "mite", + -13.147500991821287 + ], + [ + "▁thoughtfully", + -13.14758586883545 + ], + [ + "▁Cruises", + -13.147645950317385 + ], + [ + "WOW", + -13.1476469039917 + ], + [ + "bass", + -13.147808074951172 + ], + [ + "4%", + -13.147828102111816 + ], + [ + "▁simplifies", + -13.147958755493164 + ], + [ + "▁Hanoi", + -13.147981643676758 + ], + [ + "▁Mosque", + -13.147992134094238 + ], + [ + "▁misplaced", + -13.148052215576172 + ], + [ + "▁clad", + -13.148090362548828 + ], + [ + "formed", + -13.148119926452637 + ], + [ + "▁CrossFit", + -13.148202896118164 + ], + [ + "▁Experimental", + -13.14820671081543 + ], + [ + "▁Elijah", + -13.148324966430664 + ], + [ + "ake", + -13.148341178894045 + ], + [ + "Garden", + -13.14836883544922 + ], + [ + "▁laundering", + -13.148390769958496 + ], + [ + "▁improperly", + -13.148397445678713 + ], + [ + "identifying", + -13.148431777954102 + ], + [ + "▁HIPAA", + -13.148545265197754 + ], + [ + "▁crafty", + -13.14861297607422 + ], + [ + "▁Bees", + -13.148677825927734 + ], + [ + "▁Hostel", + -13.148704528808594 + ], + [ + "▁attaches", + -13.148736953735352 + ], + [ + "▁23-", + -13.148837089538574 + ], + [ + "▁surfacing", + -13.148859024047852 + ], + [ + "▁Trout", + -13.148896217346191 + ], + [ + "different", + -13.149352073669434 + ], + [ + "▁mandates", + -13.14936637878418 + ], + [ + "▁MH", + -13.14947509765625 + ], + [ + "Identify", + -13.149559020996094 + ], + [ + "▁Hanna", + -13.149598121643066 + ], + [ + "▁Corona", + -13.149691581726074 + ], + [ + "▁summaries", + -13.1497163772583 + ], + [ + "▁Xperia", + -13.149717330932615 + ], + [ + "▁Venetian", + -13.149718284606934 + ], + [ + "▁Hobby", + -13.149755477905272 + ], + [ + "Stage", + -13.1498441696167 + ], + [ + "▁Slate", + -13.149883270263672 + ], + [ + "▁Chronicles", + -13.149965286254885 + ], + [ + "▁Economist", + -13.150015830993652 + ], + [ + "provoking", + -13.150064468383787 + ], + [ + "▁2018-19", + -13.150116920471191 + ], + [ + "▁wrapper", + -13.15012550354004 + ], + [ + "Inspired", + -13.150165557861328 + ], + [ + "▁unveiling", + -13.150184631347656 + ], + [ + "▁RR", + -13.150296211242676 + ], + [ + "▁Lilly", + -13.150298118591309 + ], + [ + "▁rafting", + -13.15045928955078 + ], + [ + "▁Detection", + -13.150492668151855 + ], + [ + "▁motel", + -13.15054988861084 + ], + [ + "mina", + -13.15060043334961 + ], + [ + "▁Handling", + -13.150629997253418 + ], + [ + "005", + -13.150799751281738 + ], + [ + "▁nerd", + -13.150809288024902 + ], + [ + "▁blah", + -13.150834083557127 + ], + [ + "▁eradicate", + -13.15087604522705 + ], + [ + "▁Vodafone", + -13.150914192199709 + ], + [ + "▁Liberia", + -13.15096378326416 + ], + [ + "Santa", + -13.15096950531006 + ], + [ + "▁Commissioners", + -13.150997161865234 + ], + [ + "2.4", + -13.151008605957031 + ], + [ + "▁spiced", + -13.151089668273926 + ], + [ + "▁Respect", + -13.151095390319824 + ], + [ + "▁Suzanne", + -13.15135097503662 + ], + [ + "▁Vis", + -13.15138053894043 + ], + [ + "▁menopause", + -13.151476860046388 + ], + [ + "▁turbulent", + -13.151476860046388 + ], + [ + "▁Sculpture", + -13.151477813720703 + ], + [ + "▁Jensen", + -13.15149211883545 + ], + [ + "▁Vor", + -13.151625633239746 + ], + [ + "▁salute", + -13.15168571472168 + ], + [ + "MBA", + -13.15178394317627 + ], + [ + "Bet", + -13.151803970336914 + ], + [ + "▁conceded", + -13.151914596557615 + ], + [ + "▁$21", + -13.1519193649292 + ], + [ + 
"▁ridden", + -13.151999473571776 + ], + [ + "history", + -13.152164459228516 + ], + [ + "▁Gomez", + -13.15217113494873 + ], + [ + "▁Yorker", + -13.15236473083496 + ], + [ + "▁solicit", + -13.152679443359377 + ], + [ + "▁fortunately", + -13.152740478515623 + ], + [ + "▁Pharmaceuticals", + -13.152776718139648 + ], + [ + "▁lacquer", + -13.152843475341797 + ], + [ + "▁seedlings", + -13.15302562713623 + ], + [ + "▁Riviera", + -13.153240203857422 + ], + [ + "▁intangible", + -13.153240203857422 + ], + [ + "Secure", + -13.153346061706545 + ], + [ + "▁aligning", + -13.153386116027832 + ], + [ + "general", + -13.153457641601562 + ], + [ + "▁AX", + -13.153499603271484 + ], + [ + "▁Router", + -13.153531074523926 + ], + [ + "UPDATE", + -13.153557777404783 + ], + [ + "pped", + -13.153719902038574 + ], + [ + "▁greener", + -13.153722763061523 + ], + [ + "▁halo", + -13.153731346130373 + ], + [ + "▁touted", + -13.153733253479004 + ], + [ + "LAN", + -13.153759956359863 + ], + [ + "▁Bard", + -13.153814315795898 + ], + [ + "earned", + -13.153825759887695 + ], + [ + "children", + -13.153857231140137 + ], + [ + "▁walkway", + -13.15402603149414 + ], + [ + "▁Vampire", + -13.154133796691896 + ], + [ + "▁corps", + -13.15414810180664 + ], + [ + "▁Twelve", + -13.15424346923828 + ], + [ + "▁swag", + -13.15430736541748 + ], + [ + "nit", + -13.154315948486328 + ], + [ + "▁picky", + -13.154394149780272 + ], + [ + "▁fave", + -13.154440879821776 + ], + [ + "ogen", + -13.154474258422852 + ], + [ + "▁Artisan", + -13.154483795166016 + ], + [ + "bt", + -13.154559135437012 + ], + [ + "▁Gail", + -13.154581069946287 + ], + [ + "▁Jessie", + -13.154617309570312 + ], + [ + "▁Malawi", + -13.154706001281738 + ], + [ + "▁Ein", + -13.1547212600708 + ], + [ + "▁Interiors", + -13.154726028442385 + ], + [ + "▁Apparel", + -13.154757499694824 + ], + [ + "▁Brave", + -13.154791831970217 + ], + [ + "▁Fleming", + -13.154953956604004 + ], + [ + "▁Mozart", + -13.155021667480469 + ], + [ + "nova", + -13.155035018920898 + ], + [ + "▁boredom", + -13.155041694641112 + ], + [ + "▁(60", + -13.155056953430176 + ], + [ + "▁preparedness", + -13.15510368347168 + ], + [ + "▁Becker", + -13.155393600463867 + ], + [ + "▁Techno", + -13.155403137207031 + ], + [ + "▁armies", + -13.155444145202637 + ], + [ + "▁Qualified", + -13.15557861328125 + ], + [ + "always", + -13.155582427978516 + ], + [ + "▁academically", + -13.155648231506348 + ], + [ + "▁Sheep", + -13.155653953552246 + ], + [ + "▁supple", + -13.155806541442873 + ], + [ + "▁Nazis", + -13.155827522277832 + ], + [ + "▁Flame", + -13.15587043762207 + ], + [ + "▁prerequisite", + -13.15588092803955 + ], + [ + "Copy", + -13.155888557434082 + ], + [ + "abad", + -13.155922889709473 + ], + [ + "▁Countertops", + -13.155933380126951 + ], + [ + "▁airing", + -13.156217575073242 + ], + [ + "▁MINI", + -13.15622901916504 + ], + [ + "▁Caravan", + -13.156235694885254 + ], + [ + "▁JE", + -13.15642547607422 + ], + [ + "▁Sociology", + -13.15644073486328 + ], + [ + "inches", + -13.156534194946287 + ], + [ + "▁Nag", + -13.15661334991455 + ], + [ + "▁1860", + -13.156756401062012 + ], + [ + "▁Essays", + -13.156758308410645 + ], + [ + "▁classify", + -13.15676498413086 + ], + [ + "▁Auditorium", + -13.156786918640137 + ], + [ + "▁metropolis", + -13.156786918640137 + ], + [ + "▁Influence", + -13.156793594360352 + ], + [ + "▁Uttar", + -13.15682888031006 + ], + [ + "ORE", + -13.156829833984377 + ], + [ + "▁supplementation", + -13.157033920288086 + ], + [ + "▁mountainous", + -13.157038688659668 + ], + [ + "109", + -13.157050132751465 + ], + 
[ + "1.6", + -13.157100677490234 + ], + [ + "▁revamped", + -13.157155990600586 + ], + [ + "▁dec", + -13.157170295715332 + ], + [ + "▁Friendship", + -13.157177925109863 + ], + [ + "sector", + -13.157224655151367 + ], + [ + "TAL", + -13.157233238220217 + ], + [ + "excluding", + -13.157316207885742 + ], + [ + "density", + -13.157325744628906 + ], + [ + "gene", + -13.157340049743652 + ], + [ + "▁screams", + -13.157344818115234 + ], + [ + "▁bouquets", + -13.157462120056152 + ], + [ + "mentioned", + -13.157464981079102 + ], + [ + "▁mortal", + -13.157519340515137 + ], + [ + "▁Basel", + -13.157654762268066 + ], + [ + "▁ti", + -13.157933235168455 + ], + [ + "▁Burma", + -13.157950401306152 + ], + [ + "▁marched", + -13.15796184539795 + ], + [ + "account", + -13.15805435180664 + ], + [ + "ckle", + -13.158122062683104 + ], + [ + "GER", + -13.158238410949709 + ], + [ + "▁lug", + -13.158291816711426 + ], + [ + "▁virtues", + -13.15829849243164 + ], + [ + "▁carport", + -13.15830421447754 + ], + [ + "▁Tomato", + -13.15837574005127 + ], + [ + "Sen", + -13.158439636230469 + ], + [ + "nga", + -13.158480644226074 + ], + [ + "▁grime", + -13.15849494934082 + ], + [ + "▁climax", + -13.158552169799805 + ], + [ + "experience", + -13.158615112304688 + ], + [ + "▁succulents", + -13.15864372253418 + ], + [ + "▁riches", + -13.158796310424805 + ], + [ + "rable", + -13.15879726409912 + ], + [ + "▁regimes", + -13.158856391906738 + ], + [ + "▁quart", + -13.158864974975586 + ], + [ + "-36", + -13.158930778503418 + ], + [ + "▁Stockton", + -13.15895175933838 + ], + [ + "▁hardy", + -13.158980369567873 + ], + [ + "claim", + -13.159058570861816 + ], + [ + "▁widen", + -13.159181594848633 + ], + [ + "authored", + -13.159235954284668 + ], + [ + "▁perched", + -13.159250259399414 + ], + [ + "enburg", + -13.159256935119627 + ], + [ + "eren", + -13.159332275390623 + ], + [ + "ologists", + -13.159343719482422 + ], + [ + "▁AH", + -13.159405708312988 + ], + [ + "▁Leap", + -13.159441947937012 + ], + [ + "▁Heated", + -13.159465789794922 + ], + [ + "▁Amelia", + -13.159642219543455 + ], + [ + "▁AMP", + -13.159658432006836 + ], + [ + "▁Kho", + -13.159683227539062 + ], + [ + "▁markup", + -13.159794807434082 + ], + [ + "Fri", + -13.159829139709473 + ], + [ + "▁motorway", + -13.1599760055542 + ], + [ + "▁fascia", + -13.16015911102295 + ], + [ + "▁Wichita", + -13.16025161743164 + ], + [ + "▁Cinderella", + -13.160290718078612 + ], + [ + "▁restless", + -13.160307884216309 + ], + [ + "▁Dundee", + -13.16033172607422 + ], + [ + "▁Stafford", + -13.16036891937256 + ], + [ + "joy", + -13.160375595092772 + ], + [ + "▁Yellowstone", + -13.160415649414062 + ], + [ + "▁Bha", + -13.160433769226074 + ], + [ + "▁microbes", + -13.160444259643556 + ], + [ + "▁thinning", + -13.160463333129885 + ], + [ + "▁hoops", + -13.160642623901367 + ], + [ + "▁Technique", + -13.160694122314451 + ], + [ + "▁mourning", + -13.16071319580078 + ], + [ + "▁spelled", + -13.160736083984377 + ], + [ + "▁ACL", + -13.160846710205078 + ], + [ + "▁scouting", + -13.160850524902344 + ], + [ + "tico", + -13.16085720062256 + ], + [ + "▁entail", + -13.160918235778809 + ], + [ + "Already", + -13.160971641540527 + ], + [ + "▁bur", + -13.16098976135254 + ], + [ + "▁sprinkler", + -13.16111660003662 + ], + [ + "▁Admiral", + -13.16115951538086 + ], + [ + "▁Literacy", + -13.161182403564451 + ], + [ + "▁PU", + -13.161327362060549 + ], + [ + "hala", + -13.161330223083496 + ], + [ + "▁oysters", + -13.161426544189451 + ], + [ + "▁fra", + -13.161484718322754 + ], + [ + "▁Tao", + -13.161510467529297 + ], + [ + 
"▁alerted", + -13.16156768798828 + ], + [ + "▁Ama", + -13.16158390045166 + ], + [ + "▁trainee", + -13.161606788635254 + ], + [ + "▁adverts", + -13.161653518676758 + ], + [ + "yd", + -13.16166877746582 + ], + [ + "moon", + -13.1616792678833 + ], + [ + "nde", + -13.16169548034668 + ], + [ + "▁Boyle", + -13.161696434020996 + ], + [ + "bby", + -13.161705017089844 + ], + [ + "Jon", + -13.161794662475586 + ], + [ + "wait", + -13.1618013381958 + ], + [ + "▁301", + -13.161822319030762 + ], + [ + "▁poorer", + -13.161871910095217 + ], + [ + "finals", + -13.161947250366213 + ], + [ + "Founder", + -13.162034034729004 + ], + [ + "▁Steak", + -13.162104606628418 + ], + [ + "▁Jacuzzi", + -13.162105560302734 + ], + [ + "▁hygienic", + -13.162105560302734 + ], + [ + "▁slammed", + -13.162115097045898 + ], + [ + "▁reassurance", + -13.162139892578123 + ], + [ + "▁haunt", + -13.16215705871582 + ], + [ + "225", + -13.162226676940918 + ], + [ + "▁supplementary", + -13.162229537963867 + ], + [ + "▁Clyde", + -13.162296295166016 + ], + [ + "uo", + -13.162308692932127 + ], + [ + "▁1908", + -13.162396430969238 + ], + [ + "▁Citrus", + -13.162470817565918 + ], + [ + "▁Kraft", + -13.162479400634766 + ], + [ + "▁Waterproof", + -13.162500381469728 + ], + [ + "▁competitively", + -13.16262149810791 + ], + [ + "0:", + -13.16264533996582 + ], + [ + "▁HUD", + -13.16264820098877 + ], + [ + "ARC", + -13.162668228149414 + ], + [ + "▁Cases", + -13.162691116333008 + ], + [ + "uke", + -13.162696838378906 + ], + [ + "ced", + -13.162734985351562 + ], + [ + "▁coined", + -13.162781715393066 + ], + [ + "▁Developed", + -13.162818908691406 + ], + [ + "Saint", + -13.162859916687012 + ], + [ + "▁Asphalt", + -13.16286849975586 + ], + [ + "-2000", + -13.162886619567873 + ], + [ + "rier", + -13.16291046142578 + ], + [ + "▁elicit", + -13.162967681884766 + ], + [ + "▁Bail", + -13.162994384765623 + ], + [ + "▁Lowell", + -13.163097381591797 + ], + [ + "▁Deadline", + -13.163171768188477 + ], + [ + "▁$29", + -13.163179397583008 + ], + [ + "▁persuaded", + -13.163225173950195 + ], + [ + "▁Chattanooga", + -13.163325309753418 + ], + [ + "lav", + -13.163461685180664 + ], + [ + "410", + -13.163525581359863 + ], + [ + "▁hangers", + -13.163555145263672 + ], + [ + "4.1", + -13.163763046264648 + ], + [ + "▁Gazette", + -13.163783073425291 + ], + [ + "▁Completely", + -13.163799285888672 + ], + [ + "▁warranted", + -13.163814544677734 + ], + [ + "▁scarcity", + -13.16390323638916 + ], + [ + "▁totaled", + -13.16390323638916 + ], + [ + "▁deportation", + -13.163920402526855 + ], + [ + "▁vivo", + -13.163985252380373 + ], + [ + "laden", + -13.1641206741333 + ], + [ + "▁goodwill", + -13.164141654968262 + ], + [ + "▁tease", + -13.164142608642578 + ], + [ + "DER", + -13.164145469665527 + ], + [ + "▁Collier", + -13.164179801940918 + ], + [ + "▁iPhones", + -13.164319038391112 + ], + [ + "▁meadow", + -13.16433048248291 + ], + [ + "▁casually", + -13.164393424987791 + ], + [ + "▁Husband", + -13.16444206237793 + ], + [ + "▁Lack", + -13.164493560791016 + ], + [ + "▁clog", + -13.16465663909912 + ], + [ + "▁Snyder", + -13.164746284484863 + ], + [ + "▁worsen", + -13.164772033691406 + ], + [ + "treated", + -13.164828300476074 + ], + [ + "▁133", + -13.16484832763672 + ], + [ + "Glass", + -13.164850234985352 + ], + [ + "▁5.2", + -13.164913177490234 + ], + [ + "Mal", + -13.165205001831056 + ], + [ + "Arts", + -13.1652250289917 + ], + [ + "▁gifting", + -13.165292739868164 + ], + [ + "Bel", + -13.165340423583984 + ], + [ + "urge", + -13.16542148590088 + ], + [ + "▁1913", + 
-13.16543197631836 + ], + [ + "ACC", + -13.165441513061523 + ], + [ + "Del", + -13.16547393798828 + ], + [ + "▁sampled", + -13.165562629699709 + ], + [ + "▁3-2", + -13.165581703186035 + ], + [ + "▁Orion", + -13.165618896484377 + ], + [ + "▁Inquiry", + -13.165674209594728 + ], + [ + "▁monsoon", + -13.16567611694336 + ], + [ + "▁hiatus", + -13.165678977966309 + ], + [ + "Comment", + -13.16568660736084 + ], + [ + "▁hamper", + -13.165804862976074 + ], + [ + "▁MF", + -13.165874481201172 + ], + [ + "▁melon", + -13.16594409942627 + ], + [ + "▁bookshelf", + -13.165953636169434 + ], + [ + "▁GTA", + -13.166072845458984 + ], + [ + "▁centred", + -13.166136741638184 + ], + [ + "▁ironing", + -13.166139602661133 + ], + [ + "▁asserted", + -13.166166305541992 + ], + [ + "▁10.5", + -13.16617488861084 + ], + [ + "tten", + -13.166205406188965 + ], + [ + "rule", + -13.16627025604248 + ], + [ + "▁tinted", + -13.16628360748291 + ], + [ + "▁Silence", + -13.16636562347412 + ], + [ + "▁Swap", + -13.166387557983398 + ], + [ + "▁Randolph", + -13.166400909423828 + ], + [ + "sdale", + -13.166414260864258 + ], + [ + "▁Pumps", + -13.166433334350586 + ], + [ + "▁Schu", + -13.166440963745115 + ], + [ + "▁refrigerators", + -13.166561126708984 + ], + [ + "▁proprietor", + -13.166571617126465 + ], + [ + "nto", + -13.166681289672852 + ], + [ + "Express", + -13.166683197021484 + ], + [ + "235", + -13.16676425933838 + ], + [ + "▁resonates", + -13.166821479797363 + ], + [ + "sites", + -13.166866302490234 + ], + [ + "▁troop", + -13.166914939880373 + ], + [ + "▁ji", + -13.166958808898926 + ], + [ + "▁shaken", + -13.166967391967772 + ], + [ + "▁cheats", + -13.16700839996338 + ], + [ + "brick", + -13.167021751403809 + ], + [ + "▁avatar", + -13.16702365875244 + ], + [ + "▁softly", + -13.167068481445312 + ], + [ + "▁Meredith", + -13.1671142578125 + ], + [ + "uv", + -13.167166709899902 + ], + [ + "1/2", + -13.167291641235352 + ], + [ + "▁Parisian", + -13.16734790802002 + ], + [ + "wang", + -13.167415618896484 + ], + [ + "▁cortisol", + -13.167463302612305 + ], + [ + "▁rallied", + -13.167463302612305 + ], + [ + "▁remembrance", + -13.167463302612305 + ], + [ + "▁Tek", + -13.167486190795898 + ], + [ + "▁Oyster", + -13.167522430419922 + ], + [ + "▁Cecil", + -13.167534828186035 + ], + [ + "▁banning", + -13.167542457580566 + ], + [ + "▁eateries", + -13.1675443649292 + ], + [ + "▁Workforce", + -13.167548179626465 + ], + [ + "▁Soldier", + -13.167584419250488 + ], + [ + "otti", + -13.167632102966309 + ], + [ + "▁mediator", + -13.167729377746582 + ], + [ + "▁fortunes", + -13.167747497558594 + ], + [ + "▁EQ", + -13.167779922485352 + ], + [ + "img", + -13.167823791503906 + ], + [ + "▁knockout", + -13.167885780334473 + ], + [ + "▁investigates", + -13.167914390563965 + ], + [ + "▁docs", + -13.168134689331056 + ], + [ + "▁CSV", + -13.16817569732666 + ], + [ + "▁Fre", + -13.168246269226074 + ], + [ + "▁fearless", + -13.168316841125488 + ], + [ + "▁AE", + -13.168390274047852 + ], + [ + "▁diffuser", + -13.168402671813965 + ], + [ + "▁shouted", + -13.16843032836914 + ], + [ + "▁debtor", + -13.168499946594238 + ], + [ + "▁Mash", + -13.168527603149414 + ], + [ + "114", + -13.168627738952637 + ], + [ + "▁facelift", + -13.168635368347168 + ], + [ + "▁ankles", + -13.16872501373291 + ], + [ + "▁Accord", + -13.168758392333984 + ], + [ + "ord", + -13.168824195861816 + ], + [ + "▁tolerated", + -13.168845176696776 + ], + [ + "▁MJ", + -13.16889190673828 + ], + [ + "terra", + -13.168972969055176 + ], + [ + "Picture", + -13.168973922729492 + ], + [ + "▁fundraisers", + 
-13.169028282165527 + ], + [ + "▁Philharmonic", + -13.169255256652832 + ], + [ + "▁appreciative", + -13.169255256652832 + ], + [ + "▁artillery", + -13.169255256652832 + ], + [ + "▁glaucoma", + -13.169256210327148 + ], + [ + "▁Phuket", + -13.169257164001465 + ], + [ + "▁Barbados", + -13.169259071350098 + ], + [ + "▁blaming", + -13.169260025024414 + ], + [ + "▁neuroscience", + -13.169266700744627 + ], + [ + "▁avant", + -13.169414520263672 + ], + [ + "▁Psalm", + -13.169434547424316 + ], + [ + "▁Volt", + -13.169442176818848 + ], + [ + "▁detour", + -13.169512748718262 + ], + [ + "score", + -13.169585227966309 + ], + [ + "▁toned", + -13.169732093811035 + ], + [ + "▁Pyramid", + -13.1697416305542 + ], + [ + "▁Darkness", + -13.169756889343262 + ], + [ + "▁Andhra", + -13.169797897338867 + ], + [ + "▁Rou", + -13.16983699798584 + ], + [ + "▁tenor", + -13.170010566711426 + ], + [ + "▁unleash", + -13.17001724243164 + ], + [ + "▁injunction", + -13.170065879821776 + ], + [ + "technology", + -13.170074462890623 + ], + [ + "▁Assignment", + -13.170201301574709 + ], + [ + "▁(£", + -13.170208930969238 + ], + [ + "▁Samples", + -13.170218467712402 + ], + [ + "▁bungalow", + -13.17025661468506 + ], + [ + "▁saver", + -13.170342445373535 + ], + [ + "Visual", + -13.170351028442385 + ], + [ + "▁commissioners", + -13.170352935791016 + ], + [ + "▁Fuse", + -13.170421600341797 + ], + [ + "▁skiers", + -13.170494079589844 + ], + [ + "▁Kiev", + -13.170499801635742 + ], + [ + "▁docks", + -13.170729637145996 + ], + [ + "▁Institutes", + -13.170769691467283 + ], + [ + "▁carton", + -13.170785903930664 + ], + [ + "Australian", + -13.170790672302246 + ], + [ + "▁excessively", + -13.17082691192627 + ], + [ + "icum", + -13.171010971069336 + ], + [ + "▁Rosemary", + -13.171021461486816 + ], + [ + "sz", + -13.171069145202637 + ], + [ + "▁ions", + -13.171102523803713 + ], + [ + "DX", + -13.17113971710205 + ], + [ + "▁pigments", + -13.171204566955566 + ], + [ + "lost", + -13.171286582946776 + ], + [ + "record", + -13.171514511108398 + ], + [ + "xml", + -13.171517372131348 + ], + [ + "▁guardians", + -13.171629905700684 + ], + [ + "▁accented", + -13.171630859375 + ], + [ + "▁ale", + -13.17169952392578 + ], + [ + "▁whisper", + -13.171831130981444 + ], + [ + "▁fruition", + -13.171916007995604 + ], + [ + "▁Rig", + -13.171916961669922 + ], + [ + "▁Persons", + -13.171939849853516 + ], + [ + "▁Typical", + -13.171955108642578 + ], + [ + "pil", + -13.171982765197754 + ], + [ + "IoT", + -13.17202854156494 + ], + [ + "▁storey", + -13.172082901000977 + ], + [ + "▁pesticide", + -13.172088623046877 + ], + [ + "▁Divide", + -13.172106742858888 + ], + [ + "▁206", + -13.172111511230469 + ], + [ + "esse", + -13.172112464904783 + ], + [ + "silver", + -13.172118186950684 + ], + [ + "LN", + -13.172283172607422 + ], + [ + "Stand", + -13.172319412231444 + ], + [ + "▁rowing", + -13.172459602355955 + ], + [ + "▁LU", + -13.172481536865234 + ], + [ + "ppi", + -13.172621726989746 + ], + [ + "▁kms", + -13.17274284362793 + ], + [ + "▁$23", + -13.17282009124756 + ], + [ + "▁Chardonnay", + -13.17284870147705 + ], + [ + "▁expiry", + -13.17284870147705 + ], + [ + "▁reclining", + -13.172852516174316 + ], + [ + "▁astrology", + -13.172855377197266 + ], + [ + "▁cladding", + -13.172858238220217 + ], + [ + "▁Expedition", + -13.172860145568848 + ], + [ + "▁trough", + -13.17288875579834 + ], + [ + "▁poisonous", + -13.172924041748049 + ], + [ + "dol", + -13.172995567321776 + ], + [ + "▁cigars", + -13.173016548156738 + ], + [ + "▁Mastercard", + -13.173032760620115 + ], + [ + 
"▁Anita", + -13.173099517822266 + ], + [ + "4\"", + -13.173273086547852 + ], + [ + "▁Fiona", + -13.173276901245115 + ], + [ + "▁constraint", + -13.173311233520508 + ], + [ + "▁Invitations", + -13.173324584960938 + ], + [ + "watering", + -13.173361778259276 + ], + [ + "▁burdens", + -13.173504829406738 + ], + [ + "scan", + -13.17354965209961 + ], + [ + "▁1100", + -13.173603057861328 + ], + [ + "▁checkpoint", + -13.17362117767334 + ], + [ + "▁1984.", + -13.17370319366455 + ], + [ + "Atlantic", + -13.173806190490724 + ], + [ + "lander", + -13.173851013183594 + ], + [ + "▁adversely", + -13.173916816711426 + ], + [ + "dec", + -13.173917770385742 + ], + [ + "▁newcomer", + -13.173954963684082 + ], + [ + "▁drifting", + -13.173993110656738 + ], + [ + "002", + -13.174002647399902 + ], + [ + "▁THEY", + -13.174005508422852 + ], + [ + "▁Spike", + -13.17404556274414 + ], + [ + "4-2", + -13.174047470092772 + ], + [ + "▁reconcile", + -13.174049377441406 + ], + [ + "▁Kle", + -13.174073219299316 + ], + [ + "▁oncology", + -13.174092292785645 + ], + [ + "nak", + -13.174273490905762 + ], + [ + "▁microbial", + -13.174283981323242 + ], + [ + "▁Millennials", + -13.174285888671877 + ], + [ + "▁Frames", + -13.174386024475098 + ], + [ + "UE", + -13.174405097961426 + ], + [ + "▁Valle", + -13.174410820007324 + ], + [ + "▁Says", + -13.174500465393066 + ], + [ + "▁lil", + -13.174527168273926 + ], + [ + "▁Fowler", + -13.174595832824709 + ], + [ + "▁viewpoints", + -13.174605369567873 + ], + [ + "ugu", + -13.174607276916504 + ], + [ + "▁Cain", + -13.174626350402832 + ], + [ + "▁microscopic", + -13.174659729003906 + ], + [ + "▁sinister", + -13.174671173095703 + ], + [ + "ners", + -13.17475414276123 + ], + [ + "brush", + -13.174810409545898 + ], + [ + "▁scooters", + -13.174944877624512 + ], + [ + "▁Infinite", + -13.17495059967041 + ], + [ + "▁Kr", + -13.17495822906494 + ], + [ + "▁Muse", + -13.174962997436523 + ], + [ + "▁Jasper", + -13.175018310546877 + ], + [ + "▁conveying", + -13.17504596710205 + ], + [ + "▁Pipeline", + -13.175366401672363 + ], + [ + "▁antennas", + -13.175491333007812 + ], + [ + "▁Steele", + -13.175548553466797 + ], + [ + "xo", + -13.175562858581545 + ], + [ + "▁meds", + -13.175670623779297 + ], + [ + "dish", + -13.17568016052246 + ], + [ + "-200", + -13.175698280334473 + ], + [ + "fra", + -13.17571258544922 + ], + [ + "▁shepherd", + -13.175722122192385 + ], + [ + "annual", + -13.1757230758667 + ], + [ + "▁Showing", + -13.175745964050291 + ], + [ + "▁129", + -13.175776481628418 + ], + [ + "▁swimmer", + -13.175820350646973 + ], + [ + "▁Pale", + -13.175904273986816 + ], + [ + "dit", + -13.17593479156494 + ], + [ + "▁Pos", + -13.176011085510254 + ], + [ + "▁articulated", + -13.176011085510254 + ], + [ + "▁Closed", + -13.176012992858888 + ], + [ + "▁Bose", + -13.176060676574709 + ], + [ + "▁Boone", + -13.176186561584473 + ], + [ + "▁Kinder", + -13.176209449768066 + ], + [ + "▁Centennial", + -13.17624568939209 + ], + [ + "▁linux", + -13.176257133483888 + ], + [ + "▁Genius", + -13.17629623413086 + ], + [ + "▁Newfoundland", + -13.1764554977417 + ], + [ + "▁imitation", + -13.176465034484863 + ], + [ + "lm", + -13.176493644714355 + ], + [ + "borg", + -13.176512718200684 + ], + [ + "▁frightened", + -13.176608085632324 + ], + [ + "▁Alive", + -13.17668914794922 + ], + [ + "▁Invite", + -13.176730155944824 + ], + [ + "▁echoes", + -13.176750183105469 + ], + [ + "▁Malik", + -13.176751136779783 + ], + [ + "▁handwritten", + -13.176979064941406 + ], + [ + "raz", + -13.176994323730469 + ], + [ + "▁coolant", + 
-13.17705249786377 + ], + [ + "Display", + -13.177275657653809 + ], + [ + "▁headlight", + -13.177408218383787 + ], + [ + "▁homicide", + -13.177545547485352 + ], + [ + "▁alum", + -13.17758846282959 + ], + [ + "▁Ei", + -13.177589416503906 + ], + [ + "▁characterize", + -13.177592277526855 + ], + [ + "ELL", + -13.17759895324707 + ], + [ + "ienne", + -13.17765998840332 + ], + [ + "▁shotgun", + -13.177748680114746 + ], + [ + "▁Presented", + -13.177807807922363 + ], + [ + "DG", + -13.177881240844728 + ], + [ + "gat", + -13.177971839904783 + ], + [ + "▁monarch", + -13.178008079528809 + ], + [ + "▁Serious", + -13.178043365478516 + ], + [ + "cri", + -13.178080558776855 + ], + [ + "▁predecessors", + -13.17813491821289 + ], + [ + "▁Streaming", + -13.178150177001951 + ], + [ + "▁stared", + -13.178277969360352 + ], + [ + "▁Swansea", + -13.178372383117676 + ], + [ + "phon", + -13.178461074829102 + ], + [ + "▁XT", + -13.178526878356934 + ], + [ + "▁cries", + -13.178537368774414 + ], + [ + "▁remarked", + -13.1788330078125 + ], + [ + "▁vectors", + -13.17883586883545 + ], + [ + "rex", + -13.178885459899902 + ], + [ + "▁cannon", + -13.178930282592772 + ], + [ + "▁kinetic", + -13.178990364074709 + ], + [ + "▁wagering", + -13.17900562286377 + ], + [ + "▁Cert", + -13.17905616760254 + ], + [ + "▁SPA", + -13.179062843322754 + ], + [ + "mn", + -13.17910861968994 + ], + [ + "▁Courier", + -13.179110527038574 + ], + [ + "▁Kang", + -13.17918586730957 + ], + [ + "producing", + -13.179218292236328 + ], + [ + "Rep", + -13.179222106933594 + ], + [ + "▁DVR", + -13.17926788330078 + ], + [ + "▁Keto", + -13.179304122924805 + ], + [ + "▁1986,", + -13.17940616607666 + ], + [ + "▁Worksheet", + -13.179436683654783 + ], + [ + "▁crease", + -13.179455757141112 + ], + [ + "▁Produce", + -13.17953872680664 + ], + [ + "▁gastric", + -13.179553985595703 + ], + [ + "▁DOWNLOAD", + -13.179587364196776 + ], + [ + "▁Saddle", + -13.1796875 + ], + [ + "EAR", + -13.179738998413086 + ], + [ + "▁salons", + -13.179765701293944 + ], + [ + "VIS", + -13.17977809906006 + ], + [ + "▁lanterns", + -13.179919242858888 + ], + [ + "▁205", + -13.179941177368164 + ], + [ + "▁Wen", + -13.18002223968506 + ], + [ + "▁gelatin", + -13.18006992340088 + ], + [ + "▁faculties", + -13.180075645446776 + ], + [ + "▁speculative", + -13.180075645446776 + ], + [ + "▁Leopard", + -13.180078506469728 + ], + [ + "▁plantations", + -13.180103302001951 + ], + [ + "▁insignificant", + -13.180145263671877 + ], + [ + "▁Cayman", + -13.180163383483888 + ], + [ + "▁fullness", + -13.180168151855469 + ], + [ + "▁hardships", + -13.1804838180542 + ], + [ + "wich", + -13.180489540100098 + ], + [ + "▁brows", + -13.180582046508787 + ], + [ + "▁backlog", + -13.180615425109863 + ], + [ + "▁dialysis", + -13.18068504333496 + ], + [ + "▁hospitalization", + -13.18071174621582 + ], + [ + "rina", + -13.180746078491213 + ], + [ + "▁Santos", + -13.180818557739258 + ], + [ + "▁transactional", + -13.180947303771973 + ], + [ + "▁chick", + -13.181075096130373 + ], + [ + "▁BOOK", + -13.181109428405762 + ], + [ + "▁Advocacy", + -13.181129455566406 + ], + [ + "▁Pai", + -13.181132316589355 + ], + [ + "▁Shar", + -13.181214332580566 + ], + [ + "YA", + -13.18122673034668 + ], + [ + "▁devise", + -13.181391716003418 + ], + [ + "▁linger", + -13.181425094604492 + ], + [ + "▁Lem", + -13.181451797485352 + ], + [ + "▁reseller", + -13.181471824645996 + ], + [ + "▁Bac", + -13.18151569366455 + ], + [ + "▁Sang", + -13.181535720825195 + ], + [ + "▁withholding", + -13.181546211242676 + ], + [ + "▁Rican", + -13.181610107421877 + ], 
+ [ + "▁Pencil", + -13.181767463684082 + ], + [ + "SIA", + -13.181840896606444 + ], + [ + "MV", + -13.181862831115724 + ], + [ + "▁Crimson", + -13.181890487670898 + ], + [ + "▁dynasty", + -13.181890487670898 + ], + [ + "▁rallies", + -13.181890487670898 + ], + [ + "▁Gurgaon", + -13.181892395019531 + ], + [ + "▁Terri", + -13.181941986083984 + ], + [ + "▁Osaka", + -13.181955337524414 + ], + [ + "▁Minn", + -13.181976318359377 + ], + [ + "cooked", + -13.18199634552002 + ], + [ + "▁tidal", + -13.18201732635498 + ], + [ + "▁symmetry", + -13.18210220336914 + ], + [ + "▁Trouble", + -13.182116508483888 + ], + [ + "▁Cai", + -13.182169914245604 + ], + [ + "▁scripting", + -13.182169914245604 + ], + [ + "▁vigorously", + -13.182214736938477 + ], + [ + "▁invade", + -13.182251930236816 + ], + [ + "▁Pala", + -13.182323455810549 + ], + [ + "▁notoriously", + -13.182343482971191 + ], + [ + "Late", + -13.182416915893556 + ], + [ + "▁Kou", + -13.18242359161377 + ], + [ + "▁Teeth", + -13.182456970214844 + ], + [ + "▁extracting", + -13.18246841430664 + ], + [ + "Fish", + -13.182528495788574 + ], + [ + "▁inherit", + -13.182598114013672 + ], + [ + "Mor", + -13.182629585266112 + ], + [ + "▁Woody", + -13.182662010192873 + ], + [ + "Room", + -13.182826042175291 + ], + [ + "▁Sinai", + -13.18295669555664 + ], + [ + "▁#7", + -13.183015823364258 + ], + [ + "▁limousine", + -13.183049201965332 + ], + [ + "▁upheld", + -13.18309497833252 + ], + [ + "▁Alphabet", + -13.183106422424316 + ], + [ + "poor", + -13.183149337768556 + ], + [ + "▁pervasive", + -13.18317985534668 + ], + [ + "SAN", + -13.183246612548828 + ], + [ + "▁repeats", + -13.183256149291992 + ], + [ + "▁11%", + -13.183327674865724 + ], + [ + "▁Charming", + -13.183349609375 + ], + [ + "▁Licensing", + -13.183353424072266 + ], + [ + "▁masterpieces", + -13.18345832824707 + ], + [ + "▁explosions", + -13.18360424041748 + ], + [ + "▁IVF", + -13.183609008789062 + ], + [ + "rov", + -13.18362045288086 + ], + [ + "▁adhesion", + -13.183709144592283 + ], + [ + "▁carcinoma", + -13.183709144592283 + ], + [ + "▁conduit", + -13.183709144592283 + ], + [ + "▁Karachi", + -13.183753967285156 + ], + [ + "190", + -13.183767318725586 + ], + [ + "▁harden", + -13.183771133422852 + ], + [ + "ALS", + -13.18377685546875 + ], + [ + "▁Hawkins", + -13.183853149414062 + ], + [ + "▁verifying", + -13.18388843536377 + ], + [ + "▁aspiration", + -13.183895111083984 + ], + [ + "emon", + -13.183916091918944 + ], + [ + "▁cardigan", + -13.183960914611816 + ], + [ + "▁hopping", + -13.18407917022705 + ], + [ + "weekly", + -13.184082984924316 + ], + [ + "99.", + -13.184088706970217 + ], + [ + "Grant", + -13.184101104736328 + ], + [ + "▁tufted", + -13.184259414672852 + ], + [ + "burning", + -13.184290885925291 + ], + [ + "▁endlessly", + -13.18434238433838 + ], + [ + "ked", + -13.184383392333984 + ], + [ + "▁sockets", + -13.18438720703125 + ], + [ + "▁rumours", + -13.184727668762209 + ], + [ + "▁passionately", + -13.184844970703123 + ], + [ + "MER", + -13.184929847717283 + ], + [ + "Kitchen", + -13.184941291809082 + ], + [ + "-47", + -13.184975624084473 + ], + [ + "▁1985.", + -13.18511199951172 + ], + [ + "▁cc", + -13.185115814208984 + ], + [ + "▁Dealers", + -13.185126304626465 + ], + [ + "moral", + -13.18517780303955 + ], + [ + "moto", + -13.185178756713867 + ], + [ + "▁german", + -13.18520164489746 + ], + [ + "Gate", + -13.18520736694336 + ], + [ + "TECH", + -13.185208320617676 + ], + [ + "▁(24", + -13.185211181640623 + ], + [ + "5).", + -13.18533420562744 + ], + [ + "▁Interaction", + -13.18537712097168 + ], + 
[ + "▁waiter", + -13.185397148132324 + ], + [ + "holding", + -13.185407638549805 + ], + [ + "surface", + -13.18544101715088 + ], + [ + "1-0", + -13.1854887008667 + ], + [ + "▁ache", + -13.185519218444824 + ], + [ + "▁reindeer", + -13.18553066253662 + ], + [ + "▁reversing", + -13.18553066253662 + ], + [ + "▁Baroque", + -13.185546875 + ], + [ + "▁punched", + -13.185577392578123 + ], + [ + "▁(2012)", + -13.185585021972656 + ], + [ + "NM", + -13.18564510345459 + ], + [ + "▁Tuesdays", + -13.18576717376709 + ], + [ + "▁conferencing", + -13.18578815460205 + ], + [ + "poly", + -13.185839653015137 + ], + [ + "▁KU", + -13.185846328735352 + ], + [ + "ри", + -13.185863494873049 + ], + [ + "▁picket", + -13.185869216918944 + ], + [ + "▁motivates", + -13.185967445373535 + ], + [ + "nea", + -13.186092376708984 + ], + [ + "▁Grape", + -13.186260223388672 + ], + [ + "select", + -13.186286926269531 + ], + [ + "▁tweaking", + -13.18637752532959 + ], + [ + "itas", + -13.186556816101074 + ], + [ + "Eric", + -13.186637878417969 + ], + [ + "AME", + -13.186653137207031 + ], + [ + "▁Sellers", + -13.186717987060549 + ], + [ + "▁kiosk", + -13.186741828918455 + ], + [ + "ease", + -13.186766624450684 + ], + [ + "Death", + -13.186817169189451 + ], + [ + "bathroom", + -13.186835289001465 + ], + [ + "exclusive", + -13.186840057373049 + ], + [ + "dream", + -13.186856269836426 + ], + [ + "Bra", + -13.186920166015623 + ], + [ + "▁viola", + -13.186972618103027 + ], + [ + "▁Touring", + -13.18697738647461 + ], + [ + "▁Financing", + -13.187003135681152 + ], + [ + "aran", + -13.18704319000244 + ], + [ + "▁NSA", + -13.18706512451172 + ], + [ + "▁queer", + -13.187071800231934 + ], + [ + "▁Cle", + -13.18707275390625 + ], + [ + "▁woes", + -13.187100410461426 + ], + [ + "▁Burr", + -13.18714427947998 + ], + [ + "▁Scriptures", + -13.18716812133789 + ], + [ + "▁streamed", + -13.187228202819824 + ], + [ + "$2", + -13.187236785888672 + ], + [ + "270", + -13.187312126159668 + ], + [ + "▁Whirlpool", + -13.187355041503906 + ], + [ + "▁practising", + -13.187355041503906 + ], + [ + "▁yoghurt", + -13.187355041503906 + ], + [ + "▁robbed", + -13.187370300292969 + ], + [ + "▁Reddit", + -13.187410354614258 + ], + [ + "▁Lydia", + -13.18752098083496 + ], + [ + "▁lurking", + -13.187552452087402 + ], + [ + "▁REALTORS", + -13.187562942504885 + ], + [ + "▁shootings", + -13.187570571899414 + ], + [ + "3000", + -13.187591552734377 + ], + [ + "▁29.", + -13.18760871887207 + ], + [ + "▁torso", + -13.187631607055664 + ], + [ + "▁Michele", + -13.187856674194336 + ], + [ + "▁Meals", + -13.187880516052246 + ], + [ + "▁Serena", + -13.187884330749512 + ], + [ + "LEY", + -13.188068389892578 + ], + [ + "▁---", + -13.188127517700195 + ], + [ + "Previously", + -13.18822956085205 + ], + [ + "▁Ober", + -13.188264846801758 + ], + [ + "▁IMF", + -13.188284873962402 + ], + [ + "▁retainer", + -13.18828582763672 + ], + [ + "▁embarking", + -13.18840503692627 + ], + [ + "▁Availability", + -13.188426971435549 + ], + [ + "▁Atlantis", + -13.188521385192873 + ], + [ + "▁manicure", + -13.188604354858398 + ], + [ + "Except", + -13.188611030578612 + ], + [ + "290", + -13.18861484527588 + ], + [ + "▁Marian", + -13.1886625289917 + ], + [ + "Chair", + -13.188750267028809 + ], + [ + "Initially", + -13.188835144042969 + ], + [ + "▁overflowing", + -13.188854217529297 + ], + [ + "rush", + -13.18887996673584 + ], + [ + "tub", + -13.189016342163086 + ], + [ + "2-2", + -13.189042091369627 + ], + [ + "▁Transactions", + -13.189058303833008 + ], + [ + "▁blazing", + -13.189186096191406 + ], + [ + 
"▁uranium", + -13.189191818237305 + ], + [ + "▁Asheville", + -13.189194679260254 + ], + [ + "▁Mozilla", + -13.189203262329102 + ], + [ + "nose", + -13.18925952911377 + ], + [ + "aha", + -13.189329147338867 + ], + [ + "▁203", + -13.189401626586914 + ], + [ + "▁HOT", + -13.189459800720217 + ], + [ + "ryl", + -13.189468383789062 + ], + [ + "▁0.01", + -13.189489364624023 + ], + [ + "▁plc", + -13.189537048339844 + ], + [ + "3.0", + -13.18957805633545 + ], + [ + "▁Broadcasting", + -13.189655303955078 + ], + [ + "▁Sleeping", + -13.189675331115724 + ], + [ + "▁Examiner", + -13.189704895019531 + ], + [ + "API", + -13.189722061157228 + ], + [ + "▁Denis", + -13.189777374267578 + ], + [ + "▁invaded", + -13.18979263305664 + ], + [ + "▁chased", + -13.189863204956056 + ], + [ + "▁Dirty", + -13.189903259277344 + ], + [ + "▁solicitation", + -13.18993091583252 + ], + [ + "▁researches", + -13.189937591552734 + ], + [ + "/25", + -13.189964294433594 + ], + [ + "▁Ped", + -13.190007209777832 + ], + [ + "1-3", + -13.190056800842283 + ], + [ + "▁deem", + -13.190074920654297 + ], + [ + "▁achieves", + -13.190105438232422 + ], + [ + "Offer", + -13.19019889831543 + ], + [ + "▁honed", + -13.19028377532959 + ], + [ + "▁ESL", + -13.190319061279297 + ], + [ + "▁occurrences", + -13.190354347229004 + ], + [ + "raising", + -13.190389633178713 + ], + [ + "▁disciple", + -13.190433502197266 + ], + [ + "▁CAGR", + -13.190531730651855 + ], + [ + "governmental", + -13.190546989440918 + ], + [ + "fal", + -13.19057559967041 + ], + [ + "▁collectible", + -13.190576553344728 + ], + [ + "▁bookmarks", + -13.190664291381836 + ], + [ + "▁organizes", + -13.190672874450684 + ], + [ + "▁Held", + -13.190751075744627 + ], + [ + "▁Bathrooms", + -13.190756797790527 + ], + [ + ":59", + -13.190765380859377 + ], + [ + "▁lent", + -13.190780639648438 + ], + [ + "sler", + -13.19081974029541 + ], + [ + "▁petitions", + -13.190842628479004 + ], + [ + "▁percentages", + -13.19084644317627 + ], + [ + "▁MSP", + -13.190868377685549 + ], + [ + "▁removals", + -13.190879821777344 + ], + [ + "▁vaccinations", + -13.191010475158691 + ], + [ + "▁osteoporosis", + -13.191015243530272 + ], + [ + "▁Diablo", + -13.191020011901855 + ], + [ + "▁devoid", + -13.191020965576172 + ], + [ + "▁flax", + -13.191035270690918 + ], + [ + "▁nourishment", + -13.191073417663574 + ], + [ + "▁Himalayan", + -13.191089630126951 + ], + [ + "▁Hanover", + -13.191116333007812 + ], + [ + "▁lemonade", + -13.191118240356444 + ], + [ + "▁UA", + -13.19113826751709 + ], + [ + "▁Fax", + -13.191176414489746 + ], + [ + "remember", + -13.19120979309082 + ], + [ + "▁CARE", + -13.191219329833984 + ], + [ + "jit", + -13.191298484802246 + ], + [ + "bread", + -13.191301345825195 + ], + [ + "▁nuanced", + -13.191417694091797 + ], + [ + "▁dispersed", + -13.191423416137695 + ], + [ + "▁snowboard", + -13.191458702087402 + ], + [ + "▁therein", + -13.191497802734377 + ], + [ + "▁7\"", + -13.191523551940918 + ], + [ + "wala", + -13.19163990020752 + ], + [ + "CES", + -13.191678047180176 + ], + [ + "tang", + -13.191683769226074 + ], + [ + "Char", + -13.191754341125488 + ], + [ + "Sh", + -13.191802978515623 + ], + [ + "Screen", + -13.191866874694824 + ], + [ + "bull", + -13.19187831878662 + ], + [ + "▁declares", + -13.191888809204102 + ], + [ + "▁Infant", + -13.191899299621582 + ], + [ + "▁pant", + -13.191903114318848 + ], + [ + "▁USS", + -13.192052841186523 + ], + [ + "▁Lor", + -13.192222595214844 + ], + [ + "▁messing", + -13.192276000976562 + ], + [ + "▁hob", + -13.19239330291748 + ], + [ + "Che", + -13.19241428375244 + 
], + [ + "▁Digest", + -13.192434310913086 + ], + [ + "fied", + -13.192453384399414 + ], + [ + "▁Angie", + -13.19246768951416 + ], + [ + "▁Lum", + -13.192471504211426 + ], + [ + "▁planters", + -13.19252872467041 + ], + [ + "▁Ariel", + -13.192543029785156 + ], + [ + "ет", + -13.192614555358888 + ], + [ + "vs", + -13.19265365600586 + ], + [ + "Manage", + -13.19277286529541 + ], + [ + "▁clown", + -13.192837715148926 + ], + [ + "▁Reservoir", + -13.192849159240724 + ], + [ + "▁mesmerizing", + -13.19285011291504 + ], + [ + "▁AdWords", + -13.192858695983888 + ], + [ + "fus", + -13.192950248718262 + ], + [ + "Hall", + -13.192962646484377 + ], + [ + "▁adaptations", + -13.192978858947754 + ], + [ + "▁Mansion", + -13.193013191223145 + ], + [ + "▁Waterfront", + -13.193033218383787 + ], + [ + "odor", + -13.193082809448242 + ], + [ + "▁Sac", + -13.19318675994873 + ], + [ + "▁contours", + -13.19348430633545 + ], + [ + "illi", + -13.193533897399902 + ], + [ + "▁Mayan", + -13.193540573120115 + ], + [ + "▁Toro", + -13.193727493286133 + ], + [ + "▁Reliable", + -13.193809509277344 + ], + [ + "▁Destiny", + -13.193829536437988 + ], + [ + "crest", + -13.1939058303833 + ], + [ + "▁PSP", + -13.193940162658691 + ], + [ + "▁honorable", + -13.19396686553955 + ], + [ + "▁Palma", + -13.193984985351562 + ], + [ + "▁24/7.", + -13.194003105163574 + ], + [ + "▁Larson", + -13.194056510925291 + ], + [ + "▁Tumblr", + -13.194153785705566 + ], + [ + "▁Therapist", + -13.1941556930542 + ], + [ + "▁Hamlet", + -13.194208145141602 + ], + [ + "bean", + -13.19431495666504 + ], + [ + "▁powering", + -13.19431972503662 + ], + [ + "character", + -13.194371223449709 + ], + [ + "260", + -13.19438362121582 + ], + [ + "Sample", + -13.19441032409668 + ], + [ + "tank", + -13.194512367248535 + ], + [ + "▁Fog", + -13.194523811340332 + ], + [ + "Johnson", + -13.19459629058838 + ], + [ + "Interestingly", + -13.19463062286377 + ], + [ + "▁reversible", + -13.194687843322754 + ], + [ + "▁TripAdvisor", + -13.194690704345703 + ], + [ + "▁psychotherapy", + -13.194692611694336 + ], + [ + "▁nutshell", + -13.19471263885498 + ], + [ + "▁victorious", + -13.19475269317627 + ], + [ + "▁seaweed", + -13.1947603225708 + ], + [ + "▁9%", + -13.19476318359375 + ], + [ + "▁Lagoon", + -13.194811820983888 + ], + [ + "▁rooting", + -13.194811820983888 + ], + [ + "▁forgiving", + -13.194816589355469 + ], + [ + "▁faction", + -13.19489860534668 + ], + [ + "▁José", + -13.19497013092041 + ], + [ + "▁fresher", + -13.195218086242676 + ], + [ + "shape", + -13.195302963256836 + ], + [ + "itude", + -13.19539737701416 + ], + [ + "▁conquered", + -13.195412635803224 + ], + [ + "▁malfunction", + -13.195508003234863 + ], + [ + "▁swords", + -13.19550895690918 + ], + [ + "graduate", + -13.195531845092772 + ], + [ + "today", + -13.195542335510254 + ], + [ + "Fig", + -13.19555377960205 + ], + [ + "▁meadows", + -13.195622444152832 + ], + [ + "Fine", + -13.19569969177246 + ], + [ + "anger", + -13.19573974609375 + ], + [ + "shin", + -13.195806503295898 + ], + [ + "▁fosters", + -13.19589138031006 + ], + [ + "▁Moms", + -13.195919036865234 + ], + [ + "▁contentious", + -13.195934295654297 + ], + [ + "▁uptake", + -13.195937156677246 + ], + [ + "26.", + -13.19603157043457 + ], + [ + "lom", + -13.196078300476074 + ], + [ + "spin", + -13.196106910705566 + ], + [ + "deal", + -13.196145057678224 + ], + [ + "▁inverter", + -13.196149826049805 + ], + [ + "Applicants", + -13.196165084838867 + ], + [ + "Father", + -13.196229934692385 + ], + [ + "▁Marcos", + -13.19624137878418 + ], + [ + "▁Spell", + 
-13.196269035339355 + ], + [ + "▁dunk", + -13.196281433105469 + ], + [ + "▁immaculate", + -13.19628620147705 + ], + [ + "Mount", + -13.196301460266112 + ], + [ + "▁honours", + -13.196322441101074 + ], + [ + "FUL", + -13.19635009765625 + ], + [ + "050", + -13.196352005004885 + ], + [ + "▁Emotional", + -13.196352005004885 + ], + [ + "▁cul", + -13.196372985839844 + ], + [ + "▁armour", + -13.196375846862791 + ], + [ + "SIS", + -13.196399688720703 + ], + [ + "later", + -13.196407318115234 + ], + [ + "▁Smithsonian", + -13.196529388427734 + ], + [ + "▁unavoidable", + -13.196529388427734 + ], + [ + "▁Lennon", + -13.196544647216797 + ], + [ + "▁antimicrobial", + -13.19654655456543 + ], + [ + "▁diplomacy", + -13.196550369262695 + ], + [ + "▁jargon", + -13.196550369262695 + ], + [ + "original", + -13.196560859680176 + ], + [ + "▁Strain", + -13.196581840515137 + ], + [ + "▁Freight", + -13.19664192199707 + ], + [ + "▁reused", + -13.196707725524902 + ], + [ + "▁abruptly", + -13.19671630859375 + ], + [ + "▁Reset", + -13.196739196777344 + ], + [ + "▁Speedway", + -13.19676685333252 + ], + [ + "▁enrolment", + -13.19681453704834 + ], + [ + "▁Earned", + -13.19700813293457 + ], + [ + "ISO", + -13.197028160095217 + ], + [ + "202", + -13.197151184082031 + ], + [ + "PORT", + -13.197198867797852 + ], + [ + "▁3.8", + -13.19727897644043 + ], + [ + "▁2018)", + -13.197328567504885 + ], + [ + "▁addicts", + -13.19733428955078 + ], + [ + "▁Modular", + -13.197469711303713 + ], + [ + "▁Diseases", + -13.197556495666504 + ], + [ + "▁fluff", + -13.1976957321167 + ], + [ + "▁arrays", + -13.197705268859863 + ], + [ + "▁APC", + -13.197766304016112 + ], + [ + "▁recognises", + -13.197772026062012 + ], + [ + "▁Constant", + -13.197906494140623 + ], + [ + "▁Daughter", + -13.197911262512209 + ], + [ + "(5)", + -13.197932243347168 + ], + [ + "▁cleverly", + -13.19794464111328 + ], + [ + "▁Bournemouth", + -13.197978973388672 + ], + [ + "▁baker", + -13.198022842407228 + ], + [ + "▁seventeen", + -13.198037147521973 + ], + [ + "▁computerized", + -13.198144912719728 + ], + [ + "▁furious", + -13.19817066192627 + ], + [ + "Thankfully", + -13.198200225830078 + ], + [ + "▁awaken", + -13.198250770568848 + ], + [ + "▁Laboratories", + -13.19837474822998 + ], + [ + "▁unanswered", + -13.19837474822998 + ], + [ + "▁kosher", + -13.198376655578612 + ], + [ + "▁Heathrow", + -13.198386192321776 + ], + [ + "▁sled", + -13.19843864440918 + ], + [ + "rh", + -13.198484420776367 + ], + [ + "▁Fisheries", + -13.198485374450684 + ], + [ + "▁Staples", + -13.198495864868164 + ], + [ + "▁tagging", + -13.198504447937012 + ], + [ + "▁Shack", + -13.198522567749023 + ], + [ + "▁Chili", + -13.19855785369873 + ], + [ + "▁Midland", + -13.19857692718506 + ], + [ + "▁vera", + -13.198610305786133 + ], + [ + "▁rumored", + -13.19880485534668 + ], + [ + "▁GH", + -13.198835372924805 + ], + [ + "▁$26", + -13.19896125793457 + ], + [ + "▁sleigh", + -13.199045181274414 + ], + [ + "otta", + -13.19911766052246 + ], + [ + "▁Metallic", + -13.199310302734377 + ], + [ + "▁hairs", + -13.199350357055664 + ], + [ + "Dev", + -13.199398040771484 + ], + [ + "▁pickle", + -13.19946002960205 + ], + [ + "sko", + -13.199464797973633 + ], + [ + "▁woodwork", + -13.199501991271973 + ], + [ + "▁anterior", + -13.19954776763916 + ], + [ + "▁Auditor", + -13.199600219726562 + ], + [ + "▁Sco", + -13.199625015258787 + ], + [ + "▁1988,", + -13.199708938598633 + ], + [ + "▁ci", + -13.19974136352539 + ], + [ + "▁heirloom", + -13.199746131896973 + ], + [ + "▁Vocal", + -13.199748039245604 + ], + [ + "▁supplemented", 
+ -13.199877738952637 + ], + [ + "▁Competitive", + -13.199969291687012 + ], + [ + "▁kitchenette", + -13.200011253356934 + ], + [ + "▁thanking", + -13.200030326843262 + ], + [ + "▁1901", + -13.20004940032959 + ], + [ + "Prices", + -13.200063705444336 + ], + [ + "▁Alamo", + -13.200180053710938 + ], + [ + "▁spear", + -13.20020866394043 + ], + [ + "▁debugging", + -13.200231552124023 + ], + [ + "▁chopping", + -13.200264930725098 + ], + [ + "▁Anchorage", + -13.20026683807373 + ], + [ + "▁baptized", + -13.20032024383545 + ], + [ + "▁Levels", + -13.200324058532717 + ], + [ + "▁fragment", + -13.20033073425293 + ], + [ + "▁Robbie", + -13.200332641601562 + ], + [ + "▁Sche", + -13.200389862060549 + ], + [ + "Luke", + -13.200440406799316 + ], + [ + "▁credential", + -13.200448989868164 + ], + [ + "▁governors", + -13.200496673583984 + ], + [ + "LING", + -13.200559616088867 + ], + [ + "▁diffuse", + -13.200590133666992 + ], + [ + "▁Creativity", + -13.200684547424316 + ], + [ + "unk", + -13.200754165649414 + ], + [ + "▁Karma", + -13.20083713531494 + ], + [ + "▁Bills", + -13.20094871520996 + ], + [ + "tol", + -13.200994491577148 + ], + [ + "Zone", + -13.201087951660156 + ], + [ + "▁revamp", + -13.201178550720217 + ], + [ + "▁Monarch", + -13.201282501220703 + ], + [ + "▁frogs", + -13.201321601867676 + ], + [ + "▁Franc", + -13.201349258422852 + ], + [ + "▁Earn", + -13.201375007629396 + ], + [ + "Spec", + -13.201416015625 + ], + [ + "▁1983,", + -13.201424598693848 + ], + [ + "▁Polytechnic", + -13.201440811157228 + ], + [ + "▁Hurry", + -13.201522827148438 + ], + [ + "▁EE", + -13.201531410217283 + ], + [ + "▁Colo", + -13.20162296295166 + ], + [ + "▁deviation", + -13.201626777648926 + ], + [ + "▁electricians", + -13.201630592346191 + ], + [ + "▁submarine", + -13.201655387878418 + ], + [ + "▁FPS", + -13.201695442199709 + ], + [ + "ogenic", + -13.20183563232422 + ], + [ + "▁Cary", + -13.201852798461914 + ], + [ + "registration", + -13.201886177062988 + ], + [ + "▁Guns", + -13.202000617980955 + ], + [ + "▁Dynasty", + -13.202075004577637 + ], + [ + "▁Secretariat", + -13.202075004577637 + ], + [ + "▁schizophrenia", + -13.202075004577637 + ], + [ + "▁Armour", + -13.202115058898926 + ], + [ + "9%", + -13.202176094055176 + ], + [ + "Flash", + -13.202255249023438 + ], + [ + "▁exaggerated", + -13.202298164367676 + ], + [ + "▁Inspire", + -13.202420234680176 + ], + [ + "Wide", + -13.20268440246582 + ], + [ + "▁Traditionally", + -13.202771186828612 + ], + [ + "▁perfected", + -13.203073501586914 + ], + [ + "▁Ministers", + -13.203079223632812 + ], + [ + "▁1905", + -13.203109741210938 + ], + [ + "▁Alley", + -13.203251838684082 + ], + [ + "▁agendas", + -13.203362464904783 + ], + [ + "▁orchard", + -13.203397750854492 + ], + [ + "▁Kung", + -13.20341968536377 + ], + [ + "▁WORLD", + -13.203421592712402 + ], + [ + "▁Paw", + -13.203451156616213 + ], + [ + "▁Amish", + -13.203572273254396 + ], + [ + "▁Dates", + -13.203600883483888 + ], + [ + "▁Mongolia", + -13.20362377166748 + ], + [ + "▁Knee", + -13.203659057617188 + ], + [ + "rice", + -13.203668594360352 + ], + [ + "OUR", + -13.20371150970459 + ], + [ + "Grade", + -13.203744888305664 + ], + [ + "Photos", + -13.203782081604004 + ], + [ + "▁golfing", + -13.203798294067385 + ], + [ + "rot", + -13.203819274902344 + ], + [ + "▁Chal", + -13.203862190246582 + ], + [ + "▁Smoking", + -13.203866958618164 + ], + [ + "▁excludes", + -13.203875541687012 + ], + [ + "ln", + -13.2039155960083 + ], + [ + "▁facilitation", + -13.203930854797363 + ], + [ + "pink", + -13.20407009124756 + ], + [ + "picked", + 
-13.204123497009276 + ], + [ + "▁Lao", + -13.204127311706545 + ], + [ + "▁racer", + -13.20415496826172 + ], + [ + "Prof", + -13.204164505004885 + ], + [ + "▁upbringing", + -13.204187393188477 + ], + [ + "▁valuations", + -13.204208374023438 + ], + [ + "▁erupted", + -13.204212188720703 + ], + [ + "▁compress", + -13.204432487487791 + ], + [ + "Future", + -13.204498291015623 + ], + [ + "▁motorist", + -13.204505920410156 + ], + [ + "▁Catalan", + -13.204540252685549 + ], + [ + "acy", + -13.204572677612305 + ], + [ + "approx", + -13.204638481140137 + ], + [ + "▁Acres", + -13.204642295837402 + ], + [ + "▁pirates", + -13.2046480178833 + ], + [ + ")(3)", + -13.204719543457031 + ], + [ + "4.0", + -13.204744338989258 + ], + [ + "▁Fuller", + -13.204753875732422 + ], + [ + "▁chicks", + -13.204785346984863 + ], + [ + "▁muse", + -13.20482349395752 + ], + [ + "▁Cocktail", + -13.204833984375 + ], + [ + "▁Soldiers", + -13.204838752746582 + ], + [ + "lighting", + -13.204888343811035 + ], + [ + "▁dumpsters", + -13.204930305480955 + ], + [ + "▁perk", + -13.204988479614258 + ], + [ + "megapixel", + -13.205062866210938 + ], + [ + "▁Oncology", + -13.205089569091797 + ], + [ + "▁desktops", + -13.205159187316896 + ], + [ + "▁mined", + -13.205184936523438 + ], + [ + "simple", + -13.20522403717041 + ], + [ + "▁presume", + -13.205243110656738 + ], + [ + "▁Lex", + -13.205484390258787 + ], + [ + "▁villains", + -13.20551586151123 + ], + [ + "▁Beech", + -13.205565452575684 + ], + [ + "▁erect", + -13.205588340759276 + ], + [ + "▁Collector", + -13.2056245803833 + ], + [ + "lean", + -13.205625534057615 + ], + [ + "employment", + -13.20567512512207 + ], + [ + "▁latch", + -13.20574188232422 + ], + [ + "▁GD", + -13.205743789672852 + ], + [ + "zh", + -13.20580005645752 + ], + [ + "▁conservatory", + -13.205853462219238 + ], + [ + "▁Sheila", + -13.205877304077148 + ], + [ + "▁foodie", + -13.205892562866213 + ], + [ + "cos", + -13.20589828491211 + ], + [ + "▁trims", + -13.205904006958008 + ], + [ + "▁punt", + -13.206103324890137 + ], + [ + "▁Lar", + -13.2061185836792 + ], + [ + "currently", + -13.206391334533691 + ], + [ + "▁Canary", + -13.206415176391602 + ], + [ + "▁MOT", + -13.206424713134766 + ], + [ + "▁cheers", + -13.206585884094238 + ], + [ + "▁reacting", + -13.206643104553224 + ], + [ + "1.7", + -13.20672607421875 + ], + [ + "▁scriptures", + -13.206738471984863 + ], + [ + "▁(2010)", + -13.20677375793457 + ], + [ + "▁knack", + -13.206781387329102 + ], + [ + "▁diner", + -13.206809043884276 + ], + [ + "CAT", + -13.206884384155272 + ], + [ + "Increase", + -13.206986427307127 + ], + [ + "▁XR", + -13.207046508789062 + ], + [ + "▁powders", + -13.207064628601074 + ], + [ + "iate", + -13.207134246826172 + ], + [ + "Kate", + -13.207181930541992 + ], + [ + "▁VIII", + -13.207267761230469 + ], + [ + "▁Servers", + -13.20729160308838 + ], + [ + "cards", + -13.207489967346191 + ], + [ + "Found", + -13.207511901855469 + ], + [ + "google", + -13.20762825012207 + ], + [ + "▁escrow", + -13.20765495300293 + ], + [ + "▁antibacterial", + -13.20767879486084 + ], + [ + "▁1987,", + -13.20776653289795 + ], + [ + "zhou", + -13.20781135559082 + ], + [ + "▁Ballroom", + -13.207834243774414 + ], + [ + "▁Sapphire", + -13.20785140991211 + ], + [ + "▁brownie", + -13.207880973815918 + ], + [ + "pid", + -13.207903861999512 + ], + [ + "▁Blogging", + -13.207919120788574 + ], + [ + "▁london", + -13.207921981811523 + ], + [ + "▁Atom", + -13.20823574066162 + ], + [ + "▁ebooks", + -13.208239555358888 + ], + [ + "anya", + -13.208243370056152 + ], + [ + "▁exceedingly", 
+ -13.208319664001465 + ], + [ + "▁yell", + -13.208393096923828 + ], + [ + "▁piled", + -13.208406448364258 + ], + [ + "▁Messages", + -13.20844841003418 + ], + [ + "▁Doom", + -13.208452224731444 + ], + [ + "▁distributes", + -13.208542823791504 + ], + [ + "880", + -13.208768844604492 + ], + [ + "▁LR", + -13.20893096923828 + ], + [ + "▁heir", + -13.208978652954102 + ], + [ + "▁Veronica", + -13.209019660949709 + ], + [ + "▁Peugeot", + -13.209020614624023 + ], + [ + "▁Powers", + -13.209263801574709 + ], + [ + "Golden", + -13.209376335144045 + ], + [ + "...?", + -13.209379196166992 + ], + [ + "▁frenzy", + -13.209516525268556 + ], + [ + "▁illicit", + -13.209518432617188 + ], + [ + "▁pelvic", + -13.209518432617188 + ], + [ + "▁rectify", + -13.20952033996582 + ], + [ + "▁autistic", + -13.209522247314451 + ], + [ + "Samsung", + -13.209528923034668 + ], + [ + "▁tiring", + -13.20954132080078 + ], + [ + "▁equine", + -13.209583282470703 + ], + [ + "▁excerpts", + -13.209585189819336 + ], + [ + "▁Vit", + -13.209635734558104 + ], + [ + "▁Blocks", + -13.20968532562256 + ], + [ + "▁glaciers", + -13.20976734161377 + ], + [ + "10,", + -13.209792137145996 + ], + [ + "▁affirmed", + -13.209904670715332 + ], + [ + "▁ninety", + -13.209951400756836 + ], + [ + "▁dove", + -13.209990501403809 + ], + [ + "ald", + -13.210101127624512 + ], + [ + "▁cre", + -13.21011734008789 + ], + [ + "▁microphones", + -13.210164070129396 + ], + [ + "arrow", + -13.210272789001465 + ], + [ + "▁Botox", + -13.210329055786133 + ], + [ + "▁dvd", + -13.210339546203612 + ], + [ + "▁Axis", + -13.21037769317627 + ], + [ + "▁Frei", + -13.210553169250488 + ], + [ + "▁Governors", + -13.210570335388184 + ], + [ + "▁brim", + -13.210611343383787 + ], + [ + "calc", + -13.210681915283203 + ], + [ + "7).", + -13.21074676513672 + ], + [ + "▁conversational", + -13.210752487182615 + ], + [ + "▁customise", + -13.210792541503906 + ], + [ + "▁Thursdays", + -13.210877418518066 + ], + [ + "▁Bab", + -13.21090602874756 + ], + [ + "▁hauling", + -13.210999488830566 + ], + [ + "▁Couple", + -13.211012840270996 + ], + [ + "16)", + -13.211053848266602 + ], + [ + "▁relieving", + -13.211113929748535 + ], + [ + "EZ", + -13.211136817932127 + ], + [ + "forth", + -13.211278915405272 + ], + [ + "▁avalanche", + -13.211385726928713 + ], + [ + "▁Meridian", + -13.211387634277344 + ], + [ + "▁polka", + -13.21140956878662 + ], + [ + "cp", + -13.21141529083252 + ], + [ + "▁fav", + -13.21145248413086 + ], + [ + "▁Pru", + -13.21168613433838 + ], + [ + "▁homeschool", + -13.211734771728516 + ], + [ + "▁responsiveness", + -13.211827278137209 + ], + [ + "▁Tender", + -13.211857795715332 + ], + [ + "▁Yankee", + -13.211934089660645 + ], + [ + "▁manpower", + -13.211989402770996 + ], + [ + "▁Transform", + -13.212044715881348 + ], + [ + "▁Accountants", + -13.212061882019045 + ], + [ + "▁scrambled", + -13.212117195129396 + ], + [ + "▁businessmen", + -13.212233543395996 + ], + [ + "▁solves", + -13.212267875671388 + ], + [ + "sq", + -13.212297439575195 + ], + [ + "▁Twins", + -13.21234893798828 + ], + [ + "▁cert", + -13.212398529052734 + ], + [ + "Isn", + -13.212533950805664 + ], + [ + "▁tro", + -13.212552070617676 + ], + [ + "doing", + -13.212629318237305 + ], + [ + "▁Moment", + -13.212654113769531 + ], + [ + "▁abuses", + -13.212657928466797 + ], + [ + "bd", + -13.212787628173828 + ], + [ + "Face", + -13.212966918945312 + ], + [ + "▁Isles", + -13.212967872619627 + ], + [ + "▁Tunisia", + -13.212970733642578 + ], + [ + "▁Hog", + -13.212998390197754 + ], + [ + "▁Subscription", + -13.213054656982422 + ], 
+ [ + "bies", + -13.213057518005373 + ], + [ + "▁Nikki", + -13.213123321533203 + ], + [ + "▁Cameroon", + -13.213150024414062 + ], + [ + "▁yuan", + -13.213178634643556 + ], + [ + "▁depreciation", + -13.213258743286133 + ], + [ + "▁pedigree", + -13.213258743286133 + ], + [ + "▁posterior", + -13.213261604309082 + ], + [ + "▁tactile", + -13.213262557983398 + ], + [ + "Dance", + -13.213275909423828 + ], + [ + "▁resentment", + -13.213305473327637 + ], + [ + "osi", + -13.213384628295898 + ], + [ + "▁Anthem", + -13.213394165039062 + ], + [ + "▁submerged", + -13.213396072387695 + ], + [ + "▁circulated", + -13.213415145874023 + ], + [ + "▁donuts", + -13.213448524475098 + ], + [ + "oC", + -13.21362590789795 + ], + [ + "▁underserved", + -13.21365451812744 + ], + [ + "▁Shire", + -13.213669776916504 + ], + [ + "Spread", + -13.213672637939451 + ], + [ + "▁braided", + -13.213695526123049 + ], + [ + "▁Zelda", + -13.21371078491211 + ], + [ + "▁defeats", + -13.213741302490234 + ], + [ + "▁idiot", + -13.213763236999512 + ], + [ + "▁Toll", + -13.213821411132812 + ], + [ + "▁Sidney", + -13.213854789733888 + ], + [ + "▁presumed", + -13.213863372802734 + ], + [ + "▁Exams", + -13.213872909545898 + ], + [ + "▁nightclub", + -13.213950157165527 + ], + [ + "▁waterproofing", + -13.21396541595459 + ], + [ + "slow", + -13.214029312133787 + ], + [ + "▁Lowes", + -13.214118957519531 + ], + [ + "▁Rodney", + -13.214128494262695 + ], + [ + "nix", + -13.21414852142334 + ], + [ + "▁repellent", + -13.214200973510742 + ], + [ + "▁Sanford", + -13.214247703552246 + ], + [ + "▁192", + -13.21434211730957 + ], + [ + "7-", + -13.214349746704102 + ], + [ + "▁Accu", + -13.214365005493164 + ], + [ + "▁Griffith", + -13.21437168121338 + ], + [ + "▁Psychological", + -13.214629173278809 + ], + [ + "▁Barker", + -13.21484088897705 + ], + [ + "▁RAID", + -13.214888572692873 + ], + [ + "▁sacrificed", + -13.214978218078612 + ], + [ + "▁Grim", + -13.2150239944458 + ], + [ + "▁RG", + -13.215043067932127 + ], + [ + "werk", + -13.215051651000977 + ], + [ + "▁hillside", + -13.215106010437012 + ], + [ + "▁evangelical", + -13.21513557434082 + ], + [ + "▁triangular", + -13.215136528015137 + ], + [ + "▁Acrobat", + -13.215142250061035 + ], + [ + "▁Buick", + -13.215150833129885 + ], + [ + "▁Efficient", + -13.21515655517578 + ], + [ + "▁Baxter", + -13.21517276763916 + ], + [ + "lack", + -13.21518325805664 + ], + [ + "ende", + -13.215200424194336 + ], + [ + "17)", + -13.215202331542969 + ], + [ + "▁Kom", + -13.215203285217283 + ], + [ + "▁ensembles", + -13.215261459350586 + ], + [ + "▁147", + -13.215287208557127 + ], + [ + "▁Plugin", + -13.21529483795166 + ], + [ + "medium", + -13.215352058410645 + ], + [ + "eger", + -13.215449333190918 + ], + [ + "▁symbolism", + -13.21551513671875 + ], + [ + "▁Salle", + -13.21567153930664 + ], + [ + "pull", + -13.2156982421875 + ], + [ + "cole", + -13.215734481811523 + ], + [ + "Fo", + -13.215770721435549 + ], + [ + "▁LONG", + -13.21580982208252 + ], + [ + "▁Lantern", + -13.215856552124023 + ], + [ + "0.1", + -13.21595859527588 + ], + [ + "▁Aluminium", + -13.215991973876951 + ], + [ + "▁Deli", + -13.216068267822266 + ], + [ + "▁Greenwood", + -13.216073036193848 + ], + [ + "▁Pools", + -13.216158866882324 + ], + [ + "Frame", + -13.216200828552246 + ], + [ + "▁undercover", + -13.216259002685549 + ], + [ + "▁revelations", + -13.216365814208984 + ], + [ + "▁duplex", + -13.21638298034668 + ], + [ + "▁swarm", + -13.21640396118164 + ], + [ + "▁Turf", + -13.21641445159912 + ], + [ + "▁Mechanics", + -13.216425895690918 + ], + [ + "▁PJ", 
+ -13.216438293457031 + ], + [ + "▁varnish", + -13.216477394104004 + ], + [ + "▁conveys", + -13.216561317443848 + ], + [ + "▁Mosaic", + -13.216581344604492 + ], + [ + "▁EDT", + -13.21660041809082 + ], + [ + "young", + -13.21664333343506 + ], + [ + "▁assigning", + -13.216668128967283 + ], + [ + "▁ventured", + -13.216681480407717 + ], + [ + "Sleep", + -13.2167329788208 + ], + [ + "Mer", + -13.216837882995604 + ], + [ + "▁leaflet", + -13.216859817504885 + ], + [ + "Geo", + -13.21688747406006 + ], + [ + "mental", + -13.21696662902832 + ], + [ + "Th", + -13.21699047088623 + ], + [ + "▁cranberry", + -13.21701431274414 + ], + [ + "▁ensuing", + -13.21701431274414 + ], + [ + "▁mahogany", + -13.21701431274414 + ], + [ + "▁pinnacle", + -13.21701431274414 + ], + [ + "▁strenuous", + -13.21701431274414 + ], + [ + "▁luminous", + -13.21702480316162 + ], + [ + "▁bistro", + -13.217034339904783 + ], + [ + "▁omitted", + -13.21704387664795 + ], + [ + "▁Moines", + -13.217044830322266 + ], + [ + "▁friday", + -13.217069625854492 + ], + [ + "▁hath", + -13.217103004455566 + ], + [ + "▁Conversation", + -13.217185020446776 + ], + [ + "▁BIM", + -13.21721363067627 + ], + [ + "plated", + -13.217241287231444 + ], + [ + "Taylor", + -13.217242240905762 + ], + [ + "▁fer", + -13.217299461364746 + ], + [ + "▁courthouse", + -13.217318534851074 + ], + [ + "▁lodged", + -13.217337608337402 + ], + [ + "▁Clemson", + -13.217514038085938 + ], + [ + "▁Plantation", + -13.217558860778809 + ], + [ + "▁Mana", + -13.217602729797363 + ], + [ + "▁Bulgarian", + -13.217714309692385 + ], + [ + "ardi", + -13.217727661132812 + ], + [ + "▁hovering", + -13.217778205871582 + ], + [ + "daily", + -13.21779441833496 + ], + [ + "unc", + -13.217835426330566 + ], + [ + "Half", + -13.217865943908691 + ], + [ + "▁kW", + -13.21786880493164 + ], + [ + "▁Flora", + -13.217920303344728 + ], + [ + "▁rigs", + -13.218008041381836 + ], + [ + "Flex", + -13.218032836914062 + ], + [ + "▁eine", + -13.218059539794922 + ], + [ + "▁Avalon", + -13.218066215515137 + ], + [ + "▁Bullet", + -13.21817684173584 + ], + [ + "mbu", + -13.21827220916748 + ], + [ + "Created", + -13.218273162841797 + ], + [ + "▁Usage", + -13.218289375305176 + ], + [ + "▁sans", + -13.218364715576172 + ], + [ + "▁detectives", + -13.21838665008545 + ], + [ + "▁lounges", + -13.218542098999023 + ], + [ + "28.", + -13.218546867370604 + ], + [ + "▁1984,", + -13.218587875366213 + ], + [ + "▁(2011)", + -13.218635559082031 + ], + [ + "▁ticking", + -13.218734741210938 + ], + [ + "▁BF", + -13.218825340270996 + ], + [ + "▁Spokane", + -13.218897819519045 + ], + [ + "▁Conveyor", + -13.218899726867676 + ], + [ + "▁Reporter", + -13.218976974487305 + ], + [ + "former", + -13.218992233276367 + ], + [ + "▁Southeastern", + -13.218995094299316 + ], + [ + "▁backpacking", + -13.219082832336426 + ], + [ + "▁shields", + -13.219101905822754 + ], + [ + "▁finder", + -13.21913242340088 + ], + [ + "▁noticeably", + -13.219173431396484 + ], + [ + "▁10-12", + -13.219185829162598 + ], + [ + "▁flo", + -13.219286918640137 + ], + [ + "pos", + -13.219326972961426 + ], + [ + "▁sightings", + -13.219345092773438 + ], + [ + "▁Penguins", + -13.219348907470703 + ], + [ + "jer", + -13.219390869140623 + ], + [ + "▁perch", + -13.219438552856444 + ], + [ + "▁Landmark", + -13.219481468200684 + ], + [ + "▁biases", + -13.219496726989746 + ], + [ + "▁Tesco", + -13.219554901123049 + ], + [ + "▁infinitely", + -13.219626426696776 + ], + [ + "Marie", + -13.219629287719728 + ], + [ + "2007", + -13.219639778137209 + ], + [ + "▁swath", + -13.219670295715332 + 
], + [ + "▁Diagnostic", + -13.219731330871582 + ], + [ + "bright", + -13.219783782958984 + ], + [ + "▁Fake", + -13.219837188720703 + ], + [ + "▁149", + -13.219858169555664 + ], + [ + "▁YEAR", + -13.219908714294434 + ], + [ + "▁Quay", + -13.220020294189451 + ], + [ + "▁manure", + -13.220067024230955 + ], + [ + "cot", + -13.22016716003418 + ], + [ + "Mod", + -13.220256805419922 + ], + [ + "▁Weiss", + -13.220260620117188 + ], + [ + "▁Trauma", + -13.22036838531494 + ], + [ + "tooth", + -13.220385551452637 + ], + [ + "▁temperament", + -13.220486640930176 + ], + [ + "▁rotated", + -13.220489501953123 + ], + [ + "▁decidedly", + -13.220536231994627 + ], + [ + "▁Goat", + -13.220541954040527 + ], + [ + "▁Reds", + -13.220662117004396 + ], + [ + "▁adhering", + -13.220722198486328 + ], + [ + "▁nifty", + -13.220785140991213 + ], + [ + "STA", + -13.220786094665527 + ], + [ + "▁solitude", + -13.220786094665527 + ], + [ + "▁grills", + -13.22081470489502 + ], + [ + "▁Janeiro", + -13.22082805633545 + ], + [ + "▁Delete", + -13.220833778381348 + ], + [ + "▁Peterborough", + -13.22083568572998 + ], + [ + "▁chaise", + -13.220854759216309 + ], + [ + "rrell", + -13.22099781036377 + ], + [ + "▁LEED", + -13.22115993499756 + ], + [ + "▁homeschooling", + -13.221299171447754 + ], + [ + "ono", + -13.221405029296877 + ], + [ + "▁chant", + -13.22141456604004 + ], + [ + "Weight", + -13.221539497375488 + ], + [ + "▁takeout", + -13.221565246582031 + ], + [ + "▁Ships", + -13.221596717834473 + ], + [ + "ISS", + -13.221611976623535 + ], + [ + "forum", + -13.221647262573242 + ], + [ + "Train", + -13.221692085266112 + ], + [ + "kun", + -13.221696853637695 + ], + [ + "▁CIS", + -13.221722602844238 + ], + [ + "▁LSU", + -13.221742630004885 + ], + [ + "▁sausages", + -13.221820831298828 + ], + [ + "▁sandbox", + -13.221928596496582 + ], + [ + "▁AOL", + -13.222038269042969 + ], + [ + "kia", + -13.222163200378418 + ], + [ + "▁leaps", + -13.222277641296388 + ], + [ + "otropic", + -13.222281455993652 + ], + [ + "▁Consultation", + -13.22243309020996 + ], + [ + "▁pings", + -13.222441673278809 + ], + [ + "▁Undergraduate", + -13.222474098205566 + ], + [ + "MIT", + -13.222488403320312 + ], + [ + "▁formulations", + -13.222549438476562 + ], + [ + "▁goggles", + -13.22255802154541 + ], + [ + "▁directs", + -13.22256088256836 + ], + [ + "ITA", + -13.222620010375977 + ], + [ + "▁evocative", + -13.222674369812012 + ], + [ + "▁plausible", + -13.222674369812012 + ], + [ + "▁visibly", + -13.222676277160645 + ], + [ + "▁tapestry", + -13.222684860229492 + ], + [ + "▁SATA", + -13.222755432128906 + ], + [ + "▁Slowly", + -13.222813606262209 + ], + [ + "▁adjunct", + -13.222826957702637 + ], + [ + "▁barber", + -13.222830772399902 + ], + [ + "▁vanished", + -13.222942352294922 + ], + [ + "▁sculpted", + -13.222999572753906 + ], + [ + "Thinking", + -13.22301197052002 + ], + [ + "▁duplication", + -13.22307014465332 + ], + [ + "▁Invisalign", + -13.223114013671877 + ], + [ + "▁Spending", + -13.223164558410645 + ], + [ + "wald", + -13.223207473754885 + ], + [ + "▁$10.", + -13.223247528076172 + ], + [ + "▁squid", + -13.223294258117676 + ], + [ + "▁Cham", + -13.22337245941162 + ], + [ + "▁Leone", + -13.223397254943848 + ], + [ + "▁ancestry", + -13.223443984985352 + ], + [ + "▁troll", + -13.223533630371094 + ], + [ + "▁impart", + -13.223551750183104 + ], + [ + "▁cutters", + -13.223628044128418 + ], + [ + "NK", + -13.2236328125 + ], + [ + "▁receptionist", + -13.223737716674805 + ], + [ + "NIC", + -13.22396469116211 + ], + [ + "▁previews", + -13.224217414855955 + ], + [ + 
"shock", + -13.224312782287598 + ], + [ + "▁Virus", + -13.224339485168455 + ], + [ + "▁Overseas", + -13.224390029907228 + ], + [ + "▁evokes", + -13.224411010742188 + ], + [ + "▁Caps", + -13.224446296691896 + ], + [ + "▁happenings", + -13.224448204040527 + ], + [ + "▁gorge", + -13.224531173706056 + ], + [ + "▁embellishments", + -13.224553108215332 + ], + [ + "▁aggregation", + -13.224568367004396 + ], + [ + "▁approving", + -13.224568367004396 + ], + [ + "▁custard", + -13.224568367004396 + ], + [ + "▁mayonnaise", + -13.224568367004396 + ], + [ + "▁balsamic", + -13.224569320678713 + ], + [ + "▁HL", + -13.224571228027344 + ], + [ + "▁polyethylene", + -13.22457218170166 + ], + [ + "▁Monetary", + -13.224579811096191 + ], + [ + "▁southeastern", + -13.224604606628418 + ], + [ + "forming", + -13.22469997406006 + ], + [ + "aji", + -13.224750518798828 + ], + [ + "application", + -13.22483730316162 + ], + [ + "▁segmentation", + -13.224932670593262 + ], + [ + "negative", + -13.224942207336426 + ], + [ + "mom", + -13.224967956542969 + ], + [ + "Visitors", + -13.224973678588867 + ], + [ + "▁fragmented", + -13.224977493286133 + ], + [ + "Everybody", + -13.2250337600708 + ], + [ + "tung", + -13.225037574768066 + ], + [ + "▁Essence", + -13.225042343139648 + ], + [ + "▁Joanne", + -13.225126266479492 + ], + [ + "▁slippers", + -13.225183486938477 + ], + [ + "binding", + -13.22518539428711 + ], + [ + "▁Closing", + -13.225378036499023 + ], + [ + "▁Deliver", + -13.225398063659668 + ], + [ + "kawa", + -13.225448608398438 + ], + [ + "aza", + -13.22547435760498 + ], + [ + "▁preset", + -13.225496292114258 + ], + [ + "▁Pho", + -13.22551155090332 + ], + [ + "kid", + -13.225526809692385 + ], + [ + "▁XC", + -13.2255277633667 + ], + [ + "▁4-1", + -13.225603103637695 + ], + [ + "▁440", + -13.225634574890137 + ], + [ + "▁beware", + -13.225672721862791 + ], + [ + "Sim", + -13.225759506225586 + ], + [ + "▁Scotch", + -13.225763320922852 + ], + [ + "▁reinforces", + -13.225781440734863 + ], + [ + "▁Champ", + -13.225797653198242 + ], + [ + "▁deteriorate", + -13.225865364074709 + ], + [ + "uto", + -13.22590446472168 + ], + [ + "▁synchronization", + -13.22596263885498 + ], + [ + "String", + -13.226045608520508 + ], + [ + "▁Git", + -13.22607421875 + ], + [ + "▁intending", + -13.226207733154297 + ], + [ + "▁Theology", + -13.22627067565918 + ], + [ + "▁Protestant", + -13.226351737976074 + ], + [ + "dong", + -13.226444244384766 + ], + [ + "▁astronomy", + -13.226466178894045 + ], + [ + "▁Biomedical", + -13.22649383544922 + ], + [ + "▁Protective", + -13.226506233215332 + ], + [ + "inder", + -13.226507186889648 + ], + [ + "OMG", + -13.226573944091797 + ], + [ + "▁Shoulder", + -13.226580619812012 + ], + [ + "▁Chop", + -13.226584434509276 + ], + [ + "▁Blackjack", + -13.226609230041504 + ], + [ + "▁intellect", + -13.226611137390137 + ], + [ + "▁Hoover", + -13.22666358947754 + ], + [ + "▁Glo", + -13.22667121887207 + ], + [ + "▁feline", + -13.226690292358398 + ], + [ + "▁Bul", + -13.226702690124512 + ], + [ + "kw", + -13.226862907409668 + ], + [ + "rec", + -13.226935386657717 + ], + [ + "▁Gru", + -13.226977348327637 + ], + [ + "▁Henri", + -13.22702693939209 + ], + [ + "▁sequential", + -13.227044105529783 + ], + [ + "▁adjusts", + -13.22704792022705 + ], + [ + "▁soldering", + -13.227234840393066 + ], + [ + "▁Memories", + -13.227242469787598 + ], + [ + "▁Directorate", + -13.227253913879396 + ], + [ + "▁aggregated", + -13.227336883544922 + ], + [ + "▁waffles", + -13.227445602416992 + ], + [ + "git", + -13.227619171142578 + ], + [ + "installed", + 
-13.227645874023438 + ], + [ + "▁Hun", + -13.227749824523926 + ], + [ + "▁irritate", + -13.22776699066162 + ], + [ + "▁outages", + -13.227771759033203 + ], + [ + "Er", + -13.227819442749023 + ], + [ + "▁RAF", + -13.227829933166504 + ], + [ + "uchi", + -13.227965354919434 + ], + [ + "▁awarding", + -13.228019714355469 + ], + [ + "▁emoji", + -13.228253364562988 + ], + [ + "▁arrogant", + -13.228367805480955 + ], + [ + "▁Dwight", + -13.228368759155272 + ], + [ + "▁origami", + -13.228368759155272 + ], + [ + "▁Taipei", + -13.22836971282959 + ], + [ + "▁folklore", + -13.228376388549805 + ], + [ + "Ash", + -13.228379249572754 + ], + [ + "▁uncomplicated", + -13.228384017944336 + ], + [ + "▁robe", + -13.22860336303711 + ], + [ + "▁repo", + -13.228684425354004 + ], + [ + "nom", + -13.228785514831545 + ], + [ + "▁bulls", + -13.228800773620604 + ], + [ + "ards", + -13.228809356689451 + ], + [ + "Knowledge", + -13.228849411010742 + ], + [ + "Machine", + -13.228862762451172 + ], + [ + "▁greetings", + -13.22888469696045 + ], + [ + "regular", + -13.228899002075195 + ], + [ + "▁Agra", + -13.228926658630373 + ], + [ + "▁punching", + -13.228960037231444 + ], + [ + "▁Flush", + -13.228965759277344 + ], + [ + "▁Laurent", + -13.22903823852539 + ], + [ + "▁mare", + -13.2290620803833 + ], + [ + "efficiency", + -13.229108810424805 + ], + [ + "▁everlasting", + -13.229108810424805 + ], + [ + "Bike", + -13.229191780090332 + ], + [ + "▁affirm", + -13.229212760925291 + ], + [ + "▁unveil", + -13.22922706604004 + ], + [ + "▁Streets", + -13.22923183441162 + ], + [ + "▁Investigations", + -13.229287147521973 + ], + [ + "▁underwriting", + -13.229347229003906 + ], + [ + "▁CNA", + -13.22934913635254 + ], + [ + "8-", + -13.229409217834473 + ], + [ + "EG", + -13.22956371307373 + ], + [ + "▁Optional", + -13.229657173156738 + ], + [ + "▁toolbox", + -13.22977066040039 + ], + [ + "3.2", + -13.229782104492188 + ], + [ + "▁Scots", + -13.229832649230955 + ], + [ + "▁graves", + -13.230100631713867 + ], + [ + "stro", + -13.23018741607666 + ], + [ + "▁Zuckerberg", + -13.230195045471191 + ], + [ + "▁Taka", + -13.23024559020996 + ], + [ + "interoperability", + -13.23027229309082 + ], + [ + "▁Rotterdam", + -13.23027229309082 + ], + [ + "▁periodontal", + -13.23027229309082 + ], + [ + "▁stature", + -13.230307579040527 + ], + [ + "Gift", + -13.230317115783691 + ], + [ + "▁Madness", + -13.230558395385742 + ], + [ + "▁pressured", + -13.23060131072998 + ], + [ + "profits", + -13.230642318725586 + ], + [ + "Pink", + -13.23073387145996 + ], + [ + "purchase", + -13.230748176574709 + ], + [ + "▁certify", + -13.230769157409668 + ], + [ + "default", + -13.230819702148438 + ], + [ + "▁Detox", + -13.230951309204102 + ], + [ + "▁Booster", + -13.230957984924316 + ], + [ + "▁Cl", + -13.23095989227295 + ], + [ + "correct", + -13.231019020080566 + ], + [ + "wl", + -13.231250762939451 + ], + [ + "▁waive", + -13.2313232421875 + ], + [ + "▁champ", + -13.231409072875977 + ], + [ + "move", + -13.231525421142578 + ], + [ + "▁Proposal", + -13.23167324066162 + ], + [ + "▁Temporary", + -13.231791496276855 + ], + [ + "▁NAB", + -13.23184299468994 + ], + [ + "▁Dixie", + -13.231844902038574 + ], + [ + "▁winger", + -13.23189926147461 + ], + [ + "gies", + -13.231922149658203 + ], + [ + "▁chargers", + -13.231926918029783 + ], + [ + "▁strategist", + -13.231953620910645 + ], + [ + "Fully", + -13.232050895690918 + ], + [ + "kos", + -13.232050895690918 + ], + [ + "sort", + -13.232134819030762 + ], + [ + "▁VPS", + -13.232150077819824 + ], + [ + "▁Craigslist", + -13.232181549072266 + 
], + [ + "▁weblog", + -13.232280731201172 + ], + [ + "▁MAD", + -13.232300758361816 + ], + [ + "▁retrofit", + -13.232337951660156 + ], + [ + "▁425", + -13.232451438903809 + ], + [ + "FOR", + -13.232460975646973 + ], + [ + "▁GIF", + -13.23252773284912 + ], + [ + "▁Greenland", + -13.23256778717041 + ], + [ + "▁nonfiction", + -13.23256778717041 + ], + [ + "logical", + -13.232685089111328 + ], + [ + "Twitter", + -13.232759475708008 + ], + [ + "ACE", + -13.232816696166992 + ], + [ + "▁Eaton", + -13.232829093933104 + ], + [ + "▁hen", + -13.232832908630373 + ], + [ + "▁Elle", + -13.232837677001951 + ], + [ + "▁Ou", + -13.232919692993164 + ], + [ + "Saharan", + -13.232942581176758 + ], + [ + "▁Haley", + -13.232953071594238 + ], + [ + "Individual", + -13.23302173614502 + ], + [ + "▁Nicolas", + -13.23305606842041 + ], + [ + "▁flange", + -13.233092308044434 + ], + [ + "▁TRI", + -13.2330961227417 + ], + [ + "▁workbook", + -13.233149528503418 + ], + [ + "▁dropdown", + -13.233156204223633 + ], + [ + "▁devotees", + -13.23320484161377 + ], + [ + "▁14%", + -13.233222007751465 + ], + [ + "▁Wor", + -13.233226776123049 + ], + [ + "Offering", + -13.233295440673828 + ], + [ + "▁breakage", + -13.23332691192627 + ], + [ + "▁Stability", + -13.233354568481444 + ], + [ + "Exception", + -13.233502388000488 + ], + [ + "Treat", + -13.23351001739502 + ], + [ + "tight", + -13.233522415161133 + ], + [ + "▁tuberculosis", + -13.2335844039917 + ], + [ + "▁wigs", + -13.233634948730469 + ], + [ + "dn", + -13.233636856079102 + ], + [ + "▁NIH", + -13.233734130859377 + ], + [ + "inus", + -13.233782768249512 + ], + [ + "▁cucumbers", + -13.233858108520508 + ], + [ + "lect", + -13.233892440795898 + ], + [ + "▁renewing", + -13.233929634094238 + ], + [ + "▁Cancel", + -13.233942031860352 + ], + [ + "▁Citation", + -13.23396110534668 + ], + [ + "▁astronauts", + -13.234028816223145 + ], + [ + "▁depiction", + -13.234052658081056 + ], + [ + "▁applause", + -13.234092712402344 + ], + [ + "▁pergola", + -13.23409366607666 + ], + [ + "▁Flotation", + -13.23409652709961 + ], + [ + "▁marginalized", + -13.234135627746582 + ], + [ + "▁Lobby", + -13.234140396118164 + ], + [ + "▁Frequently", + -13.234153747558594 + ], + [ + "kers", + -13.23415470123291 + ], + [ + "▁repent", + -13.234190940856934 + ], + [ + "▁Crab", + -13.234288215637209 + ], + [ + "mara", + -13.234318733215332 + ], + [ + "▁Baghdad", + -13.23432731628418 + ], + [ + "flash", + -13.234333992004396 + ], + [ + "▁snowfall", + -13.234341621398926 + ], + [ + "▁criticised", + -13.234352111816406 + ], + [ + "▁Govt", + -13.234451293945312 + ], + [ + "▁WASHINGTON", + -13.234543800354004 + ], + [ + "ahan", + -13.234570503234863 + ], + [ + "▁Bally", + -13.234732627868652 + ], + [ + "▁indexing", + -13.234753608703612 + ], + [ + "▁swamp", + -13.234858512878418 + ], + [ + "▁Dior", + -13.23497200012207 + ], + [ + "carb", + -13.234983444213867 + ], + [ + "▁Jasmine", + -13.235090255737305 + ], + [ + "▁prefix", + -13.235093116760254 + ], + [ + "dling", + -13.235101699829102 + ], + [ + "aware", + -13.235116004943848 + ], + [ + "▁inaccessible", + -13.235188484191896 + ], + [ + "▁hunts", + -13.235201835632324 + ], + [ + "▁finalize", + -13.235215187072754 + ], + [ + "ttle", + -13.235217094421388 + ], + [ + "▁sank", + -13.235377311706545 + ], + [ + "▁listens", + -13.235466003417969 + ], + [ + "▁miraculous", + -13.23576545715332 + ], + [ + "▁Sus", + -13.235772132873535 + ], + [ + "▁sheen", + -13.235804557800291 + ], + [ + "▁sinful", + -13.235836029052734 + ], + [ + "▁machined", + -13.235857009887695 + ], + [ + 
"▁attaining", + -13.235920906066896 + ], + [ + "▁soybean", + -13.235955238342283 + ], + [ + "▁Renovation", + -13.2359619140625 + ], + [ + "▁expanse", + -13.236008644104004 + ], + [ + "▁prototyping", + -13.236008644104004 + ], + [ + "▁culminating", + -13.23600959777832 + ], + [ + "▁hummus", + -13.236014366149902 + ], + [ + "▁Spiral", + -13.236112594604492 + ], + [ + "▁Marvin", + -13.236336708068848 + ], + [ + "alu", + -13.23637580871582 + ], + [ + "Party", + -13.236438751220703 + ], + [ + "Maintain", + -13.236441612243652 + ], + [ + "▁Africans", + -13.2364501953125 + ], + [ + "▁escalate", + -13.236470222473145 + ], + [ + "▁Mead", + -13.23647403717041 + ], + [ + "▁ASEAN", + -13.236557960510254 + ], + [ + "shoot", + -13.236587524414062 + ], + [ + "▁Jeremiah", + -13.236590385437012 + ], + [ + "desk", + -13.236796379089355 + ], + [ + "▁sm", + -13.236946105957031 + ], + [ + "▁Mori", + -13.23695468902588 + ], + [ + "▁Siding", + -13.236957550048828 + ], + [ + "▁Cloth", + -13.237051963806152 + ], + [ + "▁fleeting", + -13.237183570861816 + ], + [ + "▁Mou", + -13.23723316192627 + ], + [ + "▁grassy", + -13.237258911132812 + ], + [ + "▁Roads", + -13.237420082092283 + ], + [ + "insky", + -13.237441062927246 + ], + [ + "FDA", + -13.237448692321776 + ], + [ + "▁CVS", + -13.237591743469238 + ], + [ + "▁resize", + -13.237611770629885 + ], + [ + "EDIT", + -13.237680435180664 + ], + [ + "▁Sew", + -13.237698554992676 + ], + [ + "▁ruthless", + -13.237710952758787 + ], + [ + "#1", + -13.237741470336914 + ], + [ + "▁snippets", + -13.237874984741213 + ], + [ + "▁viscosity", + -13.237927436828612 + ], + [ + "▁shabby", + -13.237929344177246 + ], + [ + "▁continuum", + -13.237934112548828 + ], + [ + "▁familiarize", + -13.23793888092041 + ], + [ + "▁Tudor", + -13.237944602966309 + ], + [ + "▁PSD", + -13.237961769104004 + ], + [ + "▁Olympus", + -13.23798370361328 + ], + [ + "▁masking", + -13.237991333007812 + ], + [ + "▁Bhutan", + -13.23805046081543 + ], + [ + "aze", + -13.238051414489746 + ], + [ + "▁Seafood", + -13.238085746765137 + ], + [ + "font", + -13.238107681274414 + ], + [ + "ffe", + -13.23826026916504 + ], + [ + "▁TED", + -13.23858642578125 + ], + [ + "▁siege", + -13.238590240478516 + ], + [ + "▁equipments", + -13.238653182983398 + ], + [ + "▁motorhome", + -13.238656044006348 + ], + [ + "material", + -13.238760948181152 + ], + [ + "▁Cutter", + -13.238764762878418 + ], + [ + "▁geographically", + -13.238791465759276 + ], + [ + "▁Kale", + -13.238802909851074 + ], + [ + "▁:-)", + -13.23883056640625 + ], + [ + "▁Billing", + -13.238906860351562 + ], + [ + "2001", + -13.238936424255373 + ], + [ + "▁lofty", + -13.238968849182127 + ], + [ + "YC", + -13.238980293273926 + ], + [ + "▁Kylie", + -13.23898696899414 + ], + [ + "▁5\"", + -13.238994598388672 + ], + [ + "▁verbs", + -13.23900318145752 + ], + [ + "▁killers", + -13.23902416229248 + ], + [ + "▁theoretically", + -13.23902702331543 + ], + [ + "▁Nur", + -13.239059448242188 + ], + [ + "▁pickles", + -13.23910140991211 + ], + [ + "▁pitchers", + -13.239120483398438 + ], + [ + "▁cot", + -13.23914623260498 + ], + [ + "riel", + -13.239248275756836 + ], + [ + "jay", + -13.23935604095459 + ], + [ + "▁Til", + -13.23940372467041 + ], + [ + "▁Bourbon", + -13.239487648010254 + ], + [ + "ionic", + -13.239510536193848 + ], + [ + "▁Middleton", + -13.2396879196167 + ], + [ + "▁constipation", + -13.239850997924805 + ], + [ + "▁fiduciary", + -13.239850997924805 + ], + [ + "▁Swivel", + -13.239852905273438 + ], + [ + "comb", + -13.239998817443848 + ], + [ + "▁Dickens", + -13.24001407623291 
+ ], + [ + "▁starving", + -13.24002742767334 + ], + [ + "20,000", + -13.2400484085083 + ], + [ + "linear", + -13.2400484085083 + ], + [ + "ordination", + -13.240079879760742 + ], + [ + "▁Ola", + -13.240150451660156 + ], + [ + "ifi", + -13.240180015563965 + ], + [ + "▁Minh", + -13.24018669128418 + ], + [ + "▁Diaz", + -13.240421295166016 + ], + [ + "▁storefront", + -13.240429878234863 + ], + [ + "▁ANC", + -13.240463256835938 + ], + [ + "▁ACS", + -13.240559577941896 + ], + [ + "Channel", + -13.240612983703612 + ], + [ + "Vitamin", + -13.240625381469728 + ], + [ + "▁ballistic", + -13.24062728881836 + ], + [ + "▁characterised", + -13.24063777923584 + ], + [ + "Williams", + -13.24071979522705 + ], + [ + "▁adapters", + -13.240824699401855 + ], + [ + "Nobody", + -13.240890502929688 + ], + [ + "▁Named", + -13.240899085998535 + ], + [ + "▁LD", + -13.240952491760254 + ], + [ + "▁ALS", + -13.240954399108888 + ], + [ + "▁vistas", + -13.241003036499023 + ], + [ + ":26", + -13.241132736206056 + ], + [ + "▁factions", + -13.241166114807127 + ], + [ + "▁Baton", + -13.24120044708252 + ], + [ + "▁Ratings", + -13.24120044708252 + ], + [ + "ATA", + -13.241230010986328 + ], + [ + "▁HOA", + -13.241280555725098 + ], + [ + "Orange", + -13.241332054138184 + ], + [ + "Returns", + -13.24136447906494 + ], + [ + "▁dab", + -13.2413969039917 + ], + [ + "▁33%", + -13.241506576538086 + ], + [ + "▁turnovers", + -13.241537094116213 + ], + [ + "▁Helm", + -13.241581916809082 + ], + [ + "▁Slave", + -13.24162769317627 + ], + [ + "▁Citi", + -13.2416410446167 + ], + [ + "▁Signed", + -13.241655349731444 + ], + [ + "Wear", + -13.24166488647461 + ], + [ + "▁Maid", + -13.241705894470217 + ], + [ + "▁Sheridan", + -13.241780281066896 + ], + [ + "▁biopsy", + -13.241798400878906 + ], + [ + "▁speciality", + -13.241843223571776 + ], + [ + "trop", + -13.241976737976074 + ], + [ + "▁Linear", + -13.242008209228516 + ], + [ + "▁404", + -13.242077827453612 + ], + [ + "vu", + -13.242084503173828 + ], + [ + "rv", + -13.242101669311523 + ], + [ + "▁mowing", + -13.24211311340332 + ], + [ + "▁Opinion", + -13.242145538330078 + ], + [ + "▁Deaf", + -13.242198944091797 + ], + [ + "yana", + -13.24222183227539 + ], + [ + "jam", + -13.242308616638184 + ], + [ + "▁skeletal", + -13.242476463317873 + ], + [ + "▁stoves", + -13.242561340332031 + ], + [ + "Minimum", + -13.242594718933104 + ], + [ + "Casino", + -13.242599487304688 + ], + [ + "▁Threat", + -13.24262809753418 + ], + [ + "answer", + -13.242630958557127 + ], + [ + "lung", + -13.24271297454834 + ], + [ + "▁supervisory", + -13.24272346496582 + ], + [ + "▁dal", + -13.242790222167969 + ], + [ + "▁selects", + -13.242827415466309 + ], + [ + "▁Tobacco", + -13.24289894104004 + ], + [ + "▁fractured", + -13.242938041687012 + ], + [ + "▁aligns", + -13.242945671081545 + ], + [ + "▁prioritized", + -13.243048667907717 + ], + [ + "▁walker", + -13.243230819702148 + ], + [ + "▁Goes", + -13.243258476257324 + ], + [ + "▁$200,000", + -13.243263244628906 + ], + [ + "▁moods", + -13.243321418762209 + ], + [ + "▁Sporting", + -13.243370056152344 + ], + [ + "▁planter", + -13.243378639221191 + ], + [ + "Sense", + -13.243401527404783 + ], + [ + "▁psyche", + -13.24342441558838 + ], + [ + "ERA", + -13.243426322937012 + ], + [ + "▁subcontractors", + -13.24343490600586 + ], + [ + "▁Wildcats", + -13.243572235107422 + ], + [ + "▁payoff", + -13.2435884475708 + ], + [ + "▁Socket", + -13.24366569519043 + ], + [ + "▁raspberries", + -13.243709564208984 + ], + [ + "▁waistband", + -13.243748664855955 + ], + [ + "▁awning", + -13.243757247924805 
+ ], + [ + "▁Trustee", + -13.243793487548828 + ], + [ + "▁Chiang", + -13.243818283081056 + ], + [ + "4.2", + -13.24382781982422 + ], + [ + "▁Mighty", + -13.24384307861328 + ], + [ + "▁Herbal", + -13.243863105773926 + ], + [ + "WAY", + -13.24392032623291 + ], + [ + "▁insulating", + -13.24393081665039 + ], + [ + "▁asteroid", + -13.24418830871582 + ], + [ + "▁digs", + -13.24434757232666 + ], + [ + "▁sharpening", + -13.244409561157228 + ], + [ + "▁Esq", + -13.244418144226074 + ], + [ + "▁Drinking", + -13.24445343017578 + ], + [ + "▁foe", + -13.244551658630373 + ], + [ + "357", + -13.244565963745115 + ], + [ + "vr", + -13.244582176208496 + ], + [ + "▁misconceptions", + -13.244751930236816 + ], + [ + "▁outfitted", + -13.244779586791992 + ], + [ + "▁ordeal", + -13.244847297668455 + ], + [ + "-65", + -13.244872093200684 + ], + [ + "▁Actress", + -13.245102882385254 + ], + [ + "▁Clair", + -13.245107650756836 + ], + [ + "▁Cooler", + -13.245125770568848 + ], + [ + "▁divides", + -13.2451753616333 + ], + [ + "▁guts", + -13.245182037353516 + ], + [ + "aco", + -13.245219230651855 + ], + [ + "rna", + -13.245610237121582 + ], + [ + "▁Maldives", + -13.24564266204834 + ], + [ + "▁segregation", + -13.24564266204834 + ], + [ + "▁Fiesta", + -13.245647430419922 + ], + [ + "▁embodied", + -13.245650291442873 + ], + [ + "▁snippet", + -13.24570083618164 + ], + [ + "hale", + -13.24570369720459 + ], + [ + "▁spyware", + -13.245725631713867 + ], + [ + "▁Invitational", + -13.245782852172852 + ], + [ + "▁Gerry", + -13.245842933654783 + ], + [ + "▁intimidated", + -13.24594497680664 + ], + [ + "▁Diva", + -13.245965003967283 + ], + [ + "cake", + -13.246051788330078 + ], + [ + "cloth", + -13.24607753753662 + ], + [ + "▁Acc", + -13.246257781982422 + ], + [ + "▁Matte", + -13.246317863464355 + ], + [ + "▁Grammar", + -13.246356010437012 + ], + [ + "▁Dodgers", + -13.246415138244627 + ], + [ + "▁Charger", + -13.24643898010254 + ], + [ + "▁Pico", + -13.246538162231444 + ], + [ + "▁aiding", + -13.24654483795166 + ], + [ + "compliant", + -13.246562957763672 + ], + [ + "Giving", + -13.24660587310791 + ], + [ + "▁Ahead", + -13.246699333190918 + ], + [ + "▁AFTER", + -13.24672031402588 + ], + [ + "▁275", + -13.246742248535156 + ], + [ + "Za", + -13.246753692626951 + ], + [ + "waste", + -13.246787071228027 + ], + [ + "▁Indo", + -13.246831893920898 + ], + [ + "▁steaming", + -13.246845245361328 + ], + [ + "lou", + -13.24692726135254 + ], + [ + "APP", + -13.246956825256348 + ], + [ + "Tonight", + -13.247248649597168 + ], + [ + "gua", + -13.247305870056152 + ], + [ + "▁realms", + -13.24738311767578 + ], + [ + "▁dryers", + -13.247447967529297 + ], + [ + "▁computation", + -13.247477531433104 + ], + [ + "575", + -13.24748706817627 + ], + [ + "▁parmesan", + -13.247580528259276 + ], + [ + "▁Quincy", + -13.247588157653809 + ], + [ + "▁lymphoma", + -13.247588157653809 + ], + [ + "iano", + -13.247628211975098 + ], + [ + "▁retractable", + -13.24763011932373 + ], + [ + "▁Hash", + -13.24764919281006 + ], + [ + "205", + -13.247708320617676 + ], + [ + "▁Mats", + -13.247746467590332 + ], + [ + "▁Absolute", + -13.247777938842772 + ], + [ + "issue", + -13.247873306274414 + ], + [ + "▁localization", + -13.247915267944336 + ], + [ + "▁denies", + -13.2479248046875 + ], + [ + "▁ASUS", + -13.247964859008787 + ], + [ + "▁dryness", + -13.248150825500488 + ], + [ + "▁matured", + -13.248306274414062 + ], + [ + "▁pep", + -13.248318672180176 + ], + [ + "launch", + -13.248395919799805 + ], + [ + "TEN", + -13.24842357635498 + ], + [ + "▁Splash", + -13.24846649169922 + ], + 
[ + "finished", + -13.24856948852539 + ], + [ + "Unique", + -13.24858856201172 + ], + [ + "roid", + -13.24871063232422 + ], + [ + "▁Alfa", + -13.248736381530762 + ], + [ + "▁Ritz", + -13.248787879943848 + ], + [ + "▁Particularly", + -13.24880313873291 + ], + [ + "tham", + -13.248933792114258 + ], + [ + "DNA", + -13.248940467834473 + ], + [ + "lid", + -13.248977661132812 + ], + [ + "▁provisional", + -13.248993873596191 + ], + [ + "▁beets", + -13.249028205871582 + ], + [ + "▁SPECIAL", + -13.249051094055176 + ], + [ + "▁prehistoric", + -13.249126434326172 + ], + [ + "▁Shrimp", + -13.249205589294434 + ], + [ + "▁notation", + -13.249256134033203 + ], + [ + "gai", + -13.249329566955566 + ], + [ + "▁differentiated", + -13.249356269836426 + ], + [ + "▁Traveling", + -13.24937629699707 + ], + [ + "sample", + -13.24943733215332 + ], + [ + "overs", + -13.24952507019043 + ], + [ + "▁discoloration", + -13.249541282653809 + ], + [ + "▁phosphorus", + -13.249545097351074 + ], + [ + "145", + -13.249679565429688 + ], + [ + "▁undue", + -13.24969482421875 + ], + [ + "333", + -13.249699592590332 + ], + [ + "▁Stitch", + -13.24970245361328 + ], + [ + "wai", + -13.249725341796877 + ], + [ + "▁020", + -13.24978733062744 + ], + [ + "▁Malibu", + -13.249814987182615 + ], + [ + "▁seriousness", + -13.249863624572754 + ], + [ + "▁Heavenly", + -13.249999046325684 + ], + [ + "Shi", + -13.250032424926758 + ], + [ + "▁portrays", + -13.250072479248049 + ], + [ + "▁spool", + -13.250207901000977 + ], + [ + "▁ITV", + -13.250226020812988 + ], + [ + "ARA", + -13.250234603881836 + ], + [ + "▁Venue", + -13.250340461730955 + ], + [ + "▁Tribute", + -13.250361442565918 + ], + [ + "▁Pap", + -13.250391006469728 + ], + [ + "▁stale", + -13.25045680999756 + ], + [ + "Various", + -13.25057315826416 + ], + [ + "imp", + -13.25062084197998 + ], + [ + "▁une", + -13.250810623168944 + ], + [ + "TION", + -13.250818252563477 + ], + [ + "▁vases", + -13.250829696655272 + ], + [ + "▁Hicks", + -13.25087070465088 + ], + [ + "oga", + -13.250946044921877 + ], + [ + "▁Enhanced", + -13.251036643981934 + ], + [ + "onne", + -13.251089096069336 + ], + [ + "499", + -13.25111198425293 + ], + [ + "sai", + -13.251116752624512 + ], + [ + "hoe", + -13.251255989074709 + ], + [ + "edi", + -13.251273155212402 + ], + [ + "▁riff", + -13.251359939575195 + ], + [ + "▁OECD", + -13.251432418823242 + ], + [ + "▁quintessential", + -13.25145149230957 + ], + [ + "▁Conservatory", + -13.25146770477295 + ], + [ + "▁galaxies", + -13.25146770477295 + ], + [ + "▁(2003)", + -13.251511573791504 + ], + [ + "▁Elliot", + -13.251541137695312 + ], + [ + "▁sloppy", + -13.251543045043944 + ], + [ + "▁Toss", + -13.25164794921875 + ], + [ + "dict", + -13.2516508102417 + ], + [ + "▁bravery", + -13.251657485961914 + ], + [ + "▁recliner", + -13.251699447631836 + ], + [ + "ullah", + -13.251773834228516 + ], + [ + "▁depleted", + -13.251776695251465 + ], + [ + "percent", + -13.251789093017578 + ], + [ + "Player", + -13.25180435180664 + ], + [ + "▁NEWS", + -13.251829147338867 + ], + [ + "▁auditor", + -13.251838684082031 + ], + [ + "Greg", + -13.25190544128418 + ], + [ + "▁IPS", + -13.251928329467772 + ], + [ + "▁resisted", + -13.25194263458252 + ], + [ + "logist", + -13.252039909362791 + ], + [ + "▁Apex", + -13.252230644226074 + ], + [ + "▁Sau", + -13.252318382263184 + ], + [ + "▁probes", + -13.252361297607422 + ], + [ + "▁adopts", + -13.252485275268556 + ], + [ + "▁Graduation", + -13.252503395080566 + ], + [ + "▁Attempt", + -13.252533912658691 + ], + [ + "everything", + -13.25255012512207 + ], + [ + 
"..........", + -13.2525634765625 + ], + [ + "▁Dominic", + -13.252635955810549 + ], + [ + "nat", + -13.25268840789795 + ], + [ + "▁weld", + -13.252690315246582 + ], + [ + "▁Gorilla", + -13.252735137939451 + ], + [ + "▁enclosures", + -13.252752304077148 + ], + [ + "cla", + -13.252766609191896 + ], + [ + "bok", + -13.252798080444336 + ], + [ + "▁Milo", + -13.252870559692385 + ], + [ + "▁Dim", + -13.252891540527344 + ], + [ + "▁Challenger", + -13.252971649169922 + ], + [ + "▁Everett", + -13.25300121307373 + ], + [ + "▁1985,", + -13.253116607666016 + ], + [ + "▁incapable", + -13.2532377243042 + ], + [ + "▁Stripe", + -13.25334358215332 + ], + [ + "Mass", + -13.253398895263672 + ], + [ + "▁Fortnite", + -13.253450393676758 + ], + [ + "▁Niger", + -13.253525733947754 + ], + [ + "Quest", + -13.253532409667969 + ], + [ + "Lite", + -13.253539085388184 + ], + [ + "ques", + -13.253572463989258 + ], + [ + "▁Culinary", + -13.25367546081543 + ], + [ + "▁Brennan", + -13.253680229187012 + ], + [ + "▁Called", + -13.253691673278809 + ], + [ + "▁Plato", + -13.253703117370604 + ], + [ + "▁workloads", + -13.253721237182615 + ], + [ + "tik", + -13.253722190856934 + ], + [ + "▁incarnation", + -13.253737449645996 + ], + [ + "▁Employer", + -13.253748893737791 + ], + [ + "▁Pediatrics", + -13.253793716430664 + ], + [ + "▁Gardening", + -13.253803253173828 + ], + [ + "▁curd", + -13.253841400146484 + ], + [ + "▁apron", + -13.253844261169434 + ], + [ + "▁Flour", + -13.253862380981444 + ], + [ + "▁revoked", + -13.253921508789062 + ], + [ + "▁1922", + -13.253979682922363 + ], + [ + "▁Sources", + -13.254076957702637 + ], + [ + "▁menstrual", + -13.254228591918944 + ], + [ + "▁savor", + -13.254240036010742 + ], + [ + "▁JU", + -13.254291534423828 + ], + [ + "▁tailoring", + -13.25430965423584 + ], + [ + "▁Esther", + -13.254347801208496 + ], + [ + "productive", + -13.254414558410645 + ], + [ + "lessly", + -13.254505157470703 + ], + [ + "protein", + -13.25452709197998 + ], + [ + "▁stalled", + -13.25452995300293 + ], + [ + "Je", + -13.254541397094728 + ], + [ + "associated", + -13.254579544067385 + ], + [ + "▁Spec", + -13.25467586517334 + ], + [ + "▁freeing", + -13.25470733642578 + ], + [ + "▁Gou", + -13.254764556884766 + ], + [ + "▁pharmaceuticals", + -13.25487232208252 + ], + [ + "uld", + -13.254889488220217 + ], + [ + "116", + -13.254919052124023 + ], + [ + "▁Rowe", + -13.25503635406494 + ], + [ + "▁helpers", + -13.255242347717283 + ], + [ + "▁Lahore", + -13.255370140075684 + ], + [ + "▁burgeoning", + -13.255370140075684 + ], + [ + "▁initiating", + -13.255370140075684 + ], + [ + "▁Bombay", + -13.255383491516112 + ], + [ + "▁Infantry", + -13.255403518676758 + ], + [ + "▁plaid", + -13.255496978759766 + ], + [ + "friends", + -13.255525588989258 + ], + [ + "▁pardon", + -13.255532264709473 + ], + [ + "▁constituted", + -13.255681037902832 + ], + [ + "▁spout", + -13.255728721618652 + ], + [ + "▁Mothers", + -13.25580596923828 + ], + [ + "anto", + -13.25592803955078 + ], + [ + "8.6", + -13.255946159362791 + ], + [ + "eration", + -13.255962371826172 + ], + [ + "▁reopened", + -13.25596809387207 + ], + [ + "▁mythical", + -13.256120681762695 + ], + [ + "▁Protector", + -13.256197929382324 + ], + [ + "▁Bankruptcy", + -13.25626277923584 + ], + [ + "phe", + -13.2562837600708 + ], + [ + "BH", + -13.256287574768066 + ], + [ + "▁VE", + -13.256356239318848 + ], + [ + "▁applaud", + -13.256420135498049 + ], + [ + "▁breeders", + -13.256420135498049 + ], + [ + "▁integrations", + -13.256511688232422 + ], + [ + "Bottom", + -13.256542205810549 + ], + [ + 
"WAN", + -13.256568908691406 + ], + [ + "kee", + -13.256648063659668 + ], + [ + "▁ripping", + -13.25667667388916 + ], + [ + "▁candies", + -13.256707191467283 + ], + [ + "▁Processor", + -13.256921768188477 + ], + [ + "▁whiteboard", + -13.256957054138184 + ], + [ + "forest", + -13.257051467895508 + ], + [ + "▁2018:", + -13.257071495056152 + ], + [ + "▁rejecting", + -13.257086753845217 + ], + [ + "▁PLAY", + -13.25718879699707 + ], + [ + "▁dictator", + -13.257267951965332 + ], + [ + "▁Kala", + -13.257268905639648 + ], + [ + "▁EMI", + -13.25729751586914 + ], + [ + "▁inauguration", + -13.25732707977295 + ], + [ + "▁Merkel", + -13.25735092163086 + ], + [ + "▁inventories", + -13.25737190246582 + ], + [ + "▁Kaplan", + -13.257396697998049 + ], + [ + "▁oddly", + -13.257485389709473 + ], + [ + "▁sails", + -13.257506370544434 + ], + [ + "▁terrestrial", + -13.257546424865724 + ], + [ + "▁extremes", + -13.257648468017578 + ], + [ + "nath", + -13.257650375366213 + ], + [ + "gem", + -13.257702827453612 + ], + [ + "▁MSc", + -13.257834434509276 + ], + [ + "▁1-4", + -13.257878303527832 + ], + [ + "▁cardstock", + -13.257929801940918 + ], + [ + "▁Participation", + -13.258000373840332 + ], + [ + "▁enact", + -13.25802230834961 + ], + [ + "▁Hanson", + -13.258145332336426 + ], + [ + "▁SUP", + -13.258150100708008 + ], + [ + "Convert", + -13.258223533630373 + ], + [ + "▁Nex", + -13.258259773254396 + ], + [ + "wet", + -13.258272171020508 + ], + [ + "▁sich", + -13.258356094360352 + ], + [ + "▁Friedman", + -13.258481979370115 + ], + [ + "▁Knife", + -13.258514404296877 + ], + [ + "Voice", + -13.258545875549316 + ], + [ + "hopefully", + -13.258560180664062 + ], + [ + "▁ENT", + -13.258567810058594 + ], + [ + "ogram", + -13.258580207824709 + ], + [ + "oj", + -13.2586669921875 + ], + [ + "▁Lol", + -13.258748054504396 + ], + [ + "▁surcharge", + -13.258824348449709 + ], + [ + "GY", + -13.258926391601562 + ], + [ + "iba", + -13.25892734527588 + ], + [ + "▁Sem", + -13.258981704711914 + ], + [ + "Perform", + -13.25905418395996 + ], + [ + "LK", + -13.259078025817873 + ], + [ + "▁Preferred", + -13.259102821350098 + ], + [ + "uria", + -13.259115219116213 + ], + [ + "▁#8", + -13.259178161621094 + ], + [ + "▁Mam", + -13.259218215942385 + ], + [ + "▁SHOW", + -13.25925350189209 + ], + [ + "▁ingenuity", + -13.25928783416748 + ], + [ + "▁renovating", + -13.25928783416748 + ], + [ + "▁treasury", + -13.25928783416748 + ], + [ + "▁accusing", + -13.259295463562012 + ], + [ + "▁Wolves", + -13.259302139282228 + ], + [ + "▁swapped", + -13.259322166442873 + ], + [ + "▁yielding", + -13.25934600830078 + ], + [ + "▁Fischer", + -13.25939655303955 + ], + [ + "▁quasi", + -13.259397506713867 + ], + [ + "▁undertook", + -13.25942039489746 + ], + [ + "KR", + -13.259454727172852 + ], + [ + "▁understatement", + -13.259547233581545 + ], + [ + "▁chatted", + -13.25957202911377 + ], + [ + "zza", + -13.259618759155272 + ], + [ + "▁Jorge", + -13.259653091430664 + ], + [ + "hosted", + -13.259674072265623 + ], + [ + "▁reiterated", + -13.259726524353027 + ], + [ + "▁Lung", + -13.25976848602295 + ], + [ + "ape", + -13.259791374206545 + ], + [ + "▁Nic", + -13.25985336303711 + ], + [ + "▁Rockies", + -13.25987148284912 + ], + [ + "▁Cakes", + -13.259969711303713 + ], + [ + "▁DDR", + -13.26007843017578 + ], + [ + "elia", + -13.260101318359377 + ], + [ + "▁Ping", + -13.260248184204102 + ], + [ + "▁Trojan", + -13.260486602783203 + ], + [ + "Han", + -13.260516166687012 + ], + [ + "bul", + -13.260546684265137 + ], + [ + "definition", + -13.260621070861816 + ], + [ + 
"▁tortillas", + -13.260622024536133 + ], + [ + "dhar", + -13.260741233825684 + ], + [ + "▁Reservations", + -13.26078987121582 + ], + [ + "thon", + -13.260915756225586 + ], + [ + "▁lantern", + -13.2609281539917 + ], + [ + "▁outsole", + -13.260929107666016 + ], + [ + "nge", + -13.261013984680176 + ], + [ + "▁materially", + -13.261040687561035 + ], + [ + "▁umbrellas", + -13.26108455657959 + ], + [ + "plo", + -13.261086463928224 + ], + [ + "mage", + -13.261091232299805 + ], + [ + "▁Chaos", + -13.261120796203612 + ], + [ + "▁Heater", + -13.261123657226562 + ], + [ + "▁Clifton", + -13.261228561401367 + ], + [ + "▁pancreatic", + -13.261252403259276 + ], + [ + "▁inducted", + -13.261260032653809 + ], + [ + "insurance", + -13.261272430419922 + ], + [ + "▁baton", + -13.261277198791504 + ], + [ + "▁Milford", + -13.261302947998049 + ], + [ + "▁Municipality", + -13.26133918762207 + ], + [ + "▁Patriot", + -13.261388778686523 + ], + [ + "ubi", + -13.261457443237305 + ], + [ + "DAY", + -13.261735916137695 + ], + [ + "▁Thesis", + -13.261749267578123 + ], + [ + "hh", + -13.261751174926758 + ], + [ + "▁Kona", + -13.261785507202148 + ], + [ + "▁Exposure", + -13.261863708496094 + ], + [ + "▁Accept", + -13.261974334716797 + ], + [ + "▁squat", + -13.261975288391112 + ], + [ + "▁rad", + -13.26206874847412 + ], + [ + "855", + -13.262150764465332 + ], + [ + "kW", + -13.262168884277344 + ], + [ + "adh", + -13.262316703796388 + ], + [ + "▁Patti", + -13.262333869934082 + ], + [ + "▁Acupuncture", + -13.262337684631348 + ], + [ + "▁Tilt", + -13.262359619140623 + ], + [ + "▁sling", + -13.262408256530762 + ], + [ + "4).", + -13.262531280517578 + ], + [ + "▁Colombian", + -13.262554168701172 + ], + [ + "strength", + -13.26257038116455 + ], + [ + "WW", + -13.262572288513184 + ], + [ + "▁Experienced", + -13.262575149536133 + ], + [ + "▁Montessori", + -13.262649536132812 + ], + [ + "▁Cree", + -13.262673377990724 + ], + [ + "▁quests", + -13.262741088867188 + ], + [ + "▁freelancers", + -13.262781143188477 + ], + [ + "houses", + -13.26279067993164 + ], + [ + "▁modulation", + -13.262799263000488 + ], + [ + "▁COO", + -13.262807846069336 + ], + [ + "▁Tay", + -13.26282787322998 + ], + [ + "lots", + -13.262941360473633 + ], + [ + "▁chronological", + -13.262977600097656 + ], + [ + "grand", + -13.263005256652832 + ], + [ + "▁entrances", + -13.263009071350098 + ], + [ + "▁follicles", + -13.263039588928224 + ], + [ + "▁cites", + -13.263110160827637 + ], + [ + "pho", + -13.263121604919434 + ], + [ + "arts", + -13.263150215148926 + ], + [ + "▁resemblance", + -13.26322078704834 + ], + [ + "▁Corvette", + -13.263222694396973 + ], + [ + "▁satisfies", + -13.263223648071287 + ], + [ + "▁Narendra", + -13.263224601745604 + ], + [ + "▁triathlon", + -13.263224601745604 + ], + [ + "▁corrugated", + -13.263256072998049 + ], + [ + "emo", + -13.263290405273438 + ], + [ + "▁deception", + -13.263340950012209 + ], + [ + "▁18%", + -13.263385772705078 + ], + [ + "Walking", + -13.26340103149414 + ], + [ + "/22", + -13.263482093811035 + ], + [ + "▁spindle", + -13.263513565063477 + ], + [ + "forms", + -13.263516426086426 + ], + [ + "▁Clover", + -13.263583183288574 + ], + [ + "▁Greens", + -13.263630867004396 + ], + [ + "rama", + -13.263654708862305 + ], + [ + "god", + -13.26365566253662 + ], + [ + "▁Ayurveda", + -13.263678550720217 + ], + [ + "vio", + -13.263761520385742 + ], + [ + "▁SEM", + -13.26380729675293 + ], + [ + "▁NAV", + -13.26393699645996 + ], + [ + "▁tra", + -13.263976097106934 + ], + [ + "▁claimant", + -13.264104843139648 + ], + [ + "▁Eleanor", + 
-13.264122009277344 + ], + [ + "ushi", + -13.264232635498049 + ], + [ + "▁footer", + -13.264325141906738 + ], + [ + "sf", + -13.264395713806152 + ], + [ + "▁Ble", + -13.26443099975586 + ], + [ + "▁crow", + -13.26448917388916 + ], + [ + "370", + -13.264513969421388 + ], + [ + "▁360-", + -13.264593124389648 + ], + [ + "Jennifer", + -13.26460075378418 + ], + [ + "▁collide", + -13.264618873596191 + ], + [ + "Managing", + -13.264641761779783 + ], + [ + "pla", + -13.26470947265625 + ], + [ + "▁380", + -13.264732360839844 + ], + [ + "LESS", + -13.264802932739258 + ], + [ + "▁tiers", + -13.264822959899902 + ], + [ + "kk", + -13.264931678771973 + ], + [ + "▁smoothing", + -13.264933586120604 + ], + [ + "▁AAC", + -13.264955520629885 + ], + [ + "▁forging", + -13.264981269836426 + ], + [ + "▁Addition", + -13.264995574951172 + ], + [ + "▁Fore", + -13.265010833740234 + ], + [ + "▁UM", + -13.26503562927246 + ], + [ + "▁equestrian", + -13.265192985534668 + ], + [ + "▁supervising", + -13.265192985534668 + ], + [ + "▁arduous", + -13.265193939208984 + ], + [ + "▁punitive", + -13.2651948928833 + ], + [ + "fee", + -13.265233039855955 + ], + [ + "Row", + -13.265250205993652 + ], + [ + "▁Scho", + -13.265280723571776 + ], + [ + "▁argumentative", + -13.26528263092041 + ], + [ + "▁Pregnancy", + -13.265291213989258 + ], + [ + "▁Guyana", + -13.26529598236084 + ], + [ + "▁Yukon", + -13.26530647277832 + ], + [ + "▁Austen", + -13.265315055847168 + ], + [ + "▁Midtown", + -13.26534652709961 + ], + [ + "cess", + -13.26534938812256 + ], + [ + "▁Morton", + -13.265509605407717 + ], + [ + "▁realistically", + -13.265509605407717 + ], + [ + "12)", + -13.265610694885254 + ], + [ + "▁Ref", + -13.265625953674316 + ], + [ + "▁cho", + -13.265671730041504 + ], + [ + "blogspot", + -13.26567268371582 + ], + [ + "fect", + -13.265741348266602 + ], + [ + "oci", + -13.26577091217041 + ], + [ + "bak", + -13.265838623046877 + ], + [ + "hydr", + -13.265896797180176 + ], + [ + "▁hospitalized", + -13.265902519226074 + ], + [ + "▁sulfur", + -13.266019821166992 + ], + [ + "seal", + -13.266047477722168 + ], + [ + "▁Minerals", + -13.266199111938477 + ], + [ + "▁Depth", + -13.266222953796388 + ], + [ + "▁replenish", + -13.266229629516602 + ], + [ + "+)", + -13.26624584197998 + ], + [ + "wild", + -13.26640796661377 + ], + [ + "▁Jai", + -13.266434669494627 + ], + [ + "college", + -13.266450881958008 + ], + [ + "▁confronting", + -13.266473770141602 + ], + [ + "▁converge", + -13.266565322875977 + ], + [ + "inspiring", + -13.266566276550291 + ], + [ + "groups", + -13.266754150390623 + ], + [ + "▁gait", + -13.266779899597168 + ], + [ + "▁amaze", + -13.26699447631836 + ], + [ + "prop", + -13.267057418823242 + ], + [ + "▁cynical", + -13.267057418823242 + ], + [ + "lists", + -13.267062187194824 + ], + [ + "▁Croydon", + -13.267169952392578 + ], + [ + "YOU", + -13.267178535461426 + ], + [ + "▁alleging", + -13.267178535461426 + ], + [ + "▁Groove", + -13.26728630065918 + ], + [ + "Eco", + -13.267389297485352 + ], + [ + "▁advocated", + -13.267398834228516 + ], + [ + "▁Granted", + -13.26746654510498 + ], + [ + "▁dungeon", + -13.267475128173828 + ], + [ + "▁manifold", + -13.267476081848145 + ], + [ + "uter", + -13.267550468444824 + ], + [ + "13)", + -13.267572402954102 + ], + [ + "▁Grange", + -13.267614364624023 + ], + [ + "owitz", + -13.267694473266602 + ], + [ + "27.", + -13.267695426940918 + ], + [ + "Story", + -13.26776123046875 + ], + [ + "▁55%", + -13.267784118652344 + ], + [ + "▁AVI", + -13.267794609069824 + ], + [ + "pong", + -13.267817497253418 + ], + [ + 
"▁Procedures", + -13.267842292785645 + ], + [ + "▁JB", + -13.267910957336426 + ], + [ + "gren", + -13.268115997314451 + ], + [ + "Plant", + -13.268121719360352 + ], + [ + "attack", + -13.268122673034668 + ], + [ + "▁Vivo", + -13.268136978149414 + ], + [ + "ovo", + -13.268351554870604 + ], + [ + "▁Bos", + -13.268473625183104 + ], + [ + "device", + -13.268584251403809 + ], + [ + "wedding", + -13.268688201904297 + ], + [ + "sworth", + -13.26869297027588 + ], + [ + "▁Eid", + -13.26870059967041 + ], + [ + "League", + -13.268733978271484 + ], + [ + "▁$49", + -13.268778800964355 + ], + [ + "▁Manuals", + -13.268962860107422 + ], + [ + "▁Sushi", + -13.268967628479004 + ], + [ + "▁enlist", + -13.269021034240724 + ], + [ + "▁IPL", + -13.269047737121582 + ], + [ + "▁annuity", + -13.269150733947754 + ], + [ + "▁fraternity", + -13.269150733947754 + ], + [ + "▁Paramount", + -13.269170761108398 + ], + [ + "▁nicht", + -13.26918888092041 + ], + [ + "▁Communion", + -13.269192695617676 + ], + [ + "▁cramped", + -13.269289016723633 + ], + [ + "▁persists", + -13.26928997039795 + ], + [ + "▁Stools", + -13.269323348999023 + ], + [ + "ope", + -13.269407272338867 + ], + [ + "▁Ronaldo", + -13.26962947845459 + ], + [ + "▁mc", + -13.26963233947754 + ], + [ + "ece", + -13.2697114944458 + ], + [ + "▁SHE", + -13.269753456115724 + ], + [ + "▁loader", + -13.26984405517578 + ], + [ + "usi", + -13.270133018493652 + ], + [ + "▁OG", + -13.270172119140623 + ], + [ + "riya", + -13.270275115966797 + ], + [ + "▁Bless", + -13.270333290100098 + ], + [ + "ASS", + -13.270353317260742 + ], + [ + "▁Bucket", + -13.270370483398438 + ], + [ + "▁widening", + -13.270467758178713 + ], + [ + "want", + -13.270479202270508 + ], + [ + "parts", + -13.27049446105957 + ], + [ + "▁Kad", + -13.270756721496582 + ], + [ + "▁Hz", + -13.270869255065918 + ], + [ + "▁obsessive", + -13.27089786529541 + ], + [ + "▁Injection", + -13.271133422851562 + ], + [ + "▁desperation", + -13.271133422851562 + ], + [ + "▁Duchess", + -13.27113437652588 + ], + [ + "▁barbeque", + -13.27113437652588 + ], + [ + "▁photovoltaic", + -13.27114963531494 + ], + [ + "▁Decide", + -13.271162986755373 + ], + [ + "▁capitalist", + -13.271183967590332 + ], + [ + "▁receptive", + -13.27118682861328 + ], + [ + "▁?????", + -13.27122402191162 + ], + [ + "jon", + -13.27126693725586 + ], + [ + "▁pageant", + -13.27128791809082 + ], + [ + "▁experimented", + -13.271291732788086 + ], + [ + "▁incision", + -13.27132511138916 + ], + [ + "▁unicorn", + -13.271492004394531 + ], + [ + "uno", + -13.271525382995604 + ], + [ + "▁Mk", + -13.271541595458984 + ], + [ + "▁Malone", + -13.2715425491333 + ], + [ + "▁driveways", + -13.271618843078612 + ], + [ + "uca", + -13.27163791656494 + ], + [ + "▁Papua", + -13.271668434143066 + ], + [ + "▁fertilizers", + -13.271678924560549 + ], + [ + "▁treasurer", + -13.271716117858888 + ], + [ + "▁degeneration", + -13.27171802520752 + ], + [ + "ely", + -13.271732330322266 + ], + [ + "Cu", + -13.27187156677246 + ], + [ + "▁Expression", + -13.272003173828123 + ], + [ + "blast", + -13.27210807800293 + ], + [ + "▁2021", + -13.272130966186523 + ], + [ + "▁ECO", + -13.272144317626951 + ], + [ + "transformational", + -13.272192001342772 + ], + [ + "tailed", + -13.272233963012695 + ], + [ + "▁WANT", + -13.272256851196287 + ], + [ + "▁DESIGN", + -13.272289276123049 + ], + [ + "typically", + -13.272306442260742 + ], + [ + "Break", + -13.272391319274902 + ], + [ + "▁Witness", + -13.272393226623535 + ], + [ + "▁Fir", + -13.272469520568848 + ], + [ + "bian", + -13.272480010986328 + ], + [ + 
"▁LMS", + -13.272579193115234 + ], + [ + "▁Suicide", + -13.27263641357422 + ], + [ + "▁Kavanaugh", + -13.272643089294434 + ], + [ + "▁Mare", + -13.272680282592772 + ], + [ + "▁webcast", + -13.272865295410156 + ], + [ + "Normally", + -13.272869110107422 + ], + [ + "▁accentuate", + -13.272950172424316 + ], + [ + "Eight", + -13.273016929626465 + ], + [ + "▁Majesty", + -13.27312183380127 + ], + [ + "▁criterion", + -13.27312183380127 + ], + [ + "▁scrumptious", + -13.27312183380127 + ], + [ + "▁annoyance", + -13.273144721984863 + ], + [ + "meyer", + -13.273148536682127 + ], + [ + "▁paranormal", + -13.273186683654783 + ], + [ + "GEN", + -13.27322006225586 + ], + [ + "▁Playground", + -13.273239135742188 + ], + [ + "▁aggregates", + -13.273240089416504 + ], + [ + "▁psychedelic", + -13.273248672485352 + ], + [ + "▁Osborne", + -13.273274421691896 + ], + [ + "355", + -13.273277282714844 + ], + [ + "▁Phys", + -13.27329158782959 + ], + [ + "▁Bound", + -13.273308753967283 + ], + [ + "▁Bour", + -13.273348808288574 + ], + [ + "▁Poli", + -13.273383140563965 + ], + [ + "▁mpg", + -13.27338695526123 + ], + [ + "▁Trash", + -13.273457527160645 + ], + [ + "sufficient", + -13.273550987243652 + ], + [ + "▁Nylon", + -13.273552894592283 + ], + [ + "▁pacing", + -13.273558616638184 + ], + [ + "▁Hou", + -13.273615837097168 + ], + [ + "▁TG", + -13.273664474487305 + ], + [ + "UST", + -13.273701667785645 + ], + [ + "4-5", + -13.273720741271973 + ], + [ + "▁heirs", + -13.27375602722168 + ], + [ + "▁Context", + -13.273781776428224 + ], + [ + "▁1983.", + -13.273795127868652 + ], + [ + "▁Collar", + -13.27381706237793 + ], + [ + "▁waxing", + -13.273865699768066 + ], + [ + "▁Merrill", + -13.273870468139648 + ], + [ + "▁Quran", + -13.273927688598633 + ], + [ + "▁Wie", + -13.273937225341797 + ], + [ + "▁JJ", + -13.274014472961426 + ], + [ + "▁Shaker", + -13.274141311645508 + ], + [ + "▁nationalist", + -13.274168014526367 + ], + [ + "▁Accommodation", + -13.27420711517334 + ], + [ + "▁Formation", + -13.274242401123049 + ], + [ + "sens", + -13.274300575256348 + ], + [ + "reference", + -13.27444076538086 + ], + [ + "▁insisting", + -13.27450180053711 + ], + [ + "▁Babies", + -13.274560928344728 + ], + [ + "▁Pieces", + -13.27458381652832 + ], + [ + "▁Dolphins", + -13.274632453918455 + ], + [ + "▁anyways", + -13.274650573730469 + ], + [ + "▁Cocoa", + -13.274673461914062 + ], + [ + "▁1907", + -13.274704933166504 + ], + [ + "▁lentils", + -13.274734497070312 + ], + [ + "▁Elastic", + -13.27474308013916 + ], + [ + "▁Rails", + -13.274755477905272 + ], + [ + "cord", + -13.274786949157717 + ], + [ + "Outdoor", + -13.274809837341309 + ], + [ + "/2013", + -13.274839401245115 + ], + [ + "888", + -13.274924278259276 + ], + [ + "charged", + -13.27496337890625 + ], + [ + "▁Yoshi", + -13.275020599365234 + ], + [ + "▁HU", + -13.27504539489746 + ], + [ + "▁Rosie", + -13.275070190429688 + ], + [ + "▁Dye", + -13.275073051452637 + ], + [ + "sole", + -13.275087356567385 + ], + [ + "Ve", + -13.275094032287598 + ], + [ + "▁attribution", + -13.275114059448242 + ], + [ + "▁retaliation", + -13.275114059448242 + ], + [ + "▁Hulu", + -13.275116920471191 + ], + [ + "▁aptitude", + -13.275117874145508 + ], + [ + "▁Sergeant", + -13.275138854980469 + ], + [ + "▁coma", + -13.275152206420898 + ], + [ + "▁rhino", + -13.275225639343262 + ], + [ + "▁narrowing", + -13.275251388549805 + ], + [ + "▁indulgence", + -13.275267601013184 + ], + [ + "▁dehydrated", + -13.275286674499512 + ], + [ + "▁Broadcast", + -13.275306701660156 + ], + [ + "▁Operators", + -13.275449752807615 + ], + [ 
+ "▁parasites", + -13.275552749633787 + ], + [ + "▁Clo", + -13.275598526000977 + ], + [ + "▁waived", + -13.27574062347412 + ], + [ + "Forum", + -13.275827407836914 + ], + [ + "▁diminishing", + -13.27591609954834 + ], + [ + "▁Sawyer", + -13.275935173034668 + ], + [ + "▁Mahal", + -13.275970458984377 + ], + [ + "▁Bruins", + -13.275997161865234 + ], + [ + "egan", + -13.27599811553955 + ], + [ + "Vo", + -13.276055335998535 + ], + [ + "▁dolphin", + -13.276060104370115 + ], + [ + "▁tapas", + -13.276086807250977 + ], + [ + "ssel", + -13.27613925933838 + ], + [ + "▁Reasons", + -13.276188850402832 + ], + [ + "MIC", + -13.276199340820312 + ], + [ + "liness", + -13.27622127532959 + ], + [ + "ometric", + -13.276239395141602 + ], + [ + "vana", + -13.276290893554688 + ], + [ + "6).", + -13.276411056518556 + ], + [ + "RAN", + -13.276411056518556 + ], + [ + "▁410", + -13.276535034179688 + ], + [ + "▁federally", + -13.276546478271484 + ], + [ + "▁Loose", + -13.27655029296875 + ], + [ + "▁Adaptive", + -13.27659797668457 + ], + [ + "▁NON", + -13.276668548583984 + ], + [ + "Studies", + -13.276810646057127 + ], + [ + "sible", + -13.27685546875 + ], + [ + "▁2015).", + -13.276867866516112 + ], + [ + "▁SYSTEM", + -13.276932716369627 + ], + [ + "▁Carlton", + -13.277050971984863 + ], + [ + "▁Batt", + -13.277064323425291 + ], + [ + "▁Monastery", + -13.27711009979248 + ], + [ + "▁ambiguous", + -13.27711009979248 + ], + [ + "▁stimuli", + -13.27711009979248 + ], + [ + "▁Saunders", + -13.277111053466797 + ], + [ + "kra", + -13.277132034301758 + ], + [ + "selected", + -13.277135848999023 + ], + [ + "▁wiping", + -13.277138710021973 + ], + [ + "▁Knoxville", + -13.277158737182615 + ], + [ + "▁transplantation", + -13.27720832824707 + ], + [ + "LAS", + -13.2772216796875 + ], + [ + "Neil", + -13.277255058288574 + ], + [ + "▁Roche", + -13.27729034423828 + ], + [ + "gent", + -13.277456283569336 + ], + [ + "▁departmental", + -13.277498245239258 + ], + [ + "▁prosecuted", + -13.277575492858888 + ], + [ + "▁Lecturer", + -13.27760410308838 + ], + [ + "▁reptiles", + -13.27760410308838 + ], + [ + "▁AFL", + -13.277617454528809 + ], + [ + "▁boldly", + -13.277734756469728 + ], + [ + "manager", + -13.277828216552734 + ], + [ + "▁Trials", + -13.277924537658691 + ], + [ + "▁Croatian", + -13.277925491333008 + ], + [ + "▁husbands", + -13.277960777282717 + ], + [ + "▁ticketing", + -13.278069496154783 + ], + [ + "▁della", + -13.278094291687012 + ], + [ + "cleaning", + -13.278120994567873 + ], + [ + "▁Florist", + -13.278141021728516 + ], + [ + "▁rusty", + -13.278183937072754 + ], + [ + "orn", + -13.278226852416992 + ], + [ + "▁Lund", + -13.278278350830078 + ], + [ + "▁Kanye", + -13.278315544128418 + ], + [ + "▁Ness", + -13.278546333312988 + ], + [ + "df", + -13.278547286987305 + ], + [ + "▁Relationships", + -13.278611183166504 + ], + [ + "▁Laminate", + -13.27861499786377 + ], + [ + "ISH", + -13.278731346130373 + ], + [ + "▁Leaves", + -13.278815269470217 + ], + [ + "sid", + -13.278820991516112 + ], + [ + "Bedroom", + -13.278868675231934 + ], + [ + "▁2017)", + -13.27892780303955 + ], + [ + "▁Addiction", + -13.278931617736816 + ], + [ + "▁pickled", + -13.27894115447998 + ], + [ + "▁midterm", + -13.279009819030762 + ], + [ + "sexual", + -13.279077529907228 + ], + [ + "clad", + -13.279092788696287 + ], + [ + "▁Corridor", + -13.279109954833984 + ], + [ + "▁admirable", + -13.279109954833984 + ], + [ + "▁illustrating", + -13.279109954833984 + ], + [ + "▁bridging", + -13.2791109085083 + ], + [ + "▁saloon", + -13.279114723205566 + ], + [ + "▁unrestricted", 
+ -13.279136657714844 + ], + [ + "▁Ibiza", + -13.279163360595703 + ], + [ + "lish", + -13.279251098632812 + ], + [ + "▁degrade", + -13.279309272766112 + ], + [ + "▁cushioning", + -13.27931022644043 + ], + [ + "tw", + -13.279311180114746 + ], + [ + "▁Horton", + -13.279354095458984 + ], + [ + "▁Conway", + -13.279444694519045 + ], + [ + "▁sapphire", + -13.279555320739746 + ], + [ + "▁synchronized", + -13.279608726501465 + ], + [ + "▁Vanguard", + -13.279664039611816 + ], + [ + "▁gall", + -13.279739379882812 + ], + [ + "▁Subsequently", + -13.279740333557127 + ], + [ + "Setting", + -13.27975082397461 + ], + [ + "chia", + -13.27977466583252 + ], + [ + "▁backstage", + -13.2798490524292 + ], + [ + "Rest", + -13.279913902282717 + ], + [ + "▁groundwork", + -13.279990196228027 + ], + [ + "▁$15,000", + -13.280016899108888 + ], + [ + "▁estrogen", + -13.280076026916504 + ], + [ + "▁boarded", + -13.280080795288086 + ], + [ + "▁$250,000", + -13.280170440673828 + ], + [ + "▁35-", + -13.280170440673828 + ], + [ + "phen", + -13.28017807006836 + ], + [ + "▁spilled", + -13.28018569946289 + ], + [ + "▁dictated", + -13.280211448669434 + ], + [ + "KING", + -13.280231475830078 + ], + [ + "▁cluttered", + -13.280258178710938 + ], + [ + "Talking", + -13.280477523803713 + ], + [ + "construction", + -13.280570030212402 + ], + [ + "▁Gibraltar", + -13.280580520629885 + ], + [ + "▁surfers", + -13.280603408813477 + ], + [ + "qa", + -13.28065586090088 + ], + [ + "structure", + -13.280659675598145 + ], + [ + "▁Cinnamon", + -13.280675888061523 + ], + [ + "▁brainstorm", + -13.280710220336914 + ], + [ + "▁mas", + -13.280797004699709 + ], + [ + "▁Quickly", + -13.280797958374023 + ], + [ + "Ce", + -13.280866622924805 + ], + [ + "▁unnatural", + -13.280876159667969 + ], + [ + "▁offender", + -13.280913352966309 + ], + [ + "region", + -13.28099250793457 + ], + [ + "Ram", + -13.281006813049316 + ], + [ + "▁Congressman", + -13.281044960021973 + ], + [ + "▁devastation", + -13.281113624572754 + ], + [ + "▁illuminating", + -13.281113624572754 + ], + [ + "▁octopus", + -13.281113624572754 + ], + [ + "▁slicing", + -13.281115531921388 + ], + [ + "▁warehousing", + -13.281115531921388 + ], + [ + "▁Chargers", + -13.281132698059082 + ], + [ + "Rain", + -13.281177520751951 + ], + [ + "▁chalkboard", + -13.281185150146484 + ], + [ + "fore", + -13.281221389770508 + ], + [ + "▁Cylinder", + -13.281271934509276 + ], + [ + "loo", + -13.281293869018556 + ], + [ + "▁Keystone", + -13.28135108947754 + ], + [ + "▁Stor", + -13.28139877319336 + ], + [ + "Brain", + -13.28140354156494 + ], + [ + "▁activating", + -13.281414031982422 + ], + [ + "▁pilgrims", + -13.281575202941896 + ], + [ + "▁intestine", + -13.281590461730955 + ], + [ + "777", + -13.281631469726562 + ], + [ + "firm", + -13.281856536865234 + ], + [ + "▁2023", + -13.281880378723145 + ], + [ + "▁Sylvia", + -13.281909942626951 + ], + [ + "▁Revolutionary", + -13.282004356384276 + ], + [ + "ité", + -13.282105445861816 + ], + [ + "ashi", + -13.28225803375244 + ], + [ + "elected", + -13.282339096069336 + ], + [ + "▁Sectional", + -13.282357215881348 + ], + [ + "session", + -13.282463073730469 + ], + [ + "▁Melt", + -13.282463073730469 + ], + [ + "▁bon", + -13.282647132873535 + ], + [ + "322", + -13.282679557800291 + ], + [ + "kor", + -13.282685279846191 + ], + [ + "given", + -13.282758712768556 + ], + [ + "▁Macedonia", + -13.28277587890625 + ], + [ + "203", + -13.282838821411133 + ], + [ + "Mel", + -13.282875061035156 + ], + [ + "ssen", + -13.282909393310549 + ], + [ + "beautiful", + -13.28296947479248 + ], + 
[ + "▁$3,000", + -13.282981872558594 + ], + [ + "▁salesman", + -13.283000946044922 + ], + [ + "▁millennial", + -13.283007621765137 + ], + [ + "▁pounding", + -13.283024787902832 + ], + [ + "struct", + -13.283050537109377 + ], + [ + "▁climbers", + -13.283108711242676 + ], + [ + "▁punctuation", + -13.283123016357422 + ], + [ + "▁suicidal", + -13.283123970031738 + ], + [ + "▁Dialogue", + -13.283136367797852 + ], + [ + "▁Prescott", + -13.283151626586914 + ], + [ + "▁consignment", + -13.283160209655762 + ], + [ + "▁inkjet", + -13.283163070678713 + ], + [ + "▁(2002)", + -13.28318214416504 + ], + [ + "▁Statue", + -13.283221244812012 + ], + [ + "McC", + -13.283279418945312 + ], + [ + "▁inhibition", + -13.283283233642578 + ], + [ + "▁taped", + -13.283296585083008 + ], + [ + "Match", + -13.283339500427246 + ], + [ + "▁wok", + -13.283349990844728 + ], + [ + "▁sunburn", + -13.283369064331056 + ], + [ + "▁hypnosis", + -13.283438682556152 + ], + [ + "▁termites", + -13.283503532409668 + ], + [ + "▁Forestry", + -13.283516883850098 + ], + [ + "▁greedy", + -13.28353500366211 + ], + [ + "▁Fixtures", + -13.28355312347412 + ], + [ + "▁Harmon", + -13.28355884552002 + ], + [ + "=”", + -13.283589363098145 + ], + [ + "lain", + -13.283609390258787 + ], + [ + "▁lagoon", + -13.283628463745115 + ], + [ + "▁(2009)", + -13.283636093139648 + ], + [ + "CHA", + -13.283646583557127 + ], + [ + "▁Philippe", + -13.283714294433594 + ], + [ + "▁Dexter", + -13.2838716506958 + ], + [ + "▁Anywhere", + -13.283941268920898 + ], + [ + "▁Tian", + -13.284008979797363 + ], + [ + "fm", + -13.284058570861816 + ], + [ + "wr", + -13.284095764160156 + ], + [ + "IFF", + -13.28419589996338 + ], + [ + "▁rake", + -13.284202575683594 + ], + [ + "▁Reliance", + -13.284231185913086 + ], + [ + "JP", + -13.28432559967041 + ], + [ + "▁handyman", + -13.284358978271484 + ], + [ + "9).", + -13.28447723388672 + ], + [ + "gm", + -13.28456211090088 + ], + [ + "hani", + -13.284586906433104 + ], + [ + "fashion", + -13.284623146057127 + ], + [ + "▁Indigo", + -13.284669876098633 + ], + [ + "▁detract", + -13.28475856781006 + ], + [ + "▁psi", + -13.284790992736816 + ], + [ + "▁16%", + -13.284852027893066 + ], + [ + "▁1987.", + -13.284930229187012 + ], + [ + "bh", + -13.285012245178224 + ], + [ + "bey", + -13.285035133361816 + ], + [ + "▁admittedly", + -13.285099029541016 + ], + [ + "▁condensation", + -13.285134315490724 + ], + [ + "▁recurrence", + -13.285134315490724 + ], + [ + "▁Revival", + -13.285143852233888 + ], + [ + "▁gingerbread", + -13.285143852233888 + ], + [ + "▁counteract", + -13.28524684906006 + ], + [ + "yp", + -13.28530979156494 + ], + [ + "▁Serbian", + -13.285343170166016 + ], + [ + "▁nationalism", + -13.285388946533203 + ], + [ + "▁Viva", + -13.285402297973633 + ], + [ + "▁nightmares", + -13.285455703735352 + ], + [ + "▁Vegetable", + -13.285539627075195 + ], + [ + "10,000", + -13.28559684753418 + ], + [ + "▁MSU", + -13.28565788269043 + ], + [ + "▁Ple", + -13.285700798034668 + ], + [ + "▁Rory", + -13.285828590393066 + ], + [ + "▁ladders", + -13.285905838012695 + ], + [ + "▁rename", + -13.285965919494627 + ], + [ + "▁FD", + -13.285975456237791 + ], + [ + "▁Dolphin", + -13.285991668701172 + ], + [ + "Steel", + -13.286028861999512 + ], + [ + "▁surrendered", + -13.286056518554688 + ], + [ + "tman", + -13.286092758178713 + ], + [ + "▁Madonna", + -13.286162376403809 + ], + [ + "▁Officials", + -13.28619384765625 + ], + [ + "▁Asked", + -13.286324501037598 + ], + [ + "tero", + -13.286325454711914 + ], + [ + "▁Livingston", + -13.286575317382812 + ], + [ + 
"▁KT", + -13.28660774230957 + ], + [ + "ith", + -13.28665542602539 + ], + [ + "/27", + -13.286656379699709 + ], + [ + "▁filmmaking", + -13.286665916442873 + ], + [ + "lage", + -13.286763191223145 + ], + [ + "▁erased", + -13.286763191223145 + ], + [ + "▁Mic", + -13.286823272705078 + ], + [ + "nio", + -13.28684425354004 + ], + [ + "▁exponential", + -13.28691291809082 + ], + [ + "29.", + -13.28693389892578 + ], + [ + "▁Attorneys", + -13.286989212036133 + ], + [ + "▁Argentine", + -13.28704833984375 + ], + [ + "Electronic", + -13.28712558746338 + ], + [ + "▁ancestral", + -13.287150382995604 + ], + [ + "▁fudge", + -13.287184715270996 + ], + [ + "/01/", + -13.287201881408691 + ], + [ + "▁Magnolia", + -13.287324905395508 + ], + [ + "tant", + -13.28734302520752 + ], + [ + "▁pears", + -13.287357330322266 + ], + [ + "▁Keyword", + -13.2874116897583 + ], + [ + "▁Puppy", + -13.28742504119873 + ], + [ + "▁Blizzard", + -13.287471771240234 + ], + [ + "▁630", + -13.287476539611816 + ], + [ + "▁marinated", + -13.287485122680664 + ], + [ + "▁UNI", + -13.287508010864258 + ], + [ + "▁captivated", + -13.287546157836914 + ], + [ + "▁spared", + -13.287546157836914 + ], + [ + "▁INFO", + -13.287549018859863 + ], + [ + "▁$28", + -13.287557601928713 + ], + [ + "990", + -13.287598609924316 + ], + [ + "▁mistakenly", + -13.287605285644531 + ], + [ + ":44", + -13.28761386871338 + ], + [ + "▁blindness", + -13.287674903869627 + ], + [ + "▁tonal", + -13.287766456604004 + ], + [ + "▁Gau", + -13.287790298461914 + ], + [ + "▁reacts", + -13.287835121154783 + ], + [ + "▁bidder", + -13.287851333618164 + ], + [ + "▁circumvent", + -13.287911415100098 + ], + [ + "305", + -13.287941932678224 + ], + [ + "▁beg", + -13.287996292114258 + ], + [ + "wag", + -13.288002014160156 + ], + [ + "▁humbled", + -13.288004875183104 + ], + [ + "▁capitalization", + -13.288005828857422 + ], + [ + "safety", + -13.288095474243164 + ], + [ + "▁Arte", + -13.288105010986328 + ], + [ + "Metal", + -13.288200378417969 + ], + [ + "▁Hari", + -13.288202285766602 + ], + [ + "▁flatter", + -13.288352966308594 + ], + [ + "types", + -13.288359642028809 + ], + [ + "nish", + -13.288460731506348 + ], + [ + "▁violates", + -13.288626670837402 + ], + [ + "▁Apprentice", + -13.288674354553224 + ], + [ + "701", + -13.28892993927002 + ], + [ + "▁Mohamed", + -13.289090156555176 + ], + [ + "▁Cambogia", + -13.289170265197754 + ], + [ + "▁admiring", + -13.289170265197754 + ], + [ + "▁Rubin", + -13.289180755615234 + ], + [ + "▁undefeated", + -13.289183616638184 + ], + [ + "-72", + -13.289188385009766 + ], + [ + "▁tubular", + -13.289198875427246 + ], + [ + "▁detachable", + -13.289224624633787 + ], + [ + "▁predetermined", + -13.289234161376951 + ], + [ + "▁disturbances", + -13.289241790771484 + ], + [ + "▁fumes", + -13.28925323486328 + ], + [ + "example", + -13.28930377960205 + ], + [ + "dash", + -13.289307594299316 + ], + [ + "mission", + -13.289374351501465 + ], + [ + "▁Howell", + -13.289470672607422 + ], + [ + "▁karma", + -13.289545059204102 + ], + [ + "▁evacuated", + -13.28960418701172 + ], + [ + "▁Sikh", + -13.289615631103516 + ], + [ + "▁dips", + -13.28969955444336 + ], + [ + "▁Ops", + -13.289732933044434 + ], + [ + "▁Wednesdays", + -13.289755821228027 + ], + [ + "▁Stove", + -13.289756774902344 + ], + [ + "▁rockets", + -13.28976058959961 + ], + [ + "▁leveling", + -13.28980541229248 + ], + [ + "▁Curious", + -13.289849281311035 + ], + [ + "▁(2018)", + -13.289999008178713 + ], + [ + "▁Zhu", + -13.29012680053711 + ], + [ + "▁italian", + -13.29030418395996 + ], + [ + "▁fetal", + 
-13.290318489074709 + ], + [ + "▁sow", + -13.290353775024414 + ], + [ + "blown", + -13.290392875671388 + ], + [ + "▁derives", + -13.290432929992676 + ], + [ + "520", + -13.29049587249756 + ], + [ + "leave", + -13.290508270263672 + ], + [ + "▁176", + -13.290534973144531 + ], + [ + "6:", + -13.290639877319336 + ], + [ + "past", + -13.290663719177246 + ], + [ + "YES", + -13.290781021118164 + ], + [ + "▁wrinkle", + -13.290818214416504 + ], + [ + "▁freestanding", + -13.29084587097168 + ], + [ + "rada", + -13.290863990783691 + ], + [ + "Tune", + -13.290928840637209 + ], + [ + "▁Dedicated", + -13.291010856628418 + ], + [ + "acid", + -13.291013717651367 + ], + [ + "▁delicacies", + -13.291194915771484 + ], + [ + "▁executable", + -13.291194915771484 + ], + [ + "▁wickets", + -13.291223526000977 + ], + [ + "▁unidentified", + -13.29123878479004 + ], + [ + "PLEASE", + -13.291239738464355 + ], + [ + "▁cages", + -13.29124927520752 + ], + [ + "Storage", + -13.291253089904783 + ], + [ + "▁Dante", + -13.291309356689451 + ], + [ + "Technical", + -13.2914400100708 + ], + [ + "▁intrusive", + -13.29145622253418 + ], + [ + "▁Lightroom", + -13.29147243499756 + ], + [ + "▁beachfront", + -13.29148006439209 + ], + [ + "Rent", + -13.291497230529783 + ], + [ + "▁DID", + -13.291501998901367 + ], + [ + "▁aggravated", + -13.29150676727295 + ], + [ + "▁beet", + -13.291522979736328 + ], + [ + "Blood", + -13.291608810424805 + ], + [ + "▁Proc", + -13.291620254516602 + ], + [ + "▁smartest", + -13.29178237915039 + ], + [ + "▁ABA", + -13.291794776916504 + ], + [ + "▁Albion", + -13.291826248168944 + ], + [ + "▁peaked", + -13.291973114013672 + ], + [ + "▁DIN", + -13.292040824890137 + ], + [ + "▁Espa", + -13.292149543762209 + ], + [ + "▁rein", + -13.292166709899902 + ], + [ + "▁*****", + -13.292224884033203 + ], + [ + "-2014", + -13.292240142822266 + ], + [ + "ame", + -13.292343139648438 + ], + [ + "bos", + -13.292415618896484 + ], + [ + "duc", + -13.29258918762207 + ], + [ + "▁Responses", + -13.292619705200195 + ], + [ + "▁spinner", + -13.29269313812256 + ], + [ + "▁Instrument", + -13.292731285095217 + ], + [ + "▁Zara", + -13.292763710021973 + ], + [ + "▁confines", + -13.292790412902832 + ], + [ + "▁fresco", + -13.292994499206545 + ], + [ + "Certain", + -13.293071746826172 + ], + [ + "▁slid", + -13.29308795928955 + ], + [ + "kah", + -13.293100357055664 + ], + [ + "▁Kul", + -13.29311180114746 + ], + [ + "▁sprays", + -13.293176651000977 + ], + [ + "▁ecstatic", + -13.29322338104248 + ], + [ + "▁redundancy", + -13.29322338104248 + ], + [ + "▁audible", + -13.293228149414062 + ], + [ + "▁Bieber", + -13.29323387145996 + ], + [ + "▁Squadron", + -13.293246269226074 + ], + [ + "▁savage", + -13.2933349609375 + ], + [ + "▁Icelandic", + -13.293378829956056 + ], + [ + "▁guarded", + -13.293427467346191 + ], + [ + "▁php", + -13.29343318939209 + ], + [ + "▁blasted", + -13.293513298034668 + ], + [ + "304", + -13.293533325195312 + ], + [ + "▁Knock", + -13.293563842773438 + ], + [ + "ids", + -13.293573379516602 + ], + [ + "▁enlightening", + -13.29357624053955 + ], + [ + "CERT", + -13.293654441833496 + ], + [ + "▁Rite", + -13.293779373168944 + ], + [ + "▁liters", + -13.293779373168944 + ], + [ + "▁Mouth", + -13.293785095214844 + ], + [ + "▁Caleb", + -13.293910026550291 + ], + [ + "-300", + -13.29392433166504 + ], + [ + "pose", + -13.293991088867188 + ], + [ + "▁Deloitte", + -13.29399871826172 + ], + [ + "▁HIS", + -13.29401969909668 + ], + [ + "▁risking", + -13.294084548950195 + ], + [ + "▁verbally", + -13.294092178344728 + ], + [ + "▁latte", + 
-13.294112205505373 + ], + [ + "Connell", + -13.2943115234375 + ], + [ + "▁DMV", + -13.29443359375 + ], + [ + "ury", + -13.29448127746582 + ], + [ + "▁IU", + -13.294590950012209 + ], + [ + "▁procure", + -13.29465103149414 + ], + [ + "lant", + -13.294665336608888 + ], + [ + "▁thrives", + -13.294699668884276 + ], + [ + "▁freelancer", + -13.294720649719238 + ], + [ + "▁dreadful", + -13.294737815856934 + ], + [ + "dee", + -13.294899940490724 + ], + [ + "▁neighbourhoods", + -13.295029640197754 + ], + [ + "2.8", + -13.29506015777588 + ], + [ + "▁hammered", + -13.295147895812988 + ], + [ + "▁scones", + -13.29518222808838 + ], + [ + "oki", + -13.295195579528809 + ], + [ + "▁manifesto", + -13.295238494873049 + ], + [ + "▁Beethoven", + -13.295255661010742 + ], + [ + "▁cylindrical", + -13.295255661010742 + ], + [ + "▁harmonies", + -13.295255661010742 + ], + [ + "▁ramifications", + -13.295255661010742 + ], + [ + "▁spherical", + -13.29527759552002 + ], + [ + "organized", + -13.295299530029297 + ], + [ + "▁allotment", + -13.295323371887209 + ], + [ + "▁Wakefield", + -13.295330047607422 + ], + [ + "▁betrayal", + -13.295337677001951 + ], + [ + "meat", + -13.295443534851074 + ], + [ + "▁racers", + -13.295452117919922 + ], + [ + "▁Dharma", + -13.295459747314451 + ], + [ + "clusion", + -13.29546070098877 + ], + [ + "▁devotional", + -13.295466423034668 + ], + [ + "Neither", + -13.29552173614502 + ], + [ + "ordered", + -13.295530319213867 + ], + [ + "Repeat", + -13.295570373535156 + ], + [ + "▁Giovanni", + -13.295637130737305 + ], + [ + "▁Wifi", + -13.295671463012695 + ], + [ + "▁unleashed", + -13.295705795288086 + ], + [ + "▁mayo", + -13.295737266540527 + ], + [ + "▁netting", + -13.295865058898926 + ], + [ + "ados", + -13.295903205871582 + ], + [ + "adult", + -13.295949935913086 + ], + [ + "▁1982.", + -13.295982360839844 + ], + [ + "UG", + -13.295991897583008 + ], + [ + "Cy", + -13.296069145202637 + ], + [ + "▁Praise", + -13.296082496643066 + ], + [ + "▁Docs", + -13.296173095703123 + ], + [ + "▁Damon", + -13.296341896057127 + ], + [ + "▁31.", + -13.29636001586914 + ], + [ + "▁preacher", + -13.29636287689209 + ], + [ + "▁compressors", + -13.296438217163086 + ], + [ + "river", + -13.296439170837402 + ], + [ + "▁churn", + -13.296443939208984 + ], + [ + "Member", + -13.29648208618164 + ], + [ + "finding", + -13.296485900878906 + ], + [ + "instead", + -13.29654598236084 + ], + [ + "▁Greeks", + -13.296576499938965 + ], + [ + "▁MIL", + -13.296597480773926 + ], + [ + "nte", + -13.296667098999023 + ], + [ + "▁Circular", + -13.29671859741211 + ], + [ + "▁$27", + -13.296754837036133 + ], + [ + "▁SKF", + -13.296768188476562 + ], + [ + "death", + -13.296844482421877 + ], + [ + "\";", + -13.296876907348633 + ], + [ + "bite", + -13.296940803527832 + ], + [ + "▁Thom", + -13.297030448913574 + ], + [ + "▁impatient", + -13.29706859588623 + ], + [ + "▁Nichols", + -13.297122955322266 + ], + [ + "▁Automated", + -13.29713249206543 + ], + [ + "▁Produced", + -13.29718017578125 + ], + [ + "▁immortal", + -13.297231674194336 + ], + [ + "▁diaphragm", + -13.297292709350586 + ], + [ + "▁karaoke", + -13.297292709350586 + ], + [ + "▁synergy", + -13.297293663024902 + ], + [ + "▁enormously", + -13.29729461669922 + ], + [ + "▁Simulator", + -13.297300338745115 + ], + [ + "▁Citizenship", + -13.297323226928713 + ], + [ + "▁calligraphy", + -13.297338485717772 + ], + [ + "▁Charlottesville", + -13.29734992980957 + ], + [ + "▁snapping", + -13.297365188598633 + ], + [ + "▁feeders", + -13.297371864318848 + ], + [ + "▁carpenter", + -13.297392845153809 + 
], + [ + "▁diagnoses", + -13.297444343566896 + ], + [ + "▁sy", + -13.297607421875 + ], + [ + "▁Conversely", + -13.297796249389648 + ], + [ + "▁READ", + -13.297869682312012 + ], + [ + "Tan", + -13.297876358032228 + ], + [ + "▁Submission", + -13.297881126403809 + ], + [ + "program", + -13.297941207885742 + ], + [ + "▁Briggs", + -13.297969818115234 + ], + [ + "▁motorbike", + -13.298015594482422 + ], + [ + "▁Curtains", + -13.298052787780762 + ], + [ + "020", + -13.298060417175291 + ], + [ + "gear", + -13.298065185546877 + ], + [ + "▁interacts", + -13.298072814941406 + ], + [ + "▁#10", + -13.298160552978516 + ], + [ + "task", + -13.29822826385498 + ], + [ + "▁cer", + -13.298262596130373 + ], + [ + "▁redo", + -13.298274040222168 + ], + [ + "ark", + -13.298364639282228 + ], + [ + "finger", + -13.29853630065918 + ], + [ + "8%)", + -13.298554420471191 + ], + [ + "ulla", + -13.29863739013672 + ], + [ + "▁Dh", + -13.29865837097168 + ], + [ + "released", + -13.298680305480955 + ], + [ + "▁Poverty", + -13.298681259155272 + ], + [ + "▁heaps", + -13.298771858215332 + ], + [ + "ticket", + -13.298829078674316 + ], + [ + "▁ab", + -13.29906940460205 + ], + [ + "▁allowances", + -13.299079895019531 + ], + [ + "▁fractions", + -13.29917049407959 + ], + [ + "▁Sunni", + -13.299186706542969 + ], + [ + "▁148", + -13.299278259277344 + ], + [ + "▁8000", + -13.299306869506836 + ], + [ + "▁pleasurable", + -13.299333572387695 + ], + [ + "▁Socialist", + -13.299335479736328 + ], + [ + "▁abnormalities", + -13.299360275268556 + ], + [ + "▁monies", + -13.299396514892578 + ], + [ + "▁Toast", + -13.299420356750488 + ], + [ + "▁1979.", + -13.299494743347168 + ], + [ + "▁1906", + -13.29951000213623 + ], + [ + "▁Killing", + -13.29956340789795 + ], + [ + "▁plotting", + -13.29963207244873 + ], + [ + "▁WF", + -13.29966640472412 + ], + [ + "sted", + -13.299686431884766 + ], + [ + "▁willingly", + -13.299701690673828 + ], + [ + "isch", + -13.29976749420166 + ], + [ + "▁ascending", + -13.299812316894531 + ], + [ + "aca", + -13.29985523223877 + ], + [ + "▁Forrest", + -13.299941062927246 + ], + [ + "▁Assassin", + -13.299969673156738 + ], + [ + "stor", + -13.300008773803713 + ], + [ + "▁insanely", + -13.300008773803713 + ], + [ + "▁militant", + -13.300036430358888 + ], + [ + "▁Din", + -13.300135612487791 + ], + [ + "atu", + -13.30015754699707 + ], + [ + "RNA", + -13.300178527832031 + ], + [ + "▁Suit", + -13.300280570983888 + ], + [ + "▁salesperson", + -13.300289154052734 + ], + [ + "▁flashy", + -13.300374031066896 + ], + [ + "▁GREEN", + -13.300464630126951 + ], + [ + "nau", + -13.300467491149902 + ], + [ + "▁lodges", + -13.300474166870115 + ], + [ + "▁Chips", + -13.30049991607666 + ], + [ + "Ham", + -13.300528526306152 + ], + [ + "Susan", + -13.300625801086426 + ], + [ + "treat", + -13.30063819885254 + ], + [ + "branded", + -13.300713539123535 + ], + [ + "▁maxi", + -13.300868034362791 + ], + [ + "▁Ware", + -13.300875663757324 + ], + [ + "Math", + -13.30097198486328 + ], + [ + "HAM", + -13.30103874206543 + ], + [ + "eff", + -13.301069259643556 + ], + [ + "▁modernization", + -13.301098823547363 + ], + [ + "▁doable", + -13.301112174987791 + ], + [ + "outside", + -13.30119514465332 + ], + [ + "▁Mayer", + -13.301246643066406 + ], + [ + "▁Cruiser", + -13.30125904083252 + ], + [ + "386", + -13.301259994506836 + ], + [ + "▁PIC", + -13.301342964172363 + ], + [ + "▁believable", + -13.30137825012207 + ], + [ + "▁wavelength", + -13.30137825012207 + ], + [ + "▁lang", + -13.301379203796388 + ], + [ + "▁skim", + -13.301387786865234 + ], + [ + "▁vir", + 
-13.301392555236816 + ], + [ + "▁trimester", + -13.301399230957031 + ], + [ + "Bed", + -13.301435470581056 + ], + [ + "Message", + -13.30144500732422 + ], + [ + "emic", + -13.3014497756958 + ], + [ + "▁Glendale", + -13.301450729370115 + ], + [ + "▁impede", + -13.301496505737305 + ], + [ + "▁17%", + -13.301504135131836 + ], + [ + "▁america", + -13.301507949829102 + ], + [ + "▁bagged", + -13.301538467407228 + ], + [ + "▁librarians", + -13.301642417907717 + ], + [ + "amu", + -13.301658630371094 + ], + [ + "▁writ", + -13.301719665527344 + ], + [ + "▁Isabel", + -13.301742553710938 + ], + [ + "▁inline", + -13.301758766174316 + ], + [ + "Ltd", + -13.30176830291748 + ], + [ + "sugar", + -13.301776885986328 + ], + [ + "sphere", + -13.30179500579834 + ], + [ + "▁derby", + -13.301800727844238 + ], + [ + "▁billboards", + -13.30186367034912 + ], + [ + "▁blackberry", + -13.301868438720703 + ], + [ + "▁datasheet", + -13.30193042755127 + ], + [ + "▁petty", + -13.301961898803713 + ], + [ + "▁Fever", + -13.301974296569824 + ], + [ + "▁FCA", + -13.30204963684082 + ], + [ + "cken", + -13.30213451385498 + ], + [ + "Enable", + -13.302152633666992 + ], + [ + "▁Funk", + -13.302156448364258 + ], + [ + "▁Curve", + -13.302288055419922 + ], + [ + "sight", + -13.302597045898438 + ], + [ + "▁Chai", + -13.302610397338867 + ], + [ + "▁Ronnie", + -13.302679061889648 + ], + [ + "▁prophets", + -13.302749633789062 + ], + [ + "rani", + -13.302850723266602 + ], + [ + "links", + -13.302882194519045 + ], + [ + "bought", + -13.302990913391112 + ], + [ + "▁Pants", + -13.302990913391112 + ], + [ + "640", + -13.303006172180176 + ], + [ + "idad", + -13.303122520446776 + ], + [ + "pher", + -13.303245544433594 + ], + [ + "▁Spaces", + -13.303262710571287 + ], + [ + "seen", + -13.303387641906738 + ], + [ + "▁participatory", + -13.303427696228027 + ], + [ + "Reg", + -13.30348014831543 + ], + [ + "▁televisions", + -13.303486824035645 + ], + [ + "▁parole", + -13.30349349975586 + ], + [ + "Fer", + -13.30349826812744 + ], + [ + "▁Pantry", + -13.30350399017334 + ], + [ + "Updated", + -13.30356788635254 + ], + [ + "▁multipurpose", + -13.303622245788574 + ], + [ + "pia", + -13.303633689880373 + ], + [ + "▁Braves", + -13.303665161132812 + ], + [ + "▁cardinal", + -13.303730010986328 + ], + [ + "cru", + -13.303735733032228 + ], + [ + "increasing", + -13.30378246307373 + ], + [ + "dated", + -13.303794860839844 + ], + [ + "Delivery", + -13.303913116455078 + ], + [ + "Pure", + -13.30398654937744 + ], + [ + "▁alias", + -13.304058074951172 + ], + [ + "Fans", + -13.304075241088867 + ], + [ + "▁Lumber", + -13.30411148071289 + ], + [ + "pura", + -13.304123878479004 + ], + [ + "Others", + -13.30417251586914 + ], + [ + "▁fins", + -13.30418586730957 + ], + [ + "▁eruption", + -13.30420207977295 + ], + [ + "Tel", + -13.304211616516112 + ], + [ + "shower", + -13.304254531860352 + ], + [ + "▁NRA", + -13.304299354553224 + ], + [ + "▁SKU", + -13.304404258728027 + ], + [ + "▁Tong", + -13.304414749145508 + ], + [ + "▁Chinatown", + -13.304494857788086 + ], + [ + "LV", + -13.3045015335083 + ], + [ + "▁Titanic", + -13.304577827453612 + ], + [ + "▁Pinnacle", + -13.304619789123535 + ], + [ + "LER", + -13.304817199707031 + ], + [ + "▁Namibia", + -13.30484104156494 + ], + [ + "▁Britt", + -13.304842948913574 + ], + [ + "Article", + -13.304845809936523 + ], + [ + "▁reassure", + -13.304880142211914 + ], + [ + "▁6.1", + -13.30488109588623 + ], + [ + "▁puncture", + -13.30488395690918 + ], + [ + "▁originates", + -13.304919242858888 + ], + [ + "coded", + -13.304960250854492 + ], + 
[ + "gill", + -13.304962158203123 + ], + [ + "▁ep", + -13.304990768432615 + ], + [ + "Lin", + -13.305007934570312 + ], + [ + "▁Pamela", + -13.305046081542969 + ], + [ + "▁Cala", + -13.305112838745115 + ], + [ + "▁soluble", + -13.305160522460938 + ], + [ + "lent", + -13.305184364318848 + ], + [ + "8).", + -13.305185317993164 + ], + [ + "▁DW", + -13.30522918701172 + ], + [ + "eme", + -13.305259704589844 + ], + [ + "▁CAM", + -13.305273056030272 + ], + [ + "▁dearly", + -13.305285453796388 + ], + [ + "▁PRICE", + -13.305359840393066 + ], + [ + "▁Santo", + -13.305399894714355 + ], + [ + "ott", + -13.305431365966797 + ], + [ + "▁penetrating", + -13.305493354797363 + ], + [ + "Kar", + -13.305517196655272 + ], + [ + "▁blemishes", + -13.30556297302246 + ], + [ + "▁Langley", + -13.30560302734375 + ], + [ + "▁Poole", + -13.305611610412598 + ], + [ + "Wedding", + -13.305672645568848 + ], + [ + "232", + -13.305693626403809 + ], + [ + "▁Bearings", + -13.305730819702148 + ], + [ + "isco", + -13.305761337280272 + ], + [ + "▁THERE", + -13.305782318115234 + ], + [ + "frequency", + -13.305842399597168 + ], + [ + "competitive", + -13.30588722229004 + ], + [ + "Len", + -13.305891036987305 + ], + [ + "▁Skull", + -13.30591106414795 + ], + [ + "▁cameo", + -13.305936813354492 + ], + [ + "▁gangs", + -13.305938720703123 + ], + [ + "software", + -13.305940628051758 + ], + [ + "▁elk", + -13.305978775024414 + ], + [ + "adventure", + -13.305984497070312 + ], + [ + "ider", + -13.306257247924805 + ], + [ + "▁Possibly", + -13.30629062652588 + ], + [ + "▁cruiser", + -13.30641746520996 + ], + [ + "▁Thur", + -13.306662559509276 + ], + [ + "▁$2,500", + -13.306666374206545 + ], + [ + "▁Contra", + -13.30672836303711 + ], + [ + "▁yarns", + -13.30676555633545 + ], + [ + "ask", + -13.306780815124512 + ], + [ + "▁Hind", + -13.30679416656494 + ], + [ + "Images", + -13.30703830718994 + ], + [ + "sara", + -13.307052612304688 + ], + [ + "TEL", + -13.30709457397461 + ], + [ + "▁Taxes", + -13.30723476409912 + ], + [ + "▁reinstall", + -13.307250022888184 + ], + [ + "sei", + -13.307350158691406 + ], + [ + "▁hotline", + -13.307398796081545 + ], + [ + "▁ot", + -13.307437896728516 + ], + [ + "▁dizziness", + -13.307538032531738 + ], + [ + "▁itineraries", + -13.307538032531738 + ], + [ + "▁Thumbnail", + -13.307538986206056 + ], + [ + "▁Seahawks", + -13.3075532913208 + ], + [ + "Ze", + -13.307600021362305 + ], + [ + "▁Islamist", + -13.307604789733888 + ], + [ + "▁transmitting", + -13.3076171875 + ], + [ + "▁unnamed", + -13.30764102935791 + ], + [ + "▁Woodstock", + -13.307662010192873 + ], + [ + "▁spoiler", + -13.307732582092283 + ], + [ + "Effective", + -13.307753562927246 + ], + [ + "▁introductions", + -13.307759284973145 + ], + [ + "▁Bellevue", + -13.307762145996094 + ], + [ + "hire", + -13.307846069335938 + ], + [ + "▁IMO", + -13.30788230895996 + ], + [ + "▁Rope", + -13.307890892028809 + ], + [ + "Written", + -13.307990074157717 + ], + [ + "Corporate", + -13.30799674987793 + ], + [ + "▁locale", + -13.308051109313965 + ], + [ + "Harry", + -13.308053016662598 + ], + [ + "Quite", + -13.308120727539062 + ], + [ + "298", + -13.308150291442873 + ], + [ + "▁keypad", + -13.308177947998049 + ], + [ + "255", + -13.308194160461426 + ], + [ + "▁propelled", + -13.308220863342283 + ], + [ + "▁workstations", + -13.308290481567385 + ], + [ + "265", + -13.308302879333496 + ], + [ + "▁Flyer", + -13.308330535888672 + ], + [ + "▁NET", + -13.308469772338867 + ], + [ + "▁Abbas", + -13.30849266052246 + ], + [ + "Farm", + -13.30854606628418 + ], + [ + "MON", + 
-13.308554649353027 + ], + [ + "▁Chico", + -13.308565139770508 + ], + [ + "Paris", + -13.30861759185791 + ], + [ + "▁deliciously", + -13.308643341064451 + ], + [ + "”;", + -13.30864715576172 + ], + [ + "▁LOOK", + -13.308817863464355 + ], + [ + "▁avoids", + -13.308883666992188 + ], + [ + "▁chia", + -13.30892562866211 + ], + [ + "2006", + -13.308960914611816 + ], + [ + "switch", + -13.30918312072754 + ], + [ + "▁Shenzhen", + -13.309198379516602 + ], + [ + "▁cuisines", + -13.309199333190918 + ], + [ + "▁provoking", + -13.309199333190918 + ], + [ + "Mate", + -13.30923080444336 + ], + [ + "▁Arabs", + -13.309250831604004 + ], + [ + "vat", + -13.309269905090332 + ], + [ + "Sp", + -13.309280395507812 + ], + [ + "▁Pottery", + -13.309304237365724 + ], + [ + "fax", + -13.309311866760254 + ], + [ + "zzo", + -13.309350967407228 + ], + [ + "▁Nvidia", + -13.309514045715332 + ], + [ + "Vol", + -13.309589385986328 + ], + [ + "▁CLA", + -13.309595108032228 + ], + [ + "▁Caldwell", + -13.309599876403809 + ], + [ + "▁relatable", + -13.309602737426758 + ], + [ + "▁configuring", + -13.309605598449709 + ], + [ + "▁Bloomington", + -13.309621810913086 + ], + [ + "▁Seventh", + -13.309663772583008 + ], + [ + "▁inaugurated", + -13.309710502624512 + ], + [ + "avo", + -13.309720039367676 + ], + [ + "hello", + -13.309747695922852 + ], + [ + "▁Chateau", + -13.309752464294434 + ], + [ + "▁Homestead", + -13.30976104736328 + ], + [ + "mati", + -13.309769630432127 + ], + [ + "ales", + -13.309802055358888 + ], + [ + "▁particulate", + -13.30982494354248 + ], + [ + "tera", + -13.309866905212402 + ], + [ + "▁tailgate", + -13.309880256652832 + ], + [ + "▁2017-18", + -13.309895515441896 + ], + [ + "▁Abel", + -13.309911727905272 + ], + [ + "▁Blackburn", + -13.309925079345703 + ], + [ + "▁Valerie", + -13.309927940368652 + ], + [ + "gift", + -13.309991836547852 + ], + [ + "▁Manufactured", + -13.31001091003418 + ], + [ + "wk", + -13.31012725830078 + ], + [ + "▁straighten", + -13.310275077819824 + ], + [ + "kept", + -13.310380935668944 + ], + [ + "▁Floating", + -13.310432434082031 + ], + [ + "▁hinged", + -13.310480117797852 + ], + [ + "▁Gloucester", + -13.310486793518066 + ], + [ + "▁cuddle", + -13.310542106628418 + ], + [ + "TIN", + -13.3106050491333 + ], + [ + "▁Frankie", + -13.3106050491333 + ], + [ + "▁Mozambique", + -13.310653686523438 + ], + [ + "▁2.9", + -13.31067180633545 + ], + [ + "ously", + -13.310676574707031 + ], + [ + "▁sliders", + -13.310684204101562 + ], + [ + "zed", + -13.31070327758789 + ], + [ + "inning", + -13.310880661010742 + ], + [ + "▁setbacks", + -13.310961723327637 + ], + [ + "doi", + -13.311055183410645 + ], + [ + "oti", + -13.311110496520996 + ], + [ + "▁Crisp", + -13.311134338378906 + ], + [ + "▁bunker", + -13.311180114746094 + ], + [ + "▁rattle", + -13.311264991760254 + ], + [ + "brew", + -13.311270713806152 + ], + [ + "▁BUSINESS", + -13.31134033203125 + ], + [ + "Products", + -13.3113431930542 + ], + [ + "▁realising", + -13.3113431930542 + ], + [ + "▁Taj", + -13.311388969421388 + ], + [ + "▁Madagascar", + -13.311426162719728 + ], + [ + "▁hears", + -13.311467170715332 + ], + [ + "▁5.3", + -13.31147289276123 + ], + [ + "▁mites", + -13.311492919921877 + ], + [ + "matter", + -13.311559677124023 + ], + [ + "▁bans", + -13.311567306518556 + ], + [ + "▁Rivera", + -13.311641693115234 + ], + [ + "▁careless", + -13.31164264678955 + ], + [ + "666", + -13.311653137207031 + ], + [ + "▁freezes", + -13.31166172027588 + ], + [ + "▁saxophone", + -13.31166648864746 + ], + [ + "▁dissatisfied", + -13.311668395996094 + ], + [ + 
"career", + -13.311686515808104 + ], + [ + "▁Hague", + -13.31175422668457 + ], + [ + "▁dosing", + -13.311845779418944 + ], + [ + "▁Documentation", + -13.311915397644045 + ], + [ + "▁Schultz", + -13.311966896057127 + ], + [ + "▁hostage", + -13.311997413635254 + ], + [ + "▁texas", + -13.31200122833252 + ], + [ + "▁Shrine", + -13.312004089355469 + ], + [ + "▁tasteful", + -13.312115669250488 + ], + [ + "▁pediatrician", + -13.312138557434082 + ], + [ + "▁wholesalers", + -13.312190055847168 + ], + [ + "neutral", + -13.312192916870115 + ], + [ + "Tomorrow", + -13.31222438812256 + ], + [ + "▁Stevenson", + -13.31227207183838 + ], + [ + "▁mechanically", + -13.31235122680664 + ], + [ + "▁136", + -13.312362670898438 + ], + [ + "▁Abdullah", + -13.312423706054688 + ], + [ + "▁destroys", + -13.312424659729004 + ], + [ + "▁Beverage", + -13.312515258789062 + ], + [ + "▁Handy", + -13.31251621246338 + ], + [ + "▁boon", + -13.312540054321287 + ], + [ + "Annual", + -13.312573432922363 + ], + [ + "esi", + -13.312780380249023 + ], + [ + "▁feta", + -13.312813758850098 + ], + [ + "▁Blo", + -13.312819480895996 + ], + [ + "▁Screw", + -13.31282901763916 + ], + [ + "VPN", + -13.312872886657717 + ], + [ + "▁Pear", + -13.312888145446776 + ], + [ + "▁Ethical", + -13.312891006469728 + ], + [ + "xia", + -13.312987327575684 + ], + [ + "▁SEA", + -13.31308364868164 + ], + [ + "▁Disco", + -13.313152313232422 + ], + [ + "▁wetland", + -13.313196182250977 + ], + [ + "aught", + -13.313231468200684 + ], + [ + "▁thinly", + -13.313257217407228 + ], + [ + "▁blockage", + -13.31338882446289 + ], + [ + "international", + -13.313446998596191 + ], + [ + "▁outsider", + -13.31350803375244 + ], + [ + "▁BV", + -13.313518524169922 + ], + [ + "▁simplifying", + -13.313546180725098 + ], + [ + "2002", + -13.313557624816896 + ], + [ + "▁Ev", + -13.313581466674805 + ], + [ + "address", + -13.313584327697754 + ], + [ + "Julie", + -13.3136568069458 + ], + [ + "▁Anglican", + -13.31373691558838 + ], + [ + "▁Labrador", + -13.31373691558838 + ], + [ + "▁ceremonial", + -13.31373691558838 + ], + [ + "▁repercussions", + -13.31373691558838 + ], + [ + "▁Pluto", + -13.313800811767578 + ], + [ + "Study", + -13.313855171203612 + ], + [ + "▁wakes", + -13.31386947631836 + ], + [ + "▁gothic", + -13.313870429992676 + ], + [ + "▁Fender", + -13.31393337249756 + ], + [ + "▁Motel", + -13.313947677612305 + ], + [ + "▁clipart", + -13.314085960388184 + ], + [ + "▁Pres", + -13.31411075592041 + ], + [ + "4/", + -13.314144134521484 + ], + [ + "▁5)", + -13.314165115356444 + ], + [ + "▁Darling", + -13.31421947479248 + ], + [ + "▁cont", + -13.314266204833984 + ], + [ + "▁boxer", + -13.314270973205566 + ], + [ + "▁Institutional", + -13.314324378967283 + ], + [ + "Federal", + -13.314352989196776 + ], + [ + "▁VO", + -13.314419746398926 + ], + [ + "▁Intra", + -13.314423561096191 + ], + [ + "▁737", + -13.314444541931152 + ], + [ + "▁formative", + -13.314448356628418 + ], + [ + "▁implication", + -13.31463623046875 + ], + [ + "cil", + -13.314845085144045 + ], + [ + "object", + -13.314889907836914 + ], + [ + "▁narrower", + -13.31491756439209 + ], + [ + "▁reinvent", + -13.314937591552734 + ], + [ + "▁enlightenment", + -13.315022468566896 + ], + [ + "▁TEAM", + -13.315203666687012 + ], + [ + "▁plead", + -13.315208435058594 + ], + [ + "▁Haus", + -13.315250396728516 + ], + [ + "▁Algebra", + -13.315264701843262 + ], + [ + "Tea", + -13.315382957458496 + ], + [ + "▁rom", + -13.315386772155762 + ], + [ + "▁Bak", + -13.31558609008789 + ], + [ + "▁clamps", + -13.315692901611328 + ], + [ + 
"▁unnecessarily", + -13.315814971923828 + ], + [ + "electronic", + -13.315824508666992 + ], + [ + "▁parlor", + -13.315845489501951 + ], + [ + "Gary", + -13.315852165222168 + ], + [ + "▁buggy", + -13.31588363647461 + ], + [ + "ress", + -13.315946578979492 + ], + [ + "▁pagan", + -13.31606101989746 + ], + [ + "3-2", + -13.316132545471191 + ], + [ + "▁tur", + -13.31625270843506 + ], + [ + "▁Dentist", + -13.31628704071045 + ], + [ + "Bru", + -13.316335678100586 + ], + [ + "TZ", + -13.316373825073242 + ], + [ + "▁Sant", + -13.316376686096191 + ], + [ + "FW", + -13.316452026367188 + ], + [ + "maintenance", + -13.316481590270996 + ], + [ + "Linux", + -13.316487312316896 + ], + [ + "pure", + -13.316520690917969 + ], + [ + "▁FIT", + -13.316527366638184 + ], + [ + "▁electrode", + -13.316539764404297 + ], + [ + "▁142", + -13.31655979156494 + ], + [ + "▁mend", + -13.316611289978027 + ], + [ + "▁moderator", + -13.316773414611816 + ], + [ + "▁freeware", + -13.316779136657717 + ], + [ + "▁communicates", + -13.316792488098145 + ], + [ + "▁beaded", + -13.316888809204102 + ], + [ + "▁Raise", + -13.316899299621582 + ], + [ + "▁Elf", + -13.316920280456545 + ], + [ + "▁fellows", + -13.316927909851074 + ], + [ + "▁Zebra", + -13.316953659057615 + ], + [ + "kri", + -13.3169584274292 + ], + [ + "AMA", + -13.317082405090332 + ], + [ + "▁FAST", + -13.317124366760254 + ], + [ + "Tek", + -13.31712818145752 + ], + [ + "▁res", + -13.317136764526367 + ], + [ + "▁Jenna", + -13.317181587219238 + ], + [ + "▁3,500", + -13.317218780517578 + ], + [ + "▁Baking", + -13.317233085632324 + ], + [ + "hur", + -13.317333221435549 + ], + [ + "▁Blinds", + -13.31741714477539 + ], + [ + "▁Monsters", + -13.317487716674805 + ], + [ + "OE", + -13.317508697509766 + ], + [ + "bber", + -13.317564010620115 + ], + [ + "▁prune", + -13.317631721496582 + ], + [ + "▁accommodated", + -13.31764316558838 + ], + [ + "▁Breeze", + -13.31765079498291 + ], + [ + ":17", + -13.317666053771973 + ], + [ + "▁Viewer", + -13.317668914794922 + ], + [ + "magic", + -13.317684173583984 + ], + [ + "▁Genetics", + -13.317808151245115 + ], + [ + "▁Internship", + -13.317858695983888 + ], + [ + "▁manipulating", + -13.317890167236328 + ], + [ + "▁unsurpassed", + -13.317890167236328 + ], + [ + "▁Ludwig", + -13.31789207458496 + ], + [ + "▁ethereal", + -13.31789493560791 + ], + [ + "▁pesky", + -13.317912101745604 + ], + [ + "▁orphanage", + -13.317991256713867 + ], + [ + "▁169", + -13.318013191223145 + ], + [ + "▁spectral", + -13.318100929260254 + ], + [ + "Understand", + -13.318154335021973 + ], + [ + "cb", + -13.318175315856934 + ], + [ + "▁paypal", + -13.318394660949709 + ], + [ + "WF", + -13.31840991973877 + ], + [ + "▁dominates", + -13.318411827087402 + ], + [ + "▁robes", + -13.318546295166016 + ], + [ + "Collection", + -13.31867790222168 + ], + [ + "▁strangely", + -13.318696022033691 + ], + [ + "875", + -13.3187255859375 + ], + [ + "planned", + -13.318763732910156 + ], + [ + "CAS", + -13.318771362304688 + ], + [ + "▁Sentinel", + -13.318840980529783 + ], + [ + "Yoga", + -13.31886863708496 + ], + [ + "▁raging", + -13.318886756896973 + ], + [ + "▁bury", + -13.318899154663086 + ], + [ + "▁FTC", + -13.319074630737305 + ], + [ + "▁Seeking", + -13.319098472595217 + ], + [ + "▁dictates", + -13.319127082824709 + ], + [ + "dose", + -13.319292068481444 + ], + [ + "▁discontinue", + -13.319379806518556 + ], + [ + "ents", + -13.319584846496582 + ], + [ + "▁13%", + -13.319677352905272 + ], + [ + "▁peat", + -13.319710731506348 + ], + [ + "▁Salman", + -13.319725036621094 + ], + [ + "ain", + 
-13.319786071777344 + ], + [ + "▁Composition", + -13.319849014282228 + ], + [ + "TEX", + -13.319894790649414 + ], + [ + "folk", + -13.31991958618164 + ], + [ + "▁10\"", + -13.319923400878906 + ], + [ + "▁sculptural", + -13.319976806640623 + ], + [ + "▁Dartmouth", + -13.31997776031494 + ], + [ + "▁commencing", + -13.319978713989258 + ], + [ + "▁ultrasonic", + -13.319989204406738 + ], + [ + "▁crappy", + -13.320002555847168 + ], + [ + "▁Reunion", + -13.320038795471191 + ], + [ + "▁wholeheartedly", + -13.320046424865724 + ], + [ + "▁abandonment", + -13.320079803466797 + ], + [ + "▁Cache", + -13.320096015930176 + ], + [ + "▁bloodstream", + -13.32024097442627 + ], + [ + "▁fro", + -13.3202486038208 + ], + [ + "▁teaming", + -13.320267677307127 + ], + [ + "▁rainwater", + -13.320327758789062 + ], + [ + "▁Bracket", + -13.320405960083008 + ], + [ + "Risk", + -13.3204345703125 + ], + [ + "lion", + -13.320688247680664 + ], + [ + "▁Crafted", + -13.32071018218994 + ], + [ + "Nowadays", + -13.320741653442385 + ], + [ + "▁Sailing", + -13.320754051208496 + ], + [ + "Country", + -13.32077693939209 + ], + [ + "112", + -13.320806503295898 + ], + [ + "▁censorship", + -13.320831298828123 + ], + [ + "▁Petit", + -13.320877075195312 + ], + [ + "Hair", + -13.320914268493652 + ], + [ + "▁misconception", + -13.320937156677246 + ], + [ + "▁Float", + -13.321106910705566 + ], + [ + "▁GROUP", + -13.321185111999512 + ], + [ + "ables", + -13.321345329284668 + ], + [ + "▁Marcel", + -13.32146453857422 + ], + [ + "▁roar", + -13.321473121643066 + ], + [ + "▁Linn", + -13.32151699066162 + ], + [ + "▁amplify", + -13.321553230285645 + ], + [ + "▁miner", + -13.321587562561035 + ], + [ + "▁MON", + -13.321720123291016 + ], + [ + "▁pastures", + -13.321730613708496 + ], + [ + "▁Huffington", + -13.321849822998049 + ], + [ + "▁playlists", + -13.32196807861328 + ], + [ + "▁Neg", + -13.322009086608888 + ], + [ + "▁McAfee", + -13.322028160095217 + ], + [ + "Les", + -13.32203769683838 + ], + [ + "▁Ambulance", + -13.322061538696287 + ], + [ + "▁Damascus", + -13.322061538696287 + ], + [ + "▁Wrestling", + -13.322061538696287 + ], + [ + "▁cutlery", + -13.322061538696287 + ], + [ + "▁Picasso", + -13.322064399719238 + ], + [ + "▁toothpick", + -13.32209587097168 + ], + [ + "▁flagged", + -13.322111129760742 + ], + [ + "holm", + -13.322121620178224 + ], + [ + "Station", + -13.32214069366455 + ], + [ + "▁Hassan", + -13.322181701660156 + ], + [ + "▁transmissions", + -13.322219848632812 + ], + [ + "▁abstracts", + -13.322253227233888 + ], + [ + "▁Incorporated", + -13.322265625 + ], + [ + "▁Symptoms", + -13.322412490844728 + ], + [ + "▁galley", + -13.32244110107422 + ], + [ + "Bat", + -13.32247257232666 + ], + [ + "▁Cul", + -13.322492599487305 + ], + [ + "▁McGill", + -13.32249927520752 + ], + [ + "Burn", + -13.322667121887209 + ], + [ + "ots", + -13.32272720336914 + ], + [ + "▁Tooth", + -13.32277488708496 + ], + [ + "▁NYU", + -13.322798728942873 + ], + [ + "▁encyclopedia", + -13.32284164428711 + ], + [ + "▁reliant", + -13.322860717773438 + ], + [ + "▁Magnus", + -13.322869300842283 + ], + [ + "derived", + -13.322894096374512 + ], + [ + "Discussion", + -13.322957038879396 + ], + [ + "Peace", + -13.32297420501709 + ], + [ + "Depend", + -13.32298755645752 + ], + [ + "▁Courtesy", + -13.32305145263672 + ], + [ + "Stick", + -13.323060035705566 + ], + [ + "actually", + -13.323076248168944 + ], + [ + "▁Vs", + -13.32310676574707 + ], + [ + "1-7", + -13.32314682006836 + ], + [ + "▁Wanted", + -13.323152542114258 + ], + [ + "words", + -13.323193550109863 + ], + [ + 
"▁subs", + -13.32321548461914 + ], + [ + "▁Sloan", + -13.323476791381836 + ], + [ + "scher", + -13.323627471923828 + ], + [ + "1-4", + -13.323787689208984 + ], + [ + "▁7.1", + -13.323824882507324 + ], + [ + "▁implicit", + -13.323934555053713 + ], + [ + "▁ambiguity", + -13.324152946472168 + ], + [ + "▁lounging", + -13.324152946472168 + ], + [ + "▁manganese", + -13.3241548538208 + ], + [ + "▁affords", + -13.3241605758667 + ], + [ + "▁shredding", + -13.324163436889648 + ], + [ + "▁Hobart", + -13.324167251586914 + ], + [ + "▁galore", + -13.324167251586914 + ], + [ + "▁eviction", + -13.324201583862305 + ], + [ + "▁southwestern", + -13.324206352233888 + ], + [ + "▁Renewal", + -13.32421588897705 + ], + [ + "▁loom", + -13.324238777160645 + ], + [ + "▁wrongdoing", + -13.324243545532228 + ], + [ + "▁Helps", + -13.324262619018556 + ], + [ + "-400", + -13.324435234069824 + ], + [ + "▁afternoons", + -13.324450492858888 + ], + [ + "▁plating", + -13.3245849609375 + ], + [ + "▁400,000", + -13.32469081878662 + ], + [ + "von", + -13.324732780456545 + ], + [ + "Hay", + -13.3247652053833 + ], + [ + "▁Marijuana", + -13.324788093566896 + ], + [ + "tering", + -13.324819564819336 + ], + [ + "Virtual", + -13.32482624053955 + ], + [ + "xie", + -13.324938774108888 + ], + [ + "▁Oprah", + -13.32501983642578 + ], + [ + "▁ole", + -13.32505989074707 + ], + [ + "Middle", + -13.325140953063965 + ], + [ + "▁12.5", + -13.325160026550291 + ], + [ + "▁Approval", + -13.325257301330566 + ], + [ + "▁landline", + -13.325282096862791 + ], + [ + "11)", + -13.325469970703123 + ], + [ + "▁Aquatic", + -13.325472831726074 + ], + [ + "▁Stephens", + -13.325555801391602 + ], + [ + "▁walkways", + -13.325641632080078 + ], + [ + "▁expedite", + -13.32564926147461 + ], + [ + "▁populate", + -13.325657844543455 + ], + [ + "▁scouts", + -13.325687408447266 + ], + [ + "▁bellow", + -13.325724601745604 + ], + [ + "▁PTA", + -13.325813293457031 + ], + [ + "▁watercolour", + -13.325823783874512 + ], + [ + "/28", + -13.325864791870115 + ], + [ + "▁glam", + -13.32595920562744 + ], + [ + "▁steadfast", + -13.326009750366213 + ], + [ + "loading", + -13.32601261138916 + ], + [ + "lith", + -13.32602882385254 + ], + [ + "▁nicest", + -13.326071739196776 + ], + [ + "▁Scanner", + -13.326242446899414 + ], + [ + "▁palliative", + -13.326250076293944 + ], + [ + "▁fairytale", + -13.32625961303711 + ], + [ + "▁Watkins", + -13.326273918151855 + ], + [ + "▁Gambling", + -13.32633113861084 + ], + [ + "▁prenatal", + -13.32633113861084 + ], + [ + "▁servo", + -13.326333045959473 + ], + [ + "▁Tanner", + -13.326435089111328 + ], + [ + "▁Patty", + -13.326444625854492 + ], + [ + "▁whistles", + -13.326462745666504 + ], + [ + "▁disruptions", + -13.32654094696045 + ], + [ + "▁volcanoes", + -13.326574325561523 + ], + [ + "▁SN", + -13.326621055603027 + ], + [ + "▁Mourinho", + -13.32663345336914 + ], + [ + "▁playgrounds", + -13.326653480529783 + ], + [ + "OX", + -13.326661109924316 + ], + [ + "none", + -13.326711654663086 + ], + [ + "▁Locks", + -13.326739311218262 + ], + [ + "fry", + -13.326773643493652 + ], + [ + "dz", + -13.32679271697998 + ], + [ + "county", + -13.32680320739746 + ], + [ + "▁Catalyst", + -13.326842308044434 + ], + [ + "▁succeeds", + -13.3268461227417 + ], + [ + "▁Lending", + -13.326851844787598 + ], + [ + "expression", + -13.326930046081545 + ], + [ + "bru", + -13.326991081237791 + ], + [ + "enne", + -13.327061653137209 + ], + [ + "kol", + -13.327072143554688 + ], + [ + "Henry", + -13.327171325683594 + ], + [ + "Obama", + -13.327178001403809 + ], + [ + "financial", + 
-13.327187538146973 + ], + [ + "▁Loc", + -13.327195167541504 + ], + [ + "▁editable", + -13.327248573303224 + ], + [ + "▁Stocks", + -13.327346801757812 + ], + [ + "▁Julius", + -13.327366828918455 + ], + [ + "▁resisting", + -13.32748794555664 + ], + [ + "GW", + -13.327752113342283 + ], + [ + "icular", + -13.327818870544434 + ], + [ + "▁pra", + -13.327872276306152 + ], + [ + "▁Breed", + -13.32789134979248 + ], + [ + "▁Becoming", + -13.327895164489746 + ], + [ + "Drawing", + -13.327909469604492 + ], + [ + "sam", + -13.328110694885254 + ], + [ + "▁esp", + -13.328179359436035 + ], + [ + "▁axes", + -13.328221321105955 + ], + [ + "ungen", + -13.32827377319336 + ], + [ + "▁Alone", + -13.328277587890623 + ], + [ + "0-5", + -13.328289985656738 + ], + [ + "sho", + -13.328332901000977 + ], + [ + "▁deputies", + -13.328351020812988 + ], + [ + "▁psoriasis", + -13.328351020812988 + ], + [ + "▁oregano", + -13.32835292816162 + ], + [ + "▁strolling", + -13.328373908996582 + ], + [ + "▁Lyft", + -13.328412055969238 + ], + [ + "▁maize", + -13.328432083129885 + ], + [ + "▁cabling", + -13.32851219177246 + ], + [ + "▁nick", + -13.328579902648926 + ], + [ + "▁consumes", + -13.32858657836914 + ], + [ + "▁prepping", + -13.328598976135254 + ], + [ + "▁Sexual", + -13.328614234924316 + ], + [ + "mpl", + -13.328615188598633 + ], + [ + "▁Vest", + -13.32862663269043 + ], + [ + "dried", + -13.328628540039062 + ], + [ + "▁Surveillance", + -13.32862949371338 + ], + [ + "▁piling", + -13.328661918640137 + ], + [ + "▁3500", + -13.328774452209473 + ], + [ + "nna", + -13.328822135925291 + ], + [ + "▁roundup", + -13.328859329223633 + ], + [ + "▁backlinks", + -13.328877449035645 + ], + [ + "▁activates", + -13.32888889312744 + ], + [ + "Spend", + -13.328916549682615 + ], + [ + "▁kan", + -13.32895278930664 + ], + [ + "▁collectibles", + -13.329005241394045 + ], + [ + "▁gro", + -13.329163551330566 + ], + [ + "▁Plane", + -13.329178810119627 + ], + [ + "▁Simone", + -13.329289436340332 + ], + [ + "▁unload", + -13.32940673828125 + ], + [ + "Cheap", + -13.3294095993042 + ], + [ + "▁innovator", + -13.329424858093262 + ], + [ + "▁computed", + -13.329438209533691 + ], + [ + "▁Measurement", + -13.329442024230955 + ], + [ + "▁RSA", + -13.329461097717283 + ], + [ + "encia", + -13.329545974731444 + ], + [ + "▁kin", + -13.329611778259276 + ], + [ + "▁Provided", + -13.32969856262207 + ], + [ + "etch", + -13.329706192016602 + ], + [ + "▁fir", + -13.32974624633789 + ], + [ + "▁Bauer", + -13.329784393310549 + ], + [ + "▁Theft", + -13.32981777191162 + ], + [ + "▁postpone", + -13.329845428466797 + ], + [ + "garh", + -13.3298978805542 + ], + [ + "▁confer", + -13.32991886138916 + ], + [ + "▁DSP", + -13.32996940612793 + ], + [ + "▁Ingredients", + -13.32998752593994 + ], + [ + "▁Thorn", + -13.330031394958496 + ], + [ + "BOX", + -13.330062866210938 + ], + [ + "sana", + -13.330089569091797 + ], + [ + "▁Linden", + -13.330147743225098 + ], + [ + "▁OCD", + -13.330205917358398 + ], + [ + "▁intimately", + -13.330329895019531 + ], + [ + "▁stent", + -13.330333709716797 + ], + [ + "▁4-0", + -13.330337524414062 + ], + [ + "▁smack", + -13.33034610748291 + ], + [ + "VP", + -13.330411911010742 + ], + [ + "rip", + -13.33043384552002 + ], + [ + "▁exploding", + -13.330455780029297 + ], + [ + "▁withdrew", + -13.330458641052246 + ], + [ + "▁Jennings", + -13.330461502075195 + ], + [ + "▁amassed", + -13.33046531677246 + ], + [ + "▁Robbins", + -13.330476760864258 + ], + [ + "▁Harriet", + -13.330483436584473 + ], + [ + "hav", + -13.330514907836914 + ], + [ + "▁prescribing", + 
-13.33054256439209 + ], + [ + "5:", + -13.330575942993164 + ], + [ + "▁reddish", + -13.330575942993164 + ], + [ + "▁Lithium", + -13.330578804016112 + ], + [ + "▁enlightened", + -13.330602645874023 + ], + [ + "▁taxing", + -13.330629348754885 + ], + [ + "▁=>", + -13.330705642700195 + ], + [ + "▁Contracts", + -13.330796241760254 + ], + [ + "▁CFA", + -13.330802917480469 + ], + [ + "SCO", + -13.330833435058594 + ], + [ + "illion", + -13.330878257751465 + ], + [ + "▁git", + -13.330965042114258 + ], + [ + "▁rehearsals", + -13.331040382385254 + ], + [ + "aru", + -13.331269264221191 + ], + [ + "▁ripple", + -13.3313570022583 + ], + [ + "▁Scrub", + -13.331416130065918 + ], + [ + "▁Oxygen", + -13.331453323364258 + ], + [ + "budget", + -13.331497192382812 + ], + [ + "Coffee", + -13.33150577545166 + ], + [ + "▁structuring", + -13.331523895263672 + ], + [ + "...).", + -13.331571578979492 + ], + [ + "STAR", + -13.331649780273438 + ], + [ + "cial", + -13.331660270690918 + ], + [ + "woven", + -13.331727981567385 + ], + [ + "▁Presence", + -13.331745147705078 + ], + [ + "▁lightness", + -13.331974029541016 + ], + [ + "male", + -13.33199405670166 + ], + [ + "▁SPI", + -13.332115173339844 + ], + [ + "▁curate", + -13.33214282989502 + ], + [ + "344", + -13.332175254821776 + ], + [ + "▁Rao", + -13.332257270812988 + ], + [ + "wort", + -13.332267761230469 + ], + [ + "error", + -13.332298278808594 + ], + [ + "▁Bosnia", + -13.33236312866211 + ], + [ + "▁distinguishing", + -13.33238697052002 + ], + [ + "3.4", + -13.332393646240234 + ], + [ + "▁Thy", + -13.33242130279541 + ], + [ + "-52", + -13.332451820373535 + ], + [ + "▁underpin", + -13.332480430603027 + ], + [ + "▁sailor", + -13.332491874694824 + ], + [ + "YS", + -13.332560539245604 + ], + [ + "▁Endowment", + -13.332565307617188 + ], + [ + "▁aerosol", + -13.332571029663086 + ], + [ + "AX", + -13.33261775970459 + ], + [ + "▁Catalogue", + -13.332635879516602 + ], + [ + "rx", + -13.332646369934082 + ], + [ + "Fort", + -13.332751274108888 + ], + [ + "balance", + -13.332756996154783 + ], + [ + "grey", + -13.332812309265137 + ], + [ + "▁hai", + -13.332828521728516 + ], + [ + "▁sparks", + -13.332839012145996 + ], + [ + "erton", + -13.332876205444336 + ], + [ + "▁optimally", + -13.332893371582031 + ], + [ + "▁valet", + -13.332938194274902 + ], + [ + "▁Gus", + -13.332953453063965 + ], + [ + "▁forks", + -13.332977294921877 + ], + [ + "▁Diary", + -13.33298683166504 + ], + [ + "oba", + -13.333087921142578 + ], + [ + "▁Personnel", + -13.333110809326172 + ], + [ + "▁walled", + -13.333111763000488 + ], + [ + "▁Jewel", + -13.333155632019045 + ], + [ + "▁roaring", + -13.333189964294434 + ], + [ + "▁Adoption", + -13.333222389221191 + ], + [ + "▁shimmering", + -13.333303451538086 + ], + [ + "Animal", + -13.333459854125977 + ], + [ + "▁captions", + -13.333503723144531 + ], + [ + "events", + -13.333592414855955 + ], + [ + "believe", + -13.333614349365234 + ], + [ + "graphy", + -13.33373737335205 + ], + [ + "▁Proxy", + -13.333772659301758 + ], + [ + "1/4", + -13.333789825439451 + ], + [ + "iber", + -13.33381462097168 + ], + [ + "▁migraines", + -13.333852767944336 + ], + [ + "▁soprano", + -13.333889961242676 + ], + [ + "▁Pc", + -13.333902359008787 + ], + [ + "▁invoke", + -13.334166526794434 + ], + [ + "▁20-30", + -13.334232330322266 + ], + [ + "bold", + -13.334333419799805 + ], + [ + "upon", + -13.334338188171388 + ], + [ + "▁mute", + -13.334416389465332 + ], + [ + "▁Katz", + -13.334525108337402 + ], + [ + "5.0", + -13.334552764892578 + ], + [ + "▁nearer", + -13.33456325531006 + ], + [ + 
"▁spectator", + -13.33457374572754 + ], + [ + "▁Improved", + -13.33461856842041 + ], + [ + "▁frontline", + -13.334654808044434 + ], + [ + "2.6", + -13.334662437438965 + ], + [ + "▁Rue", + -13.33467960357666 + ], + [ + "▁cosmopolitan", + -13.33467960357666 + ], + [ + "▁embodiment", + -13.33467960357666 + ], + [ + "▁subpoena", + -13.33467960357666 + ], + [ + "▁bubbling", + -13.334680557250977 + ], + [ + "vac", + -13.33468246459961 + ], + [ + "▁whirlwind", + -13.334683418273926 + ], + [ + "▁Urdu", + -13.334697723388672 + ], + [ + "▁rarity", + -13.334699630737305 + ], + [ + "▁hostess", + -13.334712982177734 + ], + [ + "▁sqft", + -13.334724426269531 + ], + [ + "▁Rinse", + -13.334794044494627 + ], + [ + "▁gratis", + -13.334894180297852 + ], + [ + "Official", + -13.334908485412598 + ], + [ + "▁1903", + -13.33494758605957 + ], + [ + "Gun", + -13.335091590881348 + ], + [ + "▁infancy", + -13.335129737854004 + ], + [ + "▁Pom", + -13.335137367248535 + ], + [ + "driver", + -13.335180282592772 + ], + [ + "▁Shen", + -13.33525848388672 + ], + [ + "▁5.7", + -13.335350036621094 + ], + [ + "▁Vari", + -13.335360527038574 + ], + [ + "BN", + -13.335375785827637 + ], + [ + "▁Sleeper", + -13.335405349731444 + ], + [ + "▁$55", + -13.335412979125977 + ], + [ + "▁infect", + -13.335515022277832 + ], + [ + "▁152", + -13.3356294631958 + ], + [ + "▁Tenn", + -13.335716247558594 + ], + [ + "▁Forensic", + -13.335726737976074 + ], + [ + "regulation", + -13.335737228393556 + ], + [ + "460", + -13.335779190063477 + ], + [ + "Definitely", + -13.335829734802246 + ], + [ + "Disney", + -13.335866928100586 + ], + [ + "-94", + -13.335867881774902 + ], + [ + "HER", + -13.335891723632812 + ], + [ + "Honestly", + -13.33591079711914 + ], + [ + "▁eyelid", + -13.335917472839355 + ], + [ + "maintained", + -13.335968017578123 + ], + [ + "Kelly", + -13.335973739624023 + ], + [ + "youtube", + -13.336081504821776 + ], + [ + "▁137", + -13.336209297180176 + ], + [ + "▁sharper", + -13.336288452148438 + ], + [ + "▁truthful", + -13.336297035217283 + ], + [ + "▁Vil", + -13.336396217346191 + ], + [ + "ummy", + -13.336418151855469 + ], + [ + "▁Claudia", + -13.336496353149414 + ], + [ + "washing", + -13.33653450012207 + ], + [ + "▁spoons", + -13.336560249328612 + ], + [ + "WH", + -13.336613655090332 + ], + [ + "▁Composer", + -13.336637496948242 + ], + [ + "▁amplifiers", + -13.336668014526367 + ], + [ + "▁emp", + -13.336671829223633 + ], + [ + "▁Straw", + -13.336709022521973 + ], + [ + "kari", + -13.336795806884766 + ], + [ + "▁brigade", + -13.336798667907717 + ], + [ + "▁scrambling", + -13.336798667907717 + ], + [ + "▁parachute", + -13.336800575256348 + ], + [ + "▁sedation", + -13.336800575256348 + ], + [ + "JO", + -13.336809158325195 + ], + [ + "ady", + -13.336816787719728 + ], + [ + "▁scarring", + -13.33682918548584 + ], + [ + "NESS", + -13.33683967590332 + ], + [ + "▁swapping", + -13.336857795715332 + ], + [ + "▁pulses", + -13.336861610412598 + ], + [ + "▁checkbox", + -13.336874008178713 + ], + [ + "▁Salzburg", + -13.336889266967772 + ], + [ + "▁Tradition", + -13.336889266967772 + ], + [ + "hya", + -13.33689785003662 + ], + [ + "▁Vanderbilt", + -13.33697509765625 + ], + [ + "▁Scope", + -13.33699893951416 + ], + [ + "tc", + -13.337016105651855 + ], + [ + "4:", + -13.337041854858398 + ], + [ + "▁carbonate", + -13.33712387084961 + ], + [ + "▁Alder", + -13.337201118469238 + ], + [ + "Owner", + -13.33729076385498 + ], + [ + "Flight", + -13.337316513061523 + ], + [ + "▁transferable", + -13.337335586547852 + ], + [ + "▁Hosted", + -13.337336540222168 + ], 
+ [ + "▁graft", + -13.337359428405762 + ], + [ + "▁Sewing", + -13.33741855621338 + ], + [ + "▁pcs", + -13.33743953704834 + ], + [ + "▁Rath", + -13.337637901306152 + ], + [ + "▁Riders", + -13.33769416809082 + ], + [ + "▁breakfasts", + -13.337700843811035 + ], + [ + "▁sprung", + -13.3377103805542 + ], + [ + "STEP", + -13.337716102600098 + ], + [ + "▁uber", + -13.337724685668944 + ], + [ + "8.1", + -13.33775806427002 + ], + [ + "akis", + -13.337801933288574 + ], + [ + "▁analytic", + -13.33785343170166 + ], + [ + "▁Dent", + -13.33791160583496 + ], + [ + "▁2019!", + -13.33791446685791 + ], + [ + "omer", + -13.33792209625244 + ], + [ + "▁sparkly", + -13.338006973266602 + ], + [ + "▁Conservatives", + -13.33801555633545 + ], + [ + "▁Cane", + -13.338025093078612 + ], + [ + "Mbps", + -13.33806324005127 + ], + [ + "Christ", + -13.338132858276367 + ], + [ + "▁Lately", + -13.33820343017578 + ], + [ + "actor", + -13.338226318359377 + ], + [ + "▁Forty", + -13.338228225708008 + ], + [ + "▁vets", + -13.33838939666748 + ], + [ + "▁commits", + -13.338393211364746 + ], + [ + "▁relapse", + -13.338445663452148 + ], + [ + "aspx", + -13.338717460632324 + ], + [ + "▁Expansion", + -13.33877182006836 + ], + [ + "tempo", + -13.338778495788574 + ], + [ + "▁Lad", + -13.33885669708252 + ], + [ + "▁pecans", + -13.338913917541504 + ], + [ + "▁horoscope", + -13.338921546936035 + ], + [ + "▁memorandum", + -13.338921546936035 + ], + [ + "▁EXPRESS", + -13.338924407958984 + ], + [ + "▁MacDonald", + -13.338955879211426 + ], + [ + "▁refinancing", + -13.338979721069336 + ], + [ + "Bell", + -13.33900260925293 + ], + [ + "▁brutality", + -13.339034080505373 + ], + [ + "▁snowmobile", + -13.33904266357422 + ], + [ + "▁dashboards", + -13.339156150817873 + ], + [ + "2-1", + -13.33920669555664 + ], + [ + "830", + -13.339241027832031 + ], + [ + "▁+/-", + -13.339241027832031 + ], + [ + "▁Jeans", + -13.33926010131836 + ], + [ + "▁bloc", + -13.339329719543455 + ], + [ + "▁socialize", + -13.339340209960938 + ], + [ + "▁triangles", + -13.339351654052734 + ], + [ + "hs", + -13.339466094970703 + ], + [ + "▁Yin", + -13.339484214782717 + ], + [ + "▁Dane", + -13.339495658874512 + ], + [ + "▁Terre", + -13.339540481567385 + ], + [ + "▁Calcium", + -13.339556694030762 + ], + [ + "▁Sherlock", + -13.339588165283203 + ], + [ + "▁dB", + -13.339675903320312 + ], + [ + "▁208", + -13.339702606201172 + ], + [ + "▁Joanna", + -13.339798927307127 + ], + [ + "IDA", + -13.339909553527832 + ], + [ + "▁recess", + -13.340126037597656 + ], + [ + "▁Theological", + -13.340131759643556 + ], + [ + "Captain", + -13.340208053588867 + ], + [ + "▁Sao", + -13.340350151062012 + ], + [ + "▁Tun", + -13.34036636352539 + ], + [ + "disproportionate", + -13.340423583984377 + ], + [ + "▁Detail", + -13.340572357177734 + ], + [ + "igne", + -13.340583801269531 + ], + [ + "udu", + -13.340821266174316 + ], + [ + "▁breakup", + -13.340982437133787 + ], + [ + "▁shank", + -13.340982437133787 + ], + [ + "▁bibliography", + -13.341049194335938 + ], + [ + "▁pomegranate", + -13.341049194335938 + ], + [ + "lice", + -13.341050148010254 + ], + [ + "▁deterrent", + -13.341054916381836 + ], + [ + "▁unattended", + -13.341068267822266 + ], + [ + "▁Labels", + -13.341106414794922 + ], + [ + "nzo", + -13.341121673583984 + ], + [ + "operated", + -13.341205596923828 + ], + [ + "spa", + -13.341219902038574 + ], + [ + "▁Adi", + -13.34125518798828 + ], + [ + "▁Watches", + -13.34128475189209 + ], + [ + "▁Majority", + -13.341310501098633 + ], + [ + "rman", + -13.341357231140137 + ], + [ + "Trip", + 
-13.341360092163086 + ], + [ + "▁Dillon", + -13.341421127319336 + ], + [ + "▁architectures", + -13.341506958007812 + ], + [ + "▁ECU", + -13.341769218444824 + ], + [ + "▁carat", + -13.341781616210938 + ], + [ + "▁chartered", + -13.341882705688477 + ], + [ + "345", + -13.341898918151855 + ], + [ + "▁solvents", + -13.341911315917969 + ], + [ + "▁juniors", + -13.34196662902832 + ], + [ + "▁Plumbers", + -13.342012405395508 + ], + [ + "Ring", + -13.342031478881836 + ], + [ + "hwa", + -13.34203815460205 + ], + [ + "eaux", + -13.342126846313477 + ], + [ + "▁broom", + -13.342129707336426 + ], + [ + "▁shaved", + -13.342215538024902 + ], + [ + "▁Gall", + -13.342267036437988 + ], + [ + "assessment", + -13.342308044433594 + ], + [ + "Tool", + -13.34231185913086 + ], + [ + "▁Armor", + -13.342342376708984 + ], + [ + "CRA", + -13.342366218566896 + ], + [ + "Fashion", + -13.342369079589844 + ], + [ + "▁Incident", + -13.342377662658691 + ], + [ + "▁Rye", + -13.342415809631348 + ], + [ + "whatever", + -13.342459678649902 + ], + [ + "NV", + -13.342491149902344 + ], + [ + "▁PDP", + -13.342517852783203 + ], + [ + "▁camouflage", + -13.342578887939451 + ], + [ + "60,000", + -13.342632293701172 + ], + [ + "▁reimbursed", + -13.34271240234375 + ], + [ + "▁Buch", + -13.342779159545898 + ], + [ + "▁plaques", + -13.342809677124023 + ], + [ + "slide", + -13.34282398223877 + ], + [ + "▁Yas", + -13.342825889587402 + ], + [ + "Naturally", + -13.34284210205078 + ], + [ + "▁delaying", + -13.342903137207031 + ], + [ + "▁forwards", + -13.342949867248535 + ], + [ + "▁climatic", + -13.343005180358888 + ], + [ + "▁STOP", + -13.34305191040039 + ], + [ + "▁thunderstorms", + -13.343110084533691 + ], + [ + "▁Botswana", + -13.343181610107422 + ], + [ + "▁appreciating", + -13.343181610107422 + ], + [ + "▁repositories", + -13.343181610107422 + ], + [ + "▁juggling", + -13.343184471130373 + ], + [ + "▁cacao", + -13.343186378479004 + ], + [ + "▁98%", + -13.343201637268066 + ], + [ + "▁assuring", + -13.343278884887695 + ], + [ + "RAM", + -13.34330940246582 + ], + [ + "▁facto", + -13.343341827392578 + ], + [ + "▁Nose", + -13.343356132507324 + ], + [ + "▁IDs", + -13.343366622924805 + ], + [ + "▁trenches", + -13.343420028686523 + ], + [ + "▁tenders", + -13.343445777893066 + ], + [ + "▁Dodd", + -13.343460083007812 + ], + [ + "▁Editors", + -13.343461036682127 + ], + [ + "uu", + -13.343467712402344 + ], + [ + "▁adaptor", + -13.343500137329102 + ], + [ + ":34", + -13.343548774719238 + ], + [ + "▁Mapping", + -13.34364414215088 + ], + [ + "▁explorers", + -13.344098091125488 + ], + [ + "▁(2000)", + -13.34415054321289 + ], + [ + "▁TAKE", + -13.344343185424805 + ], + [ + "▁boils", + -13.344343185424805 + ], + [ + "#2", + -13.34434700012207 + ], + [ + "▁decipher", + -13.344367027282717 + ], + [ + "▁Atomic", + -13.344372749328612 + ], + [ + "▁densely", + -13.344420433044434 + ], + [ + "▁constituent", + -13.344441413879396 + ], + [ + "▁affirmation", + -13.344463348388672 + ], + [ + "-64", + -13.344505310058594 + ], + [ + "Spanish", + -13.344579696655272 + ], + [ + "▁fad", + -13.344626426696776 + ], + [ + "mura", + -13.344788551330566 + ], + [ + "▁Claude", + -13.344799041748049 + ], + [ + "▁perfumes", + -13.344820022583008 + ], + [ + "funding", + -13.344866752624512 + ], + [ + "▁Dolly", + -13.34488582611084 + ], + [ + "▁Dump", + -13.344958305358888 + ], + [ + "▁PAN", + -13.345001220703123 + ], + [ + "▁archaeology", + -13.345016479492188 + ], + [ + "grams", + -13.345155715942385 + ], + [ + "▁veto", + -13.34517765045166 + ], + [ + "▁attainment", + 
-13.345271110534668 + ], + [ + "chic", + -13.34528350830078 + ], + [ + "▁judiciary", + -13.345317840576172 + ], + [ + "▁drumming", + -13.345365524291992 + ], + [ + "▁Brewer", + -13.345392227172852 + ], + [ + "▁tonic", + -13.345393180847168 + ], + [ + "▁Trigger", + -13.345459938049316 + ], + [ + "▁CIO", + -13.345650672912598 + ], + [ + "ucker", + -13.345685958862305 + ], + [ + "dk", + -13.345718383789062 + ], + [ + "▁Nis", + -13.345726013183594 + ], + [ + "▁$1,500", + -13.34574031829834 + ], + [ + "▁Homer", + -13.345752716064451 + ], + [ + "▁eighty", + -13.345783233642578 + ], + [ + "▁SLA", + -13.34579086303711 + ], + [ + "▁breached", + -13.345796585083008 + ], + [ + "▁Zimmerman", + -13.345842361450195 + ], + [ + "▁Barney", + -13.345845222473145 + ], + [ + "Sta", + -13.345891952514648 + ], + [ + "ICO", + -13.345909118652344 + ], + [ + "-42", + -13.345924377441406 + ], + [ + "▁summon", + -13.346001625061035 + ], + [ + "▁Means", + -13.346017837524414 + ], + [ + "▁killings", + -13.346031188964844 + ], + [ + "PSC", + -13.34604263305664 + ], + [ + "▁Lid", + -13.346071243286133 + ], + [ + "211", + -13.346112251281738 + ], + [ + "▁Inclusion", + -13.34613800048828 + ], + [ + "▁sulfate", + -13.346231460571287 + ], + [ + "▁casts", + -13.346251487731934 + ], + [ + "▁Cabo", + -13.34628200531006 + ], + [ + "Der", + -13.34633731842041 + ], + [ + "▁Intensive", + -13.34640121459961 + ], + [ + "▁Describe", + -13.346449851989746 + ], + [ + "rates", + -13.346555709838867 + ], + [ + "bomb", + -13.346558570861816 + ], + [ + "▁Coupe", + -13.346595764160156 + ], + [ + "▁storyteller", + -13.346681594848633 + ], + [ + "▁(32", + -13.34674835205078 + ], + [ + "Elizabeth", + -13.346753120422363 + ], + [ + "▁1902", + -13.346776008605955 + ], + [ + "▁SAR", + -13.34682559967041 + ], + [ + "▁shuffle", + -13.346840858459473 + ], + [ + "▁Magnesium", + -13.346881866455078 + ], + [ + "Square", + -13.34695816040039 + ], + [ + "Past", + -13.347041130065918 + ], + [ + "▁Customize", + -13.34706974029541 + ], + [ + "EO", + -13.347197532653809 + ], + [ + "▁battered", + -13.347214698791504 + ], + [ + "dler", + -13.347333908081056 + ], + [ + "▁Psychiatry", + -13.34745979309082 + ], + [ + "▁visceral", + -13.34745979309082 + ], + [ + "▁existential", + -13.347475051879885 + ], + [ + "▁pennies", + -13.347481727600098 + ], + [ + "▁buttermilk", + -13.347490310668944 + ], + [ + "▁Fulton", + -13.347545623779297 + ], + [ + "▁archery", + -13.347558975219728 + ], + [ + "▁childbirth", + -13.34758186340332 + ], + [ + "▁larvae", + -13.34758186340332 + ], + [ + "▁infantry", + -13.347599029541016 + ], + [ + "▁Alba", + -13.347611427307127 + ], + [ + "▁Spine", + -13.34768009185791 + ], + [ + "▁Editing", + -13.347686767578123 + ], + [ + "▁diffusion", + -13.347691535949709 + ], + [ + "thus", + -13.347732543945312 + ], + [ + "▁adored", + -13.347748756408691 + ], + [ + "▁auditory", + -13.347779273986816 + ], + [ + "▁2015)", + -13.347780227661133 + ], + [ + "▁handsets", + -13.347830772399902 + ], + [ + "▁presets", + -13.347841262817385 + ], + [ + "▁80’", + -13.347845077514648 + ], + [ + "▁lapse", + -13.34788417816162 + ], + [ + "▁Dominion", + -13.347904205322266 + ], + [ + "▁stimulated", + -13.347938537597656 + ], + [ + "▁150,000", + -13.348105430603027 + ], + [ + "▁Centres", + -13.34816551208496 + ], + [ + "▁Plasma", + -13.348212242126465 + ], + [ + "▁docking", + -13.348291397094728 + ], + [ + "Assist", + -13.348363876342772 + ], + [ + "north", + -13.348403930664062 + ], + [ + "▁Specifications", + -13.34846305847168 + ], + [ + "▁lastly", + 
-13.34852123260498 + ], + [ + "▁wardrobes", + -13.34866428375244 + ], + [ + "▁healthiest", + -13.348711967468262 + ], + [ + "ively", + -13.348843574523926 + ], + [ + "▁Achilles", + -13.348877906799316 + ], + [ + "Executive", + -13.348939895629885 + ], + [ + "▁Personality", + -13.349112510681152 + ], + [ + "▁Sliding", + -13.34913444519043 + ], + [ + "▁shafts", + -13.349159240722656 + ], + [ + "▁4.8", + -13.34919261932373 + ], + [ + "MED", + -13.349201202392578 + ], + [ + "▁Sanskrit", + -13.349268913269045 + ], + [ + "▁sizeable", + -13.349281311035156 + ], + [ + "stem", + -13.349327087402344 + ], + [ + "rial", + -13.349336624145508 + ], + [ + "▁Careers", + -13.34941577911377 + ], + [ + "▁fre", + -13.349453926086426 + ], + [ + "▁malignant", + -13.34952449798584 + ], + [ + "mies", + -13.349580764770508 + ], + [ + "▁camaraderie", + -13.349605560302734 + ], + [ + "▁consolation", + -13.349605560302734 + ], + [ + "▁dispersion", + -13.349605560302734 + ], + [ + "▁escalating", + -13.349605560302734 + ], + [ + "▁Kurdish", + -13.349608421325684 + ], + [ + "▁stumbling", + -13.349610328674316 + ], + [ + "▁Stevie", + -13.349615097045898 + ], + [ + "▁Whatsapp", + -13.34961986541748 + ], + [ + "▁relive", + -13.34962558746338 + ], + [ + "▁microfiber", + -13.349629402160645 + ], + [ + "kom", + -13.349651336669922 + ], + [ + "▁dummy", + -13.349651336669922 + ], + [ + "▁Natasha", + -13.349671363830566 + ], + [ + "▁fortified", + -13.34969425201416 + ], + [ + "▁16\"", + -13.349732398986816 + ], + [ + "▁bridesmaids", + -13.349735260009766 + ], + [ + "ffy", + -13.349774360656738 + ], + [ + "▁haircuts", + -13.34983253479004 + ], + [ + "▁Battlefield", + -13.349872589111328 + ], + [ + "▁gag", + -13.349900245666504 + ], + [ + "▁Fiscal", + -13.349918365478516 + ], + [ + "▁Parsons", + -13.349937438964844 + ], + [ + "hunter", + -13.34997844696045 + ], + [ + "▁flushing", + -13.349989891052246 + ], + [ + "207", + -13.350017547607422 + ], + [ + "▁petite", + -13.350027084350586 + ], + [ + "finder", + -13.35009479522705 + ], + [ + "biotic", + -13.350123405456545 + ], + [ + "▁Dinosaur", + -13.350135803222656 + ], + [ + "TK", + -13.350147247314451 + ], + [ + "tley", + -13.35029411315918 + ], + [ + "▁Pasta", + -13.350324630737305 + ], + [ + "▁interplay", + -13.350329399108888 + ], + [ + "155", + -13.350333213806152 + ], + [ + "▁flowed", + -13.350399017333984 + ], + [ + "▁divider", + -13.35042953491211 + ], + [ + "▁DOS", + -13.35044002532959 + ], + [ + "▁Moose", + -13.35044765472412 + ], + [ + "▁Clickfunnels", + -13.350479125976562 + ], + [ + "▁foreclosures", + -13.35060214996338 + ], + [ + "pk", + -13.350603103637695 + ], + [ + "Client", + -13.350619316101074 + ], + [ + "▁ransom", + -13.35065460205078 + ], + [ + "▁sieve", + -13.350811958312988 + ], + [ + "▁barefoot", + -13.350823402404783 + ], + [ + "report", + -13.350964546203612 + ], + [ + "▁(2015).", + -13.351110458374023 + ], + [ + "▁imitate", + -13.351160049438477 + ], + [ + "208", + -13.35120677947998 + ], + [ + "▁sw", + -13.351222038269045 + ], + [ + "awareness", + -13.351225852966309 + ], + [ + "Jacob", + -13.351236343383787 + ], + [ + "▁Hint", + -13.351275444030762 + ], + [ + "▁JPEG", + -13.351298332214355 + ], + [ + "DV", + -13.351299285888672 + ], + [ + "chet", + -13.351350784301758 + ], + [ + "▁sham", + -13.351438522338867 + ], + [ + "▁calculates", + -13.351533889770508 + ], + [ + "▁Algeria", + -13.351572036743164 + ], + [ + "1500", + -13.351577758789062 + ], + [ + "hora", + -13.35166072845459 + ], + [ + "upper", + -13.351696014404297 + ], + [ + "▁unintended", + 
-13.351783752441406 + ], + [ + "unga", + -13.351795196533203 + ], + [ + "▁RES", + -13.35181999206543 + ], + [ + "▁Jackpot", + -13.35185432434082 + ], + [ + "▁Revision", + -13.351889610290527 + ], + [ + "▁drier", + -13.351900100708008 + ], + [ + "▁Carlson", + -13.351980209350586 + ], + [ + "▁Jaya", + -13.351981163024902 + ], + [ + "▁Wolfe", + -13.351993560791016 + ], + [ + "rei", + -13.352041244506836 + ], + [ + "▁overarching", + -13.352079391479492 + ], + [ + "▁boardwalk", + -13.352080345153809 + ], + [ + "▁Kla", + -13.35214614868164 + ], + [ + "eed", + -13.35232925415039 + ], + [ + "▁139", + -13.352335929870604 + ], + [ + "▁rodents", + -13.35239028930664 + ], + [ + "Playing", + -13.352632522583008 + ], + [ + "▁Compound", + -13.352707862854004 + ], + [ + "▁deficient", + -13.352818489074709 + ], + [ + "2-4", + -13.35283088684082 + ], + [ + "▁Jab", + -13.352875709533691 + ], + [ + "doo", + -13.352897644042969 + ], + [ + "▁aero", + -13.352899551391602 + ], + [ + "▁WEB", + -13.352947235107422 + ], + [ + "▁repayments", + -13.352985382080078 + ], + [ + "Tor", + -13.35307502746582 + ], + [ + "▁reigning", + -13.353214263916016 + ], + [ + "▁Preheat", + -13.353217124938965 + ], + [ + "▁Kari", + -13.35325527191162 + ], + [ + "▁toughness", + -13.353260040283203 + ], + [ + "▁Connie", + -13.353278160095217 + ], + [ + "▁Beckham", + -13.353331565856934 + ], + [ + "Beauty", + -13.353370666503906 + ], + [ + "▁Guam", + -13.353379249572754 + ], + [ + "▁Sparks", + -13.353379249572754 + ], + [ + "Anything", + -13.3533935546875 + ], + [ + "icker", + -13.353400230407717 + ], + [ + "▁logically", + -13.353402137756348 + ], + [ + "▁brine", + -13.353437423706056 + ], + [ + "▁Booker", + -13.35349464416504 + ], + [ + "▁cloak", + -13.353513717651367 + ], + [ + "Seeing", + -13.353550910949709 + ], + [ + "▁elemental", + -13.353554725646973 + ], + [ + "▁$30,000", + -13.353713989257812 + ], + [ + "CIA", + -13.353747367858888 + ], + [ + "dust", + -13.353811264038086 + ], + [ + "▁Metals", + -13.35387134552002 + ], + [ + "lica", + -13.353877067565918 + ], + [ + "▁paprika", + -13.353911399841309 + ], + [ + "▁unbelievably", + -13.353911399841309 + ], + [ + "▁Staffordshire", + -13.353917121887209 + ], + [ + "▁slotted", + -13.354103088378906 + ], + [ + "tani", + -13.354147911071776 + ], + [ + "▁Everywhere", + -13.354165077209473 + ], + [ + "▁casualty", + -13.354167938232422 + ], + [ + "▁Karate", + -13.354222297668455 + ], + [ + "▁thirsty", + -13.354269981384276 + ], + [ + "▁graciously", + -13.354345321655272 + ], + [ + "▁Distributor", + -13.35436725616455 + ], + [ + "orous", + -13.35438346862793 + ], + [ + "▁Og", + -13.354387283325195 + ], + [ + "▁FG", + -13.35440731048584 + ], + [ + "▁Abroad", + -13.35442066192627 + ], + [ + "height", + -13.354453086853027 + ], + [ + "▁Gartner", + -13.35446548461914 + ], + [ + "▁Juniper", + -13.354530334472656 + ], + [ + "▁bends", + -13.354551315307615 + ], + [ + "▁snowboarding", + -13.354559898376465 + ], + [ + "▁Sergio", + -13.3546781539917 + ], + [ + "2),", + -13.354869842529297 + ], + [ + "▁lecturers", + -13.354960441589355 + ], + [ + "20%", + -13.354995727539062 + ], + [ + "Residents", + -13.355147361755373 + ], + [ + "▁excite", + -13.355175018310549 + ], + [ + "Uni", + -13.355244636535645 + ], + [ + "Hol", + -13.35526180267334 + ], + [ + "▁Milling", + -13.355292320251465 + ], + [ + "30,000", + -13.355307579040527 + ], + [ + "▁plunged", + -13.355345726013184 + ], + [ + "▁interruptions", + -13.355422973632812 + ], + [ + "tonic", + -13.35551643371582 + ], + [ + "Holiday", + -13.355558395385742 
+ ], + [ + "9.00", + -13.355578422546388 + ], + [ + "▁Tomas", + -13.355634689331056 + ], + [ + "England", + -13.35563850402832 + ], + [ + "KU", + -13.355666160583496 + ], + [ + "Friendly", + -13.355682373046877 + ], + [ + "poli", + -13.35572910308838 + ], + [ + "▁Regal", + -13.35573959350586 + ], + [ + "▁seater", + -13.355752944946287 + ], + [ + "▁Blueprint", + -13.355786323547363 + ], + [ + "CBS", + -13.35586929321289 + ], + [ + "Electric", + -13.355948448181152 + ], + [ + "effects", + -13.356000900268556 + ], + [ + "▁miscarriage", + -13.356072425842283 + ], + [ + "▁Fremont", + -13.356080055236816 + ], + [ + "▁Parenthood", + -13.356086730957031 + ], + [ + "▁rubble", + -13.35611343383789 + ], + [ + "▁namesake", + -13.356142044067385 + ], + [ + "walking", + -13.356152534484863 + ], + [ + "▁Multimedia", + -13.356159210205078 + ], + [ + "▁Westchester", + -13.356159210205078 + ], + [ + "▁longitudinal", + -13.356160163879396 + ], + [ + "▁commentators", + -13.356218338012695 + ], + [ + "▁painfully", + -13.356240272521973 + ], + [ + "▁smelled", + -13.356300354003906 + ], + [ + "Gra", + -13.356315612792969 + ], + [ + "▁neutrality", + -13.356375694274902 + ], + [ + "iam", + -13.356380462646484 + ], + [ + "gp", + -13.35639190673828 + ], + [ + "▁endemic", + -13.356411933898926 + ], + [ + "▁peg", + -13.35641860961914 + ], + [ + "▁val", + -13.356453895568848 + ], + [ + "▁70,000", + -13.356578826904297 + ], + [ + "▁Angry", + -13.356616973876951 + ], + [ + "▁Organizational", + -13.356806755065918 + ], + [ + "▁Packing", + -13.35682773590088 + ], + [ + "worn", + -13.356837272644045 + ], + [ + "▁fiercely", + -13.356850624084473 + ], + [ + "▁80,000", + -13.356852531433104 + ], + [ + "zier", + -13.35690689086914 + ], + [ + "▁heed", + -13.35705852508545 + ], + [ + "▁Anxiety", + -13.35710334777832 + ], + [ + "▁connective", + -13.35710334777832 + ], + [ + "▁6.2", + -13.357131958007812 + ], + [ + "▁Hel", + -13.357150077819824 + ], + [ + "ahi", + -13.357186317443848 + ], + [ + "kr", + -13.357196807861328 + ], + [ + "▁disrupting", + -13.357291221618652 + ], + [ + "▁mr", + -13.35741138458252 + ], + [ + "gil", + -13.357431411743164 + ], + [ + "▁pickups", + -13.357431411743164 + ], + [ + "▁napkins", + -13.357476234436035 + ], + [ + "Forex", + -13.35753345489502 + ], + [ + "▁pertains", + -13.357549667358398 + ], + [ + "umb", + -13.357569694519045 + ], + [ + "▁ninja", + -13.357600212097168 + ], + [ + "▁Accountant", + -13.357645988464355 + ], + [ + "ANS", + -13.357647895812988 + ], + [ + "▁Adopt", + -13.35764980316162 + ], + [ + "edition", + -13.357728004455566 + ], + [ + "▁Broadband", + -13.357732772827148 + ], + [ + "supply", + -13.357759475708008 + ], + [ + "▁Jeanne", + -13.3577880859375 + ], + [ + "Shipping", + -13.35780429840088 + ], + [ + "Become", + -13.357852935791016 + ], + [ + "Golf", + -13.357881546020508 + ], + [ + "▁containment", + -13.357953071594238 + ], + [ + "consumer", + -13.357994079589844 + ], + [ + "Laura", + -13.358012199401855 + ], + [ + "▁tonne", + -13.358072280883787 + ], + [ + "▁Liga", + -13.358101844787598 + ], + [ + "▁secrecy", + -13.358235359191896 + ], + [ + "▁unwavering", + -13.358235359191896 + ], + [ + "▁lattice", + -13.358236312866213 + ], + [ + "▁stagnant", + -13.358236312866213 + ], + [ + "▁Miniature", + -13.358248710632324 + ], + [ + "▁detoxification", + -13.35825538635254 + ], + [ + "▁(800)", + -13.358264923095703 + ], + [ + "▁multilingual", + -13.35830020904541 + ], + [ + "▁Shelves", + -13.358306884765623 + ], + [ + "▁jog", + -13.358312606811523 + ], + [ + "▁Accountability", + 
-13.358366012573242 + ], + [ + "▁nutty", + -13.358370780944824 + ], + [ + "▁Lava", + -13.358433723449709 + ], + [ + "▁gloomy", + -13.358433723449709 + ], + [ + "▁Sweepstakes", + -13.358524322509766 + ], + [ + "▁Existing", + -13.3585786819458 + ], + [ + "▁Illustration", + -13.358607292175291 + ], + [ + "▁commend", + -13.358678817749023 + ], + [ + "▁Alaskan", + -13.358707427978516 + ], + [ + "▁Bands", + -13.358750343322754 + ], + [ + "▁(100", + -13.358844757080078 + ], + [ + "grant", + -13.358869552612305 + ], + [ + "▁voicemail", + -13.358880043029783 + ], + [ + "▁Rhythm", + -13.358911514282228 + ], + [ + "▁WELL", + -13.35891819000244 + ], + [ + "▁Playhouse", + -13.35895824432373 + ], + [ + "-38", + -13.35901927947998 + ], + [ + "Parking", + -13.359037399291992 + ], + [ + "▁hotspots", + -13.359081268310549 + ], + [ + "263", + -13.359230995178224 + ], + [ + "▁protesting", + -13.35926914215088 + ], + [ + "▁bib", + -13.35932731628418 + ], + [ + "plug", + -13.359333038330078 + ], + [ + "122", + -13.359355926513672 + ], + [ + "▁deity", + -13.35940933227539 + ], + [ + "▁sunk", + -13.35952377319336 + ], + [ + "▁comm", + -13.359529495239258 + ], + [ + "sboro", + -13.359644889831545 + ], + [ + "▁manifestations", + -13.359644889831545 + ], + [ + "▁tastings", + -13.359747886657717 + ], + [ + "▁Capt", + -13.359787940979004 + ], + [ + "▁funnels", + -13.35987377166748 + ], + [ + "FU", + -13.35996150970459 + ], + [ + "▁Devi", + -13.360000610351562 + ], + [ + "Certified", + -13.36001682281494 + ], + [ + "▁2016)", + -13.36009693145752 + ], + [ + "▁skillful", + -13.360193252563477 + ], + [ + "▁Ernst", + -13.360261917114258 + ], + [ + "▁anthropology", + -13.36040496826172 + ], + [ + "▁irresponsible", + -13.36040496826172 + ], + [ + "▁merchandising", + -13.36040496826172 + ], + [ + "▁paralysis", + -13.360414505004885 + ], + [ + "▁socioeconomic", + -13.360418319702148 + ], + [ + "pies", + -13.360428810119627 + ], + [ + "▁Bernstein", + -13.360429763793944 + ], + [ + "▁Autodesk", + -13.36043930053711 + ], + [ + "cliff", + -13.360504150390623 + ], + [ + "▁foresee", + -13.360529899597168 + ], + [ + "▁134", + -13.360593795776367 + ], + [ + "hia", + -13.360689163208008 + ], + [ + "▁commended", + -13.36080551147461 + ], + [ + "▁typeface", + -13.36080837249756 + ], + [ + "dome", + -13.360902786254885 + ], + [ + "▁Oc", + -13.360958099365234 + ], + [ + "Process", + -13.360984802246094 + ], + [ + "▁Hare", + -13.361031532287598 + ], + [ + "▁slang", + -13.361056327819824 + ], + [ + "▁ascend", + -13.361103057861328 + ], + [ + "▁£25", + -13.361106872558594 + ], + [ + "▁Spins", + -13.361248970031738 + ], + [ + "▁pondering", + -13.361275672912598 + ], + [ + "▁uprising", + -13.36132526397705 + ], + [ + "▁jailbreak", + -13.361373901367188 + ], + [ + "ranking", + -13.36142635345459 + ], + [ + "▁shi", + -13.361427307128906 + ], + [ + "rien", + -13.361448287963867 + ], + [ + "▁Jain", + -13.36147403717041 + ], + [ + "lower", + -13.361547470092772 + ], + [ + "cic", + -13.36158847808838 + ], + [ + "▁livelihoods", + -13.361626625061035 + ], + [ + "▁appreciates", + -13.36167049407959 + ], + [ + "▁HAR", + -13.361690521240234 + ], + [ + "▁wrongly", + -13.361693382263184 + ], + [ + "Cold", + -13.361740112304688 + ], + [ + "▁Classification", + -13.361741065979004 + ], + [ + "Wil", + -13.361836433410645 + ], + [ + "ggs", + -13.361856460571287 + ], + [ + "▁Haz", + -13.361981391906738 + ], + [ + "▁gasket", + -13.361997604370115 + ], + [ + "▁spoilers", + -13.362021446228027 + ], + [ + "limited", + -13.362034797668455 + ], + [ + 
"enthusiastically", + -13.362043380737305 + ], + [ + "▁coli", + -13.362062454223633 + ], + [ + "insert", + -13.362106323242188 + ], + [ + "lman", + -13.362109184265137 + ], + [ + "effect", + -13.362135887145996 + ], + [ + "quarters", + -13.362176895141602 + ], + [ + "haired", + -13.362211227416992 + ], + [ + "Practice", + -13.362225532531738 + ], + [ + "rico", + -13.362227439880373 + ], + [ + "Beginning", + -13.3622407913208 + ], + [ + "-39", + -13.362262725830078 + ], + [ + "▁Educators", + -13.362285614013672 + ], + [ + "▁Sizes", + -13.362344741821287 + ], + [ + "▁attendee", + -13.362360000610352 + ], + [ + "▁installments", + -13.362375259399414 + ], + [ + "▁Balls", + -13.36241626739502 + ], + [ + "▁Cellular", + -13.36244297027588 + ], + [ + "▁Greensboro", + -13.36246395111084 + ], + [ + "▁brisk", + -13.362496376037598 + ], + [ + "Mission", + -13.362557411193848 + ], + [ + "▁exclusions", + -13.362563133239746 + ], + [ + "▁congenital", + -13.362578392028809 + ], + [ + "▁beneficiation", + -13.362579345703123 + ], + [ + "▁precarious", + -13.362579345703123 + ], + [ + "▁LeBron", + -13.362589836120604 + ], + [ + "▁teddy", + -13.36260986328125 + ], + [ + "▁Judges", + -13.362618446350098 + ], + [ + "▁eyeshadow", + -13.362627029418944 + ], + [ + "▁tweeting", + -13.362676620483398 + ], + [ + "▁refinery", + -13.362678527832031 + ], + [ + "▁monogram", + -13.362680435180664 + ], + [ + "▁wiser", + -13.362738609313965 + ], + [ + "▁Andersen", + -13.362810134887695 + ], + [ + "▁aides", + -13.362834930419922 + ], + [ + "▁personable", + -13.362911224365234 + ], + [ + "SIM", + -13.3629150390625 + ], + [ + "▁eyewear", + -13.36296844482422 + ], + [ + "▁summoned", + -13.362981796264648 + ], + [ + "▁Acquisition", + -13.363011360168455 + ], + [ + "▁kisses", + -13.363035202026367 + ], + [ + "▁Addison", + -13.363044738769531 + ], + [ + "opa", + -13.36308765411377 + ], + [ + "pg", + -13.363139152526855 + ], + [ + "▁domestically", + -13.363144874572754 + ], + [ + "▁politely", + -13.36324691772461 + ], + [ + "▁charters", + -13.363273620605469 + ], + [ + "▁Seasonal", + -13.363276481628418 + ], + [ + "1999", + -13.363286018371582 + ], + [ + "▁GRE", + -13.363320350646973 + ], + [ + "Coach", + -13.363329887390137 + ], + [ + "bolt", + -13.36349391937256 + ], + [ + "▁fleets", + -13.363508224487305 + ], + [ + "▁escalation", + -13.36357307434082 + ], + [ + "930", + -13.36357879638672 + ], + [ + "▁ESP", + -13.36357879638672 + ], + [ + "ibi", + -13.363593101501465 + ], + [ + "Met", + -13.363601684570312 + ], + [ + "▁pleading", + -13.363648414611816 + ], + [ + "▁fallout", + -13.363876342773438 + ], + [ + "sourced", + -13.3639554977417 + ], + [ + "▁earthly", + -13.363970756530762 + ], + [ + "nj", + -13.364027976989746 + ], + [ + "▁psychiatrist", + -13.364084243774414 + ], + [ + "▁precursor", + -13.36410140991211 + ], + [ + "▁JV", + -13.36411476135254 + ], + [ + "draft", + -13.36418628692627 + ], + [ + "▁regenerate", + -13.364200592041016 + ], + [ + "▁omit", + -13.364283561706545 + ], + [ + "secure", + -13.364327430725098 + ], + [ + "▁Guardians", + -13.364410400390623 + ], + [ + "▁coefficient", + -13.36441707611084 + ], + [ + "Coordinate", + -13.36444091796875 + ], + [ + "Choice", + -13.36446475982666 + ], + [ + "▁20+", + -13.364486694335938 + ], + [ + "▁sooo", + -13.364496231079102 + ], + [ + "▁Nassau", + -13.36450481414795 + ], + [ + "▁steamer", + -13.364507675170898 + ], + [ + "Seriously", + -13.364530563354492 + ], + [ + "▁Pew", + -13.364537239074709 + ], + [ + "275", + -13.364612579345703 + ], + [ + "▁bohemian", + 
-13.36461353302002 + ], + [ + "▁praises", + -13.364627838134766 + ], + [ + "▁lads", + -13.364726066589355 + ], + [ + "ellen", + -13.364727020263672 + ], + [ + "▁Helicopter", + -13.36474323272705 + ], + [ + "▁hydrating", + -13.364757537841797 + ], + [ + "▁interfering", + -13.364757537841797 + ], + [ + "▁chandeliers", + -13.36475944519043 + ], + [ + "▁vantage", + -13.364771842956545 + ], + [ + "▁Jammu", + -13.364778518676758 + ], + [ + "▁vandalism", + -13.364782333374023 + ], + [ + "blowing", + -13.364790916442873 + ], + [ + "▁Lansing", + -13.3648099899292 + ], + [ + "▁hairdryer", + -13.364855766296388 + ], + [ + "▁PLA", + -13.364876747131348 + ], + [ + "▁IOS", + -13.36489486694336 + ], + [ + "▁Gould", + -13.364951133728027 + ], + [ + "YO", + -13.364986419677734 + ], + [ + "▁Prints", + -13.364995002746582 + ], + [ + "athi", + -13.36502456665039 + ], + [ + "▁Folder", + -13.365038871765137 + ], + [ + "▁calibrated", + -13.365059852600098 + ], + [ + "Ag", + -13.365141868591309 + ], + [ + "coach", + -13.365166664123535 + ], + [ + "▁saltwater", + -13.365175247192385 + ], + [ + "ardo", + -13.36520004272461 + ], + [ + "Jul", + -13.365209579467772 + ], + [ + "chart", + -13.365289688110352 + ], + [ + "▁Erica", + -13.365365028381348 + ], + [ + "Tro", + -13.365463256835938 + ], + [ + "▁Trainers", + -13.365564346313477 + ], + [ + "Ice", + -13.365618705749512 + ], + [ + "display", + -13.365618705749512 + ], + [ + "▁GNU", + -13.365676879882812 + ], + [ + "8-0", + -13.365694046020508 + ], + [ + "▁Eureka", + -13.365699768066406 + ], + [ + "Wish", + -13.365740776062012 + ], + [ + "dx", + -13.365800857543944 + ], + [ + "Improve", + -13.365856170654297 + ], + [ + "▁deserted", + -13.36587905883789 + ], + [ + "▁Stain", + -13.365896224975586 + ], + [ + "Fold", + -13.365968704223633 + ], + [ + "▁Kitchens", + -13.365971565246582 + ], + [ + "▁Makers", + -13.366103172302246 + ], + [ + "▁Pil", + -13.366223335266112 + ], + [ + "▁cv", + -13.366297721862791 + ], + [ + "▁lament", + -13.3663330078125 + ], + [ + "-2013", + -13.36634349822998 + ], + [ + "▁een", + -13.366350173950195 + ], + [ + "▁orally", + -13.36635684967041 + ], + [ + "▁binds", + -13.366372108459473 + ], + [ + "peak", + -13.366389274597168 + ], + [ + "lv", + -13.3663911819458 + ], + [ + "kil", + -13.366419792175291 + ], + [ + "▁exploiting", + -13.36643409729004 + ], + [ + "Jane", + -13.36653995513916 + ], + [ + "▁ethically", + -13.366594314575195 + ], + [ + "422", + -13.366605758666992 + ], + [ + "nol", + -13.366689682006836 + ], + [ + "Language", + -13.366695404052734 + ], + [ + "artist", + -13.36675262451172 + ], + [ + "▁Driven", + -13.366755485534668 + ], + [ + "▁favoured", + -13.366806030273438 + ], + [ + "assisted", + -13.366848945617676 + ], + [ + "▁magician", + -13.366848945617676 + ], + [ + "awi", + -13.366849899291992 + ], + [ + "▁Rid", + -13.366872787475586 + ], + [ + "▁cranberries", + -13.36694049835205 + ], + [ + "▁unrivalled", + -13.36694049835205 + ], + [ + "▁tavern", + -13.366944313049316 + ], + [ + "▁Peoria", + -13.366945266723633 + ], + [ + "▁filament", + -13.36695957183838 + ], + [ + "▁Crosby", + -13.366971015930176 + ], + [ + "▁northeastern", + -13.366979598999023 + ], + [ + "▁Constable", + -13.367005348205566 + ], + [ + "▁worldview", + -13.367018699645996 + ], + [ + "▁Clev", + -13.367021560668944 + ], + [ + "/02/", + -13.367067337036133 + ], + [ + "▁doubtful", + -13.367103576660156 + ], + [ + "▁Bayer", + -13.367143630981444 + ], + [ + "▁setback", + -13.367197036743164 + ], + [ + "▁Hin", + -13.367206573486328 + ], + [ + "▁Kayak", + 
-13.367352485656738 + ], + [ + "▁intercept", + -13.367363929748535 + ], + [ + "▁urn", + -13.367395401000977 + ], + [ + "Son", + -13.367486000061035 + ], + [ + "?!?", + -13.3674898147583 + ], + [ + "20)", + -13.367636680603027 + ], + [ + "▁forbid", + -13.367660522460938 + ], + [ + "▁seperate", + -13.367731094360352 + ], + [ + "aly", + -13.367815971374512 + ], + [ + "399", + -13.367900848388672 + ], + [ + "▁scattering", + -13.367900848388672 + ], + [ + "▁vested", + -13.36797332763672 + ], + [ + "▁uptime", + -13.3679838180542 + ], + [ + "▁Utiliz", + -13.368086814880373 + ], + [ + "ATS", + -13.368107795715332 + ], + [ + "▁inverted", + -13.368120193481444 + ], + [ + "▁OU", + -13.368138313293455 + ], + [ + "TAN", + -13.368145942687988 + ], + [ + "buck", + -13.368156433105469 + ], + [ + "▁Span", + -13.36819553375244 + ], + [ + "itive", + -13.368250846862791 + ], + [ + "Bright", + -13.368268966674805 + ], + [ + "▁Mot", + -13.368383407592772 + ], + [ + "▁hu", + -13.36838436126709 + ], + [ + "▁cub", + -13.36843204498291 + ], + [ + "▁Cosmetics", + -13.368435859680176 + ], + [ + "▁expeditions", + -13.368465423583984 + ], + [ + "▁embryo", + -13.368534088134766 + ], + [ + "▁parallels", + -13.368538856506348 + ], + [ + "vos", + -13.368599891662598 + ], + [ + "belt", + -13.368692398071287 + ], + [ + "▁asap", + -13.368715286254885 + ], + [ + "OO", + -13.368836402893066 + ], + [ + "tid", + -13.368852615356444 + ], + [ + "popular", + -13.368896484375 + ], + [ + "-2012", + -13.36890983581543 + ], + [ + "▁Assets", + -13.368972778320312 + ], + [ + "ffen", + -13.369026184082031 + ], + [ + "fei", + -13.369072914123535 + ], + [ + "▁prognosis", + -13.369129180908203 + ], + [ + "▁Islamabad", + -13.369138717651367 + ], + [ + "▁crossroads", + -13.369159698486328 + ], + [ + "▁hassles", + -13.369171142578123 + ], + [ + "▁exposition", + -13.369178771972656 + ], + [ + "▁grandeur", + -13.36923122406006 + ], + [ + "▁moth", + -13.369244575500488 + ], + [ + "▁handgun", + -13.369282722473145 + ], + [ + "scoring", + -13.36929416656494 + ], + [ + "▁Sala", + -13.369348526000977 + ], + [ + "▁Extremely", + -13.369364738464355 + ], + [ + "▁Wol", + -13.369365692138672 + ], + [ + "▁quilted", + -13.36938190460205 + ], + [ + "▁acreage", + -13.369383811950684 + ], + [ + "Lisa", + -13.369577407836914 + ], + [ + "▁sta", + -13.369606971740724 + ], + [ + "▁Caramel", + -13.369629859924316 + ], + [ + "▁OMG", + -13.369708061218262 + ], + [ + "▁grids", + -13.369757652282717 + ], + [ + "▁Welch", + -13.36977481842041 + ], + [ + "▁Yar", + -13.3699312210083 + ], + [ + "▁1840", + -13.369942665100098 + ], + [ + "▁Abigail", + -13.370022773742676 + ], + [ + "unconstitutional", + -13.3700590133667 + ], + [ + "▁wrists", + -13.37009048461914 + ], + [ + "icide", + -13.370177268981934 + ], + [ + "▁purge", + -13.37020206451416 + ], + [ + "rano", + -13.370203018188477 + ], + [ + "stadt", + -13.370208740234377 + ], + [ + "shima", + -13.37026023864746 + ], + [ + "▁250,000", + -13.370296478271484 + ], + [ + "▁Laos", + -13.370320320129396 + ], + [ + "▁Satin", + -13.370428085327148 + ], + [ + "▁twine", + -13.37047290802002 + ], + [ + "CDC", + -13.370604515075684 + ], + [ + "▁$350", + -13.370607376098633 + ], + [ + "bond", + -13.370624542236328 + ], + [ + "apo", + -13.370640754699709 + ], + [ + "▁Yield", + -13.370670318603516 + ], + [ + "eries", + -13.370781898498535 + ], + [ + "▁wirelessly", + -13.370878219604492 + ], + [ + "▁Abrams", + -13.370893478393556 + ], + [ + "essa", + -13.370915412902832 + ], + [ + "tura", + -13.370926856994627 + ], + [ + "western", + 
-13.37097454071045 + ], + [ + "▁Harness", + -13.370991706848145 + ], + [ + "▁Peggy", + -13.371005058288574 + ], + [ + "ENS", + -13.371116638183594 + ], + [ + "▁ASA", + -13.371146202087402 + ], + [ + "MAP", + -13.371182441711426 + ], + [ + "▁fleas", + -13.37118434906006 + ], + [ + "rob", + -13.371185302734377 + ], + [ + "▁aspirin", + -13.371187210083008 + ], + [ + "▁wreak", + -13.371224403381348 + ], + [ + "Management", + -13.371235847473145 + ], + [ + "Rachel", + -13.371288299560549 + ], + [ + "▁frantic", + -13.37130069732666 + ], + [ + "▁Anthropology", + -13.37132167816162 + ], + [ + "▁impromptu", + -13.37132167816162 + ], + [ + "▁rhubarb", + -13.37132167816162 + ], + [ + "▁Bazaar", + -13.371323585510254 + ], + [ + "▁Piazza", + -13.371330261230469 + ], + [ + "▁eyeglasses", + -13.371373176574709 + ], + [ + "▁PBX", + -13.37142848968506 + ], + [ + "▁Whistler", + -13.371441841125488 + ], + [ + "▁confessed", + -13.371533393859863 + ], + [ + "▁Surgeons", + -13.371541023254396 + ], + [ + "▁amounted", + -13.371546745300291 + ], + [ + "▁Grad", + -13.371560096740724 + ], + [ + "▁suitably", + -13.37156105041504 + ], + [ + "▁advertiser", + -13.371637344360352 + ], + [ + "▁yelled", + -13.371788024902344 + ], + [ + "▁Farmhouse", + -13.371792793273926 + ], + [ + "▁symmetrical", + -13.371800422668455 + ], + [ + "▁Humane", + -13.37180233001709 + ], + [ + "▁showdown", + -13.37180519104004 + ], + [ + "▁CGI", + -13.371938705444336 + ], + [ + "▁queues", + -13.37195110321045 + ], + [ + "▁tweaked", + -13.372118949890137 + ], + [ + "▁Mild", + -13.37216091156006 + ], + [ + "▁eagles", + -13.372204780578612 + ], + [ + "▁Everton", + -13.372392654418944 + ], + [ + "▁Scholarships", + -13.372425079345703 + ], + [ + "▁Installing", + -13.372483253479004 + ], + [ + "▁och", + -13.37252426147461 + ], + [ + "logo", + -13.372611045837402 + ], + [ + "▁Kidd", + -13.372640609741213 + ], + [ + "▁bumped", + -13.37266731262207 + ], + [ + "▁sift", + -13.3727445602417 + ], + [ + "▁Poe", + -13.372754096984863 + ], + [ + "▁Wipe", + -13.372772216796877 + ], + [ + "Cons", + -13.37277889251709 + ], + [ + "▁Aztec", + -13.37286376953125 + ], + [ + "▁behaved", + -13.37292766571045 + ], + [ + "▁manor", + -13.37294864654541 + ], + [ + "rix", + -13.37297534942627 + ], + [ + "▁snorkel", + -13.372980117797852 + ], + [ + "Yu", + -13.37307357788086 + ], + [ + "▁WAN", + -13.373085975646973 + ], + [ + "▁mince", + -13.373102188110352 + ], + [ + "def", + -13.373144149780272 + ], + [ + "▁assesses", + -13.37315845489502 + ], + [ + "igi", + -13.373163223266602 + ], + [ + "▁booze", + -13.373169898986816 + ], + [ + "▁probiotics", + -13.373229026794434 + ], + [ + "▁Viola", + -13.373428344726562 + ], + [ + "nium", + -13.37345027923584 + ], + [ + "▁Finch", + -13.3734769821167 + ], + [ + "▁(26", + -13.37349796295166 + ], + [ + "▁Redmi", + -13.37350845336914 + ], + [ + "▁anesthetic", + -13.373519897460938 + ], + [ + "▁dispensing", + -13.373519897460938 + ], + [ + "▁Colombo", + -13.37352466583252 + ], + [ + "▁Taken", + -13.37354564666748 + ], + [ + "Pakistan", + -13.37355136871338 + ], + [ + "▁hinted", + -13.37356948852539 + ], + [ + "▁augmentation", + -13.373600006103516 + ], + [ + "▁Shelley", + -13.373629570007324 + ], + [ + "▁SOS", + -13.373637199401855 + ], + [ + "▁minivan", + -13.373697280883787 + ], + [ + "ettes", + -13.373699188232422 + ], + [ + "website", + -13.3737211227417 + ], + [ + "dding", + -13.373723030090332 + ], + [ + "▁subdued", + -13.373733520507812 + ], + [ + "▁Akron", + -13.37386417388916 + ], + [ + "▁transitioned", + -13.373943328857422 + ], 
+ [ + "▁Moments", + -13.373976707458496 + ], + [ + "▁(21", + -13.3740234375 + ], + [ + "▁Increased", + -13.374056816101074 + ], + [ + "▁Dani", + -13.37419605255127 + ], + [ + "▁Hulk", + -13.374207496643066 + ], + [ + "▁fussy", + -13.374253273010254 + ], + [ + "▁guidebook", + -13.374370574951172 + ], + [ + "▁crates", + -13.374526977539062 + ], + [ + "▁iterations", + -13.374533653259276 + ], + [ + "▁Unix", + -13.3745756149292 + ], + [ + "adas", + -13.374788284301758 + ], + [ + "▁Easton", + -13.374940872192385 + ], + [ + "1.8", + -13.375032424926758 + ], + [ + "▁McCo", + -13.375100135803224 + ], + [ + "Mont", + -13.375179290771484 + ], + [ + "▁bum", + -13.37522792816162 + ], + [ + "urs", + -13.375239372253418 + ], + [ + "3-5", + -13.375263214111328 + ], + [ + "emer", + -13.37529754638672 + ], + [ + "watching", + -13.3753023147583 + ], + [ + "LIN", + -13.37536907196045 + ], + [ + "▁WHEN", + -13.375426292419434 + ], + [ + "trap", + -13.375450134277344 + ], + [ + "▁plural", + -13.37554931640625 + ], + [ + "▁($3", + -13.375595092773438 + ], + [ + "▁Ramirez", + -13.375632286071776 + ], + [ + "▁Framing", + -13.375717163085938 + ], + [ + "▁attributable", + -13.37572193145752 + ], + [ + "▁endearing", + -13.375722885131836 + ], + [ + "▁Neuroscience", + -13.37573528289795 + ], + [ + "▁Monk", + -13.375760078430176 + ], + [ + "▁Scenic", + -13.375775337219238 + ], + [ + "▁configurable", + -13.375787734985352 + ], + [ + "▁Promote", + -13.375800132751465 + ], + [ + "▁PPI", + -13.37580394744873 + ], + [ + "▁wares", + -13.375845909118652 + ], + [ + "uj", + -13.375859260559082 + ], + [ + "▁cyclone", + -13.375892639160156 + ], + [ + "Certainly", + -13.37596321105957 + ], + [ + "▁$32", + -13.375967979431152 + ], + [ + "contract", + -13.375982284545898 + ], + [ + "▁Carefully", + -13.375990867614746 + ], + [ + "▁sitter", + -13.37605094909668 + ], + [ + "▁Dustin", + -13.376158714294434 + ], + [ + "▁ply", + -13.376239776611328 + ], + [ + "▁aptly", + -13.376285552978516 + ], + [ + "Sight", + -13.376298904418944 + ], + [ + "▁Combination", + -13.376474380493164 + ], + [ + "▁290", + -13.376521110534668 + ], + [ + "mination", + -13.37668228149414 + ], + [ + "having", + -13.376684188842772 + ], + [ + "▁pur", + -13.376689910888672 + ], + [ + "Villa", + -13.376733779907228 + ], + [ + "▁lore", + -13.376836776733398 + ], + [ + "▁potency", + -13.376935005187988 + ], + [ + "▁Conan", + -13.376967430114746 + ], + [ + "CPA", + -13.377010345458984 + ], + [ + "▁1978.", + -13.377016067504885 + ], + [ + "▁Collegiate", + -13.377068519592283 + ], + [ + "▁£30", + -13.37708854675293 + ], + [ + "▁BW", + -13.377127647399902 + ], + [ + "▁kon", + -13.377410888671877 + ], + [ + "▁Pon", + -13.377423286437988 + ], + [ + "▁outperform", + -13.37745189666748 + ], + [ + "▁skateboard", + -13.377467155456545 + ], + [ + "rug", + -13.377469062805176 + ], + [ + "▁stereotype", + -13.377500534057615 + ], + [ + "▁encountering", + -13.377503395080566 + ], + [ + "▁Wagon", + -13.37755012512207 + ], + [ + "▁Referral", + -13.377573013305664 + ], + [ + "▁numeric", + -13.377581596374512 + ], + [ + "▁diver", + -13.37758445739746 + ], + [ + "▁allure", + -13.377630233764648 + ], + [ + "▁Durable", + -13.377680778503418 + ], + [ + "▁Promotional", + -13.377714157104492 + ], + [ + "▁clinician", + -13.377724647521973 + ], + [ + "quist", + -13.377921104431152 + ], + [ + "▁zebra", + -13.377930641174316 + ], + [ + "▁Mermaid", + -13.377937316894531 + ], + [ + "▁kinase", + -13.377939224243164 + ], + [ + "▁WITHOUT", + -13.37794589996338 + ], + [ + "▁Norris", + 
-13.377970695495604 + ], + [ + "Times", + -13.3779935836792 + ], + [ + "▁segmented", + -13.378066062927246 + ], + [ + "▁BJ", + -13.37809944152832 + ], + [ + "▁grate", + -13.378170013427734 + ], + [ + "▁Tuscan", + -13.378189086914062 + ], + [ + "▁setups", + -13.378218650817873 + ], + [ + "▁Seniors", + -13.378229141235352 + ], + [ + "▁weathered", + -13.378235816955566 + ], + [ + "▁eerie", + -13.378246307373049 + ], + [ + "soluble", + -13.378275871276855 + ], + [ + "▁pouches", + -13.37829875946045 + ], + [ + "RIC", + -13.378304481506348 + ], + [ + "▁VB", + -13.378334045410156 + ], + [ + "▁STR", + -13.378397941589355 + ], + [ + "▁steeped", + -13.378409385681152 + ], + [ + "▁backers", + -13.378453254699709 + ], + [ + "▁Antarctic", + -13.378460884094238 + ], + [ + "▁Cheng", + -13.378506660461426 + ], + [ + "▁Token", + -13.378552436828612 + ], + [ + "Lower", + -13.378554344177246 + ], + [ + "▁dart", + -13.378595352172852 + ], + [ + "▁swirling", + -13.378626823425291 + ], + [ + "▁Staying", + -13.37867832183838 + ], + [ + "▁(2017).", + -13.378708839416504 + ], + [ + "Wine", + -13.37877082824707 + ], + [ + "speak", + -13.37881851196289 + ], + [ + "▁Recorder", + -13.378867149353027 + ], + [ + "▁Cairns", + -13.378876686096191 + ], + [ + "▁Dart", + -13.378900527954102 + ], + [ + "3-0", + -13.378976821899414 + ], + [ + "Gov", + -13.379016876220703 + ], + [ + "Tec", + -13.379046440124512 + ], + [ + "allow", + -13.379047393798828 + ], + [ + "▁Lorenzo", + -13.379053115844728 + ], + [ + "them", + -13.37911891937256 + ], + [ + "ар", + -13.37917423248291 + ], + [ + "▁Largest", + -13.37918186187744 + ], + [ + "dai", + -13.379223823547363 + ], + [ + "1-8", + -13.379233360290527 + ], + [ + "▁servings", + -13.37923526763916 + ], + [ + "▁smear", + -13.37926197052002 + ], + [ + "Steven", + -13.379311561584473 + ], + [ + "▁Ele", + -13.37938404083252 + ], + [ + "Catch", + -13.379451751708984 + ], + [ + "▁converse", + -13.379511833190918 + ], + [ + "▁rhyme", + -13.379524230957031 + ], + [ + "▁Berger", + -13.379560470581056 + ], + [ + "▁shortlist", + -13.379615783691406 + ], + [ + "▁Edmund", + -13.379633903503418 + ], + [ + "▁Nigerians", + -13.37969207763672 + ], + [ + "▁hoc", + -13.379716873168944 + ], + [ + "2003", + -13.379722595214844 + ], + [ + "▁aerobic", + -13.379863739013672 + ], + [ + "▁Aren", + -13.379887580871582 + ], + [ + "dur", + -13.37989330291748 + ], + [ + "iwa", + -13.3799467086792 + ], + [ + "▁hopped", + -13.379990577697754 + ], + [ + "▁respondent", + -13.380064010620115 + ], + [ + "▁carefree", + -13.380066871643066 + ], + [ + "▁conscientious", + -13.380142211914062 + ], + [ + "▁unrivaled", + -13.380142211914062 + ], + [ + "▁Residency", + -13.38014316558838 + ], + [ + "▁snoring", + -13.380144119262695 + ], + [ + "▁Theodore", + -13.38014793395996 + ], + [ + "▁unheard", + -13.38017463684082 + ], + [ + "arily", + -13.380178451538086 + ], + [ + "▁defaults", + -13.38019561767578 + ], + [ + "▁Branson", + -13.380207061767578 + ], + [ + "Believe", + -13.380270957946776 + ], + [ + "▁thanksgiving", + -13.380290985107422 + ], + [ + "▁intertwined", + -13.38031768798828 + ], + [ + "▁weakest", + -13.380352973937988 + ], + [ + "▁stormwater", + -13.380367279052734 + ], + [ + "▁Durban", + -13.380375862121582 + ], + [ + "HN", + -13.38039493560791 + ], + [ + "HAN", + -13.380398750305176 + ], + [ + "Pool", + -13.38045597076416 + ], + [ + "Limited", + -13.38052749633789 + ], + [ + "HQ", + -13.380558013916016 + ], + [ + "▁8.0", + -13.380569458007812 + ], + [ + "▁Jenner", + -13.380596160888672 + ], + [ + "▁Juvenile", + 
-13.38060188293457 + ], + [ + "▁Nana", + -13.380731582641602 + ], + [ + "▁Otter", + -13.380744934082031 + ], + [ + "▁Mb", + -13.38075351715088 + ], + [ + "owners", + -13.380796432495115 + ], + [ + "▁Seamless", + -13.380825996398926 + ], + [ + "▁parishes", + -13.380836486816406 + ], + [ + "▁como", + -13.380851745605469 + ], + [ + "▁rattan", + -13.38086223602295 + ], + [ + "-1,", + -13.380898475646973 + ], + [ + "▁Dem", + -13.38106632232666 + ], + [ + "▁Debit", + -13.38112735748291 + ], + [ + "Holy", + -13.381268501281738 + ], + [ + "▁Larger", + -13.381305694580078 + ], + [ + "cab", + -13.381369590759276 + ], + [ + "▁Textile", + -13.38145923614502 + ], + [ + "▁dill", + -13.381540298461914 + ], + [ + "▁Worst", + -13.381683349609377 + ], + [ + "▁copyrights", + -13.381754875183104 + ], + [ + "▁Messi", + -13.381850242614746 + ], + [ + "▁nationals", + -13.38185977935791 + ], + [ + "▁Joining", + -13.38187313079834 + ], + [ + "journal", + -13.38188648223877 + ], + [ + "phal", + -13.38190460205078 + ], + [ + "▁memoirs", + -13.381998062133787 + ], + [ + "▁valuables", + -13.381998062133787 + ], + [ + "▁Substance", + -13.382153511047363 + ], + [ + "▁pope", + -13.382169723510742 + ], + [ + "▁Aggregate", + -13.382256507873535 + ], + [ + "▁obligatory", + -13.38236141204834 + ], + [ + "▁ammonia", + -13.382375717163086 + ], + [ + "▁Normandy", + -13.382410049438477 + ], + [ + "proper", + -13.382413864135742 + ], + [ + "▁plow", + -13.38241958618164 + ], + [ + "▁cashmere", + -13.38247299194336 + ], + [ + "Bitcoin", + -13.382501602172852 + ], + [ + "lord", + -13.382512092590332 + ], + [ + "LOVE", + -13.38252067565918 + ], + [ + "▁1979,", + -13.382528305053713 + ], + [ + "▁patina", + -13.382550239562988 + ], + [ + "▁195", + -13.3826322555542 + ], + [ + "lying", + -13.382635116577148 + ], + [ + "▁Cannot", + -13.382650375366213 + ], + [ + "▁HDTV", + -13.382683753967283 + ], + [ + "▁Vijay", + -13.382685661315918 + ], + [ + "essential", + -13.38272762298584 + ], + [ + "bai", + -13.382755279541016 + ], + [ + "Boost", + -13.38276195526123 + ], + [ + "▁workable", + -13.38276195526123 + ], + [ + "▁salespeople", + -13.382789611816406 + ], + [ + "én", + -13.38286304473877 + ], + [ + "Joy", + -13.382875442504885 + ], + [ + "▁sprayer", + -13.382952690124512 + ], + [ + "▁Punjabi", + -13.38305950164795 + ], + [ + "ZZ", + -13.383086204528809 + ], + [ + "▁Analog", + -13.38312816619873 + ], + [ + "▁Cobra", + -13.383139610290527 + ], + [ + "rion", + -13.383172035217283 + ], + [ + "▁$1.2", + -13.383305549621582 + ], + [ + "▁Dalton", + -13.38331127166748 + ], + [ + "RAY", + -13.38333225250244 + ], + [ + "▁calmly", + -13.383340835571287 + ], + [ + "▁tightened", + -13.38342571258545 + ], + [ + "▁eBooks", + -13.383475303649902 + ], + [ + "6.3", + -13.3836088180542 + ], + [ + "▁omni", + -13.383633613586426 + ], + [ + "▁anecdotes", + -13.383649826049805 + ], + [ + "erated", + -13.383736610412598 + ], + [ + "▁Extensions", + -13.383749961853027 + ], + [ + "▁Coll", + -13.383834838867188 + ], + [ + "▁hymn", + -13.383983612060549 + ], + [ + "▁err", + -13.383999824523926 + ], + [ + "▁NES", + -13.384114265441896 + ], + [ + "▁tumour", + -13.384129524230955 + ], + [ + "urg", + -13.384188652038574 + ], + [ + "010", + -13.384278297424316 + ], + [ + "▁needless", + -13.384331703186035 + ], + [ + "▁chickpeas", + -13.384364128112791 + ], + [ + "▁Ducks", + -13.384379386901855 + ], + [ + "▁Donate", + -13.384435653686523 + ], + [ + "▁incline", + -13.384461402893066 + ], + [ + "▁Navigator", + -13.38454532623291 + ], + [ + "▁Yosemite", + 
-13.384581565856934 + ], + [ + "▁allegiance", + -13.384581565856934 + ], + [ + "▁executor", + -13.384581565856934 + ], + [ + "▁monochrome", + -13.384587287902832 + ], + [ + "▁Emp", + -13.384603500366213 + ], + [ + "▁bluff", + -13.384671211242676 + ], + [ + "▁antitrust", + -13.384674072265623 + ], + [ + "▁eyelashes", + -13.384702682495115 + ], + [ + "▁Foley", + -13.384703636169434 + ], + [ + "▁Geological", + -13.38473892211914 + ], + [ + "▁2018-2019", + -13.38489055633545 + ], + [ + "▁Symbol", + -13.384915351867676 + ], + [ + "▁assaulted", + -13.38492202758789 + ], + [ + "▁billboard", + -13.384926795959473 + ], + [ + "▁RW", + -13.384937286376951 + ], + [ + "volt", + -13.384964942932127 + ], + [ + "mato", + -13.385018348693848 + ], + [ + "151", + -13.385045051574709 + ], + [ + "▁Mob", + -13.385101318359377 + ], + [ + "▁hamlet", + -13.385104179382324 + ], + [ + "udge", + -13.3851900100708 + ], + [ + "RIT", + -13.38522243499756 + ], + [ + "▁sl", + -13.38523006439209 + ], + [ + "▁Suspension", + -13.385376930236816 + ], + [ + "▁Farrell", + -13.385400772094728 + ], + [ + "▁understandably", + -13.385490417480469 + ], + [ + "▁Actions", + -13.385516166687012 + ], + [ + "▁Aga", + -13.385543823242188 + ], + [ + "shared", + -13.385544776916504 + ], + [ + "▁kickoff", + -13.385598182678224 + ], + [ + "▁Equality", + -13.385713577270508 + ], + [ + "▁Mimi", + -13.385743141174316 + ], + [ + "inda", + -13.385756492614746 + ], + [ + "▁Stranger", + -13.38578987121582 + ], + [ + "▁fishes", + -13.385878562927246 + ], + [ + "▁Tagged", + -13.385916709899902 + ], + [ + "AMP", + -13.386028289794922 + ], + [ + "▁Stair", + -13.386040687561035 + ], + [ + "▁Wherever", + -13.38608169555664 + ], + [ + "▁Uruguay", + -13.386085510253906 + ], + [ + "▁freshmen", + -13.386292457580566 + ], + [ + "scott", + -13.386360168457031 + ], + [ + "arc", + -13.386472702026367 + ], + [ + "▁Dre", + -13.38648796081543 + ], + [ + "▁oriental", + -13.386512756347656 + ], + [ + "▁Inland", + -13.38662052154541 + ], + [ + "▁minimizes", + -13.386637687683104 + ], + [ + "▁sweetener", + -13.386666297912598 + ], + [ + "▁anemia", + -13.386701583862305 + ], + [ + "▁accumulating", + -13.386808395385742 + ], + [ + "▁Cheltenham", + -13.38680934906006 + ], + [ + "▁McConnell", + -13.386825561523438 + ], + [ + "ан", + -13.386829376220703 + ], + [ + "▁Taiwanese", + -13.386841773986816 + ], + [ + "▁Watford", + -13.38684368133545 + ], + [ + "▁cooperating", + -13.386857986450195 + ], + [ + "avan", + -13.386862754821776 + ], + [ + "▁Blender", + -13.386893272399902 + ], + [ + "▁Thunderbird", + -13.386908531188965 + ], + [ + "▁companionship", + -13.386921882629396 + ], + [ + "polar", + -13.387005805969238 + ], + [ + "1%)", + -13.387045860290527 + ], + [ + "coffee", + -13.387066841125488 + ], + [ + "▁lice", + -13.387150764465332 + ], + [ + "Jump", + -13.387224197387695 + ], + [ + "Bonus", + -13.387276649475098 + ], + [ + "▁Jal", + -13.387309074401855 + ], + [ + "▁1861", + -13.38731861114502 + ], + [ + "▁restraints", + -13.38734531402588 + ], + [ + "▁Noida", + -13.387578010559082 + ], + [ + "▁weakening", + -13.38758945465088 + ], + [ + "▁Vinci", + -13.387689590454102 + ], + [ + "eca", + -13.387714385986328 + ], + [ + "▁watchdog", + -13.38772964477539 + ], + [ + "leader", + -13.387758255004885 + ], + [ + "▁Homeowners", + -13.38776397705078 + ], + [ + "rav", + -13.38781452178955 + ], + [ + "▁Indies", + -13.38798713684082 + ], + [ + "▁smashing", + -13.388147354125977 + ], + [ + "▁Muir", + -13.388216018676758 + ], + [ + "Compared", + -13.388266563415527 + ], + [ + "tho", 
+ -13.388283729553224 + ], + [ + "▁177", + -13.38833713531494 + ], + [ + "▁Bund", + -13.38837432861328 + ], + [ + "▁Sweep", + -13.388432502746582 + ], + [ + "▁SSH", + -13.388489723205566 + ], + [ + "gni", + -13.38856029510498 + ], + [ + "▁FAT", + -13.388578414916992 + ], + [ + "▁Cartridge", + -13.38865852355957 + ], + [ + "soul", + -13.38869571685791 + ], + [ + "▁eyebrow", + -13.388715744018556 + ], + [ + "▁sealant", + -13.388877868652344 + ], + [ + "▁Sap", + -13.388924598693848 + ], + [ + "▁anomalies", + -13.38904094696045 + ], + [ + "▁melanoma", + -13.38904094696045 + ], + [ + "▁sobriety", + -13.38904094696045 + ], + [ + "▁liberties", + -13.389042854309082 + ], + [ + "▁Scarborough", + -13.389044761657717 + ], + [ + "▁astronomical", + -13.389047622680664 + ], + [ + "▁proclamation", + -13.389049530029297 + ], + [ + "▁incarcerated", + -13.389065742492676 + ], + [ + "▁Jesuit", + -13.389087677001951 + ], + [ + "▁downfall", + -13.389092445373535 + ], + [ + "▁televised", + -13.389095306396484 + ], + [ + "▁Cath", + -13.389130592346191 + ], + [ + "dina", + -13.389134407043455 + ], + [ + "▁Silicone", + -13.389162063598633 + ], + [ + "ody", + -13.38917064666748 + ], + [ + "convert", + -13.389190673828123 + ], + [ + "Players", + -13.389265060424805 + ], + [ + "▁Hom", + -13.389290809631348 + ], + [ + "▁Buckley", + -13.389300346374512 + ], + [ + "Breakfast", + -13.389365196228027 + ], + [ + "Southern", + -13.389376640319824 + ], + [ + "▁onboarding", + -13.389460563659668 + ], + [ + "▁Equi", + -13.389464378356934 + ], + [ + "▁gracefully", + -13.389497756958008 + ], + [ + "kala", + -13.389549255371094 + ], + [ + "▁Calm", + -13.38956356048584 + ], + [ + "▁coke", + -13.389567375183104 + ], + [ + "▁Dare", + -13.389595985412598 + ], + [ + "▁Reese", + -13.3897123336792 + ], + [ + "IAL", + -13.389877319335938 + ], + [ + "▁curled", + -13.389900207519531 + ], + [ + "Solid", + -13.389907836914062 + ], + [ + "▁DOM", + -13.389935493469238 + ], + [ + "ogue", + -13.389974594116213 + ], + [ + "jee", + -13.389996528625488 + ], + [ + "LOL", + -13.39003562927246 + ], + [ + "LX", + -13.390089988708496 + ], + [ + "▁Ibrahim", + -13.390138626098633 + ], + [ + "▁manifested", + -13.3902587890625 + ], + [ + "luck", + -13.390398025512695 + ], + [ + "▁Dora", + -13.39052963256836 + ], + [ + "▁disseminate", + -13.390677452087402 + ], + [ + "▁leaky", + -13.390748023986816 + ], + [ + "strict", + -13.390753746032717 + ], + [ + "bits", + -13.390776634216309 + ], + [ + "orange", + -13.391016006469728 + ], + [ + "▁Bones", + -13.391016960144045 + ], + [ + "vez", + -13.391045570373535 + ], + [ + "▁Peoples", + -13.39109706878662 + ], + [ + "▁floated", + -13.391108512878418 + ], + [ + "▁graced", + -13.391188621520996 + ], + [ + "article", + -13.391213417053224 + ], + [ + "▁Lena", + -13.391216278076172 + ], + [ + "▁Dickinson", + -13.391278266906738 + ], + [ + "▁disparity", + -13.391278266906738 + ], + [ + "▁Masa", + -13.391279220581056 + ], + [ + "▁bureaucracy", + -13.391279220581056 + ], + [ + "▁bodice", + -13.39128303527832 + ], + [ + "temp", + -13.391289710998535 + ], + [ + "hander", + -13.391292572021484 + ], + [ + "▁dissolution", + -13.391303062438965 + ], + [ + "Ground", + -13.391340255737305 + ], + [ + "▁Northeastern", + -13.39137840270996 + ], + [ + "▁flaky", + -13.391395568847656 + ], + [ + "▁emitted", + -13.391396522521973 + ], + [ + "▁frontage", + -13.391621589660645 + ], + [ + "Yellow", + -13.391653060913086 + ], + [ + "▁Varsity", + -13.391678810119627 + ], + [ + "▁Reel", + -13.391701698303224 + ], + [ + "Judge", + 
-13.391705513000488 + ], + [ + "▁Dat", + -13.391706466674805 + ], + [ + "▁Supper", + -13.391783714294434 + ], + [ + "▁Tribal", + -13.39182949066162 + ], + [ + "▁LOS", + -13.39187240600586 + ], + [ + "ambi", + -13.391931533813477 + ], + [ + "▁Amos", + -13.392035484313965 + ], + [ + "▁TSA", + -13.39203929901123 + ], + [ + "eum", + -13.392069816589355 + ], + [ + "▁extractor", + -13.392102241516112 + ], + [ + "Interview", + -13.39211082458496 + ], + [ + "▁liquidation", + -13.39215850830078 + ], + [ + "yal", + -13.392169952392578 + ], + [ + "▁summarized", + -13.392169952392578 + ], + [ + "▁Evelyn", + -13.392206192016602 + ], + [ + "▁watermark", + -13.392256736755373 + ], + [ + "Sale", + -13.392351150512695 + ], + [ + "▁Viet", + -13.392352104187012 + ], + [ + "▁$1.3", + -13.392355918884276 + ], + [ + "Hence", + -13.392364501953123 + ], + [ + "▁1904", + -13.392374038696287 + ], + [ + "zio", + -13.39240550994873 + ], + [ + "pedia", + -13.392463684082031 + ], + [ + "▁preventable", + -13.39246654510498 + ], + [ + "▁combating", + -13.39247989654541 + ], + [ + "▁Italia", + -13.392523765563965 + ], + [ + "▁stalk", + -13.392589569091797 + ], + [ + "▁Gently", + -13.392613410949709 + ], + [ + "▁Hur", + -13.392655372619627 + ], + [ + "8.7", + -13.392717361450195 + ], + [ + "▁duet", + -13.39276123046875 + ], + [ + "▁Lal", + -13.392807006835938 + ], + [ + "sail", + -13.392831802368164 + ], + [ + "▁microorganisms", + -13.392849922180176 + ], + [ + "▁Fury", + -13.392850875854492 + ], + [ + "▁Pell", + -13.392987251281738 + ], + [ + "Opening", + -13.39301300048828 + ], + [ + "optional", + -13.393035888671877 + ], + [ + "▁timeout", + -13.393153190612791 + ], + [ + "▁Determine", + -13.393187522888184 + ], + [ + "▁Eternal", + -13.393195152282717 + ], + [ + "MIN", + -13.393230438232422 + ], + [ + "▁Dressing", + -13.39334201812744 + ], + [ + "▁tannins", + -13.3934326171875 + ], + [ + "gap", + -13.393503189086914 + ], + [ + "▁Bethesda", + -13.39352035522461 + ], + [ + "▁disparities", + -13.39352035522461 + ], + [ + "▁funniest", + -13.393521308898926 + ], + [ + "▁cactus", + -13.393525123596191 + ], + [ + "▁holiness", + -13.393527030944824 + ], + [ + "Days", + -13.39355182647705 + ], + [ + "▁Regis", + -13.39357089996338 + ], + [ + "▁cringe", + -13.39367961883545 + ], + [ + "▁regal", + -13.393684387207031 + ], + [ + "▁Nguyen", + -13.393762588500977 + ], + [ + "▁potted", + -13.393771171569824 + ], + [ + "▁Amount", + -13.393823623657228 + ], + [ + "Guest", + -13.393898010253906 + ], + [ + "hammer", + -13.39390754699707 + ], + [ + "▁Shields", + -13.394288063049316 + ], + [ + "▁falsely", + -13.394369125366213 + ], + [ + "▁vid", + -13.39439582824707 + ], + [ + "▁Wit", + -13.39454746246338 + ], + [ + "▁calculus", + -13.394603729248049 + ], + [ + "MOS", + -13.394608497619627 + ], + [ + "▁Attached", + -13.39478874206543 + ], + [ + "▁AIM", + -13.394946098327637 + ], + [ + "▁Ladder", + -13.395008087158203 + ], + [ + "▁germ", + -13.395047187805176 + ], + [ + "▁woody", + -13.395064353942873 + ], + [ + "18)", + -13.395065307617188 + ], + [ + "▁sweaty", + -13.395148277282717 + ], + [ + "▁Belly", + -13.395268440246582 + ], + [ + "award", + -13.395360946655272 + ], + [ + "▁Petr", + -13.395371437072754 + ], + [ + "▁impair", + -13.395379066467283 + ], + [ + "SSL", + -13.395400047302246 + ], + [ + "▁Applying", + -13.39540672302246 + ], + [ + "▁recital", + -13.39544677734375 + ], + [ + "▁Bett", + -13.39549732208252 + ], + [ + "▁Avatar", + -13.395566940307615 + ], + [ + "▁tentative", + -13.395594596862791 + ], + [ + "lima", + 
-13.395617485046388 + ], + [ + "▁infringe", + -13.395648956298828 + ], + [ + "465", + -13.395699501037598 + ], + [ + "lein", + -13.395699501037598 + ], + [ + "▁electorate", + -13.395767211914062 + ], + [ + "▁guinea", + -13.395777702331545 + ], + [ + "▁spurred", + -13.395794868469238 + ], + [ + "▁Pinoy", + -13.395798683166504 + ], + [ + "▁kiwi", + -13.395819664001465 + ], + [ + "▁stemming", + -13.39582347869873 + ], + [ + "▁Hayward", + -13.395831108093262 + ], + [ + "][", + -13.39584732055664 + ], + [ + "▁Cri", + -13.395870208740234 + ], + [ + "Lost", + -13.39592742919922 + ], + [ + "▁cultured", + -13.395928382873535 + ], + [ + "▁turntable", + -13.395959854125977 + ], + [ + "▁flank", + -13.39596462249756 + ], + [ + "▁foes", + -13.39600944519043 + ], + [ + "▁(22", + -13.396050453186035 + ], + [ + "▁minimized", + -13.396060943603516 + ], + [ + "8′′", + -13.396106719970703 + ], + [ + "Nor", + -13.396228790283203 + ], + [ + "gger", + -13.396245002746582 + ], + [ + "Category", + -13.396255493164062 + ], + [ + "approximately", + -13.396260261535645 + ], + [ + "▁Vail", + -13.396270751953123 + ], + [ + "▁Sour", + -13.396471977233888 + ], + [ + "▁stalking", + -13.396488189697266 + ], + [ + "comp", + -13.396605491638184 + ], + [ + "▁Cebu", + -13.39663028717041 + ], + [ + "▁photoshoot", + -13.396905899047852 + ], + [ + "oya", + -13.396970748901367 + ], + [ + "▁insiders", + -13.397006034851074 + ], + [ + "▁phased", + -13.397079467773438 + ], + [ + "▁callers", + -13.397147178649902 + ], + [ + "▁Confirmation", + -13.39727020263672 + ], + [ + "▁TAP", + -13.397374153137209 + ], + [ + "▁1982,", + -13.397409439086914 + ], + [ + "206", + -13.397462844848633 + ], + [ + "lamp", + -13.397509574890137 + ], + [ + "WHO", + -13.3975248336792 + ], + [ + "Trac", + -13.397539138793944 + ], + [ + "ideas", + -13.397624969482422 + ], + [ + "▁taper", + -13.397643089294434 + ], + [ + "▁Verified", + -13.397789001464844 + ], + [ + "itor", + -13.39780616760254 + ], + [ + "▁outbreaks", + -13.397830963134766 + ], + [ + "▁AUD", + -13.397831916809082 + ], + [ + "▁morally", + -13.397836685180664 + ], + [ + "▁Organizer", + -13.39785385131836 + ], + [ + "▁Confidence", + -13.39786148071289 + ], + [ + "▁operative", + -13.397921562194824 + ], + [ + "▁nurtured", + -13.397930145263672 + ], + [ + "▁DOWN", + -13.397950172424316 + ], + [ + "▁automaker", + -13.397966384887695 + ], + [ + "▁Kawasaki", + -13.39798355102539 + ], + [ + "▁Gloss", + -13.398008346557615 + ], + [ + "▁unsightly", + -13.398019790649414 + ], + [ + "▁Marseille", + -13.398025512695312 + ], + [ + "▁extrusion", + -13.398027420043944 + ], + [ + "1080", + -13.398030281066896 + ], + [ + "▁photoshop", + -13.39804744720459 + ], + [ + "▁Paige", + -13.398065567016602 + ], + [ + "▁Townsend", + -13.398077964782717 + ], + [ + "▁Humans", + -13.398107528686523 + ], + [ + "POS", + -13.398160934448242 + ], + [ + "▁revolve", + -13.398195266723633 + ], + [ + "▁Kodak", + -13.398202896118164 + ], + [ + "▁cliche", + -13.398276329040527 + ], + [ + "▁greased", + -13.398295402526855 + ], + [ + "Italian", + -13.398311614990234 + ], + [ + "▁Mast", + -13.398329734802246 + ], + [ + "▁Europeans", + -13.398370742797852 + ], + [ + "▁$800", + -13.398483276367188 + ], + [ + "Switch", + -13.398493766784668 + ], + [ + "similar", + -13.398539543151855 + ], + [ + "poll", + -13.39854335784912 + ], + [ + "Reilly", + -13.398577690124512 + ], + [ + "▁Luca", + -13.39859104156494 + ], + [ + "▁EPC", + -13.398643493652344 + ], + [ + "▁Fries", + -13.398661613464355 + ], + [ + "▁Albania", + -13.398699760437012 + ], + [ 
+ "kou", + -13.398860931396484 + ], + [ + "gging", + -13.398880958557127 + ], + [ + "guy", + -13.39888858795166 + ], + [ + "bler", + -13.398927688598633 + ], + [ + "▁torment", + -13.398933410644531 + ], + [ + "▁rundown", + -13.399003982543944 + ], + [ + "ulin", + -13.399012565612791 + ], + [ + "▁beasts", + -13.399099349975586 + ], + [ + "▁Yarn", + -13.399129867553713 + ], + [ + "▁ipad", + -13.399140357971191 + ], + [ + "independent", + -13.399141311645508 + ], + [ + "uki", + -13.399212837219238 + ], + [ + "▁Reward", + -13.399250030517578 + ], + [ + "▁tiered", + -13.399277687072754 + ], + [ + "bud", + -13.399375915527344 + ], + [ + "▁Demon", + -13.399503707885742 + ], + [ + "........", + -13.39963150024414 + ], + [ + "▁WATER", + -13.399667739868164 + ], + [ + "▁141", + -13.399757385253906 + ], + [ + "Fla", + -13.39976978302002 + ], + [ + "ANA", + -13.399849891662598 + ], + [ + "tique", + -13.399868965148926 + ], + [ + "▁Couture", + -13.400016784667969 + ], + [ + "▁Elon", + -13.400140762329102 + ], + [ + "KER", + -13.400217056274414 + ], + [ + "40.", + -13.40023136138916 + ], + [ + "▁aspirants", + -13.400277137756348 + ], + [ + "▁commemorative", + -13.400277137756348 + ], + [ + "▁unsolicited", + -13.400277137756348 + ], + [ + "▁authenticate", + -13.40028190612793 + ], + [ + "▁cortex", + -13.400290489196776 + ], + [ + "▁Oppo", + -13.400309562683104 + ], + [ + "▁Unicorn", + -13.400336265563965 + ], + [ + "▁instituted", + -13.400357246398926 + ], + [ + "▁plugging", + -13.400357246398926 + ], + [ + "▁Chatham", + -13.400379180908203 + ], + [ + "▁lint", + -13.40046215057373 + ], + [ + "▁natives", + -13.400498390197754 + ], + [ + "6.1", + -13.400517463684082 + ], + [ + "izo", + -13.40068817138672 + ], + [ + "▁kar", + -13.400723457336426 + ], + [ + "▁EDM", + -13.400738716125488 + ], + [ + "father", + -13.400754928588867 + ], + [ + "▁Harp", + -13.400781631469728 + ], + [ + "▁Beautifully", + -13.400851249694824 + ], + [ + "▁excels", + -13.400853157043455 + ], + [ + "▁[...]", + -13.40089988708496 + ], + [ + "▁iMac", + -13.40105438232422 + ], + [ + "Serving", + -13.401090621948242 + ], + [ + "▁Massive", + -13.40114688873291 + ], + [ + "▁Holm", + -13.401165008544922 + ], + [ + "▁Durant", + -13.4011869430542 + ], + [ + "▁hopper", + -13.40120792388916 + ], + [ + "▁creeping", + -13.401244163513184 + ], + [ + "ordinate", + -13.401333808898926 + ], + [ + "Known", + -13.401408195495604 + ], + [ + "▁CODE", + -13.40142059326172 + ], + [ + "▁SY", + -13.401429176330566 + ], + [ + "▁prerequisites", + -13.401483535766602 + ], + [ + "crusher", + -13.401630401611328 + ], + [ + "RH", + -13.401652336120604 + ], + [ + "itic", + -13.40166473388672 + ], + [ + "▁Edith", + -13.4016752243042 + ], + [ + "▁Naka", + -13.40174961090088 + ], + [ + "▁propecia", + -13.401938438415527 + ], + [ + "▁overseen", + -13.40199851989746 + ], + [ + "▁Salary", + -13.402012825012209 + ], + [ + "▁rodeo", + -13.40204620361328 + ], + [ + "hung", + -13.40212345123291 + ], + [ + "▁Redwood", + -13.402152061462402 + ], + [ + "▁Technicians", + -13.402180671691896 + ], + [ + "▁unpack", + -13.402305603027344 + ], + [ + "▁haze", + -13.402318000793455 + ], + [ + "25)", + -13.402390480041504 + ], + [ + "Across", + -13.402467727661133 + ], + [ + "▁7.30", + -13.40247917175293 + ], + [ + "Sec", + -13.402482986450195 + ], + [ + "▁illustrious", + -13.402539253234863 + ], + [ + "▁millennium", + -13.402539253234863 + ], + [ + "▁undeniably", + -13.402539253234863 + ], + [ + "▁unsettling", + -13.402539253234863 + ], + [ + "▁bullied", + -13.402557373046877 + ], + [ + 
"▁trusty", + -13.40259838104248 + ], + [ + "▁encoded", + -13.402639389038086 + ], + [ + "Earn", + -13.402776718139648 + ], + [ + "▁sabotage", + -13.40278434753418 + ], + [ + "▁Lobster", + -13.402853965759276 + ], + [ + "▁residues", + -13.402867317199709 + ], + [ + "▁roomy", + -13.402910232543944 + ], + [ + "▁Array", + -13.402968406677246 + ], + [ + "▁inhale", + -13.40305995941162 + ], + [ + "▁Nope", + -13.403074264526367 + ], + [ + "▁manned", + -13.403180122375488 + ], + [ + "Capital", + -13.403183937072754 + ], + [ + "▁typo", + -13.403212547302246 + ], + [ + "mania", + -13.403234481811523 + ], + [ + "▁START", + -13.403242111206056 + ], + [ + "▁bla", + -13.403255462646484 + ], + [ + "▁Directed", + -13.40328598022461 + ], + [ + "strip", + -13.403319358825684 + ], + [ + "▁ROCK", + -13.403395652770996 + ], + [ + "Asia", + -13.403409004211426 + ], + [ + "▁Worksheets", + -13.40342617034912 + ], + [ + "▁airway", + -13.40343189239502 + ], + [ + "FORD", + -13.403496742248535 + ], + [ + "▁Bowen", + -13.403534889221191 + ], + [ + "rta", + -13.40366268157959 + ], + [ + "▁BLUE", + -13.403664588928224 + ], + [ + "▁fidelity", + -13.403748512268066 + ], + [ + "Rick", + -13.403804779052734 + ], + [ + "▁Synthetic", + -13.403849601745604 + ], + [ + "Bird", + -13.403890609741213 + ], + [ + "boo", + -13.40390396118164 + ], + [ + "▁promoters", + -13.403945922851562 + ], + [ + "Leading", + -13.40396785736084 + ], + [ + "▁WG", + -13.403970718383787 + ], + [ + "▁Picnic", + -13.403983116149902 + ], + [ + "▁Perspectives", + -13.404149055480955 + ], + [ + "▁categorize", + -13.40422821044922 + ], + [ + "iPhone", + -13.404229164123535 + ], + [ + "ozo", + -13.404247283935549 + ], + [ + "▁bothering", + -13.404294967651367 + ], + [ + "Hy", + -13.404315948486328 + ], + [ + "▁solemn", + -13.404411315917969 + ], + [ + "▁370", + -13.404500007629396 + ], + [ + "343", + -13.404526710510254 + ], + [ + "▁CPC", + -13.40457534790039 + ], + [ + "▁duplicated", + -13.404611587524414 + ], + [ + "▁heals", + -13.40463924407959 + ], + [ + "▁Cay", + -13.404698371887209 + ], + [ + "▁ICD", + -13.404788970947266 + ], + [ + "▁db", + -13.404793739318848 + ], + [ + "▁nucleus", + -13.404807090759276 + ], + [ + "▁WARRANTIES", + -13.404808044433594 + ], + [ + "▁purified", + -13.404814720153809 + ], + [ + "▁unethical", + -13.404827117919922 + ], + [ + "▁spree", + -13.404833793640137 + ], + [ + "▁Bernardino", + -13.404836654663086 + ], + [ + "RING", + -13.404845237731934 + ], + [ + "▁AFP", + -13.404915809631348 + ], + [ + "▁Cyrus", + -13.40493106842041 + ], + [ + "haw", + -13.404951095581056 + ], + [ + "4%)", + -13.40499496459961 + ], + [ + "▁spanish", + -13.405014038085938 + ], + [ + "▁han", + -13.405030250549316 + ], + [ + "▁elevations", + -13.40506362915039 + ], + [ + "Asked", + -13.40510082244873 + ], + [ + "ege", + -13.405158996582031 + ], + [ + "atz", + -13.405250549316406 + ], + [ + "▁Rockets", + -13.405271530151367 + ], + [ + "▁Tablets", + -13.405281066894531 + ], + [ + "▁marsh", + -13.405305862426758 + ], + [ + "▁chanting", + -13.405418395996094 + ], + [ + "▁CRC", + -13.405430793762209 + ], + [ + "▁430", + -13.405559539794922 + ], + [ + "▁305", + -13.405563354492188 + ], + [ + "ail", + -13.405739784240724 + ], + [ + "clo", + -13.405765533447266 + ], + [ + "▁Amir", + -13.405805587768556 + ], + [ + "REC", + -13.405807495117188 + ], + [ + "▁Jamaican", + -13.405820846557615 + ], + [ + "▁hunted", + -13.405956268310549 + ], + [ + "wearing", + -13.40616226196289 + ], + [ + "▁Listings", + -13.406195640563965 + ], + [ + "jobs", + -13.40621852874756 
+ ], + [ + "▁Pul", + -13.406230926513672 + ], + [ + "▁Beginners", + -13.406238555908203 + ], + [ + "▁objectively", + -13.406289100646973 + ], + [ + "▁considerate", + -13.40633487701416 + ], + [ + "▁quake", + -13.406423568725586 + ], + [ + "uza", + -13.406502723693848 + ], + [ + "▁injure", + -13.406634330749512 + ], + [ + "▁passports", + -13.406641960144045 + ], + [ + "----", + -13.406694412231444 + ], + [ + "▁compromises", + -13.406746864318848 + ], + [ + "▁Schi", + -13.406810760498049 + ], + [ + "15)", + -13.406869888305664 + ], + [ + "▁chemically", + -13.407026290893556 + ], + [ + "▁medic", + -13.40703010559082 + ], + [ + "▁Hussein", + -13.407081604003906 + ], + [ + "▁contradictory", + -13.407085418701172 + ], + [ + "▁Pun", + -13.407124519348145 + ], + [ + "▁Abuja", + -13.407169342041016 + ], + [ + "▁Surgeon", + -13.407206535339355 + ], + [ + "▁ballast", + -13.407292366027832 + ], + [ + "▁Baylor", + -13.407304763793944 + ], + [ + "▁callback", + -13.40731143951416 + ], + [ + "▁sketching", + -13.40732479095459 + ], + [ + "▁Guided", + -13.407355308532717 + ], + [ + "hate", + -13.407391548156738 + ], + [ + "▁Casting", + -13.407435417175291 + ], + [ + "▁(23", + -13.40744686126709 + ], + [ + "33.", + -13.407620429992676 + ], + [ + "▁bestowed", + -13.40764331817627 + ], + [ + "pitch", + -13.407742500305176 + ], + [ + "sang", + -13.407822608947754 + ], + [ + "▁Married", + -13.40783977508545 + ], + [ + "Analysis", + -13.407876968383787 + ], + [ + "▁overheating", + -13.407891273498535 + ], + [ + "iden", + -13.407910346984863 + ], + [ + "oak", + -13.408031463623049 + ], + [ + "wart", + -13.408116340637209 + ], + [ + "▁examiner", + -13.408188819885254 + ], + [ + "▁Clint", + -13.408218383789062 + ], + [ + "▁Starr", + -13.408223152160645 + ], + [ + "rhythm", + -13.408236503601074 + ], + [ + "(1", + -13.408286094665527 + ], + [ + "▁drown", + -13.408461570739746 + ], + [ + "illon", + -13.408581733703612 + ], + [ + "Rate", + -13.408614158630373 + ], + [ + "kum", + -13.408671379089355 + ], + [ + "▁legalization", + -13.408692359924316 + ], + [ + "▁appellate", + -13.408738136291504 + ], + [ + "5-9", + -13.40877914428711 + ], + [ + "▁PSI", + -13.408796310424805 + ], + [ + "▁Crush", + -13.408843994140623 + ], + [ + "▁KD", + -13.408886909484863 + ], + [ + "▁overpriced", + -13.408935546875 + ], + [ + "▁Morph", + -13.409050941467283 + ], + [ + "gre", + -13.409152030944824 + ], + [ + "▁SVG", + -13.409172058105469 + ], + [ + "▁precinct", + -13.409184455871582 + ], + [ + "▁Gli", + -13.40920352935791 + ], + [ + "▁tat", + -13.409215927124023 + ], + [ + "▁React", + -13.409279823303224 + ], + [ + "147", + -13.409300804138184 + ], + [ + "▁causal", + -13.409326553344728 + ], + [ + "▁LASIK", + -13.409358024597168 + ], + [ + "▁Broward", + -13.409395217895508 + ], + [ + "▁keto", + -13.40940284729004 + ], + [ + "▁royalties", + -13.409409523010254 + ], + [ + "▁whereabouts", + -13.409412384033203 + ], + [ + "▁Gemini", + -13.409420013427734 + ], + [ + "▁AWESOME", + -13.409433364868164 + ], + [ + "▁discretionary", + -13.409448623657228 + ], + [ + "▁relegated", + -13.409460067749023 + ], + [ + "castle", + -13.409550666809082 + ], + [ + "▁liberals", + -13.409687042236328 + ], + [ + "▁Jiang", + -13.409734725952148 + ], + [ + "060", + -13.409786224365234 + ], + [ + "▁Kata", + -13.409797668457031 + ], + [ + "(6)", + -13.409812927246094 + ], + [ + "XX", + -13.409814834594728 + ], + [ + "▁Qing", + -13.409830093383787 + ], + [ + "▁Chilean", + -13.409860610961914 + ], + [ + "▁Tho", + -13.409862518310549 + ], + [ + "▁LINE", + 
-13.409887313842772 + ], + [ + "▁audiobook", + -13.409892082214355 + ], + [ + "▁Afternoon", + -13.409893035888672 + ], + [ + "▁2-5", + -13.410058975219728 + ], + [ + "knowledge", + -13.41010570526123 + ], + [ + "▁termite", + -13.410120010375977 + ], + [ + "Cyber", + -13.410388946533203 + ], + [ + "▁Contents", + -13.410430908203123 + ], + [ + "▁Gareth", + -13.41049861907959 + ], + [ + "▁racking", + -13.410614013671877 + ], + [ + "▁(2014).", + -13.410696029663086 + ], + [ + "▁inverse", + -13.410711288452148 + ], + [ + "▁Quin", + -13.410762786865234 + ], + [ + "TIME", + -13.410844802856444 + ], + [ + "401", + -13.410859107971191 + ], + [ + "▁lipid", + -13.410889625549316 + ], + [ + "▁Wines", + -13.410911560058594 + ], + [ + "▁orphan", + -13.410948753356934 + ], + [ + "Ron", + -13.411014556884766 + ], + [ + "▁antics", + -13.411026000976562 + ], + [ + "▁100+", + -13.41106128692627 + ], + [ + "amo", + -13.411088943481444 + ], + [ + "dri", + -13.411104202270508 + ], + [ + "ades", + -13.411147117614746 + ], + [ + "Song", + -13.411222457885742 + ], + [ + "▁marshmallows", + -13.411285400390623 + ], + [ + "▁diplomat", + -13.411297798156738 + ], + [ + "▁newfound", + -13.411382675170898 + ], + [ + "▁Rudd", + -13.411453247070312 + ], + [ + "▁deceptive", + -13.41146183013916 + ], + [ + "▁bridesmaid", + -13.411507606506348 + ], + [ + "▁silhouettes", + -13.41151523590088 + ], + [ + "▁Legendary", + -13.41152572631836 + ], + [ + "▁breakthroughs", + -13.411548614501951 + ], + [ + "mask", + -13.411574363708496 + ], + [ + "jal", + -13.411584854125977 + ], + [ + "▁clipping", + -13.41161060333252 + ], + [ + "▁Burgundy", + -13.411641120910645 + ], + [ + "▁clarinet", + -13.411641120910645 + ], + [ + "▁ketchup", + -13.411641120910645 + ], + [ + "▁notwithstanding", + -13.411641120910645 + ], + [ + "▁stipend", + -13.411641120910645 + ], + [ + "▁zodiac", + -13.411641120910645 + ], + [ + "▁synopsis", + -13.41164207458496 + ], + [ + "Builder", + -13.41166877746582 + ], + [ + "▁Toolkit", + -13.41166877746582 + ], + [ + "▁starvation", + -13.411673545837402 + ], + [ + "▁motorized", + -13.411690711975098 + ], + [ + "▁syn", + -13.411741256713867 + ], + [ + "▁Clearwater", + -13.411747932434082 + ], + [ + "▁afflicted", + -13.411783218383787 + ], + [ + "▁Pixar", + -13.411786079406738 + ], + [ + "eira", + -13.411798477172852 + ], + [ + "▁Delay", + -13.411825180053713 + ], + [ + "▁KEY", + -13.411828994750977 + ], + [ + "▁routed", + -13.411921501159668 + ], + [ + "▁condenser", + -13.411949157714844 + ], + [ + "▁Cosplay", + -13.411951065063477 + ], + [ + "▁Voyager", + -13.41198444366455 + ], + [ + "▁Useful", + -13.411985397338867 + ], + [ + "▁Colts", + -13.412012100219728 + ], + [ + "caps", + -13.412035942077637 + ], + [ + "Bro", + -13.412176132202148 + ], + [ + "▁spontaneously", + -13.412220001220703 + ], + [ + "▁Merlin", + -13.412221908569336 + ], + [ + "▁prematurely", + -13.412261009216309 + ], + [ + "▁Polymer", + -13.412304878234863 + ], + [ + "ACS", + -13.412368774414062 + ], + [ + "▁Soho", + -13.41245460510254 + ], + [ + "▁MPEG", + -13.41256046295166 + ], + [ + "▁Pant", + -13.41261863708496 + ], + [ + "Nature", + -13.412632942199709 + ], + [ + "Driving", + -13.412652969360352 + ], + [ + "Lady", + -13.412741661071776 + ], + [ + "▁Buffett", + -13.41275691986084 + ], + [ + "wee", + -13.412764549255373 + ], + [ + "▁Vu", + -13.412800788879396 + ], + [ + "oza", + -13.412938117980955 + ], + [ + "▁Marx", + -13.412999153137209 + ], + [ + "▁poolside", + -13.413018226623535 + ], + [ + "island", + -13.413030624389648 + ], + [ + "greg", + 
-13.41306495666504 + ], + [ + "bau", + -13.413101196289062 + ], + [ + "▁sweetened", + -13.413214683532717 + ], + [ + "Scale", + -13.413315773010254 + ], + [ + "▁CK", + -13.413349151611328 + ], + [ + "▁fueling", + -13.41335105895996 + ], + [ + "▁OWN", + -13.413463592529297 + ], + [ + "▁Roh", + -13.413568496704102 + ], + [ + "ility", + -13.413580894470217 + ], + [ + "ит", + -13.413601875305176 + ], + [ + "ATIONS", + -13.413719177246094 + ], + [ + "▁comedic", + -13.413793563842772 + ], + [ + "▁granular", + -13.41391658782959 + ], + [ + "▁incarceration", + -13.413931846618652 + ], + [ + "INA", + -13.41400146484375 + ], + [ + "▁SCHOOL", + -13.414006233215332 + ], + [ + "▁Madden", + -13.414052963256836 + ], + [ + "▁revolt", + -13.41407871246338 + ], + [ + "▁fossils", + -13.414103507995604 + ], + [ + "▁correctness", + -13.414114952087402 + ], + [ + "▁Fei", + -13.414159774780272 + ], + [ + "chie", + -13.414226531982422 + ], + [ + "Highly", + -13.4142484664917 + ], + [ + "▁Williamsburg", + -13.414284706115724 + ], + [ + "▁255", + -13.414324760437012 + ], + [ + "Grid", + -13.414389610290527 + ], + [ + "▁trunks", + -13.414490699768066 + ], + [ + "▁heartburn", + -13.414575576782228 + ], + [ + "▁Bare", + -13.414587020874023 + ], + [ + "goo", + -13.414640426635742 + ], + [ + "▁artificially", + -13.414645195007324 + ], + [ + "▁Betting", + -13.41468906402588 + ], + [ + "▁CCC", + -13.414709091186523 + ], + [ + "▁jailed", + -13.414752960205078 + ], + [ + "▁Mandy", + -13.41476821899414 + ], + [ + "asso", + -13.414801597595217 + ], + [ + "▁trapping", + -13.41481113433838 + ], + [ + "Begin", + -13.414859771728516 + ], + [ + "▁MOST", + -13.414888381958008 + ], + [ + "▁limp", + -13.414920806884766 + ], + [ + "▁Backpack", + -13.414949417114258 + ], + [ + "▁hiked", + -13.41496467590332 + ], + [ + "▁Terror", + -13.41501522064209 + ], + [ + "▁Clifford", + -13.41504192352295 + ], + [ + "▁Groupon", + -13.415088653564451 + ], + [ + "/05/", + -13.41508960723877 + ], + [ + "Fr", + -13.41508960723877 + ], + [ + "▁pendants", + -13.41509246826172 + ], + [ + "▁yolks", + -13.415101051330566 + ], + [ + "▁Eau", + -13.415189743041992 + ], + [ + "nock", + -13.415206909179688 + ], + [ + "IAN", + -13.415257453918455 + ], + [ + "▁Mateo", + -13.415297508239746 + ], + [ + "/12/", + -13.415339469909668 + ], + [ + "▁Stacey", + -13.415435791015623 + ], + [ + "Fact", + -13.415499687194824 + ], + [ + "▁artefacts", + -13.41554069519043 + ], + [ + "▁Pics", + -13.415573120117188 + ], + [ + "4.4", + -13.415575981140137 + ], + [ + "▁ONLINE", + -13.415603637695312 + ], + [ + "▁intersections", + -13.41563892364502 + ], + [ + "▁endorsements", + -13.41568660736084 + ], + [ + "▁Mixing", + -13.4157075881958 + ], + [ + "sleep", + -13.41574764251709 + ], + [ + "▁Feeder", + -13.415855407714844 + ], + [ + "▁Dumps", + -13.415931701660156 + ], + [ + "Logic", + -13.41594123840332 + ], + [ + "▁Gibbs", + -13.4159517288208 + ], + [ + "▁dysfunctional", + -13.415993690490724 + ], + [ + "▁benchmarking", + -13.41603660583496 + ], + [ + "▁Braun", + -13.416048049926758 + ], + [ + "UTC", + -13.416145324707031 + ], + [ + "▁mover", + -13.416168212890623 + ], + [ + "▁Aroma", + -13.416170120239258 + ], + [ + "▁exited", + -13.41617202758789 + ], + [ + "▁Wyatt", + -13.41619110107422 + ], + [ + "▁Cialis", + -13.41623306274414 + ], + [ + "ough", + -13.416266441345217 + ], + [ + "▁mastermind", + -13.416279792785645 + ], + [ + "▁Wordpress", + -13.416300773620604 + ], + [ + "▁Denny", + -13.416388511657717 + ], + [ + "▁CTO", + -13.416412353515623 + ], + [ + "▁dimmer", + 
-13.416443824768066 + ], + [ + "▁accessorize", + -13.416478157043455 + ], + [ + "▁Midway", + -13.416540145874023 + ], + [ + "▁overcame", + -13.416547775268556 + ], + [ + "▁Firestone", + -13.416574478149414 + ], + [ + "▁Clarence", + -13.416604042053224 + ], + [ + "▁dramas", + -13.41661548614502 + ], + [ + "inside", + -13.416675567626951 + ], + [ + "▁paleo", + -13.416692733764648 + ], + [ + "▁soooo", + -13.416769981384276 + ], + [ + "▁Souls", + -13.416815757751465 + ], + [ + "▁Gras", + -13.416885375976562 + ], + [ + "▁1909", + -13.416899681091309 + ], + [ + "▁headband", + -13.41696548461914 + ], + [ + "▁seasonally", + -13.417176246643066 + ], + [ + "Vintage", + -13.417234420776367 + ], + [ + "religious", + -13.41725254058838 + ], + [ + "▁swallowing", + -13.417284965515137 + ], + [ + "Cur", + -13.417319297790527 + ], + [ + "-2)", + -13.41738224029541 + ], + [ + "▁$90", + -13.41739273071289 + ], + [ + "80,000", + -13.417439460754396 + ], + [ + "▁refundable", + -13.417441368103027 + ], + [ + "626", + -13.417454719543455 + ], + [ + "▁Bead", + -13.417495727539062 + ], + [ + "▁Zak", + -13.41755199432373 + ], + [ + "▁strata", + -13.417633056640623 + ], + [ + "▁dispense", + -13.417683601379396 + ], + [ + "▁inscription", + -13.41775608062744 + ], + [ + "▁Increasing", + -13.41784954071045 + ], + [ + "▁Donation", + -13.41787338256836 + ], + [ + "lating", + -13.417950630187988 + ], + [ + "itation", + -13.417954444885254 + ], + [ + "▁sniper", + -13.417998313903809 + ], + [ + "enstein", + -13.418033599853516 + ], + [ + "▁Tad", + -13.418099403381348 + ], + [ + "▁byte", + -13.418099403381348 + ], + [ + "▁Attendees", + -13.418278694152832 + ], + [ + "▁Superstar", + -13.418292045593262 + ], + [ + "▁CRA", + -13.418482780456545 + ], + [ + "▁Ortiz", + -13.418484687805176 + ], + [ + "utter", + -13.418502807617188 + ], + [ + "▁unauthorised", + -13.418521881103516 + ], + [ + "▁Charitable", + -13.41852855682373 + ], + [ + "amazon", + -13.418551445007324 + ], + [ + "▁foothills", + -13.418557167053224 + ], + [ + "▁whipping", + -13.41856288909912 + ], + [ + "▁invests", + -13.418593406677246 + ], + [ + "▁sane", + -13.418612480163574 + ], + [ + "▁infrastructures", + -13.418725967407228 + ], + [ + "▁172", + -13.418787956237791 + ], + [ + "▁Mandela", + -13.418789863586426 + ], + [ + "▁950", + -13.418792724609377 + ], + [ + "▁Relative", + -13.418804168701172 + ], + [ + "▁Bethel", + -13.418825149536133 + ], + [ + "▁worldly", + -13.418973922729492 + ], + [ + "duction", + -13.41930866241455 + ], + [ + "6000", + -13.419363975524902 + ], + [ + "doctoral", + -13.419411659240724 + ], + [ + "INGS", + -13.419468879699709 + ], + [ + "Doing", + -13.419546127319336 + ], + [ + "Warning", + -13.41960906982422 + ], + [ + "Patrick", + -13.4196138381958 + ], + [ + "▁stopper", + -13.41966152191162 + ], + [ + "Tagged", + -13.419678688049316 + ], + [ + "▁Perl", + -13.419721603393556 + ], + [ + "protected", + -13.419767379760742 + ], + [ + "▁versed", + -13.419848442077637 + ], + [ + "paint", + -13.419901847839355 + ], + [ + "Spa", + -13.42000675201416 + ], + [ + "nary", + -13.420038223266602 + ], + [ + "▁overturned", + -13.420051574707031 + ], + [ + "▁Qur", + -13.420108795166016 + ], + [ + "▁Leak", + -13.42015266418457 + ], + [ + "▁Gregg", + -13.420228004455566 + ], + [ + "▁Corning", + -13.42038631439209 + ], + [ + "▁Brig", + -13.420624732971191 + ], + [ + "▁iris", + -13.420625686645508 + ], + [ + "▁diocese", + -13.420825958251951 + ], + [ + "▁miscellaneous", + -13.420825958251951 + ], + [ + "▁propagation", + -13.420825958251951 + ], + [ + 
"▁reggae", + -13.420825958251951 + ], + [ + "▁splendor", + -13.42082691192627 + ], + [ + "▁troupe", + -13.4208345413208 + ], + [ + "▁Harrisburg", + -13.42088222503662 + ], + [ + "▁Grease", + -13.420916557312012 + ], + [ + "▁Fifty", + -13.420933723449709 + ], + [ + "▁sitcom", + -13.421011924743652 + ], + [ + "dium", + -13.42104721069336 + ], + [ + "▁Bellow", + -13.421074867248535 + ], + [ + "voting", + -13.421083450317385 + ], + [ + "▁commentator", + -13.421086311340332 + ], + [ + "▁shui", + -13.421093940734863 + ], + [ + "▁Mut", + -13.421114921569824 + ], + [ + "▁Zan", + -13.42116928100586 + ], + [ + "▁gravitational", + -13.42119026184082 + ], + [ + "▁hoodie", + -13.421271324157717 + ], + [ + "▁hymns", + -13.421290397644045 + ], + [ + "▁cheeky", + -13.421459197998049 + ], + [ + "buying", + -13.421491622924805 + ], + [ + "▁ulcers", + -13.421588897705078 + ], + [ + "▁dwellings", + -13.42159366607666 + ], + [ + "▁gypsum", + -13.421648979187012 + ], + [ + "▁swallowed", + -13.421660423278809 + ], + [ + "▁gar", + -13.421704292297363 + ], + [ + "▁licences", + -13.42171859741211 + ], + [ + "▁orderly", + -13.421725273132324 + ], + [ + "▁Vene", + -13.421743392944336 + ], + [ + "Weather", + -13.421903610229492 + ], + [ + "▁projecting", + -13.42192554473877 + ], + [ + "liv", + -13.421952247619627 + ], + [ + "savvy", + -13.421953201293944 + ], + [ + "▁Kobe", + -13.42196559906006 + ], + [ + "taught", + -13.422041893005373 + ], + [ + "plication", + -13.422061920166016 + ], + [ + "literally", + -13.422076225280762 + ], + [ + "▁Joker", + -13.422101974487305 + ], + [ + "▁disrespect", + -13.422198295593262 + ], + [ + "▁Lav", + -13.422205924987791 + ], + [ + "-51", + -13.422211647033691 + ], + [ + "idine", + -13.422473907470703 + ], + [ + "▁dumplings", + -13.422487258911133 + ], + [ + "middle", + -13.422547340393066 + ], + [ + "element", + -13.422616004943848 + ], + [ + "▁Nora", + -13.422637939453123 + ], + [ + "SEL", + -13.42264175415039 + ], + [ + "lange", + -13.422670364379885 + ], + [ + "5.1", + -13.42270851135254 + ], + [ + "▁dissent", + -13.422818183898926 + ], + [ + "▁restores", + -13.422877311706545 + ], + [ + "▁1981.", + -13.422950744628906 + ], + [ + "▁52-", + -13.422954559326172 + ], + [ + "▁Polished", + -13.423035621643066 + ], + [ + "▁MGM", + -13.423041343688965 + ], + [ + "▁cracker", + -13.423101425170898 + ], + [ + "▁Liquor", + -13.423135757446287 + ], + [ + "▁heaviest", + -13.423135757446287 + ], + [ + "▁dissatisfaction", + -13.423136711120604 + ], + [ + "▁archiving", + -13.423138618469238 + ], + [ + "▁allegation", + -13.423142433166504 + ], + [ + "▁vaccinated", + -13.423255920410156 + ], + [ + "▁Gerard", + -13.423295974731444 + ], + [ + "▁poppy", + -13.423367500305176 + ], + [ + "▁postdoctoral", + -13.423378944396973 + ], + [ + "▁DK", + -13.42341136932373 + ], + [ + "/21", + -13.42343044281006 + ], + [ + "ppy", + -13.423466682434082 + ], + [ + "▁compounded", + -13.423480033874512 + ], + [ + "▁Inspirations", + -13.42354679107666 + ], + [ + "▁Cozy", + -13.423617362976074 + ], + [ + "▁Soda", + -13.42361831665039 + ], + [ + "▁jaws", + -13.423625946044922 + ], + [ + "▁Towards", + -13.423745155334473 + ], + [ + "▁flatten", + -13.423747062683104 + ], + [ + "▁preached", + -13.423850059509276 + ], + [ + "▁cellulose", + -13.424057960510254 + ], + [ + "▁topography", + -13.4241304397583 + ], + [ + "smoking", + -13.424336433410645 + ], + [ + "283", + -13.42433738708496 + ], + [ + "/26", + -13.424341201782228 + ], + [ + "▁meteor", + -13.42442512512207 + ], + [ + "Recommended", + -13.424429893493652 + ], + 
[ + "303", + -13.424642562866213 + ], + [ + "▁wicket", + -13.424654960632324 + ], + [ + "Tab", + -13.42466640472412 + ], + [ + "Chicken", + -13.42466926574707 + ], + [ + "▁tortured", + -13.424744606018066 + ], + [ + "▁Exploring", + -13.424796104431152 + ], + [ + "▁revitalize", + -13.424848556518556 + ], + [ + "putting", + -13.424860000610352 + ], + [ + "605", + -13.424915313720703 + ], + [ + "204", + -13.42507553100586 + ], + [ + "▁increment", + -13.425178527832031 + ], + [ + "▁guesthouse", + -13.425189971923828 + ], + [ + "▁Inca", + -13.425289154052734 + ], + [ + "▁paradox", + -13.425302505493164 + ], + [ + "▁Jewellery", + -13.425357818603516 + ], + [ + "rand", + -13.425358772277832 + ], + [ + "OV", + -13.425374031066896 + ], + [ + "995", + -13.42538356781006 + ], + [ + "formal", + -13.425406455993652 + ], + [ + "▁Clever", + -13.425406455993652 + ], + [ + "aph", + -13.425429344177246 + ], + [ + "▁drape", + -13.425444602966309 + ], + [ + "▁Sunderland", + -13.425450325012209 + ], + [ + "▁kidnapping", + -13.425456047058104 + ], + [ + "▁Chestnut", + -13.425460815429688 + ], + [ + "throw", + -13.425466537475586 + ], + [ + "▁abode", + -13.425481796264648 + ], + [ + "▁Offshore", + -13.425493240356444 + ], + [ + "▁Apprenticeship", + -13.425512313842772 + ], + [ + "▁Honorary", + -13.425521850585938 + ], + [ + "▁offend", + -13.42553997039795 + ], + [ + "▁townhouse", + -13.42555332183838 + ], + [ + "▁Parliamentary", + -13.425564765930176 + ], + [ + "▁JO", + -13.425577163696287 + ], + [ + "▁famine", + -13.425601959228516 + ], + [ + "Record", + -13.425609588623049 + ], + [ + "▁6-4", + -13.425629615783691 + ], + [ + "▁couture", + -13.42564296722412 + ], + [ + "Industrial", + -13.425699234008787 + ], + [ + "▁Ely", + -13.425707817077637 + ], + [ + "fiber", + -13.425734519958496 + ], + [ + "chee", + -13.425837516784668 + ], + [ + "▁drugstore", + -13.425847053527832 + ], + [ + "▁Surprisingly", + -13.425856590270996 + ], + [ + "▁Crete", + -13.425865173339844 + ], + [ + "Union", + -13.425914764404297 + ], + [ + "Spy", + -13.425950050354004 + ], + [ + "▁Plates", + -13.425969123840332 + ], + [ + "marine", + -13.4260835647583 + ], + [ + "▁slag", + -13.426091194152832 + ], + [ + "▁claws", + -13.426100730895996 + ], + [ + "breaker", + -13.426127433776855 + ], + [ + "▁Pha", + -13.426260948181152 + ], + [ + "390", + -13.426309585571287 + ], + [ + "▁symbolizes", + -13.42632293701172 + ], + [ + "▁Porcelain", + -13.426325798034668 + ], + [ + "-37", + -13.426474571228027 + ], + [ + "▁cereals", + -13.42652416229248 + ], + [ + "▁Amer", + -13.426676750183104 + ], + [ + "Success", + -13.426678657531738 + ], + [ + "Submitted", + -13.4266939163208 + ], + [ + "Introduction", + -13.4266996383667 + ], + [ + "IQ", + -13.426716804504396 + ], + [ + "ATT", + -13.42672634124756 + ], + [ + "nard", + -13.426796913146973 + ], + [ + "Andy", + -13.426831245422363 + ], + [ + "ules", + -13.42693042755127 + ], + [ + "sberg", + -13.426934242248535 + ], + [ + "▁Blogs", + -13.426939010620115 + ], + [ + "▁Agro", + -13.426959037780762 + ], + [ + "▁2013)", + -13.426994323730469 + ], + [ + "rne", + -13.427007675170898 + ], + [ + "▁Homo", + -13.427129745483398 + ], + [ + "▁Hydra", + -13.427146911621094 + ], + [ + "▁revoke", + -13.427210807800291 + ], + [ + "▁Hana", + -13.427250862121582 + ], + [ + "▁Imam", + -13.427258491516112 + ], + [ + "▁forfeit", + -13.427444458007812 + ], + [ + "▁broadcasters", + -13.427577018737791 + ], + [ + "lka", + -13.427624702453612 + ], + [ + "FIN", + -13.427666664123535 + ], + [ + "▁6-3", + -13.427722930908203 + ], + [ 
+ "Fat", + -13.427730560302734 + ], + [ + "▁Buchanan", + -13.427770614624023 + ], + [ + "▁Pulitzer", + -13.427770614624023 + ], + [ + "▁discrepancy", + -13.427770614624023 + ], + [ + "▁twilight", + -13.427775382995604 + ], + [ + "▁nodded", + -13.427780151367188 + ], + [ + "▁stabbed", + -13.427788734436035 + ], + [ + "▁Bartlett", + -13.427791595458984 + ], + [ + "▁Velo", + -13.427867889404297 + ], + [ + "▁Frequency", + -13.427895545959473 + ], + [ + "meta", + -13.427910804748535 + ], + [ + "▁improv", + -13.427918434143066 + ], + [ + "▁praising", + -13.427945137023926 + ], + [ + "ALE", + -13.427966117858888 + ], + [ + "▁3-6", + -13.427974700927734 + ], + [ + "upa", + -13.42797565460205 + ], + [ + "▁Trainee", + -13.42812728881836 + ], + [ + "lina", + -13.428211212158203 + ], + [ + "▁thickened", + -13.428217887878418 + ], + [ + "▁HIM", + -13.42827606201172 + ], + [ + "▁Creations", + -13.428343772888184 + ], + [ + "▁Waterford", + -13.42834758758545 + ], + [ + "▁gunshot", + -13.42843246459961 + ], + [ + "▁hallways", + -13.42843246459961 + ], + [ + "▁superiority", + -13.428455352783203 + ], + [ + "▁Sent", + -13.428488731384276 + ], + [ + "methyl", + -13.428513526916504 + ], + [ + "▁Yorkers", + -13.428646087646484 + ], + [ + "▁keyless", + -13.428746223449709 + ], + [ + "1.9", + -13.428759574890137 + ], + [ + "▁webmaster", + -13.428759574890137 + ], + [ + "▁leverages", + -13.428807258605955 + ], + [ + "▁Appraisal", + -13.428832054138184 + ], + [ + "▁brews", + -13.428895950317385 + ], + [ + "▁ordinances", + -13.428946495056152 + ], + [ + "960", + -13.429054260253906 + ], + [ + "Boston", + -13.429119110107422 + ], + [ + "▁evaluates", + -13.429131507873535 + ], + [ + "▁masked", + -13.429161071777344 + ], + [ + "▁sniff", + -13.429210662841797 + ], + [ + "▁Dirt", + -13.429221153259276 + ], + [ + "containing", + -13.429227828979492 + ], + [ + "▁Myth", + -13.429276466369627 + ], + [ + "▁Americana", + -13.429465293884276 + ], + [ + "Trying", + -13.429598808288574 + ], + [ + "enia", + -13.429697036743164 + ], + [ + "▁springtime", + -13.429770469665527 + ], + [ + "▁imbalances", + -13.429774284362791 + ], + [ + "▁162", + -13.42986297607422 + ], + [ + "▁Treatments", + -13.429869651794434 + ], + [ + "▁Ef", + -13.42995262145996 + ], + [ + "Amy", + -13.429969787597656 + ], + [ + "▁settles", + -13.430014610290527 + ], + [ + "▁Sega", + -13.430023193359377 + ], + [ + "▁Almighty", + -13.430095672607422 + ], + [ + "▁Ipswich", + -13.430095672607422 + ], + [ + "▁amplitude", + -13.430095672607422 + ], + [ + "▁serotonin", + -13.430095672607422 + ], + [ + "▁turbulence", + -13.430095672607422 + ], + [ + "▁CCS", + -13.430098533630373 + ], + [ + "▁elliptical", + -13.43010139465332 + ], + [ + "enti", + -13.430118560791016 + ], + [ + "▁sk", + -13.430119514465332 + ], + [ + "▁astronaut", + -13.430135726928713 + ], + [ + "▁multiplier", + -13.430176734924316 + ], + [ + "▁chime", + -13.430201530456545 + ], + [ + "▁bonnet", + -13.43020248413086 + ], + [ + "▁interception", + -13.430224418640137 + ], + [ + "Dating", + -13.430294036865234 + ], + [ + "▁meltdown", + -13.430315017700195 + ], + [ + "activity", + -13.430327415466309 + ], + [ + "▁Engel", + -13.430495262145996 + ], + [ + "concept", + -13.430516242980955 + ], + [ + "▁disguised", + -13.430524826049805 + ], + [ + "▁:).", + -13.430541038513184 + ], + [ + "student", + -13.430588722229004 + ], + [ + "▁scaffolding", + -13.430598258972168 + ], + [ + "▁Haitian", + -13.430611610412598 + ], + [ + "▁Drone", + -13.430623054504396 + ], + [ + "▁Tas", + -13.43062686920166 + ], + [ + "▁cabs", 
+ -13.43064022064209 + ], + [ + "▁loser", + -13.430665016174316 + ], + [ + "kali", + -13.430694580078123 + ], + [ + "▁exc", + -13.430702209472656 + ], + [ + "▁Prompt", + -13.430822372436523 + ], + [ + "▁depended", + -13.430872917175291 + ], + [ + "▁Rei", + -13.43087673187256 + ], + [ + "▁Estonian", + -13.43089485168457 + ], + [ + "▁suppressed", + -13.43101978302002 + ], + [ + "212", + -13.431097030639648 + ], + [ + "▁watered", + -13.431124687194824 + ], + [ + "▁Vega", + -13.431148529052734 + ], + [ + "Bond", + -13.431272506713867 + ], + [ + "▁seeded", + -13.4313325881958 + ], + [ + "response", + -13.431459426879885 + ], + [ + "Exactly", + -13.43150520324707 + ], + [ + "uric", + -13.431558609008787 + ], + [ + "▁Strat", + -13.431586265563965 + ], + [ + "▁fades", + -13.43158721923828 + ], + [ + "▁jug", + -13.431602478027344 + ], + [ + "MEN", + -13.431605339050291 + ], + [ + "▁Prosecutor", + -13.431615829467772 + ], + [ + "Filter", + -13.431618690490724 + ], + [ + "▁Appointment", + -13.431634902954102 + ], + [ + "sharp", + -13.431767463684082 + ], + [ + "▁Aria", + -13.431912422180176 + ], + [ + "docs", + -13.432111740112305 + ], + [ + "▁cinemas", + -13.432143211364746 + ], + [ + "▁Invoice", + -13.432193756103516 + ], + [ + "hydroxy", + -13.432273864746094 + ], + [ + "CY", + -13.432293891906738 + ], + [ + "▁flop", + -13.432350158691406 + ], + [ + "▁Samaritan", + -13.43242645263672 + ], + [ + "▁invoicing", + -13.43242645263672 + ], + [ + "▁ominous", + -13.432429313659668 + ], + [ + "▁burlap", + -13.432433128356934 + ], + [ + "▁affirmative", + -13.432437896728516 + ], + [ + "▁pharmacists", + -13.432456970214844 + ], + [ + "▁unaffected", + -13.432459831237791 + ], + [ + "▁Reverend", + -13.432461738586426 + ], + [ + "▁Tactical", + -13.432467460632324 + ], + [ + "▁macOS", + -13.43247127532959 + ], + [ + "▁Bren", + -13.432480812072754 + ], + [ + "▁PARK", + -13.432490348815918 + ], + [ + "▁pharma", + -13.432512283325195 + ], + [ + "▁precaution", + -13.432535171508787 + ], + [ + "▁collaborator", + -13.432536125183104 + ], + [ + "▁crabs", + -13.432567596435549 + ], + [ + "▁retinal", + -13.432618141174316 + ], + [ + "▁Rus", + -13.432649612426758 + ], + [ + "voir", + -13.43268585205078 + ], + [ + "▁5.6", + -13.432711601257324 + ], + [ + "udo", + -13.432801246643066 + ], + [ + "2%)", + -13.432817459106444 + ], + [ + "▁superbly", + -13.432825088500977 + ], + [ + "▁sculpt", + -13.432939529418944 + ], + [ + "aker", + -13.433002471923828 + ], + [ + "▁Cleaners", + -13.433109283447266 + ], + [ + "▁Tanya", + -13.433160781860352 + ], + [ + "▁nationalities", + -13.433185577392578 + ], + [ + "▁sweetest", + -13.433213233947754 + ], + [ + "▁153", + -13.433237075805664 + ], + [ + "lytic", + -13.433247566223145 + ], + [ + "▁Rein", + -13.433250427246094 + ], + [ + "▁wipers", + -13.433265686035156 + ], + [ + "▁actresses", + -13.433411598205566 + ], + [ + "▁Took", + -13.433417320251465 + ], + [ + "mack", + -13.433479309082031 + ], + [ + "▁Arn", + -13.433488845825195 + ], + [ + "▁lemons", + -13.43352222442627 + ], + [ + "▁Turks", + -13.433611869812012 + ], + [ + "▁situ", + -13.433667182922363 + ], + [ + "▁parse", + -13.43370532989502 + ], + [ + "▁Locations", + -13.433820724487305 + ], + [ + "rup", + -13.433835983276367 + ], + [ + "Object", + -13.433871269226074 + ], + [ + "Morning", + -13.43387508392334 + ], + [ + "▁Chandelier", + -13.433895111083984 + ], + [ + "131", + -13.433908462524414 + ], + [ + "▁SIG", + -13.433910369873049 + ], + [ + "▁Parallel", + -13.433975219726562 + ], + [ + "▁OCR", + -13.434075355529783 + ], + [ 
+ "▁pos", + -13.434090614318848 + ], + [ + "mortar", + -13.434127807617188 + ], + [ + "itha", + -13.43430233001709 + ], + [ + "▁Grave", + -13.434313774108888 + ], + [ + "▁bitten", + -13.434345245361328 + ], + [ + "▁amplified", + -13.434433937072754 + ], + [ + "fulness", + -13.434438705444336 + ], + [ + "▁departures", + -13.43447208404541 + ], + [ + "lade", + -13.434562683105469 + ], + [ + "scribe", + -13.434649467468262 + ], + [ + "apply", + -13.434717178344728 + ], + [ + "▁waitress", + -13.434727668762209 + ], + [ + "▁Cola", + -13.4347562789917 + ], + [ + "▁Stamford", + -13.434762954711914 + ], + [ + "▁arterial", + -13.434762954711914 + ], + [ + "▁sludge", + -13.434764862060549 + ], + [ + "▁ultraviolet", + -13.434770584106444 + ], + [ + "▁caching", + -13.434786796569824 + ], + [ + "▁rotten", + -13.434808731079102 + ], + [ + "▁gosh", + -13.43483543395996 + ], + [ + "▁wield", + -13.43487548828125 + ], + [ + "222", + -13.434907913208008 + ], + [ + "ebel", + -13.43492031097412 + ], + [ + "▁dangerously", + -13.435073852539062 + ], + [ + "▁proofreading", + -13.435125350952148 + ], + [ + "▁murderer", + -13.435160636901855 + ], + [ + "▁Strap", + -13.43524169921875 + ], + [ + "boys", + -13.435257911682127 + ], + [ + "▁kennel", + -13.435264587402344 + ], + [ + "skill", + -13.435310363769531 + ], + [ + "tiv", + -13.435325622558594 + ], + [ + "▁distinguishes", + -13.435335159301758 + ], + [ + "▁Renee", + -13.435364723205566 + ], + [ + "▁Magi", + -13.435446739196776 + ], + [ + "▁Workout", + -13.435481071472168 + ], + [ + "▁Tissue", + -13.435491561889648 + ], + [ + "▁Bethany", + -13.435506820678713 + ], + [ + "▁THEIR", + -13.435532569885254 + ], + [ + "▁SOLD", + -13.43555736541748 + ], + [ + "roc", + -13.435568809509276 + ], + [ + "▁Bale", + -13.435574531555176 + ], + [ + "▁sleepers", + -13.435608863830566 + ], + [ + "Desk", + -13.435616493225098 + ], + [ + "▁tending", + -13.435702323913574 + ], + [ + "▁Daly", + -13.435786247253418 + ], + [ + "▁RAC", + -13.435791969299316 + ], + [ + "▁LAW", + -13.435809135437012 + ], + [ + "▁Tango", + -13.435823440551758 + ], + [ + "▁biotech", + -13.435842514038086 + ], + [ + "▁Installer", + -13.43590259552002 + ], + [ + "ivan", + -13.435925483703612 + ], + [ + "uche", + -13.435931205749512 + ], + [ + "▁Packet", + -13.435935020446776 + ], + [ + "GIS", + -13.435940742492676 + ], + [ + "blank", + -13.436166763305664 + ], + [ + "Shape", + -13.436195373535156 + ], + [ + "organization", + -13.436205863952637 + ], + [ + "automatic", + -13.436325073242188 + ], + [ + "izable", + -13.436352729797363 + ], + [ + "▁coughing", + -13.436365127563477 + ], + [ + "QA", + -13.436397552490234 + ], + [ + "▁Mole", + -13.436407089233398 + ], + [ + "Condition", + -13.436453819274902 + ], + [ + "▁trove", + -13.436516761779783 + ], + [ + "burgh", + -13.436552047729492 + ], + [ + "plain", + -13.436600685119627 + ], + [ + "▁denomination", + -13.436603546142578 + ], + [ + "▁CSI", + -13.436659812927246 + ], + [ + "▁acquaintances", + -13.43675136566162 + ], + [ + "▁CPI", + -13.436841011047363 + ], + [ + "▁realisation", + -13.436860084533691 + ], + [ + "127", + -13.43686866760254 + ], + [ + "nad", + -13.436951637268066 + ], + [ + "Letter", + -13.436959266662598 + ], + [ + "▁Rho", + -13.43696117401123 + ], + [ + "▁paws", + -13.43706512451172 + ], + [ + "▁Conservancy", + -13.437105178833008 + ], + [ + "▁predicament", + -13.437105178833008 + ], + [ + "▁autobiography", + -13.437110900878906 + ], + [ + "▁Amherst", + -13.437125205993652 + ], + [ + "▁patties", + -13.437128067016602 + ], + [ + "▁9001", + 
-13.437152862548828 + ], + [ + "▁Hayden", + -13.437175750732422 + ], + [ + "▁Nit", + -13.437203407287598 + ], + [ + "tape", + -13.43722438812256 + ], + [ + "▁offending", + -13.43722438812256 + ], + [ + "▁california", + -13.437241554260254 + ], + [ + "▁Flores", + -13.43728256225586 + ], + [ + "▁militia", + -13.437298774719238 + ], + [ + "▁sweatshirt", + -13.437332153320312 + ], + [ + "▁Dahl", + -13.43736743927002 + ], + [ + "▁Garland", + -13.437371253967283 + ], + [ + "▁alleges", + -13.437378883361816 + ], + [ + "▁McCoy", + -13.437385559082031 + ], + [ + "eba", + -13.437405586242676 + ], + [ + "▁Homemade", + -13.43746566772461 + ], + [ + "▁subsection", + -13.437469482421877 + ], + [ + "▁Clash", + -13.437519073486328 + ], + [ + "rity", + -13.437592506408691 + ], + [ + "▁Skyline", + -13.437593460083008 + ], + [ + "▁oftentimes", + -13.43771743774414 + ], + [ + "Luc", + -13.437817573547363 + ], + [ + "kwa", + -13.437828063964844 + ], + [ + "▁Knob", + -13.437888145446776 + ], + [ + "▁Therapeutics", + -13.43789291381836 + ], + [ + "▁Tum", + -13.437928199768066 + ], + [ + "Poor", + -13.43803596496582 + ], + [ + "▁burnout", + -13.438039779663086 + ], + [ + "470", + -13.438117027282717 + ], + [ + "rma", + -13.438192367553713 + ], + [ + "signed", + -13.438197135925291 + ], + [ + "▁Dyna", + -13.438243865966797 + ], + [ + "▁Lakewood", + -13.438271522521973 + ], + [ + "mins", + -13.438311576843262 + ], + [ + "lapse", + -13.43836498260498 + ], + [ + "2-5", + -13.438365936279297 + ], + [ + "bac", + -13.438422203063965 + ], + [ + "▁Voting", + -13.438467025756836 + ], + [ + "▁Extraordinary", + -13.438530921936035 + ], + [ + "Person", + -13.438563346862791 + ], + [ + "▁memes", + -13.438570976257324 + ], + [ + "▁Sick", + -13.438608169555664 + ], + [ + "▁od", + -13.43863582611084 + ], + [ + "▁shortening", + -13.438650131225586 + ], + [ + "mining", + -13.438651084899902 + ], + [ + "Lewis", + -13.43865966796875 + ], + [ + "▁inflict", + -13.438665390014648 + ], + [ + "activated", + -13.438680648803713 + ], + [ + "▁Mystic", + -13.43876838684082 + ], + [ + "▁discriminate", + -13.438851356506348 + ], + [ + "ight", + -13.438859939575195 + ], + [ + "▁losers", + -13.438876152038574 + ], + [ + "▁ligaments", + -13.438908576965332 + ], + [ + "▁incompatible", + -13.438936233520508 + ], + [ + "▁scaffold", + -13.438944816589355 + ], + [ + "▁ordinarily", + -13.439003944396973 + ], + [ + "▁6-1", + -13.439007759094238 + ], + [ + "naut", + -13.439034461975098 + ], + [ + "▁BBB", + -13.43906021118164 + ], + [ + "▁Hex", + -13.439066886901855 + ], + [ + "▁Fay", + -13.43910312652588 + ], + [ + "▁latent", + -13.439109802246094 + ], + [ + "▁Seymour", + -13.43912124633789 + ], + [ + "▁Rx", + -13.4392671585083 + ], + [ + "Arm", + -13.439348220825195 + ], + [ + "masters", + -13.439352989196776 + ], + [ + "▁Genetic", + -13.439382553100586 + ], + [ + "▁(2012).", + -13.43943691253662 + ], + [ + "▁Piedmont", + -13.439452171325684 + ], + [ + "▁surrogate", + -13.439452171325684 + ], + [ + "▁sneaker", + -13.439496040344238 + ], + [ + "▁permissible", + -13.439502716064451 + ], + [ + "▁Oscars", + -13.439556121826172 + ], + [ + "▁Cushion", + -13.439641952514648 + ], + [ + "▁11:59", + -13.439672470092772 + ], + [ + "▁aching", + -13.439674377441406 + ], + [ + "coding", + -13.439691543579102 + ], + [ + "▁Lionel", + -13.439692497253418 + ], + [ + "▁Doha", + -13.43971347808838 + ], + [ + "▁Caring", + -13.439723014831545 + ], + [ + "repair", + -13.439743995666504 + ], + [ + "▁Approx", + -13.439764022827148 + ], + [ + "wh", + -13.439800262451172 + ], + [ 
+ "Rank", + -13.439813613891602 + ], + [ + "▁blogged", + -13.439826965332031 + ], + [ + "113", + -13.43984603881836 + ], + [ + "▁stylists", + -13.43988037109375 + ], + [ + "Maker", + -13.439993858337402 + ], + [ + "▁Shores", + -13.440021514892578 + ], + [ + "▁spawned", + -13.440128326416016 + ], + [ + "▁Ratio", + -13.440170288085938 + ], + [ + "▁Vander", + -13.440192222595217 + ], + [ + "▁6-7", + -13.440194129943848 + ], + [ + "▁protectors", + -13.44019889831543 + ], + [ + "▁Majestic", + -13.440234184265137 + ], + [ + "lore", + -13.44031810760498 + ], + [ + "▁Barre", + -13.440364837646484 + ], + [ + "▁Scam", + -13.440460205078123 + ], + [ + "icle", + -13.44050121307373 + ], + [ + "UH", + -13.440574645996094 + ], + [ + "▁Ctrl", + -13.440723419189451 + ], + [ + "PX", + -13.440804481506348 + ], + [ + "▁André", + -13.440828323364258 + ], + [ + "nett", + -13.44085693359375 + ], + [ + "▁MSN", + -13.44085693359375 + ], + [ + "wana", + -13.440873146057127 + ], + [ + "yen", + -13.44088363647461 + ], + [ + "▁Branding", + -13.440958023071287 + ], + [ + "▁Fortress", + -13.441036224365234 + ], + [ + "▁Zack", + -13.441072463989258 + ], + [ + "Steam", + -13.441205978393556 + ], + [ + "▁triumphant", + -13.441250801086426 + ], + [ + "▁turkeys", + -13.44136905670166 + ], + [ + "mund", + -13.441393852233888 + ], + [ + "odd", + -13.441487312316896 + ], + [ + "▁trickle", + -13.441487312316896 + ], + [ + "bli", + -13.441490173339844 + ], + [ + "Expert", + -13.441566467285156 + ], + [ + "▁arenas", + -13.441661834716797 + ], + [ + "Cro", + -13.44167423248291 + ], + [ + "exchange", + -13.441776275634766 + ], + [ + "▁oxidative", + -13.441805839538574 + ], + [ + "▁saffron", + -13.441805839538574 + ], + [ + "▁sclerosis", + -13.441805839538574 + ], + [ + "▁fennel", + -13.441807746887209 + ], + [ + "▁marvellous", + -13.44180965423584 + ], + [ + "▁Gloucestershire", + -13.441810607910156 + ], + [ + "▁Hubbard", + -13.441810607910156 + ], + [ + "▁bruising", + -13.441812515258787 + ], + [ + "▁bordering", + -13.441838264465332 + ], + [ + "▁philosophies", + -13.441838264465332 + ], + [ + "African", + -13.441850662231444 + ], + [ + "▁IX", + -13.441889762878418 + ], + [ + "▁Garfield", + -13.441929817199709 + ], + [ + "baked", + -13.441944122314451 + ], + [ + "▁Kis", + -13.442011833190918 + ], + [ + "▁mussels", + -13.442028045654297 + ], + [ + "▁Borg", + -13.442036628723145 + ], + [ + "▁NAC", + -13.442056655883787 + ], + [ + "▁Sgt", + -13.442086219787598 + ], + [ + "▁1/1", + -13.44215488433838 + ], + [ + "▁Maintaining", + -13.44216537475586 + ], + [ + "▁Conor", + -13.442298889160156 + ], + [ + "▁substitutes", + -13.442301750183104 + ], + [ + "GPS", + -13.44234561920166 + ], + [ + "Gr", + -13.442399978637695 + ], + [ + "▁draped", + -13.442415237426758 + ], + [ + "pus", + -13.44244384765625 + ], + [ + "▁Barrow", + -13.442452430725098 + ], + [ + "▁Rockwell", + -13.442458152770996 + ], + [ + "▁piracy", + -13.44257354736328 + ], + [ + "▁Tristan", + -13.44264030456543 + ], + [ + "▁Pepsi", + -13.442654609680176 + ], + [ + "▁audited", + -13.44277000427246 + ], + [ + "▁#9", + -13.442789077758787 + ], + [ + "▁Mobil", + -13.442816734313965 + ], + [ + "▁scallops", + -13.442873001098633 + ], + [ + "▁Hui", + -13.442893028259276 + ], + [ + "▁hostels", + -13.442962646484377 + ], + [ + "fri", + -13.443029403686523 + ], + [ + "▁540", + -13.443062782287598 + ], + [ + "▁thicken", + -13.443120002746582 + ], + [ + "paste", + -13.443122863769531 + ], + [ + "▁Lola", + -13.44313621520996 + ], + [ + "▁critters", + -13.44318962097168 + ], + [ + "▁146", + 
-13.443194389343262 + ], + [ + "ault", + -13.443263053894045 + ], + [ + "▁Klo", + -13.443330764770508 + ], + [ + "▁cheaply", + -13.443334579467772 + ], + [ + "▁barren", + -13.443341255187988 + ], + [ + "angi", + -13.443405151367188 + ], + [ + "Twenty", + -13.443461418151855 + ], + [ + "practice", + -13.443467140197754 + ], + [ + "nts", + -13.443475723266602 + ], + [ + "▁lucid", + -13.443548202514648 + ], + [ + "integrated", + -13.443557739257812 + ], + [ + "virtual", + -13.443591117858888 + ], + [ + "319", + -13.443614959716797 + ], + [ + "according", + -13.443618774414062 + ], + [ + "qi", + -13.443645477294922 + ], + [ + "▁Gat", + -13.443657875061035 + ], + [ + "toria", + -13.443713188171388 + ], + [ + "▁NIC", + -13.44372272491455 + ], + [ + "▁999", + -13.4437255859375 + ], + [ + "▁Rahul", + -13.4437255859375 + ], + [ + "▁arched", + -13.443745613098145 + ], + [ + "chill", + -13.443753242492676 + ], + [ + "▁cob", + -13.443784713745115 + ], + [ + "fault", + -13.443793296813965 + ], + [ + "▁Disclaimer", + -13.443811416625977 + ], + [ + "viv", + -13.443832397460938 + ], + [ + "sak", + -13.443836212158203 + ], + [ + "Ohio", + -13.443883895874023 + ], + [ + "stv", + -13.443907737731934 + ], + [ + "▁concealer", + -13.444040298461914 + ], + [ + "▁Cavaliers", + -13.44404125213623 + ], + [ + "70,000", + -13.44408893585205 + ], + [ + "▁Kosovo", + -13.444168090820312 + ], + [ + "▁Lies", + -13.444170951843262 + ], + [ + "▁crumbling", + -13.444194793701172 + ], + [ + "▁Firmware", + -13.444221496582031 + ], + [ + "por", + -13.444222450256348 + ], + [ + "▁stipulated", + -13.444344520568848 + ], + [ + "▁elongated", + -13.44434642791748 + ], + [ + "▁cf", + -13.444384574890137 + ], + [ + "на", + -13.444406509399414 + ], + [ + "▁Pilgrim", + -13.444416999816896 + ], + [ + "▁Else", + -13.44443130493164 + ], + [ + "planning", + -13.444555282592772 + ], + [ + "▁Weed", + -13.444558143615724 + ], + [ + "▁outsiders", + -13.444584846496582 + ], + [ + "▁Bitter", + -13.444613456726074 + ], + [ + "lub", + -13.444622993469238 + ], + [ + "iac", + -13.444781303405762 + ], + [ + "▁vie", + -13.44482421875 + ], + [ + "anu", + -13.44485855102539 + ], + [ + "▁santa", + -13.44490909576416 + ], + [ + "40,000", + -13.445013999938965 + ], + [ + "▁Procurement", + -13.445066452026367 + ], + [ + "▁Gather", + -13.445074081420898 + ], + [ + "6-5", + -13.44510555267334 + ], + [ + "▁2024", + -13.445405960083008 + ], + [ + "▁musculoskeletal", + -13.445428848266602 + ], + [ + "▁pathetic", + -13.4456148147583 + ], + [ + "▁looms", + -13.44563102722168 + ], + [ + "▁posh", + -13.445648193359377 + ], + [ + "▁Parenting", + -13.4456787109375 + ], + [ + "▁Org", + -13.445706367492676 + ], + [ + "▁Obi", + -13.44576930999756 + ], + [ + "7%)", + -13.445878028869627 + ], + [ + "▁CMA", + -13.445889472961426 + ], + [ + "hospital", + -13.445895195007324 + ], + [ + "Automatic", + -13.44590187072754 + ], + [ + "▁apprentices", + -13.44596004486084 + ], + [ + "▁Traders", + -13.446035385131836 + ], + [ + "▁resistor", + -13.446056365966797 + ], + [ + "▁restful", + -13.446064949035645 + ], + [ + "▁Guangzhou", + -13.446136474609377 + ], + [ + "▁harming", + -13.44615650177002 + ], + [ + "▁alas", + -13.446202278137209 + ], + [ + "YP", + -13.44622802734375 + ], + [ + "▁acquaintance", + -13.44624137878418 + ], + [ + "▁Delight", + -13.446277618408203 + ], + [ + "lingual", + -13.446354866027832 + ], + [ + "▁Westfield", + -13.446381568908691 + ], + [ + "arity", + -13.446420669555664 + ], + [ + "▁cohesion", + -13.446527481079102 + ], + [ + "▁diagnosing", + 
-13.446528434753418 + ], + [ + "▁indulging", + -13.446528434753418 + ], + [ + "▁proverbial", + -13.446535110473633 + ], + [ + "sym", + -13.446552276611328 + ], + [ + "sio", + -13.44656467437744 + ], + [ + "▁streamlining", + -13.44656467437744 + ], + [ + "▁yearning", + -13.446584701538086 + ], + [ + "▁extracurricular", + -13.44663906097412 + ], + [ + "▁stately", + -13.446643829345703 + ], + [ + "▁Whisk", + -13.446688652038574 + ], + [ + "▁460", + -13.44678783416748 + ], + [ + "▁adoptive", + -13.446788787841797 + ], + [ + "▁firefighter", + -13.446869850158691 + ], + [ + "▁witches", + -13.446882247924805 + ], + [ + "▁Supported", + -13.44691562652588 + ], + [ + "▁Pointe", + -13.446954727172852 + ], + [ + "▁Hopper", + -13.446962356567385 + ], + [ + "UAL", + -13.446972846984863 + ], + [ + "▁brood", + -13.446972846984863 + ], + [ + "▁tangled", + -13.44698715209961 + ], + [ + "▁Nearby", + -13.447071075439451 + ], + [ + "LOW", + -13.447093963623049 + ], + [ + "Ali", + -13.447118759155272 + ], + [ + "▁veterinarians", + -13.447128295898438 + ], + [ + "757", + -13.44713306427002 + ], + [ + "▁Trunk", + -13.447136878967283 + ], + [ + "▁waved", + -13.447229385375977 + ], + [ + "▁Sherry", + -13.447258949279783 + ], + [ + "ife", + -13.447265625 + ], + [ + "KP", + -13.447391510009766 + ], + [ + "▁medley", + -13.447447776794434 + ], + [ + "1-5", + -13.44747829437256 + ], + [ + "rini", + -13.44760513305664 + ], + [ + "▁Enrollment", + -13.447622299194336 + ], + [ + "endra", + -13.447624206542969 + ], + [ + "▁yum", + -13.447826385498049 + ], + [ + "cade", + -13.447874069213867 + ], + [ + "▁Gri", + -13.447924613952637 + ], + [ + "1997", + -13.447956085205078 + ], + [ + "▁Royals", + -13.448010444641112 + ], + [ + "STU", + -13.44806671142578 + ], + [ + "▁philosophers", + -13.448068618774414 + ], + [ + "▁guarding", + -13.448150634765623 + ], + [ + "▁hitter", + -13.448216438293455 + ], + [ + "▁(0)", + -13.448244094848633 + ], + [ + "▁memorize", + -13.448298454284668 + ], + [ + "Especially", + -13.448338508605955 + ], + [ + "Northern", + -13.448355674743652 + ], + [ + "Entry", + -13.448373794555664 + ], + [ + "▁masterful", + -13.448379516601562 + ], + [ + "Likewise", + -13.448429107666016 + ], + [ + "Otherwise", + -13.44843292236328 + ], + [ + "DEN", + -13.448467254638672 + ], + [ + "prepared", + -13.448512077331545 + ], + [ + "▁enchanted", + -13.44857120513916 + ], + [ + "ience", + -13.44859218597412 + ], + [ + "▁Beans", + -13.448719024658203 + ], + [ + "ends", + -13.448887825012209 + ], + [ + "▁observance", + -13.448898315429688 + ], + [ + "▁carousel", + -13.44890022277832 + ], + [ + "▁Babylon", + -13.448902130126951 + ], + [ + "kWh", + -13.448904037475586 + ], + [ + "▁shaky", + -13.448908805847168 + ], + [ + "▁Annapolis", + -13.448917388916016 + ], + [ + "▁dressings", + -13.448929786682127 + ], + [ + "▁Zoning", + -13.448930740356444 + ], + [ + "▁kilograms", + -13.448931694030762 + ], + [ + "▁mums", + -13.44894027709961 + ], + [ + "▁equipping", + -13.448941230773926 + ], + [ + "▁vetted", + -13.448983192443848 + ], + [ + "▁crooked", + -13.449040412902832 + ], + [ + "▁Elias", + -13.44905948638916 + ], + [ + "▁Attachment", + -13.449084281921388 + ], + [ + "▁endowed", + -13.449121475219728 + ], + [ + "▁1978,", + -13.4491548538208 + ], + [ + "▁tendons", + -13.449162483215332 + ], + [ + "▁bling", + -13.449213027954102 + ], + [ + "▁ODI", + -13.449223518371582 + ], + [ + "495", + -13.44927215576172 + ], + [ + "pour", + -13.449278831481934 + ], + [ + "tk", + -13.44944667816162 + ], + [ + "▁LX", + -13.449460983276367 + ], + 
[ + "Scientists", + -13.449461936950684 + ], + [ + "Die", + -13.449480056762695 + ], + [ + "Healthy", + -13.449493408203123 + ], + [ + "▁recite", + -13.449512481689451 + ], + [ + "601", + -13.449557304382324 + ], + [ + "▁Query", + -13.44980239868164 + ], + [ + "neo", + -13.449804306030272 + ], + [ + "▁Mosquito", + -13.449821472167969 + ], + [ + "▁lotions", + -13.449910163879396 + ], + [ + "▁Clive", + -13.449960708618164 + ], + [ + "▁Spam", + -13.449962615966797 + ], + [ + "notes", + -13.450033187866213 + ], + [ + "080", + -13.450048446655272 + ], + [ + "rst", + -13.450200080871582 + ], + [ + "▁Bayern", + -13.45023250579834 + ], + [ + "▁reservoirs", + -13.45026683807373 + ], + [ + "chester", + -13.450267791748049 + ], + [ + "Carl", + -13.45028305053711 + ], + [ + "▁Functions", + -13.450288772583008 + ], + [ + "▁Gamble", + -13.450310707092283 + ], + [ + "hos", + -13.450358390808104 + ], + [ + "▁lad", + -13.450400352478027 + ], + [ + "▁143", + -13.450453758239746 + ], + [ + "▁Exceptional", + -13.450479507446287 + ], + [ + "▁Matching", + -13.450494766235352 + ], + [ + "▁hives", + -13.450605392456056 + ], + [ + "gru", + -13.45068073272705 + ], + [ + "discovery", + -13.45069694519043 + ], + [ + "Pull", + -13.450705528259276 + ], + [ + "▁nudge", + -13.450714111328123 + ], + [ + "Challenge", + -13.450757026672363 + ], + [ + "▁Harding", + -13.45079517364502 + ], + [ + "▁lest", + -13.450823783874512 + ], + [ + "▁Mathematical", + -13.450844764709473 + ], + [ + "rach", + -13.450845718383787 + ], + [ + "▁Ami", + -13.450875282287598 + ], + [ + "▁Genealogy", + -13.45093822479248 + ], + [ + "▁McGraw", + -13.450942993164062 + ], + [ + "▁Rudy", + -13.450956344604492 + ], + [ + "Skip", + -13.450968742370604 + ], + [ + "Directions", + -13.451218605041504 + ], + [ + "▁Wrangler", + -13.45127296447754 + ], + [ + "▁symphony", + -13.45127296447754 + ], + [ + "▁measles", + -13.451273918151855 + ], + [ + "▁healer", + -13.451286315917969 + ], + [ + "▁Rift", + -13.451390266418455 + ], + [ + "MAR", + -13.451416015625 + ], + [ + "▁rigidity", + -13.451430320739746 + ], + [ + "▁Kop", + -13.451436042785645 + ], + [ + "604", + -13.451445579528809 + ], + [ + "hiro", + -13.451518058776855 + ], + [ + "▁OLED", + -13.45155143737793 + ], + [ + "▁crocodile", + -13.4515962600708 + ], + [ + "▁Cumbria", + -13.45163631439209 + ], + [ + "▁Camaro", + -13.451672554016112 + ], + [ + "▁Baja", + -13.45167636871338 + ], + [ + "▁151", + -13.451708793640137 + ], + [ + "▁predator", + -13.451748847961426 + ], + [ + "▁orchestral", + -13.45178508758545 + ], + [ + "▁snail", + -13.451847076416016 + ], + [ + "▁cures", + -13.45188808441162 + ], + [ + "SY", + -13.451958656311035 + ], + [ + "Poly", + -13.45199203491211 + ], + [ + "▁9,000", + -13.452025413513184 + ], + [ + "3.6", + -13.452054977416992 + ], + [ + "abel", + -13.452094078063965 + ], + [ + "closed", + -13.452103614807127 + ], + [ + "RK", + -13.452157020568848 + ], + [ + "ARE", + -13.452256202697754 + ], + [ + "Africa", + -13.452425003051758 + ], + [ + "▁UPDATE", + -13.452521324157717 + ], + [ + "erate", + -13.45263957977295 + ], + [ + "▁Gunn", + -13.452664375305176 + ], + [ + "Dress", + -13.45267391204834 + ], + [ + "▁popularly", + -13.452759742736816 + ], + [ + "▁1977,", + -13.45285415649414 + ], + [ + "▁prawns", + -13.452933311462402 + ], + [ + "COR", + -13.45294189453125 + ], + [ + "▁DEL", + -13.452993392944336 + ], + [ + "esis", + -13.453102111816406 + ], + [ + "▁reimburse", + -13.453123092651367 + ], + [ + "▁civilizations", + -13.453126907348633 + ], + [ + "▁Aber", + -13.453154563903809 
+ ], + [ + "▁Reveal", + -13.453176498413086 + ], + [ + "JM", + -13.453177452087402 + ], + [ + "sql", + -13.453216552734377 + ], + [ + "creative", + -13.453229904174805 + ], + [ + "▁Rovers", + -13.453296661376951 + ], + [ + "MES", + -13.453384399414062 + ], + [ + "▁slit", + -13.453398704528809 + ], + [ + "bic", + -13.453407287597656 + ], + [ + "▁abrupt", + -13.453420639038086 + ], + [ + "▁breakdowns", + -13.453421592712402 + ], + [ + "vier", + -13.453429222106934 + ], + [ + "▁crossings", + -13.453429222106934 + ], + [ + "▁Wayfair", + -13.453484535217283 + ], + [ + "skilled", + -13.453560829162598 + ], + [ + "Bow", + -13.45364475250244 + ], + [ + "▁SHOULD", + -13.453654289245604 + ], + [ + "▁gymnasium", + -13.453654289245604 + ], + [ + "▁tungsten", + -13.453654289245604 + ], + [ + "▁kaufen", + -13.453656196594238 + ], + [ + "▁meditative", + -13.453656196594238 + ], + [ + "▁cramps", + -13.453660011291504 + ], + [ + "▁GeForce", + -13.45366668701172 + ], + [ + "▁handicapped", + -13.453667640686035 + ], + [ + "owa", + -13.453715324401855 + ], + [ + "▁Negative", + -13.453715324401855 + ], + [ + "▁(2001)", + -13.453717231750488 + ], + [ + "▁entrenched", + -13.453725814819336 + ], + [ + "▁backside", + -13.45373821258545 + ], + [ + "builder", + -13.453778266906738 + ], + [ + "▁Ryder", + -13.453824043273926 + ], + [ + "▁peroxide", + -13.453838348388672 + ], + [ + "▁Olivier", + -13.453912734985352 + ], + [ + "Gain", + -13.453922271728516 + ], + [ + "dil", + -13.453953742980955 + ], + [ + "▁cove", + -13.454014778137209 + ], + [ + "▁stronghold", + -13.454033851623535 + ], + [ + "▁artifact", + -13.454068183898926 + ], + [ + "▁(2008)", + -13.454083442687988 + ], + [ + "▁drowned", + -13.454099655151367 + ], + [ + "▁Xerox", + -13.454127311706545 + ], + [ + "iere", + -13.454160690307615 + ], + [ + "▁whim", + -13.454190254211426 + ], + [ + "▁Nuts", + -13.454288482666016 + ], + [ + "▁elevators", + -13.454453468322754 + ], + [ + "hri", + -13.4545316696167 + ], + [ + "▁BPA", + -13.45453929901123 + ], + [ + "▁polymers", + -13.454618453979492 + ], + [ + "Writer", + -13.454659461975098 + ], + [ + "▁postures", + -13.45469856262207 + ], + [ + "▁luxe", + -13.454708099365234 + ], + [ + "▁2021.", + -13.45472526550293 + ], + [ + "gain", + -13.454778671264648 + ], + [ + "▁Aspire", + -13.454878807067873 + ], + [ + "▁muster", + -13.454914093017578 + ], + [ + "▁PEN", + -13.455049514770508 + ], + [ + "▁Ayurvedic", + -13.455108642578123 + ], + [ + "Spin", + -13.45514678955078 + ], + [ + "▁wonderland", + -13.455199241638184 + ], + [ + "-2010", + -13.455276489257812 + ], + [ + "disc", + -13.455280303955078 + ], + [ + "▁percentile", + -13.455280303955078 + ], + [ + "▁coverings", + -13.45535659790039 + ], + [ + "▁Sud", + -13.45537281036377 + ], + [ + "▁Malt", + -13.455410957336426 + ], + [ + "Race", + -13.455430030822754 + ], + [ + "Lay", + -13.455451965332031 + ], + [ + "▁tigers", + -13.455463409423828 + ], + [ + "▁luster", + -13.455528259277344 + ], + [ + "VILLE", + -13.45553970336914 + ], + [ + "Dynamic", + -13.455586433410645 + ], + [ + "▁Pelosi", + -13.455595016479492 + ], + [ + "▁Wired", + -13.45561981201172 + ], + [ + "▁ballad", + -13.455638885498049 + ], + [ + "▁Trudeau", + -13.455644607543944 + ], + [ + "liked", + -13.455656051635742 + ], + [ + "▁Hiking", + -13.45566177368164 + ], + [ + "lal", + -13.45566463470459 + ], + [ + "oral", + -13.455671310424805 + ], + [ + "▁Fare", + -13.455687522888184 + ], + [ + "OTA", + -13.455714225769045 + ], + [ + "▁begs", + -13.455788612365724 + ], + [ + "Economic", + -13.455812454223633 + 
], + [ + "dena", + -13.455850601196287 + ], + [ + "▁unjust", + -13.455850601196287 + ], + [ + "Honey", + -13.45586395263672 + ], + [ + "Latest", + -13.455875396728516 + ], + [ + "▁veranda", + -13.455963134765623 + ], + [ + "▁CSU", + -13.455965995788574 + ], + [ + "Kick", + -13.456029891967772 + ], + [ + "ICT", + -13.456039428710938 + ], + [ + "▁adolescence", + -13.456040382385254 + ], + [ + "▁intermediary", + -13.456040382385254 + ], + [ + "▁intravenous", + -13.45604133605957 + ], + [ + "▁choking", + -13.456053733825684 + ], + [ + "▁rigging", + -13.45607566833496 + ], + [ + "▁Huntsville", + -13.456082344055176 + ], + [ + "Flat", + -13.456085205078123 + ], + [ + "▁seminal", + -13.456122398376465 + ], + [ + "▁Stephan", + -13.456194877624512 + ], + [ + "▁soreness", + -13.456291198730469 + ], + [ + "SSA", + -13.456311225891112 + ], + [ + "▁evade", + -13.456390380859377 + ], + [ + "▁visualisation", + -13.456390380859377 + ], + [ + "▁Faux", + -13.45639419555664 + ], + [ + "▁valentine", + -13.456419944763184 + ], + [ + "▁concurrently", + -13.456472396850586 + ], + [ + "▁Rin", + -13.45668125152588 + ], + [ + "▁2022", + -13.456787109375 + ], + [ + "▁Ricardo", + -13.45683765411377 + ], + [ + "▁jumbo", + -13.457022666931152 + ], + [ + "▁snapshots", + -13.457023620605469 + ], + [ + "▁Trick", + -13.457024574279783 + ], + [ + "▁hybrids", + -13.457063674926758 + ], + [ + "▁9\"", + -13.457108497619627 + ], + [ + "▁vividly", + -13.457120895385742 + ], + [ + "till", + -13.457196235656738 + ], + [ + "445", + -13.457367897033691 + ], + [ + "▁upsetting", + -13.457510948181152 + ], + [ + "▁pavers", + -13.457527160644531 + ], + [ + "▁THEM", + -13.457571983337402 + ], + [ + "▁steward", + -13.457589149475098 + ], + [ + "▁directives", + -13.457778930664062 + ], + [ + "▁soloist", + -13.457782745361328 + ], + [ + "▁panda", + -13.457828521728516 + ], + [ + "Justin", + -13.457948684692385 + ], + [ + "mesh", + -13.457975387573242 + ], + [ + "creating", + -13.458003997802734 + ], + [ + "▁Buster", + -13.458009719848633 + ], + [ + "▁auditors", + -13.458023071289062 + ], + [ + "▁171", + -13.458100318908691 + ], + [ + "Ultra", + -13.4581880569458 + ], + [ + "▁Goo", + -13.458245277404783 + ], + [ + "▁Berk", + -13.458251953125 + ], + [ + "Hawk", + -13.458282470703123 + ], + [ + "▁Bengali", + -13.458327293395996 + ], + [ + "Phase", + -13.458330154418944 + ], + [ + "▁WL", + -13.458375930786133 + ], + [ + "▁Eucharist", + -13.458386421203612 + ], + [ + "▁sensual", + -13.458402633666992 + ], + [ + "▁Vaughan", + -13.458433151245115 + ], + [ + "▁Navajo", + -13.458438873291016 + ], + [ + "▁anguish", + -13.458447456359863 + ], + [ + "▁ESA", + -13.458451271057127 + ], + [ + "Admin", + -13.458463668823242 + ], + [ + "▁Chesterfield", + -13.45853042602539 + ], + [ + "rang", + -13.458587646484377 + ], + [ + "▁Southwestern", + -13.458595275878906 + ], + [ + "structured", + -13.458783149719238 + ], + [ + "Suite", + -13.45880889892578 + ], + [ + "Spain", + -13.458876609802246 + ], + [ + "litz", + -13.45888900756836 + ], + [ + "▁Banquet", + -13.458932876586914 + ], + [ + "▁Stampin", + -13.45897388458252 + ], + [ + "▁gmail", + -13.459100723266602 + ], + [ + "DES", + -13.459151268005373 + ], + [ + "▁eatery", + -13.45915985107422 + ], + [ + "▁DAC", + -13.459373474121094 + ], + [ + "4.3", + -13.459385871887209 + ], + [ + "▁5.4", + -13.45938777923584 + ], + [ + "▁tempt", + -13.459390640258787 + ], + [ + "▁velvety", + -13.459418296813965 + ], + [ + "▁Alerts", + -13.459477424621582 + ], + [ + "▁Bore", + -13.459532737731934 + ], + [ + "▁EVA", + 
-13.459545135498049 + ], + [ + "▁MADE", + -13.459569931030272 + ], + [ + "▁OA", + -13.45957374572754 + ], + [ + "▁MSC", + -13.459609031677246 + ], + [ + "▁unborn", + -13.459639549255373 + ], + [ + "▁drinkers", + -13.459708213806152 + ], + [ + "▁Rene", + -13.459732055664062 + ], + [ + "jp", + -13.459741592407228 + ], + [ + "▁NEXT", + -13.459749221801758 + ], + [ + "mediated", + -13.459882736206056 + ], + [ + "▁Asking", + -13.459911346435549 + ], + [ + "▁watery", + -13.460010528564451 + ], + [ + "lox", + -13.460288047790527 + ], + [ + "▁reflex", + -13.460458755493164 + ], + [ + "oria", + -13.460492134094238 + ], + [ + "aunt", + -13.460512161254885 + ], + [ + "▁Sovereign", + -13.460559844970703 + ], + [ + "apps", + -13.460570335388184 + ], + [ + "Heavy", + -13.460597038269045 + ], + [ + "CRC", + -13.460630416870115 + ], + [ + "▁pore", + -13.460634231567385 + ], + [ + "▁Etc", + -13.460722923278809 + ], + [ + "▁hens", + -13.460772514343262 + ], + [ + "lateral", + -13.46081829071045 + ], + [ + "▁magnification", + -13.460830688476562 + ], + [ + "▁gamut", + -13.460843086242676 + ], + [ + "▁Derbyshire", + -13.460847854614258 + ], + [ + "▁confinement", + -13.460851669311523 + ], + [ + "▁Dangerous", + -13.460870742797852 + ], + [ + "▁temperate", + -13.460870742797852 + ], + [ + "▁Vessel", + -13.4608793258667 + ], + [ + "meal", + -13.460895538330078 + ], + [ + "▁buttercream", + -13.460896492004396 + ], + [ + "▁Campground", + -13.460926055908203 + ], + [ + "▁KG", + -13.460939407348633 + ], + [ + "fox", + -13.461003303527832 + ], + [ + "▁angst", + -13.461045265197754 + ], + [ + "▁restrained", + -13.461163520812988 + ], + [ + "▁leaflets", + -13.461230278015137 + ], + [ + "▁improvised", + -13.461256980895996 + ], + [ + "▁cataract", + -13.46126937866211 + ], + [ + "▁mutant", + -13.461274147033691 + ], + [ + "▁$65", + -13.461296081542969 + ], + [ + "▁keepsake", + -13.46131420135498 + ], + [ + "▁ignores", + -13.461381912231444 + ], + [ + "▁convened", + -13.461480140686035 + ], + [ + "▁blockade", + -13.46149444580078 + ], + [ + "▁Saba", + -13.461522102355955 + ], + [ + "▁soared", + -13.46155071258545 + ], + [ + "eder", + -13.46161651611328 + ], + [ + "▁BOX", + -13.461623191833496 + ], + [ + "▁rained", + -13.46165943145752 + ], + [ + "▁RL", + -13.461700439453123 + ], + [ + "▁weaves", + -13.46171760559082 + ], + [ + "133", + -13.46172332763672 + ], + [ + "▁frosted", + -13.46189308166504 + ], + [ + "▁HPE", + -13.46194076538086 + ], + [ + "▁207", + -13.461991310119627 + ], + [ + "▁trailhead", + -13.462024688720703 + ], + [ + "▁Innovations", + -13.462251663208008 + ], + [ + "▁GOLD", + -13.462322235107422 + ], + [ + "325", + -13.462324142456056 + ], + [ + "-85", + -13.462349891662598 + ], + [ + "▁ROOM", + -13.462421417236328 + ], + [ + "assa", + -13.462532997131348 + ], + [ + "▁Entrepreneurs", + -13.46254062652588 + ], + [ + "▁complimented", + -13.462675094604492 + ], + [ + "▁Celtics", + -13.46273422241211 + ], + [ + "▁trance", + -13.462745666503906 + ], + [ + "▁WO", + -13.462787628173828 + ], + [ + "▁caveat", + -13.462821006774902 + ], + [ + "▁proclaim", + -13.462835311889648 + ], + [ + "▁duel", + -13.462876319885254 + ], + [ + "▁(2007)", + -13.462933540344238 + ], + [ + "▁cas", + -13.462957382202148 + ], + [ + "▁Arrange", + -13.46297550201416 + ], + [ + "▁1975.", + -13.463059425354004 + ], + [ + "▁generalized", + -13.46313190460205 + ], + [ + "▁elm", + -13.463176727294922 + ], + [ + "Den", + -13.463177680969238 + ], + [ + "▁evaporation", + -13.463234901428224 + ], + [ + "▁impoverished", + -13.463234901428224 + ], 
+ [ + "▁Buckingham", + -13.463239669799805 + ], + [ + "▁Sparrow", + -13.46325397491455 + ], + [ + "▁treble", + -13.463268280029297 + ], + [ + "▁reprint", + -13.463271141052246 + ], + [ + "▁Cuomo", + -13.463281631469728 + ], + [ + "▁Earrings", + -13.463323593139648 + ], + [ + "cession", + -13.463330268859863 + ], + [ + "▁coordinators", + -13.46338176727295 + ], + [ + "▁Advancement", + -13.463454246520996 + ], + [ + "▁sagging", + -13.463506698608398 + ], + [ + "▁DMC", + -13.463533401489258 + ], + [ + "iol", + -13.463661193847656 + ], + [ + "cata", + -13.46367073059082 + ], + [ + "rigg", + -13.463671684265137 + ], + [ + "▁Prestige", + -13.46373462677002 + ], + [ + "among", + -13.463735580444336 + ], + [ + "▁Sav", + -13.463767051696776 + ], + [ + "▁Regent", + -13.463805198669434 + ], + [ + "▁widths", + -13.463889122009276 + ], + [ + "brainer", + -13.463940620422363 + ], + [ + "milk", + -13.463940620422363 + ], + [ + "▁Anytime", + -13.463953018188477 + ], + [ + "▁vaguely", + -13.463987350463867 + ], + [ + "▁footed", + -13.464089393615724 + ], + [ + "Davis", + -13.46412181854248 + ], + [ + "▁STA", + -13.464288711547852 + ], + [ + "▁incl", + -13.46432399749756 + ], + [ + "▁EXP", + -13.46434211730957 + ], + [ + "Ship", + -13.464363098144531 + ], + [ + "▁roundtable", + -13.46437644958496 + ], + [ + "period", + -13.464395523071287 + ], + [ + "▁Fault", + -13.464396476745604 + ], + [ + "▁Neutral", + -13.464404106140137 + ], + [ + "▁Lexi", + -13.464407920837402 + ], + [ + "▁Aerial", + -13.464448928833008 + ], + [ + "né", + -13.464593887329102 + ], + [ + "▁Howe", + -13.464607238769531 + ], + [ + "▁parable", + -13.464664459228516 + ], + [ + "▁UNDER", + -13.464667320251465 + ], + [ + "▁nicknamed", + -13.464679718017578 + ], + [ + "semi", + -13.464683532714844 + ], + [ + "ISA", + -13.464752197265623 + ], + [ + "rator", + -13.464818954467772 + ], + [ + "VN", + -13.464826583862305 + ], + [ + "▁510", + -13.464916229248049 + ], + [ + "▁Sahara", + -13.46492862701416 + ], + [ + "▁(10)", + -13.464970588684082 + ], + [ + "▁Profiles", + -13.465021133422852 + ], + [ + "oun", + -13.465051651000977 + ], + [ + "▁entertainers", + -13.465073585510254 + ], + [ + "Individuals", + -13.465194702148438 + ], + [ + "UB", + -13.465265274047852 + ], + [ + "▁journaling", + -13.46531105041504 + ], + [ + "▁grammatical", + -13.465401649475098 + ], + [ + "▁Downloads", + -13.465417861938477 + ], + [ + "▁escapes", + -13.465461730957031 + ], + [ + "▁dj", + -13.465530395507812 + ], + [ + "▁Spartan", + -13.46555995941162 + ], + [ + "uther", + -13.46560764312744 + ], + [ + "yk", + -13.465620994567873 + ], + [ + "▁Sicily", + -13.46564483642578 + ], + [ + "▁misfortune", + -13.46564483642578 + ], + [ + "▁approximation", + -13.465649604797363 + ], + [ + "▁Gatwick", + -13.46565055847168 + ], + [ + "▁aromatherapy", + -13.46566104888916 + ], + [ + "▁Antivirus", + -13.465678215026855 + ], + [ + "▁Lists", + -13.465678215026855 + ], + [ + "scribing", + -13.46572208404541 + ], + [ + "Urban", + -13.465725898742676 + ], + [ + "▁(7)", + -13.465744018554688 + ], + [ + "▁BEEN", + -13.465753555297852 + ], + [ + "▁ironically", + -13.465757369995115 + ], + [ + "▁CTA", + -13.46582317352295 + ], + [ + "▁automating", + -13.465867042541504 + ], + [ + "picture", + -13.465876579284668 + ], + [ + "▁$125", + -13.46596336364746 + ], + [ + "▁BPM", + -13.46599292755127 + ], + [ + "recorded", + -13.466034889221191 + ], + [ + "▁jacks", + -13.46613311767578 + ], + [ + "quat", + -13.46619987487793 + ], + [ + "▁caretaker", + -13.466243743896484 + ], + [ + "▁Tick", + 
-13.4662446975708 + ], + [ + "vad", + -13.46632957458496 + ], + [ + "▁Mela", + -13.466424942016602 + ], + [ + "▁questionnaires", + -13.466424942016602 + ], + [ + "▁Impressive", + -13.466483116149902 + ], + [ + "▁brutally", + -13.46652126312256 + ], + [ + "warm", + -13.466526985168455 + ], + [ + "▁Attach", + -13.466547012329102 + ], + [ + "▁megapixel", + -13.466592788696287 + ], + [ + "▁1976.", + -13.466673851013184 + ], + [ + "▁7000", + -13.46672821044922 + ], + [ + "▁Nai", + -13.466753959655762 + ], + [ + "▁Restore", + -13.466795921325684 + ], + [ + "070", + -13.466800689697266 + ], + [ + "▁withhold", + -13.46688461303711 + ], + [ + "▁networked", + -13.466938972473145 + ], + [ + "▁3.9", + -13.46695041656494 + ], + [ + "▁afloat", + -13.467034339904783 + ], + [ + "▁PAR", + -13.467103004455566 + ], + [ + "▁nav", + -13.467103004455566 + ], + [ + "atan", + -13.467123031616213 + ], + [ + "ograph", + -13.467126846313477 + ], + [ + "▁fingerprints", + -13.467140197753906 + ], + [ + "▁Embedded", + -13.467170715332031 + ], + [ + "▁synchronize", + -13.467238426208496 + ], + [ + "▁vat", + -13.467374801635742 + ], + [ + "▁tagline", + -13.467429161071776 + ], + [ + "▁busted", + -13.467524528503418 + ], + [ + "▁Ordinary", + -13.467528343200684 + ], + [ + "▁Pagan", + -13.46753215789795 + ], + [ + "6-7", + -13.467537879943848 + ], + [ + "▁landfills", + -13.467562675476074 + ], + [ + "▁mourn", + -13.467653274536133 + ], + [ + "tama", + -13.467723846435549 + ], + [ + "Prince", + -13.467724800109863 + ], + [ + "▁Weeks", + -13.467788696289062 + ], + [ + "▁FW", + -13.467836380004885 + ], + [ + "NOVA", + -13.46784496307373 + ], + [ + "▁Chung", + -13.467970848083496 + ], + [ + "50)", + -13.467994689941406 + ], + [ + "▁spares", + -13.468012809753418 + ], + [ + "DK", + -13.46803092956543 + ], + [ + "▁Ordinance", + -13.468059539794922 + ], + [ + "▁gratification", + -13.468059539794922 + ], + [ + "▁tragedies", + -13.468059539794922 + ], + [ + "▁Electoral", + -13.468060493469238 + ], + [ + "▁epitome", + -13.468060493469238 + ], + [ + "▁glaring", + -13.468060493469238 + ], + [ + "▁liqueur", + -13.468060493469238 + ], + [ + "▁squeezing", + -13.468060493469238 + ], + [ + "▁dopamine", + -13.4680757522583 + ], + [ + "▁floppy", + -13.468082427978516 + ], + [ + "▁stabilized", + -13.468111991882324 + ], + [ + "▁revolutionize", + -13.468133926391602 + ], + [ + "221", + -13.468218803405762 + ], + [ + "▁uneasy", + -13.468238830566406 + ], + [ + "▁Vivian", + -13.468274116516112 + ], + [ + "▁Blackpool", + -13.468291282653809 + ], + [ + "▁savoury", + -13.46829605102539 + ], + [ + "▁£15", + -13.468317985534668 + ], + [ + "▁Chew", + -13.468381881713867 + ], + [ + "▁Str", + -13.468461990356444 + ], + [ + "▁narrated", + -13.46846866607666 + ], + [ + "▁Nero", + -13.46853733062744 + ], + [ + "▁aerodynamic", + -13.468586921691896 + ], + [ + "▁jealousy", + -13.468775749206545 + ], + [ + "▁PDA", + -13.468908309936523 + ], + [ + "nx", + -13.468934059143066 + ], + [ + "▁railways", + -13.468985557556152 + ], + [ + "▁CPS", + -13.46901798248291 + ], + [ + "▁Burial", + -13.469036102294922 + ], + [ + "▁Ris", + -13.469265937805176 + ], + [ + "308", + -13.4694242477417 + ], + [ + "▁Turk", + -13.469465255737305 + ], + [ + "gha", + -13.469598770141602 + ], + [ + "▁Pes", + -13.469603538513184 + ], + [ + "solar", + -13.469644546508787 + ], + [ + "▁implying", + -13.46967315673828 + ], + [ + "▁Soy", + -13.469730377197266 + ], + [ + "eem", + -13.469761848449709 + ], + [ + "▁Qa", + -13.469792366027832 + ], + [ + "▁Superb", + -13.469794273376465 + ], + [ + 
"▁Mindfulness", + -13.46979522705078 + ], + [ + "▁159", + -13.46980094909668 + ], + [ + "▁Provence", + -13.469844818115234 + ], + [ + "▁EHR", + -13.470027923583984 + ], + [ + "Sara", + -13.470081329345703 + ], + [ + "nf", + -13.470091819763184 + ], + [ + "Comm", + -13.470232009887695 + ], + [ + "thal", + -13.470269203186035 + ], + [ + "ohn", + -13.470311164855955 + ], + [ + "Charge", + -13.470385551452637 + ], + [ + "▁Toner", + -13.470386505126951 + ], + [ + "Version", + -13.470419883728027 + ], + [ + "Gorgeous", + -13.470443725585938 + ], + [ + "traffic", + -13.470446586608888 + ], + [ + "▁Senegal", + -13.470478057861328 + ], + [ + "▁Macintosh", + -13.470480918884276 + ], + [ + "▁Sutherland", + -13.470480918884276 + ], + [ + "▁celebratory", + -13.470480918884276 + ], + [ + "▁licensure", + -13.470480918884276 + ], + [ + "▁renaissance", + -13.470480918884276 + ], + [ + "▁monologue", + -13.47048568725586 + ], + [ + "▁McLean", + -13.470494270324709 + ], + [ + "▁PPP", + -13.470504760742188 + ], + [ + "▁potluck", + -13.470508575439451 + ], + [ + "Donnell", + -13.470528602600098 + ], + [ + "Relax", + -13.470566749572754 + ], + [ + "▁Helmet", + -13.47059726715088 + ], + [ + "▁peptide", + -13.470669746398926 + ], + [ + "Rod", + -13.470720291137695 + ], + [ + "ADO", + -13.47081184387207 + ], + [ + "dul", + -13.470901489257812 + ], + [ + "▁extremist", + -13.470921516418455 + ], + [ + "▁Mellon", + -13.47097110748291 + ], + [ + "▁ligament", + -13.471043586730955 + ], + [ + "▁Alas", + -13.47104835510254 + ], + [ + "1-9", + -13.471111297607422 + ], + [ + "▁admins", + -13.471142768859863 + ], + [ + "aris", + -13.471162796020508 + ], + [ + "▁Deere", + -13.471182823181152 + ], + [ + "▁DF", + -13.471187591552734 + ], + [ + "mente", + -13.47124195098877 + ], + [ + "▁harnesses", + -13.471303939819336 + ], + [ + "Officials", + -13.47131061553955 + ], + [ + "▁florists", + -13.471447944641112 + ], + [ + "tting", + -13.471616744995115 + ], + [ + "▁stillness", + -13.471625328063965 + ], + [ + "▁KR", + -13.4717435836792 + ], + [ + "▁EVERYTHING", + -13.471840858459473 + ], + [ + "▁KW", + -13.471851348876951 + ], + [ + "yeah", + -13.471896171569824 + ], + [ + "▁Brought", + -13.471903800964355 + ], + [ + "▁Gig", + -13.47193431854248 + ], + [ + "topia", + -13.47194480895996 + ], + [ + "▁truffles", + -13.471945762634276 + ], + [ + "▁Brits", + -13.471962928771973 + ], + [ + "emann", + -13.472009658813477 + ], + [ + "▁WY", + -13.472057342529297 + ], + [ + "▁Payroll", + -13.472084999084473 + ], + [ + "▁dependant", + -13.472159385681152 + ], + [ + "▁relics", + -13.472193717956545 + ], + [ + "▁webpages", + -13.472264289855955 + ], + [ + "▁303", + -13.472325325012209 + ], + [ + "▁marrying", + -13.47238540649414 + ], + [ + "yman", + -13.472393035888672 + ], + [ + "ception", + -13.472450256347656 + ], + [ + "nominated", + -13.472464561462402 + ], + [ + "▁suing", + -13.472495079040527 + ], + [ + "-2011", + -13.472514152526855 + ], + [ + "▁Requires", + -13.472676277160645 + ], + [ + "▁joking", + -13.472685813903809 + ], + [ + "▁Timing", + -13.472744941711426 + ], + [ + "Loved", + -13.472784042358398 + ], + [ + "▁arithmetic", + -13.472908020019531 + ], + [ + "▁cognition", + -13.472908973693848 + ], + [ + "▁Pebble", + -13.47291374206543 + ], + [ + "▁unlucky", + -13.472949028015137 + ], + [ + "▁eyesight", + -13.4730806350708 + ], + [ + "erra", + -13.473129272460938 + ], + [ + "nger", + -13.47314167022705 + ], + [ + "uting", + -13.47336196899414 + ], + [ + "▁drunken", + -13.473362922668455 + ], + [ + "▁Prada", + -13.473478317260742 + 
], + [ + "▁constellation", + -13.473492622375488 + ], + [ + "7.6", + -13.473563194274902 + ], + [ + "▁outpost", + -13.473566055297852 + ], + [ + "▁30+", + -13.473679542541504 + ], + [ + "▁Torch", + -13.47377109527588 + ], + [ + "nette", + -13.473810195922852 + ], + [ + "pathy", + -13.473894119262695 + ], + [ + "been", + -13.474016189575195 + ], + [ + "▁Moldova", + -13.47415542602539 + ], + [ + "throp", + -13.474220275878906 + ], + [ + "▁hairy", + -13.474246978759766 + ], + [ + "▁Linked", + -13.474268913269045 + ], + [ + "▁Butcher", + -13.474284172058104 + ], + [ + "Stack", + -13.474538803100586 + ], + [ + "▁deepening", + -13.474552154541016 + ], + [ + "▁Ext", + -13.474742889404297 + ], + [ + "▁peruse", + -13.47478485107422 + ], + [ + "▁curricula", + -13.474884033203123 + ], + [ + "▁$2.5", + -13.474934577941896 + ], + [ + "psi", + -13.474936485290527 + ], + [ + "mok", + -13.474989891052246 + ], + [ + "▁Westwood", + -13.475028038024902 + ], + [ + "▁Shak", + -13.475037574768066 + ], + [ + "▁abreast", + -13.47508144378662 + ], + [ + "▁18,000", + -13.47510814666748 + ], + [ + "▁Travels", + -13.475143432617188 + ], + [ + "▁freebies", + -13.475176811218262 + ], + [ + "▁29-", + -13.475196838378906 + ], + [ + "▁Bah", + -13.475204467773438 + ], + [ + "▁WM", + -13.475205421447754 + ], + [ + "▁tides", + -13.475207328796388 + ], + [ + "▁exploratory", + -13.475341796875 + ], + [ + "▁resurgence", + -13.475341796875 + ], + [ + "▁bouncy", + -13.475342750549316 + ], + [ + "▁nimble", + -13.475358963012695 + ], + [ + "▁unpopular", + -13.475418090820312 + ], + [ + "▁PERFECT", + -13.475422859191896 + ], + [ + "▁Camille", + -13.475430488586426 + ], + [ + "Emergency", + -13.475434303283691 + ], + [ + "▁Minority", + -13.47544288635254 + ], + [ + "▁grapple", + -13.475449562072754 + ], + [ + "▁microSD", + -13.475451469421388 + ], + [ + "▁Zy", + -13.475542068481444 + ], + [ + "▁1899", + -13.475564002990724 + ], + [ + "Innovation", + -13.475567817687988 + ], + [ + "▁wastes", + -13.475619316101074 + ], + [ + "bp", + -13.47563934326172 + ], + [ + "bles", + -13.475680351257324 + ], + [ + "он", + -13.475740432739258 + ], + [ + "beta", + -13.475775718688965 + ], + [ + "▁Remix", + -13.475875854492188 + ], + [ + "▁Corpus", + -13.47591495513916 + ], + [ + "chuk", + -13.47592830657959 + ], + [ + "▁Furnace", + -13.475969314575195 + ], + [ + "bane", + -13.475970268249512 + ], + [ + "syn", + -13.47598648071289 + ], + [ + "▁Colon", + -13.476011276245115 + ], + [ + "▁DHL", + -13.47602367401123 + ], + [ + "capital", + -13.476052284240724 + ], + [ + "▁Salsa", + -13.476136207580566 + ], + [ + "▁Tammy", + -13.47616481781006 + ], + [ + "BTW", + -13.476205825805664 + ], + [ + "▁abiding", + -13.476216316223145 + ], + [ + "▁unfolded", + -13.476344108581545 + ], + [ + "▁Connecting", + -13.47639274597168 + ], + [ + "tele", + -13.47640323638916 + ], + [ + "▁1977.", + -13.476434707641602 + ], + [ + "▁DN", + -13.476438522338867 + ], + [ + "OU", + -13.476441383361816 + ], + [ + "▁Lovers", + -13.476454734802246 + ], + [ + "▁Sheldon", + -13.476468086242676 + ], + [ + "▁Exodus", + -13.476506233215332 + ], + [ + "▁Raton", + -13.476598739624023 + ], + [ + "▁Ease", + -13.47667121887207 + ], + [ + "pez", + -13.476679801940918 + ], + [ + "▁adhesives", + -13.476826667785645 + ], + [ + "▁Drawer", + -13.476846694946287 + ], + [ + "eron", + -13.476881980895996 + ], + [ + "lez", + -13.477018356323242 + ], + [ + "CBC", + -13.477021217346191 + ], + [ + "▁composting", + -13.477133750915527 + ], + [ + "▁Fate", + -13.477301597595217 + ], + [ + "▁wards", + 
-13.477359771728516 + ], + [ + "▁showrooms", + -13.477380752563477 + ], + [ + "NX", + -13.477540969848633 + ], + [ + "▁terra", + -13.47757053375244 + ], + [ + "rev", + -13.477606773376465 + ], + [ + "▁clarifying", + -13.477614402770996 + ], + [ + "▁lactose", + -13.477638244628906 + ], + [ + "leo", + -13.47768783569336 + ], + [ + "▁composites", + -13.477690696716309 + ], + [ + "▁indemnify", + -13.47778034210205 + ], + [ + "▁pancreas", + -13.47778034210205 + ], + [ + "▁bottling", + -13.477781295776367 + ], + [ + "▁cobalt", + -13.477784156799316 + ], + [ + "▁Pleasure", + -13.477789878845217 + ], + [ + "▁issuer", + -13.47780418395996 + ], + [ + "▁Emanuel", + -13.477828979492188 + ], + [ + "▁Hutchinson", + -13.477832794189451 + ], + [ + "▁unequal", + -13.47783660888672 + ], + [ + "▁hostility", + -13.4778470993042 + ], + [ + "▁EPUB", + -13.477899551391602 + ], + [ + "Secret", + -13.47791862487793 + ], + [ + "Easily", + -13.477924346923828 + ], + [ + "▁deformation", + -13.477938652038574 + ], + [ + "exempt", + -13.477941513061523 + ], + [ + "▁canister", + -13.47795867919922 + ], + [ + "hap", + -13.478013038635254 + ], + [ + "Sugar", + -13.47802734375 + ], + [ + "major", + -13.478039741516112 + ], + [ + "▁predefined", + -13.478079795837402 + ], + [ + "▁Lehigh", + -13.478090286254885 + ], + [ + "▁Tight", + -13.478097915649414 + ], + [ + "▁Limestone", + -13.478140830993652 + ], + [ + "▁eased", + -13.478140830993652 + ], + [ + "RX", + -13.478144645690918 + ], + [ + "▁Roasted", + -13.47814655303955 + ], + [ + "Tips", + -13.478211402893066 + ], + [ + "rence", + -13.47821807861328 + ], + [ + "ulous", + -13.478300094604492 + ], + [ + "Runner", + -13.478382110595703 + ], + [ + "▁perfecting", + -13.47839641571045 + ], + [ + "▁walkthrough", + -13.47840404510498 + ], + [ + "▁SMA", + -13.478424072265623 + ], + [ + "606", + -13.478473663330078 + ], + [ + "▁Equipped", + -13.478513717651367 + ], + [ + "patch", + -13.47858715057373 + ], + [ + "Businesses", + -13.478652954101562 + ], + [ + "▁budgetary", + -13.478683471679688 + ], + [ + "▁Sweat", + -13.478754043579102 + ], + [ + "▁Edu", + -13.47883415222168 + ], + [ + "▁nuggets", + -13.478857040405272 + ], + [ + "mobil", + -13.478936195373535 + ], + [ + "▁provisioning", + -13.4789400100708 + ], + [ + "rash", + -13.47898292541504 + ], + [ + "▁36-", + -13.479209899902344 + ], + [ + "▁shatter", + -13.47922134399414 + ], + [ + "sonic", + -13.47923183441162 + ], + [ + "▁Patron", + -13.47926902770996 + ], + [ + "▁Locker", + -13.479339599609377 + ], + [ + "▁defer", + -13.479347229003906 + ], + [ + "▁Choices", + -13.47934913635254 + ], + [ + "▁$700", + -13.47938060760498 + ], + [ + "▁immuno", + -13.479464530944824 + ], + [ + "▁violently", + -13.479514122009276 + ], + [ + "▁Ranking", + -13.479610443115234 + ], + [ + "▁kissing", + -13.4796142578125 + ], + [ + "▁concede", + -13.47962760925293 + ], + [ + "CLOSE", + -13.479751586914062 + ], + [ + "040", + -13.479769706726074 + ], + [ + "117", + -13.479992866516112 + ], + [ + "▁bristles", + -13.48000144958496 + ], + [ + "hing", + -13.480059623718262 + ], + [ + "hands", + -13.48010540008545 + ], + [ + "oop", + -13.480119705200195 + ], + [ + "▁petrochemical", + -13.480226516723633 + ], + [ + "▁Sinclair", + -13.480230331420898 + ], + [ + "▁Verification", + -13.480273246765137 + ], + [ + "▁Biography", + -13.48027515411377 + ], + [ + "▁blurry", + -13.480308532714844 + ], + [ + "exit", + -13.48032569885254 + ], + [ + "▁Engage", + -13.48035717010498 + ], + [ + "▁Cowboy", + -13.48036003112793 + ], + [ + "▁McKenzie", + 
-13.480369567871094 + ], + [ + "▁Hiro", + -13.480389595031738 + ], + [ + "Ocean", + -13.480408668518066 + ], + [ + "bonne", + -13.48041820526123 + ], + [ + "▁Generate", + -13.480430603027344 + ], + [ + "540", + -13.48050022125244 + ], + [ + "▁2016-17", + -13.480504035949709 + ], + [ + "▁thresholds", + -13.480531692504885 + ], + [ + "▁workday", + -13.48055362701416 + ], + [ + "▁BMC", + -13.480629920959473 + ], + [ + "273", + -13.480636596679688 + ], + [ + "▁Covering", + -13.480658531188965 + ], + [ + "▁Limousine", + -13.480690002441406 + ], + [ + "oshi", + -13.48070240020752 + ], + [ + "▁Phones", + -13.480710983276367 + ], + [ + "▁$39", + -13.480863571166992 + ], + [ + "Reports", + -13.480870246887209 + ], + [ + "▁dermatologist", + -13.480897903442385 + ], + [ + "yah", + -13.480907440185549 + ], + [ + "oops", + -13.480931282043455 + ], + [ + "▁diverted", + -13.480938911437988 + ], + [ + "80%", + -13.48112678527832 + ], + [ + "▁2-2", + -13.481134414672852 + ], + [ + "▁Mention", + -13.481135368347168 + ], + [ + "yle", + -13.481164932250977 + ], + [ + "Paint", + -13.481186866760254 + ], + [ + "▁Braid", + -13.48121452331543 + ], + [ + "▁Browne", + -13.481261253356934 + ], + [ + "▁Flavor", + -13.481261253356934 + ], + [ + "▁seeming", + -13.4812650680542 + ], + [ + "0.000", + -13.481268882751465 + ], + [ + "Wheel", + -13.481430053710938 + ], + [ + "DON", + -13.481433868408203 + ], + [ + "▁halted", + -13.481462478637695 + ], + [ + "▁underestimated", + -13.481515884399414 + ], + [ + "/2012", + -13.481579780578612 + ], + [ + "▁50+", + -13.481597900390623 + ], + [ + "▁Jurassic", + -13.481639862060549 + ], + [ + "▁kayaks", + -13.481645584106444 + ], + [ + "Hack", + -13.481711387634276 + ], + [ + "▁pact", + -13.481778144836426 + ], + [ + "▁DATA", + -13.481843948364258 + ], + [ + "disk", + -13.481955528259276 + ], + [ + "xin", + -13.482036590576172 + ], + [ + "▁expandable", + -13.482041358947754 + ], + [ + "▁Gentle", + -13.48206901550293 + ], + [ + "▁cautioned", + -13.48206901550293 + ], + [ + "▁lockers", + -13.482073783874512 + ], + [ + "HH", + -13.482240676879885 + ], + [ + "▁MEP", + -13.482248306274414 + ], + [ + "▁Exhaust", + -13.482260704040527 + ], + [ + "▁ECB", + -13.482349395751951 + ], + [ + "▁Travelers", + -13.482362747192385 + ], + [ + "▁solidify", + -13.482441902160645 + ], + [ + "▁Revised", + -13.482443809509276 + ], + [ + "▁Neptune", + -13.48245334625244 + ], + [ + "▁glittering", + -13.482549667358398 + ], + [ + "vote", + -13.482574462890623 + ], + [ + "▁Requests", + -13.482592582702637 + ], + [ + "▁bookshelves", + -13.48267650604248 + ], + [ + "▁splurge", + -13.48267650604248 + ], + [ + "▁Pigeon", + -13.482683181762695 + ], + [ + "▁bittersweet", + -13.482693672180176 + ], + [ + "Successful", + -13.482718467712402 + ], + [ + "▁populous", + -13.482718467712402 + ], + [ + "▁behaving", + -13.482720375061035 + ], + [ + "▁roaster", + -13.482769966125488 + ], + [ + "▁Woodward", + -13.482861518859863 + ], + [ + "▁Belgrade", + -13.482871055603027 + ], + [ + "Ireland", + -13.48298168182373 + ], + [ + "chlor", + -13.483050346374512 + ], + [ + "oste", + -13.483052253723145 + ], + [ + "▁JM", + -13.483187675476074 + ], + [ + "York", + -13.48319149017334 + ], + [ + "▁Yuan", + -13.483202934265137 + ], + [ + "▁sculpting", + -13.48321533203125 + ], + [ + "▁tenderness", + -13.483345985412598 + ], + [ + "Donald", + -13.483352661132812 + ], + [ + "▁164", + -13.483355522155762 + ], + [ + "▁biologist", + -13.483436584472656 + ], + [ + "5′′", + -13.483560562133787 + ], + [ + "▁MAG", + -13.48362636566162 + ], + [ 
+ "▁Luxe", + -13.48365306854248 + ], + [ + "▁roadways", + -13.483747482299805 + ], + [ + "akh", + -13.483749389648438 + ], + [ + "▁Medina", + -13.48376178741455 + ], + [ + "dley", + -13.483774185180664 + ], + [ + "▁sem", + -13.483837127685549 + ], + [ + "▁sneaky", + -13.483844757080078 + ], + [ + "rump", + -13.483914375305176 + ], + [ + "▁matchmaking", + -13.483963966369627 + ], + [ + "▁pertain", + -13.483979225158691 + ], + [ + "▁ventilated", + -13.48402976989746 + ], + [ + "rage", + -13.484036445617676 + ], + [ + "▁penguins", + -13.484046936035156 + ], + [ + "6.00", + -13.484073638916016 + ], + [ + "▁heartbreak", + -13.484127044677734 + ], + [ + "▁Lore", + -13.484159469604492 + ], + [ + "oper", + -13.484185218811035 + ], + [ + "▁palaces", + -13.48428726196289 + ], + [ + "▁Dou", + -13.484346389770508 + ], + [ + "▁173", + -13.484365463256836 + ], + [ + "▁decked", + -13.48438835144043 + ], + [ + "▁delightfully", + -13.484399795532228 + ], + [ + "▁hierarchical", + -13.48448085784912 + ], + [ + "▁158", + -13.484512329101562 + ], + [ + "▁Falling", + -13.484570503234863 + ], + [ + "▁bestseller", + -13.484603881835938 + ], + [ + "▁moulding", + -13.4846830368042 + ], + [ + "▁Aura", + -13.484707832336426 + ], + [ + "dge", + -13.484719276428224 + ], + [ + "▁Rookie", + -13.484889030456545 + ], + [ + "560", + -13.484947204589844 + ], + [ + "lous", + -13.485010147094728 + ], + [ + "WL", + -13.485061645507812 + ], + [ + "▁Galveston", + -13.485133171081545 + ], + [ + "▁Mauritius", + -13.485133171081545 + ], + [ + "▁deodorant", + -13.485133171081545 + ], + [ + "▁perforated", + -13.485133171081545 + ], + [ + "▁undisclosed", + -13.485133171081545 + ], + [ + "▁leukemia", + -13.485135078430176 + ], + [ + "▁Warfare", + -13.485180854797363 + ], + [ + "▁ghee", + -13.485189437866213 + ], + [ + "SES", + -13.485237121582031 + ], + [ + "▁Cornish", + -13.485239028930664 + ], + [ + "mans", + -13.48524284362793 + ], + [ + "▁slats", + -13.485246658325195 + ], + [ + "▁chipping", + -13.485255241394045 + ], + [ + "▁PAL", + -13.485271453857422 + ], + [ + "▁reserving", + -13.485368728637695 + ], + [ + "▁crock", + -13.485371589660645 + ], + [ + "▁Collin", + -13.485394477844238 + ], + [ + "idge", + -13.4854097366333 + ], + [ + "truth", + -13.485461235046388 + ], + [ + "▁Elsa", + -13.485466003417969 + ], + [ + "Mexico", + -13.485478401184082 + ], + [ + "▁Stanton", + -13.485566139221191 + ], + [ + "▁lightest", + -13.485575675964355 + ], + [ + "tara", + -13.485620498657228 + ], + [ + "iran", + -13.48564910888672 + ], + [ + "Davidson", + -13.48568344116211 + ], + [ + "▁Ajax", + -13.485705375671388 + ], + [ + "▁principally", + -13.485715866088867 + ], + [ + "▁Comparative", + -13.485729217529297 + ], + [ + "▁dé", + -13.48574161529541 + ], + [ + "▁Boil", + -13.485767364501951 + ], + [ + "kova", + -13.48580265045166 + ], + [ + "▁167", + -13.485804557800291 + ], + [ + "▁20\"", + -13.485815048217772 + ], + [ + "▁Outline", + -13.485941886901855 + ], + [ + "▁twenties", + -13.48595142364502 + ], + [ + "movie", + -13.48601531982422 + ], + [ + "▁Christi", + -13.486053466796877 + ], + [ + "Ride", + -13.48609733581543 + ], + [ + "▁Crate", + -13.486136436462402 + ], + [ + "PK", + -13.486170768737791 + ], + [ + "▁Cine", + -13.486227989196776 + ], + [ + "Course", + -13.486306190490724 + ], + [ + "▁11:", + -13.486309051513672 + ], + [ + "Developing", + -13.48636531829834 + ], + [ + "▁Component", + -13.486373901367188 + ], + [ + "gray", + -13.486392974853516 + ], + [ + "▁SUN", + -13.486464500427246 + ], + [ + "▁morph", + -13.48651885986328 + ], + 
[ + "▁MAT", + -13.486525535583496 + ], + [ + "bers", + -13.486684799194336 + ], + [ + "▁Alongside", + -13.48670482635498 + ], + [ + "6.4", + -13.486876487731934 + ], + [ + "▁preclude", + -13.486884117126465 + ], + [ + "▁Interim", + -13.486920356750488 + ], + [ + "▁Abs", + -13.486950874328612 + ], + [ + "▁ATMs", + -13.486953735351562 + ], + [ + "Warm", + -13.486973762512209 + ], + [ + "406", + -13.487035751342772 + ], + [ + "▁catalytic", + -13.487116813659668 + ], + [ + "▁Reservation", + -13.48715591430664 + ], + [ + "tah", + -13.487162590026855 + ], + [ + "▁Voucher", + -13.487241744995115 + ], + [ + "▁Sl", + -13.487342834472656 + ], + [ + "▁Insect", + -13.487417221069336 + ], + [ + "toy", + -13.487422943115234 + ], + [ + "tori", + -13.48745346069336 + ], + [ + "ordinator", + -13.487476348876951 + ], + [ + "▁Perspective", + -13.487528800964355 + ], + [ + "▁frugal", + -13.48756217956543 + ], + [ + "▁Ing", + -13.487568855285645 + ], + [ + "▁Printers", + -13.487586975097656 + ], + [ + "▁GMO", + -13.487587928771973 + ], + [ + "▁celestial", + -13.48759651184082 + ], + [ + "▁emulsion", + -13.48759651184082 + ], + [ + "▁promenade", + -13.48759651184082 + ], + [ + "▁Odisha", + -13.487600326538086 + ], + [ + "▁Trafford", + -13.487600326538086 + ], + [ + "▁Onion", + -13.487611770629885 + ], + [ + "▁venturing", + -13.4876127243042 + ], + [ + "▁Whiskey", + -13.487618446350098 + ], + [ + "▁graveyard", + -13.487651824951172 + ], + [ + "▁condemnation", + -13.487676620483398 + ], + [ + "UND", + -13.487822532653809 + ], + [ + "▁etching", + -13.487848281860352 + ], + [ + "▁Authorized", + -13.487909317016602 + ], + [ + "▁1981,", + -13.487930297851562 + ], + [ + "temperature", + -13.487985610961914 + ], + [ + "Explain", + -13.487991333007812 + ], + [ + "▁Concerto", + -13.488011360168455 + ], + [ + "▁Clause", + -13.488018035888672 + ], + [ + "pretty", + -13.488019943237305 + ], + [ + "▁Punk", + -13.48807144165039 + ], + [ + "▁indulgent", + -13.488091468811035 + ], + [ + "Import", + -13.48811149597168 + ], + [ + "▁Jenn", + -13.488163948059082 + ], + [ + "▁(2006)", + -13.488219261169434 + ], + [ + "eter", + -13.488245010375977 + ], + [ + "▁prevailed", + -13.488253593444824 + ], + [ + "▁ridges", + -13.488286018371582 + ], + [ + "rana", + -13.488344192504885 + ], + [ + "Ideally", + -13.488363265991213 + ], + [ + "deserved", + -13.48836612701416 + ], + [ + "▁Fidelity", + -13.48837661743164 + ], + [ + "scapes", + -13.488397598266602 + ], + [ + "trail", + -13.488410949707031 + ], + [ + "▁donut", + -13.488527297973633 + ], + [ + "Environmental", + -13.488563537597656 + ], + [ + "bring", + -13.48859405517578 + ], + [ + "▁councillors", + -13.488597869873049 + ], + [ + "gay", + -13.488621711730955 + ], + [ + "▁Raider", + -13.48862648010254 + ], + [ + "134", + -13.488699913024902 + ], + [ + "▁polishes", + -13.488713264465332 + ], + [ + "▁flourished", + -13.48873233795166 + ], + [ + "tana", + -13.488761901855469 + ], + [ + "Fan", + -13.488773345947266 + ], + [ + "Organic", + -13.488799095153809 + ], + [ + "vista", + -13.48880100250244 + ], + [ + "▁SOME", + -13.488811492919922 + ], + [ + "▁willow", + -13.488876342773438 + ], + [ + "▁Promotions", + -13.488959312438965 + ], + [ + "▁genomic", + -13.489007949829102 + ], + [ + "elman", + -13.489038467407228 + ], + [ + "▁Empty", + -13.48909854888916 + ], + [ + "▁GCC", + -13.48914623260498 + ], + [ + "Qua", + -13.48922061920166 + ], + [ + "▁blisters", + -13.48927879333496 + ], + [ + "kirk", + -13.48938274383545 + ], + [ + "▁Pickup", + -13.489383697509766 + ], + [ + "▁dismantle", + 
-13.489466667175291 + ], + [ + "▁Muk", + -13.48948860168457 + ], + [ + "Few", + -13.489618301391602 + ], + [ + "iste", + -13.489663124084473 + ], + [ + "▁bc", + -13.489694595336914 + ], + [ + "Haha", + -13.489798545837402 + ], + [ + "▁volts", + -13.489816665649414 + ], + [ + "Ford", + -13.489882469177246 + ], + [ + "▁Hawke", + -13.489951133728027 + ], + [ + "▁Neon", + -13.489954948425291 + ], + [ + "▁Needed", + -13.489995002746582 + ], + [ + "angel", + -13.490034103393556 + ], + [ + "▁Mess", + -13.490056037902832 + ], + [ + "▁Northumberland", + -13.490065574645996 + ], + [ + "▁moniker", + -13.490067481994627 + ], + [ + "▁smudge", + -13.490067481994627 + ], + [ + "▁carpentry", + -13.490068435668944 + ], + [ + "▁174", + -13.490084648132324 + ], + [ + "▁Nak", + -13.490089416503906 + ], + [ + "▁Haynes", + -13.49010181427002 + ], + [ + "▁Fel", + -13.490113258361816 + ], + [ + "▁nagging", + -13.490118026733398 + ], + [ + "▁Dew", + -13.490179061889648 + ], + [ + "168", + -13.490242958068848 + ], + [ + "▁lathe", + -13.490249633789062 + ], + [ + "▁Egyptians", + -13.49025821685791 + ], + [ + "▁crackdown", + -13.490326881408691 + ], + [ + "▁Kern", + -13.490338325500488 + ], + [ + "MRI", + -13.490367889404297 + ], + [ + "▁readership", + -13.490461349487305 + ], + [ + "▁Horses", + -13.490503311157228 + ], + [ + "Gear", + -13.490509033203123 + ], + [ + "Louis", + -13.49057674407959 + ], + [ + "▁Airtel", + -13.49057960510254 + ], + [ + "▁newborns", + -13.490598678588867 + ], + [ + "▁relentlessly", + -13.490610122680664 + ], + [ + "Chef", + -13.490612030029297 + ], + [ + "▁sto", + -13.490630149841309 + ], + [ + "vant", + -13.490639686584473 + ], + [ + "▁13,000", + -13.490690231323242 + ], + [ + "▁Johan", + -13.49069595336914 + ], + [ + "▁Yong", + -13.490697860717772 + ], + [ + "▁subsidized", + -13.490768432617188 + ], + [ + "▁dat", + -13.490779876708984 + ], + [ + "▁watchful", + -13.490835189819336 + ], + [ + "▁locomotive", + -13.490867614746094 + ], + [ + "▁likeness", + -13.490986824035645 + ], + [ + "118", + -13.491016387939451 + ], + [ + "▁EW", + -13.491023063659668 + ], + [ + "▁(2013).", + -13.491161346435549 + ], + [ + "tips", + -13.49121379852295 + ], + [ + "baum", + -13.491230010986328 + ], + [ + "mata", + -13.491286277770996 + ], + [ + "▁Chemicals", + -13.49129867553711 + ], + [ + "▁Sed", + -13.491399765014648 + ], + [ + "▁semesters", + -13.49143409729004 + ], + [ + "YL", + -13.491501808166504 + ], + [ + "▁Oo", + -13.491521835327148 + ], + [ + "equity", + -13.491656303405762 + ], + [ + "▁Yama", + -13.491689682006836 + ], + [ + "▁Clone", + -13.491692543029783 + ], + [ + "▁hydrate", + -13.49191188812256 + ], + [ + "▁Sko", + -13.492008209228516 + ], + [ + "CENT", + -13.492016792297363 + ], + [ + "▁Weird", + -13.492025375366213 + ], + [ + "▁Dice", + -13.492029190063477 + ], + [ + "export", + -13.492079734802246 + ], + [ + "▁31-", + -13.492164611816406 + ], + [ + "▁Balloon", + -13.49225902557373 + ], + [ + "▁coolers", + -13.492271423339844 + ], + [ + "▁RCA", + -13.492290496826172 + ], + [ + "▁2025", + -13.492292404174805 + ], + [ + "▁Cups", + -13.49234676361084 + ], + [ + "▁HOUSE", + -13.49234676361084 + ], + [ + "▁incomparable", + -13.492364883422852 + ], + [ + "woods", + -13.492403030395508 + ], + [ + "▁Burg", + -13.492448806762695 + ], + [ + "▁circumference", + -13.492541313171388 + ], + [ + "▁lubrication", + -13.492541313171388 + ], + [ + "▁polycarbonate", + -13.492544174194336 + ], + [ + "▁discipleship", + -13.49256706237793 + ], + [ + "▁Bryce", + -13.492581367492676 + ], + [ + "▁Orientation", + 
-13.492588996887209 + ], + [ + "▁Brentwood", + -13.492615699768066 + ], + [ + "▁excelled", + -13.492701530456545 + ], + [ + "▁166", + -13.492714881896973 + ], + [ + "dryer", + -13.492843627929688 + ], + [ + "▁faithfulness", + -13.492915153503418 + ], + [ + "▁MPH", + -13.492923736572266 + ], + [ + "▁bikers", + -13.492959976196287 + ], + [ + "▁chauffeur", + -13.492964744567873 + ], + [ + "▁AO", + -13.492986679077148 + ], + [ + "jaw", + -13.4930419921875 + ], + [ + "Exercise", + -13.493045806884766 + ], + [ + "▁explosives", + -13.49305248260498 + ], + [ + "mai", + -13.49307918548584 + ], + [ + "eux", + -13.493124961853027 + ], + [ + "▁masala", + -13.493139266967772 + ], + [ + "Insurance", + -13.493144035339355 + ], + [ + "▁distrust", + -13.493170738220217 + ], + [ + "kay", + -13.493210792541504 + ], + [ + "RIA", + -13.493243217468262 + ], + [ + "ATED", + -13.493246078491213 + ], + [ + "▁addictions", + -13.49325942993164 + ], + [ + "233", + -13.49335765838623 + ], + [ + "▁hangover", + -13.493369102478027 + ], + [ + "▁Alam", + -13.493483543395996 + ], + [ + "-2008", + -13.493513107299805 + ], + [ + "fixed", + -13.493577003479004 + ], + [ + "▁strikeouts", + -13.493593215942385 + ], + [ + "pcs", + -13.49363613128662 + ], + [ + "lady", + -13.493744850158691 + ], + [ + "31.", + -13.49376106262207 + ], + [ + "▁Cesar", + -13.493776321411133 + ], + [ + "09.", + -13.493856430053713 + ], + [ + "▁congrats", + -13.49385929107666 + ], + [ + "▁1,400", + -13.49388313293457 + ], + [ + "RATE", + -13.493907928466797 + ], + [ + "▁Freak", + -13.49390983581543 + ], + [ + "▁cheated", + -13.493936538696287 + ], + [ + "▁Wilder", + -13.49407958984375 + ], + [ + "▁Alberto", + -13.494152069091797 + ], + [ + "▁1865", + -13.494159698486328 + ], + [ + "▁isnt", + -13.494193077087402 + ], + [ + "Driver", + -13.494242668151855 + ], + [ + "Lauren", + -13.494254112243652 + ], + [ + "765", + -13.494258880615234 + ], + [ + "▁primed", + -13.494269371032717 + ], + [ + "625", + -13.494346618652344 + ], + [ + "7.8", + -13.494426727294922 + ], + [ + "▁overboard", + -13.49443531036377 + ], + [ + "guest", + -13.494473457336426 + ], + [ + "Exporter", + -13.494529724121094 + ], + [ + "▁dyeing", + -13.49456024169922 + ], + [ + "Hit", + -13.494614601135254 + ], + [ + "▁Critics", + -13.494626998901367 + ], + [ + "Profit", + -13.494796752929688 + ], + [ + "▁Bust", + -13.494900703430176 + ], + [ + "▁Noon", + -13.495007514953612 + ], + [ + "▁Hemisphere", + -13.495023727416992 + ], + [ + "▁jigsaw", + -13.495028495788574 + ], + [ + "770", + -13.495043754577637 + ], + [ + "▁Carla", + -13.495075225830078 + ], + [ + "lium", + -13.495095252990724 + ], + [ + "oca", + -13.495125770568848 + ], + [ + "▁accordion", + -13.495132446289062 + ], + [ + "▁Stirling", + -13.495200157165527 + ], + [ + "▁pigmentation", + -13.495224952697754 + ], + [ + "▁paused", + -13.495227813720703 + ], + [ + "Package", + -13.495254516601562 + ], + [ + "shaft", + -13.4953031539917 + ], + [ + "Ly", + -13.49532985687256 + ], + [ + "▁sincerity", + -13.495332717895508 + ], + [ + "▁guise", + -13.495367050170898 + ], + [ + "statement", + -13.495402336120604 + ], + [ + "-500", + -13.495407104492188 + ], + [ + "▁polluted", + -13.495407104492188 + ], + [ + "▁skillfully", + -13.49540901184082 + ], + [ + "ahu", + -13.495491981506348 + ], + [ + "▁Mg", + -13.495563507080078 + ], + [ + "Lift", + -13.49557113647461 + ], + [ + "▁2:1", + -13.49560260772705 + ], + [ + "▁IELTS", + -13.495603561401367 + ], + [ + "Demand", + -13.495615005493164 + ], + [ + "gallery", + -13.495623588562012 + ], + [ + 
"▁Athletes", + -13.495624542236328 + ], + [ + "▁gangster", + -13.495628356933594 + ], + [ + "▁crimp", + -13.49563217163086 + ], + [ + "▁Historically", + -13.495662689208984 + ], + [ + "kir", + -13.495673179626465 + ], + [ + "Jackson", + -13.495687484741213 + ], + [ + "Dead", + -13.495702743530272 + ], + [ + "▁TOO", + -13.495741844177246 + ], + [ + "▁trimmer", + -13.495759963989258 + ], + [ + "vé", + -13.495776176452637 + ], + [ + "5-5", + -13.495814323425291 + ], + [ + "▁shellfish", + -13.495821952819824 + ], + [ + "cars", + -13.495840072631836 + ], + [ + "regulated", + -13.495851516723633 + ], + [ + "▁Adviser", + -13.495894432067873 + ], + [ + "▁rover", + -13.495901107788086 + ], + [ + "▁pl", + -13.49592113494873 + ], + [ + "▁diode", + -13.495931625366213 + ], + [ + "▁sharpness", + -13.495938301086426 + ], + [ + "▁flake", + -13.496000289916992 + ], + [ + "THIS", + -13.496028900146484 + ], + [ + "▁indifferent", + -13.496078491210938 + ], + [ + "▁Nau", + -13.496126174926758 + ], + [ + "▁245", + -13.49613094329834 + ], + [ + "▁Voltage", + -13.496159553527832 + ], + [ + "223", + -13.496166229248049 + ], + [ + "▁vous", + -13.49621868133545 + ], + [ + "▁specificity", + -13.496222496032717 + ], + [ + "▁156", + -13.496237754821776 + ], + [ + "Shortly", + -13.496240615844728 + ], + [ + "▁QS", + -13.496240615844728 + ], + [ + "▁Intro", + -13.496294975280762 + ], + [ + "▁lyric", + -13.49635887145996 + ], + [ + "▁dir", + -13.496384620666504 + ], + [ + "▁Cheats", + -13.496421813964844 + ], + [ + "▁timings", + -13.49648094177246 + ], + [ + "radi", + -13.496658325195312 + ], + [ + "ures", + -13.496665954589844 + ], + [ + "ogo", + -13.496777534484863 + ], + [ + "▁brag", + -13.496822357177734 + ], + [ + "▁dys", + -13.496853828430176 + ], + [ + "auer", + -13.496899604797363 + ], + [ + "fake", + -13.49707317352295 + ], + [ + "▁spying", + -13.497090339660645 + ], + [ + "▁snuggle", + -13.497135162353516 + ], + [ + "▁undergraduates", + -13.497152328491213 + ], + [ + "▁endeavours", + -13.497193336486816 + ], + [ + "▁Nikola", + -13.497246742248535 + ], + [ + "▁Benton", + -13.497285842895508 + ], + [ + "▁1830", + -13.49730396270752 + ], + [ + "▁transcend", + -13.497358322143556 + ], + [ + "▁Northampton", + -13.497379302978516 + ], + [ + "▁delicately", + -13.49739933013916 + ], + [ + "doubt", + -13.497486114501951 + ], + [ + "gaard", + -13.497506141662598 + ], + [ + "▁Humphrey", + -13.497509956359863 + ], + [ + "▁Dill", + -13.497512817382812 + ], + [ + "▁Flipkart", + -13.497512817382812 + ], + [ + "▁Bengaluru", + -13.497513771057127 + ], + [ + "▁Usa", + -13.497513771057127 + ], + [ + "▁opulent", + -13.497513771057127 + ], + [ + "▁Taurus", + -13.497522354125977 + ], + [ + "▁disclosing", + -13.497522354125977 + ], + [ + "▁misguided", + -13.497525215148926 + ], + [ + "Sean", + -13.49752712249756 + ], + [ + "▁revising", + -13.49752712249756 + ], + [ + "▁Cajun", + -13.497528076171877 + ], + [ + "▁geese", + -13.49753761291504 + ], + [ + "▁Higgins", + -13.49754810333252 + ], + [ + "▁midwife", + -13.497563362121582 + ], + [ + "▁Sundance", + -13.497581481933594 + ], + [ + "▁summed", + -13.497598648071287 + ], + [ + "▁convict", + -13.497611045837402 + ], + [ + "▁resounding", + -13.497636795043944 + ], + [ + "dick", + -13.497641563415527 + ], + [ + "enta", + -13.49764919281006 + ], + [ + "▁importers", + -13.497661590576172 + ], + [ + "▁Debian", + -13.497687339782717 + ], + [ + "esta", + -13.49770164489746 + ], + [ + "▁Claw", + -13.497730255126951 + ], + [ + "▁Nim", + -13.49776840209961 + ], + [ + "uja", + -13.497769355773926 
+ ], + [ + "▁Distributed", + -13.497769355773926 + ], + [ + "▁stink", + -13.497775077819824 + ], + [ + "▁lumps", + -13.497783660888672 + ], + [ + "▁Gauge", + -13.497787475585938 + ], + [ + "circle", + -13.49780559539795 + ], + [ + "▁recalling", + -13.497817993164062 + ], + [ + "▁Como", + -13.49789333343506 + ], + [ + "▁1,300", + -13.497926712036133 + ], + [ + "▁restraining", + -13.49794578552246 + ], + [ + "iku", + -13.497983932495115 + ], + [ + "Determine", + -13.498019218444824 + ], + [ + "▁gamma", + -13.498021125793455 + ], + [ + "▁bearish", + -13.498046875 + ], + [ + "▁tuner", + -13.498053550720217 + ], + [ + "ues", + -13.498075485229492 + ], + [ + "▁Wicked", + -13.498117446899414 + ], + [ + "Qaeda", + -13.49813175201416 + ], + [ + "industry", + -13.498164176940918 + ], + [ + "Scope", + -13.498165130615234 + ], + [ + "Dun", + -13.49818992614746 + ], + [ + "▁invoked", + -13.49821949005127 + ], + [ + "▁labelling", + -13.498250961303713 + ], + [ + "Photograph", + -13.498251914978027 + ], + [ + "▁florida", + -13.498266220092772 + ], + [ + "▁flaps", + -13.498269081115724 + ], + [ + "solutions", + -13.498330116271973 + ], + [ + "▁2017).", + -13.49839973449707 + ], + [ + "Syn", + -13.49850368499756 + ], + [ + "▁Wah", + -13.498617172241213 + ], + [ + "510", + -13.498635292053224 + ], + [ + "▁Recordings", + -13.498714447021484 + ], + [ + "▁Ding", + -13.498854637145996 + ], + [ + "▁ent", + -13.49892234802246 + ], + [ + "▁Rem", + -13.499177932739258 + ], + [ + "▁damned", + -13.499213218688965 + ], + [ + "404", + -13.49924659729004 + ], + [ + "▁Polk", + -13.499275207519531 + ], + [ + "▁CDL", + -13.499506950378418 + ], + [ + "▁Surround", + -13.499600410461426 + ], + [ + "▁juggle", + -13.499608039855955 + ], + [ + "▁Firms", + -13.499616622924805 + ], + [ + "uration", + -13.4996337890625 + ], + [ + "▁Oro", + -13.499641418457031 + ], + [ + "▁Precious", + -13.499796867370604 + ], + [ + "▁Alf", + -13.499835968017578 + ], + [ + "▁1898", + -13.499855995178224 + ], + [ + "▁flicker", + -13.499895095825195 + ], + [ + "▁laden", + -13.499929428100586 + ], + [ + "▁battalion", + -13.500003814697266 + ], + [ + "▁recognising", + -13.500003814697266 + ], + [ + "▁physiotherapy", + -13.500005722045898 + ], + [ + ")(", + -13.50005340576172 + ], + [ + "▁biographical", + -13.5000581741333 + ], + [ + "▁insanity", + -13.500059127807615 + ], + [ + "IK", + -13.500089645385742 + ], + [ + "▁sha", + -13.500116348266602 + ], + [ + "840", + -13.50015640258789 + ], + [ + "▁CNBC", + -13.500181198120115 + ], + [ + "▁MANY", + -13.500261306762695 + ], + [ + "mbra", + -13.500288009643556 + ], + [ + "▁clinch", + -13.500311851501465 + ], + [ + "▁coax", + -13.50047492980957 + ], + [ + "▁footprints", + -13.500642776489258 + ], + [ + "▁seconded", + -13.500687599182127 + ], + [ + "▁captains", + -13.500689506530762 + ], + [ + "prone", + -13.500692367553713 + ], + [ + "▁sill", + -13.50074863433838 + ], + [ + "▁eyeliner", + -13.500792503356934 + ], + [ + "▁Measuring", + -13.50080394744873 + ], + [ + "▁revolutionized", + -13.500811576843262 + ], + [ + "▁instructing", + -13.500812530517578 + ], + [ + "▁projectors", + -13.500933647155762 + ], + [ + "Nine", + -13.500954627990724 + ], + [ + "▁SBI", + -13.500999450683594 + ], + [ + "▁Israelis", + -13.50102710723877 + ], + [ + "UF", + -13.501072883605955 + ], + [ + "▁forearm", + -13.501212120056152 + ], + [ + "▁Fitz", + -13.501265525817873 + ], + [ + "agon", + -13.501297950744627 + ], + [ + "02.", + -13.501307487487791 + ], + [ + "rek", + -13.501349449157717 + ], + [ + "36.", + -13.501462936401367 + 
], + [ + "▁Flames", + -13.501513481140137 + ], + [ + "▁Ike", + -13.501517295837402 + ], + [ + "▁LINK", + -13.501585960388184 + ], + [ + "▁Macau", + -13.501605987548828 + ], + [ + "▁Upholstery", + -13.501652717590332 + ], + [ + "5.6", + -13.501666069030762 + ], + [ + "Agent", + -13.501709938049316 + ], + [ + "▁Tires", + -13.50172233581543 + ], + [ + "▁SCP", + -13.50173282623291 + ], + [ + "▁Haha", + -13.5017671585083 + ], + [ + "▁Zenith", + -13.50178050994873 + ], + [ + "promise", + -13.501805305480955 + ], + [ + "LOCK", + -13.50185203552246 + ], + [ + "▁pledges", + -13.501886367797852 + ], + [ + "▁vomit", + -13.501895904541016 + ], + [ + "▁trackers", + -13.501946449279783 + ], + [ + "▁Fail", + -13.501973152160645 + ], + [ + "▁Hurt", + -13.502036094665527 + ], + [ + "▁bis", + -13.502209663391112 + ], + [ + "▁1896", + -13.502283096313477 + ], + [ + "▁handouts", + -13.502339363098145 + ], + [ + "dm", + -13.502365112304688 + ], + [ + "▁ple", + -13.502373695373535 + ], + [ + "▁Kahn", + -13.502412796020508 + ], + [ + "▁convection", + -13.502503395080566 + ], + [ + "▁kamagra", + -13.502503395080566 + ], + [ + "▁philanthropist", + -13.502503395080566 + ], + [ + "▁impractical", + -13.502509117126465 + ], + [ + "▁mousse", + -13.502509117126465 + ], + [ + "▁flurry", + -13.502511978149414 + ], + [ + "▁Dudley", + -13.502528190612791 + ], + [ + "▁Chavez", + -13.502533912658691 + ], + [ + "▁Gracie", + -13.502534866333008 + ], + [ + "reported", + -13.502548217773438 + ], + [ + "▁Caucus", + -13.502555847167969 + ], + [ + "▁Oculus", + -13.502581596374512 + ], + [ + "▁Discard", + -13.50259780883789 + ], + [ + "Bug", + -13.50261116027832 + ], + [ + "JB", + -13.502735137939451 + ], + [ + "▁cookware", + -13.502802848815918 + ], + [ + "▁teller", + -13.502874374389648 + ], + [ + "411", + -13.502901077270508 + ], + [ + "▁europe", + -13.502927780151367 + ], + [ + "▁Rupert", + -13.502965927124023 + ], + [ + "▁tastefully", + -13.502981185913086 + ], + [ + "▁palettes", + -13.5029935836792 + ], + [ + "▁Stairs", + -13.503039360046388 + ], + [ + "▁405", + -13.503046989440918 + ], + [ + "▁Sammy", + -13.503082275390623 + ], + [ + "551", + -13.503167152404783 + ], + [ + "ival", + -13.50323486328125 + ], + [ + "▁Instructors", + -13.503246307373049 + ], + [ + "wrote", + -13.503292083740234 + ], + [ + "▁selectively", + -13.503328323364258 + ], + [ + "▁Refund", + -13.503388404846191 + ], + [ + "▁Bram", + -13.503414154052734 + ], + [ + "▁outspoken", + -13.503443717956545 + ], + [ + "▁fondly", + -13.503507614135742 + ], + [ + "▁unfairly", + -13.50352954864502 + ], + [ + "▁bartender", + -13.503531455993652 + ], + [ + "BLE", + -13.503536224365234 + ], + [ + "▁peep", + -13.503655433654783 + ], + [ + "▁joked", + -13.503694534301758 + ], + [ + "▁VOC", + -13.503710746765137 + ], + [ + "▁Heartland", + -13.503722190856934 + ], + [ + "004", + -13.503756523132324 + ], + [ + "▁importer", + -13.503768920898438 + ], + [ + "▁Audience", + -13.503790855407717 + ], + [ + "▁Timor", + -13.503791809082031 + ], + [ + "▁biker", + -13.503832817077637 + ], + [ + "те", + -13.503876686096191 + ], + [ + "ether", + -13.503891944885254 + ], + [ + "lana", + -13.503928184509276 + ], + [ + "pain", + -13.503934860229492 + ], + [ + "▁END", + -13.50397491455078 + ], + [ + "▁Condos", + -13.50400161743164 + ], + [ + "▁Assuming", + -13.504035949707031 + ], + [ + "OLE", + -13.50404930114746 + ], + [ + "moo", + -13.50404930114746 + ], + [ + "▁oversaw", + -13.504106521606444 + ], + [ + "▁Cameras", + -13.504120826721191 + ], + [ + "nay", + -13.504167556762695 + ], + [ + 
"Tar", + -13.50418472290039 + ], + [ + "▁adapts", + -13.504246711730955 + ], + [ + "▁tablecloth", + -13.504284858703612 + ], + [ + "Tr", + -13.50429630279541 + ], + [ + "rill", + -13.50436782836914 + ], + [ + "lg", + -13.504380226135254 + ], + [ + "▁conveniences", + -13.50438404083252 + ], + [ + "SAP", + -13.504446983337402 + ], + [ + "▁ISS", + -13.504534721374512 + ], + [ + "▁imaginations", + -13.504547119140623 + ], + [ + "▁bookmarking", + -13.504613876342772 + ], + [ + "▁Blonde", + -13.50464153289795 + ], + [ + "▁withdrawing", + -13.504719734191896 + ], + [ + "▁Went", + -13.504758834838867 + ], + [ + "ppa", + -13.504796028137209 + ], + [ + "coast", + -13.504862785339355 + ], + [ + "▁intensified", + -13.505009651184082 + ], + [ + "▁microscopy", + -13.505010604858398 + ], + [ + "▁hawk", + -13.505011558532717 + ], + [ + "▁disbelief", + -13.505016326904297 + ], + [ + "▁Irwin", + -13.50502109527588 + ], + [ + "▁Activation", + -13.505023956298828 + ], + [ + "▁amenity", + -13.505023956298828 + ], + [ + "▁misinformation", + -13.505024909973145 + ], + [ + "▁transnational", + -13.505041122436523 + ], + [ + "▁Lords", + -13.50505542755127 + ], + [ + "▁AMA", + -13.505062103271484 + ], + [ + "▁abusing", + -13.505070686340332 + ], + [ + "▁encrypt", + -13.505077362060549 + ], + [ + "▁Glitter", + -13.505088806152344 + ], + [ + "Blu", + -13.505133628845217 + ], + [ + "mation", + -13.505141258239746 + ], + [ + "▁idols", + -13.505141258239746 + ], + [ + "4000", + -13.5051908493042 + ], + [ + "ALLY", + -13.505208969116213 + ], + [ + "▁Tow", + -13.505210876464844 + ], + [ + "gler", + -13.505260467529297 + ], + [ + "▁204", + -13.50527286529541 + ], + [ + "▁Isabella", + -13.505290031433104 + ], + [ + "▁betrayed", + -13.505348205566406 + ], + [ + "▁EG", + -13.505484580993652 + ], + [ + "▁atheist", + -13.505549430847168 + ], + [ + "▁tetra", + -13.505719184875488 + ], + [ + "suited", + -13.50578784942627 + ], + [ + "tary", + -13.50584602355957 + ], + [ + "Industry", + -13.505855560302734 + ], + [ + "enden", + -13.50590705871582 + ], + [ + "encies", + -13.505913734436035 + ], + [ + "stones", + -13.50601577758789 + ], + [ + "▁Vac", + -13.506065368652344 + ], + [ + "basic", + -13.506206512451172 + ], + [ + "▁Characters", + -13.50622272491455 + ], + [ + "alty", + -13.506229400634766 + ], + [ + "▁6-2", + -13.506234169006348 + ], + [ + "▁Burt", + -13.506251335144045 + ], + [ + "▁Hive", + -13.50627613067627 + ], + [ + "▁riots", + -13.506295204162598 + ], + [ + "▁Hoodie", + -13.506302833557127 + ], + [ + "▁frontal", + -13.506434440612791 + ], + [ + "▁shutout", + -13.506620407104492 + ], + [ + "431", + -13.506741523742676 + ], + [ + "▁bh", + -13.506797790527344 + ], + [ + "570", + -13.506834030151367 + ], + [ + "▁dusting", + -13.506880760192873 + ], + [ + "▁Cheer", + -13.506902694702148 + ], + [ + "▁Lud", + -13.50693130493164 + ], + [ + "▁wrestle", + -13.507055282592772 + ], + [ + "▁runny", + -13.507116317749023 + ], + [ + "▁cushioned", + -13.507171630859377 + ], + [ + "uber", + -13.507231712341309 + ], + [ + "china", + -13.507251739501951 + ], + [ + "▁fasten", + -13.507315635681152 + ], + [ + "Ste", + -13.507318496704102 + ], + [ + "▁perpetrators", + -13.507381439208984 + ], + [ + "▁sn", + -13.507437705993652 + ], + [ + "▁applicator", + -13.507522583007812 + ], + [ + "Asian", + -13.507523536682127 + ], + [ + "▁Aristotle", + -13.507523536682127 + ], + [ + "▁hindsight", + -13.507524490356444 + ], + [ + "▁Archaeology", + -13.507525444030762 + ], + [ + "▁Crochet", + -13.507525444030762 + ], + [ + "▁inflamed", + 
-13.507532119750977 + ], + [ + "▁Judah", + -13.50758457183838 + ], + [ + "▁multifaceted", + -13.50758457183838 + ], + [ + "▁Widow", + -13.507609367370604 + ], + [ + "321", + -13.507655143737791 + ], + [ + "uva", + -13.50766658782959 + ], + [ + "▁Authorization", + -13.507680892944336 + ], + [ + "▁Keynes", + -13.507718086242676 + ], + [ + "gol", + -13.507741928100586 + ], + [ + "▁Mock", + -13.507819175720217 + ], + [ + "ication", + -13.507851600646973 + ], + [ + "▁honing", + -13.507862091064451 + ], + [ + "▁stabilizer", + -13.50791358947754 + ], + [ + "wani", + -13.507948875427246 + ], + [ + "▁Freeze", + -13.507951736450195 + ], + [ + "icia", + -13.508116722106934 + ], + [ + "WOOD", + -13.508132934570312 + ], + [ + "pilot", + -13.50818920135498 + ], + [ + "▁Benchmark", + -13.50831413269043 + ], + [ + "SMS", + -13.50833797454834 + ], + [ + "▁rounding", + -13.508338928222656 + ], + [ + "▁Museums", + -13.508342742919922 + ], + [ + "Resume", + -13.50842571258545 + ], + [ + "8\"", + -13.508445739746094 + ], + [ + "▁brandy", + -13.508492469787598 + ], + [ + "▁refresher", + -13.508621215820312 + ], + [ + "VU", + -13.508685111999512 + ], + [ + "▁hipster", + -13.5087251663208 + ], + [ + "bred", + -13.508800506591797 + ], + [ + "Sell", + -13.508853912353516 + ], + [ + "Root", + -13.50887393951416 + ], + [ + "▁Stacy", + -13.508935928344728 + ], + [ + "▁22%", + -13.508947372436523 + ], + [ + "▁Butte", + -13.508994102478027 + ], + [ + "▁XD", + -13.509008407592772 + ], + [ + "▁NG", + -13.509069442749023 + ], + [ + "▁ich", + -13.509119033813477 + ], + [ + "▁Beginner", + -13.50915813446045 + ], + [ + "▁electrodes", + -13.509174346923828 + ], + [ + "▁evidently", + -13.509188652038574 + ], + [ + "▁Smaller", + -13.509215354919434 + ], + [ + "cure", + -13.509238243103027 + ], + [ + "▁NR", + -13.509267807006836 + ], + [ + "starting", + -13.509284019470217 + ], + [ + "Exchange", + -13.509320259094238 + ], + [ + "▁7.2", + -13.509353637695312 + ], + [ + "/06/", + -13.509385108947754 + ], + [ + "▁births", + -13.509434700012209 + ], + [ + "0-4", + -13.509448051452637 + ], + [ + "▁rupees", + -13.509456634521484 + ], + [ + "▁corals", + -13.509482383728027 + ], + [ + "585", + -13.509483337402344 + ], + [ + "burner", + -13.50949001312256 + ], + [ + "▁1.25", + -13.50949478149414 + ], + [ + "Ol", + -13.509587287902832 + ], + [ + "▁Pip", + -13.509625434875488 + ], + [ + "▁Mobi", + -13.509637832641602 + ], + [ + "▁bishops", + -13.509681701660156 + ], + [ + ".25\"", + -13.50976276397705 + ], + [ + "▁dishonest", + -13.509784698486328 + ], + [ + "▁cropping", + -13.50979995727539 + ], + [ + "▁austerity", + -13.509812355041504 + ], + [ + "MIS", + -13.509851455688477 + ], + [ + "meters", + -13.509894371032717 + ], + [ + "producer", + -13.509918212890623 + ], + [ + "▁CHE", + -13.509930610656738 + ], + [ + "▁1973,", + -13.509942054748535 + ], + [ + "tall", + -13.51004123687744 + ], + [ + "▁inaccuracies", + -13.51004123687744 + ], + [ + "▁invigorating", + -13.51004123687744 + ], + [ + "▁fraught", + -13.510046005249023 + ], + [ + "▁Picchu", + -13.510047912597656 + ], + [ + "▁embedding", + -13.510065078735352 + ], + [ + "▁Lincolnshire", + -13.510088920593262 + ], + [ + "▁seductive", + -13.510091781616213 + ], + [ + "▁SUCH", + -13.510101318359377 + ], + [ + "▁barking", + -13.510112762451172 + ], + [ + "▁european", + -13.510135650634766 + ], + [ + "▁Whip", + -13.510150909423828 + ], + [ + "▁ranger", + -13.51015567779541 + ], + [ + "▁Yates", + -13.51024341583252 + ], + [ + "▁Camino", + -13.51027011871338 + ], + [ + "▁conductors", + 
-13.5103120803833 + ], + [ + "▁Zeus", + -13.510345458984377 + ], + [ + "▁bal", + -13.510348320007324 + ], + [ + "▁Getty", + -13.510485649108888 + ], + [ + "126", + -13.510517120361328 + ], + [ + "▁(28", + -13.51054859161377 + ], + [ + "▁TRA", + -13.51055908203125 + ], + [ + "▁Antigua", + -13.510679244995115 + ], + [ + "▁Giles", + -13.510700225830078 + ], + [ + "▁Agnes", + -13.510735511779783 + ], + [ + "▁Dhaka", + -13.51084041595459 + ], + [ + "Fred", + -13.510848999023438 + ], + [ + "▁Ballard", + -13.510879516601562 + ], + [ + "bble", + -13.510993957519531 + ], + [ + "obligation", + -13.511001586914062 + ], + [ + "Emily", + -13.511031150817873 + ], + [ + "Michelle", + -13.511052131652832 + ], + [ + "future", + -13.511053085327148 + ], + [ + "▁Investigators", + -13.511115074157717 + ], + [ + "▁gra", + -13.51127815246582 + ], + [ + "▁teaspoons", + -13.511300086975098 + ], + [ + "ishi", + -13.511399269104004 + ], + [ + "▁Petite", + -13.511404991149902 + ], + [ + "▁distinctions", + -13.51157283782959 + ], + [ + "▁rightful", + -13.51159381866455 + ], + [ + "▁Counselor", + -13.511635780334473 + ], + [ + "▁Stokes", + -13.511734008789062 + ], + [ + "▁namespace", + -13.511738777160645 + ], + [ + "▁Sul", + -13.511743545532228 + ], + [ + "▁Botanic", + -13.51175022125244 + ], + [ + "Config", + -13.511783599853516 + ], + [ + "405", + -13.511880874633787 + ], + [ + "▁scares", + -13.511929512023926 + ], + [ + "▁nested", + -13.512017250061035 + ], + [ + "iding", + -13.5120210647583 + ], + [ + "▁gamblers", + -13.51205825805664 + ], + [ + "▁1,600", + -13.51206398010254 + ], + [ + "▁Babe", + -13.512139320373535 + ], + [ + "NOT", + -13.512263298034668 + ], + [ + "DOS", + -13.512316703796388 + ], + [ + "saver", + -13.512368202209473 + ], + [ + "▁fortnight", + -13.51246166229248 + ], + [ + "▁Newtown", + -13.512510299682615 + ], + [ + "8-4", + -13.512528419494627 + ], + [ + "▁articulation", + -13.512566566467283 + ], + [ + "▁skepticism", + -13.512582778930664 + ], + [ + "asset", + -13.512614250183104 + ], + [ + "▁Poppy", + -13.512649536132812 + ], + [ + "▁adjuster", + -13.512654304504396 + ], + [ + ",000.00", + -13.512673377990724 + ], + [ + "▁Twain", + -13.512676239013672 + ], + [ + "▁chainsaw", + -13.512678146362305 + ], + [ + "▁Came", + -13.512691497802734 + ], + [ + "▁scorching", + -13.512702941894531 + ], + [ + "▁stoked", + -13.512799263000488 + ], + [ + "▁arab", + -13.512821197509766 + ], + [ + "635", + -13.512856483459473 + ], + [ + "▁Bowman", + -13.512909889221191 + ], + [ + "▁compel", + -13.512954711914062 + ], + [ + "wat", + -13.51304054260254 + ], + [ + "Graduate", + -13.513055801391602 + ], + [ + "Pros", + -13.513086318969728 + ], + [ + "▁Objects", + -13.51319408416748 + ], + [ + "▁foldable", + -13.51323127746582 + ], + [ + "▁11,000", + -13.513235092163086 + ], + [ + "▁Animated", + -13.513286590576172 + ], + [ + "Fear", + -13.513323783874512 + ], + [ + "▁Juno", + -13.513394355773926 + ], + [ + "▁workbench", + -13.513457298278809 + ], + [ + "athletes", + -13.513509750366213 + ], + [ + "Shift", + -13.513530731201172 + ], + [ + "▁communicator", + -13.51364803314209 + ], + [ + "emotional", + -13.513676643371582 + ], + [ + "▁Plastics", + -13.513696670532228 + ], + [ + "▁armchair", + -13.513711929321287 + ], + [ + "Reach", + -13.513751983642578 + ], + [ + "▁homo", + -13.513940811157228 + ], + [ + "▁HTTPS", + -13.514062881469728 + ], + [ + "119", + -13.51419448852539 + ], + [ + "pn", + -13.51428508758545 + ], + [ + "▁shortfall", + -13.514296531677246 + ], + [ + "yat", + -13.514385223388672 + ], + [ + 
"▁hearth", + -13.514391899108888 + ], + [ + "▁Operational", + -13.514450073242188 + ], + [ + "▁reunite", + -13.514498710632324 + ], + [ + "▁Orient", + -13.514522552490234 + ], + [ + "▁modernize", + -13.5145845413208 + ], + [ + "loe", + -13.514596939086914 + ], + [ + "▁farmed", + -13.514630317687988 + ], + [ + "▁feng", + -13.514753341674805 + ], + [ + "▁millionaire", + -13.514827728271484 + ], + [ + "▁instantaneous", + -13.514863014221191 + ], + [ + "▁2013).", + -13.514925956726074 + ], + [ + "▁Spi", + -13.514962196350098 + ], + [ + "▁entertainer", + -13.514962196350098 + ], + [ + "Document", + -13.514974594116213 + ], + [ + "▁PEOPLE", + -13.51499366760254 + ], + [ + "▁McCartney", + -13.515098571777344 + ], + [ + "▁Rutherford", + -13.515098571777344 + ], + [ + "▁fibromyalgia", + -13.515098571777344 + ], + [ + "▁inequalities", + -13.515098571777344 + ], + [ + "▁impetus", + -13.51509952545166 + ], + [ + "▁lovable", + -13.515101432800291 + ], + [ + "▁panelists", + -13.51511287689209 + ], + [ + "▁parody", + -13.515113830566406 + ], + [ + "▁unsustainable", + -13.515129089355469 + ], + [ + "▁Olsen", + -13.515151977539062 + ], + [ + "▁Bray", + -13.515182495117188 + ], + [ + "▁maximal", + -13.51519775390625 + ], + [ + "Option", + -13.51523208618164 + ], + [ + "▁unpublished", + -13.515239715576172 + ], + [ + "▁90’", + -13.515240669250488 + ], + [ + "▁embossing", + -13.515247344970703 + ], + [ + "▁reed", + -13.515253067016602 + ], + [ + "▁annotated", + -13.51529312133789 + ], + [ + "▁interacted", + -13.515308380126951 + ], + [ + "▁Pascal", + -13.51536464691162 + ], + [ + "▁purest", + -13.515374183654783 + ], + [ + "escu", + -13.515398979187012 + ], + [ + "▁$300,000", + -13.515419960021973 + ], + [ + "▁Mohan", + -13.515520095825195 + ], + [ + "Experts", + -13.515661239624023 + ], + [ + "Moon", + -13.515877723693848 + ], + [ + "▁Abi", + -13.515880584716797 + ], + [ + "▁funders", + -13.51595687866211 + ], + [ + "▁Xen", + -13.515985488891602 + ], + [ + "46.", + -13.516061782836914 + ], + [ + "Josh", + -13.516077995300291 + ], + [ + "cancer", + -13.516170501708984 + ], + [ + "▁GRA", + -13.516194343566896 + ], + [ + "yna", + -13.516201972961426 + ], + [ + "Jonathan", + -13.516206741333008 + ], + [ + "▁apricot", + -13.516213417053224 + ], + [ + "▁penguin", + -13.516234397888184 + ], + [ + "during", + -13.516249656677246 + ], + [ + "▁Goldberg", + -13.516304969787598 + ], + [ + "Tele", + -13.516311645507812 + ], + [ + "ept", + -13.516366004943848 + ], + [ + "▁wildfire", + -13.51644515991211 + ], + [ + "pb", + -13.51656436920166 + ], + [ + "KM", + -13.51663589477539 + ], + [ + "yahoo", + -13.516669273376465 + ], + [ + "▁Chand", + -13.516682624816896 + ], + [ + "▁Kum", + -13.516712188720703 + ], + [ + "iner", + -13.516731262207031 + ], + [ + "▁Drinks", + -13.516737937927246 + ], + [ + "HG", + -13.51675796508789 + ], + [ + "▁Liberals", + -13.51682472229004 + ], + [ + "Euro", + -13.516825675964355 + ], + [ + "mot", + -13.516926765441896 + ], + [ + "guan", + -13.51693344116211 + ], + [ + "▁Meek", + -13.516995429992676 + ], + [ + "ww", + -13.517019271850586 + ], + [ + "▁opal", + -13.5170316696167 + ], + [ + "▁patched", + -13.517038345336914 + ], + [ + "Bon", + -13.517075538635254 + ], + [ + "▁prog", + -13.517147064208984 + ], + [ + "▁CSC", + -13.517159461975098 + ], + [ + "▁Stakes", + -13.5172119140625 + ], + [ + "▁GED", + -13.517298698425291 + ], + [ + "Jam", + -13.517309188842772 + ], + [ + "cious", + -13.51750946044922 + ], + [ + "▁Flags", + -13.517603874206545 + ], + [ + "anni", + -13.517623901367188 + ], + [ + 
"▁remuneration", + -13.5176362991333 + ], + [ + "▁brimming", + -13.51765251159668 + ], + [ + "▁maturing", + -13.517663955688477 + ], + [ + "▁SSA", + -13.51766586303711 + ], + [ + "▁Naked", + -13.517681121826172 + ], + [ + "Maria", + -13.51773738861084 + ], + [ + "▁Oxide", + -13.517759323120115 + ], + [ + "▁Monet", + -13.517772674560549 + ], + [ + "dim", + -13.517776489257812 + ], + [ + "423", + -13.517895698547363 + ], + [ + "825", + -13.517908096313477 + ], + [ + "▁Zhou", + -13.517919540405272 + ], + [ + "uce", + -13.517964363098145 + ], + [ + "▁Kidney", + -13.517964363098145 + ], + [ + "bhp", + -13.517972946166992 + ], + [ + "▁launcher", + -13.518060684204102 + ], + [ + "▁Vegetarian", + -13.51809310913086 + ], + [ + "▁recourse", + -13.518156051635742 + ], + [ + "▁Packs", + -13.518196105957031 + ], + [ + "▁potter", + -13.51822566986084 + ], + [ + "▁seeker", + -13.518228530883787 + ], + [ + "zor", + -13.518259048461914 + ], + [ + "▁Hedge", + -13.51836395263672 + ], + [ + "▁Presents", + -13.518370628356934 + ], + [ + "▁Tec", + -13.518370628356934 + ], + [ + "JD", + -13.518402099609377 + ], + [ + "138", + -13.518495559692385 + ], + [ + "▁surety", + -13.518671989440918 + ], + [ + "▁whack", + -13.518733024597168 + ], + [ + "▁filename", + -13.518766403198242 + ], + [ + "▁Airline", + -13.518770217895508 + ], + [ + "Attempt", + -13.518790245056152 + ], + [ + "Average", + -13.518802642822266 + ], + [ + "memory", + -13.518810272216797 + ], + [ + "alli", + -13.51883316040039 + ], + [ + "745", + -13.518924713134766 + ], + [ + "alt", + -13.519060134887695 + ], + [ + "▁Toddler", + -13.51906394958496 + ], + [ + "loan", + -13.519076347351074 + ], + [ + "▁facilitators", + -13.51921558380127 + ], + [ + "Dragon", + -13.519222259521484 + ], + [ + "Shot", + -13.519342422485352 + ], + [ + "34.", + -13.519350051879885 + ], + [ + "580", + -13.519393920898438 + ], + [ + "▁Contracting", + -13.51944065093994 + ], + [ + "▁DRM", + -13.51953411102295 + ], + [ + "1998", + -13.51955223083496 + ], + [ + "5-6", + -13.519641876220703 + ], + [ + "▁musically", + -13.519721031188965 + ], + [ + "▁Camel", + -13.519775390625 + ], + [ + "▁clap", + -13.51979637145996 + ], + [ + "MAT", + -13.519854545593262 + ], + [ + "▁Loma", + -13.519952774047852 + ], + [ + "▁GLA", + -13.520011901855469 + ], + [ + "▁sprout", + -13.520018577575684 + ], + [ + "▁dub", + -13.52011013031006 + ], + [ + "▁Rockefeller", + -13.520180702209473 + ], + [ + "▁remedial", + -13.520180702209473 + ], + [ + "▁wondrous", + -13.520181655883787 + ], + [ + "▁propulsion", + -13.520194053649902 + ], + [ + "Karen", + -13.520203590393066 + ], + [ + "▁Ericsson", + -13.52023220062256 + ], + [ + "▁asserts", + -13.52028465270996 + ], + [ + "▁expelled", + -13.52028465270996 + ], + [ + "▁helpline", + -13.520309448242188 + ], + [ + "ORS", + -13.520328521728516 + ], + [ + "2.7", + -13.520356178283691 + ], + [ + "0.2", + -13.520365715026855 + ], + [ + "Assess", + -13.520365715026855 + ], + [ + "▁Wembley", + -13.520386695861816 + ], + [ + "▁Tarot", + -13.520435333251951 + ], + [ + "▁Romero", + -13.520442962646484 + ], + [ + "▁Paddle", + -13.520452499389648 + ], + [ + "elin", + -13.520492553710938 + ], + [ + "Alan", + -13.520624160766602 + ], + [ + "▁pampering", + -13.520635604858398 + ], + [ + "▁clipboard", + -13.520672798156738 + ], + [ + "▁Sticker", + -13.520751953125 + ], + [ + "audio", + -13.520752906799316 + ], + [ + "▁spores", + -13.520756721496582 + ], + [ + "▁sup", + -13.520811080932615 + ], + [ + "▁YEARS", + -13.520831108093262 + ], + [ + "/31", + -13.520838737487791 + ], 
+ [ + "▁Boh", + -13.520848274230955 + ], + [ + "progress", + -13.520855903625488 + ], + [ + "▁minced", + -13.520899772644045 + ], + [ + "▁Kri", + -13.52106475830078 + ], + [ + "▁Dion", + -13.52106761932373 + ], + [ + "▁Engines", + -13.521154403686523 + ], + [ + "▁expat", + -13.521164894104004 + ], + [ + "▁cb", + -13.521198272705078 + ], + [ + "▁powerfully", + -13.521220207214355 + ], + [ + "send", + -13.521236419677734 + ], + [ + "▁liar", + -13.521286010742188 + ], + [ + "▁occupant", + -13.521371841430664 + ], + [ + "Celebrate", + -13.521410942077637 + ], + [ + "▁CFR", + -13.52141571044922 + ], + [ + "▁Cancellation", + -13.52141571044922 + ], + [ + "grandchildren", + -13.5214204788208 + ], + [ + "▁Gai", + -13.521421432495115 + ], + [ + "shield", + -13.52142333984375 + ], + [ + "▁Alec", + -13.521478652954102 + ], + [ + "▁Xin", + -13.521531105041504 + ], + [ + "Session", + -13.521653175354004 + ], + [ + "method", + -13.521713256835938 + ], + [ + "▁regained", + -13.521724700927734 + ], + [ + "liest", + -13.521803855895996 + ], + [ + "ERT", + -13.521822929382324 + ], + [ + "▁Radical", + -13.521944046020508 + ], + [ + "ross", + -13.52199363708496 + ], + [ + "▁Brownie", + -13.522064208984377 + ], + [ + "▁178", + -13.522256851196287 + ], + [ + "cutter", + -13.522278785705566 + ], + [ + "▁herald", + -13.522387504577637 + ], + [ + "aur", + -13.522418022155762 + ], + [ + "▁ré", + -13.522446632385254 + ], + [ + "▁1976,", + -13.522457122802734 + ], + [ + "▁deserts", + -13.52246379852295 + ], + [ + "▁attendants", + -13.522513389587402 + ], + [ + "▁wartime", + -13.522513389587402 + ], + [ + "▁154", + -13.522533416748049 + ], + [ + "▁Modified", + -13.522588729858398 + ], + [ + "▁PDFs", + -13.522607803344728 + ], + [ + "▁heterogeneous", + -13.52273178100586 + ], + [ + "▁Beirut", + -13.522732734680176 + ], + [ + "▁anabolic", + -13.522733688354492 + ], + [ + "▁Panorama", + -13.522745132446287 + ], + [ + "▁unresolved", + -13.522754669189451 + ], + [ + "▁palpable", + -13.522764205932615 + ], + [ + "▁(1999)", + -13.522768020629885 + ], + [ + "▁biochemical", + -13.522770881652832 + ], + [ + "ologic", + -13.522797584533691 + ], + [ + "▁inspecting", + -13.52286148071289 + ], + [ + "▁puppets", + -13.522863388061523 + ], + [ + "▁lauded", + -13.522886276245115 + ], + [ + "▁adaptability", + -13.522904396057127 + ], + [ + "488", + -13.522957801818848 + ], + [ + "▁$120", + -13.52300262451172 + ], + [ + "cote", + -13.523015022277832 + ], + [ + "lax", + -13.523032188415527 + ], + [ + "Surrounded", + -13.523046493530272 + ], + [ + "▁PRODUCTS", + -13.52305030822754 + ], + [ + "▁complicate", + -13.52333164215088 + ], + [ + "▁Paste", + -13.52341365814209 + ], + [ + "nne", + -13.523468017578123 + ], + [ + "▁ineligible", + -13.523598670959473 + ], + [ + "▁makeshift", + -13.523626327514648 + ], + [ + "▁insulting", + -13.52378749847412 + ], + [ + "asse", + -13.523831367492676 + ], + [ + "▁11\"", + -13.523983001708984 + ], + [ + "▁Countertop", + -13.524009704589844 + ], + [ + "▁Debate", + -13.5241117477417 + ], + [ + "456", + -13.524144172668455 + ], + [ + "styled", + -13.524231910705566 + ], + [ + "Charlie", + -13.524243354797363 + ], + [ + "delivery", + -13.524255752563477 + ], + [ + "▁dismal", + -13.524277687072754 + ], + [ + "IRE", + -13.52428913116455 + ], + [ + "tak", + -13.524306297302246 + ], + [ + "▁binoculars", + -13.524319648742676 + ], + [ + "kim", + -13.524433135986328 + ], + [ + "▁huts", + -13.524490356445312 + ], + [ + "telling", + -13.524571418762209 + ], + [ + "sold", + -13.524605751037598 + ], + [ + "▁Busy", + 
-13.524616241455078 + ], + [ + "mx", + -13.524652481079102 + ], + [ + "▁Himalayas", + -13.52480125427246 + ], + [ + "▁Spruce", + -13.524807929992676 + ], + [ + "▁Zac", + -13.524821281433104 + ], + [ + "typical", + -13.524827003479004 + ], + [ + "▁Licence", + -13.524867057800291 + ], + [ + "lad", + -13.524877548217772 + ], + [ + "▁bitcoins", + -13.524938583374023 + ], + [ + "▁(2005)", + -13.525089263916016 + ], + [ + "▁discriminatory", + -13.525238037109377 + ], + [ + "ARS", + -13.525273323059082 + ], + [ + "▁demeanor", + -13.52528953552246 + ], + [ + "▁divisive", + -13.52528953552246 + ], + [ + "▁persuasion", + -13.52528953552246 + ], + [ + "▁upheaval", + -13.52528953552246 + ], + [ + "▁Alameda", + -13.52529239654541 + ], + [ + "▁Velcro", + -13.52529525756836 + ], + [ + "▁Expedia", + -13.525300979614258 + ], + [ + "▁flask", + -13.525306701660156 + ], + [ + "▁ware", + -13.52536964416504 + ], + [ + "▁Betsy", + -13.525407791137695 + ], + [ + "▁onstage", + -13.525407791137695 + ], + [ + "aves", + -13.525425910949709 + ], + [ + "▁Delphi", + -13.525445938110352 + ], + [ + "165", + -13.525476455688477 + ], + [ + "▁Lankan", + -13.525480270385742 + ], + [ + "▁SMB", + -13.525480270385742 + ], + [ + "▁Ner", + -13.525484085083008 + ], + [ + "Storm", + -13.525500297546388 + ], + [ + "Area", + -13.525529861450195 + ], + [ + "Muslim", + -13.525586128234863 + ], + [ + "PRESS", + -13.52564525604248 + ], + [ + "▁protagonists", + -13.525650024414062 + ], + [ + "▁netted", + -13.525762557983398 + ], + [ + "▁parity", + -13.525776863098145 + ], + [ + "▁handlers", + -13.525873184204102 + ], + [ + "▁cookbooks", + -13.526083946228027 + ], + [ + "▁Bry", + -13.526237487792969 + ], + [ + "▁Pauline", + -13.526237487792969 + ], + [ + "▁Magical", + -13.526384353637695 + ], + [ + "▁canning", + -13.526391983032228 + ], + [ + "▁Iso", + -13.526400566101074 + ], + [ + "▁seo", + -13.526500701904297 + ], + [ + "▁Transparency", + -13.526514053344728 + ], + [ + "▁866", + -13.526524543762209 + ], + [ + "amer", + -13.526529312133787 + ], + [ + "▁Roku", + -13.526537895202637 + ], + [ + "▁yolk", + -13.526551246643066 + ], + [ + "hom", + -13.526564598083496 + ], + [ + "▁BAC", + -13.526670455932615 + ], + [ + "▁MSI", + -13.526703834533691 + ], + [ + "▁inmate", + -13.52671718597412 + ], + [ + "▁snare", + -13.526751518249512 + ], + [ + "▁negatives", + -13.526825904846191 + ], + [ + "kamp", + -13.526826858520508 + ], + [ + "▁Founding", + -13.52683448791504 + ], + [ + "▁Shadows", + -13.526863098144531 + ], + [ + "command", + -13.526911735534668 + ], + [ + "ATING", + -13.5269136428833 + ], + [ + "▁Cult", + -13.526933670043944 + ], + [ + "▁blueprints", + -13.527018547058104 + ], + [ + "pio", + -13.527213096618652 + ], + [ + "▁railings", + -13.527267456054688 + ], + [ + "Eastern", + -13.527297973632812 + ], + [ + "▁chute", + -13.527427673339844 + ], + [ + "▁LPG", + -13.527524948120115 + ], + [ + "146", + -13.527565956115724 + ], + [ + "▁Sacrament", + -13.527572631835938 + ], + [ + "▁ruining", + -13.527587890625 + ], + [ + "▁amused", + -13.52761173248291 + ], + [ + "Include", + -13.5276460647583 + ], + [ + "▁Principle", + -13.5276460647583 + ], + [ + "▁Vid", + -13.527649879455566 + ], + [ + "▁Scoop", + -13.527718544006348 + ], + [ + "jes", + -13.527751922607422 + ], + [ + "▁Hallmark", + -13.527758598327637 + ], + [ + "6.6", + -13.527765274047852 + ], + [ + "eek", + -13.527782440185549 + ], + [ + "▁Painter", + -13.527804374694824 + ], + [ + "▁Sasha", + -13.527804374694824 + ], + [ + "▁Byzantine", + -13.527853965759276 + ], + [ + "▁innovating", 
+ -13.527853965759276 + ], + [ + "▁mitigating", + -13.527853965759276 + ], + [ + "▁reverence", + -13.52785587310791 + ], + [ + "classic", + -13.52786922454834 + ], + [ + "▁Derrick", + -13.527871131896973 + ], + [ + "▁Fundraising", + -13.527888298034668 + ], + [ + "▁Granada", + -13.527891159057615 + ], + [ + "▁contra", + -13.52790355682373 + ], + [ + "▁hoax", + -13.527908325195312 + ], + [ + "007", + -13.527920722961426 + ], + [ + "▁ea", + -13.527945518493652 + ], + [ + "▁damper", + -13.527948379516602 + ], + [ + "subscribe", + -13.5279541015625 + ], + [ + "▁Neville", + -13.528036117553713 + ], + [ + "enham", + -13.52807903289795 + ], + [ + "▁epub", + -13.528084754943848 + ], + [ + "▁Worse", + -13.528111457824709 + ], + [ + "▁regionally", + -13.52824592590332 + ], + [ + "▁crayons", + -13.52835464477539 + ], + [ + "▁LF", + -13.52859115600586 + ], + [ + "Ty", + -13.528730392456056 + ], + [ + "▁profiled", + -13.52875804901123 + ], + [ + "understand", + -13.52882480621338 + ], + [ + "▁screamed", + -13.528852462768556 + ], + [ + "aise", + -13.528882026672363 + ], + [ + "abe", + -13.528943061828612 + ], + [ + "abba", + -13.528966903686523 + ], + [ + "▁freshen", + -13.528971672058104 + ], + [ + "TEM", + -13.529006958007812 + ], + [ + "760", + -13.52903175354004 + ], + [ + "▁feud", + -13.529129028320312 + ], + [ + "▁omission", + -13.529175758361816 + ], + [ + "▁expats", + -13.529226303100586 + ], + [ + "authentic", + -13.529287338256836 + ], + [ + "emergency", + -13.52929401397705 + ], + [ + "▁campsites", + -13.529353141784668 + ], + [ + "▁Spear", + -13.529362678527832 + ], + [ + "Putting", + -13.529398918151855 + ], + [ + "▁purposely", + -13.529459953308104 + ], + [ + "▁tru", + -13.529531478881836 + ], + [ + "9%)", + -13.52958869934082 + ], + [ + "▁bookstores", + -13.529655456542969 + ], + [ + "▁glimpses", + -13.529871940612791 + ], + [ + "▁235", + -13.52989673614502 + ], + [ + "▁bl", + -13.529905319213867 + ], + [ + "▁Grounds", + -13.52993869781494 + ], + [ + "▁orbital", + -13.53005027770996 + ], + [ + "KT", + -13.530118942260742 + ], + [ + "wax", + -13.530132293701172 + ], + [ + "▁sulphur", + -13.530134201049805 + ], + [ + "names", + -13.530144691467283 + ], + [ + "▁intelligently", + -13.530174255371094 + ], + [ + "AY", + -13.530303955078123 + ], + [ + "▁Demi", + -13.53030490875244 + ], + [ + "draw", + -13.530324935913086 + ], + [ + "2020", + -13.53033447265625 + ], + [ + "▁Johann", + -13.530353546142578 + ], + [ + "chemistry", + -13.530359268188477 + ], + [ + "ANE", + -13.530400276184082 + ], + [ + "▁1980,", + -13.530424118041992 + ], + [ + "▁ancillary", + -13.530424118041992 + ], + [ + "▁bragging", + -13.530447959899902 + ], + [ + "▁naughty", + -13.530516624450684 + ], + [ + "▁subtract", + -13.530526161193848 + ], + [ + "▁Vocational", + -13.530534744262695 + ], + [ + "▁sailboat", + -13.530574798583984 + ], + [ + "▁Passover", + -13.530654907226562 + ], + [ + "▁Separation", + -13.530702590942385 + ], + [ + "▁Bod", + -13.53073024749756 + ], + [ + "▁1,800", + -13.530755043029783 + ], + [ + "▁Xmas", + -13.530762672424316 + ], + [ + "rade", + -13.530885696411133 + ], + [ + "▁abbreviation", + -13.530895233154297 + ], + [ + "▁thinker", + -13.530999183654783 + ], + [ + "▁Outback", + -13.531109809875488 + ], + [ + "▁Locate", + -13.53111743927002 + ], + [ + "▁cliché", + -13.531123161315918 + ], + [ + "▁Couples", + -13.531137466430664 + ], + [ + "▁assigns", + -13.531169891357422 + ], + [ + "-41", + -13.531342506408691 + ], + [ + "Monitor", + -13.53134822845459 + ], + [ + "▁Clement", + -13.531363487243652 
+ ], + [ + "▁Punta", + -13.53139305114746 + ], + [ + "▁worsening", + -13.53139591217041 + ], + [ + "▁(2016).", + -13.531429290771484 + ], + [ + "▁throwback", + -13.531482696533203 + ], + [ + "▁straws", + -13.53149700164795 + ], + [ + "olu", + -13.531713485717772 + ], + [ + "foil", + -13.531728744506836 + ], + [ + "▁hamburger", + -13.531744956970217 + ], + [ + "▁Balm", + -13.531745910644531 + ], + [ + "▁Yak", + -13.531867980957031 + ], + [ + "Res", + -13.531879425048828 + ], + [ + "▁condominiums", + -13.531886100769045 + ], + [ + "rgen", + -13.53190803527832 + ], + [ + "?),", + -13.531977653503418 + ], + [ + "engineering", + -13.53215217590332 + ], + [ + "john", + -13.532174110412598 + ], + [ + "▁Dispatch", + -13.532177925109863 + ], + [ + "▁7-10", + -13.53222942352295 + ], + [ + "▁Novo", + -13.532313346862791 + ], + [ + "ndez", + -13.532373428344728 + ], + [ + "▁prosecute", + -13.532403945922852 + ], + [ + "wale", + -13.532410621643066 + ], + [ + "▁Refuge", + -13.532501220703123 + ], + [ + "▁Donations", + -13.53250503540039 + ], + [ + "▁bottoms", + -13.532575607299805 + ], + [ + "▁Kota", + -13.532794952392578 + ], + [ + "▁predominant", + -13.53279972076416 + ], + [ + "▁Somewhere", + -13.532831192016602 + ], + [ + "▁brethren", + -13.533001899719238 + ], + [ + "▁collapsing", + -13.533001899719238 + ], + [ + "▁melancholy", + -13.533001899719238 + ], + [ + "▁Semiconductor", + -13.533003807067873 + ], + [ + "▁Mavericks", + -13.533008575439451 + ], + [ + "▁inclement", + -13.533008575439451 + ], + [ + "▁relegation", + -13.533026695251465 + ], + [ + "▁jockey", + -13.533087730407717 + ], + [ + "▁ponytail", + -13.533101081848145 + ], + [ + "▁Vapor", + -13.53311252593994 + ], + [ + "▁Marshal", + -13.53311824798584 + ], + [ + "▁rebirth", + -13.53313159942627 + ], + [ + "▁Twitch", + -13.533143997192385 + ], + [ + "wit", + -13.533163070678713 + ], + [ + "▁absentee", + -13.533205032348633 + ], + [ + "▁Craftsman", + -13.533243179321287 + ], + [ + "▁impulses", + -13.53327178955078 + ], + [ + "▁narcotics", + -13.533352851867676 + ], + [ + "▁mole", + -13.533366203308104 + ], + [ + "▁heartache", + -13.53338623046875 + ], + [ + "▁behaves", + -13.53343105316162 + ], + [ + "▁superheroes", + -13.53345775604248 + ], + [ + "▁Shelton", + -13.533514022827148 + ], + [ + "stru", + -13.533547401428224 + ], + [ + "▁Owned", + -13.533554077148438 + ], + [ + "Parent", + -13.53355598449707 + ], + [ + "▁CRE", + -13.533576011657717 + ], + [ + "▁Odds", + -13.533608436584473 + ], + [ + "▁parades", + -13.533625602722168 + ], + [ + "▁THESE", + -13.533638954162598 + ], + [ + "▁batters", + -13.533735275268556 + ], + [ + "amento", + -13.53373908996582 + ], + [ + "▁Ramos", + -13.533818244934082 + ], + [ + "137", + -13.533820152282717 + ], + [ + "Items", + -13.533838272094728 + ], + [ + "▁Waves", + -13.533905029296877 + ], + [ + "augh", + -13.53391456604004 + ], + [ + "▁inventors", + -13.53395175933838 + ], + [ + "▁vista", + -13.533957481384276 + ], + [ + "▁Proud", + -13.533960342407228 + ], + [ + "▁Maxim", + -13.533975601196287 + ], + [ + "▁$36", + -13.534008979797363 + ], + [ + "▁NOR", + -13.534059524536133 + ], + [ + "▁Apostle", + -13.534117698669434 + ], + [ + "▁waivers", + -13.53414821624756 + ], + [ + "american", + -13.534154891967772 + ], + [ + "▁clump", + -13.534189224243164 + ], + [ + "inho", + -13.53433322906494 + ], + [ + "erry", + -13.534368515014648 + ], + [ + "▁xp", + -13.534406661987305 + ], + [ + "▁MUSIC", + -13.534417152404783 + ], + [ + "RAIN", + -13.534478187561035 + ], + [ + "▁renewables", + -13.53451442718506 + ], 
+ [ + "kl", + -13.534549713134766 + ], + [ + "▁Bei", + -13.53455638885498 + ], + [ + "Ref", + -13.534564971923828 + ], + [ + "Anthony", + -13.53457736968994 + ], + [ + "▁Rossi", + -13.534629821777344 + ], + [ + "Plastic", + -13.534669876098633 + ], + [ + "Fab", + -13.534676551818848 + ], + [ + "▁Lyons", + -13.53468132019043 + ], + [ + "177", + -13.53470230102539 + ], + [ + "Sept", + -13.5347261428833 + ], + [ + "▁Turin", + -13.534729957580566 + ], + [ + "▁logistic", + -13.534738540649414 + ], + [ + "bha", + -13.534746170043944 + ], + [ + "-84", + -13.53480625152588 + ], + [ + "▁Charges", + -13.534836769104004 + ], + [ + "▁disagreements", + -13.534862518310549 + ], + [ + "▁EVO", + -13.535030364990234 + ], + [ + "620", + -13.535038948059082 + ], + [ + "▁malaysia", + -13.535093307495115 + ], + [ + "bour", + -13.535101890563965 + ], + [ + "▁Beats", + -13.535126686096191 + ], + [ + "▁forecasted", + -13.535154342651367 + ], + [ + "adjusted", + -13.535235404968262 + ], + [ + "NOW", + -13.53525161743164 + ], + [ + "447", + -13.535270690917969 + ], + [ + "▁clam", + -13.535391807556152 + ], + [ + "▁denominations", + -13.53541088104248 + ], + [ + "▁Grandpa", + -13.53544807434082 + ], + [ + "▁Richie", + -13.53551959991455 + ], + [ + "▁Chandigarh", + -13.535585403442385 + ], + [ + "▁giraffe", + -13.535585403442385 + ], + [ + "▁meringue", + -13.5355863571167 + ], + [ + "▁nike", + -13.535589218139648 + ], + [ + "▁corpus", + -13.535592079162598 + ], + [ + "▁Lamborghini", + -13.535597801208496 + ], + [ + "▁gooey", + -13.535605430603027 + ], + [ + "▁applique", + -13.53566837310791 + ], + [ + "▁Eleven", + -13.535701751708984 + ], + [ + "▁Shaft", + -13.535703659057615 + ], + [ + "▁Tyne", + -13.535778999328612 + ], + [ + "▁Minnie", + -13.53578758239746 + ], + [ + "nox", + -13.535799026489258 + ], + [ + "▁SCI", + -13.535812377929688 + ], + [ + "Upload", + -13.535922050476074 + ], + [ + "▁Fathers", + -13.535935401916504 + ], + [ + "▁Spe", + -13.535968780517578 + ], + [ + "▁reconstructed", + -13.536049842834473 + ], + [ + "tac", + -13.536149978637695 + ], + [ + "▁Blow", + -13.536199569702148 + ], + [ + "▁Cider", + -13.536209106445312 + ], + [ + "04.", + -13.536312103271484 + ], + [ + "oxin", + -13.53632640838623 + ], + [ + "▁opioids", + -13.536332130432127 + ], + [ + "hydro", + -13.536357879638672 + ], + [ + "omb", + -13.536365509033203 + ], + [ + "▁lotus", + -13.536367416381836 + ], + [ + "▁sacked", + -13.536370277404783 + ], + [ + "▁Sup", + -13.53639793395996 + ], + [ + "REN", + -13.536450386047363 + ], + [ + "▁seasonings", + -13.536633491516112 + ], + [ + "▁Angola", + -13.536718368530272 + ], + [ + "▁preschoolers", + -13.536772727966309 + ], + [ + "MAL", + -13.536890029907228 + ], + [ + "▁prof", + -13.53691291809082 + ], + [ + "Collect", + -13.536925315856934 + ], + [ + "cular", + -13.536925315856934 + ], + [ + "▁Levin", + -13.53693389892578 + ], + [ + "Speak", + -13.536949157714844 + ], + [ + "▁HSBC", + -13.536964416503906 + ], + [ + "▁shingle", + -13.53703784942627 + ], + [ + "▁swam", + -13.5370512008667 + ], + [ + "EUR", + -13.537100791931152 + ], + [ + "VEL", + -13.537114143371582 + ], + [ + "2+", + -13.537137985229492 + ], + [ + "educated", + -13.53717041015625 + ], + [ + "▁Francesco", + -13.537178993225098 + ], + [ + "Junior", + -13.537216186523438 + ], + [ + "ENE", + -13.5372314453125 + ], + [ + "jah", + -13.53725242614746 + ], + [ + "▁adrenal", + -13.537254333496094 + ], + [ + "▁Espresso", + -13.537359237670898 + ], + [ + "▁Jackets", + -13.537419319152832 + ], + [ + "LIGHT", + -13.537435531616213 + ], 
+ [ + "Wiki", + -13.537453651428224 + ], + [ + "768", + -13.537464141845703 + ], + [ + "▁noir", + -13.537468910217283 + ], + [ + "▁Volleyball", + -13.537487983703612 + ], + [ + "▁boardroom", + -13.537564277648926 + ], + [ + "124", + -13.537592887878418 + ], + [ + "natal", + -13.537598609924316 + ], + [ + "▁ergonomics", + -13.537755012512209 + ], + [ + "ejo", + -13.537786483764648 + ], + [ + "hna", + -13.537851333618164 + ], + [ + "▁effected", + -13.537952423095703 + ], + [ + "730", + -13.537972450256348 + ], + [ + "Foot", + -13.53798484802246 + ], + [ + "xel", + -13.538061141967772 + ], + [ + "▁Volcano", + -13.538065910339355 + ], + [ + "Rail", + -13.53807544708252 + ], + [ + "▁Ferdinand", + -13.53817653656006 + ], + [ + "▁horribly", + -13.53817653656006 + ], + [ + "▁motivator", + -13.53817653656006 + ], + [ + "▁movable", + -13.538178443908691 + ], + [ + "▁complied", + -13.538195610046388 + ], + [ + "129", + -13.538208961486816 + ], + [ + "▁Regency", + -13.538209915161133 + ], + [ + "▁Centro", + -13.538249015808104 + ], + [ + "▁unprepared", + -13.538251876831056 + ], + [ + "BW", + -13.538272857666016 + ], + [ + "▁Serum", + -13.538311004638672 + ], + [ + "▁yearbook", + -13.53831672668457 + ], + [ + "▁Atkinson", + -13.538336753845217 + ], + [ + "6.0", + -13.53842544555664 + ], + [ + "seeded", + -13.53844165802002 + ], + [ + "▁Irma", + -13.5385103225708 + ], + [ + "Flo", + -13.538515090942385 + ], + [ + "▁Texture", + -13.538572311401367 + ], + [ + "dip", + -13.538596153259276 + ], + [ + "Measure", + -13.538619995117188 + ], + [ + "▁stairway", + -13.5386381149292 + ], + [ + "▁Rab", + -13.538678169250488 + ], + [ + "▁firmer", + -13.538816452026367 + ], + [ + "▁mater", + -13.538931846618652 + ], + [ + "Wise", + -13.538941383361816 + ], + [ + "▁Loading", + -13.53899097442627 + ], + [ + "asp", + -13.539010047912598 + ], + [ + "raf", + -13.53903865814209 + ], + [ + "▁2018!", + -13.539073944091797 + ], + [ + "NIA", + -13.539130210876465 + ], + [ + "▁acumen", + -13.539199829101562 + ], + [ + "▁Singing", + -13.539206504821776 + ], + [ + "mean", + -13.539224624633787 + ], + [ + "▁Locke", + -13.53939151763916 + ], + [ + "▁dividers", + -13.539410591125488 + ], + [ + "opathy", + -13.539414405822754 + ], + [ + "▁Selecting", + -13.53945255279541 + ], + [ + "▁fuses", + -13.539484977722168 + ], + [ + "!!!!!!", + -13.539557456970217 + ], + [ + "▁lubricant", + -13.539595603942873 + ], + [ + "588", + -13.539627075195312 + ], + [ + "▁UF", + -13.539688110351562 + ], + [ + "185", + -13.53969669342041 + ], + [ + "▁agony", + -13.53975772857666 + ], + [ + "LET", + -13.539796829223633 + ], + [ + "Madison", + -13.53989601135254 + ], + [ + "Dental", + -13.539897918701172 + ], + [ + "213", + -13.539932250976562 + ], + [ + "▁IRC", + -13.539941787719728 + ], + [ + "Treatment", + -13.53998851776123 + ], + [ + "population", + -13.540035247802734 + ], + [ + "▁conformity", + -13.540048599243164 + ], + [ + "▁$34", + -13.540082931518556 + ], + [ + "38.", + -13.54017734527588 + ], + [ + "▁Avid", + -13.540191650390623 + ], + [ + "429", + -13.540196418762209 + ], + [ + "▁Brace", + -13.540263175964355 + ], + [ + "▁EO", + -13.540288925170898 + ], + [ + "▁Pfizer", + -13.5403470993042 + ], + [ + "▁Sesame", + -13.540348052978516 + ], + [ + "▁Caterpillar", + -13.54040813446045 + ], + [ + "ych", + -13.54052734375 + ], + [ + "yla", + -13.54054832458496 + ], + [ + "igno", + -13.540550231933594 + ], + [ + "▁2014)", + -13.540629386901855 + ], + [ + "▁Trooper", + -13.540644645690918 + ], + [ + "▁Separator", + -13.540773391723633 + ], + [ + 
"▁Smoky", + -13.540773391723633 + ], + [ + "▁ointment", + -13.540773391723633 + ], + [ + "▁spectroscopy", + -13.540773391723633 + ], + [ + "▁storied", + -13.54077434539795 + ], + [ + "▁Latitude", + -13.540775299072266 + ], + [ + "▁Backsplash", + -13.54078769683838 + ], + [ + "▁Collision", + -13.540788650512695 + ], + [ + "▁unsuitable", + -13.54079532623291 + ], + [ + "▁conquest", + -13.540803909301758 + ], + [ + "▁Phillies", + -13.540812492370604 + ], + [ + "▁trellis", + -13.54084300994873 + ], + [ + "▁520", + -13.540861129760742 + ], + [ + "-46", + -13.540863990783691 + ], + [ + "▁postpartum", + -13.540863990783691 + ], + [ + "▁Opti", + -13.540898323059082 + ], + [ + "▁substrates", + -13.540907859802246 + ], + [ + "▁teapot", + -13.540990829467772 + ], + [ + "▁Intelli", + -13.540997505187988 + ], + [ + "▁Stalin", + -13.54099941253662 + ], + [ + "▁yoke", + -13.541017532348633 + ], + [ + "▁stadiums", + -13.541150093078612 + ], + [ + "▁bordered", + -13.541172981262209 + ], + [ + "▁Comb", + -13.541258811950684 + ], + [ + "728", + -13.541306495666504 + ], + [ + "▁Hahn", + -13.54142951965332 + ], + [ + "Near", + -13.541516304016112 + ], + [ + "gloss", + -13.541523933410645 + ], + [ + "▁crystalline", + -13.541534423828123 + ], + [ + "quite", + -13.541596412658691 + ], + [ + "▁btw", + -13.541722297668455 + ], + [ + "▁Postcard", + -13.541828155517578 + ], + [ + "XF", + -13.54184913635254 + ], + [ + "▁VGA", + -13.541869163513184 + ], + [ + "▁cutoff", + -13.541977882385254 + ], + [ + "VB", + -13.54201602935791 + ], + [ + "▁AUTO", + -13.54201602935791 + ], + [ + "▁Carb", + -13.542064666748049 + ], + [ + "-54", + -13.542070388793944 + ], + [ + "19)", + -13.542244911193848 + ], + [ + "▁Gris", + -13.5422945022583 + ], + [ + "NING", + -13.542401313781738 + ], + [ + "Optional", + -13.54250717163086 + ], + [ + "▁underrated", + -13.542557716369627 + ], + [ + "marked", + -13.542580604553224 + ], + [ + "tted", + -13.542610168457031 + ], + [ + "cles", + -13.542645454406738 + ], + [ + "reel", + -13.54279899597168 + ], + [ + "▁Charcoal", + -13.54287338256836 + ], + [ + "stamp", + -13.542990684509276 + ], + [ + "▁camped", + -13.542990684509276 + ], + [ + "725", + -13.543002128601074 + ], + [ + "asha", + -13.543002128601074 + ], + [ + "▁Plateau", + -13.543006896972656 + ], + [ + "▁Sonny", + -13.543013572692873 + ], + [ + "▁Hats", + -13.543052673339844 + ], + [ + "▁Councillor", + -13.5430908203125 + ], + [ + "dum", + -13.543091773986816 + ], + [ + "lep", + -13.543094635009766 + ], + [ + "rhome", + -13.54312515258789 + ], + [ + "▁dialect", + -13.543220520019531 + ], + [ + "▁Nik", + -13.543269157409668 + ], + [ + "OPS", + -13.543307304382324 + ], + [ + "▁eyelash", + -13.543359756469728 + ], + [ + "▁Audubon", + -13.543377876281738 + ], + [ + "▁reciprocal", + -13.543377876281738 + ], + [ + "▁Assault", + -13.543381690979004 + ], + [ + "▁Galleries", + -13.543387413024902 + ], + [ + "▁Societies", + -13.54338836669922 + ], + [ + "▁Liberation", + -13.543405532836914 + ], + [ + "▁lilies", + -13.54343605041504 + ], + [ + "▁mitochondrial", + -13.543437004089355 + ], + [ + "▁Lat", + -13.543441772460938 + ], + [ + "▁sag", + -13.543471336364746 + ], + [ + "▁blitz", + -13.543492317199709 + ], + [ + "▁Rodeo", + -13.543527603149414 + ], + [ + "SAR", + -13.543620109558104 + ], + [ + "▁multitasking", + -13.543646812438965 + ], + [ + "▁fri", + -13.543648719787598 + ], + [ + "eric", + -13.543673515319824 + ], + [ + "▁socializing", + -13.543713569641112 + ], + [ + "▁Hawai", + -13.543737411499023 + ], + [ + "Palm", + -13.543824195861816 + 
], + [ + "▁Able", + -13.543904304504396 + ], + [ + "▁Wilde", + -13.543936729431152 + ], + [ + "cop", + -13.54394817352295 + ], + [ + "express", + -13.543990135192873 + ], + [ + "▁Sling", + -13.543994903564451 + ], + [ + "▁Ska", + -13.544105529785156 + ], + [ + "woo", + -13.544109344482422 + ], + [ + "▁GoPro", + -13.544135093688965 + ], + [ + "▁Basque", + -13.544181823730469 + ], + [ + "loader", + -13.54419994354248 + ], + [ + "icky", + -13.544233322143556 + ], + [ + "▁eighteenth", + -13.544267654418944 + ], + [ + "rino", + -13.5443115234375 + ], + [ + "▁birding", + -13.544320106506348 + ], + [ + "▁Violin", + -13.54434299468994 + ], + [ + "▁vow", + -13.544384002685549 + ], + [ + "Window", + -13.544397354125977 + ], + [ + "▁Checklist", + -13.544414520263672 + ], + [ + "tracking", + -13.54449462890625 + ], + [ + "dden", + -13.544501304626465 + ], + [ + "▁Credits", + -13.544513702392578 + ], + [ + "rew", + -13.54458713531494 + ], + [ + "Roman", + -13.544615745544434 + ], + [ + "533", + -13.544722557067873 + ], + [ + "▁VIDEO", + -13.544733047485352 + ], + [ + "▁neutralize", + -13.544794082641602 + ], + [ + "▁corrosive", + -13.544819831848145 + ], + [ + "▁referees", + -13.544857025146484 + ], + [ + "901", + -13.544893264770508 + ], + [ + "▁Diver", + -13.544977188110352 + ], + [ + "▁softening", + -13.54503059387207 + ], + [ + "curricular", + -13.54506492614746 + ], + [ + "tract", + -13.545110702514648 + ], + [ + "▁Checking", + -13.545110702514648 + ], + [ + "▁FORM", + -13.545133590698242 + ], + [ + "▁erupt", + -13.54517936706543 + ], + [ + "▁headsets", + -13.545279502868652 + ], + [ + "376", + -13.54534149169922 + ], + [ + "AIR", + -13.545355796813965 + ], + [ + "enna", + -13.54535675048828 + ], + [ + "▁stretchy", + -13.545395851135254 + ], + [ + "▁dilute", + -13.54539966583252 + ], + [ + "dropping", + -13.545418739318848 + ], + [ + "lynn", + -13.545419692993164 + ], + [ + "▁Specification", + -13.545432090759276 + ], + [ + "8000", + -13.54545783996582 + ], + [ + "dB", + -13.545486450195312 + ], + [ + "▁Hundred", + -13.545488357543944 + ], + [ + "▁Saying", + -13.54549789428711 + ], + [ + "WO", + -13.545512199401855 + ], + [ + "▁Discounts", + -13.5455322265625 + ], + [ + "guru", + -13.545584678649902 + ], + [ + "▁2012)", + -13.54558563232422 + ], + [ + "▁Electrician", + -13.545682907104492 + ], + [ + "▁inscribed", + -13.545778274536133 + ], + [ + "▁Graz", + -13.545802116394045 + ], + [ + "▁Soc", + -13.54580307006836 + ], + [ + "▁hangout", + -13.54580783843994 + ], + [ + "▁froth", + -13.545918464660645 + ], + [ + "lek", + -13.545920372009276 + ], + [ + "▁Vibe", + -13.545940399169922 + ], + [ + "▁Citrix", + -13.54599380493164 + ], + [ + "▁swung", + -13.546000480651855 + ], + [ + "▁THREE", + -13.546001434326172 + ], + [ + "▁Dolby", + -13.546037673950195 + ], + [ + "▁brewers", + -13.54605197906494 + ], + [ + "▁satire", + -13.546055793762209 + ], + [ + "▁Chorus", + -13.546103477478027 + ], + [ + "▁Plumber", + -13.546133041381836 + ], + [ + "▁bluegrass", + -13.546168327331545 + ], + [ + "▁waxed", + -13.546182632446287 + ], + [ + "▁Oils", + -13.546221733093262 + ], + [ + "5.7", + -13.546222686767578 + ], + [ + "▁avert", + -13.546282768249512 + ], + [ + "Career", + -13.54628562927246 + ], + [ + "▁metering", + -13.546309471130373 + ], + [ + "rique", + -13.546337127685549 + ], + [ + "twin", + -13.546381950378418 + ], + [ + "▁Gwen", + -13.546513557434082 + ], + [ + "rub", + -13.546587944030762 + ], + [ + "eld", + -13.546643257141112 + ], + [ + "▁Huron", + -13.54665756225586 + ], + [ + "10%", + 
-13.54685115814209 + ], + [ + "▁Tuition", + -13.546853065490724 + ], + [ + "▁Magnificent", + -13.547053337097168 + ], + [ + "dus", + -13.547124862670898 + ], + [ + "lude", + -13.547249794006348 + ], + [ + "815", + -13.547264099121094 + ], + [ + "evi", + -13.547327041625977 + ], + [ + "▁Passage", + -13.547372817993164 + ], + [ + "▁1974.", + -13.547391891479492 + ], + [ + "▁6.4", + -13.547433853149414 + ], + [ + "▁curators", + -13.547554969787598 + ], + [ + "arina", + -13.54755687713623 + ], + [ + "▁blanks", + -13.547581672668455 + ], + [ + "▁fostered", + -13.547609329223633 + ], + [ + "▁mAh", + -13.547738075256348 + ], + [ + "Beach", + -13.54777717590332 + ], + [ + "▁partying", + -13.54781723022461 + ], + [ + "▁Bloody", + -13.547871589660645 + ], + [ + "bench", + -13.547937393188477 + ], + [ + "▁notifying", + -13.54794216156006 + ], + [ + "▁Lakeland", + -13.547974586486816 + ], + [ + "▁furnaces", + -13.547977447509766 + ], + [ + "ebo", + -13.54804229736328 + ], + [ + "▁artisanal", + -13.548052787780762 + ], + [ + "▁Iphone", + -13.54818344116211 + ], + [ + "Alt", + -13.54822063446045 + ], + [ + "▁strut", + -13.548235893249512 + ], + [ + "monthly", + -13.548307418823242 + ], + [ + "lisa", + -13.548312187194824 + ], + [ + "▁ANSI", + -13.54834270477295 + ], + [ + "▁adheres", + -13.548383712768556 + ], + [ + "▁Assoc", + -13.548474311828612 + ], + [ + "abilities", + -13.54850959777832 + ], + [ + "▁poo", + -13.548524856567385 + ], + [ + "▁dieting", + -13.54860496520996 + ], + [ + "▁Potomac", + -13.548606872558594 + ], + [ + "▁archipelago", + -13.548606872558594 + ], + [ + "▁auspicious", + -13.548606872558594 + ], + [ + "▁innumerable", + -13.548606872558594 + ], + [ + "▁Dresden", + -13.54860782623291 + ], + [ + "▁Orioles", + -13.54860782623291 + ], + [ + "▁Sinatra", + -13.548609733581545 + ], + [ + "▁tortoise", + -13.548609733581545 + ], + [ + "▁uns", + -13.548626899719238 + ], + [ + "▁Biotechnology", + -13.548629760742188 + ], + [ + "GAN", + -13.54863166809082 + ], + [ + "▁softener", + -13.548704147338867 + ], + [ + "▁Provo", + -13.548714637756348 + ], + [ + "▁whey", + -13.548725128173828 + ], + [ + "▁Ally", + -13.548768043518066 + ], + [ + "▁Epub", + -13.548871994018556 + ], + [ + "▁Completed", + -13.548891067504885 + ], + [ + "▁sermons", + -13.548892974853516 + ], + [ + "▁Clarkson", + -13.54895305633545 + ], + [ + "().", + -13.548990249633787 + ], + [ + "595", + -13.548990249633787 + ], + [ + "9.9", + -13.548991203308104 + ], + [ + "▁Nook", + -13.548998832702637 + ], + [ + "▁stalks", + -13.54902458190918 + ], + [ + "▁INS", + -13.549047470092772 + ], + [ + "▁$1.1", + -13.549081802368164 + ], + [ + "▁staffers", + -13.549190521240234 + ], + [ + "425", + -13.54921817779541 + ], + [ + "▁VG", + -13.549299240112305 + ], + [ + "▁Advances", + -13.54935359954834 + ], + [ + "▁Poems", + -13.549368858337402 + ], + [ + "▁183", + -13.549449920654297 + ], + [ + "▁Lai", + -13.549460411071776 + ], + [ + "▁tripping", + -13.549540519714355 + ], + [ + "▁Stro", + -13.549606323242188 + ], + [ + "SRC", + -13.549700736999512 + ], + [ + "▁FIRE", + -13.549747467041016 + ], + [ + "0-2", + -13.549867630004885 + ], + [ + "mud", + -13.549943923950195 + ], + [ + "ibly", + -13.549972534179688 + ], + [ + "▁Losing", + -13.550016403198242 + ], + [ + "package", + -13.550074577331545 + ], + [ + "▁administrations", + -13.550143241882324 + ], + [ + "▁syringe", + -13.55015468597412 + ], + [ + "▁Isla", + -13.55015754699707 + ], + [ + "-1000", + -13.55016803741455 + ], + [ + "Expect", + -13.55017375946045 + ], + [ + "▁strapped", + 
-13.550214767456056 + ], + [ + "▁councillor", + -13.550235748291016 + ], + [ + "▁stockings", + -13.55027675628662 + ], + [ + "UTE", + -13.550312042236328 + ], + [ + "▁sul", + -13.550393104553224 + ], + [ + "minus", + -13.550422668457031 + ], + [ + "▁kilo", + -13.550430297851562 + ], + [ + "▁fender", + -13.550456047058104 + ], + [ + "uddin", + -13.550460815429688 + ], + [ + "▁Mediation", + -13.550493240356444 + ], + [ + "▁ORDER", + -13.55051612854004 + ], + [ + "▁contestant", + -13.550529479980469 + ], + [ + "▁Carrot", + -13.550543785095217 + ], + [ + "▁cheered", + -13.550543785095217 + ], + [ + "Singapore", + -13.5506010055542 + ], + [ + "museum", + -13.550626754760742 + ], + [ + "KK", + -13.550644874572754 + ], + [ + "mint", + -13.550697326660156 + ], + [ + "Gb", + -13.550703048706056 + ], + [ + "▁1969,", + -13.550707817077637 + ], + [ + "▁Lois", + -13.550774574279783 + ], + [ + "▁blatant", + -13.55099105834961 + ], + [ + "▁mating", + -13.550992012023926 + ], + [ + "County", + -13.551025390625 + ], + [ + "▁DEF", + -13.551043510437012 + ], + [ + "▁erratic", + -13.551207542419434 + ], + [ + "▁anniversaries", + -13.551231384277344 + ], + [ + "▁incandescent", + -13.551231384277344 + ], + [ + "▁ricotta", + -13.551231384277344 + ], + [ + "▁lieutenant", + -13.551234245300291 + ], + [ + "▁immature", + -13.551239013671877 + ], + [ + "▁convene", + -13.551241874694824 + ], + [ + "▁rambling", + -13.551247596740724 + ], + [ + "▁gulf", + -13.551264762878418 + ], + [ + "▁dictatorship", + -13.55129623413086 + ], + [ + "▁paralyzed", + -13.551353454589844 + ], + [ + "▁Honeywell", + -13.551361083984377 + ], + [ + "▁townhomes", + -13.551368713378906 + ], + [ + "Wolf", + -13.551383018493652 + ], + [ + "▁Sherwood", + -13.551397323608398 + ], + [ + "▁Israelites", + -13.551399230957031 + ], + [ + "▁Sven", + -13.551410675048828 + ], + [ + "▁orphans", + -13.551431655883787 + ], + [ + "12,", + -13.55147647857666 + ], + [ + "▁Tinder", + -13.55151653289795 + ], + [ + "▁Rays", + -13.55152702331543 + ], + [ + "▁culminated", + -13.551560401916504 + ], + [ + "beer", + -13.551650047302246 + ], + [ + "▁Grind", + -13.551717758178713 + ], + [ + "▁Tracks", + -13.551735877990724 + ], + [ + "▁withheld", + -13.55177402496338 + ], + [ + "clair", + -13.551838874816896 + ], + [ + "▁longitude", + -13.551857948303224 + ], + [ + "▁whine", + -13.551864624023438 + ], + [ + "910", + -13.551963806152344 + ], + [ + "theme", + -13.551968574523926 + ], + [ + "▁powerless", + -13.55197048187256 + ], + [ + "▁CAL", + -13.552040100097656 + ], + [ + "BX", + -13.552074432373049 + ], + [ + "▁Slo", + -13.552082061767578 + ], + [ + "▁Founders", + -13.55221939086914 + ], + [ + "tire", + -13.552278518676758 + ], + [ + "SEA", + -13.552335739135742 + ], + [ + "▁midi", + -13.552364349365234 + ], + [ + "sik", + -13.552380561828612 + ], + [ + "teacher", + -13.552411079406738 + ], + [ + "▁Bonn", + -13.552412986755373 + ], + [ + "▁NLP", + -13.55252170562744 + ], + [ + "▁burners", + -13.552539825439451 + ], + [ + "▁waits", + -13.552685737609863 + ], + [ + "▁Tuck", + -13.552733421325684 + ], + [ + "▁sect", + -13.552775382995604 + ], + [ + "377", + -13.552786827087402 + ], + [ + "▁hauled", + -13.552865028381348 + ], + [ + "▁clo", + -13.552899360656738 + ], + [ + "▁scandals", + -13.552906036376951 + ], + [ + "▁tights", + -13.553032875061035 + ], + [ + "3\"", + -13.55311393737793 + ], + [ + "SHA", + -13.55316925048828 + ], + [ + "▁miso", + -13.553200721740724 + ], + [ + "tf", + -13.553213119506836 + ], + [ + "PIC", + -13.55322265625 + ], + [ + "Diamond", + 
-13.553269386291504 + ], + [ + "Kill", + -13.5532865524292 + ], + [ + "Premium", + -13.553298950195312 + ], + [ + "▁Ket", + -13.55348300933838 + ], + [ + "238", + -13.553521156311035 + ], + [ + "▁simmering", + -13.553604125976562 + ], + [ + "▁figurative", + -13.553622245788574 + ], + [ + "Fuel", + -13.553680419921877 + ], + [ + "vox", + -13.553854942321776 + ], + [ + "▁escorted", + -13.553862571716309 + ], + [ + "▁pregnancies", + -13.553863525390623 + ], + [ + "▁vinaigrette", + -13.553863525390623 + ], + [ + "▁atlanta", + -13.553866386413574 + ], + [ + "▁Wolfgang", + -13.553875923156738 + ], + [ + "drink", + -13.553878784179688 + ], + [ + "▁Batteries", + -13.553881645202637 + ], + [ + "▁mediate", + -13.553888320922852 + ], + [ + "▁FPGA", + -13.55390739440918 + ], + [ + "▁poking", + -13.55390739440918 + ], + [ + "▁Oxfordshire", + -13.553908348083496 + ], + [ + "▁tensile", + -13.553912162780762 + ], + [ + "▁20-25", + -13.553915977478027 + ], + [ + "▁OTC", + -13.553935050964355 + ], + [ + "▁Allows", + -13.553936004638672 + ], + [ + "▁surged", + -13.553946495056152 + ], + [ + "▁choirs", + -13.553948402404783 + ], + [ + "▁teasing", + -13.553959846496582 + ], + [ + "421", + -13.554003715515137 + ], + [ + "akan", + -13.554047584533691 + ], + [ + "▁VIC", + -13.554059982299805 + ], + [ + "▁glitches", + -13.554099082946776 + ], + [ + "▁fiat", + -13.554141998291016 + ], + [ + "▁Attend", + -13.554144859313965 + ], + [ + "▁Mummy", + -13.554159164428713 + ], + [ + "▁deliverables", + -13.554200172424316 + ], + [ + "▁Regions", + -13.55426788330078 + ], + [ + "▁barns", + -13.554280281066896 + ], + [ + "ivi", + -13.554414749145508 + ], + [ + "▁Donovan", + -13.554442405700684 + ], + [ + "Hmm", + -13.554537773132324 + ], + [ + "▁JK", + -13.554543495178224 + ], + [ + "▁Geoffrey", + -13.554550170898438 + ], + [ + "▁purses", + -13.554683685302734 + ], + [ + "▁Blitz", + -13.554686546325684 + ], + [ + "▁speculated", + -13.554776191711426 + ], + [ + "estate", + -13.554792404174805 + ], + [ + "▁Sip", + -13.554815292358398 + ], + [ + "▁alcoholism", + -13.554819107055664 + ], + [ + "▁stout", + -13.55492877960205 + ], + [ + "▁Burnett", + -13.554940223693848 + ], + [ + "2.50", + -13.554994583129885 + ], + [ + "▁Eileen", + -13.55506992340088 + ], + [ + "▁rigorously", + -13.555242538452148 + ], + [ + "▁selector", + -13.55524444580078 + ], + [ + "▁Documentary", + -13.555262565612791 + ], + [ + "CRM", + -13.555310249328612 + ], + [ + "Cart", + -13.55532455444336 + ], + [ + "▁Celebrating", + -13.555432319641112 + ], + [ + "wrap", + -13.55545711517334 + ], + [ + "▁Moral", + -13.555500030517578 + ], + [ + "▁Swami", + -13.55551052093506 + ], + [ + "Ghost", + -13.55556297302246 + ], + [ + "▁Kite", + -13.555574417114258 + ], + [ + "▁Laden", + -13.555583953857422 + ], + [ + "HK", + -13.555591583251951 + ], + [ + "noise", + -13.555630683898926 + ], + [ + "▁Chia", + -13.555710792541504 + ], + [ + "141", + -13.55585765838623 + ], + [ + "▁apologized", + -13.555868148803713 + ], + [ + "▁Fol", + -13.55587387084961 + ], + [ + "vehicle", + -13.555984497070312 + ], + [ + "332", + -13.555992126464844 + ], + [ + "341", + -13.556002616882324 + ], + [ + "▁auditions", + -13.556013107299805 + ], + [ + "367", + -13.55605411529541 + ], + [ + "▁196", + -13.55606746673584 + ], + [ + "Attorney", + -13.556167602539062 + ], + [ + "▁Wir", + -13.556217193603516 + ], + [ + "cler", + -13.556218147277832 + ], + [ + "HTML", + -13.556282997131348 + ], + [ + "▁24%", + -13.556286811828612 + ], + [ + "mf", + -13.556333541870115 + ], + [ + "▁Towel", + 
-13.55638313293457 + ], + [ + "▁cascading", + -13.556501388549805 + ], + [ + "▁sensibility", + -13.556501388549805 + ], + [ + "▁unmistakable", + -13.556501388549805 + ], + [ + "▁unquestionably", + -13.556501388549805 + ], + [ + "▁Shampoo", + -13.556503295898438 + ], + [ + "▁obscene", + -13.556504249572754 + ], + [ + "▁empathetic", + -13.556507110595703 + ], + [ + "▁crescent", + -13.556509017944336 + ], + [ + "▁esophagus", + -13.556509971618652 + ], + [ + "▁peacock", + -13.556535720825195 + ], + [ + "▁exhale", + -13.556543350219728 + ], + [ + "▁spanned", + -13.556551933288574 + ], + [ + "▁shredder", + -13.556594848632812 + ], + [ + "88.", + -13.556642532348633 + ], + [ + "▁Mater", + -13.556666374206545 + ], + [ + "Profile", + -13.55674934387207 + ], + [ + "▁deported", + -13.556763648986816 + ], + [ + "▁Pana", + -13.556772232055664 + ], + [ + "▁Supermarket", + -13.556798934936523 + ], + [ + "▁Criteria", + -13.556845664978027 + ], + [ + "Brush", + -13.556855201721191 + ], + [ + "▁basins", + -13.55690097808838 + ], + [ + "iak", + -13.556918144226074 + ], + [ + "Pic", + -13.556929588317873 + ], + [ + "ehr", + -13.557072639465332 + ], + [ + "▁от", + -13.557076454162598 + ], + [ + "hhh", + -13.557114601135254 + ], + [ + "Clo", + -13.557148933410645 + ], + [ + "▁criticisms", + -13.557167053222656 + ], + [ + "ECE", + -13.557260513305664 + ], + [ + "IMS", + -13.557293891906738 + ], + [ + "▁riverside", + -13.557303428649902 + ], + [ + "▁LOVED", + -13.557363510131836 + ], + [ + "▁Jules", + -13.557476997375488 + ], + [ + "▁Kant", + -13.557476997375488 + ], + [ + "▁jpeg", + -13.557573318481444 + ], + [ + "5-7", + -13.557578086853027 + ], + [ + "peri", + -13.557680130004885 + ], + [ + "▁Ingram", + -13.557717323303224 + ], + [ + "tsu", + -13.55772590637207 + ], + [ + "▁bakers", + -13.557765007019045 + ], + [ + "▁Miriam", + -13.557886123657228 + ], + [ + "▁yahoo", + -13.558043479919434 + ], + [ + "▁scammers", + -13.55809211730957 + ], + [ + "▁chalet", + -13.55819034576416 + ], + [ + "▁afar", + -13.558198928833008 + ], + [ + "Discuss", + -13.558308601379396 + ], + [ + "bling", + -13.558337211608888 + ], + [ + "247", + -13.55841827392578 + ], + [ + "customer", + -13.558521270751951 + ], + [ + "4-7", + -13.558571815490724 + ], + [ + "▁picnics", + -13.55859375 + ], + [ + "deductible", + -13.558688163757324 + ], + [ + "▁157", + -13.55868911743164 + ], + [ + "Behind", + -13.558708190917969 + ], + [ + "▁Gamma", + -13.558723449707031 + ], + [ + "▁1895", + -13.558801651000977 + ], + [ + "itus", + -13.558804512023926 + ], + [ + "Georgia", + -13.558805465698242 + ], + [ + "▁CHI", + -13.55898094177246 + ], + [ + "corner", + -13.558999061584473 + ], + [ + "▁backcountry", + -13.559005737304688 + ], + [ + "▁Corr", + -13.559115409851074 + ], + [ + "▁jasmine", + -13.559146881103516 + ], + [ + "▁unsuspecting", + -13.559146881103516 + ], + [ + "▁Astronomy", + -13.559147834777832 + ], + [ + "▁breezy", + -13.559148788452148 + ], + [ + "▁uterus", + -13.559149742126465 + ], + [ + "▁Consultancy", + -13.559151649475098 + ], + [ + "▁Hershey", + -13.55915641784668 + ], + [ + "▁Gainesville", + -13.559162139892578 + ], + [ + "Roger", + -13.559165954589844 + ], + [ + "▁blocker", + -13.559168815612791 + ], + [ + "▁paddock", + -13.559168815612791 + ], + [ + "▁ribbed", + -13.559185981750488 + ], + [ + "▁tumbling", + -13.559186935424805 + ], + [ + "▁finesse", + -13.559192657470703 + ], + [ + "▁Ports", + -13.55921459197998 + ], + [ + "▁affliction", + -13.559215545654297 + ], + [ + "=2", + -13.55950927734375 + ], + [ + "▁orchestrated", + 
-13.559535026550291 + ], + [ + "▁refinishing", + -13.559552192687988 + ], + [ + "▁powertrain", + -13.559669494628906 + ], + [ + "▁vor", + -13.559700965881348 + ], + [ + "▁Gol", + -13.559706687927246 + ], + [ + "▁Wikimedia", + -13.559717178344728 + ], + [ + "=0", + -13.559721946716309 + ], + [ + "eros", + -13.559786796569824 + ], + [ + "▁Spartans", + -13.559814453125 + ], + [ + "▁rhymes", + -13.559886932373049 + ], + [ + "▁uh", + -13.559954643249512 + ], + [ + "▁14,000", + -13.560036659240724 + ], + [ + "bun", + -13.56004524230957 + ], + [ + "▁Bikes", + -13.560067176818848 + ], + [ + "▁proponents", + -13.560113906860352 + ], + [ + "▁adage", + -13.56016445159912 + ], + [ + "▁MCA", + -13.56018352508545 + ], + [ + "▁7-8", + -13.560214042663574 + ], + [ + "▁1974,", + -13.56035041809082 + ], + [ + "00)", + -13.560379028320312 + ], + [ + "890", + -13.560422897338867 + ], + [ + "cafe", + -13.56042766571045 + ], + [ + "▁assaults", + -13.560474395751951 + ], + [ + "▁symbolize", + -13.56047534942627 + ], + [ + "▁trump", + -13.560502052307127 + ], + [ + "▁chakra", + -13.560507774353027 + ], + [ + "▁Brewers", + -13.560511589050291 + ], + [ + "▁Mé", + -13.56061840057373 + ], + [ + "▁Infrared", + -13.560630798339844 + ], + [ + "▁Faster", + -13.560683250427246 + ], + [ + "▁calculators", + -13.56069564819336 + ], + [ + "▁knocks", + -13.560714721679688 + ], + [ + "aldo", + -13.560730934143066 + ], + [ + "▁Xu", + -13.560758590698242 + ], + [ + "▁lightening", + -13.560812950134276 + ], + [ + "YM", + -13.560837745666504 + ], + [ + "▁reconstruct", + -13.560840606689451 + ], + [ + "630", + -13.5608549118042 + ], + [ + "▁Accelerator", + -13.56096363067627 + ], + [ + "▁spilling", + -13.56096649169922 + ], + [ + "▁Technological", + -13.56108856201172 + ], + [ + "tiny", + -13.561092376708984 + ], + [ + "thorpe", + -13.561177253723145 + ], + [ + "▁secondly", + -13.561186790466309 + ], + [ + "▁extremists", + -13.561219215393066 + ], + [ + "▁coasters", + -13.561226844787598 + ], + [ + "▁fashions", + -13.561272621154783 + ], + [ + "▁plantings", + -13.56131649017334 + ], + [ + "387", + -13.561317443847656 + ], + [ + "finish", + -13.561352729797363 + ], + [ + "drum", + -13.561371803283691 + ], + [ + "6′′", + -13.561400413513184 + ], + [ + "charging", + -13.561410903930664 + ], + [ + "escence", + -13.561420440673828 + ], + [ + "Popular", + -13.56143569946289 + ], + [ + "64.", + -13.561473846435549 + ], + [ + "gne", + -13.56149959564209 + ], + [ + "226", + -13.561514854431152 + ], + [ + "▁Saver", + -13.561524391174316 + ], + [ + "hoc", + -13.561535835266112 + ], + [ + "engineered", + -13.561628341674805 + ], + [ + "▁Maz", + -13.5617094039917 + ], + [ + "▁Curse", + -13.56177043914795 + ], + [ + "▁Confederation", + -13.56179904937744 + ], + [ + "▁alternator", + -13.56179904937744 + ], + [ + "▁intimidation", + -13.56179904937744 + ], + [ + "▁osteoarthritis", + -13.56179904937744 + ], + [ + "Motion", + -13.561800003051758 + ], + [ + "▁monetize", + -13.561800956726074 + ], + [ + "▁Psychic", + -13.561814308166504 + ], + [ + "▁Brunei", + -13.56181812286377 + ], + [ + "▁repentance", + -13.56181812286377 + ], + [ + "▁Moderate", + -13.561827659606934 + ], + [ + "Kay", + -13.561829566955566 + ], + [ + "rolling", + -13.561866760253906 + ], + [ + "Italy", + -13.561884880065918 + ], + [ + "▁unplanned", + -13.561887741088867 + ], + [ + "stable", + -13.561894416809082 + ], + [ + "1900", + -13.561992645263672 + ], + [ + "▁Eddy", + -13.561994552612305 + ], + [ + "lope", + -13.562074661254885 + ], + [ + "▁35,000", + -13.562088966369627 + ], + 
[ + "▁lockout", + -13.562115669250488 + ], + [ + "▁Guan", + -13.56215000152588 + ], + [ + "▁windscreen", + -13.562215805053713 + ], + [ + "▁unites", + -13.56232738494873 + ], + [ + "▁fishery", + -13.56244659423828 + ], + [ + "reduction", + -13.562479972839355 + ], + [ + "▁chests", + -13.56252098083496 + ], + [ + "▁Rihanna", + -13.562618255615234 + ], + [ + "▁Brunch", + -13.562634468078612 + ], + [ + "▁skaters", + -13.562772750854492 + ], + [ + "▁PW", + -13.562870025634766 + ], + [ + "▁coldest", + -13.562894821166992 + ], + [ + "▁Partly", + -13.562939643859863 + ], + [ + "▁dei", + -13.562970161437988 + ], + [ + "(5):", + -13.562992095947266 + ], + [ + "▁Haas", + -13.563032150268556 + ], + [ + "kur", + -13.563065528869627 + ], + [ + "▁horrors", + -13.563117027282717 + ], + [ + "522", + -13.56312656402588 + ], + [ + "▁recoup", + -13.563155174255373 + ], + [ + "▁Vineyards", + -13.56317138671875 + ], + [ + "4-6", + -13.563361167907717 + ], + [ + "raja", + -13.563376426696776 + ], + [ + "▁roommates", + -13.563467979431152 + ], + [ + "▁Sharif", + -13.563482284545898 + ], + [ + "▁girlfriends", + -13.563490867614746 + ], + [ + "▁Tasting", + -13.563573837280272 + ], + [ + "Tra", + -13.56361484527588 + ], + [ + "Events", + -13.563691139221191 + ], + [ + "▁CCA", + -13.563724517822266 + ], + [ + "▁211", + -13.563735008239746 + ], + [ + "JU", + -13.56374740600586 + ], + [ + "▁antigen", + -13.5637845993042 + ], + [ + "EDA", + -13.563848495483398 + ], + [ + "▁Nasdaq", + -13.563965797424316 + ], + [ + "▁REC", + -13.564029693603516 + ], + [ + "▁SCR", + -13.564071655273438 + ], + [ + "Categories", + -13.56414031982422 + ], + [ + "Jordan", + -13.564149856567385 + ], + [ + "riding", + -13.564160346984863 + ], + [ + "▁Sti", + -13.56419849395752 + ], + [ + "▁Yen", + -13.564223289489746 + ], + [ + "OUS", + -13.564263343811035 + ], + [ + "▁530", + -13.564269065856934 + ], + [ + "▁Orthopaedic", + -13.564458847045898 + ], + [ + "▁atrocities", + -13.564458847045898 + ], + [ + "▁baguette", + -13.564458847045898 + ], + [ + "▁blizzard", + -13.564458847045898 + ], + [ + "▁chutney", + -13.564458847045898 + ], + [ + "▁intricacies", + -13.564458847045898 + ], + [ + "▁unstoppable", + -13.564458847045898 + ], + [ + "▁scrubbing", + -13.564472198486328 + ], + [ + "▁bandwagon", + -13.564485549926758 + ], + [ + "▁wavy", + -13.56448745727539 + ], + [ + "ALI", + -13.564515113830566 + ], + [ + "Gro", + -13.56459140777588 + ], + [ + "▁Redmond", + -13.564630508422852 + ], + [ + "▁vac", + -13.564669609069824 + ], + [ + "▁Beetle", + -13.564702033996582 + ], + [ + "▁handpicked", + -13.564736366271973 + ], + [ + "136", + -13.564764022827148 + ], + [ + "eater", + -13.564801216125488 + ], + [ + "CHAR", + -13.564851760864258 + ], + [ + "▁banter", + -13.56485366821289 + ], + [ + "▁14\"", + -13.564871788024902 + ], + [ + "TOWN", + -13.56489372253418 + ], + [ + "▁breaths", + -13.564895629882812 + ], + [ + "▁HOPE", + -13.56501579284668 + ], + [ + "▁Underwood", + -13.565069198608398 + ], + [ + "▁mouthpiece", + -13.565104484558104 + ], + [ + "eja", + -13.56513500213623 + ], + [ + "▁Timeline", + -13.565136909484863 + ], + [ + "▁Mariners", + -13.565147399902344 + ], + [ + "▁Gems", + -13.565231323242188 + ], + [ + "▁marginally", + -13.565235137939451 + ], + [ + "▁Luton", + -13.565274238586426 + ], + [ + "▁Guards", + -13.565309524536133 + ], + [ + "tuned", + -13.56533432006836 + ], + [ + "▁Cells", + -13.56533432006836 + ], + [ + "▁diva", + -13.565464973449709 + ], + [ + "▁karate", + -13.565486907958984 + ], + [ + "▁absorbent", + -13.565526962280272 + 
], + [ + "006", + -13.565564155578612 + ], + [ + "▁equate", + -13.565621376037598 + ], + [ + "▁declarations", + -13.56562614440918 + ], + [ + "▁resellers", + -13.565634727478027 + ], + [ + "215", + -13.565637588500977 + ], + [ + "▁OTA", + -13.565720558166504 + ], + [ + "▁LAST", + -13.565759658813477 + ], + [ + "gaming", + -13.565826416015623 + ], + [ + "▁23%", + -13.565876960754396 + ], + [ + "Gui", + -13.566010475158691 + ], + [ + "▁Auxiliary", + -13.56601905822754 + ], + [ + "476", + -13.566088676452637 + ], + [ + "▁Permission", + -13.566173553466797 + ], + [ + "▁Kodi", + -13.566182136535645 + ], + [ + "▁Malay", + -13.566214561462402 + ], + [ + "▁swimsuit", + -13.566245079040527 + ], + [ + "Acts", + -13.5662841796875 + ], + [ + "▁Skate", + -13.566291809082031 + ], + [ + "▁abandoning", + -13.56630039215088 + ], + [ + "▁$150,000", + -13.566347122192385 + ], + [ + "JR", + -13.566360473632812 + ], + [ + "▁80'", + -13.56637954711914 + ], + [ + "▁basal", + -13.566450119018556 + ], + [ + "agging", + -13.566502571105955 + ], + [ + "▁Hike", + -13.566540718078612 + ], + [ + "▁methodological", + -13.566600799560549 + ], + [ + "terrorism", + -13.566603660583496 + ], + [ + "trol", + -13.566664695739746 + ], + [ + "▁zo", + -13.566716194152832 + ], + [ + "699", + -13.566741943359377 + ], + [ + "▁Bib", + -13.566798210144045 + ], + [ + "▁tunic", + -13.566849708557127 + ], + [ + "▁owls", + -13.566885948181152 + ], + [ + "▁Teens", + -13.566943168640137 + ], + [ + "▁PN", + -13.567047119140623 + ], + [ + "▁Greenhouse", + -13.567119598388672 + ], + [ + "▁Humboldt", + -13.56712532043457 + ], + [ + "▁mesothelioma", + -13.56712532043457 + ], + [ + "fps", + -13.567131996154783 + ], + [ + "▁Arbitration", + -13.567132949829102 + ], + [ + "▁Diwali", + -13.567132949829102 + ], + [ + "▁jQuery", + -13.567137718200684 + ], + [ + "▁tarot", + -13.5671968460083 + ], + [ + "▁disqualified", + -13.567197799682615 + ], + [ + "▁Administrators", + -13.567200660705566 + ], + [ + "▁Scher", + -13.567200660705566 + ], + [ + "▁enlargement", + -13.567312240600586 + ], + [ + "▁50/50", + -13.5673246383667 + ], + [ + "▁bullion", + -13.567331314086914 + ], + [ + "tow", + -13.567352294921877 + ], + [ + "▁Retain", + -13.56736660003662 + ], + [ + "▁Nadal", + -13.567399978637695 + ], + [ + "▁radon", + -13.567404747009276 + ], + [ + "▁sprinkles", + -13.567482948303224 + ], + [ + "▁mentored", + -13.567523956298828 + ], + [ + "▁chronicles", + -13.567560195922852 + ], + [ + "▁banjo", + -13.567593574523926 + ], + [ + "yx", + -13.56760311126709 + ], + [ + "-82", + -13.567645072937012 + ], + [ + "fic", + -13.567669868469238 + ], + [ + "CCA", + -13.567693710327148 + ], + [ + "▁IB", + -13.567702293395996 + ], + [ + "Lib", + -13.56778621673584 + ], + [ + "nav", + -13.567789077758787 + ], + [ + "▁loo", + -13.56792449951172 + ], + [ + "▁Div", + -13.567936897277832 + ], + [ + "seater", + -13.567940711975098 + ], + [ + "▁Steph", + -13.56797218322754 + ], + [ + "▁Maureen", + -13.568015098571776 + ], + [ + "▁SX", + -13.568069458007812 + ], + [ + "▁Improving", + -13.56809902191162 + ], + [ + "khan", + -13.568192481994627 + ], + [ + "internet", + -13.568214416503906 + ], + [ + "▁leaderboard", + -13.568233489990234 + ], + [ + "▁Icons", + -13.568256378173828 + ], + [ + "▁Quantity", + -13.568314552307127 + ], + [ + "LIS", + -13.568365097045898 + ], + [ + "▁scripted", + -13.568373680114746 + ], + [ + "hle", + -13.568382263183594 + ], + [ + "▁TLS", + -13.568499565124512 + ], + [ + "▁lg", + -13.568524360656738 + ], + [ + "▁riffs", + -13.568563461303713 + ], + [ + 
"▁CPD", + -13.568599700927734 + ], + [ + "CAL", + -13.568679809570312 + ], + [ + "▁BM", + -13.568760871887209 + ], + [ + "▁Phu", + -13.568872451782228 + ], + [ + "bark", + -13.568880081176758 + ], + [ + "▁summarizes", + -13.568921089172363 + ], + [ + "599", + -13.56894874572754 + ], + [ + "BAR", + -13.568977355957031 + ], + [ + "▁APS", + -13.569086074829102 + ], + [ + "▁FAR", + -13.569086074829102 + ], + [ + "fro", + -13.569087982177734 + ], + [ + "▁intensify", + -13.569291114807127 + ], + [ + "Metro", + -13.569330215454102 + ], + [ + "Forward", + -13.569341659545898 + ], + [ + "▁cowl", + -13.56937313079834 + ], + [ + "▁checker", + -13.569392204284668 + ], + [ + "▁1).", + -13.569401741027832 + ], + [ + "▁Wilkinson", + -13.569414138793944 + ], + [ + "▁Hara", + -13.569458961486816 + ], + [ + "▁Nicholson", + -13.569485664367676 + ], + [ + "▁aisles", + -13.5695219039917 + ], + [ + "Deck", + -13.569568634033203 + ], + [ + "▁uncovering", + -13.569602966308594 + ], + [ + "▁Rahman", + -13.569615364074709 + ], + [ + "burger", + -13.569624900817873 + ], + [ + "▁Llan", + -13.569626808166504 + ], + [ + "▁Anand", + -13.569663047790527 + ], + [ + "gallon", + -13.56966495513916 + ], + [ + "▁Observation", + -13.569750785827637 + ], + [ + "Seems", + -13.56978702545166 + ], + [ + "▁chaplain", + -13.569798469543455 + ], + [ + "▁cardamom", + -13.569799423217772 + ], + [ + "▁preparatory", + -13.569799423217772 + ], + [ + "▁Cassidy", + -13.569802284240724 + ], + [ + "▁crammed", + -13.569805145263672 + ], + [ + "▁interrogation", + -13.569805145263672 + ], + [ + "▁Radiation", + -13.569814682006836 + ], + [ + "▁Bromley", + -13.56983470916748 + ], + [ + "▁Kremlin", + -13.569843292236328 + ], + [ + "hk", + -13.569857597351074 + ], + [ + "▁moose", + -13.569865226745604 + ], + [ + "expected", + -13.569886207580566 + ], + [ + "▁Enhancement", + -13.56989288330078 + ], + [ + "tick", + -13.56991481781006 + ], + [ + "Changing", + -13.569916725158691 + ], + [ + "▁butternut", + -13.569923400878906 + ], + [ + "unique", + -13.569938659667969 + ], + [ + "Wrap", + -13.56994342803955 + ], + [ + "Grey", + -13.569960594177246 + ], + [ + "▁500-", + -13.569981575012209 + ], + [ + "▁Alvarez", + -13.56999683380127 + ], + [ + "Intel", + -13.570025444030762 + ], + [ + "▁centerpieces", + -13.570032119750977 + ], + [ + "Discount", + -13.57007122039795 + ], + [ + "▁620", + -13.57012939453125 + ], + [ + "▁repaid", + -13.57026481628418 + ], + [ + "▁Prius", + -13.570266723632812 + ], + [ + "▁Marvelous", + -13.570286750793455 + ], + [ + "▁annotations", + -13.57029151916504 + ], + [ + "▁oat", + -13.57031536102295 + ], + [ + "▁hilly", + -13.570343017578123 + ], + [ + "tly", + -13.570348739624023 + ], + [ + "▁Ambassadors", + -13.57037353515625 + ], + [ + "DIS", + -13.570399284362791 + ], + [ + "▁Ind", + -13.5704984664917 + ], + [ + "TAR", + -13.570528984069824 + ], + [ + "▁IO", + -13.57070541381836 + ], + [ + "▁Haunted", + -13.570743560791016 + ], + [ + "▁Umbrella", + -13.570755958557127 + ], + [ + "respect", + -13.57076644897461 + ], + [ + "▁Libyan", + -13.570789337158203 + ], + [ + "▁6.3", + -13.57081699371338 + ], + [ + "▁PLAN", + -13.570823669433594 + ], + [ + "▁hoist", + -13.570825576782228 + ], + [ + "▁vampires", + -13.570931434631348 + ], + [ + "oir", + -13.570947647094728 + ], + [ + "▁Kip", + -13.570952415466309 + ], + [ + "▁Spectacular", + -13.57095718383789 + ], + [ + "▁resonated", + -13.571001052856444 + ], + [ + "▁fabricate", + -13.571050643920898 + ], + [ + "▁chock", + -13.571130752563477 + ], + [ + "▁Sewer", + -13.571187019348145 + 
], + [ + "alle", + -13.571197509765623 + ], + [ + "▁licensors", + -13.571258544921877 + ], + [ + "rita", + -13.571281433105469 + ], + [ + "▁Coo", + -13.571281433105469 + ], + [ + "▁Tala", + -13.5712890625 + ], + [ + "▁CONTROL", + -13.571290016174316 + ], + [ + "▁Hertz", + -13.571338653564451 + ], + [ + "▁1980.", + -13.57136058807373 + ], + [ + "Crew", + -13.571413040161133 + ], + [ + "▁Lon", + -13.571491241455078 + ], + [ + "▁Gloves", + -13.571574211120604 + ], + [ + "7.1", + -13.571577072143556 + ], + [ + "▁kinder", + -13.571710586547852 + ], + [ + "▁Therapeutic", + -13.57172966003418 + ], + [ + "▁Kelsey", + -13.571776390075684 + ], + [ + "▁bolted", + -13.571874618530272 + ], + [ + "▁prioritise", + -13.571879386901855 + ], + [ + "▁reiterate", + -13.571880340576172 + ], + [ + "▁McGregor", + -13.571914672851562 + ], + [ + "▁ingest", + -13.572014808654783 + ], + [ + "▁silos", + -13.572073936462402 + ], + [ + "90.", + -13.572089195251465 + ], + [ + "▁WAR", + -13.572120666503906 + ], + [ + "▁Isa", + -13.572125434875488 + ], + [ + "▁halogen", + -13.572271347045898 + ], + [ + "▁OE", + -13.572285652160645 + ], + [ + "nity", + -13.572340965270996 + ], + [ + "Suitable", + -13.572386741638184 + ], + [ + "▁Manu", + -13.572400093078612 + ], + [ + "ORA", + -13.572466850280762 + ], + [ + "▁Debra", + -13.572480201721191 + ], + [ + "▁conceivable", + -13.572480201721191 + ], + [ + "▁penchant", + -13.572480201721191 + ], + [ + "▁saddened", + -13.572480201721191 + ], + [ + "▁Attitude", + -13.572482109069824 + ], + [ + "▁precedence", + -13.57248306274414 + ], + [ + "▁lymphatic", + -13.57248592376709 + ], + [ + "▁Seneca", + -13.57249927520752 + ], + [ + "▁Telescope", + -13.572500228881836 + ], + [ + "▁(1998)", + -13.572511672973633 + ], + [ + "▁Silverado", + -13.572650909423828 + ], + [ + "▁Jonah", + -13.572660446166992 + ], + [ + "▁spearheaded", + -13.57269287109375 + ], + [ + "▁sloping", + -13.57277774810791 + ], + [ + "gge", + -13.572877883911133 + ], + [ + "▁Mans", + -13.572885513305664 + ], + [ + "▁Goodwill", + -13.572898864746094 + ], + [ + "▁flattened", + -13.572940826416016 + ], + [ + "▁Tracey", + -13.573013305664062 + ], + [ + "▁sweeteners", + -13.573062896728516 + ], + [ + "▁darts", + -13.573116302490234 + ], + [ + "▁BIO", + -13.573128700256348 + ], + [ + "▁mor", + -13.573158264160156 + ], + [ + "▁Meri", + -13.573204040527344 + ], + [ + "▁formulating", + -13.573264122009276 + ], + [ + "▁1975,", + -13.57327365875244 + ], + [ + "salt", + -13.573274612426758 + ], + [ + "▁rue", + -13.57329559326172 + ], + [ + "▁wheelchairs", + -13.573321342468262 + ], + [ + "MAS", + -13.573376655578612 + ], + [ + "▁fulfills", + -13.57339096069336 + ], + [ + "▁UVA", + -13.57351016998291 + ], + [ + "▁Cav", + -13.573556900024414 + ], + [ + "▁ACCESS", + -13.573570251464844 + ], + [ + "342", + -13.573644638061523 + ], + [ + "▁Qualification", + -13.57373046875 + ], + [ + "▁gels", + -13.573786735534668 + ], + [ + "▁Manny", + -13.573790550231934 + ], + [ + "▁Bought", + -13.573840141296388 + ], + [ + "▁copywriter", + -13.573844909667969 + ], + [ + "▁Tess", + -13.573898315429688 + ], + [ + "oxi", + -13.573906898498535 + ], + [ + "▁Conf", + -13.573939323425291 + ], + [ + "▁Scarlet", + -13.573965072631836 + ], + [ + "Psych", + -13.574076652526855 + ], + [ + "▁politic", + -13.57423973083496 + ], + [ + "▁216", + -13.574387550354004 + ], + [ + "▁Cora", + -13.574498176574709 + ], + [ + "▁russian", + -13.57463836669922 + ], + [ + "▁slime", + -13.5746488571167 + ], + [ + "▁Gull", + -13.574657440185549 + ], + [ + "▁comedians", + 
-13.57472324371338 + ], + [ + "▁1972.", + -13.57487964630127 + ], + [ + "glo", + -13.574935913085938 + ], + [ + "Dad", + -13.574936866760254 + ], + [ + "144", + -13.574956893920898 + ], + [ + "Insert", + -13.574970245361328 + ], + [ + "fishing", + -13.57497787475586 + ], + [ + "▁savour", + -13.575034141540527 + ], + [ + "▁bombers", + -13.575037002563477 + ], + [ + "wrapped", + -13.575060844421388 + ], + [ + "▁Grie", + -13.57516098022461 + ], + [ + "▁burgundy", + -13.57516860961914 + ], + [ + "▁liposuction", + -13.57516860961914 + ], + [ + "▁cessation", + -13.575169563293455 + ], + [ + "directional", + -13.575170516967772 + ], + [ + "▁negligible", + -13.57517147064209 + ], + [ + "▁flaunt", + -13.575173377990724 + ], + [ + "▁Radeon", + -13.575180053710938 + ], + [ + "▁Basilica", + -13.57518196105957 + ], + [ + "leton", + -13.575215339660645 + ], + [ + "▁vogue", + -13.575223922729492 + ], + [ + "leen", + -13.575237274169922 + ], + [ + "▁Hunters", + -13.57526397705078 + ], + [ + "▁Gustav", + -13.57528305053711 + ], + [ + "▁drawstring", + -13.575295448303224 + ], + [ + "▁1969.", + -13.575304985046388 + ], + [ + "▁Allergy", + -13.57533836364746 + ], + [ + "025", + -13.575372695922852 + ], + [ + "▁neutron", + -13.575399398803713 + ], + [ + "▁Naga", + -13.575411796569824 + ], + [ + "▁Apo", + -13.575428009033203 + ], + [ + "opi", + -13.575441360473633 + ], + [ + "▁defy", + -13.57544994354248 + ], + [ + "pier", + -13.57553005218506 + ], + [ + "mixed", + -13.575616836547852 + ], + [ + "Selling", + -13.575663566589355 + ], + [ + "▁detriment", + -13.575727462768556 + ], + [ + "▁modernity", + -13.575733184814451 + ], + [ + "▁Versa", + -13.57573413848877 + ], + [ + "▁ebb", + -13.575809478759766 + ], + [ + "▁Diving", + -13.575845718383787 + ], + [ + "▁thrills", + -13.5759859085083 + ], + [ + "kot", + -13.576035499572754 + ], + [ + "MET", + -13.57606601715088 + ], + [ + "eep", + -13.576104164123535 + ], + [ + "india", + -13.57614803314209 + ], + [ + "chrome", + -13.57630443572998 + ], + [ + "▁Andes", + -13.576339721679688 + ], + [ + "diy", + -13.576343536376951 + ], + [ + "ahead", + -13.576375961303713 + ], + [ + "▁plums", + -13.576416015625 + ], + [ + "▁Devin", + -13.57644271850586 + ], + [ + "Girls", + -13.576491355895996 + ], + [ + "▁MLM", + -13.576531410217283 + ], + [ + "▁streaks", + -13.576531410217283 + ], + [ + "▁underlined", + -13.576542854309082 + ], + [ + "011", + -13.576545715332031 + ], + [ + "Shell", + -13.576654434204102 + ], + [ + "▁attractiveness", + -13.576655387878418 + ], + [ + "grow", + -13.576659202575684 + ], + [ + "Reference", + -13.576809883117676 + ], + [ + "297", + -13.576825141906738 + ], + [ + "Raw", + -13.576851844787598 + ], + [ + "▁Arma", + -13.576915740966797 + ], + [ + "346", + -13.577016830444336 + ], + [ + "▁Charts", + -13.577193260192873 + ], + [ + "▁rodent", + -13.577216148376465 + ], + [ + "▁SCA", + -13.577275276184082 + ], + [ + "felt", + -13.57729434967041 + ], + [ + "hound", + -13.577407836914062 + ], + [ + "ECT", + -13.577547073364258 + ], + [ + "▁Contrary", + -13.577607154846191 + ], + [ + "▁Cadet", + -13.577627182006836 + ], + [ + "Proof", + -13.57763957977295 + ], + [ + "▁$40,000", + -13.577648162841797 + ], + [ + "▁Grille", + -13.57774257659912 + ], + [ + "▁Truman", + -13.57776165008545 + ], + [ + "problem", + -13.577787399291992 + ], + [ + "centred", + -13.5778226852417 + ], + [ + "▁Roo", + -13.577837944030762 + ], + [ + "▁THROUGH", + -13.577860832214355 + ], + [ + "▁captivity", + -13.577863693237305 + ], + [ + "▁polypropylene", + -13.577863693237305 + ], + [ 
+ "▁vapour", + -13.577866554260254 + ], + [ + "listing", + -13.577885627746582 + ], + [ + "Maximum", + -13.577889442443848 + ], + [ + "▁Kramer", + -13.577913284301758 + ], + [ + "▁Gutter", + -13.577942848205566 + ], + [ + "▁Vicki", + -13.577942848205566 + ], + [ + "▁intranet", + -13.577970504760742 + ], + [ + "▁Laurence", + -13.57797145843506 + ], + [ + "▁barrage", + -13.57797908782959 + ], + [ + "zal", + -13.578001022338867 + ], + [ + "▁ALSO", + -13.578028678894045 + ], + [ + "Cycle", + -13.578041076660156 + ], + [ + "▁1968,", + -13.578154563903809 + ], + [ + "▁Fernandez", + -13.57815647125244 + ], + [ + "486", + -13.57833194732666 + ], + [ + "trie", + -13.57838249206543 + ], + [ + "▁blasts", + -13.578384399414062 + ], + [ + "▁authorisation", + -13.578457832336426 + ], + [ + "▁Galli", + -13.578531265258787 + ], + [ + "▁Jaime", + -13.57858657836914 + ], + [ + "▁kissed", + -13.57862377166748 + ], + [ + "worms", + -13.578636169433594 + ], + [ + "▁16,000", + -13.578655242919922 + ], + [ + "361", + -13.57868194580078 + ], + [ + "spor", + -13.578773498535156 + ], + [ + "▁Nah", + -13.578801155090332 + ], + [ + "witch", + -13.578811645507812 + ], + [ + "RAP", + -13.5789155960083 + ], + [ + "▁plainly", + -13.578935623168944 + ], + [ + "▁respectfully", + -13.578936576843262 + ], + [ + "▁bumpy", + -13.578960418701172 + ], + [ + "Emma", + -13.579034805297852 + ], + [ + "rud", + -13.579042434692385 + ], + [ + "nail", + -13.579060554504396 + ], + [ + "vari", + -13.579071044921877 + ], + [ + "▁tightness", + -13.579100608825684 + ], + [ + "pec", + -13.579172134399414 + ], + [ + "▁bbq", + -13.579230308532717 + ], + [ + "▁peels", + -13.57923984527588 + ], + [ + "▁Garnish", + -13.579364776611328 + ], + [ + "▁Machu", + -13.579407691955566 + ], + [ + "▁Liver", + -13.579442024230955 + ], + [ + "▁amateurs", + -13.579458236694336 + ], + [ + "▁Developmental", + -13.57960033416748 + ], + [ + "▁welder", + -13.579626083374023 + ], + [ + "▁tripled", + -13.579646110534668 + ], + [ + "ature", + -13.579673767089844 + ], + [ + "neuter", + -13.579699516296388 + ], + [ + "Buck", + -13.579730987548828 + ], + [ + "▁MDF", + -13.579791069030762 + ], + [ + "▁Beware", + -13.579809188842772 + ], + [ + "▁testimonial", + -13.579822540283203 + ], + [ + "dag", + -13.5798978805542 + ], + [ + "phones", + -13.579914093017578 + ], + [ + "bility", + -13.579919815063477 + ], + [ + "Drug", + -13.579961776733398 + ], + [ + "▁nz", + -13.57996654510498 + ], + [ + "▁Shorts", + -13.579980850219728 + ], + [ + "▁implanted", + -13.58000659942627 + ], + [ + "▁bathe", + -13.58000946044922 + ], + [ + "▁Mainly", + -13.58003044128418 + ], + [ + "920", + -13.58005142211914 + ], + [ + "▁Flyers", + -13.58013153076172 + ], + [ + "▁mainstay", + -13.580150604248049 + ], + [ + "▁HSE", + -13.580270767211914 + ], + [ + "yak", + -13.580292701721191 + ], + [ + "▁disperse", + -13.580315589904783 + ], + [ + "dw", + -13.580327033996582 + ], + [ + "▁Sweater", + -13.58039093017578 + ], + [ + "▁everytime", + -13.580418586730955 + ], + [ + "▁Burk", + -13.580498695373535 + ], + [ + "$3", + -13.5805025100708 + ], + [ + "▁rejects", + -13.580510139465332 + ], + [ + "broken", + -13.58053970336914 + ], + [ + "▁Hewlett", + -13.58056640625 + ], + [ + "▁Juventus", + -13.58056640625 + ], + [ + "▁Resurrection", + -13.58056640625 + ], + [ + "▁Stuttgart", + -13.58056640625 + ], + [ + "▁delicacy", + -13.58056640625 + ], + [ + "▁exclusivity", + -13.58056640625 + ], + [ + "▁Magnum", + -13.58057689666748 + ], + [ + "agged", + -13.58059310913086 + ], + [ + "lette", + -13.580618858337402 + 
], + [ + "▁Madhya", + -13.58064079284668 + ], + [ + "Jewish", + -13.580660820007324 + ], + [ + "▁regenerative", + -13.580662727355955 + ], + [ + "▁Ashford", + -13.580698013305664 + ], + [ + "▁Rubio", + -13.580703735351562 + ], + [ + "vice", + -13.580735206604004 + ], + [ + "▁bureaucratic", + -13.580771446228027 + ], + [ + "▁Loud", + -13.580774307250977 + ], + [ + "riot", + -13.58087158203125 + ], + [ + "0.6", + -13.58090877532959 + ], + [ + "▁Focusing", + -13.580921173095703 + ], + [ + "▁wiper", + -13.58092212677002 + ], + [ + "▁Oakley", + -13.58095645904541 + ], + [ + "designs", + -13.580973625183104 + ], + [ + "▁RFP", + -13.58099365234375 + ], + [ + "usr", + -13.581019401550291 + ], + [ + "▁PTO", + -13.58104133605957 + ], + [ + "gis", + -13.581172943115234 + ], + [ + "▁Jes", + -13.581236839294434 + ], + [ + "509", + -13.581262588500977 + ], + [ + "▁rh", + -13.581262588500977 + ], + [ + "strum", + -13.581263542175291 + ], + [ + "clar", + -13.581299781799316 + ], + [ + "▁Biden", + -13.58134651184082 + ], + [ + "▁Harm", + -13.581348419189451 + ], + [ + "▁1968.", + -13.5813627243042 + ], + [ + "▁Waterfall", + -13.581398010253906 + ], + [ + "▁ether", + -13.58142375946045 + ], + [ + "▁Drilling", + -13.581427574157717 + ], + [ + "Youth", + -13.581449508666992 + ], + [ + "▁interpreters", + -13.581485748291016 + ], + [ + "▁Filled", + -13.58151912689209 + ], + [ + "▁877", + -13.581548690795898 + ], + [ + "chron", + -13.58164405822754 + ], + [ + "sci", + -13.581758499145508 + ], + [ + "ISP", + -13.581790924072266 + ], + [ + "Feed", + -13.58181095123291 + ], + [ + "458", + -13.58184814453125 + ], + [ + "▁Raising", + -13.581879615783691 + ], + [ + "▁combos", + -13.581941604614258 + ], + [ + "▁Protected", + -13.582003593444824 + ], + [ + "▁minimalistic", + -13.582069396972656 + ], + [ + "boss", + -13.582073211669922 + ], + [ + "▁Sponsored", + -13.582119941711426 + ], + [ + "▁Figures", + -13.582182884216309 + ], + [ + "▁Agreements", + -13.582223892211914 + ], + [ + "▁linkage", + -13.582228660583496 + ], + [ + "▁6-0", + -13.58241844177246 + ], + [ + "▁DWI", + -13.582427024841309 + ], + [ + "Af", + -13.58244800567627 + ], + [ + "Chart", + -13.582480430603027 + ], + [ + "▁Predator", + -13.582483291625977 + ], + [ + "pending", + -13.582523345947266 + ], + [ + "329", + -13.582549095153809 + ], + [ + "▁Eligible", + -13.582592964172363 + ], + [ + "(1),", + -13.58261013031006 + ], + [ + "locks", + -13.582616806030272 + ], + [ + "IU", + -13.582629203796388 + ], + [ + "mex", + -13.58264446258545 + ], + [ + "▁ра", + -13.582663536071776 + ], + [ + "▁Structures", + -13.58266830444336 + ], + [ + "▁tarp", + -13.58269500732422 + ], + [ + "alter", + -13.582707405090332 + ], + [ + "▁Glue", + -13.582708358764648 + ], + [ + "pis", + -13.58273696899414 + ], + [ + "▁Mackenzie", + -13.582759857177734 + ], + [ + "eared", + -13.582767486572266 + ], + [ + "vv", + -13.582886695861816 + ], + [ + "ost", + -13.58290672302246 + ], + [ + "▁lick", + -13.582932472229004 + ], + [ + "Tag", + -13.583002090454102 + ], + [ + "▁skimp", + -13.583014488220217 + ], + [ + "▁muck", + -13.583020210266112 + ], + [ + "▁unearth", + -13.583035469055176 + ], + [ + "▁Witt", + -13.583047866821287 + ], + [ + "afa", + -13.583064079284668 + ], + [ + "evan", + -13.583099365234377 + ], + [ + "opened", + -13.583101272583008 + ], + [ + "Present", + -13.583155632019045 + ], + [ + "Designer", + -13.58321475982666 + ], + [ + "368", + -13.583229064941406 + ], + [ + "▁Occasion", + -13.583250999450684 + ], + [ + "raised", + -13.583261489868164 + ], + [ + 
"▁cellulite", + -13.583276748657228 + ], + [ + "▁exfoliate", + -13.583276748657228 + ], + [ + "▁Wharton", + -13.583277702331545 + ], + [ + "▁leveled", + -13.583277702331545 + ], + [ + "▁velcro", + -13.583279609680176 + ], + [ + "▁pelvis", + -13.583281517028809 + ], + [ + "▁invading", + -13.583285331726074 + ], + [ + "▁humbling", + -13.583292961120604 + ], + [ + "▁1862", + -13.583319664001465 + ], + [ + "▁Minds", + -13.58336353302002 + ], + [ + "executive", + -13.58342170715332 + ], + [ + "▁Rak", + -13.583426475524902 + ], + [ + "Approximately", + -13.58346939086914 + ], + [ + "mountain", + -13.583544731140137 + ], + [ + "kumar", + -13.583610534667969 + ], + [ + "636", + -13.583767890930176 + ], + [ + "▁honda", + -13.583768844604492 + ], + [ + "Clark", + -13.583772659301758 + ], + [ + "▁vaporizer", + -13.583807945251465 + ], + [ + "Wal", + -13.583876609802246 + ], + [ + "▁purposefully", + -13.583946228027344 + ], + [ + "▁gt", + -13.583949089050291 + ], + [ + "▁AES", + -13.58403491973877 + ], + [ + "▁Flea", + -13.5840425491333 + ], + [ + "▁cooktop", + -13.584125518798828 + ], + [ + "▁bailout", + -13.5841703414917 + ], + [ + "nta", + -13.584187507629396 + ], + [ + "▁socialism", + -13.58420467376709 + ], + [ + "565", + -13.584270477294922 + ], + [ + "▁swaps", + -13.584325790405272 + ], + [ + "▁DHS", + -13.584449768066406 + ], + [ + "735", + -13.584471702575684 + ], + [ + "▁friendliness", + -13.584489822387695 + ], + [ + "▁breeder", + -13.584495544433594 + ], + [ + "▁eyelids", + -13.584522247314451 + ], + [ + "49.", + -13.584553718566896 + ], + [ + "idis", + -13.58462142944336 + ], + [ + "9001", + -13.584623336791992 + ], + [ + "reader", + -13.584630966186523 + ], + [ + "vine", + -13.58464527130127 + ], + [ + "▁199", + -13.58466339111328 + ], + [ + "ctor", + -13.584716796875 + ], + [ + "▁1891", + -13.584722518920898 + ], + [ + "eering", + -13.584747314453123 + ], + [ + "▁ferment", + -13.584754943847656 + ], + [ + "Forest", + -13.584766387939451 + ], + [ + "▁(2018).", + -13.584966659545898 + ], + [ + "▁163", + -13.58506202697754 + ], + [ + "aug", + -13.585071563720703 + ], + [ + "CEL", + -13.58509922027588 + ], + [ + "▁Voyage", + -13.585124015808104 + ], + [ + "▁Patagonia", + -13.585137367248535 + ], + [ + "▁conglomerate", + -13.585289001464844 + ], + [ + "SPA", + -13.585318565368652 + ], + [ + "▁Bluff", + -13.585350036621094 + ], + [ + "▁Ani", + -13.585351943969728 + ], + [ + "▁mobilize", + -13.585418701171877 + ], + [ + "▁Panic", + -13.585548400878906 + ], + [ + "▁contrasted", + -13.585566520690918 + ], + [ + "▁factoring", + -13.585609436035156 + ], + [ + "▁224", + -13.585640907287598 + ], + [ + "crime", + -13.585662841796877 + ], + [ + "▁observational", + -13.58581256866455 + ], + [ + "▁OSU", + -13.5858793258667 + ], + [ + "Mari", + -13.585931777954102 + ], + [ + "▁Sending", + -13.585976600646973 + ], + [ + "▁cornea", + -13.585988998413086 + ], + [ + "▁Kelowna", + -13.585993766784668 + ], + [ + "▁deteriorating", + -13.585993766784668 + ], + [ + "▁tequila", + -13.585993766784668 + ], + [ + "▁Eiffel", + -13.585994720458984 + ], + [ + "▁agitation", + -13.586000442504885 + ], + [ + "cai", + -13.586002349853516 + ], + [ + "▁Beaumont", + -13.586009979248049 + ], + [ + "▁metastatic", + -13.586016654968262 + ], + [ + "▁muzzle", + -13.586017608642578 + ], + [ + "▁penthouse", + -13.586044311523438 + ], + [ + "▁Javier", + -13.586054801940918 + ], + [ + "▁conductivity", + -13.586069107055664 + ], + [ + "▁Fitbit", + -13.586115837097168 + ], + [ + "sad", + -13.586207389831545 + ], + [ + "▁Razor", + 
-13.586221694946287 + ], + [ + "▁ante", + -13.58622932434082 + ], + [ + "▁NICE", + -13.586236000061035 + ], + [ + "▁trolls", + -13.586237907409668 + ], + [ + "jou", + -13.58626937866211 + ], + [ + "▁DIA", + -13.58637523651123 + ], + [ + "Layer", + -13.586408615112305 + ], + [ + "▁chopper", + -13.58642578125 + ], + [ + "Chain", + -13.586444854736328 + ], + [ + "▁Chaplin", + -13.586573600769045 + ], + [ + "▁xl", + -13.586596488952637 + ], + [ + "flor", + -13.586679458618164 + ], + [ + "285", + -13.586803436279297 + ], + [ + "▁Gian", + -13.58681583404541 + ], + [ + "188", + -13.586867332458496 + ], + [ + "▁classed", + -13.586871147155762 + ], + [ + "▁uncompromising", + -13.586956977844238 + ], + [ + "▁comprehensively", + -13.586987495422363 + ], + [ + "▁franc", + -13.586990356445312 + ], + [ + "▁28%", + -13.58700942993164 + ], + [ + "]:", + -13.587054252624512 + ], + [ + "▁bleaching", + -13.587095260620115 + ], + [ + "▁VIN", + -13.587096214294434 + ], + [ + "Poll", + -13.587120056152344 + ], + [ + "▁Cris", + -13.587153434753418 + ], + [ + "▁sanction", + -13.587157249450684 + ], + [ + "▁pol", + -13.587262153625488 + ], + [ + "-48", + -13.587332725524902 + ], + [ + "▁rediscover", + -13.58739948272705 + ], + [ + "▁Yogi", + -13.587422370910645 + ], + [ + "195", + -13.5875244140625 + ], + [ + "▁Nitro", + -13.587563514709473 + ], + [ + "amon", + -13.587564468383787 + ], + [ + "aye", + -13.587569236755373 + ], + [ + "▁2014).", + -13.587571144104004 + ], + [ + "▁robin", + -13.587608337402344 + ], + [ + "▁ltd", + -13.587644577026367 + ], + [ + "▁Shooter", + -13.58766269683838 + ], + [ + "▁CONTACT", + -13.587780952453612 + ], + [ + "Feeling", + -13.5878267288208 + ], + [ + "▁Prizes", + -13.587841987609863 + ], + [ + "gul", + -13.587925910949709 + ], + [ + "▁Conte", + -13.58796501159668 + ], + [ + "Mine", + -13.58802890777588 + ], + [ + "4-9", + -13.588064193725586 + ], + [ + "▁Loyola", + -13.588136672973633 + ], + [ + "▁banging", + -13.588143348693848 + ], + [ + "▁organiser", + -13.588245391845703 + ], + [ + "▁stewards", + -13.588269233703612 + ], + [ + "▁nitrate", + -13.58830738067627 + ], + [ + "▁Spicy", + -13.5883207321167 + ], + [ + "▁Webinar", + -13.588496208190918 + ], + [ + "Shares", + -13.588541984558104 + ], + [ + "▁NDP", + -13.58860683441162 + ], + [ + "▁Savi", + -13.588696479797363 + ], + [ + "determin", + -13.588705062866213 + ], + [ + "▁Davenport", + -13.588719367980955 + ], + [ + "▁camcorder", + -13.588719367980955 + ], + [ + "▁congested", + -13.588719367980955 + ], + [ + "▁dietitian", + -13.588719367980955 + ], + [ + "▁intoxicated", + -13.588719367980955 + ], + [ + "▁revitalization", + -13.588719367980955 + ], + [ + "▁unimaginable", + -13.588719367980955 + ], + [ + "▁wink", + -13.58872413635254 + ], + [ + "▁anomaly", + -13.588726043701172 + ], + [ + "▁Nordstrom", + -13.588726997375488 + ], + [ + "▁resurfacing", + -13.588743209838867 + ], + [ + "▁Melinda", + -13.588759422302246 + ], + [ + "▁Cancun", + -13.58876132965088 + ], + [ + "▁Roz", + -13.58876609802246 + ], + [ + "▁degenerative", + -13.588822364807127 + ], + [ + "▁policeman", + -13.588826179504396 + ], + [ + "▁Cornerstone", + -13.588866233825684 + ], + [ + "353", + -13.588876724243164 + ], + [ + "▁Emery", + -13.588911056518556 + ], + [ + "Kind", + -13.588919639587402 + ], + [ + "▁implicated", + -13.588932991027832 + ], + [ + "▁Skinner", + -13.588971138000488 + ], + [ + "▁vegas", + -13.588988304138184 + ], + [ + "intensity", + -13.589012145996094 + ], + [ + "wordpress", + -13.589018821716309 + ], + [ + "morning", + 
-13.589027404785156 + ], + [ + "▁Madras", + -13.589093208312988 + ], + [ + "hui", + -13.589150428771973 + ], + [ + "boom", + -13.589177131652832 + ], + [ + "▁Muller", + -13.589179039001465 + ], + [ + "▁checklists", + -13.589262962341309 + ], + [ + "▁accrued", + -13.589273452758787 + ], + [ + "sufficiency", + -13.589285850524902 + ], + [ + "▁Scal", + -13.589298248291016 + ], + [ + "▁Levine", + -13.589308738708496 + ], + [ + "Featured", + -13.589335441589355 + ], + [ + "Describe", + -13.589343070983888 + ], + [ + "QUE", + -13.58944034576416 + ], + [ + "▁STI", + -13.589482307434082 + ], + [ + "tronics", + -13.589486122131348 + ], + [ + "▁resided", + -13.589508056640623 + ], + [ + "trin", + -13.58952522277832 + ], + [ + "compatible", + -13.58961009979248 + ], + [ + "▁(2011).", + -13.589645385742188 + ], + [ + "▁cautiously", + -13.58969020843506 + ], + [ + "▁RFC", + -13.58976936340332 + ], + [ + "TOP", + -13.589883804321287 + ], + [ + "▁canoeing", + -13.589948654174805 + ], + [ + "008", + -13.589987754821776 + ], + [ + "▁0-0", + -13.590004920959473 + ], + [ + "Taste", + -13.5900297164917 + ], + [ + "▁irons", + -13.5900297164917 + ], + [ + "▁Radar", + -13.590033531188965 + ], + [ + "558", + -13.590058326721191 + ], + [ + "660", + -13.590242385864258 + ], + [ + "▁nuance", + -13.59024715423584 + ], + [ + "▁corpse", + -13.590365409851074 + ], + [ + "▁physicist", + -13.590371131896973 + ], + [ + "▁caster", + -13.59059715270996 + ], + [ + "▁Gabe", + -13.590656280517578 + ], + [ + "▁Mennonite", + -13.59079360961914 + ], + [ + "▁hamstring", + -13.590828895568848 + ], + [ + "▁fumble", + -13.590848922729492 + ], + [ + "ин", + -13.590909957885742 + ], + [ + "▁Gon", + -13.590972900390623 + ], + [ + "148", + -13.59104824066162 + ], + [ + "▁Brom", + -13.591226577758787 + ], + [ + "mak", + -13.59125518798828 + ], + [ + "▁mantel", + -13.591370582580566 + ], + [ + "▁Pharaoh", + -13.591450691223145 + ], + [ + "▁delinquent", + -13.591450691223145 + ], + [ + "▁retrieving", + -13.591450691223145 + ], + [ + "▁testimonies", + -13.591450691223145 + ], + [ + "▁COMPANY", + -13.591453552246094 + ], + [ + "▁Odessa", + -13.59145450592041 + ], + [ + "▁Rav", + -13.591455459594728 + ], + [ + "▁Convertible", + -13.59146213531494 + ], + [ + "▁mucus", + -13.591464042663574 + ], + [ + "▁Hathaway", + -13.59146499633789 + ], + [ + "▁fantasies", + -13.59146785736084 + ], + [ + "▁Adwords", + -13.591470718383787 + ], + [ + "▁insolvency", + -13.591480255126951 + ], + [ + "▁Diameter", + -13.591485023498535 + ], + [ + "▁Pretoria", + -13.591489791870115 + ], + [ + "▁enviable", + -13.591500282287598 + ], + [ + "caster", + -13.59152126312256 + ], + [ + "onda", + -13.59154224395752 + ], + [ + "▁stylistic", + -13.591561317443848 + ], + [ + "▁Percy", + -13.591567993164062 + ], + [ + "▁onslaught", + -13.591639518737791 + ], + [ + "▁Olson", + -13.591650009155272 + ], + [ + "otherapy", + -13.591665267944336 + ], + [ + "▁contractions", + -13.5917387008667 + ], + [ + "▁Warcraft", + -13.591767311096191 + ], + [ + "▁pinning", + -13.591782569885254 + ], + [ + "▁302", + -13.59181022644043 + ], + [ + "▁Heinz", + -13.591814041137695 + ], + [ + "Library", + -13.591814994812012 + ], + [ + "Faith", + -13.591821670532228 + ], + [ + "▁rind", + -13.59184741973877 + ], + [ + "▁allowable", + -13.5918550491333 + ], + [ + "▁Amenities", + -13.59189796447754 + ], + [ + "kla", + -13.591914176940918 + ], + [ + "6.7", + -13.591938972473145 + ], + [ + "Edward", + -13.591959953308104 + ], + [ + "▁flavoring", + -13.591981887817385 + ], + [ + "▁intellectually", + 
-13.592044830322266 + ], + [ + "processor", + -13.59213924407959 + ], + [ + "LOG", + -13.592214584350586 + ], + [ + "Spark", + -13.592342376708984 + ], + [ + "▁timeshare", + -13.592412948608398 + ], + [ + "fare", + -13.59242343902588 + ], + [ + "181", + -13.592483520507812 + ], + [ + "▁fearing", + -13.592571258544922 + ], + [ + "▁lambs", + -13.592597007751465 + ], + [ + "▁repel", + -13.59262752532959 + ], + [ + "Truth", + -13.592655181884766 + ], + [ + "156", + -13.59267807006836 + ], + [ + "▁portraying", + -13.592681884765623 + ], + [ + "0.3", + -13.592711448669434 + ], + [ + "▁Contrast", + -13.592727661132812 + ], + [ + "▁Cougars", + -13.592761039733888 + ], + [ + "▁Concentrate", + -13.592799186706545 + ], + [ + "▁Shirts", + -13.592803001403809 + ], + [ + "▁247", + -13.592897415161133 + ], + [ + "▁spotless", + -13.592989921569824 + ], + [ + "▁rework", + -13.593016624450684 + ], + [ + "▁Zap", + -13.593052864074709 + ], + [ + "▁Opposition", + -13.59305477142334 + ], + [ + "▁Improvements", + -13.593416213989258 + ], + [ + "▁sinners", + -13.593416213989258 + ], + [ + "▁lowers", + -13.593539237976074 + ], + [ + "▁solace", + -13.593544006347656 + ], + [ + "174", + -13.593592643737791 + ], + [ + "asco", + -13.59359645843506 + ], + [ + "▁Adele", + -13.59362506866455 + ], + [ + "▁Trey", + -13.59363842010498 + ], + [ + "▁Outfit", + -13.59364891052246 + ], + [ + "/03/", + -13.593653678894045 + ], + [ + "▁SHOP", + -13.593778610229492 + ], + [ + "▁earring", + -13.59378433227539 + ], + [ + "▁eater", + -13.593870162963867 + ], + [ + "▁groomed", + -13.593880653381348 + ], + [ + "▁Magna", + -13.594016075134276 + ], + [ + "▁Doris", + -13.594053268432615 + ], + [ + "▁acetate", + -13.594071388244627 + ], + [ + "derma", + -13.59407901763916 + ], + [ + "ibu", + -13.59409236907959 + ], + [ + "▁diaspora", + -13.59419059753418 + ], + [ + "▁sprinkling", + -13.59419059753418 + ], + [ + "▁contemplation", + -13.594191551208496 + ], + [ + "▁contraceptive", + -13.594191551208496 + ], + [ + "▁philippines", + -13.594191551208496 + ], + [ + "▁Okinawa", + -13.594192504882812 + ], + [ + "▁geopolitical", + -13.594202041625977 + ], + [ + "▁Pathology", + -13.594260215759276 + ], + [ + "▁soybeans", + -13.594277381896973 + ], + [ + "▁Paddy", + -13.59431266784668 + ], + [ + "▁novella", + -13.594327926635742 + ], + [ + "▁vying", + -13.594331741333008 + ], + [ + "9-0", + -13.594337463378906 + ], + [ + "ances", + -13.594364166259766 + ], + [ + "▁Preparing", + -13.594377517700195 + ], + [ + "▁bombarded", + -13.594415664672852 + ], + [ + "▁extender", + -13.594510078430176 + ], + [ + "▁copier", + -13.594526290893556 + ], + [ + "755", + -13.594561576843262 + ], + [ + "hundred", + -13.594598770141602 + ], + [ + "Turkey", + -13.594639778137209 + ], + [ + "▁Alvin", + -13.594735145568848 + ], + [ + "▁Gad", + -13.594756126403809 + ], + [ + "▁cyst", + -13.59477424621582 + ], + [ + "▁Prima", + -13.594792366027832 + ], + [ + "▁Worm", + -13.594806671142578 + ], + [ + "646", + -13.594902992248535 + ], + [ + "▁925", + -13.594904899597168 + ], + [ + "▁saline", + -13.594962120056152 + ], + [ + "CSS", + -13.594979286193848 + ], + [ + "Pra", + -13.594985961914062 + ], + [ + "tasking", + -13.59500789642334 + ], + [ + "▁oder", + -13.595117568969728 + ], + [ + "▁Brin", + -13.595130920410156 + ], + [ + "nico", + -13.59515380859375 + ], + [ + "▁Nia", + -13.595174789428713 + ], + [ + "▁SpaceX", + -13.595264434814451 + ], + [ + "▁crumb", + -13.595356941223145 + ], + [ + "▁hetero", + -13.595377922058104 + ], + [ + "▁screenwriter", + -13.5953950881958 + ], + 
[ + "-66", + -13.595409393310549 + ], + [ + "ITS", + -13.595415115356444 + ], + [ + "358", + -13.595436096191406 + ], + [ + "▁FEMA", + -13.59550952911377 + ], + [ + "dump", + -13.595718383789062 + ], + [ + "▁Representation", + -13.595735549926758 + ], + [ + "onium", + -13.595791816711426 + ], + [ + "▁Dumb", + -13.595818519592283 + ], + [ + "NDI", + -13.596039772033691 + ], + [ + "hardt", + -13.596070289611816 + ], + [ + "▁SNP", + -13.5961275100708 + ], + [ + "▁accession", + -13.596163749694824 + ], + [ + "▁Accredited", + -13.596198081970217 + ], + [ + "7.00", + -13.596278190612791 + ], + [ + "hag", + -13.596285820007324 + ], + [ + "DIA", + -13.59630012512207 + ], + [ + "dg", + -13.59644889831543 + ], + [ + "39.", + -13.596457481384276 + ], + [ + "▁breakers", + -13.596461296081545 + ], + [ + "▁smoothness", + -13.596473693847656 + ], + [ + "Eating", + -13.596485137939451 + ], + [ + "▁Expand", + -13.596609115600586 + ], + [ + "▁Regulator", + -13.596683502197266 + ], + [ + "▁Jos", + -13.596726417541504 + ], + [ + "brow", + -13.596747398376465 + ], + [ + "Finish", + -13.596772193908691 + ], + [ + "Pal", + -13.59684944152832 + ], + [ + "▁gateways", + -13.596877098083496 + ], + [ + "cina", + -13.59688663482666 + ], + [ + "▁pastime", + -13.59691047668457 + ], + [ + "▁monstrous", + -13.596938133239746 + ], + [ + "▁physiotherapist", + -13.596938133239746 + ], + [ + "▁SHIPPING", + -13.596939086914062 + ], + [ + "▁milligrams", + -13.59694004058838 + ], + [ + "▁LOW", + -13.596942901611328 + ], + [ + "▁WooCommerce", + -13.596942901611328 + ], + [ + "▁Ord", + -13.596945762634276 + ], + [ + "▁INCLUDING", + -13.596951484680176 + ], + [ + "▁mischief", + -13.59695529937744 + ], + [ + "▁Partition", + -13.596985816955566 + ], + [ + "▁Charities", + -13.5969877243042 + ], + [ + "Refer", + -13.597020149230955 + ], + [ + "▁Significant", + -13.597042083740234 + ], + [ + "▁stressors", + -13.597065925598145 + ], + [ + "▁Inspiring", + -13.597076416015623 + ], + [ + "▁Dyson", + -13.597115516662598 + ], + [ + "▁programmatic", + -13.597122192382812 + ], + [ + "▁mir", + -13.597389221191406 + ], + [ + "▁1897", + -13.59740924835205 + ], + [ + "▁intercepted", + -13.597436904907228 + ], + [ + "Chelsea", + -13.597439765930176 + ], + [ + "Motor", + -13.597476959228516 + ], + [ + "shade", + -13.59749984741211 + ], + [ + "lance", + -13.59755802154541 + ], + [ + "▁Gent", + -13.597593307495115 + ], + [ + "however", + -13.597601890563965 + ], + [ + "▁Lester", + -13.597623825073242 + ], + [ + "ump", + -13.597638130187988 + ], + [ + "flip", + -13.597685813903809 + ], + [ + "arium", + -13.597698211669922 + ], + [ + "▁expedited", + -13.59772777557373 + ], + [ + "▁allocations", + -13.59773063659668 + ], + [ + "▁Amor", + -13.597774505615234 + ], + [ + "evolving", + -13.597782135009766 + ], + [ + "aine", + -13.59781551361084 + ], + [ + "▁Bangor", + -13.597818374633787 + ], + [ + "ncy", + -13.59786891937256 + ], + [ + "▁persisted", + -13.597887992858888 + ], + [ + "▁parasite", + -13.597929954528809 + ], + [ + "▁2022.", + -13.597941398620604 + ], + [ + "uze", + -13.597951889038086 + ], + [ + "▁OL", + -13.598013877868652 + ], + [ + "provided", + -13.598016738891602 + ], + [ + "Hong", + -13.598034858703612 + ], + [ + "▁spaceship", + -13.598105430603027 + ], + [ + "▁Poison", + -13.59812068939209 + ], + [ + "▁285", + -13.598151206970217 + ], + [ + "▁Zhao", + -13.59815788269043 + ], + [ + "▁Stag", + -13.598233222961426 + ], + [ + "▁Partnerships", + -13.598275184631348 + ], + [ + "STER", + -13.598286628723145 + ], + [ + "▁(2010).", + 
-13.598296165466309 + ], + [ + "▁Comparing", + -13.598456382751465 + ], + [ + "UNT", + -13.59846019744873 + ], + [ + "INO", + -13.598625183105469 + ], + [ + "03.", + -13.598708152770996 + ], + [ + "▁Lange", + -13.598721504211426 + ], + [ + "FORT", + -13.598756790161133 + ], + [ + "novo", + -13.59877872467041 + ], + [ + "Panel", + -13.5988187789917 + ], + [ + "▁Camps", + -13.598836898803713 + ], + [ + "▁Cot", + -13.598910331726074 + ], + [ + "▁Dag", + -13.598936080932615 + ], + [ + "▁Chalk", + -13.59900951385498 + ], + [ + "▁Soo", + -13.599040985107422 + ], + [ + "▁не", + -13.599044799804688 + ], + [ + "264", + -13.59913444519043 + ], + [ + "▁infiltration", + -13.59920883178711 + ], + [ + "▁shawl", + -13.599220275878906 + ], + [ + "▁fil", + -13.599225044250488 + ], + [ + "▁1972,", + -13.599255561828612 + ], + [ + "▁Baum", + -13.599275588989258 + ], + [ + "▁Wearing", + -13.599276542663574 + ], + [ + "▁Solicitors", + -13.599283218383787 + ], + [ + "▁Vy", + -13.599287986755373 + ], + [ + "edged", + -13.59936237335205 + ], + [ + "▁punctual", + -13.59941577911377 + ], + [ + "lator", + -13.599454879760742 + ], + [ + "▁Rowan", + -13.599514961242676 + ], + [ + "▁constructs", + -13.599520683288574 + ], + [ + "omy", + -13.599543571472168 + ], + [ + "toned", + -13.5995512008667 + ], + [ + "▁Teenage", + -13.59956169128418 + ], + [ + "▁NOAA", + -13.59959316253662 + ], + [ + "/2011", + -13.599628448486328 + ], + [ + "Sculpt", + -13.59968376159668 + ], + [ + "▁Judiciary", + -13.599693298339844 + ], + [ + "▁McCormick", + -13.599693298339844 + ], + [ + "▁Syllabus", + -13.599693298339844 + ], + [ + "▁populist", + -13.599693298339844 + ], + [ + "▁vibrancy", + -13.599693298339844 + ], + [ + "▁Middlesex", + -13.599698066711426 + ], + [ + "▁biographies", + -13.599699020385742 + ], + [ + "▁Flats", + -13.599713325500488 + ], + [ + "▁Ogden", + -13.599719047546388 + ], + [ + "▁bracing", + -13.599737167358398 + ], + [ + "▁grader", + -13.599770545959473 + ], + [ + "3.7", + -13.599773406982422 + ], + [ + "▁stockpile", + -13.599777221679688 + ], + [ + "▁Kawa", + -13.599802017211914 + ], + [ + "▁nc", + -13.599809646606444 + ], + [ + "▁Pledge", + -13.59988498687744 + ], + [ + "Words", + -13.599897384643556 + ], + [ + "▁gleaming", + -13.59995174407959 + ], + [ + "▁nonlinear", + -13.59995460510254 + ], + [ + "▁diplomats", + -13.599994659423828 + ], + [ + "▁(27", + -13.60001277923584 + ], + [ + "Ian", + -13.600025177001951 + ], + [ + "▁Wasp", + -13.60004425048828 + ], + [ + "▁sighting", + -13.600133895874023 + ], + [ + "▁70’", + -13.600141525268556 + ], + [ + "▁5-8", + -13.600152015686035 + ], + [ + "ACH", + -13.60022258758545 + ], + [ + "▁48-", + -13.600269317626951 + ], + [ + "Alpha", + -13.60027313232422 + ], + [ + "▁Controlled", + -13.6002836227417 + ], + [ + "Freedom", + -13.60032844543457 + ], + [ + "▁phyto", + -13.60034465789795 + ], + [ + "▁automakers", + -13.600349426269531 + ], + [ + "aman", + -13.600502967834473 + ], + [ + "▁Zar", + -13.600540161132812 + ], + [ + "▁reassured", + -13.600560188293455 + ], + [ + "▁deteriorated", + -13.60056495666504 + ], + [ + "7-8", + -13.600582122802734 + ], + [ + "Mur", + -13.600587844848633 + ], + [ + "▁Chap", + -13.600610733032228 + ], + [ + "▁equates", + -13.600626945495604 + ], + [ + "▁Nun", + -13.600704193115234 + ], + [ + "hampton", + -13.600727081298828 + ], + [ + "▁Hurst", + -13.600774765014648 + ], + [ + "▁Tart", + -13.600810050964355 + ], + [ + "6-1", + -13.600942611694336 + ], + [ + "EIN", + -13.600956916809082 + ], + [ + "AAA", + -13.601069450378418 + ], + [ + "▁wil", 
+ -13.601097106933594 + ], + [ + "▁Traveler", + -13.60116481781006 + ], + [ + "▁contentment", + -13.601181983947754 + ], + [ + "296", + -13.601202011108398 + ], + [ + "▁Showroom", + -13.601239204406738 + ], + [ + "derm", + -13.601292610168455 + ], + [ + "ROW", + -13.601301193237305 + ], + [ + "▁Josef", + -13.601326942443848 + ], + [ + "▁distressing", + -13.60133934020996 + ], + [ + "sounding", + -13.601351737976074 + ], + [ + "-3)", + -13.601438522338867 + ], + [ + "▁mayhem", + -13.601505279541016 + ], + [ + "▁3/8", + -13.601563453674316 + ], + [ + "▁ADC", + -13.601571083068848 + ], + [ + "▁2016).", + -13.601576805114746 + ], + [ + "hosting", + -13.601591110229492 + ], + [ + "▁Ruben", + -13.601606369018556 + ], + [ + "aaS", + -13.60163116455078 + ], + [ + "▁nach", + -13.601680755615234 + ], + [ + "LAR", + -13.601682662963867 + ], + [ + "▁burglar", + -13.601696014404297 + ], + [ + "▁capacitor", + -13.601716995239258 + ], + [ + "beach", + -13.601800918579102 + ], + [ + "▁shove", + -13.601871490478516 + ], + [ + "▁recreated", + -13.601874351501465 + ], + [ + "▁annotation", + -13.601948738098145 + ], + [ + "plot", + -13.601975440979004 + ], + [ + "364", + -13.602044105529783 + ], + [ + "CX", + -13.602057456970217 + ], + [ + "▁marshal", + -13.60218620300293 + ], + [ + "▁realtors", + -13.602261543273926 + ], + [ + "▁Longer", + -13.602280616760254 + ], + [ + "▁Oy", + -13.602313995361328 + ], + [ + "LEA", + -13.602333068847656 + ], + [ + "▁motorsport", + -13.602365493774414 + ], + [ + "fla", + -13.602375984191896 + ], + [ + "▁Versailles", + -13.602455139160156 + ], + [ + "▁elevating", + -13.602455139160156 + ], + [ + "▁recognisable", + -13.602455139160156 + ], + [ + "▁supremacy", + -13.602455139160156 + ], + [ + "▁unbearable", + -13.602455139160156 + ], + [ + "▁Librarian", + -13.602458000183104 + ], + [ + "▁muffler", + -13.602476119995115 + ], + [ + "▁Perfume", + -13.60251522064209 + ], + [ + "▁Billings", + -13.602532386779783 + ], + [ + "▁Catalonia", + -13.602548599243164 + ], + [ + "▁bogus", + -13.602572441101074 + ], + [ + "▁creme", + -13.60260009765625 + ], + [ + "▁Hits", + -13.602606773376465 + ], + [ + "▁sf", + -13.602633476257324 + ], + [ + "▁niches", + -13.60265064239502 + ], + [ + "Seal", + -13.602676391601562 + ], + [ + "▁Krakow", + -13.602701187133787 + ], + [ + "▁Kohl", + -13.602707862854004 + ], + [ + "▁mediated", + -13.602737426757812 + ], + [ + "▁blot", + -13.602795600891112 + ], + [ + "▁Complimentary", + -13.602871894836426 + ], + [ + "▁Preliminary", + -13.602871894836426 + ], + [ + "▁Lillian", + -13.602910041809082 + ], + [ + "sett", + -13.602944374084473 + ], + [ + "▁Palin", + -13.603026390075684 + ], + [ + "hie", + -13.603041648864746 + ], + [ + "Shield", + -13.60308074951172 + ], + [ + "Mountain", + -13.603092193603516 + ], + [ + "Church", + -13.603094100952148 + ], + [ + "female", + -13.603094100952148 + ], + [ + "▁CMOS", + -13.60310173034668 + ], + [ + "permanent", + -13.60312557220459 + ], + [ + "▁Packed", + -13.603163719177246 + ], + [ + "▁fastened", + -13.60316562652588 + ], + [ + "NDA", + -13.603187561035156 + ], + [ + "▁Cookbook", + -13.603232383728027 + ], + [ + "▁(2007).", + -13.603276252746582 + ], + [ + "▁patrols", + -13.60331916809082 + ], + [ + "10/", + -13.603331565856934 + ], + [ + "▁CFD", + -13.603339195251465 + ], + [ + "Production", + -13.603364944458008 + ], + [ + "▁executes", + -13.603381156921388 + ], + [ + "294", + -13.603384017944336 + ], + [ + "transfer", + -13.603386878967283 + ], + [ + "ination", + -13.60344696044922 + ], + [ + "▁Remind", + 
-13.603461265563965 + ], + [ + "▁1,700", + -13.603548049926758 + ], + [ + "▁Completion", + -13.60356616973877 + ], + [ + "▁Mirage", + -13.603678703308104 + ], + [ + "618", + -13.603683471679688 + ], + [ + "azz", + -13.60370922088623 + ], + [ + "civil", + -13.60374641418457 + ], + [ + "▁api", + -13.603864669799805 + ], + [ + "-62", + -13.603910446166992 + ], + [ + "▁DDS", + -13.603940963745115 + ], + [ + "▁Optic", + -13.603946685791016 + ], + [ + "▁Shank", + -13.603976249694824 + ], + [ + "▁Danger", + -13.604029655456545 + ], + [ + "Independent", + -13.604040145874023 + ], + [ + "7:30", + -13.604086875915527 + ], + [ + "usable", + -13.604166030883787 + ], + [ + "▁recounts", + -13.60418701171875 + ], + [ + "567", + -13.604248046875 + ], + [ + "▁Ironically", + -13.604266166687012 + ], + [ + "▁scooped", + -13.60427188873291 + ], + [ + "48.", + -13.604321479797363 + ], + [ + "▁Hound", + -13.604341506958008 + ], + [ + "▁0.05", + -13.604410171508787 + ], + [ + "▁Karan", + -13.604484558105469 + ], + [ + "▁bidders", + -13.60450267791748 + ], + [ + "▁Quant", + -13.604676246643066 + ], + [ + "▁parishioners", + -13.60470962524414 + ], + [ + "▁lily", + -13.604758262634276 + ], + [ + "▁Puma", + -13.604764938354492 + ], + [ + "chev", + -13.604802131652832 + ], + [ + "▁expectant", + -13.60483741760254 + ], + [ + "▁Convenient", + -13.60484790802002 + ], + [ + "straight", + -13.60486888885498 + ], + [ + "▁tx", + -13.604920387268066 + ], + [ + "chai", + -13.60492992401123 + ], + [ + "▁Rox", + -13.605009078979492 + ], + [ + "▁Survivor", + -13.605053901672363 + ], + [ + "ores", + -13.605154037475586 + ], + [ + "▁Kama", + -13.605164527893066 + ], + [ + "cult", + -13.605178833007812 + ], + [ + "▁martini", + -13.605209350585938 + ], + [ + "▁Telangana", + -13.605225563049316 + ], + [ + "▁officiating", + -13.605225563049316 + ], + [ + "▁pedagogy", + -13.605225563049316 + ], + [ + "▁probabilities", + -13.605226516723633 + ], + [ + "▁bunnies", + -13.605231285095217 + ], + [ + "▁futile", + -13.605237007141112 + ], + [ + "▁Microgaming", + -13.605238914489746 + ], + [ + "▁humidifier", + -13.605286598205566 + ], + [ + "▁defrost", + -13.60530948638916 + ], + [ + "▁(2019)", + -13.605326652526855 + ], + [ + "▁favorably", + -13.60534381866455 + ], + [ + "▁tarmac", + -13.60538387298584 + ], + [ + "▁hatchback", + -13.605384826660156 + ], + [ + "▁tins", + -13.60545825958252 + ], + [ + "▁validating", + -13.605474472045898 + ], + [ + "▁Adjustment", + -13.605485916137695 + ], + [ + "▁Organizing", + -13.605488777160645 + ], + [ + "▁Aruba", + -13.605517387390137 + ], + [ + "▁WIFI", + -13.605595588684082 + ], + [ + "ipe", + -13.605613708496094 + ], + [ + "▁Fibre", + -13.60562229156494 + ], + [ + "▁palsy", + -13.605657577514648 + ], + [ + "▁Pills", + -13.605660438537598 + ], + [ + "▁Groom", + -13.605670928955078 + ], + [ + "▁Nicky", + -13.605759620666504 + ], + [ + "Blend", + -13.605799674987791 + ], + [ + "▁complainant", + -13.605917930603027 + ], + [ + "Delete", + -13.605939865112305 + ], + [ + "iston", + -13.605944633483888 + ], + [ + "▁shone", + -13.606012344360352 + ], + [ + "Court", + -13.606043815612791 + ], + [ + "▁soggy", + -13.606134414672852 + ], + [ + "▁Nerd", + -13.606165885925291 + ], + [ + "▁exacting", + -13.606196403503418 + ], + [ + "▁flossing", + -13.606206893920898 + ], + [ + "lumin", + -13.606236457824709 + ], + [ + "▁Wig", + -13.606300354003906 + ], + [ + "▁spoilt", + -13.606307983398438 + ], + [ + "655", + -13.606308937072754 + ], + [ + "▁impairments", + -13.606367111206056 + ], + [ + "▁tinting", + 
-13.606409072875977 + ], + [ + "▁9.30", + -13.606433868408203 + ], + [ + "erne", + -13.606480598449709 + ], + [ + "Shopping", + -13.606494903564451 + ], + [ + "Miller", + -13.6065034866333 + ], + [ + "▁cafés", + -13.60653018951416 + ], + [ + "▁congratulated", + -13.60658836364746 + ], + [ + "▁Kappa", + -13.606643676757812 + ], + [ + "▁vests", + -13.606646537780762 + ], + [ + "Reviews", + -13.606672286987305 + ], + [ + "▁heighten", + -13.60669994354248 + ], + [ + "nest", + -13.606773376464844 + ], + [ + "▁XV", + -13.60677719116211 + ], + [ + "Increased", + -13.6068115234375 + ], + [ + "▁ETH", + -13.606928825378418 + ], + [ + "▁Amnesty", + -13.606978416442873 + ], + [ + "(6):", + -13.606992721557615 + ], + [ + "555", + -13.607012748718262 + ], + [ + "▁mai", + -13.60703945159912 + ], + [ + "SIC", + -13.607139587402344 + ], + [ + "▁Clubhouse", + -13.607150077819824 + ], + [ + "37.", + -13.60721492767334 + ], + [ + "▁Holistic", + -13.60731029510498 + ], + [ + "Ingredients", + -13.607375144958496 + ], + [ + "▁apologise", + -13.607401847839355 + ], + [ + "▁puzzled", + -13.607427597045898 + ], + [ + "▁rightfully", + -13.607427597045898 + ], + [ + "5-4", + -13.607441902160645 + ], + [ + "381", + -13.607479095458984 + ], + [ + "801", + -13.60752296447754 + ], + [ + "442", + -13.607544898986816 + ], + [ + "▁Educator", + -13.607561111450195 + ], + [ + "▁counsellor", + -13.607588768005373 + ], + [ + "ncia", + -13.607614517211914 + ], + [ + "▁Jas", + -13.607661247253418 + ], + [ + "▁Baked", + -13.607776641845703 + ], + [ + "▁Willy", + -13.607810020446776 + ], + [ + "▁terraced", + -13.607864379882812 + ], + [ + "atur", + -13.607881546020508 + ], + [ + "▁1893", + -13.607908248901367 + ], + [ + "▁Honest", + -13.60797882080078 + ], + [ + "▁Kalamazoo", + -13.608003616333008 + ], + [ + "▁Saratoga", + -13.608003616333008 + ], + [ + "▁jeopardy", + -13.608003616333008 + ], + [ + "▁sedentary", + -13.608003616333008 + ], + [ + "▁Chimney", + -13.608013153076172 + ], + [ + "▁Limerick", + -13.608016967773438 + ], + [ + "▁Maduro", + -13.608027458190918 + ], + [ + "▁Trilogy", + -13.60802936553955 + ], + [ + "▁Actors", + -13.608062744140623 + ], + [ + "▁zipped", + -13.608091354370115 + ], + [ + "▁Ascend", + -13.608107566833496 + ], + [ + "▁filthy", + -13.608107566833496 + ], + [ + "▁Eliza", + -13.608139991760254 + ], + [ + "IBLE", + -13.60820198059082 + ], + [ + "▁NRC", + -13.608210563659668 + ], + [ + "▁foregoing", + -13.60827350616455 + ], + [ + "▁Duffy", + -13.608366012573242 + ], + [ + "▁competes", + -13.608393669128418 + ], + [ + "▁Welding", + -13.608412742614746 + ], + [ + "▁timers", + -13.608482360839844 + ], + [ + "atric", + -13.608512878417969 + ], + [ + "▁Zika", + -13.60857105255127 + ], + [ + "▁exacerbated", + -13.60859203338623 + ], + [ + "ories", + -13.608596801757812 + ], + [ + "▁Lotion", + -13.608647346496582 + ], + [ + "▁leaned", + -13.608769416809082 + ], + [ + "restaurant", + -13.60877513885498 + ], + [ + "District", + -13.608784675598145 + ], + [ + "property", + -13.608819007873535 + ], + [ + "voltage", + -13.608824729919434 + ], + [ + "peace", + -13.608844757080078 + ], + [ + "eurs", + -13.608880996704102 + ], + [ + "▁BTW", + -13.608949661254885 + ], + [ + "▁Powerpoint", + -13.608968734741213 + ], + [ + "employee", + -13.60901927947998 + ], + [ + "APA", + -13.609065055847168 + ], + [ + "fjord", + -13.609068870544434 + ], + [ + "▁JW", + -13.609090805053713 + ], + [ + "▁300-", + -13.60910987854004 + ], + [ + "▁tireless", + -13.60914134979248 + ], + [ + "▁Concordia", + -13.609195709228516 + ], + [ + 
"▁Kron", + -13.609200477600098 + ], + [ + "▁Skilled", + -13.609302520751951 + ], + [ + "▁Staten", + -13.609315872192385 + ], + [ + "▁donkey", + -13.60933780670166 + ], + [ + "bras", + -13.609416007995604 + ], + [ + "polymer", + -13.609430313110352 + ], + [ + "▁inhaler", + -13.609471321105955 + ], + [ + "▁personas", + -13.609475135803224 + ], + [ + "▁abundantly", + -13.609540939331056 + ], + [ + "ени", + -13.609749794006348 + ], + [ + "▁blockages", + -13.609780311584473 + ], + [ + "398", + -13.60981273651123 + ], + [ + "hul", + -13.609844207763672 + ], + [ + "▁remit", + -13.609893798828123 + ], + [ + "▁Wanna", + -13.609936714172363 + ], + [ + "eko", + -13.609994888305664 + ], + [ + "▁dialogues", + -13.610051155090332 + ], + [ + "▁Mardi", + -13.610114097595217 + ], + [ + "▁Asbestos", + -13.610249519348145 + ], + [ + "pani", + -13.610307693481444 + ], + [ + "/08/", + -13.610365867614746 + ], + [ + "-2006", + -13.610366821289062 + ], + [ + "bourg", + -13.61046314239502 + ], + [ + "▁babe", + -13.61047077178955 + ], + [ + "▁Casinos", + -13.610508918762209 + ], + [ + "▁appraiser", + -13.610548973083496 + ], + [ + "▁headings", + -13.610594749450684 + ], + [ + "▁Seen", + -13.61066436767578 + ], + [ + "▁fetus", + -13.610671997070312 + ], + [ + "▁Mik", + -13.610676765441896 + ], + [ + "▁Este", + -13.61069679260254 + ], + [ + "▁Feeding", + -13.610724449157717 + ], + [ + "▁melee", + -13.610763549804688 + ], + [ + "▁curvature", + -13.610788345336914 + ], + [ + "▁lukewarm", + -13.610788345336914 + ], + [ + "▁refrigerant", + -13.610788345336914 + ], + [ + "▁WEBSITE", + -13.61078929901123 + ], + [ + "▁emptied", + -13.61078929901123 + ], + [ + "▁injuring", + -13.610795974731444 + ], + [ + "▁Refugee", + -13.610797882080078 + ], + [ + "▁Haryana", + -13.610800743103027 + ], + [ + "▁Talbot", + -13.61080837249756 + ], + [ + "▁gazing", + -13.610811233520508 + ], + [ + "▁Ulster", + -13.61082363128662 + ], + [ + "▁Finishing", + -13.610835075378418 + ], + [ + "▁Puget", + -13.610840797424316 + ], + [ + "▁SUNY", + -13.610849380493164 + ], + [ + "▁unplug", + -13.610867500305176 + ], + [ + "▁Palette", + -13.61087131500244 + ], + [ + "▁Recorded", + -13.610872268676758 + ], + [ + "▁payers", + -13.610906600952148 + ], + [ + "▁Heli", + -13.610921859741213 + ], + [ + "balls", + -13.610941886901855 + ], + [ + "▁remission", + -13.611016273498535 + ], + [ + "▁Bridgewater", + -13.611031532287598 + ], + [ + "dun", + -13.611130714416504 + ], + [ + "boost", + -13.611136436462402 + ], + [ + "▁Termite", + -13.611173629760742 + ], + [ + "▁wie", + -13.61117935180664 + ], + [ + "1\"", + -13.61118221282959 + ], + [ + "▁Chet", + -13.61127471923828 + ], + [ + "Drink", + -13.611297607421877 + ], + [ + "▁KIND", + -13.611297607421877 + ], + [ + "▁Difficult", + -13.611379623413086 + ], + [ + "▁Jacqueline", + -13.611394882202148 + ], + [ + "▁Attic", + -13.611553192138672 + ], + [ + "Factor", + -13.611571311950684 + ], + [ + "▁Fallout", + -13.611581802368164 + ], + [ + "Volume", + -13.611601829528809 + ], + [ + "0′′", + -13.611624717712402 + ], + [ + "▁Duff", + -13.611772537231444 + ], + [ + "▁PART", + -13.61178970336914 + ], + [ + "Colour", + -13.611812591552734 + ], + [ + "▁auch", + -13.61188507080078 + ], + [ + "▁demonstrators", + -13.611913681030272 + ], + [ + "▁kicker", + -13.611934661865234 + ], + [ + "▁classifieds", + -13.611961364746094 + ], + [ + "▁Received", + -13.611981391906738 + ], + [ + "▁Esp", + -13.61202335357666 + ], + [ + "▁Supervisors", + -13.612116813659668 + ], + [ + "painting", + -13.61212921142578 + ], + [ + "▁Lumia", + 
-13.61215591430664 + ], + [ + "▁Milano", + -13.612181663513184 + ], + [ + "▁1886", + -13.612204551696776 + ], + [ + "▁dell", + -13.612204551696776 + ], + [ + "▁Employ", + -13.61221981048584 + ], + [ + "▁21%", + -13.612229347229004 + ], + [ + "710", + -13.61227321624756 + ], + [ + "393", + -13.612347602844238 + ], + [ + "▁actuality", + -13.612406730651855 + ], + [ + "logging", + -13.612417221069336 + ], + [ + "▁skates", + -13.612432479858398 + ], + [ + "▁Baha", + -13.612435340881348 + ], + [ + "▁pla", + -13.612525939941406 + ], + [ + "DAS", + -13.612558364868164 + ], + [ + "▁Huddersfield", + -13.61256217956543 + ], + [ + "LON", + -13.612608909606934 + ], + [ + "▁chills", + -13.61262321472168 + ], + [ + "▁Leads", + -13.6126708984375 + ], + [ + "▁carvings", + -13.612715721130373 + ], + [ + "versa", + -13.612730026245115 + ], + [ + "▁seminary", + -13.612780570983888 + ], + [ + "▁ticked", + -13.612783432006836 + ], + [ + "posing", + -13.612808227539062 + ], + [ + "80.", + -13.612945556640623 + ], + [ + "▁mana", + -13.613134384155272 + ], + [ + "▁Aged", + -13.613234519958496 + ], + [ + "▁Fond", + -13.613250732421877 + ], + [ + "▁Roanoke", + -13.61326026916504 + ], + [ + "▁choreographer", + -13.613287925720217 + ], + [ + "Sur", + -13.613290786743164 + ], + [ + "▁ASU", + -13.613306045532228 + ], + [ + "▁Cong", + -13.613353729248049 + ], + [ + "Ye", + -13.61335563659668 + ], + [ + "▁Weapons", + -13.613370895385742 + ], + [ + "chino", + -13.613375663757324 + ], + [ + "birth", + -13.613409042358398 + ], + [ + "▁Blaze", + -13.613451957702637 + ], + [ + "▁Weir", + -13.613511085510254 + ], + [ + "▁Tops", + -13.613525390625 + ], + [ + "▁deforestation", + -13.613581657409668 + ], + [ + "▁discrepancies", + -13.613581657409668 + ], + [ + "▁dismantling", + -13.613581657409668 + ], + [ + "▁perpendicular", + -13.613581657409668 + ], + [ + "▁sergeant", + -13.613581657409668 + ], + [ + "▁irreversible", + -13.613582611083984 + ], + [ + "▁candidacy", + -13.613588333129885 + ], + [ + "▁hypnotic", + -13.613595008850098 + ], + [ + "▁infographics", + -13.613605499267578 + ], + [ + "▁consulate", + -13.61361312866211 + ], + [ + "▁aortic", + -13.613614082336426 + ], + [ + "▁fragmentation", + -13.613630294799805 + ], + [ + "▁Bourne", + -13.613637924194336 + ], + [ + "▁jammed", + -13.613649368286133 + ], + [ + "▁preferential", + -13.613688468933104 + ], + [ + "▁diaries", + -13.6137056350708 + ], + [ + "▁limelight", + -13.613729476928713 + ], + [ + "▁patronage", + -13.613729476928713 + ], + [ + "▁goofy", + -13.613775253295898 + ], + [ + "▁flammable", + -13.613804817199709 + ], + [ + "▁Fragrance", + -13.613805770874023 + ], + [ + "▁Bavarian", + -13.613848686218262 + ], + [ + "▁Tweed", + -13.613859176635742 + ], + [ + "0.7", + -13.61386775970459 + ], + [ + "▁shielding", + -13.613869667053224 + ], + [ + "740", + -13.613956451416016 + ], + [ + "▁Sailor", + -13.61400318145752 + ], + [ + "▁Athlete", + -13.61413288116455 + ], + [ + "▁initials", + -13.6141357421875 + ], + [ + "▁disagreed", + -13.614225387573242 + ], + [ + "245", + -13.6143217086792 + ], + [ + "OST", + -13.614325523376465 + ], + [ + "▁reckoned", + -13.614334106445312 + ], + [ + "iton", + -13.614335060119627 + ], + [ + "▁chancellor", + -13.614385604858398 + ], + [ + "▁$33", + -13.61442756652832 + ], + [ + "ioli", + -13.614453315734863 + ], + [ + "▁undergoes", + -13.614486694335938 + ], + [ + "traumatic", + -13.614506721496582 + ], + [ + "Crystal", + -13.614557266235352 + ], + [ + "▁Psychologist", + -13.6146240234375 + ], + [ + "Tube", + -13.614631652832031 + ], + [ + 
"-53", + -13.614643096923828 + ], + [ + "▁Gha", + -13.61468505859375 + ], + [ + "Doctor", + -13.614748001098633 + ], + [ + "▁moulded", + -13.614749908447266 + ], + [ + "▁outboard", + -13.614806175231934 + ], + [ + "▁tiling", + -13.614813804626465 + ], + [ + "LOS", + -13.615008354187012 + ], + [ + "▁petitioner", + -13.615066528320312 + ], + [ + "Grace", + -13.615078926086426 + ], + [ + "▁gout", + -13.615141868591309 + ], + [ + "▁Publish", + -13.615280151367188 + ], + [ + "Fed", + -13.615365028381348 + ], + [ + "▁autograph", + -13.615418434143066 + ], + [ + "▁Scot", + -13.615421295166016 + ], + [ + "▁Baptism", + -13.61548900604248 + ], + [ + "▁centimeters", + -13.615551948547363 + ], + [ + "allo", + -13.615631103515623 + ], + [ + "▁Robo", + -13.61563491821289 + ], + [ + "▁Wolverine", + -13.615735054016112 + ], + [ + "lease", + -13.615766525268556 + ], + [ + "▁evacuate", + -13.615782737731934 + ], + [ + "▁rupture", + -13.615782737731934 + ], + [ + "▁1885", + -13.61579132080078 + ], + [ + "▁teased", + -13.61581039428711 + ], + [ + "▁Discipline", + -13.615838050842283 + ], + [ + "▁inorganic", + -13.615938186645508 + ], + [ + "uris", + -13.615958213806152 + ], + [ + "820", + -13.615968704223633 + ], + [ + "▁lures", + -13.616023063659668 + ], + [ + "▁cyclical", + -13.616025924682615 + ], + [ + "seeing", + -13.616100311279297 + ], + [ + "▁Wyn", + -13.61634349822998 + ], + [ + "▁Alcatel", + -13.61638355255127 + ], + [ + "▁Tobago", + -13.61638355255127 + ], + [ + "▁rebellious", + -13.61638355255127 + ], + [ + "▁bullpen", + -13.616453170776367 + ], + [ + "azo", + -13.61651611328125 + ], + [ + "▁Chel", + -13.61652660369873 + ], + [ + "▁DAMAGES", + -13.616559982299805 + ], + [ + "▁boulders", + -13.616609573364258 + ], + [ + "▁ballpark", + -13.616621017456056 + ], + [ + "▁Acorn", + -13.616632461547852 + ], + [ + "▁Corbyn", + -13.616646766662598 + ], + [ + "▁Taxation", + -13.616790771484377 + ], + [ + "▁Haram", + -13.61680030822754 + ], + [ + "mh", + -13.61683464050293 + ], + [ + "1-6", + -13.616921424865724 + ], + [ + "ulp", + -13.616924285888672 + ], + [ + "▁BN", + -13.61693000793457 + ], + [ + "▁adopters", + -13.616960525512695 + ], + [ + "▁Autonomous", + -13.617036819458008 + ], + [ + "-43", + -13.617067337036133 + ], + [ + "/07/", + -13.617121696472168 + ], + [ + "▁overrun", + -13.617182731628418 + ], + [ + "KC", + -13.617217063903809 + ], + [ + ")—", + -13.617222785949709 + ], + [ + "▁Tears", + -13.617228507995604 + ], + [ + "▁overthrow", + -13.617287635803224 + ], + [ + "Catholic", + -13.617372512817385 + ], + [ + "protection", + -13.617401123046877 + ], + [ + "Chan", + -13.61744499206543 + ], + [ + "▁arresting", + -13.61746311187744 + ], + [ + "infused", + -13.617536544799805 + ], + [ + "182", + -13.617546081542969 + ], + [ + "▁Bayou", + -13.617570877075195 + ], + [ + "▁helium", + -13.617648124694824 + ], + [ + "▁bruises", + -13.617709159851074 + ], + [ + "▁logger", + -13.617932319641112 + ], + [ + "530", + -13.61803150177002 + ], + [ + "▁($4", + -13.618036270141602 + ], + [ + "▁illusions", + -13.618062019348145 + ], + [ + "▁concerted", + -13.618244171142578 + ], + [ + "▁overuse", + -13.61829948425293 + ], + [ + "feeding", + -13.618322372436523 + ], + [ + "insured", + -13.6183443069458 + ], + [ + "▁Ameri", + -13.618446350097656 + ], + [ + "▁Psy", + -13.618452072143556 + ], + [ + "▁Zin", + -13.618515014648438 + ], + [ + "▁$37", + -13.618566513061523 + ], + [ + "▁10+", + -13.618630409240724 + ], + [ + "▁reschedule", + -13.618684768676758 + ], + [ + "Brad", + -13.618770599365234 + ], + [ + "stre", + 
-13.618772506713867 + ], + [ + "Gray", + -13.618799209594728 + ], + [ + "▁chemist", + -13.618829727172852 + ], + [ + "▁parrot", + -13.618887901306152 + ], + [ + "▁Supports", + -13.618926048278809 + ], + [ + "thia", + -13.618968963623049 + ], + [ + "telli", + -13.618999481201172 + ], + [ + "▁resell", + -13.619009017944336 + ], + [ + "▁Moe", + -13.619020462036133 + ], + [ + "ULT", + -13.61907196044922 + ], + [ + "▁fuzz", + -13.619136810302734 + ], + [ + "▁Bucharest", + -13.619192123413086 + ], + [ + "▁mammoth", + -13.619192123413086 + ], + [ + "▁locator", + -13.619205474853516 + ], + [ + "▁chevy", + -13.619211196899414 + ], + [ + "▁floorplan", + -13.61928939819336 + ], + [ + "▁Lonely", + -13.619296073913574 + ], + [ + "Employees", + -13.619335174560549 + ], + [ + "cry", + -13.619345664978027 + ], + [ + "501", + -13.619382858276367 + ], + [ + "▁Gogh", + -13.61940097808838 + ], + [ + "▁fandom", + -13.619427680969238 + ], + [ + "▁typos", + -13.619452476501465 + ], + [ + "▁990", + -13.6195068359375 + ], + [ + "▁2015-16", + -13.619535446166992 + ], + [ + "▁printables", + -13.619537353515623 + ], + [ + "▁accuse", + -13.61954116821289 + ], + [ + "▁Reaction", + -13.619674682617188 + ], + [ + "▁kernels", + -13.619688034057615 + ], + [ + "▁endings", + -13.619688987731934 + ], + [ + "▁Natal", + -13.619745254516602 + ], + [ + "▁Invisible", + -13.619902610778809 + ], + [ + "▁Meets", + -13.619916915893556 + ], + [ + "▁figs", + -13.619924545288086 + ], + [ + "▁RAD", + -13.619928359985352 + ], + [ + "▁flashed", + -13.61999225616455 + ], + [ + "▁Waterproofing", + -13.620019912719728 + ], + [ + "▁crores", + -13.6200532913208 + ], + [ + "▁Colleen", + -13.620110511779783 + ], + [ + "▁Sequence", + -13.62012767791748 + ], + [ + "▁Maxi", + -13.62013053894043 + ], + [ + "▁franchisees", + -13.620172500610352 + ], + [ + "▁Sz", + -13.620197296142578 + ], + [ + "Exclusive", + -13.620258331298828 + ], + [ + "Purpose", + -13.620261192321776 + ], + [ + "husband", + -13.620267868041992 + ], + [ + "▁Cant", + -13.620332717895508 + ], + [ + "▁Younger", + -13.620333671569824 + ], + [ + "▁bouts", + -13.62034034729004 + ], + [ + "▁clocked", + -13.620433807373049 + ], + [ + "▁IEC", + -13.620442390441896 + ], + [ + "▁Newly", + -13.620476722717283 + ], + [ + "Dade", + -13.62051010131836 + ], + [ + "еа", + -13.620527267456056 + ], + [ + "Rub", + -13.620567321777344 + ], + [ + "▁Practitioners", + -13.62057876586914 + ], + [ + "▁Receiver", + -13.62077522277832 + ], + [ + "▁Freezer", + -13.620792388916016 + ], + [ + "expert", + -13.620829582214355 + ], + [ + "▁Courthouse", + -13.620875358581545 + ], + [ + "▁Raman", + -13.620911598205566 + ], + [ + "▁NX", + -13.620945930480955 + ], + [ + "▁Vaccine", + -13.620957374572754 + ], + [ + "▁newbies", + -13.620975494384766 + ], + [ + "▁Appreciation", + -13.621050834655762 + ], + [ + "▁Maiden", + -13.621084213256836 + ], + [ + "▁Vig", + -13.621099472045898 + ], + [ + "ede", + -13.621195793151855 + ], + [ + "▁restarted", + -13.621212005615234 + ], + [ + "agne", + -13.621319770812988 + ], + [ + "dp", + -13.621365547180176 + ], + [ + "▁energize", + -13.62138557434082 + ], + [ + "▁Glasses", + -13.621508598327637 + ], + [ + "▁Maga", + -13.621520042419434 + ], + [ + "▁Wh", + -13.621572494506836 + ], + [ + "▁Dyer", + -13.621628761291504 + ], + [ + "▁Horizontal", + -13.621648788452148 + ], + [ + "▁abolish", + -13.621648788452148 + ], + [ + "▁selfless", + -13.62166976928711 + ], + [ + "▁Lew", + -13.621702194213867 + ], + [ + "▁coloration", + -13.621718406677246 + ], + [ + "▁RU", + -13.621768951416016 + 
], + [ + "▁beak", + -13.621960639953612 + ], + [ + "▁familial", + -13.62200927734375 + ], + [ + "▁reluctance", + -13.62200927734375 + ], + [ + "▁Evangelical", + -13.6220121383667 + ], + [ + "▁rheumatoid", + -13.622014045715332 + ], + [ + "▁vortex", + -13.622014045715332 + ], + [ + "▁ponies", + -13.62201976776123 + ], + [ + "▁Puppet", + -13.622021675109863 + ], + [ + "▁iterative", + -13.622021675109863 + ], + [ + "FIELD", + -13.622035026550291 + ], + [ + "raven", + -13.62203884124756 + ], + [ + "bowl", + -13.622042655944824 + ], + [ + "factory", + -13.622061729431152 + ], + [ + "▁carbide", + -13.62209129333496 + ], + [ + "▁Libby", + -13.622157096862791 + ], + [ + "▁taboo", + -13.622164726257324 + ], + [ + "opia", + -13.622177124023438 + ], + [ + "▁flue", + -13.622183799743652 + ], + [ + "▁venting", + -13.622214317321776 + ], + [ + "Beth", + -13.622224807739258 + ], + [ + "rella", + -13.622306823730469 + ], + [ + "mono", + -13.622332572937012 + ], + [ + "▁doorways", + -13.622364044189451 + ], + [ + "GX", + -13.622384071350098 + ], + [ + "▁Aw", + -13.622536659240724 + ], + [ + "▁radiators", + -13.622547149658203 + ], + [ + "▁Shad", + -13.62270450592041 + ], + [ + "▁thaw", + -13.62270736694336 + ], + [ + "▁Greenway", + -13.622756004333496 + ], + [ + "▁Dispute", + -13.622760772705078 + ], + [ + "▁Revit", + -13.622812271118164 + ], + [ + "▁161", + -13.622825622558594 + ], + [ + "▁Morse", + -13.62283706665039 + ], + [ + "CCC", + -13.622920989990234 + ], + [ + "tops", + -13.62297248840332 + ], + [ + "▁forage", + -13.622983932495115 + ], + [ + "EI", + -13.623008728027344 + ], + [ + "▁Chant", + -13.62301540374756 + ], + [ + "gum", + -13.62310028076172 + ], + [ + "Enterprise", + -13.62314510345459 + ], + [ + "Finance", + -13.623173713684082 + ], + [ + "▁affiliations", + -13.623218536376951 + ], + [ + "minster", + -13.623231887817385 + ], + [ + "▁179", + -13.623263359069824 + ], + [ + "▁infer", + -13.623353004455566 + ], + [ + "▁42%", + -13.623368263244627 + ], + [ + "1-1", + -13.623394012451172 + ], + [ + "Sharing", + -13.623446464538574 + ], + [ + "▁2017:", + -13.623498916625977 + ], + [ + "▁Yvonne", + -13.623509407043455 + ], + [ + "▁washroom", + -13.623509407043455 + ], + [ + "▁Marcia", + -13.623552322387695 + ], + [ + "▁momentous", + -13.623560905456545 + ], + [ + "▁Analysts", + -13.623604774475098 + ], + [ + "▁(90", + -13.623624801635742 + ], + [ + "▁Doctoral", + -13.62364101409912 + ], + [ + "▁emptying", + -13.623684883117676 + ], + [ + "Wake", + -13.623695373535156 + ], + [ + "Scroll", + -13.623700141906738 + ], + [ + "itan", + -13.623703956604004 + ], + [ + "▁Shame", + -13.623703956604004 + ], + [ + "▁eaters", + -13.6237211227417 + ], + [ + "gens", + -13.623799324035645 + ], + [ + "▁Stickers", + -13.62388801574707 + ], + [ + "▁beetles", + -13.624041557312012 + ], + [ + "732", + -13.624051094055176 + ], + [ + "▁departs", + -13.624255180358888 + ], + [ + "▁Ost", + -13.624308586120604 + ], + [ + "ROC", + -13.624312400817873 + ], + [ + "zzi", + -13.624381065368652 + ], + [ + "▁lizard", + -13.624576568603516 + ], + [ + "▁affectionate", + -13.62466049194336 + ], + [ + "▁etsy", + -13.624778747558594 + ], + [ + "/32", + -13.624802589416504 + ], + [ + "▁vengeance", + -13.624834060668944 + ], + [ + "▁Orthopedic", + -13.624835014343262 + ], + [ + "▁rinsing", + -13.624835014343262 + ], + [ + "▁alumnus", + -13.624837875366213 + ], + [ + "▁Orchid", + -13.624838829040527 + ], + [ + "▁clomid", + -13.624838829040527 + ], + [ + "▁Hadoop", + -13.62484359741211 + ], + [ + "▁babysitter", + -13.624873161315918 + 
], + [ + "▁midsole", + -13.62491512298584 + ], + [ + "▁repression", + -13.624929428100586 + ], + [ + "▁clipped", + -13.624945640563965 + ], + [ + "▁QLD", + -13.624948501586914 + ], + [ + "▁soles", + -13.624951362609863 + ], + [ + "▁reorganization", + -13.625003814697266 + ], + [ + "▁Janice", + -13.625044822692873 + ], + [ + "ones", + -13.625066757202148 + ], + [ + "▁mummy", + -13.625077247619627 + ], + [ + "▁sms", + -13.625113487243652 + ], + [ + "▁adware", + -13.62514591217041 + ], + [ + "▁Takes", + -13.625165939331056 + ], + [ + "▁Ninth", + -13.625187873840332 + ], + [ + "▁jumpsuit", + -13.625203132629396 + ], + [ + "▁Quit", + -13.625216484069824 + ], + [ + "▁ramen", + -13.625255584716797 + ], + [ + "▁meld", + -13.625283241271973 + ], + [ + "ENG", + -13.625378608703612 + ], + [ + "▁preferring", + -13.625384330749512 + ], + [ + "▁challenger", + -13.625452995300291 + ], + [ + "▁eraser", + -13.62548542022705 + ], + [ + "quis", + -13.625598907470703 + ], + [ + "tags", + -13.62562084197998 + ], + [ + "▁govt", + -13.62563705444336 + ], + [ + "Id", + -13.6256685256958 + ], + [ + "▁ivy", + -13.625810623168944 + ], + [ + "investment", + -13.625916481018066 + ], + [ + "▁Ether", + -13.625986099243164 + ], + [ + "Hear", + -13.626030921936035 + ], + [ + "cine", + -13.62611198425293 + ], + [ + "Arc", + -13.626222610473633 + ], + [ + "▁Reflect", + -13.62623405456543 + ], + [ + "vitamin", + -13.626262664794922 + ], + [ + "", + -13.626270294189451 + ], + [ + "9-8", + -13.62633228302002 + ], + [ + "▁concerto", + -13.626334190368652 + ], + [ + "rry", + -13.626343727111816 + ], + [ + "6-8", + -13.626359939575195 + ], + [ + "▁nil", + -13.62636661529541 + ], + [ + "8:", + -13.626399040222168 + ], + [ + "▁Surg", + -13.626446723937988 + ], + [ + "▁Edo", + -13.62651824951172 + ], + [ + "seeking", + -13.626520156860352 + ], + [ + "Victoria", + -13.626529693603516 + ], + [ + "▁fielding", + -13.626575469970703 + ], + [ + "Laser", + -13.626703262329102 + ], + [ + "▁Extraction", + -13.626715660095217 + ], + [ + "327", + -13.626748085021973 + ], + [ + "join", + -13.626761436462402 + ], + [ + "525", + -13.626832962036133 + ], + [ + "robe", + -13.626864433288574 + ], + [ + "Hal", + -13.626893043518066 + ], + [ + "▁boulder", + -13.62692928314209 + ], + [ + "dj", + -13.627028465270996 + ], + [ + "▁exacerbate", + -13.627066612243652 + ], + [ + "▁meaty", + -13.627158164978027 + ], + [ + "▁Penta", + -13.627178192138672 + ], + [ + "6\"", + -13.62720012664795 + ], + [ + "▁chisel", + -13.627245903015137 + ], + [ + "quo", + -13.627286911010742 + ], + [ + "▁HARD", + -13.627375602722168 + ], + [ + "▁Tricks", + -13.627391815185549 + ], + [ + "Pol", + -13.627402305603027 + ], + [ + "▁hairdresser", + -13.627431869506836 + ], + [ + "▁alligator", + -13.627433776855469 + ], + [ + "▁avec", + -13.62744426727295 + ], + [ + "▁FAMILY", + -13.627479553222656 + ], + [ + "▁Squirrel", + -13.627479553222656 + ], + [ + "▁Malaga", + -13.62753200531006 + ], + [ + "▁breadcrumbs", + -13.627538681030272 + ], + [ + "▁Artwork", + -13.62754249572754 + ], + [ + "▁777", + -13.627620697021484 + ], + [ + "▁Cummins", + -13.627620697021484 + ], + [ + "▁Vuitton", + -13.627666473388672 + ], + [ + "▁horrendous", + -13.627666473388672 + ], + [ + "▁transducer", + -13.627666473388672 + ], + [ + "▁Cummings", + -13.627668380737305 + ], + [ + "▁fledgling", + -13.62766933441162 + ], + [ + "▁Hobbit", + -13.627687454223633 + ], + [ + "gles", + -13.62768840789795 + ], + [ + "▁SAFE", + -13.627692222595217 + ], + [ + "eval", + -13.627699851989746 + ], + [ + "▁infotainment", + 
-13.62770175933838 + ], + [ + "▁swimwear", + -13.627755165100098 + ], + [ + "▁sampler", + -13.627760887145996 + ], + [ + "▁Statements", + -13.627772331237791 + ], + [ + "▁Draper", + -13.62777328491211 + ], + [ + "▁Skinny", + -13.62782382965088 + ], + [ + "▁Tyre", + -13.62782859802246 + ], + [ + "▁subordinate", + -13.627842903137209 + ], + [ + "▁injectable", + -13.627900123596191 + ], + [ + "▁Harrow", + -13.627959251403809 + ], + [ + "▁Polly", + -13.62803840637207 + ], + [ + "▁Optimize", + -13.628144264221191 + ], + [ + "CIS", + -13.628190994262695 + ], + [ + "▁Rockford", + -13.628196716308594 + ], + [ + "KL", + -13.628241539001465 + ], + [ + "▁SDS", + -13.628273010253906 + ], + [ + "▁Eligibility", + -13.62830638885498 + ], + [ + "provide", + -13.628321647644045 + ], + [ + "▁critiques", + -13.628348350524902 + ], + [ + "▁unpacking", + -13.628361701965332 + ], + [ + "GMO", + -13.62836742401123 + ], + [ + "▁solos", + -13.628373146057127 + ], + [ + "Release", + -13.628375053405762 + ], + [ + "▁usher", + -13.62838077545166 + ], + [ + "roe", + -13.628421783447266 + ], + [ + "▁BYU", + -13.628425598144531 + ], + [ + "6/", + -13.628676414489746 + ], + [ + "▁benefitted", + -13.62873649597168 + ], + [ + "▁1888", + -13.628761291503906 + ], + [ + "compliance", + -13.628780364990234 + ], + [ + "orders", + -13.62887477874756 + ], + [ + "▁assertive", + -13.62888240814209 + ], + [ + "▁Royce", + -13.628920555114746 + ], + [ + "▁blinking", + -13.628942489624023 + ], + [ + "possibly", + -13.628957748413086 + ], + [ + "ube", + -13.628965377807615 + ], + [ + "▁fracking", + -13.629036903381348 + ], + [ + "▁CBT", + -13.629056930541992 + ], + [ + "atomic", + -13.629063606262209 + ], + [ + "Solution", + -13.629107475280762 + ], + [ + "Honor", + -13.629148483276367 + ], + [ + "▁Dresser", + -13.629250526428224 + ], + [ + "posts", + -13.629262924194336 + ], + [ + "cliffe", + -13.629348754882812 + ], + [ + "▁pg", + -13.629374504089355 + ], + [ + "▁Horizons", + -13.629515647888184 + ], + [ + "▁pamper", + -13.629603385925291 + ], + [ + "RIES", + -13.629630088806152 + ], + [ + "▁Foto", + -13.629632949829102 + ], + [ + "Transform", + -13.629634857177734 + ], + [ + "bg", + -13.629717826843262 + ], + [ + "▁nun", + -13.629727363586426 + ], + [ + "Included", + -13.62982177734375 + ], + [ + "▁Fitzpatrick", + -13.62989616394043 + ], + [ + "iOS", + -13.629907608032228 + ], + [ + "▁marketplaces", + -13.629918098449709 + ], + [ + "690", + -13.630047798156738 + ], + [ + "▁buyout", + -13.63018035888672 + ], + [ + "▁psychologically", + -13.630207061767578 + ], + [ + "])", + -13.630270957946776 + ], + [ + "▁alphabetical", + -13.630292892456056 + ], + [ + "qual", + -13.630293846130373 + ], + [ + "▁contradiction", + -13.630342483520508 + ], + [ + "▁equi", + -13.63034439086914 + ], + [ + "haha", + -13.63041877746582 + ], + [ + "▁downgrade", + -13.63047695159912 + ], + [ + "▁Riga", + -13.630477905273438 + ], + [ + "ROUND", + -13.630495071411133 + ], + [ + "0.9", + -13.63050365447998 + ], + [ + "▁fluorescence", + -13.630508422851562 + ], + [ + "▁oscillator", + -13.630510330200195 + ], + [ + "▁Reis", + -13.630517959594728 + ], + [ + "▁macroeconomic", + -13.630536079406738 + ], + [ + "▁Emory", + -13.630539894104004 + ], + [ + "ational", + -13.630558967590332 + ], + [ + "859", + -13.630590438842772 + ], + [ + "▁Merchandise", + -13.630603790283203 + ], + [ + "▁__", + -13.630677223205566 + ], + [ + "▁sliver", + -13.630678176879885 + ], + [ + "▁209", + -13.63072109222412 + ], + [ + "▁CIP", + -13.630735397338867 + ], + [ + "3-6", + 
-13.630749702453612 + ], + [ + "▁bloated", + -13.630772590637209 + ], + [ + "▁Cartier", + -13.63080596923828 + ], + [ + "▁blond", + -13.63085651397705 + ], + [ + "▁copious", + -13.631004333496094 + ], + [ + "lani", + -13.631013870239258 + ], + [ + "Marc", + -13.631101608276367 + ], + [ + "▁Bhd", + -13.631230354309082 + ], + [ + "▁glassware", + -13.631287574768066 + ], + [ + "▁NEO", + -13.631410598754885 + ], + [ + "Consumers", + -13.631549835205078 + ], + [ + "▁MVC", + -13.631592750549316 + ], + [ + "Ross", + -13.631689071655272 + ], + [ + "▁strainer", + -13.631693840026855 + ], + [ + "▁Processes", + -13.631754875183104 + ], + [ + "▁exertion", + -13.631765365600586 + ], + [ + "▁anticipates", + -13.631842613220217 + ], + [ + "individual", + -13.63186264038086 + ], + [ + "Foreign", + -13.631875991821287 + ], + [ + "▁Bulb", + -13.631887435913086 + ], + [ + "Colorado", + -13.631900787353516 + ], + [ + "faceted", + -13.631905555725098 + ], + [ + "catering", + -13.631964683532717 + ], + [ + "▁Producers", + -13.63197898864746 + ], + [ + "▁£12", + -13.63199234008789 + ], + [ + "chor", + -13.632081031799316 + ], + [ + "▁racket", + -13.63209629058838 + ], + [ + "▁Miya", + -13.6322021484375 + ], + [ + "▁peril", + -13.632257461547852 + ], + [ + "▁bangs", + -13.6322603225708 + ], + [ + "wc", + -13.632266998291016 + ], + [ + "ancing", + -13.632299423217772 + ], + [ + "offer", + -13.632307052612305 + ], + [ + "▁exchanger", + -13.63232135772705 + ], + [ + "oodle", + -13.632396697998049 + ], + [ + "DIY", + -13.632405281066896 + ], + [ + "▁Universit", + -13.632437705993652 + ], + [ + "▁devour", + -13.632445335388184 + ], + [ + "▁relays", + -13.63246726989746 + ], + [ + "Mostly", + -13.632471084594728 + ], + [ + "▁unisex", + -13.632513999938965 + ], + [ + "▁Paying", + -13.632527351379396 + ], + [ + "▁Uzbekistan", + -13.632529258728027 + ], + [ + "Years", + -13.632553100585938 + ], + [ + "▁takeaways", + -13.632554054260254 + ], + [ + "▁laces", + -13.632567405700684 + ], + [ + "wards", + -13.632575988769531 + ], + [ + "angan", + -13.63258171081543 + ], + [ + "▁Merc", + -13.632617950439451 + ], + [ + "Contract", + -13.632644653320312 + ], + [ + "▁Transformers", + -13.632649421691896 + ], + [ + "▁Lever", + -13.63274097442627 + ], + [ + "sensor", + -13.632762908935549 + ], + [ + "▁Clarity", + -13.632847785949709 + ], + [ + "▁9.0", + -13.632858276367188 + ], + [ + "▁amalgam", + -13.632896423339844 + ], + [ + "▁manifests", + -13.632925033569336 + ], + [ + "NBC", + -13.633086204528809 + ], + [ + "Linda", + -13.633146286010742 + ], + [ + "Upgrade", + -13.63315200805664 + ], + [ + "409", + -13.633158683776855 + ], + [ + "▁Cate", + -13.633186340332031 + ], + [ + "▁Rog", + -13.63319206237793 + ], + [ + "▁overtake", + -13.63332462310791 + ], + [ + "▁McMahon", + -13.633357048034668 + ], + [ + "▁acknowledgment", + -13.633357048034668 + ], + [ + "▁exuberant", + -13.633357048034668 + ], + [ + "▁prettiest", + -13.633357048034668 + ], + [ + "▁rejuvenating", + -13.633357048034668 + ], + [ + "▁utilitarian", + -13.633357048034668 + ], + [ + "▁isolating", + -13.633362770080566 + ], + [ + "▁Louvre", + -13.633366584777832 + ], + [ + "▁1971,", + -13.63336944580078 + ], + [ + "▁porridge", + -13.633378028869627 + ], + [ + "▁KI", + -13.63338851928711 + ], + [ + "▁squadron", + -13.633453369140623 + ], + [ + "▁STILL", + -13.63351058959961 + ], + [ + "▁Jumbo", + -13.633522033691406 + ], + [ + "▁Mecca", + -13.633540153503418 + ], + [ + "▁Whisky", + -13.633687019348145 + ], + [ + "▁Quaker", + -13.633715629577637 + ], + [ + "▁Margin", + 
-13.633783340454102 + ], + [ + "downs", + -13.633807182312012 + ], + [ + "▁Bryn", + -13.633809089660645 + ], + [ + "▁Shay", + -13.633852005004885 + ], + [ + "▁1,100", + -13.633866310119627 + ], + [ + "▁vibrate", + -13.633984565734863 + ], + [ + "aceae", + -13.63408374786377 + ], + [ + "▁undone", + -13.634166717529297 + ], + [ + "▁Heron", + -13.634343147277832 + ], + [ + "Corn", + -13.63435173034668 + ], + [ + "▁uv", + -13.634495735168455 + ], + [ + "860", + -13.63450813293457 + ], + [ + "▁removalists", + -13.634516716003418 + ], + [ + "▁apocalypse", + -13.63461685180664 + ], + [ + "ZO", + -13.634675979614258 + ], + [ + "ungs", + -13.634693145751951 + ], + [ + "jung", + -13.63469409942627 + ], + [ + "Vote", + -13.634702682495115 + ], + [ + "▁operatives", + -13.634836196899414 + ], + [ + "NED", + -13.634925842285156 + ], + [ + "▁Stations", + -13.634937286376951 + ], + [ + "▁Zam", + -13.63499355316162 + ], + [ + "▁1864", + -13.634995460510254 + ], + [ + "▁elites", + -13.635039329528809 + ], + [ + "ONS", + -13.635083198547363 + ], + [ + "Systems", + -13.635090827941896 + ], + [ + "▁Scala", + -13.635190963745115 + ], + [ + "▁Fundamentals", + -13.635202407836914 + ], + [ + "looks", + -13.63528060913086 + ], + [ + "Domain", + -13.635294914245604 + ], + [ + "▁baseman", + -13.6353178024292 + ], + [ + "▁Tron", + -13.635424613952637 + ], + [ + "▁mockup", + -13.63552951812744 + ], + [ + "gsm", + -13.635538101196287 + ], + [ + "▁grieve", + -13.63561725616455 + ], + [ + "bye", + -13.635643005371094 + ], + [ + "POL", + -13.635655403137209 + ], + [ + "zol", + -13.635665893554688 + ], + [ + "▁188", + -13.635677337646484 + ], + [ + "Mode", + -13.635679244995115 + ], + [ + "strand", + -13.635878562927246 + ], + [ + "▁vin", + -13.635963439941406 + ], + [ + "▁girly", + -13.636021614074709 + ], + [ + "exp", + -13.636029243469238 + ], + [ + "▁Gua", + -13.636189460754396 + ], + [ + "▁studded", + -13.636194229125977 + ], + [ + "▁CY", + -13.63620948791504 + ], + [ + "▁dispensary", + -13.63621425628662 + ], + [ + "▁endocrine", + -13.63621425628662 + ], + [ + "▁molasses", + -13.63621425628662 + ], + [ + "▁integrative", + -13.636216163635254 + ], + [ + "▁extravaganza", + -13.63621711730957 + ], + [ + "▁Fulbright", + -13.636218070983888 + ], + [ + "▁fluency", + -13.636234283447266 + ], + [ + "▁(1996)", + -13.636244773864746 + ], + [ + "▁Burbank", + -13.636258125305176 + ], + [ + "▁scrapped", + -13.636258125305176 + ], + [ + "▁Equally", + -13.636311531066896 + ], + [ + "▁Outcomes", + -13.636378288269045 + ], + [ + "▁resultant", + -13.636392593383787 + ], + [ + "01)", + -13.63640308380127 + ], + [ + "▁synonyms", + -13.636456489562988 + ], + [ + "▁hyperlinks", + -13.63650131225586 + ], + [ + "WIN", + -13.636509895324709 + ], + [ + "Ber", + -13.636512756347656 + ], + [ + "▁Seville", + -13.636528968811035 + ], + [ + "▁prophetic", + -13.636530876159668 + ], + [ + "▁ensued", + -13.636566162109377 + ], + [ + "optic", + -13.636568069458008 + ], + [ + "▁Drizzle", + -13.636587142944336 + ], + [ + "Pain", + -13.636621475219728 + ], + [ + "CJ", + -13.63671875 + ], + [ + "▁315", + -13.636744499206545 + ], + [ + "▁Comet", + -13.636763572692873 + ], + [ + "▁Hector", + -13.63676643371582 + ], + [ + "▁Gla", + -13.636780738830566 + ], + [ + "▁(8)", + -13.636784553527832 + ], + [ + "▁applauded", + -13.63679313659668 + ], + [ + "▁classmate", + -13.636796951293944 + ], + [ + "▁wiggle", + -13.636818885803224 + ], + [ + "stration", + -13.636921882629396 + ], + [ + "▁swan", + -13.636922836303713 + ], + [ + "▁Tera", + -13.636992454528809 + ], + 
[ + "▁Busch", + -13.63701057434082 + ], + [ + "heimer", + -13.63701629638672 + ], + [ + "▁stencils", + -13.6370267868042 + ], + [ + "▁Packard", + -13.637036323547363 + ], + [ + "▁Tamara", + -13.637046813964844 + ], + [ + "▁Slater", + -13.637078285217283 + ], + [ + "Twin", + -13.63709545135498 + ], + [ + "▁summons", + -13.637096405029297 + ], + [ + "▁degraded", + -13.637141227722168 + ], + [ + "lover", + -13.637207984924316 + ], + [ + "▁ingrained", + -13.637236595153809 + ], + [ + "▁Moreno", + -13.637282371520996 + ], + [ + "▁apiece", + -13.63730239868164 + ], + [ + "▁EI", + -13.637368202209473 + ], + [ + "▁fam", + -13.637377738952637 + ], + [ + "wipe", + -13.637393951416016 + ], + [ + "▁Static", + -13.637502670288086 + ], + [ + "ERP", + -13.63753890991211 + ], + [ + "▁zippered", + -13.637577056884766 + ], + [ + "ool", + -13.637592315673828 + ], + [ + "Flu", + -13.637632369995115 + ], + [ + "▁Obtain", + -13.6376371383667 + ], + [ + "▁eternally", + -13.637653350830078 + ], + [ + "▁mL", + -13.637653350830078 + ], + [ + "▁skewer", + -13.637721061706545 + ], + [ + "▁punishing", + -13.637735366821287 + ], + [ + "IMM", + -13.637773513793944 + ], + [ + "▁pri", + -13.637804985046388 + ], + [ + "▁unblock", + -13.63795280456543 + ], + [ + "wolf", + -13.637964248657228 + ], + [ + "-74", + -13.637978553771973 + ], + [ + "cies", + -13.637986183166504 + ], + [ + "0.8", + -13.638025283813477 + ], + [ + ":00", + -13.638039588928224 + ], + [ + "trace", + -13.638042449951172 + ], + [ + "▁2018).", + -13.638069152832031 + ], + [ + "Civil", + -13.638072967529297 + ], + [ + "▁Spirits", + -13.638094902038574 + ], + [ + "adu", + -13.63820457458496 + ], + [ + "ECO", + -13.63822078704834 + ], + [ + "▁McCa", + -13.638251304626465 + ], + [ + "AMI", + -13.638275146484377 + ], + [ + "bola", + -13.638335227966309 + ], + [ + "▁BRA", + -13.638426780700684 + ], + [ + "ledge", + -13.638442993164062 + ], + [ + "Aaron", + -13.638471603393556 + ], + [ + "BTC", + -13.63862133026123 + ], + [ + "▁Puff", + -13.638757705688477 + ], + [ + "▁Hungry", + -13.638765335083008 + ], + [ + "▁Yup", + -13.638812065124512 + ], + [ + "▁cloths", + -13.638838768005373 + ], + [ + "Ski", + -13.638968467712402 + ], + [ + "▁Merri", + -13.639062881469728 + ], + [ + "▁orthopaedic", + -13.639079093933104 + ], + [ + "▁propensity", + -13.639079093933104 + ], + [ + "▁spoof", + -13.639079093933104 + ], + [ + "▁unthinkable", + -13.639079093933104 + ], + [ + "▁Scrabble", + -13.639080047607422 + ], + [ + "Options", + -13.639081954956056 + ], + [ + "▁Knicks", + -13.639083862304688 + ], + [ + "▁(1995)", + -13.63911247253418 + ], + [ + "▁Lucknow", + -13.639114379882812 + ], + [ + "▁polarization", + -13.639172554016112 + ], + [ + "▁Mohawk", + -13.63917636871338 + ], + [ + "▁lather", + -13.63918113708496 + ], + [ + "▁fulfilment", + -13.639195442199709 + ], + [ + "▁Apk", + -13.639201164245604 + ], + [ + "▁handshake", + -13.639227867126465 + ], + [ + "▁Christy", + -13.639229774475098 + ], + [ + "▁folic", + -13.63924503326416 + ], + [ + "▁segregated", + -13.639286994934082 + ], + [ + "▁Convenience", + -13.639307022094728 + ], + [ + "▁unchecked", + -13.639309883117676 + ], + [ + "Links", + -13.639321327209473 + ], + [ + "▁Define", + -13.639331817626951 + ], + [ + "▁5.8", + -13.639349937438965 + ], + [ + "▁resins", + -13.639381408691406 + ], + [ + "▁Atkins", + -13.639387130737305 + ], + [ + "▁Chalet", + -13.63939380645752 + ], + [ + "▁uniformity", + -13.639409065246582 + ], + [ + "▁Sten", + -13.63941764831543 + ], + [ + "otte", + -13.639479637145996 + ], + [ + "▁Denim", 
+ -13.63955020904541 + ], + [ + "▁Wins", + -13.639554023742676 + ], + [ + "strike", + -13.639610290527344 + ], + [ + "▁assays", + -13.63961124420166 + ], + [ + "▁broiler", + -13.639623641967772 + ], + [ + "▁Jury", + -13.639656066894531 + ], + [ + "ethyl", + -13.639657974243164 + ], + [ + "▁(80", + -13.639691352844238 + ], + [ + "urant", + -13.639714241027832 + ], + [ + "Teachers", + -13.639737129211426 + ], + [ + "VAL", + -13.639763832092283 + ], + [ + "▁Arrival", + -13.63987159729004 + ], + [ + "▁Pivot", + -13.639914512634276 + ], + [ + "▁cemented", + -13.639921188354492 + ], + [ + "▁humankind", + -13.640036582946776 + ], + [ + "▁Marino", + -13.64003849029541 + ], + [ + "▁Warhol", + -13.640043258666992 + ], + [ + "beth", + -13.640094757080078 + ], + [ + "▁Capri", + -13.640094757080078 + ], + [ + "▁Dine", + -13.640137672424316 + ], + [ + "▁Sens", + -13.640174865722656 + ], + [ + "349", + -13.640185356140137 + ], + [ + "▁underdog", + -13.640228271484377 + ], + [ + "60.", + -13.640243530273438 + ], + [ + "▁BTS", + -13.640280723571776 + ], + [ + "cook", + -13.640399932861328 + ], + [ + "5.4", + -13.64040470123291 + ], + [ + "▁ECM", + -13.64044952392578 + ], + [ + "▁rollover", + -13.64051914215088 + ], + [ + "▁Bent", + -13.64052677154541 + ], + [ + "▁borderline", + -13.640565872192385 + ], + [ + "▁Missions", + -13.640583992004396 + ], + [ + "motivated", + -13.640607833862305 + ], + [ + "▁Folks", + -13.640624046325684 + ], + [ + "Admission", + -13.640658378601074 + ], + [ + "Length", + -13.640668869018556 + ], + [ + "858", + -13.640750885009766 + ], + [ + "▁nonstop", + -13.640777587890623 + ], + [ + "bop", + -13.64084529876709 + ], + [ + "Bag", + -13.640868186950684 + ], + [ + "Communication", + -13.64087963104248 + ], + [ + "menu", + -13.64090633392334 + ], + [ + "▁parkland", + -13.640942573547363 + ], + [ + "partner", + -13.640948295593262 + ], + [ + "kart", + -13.64095687866211 + ], + [ + "catch", + -13.64100456237793 + ], + [ + "-99", + -13.64102268218994 + ], + [ + "Teaching", + -13.641030311584473 + ], + [ + "cad", + -13.641072273254396 + ], + [ + "▁Engineered", + -13.641093254089355 + ], + [ + "▁Farming", + -13.641228675842283 + ], + [ + "AHA", + -13.641263008117676 + ], + [ + "registered", + -13.64128589630127 + ], + [ + "▁Tw", + -13.64132595062256 + ], + [ + "25,000", + -13.64134120941162 + ], + [ + "▁bios", + -13.64136028289795 + ], + [ + "▁COME", + -13.641398429870604 + ], + [ + "▁Affect", + -13.64140796661377 + ], + [ + "LOC", + -13.641460418701172 + ], + [ + "▁dew", + -13.641517639160156 + ], + [ + "▁mixers", + -13.6415376663208 + ], + [ + "▁gloom", + -13.641719818115234 + ], + [ + "dive", + -13.641780853271484 + ], + [ + "▁legumes", + -13.641820907592772 + ], + [ + "▁WEST", + -13.641843795776367 + ], + [ + "roth", + -13.641873359680176 + ], + [ + "archive", + -13.641927719116213 + ], + [ + "▁IMPLIED", + -13.641952514648438 + ], + [ + "▁Zodiac", + -13.64195442199707 + ], + [ + "▁draught", + -13.641956329345703 + ], + [ + "▁rotates", + -13.641961097717283 + ], + [ + "▁WARRANTY", + -13.641962051391602 + ], + [ + "fell", + -13.64198112487793 + ], + [ + "▁solidly", + -13.641996383666992 + ], + [ + "pex", + -13.642027854919434 + ], + [ + "▁detachment", + -13.64203643798828 + ], + [ + "▁polarized", + -13.642047882080078 + ], + [ + "▁Distillery", + -13.642049789428713 + ], + [ + "mau", + -13.64206314086914 + ], + [ + "▁Wight", + -13.64207363128662 + ], + [ + "lew", + -13.642088890075684 + ], + [ + "capped", + -13.642105102539062 + ], + [ + "▁dollop", + -13.642171859741213 + ], + [ + 
"▁1812", + -13.642236709594728 + ], + [ + "▁Chipotle", + -13.642264366149902 + ], + [ + "▁eff", + -13.642295837402344 + ], + [ + "▁Lakh", + -13.642301559448242 + ], + [ + "▁toxin", + -13.642325401306152 + ], + [ + "bots", + -13.642348289489746 + ], + [ + "▁Makeover", + -13.64236831665039 + ], + [ + "▁catchment", + -13.64246654510498 + ], + [ + "▁agri", + -13.64247703552246 + ], + [ + "▁Rebels", + -13.642496109008787 + ], + [ + "Sony", + -13.64252471923828 + ], + [ + "▁agreeable", + -13.642560958862305 + ], + [ + "edited", + -13.642562866210938 + ], + [ + "▁Neb", + -13.642614364624023 + ], + [ + "▁enigmatic", + -13.642681121826172 + ], + [ + "▁$5.00", + -13.642694473266602 + ], + [ + "wheeler", + -13.642722129821776 + ], + [ + "GMT", + -13.642723083496094 + ], + [ + "-77", + -13.642727851867676 + ], + [ + "ryn", + -13.642743110656738 + ], + [ + "▁presided", + -13.642745018005373 + ], + [ + "▁barge", + -13.642802238464355 + ], + [ + "▁stag", + -13.64281177520752 + ], + [ + "gut", + -13.642914772033691 + ], + [ + "▁Dont", + -13.643004417419434 + ], + [ + "▁Colours", + -13.64303207397461 + ], + [ + "oise", + -13.643085479736328 + ], + [ + "folding", + -13.643141746520996 + ], + [ + "▁recon", + -13.643214225769045 + ], + [ + "cks", + -13.64326000213623 + ], + [ + "4.9", + -13.643288612365724 + ], + [ + "▁snowball", + -13.643291473388672 + ], + [ + "▁1889", + -13.643328666687012 + ], + [ + "▁orient", + -13.643516540527344 + ], + [ + "alcoholic", + -13.643571853637695 + ], + [ + "riz", + -13.643590927124023 + ], + [ + "25%", + -13.643593788146973 + ], + [ + "acious", + -13.643622398376465 + ], + [ + "behind", + -13.643651008605955 + ], + [ + "▁lull", + -13.643683433532717 + ], + [ + "▁sen", + -13.643742561340332 + ], + [ + "▁Entries", + -13.643766403198242 + ], + [ + "java", + -13.643768310546877 + ], + [ + "Horse", + -13.64378547668457 + ], + [ + "▁shampoos", + -13.643808364868164 + ], + [ + "▁adjectives", + -13.643813133239746 + ], + [ + "▁Riot", + -13.64386749267578 + ], + [ + "▁Snack", + -13.643877983093262 + ], + [ + "Integrated", + -13.64404582977295 + ], + [ + "-59", + -13.64415454864502 + ], + [ + "▁Bene", + -13.644176483154297 + ], + [ + "singer", + -13.644214630126951 + ], + [ + "rounder", + -13.644232749938965 + ], + [ + "▁dissipate", + -13.644232749938965 + ], + [ + "▁Lub", + -13.644234657287598 + ], + [ + "▁locales", + -13.644254684448242 + ], + [ + "BK", + -13.644320487976074 + ], + [ + "Investors", + -13.644350051879885 + ], + [ + "Planet", + -13.644454956054688 + ], + [ + "▁Sonia", + -13.64449405670166 + ], + [ + "▁Widget", + -13.644533157348633 + ], + [ + "cid", + -13.644536018371582 + ], + [ + "▁Revenge", + -13.644575119018556 + ], + [ + "▁habitual", + -13.644630432128906 + ], + [ + "TMt", + -13.644675254821776 + ], + [ + "799", + -13.644742965698242 + ], + [ + "collar", + -13.644814491271973 + ], + [ + "▁Accessibility", + -13.644814491271973 + ], + [ + "▁$38", + -13.64482307434082 + ], + [ + "▁exaggeration", + -13.644834518432615 + ], + [ + "▁illustrative", + -13.644834518432615 + ], + [ + "▁smuggling", + -13.644834518432615 + ], + [ + "▁unattractive", + -13.644847869873049 + ], + [ + "▁readability", + -13.644852638244627 + ], + [ + "▁garland", + -13.644856452941896 + ], + [ + "▁HAND", + -13.644859313964844 + ], + [ + "462", + -13.644875526428224 + ], + [ + "DEM", + -13.644908905029297 + ], + [ + "▁lagging", + -13.64492893218994 + ], + [ + "▁decrypt", + -13.64493465423584 + ], + [ + "2,500", + -13.64496612548828 + ], + [ + "▁Leighton", + -13.644973754882812 + ], + [ + 
"▁catwalk", + -13.644973754882812 + ], + [ + "▁LH", + -13.64500617980957 + ], + [ + "bek", + -13.645029067993164 + ], + [ + "▁Firewall", + -13.645071983337402 + ], + [ + "▁Leica", + -13.645075798034668 + ], + [ + "▁scavenger", + -13.645100593566896 + ], + [ + "▁caramelized", + -13.645119667053224 + ], + [ + "▁Maroon", + -13.645184516906738 + ], + [ + "492", + -13.645196914672852 + ], + [ + "fx", + -13.645204544067385 + ], + [ + "▁Duluth", + -13.645218849182127 + ], + [ + "▁BRAND", + -13.64524745941162 + ], + [ + "dhan", + -13.645256996154783 + ], + [ + "▁Sponsors", + -13.645299911499023 + ], + [ + "1996", + -13.645305633544922 + ], + [ + "rius", + -13.645341873168944 + ], + [ + "▁flanked", + -13.645344734191896 + ], + [ + "▁7.3", + -13.645413398742676 + ], + [ + "▁Ashland", + -13.645440101623535 + ], + [ + "Topic", + -13.645458221435549 + ], + [ + "▁Hippo", + -13.645463943481444 + ], + [ + "$5", + -13.645480155944824 + ], + [ + "eza", + -13.64553165435791 + ], + [ + "▁Reddy", + -13.645538330078123 + ], + [ + "▁Pineapple", + -13.645594596862791 + ], + [ + "▁creeks", + -13.645636558532717 + ], + [ + "Fund", + -13.64569854736328 + ], + [ + "030", + -13.64578628540039 + ], + [ + "▁zeal", + -13.645824432373049 + ], + [ + "▁Imported", + -13.64586067199707 + ], + [ + "▁mule", + -13.645862579345703 + ], + [ + "▁functionally", + -13.645977973937988 + ], + [ + "▁moths", + -13.645977973937988 + ], + [ + "▁milli", + -13.64602279663086 + ], + [ + "▁Rashid", + -13.646034240722656 + ], + [ + "Ob", + -13.646068572998049 + ], + [ + "Radi", + -13.64610767364502 + ], + [ + "678", + -13.64613151550293 + ], + [ + "▁CST", + -13.646163940429688 + ], + [ + "▁Ortho", + -13.646240234375 + ], + [ + "6.2", + -13.64627170562744 + ], + [ + "▁Responsive", + -13.64628791809082 + ], + [ + "gated", + -13.646302223205566 + ], + [ + "▁570", + -13.64637851715088 + ], + [ + "pig", + -13.64641284942627 + ], + [ + "378", + -13.646442413330078 + ], + [ + "▁parting", + -13.646470069885254 + ], + [ + "component", + -13.646562576293944 + ], + [ + "▁IIT", + -13.646573066711426 + ], + [ + "improvement", + -13.64660358428955 + ], + [ + "▁undertakes", + -13.64669132232666 + ], + [ + "▁Enhance", + -13.64671802520752 + ], + [ + "zig", + -13.646822929382324 + ], + [ + "▁Lana", + -13.64687728881836 + ], + [ + "▁£500", + -13.646949768066406 + ], + [ + "▁Genie", + -13.646963119506836 + ], + [ + "295", + -13.64698314666748 + ], + [ + "OSH", + -13.647119522094728 + ], + [ + "▁uniformly", + -13.647123336791992 + ], + [ + "RAD", + -13.647180557250977 + ], + [ + "▁shameful", + -13.647244453430176 + ], + [ + "iao", + -13.647310256958008 + ], + [ + "▁Highlight", + -13.647339820861816 + ], + [ + "itarian", + -13.64735507965088 + ], + [ + "426", + -13.647377014160156 + ], + [ + "▁qt", + -13.647431373596191 + ], + [ + "▁SAC", + -13.647445678710938 + ], + [ + "▁phoenix", + -13.647461891174316 + ], + [ + "▁182", + -13.647583961486816 + ], + [ + "4.7", + -13.64768409729004 + ], + [ + "▁netbook", + -13.647686958312988 + ], + [ + "▁uncanny", + -13.647725105285645 + ], + [ + "▁unmanned", + -13.647725105285645 + ], + [ + "▁Unilever", + -13.64772891998291 + ], + [ + "▁Antenna", + -13.647729873657228 + ], + [ + "▁ratchet", + -13.647729873657228 + ], + [ + "▁indicted", + -13.64773178100586 + ], + [ + "▁paranoid", + -13.647732734680176 + ], + [ + "▁untuk", + -13.647732734680176 + ], + [ + "▁cunning", + -13.647751808166504 + ], + [ + "▁(1997)", + -13.6477632522583 + ], + [ + "▁nat", + -13.647790908813477 + ], + [ + "▁pecan", + -13.647794723510742 + ], + [ + 
"▁Burgess", + -13.647820472717283 + ], + [ + "▁elves", + -13.647829055786133 + ], + [ + "▁stricter", + -13.647871017456056 + ], + [ + "▁222", + -13.64787483215332 + ], + [ + "▁structurally", + -13.64787483215332 + ], + [ + "▁Leasing", + -13.6478853225708 + ], + [ + "▁abortions", + -13.64789581298828 + ], + [ + ">>>", + -13.647945404052734 + ], + [ + "545", + -13.647984504699709 + ], + [ + "▁rift", + -13.64805030822754 + ], + [ + "▁Chevron", + -13.648117065429688 + ], + [ + "▁branching", + -13.648133277893066 + ], + [ + "▁badass", + -13.64815902709961 + ], + [ + "178", + -13.648234367370604 + ], + [ + "▁Univ", + -13.648244857788086 + ], + [ + "▁Jays", + -13.648249626159668 + ], + [ + "Initial", + -13.648283004760742 + ], + [ + "▁sind", + -13.648290634155272 + ], + [ + "▁Committees", + -13.648293495178224 + ], + [ + "jama", + -13.648359298706056 + ], + [ + "▁INT", + -13.648369789123535 + ], + [ + "▁signalling", + -13.648423194885254 + ], + [ + "▁towed", + -13.648432731628418 + ], + [ + "0).", + -13.648455619812012 + ], + [ + "▁Scent", + -13.648515701293944 + ], + [ + "▁CRO", + -13.648548126220703 + ], + [ + "▁foto", + -13.648627281188965 + ], + [ + "tix", + -13.648639678955078 + ], + [ + "10:", + -13.648710250854492 + ], + [ + "▁Malm", + -13.648722648620604 + ], + [ + "▁Fau", + -13.64873504638672 + ], + [ + "▁meander", + -13.648760795593262 + ], + [ + "▁Medication", + -13.64877986907959 + ], + [ + "790", + -13.648799896240234 + ], + [ + "▁Barnett", + -13.64886474609375 + ], + [ + "enge", + -13.64890480041504 + ], + [ + "▁noses", + -13.648932456970217 + ], + [ + "▁Rival", + -13.64899730682373 + ], + [ + "7:", + -13.649003982543944 + ], + [ + "▁lubricants", + -13.64903736114502 + ], + [ + "orb", + -13.649057388305664 + ], + [ + "▁Koo", + -13.649062156677246 + ], + [ + "▁stave", + -13.64910125732422 + ], + [ + "389", + -13.649145126342772 + ], + [ + "Descriptions", + -13.649165153503418 + ], + [ + "▁Adler", + -13.649178504943848 + ], + [ + "▁Tailor", + -13.64919662475586 + ], + [ + "▁Ec", + -13.649292945861816 + ], + [ + "▁wildfires", + -13.649343490600586 + ], + [ + "▁afield", + -13.64937686920166 + ], + [ + "dhu", + -13.649413108825684 + ], + [ + "▁Interviews", + -13.649456977844238 + ], + [ + "ahl", + -13.649523735046388 + ], + [ + "Internal", + -13.649584770202637 + ], + [ + "Helping", + -13.649621963500977 + ], + [ + "▁grievances", + -13.649663925170898 + ], + [ + "anticipated", + -13.64968490600586 + ], + [ + "quer", + -13.649691581726074 + ], + [ + "▁denture", + -13.64976978302002 + ], + [ + "▁Clinics", + -13.649774551391602 + ], + [ + "▁Peck", + -13.649866104125977 + ], + [ + "7-0", + -13.649880409240724 + ], + [ + "▁Talks", + -13.64990520477295 + ], + [ + "▁biz", + -13.649940490722656 + ], + [ + "4-8", + -13.64999294281006 + ], + [ + "▁fend", + -13.650218963623049 + ], + [ + "Pla", + -13.650230407714844 + ], + [ + "▁clockwise", + -13.650327682495115 + ], + [ + "Improved", + -13.65032958984375 + ], + [ + "▁Cooke", + -13.650384902954102 + ], + [ + "▁campgrounds", + -13.650453567504885 + ], + [ + "667", + -13.65049934387207 + ], + [ + "▁Quint", + -13.650522232055664 + ], + [ + "Hunter", + -13.650532722473145 + ], + [ + "0)", + -13.650537490844728 + ], + [ + "▁interceptions", + -13.650537490844728 + ], + [ + "hea", + -13.65054988861084 + ], + [ + "Older", + -13.650552749633787 + ], + [ + "▁racked", + -13.650609970092772 + ], + [ + "▁antidote", + -13.650623321533203 + ], + [ + "▁hereditary", + -13.650623321533203 + ], + [ + "▁ukulele", + -13.650623321533203 + ], + [ + "▁colossal", + 
-13.65062427520752 + ], + [ + "▁Paraguay", + -13.650625228881836 + ], + [ + "▁proxies", + -13.650625228881836 + ], + [ + "▁lousy", + -13.650630950927734 + ], + [ + "▁seeding", + -13.650662422180176 + ], + [ + "▁Blanket", + -13.65069580078125 + ], + [ + "▁Offered", + -13.650771141052246 + ], + [ + "▁Chairperson", + -13.650798797607422 + ], + [ + "▁foothold", + -13.650806427001951 + ], + [ + "▁Lag", + -13.650811195373535 + ], + [ + "▁Ames", + -13.650815963745115 + ], + [ + "Chip", + -13.65082550048828 + ], + [ + "▁SLS", + -13.650853157043455 + ], + [ + "▁Petty", + -13.650879859924316 + ], + [ + "▁Beaches", + -13.650938987731934 + ], + [ + "▁Immuno", + -13.650989532470703 + ], + [ + "▁Climbing", + -13.651066780090332 + ], + [ + "▁COR", + -13.651130676269531 + ], + [ + "▁Peyton", + -13.651155471801758 + ], + [ + "▁9000", + -13.651178359985352 + ], + [ + "▁Avocado", + -13.65130615234375 + ], + [ + "▁Lithuanian", + -13.651372909545898 + ], + [ + "▁dar", + -13.65139865875244 + ], + [ + "▁Calder", + -13.65145492553711 + ], + [ + "Trader", + -13.651494026184082 + ], + [ + "aldi", + -13.651544570922852 + ], + [ + "div", + -13.651548385620115 + ], + [ + "▁2012).", + -13.651564598083496 + ], + [ + "▁kiosks", + -13.65159797668457 + ], + [ + "9-7", + -13.651629447937012 + ], + [ + "enny", + -13.651650428771973 + ], + [ + "▁lifeline", + -13.651665687561035 + ], + [ + "▁Saxon", + -13.65176010131836 + ], + [ + "▁11.5", + -13.651778221130373 + ], + [ + "▁blistering", + -13.65181827545166 + ], + [ + "▁Wigan", + -13.651843070983888 + ], + [ + "▁rallying", + -13.651844024658203 + ], + [ + "▁joe", + -13.65187931060791 + ], + [ + "▁UNICEF", + -13.652024269104004 + ], + [ + "▁Monty", + -13.652043342590332 + ], + [ + "▁Upholstered", + -13.652106285095217 + ], + [ + "yun", + -13.652146339416504 + ], + [ + "oco", + -13.65216064453125 + ], + [ + "▁butler", + -13.652339935302734 + ], + [ + "▁Whoever", + -13.65234088897705 + ], + [ + "▁Parkland", + -13.652503967285156 + ], + [ + "Introducing", + -13.652510643005373 + ], + [ + "▁constructor", + -13.652589797973633 + ], + [ + "Austin", + -13.652596473693848 + ], + [ + "rrh", + -13.652676582336426 + ], + [ + "Restaurant", + -13.652745246887209 + ], + [ + "nature", + -13.652749061584473 + ], + [ + "▁Waist", + -13.65278148651123 + ], + [ + "Consumer", + -13.652883529663086 + ], + [ + "▁NPC", + -13.652918815612791 + ], + [ + "roo", + -13.652942657470703 + ], + [ + "▁emits", + -13.652958869934082 + ], + [ + "classified", + -13.652971267700195 + ], + [ + "cute", + -13.652993202209473 + ], + [ + "▁detach", + -13.653019905090332 + ], + [ + "▁Raised", + -13.65302848815918 + ], + [ + "EAT", + -13.65305519104004 + ], + [ + "regional", + -13.65308952331543 + ], + [ + "▁clueless", + -13.653106689453123 + ], + [ + "▁tak", + -13.653162956237791 + ], + [ + "▁lite", + -13.653252601623535 + ], + [ + "drift", + -13.65326690673828 + ], + [ + "848", + -13.653275489807127 + ], + [ + "▁queens", + -13.653311729431152 + ], + [ + "Connection", + -13.65340805053711 + ], + [ + "Egg", + -13.653419494628906 + ], + [ + "▁Gamb", + -13.653425216674805 + ], + [ + "▁inflicted", + -13.653444290161133 + ], + [ + "▁nanoparticles", + -13.653467178344728 + ], + [ + "▁Cheyenne", + -13.65353012084961 + ], + [ + "▁Hezbollah", + -13.65353012084961 + ], + [ + "▁Mississauga", + -13.65353012084961 + ], + [ + "▁Phentermine", + -13.65353012084961 + ], + [ + "▁Nightmare", + -13.653532028198242 + ], + [ + "▁holster", + -13.653533935546877 + ], + [ + "nip", + -13.65353775024414 + ], + [ + "▁Fayetteville", + 
-13.653554916381836 + ], + [ + "▁charting", + -13.653559684753418 + ], + [ + "pton", + -13.653599739074709 + ], + [ + "zek", + -13.6536226272583 + ], + [ + "FTA", + -13.6536283493042 + ], + [ + "ilia", + -13.6536283493042 + ], + [ + "▁Passive", + -13.653660774230955 + ], + [ + "▁JFK", + -13.653668403625488 + ], + [ + "▁Mushroom", + -13.65369987487793 + ], + [ + "▁Brochure", + -13.653700828552246 + ], + [ + "▁clippings", + -13.653708457946776 + ], + [ + "▁videographer", + -13.653728485107422 + ], + [ + "▁resent", + -13.653801918029783 + ], + [ + "▁mosques", + -13.653870582580566 + ], + [ + "▁macaroni", + -13.65389919281006 + ], + [ + "eers", + -13.65392017364502 + ], + [ + "▁keygen", + -13.65394401550293 + ], + [ + "▁patchwork", + -13.653949737548828 + ], + [ + "3′′", + -13.653950691223145 + ], + [ + "▁darkened", + -13.654004096984863 + ], + [ + "▁Lash", + -13.65404987335205 + ], + [ + "▁highlighter", + -13.654057502746582 + ], + [ + "tivo", + -13.654081344604492 + ], + [ + "448", + -13.6541166305542 + ], + [ + "▁airbag", + -13.654126167297363 + ], + [ + "▁emphasised", + -13.654159545898438 + ], + [ + "▁Vie", + -13.654165267944336 + ], + [ + "aven", + -13.654170989990234 + ], + [ + "dhi", + -13.654291152954102 + ], + [ + "Door", + -13.654326438903809 + ], + [ + "▁PCR", + -13.654341697692873 + ], + [ + "▁Favor", + -13.654385566711426 + ], + [ + "Slide", + -13.654438018798828 + ], + [ + "pont", + -13.654449462890623 + ], + [ + "▁Gl", + -13.654467582702637 + ], + [ + "▁hind", + -13.654540061950684 + ], + [ + "▁Appearance", + -13.654632568359377 + ], + [ + "▁someplace", + -13.65477466583252 + ], + [ + "bara", + -13.65480899810791 + ], + [ + "Operation", + -13.654855728149414 + ], + [ + "-69", + -13.654902458190918 + ], + [ + "▁Haute", + -13.654903411865234 + ], + [ + "▁Checkout", + -13.65493392944336 + ], + [ + "Sit", + -13.655047416687012 + ], + [ + "▁boasted", + -13.655071258544922 + ], + [ + "Higher", + -13.65507984161377 + ], + [ + "139", + -13.655094146728516 + ], + [ + "chris", + -13.655200958251951 + ], + [ + "▁tai", + -13.65522289276123 + ], + [ + "inted", + -13.655298233032228 + ], + [ + "Response", + -13.655417442321776 + ], + [ + "Positive", + -13.655447006225586 + ], + [ + "neur", + -13.655471801757812 + ], + [ + "inducing", + -13.655476570129396 + ], + [ + "Favorite", + -13.65548610687256 + ], + [ + "Sprinkle", + -13.65550136566162 + ], + [ + "▁enlighten", + -13.655510902404783 + ], + [ + "▁IDF", + -13.655513763427734 + ], + [ + "424", + -13.655516624450684 + ], + [ + "224", + -13.655576705932615 + ], + [ + "Funny", + -13.65559196472168 + ], + [ + "▁succumb", + -13.655594825744627 + ], + [ + "Status", + -13.65561580657959 + ], + [ + "▁Keynote", + -13.65561580657959 + ], + [ + "▁Nomad", + -13.65566062927246 + ], + [ + "4-0", + -13.655828475952148 + ], + [ + "▁Filing", + -13.655838966369627 + ], + [ + "▁baffle", + -13.655908584594728 + ], + [ + "▁renter", + -13.655921936035156 + ], + [ + "nant", + -13.655943870544434 + ], + [ + "▁standardization", + -13.656002044677734 + ], + [ + "▁Keen", + -13.656020164489746 + ], + [ + "▁jumpers", + -13.656035423278809 + ], + [ + "idea", + -13.656052589416504 + ], + [ + "▁curtail", + -13.656055450439451 + ], + [ + "▁cursed", + -13.656189918518066 + ], + [ + "▁Carole", + -13.656197547912598 + ], + [ + "rogen", + -13.656230926513672 + ], + [ + "Cute", + -13.65626335144043 + ], + [ + "Replace", + -13.656267166137695 + ], + [ + "1995", + -13.656268119812012 + ], + [ + "▁Libra", + -13.656274795532228 + ], + [ + "▁bas", + -13.65630340576172 + ], + [ + 
"▁Attribution", + -13.656445503234863 + ], + [ + "▁Hygiene", + -13.656445503234863 + ], + [ + "▁umpire", + -13.656445503234863 + ], + [ + "▁levied", + -13.656449317932127 + ], + [ + "▁References", + -13.65645980834961 + ], + [ + "▁Closure", + -13.656460762023926 + ], + [ + "▁Fairbanks", + -13.65646266937256 + ], + [ + "neurotransmitter", + -13.656472206115724 + ], + [ + "▁Peacock", + -13.65648365020752 + ], + [ + "▁Vila", + -13.656490325927734 + ], + [ + "▁arid", + -13.656502723693848 + ], + [ + "▁fintech", + -13.656509399414062 + ], + [ + "199", + -13.656526565551758 + ], + [ + "▁Moh", + -13.656542778015137 + ], + [ + "▁subtraction", + -13.656542778015137 + ], + [ + "▁herds", + -13.65654468536377 + ], + [ + "▁ledge", + -13.656571388244627 + ], + [ + "▁unintentionally", + -13.656661987304688 + ], + [ + "▁commentaries", + -13.656688690185549 + ], + [ + "orie", + -13.656708717346191 + ], + [ + "▁Commitment", + -13.656730651855469 + ], + [ + "▁214", + -13.65680694580078 + ], + [ + "chenko", + -13.65687370300293 + ], + [ + "▁Benny", + -13.656937599182127 + ], + [ + "▁Proto", + -13.656996726989746 + ], + [ + "▁guesswork", + -13.657063484191896 + ], + [ + "506", + -13.657064437866213 + ], + [ + "drama", + -13.65709114074707 + ], + [ + "rif", + -13.65711498260498 + ], + [ + "arte", + -13.657267570495604 + ], + [ + "Hell", + -13.65732479095459 + ], + [ + "▁Pill", + -13.65732479095459 + ], + [ + "▁STO", + -13.657386779785156 + ], + [ + "▁Acquire", + -13.657577514648438 + ], + [ + "rte", + -13.65767765045166 + ], + [ + "Temp", + -13.657716751098633 + ], + [ + "▁hernia", + -13.657788276672363 + ], + [ + "lais", + -13.65780544281006 + ], + [ + "▁($5", + -13.6578369140625 + ], + [ + "bait", + -13.65786361694336 + ], + [ + "alpha", + -13.65786838531494 + ], + [ + "moment", + -13.6578950881958 + ], + [ + "▁drifted", + -13.657896995544434 + ], + [ + "mala", + -13.657919883728027 + ], + [ + "▁Buff", + -13.657964706420898 + ], + [ + "▁Matsu", + -13.657999038696287 + ], + [ + "▁Pt", + -13.658025741577148 + ], + [ + "▁vitally", + -13.658039093017578 + ], + [ + "▁ken", + -13.65806484222412 + ], + [ + "▁contemplated", + -13.658066749572754 + ], + [ + "▁Sami", + -13.658114433288574 + ], + [ + "▁Lowest", + -13.658146858215332 + ], + [ + "▁droplets", + -13.658173561096191 + ], + [ + "▁comical", + -13.658217430114746 + ], + [ + "breath", + -13.658247947692873 + ], + [ + "Kat", + -13.658252716064451 + ], + [ + "MK", + -13.65825653076172 + ], + [ + "▁51%", + -13.658281326293944 + ], + [ + "▁overhang", + -13.658294677734377 + ], + [ + "▁distort", + -13.658425331115724 + ], + [ + "▁padlock", + -13.65846061706543 + ], + [ + "Query", + -13.658503532409668 + ], + [ + "ASA", + -13.658565521240234 + ], + [ + "▁Scorpio", + -13.65858554840088 + ], + [ + "▁Moisture", + -13.658587455749512 + ], + [ + "▁Uri", + -13.658614158630373 + ], + [ + "▁ACTION", + -13.658703804016112 + ], + [ + "tourism", + -13.658705711364746 + ], + [ + "evaluate", + -13.658716201782228 + ], + [ + "▁fitter", + -13.65882396697998 + ], + [ + "Needless", + -13.658825874328612 + ], + [ + "▁PAT", + -13.658827781677246 + ], + [ + "▁1973.", + -13.658894538879396 + ], + [ + "Soul", + -13.658896446228027 + ], + [ + "608", + -13.658906936645508 + ], + [ + "▁DPS", + -13.65893840789795 + ], + [ + "47.", + -13.659161567687988 + ], + [ + "Problem", + -13.659260749816896 + ], + [ + "▁Highest", + -13.659268379211426 + ], + [ + "▁Warrant", + -13.65928840637207 + ], + [ + "astic", + -13.659306526184082 + ], + [ + "▁cambogia", + -13.659369468688965 + ], + [ + "▁fibrosis", 
+ -13.659369468688965 + ], + [ + "▁Margarita", + -13.659371376037598 + ], + [ + "▁meditating", + -13.659372329711914 + ], + [ + "▁Fertilizer", + -13.659374237060549 + ], + [ + "▁Goodreads", + -13.659375190734863 + ], + [ + "▁Gigabit", + -13.659388542175291 + ], + [ + "▁Recover", + -13.659431457519531 + ], + [ + "STEM", + -13.659525871276855 + ], + [ + "▁entrees", + -13.659561157226562 + ], + [ + "▁drivetrain", + -13.659683227539062 + ], + [ + "521", + -13.65971851348877 + ], + [ + "▁Outdoors", + -13.659725189208984 + ], + [ + "▁strung", + -13.659833908081056 + ], + [ + "▁Serra", + -13.65984058380127 + ], + [ + "▁Fell", + -13.659842491149902 + ], + [ + "▁notary", + -13.65990924835205 + ], + [ + "▁hampered", + -13.660018920898438 + ], + [ + "yrs", + -13.660101890563965 + ], + [ + "STRUCT", + -13.66010856628418 + ], + [ + "READ", + -13.660151481628418 + ], + [ + "Lean", + -13.660181045532228 + ], + [ + "▁Respondent", + -13.660208702087402 + ], + [ + "▁_____", + -13.660259246826172 + ], + [ + "tide", + -13.660268783569336 + ], + [ + "237", + -13.66028881072998 + ], + [ + "▁itch", + -13.66029930114746 + ], + [ + "▁Climb", + -13.660356521606444 + ], + [ + "shoulder", + -13.660401344299316 + ], + [ + "▁Ning", + -13.66046142578125 + ], + [ + "▁VER", + -13.660475730895996 + ], + [ + "▁grandpa", + -13.660521507263184 + ], + [ + "▁Bury", + -13.66053295135498 + ], + [ + "▁quant", + -13.660574913024902 + ], + [ + "Promote", + -13.660670280456545 + ], + [ + "Zo", + -13.6607027053833 + ], + [ + "▁fury", + -13.660853385925291 + ], + [ + "▁tongues", + -13.66086196899414 + ], + [ + "▁Hypo", + -13.66090488433838 + ], + [ + "AQ", + -13.660947799682615 + ], + [ + "▁Sno", + -13.66099452972412 + ], + [ + "▁Ramon", + -13.6610107421875 + ], + [ + "mona", + -13.661043167114258 + ], + [ + "▁Decisions", + -13.661080360412598 + ], + [ + "▁Nad", + -13.661083221435549 + ], + [ + "-71", + -13.661103248596191 + ], + [ + "▁scoreboard", + -13.661120414733888 + ], + [ + "Bull", + -13.661237716674805 + ], + [ + "Device", + -13.661299705505373 + ], + [ + "stretch", + -13.66130542755127 + ], + [ + "▁1/8\"", + -13.661327362060549 + ], + [ + "▁moody", + -13.661375045776367 + ], + [ + "▁ortho", + -13.661484718322754 + ], + [ + "newsletter", + -13.66150188446045 + ], + [ + "▁Shui", + -13.661502838134766 + ], + [ + "Wholesale", + -13.661511421203612 + ], + [ + "▁expansions", + -13.66151523590088 + ], + [ + "▁moles", + -13.66152000427246 + ], + [ + "▁chile", + -13.661537170410156 + ], + [ + "359", + -13.661553382873535 + ], + [ + "metallic", + -13.661561965942385 + ], + [ + "rath", + -13.66157054901123 + ], + [ + "Congress", + -13.661602020263672 + ], + [ + "Pur", + -13.66164207458496 + ], + [ + "wak", + -13.66172218322754 + ], + [ + "▁+44", + -13.66173267364502 + ], + [ + "315", + -13.661735534667969 + ], + [ + "MEA", + -13.66175937652588 + ], + [ + "-83", + -13.661788940429688 + ], + [ + "339", + -13.661797523498535 + ], + [ + "▁IDEA", + -13.661840438842772 + ], + [ + "▁shack", + -13.661904335021973 + ], + [ + "Cars", + -13.661919593811035 + ], + [ + "▁Estimate", + -13.661983489990234 + ], + [ + "7.2", + -13.661998748779297 + ], + [ + "Walker", + -13.66205596923828 + ], + [ + "ande", + -13.662090301513672 + ], + [ + "casting", + -13.66213321685791 + ], + [ + "▁Voters", + -13.66214656829834 + ], + [ + "geo", + -13.662274360656738 + ], + [ + "'", + -13.662302017211914 + ], + [ + "▁magistrate", + -13.662302017211914 + ], + [ + "▁phantom", + -13.662302017211914 + ], + [ + "▁procrastination", + -13.662302017211914 + ], + [ + "▁flannel", 
+ -13.662303924560549 + ], + [ + "▁Lexmark", + -13.662317276000977 + ], + [ + "68.", + -13.662321090698242 + ], + [ + "▁Kannada", + -13.66232204437256 + ], + [ + "▁FTSE", + -13.662335395812988 + ], + [ + "▁irregularities", + -13.662338256835938 + ], + [ + "▁sneaking", + -13.66237735748291 + ], + [ + "▁mixtape", + -13.662384033203123 + ], + [ + "▁journalistic", + -13.66240406036377 + ], + [ + "▁flares", + -13.662407875061035 + ], + [ + "▁Walters", + -13.66242504119873 + ], + [ + "▁sucking", + -13.662474632263184 + ], + [ + "▁pebble", + -13.66248607635498 + ], + [ + "▁hitters", + -13.662537574768066 + ], + [ + "leather", + -13.662653923034668 + ], + [ + "distinguishable", + -13.662761688232422 + ], + [ + "▁wreaths", + -13.662772178649902 + ], + [ + "▁raving", + -13.662797927856444 + ], + [ + "issi", + -13.662813186645508 + ], + [ + "▁1967,", + -13.662816047668455 + ], + [ + "▁Appropriate", + -13.662818908691406 + ], + [ + "SEE", + -13.662840843200684 + ], + [ + "▁potting", + -13.662856101989746 + ], + [ + "▁briefed", + -13.662867546081545 + ], + [ + "▁Nonprofit", + -13.663070678710938 + ], + [ + "▁Madame", + -13.663118362426758 + ], + [ + "▁trolling", + -13.663158416748049 + ], + [ + "▁Athena", + -13.66322135925293 + ], + [ + "802", + -13.663294792175291 + ], + [ + "pointer", + -13.663304328918455 + ], + [ + "rul", + -13.663348197937012 + ], + [ + "▁gm", + -13.663369178771973 + ], + [ + "itos", + -13.663384437561035 + ], + [ + "▁Ferris", + -13.66345500946045 + ], + [ + "▁injecting", + -13.663498878479004 + ], + [ + "▁widened", + -13.663620948791504 + ], + [ + "383", + -13.663634300231934 + ], + [ + "PAR", + -13.663665771484377 + ], + [ + "EXAM", + -13.663681983947754 + ], + [ + "tite", + -13.663747787475586 + ], + [ + "▁Paz", + -13.663789749145508 + ], + [ + "▁Charging", + -13.663890838623049 + ], + [ + "wired", + -13.663934707641602 + ], + [ + "▁15\"", + -13.663952827453612 + ], + [ + "▁markedly", + -13.664010047912598 + ], + [ + "ylo", + -13.664015769958496 + ], + [ + "Jaw", + -13.664055824279783 + ], + [ + "Grow", + -13.664090156555176 + ], + [ + "▁PLACE", + -13.66413688659668 + ], + [ + "bf", + -13.664177894592283 + ], + [ + "stall", + -13.664258003234863 + ], + [ + "gaon", + -13.664273262023926 + ], + [ + "▁eleventh", + -13.66428279876709 + ], + [ + "▁rhetorical", + -13.664335250854492 + ], + [ + "eyes", + -13.664350509643556 + ], + [ + "mous", + -13.664361953735352 + ], + [ + "547", + -13.664389610290527 + ], + [ + "pag", + -13.66451644897461 + ], + [ + "Authorities", + -13.664563179016112 + ], + [ + "Guess", + -13.664575576782228 + ], + [ + "▁Hail", + -13.664617538452148 + ], + [ + "▁Astros", + -13.664631843566896 + ], + [ + "▁Clicking", + -13.664639472961426 + ], + [ + "nuclear", + -13.664644241333008 + ], + [ + "▁Dermatology", + -13.664755821228027 + ], + [ + "pixel", + -13.664807319641112 + ], + [ + "▁reshape", + -13.664867401123049 + ], + [ + "▁Galle", + -13.664875030517578 + ], + [ + "▁\"[", + -13.664924621582031 + ], + [ + "▁Ley", + -13.664926528930664 + ], + [ + "▁analyzer", + -13.664998054504396 + ], + [ + "dow", + -13.66502285003662 + ], + [ + "▁Elsevier", + -13.665051460266112 + ], + [ + "▁chromosome", + -13.665081024169922 + ], + [ + "xe", + -13.66523265838623 + ], + [ + "▁contiguous", + -13.665243148803713 + ], + [ + "▁inquisitive", + -13.665243148803713 + ], + [ + "▁risotto", + -13.665243148803713 + ], + [ + "▁hypoallergenic", + -13.665245056152344 + ], + [ + "▁plenary", + -13.665245056152344 + ], + [ + "▁Galactic", + -13.66525173187256 + ], + [ + "bas", + 
-13.66529941558838 + ], + [ + "▁unusable", + -13.66531753540039 + ], + [ + "▁AutoCAD", + -13.665325164794922 + ], + [ + "▁Drainage", + -13.665345191955566 + ], + [ + "Irish", + -13.66535472869873 + ], + [ + "▁$4,", + -13.665363311767578 + ], + [ + "▁Jolly", + -13.665406227111816 + ], + [ + "▁oppressive", + -13.665480613708496 + ], + [ + "render", + -13.665488243103027 + ], + [ + "▁oppressed", + -13.66554069519043 + ], + [ + "4.6", + -13.665541648864746 + ], + [ + "473", + -13.665553092956545 + ], + [ + "mitted", + -13.665569305419922 + ], + [ + "▁violinist", + -13.665579795837402 + ], + [ + "▁Ramp", + -13.6655855178833 + ], + [ + "ALT", + -13.665640830993652 + ], + [ + "▁(2004)", + -13.665690422058104 + ], + [ + "whole", + -13.665709495544434 + ], + [ + "▁pierced", + -13.66571044921875 + ], + [ + "▁survives", + -13.66579246520996 + ], + [ + "▁shifter", + -13.665830612182615 + ], + [ + "Ware", + -13.665853500366213 + ], + [ + "▁Pence", + -13.66586685180664 + ], + [ + "clinical", + -13.665871620178224 + ], + [ + "evaluation", + -13.665904998779297 + ], + [ + "▁Parrot", + -13.665929794311523 + ], + [ + "tat", + -13.666030883789062 + ], + [ + "▁1971.", + -13.666068077087402 + ], + [ + "▁Seaside", + -13.666086196899414 + ], + [ + "▁blissful", + -13.666171073913574 + ], + [ + "▁Westbrook", + -13.66625690460205 + ], + [ + "sick", + -13.66632843017578 + ], + [ + "▁venous", + -13.666451454162598 + ], + [ + "okay", + -13.66646957397461 + ], + [ + "▁majoring", + -13.666479110717772 + ], + [ + "▁jab", + -13.666550636291504 + ], + [ + "▁sores", + -13.666584014892578 + ], + [ + "▁Wenger", + -13.666637420654297 + ], + [ + "EAL", + -13.66670036315918 + ], + [ + "ought", + -13.666746139526367 + ], + [ + "▁datacenter", + -13.666826248168944 + ], + [ + "▁bask", + -13.666994094848633 + ], + [ + "ARM", + -13.667085647583008 + ], + [ + "hunt", + -13.667146682739258 + ], + [ + "▁Appendix", + -13.66716766357422 + ], + [ + "VING", + -13.66724681854248 + ], + [ + "BALL", + -13.667263984680176 + ], + [ + "▁SIZE", + -13.66728401184082 + ], + [ + "▁betray", + -13.66728973388672 + ], + [ + "▁Upstairs", + -13.66733169555664 + ], + [ + "▁6.7", + -13.667423248291016 + ], + [ + "463", + -13.667503356933594 + ], + [ + "▁closeness", + -13.667552947998049 + ], + [ + "-57", + -13.667564392089844 + ], + [ + "▁accrue", + -13.667593002319336 + ], + [ + "Craig", + -13.667595863342283 + ], + [ + "▁deceive", + -13.667695045471191 + ], + [ + "▁Literally", + -13.667710304260254 + ], + [ + "▁Flights", + -13.667848587036133 + ], + [ + "acha", + -13.66788101196289 + ], + [ + "▁dope", + -13.667914390563965 + ], + [ + "publishing", + -13.668002128601074 + ], + [ + "▁Customized", + -13.668010711669922 + ], + [ + "▁HMS", + -13.668012619018556 + ], + [ + "▁emergent", + -13.668041229248049 + ], + [ + "▁Tik", + -13.66815185546875 + ], + [ + "▁chuckle", + -13.668164253234863 + ], + [ + "▁DEN", + -13.668185234069824 + ], + [ + "▁anecdotal", + -13.668192863464355 + ], + [ + "▁depletion", + -13.668192863464355 + ], + [ + "▁gratifying", + -13.668192863464355 + ], + [ + "▁guacamole", + -13.668192863464355 + ], + [ + "▁presumption", + -13.668192863464355 + ], + [ + "▁conveyancing", + -13.668201446533203 + ], + [ + "▁quadrant", + -13.668204307556152 + ], + [ + "▁greek", + -13.66821002960205 + ], + [ + "▁bisexual", + -13.668211936950684 + ], + [ + "680", + -13.66822338104248 + ], + [ + "Pac", + -13.668224334716797 + ], + [ + "▁tulle", + -13.66822624206543 + ], + [ + "▁shrug", + -13.668227195739746 + ], + [ + "▁Suburban", + -13.668234825134276 + ], + [ + 
"▁charred", + -13.668237686157228 + ], + [ + "▁polio", + -13.668238639831545 + ], + [ + "▁northwestern", + -13.668261528015137 + ], + [ + "▁Ownership", + -13.66828155517578 + ], + [ + "Clients", + -13.668292999267578 + ], + [ + "▁Coordination", + -13.668316841125488 + ], + [ + "▁Releases", + -13.668334007263184 + ], + [ + "▁Blaine", + -13.668387413024902 + ], + [ + "152", + -13.668389320373535 + ], + [ + "▁accusation", + -13.668450355529783 + ], + [ + "▁Doctorate", + -13.66850471496582 + ], + [ + "▁pampered", + -13.66860580444336 + ], + [ + "▁ethno", + -13.668614387512209 + ], + [ + "wage", + -13.668623924255373 + ], + [ + "▁ingested", + -13.668633460998535 + ], + [ + "▁Presidents", + -13.668648719787598 + ], + [ + "▁rotational", + -13.668778419494627 + ], + [ + "▁prob", + -13.668814659118652 + ], + [ + "▁modernist", + -13.668867111206056 + ], + [ + "▁Chapters", + -13.668890953063965 + ], + [ + "▁Kub", + -13.66891860961914 + ], + [ + "footed", + -13.668977737426758 + ], + [ + "▁bison", + -13.668993949890137 + ], + [ + "▁Somali", + -13.669060707092283 + ], + [ + "▁proofing", + -13.66909408569336 + ], + [ + "▁analogous", + -13.669114112854004 + ], + [ + "rgan", + -13.6691312789917 + ], + [ + "yte", + -13.669172286987305 + ], + [ + "▁Empower", + -13.669201850891112 + ], + [ + "▁provoked", + -13.669219017028809 + ], + [ + "▁woodlands", + -13.669234275817873 + ], + [ + "meet", + -13.66924285888672 + ], + [ + "▁Festivals", + -13.669248580932615 + ], + [ + "▁bankroll", + -13.669381141662598 + ], + [ + "kow", + -13.669451713562012 + ], + [ + "362", + -13.669499397277832 + ], + [ + "▁Faces", + -13.66954517364502 + ], + [ + "▁ATL", + -13.669581413269045 + ], + [ + "ulus", + -13.66965675354004 + ], + [ + "▁USP", + -13.66968059539795 + ], + [ + "▁interlock", + -13.66969871520996 + ], + [ + "ии", + -13.669699668884276 + ], + [ + "3.9", + -13.669764518737791 + ], + [ + "▁Islanders", + -13.669803619384766 + ], + [ + "▁vanish", + -13.669842720031738 + ], + [ + "grained", + -13.669854164123535 + ], + [ + "▁retailing", + -13.669933319091797 + ], + [ + "siphon", + -13.66999053955078 + ], + [ + "IMA", + -13.66999340057373 + ], + [ + "arro", + -13.670034408569336 + ], + [ + "▁6.30", + -13.670127868652344 + ], + [ + "▁Contacts", + -13.670201301574709 + ], + [ + "▁sunday", + -13.670235633850098 + ], + [ + "▁lager", + -13.670368194580078 + ], + [ + "▁Plank", + -13.670419692993164 + ], + [ + "olla", + -13.670520782470703 + ], + [ + "facts", + -13.670543670654297 + ], + [ + "▁Genoa", + -13.670550346374512 + ], + [ + "Episode", + -13.67055606842041 + ], + [ + "▁1280", + -13.670588493347168 + ], + [ + "utz", + -13.670615196228027 + ], + [ + "▁sacrament", + -13.670654296875 + ], + [ + "▁bre", + -13.67065715789795 + ], + [ + "767", + -13.67067527770996 + ], + [ + "▁Tempe", + -13.670723915100098 + ], + [ + "▁dune", + -13.670724868774414 + ], + [ + "scientific", + -13.67072582244873 + ], + [ + "causing", + -13.670732498168944 + ], + [ + "▁SQ", + -13.67080783843994 + ], + [ + "YouTube", + -13.67087745666504 + ], + [ + "▁porta", + -13.670928001403809 + ], + [ + "▁adamant", + -13.670950889587402 + ], + [ + "▁blockers", + -13.670957565307615 + ], + [ + "▁restrain", + -13.671052932739258 + ], + [ + "morph", + -13.67105484008789 + ], + [ + "ANTA", + -13.67105770111084 + ], + [ + "CHE", + -13.671090126037598 + ], + [ + "▁confection", + -13.671140670776367 + ], + [ + "premise", + -13.671143531799316 + ], + [ + "▁PURPOSE", + -13.671152114868164 + ], + [ + "▁Sainsbury", + -13.671152114868164 + ], + [ + "▁cavalry", + 
-13.671152114868164 + ], + [ + "▁majesty", + -13.671152114868164 + ], + [ + "▁Wiltshire", + -13.671154022216797 + ], + [ + "▁Ithaca", + -13.671156883239746 + ], + [ + "▁eucalyptus", + -13.671157836914062 + ], + [ + "▁Saloon", + -13.671170234680176 + ], + [ + "▁slurry", + -13.671175003051758 + ], + [ + "▁Skirt", + -13.671194076538086 + ], + [ + "Method", + -13.671317100524902 + ], + [ + "rde", + -13.671404838562012 + ], + [ + "▁5-0", + -13.671411514282228 + ], + [ + "▁Gillian", + -13.671465873718262 + ], + [ + "▁Weapon", + -13.671468734741213 + ], + [ + "▁chestnut", + -13.671472549438477 + ], + [ + "▁splashing", + -13.67151927947998 + ], + [ + "ndo", + -13.671561241149902 + ], + [ + "276", + -13.671663284301758 + ], + [ + "▁Sweeney", + -13.671728134155272 + ], + [ + "interesting", + -13.67173671722412 + ], + [ + "▁JPG", + -13.671762466430664 + ], + [ + "▁Hitachi", + -13.671853065490724 + ], + [ + "▁blurb", + -13.671907424926758 + ], + [ + "azza", + -13.671955108642578 + ], + [ + "▁Extensive", + -13.671977996826172 + ], + [ + "▁Harsh", + -13.671978950500488 + ], + [ + "▁Rota", + -13.671992301940918 + ], + [ + "evil", + -13.672000885009766 + ], + [ + "rooted", + -13.67200756072998 + ], + [ + "9/", + -13.67211627960205 + ], + [ + "▁Arun", + -13.6721830368042 + ], + [ + "▁Facing", + -13.672183990478516 + ], + [ + "▁clover", + -13.672248840332031 + ], + [ + "▁SNAP", + -13.672298431396484 + ], + [ + "DAL", + -13.672317504882812 + ], + [ + "MMA", + -13.672350883483888 + ], + [ + "▁Interpretation", + -13.672359466552734 + ], + [ + "▁Kudos", + -13.672476768493652 + ], + [ + "▁floured", + -13.672574043273926 + ], + [ + "starter", + -13.672924041748049 + ], + [ + "▁animate", + -13.672926902770996 + ], + [ + "▁Billie", + -13.673120498657228 + ], + [ + "▁Candles", + -13.673130989074709 + ], + [ + "uid", + -13.67314624786377 + ], + [ + "▁slogans", + -13.673163414001465 + ], + [ + "Hour", + -13.673210144042969 + ], + [ + "▁bribe", + -13.67325496673584 + ], + [ + "▁laced", + -13.673315048217772 + ], + [ + "505", + -13.673428535461426 + ], + [ + "▁Inspections", + -13.673431396484377 + ], + [ + "Contains", + -13.67360019683838 + ], + [ + "▁Manga", + -13.673633575439451 + ], + [ + "▁Letting", + -13.673654556274414 + ], + [ + "▁4.9", + -13.673670768737791 + ], + [ + "sink", + -13.67367458343506 + ], + [ + "▁187", + -13.673677444458008 + ], + [ + "▁Harden", + -13.673723220825195 + ], + [ + "▁modelled", + -13.673772811889648 + ], + [ + "Reason", + -13.67380142211914 + ], + [ + "▁catfish", + -13.673837661743164 + ], + [ + "Smoke", + -13.67385959625244 + ], + [ + "▁correlate", + -13.67395305633545 + ], + [ + "▁FRA", + -13.674009323120115 + ], + [ + "▁indefinite", + -13.674029350280762 + ], + [ + "▁derail", + -13.674036979675291 + ], + [ + "▁discouraging", + -13.674118995666504 + ], + [ + "▁confetti", + -13.67412281036377 + ], + [ + "▁ireland", + -13.674127578735352 + ], + [ + "▁mattered", + -13.674131393432615 + ], + [ + "▁solenoid", + -13.674134254455566 + ], + [ + "▁Fulham", + -13.674152374267578 + ], + [ + "▁Maltese", + -13.674155235290527 + ], + [ + "▁refineries", + -13.674159049987791 + ], + [ + "▁Leroy", + -13.674184799194336 + ], + [ + "▁pairings", + -13.674213409423828 + ], + [ + "▁pliers", + -13.674213409423828 + ], + [ + "ngo", + -13.67421531677246 + ], + [ + "▁abbreviated", + -13.674232482910156 + ], + [ + "CNN", + -13.67426586151123 + ], + [ + "▁riveting", + -13.674296379089355 + ], + [ + "▁confiscated", + -13.674307823181152 + ], + [ + "0.4", + -13.67433261871338 + ], + [ + "Thread", + 
-13.674334526062012 + ], + [ + "Hum", + -13.674339294433594 + ], + [ + "▁hoodies", + -13.674415588378906 + ], + [ + "▁Annette", + -13.6744384765625 + ], + [ + "▁mismatch", + -13.674471855163574 + ], + [ + "785", + -13.674477577209473 + ], + [ + "227", + -13.674531936645508 + ], + [ + "▁cusp", + -13.674562454223633 + ], + [ + "Ac", + -13.674609184265137 + ], + [ + "▁Constructed", + -13.674654960632324 + ], + [ + "Dual", + -13.674702644348145 + ], + [ + "▁roost", + -13.674720764160156 + ], + [ + "mma", + -13.674739837646484 + ], + [ + "▁dared", + -13.67483615875244 + ], + [ + "▁bombings", + -13.674877166748049 + ], + [ + "▁Modules", + -13.674880027770996 + ], + [ + "▁5-1", + -13.674957275390623 + ], + [ + "▁Cattle", + -13.6749906539917 + ], + [ + "▁Tote", + -13.675041198730469 + ], + [ + "▁Lumin", + -13.675151824951172 + ], + [ + "▁LIGHT", + -13.675196647644045 + ], + [ + "909", + -13.675219535827637 + ], + [ + "cino", + -13.675259590148926 + ], + [ + "▁veer", + -13.67531394958496 + ], + [ + "▁1863", + -13.675329208374023 + ], + [ + "▁Milky", + -13.675457000732422 + ], + [ + "-2009", + -13.675464630126951 + ], + [ + "▁$750", + -13.675670623779297 + ], + [ + "▁Exo", + -13.675715446472168 + ], + [ + "▁hangar", + -13.67581844329834 + ], + [ + "▁Rhine", + -13.67592716217041 + ], + [ + "▁PAY", + -13.675995826721191 + ], + [ + "eze", + -13.676117897033691 + ], + [ + "▁Jing", + -13.676121711730955 + ], + [ + "output", + -13.67612648010254 + ], + [ + "TIS", + -13.67613124847412 + ], + [ + "mort", + -13.676188468933104 + ], + [ + "Conduct", + -13.6762113571167 + ], + [ + "▁aft", + -13.676368713378906 + ], + [ + "▁Ville", + -13.6763916015625 + ], + [ + "▁6.6", + -13.67643928527832 + ], + [ + "▁songwriters", + -13.676447868347168 + ], + [ + "partisan", + -13.67646026611328 + ], + [ + "▁hails", + -13.67658233642578 + ], + [ + "▁Nectar", + -13.676605224609377 + ], + [ + "Specifically", + -13.676675796508787 + ], + [ + "cube", + -13.676708221435549 + ], + [ + "▁Raid", + -13.676709175109863 + ], + [ + "Installation", + -13.676755905151367 + ], + [ + "fon", + -13.676994323730469 + ], + [ + "▁japanese", + -13.677053451538086 + ], + [ + "▁EDI", + -13.677084922790527 + ], + [ + "▁enclave", + -13.677095413208008 + ], + [ + "▁impedance", + -13.677095413208008 + ], + [ + "▁oblivious", + -13.677095413208008 + ], + [ + "▁orthodontist", + -13.677095413208008 + ], + [ + "▁remorse", + -13.677095413208008 + ], + [ + "▁savior", + -13.677095413208008 + ], + [ + "▁tinnitus", + -13.677095413208008 + ], + [ + "▁Shabbat", + -13.67709732055664 + ], + [ + "▁THINK", + -13.677099227905272 + ], + [ + "▁wacky", + -13.677102088928224 + ], + [ + "▁BOTH", + -13.67711353302002 + ], + [ + "▁Garrison", + -13.677124977111816 + ], + [ + "CRE", + -13.677169799804688 + ], + [ + "animal", + -13.67719554901123 + ], + [ + "▁twigs", + -13.677218437194824 + ], + [ + "trac", + -13.677226066589355 + ], + [ + "▁Dementia", + -13.677270889282228 + ], + [ + "▁ox", + -13.67728042602539 + ], + [ + "pointers", + -13.677295684814451 + ], + [ + "▁frown", + -13.677295684814451 + ], + [ + "ruff", + -13.677306175231934 + ], + [ + "▁Ankara", + -13.677353858947754 + ], + [ + "featured", + -13.677391052246094 + ], + [ + "▁pissed", + -13.677398681640623 + ], + [ + "▁caterpillar", + -13.677449226379396 + ], + [ + "▁Norma", + -13.677473068237305 + ], + [ + "7000", + -13.677481651306152 + ], + [ + "▁Herbs", + -13.677586555480955 + ], + [ + "▁IDC", + -13.677594184875488 + ], + [ + "▁circuitry", + -13.677627563476562 + ], + [ + "▁interlocking", + -13.677644729614258 + 
], + [ + "▁Bigger", + -13.677650451660156 + ], + [ + "▁awakened", + -13.677762031555176 + ], + [ + "ggin", + -13.67778205871582 + ], + [ + "talking", + -13.677791595458984 + ], + [ + "▁Squeeze", + -13.67780590057373 + ], + [ + "steam", + -13.677830696105955 + ], + [ + "iated", + -13.677889823913574 + ], + [ + "thread", + -13.677895545959473 + ], + [ + "crib", + -13.677916526794434 + ], + [ + "▁403", + -13.677979469299316 + ], + [ + "▁Ancestry", + -13.678013801574709 + ], + [ + "▁Reeves", + -13.678041458129885 + ], + [ + "Iran", + -13.678058624267578 + ], + [ + "▁tiredness", + -13.678176879882812 + ], + [ + "etter", + -13.678202629089355 + ], + [ + "▁ASME", + -13.678258895874023 + ], + [ + "▁Routine", + -13.678303718566896 + ], + [ + "significant", + -13.67834186553955 + ], + [ + "▁sanded", + -13.67835807800293 + ], + [ + "▁Traverse", + -13.678359031677246 + ], + [ + "▁eyeing", + -13.678361892700195 + ], + [ + "▁secretion", + -13.678385734558104 + ], + [ + "▁wilt", + -13.67841625213623 + ], + [ + "▁approves", + -13.678447723388672 + ], + [ + "▁brewer", + -13.678482055664062 + ], + [ + "▁Tac", + -13.678483963012695 + ], + [ + "▁Nathaniel", + -13.678492546081545 + ], + [ + "▁fps", + -13.678524017333984 + ], + [ + "Guy", + -13.678555488586426 + ], + [ + "meric", + -13.678613662719728 + ], + [ + "▁Reconciliation", + -13.678618431091309 + ], + [ + "ROS", + -13.678668022155762 + ], + [ + "CSA", + -13.67868423461914 + ], + [ + "▁prospecting", + -13.67878532409668 + ], + [ + "828", + -13.678845405578612 + ], + [ + "▁endpoints", + -13.678892135620115 + ], + [ + "▁Crimea", + -13.678942680358888 + ], + [ + "▁Elton", + -13.678947448730469 + ], + [ + "VL", + -13.678994178771973 + ], + [ + "▁Gemma", + -13.67900848388672 + ], + [ + "143", + -13.679227828979492 + ], + [ + "▁juicer", + -13.679235458374023 + ], + [ + "▁skyscrapers", + -13.679353713989258 + ], + [ + "spiel", + -13.679357528686523 + ], + [ + "▁Goodwin", + -13.679366111755373 + ], + [ + "KEN", + -13.679378509521484 + ], + [ + "▁semifinals", + -13.6793851852417 + ], + [ + "▁fiancé", + -13.679400444030762 + ], + [ + "byte", + -13.679588317871094 + ], + [ + "▁(2006).", + -13.679614067077637 + ], + [ + "Primary", + -13.679728507995604 + ], + [ + "Battle", + -13.679813385009766 + ], + [ + "▁emphasise", + -13.67982006072998 + ], + [ + "calorie", + -13.679848670959473 + ], + [ + "▁Tul", + -13.679865837097168 + ], + [ + "▁WHERE", + -13.679891586303713 + ], + [ + "FY", + -13.679903984069824 + ], + [ + "rami", + -13.679965019226074 + ], + [ + "Carbon", + -13.679974555969238 + ], + [ + "▁Skid", + -13.679977416992188 + ], + [ + "Factory", + -13.68000316619873 + ], + [ + "▁astrologer", + -13.68008041381836 + ], + [ + "▁cinematography", + -13.68008041381836 + ], + [ + "▁feminism", + -13.68008041381836 + ], + [ + "▁inconsistencies", + -13.68008041381836 + ], + [ + "▁luxuries", + -13.68008041381836 + ], + [ + "▁quarantine", + -13.68008041381836 + ], + [ + "▁turbocharged", + -13.680086135864258 + ], + [ + "▁primaries", + -13.680089950561523 + ], + [ + "▁Redskins", + -13.68009090423584 + ], + [ + "▁Marquis", + -13.680112838745115 + ], + [ + "▁Pte", + -13.680133819580078 + ], + [ + "▁CENTER", + -13.680155754089355 + ], + [ + "opoulos", + -13.680180549621582 + ], + [ + "▁clones", + -13.680181503295898 + ], + [ + "▁Axe", + -13.680201530456545 + ], + [ + "▁md", + -13.68027687072754 + ], + [ + "▁diss", + -13.680426597595217 + ], + [ + "▁Faust", + -13.68045139312744 + ], + [ + "▁shipyard", + -13.680519104003906 + ], + [ + "▁217", + -13.68052864074707 + ], + [ + 
"▁probiotic", + -13.680530548095703 + ], + [ + "▁2005)", + -13.680545806884766 + ], + [ + "▁Middletown", + -13.680583000183104 + ], + [ + "▁alerting", + -13.680590629577637 + ], + [ + "▁tombs", + -13.68061637878418 + ], + [ + "Wonder", + -13.68070125579834 + ], + [ + "▁Knot", + -13.680709838867188 + ], + [ + "condition", + -13.680715560913086 + ], + [ + "▁Endo", + -13.68074893951416 + ], + [ + "▁orchards", + -13.680816650390623 + ], + [ + "▁grower", + -13.680855751037598 + ], + [ + "388", + -13.680902481079102 + ], + [ + "▁scrum", + -13.680935859680176 + ], + [ + "eden", + -13.68094253540039 + ], + [ + "role", + -13.680983543395996 + ], + [ + "▁Scatter", + -13.680996894836426 + ], + [ + "715", + -13.681093215942385 + ], + [ + "HRA", + -13.68111801147461 + ], + [ + "▁Clippers", + -13.68124008178711 + ], + [ + "▁ignited", + -13.681248664855955 + ], + [ + "▁voor", + -13.681291580200195 + ], + [ + "▁looping", + -13.681297302246094 + ], + [ + "▁sys", + -13.681313514709473 + ], + [ + "▁reigns", + -13.681325912475586 + ], + [ + "▁flavoured", + -13.681416511535645 + ], + [ + "▁OLD", + -13.68155574798584 + ], + [ + "▁Sensors", + -13.681563377380373 + ], + [ + "414", + -13.6815824508667 + ], + [ + "▁Tol", + -13.681641578674316 + ], + [ + "▁casters", + -13.681645393371582 + ], + [ + "Settings", + -13.681646347045898 + ], + [ + "MAG", + -13.681682586669922 + ], + [ + "▁moons", + -13.681748390197754 + ], + [ + "▁Brushed", + -13.681756019592283 + ], + [ + "▁armored", + -13.681777954101562 + ], + [ + "373", + -13.681790351867676 + ], + [ + "▁Zionist", + -13.68181324005127 + ], + [ + "ghan", + -13.681843757629396 + ], + [ + "▁bumpers", + -13.68191909790039 + ], + [ + "girls", + -13.682076454162598 + ], + [ + "▁NDA", + -13.682114601135254 + ], + [ + "Mas", + -13.682141304016112 + ], + [ + "▁VHS", + -13.68218994140625 + ], + [ + "Mis", + -13.682214736938477 + ], + [ + "might", + -13.68223476409912 + ], + [ + "catcher", + -13.682308197021484 + ], + [ + "▁Photographs", + -13.68235683441162 + ], + [ + "onga", + -13.68238353729248 + ], + [ + "ished", + -13.68244457244873 + ], + [ + "▁rooster", + -13.682479858398438 + ], + [ + "soon", + -13.68249225616455 + ], + [ + "VD", + -13.682538032531738 + ], + [ + "▁Comey", + -13.682580947875977 + ], + [ + "▁Conventional", + -13.682656288146973 + ], + [ + "▁Inclusive", + -13.682701110839844 + ], + [ + "70%", + -13.682740211486816 + ], + [ + "Position", + -13.682757377624512 + ], + [ + "inson", + -13.682780265808104 + ], + [ + "▁Pads", + -13.682785987854004 + ], + [ + "▁Fabrics", + -13.682806015014648 + ], + [ + "▁mowers", + -13.682830810546877 + ], + [ + "OOP", + -13.68294906616211 + ], + [ + "454", + -13.682988166809082 + ], + [ + "▁Casper", + -13.683001518249512 + ], + [ + "anes", + -13.68302059173584 + ], + [ + "▁Nepali", + -13.683037757873535 + ], + [ + "▁wildflowers", + -13.683050155639648 + ], + [ + "▁610", + -13.68307113647461 + ], + [ + "▁ferocious", + -13.683074951171877 + ], + [ + "▁Emeritus", + -13.683075904846191 + ], + [ + "▁Kendrick", + -13.683082580566406 + ], + [ + "▁stylized", + -13.683085441589355 + ], + [ + "▁stylus", + -13.683143615722656 + ], + [ + "▁rotting", + -13.683171272277832 + ], + [ + "▁Holders", + -13.68319320678711 + ], + [ + "▁Orr", + -13.683218002319336 + ], + [ + "▁Farmington", + -13.683250427246094 + ], + [ + "▁Somebody", + -13.683405876159668 + ], + [ + "magnetic", + -13.683419227600098 + ], + [ + "▁skewed", + -13.683435440063477 + ], + [ + "illus", + -13.683439254760742 + ], + [ + "▁musicals", + -13.683460235595703 + ], + [ + 
"▁concoction", + -13.683501243591309 + ], + [ + "▁preheated", + -13.68354606628418 + ], + [ + "309", + -13.683557510375977 + ], + [ + "▁synthesized", + -13.683585166931152 + ], + [ + "roma", + -13.683591842651367 + ], + [ + "▁inference", + -13.683594703674316 + ], + [ + "▁pajamas", + -13.683605194091797 + ], + [ + "Developed", + -13.683629989624023 + ], + [ + "▁Maa", + -13.683823585510254 + ], + [ + "▁Champs", + -13.683833122253418 + ], + [ + "▁fools", + -13.683856964111328 + ], + [ + "▁Elevation", + -13.683923721313477 + ], + [ + "np", + -13.683954238891602 + ], + [ + "▁synchronous", + -13.684037208557127 + ], + [ + "jak", + -13.68404483795166 + ], + [ + "▁downwards", + -13.684066772460938 + ], + [ + "▁refills", + -13.684078216552734 + ], + [ + "▁ATI", + -13.684086799621582 + ], + [ + "Led", + -13.684100151062012 + ], + [ + "▁Donuts", + -13.684106826782228 + ], + [ + "▁cubicle", + -13.684125900268556 + ], + [ + "▁buffs", + -13.684182167053224 + ], + [ + "▁ut", + -13.684191703796388 + ], + [ + "▁Baskets", + -13.68423843383789 + ], + [ + "▁firmness", + -13.684270858764648 + ], + [ + "▁reins", + -13.68434238433838 + ], + [ + "▁acoustics", + -13.684392929077148 + ], + [ + "▁Sag", + -13.684453964233398 + ], + [ + "▁ISPs", + -13.68448543548584 + ], + [ + "▁760", + -13.68449592590332 + ], + [ + "▁bile", + -13.684504508972168 + ], + [ + "▁Preferences", + -13.684574127197266 + ], + [ + "▁DOE", + -13.684577941894531 + ], + [ + "▁disclaims", + -13.684669494628906 + ], + [ + "▁DOI", + -13.684762954711914 + ], + [ + "▁detergents", + -13.684779167175291 + ], + [ + "▁layoffs", + -13.68500518798828 + ], + [ + "▁sweeps", + -13.685017585754396 + ], + [ + "rep", + -13.68502426147461 + ], + [ + "▁Melody", + -13.685062408447266 + ], + [ + "▁Poul", + -13.685081481933594 + ], + [ + "cep", + -13.685235023498535 + ], + [ + "▁hairline", + -13.685237884521484 + ], + [ + "▁dur", + -13.68525505065918 + ], + [ + "atus", + -13.68529224395752 + ], + [ + "▁Qaeda", + -13.68532943725586 + ], + [ + "-87", + -13.685391426086426 + ], + [ + "▁squared", + -13.685498237609863 + ], + [ + "▁governs", + -13.685503959655762 + ], + [ + "▁312", + -13.68553638458252 + ], + [ + "▁Ida", + -13.685558319091797 + ], + [ + "▁Proven", + -13.685662269592283 + ], + [ + "▁Anybody", + -13.685720443725586 + ], + [ + "▁Bras", + -13.685720443725586 + ], + [ + "pros", + -13.685768127441406 + ], + [ + "511", + -13.685772895812988 + ], + [ + "isan", + -13.685790061950684 + ], + [ + "Snap", + -13.685799598693848 + ], + [ + "▁erroneous", + -13.685834884643556 + ], + [ + "Instant", + -13.68605613708496 + ], + [ + "Responsibilities", + -13.686077117919922 + ], + [ + "▁Guernsey", + -13.686077117919922 + ], + [ + "▁apprehension", + -13.686077117919922 + ], + [ + "▁contemporaries", + -13.686077117919922 + ], + [ + "▁gondola", + -13.686077117919922 + ], + [ + "▁dilution", + -13.686091423034668 + ], + [ + "▁breathed", + -13.68610382080078 + ], + [ + "▁conferred", + -13.686104774475098 + ], + [ + "▁untold", + -13.686111450195312 + ], + [ + "▁Costello", + -13.686124801635742 + ], + [ + "nec", + -13.686138153076172 + ], + [ + "▁glossary", + -13.686147689819336 + ], + [ + "▁Buena", + -13.686148643493652 + ], + [ + "Divide", + -13.68615436553955 + ], + [ + "▁blob", + -13.686161041259766 + ], + [ + "▁Freddy", + -13.686184883117676 + ], + [ + "▁Fannie", + -13.686186790466309 + ], + [ + "▁skewers", + -13.686237335205078 + ], + [ + "▁choreographed", + -13.686238288879396 + ], + [ + "▁townhouses", + -13.686241149902344 + ], + [ + "Fran", + -13.68639850616455 + ], + [ + 
"▁hoard", + -13.686470985412598 + ], + [ + "▁Ecosystem", + -13.68647289276123 + ], + [ + "fie", + -13.686473846435549 + ], + [ + "▁undermining", + -13.68650245666504 + ], + [ + "▁marshmallow", + -13.686546325683594 + ], + [ + "▁subside", + -13.68659496307373 + ], + [ + "▁Suk", + -13.686607360839844 + ], + [ + "GET", + -13.68673324584961 + ], + [ + "▁Advocates", + -13.686752319335938 + ], + [ + "▁Sabrina", + -13.686779975891112 + ], + [ + "▁successors", + -13.686787605285645 + ], + [ + "nac", + -13.686817169189451 + ], + [ + "▁Augmented", + -13.686853408813477 + ], + [ + "▁Hitch", + -13.686859130859377 + ], + [ + "Robin", + -13.686982154846191 + ], + [ + "▁Serie", + -13.686984062194824 + ], + [ + "▁MASTER", + -13.68700885772705 + ], + [ + "▁TRE", + -13.687010765075684 + ], + [ + "249", + -13.687028884887695 + ], + [ + "▁Fas", + -13.687056541442873 + ], + [ + "▁deems", + -13.687064170837402 + ], + [ + "▁2).", + -13.687199592590332 + ], + [ + "izers", + -13.687223434448242 + ], + [ + "▁fanatic", + -13.687250137329102 + ], + [ + "▁3/8\"", + -13.687281608581545 + ], + [ + "▁clinging", + -13.687301635742188 + ], + [ + "▁Cambodian", + -13.68732452392578 + ], + [ + "nap", + -13.68744659423828 + ], + [ + "316", + -13.687448501586914 + ], + [ + "▁4-2", + -13.68748378753662 + ], + [ + "includes", + -13.68749713897705 + ], + [ + "ITE", + -13.687528610229492 + ], + [ + "lmer", + -13.68780517578125 + ], + [ + "▁stoppage", + -13.68781852722168 + ], + [ + "▁Digit", + -13.687829971313477 + ], + [ + "▁Adel", + -13.687989234924316 + ], + [ + "▁tanker", + -13.68801212310791 + ], + [ + "▁cartons", + -13.688053131103516 + ], + [ + "enni", + -13.688055038452148 + ], + [ + "ppel", + -13.688060760498049 + ], + [ + "▁colorway", + -13.688079833984377 + ], + [ + "▁fielder", + -13.688106536865234 + ], + [ + "▁suspicions", + -13.688175201416016 + ], + [ + "layered", + -13.688183784484863 + ], + [ + "603", + -13.68820571899414 + ], + [ + "▁stunts", + -13.688222885131836 + ], + [ + "▁Ethnic", + -13.688233375549316 + ], + [ + "Jobs", + -13.688265800476074 + ], + [ + "▁clove", + -13.68835735321045 + ], + [ + "DRA", + -13.688420295715332 + ], + [ + "▁carp", + -13.68842601776123 + ], + [ + "▁Jana", + -13.688546180725098 + ], + [ + "▁levers", + -13.688560485839844 + ], + [ + "▁knead", + -13.688581466674805 + ], + [ + "▁Schr", + -13.68863010406494 + ], + [ + "▁Lough", + -13.688636779785156 + ], + [ + "omba", + -13.68864917755127 + ], + [ + "▁thwart", + -13.68870735168457 + ], + [ + "▁WEEK", + -13.688721656799316 + ], + [ + "▁Sealing", + -13.688738822937012 + ], + [ + "ncies", + -13.688828468322754 + ], + [ + "▁smith", + -13.68883991241455 + ], + [ + "▁GAME", + -13.688888549804688 + ], + [ + "Ur", + -13.688892364501951 + ], + [ + "kyo", + -13.688912391662598 + ], + [ + "Vehicle", + -13.688932418823242 + ], + [ + "Conference", + -13.688942909240724 + ], + [ + "Cel", + -13.68894386291504 + ], + [ + "Brexit", + -13.688949584960938 + ], + [ + "AJ", + -13.68895149230957 + ], + [ + "▁charisma", + -13.689011573791504 + ], + [ + "Lion", + -13.68901252746582 + ], + [ + "▁cabaret", + -13.68908977508545 + ], + [ + "▁occult", + -13.689090728759766 + ], + [ + "tru", + -13.689091682434082 + ], + [ + "veld", + -13.689101219177246 + ], + [ + "▁papaya", + -13.689105033874512 + ], + [ + "grin", + -13.689108848571776 + ], + [ + "▁Zagreb", + -13.689111709594728 + ], + [ + "▁voila", + -13.689111709594728 + ], + [ + "▁NK", + -13.689122200012209 + ], + [ + "▁decomposition", + -13.689133644104004 + ], + [ + "▁Joaquin", + -13.689141273498535 + ], + [ + 
"▁baroque", + -13.689141273498535 + ], + [ + "▁microchip", + -13.689151763916016 + ], + [ + "▁Recession", + -13.689156532287598 + ], + [ + "▁tha", + -13.689164161682127 + ], + [ + "▁mimics", + -13.689170837402344 + ], + [ + "▁optionally", + -13.689181327819824 + ], + [ + "Justice", + -13.689187049865724 + ], + [ + "▁commendable", + -13.689196586608888 + ], + [ + "▁Filling", + -13.689233779907228 + ], + [ + "▁kph", + -13.689252853393556 + ], + [ + "▁Gallup", + -13.689278602600098 + ], + [ + "bile", + -13.689292907714844 + ], + [ + "hidden", + -13.689337730407717 + ], + [ + "CLC", + -13.689339637756348 + ], + [ + "eel", + -13.689410209655762 + ], + [ + "8.8", + -13.689465522766112 + ], + [ + "2.9", + -13.689467430114746 + ], + [ + "▁entrust", + -13.689493179321287 + ], + [ + "▁tampering", + -13.689566612243652 + ], + [ + "▁eLearning", + -13.6895751953125 + ], + [ + "▁hateful", + -13.689594268798828 + ], + [ + "▁overpowering", + -13.68960189819336 + ], + [ + "▁observes", + -13.689608573913574 + ], + [ + "Login", + -13.689621925354004 + ], + [ + "780", + -13.689705848693848 + ], + [ + "▁chicago", + -13.689892768859863 + ], + [ + "▁escalated", + -13.69003677368164 + ], + [ + "SCA", + -13.690038681030272 + ], + [ + "▁Lorraine", + -13.690093994140623 + ], + [ + "▁BAD", + -13.690203666687012 + ], + [ + "▁COLOR", + -13.690252304077148 + ], + [ + "▁Damn", + -13.69044303894043 + ], + [ + "enberger", + -13.69044589996338 + ], + [ + "ipa", + -13.690485000610352 + ], + [ + "rance", + -13.690521240234377 + ], + [ + "▁vigil", + -13.690521240234377 + ], + [ + "572", + -13.690542221069336 + ], + [ + "tir", + -13.690546035766602 + ], + [ + "▁Ey", + -13.69057559967041 + ], + [ + "▁Computational", + -13.690616607666016 + ], + [ + "▁$85", + -13.69071102142334 + ], + [ + "▁dislikes", + -13.690719604492188 + ], + [ + "▁wheeled", + -13.690728187561035 + ], + [ + "▁Pens", + -13.690735816955566 + ], + [ + "▁Apt", + -13.690771102905272 + ], + [ + "▁7.4", + -13.690825462341309 + ], + [ + "▁retract", + -13.690828323364258 + ], + [ + "egra", + -13.690892219543455 + ], + [ + "▁vegetarians", + -13.691007614135742 + ], + [ + "▁Vans", + -13.691014289855955 + ], + [ + "▁Biggest", + -13.691118240356444 + ], + [ + "▁Injuries", + -13.691146850585938 + ], + [ + "▁intuitively", + -13.691164016723633 + ], + [ + "elf", + -13.691316604614258 + ], + [ + "▁sie", + -13.69133472442627 + ], + [ + "Clinical", + -13.691386222839355 + ], + [ + "asser", + -13.691429138183594 + ], + [ + "Limit", + -13.691611289978027 + ], + [ + "verb", + -13.691645622253418 + ], + [ + "tenant", + -13.691676139831545 + ], + [ + "▁Rudolph", + -13.691685676574709 + ], + [ + "rium", + -13.691736221313477 + ], + [ + "▁Suggest", + -13.691781997680664 + ], + [ + "potential", + -13.691811561584473 + ], + [ + "arga", + -13.691856384277344 + ], + [ + "▁Resin", + -13.691856384277344 + ], + [ + "▁Kru", + -13.69185733795166 + ], + [ + "quire", + -13.691924095153809 + ], + [ + "bend", + -13.691999435424805 + ], + [ + "cycling", + -13.692008018493652 + ], + [ + "developer", + -13.692060470581056 + ], + [ + "835", + -13.692065238952637 + ], + [ + "noma", + -13.692093849182127 + ], + [ + "▁cayenne", + -13.692111015319824 + ], + [ + "▁extremism", + -13.69211769104004 + ], + [ + "▁Phelps", + -13.692118644714355 + ], + [ + "▁fodder", + -13.692127227783203 + ], + [ + "-68", + -13.692140579223633 + ], + [ + "▁rigged", + -13.692140579223633 + ], + [ + "▁Fedora", + -13.692161560058594 + ], + [ + "▁clogging", + -13.692163467407228 + ], + [ + "▁asymmetric", + -13.692172050476074 + ], 
+ [ + "▁Pins", + -13.692183494567873 + ], + [ + "uous", + -13.692222595214844 + ], + [ + "▁heartwarming", + -13.692232131958008 + ], + [ + "received", + -13.692266464233398 + ], + [ + "▁TRUE", + -13.692277908325195 + ], + [ + "▁fol", + -13.692304611206056 + ], + [ + "▁bestow", + -13.692331314086914 + ], + [ + "▁Tons", + -13.69235134124756 + ], + [ + "▁416", + -13.692373275756836 + ], + [ + "▁Simi", + -13.692395210266112 + ], + [ + "TIA", + -13.69243049621582 + ], + [ + "▁beech", + -13.69243049621582 + ], + [ + "oplasty", + -13.692434310913086 + ], + [ + "det", + -13.6925048828125 + ], + [ + "smooth", + -13.69252586364746 + ], + [ + "▁fuelled", + -13.692586898803713 + ], + [ + "▁qualifier", + -13.692648887634276 + ], + [ + "▁ringtones", + -13.69268798828125 + ], + [ + "Volunteers", + -13.692693710327148 + ], + [ + "▁Dealing", + -13.692744255065918 + ], + [ + "7/", + -13.692870140075684 + ], + [ + "▁accelerates", + -13.69288158416748 + ], + [ + "Israeli", + -13.692910194396973 + ], + [ + "▁bagel", + -13.692999839782717 + ], + [ + "▁4-3", + -13.693042755126951 + ], + [ + "ischen", + -13.693063735961914 + ], + [ + "minutes", + -13.693202018737791 + ], + [ + "ivist", + -13.693325996398926 + ], + [ + "▁VAR", + -13.693359375 + ], + [ + "647", + -13.693439483642578 + ], + [ + "lova", + -13.693443298339844 + ], + [ + "▁dredge", + -13.693503379821776 + ], + [ + "651", + -13.693544387817385 + ], + [ + "▁SUB", + -13.69357681274414 + ], + [ + "474", + -13.693633079528809 + ], + [ + "Task", + -13.693733215332031 + ], + [ + "+2", + -13.693767547607422 + ], + [ + "orama", + -13.693840980529783 + ], + [ + "gian", + -13.69391632080078 + ], + [ + "▁Kry", + -13.693920135498049 + ], + [ + "▁Kab", + -13.693921089172363 + ], + [ + "Steal", + -13.69393253326416 + ], + [ + "▁Guil", + -13.693992614746094 + ], + [ + "▁Immediate", + -13.69399642944336 + ], + [ + "▁smartly", + -13.694000244140623 + ], + [ + "ologies", + -13.694169998168944 + ], + [ + "Zip", + -13.694208145141602 + ], + [ + "▁replying", + -13.694254875183104 + ], + [ + "▁onshore", + -13.694422721862791 + ], + [ + "▁Fy", + -13.69443416595459 + ], + [ + "▁encode", + -13.694534301757812 + ], + [ + "thos", + -13.694572448730469 + ], + [ + "Gene", + -13.694575309753418 + ], + [ + "▁pvc", + -13.694585800170898 + ], + [ + "▁Bona", + -13.694706916809082 + ], + [ + "▁legalized", + -13.69474983215332 + ], + [ + "▁Bam", + -13.694759368896484 + ], + [ + "262", + -13.694769859313965 + ], + [ + "▁Baa", + -13.694889068603516 + ], + [ + "Continuing", + -13.69513988494873 + ], + [ + "▁Bologna", + -13.695140838623049 + ], + [ + "▁Guthrie", + -13.695140838623049 + ], + [ + "▁monasteries", + -13.695140838623049 + ], + [ + "message", + -13.695147514343262 + ], + [ + "▁biochemistry", + -13.695167541503906 + ], + [ + "▁Paterson", + -13.695171356201172 + ], + [ + "▁jamming", + -13.695195198059082 + ], + [ + "▁Verona", + -13.69524383544922 + ], + [ + "▁Gilmore", + -13.695244789123535 + ], + [ + "▁Clipart", + -13.695250511169434 + ], + [ + "▁numbness", + -13.695253372192385 + ], + [ + "▁rumble", + -13.69526481628418 + ], + [ + "lach", + -13.695310592651367 + ], + [ + "▁1887", + -13.695324897766112 + ], + [ + "▁Gotham", + -13.695359230041504 + ], + [ + "▁EASY", + -13.695443153381348 + ], + [ + "▁teh", + -13.695451736450195 + ], + [ + "fle", + -13.695462226867676 + ], + [ + "▁menace", + -13.695569038391112 + ], + [ + "08.", + -13.695573806762695 + ], + [ + "▁PORT", + -13.695592880249023 + ], + [ + "▁briefcase", + -13.695650100708008 + ], + [ + "mei", + -13.69578456878662 + ], + [ 
+ "▁princesses", + -13.69578742980957 + ], + [ + "▁Origins", + -13.695839881896973 + ], + [ + "astro", + -13.69587230682373 + ], + [ + "▁USSR", + -13.69587230682373 + ], + [ + "▁Denton", + -13.69588851928711 + ], + [ + "quet", + -13.69596004486084 + ], + [ + "▁inhaled", + -13.69598388671875 + ], + [ + "benefit", + -13.69602108001709 + ], + [ + "▁Arri", + -13.696122169494627 + ], + [ + "▁Bachelors", + -13.696124076843262 + ], + [ + "▁busier", + -13.69615077972412 + ], + [ + "labor", + -13.696174621582031 + ], + [ + "905", + -13.696189880371094 + ], + [ + "Native", + -13.696202278137209 + ], + [ + "chloro", + -13.696205139160156 + ], + [ + "▁transformers", + -13.696211814880373 + ], + [ + "▁Limits", + -13.696246147155762 + ], + [ + "▁issuers", + -13.696300506591797 + ], + [ + "▁sequins", + -13.696308135986328 + ], + [ + "▁encoder", + -13.696313858032228 + ], + [ + "cooled", + -13.696340560913086 + ], + [ + "ehl", + -13.696348190307615 + ], + [ + "▁JT", + -13.69636344909668 + ], + [ + "▁insurgents", + -13.696388244628906 + ], + [ + "gla", + -13.69646453857422 + ], + [ + "speaker", + -13.696490287780762 + ], + [ + "▁dampen", + -13.69649887084961 + ], + [ + "▁amazoncom", + -13.69658088684082 + ], + [ + "blend", + -13.696590423583984 + ], + [ + "GD", + -13.696647644042969 + ], + [ + "Dogs", + -13.6967191696167 + ], + [ + "▁Quan", + -13.696773529052734 + ], + [ + "▁Grades", + -13.696806907653809 + ], + [ + "Ser", + -13.696817398071287 + ], + [ + "▁Melvin", + -13.696856498718262 + ], + [ + "pore", + -13.696924209594728 + ], + [ + "Fourth", + -13.696990966796877 + ], + [ + "▁deadlock", + -13.69699764251709 + ], + [ + "spirit", + -13.697015762329102 + ], + [ + "▁Channels", + -13.697036743164062 + ], + [ + "▁Headboard", + -13.69704818725586 + ], + [ + "spread", + -13.69705295562744 + ], + [ + "▁Tired", + -13.69705295562744 + ], + [ + "gawa", + -13.69705867767334 + ], + [ + "ABS", + -13.697070121765137 + ], + [ + "aster", + -13.697211265563965 + ], + [ + "▁Tuna", + -13.697260856628418 + ], + [ + "▁herbicides", + -13.69728183746338 + ], + [ + "▁Ledger", + -13.69729995727539 + ], + [ + "734", + -13.697364807128906 + ], + [ + "andi", + -13.697394371032717 + ], + [ + "Yay", + -13.697418212890623 + ], + [ + "▁Carver", + -13.69746208190918 + ], + [ + "8.3", + -13.697556495666504 + ], + [ + "uge", + -13.697603225708008 + ], + [ + "fourth", + -13.697613716125488 + ], + [ + "▁wagons", + -13.697613716125488 + ], + [ + "▁inflate", + -13.697669982910156 + ], + [ + "▁Tableau", + -13.697684288024902 + ], + [ + "▁Clips", + -13.69770622253418 + ], + [ + "▁(2008).", + -13.69775390625 + ], + [ + "invent", + -13.697842597961426 + ], + [ + "▁PES", + -13.697872161865234 + ], + [ + "rosa", + -13.697911262512209 + ], + [ + "▁BEAUTIFUL", + -13.697978973388672 + ], + [ + "▁converters", + -13.698057174682615 + ], + [ + "▁claimants", + -13.69806671142578 + ], + [ + "loud", + -13.698118209838867 + ], + [ + "▁1-888-", + -13.698143005371094 + ], + [ + "▁chevron", + -13.698180198669434 + ], + [ + "▁horrifying", + -13.698180198669434 + ], + [ + "▁jacuzzi", + -13.698180198669434 + ], + [ + "▁liturgical", + -13.698180198669434 + ], + [ + "▁glimmer", + -13.698182106018066 + ], + [ + "▁Bugs", + -13.6981840133667 + ], + [ + "▁microcontroller", + -13.698190689086914 + ], + [ + "▁Isuzu", + -13.698209762573242 + ], + [ + "▁arisen", + -13.698212623596191 + ], + [ + "mL", + -13.698230743408203 + ], + [ + "▁homecoming", + -13.698234558105469 + ], + [ + "▁indigo", + -13.698235511779783 + ], + [ + "434", + -13.698243141174316 + ], + [ + 
"Possible", + -13.698249816894531 + ], + [ + "Corp", + -13.698259353637695 + ], + [ + "▁Reconstruction", + -13.698262214660645 + ], + [ + "Jessica", + -13.698266983032228 + ], + [ + "visible", + -13.698287963867188 + ], + [ + "****", + -13.698296546936035 + ], + [ + "Outstanding", + -13.698301315307615 + ], + [ + "▁Cyclone", + -13.69832706451416 + ], + [ + "NYC", + -13.69833278656006 + ], + [ + "▁feral", + -13.698384284973145 + ], + [ + "Christopher", + -13.698397636413574 + ], + [ + "▁frequented", + -13.698504447937012 + ], + [ + "▁dwellers", + -13.698528289794922 + ], + [ + "▁sensed", + -13.698529243469238 + ], + [ + "▁Breton", + -13.698541641235352 + ], + [ + "SEN", + -13.698543548583984 + ], + [ + "▁bruised", + -13.69858455657959 + ], + [ + "▁REG", + -13.698604583740234 + ], + [ + "▁insuring", + -13.69860553741455 + ], + [ + "▁Motorsports", + -13.698619842529297 + ], + [ + "▁Infiniti", + -13.698622703552246 + ], + [ + "▁Plc", + -13.698723793029783 + ], + [ + "Hire", + -13.69875144958496 + ], + [ + "▁skillset", + -13.69883918762207 + ], + [ + "▁fished", + -13.698884963989258 + ], + [ + "▁exudes", + -13.698901176452637 + ], + [ + "▁Jur", + -13.698914527893066 + ], + [ + "▁Downing", + -13.698942184448242 + ], + [ + "▁malfunctioning", + -13.699021339416504 + ], + [ + "▁Derry", + -13.699045181274414 + ], + [ + "ILL", + -13.69905948638916 + ], + [ + "▁Diner", + -13.699084281921388 + ], + [ + "RAGE", + -13.699121475219728 + ], + [ + "▁reluctantly", + -13.699122428894045 + ], + [ + "▁ADP", + -13.699139595031738 + ], + [ + "▁robustness", + -13.699159622192385 + ], + [ + "Offers", + -13.699177742004396 + ], + [ + "Ros", + -13.69918727874756 + ], + [ + "▁*******", + -13.69919204711914 + ], + [ + "▁NAR", + -13.699193000793455 + ], + [ + "307", + -13.699196815490724 + ], + [ + "▁Glover", + -13.699286460876465 + ], + [ + "▁Hoo", + -13.699333190917969 + ], + [ + "Dimensions", + -13.699346542358398 + ], + [ + "▁Klaus", + -13.69946002960205 + ], + [ + "▁bagels", + -13.699469566345217 + ], + [ + "PET", + -13.699548721313477 + ], + [ + "▁debtors", + -13.69967269897461 + ], + [ + "▁lesion", + -13.699682235717772 + ], + [ + "▁Remembrance", + -13.699736595153809 + ], + [ + "tures", + -13.699786186218262 + ], + [ + "▁Shifting", + -13.69979190826416 + ], + [ + "167", + -13.699798583984377 + ], + [ + "▁??????", + -13.6998291015625 + ], + [ + "▁cyclo", + -13.70007038116455 + ], + [ + "372", + -13.700080871582031 + ], + [ + "deb", + -13.70014190673828 + ], + [ + "▁WHITE", + -13.70032024383545 + ], + [ + "Util", + -13.700356483459473 + ], + [ + "07.", + -13.700387954711914 + ], + [ + "ogi", + -13.700450897216797 + ], + [ + "/60", + -13.70045280456543 + ], + [ + "▁fixation", + -13.700461387634276 + ], + [ + "▁LARGE", + -13.700477600097656 + ], + [ + "▁Spur", + -13.700496673583984 + ], + [ + "▁CDN", + -13.700641632080078 + ], + [ + "Boat", + -13.700663566589355 + ], + [ + "503", + -13.70069980621338 + ], + [ + "chrom", + -13.700807571411133 + ], + [ + "focal", + -13.700845718383787 + ], + [ + "Hours", + -13.700865745544434 + ], + [ + "▁Gam", + -13.700889587402344 + ], + [ + "Boot", + -13.700922966003418 + ], + [ + "▁undercut", + -13.700952529907228 + ], + [ + "golf", + -13.701004028320312 + ], + [ + "import", + -13.701022148132324 + ], + [ + "▁Cen", + -13.701054573059082 + ], + [ + "▁brooch", + -13.701099395751951 + ], + [ + "▁memorials", + -13.70114803314209 + ], + [ + "▁Pang", + -13.70115089416504 + ], + [ + "▁INTO", + -13.70115852355957 + ], + [ + "nunciation", + -13.701202392578123 + ], + [ + "tong", + 
-13.701211929321287 + ], + [ + "▁Territories", + -13.701229095458984 + ], + [ + "▁Tolkien", + -13.701229095458984 + ], + [ + "▁constituencies", + -13.701229095458984 + ], + [ + "▁venerable", + -13.701229095458984 + ], + [ + "▁McKinney", + -13.7012300491333 + ], + [ + "▁alpaca", + -13.701231002807615 + ], + [ + "▁gliding", + -13.701231002807615 + ], + [ + "▁Pontiac", + -13.701231956481934 + ], + [ + "▁Epsom", + -13.701234817504885 + ], + [ + "▁Tenant", + -13.701236724853516 + ], + [ + "▁rabies", + -13.701236724853516 + ], + [ + "▁unexplained", + -13.701251983642578 + ], + [ + "▁Frankenstein", + -13.701257705688477 + ], + [ + "▁insistence", + -13.70126247406006 + ], + [ + "▁Dewey", + -13.701264381408691 + ], + [ + "695", + -13.701269149780272 + ], + [ + "▁molten", + -13.701284408569336 + ], + [ + "▁Hillsborough", + -13.701292037963867 + ], + [ + "▁convoy", + -13.701324462890623 + ], + [ + "Albert", + -13.70133113861084 + ], + [ + "▁toffee", + -13.70135498046875 + ], + [ + "▁traceability", + -13.7013578414917 + ], + [ + "logists", + -13.701379776000977 + ], + [ + "Nonetheless", + -13.701383590698242 + ], + [ + "▁avocados", + -13.701394081115724 + ], + [ + "▁Cassandra", + -13.70148754119873 + ], + [ + "puri", + -13.701492309570312 + ], + [ + "cellular", + -13.701510429382324 + ], + [ + "tons", + -13.70156192779541 + ], + [ + "Dutch", + -13.70156478881836 + ], + [ + "RAC", + -13.701574325561523 + ], + [ + "▁Louie", + -13.701586723327637 + ], + [ + "▁doorbell", + -13.701610565185549 + ], + [ + "▁Honorable", + -13.701704978942873 + ], + [ + "Au", + -13.701788902282717 + ], + [ + "▁Keene", + -13.701821327209473 + ], + [ + "▁designations", + -13.701831817626951 + ], + [ + "ssing", + -13.701849937438965 + ], + [ + "DNS", + -13.701862335205078 + ], + [ + "▁ISD", + -13.701913833618164 + ], + [ + "/09/", + -13.701937675476074 + ], + [ + "▁ANA", + -13.701990127563477 + ], + [ + "▁Penang", + -13.702022552490234 + ], + [ + "▁embryos", + -13.70209503173828 + ], + [ + "▁Hutch", + -13.702095985412598 + ], + [ + "▁metaphors", + -13.702095985412598 + ], + [ + "▁BASE", + -13.702216148376465 + ], + [ + "▁Marley", + -13.70222282409668 + ], + [ + "Marine", + -13.702265739440918 + ], + [ + "▁basalt", + -13.70236587524414 + ], + [ + "▁asynchronous", + -13.70236873626709 + ], + [ + "▁hardcover", + -13.70246696472168 + ], + [ + "Ticket", + -13.702467918395996 + ], + [ + "cea", + -13.702482223510742 + ], + [ + "-78", + -13.702499389648438 + ], + [ + "▁Kami", + -13.70254135131836 + ], + [ + "Bruce", + -13.702580451965332 + ], + [ + "▁pulley", + -13.702618598937988 + ], + [ + "golden", + -13.702624320983888 + ], + [ + "▁OPEC", + -13.702666282653809 + ], + [ + "pri", + -13.702667236328123 + ], + [ + "▁Productivity", + -13.702682495117188 + ], + [ + "Carol", + -13.702780723571776 + ], + [ + "▁GOT", + -13.702799797058104 + ], + [ + "▁Od", + -13.7028226852417 + ], + [ + "449", + -13.702840805053713 + ], + [ + "▁broadening", + -13.702869415283203 + ], + [ + "▁Lover", + -13.702938079833984 + ], + [ + "linking", + -13.70295238494873 + ], + [ + "▁Reb", + -13.702958106994627 + ], + [ + "▁notched", + -13.703109741210938 + ], + [ + "labs", + -13.703117370605469 + ], + [ + "guitar", + -13.703168869018556 + ], + [ + "frag", + -13.703195571899414 + ], + [ + "736", + -13.70324993133545 + ], + [ + "xl", + -13.703255653381348 + ], + [ + "▁deflect", + -13.703282356262209 + ], + [ + "▁Divi", + -13.703349113464355 + ], + [ + "▁$42", + -13.703387260437012 + ], + [ + "▁Immortal", + -13.703399658203123 + ], + [ + "ELLO", + -13.703441619873049 
+ ], + [ + "439", + -13.703585624694824 + ], + [ + "▁spoonful", + -13.703661918640137 + ], + [ + "7,500", + -13.703680038452148 + ], + [ + "▁captivate", + -13.703686714172363 + ], + [ + "imba", + -13.703760147094728 + ], + [ + "▁persevere", + -13.70383071899414 + ], + [ + "bhi", + -13.7039155960083 + ], + [ + "705", + -13.703978538513184 + ], + [ + "▁jade", + -13.704092979431152 + ], + [ + "zak", + -13.704141616821287 + ], + [ + "▁synths", + -13.704150199890137 + ], + [ + "214", + -13.704183578491213 + ], + [ + "▁bono", + -13.704259872436523 + ], + [ + "▁retrospect", + -13.704270362854004 + ], + [ + "spira", + -13.70427703857422 + ], + [ + "▁Commodore", + -13.7042875289917 + ], + [ + "▁vehicular", + -13.7042875289917 + ], + [ + "▁mobilization", + -13.704288482666016 + ], + [ + "▁Hertfordshire", + -13.704289436340332 + ], + [ + "▁malnutrition", + -13.704289436340332 + ], + [ + "▁CONTENT", + -13.704290390014648 + ], + [ + "▁strife", + -13.704290390014648 + ], + [ + "▁appendix", + -13.704293251037598 + ], + [ + "▁slumber", + -13.704318046569824 + ], + [ + "▁timid", + -13.704349517822266 + ], + [ + "▁tallied", + -13.704386711120604 + ], + [ + "▁undead", + -13.704407691955566 + ], + [ + "▁Wreck", + -13.704421997070312 + ], + [ + "▁Allowance", + -13.704453468322754 + ], + [ + "Lunch", + -13.704519271850586 + ], + [ + "Whisk", + -13.704520225524902 + ], + [ + "Bathroom", + -13.704549789428713 + ], + [ + "holiday", + -13.70457363128662 + ], + [ + "▁ppt", + -13.704633712768556 + ], + [ + "▁BSc", + -13.704636573791504 + ], + [ + "▁esports", + -13.704704284667969 + ], + [ + "▁Favourite", + -13.704710006713867 + ], + [ + "▁Garry", + -13.70471477508545 + ], + [ + "▁Homework", + -13.70472812652588 + ], + [ + "▁denotes", + -13.704747200012209 + ], + [ + "▁Eliot", + -13.704794883728027 + ], + [ + "▁Paolo", + -13.70485019683838 + ], + [ + "rupt", + -13.70485782623291 + ], + [ + "brother", + -13.704867362976074 + ], + [ + "OY", + -13.704874992370604 + ], + [ + "▁EFT", + -13.704911231994627 + ], + [ + "▁sine", + -13.704944610595703 + ], + [ + "▁harnessing", + -13.704952239990234 + ], + [ + "▁classifications", + -13.70498275756836 + ], + [ + "graf", + -13.704983711242676 + ], + [ + "tronic", + -13.705000877380373 + ], + [ + "Dump", + -13.705001831054688 + ], + [ + "ppe", + -13.705029487609863 + ], + [ + "▁kindle", + -13.70509147644043 + ], + [ + "STON", + -13.705118179321287 + ], + [ + "▁Foul", + -13.705143928527832 + ], + [ + "171", + -13.70514965057373 + ], + [ + "▁topology", + -13.705244064331056 + ], + [ + "Cup", + -13.70540714263916 + ], + [ + "▁flared", + -13.705469131469728 + ], + [ + "broker", + -13.705507278442385 + ], + [ + "Patient", + -13.705533981323242 + ], + [ + "▁Heading", + -13.705551147460938 + ], + [ + "▁classically", + -13.705652236938477 + ], + [ + "▁intersect", + -13.705711364746094 + ], + [ + "▁sts", + -13.70574188232422 + ], + [ + "push", + -13.705904960632324 + ], + [ + "lini", + -13.70600700378418 + ], + [ + "Invest", + -13.706021308898926 + ], + [ + "ulis", + -13.706021308898926 + ], + [ + "oven", + -13.70603370666504 + ], + [ + "▁Mao", + -13.706035614013672 + ], + [ + "fighter", + -13.706068992614746 + ], + [ + "EEE", + -13.706151962280272 + ], + [ + "▁Swe", + -13.706184387207031 + ], + [ + "2500", + -13.706195831298828 + ], + [ + "cham", + -13.7062406539917 + ], + [ + "▁Ting", + -13.70627212524414 + ], + [ + "BRE", + -13.706320762634276 + ], + [ + "invest", + -13.706374168395996 + ], + [ + "Melt", + -13.706417083740234 + ], + [ + "▁Couldn", + -13.706440925598145 + ], + [ + 
"illard", + -13.706542015075684 + ], + [ + "▁Dei", + -13.706579208374023 + ], + [ + "▁breakouts", + -13.706586837768556 + ], + [ + "▁scoreless", + -13.7066068649292 + ], + [ + "▁Hod", + -13.706673622131348 + ], + [ + "▁Implant", + -13.70674991607666 + ], + [ + "utta", + -13.706847190856934 + ], + [ + "▁Salah", + -13.706897735595703 + ], + [ + "▁YO", + -13.706911087036133 + ], + [ + "371", + -13.706917762756348 + ], + [ + "▁STATE", + -13.706924438476562 + ], + [ + "▁tarnish", + -13.706964492797852 + ], + [ + "30%", + -13.70704746246338 + ], + [ + "thick", + -13.707049369812012 + ], + [ + "▁sped", + -13.707058906555176 + ], + [ + "▁milled", + -13.707197189331056 + ], + [ + "▁Annex", + -13.707201957702637 + ], + [ + "▁Magistrate", + -13.70722770690918 + ], + [ + "monitor", + -13.707252502441406 + ], + [ + "▁phentermine", + -13.707354545593262 + ], + [ + "▁neoprene", + -13.707356452941896 + ], + [ + "▁DREAM", + -13.707358360290527 + ], + [ + "▁Maestro", + -13.707362174987791 + ], + [ + "modal", + -13.707365036010742 + ], + [ + "▁erectile", + -13.707369804382324 + ], + [ + "▁distillation", + -13.70737648010254 + ], + [ + "▁Oahu", + -13.707475662231444 + ], + [ + "Dia", + -13.707481384277344 + ], + [ + "Rom", + -13.70754337310791 + ], + [ + "▁apostles", + -13.707562446594238 + ], + [ + "▁NAT", + -13.7075834274292 + ], + [ + "▁overcast", + -13.707594871520996 + ], + [ + "▁Eber", + -13.707619667053224 + ], + [ + "▁dependability", + -13.707649230957031 + ], + [ + "Remote", + -13.707691192626951 + ], + [ + "Retail", + -13.707691192626951 + ], + [ + "▁Colored", + -13.707723617553713 + ], + [ + "▁$1.6", + -13.707775115966797 + ], + [ + "Larry", + -13.707794189453123 + ], + [ + "▁Martian", + -13.70779800415039 + ], + [ + "▁Archery", + -13.70785427093506 + ], + [ + "-86", + -13.707894325256348 + ], + [ + "tane", + -13.707963943481444 + ], + [ + "▁Crunch", + -13.707964897155762 + ], + [ + "▁Québec", + -13.707993507385254 + ], + [ + "▁annoy", + -13.70804214477539 + ], + [ + "▁bedrock", + -13.708057403564451 + ], + [ + "depend", + -13.70815086364746 + ], + [ + "▁Poet", + -13.708187103271484 + ], + [ + "▁Petra", + -13.708227157592772 + ], + [ + "▁procured", + -13.708285331726074 + ], + [ + "gir", + -13.708317756652832 + ], + [ + "▁wp", + -13.708395957946776 + ], + [ + "703", + -13.708507537841797 + ], + [ + "▁keg", + -13.70852279663086 + ], + [ + "Artist", + -13.708539962768556 + ], + [ + "▁Consistent", + -13.708551406860352 + ], + [ + "hence", + -13.708632469177246 + ], + [ + "▁musk", + -13.708738327026367 + ], + [ + "pronounced", + -13.708741188049316 + ], + [ + "▁Gotta", + -13.708828926086426 + ], + [ + "▁treaties", + -13.70883846282959 + ], + [ + "▁Una", + -13.708847045898438 + ], + [ + "ELS", + -13.708885192871094 + ], + [ + "▁Heard", + -13.708999633789062 + ], + [ + "asu", + -13.70900058746338 + ], + [ + "Booking", + -13.709041595458984 + ], + [ + "▁sus", + -13.709047317504885 + ], + [ + "ASP", + -13.709086418151855 + ], + [ + "▁sax", + -13.709136962890623 + ], + [ + ".........", + -13.709195137023926 + ], + [ + "▁Socks", + -13.709283828735352 + ], + [ + "JE", + -13.709364891052246 + ], + [ + "▁XM", + -13.70946979522705 + ], + [ + "loy", + -13.70948886871338 + ], + [ + "▁Karim", + -13.709495544433594 + ], + [ + "8.2", + -13.70949649810791 + ], + [ + "▁marathons", + -13.70950984954834 + ], + [ + "▁232", + -13.709579467773438 + ], + [ + "▁Leh", + -13.70964813232422 + ], + [ + "TLE", + -13.709664344787598 + ], + [ + "bore", + -13.709768295288086 + ], + [ + "▁Performer", + -13.709779739379885 + ], + [ + 
"BOR", + -13.709824562072754 + ], + [ + "▁perpetuate", + -13.709829330444336 + ], + [ + "▁Mastery", + -13.709904670715332 + ], + [ + "▁premieres", + -13.709909439086914 + ], + [ + "upp", + -13.70994472503662 + ], + [ + "▁healthful", + -13.710017204284668 + ], + [ + "▁Achieve", + -13.710023880004885 + ], + [ + "▁endanger", + -13.710027694702148 + ], + [ + "▁Fife", + -13.710036277770996 + ], + [ + "▁FOOD", + -13.710075378417969 + ], + [ + "▁Initiatives", + -13.710100173950195 + ], + [ + "question", + -13.710164070129396 + ], + [ + "▁SLR", + -13.710174560546877 + ], + [ + "▁(200", + -13.710195541381836 + ], + [ + "▁Beige", + -13.71020221710205 + ], + [ + "FRA", + -13.71020793914795 + ], + [ + "▁positives", + -13.710265159606934 + ], + [ + "▁snip", + -13.710272789001465 + ], + [ + "▁FRONT", + -13.710285186767578 + ], + [ + "inner", + -13.71030044555664 + ], + [ + "▁zum", + -13.710352897644045 + ], + [ + "▁cemeteries", + -13.710432052612305 + ], + [ + "▁commemoration", + -13.710432052612305 + ], + [ + "▁Universidad", + -13.710434913635254 + ], + [ + "▁Grocery", + -13.71043586730957 + ], + [ + "▁sweepstakes", + -13.710436820983888 + ], + [ + "▁begged", + -13.710447311401367 + ], + [ + "▁Geometry", + -13.710453033447266 + ], + [ + "▁Culver", + -13.710491180419922 + ], + [ + "▁Doncaster", + -13.710522651672363 + ], + [ + "▁iodine", + -13.710552215576172 + ], + [ + "▁persecuted", + -13.7105712890625 + ], + [ + "▁ASC", + -13.710577011108398 + ], + [ + "rado", + -13.710589408874512 + ], + [ + "▁Bragg", + -13.710594177246094 + ], + [ + "▁semifinal", + -13.710601806640623 + ], + [ + "▁speck", + -13.710617065429688 + ], + [ + "fb", + -13.71061897277832 + ], + [ + "▁Siena", + -13.710647583007812 + ], + [ + "▁Anger", + -13.710692405700684 + ], + [ + "▁atrium", + -13.71076488494873 + ], + [ + "▁brute", + -13.71079158782959 + ], + [ + "▁bloating", + -13.710800170898438 + ], + [ + "Bachelor", + -13.710810661315918 + ], + [ + "Academic", + -13.710823059082031 + ], + [ + "▁\"...", + -13.710833549499512 + ], + [ + "▁catalysts", + -13.710838317871094 + ], + [ + "▁Disabled", + -13.710870742797852 + ], + [ + "▁nourished", + -13.710954666137695 + ], + [ + "-92", + -13.71096897125244 + ], + [ + "▁Cooker", + -13.710969924926758 + ], + [ + "▁comma", + -13.710996627807615 + ], + [ + "▁visor", + -13.711042404174805 + ], + [ + "243", + -13.711090087890623 + ], + [ + "▁Waco", + -13.711100578308104 + ], + [ + "▁coincides", + -13.711176872253418 + ], + [ + "▁churning", + -13.711199760437012 + ], + [ + "▁fillet", + -13.711244583129885 + ], + [ + "772", + -13.711249351501465 + ], + [ + "SEB", + -13.71127700805664 + ], + [ + "▁$1.4", + -13.711295127868652 + ], + [ + "tender", + -13.711318016052246 + ], + [ + "▁Boats", + -13.71133804321289 + ], + [ + "▁collages", + -13.711387634277344 + ], + [ + "windows", + -13.7114839553833 + ], + [ + "AKA", + -13.711527824401855 + ], + [ + "▁221", + -13.71156120300293 + ], + [ + "sah", + -13.711631774902344 + ], + [ + "9-6", + -13.711642265319824 + ], + [ + "OSS", + -13.71164608001709 + ], + [ + "schen", + -13.71169090270996 + ], + [ + "132", + -13.711695671081545 + ], + [ + "▁porches", + -13.71171760559082 + ], + [ + "▁Alain", + -13.711753845214844 + ], + [ + "▁macros", + -13.711773872375488 + ], + [ + "▁27%", + -13.711783409118652 + ], + [ + "▁Walla", + -13.711846351623535 + ], + [ + "▁Surveys", + -13.711889266967772 + ], + [ + "▁Droid", + -13.711932182312012 + ], + [ + "students", + -13.711992263793944 + ], + [ + "suke", + -13.712006568908691 + ], + [ + "▁Thatcher", + -13.712042808532717 
+ ], + [ + "▁Bite", + -13.712095260620115 + ], + [ + "Teacher", + -13.712113380432127 + ], + [ + "▁2010).", + -13.712115287780762 + ], + [ + "XI", + -13.71213150024414 + ], + [ + "176", + -13.712150573730469 + ], + [ + "▁secretive", + -13.712159156799316 + ], + [ + "▁DHA", + -13.712160110473633 + ], + [ + "nb", + -13.71230697631836 + ], + [ + "▁elf", + -13.712329864501951 + ], + [ + "▁KE", + -13.71237564086914 + ], + [ + "▁Entire", + -13.712517738342283 + ], + [ + "▁Mantra", + -13.7125244140625 + ], + [ + "Points", + -13.712533950805664 + ], + [ + "▁Lotto", + -13.71255588531494 + ], + [ + "Node", + -13.712574005126951 + ], + [ + "▁Camry", + -13.71257781982422 + ], + [ + "spam", + -13.712586402893066 + ], + [ + "▁SBS", + -13.71261501312256 + ], + [ + "▁Hmmm", + -13.71261978149414 + ], + [ + "▁REIT", + -13.712736129760742 + ], + [ + "▁Hurricanes", + -13.712779998779297 + ], + [ + "▁184", + -13.712825775146484 + ], + [ + "parents", + -13.712828636169434 + ], + [ + "ICC", + -13.712830543518066 + ], + [ + "▁betterment", + -13.712848663330078 + ], + [ + "▁propagate", + -13.712916374206545 + ], + [ + "▁STEP", + -13.712964057922363 + ], + [ + "▁McLaughlin", + -13.713006973266602 + ], + [ + "▁peptides", + -13.713022232055664 + ], + [ + "▁cinch", + -13.71306037902832 + ], + [ + "▁shielded", + -13.713094711303713 + ], + [ + "▁Scandinavia", + -13.713133811950684 + ], + [ + "zhi", + -13.713178634643556 + ], + [ + "solve", + -13.713260650634766 + ], + [ + "▁froze", + -13.71332550048828 + ], + [ + "EPS", + -13.713383674621582 + ], + [ + "▁ROAD", + -13.713407516479492 + ], + [ + "sheets", + -13.713417053222656 + ], + [ + "oire", + -13.713468551635742 + ], + [ + "▁undergrad", + -13.713494300842283 + ], + [ + "Pages", + -13.71349811553955 + ], + [ + "▁academies", + -13.713518142700195 + ], + [ + "▁amplification", + -13.713518142700195 + ], + [ + "▁assemblage", + -13.713518142700195 + ], + [ + "▁commemorating", + -13.713518142700195 + ], + [ + "▁obedient", + -13.713518142700195 + ], + [ + "▁sensibilities", + -13.713518142700195 + ], + [ + "▁synergies", + -13.713518142700195 + ], + [ + "2-6", + -13.713520050048828 + ], + [ + "▁Santorini", + -13.713520050048828 + ], + [ + "▁energizing", + -13.713522911071776 + ], + [ + "▁ArcGIS", + -13.71352481842041 + ], + [ + "▁Sausage", + -13.71352481842041 + ], + [ + "▁inhalation", + -13.713530540466309 + ], + [ + "▁Putnam", + -13.713552474975586 + ], + [ + "tral", + -13.713573455810549 + ], + [ + "▁Meth", + -13.713581085205078 + ], + [ + "▁2011)", + -13.713603019714355 + ], + [ + "▁PLLC", + -13.713644981384276 + ], + [ + "▁Vader", + -13.713736534118652 + ], + [ + "▁rejoin", + -13.713753700256348 + ], + [ + "▁Alternate", + -13.713756561279297 + ], + [ + "▁Tram", + -13.713834762573242 + ], + [ + "▁Conner", + -13.713852882385254 + ], + [ + "▁ger", + -13.713881492614746 + ], + [ + "▁Enfield", + -13.713955879211426 + ], + [ + "▁chairperson", + -13.713973999023438 + ], + [ + "Century", + -13.7139892578125 + ], + [ + "▁liberated", + -13.714008331298828 + ], + [ + "▁$15.", + -13.71401309967041 + ], + [ + "▁Maori", + -13.714116096496582 + ], + [ + "Loan", + -13.714296340942385 + ], + [ + "▁mike", + -13.714327812194824 + ], + [ + "▁kom", + -13.714329719543455 + ], + [ + "▁upstate", + -13.714345932006836 + ], + [ + "against", + -13.714349746704102 + ], + [ + "yce", + -13.714350700378418 + ], + [ + "075", + -13.714359283447266 + ], + [ + "Tiger", + -13.714462280273438 + ], + [ + "▁QUALITY", + -13.714547157287598 + ], + [ + "▁Lynne", + -13.714573860168455 + ], + [ + "Valid", + 
-13.7146577835083 + ], + [ + "▁Dino", + -13.71467113494873 + ], + [ + "vest", + -13.71469783782959 + ], + [ + "▁cashew", + -13.714746475219728 + ], + [ + "kki", + -13.714824676513672 + ], + [ + "Index", + -13.71489715576172 + ], + [ + "▁runaway", + -13.714966773986816 + ], + [ + "WU", + -13.71500301361084 + ], + [ + "645", + -13.715100288391112 + ], + [ + "▁openers", + -13.715137481689451 + ], + [ + "▁wasteful", + -13.71513843536377 + ], + [ + "▁tulips", + -13.715140342712402 + ], + [ + "▁TEST", + -13.715147018432615 + ], + [ + "▁Psalms", + -13.71517276763916 + ], + [ + "▁medalist", + -13.715181350708008 + ], + [ + "▁squarely", + -13.71524143218994 + ], + [ + "▁calmer", + -13.71524429321289 + ], + [ + "▁MER", + -13.715246200561523 + ], + [ + "▁latin", + -13.71525764465332 + ], + [ + "ERN", + -13.715259552001951 + ], + [ + "installation", + -13.715261459350586 + ], + [ + "▁flirt", + -13.715324401855469 + ], + [ + "Gbps", + -13.715332984924316 + ], + [ + "fear", + -13.715377807617188 + ], + [ + "▁cy", + -13.715411186218262 + ], + [ + "▁7.6", + -13.715444564819336 + ], + [ + "▁Endless", + -13.715473175048828 + ], + [ + "▁fryer", + -13.715492248535156 + ], + [ + "189", + -13.715503692626951 + ], + [ + "413", + -13.715503692626951 + ], + [ + "▁reliever", + -13.715665817260742 + ], + [ + "Sol", + -13.715691566467283 + ], + [ + "▁axial", + -13.715784072875977 + ], + [ + "▁quotas", + -13.715815544128418 + ], + [ + "pala", + -13.71599578857422 + ], + [ + "▁hugging", + -13.716024398803713 + ], + [ + "▁Laugh", + -13.71603298187256 + ], + [ + "609", + -13.716154098510742 + ], + [ + "▁DSM", + -13.716191291809082 + ], + [ + "▁Vance", + -13.71635627746582 + ], + [ + "▁Slope", + -13.716474533081056 + ], + [ + "Schools", + -13.716497421264648 + ], + [ + "▁preheat", + -13.71653938293457 + ], + [ + "verbal", + -13.716601371765137 + ], + [ + "▁navigator", + -13.71661376953125 + ], + [ + "▁prioritizing", + -13.71661376953125 + ], + [ + "▁susceptibility", + -13.71661376953125 + ], + [ + "▁sustenance", + -13.71661376953125 + ], + [ + "▁unfavorable", + -13.71661376953125 + ], + [ + "▁voicing", + -13.71661376953125 + ], + [ + "▁holographic", + -13.716623306274414 + ], + [ + "▁MONEY", + -13.71662425994873 + ], + [ + "▁ROS", + -13.716638565063477 + ], + [ + "▁Vero", + -13.716670989990234 + ], + [ + "bash", + -13.716678619384766 + ], + [ + "▁Cigar", + -13.716683387756348 + ], + [ + "tzel", + -13.71675968170166 + ], + [ + "▁TK", + -13.716856002807615 + ], + [ + "▁cocoon", + -13.71688747406006 + ], + [ + "kbps", + -13.716952323913574 + ], + [ + "▁repurposed", + -13.717005729675291 + ], + [ + "▁Kirsten", + -13.717039108276367 + ], + [ + "▁34-", + -13.717056274414062 + ], + [ + "assuming", + -13.717090606689451 + ], + [ + "00:1", + -13.717097282409668 + ], + [ + "▁Arthritis", + -13.7171049118042 + ], + [ + "348", + -13.71718406677246 + ], + [ + "▁TIP", + -13.717192649841309 + ], + [ + "favorite", + -13.71719741821289 + ], + [ + "▁clans", + -13.717241287231444 + ], + [ + "▁Greetings", + -13.71729564666748 + ], + [ + "▁Seo", + -13.717297554016112 + ], + [ + "shots", + -13.717330932617188 + ], + [ + "▁aux", + -13.7173490524292 + ], + [ + "▁biomarkers", + -13.717365264892578 + ], + [ + "▁thumbnails", + -13.717381477355955 + ], + [ + "Dining", + -13.71744155883789 + ], + [ + "IPP", + -13.717450141906738 + ], + [ + "EMS", + -13.717535018920898 + ], + [ + "Publication", + -13.717604637145996 + ], + [ + "▁Ghanaian", + -13.717698097229004 + ], + [ + "yuan", + -13.717719078063965 + ], + [ + "▁spawning", + -13.717720985412598 + ], 
+ [ + "wor", + -13.717780113220217 + ], + [ + "voy", + -13.7178316116333 + ], + [ + "isin", + -13.71785831451416 + ], + [ + "▁Inlet", + -13.71796989440918 + ], + [ + "▁fray", + -13.717971801757812 + ], + [ + "ipp", + -13.718092918395996 + ], + [ + "▁ADT", + -13.718099594116213 + ], + [ + "▁cron", + -13.718111038208008 + ], + [ + "xy", + -13.718165397644045 + ], + [ + "imon", + -13.718239784240724 + ], + [ + "ondo", + -13.71830940246582 + ], + [ + "▁lengthen", + -13.718310356140137 + ], + [ + "ssian", + -13.718338012695312 + ], + [ + "▁laborious", + -13.718377113342283 + ], + [ + "▁sideboard", + -13.718413352966309 + ], + [ + "workout", + -13.71850872039795 + ], + [ + "▁homegrown", + -13.71859645843506 + ], + [ + "▁Offline", + -13.718636512756348 + ], + [ + "▁resembled", + -13.718704223632812 + ], + [ + "▁RBIs", + -13.718714714050291 + ], + [ + "KY", + -13.71884059906006 + ], + [ + "▁sunroom", + -13.718853950500488 + ], + [ + "▁Predict", + -13.718897819519045 + ], + [ + "369", + -13.718974113464355 + ], + [ + "Centre", + -13.718996047973633 + ], + [ + "▁Aries", + -13.719024658203123 + ], + [ + "ouch", + -13.719186782836914 + ], + [ + "▁beautify", + -13.71921157836914 + ], + [ + "Joining", + -13.7194185256958 + ], + [ + "266", + -13.719507217407228 + ], + [ + "▁Heal", + -13.719517707824709 + ], + [ + "▁Moth", + -13.71953010559082 + ], + [ + "▁Samoa", + -13.719595909118652 + ], + [ + "quir", + -13.719656944274902 + ], + [ + "walker", + -13.719711303710938 + ], + [ + "▁impulsive", + -13.719719886779783 + ], + [ + "▁migratory", + -13.719719886779783 + ], + [ + "▁pandemic", + -13.719719886779783 + ], + [ + "▁Swindon", + -13.719720840454102 + ], + [ + "▁untimely", + -13.719721794128418 + ], + [ + "▁Matilda", + -13.71972370147705 + ], + [ + "▁Clutch", + -13.719725608825684 + ], + [ + "▁Cranberry", + -13.719731330871582 + ], + [ + "▁(1994)", + -13.719745635986328 + ], + [ + "▁McQueen", + -13.719751358032228 + ], + [ + "-1/2", + -13.719755172729492 + ], + [ + "▁Prasad", + -13.719767570495604 + ], + [ + "▁franchisee", + -13.719781875610352 + ], + [ + "▁parsing", + -13.7197904586792 + ], + [ + "▁CME", + -13.719793319702148 + ], + [ + "▁incompetent", + -13.71979522705078 + ], + [ + "▁guestrooms", + -13.719802856445312 + ], + [ + "▁DSTV", + -13.71983814239502 + ], + [ + "▁unbalanced", + -13.719850540161133 + ], + [ + "▁Strauss", + -13.719852447509766 + ], + [ + "149", + -13.719861030578612 + ], + [ + "▁Relocation", + -13.71987247467041 + ], + [ + "461", + -13.719950675964355 + ], + [ + "▁grating", + -13.719980239868164 + ], + [ + "▁Awakening", + -13.720004081726074 + ], + [ + "▁Recruiting", + -13.720023155212402 + ], + [ + "▁fob", + -13.720029830932615 + ], + [ + "echo", + -13.72003173828125 + ], + [ + "ussi", + -13.720049858093262 + ], + [ + "▁carer", + -13.720054626464844 + ], + [ + "snow", + -13.72007179260254 + ], + [ + "▁Lesley", + -13.72008991241455 + ], + [ + "▁accountancy", + -13.720091819763184 + ], + [ + "▁Garda", + -13.720131874084473 + ], + [ + "Stress", + -13.720151901245115 + ], + [ + "▁ushered", + -13.72016716003418 + ], + [ + "Arab", + -13.720220565795898 + ], + [ + "▁protested", + -13.720239639282228 + ], + [ + "▁Archie", + -13.720271110534668 + ], + [ + "▁inhibits", + -13.720271110534668 + ], + [ + "vul", + -13.720284461975098 + ], + [ + "Spirit", + -13.720295906066896 + ], + [ + "bags", + -13.72029972076416 + ], + [ + "Function", + -13.72031593322754 + ], + [ + "Universal", + -13.720340728759766 + ], + [ + "military", + -13.720340728759766 + ], + [ + "Membership", + -13.72039031982422 
+ ], + [ + "amazing", + -13.720406532287598 + ], + [ + "▁Cuts", + -13.72040843963623 + ], + [ + "flavored", + -13.720458030700684 + ], + [ + "▁Zee", + -13.720466613769531 + ], + [ + "472", + -13.720510482788086 + ], + [ + "mall", + -13.720565795898438 + ], + [ + "163", + -13.720636367797852 + ], + [ + "signal", + -13.720653533935549 + ], + [ + "Been", + -13.720672607421877 + ], + [ + "▁Viewing", + -13.720680236816406 + ], + [ + "▁keepers", + -13.720681190490724 + ], + [ + "▁quirks", + -13.720695495605469 + ], + [ + "▁Reyes", + -13.720744132995604 + ], + [ + "IRS", + -13.720783233642578 + ], + [ + "▁Sab", + -13.720791816711426 + ], + [ + "Traditionally", + -13.720810890197754 + ], + [ + "XO", + -13.72083854675293 + ], + [ + "311", + -13.720877647399902 + ], + [ + "false", + -13.720942497253418 + ], + [ + "40%", + -13.720980644226074 + ], + [ + "▁airspace", + -13.721034049987791 + ], + [ + "▁kohler", + -13.721054077148438 + ], + [ + "▁McCall", + -13.72105884552002 + ], + [ + ":19", + -13.72109031677246 + ], + [ + "▁600,000", + -13.721214294433594 + ], + [ + "▁Dol", + -13.72130298614502 + ], + [ + "bob", + -13.721309661865234 + ], + [ + "670", + -13.721319198608398 + ], + [ + "▁unreal", + -13.72134017944336 + ], + [ + "uate", + -13.721358299255373 + ], + [ + "▁digress", + -13.72136402130127 + ], + [ + "▁Grimm", + -13.721385955810549 + ], + [ + "issued", + -13.721426010131836 + ], + [ + "kke", + -13.721476554870604 + ], + [ + "▁218", + -13.721492767333984 + ], + [ + "foam", + -13.721549987792969 + ], + [ + "5.2", + -13.721579551696776 + ], + [ + "▁firewalls", + -13.72162628173828 + ], + [ + "▁beetle", + -13.721644401550291 + ], + [ + "▁Sections", + -13.72169303894043 + ], + [ + "▁commandments", + -13.721710205078123 + ], + [ + "▁impressively", + -13.72178077697754 + ], + [ + "▁Mule", + -13.72185516357422 + ], + [ + "▁NCR", + -13.721879005432127 + ], + [ + "▁Trades", + -13.721918106079102 + ], + [ + "▁Mister", + -13.722015380859377 + ], + [ + "Context", + -13.722039222717283 + ], + [ + "Glen", + -13.722111701965332 + ], + [ + "uffle", + -13.722177505493164 + ], + [ + "▁97%", + -13.722264289855955 + ], + [ + "▁GMP", + -13.72232723236084 + ], + [ + "▁Seas", + -13.72232723236084 + ], + [ + "▁KK", + -13.72243595123291 + ], + [ + "▁cleanly", + -13.72243881225586 + ], + [ + "▁Zombies", + -13.722444534301758 + ], + [ + "▁fellowships", + -13.72247314453125 + ], + [ + "▁Vaughn", + -13.722552299499512 + ], + [ + "▁Poo", + -13.722600936889648 + ], + [ + "▁BFF", + -13.722612380981444 + ], + [ + "mov", + -13.722620964050291 + ], + [ + "adding", + -13.722626686096191 + ], + [ + "meister", + -13.722639083862305 + ], + [ + "pathetic", + -13.722657203674316 + ], + [ + "▁rite", + -13.722753524780272 + ], + [ + "▁carburetor", + -13.722834587097168 + ], + [ + "▁espionage", + -13.722834587097168 + ], + [ + "lte", + -13.722840309143066 + ], + [ + "▁liberating", + -13.7228422164917 + ], + [ + "▁earbuds", + -13.72284698486328 + ], + [ + "Rourke", + -13.722856521606444 + ], + [ + "▁CLUB", + -13.72286319732666 + ], + [ + "▁Friedrich", + -13.722867965698242 + ], + [ + "▁memento", + -13.722875595092772 + ], + [ + "▁sar", + -13.722888946533203 + ], + [ + "premises", + -13.72292709350586 + ], + [ + "▁Counselling", + -13.722935676574709 + ], + [ + "▁Kennel", + -13.72295379638672 + ], + [ + "represented", + -13.722963333129885 + ], + [ + "▁Wag", + -13.722996711730955 + ], + [ + "atum", + -13.723002433776855 + ], + [ + "tay", + -13.723072052001951 + ], + [ + "▁sowing", + -13.723164558410645 + ], + [ + "▁Californian", + 
-13.723185539245604 + ], + [ + "▁Josephine", + -13.72323989868164 + ], + [ + "▁cosmos", + -13.723283767700195 + ], + [ + "▁Trains", + -13.723358154296877 + ], + [ + "▁Bells", + -13.723400115966797 + ], + [ + "Dean", + -13.723423957824709 + ], + [ + "Political", + -13.723543167114258 + ], + [ + "tem", + -13.723544120788574 + ], + [ + "Sitting", + -13.723566055297852 + ], + [ + "▁focussing", + -13.723572731018066 + ], + [ + "▁ATC", + -13.7235746383667 + ], + [ + "Adjust", + -13.723583221435549 + ], + [ + "ulf", + -13.723589897155762 + ], + [ + "44.", + -13.723590850830078 + ], + [ + "23)", + -13.723672866821287 + ], + [ + "ured", + -13.723702430725098 + ], + [ + "▁rotations", + -13.723716735839844 + ], + [ + "▁Macron", + -13.723725318908691 + ], + [ + "▁transistor", + -13.7237548828125 + ], + [ + "▁herbicide", + -13.72376537322998 + ], + [ + "▁odour", + -13.723769187927246 + ], + [ + "▁pel", + -13.723807334899902 + ], + [ + "024", + -13.72380828857422 + ], + [ + "▁determinants", + -13.723810195922852 + ], + [ + "BIT", + -13.723884582519531 + ], + [ + "▁Tulip", + -13.723907470703123 + ], + [ + "hma", + -13.723918914794922 + ], + [ + "▁HMO", + -13.723936080932615 + ], + [ + "▁rejuvenated", + -13.72394847869873 + ], + [ + "cache", + -13.723958969116213 + ], + [ + "743", + -13.72396469116211 + ], + [ + "▁Tet", + -13.724039077758787 + ], + [ + "Quote", + -13.724138259887695 + ], + [ + "▁relic", + -13.724148750305176 + ], + [ + "-58", + -13.724149703979492 + ], + [ + "▁democrat", + -13.724156379699709 + ], + [ + "exam", + -13.724181175231934 + ], + [ + "▁26%", + -13.724218368530272 + ], + [ + "▁heats", + -13.724218368530272 + ], + [ + "▁189", + -13.724224090576172 + ], + [ + "hitting", + -13.72429370880127 + ], + [ + "▁Cari", + -13.724342346191406 + ], + [ + "▁Toxic", + -13.724401473999023 + ], + [ + "▁tilted", + -13.724474906921388 + ], + [ + "▁Skye", + -13.724529266357422 + ], + [ + "eastern", + -13.724627494812012 + ], + [ + "kus", + -13.724664688110352 + ], + [ + "▁Gong", + -13.724664688110352 + ], + [ + "▁Meng", + -13.724685668945312 + ], + [ + "nton", + -13.724713325500488 + ], + [ + "Trail", + -13.724724769592283 + ], + [ + "Rip", + -13.724769592285156 + ], + [ + "▁Fallon", + -13.72479248046875 + ], + [ + "▁Enlightenment", + -13.724833488464355 + ], + [ + "▁Troll", + -13.72484302520752 + ], + [ + "manage", + -13.724873542785645 + ], + [ + "hausen", + -13.724946022033691 + ], + [ + "▁WTO", + -13.724977493286133 + ], + [ + "rut", + -13.725017547607422 + ], + [ + "▁dilemmas", + -13.725057601928713 + ], + [ + "Strengthen", + -13.725154876708984 + ], + [ + "▁Slovak", + -13.72515869140625 + ], + [ + "acc", + -13.72520351409912 + ], + [ + "▁overturn", + -13.725298881530762 + ], + [ + "▁Grilled", + -13.725316047668455 + ], + [ + "▁scraper", + -13.725377082824709 + ], + [ + "▁HH", + -13.725380897521973 + ], + [ + "▁jurors", + -13.725399017333984 + ], + [ + "Andrea", + -13.725464820861816 + ], + [ + "kita", + -13.725479125976562 + ], + [ + "▁COST", + -13.72559928894043 + ], + [ + "lz", + -13.725664138793944 + ], + [ + "▁blister", + -13.725701332092283 + ], + [ + "▁Congregation", + -13.725726127624512 + ], + [ + "▁flushed", + -13.72572898864746 + ], + [ + "▁Hung", + -13.725794792175291 + ], + [ + "HOL", + -13.72590446472168 + ], + [ + "▁labyrinth", + -13.725937843322754 + ], + [ + "BON", + -13.725942611694336 + ], + [ + "▁Montenegro", + -13.725959777832031 + ], + [ + "▁Pueblo", + -13.725959777832031 + ], + [ + "▁Saskatoon", + -13.725959777832031 + ], + [ + "▁Vauxhall", + -13.725959777832031 + ], + [ + 
"▁clumsy", + -13.725959777832031 + ], + [ + "▁consummate", + -13.725959777832031 + ], + [ + "▁pendulum", + -13.725959777832031 + ], + [ + "▁vernacular", + -13.725959777832031 + ], + [ + "▁telephony", + -13.725961685180664 + ], + [ + "▁chiefly", + -13.725967407226562 + ], + [ + "▁rites", + -13.725976943969728 + ], + [ + "▁legality", + -13.725979804992676 + ], + [ + "▁Saf", + -13.725985527038574 + ], + [ + "▁gastronomic", + -13.726024627685549 + ], + [ + "STR", + -13.726030349731444 + ], + [ + "Launch", + -13.726045608520508 + ], + [ + "▁Cullen", + -13.72606372833252 + ], + [ + "liners", + -13.726082801818848 + ], + [ + "▁husk", + -13.72609043121338 + ], + [ + "equal", + -13.726151466369627 + ], + [ + "▁tornadoes", + -13.72624969482422 + ], + [ + "RIN", + -13.726253509521484 + ], + [ + "alism", + -13.726405143737791 + ], + [ + "▁calibre", + -13.72645664215088 + ], + [ + "▁Organized", + -13.726483345031738 + ], + [ + "▁LW", + -13.726518630981444 + ], + [ + "▁unifying", + -13.726543426513672 + ], + [ + "▁Eh", + -13.72657871246338 + ], + [ + "URA", + -13.72663116455078 + ], + [ + "▁pours", + -13.726669311523438 + ], + [ + "▁prong", + -13.726670265197754 + ], + [ + "awesome", + -13.726765632629396 + ], + [ + "Attention", + -13.726806640625 + ], + [ + "Employers", + -13.726808547973633 + ], + [ + "Range", + -13.72683048248291 + ], + [ + "▁nonstick", + -13.726887702941896 + ], + [ + "737", + -13.726899147033691 + ], + [ + "▁pigeon", + -13.72691249847412 + ], + [ + "Prevent", + -13.726975440979004 + ], + [ + "amar", + -13.726998329162598 + ], + [ + "▁Farr", + -13.727057456970217 + ], + [ + "Des", + -13.727096557617188 + ], + [ + "▁prism", + -13.727120399475098 + ], + [ + "▁Russo", + -13.727195739746094 + ], + [ + "fitted", + -13.727203369140623 + ], + [ + "▁Installations", + -13.727261543273926 + ], + [ + "lone", + -13.727302551269531 + ], + [ + "▁Marker", + -13.72744083404541 + ], + [ + "direction", + -13.727474212646484 + ], + [ + "▁Sung", + -13.727517127990724 + ], + [ + "ologie", + -13.727529525756836 + ], + [ + "earn", + -13.727652549743652 + ], + [ + "Terms", + -13.72767162322998 + ], + [ + "Mil", + -13.72776222229004 + ], + [ + "▁guesses", + -13.727856636047363 + ], + [ + "bbe", + -13.727932929992676 + ], + [ + "liberal", + -13.72799587249756 + ], + [ + "mira", + -13.728074073791504 + ], + [ + "▁freebie", + -13.72809600830078 + ], + [ + "▁Sofas", + -13.728222846984863 + ], + [ + "▁AMG", + -13.728243827819824 + ], + [ + "▁Jian", + -13.728289604187012 + ], + [ + "▁werden", + -13.728315353393556 + ], + [ + "▁BEA", + -13.72831916809082 + ], + [ + "▁Lob", + -13.72832489013672 + ], + [ + "▁1945.", + -13.728328704833984 + ], + [ + "▁sedans", + -13.728336334228516 + ], + [ + "thru", + -13.728349685668944 + ], + [ + "abu", + -13.728357315063477 + ], + [ + "▁stub", + -13.728371620178224 + ], + [ + "▁Fang", + -13.728418350219728 + ], + [ + "▁legitimately", + -13.72842788696289 + ], + [ + "▁retardant", + -13.728485107421877 + ], + [ + "45.", + -13.72866153717041 + ], + [ + "▁Solve", + -13.72867202758789 + ], + [ + "467", + -13.728697776794434 + ], + [ + "▁sever", + -13.72870922088623 + ], + [ + "0001", + -13.728742599487305 + ], + [ + "▁ashore", + -13.728751182556152 + ], + [ + "▁8.4", + -13.72876262664795 + ], + [ + "▁30-40", + -13.728766441345217 + ], + [ + "eiro", + -13.728827476501465 + ], + [ + "flo", + -13.728883743286133 + ], + [ + "▁4500", + -13.728959083557127 + ], + [ + "▁Sca", + -13.728965759277344 + ], + [ + "rge", + -13.72899341583252 + ], + [ + "xing", + -13.729080200195312 + ], + [ + 
"▁mannequin", + -13.72909450531006 + ], + [ + "▁mezzanine", + -13.72909450531006 + ], + [ + "▁obnoxious", + -13.72909450531006 + ], + [ + "▁Mallorca", + -13.729103088378906 + ], + [ + "▁paperless", + -13.729114532470703 + ], + [ + "▁Accenture", + -13.729116439819336 + ], + [ + "▁Hilary", + -13.729143142700195 + ], + [ + "023", + -13.729150772094728 + ], + [ + "▁evasion", + -13.729150772094728 + ], + [ + "▁Chopra", + -13.729162216186523 + ], + [ + "▁babysitting", + -13.72916316986084 + ], + [ + "said", + -13.7291841506958 + ], + [ + "▁neglecting", + -13.72920036315918 + ], + [ + "▁Dela", + -13.729203224182127 + ], + [ + "▁sparingly", + -13.72922134399414 + ], + [ + "▁Mustard", + -13.729233741760254 + ], + [ + "▁prequel", + -13.729278564453123 + ], + [ + "331", + -13.729310989379885 + ], + [ + "▁ICU", + -13.72931957244873 + ], + [ + "▁Pearce", + -13.729394912719728 + ], + [ + "▁Downey", + -13.729432106018066 + ], + [ + "▁AKA", + -13.729543685913086 + ], + [ + "▁hamster", + -13.729570388793944 + ], + [ + "▁rubric", + -13.729572296142578 + ], + [ + "Pod", + -13.729589462280272 + ], + [ + "▁syncing", + -13.72961711883545 + ], + [ + "▁Bounce", + -13.729618072509766 + ], + [ + "marks", + -13.729681968688965 + ], + [ + "template", + -13.729823112487791 + ], + [ + "▁sautéed", + -13.729853630065918 + ], + [ + "▁clutches", + -13.729859352111816 + ], + [ + "▁37%", + -13.729901313781738 + ], + [ + "▁Sever", + -13.72990608215332 + ], + [ + "builders", + -13.72990894317627 + ], + [ + "Canon", + -13.72993278503418 + ], + [ + "▁Banker", + -13.729997634887695 + ], + [ + "▁stallion", + -13.73000144958496 + ], + [ + "▁fathom", + -13.730024337768556 + ], + [ + "99)", + -13.730051040649414 + ], + [ + "▁Siva", + -13.73011875152588 + ], + [ + "▁orchid", + -13.73012351989746 + ], + [ + "▁grove", + -13.730216979980469 + ], + [ + "arti", + -13.730225563049316 + ], + [ + "▁mystic", + -13.730274200439451 + ], + [ + ".5′′", + -13.73029327392578 + ], + [ + "▁Abram", + -13.73029327392578 + ], + [ + "Impact", + -13.730299949645996 + ], + [ + "▁580", + -13.730316162109377 + ], + [ + "▁coincided", + -13.730423927307127 + ], + [ + "▁naturalist", + -13.730443954467772 + ], + [ + "▁earphones", + -13.730518341064451 + ], + [ + "▁Shim", + -13.730586051940918 + ], + [ + "Bath", + -13.73061466217041 + ], + [ + "▁Conclusion", + -13.730660438537598 + ], + [ + "▁DRIVE", + -13.730671882629396 + ], + [ + "bare", + -13.730724334716797 + ], + [ + "Fu", + -13.730745315551758 + ], + [ + "576", + -13.730756759643556 + ], + [ + "wrenching", + -13.730792045593262 + ], + [ + "▁Elect", + -13.73080348968506 + ], + [ + "shark", + -13.730932235717772 + ], + [ + "▁KP", + -13.730989456176758 + ], + [ + "sir", + -13.731061935424805 + ], + [ + "urb", + -13.731114387512209 + ], + [ + "▁transcends", + -13.731120109558104 + ], + [ + "▁COS", + -13.731135368347168 + ], + [ + "▁Parcel", + -13.731167793273926 + ], + [ + "loch", + -13.731233596801758 + ], + [ + "▁Governments", + -13.731247901916504 + ], + [ + "▁Kea", + -13.731298446655272 + ], + [ + "crew", + -13.731342315673828 + ], + [ + "Leo", + -13.73139762878418 + ], + [ + "▁Stout", + -13.731398582458496 + ], + [ + "▁Wishing", + -13.731405258178713 + ], + [ + "▁electrically", + -13.73142147064209 + ], + [ + "▁Void", + -13.731433868408203 + ], + [ + "▁Nicol", + -13.731475830078123 + ], + [ + "▁Outlaw", + -13.731477737426758 + ], + [ + "▁Hav", + -13.731485366821287 + ], + [ + "▁Erika", + -13.73149871826172 + ], + [ + "iary", + -13.731512069702148 + ], + [ + "▁Norm", + -13.731557846069336 + ], + [ + "Geek", 
+ -13.731569290161133 + ], + [ + "cient", + -13.731590270996094 + ], + [ + "handedly", + -13.731611251831056 + ], + [ + "▁reactors", + -13.731658935546877 + ], + [ + "▁Maze", + -13.731682777404783 + ], + [ + "▁erode", + -13.73179817199707 + ], + [ + "▁transports", + -13.731823921203612 + ], + [ + "▁Riva", + -13.731922149658203 + ], + [ + "▁80-", + -13.73194408416748 + ], + [ + "▁anatomical", + -13.731996536254885 + ], + [ + "▁Picks", + -13.732013702392578 + ], + [ + "▁Cue", + -13.732067108154297 + ], + [ + "487", + -13.73211669921875 + ], + [ + "▁Converse", + -13.732148170471191 + ], + [ + "filling", + -13.732215881347656 + ], + [ + "▁Lazy", + -13.732215881347656 + ], + [ + "▁Tallahassee", + -13.732239723205566 + ], + [ + "▁radiance", + -13.732239723205566 + ], + [ + "▁lasagna", + -13.732242584228516 + ], + [ + "▁pedicure", + -13.73224639892578 + ], + [ + "▁(888)", + -13.732247352600098 + ], + [ + "▁Amtrak", + -13.73225212097168 + ], + [ + "▁Greyhound", + -13.73225212097168 + ], + [ + "▁forfeiture", + -13.732258796691896 + ], + [ + "▁Cybersecurity", + -13.732260704040527 + ], + [ + "▁Remington", + -13.732281684875488 + ], + [ + "▁PowerShell", + -13.732295036315918 + ], + [ + "Mayor", + -13.732317924499512 + ], + [ + "▁unstructured", + -13.732330322265623 + ], + [ + "▁erection", + -13.732404708862305 + ], + [ + "zione", + -13.732465744018556 + ], + [ + "▁Gabba", + -13.732468605041504 + ], + [ + "mr", + -13.732501983642578 + ], + [ + "▁Stony", + -13.7326021194458 + ], + [ + "▁Stall", + -13.732659339904783 + ], + [ + "-76", + -13.732715606689451 + ], + [ + "▁morals", + -13.732717514038086 + ], + [ + "▁aneurysm", + -13.732730865478516 + ], + [ + "▁jerky", + -13.73280906677246 + ], + [ + "▁ASE", + -13.732810974121094 + ], + [ + "handle", + -13.732818603515623 + ], + [ + "▁whispered", + -13.73293685913086 + ], + [ + "▁DLL", + -13.732951164245604 + ], + [ + "▁recieve", + -13.73298168182373 + ], + [ + "▁Bootstrap", + -13.732991218566896 + ], + [ + ".'\"", + -13.733024597167969 + ], + [ + "▁Nails", + -13.733038902282717 + ], + [ + "▁situational", + -13.733060836791992 + ], + [ + "Export", + -13.733068466186523 + ], + [ + "▁kWh", + -13.733159065246582 + ], + [ + "▁Sparkle", + -13.733169555664062 + ], + [ + "Disclosure", + -13.733173370361328 + ], + [ + "▁Crossword", + -13.73317527770996 + ], + [ + "academic", + -13.73317813873291 + ], + [ + "▁ppm", + -13.733202934265137 + ], + [ + "chairs", + -13.733220100402832 + ], + [ + "identified", + -13.733223915100098 + ], + [ + "▁perennials", + -13.733229637145996 + ], + [ + "▁1894", + -13.733232498168944 + ], + [ + "▁japan", + -13.733254432678224 + ], + [ + "Instagram", + -13.733261108398438 + ], + [ + "▁grievance", + -13.733281135559082 + ], + [ + "▁wafer", + -13.733295440673828 + ], + [ + "▁prick", + -13.733331680297852 + ], + [ + "▁waveform", + -13.733396530151367 + ], + [ + "▁Sadie", + -13.733407974243164 + ], + [ + "leman", + -13.733487129211426 + ], + [ + "▁kHz", + -13.733503341674805 + ], + [ + "▁gorilla", + -13.733514785766602 + ], + [ + "▁Accurate", + -13.733556747436523 + ], + [ + "▁coworker", + -13.733558654785156 + ], + [ + "▁Picking", + -13.733585357666016 + ], + [ + "json", + -13.733643531799316 + ], + [ + "▁hare", + -13.73367404937744 + ], + [ + "ssl", + -13.733683586120604 + ], + [ + "-79", + -13.733713150024414 + ], + [ + "▁Barcode", + -13.733718872070312 + ], + [ + "meeting", + -13.733746528625488 + ], + [ + "▁Supplements", + -13.73378849029541 + ], + [ + "▁kann", + -13.733840942382812 + ], + [ + "▁grantee", + -13.733863830566406 + ], + [ 
+ "▁Peg", + -13.733881950378418 + ], + [ + "zilla", + -13.733929634094238 + ], + [ + "▁manic", + -13.7340087890625 + ], + [ + "▁checkpoints", + -13.734017372131348 + ], + [ + "▁homebuyers", + -13.73401927947998 + ], + [ + "Truck", + -13.734041213989258 + ], + [ + "gator", + -13.734145164489746 + ], + [ + "▁frontman", + -13.734148025512695 + ], + [ + "▁canvases", + -13.734160423278809 + ], + [ + "▁McDonalds", + -13.734169960021973 + ], + [ + "▁Dep", + -13.734188079833984 + ], + [ + "▁drenched", + -13.734230041503906 + ], + [ + "Were", + -13.734282493591309 + ], + [ + "▁2.0.", + -13.73430347442627 + ], + [ + "▁Impossible", + -13.734312057495115 + ], + [ + "▁reeling", + -13.73439121246338 + ], + [ + "▁pita", + -13.734448432922363 + ], + [ + "▁Kana", + -13.734455108642578 + ], + [ + "▁Courage", + -13.734469413757324 + ], + [ + "▁semantics", + -13.734533309936523 + ], + [ + "▁EJ", + -13.734597206115724 + ], + [ + "Agreed", + -13.734607696533203 + ], + [ + "▁Concierge", + -13.73463535308838 + ], + [ + "▁Tutor", + -13.734733581542969 + ], + [ + "gaz", + -13.734736442565918 + ], + [ + "▁castor", + -13.734766006469728 + ], + [ + "hash", + -13.73477268218994 + ], + [ + "▁Pere", + -13.734781265258787 + ], + [ + "▁fatally", + -13.734795570373535 + ], + [ + "▁Pisa", + -13.734796524047852 + ], + [ + "▁SIGN", + -13.73482608795166 + ], + [ + "phenyl", + -13.734906196594238 + ], + [ + "dorf", + -13.734926223754885 + ], + [ + "▁shootout", + -13.734966278076172 + ], + [ + "▁2030.", + -13.734989166259766 + ], + [ + "STE", + -13.735025405883787 + ], + [ + "▁pines", + -13.735039710998535 + ], + [ + "▁Advertisement", + -13.735047340393066 + ], + [ + "BJ", + -13.7350492477417 + ], + [ + "uar", + -13.73508071899414 + ], + [ + "Sri", + -13.735090255737305 + ], + [ + "▁LAS", + -13.735103607177734 + ], + [ + "▁Chas", + -13.735158920288086 + ], + [ + "LEX", + -13.735218048095703 + ], + [ + "▁forested", + -13.735222816467283 + ], + [ + "oxide", + -13.73523235321045 + ], + [ + "▁Anglia", + -13.735249519348145 + ], + [ + "338", + -13.735295295715332 + ], + [ + "▁bien", + -13.735321998596191 + ], + [ + "▁Guelph", + -13.735394477844238 + ], + [ + "▁PIXMA", + -13.735394477844238 + ], + [ + "▁Utrecht", + -13.735394477844238 + ], + [ + "▁alimony", + -13.735394477844238 + ], + [ + "▁sizzling", + -13.735394477844238 + ], + [ + "▁Dorchester", + -13.735397338867188 + ], + [ + "▁Hitchcock", + -13.735397338867188 + ], + [ + "▁SXSW", + -13.73539924621582 + ], + [ + "▁pictorial", + -13.73539924621582 + ], + [ + "▁loaves", + -13.735400199890137 + ], + [ + "▁metaphysical", + -13.7354097366333 + ], + [ + "▁Crossroads", + -13.73541259765625 + ], + [ + "▁Saviour", + -13.73541259765625 + ], + [ + "▁Sera", + -13.735414505004885 + ], + [ + "▁Rooney", + -13.7354154586792 + ], + [ + "▁ratified", + -13.735419273376465 + ], + [ + "379", + -13.735426902770996 + ], + [ + "▁Schwarz", + -13.735427856445312 + ], + [ + "▁Sharpe", + -13.735435485839844 + ], + [ + "▁snacking", + -13.73543643951416 + ], + [ + "▁mouthwatering", + -13.73545265197754 + ], + [ + "▁astonished", + -13.73545742034912 + ], + [ + "▁criticizing", + -13.735464096069336 + ], + [ + "▁whatsapp", + -13.735503196716309 + ], + [ + "▁Fabian", + -13.735532760620115 + ], + [ + "▁playroom", + -13.735559463500977 + ], + [ + "DIC", + -13.73558235168457 + ], + [ + "▁painstakingly", + -13.735586166381836 + ], + [ + "▁Biol", + -13.73567008972168 + ], + [ + "▁Recall", + -13.735697746276855 + ], + [ + "▁commanders", + -13.735750198364258 + ], + [ + "562", + -13.735830307006836 + ], + [ + 
"▁grinders", + -13.73586082458496 + ], + [ + "▁0.25", + -13.735878944396973 + ], + [ + "1994", + -13.735915184020996 + ], + [ + "▁printout", + -13.73595905303955 + ], + [ + "▁cancelling", + -13.73597240447998 + ], + [ + "▁FH", + -13.73598861694336 + ], + [ + "Deb", + -13.736007690429688 + ], + [ + "904", + -13.736098289489746 + ], + [ + "▁EMA", + -13.736122131347656 + ], + [ + "eny", + -13.736161231994627 + ], + [ + "Normal", + -13.736190795898438 + ], + [ + "228", + -13.736204147338867 + ], + [ + "▁recoil", + -13.736224174499512 + ], + [ + "Importer", + -13.736249923706056 + ], + [ + "uted", + -13.736255645751951 + ], + [ + "▁cheques", + -13.736297607421877 + ], + [ + "▁proofs", + -13.736312866210938 + ], + [ + "▁assassin", + -13.736336708068848 + ], + [ + "▁furthering", + -13.736368179321287 + ], + [ + "▁Maven", + -13.73642635345459 + ], + [ + "Amanda", + -13.73645305633545 + ], + [ + "▁ailment", + -13.736492156982422 + ], + [ + "▁sqm", + -13.736494064331056 + ], + [ + "branch", + -13.736580848693848 + ], + [ + "▁crumbled", + -13.736586570739746 + ], + [ + "pace", + -13.736676216125488 + ], + [ + "ebe", + -13.736735343933104 + ], + [ + "▁toppers", + -13.736770629882812 + ], + [ + "396", + -13.73677921295166 + ], + [ + "▁tracts", + -13.736791610717772 + ], + [ + "▁Mane", + -13.73681354522705 + ], + [ + "▁Fossil", + -13.736844062805176 + ], + [ + "itta", + -13.736984252929688 + ], + [ + "9.95", + -13.73699188232422 + ], + [ + "colonial", + -13.737058639526367 + ], + [ + "▁michael", + -13.737133026123049 + ], + [ + "mainly", + -13.737133979797363 + ], + [ + "▁Shall", + -13.737196922302246 + ], + [ + "seller", + -13.737297058105469 + ], + [ + "▁Lakeside", + -13.737306594848633 + ], + [ + "▁Chun", + -13.737312316894531 + ], + [ + "tuning", + -13.7374267578125 + ], + [ + "161", + -13.737470626831056 + ], + [ + "lington", + -13.737499237060549 + ], + [ + "prints", + -13.737508773803713 + ], + [ + "oooo", + -13.737584114074709 + ], + [ + "andra", + -13.73769474029541 + ], + [ + "Stars", + -13.737699508666992 + ], + [ + "▁Nd", + -13.737701416015623 + ], + [ + "▁sab", + -13.737719535827637 + ], + [ + "▁Staffing", + -13.737818717956545 + ], + [ + "Hor", + -13.737892150878906 + ], + [ + "▁scribe", + -13.737892150878906 + ], + [ + "▁watchers", + -13.737987518310549 + ], + [ + "▁Nets", + -13.737991333007812 + ], + [ + "▁Redemption", + -13.73803424835205 + ], + [ + "012", + -13.73808765411377 + ], + [ + "▁Wilhelm", + -13.73814868927002 + ], + [ + "▁romp", + -13.738201141357422 + ], + [ + "▁770", + -13.738221168518066 + ], + [ + "▁£200", + -13.738225936889648 + ], + [ + "▁MSRP", + -13.738248825073242 + ], + [ + "▁Trojans", + -13.738252639770508 + ], + [ + "▁numbering", + -13.738325119018556 + ], + [ + "▁Rit", + -13.738401412963867 + ], + [ + "-250", + -13.738428115844728 + ], + [ + "▁Embrace", + -13.73849105834961 + ], + [ + "Flip", + -13.738507270812988 + ], + [ + "▁homosexual", + -13.738524436950684 + ], + [ + "▁Oli", + -13.73854923248291 + ], + [ + "▁Aegean", + -13.738558769226074 + ], + [ + "▁Coliseum", + -13.738558769226074 + ], + [ + "▁ENERGY", + -13.738558769226074 + ], + [ + "▁badminton", + -13.738558769226074 + ], + [ + "▁horrified", + -13.738558769226074 + ], + [ + "▁vaginal", + -13.738558769226074 + ], + [ + "▁Physiology", + -13.73855972290039 + ], + [ + "▁regatta", + -13.738563537597656 + ], + [ + "▁Marquette", + -13.738581657409668 + ], + [ + "▁religiously", + -13.738592147827148 + ], + [ + "▁OpenStack", + -13.738622665405272 + ], + [ + "▁hallmarks", + -13.73864459991455 + ], + [ + 
"▁lingerie", + -13.738664627075195 + ], + [ + "▁Rachael", + -13.738665580749512 + ], + [ + "/2010", + -13.738697052001951 + ], + [ + "▁eras", + -13.738707542419434 + ], + [ + "▁Sys", + -13.738758087158203 + ], + [ + "▁MRT", + -13.738842010498049 + ], + [ + "▁Mig", + -13.73892307281494 + ], + [ + "▁enforceable", + -13.739008903503418 + ], + [ + "limit", + -13.739025115966797 + ], + [ + "▁Reducing", + -13.739026069641112 + ], + [ + "▁Suede", + -13.73905086517334 + ], + [ + "▁awoke", + -13.739058494567873 + ], + [ + "▁geeks", + -13.739089012145996 + ], + [ + "ghi", + -13.739168167114258 + ], + [ + "▁harms", + -13.739168167114258 + ], + [ + "▁teleport", + -13.739224433898926 + ], + [ + "elen", + -13.7392578125 + ], + [ + "▁Royalty", + -13.7392578125 + ], + [ + "▁Anders", + -13.739455223083496 + ], + [ + "970", + -13.739580154418944 + ], + [ + "magazine", + -13.739666938781738 + ], + [ + "Thousands", + -13.73967456817627 + ], + [ + "appear", + -13.739676475524902 + ], + [ + "Cambridge", + -13.739702224731444 + ], + [ + "▁excavations", + -13.739728927612305 + ], + [ + "▁Fighters", + -13.739761352539062 + ], + [ + "▁stun", + -13.739776611328123 + ], + [ + "wrong", + -13.73978042602539 + ], + [ + "Floor", + -13.7399320602417 + ], + [ + "residential", + -13.7399320602417 + ], + [ + "▁Teak", + -13.739940643310549 + ], + [ + "determined", + -13.73995876312256 + ], + [ + "354", + -13.739959716796877 + ], + [ + "▁napkin", + -13.739974975585938 + ], + [ + "▁ul", + -13.74000072479248 + ], + [ + "▁boomers", + -13.740069389343262 + ], + [ + "▁Dina", + -13.740117073059082 + ], + [ + "▁Proctor", + -13.740233421325684 + ], + [ + "arna", + -13.740276336669922 + ], + [ + "▁awfully", + -13.74030876159668 + ], + [ + "buzz", + -13.740631103515623 + ], + [ + "aden", + -13.740643501281738 + ], + [ + "vailing", + -13.740645408630373 + ], + [ + "▁tph", + -13.74065399169922 + ], + [ + "▁Upcoming", + -13.740817070007324 + ], + [ + "▁Rankings", + -13.740891456604004 + ], + [ + "Spending", + -13.74097728729248 + ], + [ + "▁consumables", + -13.740978240966797 + ], + [ + "▁brainchild", + -13.741056442260742 + ], + [ + "▁littered", + -13.741082191467283 + ], + [ + "▁nuclei", + -13.74109935760498 + ], + [ + "alan", + -13.741175651550291 + ], + [ + "▁appraisals", + -13.741287231445312 + ], + [ + "relli", + -13.74130630493164 + ], + [ + "402", + -13.741398811340332 + ], + [ + "1600", + -13.74146556854248 + ], + [ + "▁strikingly", + -13.74147605895996 + ], + [ + "▁Dissertation", + -13.741484642028809 + ], + [ + "Hundreds", + -13.741552352905272 + ], + [ + "▁wir", + -13.7415771484375 + ], + [ + "pres", + -13.74163818359375 + ], + [ + "▁linguistics", + -13.741643905639648 + ], + [ + "Exp", + -13.741655349731444 + ], + [ + "/50", + -13.741692543029783 + ], + [ + "▁Extend", + -13.74169635772705 + ], + [ + "▁Pattaya", + -13.74173355102539 + ], + [ + "▁centrifugal", + -13.74173355102539 + ], + [ + "▁emptiness", + -13.74173355102539 + ], + [ + "▁exemplifies", + -13.74173355102539 + ], + [ + "▁treacherous", + -13.74173355102539 + ], + [ + "▁Calvary", + -13.741739273071287 + ], + [ + "▁granny", + -13.741750717163086 + ], + [ + "▁morphology", + -13.741750717163086 + ], + [ + "▁Houghton", + -13.741768836975098 + ], + [ + "▁99.9%", + -13.741790771484377 + ], + [ + "▁radiology", + -13.741817474365234 + ], + [ + "▁ASIC", + -13.74183177947998 + ], + [ + "810", + -13.741918563842772 + ], + [ + "▁weeding", + -13.741929054260254 + ], + [ + "▁BEACH", + -13.741954803466797 + ], + [ + "SOFT", + -13.741969108581545 + ], + [ + "Tex", + 
-13.742032051086426 + ], + [ + "▁EMEA", + -13.74210262298584 + ], + [ + "▁Amit", + -13.742130279541016 + ], + [ + "▁Fundamental", + -13.742143630981444 + ], + [ + "▁unison", + -13.742164611816406 + ], + [ + "▁Calls", + -13.742167472839355 + ], + [ + "▁moulds", + -13.742207527160645 + ], + [ + "printing", + -13.74222183227539 + ], + [ + "▁Seg", + -13.742368698120115 + ], + [ + "▁spies", + -13.742375373840332 + ], + [ + "nuts", + -13.742414474487305 + ], + [ + "192", + -13.742451667785645 + ], + [ + "▁Processed", + -13.742465019226074 + ], + [ + "▁Alton", + -13.742501258850098 + ], + [ + "▁transplants", + -13.74263858795166 + ], + [ + "▁Whi", + -13.742742538452148 + ], + [ + "▁10-20", + -13.742751121520996 + ], + [ + "541", + -13.742777824401855 + ], + [ + "nke", + -13.742815017700195 + ], + [ + "tul", + -13.742840766906738 + ], + [ + "deo", + -13.742874145507812 + ], + [ + "Psalm", + -13.742897987365724 + ], + [ + "7-9", + -13.742902755737305 + ], + [ + "mirror", + -13.742910385131836 + ], + [ + "Encourage", + -13.742916107177734 + ], + [ + "recognized", + -13.742927551269531 + ], + [ + "▁Bits", + -13.742993354797363 + ], + [ + "▁1892", + -13.743000030517578 + ], + [ + "pai", + -13.743029594421388 + ], + [ + "▁1965,", + -13.743036270141602 + ], + [ + "pli", + -13.743086814880373 + ], + [ + "Essentially", + -13.743151664733888 + ], + [ + "_1", + -13.743227005004885 + ], + [ + "▁Situation", + -13.74325466156006 + ], + [ + "▁GX", + -13.743301391601562 + ], + [ + "PHP", + -13.743345260620115 + ], + [ + "▁adjective", + -13.743366241455078 + ], + [ + "Ger", + -13.74348258972168 + ], + [ + "▁Dies", + -13.743504524230955 + ], + [ + "Started", + -13.743542671203612 + ], + [ + "▁Aly", + -13.743597030639648 + ], + [ + "▁Luo", + -13.743599891662598 + ], + [ + "projects", + -13.743690490722656 + ], + [ + "▁wrappers", + -13.7437162399292 + ], + [ + "▁Mull", + -13.743809700012209 + ], + [ + "foundation", + -13.743839263916016 + ], + [ + "▁BSE", + -13.743891716003418 + ], + [ + "▁zenith", + -13.74392032623291 + ], + [ + "▁Ré", + -13.744056701660156 + ], + [ + "rane", + -13.744059562683104 + ], + [ + "▁Bison", + -13.744180679321287 + ], + [ + "Edited", + -13.744240760803224 + ], + [ + "▁insults", + -13.744255065917969 + ], + [ + "8.00", + -13.744256019592283 + ], + [ + "▁19%", + -13.74436092376709 + ], + [ + "cian", + -13.744379043579102 + ], + [ + "▁Vase", + -13.744389533996582 + ], + [ + "▁drool", + -13.744415283203123 + ], + [ + "▁Lars", + -13.744421005249023 + ], + [ + "▁RIP", + -13.74443531036377 + ], + [ + "▁Garr", + -13.74444580078125 + ], + [ + "▁fructose", + -13.744457244873049 + ], + [ + "Files", + -13.744470596313477 + ], + [ + "▁TOEFL", + -13.744548797607422 + ], + [ + "▁gunman", + -13.744551658630373 + ], + [ + "▁660", + -13.744563102722168 + ], + [ + "▁GPL", + -13.744589805603027 + ], + [ + "918", + -13.74460792541504 + ], + [ + "FACT", + -13.74463939666748 + ], + [ + "▁Tx", + -13.744698524475098 + ], + [ + "▁Attendance", + -13.74471378326416 + ], + [ + "Forget", + -13.74471950531006 + ], + [ + "▁snowflakes", + -13.74474048614502 + ], + [ + "oom", + -13.744741439819336 + ], + [ + "Hmmm", + -13.744750022888184 + ], + [ + "▁ceases", + -13.744769096374512 + ], + [ + "▁Antwerp", + -13.744917869567873 + ], + [ + "▁Mechanism", + -13.744917869567873 + ], + [ + "▁Montréal", + -13.744917869567873 + ], + [ + "▁arrogance", + -13.744917869567873 + ], + [ + "▁Walgreens", + -13.744918823242188 + ], + [ + "▁Capricorn", + -13.744921684265137 + ], + [ + "▁murky", + -13.744921684265137 + ], + [ + "▁Citadel", + 
-13.744924545288086 + ], + [ + "▁beeswax", + -13.744927406311035 + ], + [ + "▁Spielberg", + -13.744929313659668 + ], + [ + "▁Sephora", + -13.744930267333984 + ], + [ + "▁stabbing", + -13.744930267333984 + ], + [ + "▁slapped", + -13.74493408203125 + ], + [ + "▁surging", + -13.744946479797363 + ], + [ + "▁depressive", + -13.744973182678224 + ], + [ + "▁Brasil", + -13.74499797821045 + ], + [ + "▁Acne", + -13.74504280090332 + ], + [ + "▁Encore", + -13.745051383972168 + ], + [ + "▁ZERO", + -13.745088577270508 + ], + [ + "Las", + -13.745105743408203 + ], + [ + "▁unprofessional", + -13.745109558105469 + ], + [ + "glow", + -13.745123863220217 + ], + [ + "▁Bridgeport", + -13.745131492614746 + ], + [ + "▁shrinkage", + -13.745144844055176 + ], + [ + "dates", + -13.74515438079834 + ], + [ + "▁orchestration", + -13.745172500610352 + ], + [ + "▁Scores", + -13.745192527770996 + ], + [ + "▁Clown", + -13.745201110839844 + ], + [ + "3.8", + -13.745203971862791 + ], + [ + "▁prepped", + -13.745209693908691 + ], + [ + "Interest", + -13.745267868041992 + ], + [ + "▁Viktor", + -13.745298385620115 + ], + [ + "616", + -13.745327949523926 + ], + [ + "Semitic", + -13.74543285369873 + ], + [ + "cey", + -13.745457649230955 + ], + [ + "▁Spo", + -13.745463371276855 + ], + [ + "▁Dried", + -13.745468139648438 + ], + [ + "▁recount", + -13.745492935180664 + ], + [ + "guin", + -13.745495796203612 + ], + [ + "cogni", + -13.745562553405762 + ], + [ + "▁Alter", + -13.745615005493164 + ], + [ + "▁involuntary", + -13.745644569396973 + ], + [ + "611", + -13.745668411254885 + ], + [ + "▁grub", + -13.745676040649414 + ], + [ + "▁Manuscript", + -13.745684623718262 + ], + [ + "Advance", + -13.745699882507324 + ], + [ + "Thorough", + -13.745701789855955 + ], + [ + "▁Bridget", + -13.745719909667969 + ], + [ + "▁Hermann", + -13.745734214782717 + ], + [ + "Accept", + -13.745742797851562 + ], + [ + "▁Stellar", + -13.745745658874512 + ], + [ + "▁reachable", + -13.745777130126951 + ], + [ + "anny", + -13.7457914352417 + ], + [ + "▁Loren", + -13.745830535888672 + ], + [ + "610", + -13.745944023132324 + ], + [ + "▁polled", + -13.746052742004396 + ], + [ + "knowing", + -13.746066093444824 + ], + [ + "agen", + -13.746068954467772 + ], + [ + "▁wrecked", + -13.746091842651367 + ], + [ + "Brazil", + -13.746146202087402 + ], + [ + "▁ESC", + -13.74616527557373 + ], + [ + "▁smelly", + -13.74616813659668 + ], + [ + "▁handout", + -13.746171951293944 + ], + [ + "▁Bender", + -13.74620532989502 + ], + [ + "festival", + -13.746217727661133 + ], + [ + "ippy", + -13.746217727661133 + ], + [ + "▁CET", + -13.746235847473145 + ], + [ + "▁(1).", + -13.746251106262209 + ], + [ + "Wilson", + -13.746260643005373 + ], + [ + "Birth", + -13.746281623840332 + ], + [ + "▁refreshment", + -13.74630069732666 + ], + [ + "▁multiplying", + -13.746307373046877 + ], + [ + "468", + -13.746317863464355 + ], + [ + "▁coasts", + -13.746338844299316 + ], + [ + "▁MMO", + -13.746415138244627 + ], + [ + "highest", + -13.746471405029297 + ], + [ + "posted", + -13.746472358703612 + ], + [ + "Portable", + -13.746478080749512 + ], + [ + "Calling", + -13.746479988098145 + ], + [ + "cano", + -13.746520042419434 + ], + [ + "1993", + -13.746535301208496 + ], + [ + "matched", + -13.74655532836914 + ], + [ + "▁gurus", + -13.746567726135254 + ], + [ + "▁FIA", + -13.74658489227295 + ], + [ + "▁kingdoms", + -13.746634483337402 + ], + [ + "015", + -13.74673080444336 + ], + [ + "▁evangelist", + -13.74674129486084 + ], + [ + "▁reproductions", + -13.746745109558104 + ], + [ + "▁Molding", + 
-13.74676513671875 + ], + [ + "407", + -13.746856689453123 + ], + [ + "pair", + -13.746901512145996 + ], + [ + "Aqua", + -13.746935844421388 + ], + [ + "▁GPUs", + -13.746953010559082 + ], + [ + "▁Analytical", + -13.747082710266112 + ], + [ + "▁Urgent", + -13.747260093688965 + ], + [ + "▁6.8", + -13.747302055358888 + ], + [ + "Kid", + -13.747310638427734 + ], + [ + "▁Hmm", + -13.747386932373049 + ], + [ + "▁Miner", + -13.747485160827637 + ], + [ + "▁cancerous", + -13.747526168823242 + ], + [ + "▁nightclubs", + -13.74755859375 + ], + [ + "▁Brow", + -13.74757957458496 + ], + [ + "rze", + -13.747608184814451 + ], + [ + "mica", + -13.74770450592041 + ], + [ + "▁Barth", + -13.747757911682127 + ], + [ + "▁SAE", + -13.74779224395752 + ], + [ + "▁insulate", + -13.747803688049316 + ], + [ + "▁Queue", + -13.747867584228516 + ], + [ + "▁bounded", + -13.747910499572754 + ], + [ + "▁administers", + -13.74795150756836 + ], + [ + "nique", + -13.748016357421877 + ], + [ + "SPECT", + -13.7481050491333 + ], + [ + "▁Hercules", + -13.748112678527832 + ], + [ + "▁Rohingya", + -13.748112678527832 + ], + [ + "▁horticulture", + -13.748112678527832 + ], + [ + "▁nocturnal", + -13.748112678527832 + ], + [ + "▁optometrist", + -13.748112678527832 + ], + [ + "▁tuxedo", + -13.748112678527832 + ], + [ + "▁resonant", + -13.748113632202148 + ], + [ + "▁Merritt", + -13.748123168945312 + ], + [ + "▁Peabody", + -13.748126983642578 + ], + [ + "▁Durango", + -13.748149871826172 + ], + [ + "▁Ernie", + -13.748160362243652 + ], + [ + "▁mRNA", + -13.748207092285156 + ], + [ + "Tam", + -13.748298645019531 + ], + [ + "▁Shown", + -13.748330116271973 + ], + [ + "▁decoding", + -13.748332023620604 + ], + [ + "598", + -13.74834442138672 + ], + [ + "Var", + -13.74835968017578 + ], + [ + "▁excavated", + -13.748361587524414 + ], + [ + "▁taping", + -13.748371124267578 + ], + [ + "▁Rea", + -13.748406410217283 + ], + [ + "▁2017-2018", + -13.748414993286133 + ], + [ + "▁uncontrolled", + -13.748419761657717 + ], + [ + "▁Sonata", + -13.748448371887209 + ], + [ + "▁harassed", + -13.748477935791016 + ], + [ + "▁1876", + -13.74849796295166 + ], + [ + "▁salvaged", + -13.74850082397461 + ], + [ + "▁Colby", + -13.748533248901367 + ], + [ + "▁Dalai", + -13.748564720153809 + ], + [ + "▁germany", + -13.748590469360352 + ], + [ + "▁Sigh", + -13.74867820739746 + ], + [ + "udging", + -13.748713493347168 + ], + [ + "▁CLEAR", + -13.748785972595217 + ], + [ + "ican", + -13.74879550933838 + ], + [ + "382", + -13.74880313873291 + ], + [ + "▁Uptown", + -13.748808860778809 + ], + [ + "▁arctic", + -13.748854637145996 + ], + [ + "▁Solaris", + -13.748934745788574 + ], + [ + "meier", + -13.74893569946289 + ], + [ + "▁DBA", + -13.749003410339355 + ], + [ + "▁scrubs", + -13.749011993408203 + ], + [ + "nza", + -13.749021530151367 + ], + [ + "▁pooch", + -13.7490816116333 + ], + [ + "KEY", + -13.74919605255127 + ], + [ + "nian", + -13.749208450317385 + ], + [ + "▁311", + -13.74923038482666 + ], + [ + "mari", + -13.749252319335938 + ], + [ + "▁replicas", + -13.749303817749023 + ], + [ + "▁instalment", + -13.749349594116213 + ], + [ + "imper", + -13.749350547790527 + ], + [ + "▁entrant", + -13.749369621276855 + ], + [ + "▁1967.", + -13.749451637268066 + ], + [ + "fifth", + -13.74948024749756 + ], + [ + "479", + -13.749531745910645 + ], + [ + "faith", + -13.749563217163086 + ], + [ + "sig", + -13.749568939208984 + ], + [ + "Valley", + -13.749582290649414 + ], + [ + "wari", + -13.749600410461426 + ], + [ + "▁Applicant", + -13.749624252319336 + ], + [ + "▁disgust", + 
-13.749631881713867 + ], + [ + "▁commune", + -13.749733924865724 + ], + [ + "▁authoritarian", + -13.74977970123291 + ], + [ + "mineral", + -13.74989414215088 + ], + [ + "▁Scholastic", + -13.749897003173828 + ], + [ + "▁Tah", + -13.74990177154541 + ], + [ + "▁1871", + -13.750011444091797 + ], + [ + "Manual", + -13.75002384185791 + ], + [ + "▁hier", + -13.750039100646973 + ], + [ + "▁inversion", + -13.750039100646973 + ], + [ + "compete", + -13.750113487243652 + ], + [ + "▁cog", + -13.750120162963867 + ], + [ + "VIP", + -13.750182151794434 + ], + [ + "▁locomotives", + -13.750277519226074 + ], + [ + "▁orchids", + -13.750296592712402 + ], + [ + "▁Spinal", + -13.750311851501465 + ], + [ + "EMA", + -13.750314712524414 + ], + [ + "Progress", + -13.750356674194336 + ], + [ + "Pictures", + -13.75043487548828 + ], + [ + "▁Indicator", + -13.750473022460938 + ], + [ + "flies", + -13.750473976135254 + ], + [ + "▁Energ", + -13.750503540039062 + ], + [ + "▁Byrd", + -13.750523567199709 + ], + [ + "▁Verify", + -13.750636100769045 + ], + [ + "mori", + -13.750764846801758 + ], + [ + "▁Markle", + -13.750767707824709 + ], + [ + "mash", + -13.750797271728516 + ], + [ + "oles", + -13.7508544921875 + ], + [ + "▁nineteen", + -13.750981330871582 + ], + [ + "856", + -13.751116752624512 + ], + [ + "▁Boro", + -13.751193046569824 + ], + [ + "▁sporadic", + -13.75129222869873 + ], + [ + "▁anxieties", + -13.751317977905272 + ], + [ + "▁dyslexia", + -13.751317977905272 + ], + [ + "▁liturgy", + -13.751317977905272 + ], + [ + "▁midwives", + -13.751317977905272 + ], + [ + "▁barrister", + -13.75131893157959 + ], + [ + "▁communicative", + -13.751320838928224 + ], + [ + "▁Thunderbolt", + -13.751324653625488 + ], + [ + "▁disposing", + -13.751331329345703 + ], + [ + "▁beekeeping", + -13.751334190368652 + ], + [ + "▁Decatur", + -13.751340866088867 + ], + [ + "▁Presidency", + -13.751346588134766 + ], + [ + "foo", + -13.751348495483398 + ], + [ + "▁390", + -13.75136375427246 + ], + [ + "▁Lazarus", + -13.75136947631836 + ], + [ + "▁hazy", + -13.751373291015623 + ], + [ + "▁unwelcome", + -13.751373291015623 + ], + [ + "▁grandkids", + -13.751376152038574 + ], + [ + "▁Mafia", + -13.75137710571289 + ], + [ + "▁midsize", + -13.751378059387209 + ], + [ + "▁Hasbro", + -13.751391410827637 + ], + [ + "▁Playstation", + -13.751392364501951 + ], + [ + "▁invaders", + -13.751394271850586 + ], + [ + "▁meth", + -13.75145435333252 + ], + [ + "▁breezes", + -13.751471519470217 + ], + [ + "nada", + -13.75148582458496 + ], + [ + "Sue", + -13.751526832580566 + ], + [ + "▁Fairview", + -13.751550674438477 + ], + [ + "▁swatches", + -13.751562118530272 + ], + [ + "▁vigor", + -13.751564979553224 + ], + [ + "-89", + -13.751566886901855 + ], + [ + "founders", + -13.75161361694336 + ], + [ + "cuts", + -13.751631736755373 + ], + [ + "unde", + -13.751653671264648 + ], + [ + "▁LDL", + -13.751681327819824 + ], + [ + "▁Hewitt", + -13.751701354980469 + ], + [ + "▁Barrie", + -13.751704216003418 + ], + [ + "▁Decker", + -13.75174331665039 + ], + [ + "▁foodservice", + -13.751744270324709 + ], + [ + "vina", + -13.751775741577148 + ], + [ + "▁Blum", + -13.751776695251465 + ], + [ + "▁Jardin", + -13.751782417297363 + ], + [ + "buyer", + -13.75182056427002 + ], + [ + "▁Homecoming", + -13.75186538696289 + ], + [ + "187", + -13.751876831054688 + ], + [ + "▁orphaned", + -13.751877784729004 + ], + [ + "ikan", + -13.751893997192385 + ], + [ + "Listing", + -13.751965522766112 + ], + [ + "▁Rt", + -13.751982688903809 + ], + [ + "▁infested", + -13.752002716064451 + ], + [ + "valent", + 
-13.752119064331056 + ], + [ + "▁Pelican", + -13.752119064331056 + ], + [ + "iction", + -13.752158164978027 + ], + [ + "tial", + -13.752171516418455 + ], + [ + "▁postoperative", + -13.752302169799805 + ], + [ + "▁indifference", + -13.752324104309082 + ], + [ + "Inch", + -13.752363204956056 + ], + [ + "▁upholding", + -13.752405166625977 + ], + [ + "▁1963,", + -13.752459526062012 + ], + [ + "detect", + -13.75253200531006 + ], + [ + "▁reissue", + -13.752549171447754 + ], + [ + "▁cyan", + -13.752748489379885 + ], + [ + "tate", + -13.752764701843262 + ], + [ + "▁barring", + -13.752769470214844 + ], + [ + "▁blunders", + -13.75277328491211 + ], + [ + "nonsense", + -13.752784729003906 + ], + [ + "indi", + -13.752860069274902 + ], + [ + "▁picker", + -13.752948760986328 + ], + [ + "▁Quilts", + -13.752974510192873 + ], + [ + "rings", + -13.75301456451416 + ], + [ + "▁plunger", + -13.75305461883545 + ], + [ + "▁Conferences", + -13.753110885620115 + ], + [ + "▁Displays", + -13.753138542175291 + ], + [ + "Que", + -13.753162384033203 + ], + [ + "▁Lyle", + -13.753198623657228 + ], + [ + "▁stormed", + -13.753273010253906 + ], + [ + "▁moto", + -13.753311157226562 + ], + [ + "lips", + -13.753314018249512 + ], + [ + "rare", + -13.753321647644045 + ], + [ + "▁Experiences", + -13.753372192382812 + ], + [ + "▁prod", + -13.753424644470217 + ], + [ + "▁squads", + -13.753512382507324 + ], + [ + "IMO", + -13.753534317016602 + ], + [ + "▁mysteriously", + -13.7535400390625 + ], + [ + "▁234", + -13.753633499145508 + ], + [ + "▁Emilia", + -13.753667831420898 + ], + [ + "Lack", + -13.75376796722412 + ], + [ + "▁BFA", + -13.753817558288574 + ], + [ + "▁skylight", + -13.753820419311523 + ], + [ + "iPad", + -13.753828048706056 + ], + [ + "▁braids", + -13.753847122192385 + ], + [ + "▁millennia", + -13.753864288330078 + ], + [ + "▁bul", + -13.753904342651367 + ], + [ + "▁Tami", + -13.75394344329834 + ], + [ + "avon", + -13.753989219665527 + ], + [ + "jie", + -13.75399398803711 + ], + [ + "▁Agree", + -13.75402545928955 + ], + [ + "▁paddles", + -13.754067420959473 + ], + [ + "APS", + -13.754094123840332 + ], + [ + "▁780", + -13.754098892211914 + ], + [ + "▁33-", + -13.75418472290039 + ], + [ + "978", + -13.7542142868042 + ], + [ + "▁Merchants", + -13.75421905517578 + ], + [ + "DING", + -13.75422191619873 + ], + [ + "▁homogeneous", + -13.75424098968506 + ], + [ + "▁identifiers", + -13.754267692565918 + ], + [ + "▁mort", + -13.754283905029297 + ], + [ + "▁AGE", + -13.7543363571167 + ], + [ + "PW", + -13.75434112548828 + ], + [ + "▁Chor", + -13.754385948181152 + ], + [ + "mple", + -13.754387855529783 + ], + [ + "335", + -13.754456520080566 + ], + [ + "ident", + -13.754461288452148 + ], + [ + "▁grappling", + -13.754533767700195 + ], + [ + "▁gruesome", + -13.754533767700195 + ], + [ + "▁incubation", + -13.754533767700195 + ], + [ + "▁snatched", + -13.754533767700195 + ], + [ + "▁Phoebe", + -13.754535675048828 + ], + [ + "▁Pollution", + -13.75453758239746 + ], + [ + "▁Cosmic", + -13.75454330444336 + ], + [ + "▁bonfire", + -13.75454807281494 + ], + [ + "▁patriotism", + -13.75455379486084 + ], + [ + "▁lilac", + -13.754576683044434 + ], + [ + "▁Titus", + -13.754683494567873 + ], + [ + "▁magnifying", + -13.754812240600586 + ], + [ + "▁Boardwalk", + -13.754813194274902 + ], + [ + "▁blackberries", + -13.754816055297852 + ], + [ + "▁Gerber", + -13.75485610961914 + ], + [ + "▁Garbage", + -13.754884719848633 + ], + [ + "▁moisturiser", + -13.754886627197266 + ], + [ + "▁Scr", + -13.754923820495604 + ], + [ + "Salt", + -13.754944801330566 + ], 
+ [ + "booking", + -13.754974365234377 + ], + [ + "Lan", + -13.755016326904297 + ], + [ + "▁horseshoe", + -13.755026817321776 + ], + [ + "ood", + -13.75507640838623 + ], + [ + "▁Georg", + -13.755084991455078 + ], + [ + "▁Patton", + -13.755127906799316 + ], + [ + "▁meandering", + -13.755135536193848 + ], + [ + "631", + -13.755279541015623 + ], + [ + "▁Stands", + -13.755284309387209 + ], + [ + "▁Hug", + -13.755367279052734 + ], + [ + "▁43%", + -13.755393028259276 + ], + [ + "▁Liber", + -13.755464553833008 + ], + [ + "▁Jamestown", + -13.755467414855955 + ], + [ + "▁Mood", + -13.755504608154297 + ], + [ + "EH", + -13.755532264709473 + ], + [ + "8-9", + -13.755553245544434 + ], + [ + "Hara", + -13.755558967590332 + ], + [ + "▁giggle", + -13.755562782287598 + ], + [ + "773", + -13.75556468963623 + ], + [ + "▁Steer", + -13.755574226379396 + ], + [ + "▁HAD", + -13.755602836608888 + ], + [ + "reich", + -13.755620002746582 + ], + [ + "thra", + -13.755651473999023 + ], + [ + "bari", + -13.755677223205566 + ], + [ + "4.8", + -13.755763053894045 + ], + [ + "▁warmest", + -13.755855560302734 + ], + [ + "Lou", + -13.755922317504885 + ], + [ + "▁AREA", + -13.755924224853516 + ], + [ + "▁2025.", + -13.755970001220703 + ], + [ + "IOUS", + -13.756011962890623 + ], + [ + "▁44%", + -13.756047248840332 + ], + [ + "albeit", + -13.756061553955078 + ], + [ + "dealer", + -13.75607681274414 + ], + [ + "▁fateful", + -13.756085395812988 + ], + [ + "▁Reflections", + -13.75611400604248 + ], + [ + "gone", + -13.756128311157228 + ], + [ + "218", + -13.756146430969238 + ], + [ + "Alongside", + -13.756180763244627 + ], + [ + "Sci", + -13.756217002868652 + ], + [ + "INT", + -13.7562894821167 + ], + [ + "▁Porta", + -13.756290435791016 + ], + [ + "Owned", + -13.756402969360352 + ], + [ + "▁transplanted", + -13.756447792053224 + ], + [ + "▁Newbury", + -13.756452560424805 + ], + [ + "▁Associations", + -13.756463050842283 + ], + [ + "▁RMS", + -13.756468772888184 + ], + [ + "623", + -13.756518363952637 + ], + [ + "culus", + -13.756518363952637 + ], + [ + ".75\"", + -13.756534576416016 + ], + [ + "▁Exciting", + -13.756566047668455 + ], + [ + "▁Bohemian", + -13.756685256958008 + ], + [ + "pieces", + -13.756780624389648 + ], + [ + "▁Amongst", + -13.756784439086914 + ], + [ + "CMA", + -13.756796836853027 + ], + [ + "▁Twice", + -13.756821632385254 + ], + [ + "Ran", + -13.756824493408203 + ], + [ + "Discovery", + -13.756874084472656 + ], + [ + "▁Confirm", + -13.756911277770996 + ], + [ + "-98", + -13.756914138793944 + ], + [ + "housing", + -13.756943702697754 + ], + [ + "▁470", + -13.757034301757812 + ], + [ + "monte", + -13.757184028625488 + ], + [ + "▁PASS", + -13.757207870483398 + ], + [ + "▁tamper", + -13.757220268249512 + ], + [ + "▁Sino", + -13.75723934173584 + ], + [ + "▁pastels", + -13.75723934173584 + ], + [ + "▁enclose", + -13.757277488708496 + ], + [ + "twist", + -13.757380485534668 + ], + [ + "feeling", + -13.757431030273438 + ], + [ + "▁synced", + -13.757476806640623 + ], + [ + "vite", + -13.757525444030762 + ], + [ + "▁sponsorships", + -13.757539749145508 + ], + [ + "iting", + -13.757554054260254 + ], + [ + "▁signaled", + -13.75755500793457 + ], + [ + "▁willpower", + -13.757564544677734 + ], + [ + "▁Paine", + -13.757573127746582 + ], + [ + "217", + -13.757608413696287 + ], + [ + "▁Landlord", + -13.757729530334473 + ], + [ + "Credential", + -13.75775909423828 + ], + [ + "▁CMYK", + -13.75775909423828 + ], + [ + "▁Lockheed", + -13.75775909423828 + ], + [ + "▁Macquarie", + -13.75775909423828 + ], + [ + "▁Memorandum", + 
-13.75775909423828 + ], + [ + "▁genealogical", + -13.75775909423828 + ], + [ + "▁grumpy", + -13.75775909423828 + ], + [ + "▁sensitivities", + -13.75775909423828 + ], + [ + "▁Burundi", + -13.75776481628418 + ], + [ + "▁daisy", + -13.757769584655762 + ], + [ + "▁tingling", + -13.757784843444824 + ], + [ + "▁unlicensed", + -13.757818222045898 + ], + [ + "▁detainees", + -13.757842063903809 + ], + [ + "▁centenary", + -13.757868766784668 + ], + [ + "▁nurseries", + -13.757883071899414 + ], + [ + "▁Railways", + -13.757925033569336 + ], + [ + "▁foresight", + -13.757929801940918 + ], + [ + "▁Vanities", + -13.7579984664917 + ], + [ + "▁Chakra", + -13.758041381835938 + ], + [ + "Listed", + -13.758044242858888 + ], + [ + "▁Massey", + -13.758105278015137 + ], + [ + "870", + -13.758108139038086 + ], + [ + "IAC", + -13.758138656616213 + ], + [ + "▁Cliffs", + -13.75814151763916 + ], + [ + "joint", + -13.758180618286133 + ], + [ + "Testing", + -13.758213996887209 + ], + [ + "▁Wicker", + -13.758217811584473 + ], + [ + "▁Treasures", + -13.758234024047852 + ], + [ + "▁caravans", + -13.758301734924316 + ], + [ + "▁Samson", + -13.758403778076172 + ], + [ + "▁frail", + -13.758417129516602 + ], + [ + "▁ven", + -13.758431434631348 + ], + [ + "▁ardent", + -13.75844383239746 + ], + [ + "557", + -13.7584867477417 + ], + [ + "▁Raz", + -13.758490562438965 + ], + [ + "▁Axel", + -13.758540153503418 + ], + [ + "▁Canoe", + -13.758549690246582 + ], + [ + "▁jolt", + -13.75861644744873 + ], + [ + "60%", + -13.758647918701172 + ], + [ + "▁hedging", + -13.758746147155762 + ], + [ + "▁Resistant", + -13.758758544921877 + ], + [ + "▁:-", + -13.758784294128418 + ], + [ + "▁sponges", + -13.75887680053711 + ], + [ + "▁Mechanic", + -13.758930206298828 + ], + [ + "▁Bl", + -13.758991241455078 + ], + [ + "▁Chee", + -13.759062767028809 + ], + [ + "186", + -13.759076118469238 + ], + [ + "muth", + -13.759078025817873 + ], + [ + "▁winemaking", + -13.759161949157717 + ], + [ + "Graph", + -13.759195327758787 + ], + [ + "▁HEAD", + -13.759224891662598 + ], + [ + "▁Buses", + -13.759307861328123 + ], + [ + "527", + -13.759328842163086 + ], + [ + "▁archaeologists", + -13.759336471557615 + ], + [ + "proclaimed", + -13.75936508178711 + ], + [ + "▁0.5%", + -13.759376525878906 + ], + [ + "concrete", + -13.759390830993652 + ], + [ + "▁Branded", + -13.759443283081056 + ], + [ + "▁Hess", + -13.759477615356444 + ], + [ + "▁Mein", + -13.759499549865724 + ], + [ + "▁Kok", + -13.759535789489746 + ], + [ + "▁NXT", + -13.759543418884276 + ], + [ + "▁Nestled", + -13.759560585021973 + ], + [ + "▁Paro", + -13.759651184082031 + ], + [ + "OLA", + -13.759666442871094 + ], + [ + "Orders", + -13.75968074798584 + ], + [ + "mony", + -13.759713172912598 + ], + [ + "Counsel", + -13.759796142578123 + ], + [ + "▁Mica", + -13.759828567504885 + ], + [ + "▁Specialized", + -13.75987720489502 + ], + [ + "▁esta", + -13.75995635986328 + ], + [ + "▁Luxurious", + -13.759964942932127 + ], + [ + "▁licensee", + -13.760025024414062 + ], + [ + "446", + -13.76007843017578 + ], + [ + "▁forensics", + -13.760210037231444 + ], + [ + "▁Graffiti", + -13.760220527648926 + ], + [ + "Hart", + -13.760238647460938 + ], + [ + "▁encapsulated", + -13.76024341583252 + ], + [ + "(2", + -13.760345458984377 + ], + [ + "vascular", + -13.76041316986084 + ], + [ + "▁restricts", + -13.760418891906738 + ], + [ + "▁tumbler", + -13.760421752929688 + ], + [ + "▁rearrange", + -13.760507583618164 + ], + [ + "uy", + -13.760513305664062 + ], + [ + "▁Spar", + -13.76052188873291 + ], + [ + "▁MIS", + -13.760557174682615 + 
], + [ + "▁DOJ", + -13.760746955871582 + ], + [ + "▁conspicuous", + -13.760754585266112 + ], + [ + "gauge", + -13.76077938079834 + ], + [ + "▁Erdogan", + -13.760785102844238 + ], + [ + "▁Damian", + -13.760896682739258 + ], + [ + "hod", + -13.760913848876951 + ], + [ + "▁equator", + -13.760984420776367 + ], + [ + "▁crippling", + -13.760995864868164 + ], + [ + "▁emanating", + -13.76099681854248 + ], + [ + "159", + -13.761001586914062 + ], + [ + "▁disdain", + -13.76100254058838 + ], + [ + "▁thermoplastic", + -13.761004447937012 + ], + [ + "▁Paisley", + -13.761005401611328 + ], + [ + "▁whining", + -13.76100730895996 + ], + [ + "▁NYS", + -13.761012077331545 + ], + [ + "▁Alyssa", + -13.761016845703123 + ], + [ + "▁Brigham", + -13.76102066040039 + ], + [ + "▁caddy", + -13.761030197143556 + ], + [ + "▁foodies", + -13.761038780212402 + ], + [ + "▁Zachary", + -13.761046409606934 + ], + [ + "▁Maddie", + -13.76104736328125 + ], + [ + "▁spousal", + -13.7610502243042 + ], + [ + "▁Monmouth", + -13.76105785369873 + ], + [ + "▁Forsyth", + -13.761131286621094 + ], + [ + "▁Shepard", + -13.761181831359863 + ], + [ + "▁Snowden", + -13.761191368103027 + ], + [ + "▁appease", + -13.761201858520508 + ], + [ + "▁Rowling", + -13.76122760772705 + ], + [ + "▁WAV", + -13.761249542236328 + ], + [ + "GV", + -13.761281967163086 + ], + [ + "▁Potatoes", + -13.761282920837402 + ], + [ + "▁Merlot", + -13.761284828186035 + ], + [ + "lifetime", + -13.761343002319336 + ], + [ + "▁weaponry", + -13.761366844177246 + ], + [ + "Responding", + -13.76140022277832 + ], + [ + "▁Boul", + -13.761425971984863 + ], + [ + "▁chronically", + -13.761454582214355 + ], + [ + "▁Seminole", + -13.761502265930176 + ], + [ + "▁Exotic", + -13.761556625366213 + ], + [ + "▁sideline", + -13.76155948638916 + ], + [ + "▁Obesity", + -13.761577606201172 + ], + [ + "▁bmw", + -13.761581420898438 + ], + [ + "▁offsite", + -13.761610984802246 + ], + [ + "▁Courtyard", + -13.761614799499512 + ], + [ + "ament", + -13.761638641357422 + ], + [ + "lva", + -13.761679649353027 + ], + [ + "▁Tasmanian", + -13.761699676513672 + ], + [ + "iev", + -13.761740684509276 + ], + [ + "▁favours", + -13.76180648803711 + ], + [ + "GRA", + -13.761948585510254 + ], + [ + "respected", + -13.762033462524414 + ], + [ + "▁medial", + -13.762041091918944 + ], + [ + "▁naps", + -13.762089729309082 + ], + [ + "▁clots", + -13.762211799621582 + ], + [ + "▁dissertations", + -13.76225757598877 + ], + [ + "abo", + -13.762356758117676 + ], + [ + "▁rulings", + -13.762374877929688 + ], + [ + "▁Tort", + -13.762474060058594 + ], + [ + "▁soot", + -13.762548446655272 + ], + [ + "▁SHARE", + -13.762557983398438 + ], + [ + "▁Voluntary", + -13.76255989074707 + ], + [ + "Demonstrate", + -13.762643814086914 + ], + [ + "column", + -13.76267433166504 + ], + [ + "Numerous", + -13.762688636779783 + ], + [ + "Charlotte", + -13.762694358825684 + ], + [ + "Luxury", + -13.762699127197266 + ], + [ + "512", + -13.762741088867188 + ], + [ + "▁Schwab", + -13.762837409973145 + ], + [ + "portable", + -13.76286792755127 + ], + [ + "ayo", + -13.762887954711914 + ], + [ + "prov", + -13.762909889221191 + ], + [ + "▁underage", + -13.762925148010254 + ], + [ + "-56", + -13.76295280456543 + ], + [ + "(2),", + -13.763104438781738 + ], + [ + "5-8", + -13.763163566589355 + ], + [ + "▁timepiece", + -13.763166427612305 + ], + [ + "▁dizzy", + -13.763175010681152 + ], + [ + "▁Troop", + -13.763185501098633 + ], + [ + "Element", + -13.7632474899292 + ], + [ + "▁messes", + -13.76326847076416 + ], + [ + "▁Silhouette", + -13.763299942016602 + ], 
+ [ + "▁IOC", + -13.763318061828612 + ], + [ + "▁jean", + -13.763325691223145 + ], + [ + "trix", + -13.763391494750977 + ], + [ + "▁Francois", + -13.763453483581545 + ], + [ + "ggie", + -13.763497352600098 + ], + [ + "▁lan", + -13.763631820678713 + ], + [ + "231", + -13.763663291931152 + ], + [ + "grapher", + -13.763686180114746 + ], + [ + "▁purify", + -13.763748168945312 + ], + [ + "▁Hermes", + -13.76376724243164 + ], + [ + "▁zippers", + -13.763854026794434 + ], + [ + "▁purport", + -13.76386547088623 + ], + [ + "▁dismay", + -13.763891220092772 + ], + [ + "dew", + -13.76389503479004 + ], + [ + "▁30\"", + -13.76396656036377 + ], + [ + "▁EPIC", + -13.763991355895996 + ], + [ + "▁Khu", + -13.7640962600708 + ], + [ + "ELA", + -13.76409912109375 + ], + [ + "▁inked", + -13.764113426208496 + ], + [ + "anything", + -13.764190673828123 + ], + [ + "▁overloaded", + -13.76421356201172 + ], + [ + "CSE", + -13.764240264892578 + ], + [ + "Semitism", + -13.764242172241213 + ], + [ + "▁dermatitis", + -13.764242172241213 + ], + [ + "▁hypocrisy", + -13.764242172241213 + ], + [ + "▁mischievous", + -13.764242172241213 + ], + [ + "▁observable", + -13.764242172241213 + ], + [ + "▁snooze", + -13.764242172241213 + ], + [ + "▁Somerville", + -13.764243125915527 + ], + [ + "▁Earthquake", + -13.764260292053224 + ], + [ + "▁OFFICE", + -13.76427173614502 + ], + [ + "▁moonlight", + -13.764272689819336 + ], + [ + "rog", + -13.764278411865234 + ], + [ + "▁astute", + -13.764294624328612 + ], + [ + "▁abduction", + -13.764305114746094 + ], + [ + "▁terminating", + -13.764327049255373 + ], + [ + "nex", + -13.764337539672852 + ], + [ + "tov", + -13.764349937438965 + ], + [ + "sqm", + -13.764400482177734 + ], + [ + "▁Kirkland", + -13.76445770263672 + ], + [ + "▁DBS", + -13.764459609985352 + ], + [ + "97.", + -13.764467239379885 + ], + [ + "▁Retailers", + -13.764525413513184 + ], + [ + "▁Curator", + -13.764607429504396 + ], + [ + "▁223", + -13.764683723449709 + ], + [ + "▁deg", + -13.764703750610352 + ], + [ + "▁Marta", + -13.76471710205078 + ], + [ + "▁notepad", + -13.764768600463867 + ], + [ + "▁broadest", + -13.764779090881348 + ], + [ + "boa", + -13.764795303344728 + ], + [ + "▁Aud", + -13.764810562133787 + ], + [ + "▁Fridge", + -13.764843940734863 + ], + [ + "Movie", + -13.764869689941406 + ], + [ + "▁joystick", + -13.76487922668457 + ], + [ + "QU", + -13.764914512634276 + ], + [ + "▁explorations", + -13.764923095703123 + ], + [ + "▁dismantled", + -13.765033721923828 + ], + [ + "▁conjure", + -13.765085220336914 + ], + [ + "Ap", + -13.76512050628662 + ], + [ + "▁david", + -13.765149116516112 + ], + [ + "Sys", + -13.76518726348877 + ], + [ + "eps", + -13.765219688415527 + ], + [ + "imus", + -13.765243530273438 + ], + [ + "tional", + -13.765292167663574 + ], + [ + "▁airways", + -13.765386581420898 + ], + [ + "PHI", + -13.765469551086426 + ], + [ + "▁rescues", + -13.765575408935549 + ], + [ + "RTC", + -13.765620231628418 + ], + [ + "▁Dude", + -13.765642166137695 + ], + [ + "▁censor", + -13.765765190124512 + ], + [ + "▁Ald", + -13.765809059143066 + ], + [ + "▁propeller", + -13.765820503234863 + ], + [ + "UFF", + -13.765894889831545 + ], + [ + "▁VIA", + -13.765915870666504 + ], + [ + "▁Moro", + -13.76601505279541 + ], + [ + "Traffic", + -13.766030311584473 + ], + [ + "▁climber", + -13.766075134277344 + ], + [ + "creator", + -13.766077995300291 + ], + [ + "Fight", + -13.766098022460938 + ], + [ + "▁skylights", + -13.7662353515625 + ], + [ + "▁Filipinos", + -13.766243934631348 + ], + [ + "▁Plano", + -13.766313552856444 + ], + [ + 
"▁pundits", + -13.766343116760254 + ], + [ + "▁chit", + -13.766432762145996 + ], + [ + "▁stunningly", + -13.766446113586426 + ], + [ + "▁Authentication", + -13.766552925109863 + ], + [ + "uen", + -13.766557693481444 + ], + [ + "▁Protecting", + -13.766584396362305 + ], + [ + "▁explanatory", + -13.76658821105957 + ], + [ + "Repair", + -13.766761779785156 + ], + [ + "▁Kiri", + -13.76677417755127 + ], + [ + "▁Fingers", + -13.766932487487791 + ], + [ + "▁Mainland", + -13.766935348510742 + ], + [ + "925", + -13.767111778259276 + ], + [ + "Pak", + -13.76711368560791 + ], + [ + "906", + -13.76712703704834 + ], + [ + "▁1881", + -13.767138481140137 + ], + [ + "▁2019:", + -13.76716136932373 + ], + [ + "mell", + -13.767233848571776 + ], + [ + "▁platters", + -13.7672758102417 + ], + [ + "▁Biennale", + -13.767499923706056 + ], + [ + "▁Enemy", + -13.767499923706056 + ], + [ + "▁Hickory", + -13.767499923706056 + ], + [ + "▁Jehovah", + -13.767499923706056 + ], + [ + "▁consonant", + -13.767499923706056 + ], + [ + "▁humiliation", + -13.767499923706056 + ], + [ + "▁stabilizing", + -13.767499923706056 + ], + [ + "▁Felipe", + -13.767501831054688 + ], + [ + "▁nexus", + -13.767501831054688 + ], + [ + "▁Dunedin", + -13.76750373840332 + ], + [ + "▁stucco", + -13.76750373840332 + ], + [ + "boggling", + -13.767565727233888 + ], + [ + "▁Depp", + -13.767566680908203 + ], + [ + "▁barista", + -13.767786026000977 + ], + [ + "▁copywriting", + -13.767805099487305 + ], + [ + "▁encore", + -13.767817497253418 + ], + [ + "Adult", + -13.76784324645996 + ], + [ + "▁conductive", + -13.76785659790039 + ], + [ + "▁modernized", + -13.76789665222168 + ], + [ + "▁slaw", + -13.767898559570312 + ], + [ + "▁2014-15", + -13.76791763305664 + ], + [ + "▁outlaw", + -13.767950057983398 + ], + [ + "▁rotors", + -13.767964363098145 + ], + [ + "▁unconditionally", + -13.767995834350586 + ], + [ + "▁dispel", + -13.76801586151123 + ], + [ + "Aw", + -13.768036842346191 + ], + [ + "▁Mathews", + -13.768041610717772 + ], + [ + "▁Simmer", + -13.768054008483888 + ], + [ + "Format", + -13.76809024810791 + ], + [ + "▁Mongolian", + -13.768099784851074 + ], + [ + "rau", + -13.768128395080566 + ], + [ + "552", + -13.768206596374512 + ], + [ + "▁Lak", + -13.768247604370115 + ], + [ + "▁sixties", + -13.768280029296877 + ], + [ + "241", + -13.76828670501709 + ], + [ + "▁Dirk", + -13.768291473388672 + ], + [ + "▁Primer", + -13.768335342407228 + ], + [ + "▁physicists", + -13.768336296081545 + ], + [ + "▁KIT", + -13.768360137939451 + ], + [ + "ramp", + -13.768380165100098 + ], + [ + "▁Gravel", + -13.76843547821045 + ], + [ + "▁Suppose", + -13.768484115600586 + ], + [ + "▁literate", + -13.768580436706545 + ], + [ + "stal", + -13.768583297729492 + ], + [ + "▁Woodlands", + -13.768726348876951 + ], + [ + "▁keel", + -13.768787384033203 + ], + [ + "▁priv", + -13.768866539001465 + ], + [ + "574", + -13.768935203552246 + ], + [ + "▁commences", + -13.768943786621094 + ], + [ + "alaya", + -13.768953323364258 + ], + [ + "Specific", + -13.769110679626465 + ], + [ + "▁Saha", + -13.769110679626465 + ], + [ + "▁Momentum", + -13.769120216369627 + ], + [ + "serious", + -13.769187927246094 + ], + [ + "▁Recycle", + -13.769189834594728 + ], + [ + "surgery", + -13.769371032714844 + ], + [ + "Leadership", + -13.769386291503906 + ], + [ + "Feedback", + -13.769399642944336 + ], + [ + "▁ops", + -13.769410133361816 + ], + [ + "▁finalised", + -13.769463539123535 + ], + [ + "kies", + -13.769501686096191 + ], + [ + "everyone", + -13.76951503753662 + ], + [ + "▁shortstop", + -13.769550323486328 + 
], + [ + "snap", + -13.769564628601074 + ], + [ + "Chairman", + -13.76956558227539 + ], + [ + "scribed", + -13.76956558227539 + ], + [ + "▁8.3", + -13.769665718078612 + ], + [ + "▁apartheid", + -13.769665718078612 + ], + [ + "▁methyl", + -13.769668579101562 + ], + [ + "▁Confession", + -13.769683837890623 + ], + [ + "483", + -13.769685745239258 + ], + [ + "buster", + -13.769782066345217 + ], + [ + "789", + -13.76983642578125 + ], + [ + "▁ENG", + -13.769912719726562 + ], + [ + "▁Ruff", + -13.769957542419434 + ], + [ + "ите", + -13.7699613571167 + ], + [ + "▁Hussain", + -13.769983291625977 + ], + [ + "lr", + -13.77001953125 + ], + [ + "jump", + -13.770220756530762 + ], + [ + "▁shroud", + -13.770373344421388 + ], + [ + "login", + -13.770383834838867 + ], + [ + "unlike", + -13.7705078125 + ], + [ + "▁unintentional", + -13.770545959472656 + ], + [ + "▁Oceania", + -13.7705659866333 + ], + [ + "291", + -13.770587921142578 + ], + [ + "Mining", + -13.770617485046388 + ], + [ + "cheng", + -13.770685195922852 + ], + [ + "dice", + -13.770689964294434 + ], + [ + "▁tickle", + -13.77075481414795 + ], + [ + "Negotiation", + -13.770767211914062 + ], + [ + "▁Cheddar", + -13.770767211914062 + ], + [ + "▁Murdoch", + -13.770767211914062 + ], + [ + "▁clunky", + -13.770767211914062 + ], + [ + "▁kangaroo", + -13.770767211914062 + ], + [ + "▁crimson", + -13.77076816558838 + ], + [ + "▁Angular", + -13.77077293395996 + ], + [ + "▁menswear", + -13.770776748657228 + ], + [ + "▁untapped", + -13.77078342437744 + ], + [ + "▁unobtrusive", + -13.770788192749023 + ], + [ + "▁honeycomb", + -13.77080249786377 + ], + [ + "lind", + -13.7708101272583 + ], + [ + "▁tyranny", + -13.770818710327148 + ], + [ + "▁Harrington", + -13.770825386047363 + ], + [ + "▁Envy", + -13.770837783813477 + ], + [ + "▁redistribution", + -13.77084732055664 + ], + [ + "▁gif", + -13.770862579345703 + ], + [ + "▁poached", + -13.770899772644045 + ], + [ + "▁Yugoslavia", + -13.770915031433104 + ], + [ + "▁Brookfield", + -13.770915985107422 + ], + [ + "▁Susie", + -13.770918846130373 + ], + [ + "▁audiobooks", + -13.770995140075684 + ], + [ + "▁monday", + -13.771011352539062 + ], + [ + "▁emojis", + -13.771025657653809 + ], + [ + "302", + -13.771047592163086 + ], + [ + "nning", + -13.771089553833008 + ], + [ + "▁IBS", + -13.771153450012209 + ], + [ + "▁Lille", + -13.771193504333496 + ], + [ + "99%", + -13.771249771118164 + ], + [ + "▁ester", + -13.77126407623291 + ], + [ + "▁Fitted", + -13.77129364013672 + ], + [ + "pane", + -13.771333694458008 + ], + [ + "▁Graduates", + -13.771358489990234 + ], + [ + "SEM", + -13.771368026733398 + ], + [ + "▁sunken", + -13.771374702453612 + ], + [ + "ADE", + -13.771416664123535 + ], + [ + "erta", + -13.771434783935549 + ], + [ + "▁Spl", + -13.771441459655762 + ], + [ + "▁slashed", + -13.771530151367188 + ], + [ + "▁Baths", + -13.771565437316896 + ], + [ + "▁sparking", + -13.771604537963867 + ], + [ + "Pu", + -13.771662712097168 + ], + [ + "▁CHECK", + -13.771728515625 + ], + [ + "phor", + -13.771746635437012 + ], + [ + "▁scrapbooking", + -13.771761894226074 + ], + [ + "▁Myself", + -13.771770477294922 + ], + [ + "▁Handel", + -13.771778106689451 + ], + [ + "▁psychiatry", + -13.771857261657717 + ], + [ + "▁orchestras", + -13.771868705749512 + ], + [ + "Stra", + -13.77193832397461 + ], + [ + "PY", + -13.771947860717772 + ], + [ + "▁psychopath", + -13.77200412750244 + ], + [ + "enga", + -13.77207088470459 + ], + [ + "elu", + -13.772083282470703 + ], + [ + "IUM", + -13.772170066833496 + ], + [ + "▁Blas", + -13.77221965789795 + ], + [ 
+ "▁aversion", + -13.77224063873291 + ], + [ + "▁Spade", + -13.77236557006836 + ], + [ + "locate", + -13.77242946624756 + ], + [ + "▁socialization", + -13.772459030151367 + ], + [ + "▁Chromebook", + -13.772490501403809 + ], + [ + "Comfort", + -13.772507667541504 + ], + [ + "posi", + -13.772550582885742 + ], + [ + "5:00", + -13.772648811340332 + ], + [ + "▁weep", + -13.772720336914062 + ], + [ + "Sullivan", + -13.772727966308594 + ], + [ + "Medium", + -13.772793769836426 + ], + [ + "gress", + -13.772826194763184 + ], + [ + "Copper", + -13.77286434173584 + ], + [ + "▁showering", + -13.772871017456056 + ], + [ + "▁RSI", + -13.772871971130373 + ], + [ + "▁RDA", + -13.772884368896484 + ], + [ + "ethnic", + -13.77288818359375 + ], + [ + "IED", + -13.773041725158691 + ], + [ + "9.7", + -13.773096084594728 + ], + [ + "▁AIA", + -13.773118019104004 + ], + [ + "operate", + -13.773138046264648 + ], + [ + "▁ITC", + -13.773219108581545 + ], + [ + "random", + -13.77322769165039 + ], + [ + "▁Earnings", + -13.773266792297363 + ], + [ + "▁Gio", + -13.773359298706056 + ], + [ + "▁Darth", + -13.773418426513672 + ], + [ + "▁Degrees", + -13.77343463897705 + ], + [ + "▁Doo", + -13.77347183227539 + ], + [ + "▁Tetra", + -13.773507118225098 + ], + [ + "ctic", + -13.773512840270996 + ], + [ + "rax", + -13.773518562316896 + ], + [ + "▁needn", + -13.773531913757324 + ], + [ + "TEK", + -13.773540496826172 + ], + [ + "▁BET", + -13.773602485656738 + ], + [ + "▁4,500", + -13.773612022399902 + ], + [ + "▁relinquish", + -13.77364730834961 + ], + [ + "▁URI", + -13.773655891418455 + ], + [ + "ANI", + -13.77369213104248 + ], + [ + "▁Flute", + -13.773719787597656 + ], + [ + "▁90'", + -13.773720741271973 + ], + [ + "▁Sanjay", + -13.773721694946287 + ], + [ + "TAC", + -13.77382755279541 + ], + [ + "▁Pok", + -13.773836135864258 + ], + [ + "▁SAL", + -13.773887634277344 + ], + [ + "▁Wand", + -13.773951530456545 + ], + [ + "▁Dracula", + -13.774045944213867 + ], + [ + "▁dwindling", + -13.774045944213867 + ], + [ + "▁hypotheses", + -13.774045944213867 + ], + [ + "▁intrepid", + -13.774045944213867 + ], + [ + "▁paternal", + -13.774045944213867 + ], + [ + "▁shenanigans", + -13.774045944213867 + ], + [ + "▁tumultuous", + -13.774045944213867 + ], + [ + "▁Moz", + -13.774093627929688 + ], + [ + "▁discontent", + -13.77409839630127 + ], + [ + "▁Robyn", + -13.774107933044434 + ], + [ + "▁unarmed", + -13.774109840393066 + ], + [ + "gence", + -13.774115562438965 + ], + [ + "▁seatbelt", + -13.77411937713623 + ], + [ + "▁grandchild", + -13.774124145507812 + ], + [ + "▁Burnley", + -13.774160385131836 + ], + [ + "▁Federer", + -13.774203300476074 + ], + [ + "Distinct", + -13.774216651916504 + ], + [ + "-120", + -13.774301528930664 + ], + [ + "▁Timer", + -13.774330139160156 + ], + [ + "▁invalidate", + -13.774425506591797 + ], + [ + "▁||", + -13.774507522583008 + ], + [ + "VF", + -13.774517059326172 + ], + [ + "nbsp", + -13.774544715881348 + ], + [ + "▁TIFF", + -13.774560928344728 + ], + [ + "cee", + -13.77460479736328 + ], + [ + "mmmm", + -13.774645805358888 + ], + [ + "▁coffees", + -13.774656295776367 + ], + [ + "▁Ruiz", + -13.774658203125 + ], + [ + "▁Sensing", + -13.774683952331545 + ], + [ + "LEN", + -13.77472686767578 + ], + [ + "▁rewriting", + -13.774768829345703 + ], + [ + "▁creed", + -13.774785041809082 + ], + [ + "▁snowman", + -13.774802207946776 + ], + [ + "▁laborers", + -13.774805068969728 + ], + [ + "▁booklets", + -13.774868965148926 + ], + [ + "Spi", + -13.774907112121582 + ], + [ + "▁Boon", + -13.774911880493164 + ], + [ + "▁sho", + 
-13.774981498718262 + ], + [ + "▁collided", + -13.775046348571776 + ], + [ + "▁ephemeral", + -13.77504825592041 + ], + [ + "▁contoured", + -13.775100708007812 + ], + [ + "ours", + -13.77511501312256 + ], + [ + "▁:(", + -13.77512550354004 + ], + [ + "checking", + -13.7752046585083 + ], + [ + "397", + -13.775338172912598 + ], + [ + "▁SBA", + -13.77543830871582 + ], + [ + "▁pegs", + -13.775453567504885 + ], + [ + "editing", + -13.775490760803224 + ], + [ + "▁Ripple", + -13.775609016418455 + ], + [ + "jj", + -13.775632858276367 + ], + [ + "esch", + -13.775647163391112 + ], + [ + "▁Zones", + -13.775668144226074 + ], + [ + "▁Det", + -13.775755882263184 + ], + [ + "▁buzzer", + -13.77577781677246 + ], + [ + "▁doxycycline", + -13.77580738067627 + ], + [ + "▁$47", + -13.775839805603027 + ], + [ + "▁Switching", + -13.775885581970217 + ], + [ + "Attach", + -13.775893211364746 + ], + [ + "844", + -13.77589988708496 + ], + [ + "4′′", + -13.775901794433594 + ], + [ + "▁exempted", + -13.775948524475098 + ], + [ + "▁swab", + -13.776001930236816 + ], + [ + "sister", + -13.77606201171875 + ], + [ + "Furniture", + -13.776101112365724 + ], + [ + "Michigan", + -13.776103019714355 + ], + [ + "Alexander", + -13.776108741760254 + ], + [ + "vintage", + -13.776108741760254 + ], + [ + "▁kart", + -13.77611255645752 + ], + [ + "▁Tres", + -13.776114463806152 + ], + [ + "323", + -13.776122093200684 + ], + [ + "itra", + -13.776124000549316 + ], + [ + "purple", + -13.776135444641112 + ], + [ + "password", + -13.776152610778809 + ], + [ + "Immediately", + -13.776175498962402 + ], + [ + "▁DSS", + -13.776251792907717 + ], + [ + "▁riser", + -13.776280403137209 + ], + [ + "▁AWD", + -13.776328086853027 + ], + [ + "277", + -13.776341438293455 + ], + [ + "▁Edmond", + -13.776349067687988 + ], + [ + "Straight", + -13.776365280151367 + ], + [ + "▁bazaar", + -13.776369094848633 + ], + [ + "▁Stamps", + -13.776448249816896 + ], + [ + "née", + -13.776616096496582 + ], + [ + "866", + -13.776620864868164 + ], + [ + "aton", + -13.776630401611328 + ], + [ + "lime", + -13.776631355285645 + ], + [ + "▁Reilly", + -13.776703834533691 + ], + [ + "monger", + -13.776865005493164 + ], + [ + "Rice", + -13.776898384094238 + ], + [ + "▁TRANS", + -13.776949882507324 + ], + [ + "▁Boko", + -13.777087211608888 + ], + [ + "tically", + -13.777111053466797 + ], + [ + "▁Crea", + -13.777111053466797 + ], + [ + "▁defenseman", + -13.777121543884276 + ], + [ + "Philip", + -13.77712631225586 + ], + [ + "eviating", + -13.777276039123535 + ], + [ + "Dy", + -13.77730655670166 + ], + [ + "▁moot", + -13.777328491210938 + ], + [ + "▁Monsanto", + -13.777336120605469 + ], + [ + "▁Pensacola", + -13.777336120605469 + ], + [ + "▁populace", + -13.777336120605469 + ], + [ + "▁circling", + -13.777337074279783 + ], + [ + "▁hindrance", + -13.777337074279783 + ], + [ + "▁McGrath", + -13.777338027954102 + ], + [ + "▁abatement", + -13.777338027954102 + ], + [ + "▁Rainforest", + -13.777347564697266 + ], + [ + "▁maturation", + -13.777351379394531 + ], + [ + "▁Tallinn", + -13.777358055114746 + ], + [ + "▁Blanchard", + -13.77735996246338 + ], + [ + "▁Bellingham", + -13.777383804321287 + ], + [ + "▁Cardiac", + -13.777403831481934 + ], + [ + "Lot", + -13.77742862701416 + ], + [ + "464", + -13.777435302734377 + ], + [ + "▁plough", + -13.777443885803224 + ], + [ + "▁primal", + -13.777460098266602 + ], + [ + "▁Coachella", + -13.777461051940918 + ], + [ + "▁Proposition", + -13.777524948120115 + ], + [ + "▁Hock", + -13.777579307556152 + ], + [ + "kovic", + -13.777585983276367 + ], + [ + "727", 
+ -13.777643203735352 + ], + [ + "▁agro", + -13.777650833129885 + ], + [ + "Ky", + -13.777664184570312 + ], + [ + "ojo", + -13.777717590332031 + ], + [ + "8.0", + -13.777722358703612 + ], + [ + "▁resists", + -13.777734756469728 + ], + [ + "▁abolished", + -13.777799606323242 + ], + [ + "▁Evolve", + -13.777841567993164 + ], + [ + "▁Severe", + -13.777889251708984 + ], + [ + "▁AMS", + -13.778053283691406 + ], + [ + "wain", + -13.778056144714355 + ], + [ + "▁SPD", + -13.778178215026855 + ], + [ + "▁Aur", + -13.77822971343994 + ], + [ + "490", + -13.778284072875977 + ], + [ + "▁Judgment", + -13.778350830078123 + ], + [ + "▁inducing", + -13.77843952178955 + ], + [ + "▁Removals", + -13.778578758239746 + ], + [ + "537", + -13.778619766235352 + ], + [ + "tres", + -13.778647422790527 + ], + [ + "OME", + -13.778648376464844 + ], + [ + "▁skew", + -13.77867031097412 + ], + [ + "-001", + -13.778681755065918 + ], + [ + "▁Chamberlain", + -13.778697967529297 + ], + [ + "sack", + -13.778727531433104 + ], + [ + "OMA", + -13.778743743896484 + ], + [ + "▁Thou", + -13.778765678405762 + ], + [ + "ripper", + -13.778778076171877 + ], + [ + "dominated", + -13.77878761291504 + ], + [ + "▁Yards", + -13.778904914855955 + ], + [ + "Ooh", + -13.778918266296388 + ], + [ + "▁Scheduling", + -13.779007911682127 + ], + [ + "▁Atari", + -13.779024124145508 + ], + [ + "▁undermined", + -13.77904224395752 + ], + [ + "▁mb", + -13.779047966003418 + ], + [ + "pattern", + -13.779065132141112 + ], + [ + "ql", + -13.77914810180664 + ], + [ + "▁decorator", + -13.779157638549805 + ], + [ + "▁delegated", + -13.779163360595703 + ], + [ + "Essential", + -13.779216766357422 + ], + [ + "▁canadian", + -13.77927017211914 + ], + [ + "11,", + -13.779281616210938 + ], + [ + "EXE", + -13.779287338256836 + ], + [ + "▁Dali", + -13.77929973602295 + ], + [ + "▁benefitting", + -13.779443740844728 + ], + [ + "Leaving", + -13.779463768005373 + ], + [ + "External", + -13.77946949005127 + ], + [ + "Scotland", + -13.77947998046875 + ], + [ + "vali", + -13.779546737670898 + ], + [ + "▁SERP", + -13.779548645019531 + ], + [ + "1992", + -13.77956771850586 + ], + [ + "▁Boe", + -13.77959156036377 + ], + [ + "illian", + -13.779598236083984 + ], + [ + "versed", + -13.779600143432615 + ], + [ + "▁convex", + -13.779666900634766 + ], + [ + "▁Browning", + -13.779722213745115 + ], + [ + "▁Cyril", + -13.78007984161377 + ], + [ + "▁Ritchie", + -13.780091285705566 + ], + [ + "iye", + -13.78013038635254 + ], + [ + "▁defensively", + -13.780202865600586 + ], + [ + "Slice", + -13.780263900756836 + ], + [ + "▁restructure", + -13.780264854431152 + ], + [ + "vani", + -13.780290603637695 + ], + [ + "▁Bavaria", + -13.780324935913086 + ], + [ + "bei", + -13.780381202697754 + ], + [ + "▁payback", + -13.780387878417969 + ], + [ + "▁jot", + -13.78046989440918 + ], + [ + "▁zen", + -13.780474662780762 + ], + [ + "▁Croft", + -13.780558586120604 + ], + [ + "cant", + -13.78060245513916 + ], + [ + "▁thermostats", + -13.780604362487791 + ], + [ + "▁exe", + -13.78062343597412 + ], + [ + "▁Harrogate", + -13.780635833740234 + ], + [ + "▁Shenandoah", + -13.780635833740234 + ], + [ + "▁WOULD", + -13.780635833740234 + ], + [ + "▁cappuccino", + -13.780635833740234 + ], + [ + "▁tenacity", + -13.780635833740234 + ], + [ + "▁COUNTY", + -13.78063678741455 + ], + [ + "▁meridian", + -13.78063678741455 + ], + [ + "▁Informatics", + -13.7806396484375 + ], + [ + "▁Adriatic", + -13.780640602111816 + ], + [ + "▁appalling", + -13.780640602111816 + ], + [ + "▁Pathfinder", + -13.780646324157717 + ], + [ + "▁Saddam", 
+ -13.780647277832031 + ], + [ + "▁Worcestershire", + -13.780649185180664 + ], + [ + "▁interspersed", + -13.78065586090088 + ], + [ + "▁Danube", + -13.78066349029541 + ], + [ + "▁McKenna", + -13.780667304992676 + ], + [ + "▁Shetland", + -13.780682563781738 + ], + [ + "umo", + -13.780692100524902 + ], + [ + "▁casket", + -13.7807035446167 + ], + [ + "▁Rolf", + -13.780739784240724 + ], + [ + "▁Khmer", + -13.780741691589355 + ], + [ + "mechanical", + -13.780743598937988 + ], + [ + "▁infill", + -13.78076171875 + ], + [ + "▁Illness", + -13.780810356140137 + ], + [ + "▁HW", + -13.780835151672363 + ], + [ + "▁Forgotten", + -13.780912399291992 + ], + [ + "▁Beads", + -13.780961036682127 + ], + [ + "egen", + -13.78096866607666 + ], + [ + "▁Trin", + -13.781012535095217 + ], + [ + "▁32%", + -13.781023979187012 + ], + [ + "▁Tobias", + -13.781035423278809 + ], + [ + "▁Kayla", + -13.781041145324709 + ], + [ + "▁succumbed", + -13.781085968017578 + ], + [ + "▁Unite", + -13.781102180480955 + ], + [ + "▁Pipes", + -13.781126976013184 + ], + [ + "▁tel", + -13.781133651733398 + ], + [ + "dara", + -13.781279563903809 + ], + [ + "▁redress", + -13.781320571899414 + ], + [ + "topping", + -13.781335830688477 + ], + [ + "▁Albanian", + -13.781380653381348 + ], + [ + "▁Eck", + -13.7814359664917 + ], + [ + "meri", + -13.781476020812988 + ], + [ + "▁13.5", + -13.781488418579102 + ], + [ + "142", + -13.781519889831545 + ], + [ + "▁Tae", + -13.781781196594238 + ], + [ + "▁Cau", + -13.781789779663086 + ], + [ + "▁coherence", + -13.781819343566896 + ], + [ + "229", + -13.78183650970459 + ], + [ + "▁calmness", + -13.781842231750488 + ], + [ + "▁Samba", + -13.781895637512209 + ], + [ + "▁Porte", + -13.781923294067385 + ], + [ + "NASA", + -13.781960487365724 + ], + [ + "▁OEMs", + -13.781964302062988 + ], + [ + "▁Nicki", + -13.781978607177734 + ], + [ + "▁Narrative", + -13.781987190246582 + ], + [ + "▁DATE", + -13.78200626373291 + ], + [ + "▁Trips", + -13.782034873962402 + ], + [ + "Typ", + -13.782036781311035 + ], + [ + "▁gimmick", + -13.782037734985352 + ], + [ + "nner", + -13.782074928283691 + ], + [ + "▁plantar", + -13.782194137573242 + ], + [ + "▁digested", + -13.782270431518556 + ], + [ + "▁appropriation", + -13.782305717468262 + ], + [ + "Officers", + -13.782393455505373 + ], + [ + "Kan", + -13.782410621643066 + ], + [ + "▁Tama", + -13.78242301940918 + ], + [ + "Eds", + -13.782567024230955 + ], + [ + "▁Homeless", + -13.782583236694336 + ], + [ + "▁Acu", + -13.782647132873535 + ], + [ + "▁Reduced", + -13.782684326171877 + ], + [ + "▁modal", + -13.78279972076416 + ], + [ + "5.9", + -13.782855987548828 + ], + [ + "Oxford", + -13.782883644104004 + ], + [ + "▁camo", + -13.782899856567385 + ], + [ + "▁Dortmund", + -13.782903671264648 + ], + [ + "▁endures", + -13.78293228149414 + ], + [ + "Minister", + -13.782968521118164 + ], + [ + "GDPR", + -13.782973289489746 + ], + [ + "HRC", + -13.78309440612793 + ], + [ + "▁MAIN", + -13.783214569091797 + ], + [ + "624", + -13.783226013183594 + ], + [ + "▁victor", + -13.783269882202148 + ], + [ + "▁Faber", + -13.783275604248049 + ], + [ + "▁Runners", + -13.783384323120115 + ], + [ + "9.1", + -13.78343391418457 + ], + [ + "cox", + -13.78348159790039 + ], + [ + "722", + -13.783541679382324 + ], + [ + "▁deductibles", + -13.783570289611816 + ], + [ + "▁960", + -13.783577919006348 + ], + [ + "pert", + -13.78369140625 + ], + [ + "Fabric", + -13.78378677368164 + ], + [ + "9:", + -13.783869743347168 + ], + [ + "bbi", + -13.783902168273926 + ], + [ + "▁Bungalow", + -13.783946990966797 + ], + [ + 
"▁Shrewsbury", + -13.783946990966797 + ], + [ + "▁controversies", + -13.783946990966797 + ], + [ + "▁dignified", + -13.783946990966797 + ], + [ + "▁ideologies", + -13.783946990966797 + ], + [ + "▁podiatrist", + -13.783946990966797 + ], + [ + "Explanation", + -13.783947944641112 + ], + [ + "▁8.2", + -13.78395175933838 + ], + [ + "▁adversary", + -13.783963203430176 + ], + [ + "▁Huskies", + -13.783967971801758 + ], + [ + "▁Stealth", + -13.783971786499023 + ], + [ + "cala", + -13.783997535705566 + ], + [ + "▁Xfinity", + -13.78402042388916 + ], + [ + "hali", + -13.784021377563477 + ], + [ + "▁Kroger", + -13.78403377532959 + ], + [ + "▁staggered", + -13.784090995788574 + ], + [ + "252", + -13.78415298461914 + ], + [ + "▁Jae", + -13.784154891967772 + ], + [ + "▁Acute", + -13.784249305725098 + ], + [ + "▁Norwood", + -13.784266471862791 + ], + [ + "428", + -13.784284591674805 + ], + [ + "▁Meyers", + -13.784432411193848 + ], + [ + "dyne", + -13.78443431854248 + ], + [ + "grip", + -13.78447437286377 + ], + [ + "▁Baz", + -13.784480094909668 + ], + [ + "▁Summers", + -13.784523963928224 + ], + [ + "911", + -13.78453254699707 + ], + [ + "▁(45", + -13.78458023071289 + ], + [ + "▁eroded", + -13.78458023071289 + ], + [ + "▁Swamp", + -13.784591674804688 + ], + [ + "PEX", + -13.784653663635254 + ], + [ + "▁Devo", + -13.784656524658203 + ], + [ + "▁pedagogical", + -13.78471851348877 + ], + [ + "▁LDS", + -13.784728050231934 + ], + [ + "Prayer", + -13.78479290008545 + ], + [ + "▁moisturize", + -13.784804344177246 + ], + [ + "Essay", + -13.78489589691162 + ], + [ + "jang", + -13.784944534301758 + ], + [ + "▁supra", + -13.784953117370604 + ], + [ + "▁anodized", + -13.784992218017578 + ], + [ + "013", + -13.785053253173828 + ], + [ + "Cleaning", + -13.785097122192385 + ], + [ + "▁hatched", + -13.785136222839355 + ], + [ + "▁hindered", + -13.785164833068848 + ], + [ + "▁perilous", + -13.7852144241333 + ], + [ + "▁Madeline", + -13.785238265991213 + ], + [ + "khi", + -13.785284042358398 + ], + [ + "▁bureaus", + -13.78533935546875 + ], + [ + "▁Lining", + -13.785407066345217 + ], + [ + "teri", + -13.785420417785645 + ], + [ + "▁Bishops", + -13.785433769226074 + ], + [ + "AFF", + -13.78552532196045 + ], + [ + "▁Ganga", + -13.785537719726562 + ], + [ + "bry", + -13.785567283630373 + ], + [ + "▁syndicate", + -13.785619735717772 + ], + [ + "▁colt", + -13.785653114318848 + ], + [ + "▁TNT", + -13.785656929016112 + ], + [ + "▁prosthetic", + -13.785656929016112 + ], + [ + "▁Carly", + -13.785698890686035 + ], + [ + "▁birthing", + -13.78573226928711 + ], + [ + "iar", + -13.785771369934082 + ], + [ + "▁finisher", + -13.785794258117676 + ], + [ + "▁overlays", + -13.78587532043457 + ], + [ + "▁easter", + -13.78602123260498 + ], + [ + "▁Bao", + -13.786044120788574 + ], + [ + "▁Partial", + -13.7860746383667 + ], + [ + "MEX", + -13.786126136779783 + ], + [ + "▁Soak", + -13.78612995147705 + ], + [ + "dela", + -13.78615379333496 + ], + [ + "▁Platt", + -13.786166191101074 + ], + [ + "▁seep", + -13.786174774169922 + ], + [ + "▁REM", + -13.786185264587402 + ], + [ + "scratch", + -13.78626537322998 + ], + [ + "despite", + -13.78628921508789 + ], + [ + "▁beaver", + -13.786304473876951 + ], + [ + "-2,", + -13.786322593688965 + ], + [ + "▁SERVPRO", + -13.786375999450684 + ], + [ + "▁Realm", + -13.786436080932615 + ], + [ + "▁PMI", + -13.786506652832031 + ], + [ + "▁tang", + -13.786516189575195 + ], + [ + "▁Forks", + -13.786548614501951 + ], + [ + "▁Kew", + -13.78654956817627 + ], + [ + "▁Molina", + -13.786591529846191 + ], + [ + "▁Prem", + 
-13.786645889282228 + ], + [ + "▁Tonga", + -13.78668212890625 + ], + [ + "segment", + -13.78668975830078 + ], + [ + "ALA", + -13.786828994750977 + ], + [ + "poo", + -13.786870002746582 + ], + [ + "evich", + -13.786884307861328 + ], + [ + "fig", + -13.786907196044922 + ], + [ + "▁timeliness", + -13.786907196044922 + ], + [ + "▁suffix", + -13.787127494812012 + ], + [ + "▁shaman", + -13.78713607788086 + ], + [ + "▁technologists", + -13.78718376159668 + ], + [ + "istan", + -13.78719997406006 + ], + [ + "▁dependents", + -13.787208557128906 + ], + [ + "▁serpent", + -13.787245750427246 + ], + [ + "628", + -13.787263870239258 + ], + [ + "▁Lacrosse", + -13.787269592285156 + ], + [ + "▁Turquoise", + -13.787269592285156 + ], + [ + "▁arrears", + -13.787269592285156 + ], + [ + "▁biennial", + -13.787269592285156 + ], + [ + "▁convoluted", + -13.787269592285156 + ], + [ + "▁licorice", + -13.787269592285156 + ], + [ + "▁densities", + -13.787270545959473 + ], + [ + "▁Kangaroo", + -13.787271499633787 + ], + [ + "▁CompTIA", + -13.787273406982422 + ], + [ + "▁Vishnu", + -13.787273406982422 + ], + [ + "▁Wardrobe", + -13.78727912902832 + ], + [ + "▁Enrique", + -13.787290573120115 + ], + [ + "▁Ezra", + -13.787307739257812 + ], + [ + "lty", + -13.787309646606444 + ], + [ + "dana", + -13.787324905395508 + ], + [ + "therm", + -13.787361145019531 + ], + [ + "▁objected", + -13.787370681762695 + ], + [ + "▁Validation", + -13.787391662597656 + ], + [ + "▁Governing", + -13.787395477294922 + ], + [ + "763", + -13.78740119934082 + ], + [ + "▁peripherals", + -13.78742218017578 + ], + [ + "▁PARTY", + -13.787426948547363 + ], + [ + "▁doping", + -13.787440299987791 + ], + [ + "▁encased", + -13.787446022033691 + ], + [ + "▁Koreans", + -13.78754711151123 + ], + [ + "▁hor", + -13.787557601928713 + ], + [ + "▁1964,", + -13.787620544433594 + ], + [ + "▁diminishes", + -13.787627220153809 + ], + [ + "▁Kors", + -13.787708282470703 + ], + [ + "▁5.30", + -13.787737846374512 + ], + [ + "EVER", + -13.787739753723145 + ], + [ + "▁arborist", + -13.78782844543457 + ], + [ + "▁Werner", + -13.787875175476074 + ], + [ + "▁winch", + -13.787914276123049 + ], + [ + "42.", + -13.787915229797363 + ], + [ + "▁boosters", + -13.787951469421388 + ], + [ + "▁CPAs", + -13.787999153137209 + ], + [ + "ismo", + -13.78803253173828 + ], + [ + "▁covert", + -13.788043975830078 + ], + [ + "▁propositions", + -13.788049697875977 + ], + [ + "hunting", + -13.788056373596191 + ], + [ + "864", + -13.7880859375 + ], + [ + "worthiness", + -13.788222312927246 + ], + [ + "▁Brut", + -13.78822422027588 + ], + [ + "▁CASE", + -13.788250923156738 + ], + [ + "▁Wizards", + -13.788288116455078 + ], + [ + "▁Estimated", + -13.788304328918455 + ], + [ + "▁Bess", + -13.788317680358888 + ], + [ + "▁barb", + -13.788416862487791 + ], + [ + "▁underscores", + -13.788455963134766 + ], + [ + "mba", + -13.78847599029541 + ], + [ + "▁negotiable", + -13.788507461547852 + ], + [ + "effer", + -13.78859519958496 + ], + [ + "▁Potts", + -13.788661003112791 + ], + [ + "▁Narayan", + -13.788711547851562 + ], + [ + "SMA", + -13.788848876953123 + ], + [ + "▁purifier", + -13.789024353027344 + ], + [ + "BBC", + -13.78908920288086 + ], + [ + "778", + -13.789113998413086 + ], + [ + "GEL", + -13.789118766784668 + ], + [ + "▁Bengals", + -13.789183616638184 + ], + [ + "▁surfer", + -13.789223670959473 + ], + [ + "172", + -13.789268493652344 + ], + [ + "306", + -13.78927993774414 + ], + [ + "▁wrestler", + -13.789299011230469 + ], + [ + "661", + -13.789359092712402 + ], + [ + "▁Lenders", + -13.78942584991455 + ], 
+ [ + "▁mainframe", + -13.78946018218994 + ], + [ + "sighted", + -13.789568901062012 + ], + [ + "▁Igor", + -13.78959846496582 + ], + [ + "326", + -13.789624214172363 + ], + [ + "Saxon", + -13.789653778076172 + ], + [ + "288", + -13.789679527282717 + ], + [ + "twitter", + -13.78968334197998 + ], + [ + "▁Infection", + -13.789703369140623 + ], + [ + "Islamic", + -13.789729118347168 + ], + [ + "▁jute", + -13.789743423461914 + ], + [ + "duck", + -13.789803504943848 + ], + [ + "▁intestines", + -13.789813041687012 + ], + [ + "▁Rah", + -13.789860725402832 + ], + [ + "Push", + -13.789868354797363 + ], + [ + "209", + -13.78991985321045 + ], + [ + "▁toning", + -13.789950370788574 + ], + [ + "jia", + -13.789961814880373 + ], + [ + "▁atlas", + -13.789962768554688 + ], + [ + "▁Returning", + -13.7899808883667 + ], + [ + "▁Auctions", + -13.790011405944824 + ], + [ + "izen", + -13.790016174316406 + ], + [ + "▁Wedge", + -13.790042877197266 + ], + [ + "▁vacate", + -13.790047645568848 + ], + [ + "Rx", + -13.790160179138184 + ], + [ + "athon", + -13.790181159973145 + ], + [ + "▁Erasmus", + -13.790217399597168 + ], + [ + "▁tinker", + -13.79022979736328 + ], + [ + "▁burrow", + -13.790266036987305 + ], + [ + "622", + -13.790267944335938 + ], + [ + "▁telescopes", + -13.790276527404783 + ], + [ + "▁TEN", + -13.790298461914062 + ], + [ + "▁replaceable", + -13.790335655212402 + ], + [ + "▁alleys", + -13.790364265441896 + ], + [ + "ddington", + -13.79042625427246 + ], + [ + "▁beacons", + -13.79042911529541 + ], + [ + "▁scant", + -13.790477752685549 + ], + [ + "LIVE", + -13.790499687194824 + ], + [ + "▁Elisabeth", + -13.790501594543455 + ], + [ + "▁Cutler", + -13.7905912399292 + ], + [ + "Excerpt", + -13.790602684020996 + ], + [ + "▁Hemingway", + -13.790602684020996 + ], + [ + "▁periphery", + -13.790602684020996 + ], + [ + "▁confluence", + -13.790607452392578 + ], + [ + "▁McBride", + -13.790610313415527 + ], + [ + "▁Empowerment", + -13.790637969970703 + ], + [ + "▁Deployment", + -13.790655136108398 + ], + [ + "habit", + -13.790657043457031 + ], + [ + "86)", + -13.790661811828612 + ], + [ + "▁Skating", + -13.790663719177246 + ], + [ + "▁fanfare", + -13.79067611694336 + ], + [ + "▁autopsy", + -13.79069995880127 + ], + [ + "▁Geology", + -13.790716171264648 + ], + [ + "▁10000", + -13.790761947631836 + ], + [ + "544", + -13.79079818725586 + ], + [ + "▁slant", + -13.79080581665039 + ], + [ + "▁leftist", + -13.79084014892578 + ], + [ + "▁cadets", + -13.79085922241211 + ], + [ + "▁northbound", + -13.790900230407717 + ], + [ + "drill", + -13.79090976715088 + ], + [ + "▁Elgin", + -13.790916442871094 + ], + [ + "▁gust", + -13.790952682495115 + ], + [ + "▁38%", + -13.790959358215332 + ], + [ + "Publisher", + -13.79100227355957 + ], + [ + "▁Creme", + -13.791017532348633 + ], + [ + "▁Ariana", + -13.791033744812012 + ], + [ + "quit", + -13.791098594665527 + ], + [ + "▁Yam", + -13.79115867614746 + ], + [ + "INFO", + -13.791165351867676 + ], + [ + "▁hoarding", + -13.791165351867676 + ], + [ + "▁Freud", + -13.791186332702637 + ], + [ + "▁Shankar", + -13.791196823120115 + ], + [ + "▁2019?", + -13.791272163391112 + ], + [ + "roof", + -13.791361808776855 + ], + [ + "▁2011).", + -13.791418075561523 + ], + [ + "▁inset", + -13.791463851928713 + ], + [ + "Huge", + -13.791501998901367 + ], + [ + "▁paul", + -13.791638374328612 + ], + [ + "▁JL", + -13.791665077209473 + ], + [ + "Rome", + -13.791709899902344 + ], + [ + "▁DFW", + -13.79171371459961 + ], + [ + "▁CVE", + -13.791723251342772 + ], + [ + "▁Bowls", + -13.791747093200684 + ], + [ + 
"▁chai", + -13.79177951812744 + ], + [ + "▁Helix", + -13.791788101196287 + ], + [ + "▁Donkey", + -13.79185962677002 + ], + [ + "▁Stats", + -13.791969299316406 + ], + [ + "IEC", + -13.79200839996338 + ], + [ + "▁chillies", + -13.792091369628906 + ], + [ + "antha", + -13.792139053344728 + ], + [ + "voc", + -13.792143821716309 + ], + [ + "▁toasty", + -13.792213439941406 + ], + [ + "Possess", + -13.792278289794922 + ], + [ + "▁Hul", + -13.792353630065918 + ], + [ + "▁Hodge", + -13.792384147644045 + ], + [ + "▁preservative", + -13.792417526245115 + ], + [ + "▁pip", + -13.79245948791504 + ], + [ + "rude", + -13.792463302612305 + ], + [ + "▁Measurements", + -13.792518615722656 + ], + [ + "Meta", + -13.792583465576172 + ], + [ + "fail", + -13.792604446411133 + ], + [ + "▁MCU", + -13.792845726013184 + ], + [ + "▁RUN", + -13.792953491210938 + ], + [ + "▁ect", + -13.79296875 + ], + [ + "▁Poke", + -13.79301643371582 + ], + [ + "▁stormy", + -13.793025016784668 + ], + [ + "▁(2009).", + -13.793146133422852 + ], + [ + "Alright", + -13.793160438537598 + ], + [ + "Supply", + -13.793238639831545 + ], + [ + "gara", + -13.793241500854492 + ], + [ + "▁Tori", + -13.793245315551758 + ], + [ + "▁adventurers", + -13.793245315551758 + ], + [ + "Output", + -13.79328727722168 + ], + [ + "21)", + -13.793336868286133 + ], + [ + "▁deviate", + -13.79335117340088 + ], + [ + "rton", + -13.793353080749512 + ], + [ + "▁florals", + -13.793375015258787 + ], + [ + "payments", + -13.793500900268556 + ], + [ + "▁dusted", + -13.79375171661377 + ], + [ + "nack", + -13.793757438659668 + ], + [ + "▁Pau", + -13.793801307678224 + ], + [ + "▁Bye", + -13.793856620788574 + ], + [ + "▁authoring", + -13.79387664794922 + ], + [ + "butter", + -13.793890953063965 + ], + [ + "▁quarterbacks", + -13.793906211853027 + ], + [ + "▁sarcastic", + -13.79392147064209 + ], + [ + "Sin", + -13.79394245147705 + ], + [ + "▁2014–2018", + -13.793947219848633 + ], + [ + "▁Macmillan", + -13.793947219848633 + ], + [ + "▁evangelism", + -13.79394817352295 + ], + [ + "▁Karaoke", + -13.79395580291748 + ], + [ + "▁jolly", + -13.793963432312012 + ], + [ + "▁rebuke", + -13.793964385986328 + ], + [ + "▁ferries", + -13.793967247009276 + ], + [ + "▁Charlton", + -13.794001579284668 + ], + [ + "▁discernment", + -13.794017791748049 + ], + [ + "▁defraud", + -13.79403018951416 + ], + [ + "Standing", + -13.79405689239502 + ], + [ + "lua", + -13.79409408569336 + ], + [ + "lut", + -13.794106483459473 + ], + [ + "ambo", + -13.794111251831056 + ], + [ + "▁Woodworking", + -13.794127464294434 + ], + [ + "▁neurologist", + -13.7941312789917 + ], + [ + "▁Detector", + -13.79413890838623 + ], + [ + "▁Hornets", + -13.794267654418944 + ], + [ + "hak", + -13.794316291809082 + ], + [ + "pill", + -13.794316291809082 + ], + [ + "boe", + -13.794384956359863 + ], + [ + "▁nit", + -13.794405937194824 + ], + [ + "/#", + -13.794408798217772 + ], + [ + "CUR", + -13.794427871704102 + ], + [ + "heated", + -13.79448699951172 + ], + [ + "Whole", + -13.794520378112791 + ], + [ + "▁aficionados", + -13.79452419281006 + ], + [ + "yg", + -13.794598579406738 + ], + [ + "▁Mn", + -13.79461669921875 + ], + [ + "▁FSC", + -13.794652938842772 + ], + [ + "▁civilized", + -13.79465389251709 + ], + [ + "ROOF", + -13.794658660888672 + ], + [ + "▁EVENT", + -13.794673919677734 + ], + [ + "▁lakeside", + -13.794685363769531 + ], + [ + "▁typewriter", + -13.794730186462402 + ], + [ + "▁Rash", + -13.794794082641602 + ], + [ + "248", + -13.794798851013184 + ], + [ + "41.", + -13.794809341430664 + ], + [ + "Robot", + 
-13.794821739196776 + ], + [ + "▁Paws", + -13.79482364654541 + ], + [ + "Speaker", + -13.79483127593994 + ], + [ + "pate", + -13.794856071472168 + ], + [ + "▁Palms", + -13.794894218444824 + ], + [ + "▁stereotypical", + -13.794897079467772 + ], + [ + "000000", + -13.79494571685791 + ], + [ + "▁Density", + -13.795065879821776 + ], + [ + "▁prohibiting", + -13.79512882232666 + ], + [ + "▁36%", + -13.795212745666504 + ], + [ + "▁Ach", + -13.795217514038086 + ], + [ + "▁Crimes", + -13.79523468017578 + ], + [ + "paw", + -13.79527187347412 + ], + [ + "rij", + -13.795275688171388 + ], + [ + "Lesson", + -13.795297622680664 + ], + [ + "234", + -13.79531192779541 + ], + [ + "▁Routledge", + -13.795313835144045 + ], + [ + "▁gran", + -13.79531478881836 + ], + [ + "▁dissolves", + -13.79537296295166 + ], + [ + "▁skulls", + -13.795377731323242 + ], + [ + "▁Repository", + -13.79539680480957 + ], + [ + "▁outraged", + -13.795415878295898 + ], + [ + "arms", + -13.795432090759276 + ], + [ + "Award", + -13.795520782470703 + ], + [ + "ETS", + -13.7955322265625 + ], + [ + "466", + -13.795557975769045 + ], + [ + "▁Volk", + -13.79555892944336 + ], + [ + "▁asymmetrical", + -13.795574188232422 + ], + [ + "▁cheery", + -13.795610427856444 + ], + [ + "▁regroup", + -13.795645713806152 + ], + [ + "ieri", + -13.795669555664062 + ], + [ + "kull", + -13.795787811279297 + ], + [ + "Suggested", + -13.795831680297852 + ], + [ + "5.8", + -13.795843124389648 + ], + [ + "▁Encounter", + -13.795849800109863 + ], + [ + "▁carol", + -13.795870780944824 + ], + [ + "▁clams", + -13.795920372009276 + ], + [ + "nil", + -13.795933723449709 + ], + [ + "chang", + -13.795963287353516 + ], + [ + "▁2100", + -13.796034812927246 + ], + [ + "90%", + -13.796067237854004 + ], + [ + "ijn", + -13.79611587524414 + ], + [ + "liche", + -13.796164512634276 + ], + [ + "▁storyboard", + -13.796183586120604 + ], + [ + "▁refocus", + -13.796194076538086 + ], + [ + "▁Breathe", + -13.796239852905272 + ], + [ + "▁Peaks", + -13.796319007873535 + ], + [ + "▁248", + -13.796363830566406 + ], + [ + "▁1945,", + -13.79638385772705 + ], + [ + "▁7/8", + -13.796399116516112 + ], + [ + "hc", + -13.796442985534668 + ], + [ + "DMA", + -13.796494483947754 + ], + [ + "▁tadalafil", + -13.79649543762207 + ], + [ + "LTA", + -13.79651927947998 + ], + [ + "▁Cousins", + -13.796660423278809 + ], + [ + "bucket", + -13.796679496765137 + ], + [ + "lob", + -13.796689987182615 + ], + [ + "▁qualifiers", + -13.79670524597168 + ], + [ + "morphic", + -13.796746253967283 + ], + [ + "▁Resilience", + -13.796747207641602 + ], + [ + "▁Snacks", + -13.796761512756348 + ], + [ + "empty", + -13.79676628112793 + ], + [ + "▁Sak", + -13.79680061340332 + ], + [ + "▁convent", + -13.79687786102295 + ], + [ + "otter", + -13.797019958496094 + ], + [ + "JJ", + -13.797025680541992 + ], + [ + "Hat", + -13.797039985656738 + ], + [ + "▁Permalink", + -13.79708766937256 + ], + [ + "pump", + -13.797109603881836 + ], + [ + "Searching", + -13.797115325927734 + ], + [ + "awan", + -13.79714012145996 + ], + [ + "▁reminisce", + -13.797279357910156 + ], + [ + "▁swatch", + -13.797293663024902 + ], + [ + "▁eponymous", + -13.797303199768066 + ], + [ + "▁exfoliation", + -13.797303199768066 + ], + [ + "▁fertilization", + -13.797303199768066 + ], + [ + "▁Liaison", + -13.797306060791016 + ], + [ + "▁Ubisoft", + -13.797306060791016 + ], + [ + "▁menacing", + -13.797306060791016 + ], + [ + "▁Django", + -13.797307968139648 + ], + [ + "▁Biodiversity", + -13.797313690185549 + ], + [ + "▁uPVC", + -13.797320365905762 + ], + [ + "▁Metropolis", + 
-13.797334671020508 + ], + [ + "▁SPACE", + -13.797341346740724 + ], + [ + "▁transact", + -13.79734230041504 + ], + [ + "▁collusion", + -13.797346115112305 + ], + [ + "LOT", + -13.797385215759276 + ], + [ + "▁unmarked", + -13.797393798828123 + ], + [ + "▁Woven", + -13.797412872314451 + ], + [ + "▁Hebrews", + -13.79741382598877 + ], + [ + "▁Fullerton", + -13.79745101928711 + ], + [ + "▁Plush", + -13.797452926635742 + ], + [ + "▁Apocalypse", + -13.79755973815918 + ], + [ + "▁renewals", + -13.797564506530762 + ], + [ + "▁envious", + -13.797595024108888 + ], + [ + "prime", + -13.797619819641112 + ], + [ + "UU", + -13.797627449035645 + ], + [ + "▁hooded", + -13.79765796661377 + ], + [ + "▁Singers", + -13.797658920288086 + ], + [ + "▁Ankle", + -13.79766082763672 + ], + [ + "▁surveyor", + -13.797674179077148 + ], + [ + "▁sire", + -13.79768180847168 + ], + [ + "▁miraculously", + -13.797727584838867 + ], + [ + "▁Baku", + -13.797832489013672 + ], + [ + "ASC", + -13.797894477844238 + ], + [ + "▁Turtles", + -13.797933578491213 + ], + [ + "▁wildest", + -13.797940254211426 + ], + [ + "▁TAG", + -13.798020362854004 + ], + [ + "▁bodywork", + -13.798032760620115 + ], + [ + "▁kr", + -13.798032760620115 + ], + [ + "▁Fritz", + -13.798052787780762 + ], + [ + "▁instilled", + -13.798115730285645 + ], + [ + "ingen", + -13.798134803771973 + ], + [ + "handler", + -13.798154830932615 + ], + [ + "tones", + -13.79816722869873 + ], + [ + "▁downsides", + -13.798190116882324 + ], + [ + "ethane", + -13.798195838928224 + ], + [ + "563", + -13.79822063446045 + ], + [ + "▁angeles", + -13.798272132873535 + ], + [ + "▁Torre", + -13.79831314086914 + ], + [ + "OTS", + -13.798379898071287 + ], + [ + "▁anise", + -13.7983980178833 + ], + [ + "▁manicured", + -13.798409461975098 + ], + [ + "▁Arcadia", + -13.798412322998049 + ], + [ + "▁pigmented", + -13.798466682434082 + ], + [ + "6.9", + -13.798474311828612 + ], + [ + "Oak", + -13.798578262329102 + ], + [ + "▁suitcases", + -13.798579216003418 + ], + [ + "▁Mika", + -13.798580169677734 + ], + [ + "cada", + -13.798628807067873 + ], + [ + "▁eds", + -13.798660278320312 + ], + [ + "▁Gradually", + -13.798663139343262 + ], + [ + "▁tankless", + -13.798738479614258 + ], + [ + "668", + -13.79885196685791 + ], + [ + "▁deci", + -13.798870086669922 + ], + [ + "▁Viz", + -13.798914909362791 + ], + [ + "bars", + -13.798933029174805 + ], + [ + "uddy", + -13.798972129821776 + ], + [ + "lass", + -13.799041748046877 + ], + [ + "▁WPC", + -13.799043655395508 + ], + [ + "Roy", + -13.799055099487305 + ], + [ + "1100", + -13.799079895019531 + ], + [ + "▁BK", + -13.799222946166992 + ], + [ + "▁inactivity", + -13.799267768859863 + ], + [ + "▁Steiner", + -13.79928207397461 + ], + [ + "▁uncles", + -13.79928970336914 + ], + [ + "▁Arjun", + -13.799297332763672 + ], + [ + "rider", + -13.799307823181152 + ], + [ + "▁Gord", + -13.799413681030272 + ], + [ + "iji", + -13.799498558044434 + ], + [ + "▁weathering", + -13.799508094787598 + ], + [ + "▁Swi", + -13.799549102783203 + ], + [ + "enbach", + -13.79955005645752 + ], + [ + "▁interconnect", + -13.799631118774414 + ], + [ + "▁1867", + -13.799643516540527 + ], + [ + "▁COMP", + -13.799647331237791 + ], + [ + "▁Beloved", + -13.799657821655272 + ], + [ + "▁paris", + -13.79969596862793 + ], + [ + "▁weeknight", + -13.79970645904541 + ], + [ + "▁10:3", + -13.799715995788574 + ], + [ + "▁1882", + -13.799738883972168 + ], + [ + "▁pimples", + -13.799872398376465 + ], + [ + "lec", + -13.799921989440918 + ], + [ + "▁Atmospheric", + -13.799943923950195 + ], + [ + "▁Endurance", + 
-13.799944877624512 + ], + [ + "Smooth", + -13.799983024597168 + ], + [ + "society", + -13.79999542236328 + ], + [ + "▁Thru", + -13.799996376037598 + ], + [ + "▁Bora", + -13.800020217895508 + ], + [ + "619", + -13.800024032592772 + ], + [ + "Hospital", + -13.800095558166504 + ], + [ + "▁vented", + -13.800124168395996 + ], + [ + "▁Plaintiff", + -13.800145149230955 + ], + [ + "▁groves", + -13.800178527832031 + ], + [ + "▁thrived", + -13.80019474029541 + ], + [ + "▁Scouting", + -13.800213813781738 + ], + [ + "▁mala", + -13.800214767456056 + ], + [ + "▁Finest", + -13.800249099731444 + ], + [ + "▁Bea", + -13.800289154052734 + ], + [ + "▁taster", + -13.800301551818848 + ], + [ + "▁Treats", + -13.800323486328123 + ], + [ + "▁Wound", + -13.800337791442873 + ], + [ + "▁Residences", + -13.800375938415527 + ], + [ + "▁800,000", + -13.800533294677734 + ], + [ + "▁Calcutta", + -13.80066967010498 + ], + [ + "▁intermediaries", + -13.80066967010498 + ], + [ + "▁unknowingly", + -13.80066967010498 + ], + [ + "▁Kellogg", + -13.800670623779297 + ], + [ + "▁Saigon", + -13.80068588256836 + ], + [ + "▁dagger", + -13.80069065093994 + ], + [ + "FORM", + -13.800691604614258 + ], + [ + "mbo", + -13.800698280334473 + ], + [ + "-96", + -13.800721168518066 + ], + [ + "▁montage", + -13.800776481628418 + ], + [ + "▁Transplant", + -13.800799369812012 + ], + [ + "902", + -13.800814628601074 + ], + [ + "▁Lenny", + -13.80081558227539 + ], + [ + "688", + -13.800841331481934 + ], + [ + "▁vetting", + -13.800850868225098 + ], + [ + "▁Antiques", + -13.800864219665527 + ], + [ + "idy", + -13.800875663757324 + ], + [ + "▁1875", + -13.800893783569336 + ], + [ + "577", + -13.800902366638184 + ], + [ + "turning", + -13.800909996032717 + ], + [ + "▁Coyote", + -13.800912857055664 + ], + [ + "▁Whitman", + -13.800914764404297 + ], + [ + "complex", + -13.800923347473145 + ], + [ + "▁Dolce", + -13.800943374633787 + ], + [ + "▁minecraft", + -13.800952911376951 + ], + [ + "▁$1.8", + -13.800955772399902 + ], + [ + "▁siren", + -13.800965309143066 + ], + [ + "pets", + -13.80097770690918 + ], + [ + "▁differentiating", + -13.80100440979004 + ], + [ + "▁glacial", + -13.80102825164795 + ], + [ + "▁loopholes", + -13.80102825164795 + ], + [ + "ugg", + -13.801076889038086 + ], + [ + "▁228", + -13.801079750061035 + ], + [ + "ssy", + -13.801095008850098 + ], + [ + "▁allergen", + -13.80109691619873 + ], + [ + "este", + -13.801117897033691 + ], + [ + "▁Bookstore", + -13.801153182983398 + ], + [ + "▁recline", + -13.801170349121094 + ], + [ + "▁sportswear", + -13.801183700561523 + ], + [ + "▁Dough", + -13.801226615905762 + ], + [ + "392", + -13.801230430603027 + ], + [ + "▁Kabul", + -13.801249504089355 + ], + [ + "▁glut", + -13.801313400268556 + ], + [ + "▁COPD", + -13.801342010498049 + ], + [ + "771", + -13.801379203796388 + ], + [ + "▁layup", + -13.80140495300293 + ], + [ + "LIT", + -13.801480293273926 + ], + [ + "▁Smiles", + -13.801514625549316 + ], + [ + "▁Paintings", + -13.801518440246582 + ], + [ + "683", + -13.801542282104492 + ], + [ + "BEC", + -13.801586151123049 + ], + [ + "▁SMTP", + -13.801668167114258 + ], + [ + "▁Haul", + -13.801709175109863 + ], + [ + "Qui", + -13.80172634124756 + ], + [ + "rito", + -13.801798820495604 + ], + [ + "▁Zel", + -13.801824569702148 + ], + [ + "▁insecurities", + -13.80197525024414 + ], + [ + "▁XVI", + -13.802021026611328 + ], + [ + "▁ruffle", + -13.802069664001465 + ], + [ + "▁2019-20", + -13.802087783813477 + ], + [ + "ksha", + -13.802090644836426 + ], + [ + "▁OFFER", + -13.802132606506348 + ], + [ + "mbi", + 
-13.802199363708496 + ], + [ + "▁ocular", + -13.80223274230957 + ], + [ + "Materials", + -13.80228328704834 + ], + [ + "azine", + -13.802284240722656 + ], + [ + "ие", + -13.802292823791504 + ], + [ + "rough", + -13.802303314208984 + ], + [ + "▁backstory", + -13.802303314208984 + ], + [ + "$4", + -13.802308082580566 + ], + [ + "▁Bey", + -13.802370071411133 + ], + [ + "BUT", + -13.802397727966309 + ], + [ + "▁cipher", + -13.802423477172852 + ], + [ + "▁Establish", + -13.802530288696287 + ], + [ + "ESA", + -13.802592277526855 + ], + [ + "▁Themed", + -13.802610397338867 + ], + [ + "▁gable", + -13.802639961242676 + ], + [ + "RCC", + -13.80270767211914 + ], + [ + "▁championed", + -13.8027982711792 + ], + [ + "phyl", + -13.802838325500488 + ], + [ + "▁Wander", + -13.802997589111328 + ], + [ + "▁Bumble", + -13.803062438964844 + ], + [ + "▁shallots", + -13.803109169006348 + ], + [ + "▁Reign", + -13.80325412750244 + ], + [ + "▁1965.", + -13.803263664245604 + ], + [ + "▁Ov", + -13.80337142944336 + ], + [ + "7-6", + -13.80339527130127 + ], + [ + "▁stripper", + -13.803415298461914 + ], + [ + "Plenty", + -13.80344295501709 + ], + [ + "▁VF", + -13.803444862365724 + ], + [ + "kn", + -13.803511619567873 + ], + [ + "lens", + -13.803550720214844 + ], + [ + "Sad", + -13.80359172821045 + ], + [ + "▁Towing", + -13.803689002990724 + ], + [ + "▁doula", + -13.80368995666504 + ], + [ + "NAME", + -13.803690910339355 + ], + [ + "urus", + -13.803691864013672 + ], + [ + "▁Decorate", + -13.803781509399414 + ], + [ + "▁succinct", + -13.803805351257324 + ], + [ + "▁rockers", + -13.80381202697754 + ], + [ + "▁pebbles", + -13.803840637207031 + ], + [ + "arr", + -13.8038969039917 + ], + [ + "▁Equine", + -13.803915977478027 + ], + [ + "949", + -13.803924560546877 + ], + [ + "▁(35", + -13.803924560546877 + ], + [ + "heri", + -13.803946495056152 + ], + [ + "▁Estimates", + -13.803972244262695 + ], + [ + "▁Caitlin", + -13.803994178771973 + ], + [ + "184", + -13.803997993469238 + ], + [ + "▁$44", + -13.804014205932615 + ], + [ + "▁docked", + -13.804046630859377 + ], + [ + "▁chamomile", + -13.804048538208008 + ], + [ + "▁chiffon", + -13.804048538208008 + ], + [ + "▁frivolous", + -13.804048538208008 + ], + [ + "▁hospitable", + -13.804048538208008 + ], + [ + "▁intervening", + -13.804048538208008 + ], + [ + "▁libertarian", + -13.804048538208008 + ], + [ + "▁opacity", + -13.804048538208008 + ], + [ + "▁protracted", + -13.804048538208008 + ], + [ + "▁Alonso", + -13.804049491882324 + ], + [ + "▁Demonstration", + -13.80405044555664 + ], + [ + "▁Triathlon", + -13.804059028625488 + ], + [ + "▁Versace", + -13.804069519042969 + ], + [ + "▁Bites", + -13.80411148071289 + ], + [ + "▁Eduardo", + -13.804118156433104 + ], + [ + "▁Pistol", + -13.804123878479004 + ], + [ + "▁gastro", + -13.804136276245115 + ], + [ + "pence", + -13.80413818359375 + ], + [ + "▁slimmer", + -13.804154396057127 + ], + [ + "▁gilt", + -13.804181098937988 + ], + [ + "▁Wilkes", + -13.80418300628662 + ], + [ + "▁tragically", + -13.804207801818848 + ], + [ + "▁oceanfront", + -13.80423355102539 + ], + [ + "▁Chou", + -13.804241180419922 + ], + [ + "▁vouch", + -13.804275512695312 + ], + [ + "weak", + -13.804287910461426 + ], + [ + "9.6", + -13.804290771484377 + ], + [ + "▁rv", + -13.804313659667969 + ], + [ + "RAL", + -13.804407119750977 + ], + [ + "Recruiter", + -13.80444049835205 + ], + [ + "▁splatter", + -13.804468154907228 + ], + [ + "chur", + -13.804518699645996 + ], + [ + "▁monarchy", + -13.804539680480955 + ], + [ + "▁BMX", + -13.804545402526855 + ], + [ + "▁roofers", + 
-13.804570198059082 + ], + [ + "Crusher", + -13.804582595825195 + ], + [ + "▁bona", + -13.804585456848145 + ], + [ + "▁Hays", + -13.804593086242676 + ], + [ + "▁truss", + -13.804604530334473 + ], + [ + "▁Wad", + -13.804607391357422 + ], + [ + "▁Haf", + -13.804675102233888 + ], + [ + "▁Slightly", + -13.80469036102295 + ], + [ + "5.3", + -13.804709434509276 + ], + [ + "washed", + -13.804803848266602 + ], + [ + "bolic", + -13.804804801940918 + ], + [ + "▁viz", + -13.804829597473145 + ], + [ + "▁SOA", + -13.804862022399902 + ], + [ + "▁Verse", + -13.804876327514648 + ], + [ + "▁Assessments", + -13.80500602722168 + ], + [ + "▁authenticated", + -13.805143356323242 + ], + [ + "Sources", + -13.805176734924316 + ], + [ + "▁Redding", + -13.805197715759276 + ], + [ + "▁JOIN", + -13.805213928222656 + ], + [ + "▁paintball", + -13.80526638031006 + ], + [ + "▁NAME", + -13.805269241333008 + ], + [ + "▁MCC", + -13.805275917053224 + ], + [ + "▁Pulp", + -13.805342674255373 + ], + [ + "ols", + -13.805362701416016 + ], + [ + "tress", + -13.805408477783203 + ], + [ + "▁overdraft", + -13.805447578430176 + ], + [ + "▁sidelined", + -13.80549144744873 + ], + [ + "▁decode", + -13.805511474609377 + ], + [ + "▁Pow", + -13.80553150177002 + ], + [ + "bodies", + -13.80554485321045 + ], + [ + "igu", + -13.805582046508787 + ], + [ + "▁Ensur", + -13.805642127990724 + ], + [ + "arat", + -13.805648803710938 + ], + [ + "LAB", + -13.805686950683594 + ], + [ + "▁AVG", + -13.80575466156006 + ], + [ + "▁bothers", + -13.805784225463867 + ], + [ + "▁UD", + -13.805790901184082 + ], + [ + "Diet", + -13.805809020996094 + ], + [ + "▁Pioneers", + -13.805813789367676 + ], + [ + "▁Implementing", + -13.805832862854004 + ], + [ + "▁Rana", + -13.805850982666016 + ], + [ + "▁complains", + -13.805875778198242 + ], + [ + "paul", + -13.805928230285645 + ], + [ + "▁Rollins", + -13.80593490600586 + ], + [ + "▁Gypsy", + -13.805940628051758 + ], + [ + "▁8.30", + -13.805947303771973 + ], + [ + "▁correlations", + -13.805978775024414 + ], + [ + "▁remnant", + -13.806045532226562 + ], + [ + "rba", + -13.806097030639648 + ], + [ + "▁******", + -13.80609893798828 + ], + [ + "tower", + -13.806121826171877 + ], + [ + "▁pasted", + -13.806129455566406 + ], + [ + "measure", + -13.806187629699709 + ], + [ + "▁Genre", + -13.806241989135742 + ], + [ + "lena", + -13.806257247924805 + ], + [ + "QS", + -13.806281089782717 + ], + [ + "▁sprite", + -13.806302070617676 + ], + [ + "▁wavelengths", + -13.806317329406738 + ], + [ + "Nation", + -13.80635929107666 + ], + [ + "scaping", + -13.80635929107666 + ], + [ + "▁crepe", + -13.806360244750977 + ], + [ + "▁Peppermint", + -13.806363105773926 + ], + [ + "▁Remain", + -13.806368827819824 + ], + [ + "SYS", + -13.806394577026367 + ], + [ + "▁dents", + -13.806485176086426 + ], + [ + "▁dpi", + -13.80651569366455 + ], + [ + "VID", + -13.806707382202148 + ], + [ + "▁Vert", + -13.80678939819336 + ], + [ + "▁meditations", + -13.806843757629396 + ], + [ + "hei", + -13.806854248046877 + ], + [ + "▁generational", + -13.80688190460205 + ], + [ + "MODE", + -13.806914329528809 + ], + [ + "▁GG", + -13.806991577148438 + ], + [ + "Fin", + -13.807000160217283 + ], + [ + "Arrange", + -13.807043075561523 + ], + [ + "Watt", + -13.807049751281738 + ], + [ + "afi", + -13.807199478149414 + ], + [ + "▁arbor", + -13.807206153869627 + ], + [ + "▁inertia", + -13.80722427368164 + ], + [ + "enthal", + -13.807337760925291 + ], + [ + "▁pong", + -13.80735206604004 + ], + [ + "▁microbiome", + -13.807437896728516 + ], + [ + "▁inundated", + -13.807438850402832 + 
], + [ + "▁frigid", + -13.807449340820312 + ], + [ + "▁cobblestone", + -13.80746364593506 + ], + [ + "▁Consulate", + -13.807466506958008 + ], + [ + "▁Duterte", + -13.807469367980955 + ], + [ + "▁frantically", + -13.80747890472412 + ], + [ + "▁Denali", + -13.807482719421388 + ], + [ + "▁ethernet", + -13.80748462677002 + ], + [ + "iler", + -13.807485580444336 + ], + [ + "▁amazement", + -13.80749225616455 + ], + [ + "▁revocation", + -13.807523727416992 + ], + [ + "▁compulsive", + -13.807591438293455 + ], + [ + "▁wealthiest", + -13.807631492614746 + ], + [ + "▁Beyonce", + -13.80764102935791 + ], + [ + "▁Premiership", + -13.80767822265625 + ], + [ + "hler", + -13.807744979858398 + ], + [ + "▁Kristina", + -13.807756423950195 + ], + [ + "▁(2005).", + -13.807759284973145 + ], + [ + "▁breathability", + -13.80777645111084 + ], + [ + "unta", + -13.807794570922852 + ], + [ + "▁digitized", + -13.807809829711914 + ], + [ + "ndra", + -13.807846069335938 + ], + [ + "▁HCA", + -13.80787181854248 + ], + [ + "▁Anyways", + -13.807916641235352 + ], + [ + "Educate", + -13.807924270629885 + ], + [ + "▁Pinch", + -13.807926177978516 + ], + [ + "▁Reformation", + -13.807944297790527 + ], + [ + "kine", + -13.807947158813477 + ], + [ + "▁nomadic", + -13.807950973510742 + ], + [ + "▁ascended", + -13.808059692382812 + ], + [ + "colo", + -13.808117866516112 + ], + [ + "rra", + -13.808170318603516 + ], + [ + "▁Expressway", + -13.808274269104004 + ], + [ + "▁homeopathic", + -13.80827522277832 + ], + [ + "flying", + -13.808320045471191 + ], + [ + "▁decoder", + -13.808428764343262 + ], + [ + "▁Scooter", + -13.80850601196289 + ], + [ + "aemia", + -13.808510780334473 + ], + [ + "▁$20.", + -13.808547019958496 + ], + [ + "▁GEM", + -13.808676719665527 + ], + [ + "883", + -13.808688163757324 + ], + [ + "Loc", + -13.808728218078612 + ], + [ + "▁Thermostat", + -13.808773040771484 + ], + [ + "Pray", + -13.808805465698242 + ], + [ + "▁HCG", + -13.808805465698242 + ], + [ + "▁Raphael", + -13.80888843536377 + ], + [ + "▁TOUR", + -13.808978080749512 + ], + [ + "▁Hr", + -13.80899429321289 + ], + [ + "▁Ain", + -13.809049606323242 + ], + [ + "▁CRS", + -13.809114456176758 + ], + [ + "▁surpassing", + -13.809125900268556 + ], + [ + "DAC", + -13.809163093566896 + ], + [ + "fici", + -13.809165000915527 + ], + [ + "ivism", + -13.80919361114502 + ], + [ + "months", + -13.809271812438965 + ], + [ + "▁Willem", + -13.809300422668455 + ], + [ + "469", + -13.809380531311035 + ], + [ + "tered", + -13.809419631958008 + ], + [ + "▁Paralympic", + -13.80946159362793 + ], + [ + "Allen", + -13.809529304504396 + ], + [ + "▁Astra", + -13.809572219848633 + ], + [ + "▁Allies", + -13.809584617614746 + ], + [ + "▁puddle", + -13.809621810913086 + ], + [ + "▁bran", + -13.809625625610352 + ], + [ + "uya", + -13.809711456298828 + ], + [ + "▁doughnuts", + -13.809712409973145 + ], + [ + "▁sequin", + -13.809727668762209 + ], + [ + "▁prank", + -13.809869766235352 + ], + [ + "▁SUR", + -13.80988883972168 + ], + [ + "cyl", + -13.809927940368652 + ], + [ + "▁Ives", + -13.809954643249512 + ], + [ + "▁Oilers", + -13.809969902038574 + ], + [ + "Java", + -13.809986114501951 + ], + [ + "zl", + -13.810044288635254 + ], + [ + "▁Petition", + -13.81008243560791 + ], + [ + "bella", + -13.810089111328123 + ], + [ + "▁materialize", + -13.810176849365234 + ], + [ + "fluor", + -13.810200691223145 + ], + [ + "YT", + -13.810251235961914 + ], + [ + "instrumentalist", + -13.810253143310549 + ], + [ + "Cop", + -13.810273170471191 + ], + [ + "▁Haw", + -13.810373306274414 + ], + [ + "▁920", + 
-13.810380935668944 + ], + [ + "Background", + -13.810391426086426 + ], + [ + "Brilliant", + -13.810401916503906 + ], + [ + "Crazy", + -13.810401916503906 + ], + [ + "▁Clamp", + -13.810416221618652 + ], + [ + "beauty", + -13.810425758361816 + ], + [ + "lovers", + -13.81042766571045 + ], + [ + "Balance", + -13.810450553894045 + ], + [ + "▁finishers", + -13.810468673706056 + ], + [ + "▁capitalized", + -13.81047248840332 + ], + [ + "▁Aman", + -13.810480117797852 + ], + [ + "▁dit", + -13.810571670532228 + ], + [ + "▁INTERNATIONAL", + -13.81061553955078 + ], + [ + "ief", + -13.810670852661133 + ], + [ + "▁LET", + -13.810674667358398 + ], + [ + "▁Carn", + -13.810699462890623 + ], + [ + "299", + -13.81080150604248 + ], + [ + "exist", + -13.810826301574709 + ], + [ + "▁Burmese", + -13.810839653015137 + ], + [ + "▁Inflatable", + -13.810839653015137 + ], + [ + "▁caricature", + -13.810839653015137 + ], + [ + "▁harrowing", + -13.810839653015137 + ], + [ + "▁sociable", + -13.81084156036377 + ], + [ + "Cause", + -13.810846328735352 + ], + [ + "▁fuselage", + -13.810851097106934 + ], + [ + "▁Loyalty", + -13.81085205078125 + ], + [ + "▁Trident", + -13.810895919799805 + ], + [ + "cats", + -13.810908317565918 + ], + [ + "▁unprotected", + -13.810941696166992 + ], + [ + "▁grunt", + -13.810942649841309 + ], + [ + "▁Provost", + -13.810988426208496 + ], + [ + "CER", + -13.811017990112305 + ], + [ + "▁Tir", + -13.811063766479492 + ], + [ + "atsu", + -13.811081886291504 + ], + [ + "▁orthodox", + -13.811117172241213 + ], + [ + "▁Imagination", + -13.811139106750488 + ], + [ + "▁Defendant", + -13.811165809631348 + ], + [ + "▁Journalists", + -13.811217308044434 + ], + [ + "▁SOUTH", + -13.811244010925291 + ], + [ + "▁Binding", + -13.811294555664062 + ], + [ + "atten", + -13.81134033203125 + ], + [ + "▁rashes", + -13.811363220214844 + ], + [ + "VISION", + -13.81138515472412 + ], + [ + "▁journeyman", + -13.81141185760498 + ], + [ + "▁Shandong", + -13.81154727935791 + ], + [ + "▁UAV", + -13.81157684326172 + ], + [ + "▁Corporations", + -13.8115816116333 + ], + [ + "▁shopped", + -13.811622619628906 + ], + [ + "agency", + -13.811654090881348 + ], + [ + "onian", + -13.811671257019045 + ], + [ + "Michel", + -13.81169891357422 + ], + [ + "▁USED", + -13.811717987060549 + ], + [ + "▁scraped", + -13.81177806854248 + ], + [ + "erating", + -13.811837196350098 + ], + [ + "711", + -13.811849594116213 + ], + [ + "000+", + -13.811859130859377 + ], + [ + "507", + -13.811872482299805 + ], + [ + "▁Brie", + -13.811901092529297 + ], + [ + "▁Westport", + -13.81198501586914 + ], + [ + "▁Drift", + -13.812049865722656 + ], + [ + "▁Nm", + -13.812127113342283 + ], + [ + "478", + -13.812145233154297 + ], + [ + "▁clustering", + -13.812174797058104 + ], + [ + "▁sorely", + -13.812252044677734 + ], + [ + "▁momentary", + -13.812294960021973 + ], + [ + "▁Changed", + -13.812307357788086 + ], + [ + "sama", + -13.812335014343262 + ], + [ + "▁321", + -13.812358856201172 + ], + [ + "▁2400", + -13.812422752380373 + ], + [ + "▁pretzels", + -13.812424659729004 + ], + [ + "wad", + -13.8124418258667 + ], + [ + "justice", + -13.81248378753662 + ], + [ + "▁PCS", + -13.812633514404297 + ], + [ + "▁Madeira", + -13.81272315979004 + ], + [ + "▁stow", + -13.81277847290039 + ], + [ + "▁Tutors", + -13.812808990478516 + ], + [ + "hren", + -13.81287956237793 + ], + [ + "▁Napoli", + -13.81287956237793 + ], + [ + "VOL", + -13.812911033630373 + ], + [ + "assembly", + -13.813084602355955 + ], + [ + "▁blowout", + -13.813102722167969 + ], + [ + "▁sewers", + -13.81318473815918 + ], 
+ [ + "ACA", + -13.813261032104492 + ], + [ + "▁Particle", + -13.8133544921875 + ], + [ + "638", + -13.81337070465088 + ], + [ + "▁Honour", + -13.813373565673828 + ], + [ + "▁disrespectful", + -13.813380241394045 + ], + [ + "rome", + -13.81339168548584 + ], + [ + "▁2002)", + -13.813444137573242 + ], + [ + "Latin", + -13.813506126403809 + ], + [ + "▁Launched", + -13.813519477844238 + ], + [ + "▁bounces", + -13.813547134399414 + ], + [ + "Split", + -13.81354808807373 + ], + [ + "Changes", + -13.813568115234377 + ], + [ + "modo", + -13.813570022583008 + ], + [ + "▁potholes", + -13.813570022583008 + ], + [ + "crypt", + -13.813623428344728 + ], + [ + "▁dashed", + -13.813632011413574 + ], + [ + "▁codec", + -13.813660621643066 + ], + [ + "▁Gaz", + -13.813682556152344 + ], + [ + "▁VH", + -13.813693046569824 + ], + [ + "▁Kah", + -13.813711166381836 + ], + [ + "BOOK", + -13.813712120056152 + ], + [ + "153", + -13.813740730285645 + ], + [ + "▁GEO", + -13.813812255859377 + ], + [ + "bana", + -13.81381893157959 + ], + [ + "icate", + -13.813858032226562 + ], + [ + "▁banish", + -13.813886642456056 + ], + [ + "Operating", + -13.813916206359863 + ], + [ + "▁KDE", + -13.81391716003418 + ], + [ + "Manchester", + -13.813928604125977 + ], + [ + "ани", + -13.813983917236328 + ], + [ + "▁lumens", + -13.813983917236328 + ], + [ + "▁flops", + -13.814035415649414 + ], + [ + "rene", + -13.814095497131348 + ], + [ + "oux", + -13.814129829406738 + ], + [ + "8/", + -13.814183235168455 + ], + [ + "▁Rage", + -13.814191818237305 + ], + [ + "▁mishaps", + -13.814201354980469 + ], + [ + "▁GIFT", + -13.814207077026367 + ], + [ + "▁Kelvin", + -13.81423282623291 + ], + [ + "▁IND", + -13.81424617767334 + ], + [ + "▁Daimler", + -13.814251899719238 + ], + [ + "▁Nouveau", + -13.814252853393556 + ], + [ + "▁arugula", + -13.814252853393556 + ], + [ + "▁disobedience", + -13.814252853393556 + ], + [ + "▁singapore", + -13.814252853393556 + ], + [ + "▁Affinity", + -13.814254760742188 + ], + [ + "▁bronchitis", + -13.814254760742188 + ], + [ + "▁quarrel", + -13.814260482788086 + ], + [ + "▁Sedona", + -13.814263343811035 + ], + [ + "▁Adolescent", + -13.8142671585083 + ], + [ + "Fruit", + -13.814269065856934 + ], + [ + "▁jagged", + -13.814270973205566 + ], + [ + "▁brisket", + -13.814301490783691 + ], + [ + "▁modality", + -13.814309120178224 + ], + [ + "▁decedent", + -13.814311027526855 + ], + [ + "▁Cambridgeshire", + -13.814312934875488 + ], + [ + "▁Peas", + -13.814312934875488 + ], + [ + "▁senate", + -13.814326286315918 + ], + [ + "▁Bloomfield", + -13.814346313476562 + ], + [ + "▁Legs", + -13.814353942871094 + ], + [ + "Helen", + -13.814356803894045 + ], + [ + "▁Akin", + -13.81436824798584 + ], + [ + "bottle", + -13.81443977355957 + ], + [ + "▁fairway", + -13.814447402954102 + ], + [ + "▁Chrys", + -13.81446361541748 + ], + [ + "▁Shelly", + -13.814465522766112 + ], + [ + "packing", + -13.814473152160645 + ], + [ + "▁airbrush", + -13.814542770385742 + ], + [ + "▁CCD", + -13.814545631408691 + ], + [ + "lur", + -13.81456184387207 + ], + [ + "gno", + -13.814619064331056 + ], + [ + "▁leach", + -13.81466579437256 + ], + [ + "!!)", + -13.814695358276367 + ], + [ + "▁1970,", + -13.814729690551758 + ], + [ + "919", + -13.81479835510254 + ], + [ + "494", + -13.814855575561523 + ], + [ + "▁tangle", + -13.814864158630373 + ], + [ + "▁yu", + -13.814901351928713 + ], + [ + "539", + -13.81497573852539 + ], + [ + "kana", + -13.81504249572754 + ], + [ + "▁rescheduled", + -13.815078735351562 + ], + [ + "▁patching", + -13.815130233764648 + ], + [ + 
"▁Wellbeing", + -13.815176963806152 + ], + [ + "▁Gia", + -13.815228462219238 + ], + [ + "▁BANK", + -13.815253257751465 + ], + [ + "ovsky", + -13.81525421142578 + ], + [ + "haz", + -13.815299987792969 + ], + [ + "▁capitalise", + -13.815316200256348 + ], + [ + "▁hexagon", + -13.815360069274902 + ], + [ + "0/", + -13.815418243408203 + ], + [ + "rsquo", + -13.815423011779783 + ], + [ + "▁Jamal", + -13.815442085266112 + ], + [ + "▁Contributions", + -13.81548309326172 + ], + [ + "COP", + -13.815555572509766 + ], + [ + "▁Choi", + -13.81557273864746 + ], + [ + "▁marches", + -13.815574645996094 + ], + [ + "▁Overhead", + -13.8156099319458 + ], + [ + "minating", + -13.8156156539917 + ], + [ + "▁Honours", + -13.81567096710205 + ], + [ + "DOC", + -13.815751075744627 + ], + [ + "enz", + -13.815752983093262 + ], + [ + "916", + -13.815811157226562 + ], + [ + "▁grasping", + -13.815817832946776 + ], + [ + "▁cleric", + -13.815919876098633 + ], + [ + "▁Outsourcing", + -13.815922737121582 + ], + [ + "DOR", + -13.815958023071287 + ], + [ + "rrington", + -13.81600570678711 + ], + [ + "replace", + -13.816035270690918 + ], + [ + "24)", + -13.816088676452637 + ], + [ + "▁CPAP", + -13.816088676452637 + ], + [ + "▁Reformed", + -13.816097259521484 + ], + [ + "708", + -13.816283226013184 + ], + [ + "▁1970’", + -13.816298484802246 + ], + [ + "▁Editions", + -13.816354751586914 + ], + [ + "ULA", + -13.816407203674316 + ], + [ + "▁Eastwood", + -13.816511154174805 + ], + [ + "rden", + -13.816575050354004 + ], + [ + "▁Kunst", + -13.816733360290527 + ], + [ + "▁Beyoncé", + -13.816853523254396 + ], + [ + "Islam", + -13.816875457763672 + ], + [ + "▁Photographers", + -13.816949844360352 + ], + [ + "Hydro", + -13.817026138305664 + ], + [ + "▁Replacing", + -13.81710433959961 + ], + [ + "▁Coding", + -13.817140579223633 + ], + [ + "▁Addis", + -13.817270278930664 + ], + [ + "▁Archi", + -13.817270278930664 + ], + [ + "▁Symantec", + -13.817280769348145 + ], + [ + "resistance", + -13.817428588867188 + ], + [ + "▁Presently", + -13.817453384399414 + ], + [ + "▁Copa", + -13.817516326904297 + ], + [ + "▁Glas", + -13.817543029785156 + ], + [ + "vre", + -13.817569732666016 + ], + [ + "▁Cli", + -13.817608833312988 + ], + [ + "▁boulevard", + -13.81767749786377 + ], + [ + "▁cornstarch", + -13.81767749786377 + ], + [ + "▁definately", + -13.81767749786377 + ], + [ + "▁irritable", + -13.81767749786377 + ], + [ + "▁Nazareth", + -13.817681312561035 + ], + [ + "Ana", + -13.817694664001465 + ], + [ + "▁maroon", + -13.81769847869873 + ], + [ + "cpp", + -13.81770133972168 + ], + [ + "▁Perception", + -13.817703247070312 + ], + [ + "▁Veggie", + -13.817719459533691 + ], + [ + "Hiring", + -13.817757606506348 + ], + [ + "▁RMB", + -13.817793846130373 + ], + [ + "▁underprivileged", + -13.817797660827637 + ], + [ + "LATE", + -13.81780242919922 + ], + [ + "▁Garner", + -13.817838668823242 + ], + [ + "▁purported", + -13.817853927612305 + ], + [ + "▁substandard", + -13.817855834960938 + ], + [ + "▁Sou", + -13.817980766296388 + ], + [ + "▁remixes", + -13.818060874938965 + ], + [ + "▁Acre", + -13.818102836608888 + ], + [ + "Finder", + -13.818109512329102 + ], + [ + "edo", + -13.818124771118164 + ], + [ + "Sho", + -13.818254470825195 + ], + [ + "▁vacated", + -13.818310737609863 + ], + [ + "ecki", + -13.818325996398926 + ], + [ + "boats", + -13.818360328674316 + ], + [ + "▁IIS", + -13.81838035583496 + ], + [ + "iyah", + -13.818397521972656 + ], + [ + "▁skateboarding", + -13.81847095489502 + ], + [ + "cialis", + -13.81851863861084 + ], + [ + "zawa", + -13.81851863861084 
+ ], + [ + "▁Drums", + -13.81857204437256 + ], + [ + "genetic", + -13.81860065460205 + ], + [ + "▁Nel", + -13.81861972808838 + ], + [ + "▁BAL", + -13.818650245666504 + ], + [ + "▁Sphere", + -13.818655014038086 + ], + [ + "eres", + -13.818666458129885 + ], + [ + "▁Gim", + -13.818758010864258 + ], + [ + "▁Bleach", + -13.818764686584473 + ], + [ + "▁inquiring", + -13.81877613067627 + ], + [ + "▁407", + -13.8187837600708 + ], + [ + "▁beau", + -13.81879711151123 + ], + [ + "▁Sati", + -13.81884765625 + ], + [ + "UNI", + -13.81887435913086 + ], + [ + "▁mustache", + -13.818984985351562 + ], + [ + "▁Mentoring", + -13.818997383117676 + ], + [ + "▁Korn", + -13.819034576416016 + ], + [ + "▁Flor", + -13.81904125213623 + ], + [ + "▁$31", + -13.819064140319824 + ], + [ + "Ep", + -13.819099426269531 + ], + [ + "▁gusts", + -13.819159507751465 + ], + [ + "▁lobbyists", + -13.81917667388916 + ], + [ + "erer", + -13.819199562072754 + ], + [ + "▁0.04", + -13.819221496582031 + ], + [ + "437", + -13.819252014160156 + ], + [ + "▁clogs", + -13.819331169128418 + ], + [ + "Bas", + -13.819465637207031 + ], + [ + "▁relieves", + -13.819467544555664 + ], + [ + "▁Cigarette", + -13.819473266601562 + ], + [ + "UNE", + -13.819488525390623 + ], + [ + "▁Julien", + -13.81949234008789 + ], + [ + "▁dermal", + -13.819503784179688 + ], + [ + "▁tolerances", + -13.819528579711914 + ], + [ + "demo", + -13.819572448730469 + ], + [ + "Terra", + -13.819576263427734 + ], + [ + "▁retake", + -13.819589614868164 + ], + [ + "fford", + -13.819659233093262 + ], + [ + "▁QT", + -13.81980037689209 + ], + [ + "▁Camo", + -13.81984806060791 + ], + [ + "▁proponent", + -13.819857597351074 + ], + [ + "452", + -13.820106506347656 + ], + [ + "▁Wondering", + -13.820125579833984 + ], + [ + "▁$25.", + -13.82020378112793 + ], + [ + "▁cashews", + -13.82022190093994 + ], + [ + "▁alarmed", + -13.820231437683104 + ], + [ + "▁classifier", + -13.820239067077637 + ], + [ + "mey", + -13.820359230041504 + ], + [ + "▁Tint", + -13.820383071899414 + ], + [ + "▁Samui", + -13.820420265197754 + ], + [ + "▁nip", + -13.820509910583496 + ], + [ + "▁infiltrate", + -13.820511817932127 + ], + [ + "▁collaborates", + -13.82054328918457 + ], + [ + "itating", + -13.82065486907959 + ], + [ + "oline", + -13.820663452148438 + ], + [ + "▁Swa", + -13.820701599121094 + ], + [ + "▁355", + -13.82071304321289 + ], + [ + "3-7", + -13.820775032043455 + ], + [ + "▁36\"", + -13.82077980041504 + ], + [ + "Regional", + -13.820874214172363 + ], + [ + "ometry", + -13.820937156677246 + ], + [ + "Knight", + -13.82094383239746 + ], + [ + "▁Deacon", + -13.82095718383789 + ], + [ + "▁gasp", + -13.8211030960083 + ], + [ + "▁chromium", + -13.82111358642578 + ], + [ + "▁dexterity", + -13.82111358642578 + ], + [ + "▁estimator", + -13.82111358642578 + ], + [ + "▁infringing", + -13.82111358642578 + ], + [ + "▁monetization", + -13.82111358642578 + ], + [ + "▁Archaeological", + -13.821114540100098 + ], + [ + "▁Equifax", + -13.82111644744873 + ], + [ + "documented", + -13.821117401123049 + ], + [ + "▁Neurology", + -13.821125984191896 + ], + [ + "▁2010)", + -13.82112979888916 + ], + [ + "▁Boundary", + -13.82114028930664 + ], + [ + "▁Damien", + -13.821141242980955 + ], + [ + "▁McKay", + -13.821151733398438 + ], + [ + "▁Popcorn", + -13.821151733398438 + ], + [ + "▁Inverness", + -13.821161270141602 + ], + [ + "▁Peng", + -13.82116985321045 + ], + [ + "▁rosy", + -13.821170806884766 + ], + [ + "▁ovation", + -13.821174621582031 + ], + [ + "▁mooring", + -13.82117748260498 + ], + [ + "▁monolithic", + -13.821202278137209 + 
], + [ + "▁audiovisual", + -13.821215629577637 + ], + [ + "▁Angelina", + -13.821233749389648 + ], + [ + "pom", + -13.82123851776123 + ], + [ + "▁regretted", + -13.821249008178713 + ], + [ + "Um", + -13.82126522064209 + ], + [ + "▁Minimal", + -13.821290969848633 + ], + [ + "▁interconnection", + -13.821308135986328 + ], + [ + "▁meteorite", + -13.821337699890137 + ], + [ + "▁mushy", + -13.821361541748049 + ], + [ + "▁Cradle", + -13.82138442993164 + ], + [ + "wie", + -13.821406364440918 + ], + [ + "▁theorem", + -13.821407318115234 + ], + [ + "▁alma", + -13.821413040161133 + ], + [ + "▁Eighth", + -13.821414947509766 + ], + [ + "▁civilisation", + -13.82146453857422 + ], + [ + "▁Conveniently", + -13.821507453918455 + ], + [ + "▁Milestone", + -13.821528434753418 + ], + [ + "▁CAA", + -13.82153034210205 + ], + [ + "▁Civilization", + -13.821540832519531 + ], + [ + "pea", + -13.82155418395996 + ], + [ + "▁paver", + -13.821568489074709 + ], + [ + "▁Aquarius", + -13.82158088684082 + ], + [ + "▁Fior", + -13.821602821350098 + ], + [ + "▁evaporated", + -13.821640014648438 + ], + [ + "ooo", + -13.821678161621094 + ], + [ + "▁multifamily", + -13.821681022644045 + ], + [ + "▁Signing", + -13.821786880493164 + ], + [ + "▁Bookcase", + -13.82186508178711 + ], + [ + "Alice", + -13.821907043457031 + ], + [ + "rse", + -13.821945190429688 + ], + [ + "loft", + -13.821955680847168 + ], + [ + "▁recorders", + -13.8219575881958 + ], + [ + "(7)", + -13.8219633102417 + ], + [ + "▁griddle", + -13.821969032287598 + ], + [ + "Cab", + -13.821999549865724 + ], + [ + "▁yellowish", + -13.822017669677734 + ], + [ + "▁CHF", + -13.822068214416504 + ], + [ + "▁totality", + -13.82206916809082 + ], + [ + "▁GATE", + -13.822114944458008 + ], + [ + "▁bowed", + -13.822178840637209 + ], + [ + "▁Spatial", + -13.822195053100586 + ], + [ + "▁Cosmos", + -13.822203636169434 + ], + [ + "akka", + -13.82227611541748 + ], + [ + "zag", + -13.822279930114746 + ], + [ + "▁Sedan", + -13.822280883789062 + ], + [ + "▁MOV", + -13.82229995727539 + ], + [ + "▁PRINT", + -13.82233715057373 + ], + [ + "affe", + -13.822340965270996 + ], + [ + "rear", + -13.82236671447754 + ], + [ + "▁bowlers", + -13.82236671447754 + ], + [ + "▁PARA", + -13.822405815124512 + ], + [ + "▁entre", + -13.822432518005373 + ], + [ + "1;", + -13.822492599487305 + ], + [ + "▁Dolls", + -13.82249355316162 + ], + [ + "▁LOCATION", + -13.822528839111328 + ], + [ + "▁Bono", + -13.82256031036377 + ], + [ + "cking", + -13.822571754455566 + ], + [ + "▁timestamp", + -13.822587966918944 + ], + [ + "▁reopening", + -13.822590827941896 + ], + [ + "▁revisited", + -13.822649002075195 + ], + [ + "-1990", + -13.822673797607422 + ], + [ + "009", + -13.822673797607422 + ], + [ + "633", + -13.822696685791016 + ], + [ + "▁Phen", + -13.822720527648926 + ], + [ + "dinner", + -13.82277011871338 + ], + [ + "▁Ironman", + -13.822783470153809 + ], + [ + "▁intel", + -13.82278823852539 + ], + [ + "▁Blessings", + -13.82280731201172 + ], + [ + "mada", + -13.822893142700195 + ], + [ + "568", + -13.822895050048828 + ], + [ + "Examples", + -13.822979927062988 + ], + [ + "▁mixtures", + -13.82309341430664 + ], + [ + "Plans", + -13.823094367980955 + ], + [ + "▁Rees", + -13.823115348815918 + ], + [ + "1001", + -13.823163032531738 + ], + [ + "▁hardening", + -13.823209762573242 + ], + [ + "▁pamphlet", + -13.823335647583008 + ], + [ + "▁Faye", + -13.82335376739502 + ], + [ + "▁underscore", + -13.823382377624512 + ], + [ + "▁Hoi", + -13.823404312133787 + ], + [ + "AMS", + -13.823436737060549 + ], + [ + "▁spiked", + 
-13.823436737060549 + ], + [ + "▁adventurer", + -13.823488235473633 + ], + [ + "▁Aidan", + -13.823610305786133 + ], + [ + "▁Sherri", + -13.823617935180664 + ], + [ + "▁Sensitive", + -13.823637008666992 + ], + [ + "Done", + -13.823647499084473 + ], + [ + "▁rabbi", + -13.823676109313965 + ], + [ + "▁560", + -13.823712348937988 + ], + [ + "▁Handicap", + -13.823726654052734 + ], + [ + "REM", + -13.823832511901855 + ], + [ + "▁Terrier", + -13.823833465576172 + ], + [ + "▁2003)", + -13.823843955993652 + ], + [ + "▁cartoonist", + -13.823872566223145 + ], + [ + "▁WT", + -13.823897361755373 + ], + [ + "doctor", + -13.823919296264648 + ], + [ + "▁fanatics", + -13.823935508728027 + ], + [ + "▁diameters", + -13.823970794677734 + ], + [ + "▁Consent", + -13.82404327392578 + ], + [ + "▁acrobatic", + -13.82404613494873 + ], + [ + "▁Wynn", + -13.824151039123535 + ], + [ + "▁Galileo", + -13.824163436889648 + ], + [ + "▁Dubbed", + -13.824178695678713 + ], + [ + "▁Squash", + -13.82421588897705 + ], + [ + "aglia", + -13.824235916137695 + ], + [ + "▁Siberia", + -13.82430648803711 + ], + [ + "▁#12", + -13.824326515197754 + ], + [ + "ocytes", + -13.824334144592283 + ], + [ + "▁Bois", + -13.824342727661133 + ], + [ + "Elsewhere", + -13.824419021606444 + ], + [ + "cited", + -13.824444770812988 + ], + [ + "803", + -13.824445724487305 + ], + [ + "Stunning", + -13.824466705322266 + ], + [ + "enhancing", + -13.824468612670898 + ], + [ + "IRA", + -13.824482917785645 + ], + [ + "Ven", + -13.824496269226074 + ], + [ + "▁slay", + -13.824535369873049 + ], + [ + "▁interestingly", + -13.824542045593262 + ], + [ + "▁Heidelberg", + -13.824562072753906 + ], + [ + "▁Tenerife", + -13.824562072753906 + ], + [ + "▁grueling", + -13.824562072753906 + ], + [ + "▁sedative", + -13.824563026428224 + ], + [ + "▁Neighbourhood", + -13.824565887451172 + ], + [ + "▁Respiratory", + -13.824565887451172 + ], + [ + "▁PROGRAM", + -13.824566841125488 + ], + [ + "▁veritable", + -13.82456874847412 + ], + [ + "▁THEN", + -13.824572563171388 + ], + [ + "▁Cricut", + -13.824580192565918 + ], + [ + "▁chives", + -13.824584007263184 + ], + [ + "Eleven", + -13.824586868286133 + ], + [ + "▁AdSense", + -13.824588775634766 + ], + [ + "▁subcommittee", + -13.824589729309082 + ], + [ + "▁unmet", + -13.824590682983398 + ], + [ + "harvest", + -13.82459545135498 + ], + [ + "▁Telstra", + -13.82460594177246 + ], + [ + "▁vandal", + -13.82461643218994 + ], + [ + "▁unresponsive", + -13.82462215423584 + ], + [ + "▁outerwear", + -13.824661254882812 + ], + [ + "maniac", + -13.824665069580078 + ], + [ + "394", + -13.824667930603027 + ], + [ + "WORK", + -13.824668884277344 + ], + [ + "▁tartar", + -13.82466983795166 + ], + [ + "imposed", + -13.824673652648926 + ], + [ + "Bryan", + -13.824681282043455 + ], + [ + "▁Helpful", + -13.82469367980957 + ], + [ + "BIA", + -13.824758529663086 + ], + [ + "▁marshes", + -13.824779510498049 + ], + [ + "▁volley", + -13.824779510498049 + ], + [ + "▁Stri", + -13.824798583984377 + ], + [ + "▁UH", + -13.824798583984377 + ], + [ + "▁VLAN", + -13.824813842773438 + ], + [ + "Delta", + -13.824841499328612 + ], + [ + "▁Zuma", + -13.824902534484863 + ], + [ + "▁PAGE", + -13.825030326843262 + ], + [ + "▁Kindly", + -13.82507038116455 + ], + [ + "▁unwise", + -13.825075149536133 + ], + [ + "saurus", + -13.825108528137209 + ], + [ + "▁PwC", + -13.825196266174316 + ], + [ + "▁Nev", + -13.825270652770996 + ], + [ + "▁middleware", + -13.825321197509766 + ], + [ + "▁miniatures", + -13.825323104858398 + ], + [ + "tastic", + -13.825364112854004 + ], + [ + 
"▁standoff", + -13.825383186340332 + ], + [ + "▁Examine", + -13.825390815734863 + ], + [ + "▁waistline", + -13.825489044189451 + ], + [ + "▁proton", + -13.82550811767578 + ], + [ + "▁playtime", + -13.825554847717283 + ], + [ + "▁VISA", + -13.825636863708496 + ], + [ + "▁Bara", + -13.825726509094238 + ], + [ + "▁asserting", + -13.825782775878906 + ], + [ + "rolled", + -13.825798034667969 + ], + [ + "▁Terrain", + -13.825822830200195 + ], + [ + "▁$48", + -13.825864791870115 + ], + [ + "▁contradict", + -13.8258695602417 + ], + [ + "▁CLEAN", + -13.825934410095217 + ], + [ + "▁unwell", + -13.825940132141112 + ], + [ + "titu", + -13.825953483581545 + ], + [ + "▁struts", + -13.825960159301758 + ], + [ + "▁addon", + -13.826037406921388 + ], + [ + "от", + -13.826043128967283 + ], + [ + "Away", + -13.826067924499512 + ], + [ + "▁Freshman", + -13.82607364654541 + ], + [ + "▁Brod", + -13.826144218444824 + ], + [ + "oiled", + -13.826420783996582 + ], + [ + "▁Posting", + -13.826457023620604 + ], + [ + "stories", + -13.826473236083984 + ], + [ + "▁funerals", + -13.82653522491455 + ], + [ + "▁Commentary", + -13.826558113098145 + ], + [ + "▁duplicates", + -13.826570510864258 + ], + [ + "▁mer", + -13.826595306396484 + ], + [ + "XY", + -13.826610565185549 + ], + [ + "▁Boxer", + -13.82664680480957 + ], + [ + "BRI", + -13.826689720153809 + ], + [ + "▁10.30", + -13.826711654663086 + ], + [ + "642", + -13.826743125915527 + ], + [ + "handling", + -13.826812744140623 + ], + [ + "788", + -13.826845169067385 + ], + [ + "-63", + -13.826910018920898 + ], + [ + "internal", + -13.82706356048584 + ], + [ + "▁Pathway", + -13.827065467834473 + ], + [ + "▁figurines", + -13.827101707458496 + ], + [ + "lug", + -13.82716464996338 + ], + [ + "▁Recommend", + -13.827256202697754 + ], + [ + "▁bumping", + -13.827272415161133 + ], + [ + "mber", + -13.82728099822998 + ], + [ + "1991", + -13.82741355895996 + ], + [ + "▁wade", + -13.82742977142334 + ], + [ + "SAM", + -13.82750129699707 + ], + [ + "▁HAPPY", + -13.82761001586914 + ], + [ + "▁KN", + -13.827763557434082 + ], + [ + "▁Rau", + -13.827765464782717 + ], + [ + "izi", + -13.827873229980469 + ], + [ + "▁CAF", + -13.827889442443848 + ], + [ + "▁medi", + -13.827920913696287 + ], + [ + "rpm", + -13.827960968017578 + ], + [ + "criminal", + -13.827980041503906 + ], + [ + "agar", + -13.82798194885254 + ], + [ + "746", + -13.827999114990234 + ], + [ + "▁estranged", + -13.828022003173828 + ], + [ + "▁hygienist", + -13.828022003173828 + ], + [ + "▁selenium", + -13.828022003173828 + ], + [ + "▁sterilization", + -13.828022003173828 + ], + [ + "▁twelfth", + -13.828022003173828 + ], + [ + "▁dreary", + -13.828022956848145 + ], + [ + "▁methamphetamine", + -13.828022956848145 + ], + [ + "▁Rebellion", + -13.828025817871094 + ], + [ + "▁estuary", + -13.82802677154541 + ], + [ + "▁Balinese", + -13.828027725219728 + ], + [ + "▁Gingerbread", + -13.828027725219728 + ], + [ + "▁nascent", + -13.828028678894045 + ], + [ + "▁Domingo", + -13.82802963256836 + ], + [ + "▁aquaculture", + -13.828030586242676 + ], + [ + "▁FLOOR", + -13.828032493591309 + ], + [ + "▁Sunglasses", + -13.82803440093994 + ], + [ + "▁Hurley", + -13.828083038330078 + ], + [ + "▁Reggie", + -13.828094482421877 + ], + [ + "collectively", + -13.828102111816406 + ], + [ + "Jamie", + -13.828106880187988 + ], + [ + "tasting", + -13.828109741210938 + ], + [ + "▁Pedal", + -13.828225135803224 + ], + [ + "▁Presley", + -13.828235626220703 + ], + [ + "▁Sergei", + -13.828239440917969 + ], + [ + "▁Shut", + -13.828243255615234 + ], + [ + "817", + 
-13.828245162963867 + ], + [ + "▁Morales", + -13.828254699707031 + ], + [ + "497", + -13.828275680541992 + ], + [ + "▁newsroom", + -13.828328132629396 + ], + [ + "▁Goodbye", + -13.828359603881836 + ], + [ + "▁apprenticeships", + -13.828392028808594 + ], + [ + "▁campaigned", + -13.82843780517578 + ], + [ + "▁docket", + -13.828450202941896 + ], + [ + "▁fisher", + -13.828479766845703 + ], + [ + "▁Trem", + -13.82855224609375 + ], + [ + "▁Weld", + -13.82855987548828 + ], + [ + "wright", + -13.828654289245604 + ], + [ + "▁rayon", + -13.828657150268556 + ], + [ + "▁hutch", + -13.82866096496582 + ], + [ + "▁ADR", + -13.828682899475098 + ], + [ + "/0", + -13.82869815826416 + ], + [ + "KG", + -13.82874870300293 + ], + [ + "▁Sunflower", + -13.828755378723145 + ], + [ + "warming", + -13.828774452209473 + ], + [ + "▁Organisations", + -13.828868865966797 + ], + [ + "▁Burner", + -13.828911781311035 + ], + [ + "444", + -13.828961372375488 + ], + [ + "▁Gives", + -13.8291015625 + ], + [ + "lose", + -13.82911491394043 + ], + [ + "▁yay", + -13.829134941101074 + ], + [ + "JPY", + -13.829157829284668 + ], + [ + "▁surfboard", + -13.829168319702148 + ], + [ + "▁Transitional", + -13.829188346862791 + ], + [ + "▁Pag", + -13.829246520996094 + ], + [ + "tails", + -13.829254150390623 + ], + [ + "▁predictor", + -13.829312324523926 + ], + [ + "tresses", + -13.829315185546877 + ], + [ + "▁75-", + -13.82936668395996 + ], + [ + "argo", + -13.82948112487793 + ], + [ + "▁Lush", + -13.829565048217772 + ], + [ + "▁Merck", + -13.829569816589355 + ], + [ + "isle", + -13.829612731933594 + ], + [ + "flag", + -13.829614639282228 + ], + [ + "Seller", + -13.829645156860352 + ], + [ + "breed", + -13.82966423034668 + ], + [ + "▁Submitted", + -13.829731941223145 + ], + [ + "▁Hoe", + -13.829747200012209 + ], + [ + "▁caterers", + -13.829771041870115 + ], + [ + "UCK", + -13.829815864562988 + ], + [ + "LH", + -13.82983684539795 + ], + [ + "ocean", + -13.829885482788086 + ], + [ + "▁2015;", + -13.829903602600098 + ], + [ + "▁clumps", + -13.829928398132324 + ], + [ + "Baker", + -13.829948425292969 + ], + [ + "▁2200", + -13.82998275756836 + ], + [ + "eson", + -13.830077171325684 + ], + [ + "▁restorations", + -13.830111503601074 + ], + [ + "▁ulcer", + -13.830246925354004 + ], + [ + "▁maximizes", + -13.830280303955078 + ], + [ + "▁EEA", + -13.83030891418457 + ], + [ + "Jen", + -13.830350875854492 + ], + [ + "▁maneuvers", + -13.83044147491455 + ], + [ + "▁POW", + -13.830451965332031 + ], + [ + "▁Identifying", + -13.830472946166992 + ], + [ + "▁resolves", + -13.830472946166992 + ], + [ + "▁reformed", + -13.830485343933104 + ], + [ + "▁godly", + -13.830544471740724 + ], + [ + "▁handover", + -13.830558776855469 + ], + [ + "geni", + -13.830570220947266 + ], + [ + "▁Marlborough", + -13.830574035644531 + ], + [ + "Xi", + -13.830589294433594 + ], + [ + "▁Pullman", + -13.83059024810791 + ], + [ + "pants", + -13.830591201782228 + ], + [ + "709", + -13.830618858337402 + ], + [ + "▁obscured", + -13.830642700195312 + ], + [ + "▁projectile", + -13.830647468566896 + ], + [ + "ZI", + -13.830676078796388 + ], + [ + "▁Recommendation", + -13.830729484558104 + ], + [ + "▁MISS", + -13.830937385559082 + ], + [ + "▁skater", + -13.83098030090332 + ], + [ + "▁leeks", + -13.830988883972168 + ], + [ + "▁213", + -13.830989837646484 + ], + [ + "▁reverb", + -13.831000328063965 + ], + [ + "▁nutritionist", + -13.831012725830078 + ], + [ + "742", + -13.831018447875977 + ], + [ + "▁catalogues", + -13.831035614013672 + ], + [ + "judge", + -13.831171989440918 + ], + [ + "hane", + 
-13.831274032592772 + ], + [ + "▁obstetric", + -13.831284523010254 + ], + [ + "▁squeak", + -13.831284523010254 + ], + [ + "ampa", + -13.831289291381836 + ], + [ + "▁Allowing", + -13.831334114074709 + ], + [ + "▁Petrol", + -13.831357955932615 + ], + [ + "▁insole", + -13.831372261047363 + ], + [ + "▁Optics", + -13.831469535827637 + ], + [ + "▁vulgar", + -13.831473350524902 + ], + [ + "▁Calhoun", + -13.831494331359863 + ], + [ + "▁Himachal", + -13.831494331359863 + ], + [ + "▁dispensaries", + -13.831494331359863 + ], + [ + "▁apprehensive", + -13.83149528503418 + ], + [ + "▁arsenic", + -13.831498146057127 + ], + [ + "▁oblique", + -13.831498146057127 + ], + [ + "▁(1992)", + -13.83152675628662 + ], + [ + "▁ballerina", + -13.83152675628662 + ], + [ + "▁Bayesian", + -13.831531524658203 + ], + [ + "▁Fleece", + -13.831534385681152 + ], + [ + "▁jaunt", + -13.831534385681152 + ], + [ + "oul", + -13.83155345916748 + ], + [ + "▁foggy", + -13.83155345916748 + ], + [ + "▁disbursement", + -13.83155918121338 + ], + [ + "▁dutch", + -13.831572532653809 + ], + [ + "Flexible", + -13.831573486328123 + ], + [ + "▁Jarrett", + -13.831585884094238 + ], + [ + "Dallas", + -13.831586837768556 + ], + [ + "▁NULL", + -13.831588745117188 + ], + [ + "▁unearned", + -13.831631660461426 + ], + [ + "Jimmy", + -13.831652641296388 + ], + [ + "▁Decree", + -13.831655502319336 + ], + [ + "▁ploy", + -13.83166790008545 + ], + [ + "▁abyss", + -13.83168125152588 + ], + [ + "▁Franciscan", + -13.83168601989746 + ], + [ + "▁Wesleyan", + -13.83169937133789 + ], + [ + "▁Prediction", + -13.831705093383787 + ], + [ + "▁interpretive", + -13.831743240356444 + ], + [ + "▁harass", + -13.831750869750977 + ], + [ + "489", + -13.831753730773926 + ], + [ + "▁Dist", + -13.831761360168455 + ], + [ + "▁rainforests", + -13.831804275512695 + ], + [ + "▁resurrected", + -13.83182430267334 + ], + [ + "▁doggie", + -13.831849098205566 + ], + [ + "nado", + -13.83187198638916 + ], + [ + "jur", + -13.831917762756348 + ], + [ + "▁AAP", + -13.83191967010498 + ], + [ + "▁quench", + -13.831950187683104 + ], + [ + "▁sleeveless", + -13.831954002380373 + ], + [ + "▁Ugly", + -13.83196258544922 + ], + [ + "▁debuts", + -13.8319730758667 + ], + [ + "recipe", + -13.832050323486328 + ], + [ + "614", + -13.832086563110352 + ], + [ + "▁syndicated", + -13.832124710083008 + ], + [ + "738", + -13.83217430114746 + ], + [ + "▁baffled", + -13.832209587097168 + ], + [ + "77.", + -13.832233428955078 + ], + [ + "▁Bhag", + -13.832258224487305 + ], + [ + "PhD", + -13.832283973693848 + ], + [ + "▁sauteed", + -13.8323335647583 + ], + [ + "IBM", + -13.832352638244627 + ], + [ + "▁PADI", + -13.832379341125488 + ], + [ + "▁17,000", + -13.832391738891602 + ], + [ + "WARD", + -13.83247184753418 + ], + [ + "ieren", + -13.832537651062012 + ], + [ + "kei", + -13.832602500915527 + ], + [ + "▁DISH", + -13.83260726928711 + ], + [ + "▁Liner", + -13.832674980163574 + ], + [ + "riff", + -13.832718849182127 + ], + [ + "asan", + -13.83275318145752 + ], + [ + "▁$0.00", + -13.8328275680542 + ], + [ + "query", + -13.83286476135254 + ], + [ + "▁Hir", + -13.83290195465088 + ], + [ + "▁Sinks", + -13.832950592041016 + ], + [ + "▁Yao", + -13.833003044128418 + ], + [ + "ooh", + -13.833089828491213 + ], + [ + "#3", + -13.833122253417969 + ], + [ + "dahl", + -13.8331880569458 + ], + [ + "▁bodybuilding", + -13.833240509033203 + ], + [ + "▁Hof", + -13.833284378051758 + ], + [ + "graphs", + -13.833374977111816 + ], + [ + "▁policemen", + -13.833422660827637 + ], + [ + "▁Pharisees", + -13.8334321975708 + ], + [ + "▁Chao", 
+ -13.833456039428713 + ], + [ + "▁34%", + -13.833545684814451 + ], + [ + "erly", + -13.833568572998049 + ], + [ + "▁checkup", + -13.83359146118164 + ], + [ + "ontology", + -13.833653450012209 + ], + [ + "▁NCC", + -13.833700180053713 + ], + [ + "topped", + -13.833723068237305 + ], + [ + "quil", + -13.83376693725586 + ], + [ + "704", + -13.833807945251465 + ], + [ + "▁Waterway", + -13.833807945251465 + ], + [ + "▁tilting", + -13.833847045898438 + ], + [ + "----------------", + -13.833879470825195 + ], + [ + "▁clot", + -13.833882331848145 + ], + [ + "▁carriages", + -13.83392333984375 + ], + [ + "▁Boer", + -13.833974838256836 + ], + [ + "gating", + -13.833998680114746 + ], + [ + "▁Gali", + -13.834013938903809 + ], + [ + "▁swearing", + -13.83415412902832 + ], + [ + "▁Sass", + -13.834237098693848 + ], + [ + "Rule", + -13.83424186706543 + ], + [ + "Parts", + -13.834253311157228 + ], + [ + "codes", + -13.834258079528809 + ], + [ + "rena", + -13.834270477294922 + ], + [ + "▁Macon", + -13.834295272827148 + ], + [ + "▁Adhesive", + -13.834314346313477 + ], + [ + "▁cleansed", + -13.834325790405272 + ], + [ + "imate", + -13.834328651428224 + ], + [ + "Haven", + -13.834343910217283 + ], + [ + "dressed", + -13.834344863891602 + ], + [ + "▁Frenchman", + -13.834380149841309 + ], + [ + "lique", + -13.834407806396484 + ], + [ + "▁“[", + -13.834425926208496 + ], + [ + "orium", + -13.834439277648926 + ], + [ + "CHI", + -13.834519386291504 + ], + [ + "▁Arrive", + -13.834562301635742 + ], + [ + "▁Balanced", + -13.834593772888184 + ], + [ + "016", + -13.83461570739746 + ], + [ + "witz", + -13.834689140319824 + ], + [ + "▁tether", + -13.834711074829102 + ], + [ + "Island", + -13.834742546081545 + ], + [ + "oose", + -13.83476448059082 + ], + [ + "▁Surya", + -13.834792137145996 + ], + [ + "PUB", + -13.834842681884766 + ], + [ + "▁ADS", + -13.83486557006836 + ], + [ + "Teen", + -13.834867477416992 + ], + [ + "Doug", + -13.834871292114258 + ], + [ + "Alternative", + -13.834881782531738 + ], + [ + "▁contr", + -13.834940910339355 + ], + [ + "▁Distributors", + -13.834968566894531 + ], + [ + "821", + -13.834972381591797 + ], + [ + "▁flimsy", + -13.834979057312012 + ], + [ + "▁freelancing", + -13.834979057312012 + ], + [ + "▁manipulative", + -13.834979057312012 + ], + [ + "▁notoriety", + -13.834979057312012 + ], + [ + "▁Equinox", + -13.834980964660645 + ], + [ + "▁aqueous", + -13.83498191833496 + ], + [ + "▁antiseptic", + -13.835006713867188 + ], + [ + "▁Fujitsu", + -13.83500862121582 + ], + [ + "▁(1993)", + -13.835009574890137 + ], + [ + "▁MSNBC", + -13.83501148223877 + ], + [ + "RESULT", + -13.83501434326172 + ], + [ + "▁adobe", + -13.835044860839844 + ], + [ + "▁Scarlett", + -13.83505153656006 + ], + [ + "▁Marathi", + -13.835061073303224 + ], + [ + "▁raster", + -13.835079193115234 + ], + [ + "▁Donegal", + -13.83508014678955 + ], + [ + "▁glorified", + -13.835094451904297 + ], + [ + "▁RCMP", + -13.835121154785156 + ], + [ + "Advisor", + -13.835123062133787 + ], + [ + "▁concur", + -13.83515739440918 + ], + [ + "Miami", + -13.835165977478027 + ], + [ + "▁Winnie", + -13.835177421569824 + ], + [ + "▁Compton", + -13.835192680358888 + ], + [ + "▁Crowe", + -13.835200309753418 + ], + [ + "ATH", + -13.835225105285645 + ], + [ + "052", + -13.83523178100586 + ], + [ + "quan", + -13.835243225097656 + ], + [ + "▁Parc", + -13.835251808166504 + ], + [ + "▁macular", + -13.835264205932615 + ], + [ + "▁cobbled", + -13.83534049987793 + ], + [ + "538", + -13.835378646850586 + ], + [ + "▁recounted", + -13.83541202545166 + ], + [ + "▁booties", 
+ -13.835529327392578 + ], + [ + "▁NPS", + -13.835579872131348 + ], + [ + "eez", + -13.835623741149902 + ], + [ + "▁Seri", + -13.835659980773926 + ], + [ + "villa", + -13.83570671081543 + ], + [ + "mish", + -13.835707664489746 + ], + [ + "Role", + -13.835844993591309 + ], + [ + "▁Aesthetic", + -13.835853576660156 + ], + [ + "▁Bj", + -13.83586311340332 + ], + [ + "▁oiled", + -13.835887908935549 + ], + [ + "▁Advertiser", + -13.835901260375977 + ], + [ + "▁mocking", + -13.835969924926758 + ], + [ + "6.8", + -13.835992813110352 + ], + [ + "▁holo", + -13.836134910583496 + ], + [ + "▁straightening", + -13.836151123046877 + ], + [ + "▁Shia", + -13.836188316345217 + ], + [ + "▁inert", + -13.836227416992188 + ], + [ + "▁imprinted", + -13.836237907409668 + ], + [ + "▁191", + -13.8362455368042 + ], + [ + "▁chiefs", + -13.836246490478516 + ], + [ + "▁Vue", + -13.83625030517578 + ], + [ + "▁bolstered", + -13.836255073547363 + ], + [ + "▁ripen", + -13.836371421813965 + ], + [ + "▁Connectivity", + -13.836437225341797 + ], + [ + "▁KTM", + -13.836459159851074 + ], + [ + "RZ", + -13.836533546447754 + ], + [ + "status", + -13.83653450012207 + ], + [ + "ehn", + -13.83657169342041 + ], + [ + "7.4", + -13.836592674255373 + ], + [ + "igma", + -13.836605072021484 + ], + [ + "Stat", + -13.83670711517334 + ], + [ + "keepers", + -13.836708068847656 + ], + [ + "749", + -13.836724281311035 + ], + [ + "▁CLASS", + -13.836899757385254 + ], + [ + "▁Strings", + -13.836943626403809 + ], + [ + "▁Mano", + -13.8369779586792 + ], + [ + "▁exquisitely", + -13.83698558807373 + ], + [ + "▁echoing", + -13.83702278137207 + ], + [ + "▁lat", + -13.837122917175291 + ], + [ + "▁lectured", + -13.837122917175291 + ], + [ + "▁billet", + -13.83716106414795 + ], + [ + "▁Placing", + -13.837180137634276 + ], + [ + "ext", + -13.837217330932615 + ], + [ + "rrie", + -13.837242126464844 + ], + [ + "▁810", + -13.837246894836426 + ], + [ + "▁Formerly", + -13.83728313446045 + ], + [ + "▁brightening", + -13.837289810180664 + ], + [ + "▁Zag", + -13.837400436401367 + ], + [ + "▁Norse", + -13.837479591369627 + ], + [ + "042", + -13.837549209594728 + ], + [ + "▁Galvanized", + -13.83755874633789 + ], + [ + "Arch", + -13.837603569030762 + ], + [ + "heel", + -13.83761978149414 + ], + [ + "▁Waltz", + -13.837665557861328 + ], + [ + "DEX", + -13.83771800994873 + ], + [ + "▁NOTICE", + -13.837727546691896 + ], + [ + "▁groin", + -13.837730407714844 + ], + [ + "esca", + -13.837733268737791 + ], + [ + "▁gravitate", + -13.837872505187988 + ], + [ + "▁jeopardize", + -13.837872505187988 + ], + [ + "▁boundless", + -13.838129043579102 + ], + [ + "▁conf", + -13.838129043579102 + ], + [ + "igger", + -13.838178634643556 + ], + [ + "▁saucer", + -13.838186264038086 + ], + [ + "▁visualizations", + -13.838265419006348 + ], + [ + "Pot", + -13.838322639465332 + ], + [ + "▁Fifteen", + -13.83839511871338 + ], + [ + "▁wishlist", + -13.838454246520996 + ], + [ + "▁Hendrix", + -13.838475227355955 + ], + [ + "▁aggregator", + -13.838475227355955 + ], + [ + "▁applicability", + -13.838475227355955 + ], + [ + "▁exfoliating", + -13.838475227355955 + ], + [ + "▁guerrilla", + -13.838475227355955 + ], + [ + "▁undisturbed", + -13.838475227355955 + ], + [ + "▁Condominium", + -13.838476181030272 + ], + [ + "▁Gonzales", + -13.838476181030272 + ], + [ + "▁Palermo", + -13.838476181030272 + ], + [ + "▁esoteric", + -13.838476181030272 + ], + [ + "▁reviving", + -13.83847713470459 + ], + [ + "▁Sichuan", + -13.838478088378906 + ], + [ + "▁sourdough", + -13.83847999572754 + ], + [ + "▁Gettysburg", + 
-13.838482856750488 + ], + [ + "▁Awning", + -13.838488578796388 + ], + [ + "▁Humanitarian", + -13.838507652282717 + ], + [ + "▁Neural", + -13.838534355163574 + ], + [ + "▁astronomers", + -13.838545799255373 + ], + [ + "▁Mower", + -13.838562965393066 + ], + [ + "▁mandolin", + -13.838573455810549 + ], + [ + "▁jawbone", + -13.83858585357666 + ], + [ + "▁VCE", + -13.838634490966797 + ], + [ + "github", + -13.838675498962402 + ], + [ + "▁stemmed", + -13.83869171142578 + ], + [ + "Shadow", + -13.838716506958008 + ], + [ + "Doctors", + -13.83872127532959 + ], + [ + "relevant", + -13.83873176574707 + ], + [ + "▁HID", + -13.83873176574707 + ], + [ + "agin", + -13.83873462677002 + ], + [ + "decision", + -13.838747024536133 + ], + [ + "▁plotted", + -13.838778495788574 + ], + [ + "torn", + -13.838892936706545 + ], + [ + "▁Hirsch", + -13.838953971862791 + ], + [ + "Nancy", + -13.838964462280272 + ], + [ + "239", + -13.839025497436523 + ], + [ + "▁Render", + -13.839131355285645 + ], + [ + "▁sherry", + -13.839151382446287 + ], + [ + "▁Sib", + -13.839214324951172 + ], + [ + "offers", + -13.839273452758787 + ], + [ + "▁Petal", + -13.839310646057127 + ], + [ + "venture", + -13.83936882019043 + ], + [ + "hira", + -13.839430809020996 + ], + [ + "ORG", + -13.839522361755373 + ], + [ + "756", + -13.839533805847168 + ], + [ + "242", + -13.839534759521484 + ], + [ + "listen", + -13.839559555053713 + ], + [ + "uci", + -13.839600563049316 + ], + [ + "tological", + -13.839634895324709 + ], + [ + "▁Motown", + -13.839682579040527 + ], + [ + "swing", + -13.839683532714844 + ], + [ + "▁Glide", + -13.83970832824707 + ], + [ + "▁improvise", + -13.839750289916992 + ], + [ + "▁repeatable", + -13.83984661102295 + ], + [ + "cini", + -13.839863777160645 + ], + [ + "kami", + -13.839900016784668 + ], + [ + "metic", + -13.83991527557373 + ], + [ + "▁Sonos", + -13.83994960784912 + ], + [ + "▁DSC", + -13.839977264404297 + ], + [ + "lifting", + -13.839994430541992 + ], + [ + "▁Seb", + -13.839998245239258 + ], + [ + "▁Dunes", + -13.840048789978027 + ], + [ + "Hang", + -13.84008502960205 + ], + [ + "BIC", + -13.840224266052246 + ], + [ + "915", + -13.84028434753418 + ], + [ + "60)", + -13.84034252166748 + ], + [ + "▁Cour", + -13.840357780456545 + ], + [ + "▁ky", + -13.840391159057615 + ], + [ + "▁fantastical", + -13.840415954589844 + ], + [ + "empt", + -13.84043312072754 + ], + [ + "▁uptick", + -13.840476036071776 + ], + [ + "Labs", + -13.84048557281494 + ], + [ + "▁excise", + -13.84048557281494 + ], + [ + "quote", + -13.840536117553713 + ], + [ + "▁illuminates", + -13.840546607971191 + ], + [ + "▁Gratis", + -13.840583801269531 + ], + [ + "832", + -13.84059238433838 + ], + [ + "▁Acceptance", + -13.840716361999512 + ], + [ + "▁Constantine", + -13.840819358825684 + ], + [ + "Watching", + -13.840882301330566 + ], + [ + "▁5-2", + -13.840913772583008 + ], + [ + "bora", + -13.840919494628906 + ], + [ + "▁555", + -13.840933799743652 + ], + [ + "▁Departments", + -13.84096336364746 + ], + [ + "REL", + -13.841020584106444 + ], + [ + "▁bookcases", + -13.841036796569824 + ], + [ + "▁Liza", + -13.841068267822266 + ], + [ + "▁preorder", + -13.841115951538086 + ], + [ + "173", + -13.841118812561035 + ], + [ + "▁ASD", + -13.84113597869873 + ], + [ + "▁Decals", + -13.84116554260254 + ], + [ + "▁Mav", + -13.841216087341309 + ], + [ + "▁Pembroke", + -13.841222763061523 + ], + [ + "477", + -13.841266632080078 + ], + [ + "▁CNS", + -13.841349601745604 + ], + [ + "▁Passing", + -13.841350555419922 + ], + [ + "▁fiance", + -13.84147834777832 + ], + [ + 
"▁winemaker", + -13.841500282287598 + ], + [ + "YD", + -13.841512680053713 + ], + [ + "▁kiddie", + -13.841516494750977 + ], + [ + "Establish", + -13.84160614013672 + ], + [ + "▁solver", + -13.841611862182615 + ], + [ + "enjoy", + -13.841619491577148 + ], + [ + "▁sediments", + -13.841723442077637 + ], + [ + "484", + -13.841748237609863 + ], + [ + "▁REVIEW", + -13.841784477233888 + ], + [ + "▁regularity", + -13.841800689697266 + ], + [ + "chter", + -13.841836929321287 + ], + [ + "▁blemish", + -13.841843605041504 + ], + [ + "Penn", + -13.841917991638184 + ], + [ + "▁Aron", + -13.841949462890623 + ], + [ + "▁Legislation", + -13.841983795166016 + ], + [ + "▁Raccoon", + -13.841983795166016 + ], + [ + "▁inseparable", + -13.841983795166016 + ], + [ + "▁phenotype", + -13.841983795166016 + ], + [ + "▁puzzling", + -13.841983795166016 + ], + [ + "▁Gatsby", + -13.841984748840332 + ], + [ + "▁Aleppo", + -13.84198760986328 + ], + [ + "▁squirt", + -13.841988563537598 + ], + [ + "▁Astrology", + -13.841989517211914 + ], + [ + "▁paraphrase", + -13.84199047088623 + ], + [ + "▁Assessor", + -13.841991424560549 + ], + [ + "▁brawl", + -13.841998100280762 + ], + [ + "▁GN", + -13.842010498046877 + ], + [ + "▁unmarried", + -13.842020988464355 + ], + [ + "▁intercultural", + -13.842058181762695 + ], + [ + "▁17\"", + -13.842061042785645 + ], + [ + "▁Softball", + -13.84210968017578 + ], + [ + "▁Larsen", + -13.842123985290527 + ], + [ + "▁Muscat", + -13.842129707336426 + ], + [ + "▁fondant", + -13.842169761657717 + ], + [ + "propos", + -13.842201232910156 + ], + [ + "▁runways", + -13.842229843139648 + ], + [ + "▁Km", + -13.842232704162598 + ], + [ + "▁repurchase", + -13.842241287231444 + ], + [ + "▁Sakura", + -13.842245101928713 + ], + [ + "▁Albums", + -13.842248916625977 + ], + [ + "▁shipbuilding", + -13.842329978942873 + ], + [ + "Genesis", + -13.84233283996582 + ], + [ + "Theme", + -13.84233283996582 + ], + [ + "▁ICS", + -13.842336654663086 + ], + [ + "dogs", + -13.842351913452148 + ], + [ + "Pulse", + -13.842363357543944 + ], + [ + "▁foreigner", + -13.842374801635742 + ], + [ + "Senator", + -13.84239387512207 + ], + [ + "▁methodical", + -13.84241771697998 + ], + [ + "▁feats", + -13.842427253723145 + ], + [ + "▁reinstated", + -13.842500686645508 + ], + [ + "621", + -13.842517852783203 + ], + [ + "▁($6", + -13.842528343200684 + ], + [ + "proc", + -13.842534065246582 + ], + [ + "▁clinched", + -13.842541694641112 + ], + [ + "▁Pf", + -13.84254264831543 + ], + [ + "▁$1.00", + -13.842573165893556 + ], + [ + "▁Macs", + -13.84260082244873 + ], + [ + "▁Rusty", + -13.84261703491211 + ], + [ + "▁Consumption", + -13.842629432678224 + ], + [ + "▁Recycled", + -13.842641830444336 + ], + [ + "Aim", + -13.842650413513184 + ], + [ + "▁Munro", + -13.84265422821045 + ], + [ + "61.", + -13.842658042907717 + ], + [ + "▁52%", + -13.842695236206056 + ], + [ + "▁pyramids", + -13.842737197875977 + ], + [ + "▁shoved", + -13.842751502990724 + ], + [ + "▁Corre", + -13.842758178710938 + ], + [ + "не", + -13.842764854431152 + ], + [ + "▁Ghosts", + -13.84278964996338 + ], + [ + "▁MPS", + -13.84282112121582 + ], + [ + "▁Forrester", + -13.842891693115234 + ], + [ + "▁pls", + -13.842927932739258 + ], + [ + "▁Masonic", + -13.843017578125 + ], + [ + "▁hookup", + -13.843087196350098 + ], + [ + "1989", + -13.843122482299805 + ], + [ + "782", + -13.843144416809082 + ], + [ + "▁lapel", + -13.843256950378418 + ], + [ + "▁millimeters", + -13.843374252319336 + ], + [ + "▁cubs", + -13.8433837890625 + ], + [ + "dense", + -13.843475341796877 + ], + [ + "mutation", 
+ -13.843576431274414 + ], + [ + "STONE", + -13.843599319458008 + ], + [ + "▁dovetail", + -13.843602180480955 + ], + [ + "▁MMS", + -13.843713760375977 + ], + [ + "▁Nutri", + -13.84371852874756 + ], + [ + "ONY", + -13.84372329711914 + ], + [ + "▁Boomers", + -13.843743324279783 + ], + [ + "▁Ollie", + -13.843757629394531 + ], + [ + "▁fide", + -13.843789100646973 + ], + [ + "FIT", + -13.843817710876465 + ], + [ + "▁Lightly", + -13.84386920928955 + ], + [ + "▁#11", + -13.843918800354004 + ], + [ + "▁Blink", + -13.84414291381836 + ], + [ + "▁comrades", + -13.844155311584473 + ], + [ + "▁genesis", + -13.844165802001951 + ], + [ + "559", + -13.844168663024902 + ], + [ + "▁Withdrawal", + -13.844198226928713 + ], + [ + "▁PRIVATE", + -13.844220161437988 + ], + [ + "▁Luz", + -13.844313621520996 + ], + [ + "▁Kli", + -13.844388961791992 + ], + [ + "▁Greenfield", + -13.844401359558104 + ], + [ + "▁shouts", + -13.844463348388672 + ], + [ + "▁EBITDA", + -13.84447193145752 + ], + [ + "▁Georges", + -13.844518661499023 + ], + [ + "▁penetrates", + -13.84455108642578 + ], + [ + "▁Pug", + -13.844575881958008 + ], + [ + "▁avoidable", + -13.844688415527344 + ], + [ + "ishing", + -13.844718933105469 + ], + [ + "jim", + -13.844775199890137 + ], + [ + "▁Asher", + -13.844799995422363 + ], + [ + "▁Regents", + -13.844823837280272 + ], + [ + "▁abstain", + -13.844825744628906 + ], + [ + "▁Receipt", + -13.844837188720703 + ], + [ + "▁Warden", + -13.844907760620115 + ], + [ + "bron", + -13.844908714294434 + ], + [ + "▁WATCH", + -13.844910621643066 + ], + [ + "▁Mahindra", + -13.84492301940918 + ], + [ + "▁CAC", + -13.845023155212402 + ], + [ + "▁twitch", + -13.845088005065918 + ], + [ + "▁335", + -13.845138549804688 + ], + [ + "QUA", + -13.845221519470217 + ], + [ + "▁anew", + -13.845438957214355 + ], + [ + "▁Eisenhower", + -13.845504760742188 + ], + [ + "▁geriatric", + -13.845504760742188 + ], + [ + "▁laziness", + -13.845504760742188 + ], + [ + "▁Detached", + -13.845508575439451 + ], + [ + "▁Shapiro", + -13.845508575439451 + ], + [ + "▁DuPont", + -13.845511436462402 + ], + [ + "▁Catalina", + -13.845521926879885 + ], + [ + "▁OTT", + -13.845523834228516 + ], + [ + "Historian", + -13.84553050994873 + ], + [ + "▁steampunk", + -13.845541000366213 + ], + [ + "▁Paving", + -13.845553398132324 + ], + [ + "▁mickey", + -13.84555435180664 + ], + [ + "hila", + -13.845562934875488 + ], + [ + "▁restitution", + -13.845579147338867 + ], + [ + "▁2019)", + -13.845582962036133 + ], + [ + "▁Granny", + -13.84559154510498 + ], + [ + "▁Crowley", + -13.845613479614258 + ], + [ + "SAC", + -13.845621109008787 + ], + [ + "▁Shout", + -13.845657348632812 + ], + [ + "554", + -13.845687866210938 + ], + [ + "▁instinctively", + -13.845704078674316 + ], + [ + "▁Reliability", + -13.84572410583496 + ], + [ + "▁Virgo", + -13.84575653076172 + ], + [ + "▁504", + -13.845819473266602 + ], + [ + "▁coexist", + -13.845884323120115 + ], + [ + "▁Elevator", + -13.8458890914917 + ], + [ + "▁implicitly", + -13.845913887023926 + ], + [ + "advisor", + -13.845929145812988 + ], + [ + "artificial", + -13.84593105316162 + ], + [ + "Lincoln", + -13.84593677520752 + ], + [ + "▁overcrowded", + -13.845947265625 + ], + [ + "▁Sdn", + -13.845951080322266 + ], + [ + "▁roadblocks", + -13.845951080322266 + ], + [ + "▁Jumper", + -13.845985412597656 + ], + [ + "▁tinkering", + -13.84604263305664 + ], + [ + "▁Shiite", + -13.846096992492676 + ], + [ + "Flowers", + -13.846166610717772 + ], + [ + "Morgan", + -13.846196174621582 + ], + [ + "▁Cottages", + -13.846236228942873 + ], + [ + "▁loin", + 
-13.846236228942873 + ], + [ + "personally", + -13.846258163452148 + ], + [ + "▁capacitors", + -13.84628200531006 + ], + [ + "▁Ars", + -13.846356391906738 + ], + [ + "▁Trenton", + -13.8463716506958 + ], + [ + "BRA", + -13.846400260925291 + ], + [ + "▁snapper", + -13.846441268920898 + ], + [ + "nti", + -13.846442222595217 + ], + [ + "alternative", + -13.8464994430542 + ], + [ + "▁Balkan", + -13.84650421142578 + ], + [ + "▁cartel", + -13.846534729003906 + ], + [ + "▁Ellison", + -13.846566200256348 + ], + [ + "▁carpeted", + -13.846571922302246 + ], + [ + "▁breathes", + -13.846660614013672 + ], + [ + "Roof", + -13.8466796875 + ], + [ + "patri", + -13.846725463867188 + ], + [ + "///", + -13.84674835205078 + ], + [ + "dura", + -13.846759796142578 + ], + [ + "tini", + -13.84677505493164 + ], + [ + "otype", + -13.846864700317385 + ], + [ + "▁expend", + -13.846872329711914 + ], + [ + "▁rangers", + -13.846901893615724 + ], + [ + "▁als", + -13.847047805786133 + ], + [ + "▁raided", + -13.847049713134766 + ], + [ + "cities", + -13.847070693969728 + ], + [ + "▁whispers", + -13.847082138061523 + ], + [ + "▁fantastically", + -13.8472261428833 + ], + [ + "523", + -13.847253799438477 + ], + [ + "▁Spit", + -13.847285270690918 + ], + [ + "▁Ying", + -13.847308158874512 + ], + [ + "498", + -13.847329139709473 + ], + [ + "gau", + -13.847366333007812 + ], + [ + "▁novices", + -13.847397804260254 + ], + [ + "riv", + -13.847417831420898 + ], + [ + "Pie", + -13.847421646118164 + ], + [ + "DEC", + -13.84744930267334 + ], + [ + "uran", + -13.847456932067873 + ], + [ + "▁reasoned", + -13.847497940063477 + ], + [ + "▁micron", + -13.847545623779297 + ], + [ + "reen", + -13.8475980758667 + ], + [ + "▁altitudes", + -13.847686767578123 + ], + [ + "tium", + -13.84768772125244 + ], + [ + "▁265", + -13.847695350646973 + ], + [ + "▁Oldham", + -13.847750663757324 + ], + [ + "▁hyped", + -13.84776210784912 + ], + [ + "4.5%", + -13.847789764404297 + ], + [ + "enc", + -13.84781265258789 + ], + [ + "▁perish", + -13.84784698486328 + ], + [ + "▁Cosmo", + -13.847963333129885 + ], + [ + "▁synthesize", + -13.847966194152832 + ], + [ + "▁Vela", + -13.847994804382324 + ], + [ + "Mixed", + -13.848028182983398 + ], + [ + "thermal", + -13.84805393218994 + ], + [ + "▁bearer", + -13.848102569580078 + ], + [ + "racing", + -13.848281860351562 + ], + [ + "amble", + -13.848424911499023 + ], + [ + "▁lactic", + -13.848438262939451 + ], + [ + "▁Traveller", + -13.848454475402832 + ], + [ + "▁stomp", + -13.848562240600586 + ], + [ + "Esc", + -13.848600387573242 + ], + [ + "▁Tories", + -13.848647117614746 + ], + [ + "GST", + -13.848660469055176 + ], + [ + "▁doodle", + -13.848663330078123 + ], + [ + "▁Exact", + -13.848676681518556 + ], + [ + "GAL", + -13.848745346069336 + ], + [ + "▁194", + -13.848814010620115 + ], + [ + "▁Glam", + -13.84884548187256 + ], + [ + "▁USCIS", + -13.848896980285645 + ], + [ + "▁Compassion", + -13.848921775817873 + ], + [ + "▁Guggenheim", + -13.849039077758787 + ], + [ + "▁juxtaposition", + -13.849039077758787 + ], + [ + "▁obituary", + -13.849039077758787 + ], + [ + "▁plunging", + -13.849039077758787 + ], + [ + "▁incontinence", + -13.849040031433104 + ], + [ + "7.5%", + -13.849044799804688 + ], + [ + "▁Bezos", + -13.84904670715332 + ], + [ + "▁Colchester", + -13.849050521850586 + ], + [ + "▁Riyadh", + -13.849050521850586 + ], + [ + "▁Turnbull", + -13.849063873291016 + ], + [ + "▁Consciousness", + -13.849084854125977 + ], + [ + "pile", + -13.849088668823242 + ], + [ + "▁Establishment", + -13.84914779663086 + ], + [ + "▁Genome", + 
-13.849149703979492 + ], + [ + "▁LDAP", + -13.849157333374023 + ], + [ + "cookies", + -13.849178314208984 + ], + [ + "▁devout", + -13.849187850952148 + ], + [ + "▁$4,000", + -13.849238395690918 + ], + [ + "atar", + -13.849247932434082 + ], + [ + "▁Wid", + -13.84931755065918 + ], + [ + "▁VMs", + -13.849326133728027 + ], + [ + "▁Zulu", + -13.849342346191406 + ], + [ + "▁imposes", + -13.849392890930176 + ], + [ + "Sar", + -13.849403381347656 + ], + [ + "aqua", + -13.849424362182615 + ], + [ + "▁extinguish", + -13.849492073059082 + ], + [ + "Toronto", + -13.84955883026123 + ], + [ + "Sydney", + -13.84956169128418 + ], + [ + "▁Utopia", + -13.849624633789062 + ], + [ + "Including", + -13.849642753601074 + ], + [ + "▁Nin", + -13.849644660949709 + ], + [ + "▁conforms", + -13.849663734436035 + ], + [ + "usive", + -13.849684715270996 + ], + [ + "▁Conversations", + -13.849692344665527 + ], + [ + "▁hordes", + -13.84970474243164 + ], + [ + "▁???????", + -13.84971046447754 + ], + [ + "▁puffy", + -13.8497314453125 + ], + [ + "▁Fruits", + -13.849740982055664 + ], + [ + "56.", + -13.849749565124512 + ], + [ + "66.", + -13.84976863861084 + ], + [ + "▁probing", + -13.849785804748535 + ], + [ + "▁Wishes", + -13.849831581115724 + ], + [ + "▁Julio", + -13.84985637664795 + ], + [ + "▁Goff", + -13.849875450134276 + ], + [ + "▁origination", + -13.849905967712402 + ], + [ + "▁Xtreme", + -13.849937438964844 + ], + [ + "▁Yea", + -13.849942207336426 + ], + [ + "Merry", + -13.84994888305664 + ], + [ + "▁NORTH", + -13.84996223449707 + ], + [ + "zee", + -13.849967956542969 + ], + [ + "Py", + -13.850001335144045 + ], + [ + "▁gl", + -13.850065231323242 + ], + [ + "oor", + -13.850176811218262 + ], + [ + "▁Organize", + -13.850282669067385 + ], + [ + "itate", + -13.85029411315918 + ], + [ + "▁Destin", + -13.850299835205078 + ], + [ + "▁CMO", + -13.85033130645752 + ], + [ + "▁Forte", + -13.850358963012695 + ], + [ + "▁nj", + -13.85036563873291 + ], + [ + "IONS", + -13.85037612915039 + ], + [ + "833", + -13.850414276123049 + ], + [ + "▁genders", + -13.850414276123049 + ], + [ + "queen", + -13.85054874420166 + ], + [ + "▁intricately", + -13.850594520568848 + ], + [ + "▁Learners", + -13.85068416595459 + ], + [ + "remove", + -13.850708961486816 + ], + [ + "kash", + -13.850727081298828 + ], + [ + "▁Promoter", + -13.850752830505373 + ], + [ + "NAC", + -13.850857734680176 + ], + [ + "▁quail", + -13.850919723510742 + ], + [ + "▁murderous", + -13.850996971130373 + ], + [ + "▁burdened", + -13.851025581359863 + ], + [ + "Neal", + -13.851032257080078 + ], + [ + "▁317", + -13.851037979125977 + ], + [ + "▁Screens", + -13.851043701171877 + ], + [ + "▁Ez", + -13.85106372833252 + ], + [ + "▁WOOD", + -13.851120948791504 + ], + [ + "▁Hereford", + -13.851215362548828 + ], + [ + "▁Stre", + -13.851288795471191 + ], + [ + "geek", + -13.851310729980469 + ], + [ + "Whereas", + -13.851340293884276 + ], + [ + "▁Allie", + -13.8513765335083 + ], + [ + "▁kilometre", + -13.851385116577148 + ], + [ + "543", + -13.851414680480955 + ], + [ + "246", + -13.851421356201172 + ], + [ + "▁Recreational", + -13.851470947265623 + ], + [ + "▁333", + -13.851521492004396 + ], + [ + "▁Kool", + -13.851547241210938 + ], + [ + "▁dropout", + -13.851672172546388 + ], + [ + "▁UBC", + -13.85171127319336 + ], + [ + "idia", + -13.85174560546875 + ], + [ + "▁Potassium", + -13.851820945739746 + ], + [ + "▁FIND", + -13.85182762145996 + ], + [ + "▁dangle", + -13.851899147033691 + ], + [ + "Slow", + -13.851945877075195 + ], + [ + "▁Wald", + -13.851981163024902 + ], + [ + "▁Whichever", + 
-13.852008819580078 + ], + [ + "471", + -13.85202693939209 + ], + [ + "Lip", + -13.852217674255373 + ], + [ + "▁overgrown", + -13.852217674255373 + ], + [ + "technic", + -13.85231590270996 + ], + [ + "▁ZTE", + -13.852336883544922 + ], + [ + "neau", + -13.852340698242188 + ], + [ + "▁Vape", + -13.852359771728516 + ], + [ + "▁induces", + -13.852365493774414 + ], + [ + "▁BART", + -13.85236930847168 + ], + [ + "▁Tester", + -13.852476119995115 + ], + [ + "536", + -13.852497100830078 + ], + [ + "▁Serenity", + -13.852543830871582 + ], + [ + "Correct", + -13.852572441101074 + ], + [ + "▁Connolly", + -13.852584838867188 + ], + [ + "▁Vanuatu", + -13.852584838867188 + ], + [ + "▁dandruff", + -13.852584838867188 + ], + [ + "▁denominator", + -13.852584838867188 + ], + [ + "▁margarine", + -13.852584838867188 + ], + [ + "▁pseudonym", + -13.852584838867188 + ], + [ + "▁rescuing", + -13.852584838867188 + ], + [ + "▁urethane", + -13.852588653564451 + ], + [ + "▁undecided", + -13.852602005004885 + ], + [ + "▁legion", + -13.852604866027832 + ], + [ + "▁Erickson", + -13.85260772705078 + ], + [ + "▁Cardi", + -13.852609634399414 + ], + [ + "▁Compression", + -13.85261344909668 + ], + [ + "▁Kinect", + -13.852633476257324 + ], + [ + "▁deadliest", + -13.852641105651855 + ], + [ + "▁Jem", + -13.852702140808104 + ], + [ + "▁shuts", + -13.852710723876951 + ], + [ + "▁Nowhere", + -13.852745056152344 + ], + [ + "Thought", + -13.852752685546877 + ], + [ + "▁formaldehyde", + -13.852755546569824 + ], + [ + "▁Entity", + -13.85276222229004 + ], + [ + "▁mistress", + -13.852802276611328 + ], + [ + "▁misled", + -13.852835655212402 + ], + [ + "Div", + -13.852846145629885 + ], + [ + "▁InDesign", + -13.852880477905272 + ], + [ + "▁'80", + -13.852900505065918 + ], + [ + "▁shortbread", + -13.852919578552246 + ], + [ + "Harris", + -13.85300636291504 + ], + [ + "few", + -13.85300636291504 + ], + [ + "writers", + -13.853031158447266 + ], + [ + "▁autographs", + -13.853090286254885 + ], + [ + "▁Stripes", + -13.853099822998049 + ], + [ + "▁McCann", + -13.853106498718262 + ], + [ + "▁Tripp", + -13.853111267089844 + ], + [ + "▁Kwa", + -13.85317039489746 + ], + [ + "Privacy", + -13.853194236755373 + ], + [ + "Premier", + -13.85320281982422 + ], + [ + "primary", + -13.85321044921875 + ], + [ + "Battery", + -13.85322093963623 + ], + [ + "▁reworked", + -13.853227615356444 + ], + [ + "▁Curb", + -13.85325050354004 + ], + [ + "▁Lizzie", + -13.85325050354004 + ], + [ + "▁Haze", + -13.853350639343262 + ], + [ + "▁ре", + -13.85337257385254 + ], + [ + "▁slaughtered", + -13.85342025756836 + ], + [ + "▁Orbit", + -13.853431701660156 + ], + [ + "▁spurt", + -13.853521347045898 + ], + [ + "Anderson", + -13.853525161743164 + ], + [ + "selves", + -13.853557586669922 + ], + [ + "▁Webber", + -13.853632926940918 + ], + [ + "hopper", + -13.853633880615234 + ], + [ + "respond", + -13.853718757629396 + ], + [ + "Moore", + -13.853779792785645 + ], + [ + "▁liens", + -13.853796005249023 + ], + [ + "▁1963.", + -13.853797912597656 + ], + [ + "Apps", + -13.853859901428224 + ], + [ + "▁easel", + -13.853867530822754 + ], + [ + "Chem", + -13.853958129882812 + ], + [ + "▁$95", + -13.853970527648926 + ], + [ + "021", + -13.854016304016112 + ], + [ + "shall", + -13.854095458984377 + ], + [ + "▁Frey", + -13.85416030883789 + ], + [ + "▁angelic", + -13.854228973388672 + ], + [ + "▁Intent", + -13.854289054870604 + ], + [ + "▁Remarkable", + -13.854300498962402 + ], + [ + "▁wrestlers", + -13.85431671142578 + ], + [ + "▁DRC", + -13.854325294494627 + ], + [ + "▁deities", + 
-13.854354858398438 + ], + [ + "▁Markus", + -13.854378700256348 + ], + [ + "▁Zhi", + -13.854437828063965 + ], + [ + "▁superpower", + -13.854490280151367 + ], + [ + "▁Ora", + -13.854491233825684 + ], + [ + "▁anxiously", + -13.854516983032228 + ], + [ + "shown", + -13.854531288146973 + ], + [ + "▁Utilize", + -13.854547500610352 + ], + [ + "▁Bashar", + -13.85456371307373 + ], + [ + "solv", + -13.85459327697754 + ], + [ + "Visiting", + -13.854604721069336 + ], + [ + "▁eyed", + -13.854729652404783 + ], + [ + "gree", + -13.854735374450684 + ], + [ + "▁enabler", + -13.854769706726074 + ], + [ + "▁REI", + -13.85478687286377 + ], + [ + "▁hors", + -13.854788780212402 + ], + [ + "▁MOVE", + -13.854838371276855 + ], + [ + "▁6-9", + -13.85488510131836 + ], + [ + "manual", + -13.85490894317627 + ], + [ + "NEY", + -13.85500717163086 + ], + [ + "loom", + -13.855161666870115 + ], + [ + "▁Perfectly", + -13.855183601379396 + ], + [ + "▁nozzles", + -13.85521411895752 + ], + [ + "▁reptile", + -13.855281829833984 + ], + [ + "▁outflow", + -13.8552827835083 + ], + [ + "UCC", + -13.85533332824707 + ], + [ + "▁1883", + -13.855485916137695 + ], + [ + "Workshop", + -13.855491638183594 + ], + [ + "ferring", + -13.855494499206545 + ], + [ + "▁7-9", + -13.855504989624023 + ], + [ + "▁narrate", + -13.85555362701416 + ], + [ + "▁Puck", + -13.855558395385742 + ], + [ + "▁boho", + -13.85557746887207 + ], + [ + "3:00", + -13.85560417175293 + ], + [ + "▁MID", + -13.85563850402832 + ], + [ + "15%", + -13.855642318725586 + ], + [ + "▁Quentin", + -13.855643272399902 + ], + [ + "▁stretcher", + -13.855667114257812 + ], + [ + "▁SOC", + -13.855679512023926 + ], + [ + "▁flutter", + -13.855688095092772 + ], + [ + "▁arcs", + -13.855732917785645 + ], + [ + "pear", + -13.855734825134276 + ], + [ + "▁squats", + -13.855740547180176 + ], + [ + "▁Riv", + -13.855952262878418 + ], + [ + "▁fastener", + -13.856001853942873 + ], + [ + "806", + -13.856036186218262 + ], + [ + "▁cordial", + -13.856063842773438 + ], + [ + "▁Academies", + -13.856143951416016 + ], + [ + "▁Leipzig", + -13.856143951416016 + ], + [ + "▁Solstice", + -13.856143951416016 + ], + [ + "▁auspices", + -13.856143951416016 + ], + [ + "▁collapsible", + -13.856143951416016 + ], + [ + "▁deciduous", + -13.856143951416016 + ], + [ + "▁exemplified", + -13.856143951416016 + ], + [ + "▁minimising", + -13.856143951416016 + ], + [ + "▁prosecuting", + -13.856143951416016 + ], + [ + "▁trundle", + -13.856143951416016 + ], + [ + "▁Angkor", + -13.856148719787598 + ], + [ + "▁magenta", + -13.856149673461914 + ], + [ + "▁arbitrator", + -13.856156349182127 + ], + [ + "▁cloning", + -13.856157302856444 + ], + [ + "▁allocating", + -13.856162071228027 + ], + [ + "▁Marietta", + -13.856196403503418 + ], + [ + "▁dangling", + -13.856209754943848 + ], + [ + "▁antagonist", + -13.85622501373291 + ], + [ + "▁Nath", + -13.856263160705566 + ], + [ + "▁Cortex", + -13.856350898742676 + ], + [ + "▁depositing", + -13.856350898742676 + ], + [ + "▁Vogel", + -13.856410026550291 + ], + [ + "526", + -13.856416702270508 + ], + [ + "Victor", + -13.856423377990724 + ], + [ + "▁Chaise", + -13.856430053710938 + ], + [ + "Vis", + -13.856439590454102 + ], + [ + "▁scalloped", + -13.856443405151367 + ], + [ + "▁Showtime", + -13.856475830078123 + ], + [ + "▁Railing", + -13.856492042541504 + ], + [ + "ried", + -13.856573104858398 + ], + [ + "▁mow", + -13.856826782226562 + ], + [ + "rubber", + -13.85692310333252 + ], + [ + "▁pulpit", + -13.85694980621338 + ], + [ + "district", + -13.856998443603516 + ], + [ + "EON", + 
-13.85702896118164 + ], + [ + "804", + -13.85703468322754 + ], + [ + "ETH", + -13.857096672058104 + ], + [ + "Claim", + -13.85710906982422 + ], + [ + "dragon", + -13.857129096984863 + ], + [ + "▁Bianca", + -13.85715389251709 + ], + [ + "▁Pickle", + -13.857227325439451 + ], + [ + "ongo", + -13.85723114013672 + ], + [ + "underrepresented", + -13.857237815856934 + ], + [ + "438", + -13.85731601715088 + ], + [ + "Via", + -13.857352256774902 + ], + [ + "▁Planter", + -13.857378959655762 + ], + [ + "▁Proceed", + -13.85743236541748 + ], + [ + "▁Recommendations", + -13.857497215270996 + ], + [ + "▁pala", + -13.857501983642578 + ], + [ + "kaya", + -13.85754680633545 + ], + [ + "▁fairies", + -13.857623100280762 + ], + [ + "▁algorithmic", + -13.857684135437012 + ], + [ + "▁mindless", + -13.857695579528809 + ], + [ + "▁dengan", + -13.857715606689451 + ], + [ + "▁surveyors", + -13.857831001281738 + ], + [ + "▁homely", + -13.857867240905762 + ], + [ + "▁zig", + -13.857909202575684 + ], + [ + "▁WIRE", + -13.857962608337402 + ], + [ + "stands", + -13.858040809631348 + ], + [ + "▁Henrik", + -13.858047485351562 + ], + [ + "▁appointing", + -13.85804843902588 + ], + [ + "▁Stump", + -13.85805892944336 + ], + [ + "▁Cin", + -13.858060836791992 + ], + [ + "▁petal", + -13.858061790466309 + ], + [ + "Didn", + -13.858064651489258 + ], + [ + "▁Gage", + -13.858099937438965 + ], + [ + "▁rushes", + -13.858114242553713 + ], + [ + "alta", + -13.85816764831543 + ], + [ + "▁Robots", + -13.858177185058594 + ], + [ + "▁Spill", + -13.858192443847656 + ], + [ + "0\"", + -13.858205795288086 + ], + [ + "inating", + -13.858238220214844 + ], + [ + "▁crusade", + -13.85823917388916 + ], + [ + "▁Veri", + -13.85831069946289 + ], + [ + "tou", + -13.858407020568848 + ], + [ + "▁1966.", + -13.858485221862791 + ], + [ + "▁Installed", + -13.858509063720703 + ], + [ + "tari", + -13.85856819152832 + ], + [ + "▁Jia", + -13.858585357666016 + ], + [ + "Yup", + -13.858607292175291 + ], + [ + "▁Foundations", + -13.858619689941406 + ], + [ + "QC", + -13.85870361328125 + ], + [ + "▁Thrill", + -13.858741760253906 + ], + [ + "▁Nino", + -13.85886573791504 + ], + [ + "▁numerals", + -13.858949661254885 + ], + [ + "▁exude", + -13.858960151672363 + ], + [ + "▁chatbot", + -13.8590087890625 + ], + [ + "▁Andover", + -13.859081268310549 + ], + [ + "▁silenced", + -13.859081268310549 + ], + [ + "▁gals", + -13.859148979187012 + ], + [ + "▁Owls", + -13.85923194885254 + ], + [ + "NUS", + -13.859280586242676 + ], + [ + "▁zoned", + -13.859383583068848 + ], + [ + "WELL", + -13.859394073486328 + ], + [ + "▁gnome", + -13.859420776367188 + ], + [ + "▁Colorful", + -13.859424591064451 + ], + [ + "▁grocer", + -13.859430313110352 + ], + [ + "heal", + -13.859456062316896 + ], + [ + "▁Speedy", + -13.859485626220703 + ], + [ + "PCC", + -13.859515190124512 + ], + [ + "shon", + -13.859540939331056 + ], + [ + "▁booted", + -13.85955047607422 + ], + [ + "-67", + -13.85959243774414 + ], + [ + "▁overdrive", + -13.859596252441406 + ], + [ + "sung", + -13.859627723693848 + ], + [ + "▁localities", + -13.859651565551758 + ], + [ + "ently", + -13.85969352722168 + ], + [ + "BIS", + -13.859702110290527 + ], + [ + "▁catamaran", + -13.85971450805664 + ], + [ + "▁indemnity", + -13.85971450805664 + ], + [ + "▁sturdiness", + -13.85971450805664 + ], + [ + "▁moratorium", + -13.859715461730955 + ], + [ + "▁scrimmage", + -13.859716415405272 + ], + [ + "▁Sheraton", + -13.859718322753906 + ], + [ + "▁rosé", + -13.85972499847412 + ], + [ + "▁Rochelle", + -13.859725952148438 + ], + [ + "▁Summon", + 
-13.859740257263184 + ], + [ + "▁Mekong", + -13.8597412109375 + ], + [ + "▁humming", + -13.859745025634766 + ], + [ + "▁Valuation", + -13.859752655029297 + ], + [ + "▁piecing", + -13.859756469726562 + ], + [ + "▁Ascension", + -13.859784126281738 + ], + [ + "▁Parma", + -13.8597993850708 + ], + [ + "▁Cleansing", + -13.859800338745115 + ], + [ + "▁Flanders", + -13.85980224609375 + ], + [ + "677", + -13.859813690185549 + ], + [ + "flam", + -13.859819412231444 + ], + [ + "▁CCI", + -13.859821319580078 + ], + [ + "▁Vick", + -13.85982894897461 + ], + [ + "▁granules", + -13.859848022460938 + ], + [ + "▁hasten", + -13.85987663269043 + ], + [ + "▁Brakes", + -13.859891891479492 + ], + [ + "▁Roundtable", + -13.859891891479492 + ], + [ + "▁rife", + -13.859917640686035 + ], + [ + "Virginia", + -13.859920501708984 + ], + [ + "▁Ori", + -13.859926223754885 + ], + [ + "▁sander", + -13.859959602355955 + ], + [ + "▁Grimes", + -13.859997749328612 + ], + [ + "▁affixed", + -13.860024452209473 + ], + [ + "CAC", + -13.860060691833496 + ], + [ + "▁predatory", + -13.86011028289795 + ], + [ + "▁Keeper", + -13.860207557678224 + ], + [ + "▁acc", + -13.860251426696776 + ], + [ + "▁harassing", + -13.860313415527344 + ], + [ + "ATIVE", + -13.860326766967772 + ], + [ + "▁swine", + -13.860356330871582 + ], + [ + "▁Tried", + -13.860371589660645 + ], + [ + "agic", + -13.86041259765625 + ], + [ + "Culture", + -13.860454559326172 + ], + [ + "▁verticals", + -13.860491752624512 + ], + [ + "ember", + -13.860496520996094 + ], + [ + "policy", + -13.86052703857422 + ], + [ + "▁elaborated", + -13.860604286193848 + ], + [ + "pable", + -13.860787391662598 + ], + [ + "$20", + -13.860796928405762 + ], + [ + "circ", + -13.860879898071287 + ], + [ + "CIO", + -13.860954284667969 + ], + [ + "violent", + -13.86097240447998 + ], + [ + "sina", + -13.86101245880127 + ], + [ + "Weekly", + -13.861054420471191 + ], + [ + "olite", + -13.861069679260254 + ], + [ + "▁fern", + -13.861218452453612 + ], + [ + "▁1964.", + -13.861310005187988 + ], + [ + "takes", + -13.861345291137695 + ], + [ + "Easter", + -13.861350059509276 + ], + [ + "ARCH", + -13.86135482788086 + ], + [ + "vore", + -13.861409187316896 + ], + [ + "whose", + -13.861421585083008 + ], + [ + "▁Ultrasound", + -13.86142921447754 + ], + [ + "Maintaining", + -13.861433029174805 + ], + [ + "generational", + -13.861449241638184 + ], + [ + "40)", + -13.861472129821776 + ], + [ + "216", + -13.861617088317873 + ], + [ + "▁slain", + -13.861647605895996 + ], + [ + "kool", + -13.861652374267578 + ], + [ + "▁caterer", + -13.86165714263916 + ], + [ + "clav", + -13.861873626708984 + ], + [ + "▁mosaics", + -13.862000465393066 + ], + [ + "▁FYI", + -13.86201286315918 + ], + [ + "▁inclusions", + -13.86205005645752 + ], + [ + "▁Parr", + -13.862085342407228 + ], + [ + "▁Shir", + -13.86209201812744 + ], + [ + "tard", + -13.862095832824709 + ], + [ + "980", + -13.862123489379885 + ], + [ + "hud", + -13.862183570861816 + ], + [ + "09)", + -13.862212181091309 + ], + [ + "Bone", + -13.862213134765623 + ], + [ + "AAS", + -13.862238883972168 + ], + [ + "▁Krista", + -13.86224365234375 + ], + [ + "▁steered", + -13.862255096435549 + ], + [ + "▁offside", + -13.862263679504396 + ], + [ + "amos", + -13.862295150756836 + ], + [ + "▁Cata", + -13.862322807312012 + ], + [ + "▁Accepted", + -13.862333297729492 + ], + [ + "▁Spears", + -13.862340927124023 + ], + [ + "656", + -13.862401008605955 + ], + [ + "reward", + -13.862452507019045 + ], + [ + "▁legislator", + -13.862499237060549 + ], + [ + "▁Bulldog", + -13.86253261566162 + ], + 
[ + "▁splinter", + -13.86265468597412 + ], + [ + "▁Attraction", + -13.862677574157717 + ], + [ + "▁Cracker", + -13.862712860107422 + ], + [ + "Leary", + -13.862785339355469 + ], + [ + "679", + -13.862822532653809 + ], + [ + "▁affectionately", + -13.862844467163086 + ], + [ + "lipid", + -13.862874984741213 + ], + [ + "▁Ethel", + -13.862885475158691 + ], + [ + "▁yea", + -13.86289882659912 + ], + [ + "▁dowel", + -13.862929344177246 + ], + [ + "▁(29", + -13.862944602966309 + ], + [ + "FSA", + -13.863077163696287 + ], + [ + "▁Priced", + -13.863080978393556 + ], + [ + "nv", + -13.863121032714844 + ], + [ + "▁Corners", + -13.863122940063477 + ], + [ + "ecker", + -13.863144874572754 + ], + [ + "ety", + -13.86317253112793 + ], + [ + "▁Excavator", + -13.863299369812012 + ], + [ + "▁McKinsey", + -13.863299369812012 + ], + [ + "▁Mykonos", + -13.863299369812012 + ], + [ + "▁subwoofer", + -13.863299369812012 + ], + [ + "▁Samurai", + -13.863300323486328 + ], + [ + "▁dribble", + -13.863301277160645 + ], + [ + "▁Turbine", + -13.863304138183594 + ], + [ + "▁Grenada", + -13.863308906555176 + ], + [ + "phr", + -13.863310813903809 + ], + [ + "▁Jarvis", + -13.863330841064451 + ], + [ + "▁Eurovision", + -13.863350868225098 + ], + [ + "▁Sermon", + -13.86335563659668 + ], + [ + "▁lifter", + -13.863409996032717 + ], + [ + "▁voltages", + -13.863430976867676 + ], + [ + "▁tainted", + -13.8634672164917 + ], + [ + "▁Belleville", + -13.863503456115724 + ], + [ + "▁abducted", + -13.863517761230469 + ], + [ + "▁Wilcox", + -13.863530158996582 + ], + [ + "▁Framed", + -13.86354923248291 + ], + [ + "▁Tomatoes", + -13.863587379455566 + ], + [ + "UNG", + -13.863603591918944 + ], + [ + "oman", + -13.863656997680664 + ], + [ + "▁nourishes", + -13.86366844177246 + ], + [ + "Organ", + -13.863670349121094 + ], + [ + "▁JEE", + -13.863677978515623 + ], + [ + "chio", + -13.863704681396484 + ], + [ + "stated", + -13.863707542419434 + ], + [ + "▁wading", + -13.863714218139648 + ], + [ + "dak", + -13.863736152648926 + ], + [ + "▁Unable", + -13.863832473754885 + ], + [ + "▁Cote", + -13.863840103149414 + ], + [ + "▁COOL", + -13.863873481750488 + ], + [ + "▁conveyors", + -13.863937377929688 + ], + [ + "notice", + -13.864022254943848 + ], + [ + "▁Cardiovascular", + -13.864048957824709 + ], + [ + "▁sleepless", + -13.864102363586426 + ], + [ + "Character", + -13.864154815673828 + ], + [ + "Marriage", + -13.864178657531738 + ], + [ + "▁inhibited", + -13.864184379577637 + ], + [ + "Assistant", + -13.86422061920166 + ], + [ + "▁Spiel", + -13.864248275756836 + ], + [ + "587", + -13.864272117614746 + ], + [ + "▁bodyguard", + -13.864274978637695 + ], + [ + "Activities", + -13.86428165435791 + ], + [ + "676", + -13.864339828491213 + ], + [ + "▁Failing", + -13.86434841156006 + ], + [ + "▁uniting", + -13.864356994628906 + ], + [ + "▁puffed", + -13.86435890197754 + ], + [ + "▁Mural", + -13.86439609527588 + ], + [ + "Ace", + -13.864402770996094 + ], + [ + "▁Segment", + -13.864511489868164 + ], + [ + "▁inlay", + -13.864590644836426 + ], + [ + "▁tween", + -13.86463737487793 + ], + [ + "▁triumphs", + -13.864670753479004 + ], + [ + "▁£1,", + -13.864680290222168 + ], + [ + "▁Ugh", + -13.864701271057127 + ], + [ + "ANN", + -13.864705085754396 + ], + [ + "▁Played", + -13.86471939086914 + ], + [ + "▁contouring", + -13.864771842956545 + ], + [ + "549", + -13.864912033081056 + ], + [ + "MTM", + -13.864946365356444 + ], + [ + "475", + -13.86495876312256 + ], + [ + "▁Nemo", + -13.864970207214355 + ], + [ + "▁Subjects", + -13.864991188049316 + ], + [ + "▁paneling", + 
-13.864995956420898 + ], + [ + "▁trailed", + -13.865042686462402 + ], + [ + "▁Tem", + -13.865081787109377 + ], + [ + "▁parted", + -13.865161895751951 + ], + [ + "wings", + -13.865224838256836 + ], + [ + "▁22\"", + -13.8652925491333 + ], + [ + "Rising", + -13.86544418334961 + ], + [ + "▁CSP", + -13.865445137023926 + ], + [ + "▁telegraph", + -13.865445137023926 + ], + [ + "▁Orb", + -13.865492820739746 + ], + [ + "▁acutely", + -13.865548133850098 + ], + [ + "▁Execution", + -13.865571975708008 + ], + [ + "▁Aloha", + -13.865602493286133 + ], + [ + "▁wick", + -13.865623474121094 + ], + [ + "▁2013;", + -13.865635871887209 + ], + [ + "Peel", + -13.865660667419434 + ], + [ + "RID", + -13.865677833557127 + ], + [ + "▁Seals", + -13.865740776062012 + ], + [ + "blower", + -13.865768432617188 + ], + [ + "▁pathogen", + -13.865777969360352 + ], + [ + "nese", + -13.865824699401855 + ], + [ + "▁Hau", + -13.86589241027832 + ], + [ + "▁backlight", + -13.865937232971191 + ], + [ + "▁Sponsorship", + -13.865939140319824 + ], + [ + "712", + -13.865973472595217 + ], + [ + "▁splitter", + -13.866039276123049 + ], + [ + "Kam", + -13.866065979003906 + ], + [ + "8:30", + -13.866071701049805 + ], + [ + "▁$1.7", + -13.866084098815918 + ], + [ + "▁outlay", + -13.866148948669434 + ], + [ + "▁Fixture", + -13.8661527633667 + ], + [ + "▁afterlife", + -13.86616039276123 + ], + [ + "nim", + -13.866205215454102 + ], + [ + "armed", + -13.866209030151367 + ], + [ + "269", + -13.86630630493164 + ], + [ + "nei", + -13.866310119628906 + ], + [ + "▁reheat", + -13.866389274597168 + ], + [ + "▁deactivate", + -13.866413116455078 + ], + [ + "▁hound", + -13.866442680358888 + ], + [ + "feng", + -13.866497039794922 + ], + [ + "179", + -13.866548538208008 + ], + [ + "▁LONDON", + -13.866557121276855 + ], + [ + "▁Lys", + -13.866583824157717 + ], + [ + "▁Medic", + -13.866601943969728 + ], + [ + "▁Shiv", + -13.866689682006836 + ], + [ + "▁Spence", + -13.86671257019043 + ], + [ + "▁diseased", + -13.866716384887695 + ], + [ + "▁briefs", + -13.866738319396973 + ], + [ + "▁Appointments", + -13.866868019104004 + ], + [ + "▁Barbecue", + -13.866896629333496 + ], + [ + "▁McGuire", + -13.866896629333496 + ], + [ + "▁dedicating", + -13.866896629333496 + ], + [ + "▁hydroponic", + -13.866896629333496 + ], + [ + "Collaborate", + -13.866898536682127 + ], + [ + "▁stagnation", + -13.866900444030762 + ], + [ + "▁palatable", + -13.866907119750977 + ], + [ + "▁astrological", + -13.86691188812256 + ], + [ + "▁psychosis", + -13.866912841796877 + ], + [ + "▁Kauai", + -13.866913795471191 + ], + [ + "▁lackluster", + -13.866934776306152 + ], + [ + "▁unspecified", + -13.866942405700684 + ], + [ + "▁Unicode", + -13.866948127746582 + ], + [ + "▁Cardio", + -13.866952896118164 + ], + [ + "▁Ridley", + -13.866960525512695 + ], + [ + "▁Scream", + -13.867035865783691 + ], + [ + "Tank", + -13.867048263549805 + ], + [ + "▁intruders", + -13.867057800292969 + ], + [ + "▁vertebrae", + -13.867070198059082 + ], + [ + "▁Jolla", + -13.86712646484375 + ], + [ + "axton", + -13.867148399353027 + ], + [ + "▁Durga", + -13.867149353027344 + ], + [ + "▁dearest", + -13.867149353027344 + ], + [ + "kap", + -13.867170333862305 + ], + [ + "▁Shy", + -13.86717128753662 + ], + [ + "Own", + -13.867173194885254 + ], + [ + "▁pleated", + -13.867194175720217 + ], + [ + "▁Sirius", + -13.86720085144043 + ], + [ + "▁Puffy", + -13.867230415344238 + ], + [ + "▁herring", + -13.867263793945312 + ], + [ + "▁relaxes", + -13.867264747619627 + ], + [ + "▁Ezekiel", + -13.867287635803224 + ], + [ + "▁820", + 
-13.867295265197754 + ], + [ + "▁capitalists", + -13.867362022399902 + ], + [ + "enough", + -13.867387771606444 + ], + [ + "LIE", + -13.86744213104248 + ], + [ + "▁Chlor", + -13.867576599121094 + ], + [ + "obe", + -13.867613792419434 + ], + [ + "▁Aba", + -13.8677396774292 + ], + [ + "gable", + -13.86778736114502 + ], + [ + "▁Moves", + -13.867799758911133 + ], + [ + "Alcohol", + -13.86782932281494 + ], + [ + "▁Xero", + -13.86785125732422 + ], + [ + "circuit", + -13.867854118347168 + ], + [ + "Elegant", + -13.86787223815918 + ], + [ + "Chrome", + -13.867878913879396 + ], + [ + "Football", + -13.867891311645508 + ], + [ + "▁Freeway", + -13.867891311645508 + ], + [ + "70)", + -13.867894172668455 + ], + [ + "▁FSA", + -13.867895126342772 + ], + [ + "9,900", + -13.867907524108888 + ], + [ + "Katie", + -13.86797046661377 + ], + [ + "Lucky", + -13.868014335632324 + ], + [ + "665", + -13.868037223815918 + ], + [ + "wha", + -13.868063926696776 + ], + [ + "▁bru", + -13.868133544921877 + ], + [ + "▁Croc", + -13.86816692352295 + ], + [ + "rks", + -13.868215560913086 + ], + [ + "▁Beckett", + -13.868219375610352 + ], + [ + "▁Cana", + -13.868230819702148 + ], + [ + "▁hoof", + -13.868243217468262 + ], + [ + "▁2600", + -13.868245124816896 + ], + [ + "455", + -13.868294715881348 + ], + [ + "▁interrupting", + -13.868313789367676 + ], + [ + "▁glanced", + -13.86834716796875 + ], + [ + "opolis", + -13.868363380432127 + ], + [ + "Growth", + -13.868393898010254 + ], + [ + "▁#0", + -13.868395805358888 + ], + [ + "▁Garc", + -13.868401527404783 + ], + [ + "▁poisoned", + -13.868409156799316 + ], + [ + "▁bathed", + -13.868420600891112 + ], + [ + "sip", + -13.868430137634276 + ], + [ + "osity", + -13.868431091308594 + ], + [ + "Throw", + -13.868440628051758 + ], + [ + "090", + -13.86844539642334 + ], + [ + "▁tableware", + -13.868464469909668 + ], + [ + "sounds", + -13.868467330932615 + ], + [ + "▁branched", + -13.86854648590088 + ], + [ + "/23", + -13.86856460571289 + ], + [ + "▁Pyr", + -13.868603706359863 + ], + [ + "▁Sponge", + -13.868607521057127 + ], + [ + "UGH", + -13.868806838989258 + ], + [ + "▁1851", + -13.868812561035156 + ], + [ + "▁earners", + -13.868856430053713 + ], + [ + "▁PEP", + -13.868929862976074 + ], + [ + "amino", + -13.86893367767334 + ], + [ + "XT", + -13.868940353393556 + ], + [ + "▁GPs", + -13.868951797485352 + ], + [ + "▁factored", + -13.869006156921388 + ], + [ + "▁amputation", + -13.869041442871094 + ], + [ + "328", + -13.869068145751951 + ], + [ + "▁Capp", + -13.869105339050291 + ], + [ + "▁skips", + -13.86910629272461 + ], + [ + "▁Rockstar", + -13.869189262390137 + ], + [ + "▁carcinogen", + -13.869195938110352 + ], + [ + "681", + -13.869218826293944 + ], + [ + "irs", + -13.86922550201416 + ], + [ + "▁swiss", + -13.869257926940918 + ], + [ + ",750", + -13.86927318572998 + ], + [ + "▁opposes", + -13.869318008422852 + ], + [ + "ogy", + -13.869324684143066 + ], + [ + "▁Cort", + -13.86935329437256 + ], + [ + "▁fieldwork", + -13.869379043579102 + ], + [ + "▁CARD", + -13.86938190460205 + ], + [ + "resources", + -13.869465827941896 + ], + [ + "▁bunkers", + -13.86952304840088 + ], + [ + "▁Faucets", + -13.869543075561523 + ], + [ + "hog", + -13.869668960571287 + ], + [ + "Bri", + -13.869807243347168 + ], + [ + "95.", + -13.86981201171875 + ], + [ + "rese", + -13.869813919067385 + ], + [ + "▁frustrate", + -13.869906425476074 + ], + [ + "▁hardwoods", + -13.86993408203125 + ], + [ + "▁CIT", + -13.869967460632324 + ], + [ + "▁putter", + -13.870027542114258 + ], + [ + "▁1820", + -13.870039939880373 + ], + [ 
+ "lson", + -13.870071411132812 + ], + [ + "SPORT", + -13.870155334472656 + ], + [ + "▁ISSN", + -13.870156288146973 + ], + [ + "bahn", + -13.870162010192873 + ], + [ + "QR", + -13.870238304138184 + ], + [ + "▁castings", + -13.870241165161133 + ], + [ + "▁staunch", + -13.870265007019045 + ], + [ + "▁blacklist", + -13.87027072906494 + ], + [ + "▁cornerback", + -13.870343208312988 + ], + [ + "▁navigated", + -13.87037467956543 + ], + [ + "▁Lehman", + -13.870415687561035 + ], + [ + "arri", + -13.870427131652832 + ], + [ + "▁Constellation", + -13.870506286621094 + ], + [ + "▁bigotry", + -13.870506286621094 + ], + [ + "▁conundrum", + -13.870506286621094 + ], + [ + "▁juniper", + -13.870506286621094 + ], + [ + "▁observatory", + -13.870506286621094 + ], + [ + "▁fictitious", + -13.870511054992676 + ], + [ + "▁kudos", + -13.870518684387209 + ], + [ + "▁feces", + -13.870519638061523 + ], + [ + "▁shipwreck", + -13.870525360107422 + ], + [ + "▁Epstein", + -13.870526313781738 + ], + [ + "▁Hove", + -13.870537757873535 + ], + [ + "▁Imran", + -13.87055778503418 + ], + [ + "▁Jinping", + -13.87055778503418 + ], + [ + "▁amounting", + -13.870583534240724 + ], + [ + "▁Behaviour", + -13.870619773864746 + ], + [ + "▁Diaries", + -13.87062644958496 + ], + [ + "▁unimportant", + -13.870668411254885 + ], + [ + "▁Bodies", + -13.870675086975098 + ], + [ + "▁bakeries", + -13.870682716369627 + ], + [ + "cour", + -13.870685577392578 + ], + [ + "▁Hinduism", + -13.87070083618164 + ], + [ + "icus", + -13.870758056640623 + ], + [ + "▁milder", + -13.870858192443848 + ], + [ + "▁retelling", + -13.870889663696287 + ], + [ + "▁==", + -13.870894432067873 + ], + [ + "▁scum", + -13.871000289916992 + ], + [ + "▁Whisper", + -13.871027946472168 + ], + [ + "▁Gayle", + -13.871028900146484 + ], + [ + "gami", + -13.8710355758667 + ], + [ + "▁Mort", + -13.871094703674316 + ], + [ + "fd", + -13.87112522125244 + ], + [ + "baugh", + -13.871136665344238 + ], + [ + "▁MTB", + -13.871222496032717 + ], + [ + "▁Koe", + -13.871243476867676 + ], + [ + "▁proclaiming", + -13.87126922607422 + ], + [ + "traction", + -13.871272087097168 + ], + [ + "▁Anil", + -13.871308326721191 + ], + [ + "▁Davey", + -13.871345520019531 + ], + [ + "▁Alfredo", + -13.87135410308838 + ], + [ + "▁rigor", + -13.87137222290039 + ], + [ + "bust", + -13.871377944946287 + ], + [ + "requisite", + -13.87143611907959 + ], + [ + "▁diabetics", + -13.871451377868652 + ], + [ + "▁ESR", + -13.871487617492676 + ], + [ + "▁MTS", + -13.871501922607422 + ], + [ + "▁MULTI", + -13.8715238571167 + ], + [ + "Certificate", + -13.871564865112305 + ], + [ + "Families", + -13.871573448181152 + ], + [ + "Loving", + -13.87158203125 + ], + [ + "Memory", + -13.871591567993164 + ], + [ + "591", + -13.871599197387695 + ], + [ + "antes", + -13.871623992919922 + ], + [ + "lke", + -13.871646881103516 + ], + [ + "schaft", + -13.87165641784668 + ], + [ + "deen", + -13.871699333190918 + ], + [ + "7.3", + -13.871707916259766 + ], + [ + "▁canyons", + -13.87187957763672 + ], + [ + "shing", + -13.872002601623535 + ], + [ + "▁hailing", + -13.872031211853027 + ], + [ + "generally", + -13.872078895568848 + ], + [ + "▁bungalows", + -13.87210464477539 + ], + [ + "▁Andres", + -13.872152328491213 + ], + [ + "▁7.8", + -13.872177124023438 + ], + [ + "▁Camper", + -13.872199058532717 + ], + [ + "hw", + -13.872201919555664 + ], + [ + "corporate", + -13.87220859527588 + ], + [ + "isy", + -13.872276306152344 + ], + [ + "▁Coles", + -13.872291564941406 + ], + [ + "▁pips", + -13.87231159210205 + ], + [ + "▁Penh", + -13.872340202331545 + 
], + [ + "Doesn", + -13.872376441955566 + ], + [ + "374", + -13.87238311767578 + ], + [ + "rzy", + -13.872462272644045 + ], + [ + "Hare", + -13.872472763061523 + ], + [ + "Jar", + -13.872477531433104 + ], + [ + "▁kor", + -13.87252140045166 + ], + [ + "▁inbuilt", + -13.872522354125977 + ], + [ + "buch", + -13.872536659240724 + ], + [ + "▁letterhead", + -13.872640609741213 + ], + [ + "▁Koz", + -13.872642517089844 + ], + [ + "▁Lips", + -13.872747421264648 + ], + [ + "agua", + -13.87281322479248 + ], + [ + "kho", + -13.872842788696287 + ], + [ + "cakes", + -13.872870445251465 + ], + [ + "Oriented", + -13.872886657714844 + ], + [ + "▁conquering", + -13.87298583984375 + ], + [ + "▁Assemble", + -13.872998237609863 + ], + [ + "▁10:1", + -13.873126029968262 + ], + [ + "▁1960’", + -13.873132705688477 + ], + [ + "migo", + -13.873241424560549 + ], + [ + "▁Buffer", + -13.873265266418455 + ], + [ + "hotmail", + -13.873306274414062 + ], + [ + "bz", + -13.873376846313477 + ], + [ + "gender", + -13.873383522033691 + ], + [ + "▁Dab", + -13.873406410217283 + ], + [ + "▁Chim", + -13.873407363891602 + ], + [ + "▁Koi", + -13.87342357635498 + ], + [ + "uwa", + -13.873427391052246 + ], + [ + "▁Poets", + -13.873449325561523 + ], + [ + "▁sighted", + -13.873468399047852 + ], + [ + "▁burr", + -13.873485565185549 + ], + [ + "▁unfit", + -13.873501777648926 + ], + [ + "Electro", + -13.873517036437988 + ], + [ + "sticks", + -13.873523712158203 + ], + [ + "▁dazzle", + -13.873539924621582 + ], + [ + "▁despise", + -13.873555183410645 + ], + [ + "▁Antoine", + -13.87360382080078 + ], + [ + "apache", + -13.87373161315918 + ], + [ + "▁Influencer", + -13.873766899108888 + ], + [ + "▁warden", + -13.873777389526367 + ], + [ + "ADD", + -13.873778343200684 + ], + [ + "▁Vendors", + -13.87388515472412 + ], + [ + "▁paediatric", + -13.87390422821045 + ], + [ + "▁physio", + -13.873995780944824 + ], + [ + "822", + -13.874004364013672 + ], + [ + "iglia", + -13.874075889587402 + ], + [ + "▁Gymnastics", + -13.87412929534912 + ], + [ + "▁Khalifa", + -13.87412929534912 + ], + [ + "▁virtuous", + -13.87412929534912 + ], + [ + "▁Mubarak", + -13.874130249023438 + ], + [ + "▁Nutcracker", + -13.874130249023438 + ], + [ + "▁Okanagan", + -13.874130249023438 + ], + [ + "▁Wreath", + -13.874130249023438 + ], + [ + "▁Frazier", + -13.874134063720703 + ], + [ + "▁paternity", + -13.874134063720703 + ], + [ + "▁underpinned", + -13.874138832092283 + ], + [ + "▁symptomatic", + -13.87414264678955 + ], + [ + "▁Falmouth", + -13.874144554138184 + ], + [ + "▁dimming", + -13.874155044555664 + ], + [ + "▁spandex", + -13.874157905578612 + ], + [ + "ORY", + -13.87416172027588 + ], + [ + "▁manoeuvre", + -13.874167442321776 + ], + [ + "▁Covington", + -13.874175071716309 + ], + [ + "▁Contour", + -13.874191284179688 + ], + [ + "▁Lago", + -13.87419605255127 + ], + [ + "▁gelato", + -13.874202728271484 + ], + [ + "▁Lipstick", + -13.8742036819458 + ], + [ + "▁cardiologist", + -13.874267578125 + ], + [ + "▁consultative", + -13.874268531799316 + ], + [ + "▁radiotherapy", + -13.874275207519531 + ], + [ + "▁Fitch", + -13.874276161193848 + ], + [ + "▁dystopian", + -13.874289512634276 + ], + [ + "▁Foo", + -13.874296188354492 + ], + [ + "▁blight", + -13.874302864074709 + ], + [ + "▁Hayley", + -13.874324798583984 + ], + [ + "lary", + -13.874350547790527 + ], + [ + "▁engulfed", + -13.87436580657959 + ], + [ + "▁scour", + -13.87436580657959 + ], + [ + "▁Happens", + -13.874390602111816 + ], + [ + "reviews", + -13.874393463134766 + ], + [ + "▁drab", + -13.874411582946776 + ], + [ + 
"▁Cellar", + -13.874431610107422 + ], + [ + "259", + -13.874467849731444 + ], + [ + "▁infestations", + -13.874553680419922 + ], + [ + "adam", + -13.874614715576172 + ], + [ + "▁webbing", + -13.874703407287598 + ], + [ + "Attend", + -13.874730110168455 + ], + [ + "▁1970.", + -13.874732971191406 + ], + [ + "yc", + -13.874866485595703 + ], + [ + "▁lax", + -13.874881744384766 + ], + [ + "▁unclean", + -13.874910354614258 + ], + [ + "▁PCA", + -13.874924659729004 + ], + [ + "▁minimalism", + -13.874985694885254 + ], + [ + "▁Nach", + -13.875045776367188 + ], + [ + "athlete", + -13.875160217285156 + ], + [ + "▁sweeten", + -13.875174522399902 + ], + [ + "Lets", + -13.875195503234863 + ], + [ + "▁splashes", + -13.875266075134276 + ], + [ + "▁Vai", + -13.87529182434082 + ], + [ + "Arizona", + -13.87529754638672 + ], + [ + "Swiss", + -13.875300407409668 + ], + [ + "jas", + -13.875336647033691 + ], + [ + "xes", + -13.87533950805664 + ], + [ + "▁Pallet", + -13.875386238098145 + ], + [ + "Investment", + -13.87542724609375 + ], + [ + "lford", + -13.875441551208496 + ], + [ + "▁calmed", + -13.875463485717772 + ], + [ + "▁RIM", + -13.875494003295898 + ], + [ + "▁thi", + -13.875508308410645 + ], + [ + "lapping", + -13.875618934631348 + ], + [ + "truction", + -13.875629425048828 + ], + [ + "▁traitor", + -13.87563705444336 + ], + [ + "▁FICO", + -13.875686645507812 + ], + [ + "gid", + -13.875794410705566 + ], + [ + "▁Pellet", + -13.875804901123049 + ], + [ + "▁70'", + -13.875855445861816 + ], + [ + "▁DEV", + -13.875855445861816 + ], + [ + "▁bathtubs", + -13.875860214233398 + ], + [ + "▁booting", + -13.875950813293455 + ], + [ + "▁UDP", + -13.875988006591797 + ], + [ + "▁Kira", + -13.87605094909668 + ], + [ + "CEP", + -13.876069068908691 + ], + [ + "-101", + -13.876138687133787 + ], + [ + "▁revisiting", + -13.876181602478027 + ], + [ + "ween", + -13.876192092895508 + ], + [ + "hhhh", + -13.876258850097656 + ], + [ + "consider", + -13.876418113708496 + ], + [ + "▁Refrigerate", + -13.876425743103027 + ], + [ + "▁Quo", + -13.876479148864746 + ], + [ + "▁Jumping", + -13.876522064208984 + ], + [ + "mega", + -13.876739501953123 + ], + [ + "▁Wally", + -13.87677001953125 + ], + [ + "▁Dietary", + -13.876801490783691 + ], + [ + "DIR", + -13.87686252593994 + ], + [ + "▁Smell", + -13.876871109008787 + ], + [ + "017", + -13.876893043518066 + ], + [ + "▁Cog", + -13.876911163330078 + ], + [ + "▁motoring", + -13.876933097839355 + ], + [ + "▁Daley", + -13.876935005187988 + ], + [ + "▁preface", + -13.87700080871582 + ], + [ + "iru", + -13.877063751220703 + ], + [ + "▁Anastasia", + -13.87707805633545 + ], + [ + "▁availed", + -13.87708854675293 + ], + [ + "▁AIS", + -13.877127647399902 + ], + [ + "▁constrain", + -13.877154350280762 + ], + [ + "▁KX", + -13.87736701965332 + ], + [ + "▁ringtone", + -13.87740421295166 + ], + [ + "acho", + -13.877461433410645 + ], + [ + "enger", + -13.877482414245604 + ], + [ + "ubbing", + -13.87748908996582 + ], + [ + "▁Clar", + -13.87751293182373 + ], + [ + "drome", + -13.877516746520996 + ], + [ + "BV", + -13.87763214111328 + ], + [ + "▁Tasty", + -13.877730369567873 + ], + [ + "▁Olympian", + -13.8777437210083 + ], + [ + "▁Asylum", + -13.877765655517578 + ], + [ + "▁Vodka", + -13.877765655517578 + ], + [ + "▁contemplative", + -13.877765655517578 + ], + [ + "▁desiring", + -13.877765655517578 + ], + [ + "▁resiliency", + -13.877765655517578 + ], + [ + "▁Laureate", + -13.877766609191896 + ], + [ + "▁mandarin", + -13.877769470214844 + ], + [ + "▁monastic", + -13.87777042388916 + ], + [ + "491", + 
-13.877775192260742 + ], + [ + "▁Vedic", + -13.877781867980955 + ], + [ + "▁Retriever", + -13.877782821655272 + ], + [ + "tp", + -13.87778663635254 + ], + [ + "dang", + -13.877790451049805 + ], + [ + "rmi", + -13.877851486206056 + ], + [ + "▁Forester", + -13.87786102294922 + ], + [ + "▁Colbert", + -13.877872467041016 + ], + [ + "alam", + -13.87790584564209 + ], + [ + "-2007", + -13.87790870666504 + ], + [ + "▁instructive", + -13.87791347503662 + ], + [ + "▁repeater", + -13.877915382385254 + ], + [ + "▁punishable", + -13.877965927124023 + ], + [ + "▁bullies", + -13.877967834472656 + ], + [ + "▁Kimball", + -13.877969741821287 + ], + [ + "▁spe", + -13.878073692321776 + ], + [ + "▁Blueberry", + -13.878084182739258 + ], + [ + "▁Capitals", + -13.87810516357422 + ], + [ + "▁Hartley", + -13.87810516357422 + ], + [ + "▁plucked", + -13.878125190734863 + ], + [ + "▁415", + -13.878188133239746 + ], + [ + "hman", + -13.878239631652832 + ], + [ + "▁foxes", + -13.878293991088867 + ], + [ + "169", + -13.87833309173584 + ], + [ + "Nav", + -13.87834644317627 + ], + [ + "Gem", + -13.878366470336914 + ], + [ + "challenge", + -13.878413200378418 + ], + [ + "607", + -13.878422737121582 + ], + [ + "Experienced", + -13.878429412841797 + ], + [ + "▁Vibration", + -13.87843418121338 + ], + [ + "▁mailboxes", + -13.878435134887695 + ], + [ + "▁takeoff", + -13.878478050231934 + ], + [ + "▁Soundtrack", + -13.8784818649292 + ], + [ + "▁Daryl", + -13.878521919250488 + ], + [ + "▁Zumba", + -13.87852668762207 + ], + [ + "▁Perfection", + -13.878592491149902 + ], + [ + "▁Bankers", + -13.878697395324709 + ], + [ + "culo", + -13.878790855407717 + ], + [ + "▁Signals", + -13.878811836242676 + ], + [ + "awn", + -13.87881565093994 + ], + [ + "▁281", + -13.878870964050291 + ], + [ + "$100", + -13.87891674041748 + ], + [ + "Liquid", + -13.878985404968262 + ], + [ + "bent", + -13.879010200500488 + ], + [ + "Combining", + -13.879014015197754 + ], + [ + "detached", + -13.879023551940918 + ], + [ + "Failure", + -13.87902545928955 + ], + [ + "molecular", + -13.87902545928955 + ], + [ + "corruption", + -13.87903881072998 + ], + [ + "Graham", + -13.879042625427246 + ], + [ + "iw", + -13.87906551361084 + ], + [ + "▁creases", + -13.879072189331056 + ], + [ + "pix", + -13.879084587097168 + ], + [ + "▁Hume", + -13.879094123840332 + ], + [ + "▁Schle", + -13.879100799560549 + ], + [ + "▁Scheduled", + -13.879111289978027 + ], + [ + "▁skier", + -13.87913417816162 + ], + [ + "recommended", + -13.879170417785645 + ], + [ + "Historically", + -13.879172325134276 + ], + [ + "▁$6,000", + -13.879258155822754 + ], + [ + "▁Lep", + -13.879260063171388 + ], + [ + "copper", + -13.879342079162598 + ], + [ + "▁BCE", + -13.87937068939209 + ], + [ + "▁clamping", + -13.879372596740724 + ], + [ + "▁judgements", + -13.879395484924316 + ], + [ + "▁hammers", + -13.879396438598633 + ], + [ + "▁thoracic", + -13.879402160644531 + ], + [ + "packaged", + -13.879412651062012 + ], + [ + "gina", + -13.879426956176758 + ], + [ + "▁collapses", + -13.87950325012207 + ], + [ + "▁Racer", + -13.879508972167969 + ], + [ + "▁aunts", + -13.879534721374512 + ], + [ + "▁neonatal", + -13.87956714630127 + ], + [ + "▁Loader", + -13.879602432250977 + ], + [ + "▁NGC", + -13.879652976989746 + ], + [ + "▁SAY", + -13.879698753356934 + ], + [ + "▁THAN", + -13.879752159118652 + ], + [ + "▁soundtracks", + -13.879813194274902 + ], + [ + "transport", + -13.879868507385254 + ], + [ + "▁SAME", + -13.87989616394043 + ], + [ + "▁ETC", + -13.879901885986328 + ], + [ + "Breaking", + -13.879948616027832 + 
], + [ + "▁Aki", + -13.879975318908691 + ], + [ + "▁electrolyte", + -13.879976272583008 + ], + [ + "▁whitepaper", + -13.87997817993164 + ], + [ + "▁Vive", + -13.880016326904297 + ], + [ + "▁oracle", + -13.880032539367676 + ], + [ + "shita", + -13.88016414642334 + ], + [ + "▁redesigning", + -13.880203247070312 + ], + [ + "▁greets", + -13.880316734313965 + ], + [ + "Romans", + -13.880331993103027 + ], + [ + "wav", + -13.880365371704102 + ], + [ + "▁Lec", + -13.880488395690918 + ], + [ + "▁2009)", + -13.880522727966309 + ], + [ + "planet", + -13.880542755126951 + ], + [ + "accessible", + -13.880650520324709 + ], + [ + "Authors", + -13.88065242767334 + ], + [ + "csv", + -13.880659103393556 + ], + [ + "▁Vall", + -13.880661010742188 + ], + [ + "▁Nay", + -13.8807373046875 + ], + [ + "lama", + -13.880752563476562 + ], + [ + "sica", + -13.880756378173828 + ], + [ + "▁comforters", + -13.880823135375977 + ], + [ + "▁LIST", + -13.880925178527832 + ], + [ + "unity", + -13.881103515625 + ], + [ + "▁226", + -13.881118774414062 + ], + [ + "7′′", + -13.881166458129885 + ], + [ + "▁eloquent", + -13.881173133850098 + ], + [ + "▁Kaufman", + -13.881190299987791 + ], + [ + "▁Edmonds", + -13.881329536437988 + ], + [ + "icious", + -13.881336212158203 + ], + [ + "▁Wren", + -13.881392478942873 + ], + [ + "NEL", + -13.881402015686035 + ], + [ + "731", + -13.88140869140625 + ], + [ + "▁embargo", + -13.881413459777832 + ], + [ + "▁Chihuahua", + -13.881415367126465 + ], + [ + "▁dainty", + -13.881415367126465 + ], + [ + "▁Pyongyang", + -13.88141632080078 + ], + [ + "▁Nifty", + -13.881421089172363 + ], + [ + "▁centric", + -13.88142204284668 + ], + [ + "▁valuing", + -13.881425857543944 + ], + [ + "▁Kimberley", + -13.881430625915527 + ], + [ + "▁giddy", + -13.88143253326416 + ], + [ + "▁polluting", + -13.88143253326416 + ], + [ + "▁emulation", + -13.881440162658691 + ], + [ + "▁imposition", + -13.881454467773438 + ], + [ + "▁Vortex", + -13.881471633911133 + ], + [ + "▁$59", + -13.881481170654297 + ], + [ + "▁EVEN", + -13.881525993347168 + ], + [ + "▁Waffle", + -13.88153839111328 + ], + [ + "▁Armani", + -13.881548881530762 + ], + [ + "▁curio", + -13.881548881530762 + ], + [ + "rust", + -13.881609916687012 + ], + [ + "▁Doodle", + -13.881689071655272 + ], + [ + "cce", + -13.8817138671875 + ], + [ + "aque", + -13.88171672821045 + ], + [ + "▁lizards", + -13.881749153137209 + ], + [ + "▁mexico", + -13.881784439086914 + ], + [ + "▁233", + -13.881818771362305 + ], + [ + "▁heyday", + -13.881857872009276 + ], + [ + "▁silverware", + -13.881879806518556 + ], + [ + "▁cryptic", + -13.881887435913086 + ], + [ + "elon", + -13.88197422027588 + ], + [ + "▁Coul", + -13.882004737854004 + ], + [ + "▁Diagnostics", + -13.88200569152832 + ], + [ + "Reader", + -13.882018089294434 + ], + [ + "1,500", + -13.882022857666016 + ], + [ + "▁keenly", + -13.88202953338623 + ], + [ + "hearing", + -13.88204574584961 + ], + [ + "▁swirls", + -13.882070541381836 + ], + [ + "7.50", + -13.882076263427734 + ], + [ + "418", + -13.88211727142334 + ], + [ + "-1/2\"", + -13.882184982299805 + ], + [ + "azar", + -13.88221836090088 + ], + [ + "▁Considered", + -13.88222885131836 + ], + [ + "▁proto", + -13.882238388061523 + ], + [ + "▁deviations", + -13.882304191589355 + ], + [ + "▁Kamal", + -13.882314682006836 + ], + [ + "▁46%", + -13.882331848144531 + ], + [ + "▁Syl", + -13.882376670837402 + ], + [ + "▁5-4", + -13.882378578186035 + ], + [ + "states", + -13.882389068603516 + ], + [ + "Leg", + -13.882418632507324 + ], + [ + "ODA", + -13.882463455200195 + ], + [ + 
"▁Freeport", + -13.88251495361328 + ], + [ + "QL", + -13.882516860961914 + ], + [ + "▁888-3", + -13.882582664489746 + ], + [ + "bab", + -13.882586479187012 + ], + [ + "▁Swanson", + -13.88260269165039 + ], + [ + "Bass", + -13.882634162902832 + ], + [ + "▁Bunch", + -13.882665634155272 + ], + [ + "Analyse", + -13.882684707641602 + ], + [ + "▁rung", + -13.882741928100586 + ], + [ + "Columbia", + -13.882772445678713 + ], + [ + "▁minibar", + -13.882852554321287 + ], + [ + "▁Grout", + -13.882866859436035 + ], + [ + "ummer", + -13.882867813110352 + ], + [ + "crack", + -13.882895469665527 + ], + [ + "scr", + -13.883037567138672 + ], + [ + "AWS", + -13.88306713104248 + ], + [ + "▁reunions", + -13.88310432434082 + ], + [ + "58.", + -13.883123397827148 + ], + [ + "▁fenders", + -13.883123397827148 + ], + [ + "university", + -13.883126258850098 + ], + [ + "▁Proceeds", + -13.88312816619873 + ], + [ + "▁Guer", + -13.883179664611816 + ], + [ + "▁bobs", + -13.883193969726562 + ], + [ + "▁multifunctional", + -13.883195877075195 + ], + [ + "teaching", + -13.883209228515623 + ], + [ + "▁Shal", + -13.883326530456545 + ], + [ + "▁Bok", + -13.88332748413086 + ], + [ + "VDC", + -13.883338928222656 + ], + [ + "Liz", + -13.88336181640625 + ], + [ + "▁Thirteen", + -13.883374214172363 + ], + [ + "▁precast", + -13.883381843566896 + ], + [ + "bbie", + -13.883482933044434 + ], + [ + "▁$43", + -13.883520126342772 + ], + [ + "▁Coop", + -13.883533477783203 + ], + [ + "NBA", + -13.883622169494627 + ], + [ + "▁SIL", + -13.883644104003906 + ], + [ + "▁2.5%", + -13.883688926696776 + ], + [ + "tika", + -13.88377571105957 + ], + [ + "▁Pouch", + -13.883888244628906 + ], + [ + "cw", + -13.883951187133787 + ], + [ + "▁contactless", + -13.88396167755127 + ], + [ + "▁Bloggers", + -13.8839693069458 + ], + [ + "▁transistors", + -13.883997917175291 + ], + [ + ".............", + -13.884000778198242 + ], + [ + "atology", + -13.884040832519531 + ], + [ + "raki", + -13.88410758972168 + ], + [ + "▁embellish", + -13.884137153625488 + ], + [ + "fina", + -13.884322166442873 + ], + [ + "6,500", + -13.884339332580566 + ], + [ + "vation", + -13.884385108947754 + ], + [ + "▁Dancers", + -13.884403228759766 + ], + [ + "Rad", + -13.88442611694336 + ], + [ + "937", + -13.884446144104004 + ], + [ + "nach", + -13.8844575881958 + ], + [ + "▁oblige", + -13.884481430053713 + ], + [ + "▁ridicule", + -13.88448429107666 + ], + [ + "Shirts", + -13.884506225585938 + ], + [ + "laid", + -13.884507179260254 + ], + [ + "▁1961,", + -13.884631156921388 + ], + [ + "▁stimulant", + -13.884675979614258 + ], + [ + "Draft", + -13.88469696044922 + ], + [ + "▁wikipedia", + -13.884703636169434 + ], + [ + "▁bodied", + -13.884706497192385 + ], + [ + "▁bowler", + -13.884756088256836 + ], + [ + "403", + -13.88476276397705 + ], + [ + "NIT", + -13.884766578674316 + ], + [ + "▁unilateral", + -13.884840965270996 + ], + [ + "▁samba", + -13.884872436523438 + ], + [ + "▁Cubic", + -13.884920120239258 + ], + [ + "▁leaping", + -13.88492202758789 + ], + [ + "forget", + -13.884930610656738 + ], + [ + "bala", + -13.885035514831545 + ], + [ + "▁Ponte", + -13.88504123687744 + ], + [ + "▁Linguistics", + -13.88507843017578 + ], + [ + "▁Wolverhampton", + -13.88507843017578 + ], + [ + "▁bountiful", + -13.88507843017578 + ], + [ + "▁fluctuating", + -13.88507843017578 + ], + [ + "▁guarantor", + -13.88507843017578 + ], + [ + "▁paranoia", + -13.88507843017578 + ], + [ + "autobiographical", + -13.885079383850098 + ], + [ + "▁mogul", + -13.885088920593262 + ], + [ + "▁refractive", + -13.885089874267578 + 
], + [ + "▁Seuss", + -13.88509464263916 + ], + [ + "Seat", + -13.88510799407959 + ], + [ + "▁aboriginal", + -13.88511562347412 + ], + [ + "▁BAS", + -13.88523769378662 + ], + [ + "▁exclaimed", + -13.88530158996582 + ], + [ + "ambu", + -13.885332107543944 + ], + [ + "▁pinball", + -13.88533878326416 + ], + [ + "▁Cochrane", + -13.885340690612791 + ], + [ + "▁overshadowed", + -13.885357856750488 + ], + [ + "▁RISK", + -13.88537311553955 + ], + [ + "▁storylines", + -13.885379791259766 + ], + [ + "fection", + -13.885393142700195 + ], + [ + "▁Micah", + -13.885395050048828 + ], + [ + "▁PCM", + -13.885395050048828 + ], + [ + "bogged", + -13.885408401489258 + ], + [ + "▁Hors", + -13.885415077209473 + ], + [ + "sofa", + -13.88542366027832 + ], + [ + "▁surpasses", + -13.88542366027832 + ], + [ + "ographer", + -13.885428428649902 + ], + [ + "▁jan", + -13.885497093200684 + ], + [ + "protect", + -13.885571479797363 + ], + [ + "▁sectarian", + -13.885573387145996 + ], + [ + "▁Worry", + -13.885621070861816 + ], + [ + "Route", + -13.885708808898926 + ], + [ + "▁Nutritional", + -13.885744094848633 + ], + [ + "▁conserved", + -13.885797500610352 + ], + [ + "▁tacky", + -13.885807037353516 + ], + [ + "▁Utica", + -13.885825157165527 + ], + [ + "9\"", + -13.88596248626709 + ], + [ + "▁Hydrogen", + -13.885976791381836 + ], + [ + "▁pathological", + -13.88602352142334 + ], + [ + "▁Transparent", + -13.886218070983888 + ], + [ + "▁assistive", + -13.886244773864746 + ], + [ + "▁Johannes", + -13.886259078979492 + ], + [ + "▁TAR", + -13.88632869720459 + ], + [ + "taste", + -13.886384963989258 + ], + [ + "▁sketchbook", + -13.886385917663574 + ], + [ + "▁uttered", + -13.886396408081056 + ], + [ + "30)", + -13.8864164352417 + ], + [ + "oran", + -13.886465072631836 + ], + [ + "▁Mila", + -13.886489868164062 + ], + [ + "Assuming", + -13.886507034301758 + ], + [ + "approval", + -13.886510848999023 + ], + [ + "▁Hotline", + -13.886523246765137 + ], + [ + "mino", + -13.886567115783691 + ], + [ + "▁Lyric", + -13.886590003967283 + ], + [ + "▁hippo", + -13.886659622192385 + ], + [ + "▁outfielder", + -13.88667106628418 + ], + [ + "Winning", + -13.886688232421877 + ], + [ + "▁Shear", + -13.886690139770508 + ], + [ + "▁cached", + -13.886723518371582 + ], + [ + "uzzi", + -13.886812210083008 + ], + [ + "▁251", + -13.886826515197754 + ], + [ + "▁2016:", + -13.88687515258789 + ], + [ + "Dinner", + -13.88691234588623 + ], + [ + "▁Francesca", + -13.88691234588623 + ], + [ + "▁UNITED", + -13.887060165405272 + ], + [ + "Vice", + -13.887076377868652 + ], + [ + "▁680", + -13.887093544006348 + ], + [ + "▁drizzled", + -13.887115478515623 + ], + [ + "▁970", + -13.887120246887209 + ], + [ + "anc", + -13.88716983795166 + ], + [ + "965", + -13.887178421020508 + ], + [ + "▁nouns", + -13.88718032836914 + ], + [ + "someone", + -13.887181282043455 + ], + [ + "KF", + -13.887242317199709 + ], + [ + "spend", + -13.88730812072754 + ], + [ + "▁Checker", + -13.88731575012207 + ], + [ + "▁prejudices", + -13.887321472167969 + ], + [ + "▁undertones", + -13.887346267700195 + ], + [ + "vish", + -13.887351989746094 + ], + [ + "▁EACH", + -13.887369155883787 + ], + [ + "▁invert", + -13.887417793273926 + ], + [ + "▁normalized", + -13.887418746948242 + ], + [ + "tram", + -13.887494087219238 + ], + [ + "hout", + -13.887547492980955 + ], + [ + "▁42-", + -13.887617111206056 + ], + [ + "blanc", + -13.887639999389648 + ], + [ + "▁biologists", + -13.887739181518556 + ], + [ + "▁Blades", + -13.887884140014648 + ], + [ + "yong", + -13.887919425964355 + ], + [ + "ент", + 
-13.887924194335938 + ], + [ + "ideal", + -13.887927055358888 + ], + [ + "Vista", + -13.887934684753418 + ], + [ + "genesis", + -13.887939453125 + ], + [ + "▁refinish", + -13.888044357299805 + ], + [ + "nami", + -13.88804817199707 + ], + [ + "▁Eph", + -13.888189315795898 + ], + [ + "▁keychain", + -13.888421058654783 + ], + [ + "Howard", + -13.888471603393556 + ], + [ + "isto", + -13.888472557067873 + ], + [ + "▁reappear", + -13.888504028320312 + ], + [ + "▁Latinos", + -13.888587951660156 + ], + [ + "hz", + -13.888593673706056 + ], + [ + "▁NIGHT", + -13.888620376586914 + ], + [ + "▁Doe", + -13.888632774353027 + ], + [ + "▁Turmeric", + -13.888689994812012 + ], + [ + "▁Carriage", + -13.888754844665527 + ], + [ + "▁divulge", + -13.888754844665527 + ], + [ + "▁WHICH", + -13.88875675201416 + ], + [ + "▁Mounting", + -13.888761520385742 + ], + [ + "▁EVERYONE", + -13.888763427734377 + ], + [ + "▁alfresco", + -13.88876724243164 + ], + [ + "▁Claremont", + -13.888782501220703 + ], + [ + "▁delving", + -13.88880443572998 + ], + [ + "▁Mughal", + -13.888813018798828 + ], + [ + "▁neuronal", + -13.8888521194458 + ], + [ + "▁autofocus", + -13.88886547088623 + ], + [ + "▁Edible", + -13.888886451721191 + ], + [ + "▁Mackay", + -13.888951301574709 + ], + [ + "▁reapply", + -13.88898754119873 + ], + [ + "▁Siberian", + -13.889030456542969 + ], + [ + "▁pianos", + -13.889056205749512 + ], + [ + "▁cru", + -13.88905906677246 + ], + [ + "▁darken", + -13.88905906677246 + ], + [ + "▁penalized", + -13.889090538024902 + ], + [ + "▁scrolls", + -13.88912868499756 + ], + [ + "▁Sweatshirt", + -13.889142990112305 + ], + [ + "▁Empress", + -13.889161109924316 + ], + [ + "▁Currie", + -13.889169692993164 + ], + [ + "CNA", + -13.889219284057615 + ], + [ + "umming", + -13.889223098754885 + ], + [ + "▁chants", + -13.889242172241213 + ], + [ + "▁Richter", + -13.889254570007324 + ], + [ + "▁Trained", + -13.889259338378906 + ], + [ + "erf", + -13.889283180236816 + ], + [ + "▁Pacers", + -13.889283180236816 + ], + [ + "▁Kyiv", + -13.889296531677246 + ], + [ + "▁leaching", + -13.889302253723145 + ], + [ + "▁authentically", + -13.88932991027832 + ], + [ + "▁weeping", + -13.889373779296877 + ], + [ + "▁Refresh", + -13.88937759399414 + ], + [ + "▁Frag", + -13.889525413513184 + ], + [ + "finance", + -13.889569282531738 + ], + [ + "▁5/8\"", + -13.889592170715332 + ], + [ + "▁JH", + -13.889602661132812 + ], + [ + "▁sustains", + -13.8896484375 + ], + [ + "▁Performed", + -13.889718055725098 + ], + [ + "▁CDI", + -13.889779090881348 + ], + [ + "▁landowner", + -13.889785766601562 + ], + [ + "Conclusion", + -13.889904022216797 + ], + [ + "928", + -13.88991355895996 + ], + [ + "▁Researcher", + -13.889970779418944 + ], + [ + "expect", + -13.889976501464844 + ], + [ + "▁Moonlight", + -13.890028953552246 + ], + [ + "tali", + -13.890044212341309 + ], + [ + "materials", + -13.890141487121582 + ], + [ + "env", + -13.890151023864746 + ], + [ + "/2009", + -13.89017105102539 + ], + [ + "▁canola", + -13.890175819396973 + ], + [ + "▁48%", + -13.890251159667969 + ], + [ + "Availability", + -13.890288352966309 + ], + [ + "Eagle", + -13.890292167663574 + ], + [ + "immigrant", + -13.89029312133789 + ], + [ + "▁Rector", + -13.890302658081056 + ], + [ + "Jeremy", + -13.890303611755373 + ], + [ + "Contemporary", + -13.890310287475586 + ], + [ + "Ladies", + -13.890344619750977 + ], + [ + "▁Resolve", + -13.890348434448242 + ], + [ + "▁Webmaster", + -13.890382766723633 + ], + [ + "▁Hacker", + -13.89041805267334 + ], + [ + "ghe", + -13.890443801879885 + ], + [ + "minimum", + 
-13.890484809875488 + ], + [ + "fleet", + -13.89055061340332 + ], + [ + "casino", + -13.8905668258667 + ], + [ + "Eng", + -13.89060115814209 + ], + [ + "imento", + -13.89064884185791 + ], + [ + "Morris", + -13.890650749206545 + ], + [ + "▁KPIs", + -13.890663146972656 + ], + [ + "INK", + -13.890697479248049 + ], + [ + "drawing", + -13.890722274780272 + ], + [ + "esti", + -13.89073657989502 + ], + [ + "stitch", + -13.890750885009766 + ], + [ + "ayan", + -13.890827178955078 + ], + [ + "▁binders", + -13.89085578918457 + ], + [ + "maps", + -13.890984535217283 + ], + [ + "bine", + -13.89106273651123 + ], + [ + "avia", + -13.891119956970217 + ], + [ + "▁Decorators", + -13.891170501708984 + ], + [ + "yin", + -13.89117431640625 + ], + [ + "▁specialities", + -13.891178131103516 + ], + [ + "▁Crater", + -13.89124870300293 + ], + [ + "272", + -13.89125156402588 + ], + [ + "▁crusty", + -13.891277313232422 + ], + [ + "odon", + -13.891300201416016 + ], + [ + "▁spotlights", + -13.891345977783203 + ], + [ + "NCH", + -13.89137077331543 + ], + [ + "▁cutie", + -13.891488075256348 + ], + [ + "▁Retired", + -13.891547203063965 + ], + [ + "cgi", + -13.891592025756836 + ], + [ + "▁PMS", + -13.891620635986328 + ], + [ + "▁bras", + -13.89170265197754 + ], + [ + "▁Grun", + -13.891732215881348 + ], + [ + "▁Canine", + -13.891786575317385 + ], + [ + "crystal", + -13.891827583312988 + ], + [ + "▁deprive", + -13.891844749450684 + ], + [ + "▁Nap", + -13.89184856414795 + ], + [ + "▁watercolors", + -13.891879081726074 + ], + [ + "▁Baden", + -13.891888618469238 + ], + [ + "▁craps", + -13.891940116882324 + ], + [ + "▁Buckeye", + -13.89198875427246 + ], + [ + "▁Eb", + -13.892078399658203 + ], + [ + "▁Motorsport", + -13.892088890075684 + ], + [ + "▁cask", + -13.89219093322754 + ], + [ + "648", + -13.89219856262207 + ], + [ + "▁Deva", + -13.89222526550293 + ], + [ + "sou", + -13.892324447631836 + ], + [ + "Rat", + -13.89234447479248 + ], + [ + "▁BSA", + -13.892383575439451 + ], + [ + "▁crucified", + -13.892444610595703 + ], + [ + "▁grotesque", + -13.892444610595703 + ], + [ + "▁hyaluronic", + -13.892444610595703 + ], + [ + "▁incompetence", + -13.892444610595703 + ], + [ + "▁sympathize", + -13.892444610595703 + ], + [ + "▁NUMBER", + -13.892446517944336 + ], + [ + "▁Patreon", + -13.892449378967283 + ], + [ + "▁Chengdu", + -13.892457962036133 + ], + [ + "▁Ritual", + -13.892462730407717 + ], + [ + "▁gourd", + -13.892471313476562 + ], + [ + "▁MacArthur", + -13.892477989196776 + ], + [ + "▁epoch", + -13.892498016357422 + ], + [ + "▁Grandfather", + -13.892502784729004 + ], + [ + "TRI", + -13.89250373840332 + ], + [ + "▁rousing", + -13.892511367797852 + ], + [ + "▁Accidents", + -13.892516136169434 + ], + [ + "▁intrinsically", + -13.892522811889648 + ], + [ + "▁Millionaire", + -13.892523765563965 + ], + [ + "▁bloke", + -13.892537117004396 + ], + [ + "▁discord", + -13.892550468444824 + ], + [ + "▁wattage", + -13.892589569091797 + ], + [ + "▁massaging", + -13.892651557922363 + ], + [ + "▁hoods", + -13.89266872406006 + ], + [ + "▁Hamm", + -13.892677307128906 + ], + [ + "▁Gigi", + -13.892682075500488 + ], + [ + "enable", + -13.892688751220703 + ], + [ + "▁agitated", + -13.892690658569336 + ], + [ + "▁saree", + -13.892694473266602 + ], + [ + "▁Sargent", + -13.892725944519045 + ], + [ + "▁befriend", + -13.892739295959473 + ], + [ + "▁0.02", + -13.892752647399902 + ], + [ + "▁Goldstein", + -13.89277172088623 + ], + [ + "▁1878", + -13.89281940460205 + ], + [ + "▁Relevant", + -13.892834663391112 + ], + [ + "▁Underwater", + -13.892889976501465 + ], 
+ [ + "▁mounds", + -13.892930030822754 + ], + [ + "▁winged", + -13.892984390258787 + ], + [ + "▁Playlist", + -13.892988204956056 + ], + [ + "Neuro", + -13.893012046813965 + ], + [ + "▁Vox", + -13.893157958984377 + ], + [ + "▁Worked", + -13.893194198608398 + ], + [ + "▁Kush", + -13.893198013305664 + ], + [ + "▁fable", + -13.89321517944336 + ], + [ + "andy", + -13.893224716186523 + ], + [ + "▁Inches", + -13.893242835998535 + ], + [ + "▁Toe", + -13.8932523727417 + ], + [ + "014", + -13.893263816833496 + ], + [ + "utu", + -13.893312454223633 + ], + [ + "▁Newer", + -13.893394470214844 + ], + [ + "▁transporter", + -13.893489837646484 + ], + [ + "locked", + -13.89353847503662 + ], + [ + "continue", + -13.893567085266112 + ], + [ + "▁fetched", + -13.893571853637695 + ], + [ + "▁Pose", + -13.893598556518556 + ], + [ + "▁flamingo", + -13.893619537353516 + ], + [ + "Backed", + -13.893635749816896 + ], + [ + "noid", + -13.893698692321776 + ], + [ + "AKE", + -13.89370822906494 + ], + [ + "TOM", + -13.893749237060549 + ], + [ + "786", + -13.893754005432127 + ], + [ + "▁Inno", + -13.893781661987305 + ], + [ + "▁Portage", + -13.893813133239746 + ], + [ + "cite", + -13.893935203552246 + ], + [ + "▁860", + -13.89394474029541 + ], + [ + "482", + -13.89394760131836 + ], + [ + "90,000", + -13.89396858215332 + ], + [ + "Rec", + -13.893988609313965 + ], + [ + "rab", + -13.894034385681152 + ], + [ + "Adobe", + -13.894097328186035 + ], + [ + "description", + -13.894132614135742 + ], + [ + "jian", + -13.894165992736816 + ], + [ + "noun", + -13.894173622131348 + ], + [ + "▁wholesaler", + -13.894235610961914 + ], + [ + "azzi", + -13.894246101379396 + ], + [ + "Strike", + -13.894309043884276 + ], + [ + "portrait", + -13.894342422485352 + ], + [ + "communication", + -13.894344329833984 + ], + [ + "▁Clearing", + -13.894394874572754 + ], + [ + "laus", + -13.894407272338867 + ], + [ + "▁FEEL", + -13.894472122192385 + ], + [ + "▁239", + -13.894516944885254 + ], + [ + "Athlete", + -13.89452075958252 + ], + [ + "Nurse", + -13.89454174041748 + ], + [ + "gala", + -13.89460277557373 + ], + [ + "▁Opal", + -13.894634246826172 + ], + [ + "PUR", + -13.894675254821776 + ], + [ + "▁Meteor", + -13.894691467285156 + ], + [ + "7-11", + -13.894699096679688 + ], + [ + "▁finalizing", + -13.89476203918457 + ], + [ + "▁Avis", + -13.894786834716797 + ], + [ + "▁cleanest", + -13.894786834716797 + ], + [ + "▁Qt", + -13.894800186157228 + ], + [ + "236", + -13.894858360290527 + ], + [ + "▁787", + -13.894930839538574 + ], + [ + "▁misused", + -13.89494800567627 + ], + [ + "mead", + -13.894951820373535 + ], + [ + "Nope", + -13.895042419433594 + ], + [ + "▁Chit", + -13.895105361938477 + ], + [ + "OTT", + -13.895109176635742 + ], + [ + "healing", + -13.895133018493652 + ], + [ + "EQ", + -13.8951416015625 + ], + [ + "▁railroads", + -13.895158767700195 + ], + [ + "502", + -13.895188331604004 + ], + [ + "itable", + -13.895256042480469 + ], + [ + "▁HPV", + -13.895306587219238 + ], + [ + "▁informally", + -13.895386695861816 + ], + [ + "▁Transfers", + -13.895430564880373 + ], + [ + "▁Fos", + -13.895468711853027 + ], + [ + "champ", + -13.895484924316406 + ], + [ + "▁Jug", + -13.89553165435791 + ], + [ + "▁chiropractors", + -13.895551681518556 + ], + [ + "oph", + -13.895557403564451 + ], + [ + "nomic", + -13.895566940307615 + ], + [ + "191", + -13.895623207092283 + ], + [ + "▁Hoping", + -13.895648956298828 + ], + [ + "▁Audible", + -13.895705223083496 + ], + [ + "▁renown", + -13.895753860473633 + ], + [ + "▁whi", + -13.89582061767578 + ], + [ + "985", + 
-13.895827293395996 + ], + [ + "▁Curl", + -13.895873069763184 + ], + [ + "351", + -13.896061897277832 + ], + [ + "RCA", + -13.896062850952148 + ], + [ + "282", + -13.896096229553224 + ], + [ + "▁Vida", + -13.896102905273438 + ], + [ + "▁Chelmsford", + -13.896148681640623 + ], + [ + "▁apoptosis", + -13.896148681640623 + ], + [ + "▁complacent", + -13.896148681640623 + ], + [ + "▁procuring", + -13.896148681640623 + ], + [ + "▁Pisces", + -13.896150588989258 + ], + [ + "▁divergence", + -13.896154403686523 + ], + [ + "▁186", + -13.896156311035156 + ], + [ + "▁Stewardship", + -13.896162986755373 + ], + [ + "▁Compaq", + -13.896173477172852 + ], + [ + "▁Trolley", + -13.896173477172852 + ], + [ + "▁Vinegar", + -13.896174430847168 + ], + [ + "▁curating", + -13.896178245544434 + ], + [ + "▁slimming", + -13.896201133728027 + ], + [ + "▁Frisco", + -13.896209716796877 + ], + [ + "▁garnet", + -13.896214485168455 + ], + [ + "▁Ripley", + -13.896228790283203 + ], + [ + "▁unassuming", + -13.896230697631836 + ], + [ + "▁sloped", + -13.89626121520996 + ], + [ + "▁patty", + -13.896278381347656 + ], + [ + "▁straining", + -13.896297454833984 + ], + [ + "▁incentivize", + -13.896300315856934 + ], + [ + "upload", + -13.896317481994627 + ], + [ + "▁NAP", + -13.89632797241211 + ], + [ + "▁Rooster", + -13.89634132385254 + ], + [ + "▁Hailey", + -13.89642333984375 + ], + [ + "▁Tripoli", + -13.89643383026123 + ], + [ + "▁(12)", + -13.896479606628418 + ], + [ + "▁Originals", + -13.896510124206545 + ], + [ + "tris", + -13.896535873413086 + ], + [ + "zai", + -13.896543502807615 + ], + [ + "▁1884", + -13.896591186523438 + ], + [ + "▁zooming", + -13.896605491638184 + ], + [ + "uct", + -13.896621704101562 + ], + [ + "▁Novice", + -13.896644592285156 + ], + [ + "▁drinker", + -13.896663665771484 + ], + [ + "Deal", + -13.89667510986328 + ], + [ + "▁sprouted", + -13.896729469299316 + ], + [ + "▁Lear", + -13.896736145019531 + ], + [ + "ihan", + -13.896751403808594 + ], + [ + "▁227", + -13.89678192138672 + ], + [ + "comfort", + -13.89679718017578 + ], + [ + "emu", + -13.896804809570312 + ], + [ + "▁easement", + -13.896810531616213 + ], + [ + "157", + -13.896824836730955 + ], + [ + "▁Burch", + -13.896900177001951 + ], + [ + "▁Curly", + -13.896912574768066 + ], + [ + "▁affirming", + -13.896942138671877 + ], + [ + "rias", + -13.896966934204102 + ], + [ + "▁urbanization", + -13.896982192993164 + ], + [ + "▁SES", + -13.896989822387695 + ], + [ + "ologia", + -13.897005081176758 + ], + [ + "▁Textiles", + -13.897024154663086 + ], + [ + "▁Milli", + -13.897071838378906 + ], + [ + "ifera", + -13.897137641906738 + ], + [ + "▁Uses", + -13.897254943847656 + ], + [ + "▁Quantitative", + -13.89738655090332 + ], + [ + "Lex", + -13.897394180297852 + ], + [ + "▁SEP", + -13.897421836853027 + ], + [ + "▁55-", + -13.89749813079834 + ], + [ + "▁Gallo", + -13.897510528564451 + ], + [ + "▁Rockville", + -13.89755916595459 + ], + [ + "input", + -13.897590637207031 + ], + [ + "▁underline", + -13.897625923156738 + ], + [ + "▁maximis", + -13.89765453338623 + ], + [ + "▁BCS", + -13.89768886566162 + ], + [ + "▁MIA", + -13.897817611694336 + ], + [ + "▁Benin", + -13.897863388061523 + ], + [ + "▁graze", + -13.897865295410156 + ], + [ + "Responsible", + -13.897882461547852 + ], + [ + "▁distal", + -13.8978910446167 + ], + [ + "Equipment", + -13.89792251586914 + ], + [ + "▁facades", + -13.89797019958496 + ], + [ + "Hands", + -13.898024559020996 + ], + [ + "▁Wim", + -13.898042678833008 + ], + [ + "imer", + -13.898049354553224 + ], + [ + "▁2017!", + -13.898054122924805 + ], + 
[ + "▁Odin", + -13.898063659667969 + ], + [ + "previously", + -13.89806842803955 + ], + [ + "▁scheduler", + -13.898080825805664 + ], + [ + "▁Departure", + -13.898138046264648 + ], + [ + "▁stead", + -13.898205757141112 + ], + [ + "Wh", + -13.898207664489746 + ], + [ + "Hons", + -13.898234367370604 + ], + [ + "▁crawled", + -13.898276329040527 + ], + [ + "▁Nee", + -13.898295402526855 + ], + [ + "rley", + -13.898301124572754 + ], + [ + "▁0-1", + -13.898330688476562 + ], + [ + "Logo", + -13.898334503173828 + ], + [ + "▁6-12", + -13.898345947265623 + ], + [ + "Hide", + -13.898388862609863 + ], + [ + "▁Buckle", + -13.898463249206545 + ], + [ + "Viewer", + -13.898518562316896 + ], + [ + "▁Remy", + -13.89857006072998 + ], + [ + "▁Martini", + -13.898673057556152 + ], + [ + "▁dongle", + -13.898762702941896 + ], + [ + "▁Hindus", + -13.898842811584473 + ], + [ + "Lang", + -13.898869514465332 + ], + [ + "surf", + -13.898879051208496 + ], + [ + "photos", + -13.898919105529783 + ], + [ + "28)", + -13.898951530456545 + ], + [ + "jima", + -13.898995399475098 + ], + [ + "▁Desi", + -13.899049758911133 + ], + [ + "▁mislead", + -13.89906406402588 + ], + [ + "▁Presentations", + -13.899070739746094 + ], + [ + "▁demonic", + -13.899097442626951 + ], + [ + "▁pike", + -13.89911651611328 + ], + [ + "Command", + -13.899127960205078 + ], + [ + "Coal", + -13.899235725402832 + ], + [ + "▁Balkans", + -13.899273872375488 + ], + [ + "Sold", + -13.899401664733888 + ], + [ + "▁400-", + -13.899428367614746 + ], + [ + "lounge", + -13.899435997009276 + ], + [ + "ltd", + -13.899442672729492 + ], + [ + "NEC", + -13.899484634399414 + ], + [ + "▁elevates", + -13.89950942993164 + ], + [ + "▁destroyer", + -13.899523735046388 + ], + [ + "▁glee", + -13.899538040161133 + ], + [ + "▁mysql", + -13.89955997467041 + ], + [ + "2(", + -13.899577140808104 + ], + [ + "▁Compressor", + -13.899593353271484 + ], + [ + "▁Berman", + -13.8995943069458 + ], + [ + "aligned", + -13.899646759033203 + ], + [ + "▁193", + -13.899650573730469 + ], + [ + "cable", + -13.899795532226562 + ], + [ + "▁biologic", + -13.899807929992676 + ], + [ + "▁Etihad", + -13.899866104125977 + ], + [ + "▁MailChimp", + -13.899866104125977 + ], + [ + "▁Toulouse", + -13.899866104125977 + ], + [ + "▁Varanasi", + -13.899866104125977 + ], + [ + "▁fertiliser", + -13.899866104125977 + ], + [ + "▁unrelenting", + -13.899866104125977 + ], + [ + "▁unruly", + -13.899866104125977 + ], + [ + "▁contraception", + -13.89986801147461 + ], + [ + "▁Michelangelo", + -13.899868965148926 + ], + [ + "▁Wrote", + -13.899868965148926 + ], + [ + "▁Ducati", + -13.899872779846191 + ], + [ + "▁hackathon", + -13.899877548217772 + ], + [ + "▁lanyard", + -13.899885177612305 + ], + [ + "▁wobble", + -13.89989185333252 + ], + [ + "▁Desmond", + -13.899895668029783 + ], + [ + "▁orlistat", + -13.899908065795898 + ], + [ + "dat", + -13.899919509887695 + ], + [ + "▁tiniest", + -13.899942398071287 + ], + [ + "▁Yadav", + -13.899979591369627 + ], + [ + "▁Fanatics", + -13.899982452392578 + ], + [ + "▁shrunk", + -13.899982452392578 + ], + [ + "▁Cron", + -13.89998722076416 + ], + [ + "▁unreleased", + -13.899991035461426 + ], + [ + "owicz", + -13.899996757507324 + ], + [ + "▁ductwork", + -13.90004825592041 + ], + [ + "▁34,", + -13.90005111694336 + ], + [ + "▁supp", + -13.900059700012209 + ], + [ + "▁ECG", + -13.900087356567385 + ], + [ + "pta", + -13.900123596191406 + ], + [ + "▁mush", + -13.900131225585938 + ], + [ + "RAT", + -13.900151252746582 + ], + [ + "▁undefined", + -13.900179862976074 + ], + [ + "▁Olga", + 
-13.900190353393556 + ], + [ + "▁Favorites", + -13.900192260742188 + ], + [ + "▁KEEP", + -13.900202751159668 + ], + [ + "▁arson", + -13.900213241577148 + ], + [ + "▁Flamingo", + -13.900224685668944 + ], + [ + "?!?!", + -13.900226593017578 + ], + [ + "POP", + -13.900250434875488 + ], + [ + "▁gleaned", + -13.900250434875488 + ], + [ + "▁Hawkeye", + -13.900287628173828 + ], + [ + "852", + -13.900477409362791 + ], + [ + "▁231", + -13.900506019592283 + ], + [ + "▁Otis", + -13.900521278381348 + ], + [ + "▁nag", + -13.900550842285156 + ], + [ + "liant", + -13.900568962097168 + ], + [ + "▁9-11", + -13.900622367858888 + ], + [ + "XML", + -13.90064525604248 + ], + [ + "▁9.7", + -13.900653839111328 + ], + [ + "▁$199", + -13.90066909790039 + ], + [ + "▁adjusters", + -13.90067195892334 + ], + [ + "kola", + -13.900694847106934 + ], + [ + "▁spar", + -13.900699615478516 + ], + [ + "▁Rowland", + -13.900728225708008 + ], + [ + "▁Valuable", + -13.900762557983398 + ], + [ + "▁Blessing", + -13.900775909423828 + ], + [ + ")--", + -13.900805473327637 + ], + [ + "▁Rang", + -13.900838851928713 + ], + [ + "PAT", + -13.900858879089355 + ], + [ + "▁worsened", + -13.90089988708496 + ], + [ + "success", + -13.90097427368164 + ], + [ + "▁meetup", + -13.900983810424805 + ], + [ + "rop", + -13.900989532470703 + ], + [ + "▁inhibiting", + -13.901039123535156 + ], + [ + "899", + -13.901040077209473 + ], + [ + "experienced", + -13.901055335998535 + ], + [ + "▁Thrive", + -13.90111255645752 + ], + [ + "dging", + -13.901174545288086 + ], + [ + "▁assurances", + -13.901209831237791 + ], + [ + "▁suspensions", + -13.901259422302246 + ], + [ + "▁instructs", + -13.90128231048584 + ], + [ + "▁ailing", + -13.901302337646484 + ], + [ + "Sustain", + -13.90130615234375 + ], + [ + "thousand", + -13.901359558105469 + ], + [ + "Till", + -13.901363372802734 + ], + [ + "jiang", + -13.901402473449709 + ], + [ + "sleeve", + -13.901405334472656 + ], + [ + "▁Stil", + -13.90143871307373 + ], + [ + "▁browns", + -13.901562690734863 + ], + [ + "▁sodas", + -13.901565551757812 + ], + [ + "▁Travelling", + -13.901612281799316 + ], + [ + "Month", + -13.901650428771973 + ], + [ + "▁abort", + -13.9016695022583 + ], + [ + "lita", + -13.901681900024414 + ], + [ + "reason", + -13.901698112487791 + ], + [ + "osphere", + -13.901715278625488 + ], + [ + "Concrete", + -13.901719093322754 + ], + [ + "▁freehold", + -13.901766777038574 + ], + [ + "trading", + -13.901836395263672 + ], + [ + "▁Php", + -13.901853561401367 + ], + [ + "Piece", + -13.901887893676758 + ], + [ + "Presentation", + -13.901907920837402 + ], + [ + "Transport", + -13.901932716369627 + ], + [ + "▁Assistants", + -13.901954650878906 + ], + [ + "▁Gallon", + -13.901996612548828 + ], + [ + "pd", + -13.902000427246094 + ], + [ + "▁Brim", + -13.902018547058104 + ], + [ + "MOD", + -13.90207576751709 + ], + [ + "▁Diagnosis", + -13.902130126953123 + ], + [ + "RIP", + -13.902213096618652 + ], + [ + "REE", + -13.902238845825195 + ], + [ + "432", + -13.90224266052246 + ], + [ + "sensory", + -13.902263641357422 + ], + [ + "▁Argos", + -13.902310371398926 + ], + [ + "▁raped", + -13.902310371398926 + ], + [ + "▁backseat", + -13.902344703674316 + ], + [ + "ubu", + -13.902393341064451 + ], + [ + "▁Aldi", + -13.902393341064451 + ], + [ + "▁Acro", + -13.902458190917969 + ], + [ + "audi", + -13.902480125427246 + ], + [ + "chard", + -13.902488708496094 + ], + [ + "itu", + -13.902509689331056 + ], + [ + "▁Envelope", + -13.902650833129885 + ], + [ + "▁Shingle", + -13.902685165405272 + ], + [ + "▁chiller", + 
-13.902762413024902 + ], + [ + "trim", + -13.9027738571167 + ], + [ + "▁Chefs", + -13.90280055999756 + ], + [ + "▁Implants", + -13.902806282043455 + ], + [ + "289", + -13.902822494506836 + ], + [ + "▁Blazers", + -13.902832984924316 + ], + [ + "fang", + -13.902847290039062 + ], + [ + "713", + -13.902871131896973 + ], + [ + "▁pix", + -13.90287971496582 + ], + [ + "▁basements", + -13.90291976928711 + ], + [ + "▁Highways", + -13.902950286865234 + ], + [ + "▁Musicians", + -13.902952194213867 + ], + [ + "▁prospectus", + -13.902998924255373 + ], + [ + "VET", + -13.903023719787598 + ], + [ + "spray", + -13.903051376342772 + ], + [ + "▁xml", + -13.90305519104004 + ], + [ + "seam", + -13.903067588806152 + ], + [ + "▁boar", + -13.903100967407228 + ], + [ + "▁ABB", + -13.903121948242188 + ], + [ + "▁Incentive", + -13.903230667114258 + ], + [ + "▁junkie", + -13.903257369995115 + ], + [ + "▁Florian", + -13.90329647064209 + ], + [ + "barrel", + -13.903324127197266 + ], + [ + "▁loophole", + -13.903355598449709 + ], + [ + "▁Wilkins", + -13.903447151184082 + ], + [ + "▁allege", + -13.903470993041992 + ], + [ + "▁hunch", + -13.903529167175291 + ], + [ + "▁1879", + -13.903549194335938 + ], + [ + "▁1857", + -13.90359115600586 + ], + [ + "Hodgkin", + -13.903597831726074 + ], + [ + "▁saturday", + -13.903597831726074 + ], + [ + "▁tenderloin", + -13.903597831726074 + ], + [ + "▁cholera", + -13.90359878540039 + ], + [ + "▁Kaspersky", + -13.90360164642334 + ], + [ + "▁Questionnaire", + -13.903602600097656 + ], + [ + "ibble", + -13.903623580932615 + ], + [ + "▁microprocessor", + -13.90363311767578 + ], + [ + "▁Equestrian", + -13.903664588928224 + ], + [ + "▁spitting", + -13.903676986694336 + ], + [ + "▁colonization", + -13.903698921203612 + ], + [ + "▁hoo", + -13.90371799468994 + ], + [ + "▁Thereafter", + -13.903763771057127 + ], + [ + "▁molar", + -13.903778076171877 + ], + [ + "▁Emb", + -13.90380859375 + ], + [ + "Keith", + -13.903826713562012 + ], + [ + "Ter", + -13.903830528259276 + ], + [ + "Eli", + -13.903831481933594 + ], + [ + "▁untrue", + -13.903838157653809 + ], + [ + "linda", + -13.903884887695312 + ], + [ + "wf", + -13.904037475585938 + ], + [ + "▁Sicilian", + -13.904130935668944 + ], + [ + "elder", + -13.904145240783691 + ], + [ + "▁unification", + -13.904145240783691 + ], + [ + "logue", + -13.904152870178224 + ], + [ + "▁grassland", + -13.904182434082031 + ], + [ + "▁dielectric", + -13.904192924499512 + ], + [ + "▁Oakville", + -13.904193878173828 + ], + [ + "▁spasms", + -13.90424633026123 + ], + [ + "▁Siege", + -13.90427303314209 + ], + [ + "chase", + -13.904279708862305 + ], + [ + "TAS", + -13.904294967651367 + ], + [ + "▁Mound", + -13.904314041137695 + ], + [ + "▁sparkles", + -13.904321670532228 + ], + [ + "tension", + -13.904397010803224 + ], + [ + "gran", + -13.904428482055664 + ], + [ + "▁allot", + -13.904434204101562 + ], + [ + "Opt", + -13.90443992614746 + ], + [ + "changer", + -13.90445327758789 + ], + [ + "▁joinery", + -13.904460906982422 + ], + [ + "▁Arundel", + -13.904483795166016 + ], + [ + "▁Shul", + -13.904501914978027 + ], + [ + "▁Guar", + -13.904516220092772 + ], + [ + "cow", + -13.904552459716797 + ], + [ + "▁seawater", + -13.90455436706543 + ], + [ + "XXXX", + -13.90458869934082 + ], + [ + "rope", + -13.904610633850098 + ], + [ + "▁Stephenson", + -13.904611587524414 + ], + [ + "effectiveness", + -13.904613494873049 + ], + [ + "▁PROJECT", + -13.904659271240234 + ], + [ + "TCC", + -13.904666900634766 + ], + [ + "▁recited", + -13.904706001281738 + ], + [ + "▁Appropriations", + 
-13.90484619140625 + ], + [ + "▁overriding", + -13.904878616333008 + ], + [ + "minator", + -13.904903411865234 + ], + [ + "LOR", + -13.904914855957031 + ], + [ + "MOND", + -13.904942512512209 + ], + [ + "▁Lulu", + -13.905115127563477 + ], + [ + "climate", + -13.905118942260742 + ], + [ + "imeter", + -13.905150413513184 + ], + [ + "fn", + -13.90518856048584 + ], + [ + "581", + -13.90521240234375 + ], + [ + "▁10.3", + -13.905232429504396 + ], + [ + "INST", + -13.905235290527344 + ], + [ + "▁Choo", + -13.905264854431152 + ], + [ + "▁Southport", + -13.90536117553711 + ], + [ + "RUM", + -13.905364990234377 + ], + [ + "▁shader", + -13.905508995056152 + ], + [ + "Hamilton", + -13.90553092956543 + ], + [ + "Barbara", + -13.905559539794922 + ], + [ + "▁redefined", + -13.905571937561035 + ], + [ + "▁Muni", + -13.905580520629885 + ], + [ + "▁CFL", + -13.905595779418944 + ], + [ + "▁Teal", + -13.905640602111816 + ], + [ + "ppo", + -13.90565299987793 + ], + [ + "▁motels", + -13.90573501586914 + ], + [ + "686", + -13.905792236328123 + ], + [ + "freeze", + -13.905917167663574 + ], + [ + "▁redwood", + -13.905936241149902 + ], + [ + "tice", + -13.905938148498535 + ], + [ + "▁Pali", + -13.905945777893066 + ], + [ + "hut", + -13.90595245361328 + ], + [ + "▁louis", + -13.905976295471191 + ], + [ + "▁Barclay", + -13.906054496765137 + ], + [ + "▁assessor", + -13.906192779541016 + ], + [ + "▁Pops", + -13.906206130981444 + ], + [ + "XR", + -13.906209945678713 + ], + [ + "PPC", + -13.906238555908203 + ], + [ + "▁chronicle", + -13.90633487701416 + ], + [ + "▁Octa", + -13.906405448913574 + ], + [ + "▁vegans", + -13.906488418579102 + ], + [ + "-81", + -13.906538009643556 + ], + [ + "▁kilometer", + -13.906594276428224 + ], + [ + "▁cuttings", + -13.906601905822754 + ], + [ + "Clip", + -13.90661334991455 + ], + [ + "guer", + -13.906639099121094 + ], + [ + "uation", + -13.90664005279541 + ], + [ + "/09", + -13.906715393066406 + ], + [ + "▁Alejandro", + -13.906723022460938 + ], + [ + "▁synonym", + -13.906814575195312 + ], + [ + "▁kat", + -13.906818389892578 + ], + [ + "▁wastage", + -13.90687084197998 + ], + [ + "▁bylaws", + -13.90689182281494 + ], + [ + "▁catapult", + -13.90694522857666 + ], + [ + "692", + -13.90695571899414 + ], + [ + "▁correlates", + -13.907001495361328 + ], + [ + "flame", + -13.90700340270996 + ], + [ + "▁Roach", + -13.907015800476074 + ], + [ + "▁turret", + -13.907021522521973 + ], + [ + "▁denote", + -13.907129287719728 + ], + [ + "unbound", + -13.907158851623535 + ], + [ + "▁Sava", + -13.907329559326172 + ], + [ + "Arguably", + -13.907342910766602 + ], + [ + "▁Marrakech", + -13.907342910766602 + ], + [ + "▁Oswego", + -13.907342910766602 + ], + [ + "▁colorectal", + -13.907342910766602 + ], + [ + "▁cuddly", + -13.907342910766602 + ], + [ + "▁exemplify", + -13.907342910766602 + ], + [ + "▁undisputed", + -13.907342910766602 + ], + [ + "▁Wyndham", + -13.907343864440918 + ], + [ + "▁Aubrey", + -13.90734577178955 + ], + [ + "▁FITNESS", + -13.90734577178955 + ], + [ + "▁livable", + -13.90734577178955 + ], + [ + "▁canopies", + -13.907346725463867 + ], + [ + "▁Humble", + -13.90736484527588 + ], + [ + "▁Loretta", + -13.90736484527588 + ], + [ + "▁substituting", + -13.907365798950195 + ], + [ + "▁Mildred", + -13.907367706298828 + ], + [ + "▁anthropologist", + -13.907382011413574 + ], + [ + "▁Burberry", + -13.907383918762209 + ], + [ + "▁merciful", + -13.907395362854004 + ], + [ + "fred", + -13.907434463500977 + ], + [ + "Wing", + -13.907463073730469 + ], + [ + "▁Nal", + -13.907483100891112 + ], + [ + "▁splice", 
+ -13.907483100891112 + ], + [ + "▁worshipped", + -13.90749168395996 + ], + [ + "▁Inform", + -13.90749454498291 + ], + [ + "▁DHCP", + -13.907503128051758 + ], + [ + "lok", + -13.907504081726074 + ], + [ + "▁Bypass", + -13.907538414001465 + ], + [ + "▁Kendra", + -13.907588958740234 + ], + [ + "▁Geis", + -13.907591819763184 + ], + [ + "▁flywheel", + -13.907609939575195 + ], + [ + "Billy", + -13.907648086547852 + ], + [ + "▁Raceway", + -13.907737731933594 + ], + [ + "▁Bhu", + -13.907763481140137 + ], + [ + "▁crippled", + -13.907791137695312 + ], + [ + "▁Rut", + -13.907822608947754 + ], + [ + "HY", + -13.907917022705078 + ], + [ + "▁bulge", + -13.908020973205566 + ], + [ + "▁Pou", + -13.908052444458008 + ], + [ + "▁frizz", + -13.908082008361816 + ], + [ + "▁Velocity", + -13.908138275146484 + ], + [ + "rui", + -13.9081392288208 + ], + [ + "VH", + -13.908167839050291 + ], + [ + "685", + -13.908254623413086 + ], + [ + "▁nervousness", + -13.908319473266602 + ], + [ + "cig", + -13.90834617614746 + ], + [ + "Nut", + -13.908409118652344 + ], + [ + "Dell", + -13.908430099487305 + ], + [ + "BOO", + -13.90845012664795 + ], + [ + "gata", + -13.908475875854492 + ], + [ + "Borrow", + -13.908482551574709 + ], + [ + "▁coefficients", + -13.908499717712402 + ], + [ + "RIO", + -13.908527374267578 + ], + [ + "blocking", + -13.908547401428224 + ], + [ + "▁loaned", + -13.908547401428224 + ], + [ + "018", + -13.908555030822754 + ], + [ + "comment", + -13.908616065979004 + ], + [ + "▁£40", + -13.908631324768066 + ], + [ + "▁219", + -13.908674240112305 + ], + [ + "▁afterthought", + -13.908738136291504 + ], + [ + "▁jobless", + -13.908748626708984 + ], + [ + "▁thicknesses", + -13.908782958984377 + ], + [ + "▁1874", + -13.908785820007324 + ], + [ + "-2020", + -13.908807754516602 + ], + [ + "Locate", + -13.908809661865234 + ], + [ + "Stu", + -13.9088134765625 + ], + [ + "▁contends", + -13.9088134765625 + ], + [ + "▁blu", + -13.908819198608398 + ], + [ + "itzer", + -13.909090042114258 + ], + [ + "RUS", + -13.909095764160156 + ], + [ + "▁Essen", + -13.909228324890137 + ], + [ + "SX", + -13.90926742553711 + ], + [ + "▁Weis", + -13.909339904785156 + ], + [ + "currency", + -13.909366607666016 + ], + [ + "grabbing", + -13.909388542175291 + ], + [ + "▁carols", + -13.909388542175291 + ], + [ + "Climate", + -13.90939235687256 + ], + [ + "Shea", + -13.909412384033203 + ], + [ + "licensed", + -13.909446716308594 + ], + [ + "MPP", + -13.909478187561035 + ], + [ + "opo", + -13.909494400024414 + ], + [ + "▁Ric", + -13.909507751464844 + ], + [ + "terminal", + -13.909555435180664 + ], + [ + "Cancer", + -13.909564971923828 + ], + [ + "▁Blanco", + -13.909578323364258 + ], + [ + "▁Frankly", + -13.909586906433104 + ], + [ + "▁Indicators", + -13.909687995910645 + ], + [ + "▁rooftops", + -13.909700393676758 + ], + [ + "▁signings", + -13.909700393676758 + ], + [ + "▁tim", + -13.909728050231934 + ], + [ + "eki", + -13.909762382507324 + ], + [ + "Flower", + -13.909775733947754 + ], + [ + "▁(70", + -13.909798622131348 + ], + [ + "ngel", + -13.909829139709473 + ], + [ + "Rap", + -13.909831047058104 + ], + [ + "▁wat", + -13.9098539352417 + ], + [ + "IRC", + -13.909914016723633 + ], + [ + "▁canteen", + -13.90991497039795 + ], + [ + "pause", + -13.909915924072266 + ], + [ + "hipped", + -13.91004467010498 + ], + [ + "mmie", + -13.910074234008787 + ], + [ + "▁SCH", + -13.91010284423828 + ], + [ + "Bid", + -13.910205841064451 + ], + [ + "integr", + -13.910239219665527 + ], + [ + "flowing", + -13.910296440124512 + ], + [ + "pey", + -13.910345077514648 + 
], + [ + "aires", + -13.91049861907959 + ], + [ + "Cruise", + -13.91050910949707 + ], + [ + "▁springboard", + -13.910526275634766 + ], + [ + "acion", + -13.910531997680664 + ], + [ + "Fiber", + -13.910600662231444 + ], + [ + "▁Corrections", + -13.91064453125 + ], + [ + "leak", + -13.910669326782228 + ], + [ + "▁brook", + -13.910727500915527 + ], + [ + "▁Technically", + -13.910755157470703 + ], + [ + "Smile", + -13.910810470581056 + ], + [ + "▁whistleblower", + -13.910818099975586 + ], + [ + "--(", + -13.910822868347168 + ], + [ + "-61", + -13.91090488433838 + ], + [ + "&#", + -13.91098403930664 + ], + [ + "▁retriever", + -13.911016464233398 + ], + [ + "▁synthesizer", + -13.91102695465088 + ], + [ + "▁TOR", + -13.911067008972168 + ], + [ + "menopausal", + -13.911102294921877 + ], + [ + "▁Diaspora", + -13.911102294921877 + ], + [ + "▁Shropshire", + -13.911102294921877 + ], + [ + "▁Willamette", + -13.911102294921877 + ], + [ + "▁Hiroshima", + -13.911105155944824 + ], + [ + "▁pegged", + -13.91110897064209 + ], + [ + "▁unobstructed", + -13.911110877990724 + ], + [ + "▁communism", + -13.911136627197266 + ], + [ + "▁Adsense", + -13.911157608032228 + ], + [ + "▁cockroaches", + -13.911203384399414 + ], + [ + "▁Zhong", + -13.91120433807373 + ], + [ + "▁Differential", + -13.911205291748049 + ], + [ + "Input", + -13.911263465881348 + ], + [ + "▁Hummer", + -13.91128158569336 + ], + [ + "▁MTA", + -13.911297798156738 + ], + [ + "▁Vulcan", + -13.911298751831056 + ], + [ + "▁198", + -13.911331176757812 + ], + [ + "PAD", + -13.911385536193848 + ], + [ + "▁Keywords", + -13.91138744354248 + ], + [ + "HEL", + -13.911401748657228 + ], + [ + "▁livery", + -13.911420822143556 + ], + [ + "▁solidified", + -13.911506652832031 + ], + [ + "524", + -13.91152000427246 + ], + [ + "▁vowel", + -13.911534309387209 + ], + [ + "▁envisaged", + -13.911540985107422 + ], + [ + "crea", + -13.911648750305176 + ], + [ + "▁1866", + -13.911701202392578 + ], + [ + "▁denounced", + -13.91171169281006 + ], + [ + "rative", + -13.9117431640625 + ], + [ + "▁Sniper", + -13.911746978759766 + ], + [ + "▁mont", + -13.911816596984863 + ], + [ + "▁belated", + -13.911865234375 + ], + [ + "▁Symbols", + -13.911954879760742 + ], + [ + "▁Bue", + -13.911961555480955 + ], + [ + "Reflecting", + -13.911980628967283 + ], + [ + "▁Topaz", + -13.911986351013184 + ], + [ + "▁Jacobson", + -13.911995887756348 + ], + [ + "LINK", + -13.912059783935549 + ], + [ + "▁314", + -13.91208839416504 + ], + [ + "tsch", + -13.912141799926758 + ], + [ + "▁walkable", + -13.91216278076172 + ], + [ + "▁vial", + -13.912179946899414 + ], + [ + "▁canoes", + -13.912223815917969 + ], + [ + "Powered", + -13.91222858428955 + ], + [ + "▁Maw", + -13.91231918334961 + ], + [ + "▁authorizing", + -13.912352561950684 + ], + [ + "▁dashing", + -13.912376403808594 + ], + [ + "▁Pathways", + -13.912433624267578 + ], + [ + "▁casement", + -13.912468910217283 + ], + [ + "Kon", + -13.912690162658691 + ], + [ + "714", + -13.912691116333008 + ], + [ + "sui", + -13.912884712219238 + ], + [ + "Rider", + -13.912958145141602 + ], + [ + "▁Provision", + -13.912994384765623 + ], + [ + "▁BID", + -13.913004875183104 + ], + [ + "▁cannabinoids", + -13.913050651550291 + ], + [ + "▁clerical", + -13.913050651550291 + ], + [ + "▁schematics", + -13.913057327270508 + ], + [ + "vl", + -13.913091659545898 + ], + [ + "▁tango", + -13.913110733032228 + ], + [ + "▁#1:", + -13.913119316101074 + ], + [ + "▁telephones", + -13.913126945495604 + ], + [ + "Missing", + -13.913129806518556 + ], + [ + "$10", + -13.913137435913086 + ], + 
[ + "RPG", + -13.91317081451416 + ], + [ + "apart", + -13.913179397583008 + ], + [ + "▁Expertise", + -13.913227081298828 + ], + [ + "Phoenix", + -13.913251876831056 + ], + [ + "rising", + -13.91330909729004 + ], + [ + "Ashley", + -13.913321495056152 + ], + [ + "Lol", + -13.913326263427734 + ], + [ + "▁$10.00", + -13.913329124450684 + ], + [ + "juice", + -13.913330078125 + ], + [ + "phenol", + -13.913372993469238 + ], + [ + "1988", + -13.913426399230955 + ], + [ + "▁knuckle", + -13.91344165802002 + ], + [ + "662", + -13.91346836090088 + ], + [ + "wheat", + -13.913500785827637 + ], + [ + "▁Intake", + -13.913503646850586 + ], + [ + "▁Orwell", + -13.913566589355469 + ], + [ + "▁Activate", + -13.913567543029783 + ], + [ + "▁295", + -13.913592338562012 + ], + [ + "▁sitemap", + -13.913592338562012 + ], + [ + "▁fatigued", + -13.913626670837402 + ], + [ + "Advantage", + -13.913691520690918 + ], + [ + "Couple", + -13.913702011108398 + ], + [ + "▁childish", + -13.913719177246094 + ], + [ + "uchen", + -13.913750648498535 + ], + [ + "▁Abra", + -13.91377067565918 + ], + [ + "workshop", + -13.91378116607666 + ], + [ + "▁WWW", + -13.913861274719238 + ], + [ + "639", + -13.913862228393556 + ], + [ + "ussie", + -13.91386890411377 + ], + [ + "▁complementing", + -13.913872718811035 + ], + [ + "▁Italians", + -13.913877487182615 + ], + [ + "▁HSA", + -13.913907051086426 + ], + [ + "▁severed", + -13.91396427154541 + ], + [ + "▁sprawl", + -13.913969993591309 + ], + [ + "813", + -13.914119720458984 + ], + [ + "▁CAMP", + -13.9141206741333 + ], + [ + "▁subtitle", + -13.914134979248049 + ], + [ + "diet", + -13.91416835784912 + ], + [ + "▁rehabilitate", + -13.914274215698242 + ], + [ + "▁contaminate", + -13.914276123046877 + ], + [ + "▁PRI", + -13.914284706115724 + ], + [ + "▁FIS", + -13.91428565979004 + ], + [ + "1.50", + -13.914308547973633 + ], + [ + "▁surgically", + -13.914314270019531 + ], + [ + "▁deplete", + -13.914358139038086 + ], + [ + "FAQ", + -13.914377212524414 + ], + [ + "==", + -13.914385795593262 + ], + [ + "LIA", + -13.91443157196045 + ], + [ + "ISON", + -13.914438247680664 + ], + [ + "Zen", + -13.914441108703612 + ], + [ + "385", + -13.914542198181152 + ], + [ + "▁retweet", + -13.914602279663086 + ], + [ + "▁immunization", + -13.91463565826416 + ], + [ + "▁CORE", + -13.914685249328612 + ], + [ + "▁wristband", + -13.914703369140623 + ], + [ + "Waiting", + -13.9147310256958 + ], + [ + "eille", + -13.914742469787598 + ], + [ + "stained", + -13.914767265319824 + ], + [ + "▁352", + -13.914777755737305 + ], + [ + "▁Tear", + -13.914798736572266 + ], + [ + "▁awnings", + -13.914822578430176 + ], + [ + "▁Archdiocese", + -13.914875984191896 + ], + [ + "▁Endangered", + -13.914875984191896 + ], + [ + "▁McKinley", + -13.914875984191896 + ], + [ + "▁commensurate", + -13.914875984191896 + ], + [ + "▁exorbitant", + -13.914875984191896 + ], + [ + "▁tantalizing", + -13.914875984191896 + ], + [ + "▁unscrupulous", + -13.914875984191896 + ], + [ + "▁Divinity", + -13.914877891540527 + ], + [ + "▁Yarmouth", + -13.91488552093506 + ], + [ + "▁Tornado", + -13.91489315032959 + ], + [ + "▁Slider", + -13.914898872375488 + ], + [ + "▁Penalty", + -13.914913177490234 + ], + [ + "▁drapery", + -13.91492748260498 + ], + [ + "▁Canopy", + -13.914942741394045 + ], + [ + "▁Carrera", + -13.914945602416992 + ], + [ + "▁Accra", + -13.914952278137209 + ], + [ + "▁downsizing", + -13.91497802734375 + ], + [ + "▁digitization", + -13.91498565673828 + ], + [ + "▁Kaz", + -13.914992332458496 + ], + [ + "▁cyberspace", + -13.914996147155762 + ], + [ + 
"▁Waiver", + -13.915019989013672 + ], + [ + "▁batted", + -13.915027618408203 + ], + [ + "CSC", + -13.9150390625 + ], + [ + "▁HEALTH", + -13.91504955291748 + ], + [ + "▁punctuated", + -13.91509246826172 + ], + [ + "▁tropics", + -13.915101051330566 + ], + [ + "▁contradictions", + -13.915139198303224 + ], + [ + "▁deacon", + -13.91514015197754 + ], + [ + "needs", + -13.915197372436523 + ], + [ + "▁headlining", + -13.915230751037598 + ], + [ + "▁Niki", + -13.915258407592772 + ], + [ + "▁QM", + -13.915289878845217 + ], + [ + "fluoro", + -13.915410995483398 + ], + [ + "▁axles", + -13.915445327758787 + ], + [ + "logged", + -13.915555953979492 + ], + [ + "Salem", + -13.91556167602539 + ], + [ + "▁Donaldson", + -13.915570259094238 + ], + [ + "▁Heaters", + -13.915609359741213 + ], + [ + "▁PMC", + -13.91571807861328 + ], + [ + "▁COL", + -13.915796279907228 + ], + [ + "▁Virtually", + -13.915813446044922 + ], + [ + "denominational", + -13.915942192077637 + ], + [ + "▁Abdel", + -13.915958404541016 + ], + [ + "hama", + -13.91602897644043 + ], + [ + "nita", + -13.916070938110352 + ], + [ + "shadow", + -13.916180610656738 + ], + [ + "gant", + -13.916196823120115 + ], + [ + "Consult", + -13.916210174560549 + ], + [ + "▁Heels", + -13.91623878479004 + ], + [ + "▁529", + -13.9163179397583 + ], + [ + "▁Dw", + -13.916318893432615 + ], + [ + "▁Sufi", + -13.9163236618042 + ], + [ + "Julia", + -13.916332244873049 + ], + [ + "gage", + -13.91643238067627 + ], + [ + "6:30", + -13.916484832763672 + ], + [ + "MBO", + -13.91652011871338 + ], + [ + "encing", + -13.916622161865234 + ], + [ + "▁kilos", + -13.916705131530762 + ], + [ + "bé", + -13.916763305664062 + ], + [ + "jana", + -13.91690444946289 + ], + [ + "1(", + -13.91693115234375 + ], + [ + "▁Hoch", + -13.916966438293455 + ], + [ + "tickets", + -13.91698932647705 + ], + [ + "ght", + -13.9169921875 + ], + [ + "tray", + -13.916998863220217 + ], + [ + "▁pap", + -13.917021751403809 + ], + [ + "▁Gaia", + -13.917046546936035 + ], + [ + "▁entitlements", + -13.917125701904297 + ], + [ + "exposed", + -13.91729736328125 + ], + [ + "NAL", + -13.917313575744627 + ], + [ + "football", + -13.917365074157717 + ], + [ + "Camera", + -13.91737174987793 + ], + [ + "▁knights", + -13.917410850524902 + ], + [ + "spiritual", + -13.917413711547852 + ], + [ + "▁Innova", + -13.917423248291016 + ], + [ + "..!!", + -13.917426109313965 + ], + [ + "VAT", + -13.917438507080078 + ], + [ + "▁strewn", + -13.917457580566406 + ], + [ + "▁VIEW", + -13.917524337768556 + ], + [ + "ermann", + -13.917613983154297 + ], + [ + "MIP", + -13.91761875152588 + ], + [ + "▁Soto", + -13.917638778686523 + ], + [ + "▁Cato", + -13.917656898498535 + ], + [ + "▁Caucasian", + -13.917658805847168 + ], + [ + "▁dissect", + -13.917725563049316 + ], + [ + "▁Grown", + -13.917749404907228 + ], + [ + "adia", + -13.917840957641602 + ], + [ + "essen", + -13.917856216430664 + ], + [ + "lode", + -13.917888641357422 + ], + [ + "▁Robb", + -13.917909622192385 + ], + [ + "Disc", + -13.917941093444824 + ], + [ + "▁necks", + -13.918105125427246 + ], + [ + "▁888-4", + -13.918118476867676 + ], + [ + "palm", + -13.918207168579102 + ], + [ + "313", + -13.918212890625 + ], + [ + "antly", + -13.91824436187744 + ], + [ + "▁idiom", + -13.918261528015137 + ], + [ + "▁gre", + -13.91831874847412 + ], + [ + "▁Privilege", + -13.918319702148438 + ], + [ + "22)", + -13.918378829956056 + ], + [ + "▁Guo", + -13.918392181396484 + ], + [ + "▁hurried", + -13.918421745300291 + ], + [ + "▁14.5", + -13.918437957763672 + ], + [ + "shake", + -13.918448448181152 + 
], + [ + "▁Construct", + -13.918464660644531 + ], + [ + "lino", + -13.918497085571287 + ], + [ + "▁Tension", + -13.918503761291504 + ], + [ + "vian", + -13.918577194213867 + ], + [ + "▁39%", + -13.918601036071776 + ], + [ + "▁extruded", + -13.91866397857666 + ], + [ + "▁solstice", + -13.91866397857666 + ], + [ + "▁Kyrgyzstan", + -13.918665885925291 + ], + [ + "▁Vascular", + -13.918667793273926 + ], + [ + "▁Chaplain", + -13.918668746948242 + ], + [ + "▁Hobbs", + -13.918670654296877 + ], + [ + "▁farthest", + -13.918670654296877 + ], + [ + "▁venison", + -13.918670654296877 + ], + [ + "▁aquifer", + -13.918675422668455 + ], + [ + "▁Corolla", + -13.91867733001709 + ], + [ + "▁tabloid", + -13.918686866760254 + ], + [ + "▁daemon", + -13.91868782043457 + ], + [ + "▁REPORT", + -13.91869068145752 + ], + [ + "▁Radiology", + -13.918712615966797 + ], + [ + "▁amalgamation", + -13.91871738433838 + ], + [ + "▁respiration", + -13.918730735778809 + ], + [ + "▁Fitting", + -13.91873550415039 + ], + [ + "▁Radiator", + -13.91873550415039 + ], + [ + "Fee", + -13.918749809265137 + ], + [ + "ebra", + -13.91876220703125 + ], + [ + "bake", + -13.918792724609377 + ], + [ + "▁transverse", + -13.918793678283691 + ], + [ + "▁glycerin", + -13.91882610321045 + ], + [ + "774", + -13.918827056884766 + ], + [ + "▁Colt", + -13.918863296508787 + ], + [ + "▁Ecological", + -13.918951988220217 + ], + [ + "▁Greeting", + -13.918957710266112 + ], + [ + "▁Severn", + -13.919024467468262 + ], + [ + "▁Penal", + -13.91904640197754 + ], + [ + "▁ohne", + -13.919078826904297 + ], + [ + "▁DIRECT", + -13.919116973876951 + ], + [ + "▁UMass", + -13.919116973876951 + ], + [ + "▁MIX", + -13.919142723083496 + ], + [ + "▁Merge", + -13.919143676757812 + ], + [ + "▁ACH", + -13.919149398803713 + ], + [ + "▁Slash", + -13.919151306152344 + ], + [ + "▁misty", + -13.919190406799316 + ], + [ + "▁Analyzer", + -13.919228553771973 + ], + [ + "▁Mem", + -13.919234275817873 + ], + [ + "▁Raptors", + -13.91923713684082 + ], + [ + "▁marketable", + -13.919249534606934 + ], + [ + "244", + -13.919302940368652 + ], + [ + "▁softens", + -13.919376373291016 + ], + [ + "▁Ajay", + -13.919382095336914 + ], + [ + "▁Steward", + -13.919391632080078 + ], + [ + "▁upped", + -13.91939926147461 + ], + [ + "avis", + -13.919415473937988 + ], + [ + "BSD", + -13.91942310333252 + ], + [ + "init", + -13.919440269470217 + ], + [ + "▁Klopp", + -13.91953945159912 + ], + [ + "▁TMJ", + -13.919541358947754 + ], + [ + "▁perils", + -13.919567108154297 + ], + [ + "▁Gambia", + -13.919607162475586 + ], + [ + "▁Tut", + -13.919645309448242 + ], + [ + "emba", + -13.91967487335205 + ], + [ + "tunes", + -13.919692993164062 + ], + [ + "▁120,000", + -13.91972827911377 + ], + [ + "▁grandparent", + -13.919779777526855 + ], + [ + "Bush", + -13.919848442077637 + ], + [ + "▁Induction", + -13.919882774353027 + ], + [ + "▁ode", + -13.919893264770508 + ], + [ + "▁Fallen", + -13.919913291931152 + ], + [ + "2;", + -13.919998168945312 + ], + [ + "▁Ago", + -13.920037269592283 + ], + [ + "▁Elise", + -13.920042037963867 + ], + [ + "▁(?)", + -13.920116424560549 + ], + [ + "▁Gratitude", + -13.92011833190918 + ], + [ + "▁linkages", + -13.920133590698242 + ], + [ + "▁505", + -13.920204162597656 + ], + [ + "▁Hawking", + -13.920297622680664 + ], + [ + "▁Crypt", + -13.92030143737793 + ], + [ + "▁Eno", + -13.92030429840088 + ], + [ + "▁Swallow", + -13.920352935791016 + ], + [ + "▁Rabin", + -13.920355796813965 + ], + [ + "ABA", + -13.920366287231444 + ], + [ + "▁Dusty", + -13.920381546020508 + ], + [ + "MISSION", + 
-13.920417785644531 + ], + [ + "stocked", + -13.920451164245604 + ], + [ + "Il", + -13.920502662658691 + ], + [ + "KD", + -13.920539855957031 + ], + [ + "▁atleast", + -13.92055606842041 + ], + [ + "▁racially", + -13.920587539672852 + ], + [ + "▁Hob", + -13.92061710357666 + ], + [ + "CBN", + -13.920637130737305 + ], + [ + "imal", + -13.920639038085938 + ], + [ + "▁decorators", + -13.920682907104492 + ], + [ + "▁practised", + -13.92084789276123 + ], + [ + "ucha", + -13.92087173461914 + ], + [ + "vino", + -13.92087745666504 + ], + [ + "altering", + -13.920889854431152 + ], + [ + "Excel", + -13.920916557312012 + ], + [ + "▁LAB", + -13.920920372009276 + ], + [ + "finity", + -13.920955657958984 + ], + [ + "▁Copies", + -13.920976638793944 + ], + [ + "Stainless", + -13.921016693115234 + ], + [ + "Halloween", + -13.921019554138184 + ], + [ + "Critical", + -13.921021461486816 + ], + [ + "Suddenly", + -13.921063423156738 + ], + [ + "aber", + -13.921067237854004 + ], + [ + "▁Watershed", + -13.921069145202637 + ], + [ + "▁41%", + -13.921074867248535 + ], + [ + "▁Caves", + -13.921106338500977 + ], + [ + "▁faves", + -13.92112636566162 + ], + [ + "lma", + -13.921215057373049 + ], + [ + "▁22,000", + -13.921274185180664 + ], + [ + "kHz", + -13.921278953552246 + ], + [ + "▁skeletons", + -13.92135524749756 + ], + [ + "▁Dau", + -13.921412467956545 + ], + [ + "▁Mailing", + -13.92143440246582 + ], + [ + "▁pistols", + -13.921501159667969 + ], + [ + "▁Hud", + -13.921507835388184 + ], + [ + "Mun", + -13.92155933380127 + ], + [ + "312", + -13.921564102172852 + ], + [ + "blin", + -13.92161464691162 + ], + [ + "doll", + -13.92164421081543 + ], + [ + "▁Rosh", + -13.921676635742188 + ], + [ + "▁Ute", + -13.921676635742188 + ], + [ + "Shock", + -13.921703338623049 + ], + [ + "▁livestream", + -13.921758651733398 + ], + [ + "9000", + -13.921788215637209 + ], + [ + "eezy", + -13.921802520751951 + ], + [ + "▁rehearse", + -13.92186450958252 + ], + [ + "▁lego", + -13.921913146972656 + ], + [ + "▁Tungsten", + -13.9219331741333 + ], + [ + "546", + -13.92196559906006 + ], + [ + "src", + -13.922012329101562 + ], + [ + "▁Rectangular", + -13.922027587890623 + ], + [ + "▁conclusive", + -13.922229766845703 + ], + [ + "▁sprints", + -13.922266960144045 + ], + [ + "banks", + -13.922449111938477 + ], + [ + "▁intruder", + -13.92245864868164 + ], + [ + "▁Mendoza", + -13.922466278076172 + ], + [ + "▁furthest", + -13.922466278076172 + ], + [ + "▁irritability", + -13.922466278076172 + ], + [ + "▁forcibly", + -13.922467231750488 + ], + [ + "▁Qantas", + -13.922468185424805 + ], + [ + "▁STREET", + -13.922468185424805 + ], + [ + "▁Github", + -13.922470092773438 + ], + [ + "▁KPMG", + -13.922470092773438 + ], + [ + "▁SUPPORT", + -13.922470092773438 + ], + [ + "▁Sotheby", + -13.922470092773438 + ], + [ + "▁bundling", + -13.92247486114502 + ], + [ + "▁Beaufort", + -13.922475814819336 + ], + [ + "▁archaic", + -13.922475814819336 + ], + [ + "▁Solidarity", + -13.922476768493652 + ], + [ + "▁Luigi", + -13.92248249053955 + ], + [ + "▁antiquity", + -13.9224853515625 + ], + [ + "▁haiku", + -13.922507286071776 + ], + [ + "▁redefining", + -13.922545433044434 + ], + [ + "▁Hackney", + -13.922564506530762 + ], + [ + "▁witchcraft", + -13.92259120941162 + ], + [ + "▁SOP", + -13.922613143920898 + ], + [ + "▁footpath", + -13.922651290893556 + ], + [ + "Dam", + -13.922754287719728 + ], + [ + "▁cram", + -13.922754287719728 + ], + [ + "▁mineralization", + -13.922842025756836 + ], + [ + "▁dreamt", + -13.922857284545898 + ], + [ + "▁Ripper", + -13.922918319702148 + ], + [ 
+ "Button", + -13.922972679138184 + ], + [ + "▁Jolie", + -13.923003196716309 + ], + [ + "▁autographed", + -13.923006057739258 + ], + [ + "rox", + -13.92304515838623 + ], + [ + "–19", + -13.923083305358888 + ], + [ + "▁chow", + -13.923101425170898 + ], + [ + "older", + -13.923128128051758 + ], + [ + "▁overdo", + -13.923182487487791 + ], + [ + "cele", + -13.923216819763184 + ], + [ + "▁republican", + -13.923227310180664 + ], + [ + "▁hooking", + -13.923327445983888 + ], + [ + "▁Jig", + -13.92335033416748 + ], + [ + "▁seventeenth", + -13.923351287841797 + ], + [ + "▁Alessandro", + -13.92335605621338 + ], + [ + "laying", + -13.923394203186035 + ], + [ + "request", + -13.923407554626465 + ], + [ + "▁thickening", + -13.923454284667969 + ], + [ + "▁authorship", + -13.923527717590332 + ], + [ + "▁2012;", + -13.923583984375 + ], + [ + "▁geeky", + -13.923602104187012 + ], + [ + "unia", + -13.923603057861328 + ], + [ + "▁isolates", + -13.923666954040527 + ], + [ + "▁condemning", + -13.923676490783691 + ], + [ + "▁gaskets", + -13.92368507385254 + ], + [ + "▁Defensive", + -13.923707962036133 + ], + [ + "▁Curr", + -13.92372989654541 + ], + [ + "▁tit", + -13.923735618591309 + ], + [ + "▁parser", + -13.923857688903809 + ], + [ + "▁atrial", + -13.923949241638184 + ], + [ + "Buzz", + -13.923983573913574 + ], + [ + "err", + -13.9240083694458 + ], + [ + "Gil", + -13.924046516418455 + ], + [ + "▁BAM", + -13.924149513244627 + ], + [ + "▁Prim", + -13.924184799194336 + ], + [ + "NCA", + -13.924216270446776 + ], + [ + "▁2008).", + -13.92422103881836 + ], + [ + "▁getaways", + -13.924266815185549 + ], + [ + "tima", + -13.924416542053224 + ], + [ + "641", + -13.924421310424805 + ], + [ + "numbered", + -13.924481391906738 + ], + [ + "935", + -13.924552917480469 + ], + [ + "▁Sportsman", + -13.924630165100098 + ], + [ + "▁AHL", + -13.924633026123049 + ], + [ + "▁Thumb", + -13.924665451049805 + ], + [ + "▁Yuri", + -13.92478084564209 + ], + [ + "▁Sergey", + -13.92483139038086 + ], + [ + "▁squa", + -13.924843788146973 + ], + [ + "9.4", + -13.924875259399414 + ], + [ + "▁reaping", + -13.924883842468262 + ], + [ + "examination", + -13.92491340637207 + ], + [ + "Comprehensive", + -13.924920082092283 + ], + [ + "debit", + -13.924928665161133 + ], + [ + "saturated", + -13.924967765808104 + ], + [ + "00:0", + -13.925030708312988 + ], + [ + "▁Mahogany", + -13.92508029937744 + ], + [ + "▁eyeballs", + -13.925141334533691 + ], + [ + "▁cutout", + -13.925230026245115 + ], + [ + "EY", + -13.925244331359863 + ], + [ + "▁Simpsons", + -13.925265312194824 + ], + [ + "▁cadillac", + -13.925359725952148 + ], + [ + "▁Patna", + -13.925374984741213 + ], + [ + "▁herpes", + -13.925396919250488 + ], + [ + "bier", + -13.92540168762207 + ], + [ + "spirited", + -13.925442695617676 + ], + [ + "Wireless", + -13.925498962402344 + ], + [ + "▁Ends", + -13.925507545471191 + ], + [ + "▁Adorable", + -13.925626754760742 + ], + [ + "▁Caller", + -13.92568016052246 + ], + [ + "▁cruisers", + -13.925718307495115 + ], + [ + "Jazz", + -13.925832748413086 + ], + [ + "▁Snell", + -13.925885200500488 + ], + [ + "Workers", + -13.92591667175293 + ], + [ + "▁Commodity", + -13.92593765258789 + ], + [ + "▁Sooner", + -13.925962448120115 + ], + [ + "WHY", + -13.92597770690918 + ], + [ + "▁Ego", + -13.925982475280762 + ], + [ + "▁Metric", + -13.925990104675291 + ], + [ + "▁RICH", + -13.926077842712402 + ], + [ + "▁tumbled", + -13.92607879638672 + ], + [ + "▁whisking", + -13.926104545593262 + ], + [ + "▁downed", + -13.92613410949707 + ], + [ + "▁ETS", + -13.926145553588867 + ], + 
[ + "zes", + -13.926170349121094 + ], + [ + "quip", + -13.92617893218994 + ], + [ + "▁granul", + -13.926264762878418 + ], + [ + "▁Tongue", + -13.926267623901367 + ], + [ + "▁Arboretum", + -13.92628288269043 + ], + [ + "▁Doctrine", + -13.92628288269043 + ], + [ + "▁NOTHING", + -13.92628288269043 + ], + [ + "▁Seychelles", + -13.92628288269043 + ], + [ + "▁clarifies", + -13.92628288269043 + ], + [ + "▁eradication", + -13.92628288269043 + ], + [ + "▁stochastic", + -13.92628288269043 + ], + [ + "▁Mammoth", + -13.926283836364746 + ], + [ + "▁ganache", + -13.926283836364746 + ], + [ + "▁Indi", + -13.926284790039062 + ], + [ + "▁Riesling", + -13.926284790039062 + ], + [ + "▁Palazzo", + -13.926288604736328 + ], + [ + "▁chariot", + -13.926288604736328 + ], + [ + "▁LEVEL", + -13.92629051208496 + ], + [ + "▁Waldorf", + -13.926291465759276 + ], + [ + "▁myocardial", + -13.926292419433594 + ], + [ + "▁Hammock", + -13.926294326782228 + ], + [ + "▁lorry", + -13.926294326782228 + ], + [ + "▁Emulator", + -13.926298141479492 + ], + [ + "▁Biochemistry", + -13.92630672454834 + ], + [ + "▁Fundraiser", + -13.926308631896973 + ], + [ + "▁homosexuality", + -13.926324844360352 + ], + [ + "▁Pursuit", + -13.926325798034668 + ], + [ + "▁marries", + -13.926339149475098 + ], + [ + "▁SoC", + -13.926344871520996 + ], + [ + "▁Horseshoe", + -13.926347732543944 + ], + [ + "▁Nativity", + -13.926353454589844 + ], + [ + "▁Postgraduate", + -13.926376342773438 + ], + [ + "▁milky", + -13.926556587219238 + ], + [ + "▁Kochi", + -13.92656135559082 + ], + [ + "643", + -13.9265718460083 + ], + [ + "▁Cordova", + -13.926583290100098 + ], + [ + "▁comforted", + -13.926591873168944 + ], + [ + "▁CZ", + -13.926613807678224 + ], + [ + "251", + -13.926615715026855 + ], + [ + "▁clustered", + -13.926623344421388 + ], + [ + "mortem", + -13.926628112792969 + ], + [ + "▁Gardener", + -13.92666244506836 + ], + [ + "▁Crook", + -13.926692962646484 + ], + [ + "▁Grayson", + -13.926751136779783 + ], + [ + "sponsor", + -13.92678451538086 + ], + [ + "▁Wanda", + -13.92679214477539 + ], + [ + "educational", + -13.926820755004885 + ], + [ + "▁retin", + -13.926830291748049 + ], + [ + "▁VAN", + -13.926849365234377 + ], + [ + "▁MSG", + -13.926910400390623 + ], + [ + "▁Amid", + -13.926935195922852 + ], + [ + "▁Akbar", + -13.92694091796875 + ], + [ + "cous", + -13.926963806152344 + ], + [ + "▁Solicitor", + -13.926984786987305 + ], + [ + "▁entree", + -13.92702865600586 + ], + [ + "lated", + -13.927057266235352 + ], + [ + "▁Fad", + -13.92714500427246 + ], + [ + "▁ranchers", + -13.927164077758787 + ], + [ + "lver", + -13.92720890045166 + ], + [ + "▁taxonomy", + -13.927218437194824 + ], + [ + "▁flickering", + -13.927254676818848 + ], + [ + "6-9", + -13.92727279663086 + ], + [ + "tau", + -13.927306175231934 + ], + [ + "FV", + -13.927325248718262 + ], + [ + "Andre", + -13.927330017089844 + ], + [ + "▁Therapists", + -13.927391052246094 + ], + [ + "mee", + -13.927407264709473 + ], + [ + "▁FU", + -13.927411079406738 + ], + [ + "586", + -13.92742156982422 + ], + [ + "maa", + -13.927449226379396 + ], + [ + "▁Alternatives", + -13.927460670471191 + ], + [ + "▁Delivered", + -13.927485466003418 + ], + [ + "utra", + -13.927495002746582 + ], + [ + "onta", + -13.92759132385254 + ], + [ + "▁Excess", + -13.92762851715088 + ], + [ + "nham", + -13.927631378173828 + ], + [ + "fp", + -13.92776584625244 + ], + [ + "oxa", + -13.927816390991213 + ], + [ + "param", + -13.927886009216309 + ], + [ + "▁unlocks", + -13.927912712097168 + ], + [ + "8:00", + -13.92797565460205 + ], + [ + "▁Sticky", + 
-13.928025245666504 + ], + [ + "▁punters", + -13.928117752075195 + ], + [ + "▁Qualifying", + -13.928168296813965 + ], + [ + "▁playbook", + -13.92825412750244 + ], + [ + "▁waterway", + -13.928271293640137 + ], + [ + "▁plat", + -13.928311347961426 + ], + [ + "loid", + -13.928316116333008 + ], + [ + "▁misrepresentati", + -13.92836570739746 + ], + [ + "▁GAS", + -13.928431510925291 + ], + [ + "▁pierce", + -13.928484916687012 + ], + [ + "▁townhome", + -13.928504943847656 + ], + [ + "▁(31", + -13.928549766540527 + ], + [ + "▁batsman", + -13.928555488586426 + ], + [ + "packs", + -13.9285888671875 + ], + [ + "sectional", + -13.928630828857422 + ], + [ + "▁groupings", + -13.92863941192627 + ], + [ + "▁rematch", + -13.9286470413208 + ], + [ + "▁bartenders", + -13.928648948669434 + ], + [ + "ritz", + -13.928756713867188 + ], + [ + "▁Kano", + -13.928815841674805 + ], + [ + "Alibaba", + -13.928837776184082 + ], + [ + "75,000", + -13.928844451904297 + ], + [ + "496", + -13.928875923156738 + ], + [ + "435", + -13.928877830505373 + ], + [ + "▁orientated", + -13.928881645202637 + ], + [ + "▁Adolf", + -13.928882598876951 + ], + [ + "malware", + -13.928900718688965 + ], + [ + "roa", + -13.928937911987305 + ], + [ + "▁underworld", + -13.928948402404783 + ], + [ + "▁giver", + -13.92900848388672 + ], + [ + "▁auctioned", + -13.929034233093262 + ], + [ + "trailer", + -13.929049491882324 + ], + [ + "phile", + -13.929099082946776 + ], + [ + "shows", + -13.929171562194824 + ], + [ + "jacket", + -13.92918300628662 + ], + [ + "▁mammal", + -13.929244995117188 + ], + [ + "▁telecoms", + -13.929265975952148 + ], + [ + "▁Oceans", + -13.929288864135742 + ], + [ + "Launched", + -13.929302215576172 + ], + [ + "▁hydrocarbons", + -13.929315567016602 + ], + [ + "pkg", + -13.929353713989258 + ], + [ + "▁Arden", + -13.929402351379396 + ], + [ + "198", + -13.929415702819824 + ], + [ + "▁REF", + -13.929443359375 + ], + [ + "907", + -13.929451942443848 + ], + [ + "▁spectra", + -13.929488182067873 + ], + [ + "CBD", + -13.92951202392578 + ], + [ + "▁intimidate", + -13.929513931274414 + ], + [ + "origin", + -13.92963981628418 + ], + [ + "inos", + -13.929692268371582 + ], + [ + "▁Romano", + -13.929709434509276 + ], + [ + "MEL", + -13.929710388183594 + ], + [ + "▁Roc", + -13.929781913757324 + ], + [ + "812", + -13.929800033569336 + ], + [ + "▁1848", + -13.929871559143066 + ], + [ + "require", + -13.929872512817385 + ], + [ + "▁groomer", + -13.92990779876709 + ], + [ + "▁damping", + -13.929914474487305 + ], + [ + "▁Bacteria", + -13.929949760437012 + ], + [ + "453", + -13.929962158203123 + ], + [ + "▁Tzu", + -13.929965019226074 + ], + [ + "oche", + -13.930004119873049 + ], + [ + "▁accolade", + -13.930065155029297 + ], + [ + "▁kilogram", + -13.930068969726562 + ], + [ + "▁/>", + -13.930088996887209 + ], + [ + "▁protruding", + -13.930113792419434 + ], + [ + "▁Phyllis", + -13.93011474609375 + ], + [ + "▁adversaries", + -13.93011474609375 + ], + [ + "▁satirical", + -13.93011474609375 + ], + [ + "▁superlative", + -13.93011474609375 + ], + [ + "coal", + -13.93012809753418 + ], + [ + "▁AGAIN", + -13.930139541625977 + ], + [ + "▁sneeze", + -13.930140495300291 + ], + [ + "▁throng", + -13.930143356323242 + ], + [ + "▁obstructive", + -13.930153846740724 + ], + [ + "▁envoy", + -13.930161476135254 + ], + [ + "▁Buk", + -13.93016529083252 + ], + [ + "▁waltz", + -13.93017578125 + ], + [ + "▁enterprising", + -13.930176734924316 + ], + [ + "▁hobbit", + -13.93020725250244 + ], + [ + "▁decompression", + -13.93021297454834 + ], + [ + "axe", + -13.930215835571287 
+ ], + [ + "32)", + -13.930225372314451 + ], + [ + "▁Duane", + -13.930245399475098 + ], + [ + "▁corneal", + -13.930251121520996 + ], + [ + "▁Firefly", + -13.930262565612791 + ], + [ + "▁Proposed", + -13.930288314819336 + ], + [ + "▁earmarked", + -13.930312156677246 + ], + [ + "▁stopover", + -13.93031883239746 + ], + [ + "▁soapy", + -13.930328369140623 + ], + [ + "▁Workflow", + -13.93032932281494 + ], + [ + "836", + -13.930363655090332 + ], + [ + "▁lifeless", + -13.930367469787598 + ], + [ + "▁shalt", + -13.93040370941162 + ], + [ + "inky", + -13.930469512939451 + ], + [ + "314", + -13.930479049682615 + ], + [ + "quette", + -13.930534362792969 + ], + [ + "▁lengthwise", + -13.93054485321045 + ], + [ + "▁strategists", + -13.930570602416992 + ], + [ + "4.99", + -13.930606842041016 + ], + [ + "▁Lipp", + -13.930609703063965 + ], + [ + "34)", + -13.93061065673828 + ], + [ + "▁Plated", + -13.93066692352295 + ], + [ + "▁cpu", + -13.930724143981934 + ], + [ + "aille", + -13.930730819702148 + ], + [ + "▁scholastic", + -13.930806159973145 + ], + [ + "▁Gamer", + -13.930838584899902 + ], + [ + "▁STAT", + -13.930838584899902 + ], + [ + "▁wizards", + -13.93088436126709 + ], + [ + "▁Castillo", + -13.93088722229004 + ], + [ + "Mah", + -13.930923461914062 + ], + [ + "▁scarcely", + -13.930932998657228 + ], + [ + "▁sanitizer", + -13.930956840515137 + ], + [ + "▁EEG", + -13.931022644042969 + ], + [ + "QM", + -13.931087493896484 + ], + [ + "weg", + -13.931093215942385 + ], + [ + "▁disseminated", + -13.931145668029783 + ], + [ + "sproportionately", + -13.931241035461426 + ], + [ + "▁robo", + -13.931282997131348 + ], + [ + "▁glean", + -13.931286811828612 + ], + [ + "▁Containers", + -13.931333541870115 + ], + [ + "▁differed", + -13.931357383728027 + ], + [ + "▁excites", + -13.931488990783691 + ], + [ + "powerful", + -13.931509017944336 + ], + [ + "▁telco", + -13.93160915374756 + ], + [ + "▁Layers", + -13.931614875793455 + ], + [ + "▁insensitive", + -13.931617736816406 + ], + [ + "944", + -13.931629180908203 + ], + [ + "▁Abbot", + -13.93163013458252 + ], + [ + "kV", + -13.931635856628418 + ], + [ + "▁droughts", + -13.931720733642578 + ], + [ + "▁printmaking", + -13.931730270385742 + ], + [ + "▁pram", + -13.93173122406006 + ], + [ + "Trace", + -13.931777954101562 + ], + [ + "▁enactment", + -13.93178653717041 + ], + [ + "-2-", + -13.931803703308104 + ], + [ + "erty", + -13.931843757629396 + ], + [ + "solution", + -13.931896209716797 + ], + [ + "▁1.00", + -13.93191146850586 + ], + [ + "795", + -13.931931495666504 + ], + [ + "halt", + -13.93193817138672 + ], + [ + "▁originator", + -13.931955337524414 + ], + [ + "fw", + -13.932073593139648 + ], + [ + "▁Engaging", + -13.932074546813965 + ], + [ + "hills", + -13.932127952575684 + ], + [ + "metrics", + -13.932127952575684 + ], + [ + "▁Districts", + -13.932169914245604 + ], + [ + "▁panes", + -13.932173728942873 + ], + [ + "erin", + -13.932188987731934 + ], + [ + "▁outlier", + -13.93225383758545 + ], + [ + "▁CHS", + -13.932292938232422 + ], + [ + "▁Purse", + -13.93229866027832 + ], + [ + "▁CQ", + -13.932369232177734 + ], + [ + "▁canonical", + -13.932528495788574 + ], + [ + "clare", + -13.932579040527344 + ], + [ + "zad", + -13.93258285522461 + ], + [ + "▁Mott", + -13.932587623596191 + ], + [ + "fet", + -13.93259620666504 + ], + [ + "▁LTC", + -13.932641983032228 + ], + [ + "▁ingestion", + -13.932727813720703 + ], + [ + "▁Duran", + -13.93273639678955 + ], + [ + "SED", + -13.932790756225586 + ], + [ + "determination", + -13.93279266357422 + ], + [ + "reflection", + 
-13.932812690734863 + ], + [ + "Spider", + -13.932827949523926 + ], + [ + "Pilot", + -13.932843208312988 + ], + [ + "▁Exc", + -13.93285083770752 + ], + [ + "Terry", + -13.932981491088867 + ], + [ + "766", + -13.932984352111816 + ], + [ + "▁UHD", + -13.932987213134766 + ], + [ + "613", + -13.933037757873535 + ], + [ + "637", + -13.933050155639648 + ], + [ + "USH", + -13.933050155639648 + ], + [ + "▁resorting", + -13.933085441589355 + ], + [ + "▁Tweets", + -13.933099746704102 + ], + [ + "vera", + -13.93324089050293 + ], + [ + "vec", + -13.933277130126951 + ], + [ + "ining", + -13.933349609375 + ], + [ + "cula", + -13.933351516723633 + ], + [ + "▁Soros", + -13.933392524719238 + ], + [ + "▁liberate", + -13.933399200439451 + ], + [ + "▁Islander", + -13.933404922485352 + ], + [ + "seo", + -13.9334135055542 + ], + [ + "▁Serge", + -13.93348217010498 + ], + [ + "▁proofread", + -13.933485984802246 + ], + [ + "729", + -13.93349838256836 + ], + [ + "programmed", + -13.933502197265623 + ], + [ + "▁Pacifica", + -13.933520317077637 + ], + [ + "▁threading", + -13.933554649353027 + ], + [ + "YR", + -13.933578491210938 + ], + [ + "oxid", + -13.933591842651367 + ], + [ + "▁JOB", + -13.93360424041748 + ], + [ + "▁Verne", + -13.93362045288086 + ], + [ + "Classes", + -13.933627128601074 + ], + [ + "▁ERROR", + -13.933648109436035 + ], + [ + "▁Dynamo", + -13.933650016784668 + ], + [ + "889", + -13.9336576461792 + ], + [ + "afternoon", + -13.93370246887207 + ], + [ + "▁Phra", + -13.933733940124512 + ], + [ + "▁esthetic", + -13.93376350402832 + ], + [ + "▁carpenters", + -13.9337739944458 + ], + [ + "▁canceling", + -13.933780670166016 + ], + [ + "▁bombed", + -13.933794021606444 + ], + [ + "lha", + -13.933853149414062 + ], + [ + "▁bottlenecks", + -13.9338960647583 + ], + [ + "WAL", + -13.933930397033691 + ], + [ + "▁behemoth", + -13.9339599609375 + ], + [ + "▁diuretic", + -13.9339599609375 + ], + [ + "▁ludicrous", + -13.9339599609375 + ], + [ + "▁summarizing", + -13.9339599609375 + ], + [ + "▁PRINCE", + -13.933961868286133 + ], + [ + "▁celiac", + -13.933961868286133 + ], + [ + "▁ENGINE", + -13.933964729309082 + ], + [ + "▁germination", + -13.933964729309082 + ], + [ + "▁geographies", + -13.933965682983398 + ], + [ + "▁Cirque", + -13.93399143218994 + ], + [ + "▁Coinbase", + -13.934000968933104 + ], + [ + "▁Modification", + -13.934012413024902 + ], + [ + "Surely", + -13.934017181396484 + ], + [ + "▁slush", + -13.9340238571167 + ], + [ + "▁Barkley", + -13.934049606323242 + ], + [ + "Keywords", + -13.934074401855469 + ], + [ + "▁Countdown", + -13.934083938598633 + ], + [ + "▁landslide", + -13.934100151062012 + ], + [ + "▁Ebook", + -13.93411636352539 + ], + [ + "▁Algorithm", + -13.934123992919922 + ], + [ + "▁ASK", + -13.934131622314451 + ], + [ + "feature", + -13.93414306640625 + ], + [ + "▁Missionary", + -13.934171676635742 + ], + [ + "ologically", + -13.934173583984377 + ], + [ + "▁Jaguars", + -13.93420124053955 + ], + [ + "▁fusing", + -13.934239387512209 + ], + [ + "split", + -13.934247970581056 + ], + [ + "▁liaise", + -13.934298515319824 + ], + [ + "▁VX", + -13.93431568145752 + ], + [ + "▁bale", + -13.934325218200684 + ], + [ + "▁Nara", + -13.93434238433838 + ], + [ + "▁Verge", + -13.93436336517334 + ], + [ + "▁Bigg", + -13.934371948242188 + ], + [ + "▁Offset", + -13.93441677093506 + ], + [ + "▁Novak", + -13.934422492980955 + ], + [ + "▁restocking", + -13.934465408325195 + ], + [ + "▁Died", + -13.93448257446289 + ], + [ + "▁CCR", + -13.934552192687988 + ], + [ + "cated", + -13.934558868408203 + ], + [ + 
"▁mythological", + -13.934588432312012 + ], + [ + "typing", + -13.934606552124023 + ], + [ + "▁MIN", + -13.934618949890137 + ], + [ + "▁affective", + -13.934645652770996 + ], + [ + "ogenesis", + -13.934646606445312 + ], + [ + "▁dispatcher", + -13.934694290161133 + ], + [ + "▁MySpace", + -13.934776306152344 + ], + [ + "▁ECC", + -13.934822082519531 + ], + [ + "EXT", + -13.934914588928224 + ], + [ + "267", + -13.934916496276855 + ], + [ + "▁gras", + -13.93492603302002 + ], + [ + "clinic", + -13.934968948364258 + ], + [ + "tables", + -13.93501091003418 + ], + [ + "▁swayed", + -13.935020446777344 + ], + [ + "▁Spoiler", + -13.935040473937988 + ], + [ + "▁UG", + -13.935051918029783 + ], + [ + "▁Texan", + -13.935165405273438 + ], + [ + "▁supplementing", + -13.935187339782717 + ], + [ + "▁simulators", + -13.9352388381958 + ], + [ + "vada", + -13.93526554107666 + ], + [ + "chim", + -13.935284614562988 + ], + [ + "436", + -13.935297966003418 + ], + [ + "gins", + -13.935300827026367 + ], + [ + "Kin", + -13.935306549072266 + ], + [ + "▁Expectations", + -13.935364723205566 + ], + [ + "279", + -13.935394287109377 + ], + [ + "dani", + -13.935428619384766 + ], + [ + "▁sixteenth", + -13.935457229614258 + ], + [ + "▁tidbits", + -13.935457229614258 + ], + [ + "895", + -13.935592651367188 + ], + [ + "▁widescreen", + -13.935704231262209 + ], + [ + "▁TECH", + -13.935763359069824 + ], + [ + "▁Primo", + -13.935797691345217 + ], + [ + "EAS", + -13.935830116271973 + ], + [ + "nich", + -13.935853004455566 + ], + [ + "hah", + -13.935861587524414 + ], + [ + "▁standardised", + -13.935930252075195 + ], + [ + "▁beep", + -13.936016082763672 + ], + [ + "183", + -13.93605899810791 + ], + [ + "▁Electron", + -13.93606185913086 + ], + [ + "oen", + -13.936144828796388 + ], + [ + "/40", + -13.936175346374512 + ], + [ + "movies", + -13.936196327209473 + ], + [ + "plans", + -13.936200141906738 + ], + [ + "▁workhorse", + -13.93620491027832 + ], + [ + "Staying", + -13.936227798461914 + ], + [ + "▁Bergman", + -13.93626308441162 + ], + [ + "▁12:1", + -13.936302185058594 + ], + [ + "▁Fools", + -13.93630313873291 + ], + [ + "7;", + -13.936321258544922 + ], + [ + "▁Phyto", + -13.93638038635254 + ], + [ + "cola", + -13.93639087677002 + ], + [ + "Reserve", + -13.93639850616455 + ], + [ + "▁Shel", + -13.936429977416992 + ], + [ + "IPS", + -13.93644905090332 + ], + [ + "▁boxers", + -13.936619758605955 + ], + [ + "olate", + -13.93662929534912 + ], + [ + "Britain", + -13.93674087524414 + ], + [ + "Default", + -13.93674373626709 + ], + [ + "elles", + -13.936789512634276 + ], + [ + "Password", + -13.93680191040039 + ], + [ + "equi", + -13.936803817749023 + ], + [ + "Collins", + -13.93683910369873 + ], + [ + "▁Compu", + -13.936841011047363 + ], + [ + "▁Silica", + -13.936875343322754 + ], + [ + "▁Intense", + -13.936898231506348 + ], + [ + "▁reaffirm", + -13.936915397644045 + ], + [ + "▁candlestick", + -13.936922073364258 + ], + [ + "Rental", + -13.937015533447266 + ], + [ + "▁Malware", + -13.937047004699709 + ], + [ + "▁1.5%", + -13.937101364135742 + ], + [ + "▁XF", + -13.937101364135742 + ], + [ + "QUI", + -13.937173843383787 + ], + [ + "▁Organics", + -13.93718719482422 + ], + [ + "▁45,000", + -13.937259674072266 + ], + [ + "▁gam", + -13.937294960021973 + ], + [ + "Biz", + -13.937308311462402 + ], + [ + "senior", + -13.937342643737791 + ], + [ + "Fra", + -13.93748950958252 + ], + [ + "scene", + -13.93752098083496 + ], + [ + "▁Cushions", + -13.937522888183594 + ], + [ + "▁alkali", + -13.937522888183594 + ], + [ + "CAST", + -13.937529563903809 + ], 
+ [ + "▁Maverick", + -13.937561988830566 + ], + [ + "▁driverless", + -13.937582015991213 + ], + [ + "▁hydroelectric", + -13.937652587890623 + ], + [ + "▁disappointments", + -13.937664031982422 + ], + [ + "▁Twi", + -13.93772792816162 + ], + [ + "532", + -13.937745094299316 + ], + [ + "PRNewswire", + -13.937821388244627 + ], + [ + "▁Disposal", + -13.937821388244627 + ], + [ + "▁Experian", + -13.937821388244627 + ], + [ + "▁Mitzvah", + -13.937821388244627 + ], + [ + "▁benevolent", + -13.937821388244627 + ], + [ + "▁insurmountable", + -13.937821388244627 + ], + [ + "▁masculinity", + -13.937821388244627 + ], + [ + "▁ostensibly", + -13.937821388244627 + ], + [ + "▁Goddard", + -13.937825202941896 + ], + [ + "▁extremities", + -13.937825202941896 + ], + [ + "▁Merseyside", + -13.93783187866211 + ], + [ + "▁Norwalk", + -13.937858581542969 + ], + [ + "▁Nutella", + -13.937860488891602 + ], + [ + "▁bungee", + -13.937862396240234 + ], + [ + "▁Darcy", + -13.937875747680664 + ], + [ + "▁embellishment", + -13.937891960144045 + ], + [ + "▁jarring", + -13.93797492980957 + ], + [ + "▁Bluegrass", + -13.937996864318848 + ], + [ + "▁painkillers", + -13.93800926208496 + ], + [ + "▁PROVIDED", + -13.938034057617188 + ], + [ + "▁backlit", + -13.938044548034668 + ], + [ + "9:00", + -13.938054084777832 + ], + [ + "▁colonialism", + -13.93806266784668 + ], + [ + "▁Tig", + -13.93809413909912 + ], + [ + "▁TMZ", + -13.938135147094728 + ], + [ + "1920", + -13.938178062438965 + ], + [ + "AID", + -13.938196182250977 + ], + [ + "▁Programmer", + -13.938199996948242 + ], + [ + "▁Madeleine", + -13.93824291229248 + ], + [ + "▁Khalid", + -13.938279151916504 + ], + [ + "▁Divers", + -13.938288688659668 + ], + [ + "▁Growers", + -13.938321113586426 + ], + [ + "ESE", + -13.938334465026855 + ], + [ + "▁accesses", + -13.938345909118652 + ], + [ + "▁unearthed", + -13.938387870788574 + ], + [ + "▁Wilbur", + -13.938399314880373 + ], + [ + "▁Pudding", + -13.93845272064209 + ], + [ + "▁idiots", + -13.93846035003662 + ], + [ + "nable", + -13.938505172729492 + ], + [ + "▁custodial", + -13.938507080078123 + ], + [ + "▁senseless", + -13.938543319702148 + ], + [ + "/2008", + -13.938563346862791 + ], + [ + "▁morbidity", + -13.938570022583008 + ], + [ + "esen", + -13.93859577178955 + ], + [ + "▁Refugees", + -13.938608169555664 + ], + [ + "459", + -13.938663482666016 + ], + [ + "▁condiments", + -13.93867301940918 + ], + [ + "LIP", + -13.938718795776367 + ], + [ + "tergenerational", + -13.938720703125 + ], + [ + "▁pessimistic", + -13.938761711120604 + ], + [ + "▁Eur", + -13.938769340515137 + ], + [ + "▁Paperback", + -13.938907623291016 + ], + [ + "Alert", + -13.938940048217772 + ], + [ + "▁uniformed", + -13.93897819519043 + ], + [ + "▁slowest", + -13.93898105621338 + ], + [ + "80)", + -13.939007759094238 + ], + [ + "▁IDP", + -13.939021110534668 + ], + [ + "-91", + -13.939035415649414 + ], + [ + "▁“...", + -13.939064979553224 + ], + [ + "▁Kohli", + -13.93908977508545 + ], + [ + "▁Sticks", + -13.939200401306152 + ], + [ + "rero", + -13.939209938049316 + ], + [ + "▁1872", + -13.939209938049316 + ], + [ + "▁Sandals", + -13.9393310546875 + ], + [ + "▁96%", + -13.939398765563965 + ], + [ + "-93", + -13.939454078674316 + ], + [ + "Holding", + -13.939471244812012 + ], + [ + "▁Proverbs", + -13.939473152160645 + ], + [ + "▁av", + -13.939493179321287 + ], + [ + "▁homer", + -13.939560890197754 + ], + [ + "nou", + -13.939618110656738 + ], + [ + "▁Mok", + -13.939634323120115 + ], + [ + "NTS", + -13.93968105316162 + ], + [ + "▁wird", + -13.939698219299316 + ], + [ + 
"955", + -13.939722061157228 + ], + [ + "lara", + -13.93974494934082 + ], + [ + "Turning", + -13.9397611618042 + ], + [ + "cura", + -13.939831733703612 + ], + [ + "▁shiver", + -13.939837455749512 + ], + [ + "▁IMAP", + -13.93984031677246 + ], + [ + "▁PRODUCT", + -13.939844131469728 + ], + [ + "nathan", + -13.939871788024902 + ], + [ + "Inc", + -13.93992805480957 + ], + [ + ":24", + -13.939996719360352 + ], + [ + "▁rectangles", + -13.940031051635742 + ], + [ + "▁handlebars", + -13.940107345581056 + ], + [ + "IMAGE", + -13.940126419067385 + ], + [ + "OEM", + -13.940140724182127 + ], + [ + "________________", + -13.940171241760254 + ], + [ + "lde", + -13.94021224975586 + ], + [ + "▁buffers", + -13.940248489379885 + ], + [ + "▁Tula", + -13.940253257751465 + ], + [ + "▁bottleneck", + -13.940277099609377 + ], + [ + "▁Hubert", + -13.940295219421388 + ], + [ + "▁millionaires", + -13.940340995788574 + ], + [ + "▁aces", + -13.940382957458496 + ], + [ + "▁loathe", + -13.940523147583008 + ], + [ + "▁exec", + -13.940593719482422 + ], + [ + "569", + -13.940601348876951 + ], + [ + "Governor", + -13.94069004058838 + ], + [ + "dedicated", + -13.940729141235352 + ], + [ + "apur", + -13.940770149230955 + ], + [ + "variable", + -13.940835952758787 + ], + [ + "▁Hepatitis", + -13.940862655639648 + ], + [ + "▁glycol", + -13.94088363647461 + ], + [ + "▁Hearth", + -13.940899848937988 + ], + [ + "▁Cou", + -13.940967559814451 + ], + [ + "▁outfield", + -13.940967559814451 + ], + [ + "▁Clam", + -13.940975189208984 + ], + [ + "▁Robins", + -13.940980911254885 + ], + [ + "akov", + -13.94098949432373 + ], + [ + "▁252", + -13.940999984741213 + ], + [ + "▁multiplex", + -13.941006660461426 + ], + [ + "zzle", + -13.941017150878906 + ], + [ + "▁Ergonomic", + -13.941044807434082 + ], + [ + "destructive", + -13.941067695617676 + ], + [ + "browser", + -13.941076278686523 + ], + [ + "▁dungeons", + -13.941099166870115 + ], + [ + "▁supersede", + -13.94110107421875 + ], + [ + "▁doctrines", + -13.941116333007812 + ], + [ + "▁sex", + -13.941120147705078 + ], + [ + "▁CTS", + -13.941146850585938 + ], + [ + "▁Meetup", + -13.941150665283203 + ], + [ + "▁radiate", + -13.94117259979248 + ], + [ + "▁Habits", + -13.941229820251465 + ], + [ + "▁Embroidery", + -13.94124984741211 + ], + [ + "▁phobia", + -13.941274642944336 + ], + [ + "▁THR", + -13.941327095031738 + ], + [ + "closing", + -13.941328048706056 + ], + [ + "▁mishap", + -13.941483497619627 + ], + [ + "▁Leafs", + -13.94151496887207 + ], + [ + "▁caucus", + -13.941570281982422 + ], + [ + "▁thickens", + -13.941576957702637 + ], + [ + "assembled", + -13.941598892211914 + ], + [ + "▁jalape", + -13.941633224487305 + ], + [ + "▁cp", + -13.94166088104248 + ], + [ + "▁perfectionist", + -13.941690444946287 + ], + [ + "▁defamation", + -13.941697120666504 + ], + [ + "▁franchisor", + -13.941697120666504 + ], + [ + "▁paraphernalia", + -13.941697120666504 + ], + [ + "▁dictionaries", + -13.941699981689451 + ], + [ + "▁Arroyo", + -13.94170093536377 + ], + [ + "▁Missoula", + -13.941704750061035 + ], + [ + "▁Radiant", + -13.941707611083984 + ], + [ + "▁Ealing", + -13.9417085647583 + ], + [ + "▁muslim", + -13.9417085647583 + ], + [ + "▁desolate", + -13.94171142578125 + ], + [ + "▁Encryption", + -13.941723823547363 + ], + [ + "▁priesthood", + -13.941737174987791 + ], + [ + "▁strobe", + -13.94174861907959 + ], + [ + "▁parasitic", + -13.941749572753906 + ], + [ + "▁teeming", + -13.941750526428224 + ], + [ + "▁TAS", + -13.941755294799805 + ], + [ + "▁Antioch", + -13.941766738891602 + ], + [ + "▁Bunk", + 
-13.941780090332031 + ], + [ + "▁Holiness", + -13.941816329956056 + ], + [ + "TCO", + -13.94182300567627 + ], + [ + "mens", + -13.941842079162598 + ], + [ + "▁Fade", + -13.94187355041504 + ], + [ + "▁undeveloped", + -13.941883087158203 + ], + [ + "▁cooperatives", + -13.941884994506836 + ], + [ + "▁baht", + -13.941892623901367 + ], + [ + "▁recreating", + -13.94192600250244 + ], + [ + "▁FOUR", + -13.941926956176758 + ], + [ + "395", + -13.941949844360352 + ], + [ + "▁508", + -13.941960334777832 + ], + [ + "▁BLOG", + -13.94196319580078 + ], + [ + "▁tomography", + -13.941970825195312 + ], + [ + "®)", + -13.941971778869627 + ], + [ + "▁Holl", + -13.941994667053224 + ], + [ + "▁ATA", + -13.94199562072754 + ], + [ + "▁Memoir", + -13.94199562072754 + ], + [ + "▁SSI", + -13.942005157470703 + ], + [ + "MOBILE", + -13.942036628723145 + ], + [ + "anthrop", + -13.942042350769045 + ], + [ + "▁Buffy", + -13.942054748535156 + ], + [ + "▁Marxist", + -13.94209098815918 + ], + [ + "▁riddled", + -13.94210147857666 + ], + [ + "▁Pensions", + -13.94210433959961 + ], + [ + "▁Keygen", + -13.94211769104004 + ], + [ + "▁transmitters", + -13.942235946655272 + ], + [ + "2:20", + -13.942261695861816 + ], + [ + "▁foal", + -13.942337036132812 + ], + [ + "▁anchoring", + -13.942343711853027 + ], + [ + "▁cur", + -13.942353248596191 + ], + [ + "▁1966,", + -13.942357063293455 + ], + [ + "▁clearances", + -13.942461967468262 + ], + [ + "▁paramedics", + -13.942463874816896 + ], + [ + "▁minions", + -13.942465782165527 + ], + [ + "▁Kettle", + -13.942479133605955 + ], + [ + "OKE", + -13.942487716674805 + ], + [ + "▁2014;", + -13.942556381225586 + ], + [ + "▁dang", + -13.94259262084961 + ], + [ + "▁Chandeliers", + -13.942593574523926 + ], + [ + "▁Saab", + -13.942593574523926 + ], + [ + "▁Uninstall", + -13.942625045776367 + ], + [ + "asta", + -13.942627906799316 + ], + [ + "▁Acton", + -13.942744255065918 + ], + [ + "walt", + -13.942824363708496 + ], + [ + "abble", + -13.942841529846191 + ], + [ + "Generous", + -13.942954063415527 + ], + [ + "▁channeling", + -13.943029403686523 + ], + [ + "▁768", + -13.943037033081056 + ], + [ + "trader", + -13.943058013916016 + ], + [ + "▁uploads", + -13.943140029907228 + ], + [ + "▁Elmo", + -13.943229675292969 + ], + [ + "umber", + -13.943304061889648 + ], + [ + "▁Uno", + -13.943374633789062 + ], + [ + "HOST", + -13.943384170532228 + ], + [ + "▁McDermott", + -13.943392753601074 + ], + [ + "▁spacer", + -13.943440437316896 + ], + [ + "▁Aj", + -13.943464279174805 + ], + [ + "▁262", + -13.94351577758789 + ], + [ + "▁wits", + -13.943521499633787 + ], + [ + "CBM", + -13.943615913391112 + ], + [ + "▁curricular", + -13.943621635437012 + ], + [ + "▁REV", + -13.943655014038086 + ], + [ + "▁outcry", + -13.94379425048828 + ], + [ + "kut", + -13.943821907043455 + ], + [ + "▁Aziz", + -13.943829536437988 + ], + [ + "▁enrol", + -13.94383716583252 + ], + [ + "▁Rosenberg", + -13.943946838378906 + ], + [ + "▁Maxx", + -13.943950653076172 + ], + [ + "▁dawned", + -13.943981170654297 + ], + [ + "818", + -13.944040298461914 + ], + [ + "▁Focused", + -13.944083213806152 + ], + [ + "▁trot", + -13.94408893585205 + ], + [ + "▁Occupation", + -13.944147109985352 + ], + [ + "▁Retailer", + -13.944149017333984 + ], + [ + "79.", + -13.944164276123049 + ], + [ + "WIS", + -13.944184303283691 + ], + [ + "▁bravely", + -13.944226264953612 + ], + [ + "▁sig", + -13.944239616394045 + ], + [ + "▁beading", + -13.944252014160156 + ], + [ + "▁Eliminate", + -13.944260597229004 + ], + [ + "following", + -13.944271087646484 + ], + [ + "▁Burgers", + 
-13.944289207458496 + ], + [ + "MORE", + -13.944334030151367 + ], + [ + "bacterial", + -13.944355964660645 + ], + [ + "99.99", + -13.944448471069336 + ], + [ + "Obtain", + -13.94455337524414 + ], + [ + "▁Aff", + -13.944578170776367 + ], + [ + "▁descends", + -13.94459629058838 + ], + [ + "grace", + -13.94462776184082 + ], + [ + "733", + -13.94463062286377 + ], + [ + "Improving", + -13.944664001464844 + ], + [ + "847", + -13.94466781616211 + ], + [ + "WASHINGTON", + -13.944669723510742 + ], + [ + "Angela", + -13.944674491882324 + ], + [ + "Hannah", + -13.944692611694336 + ], + [ + "▁Terrorism", + -13.944705963134766 + ], + [ + "▁educates", + -13.94478988647461 + ], + [ + "miles", + -13.944830894470217 + ], + [ + "Hawaii", + -13.944836616516112 + ], + [ + "css", + -13.94487190246582 + ], + [ + "STO", + -13.944879531860352 + ], + [ + "Woman", + -13.944902420043944 + ], + [ + "▁stifle", + -13.944986343383787 + ], + [ + "Meaning", + -13.9450101852417 + ], + [ + "interior", + -13.945024490356444 + ], + [ + "▁snails", + -13.94505786895752 + ], + [ + "▁frontend", + -13.945168495178224 + ], + [ + "▁Boll", + -13.94526195526123 + ], + [ + "▁introvert", + -13.945304870605469 + ], + [ + "▁excruciating", + -13.945344924926758 + ], + [ + "278", + -13.94536018371582 + ], + [ + "▁hal", + -13.945364952087402 + ], + [ + "▁25-30", + -13.945392608642578 + ], + [ + "▁poaching", + -13.945439338684082 + ], + [ + "▁personalise", + -13.945523262023926 + ], + [ + "Thi", + -13.94554328918457 + ], + [ + "▁Campo", + -13.945552825927734 + ], + [ + "▁amnesty", + -13.94558811187744 + ], + [ + "▁congregate", + -13.94558811187744 + ], + [ + "▁linoleum", + -13.94558811187744 + ], + [ + "▁monotonous", + -13.94558811187744 + ], + [ + "▁1-866-", + -13.945589065551758 + ], + [ + "▁Maguire", + -13.945590019226074 + ], + [ + "▁benefactor", + -13.945590019226074 + ], + [ + "▁humbly", + -13.945595741271973 + ], + [ + "▁Suggestions", + -13.945596694946287 + ], + [ + "▁extremity", + -13.945598602294922 + ], + [ + "▁impeachment", + -13.945605278015137 + ], + [ + "▁Glamour", + -13.945606231689451 + ], + [ + "▁graham", + -13.945624351501465 + ], + [ + "▁cystic", + -13.945667266845703 + ], + [ + "▁Islington", + -13.94566822052002 + ], + [ + "▁Grandmother", + -13.945680618286133 + ], + [ + "▁indonesia", + -13.945687294006348 + ], + [ + "▁Syrup", + -13.945696830749512 + ], + [ + "▁disinfectant", + -13.94571304321289 + ], + [ + "▁cheerleader", + -13.945717811584473 + ], + [ + "▁thunderstorm", + -13.945727348327637 + ], + [ + "▁KING", + -13.945731163024902 + ], + [ + "▁minibus", + -13.945733070373535 + ], + [ + "Bla", + -13.945741653442385 + ], + [ + "▁Whitby", + -13.945752143859863 + ], + [ + "▁likened", + -13.945791244506836 + ], + [ + "▁Maynard", + -13.945796012878418 + ], + [ + "▁affordably", + -13.945806503295898 + ], + [ + "▁pitted", + -13.945806503295898 + ], + [ + "▁treatise", + -13.945806503295898 + ], + [ + "▁chard", + -13.945813179016112 + ], + [ + "▁Fireworks", + -13.945868492126465 + ], + [ + "Shark", + -13.94590950012207 + ], + [ + "-600", + -13.945932388305664 + ], + [ + "▁Episodes", + -13.945952415466309 + ], + [ + "898", + -13.945977210998535 + ], + [ + "▁widows", + -13.945984840393066 + ], + [ + "▁Lili", + -13.945992469787598 + ], + [ + "Cookies", + -13.946022987365724 + ], + [ + "▁prix", + -13.946096420288086 + ], + [ + "▁soothes", + -13.946131706237791 + ], + [ + "▁mustang", + -13.946161270141602 + ], + [ + "shopping", + -13.946218490600586 + ], + [ + "▁prednisone", + -13.94630527496338 + ], + [ + "▁mary", + 
-13.946385383605955 + ], + [ + "▁crawler", + -13.946436882019045 + ], + [ + "▁Slovenian", + -13.94648551940918 + ], + [ + "AIC", + -13.946681022644045 + ], + [ + "▁secondhand", + -13.946687698364258 + ], + [ + "▁supremely", + -13.946706771850586 + ], + [ + "▁culprits", + -13.94674015045166 + ], + [ + "▁Pictured", + -13.946863174438477 + ], + [ + "▁Maher", + -13.94688606262207 + ], + [ + "ergy", + -13.947038650512695 + ], + [ + "▁swans", + -13.947047233581545 + ], + [ + "heating", + -13.947062492370604 + ], + [ + "▁perf", + -13.947078704833984 + ], + [ + "▁Mehta", + -13.947135925292969 + ], + [ + "taro", + -13.947223663330078 + ], + [ + "318", + -13.94724178314209 + ], + [ + "Sharp", + -13.947254180908203 + ], + [ + "kam", + -13.947342872619627 + ], + [ + "▁mah", + -13.94735622406006 + ], + [ + "▁angler", + -13.947432518005373 + ], + [ + "naya", + -13.947447776794434 + ], + [ + "▁MAS", + -13.947562217712402 + ], + [ + "▁disgrace", + -13.9475736618042 + ], + [ + "▁9.1", + -13.947680473327637 + ], + [ + "▁Hae", + -13.947696685791016 + ], + [ + "▁Importantly", + -13.947757720947266 + ], + [ + "aban", + -13.94780158996582 + ], + [ + "ITION", + -13.947810173034668 + ], + [ + "▁Recognizing", + -13.947835922241213 + ], + [ + "▁$60,000", + -13.947871208190918 + ], + [ + "▁Boast", + -13.947962760925291 + ], + [ + "▁Cock", + -13.947975158691406 + ], + [ + "▁1962,", + -13.948052406311035 + ], + [ + "▁Supplemental", + -13.948112487792969 + ], + [ + "▁40+", + -13.948142051696776 + ], + [ + "gong", + -13.948149681091309 + ], + [ + "▁Shining", + -13.948190689086914 + ], + [ + "▁nous", + -13.948200225830078 + ], + [ + "▁181", + -13.948260307312012 + ], + [ + "781", + -13.948264122009276 + ], + [ + "▁Ariz", + -13.948277473449709 + ], + [ + "▁gradients", + -13.948278427124023 + ], + [ + "roller", + -13.948307037353516 + ], + [ + "▁Saved", + -13.94831085205078 + ], + [ + "7.7", + -13.94831371307373 + ], + [ + "usta", + -13.948326110839844 + ], + [ + "rati", + -13.948330879211426 + ], + [ + "▁Blanca", + -13.948334693908691 + ], + [ + "▁$900", + -13.948362350463867 + ], + [ + "▁publicized", + -13.948413848876951 + ], + [ + "▁Pinto", + -13.948434829711914 + ], + [ + "▁Tanks", + -13.948519706726074 + ], + [ + "inator", + -13.948534965515137 + ], + [ + "peck", + -13.948569297790527 + ], + [ + "▁FIR", + -13.948593139648438 + ], + [ + "▁007", + -13.94861888885498 + ], + [ + "DATA", + -13.948644638061523 + ], + [ + "manufacturing", + -13.948691368103027 + ], + [ + ".._", + -13.948716163635254 + ], + [ + "Housing", + -13.948724746704102 + ], + [ + "▁barter", + -13.948738098144531 + ], + [ + "HIS", + -13.94876766204834 + ], + [ + "protective", + -13.948776245117188 + ], + [ + "754", + -13.948779106140137 + ], + [ + "aol", + -13.948782920837402 + ], + [ + "folder", + -13.94887351989746 + ], + [ + "▁75,000", + -13.948875427246094 + ], + [ + "blogging", + -13.948917388916016 + ], + [ + "NCE", + -13.948922157287598 + ], + [ + "▁WIP", + -13.948938369750977 + ], + [ + "Tal", + -13.948942184448242 + ], + [ + "▁UBS", + -13.94896125793457 + ], + [ + "prox", + -13.949019432067873 + ], + [ + "▁Bong", + -13.94902801513672 + ], + [ + "▁67%", + -13.949057579040527 + ], + [ + "division", + -13.949085235595703 + ], + [ + "▁Sle", + -13.949143409729004 + ], + [ + "▁necessitate", + -13.94919490814209 + ], + [ + "▁hiccups", + -13.949207305908203 + ], + [ + "▁executions", + -13.94923973083496 + ], + [ + "▁Wonders", + -13.949254035949709 + ], + [ + "▁dist", + -13.949259757995604 + ], + [ + "Lately", + -13.9492769241333 + ], + [ + "usual", + 
-13.949280738830566 + ], + [ + "Expand", + -13.949286460876465 + ], + [ + "Millions", + -13.949301719665527 + ], + [ + "Tie", + -13.949342727661133 + ], + [ + "▁venom", + -13.949418067932127 + ], + [ + "kka", + -13.94948673248291 + ], + [ + "▁VBA", + -13.949492454528809 + ], + [ + "▁Ljubljana", + -13.94949436187744 + ], + [ + "▁Nehru", + -13.94949436187744 + ], + [ + "▁Rhapsody", + -13.94949436187744 + ], + [ + "▁Zeppelin", + -13.94949436187744 + ], + [ + "▁analgesic", + -13.94949436187744 + ], + [ + "▁dandelion", + -13.94949436187744 + ], + [ + "▁ginseng", + -13.94949436187744 + ], + [ + "▁worrisome", + -13.94949436187744 + ], + [ + "▁Synthesis", + -13.949498176574709 + ], + [ + "Apartment", + -13.949499130249023 + ], + [ + "▁Buckinghamshire", + -13.949501991271973 + ], + [ + "▁Rodrigo", + -13.949501991271973 + ], + [ + "▁Nacional", + -13.949502944946287 + ], + [ + "▁loosing", + -13.949502944946287 + ], + [ + "▁Monopoly", + -13.949503898620604 + ], + [ + "▁Greeley", + -13.94952392578125 + ], + [ + "▁awry", + -13.94953155517578 + ], + [ + "▁piss", + -13.949533462524414 + ], + [ + "▁tenacious", + -13.949562072753906 + ], + [ + "▁currant", + -13.949578285217283 + ], + [ + "▁Duties", + -13.949591636657717 + ], + [ + "▁Satya", + -13.949592590332031 + ], + [ + "▁Waltham", + -13.949602127075195 + ], + [ + "▁bitumen", + -13.949609756469728 + ], + [ + "▁microservices", + -13.949610710144045 + ], + [ + "▁unsurprisingly", + -13.949626922607422 + ], + [ + "▁groan", + -13.94965362548828 + ], + [ + "iddle", + -13.949665069580078 + ], + [ + "▁modifier", + -13.94968032836914 + ], + [ + "▁risers", + -13.94968318939209 + ], + [ + "▁Reflex", + -13.949687004089355 + ], + [ + "▁Collectibles", + -13.949692726135254 + ], + [ + "▁inscriptions", + -13.94969367980957 + ], + [ + "Bud", + -13.949713706970217 + ], + [ + "▁beanie", + -13.949715614318848 + ], + [ + "▁Gabrielle", + -13.949755668640137 + ], + [ + "Lemon", + -13.949764251708984 + ], + [ + "OTC", + -13.949782371520996 + ], + [ + "Foundation", + -13.949811935424805 + ], + [ + "▁Druid", + -13.949836730957031 + ], + [ + "▁Wolff", + -13.949843406677246 + ], + [ + "KIN", + -13.949933052062988 + ], + [ + "▁Priya", + -13.950005531311035 + ], + [ + "▁Participant", + -13.950010299682615 + ], + [ + "Cite", + -13.950048446655272 + ], + [ + "Ahead", + -13.950055122375488 + ], + [ + "▁vacuuming", + -13.950074195861816 + ], + [ + "Castle", + -13.950085639953612 + ], + [ + "▁piloted", + -13.950098991394045 + ], + [ + "▁drooling", + -13.950118064880373 + ], + [ + "▁Latvian", + -13.950278282165527 + ], + [ + "utan", + -13.950284004211426 + ], + [ + "▁Dorado", + -13.950294494628906 + ], + [ + "▁411", + -13.950346946716309 + ], + [ + "▁Edna", + -13.950380325317385 + ], + [ + "▁Kem", + -13.950384140014648 + ], + [ + "▁SIN", + -13.950488090515137 + ], + [ + "▁Dz", + -13.950529098510742 + ], + [ + "▁Rolled", + -13.95066261291504 + ], + [ + "▁Tana", + -13.950773239135742 + ], + [ + "actual", + -13.950811386108398 + ], + [ + "337", + -13.95083236694336 + ], + [ + "▁assertions", + -13.950848579406738 + ], + [ + "▁millet", + -13.95096206665039 + ], + [ + "▁2008)", + -13.950977325439451 + ], + [ + "▁Bait", + -13.950993537902832 + ], + [ + "▁Cavs", + -13.951054573059082 + ], + [ + "EGA", + -13.951066970825195 + ], + [ + "▁TAX", + -13.951066970825195 + ], + [ + "▁TYPE", + -13.95108127593994 + ], + [ + "PVC", + -13.951156616210938 + ], + [ + "▁Loom", + -13.951247215270996 + ], + [ + "umab", + -13.951263427734377 + ], + [ + "hibit", + -13.951271057128906 + ], + [ + "▁1.75", + 
-13.951321601867676 + ], + [ + "onde", + -13.951430320739746 + ], + [ + "▁Chicagoland", + -13.95143222808838 + ], + [ + "Firm", + -13.951440811157228 + ], + [ + "scot", + -13.951461791992188 + ], + [ + "▁Moshe", + -13.951613426208496 + ], + [ + "▁UNIT", + -13.951634407043455 + ], + [ + "▁shuttles", + -13.951651573181152 + ], + [ + "▁Prism", + -13.951656341552734 + ], + [ + "makes", + -13.95179843902588 + ], + [ + "amma", + -13.951839447021484 + ], + [ + "cue", + -13.951849937438965 + ], + [ + "Kai", + -13.951881408691406 + ], + [ + "▁670", + -13.951882362365724 + ], + [ + "evin", + -13.951924324035645 + ], + [ + "rung", + -13.95205307006836 + ], + [ + "▁Fleur", + -13.95206356048584 + ], + [ + "▁blinded", + -13.952163696289062 + ], + [ + "Hyper", + -13.95217990875244 + ], + [ + "drums", + -13.952197074890137 + ], + [ + "Mega", + -13.952363967895508 + ], + [ + "▁angling", + -13.952423095703123 + ], + [ + "slot", + -13.952451705932615 + ], + [ + "▁Zed", + -13.952510833740234 + ], + [ + "bub", + -13.952513694763184 + ], + [ + "▁whiplash", + -13.952570915222168 + ], + [ + "▁Cerv", + -13.952588081359863 + ], + [ + "EAN", + -13.952693939208984 + ], + [ + "Autumn", + -13.9527006149292 + ], + [ + "Greek", + -13.952704429626465 + ], + [ + "external", + -13.95270824432373 + ], + [ + "rw", + -13.952728271484377 + ], + [ + "▁Cleanup", + -13.952730178833008 + ], + [ + "▁GSA", + -13.95286750793457 + ], + [ + "▁Scenes", + -13.952943801879885 + ], + [ + "citizen", + -13.953001976013184 + ], + [ + "▁evaporate", + -13.953014373779297 + ], + [ + "cheek", + -13.95311450958252 + ], + [ + "Hip", + -13.953121185302734 + ], + [ + "391", + -13.953134536743164 + ], + [ + "fellow", + -13.953166007995604 + ], + [ + "others", + -13.95318603515625 + ], + [ + "Drain", + -13.953396797180176 + ], + [ + "▁Artemis", + -13.953404426574709 + ], + [ + "▁immoral", + -13.953404426574709 + ], + [ + "▁Gladiator", + -13.953415870666504 + ], + [ + "▁arbitrarily", + -13.953415870666504 + ], + [ + "▁euphoria", + -13.953415870666504 + ], + [ + "▁geyser", + -13.953415870666504 + ], + [ + "▁mackerel", + -13.953415870666504 + ], + [ + "▁terroir", + -13.95341682434082 + ], + [ + "▁presiding", + -13.953417778015137 + ], + [ + "▁misalignment", + -13.953420639038086 + ], + [ + "▁dodgy", + -13.95342254638672 + ], + [ + "▁Maternity", + -13.953429222106934 + ], + [ + "▁Pendleton", + -13.953435897827148 + ], + [ + "▁sandalwood", + -13.953441619873049 + ], + [ + "▁reassess", + -13.953444480895996 + ], + [ + "▁Melrose", + -13.953446388244627 + ], + [ + "▁hugged", + -13.953453063964844 + ], + [ + "▁Mayfield", + -13.95345401763916 + ], + [ + "▁photosynthesis", + -13.95345973968506 + ], + [ + "▁Buccaneers", + -13.953482627868652 + ], + [ + "▁sunroof", + -13.953487396240234 + ], + [ + "▁annex", + -13.953495979309082 + ], + [ + "▁Arrowhead", + -13.953502655029297 + ], + [ + "▁popularized", + -13.953508377075195 + ], + [ + "umbling", + -13.95352554321289 + ], + [ + "▁proverb", + -13.953536987304688 + ], + [ + "RUSH", + -13.953548431396484 + ], + [ + "▁cabana", + -13.95357894897461 + ], + [ + "▁excitedly", + -13.953580856323242 + ], + [ + "▁fractional", + -13.953619003295898 + ], + [ + "▁receivable", + -13.953627586364746 + ], + [ + "▁perpetrator", + -13.953638076782228 + ], + [ + "▁Cheapest", + -13.953646659851074 + ], + [ + "▁snowflake", + -13.953651428222656 + ], + [ + "inar", + -13.953697204589844 + ], + [ + "▁podcasting", + -13.953746795654297 + ], + [ + "batch", + -13.953768730163574 + ], + [ + "▁Invasion", + -13.953802108764648 + ], + [ + 
"▁Hawthorne", + -13.953804969787598 + ], + [ + "▁Bott", + -13.953855514526367 + ], + [ + "▁Puja", + -13.953951835632324 + ], + [ + "LCS", + -13.954025268554688 + ], + [ + "▁perpetually", + -13.95405101776123 + ], + [ + "uq", + -13.954174041748049 + ], + [ + "▁kn", + -13.954182624816896 + ], + [ + "▁hurtful", + -13.954217910766602 + ], + [ + "dik", + -13.954232215881348 + ], + [ + "sodium", + -13.954333305358888 + ], + [ + "bones", + -13.954404830932615 + ], + [ + "▁Ske", + -13.954486846923828 + ], + [ + "▁Bonuses", + -13.95451545715332 + ], + [ + "▁goers", + -13.954535484313965 + ], + [ + "bugs", + -13.954553604125977 + ], + [ + "1984", + -13.954678535461426 + ], + [ + "▁DJI", + -13.95468521118164 + ], + [ + "6;", + -13.954715728759766 + ], + [ + "▁PERSONAL", + -13.954837799072266 + ], + [ + "571", + -13.954848289489746 + ], + [ + "9.0", + -13.954851150512695 + ], + [ + "▁AARP", + -13.95485782623291 + ], + [ + "dele", + -13.954874038696287 + ], + [ + "▁Bleu", + -13.954919815063477 + ], + [ + "▁Watercolor", + -13.954938888549805 + ], + [ + "5:30", + -13.9551420211792 + ], + [ + "▁Mayfair", + -13.955230712890623 + ], + [ + "▁Errors", + -13.955239295959473 + ], + [ + "▁orthotics", + -13.95526885986328 + ], + [ + "▁pooling", + -13.95530891418457 + ], + [ + "CIN", + -13.955309867858888 + ], + [ + "uko", + -13.955365180969238 + ], + [ + "▁Gren", + -13.955415725708008 + ], + [ + "▁525", + -13.95542335510254 + ], + [ + "▁ghostly", + -13.955440521240234 + ], + [ + "▁Officially", + -13.95559024810791 + ], + [ + "▁Kg", + -13.955599784851074 + ], + [ + "▁Grit", + -13.95560359954834 + ], + [ + "▁crossbar", + -13.955615997314451 + ], + [ + "▁Polit", + -13.9556245803833 + ], + [ + "▁stumps", + -13.955643653869627 + ], + [ + "▁$450", + -13.955647468566896 + ], + [ + "▁Johanna", + -13.955647468566896 + ], + [ + "▁MOR", + -13.955652236938477 + ], + [ + "▁Subsequent", + -13.955660820007324 + ], + [ + "▁Victims", + -13.955707550048828 + ], + [ + "ridden", + -13.955710411071776 + ], + [ + "158", + -13.95576286315918 + ], + [ + "▁XII", + -13.955802917480469 + ], + [ + "▁Mere", + -13.955815315246582 + ], + [ + "Methods", + -13.955843925476074 + ], + [ + "▁1099", + -13.955849647521973 + ], + [ + "▁Translate", + -13.95588493347168 + ], + [ + "Joint", + -13.955972671508787 + ], + [ + "▁flips", + -13.955976486206056 + ], + [ + "▁Blackwell", + -13.955997467041016 + ], + [ + "▁Accessible", + -13.956049919128418 + ], + [ + "ikh", + -13.956122398376465 + ], + [ + "▁soya", + -13.956165313720703 + ], + [ + "Define", + -13.956188201904297 + ], + [ + "▁highlands", + -13.95622444152832 + ], + [ + "▁Ott", + -13.956254005432127 + ], + [ + "▁bolder", + -13.95626163482666 + ], + [ + "▁eject", + -13.956403732299805 + ], + [ + "dep", + -13.956422805786133 + ], + [ + "jd", + -13.956480026245115 + ], + [ + "▁Bowers", + -13.956501960754396 + ], + [ + "▁Blaster", + -13.956528663635254 + ], + [ + "▁Header", + -13.956528663635254 + ], + [ + "▁UR", + -13.956535339355469 + ], + [ + "agna", + -13.956587791442873 + ], + [ + "▁Acadia", + -13.956625938415527 + ], + [ + "/07", + -13.95664882659912 + ], + [ + "▁infrequent", + -13.956672668457031 + ], + [ + "▁Chroma", + -13.956713676452637 + ], + [ + "Melissa", + -13.956744194030762 + ], + [ + "phosphate", + -13.956747055053713 + ], + [ + "▁harmonize", + -13.956754684448242 + ], + [ + "▁Firearms", + -13.956843376159668 + ], + [ + "▁Lun", + -13.956923484802246 + ], + [ + "tention", + -13.956960678100586 + ], + [ + "dozen", + -13.95697021484375 + ], + [ + "phan", + -13.957073211669922 + ], + [ + 
"▁negate", + -13.957074165344238 + ], + [ + "▁obstructed", + -13.957098007202148 + ], + [ + "▁apostle", + -13.95710277557373 + ], + [ + "worked", + -13.957114219665527 + ], + [ + "▁Campers", + -13.957159996032717 + ], + [ + "SLA", + -13.957165718078612 + ], + [ + "Rise", + -13.9572172164917 + ], + [ + "iola", + -13.957229614257812 + ], + [ + "uary", + -13.957231521606444 + ], + [ + "▁Hus", + -13.957265853881836 + ], + [ + "option", + -13.957280158996582 + ], + [ + "required", + -13.957345962524414 + ], + [ + "▁Bundesliga", + -13.957353591918944 + ], + [ + "▁Reggae", + -13.957353591918944 + ], + [ + "▁Suarez", + -13.957353591918944 + ], + [ + "▁pakistan", + -13.957353591918944 + ], + [ + "▁pontoon", + -13.957353591918944 + ], + [ + "▁kimono", + -13.957358360290527 + ], + [ + "▁Josiah", + -13.95736026763916 + ], + [ + "▁Turnpike", + -13.957365036010742 + ], + [ + "▁Cadbury", + -13.957367897033691 + ], + [ + "▁Ambrose", + -13.957371711730955 + ], + [ + "▁Villanova", + -13.95737648010254 + ], + [ + "generative", + -13.957378387451172 + ], + [ + "▁vSphere", + -13.957379341125488 + ], + [ + "Height", + -13.95738697052002 + ], + [ + "Against", + -13.957420349121094 + ], + [ + "▁_______", + -13.957426071166992 + ], + [ + "▁defies", + -13.957438468933104 + ], + [ + "▁Graf", + -13.957446098327637 + ], + [ + "guess", + -13.957490921020508 + ], + [ + "▁FaceTime", + -13.95749568939209 + ], + [ + "8.4", + -13.957496643066406 + ], + [ + "▁delves", + -13.957496643066406 + ], + [ + "▁McK", + -13.957500457763672 + ], + [ + "▁Surge", + -13.95752239227295 + ], + [ + "▁Darn", + -13.957523345947266 + ], + [ + "Issue", + -13.957550048828123 + ], + [ + "917", + -13.957551956176758 + ], + [ + "▁Queenstown", + -13.957562446594238 + ], + [ + "▁Dots", + -13.95756721496582 + ], + [ + "▁Missile", + -13.95756721496582 + ], + [ + "▁unfounded", + -13.957611083984377 + ], + [ + "▁Rely", + -13.95761489868164 + ], + [ + "▁loudspeaker", + -13.957627296447754 + ], + [ + "▁Kiel", + -13.95763111114502 + ], + [ + "▁DAM", + -13.957690238952637 + ], + [ + "▁rumoured", + -13.957698822021484 + ], + [ + "95)", + -13.957754135131836 + ], + [ + "▁transcribed", + -13.957770347595217 + ], + [ + "▁tentatively", + -13.957777976989746 + ], + [ + "cheese", + -13.95780086517334 + ], + [ + "EET", + -13.957802772521973 + ], + [ + "▁Goldsmith", + -13.957812309265137 + ], + [ + "▁koi", + -13.957836151123049 + ], + [ + "Sy", + -13.95784854888916 + ], + [ + "▁Latina", + -13.957863807678224 + ], + [ + "aceous", + -13.9578857421875 + ], + [ + "▁importation", + -13.957930564880373 + ], + [ + "▁Lowry", + -13.958013534545898 + ], + [ + "0.4%", + -13.958038330078123 + ], + [ + "hner", + -13.958056449890137 + ], + [ + "rette", + -13.958080291748049 + ], + [ + "▁Loves", + -13.958110809326172 + ], + [ + "▁OneDrive", + -13.958110809326172 + ], + [ + "▁GAA", + -13.958115577697754 + ], + [ + "▁Cit", + -13.958125114440918 + ], + [ + "Ahh", + -13.958144187927246 + ], + [ + "854", + -13.95817756652832 + ], + [ + "▁unsuccessfully", + -13.958212852478027 + ], + [ + "▁Showers", + -13.958221435546877 + ], + [ + "giant", + -13.958226203918455 + ], + [ + "Aww", + -13.95826816558838 + ], + [ + "▁3:1", + -13.958276748657228 + ], + [ + "▁Frequent", + -13.958316802978516 + ], + [ + "▁11:3", + -13.95832633972168 + ], + [ + "▁grooms", + -13.958365440368652 + ], + [ + "▁pug", + -13.958436965942385 + ], + [ + "gad", + -13.958489418029783 + ], + [ + "inia", + -13.95851993560791 + ], + [ + "▁scorn", + -13.958578109741213 + ], + [ + "Enhance", + -13.958664894104004 + ], + [ + 
"▁Tere", + -13.958712577819824 + ], + [ + "▁swaying", + -13.958738327026367 + ], + [ + "▁Cello", + -13.95877456665039 + ], + [ + "758", + -13.958824157714844 + ], + [ + "▁Oops", + -13.958876609802246 + ], + [ + "75%", + -13.958921432495115 + ], + [ + "▁Bhatt", + -13.95896816253662 + ], + [ + "▁Puri", + -13.95898723602295 + ], + [ + "SHIP", + -13.959046363830566 + ], + [ + "drying", + -13.959070205688477 + ], + [ + "▁timezone", + -13.95909023284912 + ], + [ + "▁fairways", + -13.959161758422852 + ], + [ + "▁Pall", + -13.959275245666504 + ], + [ + "aren", + -13.959277153015137 + ], + [ + "▁haunts", + -13.959300994873049 + ], + [ + "▁Uh", + -13.959310531616213 + ], + [ + "rive", + -13.959328651428224 + ], + [ + "▁silt", + -13.959427833557127 + ], + [ + "IDS", + -13.959436416625977 + ], + [ + "▁POV", + -13.959463119506836 + ], + [ + "Cities", + -13.959501266479492 + ], + [ + "▁Beet", + -13.959593772888184 + ], + [ + "leaning", + -13.959668159484863 + ], + [ + "▁apr", + -13.959698677062988 + ], + [ + "548", + -13.95970058441162 + ], + [ + "▁Dum", + -13.95971393585205 + ], + [ + "▁Badger", + -13.959882736206056 + ], + [ + "hti", + -13.95988941192627 + ], + [ + "▁IMPORTANT", + -13.960015296936035 + ], + [ + "aroo", + -13.960119247436523 + ], + [ + "▁Conc", + -13.960123062133787 + ], + [ + "▁bor", + -13.960138320922852 + ], + [ + "1:20", + -13.960142135620115 + ], + [ + "yuki", + -13.960150718688965 + ], + [ + "▁opportunistic", + -13.96019172668457 + ], + [ + "(3),", + -13.960227966308594 + ], + [ + "▁Stur", + -13.960317611694336 + ], + [ + "▁obstruct", + -13.960326194763184 + ], + [ + "▁biologically", + -13.960370063781738 + ], + [ + "▁Triton", + -13.960381507873535 + ], + [ + "▁Ranges", + -13.960418701171877 + ], + [ + "1]", + -13.960469245910645 + ], + [ + "▁Giorgio", + -13.960503578186035 + ], + [ + "pox", + -13.960527420043944 + ], + [ + "bump", + -13.960532188415527 + ], + [ + "▁suggestive", + -13.96060848236084 + ], + [ + "▁Tested", + -13.960650444030762 + ], + [ + "trophic", + -13.960701942443848 + ], + [ + "Rebecca", + -13.960782051086426 + ], + [ + "▁smog", + -13.96078395843506 + ], + [ + "Memorial", + -13.960786819458008 + ], + [ + "▁diplomas", + -13.960793495178224 + ], + [ + "▁mansions", + -13.96080493927002 + ], + [ + "▁kal", + -13.960877418518066 + ], + [ + "plast", + -13.96088409423828 + ], + [ + "▁mouthful", + -13.960962295532228 + ], + [ + "▁Wiz", + -13.960968017578123 + ], + [ + "842", + -13.96097183227539 + ], + [ + "▁INF", + -13.961048126220703 + ], + [ + "wiring", + -13.961050987243652 + ], + [ + "▁consequent", + -13.961051940917969 + ], + [ + "PON", + -13.96108055114746 + ], + [ + "▁Executives", + -13.961130142211914 + ], + [ + "▁Pint", + -13.961151123046877 + ], + [ + "eley", + -13.961194038391112 + ], + [ + "94.", + -13.961206436157228 + ], + [ + "jing", + -13.96129322052002 + ], + [ + "▁inconsistency", + -13.961305618286133 + ], + [ + "▁irrevocable", + -13.961305618286133 + ], + [ + "▁juncture", + -13.961305618286133 + ], + [ + "▁uterine", + -13.961307525634766 + ], + [ + "▁Starts", + -13.961312294006348 + ], + [ + "▁adoration", + -13.96131420135498 + ], + [ + "▁RESULTS", + -13.961318969726562 + ], + [ + "▁stevia", + -13.961320877075195 + ], + [ + "▁divergent", + -13.961322784423828 + ], + [ + "strap", + -13.961323738098145 + ], + [ + "▁concave", + -13.96132469177246 + ], + [ + "▁prozac", + -13.961331367492676 + ], + [ + "▁bribery", + -13.961349487304688 + ], + [ + "▁Bouquet", + -13.9613618850708 + ], + [ + "omic", + -13.961395263671877 + ], + [ + "▁mk", + 
-13.961421012878418 + ], + [ + "▁multilateral", + -13.961421966552734 + ], + [ + "▁Dunbar", + -13.96142292022705 + ], + [ + "▁substation", + -13.961430549621582 + ], + [ + "▁spanking", + -13.96144199371338 + ], + [ + "▁Godfrey", + -13.961442947387695 + ], + [ + "▁mandala", + -13.961455345153809 + ], + [ + "dual", + -13.96153450012207 + ], + [ + "▁brotherhood", + -13.961543083190918 + ], + [ + "▁gunfire", + -13.961581230163574 + ], + [ + "versions", + -13.961589813232422 + ], + [ + "2′′", + -13.961594581604004 + ], + [ + "▁1855", + -13.961605072021484 + ], + [ + "▁grenade", + -13.961612701416016 + ], + [ + "▁kl", + -13.96161651611328 + ], + [ + "▁Ascot", + -13.961647987365724 + ], + [ + "TCH", + -13.961715698242188 + ], + [ + "gé", + -13.9617280960083 + ], + [ + "▁lumen", + -13.961739540100098 + ], + [ + "▁Vicky", + -13.961753845214844 + ], + [ + "▁pus", + -13.961770057678224 + ], + [ + "Ox", + -13.96178150177002 + ], + [ + "▁cohorts", + -13.961797714233398 + ], + [ + "▁Nevis", + -13.961813926696776 + ], + [ + "▁Buckeyes", + -13.961836814880373 + ], + [ + "Cru", + -13.961868286132812 + ], + [ + "▁landings", + -13.961874008178713 + ], + [ + "Male", + -13.961881637573242 + ], + [ + "▁lifeboat", + -13.961923599243164 + ], + [ + "▁Chambre", + -13.961933135986328 + ], + [ + "▁snuff", + -13.961952209472656 + ], + [ + "▁sidekick", + -13.961978912353516 + ], + [ + "▁Getaway", + -13.961999893188477 + ], + [ + "▁mangoes", + -13.96200180053711 + ], + [ + "▁mentee", + -13.962013244628906 + ], + [ + "▁electrolytes", + -13.962016105651855 + ], + [ + "▁mech", + -13.96204662322998 + ], + [ + "▁1948,", + -13.962069511413574 + ], + [ + "▁Spr", + -13.962115287780762 + ], + [ + "▁png", + -13.96220874786377 + ], + [ + "rights", + -13.962329864501951 + ], + [ + "▁Gott", + -13.962393760681152 + ], + [ + "witt", + -13.962458610534668 + ], + [ + "▁repealed", + -13.962465286254885 + ], + [ + "glia", + -13.962472915649414 + ], + [ + "▁strat", + -13.962638854980469 + ], + [ + "lough", + -13.96269702911377 + ], + [ + "▁hump", + -13.962766647338867 + ], + [ + "aggi", + -13.962830543518066 + ], + [ + "▁weatherproof", + -13.962841033935549 + ], + [ + "▁(33", + -13.962878227233888 + ], + [ + "▁domes", + -13.962952613830566 + ], + [ + "CET", + -13.962960243225098 + ], + [ + "▁tartan", + -13.962970733642578 + ], + [ + "bery", + -13.963013648986816 + ], + [ + "▁CHARGE", + -13.963052749633787 + ], + [ + "▁equivalents", + -13.96306324005127 + ], + [ + "aron", + -13.963074684143066 + ], + [ + "▁takers", + -13.963092803955078 + ], + [ + "▁Garth", + -13.96311092376709 + ], + [ + "▁Nou", + -13.963170051574709 + ], + [ + "▁Slu", + -13.96336841583252 + ], + [ + "▁CPM", + -13.963376998901367 + ], + [ + "GUE", + -13.963406562805176 + ], + [ + "Vent", + -13.963422775268556 + ], + [ + "▁Detect", + -13.963454246520996 + ], + [ + "▁Miley", + -13.963454246520996 + ], + [ + "▁carats", + -13.963475227355955 + ], + [ + "▁Smartphones", + -13.96348476409912 + ], + [ + "Rear", + -13.963531494140623 + ], + [ + "knee", + -13.963568687438965 + ], + [ + "▁Verb", + -13.963647842407228 + ], + [ + "fol", + -13.963652610778809 + ], + [ + "▁walkout", + -13.96366024017334 + ], + [ + "▁Bump", + -13.963763236999512 + ], + [ + "Addressing", + -13.963984489440918 + ], + [ + "▁posit", + -13.964029312133787 + ], + [ + "▁poked", + -13.964044570922852 + ], + [ + "▁loco", + -13.96407699584961 + ], + [ + "▁Spectra", + -13.964152336120604 + ], + [ + "▁piers", + -13.964176177978516 + ], + [ + "Electrical", + -13.964202880859377 + ], + [ + "▁tricked", + 
-13.964299201965332 + ], + [ + "vane", + -13.964417457580566 + ], + [ + "epi", + -13.964509963989258 + ], + [ + "▁Daylight", + -13.964509963989258 + ], + [ + "▁luc", + -13.964513778686523 + ], + [ + "▁Pix", + -13.964587211608888 + ], + [ + "resident", + -13.964662551879885 + ], + [ + "▁Cassie", + -13.964670181274414 + ], + [ + "029", + -13.96467399597168 + ], + [ + "mera", + -13.964704513549805 + ], + [ + "responsive", + -13.9647798538208 + ], + [ + "▁Condor", + -13.964792251586914 + ], + [ + "beyond", + -13.964852333068848 + ], + [ + "excellent", + -13.964853286743164 + ], + [ + "Delicious", + -13.964873313903809 + ], + [ + "▁tarts", + -13.964892387390137 + ], + [ + "Exam", + -13.964905738830566 + ], + [ + "structural", + -13.964908599853516 + ], + [ + "PHA", + -13.964936256408691 + ], + [ + "Toy", + -13.964972496032717 + ], + [ + "vah", + -13.96498680114746 + ], + [ + "applied", + -13.965018272399902 + ], + [ + "▁gui", + -13.96503448486328 + ], + [ + "▁intently", + -13.965038299560549 + ], + [ + "photography", + -13.965051651000977 + ], + [ + "mouse", + -13.965088844299316 + ], + [ + "talented", + -13.965165138244627 + ], + [ + "ylene", + -13.965171813964844 + ], + [ + "uffer", + -13.965222358703612 + ], + [ + "▁38-", + -13.96524143218994 + ], + [ + "mailed", + -13.96524429321289 + ], + [ + "chef", + -13.965253829956056 + ], + [ + "▁Avalanche", + -13.9652738571167 + ], + [ + "▁supremacist", + -13.9652738571167 + ], + [ + "▁braindumps", + -13.965274810791016 + ], + [ + "▁Vilnius", + -13.965275764465332 + ], + [ + "▁Brampton", + -13.965277671813965 + ], + [ + "▁Consolidated", + -13.965283393859863 + ], + [ + "▁nanotechnology", + -13.965286254882812 + ], + [ + "▁rudder", + -13.965290069580078 + ], + [ + "▁jellyfish", + -13.965325355529783 + ], + [ + "▁Creole", + -13.965343475341797 + ], + [ + "▁Burnham", + -13.96534538269043 + ], + [ + "Saudi", + -13.96534824371338 + ], + [ + "▁Fleetwood", + -13.9653902053833 + ], + [ + "▁Henley", + -13.965421676635742 + ], + [ + "▁Foundry", + -13.965424537658691 + ], + [ + "▁Slav", + -13.965435028076172 + ], + [ + "litter", + -13.965441703796388 + ], + [ + "▁omelette", + -13.965441703796388 + ], + [ + "▁Gentleman", + -13.965442657470703 + ], + [ + "IZE", + -13.965484619140623 + ], + [ + "▁Virgil", + -13.965486526489258 + ], + [ + "▁reflector", + -13.965496063232422 + ], + [ + "▁Dungeon", + -13.965520858764648 + ], + [ + "4:30", + -13.965576171875 + ], + [ + "▁Inf", + -13.965587615966797 + ], + [ + "▁Kenmore", + -13.965599060058594 + ], + [ + "▁follicle", + -13.9656400680542 + ], + [ + "▁stoop", + -13.965651512145996 + ], + [ + "▁Jody", + -13.965673446655272 + ], + [ + "owing", + -13.965676307678224 + ], + [ + "cg", + -13.965681076049805 + ], + [ + "▁1,000,000", + -13.965812683105469 + ], + [ + "diff", + -13.965826988220217 + ], + [ + "▁Lust", + -13.965853691101074 + ], + [ + "▁hopelessness", + -13.965886116027832 + ], + [ + "Spray", + -13.96589183807373 + ], + [ + "Corner", + -13.965897560119627 + ], + [ + "Myth", + -13.965903282165527 + ], + [ + "▁Dole", + -13.965904235839844 + ], + [ + "▁rebranded", + -13.965943336486816 + ], + [ + "▁13\"", + -13.965964317321776 + ], + [ + "▁peeps", + -13.96597957611084 + ], + [ + "▁MSM", + -13.965980529785156 + ], + [ + "▁lamented", + -13.965993881225586 + ], + [ + "▁Ll", + -13.965994834899902 + ], + [ + "lberg", + -13.965996742248535 + ], + [ + "aste", + -13.965999603271484 + ], + [ + "7.9", + -13.96601104736328 + ], + [ + "▁307", + -13.966033935546877 + ], + [ + "▁ALA", + -13.966033935546877 + ], + [ + "SKU", + 
-13.96605110168457 + ], + [ + "▁resetting", + -13.966148376464844 + ], + [ + "PEC", + -13.966158866882324 + ], + [ + "▁3+", + -13.966238021850586 + ], + [ + "▁reprinted", + -13.96627140045166 + ], + [ + "sellers", + -13.966286659240724 + ], + [ + "▁scallions", + -13.96630573272705 + ], + [ + "▁Lien", + -13.966354370117188 + ], + [ + "EBA", + -13.966385841369627 + ], + [ + "▁footballer", + -13.96660041809082 + ], + [ + "▁Kinetic", + -13.966626167297363 + ], + [ + "▁Vigil", + -13.966628074645996 + ], + [ + "▁transmits", + -13.966649055480955 + ], + [ + "▁Parque", + -13.96665382385254 + ], + [ + "▁DACA", + -13.966675758361816 + ], + [ + "beds", + -13.966692924499512 + ], + [ + "▁pollute", + -13.966741561889648 + ], + [ + "▁alters", + -13.966761589050291 + ], + [ + "▁29%", + -13.966840744018556 + ], + [ + "▁talc", + -13.966874122619627 + ], + [ + "kama", + -13.966924667358398 + ], + [ + "mum", + -13.96698760986328 + ], + [ + "▁Santana", + -13.96702766418457 + ], + [ + "dini", + -13.96710205078125 + ], + [ + "▁ALT", + -13.967177391052246 + ], + [ + "876", + -13.967198371887209 + ], + [ + "Integrate", + -13.96720027923584 + ], + [ + "▁mocked", + -13.967242240905762 + ], + [ + "Rosa", + -13.967246055603027 + ], + [ + "Om", + -13.967276573181152 + ], + [ + "166", + -13.9673433303833 + ], + [ + "▁sewed", + -13.967411994934082 + ], + [ + "▁Filed", + -13.967435836791992 + ], + [ + "940", + -13.967462539672852 + ], + [ + "ugi", + -13.96760082244873 + ], + [ + "shine", + -13.967626571655272 + ], + [ + "▁Gao", + -13.967633247375488 + ], + [ + "welcome", + -13.967656135559082 + ], + [ + "▁WIC", + -13.967680931091309 + ], + [ + "▁Zor", + -13.967737197875977 + ], + [ + "▁Discussions", + -13.967825889587402 + ], + [ + "▁asian", + -13.967851638793944 + ], + [ + "▁Oster", + -13.967870712280272 + ], + [ + "805", + -13.967917442321776 + ], + [ + "▁redirects", + -13.967971801757812 + ], + [ + "▁318", + -13.96798038482666 + ], + [ + "–20", + -13.96801471710205 + ], + [ + "nka", + -13.968050003051758 + ], + [ + "8-11", + -13.968096733093262 + ], + [ + "Pair", + -13.968103408813477 + ], + [ + "hed", + -13.968124389648438 + ], + [ + "anka", + -13.968205451965332 + ], + [ + "▁8.8", + -13.968228340148926 + ], + [ + "▁CRT", + -13.96824550628662 + ], + [ + "ISTER", + -13.968388557434082 + ], + [ + "▁Improv", + -13.96843147277832 + ], + [ + "▁LAND", + -13.968545913696287 + ], + [ + "672", + -13.968572616577148 + ], + [ + "253", + -13.9686279296875 + ], + [ + "▁incisions", + -13.968643188476562 + ], + [ + "▁Sixty", + -13.96865940093994 + ], + [ + "▁Antony", + -13.968730926513672 + ], + [ + "▁Tubes", + -13.968741416931152 + ], + [ + "▁marinate", + -13.968862533569336 + ], + [ + "▁mitigated", + -13.968866348266602 + ], + [ + "Douglas", + -13.968931198120115 + ], + [ + "Slot", + -13.968941688537598 + ], + [ + "▁MEN", + -13.968951225280762 + ], + [ + "bytes", + -13.968984603881836 + ], + [ + "header", + -13.968999862670898 + ], + [ + "Brother", + -13.969005584716797 + ], + [ + "▁painstaking", + -13.969048500061035 + ], + [ + "▁Schulz", + -13.969066619873049 + ], + [ + "▁relaunch", + -13.969148635864258 + ], + [ + "Bloom", + -13.96920108795166 + ], + [ + "▁Vlad", + -13.96920394897461 + ], + [ + "▁rebrand", + -13.969215393066406 + ], + [ + "tractor", + -13.969240188598633 + ], + [ + "▁evaluator", + -13.969258308410645 + ], + [ + "▁indigestion", + -13.969258308410645 + ], + [ + "▁mammogram", + -13.969258308410645 + ], + [ + "▁1-877-", + -13.96925926208496 + ], + [ + "▁Hoosier", + -13.96925926208496 + ], + [ + "▁intoxicating", + 
-13.96925926208496 + ], + [ + "▁Tufts", + -13.969260215759276 + ], + [ + "Tin", + -13.969265937805176 + ], + [ + "▁Terminator", + -13.969265937805176 + ], + [ + "▁Messaging", + -13.969266891479492 + ], + [ + "▁Amb", + -13.96926975250244 + ], + [ + "▁transfusion", + -13.96928596496582 + ], + [ + "crystalline", + -13.969289779663086 + ], + [ + "▁undulating", + -13.96929168701172 + ], + [ + "▁Borneo", + -13.969314575195312 + ], + [ + "CIP", + -13.96932315826416 + ], + [ + "▁HughesNet", + -13.96932601928711 + ], + [ + "XS", + -13.969351768493652 + ], + [ + "▁Relic", + -13.969385147094728 + ], + [ + "▁deflection", + -13.96939468383789 + ], + [ + "▁UNIX", + -13.969430923461914 + ], + [ + "▁portraiture", + -13.969436645507812 + ], + [ + "▁burdensome", + -13.969439506530762 + ], + [ + "▁Straits", + -13.969457626342772 + ], + [ + "▁agave", + -13.969494819641112 + ], + [ + "Fried", + -13.96950626373291 + ], + [ + "▁UCL", + -13.969544410705566 + ], + [ + "Prep", + -13.969545364379885 + ], + [ + "▁chimneys", + -13.969552993774414 + ], + [ + "weighted", + -13.96955680847168 + ], + [ + "▁implantation", + -13.969575881958008 + ], + [ + "Funding", + -13.96965503692627 + ], + [ + "▁ef", + -13.96967887878418 + ], + [ + "286", + -13.96970272064209 + ], + [ + "Boss", + -13.969715118408203 + ], + [ + "laine", + -13.969722747802734 + ], + [ + "▁SKY", + -13.969732284545898 + ], + [ + "▁grainy", + -13.969761848449709 + ], + [ + "▁devoured", + -13.96981430053711 + ], + [ + "-1980", + -13.969855308532717 + ], + [ + "homes", + -13.96988010406494 + ], + [ + "▁strep", + -13.9699068069458 + ], + [ + "▁touchpad", + -13.969930648803713 + ], + [ + "Represented", + -13.96994400024414 + ], + [ + "ISM", + -13.969956398010254 + ], + [ + "▁Curran", + -13.969964981079102 + ], + [ + "▁SWOT", + -13.969985961914062 + ], + [ + "▁css", + -13.97002124786377 + ], + [ + "▁Selma", + -13.970032691955566 + ], + [ + "picking", + -13.970046043395996 + ], + [ + "▁breakaway", + -13.970070838928224 + ], + [ + "cera", + -13.970105171203612 + ], + [ + "924", + -13.970128059387209 + ], + [ + "▁tripped", + -13.970151901245115 + ], + [ + "URI", + -13.970158576965332 + ], + [ + "▁bristle", + -13.97016429901123 + ], + [ + "oyle", + -13.970166206359863 + ], + [ + "▁barcodes", + -13.97018051147461 + ], + [ + "gou", + -13.97019100189209 + ], + [ + "▁epithelial", + -13.970191955566406 + ], + [ + "▁Configure", + -13.970206260681152 + ], + [ + "▁treason", + -13.97026538848877 + ], + [ + "▁baits", + -13.97033405303955 + ], + [ + "Ang", + -13.970337867736816 + ], + [ + "vern", + -13.97035789489746 + ], + [ + "▁cleft", + -13.970377922058104 + ], + [ + "abra", + -13.970383644104004 + ], + [ + "▁stackable", + -13.97038459777832 + ], + [ + "▁Exe", + -13.970401763916016 + ], + [ + "▁osteo", + -13.97041130065918 + ], + [ + "PAL", + -13.970460891723633 + ], + [ + "▁Pistons", + -13.970492362976074 + ], + [ + "▁inventing", + -13.97052764892578 + ], + [ + "▁Ghi", + -13.970555305480955 + ], + [ + "▁centrepiece", + -13.97061538696289 + ], + [ + "751", + -13.970672607421877 + ], + [ + "kop", + -13.97071361541748 + ], + [ + "▁DAN", + -13.970727920532228 + ], + [ + "▁rearing", + -13.97075653076172 + ], + [ + "▁Donor", + -13.970773696899414 + ], + [ + "Verify", + -13.9708251953125 + ], + [ + "+8", + -13.970858573913574 + ], + [ + "▁CPUs", + -13.970947265625 + ], + [ + "spoken", + -13.971121788024902 + ], + [ + "▁exhibitor", + -13.97112274169922 + ], + [ + "hail", + -13.971182823181152 + ], + [ + "clu", + -13.97119140625 + ], + [ + "▁Landscapes", + -13.97119426727295 + ], + 
[ + "▁Blush", + -13.97120761871338 + ], + [ + "▁Notable", + -13.971226692199709 + ], + [ + "▁(9)", + -13.971588134765623 + ], + [ + "317", + -13.971590042114258 + ], + [ + "▁Complaint", + -13.971590995788574 + ], + [ + "Woo", + -13.971667289733888 + ], + [ + "Supporting", + -13.971814155578612 + ], + [ + "▁Rahm", + -13.971832275390623 + ], + [ + "▁meso", + -13.97183322906494 + ], + [ + "Artists", + -13.971835136413574 + ], + [ + "Boil", + -13.971869468688965 + ], + [ + "SUN", + -13.971877098083496 + ], + [ + "-2005", + -13.971956253051758 + ], + [ + "NFL", + -13.972075462341309 + ], + [ + "▁Drawings", + -13.97207736968994 + ], + [ + "amic", + -13.972210884094238 + ], + [ + "▁1959,", + -13.972233772277832 + ], + [ + "▁prima", + -13.972235679626465 + ], + [ + "▁bootstrap", + -13.972244262695312 + ], + [ + "▁AFB", + -13.972277641296388 + ], + [ + "▁hideous", + -13.972332000732422 + ], + [ + "ibel", + -13.97234058380127 + ], + [ + "ATURE", + -13.972359657287598 + ], + [ + "▁Vegetables", + -13.972360610961914 + ], + [ + "▁actuators", + -13.972381591796877 + ], + [ + "mock", + -13.972394943237305 + ], + [ + "▁Kyr", + -13.972442626953123 + ], + [ + "▁Horde", + -13.972479820251465 + ], + [ + "▁Slides", + -13.972489356994627 + ], + [ + "▁435", + -13.972586631774902 + ], + [ + "YE", + -13.972627639770508 + ], + [ + "mou", + -13.972683906555176 + ], + [ + "Pastor", + -13.972738265991213 + ], + [ + "▁sequels", + -13.972784996032717 + ], + [ + "▁NAM", + -13.97282886505127 + ], + [ + "▁Resist", + -13.972830772399902 + ], + [ + "▁august", + -13.972845077514648 + ], + [ + "GES", + -13.972864151000977 + ], + [ + "rish", + -13.972865104675291 + ], + [ + "▁CBI", + -13.972929000854492 + ], + [ + "▁psychotic", + -13.972960472106934 + ], + [ + "Potential", + -13.973018646240234 + ], + [ + "girlfriend", + -13.97305679321289 + ], + [ + "easing", + -13.97312831878662 + ], + [ + "powder", + -13.973136901855469 + ], + [ + "▁subnet", + -13.973173141479492 + ], + [ + "funk", + -13.973186492919922 + ], + [ + "▁Venues", + -13.973210334777832 + ], + [ + "▁Rud", + -13.973237037658691 + ], + [ + "▁Restrictions", + -13.973258018493652 + ], + [ + "▁uncontrollable", + -13.973258018493652 + ], + [ + "▁annuities", + -13.973258972167969 + ], + [ + "▁Gillespie", + -13.973261833190918 + ], + [ + "▁Lennox", + -13.973261833190918 + ], + [ + "▁Troubleshooting", + -13.973265647888184 + ], + [ + "▁quarries", + -13.9732666015625 + ], + [ + "▁neuropathy", + -13.973267555236816 + ], + [ + "▁Cardiology", + -13.973275184631348 + ], + [ + "▁accomplice", + -13.97327995300293 + ], + [ + "▁Croix", + -13.97328281402588 + ], + [ + "▁McDonnell", + -13.973283767700195 + ], + [ + "▁jingle", + -13.973283767700195 + ], + [ + "▁Harcourt", + -13.973297119140623 + ], + [ + "▁hectare", + -13.973301887512209 + ], + [ + "▁ceasefire", + -13.973308563232422 + ], + [ + "▁Geelong", + -13.973315238952637 + ], + [ + "▁Evernote", + -13.9733304977417 + ], + [ + "▁autopilot", + -13.973334312438965 + ], + [ + "▁Hubble", + -13.973347663879396 + ], + [ + "▁Gillette", + -13.973370552062988 + ], + [ + "▁STEAM", + -13.97337818145752 + ], + [ + "▁Pandit", + -13.973390579223633 + ], + [ + "carotene", + -13.973397254943848 + ], + [ + "▁Sparta", + -13.973520278930664 + ], + [ + "634", + -13.973566055297852 + ], + [ + "raiser", + -13.973572731018066 + ], + [ + "HACK", + -13.973575592041016 + ], + [ + "▁rafters", + -13.973600387573242 + ], + [ + "716", + -13.973647117614746 + ], + [ + "▁visualise", + -13.973652839660645 + ], + [ + "▁futon", + -13.973689079284668 + ], + [ + 
"quiet", + -13.973692893981934 + ], + [ + "▁ORIGINAL", + -13.973714828491213 + ], + [ + "Cable", + -13.973719596862791 + ], + [ + "▁forcefully", + -13.973793029785156 + ], + [ + "Eighteen", + -13.973820686340332 + ], + [ + "onics", + -13.97385025024414 + ], + [ + "▁730", + -13.973896026611328 + ], + [ + "▁pistons", + -13.973901748657228 + ], + [ + "Calc", + -13.974063873291016 + ], + [ + "▁Bret", + -13.97409439086914 + ], + [ + "prising", + -13.974102020263672 + ], + [ + "▁intercom", + -13.97411060333252 + ], + [ + "xen", + -13.974125862121582 + ], + [ + "▁daydream", + -13.974141120910645 + ], + [ + "▁bandage", + -13.97418212890625 + ], + [ + "7\"", + -13.974206924438477 + ], + [ + "▁foraging", + -13.974207878112791 + ], + [ + "Marvel", + -13.974212646484377 + ], + [ + "443", + -13.974213600158691 + ], + [ + "freezer", + -13.97422981262207 + ], + [ + "▁Leach", + -13.974238395690918 + ], + [ + "▁Mojo", + -13.9742431640625 + ], + [ + "jane", + -13.97427749633789 + ], + [ + "-2004", + -13.974316596984863 + ], + [ + "▁certifying", + -13.974337577819824 + ], + [ + "/>", + -13.974352836608888 + ], + [ + "Coll", + -13.974361419677734 + ], + [ + "▁CFM", + -13.974397659301758 + ], + [ + "▁ABV", + -13.974421501159668 + ], + [ + "▁einen", + -13.974431037902832 + ], + [ + "▁10.00", + -13.97443962097168 + ], + [ + "▁HG", + -13.974445343017578 + ], + [ + "▁Conductor", + -13.97447395324707 + ], + [ + "▁Loo", + -13.974474906921388 + ], + [ + "issy", + -13.97452449798584 + ], + [ + "▁perishable", + -13.974560737609863 + ], + [ + "▁hepatic", + -13.974620819091797 + ], + [ + "▁Downloader", + -13.974644660949709 + ], + [ + "rona", + -13.974647521972656 + ], + [ + "ULL", + -13.97470474243164 + ], + [ + "▁Implement", + -13.974733352661133 + ], + [ + "▁rebounded", + -13.97476291656494 + ], + [ + "▁basing", + -13.974803924560549 + ], + [ + "hg", + -13.974836349487305 + ], + [ + "clicking", + -13.974848747253418 + ], + [ + "volv", + -13.97496509552002 + ], + [ + "sef", + -13.975017547607422 + ], + [ + "▁grande", + -13.975019454956056 + ], + [ + "90)", + -13.975055694580078 + ], + [ + "ERO", + -13.975098609924316 + ], + [ + "voi", + -13.975132942199709 + ], + [ + "ritt", + -13.975142478942873 + ], + [ + "calling", + -13.975149154663086 + ], + [ + "▁dismissing", + -13.975180625915527 + ], + [ + "▁sickle", + -13.975189208984377 + ], + [ + "ifiable", + -13.975234031677246 + ], + [ + "ACR", + -13.975259780883787 + ], + [ + "▁Tis", + -13.975297927856444 + ], + [ + "517", + -13.975367546081545 + ], + [ + "aul", + -13.975509643554688 + ], + [ + "▁gull", + -13.975549697875977 + ], + [ + "▁redeeming", + -13.975584983825684 + ], + [ + "elis", + -13.9755859375 + ], + [ + "▁DFS", + -13.975667953491213 + ], + [ + "bard", + -13.975701332092283 + ], + [ + "▁Hawthorn", + -13.975712776184082 + ], + [ + "▁QUE", + -13.975725173950195 + ], + [ + "▁lighthearted", + -13.975770950317385 + ], + [ + "▁succ", + -13.975948333740234 + ], + [ + "▁Manufacture", + -13.975998878479004 + ], + [ + "596", + -13.976031303405762 + ], + [ + "▁740", + -13.97604751586914 + ], + [ + "▁Dorm", + -13.976120948791504 + ], + [ + "tage", + -13.976176261901855 + ], + [ + "▁323", + -13.976263999938965 + ], + [ + "rgy", + -13.976312637329102 + ], + [ + "▁Abhi", + -13.976378440856934 + ], + [ + "▁Stru", + -13.97640323638916 + ], + [ + "timer", + -13.97640609741211 + ], + [ + "gf", + -13.97647762298584 + ], + [ + "RAS", + -13.976519584655762 + ], + [ + "▁Rafa", + -13.976629257202148 + ], + [ + "Notify", + -13.976651191711426 + ], + [ + "Contribute", + 
-13.976675033569336 + ], + [ + "vara", + -13.976797103881836 + ], + [ + "▁Inge", + -13.976856231689451 + ], + [ + "▁MODEL", + -13.976877212524414 + ], + [ + "▁invoking", + -13.976963996887209 + ], + [ + "pah", + -13.977005958557127 + ], + [ + "pods", + -13.977006912231444 + ], + [ + "stain", + -13.977028846740724 + ], + [ + "▁countered", + -13.977030754089355 + ], + [ + "Mineral", + -13.977049827575684 + ], + [ + "LAM", + -13.977093696594238 + ], + [ + "amente", + -13.9771089553833 + ], + [ + "ARK", + -13.977131843566896 + ], + [ + "marriage", + -13.977134704589844 + ], + [ + "CLICK", + -13.97713565826416 + ], + [ + "▁IPC", + -13.977143287658691 + ], + [ + "▁bureaucrats", + -13.977143287658691 + ], + [ + "Adventure", + -13.977148056030272 + ], + [ + "Houston", + -13.977149963378906 + ], + [ + "equipment", + -13.977182388305664 + ], + [ + "582", + -13.977210998535156 + ], + [ + "HW", + -13.977212905883787 + ], + [ + "Saw", + -13.977213859558104 + ], + [ + "drain", + -13.97726058959961 + ], + [ + "▁Gauteng", + -13.97727394104004 + ], + [ + "▁assimilation", + -13.97727394104004 + ], + [ + "▁magnificence", + -13.97727394104004 + ], + [ + "▁sneezing", + -13.97727394104004 + ], + [ + "▁Leopold", + -13.977274894714355 + ], + [ + "▁utilisation", + -13.977274894714355 + ], + [ + "▁entangled", + -13.977279663085938 + ], + [ + "▁Wentworth", + -13.977289199829102 + ], + [ + "bubble", + -13.977296829223633 + ], + [ + "▁Kushner", + -13.977300643920898 + ], + [ + "▁stanza", + -13.977302551269531 + ], + [ + "▁Phnom", + -13.977306365966797 + ], + [ + "▁choppy", + -13.977317810058594 + ], + [ + "▁snorkelling", + -13.977333068847656 + ], + [ + "▁pleas", + -13.977339744567873 + ], + [ + "▁decency", + -13.977370262145996 + ], + [ + "▁Gaelic", + -13.977373123168944 + ], + [ + "64)", + -13.977388381958008 + ], + [ + "▁unqualified", + -13.977455139160156 + ], + [ + "▁Tutorials", + -13.9774751663208 + ], + [ + "▁Mandi", + -13.977506637573242 + ], + [ + "▁Illegal", + -13.977513313293455 + ], + [ + "▁congressman", + -13.977537155151367 + ], + [ + "▁Furious", + -13.977540016174316 + ], + [ + "▁curation", + -13.977540969848633 + ], + [ + "▁sassy", + -13.97754192352295 + ], + [ + "▁viewfinder", + -13.977560997009276 + ], + [ + "▁halved", + -13.97756290435791 + ], + [ + "▁2.00", + -13.97758674621582 + ], + [ + "▁scoops", + -13.977611541748049 + ], + [ + "Stan", + -13.977618217468262 + ], + [ + "▁formalities", + -13.977627754211426 + ], + [ + "▁Westlake", + -13.97762966156006 + ], + [ + "esser", + -13.97764015197754 + ], + [ + "▁blatantly", + -13.977645874023438 + ], + [ + "Ji", + -13.977646827697754 + ], + [ + "▁southbound", + -13.977659225463867 + ], + [ + "dung", + -13.97771453857422 + ], + [ + "▁Herring", + -13.977727890014648 + ], + [ + "NEX", + -13.977761268615724 + ], + [ + "▁deliberation", + -13.977775573730469 + ], + [ + "▁underpinning", + -13.977787017822266 + ], + [ + "▁conservationist", + -13.977792739868164 + ], + [ + "▁Fabrication", + -13.977795600891112 + ], + [ + "Earned", + -13.97782039642334 + ], + [ + "▁Becca", + -13.97787094116211 + ], + [ + "▁appraisers", + -13.977910041809082 + ], + [ + "▁porter", + -13.977943420410156 + ], + [ + "▁FAO", + -13.977959632873535 + ], + [ + "▁cuz", + -13.977968215942385 + ], + [ + "▁TDS", + -13.97797679901123 + ], + [ + "▁treads", + -13.977998733520508 + ], + [ + "▁Aladdin", + -13.97801399230957 + ], + [ + "Operate", + -13.978062629699709 + ], + [ + "successful", + -13.978087425231934 + ], + [ + "nw", + -13.97810173034668 + ], + [ + "▁recollection", + 
-13.978132247924805 + ], + [ + "9-5", + -13.978145599365234 + ], + [ + "Winner", + -13.978147506713867 + ], + [ + "415", + -13.978160858154297 + ], + [ + "nja", + -13.978163719177246 + ], + [ + "▁heady", + -13.978264808654783 + ], + [ + "▁delish", + -13.978275299072266 + ], + [ + "▁72%", + -13.978294372558594 + ], + [ + "▁intolerant", + -13.978320121765137 + ], + [ + "normally", + -13.97836208343506 + ], + [ + "Template", + -13.978485107421877 + ], + [ + "▁cadre", + -13.97849178314209 + ], + [ + "functioning", + -13.978511810302734 + ], + [ + "▁definitively", + -13.978561401367188 + ], + [ + "▁px", + -13.978582382202148 + ], + [ + "▁forceful", + -13.978588104248049 + ], + [ + "Mus", + -13.978628158569336 + ], + [ + "▁mn", + -13.97863483428955 + ], + [ + "▁oaks", + -13.97872829437256 + ], + [ + "HOP", + -13.978747367858888 + ], + [ + "onna", + -13.97876262664795 + ], + [ + "plates", + -13.978790283203123 + ], + [ + "maya", + -13.978901863098145 + ], + [ + "▁Crewe", + -13.978922843933104 + ], + [ + "tear", + -13.978933334350586 + ], + [ + "▁Trav", + -13.97896671295166 + ], + [ + "▁Shiraz", + -13.97903823852539 + ], + [ + "▁headstone", + -13.979046821594238 + ], + [ + "CHR", + -13.97907257080078 + ], + [ + "▁Zig", + -13.979121208190918 + ], + [ + "▁payer", + -13.979182243347168 + ], + [ + "ndon", + -13.979198455810549 + ], + [ + "imation", + -13.979218482971191 + ], + [ + "▁chewed", + -13.979225158691406 + ], + [ + "environment", + -13.979247093200684 + ], + [ + "rti", + -13.97927188873291 + ], + [ + "▁eighties", + -13.97930145263672 + ], + [ + "destruct", + -13.9793062210083 + ], + [ + "▁Motivation", + -13.979307174682615 + ], + [ + "▁Jewels", + -13.979351043701172 + ], + [ + "COT", + -13.979424476623535 + ], + [ + "▁Bondi", + -13.979469299316406 + ], + [ + "▁phy", + -13.979531288146973 + ], + [ + "UPS", + -13.979544639587402 + ], + [ + "▁Glee", + -13.9795503616333 + ], + [ + "preci", + -13.979602813720703 + ], + [ + "▁onerous", + -13.97964859008789 + ], + [ + "▁liberally", + -13.979674339294434 + ], + [ + "▁Bland", + -13.979710578918455 + ], + [ + "umbo", + -13.979714393615724 + ], + [ + "534", + -13.979769706726074 + ], + [ + "plat", + -13.979771614074709 + ], + [ + "DAR", + -13.979804039001465 + ], + [ + "▁'90", + -13.97987174987793 + ], + [ + "mitting", + -13.979928970336914 + ], + [ + "▁pluck", + -13.979957580566406 + ], + [ + "▁Nava", + -13.98000717163086 + ], + [ + "SION", + -13.98008918762207 + ], + [ + "lden", + -13.980114936828612 + ], + [ + "▁Meh", + -13.980179786682127 + ], + [ + "Plate", + -13.980186462402344 + ], + [ + "579", + -13.98018741607666 + ], + [ + "▁Outlets", + -13.980216026306152 + ], + [ + "▁Fidel", + -13.980328559875488 + ], + [ + "wil", + -13.980329513549805 + ], + [ + "▁120-", + -13.980408668518066 + ], + [ + "spun", + -13.980513572692873 + ], + [ + "\"--", + -13.98055934906006 + ], + [ + "▁Padres", + -13.980603218078612 + ], + [ + "412", + -13.980670928955078 + ], + [ + "566", + -13.980725288391112 + ], + [ + "▁elucidate", + -13.980783462524414 + ], + [ + "gung", + -13.980806350708008 + ], + [ + "▁Infra", + -13.980846405029297 + ], + [ + "408", + -13.980854988098145 + ], + [ + "▁resistors", + -13.980899810791016 + ], + [ + "critic", + -13.980964660644531 + ], + [ + "blaze", + -13.981029510498049 + ], + [ + "▁restarting", + -13.981034278869627 + ], + [ + "▁Sochi", + -13.981199264526367 + ], + [ + "tino", + -13.981270790100098 + ], + [ + "maximum", + -13.981284141540527 + ], + [ + "WordPress", + -13.981290817260742 + ], + [ + "▁Frigidaire", + -13.981306076049805 + 
], + [ + "▁deceiving", + -13.981306076049805 + ], + [ + "▁discriminating", + -13.981306076049805 + ], + [ + "▁extermination", + -13.981306076049805 + ], + [ + "▁customisable", + -13.98130702972412 + ], + [ + "▁radiating", + -13.981307983398438 + ], + [ + "▁telescopic", + -13.981308937072754 + ], + [ + "▁Skrill", + -13.98130989074707 + ], + [ + "▁pungent", + -13.98131275177002 + ], + [ + "▁Cactus", + -13.981318473815918 + ], + [ + "▁Microbiology", + -13.981318473815918 + ], + [ + "▁citrate", + -13.981318473815918 + ], + [ + "▁holly", + -13.98132038116455 + ], + [ + "▁armoire", + -13.981340408325195 + ], + [ + "▁Fatima", + -13.98137092590332 + ], + [ + "▁Soleil", + -13.981379508972168 + ], + [ + "Assessment", + -13.981386184692385 + ], + [ + "▁Balcony", + -13.981401443481444 + ], + [ + "▁Correctional", + -13.981411933898926 + ], + [ + "▁Santander", + -13.981416702270508 + ], + [ + "602", + -13.98143482208252 + ], + [ + "PAP", + -13.981435775756836 + ], + [ + "▁azure", + -13.981444358825684 + ], + [ + "▁Brokerage", + -13.98144817352295 + ], + [ + "▁retrograde", + -13.98146152496338 + ], + [ + "▁snowstorm", + -13.98147201538086 + ], + [ + "▁Dessert", + -13.981484413146973 + ], + [ + "▁Cristiano", + -13.981490135192873 + ], + [ + "▁depictions", + -13.981502532958984 + ], + [ + "▁Torque", + -13.9815034866333 + ], + [ + "enhanced", + -13.98151397705078 + ], + [ + "Detailed", + -13.981526374816896 + ], + [ + "▁Woodbridge", + -13.981558799743652 + ], + [ + "qué", + -13.98159122467041 + ], + [ + "▁Zheng", + -13.981610298156738 + ], + [ + "▁Nga", + -13.9816312789917 + ], + [ + "Respect", + -13.981643676757812 + ], + [ + "▁courting", + -13.981717109680176 + ], + [ + "Pierre", + -13.981724739074709 + ], + [ + "▁Drops", + -13.981764793395996 + ], + [ + "Chu", + -13.981805801391602 + ], + [ + "▁negotiator", + -13.981856346130373 + ], + [ + "▁hazelnut", + -13.98185920715332 + ], + [ + "▁goof", + -13.981865882873535 + ], + [ + "▁Usher", + -13.981894493103027 + ], + [ + "enson", + -13.98193073272705 + ], + [ + "▁Bower", + -13.981935501098633 + ], + [ + "▁breather", + -13.982038497924805 + ], + [ + "▁Causes", + -13.982072830200195 + ], + [ + "254", + -13.982085227966309 + ], + [ + "IFY", + -13.98216152191162 + ], + [ + "▁PCC", + -13.982166290283203 + ], + [ + "ulent", + -13.982210159301758 + ], + [ + "daw", + -13.982220649719238 + ], + [ + "oji", + -13.98222541809082 + ], + [ + "▁Attractions", + -13.982226371765137 + ], + [ + "PEL", + -13.982237815856934 + ], + [ + "▁Ching", + -13.982243537902832 + ], + [ + "jr", + -13.98226833343506 + ], + [ + "Totally", + -13.982271194458008 + ], + [ + "▁superpowers", + -13.982279777526855 + ], + [ + "▁unconsciously", + -13.982332229614258 + ], + [ + "▁mot", + -13.982343673706056 + ], + [ + "ader", + -13.982396125793455 + ], + [ + "▁glorify", + -13.982524871826172 + ], + [ + "▁Glove", + -13.982550621032717 + ], + [ + "Engage", + -13.982572555541992 + ], + [ + "▁$400,000", + -13.982573509216309 + ], + [ + "jones", + -13.982582092285156 + ], + [ + "▁equips", + -13.982598304748535 + ], + [ + "Selecting", + -13.982662200927734 + ], + [ + "▁combustible", + -13.982757568359377 + ], + [ + "▁breathless", + -13.98280906677246 + ], + [ + "▁suppressing", + -13.98287868499756 + ], + [ + "▁complet", + -13.982933044433594 + ], + [ + "▁Affair", + -13.982940673828123 + ], + [ + "▁Crawl", + -13.982972145080566 + ], + [ + "▁Synchron", + -13.982991218566896 + ], + [ + "▁Cupcakes", + -13.983081817626951 + ], + [ + "▁Jiu", + -13.9830904006958 + ], + [ + "▁197", + -13.983156204223633 + ], + [ + 
"Coat", + -13.98317527770996 + ], + [ + "▁Dak", + -13.98320770263672 + ], + [ + "▁suspending", + -13.983237266540527 + ], + [ + "▁stiffer", + -13.983254432678224 + ], + [ + "Kings", + -13.98327350616455 + ], + [ + "Forms", + -13.98330783843994 + ], + [ + "lucky", + -13.983394622802734 + ], + [ + "6+", + -13.983428955078123 + ], + [ + "▁pasting", + -13.983442306518556 + ], + [ + "▁scopes", + -13.98344898223877 + ], + [ + "▁Minsk", + -13.983461380004885 + ], + [ + "eber", + -13.983470916748049 + ], + [ + "▁torrents", + -13.983474731445312 + ], + [ + "▁Participate", + -13.983484268188477 + ], + [ + "▁OO", + -13.98360538482666 + ], + [ + "▁CBA", + -13.983609199523926 + ], + [ + "▁aftercare", + -13.98361110687256 + ], + [ + "mv", + -13.983612060546877 + ], + [ + "lava", + -13.98363971710205 + ], + [ + "uity", + -13.983760833740234 + ], + [ + "blogger", + -13.983779907226562 + ], + [ + "uille", + -13.983819007873535 + ], + [ + "▁KH", + -13.98386001586914 + ], + [ + "▁phono", + -13.98399829864502 + ], + [ + "▁EMT", + -13.984001159667969 + ], + [ + "fli", + -13.984013557434082 + ], + [ + "achy", + -13.98403263092041 + ], + [ + "▁seedling", + -13.984067916870115 + ], + [ + "RIE", + -13.984088897705078 + ], + [ + "CLE", + -13.984106063842772 + ], + [ + "▁Illusion", + -13.98416805267334 + ], + [ + "entine", + -13.984210968017578 + ], + [ + "zman", + -13.98423194885254 + ], + [ + "▁Vacations", + -13.984256744384766 + ], + [ + "lter", + -13.984289169311523 + ], + [ + "▁inquired", + -13.984333038330078 + ], + [ + "terior", + -13.98434352874756 + ], + [ + "292", + -13.98446559906006 + ], + [ + "▁pathogenic", + -13.98447322845459 + ], + [ + "▁Freelance", + -13.98454475402832 + ], + [ + "▁Coating", + -13.98454761505127 + ], + [ + "▁disassemble", + -13.984758377075195 + ], + [ + "Heading", + -13.984855651855469 + ], + [ + "▁milliseconds", + -13.984888076782228 + ], + [ + "▁Medici", + -13.984920501708984 + ], + [ + "▁SCC", + -13.984966278076172 + ], + [ + "▁discredit", + -13.98499584197998 + ], + [ + "spiele", + -13.985017776489258 + ], + [ + "▁Appleton", + -13.985149383544922 + ], + [ + "▁54%", + -13.985172271728516 + ], + [ + "▁vu", + -13.985183715820312 + ], + [ + "fas", + -13.985254287719728 + ], + [ + "▁Appreciate", + -13.985279083251951 + ], + [ + "▁1950’", + -13.985336303710938 + ], + [ + "▁Lubbock", + -13.985355377197266 + ], + [ + "▁Parfum", + -13.985355377197266 + ], + [ + "▁chipotle", + -13.985355377197266 + ], + [ + "▁craziness", + -13.985355377197266 + ], + [ + "▁flamboyant", + -13.985355377197266 + ], + [ + "▁fragility", + -13.985355377197266 + ], + [ + "▁perplexed", + -13.985355377197266 + ], + [ + "▁scorpion", + -13.985355377197266 + ], + [ + "▁metformin", + -13.985356330871582 + ], + [ + "▁Weinstein", + -13.985358238220217 + ], + [ + "▁eschew", + -13.985359191894531 + ], + [ + "▁sorbet", + -13.985369682312012 + ], + [ + "▁Delaney", + -13.98537254333496 + ], + [ + "▁Macdonald", + -13.98537826538086 + ], + [ + "▁verifies", + -13.985379219055176 + ], + [ + "▁Otago", + -13.985382080078123 + ], + [ + "▁psychosocial", + -13.98538875579834 + ], + [ + "studded", + -13.98539924621582 + ], + [ + "Indicate", + -13.98540496826172 + ], + [ + "isson", + -13.985413551330566 + ], + [ + "▁Thief", + -13.985413551330566 + ], + [ + "disclosure", + -13.985416412353516 + ], + [ + "▁Bullock", + -13.985418319702148 + ], + [ + "Distance", + -13.985422134399414 + ], + [ + "▁Burst", + -13.985424041748049 + ], + [ + "Cultural", + -13.98542594909668 + ], + [ + "▁hotly", + -13.985441207885742 + ], + [ + "▁LEFT", + 
-13.985451698303224 + ], + [ + "▁unopened", + -13.985458374023438 + ], + [ + "▁supercar", + -13.985487937927246 + ], + [ + "▁Spreadsheet", + -13.985490798950195 + ], + [ + "▁controllable", + -13.985491752624512 + ], + [ + "▁Canaan", + -13.985495567321776 + ], + [ + "▁Precise", + -13.985499382019045 + ], + [ + "▁Isabelle", + -13.98550796508789 + ], + [ + "▁mouthwash", + -13.985515594482422 + ], + [ + "▁Supervision", + -13.985551834106444 + ], + [ + "▁Baird", + -13.98557472229004 + ], + [ + "TSX", + -13.985575675964355 + ], + [ + "▁308", + -13.985576629638672 + ], + [ + "killing", + -13.985596656799316 + ], + [ + "▁Threads", + -13.985629081726074 + ], + [ + "▁Overwatch", + -13.985668182373049 + ], + [ + "▁championing", + -13.985695838928224 + ], + [ + "configured", + -13.985745429992676 + ], + [ + "▁ejected", + -13.985761642456056 + ], + [ + "▁Habit", + -13.985800743103027 + ], + [ + "▁extinguished", + -13.985810279846191 + ], + [ + "▁PEG", + -13.98587703704834 + ], + [ + "SCC", + -13.985877990722656 + ], + [ + "Loss", + -13.985952377319336 + ], + [ + "PING", + -13.985952377319336 + ], + [ + "▁Mutant", + -13.985971450805664 + ], + [ + "▁appraised", + -13.985983848571776 + ], + [ + "▁heralded", + -13.985986709594728 + ], + [ + "▁lipids", + -13.986005783081056 + ], + [ + "▁Spiderman", + -13.986029624938965 + ], + [ + "▁snugly", + -13.986040115356444 + ], + [ + "▁subcontractor", + -13.986166954040527 + ], + [ + "KES", + -13.986210823059082 + ], + [ + "▁Inbox", + -13.986215591430664 + ], + [ + "▁rarest", + -13.986231803894045 + ], + [ + "▁announcer", + -13.986244201660156 + ], + [ + "▁POD", + -13.986281394958496 + ], + [ + "Marco", + -13.986309051513672 + ], + [ + "▁unloaded", + -13.986313819885254 + ], + [ + "Milk", + -13.986320495605469 + ], + [ + "▁cysts", + -13.986376762390137 + ], + [ + "▁HPC", + -13.986425399780272 + ], + [ + "AVA", + -13.986435890197754 + ], + [ + "▁Mong", + -13.986455917358398 + ], + [ + "mium", + -13.986610412597656 + ], + [ + "Organization", + -13.986732482910156 + ], + [ + "▁Leb", + -13.986732482910156 + ], + [ + "Pictured", + -13.986761093139648 + ], + [ + "Wo", + -13.986814498901367 + ], + [ + "▁Blackstone", + -13.98681640625 + ], + [ + "▁Destruction", + -13.986818313598633 + ], + [ + "717", + -13.986924171447754 + ], + [ + "classes", + -13.9869384765625 + ], + [ + "508", + -13.986942291259766 + ], + [ + "▁Viper", + -13.98696517944336 + ], + [ + "▁OSX", + -13.987003326416016 + ], + [ + "▁seared", + -13.98701000213623 + ], + [ + "▁evoked", + -13.987024307250977 + ], + [ + "enic", + -13.987035751342772 + ], + [ + "▁auctioneer", + -13.98714542388916 + ], + [ + "allergenic", + -13.987210273742676 + ], + [ + "▁dari", + -13.987295150756836 + ], + [ + "▁defunct", + -13.987403869628906 + ], + [ + "747", + -13.987409591674805 + ], + [ + "▁celeb", + -13.987422943115234 + ], + [ + "▁Crystals", + -13.987424850463867 + ], + [ + "pard", + -13.987465858459473 + ], + [ + "▁10:5", + -13.987505912780762 + ], + [ + "▁#2:", + -13.98759937286377 + ], + [ + "▁workspaces", + -13.987676620483398 + ], + [ + "▁CREATE", + -13.98769474029541 + ], + [ + "▁Roundup", + -13.987719535827637 + ], + [ + "▁Knu", + -13.987740516662598 + ], + [ + "041", + -13.98776912689209 + ], + [ + "9.8", + -13.987805366516112 + ], + [ + "alloy", + -13.987850189208984 + ], + [ + "▁Agu", + -13.987870216369627 + ], + [ + "721", + -13.987892150878906 + ], + [ + "▁multifunction", + -13.987906455993652 + ], + [ + "▁nitro", + -13.987921714782717 + ], + [ + "▁Sack", + -13.987951278686523 + ], + [ + "village", + 
-13.987977981567385 + ], + [ + "▁Ingrid", + -13.987996101379396 + ], + [ + "▁operas", + -13.988021850585938 + ], + [ + "alfa", + -13.988037109375 + ], + [ + "MARK", + -13.988055229187012 + ], + [ + "▁paychecks", + -13.988085746765137 + ], + [ + "haya", + -13.98810863494873 + ], + [ + "▁ration", + -13.988200187683104 + ], + [ + "Deliver", + -13.988214492797852 + ], + [ + "▁Opus", + -13.98827075958252 + ], + [ + "▁weighting", + -13.988303184509276 + ], + [ + "▁Bann", + -13.98830795288086 + ], + [ + "▁HSC", + -13.988320350646973 + ], + [ + "▁pinched", + -13.988329887390137 + ], + [ + "PRI", + -13.98837184906006 + ], + [ + "▁EH", + -13.988439559936523 + ], + [ + "▁+2", + -13.988665580749512 + ], + [ + "▁robbers", + -13.988698959350586 + ], + [ + "▁fillets", + -13.988704681396484 + ], + [ + "▁Hover", + -13.98875617980957 + ], + [ + "iculate", + -13.988779067993164 + ], + [ + "▁covenants", + -13.988816261291504 + ], + [ + "hull", + -13.988846778869627 + ], + [ + "▁Sali", + -13.988901138305664 + ], + [ + "Seek", + -13.988961219787598 + ], + [ + "951", + -13.98900032043457 + ], + [ + "ktor", + -13.989019393920898 + ], + [ + "▁Gator", + -13.989112854003906 + ], + [ + "▁Mustangs", + -13.989165306091309 + ], + [ + "iform", + -13.989180564880373 + ], + [ + "▁(*", + -13.9891939163208 + ], + [ + "NAN", + -13.98936653137207 + ], + [ + "▁connoisseur", + -13.98936653137207 + ], + [ + "▁retard", + -13.989398956298828 + ], + [ + "▁Amritsar", + -13.989419937133787 + ], + [ + "▁Liechtenstein", + -13.989419937133787 + ], + [ + "▁Reebok", + -13.989419937133787 + ], + [ + "▁bauxite", + -13.989419937133787 + ], + [ + "▁conjecture", + -13.989419937133787 + ], + [ + "▁reproducible", + -13.989419937133787 + ], + [ + "▁acquitted", + -13.989422798156738 + ], + [ + "▁Prakash", + -13.989426612854004 + ], + [ + "▁diffraction", + -13.98942756652832 + ], + [ + "▁PUBG", + -13.989428520202637 + ], + [ + "▁Paddington", + -13.98943328857422 + ], + [ + "▁HGTV", + -13.989435195922852 + ], + [ + "▁electrostatic", + -13.989435195922852 + ], + [ + "▁extortion", + -13.989436149597168 + ], + [ + "▁Vikram", + -13.989441871643066 + ], + [ + "▁Dickson", + -13.989449501037598 + ], + [ + "▁Holloway", + -13.989452362060549 + ], + [ + "▁ratification", + -13.989455223083496 + ], + [ + "▁Britney", + -13.989456176757812 + ], + [ + "▁PACKAGE", + -13.989459991455078 + ], + [ + "556", + -13.98946475982666 + ], + [ + "Excuse", + -13.989471435546877 + ], + [ + "▁Sentence", + -13.989474296569824 + ], + [ + "generating", + -13.98947525024414 + ], + [ + "▁USPTO", + -13.989487648010254 + ], + [ + "▁interactivity", + -13.98951816558838 + ], + [ + "▁unregulated", + -13.989532470703123 + ], + [ + "▁Causeway", + -13.989554405212402 + ], + [ + "▁Discord", + -13.989561080932615 + ], + [ + "Palestinian", + -13.989577293395996 + ], + [ + "Guitar", + -13.989584922790527 + ], + [ + "Budget", + -13.989603996276855 + ], + [ + "Seattle", + -13.989606857299805 + ], + [ + "zaki", + -13.98960781097412 + ], + [ + "Oracle", + -13.989608764648438 + ], + [ + "predict", + -13.989630699157717 + ], + [ + "Denver", + -13.98964786529541 + ], + [ + "▁Godzilla", + -13.989665031433104 + ], + [ + "▁parenthood", + -13.989665031433104 + ], + [ + "▁mani", + -13.989683151245115 + ], + [ + "fetched", + -13.989778518676758 + ], + [ + "▁capping", + -13.989797592163086 + ], + [ + "thorn", + -13.989809036254885 + ], + [ + "Johnny", + -13.989815711975098 + ], + [ + "▁deceptively", + -13.989852905273438 + ], + [ + "▁equalizer", + -13.989864349365234 + ], + [ + "▁nettle", + -13.9899263381958 + 
], + [ + "▁seventies", + -13.989967346191406 + ], + [ + "▁Pee", + -13.990004539489746 + ], + [ + "roz", + -13.990007400512695 + ], + [ + "▁reforming", + -13.99006175994873 + ], + [ + "▁silo", + -13.99006462097168 + ], + [ + "novate", + -13.990068435668944 + ], + [ + "▁mathematician", + -13.990069389343262 + ], + [ + "▁coz", + -13.990169525146484 + ], + [ + "▁UCI", + -13.990184783935549 + ], + [ + "wand", + -13.990256309509276 + ], + [ + "VGA", + -13.990338325500488 + ], + [ + "spoon", + -13.99034595489502 + ], + [ + "coating", + -13.990365982055664 + ], + [ + "▁placenta", + -13.990410804748535 + ], + [ + "rika", + -13.99049949645996 + ], + [ + "Wishing", + -13.990594863891602 + ], + [ + "▁typographical", + -13.990594863891602 + ], + [ + "▁Bree", + -13.990608215332031 + ], + [ + "▁Fam", + -13.990641593933104 + ], + [ + "726", + -13.990707397460938 + ], + [ + "▁4-8", + -13.990720748901367 + ], + [ + "▁PPG", + -13.990732192993164 + ], + [ + "▁Occupy", + -13.99084186553955 + ], + [ + "PAN", + -13.990947723388672 + ], + [ + "wallet", + -13.990949630737305 + ], + [ + "▁tweeter", + -13.99100112915039 + ], + [ + "ibe", + -13.991065979003906 + ], + [ + "▁viewable", + -13.991106033325195 + ], + [ + "▁doggy", + -13.991111755371094 + ], + [ + "wool", + -13.991164207458496 + ], + [ + "▁stables", + -13.991193771362305 + ], + [ + "FH", + -13.991219520568848 + ], + [ + "Screenshot", + -13.991230010986328 + ], + [ + "PLO", + -13.991254806518556 + ], + [ + "▁Dyke", + -13.991263389587402 + ], + [ + "▁Hoff", + -13.991388320922852 + ], + [ + "orum", + -13.991410255432127 + ], + [ + "▁Winn", + -13.991419792175291 + ], + [ + "lured", + -13.991548538208008 + ], + [ + "submitted", + -13.99155616760254 + ], + [ + "761", + -13.991610527038574 + ], + [ + "▁budgeted", + -13.991656303405762 + ], + [ + "tius", + -13.99167823791504 + ], + [ + "▁Barnard", + -13.991683959960938 + ], + [ + "detail", + -13.991703033447266 + ], + [ + "▁moderators", + -13.991703033447266 + ], + [ + "8,500", + -13.991724967956545 + ], + [ + "▁Griffiths", + -13.991726875305176 + ], + [ + "ryan", + -13.99173641204834 + ], + [ + "▁spade", + -13.9918212890625 + ], + [ + "▁utilises", + -13.991822242736816 + ], + [ + "▁vari", + -13.991856575012209 + ], + [ + "Pr", + -13.991944313049316 + ], + [ + "▁dispensers", + -13.99207592010498 + ], + [ + "▁Pamper", + -13.992095947265623 + ], + [ + "▁AHA", + -13.992119789123535 + ], + [ + "▁oppress", + -13.992154121398926 + ], + [ + "▁oversize", + -13.99217128753662 + ], + [ + "▁TCM", + -13.992280960083008 + ], + [ + "▁backdoor", + -13.99232006072998 + ], + [ + "acher", + -13.992338180541992 + ], + [ + "iert", + -13.99240493774414 + ], + [ + "▁Creamy", + -13.992431640625 + ], + [ + "orah", + -13.992496490478516 + ], + [ + "▁teeny", + -13.992573738098145 + ], + [ + "363", + -13.992646217346191 + ], + [ + "▁grossly", + -13.99265193939209 + ], + [ + "lysis", + -13.992658615112305 + ], + [ + "ffi", + -13.9927396774292 + ], + [ + "▁slicer", + -13.992813110351562 + ], + [ + "▁Conti", + -13.99290657043457 + ], + [ + "localhost", + -13.992916107177734 + ], + [ + "▁hogs", + -13.992928504943848 + ], + [ + "▁Sax", + -13.992968559265137 + ], + [ + "hanging", + -13.992990493774414 + ], + [ + "▁whirl", + -13.993002891540527 + ], + [ + "857", + -13.993047714233398 + ], + [ + "▁309", + -13.993056297302246 + ], + [ + "▁soiled", + -13.993188858032228 + ], + [ + "conduct", + -13.9932222366333 + ], + [ + "▁aprons", + -13.99322509765625 + ], + [ + "bali", + -13.99331283569336 + ], + [ + "▁mirroring", + -13.993313789367676 + ], + [ + 
"Thirdly", + -13.993314743041992 + ], + [ + "▁Hacking", + -13.993321418762209 + ], + [ + "dman", + -13.993349075317385 + ], + [ + "▁pho", + -13.9933500289917 + ], + [ + "▁37.5", + -13.993435859680176 + ], + [ + "▁710", + -13.993438720703123 + ], + [ + "433", + -13.993485450744627 + ], + [ + "▁Optimisation", + -13.993501663208008 + ], + [ + "▁accomodate", + -13.993501663208008 + ], + [ + "▁centrifuge", + -13.993501663208008 + ], + [ + "▁ghetto", + -13.993501663208008 + ], + [ + "▁prophecies", + -13.993501663208008 + ], + [ + "▁rattling", + -13.993501663208008 + ], + [ + "▁rudimentary", + -13.993501663208008 + ], + [ + "▁caloric", + -13.993502616882324 + ], + [ + "▁gastronomy", + -13.99350357055664 + ], + [ + "▁pagoda", + -13.993505477905272 + ], + [ + "▁titular", + -13.99350643157959 + ], + [ + "▁Magellan", + -13.993507385253906 + ], + [ + "▁sepsis", + -13.993507385253906 + ], + [ + "▁Jockey", + -13.993513107299805 + ], + [ + "▁corral", + -13.993515968322754 + ], + [ + "▁neurology", + -13.993515968322754 + ], + [ + "▁Herrera", + -13.99351692199707 + ], + [ + "▁Synod", + -13.99351978302002 + ], + [ + "▁Sienna", + -13.993522644042969 + ], + [ + "▁Luncheon", + -13.993523597717283 + ], + [ + "▁Schumer", + -13.993535041809082 + ], + [ + "▁Morrow", + -13.993552207946776 + ], + [ + "▁Paragon", + -13.993558883666992 + ], + [ + "▁Supporters", + -13.993586540222168 + ], + [ + "▁Brat", + -13.99359130859375 + ], + [ + "▁Oversight", + -13.9935941696167 + ], + [ + "▁Ramsay", + -13.993605613708496 + ], + [ + "▁sandpaper", + -13.99361801147461 + ], + [ + "▁submersible", + -13.99361801147461 + ], + [ + "▁Ennis", + -13.993629455566406 + ], + [ + "▁DIRECTV", + -13.993639945983888 + ], + [ + "▁vocalists", + -13.993671417236328 + ], + [ + "▁Jonny", + -13.993684768676758 + ], + [ + "Leveraging", + -13.993690490722656 + ], + [ + "ukh", + -13.993698120117188 + ], + [ + "usse", + -13.993709564208984 + ], + [ + "ikki", + -13.993731498718262 + ], + [ + "▁Atelier", + -13.993744850158691 + ], + [ + "equivalent", + -13.993767738342283 + ], + [ + "passenger", + -13.993769645690918 + ], + [ + "Military", + -13.993776321411133 + ], + [ + "Thompson", + -13.99377727508545 + ], + [ + "▁Banff", + -13.99380111694336 + ], + [ + "VIC", + -13.993816375732422 + ], + [ + "776", + -13.99384307861328 + ], + [ + "Lit", + -13.993852615356444 + ], + [ + "discount", + -13.993868827819824 + ], + [ + "▁stupidity", + -13.993868827819824 + ], + [ + "▁domesticated", + -13.993895530700684 + ], + [ + "▁headlined", + -13.993905067443848 + ], + [ + "fg", + -13.993921279907228 + ], + [ + "▁CHANGE", + -13.993958473205566 + ], + [ + "IND", + -13.993978500366213 + ], + [ + "▁licensees", + -13.993982315063477 + ], + [ + "▁shrouded", + -13.99399757385254 + ], + [ + "department", + -13.994024276733398 + ], + [ + "▁Hendricks", + -13.994028091430664 + ], + [ + "▁Sinn", + -13.994084358215332 + ], + [ + "649", + -13.99412441253662 + ], + [ + "authorized", + -13.994129180908203 + ], + [ + "▁Humor", + -13.994129180908203 + ], + [ + "▁KFC", + -13.99418830871582 + ], + [ + "▁Sainte", + -13.99418830871582 + ], + [ + "khu", + -13.994194984436035 + ], + [ + "▁Correction", + -13.994200706481934 + ], + [ + "▁artichoke", + -13.99420166015625 + ], + [ + "▁1944.", + -13.994219779968262 + ], + [ + "fid", + -13.994288444519045 + ], + [ + "▁Diagrams", + -13.994307518005373 + ], + [ + "bauer", + -13.994308471679688 + ], + [ + "▁Ababa", + -13.994309425354004 + ], + [ + "▁deceived", + -13.994333267211914 + ], + [ + "bani", + -13.994346618652344 + ], + [ + "▁Noodle", + 
-13.99435043334961 + ], + [ + "▁buckles", + -13.99441623687744 + ], + [ + "NOS", + -13.994437217712402 + ], + [ + "▁Compatibility", + -13.994458198547363 + ], + [ + "▁cranky", + -13.994464874267578 + ], + [ + "jek", + -13.994484901428224 + ], + [ + "▁940", + -13.99450969696045 + ], + [ + "▁$79", + -13.994555473327637 + ], + [ + "▁sharpened", + -13.994634628295898 + ], + [ + "697", + -13.994638442993164 + ], + [ + "▁bogey", + -13.994638442993164 + ], + [ + "Tone", + -13.994745254516602 + ], + [ + "ombo", + -13.994770050048828 + ], + [ + "spheric", + -13.99479866027832 + ], + [ + "gd", + -13.99481964111328 + ], + [ + "▁stinging", + -13.99483871459961 + ], + [ + "Bella", + -13.99485969543457 + ], + [ + "pada", + -13.994874000549316 + ], + [ + "▁Promoting", + -13.994956016540527 + ], + [ + "▁cad", + -13.994986534118652 + ], + [ + "▁lance", + -13.994997024536133 + ], + [ + "gc", + -13.995007514953612 + ], + [ + "▁TAC", + -13.995047569274902 + ], + [ + "▁FAG", + -13.995070457458496 + ], + [ + "▁Instantly", + -13.99509620666504 + ], + [ + "lithic", + -13.995110511779783 + ], + [ + "▁jeweler", + -13.995161056518556 + ], + [ + "▁germinate", + -13.99517059326172 + ], + [ + "▁permeable", + -13.995172500610352 + ], + [ + "publication", + -13.995214462280272 + ], + [ + "▁expel", + -13.995288848876951 + ], + [ + "▁checkups", + -13.99530792236328 + ], + [ + "▁SAG", + -13.995322227478027 + ], + [ + "▁woken", + -13.99533748626709 + ], + [ + "▁HHS", + -13.995363235473633 + ], + [ + "▁custodian", + -13.995401382446287 + ], + [ + "▁lawmaker", + -13.99549388885498 + ], + [ + "Prev", + -13.995519638061523 + ], + [ + "▁suv", + -13.995543479919434 + ], + [ + "▁Elmer", + -13.995579719543455 + ], + [ + "cello", + -13.995635032653809 + ], + [ + "▁llama", + -13.995660781860352 + ], + [ + "Documents", + -13.995672225952148 + ], + [ + "hitter", + -13.995675086975098 + ], + [ + "▁Calculate", + -13.9957275390625 + ], + [ + "▁Greer", + -13.995759963989258 + ], + [ + "acute", + -13.995863914489746 + ], + [ + "allah", + -13.995890617370604 + ], + [ + "▁Participating", + -13.995963096618652 + ], + [ + "▁Chapp", + -13.99599838256836 + ], + [ + "▁Bumper", + -13.996047019958496 + ], + [ + "nikov", + -13.996153831481934 + ], + [ + "347", + -13.996198654174805 + ], + [ + "▁336", + -13.996220588684082 + ], + [ + "▁ladle", + -13.99623203277588 + ], + [ + "Generate", + -13.99632453918457 + ], + [ + "▁Trish", + -13.996368408203123 + ], + [ + "valid", + -13.996371269226074 + ], + [ + "▁bb", + -13.996386528015137 + ], + [ + "giri", + -13.99639892578125 + ], + [ + "▁DEP", + -13.996408462524414 + ], + [ + "27)", + -13.996416091918944 + ], + [ + "▁Maka", + -13.99648380279541 + ], + [ + "257", + -13.996492385864258 + ], + [ + "AVE", + -13.996525764465332 + ], + [ + "9.3", + -13.996527671813965 + ], + [ + "IRT", + -13.99652862548828 + ], + [ + "762", + -13.996553421020508 + ], + [ + "OVER", + -13.996562957763672 + ], + [ + "Lyn", + -13.99665069580078 + ], + [ + "▁Pena", + -13.996665000915527 + ], + [ + "▁Grover", + -13.996706008911133 + ], + [ + "▁Quill", + -13.99677562713623 + ], + [ + "Nano", + -13.99681282043457 + ], + [ + "$50", + -13.99691104888916 + ], + [ + "▁hawks", + -13.996926307678224 + ], + [ + "▁ANZ", + -13.997003555297852 + ], + [ + "JSC", + -13.997005462646484 + ], + [ + "▁Kell", + -13.997007369995115 + ], + [ + "▁Yogurt", + -13.997025489807127 + ], + [ + "▁RIA", + -13.997035026550291 + ], + [ + "▁Huck", + -13.997047424316406 + ], + [ + "▁accredit", + -13.997164726257324 + ], + [ + "826", + -13.99721336364746 + ], + [ + 
"▁CIC", + -13.997230529785156 + ], + [ + "Into", + -13.997243881225586 + ], + [ + "ogan", + -13.997247695922852 + ], + [ + "BUL", + -13.997276306152344 + ], + [ + "1.8%", + -13.997302055358888 + ], + [ + "Cas", + -13.99732780456543 + ], + [ + "▁Hap", + -13.997371673583984 + ], + [ + "▁clerks", + -13.997377395629885 + ], + [ + "652", + -13.99741554260254 + ], + [ + "▁incite", + -13.997441291809082 + ], + [ + "▁omelet", + -13.997441291809082 + ], + [ + "▁lotto", + -13.997455596923828 + ], + [ + "▁COMPLETE", + -13.99758529663086 + ], + [ + "▁transceiver", + -13.997598648071287 + ], + [ + "▁enthralled", + -13.997599601745604 + ], + [ + "▁horseradish", + -13.997599601745604 + ], + [ + "▁Fertility", + -13.997600555419922 + ], + [ + "▁Oktoberfest", + -13.997600555419922 + ], + [ + "▁bariatric", + -13.997600555419922 + ], + [ + "▁cinematographer", + -13.997600555419922 + ], + [ + "▁femininity", + -13.997600555419922 + ], + [ + "▁Sharjah", + -13.997602462768556 + ], + [ + "▁Navarro", + -13.997603416442873 + ], + [ + "▁Gardiner", + -13.997605323791504 + ], + [ + "▁Integrative", + -13.997605323791504 + ], + [ + "▁gamification", + -13.99760913848877 + ], + [ + "▁differentiator", + -13.997610092163086 + ], + [ + "▁Graeme", + -13.9976167678833 + ], + [ + "▁Leicestershire", + -13.997621536254885 + ], + [ + "▁unsettled", + -13.9976224899292 + ], + [ + "903", + -13.997624397277832 + ], + [ + "▁precautionary", + -13.99762725830078 + ], + [ + "▁(1991)", + -13.997637748718262 + ], + [ + "▁devising", + -13.997637748718262 + ], + [ + "▁lard", + -13.997651100158691 + ], + [ + "▁reshaping", + -13.99766731262207 + ], + [ + "▁graphene", + -13.997669219970703 + ], + [ + "▁brunt", + -13.997687339782717 + ], + [ + "▁Casio", + -13.99770450592041 + ], + [ + "▁Wanderers", + -13.997724533081056 + ], + [ + "▁objectivity", + -13.997732162475586 + ], + [ + "▁cleanses", + -13.99775218963623 + ], + [ + "▁goaltender", + -13.99775218963623 + ], + [ + "▁momentarily", + -13.997776985168455 + ], + [ + "▁waterfowl", + -13.997802734375 + ], + [ + "▁undervalued", + -13.9978609085083 + ], + [ + "tack", + -13.997861862182615 + ], + [ + "▁seizing", + -13.997875213623049 + ], + [ + "▁Novi", + -13.997892379760742 + ], + [ + "▁254", + -13.99794864654541 + ], + [ + "▁crux", + -13.997950553894045 + ], + [ + "volunteer", + -13.997961044311523 + ], + [ + "Leather", + -13.997980117797852 + ], + [ + "Random", + -13.997981071472168 + ], + [ + "Rapid", + -13.997987747192385 + ], + [ + "killer", + -13.998010635375977 + ], + [ + "peptide", + -13.998018264770508 + ], + [ + "Policy", + -13.998022079467772 + ], + [ + "▁rosette", + -13.998044967651367 + ], + [ + "Refresh", + -13.998048782348633 + ], + [ + "Constant", + -13.998085021972656 + ], + [ + "617", + -13.998120307922363 + ], + [ + "copter", + -13.998126983642578 + ], + [ + "broad", + -13.99814510345459 + ], + [ + "delete", + -13.998156547546388 + ], + [ + "▁Judd", + -13.998162269592283 + ], + [ + "▁cola", + -13.998176574707031 + ], + [ + "▁Opener", + -13.998220443725586 + ], + [ + "bati", + -13.998255729675291 + ], + [ + "arranged", + -13.998265266418455 + ], + [ + "▁Spaniard", + -13.99834442138672 + ], + [ + "▁Cucumber", + -13.998355865478516 + ], + [ + "▁burrito", + -13.998358726501465 + ], + [ + "▁PACK", + -13.99838161468506 + ], + [ + "Nicole", + -13.998393058776855 + ], + [ + "▁jalapeno", + -13.998407363891602 + ], + [ + "facial", + -13.99847412109375 + ], + [ + "folia", + -13.998475074768066 + ], + [ + "FAC", + -13.998476028442385 + ], + [ + "classical", + -13.998497009277344 + ], + [ + 
"0-0", + -13.998541831970217 + ], + [ + "▁preamp", + -13.99855899810791 + ], + [ + "Principal", + -13.998644828796388 + ], + [ + "▁backdrops", + -13.998656272888184 + ], + [ + "▁thier", + -13.998686790466309 + ], + [ + "▁angered", + -13.998744010925291 + ], + [ + "▁downgraded", + -13.998753547668455 + ], + [ + "▁malfunctions", + -13.99875545501709 + ], + [ + "▁Heel", + -13.998791694641112 + ], + [ + "fone", + -13.998793601989746 + ], + [ + "oney", + -13.998808860778809 + ], + [ + "▁Kno", + -13.998857498168944 + ], + [ + "▁customisation", + -13.998862266540527 + ], + [ + "▁1440", + -13.998906135559082 + ], + [ + "▁McGee", + -13.99899959564209 + ], + [ + "Wright", + -13.99909210205078 + ], + [ + "▁Nadia", + -13.99910831451416 + ], + [ + "▁Moodle", + -13.999141693115234 + ], + [ + "Benefits", + -13.9991455078125 + ], + [ + "▁FLV", + -13.999216079711914 + ], + [ + "Resource", + -13.999343872070312 + ], + [ + "384", + -13.999356269836426 + ], + [ + "oeuvres", + -13.999371528625488 + ], + [ + "▁bettors", + -13.99937915802002 + ], + [ + "tual", + -13.999396324157717 + ], + [ + "▁OCC", + -13.999406814575195 + ], + [ + "196", + -13.999412536621094 + ], + [ + "▁Leu", + -13.999462127685549 + ], + [ + "▁Dying", + -13.99948024749756 + ], + [ + "▁Coaster", + -13.999481201171877 + ], + [ + "▁emitting", + -13.999504089355469 + ], + [ + "accessed", + -13.99951171875 + ], + [ + "embe", + -13.999521255493164 + ], + [ + "▁Glaze", + -13.999542236328123 + ], + [ + "▁mayoral", + -13.999604225158691 + ], + [ + "acetyl", + -13.999605178833008 + ], + [ + "▁chakras", + -13.999608993530272 + ], + [ + "▁Rac", + -13.999683380126951 + ], + [ + "ulli", + -13.999699592590332 + ], + [ + "▁chum", + -13.999728202819824 + ], + [ + "▁creatives", + -13.99973201751709 + ], + [ + "▁ripples", + -13.999795913696287 + ], + [ + "▁Roto", + -13.99981689453125 + ], + [ + "cari", + -13.999852180480955 + ], + [ + "▁subdivisions", + -13.999887466430664 + ], + [ + "▁revolver", + -13.999903678894045 + ], + [ + "▁legalize", + -13.999905586242676 + ], + [ + "Sister", + -13.99997901916504 + ], + [ + "neh", + -14.000038146972656 + ], + [ + "▁sidewall", + -14.000067710876465 + ], + [ + "▁$46", + -14.000161170959473 + ], + [ + "▁31%", + -14.000202178955078 + ], + [ + "SOC", + -14.000214576721191 + ], + [ + "▁differentiates", + -14.000238418579102 + ], + [ + "▁Removable", + -14.000266075134276 + ], + [ + "▁navigational", + -14.000274658203123 + ], + [ + "838", + -14.000317573547363 + ], + [ + "775", + -14.000351905822754 + ], + [ + "▁kur", + -14.000360488891602 + ], + [ + "▁BLM", + -14.000383377075195 + ], + [ + "3.50", + -14.000423431396484 + ], + [ + "▁pigeons", + -14.000455856323242 + ], + [ + "▁8-12", + -14.000495910644531 + ], + [ + "kta", + -14.000557899475098 + ], + [ + "▁Lifting", + -14.00072956085205 + ], + [ + "▁Interact", + -14.000783920288086 + ], + [ + "beg", + -14.000895500183104 + ], + [ + "Wouldn", + -14.000916481018066 + ], + [ + "elec", + -14.000993728637695 + ], + [ + "724", + -14.00102710723877 + ], + [ + "▁CRI", + -14.001041412353516 + ], + [ + "▁Likely", + -14.001080513000488 + ], + [ + "▁Barlow", + -14.00115966796875 + ], + [ + "▁wholeness", + -14.001215934753418 + ], + [ + "holy", + -14.001222610473633 + ], + [ + "▁bunches", + -14.001272201538086 + ], + [ + "▁(180", + -14.001333236694336 + ], + [ + "▁musings", + -14.001384735107422 + ], + [ + ".264", + -14.001399040222168 + ], + [ + "deva", + -14.00141429901123 + ], + [ + "▁2007).", + -14.00143814086914 + ], + [ + "egh", + -14.001490592956545 + ], + [ + "▁$69", + 
-14.001495361328123 + ], + [ + "▁handicrafts", + -14.001509666442873 + ], + [ + "▁felon", + -14.001517295837402 + ], + [ + "882", + -14.001521110534668 + ], + [ + "▁sweatshirts", + -14.001526832580566 + ], + [ + "▁greenhouses", + -14.001587867736816 + ], + [ + "▁UB", + -14.001616477966309 + ], + [ + "▁morbid", + -14.001632690429688 + ], + [ + "▁Buttons", + -14.00163459777832 + ], + [ + "▁Pigs", + -14.001681327819824 + ], + [ + "707", + -14.001704216003418 + ], + [ + "▁COURSE", + -14.001715660095217 + ], + [ + "▁PostgreSQL", + -14.001715660095217 + ], + [ + "▁defiance", + -14.001715660095217 + ], + [ + "▁inefficiencies", + -14.001715660095217 + ], + [ + "▁rectified", + -14.001715660095217 + ], + [ + "▁appellant", + -14.001716613769531 + ], + [ + "▁effluent", + -14.001716613769531 + ], + [ + "▁outpouring", + -14.001716613769531 + ], + [ + "▁illiterate", + -14.001717567443848 + ], + [ + "▁kilowatt", + -14.001718521118164 + ], + [ + "▁Sconce", + -14.001739501953123 + ], + [ + "▁prognostic", + -14.00174331665039 + ], + [ + "▁altercation", + -14.001744270324709 + ], + [ + "▁thrice", + -14.00174617767334 + ], + [ + "▁texted", + -14.001749992370604 + ], + [ + "▁mimicking", + -14.001752853393556 + ], + [ + "▁adverb", + -14.001764297485352 + ], + [ + "▁Lyndon", + -14.001787185668944 + ], + [ + "▁Nutrient", + -14.001790046691896 + ], + [ + "▁colonists", + -14.001791000366213 + ], + [ + "▁Steakhouse", + -14.001800537109377 + ], + [ + "▁Breaks", + -14.001803398132324 + ], + [ + "▁snappy", + -14.00182056427002 + ], + [ + "▁magnified", + -14.001845359802246 + ], + [ + "▁waning", + -14.001864433288574 + ], + [ + "▁Lut", + -14.001890182495115 + ], + [ + "▁GENERAL", + -14.001893043518066 + ], + [ + "▁bulldog", + -14.001896858215332 + ], + [ + "▁coyotes", + -14.001897811889648 + ], + [ + "▁Savoy", + -14.001907348632812 + ], + [ + "▁Jodi", + -14.001914978027344 + ], + [ + "album", + -14.001935958862305 + ], + [ + "▁Walden", + -14.001993179321287 + ], + [ + "▁cyto", + -14.002002716064451 + ], + [ + "▁transpired", + -14.002066612243652 + ], + [ + "▁folly", + -14.002068519592283 + ], + [ + "▁orbits", + -14.002092361450195 + ], + [ + "417", + -14.00209617614746 + ], + [ + "Teach", + -14.002102851867676 + ], + [ + "kasi", + -14.00216007232666 + ], + [ + "Dedicated", + -14.002179145812988 + ], + [ + "Thirty", + -14.002182960510254 + ], + [ + "Contrary", + -14.00218677520752 + ], + [ + "Survey", + -14.002190589904783 + ], + [ + "preferably", + -14.002193450927734 + ], + [ + "▁shortness", + -14.002195358276367 + ], + [ + "trex", + -14.0022554397583 + ], + [ + "phobia", + -14.00228500366211 + ], + [ + "described", + -14.002299308776855 + ], + [ + "▁Probate", + -14.002302169799805 + ], + [ + "cree", + -14.002313613891602 + ], + [ + "▁Ashes", + -14.002318382263184 + ], + [ + "▁73%", + -14.002326965332031 + ], + [ + "ague", + -14.002461433410645 + ], + [ + "quart", + -14.002476692199709 + ], + [ + "▁receptacle", + -14.002513885498049 + ], + [ + "Overview", + -14.00252914428711 + ], + [ + "▁scissor", + -14.002532958984377 + ], + [ + "▁Bier", + -14.0025634765625 + ], + [ + "▁412", + -14.00256633758545 + ], + [ + "specifically", + -14.002569198608398 + ], + [ + "sek", + -14.002589225769045 + ], + [ + "▁visualizing", + -14.002628326416016 + ], + [ + "823", + -14.002649307250977 + ], + [ + "▁Fordham", + -14.002655029296877 + ], + [ + "produce", + -14.00269889831543 + ], + [ + "▁Snip", + -14.002714157104492 + ], + [ + "▁Baj", + -14.002735137939451 + ], + [ + "▁accu", + -14.002799034118652 + ], + [ + "▁boaters", + 
-14.002924919128418 + ], + [ + "▁Daughters", + -14.002962112426758 + ], + [ + "▁Hispanics", + -14.002981185913086 + ], + [ + "AMD", + -14.003039360046388 + ], + [ + "▁Adapt", + -14.003058433532717 + ], + [ + "▁Offensive", + -14.003090858459473 + ], + [ + "tella", + -14.003094673156738 + ], + [ + "2]", + -14.003131866455078 + ], + [ + "traded", + -14.00315761566162 + ], + [ + "puram", + -14.003168106079102 + ], + [ + "▁ele", + -14.003168106079102 + ], + [ + "▁stuffy", + -14.003188133239746 + ], + [ + "▁$5000", + -14.00324821472168 + ], + [ + "▁defends", + -14.003273963928224 + ], + [ + "▁housekeeper", + -14.003315925598145 + ], + [ + "▁broadened", + -14.003405570983888 + ], + [ + "▁dialects", + -14.003524780273438 + ], + [ + "597", + -14.003543853759766 + ], + [ + "▁reborn", + -14.00356388092041 + ], + [ + "hla", + -14.003646850585938 + ], + [ + "▁intervened", + -14.003682136535645 + ], + [ + "▁Instructional", + -14.00370979309082 + ], + [ + "▁TFS", + -14.00378131866455 + ], + [ + "SIP", + -14.003870964050291 + ], + [ + "▁Grief", + -14.003945350646973 + ], + [ + "▁boroughs", + -14.004020690917969 + ], + [ + "xc", + -14.004136085510254 + ], + [ + "ctus", + -14.00417423248291 + ], + [ + "ugged", + -14.004323959350586 + ], + [ + "885", + -14.004441261291504 + ], + [ + "▁atypical", + -14.004464149475098 + ], + [ + "ение", + -14.004547119140623 + ], + [ + "kv", + -14.004563331604004 + ], + [ + "dek", + -14.00461483001709 + ], + [ + "1_", + -14.004619598388672 + ], + [ + "▁Ganesh", + -14.004633903503418 + ], + [ + "▁WALL", + -14.004718780517578 + ], + [ + "▁Bottles", + -14.004768371582031 + ], + [ + "▁Gow", + -14.004817008972168 + ], + [ + "aday", + -14.004853248596191 + ], + [ + "154", + -14.00495147705078 + ], + [ + "▁lube", + -14.004956245422363 + ], + [ + "▁700,000", + -14.005081176757812 + ], + [ + "shah", + -14.005083084106444 + ], + [ + "▁Hoa", + -14.00510597229004 + ], + [ + "inf", + -14.005215644836426 + ], + [ + "▁COD", + -14.00522232055664 + ], + [ + "▁Primal", + -14.00525188446045 + ], + [ + "pw", + -14.005362510681152 + ], + [ + "▁merges", + -14.00540256500244 + ], + [ + "336", + -14.005420684814451 + ], + [ + "▁scorecard", + -14.00542163848877 + ], + [ + "865", + -14.005481719970703 + ], + [ + "▁(!)", + -14.00554084777832 + ], + [ + "geb", + -14.00555419921875 + ], + [ + "▁328", + -14.005558013916016 + ], + [ + "▁CNY", + -14.00564670562744 + ], + [ + "▁mitochondria", + -14.005752563476562 + ], + [ + "▁Vibrating", + -14.005783081054688 + ], + [ + "Bashir", + -14.005793571472168 + ], + [ + "Attribute", + -14.005826950073242 + ], + [ + "kina", + -14.00583267211914 + ], + [ + "9:30", + -14.00584316253662 + ], + [ + "▁Djokovic", + -14.005847930908203 + ], + [ + "▁Giuseppe", + -14.005847930908203 + ], + [ + "▁fracturing", + -14.005847930908203 + ], + [ + "▁garcinia", + -14.005847930908203 + ], + [ + "▁keratin", + -14.005847930908203 + ], + [ + "▁pheasant", + -14.005847930908203 + ], + [ + "▁samurai", + -14.005847930908203 + ], + [ + "▁sceptical", + -14.005847930908203 + ], + [ + "▁sunbathing", + -14.005847930908203 + ], + [ + "▁Gnome", + -14.00584888458252 + ], + [ + "▁fibrous", + -14.00584888458252 + ], + [ + "▁Johansson", + -14.005855560302734 + ], + [ + "▁salient", + -14.005855560302734 + ], + [ + "▁swore", + -14.005855560302734 + ], + [ + "▁robbing", + -14.005858421325684 + ], + [ + "▁alluded", + -14.005859375 + ], + [ + "▁Gifford", + -14.005866050720217 + ], + [ + "▁matrices", + -14.005887985229492 + ], + [ + "▁antiviral", + -14.00589656829834 + ], + [ + "▁fixings", + 
-14.005898475646973 + ], + [ + "▁FinTech", + -14.005901336669922 + ], + [ + "▁unscrew", + -14.00591278076172 + ], + [ + "▁preeminent", + -14.005927085876465 + ], + [ + "▁DOL", + -14.00595760345459 + ], + [ + "▁wreckage", + -14.005964279174805 + ], + [ + "▁biogas", + -14.005979537963867 + ], + [ + "▁reformer", + -14.00601291656494 + ], + [ + "RITE", + -14.006037712097168 + ], + [ + "▁Renew", + -14.00607681274414 + ], + [ + "▁Moy", + -14.006148338317873 + ], + [ + "691", + -14.00624942779541 + ], + [ + "▁Mab", + -14.006266593933104 + ], + [ + "▁subtlety", + -14.006266593933104 + ], + [ + "▁Continent", + -14.006275177001951 + ], + [ + "▁Zer", + -14.006290435791016 + ], + [ + "lied", + -14.006309509277344 + ], + [ + "▁athleticism", + -14.006325721740724 + ], + [ + "Capture", + -14.006372451782228 + ], + [ + "Artificial", + -14.006429672241213 + ], + [ + "▁Ringtone", + -14.006430625915527 + ], + [ + "▁provenance", + -14.00645351409912 + ], + [ + "877", + -14.006457328796388 + ], + [ + "Secretary", + -14.006464004516602 + ], + [ + "Module", + -14.006499290466309 + ], + [ + "▁BIS", + -14.00650691986084 + ], + [ + "▁aftertaste", + -14.00656032562256 + ], + [ + "▁limbo", + -14.006566047668455 + ], + [ + "▁238", + -14.006599426269531 + ], + [ + "▁FIVE", + -14.006622314453123 + ], + [ + "Fitness", + -14.006661415100098 + ], + [ + "▁Markham", + -14.00669765472412 + ], + [ + "▁sconce", + -14.006723403930664 + ], + [ + "EMP", + -14.00677490234375 + ], + [ + "219", + -14.006790161132812 + ], + [ + "quent", + -14.006802558898926 + ], + [ + "▁campaigners", + -14.00680923461914 + ], + [ + "Poker", + -14.006830215454102 + ], + [ + "▁fists", + -14.006864547729492 + ], + [ + "▁37-", + -14.006937980651855 + ], + [ + "surfing", + -14.006957054138184 + ], + [ + "▁Valentino", + -14.00701141357422 + ], + [ + "▁soliciting", + -14.007040023803713 + ], + [ + "▁Notably", + -14.007192611694336 + ], + [ + "▁Reviewed", + -14.00721263885498 + ], + [ + "▁whispering", + -14.00721549987793 + ], + [ + "▁Persia", + -14.007242202758787 + ], + [ + "ggling", + -14.007266998291016 + ], + [ + "▁discloses", + -14.007267951965332 + ], + [ + "▁hydrocarbon", + -14.007274627685549 + ], + [ + "843", + -14.007287979125977 + ], + [ + "▁47%", + -14.007301330566406 + ], + [ + "▁Plat", + -14.007356643676758 + ], + [ + "▁ev", + -14.00740909576416 + ], + [ + "998", + -14.007475852966309 + ], + [ + "281", + -14.007518768310549 + ], + [ + "wati", + -14.007534980773926 + ], + [ + "▁$1,2", + -14.00759220123291 + ], + [ + "▁scorers", + -14.007638931274414 + ], + [ + "erville", + -14.007683753967283 + ], + [ + "remote", + -14.007772445678713 + ], + [ + "included", + -14.00778102874756 + ], + [ + "▁pts", + -14.007792472839355 + ], + [ + "▁Racks", + -14.007811546325684 + ], + [ + "▁hamburgers", + -14.007877349853516 + ], + [ + "volu", + -14.007949829101562 + ], + [ + "▁infra", + -14.007960319519045 + ], + [ + "632", + -14.007964134216309 + ], + [ + "▁Raa", + -14.007977485656738 + ], + [ + "▁mach", + -14.007978439331056 + ], + [ + "▁Belong", + -14.008026123046877 + ], + [ + "Village", + -14.008034706115724 + ], + [ + "▁Halal", + -14.00804328918457 + ], + [ + "▁Kimmel", + -14.00805950164795 + ], + [ + "753", + -14.008062362670898 + ], + [ + "Crown", + -14.008070945739746 + ], + [ + "liga", + -14.008116722106934 + ], + [ + "▁Emil", + -14.008173942565918 + ], + [ + "idio", + -14.008207321166992 + ], + [ + "tainment", + -14.008233070373535 + ], + [ + "talling", + -14.008256912231444 + ], + [ + "▁5.00", + -14.00829029083252 + ], + [ + "▁quell", + 
-14.00850772857666 + ], + [ + "▁286", + -14.00856876373291 + ], + [ + "▁setter", + -14.008615493774414 + ], + [ + "VES", + -14.008641242980955 + ], + [ + "▁FER", + -14.008682250976562 + ], + [ + "▁AMI", + -14.00871467590332 + ], + [ + "▁alternately", + -14.008753776550291 + ], + [ + "589", + -14.00878620147705 + ], + [ + "lovely", + -14.00879955291748 + ], + [ + "▁Lube", + -14.008844375610352 + ], + [ + "▁Admittedly", + -14.008856773376465 + ], + [ + "fuse", + -14.008882522583008 + ], + [ + "▁Carp", + -14.008889198303224 + ], + [ + "▁meager", + -14.008993148803713 + ], + [ + "▁Dhan", + -14.00904655456543 + ], + [ + "▁SDG", + -14.00908374786377 + ], + [ + "▁DVI", + -14.009093284606934 + ], + [ + "▁frameless", + -14.009174346923828 + ], + [ + "▁Crank", + -14.009237289428713 + ], + [ + "lender", + -14.009242057800291 + ], + [ + "▁glides", + -14.009318351745604 + ], + [ + "variant", + -14.00932788848877 + ], + [ + "gret", + -14.009334564208984 + ], + [ + "▁EFI", + -14.009374618530272 + ], + [ + "▁ballads", + -14.009536743164062 + ], + [ + "devi", + -14.009543418884276 + ], + [ + "ANDA", + -14.009578704833984 + ], + [ + "162", + -14.00961208343506 + ], + [ + "Elect", + -14.00962257385254 + ], + [ + "details", + -14.009748458862305 + ], + [ + "▁1962.", + -14.009751319885254 + ], + [ + "▁downloader", + -14.009807586669922 + ], + [ + "aloo", + -14.009824752807615 + ], + [ + "▁10:2", + -14.009836196899414 + ], + [ + "▁strollers", + -14.009888648986816 + ], + [ + "▁taut", + -14.00992488861084 + ], + [ + "koff", + -14.009940147399902 + ], + [ + "▁elixir", + -14.00999641418457 + ], + [ + "▁Prudential", + -14.009997367858888 + ], + [ + "▁chlorophyll", + -14.009997367858888 + ], + [ + "▁debacle", + -14.009997367858888 + ], + [ + "▁dignitaries", + -14.009997367858888 + ], + [ + "▁manslaughter", + -14.009998321533203 + ], + [ + "▁kefir", + -14.010001182556152 + ], + [ + "▁voluminous", + -14.010002136230469 + ], + [ + "▁spleen", + -14.010004997253418 + ], + [ + "▁derelict", + -14.010005950927734 + ], + [ + "▁telemetry", + -14.010005950927734 + ], + [ + "▁outlandish", + -14.010007858276367 + ], + [ + "▁Salazar", + -14.010008811950684 + ], + [ + "▁expulsion", + -14.010010719299316 + ], + [ + "▁Flagstaff", + -14.01001262664795 + ], + [ + "▁Houzz", + -14.01001262664795 + ], + [ + "▁Galleria", + -14.010017395019531 + ], + [ + "▁Englewood", + -14.010038375854492 + ], + [ + "▁Duplex", + -14.010045051574709 + ], + [ + "▁Galloway", + -14.010046005249023 + ], + [ + "▁Gud", + -14.010047912597656 + ], + [ + "▁Carleton", + -14.010058403015137 + ], + [ + "▁skein", + -14.01006317138672 + ], + [ + "▁Tundra", + -14.01007080078125 + ], + [ + "▁Natalia", + -14.010076522827148 + ], + [ + "▁preoccupied", + -14.010080337524414 + ], + [ + "▁lineman", + -14.010086059570312 + ], + [ + "▁tact", + -14.010086059570312 + ], + [ + "▁Emoji", + -14.010092735290527 + ], + [ + "▁Spirituality", + -14.01013469696045 + ], + [ + "▁firefox", + -14.010143280029297 + ], + [ + "▁straddle", + -14.010171890258787 + ], + [ + "▁Ira", + -14.01019287109375 + ], + [ + "▁Owing", + -14.010194778442385 + ], + [ + "▁QE", + -14.010207176208496 + ], + [ + "▁folate", + -14.010207176208496 + ], + [ + "▁strait", + -14.010225296020508 + ], + [ + "▁Remedy", + -14.01024341583252 + ], + [ + "▁minted", + -14.01024341583252 + ], + [ + "yung", + -14.010247230529783 + ], + [ + "▁Daw", + -14.010249137878418 + ], + [ + "NGO", + -14.010300636291504 + ], + [ + "658", + -14.01031494140625 + ], + [ + "▁orthodontics", + -14.010320663452148 + ], + [ + "▁hearted", + 
-14.01032257080078 + ], + [ + "bps", + -14.010324478149414 + ], + [ + "▁Uninstaller", + -14.01032543182373 + ], + [ + "▁agribusiness", + -14.010357856750488 + ], + [ + "▁MED", + -14.0103759765625 + ], + [ + "effectively", + -14.010380744934082 + ], + [ + "Disk", + -14.010395050048828 + ], + [ + "▁(36", + -14.010480880737305 + ], + [ + "▁Selena", + -14.010589599609377 + ], + [ + "BOT", + -14.010604858398438 + ], + [ + "▁Rational", + -14.010625839233398 + ], + [ + "▁Castel", + -14.010652542114258 + ], + [ + "▁Teri", + -14.010687828063965 + ], + [ + "Maintenance", + -14.010690689086914 + ], + [ + "▁Pounds", + -14.01069450378418 + ], + [ + "Liverpool", + -14.010695457458496 + ], + [ + "Toyota", + -14.010695457458496 + ], + [ + "soaked", + -14.010697364807127 + ], + [ + "contemporary", + -14.010702133178713 + ], + [ + "▁dll", + -14.010714530944824 + ], + [ + "cabinet", + -14.010866165161133 + ], + [ + "▁actuator", + -14.010906219482422 + ], + [ + "▁Mahi", + -14.010988235473633 + ], + [ + "▁MSA", + -14.011003494262695 + ], + [ + "expanding", + -14.011022567749023 + ], + [ + "▁Pecan", + -14.011030197143556 + ], + [ + "▁useable", + -14.01108741760254 + ], + [ + "▁Vantage", + -14.011098861694336 + ], + [ + "▁burying", + -14.011116027832031 + ], + [ + "▁frosty", + -14.011210441589355 + ], + [ + "Institute", + -14.011215209960938 + ], + [ + "Yum", + -14.011218070983888 + ], + [ + "▁Blazer", + -14.011228561401367 + ], + [ + "uver", + -14.011272430419922 + ], + [ + "brake", + -14.011287689208984 + ], + [ + "▁rapids", + -14.01131820678711 + ], + [ + "cigarettes", + -14.011327743530272 + ], + [ + "▁Jax", + -14.011364936828612 + ], + [ + "▁Qualifications", + -14.01140308380127 + ], + [ + "▁outperformed", + -14.011414527893066 + ], + [ + "▁dizzying", + -14.011419296264648 + ], + [ + "haka", + -14.01142120361328 + ], + [ + "▁slur", + -14.011630058288574 + ], + [ + "▁gallop", + -14.01165771484375 + ], + [ + "CTE", + -14.011719703674316 + ], + [ + "▁faceted", + -14.011731147766112 + ], + [ + "TRACK", + -14.011798858642578 + ], + [ + "▁Hollis", + -14.011834144592283 + ], + [ + "▁polygon", + -14.011834144592283 + ], + [ + "lj", + -14.011858940124512 + ], + [ + "▁Detectives", + -14.011860847473145 + ], + [ + "▁winemakers", + -14.01188850402832 + ], + [ + "Def", + -14.012006759643556 + ], + [ + "imat", + -14.012046813964844 + ], + [ + "▁APAC", + -14.012064933776855 + ], + [ + "Sail", + -14.012097358703612 + ], + [ + "tala", + -14.01211166381836 + ], + [ + "wadi", + -14.012187957763672 + ], + [ + "▁injectors", + -14.012191772460938 + ], + [ + "▁passively", + -14.012192726135254 + ], + [ + "connection", + -14.012296676635742 + ], + [ + "▁Balancing", + -14.012327194213867 + ], + [ + "▁Birk", + -14.012330055236816 + ], + [ + "Leon", + -14.01235294342041 + ], + [ + "▁Lynda", + -14.012362480163574 + ], + [ + "inian", + -14.01250171661377 + ], + [ + "▁wasps", + -14.01251983642578 + ], + [ + "▁Kuhn", + -14.012535095214844 + ], + [ + "▁atheists", + -14.012578964233398 + ], + [ + "▁clothed", + -14.01258659362793 + ], + [ + "▁lingers", + -14.01258659362793 + ], + [ + "▁Berna", + -14.012657165527344 + ], + [ + "Nearest", + -14.012779235839844 + ], + [ + "▁Sah", + -14.012810707092283 + ], + [ + "rows", + -14.012815475463867 + ], + [ + "▁Hodges", + -14.012849807739258 + ], + [ + "▁sildenafil", + -14.012850761413574 + ], + [ + "▁whitewater", + -14.012887001037598 + ], + [ + "▁candied", + -14.012892723083496 + ], + [ + "suits", + -14.012899398803713 + ], + [ + "keys", + -14.012921333312988 + ], + [ + "▁paradigms", + 
-14.012998580932615 + ], + [ + "2,800", + -14.013052940368652 + ], + [ + "bk", + -14.013099670410156 + ], + [ + "324", + -14.013140678405762 + ], + [ + "▁Merger", + -14.013155937194824 + ], + [ + "▁moldings", + -14.013181686401367 + ], + [ + "▁prevails", + -14.013195991516112 + ], + [ + "SAL", + -14.01320457458496 + ], + [ + "▁WWF", + -14.0132417678833 + ], + [ + "▁Derma", + -14.013293266296388 + ], + [ + "Hero", + -14.01335906982422 + ], + [ + "▁examiners", + -14.0133638381958 + ], + [ + "ckel", + -14.013382911682127 + ], + [ + "▁Duc", + -14.013386726379396 + ], + [ + "▁rusted", + -14.013392448425291 + ], + [ + "▁rips", + -14.013437271118164 + ], + [ + "▁Tread", + -14.013497352600098 + ], + [ + "Cube", + -14.013505935668944 + ], + [ + "ECA", + -14.013527870178224 + ], + [ + "▁Commonly", + -14.013541221618652 + ], + [ + "▁Portraits", + -14.01356315612793 + ], + [ + "VIEW", + -14.01358127593994 + ], + [ + "esha", + -14.01366138458252 + ], + [ + "1945", + -14.013731956481934 + ], + [ + "664", + -14.013757705688477 + ], + [ + "▁sardines", + -14.013779640197754 + ], + [ + "▁Reputation", + -14.013787269592283 + ], + [ + "▁celebs", + -14.013849258422852 + ], + [ + "▁crowning", + -14.013860702514648 + ], + [ + "fran", + -14.01388931274414 + ], + [ + "▁heli", + -14.013896942138672 + ], + [ + "advanced", + -14.013916015625 + ], + [ + "nvestigative", + -14.01395320892334 + ], + [ + "▁Cycles", + -14.01398754119873 + ], + [ + "▁WSU", + -14.01402473449707 + ], + [ + "eid", + -14.014063835144045 + ], + [ + "iello", + -14.014076232910156 + ], + [ + "▁McA", + -14.01410961151123 + ], + [ + "▁Flare", + -14.014161109924316 + ], + [ + "▁tincture", + -14.01416301727295 + ], + [ + "▁Strasbourg", + -14.014163970947266 + ], + [ + "▁bereavement", + -14.014163970947266 + ], + [ + "▁dermatology", + -14.014163970947266 + ], + [ + "▁optimising", + -14.014163970947266 + ], + [ + "▁osmosis", + -14.014163970947266 + ], + [ + "▁psychotherapist", + -14.014163970947266 + ], + [ + "▁september", + -14.014163970947266 + ], + [ + "▁xenical", + -14.014163970947266 + ], + [ + "▁Mikhail", + -14.014164924621582 + ], + [ + "▁composure", + -14.014164924621582 + ], + [ + "▁asterisk", + -14.014165878295898 + ], + [ + "▁observant", + -14.014165878295898 + ], + [ + "▁Radisson", + -14.014168739318848 + ], + [ + "▁Synergy", + -14.014168739318848 + ], + [ + "▁sebum", + -14.014171600341797 + ], + [ + "▁quantified", + -14.01417350769043 + ], + [ + "▁BETTER", + -14.01418113708496 + ], + [ + "▁Crohn", + -14.014183044433594 + ], + [ + "▁unsatisfied", + -14.014189720153809 + ], + [ + "▁cadence", + -14.014190673828123 + ], + [ + "▁protestors", + -14.01419448852539 + ], + [ + "▁Penelope", + -14.014202117919922 + ], + [ + "▁dislocation", + -14.014203071594238 + ], + [ + "▁biopic", + -14.014223098754885 + ], + [ + "▁electrifying", + -14.014245986938477 + ], + [ + "▁edema", + -14.014257431030272 + ], + [ + "▁Dimitri", + -14.01426124572754 + ], + [ + "▁guardianship", + -14.01430606842041 + ], + [ + "▁sportsmanship", + -14.014348983764648 + ], + [ + "▁GMAT", + -14.014434814453123 + ], + [ + "▁Livestock", + -14.014434814453123 + ], + [ + "▁braised", + -14.014480590820312 + ], + [ + "▁Onyx", + -14.014481544494627 + ], + [ + "itative", + -14.014511108398438 + ], + [ + "▁2018/19", + -14.01461410522461 + ], + [ + "▁hoot", + -14.014630317687988 + ], + [ + "▁gent", + -14.014640808105469 + ], + [ + "Increasing", + -14.01469898223877 + ], + [ + "▁Ember", + -14.014727592468262 + ], + [ + "▁gp", + -14.01473903656006 + ], + [ + "▁GHG", + -14.01474952697754 + ], + [ 
+ "▁fingernails", + -14.014758110046388 + ], + [ + "▁ambush", + -14.014798164367676 + ], + [ + "▁discreetly", + -14.014838218688965 + ], + [ + "▁Specs", + -14.01484489440918 + ], + [ + "hosts", + -14.014849662780762 + ], + [ + "▁AQ", + -14.014866828918455 + ], + [ + "▁Greet", + -14.01486873626709 + ], + [ + "Font", + -14.014901161193848 + ], + [ + "▁downpour", + -14.01491641998291 + ], + [ + "▁Modelling", + -14.014923095703123 + ], + [ + "Nasdaq", + -14.014938354492188 + ], + [ + "▁pacific", + -14.014949798583984 + ], + [ + "Occasionally", + -14.0149507522583 + ], + [ + "Domestic", + -14.01496124267578 + ], + [ + "Coconut", + -14.014963150024414 + ], + [ + "premium", + -14.01496410369873 + ], + [ + "seekers", + -14.014966011047363 + ], + [ + "spectrum", + -14.014966011047363 + ], + [ + "Kyle", + -14.014969825744627 + ], + [ + "▁sealants", + -14.01501178741455 + ], + [ + "bili", + -14.015020370483398 + ], + [ + "Carlton", + -14.015064239501951 + ], + [ + "diesel", + -14.015118598937988 + ], + [ + "▁rupee", + -14.01513385772705 + ], + [ + "▁reusing", + -14.0152006149292 + ], + [ + "▁dud", + -14.015241622924805 + ], + [ + "▁stumped", + -14.015249252319336 + ], + [ + "▁327", + -14.01536464691162 + ], + [ + "▁Clouds", + -14.015398979187012 + ], + [ + "matrix", + -14.015413284301758 + ], + [ + "▁ADV", + -14.015421867370604 + ], + [ + "righteous", + -14.015440940856934 + ], + [ + "▁Deploy", + -14.015453338623049 + ], + [ + "scrap", + -14.015487670898438 + ], + [ + "Assad", + -14.015494346618652 + ], + [ + "933", + -14.015588760375977 + ], + [ + "Adopt", + -14.015654563903809 + ], + [ + "chil", + -14.015664100646973 + ], + [ + "thomas", + -14.015702247619627 + ], + [ + "▁Trapp", + -14.01571559906006 + ], + [ + "▁gambler", + -14.01576805114746 + ], + [ + "▁TBR", + -14.015813827514648 + ], + [ + "▁Callum", + -14.015819549560549 + ], + [ + "martin", + -14.01585292816162 + ], + [ + "tied", + -14.015871047973633 + ], + [ + "▁Stub", + -14.01587963104248 + ], + [ + "▁Loaded", + -14.01594066619873 + ], + [ + "▁empathize", + -14.015951156616213 + ], + [ + "▁Miz", + -14.015952110290527 + ], + [ + "▁Germain", + -14.01600170135498 + ], + [ + "▁retreated", + -14.01602268218994 + ], + [ + "bloom", + -14.016048431396484 + ], + [ + "+4", + -14.016050338745115 + ], + [ + "▁jackpots", + -14.016058921813965 + ], + [ + "HRM", + -14.01611328125 + ], + [ + "▁Saa", + -14.016117095947266 + ], + [ + "▁WARNING", + -14.016130447387695 + ], + [ + "845", + -14.016249656677246 + ], + [ + "▁Hp", + -14.016304969787598 + ], + [ + "022", + -14.016324043273926 + ], + [ + "▁lei", + -14.016337394714355 + ], + [ + "868", + -14.016338348388672 + ], + [ + "amen", + -14.01634120941162 + ], + [ + "▁ETA", + -14.016343116760254 + ], + [ + "▁COVER", + -14.01636028289795 + ], + [ + "MINE", + -14.0164155960083 + ], + [ + "Bun", + -14.016472816467283 + ], + [ + "SIDE", + -14.016517639160156 + ], + [ + "▁Hamid", + -14.016550064086914 + ], + [ + "▁sketchy", + -14.016558647155762 + ], + [ + "FET", + -14.016562461853027 + ], + [ + "▁highland", + -14.016566276550291 + ], + [ + "▁rags", + -14.016657829284668 + ], + [ + "▁90,000", + -14.01669692993164 + ], + [ + "▁sprinter", + -14.016807556152344 + ], + [ + "-6)", + -14.01681423187256 + ], + [ + "▁Receiving", + -14.016885757446287 + ], + [ + "▁Serb", + -14.016922950744627 + ], + [ + "hro", + -14.01699924468994 + ], + [ + "POWER", + -14.01707363128662 + ], + [ + "TRU", + -14.017099380493164 + ], + [ + "▁shattering", + -14.017117500305176 + ], + [ + "▁EDGE", + -14.01713752746582 + ], + [ + "▁bowled", + 
-14.017244338989258 + ], + [ + "▁DPI", + -14.017247200012209 + ], + [ + "Drivers", + -14.01726245880127 + ], + [ + "▁revolutions", + -14.01729679107666 + ], + [ + "▁MRC", + -14.017343521118164 + ], + [ + "▁entitles", + -14.017386436462402 + ], + [ + "▁1.5-", + -14.017419815063477 + ], + [ + "dham", + -14.017428398132324 + ], + [ + "▁rubs", + -14.01745891571045 + ], + [ + "▁VST", + -14.017492294311523 + ], + [ + "▁Siem", + -14.017521858215332 + ], + [ + "▁WTF", + -14.017660140991213 + ], + [ + "Dig", + -14.017661094665527 + ], + [ + "Leaders", + -14.017707824707031 + ], + [ + "▁Jed", + -14.017715454101562 + ], + [ + "▁Sana", + -14.017760276794434 + ], + [ + "Kha", + -14.017826080322266 + ], + [ + "Provided", + -14.017840385437012 + ], + [ + "▁microwaves", + -14.01788330078125 + ], + [ + "▁resurrect", + -14.017891883850098 + ], + [ + "▁jour", + -14.017926216125488 + ], + [ + "jc", + -14.01795768737793 + ], + [ + "▁reposition", + -14.017985343933104 + ], + [ + "▁overwrite", + -14.01801300048828 + ], + [ + "▁XO", + -14.01803493499756 + ], + [ + "rett", + -14.018054008483888 + ], + [ + "▁prohibitive", + -14.018150329589844 + ], + [ + "▁Ester", + -14.018163681030272 + ], + [ + "oin", + -14.01817226409912 + ], + [ + "▁Juniors", + -14.01817512512207 + ], + [ + "oscopy", + -14.018181800842283 + ], + [ + "▁Notifications", + -14.018206596374512 + ], + [ + "establish", + -14.018207550048828 + ], + [ + "organ", + -14.018234252929688 + ], + [ + "▁Particular", + -14.018255233764648 + ], + [ + "Grill", + -14.018292427062988 + ], + [ + "▁Britannia", + -14.01834774017334 + ], + [ + "▁Christensen", + -14.01834774017334 + ], + [ + "▁ENJOY", + -14.01834774017334 + ], + [ + "▁Libertarian", + -14.01834774017334 + ], + [ + "▁Marshmallow", + -14.01834774017334 + ], + [ + "▁Nuremberg", + -14.01834774017334 + ], + [ + "▁egregious", + -14.01834774017334 + ], + [ + "▁enthralling", + -14.01834774017334 + ], + [ + "▁epidemiology", + -14.01834774017334 + ], + [ + "▁euthanasia", + -14.01834774017334 + ], + [ + "▁virtuoso", + -14.01834774017334 + ], + [ + "▁Carlsbad", + -14.018349647521973 + ], + [ + "▁zoloft", + -14.018349647521973 + ], + [ + "▁neurodegenerati", + -14.018350601196287 + ], + [ + "▁Nanjing", + -14.018352508544922 + ], + [ + "▁Malvern", + -14.018354415893556 + ], + [ + "▁Outfitters", + -14.018354415893556 + ], + [ + "▁airwaves", + -14.018356323242188 + ], + [ + "▁ASCII", + -14.018357276916504 + ], + [ + "▁beetroot", + -14.018366813659668 + ], + [ + "▁dissimilar", + -14.018366813659668 + ], + [ + "▁vigilance", + -14.018367767333984 + ], + [ + "▁Zimmer", + -14.018370628356934 + ], + [ + "▁BYOD", + -14.018376350402832 + ], + [ + "▁hcg", + -14.01837921142578 + ], + [ + "▁aberration", + -14.018399238586426 + ], + [ + "▁Kieran", + -14.01840114593506 + ], + [ + "▁Midwestern", + -14.01840591430664 + ], + [ + "▁Told", + -14.01841163635254 + ], + [ + "▁Kosher", + -14.01844596862793 + ], + [ + "Staple", + -14.018494606018066 + ], + [ + "▁Seventy", + -14.018495559692385 + ], + [ + "▁foaming", + -14.018506050109863 + ], + [ + "▁pelt", + -14.018508911132812 + ], + [ + "BAC", + -14.018512725830078 + ], + [ + "UGG", + -14.018513679504396 + ], + [ + "▁Dividend", + -14.018550872802734 + ], + [ + "-105", + -14.01857852935791 + ], + [ + "▁Prohibition", + -14.018595695495604 + ], + [ + "foss", + -14.01860237121582 + ], + [ + "▁lifesaving", + -14.018656730651855 + ], + [ + "▁Sabres", + -14.018667221069336 + ], + [ + "▁????????", + -14.018712997436523 + ], + [ + "▁instantaneously", + -14.018749237060549 + ], + [ + "▁repetitions", 
+ -14.018756866455078 + ], + [ + "▁Fah", + -14.018808364868164 + ], + [ + "ере", + -14.018841743469238 + ], + [ + "▁Arrangements", + -14.01886749267578 + ], + [ + "brough", + -14.01888942718506 + ], + [ + "Comes", + -14.018905639648438 + ], + [ + "▁Sabah", + -14.018915176391602 + ], + [ + "▁Bari", + -14.018924713134766 + ], + [ + "osaurus", + -14.018932342529297 + ], + [ + "align", + -14.018953323364258 + ], + [ + "Sew", + -14.01897430419922 + ], + [ + "▁avian", + -14.019000053405762 + ], + [ + "▁Fascinating", + -14.019001007080078 + ], + [ + "issen", + -14.019020080566406 + ], + [ + "deliver", + -14.019036293029783 + ], + [ + "▁flourishes", + -14.019039154052734 + ], + [ + "▁Appl", + -14.019042015075684 + ], + [ + "▁fairer", + -14.019048690795898 + ], + [ + "05)", + -14.019054412841797 + ], + [ + "Won", + -14.019116401672363 + ], + [ + "▁Richland", + -14.019128799438477 + ], + [ + "EIGHT", + -14.019143104553224 + ], + [ + "chance", + -14.019222259521484 + ], + [ + "Portrait", + -14.0192289352417 + ], + [ + "bullying", + -14.019250869750977 + ], + [ + "Funeral", + -14.019253730773926 + ], + [ + "▁artistically", + -14.019259452819824 + ], + [ + "Russell", + -14.01926040649414 + ], + [ + "811", + -14.019265174865724 + ], + [ + "Hollywood", + -14.019285202026367 + ], + [ + "▁Astoria", + -14.019326210021973 + ], + [ + "▁Garg", + -14.01934814453125 + ], + [ + "Airport", + -14.019350051879885 + ], + [ + "▁skyscraper", + -14.019367218017578 + ], + [ + "Anybody", + -14.019386291503906 + ], + [ + "Malaysia", + -14.01939582824707 + ], + [ + "▁correctional", + -14.01941204071045 + ], + [ + "▁alto", + -14.01943016052246 + ], + [ + "▁Faithful", + -14.019455909729004 + ], + [ + "Scar", + -14.019460678100586 + ], + [ + "▁Salam", + -14.019492149353027 + ], + [ + "▁sis", + -14.019500732421877 + ], + [ + "▁opp", + -14.01951026916504 + ], + [ + "Nathan", + -14.019542694091797 + ], + [ + "mec", + -14.019566535949709 + ], + [ + "Antique", + -14.019567489624023 + ], + [ + "▁comedies", + -14.019617080688477 + ], + [ + "▁phony", + -14.019625663757324 + ], + [ + "▁cel", + -14.01967430114746 + ], + [ + "164", + -14.019678115844728 + ], + [ + "Rates", + -14.019729614257812 + ], + [ + "▁wearables", + -14.019816398620604 + ], + [ + "BET", + -14.019820213317873 + ], + [ + "▁Siegel", + -14.01982593536377 + ], + [ + "193", + -14.019904136657717 + ], + [ + "▁SELECT", + -14.019964218139648 + ], + [ + "▁formality", + -14.02003574371338 + ], + [ + "kate", + -14.020187377929688 + ], + [ + "▁Lus", + -14.020254135131836 + ], + [ + "Qual", + -14.020255088806152 + ], + [ + "▁980", + -14.020309448242188 + ], + [ + "▁Wheelchair", + -14.020319938659668 + ], + [ + "028", + -14.020353317260742 + ], + [ + "▁Huber", + -14.020395278930664 + ], + [ + "PTA", + -14.020478248596191 + ], + [ + "Duke", + -14.020649909973145 + ], + [ + "Employee", + -14.0206937789917 + ], + [ + "Portland", + -14.02076244354248 + ], + [ + "▁potable", + -14.020769119262695 + ], + [ + "latin", + -14.020787239074709 + ], + [ + "▁Freestyle", + -14.020813941955566 + ], + [ + "▁Yarra", + -14.020841598510742 + ], + [ + "yuk", + -14.020856857299805 + ], + [ + "▁Syrians", + -14.020858764648438 + ], + [ + "▁disliked", + -14.020883560180664 + ], + [ + "luc", + -14.020919799804688 + ], + [ + "▁sufferer", + -14.021007537841797 + ], + [ + "▁lobe", + -14.021011352539062 + ], + [ + "▁gist", + -14.02108097076416 + ], + [ + "ronic", + -14.021100044250488 + ], + [ + "▁PubMed", + -14.021112442016602 + ], + [ + "▁wowed", + -14.02122974395752 + ], + [ + "RPC", + -14.021418571472168 + 
], + [ + "Commenting", + -14.021488189697266 + ], + [ + "Donna", + -14.021524429321287 + ], + [ + "▁Ordering", + -14.021566390991213 + ], + [ + "ITCH", + -14.021615982055664 + ], + [ + "Activ", + -14.021632194519045 + ], + [ + "4:15", + -14.02163791656494 + ], + [ + "PRE", + -14.021642684936523 + ], + [ + "Mama", + -14.021676063537598 + ], + [ + "▁Presenting", + -14.021679878234863 + ], + [ + "forced", + -14.02171230316162 + ], + [ + "▁EAS", + -14.021727561950684 + ], + [ + "porous", + -14.021864891052246 + ], + [ + "pian", + -14.021865844726562 + ], + [ + "evolution", + -14.021955490112305 + ], + [ + "Shin", + -14.02205753326416 + ], + [ + "▁Toward", + -14.022077560424805 + ], + [ + "melt", + -14.022086143493652 + ], + [ + "▁£1,000", + -14.022133827209473 + ], + [ + "vig", + -14.022269248962402 + ], + [ + "▁Aide", + -14.022278785705566 + ], + [ + "▁Optimal", + -14.02231502532959 + ], + [ + "▁PROVIDE", + -14.02232551574707 + ], + [ + "▁Nya", + -14.022337913513184 + ], + [ + "▁grasped", + -14.02235984802246 + ], + [ + "▁justifying", + -14.022379875183104 + ], + [ + "▁CFS", + -14.022415161132812 + ], + [ + "1:15", + -14.02243709564209 + ], + [ + "▁Gaul", + -14.022442817687988 + ], + [ + "ologi", + -14.02248764038086 + ], + [ + "lister", + -14.022538185119627 + ], + [ + "▁ANGELES", + -14.022549629211426 + ], + [ + "▁Mysteries", + -14.022549629211426 + ], + [ + "▁articulating", + -14.022549629211426 + ], + [ + "▁divinity", + -14.022549629211426 + ], + [ + "▁fibrillation", + -14.022549629211426 + ], + [ + "▁kimchi", + -14.022549629211426 + ], + [ + "▁privatization", + -14.022549629211426 + ], + [ + "▁endorsing", + -14.022550582885742 + ], + [ + "wack", + -14.02255153656006 + ], + [ + "▁Folsom", + -14.022553443908691 + ], + [ + "▁immunotherapy", + -14.022555351257324 + ], + [ + "▁Montclair", + -14.022558212280272 + ], + [ + "▁attrition", + -14.022558212280272 + ], + [ + "▁juicing", + -14.022558212280272 + ], + [ + "▁unsatisfactory", + -14.022561073303224 + ], + [ + "▁dossier", + -14.02256202697754 + ], + [ + "▁NetEnt", + -14.022562980651855 + ], + [ + "▁Semester", + -14.022563934326172 + ], + [ + "▁cowboys", + -14.022567749023438 + ], + [ + "▁Staircase", + -14.022573471069336 + ], + [ + "thwaite", + -14.022576332092283 + ], + [ + "▁dissolving", + -14.022586822509766 + ], + [ + "▁cornbread", + -14.022591590881348 + ], + [ + "▁genotype", + -14.022594451904297 + ], + [ + "▁Beatrice", + -14.022604942321776 + ], + [ + "▁Deadpool", + -14.02260971069336 + ], + [ + "▁Goblin", + -14.022613525390623 + ], + [ + "▁Stuck", + -14.022631645202637 + ], + [ + "▁napping", + -14.022668838500977 + ], + [ + "▁Scuba", + -14.022669792175291 + ], + [ + "▁Pollock", + -14.022675514221191 + ], + [ + "▁mojo", + -14.022697448730469 + ], + [ + "tests", + -14.022732734680176 + ], + [ + "▁peaking", + -14.022768020629885 + ], + [ + "endorf", + -14.022774696350098 + ], + [ + "▁$25.00", + -14.022793769836426 + ], + [ + "▁ousted", + -14.022850036621094 + ], + [ + "ouche", + -14.022878646850586 + ], + [ + "▁ESD", + -14.02289867401123 + ], + [ + "▁hiker", + -14.022899627685549 + ], + [ + "▁scarlet", + -14.022910118103027 + ], + [ + "921", + -14.02291202545166 + ], + [ + "▁Famer", + -14.02291202545166 + ], + [ + "▁Kruger", + -14.022920608520508 + ], + [ + "▁dogged", + -14.02296257019043 + ], + [ + "▁fondness", + -14.022976875305176 + ], + [ + "▁moan", + -14.023035049438477 + ], + [ + "reality", + -14.023162841796877 + ], + [ + "MCA", + -14.023207664489746 + ], + [ + "▁purifying", + -14.023213386535645 + ], + [ + "▁infrequently", + 
-14.023256301879885 + ], + [ + "334", + -14.023273468017578 + ], + [ + "▁dissection", + -14.02329158782959 + ], + [ + "▁Monde", + -14.023392677307127 + ], + [ + "▁ventricular", + -14.023454666137695 + ], + [ + "▁outlast", + -14.023548126220703 + ], + [ + "confident", + -14.023557662963867 + ], + [ + "Murray", + -14.023579597473145 + ], + [ + "tzen", + -14.023604393005373 + ], + [ + "Stories", + -14.023609161376951 + ], + [ + "▁revitalized", + -14.023642539978027 + ], + [ + "▁Matches", + -14.023666381835938 + ], + [ + "Monster", + -14.023754119873049 + ], + [ + "Decide", + -14.023794174194336 + ], + [ + "dica", + -14.023818969726562 + ], + [ + "▁statuses", + -14.023846626281738 + ], + [ + "▁dumbbells", + -14.023869514465332 + ], + [ + "▁doubted", + -14.023887634277344 + ], + [ + "▁ranches", + -14.023889541625977 + ], + [ + "▁budge", + -14.023892402648926 + ], + [ + "ckman", + -14.023921966552734 + ], + [ + "DED", + -14.02393913269043 + ], + [ + "▁Bris", + -14.023947715759276 + ], + [ + "Generation", + -14.024007797241213 + ], + [ + "▁bushel", + -14.024025917053224 + ], + [ + "▁blossomed", + -14.02405071258545 + ], + [ + "Lucy", + -14.024067878723145 + ], + [ + "▁specif", + -14.02407455444336 + ], + [ + "▁warped", + -14.024092674255373 + ], + [ + "▁smoothed", + -14.024093627929688 + ], + [ + "▁PCIe", + -14.02410125732422 + ], + [ + "▁Bandit", + -14.024106979370115 + ], + [ + "▁801", + -14.024176597595217 + ], + [ + "minor", + -14.024185180664062 + ], + [ + "▁iii", + -14.024235725402832 + ], + [ + "SRA", + -14.024267196655272 + ], + [ + "NSA", + -14.024346351623535 + ], + [ + "▁maneuvering", + -14.024352073669434 + ], + [ + "▁Amin", + -14.024362564086914 + ], + [ + "▁Navigate", + -14.02446460723877 + ], + [ + "RANGE", + -14.024518013000488 + ], + [ + "▁Augustus", + -14.02451992034912 + ], + [ + "▁compounding", + -14.024531364440918 + ], + [ + "31)", + -14.024544715881348 + ], + [ + "abl", + -14.024613380432127 + ], + [ + "▁appropriations", + -14.02463722229004 + ], + [ + "skaya", + -14.024645805358888 + ], + [ + "▁Mice", + -14.02466106414795 + ], + [ + "▁houston", + -14.024686813354492 + ], + [ + "skirt", + -14.024735450744627 + ], + [ + "689", + -14.024742126464844 + ], + [ + "▁STD", + -14.024765014648438 + ], + [ + "elite", + -14.02481460571289 + ], + [ + "LITE", + -14.024837493896484 + ], + [ + "Est", + -14.02488899230957 + ], + [ + "dron", + -14.024895668029783 + ], + [ + "environmental", + -14.02489948272705 + ], + [ + "▁ravaged", + -14.02489948272705 + ], + [ + "lawn", + -14.024917602539062 + ], + [ + "lala", + -14.024972915649414 + ], + [ + "▁Decal", + -14.024978637695312 + ], + [ + "falls", + -14.024994850158691 + ], + [ + "uhl", + -14.025016784667969 + ], + [ + "▁surrendering", + -14.025019645690918 + ], + [ + "▁Barra", + -14.025059700012209 + ], + [ + "aboo", + -14.025081634521484 + ], + [ + "▁Announcement", + -14.025081634521484 + ], + [ + "▁LAKE", + -14.025150299072266 + ], + [ + "▁superstars", + -14.025150299072266 + ], + [ + "687", + -14.025185585021973 + ], + [ + "kok", + -14.02529525756836 + ], + [ + "drivers", + -14.025303840637209 + ], + [ + "Extend", + -14.02535915374756 + ], + [ + "▁Dara", + -14.02542781829834 + ], + [ + "dae", + -14.025472640991213 + ], + [ + "▁NOS", + -14.025493621826172 + ], + [ + "▁Orca", + -14.025497436523438 + ], + [ + "lase", + -14.025520324707031 + ], + [ + "▁Townhouse", + -14.025520324707031 + ], + [ + "▁Brum", + -14.025583267211914 + ], + [ + "Lynn", + -14.025687217712402 + ], + [ + "▁Retina", + -14.025689125061035 + ], + [ + "▁PRC", + 
-14.025771141052246 + ], + [ + "▁DMA", + -14.025809288024902 + ], + [ + "ange", + -14.025888442993164 + ], + [ + "HOME", + -14.025908470153809 + ], + [ + "▁Enroll", + -14.025956153869627 + ], + [ + "Jake", + -14.025996208190918 + ], + [ + "▁cataracts", + -14.025997161865234 + ], + [ + "▁PSC", + -14.026026725769045 + ], + [ + "▁Mariah", + -14.026068687438965 + ], + [ + "▁Reminder", + -14.02606964111328 + ], + [ + "1985", + -14.026185035705566 + ], + [ + "CIE", + -14.026225090026855 + ], + [ + "uddle", + -14.026260375976562 + ], + [ + "▁REALTOR", + -14.026301383972168 + ], + [ + "▁chestnuts", + -14.026389122009276 + ], + [ + "pond", + -14.026416778564451 + ], + [ + "▁CCM", + -14.026449203491213 + ], + [ + "▁Photographic", + -14.026469230651855 + ], + [ + "▁eventful", + -14.026504516601562 + ], + [ + "▁gps", + -14.026511192321776 + ], + [ + "▁Bev", + -14.026532173156738 + ], + [ + "▁Invent", + -14.02654266357422 + ], + [ + "▁Brandy", + -14.02657413482666 + ], + [ + "▁rout", + -14.026612281799316 + ], + [ + "▁Cobalt", + -14.026613235473633 + ], + [ + "result", + -14.026630401611328 + ], + [ + "setter", + -14.026630401611328 + ], + [ + "▁carcass", + -14.026639938354492 + ], + [ + "▁swells", + -14.026646614074709 + ], + [ + "▁Biotech", + -14.026694297790527 + ], + [ + "▁indebted", + -14.02671241760254 + ], + [ + "▁Brides", + -14.026721954345703 + ], + [ + "▁underarm", + -14.02675437927246 + ], + [ + "saccharide", + -14.026768684387209 + ], + [ + "▁Guadalupe", + -14.026768684387209 + ], + [ + "▁Kilimanjaro", + -14.026768684387209 + ], + [ + "▁fugitive", + -14.026768684387209 + ], + [ + "▁irreplaceable", + -14.026768684387209 + ], + [ + "▁terracotta", + -14.026768684387209 + ], + [ + "▁varicose", + -14.026768684387209 + ], + [ + "▁Guangdong", + -14.026769638061523 + ], + [ + "▁ventilator", + -14.026773452758787 + ], + [ + "▁buckwheat", + -14.026774406433104 + ], + [ + "▁skunk", + -14.026774406433104 + ], + [ + "▁Chloride", + -14.026780128479004 + ], + [ + "▁Fujifilm", + -14.026782035827637 + ], + [ + "▁Montague", + -14.026782989501951 + ], + [ + "▁Gibbons", + -14.02678394317627 + ], + [ + "▁Fisk", + -14.02678680419922 + ], + [ + "▁Skywalker", + -14.026793479919434 + ], + [ + "▁Chichester", + -14.02679443359375 + ], + [ + "▁catholic", + -14.026800155639648 + ], + [ + "▁Foil", + -14.026810646057127 + ], + [ + "▁Grafton", + -14.026827812194824 + ], + [ + "▁agnostic", + -14.026857376098633 + ], + [ + "▁marred", + -14.026863098144531 + ], + [ + "▁chronograph", + -14.026885986328123 + ], + [ + "▁rewind", + -14.026890754699709 + ], + [ + "▁$9.99", + -14.026910781860352 + ], + [ + "▁Sixteen", + -14.026938438415527 + ], + [ + "▁601", + -14.026947021484377 + ], + [ + "▁poodle", + -14.026959419250488 + ], + [ + "khar", + -14.02698802947998 + ], + [ + "▁Ofsted", + -14.026995658874512 + ], + [ + "▁epicenter", + -14.027006149291992 + ], + [ + "▁Newspapers", + -14.027017593383787 + ], + [ + "Lighting", + -14.027079582214355 + ], + [ + "▁plummeted", + -14.02711296081543 + ], + [ + "schedule", + -14.027154922485352 + ], + [ + "▁fortnightly", + -14.027174949645996 + ], + [ + "▁Pearls", + -14.027196884155272 + ], + [ + "▁reaffirmed", + -14.027202606201172 + ], + [ + "ital", + -14.027240753173828 + ], + [ + "Jin", + -14.027249336242676 + ], + [ + "mason", + -14.027286529541016 + ], + [ + "9′′", + -14.027311325073242 + ], + [ + "▁Probe", + -14.027337074279783 + ], + [ + "LDC", + -14.027344703674316 + ], + [ + "TIE", + -14.027348518371582 + ], + [ + "▁Prag", + -14.027352333068848 + ], + [ + "Gab", + -14.027379035949709 
+ ], + [ + "▁diagonally", + -14.027451515197754 + ], + [ + "▁licking", + -14.027613639831545 + ], + [ + "▁Lacey", + -14.027649879455566 + ], + [ + "088", + -14.027692794799805 + ], + [ + "▁inaction", + -14.027692794799805 + ], + [ + "▁Runway", + -14.027718544006348 + ], + [ + "▁Beside", + -14.027725219726562 + ], + [ + "TTY", + -14.02782154083252 + ], + [ + "▁sb", + -14.027841567993164 + ], + [ + "Chocolate", + -14.027853965759276 + ], + [ + "Preparation", + -14.027884483337402 + ], + [ + "Fabulous", + -14.027886390686035 + ], + [ + "prescription", + -14.027891159057615 + ], + [ + "Faculty", + -14.027894973754885 + ], + [ + "▁figurine", + -14.027905464172363 + ], + [ + "Trading", + -14.027908325195312 + ], + [ + "Monkey", + -14.027932167053224 + ], + [ + "▁rivets", + -14.027968406677246 + ], + [ + "fac", + -14.028013229370115 + ], + [ + "▁RAV", + -14.028013229370115 + ], + [ + "Az", + -14.028066635131836 + ], + [ + "▁corpses", + -14.028100967407228 + ], + [ + "Rh", + -14.028131484985352 + ], + [ + "inde", + -14.028159141540527 + ], + [ + "▁Aldo", + -14.028209686279297 + ], + [ + "egy", + -14.028258323669434 + ], + [ + "abb", + -14.028290748596191 + ], + [ + "▁wich", + -14.028318405151367 + ], + [ + "EED", + -14.028483390808104 + ], + [ + "eezer", + -14.028496742248535 + ], + [ + "▁Yummy", + -14.028536796569824 + ], + [ + "▁ACI", + -14.028544425964355 + ], + [ + "▁overheat", + -14.028571128845217 + ], + [ + "▁276", + -14.028590202331545 + ], + [ + "▁perusing", + -14.028599739074709 + ], + [ + "▁YUM", + -14.028605461120604 + ], + [ + "4.50", + -14.02861785888672 + ], + [ + "▁spectacularly", + -14.028703689575195 + ], + [ + "▁starry", + -14.028726577758787 + ], + [ + "▁apocalyptic", + -14.028759002685549 + ], + [ + "bao", + -14.028767585754396 + ], + [ + "▁penal", + -14.028770446777344 + ], + [ + "orient", + -14.028800964355469 + ], + [ + "ladder", + -14.028802871704102 + ], + [ + "Remarks", + -14.02885627746582 + ], + [ + "▁debuting", + -14.02889919281006 + ], + [ + "dental", + -14.029056549072266 + ], + [ + "271", + -14.02910614013672 + ], + [ + "▁CASH", + -14.029186248779297 + ], + [ + "▁Robe", + -14.02920913696289 + ], + [ + "072", + -14.029214859008787 + ], + [ + "BIG", + -14.029230117797852 + ], + [ + "▁overkill", + -14.029231071472168 + ], + [ + "▁este", + -14.02924919128418 + ], + [ + "Aero", + -14.029273986816406 + ], + [ + "cyclin", + -14.029279708862305 + ], + [ + "▁Inspect", + -14.029308319091797 + ], + [ + "/08", + -14.029326438903809 + ], + [ + "▁Corruption", + -14.029332160949709 + ], + [ + "marathon", + -14.029413223266602 + ], + [ + "▁Specially", + -14.029417991638184 + ], + [ + "▁Aya", + -14.029480934143066 + ], + [ + "▁Marlin", + -14.029488563537598 + ], + [ + "▁Scorpion", + -14.029521942138672 + ], + [ + "▁Trivia", + -14.029534339904783 + ], + [ + "Directed", + -14.029545783996582 + ], + [ + "▁Shaped", + -14.029546737670898 + ], + [ + "▁2016;", + -14.02955722808838 + ], + [ + "chte", + -14.029579162597656 + ], + [ + "▁345", + -14.029618263244627 + ], + [ + "checked", + -14.029623985290527 + ], + [ + "▁promos", + -14.029624938964844 + ], + [ + "▁punchy", + -14.029644966125488 + ], + [ + "▁Gunners", + -14.029701232910156 + ], + [ + "Allows", + -14.029702186584473 + ], + [ + "▁Dems", + -14.029743194580078 + ], + [ + "▁trimmings", + -14.02975082397461 + ], + [ + "▁NAD", + -14.02981662750244 + ], + [ + "▁Pei", + -14.030062675476074 + ], + [ + "▁pent", + -14.030070304870604 + ], + [ + "▁Tessa", + -14.030094146728516 + ], + [ + "▁Determin", + -14.030122756958008 + ], + [ + 
"▁sol", + -14.030133247375488 + ], + [ + "▁preside", + -14.030134201049805 + ], + [ + "ICI", + -14.030208587646484 + ], + [ + "icki", + -14.030237197875977 + ], + [ + "STAN", + -14.030261039733888 + ], + [ + "BASE", + -14.030275344848633 + ], + [ + "oha", + -14.030292510986328 + ], + [ + "▁roofer", + -14.030364990234377 + ], + [ + "▁summarise", + -14.030404090881348 + ], + [ + "debug", + -14.0304536819458 + ], + [ + "▁taunt", + -14.0304536819458 + ], + [ + "additional", + -14.030454635620115 + ], + [ + "▁Immigrant", + -14.03051471710205 + ], + [ + "▁spat", + -14.030540466308594 + ], + [ + "▁LOSS", + -14.030566215515137 + ], + [ + "▁Scranton", + -14.030622482299805 + ], + [ + "BIN", + -14.03064250946045 + ], + [ + "▁58%", + -14.030658721923828 + ], + [ + "▁Argent", + -14.030683517456056 + ], + [ + "pike", + -14.030688285827637 + ], + [ + "-4)", + -14.030725479125977 + ], + [ + "▁trad", + -14.030741691589355 + ], + [ + "nata", + -14.03076457977295 + ], + [ + "▁harnessed", + -14.030805587768556 + ], + [ + "lipped", + -14.03081512451172 + ], + [ + "moderate", + -14.030831336975098 + ], + [ + "▁awesomeness", + -14.030864715576172 + ], + [ + "▁apa", + -14.030987739562988 + ], + [ + "▁Lakshmi", + -14.031005859375 + ], + [ + "▁Refrigeration", + -14.031005859375 + ], + [ + "▁extrovert", + -14.031005859375 + ], + [ + "▁impeccably", + -14.031005859375 + ], + [ + "▁miserably", + -14.031005859375 + ], + [ + "▁permeability", + -14.031005859375 + ], + [ + "▁Malayalam", + -14.031010627746582 + ], + [ + "▁Mugabe", + -14.031013488769531 + ], + [ + "▁cypress", + -14.031014442443848 + ], + [ + "▁meddling", + -14.031014442443848 + ], + [ + "▁penultimate", + -14.031018257141112 + ], + [ + "▁panicked", + -14.031027793884276 + ], + [ + "▁Fiberglass", + -14.031030654907228 + ], + [ + "▁Levitra", + -14.03104305267334 + ], + [ + "▁scotch", + -14.031044960021973 + ], + [ + "▁Millennial", + -14.031055450439451 + ], + [ + "▁unify", + -14.031085968017578 + ], + [ + "▁Pilots", + -14.03109359741211 + ], + [ + "▁twinkle", + -14.03109645843506 + ], + [ + "zil", + -14.03110122680664 + ], + [ + "▁ZO", + -14.03110408782959 + ], + [ + "▁debugger", + -14.031105995178224 + ], + [ + "▁Dao", + -14.031132698059082 + ], + [ + "▁Goodyear", + -14.031132698059082 + ], + [ + "▁Hillsboro", + -14.031135559082031 + ], + [ + "citing", + -14.031192779541016 + ], + [ + "▁legible", + -14.031204223632812 + ], + [ + "▁renderings", + -14.031225204467772 + ], + [ + "▁Polynesian", + -14.031229972839355 + ], + [ + "▁Babu", + -14.031234741210938 + ], + [ + "▁Hotspot", + -14.03127098083496 + ], + [ + "▁reddit", + -14.031316757202148 + ], + [ + "▁Darrell", + -14.031350135803224 + ], + [ + "▁Puppies", + -14.031364440917969 + ], + [ + "▁Evansville", + -14.031367301940918 + ], + [ + "WARE", + -14.031392097473145 + ], + [ + "SSS", + -14.031394958496094 + ], + [ + "▁tethered", + -14.031489372253418 + ], + [ + "elson", + -14.031490325927734 + ], + [ + "▁slanted", + -14.031511306762695 + ], + [ + "▁grasslands", + -14.03158473968506 + ], + [ + "▁amending", + -14.031608581542969 + ], + [ + "stakes", + -14.031614303588867 + ], + [ + "▁mcg", + -14.031676292419434 + ], + [ + "Bean", + -14.031696319580078 + ], + [ + "▁BLOCK", + -14.03171157836914 + ], + [ + "▁Alexandre", + -14.03171443939209 + ], + [ + "rules", + -14.03171730041504 + ], + [ + "▁Nang", + -14.031736373901367 + ], + [ + "▁STS", + -14.031766891479492 + ], + [ + "atos", + -14.031779289245604 + ], + [ + "▁recieved", + -14.031816482543944 + ], + [ + "▁busting", + -14.031855583190918 + ], + [ + "PHONE", + 
-14.031867027282717 + ], + [ + "▁Hartman", + -14.031875610351562 + ], + [ + "▁0-2", + -14.03189182281494 + ], + [ + "▁Ignore", + -14.031909942626951 + ], + [ + "▁heather", + -14.031930923461914 + ], + [ + "▁Ngo", + -14.031949043273926 + ], + [ + "dala", + -14.032001495361328 + ], + [ + "▁grafting", + -14.03203582763672 + ], + [ + "▁5.9", + -14.032062530517578 + ], + [ + "▁discriminated", + -14.032093048095703 + ], + [ + "ravel", + -14.032105445861816 + ], + [ + "▁Payton", + -14.032110214233398 + ], + [ + "necessary", + -14.032177925109863 + ], + [ + "▁Luce", + -14.032190322875977 + ], + [ + "Jersey", + -14.032235145568848 + ], + [ + "reasonable", + -14.032238960266112 + ], + [ + "breakfast", + -14.03226089477539 + ], + [ + "preserved", + -14.032269477844238 + ], + [ + "▁shelling", + -14.032279014587402 + ], + [ + "▁diversifying", + -14.0322847366333 + ], + [ + "▁IMP", + -14.032294273376465 + ], + [ + "mature", + -14.032296180725098 + ], + [ + "vene", + -14.032312393188477 + ], + [ + "077", + -14.03235149383545 + ], + [ + "▁IMS", + -14.032353401184082 + ], + [ + "▁Coolers", + -14.032397270202637 + ], + [ + "Heather", + -14.032407760620115 + ], + [ + "pany", + -14.032430648803713 + ], + [ + "bou", + -14.032453536987305 + ], + [ + "▁butts", + -14.032464027404783 + ], + [ + "▁RTP", + -14.032610893249512 + ], + [ + "fork", + -14.032658576965332 + ], + [ + "▁ecologically", + -14.03266143798828 + ], + [ + "▁Minimalist", + -14.032672882080078 + ], + [ + "▁spectacles", + -14.032726287841797 + ], + [ + "▁Institut", + -14.032758712768556 + ], + [ + "EMU", + -14.032777786254885 + ], + [ + "▁Interval", + -14.032835006713867 + ], + [ + "▁MIND", + -14.0328369140625 + ], + [ + "benz", + -14.032838821411133 + ], + [ + "coil", + -14.032917976379396 + ], + [ + "SME", + -14.032936096191406 + ], + [ + "▁normalize", + -14.03294277191162 + ], + [ + "827", + -14.032999992370604 + ], + [ + "Selected", + -14.033058166503906 + ], + [ + "0.50", + -14.033129692077637 + ], + [ + "▁ohm", + -14.033159255981444 + ], + [ + "Surf", + -14.03327178955078 + ], + [ + "▁Lanc", + -14.033332824707031 + ], + [ + "▁thr", + -14.03337860107422 + ], + [ + "▁Swar", + -14.033394813537598 + ], + [ + "▁geometrical", + -14.033418655395508 + ], + [ + "▁Apostles", + -14.033419609069824 + ], + [ + "▁DOOR", + -14.03348445892334 + ], + [ + "908", + -14.033493041992188 + ], + [ + "Registered", + -14.033586502075195 + ], + [ + "CIL", + -14.033623695373535 + ], + [ + "2”", + -14.033646583557127 + ], + [ + "▁MCT", + -14.033740997314451 + ], + [ + "▁sunflowers", + -14.033778190612791 + ], + [ + "VY", + -14.033846855163574 + ], + [ + "▁peasants", + -14.033864974975586 + ], + [ + "▁Sting", + -14.03390121459961 + ], + [ + "▁Easier", + -14.033907890319824 + ], + [ + "▁PPO", + -14.033928871154783 + ], + [ + "Ruth", + -14.033970832824709 + ], + [ + "▁Dune", + -14.034017562866213 + ], + [ + "▁Dang", + -14.034128189086914 + ], + [ + "▁Gaddafi", + -14.034132957458496 + ], + [ + "OPP", + -14.034135818481444 + ], + [ + "975", + -14.0341796875 + ], + [ + "744", + -14.034185409545898 + ], + [ + "Semi", + -14.034185409545898 + ], + [ + "▁Briar", + -14.03428077697754 + ], + [ + "▁bloat", + -14.034358978271484 + ], + [ + "▁aquariums", + -14.034371376037598 + ], + [ + "▁Mathew", + -14.034608840942385 + ], + [ + "▁LOCAL", + -14.03463077545166 + ], + [ + "SIN", + -14.034669876098633 + ], + [ + "▁Hoop", + -14.034677505493164 + ], + [ + "Origin", + -14.034697532653809 + ], + [ + "▁4:1", + -14.034708023071287 + ], + [ + "▁zoomed", + -14.034767150878906 + ], + [ + "vious", 
+ -14.034786224365234 + ], + [ + "▁foundry", + -14.034865379333496 + ], + [ + "▁hysterical", + -14.03501796722412 + ], + [ + "CPC", + -14.035061836242676 + ], + [ + "KILL", + -14.035066604614258 + ], + [ + "kc", + -14.035110473632812 + ], + [ + "▁BIT", + -14.035130500793455 + ], + [ + "▁necessitates", + -14.035139083862305 + ], + [ + "zoo", + -14.035154342651367 + ], + [ + "escape", + -14.035161972045898 + ], + [ + "▁cir", + -14.03524398803711 + ], + [ + "▁Everglades", + -14.035261154174805 + ], + [ + "▁Ignition", + -14.035261154174805 + ], + [ + "▁PURCHASE", + -14.035261154174805 + ], + [ + "▁accomodation", + -14.035261154174805 + ], + [ + "▁alfalfa", + -14.035261154174805 + ], + [ + "▁antiquated", + -14.035261154174805 + ], + [ + "▁sabbatical", + -14.035261154174805 + ], + [ + "▁Renegade", + -14.03526210784912 + ], + [ + "▁Bhopal", + -14.03526496887207 + ], + [ + "▁lupus", + -14.035265922546388 + ], + [ + "▁Sligo", + -14.035266876220703 + ], + [ + "▁rollercoaster", + -14.03526782989502 + ], + [ + "▁Psychotherapy", + -14.035268783569336 + ], + [ + "▁verifiable", + -14.035268783569336 + ], + [ + "▁ICANN", + -14.035271644592283 + ], + [ + "▁Allahabad", + -14.035272598266602 + ], + [ + "▁Corfu", + -14.035301208496094 + ], + [ + "▁KitchenAid", + -14.035333633422852 + ], + [ + "▁Judas", + -14.035338401794434 + ], + [ + "▁resizing", + -14.03533935546875 + ], + [ + "▁SHALL", + -14.035369873046877 + ], + [ + "▁subvert", + -14.035372734069824 + ], + [ + "▁wainscoting", + -14.035375595092772 + ], + [ + "▁Surfer", + -14.035385131835938 + ], + [ + "▁grail", + -14.035419464111328 + ], + [ + "Nar", + -14.0354642868042 + ], + [ + "▁tiresome", + -14.035494804382324 + ], + [ + "▁Beg", + -14.035500526428224 + ], + [ + "Rot", + -14.035516738891602 + ], + [ + "▁Granville", + -14.035527229309082 + ], + [ + "▁GRAND", + -14.03553581237793 + ], + [ + "▁Doubt", + -14.03557586669922 + ], + [ + "▁quadruple", + -14.03565502166748 + ], + [ + "▁Sumatra", + -14.03566074371338 + ], + [ + "▁unending", + -14.035747528076172 + ], + [ + "QUEST", + -14.035761833190918 + ], + [ + "▁Align", + -14.035783767700195 + ], + [ + "zik", + -14.035785675048828 + ], + [ + "▁1/2-", + -14.035807609558104 + ], + [ + "Labor", + -14.035881042480469 + ], + [ + "▁Shawnee", + -14.035886764526367 + ], + [ + "▁rebranding", + -14.035916328430176 + ], + [ + "▁Directly", + -14.035918235778809 + ], + [ + "▁morphed", + -14.035964965820312 + ], + [ + "00.00", + -14.035977363586426 + ], + [ + "▁filesystem", + -14.035996437072754 + ], + [ + "▁airfield", + -14.036011695861816 + ], + [ + "▁endo", + -14.03603172302246 + ], + [ + "▁Invention", + -14.03604507446289 + ], + [ + "▁savers", + -14.036088943481444 + ], + [ + "▁Coates", + -14.036197662353516 + ], + [ + "▁Millie", + -14.03620433807373 + ], + [ + "▁Petersen", + -14.036247253417969 + ], + [ + "compile", + -14.036266326904297 + ], + [ + "hmmm", + -14.036311149597168 + ], + [ + "▁Smokey", + -14.03632640838623 + ], + [ + "HZ", + -14.036331176757812 + ], + [ + "▁calipers", + -14.036398887634276 + ], + [ + "▁summits", + -14.036410331726074 + ], + [ + "▁Danville", + -14.036413192749023 + ], + [ + "Parameter", + -14.036428451538086 + ], + [ + "valu", + -14.036484718322754 + ], + [ + "814", + -14.036503791809082 + ], + [ + "▁Pickering", + -14.036516189575195 + ], + [ + "tweet", + -14.0365571975708 + ], + [ + "▁Tempo", + -14.03657341003418 + ], + [ + "▁wannabe", + -14.036591529846191 + ], + [ + "wealth", + -14.036646842956545 + ], + [ + "Activity", + -14.036649703979492 + ], + [ + "theatre", + 
-14.036663055419922 + ], + [ + "▁Spelling", + -14.036666870117188 + ], + [ + "▁initiates", + -14.0366792678833 + ], + [ + "▁Mortal", + -14.0366849899292 + ], + [ + "▁MCI", + -14.036686897277832 + ], + [ + "Jerry", + -14.036717414855955 + ], + [ + "▁Humber", + -14.03672695159912 + ], + [ + "▁SLI", + -14.036746978759766 + ], + [ + "udan", + -14.03675937652588 + ], + [ + "▁magicians", + -14.036770820617676 + ], + [ + "▁PPS", + -14.036782264709473 + ], + [ + "▁Vat", + -14.036879539489746 + ], + [ + "Mental", + -14.036880493164062 + ], + [ + "vati", + -14.036947250366213 + ], + [ + "fab", + -14.03695297241211 + ], + [ + "Clock", + -14.036959648132324 + ], + [ + "ILA", + -14.036961555480955 + ], + [ + "▁slander", + -14.03696346282959 + ], + [ + "▁shepherds", + -14.036998748779297 + ], + [ + "▁Terrific", + -14.037009239196776 + ], + [ + "▁1873", + -14.037013053894045 + ], + [ + "Makes", + -14.03705596923828 + ], + [ + "crit", + -14.037056922912598 + ], + [ + "036", + -14.03706169128418 + ], + [ + "▁regimens", + -14.037063598632812 + ], + [ + "▁Cyto", + -14.037141799926758 + ], + [ + "▁vignettes", + -14.037148475646973 + ], + [ + "Reported", + -14.037174224853516 + ], + [ + "▁Saar", + -14.037182807922363 + ], + [ + "▁endorphins", + -14.037186622619627 + ], + [ + "ignon", + -14.037200927734377 + ], + [ + "▁49%", + -14.037201881408691 + ], + [ + "▁Cuff", + -14.037226676940918 + ], + [ + "▁horticultural", + -14.037230491638184 + ], + [ + "▁Elders", + -14.03723430633545 + ], + [ + "▁sinner", + -14.03729248046875 + ], + [ + "▁persistently", + -14.037348747253418 + ], + [ + "▁Seine", + -14.037357330322266 + ], + [ + "793", + -14.037463188171388 + ], + [ + "▁empires", + -14.037477493286133 + ], + [ + "▁pints", + -14.037477493286133 + ], + [ + "%20", + -14.037504196166992 + ], + [ + "ariah", + -14.037528038024902 + ], + [ + "▁BCC", + -14.037714004516602 + ], + [ + "zoom", + -14.0377197265625 + ], + [ + "▁pauses", + -14.037734985351562 + ], + [ + "rke", + -14.037763595581056 + ], + [ + "▁headshot", + -14.03779411315918 + ], + [ + "coloring", + -14.037830352783203 + ], + [ + "▁artsy", + -14.037934303283691 + ], + [ + "▁Glu", + -14.037939071655272 + ], + [ + "▁Loveland", + -14.037982940673828 + ], + [ + "utch", + -14.037986755371094 + ], + [ + "▁substitutions", + -14.03805923461914 + ], + [ + "Token", + -14.03806209564209 + ], + [ + "AIS", + -14.038132667541504 + ], + [ + "▁Passengers", + -14.03817367553711 + ], + [ + "▁Programmes", + -14.038199424743652 + ], + [ + "▁Griff", + -14.038203239440918 + ], + [ + "jel", + -14.03822135925293 + ], + [ + "▁Falk", + -14.038263320922852 + ], + [ + "▁conveyance", + -14.038277626037598 + ], + [ + "Barb", + -14.038291931152344 + ], + [ + "Installing", + -14.03831386566162 + ], + [ + "▁Suits", + -14.03832721710205 + ], + [ + "▁theorists", + -14.038334846496582 + ], + [ + "nyi", + -14.038344383239746 + ], + [ + "licit", + -14.038415908813477 + ], + [ + "▁chained", + -14.038503646850586 + ], + [ + "▁saber", + -14.0385160446167 + ], + [ + "100,000", + -14.038537979125977 + ], + [ + "robot", + -14.03854751586914 + ], + [ + "PED", + -14.038634300231934 + ], + [ + "▁uncovers", + -14.038636207580566 + ], + [ + "designer", + -14.038728713989258 + ], + [ + "▁278", + -14.03873348236084 + ], + [ + "▁migrations", + -14.038885116577148 + ], + [ + "▁swoon", + -14.03894329071045 + ], + [ + "▁mittens", + -14.038949012756348 + ], + [ + "▁speechless", + -14.038952827453612 + ], + [ + "▁starve", + -14.03900146484375 + ], + [ + "▁Drawers", + -14.0390043258667 + ], + [ + "▁slits", + 
-14.039022445678713 + ], + [ + "venue", + -14.039032936096191 + ], + [ + "083", + -14.03903579711914 + ], + [ + "▁OCT", + -14.039066314697266 + ], + [ + "▁Wap", + -14.039071083068848 + ], + [ + "▁PHI", + -14.039091110229492 + ], + [ + "▁Defeat", + -14.039094924926758 + ], + [ + "marking", + -14.039116859436035 + ], + [ + "▁Planting", + -14.039130210876465 + ], + [ + "▁tera", + -14.039146423339844 + ], + [ + "posed", + -14.039175987243652 + ], + [ + "odic", + -14.03918170928955 + ], + [ + "akti", + -14.039191246032717 + ], + [ + "ERI", + -14.039192199707031 + ], + [ + "honest", + -14.039196968078612 + ], + [ + "▁1957.", + -14.03928565979004 + ], + [ + "▁Kernel", + -14.039286613464355 + ], + [ + "▁Enchanted", + -14.039346694946287 + ], + [ + "258", + -14.03939723968506 + ], + [ + "anese", + -14.039401054382324 + ], + [ + "▁Campaigns", + -14.039403915405272 + ], + [ + "ADI", + -14.039420127868652 + ], + [ + "ciones", + -14.03944206237793 + ], + [ + "▁biceps", + -14.039491653442385 + ], + [ + "▁Biennial", + -14.03953456878662 + ], + [ + "▁Facilitator", + -14.03953456878662 + ], + [ + "▁Pegasus", + -14.03953456878662 + ], + [ + "▁anorexia", + -14.03953456878662 + ], + [ + "▁circulatory", + -14.03953456878662 + ], + [ + "▁disgruntled", + -14.03953456878662 + ], + [ + "▁luminaries", + -14.03953456878662 + ], + [ + "▁surfactant", + -14.03953456878662 + ], + [ + "▁unexplored", + -14.03953456878662 + ], + [ + "▁contraption", + -14.039535522460938 + ], + [ + "▁prettier", + -14.039536476135254 + ], + [ + "▁rhinoplasty", + -14.039536476135254 + ], + [ + "▁Pompeo", + -14.03953742980957 + ], + [ + "▁Truffle", + -14.039538383483888 + ], + [ + "▁Dolores", + -14.039539337158203 + ], + [ + "▁impeller", + -14.039544105529783 + ], + [ + "▁Oswald", + -14.03954792022705 + ], + [ + "ordinated", + -14.039549827575684 + ], + [ + "▁Drummond", + -14.03955078125 + ], + [ + "▁ferret", + -14.03955078125 + ], + [ + "▁Lesbian", + -14.039552688598633 + ], + [ + "▁Struggle", + -14.03957462310791 + ], + [ + "▁spoils", + -14.03957748413086 + ], + [ + "▁poise", + -14.039583206176758 + ], + [ + "▁dunno", + -14.039597511291504 + ], + [ + "▁Spinning", + -14.039603233337402 + ], + [ + "▁Rhonda", + -14.039615631103516 + ], + [ + "▁tal", + -14.03964614868164 + ], + [ + "▁ASL", + -14.03965663909912 + ], + [ + "▁Horace", + -14.039714813232422 + ], + [ + "▁sciatica", + -14.03976821899414 + ], + [ + "▁Handyman", + -14.039776802062988 + ], + [ + "▁Salinas", + -14.03989315032959 + ], + [ + "▁onscreen", + -14.039907455444336 + ], + [ + "zzy", + -14.0399751663208 + ], + [ + "▁2013-14", + -14.039996147155762 + ], + [ + "▁standstill", + -14.039998054504396 + ], + [ + "▁nuns", + -14.040040969848633 + ], + [ + "▁Corbin", + -14.040082931518556 + ], + [ + "▁repost", + -14.040118217468262 + ], + [ + "▁1960'", + -14.040122985839844 + ], + [ + "shy", + -14.040213584899902 + ], + [ + "▁summery", + -14.040273666381836 + ], + [ + "cana", + -14.040277481079102 + ], + [ + "▁Silo", + -14.040283203125 + ], + [ + "▁Bowes", + -14.040335655212402 + ], + [ + "▁Boarding", + -14.040371894836426 + ], + [ + "▁kickstart", + -14.040385246276855 + ], + [ + "▁Sophomore", + -14.040403366088867 + ], + [ + "▁Roxy", + -14.04043674468994 + ], + [ + "▁Openings", + -14.040474891662598 + ], + [ + "AFP", + -14.040492057800291 + ], + [ + "dera", + -14.040519714355469 + ], + [ + "▁Landry", + -14.040519714355469 + ], + [ + "093", + -14.040528297424316 + ], + [ + "Goal", + -14.040538787841797 + ], + [ + "▁Spouse", + -14.04055881500244 + ], + [ + "▁Locally", + -14.040587425231934 + 
], + [ + "bord", + -14.040629386901855 + ], + [ + "▁Interchange", + -14.040656089782717 + ], + [ + "▁falcons", + -14.040658950805664 + ], + [ + "▁impresses", + -14.040682792663574 + ], + [ + "▁TABLE", + -14.040740966796877 + ], + [ + "IENT", + -14.040771484375 + ], + [ + "▁Ronan", + -14.040796279907228 + ], + [ + "▁prawn", + -14.04084014892578 + ], + [ + "Analyze", + -14.04094696044922 + ], + [ + "tsy", + -14.040948867797852 + ], + [ + "IMPORTANT", + -14.04095458984375 + ], + [ + "▁Addict", + -14.040989875793455 + ], + [ + "Margaret", + -14.040995597839355 + ], + [ + "ption", + -14.040996551513672 + ], + [ + "luxury", + -14.041003227233888 + ], + [ + "Coupon", + -14.041007041931152 + ], + [ + "precision", + -14.041019439697266 + ], + [ + "hine", + -14.041021347045898 + ], + [ + "crazy", + -14.041022300720217 + ], + [ + "▁zur", + -14.04103946685791 + ], + [ + "▁306", + -14.04104709625244 + ], + [ + "▁aspires", + -14.041049003601074 + ], + [ + "▁incidentally", + -14.041050910949709 + ], + [ + "▁industrialized", + -14.041053771972656 + ], + [ + "▁Wur", + -14.04107666015625 + ], + [ + "▁vlog", + -14.041093826293944 + ], + [ + "▁Foreman", + -14.041165351867676 + ], + [ + "Becoming", + -14.041167259216309 + ], + [ + "experts", + -14.04116916656494 + ], + [ + "Mile", + -14.041190147399902 + ], + [ + "Truly", + -14.04122257232666 + ], + [ + "terre", + -14.041229248046877 + ], + [ + "▁DNC", + -14.041229248046877 + ], + [ + "▁Cultures", + -14.041234970092772 + ], + [ + "Monthly", + -14.041271209716797 + ], + [ + "▁Mow", + -14.041290283203123 + ], + [ + "Veteran", + -14.04136085510254 + ], + [ + "▁racetrack", + -14.04136848449707 + ], + [ + "trigger", + -14.041372299194336 + ], + [ + "certain", + -14.041373252868652 + ], + [ + "▁FFA", + -14.04141616821289 + ], + [ + "▁Shutters", + -14.04143238067627 + ], + [ + "▁Transformer", + -14.04145336151123 + ], + [ + "▁combing", + -14.041485786437988 + ], + [ + "ately", + -14.04148769378662 + ], + [ + "▁blossoming", + -14.04148769378662 + ], + [ + "Barry", + -14.041508674621582 + ], + [ + "▁proprietors", + -14.041529655456545 + ], + [ + "▁loosening", + -14.041549682617188 + ], + [ + "lity", + -14.041563034057615 + ], + [ + "▁hampers", + -14.041596412658691 + ], + [ + "▁einem", + -14.041607856750488 + ], + [ + "acon", + -14.041613578796388 + ], + [ + "couple", + -14.041624069213867 + ], + [ + "▁prefab", + -14.041665077209473 + ], + [ + "oja", + -14.041670799255373 + ], + [ + "▁Cleanse", + -14.041678428649902 + ], + [ + "▁OST", + -14.041686058044434 + ], + [ + "Investing", + -14.041693687438965 + ], + [ + "Ensur", + -14.041712760925291 + ], + [ + "▁Exquisite", + -14.041712760925291 + ], + [ + "▁Pentecost", + -14.041789054870604 + ], + [ + "▁mecca", + -14.041796684265137 + ], + [ + "▁fitment", + -14.041808128356934 + ], + [ + "▁libel", + -14.041828155517578 + ], + [ + "Dip", + -14.04190444946289 + ], + [ + "▁Hines", + -14.04206085205078 + ], + [ + "▁rotator", + -14.04213047027588 + ], + [ + "▁Juneau", + -14.042160987854004 + ], + [ + "▁Zane", + -14.042180061340332 + ], + [ + "▁conc", + -14.042268753051758 + ], + [ + "▁Oxy", + -14.042288780212402 + ], + [ + "▁droid", + -14.042299270629885 + ], + [ + "▁flashback", + -14.042346954345703 + ], + [ + "cible", + -14.042362213134766 + ], + [ + "▁syllable", + -14.042415618896484 + ], + [ + "Educational", + -14.04242992401123 + ], + [ + "uji", + -14.042460441589355 + ], + [ + "–8", + -14.042503356933594 + ], + [ + "▁inferred", + -14.042536735534668 + ], + [ + "Visa", + -14.04261589050293 + ], + [ + "occhi", + 
-14.042764663696287 + ], + [ + "▁Neat", + -14.042840003967283 + ], + [ + "BEL", + -14.042919158935549 + ], + [ + "▁9-5", + -14.042977333068848 + ], + [ + "▁Physiotherapy", + -14.043011665344238 + ], + [ + "▁57%", + -14.043025970458984 + ], + [ + "▁2026", + -14.043177604675291 + ], + [ + "orio", + -14.043182373046877 + ], + [ + "Reward", + -14.043220520019531 + ], + [ + "▁batching", + -14.04322338104248 + ], + [ + "▁donning", + -14.043254852294922 + ], + [ + "▁frontiers", + -14.043283462524414 + ], + [ + "▁hating", + -14.043336868286133 + ], + [ + "anche", + -14.043383598327637 + ], + [ + "▁Generations", + -14.043400764465332 + ], + [ + "0-3", + -14.043447494506836 + ], + [ + "▁Complaints", + -14.043492317199709 + ], + [ + "TIPS", + -14.043517112731934 + ], + [ + "▁Yon", + -14.043569564819336 + ], + [ + "▁Rafi", + -14.043590545654297 + ], + [ + "ffa", + -14.043597221374512 + ], + [ + "808", + -14.043654441833496 + ], + [ + "▁biofuels", + -14.04366397857666 + ], + [ + "▁leans", + -14.0438232421875 + ], + [ + "▁Anthology", + -14.043827056884766 + ], + [ + "▁Cabaret", + -14.043827056884766 + ], + [ + "▁Corbett", + -14.043827056884766 + ], + [ + "▁PUBLIC", + -14.043827056884766 + ], + [ + "▁Schwarzenegger", + -14.043827056884766 + ], + [ + "▁knowledgable", + -14.043827056884766 + ], + [ + "▁laparoscopic", + -14.043827056884766 + ], + [ + "▁ophthalmologist", + -14.043827056884766 + ], + [ + "▁Fukushima", + -14.043828010559082 + ], + [ + "CURRENT", + -14.043828964233398 + ], + [ + "▁Galapagos", + -14.043828964233398 + ], + [ + "▁Whitaker", + -14.043832778930664 + ], + [ + "▁Infectious", + -14.04384708404541 + ], + [ + "pulse", + -14.043856620788574 + ], + [ + "▁digestible", + -14.043861389160156 + ], + [ + "▁embryonic", + -14.043861389160156 + ], + [ + "▁mammalian", + -14.043862342834473 + ], + [ + "▁subtropical", + -14.043866157531738 + ], + [ + "▁sexism", + -14.043869972229004 + ], + [ + "▁hydropower", + -14.04387664794922 + ], + [ + "▁Delicate", + -14.043903350830078 + ], + [ + "▁shunt", + -14.043919563293455 + ], + [ + "▁RTI", + -14.043931007385254 + ], + [ + "▁scrapping", + -14.04393482208252 + ], + [ + "MIA", + -14.043936729431152 + ], + [ + "▁Marianne", + -14.043947219848633 + ], + [ + "▁Manifesto", + -14.043974876403809 + ], + [ + "▁Coloriage", + -14.043975830078123 + ], + [ + "▁pepperoni", + -14.04400634765625 + ], + [ + "▁swamps", + -14.0440092086792 + ], + [ + "▁Ending", + -14.044010162353516 + ], + [ + "▁wy", + -14.044055938720703 + ], + [ + "thumb", + -14.044065475463867 + ], + [ + "iques", + -14.044086456298828 + ], + [ + "▁voiceover", + -14.044087409973145 + ], + [ + "cene", + -14.044110298156738 + ], + [ + "zit", + -14.044123649597168 + ], + [ + "ifiers", + -14.044129371643066 + ], + [ + "buri", + -14.044134140014648 + ], + [ + "▁AVE", + -14.044156074523926 + ], + [ + "▁skyrocketed", + -14.044190406799316 + ], + [ + "▁startled", + -14.044190406799316 + ], + [ + "Eu", + -14.044193267822266 + ], + [ + "uin", + -14.044196128845217 + ], + [ + "▁videography", + -14.04423999786377 + ], + [ + "▁Curt", + -14.04424285888672 + ], + [ + "▁Trucking", + -14.044244766235352 + ], + [ + "▁Fortuna", + -14.044246673583984 + ], + [ + "▁halal", + -14.044273376464844 + ], + [ + "▁1868", + -14.044373512268066 + ], + [ + "▁72-", + -14.04440975189209 + ], + [ + "▁7.9", + -14.044458389282228 + ], + [ + "▁trooper", + -14.044570922851562 + ], + [ + "▁Cabot", + -14.044608116149902 + ], + [ + "▁Bespoke", + -14.04461669921875 + ], + [ + "tze", + -14.044677734375 + ], + [ + "ikka", + -14.044745445251465 + ], + 
[ + "▁goldfish", + -14.044792175292969 + ], + [ + "▁Laptops", + -14.04490566253662 + ], + [ + "▁caregiving", + -14.044930458068848 + ], + [ + "skills", + -14.045008659362791 + ], + [ + "593", + -14.04503059387207 + ], + [ + "▁crowding", + -14.04514217376709 + ], + [ + "Typical", + -14.045153617858888 + ], + [ + "▁hobbyists", + -14.045193672180176 + ], + [ + "electing", + -14.045293807983398 + ], + [ + "▁stews", + -14.045336723327637 + ], + [ + "▁pamphlets", + -14.045354843139648 + ], + [ + "Ironically", + -14.045381546020508 + ], + [ + "episode", + -14.045390129089355 + ], + [ + "exposure", + -14.045392990112305 + ], + [ + "Gordon", + -14.045401573181152 + ], + [ + "Gallery", + -14.045404434204102 + ], + [ + "▁Knoll", + -14.045414924621582 + ], + [ + "▁Sv", + -14.045430183410645 + ], + [ + "sitting", + -14.045472145080566 + ], + [ + "▁twill", + -14.045479774475098 + ], + [ + "cado", + -14.045534133911133 + ], + [ + "▁Findings", + -14.045540809631348 + ], + [ + "Pine", + -14.045546531677246 + ], + [ + "▁passer", + -14.045656204223633 + ], + [ + "organised", + -14.04568862915039 + ], + [ + "Pressure", + -14.045706748962402 + ], + [ + "Extended", + -14.045790672302246 + ], + [ + "▁’90", + -14.04586124420166 + ], + [ + "356", + -14.045896530151367 + ], + [ + "▁Kristian", + -14.045903205871582 + ], + [ + "▁spoiling", + -14.045905113220217 + ], + [ + "JF", + -14.04593563079834 + ], + [ + "▁USER", + -14.04596710205078 + ], + [ + "▁2006)", + -14.045992851257324 + ], + [ + "▁AUS", + -14.046003341674805 + ], + [ + "▁rethinking", + -14.04605770111084 + ], + [ + "teo", + -14.046101570129396 + ], + [ + "▁NTN", + -14.04612159729004 + ], + [ + "261", + -14.046150207519531 + ], + [ + "vpn", + -14.046165466308594 + ], + [ + "▁Sund", + -14.04617404937744 + ], + [ + "▁MARKET", + -14.046192169189451 + ], + [ + "alien", + -14.046242713928224 + ], + [ + "▁sprinklers", + -14.046263694763184 + ], + [ + "▁fling", + -14.046270370483398 + ], + [ + "▁SHIP", + -14.046294212341309 + ], + [ + "pow", + -14.046329498291016 + ], + [ + "processed", + -14.046368598937988 + ], + [ + "▁Datin", + -14.046379089355469 + ], + [ + "▁spay", + -14.04652500152588 + ], + [ + "▁NRL", + -14.04656219482422 + ], + [ + "▁eruptions", + -14.046576499938965 + ], + [ + "837", + -14.046632766723633 + ], + [ + "▁SSR", + -14.046690940856934 + ], + [ + "▁headboards", + -14.04678440093994 + ], + [ + "▁partitioning", + -14.046833038330078 + ], + [ + "3.00", + -14.046878814697266 + ], + [ + "▁Aust", + -14.04689121246338 + ], + [ + "higher", + -14.046975135803224 + ], + [ + "Rum", + -14.046993255615234 + ], + [ + "▁Marlins", + -14.047035217285156 + ], + [ + "Readers", + -14.047052383422852 + ], + [ + "531", + -14.047074317932127 + ], + [ + "Pri", + -14.047123908996582 + ], + [ + "immune", + -14.047148704528809 + ], + [ + "Trent", + -14.04722785949707 + ], + [ + "▁Schwe", + -14.047234535217283 + ], + [ + "▁sconces", + -14.047245025634766 + ], + [ + "▁2,400", + -14.047314643859863 + ], + [ + "ifolia", + -14.04736042022705 + ], + [ + "▁Races", + -14.047381401062012 + ], + [ + "▁Alp", + -14.047408103942873 + ], + [ + "▁5+", + -14.047525405883787 + ], + [ + "ILE", + -14.047544479370115 + ], + [ + "teach", + -14.047574996948242 + ], + [ + "▁Visitation", + -14.047584533691406 + ], + [ + "▁1050", + -14.047652244567873 + ], + [ + "NON", + -14.047722816467283 + ], + [ + "▁Inflation", + -14.047770500183104 + ], + [ + "Sw", + -14.047785758972168 + ], + [ + "LOAD", + -14.047796249389648 + ], + [ + "Por", + -14.047985076904297 + ], + [ + "▁Shiny", + 
-14.048081398010254 + ], + [ + "▁british", + -14.048083305358888 + ], + [ + "▁sandwiched", + -14.048117637634276 + ], + [ + "▁Gideon", + -14.048136711120604 + ], + [ + "▁Glastonbury", + -14.048136711120604 + ], + [ + "▁Syndicate", + -14.048136711120604 + ], + [ + "▁gnocchi", + -14.048136711120604 + ], + [ + "▁hibernation", + -14.048136711120604 + ], + [ + "▁methadone", + -14.048136711120604 + ], + [ + "▁progesterone", + -14.048136711120604 + ], + [ + "▁sacrificial", + -14.048136711120604 + ], + [ + "▁Ombudsman", + -14.048137664794922 + ], + [ + "▁audacious", + -14.048137664794922 + ], + [ + "▁undetected", + -14.048137664794922 + ], + [ + "▁Possession", + -14.048142433166504 + ], + [ + "▁MANUAL", + -14.04814338684082 + ], + [ + "▁Faulkner", + -14.048147201538086 + ], + [ + "▁Subcommittee", + -14.04814910888672 + ], + [ + "▁Sardinia", + -14.048150062561035 + ], + [ + "▁Honeymoon", + -14.048151969909668 + ], + [ + "▁Bootcamp", + -14.048154830932615 + ], + [ + "▁BELOW", + -14.048163414001465 + ], + [ + "▁Bradshaw", + -14.048169136047363 + ], + [ + "▁Mosul", + -14.048185348510742 + ], + [ + "▁emotive", + -14.04819679260254 + ], + [ + "▁cPanel", + -14.048197746276855 + ], + [ + "▁clapping", + -14.048200607299805 + ], + [ + "▁IMEI", + -14.0482177734375 + ], + [ + "▁trustworthiness", + -14.04822063446045 + ], + [ + "▁Fintech", + -14.04823875427246 + ], + [ + "Duck", + -14.048293113708496 + ], + [ + "▁Belo", + -14.04830837249756 + ], + [ + "Anyways", + -14.048310279846191 + ], + [ + "battle", + -14.048354148864746 + ], + [ + "▁bookkeeper", + -14.048354148864746 + ], + [ + "▁recharging", + -14.048364639282228 + ], + [ + "▁Darius", + -14.04837417602539 + ], + [ + "▁Musik", + -14.048375129699709 + ], + [ + "▁MET", + -14.048391342163086 + ], + [ + "ceive", + -14.04842472076416 + ], + [ + "▁Risks", + -14.04847240447998 + ], + [ + "▁2010;", + -14.048483848571776 + ], + [ + "▁Yves", + -14.048498153686523 + ], + [ + "NTP", + -14.048528671264648 + ], + [ + "▁cramping", + -14.04853057861328 + ], + [ + "▁Drives", + -14.04854679107666 + ], + [ + "▁optimizes", + -14.04856014251709 + ], + [ + "Hahaha", + -14.048582077026367 + ], + [ + "▁emitter", + -14.04859733581543 + ], + [ + "capture", + -14.048606872558594 + ], + [ + "▁Readiness", + -14.04865550994873 + ], + [ + "▁Redlands", + -14.048657417297363 + ], + [ + "▁hilltop", + -14.048705101013184 + ], + [ + "▁Exhibitor", + -14.048734664916992 + ], + [ + "rgue", + -14.048741340637209 + ], + [ + "▁forts", + -14.048748970031738 + ], + [ + "Nazi", + -14.04875659942627 + ], + [ + "▁lakhs", + -14.048765182495115 + ], + [ + "arco", + -14.04879665374756 + ], + [ + "▁blacksmith", + -14.048809051513672 + ], + [ + "izzle", + -14.048835754394531 + ], + [ + "▁Lancer", + -14.04894733428955 + ], + [ + "▁Evangelist", + -14.049006462097168 + ], + [ + "▁BAT", + -14.049025535583496 + ], + [ + "▁crucially", + -14.049036979675291 + ], + [ + "▁growl", + -14.049089431762695 + ], + [ + "Trend", + -14.049102783203123 + ], + [ + "iker", + -14.04912281036377 + ], + [ + "739", + -14.049128532409668 + ], + [ + "081", + -14.049182891845703 + ], + [ + "rocket", + -14.049291610717772 + ], + [ + "▁Lazar", + -14.04947280883789 + ], + [ + "yum", + -14.049495697021484 + ], + [ + "▁Mazz", + -14.049528121948242 + ], + [ + "▁RBC", + -14.049543380737305 + ], + [ + "▁parabens", + -14.049560546875 + ], + [ + "readers", + -14.049566268920898 + ], + [ + "590", + -14.049567222595217 + ], + [ + "articles", + -14.049567222595217 + ], + [ + "▁descendant", + -14.049603462219238 + ], + [ + "gher", + 
-14.049636840820312 + ], + [ + "buk", + -14.049675941467283 + ], + [ + "aryn", + -14.049689292907717 + ], + [ + "▁circled", + -14.04970645904541 + ], + [ + "Female", + -14.049727439880373 + ], + [ + "▁$1500", + -14.049734115600586 + ], + [ + "▁alder", + -14.0497465133667 + ], + [ + "▁Classified", + -14.049762725830078 + ], + [ + "Concerning", + -14.04977035522461 + ], + [ + "ubuntu", + -14.049778938293455 + ], + [ + "tropical", + -14.04978084564209 + ], + [ + "▁Jihad", + -14.04981517791748 + ], + [ + "▁Strom", + -14.049818992614746 + ], + [ + "Deputy", + -14.04982566833496 + ], + [ + "▁Sik", + -14.04983615875244 + ], + [ + "Landscape", + -14.049885749816896 + ], + [ + "Circle", + -14.049996376037598 + ], + [ + "▁Carousel", + -14.049997329711914 + ], + [ + "▁tiffany", + -14.050013542175291 + ], + [ + "▁237", + -14.050028800964355 + ], + [ + "apu", + -14.050045013427734 + ], + [ + "▁Inventor", + -14.05006980895996 + ], + [ + "Ward", + -14.050092697143556 + ], + [ + "▁Freelancer", + -14.05011749267578 + ], + [ + "Mars", + -14.050150871276855 + ], + [ + "Produced", + -14.0501708984375 + ], + [ + "▁Ferrer", + -14.05017375946045 + ], + [ + "axial", + -14.050178527832031 + ], + [ + "Malley", + -14.05019187927246 + ], + [ + "▁acquires", + -14.05019760131836 + ], + [ + "Danny", + -14.050198554992676 + ], + [ + "▁Northwood", + -14.050219535827637 + ], + [ + "gley", + -14.050222396850586 + ], + [ + "▁Macbook", + -14.050222396850586 + ], + [ + "▁Aut", + -14.05023956298828 + ], + [ + "▁Keane", + -14.050329208374023 + ], + [ + "▁Norte", + -14.050345420837402 + ], + [ + "▁Loki", + -14.050384521484377 + ], + [ + "Upper", + -14.0504789352417 + ], + [ + "▁PPT", + -14.05048656463623 + ], + [ + "mote", + -14.050704956054688 + ], + [ + "Polish", + -14.050708770751951 + ], + [ + "▁Steep", + -14.050715446472168 + ], + [ + "spark", + -14.050742149353027 + ], + [ + "▁eyeball", + -14.050775527954102 + ], + [ + "▁GIVE", + -14.05078125 + ], + [ + "Gee", + -14.050813674926758 + ], + [ + "▁Plays", + -14.050820350646973 + ], + [ + "HILL", + -14.050825119018556 + ], + [ + "▁Fon", + -14.050847053527832 + ], + [ + "▁cellphones", + -14.05087661743164 + ], + [ + "dustrialization", + -14.050878524780272 + ], + [ + "033", + -14.05091667175293 + ], + [ + "▁Klu", + -14.050941467285156 + ], + [ + "finally", + -14.050966262817385 + ], + [ + "▁Ismail", + -14.051043510437012 + ], + [ + "mita", + -14.051085472106934 + ], + [ + "▁Corel", + -14.051104545593262 + ], + [ + "▁flaming", + -14.051106452941896 + ], + [ + "▁Vitamins", + -14.05111312866211 + ], + [ + "▁genomics", + -14.051294326782228 + ], + [ + "▁10:4", + -14.051308631896973 + ], + [ + "Ul", + -14.051328659057615 + ], + [ + "▁Josie", + -14.051434516906738 + ], + [ + "BEE", + -14.051498413085938 + ], + [ + "▁Consensus", + -14.05153465270996 + ], + [ + "VIN", + -14.051535606384276 + ], + [ + "Actor", + -14.051573753356934 + ], + [ + "▁Weave", + -14.051630973815918 + ], + [ + "▁Concentration", + -14.051642417907717 + ], + [ + "▁1859", + -14.051645278930664 + ], + [ + "Returning", + -14.051685333251951 + ], + [ + "▁waiters", + -14.05174446105957 + ], + [ + "eshwar", + -14.051824569702148 + ], + [ + "629", + -14.051861763000488 + ], + [ + "▁WWI", + -14.05192756652832 + ], + [ + "▁Workbook", + -14.051961898803713 + ], + [ + "▁2009).", + -14.051965713500977 + ], + [ + "Hosting", + -14.052048683166504 + ], + [ + "▁Veg", + -14.052061080932615 + ], + [ + "SSI", + -14.052106857299805 + ], + [ + "▁bou", + -14.0521240234375 + ], + [ + "▁Prospective", + -14.052172660827637 + ], + [ + 
"▁Lark", + -14.052227020263672 + ], + [ + "▁espa", + -14.052266120910645 + ], + [ + "▁Decade", + -14.05234718322754 + ], + [ + "TTC", + -14.052393913269045 + ], + [ + "▁illustrators", + -14.052433967590332 + ], + [ + "Coloring", + -14.052434921264648 + ], + [ + "APPLICATION", + -14.05246639251709 + ], + [ + "▁Complainant", + -14.05246639251709 + ], + [ + "▁Kubernetes", + -14.05246639251709 + ], + [ + "▁Mahmoud", + -14.05246639251709 + ], + [ + "▁exodus", + -14.05246639251709 + ], + [ + "▁saxophonist", + -14.05246639251709 + ], + [ + "▁separatist", + -14.05246639251709 + ], + [ + "▁swiping", + -14.05246639251709 + ], + [ + "▁COULD", + -14.052467346191406 + ], + [ + "/04/2019", + -14.05247402191162 + ], + [ + "▁sociological", + -14.052477836608888 + ], + [ + "▁Litecoin", + -14.052482604980469 + ], + [ + "▁discharging", + -14.052482604980469 + ], + [ + "▁severance", + -14.052485466003418 + ], + [ + "▁Cromwell", + -14.052486419677734 + ], + [ + "▁Syrah", + -14.05249309539795 + ], + [ + "redo", + -14.052494049072266 + ], + [ + "▁bridle", + -14.052495002746582 + ], + [ + "▁Salmonella", + -14.05250644683838 + ], + [ + "fective", + -14.052509307861328 + ], + [ + "▁flint", + -14.052510261535645 + ], + [ + "▁foolproof", + -14.05251693725586 + ], + [ + "▁Councilman", + -14.052534103393556 + ], + [ + "▁Dwayne", + -14.052559852600098 + ], + [ + "▁Fairtrade", + -14.052563667297363 + ], + [ + "▁Buzzard", + -14.052608489990234 + ], + [ + "▁Salford", + -14.052611351013184 + ], + [ + "▁Flux", + -14.052618026733398 + ], + [ + "▁microfilm", + -14.052675247192385 + ], + [ + "▁dun", + -14.052693367004396 + ], + [ + "▁Exist", + -14.052694320678713 + ], + [ + "▁Digg", + -14.052789688110352 + ], + [ + "▁mountaineering", + -14.052789688110352 + ], + [ + "▁stinky", + -14.052934646606444 + ], + [ + "kman", + -14.052942276000977 + ], + [ + "huis", + -14.053004264831545 + ], + [ + "umu", + -14.053014755249023 + ], + [ + "▁Robson", + -14.053019523620604 + ], + [ + "Peer", + -14.053030014038086 + ], + [ + "▁netball", + -14.053034782409668 + ], + [ + "Hur", + -14.053071022033691 + ], + [ + "▁Locking", + -14.05307388305664 + ], + [ + "Sally", + -14.053077697753906 + ], + [ + "▁std", + -14.053092002868652 + ], + [ + "▁dorsal", + -14.053168296813965 + ], + [ + "LAT", + -14.05318832397461 + ], + [ + "▁Podcasts", + -14.053231239318848 + ], + [ + "▁Towns", + -14.053304672241213 + ], + [ + "▁drips", + -14.053305625915527 + ], + [ + "▁Cryptocurrency", + -14.053375244140623 + ], + [ + "▁leaner", + -14.053380966186523 + ], + [ + "▁triage", + -14.053427696228027 + ], + [ + "CARE", + -14.053457260131836 + ], + [ + "▁Catholicism", + -14.053475379943848 + ], + [ + "▁Revere", + -14.053518295288086 + ], + [ + "▁9.2", + -14.053533554077148 + ], + [ + "▁Kau", + -14.053549766540527 + ], + [ + "▁flatbed", + -14.053563117980955 + ], + [ + "Mb", + -14.053587913513184 + ], + [ + "▁Rui", + -14.053648948669434 + ], + [ + "Karl", + -14.05372428894043 + ], + [ + "▁2001)", + -14.053726196289062 + ], + [ + "▁smiley", + -14.053759574890137 + ], + [ + "ELLA", + -14.053762435913086 + ], + [ + "▁2.5-", + -14.053765296936035 + ], + [ + "soever", + -14.053860664367676 + ], + [ + "▁storybook", + -14.053887367248535 + ], + [ + "elena", + -14.05393886566162 + ], + [ + "▁conduction", + -14.053966522216797 + ], + [ + "▁Arrangement", + -14.05397891998291 + ], + [ + "▁doughnut", + -14.053983688354492 + ], + [ + "MSA", + -14.054015159606934 + ], + [ + "▁peasant", + -14.05403995513916 + ], + [ + "Amid", + -14.054073333740234 + ], + [ + "belief", + 
-14.05418586730957 + ], + [ + "Extension", + -14.05426025390625 + ], + [ + "EEC", + -14.0542631149292 + ], + [ + "▁heath", + -14.05426788330078 + ], + [ + "accredited", + -14.054271697998049 + ], + [ + "▁Evaluate", + -14.054306983947754 + ], + [ + "acoustic", + -14.054356575012209 + ], + [ + "▁browning", + -14.054443359375 + ], + [ + "Wherever", + -14.05451488494873 + ], + [ + "▁Simons", + -14.05454921722412 + ], + [ + "▁rebounding", + -14.05454921722412 + ], + [ + "▁12-15", + -14.054573059082031 + ], + [ + "▁1961.", + -14.054576873779297 + ], + [ + "▁circum", + -14.054609298706056 + ], + [ + "▁discharges", + -14.05461597442627 + ], + [ + "Kra", + -14.054616928100586 + ], + [ + "rival", + -14.054624557495115 + ], + [ + "▁Cecilia", + -14.054638862609863 + ], + [ + "Mold", + -14.054654121398926 + ], + [ + "nker", + -14.054699897766112 + ], + [ + "trak", + -14.054706573486328 + ], + [ + "▁242", + -14.05476188659668 + ], + [ + "▁Herm", + -14.05480670928955 + ], + [ + "▁Toolbox", + -14.054874420166016 + ], + [ + "▁Verma", + -14.054905891418455 + ], + [ + "▁___", + -14.054924011230469 + ], + [ + "▁annualized", + -14.05495548248291 + ], + [ + "▁Viral", + -14.05498504638672 + ], + [ + "struck", + -14.055047035217283 + ], + [ + "-5)", + -14.055123329162598 + ], + [ + "▁LOTS", + -14.055124282836914 + ], + [ + "Crack", + -14.055137634277344 + ], + [ + "ambling", + -14.055163383483888 + ], + [ + "contractors", + -14.05519962310791 + ], + [ + "▁Jou", + -14.055205345153809 + ], + [ + "▁psych", + -14.055206298828123 + ], + [ + "▁commun", + -14.055211067199709 + ], + [ + "▁Heller", + -14.055243492126465 + ], + [ + "▁WALK", + -14.055309295654297 + ], + [ + "▁Agua", + -14.055319786071776 + ], + [ + "▁Loh", + -14.055325508117676 + ], + [ + "▁VALUE", + -14.055325508117676 + ], + [ + "▁Goss", + -14.05536937713623 + ], + [ + "0.6%", + -14.055401802062988 + ], + [ + "cotton", + -14.055434226989746 + ], + [ + "arius", + -14.055471420288086 + ], + [ + "▁1.30", + -14.055481910705566 + ], + [ + "▁bookmarked", + -14.05557346343994 + ], + [ + "▁marque", + -14.055602073669434 + ], + [ + "eir", + -14.055739402770996 + ], + [ + "owe", + -14.05575180053711 + ], + [ + "▁Lucie", + -14.055835723876951 + ], + [ + "Allah", + -14.055848121643066 + ], + [ + "ONA", + -14.055875778198242 + ], + [ + "PCI", + -14.055893898010254 + ], + [ + "mist", + -14.055975914001465 + ], + [ + "▁Forskolin", + -14.055987358093262 + ], + [ + "▁Thorne", + -14.056011199951172 + ], + [ + "▁informatics", + -14.056084632873535 + ], + [ + "▁conforming", + -14.056119918823242 + ], + [ + "cili", + -14.056203842163086 + ], + [ + "▁aggravate", + -14.05621337890625 + ], + [ + "▁zap", + -14.05623722076416 + ], + [ + "▁Musica", + -14.056260108947754 + ], + [ + "▁blinding", + -14.056319236755373 + ], + [ + "▁Patience", + -14.056321144104004 + ], + [ + "▁Zacks", + -14.056474685668944 + ], + [ + "▁Sindh", + -14.056538581848145 + ], + [ + "▁Cristina", + -14.056547164916992 + ], + [ + "Nike", + -14.056565284729004 + ], + [ + "▁WoW", + -14.056604385375977 + ], + [ + "eque", + -14.056646347045898 + ], + [ + "INC", + -14.056711196899414 + ], + [ + "erine", + -14.056732177734377 + ], + [ + "▁sayings", + -14.056747436523438 + ], + [ + "▁Tiru", + -14.056777000427246 + ], + [ + "▁Gertrude", + -14.056814193725586 + ], + [ + "▁Guillermo", + -14.056814193725586 + ], + [ + "▁caviar", + -14.056814193725586 + ], + [ + "▁gubernatorial", + -14.056814193725586 + ], + [ + "▁raucous", + -14.056814193725586 + ], + [ + "▁PvP", + -14.056815147399902 + ], + [ + "▁Distinction", + 
-14.05681610107422 + ], + [ + "▁assailant", + -14.05681610107422 + ], + [ + "▁onlookers", + -14.056818008422852 + ], + [ + "▁traversing", + -14.056818008422852 + ], + [ + "▁BuzzFeed", + -14.056818962097168 + ], + [ + "▁insidious", + -14.056822776794434 + ], + [ + "▁emblematic", + -14.056825637817385 + ], + [ + "▁Bugatti", + -14.0568265914917 + ], + [ + "▁capitol", + -14.056838989257812 + ], + [ + "▁conifer", + -14.05685043334961 + ], + [ + "▁phpBB", + -14.056852340698242 + ], + [ + "▁Importance", + -14.056866645812988 + ], + [ + "▁reclamation", + -14.056872367858888 + ], + [ + "▁Dianne", + -14.056896209716797 + ], + [ + "▁replete", + -14.05690097808838 + ], + [ + "▁enchantment", + -14.056924819946287 + ], + [ + "▁Rosetta", + -14.056947708129885 + ], + [ + "▁blogosphere", + -14.056965827941896 + ], + [ + "▁waged", + -14.056973457336426 + ], + [ + "1:10", + -14.056978225708008 + ], + [ + "▁Redeemer", + -14.057093620300291 + ], + [ + "▁Gemstone", + -14.057108879089355 + ], + [ + "▁intraday", + -14.057123184204102 + ], + [ + "938", + -14.057135581970217 + ], + [ + "▁FDIC", + -14.057151794433594 + ], + [ + "Counter", + -14.057177543640137 + ], + [ + "▁figuratively", + -14.057215690612791 + ], + [ + "▁distro", + -14.057280540466309 + ], + [ + "▁pixie", + -14.057303428649902 + ], + [ + "819", + -14.057449340820312 + ], + [ + "▁nerdy", + -14.05747127532959 + ], + [ + "▁realtime", + -14.057486534118652 + ], + [ + "▁beautifying", + -14.057525634765623 + ], + [ + "29)", + -14.05754566192627 + ], + [ + "▁reorder", + -14.0575532913208 + ], + [ + "▁$130", + -14.057564735412598 + ], + [ + "nibal", + -14.057573318481444 + ], + [ + "▁NASDAQ", + -14.05758571624756 + ], + [ + "Complement", + -14.057622909545898 + ], + [ + "Patch", + -14.057645797729492 + ], + [ + "▁Whitening", + -14.057708740234377 + ], + [ + "▁Joo", + -14.057762145996094 + ], + [ + "▁purist", + -14.05776596069336 + ], + [ + "748", + -14.057766914367676 + ], + [ + "▁bevel", + -14.057783126831056 + ], + [ + "▁munch", + -14.05780792236328 + ], + [ + "▁-1", + -14.057890892028809 + ], + [ + "nsley", + -14.057891845703123 + ], + [ + "▁memorized", + -14.057918548583984 + ], + [ + "▁ITIL", + -14.057927131652832 + ], + [ + "▁Barba", + -14.05801773071289 + ], + [ + "▁Reaching", + -14.058067321777344 + ], + [ + "▁Menlo", + -14.05811595916748 + ], + [ + "▁ICA", + -14.058150291442873 + ], + [ + "▁Enes", + -14.05815601348877 + ], + [ + "inka", + -14.0582275390625 + ], + [ + "▁Angler", + -14.058250427246094 + ], + [ + "▁Trumpet", + -14.058284759521484 + ], + [ + "AST", + -14.05831527709961 + ], + [ + "▁bashing", + -14.058327674865724 + ], + [ + "▁IBC", + -14.058451652526855 + ], + [ + "▁Loco", + -14.058510780334473 + ], + [ + "CTA", + -14.058523178100586 + ], + [ + "▁veal", + -14.058553695678713 + ], + [ + "807", + -14.058636665344238 + ], + [ + "marie", + -14.058648109436035 + ], + [ + "▁contented", + -14.058670043945312 + ], + [ + "▁Routes", + -14.0587158203125 + ], + [ + "Mirror", + -14.058716773986816 + ], + [ + "▁747", + -14.058722496032717 + ], + [ + "explore", + -14.058730125427246 + ], + [ + "▁knotted", + -14.058794975280762 + ], + [ + "Worth", + -14.058833122253418 + ], + [ + "▁Judging", + -14.058856010437012 + ], + [ + "Cutting", + -14.058890342712402 + ], + [ + "casual", + -14.05906105041504 + ], + [ + "duh", + -14.059078216552734 + ], + [ + "ohan", + -14.059080123901367 + ], + [ + "▁1854", + -14.059096336364746 + ], + [ + "etra", + -14.05910873413086 + ], + [ + "compared", + -14.059124946594238 + ], + [ + "▁dials", + -14.059158325195312 + ], + 
[ + "igue", + -14.05919075012207 + ], + [ + "▁capitals", + -14.059261322021484 + ], + [ + "▁Vik", + -14.059310913085938 + ], + [ + "▁Swans", + -14.059426307678224 + ], + [ + "HEAD", + -14.059430122375488 + ], + [ + "eler", + -14.059467315673828 + ], + [ + "▁Consist", + -14.059511184692385 + ], + [ + "▁SMP", + -14.059551239013672 + ], + [ + "fluid", + -14.059581756591797 + ], + [ + "▁Discovering", + -14.059603691101074 + ], + [ + "▁Homepage", + -14.059767723083496 + ], + [ + "▁Resolutions", + -14.059771537780762 + ], + [ + "▁LOB", + -14.059773445129396 + ], + [ + "IGA", + -14.059896469116213 + ], + [ + "AUD", + -14.059922218322754 + ], + [ + "▁ported", + -14.059992790222168 + ], + [ + "▁Http", + -14.060004234313965 + ], + [ + "▁Naj", + -14.060007095336914 + ], + [ + "Ted", + -14.060020446777344 + ], + [ + "▁259", + -14.060036659240724 + ], + [ + "▁slug", + -14.060051918029783 + ], + [ + "meth", + -14.060105323791504 + ], + [ + "kula", + -14.060200691223145 + ], + [ + "▁cleansers", + -14.06020736694336 + ], + [ + "Vin", + -14.060280799865724 + ], + [ + "▁Pia", + -14.060312271118164 + ], + [ + "rok", + -14.060347557067873 + ], + [ + "▁Mish", + -14.060518264770508 + ], + [ + "▁hazelnuts", + -14.060585021972656 + ], + [ + "▁crocodiles", + -14.060586929321287 + ], + [ + "recht", + -14.060601234436035 + ], + [ + "dean", + -14.060612678527832 + ], + [ + "▁decaying", + -14.060628890991213 + ], + [ + "Manufacturer", + -14.06063175201416 + ], + [ + "iata", + -14.060659408569336 + ], + [ + "Ear", + -14.060684204101562 + ], + [ + "▁Dancer", + -14.060709953308104 + ], + [ + "2:30", + -14.060718536376951 + ], + [ + "Kris", + -14.060725212097168 + ], + [ + "LAD", + -14.06084442138672 + ], + [ + "▁Cull", + -14.060866355895996 + ], + [ + "▁Wester", + -14.060877799987791 + ], + [ + "formation", + -14.060901641845703 + ], + [ + "▁cellars", + -14.060994148254396 + ], + [ + "706", + -14.061004638671877 + ], + [ + "cumbe", + -14.061031341552734 + ], + [ + "▁Britton", + -14.061049461364746 + ], + [ + "▁Esta", + -14.06106185913086 + ], + [ + "▁Huff", + -14.061074256896973 + ], + [ + "▁Hogwarts", + -14.06118106842041 + ], + [ + "▁accelerometer", + -14.06118106842041 + ], + [ + "▁sauerkraut", + -14.06118106842041 + ], + [ + "▁zithromax", + -14.06118106842041 + ], + [ + "968", + -14.06118392944336 + ], + [ + "▁monochromatic", + -14.061184883117676 + ], + [ + "▁grudge", + -14.06118869781494 + ], + [ + "▁bulging", + -14.061189651489258 + ], + [ + "▁morsel", + -14.06119441986084 + ], + [ + "▁Mysore", + -14.061197280883787 + ], + [ + "▁rouge", + -14.061205863952637 + ], + [ + "chamber", + -14.061216354370115 + ], + [ + "▁zoos", + -14.0612211227417 + ], + [ + "▁Alignment", + -14.061223983764648 + ], + [ + "▁Virtue", + -14.061233520507812 + ], + [ + "▁tanned", + -14.061239242553713 + ], + [ + "▁Macbeth", + -14.06124210357666 + ], + [ + "▁Kafka", + -14.061297416687012 + ], + [ + "▁prelude", + -14.061347007751465 + ], + [ + "▁CPP", + -14.061354637145996 + ], + [ + "Commission", + -14.061426162719728 + ], + [ + "▁ministerial", + -14.061427116394045 + ], + [ + "▁Mage", + -14.061479568481444 + ], + [ + "lari", + -14.061488151550291 + ], + [ + "▁Fenton", + -14.061509132385254 + ], + [ + "▁nas", + -14.061542510986328 + ], + [ + "▁Officejet", + -14.06157112121582 + ], + [ + "Ren", + -14.061601638793944 + ], + [ + "▁MINER", + -14.061620712280272 + ], + [ + "▁Eats", + -14.061628341674805 + ], + [ + "avat", + -14.061664581298828 + ], + [ + "debt", + -14.06170654296875 + ], + [ + "▁octa", + -14.061731338500977 + ], + [ + "Graphic", + 
-14.061747550964355 + ], + [ + "▁thwarted", + -14.06175708770752 + ], + [ + "▁Gretchen", + -14.061767578125 + ], + [ + "▁admirers", + -14.06179428100586 + ], + [ + "▁MOOC", + -14.061830520629885 + ], + [ + "▁torches", + -14.0618314743042 + ], + [ + "stopping", + -14.061832427978516 + ], + [ + "▁GV", + -14.061878204345703 + ], + [ + "▁fluidity", + -14.06189250946045 + ], + [ + "▁pero", + -14.061923027038574 + ], + [ + "▁forfeited", + -14.061928749084473 + ], + [ + "▁snowshoe", + -14.061992645263672 + ], + [ + "▁1959.", + -14.061999320983888 + ], + [ + "▁Dé", + -14.062004089355469 + ], + [ + "▁fringes", + -14.062034606933594 + ], + [ + "ROI", + -14.06203842163086 + ], + [ + "roughly", + -14.06203842163086 + ], + [ + "▁morphological", + -14.062097549438477 + ], + [ + "▁1954.", + -14.062098503112791 + ], + [ + "▁hammering", + -14.062105178833008 + ], + [ + "▁nominating", + -14.062167167663574 + ], + [ + "chik", + -14.06218433380127 + ], + [ + "▁cultivars", + -14.062265396118164 + ], + [ + "▁Carney", + -14.062312126159668 + ], + [ + "cutaneous", + -14.062383651733398 + ], + [ + "▁STL", + -14.062406539916992 + ], + [ + "cases", + -14.062410354614258 + ], + [ + "ammy", + -14.062432289123535 + ], + [ + "▁bachelorette", + -14.062459945678713 + ], + [ + "▁collab", + -14.062467575073242 + ], + [ + "▁3.5%", + -14.062488555908203 + ], + [ + "expensive", + -14.06253147125244 + ], + [ + "▁sweetie", + -14.062549591064451 + ], + [ + "▁Duval", + -14.062631607055664 + ], + [ + "▁32\"", + -14.06263542175293 + ], + [ + "▁62%", + -14.062739372253418 + ], + [ + "▁optimist", + -14.062772750854492 + ], + [ + "ishly", + -14.062827110290527 + ], + [ + "▁Lizard", + -14.062843322753906 + ], + [ + "▁CREDIT", + -14.06286907196045 + ], + [ + "▁Hillside", + -14.062878608703612 + ], + [ + "▁EMF", + -14.062883377075195 + ], + [ + "ERC", + -14.062929153442385 + ], + [ + "▁airfares", + -14.062935829162598 + ], + [ + "851", + -14.06293773651123 + ], + [ + "▁1877", + -14.062973976135254 + ], + [ + "Tyr", + -14.062978744506836 + ], + [ + "▁Cros", + -14.062981605529783 + ], + [ + "▁Garment", + -14.062993049621582 + ], + [ + "▁BUR", + -14.063044548034668 + ], + [ + "▁Brill", + -14.063104629516602 + ], + [ + "▁Hardcover", + -14.063135147094728 + ], + [ + "▁Proudly", + -14.063148498535156 + ], + [ + "▁____", + -14.063148498535156 + ], + [ + "▁SOAP", + -14.063189506530762 + ], + [ + "Participate", + -14.063202857971191 + ], + [ + "oyo", + -14.063207626342772 + ], + [ + "hoy", + -14.063213348388672 + ], + [ + "Claire", + -14.063228607177734 + ], + [ + "vector", + -14.063239097595217 + ], + [ + "Gaming", + -14.06325340270996 + ], + [ + "▁229", + -14.063278198242188 + ], + [ + "▁avatars", + -14.063324928283691 + ], + [ + "HEN", + -14.063353538513184 + ], + [ + "▁decompose", + -14.063546180725098 + ], + [ + "compatibility", + -14.06356430053711 + ], + [ + "▁Studying", + -14.063641548156738 + ], + [ + "lust", + -14.063643455505373 + ], + [ + "▁SAA", + -14.063740730285645 + ], + [ + "▁forklifts", + -14.063836097717283 + ], + [ + "▁rainbows", + -14.063838958740234 + ], + [ + "▁Excessive", + -14.06385898590088 + ], + [ + "KN", + -14.063886642456056 + ], + [ + "▁Lloyds", + -14.0639066696167 + ], + [ + "Kbps", + -14.06395435333252 + ], + [ + "▁92%", + -14.063982963562012 + ], + [ + "▁Yer", + -14.063989639282228 + ], + [ + "▁Neighbors", + -14.06399154663086 + ], + [ + "▁hunk", + -14.06401252746582 + ], + [ + "▁Airports", + -14.0640287399292 + ], + [ + "▁pokies", + -14.064038276672363 + ], + [ + "▁diodes", + -14.064046859741213 + ], + [ + 
"▁injustices", + -14.064095497131348 + ], + [ + "▁$0.1", + -14.06414794921875 + ], + [ + "▁Refine", + -14.064202308654783 + ], + [ + "tano", + -14.064265251159668 + ], + [ + "274", + -14.064297676086426 + ], + [ + "▁buffering", + -14.064382553100586 + ], + [ + "1800", + -14.064388275146484 + ], + [ + "▁messengers", + -14.064610481262209 + ], + [ + "thesis", + -14.06464672088623 + ], + [ + "▁Crist", + -14.064647674560549 + ], + [ + "▁Nestle", + -14.064714431762695 + ], + [ + "702", + -14.06477165222168 + ], + [ + "AVI", + -14.064876556396484 + ], + [ + "▁SDGs", + -14.064879417419434 + ], + [ + "▁recovers", + -14.064881324768066 + ], + [ + "▁Latte", + -14.06490707397461 + ], + [ + "gw", + -14.06495475769043 + ], + [ + "▁sketched", + -14.064983367919922 + ], + [ + "▁HMI", + -14.065001487731934 + ], + [ + "▁amazes", + -14.065013885498049 + ], + [ + "estimated", + -14.06505298614502 + ], + [ + "▁offsets", + -14.065057754516602 + ], + [ + "▁Frederic", + -14.06506061553955 + ], + [ + "▁1869", + -14.065071105957031 + ], + [ + "▁1-6", + -14.065138816833496 + ], + [ + "Todd", + -14.065162658691406 + ], + [ + "▁Raul", + -14.065194129943848 + ], + [ + "▁Philipp", + -14.065321922302246 + ], + [ + "▁Carrying", + -14.065345764160156 + ], + [ + "Selection", + -14.065351486206056 + ], + [ + "▁lifeguard", + -14.065390586853027 + ], + [ + "▁bygone", + -14.065449714660645 + ], + [ + "usp", + -14.06546115875244 + ], + [ + "▁kitchenaid", + -14.06546115875244 + ], + [ + "ELT", + -14.065475463867188 + ], + [ + "cona", + -14.065509796142578 + ], + [ + "audit", + -14.065523147583008 + ], + [ + "amphetamine", + -14.06556510925293 + ], + [ + "▁Psychiatric", + -14.065567016601562 + ], + [ + "▁complacency", + -14.065567016601562 + ], + [ + "▁hickory", + -14.065567016601562 + ], + [ + "▁Naperville", + -14.06556797027588 + ], + [ + "▁Amman", + -14.065568923950195 + ], + [ + "▁matrimonial", + -14.06557846069336 + ], + [ + "▁heuristic", + -14.065582275390623 + ], + [ + "▁Guilford", + -14.065596580505373 + ], + [ + "▁Tariff", + -14.06560516357422 + ], + [ + "▁Drexel", + -14.065611839294434 + ], + [ + "▁windsurfing", + -14.0656156539917 + ], + [ + "▁salami", + -14.065624237060549 + ], + [ + "waiting", + -14.065638542175291 + ], + [ + "▁immortality", + -14.06564235687256 + ], + [ + "STRONG", + -14.065651893615724 + ], + [ + "Angelo", + -14.06567668914795 + ], + [ + "▁ergonomically", + -14.065682411193848 + ], + [ + "▁HEART", + -14.065690994262695 + ], + [ + "▁extant", + -14.065690994262695 + ], + [ + "ACP", + -14.065696716308594 + ], + [ + "vue", + -14.065714836120604 + ], + [ + "KW", + -14.06572437286377 + ], + [ + "CVD", + -14.065762519836426 + ], + [ + "▁Nanny", + -14.065778732299805 + ], + [ + "▁Marissa", + -14.065800666809082 + ], + [ + "▁Mistakes", + -14.065825462341309 + ], + [ + "Traveling", + -14.06586456298828 + ], + [ + "▁Slab", + -14.065874099731444 + ], + [ + "▁squeaky", + -14.065899848937988 + ], + [ + "▁TEDx", + -14.06590175628662 + ], + [ + "▁jetty", + -14.065903663635254 + ], + [ + "▁SID", + -14.065974235534668 + ], + [ + "▁Trailers", + -14.06601619720459 + ], + [ + "871", + -14.066020011901855 + ], + [ + "▁Platforms", + -14.066132545471191 + ], + [ + "▁mistrust", + -14.066225051879885 + ], + [ + "▁2300", + -14.066232681274414 + ], + [ + "268", + -14.066309928894045 + ], + [ + "▁masterfully", + -14.066313743591309 + ], + [ + "rae", + -14.066341400146484 + ], + [ + "TTER", + -14.066346168518066 + ], + [ + "ilis", + -14.066363334655762 + ], + [ + "▁noncommercial", + -14.06639289855957 + ], + [ + "3,600", + 
-14.066411018371582 + ], + [ + "▁eagerness", + -14.06642246246338 + ], + [ + "phonic", + -14.066449165344238 + ], + [ + "▁Delegates", + -14.066450119018556 + ], + [ + "▁fascist", + -14.066472053527832 + ], + [ + "▁TRY", + -14.06650447845459 + ], + [ + "▁daybed", + -14.066526412963867 + ], + [ + "▁Titles", + -14.066529273986816 + ], + [ + "busters", + -14.066576957702637 + ], + [ + "▁overworked", + -14.066588401794434 + ], + [ + "decent", + -14.066595077514648 + ], + [ + "▁Spas", + -14.066643714904783 + ], + [ + "▁Slayer", + -14.06664752960205 + ], + [ + "▁nameplate", + -14.066654205322266 + ], + [ + "▁Posters", + -14.06666374206543 + ], + [ + "▁dispensed", + -14.066715240478516 + ], + [ + "DMS", + -14.06672191619873 + ], + [ + "▁stardom", + -14.066763877868652 + ], + [ + "▁trams", + -14.066792488098145 + ], + [ + "▁NIV", + -14.066795349121094 + ], + [ + "▁Entering", + -14.066802024841309 + ], + [ + "Coast", + -14.06680679321289 + ], + [ + "▁253", + -14.06682014465332 + ], + [ + "▁rhinestone", + -14.066854476928713 + ], + [ + "▁Persona", + -14.066892623901367 + ], + [ + "▁calibrate", + -14.066917419433594 + ], + [ + "▁Collie", + -14.06691837310791 + ], + [ + "dora", + -14.06702709197998 + ], + [ + "▁choked", + -14.067100524902344 + ], + [ + "hell", + -14.06712245941162 + ], + [ + "needle", + -14.067139625549316 + ], + [ + "matically", + -14.06714153289795 + ], + [ + "▁Latter", + -14.067178726196287 + ], + [ + "▁lob", + -14.067201614379885 + ], + [ + "Rush", + -14.067203521728516 + ], + [ + "▁buffets", + -14.067245483398438 + ], + [ + "▁Michal", + -14.067291259765623 + ], + [ + "Cape", + -14.06736946105957 + ], + [ + "▁Horne", + -14.067392349243164 + ], + [ + "Greater", + -14.067465782165527 + ], + [ + "▁decently", + -14.067466735839844 + ], + [ + "1.2%", + -14.067676544189451 + ], + [ + "627", + -14.067699432373049 + ], + [ + "▁($7", + -14.067699432373049 + ], + [ + "Critics", + -14.067706108093262 + ], + [ + "741", + -14.067709922790527 + ], + [ + "Uh", + -14.06772518157959 + ], + [ + "Yahoo", + -14.0677490234375 + ], + [ + "▁Runs", + -14.067750930786133 + ], + [ + "ivu", + -14.06775951385498 + ], + [ + "unning", + -14.06780242919922 + ], + [ + "▁Vivid", + -14.06785774230957 + ], + [ + "nek", + -14.067898750305176 + ], + [ + "ookie", + -14.067944526672363 + ], + [ + "THANK", + -14.067946434020996 + ], + [ + "aje", + -14.06798267364502 + ], + [ + "▁Atwood", + -14.067998886108398 + ], + [ + "boiled", + -14.068037033081056 + ], + [ + "▁Verdi", + -14.068087577819824 + ], + [ + "▁SURE", + -14.068107604980469 + ], + [ + "Cream", + -14.068127632141112 + ], + [ + "▁ECS", + -14.06814193725586 + ], + [ + "▁DAR", + -14.068202018737791 + ], + [ + "RMS", + -14.068217277526855 + ], + [ + "▁Ano", + -14.068232536315918 + ], + [ + "stance", + -14.068238258361816 + ], + [ + "ouli", + -14.068244934082031 + ], + [ + "▁3).", + -14.06830596923828 + ], + [ + "▁Marketers", + -14.068314552307127 + ], + [ + "▁Integral", + -14.068330764770508 + ], + [ + "Finished", + -14.068350791931152 + ], + [ + "breeding", + -14.068405151367188 + ], + [ + "LID", + -14.06844997406006 + ], + [ + "▁dishwashers", + -14.068523406982422 + ], + [ + "▁drummers", + -14.068556785583496 + ], + [ + "▁Magician", + -14.06857681274414 + ], + [ + "flop", + -14.068643569946287 + ], + [ + "▁flashbacks", + -14.068708419799805 + ], + [ + "▁Afterward", + -14.068760871887209 + ], + [ + "▁Amino", + -14.068840026855469 + ], + [ + "▁CONNECT", + -14.068875312805176 + ], + [ + "genre", + -14.069010734558104 + ], + [ + "Reply", + -14.06911563873291 + ], + [ 
+ "▁obstructions", + -14.069137573242188 + ], + [ + "▁jon", + -14.069146156311035 + ], + [ + "▁Faire", + -14.069151878356934 + ], + [ + "steps", + -14.06919288635254 + ], + [ + "▁GLO", + -14.069199562072754 + ], + [ + "BOY", + -14.069218635559082 + ], + [ + "▁Moist", + -14.06923007965088 + ], + [ + "nearly", + -14.069233894348145 + ], + [ + "▁pleats", + -14.069254875183104 + ], + [ + "nosed", + -14.06926727294922 + ], + [ + "▁Lender", + -14.069311141967772 + ], + [ + "▁MATERIALS", + -14.06931209564209 + ], + [ + "▁MTN", + -14.069342613220217 + ], + [ + "▁guitarists", + -14.069343566894531 + ], + [ + "SDR", + -14.069412231445312 + ], + [ + "▁billionaires", + -14.06944179534912 + ], + [ + "▁MFC", + -14.069462776184082 + ], + [ + "▁feathered", + -14.069462776184082 + ], + [ + "▁donned", + -14.069467544555664 + ], + [ + "bind", + -14.06955909729004 + ], + [ + "▁surges", + -14.06963062286377 + ], + [ + "▁Holo", + -14.069636344909668 + ], + [ + "liz", + -14.06969165802002 + ], + [ + "▁ticker", + -14.069765090942385 + ], + [ + "▁instinctive", + -14.069786071777344 + ], + [ + "Sn", + -14.069819450378418 + ], + [ + "▁apathy", + -14.069911003112791 + ], + [ + "▁Mang", + -14.06997013092041 + ], + [ + "▁disembark", + -14.069971084594728 + ], + [ + "▁Immaculate", + -14.069972038269045 + ], + [ + "▁Kilkenny", + -14.069972038269045 + ], + [ + "▁Nickelodeon", + -14.069972038269045 + ], + [ + "▁SOFTWARE", + -14.069972038269045 + ], + [ + "▁craigslist", + -14.069972038269045 + ], + [ + "▁entrée", + -14.069972038269045 + ], + [ + "▁uncharted", + -14.069972038269045 + ], + [ + "▁ovulation", + -14.06997299194336 + ], + [ + "▁strategize", + -14.069976806640623 + ], + [ + "▁unplugged", + -14.06997776031494 + ], + [ + "▁metastasis", + -14.069978713989258 + ], + [ + "▁Innocent", + -14.069979667663574 + ], + [ + "▁Warwickshire", + -14.069986343383787 + ], + [ + "cara", + -14.069995880126951 + ], + [ + "▁1948.", + -14.069995880126951 + ], + [ + "▁Quotation", + -14.070003509521484 + ], + [ + "▁profess", + -14.070013046264648 + ], + [ + "▁kinship", + -14.070013999938965 + ], + [ + "▁Copeland", + -14.070016860961914 + ], + [ + "▁khaki", + -14.070022583007812 + ], + [ + "▁GoDaddy", + -14.070027351379396 + ], + [ + "▁chromatography", + -14.070029258728027 + ], + [ + "Confident", + -14.070049285888672 + ], + [ + "0-100", + -14.070058822631836 + ], + [ + "▁Guildford", + -14.07011890411377 + ], + [ + "▁preclinical", + -14.07012176513672 + ], + [ + "▁SBM", + -14.070178031921388 + ], + [ + "▁Paxton", + -14.070180892944336 + ], + [ + "olino", + -14.070181846618652 + ], + [ + "▁Overlook", + -14.070183753967283 + ], + [ + "▁emigrated", + -14.07020092010498 + ], + [ + "▁(£1", + -14.070249557495115 + ], + [ + "584", + -14.070252418518066 + ], + [ + "▁tradesmen", + -14.070262908935549 + ], + [ + "▁2011;", + -14.070323944091797 + ], + [ + "▁privy", + -14.070334434509276 + ], + [ + "▁Differences", + -14.070335388183594 + ], + [ + "▁Breach", + -14.070337295532228 + ], + [ + "▁Maul", + -14.07034969329834 + ], + [ + "tribut", + -14.070351600646973 + ], + [ + "phin", + -14.07038402557373 + ], + [ + "▁Evanston", + -14.070449829101562 + ], + [ + "▁usurp", + -14.070518493652344 + ], + [ + "CODE", + -14.070531845092772 + ], + [ + "▁Armory", + -14.070545196533203 + ], + [ + "▁EDC", + -14.070545196533203 + ], + [ + "▁Martina", + -14.070551872253418 + ], + [ + "▁crackling", + -14.07059383392334 + ], + [ + "Applying", + -14.070595741271973 + ], + [ + "▁Edwardian", + -14.070626258850098 + ], + [ + "-$1", + -14.070655822753906 + ], + [ + "EME", + 
-14.07076644897461 + ], + [ + "iglio", + -14.07077980041504 + ], + [ + "roi", + -14.07078742980957 + ], + [ + "▁2006).", + -14.070818901062012 + ], + [ + "▁bypassing", + -14.070833206176758 + ], + [ + "1.00", + -14.070846557617188 + ], + [ + "▁pooled", + -14.070849418640137 + ], + [ + "▁murdering", + -14.070882797241213 + ], + [ + "▁CAB", + -14.070920944213867 + ], + [ + "▁CMC", + -14.07093334197998 + ], + [ + "▁aligners", + -14.07093334197998 + ], + [ + "▁Trendy", + -14.070985794067385 + ], + [ + "▁LJ", + -14.071016311645508 + ], + [ + "Waste", + -14.071017265319824 + ], + [ + "▁Ozone", + -14.071074485778809 + ], + [ + "calls", + -14.071084022521973 + ], + [ + "Flag", + -14.071162223815918 + ], + [ + "▁IFA", + -14.071179389953612 + ], + [ + "inny", + -14.071249961853027 + ], + [ + "svn", + -14.07131290435791 + ], + [ + "▁tourney", + -14.071319580078123 + ], + [ + "▁Neighbor", + -14.07136344909668 + ], + [ + "▁anthropo", + -14.071374893188477 + ], + [ + "Dust", + -14.071488380432127 + ], + [ + "▁Hao", + -14.071503639221191 + ], + [ + "portal", + -14.071517944335938 + ], + [ + "itty", + -14.071556091308594 + ], + [ + "▁organist", + -14.071561813354492 + ], + [ + "▁addicting", + -14.071608543395996 + ], + [ + "▁Kitten", + -14.071624755859377 + ], + [ + "shifting", + -14.07165241241455 + ], + [ + "▁9.8", + -14.071685791015623 + ], + [ + "▁rookies", + -14.071775436401367 + ], + [ + "nab", + -14.071813583374023 + ], + [ + "NTA", + -14.07186508178711 + ], + [ + "naire", + -14.07187557220459 + ], + [ + "▁crocheted", + -14.071949005126951 + ], + [ + "lement", + -14.072002410888672 + ], + [ + "▁OIL", + -14.07201099395752 + ], + [ + "Stretching", + -14.07203197479248 + ], + [ + "CPS", + -14.072102546691896 + ], + [ + "▁whit", + -14.07215976715088 + ], + [ + "▁WELCOME", + -14.07221794128418 + ], + [ + "Adapt", + -14.072226524353027 + ], + [ + "zze", + -14.072237014770508 + ], + [ + "Pricing", + -14.072245597839355 + ], + [ + "dwelling", + -14.072246551513672 + ], + [ + "▁Mop", + -14.072394371032717 + ], + [ + "▁hindering", + -14.07240390777588 + ], + [ + "Champion", + -14.072416305541992 + ], + [ + "Korean", + -14.072458267211914 + ], + [ + "GAAP", + -14.07248306274414 + ], + [ + "▁artful", + -14.072521209716797 + ], + [ + "Tru", + -14.07254409790039 + ], + [ + "▁pronouns", + -14.072626113891602 + ], + [ + "▁smokey", + -14.07262897491455 + ], + [ + "zell", + -14.07264232635498 + ], + [ + "psych", + -14.072646141052246 + ], + [ + "license", + -14.072660446166992 + ], + [ + "▁Sylvester", + -14.072671890258787 + ], + [ + "IFT", + -14.072691917419434 + ], + [ + "values", + -14.072691917419434 + ], + [ + "urn", + -14.072693824768066 + ], + [ + "▁miter", + -14.072724342346191 + ], + [ + "Proper", + -14.072815895080566 + ], + [ + "▁Nast", + -14.0728178024292 + ], + [ + "▁SEK", + -14.072936058044434 + ], + [ + "▁bha", + -14.072958946228027 + ], + [ + "996", + -14.072991371154783 + ], + [ + "873", + -14.073006629943848 + ], + [ + "▁PHOTO", + -14.073030471801758 + ], + [ + "▁manifesting", + -14.073050498962402 + ], + [ + "▁birdie", + -14.073272705078123 + ], + [ + "▁McCar", + -14.073307037353516 + ], + [ + "targeted", + -14.07331085205078 + ], + [ + "gist", + -14.07333755493164 + ], + [ + "▁Mullen", + -14.073440551757812 + ], + [ + "▁Nume", + -14.07352352142334 + ], + [ + "FTP", + -14.07353401184082 + ], + [ + "▁7-5", + -14.073546409606934 + ], + [ + "▁justifies", + -14.073670387268066 + ], + [ + "resist", + -14.073699951171877 + ], + [ + "▁fizz", + -14.073773384094238 + ], + [ + "▁newlyweds", + 
-14.073781967163086 + ], + [ + "SAS", + -14.073797225952148 + ], + [ + "▁Yom", + -14.073801040649414 + ], + [ + "multiple", + -14.073835372924805 + ], + [ + "▁deliberations", + -14.073844909667969 + ], + [ + "▁1841", + -14.073850631713867 + ], + [ + "▁Bassett", + -14.073895454406738 + ], + [ + "▁236", + -14.073966979980469 + ], + [ + "engage", + -14.073991775512695 + ], + [ + "CEA", + -14.074052810668944 + ], + [ + "0;", + -14.07409381866455 + ], + [ + "▁Hormone", + -14.074108123779297 + ], + [ + "▁incessant", + -14.074155807495115 + ], + [ + "▁Dunne", + -14.074199676513672 + ], + [ + "▁cadet", + -14.074220657348633 + ], + [ + "▁Blackhawks", + -14.07427215576172 + ], + [ + "▁resorted", + -14.074275970458984 + ], + [ + "Surviving", + -14.074397087097168 + ], + [ + "▁chorizo", + -14.074397087097168 + ], + [ + "▁improbable", + -14.074397087097168 + ], + [ + "▁geospatial", + -14.074399948120115 + ], + [ + "▁conflicted", + -14.074408531188965 + ], + [ + "▁Amarillo", + -14.07440948486328 + ], + [ + "▁registries", + -14.07440948486328 + ], + [ + "▁prefecture", + -14.074418067932127 + ], + [ + "▁unhappiness", + -14.074419975280762 + ], + [ + "▁Constance", + -14.07442569732666 + ], + [ + "▁rumbling", + -14.07442569732666 + ], + [ + "▁(1989)", + -14.074434280395508 + ], + [ + "▁Suisse", + -14.074442863464355 + ], + [ + "ANSWER", + -14.07445240020752 + ], + [ + "▁normative", + -14.074468612670898 + ], + [ + "▁Leiden", + -14.074508666992188 + ], + [ + "▁Harlow", + -14.074519157409668 + ], + [ + "▁Painters", + -14.074540138244627 + ], + [ + "▁predominately", + -14.074540138244627 + ], + [ + "▁NYPD", + -14.074542045593262 + ], + [ + "▁lacing", + -14.07457447052002 + ], + [ + "dence", + -14.074588775634766 + ], + [ + "EIT", + -14.074590682983398 + ], + [ + "▁subunit", + -14.074603080749512 + ], + [ + "zw", + -14.07461166381836 + ], + [ + "▁#14", + -14.07461929321289 + ], + [ + "▁Medford", + -14.0746431350708 + ], + [ + "▁CFP", + -14.07468318939209 + ], + [ + "▁cams", + -14.074711799621582 + ], + [ + "▁Johor", + -14.074721336364746 + ], + [ + "▁reproducing", + -14.074740409851074 + ], + [ + "▁kitties", + -14.074771881103516 + ], + [ + "amy", + -14.074793815612791 + ], + [ + "▁textural", + -14.074795722961426 + ], + [ + "DIE", + -14.074798583984377 + ], + [ + "DAP", + -14.074871063232422 + ], + [ + "▁Mayweather", + -14.074883460998535 + ], + [ + "▁Gianni", + -14.074902534484863 + ], + [ + "▁Californians", + -14.074904441833496 + ], + [ + "MUN", + -14.074963569641112 + ], + [ + "▁deflected", + -14.07497215270996 + ], + [ + "▁covet", + -14.074994087219238 + ], + [ + "▁Sheng", + -14.075000762939451 + ], + [ + "427", + -14.075014114379885 + ], + [ + "▁DQ", + -14.075102806091309 + ], + [ + "▁VV", + -14.07514762878418 + ], + [ + "▁rediscovered", + -14.075162887573242 + ], + [ + "dap", + -14.075181007385254 + ], + [ + "▁Cramer", + -14.07518482208252 + ], + [ + "ranium", + -14.075241088867188 + ], + [ + "▁9780", + -14.07534885406494 + ], + [ + "database", + -14.075366020202637 + ], + [ + "▁num", + -14.075435638427734 + ], + [ + "055", + -14.07547378540039 + ], + [ + "Metadata", + -14.075485229492188 + ], + [ + "ibil", + -14.07548713684082 + ], + [ + "▁alertness", + -14.075559616088867 + ], + [ + "▁Matu", + -14.075596809387209 + ], + [ + "Shake", + -14.075651168823242 + ], + [ + "angu", + -14.075676918029783 + ], + [ + "▁contended", + -14.07568359375 + ], + [ + "▁YP", + -14.075691223144531 + ], + [ + "▁SON", + -14.07571029663086 + ], + [ + "▁Niche", + -14.075718879699709 + ], + [ + "▁nobility", + 
-14.075721740722656 + ], + [ + "Realize", + -14.075776100158691 + ], + [ + "▁Breathing", + -14.075987815856934 + ], + [ + "▁Lagu", + -14.076102256774902 + ], + [ + "952", + -14.076127052307127 + ], + [ + "Ygo", + -14.076144218444824 + ], + [ + "▁pronoun", + -14.076147079467772 + ], + [ + "▁medallion", + -14.076156616210938 + ], + [ + "4.00", + -14.076199531555176 + ], + [ + "▁affirmations", + -14.076244354248049 + ], + [ + "▁flocks", + -14.076300621032717 + ], + [ + "▁2018?", + -14.076340675354004 + ], + [ + "787", + -14.076374053955078 + ], + [ + "gest", + -14.076385498046877 + ], + [ + "▁Cd", + -14.076407432556152 + ], + [ + "▁dv", + -14.076493263244627 + ], + [ + "▁Compute", + -14.076494216918944 + ], + [ + "apocalyptic", + -14.076748847961426 + ], + [ + "▁reigned", + -14.076749801635742 + ], + [ + "WARNING", + -14.07677936553955 + ], + [ + "▁jumpstart", + -14.076790809631348 + ], + [ + "microsoft", + -14.076797485351562 + ], + [ + "Entertainment", + -14.076802253723145 + ], + [ + "Broadway", + -14.076826095581056 + ], + [ + "cyclic", + -14.076868057250977 + ], + [ + "▁Savvy", + -14.076882362365724 + ], + [ + "▁GEN", + -14.07690715789795 + ], + [ + "highly", + -14.076943397521973 + ], + [ + "Structure", + -14.076990127563477 + ], + [ + "▁apologetic", + -14.076997756958008 + ], + [ + "▁breaching", + -14.077017784118652 + ], + [ + "▁Dominica", + -14.077020645141602 + ], + [ + "▁liven", + -14.07703685760498 + ], + [ + "▁modestly", + -14.07703971862793 + ], + [ + "threat", + -14.07704734802246 + ], + [ + "▁Cline", + -14.077116012573242 + ], + [ + "▁Failed", + -14.077142715454102 + ], + [ + "▁PDT", + -14.07725429534912 + ], + [ + "▁Urbana", + -14.07730770111084 + ], + [ + "▁cryo", + -14.07737922668457 + ], + [ + "inous", + -14.077383995056152 + ], + [ + "Rooms", + -14.07740879058838 + ], + [ + "829", + -14.077421188354492 + ], + [ + "Hearing", + -14.077580451965332 + ], + [ + "placement", + -14.07758903503418 + ], + [ + "▁Gown", + -14.077595710754396 + ], + [ + "535", + -14.077630043029783 + ], + [ + "assist", + -14.07768440246582 + ], + [ + "▁confronts", + -14.077774047851562 + ], + [ + "▁outwards", + -14.077787399291992 + ], + [ + "▁Submissions", + -14.077789306640623 + ], + [ + "tilt", + -14.077835083007812 + ], + [ + "▁personalisation", + -14.077847480773926 + ], + [ + "▁wasnt", + -14.077865600585938 + ], + [ + "▁Stefano", + -14.077958106994627 + ], + [ + "▁Embroidered", + -14.077996253967283 + ], + [ + "▁Jak", + -14.078024864196776 + ], + [ + "▁burritos", + -14.078028678894045 + ], + [ + "CTS", + -14.07819652557373 + ], + [ + "074", + -14.07822322845459 + ], + [ + "▁primates", + -14.07827377319336 + ], + [ + "▁Num", + -14.078359603881836 + ], + [ + "iasis", + -14.078425407409668 + ], + [ + "gman", + -14.078448295593262 + ], + [ + "▁Tribes", + -14.078458786010742 + ], + [ + "maine", + -14.07847499847412 + ], + [ + "▁colorless", + -14.078563690185549 + ], + [ + "▁shaver", + -14.07858943939209 + ], + [ + "▁reagents", + -14.078621864318848 + ], + [ + "▁#13", + -14.078682899475098 + ], + [ + "Kent", + -14.07877254486084 + ], + [ + "Sum", + -14.078787803649902 + ], + [ + "Sym", + -14.078816413879396 + ], + [ + "▁Auschwitz", + -14.07884120941162 + ], + [ + "▁Caucasus", + -14.07884120941162 + ], + [ + "▁Jodhpur", + -14.07884120941162 + ], + [ + "▁Maarten", + -14.07884120941162 + ], + [ + "▁PARTICULAR", + -14.07884120941162 + ], + [ + "▁Philanthropy", + -14.07884120941162 + ], + [ + "▁ibuprofen", + -14.07884120941162 + ], + [ + "▁shrewd", + -14.07884120941162 + ], + [ + "▁tectonic", + 
-14.07884120941162 + ], + [ + "▁typhoon", + -14.07884120941162 + ], + [ + "▁unforgiving", + -14.07884120941162 + ], + [ + "▁yesteryear", + -14.07884120941162 + ], + [ + "▁curfew", + -14.078842163085938 + ], + [ + "▁hysteria", + -14.078843116760254 + ], + [ + "▁Cornelius", + -14.07884693145752 + ], + [ + "▁gauze", + -14.078847885131836 + ], + [ + "▁otherworldly", + -14.078847885131836 + ], + [ + "vus", + -14.07886028289795 + ], + [ + "▁Wellesley", + -14.078863143920898 + ], + [ + "▁LaserJet", + -14.078866958618164 + ], + [ + "angal", + -14.078869819641112 + ], + [ + "▁provocation", + -14.078871726989746 + ], + [ + "▁unannounced", + -14.078882217407228 + ], + [ + "▁Berwick", + -14.078883171081545 + ], + [ + "▁inhaling", + -14.078890800476074 + ], + [ + "▁Yeezy", + -14.078893661499023 + ], + [ + "▁blurring", + -14.078901290893556 + ], + [ + "▁incurring", + -14.07890796661377 + ], + [ + "Prim", + -14.078911781311035 + ], + [ + "▁Knitting", + -14.078911781311035 + ], + [ + "sino", + -14.078927040100098 + ], + [ + "acle", + -14.078941345214844 + ], + [ + "▁armpit", + -14.07895278930664 + ], + [ + "▁spritz", + -14.078967094421388 + ], + [ + "▁merchantability", + -14.078990936279297 + ], + [ + "Acknowledge", + -14.079008102416992 + ], + [ + "▁Charl", + -14.079031944274902 + ], + [ + "DLE", + -14.07905387878418 + ], + [ + "▁WLAN", + -14.079061508178713 + ], + [ + "▁skimmer", + -14.079068183898926 + ], + [ + "▁teacup", + -14.079099655151367 + ], + [ + "▁sentient", + -14.0791015625 + ], + [ + "▁barometer", + -14.07911491394043 + ], + [ + "▁GIFs", + -14.07911777496338 + ], + [ + "▁Solving", + -14.079119682312012 + ], + [ + "▁restock", + -14.079312324523926 + ], + [ + "deals", + -14.079326629638672 + ], + [ + "Berlin", + -14.079339027404783 + ], + [ + "▁dugout", + -14.079339027404783 + ], + [ + "▁sept", + -14.079339981079102 + ], + [ + "▁Rosario", + -14.079345703125 + ], + [ + "▁313", + -14.079354286193848 + ], + [ + "quad", + -14.079370498657228 + ], + [ + "▁Oceanside", + -14.079462051391602 + ], + [ + "▁interferes", + -14.079475402832031 + ], + [ + "▁Expected", + -14.079554557800291 + ], + [ + "▁Arya", + -14.079556465148926 + ], + [ + "CAA", + -14.079569816589355 + ], + [ + "undi", + -14.07958984375 + ], + [ + "SJ", + -14.079594612121582 + ], + [ + "▁ruffled", + -14.079634666442873 + ], + [ + "Saver", + -14.079635620117188 + ], + [ + "▁Cartridges", + -14.079642295837402 + ], + [ + "▁Tia", + -14.079665184020996 + ], + [ + "▁ACLU", + -14.079681396484377 + ], + [ + "▁Nebula", + -14.07969570159912 + ], + [ + "▁directorial", + -14.079713821411133 + ], + [ + "▁midweek", + -14.079737663269045 + ], + [ + "▁iv", + -14.079755783081056 + ], + [ + "▁Bargain", + -14.079784393310549 + ], + [ + "▁Dys", + -14.079822540283203 + ], + [ + "Boys", + -14.07982349395752 + ], + [ + "Elder", + -14.079853057861328 + ], + [ + "▁exerted", + -14.07986831665039 + ], + [ + "▁integrators", + -14.079874992370604 + ], + [ + "▁Decay", + -14.079894065856934 + ], + [ + "▁aerobics", + -14.079904556274414 + ], + [ + "▁Stowe", + -14.079909324645996 + ], + [ + "licious", + -14.079991340637209 + ], + [ + "RICK", + -14.080093383789062 + ], + [ + "ganj", + -14.080102920532228 + ], + [ + "▁Reject", + -14.080134391784668 + ], + [ + "yeong", + -14.08014678955078 + ], + [ + "inha", + -14.080156326293944 + ], + [ + "0.05)", + -14.080205917358398 + ], + [ + "▁accentuated", + -14.08029556274414 + ], + [ + "▁casa", + -14.080336570739746 + ], + [ + "Cent", + -14.080382347106934 + ], + [ + "▁conformation", + -14.080409049987791 + ], + [ + "8+", + 
-14.0805082321167 + ], + [ + "▁Auf", + -14.080562591552734 + ], + [ + "▁Sanct", + -14.080626487731934 + ], + [ + "PFS", + -14.08065700531006 + ], + [ + "▁exiled", + -14.080680847167969 + ], + [ + "▁Learned", + -14.080793380737305 + ], + [ + "CSR", + -14.080877304077148 + ], + [ + "▁awkwardly", + -14.080883026123049 + ], + [ + "▁gratefully", + -14.080883026123049 + ], + [ + "fluent", + -14.080891609191896 + ], + [ + "▁Lookout", + -14.080892562866213 + ], + [ + "352", + -14.080973625183104 + ], + [ + "ipper", + -14.08102035522461 + ], + [ + "discover", + -14.081047058105469 + ], + [ + "Pearl", + -14.081080436706545 + ], + [ + "▁Rik", + -14.081233978271484 + ], + [ + "▁612", + -14.081243515014648 + ], + [ + "654", + -14.081290245056152 + ], + [ + "▁NEA", + -14.08130168914795 + ], + [ + "Arthur", + -14.081307411193848 + ], + [ + "▁platelet", + -14.08132553100586 + ], + [ + "▁NYSE", + -14.081338882446287 + ], + [ + "ographers", + -14.081344604492188 + ], + [ + "▁electives", + -14.08135223388672 + ], + [ + "▁bookmakers", + -14.081360816955566 + ], + [ + "Netflix", + -14.081364631652832 + ], + [ + "▁MEM", + -14.081393241882324 + ], + [ + "▁toronto", + -14.081412315368652 + ], + [ + "baud", + -14.081496238708496 + ], + [ + "Bought", + -14.081536293029783 + ], + [ + "▁ACP", + -14.081550598144531 + ], + [ + "▁append", + -14.081562042236328 + ], + [ + "▁Grossman", + -14.081565856933594 + ], + [ + "▁Effectively", + -14.0816650390625 + ], + [ + "▁italic", + -14.081714630126951 + ], + [ + "Dimensional", + -14.081727981567385 + ], + [ + "▁commandment", + -14.081750869750977 + ], + [ + "influenced", + -14.081768035888672 + ], + [ + "delivered", + -14.081783294677734 + ], + [ + "▁coupler", + -14.08179759979248 + ], + [ + "▁Dunkin", + -14.08182144165039 + ], + [ + "▁JBL", + -14.081897735595703 + ], + [ + "▁charmed", + -14.08192253112793 + ], + [ + "675", + -14.08194065093994 + ], + [ + "▁triad", + -14.081985473632812 + ], + [ + "▁Capcom", + -14.08207893371582 + ], + [ + "nika", + -14.082100868225098 + ], + [ + "▁anyhow", + -14.082127571105955 + ], + [ + "billy", + -14.08212947845459 + ], + [ + "▁mailings", + -14.082135200500488 + ], + [ + "▁10.0", + -14.082152366638184 + ], + [ + "Adrian", + -14.082263946533203 + ], + [ + "▁fishy", + -14.082313537597656 + ], + [ + "tablet", + -14.082341194152832 + ], + [ + "include", + -14.082368850708008 + ], + [ + "▁Seton", + -14.08241844177246 + ], + [ + "▁demolish", + -14.082430839538574 + ], + [ + "heading", + -14.082463264465332 + ], + [ + "jol", + -14.082525253295898 + ], + [ + "decor", + -14.082549095153809 + ], + [ + "Donate", + -14.082566261291504 + ], + [ + "bikes", + -14.082571983337402 + ], + [ + "sofar", + -14.08257293701172 + ], + [ + "▁Priory", + -14.082634925842283 + ], + [ + "TSU", + -14.082720756530762 + ], + [ + "▁cautions", + -14.082728385925291 + ], + [ + "▁WERE", + -14.082731246948242 + ], + [ + "huge", + -14.082856178283691 + ], + [ + "▁CONDITION", + -14.082866668701172 + ], + [ + "Biker", + -14.082941055297852 + ], + [ + "287", + -14.082953453063965 + ], + [ + "anz", + -14.083070755004885 + ], + [ + "▁Caledonia", + -14.0830717086792 + ], + [ + "▁metaphorical", + -14.083097457885742 + ], + [ + "▁Margot", + -14.083100318908691 + ], + [ + "bj", + -14.08315372467041 + ], + [ + "Fka", + -14.083165168762209 + ], + [ + "▁radiates", + -14.083227157592772 + ], + [ + "heath", + -14.083271026611328 + ], + [ + "▁1/2′′", + -14.083280563354492 + ], + [ + "▁tropes", + -14.08329963684082 + ], + [ + "▁Algarve", + -14.08330535888672 + ], + [ + "▁Dempsey", + 
-14.08330535888672 + ], + [ + "▁Maserati", + -14.08330535888672 + ], + [ + "▁Rothschild", + -14.08330535888672 + ], + [ + "▁iridescent", + -14.08330535888672 + ], + [ + "▁sanctuaries", + -14.08330535888672 + ], + [ + "▁sarcasm", + -14.08330535888672 + ], + [ + "▁trombone", + -14.08330535888672 + ], + [ + "▁doubly", + -14.083306312561035 + ], + [ + "▁hitherto", + -14.083306312561035 + ], + [ + "▁agonizing", + -14.083308219909668 + ], + [ + "▁geranium", + -14.083311080932615 + ], + [ + "▁hedgehog", + -14.083312034606934 + ], + [ + "▁implore", + -14.083316802978516 + ], + [ + "▁trojan", + -14.083316802978516 + ], + [ + "▁Cantonese", + -14.083324432373049 + ], + [ + "▁Ceramics", + -14.083324432373049 + ], + [ + "amyloid", + -14.083328247070312 + ], + [ + "▁Trimble", + -14.083351135253906 + ], + [ + "▁Brewster", + -14.08336067199707 + ], + [ + "▁Tbsp", + -14.08336067199707 + ], + [ + "ARI", + -14.083378791809082 + ], + [ + "▁Munster", + -14.083415031433104 + ], + [ + "▁Renovations", + -14.083453178405762 + ], + [ + "▁Barrington", + -14.083489418029783 + ], + [ + "▁Familiar", + -14.083498001098633 + ], + [ + "angelo", + -14.083514213562012 + ], + [ + "▁smartwatch", + -14.083550453186035 + ], + [ + "Cole", + -14.08358669281006 + ], + [ + "▁sizzle", + -14.083599090576172 + ], + [ + "▁Thorpe", + -14.083624839782717 + ], + [ + "▁Blasio", + -14.083636283874512 + ], + [ + "Representing", + -14.083664894104004 + ], + [ + "guardian", + -14.083687782287598 + ], + [ + "▁crumbly", + -14.083700180053713 + ], + [ + "SIST", + -14.083763122558594 + ], + [ + "▁TEC", + -14.083763122558594 + ], + [ + "▁clout", + -14.08377742767334 + ], + [ + "▁chemists", + -14.083824157714844 + ], + [ + "Sil", + -14.083828926086426 + ], + [ + "retail", + -14.083869934082031 + ], + [ + "▁Motorhome", + -14.08390998840332 + ], + [ + "▁Wheaton", + -14.083913803100586 + ], + [ + "▁Hebron", + -14.083917617797852 + ], + [ + "▁teething", + -14.083983421325684 + ], + [ + "▁1939,", + -14.084029197692873 + ], + [ + "cooking", + -14.084031105041504 + ], + [ + "▁395", + -14.084077835083008 + ], + [ + "ghost", + -14.084232330322266 + ], + [ + "▁Wrinkle", + -14.084239959716797 + ], + [ + "▁Soma", + -14.084240913391112 + ], + [ + "▁Manafort", + -14.08430290222168 + ], + [ + "▁Kwan", + -14.084341049194336 + ], + [ + "▁Translator", + -14.084368705749512 + ], + [ + "▁Pieter", + -14.08437728881836 + ], + [ + "▁Twisted", + -14.084453582763672 + ], + [ + "Pipe", + -14.08445644378662 + ], + [ + "▁refreshes", + -14.084460258483888 + ], + [ + "▁Plo", + -14.084567070007324 + ], + [ + "▁chromatic", + -14.08457088470459 + ], + [ + "lun", + -14.08461093902588 + ], + [ + "LIFE", + -14.08465576171875 + ], + [ + "▁SEAL", + -14.084756851196287 + ], + [ + "▁MIC", + -14.084761619567873 + ], + [ + "azoo", + -14.084768295288086 + ], + [ + "▁REACH", + -14.084872245788574 + ], + [ + "GMA", + -14.084890365600586 + ], + [ + "▁inhuman", + -14.08489990234375 + ], + [ + "rimmed", + -14.08498191833496 + ], + [ + "▁tenets", + -14.08501148223877 + ], + [ + "▁845", + -14.085107803344728 + ], + [ + "AEA", + -14.085128784179688 + ], + [ + "▁einer", + -14.08513069152832 + ], + [ + "▁anecdote", + -14.085183143615724 + ], + [ + "▁Shrink", + -14.085203170776367 + ], + [ + "▁percussionist", + -14.08521556854248 + ], + [ + "▁glittery", + -14.085223197937012 + ], + [ + "▁2,200", + -14.08527660369873 + ], + [ + "▁Oat", + -14.085288047790527 + ], + [ + "Showing", + -14.0853271484375 + ], + [ + "▁meaningfully", + -14.085345268249512 + ], + [ + "▁nur", + -14.085369110107422 + ], + [ + 
"ORM", + -14.085441589355469 + ], + [ + "▁Musa", + -14.085467338562012 + ], + [ + "▁vise", + -14.085509300231934 + ], + [ + "▁denser", + -14.085518836975098 + ], + [ + "▁applet", + -14.08562183380127 + ], + [ + "booth", + -14.085630416870115 + ], + [ + "▁usernames", + -14.085631370544434 + ], + [ + "blad", + -14.085822105407717 + ], + [ + "▁Finale", + -14.085838317871094 + ], + [ + "▁Blanche", + -14.085899353027344 + ], + [ + "▁sonar", + -14.085909843444824 + ], + [ + "Accommodation", + -14.08591365814209 + ], + [ + "zano", + -14.085918426513672 + ], + [ + "▁McIntyre", + -14.085929870605469 + ], + [ + "0+", + -14.086009979248049 + ], + [ + "▁rampage", + -14.086050987243652 + ], + [ + "Breast", + -14.086071014404297 + ], + [ + "▁throats", + -14.086082458496094 + ], + [ + "809", + -14.08609390258789 + ], + [ + "▁GAP", + -14.086104393005373 + ], + [ + "▁Dah", + -14.086143493652344 + ], + [ + "Towards", + -14.086182594299316 + ], + [ + "Chill", + -14.086198806762695 + ], + [ + "oski", + -14.086201667785645 + ], + [ + "bec", + -14.08620262145996 + ], + [ + "798", + -14.086212158203123 + ], + [ + "chicken", + -14.086283683776855 + ], + [ + "▁gripe", + -14.086387634277344 + ], + [ + "ITT", + -14.086424827575684 + ], + [ + "▁bleached", + -14.086445808410645 + ], + [ + "▁Alia", + -14.08653736114502 + ], + [ + "7.0", + -14.086539268493652 + ], + [ + "mentation", + -14.086575508117676 + ], + [ + "peal", + -14.086577415466309 + ], + [ + "▁shim", + -14.086597442626951 + ], + [ + "Pl", + -14.086613655090332 + ], + [ + "TYPE", + -14.086650848388672 + ], + [ + "excel", + -14.086675643920898 + ], + [ + "▁piloting", + -14.086681365966797 + ], + [ + "▁Announcements", + -14.08670425415039 + ], + [ + "▁clearest", + -14.086772918701172 + ], + [ + "Sharon", + -14.086777687072754 + ], + [ + "▁shrines", + -14.08680820465088 + ], + [ + "akin", + -14.08681583404541 + ], + [ + "▁Madi", + -14.086886405944824 + ], + [ + "cao", + -14.08688735961914 + ], + [ + "▁Chari", + -14.086939811706545 + ], + [ + "▁Striped", + -14.08694076538086 + ], + [ + "started", + -14.087055206298828 + ], + [ + "Gram", + -14.087121963500977 + ], + [ + "▁shortcode", + -14.087136268615724 + ], + [ + "▁Perse", + -14.087162017822266 + ], + [ + "045", + -14.087182998657228 + ], + [ + "▁Sele", + -14.087228775024414 + ], + [ + "capable", + -14.087326049804688 + ], + [ + "▁genera", + -14.087352752685549 + ], + [ + "labeled", + -14.087379455566406 + ], + [ + "▁16.5", + -14.087461471557615 + ], + [ + "alys", + -14.087542533874512 + ], + [ + "▁reinforcements", + -14.087543487548828 + ], + [ + "▁fervent", + -14.087552070617676 + ], + [ + "▁Wink", + -14.087570190429688 + ], + [ + "BUS", + -14.087575912475586 + ], + [ + "▁sobre", + -14.087596893310549 + ], + [ + "elic", + -14.087617874145508 + ], + [ + "▁Snapshot", + -14.08774185180664 + ], + [ + "886", + -14.087743759155272 + ], + [ + "▁Wrist", + -14.08775520324707 + ], + [ + "▁Sensei", + -14.087767601013184 + ], + [ + "▁Fourteen", + -14.087772369384766 + ], + [ + "▁Buxton", + -14.08778953552246 + ], + [ + "▁Genealogical", + -14.08778953552246 + ], + [ + "▁Jekyll", + -14.08778953552246 + ], + [ + "▁McDowell", + -14.08778953552246 + ], + [ + "▁Promenade", + -14.08778953552246 + ], + [ + "▁Uranus", + -14.08778953552246 + ], + [ + "▁Windermere", + -14.08778953552246 + ], + [ + "▁franchising", + -14.08778953552246 + ], + [ + "▁grizzly", + -14.08778953552246 + ], + [ + "▁impervious", + -14.08778953552246 + ], + [ + "▁restaurateur", + -14.08778953552246 + ], + [ + "▁KITCHEN", + -14.087790489196776 + ], + [ + 
"▁Nippon", + -14.087791442871094 + ], + [ + "▁tyrant", + -14.087791442871094 + ], + [ + "▁Pharmacology", + -14.08779239654541 + ], + [ + "▁Prefecture", + -14.08779239654541 + ], + [ + "▁CARFAX", + -14.087793350219728 + ], + [ + "▁Galilee", + -14.08780002593994 + ], + [ + "▁exclamation", + -14.087800979614258 + ], + [ + "Accelerate", + -14.087803840637209 + ], + [ + "▁sparring", + -14.08780574798584 + ], + [ + "▁Citigroup", + -14.087806701660156 + ], + [ + "▁paralegal", + -14.08781909942627 + ], + [ + "mapping", + -14.087824821472168 + ], + [ + "▁Dijon", + -14.087824821472168 + ], + [ + "▁banding", + -14.0878267288208 + ], + [ + "▁antifungal", + -14.08782958984375 + ], + [ + "▁tolerable", + -14.08782958984375 + ], + [ + "▁redecorating", + -14.087831497192385 + ], + [ + "▁vacationing", + -14.087843894958496 + ], + [ + "▁EPDM", + -14.087852478027344 + ], + [ + "▁tangent", + -14.087852478027344 + ], + [ + "Groups", + -14.087862968444824 + ], + [ + "▁Unitarian", + -14.087891578674316 + ], + [ + "▁resettlement", + -14.087892532348633 + ], + [ + "Defining", + -14.087896347045898 + ], + [ + "▁prowl", + -14.087963104248049 + ], + [ + "enial", + -14.087984085083008 + ], + [ + "▁heartbroken", + -14.088019371032717 + ], + [ + "▁Producing", + -14.08804416656494 + ], + [ + "▁tribulations", + -14.088061332702637 + ], + [ + "▁Craven", + -14.088080406188965 + ], + [ + "▁fared", + -14.08808708190918 + ], + [ + "iani", + -14.088120460510254 + ], + [ + "krishna", + -14.088126182556152 + ], + [ + "▁warms", + -14.088142395019531 + ], + [ + "▁loudest", + -14.08816623687744 + ], + [ + "▁punishments", + -14.0881929397583 + ], + [ + "SSP", + -14.08819580078125 + ], + [ + "▁gluing", + -14.088216781616213 + ], + [ + "ibid", + -14.088224411010742 + ], + [ + "olic", + -14.088279724121094 + ], + [ + "Damage", + -14.088296890258787 + ], + [ + "Blade", + -14.088348388671877 + ], + [ + "▁wasteland", + -14.088363647460938 + ], + [ + "▁sparsely", + -14.088423728942873 + ], + [ + "▁SMC", + -14.088452339172363 + ], + [ + "▁Seng", + -14.08852767944336 + ], + [ + "▁Apa", + -14.0885591506958 + ], + [ + "493", + -14.088624000549316 + ], + [ + "ZY", + -14.088648796081545 + ], + [ + "▁wetsuit", + -14.08865451812744 + ], + [ + "▁informant", + -14.088688850402832 + ], + [ + "▁Roadmap", + -14.088698387145996 + ], + [ + "octane", + -14.088785171508787 + ], + [ + "▁HEPA", + -14.088828086853027 + ], + [ + "▁MOS", + -14.088866233825684 + ], + [ + "▁McKe", + -14.088900566101074 + ], + [ + "▁pollinators", + -14.08901023864746 + ], + [ + "▁Mandel", + -14.08902645111084 + ], + [ + "▁625", + -14.089037895202637 + ], + [ + "▁caches", + -14.089085578918455 + ], + [ + "▁Dramatic", + -14.089144706726074 + ], + [ + "034", + -14.089156150817873 + ], + [ + "bhu", + -14.08917236328125 + ], + [ + "▁torsion", + -14.089183807373049 + ], + [ + "791", + -14.089186668395996 + ], + [ + "▁Fist", + -14.089198112487791 + ], + [ + "▁те", + -14.089198112487791 + ], + [ + "biology", + -14.089210510253906 + ], + [ + "▁lifeblood", + -14.089241981506348 + ], + [ + "▁Palla", + -14.08924674987793 + ], + [ + "▁Chrono", + -14.089290618896484 + ], + [ + "▁garnering", + -14.089303970336914 + ], + [ + "▁Bunker", + -14.089325904846191 + ], + [ + "▁Kamp", + -14.089411735534668 + ], + [ + "▁Stol", + -14.089415550231934 + ], + [ + "▁Dojo", + -14.089542388916016 + ], + [ + "▁puddles", + -14.089543342590332 + ], + [ + "▁71%", + -14.089550971984863 + ], + [ + "583", + -14.089567184448242 + ], + [ + "▁Alber", + -14.089590072631836 + ], + [ + "▁brad", + -14.089603424072266 + ], + [ + 
"▁periodicals", + -14.089689254760742 + ], + [ + "▁DARK", + -14.089716911315918 + ], + [ + "▁Funnel", + -14.089726448059082 + ], + [ + "▁Blur", + -14.089727401733398 + ], + [ + "fitness", + -14.089729309082031 + ], + [ + "▁lords", + -14.089788436889648 + ], + [ + "▁Herr", + -14.089902877807615 + ], + [ + "▁spines", + -14.089924812316896 + ], + [ + "PART", + -14.089950561523438 + ], + [ + "▁TFT", + -14.089958190917969 + ], + [ + "KAR", + -14.08997631072998 + ], + [ + "▁Vos", + -14.090088844299316 + ], + [ + "▁Catching", + -14.090105056762695 + ], + [ + "Cancellation", + -14.090120315551758 + ], + [ + "jh", + -14.09014129638672 + ], + [ + "▁tassel", + -14.090252876281738 + ], + [ + "▁Fatigue", + -14.090309143066406 + ], + [ + "▁Teton", + -14.090340614318848 + ], + [ + "wake", + -14.090448379516602 + ], + [ + "▁tweed", + -14.090459823608398 + ], + [ + "modified", + -14.09052562713623 + ], + [ + "▁nostrils", + -14.090539932250977 + ], + [ + "Philadelphia", + -14.090558052062988 + ], + [ + "primarily", + -14.09055995941162 + ], + [ + "Engineering", + -14.090569496154783 + ], + [ + "Royce", + -14.090570449829102 + ], + [ + "PAY", + -14.090593338012695 + ], + [ + "specialty", + -14.090608596801758 + ], + [ + "Warren", + -14.090633392333984 + ], + [ + "▁pullover", + -14.090635299682615 + ], + [ + "▁distortions", + -14.090689659118652 + ], + [ + "decker", + -14.090726852416992 + ], + [ + "▁favoring", + -14.090742111206056 + ], + [ + "shops", + -14.090749740600586 + ], + [ + "ndre", + -14.090777397155762 + ], + [ + "▁$41", + -14.090803146362305 + ], + [ + "▁FSU", + -14.09087085723877 + ], + [ + "▁GPO", + -14.090923309326172 + ], + [ + "▁donates", + -14.090968132019045 + ], + [ + "▁Hummingbird", + -14.090988159179688 + ], + [ + "OTE", + -14.091005325317385 + ], + [ + "▁Supra", + -14.091005325317385 + ], + [ + "▁Shih", + -14.091008186340332 + ], + [ + "▁TRUST", + -14.091009140014648 + ], + [ + "▁Smoothie", + -14.091073036193848 + ], + [ + "3:30", + -14.09112548828125 + ], + [ + "▁stiffen", + -14.09114933013916 + ], + [ + "Menu", + -14.091185569763184 + ], + [ + "Require", + -14.091215133666992 + ], + [ + "bearer", + -14.091242790222168 + ], + [ + "usha", + -14.091279029846191 + ], + [ + "punch", + -14.091316223144531 + ], + [ + "▁sportsbook", + -14.091344833374023 + ], + [ + "▁handlebar", + -14.091395378112791 + ], + [ + "Prop", + -14.091413497924805 + ], + [ + "▁mais", + -14.091440200805664 + ], + [ + "▁PIP", + -14.09145450592041 + ], + [ + "▁techs", + -14.09149169921875 + ], + [ + "FLA", + -14.091634750366213 + ], + [ + "▁Chil", + -14.091655731201172 + ], + [ + "▁Pari", + -14.09165859222412 + ], + [ + "Butter", + -14.09166145324707 + ], + [ + "Gill", + -14.09172248840332 + ], + [ + "▁displace", + -14.091726303100586 + ], + [ + "▁peeking", + -14.091741561889648 + ], + [ + "▁257", + -14.091744422912598 + ], + [ + "Erik", + -14.091761589050291 + ], + [ + "Declar", + -14.091768264770508 + ], + [ + "raya", + -14.091837882995604 + ], + [ + "lud", + -14.091876029968262 + ], + [ + "Seed", + -14.09188175201416 + ], + [ + "PCS", + -14.091955184936523 + ], + [ + "▁lowland", + -14.09202766418457 + ], + [ + "▁publicize", + -14.09205150604248 + ], + [ + "▁Dex", + -14.092120170593262 + ], + [ + "▁lagu", + -14.092123031616213 + ], + [ + "jk", + -14.09221649169922 + ], + [ + "kyu", + -14.092287063598633 + ], + [ + "▁Jharkhand", + -14.092293739318848 + ], + [ + "▁Characteristics", + -14.092294692993164 + ], + [ + "▁Goliath", + -14.092294692993164 + ], + [ + "▁Pacquiao", + -14.092294692993164 + ], + [ + "▁adequacy", + 
-14.092294692993164 + ], + [ + "▁dispersal", + -14.092294692993164 + ], + [ + "▁permaculture", + -14.092294692993164 + ], + [ + "▁pilates", + -14.092294692993164 + ], + [ + "▁proximal", + -14.092294692993164 + ], + [ + "▁Daredevil", + -14.09229564666748 + ], + [ + "▁fidget", + -14.092296600341797 + ], + [ + "▁galactic", + -14.092302322387695 + ], + [ + "▁residencies", + -14.092304229736328 + ], + [ + "▁Cavalry", + -14.092312812805176 + ], + [ + "▁buttocks", + -14.092315673828123 + ], + [ + "▁Nottinghamshire", + -14.09231662750244 + ], + [ + "▁curries", + -14.092317581176758 + ], + [ + "▁kaolin", + -14.092320442199709 + ], + [ + "▁unsupervised", + -14.092324256896973 + ], + [ + "▁Calvert", + -14.092339515686035 + ], + [ + "▁Conquest", + -14.09234619140625 + ], + [ + "▁Eurozone", + -14.09239101409912 + ], + [ + "▁CenturyLink", + -14.09239387512207 + ], + [ + "▁astounded", + -14.092422485351562 + ], + [ + "▁Ignite", + -14.092442512512209 + ], + [ + "▁driftwood", + -14.09246063232422 + ], + [ + "allows", + -14.092496871948242 + ], + [ + "shoe", + -14.092509269714355 + ], + [ + "▁nigeria", + -14.092512130737305 + ], + [ + "Partner", + -14.092544555664062 + ], + [ + "▁Carrollton", + -14.092556953430176 + ], + [ + "▁sociologist", + -14.09256076812744 + ], + [ + "▁Toilets", + -14.092576026916504 + ], + [ + "▁Basis", + -14.092623710632324 + ], + [ + "▁Wilton", + -14.09266757965088 + ], + [ + "▁sinuses", + -14.09274959564209 + ], + [ + "▁Brody", + -14.09277057647705 + ], + [ + "▁eaves", + -14.092839241027832 + ], + [ + "▁banished", + -14.092840194702148 + ], + [ + "▁bagging", + -14.092846870422363 + ], + [ + "▁Subtitle", + -14.092857360839844 + ], + [ + "hanna", + -14.09289836883545 + ], + [ + "▁grits", + -14.092903137207031 + ], + [ + "977", + -14.092933654785156 + ], + [ + "KV", + -14.09299373626709 + ], + [ + "Grocer", + -14.093023300170898 + ], + [ + "▁looser", + -14.093024253845217 + ], + [ + "▁shards", + -14.093025207519531 + ], + [ + "▁loungers", + -14.093124389648438 + ], + [ + "cheon", + -14.093130111694336 + ], + [ + "▁Adventist", + -14.0931396484375 + ], + [ + "▁Tackle", + -14.093169212341309 + ], + [ + "▁crossovers", + -14.09322452545166 + ], + [ + "▁rewritten", + -14.093246459960938 + ], + [ + "didn", + -14.09326171875 + ], + [ + "kata", + -14.093273162841797 + ], + [ + "Gri", + -14.093303680419922 + ], + [ + "lize", + -14.093329429626465 + ], + [ + "▁westward", + -14.093332290649414 + ], + [ + "▁Acad", + -14.093335151672363 + ], + [ + "▁Observ", + -14.093335151672363 + ], + [ + "▁sifting", + -14.093338012695312 + ], + [ + "▁Deciding", + -14.093343734741213 + ], + [ + "▁268", + -14.0933837890625 + ], + [ + "▁Drying", + -14.09343433380127 + ], + [ + "▁EAST", + -14.093475341796877 + ], + [ + "QT", + -14.09356689453125 + ], + [ + "▁Asthma", + -14.09357738494873 + ], + [ + "▁garnished", + -14.093631744384766 + ], + [ + "▁SSE", + -14.093634605407717 + ], + [ + "▁Objectives", + -14.093676567077637 + ], + [ + "▁Emmett", + -14.093708038330078 + ], + [ + "▁TURN", + -14.09373378753662 + ], + [ + "jad", + -14.093745231628418 + ], + [ + "▁Gue", + -14.093757629394531 + ], + [ + "▁Peep", + -14.093843460083008 + ], + [ + "SSC", + -14.093883514404297 + ], + [ + "▁HEC", + -14.09389591217041 + ], + [ + "▁lawfully", + -14.093935012817385 + ], + [ + "▁Reload", + -14.09397029876709 + ], + [ + "▁rumour", + -14.093984603881836 + ], + [ + "iving", + -14.09402847290039 + ], + [ + "▁bookshop", + -14.094034194946287 + ], + [ + "▁matting", + -14.094133377075195 + ], + [ + "▁barbed", + -14.094143867492676 + ], + [ 
+ "▁FLAC", + -14.094176292419434 + ], + [ + "▁Shara", + -14.094178199768066 + ], + [ + "▁policyholders", + -14.094181060791016 + ], + [ + "▁flocked", + -14.094265937805176 + ], + [ + "▁excavator", + -14.094278335571287 + ], + [ + "3,200", + -14.094285011291504 + ], + [ + "▁Throne", + -14.094313621520996 + ], + [ + "515", + -14.094389915466309 + ], + [ + "▁Wether", + -14.09439182281494 + ], + [ + "rios", + -14.094430923461914 + ], + [ + "▁mealtime", + -14.094470024108888 + ], + [ + "▁mediators", + -14.094473838806152 + ], + [ + "▁Lieb", + -14.094476699829102 + ], + [ + "shares", + -14.094531059265137 + ], + [ + "▁3200", + -14.094563484191896 + ], + [ + "alarm", + -14.094569206237791 + ], + [ + "▁GRP", + -14.094605445861816 + ], + [ + "671", + -14.094667434692385 + ], + [ + "States", + -14.09476089477539 + ], + [ + "947", + -14.094766616821287 + ], + [ + "▁Notwithstanding", + -14.094802856445312 + ], + [ + "696", + -14.094857215881348 + ], + [ + "”...", + -14.094951629638672 + ], + [ + "988", + -14.09500217437744 + ], + [ + "GDP", + -14.09503936767578 + ], + [ + "▁Ugandan", + -14.095050811767578 + ], + [ + "▁Beneath", + -14.095111846923828 + ], + [ + "omatic", + -14.095135688781738 + ], + [ + "▁68%", + -14.095166206359863 + ], + [ + "Platform", + -14.095170974731444 + ], + [ + "867", + -14.095173835754396 + ], + [ + "▁spacers", + -14.095176696777344 + ], + [ + "Ontario", + -14.09518814086914 + ], + [ + "besides", + -14.095213890075684 + ], + [ + "possess", + -14.09523105621338 + ], + [ + "▁ASI", + -14.095244407653809 + ], + [ + "▁Dorn", + -14.095342636108398 + ], + [ + "Controller", + -14.095376014709473 + ], + [ + "SPO", + -14.095376014709473 + ], + [ + "▁Pele", + -14.09542179107666 + ], + [ + "▁reupholster", + -14.095542907714844 + ], + [ + "▁Passat", + -14.095548629760742 + ], + [ + "▁Thurs", + -14.095573425292969 + ], + [ + "▁Victim", + -14.095580101013184 + ], + [ + "▁Pax", + -14.095670700073242 + ], + [ + "▁Reserves", + -14.095677375793455 + ], + [ + "▁Fein", + -14.09573745727539 + ], + [ + "▁watercraft", + -14.095797538757324 + ], + [ + "carrier", + -14.095799446105955 + ], + [ + "Presented", + -14.095824241638184 + ], + [ + "▁NSK", + -14.095845222473145 + ], + [ + "▁Gazprom", + -14.095958709716797 + ], + [ + "ENA", + -14.096025466918944 + ], + [ + "▁Ferro", + -14.096091270446776 + ], + [ + "▁288", + -14.096110343933104 + ], + [ + "BACK", + -14.096153259277344 + ], + [ + "▁generalization", + -14.09617519378662 + ], + [ + "▁spook", + -14.09618854522705 + ], + [ + "Guardian", + -14.096267700195312 + ], + [ + "entity", + -14.096296310424805 + ], + [ + "▁Driveway", + -14.096305847167969 + ], + [ + "▁Filler", + -14.096314430236816 + ], + [ + "OPA", + -14.096355438232422 + ], + [ + "▁upmarket", + -14.096379280090332 + ], + [ + "▁Rani", + -14.096418380737305 + ], + [ + "▁KV", + -14.096445083618164 + ], + [ + "operators", + -14.096465110778809 + ], + [ + "Dir", + -14.096502304077148 + ], + [ + "banking", + -14.09652042388916 + ], + [ + "▁valor", + -14.09654712677002 + ], + [ + "TTA", + -14.096593856811523 + ], + [ + "Dav", + -14.096598625183104 + ], + [ + "▁10.2", + -14.096677780151367 + ], + [ + "ursuant", + -14.096697807312012 + ], + [ + "▁Qian", + -14.096718788146973 + ], + [ + "▁Ephesians", + -14.096722602844238 + ], + [ + "▁bequest", + -14.096760749816896 + ], + [ + "leh", + -14.096765518188477 + ], + [ + "▁Henning", + -14.09677028656006 + ], + [ + "▁Sprout", + -14.096778869628906 + ], + [ + "nter", + -14.09678077697754 + ], + [ + "▁Grizzly", + -14.096818923950195 + ], + [ + 
"▁Nantucket", + -14.096818923950195 + ], + [ + "▁Shreveport", + -14.096818923950195 + ], + [ + "▁elapsed", + -14.096818923950195 + ], + [ + "▁humiliating", + -14.096818923950195 + ], + [ + "▁tangerine", + -14.096819877624512 + ], + [ + "▁coroner", + -14.096820831298828 + ], + [ + "▁smitten", + -14.096820831298828 + ], + [ + "▁MDA", + -14.096821784973145 + ], + [ + "▁Belief", + -14.096830368041992 + ], + [ + "▁Guardiola", + -14.096831321716309 + ], + [ + "▁Soundproofing", + -14.096832275390623 + ], + [ + "▁bicycling", + -14.096837997436523 + ], + [ + "▁Montrose", + -14.096843719482422 + ], + [ + "▁ODBC", + -14.096846580505373 + ], + [ + "▁Avondale", + -14.096860885620115 + ], + [ + "▁Diabetic", + -14.09686279296875 + ], + [ + "▁Spectator", + -14.09686279296875 + ], + [ + "▁Percentage", + -14.096869468688965 + ], + [ + "▁Retention", + -14.096879959106444 + ], + [ + "▁hologram", + -14.096885681152344 + ], + [ + "▁bootcamp", + -14.096893310546877 + ], + [ + "684", + -14.096929550170898 + ], + [ + "▁Sexton", + -14.096932411193848 + ], + [ + "▁pressurized", + -14.096932411193848 + ], + [ + "▁unbroken", + -14.096935272216797 + ], + [ + "▁predictably", + -14.096938133239746 + ], + [ + "▁Minaj", + -14.096942901611328 + ], + [ + "▁beggar", + -14.097018241882324 + ], + [ + "Gone", + -14.09702205657959 + ], + [ + "▁secretaries", + -14.09702968597412 + ], + [ + "▁Airplane", + -14.09706974029541 + ], + [ + "▁footrest", + -14.097086906433104 + ], + [ + "▁quiche", + -14.09711742401123 + ], + [ + "Capturing", + -14.09713077545166 + ], + [ + "▁HDL", + -14.097210884094238 + ], + [ + "ound", + -14.097212791442873 + ], + [ + "▁nods", + -14.09726905822754 + ], + [ + "▁TIM", + -14.097283363342283 + ], + [ + "▁Pastoral", + -14.097286224365234 + ], + [ + "▁Cabins", + -14.097368240356444 + ], + [ + "Chen", + -14.097427368164062 + ], + [ + "▁6-10", + -14.097471237182615 + ], + [ + "vod", + -14.097527503967283 + ], + [ + "▁Lynx", + -14.097538948059082 + ], + [ + "▁Saffron", + -14.097541809082031 + ], + [ + "Strength", + -14.09755516052246 + ], + [ + "Linguist", + -14.097604751586914 + ], + [ + "▁Sheer", + -14.097627639770508 + ], + [ + "mk", + -14.097634315490724 + ], + [ + "▁camels", + -14.09764289855957 + ], + [ + "volve", + -14.097648620605469 + ], + [ + "▁Grateful", + -14.097658157348633 + ], + [ + "▁scab", + -14.097705841064451 + ], + [ + "▁Bec", + -14.097710609436035 + ], + [ + "▁Kaka", + -14.097738265991213 + ], + [ + "▁Rebate", + -14.09776210784912 + ], + [ + "▁parasol", + -14.097790718078612 + ], + [ + "▁whiff", + -14.09781551361084 + ], + [ + "mog", + -14.097851753234863 + ], + [ + ".95.", + -14.097885131835938 + ], + [ + "▁lowly", + -14.097919464111328 + ], + [ + "▁mane", + -14.097970008850098 + ], + [ + "▁Nicaraguan", + -14.09797191619873 + ], + [ + "▁artfully", + -14.097982406616213 + ], + [ + "guchi", + -14.098013877868652 + ], + [ + "▁94%", + -14.098055839538574 + ], + [ + "▁Trusts", + -14.098085403442385 + ], + [ + "▁enhancer", + -14.098090171813965 + ], + [ + "032", + -14.098132133483888 + ], + [ + "shipping", + -14.098140716552734 + ], + [ + "▁NIST", + -14.098156929016112 + ], + [ + "▁mart", + -14.098220825195312 + ], + [ + "▁psychiatrists", + -14.098220825195312 + ], + [ + "▁attuned", + -14.098273277282717 + ], + [ + "▁Spinach", + -14.098278045654297 + ], + [ + "9.2", + -14.098355293273926 + ], + [ + "dox", + -14.098359107971191 + ], + [ + "▁merino", + -14.098376274108888 + ], + [ + "1990", + -14.098408699035645 + ], + [ + "▁Compost", + -14.098429679870604 + ], + [ + "▁ICF", + -14.098491668701172 + 
], + [ + "▁diverting", + -14.09850025177002 + ], + [ + "▁Jacqui", + -14.098505973815918 + ], + [ + "▁nodding", + -14.098526000976562 + ], + [ + "#10", + -14.098580360412598 + ], + [ + "▁BLS", + -14.09859561920166 + ], + [ + "Strangely", + -14.098604202270508 + ], + [ + "▁Profession", + -14.09867000579834 + ], + [ + "▁bindings", + -14.098849296569824 + ], + [ + "Democrats", + -14.098869323730469 + ], + [ + "▁Isolate", + -14.09895133972168 + ], + [ + "▁Textbook", + -14.099021911621094 + ], + [ + "Cooper", + -14.099037170410156 + ], + [ + "▁Analyze", + -14.099190711975098 + ], + [ + "▁Completing", + -14.099238395690918 + ], + [ + "85,000", + -14.099267959594728 + ], + [ + "frog", + -14.099275588989258 + ], + [ + "kko", + -14.099334716796877 + ], + [ + "▁automates", + -14.09934425354004 + ], + [ + "▁SAD", + -14.099350929260254 + ], + [ + "▁JOHN", + -14.099353790283203 + ], + [ + "▁apricots", + -14.099364280700684 + ], + [ + "▁Rufus", + -14.099366188049316 + ], + [ + "▁Welt", + -14.099405288696287 + ], + [ + "▁astro", + -14.099411964416504 + ], + [ + "▁9:15", + -14.099424362182615 + ], + [ + "▁Pli", + -14.099428176879885 + ], + [ + "atch", + -14.099467277526855 + ], + [ + "▁Poi", + -14.099502563476562 + ], + [ + "▁Hanks", + -14.099514961242676 + ], + [ + "▁PID", + -14.0995454788208 + ], + [ + "▁COA", + -14.099587440490724 + ], + [ + "PSA", + -14.099597930908203 + ], + [ + "▁Andro", + -14.099600791931152 + ], + [ + "▁tablecloths", + -14.099615097045898 + ], + [ + "fuge", + -14.099616050720217 + ], + [ + "▁Introduce", + -14.09963321685791 + ], + [ + "▁Theoretical", + -14.099671363830566 + ], + [ + "▁Venom", + -14.099724769592283 + ], + [ + "▁EVs", + -14.099788665771484 + ], + [ + "▁esc", + -14.099791526794434 + ], + [ + "Samuel", + -14.0997953414917 + ], + [ + "▁Tasks", + -14.0997953414917 + ], + [ + "Evidence", + -14.099828720092772 + ], + [ + "several", + -14.099858283996582 + ], + [ + "onset", + -14.099885940551758 + ], + [ + "Generic", + -14.099902153015137 + ], + [ + "Cache", + -14.099905967712402 + ], + [ + "anov", + -14.09991455078125 + ], + [ + "species", + -14.099926948547363 + ], + [ + "obviously", + -14.09994888305664 + ], + [ + "▁nova", + -14.099961280822754 + ], + [ + "▁daffodils", + -14.099987030029297 + ], + [ + "▁cyclic", + -14.10001277923584 + ], + [ + "▁meek", + -14.100032806396484 + ], + [ + "CWA", + -14.100059509277344 + ], + [ + "▁Rankin", + -14.10006618499756 + ], + [ + "Buddy", + -14.100074768066406 + ], + [ + "chman", + -14.100080490112305 + ], + [ + "pé", + -14.100137710571287 + ], + [ + "▁$89", + -14.100183486938477 + ], + [ + "▁Priyanka", + -14.100275039672852 + ], + [ + "▁BDS", + -14.100292205810549 + ], + [ + "▁putty", + -14.100317001342772 + ], + [ + "Norman", + -14.100319862365724 + ], + [ + "▁pique", + -14.100340843200684 + ], + [ + "▁Strut", + -14.100419044494627 + ], + [ + "azole", + -14.100439071655272 + ], + [ + "▁Dae", + -14.100460052490234 + ], + [ + "▁hijacker", + -14.10048007965088 + ], + [ + "▁8-9", + -14.100543022155762 + ], + [ + "ché", + -14.10059928894043 + ], + [ + "▁Abd", + -14.100618362426758 + ], + [ + "▁Tipp", + -14.10070514678955 + ], + [ + "▁Pillows", + -14.100811004638672 + ], + [ + "Boo", + -14.10084056854248 + ], + [ + "logies", + -14.100844383239746 + ], + [ + "Guru", + -14.10085678100586 + ], + [ + "▁HIT", + -14.100862503051758 + ], + [ + "▁deliciousness", + -14.100886344909668 + ], + [ + "▁ramping", + -14.100893020629885 + ], + [ + "Chuck", + -14.100939750671388 + ], + [ + "▁9.3", + -14.100947380065918 + ], + [ + "▁287", + 
-14.100960731506348 + ], + [ + "Rex", + -14.100963592529297 + ], + [ + "pective", + -14.10100555419922 + ], + [ + "▁accumulates", + -14.101134300231934 + ], + [ + "▁fx", + -14.10114860534668 + ], + [ + "694", + -14.10116195678711 + ], + [ + "▁DY", + -14.101191520690918 + ], + [ + "foul", + -14.101202964782717 + ], + [ + "▁hermit", + -14.10128116607666 + ], + [ + "▁Linde", + -14.101288795471191 + ], + [ + "▁psalm", + -14.101306915283203 + ], + [ + "▁Dubrovnik", + -14.101365089416504 + ], + [ + "▁Fremantle", + -14.101365089416504 + ], + [ + "▁Lausanne", + -14.101365089416504 + ], + [ + "▁Vallarta", + -14.101365089416504 + ], + [ + "▁Yahweh", + -14.101365089416504 + ], + [ + "▁amethyst", + -14.101365089416504 + ], + [ + "▁procrastinate", + -14.101365089416504 + ], + [ + "▁Hagerstown", + -14.10136604309082 + ], + [ + "▁meningitis", + -14.10136604309082 + ], + [ + "▁barracks", + -14.101367950439451 + ], + [ + "▁shudder", + -14.10136890411377 + ], + [ + "▁tranquillity", + -14.10136890411377 + ], + [ + "▁odometer", + -14.101369857788086 + ], + [ + "▁Swirl", + -14.101372718811035 + ], + [ + "▁Bikini", + -14.101374626159668 + ], + [ + "▁impossibly", + -14.101383209228516 + ], + [ + "▁Foss", + -14.101385116577148 + ], + [ + "▁intermission", + -14.101404190063477 + ], + [ + "▁sugarcane", + -14.10141944885254 + ], + [ + "▁Graphite", + -14.101423263549805 + ], + [ + "▁pail", + -14.101428985595703 + ], + [ + "▁Skoda", + -14.101431846618652 + ], + [ + "Appoint", + -14.101439476013184 + ], + [ + "592", + -14.1014404296875 + ], + [ + "▁shippers", + -14.101446151733398 + ], + [ + "▁SoundCloud", + -14.10146427154541 + ], + [ + "▁Vitality", + -14.101483345031738 + ], + [ + "▁Skincare", + -14.101537704467772 + ], + [ + "▁$2.2", + -14.101539611816406 + ], + [ + "▁OpenGL", + -14.10157871246338 + ], + [ + "▁Jewelers", + -14.101585388183594 + ], + [ + "▁wristbands", + -14.101651191711426 + ], + [ + "zion", + -14.101666450500488 + ], + [ + "▁Hak", + -14.101673126220703 + ], + [ + "▁Cabinetry", + -14.101754188537598 + ], + [ + "▁gulp", + -14.101762771606444 + ], + [ + "▁pry", + -14.101770401000977 + ], + [ + "▁quar", + -14.10178279876709 + ], + [ + "▁Townsville", + -14.101787567138672 + ], + [ + "▁roundtrip", + -14.10191822052002 + ], + [ + "997", + -14.101940155029297 + ], + [ + "▁IPTV", + -14.101974487304688 + ], + [ + "▁scouring", + -14.102004051208496 + ], + [ + "▁heroism", + -14.102066040039062 + ], + [ + "▁Aliens", + -14.102109909057615 + ], + [ + "icio", + -14.102184295654297 + ], + [ + "▁moisturized", + -14.102190017700195 + ], + [ + "▁Aiken", + -14.102203369140623 + ], + [ + "▁Atmos", + -14.102211952209473 + ], + [ + "▁sprouting", + -14.102280616760254 + ], + [ + "▁RK", + -14.10232925415039 + ], + [ + "scroll", + -14.102339744567873 + ], + [ + "▁Melon", + -14.102428436279297 + ], + [ + "▁12′′", + -14.102446556091309 + ], + [ + "#11", + -14.102468490600586 + ], + [ + "▁quits", + -14.102527618408203 + ], + [ + "▁;-)", + -14.10254955291748 + ], + [ + "197", + -14.102559089660645 + ], + [ + "▁Chong", + -14.10257625579834 + ], + [ + "helmed", + -14.102578163146973 + ], + [ + "-2003", + -14.102581977844238 + ], + [ + "noy", + -14.10262393951416 + ], + [ + "▁Eastman", + -14.10263729095459 + ], + [ + "?????", + -14.102694511413574 + ], + [ + "518", + -14.102733612060549 + ], + [ + "▁XIV", + -14.10273551940918 + ], + [ + "▁Lukas", + -14.102807998657228 + ], + [ + "▁Seminars", + -14.102813720703123 + ], + [ + "▁jungles", + -14.102823257446287 + ], + [ + "▁searing", + -14.102964401245115 + ], + [ + "▁Leverage", + 
-14.103090286254885 + ], + [ + "NCC", + -14.10313320159912 + ], + [ + "▁Greenberg", + -14.103156089782717 + ], + [ + "CELL", + -14.103189468383787 + ], + [ + "▁impersonal", + -14.103203773498535 + ], + [ + "▁THANKS", + -14.103215217590332 + ], + [ + "▁Iranians", + -14.103249549865724 + ], + [ + "▁Scrapbook", + -14.103283882141112 + ], + [ + "▁mathematically", + -14.103318214416504 + ], + [ + "▁somber", + -14.103331565856934 + ], + [ + "shooting", + -14.103392601013184 + ], + [ + "884", + -14.103436470031738 + ], + [ + "783", + -14.103466033935549 + ], + [ + "TOOL", + -14.103519439697266 + ], + [ + "\"[", + -14.10352897644043 + ], + [ + "Ent", + -14.103530883789062 + ], + [ + "▁blackmail", + -14.103546142578123 + ], + [ + "▁fal", + -14.103562355041504 + ], + [ + "▁Logos", + -14.10358715057373 + ], + [ + "▁12:2", + -14.10360050201416 + ], + [ + "▁Syed", + -14.103657722473145 + ], + [ + "▁Unreal", + -14.10367488861084 + ], + [ + "dyed", + -14.10378646850586 + ], + [ + "▁Coated", + -14.103852272033691 + ], + [ + "SSE", + -14.10385513305664 + ], + [ + "activate", + -14.103866577148438 + ], + [ + "Vic", + -14.10389804840088 + ], + [ + "▁Carpets", + -14.10390567779541 + ], + [ + "▁Stow", + -14.10390567779541 + ], + [ + "▁kneel", + -14.104034423828123 + ], + [ + "Fragment", + -14.104124069213867 + ], + [ + "879", + -14.104130744934082 + ], + [ + "ETF", + -14.104130744934082 + ], + [ + "▁EXPERIENCE", + -14.104150772094728 + ], + [ + "▁ubuntu", + -14.104195594787598 + ], + [ + "Beau", + -14.104212760925291 + ], + [ + "vill", + -14.10422134399414 + ], + [ + "cave", + -14.104222297668455 + ], + [ + "preneur", + -14.104222297668455 + ], + [ + "▁xbox", + -14.104249000549316 + ], + [ + "▁Kut", + -14.104255676269531 + ], + [ + "akura", + -14.104270935058594 + ], + [ + "863", + -14.104294776916504 + ], + [ + "▁Lesser", + -14.104340553283691 + ], + [ + "ential", + -14.1043701171875 + ], + [ + "▁Sari", + -14.10438060760498 + ], + [ + "▁Saute", + -14.104488372802734 + ], + [ + "▁DOG", + -14.104503631591797 + ], + [ + "onie", + -14.104521751403809 + ], + [ + "▁Mature", + -14.104522705078123 + ], + [ + "▁brat", + -14.104596138000488 + ], + [ + "Diane", + -14.10461711883545 + ], + [ + "transform", + -14.104634284973145 + ], + [ + "578", + -14.104649543762209 + ], + [ + "Mainland", + -14.104652404785156 + ], + [ + "Closing", + -14.104655265808104 + ], + [ + "▁muti", + -14.104692459106444 + ], + [ + "esso", + -14.104698181152344 + ], + [ + "▁JN", + -14.104743957519531 + ], + [ + "▁Shred", + -14.104762077331545 + ], + [ + "Compact", + -14.104788780212402 + ], + [ + "▁whet", + -14.10484218597412 + ], + [ + "▁Nicely", + -14.104934692382812 + ], + [ + "▁Commit", + -14.104984283447266 + ], + [ + "rky", + -14.105009078979492 + ], + [ + "553", + -14.10507869720459 + ], + [ + "▁Sock", + -14.10509204864502 + ], + [ + "Shared", + -14.10520362854004 + ], + [ + "▁----", + -14.105242729187012 + ], + [ + "screw", + -14.105254173278809 + ], + [ + "honey", + -14.105376243591309 + ], + [ + "▁Uncover", + -14.105406761169434 + ], + [ + "▁reread", + -14.105513572692873 + ], + [ + "▁dudes", + -14.10553741455078 + ], + [ + "▁debunk", + -14.105596542358398 + ], + [ + "turbo", + -14.10561752319336 + ], + [ + "▁272", + -14.105740547180176 + ], + [ + "▁Patriarch", + -14.105891227722168 + ], + [ + "▁Lager", + -14.105892181396484 + ], + [ + "▁Peppers", + -14.105911254882812 + ], + [ + "cardi", + -14.105916976928713 + ], + [ + "▁Bhubaneswar", + -14.105931282043455 + ], + [ + "▁Logitech", + -14.105931282043455 + ], + [ + "▁Reuben", + 
-14.105931282043455 + ], + [ + "▁feisty", + -14.105931282043455 + ], + [ + "▁queuing", + -14.105931282043455 + ], + [ + "▁reconnaissance", + -14.105931282043455 + ], + [ + "▁travertine", + -14.105931282043455 + ], + [ + "▁unbreakable", + -14.105931282043455 + ], + [ + "▁Penthouse", + -14.10593318939209 + ], + [ + "▁Datsun", + -14.105938911437988 + ], + [ + "▁snuck", + -14.10594081878662 + ], + [ + "▁Donnelly", + -14.10594367980957 + ], + [ + "▁Barnet", + -14.105944633483888 + ], + [ + "▁acuity", + -14.105944633483888 + ], + [ + "▁Tomlinson", + -14.105951309204102 + ], + [ + "▁Shamrock", + -14.10595417022705 + ], + [ + "▁Botany", + -14.105963706970217 + ], + [ + "NCR", + -14.105982780456545 + ], + [ + "▁Err", + -14.105992317199709 + ], + [ + "▁dubbing", + -14.105998039245604 + ], + [ + "▁pediatrics", + -14.106008529663086 + ], + [ + "MPS", + -14.106040954589844 + ], + [ + "▁marinara", + -14.10605812072754 + ], + [ + "▁replicating", + -14.106108665466309 + ], + [ + "▁reprise", + -14.106120109558104 + ], + [ + "▁Luka", + -14.1061429977417 + ], + [ + "▁Systematic", + -14.106158256530762 + ], + [ + "▁firefighting", + -14.106172561645508 + ], + [ + "▁Launcher", + -14.106183052062988 + ], + [ + "▁Marjorie", + -14.10622501373291 + ], + [ + "▁Monash", + -14.10625171661377 + ], + [ + "▁affirms", + -14.106266975402832 + ], + [ + "▁remodelling", + -14.106287956237791 + ], + [ + "▁Selective", + -14.106362342834473 + ], + [ + "▁fatality", + -14.106367111206056 + ], + [ + "▁beveled", + -14.106437683105469 + ], + [ + "▁Kohler", + -14.1064453125 + ], + [ + "▁amphibians", + -14.106475830078123 + ], + [ + "Signal", + -14.1065034866333 + ], + [ + "▁Linkedin", + -14.106511116027832 + ], + [ + "▁Hubby", + -14.106547355651855 + ], + [ + "▁Compete", + -14.106559753417969 + ], + [ + "▁Expanded", + -14.106600761413574 + ], + [ + "▁Presenter", + -14.10660171508789 + ], + [ + "▁SPR", + -14.106613159179688 + ], + [ + "▁lipsticks", + -14.10661792755127 + ], + [ + "zha", + -14.106632232666016 + ], + [ + "▁DCS", + -14.106640815734863 + ], + [ + "▁Chur", + -14.106658935546877 + ], + [ + "▁CTR", + -14.106660842895508 + ], + [ + "▁flirting", + -14.106680870056152 + ], + [ + "▁retiree", + -14.106744766235352 + ], + [ + "831", + -14.106781005859377 + ], + [ + "▁284", + -14.106816291809082 + ], + [ + "▁Misty", + -14.106857299804688 + ], + [ + "1987", + -14.106863021850586 + ], + [ + "▁Firefighters", + -14.106882095336914 + ], + [ + "▁78%", + -14.106900215148926 + ], + [ + "▁Kob", + -14.106900215148926 + ], + [ + "Shan", + -14.10692024230957 + ], + [ + "brun", + -14.106943130493164 + ], + [ + "▁compaction", + -14.106971740722656 + ], + [ + "▁SLC", + -14.1069974899292 + ], + [ + "cyst", + -14.107110977172852 + ], + [ + "Broad", + -14.107137680053713 + ], + [ + "agra", + -14.10719108581543 + ], + [ + "tomy", + -14.107245445251465 + ], + [ + "▁Roughly", + -14.10726833343506 + ], + [ + "▁psd", + -14.107281684875488 + ], + [ + "▁Pile", + -14.10728645324707 + ], + [ + "▁Nei", + -14.107345581054688 + ], + [ + "Friend", + -14.107376098632812 + ], + [ + "▁Alc", + -14.107449531555176 + ], + [ + "682", + -14.10747528076172 + ], + [ + "▁rector", + -14.107573509216309 + ], + [ + "▁hexa", + -14.107603073120115 + ], + [ + "▁facials", + -14.107660293579102 + ], + [ + "▁Spra", + -14.107661247253418 + ], + [ + "▁Slat", + -14.107779502868652 + ], + [ + "Wang", + -14.107904434204102 + ], + [ + "▁downsize", + -14.10791301727295 + ], + [ + "▁crackle", + -14.108078956604004 + ], + [ + "▁tulip", + -14.108131408691406 + ], + [ + "▁Lufthansa", + 
-14.108158111572266 + ], + [ + "▁mobs", + -14.108224868774414 + ], + [ + "conv", + -14.108231544494627 + ], + [ + "▁ales", + -14.10824489593506 + ], + [ + "▁Saad", + -14.108277320861816 + ], + [ + "▁UVB", + -14.10828685760498 + ], + [ + "▁wagers", + -14.108294486999512 + ], + [ + "▁Feldman", + -14.108319282531738 + ], + [ + "OWER", + -14.108359336853027 + ], + [ + "▁Meade", + -14.10840892791748 + ], + [ + "▁Squid", + -14.10840892791748 + ], + [ + "wiz", + -14.108443260192873 + ], + [ + "enka", + -14.108461380004885 + ], + [ + "▁Cyn", + -14.108461380004885 + ], + [ + "▁plume", + -14.108466148376465 + ], + [ + "gula", + -14.1085205078125 + ], + [ + "▁Smoked", + -14.108552932739258 + ], + [ + "▁pico", + -14.108555793762209 + ], + [ + "954", + -14.108566284179688 + ], + [ + "▁Karin", + -14.108599662780762 + ], + [ + "▁WAIT", + -14.108684539794922 + ], + [ + "▁2019).", + -14.108689308166504 + ], + [ + "▁Shoppe", + -14.10875129699707 + ], + [ + "▁Telling", + -14.108845710754396 + ], + [ + "▁Phan", + -14.109004974365234 + ], + [ + "▁Judi", + -14.109025955200195 + ], + [ + "ogie", + -14.109057426452637 + ], + [ + "913", + -14.109116554260254 + ], + [ + "eru", + -14.109130859375 + ], + [ + "▁Zipper", + -14.109148025512695 + ], + [ + "issues", + -14.10915184020996 + ], + [ + "perishable", + -14.109167098999023 + ], + [ + "Recipe", + -14.10918140411377 + ], + [ + "Flood", + -14.109207153320312 + ], + [ + "SCE", + -14.109207153320312 + ], + [ + "Curious", + -14.10922622680664 + ], + [ + "roots", + -14.10926914215088 + ], + [ + "visibility", + -14.109289169311523 + ], + [ + "ences", + -14.109328269958496 + ], + [ + "LW", + -14.109367370605469 + ], + [ + "iyo", + -14.10939598083496 + ], + [ + "▁Mendel", + -14.109447479248049 + ], + [ + "PHO", + -14.10953426361084 + ], + [ + "▁canisters", + -14.109535217285156 + ], + [ + "▁Advantages", + -14.109582901000977 + ], + [ + "▁trespass", + -14.109609603881836 + ], + [ + "▁falter", + -14.109642028808594 + ], + [ + "▁scribble", + -14.109733581542969 + ], + [ + "locker", + -14.109770774841309 + ], + [ + "▁Ryu", + -14.10977554321289 + ], + [ + "▁indulged", + -14.109819412231444 + ], + [ + "▁meteorological", + -14.109855651855469 + ], + [ + "streaming", + -14.109872817993164 + ], + [ + "▁misc", + -14.109944343566896 + ], + [ + "▁prim", + -14.10997486114502 + ], + [ + "olan", + -14.110002517700195 + ], + [ + "▁caulk", + -14.110010147094728 + ], + [ + "▁Rok", + -14.110065460205078 + ], + [ + "Sort", + -14.110126495361328 + ], + [ + ".50.", + -14.11014175415039 + ], + [ + "▁Eritrea", + -14.110194206237791 + ], + [ + "aldehyde", + -14.11029815673828 + ], + [ + "situ", + -14.11032009124756 + ], + [ + "▁trinkets", + -14.11036491394043 + ], + [ + "Zi", + -14.110514640808104 + ], + [ + "▁Bounty", + -14.110518455505373 + ], + [ + "▁Nirvana", + -14.110518455505373 + ], + [ + "▁Oaxaca", + -14.110518455505373 + ], + [ + "▁Tuxedo", + -14.110518455505373 + ], + [ + "▁couscous", + -14.110518455505373 + ], + [ + "▁gypsy", + -14.110518455505373 + ], + [ + "▁semblance", + -14.110518455505373 + ], + [ + "▁tycoon", + -14.110518455505373 + ], + [ + "▁Allianz", + -14.110519409179688 + ], + [ + "▁TITLE", + -14.110522270202637 + ], + [ + "▁impunity", + -14.110522270202637 + ], + [ + "▁sultry", + -14.110528945922852 + ], + [ + "▁puberty", + -14.110530853271484 + ], + [ + "▁Brookings", + -14.110559463500977 + ], + [ + "▁Exposition", + -14.110560417175291 + ], + [ + "▁Schuster", + -14.110560417175291 + ], + [ + "▁Springsteen", + -14.110575675964355 + ], + [ + "carat", + -14.110588073730469 + 
], + [ + "▁Gladstone", + -14.110620498657228 + ], + [ + "▁Shangri", + -14.110625267028809 + ], + [ + "▁DMS", + -14.110665321350098 + ], + [ + "arin", + -14.110706329345703 + ], + [ + "▁haute", + -14.110709190368652 + ], + [ + "▁Gael", + -14.110711097717283 + ], + [ + "3.3%", + -14.11072826385498 + ], + [ + "▁Reps", + -14.110733032226562 + ], + [ + "▁Haines", + -14.110740661621094 + ], + [ + "▁predictability", + -14.110758781433104 + ], + [ + "▁Rarely", + -14.11076545715332 + ], + [ + "▁Darlington", + -14.110772132873535 + ], + [ + "Managed", + -14.110782623291016 + ], + [ + "▁Dwarf", + -14.110791206359863 + ], + [ + "customers", + -14.110800743103027 + ], + [ + "▁OX", + -14.110835075378418 + ], + [ + "/$", + -14.110840797424316 + ], + [ + "▁barbecues", + -14.110858917236328 + ], + [ + "▁gyro", + -14.110867500305176 + ], + [ + "▁Clue", + -14.110885620117188 + ], + [ + "vertical", + -14.11089324951172 + ], + [ + "▁Prosper", + -14.110929489135742 + ], + [ + "▁Locksmiths", + -14.110980987548828 + ], + [ + "▁princes", + -14.110986709594728 + ], + [ + "▁Lovecraft", + -14.110995292663574 + ], + [ + "▁creeps", + -14.110998153686523 + ], + [ + "prof", + -14.111000061035156 + ], + [ + "▁parrots", + -14.111051559448242 + ], + [ + "TPH", + -14.111053466796877 + ], + [ + "▁Kera", + -14.111109733581545 + ], + [ + "Whew", + -14.111127853393556 + ], + [ + "▁derailed", + -14.111132621765137 + ], + [ + "ullo", + -14.11113452911377 + ], + [ + "▁Scotty", + -14.111150741577148 + ], + [ + "▁Kitts", + -14.111164093017578 + ], + [ + "▁IMG", + -14.111230850219728 + ], + [ + "▁ripening", + -14.111286163330078 + ], + [ + "▁Stefani", + -14.111323356628418 + ], + [ + "▁nooks", + -14.111363410949709 + ], + [ + "▁treks", + -14.111363410949709 + ], + [ + "▁Eo", + -14.111376762390137 + ], + [ + "▁lowercase", + -14.11147117614746 + ], + [ + "ouf", + -14.11151885986328 + ], + [ + "▁ramble", + -14.11152172088623 + ], + [ + "Tender", + -14.11153507232666 + ], + [ + "▁GRI", + -14.111580848693848 + ], + [ + "kms", + -14.111632347106934 + ], + [ + "▁recur", + -14.11168384552002 + ], + [ + "▁Pah", + -14.111702919006348 + ], + [ + "supporting", + -14.111709594726562 + ], + [ + "▁Malo", + -14.111726760864258 + ], + [ + "▁kneeling", + -14.11172866821289 + ], + [ + "GLOBE", + -14.111737251281738 + ], + [ + "▁EIA", + -14.111761093139648 + ], + [ + "▁Bung", + -14.111772537231444 + ], + [ + "▁ascension", + -14.111774444580078 + ], + [ + "blogs", + -14.111825942993164 + ], + [ + "▁10/10", + -14.111846923828123 + ], + [ + "▁sau", + -14.111865043640137 + ], + [ + "▁Barron", + -14.11190128326416 + ], + [ + "roads", + -14.111908912658691 + ], + [ + "▁Acta", + -14.111910820007324 + ], + [ + "▁merch", + -14.111913681030272 + ], + [ + "▁Ornament", + -14.1119384765625 + ], + [ + "▁brooding", + -14.111964225769045 + ], + [ + "▁Bigfoot", + -14.112004280090332 + ], + [ + "▁lags", + -14.112006187438965 + ], + [ + "+3", + -14.11201000213623 + ], + [ + "▁Keyless", + -14.112027168273926 + ], + [ + "▁muy", + -14.112086296081545 + ], + [ + "jor", + -14.112096786499023 + ], + [ + "PHY", + -14.1121244430542 + ], + [ + "▁APT", + -14.112147331237791 + ], + [ + "Called", + -14.11216163635254 + ], + [ + "084", + -14.112183570861816 + ], + [ + "▁infidelity", + -14.112186431884766 + ], + [ + "▁trumps", + -14.112189292907717 + ], + [ + "UID", + -14.112194061279297 + ], + [ + "hawks", + -14.11229133605957 + ], + [ + "▁Elo", + -14.11232566833496 + ], + [ + "flowers", + -14.112351417541504 + ], + [ + "▁Kash", + -14.11235809326172 + ], + [ + "▁Roberta", + 
-14.11240577697754 + ], + [ + "▁Referee", + -14.11241054534912 + ], + [ + "▁384", + -14.11244010925293 + ], + [ + "isk", + -14.112542152404783 + ], + [ + "Jenn", + -14.112683296203612 + ], + [ + "▁insoles", + -14.112695693969728 + ], + [ + "▁cit", + -14.112696647644045 + ], + [ + "▁fluctuation", + -14.112775802612305 + ], + [ + "542", + -14.112855911254885 + ], + [ + "▁nibble", + -14.112885475158691 + ], + [ + "▁Ode", + -14.112934112548828 + ], + [ + "cay", + -14.112936973571776 + ], + [ + "▁passcode", + -14.112961769104004 + ], + [ + "▁IEP", + -14.113073348999023 + ], + [ + "Papa", + -14.113077163696287 + ], + [ + "aside", + -14.113129615783691 + ], + [ + "booked", + -14.113144874572754 + ], + [ + "SKA", + -14.113154411315918 + ], + [ + "▁Beh", + -14.1132173538208 + ], + [ + "schools", + -14.113292694091797 + ], + [ + "bod", + -14.11331272125244 + ], + [ + "▁BRCA", + -14.113337516784668 + ], + [ + "Exit", + -14.113348007202148 + ], + [ + "IPA", + -14.11337184906006 + ], + [ + "▁Thrift", + -14.113397598266602 + ], + [ + "▁fetching", + -14.113585472106934 + ], + [ + "▁interprets", + -14.113638877868652 + ], + [ + "▁BST", + -14.113741874694824 + ], + [ + "mores", + -14.113792419433594 + ], + [ + "Mono", + -14.113799095153809 + ], + [ + "Dennis", + -14.11390781402588 + ], + [ + "Affordable", + -14.11391830444336 + ], + [ + "Atlanta", + -14.11392307281494 + ], + [ + "Hudson", + -14.11392307281494 + ], + [ + "▁CIR", + -14.113940238952637 + ], + [ + "927", + -14.1139497756958 + ], + [ + "Hidden", + -14.11396312713623 + ], + [ + "gnu", + -14.11396598815918 + ], + [ + "hungry", + -14.113987922668455 + ], + [ + "satellite", + -14.11402702331543 + ], + [ + "english", + -14.114053726196287 + ], + [ + "Stamp", + -14.11406421661377 + ], + [ + "▁René", + -14.114079475402832 + ], + [ + "▁Alchemy", + -14.114134788513184 + ], + [ + "▁Confidential", + -14.1141357421875 + ], + [ + "generic", + -14.114161491394045 + ], + [ + "▁RTO", + -14.114258766174316 + ], + [ + "▁quarterfinals", + -14.114301681518556 + ], + [ + "▁unicorns", + -14.11430835723877 + ], + [ + "▁haters", + -14.11440086364746 + ], + [ + "▁archeological", + -14.114457130432127 + ], + [ + "▁Lef", + -14.114462852478027 + ], + [ + "▁Socio", + -14.114477157592772 + ], + [ + "▁(2004).", + -14.114505767822266 + ], + [ + "Described", + -14.114513397216797 + ], + [ + "▁mainline", + -14.114521026611328 + ], + [ + "▁subsidize", + -14.11452579498291 + ], + [ + "▁functioned", + -14.114531517028809 + ], + [ + "Chemical", + -14.114543914794922 + ], + [ + "STD", + -14.114566802978516 + ], + [ + "anan", + -14.114629745483398 + ], + [ + "▁Crit", + -14.114630699157717 + ], + [ + "▁Voi", + -14.114706993103027 + ], + [ + "koo", + -14.114709854125977 + ], + [ + "jean", + -14.11471176147461 + ], + [ + "▁Roar", + -14.11471176147461 + ], + [ + "▁recapture", + -14.11471176147461 + ], + [ + "▁militias", + -14.11474609375 + ], + [ + "▁abolition", + -14.114789009094238 + ], + [ + "SOURCE", + -14.114811897277832 + ], + [ + "▁staircases", + -14.114824295043944 + ], + [ + "FER", + -14.114888191223145 + ], + [ + "▁Conquer", + -14.1149320602417 + ], + [ + "▁sandal", + -14.11495304107666 + ], + [ + "▁Badgers", + -14.114964485168455 + ], + [ + "▁FILE", + -14.115023612976074 + ], + [ + "▁Assemblies", + -14.115126609802246 + ], + [ + "▁Mojave", + -14.115126609802246 + ], + [ + "▁Remedies", + -14.115126609802246 + ], + [ + "▁Thessaloniki", + -14.115126609802246 + ], + [ + "▁cacti", + -14.115126609802246 + ], + [ + "▁nefarious", + -14.115126609802246 + ], + [ + "▁nemesis", + 
-14.115126609802246 + ], + [ + "▁pizzeria", + -14.115126609802246 + ], + [ + "▁unspoiled", + -14.115126609802246 + ], + [ + "▁Boehner", + -14.115127563476562 + ], + [ + "▁Eminem", + -14.115127563476562 + ], + [ + "▁stenosis", + -14.115127563476562 + ], + [ + "▁terrarium", + -14.11512851715088 + ], + [ + "▁Thieves", + -14.115131378173828 + ], + [ + "Investigate", + -14.11513328552246 + ], + [ + "▁Zillow", + -14.11513614654541 + ], + [ + "▁Tyrone", + -14.115141868591309 + ], + [ + "▁pewter", + -14.115142822265623 + ], + [ + "▁SNMP", + -14.115157127380373 + ], + [ + "▁Whittier", + -14.115169525146484 + ], + [ + "▁morphine", + -14.115169525146484 + ], + [ + "▁MBBS", + -14.115172386169434 + ], + [ + "▁Commandments", + -14.115179061889648 + ], + [ + "hae", + -14.115188598632812 + ], + [ + "▁Huntingdon", + -14.115196228027344 + ], + [ + "▁endothelial", + -14.115198135375977 + ], + [ + "▁Coordinating", + -14.115203857421877 + ], + [ + "▁Duvet", + -14.115204811096191 + ], + [ + "▁Exemption", + -14.11521339416504 + ], + [ + "▁lamination", + -14.115220069885254 + ], + [ + "▁Drywall", + -14.115222930908203 + ], + [ + "▁Creatures", + -14.115225791931152 + ], + [ + "▁fondue", + -14.115232467651367 + ], + [ + "▁NICU", + -14.115252494812012 + ], + [ + "▁Orthodontics", + -14.115320205688477 + ], + [ + "▁hammocks", + -14.115325927734377 + ], + [ + "ONG", + -14.115327835083008 + ], + [ + "▁Cushing", + -14.115333557128906 + ], + [ + "▁Elsewhere", + -14.115337371826172 + ], + [ + "▁Aspects", + -14.115349769592283 + ], + [ + "▁moisturizers", + -14.115398406982422 + ], + [ + "▁phonics", + -14.11542510986328 + ], + [ + "▁Rosary", + -14.115434646606444 + ], + [ + "▁Blunt", + -14.115452766418455 + ], + [ + "lemon", + -14.11551284790039 + ], + [ + "lise", + -14.115525245666504 + ], + [ + "▁affix", + -14.115609169006348 + ], + [ + "▁Anu", + -14.11562156677246 + ], + [ + "bias", + -14.115643501281738 + ], + [ + "▁expatriates", + -14.115655899047852 + ], + [ + "▁nerds", + -14.115689277648926 + ], + [ + "▁sequentially", + -14.115718841552734 + ], + [ + "▁Crude", + -14.115730285644531 + ], + [ + "▁goblin", + -14.115750312805176 + ], + [ + "▁Pong", + -14.115800857543944 + ], + [ + "stine", + -14.11587905883789 + ], + [ + "▁kata", + -14.11587905883789 + ], + [ + "▁5/8", + -14.115893363952637 + ], + [ + "▁#18", + -14.11589527130127 + ], + [ + "▁kra", + -14.11595630645752 + ], + [ + "▁explodes", + -14.115970611572266 + ], + [ + "▁subconsciously", + -14.11598300933838 + ], + [ + "▁Hymn", + -14.11604118347168 + ], + [ + "▁carpool", + -14.116050720214844 + ], + [ + "▁BASIC", + -14.116209030151367 + ], + [ + "hiri", + -14.116215705871582 + ], + [ + "▁Dolan", + -14.116219520568848 + ], + [ + "▁glycemic", + -14.116262435913086 + ], + [ + "▁cribs", + -14.11630916595459 + ], + [ + "▁Smyth", + -14.116352081298828 + ], + [ + "▁Dir", + -14.11637020111084 + ], + [ + "669", + -14.116413116455078 + ], + [ + "▁Emphasis", + -14.11643886566162 + ], + [ + "▁unread", + -14.116497993469238 + ], + [ + "historical", + -14.116504669189451 + ], + [ + "pacing", + -14.116517066955566 + ], + [ + "▁$3.5", + -14.116518020629885 + ], + [ + "rology", + -14.116520881652832 + ], + [ + "▁Kes", + -14.116520881652832 + ], + [ + "▁soldered", + -14.116586685180664 + ], + [ + "▁Khe", + -14.116606712341309 + ], + [ + "▁Laval", + -14.116647720336914 + ], + [ + "Prize", + -14.116654396057127 + ], + [ + "▁Glan", + -14.11669635772705 + ], + [ + "oglu", + -14.116700172424316 + ], + [ + "▁shockingly", + -14.116898536682127 + ], + [ + "▁injector", + -14.116920471191406 + 
], + [ + "▁convincingly", + -14.116925239562988 + ], + [ + "▁Stroll", + -14.117000579833984 + ], + [ + "▁Kyo", + -14.11700439453125 + ], + [ + "charger", + -14.117015838623049 + ], + [ + "▁200%", + -14.117090225219728 + ], + [ + "▁Harman", + -14.117095947265623 + ], + [ + "▁Lodging", + -14.117143630981444 + ], + [ + "▁Attract", + -14.11716365814209 + ], + [ + "robert", + -14.11717128753662 + ], + [ + "▁1,5", + -14.117183685302734 + ], + [ + "2.00", + -14.117201805114746 + ], + [ + "▁Rhein", + -14.117204666137695 + ], + [ + "▁Ewing", + -14.11722183227539 + ], + [ + "824", + -14.117234230041504 + ], + [ + "▁Discharge", + -14.117291450500488 + ], + [ + "▁Isis", + -14.117292404174805 + ], + [ + "▁Watchers", + -14.117302894592283 + ], + [ + "▁Whistle", + -14.117338180541992 + ], + [ + "▁Mech", + -14.117364883422852 + ], + [ + "▁duke", + -14.117369651794434 + ], + [ + "▁sobering", + -14.11739444732666 + ], + [ + "▁cofounder", + -14.117435455322266 + ], + [ + "▁Ches", + -14.117462158203123 + ], + [ + "▁Bled", + -14.117469787597656 + ], + [ + "EEA", + -14.117491722106934 + ], + [ + "▁overtly", + -14.1174955368042 + ], + [ + "decade", + -14.117504119873049 + ], + [ + "663", + -14.11751651763916 + ], + [ + "▁AEG", + -14.117523193359377 + ], + [ + "929", + -14.11756706237793 + ], + [ + "▁PSG", + -14.117612838745115 + ], + [ + "▁jamb", + -14.117616653442385 + ], + [ + "▁Sheen", + -14.117650032043455 + ], + [ + "▁AML", + -14.117671012878418 + ], + [ + "▁fortify", + -14.11768627166748 + ], + [ + "CSI", + -14.117688179016112 + ], + [ + "▁KPI", + -14.117732048034668 + ], + [ + "FLOW", + -14.11774444580078 + ], + [ + "▁Bosco", + -14.11776351928711 + ], + [ + "▁Preference", + -14.11777114868164 + ], + [ + "▁extinguisher", + -14.117772102355955 + ], + [ + "▁footnote", + -14.117798805236816 + ], + [ + "2:15", + -14.117810249328612 + ], + [ + "BAND", + -14.117810249328612 + ], + [ + "Circ", + -14.117912292480469 + ], + [ + "▁cityscape", + -14.117928504943848 + ], + [ + "▁Schiff", + -14.117979049682615 + ], + [ + "parking", + -14.11802101135254 + ], + [ + "tracks", + -14.118022918701172 + ], + [ + "-110", + -14.118045806884766 + ], + [ + "Conf", + -14.118059158325195 + ], + [ + "eche", + -14.118072509765623 + ], + [ + "▁Reap", + -14.11807632446289 + ], + [ + "▁Projection", + -14.118080139160156 + ], + [ + "lge", + -14.118096351623535 + ], + [ + "▁PICK", + -14.118206977844238 + ], + [ + "leu", + -14.118255615234377 + ], + [ + "▁Liter", + -14.118280410766602 + ], + [ + "▁Ethn", + -14.118290901184082 + ], + [ + "▁Faced", + -14.118325233459473 + ], + [ + "chine", + -14.11846160888672 + ], + [ + "certification", + -14.118511199951172 + ], + [ + "▁Depart", + -14.118517875671388 + ], + [ + "inta", + -14.118552207946776 + ], + [ + "Brief", + -14.118565559387209 + ], + [ + "Named", + -14.118599891662598 + ], + [ + "promotion", + -14.118617057800291 + ], + [ + "▁2700", + -14.118633270263672 + ], + [ + "Minnesota", + -14.11865520477295 + ], + [ + "▁connotations", + -14.118658065795898 + ], + [ + "Packard", + -14.118684768676758 + ], + [ + "concert", + -14.1187162399292 + ], + [ + "▁PTI", + -14.11881160736084 + ], + [ + "▁Affiliates", + -14.118826866149902 + ], + [ + "Powerful", + -14.11894702911377 + ], + [ + "971", + -14.118963241577148 + ], + [ + "▁Xing", + -14.118972778320312 + ], + [ + "viewer", + -14.119056701660156 + ], + [ + "▁sacraments", + -14.119218826293944 + ], + [ + "CSD", + -14.119245529174805 + ], + [ + "Breath", + -14.119266510009766 + ], + [ + "TAG", + -14.119279861450195 + ], + [ + "czyk", + 
-14.119282722473145 + ], + [ + "▁sheeting", + -14.11934757232666 + ], + [ + "▁Bana", + -14.119455337524414 + ], + [ + "▁264", + -14.11955451965332 + ], + [ + "Invite", + -14.11956787109375 + ], + [ + "▁Continued", + -14.1195707321167 + ], + [ + "▁slum", + -14.1195707321167 + ], + [ + "ITC", + -14.119593620300291 + ], + [ + "FORCE", + -14.119616508483888 + ], + [ + "▁diverge", + -14.119683265686035 + ], + [ + "▁Geographical", + -14.119715690612791 + ], + [ + "synaptic", + -14.119755744934082 + ], + [ + "▁INSURANCE", + -14.119755744934082 + ], + [ + "▁Tequila", + -14.119755744934082 + ], + [ + "▁aggravating", + -14.119755744934082 + ], + [ + "▁curcumin", + -14.119755744934082 + ], + [ + "▁dissipation", + -14.119755744934082 + ], + [ + "▁glutathione", + -14.119755744934082 + ], + [ + "▁phrasing", + -14.119755744934082 + ], + [ + "▁trajectories", + -14.119755744934082 + ], + [ + "▁votre", + -14.119756698608398 + ], + [ + "▁Nagasaki", + -14.119757652282717 + ], + [ + "▁Unisex", + -14.119757652282717 + ], + [ + "▁melbourne", + -14.119757652282717 + ], + [ + "▁Bakersfield", + -14.119759559631348 + ], + [ + "▁JPMorgan", + -14.119759559631348 + ], + [ + "▁agitator", + -14.119759559631348 + ], + [ + "▁INCLUDED", + -14.119760513305664 + ], + [ + "▁NAACP", + -14.119763374328612 + ], + [ + "▁transatlantic", + -14.119763374328612 + ], + [ + "▁Naruto", + -14.119766235351562 + ], + [ + "▁cardiology", + -14.119768142700195 + ], + [ + "▁citric", + -14.119771003723145 + ], + [ + "▁Subdivision", + -14.11977195739746 + ], + [ + "▁ablation", + -14.119780540466309 + ], + [ + "▁Inuit", + -14.11978816986084 + ], + [ + "▁uproar", + -14.119800567626951 + ], + [ + "▁Jericho", + -14.11980152130127 + ], + [ + "▁Spectre", + -14.11981964111328 + ], + [ + "▁Detention", + -14.11982536315918 + ], + [ + "▁unspoken", + -14.11983585357666 + ], + [ + "▁Tinker", + -14.119843482971191 + ], + [ + "▁Sumner", + -14.119844436645508 + ], + [ + "▁unanticipated", + -14.119865417480469 + ], + [ + "▁radishes", + -14.119890213012695 + ], + [ + "▁Exile", + -14.119892120361328 + ], + [ + "▁ligne", + -14.119911193847656 + ], + [ + "▁Roswell", + -14.11992645263672 + ], + [ + "▁Chrissy", + -14.1199369430542 + ], + [ + "▁Fairmont", + -14.119945526123049 + ], + [ + "▁kelp", + -14.119952201843262 + ], + [ + "659", + -14.11996364593506 + ], + [ + "▁perceptive", + -14.11996841430664 + ], + [ + "▁Bangladeshi", + -14.11999797821045 + ], + [ + "▁immigrated", + -14.12000846862793 + ], + [ + "▁Dreaming", + -14.120074272155762 + ], + [ + "vous", + -14.120080947875977 + ], + [ + "▁Reproduction", + -14.120084762573242 + ], + [ + "▁Hearst", + -14.120136260986328 + ], + [ + "▁2017/18", + -14.12013816833496 + ], + [ + "▁Prove", + -14.120155334472656 + ], + [ + "▁redeemable", + -14.120162963867188 + ], + [ + "▁alphabetically", + -14.120166778564451 + ], + [ + "▁pitting", + -14.120192527770996 + ], + [ + "▁kam", + -14.120193481445312 + ], + [ + "▁standardize", + -14.120198249816896 + ], + [ + "▁Singaporean", + -14.12020206451416 + ], + [ + "▁Ministerial", + -14.120267868041992 + ], + [ + "heard", + -14.120285034179688 + ], + [ + "▁Cashback", + -14.120317459106444 + ], + [ + "▁Properly", + -14.120319366455078 + ], + [ + "▁Safeway", + -14.120363235473633 + ], + [ + "▁Moderne", + -14.120378494262695 + ], + [ + "nae", + -14.12038516998291 + ], + [ + "▁Aden", + -14.120396614074709 + ], + [ + "▁scourge", + -14.120443344116213 + ], + [ + "Olive", + -14.120573043823242 + ], + [ + "▁Robust", + -14.120598793029783 + ], + [ + "Buyers", + -14.120611190795898 + ], + [ + 
"BHK", + -14.12063694000244 + ], + [ + "▁Rosewood", + -14.120682716369627 + ], + [ + "ebr", + -14.12069320678711 + ], + [ + "▁tumours", + -14.120701789855955 + ], + [ + "kiss", + -14.12073802947998 + ], + [ + "9999", + -14.120800018310549 + ], + [ + "WEAR", + -14.12086296081543 + ], + [ + "▁Kav", + -14.12087631225586 + ], + [ + "Equip", + -14.12091827392578 + ], + [ + "▁Least", + -14.120970726013184 + ], + [ + "otherwise", + -14.12100601196289 + ], + [ + "eland", + -14.12100887298584 + ], + [ + "▁amends", + -14.121009826660156 + ], + [ + "▁overtaken", + -14.121047973632812 + ], + [ + "▁4).", + -14.121052742004396 + ], + [ + "▁faxes", + -14.121057510375977 + ], + [ + "Enhanc", + -14.121100425720217 + ], + [ + "▁CERN", + -14.121142387390137 + ], + [ + "▁sighed", + -14.121175765991213 + ], + [ + "▁kites", + -14.121192932128906 + ], + [ + "proven", + -14.121201515197754 + ], + [ + "iggy", + -14.121204376220703 + ], + [ + "$6", + -14.12126636505127 + ], + [ + "▁VPNs", + -14.121336936950684 + ], + [ + "▁acronyms", + -14.121346473693848 + ], + [ + "▁barbell", + -14.12135410308838 + ], + [ + "▁Grady", + -14.121444702148438 + ], + [ + "▁fundamentalist", + -14.121479034423828 + ], + [ + "Designated", + -14.121633529663086 + ], + [ + "▁sown", + -14.121651649475098 + ], + [ + "▁plastered", + -14.12165355682373 + ], + [ + "▁EURO", + -14.121664047241213 + ], + [ + "sourcing", + -14.121675491333008 + ], + [ + "▁beater", + -14.121702194213867 + ], + [ + "Hai", + -14.12171459197998 + ], + [ + "▁Temperatures", + -14.121726989746094 + ], + [ + "▁Flange", + -14.121787071228027 + ], + [ + "▁faraway", + -14.12182903289795 + ], + [ + "▁Barley", + -14.12184715270996 + ], + [ + "▁Pregnant", + -14.121885299682615 + ], + [ + "▁hatching", + -14.121892929077148 + ], + [ + "spice", + -14.121918678283691 + ], + [ + "▁Handcrafted", + -14.12193775177002 + ], + [ + "Tai", + -14.121942520141602 + ], + [ + "▁KIDS", + -14.121950149536133 + ], + [ + "mobi", + -14.121975898742676 + ], + [ + "qr", + -14.122020721435549 + ], + [ + "▁archaeologist", + -14.1221342086792 + ], + [ + "oslav", + -14.122152328491213 + ], + [ + "Av", + -14.122234344482422 + ], + [ + "▁Toad", + -14.122332572937012 + ], + [ + "gling", + -14.122464179992676 + ], + [ + "▁Profits", + -14.122572898864746 + ], + [ + "095", + -14.122612953186035 + ], + [ + "▁WBC", + -14.122614860534668 + ], + [ + "▁Glycol", + -14.122661590576172 + ], + [ + "▁tithe", + -14.122674942016602 + ], + [ + "▁1952,", + -14.122688293457031 + ], + [ + "▁RECORD", + -14.122718811035156 + ], + [ + "▁Headquarter", + -14.122727394104004 + ], + [ + "00000", + -14.122739791870115 + ], + [ + "▁pretended", + -14.122760772705078 + ], + [ + "▁Hasan", + -14.12283420562744 + ], + [ + "▁motorbikes", + -14.122909545898438 + ], + [ + "SIL", + -14.12297821044922 + ], + [ + "cier", + -14.123068809509276 + ], + [ + "particle", + -14.123106956481934 + ], + [ + "▁PY", + -14.123113632202148 + ], + [ + "iment", + -14.12315273284912 + ], + [ + "indoor", + -14.12315273284912 + ], + [ + "▁accomplishes", + -14.123174667358398 + ], + [ + "▁redone", + -14.123228073120115 + ], + [ + "tiful", + -14.123254776000977 + ], + [ + "bner", + -14.12327480316162 + ], + [ + "vince", + -14.12329387664795 + ], + [ + "troph", + -14.12331771850586 + ], + [ + "▁Magistrates", + -14.123367309570312 + ], + [ + "aim", + -14.123406410217283 + ], + [ + "reliance", + -14.12341594696045 + ], + [ + "Clinton", + -14.12342929840088 + ], + [ + "Joshua", + -14.123431205749512 + ], + [ + "foreign", + -14.123433113098145 + ], + [ + "Analysts", + 
-14.123435974121094 + ], + [ + "grinding", + -14.12343692779541 + ], + [ + "▁lipo", + -14.123442649841309 + ], + [ + "Bishop", + -14.123465538024902 + ], + [ + "basically", + -14.123477935791016 + ], + [ + "▁Inver", + -14.123479843139648 + ], + [ + "navi", + -14.123480796813965 + ], + [ + "Christine", + -14.12348747253418 + ], + [ + "Database", + -14.123501777648926 + ], + [ + "wik", + -14.123592376708984 + ], + [ + "▁Amethyst", + -14.123639106750488 + ], + [ + "AAC", + -14.123651504516602 + ], + [ + "▁mathematicians", + -14.123663902282717 + ], + [ + "▁Windy", + -14.12367820739746 + ], + [ + "▁$175", + -14.1237154006958 + ], + [ + "enfeld", + -14.12373161315918 + ], + [ + "▁negotiators", + -14.123773574829102 + ], + [ + "▁scrutinize", + -14.123855590820312 + ], + [ + "▁stutter", + -14.123860359191896 + ], + [ + "▁Carne", + -14.123871803283691 + ], + [ + "carrying", + -14.123919486999512 + ], + [ + "Thin", + -14.123921394348145 + ], + [ + "MHS", + -14.124017715454102 + ], + [ + "vb", + -14.124032020568848 + ], + [ + "▁7-6", + -14.124061584472656 + ], + [ + "▁Subs", + -14.124100685119627 + ], + [ + "▁Warming", + -14.124103546142578 + ], + [ + "▁github", + -14.124141693115234 + ], + [ + "Absent", + -14.124144554138184 + ], + [ + "▁harvests", + -14.124150276184082 + ], + [ + "▁SAGE", + -14.124181747436523 + ], + [ + "richard", + -14.124190330505373 + ], + [ + "792", + -14.124235153198242 + ], + [ + "rkin", + -14.124237060546877 + ], + [ + "▁Rel", + -14.124250411987305 + ], + [ + "yev", + -14.124260902404783 + ], + [ + "▁Overland", + -14.124346733093262 + ], + [ + "▁Shouldn", + -14.124347686767578 + ], + [ + "▁Aadhaar", + -14.124406814575195 + ], + [ + "▁Housewives", + -14.124406814575195 + ], + [ + "▁LITTLE", + -14.124406814575195 + ], + [ + "▁ambulatory", + -14.124406814575195 + ], + [ + "▁evaporator", + -14.124406814575195 + ], + [ + "▁introspection", + -14.124406814575195 + ], + [ + "▁lollipop", + -14.124406814575195 + ], + [ + "▁paraffin", + -14.124406814575195 + ], + [ + "▁twinkling", + -14.124406814575195 + ], + [ + "▁accutane", + -14.124407768249512 + ], + [ + "▁Electromagnetic", + -14.124411582946776 + ], + [ + "▁Compliant", + -14.124412536621094 + ], + [ + "▁SWAT", + -14.124418258666992 + ], + [ + "▁raymond", + -14.124423027038574 + ], + [ + "▁Sevilla", + -14.124425888061523 + ], + [ + "▁Dorsey", + -14.124430656433104 + ], + [ + "▁robberies", + -14.124430656433104 + ], + [ + "▁Decimal", + -14.124432563781738 + ], + [ + "piste", + -14.124442100524902 + ], + [ + "▁(1990)", + -14.12444305419922 + ], + [ + "▁taupe", + -14.124479293823242 + ], + [ + "▁Inverter", + -14.124504089355469 + ], + [ + "▁Attending", + -14.124506950378418 + ], + [ + "▁1999)", + -14.124524116516112 + ], + [ + "▁thermodynamic", + -14.124547004699709 + ], + [ + "▁Simeon", + -14.124550819396973 + ], + [ + "▁Delray", + -14.12466049194336 + ], + [ + "▁insecticides", + -14.124669075012209 + ], + [ + "▁reflexes", + -14.12468147277832 + ], + [ + "▁harshest", + -14.12470531463623 + ], + [ + "▁glistening", + -14.124719619750977 + ], + [ + "▁Monkeys", + -14.124726295471191 + ], + [ + "▁$2000", + -14.124747276306152 + ], + [ + "▁CREB", + -14.12480926513672 + ], + [ + "jerk", + -14.124841690063477 + ], + [ + "▁dimple", + -14.12489128112793 + ], + [ + "▁Nell", + -14.124893188476562 + ], + [ + "holic", + -14.124895095825195 + ], + [ + "▁Lug", + -14.1249418258667 + ], + [ + "▁sooooo", + -14.124947547912598 + ], + [ + "isen", + -14.124950408935549 + ], + [ + "▁Grin", + -14.125006675720217 + ], + [ + "▁harmonica", + 
-14.125006675720217 + ], + [ + "MAIL", + -14.12510108947754 + ], + [ + "bending", + -14.125144004821776 + ], + [ + "▁Tav", + -14.12520980834961 + ], + [ + "▁Ramona", + -14.125213623046877 + ], + [ + "▁Achieving", + -14.125238418579102 + ], + [ + "▁Ghat", + -14.125248908996582 + ], + [ + "15,000", + -14.125283241271973 + ], + [ + "▁Edmunds", + -14.125306129455566 + ], + [ + "▁TPP", + -14.125329971313477 + ], + [ + "▁digi", + -14.125425338745115 + ], + [ + "▁esophageal", + -14.12543487548828 + ], + [ + "▁SMSF", + -14.12549114227295 + ], + [ + "▁Eurasian", + -14.125494003295898 + ], + [ + "rbe", + -14.125523567199709 + ], + [ + "helm", + -14.125529289245604 + ], + [ + "▁submarines", + -14.125544548034668 + ], + [ + "▁reinventing", + -14.125548362731934 + ], + [ + "▁Ohm", + -14.125550270080566 + ], + [ + "huang", + -14.12557888031006 + ], + [ + "olysis", + -14.125580787658691 + ], + [ + "COMM", + -14.125652313232422 + ], + [ + "glyph", + -14.125655174255373 + ], + [ + "ppellate", + -14.125693321228027 + ], + [ + "▁pruned", + -14.125755310058594 + ], + [ + "▁touchpoint", + -14.12581729888916 + ], + [ + "▁$140", + -14.125824928283691 + ], + [ + "Pete", + -14.125852584838867 + ], + [ + "▁telecast", + -14.12586784362793 + ], + [ + "▁petting", + -14.125894546508787 + ], + [ + "jac", + -14.125953674316406 + ], + [ + "▁sep", + -14.126035690307615 + ], + [ + "agus", + -14.126039505004885 + ], + [ + "▁diese", + -14.12605094909668 + ], + [ + "EASE", + -14.126104354858398 + ], + [ + "719", + -14.126120567321776 + ], + [ + "▁Notch", + -14.126178741455078 + ], + [ + "▁Apron", + -14.126180648803713 + ], + [ + "FIP", + -14.12621021270752 + ], + [ + "▁Nitrogen", + -14.12628173828125 + ], + [ + "CLA", + -14.12630844116211 + ], + [ + "Adapted", + -14.126314163208008 + ], + [ + "flavin", + -14.126344680786133 + ], + [ + "INGTON", + -14.126346588134766 + ], + [ + "▁maxim", + -14.126347541809082 + ], + [ + "▁dressers", + -14.126371383666992 + ], + [ + "▁gong", + -14.126373291015623 + ], + [ + "▁Eater", + -14.12649631500244 + ], + [ + "▁ambulances", + -14.126520156860352 + ], + [ + "096", + -14.126538276672363 + ], + [ + "embo", + -14.126554489135742 + ], + [ + "Drag", + -14.12662124633789 + ], + [ + "azu", + -14.126643180847168 + ], + [ + "▁Passed", + -14.126675605773926 + ], + [ + "Untitled", + -14.126705169677734 + ], + [ + "proxy", + -14.12671184539795 + ], + [ + "TABLE", + -14.126737594604492 + ], + [ + "▁lurk", + -14.126758575439451 + ], + [ + "▁stalwart", + -14.126836776733398 + ], + [ + "▁animator", + -14.126843452453612 + ], + [ + "heartedly", + -14.12686538696289 + ], + [ + "▁Hugs", + -14.126893043518066 + ], + [ + "bw", + -14.12690258026123 + ], + [ + "▁Roles", + -14.126977920532228 + ], + [ + "Dur", + -14.127007484436035 + ], + [ + "▁BUILD", + -14.127035140991213 + ], + [ + "▁CCP", + -14.127135276794434 + ], + [ + "▁Emer", + -14.127209663391112 + ], + [ + "▁Bitcoins", + -14.12727165222168 + ], + [ + "▁Captiva", + -14.127306938171388 + ], + [ + "▁’80", + -14.127325057983398 + ], + [ + "usan", + -14.127357482910156 + ], + [ + "▁simplification", + -14.127361297607422 + ], + [ + "▁exchangers", + -14.127378463745115 + ], + [ + "ohm", + -14.127470016479492 + ], + [ + "▁Jac", + -14.127497673034668 + ], + [ + "cron", + -14.12751293182373 + ], + [ + "▁Yikes", + -14.127610206604004 + ], + [ + "▁backrest", + -14.127615928649902 + ], + [ + "▁stints", + -14.1276216506958 + ], + [ + "▁Asians", + -14.127631187438965 + ], + [ + "medi", + -14.127660751342772 + ], + [ + "Went", + -14.127668380737305 + ], + [ + "lark", + 
-14.12767219543457 + ], + [ + "834", + -14.127680778503418 + ], + [ + "▁commenter", + -14.127696990966797 + ], + [ + "▁2025,", + -14.12770652770996 + ], + [ + "Tier", + -14.12777042388916 + ], + [ + "lodge", + -14.127802848815918 + ], + [ + "▁Gib", + -14.127827644348145 + ], + [ + "▁Scenario", + -14.12782859802246 + ], + [ + "SER", + -14.127920150756836 + ], + [ + "▁CTC", + -14.128023147583008 + ], + [ + "LEC", + -14.128029823303224 + ], + [ + "acco", + -14.128043174743652 + ], + [ + "constructed", + -14.128068923950195 + ], + [ + "▁PRA", + -14.128108024597168 + ], + [ + "▁Telegram", + -14.12812328338623 + ], + [ + "079", + -14.12816333770752 + ], + [ + "▁CUT", + -14.12816333770752 + ], + [ + "skinned", + -14.128183364868164 + ], + [ + "encompassing", + -14.128195762634276 + ], + [ + "Kansas", + -14.128209114074709 + ], + [ + "▁banded", + -14.128247261047363 + ], + [ + "Verse", + -14.128276824951172 + ], + [ + "Marcus", + -14.128299713134766 + ], + [ + "Purple", + -14.128347396850586 + ], + [ + "▁Pawn", + -14.128374099731444 + ], + [ + "EAP", + -14.128376960754396 + ], + [ + "Region", + -14.12852954864502 + ], + [ + "▁Debut", + -14.12852954864502 + ], + [ + "Jenny", + -14.128548622131348 + ], + [ + "▁YouTuber", + -14.128621101379396 + ], + [ + "▁Iain", + -14.128670692443848 + ], + [ + "bride", + -14.128671646118164 + ], + [ + "▁haste", + -14.128724098205566 + ], + [ + "▁bruise", + -14.128745079040527 + ], + [ + "▁breastfeed", + -14.128807067871094 + ], + [ + "damaged", + -14.128824234008787 + ], + [ + "▁howling", + -14.12884521484375 + ], + [ + "▁2800", + -14.128946304321287 + ], + [ + "963", + -14.128971099853516 + ], + [ + "dores", + -14.129003524780272 + ], + [ + "Ease", + -14.129020690917969 + ], + [ + "▁proportionate", + -14.129051208496094 + ], + [ + "▁Rowing", + -14.12906551361084 + ], + [ + "▁Acacia", + -14.129079818725586 + ], + [ + "▁Hodgson", + -14.129079818725586 + ], + [ + "▁Priscilla", + -14.129079818725586 + ], + [ + "▁Sheppard", + -14.129079818725586 + ], + [ + "▁melatonin", + -14.129079818725586 + ], + [ + "▁reminiscing", + -14.129079818725586 + ], + [ + "▁superstition", + -14.129079818725586 + ], + [ + "▁syndication", + -14.129079818725586 + ], + [ + "▁volkswagen", + -14.129079818725586 + ], + [ + "▁Argentinian", + -14.129080772399902 + ], + [ + "▁Floridian", + -14.129080772399902 + ], + [ + "▁grandiose", + -14.129080772399902 + ], + [ + "▁Styling", + -14.129082679748535 + ], + [ + "▁fiasco", + -14.129085540771484 + ], + [ + "▁cufflinks", + -14.129088401794434 + ], + [ + "▁telemedicine", + -14.12908935546875 + ], + [ + "▁Cusco", + -14.129090309143066 + ], + [ + "▁purée", + -14.129093170166016 + ], + [ + "▁emcee", + -14.129097938537598 + ], + [ + "▁mingling", + -14.129104614257812 + ], + [ + "▁practicable", + -14.129106521606444 + ], + [ + "▁lazada", + -14.129109382629396 + ], + [ + "▁Clem", + -14.129115104675291 + ], + [ + "▁Davos", + -14.129115104675291 + ], + [ + "▁Gosling", + -14.129115104675291 + ], + [ + "▁Joplin", + -14.12911605834961 + ], + [ + "▁NFPA", + -14.12911605834961 + ], + [ + "▁Shimano", + -14.129120826721191 + ], + [ + "▁Steamboat", + -14.129121780395508 + ], + [ + "▁Ellington", + -14.129130363464355 + ], + [ + "▁Skyrim", + -14.129133224487305 + ], + [ + "▁oligo", + -14.129141807556152 + ], + [ + "LAC", + -14.129145622253418 + ], + [ + "▁NSE", + -14.12917423248291 + ], + [ + "▁Methodology", + -14.129178047180176 + ], + [ + "▁GERD", + -14.129179954528809 + ], + [ + "Specifies", + -14.129185676574709 + ], + [ + "▁perpetrated", + -14.129207611083984 + ], + [ 
+ "▁stairwell", + -14.129210472106934 + ], + [ + "Vertical", + -14.129233360290527 + ], + [ + "asana", + -14.129267692565918 + ], + [ + "▁unturned", + -14.129270553588867 + ], + [ + "▁fielded", + -14.129353523254396 + ], + [ + "▁employability", + -14.129356384277344 + ], + [ + "▁BODY", + -14.12936305999756 + ], + [ + "▁Childcare", + -14.129377365112305 + ], + [ + "▁assassinated", + -14.129383087158203 + ], + [ + "▁adjourned", + -14.12938404083252 + ], + [ + "▁eloquently", + -14.129390716552734 + ], + [ + "▁symmetric", + -14.12939739227295 + ], + [ + "CEN", + -14.129429817199709 + ], + [ + "▁Reasonable", + -14.129461288452148 + ], + [ + "Absorb", + -14.12946319580078 + ], + [ + "▁LOST", + -14.129549026489258 + ], + [ + "Sage", + -14.12962532043457 + ], + [ + "CEE", + -14.129650115966797 + ], + [ + "▁IAS", + -14.129683494567873 + ], + [ + "▁manger", + -14.129761695861816 + ], + [ + "#4", + -14.12986660003662 + ], + [ + "▁Kingsley", + -14.129956245422363 + ], + [ + "Coco", + -14.129968643188477 + ], + [ + "▁Raff", + -14.129987716674805 + ], + [ + "▁Conscious", + -14.12998867034912 + ], + [ + "▁Yangon", + -14.130005836486816 + ], + [ + "▁STC", + -14.130043029785156 + ], + [ + "▁fright", + -14.130049705505373 + ], + [ + "▁jay", + -14.130074501037598 + ], + [ + "▁humanoid", + -14.130080223083496 + ], + [ + "▁costco", + -14.130087852478027 + ], + [ + "Vine", + -14.130088806152344 + ], + [ + "ORD", + -14.130099296569824 + ], + [ + "▁FDR", + -14.130105018615724 + ], + [ + "▁Plugins", + -14.13011360168457 + ], + [ + "▁avenge", + -14.130200386047363 + ], + [ + "▁Sensory", + -14.130204200744627 + ], + [ + "▁temptations", + -14.130227088928224 + ], + [ + "Sending", + -14.13028621673584 + ], + [ + "▁spp", + -14.13033390045166 + ], + [ + "▁Domaine", + -14.130334854125977 + ], + [ + "▁clowns", + -14.130358695983888 + ], + [ + "▁Cables", + -14.13037109375 + ], + [ + "▁agonist", + -14.13039493560791 + ], + [ + "▁VL", + -14.130406379699709 + ], + [ + "▁Paragraph", + -14.130407333374023 + ], + [ + "▁Doherty", + -14.130410194396973 + ], + [ + "emission", + -14.130444526672363 + ], + [ + "▁VEN", + -14.130449295043944 + ], + [ + "▁(56", + -14.130464553833008 + ], + [ + "▁RMA", + -14.130473136901855 + ], + [ + "▁homebrew", + -14.130534172058104 + ], + [ + "▁Oneida", + -14.130558967590332 + ], + [ + "Backup", + -14.130562782287598 + ], + [ + "spots", + -14.130586624145508 + ], + [ + "(10)", + -14.13063621520996 + ], + [ + "▁Defenders", + -14.130715370178224 + ], + [ + "▁Scarf", + -14.130752563476562 + ], + [ + "▁Handler", + -14.130772590637209 + ], + [ + "▁browsed", + -14.130775451660156 + ], + [ + "▁Sheds", + -14.1307954788208 + ], + [ + "walled", + -14.130946159362791 + ], + [ + "COS", + -14.130949020385742 + ], + [ + "▁63%", + -14.130988121032717 + ], + [ + "▁9-12", + -14.13101291656494 + ], + [ + "▁Avant", + -14.13101577758789 + ], + [ + "▁converged", + -14.131134033203123 + ], + [ + "▁Mastering", + -14.131226539611816 + ], + [ + "▁discarding", + -14.13124942779541 + ], + [ + "▁NIS", + -14.13129997253418 + ], + [ + "▁phoned", + -14.131516456604004 + ], + [ + "Fil", + -14.131601333618164 + ], + [ + "SAFE", + -14.131601333618164 + ], + [ + "▁shakers", + -14.131608963012695 + ], + [ + "▁peter", + -14.131690979003906 + ], + [ + "EVA", + -14.131694793701172 + ], + [ + "▁Lumen", + -14.131706237792969 + ], + [ + "framed", + -14.131741523742676 + ], + [ + "▁pretzel", + -14.13174533843994 + ], + [ + "▁freezers", + -14.131746292114258 + ], + [ + "▁Gables", + -14.13180923461914 + ], + [ + "avoid", + -14.131821632385254 + 
], + [ + "▁genital", + -14.13188648223877 + ], + [ + "▁lockable", + -14.131898880004885 + ], + [ + "▁SER", + -14.131940841674805 + ], + [ + "▁Topper", + -14.131994247436523 + ], + [ + "▁64%", + -14.132024765014648 + ], + [ + "Ry", + -14.132040977478027 + ], + [ + "▁cardholder", + -14.132142066955566 + ], + [ + "▁Indica", + -14.13216495513916 + ], + [ + "????????????????", + -14.13217830657959 + ], + [ + "▁Specify", + -14.132193565368652 + ], + [ + "▁ATO", + -14.132204055786133 + ], + [ + "▁overpower", + -14.132248878479004 + ], + [ + "Adams", + -14.132283210754396 + ], + [ + "▁amuse", + -14.13228702545166 + ], + [ + "resource", + -14.132319450378418 + ], + [ + "▁TBI", + -14.13232421875 + ], + [ + "multidimensional", + -14.132411003112791 + ], + [ + "options", + -14.13246726989746 + ], + [ + "▁Qin", + -14.132539749145508 + ], + [ + "THA", + -14.13259506225586 + ], + [ + "▁unclog", + -14.132597923278809 + ], + [ + "▁Mindy", + -14.13262939453125 + ], + [ + "▁Vino", + -14.132654190063477 + ], + [ + "ternity", + -14.132696151733398 + ], + [ + "▁footwork", + -14.132697105407717 + ], + [ + "rsa", + -14.132729530334473 + ], + [ + "owsky", + -14.132770538330078 + ], + [ + "▁Nuggets", + -14.132826805114746 + ], + [ + "▁skyrocket", + -14.132895469665527 + ], + [ + "▁Windmill", + -14.132980346679688 + ], + [ + "Oliver", + -14.133017539978027 + ], + [ + "Investigators", + -14.133028984069824 + ], + [ + "witness", + -14.133037567138672 + ], + [ + "▁bushings", + -14.133057594299316 + ], + [ + "▁carmaker", + -14.133065223693848 + ], + [ + "▁Mee", + -14.133087158203123 + ], + [ + "▁66%", + -14.13314723968506 + ], + [ + "▁Flavour", + -14.13314723968506 + ], + [ + "▁Dominique", + -14.133234977722168 + ], + [ + "▁Swords", + -14.133245468139648 + ], + [ + "evident", + -14.13326930999756 + ], + [ + "Neo", + -14.133289337158203 + ], + [ + "481", + -14.133333206176758 + ], + [ + "Daddy", + -14.133343696594238 + ], + [ + "▁1750", + -14.133371353149414 + ], + [ + "▁MONTH", + -14.133374214172363 + ], + [ + "▁defences", + -14.13338565826416 + ], + [ + "Recipient", + -14.133493423461914 + ], + [ + "starred", + -14.133537292480469 + ], + [ + "▁Scrolls", + -14.133593559265137 + ], + [ + "▁Tahiti", + -14.13366985321045 + ], + [ + "▁outback", + -14.13371467590332 + ], + [ + "▁ESS", + -14.133721351623535 + ], + [ + "Lux", + -14.133763313293455 + ], + [ + "▁Chromium", + -14.133774757385254 + ], + [ + "▁McPherson", + -14.133774757385254 + ], + [ + "▁compatriot", + -14.133774757385254 + ], + [ + "▁entropy", + -14.133774757385254 + ], + [ + "▁hemorrhage", + -14.133774757385254 + ], + [ + "▁prostitution", + -14.133774757385254 + ], + [ + "▁satisfactorily", + -14.133774757385254 + ], + [ + "▁wobbly", + -14.133774757385254 + ], + [ + "▁spectrometry", + -14.13377571105957 + ], + [ + "▁Psi", + -14.133779525756836 + ], + [ + "▁hon", + -14.133779525756836 + ], + [ + "▁mak", + -14.133779525756836 + ], + [ + "▁Cremation", + -14.133780479431152 + ], + [ + "▁Sudbury", + -14.13378620147705 + ], + [ + "▁ketosis", + -14.13378620147705 + ], + [ + "▁NAFTA", + -14.133792877197266 + ], + [ + "▁Stylist", + -14.133827209472656 + ], + [ + "▁Headphones", + -14.133830070495604 + ], + [ + "chapter", + -14.133837699890137 + ], + [ + "▁indebtedness", + -14.13384246826172 + ], + [ + "▁Towne", + -14.133844375610352 + ], + [ + "▁Stratton", + -14.13388729095459 + ], + [ + "▁tutu", + -14.13389492034912 + ], + [ + "affi", + -14.133898735046388 + ], + [ + "▁Reproductive", + -14.133904457092283 + ], + [ + "▁thorn", + -14.133915901184082 + ], + [ + 
"▁Planetary", + -14.133935928344728 + ], + [ + "▁fanbase", + -14.133950233459473 + ], + [ + "▁vertebrate", + -14.133990287780762 + ], + [ + "▁385", + -14.134004592895508 + ], + [ + "Painting", + -14.134015083312988 + ], + [ + "▁Sauna", + -14.134021759033203 + ], + [ + "▁IFC", + -14.13407039642334 + ], + [ + "▁ohio", + -14.134071350097656 + ], + [ + "▁Husky", + -14.134089469909668 + ], + [ + "▁Cyclo", + -14.134113311767578 + ], + [ + "▁collated", + -14.134122848510742 + ], + [ + "▁strictest", + -14.134145736694336 + ], + [ + "▁curiously", + -14.13417148590088 + ], + [ + "▁Fett", + -14.134209632873535 + ], + [ + "▁Proposals", + -14.134209632873535 + ], + [ + "▁Flexi", + -14.134220123291016 + ], + [ + "▁Hadley", + -14.134221076965332 + ], + [ + "▁Masque", + -14.134227752685549 + ], + [ + "▁CTE", + -14.134233474731444 + ], + [ + "lution", + -14.134234428405762 + ], + [ + "Num", + -14.13427448272705 + ], + [ + "▁Microphone", + -14.134305000305176 + ], + [ + "▁Weigh", + -14.134355545043944 + ], + [ + "ANY", + -14.134395599365234 + ], + [ + "▁rearview", + -14.13440227508545 + ], + [ + "erna", + -14.134476661682127 + ], + [ + "▁takedown", + -14.134529113769531 + ], + [ + "▁hedges", + -14.134540557861328 + ], + [ + "aad", + -14.134608268737791 + ], + [ + "▁beaming", + -14.134647369384766 + ], + [ + "taken", + -14.13467788696289 + ], + [ + "Antibiotic", + -14.134730339050291 + ], + [ + "▁UCC", + -14.1348237991333 + ], + [ + "Fisher", + -14.13483428955078 + ], + [ + "▁fran", + -14.13485336303711 + ], + [ + "▁CBSE", + -14.134855270385742 + ], + [ + "▁Vivi", + -14.134875297546388 + ], + [ + "▁Oversized", + -14.134886741638184 + ], + [ + "ethylene", + -14.134907722473145 + ], + [ + "HEAR", + -14.134917259216309 + ], + [ + "▁overstated", + -14.134976387023926 + ], + [ + "▁timbers", + -14.134995460510254 + ], + [ + "enbaum", + -14.13499641418457 + ], + [ + "▁Bli", + -14.135004043579102 + ], + [ + "▁quid", + -14.135010719299316 + ], + [ + "▁revolved", + -14.135034561157228 + ], + [ + "▁brownish", + -14.135061264038086 + ], + [ + "▁fowl", + -14.135117530822754 + ], + [ + "▁Awakens", + -14.135120391845703 + ], + [ + "pons", + -14.135171890258787 + ], + [ + "▁BSN", + -14.135211944580078 + ], + [ + "eddy", + -14.135247230529783 + ], + [ + "Matic", + -14.135286331176758 + ], + [ + "▁dodging", + -14.135315895080566 + ], + [ + "▁Planners", + -14.135327339172363 + ], + [ + "bhai", + -14.135342597961426 + ], + [ + "bbling", + -14.135420799255373 + ], + [ + "▁Jordanian", + -14.13542652130127 + ], + [ + "DEL", + -14.135526657104492 + ], + [ + "▁cashing", + -14.135541915893556 + ], + [ + "▁Sneakers", + -14.13556957244873 + ], + [ + "runners", + -14.13557243347168 + ], + [ + "▁softest", + -14.13558292388916 + ], + [ + "▁Strato", + -14.135583877563477 + ], + [ + "▁Yue", + -14.13566780090332 + ], + [ + "Tue", + -14.13571548461914 + ], + [ + "▁Decks", + -14.135786056518556 + ], + [ + "▁scallop", + -14.135955810546877 + ], + [ + "yani", + -14.136077880859377 + ], + [ + "iente", + -14.136123657226562 + ], + [ + "▁Crust", + -14.136133193969728 + ], + [ + "▁collectable", + -14.1361665725708 + ], + [ + "olina", + -14.136185646057127 + ], + [ + "▁mashup", + -14.136212348937988 + ], + [ + "▁FRP", + -14.13622760772705 + ], + [ + "▁blunder", + -14.136375427246094 + ], + [ + "REG", + -14.136388778686523 + ], + [ + "▁antidepressant", + -14.136393547058104 + ], + [ + "oeuvre", + -14.1364107131958 + ], + [ + "tien", + -14.136439323425291 + ], + [ + "-1970", + -14.136476516723633 + ], + [ + "▁Mog", + -14.136483192443848 + ], + [ + 
"lch", + -14.13649845123291 + ], + [ + "▁Amana", + -14.136552810668944 + ], + [ + "▁Caesars", + -14.136558532714844 + ], + [ + "▁defying", + -14.13657283782959 + ], + [ + "Resources", + -14.136614799499512 + ], + [ + "085", + -14.1367826461792 + ], + [ + "aic", + -14.13685703277588 + ], + [ + "▁BMP", + -14.136858940124512 + ], + [ + "Winners", + -14.136916160583496 + ], + [ + "▁Awa", + -14.136982917785645 + ], + [ + "TORS", + -14.13698387145996 + ], + [ + "▁Tees", + -14.137005805969238 + ], + [ + "Awards", + -14.13703727722168 + ], + [ + "▁greenish", + -14.137055397033691 + ], + [ + "▁Heng", + -14.137178421020508 + ], + [ + "▁Reflection", + -14.137240409851074 + ], + [ + "▁Gospels", + -14.137248039245604 + ], + [ + "▁Voter", + -14.137354850769045 + ], + [ + "REX", + -14.137385368347168 + ], + [ + "▁Yoko", + -14.137405395507812 + ], + [ + "dien", + -14.13742733001709 + ], + [ + "▁Intl", + -14.137432098388672 + ], + [ + "▁dropper", + -14.137456893920898 + ], + [ + "▁Unusual", + -14.137493133544922 + ], + [ + "Banks", + -14.137499809265137 + ], + [ + "tok", + -14.13750457763672 + ], + [ + "cluster", + -14.137506484985352 + ], + [ + "▁momma", + -14.13751220703125 + ], + [ + "▁tributes", + -14.137518882751465 + ], + [ + "▁wayfair", + -14.137540817260742 + ], + [ + "▁4-7", + -14.137548446655272 + ], + [ + "QF", + -14.13755226135254 + ], + [ + "CHS", + -14.137587547302246 + ], + [ + "▁mutants", + -14.1376314163208 + ], + [ + "▁mos", + -14.13778305053711 + ], + [ + "▁Establishing", + -14.137799263000488 + ], + [ + "Might", + -14.137810707092283 + ], + [ + "Surprisingly", + -14.137823104858398 + ], + [ + "cli", + -14.137823104858398 + ], + [ + "signature", + -14.137843132019045 + ], + [ + "pleasing", + -14.137849807739258 + ], + [ + "insulated", + -14.137885093688965 + ], + [ + "▁SHO", + -14.137898445129396 + ], + [ + "instant", + -14.137911796569824 + ], + [ + "doodle", + -14.13792324066162 + ], + [ + "boyfriend", + -14.137967109680176 + ], + [ + "hrer", + -14.138050079345703 + ], + [ + "▁$52", + -14.138050079345703 + ], + [ + "▁Strictly", + -14.138055801391602 + ], + [ + "torrent", + -14.138063430786133 + ], + [ + "▁Survive", + -14.138067245483398 + ], + [ + "▁Ela", + -14.13807773590088 + ], + [ + "▁CDP", + -14.138094902038574 + ], + [ + "roasted", + -14.138096809387209 + ], + [ + "▁Crews", + -14.138097763061523 + ], + [ + "881", + -14.138129234313965 + ], + [ + "teller", + -14.13818645477295 + ], + [ + "▁1837", + -14.138202667236328 + ], + [ + "bong", + -14.138243675231934 + ], + [ + "cane", + -14.138262748718262 + ], + [ + "Submission", + -14.13828182220459 + ], + [ + "crushing", + -14.138324737548828 + ], + [ + "▁showered", + -14.138349533081056 + ], + [ + "lbert", + -14.138372421264648 + ], + [ + "▁Matthias", + -14.138421058654783 + ], + [ + "Advise", + -14.138429641723633 + ], + [ + "▁chimpanzee", + -14.1384916305542 + ], + [ + "▁diminutive", + -14.1384916305542 + ], + [ + "▁Clooney", + -14.138493537902832 + ], + [ + "▁Schumacher", + -14.138493537902832 + ], + [ + "▁baritone", + -14.138493537902832 + ], + [ + "▁Hanukkah", + -14.138495445251465 + ], + [ + "▁Reykjavik", + -14.13849639892578 + ], + [ + "▁rosacea", + -14.13849639892578 + ], + [ + "▁Stupid", + -14.13849925994873 + ], + [ + "▁circadian", + -14.13849925994873 + ], + [ + "▁lampshade", + -14.138519287109377 + ], + [ + "▁dubai", + -14.138541221618652 + ], + [ + "▁annexation", + -14.13854694366455 + ], + [ + "▁Deutschland", + -14.13855266571045 + ], + [ + "▁undiscovered", + -14.138556480407717 + ], + [ + "▁Armchair", + 
-14.138587951660156 + ], + [ + "Dee", + -14.138598442077637 + ], + [ + "CBA", + -14.138605117797852 + ], + [ + "▁unregistered", + -14.138605117797852 + ], + [ + "honored", + -14.13861846923828 + ], + [ + "Tur", + -14.138636589050291 + ], + [ + "▁perspiration", + -14.138638496398926 + ], + [ + "▁Bonita", + -14.138665199279783 + ], + [ + "▁rinsed", + -14.138671875 + ], + [ + "▁hideaway", + -14.13867473602295 + ], + [ + "▁Jeffery", + -14.138681411743164 + ], + [ + "▁Nem", + -14.138683319091797 + ], + [ + "▁carnage", + -14.138686180114746 + ], + [ + "▁totalitarian", + -14.138689041137695 + ], + [ + "936", + -14.138721466064451 + ], + [ + "▁corset", + -14.138733863830566 + ], + [ + "megawatt", + -14.13874626159668 + ], + [ + "▁Canucks", + -14.13880729675293 + ], + [ + "Carry", + -14.138813972473145 + ], + [ + "Yea", + -14.13890266418457 + ], + [ + "HV", + -14.138904571533203 + ], + [ + "Mall", + -14.13896656036377 + ], + [ + "▁Ritter", + -14.138972282409668 + ], + [ + "▁Gentlemen", + -14.138976097106934 + ], + [ + "Shu", + -14.139004707336426 + ], + [ + "▁Asbury", + -14.139080047607422 + ], + [ + "uang", + -14.139081001281738 + ], + [ + "▁Maxine", + -14.139130592346191 + ], + [ + "▁frowned", + -14.139195442199709 + ], + [ + "Mai", + -14.139204978942873 + ], + [ + "▁Upton", + -14.139230728149414 + ], + [ + "cruise", + -14.139274597167969 + ], + [ + "044", + -14.1392822265625 + ], + [ + "▁Holi", + -14.1392822265625 + ], + [ + "▁truthfully", + -14.139307975769045 + ], + [ + "▁april", + -14.13930892944336 + ], + [ + "Maps", + -14.139326095581056 + ], + [ + "▁Osama", + -14.139366149902344 + ], + [ + "366", + -14.13946533203125 + ], + [ + "BIO", + -14.139484405517578 + ], + [ + "▁FIN", + -14.139497756958008 + ], + [ + "▁astray", + -14.13955783843994 + ], + [ + "ITIES", + -14.139680862426758 + ], + [ + "UW", + -14.139692306518556 + ], + [ + "▁Willard", + -14.13973903656006 + ], + [ + "▁Collected", + -14.139764785766602 + ], + [ + "▁enveloped", + -14.13983917236328 + ], + [ + "▁peering", + -14.139866828918455 + ], + [ + "deg", + -14.139947891235352 + ], + [ + "▁wielding", + -14.14000129699707 + ], + [ + "▁Tourists", + -14.140047073364258 + ], + [ + "hex", + -14.140052795410156 + ], + [ + "▁Dul", + -14.140081405639648 + ], + [ + "mora", + -14.140100479125977 + ], + [ + "Elite", + -14.14011573791504 + ], + [ + "▁Briefing", + -14.140167236328123 + ], + [ + "alah", + -14.140204429626465 + ], + [ + "▁tau", + -14.140212059020996 + ], + [ + "coli", + -14.14022445678711 + ], + [ + "▁Remover", + -14.140239715576172 + ], + [ + "mik", + -14.14027214050293 + ], + [ + "644", + -14.140297889709473 + ], + [ + "▁waxes", + -14.14030647277832 + ], + [ + "962", + -14.140315055847168 + ], + [ + "▁Rabi", + -14.14033031463623 + ], + [ + "▁Cao", + -14.140337944030762 + ], + [ + "▁Slade", + -14.14035415649414 + ], + [ + "Sent", + -14.140400886535645 + ], + [ + "▁detain", + -14.140413284301758 + ], + [ + "▁Exercises", + -14.140425682067873 + ], + [ + "enzi", + -14.140427589416504 + ], + [ + "▁Avec", + -14.140478134155272 + ], + [ + "▁crevices", + -14.14060878753662 + ], + [ + "sdorf", + -14.14063835144043 + ], + [ + "▁Cougar", + -14.140667915344238 + ], + [ + "37)", + -14.140731811523438 + ], + [ + "▁Rockland", + -14.140742301940918 + ], + [ + "▁Hardin", + -14.140811920166016 + ], + [ + "8;", + -14.140836715698242 + ], + [ + "Variation", + -14.140841484069824 + ], + [ + "egi", + -14.140850067138672 + ], + [ + "▁piped", + -14.140878677368164 + ], + [ + "▁pesos", + -14.140891075134276 + ], + [ + "▁LSD", + -14.140926361083984 + 
], + [ + "▁skype", + -14.14097023010254 + ], + [ + "MING", + -14.140997886657717 + ], + [ + "▁2015:", + -14.141002655029297 + ], + [ + "▁Dose", + -14.141040802001951 + ], + [ + "▁outgrown", + -14.141075134277344 + ], + [ + "MJ", + -14.141106605529783 + ], + [ + "▁vignette", + -14.141133308410645 + ], + [ + "▁nugget", + -14.141140937805176 + ], + [ + "▁Kapil", + -14.14114475250244 + ], + [ + "▁fantas", + -14.14116096496582 + ], + [ + "▁Newsweek", + -14.141172409057615 + ], + [ + "▁Misc", + -14.141183853149414 + ], + [ + "▁communicators", + -14.141201972961426 + ], + [ + "BMC", + -14.141204833984377 + ], + [ + "rava", + -14.141222953796388 + ], + [ + "MENTS", + -14.141226768493652 + ], + [ + "▁Versatile", + -14.141243934631348 + ], + [ + "▁soulmate", + -14.14130973815918 + ], + [ + "▁Bomber", + -14.141331672668455 + ], + [ + "Nova", + -14.141386985778809 + ], + [ + "opy", + -14.141407012939451 + ], + [ + "ELD", + -14.141427040100098 + ], + [ + "▁Levant", + -14.141457557678224 + ], + [ + "▁Muh", + -14.141510009765623 + ], + [ + "Cert", + -14.141538619995115 + ], + [ + "▁hippie", + -14.141566276550291 + ], + [ + "194", + -14.141576766967772 + ], + [ + "nchen", + -14.141576766967772 + ], + [ + "▁Rhys", + -14.141581535339355 + ], + [ + "▁Ashok", + -14.141643524169922 + ], + [ + "cé", + -14.141711235046388 + ], + [ + "▁3.00", + -14.14177703857422 + ], + [ + "Meal", + -14.141783714294434 + ], + [ + "▁Liang", + -14.141793251037598 + ], + [ + "1:45", + -14.141804695129396 + ], + [ + "cells", + -14.141885757446287 + ], + [ + "▁Begins", + -14.141908645629885 + ], + [ + "▁FAIR", + -14.141937255859377 + ], + [ + "▁2′′", + -14.141968727111816 + ], + [ + "▁Olu", + -14.14198398590088 + ], + [ + "▁UNO", + -14.14200496673584 + ], + [ + "ANO", + -14.14208698272705 + ], + [ + "ollen", + -14.142155647277832 + ], + [ + "▁XPS", + -14.14217758178711 + ], + [ + "▁Vidya", + -14.142215728759766 + ], + [ + "▁frighten", + -14.142297744750977 + ], + [ + "Massage", + -14.142314910888672 + ], + [ + "▁Statutory", + -14.142330169677734 + ], + [ + "EXP", + -14.14245319366455 + ], + [ + "▁[1", + -14.142491340637209 + ], + [ + "▁stu", + -14.142577171325684 + ], + [ + "▁Clari", + -14.142580032348633 + ], + [ + "chak", + -14.142600059509276 + ], + [ + "▁Gardeners", + -14.142621040344238 + ], + [ + "▁Peek", + -14.142667770385742 + ], + [ + "grandfather", + -14.14267635345459 + ], + [ + "Strategic", + -14.14269733428955 + ], + [ + "reducing", + -14.142704963684082 + ], + [ + "Advice", + -14.142707824707031 + ], + [ + "ceramic", + -14.142707824707031 + ], + [ + "Tall", + -14.142708778381348 + ], + [ + "Skills", + -14.14272117614746 + ], + [ + "simply", + -14.142727851867676 + ], + [ + "▁mics", + -14.142727851867676 + ], + [ + "Transportation", + -14.14273452758789 + ], + [ + "▁Ambient", + -14.142744064331056 + ], + [ + "diamond", + -14.142752647399902 + ], + [ + "▁CEU", + -14.1427583694458 + ], + [ + "▁fixer", + -14.142765998840332 + ], + [ + "▁Primarily", + -14.142791748046877 + ], + [ + "▁conical", + -14.142827033996582 + ], + [ + "▁servi", + -14.142884254455566 + ], + [ + "gravity", + -14.1428861618042 + ], + [ + "▁Monta", + -14.142919540405272 + ], + [ + "RQ", + -14.142937660217283 + ], + [ + "▁Faso", + -14.142950057983398 + ], + [ + "▁bandages", + -14.142998695373535 + ], + [ + "1:18", + -14.143035888671877 + ], + [ + "▁Catcher", + -14.143051147460938 + ], + [ + "Oops", + -14.143106460571287 + ], + [ + "▁SMU", + -14.14311981201172 + ], + [ + "SBA", + -14.14313507080078 + ], + [ + "floating", + -14.143214225769045 + ], + [ + 
"sseldorf", + -14.143224716186523 + ], + [ + "▁Alicante", + -14.143231391906738 + ], + [ + "▁DELIVERY", + -14.143231391906738 + ], + [ + "▁Excelsior", + -14.143231391906738 + ], + [ + "▁Genevieve", + -14.143231391906738 + ], + [ + "▁Hampstead", + -14.143231391906738 + ], + [ + "▁Wrigley", + -14.143231391906738 + ], + [ + "▁defamatory", + -14.143231391906738 + ], + [ + "▁dilapidated", + -14.143231391906738 + ], + [ + "▁fifties", + -14.143231391906738 + ], + [ + "▁obscurity", + -14.143231391906738 + ], + [ + "▁persuading", + -14.143231391906738 + ], + [ + "▁reciprocate", + -14.143231391906738 + ], + [ + "▁umbilical", + -14.143231391906738 + ], + [ + "▁unenforceable", + -14.143231391906738 + ], + [ + "▁Transylvania", + -14.143232345581056 + ], + [ + "▁compulsion", + -14.14323616027832 + ], + [ + "▁Guzman", + -14.143237113952637 + ], + [ + "▁Hanuman", + -14.14323902130127 + ], + [ + "▁fervor", + -14.143239974975586 + ], + [ + "▁sprang", + -14.14324188232422 + ], + [ + "▁expiring", + -14.14324951171875 + ], + [ + "▁Schubert", + -14.143256187438965 + ], + [ + "▁Winthrop", + -14.143260955810549 + ], + [ + "▁corona", + -14.143270492553713 + ], + [ + "▁briefings", + -14.14327335357666 + ], + [ + "▁Bharti", + -14.14327907562256 + ], + [ + "▁Deskjet", + -14.143280029296877 + ], + [ + "▁Vicente", + -14.143282890319824 + ], + [ + "▁APPLY", + -14.143285751342772 + ], + [ + "▁Srinagar", + -14.14328670501709 + ], + [ + "▁expound", + -14.143288612365724 + ], + [ + "▁crutches", + -14.143296241760254 + ], + [ + "▁AEC", + -14.143318176269531 + ], + [ + "atable", + -14.143353462219238 + ], + [ + "▁Elbow", + -14.143355369567873 + ], + [ + "▁10.4", + -14.143359184265137 + ], + [ + "▁heatsink", + -14.143393516540527 + ], + [ + "▁IMAX", + -14.143404960632324 + ], + [ + "▁unsupported", + -14.143421173095703 + ], + [ + "▁enslaved", + -14.1434326171875 + ], + [ + "▁underwhelming", + -14.143441200256348 + ], + [ + "▁alienation", + -14.143460273742676 + ], + [ + "▁246", + -14.143479347229004 + ], + [ + "▁Belarusian", + -14.143484115600586 + ], + [ + "▁Pantone", + -14.14348602294922 + ], + [ + "hb", + -14.14352035522461 + ], + [ + "▁1040", + -14.143527030944824 + ], + [ + "▁Rainier", + -14.143531799316406 + ], + [ + "▁WMV", + -14.143543243408203 + ], + [ + "▁Olympiad", + -14.143560409545898 + ], + [ + "▁Effectiveness", + -14.143566131591797 + ], + [ + "Seeking", + -14.14357852935791 + ], + [ + "▁endoscopy", + -14.143595695495604 + ], + [ + "▁Marr", + -14.14369773864746 + ], + [ + "▁HGH", + -14.143706321716309 + ], + [ + "tsi", + -14.143726348876951 + ], + [ + "▁Newborn", + -14.143749237060549 + ], + [ + "▁ClickFunnels", + -14.14375114440918 + ], + [ + "cv", + -14.14376735687256 + ], + [ + "▁SIDE", + -14.143778800964355 + ], + [ + "terrorist", + -14.143781661987305 + ], + [ + "▁775", + -14.143786430358888 + ], + [ + "▁accelerators", + -14.143802642822266 + ], + [ + "▁disgusted", + -14.14381980895996 + ], + [ + "▁PUT", + -14.143840789794922 + ], + [ + "▁Blocked", + -14.143872261047363 + ], + [ + "▁recitals", + -14.143921852111816 + ], + [ + "Driven", + -14.14392375946045 + ], + [ + "operating", + -14.143935203552246 + ], + [ + "▁Aggies", + -14.1439847946167 + ], + [ + "zani", + -14.143985748291016 + ], + [ + "apt", + -14.14401912689209 + ], + [ + "▁Bernhard", + -14.144033432006836 + ], + [ + "▁Supernatural", + -14.144076347351074 + ], + [ + "▁bloodline", + -14.144081115722656 + ], + [ + "Unlock", + -14.144082069396973 + ], + [ + "▁Vickers", + -14.144119262695312 + ], + [ + "▁Fisherman", + -14.144150733947754 + ], + [ + 
"▁CDS", + -14.144152641296388 + ], + [ + "hyper", + -14.144158363342283 + ], + [ + "▁Templeton", + -14.144209861755373 + ], + [ + "▁healers", + -14.14423942565918 + ], + [ + "▁Westin", + -14.144246101379396 + ], + [ + "▁WASH", + -14.144378662109377 + ], + [ + "0.7%", + -14.14441204071045 + ], + [ + "epa", + -14.14441204071045 + ], + [ + "MARC", + -14.144417762756348 + ], + [ + "▁Unter", + -14.144475936889648 + ], + [ + "▁Ditch", + -14.144495964050291 + ], + [ + "oretic", + -14.144512176513672 + ], + [ + "▁AWARD", + -14.144512176513672 + ], + [ + "emme", + -14.144522666931152 + ], + [ + "▁Sita", + -14.144532203674316 + ], + [ + "▁Datasheet", + -14.144538879394531 + ], + [ + "▁deceit", + -14.144587516784668 + ], + [ + "▁encompassed", + -14.144625663757324 + ], + [ + "▁freaked", + -14.144685745239258 + ], + [ + "▁Burl", + -14.14468765258789 + ], + [ + "▁8.00", + -14.144689559936523 + ], + [ + "▁cruised", + -14.14473819732666 + ], + [ + "grab", + -14.144747734069824 + ], + [ + "▁compacted", + -14.14475154876709 + ], + [ + "▁Shots", + -14.14476490020752 + ], + [ + "ppen", + -14.144815444946287 + ], + [ + "▁Cheek", + -14.144901275634766 + ], + [ + "ckley", + -14.144947052001951 + ], + [ + "▁CIF", + -14.144972801208496 + ], + [ + "▁Flexibility", + -14.14504337310791 + ], + [ + "▁Replay", + -14.145075798034668 + ], + [ + "▁Starch", + -14.145097732543944 + ], + [ + "▁Ergo", + -14.145182609558104 + ], + [ + "dynamics", + -14.145185470581056 + ], + [ + "▁benzo", + -14.145197868347168 + ], + [ + "ggio", + -14.14522933959961 + ], + [ + "Alaska", + -14.14527416229248 + ], + [ + "▁Christoph", + -14.145319938659668 + ], + [ + "▁Bias", + -14.14534854888916 + ], + [ + "▁Lombardi", + -14.14534854888916 + ], + [ + "▁herding", + -14.145391464233398 + ], + [ + "▁mari", + -14.145462036132812 + ], + [ + "▁ENTER", + -14.145477294921877 + ], + [ + "llywood", + -14.145493507385254 + ], + [ + "reck", + -14.145503044128418 + ], + [ + "mama", + -14.145573616027832 + ], + [ + "tao", + -14.145605087280272 + ], + [ + "▁abbreviations", + -14.14561939239502 + ], + [ + "hlen", + -14.145682334899902 + ], + [ + "779", + -14.14568328857422 + ], + [ + "_____", + -14.145685195922852 + ], + [ + "heeled", + -14.145697593688965 + ], + [ + "▁Algo", + -14.145712852478027 + ], + [ + "▁Cura", + -14.14586353302002 + ], + [ + "▁MCS", + -14.14589023590088 + ], + [ + "▁oncologist", + -14.145931243896484 + ], + [ + "▁Enclosure", + -14.145943641662598 + ], + [ + "▁Cupcake", + -14.146020889282228 + ], + [ + "▁SEN", + -14.146023750305176 + ], + [ + "abh", + -14.146026611328123 + ], + [ + "Tang", + -14.146114349365234 + ], + [ + "chest", + -14.146151542663574 + ], + [ + "▁Kath", + -14.146224975585938 + ], + [ + "▁platelets", + -14.14626407623291 + ], + [ + "▁Crucial", + -14.146373748779297 + ], + [ + "Erin", + -14.14638900756836 + ], + [ + "▁93%", + -14.146453857421877 + ], + [ + "▁2017;", + -14.146466255187988 + ], + [ + "▁Soli", + -14.146498680114746 + ], + [ + "698", + -14.14655303955078 + ], + [ + "▁selectable", + -14.146596908569336 + ], + [ + "▁04/24/2019", + -14.146612167358398 + ], + [ + "▁61%", + -14.146682739257812 + ], + [ + "▁GK", + -14.146772384643556 + ], + [ + "CNC", + -14.146878242492676 + ], + [ + "nelli", + -14.146942138671877 + ], + [ + "estimate", + -14.146964073181152 + ], + [ + "▁harshly", + -14.147028923034668 + ], + [ + "▁drifts", + -14.147114753723145 + ], + [ + "Sets", + -14.147116661071776 + ], + [ + "989", + -14.147215843200684 + ], + [ + "▁Iraqis", + -14.147318840026855 + ], + [ + "atto", + -14.147381782531738 + ], + 
[ + "▁OZ", + -14.147401809692385 + ], + [ + "▁£35", + -14.147454261779783 + ], + [ + "▁Excited", + -14.147563934326172 + ], + [ + "chau", + -14.147574424743652 + ], + [ + "Famous", + -14.147578239440918 + ], + [ + "Oregon", + -14.147583961486816 + ], + [ + "ordinary", + -14.147626876831056 + ], + [ + "economy", + -14.147665977478027 + ], + [ + "5,000.", + -14.147765159606934 + ], + [ + "Silicon", + -14.147788047790527 + ], + [ + "chol", + -14.147889137268066 + ], + [ + "▁BPI", + -14.147974967956545 + ], + [ + "▁Canadiens", + -14.147993087768556 + ], + [ + "▁Guwahati", + -14.147993087768556 + ], + [ + "▁Hepburn", + -14.147993087768556 + ], + [ + "▁Sampson", + -14.147993087768556 + ], + [ + "▁baccarat", + -14.147993087768556 + ], + [ + "▁BEDROOM", + -14.147994041442873 + ], + [ + "▁MMORPG", + -14.147994041442873 + ], + [ + "▁gallbladder", + -14.147994041442873 + ], + [ + "▁hijab", + -14.147994041442873 + ], + [ + "▁antelope", + -14.147995948791504 + ], + [ + "▁perforation", + -14.14799690246582 + ], + [ + "Driscoll", + -14.147998809814451 + ], + [ + "▁generico", + -14.148000717163086 + ], + [ + "▁terrier", + -14.148001670837402 + ], + [ + "▁infarction", + -14.148003578186035 + ], + [ + "▁Taxis", + -14.148009300231934 + ], + [ + "▁cryptography", + -14.148011207580566 + ], + [ + "▁HubSpot", + -14.148015022277832 + ], + [ + "▁Insulated", + -14.148022651672363 + ], + [ + "▁serrated", + -14.148022651672363 + ], + [ + "▁DEA", + -14.148024559020996 + ], + [ + "WORTH", + -14.14803695678711 + ], + [ + "36)", + -14.14803981781006 + ], + [ + "▁Darryl", + -14.148041725158691 + ], + [ + "▁cheerleading", + -14.14804458618164 + ], + [ + "▁seg", + -14.148056030273438 + ], + [ + "▁Traction", + -14.148064613342283 + ], + [ + "▁Crusaders", + -14.148077011108398 + ], + [ + "metro", + -14.148082733154297 + ], + [ + "▁Coronado", + -14.148115158081056 + ], + [ + "▁degrading", + -14.148126602172852 + ], + [ + "▁arbitrage", + -14.148130416870115 + ], + [ + "define", + -14.148140907287598 + ], + [ + "▁coincidentally", + -14.148141860961914 + ], + [ + "▁EY", + -14.148157119750977 + ], + [ + "isc", + -14.148178100585938 + ], + [ + "▁SPAM", + -14.148215293884276 + ], + [ + "▁musicianship", + -14.14822006225586 + ], + [ + "▁supercomputer", + -14.148221015930176 + ], + [ + "▁Lambda", + -14.148221969604492 + ], + [ + "▁42\"", + -14.148235321044922 + ], + [ + "▁SEND", + -14.148235321044922 + ], + [ + "▁Muhammadu", + -14.148306846618652 + ], + [ + "▁Jillian", + -14.14833164215088 + ], + [ + "▁Prototype", + -14.148345947265623 + ], + [ + "inu", + -14.148444175720217 + ], + [ + "▁Celine", + -14.148487091064451 + ], + [ + "▁Darby", + -14.1484956741333 + ], + [ + "▁Shree", + -14.14853286743164 + ], + [ + "▁Callaway", + -14.148545265197754 + ], + [ + "▁nonexistent", + -14.14858055114746 + ], + [ + "943", + -14.148597717285156 + ], + [ + "▁frayed", + -14.148608207702637 + ], + [ + "▁Motorcycles", + -14.148609161376951 + ], + [ + "▁casks", + -14.14864444732666 + ], + [ + "▁fearsome", + -14.14871883392334 + ], + [ + "▁cleats", + -14.148725509643556 + ], + [ + "▁coworking", + -14.148829460144045 + ], + [ + "▁CEC", + -14.148850440979004 + ], + [ + "▁Edited", + -14.148877143859863 + ], + [ + "Pets", + -14.148906707763672 + ], + [ + "vind", + -14.148950576782228 + ], + [ + "▁CLS", + -14.148961067199709 + ], + [ + "Nest", + -14.1489896774292 + ], + [ + "cough", + -14.149054527282717 + ], + [ + "▁Luxor", + -14.149106979370115 + ], + [ + "▁1,2", + -14.149109840393066 + ], + [ + "▁Leno", + -14.149109840393066 + ], + [ + "uet", + 
-14.149144172668455 + ], + [ + "owska", + -14.149168968200684 + ], + [ + "▁Cera", + -14.149222373962402 + ], + [ + "▁Elderly", + -14.149230003356934 + ], + [ + "avar", + -14.149311065673828 + ], + [ + "died", + -14.14932346343994 + ], + [ + "fog", + -14.149331092834473 + ], + [ + "ordinating", + -14.149341583251951 + ], + [ + "▁initialize", + -14.149348258972168 + ], + [ + "LIM", + -14.14938259124756 + ], + [ + "▁nam", + -14.149418830871582 + ], + [ + "VG", + -14.149458885192873 + ], + [ + "▁Impress", + -14.149489402770996 + ], + [ + "▁Caul", + -14.149503707885742 + ], + [ + "MTC", + -14.149577140808104 + ], + [ + "▁monograph", + -14.149581909179688 + ], + [ + "▁DTS", + -14.149590492248535 + ], + [ + "authors", + -14.14965534210205 + ], + [ + "▁Vern", + -14.14971160888672 + ], + [ + "▁powdery", + -14.149751663208008 + ], + [ + "IGN", + -14.14975357055664 + ], + [ + "▁Cortez", + -14.149845123291016 + ], + [ + "▁underlie", + -14.149849891662598 + ], + [ + "▁HAL", + -14.149932861328123 + ], + [ + "▁captioning", + -14.14996337890625 + ], + [ + "▁Khi", + -14.149971961975098 + ], + [ + "▁Libre", + -14.149979591369627 + ], + [ + "▁DAW", + -14.150043487548828 + ], + [ + "▁Shutter", + -14.1500825881958 + ], + [ + "▁Beit", + -14.15010929107666 + ], + [ + "prim", + -14.150156021118164 + ], + [ + "▁culminate", + -14.150161743164062 + ], + [ + "▁Skateboard", + -14.15025234222412 + ], + [ + "holt", + -14.150327682495115 + ], + [ + "▁gimmicks", + -14.150367736816406 + ], + [ + "▁Ecommerce", + -14.150407791137695 + ], + [ + "onen", + -14.150432586669922 + ], + [ + "▁2009;", + -14.150460243225098 + ], + [ + "▁Vigo", + -14.150463104248049 + ], + [ + "▁Khalil", + -14.150486946105955 + ], + [ + "▁Hoy", + -14.150514602661133 + ], + [ + "▁somatic", + -14.15054416656494 + ], + [ + "▁REAR", + -14.150562286376951 + ], + [ + "▁Rami", + -14.150594711303713 + ], + [ + "▁Candida", + -14.150616645812988 + ], + [ + "setup", + -14.150628089904783 + ], + [ + "▁pomp", + -14.150651931762695 + ], + [ + "atura", + -14.150663375854492 + ], + [ + "▁0.1%", + -14.15066909790039 + ], + [ + "SHE", + -14.150736808776855 + ], + [ + "▁botanicals", + -14.150785446166992 + ], + [ + "▁puffs", + -14.15079116821289 + ], + [ + "▁Jameson", + -14.150832176208496 + ], + [ + "▁$$", + -14.15084457397461 + ], + [ + "nomy", + -14.15087890625 + ], + [ + "▁Danes", + -14.150897026062012 + ], + [ + "–10", + -14.150923728942873 + ], + [ + "▁Tou", + -14.150973320007324 + ], + [ + "silicate", + -14.151114463806152 + ], + [ + "Tee", + -14.151159286499023 + ], + [ + "▁Aida", + -14.15119743347168 + ], + [ + "▁togetherness", + -14.15122127532959 + ], + [ + "▁Classifieds", + -14.151227951049805 + ], + [ + "▁longed", + -14.15122890472412 + ], + [ + "65)", + -14.151256561279297 + ], + [ + "▁Musician", + -14.151273727416992 + ], + [ + "▁unter", + -14.15130615234375 + ], + [ + "iating", + -14.151348114013672 + ], + [ + "Shoe", + -14.151403427124023 + ], + [ + "vate", + -14.15145492553711 + ], + [ + "▁WeChat", + -14.151491165161133 + ], + [ + "▁passers", + -14.151564598083496 + ], + [ + "▁Boomer", + -14.15164566040039 + ], + [ + "Teams", + -14.151659965515137 + ], + [ + "▁Noor", + -14.151687622070312 + ], + [ + "Hunt", + -14.151817321777344 + ], + [ + "gado", + -14.151817321777344 + ], + [ + "▁hallucinations", + -14.151832580566406 + ], + [ + "▁intakes", + -14.15183925628662 + ], + [ + "▁pushy", + -14.151866912841797 + ], + [ + "▁artichokes", + -14.151957511901855 + ], + [ + "▁shadowy", + -14.152024269104004 + ], + [ + "▁reinvest", + -14.152044296264648 + ], + [ + 
"▁Hamptons", + -14.152069091796877 + ], + [ + "Gran", + -14.152080535888672 + ], + [ + "▁Continuity", + -14.152107238769531 + ], + [ + "▁touristic", + -14.152146339416504 + ], + [ + "▁Louisa", + -14.152179718017578 + ], + [ + "Practical", + -14.152227401733398 + ], + [ + "▁Periodic", + -14.15225315093994 + ], + [ + "▁Projector", + -14.152297973632812 + ], + [ + "kro", + -14.152328491210938 + ], + [ + "▁Herzegovina", + -14.15234661102295 + ], + [ + "▁Schla", + -14.152438163757324 + ], + [ + "Preparing", + -14.15245246887207 + ], + [ + "Genuine", + -14.152470588684082 + ], + [ + "Python", + -14.15248203277588 + ], + [ + "URING", + -14.152499198913574 + ], + [ + "▁Crusade", + -14.152543067932127 + ], + [ + "Senate", + -14.152552604675291 + ], + [ + "feedback", + -14.152555465698242 + ], + [ + "Labour", + -14.15258502960205 + ], + [ + "▁4.30", + -14.152658462524414 + ], + [ + "mea", + -14.152679443359377 + ], + [ + "▁fluke", + -14.152698516845703 + ], + [ + "Brit", + -14.15270709991455 + ], + [ + "colon", + -14.152711868286133 + ], + [ + "▁algebraic", + -14.15273094177246 + ], + [ + "▁Breaker", + -14.152745246887209 + ], + [ + "▁synergistic", + -14.152753829956056 + ], + [ + "▁Brandi", + -14.15276336669922 + ], + [ + "▁Atletico", + -14.152777671813965 + ], + [ + "▁Benghazi", + -14.152777671813965 + ], + [ + "▁Gerrard", + -14.152777671813965 + ], + [ + "▁Obispo", + -14.152777671813965 + ], + [ + "▁overcrowding", + -14.152777671813965 + ], + [ + "▁Felicity", + -14.15277862548828 + ], + [ + "▁Nab", + -14.15278148651123 + ], + [ + "▁Wexford", + -14.15278148651123 + ], + [ + "▁curvy", + -14.152783393859863 + ], + [ + "▁cortical", + -14.15278434753418 + ], + [ + "▁DDoS", + -14.152785301208496 + ], + [ + "▁distilleries", + -14.152790069580078 + ], + [ + "▁ravine", + -14.152791976928713 + ], + [ + "▁Sykes", + -14.152792930603027 + ], + [ + "▁snagged", + -14.152792930603027 + ], + [ + "▁thoroughfare", + -14.152807235717772 + ], + [ + "▁Bathurst", + -14.152812957763672 + ], + [ + "▁Impala", + -14.15281581878662 + ], + [ + "▁Mallory", + -14.152833938598633 + ], + [ + "▁Breakthrough", + -14.15284252166748 + ], + [ + "▁binaries", + -14.152859687805176 + ], + [ + "▁interfacing", + -14.152884483337402 + ], + [ + "▁Citroen", + -14.152891159057615 + ], + [ + "▁multivitamin", + -14.1528959274292 + ], + [ + "▁ASM", + -14.152897834777832 + ], + [ + "JK", + -14.152939796447754 + ], + [ + "▁Plainfield", + -14.152948379516602 + ], + [ + "Deals", + -14.15295124053955 + ], + [ + "082", + -14.152998924255373 + ], + [ + "1.6%", + -14.153033256530762 + ], + [ + "PJ", + -14.153040885925291 + ], + [ + "▁Locked", + -14.153042793273926 + ], + [ + "▁lakefront", + -14.153048515319824 + ], + [ + "digest", + -14.153074264526367 + ], + [ + "▁westbound", + -14.153087615966797 + ], + [ + "▁Airborne", + -14.153088569641112 + ], + [ + "▁Dealership", + -14.153096199035645 + ], + [ + "▁paddlers", + -14.15310001373291 + ], + [ + "▁paddy", + -14.153100967407228 + ], + [ + "▁Gonna", + -14.153132438659668 + ], + [ + "Rocket", + -14.153181076049805 + ], + [ + "▁Workspace", + -14.15318489074707 + ], + [ + "SID", + -14.153203964233398 + ], + [ + "▁Fastest", + -14.153221130371094 + ], + [ + "▁Mayhem", + -14.153240203857422 + ], + [ + "▁trickier", + -14.153277397155762 + ], + [ + "▁periodical", + -14.153291702270508 + ], + [ + "Producer", + -14.153301239013672 + ], + [ + "boosting", + -14.15335178375244 + ], + [ + "▁evils", + -14.15335750579834 + ], + [ + "▁steakhouse", + -14.153362274169922 + ], + [ + "▁quiver", + -14.15341567993164 + ], + [ + 
"Blake", + -14.153545379638672 + ], + [ + "▁EMAIL", + -14.153700828552246 + ], + [ + "cide", + -14.153705596923828 + ], + [ + "▁perished", + -14.153716087341309 + ], + [ + "iously", + -14.15373706817627 + ], + [ + "Merc", + -14.153767585754396 + ], + [ + "doesn", + -14.153833389282228 + ], + [ + "▁Alfie", + -14.153878211975098 + ], + [ + "▁Janus", + -14.15388298034668 + ], + [ + "(8)", + -14.15390968322754 + ], + [ + "▁straightened", + -14.153940200805664 + ], + [ + "▁POR", + -14.15399169921875 + ], + [ + "▁Litter", + -14.154004096984863 + ], + [ + "2.9%", + -14.154023170471191 + ], + [ + "▁imparting", + -14.154064178466797 + ], + [ + "▁Dha", + -14.154078483581545 + ], + [ + "▁bunion", + -14.154129981994627 + ], + [ + "heit", + -14.154131889343262 + ], + [ + "▁Issa", + -14.154150009155272 + ], + [ + "ovski", + -14.154193878173828 + ], + [ + "▁callous", + -14.154207229614258 + ], + [ + "912", + -14.15428352355957 + ], + [ + "▁filaments", + -14.154318809509276 + ], + [ + "Plug", + -14.154343605041504 + ], + [ + "▁productively", + -14.15436840057373 + ], + [ + "▁roughness", + -14.154369354248049 + ], + [ + "▁masterclass", + -14.154390335083008 + ], + [ + "▁241", + -14.154401779174805 + ], + [ + "▁thefts", + -14.15446949005127 + ], + [ + "▁CUP", + -14.15455150604248 + ], + [ + "▁sprites", + -14.154645919799805 + ], + [ + "DIN", + -14.154657363891602 + ], + [ + "Raj", + -14.154714584350586 + ], + [ + "▁Trot", + -14.154719352722168 + ], + [ + "▁Healy", + -14.15474796295166 + ], + [ + "▁Fatal", + -14.15475368499756 + ], + [ + "▁kt", + -14.15479850769043 + ], + [ + "▁ruffles", + -14.154831886291504 + ], + [ + "▁Gibb", + -14.154844284057615 + ], + [ + "▁graphically", + -14.154860496520996 + ], + [ + "▁Kalam", + -14.154946327209473 + ], + [ + "▁978-", + -14.154963493347168 + ], + [ + "▁Martyr", + -14.154963493347168 + ], + [ + "rms", + -14.155073165893556 + ], + [ + "tran", + -14.155122756958008 + ], + [ + "rious", + -14.155131340026855 + ], + [ + "▁Compa", + -14.15513515472412 + ], + [ + "▁Nazar", + -14.155207633972168 + ], + [ + "▁letterpress", + -14.155213356018066 + ], + [ + "▁LEO", + -14.15522289276123 + ], + [ + "Tara", + -14.155253410339355 + ], + [ + "▁Mako", + -14.15526294708252 + ], + [ + "▁skirting", + -14.155274391174316 + ], + [ + "–12", + -14.155343055725098 + ], + [ + "▁cookery", + -14.155362129211426 + ], + [ + "▁Surfing", + -14.155388832092283 + ], + [ + "1.4%", + -14.155394554138184 + ], + [ + "▁Wants", + -14.15541172027588 + ], + [ + "▁53%", + -14.155540466308594 + ], + [ + "▁$50.00", + -14.155543327331545 + ], + [ + "▁SBC", + -14.155590057373049 + ], + [ + "▁dearth", + -14.15560531616211 + ], + [ + "▁Hashem", + -14.15561580657959 + ], + [ + "▁Capsule", + -14.155631065368652 + ], + [ + "dub", + -14.155691146850586 + ], + [ + "▁4+", + -14.15571403503418 + ], + [ + "▁Emissions", + -14.15571403503418 + ], + [ + "YY", + -14.155747413635254 + ], + [ + "▁RUB", + -14.15575122833252 + ], + [ + "▁TPC", + -14.155781745910645 + ], + [ + "▁genomes", + -14.155811309814451 + ], + [ + "▁cuticle", + -14.15585708618164 + ], + [ + "▁Settle", + -14.155864715576172 + ], + [ + "▁Portman", + -14.15586757659912 + ], + [ + "uela", + -14.156012535095217 + ], + [ + "▁SDN", + -14.156017303466797 + ], + [ + "▁hexagonal", + -14.156036376953123 + ], + [ + "▁Glock", + -14.156081199645996 + ], + [ + "ее", + -14.156085014343262 + ], + [ + "▁Brei", + -14.156118392944336 + ], + [ + "▁safaris", + -14.156184196472168 + ], + [ + "▁intensively", + -14.15632438659668 + ], + [ + "radical", + -14.156328201293944 + ], + [ 
+ "▁majorly", + -14.15639877319336 + ], + [ + "PLA", + -14.15640354156494 + ], + [ + "▁rf", + -14.156502723693848 + ], + [ + "693", + -14.156519889831545 + ], + [ + "NSF", + -14.156522750854492 + ], + [ + "views", + -14.156529426574709 + ], + [ + "leon", + -14.156542778015137 + ], + [ + "▁6.9", + -14.156564712524414 + ], + [ + "099", + -14.156600952148438 + ], + [ + "▁reeds", + -14.156633377075195 + ], + [ + "Oral", + -14.156649589538574 + ], + [ + "▁offensively", + -14.156655311584473 + ], + [ + "deco", + -14.156682014465332 + ], + [ + "▁Carriers", + -14.156702041625977 + ], + [ + "▁Nite", + -14.156723022460938 + ], + [ + "▁stumbles", + -14.156745910644531 + ], + [ + "atom", + -14.156758308410645 + ], + [ + "▁MKV", + -14.156842231750488 + ], + [ + "fah", + -14.156880378723145 + ], + [ + "itten", + -14.156883239746094 + ], + [ + "APC", + -14.156935691833496 + ], + [ + "▁assimilate", + -14.156983375549316 + ], + [ + "▁refractory", + -14.15703582763672 + ], + [ + "landers", + -14.157045364379885 + ], + [ + "▁customizations", + -14.15705108642578 + ], + [ + "ampere", + -14.157064437866213 + ], + [ + "chronic", + -14.15707778930664 + ], + [ + "▁grenades", + -14.15721321105957 + ], + [ + "▁reinstate", + -14.1572265625 + ], + [ + "Exhibit", + -14.157312393188477 + ], + [ + "▁Remov", + -14.157353401184082 + ], + [ + "▁atrophy", + -14.15736484527588 + ], + [ + "Competition", + -14.157371520996094 + ], + [ + "Temperature", + -14.157378196716309 + ], + [ + "Existing", + -14.15738582611084 + ], + [ + "results", + -14.157504081726074 + ], + [ + "Tyler", + -14.1575288772583 + ], + [ + "dero", + -14.157551765441896 + ], + [ + "▁Fayette", + -14.157573699951172 + ], + [ + "▁Glycerin", + -14.157585144042969 + ], + [ + "▁Grizzlies", + -14.157585144042969 + ], + [ + "▁dolomite", + -14.157585144042969 + ], + [ + "▁inimitable", + -14.157585144042969 + ], + [ + "▁insatiable", + -14.157585144042969 + ], + [ + "▁jojoba", + -14.157585144042969 + ], + [ + "▁lecturing", + -14.157585144042969 + ], + [ + "▁mellitus", + -14.157586097717283 + ], + [ + "▁recuperate", + -14.157586097717283 + ], + [ + "▁Stonehenge", + -14.157594680786133 + ], + [ + "▁unorthodox", + -14.157601356506348 + ], + [ + "▁Aran", + -14.157602310180664 + ], + [ + "▁cyanide", + -14.157604217529297 + ], + [ + "▁cynicism", + -14.157604217529297 + ], + [ + "▁simulating", + -14.157604217529297 + ], + [ + "▁GIRL", + -14.157611846923828 + ], + [ + "▁spelt", + -14.157618522644045 + ], + [ + "▁clotting", + -14.157623291015623 + ], + [ + "▁Assessing", + -14.157632827758787 + ], + [ + "▁connoisseurs", + -14.157648086547852 + ], + [ + "▁nachos", + -14.157649993896484 + ], + [ + "▁aviator", + -14.157651901245115 + ], + [ + "populated", + -14.157657623291016 + ], + [ + "▁dichotomy", + -14.157675743103027 + ], + [ + "Lac", + -14.157678604125977 + ], + [ + "▁Polaroid", + -14.15768337249756 + ], + [ + "▁Rutland", + -14.157684326171877 + ], + [ + "▁Accessory", + -14.15770149230957 + ], + [ + "wane", + -14.157703399658203 + ], + [ + "▁2022,", + -14.157706260681152 + ], + [ + "▁antlers", + -14.157708168029783 + ], + [ + "▁gripped", + -14.157724380493164 + ], + [ + "Jitsu", + -14.15776252746582 + ], + [ + "seriously", + -14.15777587890625 + ], + [ + "▁Visible", + -14.157780647277832 + ], + [ + "▁Sparkling", + -14.157781600952148 + ], + [ + "▁polarity", + -14.157807350158691 + ], + [ + "▁fallacy", + -14.15782642364502 + ], + [ + "▁Carlyle", + -14.157851219177246 + ], + [ + "Berg", + -14.157865524291992 + ], + [ + "ETHER", + -14.157896995544434 + ], + [ + "▁battleground", 
+ -14.157922744750977 + ], + [ + "▁sweats", + -14.157959938049316 + ], + [ + "▁19\"", + -14.157968521118164 + ], + [ + "▁unzip", + -14.157968521118164 + ], + [ + "▁Tok", + -14.15797519683838 + ], + [ + "▁Chron", + -14.158000946044922 + ], + [ + "Objective", + -14.158008575439451 + ], + [ + "▁EMR", + -14.158073425292969 + ], + [ + "▁Sutter", + -14.158123016357422 + ], + [ + "▁ark", + -14.158207893371582 + ], + [ + "▁CLE", + -14.158230781555176 + ], + [ + "▁spurs", + -14.158239364624023 + ], + [ + "Brandon", + -14.158257484436035 + ], + [ + "▁stomping", + -14.15827751159668 + ], + [ + "weld", + -14.158296585083008 + ], + [ + "/2.8", + -14.158308029174805 + ], + [ + "▁roasts", + -14.158331871032717 + ], + [ + "▁Instance", + -14.158344268798828 + ], + [ + "▁Gossip", + -14.158361434936523 + ], + [ + "▁endangering", + -14.158380508422852 + ], + [ + "▁intermittently", + -14.1583833694458 + ], + [ + "Released", + -14.158414840698242 + ], + [ + "▁USAID", + -14.158422470092772 + ], + [ + "▁dojo", + -14.158443450927734 + ], + [ + "▁sloth", + -14.158559799194336 + ], + [ + "▁Scary", + -14.158581733703612 + ], + [ + "▁432", + -14.158604621887209 + ], + [ + "▁cutouts", + -14.158610343933104 + ], + [ + "▁housework", + -14.158706665039062 + ], + [ + "Dash", + -14.15874195098877 + ], + [ + "Somehow", + -14.158757209777832 + ], + [ + "▁transcendent", + -14.158783912658691 + ], + [ + "▁Opel", + -14.158806800842283 + ], + [ + "▁Cic", + -14.158867835998535 + ], + [ + "869", + -14.15888786315918 + ], + [ + "missing", + -14.158905029296877 + ], + [ + "boston", + -14.158912658691406 + ], + [ + "▁Madam", + -14.158921241760254 + ], + [ + "▁15.6", + -14.15895175933838 + ], + [ + "resume", + -14.15895652770996 + ], + [ + "▁Assume", + -14.15902042388916 + ], + [ + "Reduced", + -14.159038543701172 + ], + [ + "▁touristy", + -14.159066200256348 + ], + [ + "Medi", + -14.159070014953612 + ], + [ + "gif", + -14.15921688079834 + ], + [ + "Provider", + -14.159276008605955 + ], + [ + "PRA", + -14.159334182739258 + ], + [ + "▁OPC", + -14.159357070922852 + ], + [ + "efer", + -14.159367561340332 + ], + [ + "▁CUSTOM", + -14.159457206726074 + ], + [ + "▁maximized", + -14.159462928771973 + ], + [ + "▁UAB", + -14.159507751464844 + ], + [ + "▁dressage", + -14.159523010253906 + ], + [ + "gyn", + -14.15955924987793 + ], + [ + "▁Clu", + -14.159564971923828 + ], + [ + "Beautifully", + -14.159570693969728 + ], + [ + "boul", + -14.159612655639648 + ], + [ + "▁Zab", + -14.159640312194824 + ], + [ + "▁Shab", + -14.159648895263672 + ], + [ + "887", + -14.159687995910645 + ], + [ + "NAT", + -14.159700393676758 + ], + [ + "Zach", + -14.159724235534668 + ], + [ + "PIN", + -14.159772872924805 + ], + [ + "elecommunication", + -14.159819602966309 + ], + [ + "▁£18", + -14.159929275512695 + ], + [ + "▁beachside", + -14.159971237182615 + ], + [ + "▁Marko", + -14.15997314453125 + ], + [ + "▁2.5\"", + -14.159982681274414 + ], + [ + "▁colle", + -14.160125732421877 + ], + [ + "▁balancer", + -14.160181045532228 + ], + [ + "eagle", + -14.16022491455078 + ], + [ + "516", + -14.16025733947754 + ], + [ + "▁Devine", + -14.1603422164917 + ], + [ + "▁bearded", + -14.160348892211914 + ], + [ + "ESC", + -14.160369873046877 + ], + [ + "▁Herd", + -14.16038417816162 + ], + [ + "▁Muth", + -14.160412788391112 + ], + [ + "▁Renting", + -14.160439491271973 + ], + [ + "euil", + -14.16047191619873 + ], + [ + "▁gaping", + -14.160492897033691 + ], + [ + "038", + -14.160515785217283 + ], + [ + "▁wad", + -14.160534858703612 + ], + [ + "▁mite", + -14.16054630279541 + ], + [ + 
"uco", + -14.16055679321289 + ], + [ + "▁Demons", + -14.160600662231444 + ], + [ + "846", + -14.160606384277344 + ], + [ + "hé", + -14.16071319580078 + ], + [ + "▁Mund", + -14.160740852355955 + ], + [ + "▁Blasting", + -14.160746574401855 + ], + [ + "LAP", + -14.16076946258545 + ], + [ + "▁Hosts", + -14.160816192626951 + ], + [ + "▁Simplicity", + -14.16088581085205 + ], + [ + "IVA", + -14.160886764526367 + ], + [ + "▁Rotation", + -14.160892486572266 + ], + [ + "▁Radon", + -14.16096305847168 + ], + [ + "ammon", + -14.160964965820312 + ], + [ + "▁lobbyist", + -14.160977363586426 + ], + [ + "▁underfoot", + -14.160995483398438 + ], + [ + "grav", + -14.161001205444336 + ], + [ + "FID", + -14.161004066467283 + ], + [ + "▁broil", + -14.161086082458496 + ], + [ + "▁1852", + -14.161092758178713 + ], + [ + "▁counterpoint", + -14.16111946105957 + ], + [ + "NOR", + -14.161124229431152 + ], + [ + "▁realign", + -14.161185264587402 + ], + [ + "▁devs", + -14.161218643188477 + ], + [ + "▁condense", + -14.161253929138184 + ], + [ + "▁Tumble", + -14.161280632019045 + ], + [ + "dyn", + -14.161331176757812 + ], + [ + "▁spammers", + -14.161375045776367 + ], + [ + "▁Westside", + -14.161447525024414 + ], + [ + "funny", + -14.161524772644045 + ], + [ + "COMING", + -14.161572456359863 + ], + [ + "egu", + -14.16159439086914 + ], + [ + "▁Selenium", + -14.16174030303955 + ], + [ + "1940", + -14.161809921264648 + ], + [ + "▁1-10", + -14.161867141723633 + ], + [ + "▁Ransomware", + -14.161870956420898 + ], + [ + "Cry", + -14.161874771118164 + ], + [ + "▁Jee", + -14.16192626953125 + ], + [ + "omat", + -14.161989212036133 + ], + [ + "▁disarm", + -14.161993026733398 + ], + [ + "▁saunas", + -14.162039756774902 + ], + [ + "▁milking", + -14.1620512008667 + ], + [ + "▁patriarch", + -14.162052154541016 + ], + [ + "ое", + -14.16209316253662 + ], + [ + "841", + -14.162105560302734 + ], + [ + "HHS", + -14.162118911743164 + ], + [ + "▁##", + -14.162139892578123 + ], + [ + "▁AKC", + -14.162301063537598 + ], + [ + "▁bombard", + -14.16232681274414 + ], + [ + "▁Tropic", + -14.162342071533203 + ], + [ + "Stephanie", + -14.162352561950684 + ], + [ + "Division", + -14.162385940551758 + ], + [ + "▁Percent", + -14.16240119934082 + ], + [ + "Challenging", + -14.162416458129885 + ], + [ + "▁Cleopatra", + -14.162416458129885 + ], + [ + "▁Dmitry", + -14.162416458129885 + ], + [ + "▁dehumidifier", + -14.162416458129885 + ], + [ + "▁deprecated", + -14.162416458129885 + ], + [ + "▁Alistair", + -14.1624174118042 + ], + [ + "▁Schroeder", + -14.1624174118042 + ], + [ + "▁Casablanca", + -14.162418365478516 + ], + [ + "▁REQUIRED", + -14.162419319152832 + ], + [ + "▁Clapton", + -14.162420272827148 + ], + [ + "▁muslin", + -14.162420272827148 + ], + [ + "▁mucous", + -14.16242218017578 + ], + [ + "▁Reclaimed", + -14.16242504119873 + ], + [ + "▁TechCrunch", + -14.16242504119873 + ], + [ + "▁telemarketing", + -14.162426948547363 + ], + [ + "▁louver", + -14.162429809570312 + ], + [ + "▁Peachtree", + -14.162435531616213 + ], + [ + "▁abbey", + -14.162435531616213 + ], + [ + "▁slamming", + -14.162435531616213 + ], + [ + "▁Dupont", + -14.162436485290527 + ], + [ + "Struggling", + -14.16244125366211 + ], + [ + "▁Prosperity", + -14.162452697753906 + ], + [ + "transferable", + -14.162480354309082 + ], + [ + "Carefully", + -14.162481307983398 + ], + [ + "▁prefabricated", + -14.16248607635498 + ], + [ + "▁undermines", + -14.16251277923584 + ], + [ + "▁Greenpeace", + -14.162516593933104 + ], + [ + "▁Dietrich", + -14.162519454956056 + ], + [ + "▁ePub", + 
-14.162532806396484 + ], + [ + "▁Perpetual", + -14.162538528442385 + ], + [ + "marital", + -14.162551879882812 + ], + [ + "Mentor", + -14.16257667541504 + ], + [ + "visor", + -14.16259479522705 + ], + [ + "fibro", + -14.162623405456545 + ], + [ + "▁objectionable", + -14.162635803222656 + ], + [ + "RELATED", + -14.162700653076172 + ], + [ + "▁megawatts", + -14.162704467773438 + ], + [ + "▁rigour", + -14.162717819213867 + ], + [ + "gah", + -14.162742614746094 + ], + [ + "▁hijacked", + -14.162793159484863 + ], + [ + "▁nineties", + -14.162793159484863 + ], + [ + "▁occured", + -14.16280460357666 + ], + [ + "7200", + -14.162824630737305 + ], + [ + "▁imbued", + -14.162834167480469 + ], + [ + "▁hyphen", + -14.162858963012695 + ], + [ + "▁Sek", + -14.16286563873291 + ], + [ + "▁1998)", + -14.16289234161377 + ], + [ + "▁Larkin", + -14.162956237792969 + ], + [ + "▁Rides", + -14.162978172302246 + ], + [ + "▁NAI", + -14.162986755371094 + ], + [ + "Paid", + -14.16305923461914 + ], + [ + "▁octave", + -14.163076400756836 + ], + [ + "itious", + -14.163084030151367 + ], + [ + "▁Sith", + -14.163128852844238 + ], + [ + "▁Grail", + -14.163171768188477 + ], + [ + "▁enriches", + -14.163207054138184 + ], + [ + "▁Eisen", + -14.163227081298828 + ], + [ + "▁headrest", + -14.163246154785156 + ], + [ + "wold", + -14.16327667236328 + ], + [ + "▁fireproof", + -14.163278579711914 + ], + [ + "▁Brandt", + -14.163326263427734 + ], + [ + "statin", + -14.163336753845217 + ], + [ + "▁Mada", + -14.163339614868164 + ], + [ + "ulia", + -14.163363456726074 + ], + [ + "▁Rajiv", + -14.16338348388672 + ], + [ + "▁erasing", + -14.163400650024414 + ], + [ + "▁£250", + -14.16343879699707 + ], + [ + "▁haben", + -14.16347312927246 + ], + [ + "▁touting", + -14.163495063781738 + ], + [ + "crim", + -14.163496971130373 + ], + [ + "▁ketones", + -14.163508415222168 + ], + [ + "mez", + -14.163509368896484 + ], + [ + "▁Albans", + -14.163515090942385 + ], + [ + "▁DirectX", + -14.163541793823242 + ], + [ + "nope", + -14.163559913635254 + ], + [ + "▁STYLE", + -14.163585662841797 + ], + [ + "▁heartily", + -14.163681030273438 + ], + [ + "kane", + -14.163710594177246 + ], + [ + "iscal", + -14.163721084594728 + ], + [ + "MAD", + -14.163747787475586 + ], + [ + "▁CVT", + -14.163808822631836 + ], + [ + "▁Transcript", + -14.163833618164062 + ], + [ + "▁Cheque", + -14.163835525512695 + ], + [ + "▁Niall", + -14.16384983062744 + ], + [ + "▁slashing", + -14.163861274719238 + ], + [ + "▁Fewer", + -14.163890838623049 + ], + [ + "KH", + -14.163894653320312 + ], + [ + "▁TCS", + -14.163944244384766 + ], + [ + "816", + -14.163947105407717 + ], + [ + "▁domino", + -14.163957595825195 + ], + [ + "▁checkered", + -14.163995742797852 + ], + [ + "▁USF", + -14.16403579711914 + ], + [ + "▁Mera", + -14.164040565490724 + ], + [ + "pelling", + -14.164047241210938 + ], + [ + "deh", + -14.164081573486328 + ], + [ + "▁potions", + -14.164085388183594 + ], + [ + "▁suicides", + -14.164106369018556 + ], + [ + "4:00", + -14.16411590576172 + ], + [ + "▁Kass", + -14.164117813110352 + ], + [ + "cement", + -14.164140701293944 + ], + [ + "▁24,000", + -14.164178848266602 + ], + [ + "▁Aces", + -14.164189338684082 + ], + [ + "673", + -14.16420078277588 + ], + [ + "ORR", + -14.16421127319336 + ], + [ + "▁Plex", + -14.164212226867676 + ], + [ + "▁Brink", + -14.164260864257812 + ], + [ + "/01", + -14.164278030395508 + ], + [ + "▁Capsules", + -14.16439437866211 + ], + [ + "▁purr", + -14.164405822753906 + ], + [ + "▁CSM", + -14.16443157196045 + ], + [ + "▁nokia", + -14.164438247680664 + ], + [ + 
"▁Skyscanner", + -14.16452407836914 + ], + [ + "ician", + -14.164551734924316 + ], + [ + "▁peppered", + -14.164630889892578 + ], + [ + "▁knits", + -14.164666175842283 + ], + [ + "–7", + -14.164717674255373 + ], + [ + "▁northward", + -14.1647310256958 + ], + [ + "089", + -14.164774894714355 + ], + [ + "cancel", + -14.164819717407228 + ], + [ + "ugger", + -14.164939880371094 + ], + [ + "45)", + -14.16495132446289 + ], + [ + "▁zine", + -14.164972305297852 + ], + [ + "uska", + -14.164973258972168 + ], + [ + "conic", + -14.164998054504396 + ], + [ + "▁homestay", + -14.165083885192873 + ], + [ + "▁townships", + -14.165101051330566 + ], + [ + "seconds", + -14.165130615234377 + ], + [ + "nson", + -14.165172576904297 + ], + [ + "▁Amal", + -14.165199279785156 + ], + [ + "▁oxides", + -14.16522216796875 + ], + [ + "vard", + -14.165283203125 + ], + [ + "▁giggles", + -14.16529655456543 + ], + [ + "america", + -14.165297508239746 + ], + [ + "▁Tamar", + -14.165367126464844 + ], + [ + "izzi", + -14.165375709533691 + ], + [ + "▁sprinting", + -14.165375709533691 + ], + [ + "▁heartland", + -14.165417671203612 + ], + [ + "▁sears", + -14.165526390075684 + ], + [ + "▁Clipper", + -14.165536880493164 + ], + [ + "bund", + -14.165559768676758 + ], + [ + "▁Counting", + -14.165579795837402 + ], + [ + "Deposit", + -14.165818214416504 + ], + [ + "Diag", + -14.165982246398926 + ], + [ + "float", + -14.166032791137695 + ], + [ + "▁Pella", + -14.16603660583496 + ], + [ + "icidal", + -14.166069984436035 + ], + [ + "cester", + -14.166091918945312 + ], + [ + "▁Yuma", + -14.166109085083008 + ], + [ + "BAN", + -14.166118621826172 + ], + [ + "▁SIX", + -14.166184425354004 + ], + [ + "Triple", + -14.166204452514648 + ], + [ + "▁269", + -14.166204452514648 + ], + [ + "▁steeper", + -14.166285514831545 + ], + [ + "▁Silas", + -14.166296005249023 + ], + [ + "Birds", + -14.166346549987791 + ], + [ + "▁refurbish", + -14.16634750366211 + ], + [ + "▁(=", + -14.16635799407959 + ], + [ + "3:45", + -14.166410446166992 + ], + [ + "▁Aids", + -14.166447639465332 + ], + [ + "▁halter", + -14.166547775268556 + ], + [ + "▁Tali", + -14.166548728942873 + ], + [ + "▁CID", + -14.166585922241213 + ], + [ + "1-11", + -14.166616439819336 + ], + [ + "▁Coatings", + -14.166618347167969 + ], + [ + "aea", + -14.166640281677246 + ], + [ + "abella", + -14.166646003723145 + ], + [ + "▁whiter", + -14.166690826416016 + ], + [ + "HEA", + -14.166739463806152 + ], + [ + "▁2-6", + -14.16680145263672 + ], + [ + "▁doubting", + -14.166858673095703 + ], + [ + "▁murmur", + -14.166875839233398 + ], + [ + "wami", + -14.166894912719728 + ], + [ + "uster", + -14.166898727416992 + ], + [ + "ision", + -14.166958808898926 + ], + [ + "▁resp", + -14.166985511779783 + ], + [ + "▁Concerts", + -14.16700267791748 + ], + [ + "▁opus", + -14.167043685913086 + ], + [ + "athlon", + -14.167084693908691 + ], + [ + "▁Joke", + -14.167193412780762 + ], + [ + "▁lux", + -14.167207717895508 + ], + [ + "▁Stel", + -14.16721534729004 + ], + [ + "▁9.6", + -14.167221069335938 + ], + [ + "Egypt", + -14.167230606079102 + ], + [ + "carry", + -14.16724967956543 + ], + [ + "IPO", + -14.167261123657228 + ], + [ + "▁Correspondent", + -14.16727066040039 + ], + [ + "▁RTS", + -14.16727066040039 + ], + [ + "▁polystyrene", + -14.16727066040039 + ], + [ + "▁superannuation", + -14.16727066040039 + ], + [ + "▁verdant", + -14.16727066040039 + ], + [ + "▁Bexley", + -14.167271614074709 + ], + [ + "▁Fairgrounds", + -14.167272567749023 + ], + [ + "▁anthologies", + -14.167272567749023 + ], + [ + "▁cognac", + 
-14.167272567749023 + ], + [ + "▁Ayrshire", + -14.167274475097656 + ], + [ + "▁shrugged", + -14.167276382446287 + ], + [ + "▁WMA", + -14.167281150817873 + ], + [ + "▁misaligned", + -14.167284965515137 + ], + [ + "▁unincorporated", + -14.167284965515137 + ], + [ + "▁Submarine", + -14.167303085327148 + ], + [ + "▁Refinery", + -14.167305946350098 + ], + [ + "▁Baidu", + -14.16731071472168 + ], + [ + "▁Palmetto", + -14.167311668395996 + ], + [ + "comfortable", + -14.167313575744627 + ], + [ + "exercise", + -14.167319297790527 + ], + [ + "▁evoking", + -14.167319297790527 + ], + [ + "Illumina", + -14.167323112487791 + ], + [ + "1930", + -14.16732406616211 + ], + [ + "▁torrential", + -14.167327880859377 + ], + [ + "▁Napier", + -14.167329788208008 + ], + [ + "▁aloof", + -14.167330741882324 + ], + [ + "▁deterred", + -14.167330741882324 + ], + [ + "▁presentable", + -14.16733169555664 + ], + [ + "jersey", + -14.167333602905272 + ], + [ + "Kathy", + -14.167353630065918 + ], + [ + "Employment", + -14.167356491088867 + ], + [ + "▁Eraser", + -14.167359352111816 + ], + [ + "▁Janssen", + -14.167363166809082 + ], + [ + "▁toyota", + -14.167366981506348 + ], + [ + "IFIC", + -14.167398452758787 + ], + [ + "▁Chromecast", + -14.167399406433104 + ], + [ + "▁rubles", + -14.167400360107422 + ], + [ + "▁Seward", + -14.167428970336914 + ], + [ + "▁absurdity", + -14.167431831359863 + ], + [ + "physics", + -14.167444229125977 + ], + [ + "▁sexes", + -14.16749095916748 + ], + [ + "affected", + -14.16750717163086 + ], + [ + "Audit", + -14.167525291442873 + ], + [ + "▁eastbound", + -14.1675443649292 + ], + [ + "▁discolored", + -14.16758918762207 + ], + [ + "Takeaway", + -14.167638778686523 + ], + [ + "▁NFS", + -14.167691230773926 + ], + [ + "lawyer", + -14.167705535888672 + ], + [ + "▁Spots", + -14.167720794677734 + ], + [ + "▁Butch", + -14.167744636535645 + ], + [ + "▁developmentally", + -14.167760848999023 + ], + [ + "Navy", + -14.16777229309082 + ], + [ + "▁accuses", + -14.167823791503906 + ], + [ + "▁stoic", + -14.167858123779297 + ], + [ + "▁Approaches", + -14.167908668518066 + ], + [ + "956", + -14.167951583862305 + ], + [ + "french", + -14.16797924041748 + ], + [ + "▁Holes", + -14.16799545288086 + ], + [ + "Quin", + -14.167996406555176 + ], + [ + "▁deformed", + -14.168004989624023 + ], + [ + "Xpress", + -14.168014526367188 + ], + [ + "▁Bello", + -14.168015480041504 + ], + [ + "▁backpackers", + -14.16801929473877 + ], + [ + "Verlag", + -14.16804313659668 + ], + [ + "▁Henson", + -14.168068885803224 + ], + [ + "▁checksum", + -14.168110847473145 + ], + [ + "▁Eich", + -14.168127059936523 + ], + [ + "▁firepower", + -14.168163299560549 + ], + [ + "▁goo", + -14.168190956115724 + ], + [ + "spell", + -14.168220520019531 + ], + [ + "▁reared", + -14.168302536010742 + ], + [ + "▁sleeved", + -14.16833209991455 + ], + [ + "▁Rink", + -14.168365478515623 + ], + [ + "Zu", + -14.168410301208496 + ], + [ + "▁Collectors", + -14.168461799621582 + ], + [ + "kew", + -14.168469429016112 + ], + [ + "▁Supplied", + -14.168522834777832 + ], + [ + "▁penta", + -14.168581008911133 + ], + [ + "▁millimeter", + -14.16860294342041 + ], + [ + "▁anaesthetic", + -14.168622016906738 + ], + [ + "Bomb", + -14.16862964630127 + ], + [ + "trying", + -14.168661117553713 + ], + [ + "▁invocation", + -14.168701171875 + ], + [ + "▁Permits", + -14.168716430664062 + ], + [ + "▁Valves", + -14.168787956237791 + ], + [ + "674", + -14.168855667114258 + ], + [ + "▁$4.5", + -14.168956756591797 + ], + [ + "COUNT", + -14.169004440307615 + ], + [ + "__________", + 
-14.169004440307615 + ], + [ + "▁steamy", + -14.169007301330566 + ], + [ + "▁dialed", + -14.169013023376465 + ], + [ + "pharma", + -14.169021606445312 + ], + [ + "cato", + -14.169047355651855 + ], + [ + "▁searchers", + -14.169057846069336 + ], + [ + "▁inappropriately", + -14.169122695922852 + ], + [ + "griff", + -14.16915512084961 + ], + [ + "poster", + -14.169156074523926 + ], + [ + "▁Lodi", + -14.16920280456543 + ], + [ + "▁spinoff", + -14.169239044189451 + ], + [ + "▁PROFESSIONAL", + -14.16927433013916 + ], + [ + "▁culminates", + -14.169319152832031 + ], + [ + "chari", + -14.169352531433104 + ], + [ + "esthesia", + -14.16938018798828 + ], + [ + "▁tinge", + -14.169407844543455 + ], + [ + "▁17.5", + -14.169456481933594 + ], + [ + "Fits", + -14.169499397277832 + ], + [ + "Carter", + -14.169544219970703 + ], + [ + "▁backfire", + -14.16957664489746 + ], + [ + "cyclo", + -14.169607162475586 + ], + [ + "yur", + -14.16961669921875 + ], + [ + "cena", + -14.169636726379396 + ], + [ + "▁ensue", + -14.169727325439451 + ], + [ + "IAM", + -14.169739723205566 + ], + [ + "dric", + -14.169825553894045 + ], + [ + "MTS", + -14.169882774353027 + ], + [ + "▁VLC", + -14.16989040374756 + ], + [ + "723", + -14.169906616210938 + ], + [ + "bula", + -14.169960021972656 + ], + [ + "Calculat", + -14.170034408569336 + ], + [ + "Chop", + -14.170036315917969 + ], + [ + "ucking", + -14.170036315917969 + ], + [ + "▁signpost", + -14.170085906982422 + ], + [ + "▁bitterly", + -14.170121192932127 + ], + [ + "provider", + -14.170185089111328 + ], + [ + "osta", + -14.17025375366211 + ], + [ + "akshi", + -14.17025661468506 + ], + [ + "▁multinationals", + -14.170260429382324 + ], + [ + "zam", + -14.170307159423828 + ], + [ + "opathic", + -14.170315742492676 + ], + [ + "▁sims", + -14.17041301727295 + ], + [ + "IZED", + -14.170429229736328 + ], + [ + "refer", + -14.170433044433594 + ], + [ + "TRIP", + -14.17047119140625 + ], + [ + "▁tongs", + -14.170473098754885 + ], + [ + "▁Mio", + -14.170485496520996 + ], + [ + "932", + -14.170576095581056 + ], + [ + "NCI", + -14.170588493347168 + ], + [ + "071", + -14.170626640319824 + ], + [ + "▁tradeoff", + -14.17093276977539 + ], + [ + "▁1836", + -14.171062469482422 + ], + [ + "coder", + -14.171066284179688 + ], + [ + "▁FEE", + -14.171095848083496 + ], + [ + "▁243", + -14.17111587524414 + ], + [ + "▁plummet", + -14.171263694763184 + ], + [ + "▁sectionals", + -14.171281814575195 + ], + [ + "OTO", + -14.171289443969728 + ], + [ + "BZ", + -14.171311378479004 + ], + [ + "▁Qingdao", + -14.171342849731444 + ], + [ + "biological", + -14.17139720916748 + ], + [ + "▁337", + -14.171557426452637 + ], + [ + "▁NSC", + -14.171568870544434 + ], + [ + "▁Prostate", + -14.171623229980469 + ], + [ + "▁Unexpected", + -14.171669006347656 + ], + [ + "▁tattooed", + -14.171669960021973 + ], + [ + "▁Newell", + -14.17167854309082 + ], + [ + "HIN", + -14.171708106994627 + ], + [ + "▁MPC", + -14.171833992004396 + ], + [ + "▁Canaria", + -14.17198085784912 + ], + [ + "loose", + -14.172011375427246 + ], + [ + "853", + -14.172104835510254 + ], + [ + "activation", + -14.172125816345217 + ], + [ + "▁ANYTHING", + -14.172148704528809 + ], + [ + "▁Coimbatore", + -14.172148704528809 + ], + [ + "▁Consolidation", + -14.172148704528809 + ], + [ + "▁Immersion", + -14.172148704528809 + ], + [ + "▁McClure", + -14.172148704528809 + ], + [ + "▁admirably", + -14.172148704528809 + ], + [ + "▁decimated", + -14.172148704528809 + ], + [ + "▁dynamism", + -14.172148704528809 + ], + [ + "▁electrification", + -14.172148704528809 + ], + [ + 
"▁groovy", + -14.172148704528809 + ], + [ + "▁liquefied", + -14.172148704528809 + ], + [ + "▁ravioli", + -14.172148704528809 + ], + [ + "▁SiriusXM", + -14.172149658203123 + ], + [ + "▁condensate", + -14.172149658203123 + ], + [ + "4/20/2019", + -14.17215061187744 + ], + [ + "▁appalled", + -14.17215061187744 + ], + [ + "▁dissident", + -14.17215061187744 + ], + [ + "▁Dobson", + -14.172151565551758 + ], + [ + "▁BAFTA", + -14.172163009643556 + ], + [ + "Decrease", + -14.17216968536377 + ], + [ + "▁crept", + -14.17216968536377 + ], + [ + "▁Conroe", + -14.1721773147583 + ], + [ + "▁england", + -14.172185897827148 + ], + [ + "▁unfiltered", + -14.17218780517578 + ], + [ + "▁Pereira", + -14.172198295593262 + ], + [ + "▁Zapier", + -14.17220401763916 + ], + [ + "▁Bryson", + -14.172221183776855 + ], + [ + "▁valance", + -14.172268867492676 + ], + [ + "Smoking", + -14.172314643859863 + ], + [ + "conflict", + -14.172317504882812 + ], + [ + "Xinhua", + -14.172324180603027 + ], + [ + "academy", + -14.172325134277344 + ], + [ + "▁2019-2020", + -14.172327995300291 + ], + [ + "Superior", + -14.17232894897461 + ], + [ + "pharmacy", + -14.17233943939209 + ], + [ + "▁REP", + -14.172341346740724 + ], + [ + "properties", + -14.17234992980957 + ], + [ + "priority", + -14.172369003295898 + ], + [ + "Weighing", + -14.17237377166748 + ], + [ + "▁Kaye", + -14.172391891479492 + ], + [ + "reflective", + -14.172396659851074 + ], + [ + "Variable", + -14.172402381896973 + ], + [ + "Broker", + -14.1724271774292 + ], + [ + "▁nonpartisan", + -14.172439575195312 + ], + [ + "▁RTA", + -14.172462463378906 + ], + [ + "▁elated", + -14.172465324401855 + ], + [ + "ILLA", + -14.172467231750488 + ], + [ + "RTE", + -14.172477722167969 + ], + [ + "▁unneeded", + -14.17249870300293 + ], + [ + "Blind", + -14.172511100769045 + ], + [ + "▁Fanny", + -14.172521591186523 + ], + [ + "▁wheelbase", + -14.172527313232422 + ], + [ + "Retro", + -14.172548294067385 + ], + [ + "beau", + -14.172563552856444 + ], + [ + "▁CLO", + -14.172572135925291 + ], + [ + "▁randomised", + -14.172608375549316 + ], + [ + "▁Housekeeping", + -14.172619819641112 + ], + [ + "LIABLE", + -14.17266845703125 + ], + [ + "▁devotee", + -14.172690391540527 + ], + [ + "▁blackened", + -14.172714233398438 + ], + [ + "▁Targeted", + -14.17271614074707 + ], + [ + "▁orchestrate", + -14.17271614074707 + ], + [ + "Tiny", + -14.172720909118652 + ], + [ + "561", + -14.172727584838867 + ], + [ + "▁coo", + -14.172755241394045 + ], + [ + "▁Knowles", + -14.172794342041016 + ], + [ + "▁Prat", + -14.172853469848633 + ], + [ + "▁abusers", + -14.17290210723877 + ], + [ + "▁STATES", + -14.172938346862791 + ], + [ + "Pushing", + -14.172956466674805 + ], + [ + "tts", + -14.173007011413574 + ], + [ + "▁267", + -14.17301082611084 + ], + [ + "▁Observe", + -14.1730318069458 + ], + [ + "▁3.5\"", + -14.173056602478027 + ], + [ + "▁Kya", + -14.173072814941406 + ], + [ + "▁biome", + -14.173100471496582 + ], + [ + "▁windfall", + -14.173125267028809 + ], + [ + "▁anchorage", + -14.173133850097656 + ], + [ + "▁Beasts", + -14.173172950744627 + ], + [ + "▁Firth", + -14.173194885253906 + ], + [ + "PLUS", + -14.173243522644045 + ], + [ + "coupled", + -14.173258781433104 + ], + [ + "▁SECOND", + -14.173299789428713 + ], + [ + "▁Beers", + -14.17330551147461 + ], + [ + "▁Qatari", + -14.173383712768556 + ], + [ + "▁chilies", + -14.17338752746582 + ], + [ + "▁spears", + -14.17340087890625 + ], + [ + "▁258", + -14.173437118530272 + ], + [ + "▁heinous", + -14.173471450805664 + ], + [ + "▁dogma", + -14.173491477966309 + ], + [ + 
"▁pensioners", + -14.173542976379396 + ], + [ + "▁3.30", + -14.173564910888672 + ], + [ + "▁adorning", + -14.173576354980469 + ], + [ + "Quickly", + -14.173657417297363 + ], + [ + "▁675", + -14.17365837097168 + ], + [ + "035", + -14.173720359802246 + ], + [ + "▁diarrhoea", + -14.173779487609863 + ], + [ + "▁$110", + -14.173791885375977 + ], + [ + "▁Padma", + -14.173805236816406 + ], + [ + "▁1776", + -14.17380714416504 + ], + [ + "▁joyfully", + -14.173820495605469 + ], + [ + "Thai", + -14.173973083496094 + ], + [ + "IMP", + -14.174028396606444 + ], + [ + "▁Bartholomew", + -14.17404079437256 + ], + [ + "ilah", + -14.174068450927734 + ], + [ + "▁thugs", + -14.174072265625 + ], + [ + "hola", + -14.17408847808838 + ], + [ + "rete", + -14.174126625061035 + ], + [ + "▁6:15", + -14.174236297607422 + ], + [ + "▁Deutsch", + -14.174238204956056 + ], + [ + "▁Veda", + -14.174246788024902 + ], + [ + "nir", + -14.174320220947266 + ], + [ + "▁fem", + -14.174333572387695 + ], + [ + "Pump", + -14.174344062805176 + ], + [ + "▁TPS", + -14.17435073852539 + ], + [ + "▁variances", + -14.17442512512207 + ], + [ + "drick", + -14.17445182800293 + ], + [ + "achie", + -14.174480438232422 + ], + [ + "▁28\"", + -14.174506187438965 + ], + [ + "▁lute", + -14.174593925476074 + ], + [ + "▁Hypnosis", + -14.174637794494627 + ], + [ + "▁Ses", + -14.174664497375488 + ], + [ + "ezza", + -14.174671173095703 + ], + [ + "▁$54", + -14.174689292907717 + ], + [ + "▁glo", + -14.174692153930664 + ], + [ + "▁Passes", + -14.174747467041016 + ], + [ + "▁mam", + -14.174762725830078 + ], + [ + "▁commemorates", + -14.174766540527344 + ], + [ + "BIL", + -14.17481517791748 + ], + [ + "▁PPA", + -14.174819946289062 + ], + [ + "▁Vito", + -14.174834251403809 + ], + [ + "▁Hustle", + -14.174850463867188 + ], + [ + "▁Cem", + -14.174941062927246 + ], + [ + "▁Furnishings", + -14.174989700317385 + ], + [ + "nano", + -14.17511749267578 + ], + [ + "▁crypt", + -14.175141334533691 + ], + [ + "itia", + -14.175219535827637 + ], + [ + "polo", + -14.17526149749756 + ], + [ + "▁HCM", + -14.175283432006836 + ], + [ + "769", + -14.175284385681152 + ], + [ + "▁Someday", + -14.175296783447266 + ], + [ + "▁strangest", + -14.175347328186035 + ], + [ + "laser", + -14.17543888092041 + ], + [ + "▁sala", + -14.17551040649414 + ], + [ + "CRAFT", + -14.175549507141112 + ], + [ + "▁Luv", + -14.175613403320312 + ], + [ + "players", + -14.17565631866455 + ], + [ + "▁PIA", + -14.17568016052246 + ], + [ + "▁aspirational", + -14.17569637298584 + ], + [ + "Cats", + -14.175731658935549 + ], + [ + "▁falcon", + -14.175753593444824 + ], + [ + "rce", + -14.175787925720217 + ], + [ + "▁Bhi", + -14.17582893371582 + ], + [ + "▁SOM", + -14.175863265991213 + ], + [ + "▁Collage", + -14.176012992858888 + ], + [ + "Organizations", + -14.176036834716797 + ], + [ + "▁********", + -14.176036834716797 + ], + [ + "▁Xuan", + -14.176055908203123 + ], + [ + "Outcome", + -14.176066398620604 + ], + [ + "▁Cope", + -14.176076889038086 + ], + [ + "▁299", + -14.176143646240234 + ], + [ + "module", + -14.17614459991455 + ], + [ + "ulant", + -14.176186561584473 + ], + [ + "▁peeve", + -14.17620086669922 + ], + [ + "▁Zealanders", + -14.176201820373535 + ], + [ + "reactive", + -14.176243782043455 + ], + [ + "▁hostname", + -14.176321029663086 + ], + [ + "▁82%", + -14.176332473754885 + ], + [ + "Whit", + -14.17634105682373 + ], + [ + "▁Funky", + -14.176392555236816 + ], + [ + "▁Oka", + -14.176422119140623 + ], + [ + "Plain", + -14.176448822021484 + ], + [ + "▁invigorate", + -14.176448822021484 + ], + [ + 
"▁misinterpret", + -14.176505088806152 + ], + [ + "▁Egan", + -14.176558494567873 + ], + [ + "hiba", + -14.1765775680542 + ], + [ + "▁DNR", + -14.17662525177002 + ], + [ + "▁CHP", + -14.176640510559082 + ], + [ + "▁enliven", + -14.176653861999512 + ], + [ + "▁Destroy", + -14.176703453063965 + ], + [ + "▁ashley", + -14.176758766174316 + ], + [ + "▁chromosomes", + -14.17676067352295 + ], + [ + "▁receivables", + -14.176785469055176 + ], + [ + "phthal", + -14.176816940307615 + ], + [ + "▁1858", + -14.176817893981934 + ], + [ + "Healthcare", + -14.176898956298828 + ], + [ + "mela", + -14.17690658569336 + ], + [ + "▁48\"", + -14.176936149597168 + ], + [ + "CCI", + -14.176949501037598 + ], + [ + "upgrade", + -14.176996231079102 + ], + [ + "▁590", + -14.177029609680176 + ], + [ + "conjugate", + -14.17703628540039 + ], + [ + "▁$2.7", + -14.177042007446287 + ], + [ + "Facilitate", + -14.177050590515137 + ], + [ + "▁AMOLED", + -14.177050590515137 + ], + [ + "▁Alhambra", + -14.177050590515137 + ], + [ + "▁Cupertino", + -14.177050590515137 + ], + [ + "▁Madurai", + -14.177050590515137 + ], + [ + "▁PROPERTY", + -14.177050590515137 + ], + [ + "▁Rinpoche", + -14.177050590515137 + ], + [ + "▁Zanzibar", + -14.177050590515137 + ], + [ + "▁claustrophobic", + -14.177050590515137 + ], + [ + "▁debatable", + -14.177050590515137 + ], + [ + "▁defibrillator", + -14.177050590515137 + ], + [ + "▁sorority", + -14.177050590515137 + ], + [ + "▁trembling", + -14.177050590515137 + ], + [ + "▁euphoric", + -14.177051544189451 + ], + [ + "▁Excursion", + -14.17705535888672 + ], + [ + "▁dominion", + -14.17705535888672 + ], + [ + "▁Chalmers", + -14.177057266235352 + ], + [ + "▁Medigap", + -14.177057266235352 + ], + [ + "▁fiesta", + -14.177059173583984 + ], + [ + "▁hypervisor", + -14.177059173583984 + ], + [ + "▁flexibly", + -14.177062034606934 + ], + [ + "▁Kilo", + -14.177064895629885 + ], + [ + "▁alcove", + -14.177064895629885 + ], + [ + "▁applesauce", + -14.1770658493042 + ], + [ + "▁Dunlop", + -14.177066802978516 + ], + [ + "▁Delegation", + -14.17707061767578 + ], + [ + "▁modifies", + -14.177088737487791 + ], + [ + "forums", + -14.17708969116211 + ], + [ + "▁McCabe", + -14.17710781097412 + ], + [ + "▁Wooster", + -14.17710781097412 + ], + [ + "▁Indira", + -14.177111625671388 + ], + [ + "▁hastily", + -14.177116394042969 + ], + [ + "▁Prepared", + -14.177118301391602 + ], + [ + "▁Shimmer", + -14.177122116088867 + ], + [ + "▁Routing", + -14.17713737487793 + ], + [ + "▁deriving", + -14.177152633666992 + ], + [ + "▁Pollard", + -14.177154541015623 + ], + [ + "Worried", + -14.177156448364258 + ], + [ + "▁Wuhan", + -14.177180290222168 + ], + [ + "▁unequivocally", + -14.177206993103027 + ], + [ + "▁Agatha", + -14.17721939086914 + ], + [ + "▁mesmerized", + -14.177238464355469 + ], + [ + "▁whimsy", + -14.177252769470217 + ], + [ + "conventional", + -14.17725658416748 + ], + [ + "Spoon", + -14.177263259887695 + ], + [ + "ghat", + -14.17726707458496 + ], + [ + "▁reevaluate", + -14.177286148071287 + ], + [ + "committee", + -14.17729377746582 + ], + [ + "▁Elsie", + -14.177306175231934 + ], + [ + "▁Shale", + -14.177318572998049 + ], + [ + "Vietnam", + -14.17735195159912 + ], + [ + "▁Convent", + -14.177353858947754 + ], + [ + "▁HUB", + -14.17735481262207 + ], + [ + "QUESTION", + -14.177390098571776 + ], + [ + "brief", + -14.177459716796877 + ], + [ + "extremely", + -14.177515029907228 + ], + [ + "▁matchups", + -14.177530288696287 + ], + [ + "▁Razer", + -14.177581787109377 + ], + [ + "▁Enviro", + -14.177583694458008 + ], + [ + "▁pulverizer", + 
-14.177584648132324 + ], + [ + "residence", + -14.177640914916992 + ], + [ + "Bobby", + -14.177651405334473 + ], + [ + "▁anaerobic", + -14.1776704788208 + ], + [ + "Stuff", + -14.177688598632812 + ], + [ + "burst", + -14.17770004272461 + ], + [ + "NNA", + -14.177720069885254 + ], + [ + "HOUSE", + -14.177739143371582 + ], + [ + "peg", + -14.177769660949709 + ], + [ + "Fur", + -14.177778244018556 + ], + [ + "Confused", + -14.177799224853516 + ], + [ + "▁Sia", + -14.177845001220703 + ], + [ + "Ginger", + -14.177921295166016 + ], + [ + "▁wayside", + -14.17795753479004 + ], + [ + "▁Cleanser", + -14.17800235748291 + ], + [ + "▁dept", + -14.178030014038086 + ], + [ + "019", + -14.178045272827148 + ], + [ + "▁Diaper", + -14.178086280822754 + ], + [ + "▁Agility", + -14.178092002868652 + ], + [ + "▁Indra", + -14.178190231323242 + ], + [ + "▁2+", + -14.17839527130127 + ], + [ + "Costa", + -14.178458213806152 + ], + [ + "dallas", + -14.178485870361328 + ], + [ + "extension", + -14.17850112915039 + ], + [ + "▁mania", + -14.17851448059082 + ], + [ + "▁Synth", + -14.17852783203125 + ], + [ + "activ", + -14.178613662719728 + ], + [ + "▁risked", + -14.178621292114258 + ], + [ + "▁cytokines", + -14.178629875183104 + ], + [ + "Minor", + -14.178648948669434 + ], + [ + "▁IFSC", + -14.17874813079834 + ], + [ + "eben", + -14.17879581451416 + ], + [ + "▁mules", + -14.178799629211426 + ], + [ + "tock", + -14.17880916595459 + ], + [ + "Astro", + -14.178811073303224 + ], + [ + "clus", + -14.178821563720703 + ], + [ + "crunch", + -14.1788969039917 + ], + [ + "publish", + -14.178959846496582 + ], + [ + "▁absences", + -14.178977012634276 + ], + [ + "▁Impressions", + -14.178977966308594 + ], + [ + "▁9.4", + -14.178983688354492 + ], + [ + "▁Yemeni", + -14.179003715515137 + ], + [ + "▁Tsar", + -14.17904567718506 + ], + [ + "▁Tract", + -14.179047584533691 + ], + [ + "▁Optima", + -14.179177284240724 + ], + [ + "1970", + -14.17919635772705 + ], + [ + "▁buttered", + -14.179327011108398 + ], + [ + "▁appropriated", + -14.179407119750977 + ], + [ + "Beer", + -14.17942237854004 + ], + [ + "aaa", + -14.179445266723633 + ], + [ + "▁LIN", + -14.17945384979248 + ], + [ + "056", + -14.17949104309082 + ], + [ + "SAVE", + -14.179508209228516 + ], + [ + "▁ses", + -14.17955207824707 + ], + [ + "▁Trench", + -14.179580688476562 + ], + [ + "▁CNET", + -14.17959690093994 + ], + [ + "▁Determination", + -14.179606437683104 + ], + [ + "▁324", + -14.179615020751951 + ], + [ + "ears", + -14.17966651916504 + ], + [ + "▁dreamer", + -14.179670333862305 + ], + [ + "HOO", + -14.179682731628418 + ], + [ + "▁sudo", + -14.179723739624023 + ], + [ + "▁SALT", + -14.179734230041504 + ], + [ + "▁Conceptual", + -14.179763793945312 + ], + [ + "▁Ricoh", + -14.179811477661133 + ], + [ + "yra", + -14.17981243133545 + ], + [ + "ATI", + -14.179831504821776 + ], + [ + "89%", + -14.179861068725586 + ], + [ + "gotten", + -14.179866790771484 + ], + [ + "▁Elisa", + -14.179950714111328 + ], + [ + "▁Kaepernick", + -14.180179595947266 + ], + [ + "▁pharmacology", + -14.180277824401855 + ], + [ + "▁Pepe", + -14.180468559265137 + ], + [ + "048", + -14.1804780960083 + ], + [ + "▁1970'", + -14.180481910705566 + ], + [ + "▁Highness", + -14.180506706237791 + ], + [ + "▁kidnap", + -14.180540084838867 + ], + [ + "▁Neel", + -14.180587768554688 + ], + [ + "mire", + -14.180665016174316 + ], + [ + "▁tem", + -14.180719375610352 + ], + [ + "▁preachers", + -14.180793762207031 + ], + [ + "chun", + -14.180840492248535 + ], + [ + "cort", + -14.180877685546877 + ], + [ + "▁troopers", + 
-14.180950164794922 + ], + [ + "affle", + -14.1809663772583 + ], + [ + "▁Flashing", + -14.18101692199707 + ], + [ + "▁1:3", + -14.181029319763184 + ], + [ + "took", + -14.18108081817627 + ], + [ + "▁Sena", + -14.18109893798828 + ], + [ + "▁Camb", + -14.181175231933594 + ], + [ + "▁BAY", + -14.181236267089844 + ], + [ + "▁pointy", + -14.181273460388184 + ], + [ + "▁Bub", + -14.181303024291992 + ], + [ + "▁Bae", + -14.181325912475586 + ], + [ + "▁Responder", + -14.181538581848145 + ], + [ + "Designing", + -14.181548118591309 + ], + [ + "▁Aviva", + -14.1815767288208 + ], + [ + "▁Wau", + -14.181591987609863 + ], + [ + "▁lessening", + -14.181629180908203 + ], + [ + "prior", + -14.181634902954102 + ], + [ + "XXX", + -14.181652069091797 + ], + [ + "mian", + -14.181678771972656 + ], + [ + "▁455", + -14.18172836303711 + ], + [ + "anja", + -14.18173122406006 + ], + [ + "▁centering", + -14.181751251220703 + ], + [ + "eous", + -14.181769371032717 + ], + [ + "WAS", + -14.181792259216309 + ], + [ + "▁WK", + -14.181814193725586 + ], + [ + "▁Helens", + -14.181822776794434 + ], + [ + "▁subcontract", + -14.181839942932127 + ], + [ + "▁kha", + -14.181859016418455 + ], + [ + "▁crockpot", + -14.181914329528809 + ], + [ + "▁Yearbook", + -14.18193817138672 + ], + [ + "▁Novartis", + -14.181976318359377 + ], + [ + "▁Mesopotamia", + -14.181977272033691 + ], + [ + "▁ecstasy", + -14.181977272033691 + ], + [ + "▁emeritus", + -14.181977272033691 + ], + [ + "▁flamenco", + -14.181977272033691 + ], + [ + "▁indestructible", + -14.181977272033691 + ], + [ + "▁Maricopa", + -14.181978225708008 + ], + [ + "▁subsistence", + -14.181982040405272 + ], + [ + "▁Horowitz", + -14.181984901428224 + ], + [ + "▁Nominee", + -14.18198585510254 + ], + [ + "Affirm", + -14.181986808776855 + ], + [ + "▁Regatta", + -14.18199062347412 + ], + [ + "▁visualized", + -14.181998252868652 + ], + [ + "▁preoccupation", + -14.182031631469728 + ], + [ + "▁Cheesecake", + -14.18203353881836 + ], + [ + "▁deflation", + -14.182044982910156 + ], + [ + "▁READY", + -14.1820650100708 + ], + [ + "▁Doppler", + -14.182066917419434 + ], + [ + "SUP", + -14.18207550048828 + ], + [ + "▁bummer", + -14.18207836151123 + ], + [ + "▁Marlene", + -14.182079315185549 + ], + [ + "▁unwittingly", + -14.182080268859863 + ], + [ + "▁Orphan", + -14.18211555480957 + ], + [ + "▁paintbrush", + -14.182124137878418 + ], + [ + "▁Ductless", + -14.182135581970217 + ], + [ + "▁subculture", + -14.182150840759276 + ], + [ + "▁Deepak", + -14.182215690612791 + ], + [ + "▁HDFC", + -14.182232856750488 + ], + [ + "▁282", + -14.18223762512207 + ], + [ + "▁Neymar", + -14.18224811553955 + ], + [ + "▁RELEASE", + -14.182271003723145 + ], + [ + "▁utterance", + -14.182283401489258 + ], + [ + "▁Tatum", + -14.18230438232422 + ], + [ + "▁reinsurance", + -14.182330131530762 + ], + [ + "▁vowels", + -14.182345390319824 + ], + [ + "CKS", + -14.18235206604004 + ], + [ + "Innovative", + -14.18239688873291 + ], + [ + "Consequently", + -14.182402610778809 + ], + [ + "Catherine", + -14.18240451812744 + ], + [ + "Society", + -14.18241024017334 + ], + [ + "battery", + -14.182411193847656 + ], + [ + "Interpret", + -14.182415008544922 + ], + [ + "translation", + -14.182437896728516 + ], + [ + "▁Stillwater", + -14.182438850402832 + ], + [ + "▁TEA", + -14.182451248168944 + ], + [ + "▁WHOLE", + -14.182456016540527 + ], + [ + "▁adultery", + -14.182477951049805 + ], + [ + "ecca", + -14.182489395141602 + ], + [ + "▁rarer", + -14.182507514953612 + ], + [ + "▁alienated", + -14.18258285522461 + ], + [ + "Verdict", + 
-14.182730674743652 + ], + [ + "bik", + -14.182732582092283 + ], + [ + "▁HOLD", + -14.18274211883545 + ], + [ + "▁ин", + -14.182777404785156 + ], + [ + "▁counsellors", + -14.182783126831056 + ], + [ + "▁Lick", + -14.18279266357422 + ], + [ + "MHA", + -14.182823181152344 + ], + [ + "Securing", + -14.18287181854248 + ], + [ + "▁Impression", + -14.18287467956543 + ], + [ + "▁plantain", + -14.18287467956543 + ], + [ + "529", + -14.182881355285645 + ], + [ + "anyone", + -14.182923316955566 + ], + [ + "watched", + -14.182957649230955 + ], + [ + "784", + -14.182968139648438 + ], + [ + "573", + -14.18298053741455 + ], + [ + "▁pavilions", + -14.183016777038574 + ], + [ + "viral", + -14.183027267456056 + ], + [ + "▁decoy", + -14.183070182800291 + ], + [ + "▁Exporters", + -14.18308925628662 + ], + [ + "Prov", + -14.18309211730957 + ], + [ + "▁Shira", + -14.183109283447266 + ], + [ + "▁Kev", + -14.183135986328123 + ], + [ + "▁Plaster", + -14.183201789855955 + ], + [ + "▁unsere", + -14.183237075805664 + ], + [ + "violence", + -14.183245658874512 + ], + [ + "NIM", + -14.18325901031494 + ], + [ + "▁Cray", + -14.183263778686523 + ], + [ + "launched", + -14.183269500732422 + ], + [ + "▁Granger", + -14.18328094482422 + ], + [ + "▁paylines", + -14.183282852172852 + ], + [ + "▁Gaston", + -14.18330192565918 + ], + [ + "▁reverted", + -14.18330192565918 + ], + [ + "brig", + -14.183314323425291 + ], + [ + "-3000", + -14.183399200439451 + ], + [ + "attached", + -14.18341064453125 + ], + [ + "essel", + -14.183426856994627 + ], + [ + "▁booty", + -14.183465957641602 + ], + [ + "▁overclock", + -14.183571815490724 + ], + [ + "CME", + -14.18358325958252 + ], + [ + "▁reconciled", + -14.183624267578123 + ], + [ + "284", + -14.18378448486328 + ], + [ + "▁mallet", + -14.183818817138672 + ], + [ + "lgi", + -14.183823585510254 + ], + [ + "Barr", + -14.183833122253418 + ], + [ + "▁Reaper", + -14.183843612670898 + ], + [ + "▁MENA", + -14.18388557434082 + ], + [ + "▁Stuffed", + -14.183887481689451 + ], + [ + "▁vamp", + -14.183902740478516 + ], + [ + "▁Charters", + -14.183952331542969 + ], + [ + "alto", + -14.18396282196045 + ], + [ + "FAR", + -14.184027671813965 + ], + [ + "▁86%", + -14.184036254882812 + ], + [ + "▁Beale", + -14.18407917022705 + ], + [ + "Minimize", + -14.184082984924316 + ], + [ + "▁Prerequisite", + -14.184146881103516 + ], + [ + "ETA", + -14.184167861938477 + ], + [ + "▁sirens", + -14.184178352355955 + ], + [ + "▁Pixie", + -14.184270858764648 + ], + [ + "▁Filip", + -14.184289932250977 + ], + [ + "95%", + -14.184314727783203 + ], + [ + "▁Traditions", + -14.184319496154783 + ], + [ + "gestion", + -14.18432903289795 + ], + [ + "▁juveniles", + -14.184431076049805 + ], + [ + "▁deepened", + -14.1844482421875 + ], + [ + "▁bearable", + -14.184460639953612 + ], + [ + "▁Lé", + -14.184488296508787 + ], + [ + "▁WAC", + -14.184525489807127 + ], + [ + "▁ftp", + -14.184553146362305 + ], + [ + "▁makeovers", + -14.18455982208252 + ], + [ + "chick", + -14.184563636779783 + ], + [ + "muscle", + -14.184566497802734 + ], + [ + "▁stalker", + -14.184636116027832 + ], + [ + "CHO", + -14.184647560119627 + ], + [ + "▁conjures", + -14.184730529785156 + ], + [ + "–9", + -14.1847562789917 + ], + [ + "erium", + -14.18488597869873 + ], + [ + "▁Experiments", + -14.184934616088867 + ], + [ + "▁Perch", + -14.184969902038574 + ], + [ + "frames", + -14.185044288635254 + ], + [ + "mburg", + -14.185049057006836 + ], + [ + "PLC", + -14.185053825378418 + ], + [ + "▁XLR", + -14.1851167678833 + ], + [ + "▁Argument", + -14.185141563415527 + ], + [ + 
"▁noch", + -14.18520164489746 + ], + [ + "cage", + -14.185223579406738 + ], + [ + "KET", + -14.18522834777832 + ], + [ + "▁Increases", + -14.18524169921875 + ], + [ + "▁277", + -14.185307502746582 + ], + [ + "▁9.00", + -14.185432434082031 + ], + [ + "▁biometrics", + -14.185462951660156 + ], + [ + "Feature", + -14.185471534729004 + ], + [ + "cuff", + -14.185531616210938 + ], + [ + "▁Vouchers", + -14.185562133789062 + ], + [ + "945", + -14.185565948486328 + ], + [ + "▁Suspended", + -14.18561553955078 + ], + [ + "▁voyages", + -14.185745239257812 + ], + [ + "NAM", + -14.18575954437256 + ], + [ + "DRO", + -14.185778617858888 + ], + [ + "092", + -14.185811042785645 + ], + [ + "ggy", + -14.185922622680664 + ], + [ + "▁badger", + -14.185940742492676 + ], + [ + "▁Marg", + -14.185968399047852 + ], + [ + "▁840", + -14.186005592346191 + ], + [ + "Benefit", + -14.18608570098877 + ], + [ + "▁Golan", + -14.186113357543944 + ], + [ + "▁Tenants", + -14.186161994934082 + ], + [ + "▁244", + -14.186235427856444 + ], + [ + "▁physicality", + -14.18630027770996 + ], + [ + "▁Queer", + -14.1863431930542 + ], + [ + "▁TOS", + -14.186394691467283 + ], + [ + "noon", + -14.186463356018066 + ], + [ + "mary", + -14.186553001403809 + ], + [ + "terol", + -14.18659782409668 + ], + [ + "blatt", + -14.186602592468262 + ], + [ + "opter", + -14.18661880493164 + ], + [ + "▁IRB", + -14.186635971069336 + ], + [ + "▁SPRING", + -14.1866455078125 + ], + [ + "Cub", + -14.18664836883545 + ], + [ + "eet", + -14.186732292175291 + ], + [ + "▁impressing", + -14.186745643615724 + ], + [ + "emma", + -14.18674659729004 + ], + [ + "imagine", + -14.186747550964355 + ], + [ + "arissa", + -14.186771392822266 + ], + [ + "▁Oftentimes", + -14.186777114868164 + ], + [ + "▁Jakob", + -14.186795234680176 + ], + [ + "▁OBE", + -14.186860084533691 + ], + [ + "▁radish", + -14.186861991882324 + ], + [ + "Nutrition", + -14.18690586090088 + ], + [ + "▁headlamps", + -14.186917304992676 + ], + [ + "Kiss", + -14.186925888061523 + ], + [ + "▁Knives", + -14.186927795410156 + ], + [ + "▁Precinct", + -14.186927795410156 + ], + [ + "▁associating", + -14.186927795410156 + ], + [ + "▁glyphosate", + -14.186927795410156 + ], + [ + "▁hibiscus", + -14.186927795410156 + ], + [ + "▁hypnotherapy", + -14.186927795410156 + ], + [ + "▁splendour", + -14.186927795410156 + ], + [ + "▁spontaneity", + -14.186927795410156 + ], + [ + "▁unwarranted", + -14.186927795410156 + ], + [ + "▁Hedgehog", + -14.186928749084473 + ], + [ + "▁ostrich", + -14.186928749084473 + ], + [ + "▁Haskell", + -14.186930656433104 + ], + [ + "▁Caspian", + -14.186931610107422 + ], + [ + "▁rosary", + -14.186933517456056 + ], + [ + "▁lustrous", + -14.186936378479004 + ], + [ + "▁STEEL", + -14.186939239501951 + ], + [ + "▁Epoxy", + -14.18694019317627 + ], + [ + "▁1/4′′", + -14.18695831298828 + ], + [ + "▁FDI", + -14.186973571777344 + ], + [ + "▁Comfy", + -14.186979293823242 + ], + [ + "▁swagger", + -14.186985969543455 + ], + [ + "▁Footwear", + -14.186996459960938 + ], + [ + "▁Sommer", + -14.186996459960938 + ], + [ + "▁Laredo", + -14.18699836730957 + ], + [ + "▁Gators", + -14.187003135681152 + ], + [ + "▁cisco", + -14.187017440795898 + ], + [ + "ABB", + -14.187030792236328 + ], + [ + "▁Outpatient", + -14.187044143676758 + ], + [ + "foods", + -14.1870698928833 + ], + [ + "▁Increasingly", + -14.187101364135742 + ], + [ + "▁disorderly", + -14.187142372131348 + ], + [ + "▁synchro", + -14.187163352966309 + ], + [ + "karan", + -14.187182426452637 + ], + [ + "▁selfishness", + -14.187204360961914 + ], + [ + "Nan", + 
-14.187237739562988 + ], + [ + "rida", + -14.187243461608888 + ], + [ + "phrase", + -14.187270164489746 + ], + [ + "▁Congregational", + -14.187298774719238 + ], + [ + "ewski", + -14.18730354309082 + ], + [ + "▁encapsulates", + -14.187329292297363 + ], + [ + "▁6′′", + -14.18733024597168 + ], + [ + "▁visionaries", + -14.187338829040527 + ], + [ + "▁Entre", + -14.187342643737791 + ], + [ + "experiment", + -14.187372207641602 + ], + [ + "cheat", + -14.18737506866455 + ], + [ + "▁Dala", + -14.1873779296875 + ], + [ + "stood", + -14.187408447265623 + ], + [ + "wikipedia", + -14.18744945526123 + ], + [ + "Attendees", + -14.187464714050291 + ], + [ + "Festival", + -14.18747901916504 + ], + [ + "discrimination", + -14.187482833862305 + ], + [ + "Tesla", + -14.18748378753662 + ], + [ + "▁idealistic", + -14.18748378753662 + ], + [ + "evidence", + -14.18748664855957 + ], + [ + "retirement", + -14.18748664855957 + ], + [ + "Advertising", + -14.187487602233888 + ], + [ + "luna", + -14.187545776367188 + ], + [ + "▁Bolts", + -14.1875638961792 + ], + [ + "Downtown", + -14.187570571899414 + ], + [ + "ECH", + -14.18757152557373 + ], + [ + "▁Upstate", + -14.187581062316896 + ], + [ + "▁keystroke", + -14.18758487701416 + ], + [ + "▁victimized", + -14.187586784362791 + ], + [ + "desi", + -14.187602043151855 + ], + [ + "Gabriel", + -14.187629699707031 + ], + [ + "competition", + -14.187644004821776 + ], + [ + "wound", + -14.187644004821776 + ], + [ + "Hood", + -14.18764877319336 + ], + [ + "▁Xp", + -14.187651634216309 + ], + [ + "ELO", + -14.187697410583496 + ], + [ + "philic", + -14.187840461730955 + ], + [ + "▁repainted", + -14.187881469726562 + ], + [ + "Lie", + -14.187891006469728 + ], + [ + "Campus", + -14.187899589538574 + ], + [ + "▁Surrounding", + -14.18794059753418 + ], + [ + "▁2007;", + -14.187942504882812 + ], + [ + "matching", + -14.187952995300291 + ], + [ + "INTER", + -14.187971115112305 + ], + [ + "linux", + -14.187983512878418 + ], + [ + "zle", + -14.188000679016112 + ], + [ + "viagra", + -14.188031196594238 + ], + [ + "▁Oceanic", + -14.18806266784668 + ], + [ + "▁Hajj", + -14.188064575195312 + ], + [ + "ethanol", + -14.188190460205078 + ], + [ + "totally", + -14.188233375549316 + ], + [ + "HSA", + -14.18828582763672 + ], + [ + "▁Mattel", + -14.188372611999512 + ], + [ + "Insight", + -14.188450813293455 + ], + [ + "▁pondered", + -14.188486099243164 + ], + [ + "ECS", + -14.188575744628906 + ], + [ + "▁WILD", + -14.188603401184082 + ], + [ + "▁invasions", + -14.188618659973145 + ], + [ + "▁$64", + -14.188651084899902 + ], + [ + "▁Contribution", + -14.188651084899902 + ], + [ + "▁Globes", + -14.188776969909668 + ], + [ + "stressed", + -14.188820838928224 + ], + [ + "▁0-3", + -14.188824653625488 + ], + [ + "Sk", + -14.188835144042969 + ], + [ + "lized", + -14.188876152038574 + ], + [ + "5.5%", + -14.18887996673584 + ], + [ + "GRADE", + -14.188883781433104 + ], + [ + "▁1856", + -14.188913345336914 + ], + [ + "2:45", + -14.188920974731444 + ], + [ + "▁streamers", + -14.188929557800291 + ], + [ + "elio", + -14.18893051147461 + ], + [ + "6.5%", + -14.189024925231934 + ], + [ + "hura", + -14.189050674438477 + ], + [ + "Blo", + -14.189058303833008 + ], + [ + "▁ERC", + -14.189088821411133 + ], + [ + "auth", + -14.189090728759766 + ], + [ + "Span", + -14.189091682434082 + ], + [ + "Taxi", + -14.189108848571776 + ], + [ + "▁.5", + -14.189122200012209 + ], + [ + "▁Indore", + -14.189164161682127 + ], + [ + "glue", + -14.189167976379396 + ], + [ + "Epi", + -14.189383506774902 + ], + [ + "293", + 
-14.18942642211914 + ], + [ + "564", + -14.189441680908203 + ], + [ + "ERVE", + -14.18950080871582 + ], + [ + "▁2021,", + -14.189510345458984 + ], + [ + "ignan", + -14.189632415771484 + ], + [ + "▁CASA", + -14.18964958190918 + ], + [ + "Commissioner", + -14.189689636230469 + ], + [ + "▁raiding", + -14.189690589904783 + ], + [ + "▁needlessly", + -14.189692497253418 + ], + [ + "cun", + -14.18972110748291 + ], + [ + "ksi", + -14.18978786468506 + ], + [ + "▁Idle", + -14.189814567565918 + ], + [ + "▁sero", + -14.189826011657717 + ], + [ + "oxidant", + -14.189854621887209 + ], + [ + "Laugh", + -14.189886093139648 + ], + [ + "046", + -14.189913749694824 + ], + [ + "euro", + -14.189919471740724 + ], + [ + "▁Micron", + -14.189937591552734 + ], + [ + "▁Automatically", + -14.189950942993164 + ], + [ + "▁Closer", + -14.190024375915527 + ], + [ + "▁denounce", + -14.190053939819336 + ], + [ + "▁Hindustan", + -14.1900634765625 + ], + [ + "▁83%", + -14.19012451171875 + ], + [ + "▁Critic", + -14.19019603729248 + ], + [ + "electro", + -14.19019889831543 + ], + [ + "▁flutes", + -14.190261840820312 + ], + [ + "ELLE", + -14.190275192260742 + ], + [ + "▁1846", + -14.190335273742676 + ], + [ + "▁goodie", + -14.190349578857422 + ], + [ + "leb", + -14.190458297729492 + ], + [ + "Cafe", + -14.190629005432127 + ], + [ + "▁policyholder", + -14.19072437286377 + ], + [ + "Fake", + -14.190792083740234 + ], + [ + "▁passageway", + -14.190884590148926 + ], + [ + "Mario", + -14.190903663635254 + ], + [ + "▁superfood", + -14.19090461730957 + ], + [ + "Concept", + -14.190910339355469 + ], + [ + "▁Lend", + -14.19093132019043 + ], + [ + "brian", + -14.190970420837402 + ], + [ + "981", + -14.19100856781006 + ], + [ + "zeit", + -14.191020011901855 + ], + [ + "▁src", + -14.19102668762207 + ], + [ + "▁Acids", + -14.191027641296388 + ], + [ + "▁piggy", + -14.191033363342283 + ], + [ + "ophyll", + -14.191235542297363 + ], + [ + "▁exaggerate", + -14.191302299499512 + ], + [ + "▁refute", + -14.191317558288574 + ], + [ + "▁FAN", + -14.191482543945312 + ], + [ + "Rio", + -14.191511154174805 + ], + [ + "▁workpiece", + -14.191534042358398 + ], + [ + "OIS", + -14.19156265258789 + ], + [ + "poker", + -14.191572189331056 + ], + [ + "▁silvery", + -14.191651344299316 + ], + [ + "▁coves", + -14.191685676574709 + ], + [ + "▁timetables", + -14.191725730895996 + ], + [ + "▁Nye", + -14.191765785217283 + ], + [ + "Equi", + -14.191798210144045 + ], + [ + "▁Paradox", + -14.191826820373535 + ], + [ + "▁gestation", + -14.191826820373535 + ], + [ + "▁llvm", + -14.191901206970217 + ], + [ + "Opponents", + -14.191902160644531 + ], + [ + "▁Constantinople", + -14.191902160644531 + ], + [ + "▁Dispenser", + -14.191902160644531 + ], + [ + "▁Forbidden", + -14.191902160644531 + ], + [ + "▁Ludhiana", + -14.191902160644531 + ], + [ + "▁MongoDB", + -14.191902160644531 + ], + [ + "▁capacitance", + -14.191902160644531 + ], + [ + "▁dormitory", + -14.191902160644531 + ], + [ + "▁litigator", + -14.191902160644531 + ], + [ + "▁magnolia", + -14.191902160644531 + ], + [ + "wracking", + -14.191903114318848 + ], + [ + "▁Agarwal", + -14.191903114318848 + ], + [ + "▁appliqué", + -14.191903114318848 + ], + [ + "▁chaperone", + -14.191903114318848 + ], + [ + "▁exterminator", + -14.191903114318848 + ], + [ + "▁Goodness", + -14.191905975341797 + ], + [ + "▁Calculation", + -14.191906929016112 + ], + [ + "▁Abilene", + -14.191909790039062 + ], + [ + "▁Pruitt", + -14.19191074371338 + ], + [ + "▁Percussion", + -14.191911697387695 + ], + [ + "▁Shiloh", + -14.19191551208496 + ], + [ + 
"▁disrepair", + -14.191924095153809 + ], + [ + "▁Braille", + -14.191929817199709 + ], + [ + "▁(1988)", + -14.191938400268556 + ], + [ + "▁Buddies", + -14.191950798034668 + ], + [ + "Associate", + -14.191969871520996 + ], + [ + "▁Shabby", + -14.191978454589844 + ], + [ + "▁Niko", + -14.19199275970459 + ], + [ + "▁frills", + -14.192042350769045 + ], + [ + "▁unsigned", + -14.192049980163574 + ], + [ + "▁Stages", + -14.192098617553713 + ], + [ + "shak", + -14.19212245941162 + ], + [ + "▁tenet", + -14.192132949829102 + ], + [ + "▁Distressed", + -14.192201614379885 + ], + [ + "▁pounded", + -14.192235946655272 + ], + [ + "▁UPVC", + -14.19223976135254 + ], + [ + "▁402", + -14.192294120788574 + ], + [ + "tzer", + -14.1923189163208 + ], + [ + "▁Whitt", + -14.192325592041016 + ], + [ + "▁DRAM", + -14.19233512878418 + ], + [ + "▁feedstock", + -14.192337036132812 + ], + [ + "▁scrapes", + -14.192340850830078 + ], + [ + "▁Geeks", + -14.192352294921877 + ], + [ + "▁1845", + -14.192441940307615 + ], + [ + "Confirm", + -14.192459106445312 + ], + [ + "▁Tack", + -14.192472457885742 + ], + [ + "▁274", + -14.192502975463867 + ], + [ + "▁ZX", + -14.19252872467041 + ], + [ + "▁Crouch", + -14.192547798156738 + ], + [ + "▁TRADE", + -14.192549705505373 + ], + [ + "▁colander", + -14.19256591796875 + ], + [ + "nursing", + -14.192583084106444 + ], + [ + "substantial", + -14.192586898803713 + ], + [ + "▁postsecondary", + -14.192639350891112 + ], + [ + "crust", + -14.192657470703123 + ], + [ + "vital", + -14.192703247070312 + ], + [ + "▁Activated", + -14.192716598510742 + ], + [ + "circular", + -14.192729949951172 + ], + [ + "Nat", + -14.192773818969728 + ], + [ + "Extremely", + -14.192776679992676 + ], + [ + "▁bony", + -14.19278335571289 + ], + [ + "conductor", + -14.192805290222168 + ], + [ + "yrus", + -14.192815780639648 + ], + [ + "carpet", + -14.192862510681152 + ], + [ + "▁Coeur", + -14.19286823272705 + ], + [ + "tropic", + -14.192912101745604 + ], + [ + "▁disapprove", + -14.192919731140137 + ], + [ + "▁BNP", + -14.192923545837402 + ], + [ + "▁11.1", + -14.192960739135742 + ], + [ + "baker", + -14.19299602508545 + ], + [ + "#5", + -14.193036079406738 + ], + [ + "Grilled", + -14.193037986755373 + ], + [ + "▁rosewood", + -14.19304084777832 + ], + [ + "yck", + -14.193049430847168 + ], + [ + "▁restructured", + -14.193102836608888 + ], + [ + "▁IUCN", + -14.193207740783691 + ], + [ + "BFC", + -14.193215370178224 + ], + [ + "yay", + -14.193219184875488 + ], + [ + "trainer", + -14.193233489990234 + ], + [ + "▁CDT", + -14.193257331848145 + ], + [ + "▁cryptographic", + -14.193263053894045 + ], + [ + "▁Godly", + -14.193264961242676 + ], + [ + "▁compositional", + -14.193278312683104 + ], + [ + "gz", + -14.193310737609863 + ], + [ + "Illus", + -14.193364143371582 + ], + [ + "▁organises", + -14.193395614624023 + ], + [ + "Wondering", + -14.193408012390137 + ], + [ + "▁Surveyors", + -14.19341278076172 + ], + [ + "3/4", + -14.193614959716797 + ], + [ + "▁Dakar", + -14.19362735748291 + ], + [ + "▁331", + -14.193632125854492 + ], + [ + "▁loosened", + -14.193684577941896 + ], + [ + "38)", + -14.193696975708008 + ], + [ + "▁Fou", + -14.193754196166992 + ], + [ + "▁layover", + -14.19377899169922 + ], + [ + "▁underfloor", + -14.193865776062012 + ], + [ + "▁ferns", + -14.193921089172363 + ], + [ + "ild", + -14.193958282470703 + ], + [ + "▁classifying", + -14.19401741027832 + ], + [ + "▁microns", + -14.194140434265137 + ], + [ + "tactic", + -14.194178581237791 + ], + [ + "▁2007)", + -14.194196701049805 + ], + [ + "▁Vend", + 
-14.194257736206056 + ], + [ + "▁EAT", + -14.194293975830078 + ], + [ + "▁Jazeera", + -14.194316864013672 + ], + [ + "▁UMC", + -14.19434642791748 + ], + [ + "Portion", + -14.194388389587402 + ], + [ + "▁locus", + -14.194422721862791 + ], + [ + "▁Koro", + -14.194432258605955 + ], + [ + "▁Mania", + -14.194528579711914 + ], + [ + "▁calci", + -14.194561004638672 + ], + [ + "▁Crossfit", + -14.194573402404783 + ], + [ + "▁Maharaja", + -14.194595336914062 + ], + [ + "▁Drip", + -14.194624900817873 + ], + [ + "▁Foreclosure", + -14.19469928741455 + ], + [ + "yds", + -14.194721221923828 + ], + [ + "Boom", + -14.194775581359863 + ], + [ + "▁codecs", + -14.194849014282228 + ], + [ + "risto", + -14.194849967956545 + ], + [ + "▁Tej", + -14.194872856140137 + ], + [ + "Tracking", + -14.194873809814451 + ], + [ + "bonus", + -14.194950103759766 + ], + [ + "▁Ambi", + -14.19495677947998 + ], + [ + "▁GAAP", + -14.195137977600098 + ], + [ + "▁805", + -14.195157051086426 + ], + [ + "▁tumblr", + -14.1951904296875 + ], + [ + "▁quirk", + -14.195196151733398 + ], + [ + "TIL", + -14.195256233215332 + ], + [ + "▁jest", + -14.195338249206545 + ], + [ + "▁determinant", + -14.195342063903809 + ], + [ + "▁toenail", + -14.195355415344238 + ], + [ + "▁dumbbell", + -14.195356369018556 + ], + [ + "▁Bela", + -14.195416450500488 + ], + [ + "mio", + -14.19542121887207 + ], + [ + "▁25+", + -14.195516586303713 + ], + [ + "flops", + -14.19555950164795 + ], + [ + "▁garb", + -14.195575714111328 + ], + [ + "▁Kasa", + -14.195609092712402 + ], + [ + "▁Und", + -14.195634841918944 + ], + [ + "schema", + -14.195635795593262 + ], + [ + "▁2,300", + -14.195639610290527 + ], + [ + "Compose", + -14.19564151763916 + ], + [ + "▁Sash", + -14.195645332336426 + ], + [ + "items", + -14.1956787109375 + ], + [ + "▁'70", + -14.19570541381836 + ], + [ + "TRON", + -14.195741653442385 + ], + [ + "kB", + -14.195744514465332 + ], + [ + "▁2.30", + -14.195758819580078 + ], + [ + "Exec", + -14.195764541625977 + ], + [ + "▁Lutz", + -14.19577980041504 + ], + [ + "▁Manger", + -14.19582462310791 + ], + [ + "RANK", + -14.19583511352539 + ], + [ + "EES", + -14.195905685424805 + ], + [ + "lunk", + -14.195987701416016 + ], + [ + "rile", + -14.196002960205078 + ], + [ + "Connecting", + -14.196022987365724 + ], + [ + "Asset", + -14.19602870941162 + ], + [ + "▁Tally", + -14.196048736572266 + ], + [ + "penny", + -14.196123123168944 + ], + [ + "▁quill", + -14.19614028930664 + ], + [ + "Hon", + -14.196147918701172 + ], + [ + "mpel", + -14.196282386779783 + ], + [ + "kashi", + -14.196297645568848 + ], + [ + "▁crows", + -14.196331977844238 + ], + [ + "0.001)", + -14.196386337280272 + ], + [ + "LIB", + -14.19639778137207 + ], + [ + "▁Stepping", + -14.196415901184082 + ], + [ + "iska", + -14.196447372436523 + ], + [ + "IME", + -14.196499824523926 + ], + [ + "0.8%", + -14.196532249450684 + ], + [ + "purring", + -14.196550369262695 + ], + [ + "▁excellently", + -14.19655704498291 + ], + [ + "▁Behold", + -14.196566581726074 + ], + [ + "▁Budd", + -14.196657180786133 + ], + [ + "▁Anfield", + -14.196690559387209 + ], + [ + "▁mA", + -14.196707725524902 + ], + [ + "Templates", + -14.196722984313965 + ], + [ + "▁Nutr", + -14.196772575378418 + ], + [ + "▁Seater", + -14.196819305419922 + ], + [ + "Experiencing", + -14.19690227508545 + ], + [ + "▁Badminton", + -14.19690227508545 + ], + [ + "▁Epidemiology", + -14.19690227508545 + ], + [ + "▁Paediatric", + -14.19690227508545 + ], + [ + "▁Scripps", + -14.19690227508545 + ], + [ + "▁Sequoia", + -14.19690227508545 + ], + [ + "▁Tajikistan", + 
-14.19690227508545 + ], + [ + "▁Ulysses", + -14.19690227508545 + ], + [ + "▁inexplicable", + -14.19690227508545 + ], + [ + "▁overjoyed", + -14.196903228759766 + ], + [ + "▁Flanagan", + -14.196904182434082 + ], + [ + "▁brunette", + -14.196905136108398 + ], + [ + "▁3-3", + -14.196907043457031 + ], + [ + "▁Spiegel", + -14.196907043457031 + ], + [ + "▁Hangzhou", + -14.19690990447998 + ], + [ + "▁Champlain", + -14.196910858154297 + ], + [ + "▁FAFSA", + -14.196910858154297 + ], + [ + "▁Chassis", + -14.196913719177246 + ], + [ + "▁faxing", + -14.196918487548828 + ], + [ + "▁Accelerated", + -14.196919441223145 + ], + [ + "▁sledding", + -14.196925163269045 + ], + [ + "▁parquet", + -14.196927070617676 + ], + [ + "▁didactic", + -14.19693660736084 + ], + [ + "▁greyhound", + -14.196940422058104 + ], + [ + "▁vacationers", + -14.196945190429688 + ], + [ + "▁tamarind", + -14.19695281982422 + ], + [ + "▁fraudsters", + -14.19696044921875 + ], + [ + "▁ragged", + -14.19696044921875 + ], + [ + "Mit", + -14.196969032287598 + ], + [ + "▁Octopus", + -14.196990966796877 + ], + [ + "▁Fredericksburg", + -14.196992874145508 + ], + [ + "▁Cowgirl", + -14.19699478149414 + ], + [ + "▁Aimee", + -14.197004318237305 + ], + [ + "▁Maruti", + -14.197029113769531 + ], + [ + "▁Tsi", + -14.197038650512695 + ], + [ + "▁Propane", + -14.197053909301758 + ], + [ + "▁cautionary", + -14.197053909301758 + ], + [ + "▁Aub", + -14.19708251953125 + ], + [ + "▁Snoop", + -14.197092056274414 + ], + [ + "653", + -14.197098731994627 + ], + [ + "▁gutted", + -14.197103500366213 + ], + [ + "▁Beatty", + -14.197105407714844 + ], + [ + "▁Corsair", + -14.197113037109377 + ], + [ + "sult", + -14.197128295898438 + ], + [ + "▁femur", + -14.197144508361816 + ], + [ + "▁flak", + -14.197162628173828 + ], + [ + "1,900", + -14.197178840637209 + ], + [ + "▁Mishra", + -14.19719123840332 + ], + [ + "mines", + -14.197193145751951 + ], + [ + "▁tombstone", + -14.197265625 + ], + [ + "RDA", + -14.19728660583496 + ], + [ + "Bang", + -14.19729232788086 + ], + [ + "▁candlelight", + -14.197327613830566 + ], + [ + "▁untrained", + -14.197361946105955 + ], + [ + "▁oxidized", + -14.197362899780272 + ], + [ + "Uncle", + -14.19742488861084 + ], + [ + "▁Curiosity", + -14.1974515914917 + ], + [ + "▁predicated", + -14.197495460510254 + ], + [ + "occupied", + -14.19754123687744 + ], + [ + "▁Slipper", + -14.197566986083984 + ], + [ + "▁Brahma", + -14.197592735290527 + ], + [ + "▁superseded", + -14.197678565979004 + ], + [ + "BUSINESS", + -14.197693824768066 + ], + [ + "conservative", + -14.197696685791016 + ], + [ + "Continuous", + -14.19770050048828 + ], + [ + "▁Piston", + -14.19770050048828 + ], + [ + "▁Aragon", + -14.197704315185549 + ], + [ + "▁Weak", + -14.197705268859863 + ], + [ + "Museum", + -14.197707176208496 + ], + [ + "Enough", + -14.197712898254396 + ], + [ + "acquired", + -14.197717666625977 + ], + [ + "Campbell", + -14.197729110717772 + ], + [ + "Cherry", + -14.197745323181152 + ], + [ + "theater", + -14.19775390625 + ], + [ + "▁Lancet", + -14.197755813598633 + ], + [ + "FK", + -14.197757720947266 + ], + [ + "Surface", + -14.19776725769043 + ], + [ + "▁Lakeview", + -14.19794750213623 + ], + [ + "hued", + -14.197975158691406 + ], + [ + "▁Recognized", + -14.19797706604004 + ], + [ + "▁Leia", + -14.197990417480469 + ], + [ + "▁INDIA", + -14.197991371154783 + ], + [ + "interview", + -14.197993278503418 + ], + [ + "movement", + -14.197996139526367 + ], + [ + "Mistake", + -14.198006629943848 + ], + [ + "Arg", + -14.198037147521973 + ], + [ + "worst", + -14.198067665100098 
+ ], + [ + "ossa", + -14.198134422302246 + ], + [ + "coastal", + -14.198174476623535 + ], + [ + "Valentine", + -14.198198318481444 + ], + [ + "LIFT", + -14.198211669921877 + ], + [ + "▁Kava", + -14.198240280151367 + ], + [ + "▁Jap", + -14.1982421875 + ], + [ + "vp", + -14.198288917541504 + ], + [ + "rigo", + -14.198320388793944 + ], + [ + "▁DONE", + -14.198370933532717 + ], + [ + "rink", + -14.198375701904297 + ], + [ + "▁Yoda", + -14.198379516601562 + ], + [ + "▁detectable", + -14.198381423950195 + ], + [ + "mila", + -14.198392868041992 + ], + [ + "▁hinting", + -14.198404312133787 + ], + [ + "aram", + -14.1984281539917 + ], + [ + "▁disqualify", + -14.198478698730469 + ], + [ + "▁blowers", + -14.198504447937012 + ], + [ + "▁Bum", + -14.198509216308594 + ], + [ + "▁oceanic", + -14.19851016998291 + ], + [ + "ifica", + -14.198614120483398 + ], + [ + "Sheet", + -14.198657989501951 + ], + [ + "yster", + -14.198675155639648 + ], + [ + "▁chris", + -14.19875431060791 + ], + [ + "▁ensues", + -14.198779106140137 + ], + [ + "▁Latex", + -14.198834419250488 + ], + [ + "▁responder", + -14.198854446411133 + ], + [ + "▁courtship", + -14.198949813842772 + ], + [ + "▁bibs", + -14.198967933654783 + ], + [ + "▁Blackboard", + -14.199076652526855 + ], + [ + "031", + -14.199182510375977 + ], + [ + "Bowl", + -14.199188232421877 + ], + [ + "Infographic", + -14.19926643371582 + ], + [ + "universal", + -14.199295043945312 + ], + [ + "raid", + -14.199400901794434 + ], + [ + "Savor", + -14.19943141937256 + ], + [ + "▁Siren", + -14.19947338104248 + ], + [ + "▁doubtless", + -14.199514389038086 + ], + [ + "▁hanged", + -14.19955825805664 + ], + [ + "Masters", + -14.19960594177246 + ], + [ + "dé", + -14.199641227722168 + ], + [ + "▁failings", + -14.19969367980957 + ], + [ + "▁delusion", + -14.199800491333008 + ], + [ + "▁fundraise", + -14.199979782104492 + ], + [ + "▁brightens", + -14.200031280517578 + ], + [ + "▁underlines", + -14.200037002563477 + ], + [ + "▁LRT", + -14.200072288513184 + ], + [ + "▁lessened", + -14.200146675109863 + ], + [ + "Damn", + -14.20020866394043 + ], + [ + "▁Sealed", + -14.200213432312012 + ], + [ + "Solve", + -14.200268745422363 + ], + [ + "Rite", + -14.200355529785156 + ], + [ + "addy", + -14.200400352478027 + ], + [ + "▁varietal", + -14.20040225982666 + ], + [ + "▁fairing", + -14.200428009033203 + ], + [ + "ijo", + -14.200499534606934 + ], + [ + "759", + -14.200531959533691 + ], + [ + "49)", + -14.200602531433104 + ], + [ + "▁pacemaker", + -14.200604438781738 + ], + [ + "▁Sill", + -14.200641632080078 + ], + [ + "▁slipper", + -14.20065689086914 + ], + [ + "odine", + -14.20072078704834 + ], + [ + "ggi", + -14.200722694396973 + ], + [ + "▁Sake", + -14.200794219970703 + ], + [ + "862", + -14.201016426086426 + ], + [ + "▁Moda", + -14.201030731201172 + ], + [ + "Natal", + -14.2011137008667 + ], + [ + "cosa", + -14.201120376586914 + ], + [ + "▁valentines", + -14.201129913330078 + ], + [ + "▁Wallis", + -14.20120620727539 + ], + [ + "UBE", + -14.201208114624023 + ], + [ + "NTU", + -14.201210021972656 + ], + [ + "▁Obtaining", + -14.201210975646973 + ], + [ + "▁js", + -14.201251029968262 + ], + [ + "▁Donut", + -14.20133113861084 + ], + [ + "logger", + -14.201334953308104 + ], + [ + "▁cripple", + -14.201350212097168 + ], + [ + "▁615", + -14.201436042785645 + ], + [ + "▁SVP", + -14.201462745666504 + ], + [ + "▁Abba", + -14.201467514038086 + ], + [ + "▁refreshingly", + -14.201501846313477 + ], + [ + "Lev", + -14.201563835144045 + ], + [ + "▁scandalous", + -14.201600074768066 + ], + [ + "▁Broccoli", + 
-14.201601028442385 + ], + [ + "▁Choral", + -14.201642036437988 + ], + [ + "▁kennels", + -14.201654434204102 + ], + [ + "▁Rez", + -14.201736450195312 + ], + [ + "MRS", + -14.201752662658691 + ], + [ + "▁OPI", + -14.20177173614502 + ], + [ + "▁Khar", + -14.201772689819336 + ], + [ + "▁Pavel", + -14.201787948608398 + ], + [ + "▁refract", + -14.201821327209473 + ], + [ + "▁Exxon", + -14.201826095581056 + ], + [ + "NAR", + -14.201859474182127 + ], + [ + "SHIFT", + -14.201911926269531 + ], + [ + "▁Alfonso", + -14.201927185058594 + ], + [ + "▁Bombardier", + -14.201927185058594 + ], + [ + "▁Breckenridge", + -14.201927185058594 + ], + [ + "▁Mahatma", + -14.201927185058594 + ], + [ + "▁Typhoon", + -14.201927185058594 + ], + [ + "▁almighty", + -14.201927185058594 + ], + [ + "▁cassava", + -14.201927185058594 + ], + [ + "▁clonazepam", + -14.201927185058594 + ], + [ + "▁Obligation", + -14.20192813873291 + ], + [ + "▁appellation", + -14.20192813873291 + ], + [ + "▁indelible", + -14.20192813873291 + ], + [ + "▁phasing", + -14.20192813873291 + ], + [ + "▁solitaire", + -14.20192813873291 + ], + [ + "▁Xinjiang", + -14.201930046081545 + ], + [ + "▁caustic", + -14.201930046081545 + ], + [ + "▁entanglement", + -14.201930046081545 + ], + [ + "▁illegitimate", + -14.201930046081545 + ], + [ + "▁Servicing", + -14.20193099975586 + ], + [ + "▁fasciitis", + -14.20193099975586 + ], + [ + "▁Bertrand", + -14.201931953430176 + ], + [ + "▁bonsai", + -14.201931953430176 + ], + [ + "Pseudo", + -14.201936721801758 + ], + [ + "▁Claudio", + -14.201939582824709 + ], + [ + "▁Ghent", + -14.201942443847656 + ], + [ + "▁Dioxide", + -14.20195198059082 + ], + [ + "▁Meteorological", + -14.201959609985352 + ], + [ + "▁Bubba", + -14.20197296142578 + ], + [ + "▁Mabel", + -14.201976776123049 + ], + [ + "Navigating", + -14.201995849609377 + ], + [ + "ICH", + -14.202008247375488 + ], + [ + "▁Warrington", + -14.202025413513184 + ], + [ + "▁Barefoot", + -14.202033996582031 + ], + [ + "▁gentler", + -14.20205307006836 + ], + [ + "▁wurde", + -14.202058792114258 + ], + [ + "▁meteorologist", + -14.202077865600586 + ], + [ + "▁equaliser", + -14.202078819274902 + ], + [ + "▁impasse", + -14.202085494995115 + ], + [ + "39)", + -14.202093124389648 + ], + [ + "GUARD", + -14.202170372009276 + ], + [ + "▁foreclosed", + -14.202227592468262 + ], + [ + "Philippians", + -14.202239990234377 + ], + [ + "▁profitably", + -14.202274322509766 + ], + [ + "▁HAIR", + -14.202324867248535 + ], + [ + "▁gummy", + -14.202324867248535 + ], + [ + "-130", + -14.202348709106444 + ], + [ + "▁Findlay", + -14.202354431152344 + ], + [ + "hne", + -14.202363967895508 + ], + [ + "PCA", + -14.202364921569824 + ], + [ + "▁whomever", + -14.202374458312988 + ], + [ + "▁Ubud", + -14.202428817749023 + ], + [ + "▁disconnection", + -14.202484130859377 + ], + [ + "▁APO", + -14.20249080657959 + ], + [ + "▁Shady", + -14.202519416809082 + ], + [ + "▁ditches", + -14.202542304992676 + ], + [ + "yaki", + -14.202563285827637 + ], + [ + "▁(1.5", + -14.202624320983888 + ], + [ + "▁Adrienne", + -14.202627182006836 + ], + [ + "LAW", + -14.202655792236328 + ], + [ + "SOM", + -14.202680587768556 + ], + [ + "▁flickr", + -14.202680587768556 + ], + [ + "loa", + -14.202692985534668 + ], + [ + "zana", + -14.202713966369627 + ], + [ + "VAR", + -14.202767372131348 + ], + [ + "Grass", + -14.202800750732422 + ], + [ + "▁underscored", + -14.20280647277832 + ], + [ + "HTC", + -14.202840805053713 + ], + [ + "▁consented", + -14.202850341796877 + ], + [ + "Detroit", + -14.202869415283203 + ], + [ + "Soviet", + 
-14.202869415283203 + ], + [ + "▁Intuitive", + -14.20287036895752 + ], + [ + "▁Maytag", + -14.202926635742188 + ], + [ + "Franklin", + -14.20295524597168 + ], + [ + "discovered", + -14.202963829040527 + ], + [ + "Utah", + -14.202972412109377 + ], + [ + "▁Dug", + -14.203017234802246 + ], + [ + "Trak", + -14.203033447265623 + ], + [ + "▁FTA", + -14.20305061340332 + ], + [ + "▁Diff", + -14.20311164855957 + ], + [ + "▁Atta", + -14.203166961669922 + ], + [ + "Slowly", + -14.203198432922363 + ], + [ + "▁Whitetail", + -14.203274726867676 + ], + [ + "cito", + -14.203280448913574 + ], + [ + "▁Nha", + -14.203288078308104 + ], + [ + "Sunny", + -14.203322410583496 + ], + [ + "▁Behr", + -14.203357696533203 + ], + [ + "▁Reseller", + -14.203365325927734 + ], + [ + "scheduled", + -14.203375816345217 + ], + [ + "▁crosswalk", + -14.203377723693848 + ], + [ + "▁Xeno", + -14.20338249206543 + ], + [ + "laf", + -14.203425407409668 + ], + [ + "▁LEARN", + -14.203463554382324 + ], + [ + "▁Princes", + -14.20347785949707 + ], + [ + "▁Surveyor", + -14.203479766845703 + ], + [ + "▁Jat", + -14.203497886657717 + ], + [ + "▁toenails", + -14.20351219177246 + ], + [ + "Ruby", + -14.203531265258787 + ], + [ + "▁tooltip", + -14.203536987304688 + ], + [ + "Pav", + -14.203584671020508 + ], + [ + "jm", + -14.203628540039062 + ], + [ + "▁Cyan", + -14.203646659851074 + ], + [ + "SCI", + -14.203713417053224 + ], + [ + "▁(11)", + -14.203717231750488 + ], + [ + "▁Restart", + -14.203728675842283 + ], + [ + "sheng", + -14.203742980957031 + ], + [ + "▁Tic", + -14.203760147094728 + ], + [ + "▁EXTRA", + -14.203786849975586 + ], + [ + "▁888-2", + -14.203787803649902 + ], + [ + "BOC", + -14.20385456085205 + ], + [ + "▁reloading", + -14.203861236572266 + ], + [ + "▁Fink", + -14.203914642333984 + ], + [ + "▁£11", + -14.20400047302246 + ], + [ + "plants", + -14.204008102416992 + ], + [ + "▁7:15", + -14.20403003692627 + ], + [ + "QP", + -14.204054832458496 + ], + [ + "▁potion", + -14.204059600830078 + ], + [ + "ection", + -14.20407009124756 + ], + [ + "Tile", + -14.204109191894531 + ], + [ + "deposit", + -14.204154014587402 + ], + [ + "▁-0.", + -14.204276084899902 + ], + [ + "▁Pp", + -14.204351425170898 + ], + [ + "▁JF", + -14.2044038772583 + ], + [ + "Brew", + -14.204413414001465 + ], + [ + "▁rebooted", + -14.204421997070312 + ], + [ + "▁tolls", + -14.204438209533691 + ], + [ + "Acting", + -14.204460144042969 + ], + [ + "adjustable", + -14.204506874084473 + ], + [ + "▁asymmetry", + -14.204523086547852 + ], + [ + "tracked", + -14.20457935333252 + ], + [ + "▁flocking", + -14.204713821411133 + ], + [ + "▁Structured", + -14.204752922058104 + ], + [ + "rake", + -14.20479679107666 + ], + [ + "▁plasticity", + -14.204819679260254 + ], + [ + "▁bachelors", + -14.20482349395752 + ], + [ + "Oooh", + -14.204861640930176 + ], + [ + "trem", + -14.204999923706056 + ], + [ + "▁metered", + -14.205025672912598 + ], + [ + "▁2008;", + -14.205029487609863 + ], + [ + "pik", + -14.205039978027344 + ], + [ + "Requires", + -14.205095291137695 + ], + [ + "▁5-3", + -14.20514965057373 + ], + [ + "▁FAB", + -14.205204010009766 + ], + [ + "Sandy", + -14.205215454101562 + ], + [ + "Adults", + -14.2052583694458 + ], + [ + "Cooking", + -14.205341339111328 + ], + [ + "prove", + -14.20535373687744 + ], + [ + "▁LCC", + -14.205377578735352 + ], + [ + "▁translational", + -14.20542049407959 + ], + [ + "▁Giga", + -14.205526351928713 + ], + [ + "▁MOU", + -14.20555591583252 + ], + [ + "Statement", + -14.205645561218262 + ], + [ + "▁totem", + -14.20565700531006 + ], + [ + "radar", + 
-14.205679893493652 + ], + [ + "▁pared", + -14.20569133758545 + ], + [ + "▁lingo", + -14.205730438232422 + ], + [ + "alcohol", + -14.205817222595217 + ], + [ + "▁Expenses", + -14.205899238586426 + ], + [ + "▁PQ", + -14.205900192260742 + ], + [ + "▁Sow", + -14.20590877532959 + ], + [ + "parties", + -14.205924987792969 + ], + [ + "cherry", + -14.205955505371094 + ], + [ + "timers", + -14.206058502197266 + ], + [ + "▁thrillers", + -14.206076622009276 + ], + [ + "Leader", + -14.206110954284668 + ], + [ + "Kor", + -14.20628261566162 + ], + [ + "▁Giro", + -14.206297874450684 + ], + [ + "▁bribes", + -14.206366539001465 + ], + [ + "451", + -14.206429481506348 + ], + [ + "▁neg", + -14.206497192382812 + ], + [ + "prince", + -14.20655345916748 + ], + [ + "intuitive", + -14.206647872924805 + ], + [ + "▁Ammon", + -14.206772804260254 + ], + [ + "uggle", + -14.206825256347656 + ], + [ + "lston", + -14.206827163696287 + ], + [ + "▁rappers", + -14.20683479309082 + ], + [ + "▁byproduct", + -14.206843376159668 + ], + [ + "TORY", + -14.20688247680664 + ], + [ + "Republican", + -14.20689296722412 + ], + [ + "oce", + -14.206913948059082 + ], + [ + "▁Olaf", + -14.20695972442627 + ], + [ + "SOLUTION", + -14.206966400146484 + ], + [ + "▁Colosseum", + -14.20697784423828 + ], + [ + "▁Excellency", + -14.20697784423828 + ], + [ + "▁Southwark", + -14.20697784423828 + ], + [ + "▁Taekwondo", + -14.20697784423828 + ], + [ + "▁Trafalgar", + -14.20697784423828 + ], + [ + "▁monotony", + -14.20697784423828 + ], + [ + "▁reciprocity", + -14.20697784423828 + ], + [ + "▁superfluous", + -14.20697784423828 + ], + [ + "▁frenetic", + -14.206978797912598 + ], + [ + "▁wharf", + -14.206978797912598 + ], + [ + "▁lobbies", + -14.206979751586914 + ], + [ + "▁outscored", + -14.20698070526123 + ], + [ + "▁viscous", + -14.206981658935549 + ], + [ + "luk", + -14.206982612609863 + ], + [ + "▁Ebony", + -14.206982612609863 + ], + [ + "▁resuscitation", + -14.20698356628418 + ], + [ + "▁ABOVE", + -14.206984519958496 + ], + [ + "▁herbaceous", + -14.206985473632812 + ], + [ + "▁cremated", + -14.206991195678713 + ], + [ + "▁creatine", + -14.206993103027344 + ], + [ + "▁commotion", + -14.20701026916504 + ], + [ + "▁Sudanese", + -14.207013130187988 + ], + [ + "carp", + -14.207014083862305 + ], + [ + "▁Vargas", + -14.20701503753662 + ], + [ + "▁capillary", + -14.207021713256836 + ], + [ + "▁staffer", + -14.207023620605469 + ], + [ + "▁disinfection", + -14.207024574279783 + ], + [ + "▁prioritization", + -14.207029342651367 + ], + [ + "▁Reflux", + -14.207033157348633 + ], + [ + "▁tiara", + -14.207038879394531 + ], + [ + "▁buoyant", + -14.207045555114746 + ], + [ + "▁masse", + -14.207051277160645 + ], + [ + "▁Wiggins", + -14.207056045532228 + ], + [ + "▁Bandcamp", + -14.207060813903809 + ], + [ + "▁Lauder", + -14.207090377807615 + ], + [ + "▁Valium", + -14.207098960876465 + ], + [ + "▁(43", + -14.207103729248049 + ], + [ + "▁flowchart", + -14.207107543945312 + ], + [ + "compact", + -14.20711612701416 + ], + [ + "catalog", + -14.207139015197754 + ], + [ + "▁mocha", + -14.207148551940918 + ], + [ + "▁unproductive", + -14.207157135009766 + ], + [ + "▁reciting", + -14.20717430114746 + ], + [ + "▁Prepaid", + -14.207178115844728 + ], + [ + "▁footpaths", + -14.207181930541992 + ], + [ + "▁Finley", + -14.207191467285156 + ], + [ + "▁Operated", + -14.20722484588623 + ], + [ + "▁Woodbury", + -14.207226753234863 + ], + [ + "▁Breakdown", + -14.207250595092772 + ], + [ + "▁seashore", + -14.207258224487305 + ], + [ + "yoga", + -14.207334518432615 + ], + [ + "▁scrubber", 
+ -14.20742893218994 + ], + [ + "shoring", + -14.20744514465332 + ], + [ + "▁Poem", + -14.207502365112305 + ], + [ + "▁snob", + -14.207552909851074 + ], + [ + "▁528", + -14.207560539245604 + ], + [ + "Lil", + -14.207645416259766 + ], + [ + "philip", + -14.207700729370115 + ], + [ + "▁Communicate", + -14.207769393920898 + ], + [ + "FILE", + -14.20780086517334 + ], + [ + "▁burglars", + -14.207836151123049 + ], + [ + "▁ssh", + -14.207840919494627 + ], + [ + "▁Tito", + -14.207877159118652 + ], + [ + "▁weighty", + -14.207965850830078 + ], + [ + "▁IPR", + -14.208017349243164 + ], + [ + "Dorothy", + -14.20803928375244 + ], + [ + "sustaining", + -14.208047866821287 + ], + [ + "Broken", + -14.208060264587402 + ], + [ + "▁WPA", + -14.208065032958984 + ], + [ + "▁mobilized", + -14.20809841156006 + ], + [ + "Plants", + -14.208136558532717 + ], + [ + "Webster", + -14.208185195922852 + ], + [ + "avu", + -14.208187103271484 + ], + [ + "▁entailed", + -14.208203315734863 + ], + [ + "Attack", + -14.208239555358888 + ], + [ + "▁subsided", + -14.208240509033203 + ], + [ + "▁Germ", + -14.208252906799316 + ], + [ + "symmetric", + -14.208294868469238 + ], + [ + "914", + -14.208416938781738 + ], + [ + "▁Thriller", + -14.20843505859375 + ], + [ + "▁plagues", + -14.208455085754396 + ], + [ + "lump", + -14.208463668823242 + ], + [ + "Tired", + -14.208480834960938 + ], + [ + "bacter", + -14.208497047424316 + ], + [ + "▁Cadets", + -14.208511352539062 + ], + [ + "▁444", + -14.20860195159912 + ], + [ + "▁Burrow", + -14.208612442016602 + ], + [ + "glazed", + -14.208641052246094 + ], + [ + "▁eradicated", + -14.208707809448242 + ], + [ + "▁Shipment", + -14.208758354187012 + ], + [ + "▁Predictions", + -14.208792686462402 + ], + [ + "EEK", + -14.208806037902832 + ], + [ + "talent", + -14.208822250366213 + ], + [ + "▁gratuit", + -14.208822250366213 + ], + [ + "Bil", + -14.208969116210938 + ], + [ + "▁Cui", + -14.20898151397705 + ], + [ + "▁Sama", + -14.209047317504885 + ], + [ + "Mg", + -14.209065437316896 + ], + [ + "▁uninstalled", + -14.209142684936523 + ], + [ + "▁Matcha", + -14.209177017211914 + ], + [ + "▁faire", + -14.209211349487305 + ], + [ + "TTS", + -14.209257125854492 + ], + [ + "_2", + -14.20938491821289 + ], + [ + "researched", + -14.209494590759276 + ], + [ + "▁grads", + -14.209512710571287 + ], + [ + "getter", + -14.209519386291504 + ], + [ + "▁slums", + -14.209540367126465 + ], + [ + "Highway", + -14.209553718566896 + ], + [ + "nutrition", + -14.209558486938477 + ], + [ + "bool", + -14.209590911865234 + ], + [ + "▁11:1", + -14.20960807800293 + ], + [ + "knock", + -14.20961570739746 + ], + [ + "▁Manson", + -14.209616661071776 + ], + [ + "Projects", + -14.209620475769045 + ], + [ + "hq", + -14.20963191986084 + ], + [ + "OMS", + -14.209635734558104 + ], + [ + "▁Esca", + -14.209638595581056 + ], + [ + "▁extinguishers", + -14.209662437438965 + ], + [ + "▁VCR", + -14.209725379943848 + ], + [ + "Result", + -14.20975399017334 + ], + [ + "▁commemorated", + -14.209754943847656 + ], + [ + "▁DCP", + -14.209756851196287 + ], + [ + "COG", + -14.209760665893556 + ], + [ + "▁EAP", + -14.209783554077148 + ], + [ + "chey", + -14.209895133972168 + ], + [ + "Oz", + -14.20995044708252 + ], + [ + "herb", + -14.20995807647705 + ], + [ + "daddy", + -14.209964752197266 + ], + [ + "▁Beverages", + -14.209968566894531 + ], + [ + "▁courtyards", + -14.210000038146973 + ], + [ + "▁blaster", + -14.2100248336792 + ], + [ + "Brite", + -14.210041046142578 + ], + [ + "▁france", + -14.210078239440918 + ], + [ + "▁Chev", + -14.210118293762209 + ], 
+ [ + "▁peppery", + -14.2101469039917 + ], + [ + "OBA", + -14.210175514221191 + ], + [ + "▁Plas", + -14.210195541381836 + ], + [ + "Archive", + -14.210200309753418 + ], + [ + "▁cuteness", + -14.210245132446287 + ], + [ + "accurate", + -14.210298538208008 + ], + [ + "habitant", + -14.210307121276855 + ], + [ + "▁Toto", + -14.210329055786133 + ], + [ + "Beta", + -14.210345268249512 + ], + [ + "SMO", + -14.210375785827637 + ], + [ + "dinger", + -14.2103910446167 + ], + [ + "▁embarrass", + -14.210391998291016 + ], + [ + "nanda", + -14.21047306060791 + ], + [ + "spent", + -14.210494995117188 + ], + [ + "▁Protest", + -14.210607528686523 + ], + [ + "▁Tiki", + -14.2106294631958 + ], + [ + "▁pistachio", + -14.210634231567385 + ], + [ + "lige", + -14.210640907287598 + ], + [ + "▁hehe", + -14.210646629333496 + ], + [ + "Miles", + -14.210649490356444 + ], + [ + "▁brainer", + -14.210671424865724 + ], + [ + "▁TAB", + -14.21067237854004 + ], + [ + "▁Estes", + -14.210803031921388 + ], + [ + "▁constellations", + -14.210833549499512 + ], + [ + "itol", + -14.210850715637209 + ], + [ + "▁1946,", + -14.210861206054688 + ], + [ + "▁Zipp", + -14.210870742797852 + ], + [ + "ilde", + -14.210891723632812 + ], + [ + "▁APN", + -14.21099090576172 + ], + [ + "▁Collecting", + -14.21101188659668 + ], + [ + "blocks", + -14.211036682128906 + ], + [ + "▁(42", + -14.211037635803224 + ], + [ + "▁15.5", + -14.211041450500488 + ], + [ + "▁hobbyist", + -14.211100578308104 + ], + [ + "illegal", + -14.211122512817385 + ], + [ + "▁RVs", + -14.211136817932127 + ], + [ + "rmo", + -14.211138725280762 + ], + [ + "FISH", + -14.211195945739746 + ], + [ + "imbo", + -14.211268424987791 + ], + [ + "▁249", + -14.211339950561523 + ], + [ + "▁Mohs", + -14.211363792419434 + ], + [ + "▁Upgrades", + -14.211383819580078 + ], + [ + "▁naturalistic", + -14.211384773254396 + ], + [ + "erland", + -14.211400985717772 + ], + [ + "▁predicate", + -14.211453437805176 + ], + [ + "▁handrail", + -14.211488723754885 + ], + [ + "▁SSN", + -14.2116117477417 + ], + [ + "▁soloists", + -14.211706161499023 + ], + [ + "OSA", + -14.211713790893556 + ], + [ + "▁slouch", + -14.211810111999512 + ], + [ + "▁photocopy", + -14.211873054504396 + ], + [ + "anji", + -14.211894989013672 + ], + [ + "▁cheerleaders", + -14.211905479431152 + ], + [ + "realistic", + -14.21191120147705 + ], + [ + "eppe", + -14.211989402770996 + ], + [ + "▁emphatic", + -14.212028503417969 + ], + [ + "▁Cabbage", + -14.212054252624512 + ], + [ + "▁Gothenburg", + -14.212054252624512 + ], + [ + "▁Lollipop", + -14.212054252624512 + ], + [ + "▁capacitive", + -14.212054252624512 + ], + [ + "▁colliding", + -14.212054252624512 + ], + [ + "▁confidant", + -14.212054252624512 + ], + [ + "▁contrived", + -14.212054252624512 + ], + [ + "▁disconcerting", + -14.212054252624512 + ], + [ + "▁embassies", + -14.212054252624512 + ], + [ + "▁hemoglobin", + -14.212054252624512 + ], + [ + "▁maestro", + -14.212054252624512 + ], + [ + "▁pretentious", + -14.212054252624512 + ], + [ + "▁shuffling", + -14.212054252624512 + ], + [ + "▁ebony", + -14.212055206298828 + ], + [ + "▁Bridesmaid", + -14.21205711364746 + ], + [ + "▁Dickerson", + -14.21205711364746 + ], + [ + "▁protrude", + -14.21205711364746 + ], + [ + "▁proliferate", + -14.212058067321776 + ], + [ + "▁zigzag", + -14.21205997467041 + ], + [ + "▁cursing", + -14.21206283569336 + ], + [ + "▁Maternal", + -14.212063789367676 + ], + [ + "▁Politico", + -14.212066650390623 + ], + [ + "▁LIVING", + -14.212068557739258 + ], + [ + "▁FINALLY", + -14.212069511413574 + ], + [ + 
"▁recursive", + -14.212082862854004 + ], + [ + "▁soffit", + -14.212084770202637 + ], + [ + "▁Champaign", + -14.212087631225586 + ], + [ + "▁Marbella", + -14.212096214294434 + ], + [ + "▁Akshay", + -14.212098121643066 + ], + [ + "▁hollywood", + -14.212098121643066 + ], + [ + "▁impenetrable", + -14.212100982666016 + ], + [ + "▁Heavyweight", + -14.212121963500977 + ], + [ + "▁Hadid", + -14.21214771270752 + ], + [ + "▁Enduro", + -14.212159156799316 + ], + [ + "▁jugs", + -14.212173461914062 + ], + [ + "▁Massacre", + -14.212218284606934 + ], + [ + "▁Danbury", + -14.212257385253906 + ], + [ + "▁webmasters", + -14.212294578552246 + ], + [ + "▁categorised", + -14.212299346923828 + ], + [ + "▁remastered", + -14.212300300598145 + ], + [ + "ERY", + -14.212309837341309 + ], + [ + "▁Beagle", + -14.212337493896484 + ], + [ + "▁$8,000", + -14.212340354919434 + ], + [ + "▁memos", + -14.212346076965332 + ], + [ + "▁orientations", + -14.212349891662598 + ], + [ + "▁worshipping", + -14.212374687194824 + ], + [ + "DAVID", + -14.212383270263672 + ], + [ + "▁constable", + -14.212448120117188 + ], + [ + "–18", + -14.212481498718262 + ], + [ + "Sponsor", + -14.212498664855955 + ], + [ + "▁lanai", + -14.212509155273438 + ], + [ + "Striking", + -14.212510108947754 + ], + [ + "▁MEDIA", + -14.212604522705078 + ], + [ + "▁Shopper", + -14.212635040283203 + ], + [ + "▁1818", + -14.212716102600098 + ], + [ + "▁Koda", + -14.212719917297363 + ], + [ + "▁dabble", + -14.212726593017578 + ], + [ + "Mala", + -14.212841987609863 + ], + [ + "▁interrelated", + -14.212846755981444 + ], + [ + "▁holidaymakers", + -14.212875366210938 + ], + [ + "valve", + -14.212945938110352 + ], + [ + "▁channeled", + -14.212965965270996 + ], + [ + "▁jammer", + -14.212968826293944 + ], + [ + "ERRY", + -14.213011741638184 + ], + [ + "▁ingesting", + -14.213027954101562 + ], + [ + "▁Mede", + -14.213075637817385 + ], + [ + "▁criticise", + -14.213092803955078 + ], + [ + "▁oilfield", + -14.21311378479004 + ], + [ + "Lecture", + -14.21322536468506 + ], + [ + "▁Correspondence", + -14.213247299194336 + ], + [ + "Crime", + -14.21325969696045 + ], + [ + "Melbourne", + -14.213263511657717 + ], + [ + "Lenovo", + -14.213266372680664 + ], + [ + "financing", + -14.21326732635498 + ], + [ + "Turkish", + -14.213272094726562 + ], + [ + "▁Parental", + -14.213272094726562 + ], + [ + "▁$2.4", + -14.213275909423828 + ], + [ + "▁Respond", + -14.213318824768066 + ], + [ + "▁trolleys", + -14.213324546813965 + ], + [ + "diversity", + -14.213351249694824 + ], + [ + "Interactive", + -14.213353157043455 + ], + [ + "▁Fabio", + -14.213358879089355 + ], + [ + "oye", + -14.213363647460938 + ], + [ + "▁raccoons", + -14.213415145874023 + ], + [ + "boli", + -14.213448524475098 + ], + [ + "▁Haji", + -14.213489532470703 + ], + [ + "▁nay", + -14.213558197021484 + ], + [ + "UPDATED", + -14.2135648727417 + ], + [ + "Agree", + -14.213566780090332 + ], + [ + "▁UE", + -14.213615417480469 + ], + [ + "▁VBS", + -14.21363925933838 + ], + [ + "ulan", + -14.213666915893556 + ], + [ + "▁Coupled", + -14.213717460632324 + ], + [ + "▁boldness", + -14.213746070861816 + ], + [ + "LTD", + -14.213783264160156 + ], + [ + "441", + -14.213790893554688 + ], + [ + "▁BAN", + -14.213796615600586 + ], + [ + "▁paintwork", + -14.213808059692385 + ], + [ + "stasis", + -14.213919639587402 + ], + [ + "STC", + -14.213981628417969 + ], + [ + "graft", + -14.213984489440918 + ], + [ + "limiting", + -14.214105606079102 + ], + [ + "▁storytellers", + -14.214107513427734 + ], + [ + "prison", + -14.214109420776367 + ], + [ + 
"bats", + -14.214115142822266 + ], + [ + "Kal", + -14.214131355285645 + ], + [ + "uble", + -14.214171409606934 + ], + [ + "▁GUIDE", + -14.214218139648438 + ], + [ + "▁vanishing", + -14.214225769042969 + ], + [ + "▁Limb", + -14.214241027832031 + ], + [ + "frost", + -14.21426486968994 + ], + [ + "▁PCBs", + -14.214354515075684 + ], + [ + "▁Swag", + -14.214360237121582 + ], + [ + "▁logon", + -14.214384078979492 + ], + [ + "▁Garnet", + -14.214390754699709 + ], + [ + "▁Plaintiffs", + -14.214405059814451 + ], + [ + "▁crafters", + -14.214421272277832 + ], + [ + "Shah", + -14.21445369720459 + ], + [ + "958", + -14.21449089050293 + ], + [ + "Tower", + -14.214537620544434 + ], + [ + "silk", + -14.214585304260254 + ], + [ + "agu", + -14.214651107788086 + ], + [ + "▁contesting", + -14.214725494384766 + ], + [ + "▁Celebrities", + -14.214773178100586 + ], + [ + "▁Hama", + -14.214777946472168 + ], + [ + "▁discourses", + -14.214783668518066 + ], + [ + "▁wounding", + -14.214791297912598 + ], + [ + "▁Cade", + -14.214794158935549 + ], + [ + "▁Shap", + -14.214797973632812 + ], + [ + "Nam", + -14.21482753753662 + ], + [ + "▁Concern", + -14.214874267578123 + ], + [ + "▁Tyres", + -14.214882850646973 + ], + [ + "▁(30)", + -14.214942932128906 + ], + [ + "Scout", + -14.21495532989502 + ], + [ + "▁Robotic", + -14.215002059936523 + ], + [ + "▁519", + -14.215004920959473 + ], + [ + "▁Pillar", + -14.215049743652344 + ], + [ + "▁Alexandr", + -14.215054512023926 + ], + [ + "tough", + -14.21509075164795 + ], + [ + "▁precede", + -14.215113639831545 + ], + [ + "▁09:00", + -14.215134620666504 + ], + [ + "cmd", + -14.215139389038086 + ], + [ + "▁Gazebo", + -14.215184211730955 + ], + [ + "▁cookers", + -14.215232849121094 + ], + [ + "▁Pastel", + -14.21524143218994 + ], + [ + "HOU", + -14.215325355529783 + ], + [ + "▁Circles", + -14.215407371520996 + ], + [ + "eker", + -14.215510368347168 + ], + [ + "▁Smit", + -14.215557098388672 + ], + [ + "797", + -14.21564769744873 + ], + [ + "▁LOVES", + -14.215662956237791 + ], + [ + "▁(48", + -14.215696334838867 + ], + [ + "ALC", + -14.215699195861816 + ], + [ + "▁specialisation", + -14.21571922302246 + ], + [ + "▁Cheri", + -14.215734481811523 + ], + [ + "5-11", + -14.215761184692385 + ], + [ + "▁raccoon", + -14.215789794921877 + ], + [ + "▁caliper", + -14.215794563293455 + ], + [ + "▁utensil", + -14.215807914733888 + ], + [ + "▁Tofu", + -14.215822219848633 + ], + [ + "▁Mandir", + -14.215826988220217 + ], + [ + "▁feudal", + -14.21583652496338 + ], + [ + "▁YU", + -14.215864181518556 + ], + [ + "Combined", + -14.215879440307615 + ], + [ + "▁heft", + -14.215916633605955 + ], + [ + "▁Luk", + -14.215988159179688 + ], + [ + "▁Arriving", + -14.21603012084961 + ], + [ + "▁Batter", + -14.21607494354248 + ], + [ + "▁enchant", + -14.216076850891112 + ], + [ + "▁refuel", + -14.216142654418944 + ], + [ + "▁BONUS", + -14.216180801391602 + ], + [ + "▁environs", + -14.21620750427246 + ], + [ + "▁Kib", + -14.216211318969728 + ], + [ + "bucks", + -14.216243743896484 + ], + [ + "▁Poles", + -14.216374397277832 + ], + [ + "Clay", + -14.216421127319336 + ], + [ + "▁divest", + -14.216509819030762 + ], + [ + "▁Pietro", + -14.216516494750977 + ], + [ + "+/-", + -14.216540336608888 + ], + [ + "▁repaint", + -14.216562271118164 + ], + [ + "▁modulate", + -14.21658420562744 + ], + [ + "▁Smarter", + -14.216628074645996 + ], + [ + "aksha", + -14.216681480407717 + ], + [ + "▁feasts", + -14.216713905334473 + ], + [ + "▁curbing", + -14.21673583984375 + ], + [ + "▁Ln", + -14.216753005981444 + ], + [ + "zewski", + 
-14.216776847839355 + ], + [ + "▁skeptics", + -14.216812133789062 + ], + [ + "959", + -14.216856956481934 + ], + [ + "Volunteer", + -14.216889381408691 + ], + [ + "Bandit", + -14.216894149780272 + ], + [ + "homing", + -14.21695327758789 + ], + [ + "utt", + -14.21697235107422 + ], + [ + "▁BTU", + -14.21713638305664 + ], + [ + "▁exponent", + -14.217144966125488 + ], + [ + "Evaluating", + -14.217156410217283 + ], + [ + "▁Stylus", + -14.217156410217283 + ], + [ + "▁Tolerance", + -14.217156410217283 + ], + [ + "▁croutons", + -14.217156410217283 + ], + [ + "▁disseminating", + -14.217156410217283 + ], + [ + "▁Activision", + -14.217157363891602 + ], + [ + "▁Ortega", + -14.217157363891602 + ], + [ + "▁Primitive", + -14.217158317565918 + ], + [ + "▁egos", + -14.217161178588867 + ], + [ + "▁nebula", + -14.217162132263184 + ], + [ + "▁platoon", + -14.217162132263184 + ], + [ + "ISSUE", + -14.2171630859375 + ], + [ + "Galatians", + -14.21716594696045 + ], + [ + "▁giggling", + -14.217166900634766 + ], + [ + "▁leeway", + -14.217166900634766 + ], + [ + "▁Winfrey", + -14.217170715332031 + ], + [ + "▁biodiesel", + -14.217177391052246 + ], + [ + "▁Hammersmith", + -14.217185020446776 + ], + [ + "Restoring", + -14.217190742492676 + ], + [ + "▁occlusion", + -14.217191696166992 + ], + [ + "SCREEN", + -14.217199325561523 + ], + [ + "Scaling", + -14.217202186584473 + ], + [ + "▁Shredder", + -14.217204093933104 + ], + [ + "▁Keegan", + -14.217211723327637 + ], + [ + "▁changeable", + -14.217225074768066 + ], + [ + "▁knitwear", + -14.217228889465332 + ], + [ + "▁tut", + -14.217242240905762 + ], + [ + "▁Muay", + -14.217243194580078 + ], + [ + "▁nitric", + -14.217248916625977 + ], + [ + "ected", + -14.217262268066406 + ], + [ + "Fry", + -14.217265129089355 + ], + [ + "▁Candice", + -14.217269897460938 + ], + [ + "cured", + -14.217279434204102 + ], + [ + "▁Plaque", + -14.217283248901367 + ], + [ + "▁hijack", + -14.217284202575684 + ], + [ + "▁retrace", + -14.217317581176758 + ], + [ + "▁indentation", + -14.217329978942873 + ], + [ + "▁lightbulb", + -14.21734619140625 + ], + [ + "▁patrolling", + -14.217355728149414 + ], + [ + "▁Yeshua", + -14.217365264892578 + ], + [ + "▁martyrs", + -14.21739101409912 + ], + [ + "▁Chum", + -14.217391967773438 + ], + [ + "▁phospho", + -14.217398643493652 + ], + [ + "▁rationality", + -14.217452049255373 + ], + [ + "▁hazel", + -14.21749496459961 + ], + [ + "uzi", + -14.217498779296877 + ], + [ + "▁Kindness", + -14.217512130737305 + ], + [ + "615", + -14.217530250549316 + ], + [ + "CSP", + -14.217561721801758 + ], + [ + "qt", + -14.21757698059082 + ], + [ + "▁YRS", + -14.217632293701172 + ], + [ + "▁Semantic", + -14.21772003173828 + ], + [ + "▁clocking", + -14.217735290527344 + ], + [ + "▁Pup", + -14.217812538146973 + ], + [ + "▁charted", + -14.217828750610352 + ], + [ + "▁wilted", + -14.217865943908691 + ], + [ + "▁Revel", + -14.217877388000488 + ], + [ + "Hooray", + -14.217912673950195 + ], + [ + "▁annexed", + -14.217925071716309 + ], + [ + "▁266", + -14.217950820922852 + ], + [ + "▁homeownership", + -14.217951774597168 + ], + [ + "Brent", + -14.217960357666016 + ], + [ + "▁flowery", + -14.21798324584961 + ], + [ + "▁CER", + -14.217998504638672 + ], + [ + "kini", + -14.218066215515137 + ], + [ + "▁Odo", + -14.218066215515137 + ], + [ + "▁($8", + -14.218091011047363 + ], + [ + "939", + -14.218104362487791 + ], + [ + "CDMA", + -14.21814250946045 + ], + [ + "vn", + -14.218161582946776 + ], + [ + "752", + -14.218185424804688 + ], + [ + "Ultimate", + -14.218250274658203 + ], + [ + "▁arranger", + 
-14.21828269958496 + ], + [ + "▁lacquered", + -14.21830940246582 + ], + [ + "▁hauler", + -14.218348503112791 + ], + [ + "▁Kast", + -14.218399047851562 + ], + [ + "temporal", + -14.218443870544434 + ], + [ + "Journey", + -14.21844482421875 + ], + [ + "Mexican", + -14.218466758728027 + ], + [ + "▁cycled", + -14.21846866607666 + ], + [ + "Statistics", + -14.218491554260254 + ], + [ + "Cotton", + -14.21849536895752 + ], + [ + "vendor", + -14.218523025512695 + ], + [ + "457", + -14.218531608581545 + ], + [ + "Shoot", + -14.218554496765137 + ], + [ + "▁Indulge", + -14.21860122680664 + ], + [ + "Garage", + -14.218653678894045 + ], + [ + "▁kraft", + -14.21866512298584 + ], + [ + "▁1980'", + -14.218682289123535 + ], + [ + "▁Shoppers", + -14.218868255615234 + ], + [ + "Dancing", + -14.21889591217041 + ], + [ + "fleur", + -14.218921661376951 + ], + [ + "affiliated", + -14.21893310546875 + ], + [ + "YING", + -14.218955993652344 + ], + [ + "▁Basse", + -14.218966484069824 + ], + [ + "REAL", + -14.219012260437012 + ], + [ + "drained", + -14.21901798248291 + ], + [ + "Unknown", + -14.219023704528809 + ], + [ + "▁Axle", + -14.219075202941896 + ], + [ + "hatch", + -14.21908950805664 + ], + [ + "Medicare", + -14.219110488891602 + ], + [ + "cco", + -14.219120979309082 + ], + [ + "▁Kish", + -14.219120979309082 + ], + [ + "Containing", + -14.219130516052246 + ], + [ + "▁Arg", + -14.21920108795166 + ], + [ + "Voila", + -14.219402313232422 + ], + [ + "▁Blackout", + -14.219459533691406 + ], + [ + "tinib", + -14.219522476196287 + ], + [ + "Align", + -14.219531059265137 + ], + [ + "▁Geb", + -14.219569206237791 + ], + [ + "▁renounce", + -14.219621658325195 + ], + [ + "▁techie", + -14.219681739807127 + ], + [ + "▁penetrated", + -14.219749450683594 + ], + [ + "▁Delightful", + -14.21975040435791 + ], + [ + "▁grafts", + -14.21980094909668 + ], + [ + "/]", + -14.21981430053711 + ], + [ + "▁mink", + -14.219855308532717 + ], + [ + "▁Gave", + -14.219857215881348 + ], + [ + "▁POINT", + -14.219886779785156 + ], + [ + "▁servicemen", + -14.21993637084961 + ], + [ + "Border", + -14.219969749450684 + ], + [ + "mpg", + -14.220014572143556 + ], + [ + "▁henna", + -14.220027923583984 + ], + [ + "CCO", + -14.220056533813477 + ], + [ + "vra", + -14.220069885253906 + ], + [ + "▁Theta", + -14.220088005065918 + ], + [ + "drops", + -14.220100402832031 + ], + [ + "▁Counselors", + -14.2201509475708 + ], + [ + "▁PUR", + -14.220226287841797 + ], + [ + "▁blip", + -14.220229148864746 + ], + [ + "▁40\"", + -14.220499038696287 + ], + [ + "haga", + -14.220523834228516 + ], + [ + "▁Opened", + -14.22061252593994 + ], + [ + "wg", + -14.220646858215332 + ], + [ + "▁£300", + -14.220693588256836 + ], + [ + "Farmers", + -14.220742225646973 + ], + [ + "itte", + -14.220791816711426 + ], + [ + "▁Berliner", + -14.220808029174805 + ], + [ + "Income", + -14.220836639404297 + ], + [ + "▁mated", + -14.22085189819336 + ], + [ + "▁Positions", + -14.22095012664795 + ], + [ + "diving", + -14.221034049987791 + ], + [ + "▁Piet", + -14.221034049987791 + ], + [ + "▁293", + -14.221057891845703 + ], + [ + "▁PSL", + -14.221102714538574 + ], + [ + "▁Licht", + -14.22121810913086 + ], + [ + "▁insurgency", + -14.22129726409912 + ], + [ + "▁451", + -14.221330642700195 + ], + [ + "1:50", + -14.221351623535156 + ], + [ + "▁iterate", + -14.221352577209473 + ], + [ + "▁subgroup", + -14.2213716506958 + ], + [ + "GPU", + -14.221389770507812 + ], + [ + "▁pretext", + -14.221452713012695 + ], + [ + "▁caretakers", + -14.221465110778809 + ], + [ + "Kas", + -14.221477508544922 + ], + [ + 
"▁Erd", + -14.221477508544922 + ], + [ + "Ellen", + -14.221498489379885 + ], + [ + "▁disrupts", + -14.221498489379885 + ], + [ + "ECC", + -14.2214994430542 + ], + [ + "▁spearhead", + -14.221597671508787 + ], + [ + "▁292", + -14.221607208251951 + ], + [ + "▁Beauti", + -14.221628189086914 + ], + [ + "▁emanate", + -14.221699714660645 + ], + [ + "▁Sometime", + -14.221729278564451 + ], + [ + "Pig", + -14.221750259399414 + ], + [ + "▁Headlight", + -14.22193431854248 + ], + [ + "ossi", + -14.221935272216797 + ], + [ + "▁kw", + -14.221967697143556 + ], + [ + "▁Yee", + -14.22202491760254 + ], + [ + "▁Fuente", + -14.222182273864746 + ], + [ + "Served", + -14.222197532653809 + ], + [ + "▁Octo", + -14.222224235534668 + ], + [ + "▁PX", + -14.222228050231934 + ], + [ + "▁dosages", + -14.222262382507324 + ], + [ + "▁Allegheny", + -14.222284317016602 + ], + [ + "▁Guantanamo", + -14.222284317016602 + ], + [ + "▁Hoboken", + -14.222284317016602 + ], + [ + "▁Renfrew", + -14.222284317016602 + ], + [ + "▁Sagittarius", + -14.222284317016602 + ], + [ + "▁alchemy", + -14.222284317016602 + ], + [ + "▁florets", + -14.222284317016602 + ], + [ + "▁gilded", + -14.222284317016602 + ], + [ + "▁lobbied", + -14.222284317016602 + ], + [ + "▁sommelier", + -14.222284317016602 + ], + [ + "▁unfulfilled", + -14.222284317016602 + ], + [ + "▁Guerrero", + -14.222285270690918 + ], + [ + "▁PROCESS", + -14.222285270690918 + ], + [ + "▁clandestine", + -14.222286224365234 + ], + [ + "▁intoxication", + -14.222286224365234 + ], + [ + "▁mexican", + -14.222286224365234 + ], + [ + "▁CHOICE", + -14.222288131713867 + ], + [ + "▁Simultaneously", + -14.2222900390625 + ], + [ + "▁tailgating", + -14.22230339050293 + ], + [ + "▁Bachchan", + -14.222308158874512 + ], + [ + "▁yelp", + -14.222310066223145 + ], + [ + "▁pantries", + -14.222312927246094 + ], + [ + "▁Burkina", + -14.222320556640623 + ], + [ + "Took", + -14.222323417663574 + ], + [ + "▁Bradenton", + -14.222326278686523 + ], + [ + "▁Meuble", + -14.222326278686523 + ], + [ + "▁Lambeth", + -14.222332954406738 + ], + [ + "▁289", + -14.222333908081056 + ], + [ + "▁deluge", + -14.222333908081056 + ], + [ + "▁mismanagement", + -14.222333908081056 + ], + [ + "▁opium", + -14.222368240356444 + ], + [ + "▁reconstructive", + -14.222369194030762 + ], + [ + "▁forerunner", + -14.22237491607666 + ], + [ + "▁Segway", + -14.222394943237305 + ], + [ + "hmer", + -14.222405433654783 + ], + [ + "▁OpenVPN", + -14.222406387329102 + ], + [ + "▁Erwin", + -14.222418785095217 + ], + [ + "Dwyer", + -14.22242546081543 + ], + [ + "Completed", + -14.222457885742188 + ], + [ + "liver", + -14.22248363494873 + ], + [ + "▁peacekeeping", + -14.222518920898438 + ], + [ + "▁Ongoing", + -14.222563743591309 + ], + [ + "▁Jagger", + -14.222599029541016 + ], + [ + "▁apprehended", + -14.22262477874756 + ], + [ + "▁AFR", + -14.222626686096191 + ], + [ + "▁Kesh", + -14.222655296325684 + ], + [ + "▁VHF", + -14.222697257995604 + ], + [ + "▁defied", + -14.222719192504885 + ], + [ + "▁flirty", + -14.2227201461792 + ], + [ + "▁mobilise", + -14.222797393798828 + ], + [ + "▁toothbrushes", + -14.222837448120115 + ], + [ + "▁Keating", + -14.222847938537598 + ], + [ + "▁Spaghetti", + -14.222847938537598 + ], + [ + "▁priming", + -14.222867965698242 + ], + [ + "▁Wounded", + -14.222890853881836 + ], + [ + "▁Lenses", + -14.222926139831545 + ], + [ + "▁Ferri", + -14.222947120666504 + ], + [ + "▁AVC", + -14.22296142578125 + ], + [ + "▁Halle", + -14.222972869873049 + ], + [ + "▁clique", + -14.222993850708008 + ], + [ + "▁compiles", + -14.22312068939209 
+ ], + [ + "▁FINAL", + -14.22313404083252 + ], + [ + "!!!!!!!!", + -14.22314739227295 + ], + [ + "yield", + -14.223153114318848 + ], + [ + "▁treatable", + -14.223237037658691 + ], + [ + "▁Tonic", + -14.223237991333008 + ], + [ + "▁Huss", + -14.223248481750488 + ], + [ + "▁reinvented", + -14.223270416259766 + ], + [ + "BEST", + -14.223305702209473 + ], + [ + "▁vile", + -14.223383903503418 + ], + [ + "▁Careful", + -14.223402976989746 + ], + [ + "▁centralised", + -14.223470687866213 + ], + [ + "▁56%", + -14.223573684692385 + ], + [ + "ATES", + -14.223677635192873 + ], + [ + "Pattern", + -14.22368335723877 + ], + [ + "▁Cantor", + -14.223712921142578 + ], + [ + "Authentic", + -14.223719596862791 + ], + [ + "explanatory", + -14.223722457885742 + ], + [ + "▁Addresses", + -14.22372341156006 + ], + [ + "esian", + -14.22372817993164 + ], + [ + "Affiliate", + -14.223731994628906 + ], + [ + "▁Rwandan", + -14.223731994628906 + ], + [ + "Shar", + -14.223737716674805 + ], + [ + "Liberty", + -14.22376823425293 + ], + [ + "twice", + -14.22377872467041 + ], + [ + "▁disregarded", + -14.223814964294434 + ], + [ + "Employ", + -14.22385025024414 + ], + [ + "▁Slick", + -14.22386360168457 + ], + [ + "▁experi", + -14.223868370056152 + ], + [ + "▁deliverance", + -14.223888397216797 + ], + [ + "ilian", + -14.223889350891112 + ], + [ + "racial", + -14.224004745483398 + ], + [ + "Wayne", + -14.224017143249512 + ], + [ + "abil", + -14.224029541015623 + ], + [ + "Brick", + -14.224032402038574 + ], + [ + "▁predictors", + -14.224059104919434 + ], + [ + "(3", + -14.224096298217772 + ], + [ + "▁Sheryl", + -14.22410011291504 + ], + [ + "buds", + -14.224119186401367 + ], + [ + "▁mull", + -14.22417163848877 + ], + [ + "▁supervises", + -14.22418975830078 + ], + [ + "▁loch", + -14.224231719970703 + ], + [ + "Licensed", + -14.224292755126951 + ], + [ + "maki", + -14.224339485168455 + ], + [ + "▁cuticles", + -14.224384307861328 + ], + [ + "ZER", + -14.224394798278809 + ], + [ + "▁syndromes", + -14.224458694458008 + ], + [ + "clay", + -14.22454833984375 + ], + [ + "udra", + -14.224601745605469 + ], + [ + "026", + -14.22464656829834 + ], + [ + "VIA", + -14.224668502807615 + ], + [ + "▁Runtime", + -14.224786758422852 + ], + [ + "▁loon", + -14.224788665771484 + ], + [ + "▁Manly", + -14.224793434143066 + ], + [ + "▁Believer", + -14.224847793579102 + ], + [ + "▁Vidal", + -14.22487735748291 + ], + [ + "Casey", + -14.224884033203123 + ], + [ + "▁Sourcing", + -14.224908828735352 + ], + [ + "▁cloned", + -14.224980354309082 + ], + [ + "▁marinas", + -14.225028038024902 + ], + [ + "cius", + -14.225086212158203 + ], + [ + "OON", + -14.225167274475098 + ], + [ + "▁levelled", + -14.225175857543944 + ], + [ + "▁zealous", + -14.22518539428711 + ], + [ + "6.50", + -14.22522258758545 + ], + [ + "▁$75,000", + -14.225242614746094 + ], + [ + "Movies", + -14.225296020507812 + ], + [ + "▁JCB", + -14.225337028503418 + ], + [ + "▁Hump", + -14.225337982177734 + ], + [ + "▁inlaid", + -14.225397109985352 + ], + [ + "▁Baer", + -14.22546100616455 + ], + [ + "▁competently", + -14.225544929504396 + ], + [ + "Hans", + -14.22560214996338 + ], + [ + "doors", + -14.22560214996338 + ], + [ + "amara", + -14.225603103637695 + ], + [ + "urian", + -14.225634574890137 + ], + [ + "Pointer", + -14.22566032409668 + ], + [ + "0.08", + -14.225716590881348 + ], + [ + "NATION", + -14.225763320922852 + ], + [ + "▁realty", + -14.225787162780762 + ], + [ + "▁orbiting", + -14.225811004638672 + ], + [ + "already", + -14.225812911987305 + ], + [ + "▁338", + -14.225817680358888 + ], + [ 
+ "▁Museo", + -14.225829124450684 + ], + [ + "▁Farah", + -14.225996017456056 + ], + [ + "▁Sask", + -14.22602081298828 + ], + [ + "LIG", + -14.226037979125977 + ], + [ + "▁Clements", + -14.226045608520508 + ], + [ + "rgh", + -14.226078033447266 + ], + [ + "▁tradeshow", + -14.22612762451172 + ], + [ + "Cow", + -14.226140975952148 + ], + [ + "▁Fixing", + -14.226144790649414 + ], + [ + "RILL", + -14.226170539855955 + ], + [ + "▁margarita", + -14.226173400878906 + ], + [ + "▁biomarker", + -14.226188659667969 + ], + [ + "▁pullback", + -14.226189613342283 + ], + [ + "▁Modify", + -14.226284980773926 + ], + [ + "▁homers", + -14.226341247558594 + ], + [ + "▁Castell", + -14.226347923278809 + ], + [ + "Altogether", + -14.22637176513672 + ], + [ + "Listening", + -14.226377487182615 + ], + [ + "hurt", + -14.226449966430664 + ], + [ + "ARIA", + -14.226459503173828 + ], + [ + "▁Crib", + -14.226503372192385 + ], + [ + "Utiliz", + -14.226512908935549 + ], + [ + "▁indent", + -14.226580619812012 + ], + [ + "lobo", + -14.226597785949709 + ], + [ + "▁hesitated", + -14.226777076721191 + ], + [ + "▁envisage", + -14.226839065551758 + ], + [ + "▁envisions", + -14.22689437866211 + ], + [ + "OZ", + -14.226923942565918 + ], + [ + "▁BMS", + -14.226930618286133 + ], + [ + "phobic", + -14.226946830749512 + ], + [ + "▁Mili", + -14.226972579956056 + ], + [ + "▁Midi", + -14.227030754089355 + ], + [ + "▁pandas", + -14.22704029083252 + ], + [ + "▁Douglass", + -14.22707176208496 + ], + [ + "Webcam", + -14.227089881896973 + ], + [ + "▁$1.9", + -14.227091789245604 + ], + [ + "▁Internationally", + -14.227197647094728 + ], + [ + "▁interrupts", + -14.227237701416016 + ], + [ + "▁ingress", + -14.227313995361328 + ], + [ + "!!\"", + -14.227357864379885 + ], + [ + "▁Eucalyptus", + -14.227438926696776 + ], + [ + "▁McLeod", + -14.227438926696776 + ], + [ + "▁Osprey", + -14.227438926696776 + ], + [ + "▁Xanax", + -14.227438926696776 + ], + [ + "▁elastomer", + -14.227438926696776 + ], + [ + "▁laureate", + -14.227438926696776 + ], + [ + "▁misspelled", + -14.227438926696776 + ], + [ + "▁recombinant", + -14.227438926696776 + ], + [ + "▁subterranean", + -14.227438926696776 + ], + [ + "▁causation", + -14.227439880371094 + ], + [ + "▁oblivion", + -14.227439880371094 + ], + [ + "▁traumatized", + -14.227439880371094 + ], + [ + "▁Baccarat", + -14.22744083404541 + ], + [ + "▁cologne", + -14.22744083404541 + ], + [ + "▁cmdlet", + -14.227442741394045 + ], + [ + "▁CONTRACT", + -14.22744369506836 + ], + [ + "▁insignia", + -14.227444648742676 + ], + [ + "▁levies", + -14.227444648742676 + ], + [ + "▁Ghostbusters", + -14.227445602416992 + ], + [ + "▁Penrith", + -14.227446556091309 + ], + [ + "▁Rasmussen", + -14.227447509765623 + ], + [ + "▁garrison", + -14.22744846343994 + ], + [ + "▁austere", + -14.227450370788574 + ], + [ + "▁likable", + -14.227450370788574 + ], + [ + "▁Optimus", + -14.22745132446289 + ], + [ + "▁Bogota", + -14.227453231811523 + ], + [ + "▁Mustafa", + -14.227458953857422 + ], + [ + "▁unhelpful", + -14.227466583251951 + ], + [ + "▁FMCG", + -14.227473258972168 + ], + [ + "▁Nawaz", + -14.227474212646484 + ], + [ + "▁(1987)", + -14.227476119995115 + ], + [ + "▁pokemon", + -14.227477073669434 + ], + [ + "▁quorum", + -14.22747802734375 + ], + [ + "▁cornmeal", + -14.227478981018066 + ], + [ + "▁Guiding", + -14.227479934692385 + ], + [ + "▁interwoven", + -14.227481842041016 + ], + [ + "▁Townhomes", + -14.227494239807127 + ], + [ + "▁bombardment", + -14.22752571105957 + ], + [ + "▁misunderstand", + -14.2275390625 + ], + [ + "▁nitty", + 
-14.2275390625 + ], + [ + "▁toothache", + -14.22756004333496 + ], + [ + "▁475", + -14.227563858032228 + ], + [ + "▁nappy", + -14.227569580078123 + ], + [ + "▁Cahill", + -14.227578163146973 + ], + [ + "▁operatic", + -14.227581024169922 + ], + [ + "▁Clarksville", + -14.227596282958984 + ], + [ + "▁placard", + -14.22760772705078 + ], + [ + "▁consecrated", + -14.22762393951416 + ], + [ + "▁Illuminated", + -14.227633476257324 + ], + [ + "asca", + -14.227635383605955 + ], + [ + "azione", + -14.22769832611084 + ], + [ + "▁terry", + -14.227703094482422 + ], + [ + "▁Notepad", + -14.227713584899902 + ], + [ + "▁dancefloor", + -14.227721214294434 + ], + [ + "▁smothered", + -14.227774620056152 + ], + [ + "BRO", + -14.227779388427734 + ], + [ + "▁buyback", + -14.227848052978516 + ], + [ + "▁(51", + -14.227852821350098 + ], + [ + "▁Pooh", + -14.227893829345703 + ], + [ + "▁FACE", + -14.227935791015623 + ], + [ + "▁Boa", + -14.22806167602539 + ], + [ + "erno", + -14.228067398071287 + ], + [ + "▁Girlfriend", + -14.228067398071287 + ], + [ + "▁Marti", + -14.228108406066896 + ], + [ + "▁mismatched", + -14.22814655303955 + ], + [ + "appy", + -14.22817325592041 + ], + [ + "▁MPA", + -14.228198051452637 + ], + [ + "▁Husk", + -14.228209495544434 + ], + [ + "▁croissants", + -14.228276252746582 + ], + [ + "▁republished", + -14.228277206420898 + ], + [ + "-2002", + -14.22834587097168 + ], + [ + "▁TBA", + -14.228367805480955 + ], + [ + "▁Zeta", + -14.228400230407717 + ], + [ + "▁Lenin", + -14.228410720825195 + ], + [ + "▁permeates", + -14.228464126586914 + ], + [ + "▁traceable", + -14.228468894958496 + ], + [ + "▁gunmen", + -14.228504180908203 + ], + [ + "▁Brianna", + -14.228522300720217 + ], + [ + "2.7%", + -14.22852611541748 + ], + [ + "993", + -14.228543281555176 + ], + [ + "▁Woolf", + -14.228575706481934 + ], + [ + "▁271", + -14.228595733642578 + ], + [ + "▁Deanna", + -14.228611946105955 + ], + [ + "seated", + -14.228643417358398 + ], + [ + "▁Pending", + -14.22864818572998 + ], + [ + "▁slumped", + -14.228650093078612 + ], + [ + "sensitivity", + -14.228694915771484 + ], + [ + "▁Serv", + -14.228760719299316 + ], + [ + "Slip", + -14.228765487670898 + ], + [ + "▁Brae", + -14.228792190551758 + ], + [ + "promote", + -14.228797912597656 + ], + [ + "racht", + -14.228816986083984 + ], + [ + "▁Macedonian", + -14.22882843017578 + ], + [ + "▁$1.50", + -14.228877067565918 + ], + [ + "acetic", + -14.228915214538574 + ], + [ + "restricted", + -14.22898006439209 + ], + [ + "Tweet", + -14.228986740112305 + ], + [ + "Able", + -14.228996276855469 + ], + [ + "▁Wasn", + -14.229000091552734 + ], + [ + "plum", + -14.229009628295898 + ], + [ + "▁441", + -14.22903060913086 + ], + [ + "abortion", + -14.229031562805176 + ], + [ + "Purchasing", + -14.229032516479492 + ], + [ + "Switzerland", + -14.229048728942873 + ], + [ + "Promotion", + -14.22905158996582 + ], + [ + "domestic", + -14.229081153869627 + ], + [ + "affir", + -14.229105949401855 + ], + [ + "linguistic", + -14.229113578796388 + ], + [ + "Nelson", + -14.229155540466309 + ], + [ + "▁Aesthetics", + -14.22916030883789 + ], + [ + "▁Hazardous", + -14.229211807250977 + ], + [ + "▁Shaman", + -14.229212760925291 + ], + [ + "▁joists", + -14.22921657562256 + ], + [ + "▁Reza", + -14.229286193847656 + ], + [ + "▁Momo", + -14.229326248168944 + ], + [ + "▁Moderator", + -14.229331016540527 + ], + [ + "▁7.7", + -14.229443550109863 + ], + [ + "1.25", + -14.229557037353516 + ], + [ + "Photography", + -14.229594230651855 + ], + [ + "▁Alright", + -14.229625701904297 + ], + [ + "▁SEARCH", + 
-14.229649543762209 + ], + [ + "▁Pageant", + -14.229655265808104 + ], + [ + "Glo", + -14.229708671569824 + ], + [ + "▁reworking", + -14.22977066040039 + ], + [ + "▁4:3", + -14.229788780212402 + ], + [ + "DEA", + -14.229838371276855 + ], + [ + "▁remodels", + -14.229846000671388 + ], + [ + "completely", + -14.229849815368652 + ], + [ + "▁pak", + -14.229924201965332 + ], + [ + "▁Foodie", + -14.229973793029783 + ], + [ + "Thru", + -14.230034828186035 + ], + [ + "divi", + -14.230080604553224 + ], + [ + "▁outcast", + -14.230109214782717 + ], + [ + "1986", + -14.230111122131348 + ], + [ + "Getty", + -14.230112075805664 + ], + [ + "replacement", + -14.230120658874512 + ], + [ + "▁seniority", + -14.230140686035156 + ], + [ + "▁thorns", + -14.230194091796877 + ], + [ + "uding", + -14.23022174835205 + ], + [ + "MHC", + -14.230250358581545 + ], + [ + "▁Appel", + -14.23031234741211 + ], + [ + ".25%", + -14.2303466796875 + ], + [ + "Annie", + -14.230423927307127 + ], + [ + "▁scaring", + -14.23045253753662 + ], + [ + "Defending", + -14.23049259185791 + ], + [ + "Newer", + -14.230518341064451 + ], + [ + "▁tracer", + -14.23058795928955 + ], + [ + "▁ontology", + -14.230599403381348 + ], + [ + "▁Jurisdiction", + -14.230602264404297 + ], + [ + "▁helplessness", + -14.230629920959473 + ], + [ + "▁excused", + -14.23064136505127 + ], + [ + "▁TLD", + -14.23066520690918 + ], + [ + "jn", + -14.230687141418455 + ], + [ + "▁sly", + -14.230706214904783 + ], + [ + "1979", + -14.230786323547363 + ], + [ + "ccompanied", + -14.230789184570312 + ], + [ + "▁layman", + -14.230819702148438 + ], + [ + "crete", + -14.23083782196045 + ], + [ + "▁SALES", + -14.230862617492676 + ], + [ + "Flying", + -14.23088550567627 + ], + [ + "▁therapeutics", + -14.230896949768066 + ], + [ + "▁Tend", + -14.23099422454834 + ], + [ + "▁vaults", + -14.231083869934082 + ], + [ + "▁Bé", + -14.231144905090332 + ], + [ + "▁AOC", + -14.231216430664062 + ], + [ + "▁Tenth", + -14.231237411499023 + ], + [ + "Hitting", + -14.231245994567873 + ], + [ + "MSC", + -14.231304168701172 + ], + [ + "▁366", + -14.231348991394045 + ], + [ + "▁nigh", + -14.231353759765623 + ], + [ + "▁pilgrim", + -14.23135757446289 + ], + [ + "rescue", + -14.231377601623535 + ], + [ + "▁Ransom", + -14.231398582458496 + ], + [ + "▁Kreme", + -14.231407165527344 + ], + [ + "BOL", + -14.231427192687988 + ], + [ + "Zoom", + -14.231431007385254 + ], + [ + "▁integrator", + -14.23144817352295 + ], + [ + "▁Pura", + -14.231465339660645 + ], + [ + "Setup", + -14.231568336486816 + ], + [ + "4:45", + -14.231613159179688 + ], + [ + "▁Pomegranate", + -14.23163604736328 + ], + [ + "▁Deadly", + -14.231675148010254 + ], + [ + "ouille", + -14.231683731079102 + ], + [ + "▁Thane", + -14.231735229492188 + ], + [ + "▁Dele", + -14.231766700744627 + ], + [ + "PCR", + -14.231782913208008 + ], + [ + "▁porting", + -14.231809616088867 + ], + [ + "▁Ionic", + -14.231866836547852 + ], + [ + "▁Broom", + -14.231976509094238 + ], + [ + "▁splashed", + -14.231996536254885 + ], + [ + "▁caterpillars", + -14.232011795043944 + ], + [ + "▁redistribute", + -14.23201847076416 + ], + [ + "▁seduce", + -14.232033729553224 + ], + [ + "$7", + -14.232035636901855 + ], + [ + "grossing", + -14.232037544250488 + ], + [ + "ographies", + -14.232135772705078 + ], + [ + "Cass", + -14.232148170471191 + ], + [ + "▁Grinch", + -14.232162475585938 + ], + [ + "oge", + -14.232183456420898 + ], + [ + "▁Assign", + -14.232194900512695 + ], + [ + "▁718", + -14.232223510742188 + ], + [ + "▁tasteless", + -14.232373237609863 + ], + [ + "▁EFF", + 
-14.232498168945312 + ], + [ + "▁quali", + -14.232519149780272 + ], + [ + "BAS", + -14.232577323913574 + ], + [ + "▁Notices", + -14.232592582702637 + ], + [ + "incomprehensible", + -14.232620239257812 + ], + [ + "▁04/21/2019", + -14.232620239257812 + ], + [ + "▁Davao", + -14.232620239257812 + ], + [ + "▁Middlesbrough", + -14.232620239257812 + ], + [ + "▁Nightingale", + -14.232620239257812 + ], + [ + "▁atherosclerosis", + -14.232620239257812 + ], + [ + "▁cleavage", + -14.232620239257812 + ], + [ + "▁expedient", + -14.232620239257812 + ], + [ + "▁incapacitated", + -14.232620239257812 + ], + [ + "▁odyssey", + -14.232620239257812 + ], + [ + "▁rhyming", + -14.232620239257812 + ], + [ + "▁salaried", + -14.232621192932127 + ], + [ + "▁lotteries", + -14.232622146606444 + ], + [ + "▁pledging", + -14.232622146606444 + ], + [ + "▁Mitigation", + -14.232623100280762 + ], + [ + "▁tremble", + -14.232626914978027 + ], + [ + "▁Lucifer", + -14.23263454437256 + ], + [ + "▁Bedfordshire", + -14.232638359069824 + ], + [ + "▁Boynton", + -14.23263931274414 + ], + [ + "▁Carbide", + -14.232640266418455 + ], + [ + "▁interchangeably", + -14.23264503479004 + ], + [ + "▁crucifixion", + -14.232646942138672 + ], + [ + "▁(1985)", + -14.232657432556152 + ], + [ + "▁Mercado", + -14.232662200927734 + ], + [ + "▁flaxseed", + -14.232670783996582 + ], + [ + "▁pausing", + -14.232672691345217 + ], + [ + "▁requisition", + -14.23267936706543 + ], + [ + "▁coffers", + -14.232684135437012 + ], + [ + "▁surmise", + -14.23268699645996 + ], + [ + "▁Puritan", + -14.232690811157228 + ], + [ + "▁pegboard", + -14.232690811157228 + ], + [ + "▁Racecourse", + -14.232698440551758 + ], + [ + "erz", + -14.232707023620604 + ], + [ + "▁Barnsley", + -14.23271942138672 + ], + [ + "OIL", + -14.23272705078125 + ], + [ + "▁Iberian", + -14.232757568359377 + ], + [ + "▁ethnicities", + -14.232779502868652 + ], + [ + "Hook", + -14.232789993286133 + ], + [ + "▁Workstation", + -14.232803344726562 + ], + [ + "▁dingy", + -14.23280429840088 + ], + [ + "▁Snowboard", + -14.232806205749512 + ], + [ + "▁Lenox", + -14.232844352722168 + ], + [ + "▁Steady", + -14.232864379882812 + ], + [ + "▁IHS", + -14.23293113708496 + ], + [ + "▁Merino", + -14.23294162750244 + ], + [ + "▁Lua", + -14.23297882080078 + ], + [ + "▁outburst", + -14.233003616333008 + ], + [ + "849", + -14.23301601409912 + ], + [ + "▁Valet", + -14.23302173614502 + ], + [ + "▁Concerned", + -14.233024597167969 + ], + [ + "▁STORY", + -14.233040809631348 + ], + [ + "▁Glenwood", + -14.233044624328612 + ], + [ + "▁uptrend", + -14.233054161071776 + ], + [ + "lever", + -14.23309326171875 + ], + [ + "bead", + -14.233108520507812 + ], + [ + "▁Zoho", + -14.233109474182127 + ], + [ + "▁smuggled", + -14.233136177062988 + ], + [ + "Prosecutors", + -14.233144760131836 + ], + [ + "▁baggy", + -14.23322582244873 + ], + [ + "▁Shepherds", + -14.233264923095703 + ], + [ + "▁Nea", + -14.233274459838867 + ], + [ + "1960", + -14.233321189880373 + ], + [ + "▁Activist", + -14.233356475830078 + ], + [ + "Loop", + -14.233373641967772 + ], + [ + "▁crawlers", + -14.233390808105469 + ], + [ + "▁FK", + -14.233416557312012 + ], + [ + "▁Knott", + -14.23342227935791 + ], + [ + "SHS", + -14.233427047729492 + ], + [ + "▁Judea", + -14.233428955078123 + ], + [ + "ovan", + -14.233514785766602 + ], + [ + "3300", + -14.233531951904297 + ], + [ + "▁Guido", + -14.233563423156738 + ], + [ + "Advising", + -14.233597755432127 + ], + [ + "lada", + -14.233620643615724 + ], + [ + "▁MQ", + -14.233665466308594 + ], + [ + "ution", + -14.233705520629885 + ], + [ 
+ "FCC", + -14.233710289001465 + ], + [ + "splash", + -14.233725547790527 + ], + [ + "▁Zub", + -14.233831405639648 + ], + [ + "▁LUN", + -14.23390007019043 + ], + [ + "▁shar", + -14.233942031860352 + ], + [ + "▁Tuning", + -14.233963966369627 + ], + [ + "impos", + -14.2340087890625 + ], + [ + "Separate", + -14.234081268310549 + ], + [ + "Million", + -14.234102249145508 + ], + [ + "▁shears", + -14.234108924865724 + ], + [ + "bern", + -14.234119415283203 + ], + [ + "▁resection", + -14.234137535095217 + ], + [ + "▁outfitter", + -14.234270095825195 + ], + [ + "Lived", + -14.234289169311523 + ], + [ + "Exploring", + -14.234344482421877 + ], + [ + "Pocket", + -14.23434829711914 + ], + [ + "Association", + -14.23435878753662 + ], + [ + "infected", + -14.23435878753662 + ], + [ + "surprise", + -14.23435878753662 + ], + [ + "Stretch", + -14.234390258789062 + ], + [ + "▁WSOP", + -14.234416961669922 + ], + [ + "regardless", + -14.23442268371582 + ], + [ + "2.4%", + -14.234439849853516 + ], + [ + "4+", + -14.234448432922363 + ], + [ + "▁sehr", + -14.234451293945312 + ], + [ + "▁Succeed", + -14.234469413757324 + ], + [ + "consult", + -14.234487533569336 + ], + [ + "concern", + -14.234501838684082 + ], + [ + "absolutely", + -14.23452377319336 + ], + [ + "Sonic", + -14.234529495239258 + ], + [ + "Achieve", + -14.23458480834961 + ], + [ + "Donations", + -14.234627723693848 + ], + [ + "▁Hinge", + -14.234638214111328 + ], + [ + "framing", + -14.234665870666504 + ], + [ + "cushion", + -14.2346830368042 + ], + [ + "▁autonomously", + -14.234692573547363 + ], + [ + "Rama", + -14.234704971313477 + ], + [ + "▁multimillion", + -14.234704971313477 + ], + [ + "▁273", + -14.234732627868652 + ], + [ + "AIA", + -14.234789848327637 + ], + [ + "▁Amex", + -14.234862327575684 + ], + [ + "Fancy", + -14.234938621520996 + ], + [ + "▁0.00000", + -14.234992027282717 + ], + [ + "▁assaulting", + -14.235088348388672 + ], + [ + "▁1997)", + -14.235095024108888 + ], + [ + "▁Turned", + -14.235147476196287 + ], + [ + "sleeved", + -14.235170364379885 + ], + [ + "▁Demonstrate", + -14.235187530517578 + ], + [ + "▁MSCI", + -14.235203742980955 + ], + [ + "Joan", + -14.235206604003906 + ], + [ + "LAY", + -14.235210418701172 + ], + [ + "▁bod", + -14.235215187072754 + ], + [ + "osine", + -14.235220909118652 + ], + [ + "▁adv", + -14.235297203063965 + ], + [ + "▁Bernardo", + -14.235336303710938 + ], + [ + "lago", + -14.235355377197266 + ], + [ + "[0", + -14.23539924621582 + ], + [ + "▁Timeless", + -14.23540210723877 + ], + [ + "flora", + -14.235485076904297 + ], + [ + "PASS", + -14.235565185546877 + ], + [ + "▁Gosh", + -14.235590934753418 + ], + [ + "▁crocheting", + -14.235595703125 + ], + [ + "▁Selfie", + -14.235623359680176 + ], + [ + "▁4/5", + -14.23564338684082 + ], + [ + "▁armchairs", + -14.235645294189451 + ], + [ + "highway", + -14.235650062561035 + ], + [ + "▁Accuracy", + -14.23565673828125 + ], + [ + "Trek", + -14.235770225524902 + ], + [ + "▁minefield", + -14.235827445983888 + ], + [ + "▁490", + -14.235840797424316 + ], + [ + "rides", + -14.235851287841797 + ], + [ + "jour", + -14.23585605621338 + ], + [ + "riano", + -14.23587703704834 + ], + [ + "▁1849", + -14.235949516296388 + ], + [ + "▁65,000", + -14.235965728759766 + ], + [ + "▁Nj", + -14.235987663269045 + ], + [ + "Hosted", + -14.236076354980469 + ], + [ + "ffel", + -14.236199378967283 + ], + [ + "sealed", + -14.236204147338867 + ], + [ + "▁88%", + -14.236213684082031 + ], + [ + "▁Bind", + -14.23622226715088 + ], + [ + "▁Schne", + -14.236231803894045 + ], + [ + "Debt", + 
-14.236273765563965 + ], + [ + "▁Taft", + -14.236318588256836 + ], + [ + "13-14", + -14.236374855041504 + ], + [ + "Cad", + -14.236384391784668 + ], + [ + "choose", + -14.236412048339844 + ], + [ + "▁CPE", + -14.236475944519045 + ], + [ + "Raf", + -14.23647689819336 + ], + [ + "▁steeply", + -14.236493110656738 + ], + [ + "kade", + -14.236506462097168 + ], + [ + "▁lagoons", + -14.236544609069824 + ], + [ + "VAC", + -14.236549377441406 + ], + [ + "▁Kristi", + -14.236577033996582 + ], + [ + "severe", + -14.23660659790039 + ], + [ + "▁Valor", + -14.236618995666504 + ], + [ + "MRA", + -14.236668586730955 + ], + [ + "▁condiment", + -14.236698150634766 + ], + [ + "▁aerodynamics", + -14.236699104309082 + ], + [ + "▁Necessary", + -14.23671054840088 + ], + [ + "▁disinfect", + -14.236713409423828 + ], + [ + "▁nutritionally", + -14.236754417419434 + ], + [ + "▁ketone", + -14.236818313598633 + ], + [ + "vag", + -14.23682975769043 + ], + [ + "Parker", + -14.236881256103516 + ], + [ + "▁mime", + -14.236883163452148 + ], + [ + "▁1853", + -14.236906051635742 + ], + [ + "null", + -14.236944198608398 + ], + [ + "▁splint", + -14.237019538879396 + ], + [ + "▁eyepiece", + -14.237077713012695 + ], + [ + "▁Emilio", + -14.237102508544922 + ], + [ + "HUR", + -14.237153053283691 + ], + [ + "▁Pharm", + -14.237153053283691 + ], + [ + "▁Forced", + -14.237156867980955 + ], + [ + "▁befitting", + -14.23719596862793 + ], + [ + "▁geologic", + -14.237228393554688 + ], + [ + "▁Shapes", + -14.2372407913208 + ], + [ + "983", + -14.2372465133667 + ], + [ + "Bolt", + -14.237303733825684 + ], + [ + "▁lichen", + -14.237330436706545 + ], + [ + "aided", + -14.237375259399414 + ], + [ + "relationship", + -14.23738956451416 + ], + [ + "▁beamed", + -14.237465858459473 + ], + [ + "Graphics", + -14.23754596710205 + ], + [ + "OVE", + -14.237821578979492 + ], + [ + "▁Astronomical", + -14.237829208374023 + ], + [ + "▁MERCHANTABILITY", + -14.237829208374023 + ], + [ + "▁bereaved", + -14.237829208374023 + ], + [ + "▁dredging", + -14.237829208374023 + ], + [ + "▁dysplasia", + -14.237829208374023 + ], + [ + "▁epidural", + -14.237829208374023 + ], + [ + "▁irreverent", + -14.237829208374023 + ], + [ + "▁retribution", + -14.237829208374023 + ], + [ + "▁sydney", + -14.237829208374023 + ], + [ + "▁Daphne", + -14.23783016204834 + ], + [ + "▁Qualifier", + -14.23783016204834 + ], + [ + "▁admonish", + -14.23783016204834 + ], + [ + "▁extruder", + -14.23783016204834 + ], + [ + "▁Dummies", + -14.237831115722656 + ], + [ + "▁tahini", + -14.237832069396973 + ], + [ + "▁Sightseeing", + -14.237833976745604 + ], + [ + "▁relativity", + -14.237836837768556 + ], + [ + "▁urination", + -14.237836837768556 + ], + [ + "▁Enamel", + -14.23784637451172 + ], + [ + "▁Corinth", + -14.237847328186035 + ], + [ + "▁Kepler", + -14.237850189208984 + ], + [ + "▁Darfur", + -14.2378511428833 + ], + [ + "▁deformity", + -14.237857818603516 + ], + [ + "▁interlude", + -14.237863540649414 + ], + [ + "▁fickle", + -14.237872123718262 + ], + [ + "▁vehemently", + -14.237881660461426 + ], + [ + "▁Forgot", + -14.23788356781006 + ], + [ + "▁Elevate", + -14.23791217803955 + ], + [ + "▁preconceived", + -14.237919807434082 + ], + [ + "▁TOTAL", + -14.237921714782717 + ], + [ + "▁Ultima", + -14.237930297851562 + ], + [ + "▁doubleheader", + -14.237936973571776 + ], + [ + "3100", + -14.237953186035156 + ], + [ + "hoot", + -14.237970352172852 + ], + [ + "bria", + -14.237975120544434 + ], + [ + "▁worshippers", + -14.237994194030762 + ], + [ + "▁attestation", + -14.238025665283203 + ], + [ + "▁Assyrian", 
+ -14.238031387329102 + ], + [ + "▁truncated", + -14.23805046081543 + ], + [ + "▁Palais", + -14.238059043884276 + ], + [ + "▁Fillmore", + -14.238064765930176 + ], + [ + "EJ", + -14.23808765411377 + ], + [ + "▁Robles", + -14.23809051513672 + ], + [ + "▁idioms", + -14.238103866577148 + ], + [ + "▁flanges", + -14.238107681274414 + ], + [ + "means", + -14.238121032714844 + ], + [ + "▁PRP", + -14.23812770843506 + ], + [ + "▁Variations", + -14.23814868927002 + ], + [ + "▁Aetna", + -14.23819637298584 + ], + [ + "▁Visualization", + -14.238276481628418 + ], + [ + "knot", + -14.23827838897705 + ], + [ + "▁magma", + -14.23829746246338 + ], + [ + "▁GQ", + -14.238309860229492 + ], + [ + "Zoo", + -14.238313674926758 + ], + [ + "▁audi", + -14.238344192504885 + ], + [ + "▁Jennie", + -14.23836612701416 + ], + [ + "▁18:00", + -14.238378524780272 + ], + [ + "▁retouch", + -14.23838996887207 + ], + [ + "▁obstructing", + -14.23849868774414 + ], + [ + "▁sawmill", + -14.238523483276367 + ], + [ + "demon", + -14.238602638244627 + ], + [ + "▁Cohn", + -14.238661766052246 + ], + [ + "▁overshadow", + -14.238682746887209 + ], + [ + "▁wintry", + -14.238714218139648 + ], + [ + "zah", + -14.238810539245604 + ], + [ + "▁RET", + -14.238821029663086 + ], + [ + "▁Sex", + -14.238836288452148 + ], + [ + "▁fluently", + -14.238845825195312 + ], + [ + "aaaa", + -14.238850593566896 + ], + [ + "▁degenerate", + -14.238859176635742 + ], + [ + "▁rigors", + -14.238889694213867 + ], + [ + "Conclusions", + -14.238892555236816 + ], + [ + "▁0.75", + -14.238953590393066 + ], + [ + "▁ferrous", + -14.238958358764648 + ], + [ + "coco", + -14.238959312438965 + ], + [ + "▁sniffing", + -14.238972663879396 + ], + [ + "▁gale", + -14.238978385925291 + ], + [ + "▁slaying", + -14.238986015319824 + ], + [ + "▁solicited", + -14.239006996154783 + ], + [ + "▁cuppa", + -14.23903751373291 + ], + [ + "Rules", + -14.239142417907717 + ], + [ + "▁Gully", + -14.239184379577637 + ], + [ + "ehler", + -14.23919677734375 + ], + [ + "▁estimations", + -14.23922634124756 + ], + [ + "▁bodysuit", + -14.239229202270508 + ], + [ + "moz", + -14.239267349243164 + ], + [ + "▁Lifeline", + -14.239274978637695 + ], + [ + "valued", + -14.239282608032228 + ], + [ + "▁appropriateness", + -14.239287376403809 + ], + [ + "assie", + -14.239298820495604 + ], + [ + "▁CBR", + -14.239340782165527 + ], + [ + "▁tints", + -14.23936653137207 + ], + [ + "▁CORP", + -14.239371299743652 + ], + [ + "▁DEAD", + -14.239405632019045 + ], + [ + "▁Waller", + -14.239418029785156 + ], + [ + "glas", + -14.239453315734863 + ], + [ + "▁Mountaineer", + -14.239462852478027 + ], + [ + "IDENT", + -14.239508628845217 + ], + [ + "Lawmakers", + -14.239554405212402 + ], + [ + "▁digger", + -14.23965549468994 + ], + [ + "acea", + -14.239667892456056 + ], + [ + "advantage", + -14.239686012268066 + ], + [ + "▁bong", + -14.239705085754396 + ], + [ + "Scientific", + -14.239712715148926 + ], + [ + "antique", + -14.23971462249756 + ], + [ + "intelligence", + -14.239717483520508 + ], + [ + "▁sé", + -14.239718437194824 + ], + [ + "Unlimited", + -14.239737510681152 + ], + [ + "AKI", + -14.239738464355469 + ], + [ + "fueled", + -14.23976707458496 + ], + [ + "eminent", + -14.239784240722656 + ], + [ + "▁aristocratic", + -14.239789009094238 + ], + [ + "Sustainable", + -14.2398099899292 + ], + [ + "▁Modem", + -14.239850997924805 + ], + [ + "Recovery", + -14.239913940429688 + ], + [ + "NHS", + -14.239917755126951 + ], + [ + "bosch", + -14.239920616149902 + ], + [ + "▁Kuro", + -14.239964485168455 + ], + [ + "▁Joni", + 
-14.23997974395752 + ], + [ + "extraterrestrial", + -14.240042686462402 + ], + [ + "▁Econ", + -14.240042686462402 + ], + [ + "2:10", + -14.24004364013672 + ], + [ + "Guys", + -14.240050315856934 + ], + [ + "films", + -14.24006175994873 + ], + [ + "▁modernisation", + -14.24006175994873 + ], + [ + "▁disprove", + -14.240113258361816 + ], + [ + "piano", + -14.240208625793455 + ], + [ + "visa", + -14.240269660949709 + ], + [ + "▁wrecking", + -14.240286827087402 + ], + [ + "▁cranking", + -14.24030303955078 + ], + [ + "▁ferro", + -14.240416526794434 + ], + [ + "▁pada", + -14.240428924560549 + ], + [ + "▁Stew", + -14.240500450134276 + ], + [ + "▁Attacks", + -14.240592002868652 + ], + [ + "RSA", + -14.24065399169922 + ], + [ + "ENDER", + -14.240817070007324 + ], + [ + "▁mayors", + -14.240840911865234 + ], + [ + "▁Quart", + -14.240856170654297 + ], + [ + "▁Homeschool", + -14.240948677062988 + ], + [ + "shant", + -14.24099063873291 + ], + [ + "itti", + -14.241223335266112 + ], + [ + "▁Balt", + -14.241290092468262 + ], + [ + "▁Warp", + -14.24135971069336 + ], + [ + "tributing", + -14.241395950317385 + ], + [ + "▁Hagen", + -14.241397857666016 + ], + [ + "natured", + -14.241410255432127 + ], + [ + "▁tentacles", + -14.241714477539062 + ], + [ + "SITE", + -14.241738319396973 + ], + [ + "▁fliers", + -14.241782188415527 + ], + [ + "▁(41", + -14.241806030273438 + ], + [ + "▁LPC", + -14.24188995361328 + ], + [ + "▁Lawton", + -14.241929054260254 + ], + [ + "payer", + -14.24194622039795 + ], + [ + "▁DIV", + -14.24197483062744 + ], + [ + "0-9", + -14.241978645324709 + ], + [ + "▁Debug", + -14.241990089416504 + ], + [ + "retin", + -14.24201202392578 + ], + [ + "Maine", + -14.24203395843506 + ], + [ + "▁george", + -14.24205207824707 + ], + [ + "▁nephr", + -14.242055892944336 + ], + [ + "▁streetcar", + -14.24216651916504 + ], + [ + "cooling", + -14.242279052734377 + ], + [ + "▁semiconductors", + -14.242289543151855 + ], + [ + "united", + -14.242295265197754 + ], + [ + "▁Germanic", + -14.24233627319336 + ], + [ + "holz", + -14.242379188537598 + ], + [ + "▁spades", + -14.24238395690918 + ], + [ + "00:00", + -14.242384910583496 + ], + [ + "▁CCG", + -14.242388725280762 + ], + [ + "Crowd", + -14.242403030395508 + ], + [ + "Partners", + -14.24243450164795 + ], + [ + "▁flung", + -14.242467880249023 + ], + [ + "▁fifteenth", + -14.242473602294922 + ], + [ + "▁Mould", + -14.24248504638672 + ], + [ + "NIS", + -14.242526054382324 + ], + [ + "▁Schw", + -14.242545127868652 + ], + [ + "chairman", + -14.242547035217283 + ], + [ + "▁coco", + -14.24269199371338 + ], + [ + "▁Crafting", + -14.2427396774292 + ], + [ + "▁resurface", + -14.242868423461914 + ], + [ + "▁computations", + -14.242881774902344 + ], + [ + "▁cockroach", + -14.24292278289795 + ], + [ + "ULE", + -14.243034362792969 + ], + [ + "▁COMMUNITY", + -14.243063926696776 + ], + [ + "▁Teflon", + -14.243063926696776 + ], + [ + "▁engrossed", + -14.243063926696776 + ], + [ + "▁epiphany", + -14.243063926696776 + ], + [ + "▁fissure", + -14.243063926696776 + ], + [ + "▁introspective", + -14.243063926696776 + ], + [ + "▁kostenlos", + -14.243063926696776 + ], + [ + "▁unspoilt", + -14.243063926696776 + ], + [ + "▁xanax", + -14.243063926696776 + ], + [ + "▁Ophelia", + -14.243064880371094 + ], + [ + "▁disheartening", + -14.243064880371094 + ], + [ + "▁nomenclature", + -14.243064880371094 + ], + [ + "▁Cosmopolitan", + -14.24306583404541 + ], + [ + "▁Genocide", + -14.24306583404541 + ], + [ + "▁immersing", + -14.24306583404541 + ], + [ + "▁epigenetic", + -14.243067741394045 + ], + [ + 
"▁BitTorrent", + -14.24306869506836 + ], + [ + "▁melanin", + -14.243069648742676 + ], + [ + "▁Snowflake", + -14.243070602416992 + ], + [ + "avage", + -14.24307346343994 + ], + [ + "▁Poultry", + -14.24307346343994 + ], + [ + "▁bicarbonate", + -14.24307346343994 + ], + [ + "▁EDITION", + -14.24307632446289 + ], + [ + "▁Kampala", + -14.243078231811523 + ], + [ + "benzene", + -14.24307918548584 + ], + [ + "▁imitating", + -14.243082046508787 + ], + [ + "▁SERIES", + -14.24308967590332 + ], + [ + "▁Gregorian", + -14.243099212646484 + ], + [ + "▁Cupid", + -14.24310302734375 + ], + [ + "▁Chadwick", + -14.243104934692385 + ], + [ + "▁damask", + -14.243106842041016 + ], + [ + "▁shorthand", + -14.243109703063965 + ], + [ + "▁Winslow", + -14.243131637573242 + ], + [ + "Zhengzhou", + -14.243133544921877 + ], + [ + "▁scarred", + -14.243149757385254 + ], + [ + "▁Unemployment", + -14.243152618408203 + ], + [ + "ZIP", + -14.243168830871582 + ], + [ + "▁Multicultural", + -14.243175506591797 + ], + [ + "vyn", + -14.243188858032228 + ], + [ + "▁bicyclists", + -14.243199348449709 + ], + [ + "▁bedspread", + -14.243219375610352 + ], + [ + "▁subtleties", + -14.243241310119627 + ], + [ + "▁(64", + -14.243295669555664 + ], + [ + "uil", + -14.243353843688965 + ], + [ + "kku", + -14.243370056152344 + ], + [ + "▁humiliated", + -14.24338436126709 + ], + [ + "uating", + -14.243396759033203 + ], + [ + "ayne", + -14.243419647216797 + ], + [ + "▁precincts", + -14.243462562561035 + ], + [ + "▁latches", + -14.243473052978516 + ], + [ + "▁LAX", + -14.24348258972168 + ], + [ + "▁MARCH", + -14.243485450744627 + ], + [ + "▁Ormond", + -14.24349594116211 + ], + [ + "▁Squire", + -14.243542671203612 + ], + [ + "▁Hutton", + -14.243552207946776 + ], + [ + "tiered", + -14.243597984313965 + ], + [ + "▁CONDITIONS", + -14.243630409240724 + ], + [ + "▁panning", + -14.243653297424316 + ], + [ + "▁expended", + -14.24366569519043 + ], + [ + "TEST", + -14.243755340576172 + ], + [ + "Hint", + -14.243829727172852 + ], + [ + "Calculate", + -14.24386501312256 + ], + [ + "▁chipboard", + -14.243926048278809 + ], + [ + "▁Hooper", + -14.243931770324709 + ], + [ + "▁Delivering", + -14.24394702911377 + ], + [ + "▁patchy", + -14.243953704833984 + ], + [ + "uman", + -14.24398708343506 + ], + [ + "▁Flake", + -14.243990898132324 + ], + [ + "▁Prog", + -14.244065284729004 + ], + [ + "thio", + -14.244070053100586 + ], + [ + "▁headliner", + -14.244102478027344 + ], + [ + "▁Forster", + -14.244110107421877 + ], + [ + "Ferra", + -14.24411392211914 + ], + [ + "randa", + -14.244144439697266 + ], + [ + "Trim", + -14.244163513183594 + ], + [ + "▁metabolites", + -14.244183540344238 + ], + [ + "▁paramedic", + -14.244239807128906 + ], + [ + "cigarette", + -14.244280815124512 + ], + [ + "/06", + -14.244436264038086 + ], + [ + "abuse", + -14.244486808776855 + ], + [ + "▁Oath", + -14.244504928588867 + ], + [ + "▁351", + -14.244515419006348 + ], + [ + "▁handrails", + -14.244531631469728 + ], + [ + "▁wicking", + -14.244549751281738 + ], + [ + "▁Garde", + -14.244574546813965 + ], + [ + "▁Prin", + -14.244599342346191 + ], + [ + "holme", + -14.244638442993164 + ], + [ + "lauren", + -14.244650840759276 + ], + [ + "▁Branches", + -14.244688987731934 + ], + [ + "Portal", + -14.244695663452148 + ], + [ + "mington", + -14.244757652282717 + ], + [ + "▁EXC", + -14.244763374328612 + ], + [ + "olog", + -14.244781494140623 + ], + [ + "▁wry", + -14.244829177856444 + ], + [ + "▁Infants", + -14.244839668273926 + ], + [ + "across", + -14.244843482971191 + ], + [ + "OSHA", + -14.244867324829102 + 
], + [ + "▁Fences", + -14.24489688873291 + ], + [ + "CHES", + -14.24489974975586 + ], + [ + "▁Bh", + -14.244951248168944 + ], + [ + "▁Cllr", + -14.244961738586426 + ], + [ + "▁incursion", + -14.24498462677002 + ], + [ + "▁carbonated", + -14.24500560760498 + ], + [ + "▁OTP", + -14.245059967041016 + ], + [ + "▁EYE", + -14.245060920715332 + ], + [ + "Arsenal", + -14.245086669921877 + ], + [ + "▁bodyweight", + -14.245088577270508 + ], + [ + "Benjamin", + -14.245089530944824 + ], + [ + "Illinois", + -14.24509334564209 + ], + [ + "▁murderers", + -14.24510669708252 + ], + [ + "leaving", + -14.245128631591797 + ], + [ + "precious", + -14.245128631591797 + ], + [ + "▁natively", + -14.245137214660645 + ], + [ + "▁scuff", + -14.24514389038086 + ], + [ + "▁Cavern", + -14.245161056518556 + ], + [ + "referred", + -14.245226860046388 + ], + [ + "Vet", + -14.24528980255127 + ], + [ + "Creation", + -14.245293617248535 + ], + [ + "▁Positioning", + -14.245312690734863 + ], + [ + "▁nuke", + -14.245314598083496 + ], + [ + "▁IMHO", + -14.245359420776367 + ], + [ + "▁Worn", + -14.245438575744627 + ], + [ + "▁secs", + -14.245461463928224 + ], + [ + "▁photons", + -14.245491027832031 + ], + [ + "lga", + -14.245553016662598 + ], + [ + "wali", + -14.245562553405762 + ], + [ + "▁Kraus", + -14.24560260772705 + ], + [ + "Thr", + -14.245627403259276 + ], + [ + "▁DROP", + -14.24562931060791 + ], + [ + "▁interned", + -14.24567985534668 + ], + [ + "Grave", + -14.245712280273438 + ], + [ + "FLO", + -14.245723724365234 + ], + [ + "oxygen", + -14.245742797851562 + ], + [ + "▁BOM", + -14.245769500732422 + ], + [ + "▁Fak", + -14.245858192443848 + ], + [ + "Caution", + -14.245874404907228 + ], + [ + "▁1′′", + -14.245908737182615 + ], + [ + "▁Zand", + -14.24594783782959 + ], + [ + "SCH", + -14.245957374572754 + ], + [ + "027", + -14.246058464050291 + ], + [ + "▁emphasises", + -14.246169090270996 + ], + [ + "▁itemized", + -14.246183395385742 + ], + [ + "NDC", + -14.246441841125488 + ], + [ + "icu", + -14.246454238891602 + ], + [ + "416", + -14.246478080749512 + ], + [ + "▁Synopsis", + -14.246514320373535 + ], + [ + "▁folio", + -14.246541023254396 + ], + [ + "▁IFS", + -14.24659824371338 + ], + [ + "scented", + -14.246646881103516 + ], + [ + "▁permeate", + -14.246710777282717 + ], + [ + "▁publically", + -14.246710777282717 + ], + [ + "▁Peaceful", + -14.246726036071776 + ], + [ + "▁Bourg", + -14.24672794342041 + ], + [ + "AIM", + -14.246801376342772 + ], + [ + "pari", + -14.246811866760254 + ], + [ + "▁Jeb", + -14.246814727783203 + ], + [ + "987", + -14.246819496154783 + ], + [ + "mw", + -14.246826171875 + ], + [ + "▁Canna", + -14.24687385559082 + ], + [ + "HAR", + -14.246888160705566 + ], + [ + "aglio", + -14.24696445465088 + ], + [ + "▁cashless", + -14.24699878692627 + ], + [ + "covering", + -14.2470121383667 + ], + [ + "▁fb", + -14.247020721435549 + ], + [ + "schl", + -14.247027397155762 + ], + [ + "Serv", + -14.247057914733888 + ], + [ + "/29", + -14.247137069702148 + ], + [ + "MLA", + -14.24714469909668 + ], + [ + "Drill", + -14.247175216674805 + ], + [ + "▁Schoen", + -14.247211456298828 + ], + [ + "▁Frontiers", + -14.247264862060549 + ], + [ + "▁Happen", + -14.247296333312988 + ], + [ + "▁descriptor", + -14.24729824066162 + ], + [ + "▁crayon", + -14.247300148010254 + ], + [ + "GYN", + -14.247346878051758 + ], + [ + "cx", + -14.247369766235352 + ], + [ + "▁smug", + -14.247492790222168 + ], + [ + "▁Whitehead", + -14.247495651245115 + ], + [ + "vance", + -14.247583389282228 + ], + [ + "▁canines", + -14.247614860534668 + ], + [ + 
"steroid", + -14.247688293457031 + ], + [ + "wb", + -14.24778938293457 + ], + [ + "▁Zim", + -14.247910499572754 + ], + [ + "intend", + -14.247919082641602 + ], + [ + "junk", + -14.247946739196776 + ], + [ + "Buyer", + -14.247963905334473 + ], + [ + "▁ideation", + -14.24798583984375 + ], + [ + "▁$180", + -14.248011589050291 + ], + [ + "982", + -14.24802303314209 + ], + [ + "▁10.6", + -14.24802303314209 + ], + [ + "▁PMP", + -14.24803352355957 + ], + [ + "▁Organizers", + -14.24807071685791 + ], + [ + "▁Malin", + -14.248083114624023 + ], + [ + "▁defiant", + -14.248085021972656 + ], + [ + "75)", + -14.248265266418455 + ], + [ + "▁ACO", + -14.248296737670898 + ], + [ + "▁Tren", + -14.248299598693848 + ], + [ + "ERR", + -14.248311042785645 + ], + [ + "▁Cessna", + -14.248327255249023 + ], + [ + "▁Gutierrez", + -14.248327255249023 + ], + [ + "▁Ophthalmology", + -14.248327255249023 + ], + [ + "▁chennai", + -14.248327255249023 + ], + [ + "▁cuddling", + -14.248327255249023 + ], + [ + "▁daisies", + -14.248327255249023 + ], + [ + "▁lawnmower", + -14.248327255249023 + ], + [ + "▁tributaries", + -14.248327255249023 + ], + [ + "▁Aguilar", + -14.24832820892334 + ], + [ + "▁Wrought", + -14.24832820892334 + ], + [ + "▁NEWSWIRE", + -14.248329162597656 + ], + [ + "Communicating", + -14.248331069946287 + ], + [ + "▁jasper", + -14.248331069946287 + ], + [ + "▁Timberlake", + -14.248343467712402 + ], + [ + "▁crockery", + -14.248348236083984 + ], + [ + "▁VOIP", + -14.248351097106934 + ], + [ + "▁Steampunk", + -14.24835205078125 + ], + [ + "ceding", + -14.248355865478516 + ], + [ + "▁Viejo", + -14.248357772827148 + ], + [ + "▁Purcell", + -14.248371124267578 + ], + [ + "▁IMPACT", + -14.248372077941896 + ], + [ + "▁Neumann", + -14.24837875366211 + ], + [ + "▁fawn", + -14.248401641845703 + ], + [ + "Gay", + -14.2484130859375 + ], + [ + "▁myeloma", + -14.2484130859375 + ], + [ + "▁polarizing", + -14.248456001281738 + ], + [ + "▁ethnographic", + -14.248462677001951 + ], + [ + "▁Lia", + -14.24847412109375 + ], + [ + "▁concentric", + -14.24847412109375 + ], + [ + "NHTSA", + -14.248477935791016 + ], + [ + "▁weightlifting", + -14.248486518859863 + ], + [ + "▁Walkthrough", + -14.248517036437988 + ], + [ + "ultra", + -14.248520851135254 + ], + [ + "▁Modifier", + -14.24852466583252 + ], + [ + "Bron", + -14.248549461364746 + ], + [ + "▁Sway", + -14.24855899810791 + ], + [ + "▁Aussies", + -14.248579978942873 + ], + [ + "▁Longhorn", + -14.248579978942873 + ], + [ + "▁strapping", + -14.248581886291504 + ], + [ + "▁Patr", + -14.24858856201172 + ], + [ + "▁Bingham", + -14.24860382080078 + ], + [ + "▁lurch", + -14.248611450195312 + ], + [ + "▁unilaterally", + -14.248676300048828 + ], + [ + "NFP", + -14.248701095581056 + ], + [ + "▁Fittings", + -14.248724937438965 + ], + [ + "▁CarPlay", + -14.24876594543457 + ], + [ + "▁Hatfield", + -14.248786926269531 + ], + [ + "▁Aiden", + -14.248908996582031 + ], + [ + "▁Brownsville", + -14.248910903930664 + ], + [ + "▁2018/2019", + -14.248931884765623 + ], + [ + "Mic", + -14.248934745788574 + ], + [ + "▁Addressing", + -14.248973846435549 + ], + [ + "buckle", + -14.248993873596191 + ], + [ + "▁CHO", + -14.24900722503662 + ], + [ + "▁Gabon", + -14.249055862426758 + ], + [ + "973", + -14.24908447265625 + ], + [ + "▁2004).", + -14.249112129211426 + ], + [ + "▁Muz", + -14.24913215637207 + ], + [ + "Handle", + -14.249226570129396 + ], + [ + "ACO", + -14.24927043914795 + ], + [ + "▁snowing", + -14.249277114868164 + ], + [ + "AAP", + -14.24930477142334 + ], + [ + "edible", + -14.249311447143556 + ], + [ + 
"▁speculations", + -14.249320030212402 + ], + [ + "NEXT", + -14.249334335327148 + ], + [ + "▁googled", + -14.249428749084473 + ], + [ + "IKO", + -14.249663352966309 + ], + [ + "▁Melton", + -14.249713897705078 + ], + [ + ".75%", + -14.249767303466797 + ], + [ + "Chasing", + -14.24981689453125 + ], + [ + "▁allusion", + -14.249817848205566 + ], + [ + "▁$2.50", + -14.249839782714844 + ], + [ + "▁vert", + -14.249892234802246 + ], + [ + "/05", + -14.249895095825195 + ], + [ + "▁Demographic", + -14.250066757202148 + ], + [ + "▁$2.3", + -14.25019645690918 + ], + [ + "nall", + -14.250198364257812 + ], + [ + "▁Massa", + -14.250198364257812 + ], + [ + "Shower", + -14.250240325927734 + ], + [ + "▁antigens", + -14.250284194946287 + ], + [ + "▁contending", + -14.250288009643556 + ], + [ + "Solutions", + -14.250349044799805 + ], + [ + "Fellow", + -14.250414848327637 + ], + [ + "Pharma", + -14.250417709350586 + ], + [ + "GLOBAL", + -14.250422477722168 + ], + [ + "Celebrating", + -14.250469207763672 + ], + [ + "▁Hamper", + -14.250479698181152 + ], + [ + "Detective", + -14.250480651855469 + ], + [ + "▁demi", + -14.250495910644531 + ], + [ + "privacy", + -14.25050163269043 + ], + [ + "recovery", + -14.25051212310791 + ], + [ + "jurisdictional", + -14.25057888031006 + ], + [ + "▁Lathe", + -14.250596046447754 + ], + [ + "judicial", + -14.25062370300293 + ], + [ + "Honda", + -14.25068187713623 + ], + [ + "▁Moll", + -14.250718116760254 + ], + [ + "VV", + -14.250725746154783 + ], + [ + "hok", + -14.250750541687012 + ], + [ + "▁stipulations", + -14.250754356384276 + ], + [ + "▁CRP", + -14.250757217407228 + ], + [ + "Chances", + -14.250765800476074 + ], + [ + "Str", + -14.250767707824709 + ], + [ + "▁Lamont", + -14.250781059265137 + ], + [ + "shank", + -14.250846862792969 + ], + [ + "heid", + -14.25086498260498 + ], + [ + "▁CSE", + -14.250925064086914 + ], + [ + "▁ipod", + -14.251008987426758 + ], + [ + "grading", + -14.251014709472656 + ], + [ + "▁envi", + -14.251100540161133 + ], + [ + "▁Trang", + -14.2511625289917 + ], + [ + "Somewhere", + -14.251185417175291 + ], + [ + "Pitch", + -14.251273155212402 + ], + [ + "▁Seventeen", + -14.251447677612305 + ], + [ + "▁Prefer", + -14.251461029052734 + ], + [ + "▁Braz", + -14.251484870910645 + ], + [ + "▁STOCK", + -14.251521110534668 + ], + [ + "italia", + -14.251557350158691 + ], + [ + "WIND", + -14.251574516296388 + ], + [ + "▁MWC", + -14.251629829406738 + ], + [ + "▁horde", + -14.251636505126951 + ], + [ + "SMC", + -14.25164794921875 + ], + [ + "▁TMS", + -14.251665115356444 + ], + [ + "▁headroom", + -14.25173568725586 + ], + [ + "tained", + -14.251747131347656 + ], + [ + "▁707", + -14.251879692077637 + ], + [ + "dapat", + -14.251890182495115 + ], + [ + "▁74%", + -14.251893043518066 + ], + [ + "rle", + -14.251914978027344 + ], + [ + "▁tankers", + -14.252016067504885 + ], + [ + "▁363", + -14.252025604248049 + ], + [ + "▁Czechoslovakia", + -14.252059936523438 + ], + [ + "▁modesty", + -14.252060890197754 + ], + [ + "▁syllables", + -14.252094268798828 + ], + [ + "▁Bonner", + -14.252150535583496 + ], + [ + "kem", + -14.252323150634766 + ], + [ + "▁Counts", + -14.252352714538574 + ], + [ + "▁PEI", + -14.252357482910156 + ], + [ + "▁ICP", + -14.252459526062012 + ], + [ + "prep", + -14.252480506896973 + ], + [ + "INDER", + -14.252646446228027 + ], + [ + "▁insistent", + -14.252711296081545 + ], + [ + "Fishing", + -14.252713203430176 + ], + [ + "▁Noodles", + -14.252714157104492 + ], + [ + "NCT", + -14.252906799316406 + ], + [ + "065", + -14.252917289733888 + ], + [ + "▁Pupils", + 
-14.252943992614746 + ], + [ + "enterprise", + -14.252995491027832 + ], + [ + "▁7,500", + -14.253090858459473 + ], + [ + "▁mangrove", + -14.253097534179688 + ], + [ + "▁deport", + -14.253124237060549 + ], + [ + "▁detoxify", + -14.253156661987305 + ], + [ + "▁surcharges", + -14.253179550170898 + ], + [ + "▁bulletins", + -14.253191947937012 + ], + [ + "atorium", + -14.253217697143556 + ], + [ + "▁Caffeine", + -14.253276824951172 + ], + [ + "▁SMALL", + -14.253430366516112 + ], + [ + "dea", + -14.253443717956545 + ], + [ + "▁Fergus", + -14.253472328186035 + ], + [ + "▁Rave", + -14.253525733947754 + ], + [ + "Nm", + -14.253579139709473 + ], + [ + "cfm", + -14.253589630126951 + ], + [ + "fertilize", + -14.2535982131958 + ], + [ + "▁Dunk", + -14.253613471984863 + ], + [ + "Analyzing", + -14.253618240356444 + ], + [ + "Sputnik", + -14.253618240356444 + ], + [ + "▁Phosphate", + -14.253618240356444 + ], + [ + "▁Sturgeon", + -14.253618240356444 + ], + [ + "▁boisterous", + -14.253618240356444 + ], + [ + "▁citadel", + -14.253618240356444 + ], + [ + "▁moisturising", + -14.253618240356444 + ], + [ + "▁reprieve", + -14.253618240356444 + ], + [ + "▁unnerving", + -14.253618240356444 + ], + [ + "▁Beneficiation", + -14.253619194030762 + ], + [ + "▁bewildered", + -14.253619194030762 + ], + [ + "▁SUMMER", + -14.253620147705078 + ], + [ + "▁embroiled", + -14.253622055053713 + ], + [ + "▁Primrose", + -14.253623008728027 + ], + [ + "▁herringbone", + -14.253623962402344 + ], + [ + "▁granulated", + -14.253634452819824 + ], + [ + "▁Counsellor", + -14.253639221191406 + ], + [ + "▁Discuz", + -14.25364112854004 + ], + [ + "▁Citibank", + -14.253643035888672 + ], + [ + "▁APRIL", + -14.25364589691162 + ], + [ + "▁undivided", + -14.25364875793457 + ], + [ + "▁Kurds", + -14.253656387329102 + ], + [ + "▁Preventive", + -14.253673553466797 + ], + [ + "▁snort", + -14.25367546081543 + ], + [ + "▁(34", + -14.25367832183838 + ], + [ + "▁Discourse", + -14.25368595123291 + ], + [ + "▁Moritz", + -14.253707885742188 + ], + [ + "▁raking", + -14.253714561462402 + ], + [ + "▁NATIONAL", + -14.253745079040527 + ], + [ + "▁Moab", + -14.25374698638916 + ], + [ + "hina", + -14.253768920898438 + ], + [ + "▁supersonic", + -14.25377082824707 + ], + [ + "▁lcd", + -14.253791809082031 + ], + [ + "▁symphonic", + -14.25381565093994 + ], + [ + "▁rusting", + -14.253826141357422 + ], + [ + "▁Asperger", + -14.253846168518066 + ], + [ + "▁worksite", + -14.253854751586914 + ], + [ + "▁victorian", + -14.253859519958496 + ], + [ + "▁Morningstar", + -14.25387477874756 + ], + [ + "▁Dez", + -14.253877639770508 + ], + [ + "▁Schemes", + -14.253944396972656 + ], + [ + "▁purportedly", + -14.25399398803711 + ], + [ + "garage", + -14.25401496887207 + ], + [ + "nih", + -14.254104614257812 + ], + [ + "▁pyro", + -14.25416088104248 + ], + [ + "zada", + -14.254199981689451 + ], + [ + "▁midrange", + -14.254227638244627 + ], + [ + "ebi", + -14.254257202148438 + ], + [ + "▁loaders", + -14.254294395446776 + ], + [ + "HIV", + -14.254311561584473 + ], + [ + "laptop", + -14.254364967346191 + ], + [ + "▁Cloudy", + -14.25437355041504 + ], + [ + "▁Fac", + -14.25445556640625 + ], + [ + "ENSE", + -14.254493713378906 + ], + [ + "41)", + -14.254511833190918 + ], + [ + "тер", + -14.254520416259766 + ], + [ + "▁$12,000", + -14.254631996154783 + ], + [ + "▁3600", + -14.254646301269531 + ], + [ + "▁propagated", + -14.254651069641112 + ], + [ + "▁catalyze", + -14.254749298095703 + ], + [ + "▁Neiman", + -14.25475025177002 + ], + [ + "▁£150", + -14.25477123260498 + ], + [ + "▁splints", + 
-14.254804611206056 + ], + [ + "▁Tien", + -14.254996299743652 + ], + [ + "1234", + -14.25507926940918 + ], + [ + "▁footbed", + -14.255105018615724 + ], + [ + "▁Predators", + -14.255107879638672 + ], + [ + "▁colourway", + -14.255117416381836 + ], + [ + "▁50-60", + -14.255142211914062 + ], + [ + "▁JAMA", + -14.25518035888672 + ], + [ + "▁Cinco", + -14.25519847869873 + ], + [ + "▁DMCA", + -14.255231857299805 + ], + [ + "eyer", + -14.255237579345703 + ], + [ + "▁shrinks", + -14.25525188446045 + ], + [ + "illas", + -14.25526523590088 + ], + [ + "▁Gorman", + -14.2553071975708 + ], + [ + "▁Expires", + -14.255393028259276 + ], + [ + "▁Wraps", + -14.255406379699709 + ], + [ + "Gather", + -14.25543212890625 + ], + [ + "▁caries", + -14.255464553833008 + ], + [ + "▁XIII", + -14.255478858947754 + ], + [ + "▁Kud", + -14.25551414489746 + ], + [ + "▁Skies", + -14.255517959594728 + ], + [ + "Convey", + -14.25551986694336 + ], + [ + "violet", + -14.255536079406738 + ], + [ + "Videos", + -14.255603790283203 + ], + [ + "▁chalets", + -14.255623817443848 + ], + [ + "▁comrade", + -14.25566864013672 + ], + [ + "▁spasm", + -14.25578498840332 + ], + [ + "YG", + -14.255826950073242 + ], + [ + "Twelve", + -14.255914688110352 + ], + [ + "TIVE", + -14.255915641784668 + ], + [ + "Strategy", + -14.25593090057373 + ], + [ + "apparently", + -14.25593376159668 + ], + [ + "▁2040", + -14.256062507629396 + ], + [ + "apartment", + -14.256118774414062 + ], + [ + "SHOT", + -14.256135940551758 + ], + [ + "ogg", + -14.25618839263916 + ], + [ + "▁consign", + -14.256299018859863 + ], + [ + "sprit", + -14.25629997253418 + ], + [ + "quoi", + -14.25636100769043 + ], + [ + "▁Encrypt", + -14.256443977355955 + ], + [ + "▁Chromebooks", + -14.25653839111328 + ], + [ + "▁Abortion", + -14.256587028503418 + ], + [ + "inne", + -14.25658893585205 + ], + [ + "▁oversold", + -14.256635665893556 + ], + [ + "canada", + -14.256636619567873 + ], + [ + "mography", + -14.2566556930542 + ], + [ + "▁clashed", + -14.256742477416992 + ], + [ + "TERRA", + -14.256786346435549 + ], + [ + "▁tran", + -14.256973266601562 + ], + [ + "▁Missed", + -14.25698184967041 + ], + [ + "dade", + -14.257038116455078 + ], + [ + "▁slugs", + -14.257086753845217 + ], + [ + "retta", + -14.257098197937012 + ], + [ + "Enhanced", + -14.25716495513916 + ], + [ + "▁1847", + -14.257184982299805 + ], + [ + "▁eel", + -14.257208824157717 + ], + [ + "Involve", + -14.257225036621094 + ], + [ + "aciones", + -14.257225036621094 + ], + [ + "▁TSB", + -14.25722599029541 + ], + [ + "▁playfully", + -14.257293701171877 + ], + [ + "pointed", + -14.257309913635254 + ], + [ + "▁Shun", + -14.257431983947754 + ], + [ + "▁BUS", + -14.257526397705078 + ], + [ + "▁AMT", + -14.257580757141112 + ], + [ + "▁flicks", + -14.257617950439451 + ], + [ + "▁23,000", + -14.25762939453125 + ], + [ + "Reporting", + -14.257644653320312 + ], + [ + "3]", + -14.25781536102295 + ], + [ + "Austria", + -14.257862091064451 + ], + [ + "▁Professionally", + -14.25791835784912 + ], + [ + "▁condone", + -14.257949829101562 + ], + [ + "▁DIG", + -14.257965087890623 + ], + [ + "RANT", + -14.257980346679688 + ], + [ + "engineer", + -14.258001327514648 + ], + [ + "▁Packer", + -14.258001327514648 + ], + [ + "enci", + -14.258007049560549 + ], + [ + "▁Males", + -14.258015632629396 + ], + [ + "igg", + -14.258016586303713 + ], + [ + "▁aficionado", + -14.258020401000977 + ], + [ + "▁Blom", + -14.258023262023926 + ], + [ + "▁Lined", + -14.258031845092772 + ], + [ + "▁DART", + -14.25804328918457 + ], + [ + "OMB", + -14.258113861083984 + ], + [ + 
"Turns", + -14.258118629455566 + ], + [ + "▁opiate", + -14.258132934570312 + ], + [ + "▁misfit", + -14.258169174194336 + ], + [ + "▁appraise", + -14.25820255279541 + ], + [ + "▁lubricate", + -14.258336067199709 + ], + [ + "asti", + -14.258342742919922 + ], + [ + "/2007", + -14.258343696594238 + ], + [ + "Oreal", + -14.25849437713623 + ], + [ + "▁SAI", + -14.258546829223633 + ], + [ + "valley", + -14.258588790893556 + ], + [ + "REP", + -14.258615493774414 + ], + [ + "▁utopia", + -14.258711814880373 + ], + [ + "pora", + -14.258736610412598 + ], + [ + "▁261", + -14.25875186920166 + ], + [ + "▁870", + -14.258807182312012 + ], + [ + "saki", + -14.258819580078123 + ], + [ + "Taken", + -14.258852005004885 + ], + [ + "▁PCP", + -14.258864402770996 + ], + [ + "▁Saleh", + -14.258878707885742 + ], + [ + "▁Bamb", + -14.25888442993164 + ], + [ + "puff", + -14.2589111328125 + ], + [ + "Strict", + -14.258934020996094 + ], + [ + "nelle", + -14.258934020996094 + ], + [ + "Evidently", + -14.25893783569336 + ], + [ + "▁DeKalb", + -14.25893783569336 + ], + [ + "▁Launceston", + -14.25893783569336 + ], + [ + "▁MANAGEMENT", + -14.25893783569336 + ], + [ + "▁Preparatory", + -14.25893783569336 + ], + [ + "▁Salomon", + -14.25893783569336 + ], + [ + "▁Wrexham", + -14.25893783569336 + ], + [ + "▁calamity", + -14.25893783569336 + ], + [ + "▁derogatory", + -14.25893783569336 + ], + [ + "▁drowsiness", + -14.25893783569336 + ], + [ + "▁epidermis", + -14.25893783569336 + ], + [ + "▁quintet", + -14.25893783569336 + ], + [ + "▁renaming", + -14.25893783569336 + ], + [ + "▁tapestries", + -14.25893783569336 + ], + [ + "▁unscathed", + -14.25893783569336 + ], + [ + "▁Ladbroke", + -14.258938789367676 + ], + [ + "▁dynamite", + -14.258938789367676 + ], + [ + "▁hydrochloride", + -14.258939743041992 + ], + [ + "luminescent", + -14.258941650390623 + ], + [ + "▁Prosecco", + -14.258941650390623 + ], + [ + "▁Quinoa", + -14.258944511413574 + ], + [ + "▁Butterflies", + -14.258947372436523 + ], + [ + "▁Sheehan", + -14.258966445922852 + ], + [ + "▁NetSuite", + -14.258971214294434 + ], + [ + "▁mafia", + -14.258973121643066 + ], + [ + "▁carafe", + -14.258974075317385 + ], + [ + "▁Clancy", + -14.258975982666016 + ], + [ + "▁subcutaneous", + -14.25898551940918 + ], + [ + "▁tricycle", + -14.258999824523926 + ], + [ + "itution", + -14.259008407592772 + ], + [ + "▁Instrumental", + -14.25901699066162 + ], + [ + "▁Monique", + -14.259035110473633 + ], + [ + "▁Wha", + -14.25904655456543 + ], + [ + "▁cull", + -14.25904655456543 + ], + [ + "▁Additive", + -14.259057998657228 + ], + [ + "▁cinder", + -14.259061813354492 + ], + [ + "inck", + -14.259063720703123 + ], + [ + "▁Motley", + -14.259066581726074 + ], + [ + "▁Paradigm", + -14.2590913772583 + ], + [ + "▁Rugged", + -14.25912380218506 + ], + [ + "▁Testimonials", + -14.25912857055664 + ], + [ + "igny", + -14.25916576385498 + ], + [ + "500,000", + -14.259181022644045 + ], + [ + "MeToo", + -14.259200096130373 + ], + [ + "▁2016-2017", + -14.259215354919434 + ], + [ + "greater", + -14.259224891662598 + ], + [ + "▁Cottonwood", + -14.259259223937988 + ], + [ + "TENS", + -14.25926685333252 + ], + [ + "▁Eun", + -14.259305953979492 + ], + [ + "vast", + -14.25930881500244 + ], + [ + "disqualification", + -14.259318351745604 + ], + [ + "▁Rocker", + -14.25932788848877 + ], + [ + "▁NAF", + -14.259329795837402 + ], + [ + "TERS", + -14.25934886932373 + ], + [ + "Monte", + -14.259358406066896 + ], + [ + "▁Moana", + -14.259389877319336 + ], + [ + "▁embarks", + -14.259418487548828 + ], + [ + "▁Happening", + 
-14.25945281982422 + ], + [ + "▁sexist", + -14.25945281982422 + ], + [ + "▁Sprite", + -14.259464263916016 + ], + [ + "▁ironed", + -14.259519577026367 + ], + [ + "▁scoured", + -14.259523391723633 + ], + [ + "▁Jodie", + -14.259570121765137 + ], + [ + "▁perceives", + -14.259747505187988 + ], + [ + "anski", + -14.25974941253662 + ], + [ + "▁Odor", + -14.25981330871582 + ], + [ + "▁#15", + -14.259879112243652 + ], + [ + "iken", + -14.260014533996582 + ], + [ + "▁Marking", + -14.260048866271973 + ], + [ + "▁lifesaver", + -14.260066032409668 + ], + [ + "▁18.5", + -14.260068893432615 + ], + [ + "▁ruptured", + -14.260082244873049 + ], + [ + "▁lockdown", + -14.260104179382324 + ], + [ + "hacker", + -14.260111808776855 + ], + [ + "▁Colton", + -14.26014518737793 + ], + [ + "▁outbursts", + -14.260177612304688 + ], + [ + "▁july", + -14.260213851928713 + ], + [ + "RIDE", + -14.26021957397461 + ], + [ + "▁Shakti", + -14.260235786437988 + ], + [ + "1983", + -14.260287284851074 + ], + [ + "▁Vibrant", + -14.260321617126465 + ], + [ + "#8", + -14.260486602783203 + ], + [ + "▁Refill", + -14.260527610778809 + ], + [ + "▁Imports", + -14.260528564453123 + ], + [ + "▁postmodern", + -14.26052951812744 + ], + [ + "▁Eighty", + -14.260533332824709 + ], + [ + "numer", + -14.26053524017334 + ], + [ + "▁lashing", + -14.26059627532959 + ], + [ + "vocal", + -14.260608673095703 + ], + [ + "▁Bagel", + -14.260733604431152 + ], + [ + "▁FAM", + -14.260734558105469 + ], + [ + "▁bandana", + -14.260782241821287 + ], + [ + "MIR", + -14.26081371307373 + ], + [ + "massive", + -14.26086711883545 + ], + [ + "▁parametric", + -14.260897636413574 + ], + [ + "▁halting", + -14.260900497436523 + ], + [ + "junction", + -14.260950088500977 + ], + [ + "▁Tunes", + -14.260968208312988 + ], + [ + "▁Sev", + -14.261040687561035 + ], + [ + "▁hasty", + -14.26105785369873 + ], + [ + "▁Regan", + -14.261093139648438 + ], + [ + "▁underwriters", + -14.261102676391602 + ], + [ + "▁Serves", + -14.261104583740234 + ], + [ + "hida", + -14.261176109313965 + ], + [ + "Francis", + -14.26133155822754 + ], + [ + "▁antidepressants", + -14.26136589050293 + ], + [ + "Compatible", + -14.261380195617676 + ], + [ + "obsessed", + -14.261393547058104 + ], + [ + "Defense", + -14.26140022277832 + ], + [ + "Stuart", + -14.26142692565918 + ], + [ + "▁judgmental", + -14.261431694030762 + ], + [ + "▁Biel", + -14.26144790649414 + ], + [ + "▁tucking", + -14.261466026306152 + ], + [ + "Legend", + -14.26154899597168 + ], + [ + "popping", + -14.26156520843506 + ], + [ + "▁Whose", + -14.261597633361816 + ], + [ + "ligh", + -14.261615753173828 + ], + [ + "▁mehr", + -14.26162052154541 + ], + [ + "creek", + -14.261636734008787 + ], + [ + "Imp", + -14.261656761169434 + ], + [ + "▁Vul", + -14.261659622192385 + ], + [ + "▁Roadster", + -14.261674880981444 + ], + [ + "Plex", + -14.261725425720217 + ], + [ + "▁Wach", + -14.261746406555176 + ], + [ + "intended", + -14.26175022125244 + ], + [ + "▁Drawn", + -14.26175594329834 + ], + [ + "ugo", + -14.261762619018556 + ], + [ + "▁molybdenum", + -14.261801719665527 + ], + [ + "▁Juris", + -14.261859893798828 + ], + [ + "▁Cowan", + -14.26186466217041 + ], + [ + "▁Audition", + -14.261870384216309 + ], + [ + "▁Gadget", + -14.261882781982422 + ], + [ + "▁Undoubtedly", + -14.261907577514648 + ], + [ + "419", + -14.261943817138672 + ], + [ + "▁NRI", + -14.262004852294922 + ], + [ + "▁Cena", + -14.262055397033691 + ], + [ + "▁RAS", + -14.26210117340088 + ], + [ + "▁bystanders", + -14.26211643218994 + ], + [ + "7:00", + -14.26224422454834 + ], + [ + "▁Judo", 
+ -14.262248992919922 + ], + [ + "instrument", + -14.262323379516602 + ], + [ + "▁Manda", + -14.262332916259766 + ], + [ + "▁0.2%", + -14.262343406677246 + ], + [ + "▁Bluebird", + -14.262384414672852 + ], + [ + "▁Tania", + -14.262428283691406 + ], + [ + "Uber", + -14.262495994567873 + ], + [ + "rbo", + -14.262516021728516 + ], + [ + "▁Riddle", + -14.262537002563477 + ], + [ + "Granted", + -14.26255702972412 + ], + [ + "▁bode", + -14.262616157531738 + ], + [ + "▁Joss", + -14.26262092590332 + ], + [ + "reporting", + -14.262664794921877 + ], + [ + "▁crunching", + -14.262736320495604 + ], + [ + "▁dupe", + -14.262738227844238 + ], + [ + "▁Mamma", + -14.262784957885742 + ], + [ + "▁Thal", + -14.262791633605955 + ], + [ + "]\"", + -14.262832641601562 + ], + [ + "SSD", + -14.262874603271484 + ], + [ + "uzz", + -14.263004302978516 + ], + [ + "shooter", + -14.263063430786133 + ], + [ + "▁discounting", + -14.263145446777344 + ], + [ + "▁coiled", + -14.26315212249756 + ], + [ + "▁Maru", + -14.263176918029783 + ], + [ + "PERS", + -14.263205528259276 + ], + [ + "platz", + -14.263239860534668 + ], + [ + "▁tuners", + -14.263351440429688 + ], + [ + "ROB", + -14.263355255126951 + ], + [ + "▁STORE", + -14.2633638381958 + ], + [ + "GRAPH", + -14.263378143310549 + ], + [ + "arse", + -14.263400077819824 + ], + [ + "android", + -14.26340389251709 + ], + [ + "▁croissant", + -14.26341724395752 + ], + [ + "▁DAB", + -14.263426780700684 + ], + [ + "▁keyhole", + -14.26353645324707 + ], + [ + "▁Cedric", + -14.26353931427002 + ], + [ + "binary", + -14.263628005981444 + ], + [ + "Enjoyed", + -14.263628959655762 + ], + [ + "▁viva", + -14.263712882995604 + ], + [ + "▁816", + -14.263748168945312 + ], + [ + "tke", + -14.263774871826172 + ], + [ + "▁trawl", + -14.263789176940918 + ], + [ + "▁hula", + -14.263861656188965 + ], + [ + "resort", + -14.263968467712402 + ], + [ + "McK", + -14.264023780822754 + ], + [ + "retto", + -14.264023780822754 + ], + [ + "mentor", + -14.26405143737793 + ], + [ + "▁Avoiding", + -14.26418685913086 + ], + [ + "▁Newest", + -14.26420783996582 + ], + [ + "▁Haber", + -14.264217376708984 + ], + [ + "▁thai", + -14.264240264892578 + ], + [ + "▁strikers", + -14.264243125915527 + ], + [ + "fridge", + -14.264249801635742 + ], + [ + "85%", + -14.264266967773438 + ], + [ + "▁Burroughs", + -14.26428508758545 + ], + [ + "▁compensating", + -14.26428508758545 + ], + [ + "▁malleable", + -14.26428508758545 + ], + [ + "▁sanctity", + -14.26428508758545 + ], + [ + "▁telematics", + -14.26428508758545 + ], + [ + "▁thirties", + -14.26428508758545 + ], + [ + "▁Socrates", + -14.264287948608398 + ], + [ + "▁fungicide", + -14.264287948608398 + ], + [ + "▁Optimum", + -14.264288902282717 + ], + [ + "▁ischemic", + -14.264288902282717 + ], + [ + "▁vertigo", + -14.264288902282717 + ], + [ + "▁Braxton", + -14.264289855957031 + ], + [ + "▁Herzog", + -14.264291763305664 + ], + [ + "▁TRAVEL", + -14.264291763305664 + ], + [ + "▁neoliberal", + -14.264291763305664 + ], + [ + "▁Quattro", + -14.26429271697998 + ], + [ + "▁Radcliffe", + -14.264293670654297 + ], + [ + "▁ELISA", + -14.264294624328612 + ], + [ + "▁grapevine", + -14.264294624328612 + ], + [ + "▁Balboa", + -14.26429557800293 + ], + [ + "▁Reinforced", + -14.264301300048828 + ], + [ + "▁bearers", + -14.264302253723145 + ], + [ + "▁polenta", + -14.264307975769045 + ], + [ + "▁Roblox", + -14.264309883117676 + ], + [ + "acci", + -14.264311790466309 + ], + [ + "▁woofer", + -14.264314651489258 + ], + [ + "▁microbiology", + -14.264317512512209 + ], + [ + "▁Betfair", + 
-14.264322280883787 + ], + [ + "▁shun", + -14.264364242553713 + ], + [ + "Hobbies", + -14.264370918273926 + ], + [ + "▁discography", + -14.264389038085938 + ], + [ + "▁TIMES", + -14.264394760131836 + ], + [ + "▁Outlander", + -14.264396667480469 + ], + [ + "▁Sheeran", + -14.26440143585205 + ], + [ + "▁sinned", + -14.264404296875 + ], + [ + "▁interviewers", + -14.264412879943848 + ], + [ + "▁Grammys", + -14.264469146728516 + ], + [ + "▁Sanitation", + -14.264498710632324 + ], + [ + "▁2015-2016", + -14.264509201049805 + ], + [ + "▁chateau", + -14.26452350616455 + ], + [ + "issimo", + -14.264617919921877 + ], + [ + "▁Protestants", + -14.264633178710938 + ], + [ + "▁Footprint", + -14.264687538146973 + ], + [ + "▁SENS", + -14.2647123336792 + ], + [ + "▁Whitehall", + -14.264719009399414 + ], + [ + "▁Intention", + -14.264756202697754 + ], + [ + "▁husky", + -14.26476001739502 + ], + [ + "▁SSDI", + -14.264780044555664 + ], + [ + "alpa", + -14.264814376831056 + ], + [ + "▁Incentives", + -14.264816284179688 + ], + [ + "▁hijacking", + -14.264826774597168 + ], + [ + "▁Ayala", + -14.264892578125 + ], + [ + "▁seafront", + -14.264986991882324 + ], + [ + "russia", + -14.26499366760254 + ], + [ + "aholic", + -14.264999389648438 + ], + [ + "▁Disable", + -14.265007972717283 + ], + [ + "оо", + -14.26501178741455 + ], + [ + "▁UTI", + -14.265023231506348 + ], + [ + "▁Enix", + -14.26503849029541 + ], + [ + "▁Ratna", + -14.265069961547852 + ], + [ + "▁Friar", + -14.26508617401123 + ], + [ + "▁housewife", + -14.265093803405762 + ], + [ + "▁TIG", + -14.265178680419922 + ], + [ + "Courses", + -14.265212059020996 + ], + [ + "▁bacterium", + -14.265260696411133 + ], + [ + "▁HOTEL", + -14.265308380126951 + ], + [ + "-3/4\"", + -14.265318870544434 + ], + [ + "▁Overlay", + -14.265339851379396 + ], + [ + "▁schoolers", + -14.26534366607666 + ], + [ + "▁Kato", + -14.265381813049316 + ], + [ + "attend", + -14.265400886535645 + ], + [ + "raptor", + -14.265403747558594 + ], + [ + "▁Lewiston", + -14.265409469604492 + ], + [ + "lusion", + -14.265419006347656 + ], + [ + "branding", + -14.26544952392578 + ], + [ + "/2000", + -14.26546859741211 + ], + [ + "orian", + -14.26547622680664 + ], + [ + "Insurers", + -14.26550579071045 + ], + [ + "ruby", + -14.265517234802246 + ], + [ + "▁Rumors", + -14.265533447265623 + ], + [ + "▁Downton", + -14.26553726196289 + ], + [ + "▁asses", + -14.265564918518066 + ], + [ + "pang", + -14.265647888183594 + ], + [ + "▁Wentz", + -14.26567268371582 + ], + [ + "scopic", + -14.26569652557373 + ], + [ + "▁Sundar", + -14.265710830688477 + ], + [ + "▁zesty", + -14.265767097473145 + ], + [ + "▁Sanderson", + -14.265769004821776 + ], + [ + "▁Dozens", + -14.265817642211914 + ], + [ + "▁SIT", + -14.265857696533203 + ], + [ + "▁Eri", + -14.265889167785645 + ], + [ + "iquette", + -14.265936851501465 + ], + [ + "cratic", + -14.265965461730955 + ], + [ + "zard", + -14.265993118286133 + ], + [ + "▁TSX", + -14.266074180603027 + ], + [ + "▁Fonda", + -14.26608943939209 + ], + [ + "▁offbeat", + -14.26611042022705 + ], + [ + "▁Capitan", + -14.266157150268556 + ], + [ + "▁nave", + -14.26616382598877 + ], + [ + "▁(14)", + -14.26616668701172 + ], + [ + "▁hookups", + -14.26630401611328 + ], + [ + "izar", + -14.26630973815918 + ], + [ + "westerly", + -14.26634407043457 + ], + [ + "▁thirteenth", + -14.266349792480469 + ], + [ + "erous", + -14.26636028289795 + ], + [ + "▁BLU", + -14.26638412475586 + ], + [ + "▁nba", + -14.266395568847656 + ], + [ + "ATM", + -14.266432762145996 + ], + [ + "▁6:45", + -14.266595840454102 + ], + [ + 
"uha", + -14.266613006591797 + ], + [ + "luster", + -14.266636848449709 + ], + [ + "Mau", + -14.266642570495604 + ], + [ + "▁Aye", + -14.266648292541504 + ], + [ + "▁bulkhead", + -14.266661643981934 + ], + [ + "previous", + -14.2666654586792 + ], + [ + "▁Startups", + -14.266799926757812 + ], + [ + "▁gab", + -14.26685905456543 + ], + [ + "Laptop", + -14.266887664794922 + ], + [ + "▁Mullins", + -14.266890525817873 + ], + [ + "Missouri", + -14.266891479492188 + ], + [ + "Vinyl", + -14.26689910888672 + ], + [ + "▁subcontinent", + -14.266948699951172 + ], + [ + "▁uninstalling", + -14.267019271850586 + ], + [ + "▁cannons", + -14.2670316696167 + ], + [ + "Warner", + -14.26704216003418 + ], + [ + "▁Announced", + -14.267045974731444 + ], + [ + "▁shavings", + -14.267046928405762 + ], + [ + "Afterwards", + -14.267051696777344 + ], + [ + "▁Strips", + -14.26705265045166 + ], + [ + "▁CART", + -14.267138481140137 + ], + [ + "eln", + -14.267139434814451 + ], + [ + "wink", + -14.267149925231934 + ], + [ + "shoes", + -14.267241477966309 + ], + [ + "CORE", + -14.267248153686523 + ], + [ + "Princess", + -14.267279624938965 + ], + [ + "REF", + -14.26730728149414 + ], + [ + "police", + -14.26731300354004 + ], + [ + "dain", + -14.267328262329102 + ], + [ + "▁automat", + -14.26736068725586 + ], + [ + "▁DCC", + -14.267366409301758 + ], + [ + "▁fabulously", + -14.267389297485352 + ], + [ + "pré", + -14.26743507385254 + ], + [ + "KIM", + -14.26747226715088 + ], + [ + "gig", + -14.26748752593994 + ], + [ + "practical", + -14.267502784729004 + ], + [ + "Horn", + -14.267585754394531 + ], + [ + "Resolution", + -14.267627716064451 + ], + [ + "▁pearly", + -14.267633438110352 + ], + [ + "chek", + -14.267765998840332 + ], + [ + "gross", + -14.267766952514648 + ], + [ + "▁swish", + -14.267779350280762 + ], + [ + "▁Petri", + -14.26782512664795 + ], + [ + "▁Teo", + -14.267865180969238 + ], + [ + "▁insures", + -14.26789379119873 + ], + [ + "▁Sly", + -14.26794147491455 + ], + [ + "▁solvers", + -14.267979621887209 + ], + [ + "▁Argan", + -14.267990112304688 + ], + [ + "XE", + -14.2680025100708 + ], + [ + "▁mead", + -14.268016815185549 + ], + [ + "▁Safer", + -14.26803493499756 + ], + [ + "hose", + -14.268040657043455 + ], + [ + "▁Astor", + -14.268047332763672 + ], + [ + "raze", + -14.26806354522705 + ], + [ + "▁pavements", + -14.268099784851074 + ], + [ + "theft", + -14.268110275268556 + ], + [ + "▁Flavors", + -14.268112182617188 + ], + [ + "▁jib", + -14.268224716186523 + ], + [ + "stellar", + -14.268280029296877 + ], + [ + "▁OKC", + -14.268287658691406 + ], + [ + "▁Tumor", + -14.26829433441162 + ], + [ + "nberg", + -14.26838493347168 + ], + [ + "▁ppl", + -14.268434524536133 + ], + [ + "taka", + -14.268465042114258 + ], + [ + "designated", + -14.268475532531738 + ], + [ + "ansi", + -14.268521308898926 + ], + [ + "▁Headset", + -14.268563270568848 + ], + [ + "micron", + -14.268607139587402 + ], + [ + "modules", + -14.268617630004885 + ], + [ + "922", + -14.268701553344728 + ], + [ + "ONT", + -14.26870346069336 + ], + [ + "MUS", + -14.268714904785156 + ], + [ + "stilling", + -14.268752098083496 + ], + [ + "URS", + -14.268802642822266 + ], + [ + "atio", + -14.26882553100586 + ], + [ + "▁Raptor", + -14.268933296203612 + ], + [ + "▁PATH", + -14.268990516662598 + ], + [ + "▁imperfection", + -14.26899242401123 + ], + [ + "▁armrest", + -14.269003868103027 + ], + [ + "olone", + -14.269071578979492 + ], + [ + "▁patriots", + -14.269092559814451 + ], + [ + "▁dente", + -14.269170761108398 + ], + [ + "▁Yad", + -14.269193649291992 + ], + [ + 
"nost", + -14.26921272277832 + ], + [ + "yng", + -14.269227027893066 + ], + [ + "▁plunder", + -14.269289016723633 + ], + [ + "lavi", + -14.269356727600098 + ], + [ + "▁433", + -14.269403457641602 + ], + [ + "pei", + -14.26941967010498 + ], + [ + "▁Voodoo", + -14.269448280334473 + ], + [ + "▁modifiers", + -14.269450187683104 + ], + [ + "ceptive", + -14.269464492797852 + ], + [ + "▁auth", + -14.26951503753662 + ], + [ + "▁torpedo", + -14.269519805908203 + ], + [ + "1981", + -14.269601821899414 + ], + [ + "Articles", + -14.269624710083008 + ], + [ + "▁Pitts", + -14.269647598266602 + ], + [ + "▁04/23/2019", + -14.269661903381348 + ], + [ + "▁24/7/365", + -14.269661903381348 + ], + [ + "▁Amusement", + -14.269661903381348 + ], + [ + "▁Liturgy", + -14.269661903381348 + ], + [ + "▁Lothian", + -14.269661903381348 + ], + [ + "▁Minolta", + -14.269661903381348 + ], + [ + "▁Monticello", + -14.269661903381348 + ], + [ + "▁Parramatta", + -14.269661903381348 + ], + [ + "▁Pulmonary", + -14.269661903381348 + ], + [ + "▁Texarkana", + -14.269661903381348 + ], + [ + "▁alleviation", + -14.269661903381348 + ], + [ + "▁concentrator", + -14.269661903381348 + ], + [ + "▁democracies", + -14.269661903381348 + ], + [ + "▁extraordinaire", + -14.269661903381348 + ], + [ + "▁handkerchief", + -14.269661903381348 + ], + [ + "▁lactation", + -14.269661903381348 + ], + [ + "▁lexapro", + -14.269661903381348 + ], + [ + "▁metallurgy", + -14.269661903381348 + ], + [ + "▁prescriptive", + -14.269661903381348 + ], + [ + "▁prosciutto", + -14.269661903381348 + ], + [ + "▁Ocasio", + -14.269662857055664 + ], + [ + "▁perplexing", + -14.269662857055664 + ], + [ + "▁Heinrich", + -14.269664764404297 + ], + [ + "▁Wilfred", + -14.269665718078612 + ], + [ + "878", + -14.269670486450195 + ], + [ + "▁janitorial", + -14.269670486450195 + ], + [ + "▁disarray", + -14.269671440124512 + ], + [ + "▁prosthesis", + -14.269675254821776 + ], + [ + "▁hyperactivity", + -14.269681930541992 + ], + [ + "▁Calories", + -14.26968765258789 + ], + [ + "▁emphatically", + -14.269689559936523 + ], + [ + "▁Zia", + -14.269691467285156 + ], + [ + "▁campervan", + -14.269696235656738 + ], + [ + "▁unsolved", + -14.269697189331056 + ], + [ + "▁cavernous", + -14.26970386505127 + ], + [ + "▁Boogie", + -14.26970672607422 + ], + [ + "▁Patsy", + -14.269719123840332 + ], + [ + "▁hvac", + -14.26972198486328 + ], + [ + "▁disillusioned", + -14.26973819732666 + ], + [ + "50.00", + -14.26974105834961 + ], + [ + "▁hush", + -14.26974105834961 + ], + [ + "▁pervade", + -14.269767761230469 + ], + [ + "▁UConn", + -14.26979637145996 + ], + [ + "Amp", + -14.269798278808594 + ], + [ + "▁snarl", + -14.269807815551758 + ], + [ + "▁Crafty", + -14.269882202148438 + ], + [ + "▁foiled", + -14.269888877868652 + ], + [ + "▁Plaid", + -14.269935607910156 + ], + [ + "▁Servant", + -14.269944190979004 + ], + [ + "▁Damp", + -14.269997596740724 + ], + [ + "▁gusto", + -14.270014762878418 + ], + [ + "▁sashes", + -14.27017307281494 + ], + [ + "▁SPORTS", + -14.270218849182127 + ], + [ + "▁dishonesty", + -14.270222663879396 + ], + [ + "▁Southside", + -14.270249366760254 + ], + [ + "612", + -14.270261764526367 + ], + [ + "▁imparts", + -14.270292282104492 + ], + [ + "▁Merced", + -14.27029514312744 + ], + [ + "▁Missy", + -14.27036476135254 + ], + [ + "ksa", + -14.27038288116455 + ], + [ + "▁cred", + -14.270413398742676 + ], + [ + "▁jackson", + -14.270423889160156 + ], + [ + "Chase", + -14.270474433898926 + ], + [ + "mongering", + -14.270496368408203 + ], + [ + "▁RHP", + -14.27052879333496 + ], + [ + "gib", + 
-14.270562171936035 + ], + [ + "▁$100.00", + -14.2705659866333 + ], + [ + "▁stimulants", + -14.270573616027832 + ], + [ + "▁brav", + -14.270574569702148 + ], + [ + "throne", + -14.270623207092283 + ], + [ + "icity", + -14.270630836486816 + ], + [ + "▁Ties", + -14.27066135406494 + ], + [ + "▁capstone", + -14.270699501037598 + ], + [ + "▁Guitars", + -14.270700454711914 + ], + [ + "▁subtracting", + -14.270726203918455 + ], + [ + "▁validates", + -14.270727157592772 + ], + [ + "▁Journeys", + -14.27079963684082 + ], + [ + "896", + -14.270825386047363 + ], + [ + "▁OneNote", + -14.270870208740234 + ], + [ + "▁decisively", + -14.270910263061523 + ], + [ + "▁offshoot", + -14.270919799804688 + ], + [ + "▁Whoop", + -14.270984649658203 + ], + [ + "▁starboard", + -14.271082878112791 + ], + [ + "▁EFL", + -14.271172523498535 + ], + [ + "▁enacting", + -14.2711763381958 + ], + [ + "▁Practicing", + -14.271183967590332 + ], + [ + "▁Amiga", + -14.271233558654783 + ], + [ + "▁overtones", + -14.271258354187012 + ], + [ + "Extract", + -14.271300315856934 + ], + [ + "▁aurora", + -14.271342277526855 + ], + [ + "Vir", + -14.271370887756348 + ], + [ + "Strange", + -14.271377563476562 + ], + [ + "▁topple", + -14.271389961242676 + ], + [ + "▁captioned", + -14.271403312683104 + ], + [ + "▁accorded", + -14.271459579467772 + ], + [ + "▁Defined", + -14.271507263183594 + ], + [ + "3.8%", + -14.271516799926758 + ], + [ + "▁wildcard", + -14.271559715270996 + ], + [ + "liev", + -14.27158260345459 + ], + [ + "▁443", + -14.27158546447754 + ], + [ + "sweep", + -14.27160930633545 + ], + [ + "▁worktops", + -14.27161693572998 + ], + [ + "▁Sarri", + -14.271624565124512 + ], + [ + "▁CSF", + -14.27165412902832 + ], + [ + "▁combed", + -14.271678924560549 + ], + [ + "▁18+", + -14.271688461303713 + ], + [ + "▁Mena", + -14.27177619934082 + ], + [ + "▁homicides", + -14.271803855895996 + ], + [ + "JT", + -14.271862983703612 + ], + [ + "Keefe", + -14.271891593933104 + ], + [ + "▁Laughter", + -14.271897315979004 + ], + [ + "872", + -14.27189826965332 + ], + [ + "1.7%", + -14.271917343139648 + ], + [ + "▁triglycerides", + -14.271950721740724 + ], + [ + "▁overtaking", + -14.27197551727295 + ], + [ + "nais", + -14.2720365524292 + ], + [ + "931", + -14.27206039428711 + ], + [ + "filing", + -14.272101402282717 + ], + [ + "▁characterizes", + -14.272154808044434 + ], + [ + "▁benefic", + -14.272218704223633 + ], + [ + "▁consumerism", + -14.27221965789795 + ], + [ + "▁Infections", + -14.272226333618164 + ], + [ + "▁2.25", + -14.2722806930542 + ], + [ + "▁Tung", + -14.272286415100098 + ], + [ + "▁PERSON", + -14.272337913513184 + ], + [ + "RDC", + -14.272368431091309 + ], + [ + "▁Barak", + -14.27237606048584 + ], + [ + "▁CIOs", + -14.272391319274902 + ], + [ + "acclaimed", + -14.272415161132812 + ], + [ + "difficult", + -14.272418022155762 + ], + [ + "Lawrence", + -14.272421836853027 + ], + [ + "speech", + -14.272446632385254 + ], + [ + "▁rela", + -14.272520065307615 + ], + [ + "▁12-14", + -14.272522926330566 + ], + [ + "thyroid", + -14.272531509399414 + ], + [ + "residents", + -14.272550582885742 + ], + [ + "Protection", + -14.272553443908691 + ], + [ + "Layout", + -14.272573471069336 + ], + [ + "Whoever", + -14.272649765014648 + ], + [ + "▁belle", + -14.272711753845217 + ], + [ + "freedom", + -14.272769927978516 + ], + [ + "▁BDO", + -14.272775650024414 + ], + [ + "▁positional", + -14.272802352905272 + ], + [ + "▁Exhibitions", + -14.272882461547852 + ], + [ + "▁Rive", + -14.272896766662598 + ], + [ + "RIM", + -14.272919654846191 + ], + [ + "Mason", + 
-14.272954940795898 + ], + [ + "thur", + -14.272984504699709 + ], + [ + "Restore", + -14.27302360534668 + ], + [ + "▁99.9", + -14.273032188415527 + ], + [ + "▁CFC", + -14.273112297058104 + ], + [ + "▁brash", + -14.273116111755373 + ], + [ + "Cheers", + -14.273123741149902 + ], + [ + "inney", + -14.273152351379396 + ], + [ + "Virgin", + -14.273308753967283 + ], + [ + "▁BEN", + -14.273404121398926 + ], + [ + "▁PRS", + -14.273422241210938 + ], + [ + "Expo", + -14.273436546325684 + ], + [ + "▁lifelike", + -14.273502349853516 + ], + [ + "▁Vets", + -14.27354335784912 + ], + [ + "▁Dressed", + -14.273555755615234 + ], + [ + "▁bales", + -14.273555755615234 + ], + [ + "▁Concerning", + -14.2735595703125 + ], + [ + "ifa", + -14.273571014404297 + ], + [ + "▁Mie", + -14.273603439331056 + ], + [ + "conversion", + -14.273619651794434 + ], + [ + "▁LEAD", + -14.27364444732666 + ], + [ + "▁Nij", + -14.273659706115724 + ], + [ + "▁Moi", + -14.27367877960205 + ], + [ + "ROOM", + -14.273733139038086 + ], + [ + "▁Forgive", + -14.273740768432615 + ], + [ + "omics", + -14.273743629455566 + ], + [ + "alert", + -14.273807525634766 + ], + [ + "▁Mikey", + -14.273963928222656 + ], + [ + "▁DIP", + -14.274004936218262 + ], + [ + "▁Immune", + -14.274032592773438 + ], + [ + "▁DOUBLE", + -14.274040222167969 + ], + [ + "▁Zn", + -14.274130821228027 + ], + [ + "▁Miners", + -14.274140357971191 + ], + [ + "▁3:15", + -14.274158477783203 + ], + [ + "▁808", + -14.274187088012695 + ], + [ + "Ker", + -14.274199485778809 + ], + [ + "ikin", + -14.274236679077148 + ], + [ + "PEN", + -14.274258613586426 + ], + [ + "▁Apologies", + -14.2742919921875 + ], + [ + "▁impediment", + -14.274309158325195 + ], + [ + "▁narcotic", + -14.27432918548584 + ], + [ + "▁Caregiver", + -14.274346351623535 + ], + [ + "▁geologist", + -14.274359703063965 + ], + [ + "fans", + -14.274367332458496 + ], + [ + "▁recollections", + -14.274402618408203 + ], + [ + "▁WIDE", + -14.27443504333496 + ], + [ + "▁FSB", + -14.274442672729492 + ], + [ + "▁huddle", + -14.27449893951416 + ], + [ + "▁banger", + -14.274502754211426 + ], + [ + "▁nucleotide", + -14.2745361328125 + ], + [ + "▁lattes", + -14.274560928344728 + ], + [ + "▁flounder", + -14.27458381652832 + ], + [ + "▁84%", + -14.27459716796875 + ], + [ + "▁Conta", + -14.274741172790527 + ], + [ + "▁Faz", + -14.274785041809082 + ], + [ + "▁handball", + -14.274815559387209 + ], + [ + "▁Skiing", + -14.27483081817627 + ], + [ + "yote", + -14.274930000305176 + ], + [ + "Owners", + -14.274948120117188 + ], + [ + "Asking", + -14.274999618530272 + ], + [ + "▁OSI", + -14.27502727508545 + ], + [ + "▁Survivors", + -14.275028228759766 + ], + [ + "▁Jimenez", + -14.275067329406738 + ], + [ + "▁Swaziland", + -14.275067329406738 + ], + [ + "▁Tbilisi", + -14.275067329406738 + ], + [ + "▁Waikiki", + -14.275067329406738 + ], + [ + "▁botanist", + -14.275067329406738 + ], + [ + "▁gizmo", + -14.275067329406738 + ], + [ + "▁parlour", + -14.275067329406738 + ], + [ + "▁percussive", + -14.275067329406738 + ], + [ + "▁perpetuity", + -14.275067329406738 + ], + [ + "▁stifling", + -14.275067329406738 + ], + [ + "▁tutelage", + -14.275067329406738 + ], + [ + "▁Akademi", + -14.275069236755373 + ], + [ + "▁HISTORY", + -14.275069236755373 + ], + [ + "▁displeasure", + -14.275069236755373 + ], + [ + "▁succesful", + -14.275069236755373 + ], + [ + "▁valtrex", + -14.275069236755373 + ], + [ + "▁Immunology", + -14.275071144104004 + ], + [ + "▁SILVER", + -14.275071144104004 + ], + [ + "▁Taunton", + -14.275071144104004 + ], + [ + "▁ovaries", + -14.275071144104004 
+ ], + [ + "▁Federico", + -14.27507209777832 + ], + [ + "▁sojourn", + -14.275076866149902 + ], + [ + "▁Forklift", + -14.2750825881958 + ], + [ + "▁reinstatement", + -14.275114059448242 + ], + [ + "ombi", + -14.275140762329102 + ], + [ + "▁Jimi", + -14.275140762329102 + ], + [ + "michigan", + -14.275152206420898 + ], + [ + "▁spamming", + -14.275152206420898 + ], + [ + "▁flied", + -14.275154113769531 + ], + [ + "Dale", + -14.275174140930176 + ], + [ + "▁Stakeholder", + -14.275175094604492 + ], + [ + "▁Tribeca", + -14.275175094604492 + ], + [ + "▁coaxial", + -14.275208473205566 + ], + [ + "▁oblong", + -14.275208473205566 + ], + [ + "▁ghana", + -14.275238990783691 + ], + [ + "▁trafficked", + -14.275270462036133 + ], + [ + "▁HIIT", + -14.27528190612793 + ], + [ + "▁QuickTime", + -14.275314331054688 + ], + [ + "Jojoba", + -14.275335311889648 + ], + [ + "▁DPA", + -14.275345802307127 + ], + [ + "▁Fudge", + -14.275348663330078 + ], + [ + "▁Charley", + -14.27538776397705 + ], + [ + "▁Vivienne", + -14.275395393371582 + ], + [ + "▁glen", + -14.275396347045898 + ], + [ + "Polar", + -14.275397300720217 + ], + [ + "▁Carrick", + -14.275437355041504 + ], + [ + "▁2012-13", + -14.275467872619627 + ], + [ + "comma", + -14.275485038757324 + ], + [ + "тр", + -14.27549648284912 + ], + [ + "▁$53", + -14.275501251220703 + ], + [ + "▁MST", + -14.275530815124512 + ], + [ + "▁Janelle", + -14.275534629821776 + ], + [ + "▁nightstands", + -14.27553653717041 + ], + [ + "▁unworthy", + -14.275561332702637 + ], + [ + "▁medallist", + -14.275623321533203 + ], + [ + "▁Skipper", + -14.275642395019531 + ], + [ + "▁campaigner", + -14.275691986083984 + ], + [ + "▁tarnished", + -14.275768280029297 + ], + [ + "▁Farley", + -14.275792121887209 + ], + [ + "▁schoolchildren", + -14.275948524475098 + ], + [ + "12.00", + -14.275955200195312 + ], + [ + "isce", + -14.275962829589844 + ], + [ + "nius", + -14.27596950531006 + ], + [ + "▁1/10", + -14.275971412658691 + ], + [ + "▁flatware", + -14.276000022888184 + ], + [ + "▁Domains", + -14.276080131530762 + ], + [ + "▁Twp", + -14.276097297668455 + ], + [ + "▁IVR", + -14.276123046875 + ], + [ + "dist", + -14.27612590789795 + ], + [ + "▁Galerie", + -14.276126861572266 + ], + [ + "▁statistician", + -14.276236534118652 + ], + [ + "▁Lineage", + -14.276327133178713 + ], + [ + "▁empirically", + -14.27634334564209 + ], + [ + "▁Skel", + -14.276365280151367 + ], + [ + "▁SCSI", + -14.27639389038086 + ], + [ + "▁Breeding", + -14.276395797729492 + ], + [ + "▁kun", + -14.276418685913086 + ], + [ + "▁MPV", + -14.2764310836792 + ], + [ + "▁balmy", + -14.276453971862791 + ], + [ + "ditch", + -14.27649974822998 + ], + [ + "WV", + -14.276568412780762 + ], + [ + "Btw", + -14.276572227478027 + ], + [ + "-1-1", + -14.27659511566162 + ], + [ + "BMI", + -14.276647567749023 + ], + [ + "▁tacked", + -14.276697158813477 + ], + [ + "▁Explained", + -14.276704788208008 + ], + [ + "▁tapering", + -14.276862144470217 + ], + [ + "sections", + -14.27699851989746 + ], + [ + "▁watersports", + -14.277007102966309 + ], + [ + "▁Cochin", + -14.277009963989258 + ], + [ + "▁glu", + -14.277029037475586 + ], + [ + "926", + -14.277088165283203 + ], + [ + "conspirator", + -14.277112007141112 + ], + [ + "▁Andean", + -14.277132987976074 + ], + [ + "2100", + -14.277260780334473 + ], + [ + "▁Teng", + -14.277274131774902 + ], + [ + "▁Ono", + -14.277352333068848 + ], + [ + "▁weakens", + -14.27735424041748 + ], + [ + "▁Kwon", + -14.27745246887207 + ], + [ + "Effect", + -14.27749729156494 + ], + [ + "▁staking", + -14.277503967285156 + ], + [ + 
"▁Diverse", + -14.277750968933104 + ], + [ + "▁warping", + -14.277824401855469 + ], + [ + "▁notches", + -14.277862548828123 + ], + [ + "nana", + -14.27786922454834 + ], + [ + "Emotional", + -14.277959823608398 + ], + [ + "Committee", + -14.277962684631348 + ], + [ + "Swift", + -14.27797031402588 + ], + [ + "responsible", + -14.277974128723145 + ], + [ + "▁Yusuf", + -14.27797794342041 + ], + [ + "▁Sey", + -14.27798843383789 + ], + [ + "▁SCM", + -14.278036117553713 + ], + [ + "▁Swarm", + -14.278048515319824 + ], + [ + "Comfortable", + -14.278081893920898 + ], + [ + "latest", + -14.278100967407228 + ], + [ + "Gently", + -14.278118133544922 + ], + [ + "warning", + -14.278138160705566 + ], + [ + "▁Proton", + -14.278162956237791 + ], + [ + "▁CPT", + -14.278194427490234 + ], + [ + "▁Quinta", + -14.278215408325195 + ], + [ + "Thanksgiving", + -14.278217315673828 + ], + [ + "yog", + -14.278217315673828 + ], + [ + "aylor", + -14.278278350830078 + ], + [ + "Relationship", + -14.278314590454102 + ], + [ + "▁rerun", + -14.278332710266112 + ], + [ + "Pizza", + -14.278364181518556 + ], + [ + "▁deconstruct", + -14.27836799621582 + ], + [ + "Kara", + -14.27840805053711 + ], + [ + "Beware", + -14.278441429138184 + ], + [ + "stitched", + -14.278559684753418 + ], + [ + "▁Inbound", + -14.278677940368652 + ], + [ + "▁12.2", + -14.278718948364258 + ], + [ + "▁brokenness", + -14.278731346130373 + ], + [ + "▁Rish", + -14.278753280639648 + ], + [ + "▁Ogun", + -14.278754234313965 + ], + [ + "▁prioritizes", + -14.278759002685549 + ], + [ + "▁kindred", + -14.278770446777344 + ], + [ + "▁Brau", + -14.278778076171877 + ], + [ + "▁$15.00", + -14.278802871704102 + ], + [ + "▁($10", + -14.278853416442873 + ], + [ + "▁Ponds", + -14.278897285461426 + ], + [ + "▁PAPER", + -14.278960227966309 + ], + [ + "vention", + -14.27896213531494 + ], + [ + "Stor", + -14.27898406982422 + ], + [ + "-232", + -14.278997421264648 + ], + [ + "Empower", + -14.279032707214355 + ], + [ + "▁(38", + -14.279040336608888 + ], + [ + "nato", + -14.27915096282959 + ], + [ + "otomy", + -14.279237747192385 + ], + [ + "▁downspouts", + -14.279237747192385 + ], + [ + "▁Covent", + -14.27924633026123 + ], + [ + "▁Execute", + -14.279316902160645 + ], + [ + "ZT", + -14.27932357788086 + ], + [ + "Bol", + -14.279346466064451 + ], + [ + "▁natal", + -14.279419898986816 + ], + [ + "Mul", + -14.27944564819336 + ], + [ + "▁jigs", + -14.279447555541992 + ], + [ + "▁DESCRIPTION", + -14.27945613861084 + ], + [ + "▁warring", + -14.27950954437256 + ], + [ + "▁26,000", + -14.279533386230469 + ], + [ + "▁outgrow", + -14.279540061950684 + ], + [ + "▁Delegate", + -14.279542922973633 + ], + [ + "▁mitts", + -14.279576301574709 + ], + [ + "Pix", + -14.279586791992188 + ], + [ + "Peak", + -14.27962875366211 + ], + [ + "▁Nix", + -14.27965259552002 + ], + [ + "▁streamlines", + -14.279667854309082 + ], + [ + "▁Bobbi", + -14.279780387878418 + ], + [ + "Kh", + -14.279809951782228 + ], + [ + "▁Fetch", + -14.279873847961426 + ], + [ + "▁pious", + -14.279902458190918 + ], + [ + "MRO", + -14.27990436553955 + ], + [ + "PCB", + -14.279913902282717 + ], + [ + "▁Teh", + -14.280006408691406 + ], + [ + "ollo", + -14.280014991760254 + ], + [ + "▁Hardly", + -14.280035018920898 + ], + [ + "▁spline", + -14.280051231384276 + ], + [ + "eee", + -14.28010368347168 + ], + [ + "▁cellist", + -14.280150413513184 + ], + [ + "▁transparently", + -14.2801513671875 + ], + [ + "▁Hallway", + -14.280162811279297 + ], + [ + "▁topically", + -14.280196189880373 + ], + [ + "screened", + -14.28021240234375 + ], + [ + 
"smoke", + -14.280233383178713 + ], + [ + "Printed", + -14.280319213867188 + ], + [ + "▁abscess", + -14.280365943908691 + ], + [ + "ddi", + -14.280477523803713 + ], + [ + "▁04/22/2019", + -14.28050136566162 + ], + [ + "▁Epiphany", + -14.28050136566162 + ], + [ + "▁Ignatius", + -14.28050136566162 + ], + [ + "▁emblazoned", + -14.28050136566162 + ], + [ + "▁facsimile", + -14.28050136566162 + ], + [ + "▁hexadecimal", + -14.28050136566162 + ], + [ + "▁moustache", + -14.28050136566162 + ], + [ + "▁rehearsing", + -14.28050136566162 + ], + [ + "▁Bratislava", + -14.280502319335938 + ], + [ + "▁Darjeeling", + -14.280502319335938 + ], + [ + "▁Kurdistan", + -14.280502319335938 + ], + [ + "▁abysmal", + -14.280503273010254 + ], + [ + "▁Oatmeal", + -14.28050422668457 + ], + [ + "▁pretense", + -14.28050422668457 + ], + [ + "▁pliable", + -14.280506134033203 + ], + [ + "▁shoddy", + -14.280506134033203 + ], + [ + "▁FINRA", + -14.280508041381836 + ], + [ + "▁Spaniel", + -14.280513763427734 + ], + [ + "▁Wabash", + -14.280515670776367 + ], + [ + "▁amicable", + -14.280515670776367 + ], + [ + "▁Gutenberg", + -14.28052043914795 + ], + [ + "▁Archangel", + -14.280522346496582 + ], + [ + "▁privatisation", + -14.280523300170898 + ], + [ + "▁Mulberry", + -14.280529975891112 + ], + [ + "▁crowdsourcing", + -14.280535697937012 + ], + [ + "▁hostilities", + -14.28055191040039 + ], + [ + "ORO", + -14.280569076538086 + ], + [ + "▁marginalised", + -14.280573844909668 + ], + [ + "▁boogie", + -14.280590057373049 + ], + [ + "▁aeration", + -14.280599594116213 + ], + [ + "▁Benedictine", + -14.280604362487791 + ], + [ + "Shawn", + -14.280613899230955 + ], + [ + "▁munitions", + -14.280616760253906 + ], + [ + "▁Slaughter", + -14.280624389648438 + ], + [ + "▁Sentiment", + -14.280631065368652 + ], + [ + "▁Loads", + -14.28064250946045 + ], + [ + "▁Youngstown", + -14.280646324157717 + ], + [ + "▁PSN", + -14.280653953552246 + ], + [ + "▁receding", + -14.28065586090088 + ], + [ + "▁Deerfield", + -14.28071403503418 + ], + [ + "▁hypothesized", + -14.280733108520508 + ], + [ + "▁Secured", + -14.280756950378418 + ], + [ + "▁Breeders", + -14.280771255493164 + ], + [ + "slave", + -14.280790328979492 + ], + [ + "▁TCU", + -14.280797004699709 + ], + [ + "▁disservice", + -14.280801773071287 + ], + [ + "Adj", + -14.280811309814451 + ], + [ + "▁HOURS", + -14.28081512451172 + ], + [ + "pano", + -14.280835151672363 + ], + [ + "▁busses", + -14.280840873718262 + ], + [ + "▁timepieces", + -14.280847549438477 + ], + [ + "Northamptonshire", + -14.28090476989746 + ], + [ + "▁spreader", + -14.28090476989746 + ], + [ + "▁peddle", + -14.28091049194336 + ], + [ + "EFA", + -14.280930519104004 + ], + [ + "▁Databases", + -14.280943870544434 + ], + [ + "▁stovetop", + -14.280963897705078 + ], + [ + "▁Melville", + -14.28099536895752 + ], + [ + "NPC", + -14.280996322631836 + ], + [ + "VOC", + -14.281007766723633 + ], + [ + "▁clang", + -14.281044960021973 + ], + [ + "proofing", + -14.28108024597168 + ], + [ + "▁Toolbar", + -14.281134605407717 + ], + [ + "▁foie", + -14.281139373779297 + ], + [ + "▁Koon", + -14.281145095825195 + ], + [ + "▁piqued", + -14.28122329711914 + ], + [ + "▁Recon", + -14.281245231628418 + ], + [ + "▁prying", + -14.281265258789062 + ], + [ + "▁Pec", + -14.281268119812012 + ], + [ + "abha", + -14.281349182128906 + ], + [ + "▁Ballot", + -14.281350135803224 + ], + [ + "DAV", + -14.281357765197754 + ], + [ + "▁farce", + -14.281412124633787 + ], + [ + "▁despised", + -14.281415939331056 + ], + [ + "▁jus", + -14.281438827514648 + ], + [ + "Walt", + 
-14.281457901000977 + ], + [ + "▁Arrived", + -14.281502723693848 + ], + [ + "spoke", + -14.281529426574709 + ], + [ + "▁PACE", + -14.28154182434082 + ], + [ + "▁abounds", + -14.281572341918944 + ], + [ + "FCO", + -14.281614303588867 + ], + [ + "▁plz", + -14.28173542022705 + ], + [ + "vala", + -14.281747817993164 + ], + [ + "▁deactivated", + -14.281761169433594 + ], + [ + "▁labored", + -14.281835556030272 + ], + [ + "▁cleave", + -14.281845092773438 + ], + [ + "singh", + -14.281901359558104 + ], + [ + "▁Ascent", + -14.281937599182127 + ], + [ + "▁pistachios", + -14.282024383544922 + ], + [ + "▁MACHINE", + -14.282052040100098 + ], + [ + "▁legions", + -14.282089233398438 + ], + [ + "▁Zune", + -14.282177925109863 + ], + [ + "▁Bronco", + -14.282186508178713 + ], + [ + "▁Switches", + -14.282231330871582 + ], + [ + "▁AST", + -14.282238960266112 + ], + [ + "▁offload", + -14.282401084899902 + ], + [ + "▁NATURAL", + -14.282462120056152 + ], + [ + "▁collie", + -14.282463073730469 + ], + [ + "choke", + -14.282464027404783 + ], + [ + "▁nailing", + -14.282472610473633 + ], + [ + "▁Determined", + -14.282513618469238 + ], + [ + "▁91%", + -14.282523155212402 + ], + [ + "▁bronzer", + -14.282569885253906 + ], + [ + "▁Knapp", + -14.282577514648438 + ], + [ + "▁DoD", + -14.282590866088867 + ], + [ + "▁Cassel", + -14.282672882080078 + ], + [ + "Colin", + -14.282679557800291 + ], + [ + "▁papa", + -14.282804489135742 + ], + [ + "▁Yoon", + -14.282858848571776 + ], + [ + "▁$1,0", + -14.282876968383787 + ], + [ + "▁authorizes", + -14.282880783081056 + ], + [ + "EAM", + -14.282885551452637 + ], + [ + "hau", + -14.282885551452637 + ], + [ + "▁selectors", + -14.282946586608888 + ], + [ + "vay", + -14.282979011535645 + ], + [ + "▁279", + -14.283063888549805 + ], + [ + "HON", + -14.28314208984375 + ], + [ + "stria", + -14.283143997192385 + ], + [ + "▁Lipo", + -14.283163070678713 + ], + [ + "▁347", + -14.28325080871582 + ], + [ + "lh", + -14.283305168151855 + ], + [ + "rott", + -14.283347129821776 + ], + [ + "▁Loaf", + -14.283388137817385 + ], + [ + "Optimize", + -14.283409118652344 + ], + [ + "Plot", + -14.283425331115724 + ], + [ + "▁Bubbles", + -14.283448219299316 + ], + [ + "▁16:1", + -14.283464431762695 + ], + [ + "Claude", + -14.28348445892334 + ], + [ + "likely", + -14.28349494934082 + ], + [ + "▁Kak", + -14.283504486083984 + ], + [ + "Navigate", + -14.283512115478516 + ], + [ + "specified", + -14.28352165222168 + ], + [ + "Shanghai", + -14.28355598449707 + ], + [ + "transparent", + -14.28356647491455 + ], + [ + "Dylan", + -14.283573150634766 + ], + [ + "4]", + -14.28357982635498 + ], + [ + "▁nib", + -14.283644676208496 + ], + [ + "ceded", + -14.283650398254396 + ], + [ + "▁Sunil", + -14.283674240112305 + ], + [ + "▁Watermelon", + -14.283687591552734 + ], + [ + "Belt", + -14.283693313598633 + ], + [ + "▁quarts", + -14.28371810913086 + ], + [ + "▁Ibn", + -14.28388214111328 + ], + [ + "▁edibles", + -14.283897399902344 + ], + [ + "▁LESS", + -14.283926963806152 + ], + [ + "goda", + -14.283944129943848 + ], + [ + "▁bowels", + -14.284008026123049 + ], + [ + "MPL", + -14.284016609191896 + ], + [ + "▁$$$", + -14.284050941467283 + ], + [ + "GUI", + -14.284059524536133 + ], + [ + "youth", + -14.284090995788574 + ], + [ + "verted", + -14.284181594848633 + ], + [ + "▁burials", + -14.28425407409668 + ], + [ + "▁Lateral", + -14.28428840637207 + ], + [ + "▁SPL", + -14.284303665161133 + ], + [ + "979", + -14.284310340881348 + ], + [ + "vail", + -14.284341812133787 + ], + [ + "JACK", + -14.284385681152344 + ], + [ + "▁Followed", + 
-14.284396171569824 + ], + [ + "▁£14", + -14.284406661987305 + ], + [ + "electrical", + -14.284443855285645 + ], + [ + "oque", + -14.284501075744627 + ], + [ + "▁Gaussian", + -14.28452205657959 + ], + [ + "Escape", + -14.284546852111816 + ], + [ + "▁inquest", + -14.284601211547852 + ], + [ + "▁UPC", + -14.284607887268066 + ], + [ + "▁Loot", + -14.284643173217772 + ], + [ + "▁Harlan", + -14.284662246704102 + ], + [ + "psychological", + -14.284671783447266 + ], + [ + "▁Gav", + -14.28468418121338 + ], + [ + "assess", + -14.28474235534668 + ], + [ + "vam", + -14.2847900390625 + ], + [ + "▁Nil", + -14.284820556640623 + ], + [ + "▁Lombard", + -14.284823417663574 + ], + [ + "Raise", + -14.284876823425291 + ], + [ + "▁yak", + -14.284937858581545 + ], + [ + "▁keepsakes", + -14.284975051879885 + ], + [ + "▁Huy", + -14.285211563110352 + ], + [ + "▁Floss", + -14.285234451293944 + ], + [ + "▁516", + -14.285242080688477 + ], + [ + "▁confessions", + -14.28528118133545 + ], + [ + "▁JAC", + -14.285306930541992 + ], + [ + "▁expatriate", + -14.285338401794434 + ], + [ + "▁Requirement", + -14.285355567932127 + ], + [ + "ersen", + -14.285393714904783 + ], + [ + "consciousness", + -14.285451889038086 + ], + [ + "▁spiraling", + -14.285600662231444 + ], + [ + "▁infusions", + -14.285604476928713 + ], + [ + "▁jewelers", + -14.285625457763672 + ], + [ + "udder", + -14.28565502166748 + ], + [ + "Chiropractor", + -14.285684585571287 + ], + [ + "WAR", + -14.285738945007324 + ], + [ + "▁nyc", + -14.285775184631348 + ], + [ + "▁attested", + -14.28577709197998 + ], + [ + "▁Maharaj", + -14.28579807281494 + ], + [ + "anthus", + -14.285807609558104 + ], + [ + "▁2006;", + -14.285861015319824 + ], + [ + "▁Haru", + -14.285866737365724 + ], + [ + "tuit", + -14.28587818145752 + ], + [ + "▁Sukh", + -14.28591251373291 + ], + [ + "▁Blankets", + -14.285916328430176 + ], + [ + "▁CUSTOMER", + -14.285927772521973 + ], + [ + "▁Amazingly", + -14.2859468460083 + ], + [ + "2.6%", + -14.2859525680542 + ], + [ + "Carbohydrate", + -14.285965919494627 + ], + [ + "TORONTO", + -14.285965919494627 + ], + [ + "▁Abbotsford", + -14.285965919494627 + ], + [ + "▁Panoramic", + -14.285965919494627 + ], + [ + "▁Sarajevo", + -14.285965919494627 + ], + [ + "▁amoxicillin", + -14.285965919494627 + ], + [ + "▁enamored", + -14.285965919494627 + ], + [ + "▁pulsating", + -14.285965919494627 + ], + [ + "▁Iqbal", + -14.285966873168944 + ], + [ + "▁innocuous", + -14.285966873168944 + ], + [ + "Believing", + -14.285967826843262 + ], + [ + "▁cathode", + -14.285968780517578 + ], + [ + "▁gimbal", + -14.285968780517578 + ], + [ + "▁Arbitra", + -14.285969734191896 + ], + [ + "▁december", + -14.285969734191896 + ], + [ + "▁aeroplane", + -14.28597354888916 + ], + [ + "▁quantification", + -14.28597927093506 + ], + [ + "▁skimming", + -14.285983085632324 + ], + [ + "▁neutered", + -14.285984992980955 + ], + [ + "▁telephoto", + -14.285988807678224 + ], + [ + "▁histogram", + -14.28598976135254 + ], + [ + "▁Bandar", + -14.285991668701172 + ], + [ + "▁Abyss", + -14.28600025177002 + ], + [ + "▁Pomona", + -14.286002159118652 + ], + [ + "▁seclusion", + -14.286002159118652 + ], + [ + "▁tundra", + -14.286005020141602 + ], + [ + "▁Brixton", + -14.286016464233398 + ], + [ + "Vac", + -14.286018371582031 + ], + [ + "▁tamil", + -14.286029815673828 + ], + [ + "▁Norbert", + -14.286036491394045 + ], + [ + "▁Frankfort", + -14.286052703857422 + ], + [ + "▁Yaz", + -14.286069869995115 + ], + [ + "▁Barbour", + -14.286107063293455 + ], + [ + "▁anointed", + -14.286124229431152 + ], + [ + "▁ridership", 
+ -14.286165237426758 + ], + [ + "▁subdivided", + -14.286181449890137 + ], + [ + "▁SOUND", + -14.286214828491213 + ], + [ + "▁Yank", + -14.286242485046388 + ], + [ + "▁exalted", + -14.286276817321776 + ], + [ + "wach", + -14.28628635406494 + ], + [ + "▁gradation", + -14.28630542755127 + ], + [ + "▁holistically", + -14.28631591796875 + ], + [ + "▁unveils", + -14.286373138427734 + ], + [ + "▁Attendant", + -14.286389350891112 + ], + [ + "vion", + -14.28643035888672 + ], + [ + "▁wanders", + -14.286434173583984 + ], + [ + "▁Gunnar", + -14.286439895629885 + ], + [ + "▁Schema", + -14.28650951385498 + ], + [ + "ACTION", + -14.28657341003418 + ], + [ + "indicate", + -14.28657341003418 + ], + [ + "▁Newcomer", + -14.286585807800291 + ], + [ + "▁Pry", + -14.286588668823242 + ], + [ + "▁amenable", + -14.286592483520508 + ], + [ + "rong", + -14.286611557006836 + ], + [ + "raco", + -14.286617279052734 + ], + [ + "Auth", + -14.28661823272705 + ], + [ + "▁urea", + -14.28663444519043 + ], + [ + "▁Interment", + -14.286687850952148 + ], + [ + "▁Fai", + -14.286697387695312 + ], + [ + "▁lyricist", + -14.286714553833008 + ], + [ + "▁WTA", + -14.286773681640623 + ], + [ + "▁tattooing", + -14.286806106567385 + ], + [ + "▁Bord", + -14.286845207214355 + ], + [ + "6:00", + -14.286856651306152 + ], + [ + "▁Conception", + -14.286876678466797 + ], + [ + "ATOR", + -14.28691577911377 + ], + [ + "▁427", + -14.28691577911377 + ], + [ + "▁harmoniously", + -14.286922454833984 + ], + [ + "▁starved", + -14.286924362182615 + ], + [ + "avant", + -14.286946296691896 + ], + [ + "▁(500", + -14.286948204040527 + ], + [ + "▁Myths", + -14.28712272644043 + ], + [ + "▁Momma", + -14.287148475646973 + ], + [ + "▁failover", + -14.287152290344238 + ], + [ + "▁inflicting", + -14.287152290344238 + ], + [ + "▁AMERICAN", + -14.287179946899414 + ], + [ + "ieu", + -14.287242889404297 + ], + [ + "rental", + -14.287259101867676 + ], + [ + "▁Sampler", + -14.287307739257812 + ], + [ + "▁Roseville", + -14.287376403808594 + ], + [ + "▁Swedes", + -14.28745460510254 + ], + [ + "▁Yat", + -14.287455558776855 + ], + [ + "▁unleashing", + -14.287518501281738 + ], + [ + "▁13.3", + -14.287574768066406 + ], + [ + "▁$160", + -14.28757667541504 + ], + [ + "1700", + -14.28761863708496 + ], + [ + "▁raked", + -14.287672996520996 + ], + [ + "▁Crocker", + -14.28768825531006 + ], + [ + "▁typefaces", + -14.287796974182127 + ], + [ + "▁EMP", + -14.28783893585205 + ], + [ + "▁Bracelets", + -14.287857055664062 + ], + [ + "▁Gantt", + -14.287878036499023 + ], + [ + "▁twofold", + -14.287886619567873 + ], + [ + "atori", + -14.287981033325195 + ], + [ + "FAST", + -14.28808307647705 + ], + [ + "ruch", + -14.288119316101074 + ], + [ + "▁tangles", + -14.288124084472656 + ], + [ + "▁502", + -14.288126945495604 + ], + [ + "Reverse", + -14.288130760192873 + ], + [ + "▁515", + -14.288132667541504 + ], + [ + "▁Glade", + -14.288206100463867 + ], + [ + "▁Pima", + -14.288206100463867 + ], + [ + "ilon", + -14.288456916809082 + ], + [ + "Cord", + -14.288491249084473 + ], + [ + "▁stipulates", + -14.288556098937988 + ], + [ + "▁Rune", + -14.2886323928833 + ], + [ + "▁legislatures", + -14.288708686828612 + ], + [ + "1950", + -14.288729667663574 + ], + [ + "▁vide", + -14.288785934448242 + ], + [ + "▁CSO", + -14.288891792297363 + ], + [ + "▁gloriously", + -14.28890609741211 + ], + [ + "Suggest", + -14.2889986038208 + ], + [ + "▁(186", + -14.28902816772461 + ], + [ + "▁McNe", + -14.289055824279783 + ], + [ + "existence", + -14.289095878601074 + ], + [ + "Evaluate", + -14.289128303527832 + ], + [ + 
"adhesive", + -14.289158821105955 + ], + [ + "Nintendo", + -14.28917407989502 + ], + [ + "appointment", + -14.289175987243652 + ], + [ + "python", + -14.289196968078612 + ], + [ + "exterior", + -14.289202690124512 + ], + [ + "▁AVA", + -14.28920841217041 + ], + [ + "Revenue", + -14.28921127319336 + ], + [ + "▁Janis", + -14.28921604156494 + ], + [ + "Beam", + -14.289230346679688 + ], + [ + "▁Sato", + -14.28925323486328 + ], + [ + "MID", + -14.289261817932127 + ], + [ + "▁medallions", + -14.289299011230469 + ], + [ + "891", + -14.289356231689451 + ], + [ + "▁numbing", + -14.2894287109375 + ], + [ + "▁forearms", + -14.289432525634766 + ], + [ + "Rare", + -14.289443969726562 + ], + [ + "Qu", + -14.289448738098145 + ], + [ + "▁recycler", + -14.2894926071167 + ], + [ + "▁690", + -14.289501190185549 + ], + [ + "/29/", + -14.28952693939209 + ], + [ + "interface", + -14.28957176208496 + ], + [ + "epe", + -14.28964900970459 + ], + [ + "▁Sift", + -14.289649963378906 + ], + [ + "▁Kuni", + -14.289718627929688 + ], + [ + "▁SPC", + -14.289745330810549 + ], + [ + "▁hardback", + -14.289772987365724 + ], + [ + "Inform", + -14.28977870941162 + ], + [ + "▁titan", + -14.289782524108888 + ], + [ + "▁Calle", + -14.289884567260742 + ], + [ + "prise", + -14.289916038513184 + ], + [ + "PES", + -14.28994846343994 + ], + [ + "Trial", + -14.289965629577637 + ], + [ + "▁HST", + -14.289994239807127 + ], + [ + "Army", + -14.290000915527344 + ], + [ + "zler", + -14.290009498596191 + ], + [ + "▁Kurtz", + -14.290019989013672 + ], + [ + "COU", + -14.290066719055176 + ], + [ + "▁primers", + -14.290083885192873 + ], + [ + "▁roadblock", + -14.290099143981934 + ], + [ + "Wu", + -14.29015827178955 + ], + [ + "airline", + -14.290224075317385 + ], + [ + "Instructions", + -14.29023551940918 + ], + [ + "▁Pedestrian", + -14.29028034210205 + ], + [ + "▁Snowy", + -14.29029941558838 + ], + [ + "seeker", + -14.290301322937012 + ], + [ + "FYI", + -14.290325164794922 + ], + [ + "Scene", + -14.290431022644045 + ], + [ + "▁Nip", + -14.290556907653809 + ], + [ + "▁lobes", + -14.290583610534668 + ], + [ + "OWS", + -14.290608406066896 + ], + [ + "▁BED", + -14.290613174438477 + ], + [ + "657", + -14.29069709777832 + ], + [ + "1.3%", + -14.290735244750977 + ], + [ + "▁ergo", + -14.290749549865724 + ], + [ + "▁Naughty", + -14.2908296585083 + ], + [ + "ateur", + -14.290839195251465 + ], + [ + "▁Mandala", + -14.29085922241211 + ], + [ + "Reclaim", + -14.290888786315918 + ], + [ + "▁Ozark", + -14.290898323059082 + ], + [ + "▁Pancake", + -14.29092502593994 + ], + [ + "agno", + -14.29094409942627 + ], + [ + "▁£60", + -14.290976524353027 + ], + [ + "▁Arrivals", + -14.29107666015625 + ], + [ + "▁Matteo", + -14.29116439819336 + ], + [ + "1300", + -14.29117488861084 + ], + [ + "rka", + -14.291187286376951 + ], + [ + "Muffin", + -14.2911958694458 + ], + [ + "▁Northland", + -14.291298866271973 + ], + [ + "bbin", + -14.291418075561523 + ], + [ + "peritoneal", + -14.291460990905762 + ], + [ + "▁Eindhoven", + -14.291460990905762 + ], + [ + "▁Ordnance", + -14.291460990905762 + ], + [ + "▁Scientology", + -14.291460990905762 + ], + [ + "▁WikiLeaks", + -14.291460990905762 + ], + [ + "▁extravagance", + -14.291460990905762 + ], + [ + "▁grumble", + -14.291460990905762 + ], + [ + "▁lethargic", + -14.291460990905762 + ], + [ + "▁navigable", + -14.291460990905762 + ], + [ + "▁revitalizing", + -14.291460990905762 + ], + [ + "▁sorghum", + -14.291460990905762 + ], + [ + "▁trepidation", + -14.291460990905762 + ], + [ + "▁adipose", + -14.291461944580078 + ], + [ + "▁Bozeman", + 
-14.291462898254396 + ], + [ + "▁unaccompanied", + -14.291463851928713 + ], + [ + "▁curving", + -14.29146671295166 + ], + [ + "▁Thierry", + -14.291467666625977 + ], + [ + "▁Caicos", + -14.291470527648926 + ], + [ + "▁chubby", + -14.291492462158203 + ], + [ + "▁Carmichael", + -14.29149341583252 + ], + [ + "▁dengue", + -14.291501998901367 + ], + [ + "▁barbershop", + -14.29151439666748 + ], + [ + "▁Geometric", + -14.291533470153809 + ], + [ + "▁ThinkPad", + -14.291539192199709 + ], + [ + "▁Kessler", + -14.291544914245604 + ], + [ + "▁culvert", + -14.291549682617188 + ], + [ + "▁Recliner", + -14.291561126708984 + ], + [ + "▁execs", + -14.291563034057615 + ], + [ + "▁Lumbar", + -14.291570663452148 + ], + [ + "▁ExxonMobil", + -14.291571617126465 + ], + [ + "▁Locator", + -14.291585922241213 + ], + [ + "▁Sturdy", + -14.291587829589844 + ], + [ + "▁unsung", + -14.29160213470459 + ], + [ + "▁STP", + -14.29163646697998 + ], + [ + "▁UCSF", + -14.29164218902588 + ], + [ + "▁erasure", + -14.291645050048828 + ], + [ + "▁Sophisticated", + -14.291653633117676 + ], + [ + "▁Capability", + -14.29165744781494 + ], + [ + "aert", + -14.29166030883789 + ], + [ + "▁cutscene", + -14.291664123535156 + ], + [ + "▁Chosen", + -14.291669845581056 + ], + [ + "frey", + -14.291692733764648 + ], + [ + "▁Ginny", + -14.291698455810549 + ], + [ + "asked", + -14.29170036315918 + ], + [ + "▁Laz", + -14.29171371459961 + ], + [ + "▁Variant", + -14.29173469543457 + ], + [ + "▁Windham", + -14.291763305664062 + ], + [ + "▁homeostasis", + -14.29177474975586 + ], + [ + "Guided", + -14.291781425476074 + ], + [ + "▁Rowley", + -14.291825294494627 + ], + [ + "Reinforce", + -14.291845321655272 + ], + [ + "▁XXL", + -14.291857719421388 + ], + [ + "▁Allentown", + -14.291873931884766 + ], + [ + "▁Pulling", + -14.291874885559082 + ], + [ + "▁Morley", + -14.291878700256348 + ], + [ + "▁neuron", + -14.291921615600586 + ], + [ + "0.9%", + -14.291936874389648 + ], + [ + "Alexa", + -14.29194450378418 + ], + [ + "▁Shil", + -14.291947364807127 + ], + [ + "▁Peking", + -14.292011260986328 + ], + [ + "▁Silverlight", + -14.292024612426758 + ], + [ + "▁volvo", + -14.292054176330566 + ], + [ + "▁Celia", + -14.292062759399414 + ], + [ + "▁Discrimination", + -14.292073249816896 + ], + [ + "Casa", + -14.292116165161133 + ], + [ + "▁MacOS", + -14.292139053344728 + ], + [ + "▁outlawed", + -14.292146682739258 + ], + [ + "Swing", + -14.292154312133787 + ], + [ + "▁scrutinized", + -14.292181015014648 + ], + [ + "quia", + -14.292190551757812 + ], + [ + "▁Numerical", + -14.292341232299805 + ], + [ + "▁Polyurethane", + -14.292341232299805 + ], + [ + "▁recoveries", + -14.292360305786133 + ], + [ + "▁WU", + -14.29237937927246 + ], + [ + "▁Riverdale", + -14.292428016662598 + ], + [ + "▁Weil", + -14.292485237121582 + ], + [ + "angled", + -14.292502403259276 + ], + [ + "▁BHP", + -14.292552947998049 + ], + [ + "▁playfulness", + -14.292567253112791 + ], + [ + "▁Fluor", + -14.292580604553224 + ], + [ + "▁Choco", + -14.292588233947754 + ], + [ + "▁strived", + -14.292658805847168 + ], + [ + "▁Andaman", + -14.2926607131958 + ], + [ + "Hotels", + -14.292669296264648 + ], + [ + "▁MOTOR", + -14.292682647705078 + ], + [ + "▁Caution", + -14.292686462402344 + ], + [ + "Cul", + -14.292688369750977 + ], + [ + "Cards", + -14.292746543884276 + ], + [ + "▁Argo", + -14.29282569885254 + ], + [ + "▁Enthusiast", + -14.292858123779297 + ], + [ + "osu", + -14.292888641357422 + ], + [ + "▁Storey", + -14.293075561523438 + ], + [ + "▁WIL", + -14.293092727661133 + ], + [ + "▁moped", + 
-14.293169021606444 + ], + [ + "▁Scales", + -14.293218612670898 + ], + [ + "▁Rigg", + -14.29322910308838 + ], + [ + "▁Starring", + -14.29323959350586 + ], + [ + "▁airliner", + -14.293286323547363 + ], + [ + "▁werewolf", + -14.293343544006348 + ], + [ + "▁widowed", + -14.293375015258787 + ], + [ + "Silk", + -14.2933988571167 + ], + [ + "timed", + -14.293399810791016 + ], + [ + "WEB", + -14.293407440185549 + ], + [ + "▁concealing", + -14.293426513671877 + ], + [ + "▁globalisation", + -14.29347038269043 + ], + [ + "▁Gladys", + -14.293496131896973 + ], + [ + "094", + -14.293587684631348 + ], + [ + "▁adjoin", + -14.293619155883787 + ], + [ + "▁Brawl", + -14.2936429977417 + ], + [ + "▁prepayment", + -14.29371452331543 + ], + [ + "thorne", + -14.293771743774414 + ], + [ + "▁RRB", + -14.293793678283691 + ], + [ + "▁dreading", + -14.293810844421388 + ], + [ + "▁Audiobook", + -14.293841361999512 + ], + [ + "gga", + -14.293858528137209 + ], + [ + "▁(+1", + -14.293950080871582 + ], + [ + "Records", + -14.294014930725098 + ], + [ + "▁Lott", + -14.294148445129396 + ], + [ + "riva", + -14.29415225982666 + ], + [ + "acca", + -14.294178009033203 + ], + [ + "▁mailer", + -14.294207572937012 + ], + [ + "▁sundown", + -14.294256210327148 + ], + [ + "-350", + -14.294293403625488 + ], + [ + "▁licks", + -14.29432487487793 + ], + [ + "▁raved", + -14.29434299468994 + ], + [ + "quina", + -14.294367790222168 + ], + [ + "Scribe", + -14.294382095336914 + ], + [ + "▁Scoring", + -14.29438304901123 + ], + [ + "▁FJ", + -14.294425964355469 + ], + [ + "▁Platte", + -14.294437408447266 + ], + [ + "mane", + -14.294569969177246 + ], + [ + "▁Dividers", + -14.294575691223145 + ], + [ + "▁Greta", + -14.294614791870115 + ], + [ + "partners", + -14.294638633728027 + ], + [ + "enactment", + -14.29464054107666 + ], + [ + "▁Approximate", + -14.294716835021973 + ], + [ + "friendliness", + -14.294803619384766 + ], + [ + "Possibly", + -14.294804573059082 + ], + [ + "possible", + -14.294805526733398 + ], + [ + "Nestled", + -14.294818878173828 + ], + [ + "▁Dentists", + -14.294830322265623 + ], + [ + "aggressive", + -14.29483127593994 + ], + [ + "relief", + -14.294835090637209 + ], + [ + "Wendy", + -14.29483699798584 + ], + [ + "▁redshirt", + -14.294864654541016 + ], + [ + "▁Jalan", + -14.294865608215332 + ], + [ + "Scrap", + -14.294882774353027 + ], + [ + "Mommy", + -14.29491138458252 + ], + [ + "HTTP", + -14.294912338256836 + ], + [ + "▁eclipsed", + -14.294918060302734 + ], + [ + "contrast", + -14.294995307922363 + ], + [ + "3.6%", + -14.295034408569336 + ], + [ + "gathering", + -14.295039176940918 + ], + [ + "▁Lectures", + -14.295045852661133 + ], + [ + "nutrient", + -14.29515266418457 + ], + [ + "▁fooling", + -14.295164108276367 + ], + [ + "▁june", + -14.29518699645996 + ], + [ + "hedron", + -14.295207023620604 + ], + [ + "▁decayed", + -14.295238494873049 + ], + [ + "▁facil", + -14.295246124267578 + ], + [ + "CCM", + -14.29526424407959 + ], + [ + "commission", + -14.29532241821289 + ], + [ + "▁odours", + -14.295370101928713 + ], + [ + "▁1835", + -14.295432090759276 + ], + [ + "Holly", + -14.295442581176758 + ], + [ + "Kr", + -14.29550552368164 + ], + [ + "SPEC", + -14.295512199401855 + ], + [ + "▁3-7", + -14.295514106750488 + ], + [ + "rtz", + -14.295519828796388 + ], + [ + "▁Timo", + -14.295519828796388 + ], + [ + "swap", + -14.295591354370115 + ], + [ + "▁clickable", + -14.295631408691406 + ], + [ + "▁SWF", + -14.295636177062988 + ], + [ + "▁screwing", + -14.295746803283691 + ], + [ + "FOS", + -14.29576301574707 + ], + [ + "Bread", + 
-14.295774459838867 + ], + [ + "▁Playoffs", + -14.295778274536133 + ], + [ + "▁recharged", + -14.29579257965088 + ], + [ + "anic", + -14.295842170715332 + ], + [ + "078", + -14.295873641967772 + ], + [ + "Alas", + -14.2959623336792 + ], + [ + "dancing", + -14.295963287353516 + ], + [ + "▁Luggage", + -14.296075820922852 + ], + [ + "▁12.30", + -14.296130180358888 + ], + [ + "▁0.03", + -14.29613971710205 + ], + [ + "IOS", + -14.296154022216797 + ], + [ + "Walter", + -14.296172142028809 + ], + [ + "▁Marsha", + -14.296213150024414 + ], + [ + "Unable", + -14.296256065368652 + ], + [ + "Identifying", + -14.296384811401367 + ], + [ + "cub", + -14.29641342163086 + ], + [ + "▁Coats", + -14.29641342163086 + ], + [ + "▁Biscuit", + -14.296469688415527 + ], + [ + "▁squeal", + -14.296476364135742 + ], + [ + "▁Nationally", + -14.296488761901855 + ], + [ + "▁Dungeons", + -14.296561241149902 + ], + [ + "▁Cochran", + -14.296589851379396 + ], + [ + "▁PINK", + -14.296590805053713 + ], + [ + "▁arouse", + -14.296649932861328 + ], + [ + "xyz", + -14.296706199645996 + ], + [ + "▁PDC", + -14.296733856201172 + ], + [ + "kau", + -14.296798706054688 + ], + [ + "▁divisional", + -14.296810150146484 + ], + [ + "matsu", + -14.296846389770508 + ], + [ + "▁Bien", + -14.29687786102295 + ], + [ + "▁distinctively", + -14.296916007995604 + ], + [ + "ruth", + -14.29692268371582 + ], + [ + "▁Methyl", + -14.296950340270996 + ], + [ + "▁subdomains", + -14.29697608947754 + ], + [ + "Inevitably", + -14.296985626220703 + ], + [ + "▁Artillery", + -14.296985626220703 + ], + [ + "▁Saginaw", + -14.296985626220703 + ], + [ + "▁Simcoe", + -14.296985626220703 + ], + [ + "▁actuarial", + -14.296985626220703 + ], + [ + "▁anchovies", + -14.296985626220703 + ], + [ + "▁bulldozer", + -14.296985626220703 + ], + [ + "▁escorts", + -14.296985626220703 + ], + [ + "▁exuberance", + -14.296985626220703 + ], + [ + "▁memorization", + -14.296985626220703 + ], + [ + "▁Aikido", + -14.29698657989502 + ], + [ + "▁mammography", + -14.29698657989502 + ], + [ + "▁WONDER", + -14.296987533569336 + ], + [ + "▁thrombosis", + -14.296987533569336 + ], + [ + "▁Solitaire", + -14.296988487243652 + ], + [ + "▁Brevard", + -14.296990394592283 + ], + [ + "▁hydrophobic", + -14.296996116638184 + ], + [ + "▁Liberties", + -14.296998023986816 + ], + [ + "▁Sarawak", + -14.296998977661133 + ], + [ + "▁nappies", + -14.296998977661133 + ], + [ + "▁Witnesses", + -14.297006607055664 + ], + [ + "▁monoclonal", + -14.29700756072998 + ], + [ + "▁Komodo", + -14.297012329101562 + ], + [ + "▁Storytelling", + -14.29701805114746 + ], + [ + "▁Pirelli", + -14.297025680541992 + ], + [ + "innovation", + -14.297030448913574 + ], + [ + "▁EXPO", + -14.297030448913574 + ], + [ + "▁SketchUp", + -14.297039031982422 + ], + [ + "▁Soundcloud", + -14.29705810546875 + ], + [ + "▁subsurface", + -14.297103881835938 + ], + [ + "▁Belinda", + -14.297117233276367 + ], + [ + "chains", + -14.297130584716797 + ], + [ + "▁batsmen", + -14.297138214111328 + ], + [ + "▁EDU", + -14.297168731689451 + ], + [ + "▁AWAY", + -14.29716968536377 + ], + [ + "▁Opinions", + -14.297179222106934 + ], + [ + "CALL", + -14.297192573547363 + ], + [ + "▁enshrined", + -14.297224044799805 + ], + [ + "▁deftly", + -14.297232627868652 + ], + [ + "▁Rol", + -14.297252655029297 + ], + [ + "vak", + -14.297266006469728 + ], + [ + "▁imperialism", + -14.29727268218994 + ], + [ + "Programs", + -14.29729461669922 + ], + [ + "Terri", + -14.297320365905762 + ], + [ + "LTE", + -14.29733657836914 + ], + [ + "norm", + -14.29733657836914 + ], + [ + "zinger", + 
-14.29739475250244 + ], + [ + "▁Hanley", + -14.297436714172363 + ], + [ + "▁Api", + -14.297480583190918 + ], + [ + "▁Taman", + -14.29751682281494 + ], + [ + "Mysterious", + -14.297581672668455 + ], + [ + "594", + -14.29758644104004 + ], + [ + "▁lubricated", + -14.297611236572266 + ], + [ + "Customize", + -14.297635078430176 + ], + [ + "phony", + -14.297650337219238 + ], + [ + "▁(2017", + -14.297661781311035 + ], + [ + "▁Breakout", + -14.297703742980955 + ], + [ + "ombe", + -14.297737121582031 + ], + [ + "▁Namely", + -14.297741889953612 + ], + [ + "crop", + -14.297746658325195 + ], + [ + "iosis", + -14.29783821105957 + ], + [ + "▁28,000", + -14.297935485839844 + ], + [ + "▁basking", + -14.29796314239502 + ], + [ + "▁AIG", + -14.298007011413574 + ], + [ + "▁Const", + -14.298033714294434 + ], + [ + "▁freaks", + -14.298043251037598 + ], + [ + "▁overheard", + -14.298068046569824 + ], + [ + "partial", + -14.298080444335938 + ], + [ + "оро", + -14.298103332519531 + ], + [ + "UIT", + -14.298110008239746 + ], + [ + "▁Kalyan", + -14.298112869262695 + ], + [ + "▁WMS", + -14.298113822937012 + ], + [ + "▁Smiley", + -14.298150062561035 + ], + [ + "▁Hurdle", + -14.298179626464844 + ], + [ + "▁Hanger", + -14.298184394836426 + ], + [ + "▁Heil", + -14.29819679260254 + ], + [ + "▁Zil", + -14.29821491241455 + ], + [ + "▁Shard", + -14.29824161529541 + ], + [ + "▁Jog", + -14.298274993896484 + ], + [ + "▁thatched", + -14.298428535461426 + ], + [ + "uminous", + -14.298453330993652 + ], + [ + "ivate", + -14.298535346984863 + ], + [ + "▁1790", + -14.298545837402344 + ], + [ + "▁abnormally", + -14.298547744750977 + ], + [ + "dey", + -14.298620223999023 + ], + [ + "▁dorms", + -14.298624992370604 + ], + [ + "▁dass", + -14.298635482788086 + ], + [ + "PTC", + -14.298701286315918 + ], + [ + "▁boneless", + -14.298724174499512 + ], + [ + "▁RIGHTS", + -14.29873275756836 + ], + [ + "▁laughable", + -14.29881191253662 + ], + [ + "▁Acquisitions", + -14.298834800720217 + ], + [ + "▁Cavan", + -14.29885482788086 + ], + [ + "▁Caption", + -14.298911094665527 + ], + [ + "ROOT", + -14.298941612243652 + ], + [ + "pedal", + -14.298952102661133 + ], + [ + "▁(55", + -14.298970222473145 + ], + [ + "▁Teja", + -14.299102783203123 + ], + [ + "Authorised", + -14.299114227294922 + ], + [ + "▁Cob", + -14.299156188964844 + ], + [ + "▁wonky", + -14.299205780029297 + ], + [ + "▁hoe", + -14.299269676208496 + ], + [ + "appe", + -14.299297332763672 + ], + [ + "hadi", + -14.299356460571287 + ], + [ + "▁Chilli", + -14.299408912658691 + ], + [ + "rabi", + -14.299415588378906 + ], + [ + "▁Kup", + -14.299521446228027 + ], + [ + "▁Joshi", + -14.299543380737305 + ], + [ + "▁animators", + -14.299640655517578 + ], + [ + "▁Girard", + -14.299694061279297 + ], + [ + "▁martin", + -14.29970932006836 + ], + [ + "Tis", + -14.299735069274902 + ], + [ + "▁$20.00", + -14.299745559692385 + ], + [ + "▁mica", + -14.29975128173828 + ], + [ + "baba", + -14.299756050109863 + ], + [ + "▁FED", + -14.299761772155762 + ], + [ + "commit", + -14.299772262573242 + ], + [ + "-12)", + -14.299826622009276 + ], + [ + "Sham", + -14.299830436706545 + ], + [ + "▁marc", + -14.29985809326172 + ], + [ + "bq", + -14.299914360046388 + ], + [ + "▁Treating", + -14.299931526184082 + ], + [ + "▁Hornet", + -14.300183296203612 + ], + [ + "$25", + -14.30019187927246 + ], + [ + "▁Esk", + -14.300244331359863 + ], + [ + "▁ITA", + -14.300253868103027 + ], + [ + "jacking", + -14.300387382507324 + ], + [ + "▁instalments", + -14.300393104553224 + ], + [ + "▁gunner", + -14.300399780273438 + ], + [ + "▁FBA", + 
-14.30043125152588 + ], + [ + "Cortez", + -14.300455093383787 + ], + [ + "Mortgage", + -14.300456047058104 + ], + [ + "negotiable", + -14.300471305847168 + ], + [ + "Swimming", + -14.300503730773926 + ], + [ + "verso", + -14.300503730773926 + ], + [ + "Intelligence", + -14.300506591796877 + ], + [ + "Vancouver", + -14.300508499145508 + ], + [ + "Brooklyn", + -14.300512313842772 + ], + [ + "▁Bernadette", + -14.300515174865724 + ], + [ + "biggest", + -14.300518035888672 + ], + [ + "▁Enchant", + -14.300521850585938 + ], + [ + "Cheese", + -14.30052375793457 + ], + [ + "royal", + -14.300531387329102 + ], + [ + "▁Bangla", + -14.300542831420898 + ], + [ + "Caroline", + -14.300554275512695 + ], + [ + "naka", + -14.300583839416504 + ], + [ + "▁334", + -14.300626754760742 + ], + [ + "statistics", + -14.30069065093994 + ], + [ + "Held", + -14.300748825073242 + ], + [ + "▁356", + -14.300843238830566 + ], + [ + "▁Boyer", + -14.300846099853516 + ], + [ + "Quiet", + -14.300867080688477 + ], + [ + "▁Kline", + -14.300920486450195 + ], + [ + "ATC", + -14.300929069519045 + ], + [ + "▁scoff", + -14.300935745239258 + ], + [ + "▁Foxes", + -14.301087379455566 + ], + [ + "▁Tach", + -14.301091194152832 + ], + [ + "▁tidying", + -14.301166534423828 + ], + [ + "▁Avila", + -14.301210403442385 + ], + [ + "▁AMERICA", + -14.301308631896973 + ], + [ + "archi", + -14.301329612731934 + ], + [ + "fam", + -14.30133056640625 + ], + [ + "Jer", + -14.301383018493652 + ], + [ + "▁encapsulate", + -14.30148983001709 + ], + [ + "▁LAC", + -14.301499366760254 + ], + [ + "▁59%", + -14.301547050476074 + ], + [ + "▁sprains", + -14.301556587219238 + ], + [ + "Kenya", + -14.30156421661377 + ], + [ + "▁Pockets", + -14.301583290100098 + ], + [ + "▁Drunk", + -14.301617622375488 + ], + [ + "▁birdies", + -14.301617622375488 + ], + [ + "▁Deed", + -14.301637649536133 + ], + [ + "providing", + -14.301735877990724 + ], + [ + "kommen", + -14.30174732208252 + ], + [ + "▁OJ", + -14.301788330078123 + ], + [ + "hte", + -14.301912307739258 + ], + [ + "Phe", + -14.301913261413574 + ], + [ + "▁butchers", + -14.30193042755127 + ], + [ + "▁epitomize", + -14.3019380569458 + ], + [ + "idium", + -14.301952362060549 + ], + [ + "▁Sonar", + -14.301963806152344 + ], + [ + "▁Infor", + -14.301994323730469 + ], + [ + "cule", + -14.30202293395996 + ], + [ + "▁bung", + -14.302069664001465 + ], + [ + "accident", + -14.302074432373049 + ], + [ + "▁thump", + -14.302104949951172 + ], + [ + "▁stateroom", + -14.302111625671388 + ], + [ + "▁malts", + -14.302116394042969 + ], + [ + "NIE", + -14.302154541015623 + ], + [ + "▁SEI", + -14.302170753479004 + ], + [ + "▁Somers", + -14.302230834960938 + ], + [ + "ILO", + -14.302239418029783 + ], + [ + "▁Suresh", + -14.30225944519043 + ], + [ + "▁Kitchener", + -14.302312850952148 + ], + [ + "▁Conv", + -14.302422523498535 + ], + [ + "gine", + -14.302465438842772 + ], + [ + "sorption", + -14.302497863769531 + ], + [ + "▁20:1", + -14.302515983581545 + ], + [ + "▁Rectangle", + -14.30254077911377 + ], + [ + "▁Rhetoric", + -14.30254077911377 + ], + [ + "▁Spalding", + -14.30254077911377 + ], + [ + "▁avionics", + -14.30254077911377 + ], + [ + "▁enraged", + -14.30254077911377 + ], + [ + "▁gauntlet", + -14.30254077911377 + ], + [ + "▁mitsubishi", + -14.30254077911377 + ], + [ + "▁rejoicing", + -14.30254077911377 + ], + [ + "▁wretched", + -14.30254077911377 + ], + [ + "▁relevancy", + -14.302541732788086 + ], + [ + "▁amortization", + -14.302542686462402 + ], + [ + "▁endogenous", + -14.302542686462402 + ], + [ + "▁Pérez", + -14.302544593811035 + ], 
+ [ + "▁grunge", + -14.302545547485352 + ], + [ + "▁turbocharger", + -14.302545547485352 + ], + [ + "▁Nougat", + -14.302546501159668 + ], + [ + "▁amorphous", + -14.302546501159668 + ], + [ + "▁transponder", + -14.302547454833984 + ], + [ + "▁reminiscence", + -14.302549362182615 + ], + [ + "▁scalar", + -14.302549362182615 + ], + [ + "▁Elimination", + -14.302553176879885 + ], + [ + "▁Magenta", + -14.302556991577148 + ], + [ + "▁purging", + -14.302559852600098 + ], + [ + "Welcoming", + -14.302565574645996 + ], + [ + "▁Regime", + -14.302568435668944 + ], + [ + "▁Dhoni", + -14.302574157714844 + ], + [ + "▁fortitude", + -14.302608489990234 + ], + [ + "▁Preparedness", + -14.302611351013184 + ], + [ + "▁undamaged", + -14.302623748779297 + ], + [ + "▁Lemonade", + -14.302624702453612 + ], + [ + "communicable", + -14.302629470825195 + ], + [ + "▁lemongrass", + -14.302635192871094 + ], + [ + "▁coexistence", + -14.30264949798584 + ], + [ + "▁1843", + -14.302658081054688 + ], + [ + "coni", + -14.302666664123535 + ], + [ + "▁Fenway", + -14.302667617797852 + ], + [ + "▁Gracia", + -14.30267333984375 + ], + [ + "▁457", + -14.302678108215332 + ], + [ + "▁Tasman", + -14.302721977233888 + ], + [ + "POSITION", + -14.302730560302734 + ], + [ + "▁Miracles", + -14.30274486541748 + ], + [ + "▁Lora", + -14.302753448486328 + ], + [ + "▁Minster", + -14.302760124206545 + ], + [ + "▁helical", + -14.302785873413086 + ], + [ + "Consideration", + -14.302804946899414 + ], + [ + "lunch", + -14.302839279174805 + ], + [ + "▁Lakeshore", + -14.302850723266602 + ], + [ + "▁2016/17", + -14.302865028381348 + ], + [ + "arda", + -14.30286693572998 + ], + [ + "▁Dillard", + -14.302881240844728 + ], + [ + "nchez", + -14.30289077758789 + ], + [ + "▁magnetism", + -14.302892684936523 + ], + [ + "▁Username", + -14.302923202514648 + ], + [ + "▁deregulation", + -14.302935600280762 + ], + [ + "▁Ruskin", + -14.302955627441406 + ], + [ + "▁Happened", + -14.302961349487305 + ], + [ + "▁chirping", + -14.30296230316162 + ], + [ + "▁zirconia", + -14.30296516418457 + ], + [ + "▁stockholders", + -14.302968978881836 + ], + [ + "▁panther", + -14.30300998687744 + ], + [ + "▁Quail", + -14.303030967712402 + ], + [ + "jewel", + -14.303051948547363 + ], + [ + "▁2004)", + -14.30305290222168 + ], + [ + "bog", + -14.303071022033691 + ], + [ + "▁multichannel", + -14.303099632263184 + ], + [ + "▁Peet", + -14.303114891052246 + ], + [ + "OPE", + -14.303122520446776 + ], + [ + "▁CTRL", + -14.303143501281738 + ], + [ + "▁Insured", + -14.303149223327637 + ], + [ + "ooooo", + -14.303180694580078 + ], + [ + "▁Pave", + -14.30319881439209 + ], + [ + "TSA", + -14.303207397460938 + ], + [ + "▁MEGA", + -14.303211212158203 + ], + [ + "dson", + -14.303216934204102 + ], + [ + "▁Mariana", + -14.303226470947266 + ], + [ + "ikar", + -14.30323886871338 + ], + [ + "merge", + -14.303257942199709 + ], + [ + "▁Herod", + -14.303260803222656 + ], + [ + "▁spirals", + -14.303339004516602 + ], + [ + "repeat", + -14.303340911865234 + ], + [ + "WAVE", + -14.303345680236816 + ], + [ + "Fracture", + -14.303375244140623 + ], + [ + "▁incumbents", + -14.30339241027832 + ], + [ + "▁knick", + -14.303422927856444 + ], + [ + "arty", + -14.30343246459961 + ], + [ + "treating", + -14.303438186645508 + ], + [ + "▁Festive", + -14.30353832244873 + ], + [ + "▁feeble", + -14.303574562072754 + ], + [ + "JG", + -14.303585052490234 + ], + [ + "▁Parry", + -14.303618431091309 + ], + [ + "OUND", + -14.303709030151367 + ], + [ + "ivities", + -14.303732872009276 + ], + [ + "managing", + -14.303743362426758 + ], + [ 
+ "▁wearers", + -14.303772926330566 + ], + [ + "natch", + -14.30378246307373 + ], + [ + "▁hopelessly", + -14.30383014678955 + ], + [ + "tude", + -14.303844451904297 + ], + [ + "▁Depor", + -14.303899765014648 + ], + [ + "embl", + -14.303921699523926 + ], + [ + "RSS", + -14.303980827331545 + ], + [ + "▁PCT", + -14.304001808166504 + ], + [ + "▁SoCal", + -14.304018020629885 + ], + [ + "Kil", + -14.30402946472168 + ], + [ + "▁revamping", + -14.304070472717283 + ], + [ + "▁stricken", + -14.304085731506348 + ], + [ + "locking", + -14.304150581359863 + ], + [ + "NIR", + -14.304248809814451 + ], + [ + "▁localised", + -14.3042631149292 + ], + [ + "▁TSP", + -14.30428409576416 + ], + [ + "eligible", + -14.304291725158691 + ], + [ + "▁Strath", + -14.304495811462402 + ], + [ + "▁Avast", + -14.304511070251465 + ], + [ + "▁WON", + -14.3046236038208 + ], + [ + "▁VCU", + -14.304698944091797 + ], + [ + "hao", + -14.30470371246338 + ], + [ + "▁Thar", + -14.304723739624023 + ], + [ + "▁landfall", + -14.304737091064451 + ], + [ + "▁Endpoint", + -14.304831504821776 + ], + [ + "▁reconnecting", + -14.30483341217041 + ], + [ + "TAIL", + -14.30485725402832 + ], + [ + "Grav", + -14.304939270019531 + ], + [ + "▁14001", + -14.304946899414062 + ], + [ + "897", + -14.3049898147583 + ], + [ + "arney", + -14.30500030517578 + ], + [ + "▁RDS", + -14.305021286010742 + ], + [ + "▁8:15", + -14.305115699768066 + ], + [ + "Odd", + -14.305171012878418 + ], + [ + "numbers", + -14.305176734924316 + ], + [ + "symbol", + -14.305187225341797 + ], + [ + "Yan", + -14.30525016784668 + ], + [ + "▁bootleg", + -14.305316925048828 + ], + [ + "trell", + -14.305349349975586 + ], + [ + "▁Milne", + -14.305349349975586 + ], + [ + "▁Sabre", + -14.30536651611328 + ], + [ + "▁boron", + -14.305379867553713 + ], + [ + "outlet", + -14.305414199829102 + ], + [ + "▁sweeper", + -14.305474281311035 + ], + [ + "jazz", + -14.305492401123049 + ], + [ + "▁deliverable", + -14.305500984191896 + ], + [ + "Nicola", + -14.30552864074707 + ], + [ + "▁Incidentally", + -14.305559158325195 + ], + [ + "▁Olde", + -14.305599212646484 + ], + [ + "▁Prospects", + -14.305689811706545 + ], + [ + "▁9781", + -14.305777549743652 + ], + [ + "extricably", + -14.305778503417969 + ], + [ + "claw", + -14.305829048156738 + ], + [ + "depressant", + -14.305832862854004 + ], + [ + "934", + -14.305933952331545 + ], + [ + "▁MFT", + -14.305952072143556 + ], + [ + "48)", + -14.305974960327148 + ], + [ + "covers", + -14.306007385253906 + ], + [ + "Foster", + -14.306013107299805 + ], + [ + "occurring", + -14.306086540222168 + ], + [ + "flux", + -14.306095123291016 + ], + [ + "TRACT", + -14.306124687194824 + ], + [ + "▁rhinestones", + -14.306127548217772 + ], + [ + "Participation", + -14.306201934814451 + ], + [ + "▁Dissertations", + -14.306228637695312 + ], + [ + "Thunder", + -14.306245803833008 + ], + [ + "▁Stocking", + -14.306248664855955 + ], + [ + "Thailand", + -14.306267738342283 + ], + [ + "Hybrid", + -14.306268692016602 + ], + [ + "796", + -14.3062744140625 + ], + [ + "Venture", + -14.306291580200195 + ], + [ + "Stefan", + -14.306331634521484 + ], + [ + "▁couriers", + -14.306351661682127 + ], + [ + "psychology", + -14.306365966796877 + ], + [ + "▁Foll", + -14.306440353393556 + ], + [ + "▁Tacos", + -14.306448936462402 + ], + [ + "528", + -14.306469917297363 + ], + [ + "advised", + -14.306493759155272 + ], + [ + "favor", + -14.306507110595703 + ], + [ + "Dude", + -14.306540489196776 + ], + [ + "bene", + -14.306571006774902 + ], + [ + "▁Avent", + -14.306652069091797 + ], + [ + "▁11.6", + 
-14.306679725646973 + ], + [ + "REST", + -14.306685447692873 + ], + [ + "▁steelhead", + -14.306696891784668 + ], + [ + "▁Tsun", + -14.30677604675293 + ], + [ + "▁Commodities", + -14.306788444519045 + ], + [ + "▁simulates", + -14.306877136230469 + ], + [ + "Poster", + -14.30690860748291 + ], + [ + "esses", + -14.30695629119873 + ], + [ + "seeds", + -14.307008743286133 + ], + [ + "roux", + -14.307013511657717 + ], + [ + "Examine", + -14.307032585144045 + ], + [ + "▁PAD", + -14.3070650100708 + ], + [ + "SKI", + -14.307083129882812 + ], + [ + "Ideas", + -14.307206153869627 + ], + [ + "Shallow", + -14.307215690612791 + ], + [ + "▁squander", + -14.307226181030272 + ], + [ + "▁buoy", + -14.307275772094728 + ], + [ + "▁Turing", + -14.30734157562256 + ], + [ + "Lowering", + -14.30734634399414 + ], + [ + "▁Londoners", + -14.307376861572266 + ], + [ + "▁inefficiency", + -14.307382583618164 + ], + [ + "rva", + -14.307450294494627 + ], + [ + "Formerly", + -14.307456970214844 + ], + [ + "reports", + -14.307470321655272 + ], + [ + "strategic", + -14.307518005371094 + ], + [ + "▁VOD", + -14.307549476623535 + ], + [ + "▁quads", + -14.30756664276123 + ], + [ + "051", + -14.307581901550291 + ], + [ + "▁gymnasts", + -14.30760669708252 + ], + [ + "ppm", + -14.307608604431152 + ], + [ + "▁recede", + -14.307703018188477 + ], + [ + "▁Pavl", + -14.307732582092283 + ], + [ + "▁Chamb", + -14.307734489440918 + ], + [ + "▁Integrating", + -14.307761192321776 + ], + [ + "▁Mindful", + -14.307785034179688 + ], + [ + "▁Recognize", + -14.307835578918455 + ], + [ + "Mann", + -14.307836532592772 + ], + [ + "▁Osh", + -14.307883262634276 + ], + [ + "▁11.2", + -14.30798625946045 + ], + [ + "▁fashionista", + -14.308042526245115 + ], + [ + "▁eyewitness", + -14.308086395263672 + ], + [ + "Losing", + -14.308100700378418 + ], + [ + "GLE", + -14.308120727539062 + ], + [ + "▁Atmosphere", + -14.308127403259276 + ], + [ + "▁Azalea", + -14.308127403259276 + ], + [ + "▁Cavendish", + -14.308127403259276 + ], + [ + "▁Commencement", + -14.308127403259276 + ], + [ + "▁entourage", + -14.308127403259276 + ], + [ + "▁fibroblast", + -14.308127403259276 + ], + [ + "▁jacquard", + -14.308127403259276 + ], + [ + "▁oligarch", + -14.308127403259276 + ], + [ + "▁remedied", + -14.308127403259276 + ], + [ + "▁tweezers", + -14.308127403259276 + ], + [ + "▁adjudication", + -14.308128356933594 + ], + [ + "▁alopecia", + -14.308128356933594 + ], + [ + "▁embankment", + -14.308128356933594 + ], + [ + "▁gigabit", + -14.308128356933594 + ], + [ + "▁perceptual", + -14.308128356933594 + ], + [ + "▁XYZ", + -14.30812931060791 + ], + [ + "▁Gonzaga", + -14.308133125305176 + ], + [ + "▁overpass", + -14.308135986328123 + ], + [ + "▁Clemens", + -14.308141708374023 + ], + [ + "▁Enzyme", + -14.308141708374023 + ], + [ + "▁Publix", + -14.30814266204834 + ], + [ + "▁Pentium", + -14.308143615722656 + ], + [ + "▁generika", + -14.308143615722656 + ], + [ + "▁speculating", + -14.308146476745604 + ], + [ + "▁Bendigo", + -14.308148384094238 + ], + [ + "▁carcinogenic", + -14.308150291442873 + ], + [ + "▁Circulation", + -14.308151245117188 + ], + [ + "▁Bainbridge", + -14.30815887451172 + ], + [ + "▁Wicklow", + -14.308165550231934 + ], + [ + "▁Flesh", + -14.308168411254885 + ], + [ + "Embracing", + -14.308171272277832 + ], + [ + "▁Tamworth", + -14.308183670043944 + ], + [ + "▁McDaniel", + -14.308198928833008 + ], + [ + "▁fancier", + -14.308198928833008 + ], + [ + "▁Scanning", + -14.30820369720459 + ], + [ + "▁Hackett", + -14.30821418762207 + ], + [ + "▁HELL", + -14.308237075805664 + ], 
+ [ + "▁recitation", + -14.30824089050293 + ], + [ + "▁extradition", + -14.308242797851562 + ], + [ + "▁MARK", + -14.308246612548828 + ], + [ + "▁Mahler", + -14.308246612548828 + ], + [ + "▁repossession", + -14.30824851989746 + ], + [ + "▁Tripura", + -14.308255195617676 + ], + [ + "▁tailings", + -14.308262825012209 + ], + [ + "▁Technologist", + -14.308274269104004 + ], + [ + "▁bobcat", + -14.308279037475586 + ], + [ + "▁Bukit", + -14.30828094482422 + ], + [ + "▁McMaster", + -14.30828857421875 + ], + [ + "▁Teatro", + -14.308294296264648 + ], + [ + "▁countering", + -14.308300018310549 + ], + [ + "▁bastion", + -14.308324813842772 + ], + [ + "▁shelved", + -14.308349609375 + ], + [ + "▁abnormality", + -14.308355331420898 + ], + [ + "▁Tweak", + -14.308366775512695 + ], + [ + "▁Morristown", + -14.308380126953123 + ], + [ + "▁sawdust", + -14.308382034301758 + ], + [ + "▁Mileage", + -14.308382987976074 + ], + [ + "Educating", + -14.308453559875488 + ], + [ + "4,900", + -14.308496475219728 + ], + [ + "▁Glorious", + -14.30852508544922 + ], + [ + "▁furrow", + -14.308545112609863 + ], + [ + "▁mulled", + -14.308561325073242 + ], + [ + "brace", + -14.308575630187988 + ], + [ + "▁slapping", + -14.30860424041748 + ], + [ + "▁Cathay", + -14.308613777160645 + ], + [ + "▁DEAL", + -14.308613777160645 + ], + [ + "▁turrets", + -14.30861759185791 + ], + [ + "▁Koll", + -14.308631896972656 + ], + [ + "▁Tulane", + -14.308636665344238 + ], + [ + "▁overused", + -14.30866241455078 + ], + [ + "▁retouching", + -14.308672904968262 + ], + [ + "▁citizenry", + -14.30868148803711 + ], + [ + "mox", + -14.308698654174805 + ], + [ + "▁casings", + -14.308727264404297 + ], + [ + "▁408", + -14.308743476867676 + ], + [ + "▁Canning", + -14.30875015258789 + ], + [ + "▁Athlon", + -14.308758735656738 + ], + [ + "▁resurfaced", + -14.308770179748535 + ], + [ + "▁manipulations", + -14.308783531188965 + ], + [ + "▁spate", + -14.30881404876709 + ], + [ + "▁$4.99", + -14.308866500854492 + ], + [ + "▁Gentry", + -14.308871269226074 + ], + [ + "▁Treg", + -14.308924674987791 + ], + [ + "BLA", + -14.308982849121094 + ], + [ + "articular", + -14.309091567993164 + ], + [ + "▁dinnerware", + -14.309163093566896 + ], + [ + "▁platformer", + -14.309165954589844 + ], + [ + "▁stoneware", + -14.30918025970459 + ], + [ + "fib", + -14.309191703796388 + ], + [ + "▁ecologist", + -14.309200286865234 + ], + [ + "▁mired", + -14.309231758117676 + ], + [ + "▁Rocking", + -14.309316635131836 + ], + [ + "ETT", + -14.309332847595217 + ], + [ + "▁2011-12", + -14.309344291687012 + ], + [ + "VCA", + -14.309366226196287 + ], + [ + "▁equated", + -14.309391021728516 + ], + [ + "▁NEET", + -14.309415817260742 + ], + [ + "▁727", + -14.30955982208252 + ], + [ + "▁Cinematic", + -14.309571266174316 + ], + [ + "▁Sof", + -14.309603691101074 + ], + [ + "▁reinstalled", + -14.30962371826172 + ], + [ + "▁ISM", + -14.309627532958984 + ], + [ + "FSC", + -14.309647560119627 + ], + [ + "▁MOM", + -14.309738159179688 + ], + [ + "▁Gujarati", + -14.309752464294434 + ], + [ + "▁rattled", + -14.30976104736328 + ], + [ + "▁Successfully", + -14.309769630432127 + ], + [ + "▁delle", + -14.30982780456543 + ], + [ + "▁proofreader", + -14.309881210327148 + ], + [ + "gym", + -14.309945106506348 + ], + [ + "▁ditched", + -14.310029983520508 + ], + [ + "▁Mauri", + -14.310052871704102 + ], + [ + "▁Jayne", + -14.310073852539062 + ], + [ + "▁contradicts", + -14.310076713562012 + ], + [ + "Thor", + -14.310125350952148 + ], + [ + "▁Harare", + -14.310125350952148 + ], + [ + "▁JOY", + -14.31016445159912 + ], + [ + 
"▁invertebrates", + -14.310192108154297 + ], + [ + "▁narcissistic", + -14.310287475585938 + ], + [ + "▁Aux", + -14.310314178466797 + ], + [ + "ROY", + -14.310401916503906 + ], + [ + "▁nutritionists", + -14.310469627380373 + ], + [ + "▁Rant", + -14.31048583984375 + ], + [ + "parc", + -14.31052017211914 + ], + [ + "▁Arrows", + -14.310551643371582 + ], + [ + "▁Silly", + -14.31057357788086 + ], + [ + "hint", + -14.31057834625244 + ], + [ + "▁Shari", + -14.310625076293944 + ], + [ + "lora", + -14.31067943572998 + ], + [ + "▁Upgraded", + -14.310747146606444 + ], + [ + "illum", + -14.31079387664795 + ], + [ + "▁326", + -14.310800552368164 + ], + [ + "▁419", + -14.310820579528809 + ], + [ + "▁affording", + -14.31083869934082 + ], + [ + "▁Troops", + -14.310848236083984 + ], + [ + "▁Moved", + -14.31098747253418 + ], + [ + "▁Woot", + -14.31100845336914 + ], + [ + "058", + -14.311017036437988 + ], + [ + "berman", + -14.311025619506836 + ], + [ + "elig", + -14.31104564666748 + ], + [ + "▁vastness", + -14.311078071594238 + ], + [ + "gale", + -14.311123847961426 + ], + [ + "▁Notary", + -14.311182975769045 + ], + [ + "▁inlays", + -14.31122589111328 + ], + [ + "ophile", + -14.311239242553713 + ], + [ + "▁coolness", + -14.311264038085938 + ], + [ + "▁fouled", + -14.311305046081545 + ], + [ + "gier", + -14.311322212219238 + ], + [ + "▁Funnels", + -14.311355590820312 + ], + [ + "▁Swann", + -14.311446189880373 + ], + [ + "colla", + -14.311553001403809 + ], + [ + "features", + -14.31157112121582 + ], + [ + "▁SCORE", + -14.31157112121582 + ], + [ + "▁publicist", + -14.3115873336792 + ], + [ + "ulga", + -14.311639785766602 + ], + [ + "4.25", + -14.31165885925293 + ], + [ + "Mesh", + -14.311694145202637 + ], + [ + "Roast", + -14.311753273010254 + ], + [ + "balancing", + -14.311829566955566 + ], + [ + "Glenn", + -14.31184196472168 + ], + [ + "prevent", + -14.311877250671388 + ], + [ + "romantic", + -14.31191635131836 + ], + [ + "Deploy", + -14.311923027038574 + ], + [ + "Measuring", + -14.31194305419922 + ], + [ + "Chancellor", + -14.311960220336914 + ], + [ + "Belgium", + -14.311967849731444 + ], + [ + "▁Afri", + -14.31212329864502 + ], + [ + "computing", + -14.312140464782717 + ], + [ + "organisation", + -14.3121976852417 + ], + [ + "VAN", + -14.312232971191406 + ], + [ + "monkey", + -14.312313079833984 + ], + [ + "Grinding", + -14.312329292297363 + ], + [ + "Dawn", + -14.312341690063477 + ], + [ + "pression", + -14.31243896484375 + ], + [ + "▁Siam", + -14.312442779541016 + ], + [ + "Recycle", + -14.312469482421877 + ], + [ + "▁McKee", + -14.312477111816406 + ], + [ + "▁sapphires", + -14.312494277954102 + ], + [ + "▁Digger", + -14.312495231628418 + ], + [ + "▁Susanna", + -14.312506675720217 + ], + [ + "▁fen", + -14.312511444091797 + ], + [ + "▁Hype", + -14.312543869018556 + ], + [ + "▁Pause", + -14.312559127807615 + ], + [ + "▁docker", + -14.312617301940918 + ], + [ + "▁Bahama", + -14.312644004821776 + ], + [ + "gham", + -14.312674522399902 + ], + [ + "▁Tillman", + -14.312685012817385 + ], + [ + "▁Pract", + -14.312785148620604 + ], + [ + "▁Lauri", + -14.3128080368042 + ], + [ + "▁harbors", + -14.312808990478516 + ], + [ + "▁SIS", + -14.312823295593262 + ], + [ + "Reset", + -14.31291389465332 + ], + [ + "▁BATH", + -14.312915802001951 + ], + [ + "▁FALL", + -14.3129243850708 + ], + [ + "OOO", + -14.312952041625977 + ], + [ + "▁prefect", + -14.312968254089355 + ], + [ + "▁DOD", + -14.313020706176758 + ], + [ + "▁Sach", + -14.313028335571287 + ], + [ + "▁$35,000", + -14.31313705444336 + ], + [ + "chow", + 
-14.313210487365724 + ], + [ + "prin", + -14.313214302062988 + ], + [ + "PRC", + -14.313240051269531 + ], + [ + "▁cooperated", + -14.313355445861816 + ], + [ + "▁SEAT", + -14.313366889953612 + ], + [ + "changed", + -14.313368797302246 + ], + [ + "▁pinching", + -14.313400268554688 + ], + [ + "▁Wunder", + -14.313405990600586 + ], + [ + "▁Bohemia", + -14.313410758972168 + ], + [ + "▁Markers", + -14.313436508178713 + ], + [ + "▁insecticide", + -14.31344223022461 + ], + [ + "Wan", + -14.313446044921877 + ], + [ + "Thoughts", + -14.313485145568848 + ], + [ + "▁neue", + -14.313488960266112 + ], + [ + "▁$62", + -14.313529014587402 + ], + [ + "▁glitz", + -14.313556671142578 + ], + [ + "dhara", + -14.313579559326172 + ], + [ + "szt", + -14.313589096069336 + ], + [ + "▁PNC", + -14.313589096069336 + ], + [ + "▁Kashmiri", + -14.31361484527588 + ], + [ + "▁Defend", + -14.313632011413574 + ], + [ + "▁Sylvan", + -14.313633918762209 + ], + [ + "▁Expensive", + -14.3136625289917 + ], + [ + "▁pout", + -14.313688278198242 + ], + [ + "066", + -14.313735008239746 + ], + [ + "▁Hilda", + -14.313742637634276 + ], + [ + "9001:2008", + -14.313745498657228 + ], + [ + "▁Annapurna", + -14.313745498657228 + ], + [ + "▁Apricot", + -14.313745498657228 + ], + [ + "▁HOLIDAY", + -14.313745498657228 + ], + [ + "▁Hilfiger", + -14.313745498657228 + ], + [ + "▁Implications", + -14.313745498657228 + ], + [ + "▁Labyrinth", + -14.313745498657228 + ], + [ + "▁apologizing", + -14.313745498657228 + ], + [ + "▁appetizing", + -14.313745498657228 + ], + [ + "▁equivalence", + -14.313745498657228 + ], + [ + "▁fentanyl", + -14.313745498657228 + ], + [ + "▁gentrification", + -14.313745498657228 + ], + [ + "▁legacies", + -14.313745498657228 + ], + [ + "▁memorizing", + -14.313745498657228 + ], + [ + "▁palladium", + -14.313745498657228 + ], + [ + "▁unclaimed", + -14.313745498657228 + ], + [ + "▁Feminist", + -14.313746452331545 + ], + [ + "▁Membrane", + -14.313746452331545 + ], + [ + "▁reunification", + -14.313746452331545 + ], + [ + "STUDIO", + -14.31374740600586 + ], + [ + "▁matinee", + -14.31374740600586 + ], + [ + "▁ZenFone", + -14.313751220703123 + ], + [ + "▁minuscule", + -14.313751220703123 + ], + [ + "▁Feinstein", + -14.31375503540039 + ], + [ + "▁camshaft", + -14.31375789642334 + ], + [ + "▁illogical", + -14.31375789642334 + ], + [ + "MZ", + -14.313758850097656 + ], + [ + "▁Lucille", + -14.313761711120604 + ], + [ + "▁uneventful", + -14.313761711120604 + ], + [ + "▁Ryerson", + -14.313764572143556 + ], + [ + "▁bummed", + -14.313766479492188 + ], + [ + "▁Redesign", + -14.31377410888672 + ], + [ + "▁Bateman", + -14.313777923583984 + ], + [ + "▁Veritas", + -14.31378173828125 + ], + [ + "▁Quadrant", + -14.313782691955566 + ], + [ + "▁unsweetened", + -14.313785552978516 + ], + [ + "▁Accountancy", + -14.313804626464844 + ], + [ + "▁roving", + -14.313804626464844 + ], + [ + "▁maneuverability", + -14.313822746276855 + ], + [ + "▁unoccupied", + -14.313822746276855 + ], + [ + "▁materialized", + -14.31383228302002 + ], + [ + "▁manmade", + -14.31385612487793 + ], + [ + "▁gestational", + -14.313867568969728 + ], + [ + "▁devolution", + -14.313902854919434 + ], + [ + "▁Cardigan", + -14.313904762268066 + ], + [ + "▁chasm", + -14.313918113708496 + ], + [ + "▁Rove", + -14.313934326171877 + ], + [ + "▁Sherpa", + -14.313987731933594 + ], + [ + "▁Glazing", + -14.313990592956545 + ], + [ + "▁watchmaker", + -14.313995361328123 + ], + [ + "breasted", + -14.314014434814451 + ], + [ + "▁mutated", + -14.314043045043944 + ], + [ + "▁rubies", + -14.314078330993652 + 
], + [ + "▁MILL", + -14.314112663269045 + ], + [ + "▁1.5\"", + -14.31411361694336 + ], + [ + "▁photogenic", + -14.31417465209961 + ], + [ + "▁postdoc", + -14.314189910888672 + ], + [ + "RSE", + -14.314204216003418 + ], + [ + "POST", + -14.314266204833984 + ], + [ + "▁Bridgestone", + -14.31427764892578 + ], + [ + "▁postnatal", + -14.31428050994873 + ], + [ + "▁Planck", + -14.314337730407717 + ], + [ + "▁weavers", + -14.314393043518066 + ], + [ + "▁Marquez", + -14.314445495605469 + ], + [ + "▁saith", + -14.314453125 + ], + [ + "▁gasses", + -14.314454078674316 + ], + [ + "▁Classy", + -14.314493179321287 + ], + [ + "▁furiously", + -14.314499855041504 + ], + [ + "▁55+", + -14.314504623413086 + ], + [ + "▁Remembering", + -14.314544677734377 + ], + [ + "▁Kita", + -14.314558029174805 + ], + [ + "▁Russel", + -14.31458568572998 + ], + [ + "▁levelling", + -14.31462287902832 + ], + [ + "▁Donnie", + -14.31466007232666 + ], + [ + "▁Wildwood", + -14.314717292785645 + ], + [ + "▁FUND", + -14.314737319946287 + ], + [ + "▁Justine", + -14.314748764038086 + ], + [ + "▁Circa", + -14.31479263305664 + ], + [ + "iera", + -14.314800262451172 + ], + [ + "▁Singleton", + -14.314921379089355 + ], + [ + "ownership", + -14.315007209777832 + ], + [ + "MOUNT", + -14.315023422241213 + ], + [ + "▁flexion", + -14.315103530883787 + ], + [ + "▁overland", + -14.315136909484863 + ], + [ + "▁ESTATE", + -14.315139770507812 + ], + [ + "environmentalist", + -14.315140724182127 + ], + [ + "comments", + -14.315178871154783 + ], + [ + "▁Rattle", + -14.315215110778809 + ], + [ + "▁precedes", + -14.315263748168944 + ], + [ + "STRA", + -14.31528091430664 + ], + [ + "▁Dorian", + -14.315308570861816 + ], + [ + "GOT", + -14.315315246582031 + ], + [ + "▁shuffled", + -14.31532859802246 + ], + [ + "▁anthems", + -14.315354347229004 + ], + [ + "▁19,000", + -14.31535530090332 + ], + [ + "▁ASB", + -14.315366744995115 + ], + [ + "▁Curved", + -14.31541347503662 + ], + [ + "▁$1,1", + -14.315430641174316 + ], + [ + "▁perverse", + -14.31545352935791 + ], + [ + "sauce", + -14.315465927124023 + ], + [ + "MRC", + -14.315481185913086 + ], + [ + "▁eastward", + -14.31551456451416 + ], + [ + "▁Altar", + -14.315563201904297 + ], + [ + "▁Sabin", + -14.315579414367676 + ], + [ + "▁hairpin", + -14.315589904785156 + ], + [ + "▁modernism", + -14.315645217895508 + ], + [ + "Checking", + -14.315668106079102 + ], + [ + "▁Shasta", + -14.3157320022583 + ], + [ + "▁admirer", + -14.31581211090088 + ], + [ + "bbl", + -14.31586742401123 + ], + [ + "difference", + -14.315926551818848 + ], + [ + "▁XE", + -14.316048622131348 + ], + [ + "architecture", + -14.316054344177246 + ], + [ + "▁commutes", + -14.316055297851562 + ], + [ + "▁deft", + -14.31606388092041 + ], + [ + "874", + -14.316170692443848 + ], + [ + "▁skinned", + -14.316201210021973 + ], + [ + "gary", + -14.316256523132324 + ], + [ + "document", + -14.316274642944336 + ], + [ + "MIL", + -14.316279411315918 + ], + [ + "Signed", + -14.316291809082031 + ], + [ + "▁Shoal", + -14.316375732421877 + ], + [ + "styling", + -14.316483497619627 + ], + [ + "4400", + -14.31652545928955 + ], + [ + "RET", + -14.316654205322266 + ], + [ + "▁extendable", + -14.316699028015137 + ], + [ + "▁OWNER", + -14.316715240478516 + ], + [ + "▁XXI", + -14.316768646240234 + ], + [ + "▁1842", + -14.31677532196045 + ], + [ + "▁generalist", + -14.316779136657717 + ], + [ + "▁Semp", + -14.31678295135498 + ], + [ + "ezi", + -14.31682300567627 + ], + [ + "▁Prado", + -14.316883087158203 + ], + [ + "fana", + -14.316940307617188 + ], + [ + "▁overbearing", + 
-14.316940307617188 + ], + [ + "▁Corral", + -14.317028999328612 + ], + [ + "▁NPP", + -14.317066192626951 + ], + [ + "machi", + -14.31710433959961 + ], + [ + "alina", + -14.317161560058594 + ], + [ + "odder", + -14.317190170288086 + ], + [ + "▁unfriendly", + -14.317264556884766 + ], + [ + "▁hulls", + -14.31729507446289 + ], + [ + "Lane", + -14.317296981811523 + ], + [ + "▁querying", + -14.31743335723877 + ], + [ + "Registr", + -14.317487716674805 + ], + [ + "idler", + -14.31755256652832 + ], + [ + "▁Mov", + -14.31757354736328 + ], + [ + "▁qi", + -14.317593574523926 + ], + [ + "median", + -14.317646980285645 + ], + [ + "Permanent", + -14.317672729492188 + ], + [ + "sustainable", + -14.317681312561035 + ], + [ + "▁Prefab", + -14.31772232055664 + ], + [ + "Councillor", + -14.317723274230955 + ], + [ + "Residence", + -14.31772804260254 + ], + [ + "▁leasehold", + -14.31772804260254 + ], + [ + "▁Updating", + -14.31774616241455 + ], + [ + "▁Terrell", + -14.317766189575195 + ], + [ + "crisis", + -14.317767143249512 + ], + [ + "Pixel", + -14.317800521850586 + ], + [ + "cq", + -14.317826271057127 + ], + [ + "Duty", + -14.317864418029783 + ], + [ + "▁Maximize", + -14.317874908447266 + ], + [ + "▁Surat", + -14.317882537841797 + ], + [ + "▁Kep", + -14.317885398864746 + ], + [ + "▁Brea", + -14.317917823791504 + ], + [ + "Tommy", + -14.317946434020996 + ], + [ + "▁relayed", + -14.317959785461426 + ], + [ + "borrow", + -14.317974090576172 + ], + [ + "▁Stinger", + -14.318011283874512 + ], + [ + "▁Gle", + -14.318015098571776 + ], + [ + "▁Englishman", + -14.318150520324709 + ], + [ + "(?)", + -14.318158149719238 + ], + [ + "Sponsored", + -14.31816291809082 + ], + [ + "iture", + -14.318181037902832 + ], + [ + "▁IAM", + -14.318204879760742 + ], + [ + "$15", + -14.318222045898438 + ], + [ + "disabled", + -14.318227767944336 + ], + [ + "▁GIA", + -14.318278312683104 + ], + [ + "understanding", + -14.318388938903809 + ], + [ + "2.8%", + -14.318400382995604 + ], + [ + "laugh", + -14.318458557128906 + ], + [ + "▁snowed", + -14.318467140197754 + ], + [ + "tich", + -14.318483352661133 + ], + [ + "▁inwards", + -14.31860065460205 + ], + [ + "10+", + -14.318604469299316 + ], + [ + "finch", + -14.318655014038086 + ], + [ + "▁wane", + -14.318666458129885 + ], + [ + "pitched", + -14.318751335144045 + ], + [ + "▁Emerge", + -14.31875705718994 + ], + [ + "idan", + -14.318792343139648 + ], + [ + "▁Raza", + -14.318889617919922 + ], + [ + "▁constrict", + -14.318910598754885 + ], + [ + "▁Fanta", + -14.318939208984377 + ], + [ + "reta", + -14.31894302368164 + ], + [ + "▁1250", + -14.31901741027832 + ], + [ + "▁sorta", + -14.319059371948242 + ], + [ + "▁thistle", + -14.319067001342772 + ], + [ + "posting", + -14.319091796875 + ], + [ + "▁Accel", + -14.319100379943848 + ], + [ + "▁elude", + -14.31914520263672 + ], + [ + "sund", + -14.319186210632324 + ], + [ + "bulb", + -14.319221496582031 + ], + [ + "▁Grund", + -14.319229125976562 + ], + [ + "▁coyote", + -14.319242477416992 + ], + [ + "Rating", + -14.31926155090332 + ], + [ + "▁Cano", + -14.319262504577637 + ], + [ + "▁Signup", + -14.31932544708252 + ], + [ + "▁alienate", + -14.319378852844238 + ], + [ + "▁Horticulture", + -14.3193941116333 + ], + [ + "▁Bedouin", + -14.319395065307615 + ], + [ + "▁Elixir", + -14.319395065307615 + ], + [ + "▁Strategist", + -14.319395065307615 + ], + [ + "▁Twentieth", + -14.319395065307615 + ], + [ + "▁acupuncturist", + -14.319395065307615 + ], + [ + "▁aubergine", + -14.319395065307615 + ], + [ + "▁dissuade", + -14.319395065307615 + ], + [ + 
"▁edifice", + -14.319395065307615 + ], + [ + "▁fiddling", + -14.319395065307615 + ], + [ + "▁irreparable", + -14.319395065307615 + ], + [ + "▁maverick", + -14.319395065307615 + ], + [ + "▁sweltering", + -14.319395065307615 + ], + [ + "encrusted", + -14.319396018981934 + ], + [ + "▁Zinfandel", + -14.319396018981934 + ], + [ + "▁marmalade", + -14.319396018981934 + ], + [ + "▁populism", + -14.319396018981934 + ], + [ + "▁pulsing", + -14.319396018981934 + ], + [ + "▁Involvement", + -14.31939697265625 + ], + [ + "▁masquerade", + -14.31939697265625 + ], + [ + "▁mobilizing", + -14.31939697265625 + ], + [ + "▁revitalise", + -14.31939697265625 + ], + [ + "▁equinox", + -14.319398880004885 + ], + [ + "▁lexicon", + -14.319398880004885 + ], + [ + "▁Lucerne", + -14.319400787353516 + ], + [ + "▁Pogba", + -14.319402694702148 + ], + [ + "▁ketogenic", + -14.319402694702148 + ], + [ + "▁Genomics", + -14.319409370422363 + ], + [ + "▁decibel", + -14.31941032409668 + ], + [ + "▁rabid", + -14.319425582885742 + ], + [ + "▁$1,000,000", + -14.319429397583008 + ], + [ + "▁curatorial", + -14.31944751739502 + ], + [ + "▁pathologist", + -14.31944751739502 + ], + [ + "▁Blockbuster", + -14.319453239440918 + ], + [ + "▁Cama", + -14.319472312927246 + ], + [ + "▁headway", + -14.319477081298828 + ], + [ + "▁unprocessed", + -14.319486618041992 + ], + [ + "▁scrappy", + -14.319497108459473 + ], + [ + "oule", + -14.319499015808104 + ], + [ + "▁ISLAND", + -14.319512367248535 + ], + [ + "▁Aware", + -14.319518089294434 + ], + [ + "▁Metrics", + -14.319572448730469 + ], + [ + "▁Headache", + -14.319583892822266 + ], + [ + "caught", + -14.319610595703123 + ], + [ + "▁Nagpur", + -14.319646835327148 + ], + [ + "▁slat", + -14.319646835327148 + ], + [ + "STOP", + -14.31965160369873 + ], + [ + "LONG", + -14.319729804992676 + ], + [ + "▁VFW", + -14.319745063781738 + ], + [ + "▁Squarespace", + -14.319747924804688 + ], + [ + "istically", + -14.319756507873535 + ], + [ + "opp", + -14.319772720336914 + ], + [ + "▁Shipped", + -14.319806098937988 + ], + [ + "▁(15)", + -14.319847106933594 + ], + [ + "▁$58", + -14.319853782653809 + ], + [ + "▁Vines", + -14.319866180419922 + ], + [ + "▁Xeon", + -14.319869995117188 + ], + [ + "▁ROG", + -14.319902420043944 + ], + [ + "▁VAC", + -14.31997776031494 + ], + [ + "▁VFX", + -14.320009231567385 + ], + [ + "▁compostable", + -14.320038795471191 + ], + [ + "stuck", + -14.32007884979248 + ], + [ + "▁Dishes", + -14.32009220123291 + ], + [ + "neal", + -14.32009506225586 + ], + [ + "▁hatches", + -14.320230484008787 + ], + [ + "▁Whey", + -14.320245742797852 + ], + [ + "▁Murano", + -14.32028865814209 + ], + [ + "▁champs", + -14.32030200958252 + ], + [ + "▁speedily", + -14.320308685302734 + ], + [ + "emos", + -14.32034397125244 + ], + [ + "▁thunderous", + -14.320362091064451 + ], + [ + "▁arcades", + -14.320379257202148 + ], + [ + "-2001", + -14.320394515991213 + ], + [ + "Skill", + -14.320426940917969 + ], + [ + "▁idling", + -14.320426940917969 + ], + [ + "▁Riz", + -14.320486068725586 + ], + [ + "▁Invalid", + -14.3204927444458 + ], + [ + "▁stares", + -14.320507049560549 + ], + [ + "▁seaport", + -14.32050895690918 + ], + [ + "65%", + -14.320586204528809 + ], + [ + "▁McCu", + -14.320643424987791 + ], + [ + "VII", + -14.320674896240234 + ], + [ + "Iowa", + -14.320688247680664 + ], + [ + "▁KLM", + -14.320792198181152 + ], + [ + "▁Alti", + -14.32090663909912 + ], + [ + "▁matcha", + -14.320916175842283 + ], + [ + "▁Havre", + -14.32091999053955 + ], + [ + "▁volunteerism", + -14.320950508117676 + ], + [ + "-002", + 
-14.320987701416016 + ], + [ + "▁oar", + -14.321062088012695 + ], + [ + "▁Banjo", + -14.321086883544922 + ], + [ + "▁Prod", + -14.321097373962402 + ], + [ + "▁Bobcats", + -14.321105003356934 + ], + [ + "▁Geno", + -14.32113265991211 + ], + [ + "loans", + -14.321171760559082 + ], + [ + "BMW", + -14.321208953857422 + ], + [ + "ACI", + -14.32127571105957 + ], + [ + "▁twig", + -14.321276664733888 + ], + [ + "▁glycogen", + -14.32127857208252 + ], + [ + "Spice", + -14.321358680725098 + ], + [ + "991", + -14.321414947509766 + ], + [ + "lée", + -14.32144260406494 + ], + [ + "historic", + -14.321517944335938 + ], + [ + "▁Examinations", + -14.321550369262695 + ], + [ + "▁MONT", + -14.321551322937012 + ], + [ + "▁Ghar", + -14.321579933166504 + ], + [ + "gnostic", + -14.321627616882324 + ], + [ + "vocation", + -14.321646690368652 + ], + [ + "▁Zeit", + -14.321664810180664 + ], + [ + "dau", + -14.321693420410156 + ], + [ + "▁Crows", + -14.32170295715332 + ], + [ + "▁syringes", + -14.3217191696167 + ], + [ + "▁preposition", + -14.321748733520508 + ], + [ + "▁banquets", + -14.321760177612305 + ], + [ + "▁baseboard", + -14.321870803833008 + ], + [ + "▁Fashioned", + -14.321907043457031 + ], + [ + "061", + -14.32194709777832 + ], + [ + "▁Helpline", + -14.32196807861328 + ], + [ + "iffy", + -14.32197380065918 + ], + [ + "514", + -14.322025299072266 + ], + [ + "▁Triad", + -14.32205581665039 + ], + [ + "PHE", + -14.322080612182615 + ], + [ + "AML", + -14.32219696044922 + ], + [ + "▁Ake", + -14.322280883789062 + ], + [ + "▁ACCEPT", + -14.322319984436035 + ], + [ + "▁treadmills", + -14.322513580322266 + ], + [ + "2.3%", + -14.32254123687744 + ], + [ + "▁motorways", + -14.322587013244627 + ], + [ + "IAS", + -14.322602272033691 + ], + [ + "▁envelop", + -14.322638511657717 + ], + [ + "▁topographic", + -14.322654724121094 + ], + [ + "luv", + -14.322680473327637 + ], + [ + "▁HV", + -14.322681427001951 + ], + [ + "▁4.00", + -14.322704315185549 + ], + [ + "Popularity", + -14.322745323181152 + ], + [ + "▁Pud", + -14.322766304016112 + ], + [ + "HUN", + -14.32284450531006 + ], + [ + "▁shadowing", + -14.322973251342772 + ], + [ + "rara", + -14.322980880737305 + ], + [ + "MCC", + -14.323017120361328 + ], + [ + "nid", + -14.32313060760498 + ], + [ + "▁warts", + -14.32315444946289 + ], + [ + "▁chauffeurs", + -14.323190689086914 + ], + [ + "▁wafers", + -14.323213577270508 + ], + [ + "lamb", + -14.323220252990724 + ], + [ + "ocracy", + -14.323257446289062 + ], + [ + "▁Tots", + -14.32336711883545 + ], + [ + "austin", + -14.323455810546877 + ], + [ + "stricken", + -14.323504447937012 + ], + [ + "▁Glynn", + -14.323511123657228 + ], + [ + "▁hydrates", + -14.323528289794922 + ], + [ + "william", + -14.323532104492188 + ], + [ + "organisms", + -14.323541641235352 + ], + [ + "Decision", + -14.323554039001465 + ], + [ + "companies", + -14.32355785369873 + ], + [ + "Tennessee", + -14.323563575744627 + ], + [ + "Cameron", + -14.323575973510742 + ], + [ + "socket", + -14.32357692718506 + ], + [ + "crawl", + -14.323594093322754 + ], + [ + "Lightweight", + -14.323603630065918 + ], + [ + "majority", + -14.323648452758787 + ], + [ + "Arrow", + -14.32367706298828 + ], + [ + "▁Keri", + -14.323695182800291 + ], + [ + "Grape", + -14.323699951171877 + ], + [ + "CHECK", + -14.323750495910645 + ], + [ + "OMO", + -14.323771476745604 + ], + [ + "NEWS", + -14.32384204864502 + ], + [ + "zac", + -14.323892593383787 + ], + [ + "▁stalling", + -14.32394027709961 + ], + [ + "▁receptacles", + -14.323974609375 + ], + [ + "1955", + -14.32404899597168 + ], + [ + 
"Sensor", + -14.324056625366213 + ], + [ + "NCS", + -14.324058532714844 + ], + [ + "▁mantras", + -14.324067115783691 + ], + [ + "▁Cv", + -14.324085235595703 + ], + [ + "▁Regularly", + -14.324189186096191 + ], + [ + "▁Poco", + -14.32421875 + ], + [ + "▁Lovell", + -14.32422161102295 + ], + [ + "▁Creators", + -14.324237823486328 + ], + [ + "▁Caster", + -14.324271202087402 + ], + [ + "▁Indus", + -14.324291229248049 + ], + [ + "▁hairdo", + -14.324334144592283 + ], + [ + "▁spindles", + -14.324344635009766 + ], + [ + "▁SELL", + -14.324360847473145 + ], + [ + "▁looped", + -14.324408531188965 + ], + [ + "▁interrogate", + -14.324475288391112 + ], + [ + "▁transcribe", + -14.32448673248291 + ], + [ + "monitoring", + -14.32450008392334 + ], + [ + "▁Expressions", + -14.32451343536377 + ], + [ + "pipes", + -14.324515342712402 + ], + [ + "retired", + -14.324536323547363 + ], + [ + "▁Hira", + -14.32458209991455 + ], + [ + "▁twirl", + -14.324585914611816 + ], + [ + "▁Threshold", + -14.324594497680664 + ], + [ + "oot", + -14.324613571166992 + ], + [ + "▁yawn", + -14.324641227722168 + ], + [ + "elastic", + -14.32468605041504 + ], + [ + "▁Freshly", + -14.324718475341797 + ], + [ + "––", + -14.324736595153809 + ], + [ + "▁Harr", + -14.324749946594238 + ], + [ + "comic", + -14.32479190826416 + ], + [ + "chung", + -14.324877738952637 + ], + [ + "Amongst", + -14.324894905090332 + ], + [ + "▁Galicia", + -14.324954986572266 + ], + [ + "▁resolute", + -14.32496738433838 + ], + [ + "▁Insure", + -14.32501983642578 + ], + [ + "ucine", + -14.325029373168944 + ], + [ + "▁Kazakh", + -14.32507610321045 + ], + [ + "epileptic", + -14.325077056884766 + ], + [ + "▁Ceremonies", + -14.325077056884766 + ], + [ + "▁FRANCISCO", + -14.325077056884766 + ], + [ + "▁attenuation", + -14.325077056884766 + ], + [ + "▁endometriosis", + -14.325077056884766 + ], + [ + "▁ventricle", + -14.325077056884766 + ], + [ + "▁profanity", + -14.325078010559082 + ], + [ + "▁fanciful", + -14.325078964233398 + ], + [ + "▁quantifiable", + -14.325078964233398 + ], + [ + "▁LIABILITY", + -14.325079917907717 + ], + [ + "▁MATLAB", + -14.325079917907717 + ], + [ + "▁crotch", + -14.325079917907717 + ], + [ + "▁immeasurable", + -14.325079917907717 + ], + [ + "▁Fraternity", + -14.325080871582031 + ], + [ + "▁Pinellas", + -14.325080871582031 + ], + [ + "Soprano", + -14.32508373260498 + ], + [ + "▁boolean", + -14.32508373260498 + ], + [ + "▁SAFETY", + -14.325085639953612 + ], + [ + "▁amnesia", + -14.32508659362793 + ], + [ + "▁Aquinas", + -14.325088500976562 + ], + [ + "▁omnichannel", + -14.32508945465088 + ], + [ + "▁Altitude", + -14.325090408325195 + ], + [ + "▁swanky", + -14.325091361999512 + ], + [ + "contraindication", + -14.325096130371094 + ], + [ + "▁brazen", + -14.325119018554688 + ], + [ + "▁McHenry", + -14.325119972229004 + ], + [ + "▁enrollees", + -14.325121879577637 + ], + [ + "▁Cheerleader", + -14.325125694274902 + ], + [ + "incarnate", + -14.325136184692385 + ], + [ + "relle", + -14.325145721435549 + ], + [ + "▁storefronts", + -14.325145721435549 + ], + [ + "occo", + -14.32515811920166 + ], + [ + "▁gaseous", + -14.32516384124756 + ], + [ + "▁satchel", + -14.325164794921877 + ], + [ + "▁Binghamton", + -14.325166702270508 + ], + [ + "▁paella", + -14.32518196105957 + ], + [ + "▁ejection", + -14.325202941894531 + ], + [ + "jutsu", + -14.32520580291748 + ], + [ + "▁fume", + -14.325217247009276 + ], + [ + "▁Henne", + -14.325221061706545 + ], + [ + "▁SHORT", + -14.32522964477539 + ], + [ + "▁nervously", + -14.325247764587402 + ], + [ + "▁tacit", + 
-14.325255393981934 + ], + [ + "▁Foursquare", + -14.32526683807373 + ], + [ + "JUST", + -14.325271606445312 + ], + [ + "▁Jeanette", + -14.325286865234377 + ], + [ + "▁uncooked", + -14.32529354095459 + ], + [ + "Aging", + -14.325307846069336 + ], + [ + "torial", + -14.325335502624512 + ], + [ + "CHEM", + -14.32534122467041 + ], + [ + "▁nonviolent", + -14.325359344482422 + ], + [ + "▁utopian", + -14.325369834899902 + ], + [ + "▁(95", + -14.325400352478027 + ], + [ + "▁SCS", + -14.325424194335938 + ], + [ + "zadeh", + -14.325501441955566 + ], + [ + "▁Defect", + -14.32551097869873 + ], + [ + "faq", + -14.325523376464844 + ], + [ + "Dana", + -14.32554054260254 + ], + [ + "tph", + -14.325550079345703 + ], + [ + "JV", + -14.325565338134766 + ], + [ + "▁bangle", + -14.325569152832031 + ], + [ + "▁Sabine", + -14.325626373291016 + ], + [ + "LRC", + -14.325701713562012 + ], + [ + "▁EXCLUSIVE", + -14.325703620910645 + ], + [ + "▁Centri", + -14.325761795043944 + ], + [ + "▁dismayed", + -14.325773239135742 + ], + [ + "shut", + -14.32579517364502 + ], + [ + "▁Garza", + -14.325798034667969 + ], + [ + "▁RARE", + -14.325871467590332 + ], + [ + "▁Shoreline", + -14.32587432861328 + ], + [ + "▁Wolverines", + -14.32588005065918 + ], + [ + "▁Ranging", + -14.325886726379396 + ], + [ + "▁Edel", + -14.325907707214355 + ], + [ + "▁disassembled", + -14.32593822479248 + ], + [ + "▁choreographers", + -14.325957298278809 + ], + [ + "nsa", + -14.325998306274414 + ], + [ + "▁MEAN", + -14.326016426086426 + ], + [ + "▁Woodford", + -14.32603359222412 + ], + [ + "1111", + -14.326109886169434 + ], + [ + "magi", + -14.326127052307127 + ], + [ + "▁moron", + -14.326128959655762 + ], + [ + "▁417", + -14.32620906829834 + ], + [ + "assar", + -14.32621955871582 + ], + [ + "▁SONG", + -14.32625675201416 + ], + [ + "0.05", + -14.32626724243164 + ], + [ + "UMA", + -14.326323509216309 + ], + [ + "▁Tusk", + -14.32634735107422 + ], + [ + "▁jr", + -14.326393127441406 + ], + [ + "▁carpark", + -14.32639503479004 + ], + [ + "▁duality", + -14.326422691345217 + ], + [ + "allen", + -14.32643985748291 + ], + [ + "992", + -14.32648468017578 + ], + [ + "▁chatbots", + -14.326510429382324 + ], + [ + "▁1844", + -14.326516151428224 + ], + [ + "▁Litre", + -14.326611518859863 + ], + [ + "fated", + -14.326617240905762 + ], + [ + "▁modems", + -14.32664680480957 + ], + [ + "▁USGS", + -14.326687812805176 + ], + [ + "Mum", + -14.32669448852539 + ], + [ + "ober", + -14.32669448852539 + ], + [ + "▁Prayers", + -14.326705932617188 + ], + [ + "▁kenya", + -14.3268404006958 + ], + [ + "▁precursors", + -14.326847076416016 + ], + [ + "gravi", + -14.326857566833496 + ], + [ + "??????", + -14.326865196228027 + ], + [ + "minant", + -14.32686996459961 + ], + [ + "▁Feder", + -14.326891899108888 + ], + [ + "▁tshirt", + -14.32691764831543 + ], + [ + "▁Foreclosures", + -14.32693576812744 + ], + [ + "maria", + -14.327033042907717 + ], + [ + "▁martyr", + -14.327043533325195 + ], + [ + "fortunate", + -14.32705020904541 + ], + [ + "kong", + -14.3270845413208 + ], + [ + "▁Jha", + -14.32709789276123 + ], + [ + "▁7-0", + -14.32711124420166 + ], + [ + "▁Redford", + -14.327146530151367 + ], + [ + "▁Ply", + -14.32724952697754 + ], + [ + "tried", + -14.32725429534912 + ], + [ + "▁Lobo", + -14.327272415161133 + ], + [ + "pointing", + -14.327306747436523 + ], + [ + "▁ooze", + -14.327347755432127 + ], + [ + "▁Nerve", + -14.327352523803713 + ], + [ + "▁Consul", + -14.327364921569824 + ], + [ + "▁Shingles", + -14.327381134033203 + ], + [ + "▁Angelica", + -14.327392578125 + ], + [ + "bolo", + 
-14.32740306854248 + ], + [ + "ggett", + -14.327411651611328 + ], + [ + "alco", + -14.32742691040039 + ], + [ + "▁Necro", + -14.327463150024414 + ], + [ + "establishment", + -14.327468872070312 + ], + [ + "054", + -14.327539443969728 + ], + [ + "▁councilor", + -14.32756233215332 + ], + [ + "▁wallow", + -14.327566146850586 + ], + [ + "▁Mush", + -14.327597618103027 + ], + [ + "CCF", + -14.327601432800291 + ], + [ + "▁Fonts", + -14.327629089355469 + ], + [ + "▁soundly", + -14.327638626098633 + ], + [ + "culi", + -14.32763957977295 + ], + [ + "▁pall", + -14.327699661254885 + ], + [ + "GRI", + -14.32772445678711 + ], + [ + "tolerance", + -14.327730178833008 + ], + [ + "▁surnames", + -14.327876091003418 + ], + [ + "▁RBS", + -14.32787799835205 + ], + [ + "▁invokes", + -14.32787799835205 + ], + [ + "▁14:2", + -14.327884674072266 + ], + [ + "halo", + -14.327893257141112 + ], + [ + "▁mortals", + -14.327899932861328 + ], + [ + "fini", + -14.327908515930176 + ], + [ + "Initiate", + -14.327943801879885 + ], + [ + "▁Harrier", + -14.32795524597168 + ], + [ + "▁Clon", + -14.327972412109377 + ], + [ + "▁Hern", + -14.327975273132324 + ], + [ + "062", + -14.327994346618652 + ], + [ + "▁rancher", + -14.328043937683104 + ], + [ + "▁Armin", + -14.328064918518066 + ], + [ + "▁reci", + -14.328107833862305 + ], + [ + "Whoa", + -14.32811164855957 + ], + [ + "▁BEING", + -14.32815647125244 + ], + [ + "▁Teller", + -14.32826328277588 + ], + [ + "▁tonnage", + -14.32829761505127 + ], + [ + "▁kung", + -14.3283052444458 + ], + [ + "OLO", + -14.328327178955078 + ], + [ + "coop", + -14.328357696533203 + ], + [ + "▁Fontana", + -14.328374862670898 + ], + [ + "▁(20)", + -14.328417778015137 + ], + [ + "▁effecting", + -14.32846736907959 + ], + [ + "plasma", + -14.328479766845703 + ], + [ + "cky", + -14.32850742340088 + ], + [ + "▁Landlords", + -14.328516960144045 + ], + [ + "▁redevelop", + -14.328653335571287 + ], + [ + "gency", + -14.32867431640625 + ], + [ + "Thu", + -14.328680038452148 + ], + [ + "▁(18)", + -14.328694343566896 + ], + [ + "▁Duma", + -14.328746795654297 + ], + [ + "▁Cress", + -14.328757286071776 + ], + [ + "▁Bookings", + -14.328790664672852 + ], + [ + "crash", + -14.328791618347168 + ], + [ + "▁Overflow", + -14.32888126373291 + ], + [ + "▁bowing", + -14.328972816467283 + ], + [ + "▁PIR", + -14.328983306884766 + ], + [ + "▁Cerebral", + -14.32899570465088 + ], + [ + "▁Giuliani", + -14.32901382446289 + ], + [ + "▁SPOT", + -14.329108238220217 + ], + [ + "printable", + -14.32912540435791 + ], + [ + "Bou", + -14.329146385192873 + ], + [ + "Extreme", + -14.329164505004885 + ], + [ + "▁Carers", + -14.32922649383545 + ], + [ + "strategi", + -14.329288482666016 + ], + [ + "STIC", + -14.329290390014648 + ], + [ + "Component", + -14.329371452331545 + ], + [ + "Extensive", + -14.329379081726074 + ], + [ + "Campaign", + -14.329394340515137 + ], + [ + "Anonymous", + -14.329398155212402 + ], + [ + "Rainbow", + -14.32941436767578 + ], + [ + "achieving", + -14.329416275024414 + ], + [ + "Blockchain", + -14.32941722869873 + ], + [ + "Ink", + -14.329418182373049 + ], + [ + "professed", + -14.32944107055664 + ], + [ + "▁£16", + -14.329480171203612 + ], + [ + "▁Brix", + -14.329487800598145 + ], + [ + "survey", + -14.329496383666992 + ], + [ + "AUDIO", + -14.329512596130373 + ], + [ + "dirty", + -14.329562187194824 + ], + [ + "Talent", + -14.329581260681152 + ], + [ + "57)", + -14.329582214355469 + ], + [ + "Desktop", + -14.329583168029783 + ], + [ + "Dubai", + -14.329588890075684 + ], + [ + "Randy", + -14.329594612121582 + ], + [ + 
"Replacement", + -14.329656600952148 + ], + [ + "went", + -14.329671859741213 + ], + [ + "Protein", + -14.329718589782717 + ], + [ + "039", + -14.329767227172852 + ], + [ + "array", + -14.329781532287598 + ], + [ + "TRE", + -14.32978630065918 + ], + [ + "olf", + -14.32982063293457 + ], + [ + "▁petit", + -14.32983684539795 + ], + [ + "▁Splendid", + -14.32985019683838 + ], + [ + "▁Ota", + -14.3298921585083 + ], + [ + "RIF", + -14.329910278320312 + ], + [ + "▁Ears", + -14.329919815063477 + ], + [ + "Wallet", + -14.32992935180664 + ], + [ + "▁513", + -14.329960823059082 + ], + [ + "drom", + -14.329980850219728 + ], + [ + "▁10:45", + -14.329981803894045 + ], + [ + "usage", + -14.32999038696289 + ], + [ + "RDS", + -14.330015182495115 + ], + [ + "fying", + -14.33005714416504 + ], + [ + "ELE", + -14.330101013183594 + ], + [ + "▁terminates", + -14.330158233642578 + ], + [ + "accio", + -14.330181121826172 + ], + [ + "NUM", + -14.330224990844728 + ], + [ + "!!!!!!!", + -14.330266952514648 + ], + [ + "▁gleam", + -14.33027458190918 + ], + [ + "▁Sna", + -14.330365180969238 + ], + [ + "Lyric", + -14.3303804397583 + ], + [ + "▁TRS", + -14.330414772033691 + ], + [ + "▁loudspeakers", + -14.330421447753906 + ], + [ + "▁launder", + -14.330422401428224 + ], + [ + "▁Copp", + -14.330427169799805 + ], + [ + "▁941", + -14.330452919006348 + ], + [ + "ancies", + -14.33053970336914 + ], + [ + "▁jordan", + -14.330626487731934 + ], + [ + "lera", + -14.330641746520996 + ], + [ + "▁astronomer", + -14.330677032470703 + ], + [ + "▁absolut", + -14.330689430236816 + ], + [ + "▁Collectible", + -14.330699920654297 + ], + [ + "▁COUNTRY", + -14.330791473388672 + ], + [ + "▁Hokkaido", + -14.330791473388672 + ], + [ + "▁Kinabalu", + -14.330791473388672 + ], + [ + "▁THIRD", + -14.330791473388672 + ], + [ + "▁disjointed", + -14.330791473388672 + ], + [ + "▁hierarchies", + -14.330791473388672 + ], + [ + "▁immediacy", + -14.330791473388672 + ], + [ + "▁invincible", + -14.330791473388672 + ], + [ + "▁predisposition", + -14.330791473388672 + ], + [ + "▁activator", + -14.330792427062988 + ], + [ + "▁infrastructural", + -14.330792427062988 + ], + [ + "▁Maurizio", + -14.330793380737305 + ], + [ + "▁Poppins", + -14.330793380737305 + ], + [ + "▁Sikkim", + -14.330793380737305 + ], + [ + "▁buoyancy", + -14.33079433441162 + ], + [ + "▁opulence", + -14.33079433441162 + ], + [ + "▁Neolithic", + -14.330798149108888 + ], + [ + "▁Breastfeeding", + -14.330799102783203 + ], + [ + "▁SEASON", + -14.330801010131836 + ], + [ + "▁noxious", + -14.330801010131836 + ], + [ + "▁Involved", + -14.33080768585205 + ], + [ + "▁categorization", + -14.330809593200684 + ], + [ + "▁excitation", + -14.33081340789795 + ], + [ + "▁gladiator", + -14.33081340789795 + ], + [ + "▁disorganized", + -14.330820083618164 + ], + [ + "▁(1986)", + -14.330828666687012 + ], + [ + "▁CMT", + -14.330829620361328 + ], + [ + "▁diorama", + -14.330833435058594 + ], + [ + "▁scariest", + -14.33084201812744 + ], + [ + "▁inductive", + -14.33084774017334 + ], + [ + "▁Shimla", + -14.330851554870604 + ], + [ + "▁Kenyon", + -14.330854415893556 + ], + [ + "▁flashcards", + -14.330854415893556 + ], + [ + "▁sheepskin", + -14.330854415893556 + ], + [ + "▁Paytm", + -14.33086109161377 + ], + [ + "▁Evolutionary", + -14.33089542388916 + ], + [ + "▁Spotted", + -14.330920219421388 + ], + [ + "▁Honourable", + -14.330924034118652 + ], + [ + "▁architecturally", + -14.3309965133667 + ], + [ + "▁Raffle", + -14.331043243408203 + ], + [ + "▁screensaver", + -14.331052780151367 + ], + [ + "▁Clarion", + 
-14.331060409545898 + ], + [ + "▁curbside", + -14.331079483032228 + ], + [ + "▁9-10", + -14.331082344055176 + ], + [ + "▁Fatty", + -14.331103324890137 + ], + [ + "▁carolina", + -14.331109046936035 + ], + [ + "▁Destinations", + -14.33116340637207 + ], + [ + "▁wept", + -14.33116626739502 + ], + [ + "adie", + -14.331171035766602 + ], + [ + "blooded", + -14.331185340881348 + ], + [ + "▁Felicia", + -14.331280708312988 + ], + [ + "composer", + -14.33128547668457 + ], + [ + "Contributing", + -14.331306457519531 + ], + [ + "turing", + -14.331385612487791 + ], + [ + "▁Topshop", + -14.331388473510742 + ], + [ + "▁Ackerman", + -14.331402778625488 + ], + [ + "Tail", + -14.331409454345703 + ], + [ + "▁Sahib", + -14.331411361694336 + ], + [ + "35)", + -14.33143424987793 + ], + [ + "▁foreseen", + -14.331443786621094 + ], + [ + "▁CHRIST", + -14.33149242401123 + ], + [ + "▁woodwind", + -14.331494331359863 + ], + [ + "▁Fontaine", + -14.331570625305176 + ], + [ + "▁cobbler", + -14.331604957580566 + ], + [ + "luvial", + -14.331629753112791 + ], + [ + "▁Carlin", + -14.331701278686523 + ], + [ + "TTP", + -14.331831932067873 + ], + [ + "SMART", + -14.331833839416504 + ], + [ + "▁fermenting", + -14.331862449645996 + ], + [ + "▁footballers", + -14.331939697265623 + ], + [ + "▁poisons", + -14.331965446472168 + ], + [ + "?!)", + -14.331974029541016 + ], + [ + "▁Viti", + -14.331978797912598 + ], + [ + "Pont", + -14.332018852233888 + ], + [ + "▁Greco", + -14.332036018371582 + ], + [ + "Startup", + -14.332130432128906 + ], + [ + "ggles", + -14.332188606262209 + ], + [ + "▁postural", + -14.33225154876709 + ], + [ + "▁BOARD", + -14.33227252960205 + ], + [ + "▁PPM", + -14.33228588104248 + ], + [ + "▁Flushing", + -14.332334518432615 + ], + [ + "ampton", + -14.33233642578125 + ], + [ + "▁Khor", + -14.332338333129885 + ], + [ + "▁enlisting", + -14.332342147827148 + ], + [ + "Charter", + -14.332372665405272 + ], + [ + "DEF", + -14.332404136657717 + ], + [ + "0-300", + -14.332423210144045 + ], + [ + "▁civility", + -14.33244800567627 + ], + [ + "▁APEX", + -14.33247184753418 + ], + [ + "▁Vr", + -14.332484245300291 + ], + [ + "▁Thra", + -14.332504272460938 + ], + [ + "▁donkeys", + -14.33253288269043 + ], + [ + "Barn", + -14.332534790039062 + ], + [ + "▁drainer", + -14.332561492919922 + ], + [ + "▁gentleness", + -14.332605361938477 + ], + [ + "(7):", + -14.33260726928711 + ], + [ + "lighted", + -14.33260726928711 + ], + [ + "▁Frontline", + -14.332674026489258 + ], + [ + "▁reckoning", + -14.332763671875 + ], + [ + "papers", + -14.332841873168944 + ], + [ + "▁Loy", + -14.332891464233398 + ], + [ + "agiri", + -14.33289909362793 + ], + [ + "▁(#1", + -14.332919120788574 + ], + [ + "crowd", + -14.33295440673828 + ], + [ + "▁changeover", + -14.332956314086914 + ], + [ + "▁randomness", + -14.33298397064209 + ], + [ + "▁RIVER", + -14.33302402496338 + ], + [ + "▁cranked", + -14.33302879333496 + ], + [ + "▁1831", + -14.333057403564451 + ], + [ + "stopper", + -14.333085060119627 + ], + [ + "ngu", + -14.333196640014648 + ], + [ + "▁Hyp", + -14.333208084106444 + ], + [ + "▁Saline", + -14.333213806152344 + ], + [ + "incl", + -14.333244323730469 + ], + [ + "▁Hillman", + -14.333248138427734 + ], + [ + "▁Cruze", + -14.333251953125 + ], + [ + "▁birthstone", + -14.33325481414795 + ], + [ + "▁ditching", + -14.333280563354492 + ], + [ + "▁Paired", + -14.33329963684082 + ], + [ + "stats", + -14.333375930786133 + ], + [ + "▁PIE", + -14.333392143249512 + ], + [ + "▁$2.00", + -14.333417892456056 + ], + [ + "obj", + -14.333460807800291 + ], + [ + "PIA", + 
-14.333600044250488 + ], + [ + "▁Formed", + -14.333617210388184 + ], + [ + "arama", + -14.33374309539795 + ], + [ + "thetic", + -14.333769798278809 + ], + [ + "▁Cami", + -14.33378791809082 + ], + [ + "9,500", + -14.333833694458008 + ], + [ + "▁cotta", + -14.333870887756348 + ], + [ + "▁Oke", + -14.33388900756836 + ], + [ + "leaders", + -14.333958625793455 + ], + [ + "commonly", + -14.334036827087402 + ], + [ + "▁Revo", + -14.33404541015625 + ], + [ + "rook", + -14.3340482711792 + ], + [ + "▁Fru", + -14.334077835083008 + ], + [ + "▁Pina", + -14.334091186523438 + ], + [ + "▁Lian", + -14.33412742614746 + ], + [ + "▁kong", + -14.334129333496094 + ], + [ + "BERT", + -14.334266662597656 + ], + [ + "cluded", + -14.334271430969238 + ], + [ + "abbing", + -14.334298133850098 + ], + [ + "▁04/25/2019", + -14.33430004119873 + ], + [ + "▁workgroup", + -14.334359169006348 + ], + [ + "▁Mato", + -14.334467887878418 + ], + [ + "▁STAND", + -14.3345365524292 + ], + [ + "▁AGA", + -14.334592819213867 + ], + [ + "Ralph", + -14.334620475769045 + ], + [ + "ntino", + -14.33464241027832 + ], + [ + "License", + -14.334643363952637 + ], + [ + "▁multicast", + -14.334660530090332 + ], + [ + "▁sanitize", + -14.334672927856444 + ], + [ + "increase", + -14.334680557250977 + ], + [ + "▁LSE", + -14.334688186645508 + ], + [ + "Praise", + -14.334733963012695 + ], + [ + "▁Fey", + -14.334781646728516 + ], + [ + "▁Donny", + -14.334796905517578 + ], + [ + "▁broadcasted", + -14.33484172821045 + ], + [ + "▁Stray", + -14.334900856018066 + ], + [ + "▁arse", + -14.334905624389648 + ], + [ + "▁Silverman", + -14.334921836853027 + ], + [ + "advert", + -14.334966659545898 + ], + [ + "839", + -14.33499240875244 + ], + [ + "▁combs", + -14.33501148223877 + ], + [ + "0:00", + -14.335038185119627 + ], + [ + "▁RSC", + -14.335076332092283 + ], + [ + "▁Boch", + -14.33508014678955 + ], + [ + "▁leapt", + -14.33509349822998 + ], + [ + "▁vend", + -14.335129737854004 + ], + [ + "xima", + -14.335156440734863 + ], + [ + "Paying", + -14.33521556854248 + ], + [ + "▁Syd", + -14.33523941040039 + ], + [ + "▁-->", + -14.335243225097656 + ], + [ + "Serial", + -14.335245132446287 + ], + [ + "Deciding", + -14.335251808166504 + ], + [ + "Suppose", + -14.335265159606934 + ], + [ + "▁Carta", + -14.33526611328125 + ], + [ + "▁sutures", + -14.335325241088867 + ], + [ + "Marshall", + -14.335342407226562 + ], + [ + "▁heist", + -14.335362434387209 + ], + [ + "▁depress", + -14.335384368896484 + ], + [ + "Ancient", + -14.33539581298828 + ], + [ + "▁Mex", + -14.335471153259276 + ], + [ + "SLE", + -14.33548641204834 + ], + [ + "SPAR", + -14.335501670837402 + ], + [ + "Deadline", + -14.335521697998049 + ], + [ + "become", + -14.335553169250488 + ], + [ + "Carpet", + -14.335556030273438 + ], + [ + "▁relievers", + -14.335559844970703 + ], + [ + "Configure", + -14.335600852966309 + ], + [ + "▁Emu", + -14.335640907287598 + ], + [ + "▁irre", + -14.335693359375 + ], + [ + "▁Purchaser", + -14.33574676513672 + ], + [ + "▁Sensation", + -14.335800170898438 + ], + [ + "▁embroider", + -14.335848808288574 + ], + [ + "▁stings", + -14.335860252380373 + ], + [ + "▁contactor", + -14.335867881774902 + ], + [ + "pone", + -14.335907936096191 + ], + [ + "▁Mariner", + -14.335908889770508 + ], + [ + "▁substantiate", + -14.335936546325684 + ], + [ + "▁ANTI", + -14.336055755615234 + ], + [ + "▁PAS", + -14.33606243133545 + ], + [ + "▁Chir", + -14.336081504821776 + ], + [ + "Representatives", + -14.336139678955078 + ], + [ + "▁Vein", + -14.336149215698242 + ], + [ + "FPS", + -14.336247444152832 + ], + 
[ + "scripts", + -14.33625316619873 + ], + [ + "▁Zeb", + -14.336264610290527 + ], + [ + "MDB", + -14.336307525634766 + ], + [ + "smokers", + -14.336309432983398 + ], + [ + "▁rota", + -14.336329460144045 + ], + [ + "▁dined", + -14.336376190185549 + ], + [ + "▁1:2", + -14.336419105529783 + ], + [ + "▁Quarters", + -14.336430549621582 + ], + [ + "▁ethylene", + -14.336519241333008 + ], + [ + "▁trouser", + -14.336524963378906 + ], + [ + "▁Brahms", + -14.336538314819336 + ], + [ + "▁Explosive", + -14.336538314819336 + ], + [ + "▁Grumman", + -14.336538314819336 + ], + [ + "▁Konstantin", + -14.336538314819336 + ], + [ + "▁Palliative", + -14.336538314819336 + ], + [ + "▁baffling", + -14.336538314819336 + ], + [ + "▁cheetah", + -14.336538314819336 + ], + [ + "▁Dalhousie", + -14.336539268493652 + ], + [ + "▁Masjid", + -14.336539268493652 + ], + [ + "▁Synagogue", + -14.336539268493652 + ], + [ + "▁Zhejiang", + -14.336539268493652 + ], + [ + "▁amphitheater", + -14.336539268493652 + ], + [ + "▁cistern", + -14.336539268493652 + ], + [ + "▁earplugs", + -14.336539268493652 + ], + [ + "▁orangutan", + -14.336539268493652 + ], + [ + "▁subversive", + -14.336539268493652 + ], + [ + "▁CHILDREN", + -14.336540222167969 + ], + [ + "▁Whelan", + -14.336540222167969 + ], + [ + "▁Ventilation", + -14.336541175842283 + ], + [ + "Ignoring", + -14.336543083190918 + ], + [ + "▁beehive", + -14.33654499053955 + ], + [ + "▁ICICI", + -14.336549758911133 + ], + [ + "▁stagnate", + -14.336549758911133 + ], + [ + "▁laxative", + -14.336553573608398 + ], + [ + "▁Medallion", + -14.336554527282717 + ], + [ + "▁disapproval", + -14.336555480957031 + ], + [ + "▁microcosm", + -14.336562156677246 + ], + [ + "▁VISIT", + -14.336566925048828 + ], + [ + "▁nativity", + -14.33657455444336 + ], + [ + "▁pillowcase", + -14.336576461791992 + ], + [ + "▁oxford", + -14.336580276489258 + ], + [ + "▁Restricted", + -14.336584091186523 + ], + [ + "▁Hadrian", + -14.33658504486084 + ], + [ + "▁prologue", + -14.336599349975586 + ], + [ + "▁Collagen", + -14.336602210998535 + ], + [ + "▁Papi", + -14.336604118347168 + ], + [ + "▁Grandparents", + -14.336606979370115 + ], + [ + "▁punctuality", + -14.336612701416016 + ], + [ + "▁1815", + -14.336615562438965 + ], + [ + "▁JetBlue", + -14.336624145507812 + ], + [ + "▁Celestial", + -14.336628913879396 + ], + [ + "AMC", + -14.336634635925291 + ], + [ + "▁unsaturated", + -14.336642265319824 + ], + [ + "▁706", + -14.336645126342772 + ], + [ + "▁Stoppers", + -14.336675643920898 + ], + [ + "▁$19.99", + -14.336682319641112 + ], + [ + "▁Janesville", + -14.336715698242188 + ], + [ + "▁Cabana", + -14.33671760559082 + ], + [ + "▁Crusader", + -14.336722373962402 + ], + [ + "▁Truro", + -14.336726188659668 + ], + [ + "▁dinning", + -14.336752891540527 + ], + [ + "▁Juliette", + -14.336785316467283 + ], + [ + "HCA", + -14.33680534362793 + ], + [ + "▁viewership", + -14.336810111999512 + ], + [ + "▁Woodrow", + -14.336822509765623 + ], + [ + "▁wrangler", + -14.336851119995115 + ], + [ + "966", + -14.336894989013672 + ], + [ + "▁BUILDING", + -14.336925506591797 + ], + [ + "▁reactivity", + -14.33693504333496 + ], + [ + "PaaS", + -14.337031364440918 + ], + [ + "Forty", + -14.337068557739258 + ], + [ + "▁Creature", + -14.337068557739258 + ], + [ + "▁honouring", + -14.337080955505373 + ], + [ + "▁trespassing", + -14.337175369262695 + ], + [ + "▁TRX", + -14.337246894836426 + ], + [ + "▁unsold", + -14.33730125427246 + ], + [ + "049", + -14.337302207946776 + ], + [ + "apk", + -14.33733081817627 + ], + [ + "idor", + -14.337331771850586 + ], + [ + 
"▁Soaking", + -14.33743953704834 + ], + [ + "AMCO", + -14.337525367736816 + ], + [ + "conceived", + -14.337554931640623 + ], + [ + "▁refinished", + -14.337579727172852 + ], + [ + "▁Speaks", + -14.33777141571045 + ], + [ + "Kel", + -14.337785720825195 + ], + [ + "▁Ruck", + -14.337794303894045 + ], + [ + "Kristin", + -14.337810516357422 + ], + [ + "▁stung", + -14.337818145751951 + ], + [ + "▁Enjoying", + -14.337838172912598 + ], + [ + "▁savesay", + -14.337852478027344 + ], + [ + "ayi", + -14.33791446685791 + ], + [ + "Soak", + -14.337944984436035 + ], + [ + "087", + -14.338000297546388 + ], + [ + "▁15-30", + -14.338159561157228 + ], + [ + "SDG", + -14.338199615478516 + ], + [ + "▁Flan", + -14.338249206542969 + ], + [ + "▁wrinkled", + -14.338260650634766 + ], + [ + "▁Fung", + -14.338271141052246 + ], + [ + "▁Ruler", + -14.338298797607422 + ], + [ + "oxidants", + -14.338315963745115 + ], + [ + "▁AY", + -14.338324546813965 + ], + [ + "LMS", + -14.338345527648926 + ], + [ + "▁ascertained", + -14.33836555480957 + ], + [ + "▁Eyre", + -14.338380813598633 + ], + [ + "▁registrants", + -14.33838176727295 + ], + [ + "▁Manipur", + -14.338418960571287 + ], + [ + "▁LUX", + -14.338427543640137 + ], + [ + "▁Dix", + -14.338449478149414 + ], + [ + "▁Praying", + -14.338492393493652 + ], + [ + "ffler", + -14.338502883911133 + ], + [ + "▁Divided", + -14.338502883911133 + ], + [ + "▁Gravi", + -14.33856201171875 + ], + [ + "hiko", + -14.338615417480469 + ], + [ + "renz", + -14.338635444641112 + ], + [ + "ussy", + -14.338653564453123 + ], + [ + "▁Eccles", + -14.338678359985352 + ], + [ + "▁Kenn", + -14.33886432647705 + ], + [ + "crying", + -14.338983535766602 + ], + [ + "▁Enclosures", + -14.339030265808104 + ], + [ + "Stim", + -14.33909034729004 + ], + [ + "___", + -14.339093208312988 + ], + [ + "wilson", + -14.339156150817873 + ], + [ + "▁Bout", + -14.3391695022583 + ], + [ + "▁(120", + -14.339181900024414 + ], + [ + "phae", + -14.33922290802002 + ], + [ + "▁PAX", + -14.339228630065918 + ], + [ + "▁Davie", + -14.339248657226562 + ], + [ + "▁AFM", + -14.339343070983888 + ], + [ + "utility", + -14.33937931060791 + ], + [ + "▁Councillors", + -14.339380264282228 + ], + [ + "▁CBO", + -14.33939266204834 + ], + [ + "▁Gastro", + -14.339423179626465 + ], + [ + "▁Capo", + -14.339494705200195 + ], + [ + "861", + -14.339597702026367 + ], + [ + "yres", + -14.339604377746582 + ], + [ + "▁04/19/2019", + -14.339621543884276 + ], + [ + "tolerant", + -14.339653968811035 + ], + [ + "▁Informed", + -14.339743614196776 + ], + [ + "▁Seminoles", + -14.339882850646973 + ], + [ + "▁phonetic", + -14.339930534362791 + ], + [ + "Separately", + -14.33999729156494 + ], + [ + "▁Zum", + -14.34006118774414 + ], + [ + "▁Vocabulary", + -14.340067863464355 + ], + [ + "▁Maas", + -14.340271949768066 + ], + [ + "▁$1.0", + -14.340298652648926 + ], + [ + "1.1%", + -14.340373992919922 + ], + [ + "▁lapses", + -14.340398788452148 + ], + [ + "▁Picard", + -14.34048557281494 + ], + [ + "▁Suburbs", + -14.340499877929688 + ], + [ + "hoon", + -14.340513229370115 + ], + [ + "▁cramp", + -14.34056282043457 + ], + [ + "▁spinners", + -14.340712547302246 + ], + [ + "▁Lumi", + -14.340874671936035 + ], + [ + "▁Pict", + -14.340886116027832 + ], + [ + "▁JET", + -14.340892791748049 + ], + [ + "piper", + -14.34091854095459 + ], + [ + "HIC", + -14.340970039367676 + ], + [ + "HAL", + -14.340972900390623 + ], + [ + "▁egress", + -14.341056823730469 + ], + [ + "Pub", + -14.341062545776367 + ], + [ + "45%", + -14.341071128845217 + ], + [ + "rni", + -14.34107780456543 + ], + [ + 
"▁crema", + -14.341127395629885 + ], + [ + "▁Kerri", + -14.341135025024414 + ], + [ + "ferrous", + -14.341194152832031 + ], + [ + "Automation", + -14.341211318969728 + ], + [ + "distinct", + -14.341216087341309 + ], + [ + "▁Naturalist", + -14.341235160827637 + ], + [ + "▁postwar", + -14.341238021850586 + ], + [ + "▁cobble", + -14.341242790222168 + ], + [ + "Complex", + -14.341246604919434 + ], + [ + "948", + -14.341289520263672 + ], + [ + "(9)", + -14.341300010681152 + ], + [ + "Brazilian", + -14.341307640075684 + ], + [ + "Honest", + -14.341317176818848 + ], + [ + "Entrepreneur", + -14.341327667236328 + ], + [ + "abstract", + -14.34138298034668 + ], + [ + "pepper", + -14.341489791870115 + ], + [ + "▁Donors", + -14.34153175354004 + ], + [ + "▁integers", + -14.341538429260254 + ], + [ + "regarded", + -14.34153938293457 + ], + [ + "Democratic", + -14.341544151306152 + ], + [ + "▁logbook", + -14.341559410095217 + ], + [ + "Yang", + -14.34158992767334 + ], + [ + "cional", + -14.34171199798584 + ], + [ + "▁bij", + -14.341829299926758 + ], + [ + "jetted", + -14.341898918151855 + ], + [ + "Required", + -14.34195041656494 + ], + [ + "Dealing", + -14.34197998046875 + ], + [ + "▁Toggle", + -14.341984748840332 + ], + [ + "▁Whom", + -14.342004776000977 + ], + [ + "▁EIS", + -14.342040061950684 + ], + [ + "▁exemplar", + -14.34206485748291 + ], + [ + "▁Gypsum", + -14.34208869934082 + ], + [ + "▁moat", + -14.34211254119873 + ], + [ + "NSW", + -14.342281341552734 + ], + [ + "▁Zephyr", + -14.342318534851074 + ], + [ + "▁arizona", + -14.342318534851074 + ], + [ + "04/18/2019", + -14.34231948852539 + ], + [ + "▁Algonquin", + -14.34231948852539 + ], + [ + "▁Confederacy", + -14.34231948852539 + ], + [ + "▁Dehradun", + -14.34231948852539 + ], + [ + "▁McMillan", + -14.34231948852539 + ], + [ + "▁PERFORMANCE", + -14.34231948852539 + ], + [ + "▁Prejudice", + -14.34231948852539 + ], + [ + "▁Prosecution", + -14.34231948852539 + ], + [ + "▁coleslaw", + -14.34231948852539 + ], + [ + "▁inconspicuous", + -14.34231948852539 + ], + [ + "▁poinsettia", + -14.34231948852539 + ], + [ + "▁Apostolic", + -14.342320442199709 + ], + [ + "▁Jeddah", + -14.34232234954834 + ], + [ + "▁inescapable", + -14.34232234954834 + ], + [ + "▁swerve", + -14.342324256896973 + ], + [ + "▁Grapefruit", + -14.342325210571287 + ], + [ + "▁SEMrush", + -14.342325210571287 + ], + [ + "▁crankshaft", + -14.342325210571287 + ], + [ + "▁condensing", + -14.342326164245604 + ], + [ + "▁ERISA", + -14.342329025268556 + ], + [ + "▁Acosta", + -14.342331886291504 + ], + [ + "▁Wendell", + -14.34233283996582 + ], + [ + "▁archivist", + -14.34233283996582 + ], + [ + "▁hydrant", + -14.34233283996582 + ], + [ + "References", + -14.342341423034668 + ], + [ + "▁Statute", + -14.3423433303833 + ], + [ + "▁retort", + -14.342345237731934 + ], + [ + "Monitoring", + -14.342348098754885 + ], + [ + "▁ALERT", + -14.3423490524292 + ], + [ + "▁adsorption", + -14.342354774475098 + ], + [ + "▁kibble", + -14.342363357543944 + ], + [ + "▁Cordoba", + -14.342364311218262 + ], + [ + "▁subspecies", + -14.342369079589844 + ], + [ + "▁Wylie", + -14.342370986938477 + ], + [ + "▁Schloss", + -14.342376708984377 + ], + [ + "SOLVED", + -14.342388153076172 + ], + [ + "▁Shipyard", + -14.342430114746094 + ], + [ + "▁Binance", + -14.342440605163574 + ], + [ + "▁Worthington", + -14.34244155883789 + ], + [ + "▁Rizzo", + -14.342442512512209 + ], + [ + "▁Nautical", + -14.342480659484863 + ], + [ + "▁subdomain", + -14.342486381530762 + ], + [ + "▁honeybee", + -14.34249496459961 + ], + [ + "angling", + 
-14.342551231384276 + ], + [ + "▁Manali", + -14.342552185058594 + ], + [ + "▁Kaur", + -14.342595100402832 + ], + [ + "▁LOG", + -14.34262752532959 + ], + [ + "roud", + -14.342680931091309 + ], + [ + "▁LIMITATION", + -14.342700958251951 + ], + [ + "▁headlamp", + -14.3427095413208 + ], + [ + "▁Valentina", + -14.34274673461914 + ], + [ + "▁SOON", + -14.342750549316406 + ], + [ + "Finger", + -14.34275245666504 + ], + [ + "▁Lifecycle", + -14.34275722503662 + ], + [ + "EIS", + -14.342805862426758 + ], + [ + "▁Lenten", + -14.342825889587402 + ], + [ + "▁knuckles", + -14.34282684326172 + ], + [ + "▁pulsed", + -14.342865943908691 + ], + [ + "▁detoxifying", + -14.342899322509766 + ], + [ + "FLAG", + -14.342940330505373 + ], + [ + "▁Kenwood", + -14.342961311340332 + ], + [ + "073", + -14.343045234680176 + ], + [ + "Dame", + -14.34306526184082 + ], + [ + "Holland", + -14.343087196350098 + ], + [ + "▁Maloney", + -14.343113899230955 + ], + [ + "▁77%", + -14.343254089355469 + ], + [ + "▁kerb", + -14.343299865722656 + ], + [ + "▁infiltrated", + -14.34333324432373 + ], + [ + "genous", + -14.343345642089844 + ], + [ + "ание", + -14.343493461608888 + ], + [ + "▁Lido", + -14.34353256225586 + ], + [ + "▁retraining", + -14.343535423278809 + ], + [ + "Aspect", + -14.343551635742188 + ], + [ + "▁outwardly", + -14.34356689453125 + ], + [ + "▁DePaul", + -14.343571662902832 + ], + [ + "▁Dema", + -14.343609809875488 + ], + [ + "ozi", + -14.343647003173828 + ], + [ + "cali", + -14.34366226196289 + ], + [ + "formatted", + -14.343695640563965 + ], + [ + "▁Organiser", + -14.343769073486328 + ], + [ + "▁Vending", + -14.343791961669922 + ], + [ + "PUT", + -14.343796730041504 + ], + [ + "Highlights", + -14.343828201293944 + ], + [ + "▁PWM", + -14.343831062316896 + ], + [ + "▁Lister", + -14.343836784362791 + ], + [ + "grate", + -14.34385871887207 + ], + [ + "▁alleyway", + -14.34389305114746 + ], + [ + "1982", + -14.344026565551758 + ], + [ + "▁varietals", + -14.344078063964844 + ], + [ + "Announcing", + -14.344109535217283 + ], + [ + "▁FEATURES", + -14.3441162109375 + ], + [ + "Closed", + -14.344133377075195 + ], + [ + "▁grater", + -14.344148635864258 + ], + [ + "▁trope", + -14.34416389465332 + ], + [ + "▁yin", + -14.3441743850708 + ], + [ + "▁flushes", + -14.344223976135254 + ], + [ + "distribution", + -14.344258308410645 + ], + [ + "▁swimsuits", + -14.344310760498049 + ], + [ + "▁heaping", + -14.344346046447754 + ], + [ + "▁Mw", + -14.344358444213867 + ], + [ + "▁hounds", + -14.34436321258545 + ], + [ + "▁Sander", + -14.344367027282717 + ], + [ + "▁Ramen", + -14.344452857971191 + ], + [ + "▁Sinha", + -14.344465255737305 + ], + [ + "EMC", + -14.344488143920898 + ], + [ + "PEA", + -14.344489097595217 + ], + [ + "▁axiom", + -14.344541549682615 + ], + [ + "laga", + -14.344564437866213 + ], + [ + "▁mirrorless", + -14.344637870788574 + ], + [ + "▁INSIDE", + -14.344694137573242 + ], + [ + "▁UPSC", + -14.344754219055176 + ], + [ + "▁20/20", + -14.344755172729492 + ], + [ + "rantz", + -14.34478759765625 + ], + [ + "▁Commando", + -14.344816207885742 + ], + [ + "honor", + -14.344828605651855 + ], + [ + "▁Chaser", + -14.344868659973145 + ], + [ + "▁Asda", + -14.344886779785156 + ], + [ + "▁JAN", + -14.344988822937012 + ], + [ + "impl", + -14.345030784606934 + ], + [ + "▁keyed", + -14.345030784606934 + ], + [ + "▁cornering", + -14.345046043395996 + ], + [ + "▁Sme", + -14.345142364501951 + ], + [ + "▁kilt", + -14.345224380493164 + ], + [ + "dren", + -14.345227241516112 + ], + [ + "▁offsetting", + -14.34532642364502 + ], + [ + "▁Marga", + 
-14.345343589782717 + ], + [ + "▁'60", + -14.34547233581543 + ], + [ + "▁Slipcover", + -14.34547233581543 + ], + [ + "▁EXCELLENT", + -14.345579147338867 + ], + [ + "▁spew", + -14.345629692077637 + ], + [ + "▁Ogle", + -14.345633506774902 + ], + [ + "▁Zur", + -14.345659255981444 + ], + [ + "▁domed", + -14.345717430114746 + ], + [ + "▁compels", + -14.345720291137695 + ], + [ + "▁Blending", + -14.345747947692873 + ], + [ + "▁EDS", + -14.345821380615234 + ], + [ + "▁waitlist", + -14.345833778381348 + ], + [ + "▁4′′", + -14.34584903717041 + ], + [ + "Org", + -14.345855712890623 + ], + [ + "▁STAY", + -14.345969200134276 + ], + [ + "teg", + -14.346013069152832 + ], + [ + "lata", + -14.346050262451172 + ], + [ + "▁habitation", + -14.34605884552002 + ], + [ + "rrr", + -14.346073150634766 + ], + [ + "kowitz", + -14.346123695373535 + ], + [ + "adian", + -14.346165657043455 + ], + [ + "▁bevy", + -14.34618091583252 + ], + [ + "▁smallholder", + -14.346211433410645 + ], + [ + "▁Ruins", + -14.34622573852539 + ], + [ + "▁fastball", + -14.346246719360352 + ], + [ + "2:05", + -14.346254348754885 + ], + [ + "994", + -14.346281051635742 + ], + [ + "▁Colouring", + -14.346288681030272 + ], + [ + "▁Juli", + -14.346373558044434 + ], + [ + "usch", + -14.346394538879396 + ], + [ + "▁socialising", + -14.34642219543457 + ], + [ + "▁LIS", + -14.346485137939451 + ], + [ + "▁param", + -14.346529006958008 + ], + [ + "▁Caro", + -14.346647262573242 + ], + [ + "throat", + -14.346674919128418 + ], + [ + "▁Loughborough", + -14.346745491027832 + ], + [ + "▁Broke", + -14.34678077697754 + ], + [ + "▁Subscribers", + -14.346782684326172 + ], + [ + "nui", + -14.346811294555664 + ], + [ + "▁IRIS", + -14.346842765808104 + ], + [ + "hain", + -14.346912384033203 + ], + [ + "iddy", + -14.346915245056152 + ], + [ + "▁SGD", + -14.346939086914062 + ], + [ + "Meat", + -14.347044944763184 + ], + [ + "▁7:45", + -14.347084045410156 + ], + [ + "lable", + -14.347084999084473 + ], + [ + "Trainer", + -14.347094535827637 + ], + [ + "ucked", + -14.3471097946167 + ], + [ + "egel", + -14.347142219543455 + ], + [ + "Conversely", + -14.347143173217772 + ], + [ + "Greetings", + -14.347145080566406 + ], + [ + "Isaiah", + -14.347149848937988 + ], + [ + "Symptoms", + -14.347149848937988 + ], + [ + "▁TBS", + -14.347152709960938 + ], + [ + "hybrid", + -14.347153663635254 + ], + [ + "Sole", + -14.34716510772705 + ], + [ + "▁UTF", + -14.347167015075684 + ], + [ + "Murphy", + -14.34716796875 + ], + [ + "Natalie", + -14.347220420837402 + ], + [ + "abri", + -14.347243309020996 + ], + [ + "▁Orton", + -14.347293853759766 + ], + [ + "conferencing", + -14.34731388092041 + ], + [ + "▁excesses", + -14.34742832183838 + ], + [ + "▁overrated", + -14.347463607788086 + ], + [ + "combined", + -14.34748077392578 + ], + [ + "▁SUS", + -14.347545623779297 + ], + [ + "▁bypassed", + -14.347579956054688 + ], + [ + "pronged", + -14.34760284423828 + ], + [ + "▁Forged", + -14.347615242004396 + ], + [ + "▁Ender", + -14.34762477874756 + ], + [ + "▁Jeh", + -14.347637176513672 + ], + [ + "cilla", + -14.347670555114746 + ], + [ + "wonderful", + -14.347766876220703 + ], + [ + "▁engender", + -14.347792625427246 + ], + [ + "skip", + -14.347796440124512 + ], + [ + "stik", + -14.347818374633787 + ], + [ + "▁respons", + -14.347871780395508 + ], + [ + "Giant", + -14.34787940979004 + ], + [ + "▁worktop", + -14.347893714904783 + ], + [ + "▁Hadi", + -14.347898483276367 + ], + [ + "▁criss", + -14.34792709350586 + ], + [ + "GON", + -14.347981452941896 + ], + [ + "▁5:15", + -14.347991943359377 + ], + [ + 
"gadi", + -14.34800910949707 + ], + [ + "▁Ria", + -14.34801959991455 + ], + [ + "margin", + -14.34812355041504 + ], + [ + "▁Hardie", + -14.348125457763672 + ], + [ + "▁Cabrera", + -14.348133087158203 + ], + [ + "▁Comptroller", + -14.348133087158203 + ], + [ + "▁Criterion", + -14.348133087158203 + ], + [ + "▁Lesotho", + -14.348133087158203 + ], + [ + "▁Smyrna", + -14.348133087158203 + ], + [ + "▁Tufted", + -14.348133087158203 + ], + [ + "▁Uncharted", + -14.348133087158203 + ], + [ + "▁accrual", + -14.348133087158203 + ], + [ + "▁jaundice", + -14.348133087158203 + ], + [ + "▁occassion", + -14.348133087158203 + ], + [ + "▁polynomial", + -14.348133087158203 + ], + [ + "▁sublimation", + -14.348133087158203 + ], + [ + "▁tastiest", + -14.348133087158203 + ], + [ + "▁Aquino", + -14.34813404083252 + ], + [ + "▁GARDEN", + -14.34813404083252 + ], + [ + "▁Waverly", + -14.34813404083252 + ], + [ + "▁oozing", + -14.34813404083252 + ], + [ + "▁Argyle", + -14.348134994506836 + ], + [ + "▁Fluorescent", + -14.348134994506836 + ], + [ + "▁impossibility", + -14.348134994506836 + ], + [ + "▁Acropolis", + -14.348136901855469 + ], + [ + "▁Nederland", + -14.348136901855469 + ], + [ + "▁Dawkins", + -14.348137855529783 + ], + [ + "▁Mushrooms", + -14.348138809204102 + ], + [ + "▁Recital", + -14.348138809204102 + ], + [ + "▁HUMAN", + -14.34814739227295 + ], + [ + "▁Laf", + -14.34814739227295 + ], + [ + "▁conceding", + -14.348152160644531 + ], + [ + "▁pennant", + -14.34817123413086 + ], + [ + "pity", + -14.34817886352539 + ], + [ + "▁Paranormal", + -14.348186492919922 + ], + [ + "▁eurozone", + -14.348188400268556 + ], + [ + "▁Higgs", + -14.348193168640137 + ], + [ + "▁Camilla", + -14.3482084274292 + ], + [ + "▁Nepalese", + -14.34821319580078 + ], + [ + "▁preparer", + -14.34821891784668 + ], + [ + "▁Equation", + -14.348224639892578 + ], + [ + "▁woefully", + -14.348225593566896 + ], + [ + "Jose", + -14.348258018493652 + ], + [ + "▁Merriam", + -14.34828281402588 + ], + [ + "CCP", + -14.348286628723145 + ], + [ + "▁regency", + -14.348286628723145 + ], + [ + "▁4800", + -14.348294258117676 + ], + [ + "▁anarchist", + -14.348295211791992 + ], + [ + "▁unskilled", + -14.34830093383789 + ], + [ + "▁Poplar", + -14.348309516906738 + ], + [ + "▁teardrop", + -14.34835433959961 + ], + [ + "▁Boyfriend", + -14.348359107971191 + ], + [ + "albert", + -14.348360061645508 + ], + [ + "▁Rooftop", + -14.348371505737305 + ], + [ + "▁endoscopic", + -14.348381042480469 + ], + [ + "Comic", + -14.348383903503418 + ], + [ + "▁symbolically", + -14.348414421081545 + ], + [ + "▁wildflower", + -14.348421096801758 + ], + [ + "WY", + -14.348422050476074 + ], + [ + "Irrespective", + -14.34842586517334 + ], + [ + "ananda", + -14.348435401916504 + ], + [ + "Intercontinental", + -14.348467826843262 + ], + [ + "ogh", + -14.348482131958008 + ], + [ + "▁Rumble", + -14.34850025177002 + ], + [ + "WCA", + -14.348528861999512 + ], + [ + "▁Parameters", + -14.34853458404541 + ], + [ + "▁Spectro", + -14.348544120788574 + ], + [ + "▁toasting", + -14.34855842590332 + ], + [ + "▁Sitka", + -14.34856128692627 + ], + [ + "▁juxtaposed", + -14.348565101623535 + ], + [ + "Cake", + -14.348578453063965 + ], + [ + "▁Nether", + -14.348581314086914 + ], + [ + "Areas", + -14.348587989807127 + ], + [ + "8500", + -14.348589897155762 + ], + [ + "▁erroneously", + -14.348603248596191 + ], + [ + "▁repurpose", + -14.348638534545898 + ], + [ + "▁Caught", + -14.348655700683594 + ], + [ + "▁incurable", + -14.34866428375244 + ], + [ + "▁Beryl", + -14.348678588867188 + ], + [ + "▁26\"", + 
-14.348705291748049 + ], + [ + "Sleeping", + -14.348719596862791 + ], + [ + "▁Bagley", + -14.34874153137207 + ], + [ + "▁eluded", + -14.348752975463867 + ], + [ + "▁AGREE", + -14.34877586364746 + ], + [ + "▁huddled", + -14.348780632019045 + ], + [ + "naga", + -14.348791122436523 + ], + [ + "▁errand", + -14.348806381225586 + ], + [ + "Listener", + -14.348809242248535 + ], + [ + "targeting", + -14.348810195922852 + ], + [ + "CFA", + -14.34884548187256 + ], + [ + "▁$2.6", + -14.34887409210205 + ], + [ + "▁commenters", + -14.348968505859377 + ], + [ + "TLS", + -14.34902286529541 + ], + [ + "▁honourable", + -14.349098205566406 + ], + [ + "lection", + -14.349143981933594 + ], + [ + "▁recounting", + -14.349166870117188 + ], + [ + "▁Kuk", + -14.349207878112791 + ], + [ + "▁Hamlin", + -14.349228858947754 + ], + [ + "▁Vary", + -14.349246978759766 + ], + [ + "▁attractively", + -14.349261283874512 + ], + [ + "953", + -14.349278450012209 + ], + [ + "1974", + -14.3493013381958 + ], + [ + "▁Azerbaijani", + -14.34933090209961 + ], + [ + "▁Kore", + -14.349339485168455 + ], + [ + "▁imparted", + -14.349357604980469 + ], + [ + "asaki", + -14.349428176879885 + ], + [ + "cluttering", + -14.34946632385254 + ], + [ + "▁underdeveloped", + -14.349468231201172 + ], + [ + "▁plowing", + -14.349472999572754 + ], + [ + "Admire", + -14.34947681427002 + ], + [ + "▁unpacked", + -14.349698066711426 + ], + [ + "▁Heuer", + -14.34989070892334 + ], + [ + "vira", + -14.349909782409668 + ], + [ + "▁tradesman", + -14.349921226501465 + ], + [ + "texas", + -14.35004711151123 + ], + [ + "▁Altman", + -14.350079536437988 + ], + [ + "Hoo", + -14.350099563598633 + ], + [ + "▁Redeem", + -14.35020637512207 + ], + [ + "▁Pleas", + -14.350225448608398 + ], + [ + "▁MFP", + -14.35024070739746 + ], + [ + "▁AXA", + -14.35026741027832 + ], + [ + "▁$2.1", + -14.350269317626951 + ], + [ + "fence", + -14.350295066833496 + ], + [ + "▁Nichol", + -14.350322723388672 + ], + [ + "▁childlike", + -14.350342750549316 + ], + [ + "▁Arne", + -14.350382804870604 + ], + [ + "▁Shahid", + -14.350436210632324 + ], + [ + "Supported", + -14.350454330444336 + ], + [ + "▁honorees", + -14.350528717041016 + ], + [ + "ishes", + -14.350531578063965 + ], + [ + "chosen", + -14.350655555725098 + ], + [ + "chiro", + -14.350699424743652 + ], + [ + "▁Cé", + -14.350749015808104 + ], + [ + "ucca", + -14.350777626037598 + ], + [ + "hera", + -14.350830078125 + ], + [ + "▁Trending", + -14.350850105285645 + ], + [ + "▁sharpener", + -14.350923538208008 + ], + [ + "▁firework", + -14.350937843322754 + ], + [ + "vegan", + -14.350958824157717 + ], + [ + "▁’70", + -14.350985527038574 + ], + [ + "oF", + -14.351045608520508 + ], + [ + "rwood", + -14.351066589355469 + ], + [ + "6:10", + -14.35108757019043 + ], + [ + "Obsess", + -14.351151466369627 + ], + [ + "▁inquires", + -14.351219177246094 + ], + [ + "▁motorcyclists", + -14.35122776031494 + ], + [ + "▁riddle", + -14.351269721984863 + ], + [ + "nale", + -14.351380348205566 + ], + [ + "▁Caus", + -14.351480484008787 + ], + [ + "▁Repeated", + -14.351505279541016 + ], + [ + "▁Snug", + -14.35156536102295 + ], + [ + "▁(75", + -14.351574897766112 + ], + [ + "arlington", + -14.35157585144043 + ], + [ + "▁Paulson", + -14.351581573486328 + ], + [ + "▁Civilian", + -14.351624488830566 + ], + [ + "▁Mikael", + -14.35165023803711 + ], + [ + "▁Kao", + -14.351731300354004 + ], + [ + "▁BAE", + -14.35181713104248 + ], + [ + "▁Dogg", + -14.351820945739746 + ], + [ + "Environment", + -14.351826667785645 + ], + [ + "▁Sidd", + -14.351848602294922 + ], + [ + 
"▁african", + -14.351964950561523 + ], + [ + "blu", + -14.351978302001951 + ], + [ + "▁WORKS", + -14.352058410644531 + ], + [ + "▁Maud", + -14.352069854736328 + ], + [ + "BURG", + -14.352083206176758 + ], + [ + "▁rasp", + -14.352103233337402 + ], + [ + "▁tipster", + -14.352103233337402 + ], + [ + "▁Bui", + -14.352105140686035 + ], + [ + "=0.0", + -14.352130889892578 + ], + [ + "ZAR", + -14.352153778076172 + ], + [ + "▁Disconnect", + -14.352190971374512 + ], + [ + "▁Osman", + -14.352235794067385 + ], + [ + "Problems", + -14.352294921875 + ], + [ + "▁NMR", + -14.352337837219238 + ], + [ + "MEC", + -14.35236644744873 + ], + [ + "▁tantrums", + -14.35239315032959 + ], + [ + "▁Expanding", + -14.352459907531738 + ], + [ + "Topics", + -14.352540969848633 + ], + [ + "horst", + -14.352569580078123 + ], + [ + "▁stepper", + -14.352631568908691 + ], + [ + "▁PURE", + -14.35273551940918 + ], + [ + "TES", + -14.35274600982666 + ], + [ + "▁RPA", + -14.352848052978516 + ], + [ + "▁obsess", + -14.352904319763184 + ], + [ + "asty", + -14.353014945983888 + ], + [ + "▁midge", + -14.353023529052734 + ], + [ + "Translation", + -14.353075981140137 + ], + [ + "Reducing", + -14.353116989135742 + ], + [ + "entrepreneur", + -14.353132247924805 + ], + [ + "chocolate", + -14.353139877319336 + ], + [ + "Spacious", + -14.353140830993652 + ], + [ + "Rubber", + -14.353164672851562 + ], + [ + "▁PAM", + -14.353189468383787 + ], + [ + "Recommendation", + -14.353242874145508 + ], + [ + "Handmade", + -14.353275299072266 + ], + [ + "Communicate", + -14.35330867767334 + ], + [ + "▁Naz", + -14.353326797485352 + ], + [ + "3,400", + -14.35334587097168 + ], + [ + "optimal", + -14.353350639343262 + ], + [ + "Retrieve", + -14.353354454040527 + ], + [ + "▁Criticism", + -14.353355407714844 + ], + [ + "▁delineate", + -14.353379249572754 + ], + [ + "Supervise", + -14.353382110595703 + ], + [ + "Gap", + -14.353452682495115 + ], + [ + "▁magnify", + -14.353473663330078 + ], + [ + "063", + -14.353489875793455 + ], + [ + "chuck", + -14.35353660583496 + ], + [ + "TIR", + -14.353550910949709 + ], + [ + "Minute", + -14.353582382202148 + ], + [ + "▁terminally", + -14.353595733642578 + ], + [ + "▁Yog", + -14.353639602661133 + ], + [ + "▁SAND", + -14.353641510009766 + ], + [ + "london", + -14.353795051574709 + ], + [ + "cak", + -14.353867530822754 + ], + [ + "▁Onions", + -14.35393524169922 + ], + [ + "Investigating", + -14.353981018066406 + ], + [ + "▁Belvedere", + -14.353981018066406 + ], + [ + "▁GORGEOUS", + -14.353981018066406 + ], + [ + "▁Kissimmee", + -14.353981018066406 + ], + [ + "▁Volatility", + -14.353981018066406 + ], + [ + "▁accumulator", + -14.353981018066406 + ], + [ + "▁carotid", + -14.353981018066406 + ], + [ + "▁disobey", + -14.353981018066406 + ], + [ + "▁elaboration", + -14.353981018066406 + ], + [ + "▁fractal", + -14.353981018066406 + ], + [ + "▁jellies", + -14.353981018066406 + ], + [ + "▁sulphate", + -14.353981018066406 + ], + [ + "▁heaviness", + -14.353981971740724 + ], + [ + "OFFICIAL", + -14.35398292541504 + ], + [ + "▁Celeste", + -14.353984832763672 + ], + [ + "▁Hotspur", + -14.353984832763672 + ], + [ + "▁celebrant", + -14.353985786437988 + ], + [ + "▁Shelving", + -14.353986740112305 + ], + [ + "▁Valdez", + -14.353988647460938 + ], + [ + "▁Rochdale", + -14.35399055480957 + ], + [ + "▁Bauhaus", + -14.353992462158203 + ], + [ + "▁REALTY", + -14.35400390625 + ], + [ + "▁fetish", + -14.354004859924316 + ], + [ + "▁(1982)", + -14.354008674621582 + ], + [ + "▁infusing", + -14.354008674621582 + ], + [ + "▁Gabby", + 
-14.354011535644531 + ], + [ + "▁Dundas", + -14.354018211364746 + ], + [ + "▁Appraiser", + -14.354019165039062 + ], + [ + "▁mistreatment", + -14.354029655456545 + ], + [ + "▁repurposing", + -14.354029655456545 + ], + [ + "▁Racine", + -14.354033470153809 + ], + [ + "▁peculiarities", + -14.35403537750244 + ], + [ + "▁Coronation", + -14.354046821594238 + ], + [ + "▁sparrow", + -14.354055404663086 + ], + [ + "▁Exede", + -14.354066848754885 + ], + [ + "▁TEFL", + -14.354068756103516 + ], + [ + "▁Algorithms", + -14.354083061218262 + ], + [ + "▁tenuous", + -14.354085922241213 + ], + [ + "BROWN", + -14.354090690612791 + ], + [ + "▁archetype", + -14.35412311553955 + ], + [ + "▁technologist", + -14.354147911071776 + ], + [ + "irish", + -14.354169845581056 + ], + [ + "Bahn", + -14.354171752929688 + ], + [ + "▁buzzword", + -14.354183197021484 + ], + [ + "▁occupiers", + -14.354191780090332 + ], + [ + "▁Hermitage", + -14.354205131530762 + ], + [ + "▁Telford", + -14.354233741760254 + ], + [ + "▁Middlebury", + -14.35427951812744 + ], + [ + "▁Palacio", + -14.354280471801758 + ], + [ + "▁Stockport", + -14.354281425476074 + ], + [ + "BHS", + -14.35428237915039 + ], + [ + "Measurement", + -14.35428524017334 + ], + [ + "▁untested", + -14.354294776916504 + ], + [ + "iyan", + -14.354305267333984 + ], + [ + "▁subordinates", + -14.3543119430542 + ], + [ + "▁Hwang", + -14.354315757751465 + ], + [ + "▁bol", + -14.354334831237791 + ], + [ + "DRC", + -14.35434627532959 + ], + [ + "▁macarons", + -14.354415893554688 + ], + [ + "Pho", + -14.354435920715332 + ], + [ + "▁Bulbs", + -14.354450225830078 + ], + [ + "▁synthesizers", + -14.354491233825684 + ], + [ + "▁Favre", + -14.35450267791748 + ], + [ + "▁Kildare", + -14.354525566101074 + ], + [ + "▁1825", + -14.354537010192873 + ], + [ + "▁Broth", + -14.35457992553711 + ], + [ + "ITH", + -14.354612350463867 + ], + [ + "▁Composites", + -14.35464859008789 + ], + [ + "Examining", + -14.354673385620115 + ], + [ + "▁Elemental", + -14.354683876037598 + ], + [ + "VW", + -14.354706764221191 + ], + [ + "▁Auctioneer", + -14.354719161987305 + ], + [ + "Render", + -14.354734420776367 + ], + [ + "▁craziest", + -14.354743003845217 + ], + [ + "▁FLEX", + -14.354744911193848 + ], + [ + "▁Fingerprint", + -14.35476303100586 + ], + [ + "▁76%", + -14.354833602905272 + ], + [ + "▁repainting", + -14.354859352111816 + ], + [ + "▁Wandering", + -14.354875564575195 + ], + [ + "▁kneading", + -14.35496997833252 + ], + [ + "▁sportsman", + -14.354974746704102 + ], + [ + "▁shimmery", + -14.354997634887695 + ], + [ + "▁Yeast", + -14.355016708374023 + ], + [ + "▁thawing", + -14.355056762695312 + ], + [ + "▁Gree", + -14.355132102966309 + ], + [ + "Vault", + -14.355183601379396 + ], + [ + "▁flattered", + -14.355228424072266 + ], + [ + "▁Bie", + -14.355314254760742 + ], + [ + "▁crushes", + -14.355324745178224 + ], + [ + "▁broan", + -14.355350494384766 + ], + [ + "▁carelessness", + -14.35542106628418 + ], + [ + "▁Fug", + -14.355456352233888 + ], + [ + "▁junctions", + -14.35548210144043 + ], + [ + "Indiana", + -14.355494499206545 + ], + [ + "EFF", + -14.355498313903809 + ], + [ + "▁Corrosion", + -14.355504989624023 + ], + [ + "schke", + -14.355506896972656 + ], + [ + "Bert", + -14.355542182922363 + ], + [ + "▁insulted", + -14.355552673339844 + ], + [ + "▁compressing", + -14.355591773986816 + ], + [ + "▁Transforming", + -14.355632781982422 + ], + [ + "▁£10,000", + -14.355642318725586 + ], + [ + "▁0.3%", + -14.355690956115724 + ], + [ + "ugan", + -14.355795860290527 + ], + [ + "▁Gag", + -14.35584545135498 + ], + [ 
+ "march", + -14.355874061584473 + ], + [ + "▁(300", + -14.35589599609375 + ], + [ + "▁washington", + -14.355911254882812 + ], + [ + "5/16", + -14.35592555999756 + ], + [ + "imoto", + -14.355950355529783 + ], + [ + "▁Median", + -14.356064796447754 + ], + [ + "ONIC", + -14.356074333190918 + ], + [ + "urea", + -14.356118202209473 + ], + [ + "▁sawing", + -14.356131553649902 + ], + [ + "Newly", + -14.356134414672852 + ], + [ + "▁wrecks", + -14.356135368347168 + ], + [ + "illary", + -14.356158256530762 + ], + [ + "updated", + -14.356199264526367 + ], + [ + "Rag", + -14.356244087219238 + ], + [ + "▁endorses", + -14.356245040893556 + ], + [ + "▁11.30", + -14.356257438659668 + ], + [ + "▁raffles", + -14.356264114379885 + ], + [ + "zim", + -14.356295585632324 + ], + [ + "evening", + -14.356361389160156 + ], + [ + "▁jihad", + -14.3563814163208 + ], + [ + "mbr", + -14.356400489807127 + ], + [ + "▁astm", + -14.35641098022461 + ], + [ + "▁crepes", + -14.356425285339355 + ], + [ + "▁Defendants", + -14.356426239013672 + ], + [ + "▁Della", + -14.356457710266112 + ], + [ + "▁Spent", + -14.356513023376465 + ], + [ + "▁Commissioning", + -14.356552124023438 + ], + [ + "▁69%", + -14.356595039367676 + ], + [ + "▁Chore", + -14.356597900390623 + ], + [ + "mali", + -14.356606483459473 + ], + [ + "▁Kare", + -14.356690406799316 + ], + [ + "MOO", + -14.356695175170898 + ], + [ + "▁redirecting", + -14.35673713684082 + ], + [ + "▁______", + -14.356764793395996 + ], + [ + "6.25", + -14.356844902038574 + ], + [ + "ficent", + -14.356927871704102 + ], + [ + "MOR", + -14.356928825378418 + ], + [ + "###", + -14.35694408416748 + ], + [ + "meh", + -14.356976509094238 + ], + [ + "VERY", + -14.357030868530272 + ], + [ + "▁puns", + -14.35704231262207 + ], + [ + "▁nicknames", + -14.357072830200195 + ], + [ + "942", + -14.357093811035156 + ], + [ + "▁audiologist", + -14.357105255126951 + ], + [ + "▁Flap", + -14.357141494750977 + ], + [ + "▁Terrorist", + -14.35715389251709 + ], + [ + "▁Null", + -14.357206344604492 + ], + [ + "▁Abb", + -14.357258796691896 + ], + [ + "PLAN", + -14.357298851013184 + ], + [ + "▁Ganesha", + -14.357327461242676 + ], + [ + "▁1832", + -14.357366561889648 + ], + [ + "▁backpacker", + -14.357403755187988 + ], + [ + "▁(185", + -14.357426643371582 + ], + [ + "▁Saro", + -14.35752296447754 + ], + [ + "▁motherboards", + -14.357571601867676 + ], + [ + "TAM", + -14.35757541656494 + ], + [ + "▁ducted", + -14.357995986938477 + ], + [ + "▁Ayr", + -14.3580322265625 + ], + [ + "▁slams", + -14.35803508758545 + ], + [ + "chten", + -14.358074188232422 + ], + [ + "▁Jang", + -14.358227729797363 + ], + [ + "▁Thiel", + -14.35830307006836 + ], + [ + "TAT", + -14.358322143554688 + ], + [ + "rato", + -14.35835075378418 + ], + [ + "▁Philo", + -14.358379364013672 + ], + [ + "ROL", + -14.358430862426758 + ], + [ + "▁distill", + -14.358431816101074 + ], + [ + "▁Dime", + -14.35843563079834 + ], + [ + "FIA", + -14.358469009399414 + ], + [ + "Blast", + -14.35861110687256 + ], + [ + "rific", + -14.358623504638672 + ], + [ + "037", + -14.35865592956543 + ], + [ + "▁ABR", + -14.358709335327148 + ], + [ + "▁Berne", + -14.358722686767578 + ], + [ + "qualification", + -14.358747482299805 + ], + [ + "0:0", + -14.358804702758787 + ], + [ + "Illustration", + -14.35885238647461 + ], + [ + "ASTER", + -14.358874320983888 + ], + [ + "celli", + -14.358976364135742 + ], + [ + "▁1780", + -14.359039306640623 + ], + [ + "Historic", + -14.359055519104004 + ], + [ + "synthetic", + -14.3590669631958 + ], + [ + "busting", + -14.359130859375 + ], + [ + 
"Relative", + -14.359149932861328 + ], + [ + "▁Castor", + -14.359166145324709 + ], + [ + "Incredible", + -14.35916805267334 + ], + [ + "erri", + -14.359189987182615 + ], + [ + "failure", + -14.359197616577148 + ], + [ + "cruci", + -14.359210014343262 + ], + [ + "▁Ouch", + -14.359356880187988 + ], + [ + "892", + -14.359381675720217 + ], + [ + "Blank", + -14.359414100646973 + ], + [ + "▁Tinto", + -14.359416961669922 + ], + [ + "▁PPE", + -14.359456062316896 + ], + [ + "translated", + -14.359471321105955 + ], + [ + "▁#3:", + -14.35947322845459 + ], + [ + "accuracy", + -14.35958480834961 + ], + [ + "tapping", + -14.359588623046877 + ], + [ + "Erase", + -14.359625816345217 + ], + [ + "Solo", + -14.35963535308838 + ], + [ + "▁Boating", + -14.359697341918944 + ], + [ + "▁skirmish", + -14.359721183776855 + ], + [ + "INCLUDE", + -14.359783172607422 + ], + [ + "▁Zhen", + -14.359795570373535 + ], + [ + "▁DETAILS", + -14.35986328125 + ], + [ + "▁Ghaziabad", + -14.35986328125 + ], + [ + "▁Pemberton", + -14.35986328125 + ], + [ + "▁Prentice", + -14.35986328125 + ], + [ + "▁Udaipur", + -14.35986328125 + ], + [ + "▁adjuvant", + -14.35986328125 + ], + [ + "▁aggressor", + -14.35986328125 + ], + [ + "▁balustrade", + -14.35986328125 + ], + [ + "▁electrified", + -14.35986328125 + ], + [ + "▁exaggerating", + -14.35986328125 + ], + [ + "▁mesmerising", + -14.35986328125 + ], + [ + "▁parentheses", + -14.35986328125 + ], + [ + "▁sensibly", + -14.35986328125 + ], + [ + "▁symbiotic", + -14.35986328125 + ], + [ + "▁Benoit", + -14.359864234924316 + ], + [ + "▁Chilliwack", + -14.359864234924316 + ], + [ + "▁Hennessy", + -14.359864234924316 + ], + [ + "▁cathartic", + -14.359864234924316 + ], + [ + "▁unfaithful", + -14.359864234924316 + ], + [ + "▁Atherton", + -14.359865188598633 + ], + [ + "▁brioche", + -14.35986614227295 + ], + [ + "▁Assange", + -14.359868049621582 + ], + [ + "▁extrapolate", + -14.359868049621582 + ], + [ + "▁Rouhani", + -14.359869003295898 + ], + [ + "▁astringent", + -14.359869956970217 + ], + [ + "▁Mascara", + -14.359870910644531 + ], + [ + "▁Manatee", + -14.359871864318848 + ], + [ + "▁mumbai", + -14.35988426208496 + ], + [ + "▁landslides", + -14.35989761352539 + ], + [ + "▁NTFS", + -14.359905242919922 + ], + [ + "▁Focal", + -14.359912872314451 + ], + [ + "▁(37", + -14.35993480682373 + ], + [ + "▁Gasket", + -14.359936714172363 + ], + [ + "▁vitae", + -14.359936714172363 + ], + [ + "▁Adolph", + -14.35993766784668 + ], + [ + "▁Dowd", + -14.359963417053224 + ], + [ + "▁Procter", + -14.359990119934082 + ], + [ + "▁Ableton", + -14.35999870300293 + ], + [ + "▁pharmacological", + -14.360004425048828 + ], + [ + "▁tannin", + -14.360004425048828 + ], + [ + "▁gelding", + -14.360025405883787 + ], + [ + "▁panicking", + -14.360025405883787 + ], + [ + "▁Juicy", + -14.360071182250977 + ], + [ + "▁Obstetrics", + -14.360074043273926 + ], + [ + "Lachlan", + -14.360101699829102 + ], + [ + "▁MBS", + -14.360105514526367 + ], + [ + "▁Lari", + -14.360108375549316 + ], + [ + "listening", + -14.360124588012695 + ], + [ + "▁biofuel", + -14.360151290893556 + ], + [ + "▁GOING", + -14.36016082763672 + ], + [ + "▁Loveseat", + -14.36016082763672 + ], + [ + "▁irrigated", + -14.36021327972412 + ], + [ + "▁wickedness", + -14.360215187072754 + ], + [ + "▁Freshwater", + -14.36021614074707 + ], + [ + "▁truckload", + -14.360225677490234 + ], + [ + "▁Harmonic", + -14.360265731811523 + ], + [ + "adopt", + -14.360311508178713 + ], + [ + "▁Tuk", + -14.360369682312012 + ], + [ + "▁distilling", + -14.360398292541504 + ], + [ + "crown", + 
-14.360405921936035 + ], + [ + "OPTION", + -14.360414505004885 + ], + [ + "Crush", + -14.360417366027832 + ], + [ + "▁SONY", + -14.36042308807373 + ], + [ + "▁Muddy", + -14.360445022583008 + ], + [ + "▁bunting", + -14.36045742034912 + ], + [ + "▁introverted", + -14.360468864440918 + ], + [ + "▁PHYS", + -14.360496520996094 + ], + [ + "▁Mankind", + -14.360504150390623 + ], + [ + "fancy", + -14.360507011413574 + ], + [ + "▁chronologically", + -14.360589981079102 + ], + [ + "▁Idris", + -14.360591888427734 + ], + [ + "IFICATION", + -14.360648155212402 + ], + [ + "▁reconditioned", + -14.360678672790527 + ], + [ + "▁strapless", + -14.360688209533691 + ], + [ + "▁grandstand", + -14.360690116882324 + ], + [ + "EIA", + -14.360718727111816 + ], + [ + "▁starfish", + -14.360735893249512 + ], + [ + "waisted", + -14.36074447631836 + ], + [ + "ocyte", + -14.360767364501951 + ], + [ + "▁Tid", + -14.360788345336914 + ], + [ + "▁Sunlight", + -14.360803604125977 + ], + [ + "▁Grooming", + -14.360944747924805 + ], + [ + "▁Countryside", + -14.360965728759766 + ], + [ + "▁Cie", + -14.360991477966309 + ], + [ + "▁watertight", + -14.360995292663574 + ], + [ + "▁Supervisory", + -14.36099910736084 + ], + [ + "minder", + -14.361035346984863 + ], + [ + "▁jaded", + -14.361055374145508 + ], + [ + "▁Owning", + -14.361078262329102 + ], + [ + "▁footfall", + -14.361113548278809 + ], + [ + "▁acura", + -14.36111545562744 + ], + [ + "▁Vee", + -14.361141204833984 + ], + [ + "▁homolog", + -14.36118221282959 + ], + [ + "▁Germantown", + -14.361251831054688 + ], + [ + "ofsky", + -14.361287117004396 + ], + [ + "▁realist", + -14.361289978027344 + ], + [ + "▁armament", + -14.361331939697266 + ], + [ + "▁Nourish", + -14.361334800720217 + ], + [ + "▁Drago", + -14.361342430114746 + ], + [ + "▁ITU", + -14.3613862991333 + ], + [ + "▁dissenting", + -14.361468315124512 + ], + [ + "Strat", + -14.361470222473145 + ], + [ + "▁flagstone", + -14.361482620239258 + ], + [ + "▁Edi", + -14.361513137817385 + ], + [ + "▁Cocker", + -14.361525535583496 + ], + [ + "▁coward", + -14.36157512664795 + ], + [ + "▁FZ", + -14.361719131469728 + ], + [ + "Pope", + -14.361734390258787 + ], + [ + "▁scone", + -14.361759185791016 + ], + [ + "▁Ranked", + -14.361767768859863 + ], + [ + "▁cine", + -14.36186695098877 + ], + [ + "893", + -14.36186981201172 + ], + [ + "▁Truss", + -14.361879348754885 + ], + [ + "▁frigidaire", + -14.361979484558104 + ], + [ + "▁Mocha", + -14.361980438232422 + ], + [ + "▁inherits", + -14.36202335357666 + ], + [ + "skate", + -14.362030029296877 + ], + [ + "▁RRP", + -14.362141609191896 + ], + [ + "▁Urs", + -14.362156867980955 + ], + [ + "▁Transcription", + -14.362173080444336 + ], + [ + "vii", + -14.362201690673828 + ], + [ + "▁Prana", + -14.362225532531738 + ], + [ + "▁Yeh", + -14.362306594848633 + ], + [ + "▁expressway", + -14.362311363220217 + ], + [ + "▁Northfield", + -14.362323760986328 + ], + [ + "▁Folders", + -14.36244773864746 + ], + [ + "▁nerf", + -14.362459182739258 + ], + [ + "Frankly", + -14.36252498626709 + ], + [ + "▁Paco", + -14.362615585327148 + ], + [ + "rj", + -14.362659454345703 + ], + [ + "▁Mediator", + -14.362762451171877 + ], + [ + "TREE", + -14.362911224365234 + ], + [ + "Nokia", + -14.363059997558594 + ], + [ + "▁Dru", + -14.36308765411377 + ], + [ + "▁Dread", + -14.3630952835083 + ], + [ + "▁welt", + -14.363096237182615 + ], + [ + "▁Professions", + -14.363110542297363 + ], + [ + "▁hahaha", + -14.363142013549805 + ], + [ + "effi", + -14.363153457641602 + ], + [ + "▁Adjunct", + -14.363170623779297 + ], + [ + "Tac", + 
-14.363259315490724 + ], + [ + "examine", + -14.36333465576172 + ], + [ + "hanga", + -14.363337516784668 + ], + [ + "▁Oats", + -14.363438606262209 + ], + [ + "rho", + -14.363451957702637 + ], + [ + "▁RAL", + -14.3634672164917 + ], + [ + "STI", + -14.363480567932127 + ], + [ + "▁Kiln", + -14.363480567932127 + ], + [ + "▁stylishly", + -14.36352252960205 + ], + [ + "QD", + -14.363533020019531 + ], + [ + "antu", + -14.363555908203123 + ], + [ + "▁Derivatives", + -14.363641738891602 + ], + [ + "▁shameless", + -14.363656044006348 + ], + [ + "▁Pash", + -14.363672256469728 + ], + [ + "Lau", + -14.363792419433594 + ], + [ + "▁stubs", + -14.363866806030272 + ], + [ + "▁Sandi", + -14.36391830444336 + ], + [ + "▁blouses", + -14.363920211791992 + ], + [ + "ESP", + -14.36392593383789 + ], + [ + "▁Bild", + -14.363951683044434 + ], + [ + "▁1838", + -14.364035606384276 + ], + [ + "▁Chemist", + -14.364093780517578 + ], + [ + "▁engrave", + -14.364099502563477 + ], + [ + "▁Roi", + -14.364103317260742 + ], + [ + "▁quarrying", + -14.364175796508787 + ], + [ + "▁HATE", + -14.364192008972168 + ], + [ + "Fixer", + -14.364206314086914 + ], + [ + "▁BPO", + -14.364217758178713 + ], + [ + "mannered", + -14.36423397064209 + ], + [ + "▁dressy", + -14.36423397064209 + ], + [ + "CLU", + -14.36424732208252 + ], + [ + "▁Modul", + -14.364310264587402 + ], + [ + "▁Contributors", + -14.364314079284668 + ], + [ + "Frog", + -14.364334106445312 + ], + [ + "CORN", + -14.36435604095459 + ], + [ + "QUAL", + -14.364383697509766 + ], + [ + "=3", + -14.364469528198242 + ], + [ + "▁vibrational", + -14.364569664001465 + ], + [ + "Aren", + -14.364580154418944 + ], + [ + "▁UTM", + -14.36461067199707 + ], + [ + "▁Chino", + -14.364635467529297 + ], + [ + "Utilize", + -14.36463737487793 + ], + [ + "prepare", + -14.364662170410156 + ], + [ + "043", + -14.36477279663086 + ], + [ + "fronted", + -14.364808082580566 + ], + [ + "▁forego", + -14.364876747131348 + ], + [ + "▁602", + -14.364890098571776 + ], + [ + "Chic", + -14.365008354187012 + ], + [ + "Demo", + -14.365185737609863 + ], + [ + "categories", + -14.365199089050291 + ], + [ + "Container", + -14.36521816253662 + ], + [ + "flood", + -14.365235328674316 + ], + [ + "Greece", + -14.365236282348633 + ], + [ + "regulatory", + -14.36523723602295 + ], + [ + "Bulk", + -14.365239143371582 + ], + [ + "(1)(", + -14.365241050720217 + ], + [ + "Debbie", + -14.365251541137695 + ], + [ + "drol", + -14.36527156829834 + ], + [ + "enau", + -14.36527156829834 + ], + [ + "defence", + -14.365274429321287 + ], + [ + "reflect", + -14.365302085876465 + ], + [ + "carved", + -14.36530590057373 + ], + [ + "Mitchell", + -14.365315437316896 + ], + [ + "ensi", + -14.365325927734377 + ], + [ + "Rescue", + -14.365354537963867 + ], + [ + "▁acquaint", + -14.365368843078612 + ], + [ + "▁Sangam", + -14.365373611450195 + ], + [ + "▁Goldsmiths", + -14.365398406982422 + ], + [ + "BODY", + -14.365464210510254 + ], + [ + "tober", + -14.365480422973633 + ], + [ + "Spiritual", + -14.365485191345217 + ], + [ + "▁Moog", + -14.365581512451172 + ], + [ + "▁Thie", + -14.365614891052246 + ], + [ + "▁Sonora", + -14.365649223327637 + ], + [ + "▁paring", + -14.365694046020508 + ], + [ + "▁EHS", + -14.365730285644531 + ], + [ + "broadcast", + -14.365734100341797 + ], + [ + "▁cookout", + -14.365739822387695 + ], + [ + "▁Presto", + -14.365766525268556 + ], + [ + "▁Alchemist", + -14.3657808303833 + ], + [ + "▁Corcoran", + -14.3657808303833 + ], + [ + "▁GoFundMe", + -14.3657808303833 + ], + [ + "▁McAllister", + -14.3657808303833 + ], + [ + 
"▁Montserrat", + -14.3657808303833 + ], + [ + "▁Vijayawada", + -14.3657808303833 + ], + [ + "▁aggravation", + -14.3657808303833 + ], + [ + "▁episodic", + -14.3657808303833 + ], + [ + "▁Vacancies", + -14.365781784057615 + ], + [ + "▁googling", + -14.365781784057615 + ], + [ + "▁chronology", + -14.365782737731934 + ], + [ + "▁Alpaca", + -14.36578369140625 + ], + [ + "▁Bilbao", + -14.36578369140625 + ], + [ + "▁DISCOUNT", + -14.36578369140625 + ], + [ + "▁glutamate", + -14.365784645080566 + ], + [ + "▁Nakamura", + -14.365785598754885 + ], + [ + "▁Kaitlyn", + -14.3657865524292 + ], + [ + "▁Vacancy", + -14.365787506103516 + ], + [ + "▁Nanaimo", + -14.365790367126465 + ], + [ + "▁detractors", + -14.36579132080078 + ], + [ + "▁Browsing", + -14.365792274475098 + ], + [ + "Consistent", + -14.365793228149414 + ], + [ + "▁inductee", + -14.365793228149414 + ], + [ + "▁Delgado", + -14.36579418182373 + ], + [ + "▁Dulles", + -14.365801811218262 + ], + [ + "▁Graveyard", + -14.365812301635742 + ], + [ + "▁reflexology", + -14.365812301635742 + ], + [ + "▁hairstylist", + -14.36581325531006 + ], + [ + "▁colitis", + -14.365825653076172 + ], + [ + "▁Vettel", + -14.36582851409912 + ], + [ + "▁WHOIS", + -14.365829467773438 + ], + [ + "▁phoning", + -14.365836143493652 + ], + [ + "▁multivariate", + -14.3658447265625 + ], + [ + "▁Bradbury", + -14.36585521697998 + ], + [ + "landscape", + -14.365886688232422 + ], + [ + "▁Keaton", + -14.365904808044434 + ], + [ + "▁Elmwood", + -14.36593532562256 + ], + [ + "▁bothersome", + -14.365954399108888 + ], + [ + "▁recomend", + -14.365957260131836 + ], + [ + "▁gigabytes", + -14.365968704223633 + ], + [ + "▁Sleek", + -14.365984916687012 + ], + [ + "▁Asp", + -14.365985870361328 + ], + [ + "▁Northrop", + -14.366000175476074 + ], + [ + "▁Alek", + -14.36600399017334 + ], + [ + "vacation", + -14.366033554077148 + ], + [ + "▁Loire", + -14.366046905517578 + ], + [ + "2:40", + -14.366053581237791 + ], + [ + "▁Daytime", + -14.366059303283691 + ], + [ + "▁terminus", + -14.36606216430664 + ], + [ + "Neck", + -14.366071701049805 + ], + [ + "▁handicraft", + -14.366082191467283 + ], + [ + "▁Chiropractors", + -14.36608600616455 + ], + [ + "▁Incorporating", + -14.366124153137209 + ], + [ + "▁emirates", + -14.366143226623535 + ], + [ + "▁Arturo", + -14.366202354431152 + ], + [ + "▁compresses", + -14.36623191833496 + ], + [ + "▁lambda", + -14.366240501403809 + ], + [ + "Perry", + -14.366266250610352 + ], + [ + "▁alligators", + -14.366271018981934 + ], + [ + "▁Paladin", + -14.366296768188477 + ], + [ + "▁rowdy", + -14.366296768188477 + ], + [ + "cuit", + -14.366299629211426 + ], + [ + "▁Flori", + -14.366324424743652 + ], + [ + "▁Mura", + -14.366342544555664 + ], + [ + "RMA", + -14.366361618041992 + ], + [ + "▁Journalist", + -14.366371154785156 + ], + [ + "▁*********", + -14.366403579711914 + ], + [ + "▁Koko", + -14.366422653198242 + ], + [ + "▁caulking", + -14.366436958312988 + ], + [ + "▁Purchased", + -14.366439819335938 + ], + [ + "▁Disputes", + -14.36647891998291 + ], + [ + "▁reimagined", + -14.366497993469238 + ], + [ + "Clinic", + -14.366509437561035 + ], + [ + "wanted", + -14.366539001464844 + ], + [ + "▁Rosé", + -14.36659049987793 + ], + [ + "▁reagent", + -14.36664581298828 + ], + [ + "▁Topps", + -14.366658210754396 + ], + [ + "00:2", + -14.366663932800291 + ], + [ + "osaur", + -14.366677284240724 + ], + [ + "▁Racial", + -14.366677284240724 + ], + [ + "▁FILM", + -14.366683959960938 + ], + [ + "▁Coron", + -14.366698265075684 + ], + [ + "▁ridiculed", + -14.366754531860352 + ], + [ + 
"▁pronouncement", + -14.366798400878906 + ], + [ + "Bible", + -14.366827011108398 + ], + [ + "▁plowed", + -14.366847038269045 + ], + [ + "Chamber", + -14.36687183380127 + ], + [ + "▁fabled", + -14.366886138916016 + ], + [ + "▁Aram", + -14.366907119750977 + ], + [ + "▁Bennet", + -14.366915702819824 + ], + [ + "▁Karla", + -14.366945266723633 + ], + [ + "▁sorrows", + -14.366973876953123 + ], + [ + "CBT", + -14.3670015335083 + ], + [ + "▁Yel", + -14.367100715637209 + ], + [ + "▁phosphor", + -14.36710262298584 + ], + [ + "Indoor", + -14.367124557495115 + ], + [ + "▁Tattoos", + -14.367131233215332 + ], + [ + "▁Cocktails", + -14.367241859436035 + ], + [ + "Everyday", + -14.367291450500488 + ], + [ + "▁Finalist", + -14.367313385009766 + ], + [ + "▁Niro", + -14.367342948913574 + ], + [ + "▁pollination", + -14.367348670959473 + ], + [ + "▁dashes", + -14.367501258850098 + ], + [ + "▁delusions", + -14.367591857910156 + ], + [ + "▁infraction", + -14.367594718933104 + ], + [ + "▁looting", + -14.367610931396484 + ], + [ + "▁BEL", + -14.367634773254396 + ], + [ + "▁cairn", + -14.367688179016112 + ], + [ + "ually", + -14.367691040039062 + ], + [ + "9.5%", + -14.367717742919922 + ], + [ + "hopping", + -14.367754936218262 + ], + [ + "▁storming", + -14.367755889892578 + ], + [ + "▁Iconic", + -14.36778736114502 + ], + [ + "▁Mopar", + -14.367788314819336 + ], + [ + "▁kiddo", + -14.367880821228027 + ], + [ + "▁Clans", + -14.367905616760254 + ], + [ + "CIM", + -14.367965698242188 + ], + [ + "▁12.3", + -14.367990493774414 + ], + [ + "▁posses", + -14.367996215820312 + ], + [ + "▁shipper", + -14.368005752563477 + ], + [ + "▁transom", + -14.368024826049805 + ], + [ + "▁Wynne", + -14.368037223815918 + ], + [ + "▁bootable", + -14.368045806884766 + ], + [ + "Antoni", + -14.36809253692627 + ], + [ + "▁Watering", + -14.368115425109863 + ], + [ + "pino", + -14.36815357208252 + ], + [ + "stating", + -14.368168830871582 + ], + [ + "ondi", + -14.36822509765625 + ], + [ + "▁350,000", + -14.368239402770996 + ], + [ + "solute", + -14.368281364440918 + ], + [ + "▁Ricci", + -14.368300437927246 + ], + [ + "CPR", + -14.368322372436523 + ], + [ + "▁Embry", + -14.368330001831056 + ], + [ + "ilda", + -14.368419647216797 + ], + [ + "qq", + -14.368420600891112 + ], + [ + "▁514", + -14.368447303771973 + ], + [ + "▁Pedi", + -14.36846923828125 + ], + [ + "▁Blended", + -14.368525505065918 + ], + [ + "▁roadster", + -14.368544578552246 + ], + [ + "▁yank", + -14.368550300598145 + ], + [ + "513", + -14.368618965148926 + ], + [ + "OPEN", + -14.368656158447266 + ], + [ + "▁glob", + -14.368677139282228 + ], + [ + "▁Linking", + -14.36871337890625 + ], + [ + "▁Cotswolds", + -14.368721961975098 + ], + [ + "▁SRS", + -14.368721961975098 + ], + [ + "▁flavouring", + -14.368728637695312 + ], + [ + "▁.22", + -14.368839263916016 + ], + [ + "royd", + -14.36884307861328 + ], + [ + "%2", + -14.368851661682127 + ], + [ + "▁IGT", + -14.36885929107666 + ], + [ + "▁meshes", + -14.368971824645996 + ], + [ + "▁Hoot", + -14.369095802307127 + ], + [ + "Pirate", + -14.369101524353027 + ], + [ + "0.30", + -14.36915683746338 + ], + [ + "Lei", + -14.369168281555176 + ], + [ + "▁Britten", + -14.369250297546388 + ], + [ + "▁Safa", + -14.369321823120115 + ], + [ + "▁Kost", + -14.369352340698242 + ], + [ + "rme", + -14.369370460510254 + ], + [ + "▁maids", + -14.369450569152832 + ], + [ + "▁Wives", + -14.369463920593262 + ], + [ + "Shane", + -14.369539260864258 + ], + [ + "▁craved", + -14.36954402923584 + ], + [ + "▁Eastside", + -14.369574546813965 + ], + [ + "▁Mello", + 
-14.369586944580078 + ], + [ + "uum", + -14.36961841583252 + ], + [ + "▁Advisers", + -14.369680404663086 + ], + [ + "COL", + -14.36970329284668 + ], + [ + "▁Introduced", + -14.36972999572754 + ], + [ + "▁bookmaker", + -14.3697509765625 + ], + [ + "erary", + -14.369751930236816 + ], + [ + "▁deadlift", + -14.369895935058594 + ], + [ + "923", + -14.369915008544922 + ], + [ + "▁grantees", + -14.369969367980955 + ], + [ + "adora", + -14.369997024536133 + ], + [ + "backup", + -14.370033264160156 + ], + [ + "▁staked", + -14.370083808898926 + ], + [ + "▁FAI", + -14.370104789733888 + ], + [ + "PDP", + -14.370142936706545 + ], + [ + "▁Khal", + -14.370162010192873 + ], + [ + "▁Lotte", + -14.370206832885742 + ], + [ + "STP", + -14.370211601257324 + ], + [ + "▁Slag", + -14.370325088500977 + ], + [ + "▁REITs", + -14.370447158813477 + ], + [ + "▁strolled", + -14.370490074157717 + ], + [ + "rachi", + -14.370513916015623 + ], + [ + "▁Edelman", + -14.370580673217772 + ], + [ + "ahara", + -14.37058162689209 + ], + [ + "▁murphy", + -14.370590209960938 + ], + [ + "mick", + -14.370596885681152 + ], + [ + "akar", + -14.370635032653809 + ], + [ + "▁flashlights", + -14.370655059814451 + ], + [ + "dairy", + -14.370734214782717 + ], + [ + "▁gush", + -14.37087345123291 + ], + [ + "▁Physio", + -14.370949745178224 + ], + [ + "▁webshop", + -14.371024131774902 + ], + [ + "plin", + -14.371045112609863 + ], + [ + "▁Stal", + -14.37105941772461 + ], + [ + "Indonesia", + -14.371063232421877 + ], + [ + "▁fruitless", + -14.37108039855957 + ], + [ + "haba", + -14.371123313903809 + ], + [ + "▁repress", + -14.37112522125244 + ], + [ + "Historical", + -14.37119197845459 + ], + [ + "▁pharmaco", + -14.371217727661133 + ], + [ + "Transaction", + -14.371253967285156 + ], + [ + "vegetarian", + -14.371291160583496 + ], + [ + "▁YAY", + -14.371298789978027 + ], + [ + "Beijing", + -14.371332168579102 + ], + [ + "Swedish", + -14.371332168579102 + ], + [ + "Oklahoma", + -14.371334075927734 + ], + [ + "supplied", + -14.37133502960205 + ], + [ + "YER", + -14.371349334716797 + ], + [ + "Conserv", + -14.371397018432615 + ], + [ + "Eddie", + -14.3714017868042 + ], + [ + "▁CPL", + -14.371405601501465 + ], + [ + "oui", + -14.371463775634766 + ], + [ + "ochi", + -14.371495246887209 + ], + [ + "▁valiant", + -14.371496200561523 + ], + [ + "▁chiles", + -14.371501922607422 + ], + [ + "Zhang", + -14.371530532836914 + ], + [ + "METHODS", + -14.371545791625977 + ], + [ + "strange", + -14.371575355529783 + ], + [ + "▁Bax", + -14.37158203125 + ], + [ + "▁balk", + -14.37158489227295 + ], + [ + "▁USING", + -14.37159824371338 + ], + [ + "YN", + -14.37163257598877 + ], + [ + "▁Unbelievable", + -14.3716459274292 + ], + [ + "1′′", + -14.371649742126465 + ], + [ + "Berry", + -14.371652603149414 + ], + [ + "▁Melodi", + -14.37168025970459 + ], + [ + "GCC", + -14.37169075012207 + ], + [ + "504", + -14.371699333190918 + ], + [ + "▁Amplifier", + -14.371732711791992 + ], + [ + "▁Explosion", + -14.371732711791992 + ], + [ + "▁Lymphoma", + -14.371732711791992 + ], + [ + "▁Montpellier", + -14.371732711791992 + ], + [ + "▁Pikachu", + -14.371732711791992 + ], + [ + "▁Uttarakhand", + -14.371732711791992 + ], + [ + "▁distraught", + -14.371732711791992 + ], + [ + "▁paparazzi", + -14.371732711791992 + ], + [ + "▁tetracycline", + -14.371732711791992 + ], + [ + "▁uninhabited", + -14.371732711791992 + ], + [ + "Dandelion", + -14.371733665466309 + ], + [ + "▁Yokohama", + -14.371733665466309 + ], + [ + "▁cranial", + -14.371733665466309 + ], + [ + "▁fancied", + -14.371733665466309 + ], 
+ [ + "▁homogenous", + -14.371733665466309 + ], + [ + "▁Creighton", + -14.371734619140623 + ], + [ + "▁luminaire", + -14.371734619140623 + ], + [ + "▁ENTIRE", + -14.371736526489258 + ], + [ + "▁scathing", + -14.371736526489258 + ], + [ + "▁ramekin", + -14.371737480163574 + ], + [ + "▁Glucose", + -14.371744155883787 + ], + [ + "▁Spitfire", + -14.371747970581056 + ], + [ + "▁Superhero", + -14.371748924255373 + ], + [ + "▁Musée", + -14.37175178527832 + ], + [ + "▁interstellar", + -14.37175178527832 + ], + [ + "▁trifle", + -14.371759414672852 + ], + [ + "Tennis", + -14.371761322021484 + ], + [ + "▁Gumtree", + -14.371761322021484 + ], + [ + "▁Ursula", + -14.371761322021484 + ], + [ + "▁Centaur", + -14.371769905090332 + ], + [ + "▁bluish", + -14.371773719787598 + ], + [ + "▁Wrapped", + -14.37177848815918 + ], + [ + "▁meniscus", + -14.37178897857666 + ], + [ + "▁renegotiate", + -14.371792793273926 + ], + [ + "▁photojournalist", + -14.37179946899414 + ], + [ + "▁cloakroom", + -14.371809005737305 + ], + [ + "▁tempura", + -14.37181568145752 + ], + [ + "▁arcane", + -14.371820449829102 + ], + [ + "▁encroachment", + -14.371825218200684 + ], + [ + "▁Prophets", + -14.37184715270996 + ], + [ + "▁Typing", + -14.371869087219238 + ], + [ + "▁Scully", + -14.371882438659668 + ], + [ + "▁trowel", + -14.371885299682615 + ], + [ + "diver", + -14.3718900680542 + ], + [ + "▁bullnose", + -14.371912002563477 + ], + [ + "▁5,500", + -14.371914863586426 + ], + [ + "▁#16", + -14.371923446655272 + ], + [ + "Delegate", + -14.371926307678224 + ], + [ + "▁SSO", + -14.371935844421388 + ], + [ + "▁Martinique", + -14.371943473815918 + ], + [ + "▁disbanded", + -14.37196445465088 + ], + [ + "Descarga", + -14.371980667114258 + ], + [ + "▁uninformed", + -14.371991157531738 + ], + [ + "iver", + -14.371994018554688 + ], + [ + "NQ", + -14.37199592590332 + ], + [ + "raga", + -14.37200164794922 + ], + [ + "▁welds", + -14.372010231018066 + ], + [ + "▁Streak", + -14.372113227844238 + ], + [ + "▁quash", + -14.372114181518556 + ], + [ + "▁dyno", + -14.37214183807373 + ], + [ + "▁hurling", + -14.372217178344728 + ], + [ + "▁reactionary", + -14.372268676757812 + ], + [ + "▁iMessage", + -14.37234878540039 + ], + [ + "▁Hala", + -14.372394561767578 + ], + [ + "SSM", + -14.372397422790527 + ], + [ + "▁(184", + -14.372417449951172 + ], + [ + "▁windmill", + -14.37243366241455 + ], + [ + "▁frock", + -14.372493743896484 + ], + [ + "▁Pyro", + -14.37252712249756 + ], + [ + "▁Onsite", + -14.372530937194824 + ], + [ + "▁Nightstand", + -14.37254810333252 + ], + [ + "sborough", + -14.372562408447266 + ], + [ + "▁Ghe", + -14.372577667236328 + ], + [ + "▁LEGAL", + -14.372601509094238 + ], + [ + "▁Subtle", + -14.372618675231934 + ], + [ + "flush", + -14.372660636901855 + ], + [ + "LUX", + -14.372665405273438 + ], + [ + "1969", + -14.372688293457031 + ], + [ + "▁sunscreens", + -14.372762680053713 + ], + [ + "Republicans", + -14.372769355773926 + ], + [ + "▁wealthier", + -14.372773170471191 + ], + [ + "▁treehouse", + -14.37280559539795 + ], + [ + "▁unkind", + -14.37282371520996 + ], + [ + "▁FRIDAY", + -14.372838020324709 + ], + [ + "▁combi", + -14.372883796691896 + ], + [ + "▁ETL", + -14.37292766571045 + ], + [ + "▁rearranged", + -14.373044967651367 + ], + [ + "▁Tapes", + -14.37306785583496 + ], + [ + "sensing", + -14.373080253601074 + ], + [ + "▁Lindy", + -14.37309741973877 + ], + [ + "▁Recruit", + -14.373125076293944 + ], + [ + "▁Marques", + -14.373135566711426 + ], + [ + "▁gatekeeper", + -14.373162269592283 + ], + [ + "▁$56", + -14.373167037963867 + ], + [ + 
"▁Novell", + -14.373167991638184 + ], + [ + "▁fewest", + -14.373172760009766 + ], + [ + "076", + -14.373177528381348 + ], + [ + "izzy", + -14.373196601867676 + ], + [ + "▁Watertown", + -14.373209953308104 + ], + [ + "▁brickwork", + -14.373215675354004 + ], + [ + "▁Clio", + -14.373223304748535 + ], + [ + "dawn", + -14.373238563537598 + ], + [ + "Significantly", + -14.373246192932127 + ], + [ + "▁clockwork", + -14.373263359069824 + ], + [ + "efe", + -14.373291969299316 + ], + [ + "▁Recap", + -14.373311042785645 + ], + [ + "▁marshall", + -14.37337875366211 + ], + [ + "▁geniuses", + -14.373435974121094 + ], + [ + "▁backlink", + -14.37344455718994 + ], + [ + "▁awakens", + -14.373465538024902 + ], + [ + "▁settee", + -14.373506546020508 + ], + [ + "▁Cavalier", + -14.373520851135254 + ], + [ + "york", + -14.373538970947266 + ], + [ + "▁Lula", + -14.37354850769043 + ], + [ + "▁Smithfield", + -14.37358570098877 + ], + [ + "▁AFS", + -14.373607635498049 + ], + [ + "OVA", + -14.373697280883787 + ], + [ + "ssus", + -14.373867988586426 + ], + [ + "▁Tash", + -14.373876571655272 + ], + [ + "▁reclaiming", + -14.373888969421388 + ], + [ + "▁Holtz", + -14.373910903930664 + ], + [ + "5100", + -14.373939514160156 + ], + [ + "3+", + -14.373946189880373 + ], + [ + "68)", + -14.373958587646484 + ], + [ + "Wooden", + -14.373973846435549 + ], + [ + "▁jobsite", + -14.374009132385254 + ], + [ + "▁conceptually", + -14.374038696289062 + ], + [ + "▁Stash", + -14.37404441833496 + ], + [ + "▁pinky", + -14.37404727935791 + ], + [ + "▁Blooms", + -14.374073028564451 + ], + [ + "UNCH", + -14.374074935913086 + ], + [ + "Publish", + -14.374126434326172 + ], + [ + "▁Gentiles", + -14.374134063720703 + ], + [ + "▁newscast", + -14.374138832092283 + ], + [ + "▁laurels", + -14.374200820922852 + ], + [ + "SMEs", + -14.374226570129396 + ], + [ + "Pit", + -14.374281883239746 + ], + [ + "PATH", + -14.374296188354492 + ], + [ + "rith", + -14.374322891235352 + ], + [ + "▁Yal", + -14.374363899230955 + ], + [ + "PRINT", + -14.374531745910645 + ], + [ + "Shine", + -14.374564170837402 + ], + [ + "▁Mooney", + -14.374567031860352 + ], + [ + "▁Cracked", + -14.3745698928833 + ], + [ + "uwen", + -14.374571800231934 + ], + [ + "reduce", + -14.37458038330078 + ], + [ + "▁checkbook", + -14.374590873718262 + ], + [ + "▁Followers", + -14.37459945678711 + ], + [ + "▁synch", + -14.374621391296388 + ], + [ + "▁MDR", + -14.37465763092041 + ], + [ + "Porter", + -14.374710083007812 + ], + [ + "▁Blower", + -14.374720573425291 + ], + [ + "▁Emotions", + -14.374726295471191 + ], + [ + "nage", + -14.374757766723633 + ], + [ + "avour", + -14.374774932861328 + ], + [ + "▁Broome", + -14.374826431274414 + ], + [ + "(8):", + -14.37482738494873 + ], + [ + "tumor", + -14.37483024597168 + ], + [ + "Belle", + -14.374832153320312 + ], + [ + "FILL", + -14.374881744384766 + ], + [ + "▁biopharmaceutic", + -14.374897003173828 + ], + [ + "▁bender", + -14.374958992004396 + ], + [ + "▁Thiru", + -14.375028610229492 + ], + [ + "Herb", + -14.37509059906006 + ], + [ + "▁Pairing", + -14.37509536743164 + ], + [ + "▁Arca", + -14.37514877319336 + ], + [ + "judgmental", + -14.375184059143066 + ], + [ + "urious", + -14.375214576721191 + ], + [ + "▁Rhe", + -14.37524127960205 + ], + [ + "▁Kazu", + -14.375328063964844 + ], + [ + "▁Gaud", + -14.375341415405272 + ], + [ + "pris", + -14.37534236907959 + ], + [ + "▁triceps", + -14.37534523010254 + ], + [ + "▁skeptic", + -14.375370979309082 + ], + [ + "▁Babel", + -14.375420570373535 + ], + [ + "transit", + -14.375443458557127 + ], + [ + 
"▁socialise", + -14.375446319580078 + ], + [ + "▁2005;", + -14.375471115112305 + ], + [ + "▁overeating", + -14.375486373901367 + ], + [ + "▁worsens", + -14.375514030456545 + ], + [ + "▁Hann", + -14.375560760498049 + ], + [ + "▁gobble", + -14.375645637512209 + ], + [ + "Doo", + -14.375715255737305 + ], + [ + "▁shareware", + -14.375744819641112 + ], + [ + "3,500", + -14.375776290893556 + ], + [ + "PHC", + -14.375778198242188 + ], + [ + "▁JAM", + -14.3757963180542 + ], + [ + "▁ramped", + -14.3759183883667 + ], + [ + "▁Bling", + -14.37597942352295 + ], + [ + "rood", + -14.375988960266112 + ], + [ + "▁Upland", + -14.37602710723877 + ], + [ + "▁Elegance", + -14.376129150390623 + ], + [ + "▁Manganese", + -14.376143455505373 + ], + [ + "▁Nair", + -14.376154899597168 + ], + [ + "▁battleship", + -14.376181602478027 + ], + [ + "papa", + -14.37619972229004 + ], + [ + "Jill", + -14.376328468322754 + ], + [ + "adequate", + -14.376364707946776 + ], + [ + "vinyl", + -14.376391410827637 + ], + [ + "▁£17", + -14.376415252685549 + ], + [ + "▁WORD", + -14.376431465148926 + ], + [ + "Quad", + -14.376440048217772 + ], + [ + "intentioned", + -14.37645435333252 + ], + [ + "▁chopsticks", + -14.376476287841797 + ], + [ + "definitely", + -14.376507759094238 + ], + [ + "PIX", + -14.3765869140625 + ], + [ + "718", + -14.376721382141112 + ], + [ + "▁PIT", + -14.376769065856934 + ], + [ + "▁SGS", + -14.376829147338867 + ], + [ + "CHF", + -14.376900672912598 + ], + [ + "3:20", + -14.377009391784668 + ], + [ + "▁Ack", + -14.377015113830566 + ], + [ + "NAP", + -14.377155303955078 + ], + [ + "▁whitelist", + -14.37717056274414 + ], + [ + "Korea", + -14.377195358276367 + ], + [ + "▁Gamers", + -14.377280235290527 + ], + [ + "▁10:15", + -14.377294540405272 + ], + [ + "▁cantilever", + -14.37732219696045 + ], + [ + "▁apprehend", + -14.377323150634766 + ], + [ + "rested", + -14.37734031677246 + ], + [ + "▁Pek", + -14.37734031677246 + ], + [ + "▁Celebrations", + -14.377362251281738 + ], + [ + "▁midwest", + -14.377386093139648 + ], + [ + "captain", + -14.37743091583252 + ], + [ + "Hammer", + -14.377447128295898 + ], + [ + "Conservative", + -14.37745475769043 + ], + [ + "McDonald", + -14.37746238708496 + ], + [ + "Fifty", + -14.377463340759276 + ], + [ + "Manufacturing", + -14.377466201782228 + ], + [ + "Meditation", + -14.377469062805176 + ], + [ + "frozen", + -14.377480506896973 + ], + [ + "▁aloft", + -14.377551078796388 + ], + [ + "Olympic", + -14.377561569213867 + ], + [ + "fought", + -14.377571105957031 + ], + [ + "slightly", + -14.377593994140623 + ], + [ + "counsel", + -14.377605438232422 + ], + [ + "Adjustable", + -14.377606391906738 + ], + [ + "cero", + -14.37761116027832 + ], + [ + "Birthday", + -14.377663612365724 + ], + [ + "▁518", + -14.377707481384276 + ], + [ + "ATTENTION", + -14.377720832824709 + ], + [ + "▁Aperture", + -14.377720832824709 + ], + [ + "▁Feminine", + -14.377720832824709 + ], + [ + "▁Jocelyn", + -14.377720832824709 + ], + [ + "▁Resonance", + -14.377720832824709 + ], + [ + "▁Telluride", + -14.377720832824709 + ], + [ + "▁Thoroughbred", + -14.377720832824709 + ], + [ + "▁cultivator", + -14.377720832824709 + ], + [ + "▁inexplicably", + -14.377720832824709 + ], + [ + "▁lethargy", + -14.377720832824709 + ], + [ + "▁mitzvah", + -14.377720832824709 + ], + [ + "▁resumption", + -14.377720832824709 + ], + [ + "▁Candace", + -14.377721786499023 + ], + [ + "▁ANOTHER", + -14.377724647521973 + ], + [ + "▁Walsall", + -14.377724647521973 + ], + [ + "▁Kearney", + -14.377732276916504 + ], + [ + "▁Pagoda", + 
-14.37773609161377 + ], + [ + "▁Versus", + -14.37773609161377 + ], + [ + "▁Nestlé", + -14.377737045288086 + ], + [ + "▁confectionery", + -14.37773895263672 + ], + [ + "▁Chinook", + -14.377742767333984 + ], + [ + "▁Stansted", + -14.377742767333984 + ], + [ + "▁discernible", + -14.37774658203125 + ], + [ + "Tokyo", + -14.377747535705566 + ], + [ + "▁Nespresso", + -14.37775421142578 + ], + [ + "▁bovine", + -14.37775421142578 + ], + [ + "▁Machado", + -14.377755165100098 + ], + [ + "Blogging", + -14.377758026123049 + ], + [ + "▁botany", + -14.377767562866213 + ], + [ + "▁SolidWorks", + -14.37777614593506 + ], + [ + "▁caving", + -14.377777099609377 + ], + [ + "▁Miramar", + -14.377798080444336 + ], + [ + "▁Solano", + -14.377820014953612 + ], + [ + "▁unrecognized", + -14.377851486206056 + ], + [ + "▁rearranging", + -14.37790870666504 + ], + [ + "▁Neighbour", + -14.37791347503662 + ], + [ + "Toss", + -14.37791919708252 + ], + [ + "▁Philosophical", + -14.37792682647705 + ], + [ + "▁forefoot", + -14.377935409545898 + ], + [ + "NIGHT", + -14.37794303894043 + ], + [ + "▁Rudolf", + -14.37795066833496 + ], + [ + "▁ombre", + -14.377954483032228 + ], + [ + "Effects", + -14.37795639038086 + ], + [ + "▁Assorted", + -14.3779935836792 + ], + [ + "▁fireball", + -14.378009796142578 + ], + [ + "▁Deng", + -14.378024101257324 + ], + [ + "unu", + -14.378026962280272 + ], + [ + "1914", + -14.378042221069336 + ], + [ + "▁Simplify", + -14.378071784973145 + ], + [ + "▁Suffering", + -14.378073692321776 + ], + [ + "▁whiskies", + -14.378073692321776 + ], + [ + "▁milieu", + -14.378084182739258 + ], + [ + "▁Spindle", + -14.37811279296875 + ], + [ + "▁popsicle", + -14.378159523010254 + ], + [ + "▁hiccup", + -14.378161430358888 + ], + [ + "amount", + -14.378276824951172 + ], + [ + "VERSE", + -14.378284454345703 + ], + [ + "▁liberalism", + -14.378291130065918 + ], + [ + "▁reintroduced", + -14.378300666809082 + ], + [ + "▁signatory", + -14.378326416015623 + ], + [ + "▁Wildcat", + -14.378355979919434 + ], + [ + "Correction", + -14.378358840942385 + ], + [ + "▁Wiener", + -14.378376007080078 + ], + [ + "IFIED", + -14.37843418121338 + ], + [ + "DENT", + -14.37843894958496 + ], + [ + "▁bandits", + -14.37845230102539 + ], + [ + "PPI", + -14.37846851348877 + ], + [ + "vila", + -14.378509521484377 + ], + [ + "Sisi", + -14.378520965576172 + ], + [ + "measured", + -14.378536224365234 + ], + [ + "Armed", + -14.378560066223145 + ], + [ + "▁Mauna", + -14.37856388092041 + ], + [ + "▁Torino", + -14.378568649291992 + ], + [ + "▁Sully", + -14.378575325012209 + ], + [ + "▁redirection", + -14.378586769104004 + ], + [ + "▁repressed", + -14.378620147705078 + ], + [ + "▁spillage", + -14.378643035888672 + ], + [ + "▁Racism", + -14.378682136535645 + ], + [ + "▁seamstress", + -14.378697395324709 + ], + [ + "▁insulator", + -14.378702163696287 + ], + [ + "▁Andrei", + -14.378738403320312 + ], + [ + "▁stalked", + -14.378786087036133 + ], + [ + "owl", + -14.37881565093994 + ], + [ + "quier", + -14.378835678100586 + ], + [ + "▁ASOS", + -14.378835678100586 + ], + [ + "▁dung", + -14.378849029541016 + ], + [ + "▁downplay", + -14.37885570526123 + ], + [ + "▁reverberate", + -14.378868103027344 + ], + [ + "▁Minion", + -14.378887176513672 + ], + [ + "willing", + -14.37890911102295 + ], + [ + "▁Marisa", + -14.37890911102295 + ], + [ + "▁Xian", + -14.37904453277588 + ], + [ + "▁plumb", + -14.379056930541992 + ], + [ + "container", + -14.379064559936523 + ], + [ + "▁Mille", + -14.379084587097168 + ], + [ + "XM", + -14.379132270812988 + ], + [ + "▁blacklisted", + 
-14.379144668579102 + ], + [ + "▁foi", + -14.37915325164795 + ], + [ + "▁1833", + -14.379164695739746 + ], + [ + "▁(250", + -14.379191398620604 + ], + [ + "▁tarps", + -14.3792724609375 + ], + [ + "▁Concerns", + -14.379344940185549 + ], + [ + "rao", + -14.379345893859863 + ], + [ + "Protecting", + -14.379347801208496 + ], + [ + "▁subgroups", + -14.37938117980957 + ], + [ + "BCC", + -14.37940502166748 + ], + [ + "mould", + -14.37946891784668 + ], + [ + "slim", + -14.379549026489258 + ], + [ + "▁commando", + -14.379558563232422 + ], + [ + "▁compactor", + -14.379590034484863 + ], + [ + "▁FCS", + -14.379687309265137 + ], + [ + "▁7/8\"", + -14.379691123962402 + ], + [ + "mundo", + -14.37969207763672 + ], + [ + "Candy", + -14.379701614379885 + ], + [ + "▁Varun", + -14.379708290100098 + ], + [ + "FCA", + -14.379715919494627 + ], + [ + "Mn", + -14.379761695861816 + ], + [ + "propyl", + -14.379834175109863 + ], + [ + "▁Mew", + -14.37985610961914 + ], + [ + "▁DEMO", + -14.379892349243164 + ], + [ + "▁carpal", + -14.379932403564451 + ], + [ + "▁spatially", + -14.37997817993164 + ], + [ + "▁Sabi", + -14.38003158569336 + ], + [ + "Schu", + -14.380091667175291 + ], + [ + "owen", + -14.38014316558838 + ], + [ + "Cheer", + -14.380342483520508 + ], + [ + "▁conventionally", + -14.38034725189209 + ], + [ + "RJ", + -14.380349159240724 + ], + [ + "▁Celeb", + -14.380367279052734 + ], + [ + "▁OPS", + -14.380443572998049 + ], + [ + "rty", + -14.380450248718262 + ], + [ + "kota", + -14.380454063415527 + ], + [ + "▁Giu", + -14.38049030303955 + ], + [ + "alise", + -14.380560874938965 + ], + [ + "▁Wilk", + -14.380576133728027 + ], + [ + "cek", + -14.38058090209961 + ], + [ + "▁Yeti", + -14.38058090209961 + ], + [ + "▁outcrop", + -14.380590438842772 + ], + [ + "▁CNG", + -14.380634307861328 + ], + [ + "EMI", + -14.380698204040527 + ], + [ + "▁DISC", + -14.380879402160645 + ], + [ + "▁OBC", + -14.380882263183594 + ], + [ + "Payments", + -14.380904197692873 + ], + [ + "▁fringed", + -14.380910873413086 + ], + [ + "chner", + -14.38103199005127 + ], + [ + "▁(95%", + -14.381046295166016 + ], + [ + "raise", + -14.38107967376709 + ], + [ + "rdi", + -14.381322860717772 + ], + [ + "*****", + -14.381364822387695 + ], + [ + "Knead", + -14.381385803222656 + ], + [ + "▁Rohit", + -14.381396293640137 + ], + [ + "▁TPU", + -14.381410598754885 + ], + [ + "▁ruck", + -14.381418228149414 + ], + [ + "EAST", + -14.381452560424805 + ], + [ + "▁(150", + -14.381467819213867 + ], + [ + "coff", + -14.381507873535156 + ], + [ + "addling", + -14.381668090820312 + ], + [ + "dna", + -14.38185691833496 + ], + [ + "Levi", + -14.381880760192873 + ], + [ + "▁Scrape", + -14.381912231445312 + ], + [ + "graphics", + -14.381925582885742 + ], + [ + "▁wayward", + -14.381976127624512 + ], + [ + "▁Progressives", + -14.38200855255127 + ], + [ + "▁papal", + -14.38217830657959 + ], + [ + "▁Gila", + -14.382196426391602 + ], + [ + "▁crooks", + -14.382254600524902 + ], + [ + "▁CBP", + -14.382262229919434 + ], + [ + "▁Origami", + -14.382304191589355 + ], + [ + "▁springing", + -14.382320404052734 + ], + [ + "oscope", + -14.382352828979492 + ], + [ + "awat", + -14.382405281066896 + ], + [ + "▁principled", + -14.382412910461426 + ], + [ + "▁remittances", + -14.382463455200195 + ], + [ + "stitute", + -14.382527351379396 + ], + [ + "transaction", + -14.382568359375 + ], + [ + "ukka", + -14.382577896118164 + ], + [ + "emble", + -14.382640838623049 + ], + [ + "▁Ghaz", + -14.38264274597168 + ], + [ + "▁Consistency", + -14.38274383544922 + ], + [ + "▁scooping", + 
-14.38277530670166 + ], + [ + "▁Dick", + -14.382932662963867 + ], + [ + "▁Recharge", + -14.382960319519045 + ], + [ + "▁IPCC", + -14.38304615020752 + ], + [ + "▁Desserts", + -14.383073806762695 + ], + [ + "▁Rua", + -14.383111000061035 + ], + [ + "▁Newmarket", + -14.383146286010742 + ], + [ + "▁Kazan", + -14.383196830749512 + ], + [ + "zumi", + -14.38323974609375 + ], + [ + "(9):", + -14.383317947387695 + ], + [ + "urdy", + -14.383330345153809 + ], + [ + "▁Fido", + -14.38337230682373 + ], + [ + "▁RHS", + -14.383386611938477 + ], + [ + "▁heavyweights", + -14.383424758911133 + ], + [ + "erge", + -14.38348388671875 + ], + [ + "▁complicit", + -14.383490562438965 + ], + [ + "Surprise", + -14.383508682250977 + ], + [ + "▁Banco", + -14.38353157043457 + ], + [ + "Carrie", + -14.38357925415039 + ], + [ + "Negative", + -14.383621215820312 + ], + [ + "altitude", + -14.383628845214844 + ], + [ + "Olivia", + -14.38363265991211 + ], + [ + "▁Boring", + -14.38364028930664 + ], + [ + "▁fic", + -14.383641242980955 + ], + [ + "tailored", + -14.38364315032959 + ], + [ + "▁Incredibly", + -14.383644104003906 + ], + [ + "Barcelona", + -14.383648872375488 + ], + [ + "Supplier", + -14.383660316467283 + ], + [ + "Granite", + -14.38366985321045 + ], + [ + "Racing", + -14.383670806884766 + ], + [ + "Efficient", + -14.383674621582031 + ], + [ + "jong", + -14.38369846343994 + ], + [ + "fraud", + -14.383713722229004 + ], + [ + "Trends", + -14.383726119995115 + ], + [ + "▁Statutes", + -14.383737564086914 + ], + [ + "▁BECAUSE", + -14.383745193481444 + ], + [ + "▁Chhattisgarh", + -14.383745193481444 + ], + [ + "▁Collingwood", + -14.383745193481444 + ], + [ + "▁EQUIPMENT", + -14.383745193481444 + ], + [ + "▁Lourdes", + -14.383745193481444 + ], + [ + "▁acetaminophen", + -14.383745193481444 + ], + [ + "▁compensatory", + -14.383745193481444 + ], + [ + "▁idiosyncratic", + -14.383745193481444 + ], + [ + "▁impassioned", + -14.383745193481444 + ], + [ + "▁lopsided", + -14.383745193481444 + ], + [ + "▁melancholic", + -14.383745193481444 + ], + [ + "▁procrastinating", + -14.383745193481444 + ], + [ + "▁résumé", + -14.383745193481444 + ], + [ + "▁secretariat", + -14.383746147155762 + ], + [ + "▁Yerevan", + -14.383747100830078 + ], + [ + "▁Afraid", + -14.383748054504396 + ], + [ + "▁Grapevine", + -14.383748054504396 + ], + [ + "▁Schaefer", + -14.383748054504396 + ], + [ + "▁Estuary", + -14.383749008178713 + ], + [ + "▁Yamamoto", + -14.383749008178713 + ], + [ + "▁Aggressive", + -14.383749961853027 + ], + [ + "▁necrosis", + -14.383749961853027 + ], + [ + "▁Battersea", + -14.383750915527344 + ], + [ + "▁Cayenne", + -14.383750915527344 + ], + [ + "▁QUICK", + -14.383750915527344 + ], + [ + "▁horrid", + -14.383750915527344 + ], + [ + "▁(877)", + -14.38375186920166 + ], + [ + "▁DELHI", + -14.38375186920166 + ], + [ + "▁Metabolic", + -14.38375186920166 + ], + [ + "▁shoving", + -14.38375186920166 + ], + [ + "▁Antoinette", + -14.383753776550291 + ], + [ + "▁Zazzle", + -14.383756637573242 + ], + [ + "▁Henrietta", + -14.383758544921877 + ], + [ + "▁kinematic", + -14.383761405944824 + ], + [ + "▁admissible", + -14.383763313293455 + ], + [ + "▁Bloomsbury", + -14.383764266967772 + ], + [ + "▁curative", + -14.383767127990724 + ], + [ + "▁Gasoline", + -14.383774757385254 + ], + [ + "▁alluvial", + -14.383790016174316 + ], + [ + "▁immutable", + -14.383793830871582 + ], + [ + "▁unfathomable", + -14.383793830871582 + ], + [ + "▁predeceased", + -14.383801460266112 + ], + [ + "▁Rambler", + -14.383814811706545 + ], + [ + "▁variegated", + -14.38382339477539 + 
], + [ + "▁lightsaber", + -14.383824348449709 + ], + [ + "▁Cicero", + -14.383832931518556 + ], + [ + "▁boxy", + -14.38383674621582 + ], + [ + "▁FLASH", + -14.383840560913086 + ], + [ + "▁quay", + -14.38384246826172 + ], + [ + "▁Gorsuch", + -14.383843421936035 + ], + [ + "▁elasticated", + -14.383853912353516 + ], + [ + "riders", + -14.383858680725098 + ], + [ + "Newsletter", + -14.383861541748049 + ], + [ + "▁remanufactured", + -14.383867263793944 + ], + [ + "Digi", + -14.383868217468262 + ], + [ + "jic", + -14.383962631225586 + ], + [ + "▁Dota", + -14.383968353271484 + ], + [ + "▁Allocation", + -14.383980751037598 + ], + [ + "▁walleye", + -14.383991241455078 + ], + [ + "extended", + -14.38400173187256 + ], + [ + "▁OpenOffice", + -14.384013175964355 + ], + [ + "▁Orissa", + -14.38403034210205 + ], + [ + "▁Denial", + -14.384076118469238 + ], + [ + "bulk", + -14.384093284606934 + ], + [ + "▁Kanpur", + -14.384095191955566 + ], + [ + "▁KISS", + -14.38412857055664 + ], + [ + "nvidia", + -14.38417625427246 + ], + [ + "sightedness", + -14.384190559387209 + ], + [ + "vault", + -14.384204864501951 + ], + [ + "▁Kmart", + -14.3842191696167 + ], + [ + "▁Coldwell", + -14.384222030639648 + ], + [ + "▁biofilm", + -14.384259223937988 + ], + [ + "pso", + -14.384288787841797 + ], + [ + "▁milkshake", + -14.384307861328123 + ], + [ + "▁frothy", + -14.384358406066896 + ], + [ + "Plugin", + -14.38436222076416 + ], + [ + "tourist", + -14.384364128112791 + ], + [ + "▁refurbishing", + -14.384376525878906 + ], + [ + "▁Babylonian", + -14.384377479553224 + ], + [ + "Poland", + -14.38440227508545 + ], + [ + "▁HANA", + -14.384435653686523 + ], + [ + "KIT", + -14.384440422058104 + ], + [ + "▁Cooktop", + -14.384459495544434 + ], + [ + "▁censored", + -14.384485244750977 + ], + [ + "▁stuttering", + -14.384485244750977 + ], + [ + "976", + -14.384501457214355 + ], + [ + "▁Yuki", + -14.384532928466797 + ], + [ + "▁FCP", + -14.384550094604492 + ], + [ + "▁7500", + -14.384769439697266 + ], + [ + "Automate", + -14.384811401367188 + ], + [ + "▁535", + -14.384818077087402 + ], + [ + "▁VOTE", + -14.384820938110352 + ], + [ + "▁precipitate", + -14.3848295211792 + ], + [ + "068", + -14.384847640991213 + ], + [ + "Than", + -14.384864807128906 + ], + [ + "barn", + -14.384883880615234 + ], + [ + "▁Ditto", + -14.384888648986816 + ], + [ + "▁MAGIC", + -14.384900093078612 + ], + [ + "▁Yeo", + -14.384913444519045 + ], + [ + "▁inhumane", + -14.384936332702637 + ], + [ + "▁Janine", + -14.385026931762695 + ], + [ + "▁boarders", + -14.385028839111328 + ], + [ + "▁apologised", + -14.385052680969238 + ], + [ + "dvd", + -14.385064125061035 + ], + [ + "▁ADM", + -14.385087013244627 + ], + [ + "▁lingered", + -14.385119438171388 + ], + [ + "▁Renter", + -14.385149002075195 + ], + [ + "▁looted", + -14.3851900100708 + ], + [ + "▁dived", + -14.385214805603027 + ], + [ + "mailing", + -14.385252952575684 + ], + [ + "▁Committed", + -14.385346412658691 + ], + [ + "ambul", + -14.385347366333008 + ], + [ + "Numbers", + -14.385355949401855 + ], + [ + "▁yep", + -14.38545036315918 + ], + [ + "▁Henan", + -14.38548469543457 + ], + [ + "▁cheerfully", + -14.385513305664062 + ], + [ + "▁buttoned", + -14.3855562210083 + ], + [ + "cookie", + -14.385589599609377 + ], + [ + "▁capitalizing", + -14.385655403137209 + ], + [ + "▁Dire", + -14.385767936706545 + ], + [ + "ported", + -14.385787010192873 + ], + [ + "▁MMR", + -14.385808944702148 + ], + [ + "anum", + -14.385869979858398 + ], + [ + "▁limousines", + -14.38593864440918 + ], + [ + "▁smuggle", + -14.385939598083496 + ], + [ 
+ "anje", + -14.385964393615724 + ], + [ + "▁Logical", + -14.385979652404783 + ], + [ + "lico", + -14.386000633239746 + ], + [ + "▁cli", + -14.386008262634276 + ], + [ + "▁quicken", + -14.38601016998291 + ], + [ + "Virtu", + -14.386049270629885 + ], + [ + "▁Emirati", + -14.38615894317627 + ], + [ + "▁55,000", + -14.386176109313965 + ], + [ + "▁rationally", + -14.386180877685549 + ], + [ + "▁Kaya", + -14.386190414428713 + ], + [ + "▁grates", + -14.386194229125977 + ], + [ + "bout", + -14.386260986328123 + ], + [ + "remain", + -14.386268615722656 + ], + [ + "▁Muriel", + -14.38629913330078 + ], + [ + "▁crit", + -14.386364936828612 + ], + [ + "▁Mehr", + -14.38638401031494 + ], + [ + "COMP", + -14.386415481567385 + ], + [ + "▁OBD", + -14.386433601379396 + ], + [ + "$8", + -14.386462211608888 + ], + [ + "▁Encounters", + -14.386468887329102 + ], + [ + "-1960", + -14.38657569885254 + ], + [ + "964", + -14.386584281921388 + ], + [ + "▁Southend", + -14.386615753173828 + ], + [ + "bronze", + -14.38664722442627 + ], + [ + "▁CAST", + -14.38666820526123 + ], + [ + "▁Elephants", + -14.386752128601074 + ], + [ + "▁GTO", + -14.386763572692873 + ], + [ + "▁Changer", + -14.386815071105955 + ], + [ + "▁9:45", + -14.386899948120115 + ], + [ + "▁Compress", + -14.386913299560549 + ], + [ + "▁Dati", + -14.386924743652344 + ], + [ + "ACHI", + -14.386961936950684 + ], + [ + "▁workbooks", + -14.38699436187744 + ], + [ + "Mean", + -14.38702392578125 + ], + [ + "ROT", + -14.387029647827148 + ], + [ + "ophilus", + -14.387043952941896 + ], + [ + "COE", + -14.387054443359377 + ], + [ + "macher", + -14.387078285217283 + ], + [ + "olytic", + -14.38709545135498 + ], + [ + "▁fella", + -14.38710594177246 + ], + [ + "▁Neve", + -14.387137413024902 + ], + [ + "▁mightily", + -14.387271881103516 + ], + [ + "BRIDGE", + -14.387344360351562 + ], + [ + "▁Southfield", + -14.387351989746094 + ], + [ + "▁Goode", + -14.387378692626951 + ], + [ + "antonio", + -14.387380599975586 + ], + [ + "rasse", + -14.38749885559082 + ], + [ + "Fen", + -14.387505531311035 + ], + [ + "Shade", + -14.387564659118652 + ], + [ + "▁Hoss", + -14.387590408325195 + ], + [ + "ective", + -14.387699127197266 + ], + [ + "questions", + -14.387799263000488 + ], + [ + "▁dismount", + -14.387834548950195 + ], + [ + "▁trike", + -14.387839317321776 + ], + [ + "obble", + -14.387845039367676 + ], + [ + "▁SSDs", + -14.387845039367676 + ], + [ + "▁billiards", + -14.387876510620115 + ], + [ + "terrain", + -14.387929916381836 + ], + [ + "▁Muj", + -14.387937545776367 + ], + [ + "▁(13)", + -14.387986183166504 + ], + [ + "ordering", + -14.38805103302002 + ], + [ + "▁NYT", + -14.388089179992676 + ], + [ + "▁Waldo", + -14.388089179992676 + ], + [ + "▁oust", + -14.388134956359863 + ], + [ + "▁Miscellaneous", + -14.388175010681152 + ], + [ + "CCS", + -14.388226509094238 + ], + [ + "▁#20", + -14.38830852508545 + ], + [ + "▁Meters", + -14.388319969177246 + ], + [ + "▁enigma", + -14.388333320617676 + ], + [ + "arena", + -14.38833999633789 + ], + [ + "▁chaff", + -14.388340950012209 + ], + [ + "▁Dependent", + -14.388341903686523 + ], + [ + "▁kinks", + -14.388399124145508 + ], + [ + "uren", + -14.388425827026367 + ], + [ + "▁Lila", + -14.38851261138916 + ], + [ + "Leonard", + -14.388544082641602 + ], + [ + "ipo", + -14.388557434082031 + ], + [ + "▁MDS", + -14.38863754272461 + ], + [ + "▁Gaw", + -14.388733863830566 + ], + [ + "mose", + -14.3887357711792 + ], + [ + "▁pews", + -14.38879108428955 + ], + [ + "FIRE", + -14.388813972473145 + ], + [ + "intra", + -14.388886451721191 + ], + [ + 
"Amen", + -14.388906478881836 + ], + [ + "Ronald", + -14.388909339904783 + ], + [ + "▁thrash", + -14.388949394226074 + ], + [ + "▁Ninety", + -14.3889799118042 + ], + [ + "magnet", + -14.389016151428224 + ], + [ + "Forge", + -14.389042854309082 + ], + [ + "▁gymnast", + -14.389047622680664 + ], + [ + "▁VSI", + -14.38905143737793 + ], + [ + "Crypto", + -14.38905906677246 + ], + [ + "▁DRY", + -14.389068603515623 + ], + [ + "ASI", + -14.389117240905762 + ], + [ + "▁Rach", + -14.389127731323242 + ], + [ + "▁soundscapes", + -14.389135360717772 + ], + [ + "▁Infosys", + -14.389182090759276 + ], + [ + "▁30-60", + -14.389230728149414 + ], + [ + "docx", + -14.38927173614502 + ], + [ + "▁scoot", + -14.389291763305664 + ], + [ + "▁Gino", + -14.389299392700195 + ], + [ + "▁feasting", + -14.389323234558104 + ], + [ + "▁Allstate", + -14.38933277130127 + ], + [ + "▁Kobo", + -14.389338493347168 + ], + [ + "SALE", + -14.389406204223633 + ], + [ + "Provision", + -14.389521598815918 + ], + [ + "▁pimp", + -14.3895263671875 + ], + [ + "judicat", + -14.38955783843994 + ], + [ + "▁Taxpayer", + -14.389586448669434 + ], + [ + "▁wintertime", + -14.38970947265625 + ], + [ + "▁Meso", + -14.389716148376465 + ], + [ + "▁Tse", + -14.389769554138184 + ], + [ + "Calendar", + -14.389800071716309 + ], + [ + "▁Aditya", + -14.38980484008789 + ], + [ + "Hallelujah", + -14.389805793762209 + ], + [ + "▁Innsbruck", + -14.389805793762209 + ], + [ + "▁Joachim", + -14.389805793762209 + ], + [ + "▁Sandusky", + -14.389805793762209 + ], + [ + "▁conservatism", + -14.389805793762209 + ], + [ + "▁delinquency", + -14.389805793762209 + ], + [ + "▁diflucan", + -14.389805793762209 + ], + [ + "▁mimosa", + -14.389805793762209 + ], + [ + "▁peonies", + -14.389805793762209 + ], + [ + "▁salinity", + -14.389805793762209 + ], + [ + "▁unattainable", + -14.389805793762209 + ], + [ + "▁unfettered", + -14.389805793762209 + ], + [ + "▁Fasteners", + -14.389806747436523 + ], + [ + "▁cappella", + -14.389806747436523 + ], + [ + "▁niacin", + -14.389806747436523 + ], + [ + "▁Competency", + -14.38980770111084 + ], + [ + "▁Skeleton", + -14.38980770111084 + ], + [ + "▁Equivalent", + -14.389808654785156 + ], + [ + "▁Oxfam", + -14.389808654785156 + ], + [ + "1:05", + -14.389809608459473 + ], + [ + "Gervais", + -14.389811515808104 + ], + [ + "▁Pokhara", + -14.389812469482422 + ], + [ + "▁credence", + -14.38981819152832 + ], + [ + "▁spectrometer", + -14.38982105255127 + ], + [ + "▁ULTRA", + -14.389826774597168 + ], + [ + "composite", + -14.38983154296875 + ], + [ + "▁Communism", + -14.389838218688965 + ], + [ + "▁hapless", + -14.38983917236328 + ], + [ + "Fifth", + -14.389849662780762 + ], + [ + "▁(1984)", + -14.389850616455078 + ], + [ + "Acquiring", + -14.38985824584961 + ], + [ + "Dollar", + -14.389859199523926 + ], + [ + "Jeffrey", + -14.389859199523926 + ], + [ + "Shannon", + -14.38986110687256 + ], + [ + "Architecture", + -14.389862060546877 + ], + [ + "Alfred", + -14.389863967895508 + ], + [ + "▁challengers", + -14.38987159729004 + ], + [ + "▁Queries", + -14.389873504638672 + ], + [ + "▁sipped", + -14.389877319335938 + ], + [ + "▁audiophile", + -14.389878273010254 + ], + [ + "Scholarship", + -14.389885902404783 + ], + [ + "▁cheeseburger", + -14.38990592956543 + ], + [ + "▁deadbolt", + -14.38991928100586 + ], + [ + "Evaluation", + -14.389925956726074 + ], + [ + "Pingback", + -14.389955520629885 + ], + [ + "▁Tricia", + -14.389971733093262 + ], + [ + "▁Chianti", + -14.389972686767578 + ], + [ + "▁Vinny", + -14.390008926391602 + ], + [ + "▁cowardly", + 
-14.390055656433104 + ], + [ + "buffer", + -14.390081405639648 + ], + [ + "▁rehash", + -14.390085220336914 + ], + [ + "▁Ghosh", + -14.390091896057127 + ], + [ + "▁Alderman", + -14.390101432800291 + ], + [ + "▁Zayed", + -14.390103340148926 + ], + [ + "▁Landon", + -14.39011573791504 + ], + [ + "▁democratically", + -14.390125274658203 + ], + [ + "▁Trimmer", + -14.390130043029783 + ], + [ + "▁hank", + -14.390140533447266 + ], + [ + "azzo", + -14.390148162841797 + ], + [ + "▁Salvage", + -14.390164375305176 + ], + [ + "▁evicted", + -14.39017391204834 + ], + [ + "inflation", + -14.390175819396973 + ], + [ + "▁immunizations", + -14.39019775390625 + ], + [ + "▁Bannon", + -14.390198707580566 + ], + [ + "▁sundry", + -14.39023208618164 + ], + [ + "▁DCI", + -14.390251159667969 + ], + [ + "Physician", + -14.39026165008545 + ], + [ + "Owen", + -14.390271186828612 + ], + [ + "steak", + -14.390294075012209 + ], + [ + "▁thumping", + -14.390360832214355 + ], + [ + "alp", + -14.390395164489746 + ], + [ + "▁chickpea", + -14.390405654907228 + ], + [ + "▁Seagate", + -14.390414237976074 + ], + [ + "▁NSX", + -14.39041519165039 + ], + [ + "▁prostitute", + -14.39041805267334 + ], + [ + "▁decluttering", + -14.390460968017578 + ], + [ + "▁Kombat", + -14.390477180480955 + ], + [ + "▁dampness", + -14.390488624572754 + ], + [ + "iha", + -14.390499114990234 + ], + [ + "▁wetting", + -14.390510559082031 + ], + [ + "▁Glossy", + -14.390521049499512 + ], + [ + "▁repositioning", + -14.390542984008787 + ], + [ + "Technically", + -14.39055347442627 + ], + [ + "▁nibs", + -14.39059829711914 + ], + [ + "▁geologists", + -14.390684127807615 + ], + [ + "▁fabricating", + -14.390703201293944 + ], + [ + "ир", + -14.390727996826172 + ], + [ + "▁Feld", + -14.39074420928955 + ], + [ + "▁Salute", + -14.390798568725586 + ], + [ + "GRO", + -14.390799522399902 + ], + [ + "/7/", + -14.390854835510254 + ], + [ + "▁hillsides", + -14.39085865020752 + ], + [ + "▁hemline", + -14.390867233276367 + ], + [ + "▁Conley", + -14.39089584350586 + ], + [ + "▁Merton", + -14.390896797180176 + ], + [ + "▁baccalaureate", + -14.390931129455566 + ], + [ + "ultimate", + -14.391031265258787 + ], + [ + "▁tinged", + -14.391050338745115 + ], + [ + "selective", + -14.391179084777832 + ], + [ + "garten", + -14.391201972961426 + ], + [ + "894", + -14.39121150970459 + ], + [ + "▁Crom", + -14.391234397888184 + ], + [ + "▁lengthening", + -14.39129638671875 + ], + [ + "▁Collectively", + -14.391305923461914 + ], + [ + "▁Bambi", + -14.391317367553713 + ], + [ + "▁Backing", + -14.391419410705566 + ], + [ + "▁Franken", + -14.39142608642578 + ], + [ + "▁Romain", + -14.391549110412598 + ], + [ + "▁ADF", + -14.391575813293455 + ], + [ + "stamped", + -14.39159870147705 + ], + [ + "▁(49", + -14.391690254211426 + ], + [ + "▁cartels", + -14.391706466674805 + ], + [ + "▁Dall", + -14.391718864440918 + ], + [ + "Completely", + -14.391742706298828 + ], + [ + "▁readjust", + -14.391807556152344 + ], + [ + "▁Passionate", + -14.391822814941406 + ], + [ + "▁shoveling", + -14.39193630218506 + ], + [ + "alike", + -14.391983032226562 + ], + [ + "▁Cereal", + -14.39203643798828 + ], + [ + "occi", + -14.392046928405762 + ], + [ + "▁rebalance", + -14.39220142364502 + ], + [ + "1.9%", + -14.39223575592041 + ], + [ + "▁Azul", + -14.392289161682127 + ], + [ + "▁Targeting", + -14.392401695251465 + ], + [ + "hic", + -14.392478942871094 + ], + [ + "IKA", + -14.392523765563965 + ], + [ + "▁handcraft", + -14.392611503601074 + ], + [ + "Preache", + -14.392650604248049 + ], + [ + "dpi", + -14.392655372619627 + ], 
+ [ + "▁Bayside", + -14.39267921447754 + ], + [ + "FRC", + -14.39269733428955 + ], + [ + "▁midpoint", + -14.392708778381348 + ], + [ + "▁rockin", + -14.393113136291504 + ], + [ + "▁spi", + -14.39311408996582 + ], + [ + "▁27,000", + -14.393122673034668 + ], + [ + "▁flavonoids", + -14.393126487731934 + ], + [ + "▁HCC", + -14.393170356750488 + ], + [ + "specialist", + -14.393240928649902 + ], + [ + "omycin", + -14.393293380737305 + ], + [ + "axle", + -14.39334774017334 + ], + [ + "limp", + -14.39340877532959 + ], + [ + "4.75", + -14.393424987792969 + ], + [ + "admi", + -14.39345932006836 + ], + [ + "▁apothecary", + -14.393528938293455 + ], + [ + "slope", + -14.39354133605957 + ], + [ + "beard", + -14.393546104431152 + ], + [ + "▁Hometown", + -14.393548965454102 + ], + [ + "▁Pelicans", + -14.393548965454102 + ], + [ + "▁Yachts", + -14.393599510192873 + ], + [ + "▁rebooting", + -14.39365577697754 + ], + [ + "Adv", + -14.393657684326172 + ], + [ + "▁deform", + -14.393728256225586 + ], + [ + "▁Gaspar", + -14.393735885620115 + ], + [ + "▁LOA", + -14.393804550170898 + ], + [ + "turner", + -14.39383602142334 + ], + [ + "▁revolutionise", + -14.393837928771973 + ], + [ + "relative", + -14.393892288208008 + ], + [ + "▁bestellen", + -14.393912315368652 + ], + [ + "▁Rena", + -14.39396858215332 + ], + [ + "completed", + -14.3939790725708 + ], + [ + "capita", + -14.394087791442873 + ], + [ + "▁Chamomile", + -14.394137382507324 + ], + [ + "hopped", + -14.394142150878906 + ], + [ + "▁8.9", + -14.394142150878906 + ], + [ + "bina", + -14.394179344177246 + ], + [ + "emerge", + -14.394187927246094 + ], + [ + "▁dings", + -14.39418888092041 + ], + [ + "▁15:1", + -14.394246101379396 + ], + [ + "uction", + -14.394298553466797 + ], + [ + "▁Umm", + -14.394379615783691 + ], + [ + "/30/201", + -14.39438533782959 + ], + [ + "▁Cairn", + -14.394417762756348 + ], + [ + "▁Gregor", + -14.394432067871094 + ], + [ + "Changed", + -14.394495964050291 + ], + [ + "▁McLa", + -14.394498825073242 + ], + [ + "▁Activ", + -14.39449977874756 + ], + [ + "▁materialism", + -14.39452075958252 + ], + [ + "▁codeine", + -14.39454460144043 + ], + [ + "▁Tham", + -14.39462184906006 + ], + [ + "▁UCF", + -14.394657135009766 + ], + [ + "▁Jama", + -14.394679069519045 + ], + [ + "SUR", + -14.394722938537598 + ], + [ + "▁Marked", + -14.394728660583496 + ], + [ + "$200", + -14.39475917816162 + ], + [ + "mbal", + -14.394765853881836 + ], + [ + "validation", + -14.394770622253418 + ], + [ + "▁wishful", + -14.394834518432615 + ], + [ + "EGO", + -14.39486312866211 + ], + [ + "▁(65", + -14.394872665405272 + ], + [ + "▁industrialist", + -14.394935607910156 + ], + [ + "▁Remo", + -14.394991874694824 + ], + [ + "nster", + -14.395041465759276 + ], + [ + "▁(54", + -14.395042419433594 + ], + [ + "▁arabia", + -14.39511013031006 + ], + [ + "Construct", + -14.395177841186523 + ], + [ + "1901", + -14.395220756530762 + ], + [ + "▁12:4", + -14.395225524902344 + ], + [ + "▁hummingbirds", + -14.395254135131836 + ], + [ + "▁Anyhow", + -14.395299911499023 + ], + [ + "▁Dada", + -14.395323753356934 + ], + [ + "▁Suspect", + -14.395358085632324 + ], + [ + "▁patriot", + -14.395437240600586 + ], + [ + "▁deduce", + -14.395447731018066 + ], + [ + "Nex", + -14.395480155944824 + ], + [ + "▁adjourn", + -14.39550495147705 + ], + [ + "▁engulf", + -14.395505905151367 + ], + [ + "▁Meier", + -14.395666122436523 + ], + [ + "Rinse", + -14.39573097229004 + ], + [ + "purposed", + -14.395743370056152 + ], + [ + "DISCLAIMER", + -14.39578914642334 + ], + [ + "studies", + -14.395793914794922 + ], + [ 
+ "▁surefire", + -14.395797729492188 + ], + [ + "▁bobbin", + -14.39580535888672 + ], + [ + "9-10", + -14.395812034606934 + ], + [ + "ILS", + -14.395872116088867 + ], + [ + "▁scapegoat", + -14.395902633666992 + ], + [ + "▁Gatlinburg", + -14.395903587341309 + ], + [ + "▁mediocrity", + -14.395903587341309 + ], + [ + "▁parallax", + -14.395903587341309 + ], + [ + "▁slalom", + -14.395903587341309 + ], + [ + "▁uncluttered", + -14.395903587341309 + ], + [ + "▁vashikaran", + -14.395903587341309 + ], + [ + "Colossians", + -14.395904541015623 + ], + [ + "Possibility", + -14.39590549468994 + ], + [ + "▁AMR", + -14.39590549468994 + ], + [ + "▁Cognac", + -14.39590549468994 + ], + [ + "Consuming", + -14.395907402038574 + ], + [ + "▁neuromuscular", + -14.395907402038574 + ], + [ + "▁cursive", + -14.395909309387209 + ], + [ + "▁Faisal", + -14.395910263061523 + ], + [ + "▁practicum", + -14.395915031433104 + ], + [ + "▁interracial", + -14.39592456817627 + ], + [ + "▁pitiful", + -14.395925521850586 + ], + [ + "▁Windshield", + -14.395928382873535 + ], + [ + "▁silliness", + -14.395931243896484 + ], + [ + "▁LEAF", + -14.395947456359863 + ], + [ + "▁Livermore", + -14.395953178405762 + ], + [ + "▁Duplicate", + -14.395955085754396 + ], + [ + "Ivoire", + -14.395956993103027 + ], + [ + "▁BSP", + -14.39597988128662 + ], + [ + "▁CMP", + -14.395981788635254 + ], + [ + "▁deterministic", + -14.395983695983888 + ], + [ + "Excluding", + -14.395991325378418 + ], + [ + "▁Haiku", + -14.39599895477295 + ], + [ + "▁lymphocytes", + -14.396044731140137 + ], + [ + "▁Santi", + -14.39604663848877 + ], + [ + "▁Tencent", + -14.396052360534668 + ], + [ + "isn", + -14.396074295043944 + ], + [ + "Sterling", + -14.396100044250488 + ], + [ + "Orlando", + -14.39610195159912 + ], + [ + "Ukraine", + -14.39610195159912 + ], + [ + "▁Krabi", + -14.396102905273438 + ], + [ + "Sweden", + -14.396103858947754 + ], + [ + "Academy", + -14.39610481262207 + ], + [ + "▁Pickens", + -14.396106719970703 + ], + [ + "Properties", + -14.396109580993652 + ], + [ + "▁Antrim", + -14.396109580993652 + ], + [ + "miller", + -14.39611530303955 + ], + [ + "▁extracellular", + -14.396145820617676 + ], + [ + "▁verandah", + -14.396170616149902 + ], + [ + "▁Angles", + -14.396172523498535 + ], + [ + "▁Ciara", + -14.3961763381958 + ], + [ + "▁bloodshed", + -14.396184921264648 + ], + [ + "▁Nominations", + -14.396211624145508 + ], + [ + "Frequently", + -14.39622402191162 + ], + [ + "▁phobias", + -14.396234512329102 + ], + [ + "▁Perri", + -14.396259307861328 + ], + [ + "▁Commuter", + -14.396273612976074 + ], + [ + "▁oozes", + -14.396297454833984 + ], + [ + "▁Masonry", + -14.396303176879885 + ], + [ + "▁Pentax", + -14.39631175994873 + ], + [ + "▁starchy", + -14.396319389343262 + ], + [ + "▁forsake", + -14.396340370178224 + ], + [ + "herself", + -14.396380424499512 + ], + [ + "▁Drones", + -14.396384239196776 + ], + [ + "Unleash", + -14.396411895751951 + ], + [ + "Catalog", + -14.396424293518066 + ], + [ + "bullet", + -14.3964262008667 + ], + [ + "▁bellies", + -14.396446228027344 + ], + [ + "▁raisin", + -14.396450996398926 + ], + [ + "dav", + -14.396454811096191 + ], + [ + "946", + -14.396480560302734 + ], + [ + "▁Silverstone", + -14.39654541015625 + ], + [ + "▁hummingbird", + -14.396564483642578 + ], + [ + "▁theologian", + -14.39656925201416 + ], + [ + "IEEE", + -14.396586418151855 + ], + [ + "dough", + -14.396610260009766 + ], + [ + "▁SPSS", + -14.396631240844728 + ], + [ + "▁Medley", + -14.396632194519045 + ], + [ + "▁snore", + -14.396673202514648 + ], + [ + "▁Wildflower", + 
-14.396679878234863 + ], + [ + "QB", + -14.396689414978027 + ], + [ + "▁smother", + -14.396698951721191 + ], + [ + "imagined", + -14.396703720092772 + ], + [ + "▁immaculately", + -14.396709442138672 + ], + [ + "▁Astana", + -14.396726608276367 + ], + [ + "▁Krist", + -14.396728515625 + ], + [ + "initial", + -14.39676570892334 + ], + [ + "▁Jena", + -14.39687442779541 + ], + [ + "▁worshiped", + -14.396954536437988 + ], + [ + "▁bluntly", + -14.39697551727295 + ], + [ + "▁Tucked", + -14.396981239318848 + ], + [ + "▁Tunisian", + -14.39699649810791 + ], + [ + "6.7%", + -14.397027015686035 + ], + [ + "Fighting", + -14.39703369140625 + ], + [ + "▁Tib", + -14.397034645080566 + ], + [ + "▁RISE", + -14.397062301635742 + ], + [ + "toro", + -14.39707851409912 + ], + [ + "Trees", + -14.39711570739746 + ], + [ + "▁Jansen", + -14.39711570739746 + ], + [ + "ICP", + -14.397132873535156 + ], + [ + "Drew", + -14.397165298461914 + ], + [ + "Immerse", + -14.397165298461914 + ], + [ + "▁Enabling", + -14.397174835205078 + ], + [ + "▁armoured", + -14.397224426269531 + ], + [ + "éta", + -14.397232055664062 + ], + [ + "▁OSA", + -14.397235870361328 + ], + [ + "Rig", + -14.397252082824709 + ], + [ + "ganda", + -14.397299766540527 + ], + [ + "▁SDL", + -14.397326469421388 + ], + [ + "GEAR", + -14.397442817687988 + ], + [ + "ranger", + -14.397497177124023 + ], + [ + "vado", + -14.397500038146973 + ], + [ + "hedral", + -14.397536277770996 + ], + [ + "▁widower", + -14.397564888000488 + ], + [ + "▁heave", + -14.397635459899902 + ], + [ + "PACK", + -14.397753715515137 + ], + [ + "▁Gutters", + -14.397775650024414 + ], + [ + "▁wrongfully", + -14.397781372070312 + ], + [ + "▁flavourful", + -14.397785186767578 + ], + [ + "▁playhouse", + -14.397805213928224 + ], + [ + "Unity", + -14.397896766662598 + ], + [ + "Beck", + -14.397963523864746 + ], + [ + "▁Quilting", + -14.39800262451172 + ], + [ + "WEST", + -14.398006439208984 + ], + [ + "▁Uma", + -14.398006439208984 + ], + [ + "scientists", + -14.398016929626465 + ], + [ + "Gross", + -14.39809799194336 + ], + [ + "▁Ener", + -14.398107528686523 + ], + [ + "fh", + -14.398170471191406 + ], + [ + "▁Faro", + -14.398192405700684 + ], + [ + "▁GDC", + -14.398222923278809 + ], + [ + "Refund", + -14.398263931274414 + ], + [ + "▁ACCOUNT", + -14.398282051086426 + ], + [ + "Drum", + -14.39835262298584 + ], + [ + "▁Bullying", + -14.398433685302734 + ], + [ + "Received", + -14.398449897766112 + ], + [ + "hini", + -14.398480415344238 + ], + [ + "submit", + -14.398508071899414 + ], + [ + "▁interventional", + -14.398536682128906 + ], + [ + "pedic", + -14.39854907989502 + ], + [ + "▁702", + -14.398555755615234 + ], + [ + "/70", + -14.398700714111328 + ], + [ + "▁firming", + -14.398783683776855 + ], + [ + "▁overhauled", + -14.398833274841309 + ], + [ + "été", + -14.398848533630373 + ], + [ + "▁normalization", + -14.39887237548828 + ], + [ + "▁matchmaker", + -14.398898124694824 + ], + [ + "▁TMC", + -14.39893627166748 + ], + [ + "▁individualism", + -14.398966789245604 + ], + [ + "▁Infect", + -14.39897346496582 + ], + [ + "▁Spicer", + -14.398977279663086 + ], + [ + "ibril", + -14.398984909057615 + ], + [ + "moe", + -14.39911937713623 + ], + [ + "Lance", + -14.399123191833496 + ], + [ + "MCE", + -14.399124145507812 + ], + [ + "▁Ornaments", + -14.399127006530762 + ], + [ + "▁Temper", + -14.399127960205078 + ], + [ + "▁Roper", + -14.399216651916504 + ], + [ + "Picking", + -14.399269104003906 + ], + [ + "tuber", + -14.399385452270508 + ], + [ + "Temple", + -14.399407386779783 + ], + [ + "▁LEE", + 
-14.399422645568848 + ], + [ + "▁Klan", + -14.399454116821287 + ], + [ + "uden", + -14.399478912353516 + ], + [ + "▁loam", + -14.399496078491213 + ], + [ + "▁MDC", + -14.399538040161133 + ], + [ + "▁herbalist", + -14.39962100982666 + ], + [ + "CCB", + -14.399648666381836 + ], + [ + "▁encrypts", + -14.399656295776367 + ], + [ + "desh", + -14.399706840515137 + ], + [ + "▁LAMP", + -14.39971160888672 + ], + [ + "▁lifeguards", + -14.399858474731444 + ], + [ + "▁Deputies", + -14.399913787841797 + ], + [ + "▁Ronda", + -14.399984359741213 + ], + [ + "▁Berber", + -14.400200843811035 + ], + [ + "gani", + -14.400219917297363 + ], + [ + "▁dampers", + -14.400224685668944 + ], + [ + "ISIS", + -14.400229454040527 + ], + [ + "▁shovels", + -14.40023708343506 + ], + [ + "Bai", + -14.40034008026123 + ], + [ + "▁fouling", + -14.400341033935549 + ], + [ + "Neu", + -14.40038776397705 + ], + [ + "▁christ", + -14.400430679321287 + ], + [ + "▁$63", + -14.40049648284912 + ], + [ + "▁(2011", + -14.40049934387207 + ], + [ + "▁clashing", + -14.400510787963867 + ], + [ + "korn", + -14.400650024414062 + ], + [ + "Mash", + -14.400668144226074 + ], + [ + "swim", + -14.400733947753906 + ], + [ + "perform", + -14.40108871459961 + ], + [ + "UNC", + -14.401123046875 + ], + [ + "▁Natur", + -14.401135444641112 + ], + [ + "▁riskier", + -14.40119457244873 + ], + [ + "\"\"\"", + -14.40123176574707 + ], + [ + "▁cubicles", + -14.401288986206056 + ], + [ + "Objects", + -14.401314735412598 + ], + [ + "▁Thurston", + -14.401328086853027 + ], + [ + "▁belted", + -14.401463508605955 + ], + [ + "iou", + -14.401471138000488 + ], + [ + "▁ASX", + -14.401487350463867 + ], + [ + "▁SMT", + -14.401573181152344 + ], + [ + "▁reimagine", + -14.401610374450684 + ], + [ + "Gru", + -14.401650428771973 + ], + [ + "▁IDX", + -14.401674270629885 + ], + [ + "▁Advertise", + -14.401705741882324 + ], + [ + "Amber", + -14.401742935180664 + ], + [ + "Chronic", + -14.401769638061523 + ], + [ + "passing", + -14.401777267456056 + ], + [ + "bello", + -14.401808738708496 + ], + [ + "▁Marge", + -14.401810646057127 + ], + [ + "[1", + -14.401823043823242 + ], + [ + "myth", + -14.401902198791504 + ], + [ + "▁Chez", + -14.40192699432373 + ], + [ + "▁worded", + -14.40194034576416 + ], + [ + "▁Rats", + -14.401942253112791 + ], + [ + "frei", + -14.401966094970703 + ], + [ + "▁videotape", + -14.401981353759766 + ], + [ + "▁feverish", + -14.402022361755373 + ], + [ + "▁inclusivity", + -14.402037620544434 + ], + [ + "Cuckoo", + -14.40203857421875 + ], + [ + "Intermittent", + -14.40203857421875 + ], + [ + "▁Fierce", + -14.40203857421875 + ], + [ + "▁Invoices", + -14.40203857421875 + ], + [ + "▁Waikato", + -14.40203857421875 + ], + [ + "▁echelon", + -14.40203857421875 + ], + [ + "▁eradicating", + -14.40203857421875 + ], + [ + "▁hydroxide", + -14.40203857421875 + ], + [ + "▁profusely", + -14.40203857421875 + ], + [ + "▁wellbutrin", + -14.40203857421875 + ], + [ + "Expired", + -14.402039527893066 + ], + [ + "▁Brandeis", + -14.402039527893066 + ], + [ + "▁Injured", + -14.402039527893066 + ], + [ + "▁ADDITION", + -14.402040481567385 + ], + [ + "▁angrily", + -14.402040481567385 + ], + [ + "▁Loudoun", + -14.4020414352417 + ], + [ + "chell", + -14.40204620361328 + ], + [ + "▁Torrance", + -14.402048110961914 + ], + [ + "▁Hannibal", + -14.40204906463623 + ], + [ + "▁carnations", + -14.402054786682127 + ], + [ + "▁Haydn", + -14.402059555053713 + ], + [ + "▁bemoan", + -14.402060508728027 + ], + [ + "▁probabilistic", + -14.40206813812256 + ], + [ + "▁Lerner", + -14.402070999145508 + ], + [ + 
"▁CentOS", + -14.402076721191406 + ], + [ + "▁Migrant", + -14.402076721191406 + ], + [ + "▁Finlay", + -14.40208339691162 + ], + [ + "▁Framingham", + -14.402085304260254 + ], + [ + "▁dapper", + -14.402090072631836 + ], + [ + "imiento", + -14.402101516723633 + ], + [ + "▁Identifier", + -14.40210247039795 + ], + [ + "▁burglaries", + -14.402107238769531 + ], + [ + "▁mysticism", + -14.402114868164062 + ], + [ + "▁Dearborn", + -14.40211582183838 + ], + [ + "▁interdependent", + -14.402128219604492 + ], + [ + "▁OAuth", + -14.402145385742188 + ], + [ + "▁Envision", + -14.40215301513672 + ], + [ + "Stein", + -14.402158737182615 + ], + [ + "ttinger", + -14.40216827392578 + ], + [ + "▁Stardust", + -14.402169227600098 + ], + [ + "heterosexual", + -14.40217113494873 + ], + [ + "schutz", + -14.402176856994627 + ], + [ + "▁TES", + -14.402177810668944 + ], + [ + "▁femme", + -14.40220832824707 + ], + [ + "▁iconography", + -14.402210235595703 + ], + [ + "▁flatbread", + -14.402213096618652 + ], + [ + "▁Eternity", + -14.402241706848145 + ], + [ + "discharge", + -14.402246475219728 + ], + [ + "▁Replication", + -14.402252197265623 + ], + [ + "▁Borrower", + -14.402254104614258 + ], + [ + "▁Meryl", + -14.402278900146484 + ], + [ + "▁beckons", + -14.402280807495115 + ], + [ + "takers", + -14.40231990814209 + ], + [ + "democratic", + -14.40232276916504 + ], + [ + "▁organza", + -14.402332305908203 + ], + [ + "▁resized", + -14.40234375 + ], + [ + "▁convening", + -14.402362823486328 + ], + [ + "basket", + -14.402377128601074 + ], + [ + "Intelligent", + -14.402379989624023 + ], + [ + "Snapshot", + -14.402379989624023 + ], + [ + "Precision", + -14.402392387390137 + ], + [ + "Stewart", + -14.402393341064451 + ], + [ + "▁Assured", + -14.402398109436035 + ], + [ + "promoting", + -14.40240478515625 + ], + [ + "▁Specialties", + -14.402446746826172 + ], + [ + "▁2015/16", + -14.402467727661133 + ], + [ + "▁Clarita", + -14.402470588684082 + ], + [ + "▁Kling", + -14.402484893798828 + ], + [ + "identity", + -14.40248680114746 + ], + [ + "settlement", + -14.402505874633787 + ], + [ + "curated", + -14.40252685546875 + ], + [ + "▁serif", + -14.40253448486328 + ], + [ + "▁jetting", + -14.402543067932127 + ], + [ + "Gateway", + -14.402544021606444 + ], + [ + "Calif", + -14.40256690979004 + ], + [ + "adhi", + -14.402567863464355 + ], + [ + "▁Panera", + -14.402576446533203 + ], + [ + "▁culled", + -14.402581214904783 + ], + [ + "optimized", + -14.402586936950684 + ], + [ + "▁dollhouse", + -14.402587890625 + ], + [ + "exploit", + -14.402606964111328 + ], + [ + "attention", + -14.40263557434082 + ], + [ + "▁friendlier", + -14.402643203735352 + ], + [ + "▁litho", + -14.402688026428224 + ], + [ + "Array", + -14.40269947052002 + ], + [ + "▁slippage", + -14.402715682983398 + ], + [ + "▁Seaport", + -14.40274429321289 + ], + [ + "▁superhuman", + -14.402749061584473 + ], + [ + "▁toppled", + -14.402749061584473 + ], + [ + "GRE", + -14.4027738571167 + ], + [ + "▁Sui", + -14.402789115905762 + ], + [ + "▁bayou", + -14.402789115905762 + ], + [ + "▁UGC", + -14.402799606323242 + ], + [ + "▁2048", + -14.402810096740724 + ], + [ + "▁Tambo", + -14.402876853942873 + ], + [ + "▁BOSS", + -14.402907371520996 + ], + [ + "Knit", + -14.40296459197998 + ], + [ + "▁stealthy", + -14.403040885925291 + ], + [ + "▁sparing", + -14.403067588806152 + ], + [ + "spawn", + -14.403093338012695 + ], + [ + "▁ADSL", + -14.403103828430176 + ], + [ + "▁$68", + -14.403125762939451 + ], + [ + "▁Rigid", + -14.403149604797363 + ], + [ + "▁matted", + -14.4031982421875 + ], + [ + 
"▁necessitated", + -14.403204917907717 + ], + [ + "▁Viv", + -14.40320873260498 + ], + [ + "Expression", + -14.403241157531738 + ], + [ + "▁underlining", + -14.40327262878418 + ], + [ + "Krebs", + -14.40328311920166 + ], + [ + "▁Avail", + -14.40329933166504 + ], + [ + "▁DAT", + -14.403313636779783 + ], + [ + "revolutionizing", + -14.403348922729492 + ], + [ + "operator", + -14.403411865234377 + ], + [ + "▁fiancée", + -14.40343952178955 + ], + [ + "▁staterooms", + -14.403465270996094 + ], + [ + "▁hamstrings", + -14.403481483459473 + ], + [ + "▁Capitalism", + -14.403523445129396 + ], + [ + "Lucas", + -14.403556823730469 + ], + [ + "▁BBA", + -14.403635025024414 + ], + [ + "▁Feelings", + -14.403657913208008 + ], + [ + "▁settler", + -14.403687477111816 + ], + [ + "classification", + -14.403745651245115 + ], + [ + "▁Columbian", + -14.403790473937988 + ], + [ + "kkad", + -14.403816223144531 + ], + [ + "▁Shareholders", + -14.403863906860352 + ], + [ + "Krispy", + -14.4038667678833 + ], + [ + "▁grossed", + -14.4038667678833 + ], + [ + "▁Ape", + -14.40391445159912 + ], + [ + "▁Opposite", + -14.403939247131348 + ], + [ + "▁Akt", + -14.403982162475586 + ], + [ + "▁Plata", + -14.404020309448242 + ], + [ + "eso", + -14.404067039489746 + ], + [ + "▁Strata", + -14.4041166305542 + ], + [ + "▁obeyed", + -14.40426254272461 + ], + [ + "Ghz", + -14.404319763183594 + ], + [ + "▁Concorde", + -14.404330253601074 + ], + [ + "▁licenced", + -14.404340744018556 + ], + [ + "▁MICRO", + -14.404362678527832 + ], + [ + "ucan", + -14.404483795166016 + ], + [ + "▁shotguns", + -14.40449047088623 + ], + [ + "▁Topping", + -14.404498100280762 + ], + [ + "chir", + -14.404512405395508 + ], + [ + "▁Sze", + -14.40453052520752 + ], + [ + "ischer", + -14.404568672180176 + ], + [ + "EFT", + -14.404571533203123 + ], + [ + "Liu", + -14.404574394226074 + ], + [ + "▁Terriers", + -14.404579162597656 + ], + [ + "AES", + -14.404582977294922 + ], + [ + "Measures", + -14.404597282409668 + ], + [ + "▁Partnering", + -14.404617309570312 + ], + [ + "▁invoiced", + -14.404619216918944 + ], + [ + "▁bioinformatics", + -14.40468406677246 + ], + [ + "▁Subscriber", + -14.40479850769043 + ], + [ + "▁batten", + -14.404864311218262 + ], + [ + "Camping", + -14.404932022094728 + ], + [ + "8.5%", + -14.405020713806152 + ], + [ + "INF", + -14.405094146728516 + ], + [ + "▁wis", + -14.405125617980955 + ], + [ + "etsu", + -14.405142784118652 + ], + [ + "▁Oddly", + -14.405158042907717 + ], + [ + "▁sear", + -14.40522575378418 + ], + [ + "▁bushy", + -14.405244827270508 + ], + [ + "▁Wahl", + -14.405247688293455 + ], + [ + "▁CHIP", + -14.405282020568848 + ], + [ + "nermost", + -14.405377388000488 + ], + [ + "manu", + -14.40542221069336 + ], + [ + "▁excavators", + -14.405495643615724 + ], + [ + "▁Gho", + -14.405508041381836 + ], + [ + "▁39;", + -14.405537605285645 + ], + [ + "8.50", + -14.40553855895996 + ], + [ + "▁tassels", + -14.405560493469238 + ], + [ + "nist", + -14.40559196472168 + ], + [ + "▁gridlock", + -14.405648231506348 + ], + [ + "▁TEXT", + -14.405773162841797 + ], + [ + "nung", + -14.405861854553224 + ], + [ + "Swan", + -14.405916213989258 + ], + [ + "OOOOO", + -14.40595817565918 + ], + [ + "▁Graff", + -14.405994415283203 + ], + [ + "▁Mammal", + -14.406044006347656 + ], + [ + "Appear", + -14.406067848205566 + ], + [ + "▁Hier", + -14.406113624572754 + ], + [ + "extract", + -14.406137466430664 + ], + [ + "▁Refunds", + -14.406153678894045 + ], + [ + "rangi", + -14.406188011169434 + ], + [ + "Mechanical", + -14.406227111816406 + ], + [ + "Mmmm", + 
-14.406246185302734 + ], + [ + "bbc", + -14.406248092651367 + ], + [ + "▁Schell", + -14.406249046325684 + ], + [ + "▁Travellers", + -14.406344413757324 + ], + [ + "wheeled", + -14.406367301940918 + ], + [ + "limits", + -14.406400680541992 + ], + [ + "▁(2016", + -14.406408309936523 + ], + [ + "ylate", + -14.406418800354004 + ], + [ + "986", + -14.406535148620604 + ], + [ + "lil", + -14.406570434570312 + ], + [ + "▁Spoke", + -14.406620979309082 + ], + [ + "adapt", + -14.406639099121094 + ], + [ + "pathology", + -14.406685829162598 + ], + [ + "▁whizz", + -14.40673542022705 + ], + [ + "▁Bodi", + -14.406851768493652 + ], + [ + "▁Taw", + -14.406868934631348 + ], + [ + "hypo", + -14.406871795654297 + ], + [ + "▁VCD", + -14.406888961791992 + ], + [ + "0,00", + -14.406892776489258 + ], + [ + "▁12:3", + -14.406903266906738 + ], + [ + "▁Baw", + -14.406943321228027 + ], + [ + "▁Soph", + -14.407026290893556 + ], + [ + "▁Ange", + -14.40707015991211 + ], + [ + "UTS", + -14.407078742980955 + ], + [ + "▁Martyn", + -14.407105445861816 + ], + [ + "▁upstart", + -14.40712070465088 + ], + [ + "RBI", + -14.407127380371094 + ], + [ + "▁interfered", + -14.407222747802734 + ], + [ + "Scape", + -14.407240867614746 + ], + [ + "elan", + -14.40724754333496 + ], + [ + "Worx", + -14.407293319702148 + ], + [ + "WHERE", + -14.407303810119627 + ], + [ + "▁harsher", + -14.40731143951416 + ], + [ + "▁Xamarin", + -14.40735149383545 + ], + [ + "▁plagu", + -14.407442092895508 + ], + [ + "▁underpinnings", + -14.407450675964355 + ], + [ + "GRAM", + -14.407453536987305 + ], + [ + "▁Dialog", + -14.40747356414795 + ], + [ + "ysi", + -14.407485008239746 + ], + [ + "Paramedic", + -14.40749168395996 + ], + [ + "1951", + -14.407523155212402 + ], + [ + "Yi", + -14.407554626464844 + ], + [ + "unya", + -14.40756893157959 + ], + [ + "▁Gonz", + -14.407577514648438 + ], + [ + "OLD", + -14.407626152038574 + ], + [ + "▁reorganize", + -14.40768337249756 + ], + [ + "CIT", + -14.407761573791504 + ], + [ + "▁incarnations", + -14.4077730178833 + ], + [ + "LMA", + -14.407793045043944 + ], + [ + "▁connectedness", + -14.40779972076416 + ], + [ + "▁subdue", + -14.40782356262207 + ], + [ + "loh", + -14.40792751312256 + ], + [ + "▁Happily", + -14.40795612335205 + ], + [ + "▁Prussia", + -14.407987594604492 + ], + [ + "Sunset", + -14.408014297485352 + ], + [ + "091", + -14.408023834228516 + ], + [ + "stove", + -14.408108711242676 + ], + [ + "▁wristwatch", + -14.40811538696289 + ], + [ + "▁decrees", + -14.408195495605469 + ], + [ + "▁FIX", + -14.40820026397705 + ], + [ + "Mecklenburg", + -14.408210754394531 + ], + [ + "▁Ephraim", + -14.408210754394531 + ], + [ + "▁NaNoWriMo", + -14.408210754394531 + ], + [ + "▁Tchaikovsky", + -14.408210754394531 + ], + [ + "▁autoresponder", + -14.408210754394531 + ], + [ + "▁indisputable", + -14.408210754394531 + ], + [ + "▁smattering", + -14.408210754394531 + ], + [ + "▁undetectable", + -14.408210754394531 + ], + [ + "emulsifier", + -14.408211708068848 + ], + [ + "▁Dumfries", + -14.408211708068848 + ], + [ + "▁Pizzeria", + -14.408211708068848 + ], + [ + "▁apparition", + -14.408211708068848 + ], + [ + "▁eerily", + -14.408211708068848 + ], + [ + "▁rickshaw", + -14.408212661743164 + ], + [ + "▁Barracks", + -14.408214569091797 + ], + [ + "▁Tangerine", + -14.408215522766112 + ], + [ + "▁airstrikes", + -14.408215522766112 + ], + [ + "▁Ladakh", + -14.40821647644043 + ], + [ + "▁SINGLE", + -14.40821647644043 + ], + [ + "▁Equatorial", + -14.408217430114746 + ], + [ + "▁Siamese", + -14.408217430114746 + ], + [ + "▁Eventbrite", + 
-14.408218383789062 + ], + [ + "▁unfurl", + -14.408220291137695 + ], + [ + "▁Rosenthal", + -14.40822696685791 + ], + [ + "▁hypocritical", + -14.408228874206545 + ], + [ + "▁Schumann", + -14.408245086669922 + ], + [ + "9.50", + -14.408249855041504 + ], + [ + "Throttle", + -14.408249855041504 + ], + [ + "▁Silvia", + -14.40825366973877 + ], + [ + "▁serpentine", + -14.408254623413086 + ], + [ + "▁Midsummer", + -14.408268928527832 + ], + [ + "▁Ulrich", + -14.408279418945312 + ], + [ + "▁Radial", + -14.408288955688477 + ], + [ + "▁midwifery", + -14.408289909362791 + ], + [ + "▁Northside", + -14.408292770385742 + ], + [ + "▁Pennington", + -14.408309936523438 + ], + [ + "▁Geraldine", + -14.408315658569336 + ], + [ + "▁flapping", + -14.408339500427246 + ], + [ + "▁NetApp", + -14.40834617614746 + ], + [ + "▁Brunson", + -14.408350944519045 + ], + [ + "▁16:9", + -14.408440589904783 + ], + [ + "▁Redevelopment", + -14.40844440460205 + ], + [ + "▁headbands", + -14.408474922180176 + ], + [ + "▁Succulents", + -14.408489227294922 + ], + [ + "Socket", + -14.408493041992188 + ], + [ + "▁bulletproof", + -14.40849781036377 + ], + [ + "ENIX", + -14.408499717712402 + ], + [ + "Formula", + -14.408499717712402 + ], + [ + "COME", + -14.408501625061035 + ], + [ + "Remind", + -14.408501625061035 + ], + [ + "▁Margie", + -14.4085111618042 + ], + [ + "▁Rotten", + -14.408517837524414 + ], + [ + "▁Netball", + -14.40851879119873 + ], + [ + "▁rece", + -14.408538818359377 + ], + [ + "▁Schul", + -14.408547401428224 + ], + [ + "▁24-48", + -14.408563613891602 + ], + [ + "▁serie", + -14.408567428588867 + ], + [ + "▁Pastry", + -14.408574104309082 + ], + [ + "▁Recipients", + -14.408580780029297 + ], + [ + "▁colonel", + -14.408583641052246 + ], + [ + "▁Ebb", + -14.40860080718994 + ], + [ + "SMITH", + -14.408602714538574 + ], + [ + "jani", + -14.408607482910156 + ], + [ + "▁supermodel", + -14.40863037109375 + ], + [ + "▁Refacing", + -14.408635139465332 + ], + [ + "▁Conde", + -14.40864086151123 + ], + [ + "▁indica", + -14.408645629882812 + ], + [ + "Harvest", + -14.40865421295166 + ], + [ + "kilometer", + -14.408672332763672 + ], + [ + "BED", + -14.408673286437988 + ], + [ + "▁spied", + -14.408683776855469 + ], + [ + "▁supercharged", + -14.408699989318848 + ], + [ + "infringement", + -14.408714294433594 + ], + [ + "Integration", + -14.40871524810791 + ], + [ + "Heritage", + -14.408720970153809 + ], + [ + "Applied", + -14.408724784851074 + ], + [ + "▁Avaya", + -14.408750534057615 + ], + [ + "▁timescales", + -14.40876007080078 + ], + [ + "Delhi", + -14.408767700195312 + ], + [ + "toilet", + -14.408788681030272 + ], + [ + "Robinson", + -14.408803939819336 + ], + [ + "▁loafers", + -14.408825874328612 + ], + [ + "awley", + -14.4088716506958 + ], + [ + "▁conversely", + -14.408889770507812 + ], + [ + "miscommunication", + -14.408905029296877 + ], + [ + "▁classifies", + -14.408923149108888 + ], + [ + "SPY", + -14.409008979797363 + ], + [ + "▁Miata", + -14.409010887145996 + ], + [ + "▁gushing", + -14.409011840820312 + ], + [ + "Molly", + -14.40903377532959 + ], + [ + "BURN", + -14.409061431884766 + ], + [ + "▁thawed", + -14.40908908843994 + ], + [ + "▁blackheads", + -14.40911865234375 + ], + [ + "4.2%", + -14.409141540527344 + ], + [ + "▁BAG", + -14.40914535522461 + ], + [ + "ANTE", + -14.409157752990724 + ], + [ + "▁primate", + -14.409172058105469 + ], + [ + "fabric", + -14.409184455871582 + ], + [ + "▁Flair", + -14.409185409545898 + ], + [ + "Reality", + -14.40920352935791 + ], + [ + "▁suppressor", + -14.409272193908691 + ], + [ + 
"improved", + -14.409290313720703 + ], + [ + "Automated", + -14.40931797027588 + ], + [ + "▁Chopin", + -14.409337997436523 + ], + [ + "▁psychoactive", + -14.409378051757812 + ], + [ + "▁Chord", + -14.409381866455078 + ], + [ + "Oversee", + -14.409423828125 + ], + [ + "indo", + -14.409454345703123 + ], + [ + "▁$67", + -14.409467697143556 + ], + [ + "▁incrementally", + -14.40948486328125 + ], + [ + "tere", + -14.409543991088867 + ], + [ + "▁Dona", + -14.409564018249512 + ], + [ + "▁bitmap", + -14.40965461730957 + ], + [ + "bills", + -14.409693717956545 + ], + [ + "gradation", + -14.409722328186035 + ], + [ + "▁gopher", + -14.409767150878906 + ], + [ + "▁Ebooks", + -14.40982151031494 + ], + [ + "aero", + -14.409830093383787 + ], + [ + "▁Zimbabwean", + -14.409838676452637 + ], + [ + "▁Bluestone", + -14.409912109375 + ], + [ + "▁FEI", + -14.40992546081543 + ], + [ + "NATE", + -14.409944534301758 + ], + [ + "▁Amity", + -14.409947395324709 + ], + [ + "▁Receipts", + -14.410062789916992 + ], + [ + "▁Masterpiece", + -14.410090446472168 + ], + [ + "/03", + -14.410106658935549 + ], + [ + "▁Fuku", + -14.410113334655762 + ], + [ + "▁Spiele", + -14.410126686096191 + ], + [ + "bori", + -14.410219192504885 + ], + [ + "bald", + -14.410231590270996 + ], + [ + "Miner", + -14.410244941711426 + ], + [ + "Sibling", + -14.410271644592283 + ], + [ + "keyword", + -14.410280227661133 + ], + [ + "▁PVP", + -14.410280227661133 + ], + [ + "▁clamped", + -14.410346031188965 + ], + [ + "Wearing", + -14.41035270690918 + ], + [ + "▁uncheck", + -14.410423278808594 + ], + [ + "▁Masala", + -14.410470008850098 + ], + [ + "▁landscaper", + -14.410515785217283 + ], + [ + "▁NGS", + -14.410554885864258 + ], + [ + "ATO", + -14.410572052001951 + ], + [ + "▁initialization", + -14.410578727722168 + ], + [ + "▁Skylight", + -14.410589218139648 + ], + [ + "▁dns", + -14.410590171813965 + ], + [ + "▁Pitcher", + -14.410602569580078 + ], + [ + "Complicat", + -14.410650253295898 + ], + [ + "HFA", + -14.410719871520996 + ], + [ + "usz", + -14.410755157470703 + ], + [ + "▁Ketone", + -14.41075611114502 + ], + [ + "Noah", + -14.410759925842283 + ], + [ + "▁humanist", + -14.410844802856444 + ], + [ + "▁enticed", + -14.410873413085938 + ], + [ + "token", + -14.410894393920898 + ], + [ + "▁10.8", + -14.410937309265137 + ], + [ + "▁Snowman", + -14.410984992980955 + ], + [ + "▁obeying", + -14.411043167114258 + ], + [ + "CAE", + -14.411055564880373 + ], + [ + "▁Hemi", + -14.411086082458496 + ], + [ + "▁barite", + -14.411088943481444 + ], + [ + "▁purifiers", + -14.411154747009276 + ], + [ + "▁Revive", + -14.411287307739258 + ], + [ + "▁Throwing", + -14.411296844482422 + ], + [ + "▁compa", + -14.411328315734863 + ], + [ + "ITO", + -14.411369323730469 + ], + [ + "opener", + -14.41140079498291 + ], + [ + "▁1995)", + -14.411401748657228 + ], + [ + "Pride", + -14.411413192749023 + ], + [ + "Fax", + -14.411426544189451 + ], + [ + "▁PTC", + -14.41146755218506 + ], + [ + "▁11-12", + -14.411508560180664 + ], + [ + "▁subclass", + -14.41153049468994 + ], + [ + "▁Sacrifice", + -14.411538124084473 + ], + [ + "itec", + -14.411569595336914 + ], + [ + "fond", + -14.41161060333252 + ], + [ + "pok", + -14.41162109375 + ], + [ + "▁magneto", + -14.411638259887695 + ], + [ + "▁shuttered", + -14.411684036254885 + ], + [ + "▁safeguarded", + -14.41183376312256 + ], + [ + "cfg", + -14.411994934082031 + ], + [ + "liment", + -14.412006378173828 + ], + [ + "▁moi", + -14.412046432495115 + ], + [ + "▁lair", + -14.412065505981444 + ], + [ + "Novel", + -14.412095069885254 + ], + [ + 
"JW", + -14.412128448486328 + ], + [ + "▁Nuevo", + -14.41214370727539 + ], + [ + "Ranger", + -14.412153244018556 + ], + [ + "▁modernizing", + -14.41220760345459 + ], + [ + "ASD", + -14.412270545959473 + ], + [ + "▁lithograph", + -14.412335395812988 + ], + [ + "▁sari", + -14.41234016418457 + ], + [ + "▁coauthor", + -14.412363052368164 + ], + [ + "▁cashed", + -14.412373542785645 + ], + [ + "▁FBS", + -14.41247844696045 + ], + [ + "cong", + -14.412562370300291 + ], + [ + "ilu", + -14.41259765625 + ], + [ + "▁polyphenols", + -14.41261386871338 + ], + [ + "▁Deen", + -14.412639617919922 + ], + [ + "fing", + -14.412665367126465 + ], + [ + "▁inflexible", + -14.412671089172363 + ], + [ + "Ful", + -14.412673950195312 + ], + [ + "novel", + -14.412705421447754 + ], + [ + "▁Hutt", + -14.412728309631348 + ], + [ + "▁Tempest", + -14.412755012512209 + ], + [ + "abelle", + -14.412764549255373 + ], + [ + "nse", + -14.412890434265137 + ], + [ + "▁possessive", + -14.412941932678224 + ], + [ + "▁storeys", + -14.412970542907717 + ], + [ + "▁retreating", + -14.413000106811523 + ], + [ + "Ny", + -14.413005828857422 + ], + [ + "▁Myles", + -14.413021087646484 + ], + [ + "▁Awarded", + -14.413046836853027 + ], + [ + "Megan", + -14.413094520568848 + ], + [ + "▁Coney", + -14.413200378417969 + ], + [ + "Cle", + -14.413204193115234 + ], + [ + "▁METAL", + -14.41323471069336 + ], + [ + "▁GMOs", + -14.413250923156738 + ], + [ + "▁homemaker", + -14.413265228271484 + ], + [ + "▁Introductory", + -14.413399696350098 + ], + [ + "▁overestimate", + -14.413501739501951 + ], + [ + "▁Holst", + -14.413538932800291 + ], + [ + "forma", + -14.413540840148926 + ], + [ + "6100", + -14.413544654846191 + ], + [ + "▁tremors", + -14.413564682006836 + ], + [ + "yoshi", + -14.413715362548828 + ], + [ + "▁Yah", + -14.413761138916016 + ], + [ + "▁experimentally", + -14.41378116607666 + ], + [ + "▁TALK", + -14.41380214691162 + ], + [ + "▁Andri", + -14.413847923278809 + ], + [ + "ector", + -14.41396141052246 + ], + [ + "▁declutter", + -14.41405200958252 + ], + [ + "Dawg", + -14.414052963256836 + ], + [ + "arching", + -14.41408348083496 + ], + [ + "▁Woj", + -14.414122581481934 + ], + [ + "▁Kuta", + -14.414134979248049 + ], + [ + "▁2,5", + -14.414143562316896 + ], + [ + "▁serviceable", + -14.414156913757324 + ], + [ + "ripe", + -14.414198875427246 + ], + [ + "ecological", + -14.41421127319336 + ], + [ + "▁Apes", + -14.414227485656738 + ], + [ + "▁Mahesh", + -14.414239883422852 + ], + [ + "▁tiller", + -14.414380073547363 + ], + [ + "▁(2015", + -14.414385795593262 + ], + [ + "▁mercedes", + -14.414421081542969 + ], + [ + "SUBSCRIBE", + -14.414422035217283 + ], + [ + "▁Clapham", + -14.414422035217283 + ], + [ + "▁Flemish", + -14.414422035217283 + ], + [ + "▁Louboutin", + -14.414422035217283 + ], + [ + "▁Viennese", + -14.414422035217283 + ], + [ + "▁besieged", + -14.414422035217283 + ], + [ + "▁conversing", + -14.414422035217283 + ], + [ + "▁dissonance", + -14.414422035217283 + ], + [ + "▁rendezvous", + -14.414422035217283 + ], + [ + "▁unbridled", + -14.414422035217283 + ], + [ + "▁10.1016/", + -14.414422988891602 + ], + [ + "▁Ephesus", + -14.414423942565918 + ], + [ + "▁Vasquez", + -14.414423942565918 + ], + [ + "Economiz", + -14.41442584991455 + ], + [ + "▁shunned", + -14.4144287109375 + ], + [ + "▁Zenfone", + -14.41443157196045 + ], + [ + "▁Kamagra", + -14.414433479309082 + ], + [ + "Heirloom", + -14.41445255279541 + ], + [ + "▁skydiving", + -14.41445255279541 + ], + [ + "▁Crawley", + -14.414458274841309 + ], + [ + "▁reproach", + -14.41446590423584 + ], 
+ [ + "▁Mahoney", + -14.414473533630373 + ], + [ + "▁Tandem", + -14.414474487304688 + ], + [ + "▁Manfred", + -14.414491653442385 + ], + [ + "▁tastebuds", + -14.414493560791016 + ], + [ + "▁Dragonfly", + -14.41450023651123 + ], + [ + "▁Leyland", + -14.414507865905762 + ], + [ + "Knuckle", + -14.414525032043455 + ], + [ + "▁initiator", + -14.41452693939209 + ], + [ + "▁sundae", + -14.414528846740724 + ], + [ + "▁Nir", + -14.41454029083252 + ], + [ + "▁Patriotic", + -14.414569854736328 + ], + [ + "ansky", + -14.414589881896973 + ], + [ + "▁Sulfate", + -14.414615631103516 + ], + [ + "Cancel", + -14.414621353149414 + ], + [ + "▁Monika", + -14.414623260498049 + ], + [ + "▁Christophe", + -14.414669036865234 + ], + [ + "▁Tangier", + -14.414711952209473 + ], + [ + "▁aorta", + -14.414719581604004 + ], + [ + "▁Villain", + -14.414751052856444 + ], + [ + "▁Scotsman", + -14.41476058959961 + ], + [ + "▁Builderall", + -14.414775848388672 + ], + [ + "▁screeching", + -14.41478443145752 + ], + [ + "▁THINGS", + -14.414793014526367 + ], + [ + "▁Dwelling", + -14.414793968200684 + ], + [ + "▁karat", + -14.414802551269531 + ], + [ + "▁despatched", + -14.41481590270996 + ], + [ + "▁cordially", + -14.414847373962402 + ], + [ + "▁selectivity", + -14.414862632751465 + ], + [ + "▁sativa", + -14.414892196655272 + ], + [ + "▁whistleblowers", + -14.414897918701172 + ], + [ + "Appearing", + -14.41493320465088 + ], + [ + "▁hairdressers", + -14.41493797302246 + ], + [ + "▁Rosso", + -14.414963722229004 + ], + [ + "▁barges", + -14.414998054504396 + ], + [ + "▁jokingly", + -14.415044784545898 + ], + [ + "Competitive", + -14.415071487426758 + ], + [ + "Hurricane", + -14.415072441101074 + ], + [ + "Magazine", + -14.41507339477539 + ], + [ + "Motivated", + -14.415074348449709 + ], + [ + "approach", + -14.415087699890137 + ], + [ + "Wisconsin", + -14.415090560913086 + ], + [ + "Cleveland", + -14.415093421936035 + ], + [ + "▁17:00", + -14.415104866027832 + ], + [ + "caliber", + -14.415114402770996 + ], + [ + "families", + -14.415118217468262 + ], + [ + "Passion", + -14.415196418762209 + ], + [ + "Vel", + -14.415242195129396 + ], + [ + "▁retracted", + -14.415245056152344 + ], + [ + "▁Longview", + -14.415246963500977 + ], + [ + "▁Constantly", + -14.415252685546877 + ], + [ + "Alison", + -14.415264129638672 + ], + [ + "▁Schna", + -14.415267944335938 + ], + [ + "▁Tailored", + -14.415290832519531 + ], + [ + "Confirmed", + -14.415363311767578 + ], + [ + "▁Algerian", + -14.415447235107422 + ], + [ + "▁ROA", + -14.415451049804688 + ], + [ + "rustic", + -14.415482521057127 + ], + [ + "▁6061", + -14.415508270263672 + ], + [ + "▁chronicling", + -14.415538787841797 + ], + [ + "fatal", + -14.41554355621338 + ], + [ + "▁acorns", + -14.415568351745604 + ], + [ + "▁rescuers", + -14.415604591369627 + ], + [ + "▁descriptors", + -14.415657043457031 + ], + [ + "▁Generators", + -14.415685653686523 + ], + [ + "Comply", + -14.415785789489746 + ], + [ + "▁Munch", + -14.415785789489746 + ], + [ + "▁JVC", + -14.415812492370604 + ], + [ + "Delivering", + -14.415822982788086 + ], + [ + "▁Riverview", + -14.415886878967283 + ], + [ + "▁Textured", + -14.415891647338867 + ], + [ + "▁Vastu", + -14.41591739654541 + ], + [ + "▁Merle", + -14.415942192077637 + ], + [ + "silent", + -14.415964126586914 + ], + [ + "Omega", + -14.41598129272461 + ], + [ + "CNS", + -14.416004180908203 + ], + [ + "genes", + -14.41603183746338 + ], + [ + "LCC", + -14.416056632995604 + ], + [ + "▁Arches", + -14.41610050201416 + ], + [ + "hailing", + -14.416142463684082 + ], + [ + 
"▁underperforming", + -14.416145324707031 + ], + [ + "▁Holz", + -14.41617202758789 + ], + [ + "▁Sandbox", + -14.416223526000977 + ], + [ + "▁Etch", + -14.416251182556152 + ], + [ + "kiewicz", + -14.416290283203123 + ], + [ + "Resident", + -14.416297912597656 + ], + [ + "▁Hurd", + -14.416338920593262 + ], + [ + "Moss", + -14.416370391845703 + ], + [ + "▁Mongol", + -14.416391372680664 + ], + [ + "▁Tarzan", + -14.41641902923584 + ], + [ + "▁Mink", + -14.416458129882812 + ], + [ + "▁Bom", + -14.416481971740724 + ], + [ + "▁BIN", + -14.416484832763672 + ], + [ + "uven", + -14.41657829284668 + ], + [ + "#6", + -14.416627883911133 + ], + [ + "Influence", + -14.416634559631348 + ], + [ + "▁Packaged", + -14.416643142700195 + ], + [ + "▁averted", + -14.41679573059082 + ], + [ + "losing", + -14.416796684265137 + ], + [ + "nomi", + -14.416854858398438 + ], + [ + "▁87%", + -14.41686725616455 + ], + [ + "▁Staging", + -14.416899681091309 + ], + [ + "▁TAM", + -14.416936874389648 + ], + [ + "baout", + -14.416963577270508 + ], + [ + "3:10", + -14.417121887207031 + ], + [ + "4.7%", + -14.417173385620115 + ], + [ + "▁Curie", + -14.417193412780762 + ], + [ + "▁40-50", + -14.41721534729004 + ], + [ + "▁Sloane", + -14.417238235473633 + ], + [ + "▁banal", + -14.417283058166504 + ], + [ + "▁ATX", + -14.417308807373049 + ], + [ + "▁readout", + -14.417373657226562 + ], + [ + "cillin", + -14.417407035827637 + ], + [ + "Officer", + -14.417423248291016 + ], + [ + "reform", + -14.417481422424316 + ], + [ + "▁adaption", + -14.41749382019043 + ], + [ + "▁Jackman", + -14.417505264282228 + ], + [ + "▁mulching", + -14.417569160461426 + ], + [ + "$30", + -14.417570114135742 + ], + [ + "▁Chol", + -14.417573928833008 + ], + [ + "▁reestablish", + -14.417591094970703 + ], + [ + "▁Presumably", + -14.417601585388184 + ], + [ + "Azur", + -14.417670249938965 + ], + [ + "CSU", + -14.41767120361328 + ], + [ + "▁Astronaut", + -14.417683601379396 + ], + [ + "Bookmark", + -14.417688369750977 + ], + [ + "▁husbandry", + -14.417719841003418 + ], + [ + "▁TSS", + -14.417726516723633 + ], + [ + "mpi", + -14.4178466796875 + ], + [ + "▁Coy", + -14.41786289215088 + ], + [ + "serial", + -14.417877197265623 + ], + [ + "robin", + -14.41787815093994 + ], + [ + "▁Hexa", + -14.418107986450195 + ], + [ + "▁Learner", + -14.418131828308104 + ], + [ + "ERIC", + -14.418252944946287 + ], + [ + "▁sequenced", + -14.418269157409668 + ], + [ + "▁linemen", + -14.41828727722168 + ], + [ + "polish", + -14.418360710144045 + ], + [ + "Arena", + -14.418402671813965 + ], + [ + "▁ROLL", + -14.41847324371338 + ], + [ + "APR", + -14.418484687805176 + ], + [ + "onite", + -14.418512344360352 + ], + [ + "▁Krak", + -14.418951988220217 + ], + [ + "ORDER", + -14.418954849243164 + ], + [ + "-8)", + -14.418963432312012 + ], + [ + "▁Trad", + -14.418978691101074 + ], + [ + "henry", + -14.419063568115234 + ], + [ + "▁worshiping", + -14.41926383972168 + ], + [ + "▁Peso", + -14.419291496276855 + ], + [ + "▁Cep", + -14.419316291809082 + ], + [ + "▁insufficiency", + -14.419368743896484 + ], + [ + "enzyme", + -14.41937255859375 + ], + [ + "▁Veer", + -14.41943359375 + ], + [ + "3.4%", + -14.419512748718262 + ], + [ + "▁Spaniards", + -14.419536590576172 + ], + [ + "▁humanly", + -14.41957950592041 + ], + [ + "mbri", + -14.419650077819824 + ], + [ + "2160", + -14.419694900512695 + ], + [ + "▁HERO", + -14.419742584228516 + ], + [ + "▁discus", + -14.419757843017578 + ], + [ + "tmp", + -14.419769287109377 + ], + [ + "▁filly", + -14.419772148132324 + ], + [ + "▁delighting", + -14.419857025146484 
+ ], + [ + "Evening", + -14.41987133026123 + ], + [ + "RIG", + -14.419906616210938 + ], + [ + "▁fullback", + -14.41994857788086 + ], + [ + "iggins", + -14.42002010345459 + ], + [ + "irc", + -14.420061111450195 + ], + [ + "ophobia", + -14.420158386230469 + ], + [ + "tomi", + -14.420306205749512 + ], + [ + "Locker", + -14.420331001281738 + ], + [ + "▁CIM", + -14.420331001281738 + ], + [ + "Succulent", + -14.420390129089355 + ], + [ + "▁recliners", + -14.420391082763672 + ], + [ + "Grip", + -14.42039394378662 + ], + [ + "EHR", + -14.420400619506836 + ], + [ + "▁Gard", + -14.42041301727295 + ], + [ + "▁unambiguous", + -14.420428276062012 + ], + [ + "Specialist", + -14.420437812805176 + ], + [ + "▁Flick", + -14.420462608337402 + ], + [ + "Letters", + -14.42052936553955 + ], + [ + "▁ICOs", + -14.420609474182127 + ], + [ + "▁Tripod", + -14.420613288879396 + ], + [ + "▁refit", + -14.420615196228027 + ], + [ + "▁Chongqing", + -14.420672416687012 + ], + [ + "▁DIGITAL", + -14.420672416687012 + ], + [ + "▁Mercantile", + -14.420672416687012 + ], + [ + "▁PICTURE", + -14.420672416687012 + ], + [ + "▁Peshawar", + -14.420672416687012 + ], + [ + "▁Pyrenees", + -14.420672416687012 + ], + [ + "▁Rembrandt", + -14.420672416687012 + ], + [ + "▁Styrofoam", + -14.420672416687012 + ], + [ + "▁WrestleMania", + -14.420672416687012 + ], + [ + "▁archetypal", + -14.420672416687012 + ], + [ + "▁biopsies", + -14.420672416687012 + ], + [ + "▁bollard", + -14.420672416687012 + ], + [ + "▁frenzied", + -14.420672416687012 + ], + [ + "▁hypocrite", + -14.420672416687012 + ], + [ + "▁metastases", + -14.420672416687012 + ], + [ + "▁neurosurgeon", + -14.420672416687012 + ], + [ + "▁prerogative", + -14.420672416687012 + ], + [ + "▁sympathies", + -14.420672416687012 + ], + [ + "▁tetanus", + -14.420672416687012 + ], + [ + "▁Affleck", + -14.420673370361328 + ], + [ + "▁Alastair", + -14.420673370361328 + ], + [ + "▁Geriatric", + -14.420673370361328 + ], + [ + "▁oomph", + -14.420673370361328 + ], + [ + "▁Miyazaki", + -14.420674324035645 + ], + [ + "▁Stearns", + -14.420674324035645 + ], + [ + "▁vivacious", + -14.420674324035645 + ], + [ + "▁whaling", + -14.420674324035645 + ], + [ + "▁Lhasa", + -14.420679092407228 + ], + [ + "▁intracellular", + -14.420680046081545 + ], + [ + "▁porosity", + -14.420680046081545 + ], + [ + "▁omnipresent", + -14.420681953430176 + ], + [ + "▁squirm", + -14.42068576812744 + ], + [ + "▁Plague", + -14.42068862915039 + ], + [ + "▁Anatolia", + -14.420689582824709 + ], + [ + "▁Thistle", + -14.420689582824709 + ], + [ + "▁botched", + -14.420690536499023 + ], + [ + "▁Hannover", + -14.420697212219238 + ], + [ + "▁incisive", + -14.420697212219238 + ], + [ + "▁BUTTON", + -14.42070770263672 + ], + [ + "▁MotoGP", + -14.420709609985352 + ], + [ + "▁McKnight", + -14.420714378356934 + ], + [ + "▁BABY", + -14.42072582244873 + ], + [ + "▁profane", + -14.420730590820312 + ], + [ + "▁enclosing", + -14.420731544494627 + ], + [ + "▁Lakota", + -14.420737266540527 + ], + [ + "▁subfolder", + -14.420738220214844 + ], + [ + "▁SCADA", + -14.42074966430664 + ], + [ + "▁Bowser", + -14.420754432678224 + ], + [ + "▁slither", + -14.420754432678224 + ], + [ + "▁subplot", + -14.42076301574707 + ], + [ + "▁Teenagers", + -14.420768737792969 + ], + [ + "▁Purifier", + -14.420782089233398 + ], + [ + "▁levee", + -14.420790672302246 + ], + [ + "▁IgG", + -14.420808792114258 + ], + [ + "▁Relaxation", + -14.420823097229004 + ], + [ + "▁truest", + -14.420857429504396 + ], + [ + "▁Excludes", + -14.420863151550291 + ], + [ + "Energetic", + 
-14.420868873596191 + ], + [ + "▁Topeka", + -14.42088222503662 + ], + [ + "▁Splice", + -14.420884132385254 + ], + [ + "▁11:15", + -14.420889854431152 + ], + [ + "▁luring", + -14.420907974243164 + ], + [ + "▁baldness", + -14.4209566116333 + ], + [ + "ziel", + -14.420960426330566 + ], + [ + "▁Muffins", + -14.420992851257324 + ], + [ + "identify", + -14.421013832092283 + ], + [ + "damage", + -14.421028137207031 + ], + [ + "▁thorny", + -14.421031951904297 + ], + [ + "▁glyphs", + -14.421050071716309 + ], + [ + "▁Drummer", + -14.421098709106444 + ], + [ + "▁plumage", + -14.421102523803713 + ], + [ + "▁Delft", + -14.421127319335938 + ], + [ + "▁forthright", + -14.421148300170898 + ], + [ + "▁prickly", + -14.42115592956543 + ], + [ + "▁Provisions", + -14.421178817749023 + ], + [ + "▁Suzy", + -14.421210289001465 + ], + [ + "▁wronged", + -14.421217918395996 + ], + [ + "highlight", + -14.42123317718506 + ], + [ + "▁Dc", + -14.421273231506348 + ], + [ + "▁Incorporate", + -14.421274185180664 + ], + [ + "▁perfectionism", + -14.421295166015623 + ], + [ + "▁otters", + -14.421313285827637 + ], + [ + "outer", + -14.42131519317627 + ], + [ + "▁MBE", + -14.421324729919434 + ], + [ + "▁auger", + -14.421347618103027 + ], + [ + "▁SNOW", + -14.421381950378418 + ], + [ + "▁Vivek", + -14.42138385772705 + ], + [ + "▁Buon", + -14.421422004699709 + ], + [ + "▁streetwear", + -14.421428680419922 + ], + [ + "Sketch", + -14.42143726348877 + ], + [ + "▁tampered", + -14.42144775390625 + ], + [ + "▁Chak", + -14.42147445678711 + ], + [ + "destination", + -14.421486854553224 + ], + [ + "renewable", + -14.421488761901855 + ], + [ + "weapon", + -14.421488761901855 + ], + [ + "Baltimore", + -14.421500205993652 + ], + [ + "graded", + -14.421506881713867 + ], + [ + "Matrix", + -14.421510696411133 + ], + [ + "politics", + -14.421514511108398 + ], + [ + "Raymond", + -14.421521186828612 + ], + [ + "▁Steinberg", + -14.421570777893066 + ], + [ + "Costume", + -14.421608924865724 + ], + [ + "▁Bagh", + -14.421621322631836 + ], + [ + "Anchor", + -14.421627044677734 + ], + [ + "Tourism", + -14.42164421081543 + ], + [ + "▁Hogg", + -14.421670913696287 + ], + [ + "▁CROSS", + -14.421706199645996 + ], + [ + "▁intersecting", + -14.421710014343262 + ], + [ + "▁babes", + -14.42172622680664 + ], + [ + "secretary", + -14.421743392944336 + ], + [ + "Enrich", + -14.421791076660156 + ], + [ + "▁Luci", + -14.421833038330078 + ], + [ + "▁Acme", + -14.421918869018556 + ], + [ + "▁Lbs", + -14.42192840576172 + ], + [ + "▁Vim", + -14.421941757202148 + ], + [ + "▁hereafter", + -14.421958923339844 + ], + [ + "GIC", + -14.421984672546388 + ], + [ + "▁pastimes", + -14.422017097473145 + ], + [ + "Kindly", + -14.42203140258789 + ], + [ + "▁Zem", + -14.422036170959473 + ], + [ + "Rounding", + -14.422059059143066 + ], + [ + "Somebody", + -14.422061920166016 + ], + [ + "▁Specialization", + -14.42206573486328 + ], + [ + "▁vihar", + -14.422073364257812 + ], + [ + "candy", + -14.422168731689451 + ], + [ + "▁Coroner", + -14.422173500061035 + ], + [ + "▁Gopal", + -14.422197341918944 + ], + [ + "Frost", + -14.422200202941896 + ], + [ + "▁clings", + -14.42229175567627 + ], + [ + "threaded", + -14.422335624694824 + ], + [ + "nero", + -14.422356605529783 + ], + [ + "uelle", + -14.422411918640137 + ], + [ + "▁astonishingly", + -14.422452926635742 + ], + [ + "972", + -14.422454833984377 + ], + [ + "▁handguns", + -14.422480583190918 + ], + [ + "▁soundscape", + -14.42250633239746 + ], + [ + "▁rationalize", + -14.422526359558104 + ], + [ + "▁constructively", + -14.422629356384276 + 
], + [ + "GAM", + -14.422630310058594 + ], + [ + "▁hovered", + -14.422666549682615 + ], + [ + "▁CGM", + -14.42271614074707 + ], + [ + "Fees", + -14.422736167907717 + ], + [ + "▁UHF", + -14.422770500183104 + ], + [ + "▁Tierra", + -14.422829627990724 + ], + [ + "▁Corte", + -14.422877311706545 + ], + [ + "AUG", + -14.42296028137207 + ], + [ + "▁NOC", + -14.422978401184082 + ], + [ + "Merge", + -14.42298984527588 + ], + [ + "SCAPE", + -14.422990798950195 + ], + [ + "▁Barrio", + -14.423019409179688 + ], + [ + "▁Pelt", + -14.423078536987305 + ], + [ + "▁materialistic", + -14.423112869262695 + ], + [ + "▁durch", + -14.423142433166504 + ], + [ + "▁stashed", + -14.423189163208008 + ], + [ + "▁adopter", + -14.423250198364258 + ], + [ + "KiB", + -14.423288345336914 + ], + [ + "scout", + -14.423324584960938 + ], + [ + "alen", + -14.423337936401367 + ], + [ + "▁$1,200", + -14.42337417602539 + ], + [ + "silicon", + -14.4234037399292 + ], + [ + "▁musicality", + -14.423516273498535 + ], + [ + "▁Mondo", + -14.423548698425291 + ], + [ + "▁£75", + -14.42373752593994 + ], + [ + "▁manly", + -14.42387580871582 + ], + [ + "▁playmaker", + -14.423892974853516 + ], + [ + "4.3%", + -14.42390251159668 + ], + [ + "giver", + -14.423912048339844 + ], + [ + "Focusing", + -14.423955917358398 + ], + [ + "▁ministering", + -14.424004554748535 + ], + [ + "0027", + -14.424031257629396 + ], + [ + "▁Globally", + -14.42403793334961 + ], + [ + "▁Stav", + -14.424047470092772 + ], + [ + "▁Pé", + -14.42404842376709 + ], + [ + "▁Generating", + -14.42408847808838 + ], + [ + "Marg", + -14.424141883850098 + ], + [ + "▁autumnal", + -14.42418098449707 + ], + [ + "Audi", + -14.424267768859863 + ], + [ + "▁Blackbird", + -14.42441177368164 + ], + [ + "▁sportsmen", + -14.42448616027832 + ], + [ + "▁Prometheus", + -14.424511909484863 + ], + [ + "▁PSE", + -14.42451286315918 + ], + [ + "▁abuser", + -14.424534797668455 + ], + [ + "▁ledges", + -14.424559593200684 + ], + [ + "▁MOVIE", + -14.424565315246582 + ], + [ + "kovich", + -14.424630165100098 + ], + [ + "1968", + -14.424644470214844 + ], + [ + "toire", + -14.42467212677002 + ], + [ + "▁Veil", + -14.424759864807127 + ], + [ + "IBA", + -14.42478370666504 + ], + [ + "isp", + -14.42495346069336 + ], + [ + "▁confuses", + -14.42496395111084 + ], + [ + "▁secreted", + -14.424968719482422 + ], + [ + "Lamb", + -14.425046920776367 + ], + [ + "kopf", + -14.425048828125 + ], + [ + "▁baba", + -14.425090789794922 + ], + [ + "Recall", + -14.425127983093262 + ], + [ + "▁RTG", + -14.425155639648438 + ], + [ + "▁finalise", + -14.425203323364258 + ], + [ + "▁Trev", + -14.425212860107422 + ], + [ + "FOL", + -14.425215721130373 + ], + [ + "▁Sprayer", + -14.425227165222168 + ], + [ + "▁suiting", + -14.42524528503418 + ], + [ + "▁yogi", + -14.42524528503418 + ], + [ + "▁commercialize", + -14.425246238708496 + ], + [ + "dama", + -14.425399780273438 + ], + [ + "▁200+", + -14.425517082214355 + ], + [ + "▁mich", + -14.425556182861328 + ], + [ + "musical", + -14.425615310668944 + ], + [ + "▁Manta", + -14.425647735595703 + ], + [ + "▁ATR", + -14.425651550292969 + ], + [ + "▁Lop", + -14.425721168518066 + ], + [ + "▁proclaims", + -14.425774574279783 + ], + [ + "▁caveats", + -14.42579460144043 + ], + [ + "▁blogpost", + -14.425901412963867 + ], + [ + "teria", + -14.426077842712402 + ], + [ + "▁berths", + -14.426107406616213 + ], + [ + "▁Erick", + -14.426115036010742 + ], + [ + "73)", + -14.426145553588867 + ], + [ + "▁clutching", + -14.42616844177246 + ], + [ + "migration", + -14.426183700561523 + ], + [ + "SOS", + 
-14.4262113571167 + ], + [ + "1944", + -14.426240921020508 + ], + [ + "764", + -14.42630100250244 + ], + [ + "▁Cartel", + -14.426342010498049 + ], + [ + "▁freshener", + -14.42636013031006 + ], + [ + "▁CDA", + -14.426395416259766 + ], + [ + "DSS", + -14.426438331604004 + ], + [ + "Distribute", + -14.42644500732422 + ], + [ + "infecting", + -14.426544189453123 + ], + [ + "▁Monogram", + -14.426565170288086 + ], + [ + "Acc", + -14.426572799682615 + ], + [ + "▁despatch", + -14.426600456237791 + ], + [ + "clockwise", + -14.426616668701172 + ], + [ + "rache", + -14.426663398742676 + ], + [ + "▁arming", + -14.426665306091309 + ], + [ + "▁Helper", + -14.426668167114258 + ], + [ + "▁seu", + -14.42672061920166 + ], + [ + "alkyl", + -14.42672348022461 + ], + [ + "IEF", + -14.426770210266112 + ], + [ + "тро", + -14.42677402496338 + ], + [ + "veda", + -14.426782608032228 + ], + [ + "frac", + -14.42682933807373 + ], + [ + "lef", + -14.426944732666016 + ], + [ + "Simultaneous", + -14.426956176757812 + ], + [ + "zimbabwe", + -14.426960945129396 + ], + [ + "▁Endeavour", + -14.426961898803713 + ], + [ + "▁McDonough", + -14.426961898803713 + ], + [ + "▁Pritchard", + -14.426961898803713 + ], + [ + "▁Sphinx", + -14.426961898803713 + ], + [ + "▁Vadodara", + -14.426961898803713 + ], + [ + "▁Valparaiso", + -14.426961898803713 + ], + [ + "▁Vashikaran", + -14.426961898803713 + ], + [ + "▁cognizant", + -14.426961898803713 + ], + [ + "▁crescendo", + -14.426961898803713 + ], + [ + "▁erstwhile", + -14.426961898803713 + ], + [ + "▁unharmed", + -14.426961898803713 + ], + [ + "▁unselfish", + -14.426961898803713 + ], + [ + "▁viscose", + -14.426961898803713 + ], + [ + "▁Enrichment", + -14.426962852478027 + ], + [ + "▁Hemsworth", + -14.426962852478027 + ], + [ + "▁halibut", + -14.426962852478027 + ], + [ + "▁lenient", + -14.426962852478027 + ], + [ + "▁stiletto", + -14.42696475982666 + ], + [ + "▁Moffat", + -14.426965713500977 + ], + [ + "▁Hempstead", + -14.42697048187256 + ], + [ + "▁brooklyn", + -14.426972389221191 + ], + [ + "▁Dreamweaver", + -14.426976203918455 + ], + [ + "▁gusset", + -14.426977157592772 + ], + [ + "▁girth", + -14.426986694335938 + ], + [ + "▁Basalt", + -14.426989555358888 + ], + [ + "▁(1983)", + -14.426996231079102 + ], + [ + "▁coronation", + -14.426997184753418 + ], + [ + "▁ovary", + -14.427000045776367 + ], + [ + "▁wistful", + -14.427002906799316 + ], + [ + "▁Brandenburg", + -14.427008628845217 + ], + [ + "4,500", + -14.427013397216797 + ], + [ + "▁Strickland", + -14.427021980285645 + ], + [ + "Petro", + -14.427029609680176 + ], + [ + "▁passersby", + -14.427041053771973 + ], + [ + "▁1/6", + -14.427081108093262 + ], + [ + "▁poppies", + -14.427083015441896 + ], + [ + "▁Dagger", + -14.42711067199707 + ], + [ + "▁Bronson", + -14.427116394042969 + ], + [ + "bock", + -14.427160263061523 + ], + [ + "▁CMM", + -14.42716121673584 + ], + [ + "▁Nellie", + -14.427164077758787 + ], + [ + "▁flagging", + -14.42717170715332 + ], + [ + "▁Clemente", + -14.42719268798828 + ], + [ + "▁choline", + -14.42719268798828 + ], + [ + "Descend", + -14.427227973937988 + ], + [ + "▁Davy", + -14.427236557006836 + ], + [ + "▁Aniston", + -14.427247047424316 + ], + [ + "▁appendage", + -14.42731475830078 + ], + [ + "▁HDPE", + -14.427343368530272 + ], + [ + "hta", + -14.427359580993652 + ], + [ + "▁bushing", + -14.427372932434082 + ], + [ + "▁Machining", + -14.427382469177246 + ], + [ + "▁brie", + -14.427433967590332 + ], + [ + "▁Ryanair", + -14.427461624145508 + ], + [ + "▁Personalised", + -14.427483558654783 + ], + [ + "▁Penney", + 
-14.427513122558594 + ], + [ + "▁Removed", + -14.427515029907228 + ], + [ + "▁ductless", + -14.427515983581545 + ], + [ + "▁OSS", + -14.427526473999023 + ], + [ + "10:00", + -14.42759609222412 + ], + [ + "▁(179", + -14.427613258361816 + ], + [ + "▁legroom", + -14.427618026733398 + ], + [ + "▁Zie", + -14.42764377593994 + ], + [ + "▁skyrocketing", + -14.42764377593994 + ], + [ + "▁llc", + -14.42766571044922 + ], + [ + "▁EBay", + -14.427668571472168 + ], + [ + "▁Elisha", + -14.42769718170166 + ], + [ + "CARD", + -14.427698135375977 + ], + [ + "▁opinionated", + -14.427700996398926 + ], + [ + "▁dabbled", + -14.427706718444824 + ], + [ + "Toys", + -14.427777290344238 + ], + [ + "▁suppressant", + -14.427777290344238 + ], + [ + "▁curtailed", + -14.42782497406006 + ], + [ + "moth", + -14.42782974243164 + ], + [ + "▁cupola", + -14.427833557128906 + ], + [ + "▁Placid", + -14.427834510803224 + ], + [ + "▁POOL", + -14.4278564453125 + ], + [ + "▁persian", + -14.427895545959473 + ], + [ + "grandmother", + -14.4279146194458 + ], + [ + "Warrior", + -14.427927017211914 + ], + [ + "Album", + -14.427940368652344 + ], + [ + "argument", + -14.427940368652344 + ], + [ + "Criminal", + -14.427947998046877 + ], + [ + "soccer", + -14.42795467376709 + ], + [ + "meets", + -14.42795753479004 + ], + [ + "walmart", + -14.427958488464355 + ], + [ + "constant", + -14.427960395812988 + ], + [ + "Taiwan", + -14.427967071533203 + ], + [ + "▁Atrium", + -14.427971839904783 + ], + [ + "Medicine", + -14.42798137664795 + ], + [ + "desktop", + -14.428006172180176 + ], + [ + "Christina", + -14.428007125854492 + ], + [ + "▁reputational", + -14.428009033203123 + ], + [ + "▁Weinberg", + -14.428013801574709 + ], + [ + "Kirk", + -14.428014755249023 + ], + [ + "LIST", + -14.428023338317873 + ], + [ + "supplier", + -14.42803192138672 + ], + [ + "▁Flax", + -14.428062438964844 + ], + [ + "kang", + -14.428065299987791 + ], + [ + "▁Crushed", + -14.428089141845703 + ], + [ + "ilio", + -14.42809009552002 + ], + [ + "virgin", + -14.4281005859375 + ], + [ + "Equally", + -14.428107261657717 + ], + [ + "Junk", + -14.428182601928713 + ], + [ + "▁desist", + -14.428193092346191 + ], + [ + "▁Jewell", + -14.428199768066406 + ], + [ + "assignment", + -14.428213119506836 + ], + [ + "▁Bock", + -14.428223609924316 + ], + [ + "▁Markup", + -14.428224563598633 + ], + [ + "▁Crosse", + -14.428245544433594 + ], + [ + "rvi", + -14.428258895874023 + ], + [ + "▁chanted", + -14.428292274475098 + ], + [ + "▁RTL", + -14.428339958190918 + ], + [ + "▁Saban", + -14.428362846374512 + ], + [ + "▁electronica", + -14.42837142944336 + ], + [ + "▁Marlon", + -14.428426742553713 + ], + [ + "▁virtualized", + -14.42842960357666 + ], + [ + "▁Kuz", + -14.42844009399414 + ], + [ + "epo", + -14.428478240966797 + ], + [ + "▁Gara", + -14.428479194641112 + ], + [ + "denominated", + -14.428482055664062 + ], + [ + "▁overlaid", + -14.428542137145996 + ], + [ + "▁Bluffs", + -14.428594589233398 + ], + [ + "knife", + -14.428692817687988 + ], + [ + "▁ticketed", + -14.428729057312012 + ], + [ + "ference", + -14.428770065307615 + ], + [ + "originally", + -14.428772926330566 + ], + [ + "▁Zing", + -14.428773880004885 + ], + [ + "▁1994)", + -14.428792953491213 + ], + [ + "curve", + -14.428808212280272 + ], + [ + "▁Seiko", + -14.428828239440918 + ], + [ + "▁intrude", + -14.428865432739258 + ], + [ + "▁inverters", + -14.428910255432127 + ], + [ + "▁SDI", + -14.42908000946045 + ], + [ + "▁Mutt", + -14.429106712341309 + ], + [ + "▁YoY", + -14.429106712341309 + ], + [ + "togethers", + -14.42912483215332 + 
], + [ + "▁Dory", + -14.42913055419922 + ], + [ + "▁mingled", + -14.42914867401123 + ], + [ + "▁Yoo", + -14.429160118103027 + ], + [ + "▁memorialize", + -14.429281234741213 + ], + [ + "Leaf", + -14.429296493530272 + ], + [ + "udah", + -14.429298400878906 + ], + [ + "▁urinate", + -14.42930030822754 + ], + [ + "arika", + -14.429546356201172 + ], + [ + "snake", + -14.429561614990234 + ], + [ + "▁thro", + -14.429569244384766 + ], + [ + "▁timesheet", + -14.42958927154541 + ], + [ + "lichen", + -14.429633140563965 + ], + [ + "▁Sneak", + -14.429657936096191 + ], + [ + "▁$57", + -14.429710388183594 + ], + [ + "▁pew", + -14.42972183227539 + ], + [ + "▁Loser", + -14.429865837097168 + ], + [ + "cabin", + -14.429889678955078 + ], + [ + "ciency", + -14.429896354675291 + ], + [ + "WORD", + -14.429986953735352 + ], + [ + "antara", + -14.43001937866211 + ], + [ + "odont", + -14.43003749847412 + ], + [ + "▁clichés", + -14.430088996887209 + ], + [ + "▁HealthCare", + -14.430093765258787 + ], + [ + "produ", + -14.430130004882812 + ], + [ + "▁Boz", + -14.430145263671877 + ], + [ + "▁nibbles", + -14.43021297454834 + ], + [ + "▁repro", + -14.43021297454834 + ], + [ + "AWA", + -14.430254936218262 + ], + [ + "▁astral", + -14.430325508117676 + ], + [ + "▁SMBs", + -14.430368423461914 + ], + [ + "▁mountainside", + -14.43039608001709 + ], + [ + "▁stabilise", + -14.43049144744873 + ], + [ + "▁McIlroy", + -14.430511474609377 + ], + [ + "berlin", + -14.430559158325195 + ], + [ + "Muslims", + -14.430603981018066 + ], + [ + "kken", + -14.43061351776123 + ], + [ + "▁italics", + -14.430636405944824 + ], + [ + "cked", + -14.43068027496338 + ], + [ + "▁Dib", + -14.430692672729492 + ], + [ + "▁Killed", + -14.430715560913086 + ], + [ + "▁primetime", + -14.430724143981934 + ], + [ + "leri", + -14.430768966674805 + ], + [ + "▁slimy", + -14.430794715881348 + ], + [ + "▁pituitary", + -14.430825233459473 + ], + [ + "▁Milner", + -14.430907249450684 + ], + [ + "▁nameless", + -14.430919647216797 + ], + [ + "▁mace", + -14.43093967437744 + ], + [ + "programs", + -14.430967330932615 + ], + [ + "NUT", + -14.430974006652832 + ], + [ + "▁Demolition", + -14.430984497070312 + ], + [ + "ierung", + -14.43101692199707 + ], + [ + "FIC", + -14.431036949157717 + ], + [ + "▁30-45", + -14.431123733520508 + ], + [ + "▁Houl", + -14.43113136291504 + ], + [ + "▁hurl", + -14.43120288848877 + ], + [ + "▁5500", + -14.431206703186035 + ], + [ + "Superb", + -14.431246757507324 + ], + [ + "ickle", + -14.431265830993652 + ], + [ + "Philosopher", + -14.431411743164062 + ], + [ + "europa", + -14.431459426879885 + ], + [ + "▁Chubb", + -14.431468963623049 + ], + [ + "CRO", + -14.431473731994627 + ], + [ + "▁suc", + -14.43153190612793 + ], + [ + "▁EBS", + -14.43153953552246 + ], + [ + "Promo", + -14.431550979614258 + ], + [ + "▁EVE", + -14.431594848632812 + ], + [ + "graders", + -14.43164348602295 + ], + [ + "▁Assumption", + -14.431775093078612 + ], + [ + "▁Ayers", + -14.431817054748535 + ], + [ + "▁Dua", + -14.43184757232666 + ], + [ + "▁Sebasti", + -14.431859970092772 + ], + [ + "Gifts", + -14.43187427520752 + ], + [ + "▁recollect", + -14.431897163391112 + ], + [ + "▁SCAR", + -14.431914329528809 + ], + [ + "ower", + -14.431918144226074 + ], + [ + "EIC", + -14.43193531036377 + ], + [ + "aggy", + -14.43199348449707 + ], + [ + "9-11", + -14.432022094726562 + ], + [ + "▁Perf", + -14.432042121887209 + ], + [ + "ttles", + -14.432371139526367 + ], + [ + "EEM", + -14.432435989379885 + ], + [ + "▁22.5", + -14.432459831237791 + ], + [ + "▁Kinda", + -14.4325590133667 + ], + [ 
+ "▁corrode", + -14.432690620422363 + ], + [ + "▁PFA", + -14.432862281799316 + ], + [ + "rner", + -14.433021545410156 + ], + [ + "▁citywide", + -14.433119773864746 + ], + [ + "spinal", + -14.433134078979492 + ], + [ + "▁Patrons", + -14.433145523071287 + ], + [ + "▁Manner", + -14.433150291442873 + ], + [ + "▁Elan", + -14.43315887451172 + ], + [ + "IRO", + -14.433201789855955 + ], + [ + "cosm", + -14.433218002319336 + ], + [ + "bild", + -14.43323802947998 + ], + [ + "Asparagus", + -14.433290481567385 + ], + [ + "SATURDAY", + -14.433290481567385 + ], + [ + "▁Bikram", + -14.433290481567385 + ], + [ + "▁Hibiscus", + -14.433290481567385 + ], + [ + "▁Recessed", + -14.433290481567385 + ], + [ + "▁Sandoval", + -14.433290481567385 + ], + [ + "▁Yiddish", + -14.433290481567385 + ], + [ + "▁contingencies", + -14.433290481567385 + ], + [ + "▁finpecia", + -14.433290481567385 + ], + [ + "▁heterogeneity", + -14.433290481567385 + ], + [ + "▁intensifies", + -14.433290481567385 + ], + [ + "▁kaleidoscope", + -14.433290481567385 + ], + [ + "▁metallurgical", + -14.433290481567385 + ], + [ + "▁nonsensical", + -14.433290481567385 + ], + [ + "▁pancreatitis", + -14.433290481567385 + ], + [ + "▁tretinoin", + -14.433290481567385 + ], + [ + "▁whistling", + -14.433290481567385 + ], + [ + "Optimizing", + -14.4332914352417 + ], + [ + "▁cadmium", + -14.4332914352417 + ], + [ + "▁cosmology", + -14.4332914352417 + ], + [ + "▁poncho", + -14.4332914352417 + ], + [ + "▁Boracay", + -14.433292388916016 + ], + [ + "▁intolerable", + -14.433292388916016 + ], + [ + "▁atrocious", + -14.433293342590332 + ], + [ + "▁stratosphere", + -14.433293342590332 + ], + [ + "▁Ragnarok", + -14.43329620361328 + ], + [ + "▁anthropological", + -14.433297157287598 + ], + [ + "▁hyperbole", + -14.433297157287598 + ], + [ + "▁marigold", + -14.433297157287598 + ], + [ + "▁menthol", + -14.43329906463623 + ], + [ + "▁Empathy", + -14.433305740356444 + ], + [ + "htaccess", + -14.433307647705078 + ], + [ + "▁furlough", + -14.433307647705078 + ], + [ + "▁Reginald", + -14.433309555053713 + ], + [ + "▁Cydia", + -14.433329582214355 + ], + [ + "▁bootloader", + -14.433332443237305 + ], + [ + "▁Makati", + -14.433347702026367 + ], + [ + "▁Instrumentation", + -14.433377265930176 + ], + [ + "▁garlands", + -14.433391571044922 + ], + [ + "▁nitrous", + -14.433405876159668 + ], + [ + "▁Eucharistic", + -14.4334135055542 + ], + [ + "▁1-1/2", + -14.433419227600098 + ], + [ + "▁Creamery", + -14.433453559875488 + ], + [ + "▁gingham", + -14.43346118927002 + ], + [ + "▁vanguard", + -14.433465003967283 + ], + [ + "aime", + -14.433469772338867 + ], + [ + "▁Dunham", + -14.433475494384766 + ], + [ + "▁LCA", + -14.433480262756348 + ], + [ + "▁Terence", + -14.433500289916992 + ], + [ + "▁irritants", + -14.433515548706056 + ], + [ + "▁Parton", + -14.43351936340332 + ], + [ + "helping", + -14.433524131774902 + ], + [ + "▁wraparound", + -14.43352508544922 + ], + [ + "▁Tanaka", + -14.433526992797852 + ], + [ + "969", + -14.433528900146484 + ], + [ + "▁complicity", + -14.43357276916504 + ], + [ + "▁Kovac", + -14.43358325958252 + ], + [ + "▁Lviv", + -14.43359088897705 + ], + [ + "▁Purity", + -14.433595657348633 + ], + [ + "Competing", + -14.433686256408691 + ], + [ + "▁Falkland", + -14.433696746826172 + ], + [ + "▁Binder", + -14.433697700500488 + ], + [ + "Mint", + -14.433701515197754 + ], + [ + "▁Countless", + -14.433706283569336 + ], + [ + "▁Mascot", + -14.433719635009766 + ], + [ + "▁succinctly", + -14.433745384216309 + ], + [ + "▁disengaged", + -14.433794975280762 + ], + [ + "▁sco", + 
-14.433844566345217 + ], + [ + "contest", + -14.43386173248291 + ], + [ + "rigue", + -14.433876991271973 + ], + [ + "▁Crooked", + -14.43394947052002 + ], + [ + "▁Frac", + -14.433956146240234 + ], + [ + "▁aroused", + -14.434025764465332 + ], + [ + "▁Rouse", + -14.43403148651123 + ], + [ + "▁sprain", + -14.434040069580078 + ], + [ + "▁Aventura", + -14.434078216552734 + ], + [ + "Noise", + -14.434101104736328 + ], + [ + "Mech", + -14.434184074401855 + ], + [ + "uder", + -14.434196472167969 + ], + [ + "▁triathlete", + -14.434207916259766 + ], + [ + "ococcus", + -14.434231758117676 + ], + [ + "anchor", + -14.43425750732422 + ], + [ + "▁robber", + -14.43426513671875 + ], + [ + "▁reelection", + -14.434279441833496 + ], + [ + "▁earnestly", + -14.434293746948242 + ], + [ + "configuration", + -14.43429470062256 + ], + [ + "▁Curling", + -14.434296607971191 + ], + [ + "▁GPM", + -14.434319496154783 + ], + [ + "Paw", + -14.434356689453123 + ], + [ + "AEP", + -14.434410095214844 + ], + [ + "Soccer", + -14.43442726135254 + ], + [ + "campaign", + -14.434429168701172 + ], + [ + "Cindy", + -14.43443489074707 + ], + [ + "Predict", + -14.43443489074707 + ], + [ + "▁juror", + -14.434439659118652 + ], + [ + "absorbing", + -14.434442520141602 + ], + [ + "Harvard", + -14.434449195861816 + ], + [ + "dangerous", + -14.434450149536133 + ], + [ + "involved", + -14.434452056884766 + ], + [ + "leadership", + -14.434494972229004 + ], + [ + "ropri", + -14.434514045715332 + ], + [ + "▁persevered", + -14.434539794921877 + ], + [ + "▁MEC", + -14.434544563293455 + ], + [ + "▁wimp", + -14.434568405151367 + ], + [ + "▁Anima", + -14.43460750579834 + ], + [ + "▁Rode", + -14.434633255004885 + ], + [ + "pita", + -14.434638023376465 + ], + [ + "▁Correa", + -14.434640884399414 + ], + [ + "▁Chorale", + -14.434656143188477 + ], + [ + "▁Suri", + -14.434662818908691 + ], + [ + "▁Disturb", + -14.434732437133787 + ], + [ + "MOVE", + -14.434747695922852 + ], + [ + "▁reissued", + -14.43476676940918 + ], + [ + "hazard", + -14.434770584106444 + ], + [ + "cert", + -14.434781074523926 + ], + [ + "95,000", + -14.43480110168457 + ], + [ + "Filmmaker", + -14.434826850891112 + ], + [ + "▁Seasoning", + -14.434861183166504 + ], + [ + "investor", + -14.434892654418944 + ], + [ + "▁tamp", + -14.434906005859377 + ], + [ + "▁Lively", + -14.434942245483398 + ], + [ + "hike", + -14.435003280639648 + ], + [ + "Citation", + -14.435009956359863 + ], + [ + "Wanna", + -14.435098648071287 + ], + [ + "▁Pinging", + -14.435124397277832 + ], + [ + "▁Fogg", + -14.435131072998049 + ], + [ + "▁Sind", + -14.43517780303955 + ], + [ + "▁$299", + -14.435184478759766 + ], + [ + "Gel", + -14.435194969177246 + ], + [ + "▁vials", + -14.435283660888672 + ], + [ + "Aware", + -14.435300827026367 + ], + [ + "▁Probation", + -14.435300827026367 + ], + [ + "umer", + -14.43544864654541 + ], + [ + "Slim", + -14.43545913696289 + ], + [ + "BAL", + -14.435510635375977 + ], + [ + "CMC", + -14.435552597045898 + ], + [ + "Hebrews", + -14.435564041137695 + ], + [ + "▁Oberlin", + -14.435572624206545 + ], + [ + "▁Sleigh", + -14.435592651367188 + ], + [ + "▁Dio", + -14.435623168945312 + ], + [ + "tunnel", + -14.435669898986816 + ], + [ + "drinking", + -14.435674667358398 + ], + [ + "▁Jacque", + -14.435696601867676 + ], + [ + "▁Scandal", + -14.43575382232666 + ], + [ + "▁disallow", + -14.435826301574709 + ], + [ + "▁Sine", + -14.435927391052246 + ], + [ + "▁shearing", + -14.435991287231444 + ], + [ + "roche", + -14.436017036437988 + ], + [ + "weave", + -14.43602180480957 + ], + [ + "▁leek", + 
-14.436053276062012 + ], + [ + "unge", + -14.43616771697998 + ], + [ + "▁warships", + -14.436245918273926 + ], + [ + "▁Keenan", + -14.436285972595217 + ], + [ + "urate", + -14.436392784118652 + ], + [ + "RSP", + -14.43652629852295 + ], + [ + "▁sulfide", + -14.436553001403809 + ], + [ + "▁HEAT", + -14.436564445495604 + ], + [ + "▁geno", + -14.436585426330566 + ], + [ + "▁Shorter", + -14.436670303344728 + ], + [ + "$500", + -14.436837196350098 + ], + [ + "érie", + -14.43694305419922 + ], + [ + "▁bugger", + -14.436949729919434 + ], + [ + "▁Gifted", + -14.436983108520508 + ], + [ + "▁navel", + -14.437092781066896 + ], + [ + "hava", + -14.437180519104004 + ], + [ + "▁bestsellers", + -14.43735694885254 + ], + [ + "Represent", + -14.437382698059082 + ], + [ + "▁4-4", + -14.437406539916992 + ], + [ + "11-13", + -14.437424659729004 + ], + [ + "▁(2006", + -14.43742561340332 + ], + [ + "▁Huh", + -14.437442779541016 + ], + [ + "▁$3.00", + -14.43748378753662 + ], + [ + "Joel", + -14.437492370605469 + ], + [ + "▁beekeepers", + -14.437494277954102 + ], + [ + "▁Epilepsy", + -14.437498092651367 + ], + [ + "piring", + -14.43750286102295 + ], + [ + "▁hostages", + -14.437507629394531 + ], + [ + "▁Steen", + -14.43769645690918 + ], + [ + "▁truckers", + -14.437772750854492 + ], + [ + "JAY", + -14.437785148620604 + ], + [ + "▁scrim", + -14.4378662109375 + ], + [ + "▁OPT", + -14.437928199768066 + ], + [ + "▁Frida", + -14.438000679016112 + ], + [ + "▁shined", + -14.438115119934082 + ], + [ + "GBP", + -14.438206672668455 + ], + [ + "reaction", + -14.43825340270996 + ], + [ + "▁centimetres", + -14.438255310058594 + ], + [ + "▁Tianjin", + -14.438302040100098 + ], + [ + "crush", + -14.438343048095703 + ], + [ + "Maya", + -14.43844985961914 + ], + [ + "▁morphs", + -14.43845272064209 + ], + [ + "Explorer", + -14.438530921936035 + ], + [ + "▁Sef", + -14.438551902770996 + ], + [ + "ROV", + -14.438737869262695 + ], + [ + "jum", + -14.438759803771973 + ], + [ + "iffer", + -14.438884735107422 + ], + [ + "▁Armada", + -14.439093589782717 + ], + [ + "▁afflict", + -14.439098358154297 + ], + [ + "glut", + -14.439105987548828 + ], + [ + "▁yumm", + -14.43914031982422 + ], + [ + "stud", + -14.43919849395752 + ], + [ + "noc", + -14.439229011535645 + ], + [ + "▁Ample", + -14.439242362976074 + ], + [ + "▁Parallels", + -14.439285278320312 + ], + [ + "steer", + -14.43929958343506 + ], + [ + "SaaS", + -14.439404487609863 + ], + [ + "Nico", + -14.439471244812012 + ], + [ + "▁exon", + -14.43948745727539 + ], + [ + "▁SMD", + -14.439509391784668 + ], + [ + "dab", + -14.439616203308104 + ], + [ + "onzo", + -14.439652442932127 + ], + [ + "Cholesterol", + -14.43966007232666 + ], + [ + "Inflammation", + -14.43966007232666 + ], + [ + "Influenza", + -14.43966007232666 + ], + [ + "Marguerite", + -14.43966007232666 + ], + [ + "SHIELD", + -14.43966007232666 + ], + [ + "unpredictability", + -14.43966007232666 + ], + [ + "▁BELIEVE", + -14.43966007232666 + ], + [ + "▁Pedestal", + -14.43966007232666 + ], + [ + "▁Salvatore", + -14.43966007232666 + ], + [ + "▁abstinence", + -14.43966007232666 + ], + [ + "▁astigmatism", + -14.43966007232666 + ], + [ + "▁kerosene", + -14.43966007232666 + ], + [ + "▁nematode", + -14.43966007232666 + ], + [ + "▁brevity", + -14.439661026000977 + ], + [ + "▁rucksack", + -14.439661026000977 + ], + [ + "▁Palladium", + -14.439661979675291 + ], + [ + "▁peppercorns", + -14.439661979675291 + ], + [ + "Exploit", + -14.439663887023926 + ], + [ + "▁acacia", + -14.43966579437256 + ], + [ + "▁immunology", + -14.43966579437256 + ], + [ + 
"▁juried", + -14.43966579437256 + ], + [ + "▁interdependence", + -14.439668655395508 + ], + [ + "▁Boolean", + -14.439671516418455 + ], + [ + "expand", + -14.439674377441406 + ], + [ + "▁Petaluma", + -14.43967628479004 + ], + [ + "▁grotto", + -14.439678192138672 + ], + [ + "▁CANNOT", + -14.439680099487305 + ], + [ + "imir", + -14.439682960510254 + ], + [ + "▁hallowed", + -14.439692497253418 + ], + [ + "▁£100,000", + -14.439701080322266 + ], + [ + "▁sporadically", + -14.439711570739746 + ], + [ + "▁Georgina", + -14.439715385437012 + ], + [ + "▁Lynchburg", + -14.439719200134276 + ], + [ + "TERM", + -14.439740180969238 + ], + [ + "amate", + -14.439741134643556 + ], + [ + "▁atelier", + -14.439764976501465 + ], + [ + "▁Flagler", + -14.43980312347412 + ], + [ + "▁Heim", + -14.43980598449707 + ], + [ + "▁contort", + -14.439809799194336 + ], + [ + "▁Anesthesia", + -14.439812660217283 + ], + [ + "▁2013-2014", + -14.4398193359375 + ], + [ + "▁Radford", + -14.439823150634766 + ], + [ + "▁launchers", + -14.43985652923584 + ], + [ + "▁propped", + -14.439871788024902 + ], + [ + "▁Slight", + -14.439876556396484 + ], + [ + "Satisfy", + -14.439879417419434 + ], + [ + "▁Fid", + -14.439885139465332 + ], + [ + "▁Drought", + -14.439908027648926 + ], + [ + "▁Pryor", + -14.439921379089355 + ], + [ + "▁botnet", + -14.439921379089355 + ], + [ + "▁decadence", + -14.43992805480957 + ], + [ + "▁Jah", + -14.43995475769043 + ], + [ + "▁SQLite", + -14.439974784851074 + ], + [ + "▁pressuring", + -14.439988136291504 + ], + [ + "▁stepfather", + -14.440022468566896 + ], + [ + "▁Complexity", + -14.440035820007324 + ], + [ + "▁Quakers", + -14.440075874328612 + ], + [ + "▁royale", + -14.440085411071776 + ], + [ + "▁Westpac", + -14.440115928649902 + ], + [ + "▁pathologists", + -14.440155982971191 + ], + [ + "▁Sema", + -14.44022274017334 + ], + [ + "▁Spinner", + -14.440227508544922 + ], + [ + "▁lvl", + -14.44023609161377 + ], + [ + "rello", + -14.440240859985352 + ], + [ + "▁flaking", + -14.440305709838867 + ], + [ + "▁CHICAGO", + -14.440352439880373 + ], + [ + "afel", + -14.440367698669434 + ], + [ + "▁Thyroid", + -14.4403715133667 + ], + [ + "Vit", + -14.440375328063965 + ], + [ + "▁Emblem", + -14.440417289733888 + ], + [ + "▁assimilated", + -14.440458297729492 + ], + [ + "▁nab", + -14.440567016601562 + ], + [ + "▁79%", + -14.44057273864746 + ], + [ + "ocha", + -14.440595626831056 + ], + [ + "▁materiality", + -14.440598487854004 + ], + [ + "1:30", + -14.440608978271484 + ], + [ + "▁acrobat", + -14.440621376037598 + ], + [ + "USC", + -14.440644264221191 + ], + [ + "trotting", + -14.440654754638672 + ], + [ + "▁Encouraging", + -14.44067096710205 + ], + [ + "▁Himalaya", + -14.440673828125 + ], + [ + "▁HRD", + -14.44068431854248 + ], + [ + "▁Voss", + -14.440712928771973 + ], + [ + "dominant", + -14.44073486328125 + ], + [ + "енн", + -14.440738677978516 + ], + [ + "activities", + -14.44075870513916 + ], + [ + "▁Capable", + -14.44075870513916 + ], + [ + "▁remediate", + -14.440789222717283 + ], + [ + "SPAN", + -14.440814018249512 + ], + [ + "restore", + -14.440820693969728 + ], + [ + "▁Gilles", + -14.440873146057127 + ], + [ + "utton", + -14.440874099731444 + ], + [ + "Worked", + -14.440885543823242 + ], + [ + "microbial", + -14.440889358520508 + ], + [ + "▁Brushes", + -14.44093132019043 + ], + [ + "▁devouring", + -14.440947532653809 + ], + [ + "▁lentil", + -14.44095516204834 + ], + [ + "Appliance", + -14.440958976745604 + ], + [ + "Temporary", + -14.440961837768556 + ], + [ + "Juice", + -14.440964698791504 + ], + [ + "Aluminum", + 
-14.4409761428833 + ], + [ + "Chin", + -14.440978050231934 + ], + [ + "Alabama", + -14.4409818649292 + ], + [ + "▁Buren", + -14.440990447998049 + ], + [ + "Bluetooth", + -14.441006660461426 + ], + [ + "disease", + -14.441014289855955 + ], + [ + "STS", + -14.441057205200195 + ], + [ + "▁BREAK", + -14.441062927246094 + ], + [ + "Equity", + -14.44106674194336 + ], + [ + "Durable", + -14.441081047058104 + ], + [ + "ADHD", + -14.441082954406738 + ], + [ + "diabetes", + -14.441164016723633 + ], + [ + "NOC", + -14.44119930267334 + ], + [ + "▁HSP", + -14.4412260055542 + ], + [ + "flung", + -14.441231727600098 + ], + [ + "▁MATCH", + -14.44123363494873 + ], + [ + "TIM", + -14.44123649597168 + ], + [ + "▁Budgeting", + -14.441247940063477 + ], + [ + "SPECIAL", + -14.441261291503906 + ], + [ + "▁Woodside", + -14.441264152526855 + ], + [ + "▁Izzy", + -14.441271781921388 + ], + [ + "Blessed", + -14.44130802154541 + ], + [ + "▁ABN", + -14.441308975219728 + ], + [ + "▁Stead", + -14.441308975219728 + ], + [ + "energize", + -14.441359519958496 + ], + [ + "00:3", + -14.441386222839355 + ], + [ + "▁Pillars", + -14.44140338897705 + ], + [ + "toxicity", + -14.441414833068848 + ], + [ + "▁Wetlands", + -14.44143295288086 + ], + [ + "▁mumble", + -14.441445350646973 + ], + [ + "▁Arsene", + -14.441554069519045 + ], + [ + "▁Noosa", + -14.44156551361084 + ], + [ + "▁Hatter", + -14.441617012023926 + ], + [ + "mob", + -14.441624641418455 + ], + [ + "▁IGF", + -14.441640853881836 + ], + [ + "VEN", + -14.441658020019531 + ], + [ + "▁MAKES", + -14.441667556762695 + ], + [ + "▁14:1", + -14.441892623901367 + ], + [ + "▁Earle", + -14.441914558410645 + ], + [ + "idder", + -14.44191551208496 + ], + [ + "▁VIR", + -14.441926956176758 + ], + [ + "▁Waterside", + -14.441954612731934 + ], + [ + "▁inept", + -14.441970825195312 + ], + [ + "coupon", + -14.441974639892578 + ], + [ + "kwe", + -14.441998481750488 + ], + [ + "▁ARP", + -14.442002296447754 + ], + [ + "▁headpiece", + -14.442071914672852 + ], + [ + "pyr", + -14.442106246948242 + ], + [ + "Survivors", + -14.44212245941162 + ], + [ + "gartner", + -14.442126274108888 + ], + [ + "OLI", + -14.442197799682615 + ], + [ + "▁Odell", + -14.4422025680542 + ], + [ + "Bend", + -14.442235946655272 + ], + [ + "▁peu", + -14.442270278930664 + ], + [ + "▁Impressionist", + -14.442350387573242 + ], + [ + "TEA", + -14.442427635192873 + ], + [ + "▁envisioning", + -14.442441940307615 + ], + [ + "Brook", + -14.442442893981934 + ], + [ + "▁Barking", + -14.442561149597168 + ], + [ + "▁CYP", + -14.442587852478027 + ], + [ + "wooden", + -14.442753791809082 + ], + [ + "steroidal", + -14.442761421203612 + ], + [ + "▁Substitute", + -14.44276237487793 + ], + [ + "▁Roadside", + -14.442888259887695 + ], + [ + "▁BCA", + -14.442909240722656 + ], + [ + "▁Modest", + -14.44295883178711 + ], + [ + "794", + -14.44296646118164 + ], + [ + "rasa", + -14.443008422851562 + ], + [ + "▁PED", + -14.443013191223145 + ], + [ + "GENT", + -14.443098068237305 + ], + [ + "behaved", + -14.44310188293457 + ], + [ + "Swap", + -14.443115234375 + ], + [ + "▁Wigs", + -14.44313907623291 + ], + [ + "▁SSB", + -14.443166732788086 + ], + [ + "ilin", + -14.4431734085083 + ], + [ + "▁Efforts", + -14.443198204040527 + ], + [ + "Nail", + -14.44321060180664 + ], + [ + "▁Suz", + -14.443281173706056 + ], + [ + "▁CBE", + -14.443284034729004 + ], + [ + "(11):", + -14.443307876586914 + ], + [ + "wir", + -14.44334888458252 + ], + [ + "▁facile", + -14.443365097045898 + ], + [ + "constitutionally", + -14.443367958068848 + ], + [ + "hamma", + 
-14.443374633789062 + ], + [ + "plugin", + -14.443432807922363 + ], + [ + "▁cashflow", + -14.443523406982422 + ], + [ + "Oxley", + -14.443525314331056 + ], + [ + "▁NAPA", + -14.443531036376951 + ], + [ + "▁Shave", + -14.44355583190918 + ], + [ + "Transition", + -14.44357967376709 + ], + [ + "Stella", + -14.443617820739746 + ], + [ + "▁Newberry", + -14.443660736083984 + ], + [ + "▁fleshy", + -14.443771362304688 + ], + [ + "▁Adamson", + -14.44379425048828 + ], + [ + "▁Masses", + -14.443825721740724 + ], + [ + "▁7-1", + -14.443839073181152 + ], + [ + "hrew", + -14.44387149810791 + ], + [ + "breakers", + -14.44389533996582 + ], + [ + "▁madly", + -14.443923950195312 + ], + [ + "▁Nama", + -14.44393825531006 + ], + [ + "▁Manas", + -14.443973541259766 + ], + [ + "▁Amba", + -14.44400119781494 + ], + [ + "dare", + -14.444087028503418 + ], + [ + "▁Stride", + -14.444162368774414 + ], + [ + "▁webcams", + -14.444208145141602 + ], + [ + "▁kitsch", + -14.444375038146973 + ], + [ + "alga", + -14.444409370422363 + ], + [ + "▁caffeinated", + -14.444520950317385 + ], + [ + "▁Cac", + -14.444558143615724 + ], + [ + "prize", + -14.44456958770752 + ], + [ + "Kur", + -14.444637298583984 + ], + [ + "▁NEEDS", + -14.444676399230955 + ], + [ + "▁fanatical", + -14.444818496704102 + ], + [ + "▁Elves", + -14.44487476348877 + ], + [ + "▁refinements", + -14.444908142089844 + ], + [ + "rabbit", + -14.444911003112791 + ], + [ + "▁quarterfinal", + -14.444923400878906 + ], + [ + "▁pleat", + -14.4449462890625 + ], + [ + "FZ", + -14.444989204406738 + ], + [ + "▁Ecole", + -14.44501495361328 + ], + [ + "▁Colle", + -14.445115089416504 + ], + [ + "▁fiscally", + -14.44512176513672 + ], + [ + "▁preterm", + -14.445273399353027 + ], + [ + "▁saddled", + -14.445277214050291 + ], + [ + "tonal", + -14.445420265197754 + ], + [ + "▁disintegrate", + -14.445467948913574 + ], + [ + "rilla", + -14.44548511505127 + ], + [ + "▁dished", + -14.445488929748535 + ], + [ + "▁Tsa", + -14.445552825927734 + ], + [ + "ANTS", + -14.44558334350586 + ], + [ + "▁reconfigure", + -14.44558334350586 + ], + [ + "▁Homewood", + -14.445601463317873 + ], + [ + "Mak", + -14.44560432434082 + ], + [ + "weaver", + -14.445648193359377 + ], + [ + "▁thine", + -14.445731163024902 + ], + [ + "▁airframe", + -14.445770263671877 + ], + [ + "▁Trig", + -14.44577980041504 + ], + [ + "undy", + -14.445780754089355 + ], + [ + "▁multipliers", + -14.44589138031006 + ], + [ + "▁squint", + -14.445899963378906 + ], + [ + "3.9%", + -14.445964813232422 + ], + [ + "Lime", + -14.44603443145752 + ], + [ + "▁MacKenzie", + -14.446069717407228 + ], + [ + "▁Stellenbosch", + -14.446069717407228 + ], + [ + "▁bergamot", + -14.446069717407228 + ], + [ + "▁comprehensible", + -14.446069717407228 + ], + [ + "Adequate", + -14.446070671081545 + ], + [ + "▁Archipelago", + -14.446070671081545 + ], + [ + "▁DiCaprio", + -14.446070671081545 + ], + [ + "▁Malhotra", + -14.446070671081545 + ], + [ + "▁McNamara", + -14.446070671081545 + ], + [ + "▁Scrooge", + -14.446070671081545 + ], + [ + "▁Solihull", + -14.446070671081545 + ], + [ + "▁contaminating", + -14.446070671081545 + ], + [ + "▁derivation", + -14.446070671081545 + ], + [ + "▁inequities", + -14.446070671081545 + ], + [ + "▁numeracy", + -14.446070671081545 + ], + [ + "▁pantomime", + -14.446070671081545 + ], + [ + "▁primordial", + -14.446070671081545 + ], + [ + "▁prodigious", + -14.446070671081545 + ], + [ + "▁uncomfortably", + -14.446070671081545 + ], + [ + "▁unpretentious", + -14.446070671081545 + ], + [ + "▁velocities", + -14.446070671081545 + ], + [ + 
"▁pricier", + -14.446072578430176 + ], + [ + "▁ELECTRIC", + -14.446073532104492 + ], + [ + "▁eroding", + -14.446073532104492 + ], + [ + "▁bankruptcies", + -14.446074485778809 + ], + [ + "▁Epcot", + -14.44607925415039 + ], + [ + "fontein", + -14.446080207824709 + ], + [ + "▁Gatorade", + -14.446084976196287 + ], + [ + "▁Darlene", + -14.446093559265137 + ], + [ + "▁Policing", + -14.446093559265137 + ], + [ + "▁Yunnan", + -14.44609546661377 + ], + [ + "▁cowhide", + -14.446101188659668 + ], + [ + "▁Habib", + -14.446102142333984 + ], + [ + "▁Predictive", + -14.446104049682615 + ], + [ + "▁geolocation", + -14.446104049682615 + ], + [ + "▁Vogt", + -14.44610595703125 + ], + [ + "▁Terrible", + -14.446114540100098 + ], + [ + "▁biosimilar", + -14.446118354797363 + ], + [ + "▁Jurgen", + -14.446125030517578 + ], + [ + "SESSION", + -14.446136474609377 + ], + [ + "▁sedimentary", + -14.446137428283691 + ], + [ + "▁Haifa", + -14.446139335632324 + ], + [ + "▁subprime", + -14.44614601135254 + ], + [ + "▁behest", + -14.446163177490234 + ], + [ + "▁methanol", + -14.446165084838867 + ], + [ + "▁Ryo", + -14.446172714233398 + ], + [ + "▁Ticketmaster", + -14.44617748260498 + ], + [ + "BRC", + -14.446212768554688 + ], + [ + "Html", + -14.446216583251951 + ], + [ + "▁reconsideration", + -14.446218490600586 + ], + [ + "▁Callahan", + -14.446239471435549 + ], + [ + "▁POLY", + -14.446247100830078 + ], + [ + "amiento", + -14.446249008178713 + ], + [ + "▁proprietorship", + -14.446281433105469 + ], + [ + "▁falsehood", + -14.446284294128418 + ], + [ + "▁disinterested", + -14.446288108825684 + ], + [ + "▁Pullover", + -14.44629955291748 + ], + [ + "▁Bremen", + -14.446303367614746 + ], + [ + "▁Priorities", + -14.44630527496338 + ], + [ + "▁DOHC", + -14.44631576538086 + ], + [ + "▁Adaptation", + -14.446331024169922 + ], + [ + "▁SPECIFIC", + -14.446333885192873 + ], + [ + "BELL", + -14.446358680725098 + ], + [ + "▁redecorate", + -14.446393013000488 + ], + [ + "▁lagged", + -14.446457862854004 + ], + [ + "▁recesses", + -14.44650173187256 + ], + [ + "▁ethnically", + -14.446516036987305 + ], + [ + "▁Jell", + -14.44653034210205 + ], + [ + "retro", + -14.446537017822266 + ], + [ + "▁(2007", + -14.44659423828125 + ], + [ + "▁Clos", + -14.446642875671388 + ], + [ + "▁Drape", + -14.446650505065918 + ], + [ + "▁spearheading", + -14.446709632873535 + ], + [ + "98)", + -14.44674301147461 + ], + [ + "▁PAYMENT", + -14.446755409240724 + ], + [ + "▁aborted", + -14.44675636291504 + ], + [ + "▁AHS", + -14.44679355621338 + ], + [ + "▁Installers", + -14.446800231933594 + ], + [ + "directly", + -14.446873664855955 + ], + [ + "▁Vijaya", + -14.446951866149902 + ], + [ + "▁nucleo", + -14.44703483581543 + ], + [ + "Slimming", + -14.447036743164062 + ], + [ + "▁Cranes", + -14.44704246520996 + ], + [ + "ENO", + -14.447056770324709 + ], + [ + "▁Formica", + -14.447080612182615 + ], + [ + "▁HALL", + -14.447104454040527 + ], + [ + "▁Revealed", + -14.447148323059082 + ], + [ + "▁Pugh", + -14.44721221923828 + ], + [ + "coagul", + -14.447237968444824 + ], + [ + "▁(44", + -14.447246551513672 + ], + [ + "▁Kaba", + -14.447283744812012 + ], + [ + "▁intensifying", + -14.44729232788086 + ], + [ + "▁sweetly", + -14.447339057922363 + ], + [ + "▁Nugget", + -14.447367668151855 + ], + [ + "▁Jahre", + -14.447381019592283 + ], + [ + "Activate", + -14.447428703308104 + ], + [ + "ROVE", + -14.447429656982422 + ], + [ + "▁Stoker", + -14.447449684143066 + ], + [ + "▁barricade", + -14.447452545166016 + ], + [ + "Arriving", + -14.44750690460205 + ], + [ + "Bundle", + 
-14.447521209716797 + ], + [ + "Lloyd", + -14.447542190551758 + ], + [ + "▁Yash", + -14.447545051574709 + ], + [ + "Approved", + -14.447547912597656 + ], + [ + "infrared", + -14.447551727294922 + ], + [ + "presumably", + -14.447553634643556 + ], + [ + "emphasis", + -14.447556495666504 + ], + [ + "False", + -14.44757843017578 + ], + [ + "▁Slavic", + -14.447623252868652 + ], + [ + "▁wrestled", + -14.4476318359375 + ], + [ + "heaven", + -14.44773006439209 + ], + [ + "Whichever", + -14.44774055480957 + ], + [ + "▁18-20", + -14.447747230529783 + ], + [ + "▁grommets", + -14.447748184204102 + ], + [ + "meadow", + -14.447761535644531 + ], + [ + "vocals", + -14.447809219360352 + ], + [ + "Organise", + -14.44781494140625 + ], + [ + "▁Horner", + -14.447829246520996 + ], + [ + "▁friendliest", + -14.447854042053224 + ], + [ + "▁Cryo", + -14.447870254516602 + ], + [ + "Grim", + -14.447874069213867 + ], + [ + "background", + -14.4479341506958 + ], + [ + "▁Entertainer", + -14.4479398727417 + ], + [ + "▁16-18", + -14.447951316833496 + ], + [ + "amber", + -14.447954177856444 + ], + [ + "89)", + -14.447972297668455 + ], + [ + "12\"", + -14.448015213012695 + ], + [ + "1024", + -14.448094367980955 + ], + [ + "instagram", + -14.448107719421388 + ], + [ + "▁Joined", + -14.448137283325195 + ], + [ + "▁Ivey", + -14.448195457458496 + ], + [ + "▁Sleeves", + -14.448217391967772 + ], + [ + "▁$3.7", + -14.448226928710938 + ], + [ + "DTM", + -14.448271751403809 + ], + [ + "▁Watermark", + -14.448307991027832 + ], + [ + "Improvement", + -14.448333740234377 + ], + [ + "ODE", + -14.448344230651855 + ], + [ + "equin", + -14.448344230651855 + ], + [ + "▁Pomp", + -14.448347091674805 + ], + [ + "toothed", + -14.44838047027588 + ], + [ + "▁Bast", + -14.448382377624512 + ], + [ + "versus", + -14.44839096069336 + ], + [ + "▁Arno", + -14.448431015014648 + ], + [ + "▁$2.8", + -14.44847583770752 + ], + [ + "▁Moira", + -14.448631286621094 + ], + [ + "▁Padre", + -14.4487886428833 + ], + [ + "Stoke", + -14.448798179626465 + ], + [ + "CUL", + -14.448800086975098 + ], + [ + "▁Shik", + -14.448893547058104 + ], + [ + "Mull", + -14.448936462402344 + ], + [ + "▁Scion", + -14.449102401733398 + ], + [ + "▁Docu", + -14.449143409729004 + ], + [ + "▁panelling", + -14.449285507202148 + ], + [ + "▁Amr", + -14.449310302734377 + ], + [ + "▁brightened", + -14.449312210083008 + ], + [ + "▁topcoat", + -14.449366569519045 + ], + [ + "marsh", + -14.449469566345217 + ], + [ + "kura", + -14.449478149414062 + ], + [ + "bottomed", + -14.44960117340088 + ], + [ + "develop", + -14.449641227722168 + ], + [ + "Translate", + -14.449688911437988 + ], + [ + "records", + -14.44969367980957 + ], + [ + "ZEN", + -14.449786186218262 + ], + [ + "▁Visibility", + -14.44984531402588 + ], + [ + "tada", + -14.449875831604004 + ], + [ + "Proc", + -14.449968338012695 + ], + [ + "▁Healer", + -14.449987411499023 + ], + [ + "▁banged", + -14.45002555847168 + ], + [ + "Dim", + -14.450026512145996 + ], + [ + "▁shortfalls", + -14.450037002563477 + ], + [ + "Behold", + -14.450082778930664 + ], + [ + "▁Kms", + -14.450197219848633 + ], + [ + "▁Garnett", + -14.450215339660645 + ], + [ + "hiya", + -14.450224876403809 + ], + [ + "▁fluted", + -14.450243949890137 + ], + [ + "▁OAK", + -14.45025634765625 + ], + [ + "unny", + -14.45034408569336 + ], + [ + "Contest", + -14.450384140014648 + ], + [ + "▁Pio", + -14.450389862060549 + ], + [ + "▁Avian", + -14.450406074523926 + ], + [ + "▁Launching", + -14.450450897216797 + ], + [ + "Fone", + -14.450458526611328 + ], + [ + "devel", + -14.450502395629885 
+ ], + [ + "▁Appealing", + -14.450535774230955 + ], + [ + "strategy", + -14.450580596923828 + ], + [ + "pou", + -14.450610160827637 + ], + [ + "avel", + -14.450634002685549 + ], + [ + "GAME", + -14.450653076171877 + ], + [ + "▁rockstar", + -14.450653076171877 + ], + [ + "▁Farewell", + -14.450881958007812 + ], + [ + "▁Cousin", + -14.450883865356444 + ], + [ + "▁Ranchi", + -14.45091152191162 + ], + [ + "▁honk", + -14.45097827911377 + ], + [ + "▁joker", + -14.450979232788086 + ], + [ + "Affected", + -14.45098876953125 + ], + [ + "▁wicks", + -14.45111846923828 + ], + [ + "MPA", + -14.45114803314209 + ], + [ + "/2003", + -14.45116901397705 + ], + [ + "▁pulleys", + -14.451169967651367 + ], + [ + "goth", + -14.45118808746338 + ], + [ + "aghan", + -14.451221466064451 + ], + [ + "▁shelled", + -14.451229095458984 + ], + [ + "▁Arlo", + -14.451250076293944 + ], + [ + "▁PAUL", + -14.451255798339844 + ], + [ + "▁proportioned", + -14.451332092285156 + ], + [ + "▁orbs", + -14.45135498046875 + ], + [ + "▁LTS", + -14.451386451721191 + ], + [ + "▁5/5", + -14.451498985290527 + ], + [ + "▁Ordered", + -14.45155906677246 + ], + [ + "▁Zamb", + -14.451631546020508 + ], + [ + "BIR", + -14.451666831970217 + ], + [ + "▁Restorative", + -14.451783180236816 + ], + [ + "▁Whiting", + -14.451802253723145 + ], + [ + "noting", + -14.451824188232422 + ], + [ + "guez", + -14.451854705810549 + ], + [ + "4100", + -14.451887130737305 + ], + [ + "zant", + -14.451911926269531 + ], + [ + "941", + -14.451944351196287 + ], + [ + "▁Bhai", + -14.451950073242188 + ], + [ + "▁Vya", + -14.452054977416992 + ], + [ + "▁colic", + -14.452064514160156 + ], + [ + "Gla", + -14.452219009399414 + ], + [ + "▁skilful", + -14.452284812927246 + ], + [ + "▁Talented", + -14.452350616455078 + ], + [ + "OOD", + -14.452364921569824 + ], + [ + "ARO", + -14.452437400817873 + ], + [ + "▁Evie", + -14.452441215515137 + ], + [ + "▁CONSULT", + -14.452522277832031 + ], + [ + "▁Columbine", + -14.452522277832031 + ], + [ + "▁Hickey", + -14.452522277832031 + ], + [ + "▁Kamloops", + -14.452522277832031 + ], + [ + "▁Reversible", + -14.452522277832031 + ], + [ + "▁Scunthorpe", + -14.452522277832031 + ], + [ + "▁antihistamine", + -14.452522277832031 + ], + [ + "▁archbishop", + -14.452522277832031 + ], + [ + "▁capillaries", + -14.452522277832031 + ], + [ + "▁deceleration", + -14.452522277832031 + ], + [ + "▁hibernate", + -14.452522277832031 + ], + [ + "▁interpolation", + -14.452522277832031 + ], + [ + "▁nauseous", + -14.452522277832031 + ], + [ + "▁redistricting", + -14.452522277832031 + ], + [ + "▁vegetative", + -14.452522277832031 + ], + [ + "▁extraneous", + -14.452523231506348 + ], + [ + "▁Medvedev", + -14.452524185180664 + ], + [ + "▁gyratory", + -14.452524185180664 + ], + [ + "▁corkscrew", + -14.452527046203612 + ], + [ + "▁Tarantino", + -14.45252799987793 + ], + [ + "▁Ibadan", + -14.452528953552246 + ], + [ + "▁hinterland", + -14.452533721923828 + ], + [ + "▁october", + -14.452533721923828 + ], + [ + "▁Safeguarding", + -14.452534675598145 + ], + [ + "▁regress", + -14.45254135131836 + ], + [ + "▁admittance", + -14.452542304992676 + ], + [ + "▁picures", + -14.452545166015623 + ], + [ + "▁Koran", + -14.45254898071289 + ], + [ + "▁pecking", + -14.45255184173584 + ], + [ + "▁electrochemical", + -14.452552795410156 + ], + [ + "▁NCLEX", + -14.452554702758787 + ], + [ + "▁(1980)", + -14.452556610107422 + ], + [ + "▁Assisi", + -14.452556610107422 + ], + [ + "▁CUNY", + -14.452558517456056 + ], + [ + "▁Inkjet", + -14.45256805419922 + ], + [ + "▁Sherwin", + -14.452571868896484 + 
], + [ + "▁commode", + -14.452579498291016 + ], + [ + "▁Sidekick", + -14.452595710754396 + ], + [ + "▁Azad", + -14.452611923217772 + ], + [ + "▁pathogenesis", + -14.45262050628662 + ], + [ + "▁plurality", + -14.452625274658203 + ], + [ + "1400", + -14.452628135681152 + ], + [ + "▁Trekking", + -14.452634811401367 + ], + [ + "▁alienating", + -14.452645301818848 + ], + [ + "▁patriarchal", + -14.452655792236328 + ], + [ + "▁romper", + -14.45266342163086 + ], + [ + "▁leuk", + -14.452665328979492 + ], + [ + "▁bodybuilders", + -14.452677726745604 + ], + [ + "▁Suzhou", + -14.452681541442873 + ], + [ + "▁Motivational", + -14.45268440246582 + ], + [ + "USS", + -14.45272159576416 + ], + [ + "▁baggie", + -14.45273208618164 + ], + [ + "▁2004;", + -14.452770233154297 + ], + [ + "▁unwillingness", + -14.45279026031494 + ], + [ + "▁riveted", + -14.452808380126951 + ], + [ + "▁Mendes", + -14.452826499938965 + ], + [ + "▁refactoring", + -14.452836990356444 + ], + [ + "Inner", + -14.452875137329102 + ], + [ + "▁Wilco", + -14.452906608581545 + ], + [ + "▁Idiot", + -14.452926635742188 + ], + [ + "▁Urology", + -14.452937126159668 + ], + [ + "▁Luciano", + -14.452963829040527 + ], + [ + "▁headgear", + -14.453010559082031 + ], + [ + "Optimis", + -14.453110694885254 + ], + [ + "▁preexisting", + -14.453119277954102 + ], + [ + "▁12.9", + -14.453184127807615 + ], + [ + "irr", + -14.45318603515625 + ], + [ + "▁assessors", + -14.453216552734377 + ], + [ + "▁carriageway", + -14.453227043151855 + ], + [ + "▁shareholding", + -14.453248977661133 + ], + [ + "▁Llc", + -14.453265190124512 + ], + [ + "▁fixated", + -14.453327178955078 + ], + [ + "cyan", + -14.453364372253418 + ], + [ + "▁confine", + -14.453375816345217 + ], + [ + "▁Stonewall", + -14.453424453735352 + ], + [ + "Introduce", + -14.453529357910156 + ], + [ + "hydrate", + -14.453606605529783 + ], + [ + "Communications", + -14.453704833984377 + ], + [ + "ERE", + -14.453717231750488 + ], + [ + "▁UNHCR", + -14.453721046447754 + ], + [ + "▁sleepiness", + -14.45383071899414 + ], + [ + "inum", + -14.453838348388672 + ], + [ + "▁numeral", + -14.45391082763672 + ], + [ + "▁Expense", + -14.4539155960083 + ], + [ + "Drone", + -14.4539213180542 + ], + [ + "▁suture", + -14.45396900177002 + ], + [ + "▁remittance", + -14.45397663116455 + ], + [ + "5.50", + -14.454033851623535 + ], + [ + "assured", + -14.454042434692385 + ], + [ + "Thing", + -14.454058647155762 + ], + [ + "▁darkening", + -14.454060554504396 + ], + [ + "▁amplifying", + -14.454097747802734 + ], + [ + "▁margaritas", + -14.454109191894531 + ], + [ + "crist", + -14.454145431518556 + ], + [ + "Eligible", + -14.454154014587402 + ], + [ + "parallel", + -14.454166412353516 + ], + [ + "Regulation", + -14.45417594909668 + ], + [ + "Hungary", + -14.454178810119627 + ], + [ + "Nursing", + -14.454180717468262 + ], + [ + "Hunting", + -14.454185485839844 + ], + [ + "Bristol", + -14.454190254211426 + ], + [ + "Fantasy", + -14.454195976257324 + ], + [ + "Structural", + -14.454230308532717 + ], + [ + "rode", + -14.45424461364746 + ], + [ + "▁pothole", + -14.45424461364746 + ], + [ + "Derek", + -14.454248428344728 + ], + [ + "▁roamed", + -14.454315185546877 + ], + [ + "▁Chr", + -14.454358100891112 + ], + [ + "▁Tidy", + -14.454360961914062 + ], + [ + "▁marlin", + -14.454484939575195 + ], + [ + "▁anyplace", + -14.45450496673584 + ], + [ + "▁Thyme", + -14.45461368560791 + ], + [ + "▁eyesore", + -14.45462417602539 + ], + [ + "▁birthed", + -14.45464324951172 + ], + [ + "Notification", + -14.454659461975098 + ], + [ + "Election", + 
-14.454675674438477 + ], + [ + "possession", + -14.454726219177246 + ], + [ + "▁Lech", + -14.45477294921875 + ], + [ + "▁gorgeously", + -14.454793930053713 + ], + [ + "Scoop", + -14.45479965209961 + ], + [ + "stituting", + -14.45490264892578 + ], + [ + "presented", + -14.454910278320312 + ], + [ + "▁1824", + -14.454926490783691 + ], + [ + "▁resigning", + -14.454974174499512 + ], + [ + "▁enablement", + -14.454988479614258 + ], + [ + "▁congregational", + -14.455002784729004 + ], + [ + "2.2%", + -14.455052375793455 + ], + [ + "▁Lacy", + -14.455084800720217 + ], + [ + "Janet", + -14.455106735229492 + ], + [ + "utti", + -14.455124855041504 + ], + [ + "Tough", + -14.455133438110352 + ], + [ + "hier", + -14.455135345458984 + ], + [ + "▁15+", + -14.455142974853516 + ], + [ + "▁tunneling", + -14.455153465270996 + ], + [ + "▁Fares", + -14.455194473266602 + ], + [ + "▁Rotor", + -14.455204963684082 + ], + [ + "fixing", + -14.455211639404297 + ], + [ + "▁corny", + -14.455233573913574 + ], + [ + "Louise", + -14.455248832702637 + ], + [ + "Drinking", + -14.455348014831545 + ], + [ + "▁Carte", + -14.45543384552002 + ], + [ + "▁Carton", + -14.455434799194336 + ], + [ + "racist", + -14.45544719696045 + ], + [ + "▁premi", + -14.455451011657717 + ], + [ + "▁southward", + -14.45549774169922 + ], + [ + "trunk", + -14.455549240112305 + ], + [ + "▁ISC", + -14.455620765686035 + ], + [ + "Conn", + -14.455698013305664 + ], + [ + "▁Kann", + -14.455734252929688 + ], + [ + "▁1839", + -14.455910682678224 + ], + [ + "nect", + -14.456067085266112 + ], + [ + "quing", + -14.456119537353516 + ], + [ + "Diana", + -14.456138610839844 + ], + [ + "0.01", + -14.456356048583984 + ], + [ + "▁cardholders", + -14.456360816955566 + ], + [ + "▁patter", + -14.456427574157717 + ], + [ + "ratio", + -14.456494331359863 + ], + [ + "▁$80,000", + -14.45657730102539 + ], + [ + "▁Informal", + -14.45666790008545 + ], + [ + "▁coders", + -14.456764221191406 + ], + [ + "phant", + -14.456993103027344 + ], + [ + "▁PVT", + -14.457067489624023 + ], + [ + "▁1991)", + -14.457103729248049 + ], + [ + "morrow", + -14.457159996032717 + ], + [ + "ophilia", + -14.457175254821776 + ], + [ + "35%", + -14.457186698913574 + ], + [ + "▁Reels", + -14.457231521606444 + ], + [ + "▁buffing", + -14.457273483276367 + ], + [ + "interactive", + -14.457358360290527 + ], + [ + "weekend", + -14.4573974609375 + ], + [ + "▁globalized", + -14.457425117492676 + ], + [ + "ANDY", + -14.45747184753418 + ], + [ + "adj", + -14.457493782043455 + ], + [ + "▁Lying", + -14.457531929016112 + ], + [ + "▁Yearly", + -14.457558631896973 + ], + [ + "▁Logging", + -14.457573890686035 + ], + [ + "▁cricketer", + -14.457709312438965 + ], + [ + "opus", + -14.457763671875 + ], + [ + "DFA", + -14.457772254943848 + ], + [ + "▁(2014", + -14.457813262939451 + ], + [ + "▁jalapenos", + -14.457840919494627 + ], + [ + "CEC", + -14.457921028137209 + ], + [ + "ROLL", + -14.457927703857422 + ], + [ + "▁pare", + -14.458003997802734 + ], + [ + "!!!!!!!!!!", + -14.458033561706545 + ], + [ + "naz", + -14.458036422729492 + ], + [ + "▁Diners", + -14.458169937133787 + ], + [ + "▁2,700", + -14.458325386047363 + ], + [ + "▁ITEM", + -14.458373069763184 + ], + [ + "▁Maggi", + -14.458391189575195 + ], + [ + "▁Rude", + -14.458391189575195 + ], + [ + "▁dislodge", + -14.458426475524902 + ], + [ + "▁Maro", + -14.458490371704102 + ], + [ + "osomes", + -14.458494186401367 + ], + [ + "▁abduct", + -14.458520889282228 + ], + [ + "9300", + -14.458532333374023 + ], + [ + "▁Kristy", + -14.458768844604492 + ], + [ + "▁categorical", + 
-14.458779335021973 + ], + [ + "▁infractions", + -14.458780288696287 + ], + [ + "GOLD", + -14.458823204040527 + ], + [ + "Cham", + -14.458826065063477 + ], + [ + "FAT", + -14.458834648132324 + ], + [ + "▁middleman", + -14.45887279510498 + ], + [ + "▁Katha", + -14.458930015563965 + ], + [ + "vette", + -14.45899772644043 + ], + [ + "▁Kiran", + -14.459001541137695 + ], + [ + "▁Temecula", + -14.459014892578123 + ], + [ + "▁filibuster", + -14.459014892578123 + ], + [ + "▁Cypriot", + -14.45901584625244 + ], + [ + "▁Dubuque", + -14.45901584625244 + ], + [ + "▁Ebenezer", + -14.45901584625244 + ], + [ + "▁Harlequin", + -14.45901584625244 + ], + [ + "▁Litchfield", + -14.45901584625244 + ], + [ + "▁Pulaski", + -14.45901584625244 + ], + [ + "▁Sampling", + -14.45901584625244 + ], + [ + "▁Sclerosis", + -14.45901584625244 + ], + [ + "▁animosity", + -14.45901584625244 + ], + [ + "▁calamari", + -14.45901584625244 + ], + [ + "▁cougar", + -14.45901584625244 + ], + [ + "▁extensible", + -14.45901584625244 + ], + [ + "▁inconceivable", + -14.45901584625244 + ], + [ + "▁macaroon", + -14.45901584625244 + ], + [ + "▁oscillating", + -14.45901584625244 + ], + [ + "▁sequester", + -14.45901584625244 + ], + [ + "▁signatories", + -14.45901584625244 + ], + [ + "▁Pantheon", + -14.459016799926758 + ], + [ + "▁Houdini", + -14.45901870727539 + ], + [ + "▁Insolvency", + -14.459019660949709 + ], + [ + "▁Flirt", + -14.45902156829834 + ], + [ + "▁EXACTLY", + -14.459022521972656 + ], + [ + "▁Soyuz", + -14.459024429321287 + ], + [ + "▁scoping", + -14.459028244018556 + ], + [ + "▁possum", + -14.459029197692873 + ], + [ + "USDA", + -14.459035873413086 + ], + [ + "▁cybercrime", + -14.459043502807615 + ], + [ + "▁hydrotherapy", + -14.4590482711792 + ], + [ + "▁Kodiak", + -14.459050178527832 + ], + [ + "▁paltry", + -14.459053993225098 + ], + [ + "▁amassing", + -14.459054946899414 + ], + [ + "▁Kingfisher", + -14.45905876159668 + ], + [ + "▁DHgate", + -14.459065437316896 + ], + [ + "▁Realizing", + -14.45906925201416 + ], + [ + "smacking", + -14.459070205688477 + ], + [ + "▁Scallop", + -14.459070205688477 + ], + [ + "▁Tebow", + -14.459080696105955 + ], + [ + "▁Boucher", + -14.459091186523438 + ], + [ + "▁decompress", + -14.459121704101562 + ], + [ + "▁10-11", + -14.459136962890623 + ], + [ + "▁Goshen", + -14.45914363861084 + ], + [ + "▁Arkham", + -14.459149360656738 + ], + [ + "▁Netscape", + -14.45915985107422 + ], + [ + "▁Pocono", + -14.459165573120115 + ], + [ + "▁lexical", + -14.459173202514648 + ], + [ + "▁dinette", + -14.45917510986328 + ], + [ + "▁Probiotic", + -14.45920467376709 + ], + [ + "▁Shovel", + -14.459205627441406 + ], + [ + "▁suppresse", + -14.459227561950684 + ], + [ + "▁broach", + -14.459233283996582 + ], + [ + "▁Bibliography", + -14.459246635437012 + ], + [ + "alex", + -14.45926284790039 + ], + [ + "▁squishy", + -14.459263801574709 + ], + [ + "▁reinvention", + -14.45928955078125 + ], + [ + "alou", + -14.459309577941896 + ], + [ + "▁wanderlust", + -14.45931339263916 + ], + [ + "▁WPF", + -14.459352493286133 + ], + [ + "▁manhole", + -14.459362030029297 + ], + [ + "▁Selby", + -14.459385871887209 + ], + [ + "▁scrunch", + -14.459388732910156 + ], + [ + "▁NSS", + -14.459492683410645 + ], + [ + "▁dalam", + -14.45952606201172 + ], + [ + "vington", + -14.459589004516602 + ], + [ + "▁serialized", + -14.459604263305664 + ], + [ + "▁Tidal", + -14.459613800048828 + ], + [ + "opper", + -14.459623336791992 + ], + [ + "4:10", + -14.459630012512209 + ], + [ + "▁Honesty", + -14.459640502929688 + ], + [ + "▁Sauer", + -14.459640502929688 + 
], + [ + "▁mulling", + -14.459661483764648 + ], + [ + "▁Engl", + -14.45968246459961 + ], + [ + "▁overclocking", + -14.459687232971191 + ], + [ + "▁unwritten", + -14.459688186645508 + ], + [ + "▁Phar", + -14.459704399108888 + ], + [ + "1.05", + -14.4597806930542 + ], + [ + "spare", + -14.459786415100098 + ], + [ + "chap", + -14.459792137145996 + ], + [ + "▁Niels", + -14.459794044494627 + ], + [ + "▁EVP", + -14.459854125976562 + ], + [ + "belly", + -14.459857940673828 + ], + [ + "▁outstation", + -14.45992374420166 + ], + [ + "▁duped", + -14.45994758605957 + ], + [ + "▁kickback", + -14.45998477935791 + ], + [ + "GED", + -14.460025787353516 + ], + [ + "▁outstretched", + -14.460025787353516 + ], + [ + "▁Samaria", + -14.460113525390623 + ], + [ + "Hubbell", + -14.460166931152344 + ], + [ + "Forgive", + -14.46018409729004 + ], + [ + "▁Martens", + -14.460195541381836 + ], + [ + "GPP", + -14.460199356079102 + ], + [ + "▁Disposable", + -14.460230827331545 + ], + [ + "▁sported", + -14.460257530212402 + ], + [ + "▁roared", + -14.46025848388672 + ], + [ + "8-10", + -14.46026611328125 + ], + [ + "leaved", + -14.460285186767578 + ], + [ + "▁replenished", + -14.4603271484375 + ], + [ + "auf", + -14.460344314575195 + ], + [ + "▁Lettuce", + -14.460386276245115 + ], + [ + "▁Teva", + -14.460402488708496 + ], + [ + "▁Stained", + -14.46048355102539 + ], + [ + "▁Kultur", + -14.460514068603516 + ], + [ + "Maj", + -14.46055030822754 + ], + [ + "▁(*)", + -14.460566520690918 + ], + [ + "Actual", + -14.460575103759766 + ], + [ + "developing", + -14.46061897277832 + ], + [ + "▁mba", + -14.460623741149902 + ], + [ + "▁antagonists", + -14.460633277893066 + ], + [ + "▁Bloc", + -14.460647583007812 + ], + [ + "married", + -14.46069622039795 + ], + [ + "▁Yr", + -14.46071434020996 + ], + [ + "▁dimly", + -14.460755348205566 + ], + [ + "▁Ashe", + -14.4607572555542 + ], + [ + "▁Alden", + -14.46076488494873 + ], + [ + "Admittedly", + -14.460803985595703 + ], + [ + "Fusion", + -14.46084213256836 + ], + [ + "aluminum", + -14.46084976196289 + ], + [ + "3-18", + -14.460858345031738 + ], + [ + "fungal", + -14.460880279541016 + ], + [ + "▁Chute", + -14.460899353027344 + ], + [ + "▁Squat", + -14.460923194885254 + ], + [ + "forgotten", + -14.460927963256836 + ], + [ + "▁danish", + -14.460946083068848 + ], + [ + "▁rations", + -14.460976600646973 + ], + [ + "▁Extractor", + -14.460977554321287 + ], + [ + "anywhere", + -14.460999488830566 + ], + [ + "▁Pimp", + -14.461003303527832 + ], + [ + "Betty", + -14.461095809936523 + ], + [ + "ringer", + -14.461105346679688 + ], + [ + "roch", + -14.461126327514648 + ], + [ + "Batman", + -14.461170196533203 + ], + [ + "Weekend", + -14.461185455322266 + ], + [ + "Entire", + -14.461201667785645 + ], + [ + "▁DMP", + -14.461298942565918 + ], + [ + "Gerald", + -14.461406707763672 + ], + [ + "▁Rech", + -14.461456298828123 + ], + [ + "▁Chatter", + -14.461515426635742 + ], + [ + "uvia", + -14.461578369140623 + ], + [ + "ambara", + -14.46157932281494 + ], + [ + "▁damning", + -14.461594581604004 + ], + [ + "CEF", + -14.461606979370115 + ], + [ + "skid", + -14.461613655090332 + ], + [ + "▁Rapper", + -14.461664199829102 + ], + [ + "▁SPEED", + -14.461687088012695 + ], + [ + "mike", + -14.461750030517578 + ], + [ + "▁caseload", + -14.46177101135254 + ], + [ + "▁Cristo", + -14.46177864074707 + ], + [ + "kron", + -14.461797714233398 + ], + [ + "▁Destroyer", + -14.461803436279297 + ], + [ + "Thou", + -14.46182918548584 + ], + [ + "firing", + -14.461836814880373 + ], + [ + "-160", + -14.461841583251951 + ], + [ + 
"▁spotty", + -14.461853981018066 + ], + [ + "▁Maize", + -14.461901664733888 + ], + [ + "▁hilariously", + -14.461917877197266 + ], + [ + "▁Ammo", + -14.461986541748049 + ], + [ + "came", + -14.462080955505373 + ], + [ + "▁wands", + -14.462088584899902 + ], + [ + "▁Registering", + -14.46208953857422 + ], + [ + "▁Nineteen", + -14.462093353271484 + ], + [ + "dressing", + -14.462116241455078 + ], + [ + "directory", + -14.462120056152344 + ], + [ + "kian", + -14.462182998657228 + ], + [ + "▁21,000", + -14.462193489074709 + ], + [ + "AAPL", + -14.46228313446045 + ], + [ + "Quarter", + -14.462346076965332 + ], + [ + "▁1803", + -14.462372779846191 + ], + [ + "MSI", + -14.462457656860352 + ], + [ + "Paste", + -14.462479591369627 + ], + [ + "▁0.15", + -14.462525367736816 + ], + [ + "▁Makar", + -14.46255588531494 + ], + [ + "Meri", + -14.462581634521484 + ], + [ + "▁syl", + -14.462895393371582 + ], + [ + "tje", + -14.462921142578123 + ], + [ + "avier", + -14.463075637817385 + ], + [ + "stedt", + -14.463105201721191 + ], + [ + "hnen", + -14.463193893432615 + ], + [ + "Roo", + -14.463217735290527 + ], + [ + "▁Loeb", + -14.463223457336426 + ], + [ + "▁financiers", + -14.463242530822754 + ], + [ + "mouthed", + -14.463262557983398 + ], + [ + "▁freckles", + -14.463265419006348 + ], + [ + "▁cementing", + -14.463286399841309 + ], + [ + "▁Manifest", + -14.463303565979004 + ], + [ + "▁Audiences", + -14.463313102722168 + ], + [ + "yacht", + -14.463316917419434 + ], + [ + "MTA", + -14.463335990905762 + ], + [ + "▁Torrey", + -14.46334457397461 + ], + [ + "▁fellas", + -14.463348388671877 + ], + [ + "MATIC", + -14.463455200195312 + ], + [ + "▁inordinate", + -14.463456153869627 + ], + [ + "Stem", + -14.463485717773438 + ], + [ + "1-14", + -14.463711738586426 + ], + [ + "curi", + -14.463730812072754 + ], + [ + "▁Sikhs", + -14.463739395141602 + ], + [ + "▁Loyal", + -14.463789939880373 + ], + [ + "▁NSP", + -14.463791847229004 + ], + [ + "▁checkers", + -14.463793754577637 + ], + [ + "ripened", + -14.46379566192627 + ], + [ + "crossing", + -14.463869094848633 + ], + [ + "Logger", + -14.463876724243164 + ], + [ + "▁Mirrored", + -14.463895797729492 + ], + [ + "▁Junkie", + -14.464034080505373 + ], + [ + "goes", + -14.46403980255127 + ], + [ + "Bytes", + -14.464059829711914 + ], + [ + "haa", + -14.464193344116213 + ], + [ + "Couldn", + -14.464245796203612 + ], + [ + "▁pickling", + -14.46435832977295 + ], + [ + "SPACE", + -14.464365005493164 + ], + [ + "Discussing", + -14.4644193649292 + ], + [ + "▁Glyn", + -14.464437484741213 + ], + [ + "▁manifolds", + -14.46456241607666 + ], + [ + "▁counterbalance", + -14.464591979980469 + ], + [ + "▁flexing", + -14.464627265930176 + ], + [ + "Dick", + -14.464658737182615 + ], + [ + "▁Adriana", + -14.464747428894045 + ], + [ + "Manufacturers", + -14.46476936340332 + ], + [ + "pple", + -14.464886665344238 + ], + [ + "▁dreamers", + -14.46489715576172 + ], + [ + "▁chimes", + -14.464903831481934 + ], + [ + "67)", + -14.4649076461792 + ], + [ + "1971", + -14.46494197845459 + ], + [ + "HPS", + -14.464959144592283 + ], + [ + "bundle", + -14.464964866638184 + ], + [ + "rpg", + -14.464990615844728 + ], + [ + "lona", + -14.465027809143066 + ], + [ + "▁Aro", + -14.46506118774414 + ], + [ + "iety", + -14.465073585510254 + ], + [ + "▁laud", + -14.465113639831545 + ], + [ + "RAIL", + -14.465121269226074 + ], + [ + "▁oxidize", + -14.465181350708008 + ], + [ + "immo", + -14.465206146240234 + ], + [ + "▁exclaim", + -14.465242385864258 + ], + [ + "▁alleviated", + -14.46524429321289 + ], + [ + "▁evict", + 
-14.465265274047852 + ], + [ + "▁multitask", + -14.4652738571167 + ], + [ + "▁payloads", + -14.465286254882812 + ], + [ + "jeet", + -14.465316772460938 + ], + [ + "ragon", + -14.465356826782228 + ], + [ + "▁Zai", + -14.465361595153809 + ], + [ + "▁Realistic", + -14.465436935424805 + ], + [ + "▁Kolo", + -14.465521812438965 + ], + [ + "▁NextGen", + -14.465529441833496 + ], + [ + "Affidavit", + -14.465551376342772 + ], + [ + "Fibromyalgia", + -14.465551376342772 + ], + [ + "▁Arunachal", + -14.465551376342772 + ], + [ + "▁Clermont", + -14.465551376342772 + ], + [ + "▁Coquitlam", + -14.465551376342772 + ], + [ + "▁Maastricht", + -14.465551376342772 + ], + [ + "▁allegory", + -14.465551376342772 + ], + [ + "▁bickering", + -14.465551376342772 + ], + [ + "▁chevrolet", + -14.465551376342772 + ], + [ + "▁congratulating", + -14.465551376342772 + ], + [ + "▁encampment", + -14.465551376342772 + ], + [ + "▁jurisprudence", + -14.465551376342772 + ], + [ + "▁lubricating", + -14.465551376342772 + ], + [ + "▁luminosity", + -14.465551376342772 + ], + [ + "▁silencing", + -14.465551376342772 + ], + [ + "▁suffocate", + -14.465551376342772 + ], + [ + "panasonic", + -14.46555233001709 + ], + [ + "▁inflection", + -14.46555233001709 + ], + [ + "▁resuming", + -14.46555233001709 + ], + [ + "▁synapse", + -14.46555233001709 + ], + [ + "acrylate", + -14.465553283691406 + ], + [ + "▁CHRISTMAS", + -14.465553283691406 + ], + [ + "▁Competitors", + -14.465553283691406 + ], + [ + "▁Conspiracy", + -14.465553283691406 + ], + [ + "▁dinghy", + -14.465553283691406 + ], + [ + "▁illiteracy", + -14.465553283691406 + ], + [ + "▁Convergence", + -14.465554237365724 + ], + [ + "▁Mauricio", + -14.46555519104004 + ], + [ + "▁shilling", + -14.46555519104004 + ], + [ + "▁Stencil", + -14.465556144714355 + ], + [ + "▁Zynga", + -14.465556144714355 + ], + [ + "▁Clarendon", + -14.465557098388672 + ], + [ + "▁Symbian", + -14.465558052062988 + ], + [ + "▁Weaving", + -14.465559005737305 + ], + [ + "▁motocross", + -14.46555995941162 + ], + [ + "▁olfactory", + -14.46555995941162 + ], + [ + "▁smirk", + -14.46555995941162 + ], + [ + "▁Unleashed", + -14.465561866760254 + ], + [ + "▁Arora", + -14.465567588806152 + ], + [ + "Prominent", + -14.46557903289795 + ], + [ + "▁Bilingual", + -14.465580940246582 + ], + [ + "▁atonement", + -14.465581893920898 + ], + [ + "▁undiagnosed", + -14.465588569641112 + ], + [ + "▁CHILD", + -14.465594291687012 + ], + [ + "Cultivate", + -14.465600967407228 + ], + [ + "▁felonies", + -14.465600967407228 + ], + [ + "▁Cortana", + -14.46560287475586 + ], + [ + "▁adalah", + -14.465604782104492 + ], + [ + "▁grinning", + -14.465604782104492 + ], + [ + "▁protease", + -14.465609550476074 + ], + [ + "▁Cashmere", + -14.46561050415039 + ], + [ + "▁SlideShare", + -14.465614318847656 + ], + [ + "▁methylation", + -14.465614318847656 + ], + [ + "zov", + -14.465619087219238 + ], + [ + "▁$29.99", + -14.465659141540527 + ], + [ + "▁waveguide", + -14.465673446655272 + ], + [ + "▁Shalom", + -14.465690612792969 + ], + [ + "▁GPIO", + -14.465694427490234 + ], + [ + "▁mouthfeel", + -14.46570110321045 + ], + [ + "▁REO", + -14.465717315673828 + ], + [ + "▁Stolen", + -14.465721130371094 + ], + [ + "▁RADIO", + -14.465754508972168 + ], + [ + "▁Kraken", + -14.465790748596191 + ], + [ + "▁miniseries", + -14.465794563293455 + ], + [ + "▁caddie", + -14.465828895568848 + ], + [ + "▁thinned", + -14.465832710266112 + ], + [ + "▁Lewes", + -14.465904235839844 + ], + [ + "▁GROW", + -14.465925216674805 + ], + [ + "▁Thrifty", + -14.465928077697754 + ], + [ + "namo", + 
-14.465967178344728 + ], + [ + "▁Sachin", + -14.465978622436523 + ], + [ + "▁Zul", + -14.465988159179688 + ], + [ + "▁fizzy", + -14.465989112854004 + ], + [ + "▁Jingle", + -14.466022491455078 + ], + [ + "TeX", + -14.466058731079102 + ], + [ + "▁AVR", + -14.466062545776367 + ], + [ + "Arche", + -14.46609878540039 + ], + [ + "KAT", + -14.466107368469238 + ], + [ + "▁Requiem", + -14.46611213684082 + ], + [ + "financed", + -14.46611499786377 + ], + [ + "▁Metroid", + -14.466121673583984 + ], + [ + "urie", + -14.46615219116211 + ], + [ + "▁Sandwiches", + -14.466164588928224 + ], + [ + "▁mitt", + -14.466179847717283 + ], + [ + "▁vertebral", + -14.466205596923828 + ], + [ + "▁poplar", + -14.466233253479004 + ], + [ + "▁fitters", + -14.466279029846191 + ], + [ + "rapid", + -14.466285705566406 + ], + [ + "dresses", + -14.466315269470217 + ], + [ + "▁followup", + -14.466353416442873 + ], + [ + "▁slings", + -14.466358184814451 + ], + [ + "▁Fourier", + -14.466367721557615 + ], + [ + "1–2", + -14.466480255126951 + ], + [ + "▁ASF", + -14.466510772705078 + ], + [ + "▁Ashby", + -14.466510772705078 + ], + [ + "▁Mackey", + -14.466513633728027 + ], + [ + "args", + -14.466556549072266 + ], + [ + "▁Cooked", + -14.466567039489746 + ], + [ + "▁ascribed", + -14.466642379760742 + ], + [ + "▁9/10", + -14.46668529510498 + ], + [ + "▁Mawr", + -14.466721534729004 + ], + [ + "▁entitle", + -14.466741561889648 + ], + [ + "▁scuffs", + -14.46674633026123 + ], + [ + "▁Dung", + -14.46682834625244 + ], + [ + "▁ACTIVE", + -14.466917991638184 + ], + [ + "▁appended", + -14.466923713684082 + ], + [ + "▁swamped", + -14.466931343078612 + ], + [ + "▁Decorated", + -14.466946601867676 + ], + [ + "▁cursory", + -14.466976165771484 + ], + [ + "▁ecu", + -14.467000961303713 + ], + [ + "▁11.00", + -14.467008590698242 + ], + [ + "▁pinkish", + -14.467011451721191 + ], + [ + "Meter", + -14.46702480316162 + ], + [ + "▁Substances", + -14.467058181762695 + ], + [ + "▁Gaye", + -14.46707820892334 + ], + [ + "▁Unsurprisingly", + -14.467093467712402 + ], + [ + "▁Suffice", + -14.46709442138672 + ], + [ + "▁connotation", + -14.4671049118042 + ], + [ + "▁consumable", + -14.467111587524414 + ], + [ + "▁condolence", + -14.467120170593262 + ], + [ + "▁Piping", + -14.467140197753906 + ], + [ + "▁$1.99", + -14.467161178588867 + ], + [ + "Entering", + -14.467187881469728 + ], + [ + "otoxin", + -14.467227935791016 + ], + [ + "▁Giulia", + -14.467233657836914 + ], + [ + "▁Crimean", + -14.467247009277344 + ], + [ + "▁Sagan", + -14.46728229522705 + ], + [ + "Functional", + -14.467350959777832 + ], + [ + "spouse", + -14.467414855957031 + ], + [ + "egger", + -14.467425346374512 + ], + [ + "rsten", + -14.46744441986084 + ], + [ + "▁subsystem", + -14.467453002929688 + ], + [ + "▁Stake", + -14.467521667480469 + ], + [ + "Vendor", + -14.467538833618164 + ], + [ + "Surgical", + -14.467546463012695 + ], + [ + "Frozen", + -14.46755313873291 + ], + [ + "Birmingham", + -14.467570304870604 + ], + [ + "impaired", + -14.467571258544922 + ], + [ + "▁carnivores", + -14.467571258544922 + ], + [ + "ISBN", + -14.467575073242188 + ], + [ + "▁appointees", + -14.46759796142578 + ], + [ + "▁kink", + -14.467642784118652 + ], + [ + "▁Reasoning", + -14.467726707458496 + ], + [ + "▁2,600", + -14.467741012573242 + ], + [ + "Secondary", + -14.46774959564209 + ], + [ + "▁Fearless", + -14.467777252197266 + ], + [ + "casa", + -14.467870712280272 + ], + [ + "PPP", + -14.467903137207031 + ], + [ + "▁addons", + -14.46791172027588 + ], + [ + "▁Nutt", + -14.467970848083496 + ], + [ + "YARD", + 
-14.468010902404783 + ], + [ + "▁Unc", + -14.46802043914795 + ], + [ + "▁pimple", + -14.468056678771973 + ], + [ + "Presently", + -14.468107223510742 + ], + [ + "kissed", + -14.46811580657959 + ], + [ + "Evolution", + -14.468144416809082 + ], + [ + "▁bystander", + -14.468221664428713 + ], + [ + "rbi", + -14.46829891204834 + ], + [ + "uten", + -14.468339920043944 + ], + [ + "dua", + -14.468342781066896 + ], + [ + "Sticking", + -14.468358039855955 + ], + [ + "itization", + -14.468360900878906 + ], + [ + "progressive", + -14.468403816223145 + ], + [ + "Bold", + -14.46841526031494 + ], + [ + "▁banked", + -14.46844482421875 + ], + [ + "Establishing", + -14.468449592590332 + ], + [ + "SUM", + -14.468491554260254 + ], + [ + "▁stalwarts", + -14.468700408935549 + ], + [ + "▁Hort", + -14.468793869018556 + ], + [ + "ENER", + -14.468815803527832 + ], + [ + "▁NOK", + -14.46884822845459 + ], + [ + "▁2.75", + -14.468852043151855 + ], + [ + "▁footboard", + -14.468924522399902 + ], + [ + "▁cleanroom", + -14.468929290771484 + ], + [ + "Corps", + -14.46893310546875 + ], + [ + "2021", + -14.468955993652344 + ], + [ + "▁Weiner", + -14.469016075134276 + ], + [ + "▁Earning", + -14.469023704528809 + ], + [ + "▁Widely", + -14.469073295593262 + ], + [ + "▁shareable", + -14.469449043273926 + ], + [ + "▁gifs", + -14.46949577331543 + ], + [ + "▁6500", + -14.46950626373291 + ], + [ + "▁narcissist", + -14.46959114074707 + ], + [ + "▁Sagar", + -14.469648361206056 + ], + [ + "▁MASS", + -14.46965789794922 + ], + [ + "▁lustre", + -14.469674110412598 + ], + [ + "▁veiled", + -14.469696044921877 + ], + [ + "alysis", + -14.469698905944824 + ], + [ + "oxo", + -14.469700813293455 + ], + [ + "▁TECHNOLOGY", + -14.469732284545898 + ], + [ + "▁Vulnerability", + -14.469897270202637 + ], + [ + "▁Iced", + -14.470012664794922 + ], + [ + "▁Yonge", + -14.470120429992676 + ], + [ + "▁ECP", + -14.470152854919434 + ], + [ + "HART", + -14.47017765045166 + ], + [ + "▁(46", + -14.470194816589355 + ], + [ + "▁confidentially", + -14.470212936401367 + ], + [ + "▁Shai", + -14.47022819519043 + ], + [ + "▁Squa", + -14.470232963562012 + ], + [ + "abol", + -14.470242500305176 + ], + [ + "Blow", + -14.470316886901855 + ], + [ + "SIT", + -14.470410346984863 + ], + [ + "▁grooved", + -14.470515251159668 + ], + [ + "▁sein", + -14.47060775756836 + ], + [ + "▁Sieg", + -14.47073459625244 + ], + [ + "▁Volunteering", + -14.470780372619627 + ], + [ + "▁Mather", + -14.470824241638184 + ], + [ + "▁panelist", + -14.470853805541992 + ], + [ + "▁milwaukee", + -14.4708833694458 + ], + [ + "▁Callie", + -14.470946311950684 + ], + [ + "▁reno", + -14.471033096313477 + ], + [ + "NATO", + -14.471126556396484 + ], + [ + "▁Circu", + -14.47126293182373 + ], + [ + "▁Nishi", + -14.471271514892578 + ], + [ + "▁inferences", + -14.471390724182127 + ], + [ + "yler", + -14.471500396728516 + ], + [ + "ollie", + -14.471551895141602 + ], + [ + "iferous", + -14.471646308898926 + ], + [ + "▁TNF", + -14.471668243408203 + ], + [ + "1:00", + -14.471675872802734 + ], + [ + "▁Crispy", + -14.471726417541504 + ], + [ + "▁windmills", + -14.4717378616333 + ], + [ + "Bridging", + -14.47183322906494 + ], + [ + "ripping", + -14.4718599319458 + ], + [ + "arbeit", + -14.471881866455078 + ], + [ + "▁DRS", + -14.47191333770752 + ], + [ + "▁Bb", + -14.471920013427734 + ], + [ + "▁assent", + -14.471960067749023 + ], + [ + "▁racquet", + -14.472116470336914 + ], + [ + "▁Pretend", + -14.472127914428713 + ], + [ + "▁impatience", + -14.472129821777344 + ], + [ + "▁Alligator", + -14.47213077545166 + ], + [ + 
"▁Banerjee", + -14.47213077545166 + ], + [ + "▁Calypso", + -14.47213077545166 + ], + [ + "▁Communicator", + -14.47213077545166 + ], + [ + "▁Curacao", + -14.47213077545166 + ], + [ + "▁Dalmatian", + -14.47213077545166 + ], + [ + "▁Gershwin", + -14.47213077545166 + ], + [ + "▁Neanderthal", + -14.47213077545166 + ], + [ + "▁chameleon", + -14.47213077545166 + ], + [ + "▁forefathers", + -14.47213077545166 + ], + [ + "▁melamine", + -14.47213077545166 + ], + [ + "▁mottled", + -14.47213077545166 + ], + [ + "▁petrified", + -14.47213077545166 + ], + [ + "▁subliminal", + -14.47213077545166 + ], + [ + "▁unadulterated", + -14.47213077545166 + ], + [ + "▁conceit", + -14.472131729125977 + ], + [ + "▁rotisserie", + -14.472132682800291 + ], + [ + "▁Emporium", + -14.47213363647461 + ], + [ + "▁bewildering", + -14.47213363647461 + ], + [ + "▁intractable", + -14.47213363647461 + ], + [ + "▁snuggling", + -14.47213363647461 + ], + [ + "▁SPIRIT", + -14.472134590148926 + ], + [ + "▁SUNDAY", + -14.472134590148926 + ], + [ + "▁WRONG", + -14.472135543823242 + ], + [ + "▁Crockett", + -14.472137451171877 + ], + [ + "▁Competence", + -14.472138404846191 + ], + [ + "▁(866)", + -14.472139358520508 + ], + [ + "▁Mombasa", + -14.472139358520508 + ], + [ + "▁lavatory", + -14.472143173217772 + ], + [ + "▁Dobbs", + -14.472146034240724 + ], + [ + "▁Mortimer", + -14.472148895263672 + ], + [ + "▁geophysical", + -14.47215461730957 + ], + [ + "▁Fenwick", + -14.472156524658203 + ], + [ + "▁MEDICAL", + -14.47216796875 + ], + [ + "▁Ferrell", + -14.472168922424316 + ], + [ + "▁summation", + -14.472168922424316 + ], + [ + "▁RoHS", + -14.472175598144531 + ], + [ + "▁intercession", + -14.47217845916748 + ], + [ + "▁puffiness", + -14.472185134887695 + ], + [ + "▁Bianco", + -14.472187995910645 + ], + [ + "▁Forgiveness", + -14.472207069396973 + ], + [ + "▁finial", + -14.472219467163086 + ], + [ + "▁renderer", + -14.4722318649292 + ], + [ + "▁CAMERA", + -14.472237586975098 + ], + [ + "▁breaststroke", + -14.47223949432373 + ], + [ + "▁Practically", + -14.472254753112791 + ], + [ + "▁Yatra", + -14.472262382507324 + ], + [ + "▁viking", + -14.47226333618164 + ], + [ + "ARMA", + -14.472265243530272 + ], + [ + "▁partaking", + -14.472277641296388 + ], + [ + "▁Nadine", + -14.472283363342283 + ], + [ + "9-13", + -14.472298622131348 + ], + [ + "▁Pact", + -14.472318649291992 + ], + [ + "▁Kitchenaid", + -14.47232437133789 + ], + [ + "▁refraction", + -14.47233772277832 + ], + [ + "▁maven", + -14.472341537475586 + ], + [ + "▁Biking", + -14.472347259521484 + ], + [ + "INVEST", + -14.472372055053713 + ], + [ + "▁raincoat", + -14.47238540649414 + ], + [ + "▁SYNC", + -14.472407341003418 + ], + [ + "benefits", + -14.472411155700684 + ], + [ + "▁Sawmill", + -14.472419738769531 + ], + [ + "▁PUP", + -14.472434997558594 + ], + [ + "▁Enoch", + -14.47243595123291 + ], + [ + "▁Cordless", + -14.472447395324709 + ], + [ + "▁gratuity", + -14.472472190856934 + ], + [ + "▁Exposed", + -14.472475051879885 + ], + [ + "▁incessantly", + -14.472492218017578 + ], + [ + "▁delhi", + -14.47249984741211 + ], + [ + "▁Lilac", + -14.472511291503906 + ], + [ + "▁Critter", + -14.472538948059082 + ], + [ + "▁Leanne", + -14.472616195678713 + ], + [ + "Tape", + -14.472631454467772 + ], + [ + "▁Tokens", + -14.472700119018556 + ], + [ + "▁sterilized", + -14.472721099853516 + ], + [ + "undu", + -14.47274684906006 + ], + [ + "▁oodles", + -14.47276496887207 + ], + [ + "brica", + -14.472768783569336 + ], + [ + "▁Elba", + -14.472782135009766 + ], + [ + "glen", + -14.472784042358398 + ], + [ + 
"▁Marcelo", + -14.47280979156494 + ], + [ + "▁maison", + -14.472814559936523 + ], + [ + "Hydroxy", + -14.472825050354004 + ], + [ + "▁Octagon", + -14.472826957702637 + ], + [ + "WSC", + -14.4728364944458 + ], + [ + "ujo", + -14.472854614257812 + ], + [ + "▁FIG", + -14.47286605834961 + ], + [ + "▁Compressors", + -14.47286891937256 + ], + [ + "▁outnumber", + -14.472881317138672 + ], + [ + "▁Messy", + -14.472905158996582 + ], + [ + "▁Radiohead", + -14.472920417785645 + ], + [ + "▁preoperative", + -14.472967147827148 + ], + [ + "▁posterity", + -14.472996711730955 + ], + [ + "▁whims", + -14.47300624847412 + ], + [ + "▁10.7", + -14.473081588745115 + ], + [ + "▁Remaining", + -14.473130226135254 + ], + [ + "▁normalcy", + -14.473130226135254 + ], + [ + "▁Signage", + -14.473154067993164 + ], + [ + "▁funnier", + -14.473179817199709 + ], + [ + "▁Riggs", + -14.473182678222656 + ], + [ + "liff", + -14.473226547241213 + ], + [ + "▁rewire", + -14.473233222961426 + ], + [ + "sorted", + -14.473258972167969 + ], + [ + "▁Krav", + -14.47329807281494 + ], + [ + "▁Lyman", + -14.473341941833496 + ], + [ + "▁Soybean", + -14.473360061645508 + ], + [ + "▁Balsamic", + -14.473410606384276 + ], + [ + "▁Rocha", + -14.47342300415039 + ], + [ + "▁crook", + -14.473433494567873 + ], + [ + "Issues", + -14.473444938659668 + ], + [ + "▁(100%", + -14.47348690032959 + ], + [ + "▁Posh", + -14.473508834838867 + ], + [ + "▁Subscriptions", + -14.47352695465088 + ], + [ + "▁Sentry", + -14.47356128692627 + ], + [ + "belle", + -14.47356414794922 + ], + [ + "uai", + -14.47356414794922 + ], + [ + "▁muddle", + -14.47362995147705 + ], + [ + "ipi", + -14.473631858825684 + ], + [ + "improve", + -14.473661422729492 + ], + [ + "novation", + -14.473722457885742 + ], + [ + "▁12.1", + -14.473730087280272 + ], + [ + "taker", + -14.473783493041992 + ], + [ + "ovitz", + -14.473796844482422 + ], + [ + "▁SYN", + -14.473798751831056 + ], + [ + "▁$600,000", + -14.473801612854004 + ], + [ + "▁cavern", + -14.473837852478027 + ], + [ + "▁sleepover", + -14.47385311126709 + ], + [ + "▁FOB", + -14.473898887634276 + ], + [ + "▁dynamo", + -14.473898887634276 + ], + [ + "enzel", + -14.473922729492188 + ], + [ + "▁Kuo", + -14.473958015441896 + ], + [ + "▁21.5", + -14.473995208740234 + ], + [ + "elsa", + -14.474038124084473 + ], + [ + "▁turnips", + -14.47408962249756 + ], + [ + "▁braiding", + -14.47411060333252 + ], + [ + "cita", + -14.474114418029783 + ], + [ + "▁FARM", + -14.474163055419922 + ], + [ + "▁Thakur", + -14.474170684814451 + ], + [ + "▁flier", + -14.474185943603516 + ], + [ + "▁Folio", + -14.474245071411133 + ], + [ + "Evans", + -14.47425937652588 + ], + [ + "▁Bedside", + -14.47425937652588 + ], + [ + "orientated", + -14.47429084777832 + ], + [ + "Stylish", + -14.4743070602417 + ], + [ + "Kentucky", + -14.47431755065918 + ], + [ + "Binary", + -14.474318504333496 + ], + [ + "effort", + -14.47432804107666 + ], + [ + "▁Cheval", + -14.474347114562988 + ], + [ + "Hardware", + -14.47438144683838 + ], + [ + "1966", + -14.474478721618652 + ], + [ + "▁Bette", + -14.47449779510498 + ], + [ + "▁trademarked", + -14.47454071044922 + ], + [ + "babies", + -14.474565505981444 + ], + [ + "▁Therese", + -14.474573135375977 + ], + [ + "suggest", + -14.474617958068848 + ], + [ + "▁Hager", + -14.474631309509276 + ], + [ + "▁Carat", + -14.474663734436035 + ], + [ + "▁Conditional", + -14.474666595458984 + ], + [ + "▁Tactics", + -14.474685668945312 + ], + [ + "▁Jol", + -14.474732398986816 + ], + [ + "▁FRIENDS", + -14.47474765777588 + ], + [ + "Mol", + -14.474781036376951 + ], 
+ [ + "rillo", + -14.474842071533203 + ], + [ + "arca", + -14.474873542785645 + ], + [ + "Reservation", + -14.474876403808594 + ], + [ + "▁outdone", + -14.474898338317873 + ], + [ + "▁NMD", + -14.47500228881836 + ], + [ + "967", + -14.475061416625977 + ], + [ + "Durability", + -14.475130081176758 + ], + [ + "▁iBooks", + -14.47514533996582 + ], + [ + "▁RACE", + -14.475146293640137 + ], + [ + "▁Littleton", + -14.475224494934082 + ], + [ + "PAGE", + -14.475241661071776 + ], + [ + "▁foodstuffs", + -14.475249290466309 + ], + [ + "Mort", + -14.475281715393066 + ], + [ + "▁laterally", + -14.47531032562256 + ], + [ + "▁Beni", + -14.47537326812744 + ], + [ + "▁Realms", + -14.47540283203125 + ], + [ + "▁Palisades", + -14.47542667388916 + ], + [ + "▁SECTION", + -14.475533485412598 + ], + [ + "▁Segal", + -14.47555160522461 + ], + [ + "▁Violent", + -14.475667953491213 + ], + [ + "▁DSD", + -14.475719451904297 + ], + [ + "nag", + -14.475780487060549 + ], + [ + "wireless", + -14.475852012634276 + ], + [ + "2:21", + -14.475858688354492 + ], + [ + "▁Mulch", + -14.475915908813477 + ], + [ + "▁Foy", + -14.47600269317627 + ], + [ + "▁Kuma", + -14.476082801818848 + ], + [ + "Said", + -14.476120948791504 + ], + [ + "▁Playoff", + -14.476191520690918 + ], + [ + "▁5:45", + -14.476203918457031 + ], + [ + "Marsh", + -14.4763765335083 + ], + [ + "handled", + -14.476428031921388 + ], + [ + ".00%", + -14.476482391357422 + ], + [ + "▁’60", + -14.47655963897705 + ], + [ + "lien", + -14.476581573486328 + ], + [ + "keyboard", + -14.4766263961792 + ], + [ + "FAS", + -14.476634979248049 + ], + [ + "▁Brac", + -14.476658821105955 + ], + [ + "▁bagpipe", + -14.476709365844728 + ], + [ + "▁centralize", + -14.47671890258789 + ], + [ + "KCC", + -14.476799011230469 + ], + [ + "▁Muppets", + -14.476825714111328 + ], + [ + "billed", + -14.477002143859863 + ], + [ + "▁minty", + -14.477046012878418 + ], + [ + "▁Assisting", + -14.477096557617188 + ], + [ + "blow", + -14.477128028869627 + ], + [ + "▁Memes", + -14.477192878723145 + ], + [ + "▁36,000", + -14.477261543273926 + ], + [ + "▁SEMA", + -14.47732639312744 + ], + [ + "▁Haj", + -14.477341651916504 + ], + [ + "SCR", + -14.477377891540527 + ], + [ + "▁Jaffe", + -14.47739315032959 + ], + [ + "▁lessee", + -14.477395057678224 + ], + [ + "assigned", + -14.477428436279297 + ], + [ + "▁emulators", + -14.477473258972168 + ], + [ + "UCT", + -14.477499008178713 + ], + [ + "gramma", + -14.47751522064209 + ], + [ + "Developer", + -14.477550506591797 + ], + [ + "▁1813", + -14.477561950683594 + ], + [ + "▁Skar", + -14.477694511413574 + ], + [ + "-140", + -14.477846145629885 + ], + [ + "▁SRT", + -14.477888107299805 + ], + [ + "eia", + -14.477931022644045 + ], + [ + "HTS", + -14.4779634475708 + ], + [ + "▁absorber", + -14.477988243103027 + ], + [ + "▁languish", + -14.478069305419922 + ], + [ + "bap", + -14.478076934814451 + ], + [ + "▁McClellan", + -14.4781494140625 + ], + [ + "▁jumble", + -14.478150367736816 + ], + [ + "▁Nene", + -14.478279113769531 + ], + [ + "IFA", + -14.478291511535645 + ], + [ + "ieux", + -14.478307723999023 + ], + [ + "▁Trax", + -14.478330612182615 + ], + [ + "▁Ksh", + -14.478355407714844 + ], + [ + "▁evaporates", + -14.47841739654541 + ], + [ + "moore", + -14.478534698486328 + ], + [ + "▁fleshed", + -14.478564262390137 + ], + [ + "ASB", + -14.478589057922363 + ], + [ + "▁archetypes", + -14.478591918945312 + ], + [ + "bilis", + -14.478612899780272 + ], + [ + "▁Gry", + -14.47861671447754 + ], + [ + "TAP", + -14.478628158569336 + ], + [ + "▁commas", + -14.478711128234863 + ], + [ + 
"▁Marlboro", + -14.478750228881836 + ], + [ + "▁impeach", + -14.478752136230469 + ], + [ + "▁Abingdon", + -14.478753089904783 + ], + [ + "▁Bismarck", + -14.478753089904783 + ], + [ + "▁COMPUTER", + -14.478753089904783 + ], + [ + "▁Catalunya", + -14.478753089904783 + ], + [ + "▁Ceylon", + -14.478753089904783 + ], + [ + "▁Chisholm", + -14.478753089904783 + ], + [ + "▁Encinitas", + -14.478753089904783 + ], + [ + "▁Grosvenor", + -14.478753089904783 + ], + [ + "▁Locust", + -14.478753089904783 + ], + [ + "▁PROBLEM", + -14.478753089904783 + ], + [ + "▁Pompeii", + -14.478753089904783 + ], + [ + "▁amphibious", + -14.478753089904783 + ], + [ + "▁anomalous", + -14.478753089904783 + ], + [ + "▁cabochon", + -14.478753089904783 + ], + [ + "▁cuckoo", + -14.478753089904783 + ], + [ + "▁despicable", + -14.478753089904783 + ], + [ + "▁emoticon", + -14.478753089904783 + ], + [ + "▁hyderabad", + -14.478753089904783 + ], + [ + "▁macrophages", + -14.478753089904783 + ], + [ + "▁menstruation", + -14.478753089904783 + ], + [ + "▁quesadilla", + -14.478753089904783 + ], + [ + "▁surrogacy", + -14.478753089904783 + ], + [ + "▁trailblazer", + -14.478753089904783 + ], + [ + "▁dictating", + -14.478754043579102 + ], + [ + "▁november", + -14.478754043579102 + ], + [ + "Groovy", + -14.478754997253418 + ], + [ + "▁Leinster", + -14.478754997253418 + ], + [ + "▁STANDARD", + -14.478755950927734 + ], + [ + "▁Nicholls", + -14.478759765625 + ], + [ + "▁Programmable", + -14.478759765625 + ], + [ + "▁Periscope", + -14.478764533996582 + ], + [ + "▁hooves", + -14.478765487670898 + ], + [ + "▁Crayola", + -14.478769302368164 + ], + [ + "▁cymbalta", + -14.478774070739746 + ], + [ + "▁Jiangsu", + -14.47877597808838 + ], + [ + "▁unreasonably", + -14.478777885437012 + ], + [ + "▁Paddock", + -14.478778839111328 + ], + [ + "▁Finalists", + -14.478779792785645 + ], + [ + "▁turtleneck", + -14.478779792785645 + ], + [ + "▁maggot", + -14.47878360748291 + ], + [ + "▁Hallam", + -14.478788375854492 + ], + [ + "▁Kojima", + -14.478793144226074 + ], + [ + "▁DUP", + -14.47879409790039 + ], + [ + "▁Coulter", + -14.478811264038086 + ], + [ + "▁superyacht", + -14.478822708129885 + ], + [ + "▁Elli", + -14.478826522827148 + ], + [ + "▁laminating", + -14.47883415222168 + ], + [ + "▁Multiplayer", + -14.478873252868652 + ], + [ + "▁retrofitting", + -14.478874206542969 + ], + [ + "▁yurt", + -14.478878021240234 + ], + [ + "▁Carfax", + -14.478880882263184 + ], + [ + "hf", + -14.478887557983398 + ], + [ + "▁uninteresting", + -14.478907585144045 + ], + [ + "▁IBPS", + -14.478926658630373 + ], + [ + "▁Whitley", + -14.478928565979004 + ], + [ + "-10)", + -14.478938102722168 + ], + [ + "▁dandy", + -14.478941917419434 + ], + [ + "▁Lizzy", + -14.47895050048828 + ], + [ + "▁Armando", + -14.47896957397461 + ], + [ + "▁sickening", + -14.47898006439209 + ], + [ + "▁enthused", + -14.47903537750244 + ], + [ + "outlying", + -14.479053497314451 + ], + [ + "▁snout", + -14.479055404663086 + ], + [ + "▁Oleg", + -14.47906494140625 + ], + [ + "▁SECRET", + -14.47906494140625 + ], + [ + "▁seascape", + -14.479085922241213 + ], + [ + "▁Edgewater", + -14.479095458984377 + ], + [ + "▁Bantam", + -14.479108810424805 + ], + [ + "Stats", + -14.479132652282717 + ], + [ + "▁lincoln", + -14.479155540466309 + ], + [ + "▁Lockwood", + -14.47915744781494 + ], + [ + "▁Contain", + -14.479175567626951 + ], + [ + "▁squ", + -14.479199409484863 + ], + [ + "Hash", + -14.479206085205078 + ], + [ + "▁Andorra", + -14.479248046875 + ], + [ + "▁newsfeed", + -14.479275703430176 + ], + [ + "▁Custer", + 
-14.479366302490234 + ], + [ + "▁spayed", + -14.479372024536133 + ], + [ + "▁adjustability", + -14.479434967041016 + ], + [ + "▁drinkable", + -14.479436874389648 + ], + [ + "▁titans", + -14.47948932647705 + ], + [ + "▁tenured", + -14.479498863220217 + ], + [ + "▁bottomless", + -14.4795503616333 + ], + [ + "visi", + -14.479561805725098 + ], + [ + "jaya", + -14.47956657409668 + ], + [ + "▁Reactor", + -14.479574203491213 + ], + [ + "▁Dandy", + -14.47959804534912 + ], + [ + "▁Enzo", + -14.479618072509766 + ], + [ + "SUB", + -14.479628562927246 + ], + [ + "▁replant", + -14.479652404785156 + ], + [ + "▁overburden", + -14.479659080505373 + ], + [ + "umen", + -14.479701042175291 + ], + [ + "▁multicolored", + -14.479714393615724 + ], + [ + "▁regent", + -14.479724884033203 + ], + [ + "▁Keepers", + -14.479781150817873 + ], + [ + "▁Bane", + -14.479832649230955 + ], + [ + "▁Tutoring", + -14.479833602905272 + ], + [ + "HANA", + -14.479997634887695 + ], + [ + "Bride", + -14.480107307434082 + ], + [ + "▁HSS", + -14.480141639709473 + ], + [ + "064", + -14.480157852172852 + ], + [ + "961", + -14.480241775512695 + ], + [ + "burden", + -14.480311393737791 + ], + [ + "▁SOUL", + -14.480353355407717 + ], + [ + "Inspect", + -14.48049545288086 + ], + [ + "▁Taos", + -14.48052215576172 + ], + [ + "Nigeria", + -14.4805326461792 + ], + [ + "▁Showdown", + -14.480536460876465 + ], + [ + "cter", + -14.480616569519045 + ], + [ + "▁inflows", + -14.480658531188965 + ], + [ + "▁Aster", + -14.480661392211914 + ], + [ + "Enchanting", + -14.480708122253418 + ], + [ + "dition", + -14.48072338104248 + ], + [ + "▁Controlling", + -14.480725288391112 + ], + [ + "▁habitable", + -14.480757713317873 + ], + [ + "▁Optimizer", + -14.480794906616213 + ], + [ + "Amour", + -14.480841636657717 + ], + [ + "▁Freeview", + -14.480865478515623 + ], + [ + "▁forestall", + -14.480876922607422 + ], + [ + "▁headliners", + -14.480889320373535 + ], + [ + "▁resold", + -14.48090934753418 + ], + [ + "APT", + -14.48092269897461 + ], + [ + "integration", + -14.48101806640625 + ], + [ + "▁superfoods", + -14.481022834777832 + ], + [ + "Deputies", + -14.481050491333008 + ], + [ + "Safeguard", + -14.48106575012207 + ], + [ + "Receiving", + -14.481074333190918 + ], + [ + "Sophie", + -14.48111057281494 + ], + [ + "Clothing", + -14.481121063232422 + ], + [ + "Trinity", + -14.481124877929688 + ], + [ + "▁Dario", + -14.481124877929688 + ], + [ + "Magnetic", + -14.481151580810549 + ], + [ + "Leak", + -14.481249809265137 + ], + [ + "olon", + -14.481258392333984 + ], + [ + "mobility", + -14.481271743774414 + ], + [ + "Scottish", + -14.48129177093506 + ], + [ + "Gym", + -14.48129940032959 + ], + [ + "Harper", + -14.481307983398438 + ], + [ + "keh", + -14.481307983398438 + ], + [ + "birthday", + -14.48130989074707 + ], + [ + "▁Towels", + -14.481330871582031 + ], + [ + "▁Anwar", + -14.481353759765623 + ], + [ + "actively", + -14.481407165527344 + ], + [ + "consistent", + -14.481407165527344 + ], + [ + "▁sinkhole", + -14.481412887573242 + ], + [ + "▁Wien", + -14.481415748596191 + ], + [ + "▁slog", + -14.481431007385254 + ], + [ + "▁Ema", + -14.481435775756836 + ], + [ + "emphasize", + -14.48146915435791 + ], + [ + "Overnight", + -14.481471061706545 + ], + [ + "polished", + -14.481490135192873 + ], + [ + "Heating", + -14.48149871826172 + ], + [ + "▁crusader", + -14.481500625610352 + ], + [ + "▁Hansel", + -14.481525421142578 + ], + [ + "▁Withdraw", + -14.48164176940918 + ], + [ + "Publishing", + -14.481682777404783 + ], + [ + "▁Rebuild", + -14.481740951538086 + ], + [ + 
"Cisco", + -14.48174285888672 + ], + [ + "Harrison", + -14.48179817199707 + ], + [ + "▁Mandatory", + -14.481815338134766 + ], + [ + "▁12-18", + -14.48182487487793 + ], + [ + "Edition", + -14.481851577758787 + ], + [ + "▁Heir", + -14.482025146484377 + ], + [ + "▁Madhu", + -14.482048034667969 + ], + [ + "▁Pius", + -14.482072830200195 + ], + [ + "▁MERS", + -14.482097625732422 + ], + [ + "▁Umbria", + -14.482110977172852 + ], + [ + "▁backstroke", + -14.482115745544434 + ], + [ + "▁sills", + -14.482115745544434 + ], + [ + "▁Songwriter", + -14.482163429260254 + ], + [ + "▁idiotic", + -14.482261657714844 + ], + [ + "▁plagiarized", + -14.482284545898438 + ], + [ + "0:50", + -14.48231315612793 + ], + [ + "harga", + -14.482356071472168 + ], + [ + "SIG", + -14.482358932495115 + ], + [ + "▁MMC", + -14.482420921325684 + ], + [ + "▁Quicken", + -14.482477188110352 + ], + [ + "▁TDP", + -14.48251247406006 + ], + [ + "Auction", + -14.48252773284912 + ], + [ + "▁italy", + -14.482569694519045 + ], + [ + "▁Sleepy", + -14.48257064819336 + ], + [ + "▁Lilian", + -14.482586860656738 + ], + [ + "▁Breakers", + -14.482599258422852 + ], + [ + "▁faked", + -14.482605934143066 + ], + [ + "idal", + -14.482606887817385 + ], + [ + "00-115", + -14.48261833190918 + ], + [ + "▁Underneath", + -14.48264503479004 + ], + [ + "▁Valentin", + -14.48270320892334 + ], + [ + "oregon", + -14.48272705078125 + ], + [ + "wap", + -14.482745170593262 + ], + [ + "Performing", + -14.482831954956056 + ], + [ + "▁Statewide", + -14.482868194580078 + ], + [ + "▁Bask", + -14.482999801635742 + ], + [ + "forge", + -14.483038902282717 + ], + [ + "-3/4", + -14.483080863952637 + ], + [ + "▁STM", + -14.483213424682615 + ], + [ + "▁$3,500", + -14.48328971862793 + ], + [ + "MSP", + -14.483349800109863 + ], + [ + "▁Flies", + -14.483386039733888 + ], + [ + "pharm", + -14.483469009399414 + ], + [ + "▁deceitful", + -14.483512878417969 + ], + [ + "▁Dann", + -14.48372745513916 + ], + [ + "▁caverns", + -14.483770370483398 + ], + [ + "▁POC", + -14.483777046203612 + ], + [ + "Bound", + -14.483799934387209 + ], + [ + "akkar", + -14.483824729919434 + ], + [ + "▁+5", + -14.483846664428713 + ], + [ + "▁Azar", + -14.483847618103027 + ], + [ + "Essays", + -14.483885765075684 + ], + [ + "▁linky", + -14.483912467956545 + ], + [ + "▁Deo", + -14.483965873718262 + ], + [ + "yep", + -14.483969688415527 + ], + [ + "Manually", + -14.484007835388184 + ], + [ + "AYA", + -14.48416519165039 + ], + [ + "GHT", + -14.484196662902832 + ], + [ + "nitz", + -14.484278678894045 + ], + [ + "▁Verbal", + -14.484302520751951 + ], + [ + "rige", + -14.48442840576172 + ], + [ + "quotes", + -14.484502792358398 + ], + [ + "▁reducer", + -14.484545707702637 + ], + [ + "▁Karn", + -14.48460292816162 + ], + [ + "▁magnificently", + -14.484609603881836 + ], + [ + "TCP", + -14.48465061187744 + ], + [ + "ahem", + -14.484671592712402 + ], + [ + "▁Biome", + -14.484673500061035 + ], + [ + "cyber", + -14.484682083129885 + ], + [ + "Ahhh", + -14.484692573547363 + ], + [ + "▁SPS", + -14.48475170135498 + ], + [ + "stairs", + -14.484755516052246 + ], + [ + "Rust", + -14.48475742340088 + ], + [ + "▁Amara", + -14.484758377075195 + ], + [ + "▁idealism", + -14.484790802001951 + ], + [ + "▁ringer", + -14.48487949371338 + ], + [ + "Orthodontic", + -14.485137939453123 + ], + [ + "▁Intensity", + -14.485157012939451 + ], + [ + "▁Seinfeld", + -14.485157012939451 + ], + [ + "▁situate", + -14.485197067260742 + ], + [ + "▁Quake", + -14.485257148742676 + ], + [ + "▁Kondo", + -14.485268592834473 + ], + [ + "▁Sensitivity", + 
-14.48532485961914 + ], + [ + "▁FOLLOW", + -14.485394477844238 + ], + [ + "▁rebar", + -14.485407829284668 + ], + [ + "Longevity", + -14.485419273376465 + ], + [ + "Matrimony", + -14.485419273376465 + ], + [ + "▁Fibonacci", + -14.485419273376465 + ], + [ + "▁Holliday", + -14.485419273376465 + ], + [ + "▁Nietzsche", + -14.485419273376465 + ], + [ + "▁Pergola", + -14.485419273376465 + ], + [ + "▁Piccadilly", + -14.485419273376465 + ], + [ + "▁Repertory", + -14.485419273376465 + ], + [ + "▁alchemist", + -14.485419273376465 + ], + [ + "▁antiquities", + -14.485419273376465 + ], + [ + "▁apostolic", + -14.485419273376465 + ], + [ + "▁converging", + -14.485419273376465 + ], + [ + "▁lecithin", + -14.485419273376465 + ], + [ + "▁perpetuating", + -14.485419273376465 + ], + [ + "▁FOCUS", + -14.48542022705078 + ], + [ + "▁Swahili", + -14.485421180725098 + ], + [ + "▁VEGAS", + -14.485421180725098 + ], + [ + "▁tastier", + -14.485421180725098 + ], + [ + "▁FINISH", + -14.485422134399414 + ], + [ + "▁Tortoise", + -14.485422134399414 + ], + [ + "▁Groningen", + -14.48542308807373 + ], + [ + "▁Academia", + -14.485424041748049 + ], + [ + "Stomach", + -14.485424995422363 + ], + [ + "▁malformation", + -14.485424995422363 + ], + [ + "▁Bastille", + -14.485426902770996 + ], + [ + "▁Mathieu", + -14.485429763793944 + ], + [ + "▁Cucina", + -14.485433578491213 + ], + [ + "▁Discrete", + -14.48543643951416 + ], + [ + "▁TRUCK", + -14.485437393188477 + ], + [ + "▁plinth", + -14.485437393188477 + ], + [ + "▁netflix", + -14.48544216156006 + ], + [ + "▁skimmed", + -14.48544692993164 + ], + [ + "▁Satoshi", + -14.485447883605955 + ], + [ + "▁Caulfield", + -14.485451698303224 + ], + [ + "▁Wagga", + -14.485454559326172 + ], + [ + "▁futility", + -14.485461235046388 + ], + [ + "▁speckled", + -14.485469818115234 + ], + [ + "▁Intuit", + -14.48549461364746 + ], + [ + "▁Rapp", + -14.485499382019045 + ], + [ + "FUNCTION", + -14.485504150390623 + ], + [ + "▁skied", + -14.485508918762209 + ], + [ + "▁Perdue", + -14.485529899597168 + ], + [ + "▁compote", + -14.485533714294434 + ], + [ + "▁Gulfstream", + -14.485552787780762 + ], + [ + "▁Whitfield", + -14.485555648803713 + ], + [ + "▁remarried", + -14.48556423187256 + ], + [ + "▁Université", + -14.485605239868164 + ], + [ + "▁Alcott", + -14.485669136047363 + ], + [ + "▁errant", + -14.48567008972168 + ], + [ + "clawed", + -14.485674858093262 + ], + [ + "▁Nantes", + -14.485682487487791 + ], + [ + "▁Stroud", + -14.485718727111816 + ], + [ + "bum", + -14.485721588134766 + ], + [ + "▁houseplants", + -14.485735893249512 + ], + [ + "▁adoring", + -14.485747337341309 + ], + [ + "▁Dutton", + -14.485763549804688 + ], + [ + "IKI", + -14.485787391662598 + ], + [ + "▁Piggy", + -14.485795021057127 + ], + [ + "ozzi", + -14.485820770263672 + ], + [ + "▁rote", + -14.485830307006836 + ], + [ + "▁energetically", + -14.485831260681152 + ], + [ + "▁Environ", + -14.485837936401367 + ], + [ + "▁trappings", + -14.485859870910645 + ], + [ + "▁Schematic", + -14.485878944396973 + ], + [ + "badi", + -14.485885620117188 + ], + [ + "incident", + -14.485902786254885 + ], + [ + "▁Concurrent", + -14.48594093322754 + ], + [ + "▁Adair", + -14.485952377319336 + ], + [ + "▁bleachers", + -14.485962867736816 + ], + [ + "PCL", + -14.485984802246094 + ], + [ + "▁Insulin", + -14.48598575592041 + ], + [ + "▁forgery", + -14.485990524291992 + ], + [ + "▁Gyro", + -14.486018180847168 + ], + [ + "flare", + -14.486056327819824 + ], + [ + "▁remarketing", + -14.486056327819824 + ], + [ + "▁cameraman", + -14.486136436462402 + ], + [ + "lao", + 
-14.486149787902832 + ], + [ + "▁substantiated", + -14.486167907714844 + ], + [ + "▁seduced", + -14.486194610595703 + ], + [ + "RMC", + -14.486201286315918 + ], + [ + "Aldrich", + -14.486210823059082 + ], + [ + "yke", + -14.486220359802246 + ], + [ + "▁Deeds", + -14.486234664916992 + ], + [ + "▁chapbook", + -14.486288070678713 + ], + [ + "▁Howie", + -14.486321449279783 + ], + [ + "▁Nelly", + -14.486328125 + ], + [ + "▁fluttering", + -14.486373901367188 + ], + [ + "▁perfusion", + -14.486472129821776 + ], + [ + "▁Tracer", + -14.486482620239258 + ], + [ + "▁STATION", + -14.486526489257812 + ], + [ + "▁Extending", + -14.486527442932127 + ], + [ + "▁recklessly", + -14.486568450927734 + ], + [ + "▁PCOS", + -14.48659610748291 + ], + [ + "▁Bibb", + -14.486607551574709 + ], + [ + "▁Inexpensive", + -14.486610412597656 + ], + [ + "▁smeared", + -14.486627578735352 + ], + [ + "▁Palau", + -14.486663818359377 + ], + [ + "▁fiend", + -14.48670482635498 + ], + [ + "▁Tug", + -14.486889839172363 + ], + [ + "Tran", + -14.48691749572754 + ], + [ + "▁uncut", + -14.486933708190918 + ], + [ + "▁proffer", + -14.48696517944336 + ], + [ + "ageing", + -14.486967086791992 + ], + [ + "▁Contributor", + -14.486974716186523 + ], + [ + "▁seabed", + -14.486988067626951 + ], + [ + "Mach", + -14.48702335357666 + ], + [ + "bourn", + -14.48703956604004 + ], + [ + "▁lashed", + -14.487131118774414 + ], + [ + "▁heirlooms", + -14.48714828491211 + ], + [ + "▁formalized", + -14.487150192260742 + ], + [ + "LNA", + -14.487200736999512 + ], + [ + "bib", + -14.487217903137209 + ], + [ + "▁tantrum", + -14.487234115600586 + ], + [ + "generate", + -14.487242698669434 + ], + [ + "▁Jaz", + -14.48735237121582 + ], + [ + "▁lumpy", + -14.487359046936035 + ], + [ + "▁Corsa", + -14.4874906539917 + ], + [ + "influence", + -14.487541198730469 + ], + [ + "FOX", + -14.487579345703123 + ], + [ + "Ark", + -14.487658500671388 + ], + [ + "▁Invited", + -14.487672805786133 + ], + [ + "islav", + -14.487704277038574 + ], + [ + "interpret", + -14.487710952758787 + ], + [ + "▁Squ", + -14.487717628479004 + ], + [ + "▁Leila", + -14.487735748291016 + ], + [ + "▁archway", + -14.487743377685549 + ], + [ + "enu", + -14.487842559814451 + ], + [ + "▁Merida", + -14.48786449432373 + ], + [ + "▁Latch", + -14.48793601989746 + ], + [ + "renal", + -14.487945556640623 + ], + [ + "Entries", + -14.487954139709473 + ], + [ + "Patricia", + -14.487963676452637 + ], + [ + "Hispanic", + -14.487964630126951 + ], + [ + "Buffalo", + -14.48796844482422 + ], + [ + "Alliance", + -14.487971305847168 + ], + [ + "Desert", + -14.487984657287598 + ], + [ + "horsepower", + -14.487994194030762 + ], + [ + "▁Crump", + -14.488017082214355 + ], + [ + "▁clippers", + -14.488025665283203 + ], + [ + "▁purveyors", + -14.488030433654783 + ], + [ + "tact", + -14.488035202026367 + ], + [ + "Wrong", + -14.488041877746582 + ], + [ + "1:23", + -14.488052368164062 + ], + [ + "unknown", + -14.488091468811035 + ], + [ + "relatively", + -14.488112449645996 + ], + [ + "itsch", + -14.488163948059082 + ], + [ + "maintain", + -14.488168716430664 + ], + [ + "Partial", + -14.488221168518066 + ], + [ + "▁Embed", + -14.488245964050291 + ], + [ + "▁Roos", + -14.488247871398926 + ], + [ + "curtain", + -14.48826789855957 + ], + [ + "Thermal", + -14.4883451461792 + ], + [ + "▁Coyotes", + -14.488444328308104 + ], + [ + "▁Remains", + -14.48845386505127 + ], + [ + "▁Desperate", + -14.488500595092772 + ], + [ + "▁SNL", + -14.48850917816162 + ], + [ + "Chest", + -14.488516807556152 + ], + [ + "▁reselling", + -14.488655090332031 + 
], + [ + "LOD", + -14.488677024841309 + ], + [ + "Anyhow", + -14.488677978515623 + ], + [ + "▁Livingstone", + -14.48872184753418 + ], + [ + "▁Anno", + -14.488728523254396 + ], + [ + "▁Ringo", + -14.48873233795166 + ], + [ + "▁Pelle", + -14.488849639892578 + ], + [ + "mapper", + -14.48891258239746 + ], + [ + "signing", + -14.488924026489258 + ], + [ + "EDGE", + -14.488983154296877 + ], + [ + "fz", + -14.48899745941162 + ], + [ + "mute", + -14.48899745941162 + ], + [ + "▁Politicians", + -14.489002227783203 + ], + [ + "▁RENT", + -14.48900318145752 + ], + [ + "fibre", + -14.489047050476074 + ], + [ + "VIT", + -14.489049911499023 + ], + [ + "miner", + -14.489202499389648 + ], + [ + "inform", + -14.489215850830078 + ], + [ + "letting", + -14.489221572875977 + ], + [ + "▁unwinding", + -14.489269256591797 + ], + [ + "▁gratuitous", + -14.48929500579834 + ], + [ + "▁PUC", + -14.489297866821287 + ], + [ + "▁Hackers", + -14.48937702178955 + ], + [ + "▁Kova", + -14.48941135406494 + ], + [ + "monster", + -14.48949909210205 + ], + [ + "097", + -14.489540100097656 + ], + [ + "Alcoholic", + -14.489557266235352 + ], + [ + "ruti", + -14.489568710327148 + ], + [ + "Centric", + -14.48957061767578 + ], + [ + "▁Exter", + -14.48966121673584 + ], + [ + "▁multilayer", + -14.48969841003418 + ], + [ + "9+", + -14.489806175231934 + ], + [ + "recording", + -14.48985481262207 + ], + [ + "▁$76", + -14.489933967590332 + ], + [ + "MSU", + -14.48995876312256 + ], + [ + "▁Chords", + -14.490025520324709 + ], + [ + "Wheat", + -14.49008846282959 + ], + [ + "▁FUE", + -14.490106582641602 + ], + [ + "047", + -14.490174293518066 + ], + [ + "▁waxy", + -14.490215301513672 + ], + [ + "DOE", + -14.490274429321287 + ], + [ + "▁PIL", + -14.490286827087402 + ], + [ + "▁Kipling", + -14.49036979675293 + ], + [ + "▁Desai", + -14.490396499633787 + ], + [ + "▁james", + -14.49040412902832 + ], + [ + "▁Tunis", + -14.490434646606444 + ], + [ + "▁Rook", + -14.49045753479004 + ], + [ + "▁0.06", + -14.490483283996582 + ], + [ + "quake", + -14.490488052368164 + ], + [ + "▁stoke", + -14.49049949645996 + ], + [ + "▁vom", + -14.490615844726562 + ], + [ + "▁PROTECT", + -14.490639686584473 + ], + [ + "CTION", + -14.490656852722168 + ], + [ + "▁GUARANTEE", + -14.490660667419434 + ], + [ + "IDER", + -14.49069595336914 + ], + [ + "▁Duque", + -14.490697860717772 + ], + [ + "▁$72", + -14.49074649810791 + ], + [ + "▁capers", + -14.49081039428711 + ], + [ + "▁Willing", + -14.490817070007324 + ], + [ + "▁Reporters", + -14.490974426269531 + ], + [ + "agawa", + -14.491086959838867 + ], + [ + "▁Agate", + -14.49111557006836 + ], + [ + "neuro", + -14.491216659545898 + ], + [ + "roger", + -14.491286277770996 + ], + [ + "ANZ", + -14.491415977478027 + ], + [ + "Cav", + -14.491475105285645 + ], + [ + "▁ALD", + -14.491514205932615 + ], + [ + "▁sterilize", + -14.49152946472168 + ], + [ + "Rolling", + -14.49166488647461 + ], + [ + "fir", + -14.491713523864746 + ], + [ + "▁characterise", + -14.491766929626465 + ], + [ + "▁Polynesia", + -14.491775512695312 + ], + [ + "opian", + -14.491787910461426 + ], + [ + "jevic", + -14.49180793762207 + ], + [ + "▁11.0", + -14.491840362548828 + ], + [ + "▁24-7", + -14.491867065429688 + ], + [ + "▁PREMIUM", + -14.491867065429688 + ], + [ + "▁19.5", + -14.491876602172852 + ], + [ + "▁Hepa", + -14.4918794631958 + ], + [ + "▁Anya", + -14.491886138916016 + ], + [ + "Browser", + -14.491891860961914 + ], + [ + "▁wpc", + -14.491894721984863 + ], + [ + "▁Cambria", + -14.491901397705078 + ], + [ + "ANNA", + -14.491905212402344 + ], + [ + "▁Ong", + 
-14.491948127746582 + ], + [ + "grader", + -14.491995811462402 + ], + [ + "▁chirp", + -14.492010116577148 + ], + [ + "layout", + -14.492063522338867 + ], + [ + "▁Threats", + -14.492101669311523 + ], + [ + "jna", + -14.49210262298584 + ], + [ + "Hypnotherapy", + -14.492131233215332 + ], + [ + "Regrettably", + -14.492131233215332 + ], + [ + "Rehearsal", + -14.492131233215332 + ], + [ + "▁AstraZeneca", + -14.492131233215332 + ], + [ + "▁Donetsk", + -14.492131233215332 + ], + [ + "▁Flinders", + -14.492131233215332 + ], + [ + "▁Instinct", + -14.492131233215332 + ], + [ + "▁LaSalle", + -14.492131233215332 + ], + [ + "▁Prophecy", + -14.492131233215332 + ], + [ + "▁cybercriminals", + -14.492131233215332 + ], + [ + "▁duplicating", + -14.492131233215332 + ], + [ + "▁efficacious", + -14.492131233215332 + ], + [ + "▁phenergan", + -14.492131233215332 + ], + [ + "▁Antelope", + -14.492132186889648 + ], + [ + "▁Kirsty", + -14.492132186889648 + ], + [ + "▁PLoS", + -14.492132186889648 + ], + [ + "▁Traci", + -14.492132186889648 + ], + [ + "▁quotient", + -14.492132186889648 + ], + [ + "▁tributary", + -14.492132186889648 + ], + [ + "▁DeVos", + -14.492133140563965 + ], + [ + "▁Murfreesboro", + -14.492133140563965 + ], + [ + "▁glancing", + -14.49213409423828 + ], + [ + "▁voracious", + -14.49213409423828 + ], + [ + "▁Geospatial", + -14.492137908935549 + ], + [ + "corporeal", + -14.492140769958496 + ], + [ + "▁sobbing", + -14.492140769958496 + ], + [ + "▁Armoire", + -14.492141723632812 + ], + [ + "▁Nazarene", + -14.492141723632812 + ], + [ + "▁Rotherham", + -14.492142677307127 + ], + [ + "▁Sauté", + -14.492142677307127 + ], + [ + "▁Hellenic", + -14.492143630981444 + ], + [ + "▁Kino", + -14.492147445678713 + ], + [ + "▁Boundaries", + -14.492148399353027 + ], + [ + "▁Décor", + -14.49215030670166 + ], + [ + "▁Gresham", + -14.49215030670166 + ], + [ + "▁Palsy", + -14.49215316772461 + ], + [ + "▁macadamia", + -14.492161750793455 + ], + [ + "▁irradiation", + -14.492162704467772 + ], + [ + "▁unrefined", + -14.492164611816406 + ], + [ + "▁Leuven", + -14.492170333862305 + ], + [ + "▁rummage", + -14.492170333862305 + ], + [ + "▁engrossing", + -14.492175102233888 + ], + [ + "▁smugglers", + -14.492177963256836 + ], + [ + "▁Microsystems", + -14.49218463897705 + ], + [ + "▁uninitiated", + -14.492189407348633 + ], + [ + "▁acetone", + -14.492191314697266 + ], + [ + "▁compressive", + -14.492195129394531 + ], + [ + "▁neurotic", + -14.492195129394531 + ], + [ + "▁juries", + -14.492196083068848 + ], + [ + "▁Braintree", + -14.492207527160645 + ], + [ + "Exceed", + -14.492226600646973 + ], + [ + "▁Spyder", + -14.492226600646973 + ], + [ + "▁charles", + -14.492238998413086 + ], + [ + "▁Fiverr", + -14.492243766784668 + ], + [ + "▁Morrissey", + -14.492243766784668 + ], + [ + "▁VirtualBox", + -14.4922513961792 + ], + [ + "VAS", + -14.492277145385742 + ], + [ + "▁RDP", + -14.492277145385742 + ], + [ + "▁Hillcrest", + -14.492281913757324 + ], + [ + "▁unreadable", + -14.492305755615234 + ], + [ + "▁stony", + -14.4923095703125 + ], + [ + "▁salmonella", + -14.49232578277588 + ], + [ + "▁Winona", + -14.492353439331056 + ], + [ + "▁neuroscientist", + -14.492358207702637 + ], + [ + "▁superstructure", + -14.492362022399902 + ], + [ + "▁truer", + -14.492422103881836 + ], + [ + "▁defiantly", + -14.492441177368164 + ], + [ + "▁Qlik", + -14.49244785308838 + ], + [ + "▁Leesburg", + -14.49245834350586 + ], + [ + "▁Stiles", + -14.492462158203123 + ], + [ + "▁Mandalay", + -14.492466926574709 + ], + [ + "worx", + -14.492467880249023 + ], + [ + 
"▁misrepresented", + -14.492483139038086 + ], + [ + "-6000", + -14.49249267578125 + ], + [ + "▁Kenney", + -14.49250316619873 + ], + [ + "clothes", + -14.492508888244627 + ], + [ + "▁Sandstone", + -14.49253749847412 + ], + [ + "▁Simplified", + -14.492539405822754 + ], + [ + "▁humpback", + -14.492545127868652 + ], + [ + "▁sprained", + -14.492551803588867 + ], + [ + "▁SARS", + -14.492554664611816 + ], + [ + "▁pageviews", + -14.492558479309082 + ], + [ + "▁Colette", + -14.492559432983398 + ], + [ + "▁aliases", + -14.492596626281738 + ], + [ + "▁Ponzi", + -14.492621421813965 + ], + [ + "▁Ocala", + -14.492631912231444 + ], + [ + "▁Waterbury", + -14.492635726928713 + ], + [ + "▁Riverwalk", + -14.492652893066406 + ], + [ + "pictures", + -14.492657661437988 + ], + [ + "▁jazzy", + -14.492746353149414 + ], + [ + "pip", + -14.492772102355955 + ], + [ + "▁refueling", + -14.4927978515625 + ], + [ + "aircraft", + -14.49281406402588 + ], + [ + "▁travail", + -14.492853164672852 + ], + [ + "▁itunes", + -14.492949485778809 + ], + [ + "PRIZE", + -14.492972373962402 + ], + [ + "▁Copic", + -14.493033409118652 + ], + [ + "▁Sober", + -14.49309253692627 + ], + [ + "▁freakin", + -14.493103981018066 + ], + [ + "Demon", + -14.49313735961914 + ], + [ + "▁Gerhard", + -14.493163108825684 + ], + [ + "▁Bundy", + -14.493167877197266 + ], + [ + "▁denoted", + -14.493215560913086 + ], + [ + "!!!!!!!!!!!!!!!!", + -14.493226051330566 + ], + [ + "▁Flock", + -14.493274688720703 + ], + [ + "Fixes", + -14.493318557739258 + ], + [ + "defining", + -14.493329048156738 + ], + [ + "▁Wanting", + -14.493335723876951 + ], + [ + "▁Georgie", + -14.493389129638672 + ], + [ + "▁Laughing", + -14.49340534210205 + ], + [ + "▁Motherboard", + -14.493409156799316 + ], + [ + "▁?????????", + -14.493454933166504 + ], + [ + "▁shawls", + -14.493481636047363 + ], + [ + "erina", + -14.493494987487791 + ], + [ + "▁Vere", + -14.493545532226562 + ], + [ + "▁Northridge", + -14.493568420410156 + ], + [ + "▁$2.99", + -14.4935884475708 + ], + [ + "▁thoughtfulness", + -14.493595123291016 + ], + [ + "HUS", + -14.49367618560791 + ], + [ + "▁Auntie", + -14.493709564208984 + ], + [ + "WIC", + -14.493719100952148 + ], + [ + "RCP", + -14.493741035461426 + ], + [ + "Burg", + -14.493745803833008 + ], + [ + "▁outperforms", + -14.493754386901855 + ], + [ + "▁Gaines", + -14.493796348571776 + ], + [ + "▁booksellers", + -14.493803977966309 + ], + [ + "▁wail", + -14.49383544921875 + ], + [ + "▁seasonality", + -14.49387264251709 + ], + [ + "▁$129", + -14.49398708343506 + ], + [ + "▁Hatton", + -14.49405288696289 + ], + [ + "patent", + -14.494088172912598 + ], + [ + "impregnated", + -14.494105339050291 + ], + [ + "cob", + -14.494111061096191 + ], + [ + "▁sheathing", + -14.494176864624023 + ], + [ + "UAE", + -14.494178771972656 + ], + [ + "▁barbie", + -14.494195938110352 + ], + [ + "▁Leven", + -14.494213104248049 + ], + [ + "Lymph", + -14.494223594665527 + ], + [ + "▁Traumatic", + -14.4942626953125 + ], + [ + "▁breakpoint", + -14.494437217712402 + ], + [ + "▁dumpling", + -14.494462966918944 + ], + [ + "Doors", + -14.494483947753906 + ], + [ + "▁tramp", + -14.4945068359375 + ], + [ + "Hailing", + -14.49450969696045 + ], + [ + "▁freighter", + -14.49456024169922 + ], + [ + "wagon", + -14.494563102722168 + ], + [ + "▁symbolized", + -14.494565963745115 + ], + [ + "teachers", + -14.494668006896973 + ], + [ + "▁Surveying", + -14.494733810424805 + ], + [ + "▁MESS", + -14.494745254516602 + ], + [ + "routing", + -14.494769096374512 + ], + [ + "Corruption", + -14.49481964111328 + ], + [ + 
"Participating", + -14.494821548461914 + ], + [ + "▁cooperatively", + -14.494832038879396 + ], + [ + "vigil", + -14.494836807250977 + ], + [ + "Empty", + -14.494840621948242 + ], + [ + "Mercury", + -14.494864463806152 + ], + [ + "ellan", + -14.494865417480469 + ], + [ + "offensive", + -14.494865417480469 + ], + [ + "unfortunately", + -14.494867324829102 + ], + [ + "democracy", + -14.494869232177734 + ], + [ + "injury", + -14.494869232177734 + ], + [ + "vf", + -14.494914054870604 + ], + [ + "▁hotkey", + -14.49496841430664 + ], + [ + "▁Moti", + -14.494988441467283 + ], + [ + "▁Kamala", + -14.494997024536133 + ], + [ + "scaling", + -14.49509334564209 + ], + [ + "portion", + -14.49513816833496 + ], + [ + "▁reinterpret", + -14.49516487121582 + ], + [ + "variance", + -14.49522876739502 + ], + [ + "▁Elizabethan", + -14.495306968688965 + ], + [ + "▁Pentecostal", + -14.495328903198242 + ], + [ + "Hole", + -14.49533748626709 + ], + [ + "▁08:4", + -14.495354652404783 + ], + [ + "PBM", + -14.495386123657228 + ], + [ + "▁Crocodile", + -14.495413780212402 + ], + [ + "'6\"", + -14.495438575744627 + ], + [ + "▁Tobin", + -14.495442390441896 + ], + [ + "▁Streamline", + -14.495476722717283 + ], + [ + "▁Ponder", + -14.495479583740234 + ], + [ + "▁fads", + -14.495479583740234 + ], + [ + "athletic", + -14.495506286621094 + ], + [ + "▁Spire", + -14.495530128479004 + ], + [ + "FIX", + -14.495553016662598 + ], + [ + "adin", + -14.495572090148926 + ], + [ + "committed", + -14.495595932006836 + ], + [ + "▁Vesta", + -14.495646476745604 + ], + [ + "▁leech", + -14.495649337768556 + ], + [ + "▁Powerhouse", + -14.495657920837402 + ], + [ + "hive", + -14.49569320678711 + ], + [ + "▁middlemen", + -14.49570369720459 + ], + [ + "RIGHT", + -14.49571704864502 + ], + [ + "NIH", + -14.495756149291992 + ], + [ + "▁Sudden", + -14.495762825012209 + ], + [ + "ferri", + -14.495872497558594 + ], + [ + "▁acces", + -14.49590015411377 + ], + [ + "▁PPL", + -14.495944023132324 + ], + [ + "▁treading", + -14.49594783782959 + ], + [ + "FAX", + -14.495960235595703 + ], + [ + "hoek", + -14.496000289916992 + ], + [ + "▁spotter", + -14.496095657348633 + ], + [ + "else", + -14.496100425720217 + ], + [ + "▁burp", + -14.496192932128906 + ], + [ + "Tourist", + -14.496203422546388 + ], + [ + "▁stinks", + -14.496293067932127 + ], + [ + "▁GTS", + -14.496335983276367 + ], + [ + "mindedness", + -14.496341705322266 + ], + [ + "▁Johnnie", + -14.496347427368164 + ], + [ + "▁Moyer", + -14.496349334716797 + ], + [ + "RANCE", + -14.49636173248291 + ], + [ + "▁Nato", + -14.49639129638672 + ], + [ + "Patent", + -14.496392250061035 + ], + [ + "▁Foxx", + -14.49653148651123 + ], + [ + "alaska", + -14.496562004089355 + ], + [ + "▁RCI", + -14.496562004089355 + ], + [ + "arelli", + -14.496655464172363 + ], + [ + "▁reframe", + -14.496684074401855 + ], + [ + "nog", + -14.496689796447754 + ], + [ + "Detail", + -14.49687385559082 + ], + [ + "damn", + -14.49689483642578 + ], + [ + "▁5:1", + -14.496910095214844 + ], + [ + "filed", + -14.496928215026855 + ], + [ + "▁Hagg", + -14.496933937072754 + ], + [ + "beaten", + -14.496966361999512 + ], + [ + "▁Yorktown", + -14.49708080291748 + ], + [ + "amycin", + -14.497127532958984 + ], + [ + "▁highschool", + -14.497148513793944 + ], + [ + "purge", + -14.497159004211426 + ], + [ + "▁Helios", + -14.497204780578612 + ], + [ + "CLI", + -14.497222900390623 + ], + [ + "SCP", + -14.49731159210205 + ], + [ + "Simpli", + -14.49733543395996 + ], + [ + "▁carver", + -14.497374534606934 + ], + [ + "▁polygons", + -14.497411727905272 + ], + [ + 
"▁woolly", + -14.497499465942385 + ], + [ + "hiv", + -14.497540473937988 + ], + [ + "napped", + -14.497572898864746 + ], + [ + "4.1%", + -14.49767780303955 + ], + [ + "qo", + -14.497748374938965 + ], + [ + "anian", + -14.497814178466797 + ], + [ + "uwe", + -14.497937202453612 + ], + [ + "Duration", + -14.49802303314209 + ], + [ + "▁bailed", + -14.498024940490724 + ], + [ + "▁Accommodations", + -14.498026847839355 + ], + [ + "Baptiste", + -14.498032569885254 + ], + [ + "▁Goku", + -14.498079299926758 + ], + [ + "▁EMV", + -14.498126983642578 + ], + [ + "▁Lidl", + -14.498159408569336 + ], + [ + "▁Melo", + -14.498167037963867 + ], + [ + "▁heartworm", + -14.49824333190918 + ], + [ + "EPT", + -14.498306274414062 + ], + [ + "Lessons", + -14.498320579528809 + ], + [ + "▁DUE", + -14.498347282409668 + ], + [ + "▁WHEEL", + -14.498409271240234 + ], + [ + "IANA", + -14.498433113098145 + ], + [ + "enie", + -14.49845027923584 + ], + [ + "jali", + -14.498526573181152 + ], + [ + "wada", + -14.49856662750244 + ], + [ + "▁Australasia", + -14.498652458190918 + ], + [ + "rima", + -14.498723983764648 + ], + [ + "▁exhort", + -14.498757362365724 + ], + [ + "ophi", + -14.498760223388672 + ], + [ + "czy", + -14.49883270263672 + ], + [ + "▁Chautauqua", + -14.498887062072754 + ], + [ + "▁cirrhosis", + -14.498887062072754 + ], + [ + "▁Brahmin", + -14.49888801574707 + ], + [ + "▁Hacienda", + -14.49888801574707 + ], + [ + "▁Kiwanis", + -14.49888801574707 + ], + [ + "▁McCormack", + -14.49888801574707 + ], + [ + "▁Mendocino", + -14.49888801574707 + ], + [ + "▁Perennial", + -14.49888801574707 + ], + [ + "▁Srinivas", + -14.49888801574707 + ], + [ + "▁adversarial", + -14.49888801574707 + ], + [ + "▁calamities", + -14.49888801574707 + ], + [ + "▁collegial", + -14.49888801574707 + ], + [ + "▁concomitant", + -14.49888801574707 + ], + [ + "▁dimmable", + -14.49888801574707 + ], + [ + "▁herbivore", + -14.49888801574707 + ], + [ + "▁progenitor", + -14.49888801574707 + ], + [ + "▁uncontested", + -14.49888801574707 + ], + [ + "▁Gottlieb", + -14.498888969421388 + ], + [ + "▁Kiosk", + -14.498888969421388 + ], + [ + "▁genealogist", + -14.498888969421388 + ], + [ + "▁woodpecker", + -14.498888969421388 + ], + [ + "▁Dmitri", + -14.498889923095703 + ], + [ + "▁Hinckley", + -14.498889923095703 + ], + [ + "▁Nervous", + -14.498889923095703 + ], + [ + "▁admiral", + -14.498889923095703 + ], + [ + "▁christened", + -14.498889923095703 + ], + [ + "▁lymphedema", + -14.498889923095703 + ], + [ + "▁lipoprotein", + -14.49889087677002 + ], + [ + "▁Khloe", + -14.498893737792969 + ], + [ + "▁Azores", + -14.498896598815918 + ], + [ + "▁pleural", + -14.498897552490234 + ], + [ + "▁romaine", + -14.498897552490234 + ], + [ + "▁Preferably", + -14.49889850616455 + ], + [ + "▁nabbed", + -14.4989013671875 + ], + [ + "▁Yakima", + -14.498903274536133 + ], + [ + "▁submissive", + -14.49892234802246 + ], + [ + "▁Osteopathic", + -14.498929977416992 + ], + [ + "▁thoroughbred", + -14.498930931091309 + ], + [ + "▁LaTeX", + -14.498946189880373 + ], + [ + "▁HIGHLY", + -14.498953819274902 + ], + [ + "▁Electra", + -14.498968124389648 + ], + [ + "▁foreword", + -14.49897003173828 + ], + [ + "▁Preacher", + -14.498984336853027 + ], + [ + "Citizens", + -14.499008178710938 + ], + [ + "▁Reverb", + -14.499011039733888 + ], + [ + "▁Panhandle", + -14.499032020568848 + ], + [ + "▁12.7", + -14.49904727935791 + ], + [ + "▁deference", + -14.499074935913086 + ], + [ + "▁Bohol", + -14.499079704284668 + ], + [ + "▁segue", + -14.499080657958984 + ], + [ + "▁subsidised", + -14.499101638793944 + 
], + [ + "▁Bonham", + -14.49910831451416 + ], + [ + "▁scorched", + -14.499134063720703 + ], + [ + "▁$51", + -14.499155044555664 + ], + [ + "▁Magdalene", + -14.499192237854004 + ], + [ + "▁sera", + -14.499212265014648 + ], + [ + "▁Largo", + -14.49921703338623 + ], + [ + "▁Genesee", + -14.499228477478027 + ], + [ + "▁Luang", + -14.499282836914062 + ], + [ + "▁upping", + -14.499285697937012 + ], + [ + "Preview", + -14.499295234680176 + ], + [ + "▁QoS", + -14.499297142028809 + ], + [ + "▁rollback", + -14.4993257522583 + ], + [ + "▁frizzy", + -14.49936866760254 + ], + [ + "ате", + -14.499377250671388 + ], + [ + "▁PARTS", + -14.499382019042969 + ], + [ + "▁Leith", + -14.499387741088867 + ], + [ + "▁riverfront", + -14.499424934387209 + ], + [ + "iggle", + -14.49951171875 + ], + [ + "▁floorboards", + -14.499515533447266 + ], + [ + "▁Serrano", + -14.499531745910645 + ], + [ + "ZU", + -14.499536514282228 + ], + [ + "▁headcount", + -14.49954605102539 + ], + [ + "▁Neph", + -14.499547958374023 + ], + [ + "▁Ganges", + -14.499576568603516 + ], + [ + "▁Kod", + -14.499592781066896 + ], + [ + "▁Padded", + -14.49963092803955 + ], + [ + "▁stateside", + -14.499651908874512 + ], + [ + "▁Esco", + -14.499656677246094 + ], + [ + "▁munching", + -14.499679565429688 + ], + [ + "BUY", + -14.499736785888672 + ], + [ + "Applies", + -14.499795913696287 + ], + [ + "▁sooth", + -14.499832153320312 + ], + [ + "▁Placed", + -14.499853134155272 + ], + [ + "winged", + -14.499855041503906 + ], + [ + "▁devious", + -14.499879837036133 + ], + [ + "▁Industri", + -14.499885559082031 + ], + [ + "▁concourse", + -14.49989128112793 + ], + [ + "SOE", + -14.49991512298584 + ], + [ + "▁Khaled", + -14.49996566772461 + ], + [ + "▁Burnt", + -14.500005722045898 + ], + [ + "▁dissipated", + -14.5003023147583 + ], + [ + "▁Linus", + -14.500310897827148 + ], + [ + "▁Envoy", + -14.500354766845703 + ], + [ + "▁flattening", + -14.500380516052246 + ], + [ + "91)", + -14.50038719177246 + ], + [ + "▁tickled", + -14.50041961669922 + ], + [ + "▁Sutra", + -14.500447273254396 + ], + [ + "▁bouncer", + -14.500457763671877 + ], + [ + "3500", + -14.50050449371338 + ], + [ + "▁blesses", + -14.500508308410645 + ], + [ + "▁Arp", + -14.500571250915527 + ], + [ + "▁Giza", + -14.500571250915527 + ], + [ + "▁Rp", + -14.500582695007324 + ], + [ + "▁profiting", + -14.500617980957031 + ], + [ + "▁Persistent", + -14.500653266906738 + ], + [ + "▁888-6", + -14.500657081604004 + ], + [ + "▁homeopathy", + -14.500678062438965 + ], + [ + "&#;", + -14.50074577331543 + ], + [ + "▁escalator", + -14.500880241394045 + ], + [ + "▁artefact", + -14.50090503692627 + ], + [ + "▁Lame", + -14.500906944274902 + ], + [ + "▁Loon", + -14.500941276550291 + ], + [ + "▁centimeter", + -14.500962257385254 + ], + [ + "Implement", + -14.500981330871582 + ], + [ + "lute", + -14.500991821289062 + ], + [ + "▁Vow", + -14.501023292541504 + ], + [ + "▁Sixers", + -14.501097679138184 + ], + [ + "▁Bia", + -14.501140594482422 + ], + [ + "▁GSK", + -14.5011625289917 + ], + [ + "GRANT", + -14.501228332519531 + ], + [ + "▁detest", + -14.501260757446287 + ], + [ + "Tracker", + -14.501272201538086 + ], + [ + "clutter", + -14.501394271850586 + ], + [ + "accept", + -14.501399993896484 + ], + [ + "▁protons", + -14.501441955566406 + ], + [ + "▁Rooter", + -14.501521110534668 + ], + [ + "▁vip", + -14.501522064208984 + ], + [ + "gama", + -14.501550674438477 + ], + [ + "ATHER", + -14.50155544281006 + ], + [ + "▁Suh", + -14.501615524291992 + ], + [ + "contamination", + -14.501653671264648 + ], + [ + "welt", + 
-14.501659393310549 + ], + [ + "▁Deuteronomy", + -14.501670837402344 + ], + [ + "▁miller", + -14.501675605773926 + ], + [ + "▁lightened", + -14.501742362976074 + ], + [ + "Recognizing", + -14.501768112182615 + ], + [ + "drenched", + -14.501781463623049 + ], + [ + "Particularly", + -14.501791954040527 + ], + [ + "Dictionary", + -14.501795768737791 + ], + [ + "journey", + -14.501802444458008 + ], + [ + "reclaimed", + -14.501805305480955 + ], + [ + "Nissan", + -14.501806259155272 + ], + [ + "Pennsylvania", + -14.501811981201172 + ], + [ + "Jewelry", + -14.5018310546875 + ], + [ + "1978", + -14.501853942871094 + ], + [ + "Germain", + -14.501855850219728 + ], + [ + "▁Dutchman", + -14.50185775756836 + ], + [ + "▁Blau", + -14.5018892288208 + ], + [ + "▁Cale", + -14.50189971923828 + ], + [ + "▁COOK", + -14.50194263458252 + ], + [ + "COMMENTS", + -14.501945495605469 + ], + [ + "owed", + -14.501968383789062 + ], + [ + "▁$7,000", + -14.501988410949709 + ], + [ + "Instrument", + -14.502038955688477 + ], + [ + "▁lurks", + -14.50204086303711 + ], + [ + "crow", + -14.502148628234863 + ], + [ + "Cedar", + -14.502184867858888 + ], + [ + "actin", + -14.502184867858888 + ], + [ + "WATER", + -14.502249717712402 + ], + [ + "▁Rong", + -14.502320289611816 + ], + [ + "bila", + -14.502330780029297 + ], + [ + "▁Eton", + -14.502368927001951 + ], + [ + "▁300+", + -14.502373695373535 + ], + [ + "▁junkies", + -14.502378463745115 + ], + [ + "▁Upgrading", + -14.502398490905762 + ], + [ + "▁Daniela", + -14.502408981323242 + ], + [ + "▁defaulted", + -14.502448081970217 + ], + [ + "▁pinion", + -14.50247859954834 + ], + [ + "▁Assay", + -14.502500534057615 + ], + [ + "reduced", + -14.50251007080078 + ], + [ + "svg", + -14.50252628326416 + ], + [ + "▁Exporting", + -14.502692222595217 + ], + [ + "“[", + -14.50275421142578 + ], + [ + "▁sequencer", + -14.502764701843262 + ], + [ + "Clarke", + -14.502812385559082 + ], + [ + "3400", + -14.502832412719728 + ], + [ + "alba", + -14.502903938293455 + ], + [ + "▁Integra", + -14.50291347503662 + ], + [ + "▁$0.3", + -14.502973556518556 + ], + [ + "▁Puli", + -14.503044128417969 + ], + [ + "▁Screws", + -14.50315284729004 + ], + [ + "kru", + -14.503156661987305 + ], + [ + "craig", + -14.503182411193848 + ], + [ + "▁repaying", + -14.503183364868164 + ], + [ + "▁Excavation", + -14.503211975097656 + ], + [ + "▁isometric", + -14.503351211547852 + ], + [ + "▁Bully", + -14.503371238708496 + ], + [ + "▁omen", + -14.503384590148926 + ], + [ + "EVO", + -14.503393173217772 + ], + [ + "brev", + -14.503421783447266 + ], + [ + "▁Riches", + -14.503564834594728 + ], + [ + "▁Balloons", + -14.50359058380127 + ], + [ + "▁ephemera", + -14.503610610961914 + ], + [ + "7-18", + -14.503616333007812 + ], + [ + "david", + -14.503639221191406 + ], + [ + "▁Venn", + -14.503646850585938 + ], + [ + "▁GOAL", + -14.503652572631836 + ], + [ + "▁dif", + -14.503700256347656 + ], + [ + "▁musc", + -14.503722190856934 + ], + [ + "▁(16)", + -14.503761291503906 + ], + [ + "▁cytokine", + -14.503838539123535 + ], + [ + "▁weaved", + -14.504026412963867 + ], + [ + "VOR", + -14.504040718078612 + ], + [ + "-1800", + -14.504057884216309 + ], + [ + "▁Eurasia", + -14.504104614257812 + ], + [ + "▁Ganda", + -14.504233360290527 + ], + [ + "Nik", + -14.504280090332031 + ], + [ + "udin", + -14.504290580749512 + ], + [ + "Clicking", + -14.504349708557127 + ], + [ + "Centered", + -14.50444793701172 + ], + [ + "▁Cava", + -14.50455379486084 + ], + [ + "тор", + -14.504561424255373 + ], + [ + "meer", + -14.504589080810549 + ], + [ + "▁FAS", + 
-14.504596710205078 + ], + [ + "PNP", + -14.504605293273926 + ], + [ + "Shown", + -14.504611015319824 + ], + [ + "▁wetter", + -14.504680633544922 + ], + [ + "worldca", + -14.50478744506836 + ], + [ + "Organize", + -14.504810333251951 + ], + [ + "printer", + -14.504820823669434 + ], + [ + "▁Vla", + -14.504870414733888 + ], + [ + "rua", + -14.504880905151367 + ], + [ + "ég", + -14.5048828125 + ], + [ + "▁Bucs", + -14.504923820495604 + ], + [ + "▁gents", + -14.504929542541504 + ], + [ + "HMS", + -14.504952430725098 + ], + [ + "▁overloading", + -14.505019187927246 + ], + [ + "▁crud", + -14.505083084106444 + ], + [ + "▁PLM", + -14.505086898803713 + ], + [ + "▁busyness", + -14.505099296569824 + ], + [ + "Julian", + -14.505205154418944 + ], + [ + "▁reflectors", + -14.50526523590088 + ], + [ + "▁Ecker", + -14.505290031433104 + ], + [ + "▁rescind", + -14.505291938781738 + ], + [ + "GATE", + -14.50532341003418 + ], + [ + "Chunk", + -14.505373001098633 + ], + [ + "wong", + -14.50540828704834 + ], + [ + "Tracy", + -14.50542163848877 + ], + [ + "▁CAPTCHA", + -14.505423545837402 + ], + [ + "pode", + -14.505451202392578 + ], + [ + "▁Lump", + -14.505460739135742 + ], + [ + "▁$0.2", + -14.505508422851562 + ], + [ + "▁Haircuts", + -14.505577087402344 + ], + [ + "▁Equations", + -14.505610466003418 + ], + [ + "uzu", + -14.50563621520996 + ], + [ + "▁Euclid", + -14.505687713623049 + ], + [ + "▁Continuum", + -14.505690574645996 + ], + [ + "▁DIFFERENT", + -14.505690574645996 + ], + [ + "▁McGowan", + -14.505690574645996 + ], + [ + "▁McIntosh", + -14.505690574645996 + ], + [ + "▁Rishikesh", + -14.505690574645996 + ], + [ + "▁Witherspoon", + -14.505690574645996 + ], + [ + "▁amplifies", + -14.505690574645996 + ], + [ + "▁aperitif", + -14.505690574645996 + ], + [ + "▁appreciable", + -14.505690574645996 + ], + [ + "▁condescending", + -14.505690574645996 + ], + [ + "▁feldspar", + -14.505690574645996 + ], + [ + "▁filigree", + -14.505690574645996 + ], + [ + "▁interstitial", + -14.505690574645996 + ], + [ + "▁mausoleum", + -14.505690574645996 + ], + [ + "▁snazzy", + -14.505690574645996 + ], + [ + "▁Maitland", + -14.505691528320312 + ], + [ + "▁Tipperary", + -14.505691528320312 + ], + [ + "▁intensities", + -14.505691528320312 + ], + [ + "▁embezzlement", + -14.505692481994627 + ], + [ + "▁Toffee", + -14.505694389343262 + ], + [ + "▁WOMEN", + -14.505695343017578 + ], + [ + "▁fascism", + -14.505695343017578 + ], + [ + "▁Nagoya", + -14.505696296691896 + ], + [ + "▁barbarian", + -14.505696296691896 + ], + [ + "▁Bloomingdale", + -14.505697250366213 + ], + [ + "▁Killarney", + -14.505697250366213 + ], + [ + "▁Cognition", + -14.505699157714844 + ], + [ + "▁annihilation", + -14.505699157714844 + ], + [ + "▁elastin", + -14.50570011138916 + ], + [ + "▁polymorphism", + -14.505701065063477 + ], + [ + "▁opportune", + -14.505707740783691 + ], + [ + "▁Prabha", + -14.505712509155272 + ], + [ + "▁interfaith", + -14.505727767944336 + ], + [ + "▁Jura", + -14.505743980407717 + ], + [ + "▁Salty", + -14.505748748779297 + ], + [ + "▁malice", + -14.50575351715088 + ], + [ + "▁Togo", + -14.505767822265623 + ], + [ + "▁mockery", + -14.505769729614258 + ], + [ + "Formal", + -14.505776405334473 + ], + [ + "▁octagonal", + -14.505782127380373 + ], + [ + "▁Casualty", + -14.505784034729004 + ], + [ + "▁meetups", + -14.505784034729004 + ], + [ + "▁dutifully", + -14.505786895751951 + ], + [ + "CIC", + -14.505810737609863 + ], + [ + "▁Www", + -14.50581169128418 + ], + [ + "▁1789", + -14.505828857421877 + ], + [ + "▁multimodal", + -14.505850791931152 + ], + [ 
+ "▁uneducated", + -14.505871772766112 + ], + [ + "dorp", + -14.505877494812012 + ], + [ + "urri", + -14.505895614624023 + ], + [ + "▁carcasses", + -14.505919456481934 + ], + [ + "▁glutton", + -14.5059232711792 + ], + [ + "▁BSNL", + -14.505932807922363 + ], + [ + "▁Embo", + -14.50594711303711 + ], + [ + "▁categorically", + -14.50594711303711 + ], + [ + "▁Aviator", + -14.505952835083008 + ], + [ + "Thermo", + -14.505964279174805 + ], + [ + "▁QVC", + -14.505969047546388 + ], + [ + "PPS", + -14.505974769592283 + ], + [ + "▁Vio", + -14.50599479675293 + ], + [ + "▁Britons", + -14.505999565124512 + ], + [ + "▁cornice", + -14.50601291656494 + ], + [ + "▁Calais", + -14.50601577758789 + ], + [ + "▁Feat", + -14.506023406982422 + ], + [ + "▁analogies", + -14.506073951721191 + ], + [ + "▁instigated", + -14.506098747253418 + ], + [ + "▁Boyce", + -14.506110191345217 + ], + [ + "▁Harwood", + -14.506183624267578 + ], + [ + "▁conspired", + -14.50619411468506 + ], + [ + "Turner", + -14.50624179840088 + ], + [ + "Pul", + -14.506267547607422 + ], + [ + "▁twee", + -14.506277084350586 + ], + [ + "▁amulet", + -14.506282806396484 + ], + [ + "▁Gif", + -14.506300926208496 + ], + [ + "▁SEATS", + -14.506321907043455 + ], + [ + "chka", + -14.50632667541504 + ], + [ + "▁Tipton", + -14.506339073181152 + ], + [ + "▁sapling", + -14.506370544433594 + ], + [ + "EEL", + -14.506387710571287 + ], + [ + "flac", + -14.506399154663086 + ], + [ + "BERG", + -14.506424903869627 + ], + [ + "▁naturalization", + -14.506431579589844 + ], + [ + "▁PUMP", + -14.506436347961426 + ], + [ + "▁Arrest", + -14.50647258758545 + ], + [ + "▁KJV", + -14.506549835205078 + ], + [ + "duce", + -14.506556510925291 + ], + [ + "▁Westerners", + -14.50655746459961 + ], + [ + "9100", + -14.506566047668455 + ], + [ + "ckler", + -14.506583213806152 + ], + [ + "▁luxuriously", + -14.506585121154783 + ], + [ + "1910", + -14.506669998168944 + ], + [ + "▁flanks", + -14.506683349609377 + ], + [ + "bale", + -14.506733894348145 + ], + [ + "rq", + -14.506735801696776 + ], + [ + "▁Gavi", + -14.506750106811523 + ], + [ + "▁rehearsed", + -14.506770133972168 + ], + [ + "▁Catskill", + -14.506793975830078 + ], + [ + "constituting", + -14.506797790527344 + ], + [ + "▁11.4", + -14.506821632385254 + ], + [ + "▁reorganized", + -14.506827354431152 + ], + [ + "▁graphing", + -14.506940841674805 + ], + [ + "▁20:2", + -14.507000923156738 + ], + [ + "▁dave", + -14.507028579711914 + ], + [ + "Nic", + -14.507091522216797 + ], + [ + "▁Tangle", + -14.507092475891112 + ], + [ + "33%", + -14.507105827331545 + ], + [ + "▁iPF", + -14.507198333740234 + ], + [ + "▁raged", + -14.507207870483398 + ], + [ + "Developers", + -14.507235527038574 + ], + [ + "▁Alu", + -14.507262229919434 + ], + [ + "▁Isolation", + -14.507311820983888 + ], + [ + "▁SYSTEMS", + -14.507339477539062 + ], + [ + "▁snuggled", + -14.507341384887695 + ], + [ + "▁Metallica", + -14.507363319396973 + ], + [ + "Insider", + -14.50737476348877 + ], + [ + "▁Miku", + -14.507404327392578 + ], + [ + "▁conglomerates", + -14.50745964050293 + ], + [ + "Goals", + -14.507472038269045 + ], + [ + "utm", + -14.507509231567385 + ], + [ + "aesthet", + -14.507530212402344 + ], + [ + "USER", + -14.507588386535645 + ], + [ + "odh", + -14.507609367370604 + ], + [ + "957", + -14.507781982421877 + ], + [ + "ebu", + -14.507819175720217 + ], + [ + "Relieve", + -14.507845878601074 + ], + [ + "▁Gobi", + -14.50790786743164 + ], + [ + "▁Gautam", + -14.507943153381348 + ], + [ + "atlantic", + -14.507980346679688 + ], + [ + "▁Sled", + -14.508037567138672 + ], + [ 
+ "▁subpar", + -14.508049011230469 + ], + [ + "5.3%", + -14.50805377960205 + ], + [ + "▁seeping", + -14.508078575134276 + ], + [ + "▁MoU", + -14.508152961730955 + ], + [ + "yea", + -14.50817584991455 + ], + [ + "strapped", + -14.508191108703612 + ], + [ + "compare", + -14.508255004882812 + ], + [ + "Chad", + -14.50831699371338 + ], + [ + "blocker", + -14.50833225250244 + ], + [ + "▁helpdesk", + -14.508356094360352 + ], + [ + "▁Nomination", + -14.508368492126465 + ], + [ + "occur", + -14.508378982543944 + ], + [ + "▁petitioners", + -14.508411407470703 + ], + [ + "▁Zinn", + -14.508417129516602 + ], + [ + "▁Virat", + -14.50849723815918 + ], + [ + "Forgot", + -14.508502006530762 + ], + [ + "INR", + -14.508529663085938 + ], + [ + "▁jitters", + -14.50859832763672 + ], + [ + "▁shallot", + -14.508615493774414 + ], + [ + "▁Ballad", + -14.508692741394045 + ], + [ + "finiti", + -14.508706092834473 + ], + [ + "michael", + -14.50872802734375 + ], + [ + "Comparing", + -14.508774757385254 + ], + [ + "Emerging", + -14.5087890625 + ], + [ + "Comparison", + -14.508790016174316 + ], + [ + "Cycling", + -14.508793830871582 + ], + [ + "Configuration", + -14.508794784545898 + ], + [ + "Alumni", + -14.508798599243164 + ], + [ + "Politics", + -14.508800506591797 + ], + [ + "Portugal", + -14.508801460266112 + ], + [ + "Degree", + -14.508813858032228 + ], + [ + "Signature", + -14.508813858032228 + ], + [ + "Wizard", + -14.508818626403809 + ], + [ + "Automatically", + -14.508830070495604 + ], + [ + "Proxy", + -14.508831977844238 + ], + [ + "Voting", + -14.508864402770996 + ], + [ + "engagement", + -14.508872032165527 + ], + [ + "SHOP", + -14.508878707885742 + ], + [ + "Norway", + -14.508907318115234 + ], + [ + "STORM", + -14.508950233459473 + ], + [ + "Martha", + -14.509069442749023 + ], + [ + "Guaranteed", + -14.509075164794922 + ], + [ + "▁Promised", + -14.509087562561035 + ], + [ + "SERV", + -14.509099006652832 + ], + [ + "SERVER", + -14.509133338928224 + ], + [ + "▁corded", + -14.509138107299805 + ], + [ + "zul", + -14.509242057800291 + ], + [ + "strasse", + -14.50927734375 + ], + [ + "▁ESI", + -14.509278297424316 + ], + [ + "▁trashed", + -14.509282112121582 + ], + [ + "▁footnotes", + -14.509284973144531 + ], + [ + "▁buffered", + -14.509601593017578 + ], + [ + "▁Sprouts", + -14.509613037109377 + ], + [ + "▁Exa", + -14.509754180908203 + ], + [ + "▁nomads", + -14.509757995605469 + ], + [ + "▁demonstrator", + -14.509773254394531 + ], + [ + "bilitat", + -14.509818077087402 + ], + [ + "seems", + -14.509844779968262 + ], + [ + "anui", + -14.509921073913574 + ], + [ + "▁digesting", + -14.510015487670898 + ], + [ + "▁identi", + -14.510026931762695 + ], + [ + "cision", + -14.510160446166992 + ], + [ + "▁Geri", + -14.510167121887209 + ], + [ + "▁Downloading", + -14.510221481323242 + ], + [ + "▁assassins", + -14.510224342346191 + ], + [ + "▁gorillas", + -14.510302543640137 + ], + [ + "▁Waze", + -14.510388374328612 + ], + [ + "▁Gaze", + -14.510472297668455 + ], + [ + "▁8:45", + -14.510478973388672 + ], + [ + "turf", + -14.51050090789795 + ], + [ + "▁BILL", + -14.51051139831543 + ], + [ + "/1000", + -14.51055145263672 + ], + [ + "extend", + -14.5105562210083 + ], + [ + "▁phd", + -14.510573387145996 + ], + [ + "Barre", + -14.510604858398438 + ], + [ + "donald", + -14.510655403137209 + ], + [ + "▁dermatologists", + -14.510664939880373 + ], + [ + "/2006", + -14.510904312133787 + ], + [ + "▁NTT", + -14.510941505432127 + ], + [ + "▁Vite", + -14.510973930358888 + ], + [ + "▁0-6", + -14.511012077331545 + ], + [ + "▁homered", + 
-14.511181831359863 + ], + [ + "▁Param", + -14.51125144958496 + ], + [ + "▁HSM", + -14.51125717163086 + ], + [ + "oleum", + -14.511423110961914 + ], + [ + "▁mga", + -14.511459350585938 + ], + [ + "Saying", + -14.51164722442627 + ], + [ + "charter", + -14.51165008544922 + ], + [ + "rahman", + -14.511653900146484 + ], + [ + "▁Hé", + -14.51168155670166 + ], + [ + "7:10", + -14.511710166931152 + ], + [ + "vang", + -14.511850357055664 + ], + [ + "▁toolset", + -14.511897087097168 + ], + [ + "Tooth", + -14.511920928955078 + ], + [ + "▁litigate", + -14.511937141418455 + ], + [ + "ERG", + -14.511985778808594 + ], + [ + "sponsors", + -14.512097358703612 + ], + [ + "▁Proximity", + -14.512099266052246 + ], + [ + "▁Osborn", + -14.512174606323242 + ], + [ + "▁$3.2", + -14.512185096740724 + ], + [ + "forex", + -14.512256622314451 + ], + [ + "▁LPN", + -14.512274742126465 + ], + [ + "▁($9", + -14.512285232543944 + ], + [ + "army", + -14.512292861938477 + ], + [ + "▁judicious", + -14.512296676635742 + ], + [ + "-2-1", + -14.512298583984377 + ], + [ + "▁oct", + -14.512521743774414 + ], + [ + "Coincidentally", + -14.512539863586426 + ], + [ + "Vaccination", + -14.512539863586426 + ], + [ + "▁Abubakar", + -14.512539863586426 + ], + [ + "▁Biscayne", + -14.512539863586426 + ], + [ + "▁Yucatan", + -14.512539863586426 + ], + [ + "▁charcuterie", + -14.512539863586426 + ], + [ + "▁courgette", + -14.512539863586426 + ], + [ + "▁drowsy", + -14.512539863586426 + ], + [ + "▁dummies", + -14.512539863586426 + ], + [ + "▁embattled", + -14.512539863586426 + ], + [ + "▁larceny", + -14.512539863586426 + ], + [ + "▁tapioca", + -14.512539863586426 + ], + [ + "▁verbatim", + -14.512539863586426 + ], + [ + "▁Archibald", + -14.512540817260742 + ], + [ + "▁endodontic", + -14.512540817260742 + ], + [ + "▁nannies", + -14.512540817260742 + ], + [ + "▁Katharine", + -14.512543678283691 + ], + [ + "▁Moisturizer", + -14.512544631958008 + ], + [ + "▁christening", + -14.512544631958008 + ], + [ + "▁Pembrokeshire", + -14.512548446655272 + ], + [ + "▁conspiring", + -14.512548446655272 + ], + [ + "▁sashimi", + -14.512551307678224 + ], + [ + "▁Dunkirk", + -14.512554168701172 + ], + [ + "▁teleconference", + -14.512558937072754 + ], + [ + "▁Bruges", + -14.51255989074707 + ], + [ + "▁disassembly", + -14.51255989074707 + ], + [ + "▁Bonanza", + -14.512568473815918 + ], + [ + "▁Camelot", + -14.512569427490234 + ], + [ + "▁stilts", + -14.512569427490234 + ], + [ + "▁capsicum", + -14.512572288513184 + ], + [ + "▁Regression", + -14.512584686279297 + ], + [ + "▁dimmed", + -14.512601852416992 + ], + [ + "▁footstool", + -14.512611389160156 + ], + [ + "▁poachers", + -14.512611389160156 + ], + [ + "▁Annabelle", + -14.512612342834473 + ], + [ + "▁salve", + -14.512622833251951 + ], + [ + "▁Conroy", + -14.512629508972168 + ], + [ + "▁Towson", + -14.51264476776123 + ], + [ + "▁IFRS", + -14.512651443481444 + ], + [ + "▁vertex", + -14.51266098022461 + ], + [ + "▁posthumously", + -14.512665748596191 + ], + [ + "▁Scalia", + -14.512678146362305 + ], + [ + "▁Moorish", + -14.51271629333496 + ], + [ + "▁modding", + -14.51271629333496 + ], + [ + "▁Hawley", + -14.512720108032228 + ], + [ + "▁bream", + -14.512724876403809 + ], + [ + "▁volumetric", + -14.512800216674805 + ], + [ + "▁squish", + -14.51282787322998 + ], + [ + "▁Bergamo", + -14.512835502624512 + ], + [ + "▁MILLION", + -14.512840270996094 + ], + [ + "▁Kirkwood", + -14.512845993041992 + ], + [ + "▁Seder", + -14.512900352478027 + ], + [ + "▁Capstone", + -14.51293659210205 + ], + [ + "▁Footage", + -14.51293659210205 
+ ], + [ + "Vincent", + -14.512937545776367 + ], + [ + "MMC", + -14.51294994354248 + ], + [ + "▁topaz", + -14.512962341308594 + ], + [ + "Enriched", + -14.512980461120604 + ], + [ + "▁60+", + -14.512996673583984 + ], + [ + "▁odorless", + -14.513021469116213 + ], + [ + "▁specter", + -14.513054847717283 + ], + [ + "Enforce", + -14.513057708740234 + ], + [ + "▁Pvc", + -14.513108253479004 + ], + [ + "▁colocation", + -14.513123512268066 + ], + [ + "▁cath", + -14.513155937194824 + ], + [ + "conditional", + -14.513158798217772 + ], + [ + "▁2010-11", + -14.513178825378418 + ], + [ + "▁solarium", + -14.51326847076416 + ], + [ + "geist", + -14.513348579406738 + ], + [ + "1952", + -14.513350486755373 + ], + [ + "▁cooldown", + -14.513362884521484 + ], + [ + "▁middleweight", + -14.513370513916016 + ], + [ + "▁videogame", + -14.513391494750977 + ], + [ + "▁Oakwood", + -14.513413429260254 + ], + [ + "▁stringer", + -14.51348114013672 + ], + [ + "▁impediments", + -14.51350212097168 + ], + [ + "▁Frick", + -14.513507843017578 + ], + [ + "Assure", + -14.513538360595703 + ], + [ + "▁photonic", + -14.513545989990234 + ], + [ + "▁armrests", + -14.513583183288574 + ], + [ + "steal", + -14.513639450073242 + ], + [ + "▁dazzled", + -14.513680458068848 + ], + [ + "▁FIAT", + -14.513689041137695 + ], + [ + "▁tormented", + -14.513751983642578 + ], + [ + "▁croc", + -14.51376724243164 + ], + [ + "URO", + -14.51377773284912 + ], + [ + "▁drumstick", + -14.513781547546388 + ], + [ + "namely", + -14.5137939453125 + ], + [ + "binder", + -14.51379680633545 + ], + [ + "▁Ayo", + -14.513813972473145 + ], + [ + "▁Kidman", + -14.513815879821776 + ], + [ + "▁Gita", + -14.51389980316162 + ], + [ + "▁Holman", + -14.513903617858888 + ], + [ + "SRP", + -14.51395320892334 + ], + [ + "▁morphing", + -14.513992309570312 + ], + [ + "▁Morgen", + -14.51404857635498 + ], + [ + "▁inroads", + -14.51406478881836 + ], + [ + "panda", + -14.51406955718994 + ], + [ + "анн", + -14.514076232910156 + ], + [ + "▁Polishing", + -14.514080047607422 + ], + [ + "▁Topical", + -14.514114379882812 + ], + [ + "▁hornet", + -14.514130592346191 + ], + [ + "▁stubbornly", + -14.514168739318848 + ], + [ + "agree", + -14.51419448852539 + ], + [ + "▁Saks", + -14.514220237731934 + ], + [ + "▁nitrates", + -14.514237403869627 + ], + [ + "cracker", + -14.514287948608398 + ], + [ + "▁22:1", + -14.514305114746094 + ], + [ + "▁Atheist", + -14.514317512512209 + ], + [ + "▁justifiable", + -14.514328002929688 + ], + [ + "098", + -14.514384269714355 + ], + [ + "morphism", + -14.51443099975586 + ], + [ + "muir", + -14.514548301696776 + ], + [ + "▁fraudulently", + -14.514562606811523 + ], + [ + "▁midtown", + -14.51457977294922 + ], + [ + "▁Upside", + -14.514646530151367 + ], + [ + "SWA", + -14.51470947265625 + ], + [ + "▁billiard", + -14.514744758605955 + ], + [ + "▁ligand", + -14.514753341674805 + ], + [ + "▁quantifying", + -14.514813423156738 + ], + [ + "gressive", + -14.514832496643066 + ], + [ + "▁tryouts", + -14.514935493469238 + ], + [ + "NTON", + -14.51501750946045 + ], + [ + "▁fortification", + -14.515042304992676 + ], + [ + "▁handily", + -14.515047073364258 + ], + [ + "RACE", + -14.515155792236328 + ], + [ + "▁FOUND", + -14.515252113342283 + ], + [ + "▁Starwood", + -14.515265464782717 + ], + [ + "▁recoverable", + -14.515336036682127 + ], + [ + "▁8-1", + -14.515380859375 + ], + [ + "HANG", + -14.515426635742188 + ], + [ + "▁Ravel", + -14.515433311462402 + ], + [ + "▁theorist", + -14.51544189453125 + ], + [ + "infection", + -14.515490531921388 + ], + [ + "▁Fruity", + 
-14.515509605407717 + ], + [ + "1:17", + -14.51552963256836 + ], + [ + "▁choc", + -14.515546798706056 + ], + [ + "▁$97", + -14.515595436096191 + ], + [ + "Mich", + -14.515633583068848 + ], + [ + "▁subtype", + -14.515649795532228 + ], + [ + "▁refilling", + -14.515674591064451 + ], + [ + "▁gasping", + -14.5156888961792 + ], + [ + "Bullet", + -14.515735626220703 + ], + [ + "▁Combi", + -14.515750885009766 + ], + [ + "▁bolstering", + -14.515758514404297 + ], + [ + "problems", + -14.515777587890623 + ], + [ + "Incidentally", + -14.51579475402832 + ], + [ + "Architect", + -14.515809059143066 + ], + [ + "▁delved", + -14.515822410583496 + ], + [ + "Reliable", + -14.515827178955078 + ], + [ + "stakeholder", + -14.515836715698242 + ], + [ + "featuring", + -14.515851020812988 + ], + [ + "Heaven", + -14.515854835510254 + ], + [ + "recognition", + -14.515854835510254 + ], + [ + "Ctrl", + -14.515872955322266 + ], + [ + "▁Irina", + -14.515926361083984 + ], + [ + "▁Infusion", + -14.515969276428224 + ], + [ + "▁Kopp", + -14.516029357910156 + ], + [ + "weird", + -14.516063690185549 + ], + [ + "▁dispatching", + -14.516083717346191 + ], + [ + "▁copycat", + -14.516154289245604 + ], + [ + "RUN", + -14.51616668701172 + ], + [ + "▁Broadcom", + -14.516206741333008 + ], + [ + "greet", + -14.51625156402588 + ], + [ + "▁nymphs", + -14.516274452209473 + ], + [ + "▁Singaporeans", + -14.516303062438965 + ], + [ + "▁Ugg", + -14.516307830810549 + ], + [ + "TTI", + -14.516322135925291 + ], + [ + "▁500+", + -14.516345977783203 + ], + [ + "▁3.75", + -14.516348838806152 + ], + [ + "▁Ballast", + -14.516383171081545 + ], + [ + "▁150%", + -14.516402244567873 + ], + [ + "pearl", + -14.516414642333984 + ], + [ + "etha", + -14.516430854797363 + ], + [ + "▁sca", + -14.516460418701172 + ], + [ + "chand", + -14.516526222229004 + ], + [ + "▁watchmaking", + -14.516576766967772 + ], + [ + "RISE", + -14.516592979431152 + ], + [ + "Pale", + -14.516613960266112 + ], + [ + "▁Strang", + -14.516658782958984 + ], + [ + "▁drawdown", + -14.51666259765625 + ], + [ + "▁Hoops", + -14.516681671142578 + ], + [ + "soldier", + -14.516695022583008 + ], + [ + "Customized", + -14.516777992248535 + ], + [ + "0.02", + -14.516796112060549 + ], + [ + "▁quickness", + -14.516803741455078 + ], + [ + "fj", + -14.51681423187256 + ], + [ + "Brett", + -14.516864776611328 + ], + [ + "Barely", + -14.516905784606934 + ], + [ + "▁HIP", + -14.516949653625488 + ], + [ + "▁forked", + -14.516958236694336 + ], + [ + "cise", + -14.517003059387209 + ], + [ + "▁linguists", + -14.517003059387209 + ], + [ + "refresh", + -14.517017364501951 + ], + [ + "breathing", + -14.5170316696167 + ], + [ + "▁Grupo", + -14.517186164855955 + ], + [ + "▁slop", + -14.51731777191162 + ], + [ + "▁Oliva", + -14.517375946044922 + ], + [ + "▁Monks", + -14.51738739013672 + ], + [ + "▁Proficiency", + -14.51743984222412 + ], + [ + "▁mouldings", + -14.51747703552246 + ], + [ + "▁Wok", + -14.517494201660156 + ], + [ + "hug", + -14.5175142288208 + ], + [ + "EVE", + -14.517526626586914 + ], + [ + "▁11:45", + -14.51755142211914 + ], + [ + "▁IRL", + -14.517563819885254 + ], + [ + "fty", + -14.517565727233888 + ], + [ + "▁reformat", + -14.517627716064451 + ], + [ + "▁Eis", + -14.517651557922363 + ], + [ + "▁Breck", + -14.517688751220703 + ], + [ + "▁headshots", + -14.51780605316162 + ], + [ + "igation", + -14.5178861618042 + ], + [ + "▁Campos", + -14.517891883850098 + ], + [ + "▁chargeable", + -14.517935752868652 + ], + [ + "▁subsystems", + -14.517966270446776 + ], + [ + "▁Cheaper", + -14.517973899841309 + ], + [ 
+ "▁1816", + -14.51797866821289 + ], + [ + "▁confrontational", + -14.518049240112305 + ], + [ + "credi", + -14.518102645874023 + ], + [ + "▁digitalization", + -14.518131256103516 + ], + [ + "▁blackboard", + -14.518199920654297 + ], + [ + "hosh", + -14.51820945739746 + ], + [ + "▁Fable", + -14.51825714111328 + ], + [ + "ает", + -14.518306732177734 + ], + [ + "ahar", + -14.518314361572266 + ], + [ + "12-0", + -14.518385887145996 + ], + [ + "▁Kidz", + -14.518392562866213 + ], + [ + "▁Notting", + -14.51841163635254 + ], + [ + "▁Davison", + -14.518524169921877 + ], + [ + "▁Johnstone", + -14.518526077270508 + ], + [ + "▁Brunel", + -14.51858615875244 + ], + [ + "▁Kean", + -14.518619537353516 + ], + [ + "▁??????????", + -14.518688201904297 + ], + [ + "▁Comer", + -14.518733978271484 + ], + [ + "ABILITY", + -14.51876449584961 + ], + [ + "▁transcriptional", + -14.51877498626709 + ], + [ + "▁prostitutes", + -14.518783569335938 + ], + [ + "FEED", + -14.518863677978516 + ], + [ + "▁larva", + -14.518998146057127 + ], + [ + "▁reactivate", + -14.519058227539062 + ], + [ + "▁PIM", + -14.519216537475586 + ], + [ + "▁occ", + -14.519229888916016 + ], + [ + "▁drench", + -14.51926326751709 + ], + [ + "steria", + -14.519328117370604 + ], + [ + "▁Carra", + -14.5193452835083 + ], + [ + "▁CLP", + -14.519346237182615 + ], + [ + "▁Scramble", + -14.519351959228516 + ], + [ + "▁Toyo", + -14.51935863494873 + ], + [ + "▁roti", + -14.519412994384766 + ], + [ + "Broil", + -14.519413948059082 + ], + [ + "Exercising", + -14.51943588256836 + ], + [ + "TOKYO", + -14.51943588256836 + ], + [ + "▁Indiegogo", + -14.51943588256836 + ], + [ + "▁Leukemia", + -14.51943588256836 + ], + [ + "▁Piscine", + -14.51943588256836 + ], + [ + "▁SOMETHING", + -14.51943588256836 + ], + [ + "▁Uppsala", + -14.51943588256836 + ], + [ + "▁biryani", + -14.51943588256836 + ], + [ + "▁hypothyroidism", + -14.51943588256836 + ], + [ + "▁immovable", + -14.51943588256836 + ], + [ + "▁recuperation", + -14.51943588256836 + ], + [ + "▁solubility", + -14.51943588256836 + ], + [ + "▁Announces", + -14.519436836242676 + ], + [ + "▁Chameleon", + -14.519436836242676 + ], + [ + "▁Kundalini", + -14.519436836242676 + ], + [ + "▁EDUCATION", + -14.519437789916992 + ], + [ + "▁Potsdam", + -14.519437789916992 + ], + [ + "▁Gecko", + -14.519438743591309 + ], + [ + "▁REPAIR", + -14.519438743591309 + ], + [ + "▁CHOOSE", + -14.519439697265623 + ], + [ + "▁cliffhanger", + -14.519439697265623 + ], + [ + "▁pelican", + -14.519439697265623 + ], + [ + "▁perturbation", + -14.519439697265623 + ], + [ + "▁chagrin", + -14.519442558288574 + ], + [ + "▁Pulverizer", + -14.51944351196289 + ], + [ + "▁upcycled", + -14.519444465637209 + ], + [ + "▁Prabhu", + -14.519445419311523 + ], + [ + "▁Scarsdale", + -14.519445419311523 + ], + [ + "▁barbaric", + -14.519445419311523 + ], + [ + "▁Acetate", + -14.51944637298584 + ], + [ + "▁Gigabyte", + -14.519447326660156 + ], + [ + "▁Frederik", + -14.519448280334473 + ], + [ + "▁Goethe", + -14.519448280334473 + ], + [ + "▁fertilized", + -14.519454956054688 + ], + [ + "▁magnum", + -14.519455909729004 + ], + [ + "▁Conceded", + -14.519457817077637 + ], + [ + "▁Connelly", + -14.519457817077637 + ], + [ + "▁Vespa", + -14.519458770751951 + ], + [ + "▁calender", + -14.5194673538208 + ], + [ + "▁suprise", + -14.519471168518066 + ], + [ + "▁COURT", + -14.519472122192385 + ], + [ + "▁Ultrasonic", + -14.519474029541016 + ], + [ + "▁Waitrose", + -14.519474983215332 + ], + [ + "▁supplant", + -14.519474983215332 + ], + [ + "▁Kubrick", + -14.519481658935549 + ], + [ + 
"▁Alum", + -14.519486427307127 + ], + [ + "▁subheading", + -14.519497871398926 + ], + [ + "▁enum", + -14.51950740814209 + ], + [ + "▁calcite", + -14.519524574279783 + ], + [ + "▁KYC", + -14.519539833068848 + ], + [ + "▁Moser", + -14.51955509185791 + ], + [ + "▁pectin", + -14.519559860229492 + ], + [ + "Nash", + -14.519569396972656 + ], + [ + "▁Tuttle", + -14.519569396972656 + ], + [ + "▁digitizing", + -14.519576072692873 + ], + [ + "▁Redondo", + -14.519579887390137 + ], + [ + "▁follicul", + -14.519603729248049 + ], + [ + "▁COLLECTION", + -14.519612312316896 + ], + [ + "▁WINDOWS", + -14.51963710784912 + ], + [ + "▁Authenticity", + -14.519648551940918 + ], + [ + "▁Boomerang", + -14.519661903381348 + ], + [ + "▁Refurbished", + -14.519683837890623 + ], + [ + "▁Thickness", + -14.51968479156494 + ], + [ + "▁Jimmie", + -14.519718170166016 + ], + [ + "▁sunnies", + -14.519731521606444 + ], + [ + "▁Entertaining", + -14.519747734069824 + ], + [ + "▁Osage", + -14.519757270812988 + ], + [ + "▁ornamentation", + -14.51977825164795 + ], + [ + "▁Mercier", + -14.51987648010254 + ], + [ + "drone", + -14.5198974609375 + ], + [ + "ALO", + -14.519916534423828 + ], + [ + "▁dander", + -14.519916534423828 + ], + [ + "▁Taber", + -14.519948959350586 + ], + [ + "▁Tomcat", + -14.519989967346191 + ], + [ + "onica", + -14.519999504089355 + ], + [ + "▁WestJet", + -14.520005226135254 + ], + [ + "▁Seekers", + -14.520074844360352 + ], + [ + "regard", + -14.520113945007324 + ], + [ + "▁Berri", + -14.520113945007324 + ], + [ + "▁Shang", + -14.520124435424805 + ], + [ + "▁sanitized", + -14.520159721374512 + ], + [ + "▁WebMD", + -14.520163536071776 + ], + [ + "▁bunt", + -14.520187377929688 + ], + [ + "▁Pancakes", + -14.520191192626951 + ], + [ + "Arte", + -14.520216941833496 + ], + [ + "#7", + -14.520227432250977 + ], + [ + "▁restlessness", + -14.52027416229248 + ], + [ + "▁Grandview", + -14.520333290100098 + ], + [ + "▁NAND", + -14.52035427093506 + ], + [ + "RTS", + -14.520452499389648 + ], + [ + "▁delusional", + -14.52045726776123 + ], + [ + "▁WISE", + -14.520458221435549 + ], + [ + "▁MCAT", + -14.520483016967772 + ], + [ + "▁Gama", + -14.520489692687988 + ], + [ + "▁choruses", + -14.52052116394043 + ], + [ + "▁Oskar", + -14.52053928375244 + ], + [ + "▁Limon", + -14.52054214477539 + ], + [ + "▁Laird", + -14.520627975463867 + ], + [ + "▁Glazed", + -14.520687103271484 + ], + [ + "▁Neem", + -14.520710945129396 + ], + [ + "Rack", + -14.520734786987305 + ], + [ + "drag", + -14.520751953125 + ], + [ + "LDA", + -14.520865440368652 + ], + [ + "fused", + -14.520971298217772 + ], + [ + "migra", + -14.521062850952148 + ], + [ + "▁Mainstream", + -14.52111530303955 + ], + [ + "PRES", + -14.52113151550293 + ], + [ + "Jess", + -14.521170616149902 + ], + [ + "▁Downstairs", + -14.521254539489746 + ], + [ + "▁reinstalling", + -14.521278381347656 + ], + [ + "▁carelessly", + -14.521279335021973 + ], + [ + "▁Tonk", + -14.521302223205566 + ], + [ + "▁Decades", + -14.52134609222412 + ], + [ + "stigator", + -14.5213623046875 + ], + [ + "▁Helene", + -14.521520614624023 + ], + [ + "984", + -14.521526336669922 + ], + [ + "Recover", + -14.521597862243652 + ], + [ + "WIT", + -14.52161979675293 + ], + [ + "/23/", + -14.52162742614746 + ], + [ + "▁onyx", + -14.521655082702637 + ], + [ + "▁Syr", + -14.521705627441406 + ], + [ + "2.1%", + -14.521721839904783 + ], + [ + "▁Karr", + -14.521808624267578 + ], + [ + "▁Watcher", + -14.52182388305664 + ], + [ + "voiced", + -14.521843910217283 + ], + [ + "choo", + -14.52194595336914 + ], + [ + "▁Miele", + 
-14.52202320098877 + ], + [ + "BOW", + -14.52204132080078 + ], + [ + "institute", + -14.522058486938477 + ], + [ + "▁FRESH", + -14.522066116333008 + ], + [ + "▁Freshers", + -14.52216911315918 + ], + [ + "▁overhanging", + -14.522181510925291 + ], + [ + "▁yardage", + -14.522181510925291 + ], + [ + "▁Plu", + -14.522226333618164 + ], + [ + "uvre", + -14.522259712219238 + ], + [ + "▁Zambian", + -14.522299766540527 + ], + [ + "▁Blocking", + -14.522354125976562 + ], + [ + "▁Lova", + -14.522360801696776 + ], + [ + "PLAY", + -14.52241325378418 + ], + [ + "Minutes", + -14.522421836853027 + ], + [ + "▁Solon", + -14.52247142791748 + ], + [ + "▁bled", + -14.522503852844238 + ], + [ + "4,5", + -14.522562980651855 + ], + [ + "▁outshine", + -14.52260971069336 + ], + [ + "Dropped", + -14.522613525390623 + ], + [ + "UME", + -14.522672653198242 + ], + [ + "▁Graduated", + -14.522685050964355 + ], + [ + "▁Ferm", + -14.522686958312988 + ], + [ + "awake", + -14.522699356079102 + ], + [ + "▁hotdog", + -14.522729873657228 + ], + [ + "▁Suppl", + -14.522734642028809 + ], + [ + "bex", + -14.522740364074709 + ], + [ + "continent", + -14.522774696350098 + ], + [ + "▁Roux", + -14.52279567718506 + ], + [ + "▁moisturizes", + -14.52283000946045 + ], + [ + "fabricated", + -14.522841453552246 + ], + [ + "Iraq", + -14.522855758666992 + ], + [ + "intel", + -14.522871017456056 + ], + [ + "Neutral", + -14.522912979125977 + ], + [ + "Identity", + -14.522933959960938 + ], + [ + "consumption", + -14.522939682006836 + ], + [ + "poverty", + -14.522939682006836 + ], + [ + "Composite", + -14.522968292236328 + ], + [ + "manufactured", + -14.522992134094238 + ], + [ + "▁Saber", + -14.523011207580566 + ], + [ + "Judy", + -14.523026466369627 + ], + [ + "Operator", + -14.523035049438477 + ], + [ + "Newcastle", + -14.523041725158691 + ], + [ + "Bottle", + -14.523061752319336 + ], + [ + "▁QX", + -14.52310276031494 + ], + [ + "Hillary", + -14.523110389709473 + ], + [ + "Usage", + -14.523116111755373 + ], + [ + "pellant", + -14.523159980773926 + ], + [ + "Inspiration", + -14.52317714691162 + ], + [ + "1976", + -14.523200988769531 + ], + [ + "▁Pardo", + -14.523233413696287 + ], + [ + "mandated", + -14.523283004760742 + ], + [ + "▁Daesh", + -14.523354530334473 + ], + [ + "MDA", + -14.523370742797852 + ], + [ + "▁KJ", + -14.523432731628418 + ], + [ + "▁Toddlers", + -14.523454666137695 + ], + [ + "naturally", + -14.52348804473877 + ], + [ + "▁Rohan", + -14.523491859436035 + ], + [ + "Gentle", + -14.523591995239258 + ], + [ + "▁$1,3", + -14.523717880249023 + ], + [ + "failed", + -14.52375602722168 + ], + [ + "lech", + -14.523780822753906 + ], + [ + "▁hepato", + -14.52381992340088 + ], + [ + "▁Dutt", + -14.523828506469728 + ], + [ + "▁filers", + -14.523839950561523 + ], + [ + "uerte", + -14.523873329162598 + ], + [ + "▁tingle", + -14.524003028869627 + ], + [ + "▁Ionian", + -14.52403163909912 + ], + [ + "Whirl", + -14.52422046661377 + ], + [ + "Psycho", + -14.524225234985352 + ], + [ + "DSL", + -14.52431583404541 + ], + [ + "▁escalators", + -14.524337768554688 + ], + [ + "anie", + -14.52436637878418 + ], + [ + "▁Nunn", + -14.524432182312012 + ], + [ + "▁Lach", + -14.524438858032228 + ], + [ + "▁Divisional", + -14.524486541748049 + ], + [ + "Eliminate", + -14.524493217468262 + ], + [ + "▁curler", + -14.524493217468262 + ], + [ + "▁Sonja", + -14.524534225463867 + ], + [ + "▁Hyaluronic", + -14.524544715881348 + ], + [ + "▁Altima", + -14.524577140808104 + ], + [ + "▁sonnet", + -14.524577140808104 + ], + [ + "Eliminat", + -14.5245943069458 + ], + [ + 
"▁Crore", + -14.524658203125 + ], + [ + "▁Scheduler", + -14.524680137634276 + ], + [ + "Matching", + -14.524698257446287 + ], + [ + "▁Loew", + -14.524739265441896 + ], + [ + "▁dreamlike", + -14.524794578552246 + ], + [ + "dye", + -14.524836540222168 + ], + [ + "alot", + -14.524860382080078 + ], + [ + "Alien", + -14.524862289428713 + ], + [ + "▁Gabi", + -14.524943351745604 + ], + [ + "miya", + -14.524954795837402 + ], + [ + "Cher", + -14.525010108947754 + ], + [ + "▁annoys", + -14.525047302246094 + ], + [ + "▁hideout", + -14.525096893310549 + ], + [ + "▁12:15", + -14.525108337402344 + ], + [ + "▁Darien", + -14.525114059448242 + ], + [ + "basi", + -14.525139808654783 + ], + [ + "46)", + -14.52516269683838 + ], + [ + "▁Roca", + -14.52517032623291 + ], + [ + "hj", + -14.525181770324709 + ], + [ + "NIF", + -14.52523422241211 + ], + [ + "35,000", + -14.525260925292969 + ], + [ + "▁VK", + -14.525338172912598 + ], + [ + "guardians", + -14.525379180908203 + ], + [ + "Charle", + -14.525424003601074 + ], + [ + "▁overdone", + -14.525443077087402 + ], + [ + "▁aphids", + -14.525585174560549 + ], + [ + "Angle", + -14.525588035583496 + ], + [ + "Referr", + -14.525642395019531 + ], + [ + "provision", + -14.525701522827148 + ], + [ + "ativa", + -14.525718688964844 + ], + [ + "▁imbue", + -14.525785446166992 + ], + [ + "▁SVN", + -14.52582550048828 + ], + [ + "Checkout", + -14.525836944580078 + ], + [ + "▁intonation", + -14.525876998901367 + ], + [ + "▁popsicles", + -14.525887489318848 + ], + [ + "▁20:00", + -14.525959014892578 + ], + [ + "TTF", + -14.525978088378906 + ], + [ + "▁Rhone", + -14.52610969543457 + ], + [ + "▁haphazard", + -14.526137351989746 + ], + [ + "▁Rudi", + -14.526167869567873 + ], + [ + "campo", + -14.526219367980955 + ], + [ + "BIRD", + -14.526220321655272 + ], + [ + "ilator", + -14.52624797821045 + ], + [ + "Olymp", + -14.526290893554688 + ], + [ + "ucher", + -14.526304244995115 + ], + [ + "ellini", + -14.52632999420166 + ], + [ + "▁absorbers", + -14.526358604431152 + ], + [ + "Reimbursement", + -14.52638053894043 + ], + [ + "Vulnerable", + -14.52638053894043 + ], + [ + "▁CONSEQUENTIAL", + -14.52638053894043 + ], + [ + "▁Cretaceous", + -14.52638053894043 + ], + [ + "▁Jamboree", + -14.52638053894043 + ], + [ + "▁Schengen", + -14.52638053894043 + ], + [ + "▁Sycamore", + -14.52638053894043 + ], + [ + "▁encapsulation", + -14.52638053894043 + ], + [ + "▁endocannabinoid", + -14.52638053894043 + ], + [ + "▁escapade", + -14.52638053894043 + ], + [ + "▁infuriating", + -14.52638053894043 + ], + [ + "▁insolvent", + -14.52638053894043 + ], + [ + "▁juggernaut", + -14.52638053894043 + ], + [ + "▁leotard", + -14.52638053894043 + ], + [ + "▁malevolent", + -14.52638053894043 + ], + [ + "▁salutation", + -14.52638053894043 + ], + [ + "▁speculators", + -14.52638053894043 + ], + [ + "▁unremarkable", + -14.52638053894043 + ], + [ + "▁maddening", + -14.526381492614746 + ], + [ + "▁mulberry", + -14.526381492614746 + ], + [ + "▁sleuth", + -14.526381492614746 + ], + [ + "▁Stapleton", + -14.526382446289062 + ], + [ + "▁vigour", + -14.526382446289062 + ], + [ + "Hydroponic", + -14.52638339996338 + ], + [ + "▁Nikkei", + -14.52638339996338 + ], + [ + "▁Wadsworth", + -14.526384353637695 + ], + [ + "multiculturalism", + -14.526385307312012 + ], + [ + "▁Neglect", + -14.526386260986328 + ], + [ + "▁ARISING", + -14.526387214660645 + ], + [ + "▁sentinel", + -14.52638816833496 + ], + [ + "▁deformities", + -14.526392936706545 + ], + [ + "▁energised", + -14.52640151977539 + ], + [ + "▁Escobar", + -14.526402473449709 + ], + [ + 
"▁Pandey", + -14.526402473449709 + ], + [ + "▁Spartanburg", + -14.526403427124023 + ], + [ + "▁Bollinger", + -14.526409149169922 + ], + [ + "▁Tyrol", + -14.526409149169922 + ], + [ + "▁Trieste", + -14.526412963867188 + ], + [ + "▁deutschland", + -14.526412963867188 + ], + [ + "▁Albrecht", + -14.52641773223877 + ], + [ + "▁Arlene", + -14.526418685913086 + ], + [ + "▁humanistic", + -14.526419639587402 + ], + [ + "Bissau", + -14.526421546936035 + ], + [ + "▁floodplain", + -14.526422500610352 + ], + [ + "▁inflating", + -14.526423454284668 + ], + [ + "▁sleet", + -14.526424407958984 + ], + [ + "▁groomsmen", + -14.526432991027832 + ], + [ + "6-11", + -14.526436805725098 + ], + [ + "▁Mollie", + -14.526446342468262 + ], + [ + "▁gibt", + -14.526472091674805 + ], + [ + "▁Nikita", + -14.526495933532717 + ], + [ + "▁bpd", + -14.526495933532717 + ], + [ + "▁hairdressing", + -14.526500701904297 + ], + [ + "▁Mago", + -14.526504516601562 + ], + [ + "▁atomizer", + -14.526506423950195 + ], + [ + "▁Kapa", + -14.526515007019045 + ], + [ + "▁litany", + -14.526517868041992 + ], + [ + "▁Pitney", + -14.52652645111084 + ], + [ + "▁rivalries", + -14.526543617248535 + ], + [ + "▁destitute", + -14.526554107666016 + ], + [ + "▁Lazio", + -14.526582717895508 + ], + [ + "▁Tanning", + -14.526586532592772 + ], + [ + "▁mutton", + -14.526592254638672 + ], + [ + "▁Thug", + -14.526606559753418 + ], + [ + "▁cochlear", + -14.526613235473633 + ], + [ + "▁camber", + -14.526631355285645 + ], + [ + "believers", + -14.52663230895996 + ], + [ + "▁Annals", + -14.52664566040039 + ], + [ + "▁trusses", + -14.52667236328125 + ], + [ + "▁turban", + -14.526676177978516 + ], + [ + "FJ", + -14.526721000671388 + ], + [ + "▁NPCs", + -14.526741981506348 + ], + [ + "▁underlay", + -14.526742935180664 + ], + [ + "▁11:4", + -14.526771545410156 + ], + [ + "▁Applicable", + -14.526803970336914 + ], + [ + "▁Distress", + -14.526825904846191 + ], + [ + "▁Coptic", + -14.526905059814451 + ], + [ + "▁confounding", + -14.526947021484377 + ], + [ + "▁excelling", + -14.527007102966309 + ], + [ + "▁Sandler", + -14.527029037475586 + ], + [ + "▁CBRE", + -14.52703094482422 + ], + [ + "Partly", + -14.527050971984863 + ], + [ + "▁crisscross", + -14.527103424072266 + ], + [ + "Phen", + -14.52716064453125 + ], + [ + "▁AJAX", + -14.52721118927002 + ], + [ + "▁Furman", + -14.52722454071045 + ], + [ + "zb", + -14.527260780334473 + ], + [ + "▁pyrotechnic", + -14.527280807495115 + ], + [ + "TAKE", + -14.527334213256836 + ], + [ + "technik", + -14.527336120605469 + ], + [ + "▁Hexagon", + -14.527347564697266 + ], + [ + "▁Mayflower", + -14.527359008789062 + ], + [ + "Newton", + -14.527376174926758 + ], + [ + "▁Timmy", + -14.527426719665527 + ], + [ + "▁inhibitory", + -14.5274658203125 + ], + [ + "▁fiancee", + -14.527485847473145 + ], + [ + "▁washcloth", + -14.52753734588623 + ], + [ + "▁Lure", + -14.527559280395508 + ], + [ + "▁MLK", + -14.527582168579102 + ], + [ + "▁cfl", + -14.527681350708008 + ], + [ + "▁coalesce", + -14.527685165405272 + ], + [ + "▁Xcode", + -14.527735710144045 + ], + [ + "▁Benches", + -14.527833938598633 + ], + [ + "▁Sark", + -14.527874946594238 + ], + [ + "pillar", + -14.527962684631348 + ], + [ + "▁Tonal", + -14.527976989746094 + ], + [ + "▁overridden", + -14.527987480163574 + ], + [ + "SSO", + -14.528029441833496 + ], + [ + "▁uplifted", + -14.528069496154783 + ], + [ + "▁Reston", + -14.528141021728516 + ], + [ + "peel", + -14.52816390991211 + ], + [ + "ATP", + -14.528218269348145 + ], + [ + "dieu", + -14.528270721435549 + ], + [ + "OTD", + 
-14.528278350830078 + ], + [ + "▁chronicled", + -14.528403282165527 + ], + [ + "▁chocolatey", + -14.52844524383545 + ], + [ + "▁corticosteroids", + -14.528446197509766 + ], + [ + "▁redraw", + -14.528454780578612 + ], + [ + "▁codebase", + -14.528462409973145 + ], + [ + "▁Houthi", + -14.52846336364746 + ], + [ + "▁Sharia", + -14.52847385406494 + ], + [ + "▁MCP", + -14.528532028198242 + ], + [ + "▁sneer", + -14.528557777404783 + ], + [ + "▁Frodo", + -14.528576850891112 + ], + [ + "▁forbids", + -14.52859878540039 + ], + [ + "CRS", + -14.528765678405762 + ], + [ + "redacted", + -14.528767585754396 + ], + [ + "▁beekeeper", + -14.528786659240724 + ], + [ + "VENT", + -14.528796195983888 + ], + [ + "▁recast", + -14.528827667236328 + ], + [ + "▁Plunge", + -14.528847694396973 + ], + [ + "▁Lifelong", + -14.528935432434082 + ], + [ + "gari", + -14.52894115447998 + ], + [ + "▁traversed", + -14.52898120880127 + ], + [ + "▁inhabiting", + -14.528998374938965 + ], + [ + "▁DLP", + -14.529034614562988 + ], + [ + "cognitive", + -14.529048919677734 + ], + [ + "hesi", + -14.52907371520996 + ], + [ + "▁fourteenth", + -14.529102325439451 + ], + [ + "▁Renton", + -14.529130935668944 + ], + [ + "▁Paralympics", + -14.529207229614258 + ], + [ + "▁Nef", + -14.529210090637209 + ], + [ + "▁DANCE", + -14.529304504394531 + ], + [ + "▁Sinus", + -14.529342651367188 + ], + [ + "lw", + -14.52939510345459 + ], + [ + "1942", + -14.529409408569336 + ], + [ + "974", + -14.529437065124512 + ], + [ + "▁artistes", + -14.529528617858888 + ], + [ + "▁Jetta", + -14.52954387664795 + ], + [ + "▁jewellers", + -14.52961254119873 + ], + [ + "1980", + -14.52961540222168 + ], + [ + "9:10", + -14.529658317565918 + ], + [ + "▁testifying", + -14.529745101928713 + ], + [ + "voted", + -14.529766082763672 + ], + [ + "▁Stronger", + -14.529837608337402 + ], + [ + "urra", + -14.529854774475098 + ], + [ + "▁bookies", + -14.529870986938477 + ], + [ + "▁Xue", + -14.52988338470459 + ], + [ + "▁sachets", + -14.52992057800293 + ], + [ + "▁aplenty", + -14.529984474182127 + ], + [ + "Courtesy", + -14.530059814453123 + ], + [ + "ZONE", + -14.53006362915039 + ], + [ + "Hungarian", + -14.530073165893556 + ], + [ + "laundry", + -14.530081748962402 + ], + [ + "Viking", + -14.53013038635254 + ], + [ + "Baseball", + -14.530156135559082 + ], + [ + "molded", + -14.53016185760498 + ], + [ + "▁enchiladas", + -14.53016757965088 + ], + [ + "Bradley", + -14.53017234802246 + ], + [ + "3.2%", + -14.530259132385254 + ], + [ + "Toole", + -14.530351638793944 + ], + [ + "addict", + -14.530388832092283 + ], + [ + "3,800", + -14.530393600463867 + ], + [ + "▁reshaped", + -14.53042984008789 + ], + [ + "▁playthrough", + -14.530449867248535 + ], + [ + "kite", + -14.530573844909668 + ], + [ + "JN", + -14.53071403503418 + ], + [ + "Tailor", + -14.530719757080078 + ], + [ + "▁WJ", + -14.5308256149292 + ], + [ + "Tomato", + -14.530888557434082 + ], + [ + "Devil", + -14.53091812133789 + ], + [ + "▁pv", + -14.530948638916016 + ], + [ + "itsa", + -14.53095817565918 + ], + [ + "▁lanka", + -14.530991554260254 + ], + [ + "▁GHS", + -14.530998229980469 + ], + [ + "shek", + -14.531002044677734 + ], + [ + "76)", + -14.531063079833984 + ], + [ + "▁Domi", + -14.531092643737791 + ], + [ + "Milan", + -14.531285285949709 + ], + [ + "dumping", + -14.531391143798828 + ], + [ + "iene", + -14.531410217285156 + ], + [ + "cannot", + -14.531412124633787 + ], + [ + "latch", + -14.531513214111328 + ], + [ + "0-010", + -14.531527519226074 + ], + [ + "▁spain", + -14.531563758850098 + ], + [ + "admission", + 
-14.53160572052002 + ], + [ + "▁Whyte", + -14.531678199768066 + ], + [ + "tired", + -14.531679153442385 + ], + [ + "Personalization", + -14.531719207763672 + ], + [ + "▁intersects", + -14.531784057617188 + ], + [ + "necked", + -14.531852722167969 + ], + [ + "tropin", + -14.53195095062256 + ], + [ + "RRC", + -14.532011032104492 + ], + [ + "▁Thumbs", + -14.5320463180542 + ], + [ + "▁Peripheral", + -14.532050132751465 + ], + [ + "ROCK", + -14.532248497009276 + ], + [ + "▁consecutively", + -14.532297134399414 + ], + [ + "Tear", + -14.532389640808104 + ], + [ + "▁Voted", + -14.532503128051758 + ], + [ + "▁weeklong", + -14.532546043395996 + ], + [ + "▁familiarise", + -14.532581329345703 + ], + [ + "hue", + -14.532594680786133 + ], + [ + "▁Cupboard", + -14.532639503479004 + ], + [ + "▁SUM", + -14.532651901245115 + ], + [ + "▁Yasmin", + -14.532663345336914 + ], + [ + "▁Burj", + -14.532745361328123 + ], + [ + "▁officiate", + -14.532772064208984 + ], + [ + "stitutionalized", + -14.532779693603516 + ], + [ + "▁pedi", + -14.53278350830078 + ], + [ + "Candidate", + -14.53287124633789 + ], + [ + "▁ITF", + -14.532892227172852 + ], + [ + "▁webmail", + -14.532916069030762 + ], + [ + "Mapper", + -14.532950401306152 + ], + [ + "Networking", + -14.532953262329102 + ], + [ + "Ivan", + -14.533114433288574 + ], + [ + "NICA", + -14.533191680908203 + ], + [ + "Lease", + -14.533209800720217 + ], + [ + "▁exalt", + -14.533267974853516 + ], + [ + "▁TJNBoost", + -14.533371925354004 + ], + [ + "Frustrated", + -14.533373832702637 + ], + [ + "▁Anguilla", + -14.533373832702637 + ], + [ + "▁CHALLENGE", + -14.533373832702637 + ], + [ + "▁DirecTV", + -14.533373832702637 + ], + [ + "▁Leavenworth", + -14.533373832702637 + ], + [ + "▁Pfeiffer", + -14.533373832702637 + ], + [ + "▁Susquehanna", + -14.533373832702637 + ], + [ + "▁Tuscaloosa", + -14.533373832702637 + ], + [ + "▁altruistic", + -14.533373832702637 + ], + [ + "▁butterscotch", + -14.533373832702637 + ], + [ + "▁conceiving", + -14.533373832702637 + ], + [ + "▁creatinine", + -14.533373832702637 + ], + [ + "▁inadequacy", + -14.533373832702637 + ], + [ + "▁incendiary", + -14.533373832702637 + ], + [ + "▁ophthalmic", + -14.533373832702637 + ], + [ + "▁prodigy", + -14.533373832702637 + ], + [ + "▁sequestration", + -14.533373832702637 + ], + [ + "▁tachometer", + -14.533373832702637 + ], + [ + "▁Relativity", + -14.533374786376951 + ], + [ + "▁Turkmenistan", + -14.533374786376951 + ], + [ + "▁elitist", + -14.533374786376951 + ], + [ + "▁Dougherty", + -14.53337574005127 + ], + [ + "▁liaising", + -14.533377647399902 + ], + [ + "▁scuttle", + -14.533377647399902 + ], + [ + "▁Estimation", + -14.53337860107422 + ], + [ + "▁spongy", + -14.533379554748535 + ], + [ + "▁georgia", + -14.533380508422852 + ], + [ + "▁Molloy", + -14.533387184143066 + ], + [ + "▁mastectomy", + -14.533387184143066 + ], + [ + "Gastroenterology", + -14.5333890914917 + ], + [ + "▁Snapdeal", + -14.533390045166016 + ], + [ + "▁bursary", + -14.533390045166016 + ], + [ + "▁Cacao", + -14.533392906188965 + ], + [ + "▁Caracas", + -14.53339385986328 + ], + [ + "▁TRAINING", + -14.53339385986328 + ], + [ + "uhan", + -14.533395767211914 + ], + [ + "▁telehealth", + -14.533406257629396 + ], + [ + "OLOGY", + -14.53341007232666 + ], + [ + "▁Lucasfilm", + -14.533411026000977 + ], + [ + "▁Tetris", + -14.533417701721191 + ], + [ + "▁pummel", + -14.533432960510254 + ], + [ + "▁Emtek", + -14.533438682556152 + ], + [ + "▁riddles", + -14.533441543579102 + ], + [ + "▁cubed", + -14.533443450927734 + ], + [ + "▁moored", + 
-14.533443450927734 + ], + [ + "▁truce", + -14.53344440460205 + ], + [ + "▁Merrick", + -14.533461570739746 + ], + [ + "▁Dailymotion", + -14.533467292785645 + ], + [ + "▁Templar", + -14.53347396850586 + ], + [ + "VERSION", + -14.533498764038086 + ], + [ + "▁conformance", + -14.533512115478516 + ], + [ + "▁rapping", + -14.533522605895996 + ], + [ + "▁pounce", + -14.533536911010742 + ], + [ + "▁Colgate", + -14.53353786468506 + ], + [ + "▁scriptural", + -14.533538818359377 + ], + [ + "▁retargeting", + -14.53354263305664 + ], + [ + "▁reoccurring", + -14.533551216125488 + ], + [ + "▁mistreated", + -14.533563613891602 + ], + [ + "▁banknotes", + -14.533574104309082 + ], + [ + "▁Purina", + -14.533575057983398 + ], + [ + "▁venomous", + -14.533575057983398 + ], + [ + "▁Carved", + -14.53358554840088 + ], + [ + "▁abutment", + -14.533601760864258 + ], + [ + "▁Fuzzy", + -14.533607482910156 + ], + [ + "▁Ramesh", + -14.533611297607422 + ], + [ + "▁weaning", + -14.533612251281738 + ], + [ + "▁Magdalena", + -14.53362274169922 + ], + [ + "▁Latham", + -14.533635139465332 + ], + [ + "Gig", + -14.533644676208496 + ], + [ + "▁Modesto", + -14.533648490905762 + ], + [ + "=4", + -14.533660888671877 + ], + [ + "▁toxicology", + -14.533679008483888 + ], + [ + "▁Haywood", + -14.533699035644531 + ], + [ + "▁HJ", + -14.53370475769043 + ], + [ + "2:13", + -14.533709526062012 + ], + [ + "▁Thanos", + -14.533723831176758 + ], + [ + "▁Marketer", + -14.533740997314451 + ], + [ + "▁footy", + -14.533838272094728 + ], + [ + "▁Richey", + -14.53384017944336 + ], + [ + "▁Uprising", + -14.533842086791992 + ], + [ + "FIL", + -14.53387451171875 + ], + [ + "▁texturing", + -14.533880233764648 + ], + [ + "▁Amgen", + -14.533886909484863 + ], + [ + "Banking", + -14.533949851989746 + ], + [ + "▁obsessing", + -14.53398323059082 + ], + [ + "▁Ruling", + -14.53404426574707 + ], + [ + "▁Instituto", + -14.534048080444336 + ], + [ + "▁SELF", + -14.534104347229004 + ], + [ + "▁heatwave", + -14.534153938293455 + ], + [ + "▁Vish", + -14.534174919128418 + ], + [ + "▁Narnia", + -14.534210205078123 + ], + [ + "▁trumpeter", + -14.534214973449709 + ], + [ + "Temporal", + -14.5342378616333 + ], + [ + "▁modulated", + -14.534238815307615 + ], + [ + "sweat", + -14.534256935119627 + ], + [ + "▁reconfigured", + -14.534273147583008 + ], + [ + "▁underdogs", + -14.534295082092283 + ], + [ + "varian", + -14.534344673156738 + ], + [ + "▁Rubi", + -14.534414291381836 + ], + [ + "▁radiated", + -14.534467697143556 + ], + [ + "▁Khanna", + -14.534480094909668 + ], + [ + "▁fallback", + -14.534506797790527 + ], + [ + "▁ushers", + -14.534558296203612 + ], + [ + "▁Patches", + -14.5346097946167 + ], + [ + "▁unchanging", + -14.534629821777344 + ], + [ + "▁Cellars", + -14.534771919250488 + ], + [ + "▁freaky", + -14.534795761108398 + ], + [ + "▁Tabletop", + -14.534835815429688 + ], + [ + "Pest", + -14.534956932067873 + ], + [ + "▁unlawfully", + -14.534994125366213 + ], + [ + "▁Guin", + -14.53501033782959 + ], + [ + "▁streamer", + -14.535101890563965 + ], + [ + "▁lavishly", + -14.535109519958496 + ], + [ + "▁creak", + -14.53512954711914 + ], + [ + "utin", + -14.535151481628418 + ], + [ + "▁Engaged", + -14.535162925720217 + ], + [ + "▁smoothest", + -14.535226821899414 + ], + [ + "hexa", + -14.535244941711426 + ], + [ + "▁optician", + -14.53538703918457 + ], + [ + "Paula", + -14.535470962524414 + ], + [ + "▁Kies", + -14.53551197052002 + ], + [ + "▁seine", + -14.535521507263184 + ], + [ + "stanbul", + -14.535568237304688 + ], + [ + "▁linguist", + -14.535599708557127 + ], + [ + 
"▁Longwood", + -14.535672187805176 + ], + [ + "▁FINE", + -14.535683631896973 + ], + [ + "059", + -14.535755157470703 + ], + [ + "▁taskbar", + -14.53577995300293 + ], + [ + "▁WSJ", + -14.53580379486084 + ], + [ + "▁corresponded", + -14.535804748535156 + ], + [ + "Tha", + -14.535841941833496 + ], + [ + "▁financier", + -14.535850524902344 + ], + [ + "▁Marz", + -14.535877227783203 + ], + [ + "▁pundit", + -14.535882949829102 + ], + [ + "TRANS", + -14.535919189453123 + ], + [ + "▁polyps", + -14.53592586517334 + ], + [ + "compassion", + -14.5359468460083 + ], + [ + "▁XRP", + -14.535985946655272 + ], + [ + "▁Decorator", + -14.53602695465088 + ], + [ + "▁kitchenware", + -14.536062240600586 + ], + [ + "▁10′′", + -14.536161422729492 + ], + [ + "aeus", + -14.536285400390623 + ], + [ + "cyn", + -14.53628921508789 + ], + [ + "▁statehood", + -14.53629207611084 + ], + [ + "▁Popeye", + -14.53639316558838 + ], + [ + "▁suspiciously", + -14.536465644836426 + ], + [ + "Sem", + -14.536520957946776 + ], + [ + "▁Mathis", + -14.53653621673584 + ], + [ + "▁Frosted", + -14.53654670715332 + ], + [ + "osc", + -14.536563873291016 + ], + [ + "2022", + -14.536615371704102 + ], + [ + "▁prosthetics", + -14.53679370880127 + ], + [ + "▁peasy", + -14.536893844604492 + ], + [ + "▁LOGO", + -14.536979675292969 + ], + [ + "▁sickly", + -14.536982536315918 + ], + [ + "nther", + -14.53700065612793 + ], + [ + "sequence", + -14.537031173706056 + ], + [ + "zma", + -14.537080764770508 + ], + [ + "▁complicates", + -14.537105560302734 + ], + [ + "2:50", + -14.537213325500488 + ], + [ + "▁Miro", + -14.537223815917969 + ], + [ + "Divorce", + -14.537261962890623 + ], + [ + "Twist", + -14.53726863861084 + ], + [ + "Lindsay", + -14.537270545959473 + ], + [ + "Empire", + -14.537275314331056 + ], + [ + "Moscow", + -14.537275314331056 + ], + [ + "beginning", + -14.537276268005373 + ], + [ + "Wikipedia", + -14.537277221679688 + ], + [ + "anxiety", + -14.537278175354004 + ], + [ + "▁cuddles", + -14.537291526794434 + ], + [ + "patio", + -14.537338256835938 + ], + [ + "▁woolen", + -14.537339210510254 + ], + [ + "pricing", + -14.537355422973633 + ], + [ + "WiFi", + -14.53739070892334 + ], + [ + "jeff", + -14.537396430969238 + ], + [ + "Hughes", + -14.537400245666504 + ], + [ + "advice", + -14.537419319152832 + ], + [ + "Craving", + -14.537446975708008 + ], + [ + "▁smelt", + -14.537453651428224 + ], + [ + "recently", + -14.537487983703612 + ], + [ + "▁Jordi", + -14.537528038024902 + ], + [ + "alliance", + -14.53760814666748 + ], + [ + "ANC", + -14.537622451782228 + ], + [ + "▁mending", + -14.53766632080078 + ], + [ + "GREE", + -14.537714004516602 + ], + [ + "forcing", + -14.537764549255373 + ], + [ + "▁Fielding", + -14.53779411315918 + ], + [ + "ANNE", + -14.53791332244873 + ], + [ + "▁Northam", + -14.537914276123049 + ], + [ + "SFC", + -14.538047790527344 + ], + [ + "Logan", + -14.53806972503662 + ], + [ + "Gotta", + -14.538082122802734 + ], + [ + "(0)", + -14.538114547729492 + ], + [ + "Associated", + -14.538121223449709 + ], + [ + "ikon", + -14.538208961486816 + ], + [ + "–17", + -14.538256645202637 + ], + [ + "engaged", + -14.53827953338623 + ], + [ + "Maryland", + -14.538305282592772 + ], + [ + "▁subtypes", + -14.538314819335938 + ], + [ + "DLL", + -14.538336753845217 + ], + [ + "taxi", + -14.53839874267578 + ], + [ + "amosa", + -14.538492202758787 + ], + [ + "▁Basti", + -14.538501739501951 + ], + [ + "IIT", + -14.538527488708496 + ], + [ + "least", + -14.53858470916748 + ], + [ + "▁modernise", + -14.538662910461426 + ], + [ + "▁sandbag", + 
-14.538681983947754 + ], + [ + "▁Brees", + -14.538715362548828 + ], + [ + "▁Brant", + -14.538729667663574 + ], + [ + "Themes", + -14.538909912109377 + ], + [ + "RUG", + -14.53891372680664 + ], + [ + "Acid", + -14.53892993927002 + ], + [ + "▁buzzed", + -14.538954734802246 + ], + [ + "▁SMOK", + -14.539040565490724 + ], + [ + "Keeper", + -14.539162635803224 + ], + [ + "rca", + -14.539246559143066 + ], + [ + "roman", + -14.539273262023926 + ], + [ + "▁patronize", + -14.539299964904783 + ], + [ + "JL", + -14.539417266845703 + ], + [ + "74)", + -14.539438247680664 + ], + [ + "▁concoctions", + -14.539443016052246 + ], + [ + "GFR", + -14.5394868850708 + ], + [ + "▁Meme", + -14.539502143859863 + ], + [ + "▁1:4", + -14.53952407836914 + ], + [ + "▁Banc", + -14.53964900970459 + ], + [ + "▁reinvigorate", + -14.539813041687012 + ], + [ + "▁transpire", + -14.53981876373291 + ], + [ + "▁Xiang", + -14.539883613586426 + ], + [ + "▁saturate", + -14.539888381958008 + ], + [ + "▁penalize", + -14.53994369506836 + ], + [ + "▁Astonishing", + -14.539968490600586 + ], + [ + "skiing", + -14.539978981018066 + ], + [ + "owning", + -14.54008674621582 + ], + [ + "Recognis", + -14.540109634399414 + ], + [ + "zyme", + -14.540136337280272 + ], + [ + "▁hourglass", + -14.540172576904297 + ], + [ + "sulfon", + -14.540204048156738 + ], + [ + "ZH", + -14.54023265838623 + ], + [ + "copies", + -14.540238380432127 + ], + [ + "▁Cf", + -14.54024887084961 + ], + [ + "▁AUTHOR", + -14.54040241241455 + ], + [ + "sophisticate", + -14.540403366088867 + ], + [ + "▁janitor", + -14.540410041809082 + ], + [ + "▁Uzbek", + -14.540414810180664 + ], + [ + "JOURNAL", + -14.54041576385498 + ], + [ + "▁Centenary", + -14.54041576385498 + ], + [ + "▁Gwinnett", + -14.54041576385498 + ], + [ + "▁Husqvarna", + -14.54041576385498 + ], + [ + "▁THOSE", + -14.54041576385498 + ], + [ + "▁compendium", + -14.54041576385498 + ], + [ + "▁complicating", + -14.54041576385498 + ], + [ + "▁convivial", + -14.54041576385498 + ], + [ + "▁crawfish", + -14.54041576385498 + ], + [ + "▁crustacean", + -14.54041576385498 + ], + [ + "▁oscilloscope", + -14.54041576385498 + ], + [ + "▁promulgated", + -14.54041576385498 + ], + [ + "▁reprisal", + -14.54041576385498 + ], + [ + "▁salamander", + -14.54041576385498 + ], + [ + "▁unappealing", + -14.54041576385498 + ], + [ + "▁unwieldy", + -14.54041576385498 + ], + [ + "▁Trampoline", + -14.540416717529297 + ], + [ + "▁Maybelline", + -14.540417671203612 + ], + [ + "▁militaris", + -14.540417671203612 + ], + [ + "▁Carthage", + -14.54041862487793 + ], + [ + "▁Horoscope", + -14.540419578552246 + ], + [ + "▁cheekbones", + -14.54042148590088 + ], + [ + "▁Bjorn", + -14.540423393249512 + ], + [ + "▁Surrender", + -14.540427207946776 + ], + [ + "▁deferral", + -14.540430068969728 + ], + [ + "▁bauble", + -14.540431022644045 + ], + [ + "▁DEVELOPMENT", + -14.540432929992676 + ], + [ + "▁consultancies", + -14.540432929992676 + ], + [ + "▁PETA", + -14.54043960571289 + ], + [ + "▁adventuring", + -14.54044246673584 + ], + [ + "▁conservatories", + -14.540444374084473 + ], + [ + "▁Amalfi", + -14.540445327758787 + ], + [ + "▁RESPECT", + -14.540446281433104 + ], + [ + "▁lipitor", + -14.540448188781738 + ], + [ + "▁modulator", + -14.54045867919922 + ], + [ + "▁grasshopper", + -14.540459632873535 + ], + [ + "▁Entrepreneurial", + -14.5404634475708 + ], + [ + "▁planetarium", + -14.54047966003418 + ], + [ + "sarcoma", + -14.540482521057127 + ], + [ + "▁consular", + -14.54049015045166 + ], + [ + "▁Zucchini", + -14.540494918823242 + ], + [ + "▁Tisch", + 
-14.54050350189209 + ], + [ + "culate", + -14.540515899658203 + ], + [ + "▁demystify", + -14.540536880493164 + ], + [ + "▁Mantel", + -14.54053783416748 + ], + [ + "▁miami", + -14.54053783416748 + ], + [ + "▁sapiens", + -14.54055118560791 + ], + [ + "▁FreeBSD", + -14.540572166442873 + ], + [ + "Annotation", + -14.54057502746582 + ], + [ + "▁Innis", + -14.54057788848877 + ], + [ + "▁rapture", + -14.540583610534668 + ], + [ + "▁Tusc", + -14.540603637695312 + ], + [ + "▁Tapping", + -14.540624618530272 + ], + [ + "Conventional", + -14.540631294250488 + ], + [ + "▁2014-2015", + -14.540648460388184 + ], + [ + "TEP", + -14.540650367736816 + ], + [ + "▁regenerating", + -14.54065990447998 + ], + [ + "tennis", + -14.540690422058104 + ], + [ + "Pace", + -14.54071044921875 + ], + [ + "▁remanded", + -14.540726661682127 + ], + [ + "▁Ecuadorian", + -14.540728569030762 + ], + [ + "Adria", + -14.540740966796877 + ], + [ + "▁BPD", + -14.540742874145508 + ], + [ + "▁Hyperion", + -14.540759086608888 + ], + [ + "▁Matchmaking", + -14.540761947631836 + ], + [ + "representational", + -14.54077434539795 + ], + [ + "DAN", + -14.540779113769531 + ], + [ + "guar", + -14.54079532623291 + ], + [ + "▁Haig", + -14.54084300994873 + ], + [ + "▁Steinway", + -14.540892601013184 + ], + [ + "▁Tilly", + -14.540903091430664 + ], + [ + "Lind", + -14.540904998779297 + ], + [ + "▁Rosedale", + -14.540908813476562 + ], + [ + "▁squandered", + -14.540918350219728 + ], + [ + "▁Grantham", + -14.540919303894045 + ], + [ + "▁commonality", + -14.540966987609863 + ], + [ + "▁uppercase", + -14.540976524353027 + ], + [ + "▁misinterpreted", + -14.540982246398926 + ], + [ + "▁Godfather", + -14.541007995605469 + ], + [ + "▁billable", + -14.541007995605469 + ], + [ + "ruder", + -14.541035652160645 + ], + [ + "▁reroute", + -14.54103660583496 + ], + [ + "▁Perks", + -14.541060447692873 + ], + [ + "▁Chiron", + -14.54108428955078 + ], + [ + "▁HCP", + -14.541105270385742 + ], + [ + "▁vicar", + -14.54112720489502 + ], + [ + "▁analyzers", + -14.54122829437256 + ], + [ + "▁compet", + -14.541236877441406 + ], + [ + "(10):", + -14.541244506835938 + ], + [ + "▁Aspiration", + -14.541248321533203 + ], + [ + "1–1", + -14.541274070739746 + ], + [ + "▁phage", + -14.541327476501465 + ], + [ + "FOOT", + -14.54133129119873 + ], + [ + "▁ACF", + -14.54135036468506 + ], + [ + "meme", + -14.541351318359377 + ], + [ + "▁naturalized", + -14.541373252868652 + ], + [ + "Vital", + -14.5414457321167 + ], + [ + "Singer", + -14.541460037231444 + ], + [ + "▁panini", + -14.541463851928713 + ], + [ + "▁skits", + -14.541510581970217 + ], + [ + "pirate", + -14.541574478149414 + ], + [ + "▁DAF", + -14.54159450531006 + ], + [ + "Brooks", + -14.541616439819336 + ], + [ + "▁outrageously", + -14.541683197021484 + ], + [ + "tenth", + -14.541691780090332 + ], + [ + "ITES", + -14.54171657562256 + ], + [ + "▁pinot", + -14.54171657562256 + ], + [ + "Vest", + -14.541763305664062 + ], + [ + "▁swat", + -14.541844367980955 + ], + [ + "▁Wearable", + -14.541850090026855 + ], + [ + "▁oem", + -14.541924476623535 + ], + [ + "▁heartening", + -14.541947364807127 + ], + [ + "HHH", + -14.542040824890137 + ], + [ + "heem", + -14.542073249816896 + ], + [ + "CUT", + -14.542131423950195 + ], + [ + "▁Halong", + -14.54213523864746 + ], + [ + "▁PRESENT", + -14.542162895202637 + ], + [ + "▁shopkeeper", + -14.542165756225586 + ], + [ + "JH", + -14.542207717895508 + ], + [ + "nyc", + -14.542213439941406 + ], + [ + "▁Aji", + -14.542244911193848 + ], + [ + "▁yellowing", + -14.54225254058838 + ], + [ + "TEF", + 
-14.542316436767578 + ], + [ + "▁swelled", + -14.542354583740234 + ], + [ + "▁mainstays", + -14.542417526245115 + ], + [ + "bung", + -14.542436599731444 + ], + [ + "▁riverbank", + -14.542576789855955 + ], + [ + "izan", + -14.54277229309082 + ], + [ + "▁doodles", + -14.5428466796875 + ], + [ + "gracing", + -14.54286003112793 + ], + [ + "▁Lieberman", + -14.54287815093994 + ], + [ + "▁destabilize", + -14.542940139770508 + ], + [ + "$9", + -14.542943000793455 + ], + [ + "▁Lupin", + -14.542969703674316 + ], + [ + "Finn", + -14.54298210144043 + ], + [ + "▁Avenger", + -14.54306983947754 + ], + [ + "▁whiz", + -14.543119430541992 + ], + [ + "▁pranks", + -14.54318141937256 + ], + [ + "▁Hangouts", + -14.543204307556152 + ], + [ + "▁fingerboard", + -14.543225288391112 + ], + [ + "▁Komen", + -14.543269157409668 + ], + [ + "▁Grading", + -14.543299674987791 + ], + [ + "▁bibliographic", + -14.543437004089355 + ], + [ + "5-18", + -14.543479919433594 + ], + [ + "▁SFX", + -14.543559074401855 + ], + [ + "ctive", + -14.543660163879396 + ], + [ + "hli", + -14.543787002563477 + ], + [ + "erus", + -14.543869018554688 + ], + [ + "CRIS", + -14.543872833251951 + ], + [ + "▁heresy", + -14.543977737426758 + ], + [ + "enheim", + -14.544014930725098 + ], + [ + "1973", + -14.54401969909668 + ], + [ + "Universit", + -14.54403305053711 + ], + [ + "cency", + -14.54405403137207 + ], + [ + "chao", + -14.54408359527588 + ], + [ + "PAW", + -14.544102668762209 + ], + [ + "2:00", + -14.544121742248535 + ], + [ + "▁Omo", + -14.544184684753418 + ], + [ + "▁necro", + -14.544204711914062 + ], + [ + "towel", + -14.544208526611328 + ], + [ + "▁enforcer", + -14.544225692749023 + ], + [ + "▁partitioned", + -14.54428482055664 + ], + [ + "▁Blame", + -14.544384002685549 + ], + [ + "Supplement", + -14.544391632080078 + ], + [ + "ENDING", + -14.544397354125977 + ], + [ + "CLS", + -14.54442310333252 + ], + [ + "gasp", + -14.544425010681152 + ], + [ + "pela", + -14.544475555419922 + ], + [ + "Proverbs", + -14.544479370117188 + ], + [ + "Facial", + -14.544515609741213 + ], + [ + "Aurora", + -14.544516563415527 + ], + [ + "Portfolio", + -14.544516563415527 + ], + [ + "religion", + -14.544519424438477 + ], + [ + "intelligent", + -14.54452133178711 + ], + [ + "federal", + -14.544530868530272 + ], + [ + "Vegas", + -14.544550895690918 + ], + [ + "▁Tyra", + -14.544560432434082 + ], + [ + "Kingdom", + -14.544564247131348 + ], + [ + "Judging", + -14.544596672058104 + ], + [ + "▁unbeliever", + -14.544602394104004 + ], + [ + "ACM", + -14.544611930847168 + ], + [ + "Theatre", + -14.544682502746582 + ], + [ + "▁Bew", + -14.544727325439451 + ], + [ + "acceptable", + -14.544737815856934 + ], + [ + "routine", + -14.5447416305542 + ], + [ + "Oscar", + -14.544801712036133 + ], + [ + "▁Meer", + -14.54482364654541 + ], + [ + "▁$3.3", + -14.544833183288574 + ], + [ + "ichler", + -14.544841766357422 + ], + [ + "tempered", + -14.54486083984375 + ], + [ + "RRR", + -14.54489803314209 + ], + [ + "opsis", + -14.54490089416504 + ], + [ + "▁Auditing", + -14.544977188110352 + ], + [ + "Tighten", + -14.545031547546388 + ], + [ + "cosmetic", + -14.545136451721191 + ], + [ + "kada", + -14.545181274414062 + ], + [ + "Homemade", + -14.545191764831545 + ], + [ + "oides", + -14.545210838317873 + ], + [ + "▁premade", + -14.545244216918944 + ], + [ + "Moral", + -14.545252799987791 + ], + [ + "▁Hagan", + -14.545254707336426 + ], + [ + "▁dismissive", + -14.545328140258787 + ], + [ + "vius", + -14.5453462600708 + ], + [ + "Functionality", + -14.545568466186523 + ], + [ + "leng", + 
-14.545584678649902 + ], + [ + "olini", + -14.545616149902344 + ], + [ + "RGB", + -14.545666694641112 + ], + [ + "▁OPM", + -14.545709609985352 + ], + [ + "reverse", + -14.545801162719728 + ], + [ + "thai", + -14.545863151550291 + ], + [ + "▁Hamad", + -14.545880317687988 + ], + [ + "6:20", + -14.545924186706545 + ], + [ + "climb", + -14.545934677124023 + ], + [ + "▁Grasp", + -14.5459566116333 + ], + [ + "▁Calibration", + -14.54600429534912 + ], + [ + "▁Fermi", + -14.546015739440918 + ], + [ + "▁Steri", + -14.54601764678955 + ], + [ + "1977", + -14.546052932739258 + ], + [ + "devil", + -14.546088218688965 + ], + [ + "johnson", + -14.546093940734863 + ], + [ + "goer", + -14.54612159729004 + ], + [ + "roni", + -14.546127319335938 + ], + [ + "▁Arco", + -14.546156883239746 + ], + [ + "5400", + -14.546319007873535 + ], + [ + "▁EXCEPT", + -14.546435356140137 + ], + [ + "▁Mour", + -14.546595573425291 + ], + [ + "▁roadshow", + -14.54660701751709 + ], + [ + "▁$78", + -14.546818733215332 + ], + [ + "▁Supercar", + -14.546884536743164 + ], + [ + "▁dwindle", + -14.54690647125244 + ], + [ + "▁Kashi", + -14.546916007995604 + ], + [ + "▁deflate", + -14.546930313110352 + ], + [ + "Plea", + -14.546931266784668 + ], + [ + "RENT", + -14.546935081481934 + ], + [ + "▁Mainz", + -14.54700756072998 + ], + [ + "▁secretions", + -14.54705810546875 + ], + [ + "▁Joliet", + -14.547074317932127 + ], + [ + "USP", + -14.547082901000977 + ], + [ + "junctive", + -14.547100067138672 + ], + [ + "durable", + -14.547123908996582 + ], + [ + "Poke", + -14.547158241271973 + ], + [ + "▁inadvertent", + -14.547268867492676 + ], + [ + "Coopers", + -14.54727554321289 + ], + [ + "▁missteps", + -14.547304153442385 + ], + [ + "innie", + -14.547337532043455 + ], + [ + "Evan", + -14.54734230041504 + ], + [ + "▁Analytica", + -14.547355651855469 + ], + [ + "diner", + -14.547361373901367 + ], + [ + "▁Babb", + -14.547389030456545 + ], + [ + "Adhere", + -14.547412872314451 + ], + [ + "jb", + -14.54748821258545 + ], + [ + "Insomnia", + -14.547505378723145 + ], + [ + "Obstacle", + -14.547507286071776 + ], + [ + "▁Beverley", + -14.547508239746094 + ], + [ + "▁Burnaby", + -14.547508239746094 + ], + [ + "▁Inheritance", + -14.547508239746094 + ], + [ + "▁Sarkozy", + -14.547508239746094 + ], + [ + "▁Saxophone", + -14.547508239746094 + ], + [ + "▁Scorsese", + -14.547508239746094 + ], + [ + "▁Torquay", + -14.547508239746094 + ], + [ + "▁carotenoid", + -14.547508239746094 + ], + [ + "▁divorcing", + -14.547508239746094 + ], + [ + "▁gauging", + -14.547508239746094 + ], + [ + "▁glucosamine", + -14.547508239746094 + ], + [ + "▁paisley", + -14.547508239746094 + ], + [ + "▁seagull", + -14.547508239746094 + ], + [ + "▁splicing", + -14.547508239746094 + ], + [ + "▁unflinching", + -14.547508239746094 + ], + [ + "▁CRUSHER", + -14.54750919342041 + ], + [ + "▁audacity", + -14.54750919342041 + ], + [ + "▁squeegee", + -14.54750919342041 + ], + [ + "▁squamous", + -14.547510147094728 + ], + [ + "▁Grenoble", + -14.547511100769045 + ], + [ + "▁Timberwolves", + -14.547511100769045 + ], + [ + "▁phenolic", + -14.54751205444336 + ], + [ + "▁ADVICE", + -14.547513961791992 + ], + [ + "▁propylene", + -14.547517776489258 + ], + [ + "▁finicky", + -14.547518730163574 + ], + [ + "▁afghan", + -14.547526359558104 + ], + [ + "▁thruster", + -14.547527313232422 + ], + [ + "▁Chatsworth", + -14.547540664672852 + ], + [ + "▁Vulture", + -14.547540664672852 + ], + [ + "▁Juncker", + -14.547541618347168 + ], + [ + "▁(1981)", + -14.547545433044434 + ], + [ + "▁herniated", + -14.547545433044434 + ], 
+ [ + "▁empties", + -14.547550201416016 + ], + [ + "▁balsa", + -14.547551155090332 + ], + [ + "▁Radius", + -14.54757022857666 + ], + [ + "▁stalemate", + -14.54757022857666 + ], + [ + "▁Digestive", + -14.547571182250977 + ], + [ + "▁Woodruff", + -14.54757595062256 + ], + [ + "▁Picker", + -14.547576904296877 + ], + [ + "▁unavailability", + -14.547576904296877 + ], + [ + "▁machin", + -14.547590255737305 + ], + [ + "▁Diplomacy", + -14.547595977783203 + ], + [ + "▁squatting", + -14.547602653503418 + ], + [ + "▁stunner", + -14.547603607177734 + ], + [ + "▁Breweries", + -14.54762077331543 + ], + [ + "▁MEMS", + -14.54762077331543 + ], + [ + "friars", + -14.54763889312744 + ], + [ + "▁regrettable", + -14.547651290893556 + ], + [ + "Boc", + -14.547657012939451 + ], + [ + "esper", + -14.547667503356934 + ], + [ + "▁Freedman", + -14.547673225402832 + ], + [ + "▁Warhammer", + -14.54768180847168 + ], + [ + "▁Ivanka", + -14.547691345214844 + ], + [ + "▁reverent", + -14.547710418701172 + ], + [ + "▁Rhy", + -14.547718048095703 + ], + [ + "▁centrist", + -14.54772663116455 + ], + [ + "▁Mulder", + -14.5477294921875 + ], + [ + "▁Ashwin", + -14.54773998260498 + ], + [ + "▁telegram", + -14.547740936279297 + ], + [ + "▁CFPB", + -14.547747611999512 + ], + [ + "▁fancies", + -14.547747611999512 + ], + [ + "▁toughened", + -14.547748565673828 + ], + [ + "DOL", + -14.54776382446289 + ], + [ + "▁atheism", + -14.547797203063965 + ], + [ + "▁unmask", + -14.54780387878418 + ], + [ + "▁glutes", + -14.547819137573242 + ], + [ + "▁fait", + -14.54786205291748 + ], + [ + "▁integra", + -14.547930717468262 + ], + [ + "▁acclimated", + -14.54794979095459 + ], + [ + "▁otc", + -14.54794979095459 + ], + [ + "Liberal", + -14.547996520996094 + ], + [ + "▁criminally", + -14.548006057739258 + ], + [ + "▁Hatha", + -14.548029899597168 + ], + [ + "▁Partially", + -14.548035621643066 + ], + [ + "versary", + -14.54806900024414 + ], + [ + "▁weirdest", + -14.548101425170898 + ], + [ + "▁daft", + -14.548104286193848 + ], + [ + "homo", + -14.548197746276855 + ], + [ + "▁Mackie", + -14.548242568969728 + ], + [ + "▁Hersh", + -14.548247337341309 + ], + [ + "▁catapulted", + -14.548263549804688 + ], + [ + "▁Bajaj", + -14.54826831817627 + ], + [ + "▁Mirza", + -14.548280715942385 + ], + [ + "▁Pista", + -14.54834270477295 + ], + [ + "▁SRAM", + -14.54837131500244 + ], + [ + "conn", + -14.548410415649414 + ], + [ + "▁Sika", + -14.54841423034668 + ], + [ + "▁mailers", + -14.54843807220459 + ], + [ + "▁cupping", + -14.548481941223145 + ], + [ + "▁160,000", + -14.548502922058104 + ], + [ + "▁1828", + -14.54851245880127 + ], + [ + "▁uninspired", + -14.548545837402344 + ], + [ + "▁thickest", + -14.548653602600098 + ], + [ + "osse", + -14.54874038696289 + ], + [ + "▁signalled", + -14.548752784729004 + ], + [ + "▁opiates", + -14.548755645751951 + ], + [ + "▁Mistress", + -14.548863410949709 + ], + [ + "▁Octavia", + -14.548873901367188 + ], + [ + "▁trackpad", + -14.548897743225098 + ], + [ + "RAVE", + -14.548907279968262 + ], + [ + "▁USTA", + -14.548991203308104 + ], + [ + "generator", + -14.548995018005373 + ], + [ + "▁OneTravel", + -14.549009323120115 + ], + [ + "▁Goodrich", + -14.5490140914917 + ], + [ + "obie", + -14.549020767211914 + ], + [ + "emitting", + -14.549044609069824 + ], + [ + "▁chatty", + -14.549074172973633 + ], + [ + "▁resentful", + -14.549087524414062 + ], + [ + "Opti", + -14.549216270446776 + ], + [ + "▁carbonation", + -14.549221992492676 + ], + [ + "▁swirled", + -14.549270629882812 + ], + [ + "▁vigilante", + -14.549272537231444 + ], + [ + 
"▁SKIN", + -14.549287796020508 + ], + [ + "▁Nuke", + -14.549310684204102 + ], + [ + "▁KVM", + -14.549317359924316 + ], + [ + "▁stunted", + -14.54942798614502 + ], + [ + "▁Sheri", + -14.549477577209473 + ], + [ + "▁Salted", + -14.549578666687012 + ], + [ + "▁$66", + -14.54959487915039 + ], + [ + "Lori", + -14.549625396728516 + ], + [ + "peter", + -14.549657821655272 + ], + [ + "▁underwrite", + -14.549659729003906 + ], + [ + "▁lossless", + -14.549684524536133 + ], + [ + "▁Kelli", + -14.54971981048584 + ], + [ + "▁Racers", + -14.549819946289062 + ], + [ + "▁Converting", + -14.549887657165527 + ], + [ + "▁Cortes", + -14.549935340881348 + ], + [ + "Produce", + -14.549956321716309 + ], + [ + "▁Guatemalan", + -14.54997730255127 + ], + [ + "▁Vicar", + -14.55005168914795 + ], + [ + "Trick", + -14.55008316040039 + ], + [ + "plugins", + -14.55015468597412 + ], + [ + "▁auditioned", + -14.55015468597412 + ], + [ + "posable", + -14.550158500671388 + ], + [ + "▁statesman", + -14.550186157226562 + ], + [ + "▁binocular", + -14.550217628479004 + ], + [ + "▁Lander", + -14.550253868103027 + ], + [ + "▁jihadist", + -14.55027198791504 + ], + [ + "▁diego", + -14.55042552947998 + ], + [ + "▁Treated", + -14.550469398498535 + ], + [ + "septic", + -14.55048942565918 + ], + [ + "▁Kik", + -14.550577163696287 + ], + [ + "▁Bertha", + -14.550600051879885 + ], + [ + "ghar", + -14.550601959228516 + ], + [ + "▁Mattis", + -14.550609588623049 + ], + [ + "▁Thé", + -14.550701141357422 + ], + [ + "▁£5,000", + -14.550766944885254 + ], + [ + "sentence", + -14.550800323486328 + ], + [ + "▁Sounders", + -14.550976753234863 + ], + [ + "▁SIMPLE", + -14.550992965698242 + ], + [ + "▁SOLE", + -14.55099391937256 + ], + [ + "EOS", + -14.550994873046877 + ], + [ + "SIGN", + -14.551033973693848 + ], + [ + "▁defaulting", + -14.5510892868042 + ], + [ + "searching", + -14.551188468933104 + ], + [ + "▁Schar", + -14.551189422607422 + ], + [ + "▁£13", + -14.551222801208496 + ], + [ + "▁localize", + -14.551267623901367 + ], + [ + "▁lunchbox", + -14.55130672454834 + ], + [ + "characterisation", + -14.551313400268556 + ], + [ + "▁1020", + -14.551315307617188 + ], + [ + "▁Reuse", + -14.551447868347168 + ], + [ + "ACTIV", + -14.551462173461914 + ], + [ + "▁Rajesh", + -14.551522254943848 + ], + [ + "▁neurologic", + -14.551624298095703 + ], + [ + "▁braai", + -14.55165958404541 + ], + [ + "mortgage", + -14.551665306091309 + ], + [ + "crowded", + -14.55168914794922 + ], + [ + "ooooooooo", + -14.551708221435549 + ], + [ + "availability", + -14.551734924316406 + ], + [ + "CASE", + -14.551776885986328 + ], + [ + "Completion", + -14.551791191101074 + ], + [ + "cellulose", + -14.551796913146973 + ], + [ + "intervention", + -14.551798820495604 + ], + [ + "Compliance", + -14.551815032958984 + ], + [ + "Jefferson", + -14.551815032958984 + ], + [ + "Abraham", + -14.551817893981934 + ], + [ + "Powder", + -14.551817893981934 + ], + [ + "Exterior", + -14.551824569702148 + ], + [ + "advertising", + -14.551837921142578 + ], + [ + "Muscle", + -14.551838874816896 + ], + [ + "OMC", + -14.55186939239502 + ], + [ + "desert", + -14.551871299743652 + ], + [ + "Venue", + -14.551876068115234 + ], + [ + "▁Chav", + -14.551881790161133 + ], + [ + "Lum", + -14.551899909973145 + ], + [ + "Outlook", + -14.551916122436523 + ], + [ + "LCD", + -14.551939964294434 + ], + [ + "attribute", + -14.551965713500977 + ], + [ + "presence", + -14.55198860168457 + ], + [ + "▁bookshops", + -14.552017211914062 + ], + [ + "Axis", + -14.55203342437744 + ], + [ + "▁layoff", + -14.552085876464844 + ], + [ 
+ "champion", + -14.552115440368652 + ], + [ + "Riding", + -14.552119255065918 + ], + [ + "▁Alte", + -14.552207946777344 + ], + [ + "▁Marcy", + -14.552237510681152 + ], + [ + "Exceptional", + -14.552268028259276 + ], + [ + "Implementing", + -14.552319526672363 + ], + [ + "ASX", + -14.552349090576172 + ], + [ + "combat", + -14.552363395690918 + ], + [ + "▁ligands", + -14.552382469177246 + ], + [ + "▁fortifications", + -14.552383422851562 + ], + [ + "ITU", + -14.55240249633789 + ], + [ + "easily", + -14.552410125732422 + ], + [ + "▁Quito", + -14.552431106567385 + ], + [ + "2.5%", + -14.552464485168455 + ], + [ + "▁steppe", + -14.55261516571045 + ], + [ + "venous", + -14.552635192871094 + ], + [ + "▁Shafi", + -14.552650451660156 + ], + [ + "Attached", + -14.552653312683104 + ], + [ + "▁continu", + -14.552653312683104 + ], + [ + "▁Lv", + -14.552668571472168 + ], + [ + "▁Sapp", + -14.552689552307127 + ], + [ + "Beef", + -14.55271053314209 + ], + [ + "▁Cleo", + -14.552720069885254 + ], + [ + "▁APM", + -14.552740097045898 + ], + [ + "5-26", + -14.552828788757324 + ], + [ + "ETTE", + -14.552842140197754 + ], + [ + "disable", + -14.552878379821776 + ], + [ + "▁VFD", + -14.552986145019531 + ], + [ + "sectoral", + -14.552987098693848 + ], + [ + "atorio", + -14.553048133850098 + ], + [ + "appu", + -14.553071022033691 + ], + [ + "▁colum", + -14.553096771240234 + ], + [ + "filtration", + -14.553101539611816 + ], + [ + "▁Coffin", + -14.553114891052246 + ], + [ + "▁HALF", + -14.553138732910156 + ], + [ + "plete", + -14.553160667419434 + ], + [ + "▁filth", + -14.553194999694824 + ], + [ + "Alberta", + -14.55322551727295 + ], + [ + "▁Enid", + -14.55322551727295 + ], + [ + "▁GTR", + -14.55323314666748 + ], + [ + "▁TGA", + -14.553369522094728 + ], + [ + "Fortune", + -14.55339241027832 + ], + [ + "ricate", + -14.553418159484863 + ], + [ + "▁Liners", + -14.55343246459961 + ], + [ + "▁SLO", + -14.55344009399414 + ], + [ + "iegel", + -14.553442001342772 + ], + [ + "Epic", + -14.55357551574707 + ], + [ + "erence", + -14.553608894348145 + ], + [ + "▁Banda", + -14.553608894348145 + ], + [ + "▁Faw", + -14.553631782531738 + ], + [ + "▁Accidental", + -14.553668975830078 + ], + [ + "▁acquirer", + -14.5537109375 + ], + [ + "seasonal", + -14.553717613220217 + ], + [ + "MAM", + -14.553730964660645 + ], + [ + "quant", + -14.553775787353516 + ], + [ + "▁plu", + -14.553831100463867 + ], + [ + "▁purl", + -14.55386447906494 + ], + [ + "NSC", + -14.553876876831056 + ], + [ + "▁Sree", + -14.553916931152344 + ], + [ + "lotte", + -14.553955078125 + ], + [ + "▁Flaming", + -14.55402946472168 + ], + [ + "FSP", + -14.554086685180664 + ], + [ + "▁IVA", + -14.554214477539062 + ], + [ + "▁Saif", + -14.554240226745604 + ], + [ + "▁Jira", + -14.554255485534668 + ], + [ + "▁11.3", + -14.554266929626465 + ], + [ + "acek", + -14.554293632507324 + ], + [ + "DFI", + -14.554327011108398 + ], + [ + "▁MEDI", + -14.554362297058104 + ], + [ + "Zilla", + -14.554407119750977 + ], + [ + "▁buzzwords", + -14.554503440856934 + ], + [ + "▁massaged", + -14.5545072555542 + ], + [ + "▁BSC", + -14.554556846618652 + ], + [ + "▁YET", + -14.554590225219728 + ], + [ + "Cannot", + -14.554612159729004 + ], + [ + "Distinguish", + -14.554641723632812 + ], + [ + "▁bangalore", + -14.55465030670166 + ], + [ + "▁sorcerer", + -14.55465030670166 + ], + [ + "Remarkably", + -14.554651260375977 + ], + [ + "SUMMARY", + -14.554651260375977 + ], + [ + "▁800-340-5756", + -14.554651260375977 + ], + [ + "▁Esplanade", + -14.554651260375977 + ], + [ + "▁FANTASTIC", + 
-14.554651260375977 + ], + [ + "▁Giclee", + -14.554651260375977 + ], + [ + "▁Knesset", + -14.554651260375977 + ], + [ + "▁Musharraf", + -14.554651260375977 + ], + [ + "▁Retractable", + -14.554651260375977 + ], + [ + "▁draconian", + -14.554651260375977 + ], + [ + "▁excavating", + -14.554651260375977 + ], + [ + "▁expository", + -14.554651260375977 + ], + [ + "▁macabre", + -14.554651260375977 + ], + [ + "▁retaliatory", + -14.554651260375977 + ], + [ + "▁AmeriCorps", + -14.554652214050291 + ], + [ + "▁Bernanke", + -14.55465316772461 + ], + [ + "▁Smollett", + -14.55465316772461 + ], + [ + "▁january", + -14.55465316772461 + ], + [ + "▁Dermatologist", + -14.554655075073242 + ], + [ + "▁Giraffe", + -14.554655075073242 + ], + [ + "▁hippocampus", + -14.554656982421877 + ], + [ + "▁Fitzroy", + -14.554658889770508 + ], + [ + "▁scintillating", + -14.554659843444824 + ], + [ + "▁biomechanics", + -14.554661750793455 + ], + [ + "▁Quezon", + -14.554665565490724 + ], + [ + "▁histamine", + -14.554670333862305 + ], + [ + "▁whooping", + -14.554676055908203 + ], + [ + "▁forecourt", + -14.554681777954102 + ], + [ + "▁secession", + -14.5546875 + ], + [ + "▁Saeed", + -14.554689407348633 + ], + [ + "▁monogrammed", + -14.554689407348633 + ], + [ + "Releasing", + -14.55469036102295 + ], + [ + "▁Ryzen", + -14.554699897766112 + ], + [ + "▁1010", + -14.554704666137695 + ], + [ + "▁Matrimonial", + -14.554706573486328 + ], + [ + "▁Ruffle", + -14.554719924926758 + ], + [ + "▁IAAF", + -14.554722785949709 + ], + [ + "▁Grotto", + -14.554727554321287 + ], + [ + "▁Prelude", + -14.554731369018556 + ], + [ + "▁taylor", + -14.554731369018556 + ], + [ + "▁Neteller", + -14.554733276367188 + ], + [ + "▁Jeju", + -14.55474090576172 + ], + [ + "Lucent", + -14.5547456741333 + ], + [ + "▁pillowcases", + -14.554746627807615 + ], + [ + "▁Cheung", + -14.554757118225098 + ], + [ + "Proven", + -14.554766654968262 + ], + [ + "▁Marlow", + -14.554773330688477 + ], + [ + "▁topsoil", + -14.554779052734377 + ], + [ + "▁2012-2013", + -14.55481243133545 + ], + [ + "▁Jaeger", + -14.554817199707031 + ], + [ + "▁RRSP", + -14.554828643798828 + ], + [ + "▁Wheatley", + -14.554858207702637 + ], + [ + "▁tranche", + -14.55487060546875 + ], + [ + "▁anointing", + -14.55490779876709 + ], + [ + "▁Sandberg", + -14.554916381835938 + ], + [ + "▁subfloor", + -14.55498504638672 + ], + [ + "▁SOCIAL", + -14.555025100708008 + ], + [ + "YOUR", + -14.555033683776855 + ], + [ + "TRC", + -14.555055618286133 + ], + [ + "▁nonverbal", + -14.55506420135498 + ], + [ + "▁Kickoff", + -14.555068969726562 + ], + [ + "▁biographer", + -14.555068969726562 + ], + [ + "▁subversion", + -14.555075645446776 + ], + [ + "▁Ache", + -14.555076599121094 + ], + [ + "▁dilated", + -14.555094718933104 + ], + [ + "▁Virtualization", + -14.555132865905762 + ], + [ + "▁waging", + -14.555140495300291 + ], + [ + "▁Gustavo", + -14.555176734924316 + ], + [ + "erik", + -14.555190086364746 + ], + [ + "▁curt", + -14.555219650268556 + ], + [ + "▁novelties", + -14.555233001708984 + ], + [ + "▁EDF", + -14.55534553527832 + ], + [ + "▁Ozarks", + -14.555402755737305 + ], + [ + "▁trucker", + -14.555485725402832 + ], + [ + "▁conceptualized", + -14.555499076843262 + ], + [ + "▁eSports", + -14.555505752563477 + ], + [ + "▁Gabor", + -14.55552101135254 + ], + [ + "▁cette", + -14.555533409118652 + ], + [ + "ESH", + -14.55555534362793 + ], + [ + "6400", + -14.555560111999512 + ], + [ + "acchi", + -14.555572509765623 + ], + [ + "▁corned", + -14.555609703063965 + ], + [ + "▁dissecting", + -14.555614471435549 + ], + [ + "▁tiki", 
+ -14.555655479431152 + ], + [ + "Editing", + -14.55570125579834 + ], + [ + "▁Bosnian", + -14.555770874023438 + ], + [ + "▁Orbiter", + -14.55579662322998 + ], + [ + "▁flan", + -14.55579948425293 + ], + [ + "PFL", + -14.555838584899902 + ], + [ + "Cmd", + -14.555853843688965 + ], + [ + "▁Lina", + -14.55596923828125 + ], + [ + "▁mangroves", + -14.55599308013916 + ], + [ + "IANS", + -14.555994987487791 + ], + [ + "hobo", + -14.555994987487791 + ], + [ + "▁Autograph", + -14.556022644042969 + ], + [ + "itar", + -14.55605411529541 + ], + [ + "▁tillage", + -14.55609130859375 + ], + [ + "▁Syst", + -14.556112289428713 + ], + [ + "64/", + -14.556195259094238 + ], + [ + "▁Optimist", + -14.556201934814451 + ], + [ + "CAF", + -14.556288719177246 + ], + [ + "▁Serene", + -14.556315422058104 + ], + [ + "▁EPP", + -14.556321144104004 + ], + [ + "prac", + -14.556328773498535 + ], + [ + "▁SGA", + -14.556434631347656 + ], + [ + "▁vultures", + -14.556440353393556 + ], + [ + "▁cheater", + -14.556459426879885 + ], + [ + "▁Alessi", + -14.556482315063477 + ], + [ + "▁Karp", + -14.556524276733398 + ], + [ + "1975", + -14.556532859802246 + ], + [ + "Beside", + -14.556532859802246 + ], + [ + "ете", + -14.556558609008787 + ], + [ + "FAA", + -14.556621551513672 + ], + [ + "nego", + -14.55666160583496 + ], + [ + "▁Cyr", + -14.556706428527832 + ], + [ + "▁Dupl", + -14.556780815124512 + ], + [ + "▁fascinate", + -14.55682373046875 + ], + [ + "▁Mez", + -14.556890487670898 + ], + [ + "Operative", + -14.556961059570312 + ], + [ + "▁GAMES", + -14.556974411010742 + ], + [ + "▁Crowne", + -14.557072639465332 + ], + [ + "(12):", + -14.557188034057615 + ], + [ + "▁Samara", + -14.55720043182373 + ], + [ + "▁Armand", + -14.557238578796388 + ], + [ + "Omg", + -14.557239532470703 + ], + [ + "fru", + -14.557247161865234 + ], + [ + "▁conveyancer", + -14.557256698608398 + ], + [ + "Acquire", + -14.5573091506958 + ], + [ + "▁EPL", + -14.55732250213623 + ], + [ + "phu", + -14.557409286499023 + ], + [ + "▁nostril", + -14.55744457244873 + ], + [ + "▁Underwriter", + -14.557479858398438 + ], + [ + "▁Poetic", + -14.55749225616455 + ], + [ + "ierra", + -14.557493209838867 + ], + [ + "LAX", + -14.557580947875977 + ], + [ + "▁Remi", + -14.557608604431152 + ], + [ + "cape", + -14.557804107666016 + ], + [ + "Opera", + -14.55781364440918 + ], + [ + "uje", + -14.55784034729004 + ], + [ + "▁$3000", + -14.557865142822266 + ], + [ + "▁Publicity", + -14.557899475097656 + ], + [ + "ippo", + -14.557905197143556 + ], + [ + "▁Emission", + -14.557989120483398 + ], + [ + "▁healthily", + -14.55815887451172 + ], + [ + "meli", + -14.558159828186035 + ], + [ + "-1950", + -14.558180809020996 + ], + [ + "luxe", + -14.558263778686523 + ], + [ + "▁Dugan", + -14.558290481567385 + ], + [ + "HAND", + -14.558332443237305 + ], + [ + "▁Stroller", + -14.558367729187012 + ], + [ + "▁Dud", + -14.558436393737791 + ], + [ + "▁Martell", + -14.55848217010498 + ], + [ + "▁custodians", + -14.558500289916992 + ], + [ + "▁1810", + -14.558539390563965 + ], + [ + "▁3:16", + -14.558605194091797 + ], + [ + "▁passable", + -14.55862045288086 + ], + [ + "Quilt", + -14.558653831481934 + ], + [ + "▁schoolwork", + -14.558660507202148 + ], + [ + "izza", + -14.558680534362791 + ], + [ + "▁1680", + -14.558710098266602 + ], + [ + "rola", + -14.558727264404297 + ], + [ + "▁propellers", + -14.558733940124512 + ], + [ + "▁emphasising", + -14.558788299560549 + ], + [ + "akha", + -14.55889892578125 + ], + [ + "CID", + -14.558924674987791 + ], + [ + "ritic", + -14.55894374847412 + ], + [ + "formance", + 
-14.558978080749512 + ], + [ + "FRIEND", + -14.558989524841309 + ], + [ + "▁TOC", + -14.558992385864258 + ], + [ + "▁mindfully", + -14.55906105041504 + ], + [ + "LYN", + -14.55909538269043 + ], + [ + "Flexibility", + -14.559127807617188 + ], + [ + "encyclopedia", + -14.559151649475098 + ], + [ + "efficacy", + -14.559168815612791 + ], + [ + "Agriculture", + -14.55917263031006 + ], + [ + "Mumbai", + -14.559173583984377 + ], + [ + "Ministry", + -14.559175491333008 + ], + [ + "defying", + -14.559185981750488 + ], + [ + "Packaging", + -14.559197425842283 + ], + [ + "ovsk", + -14.559205055236816 + ], + [ + "▁Volta", + -14.55924129486084 + ], + [ + "Northwest", + -14.559258460998535 + ], + [ + "portfolio", + -14.559263229370115 + ], + [ + "stainless", + -14.559269905090332 + ], + [ + "▁Mundo", + -14.55927562713623 + ], + [ + "Raising", + -14.559284210205078 + ], + [ + "3–4", + -14.559368133544922 + ], + [ + "Canvas", + -14.559386253356934 + ], + [ + "Agile", + -14.5593900680542 + ], + [ + "disney", + -14.55942726135254 + ], + [ + "▁Ferran", + -14.559530258178713 + ], + [ + "Jesse", + -14.559605598449709 + ], + [ + "Perk", + -14.55961799621582 + ], + [ + "▁terse", + -14.55964469909668 + ], + [ + "▁Tintin", + -14.559651374816896 + ], + [ + "ocular", + -14.55965805053711 + ], + [ + "▁Narc", + -14.55966091156006 + ], + [ + "scatter", + -14.559678077697754 + ], + [ + "granite", + -14.559715270996094 + ], + [ + "▁TBD", + -14.559734344482422 + ], + [ + "Progressive", + -14.559738159179688 + ], + [ + "▁bitrate", + -14.559755325317385 + ], + [ + "▁disordered", + -14.559770584106444 + ], + [ + "uur", + -14.55978298187256 + ], + [ + "▁joggers", + -14.55984592437744 + ], + [ + "▁£50,000", + -14.55986213684082 + ], + [ + "trusted", + -14.559893608093262 + ], + [ + "▁antagonistic", + -14.559894561767578 + ], + [ + "drilled", + -14.560030937194824 + ], + [ + "Snack", + -14.560147285461426 + ], + [ + "stuffed", + -14.560151100158691 + ], + [ + "neke", + -14.560210227966309 + ], + [ + "1972", + -14.56021499633789 + ], + [ + "Wool", + -14.560225486755373 + ], + [ + "Evil", + -14.560302734375 + ], + [ + "jy", + -14.560357093811035 + ], + [ + "▁Dena", + -14.560568809509276 + ], + [ + "▁paddleboard", + -14.560626029968262 + ], + [ + "Caring", + -14.560717582702637 + ], + [ + "▁porters", + -14.56078815460205 + ], + [ + "Processing", + -14.5608549118042 + ], + [ + "▁thermally", + -14.560879707336426 + ], + [ + "Studying", + -14.56092929840088 + ], + [ + "▁$70,000", + -14.56093406677246 + ], + [ + "▁shriek", + -14.560945510864258 + ], + [ + "motivation", + -14.560995101928713 + ], + [ + "Loose", + -14.561062812805176 + ], + [ + "bounce", + -14.561114311218262 + ], + [ + "▁Extrem", + -14.561214447021484 + ], + [ + "8:20", + -14.561236381530762 + ], + [ + "▁domicile", + -14.56124210357666 + ], + [ + "▁retaliate", + -14.56124210357666 + ], + [ + "▁milkshakes", + -14.561249732971191 + ], + [ + "▁dolly", + -14.561256408691406 + ], + [ + "enowned", + -14.561288833618164 + ], + [ + "▁Pola", + -14.561294555664062 + ], + [ + "▁insulators", + -14.5613374710083 + ], + [ + "▁fester", + -14.561375617980955 + ], + [ + "▁Leib", + -14.561383247375488 + ], + [ + "ASTRO", + -14.561423301696776 + ], + [ + "reserve", + -14.561540603637695 + ], + [ + "▁Surprising", + -14.56161117553711 + ], + [ + "▁empathic", + -14.561616897583008 + ], + [ + "PSE", + -14.56163215637207 + ], + [ + "Rocky", + -14.561637878417969 + ], + [ + "ilty", + -14.56174087524414 + ], + [ + "haan", + -14.56174373626709 + ], + [ + "▁mucosa", + -14.561756134033203 + ], + [ 
+ "magical", + -14.56180191040039 + ], + [ + "▁marbled", + -14.561829566955566 + ], + [ + "Negotiating", + -14.561844825744627 + ], + [ + "▁Centurion", + -14.561844825744627 + ], + [ + "▁Qualitative", + -14.561844825744627 + ], + [ + "▁Sorrento", + -14.561844825744627 + ], + [ + "▁cobwebs", + -14.561844825744627 + ], + [ + "▁fuchsia", + -14.561844825744627 + ], + [ + "▁indescribable", + -14.561844825744627 + ], + [ + "▁irrevocably", + -14.561844825744627 + ], + [ + "▁matriarch", + -14.561844825744627 + ], + [ + "▁mechanized", + -14.561844825744627 + ], + [ + "▁metamorphosis", + -14.561844825744627 + ], + [ + "▁neutrino", + -14.561844825744627 + ], + [ + "▁resonating", + -14.561844825744627 + ], + [ + "Persistence", + -14.561845779418944 + ], + [ + "▁Abhishek", + -14.561845779418944 + ], + [ + "▁Breitbart", + -14.561845779418944 + ], + [ + "▁COLLEGE", + -14.561845779418944 + ], + [ + "▁parfait", + -14.561845779418944 + ], + [ + "▁falafel", + -14.561846733093262 + ], + [ + "▁unboxing", + -14.561846733093262 + ], + [ + "▁Intercollegiate", + -14.561847686767578 + ], + [ + "▁tattered", + -14.561847686767578 + ], + [ + "▁Wandsworth", + -14.561848640441896 + ], + [ + "▁fronds", + -14.561848640441896 + ], + [ + "Hilarious", + -14.561849594116213 + ], + [ + "▁Optometry", + -14.561850547790527 + ], + [ + "▁machinations", + -14.561850547790527 + ], + [ + "▁Kenosha", + -14.561851501464844 + ], + [ + "▁Zeiss", + -14.561854362487791 + ], + [ + "▁Refreshments", + -14.561856269836426 + ], + [ + "abili", + -14.561863899230955 + ], + [ + "▁Kaduna", + -14.56186580657959 + ], + [ + "▁Zirconia", + -14.56186580657959 + ], + [ + "▁Termination", + -14.561869621276855 + ], + [ + "▁Valiant", + -14.561870574951172 + ], + [ + "▁Lucinda", + -14.561871528625488 + ], + [ + "▁kettlebell", + -14.561872482299805 + ], + [ + "▁Pinkerton", + -14.561882972717283 + ], + [ + "▁YWCA", + -14.561896324157717 + ], + [ + "▁Launchpad", + -14.561899185180664 + ], + [ + "▁uninterested", + -14.561901092529297 + ], + [ + "▁catchphrase", + -14.561903953552246 + ], + [ + "▁Givenchy", + -14.561914443969728 + ], + [ + "▁Cortland", + -14.561930656433104 + ], + [ + "▁Bidding", + -14.561932563781738 + ], + [ + "▁Braking", + -14.561932563781738 + ], + [ + "▁Gilroy", + -14.561933517456056 + ], + [ + "▁Blouse", + -14.561938285827637 + ], + [ + "Echo", + -14.561941146850586 + ], + [ + "▁Inspiron", + -14.56194305419922 + ], + [ + "Mex", + -14.56195831298828 + ], + [ + "macro", + -14.561959266662598 + ], + [ + "▁HCI", + -14.561972618103027 + ], + [ + "▁scrolled", + -14.56198787689209 + ], + [ + "▁Kofi", + -14.562004089355469 + ], + [ + "▁Buggy", + -14.56200885772705 + ], + [ + "▁repressive", + -14.562019348144531 + ], + [ + "▁loyalties", + -14.562020301818848 + ], + [ + "▁galvanised", + -14.562034606933594 + ], + [ + "édé", + -14.562060356140137 + ], + [ + "▁jock", + -14.562070846557615 + ], + [ + "Diver", + -14.56207275390625 + ], + [ + "▁Brenner", + -14.562073707580566 + ], + [ + "▁MPLS", + -14.562074661254885 + ], + [ + "▁Patchwork", + -14.5620756149292 + ], + [ + "▁XHTML", + -14.562076568603516 + ], + [ + "▁Bachelorette", + -14.562108993530272 + ], + [ + "▁Brodie", + -14.562140464782717 + ], + [ + "FFS", + -14.562143325805664 + ], + [ + "vastatin", + -14.562149047851562 + ], + [ + "▁unease", + -14.56215000152588 + ], + [ + "▁Stomp", + -14.562150955200195 + ], + [ + "▁Knockout", + -14.562158584594728 + ], + [ + "▁Visionary", + -14.562158584594728 + ], + [ + "▁Eastbourne", + -14.56216526031494 + ], + [ + "▁moderating", + -14.562178611755373 + ], + [ 
+ "Woohoo", + -14.562182426452637 + ], + [ + "▁Downer", + -14.562216758728027 + ], + [ + "▁Grau", + -14.562232971191406 + ], + [ + "▁Flathead", + -14.562263488769531 + ], + [ + "▁bangles", + -14.56228256225586 + ], + [ + "▁Ghan", + -14.56228733062744 + ], + [ + "▁Guerra", + -14.562296867370604 + ], + [ + "▁hurled", + -14.562301635742188 + ], + [ + "▁clubbing", + -14.562336921691896 + ], + [ + "▁menards", + -14.562339782714844 + ], + [ + "▁Esquire", + -14.562426567077637 + ], + [ + "▁Brookline", + -14.56251335144043 + ], + [ + "▁Guesthouse", + -14.56251621246338 + ], + [ + "▁Pooja", + -14.562518119812012 + ], + [ + "▁Martindale", + -14.562527656555176 + ], + [ + "▁Ladd", + -14.56257152557373 + ], + [ + "▁Polka", + -14.56265354156494 + ], + [ + "▁awkwardness", + -14.562682151794434 + ], + [ + "▁fizzle", + -14.562751770019531 + ], + [ + "▁daydreaming", + -14.562762260437012 + ], + [ + "▁Riverfront", + -14.562772750854492 + ], + [ + "▁redistributed", + -14.562779426574709 + ], + [ + "▁Oates", + -14.562787055969238 + ], + [ + "▁Caregivers", + -14.562856674194336 + ], + [ + "▁stingy", + -14.562973022460938 + ], + [ + "Eva", + -14.56311321258545 + ], + [ + "Putin", + -14.56313705444336 + ], + [ + "–15", + -14.56318187713623 + ], + [ + "drawer", + -14.563186645507812 + ], + [ + "||", + -14.563215255737305 + ], + [ + "▁creamer", + -14.56321620941162 + ], + [ + "▁Olympians", + -14.563220977783203 + ], + [ + "qe", + -14.563270568847656 + ], + [ + "Cuban", + -14.563312530517578 + ], + [ + "▁spaceflight", + -14.5633544921875 + ], + [ + "▁Vande", + -14.56341552734375 + ], + [ + "▁mantels", + -14.563417434692385 + ], + [ + "delta", + -14.563429832458496 + ], + [ + "kinetic", + -14.563430786132812 + ], + [ + "▁Inquire", + -14.563436508178713 + ], + [ + "▁$3.8", + -14.56358242034912 + ], + [ + "▁Femme", + -14.563632011413574 + ], + [ + "Realistically", + -14.563644409179688 + ], + [ + "Phillips", + -14.563691139221191 + ], + [ + "jira", + -14.563729286193848 + ], + [ + "▁(£2", + -14.563860893249512 + ], + [ + "▁1031", + -14.563942909240724 + ], + [ + "mountable", + -14.564043045043944 + ], + [ + "▁CAPS", + -14.564148902893066 + ], + [ + "▁Stamping", + -14.564193725585938 + ], + [ + "▁$149", + -14.564213752746582 + ], + [ + "▁diffused", + -14.564226150512695 + ], + [ + "zahl", + -14.564268112182615 + ], + [ + "Eph", + -14.564291000366213 + ], + [ + "▁monomer", + -14.564352989196776 + ], + [ + "▁rabbis", + -14.564430236816406 + ], + [ + "IDC", + -14.564471244812012 + ], + [ + "▁keyboardist", + -14.564475059509276 + ], + [ + "valo", + -14.56452465057373 + ], + [ + "▁Needham", + -14.56452751159668 + ], + [ + "▁beeps", + -14.564610481262209 + ], + [ + "▁Kink", + -14.564668655395508 + ], + [ + "▁Sable", + -14.564715385437012 + ], + [ + "Tent", + -14.564748764038086 + ], + [ + "▁Karel", + -14.564806938171388 + ], + [ + "▁outpace", + -14.5648832321167 + ], + [ + "Funk", + -14.564905166625977 + ], + [ + "▁scammer", + -14.564910888671877 + ], + [ + "tribe", + -14.564926147460938 + ], + [ + "▁Wilma", + -14.56496524810791 + ], + [ + "vick", + -14.564969062805176 + ], + [ + "▁PUL", + -14.564969062805176 + ], + [ + "▁fuelling", + -14.564981460571287 + ], + [ + "▁SNA", + -14.565134048461914 + ], + [ + "▁triumphed", + -14.56518268585205 + ], + [ + "aggio", + -14.565192222595217 + ], + [ + "▁DRIVER", + -14.565196990966797 + ], + [ + "cton", + -14.565229415893556 + ], + [ + "columbia", + -14.565353393554688 + ], + [ + "START", + -14.56536102294922 + ], + [ + "▁TTL", + -14.565383911132812 + ], + [ + "▁underwritten", + 
-14.565420150756836 + ], + [ + "malai", + -14.565447807312012 + ], + [ + "▁Proofing", + -14.565594673156738 + ], + [ + "SECURE", + -14.565807342529297 + ], + [ + "▁roped", + -14.565834999084473 + ], + [ + "Preserving", + -14.5658597946167 + ], + [ + "▁Lyrica", + -14.565939903259276 + ], + [ + "▁Tolle", + -14.566049575805664 + ], + [ + "▁ranting", + -14.566152572631836 + ], + [ + "Printer", + -14.566211700439451 + ], + [ + "▁Stiff", + -14.566320419311523 + ], + [ + "▁Ante", + -14.56635284423828 + ], + [ + "▁Allure", + -14.566478729248049 + ], + [ + "pleasant", + -14.566506385803224 + ], + [ + "JAM", + -14.566530227661133 + ], + [ + "Accident", + -14.56656265258789 + ], + [ + "Width", + -14.566570281982422 + ], + [ + "Cllr", + -14.56657600402832 + ], + [ + "Kennedy", + -14.566580772399902 + ], + [ + "identification", + -14.566598892211914 + ], + [ + "Implementation", + -14.566601753234863 + ], + [ + "Leslie", + -14.566617012023926 + ], + [ + "abiding", + -14.566632270812988 + ], + [ + "Minimal", + -14.566673278808594 + ], + [ + "Prevention", + -14.566678047180176 + ], + [ + "▁CARS", + -14.56679344177246 + ], + [ + "hoop", + -14.566861152648926 + ], + [ + "Ville", + -14.566935539245604 + ], + [ + "ooligan", + -14.566943168640137 + ], + [ + "Diploma", + -14.56695556640625 + ], + [ + "uir", + -14.566977500915527 + ], + [ + "ladi", + -14.566978454589844 + ], + [ + "phosph", + -14.567061424255373 + ], + [ + "glycer", + -14.567142486572266 + ], + [ + "urine", + -14.567197799682615 + ], + [ + "argent", + -14.567238807678224 + ], + [ + "▁Immigrants", + -14.567272186279297 + ], + [ + "▁rusher", + -14.567283630371094 + ], + [ + "enemy", + -14.567336082458496 + ], + [ + "egal", + -14.567394256591797 + ], + [ + "▁Fizz", + -14.567398071289062 + ], + [ + "EAU", + -14.5674467086792 + ], + [ + "▁faxed", + -14.567523002624512 + ], + [ + "undo", + -14.567567825317385 + ], + [ + "▁Martino", + -14.56759548187256 + ], + [ + "▁MGA", + -14.56762981414795 + ], + [ + "TENT", + -14.567649841308594 + ], + [ + "▁moisturise", + -14.5676908493042 + ], + [ + "packer", + -14.567709922790527 + ], + [ + "▁ohms", + -14.56775951385498 + ], + [ + "assurance", + -14.567800521850586 + ], + [ + "steady", + -14.567834854125977 + ], + [ + "2/3", + -14.56783676147461 + ], + [ + "79)", + -14.56793212890625 + ], + [ + "afford", + -14.567962646484377 + ], + [ + "Administer", + -14.56797695159912 + ], + [ + "▁pedaling", + -14.568035125732422 + ], + [ + "▁Dap", + -14.568150520324709 + ], + [ + "reliant", + -14.568207740783691 + ], + [ + "▁Citro", + -14.56826114654541 + ], + [ + "▁Morro", + -14.568296432495115 + ], + [ + "kraft", + -14.568323135375977 + ], + [ + "erci", + -14.568379402160645 + ], + [ + "Nusra", + -14.568431854248049 + ], + [ + "kovsky", + -14.56849193572998 + ], + [ + "ASCO", + -14.56849765777588 + ], + [ + "betting", + -14.568517684936523 + ], + [ + "▁VEHICLE", + -14.568611145019531 + ], + [ + "▁Reh", + -14.568669319152832 + ], + [ + "nurse", + -14.56876277923584 + ], + [ + "▁OSC", + -14.568787574768066 + ], + [ + "/45", + -14.568859100341797 + ], + [ + "▁introverts", + -14.568894386291504 + ], + [ + "▁Societ", + -14.568936347961426 + ], + [ + "▁Tonya", + -14.56894302368164 + ], + [ + "▁planar", + -14.568962097167969 + ], + [ + "▁naught", + -14.56896686553955 + ], + [ + "ungu", + -14.568982124328612 + ], + [ + "tetra", + -14.568995475769045 + ], + [ + "tpd", + -14.56903839111328 + ], + [ + "▁winder", + -14.569076538085938 + ], + [ + "Identifiable", + -14.569091796875 + ], + [ + "▁Baccalaureate", + -14.569091796875 + ], + 
[ + "▁Basingstoke", + -14.569091796875 + ], + [ + "▁Bonneville", + -14.569091796875 + ], + [ + "▁Budweiser", + -14.569091796875 + ], + [ + "▁Khashoggi", + -14.569091796875 + ], + [ + "▁Olmsted", + -14.569091796875 + ], + [ + "▁Splendor", + -14.569091796875 + ], + [ + "▁antecedent", + -14.569091796875 + ], + [ + "▁metronome", + -14.569091796875 + ], + [ + "▁propagating", + -14.569091796875 + ], + [ + "▁reconciling", + -14.569091796875 + ], + [ + "▁reincarnation", + -14.569091796875 + ], + [ + "▁Kauffman", + -14.569092750549316 + ], + [ + "▁Nautilus", + -14.569092750549316 + ], + [ + "▁Indu", + -14.569093704223633 + ], + [ + "▁growling", + -14.569093704223633 + ], + [ + "▁premiering", + -14.569093704223633 + ], + [ + "▁Athenian", + -14.56909465789795 + ], + [ + "luminescence", + -14.569095611572266 + ], + [ + "▁Qigong", + -14.569096565246582 + ], + [ + "▁Toastmasters", + -14.569096565246582 + ], + [ + "▁unduly", + -14.569099426269531 + ], + [ + "▁Rotorua", + -14.569101333618164 + ], + [ + "▁Kevlar", + -14.56910514831543 + ], + [ + "▁shaggy", + -14.56910514831543 + ], + [ + "▁enviroment", + -14.569107055664062 + ], + [ + "immunodeficiency", + -14.56911277770996 + ], + [ + "▁Chunky", + -14.56911563873291 + ], + [ + "▁Nicklaus", + -14.569121360778809 + ], + [ + "▁Permian", + -14.569121360778809 + ], + [ + "▁UNTIL", + -14.569127082824709 + ], + [ + "▁abhor", + -14.569128036499023 + ], + [ + "abc", + -14.569132804870604 + ], + [ + "▁purview", + -14.569143295288086 + ], + [ + "▁Maidstone", + -14.569151878356934 + ], + [ + "OPERATION", + -14.569154739379885 + ], + [ + "▁Borderlands", + -14.56916046142578 + ], + [ + "▁spielen", + -14.569169998168944 + ], + [ + "▁Enigma", + -14.569178581237791 + ], + [ + "▁Trafficking", + -14.569215774536133 + ], + [ + "▁1804", + -14.56922435760498 + ], + [ + "▁Danvers", + -14.56923484802246 + ], + [ + "▁superimposed", + -14.569238662719728 + ], + [ + "▁Albright", + -14.569255828857422 + ], + [ + "▁Kwe", + -14.569262504577637 + ], + [ + "▁condoms", + -14.569289207458496 + ], + [ + "▁Maximus", + -14.569337844848633 + ], + [ + "▁LOUIS", + -14.569343566894531 + ], + [ + "▁Bogdan", + -14.569368362426758 + ], + [ + "▁weirdly", + -14.569384574890137 + ], + [ + "▁Macao", + -14.569397926330566 + ], + [ + "Antioxidant", + -14.569429397583008 + ], + [ + "1-16", + -14.569439888000488 + ], + [ + "founding", + -14.5694580078125 + ], + [ + "▁Amitabh", + -14.56946086883545 + ], + [ + "▁Servo", + -14.56947135925293 + ], + [ + "peng", + -14.569480895996094 + ], + [ + "▁Slough", + -14.569506645202637 + ], + [ + "CDF", + -14.569510459899902 + ], + [ + "▁deafness", + -14.569527626037598 + ], + [ + "▁TSC", + -14.569538116455078 + ], + [ + "▁Cheney", + -14.569540023803713 + ], + [ + "CPU", + -14.569561958312988 + ], + [ + "▁okra", + -14.569586753845217 + ], + [ + "▁Wicca", + -14.569621086120604 + ], + [ + "▁diehard", + -14.569637298583984 + ], + [ + "▁DME", + -14.56964111328125 + ], + [ + "Tried", + -14.569703102111816 + ], + [ + "▁DEEP", + -14.569764137268066 + ], + [ + "▁proscar", + -14.56980037689209 + ], + [ + "EPC", + -14.569809913635254 + ], + [ + "▁twisty", + -14.569815635681152 + ], + [ + "▁Tully", + -14.569884300231934 + ], + [ + "eeeee", + -14.569890022277832 + ], + [ + "▁Pero", + -14.569903373718262 + ], + [ + "▁(2000", + -14.56994342803955 + ], + [ + "loppy", + -14.569951057434082 + ], + [ + "▁piety", + -14.569955825805664 + ], + [ + "▁Kisses", + -14.569960594177246 + ], + [ + "▁confesses", + -14.569976806640623 + ], + [ + "▁puede", + -14.570023536682127 + ], + [ + "▁Amids", + 
-14.570076942443848 + ], + [ + "▁Moen", + -14.57011890411377 + ], + [ + "▁Cough", + -14.570134162902832 + ], + [ + "washer", + -14.570181846618652 + ], + [ + "▁Nc", + -14.57029628753662 + ], + [ + "▁Reborn", + -14.570343017578123 + ], + [ + "happen", + -14.570351600646973 + ], + [ + "▁regrowth", + -14.570394515991213 + ], + [ + "Delay", + -14.570436477661133 + ], + [ + "Callaghan", + -14.570500373840332 + ], + [ + "Filing", + -14.570524215698242 + ], + [ + "GUARANTEED", + -14.57068157196045 + ], + [ + "▁Riff", + -14.5707368850708 + ], + [ + "sickness", + -14.570744514465332 + ], + [ + "busy", + -14.570745468139648 + ], + [ + "habi", + -14.570793151855469 + ], + [ + "▁specialism", + -14.570878028869627 + ], + [ + "cik", + -14.570918083190918 + ], + [ + "1.20", + -14.57099723815918 + ], + [ + "Cone", + -14.571008682250977 + ], + [ + "▁Quilted", + -14.571046829223633 + ], + [ + "Sala", + -14.571083068847656 + ], + [ + "DHA", + -14.57111644744873 + ], + [ + "▁GMB", + -14.571196556091309 + ], + [ + "▁Ambala", + -14.571211814880373 + ], + [ + "▁MSD", + -14.57122039794922 + ], + [ + "globe", + -14.571269035339355 + ], + [ + "LinkedIn", + -14.57136058807373 + ], + [ + "pork", + -14.571383476257324 + ], + [ + "mene", + -14.571402549743652 + ], + [ + "▁Kenner", + -14.571517944335938 + ], + [ + "osomal", + -14.5715970993042 + ], + [ + "▁embodying", + -14.571622848510742 + ], + [ + "▁shackle", + -14.571645736694336 + ], + [ + "Mira", + -14.571654319763184 + ], + [ + "▁hydrangeas", + -14.571677207946776 + ], + [ + "▁beefy", + -14.57172966003418 + ], + [ + "▁Leland", + -14.571732521057127 + ], + [ + "alicious", + -14.571782112121582 + ], + [ + "Fulfil", + -14.571788787841797 + ], + [ + "Dub", + -14.571815490722656 + ], + [ + "9-18", + -14.571850776672363 + ], + [ + "▁Vou", + -14.57187271118164 + ], + [ + "▁Prey", + -14.571895599365234 + ], + [ + "NITE", + -14.571945190429688 + ], + [ + "▁isotope", + -14.572083473205566 + ], + [ + "Loo", + -14.572088241577148 + ], + [ + "▁kebab", + -14.572092056274414 + ], + [ + "-4000", + -14.57209300994873 + ], + [ + "▁archers", + -14.5721435546875 + ], + [ + "▁critter", + -14.572181701660156 + ], + [ + "▁Lookup", + -14.572216987609863 + ], + [ + "▁Cerro", + -14.572236061096191 + ], + [ + "▁oncologists", + -14.572251319885254 + ], + [ + "▁overbought", + -14.572315216064451 + ], + [ + "▁Tiber", + -14.572396278381348 + ], + [ + "▁Bama", + -14.572417259216309 + ], + [ + "▁kre", + -14.572487831115724 + ], + [ + "▁Ranjan", + -14.572513580322266 + ], + [ + ">>>>", + -14.572517395019531 + ], + [ + "LLO", + -14.57254123687744 + ], + [ + "▁breaded", + -14.572586059570312 + ], + [ + "personalized", + -14.57259750366211 + ], + [ + "=5", + -14.572622299194336 + ], + [ + "▁darkly", + -14.57266330718994 + ], + [ + "▁Ripp", + -14.572842597961426 + ], + [ + "▁Bhavan", + -14.572914123535156 + ], + [ + "WAG", + -14.573002815246582 + ], + [ + "▁1801", + -14.573087692260742 + ], + [ + "Alias", + -14.57310390472412 + ], + [ + "erberg", + -14.573145866394045 + ], + [ + "▁disconnecting", + -14.573159217834473 + ], + [ + "Lawyers", + -14.573186874389648 + ], + [ + "▁Bayley", + -14.57318878173828 + ], + [ + "▁nomad", + -14.57329273223877 + ], + [ + "ilities", + -14.573304176330566 + ], + [ + "8.2%", + -14.573352813720703 + ], + [ + "▁isotopes", + -14.573387145996094 + ], + [ + "HEY", + -14.573442459106444 + ], + [ + "cella", + -14.57352352142334 + ], + [ + "▁biggie", + -14.573528289794922 + ], + [ + "▁Bani", + -14.573543548583984 + ], + [ + "Disp", + -14.573561668395996 + ], + [ + "▁keystone", 
+ -14.573578834533691 + ], + [ + "Lamp", + -14.57362937927246 + ], + [ + "▁Margo", + -14.573740005493164 + ], + [ + "tiger", + -14.573752403259276 + ], + [ + "isser", + -14.573755264282228 + ], + [ + "▁limiter", + -14.573894500732422 + ], + [ + "fixes", + -14.573904037475586 + ], + [ + "▁Arre", + -14.573925971984863 + ], + [ + "UTA", + -14.57394313812256 + ], + [ + "Stitch", + -14.573970794677734 + ], + [ + "▁MCSA", + -14.57398509979248 + ], + [ + "principal", + -14.574020385742188 + ], + [ + "Pioneer", + -14.574026107788086 + ], + [ + "Promoting", + -14.574027061462402 + ], + [ + "levi", + -14.574027061462402 + ], + [ + "Tuition", + -14.57402801513672 + ], + [ + "Intermediate", + -14.5740327835083 + ], + [ + "continuous", + -14.574034690856934 + ], + [ + "▁Reprint", + -14.574034690856934 + ], + [ + "Qualified", + -14.574037551879885 + ], + [ + "Married", + -14.574040412902832 + ], + [ + "vulnerable", + -14.57404613494873 + ], + [ + "Parse", + -14.574052810668944 + ], + [ + "Curtis", + -14.574055671691896 + ], + [ + "zuela", + -14.574055671691896 + ], + [ + "acceptance", + -14.574060440063477 + ], + [ + "bacteria", + -14.5740966796875 + ], + [ + "Proud", + -14.574139595031738 + ], + [ + "blastoma", + -14.574139595031738 + ], + [ + "Iranian", + -14.57422161102295 + ], + [ + "paragraph", + -14.57422924041748 + ], + [ + "▁compar", + -14.574254035949709 + ], + [ + "affiliate", + -14.574254989624023 + ], + [ + "Creator", + -14.57425594329834 + ], + [ + "injured", + -14.57427978515625 + ], + [ + "▁%)", + -14.574304580688477 + ], + [ + "dermal", + -14.574423789978027 + ], + [ + "▁shortcake", + -14.574461936950684 + ], + [ + "hov", + -14.574511528015137 + ], + [ + "shev", + -14.57451343536377 + ], + [ + "bedded", + -14.574536323547363 + ], + [ + "▁fireman", + -14.574556350708008 + ], + [ + "▁lunges", + -14.574560165405272 + ], + [ + "▁#19", + -14.574566841125488 + ], + [ + "DEAL", + -14.574721336364746 + ], + [ + "DHS", + -14.574748039245604 + ], + [ + "PBS", + -14.5747652053833 + ], + [ + "▁Tahir", + -14.57489013671875 + ], + [ + "Characterization", + -14.5750093460083 + ], + [ + "nasal", + -14.575191497802734 + ], + [ + "agnol", + -14.575213432312012 + ], + [ + "▁yogis", + -14.575242042541504 + ], + [ + "▁Coun", + -14.575251579284668 + ], + [ + "▁peeler", + -14.575282096862791 + ], + [ + "employer", + -14.57532787322998 + ], + [ + "Homeowners", + -14.575407028198242 + ], + [ + "▁Toaster", + -14.575458526611328 + ], + [ + "documents", + -14.575464248657228 + ], + [ + "▁Vast", + -14.57563591003418 + ], + [ + "skiy", + -14.57568359375 + ], + [ + "▁DPM", + -14.57573699951172 + ], + [ + "assign", + -14.575772285461426 + ], + [ + "UZZ", + -14.575804710388184 + ], + [ + "lily", + -14.575854301452637 + ], + [ + "▁Lewin", + -14.57588005065918 + ], + [ + "Vector", + -14.57589054107666 + ], + [ + "▁MATERIAL", + -14.575912475585938 + ], + [ + "ERSON", + -14.57594108581543 + ], + [ + "▁Acadian", + -14.57598114013672 + ], + [ + "▁Christo", + -14.575998306274414 + ], + [ + "etus", + -14.576072692871094 + ], + [ + "▁Kne", + -14.576111793518066 + ], + [ + "▁Trevi", + -14.5762300491333 + ], + [ + "▁Straus", + -14.576290130615234 + ], + [ + "▁dodged", + -14.576330184936523 + ], + [ + "ocytic", + -14.576337814331056 + ], + [ + "▁Haut", + -14.576367378234863 + ], + [ + "COPYRIGHT", + -14.576391220092772 + ], + [ + "Leviticus", + -14.576391220092772 + ], + [ + "REGISTRATION", + -14.576391220092772 + ], + [ + "▁Keurig", + -14.576391220092772 + ], + [ + "▁Macclesfield", + -14.576391220092772 + ], + [ + 
"▁Revitalization", + -14.576391220092772 + ], + [ + "▁UNIVERSITY", + -14.576391220092772 + ], + [ + "▁doctrinal", + -14.576391220092772 + ], + [ + "▁econometric", + -14.576391220092772 + ], + [ + "▁grievous", + -14.576391220092772 + ], + [ + "▁mercenary", + -14.576391220092772 + ], + [ + "▁vertices", + -14.576391220092772 + ], + [ + "▁wrangling", + -14.576391220092772 + ], + [ + "▁Custody", + -14.57639217376709 + ], + [ + "▁acupressure", + -14.57639217376709 + ], + [ + "▁dyslexic", + -14.57639217376709 + ], + [ + "▁zeolite", + -14.57639217376709 + ], + [ + "▁Bahamian", + -14.576393127441406 + ], + [ + "▁Devotional", + -14.576393127441406 + ], + [ + "▁lunatic", + -14.576393127441406 + ], + [ + "▁scheming", + -14.576393127441406 + ], + [ + "▁Armageddon", + -14.576394081115724 + ], + [ + "▁NETWORK", + -14.576394081115724 + ], + [ + "▁amperage", + -14.57639503479004 + ], + [ + "▁Afrikaans", + -14.576395988464355 + ], + [ + "▁Whittaker", + -14.576395988464355 + ], + [ + "▁jaguar", + -14.576395988464355 + ], + [ + "▁dictation", + -14.576398849487305 + ], + [ + "▁sprocket", + -14.576398849487305 + ], + [ + "▁snuggly", + -14.576401710510254 + ], + [ + "▁DPRK", + -14.57640266418457 + ], + [ + "▁Pompano", + -14.576403617858888 + ], + [ + "▁Matheson", + -14.576406478881836 + ], + [ + "▁Streep", + -14.576406478881836 + ], + [ + "▁Camellia", + -14.576409339904783 + ], + [ + "▁meatloaf", + -14.576409339904783 + ], + [ + "▁Coughlin", + -14.576410293579102 + ], + [ + "▁Cinque", + -14.576416015625 + ], + [ + "▁libido", + -14.576421737670898 + ], + [ + "▁Interfaith", + -14.576425552368164 + ], + [ + "▁Charlene", + -14.57642650604248 + ], + [ + "▁Composing", + -14.57642650604248 + ], + [ + "▁PTFE", + -14.57642650604248 + ], + [ + "▁Thoreau", + -14.576427459716797 + ], + [ + "▁improvising", + -14.576427459716797 + ], + [ + "▁obverse", + -14.576427459716797 + ], + [ + "▁GOLF", + -14.576437950134276 + ], + [ + "▁umami", + -14.57644271850586 + ], + [ + "BUILT", + -14.576444625854492 + ], + [ + "▁Astrid", + -14.576446533203123 + ], + [ + "▁ANYONE", + -14.576451301574709 + ], + [ + "▁WINNER", + -14.576486587524414 + ], + [ + "▁mange", + -14.576492309570312 + ], + [ + "Pam", + -14.576496124267578 + ], + [ + "▁eHarmony", + -14.576498985290527 + ], + [ + "▁unproven", + -14.576499938964844 + ], + [ + "▁Elmira", + -14.576501846313477 + ], + [ + "▁Malaria", + -14.576519012451172 + ], + [ + "▁Anzac", + -14.5765380859375 + ], + [ + "▁Tompkins", + -14.57654857635498 + ], + [ + "▁Lala", + -14.57655429840088 + ], + [ + "▁Nightclub", + -14.576568603515623 + ], + [ + "▁Bowden", + -14.576583862304688 + ], + [ + "▁(02)", + -14.57662296295166 + ], + [ + "▁Gateshead", + -14.576623916625977 + ], + [ + "▁Burden", + -14.576667785644531 + ], + [ + "▁Nasser", + -14.576695442199709 + ], + [ + "▁Moline", + -14.576699256896973 + ], + [ + "1:55", + -14.576704025268556 + ], + [ + "▁Leander", + -14.576706886291504 + ], + [ + "▁postponement", + -14.576745986938477 + ], + [ + "0.5%", + -14.5767822265625 + ], + [ + "weiler", + -14.57679271697998 + ], + [ + "▁Stampede", + -14.576794624328612 + ], + [ + "ackling", + -14.576804161071776 + ], + [ + "▁Malloy", + -14.57680892944336 + ], + [ + "genomic", + -14.576837539672852 + ], + [ + "Accordingly", + -14.576875686645508 + ], + [ + "4.95", + -14.576899528503418 + ], + [ + "▁retraction", + -14.57690143585205 + ], + [ + "▁buoys", + -14.57690715789795 + ], + [ + "▁Honeycomb", + -14.576971054077148 + ], + [ + "▁tabbed", + -14.576984405517578 + ], + [ + "▁BOOM", + -14.576991081237791 + ], + [ + 
"▁dwindled", + -14.577011108398438 + ], + [ + "nash", + -14.577028274536133 + ], + [ + "▁seattle", + -14.577051162719728 + ], + [ + "▁MCSE", + -14.577071189880373 + ], + [ + "▁Ridgefield", + -14.577160835266112 + ], + [ + "▁dissected", + -14.577162742614746 + ], + [ + "▁Roaring", + -14.57720184326172 + ], + [ + "bergen", + -14.57721710205078 + ], + [ + "8800", + -14.577227592468262 + ], + [ + "▁sharpest", + -14.577248573303224 + ], + [ + "▁angina", + -14.57724952697754 + ], + [ + "▁thickener", + -14.577269554138184 + ], + [ + "rattan", + -14.5772705078125 + ], + [ + "▁obsessively", + -14.577289581298828 + ], + [ + "▁14:00", + -14.577302932739258 + ], + [ + "▁Sensex", + -14.577366828918455 + ], + [ + "▁HOR", + -14.577377319335938 + ], + [ + "▁waned", + -14.577390670776367 + ], + [ + "Prote", + -14.577449798583984 + ], + [ + "▁defuse", + -14.577462196350098 + ], + [ + "▁reparations", + -14.577531814575195 + ], + [ + "▁transgressions", + -14.577536582946776 + ], + [ + "stripe", + -14.577557563781738 + ], + [ + "phospho", + -14.577604293823242 + ], + [ + "▁15:00", + -14.577695846557615 + ], + [ + "▁bathrobes", + -14.577704429626465 + ], + [ + "▁Salim", + -14.577716827392578 + ], + [ + "▁easements", + -14.577717781066896 + ], + [ + "▁Cee", + -14.577935218811035 + ], + [ + "HIP", + -14.577943801879885 + ], + [ + "therapeutic", + -14.577972412109377 + ], + [ + "▁vida", + -14.578022003173828 + ], + [ + "▁Juni", + -14.578025817871094 + ], + [ + "▁thankfulness", + -14.578032493591309 + ], + [ + "dane", + -14.578046798706056 + ], + [ + "▁ELC", + -14.5780668258667 + ], + [ + "trusion", + -14.578113555908203 + ], + [ + "transition", + -14.57831573486328 + ], + [ + "▁weirdness", + -14.578319549560549 + ], + [ + "witted", + -14.578323364257812 + ], + [ + "▁£1000", + -14.578333854675291 + ], + [ + "▁pinhole", + -14.57834243774414 + ], + [ + "▁Colla", + -14.578354835510254 + ], + [ + "▁Mikro", + -14.57835578918457 + ], + [ + "▁Rascal", + -14.578378677368164 + ], + [ + "imble", + -14.578399658203123 + ], + [ + "DEV", + -14.578469276428224 + ], + [ + "▁Evangel", + -14.578478813171388 + ], + [ + "▁Frye", + -14.578495025634766 + ], + [ + "▁Jock", + -14.57850170135498 + ], + [ + "71)", + -14.578509330749512 + ], + [ + "▁Shatter", + -14.578574180603027 + ], + [ + "zny", + -14.578622817993164 + ], + [ + "▁concisely", + -14.57877254486084 + ], + [ + "feldt", + -14.57878875732422 + ], + [ + "▁previewed", + -14.57882595062256 + ], + [ + "GREEN", + -14.578853607177734 + ], + [ + "1-18", + -14.57890796661377 + ], + [ + "tergovernmental", + -14.578914642333984 + ], + [ + "installing", + -14.578927040100098 + ], + [ + "▁Toku", + -14.578972816467283 + ], + [ + "▁Sorting", + -14.57899570465088 + ], + [ + "▁Quora", + -14.57904052734375 + ], + [ + "▁Mariano", + -14.57907009124756 + ], + [ + "▁oscillations", + -14.579081535339355 + ], + [ + "▁#30", + -14.579132080078123 + ], + [ + "▁characterizing", + -14.57929801940918 + ], + [ + "babu", + -14.57932186126709 + ], + [ + "▁Mandate", + -14.57932949066162 + ], + [ + "▁Tobi", + -14.579392433166504 + ], + [ + "homa", + -14.579401016235352 + ], + [ + "Maple", + -14.579425811767578 + ], + [ + "▁Gower", + -14.579583168029783 + ], + [ + "▁Bangle", + -14.579618453979492 + ], + [ + "RICH", + -14.579662322998049 + ], + [ + "ORSE", + -14.579671859741213 + ], + [ + "▁Disrupt", + -14.579755783081056 + ], + [ + "Digging", + -14.57982349395752 + ], + [ + "▁Tatar", + -14.579846382141112 + ], + [ + "▁Goodwood", + -14.579923629760742 + ], + [ + "▁initialized", + -14.580194473266602 + ], + [ + 
"▁1834", + -14.580214500427246 + ], + [ + "0.45", + -14.580309867858888 + ], + [ + "ZD", + -14.580323219299316 + ], + [ + "rington", + -14.58047866821289 + ], + [ + "▁shackles", + -14.5805025100708 + ], + [ + "elie", + -14.580520629882812 + ], + [ + "▁showpiece", + -14.58052921295166 + ], + [ + "▁Bonnet", + -14.580538749694824 + ], + [ + "SDI", + -14.580567359924316 + ], + [ + "animation", + -14.580617904663086 + ], + [ + "allowing", + -14.580670356750488 + ], + [ + "▁Kran", + -14.58074951171875 + ], + [ + "regulate", + -14.58095932006836 + ], + [ + "ppler", + -14.581018447875977 + ], + [ + "oidal", + -14.581069946289062 + ], + [ + "▁FTD", + -14.58112335205078 + ], + [ + "▁32,000", + -14.581172943115234 + ], + [ + "▁Mussel", + -14.581194877624512 + ], + [ + "▁Jawa", + -14.581228256225586 + ], + [ + "FMC", + -14.581293106079102 + ], + [ + "vich", + -14.581377983093262 + ], + [ + "▁Metz", + -14.58138656616211 + ], + [ + "▁sony", + -14.581398963928224 + ], + [ + "▁£400", + -14.581419944763184 + ], + [ + "▁Godwin", + -14.581450462341309 + ], + [ + "16%", + -14.5814790725708 + ], + [ + "ilan", + -14.581486701965332 + ], + [ + "romo", + -14.581515312194824 + ], + [ + "Equipped", + -14.581551551818848 + ], + [ + "Calcium", + -14.58155632019043 + ], + [ + "Paradise", + -14.581558227539062 + ], + [ + "administered", + -14.581565856933594 + ], + [ + "Deluxe", + -14.581570625305176 + ], + [ + "Depression", + -14.581571578979492 + ], + [ + "Quantum", + -14.58157444000244 + ], + [ + "UFC", + -14.581585884094238 + ], + [ + "Watson", + -14.58160400390625 + ], + [ + "explain", + -14.581618309020996 + ], + [ + "Concert", + -14.581635475158691 + ], + [ + "technologies", + -14.581647872924805 + ], + [ + "laughs", + -14.581676483154297 + ], + [ + "entertainment", + -14.58169651031494 + ], + [ + "▁earthworm", + -14.581698417663574 + ], + [ + "wilder", + -14.581782341003418 + ], + [ + "LEE", + -14.581857681274414 + ], + [ + "identical", + -14.581875801086426 + ], + [ + "▁Effort", + -14.581891059875488 + ], + [ + "bata", + -14.581900596618652 + ], + [ + "▁Discounted", + -14.581908226013184 + ], + [ + "grilled", + -14.581916809082031 + ], + [ + "▁14-16", + -14.582037925720217 + ], + [ + "freezing", + -14.582060813903809 + ], + [ + "4.4%", + -14.5820951461792 + ], + [ + "▁Wadi", + -14.58220672607422 + ], + [ + "readable", + -14.582207679748535 + ], + [ + "▁Booz", + -14.58225917816162 + ], + [ + "inkel", + -14.582292556762695 + ], + [ + "▁Merci", + -14.58230972290039 + ], + [ + "trus", + -14.582420349121094 + ], + [ + "▁commercialized", + -14.58244800567627 + ], + [ + "cumen", + -14.582449913024902 + ], + [ + "($", + -14.582480430603027 + ], + [ + "WTS", + -14.58255672454834 + ], + [ + "HAP", + -14.582569122314451 + ], + [ + "Lea", + -14.582601547241213 + ], + [ + "fool", + -14.58262062072754 + ], + [ + "▁Rukh", + -14.582684516906738 + ], + [ + "▁RSM", + -14.582693099975586 + ], + [ + "theory", + -14.582695960998535 + ], + [ + "▁hotbed", + -14.582711219787598 + ], + [ + "▁digitize", + -14.582714080810549 + ], + [ + "pendant", + -14.582816123962402 + ], + [ + "quenching", + -14.582837104797363 + ], + [ + "Presenting", + -14.58288860321045 + ], + [ + "▁HOM", + -14.582889556884766 + ], + [ + "Penny", + -14.5829496383667 + ], + [ + "presentation", + -14.58297634124756 + ], + [ + "0.25", + -14.582977294921877 + ], + [ + "▁bandit", + -14.582983016967772 + ], + [ + "Recording", + -14.583149909973145 + ], + [ + "▁NME", + -14.58316421508789 + ], + [ + "▁Ananda", + -14.583202362060549 + ], + [ + "Tick", + -14.583213806152344 
+ ], + [ + "▁reintroduce", + -14.58321762084961 + ], + [ + "▁Kier", + -14.583240509033203 + ], + [ + "▁Cornelia", + -14.583250999450684 + ], + [ + "cooler", + -14.583266258239746 + ], + [ + "▁dosa", + -14.58338737487793 + ], + [ + "▁Tso", + -14.58344268798828 + ], + [ + "▁Quar", + -14.583710670471191 + ], + [ + "▁sedate", + -14.583738327026367 + ], + [ + "Collaborating", + -14.583744049072266 + ], + [ + "Feasibility", + -14.583744049072266 + ], + [ + "VINTAGE", + -14.583744049072266 + ], + [ + "▁Aylesbury", + -14.583744049072266 + ], + [ + "▁Britannica", + -14.583744049072266 + ], + [ + "▁FINANCIAL", + -14.583744049072266 + ], + [ + "▁Incubator", + -14.583744049072266 + ], + [ + "▁JESUS", + -14.583744049072266 + ], + [ + "▁Jeopardy", + -14.583744049072266 + ], + [ + "▁Rapunzel", + -14.583744049072266 + ], + [ + "▁Serpentine", + -14.583744049072266 + ], + [ + "▁Sulphur", + -14.583744049072266 + ], + [ + "▁Tylenol", + -14.583744049072266 + ], + [ + "▁Willoughby", + -14.583744049072266 + ], + [ + "▁Winnebago", + -14.583744049072266 + ], + [ + "▁amygdala", + -14.583744049072266 + ], + [ + "▁geometries", + -14.583744049072266 + ], + [ + "▁perjury", + -14.583744049072266 + ], + [ + "▁sensuous", + -14.583744049072266 + ], + [ + "▁GRAMMY", + -14.583745002746582 + ], + [ + "▁KwaZulu", + -14.583745002746582 + ], + [ + "▁Therapies", + -14.583745002746582 + ], + [ + "▁Woolworths", + -14.583745002746582 + ], + [ + "▁enzymatic", + -14.583745002746582 + ], + [ + "▁Memorabilia", + -14.583745956420898 + ], + [ + "▁TRUTH", + -14.583745956420898 + ], + [ + "▁cytochrome", + -14.583745956420898 + ], + [ + "▁FUTURE", + -14.583746910095217 + ], + [ + "Showcasing", + -14.583747863769531 + ], + [ + "▁Polycarbonate", + -14.58375072479248 + ], + [ + "▁REGARD", + -14.583751678466797 + ], + [ + "▁grumbling", + -14.583751678466797 + ], + [ + "▁deodorize", + -14.583752632141112 + ], + [ + "▁Frugal", + -14.58375358581543 + ], + [ + "▁corporal", + -14.583754539489746 + ], + [ + "▁Tivoli", + -14.583757400512695 + ], + [ + "▁glories", + -14.583762168884276 + ], + [ + "▁urinal", + -14.583763122558594 + ], + [ + "▁washbasin", + -14.583765029907228 + ], + [ + "▁Beatrix", + -14.583767890930176 + ], + [ + "FULL", + -14.58377170562744 + ], + [ + "▁HBCU", + -14.58377170562744 + ], + [ + "▁tangential", + -14.583776473999023 + ], + [ + "▁locum", + -14.583778381347656 + ], + [ + "▁Bipolar", + -14.583779335021973 + ], + [ + "vinci", + -14.583788871765137 + ], + [ + "▁Zain", + -14.583795547485352 + ], + [ + "▁malaise", + -14.583795547485352 + ], + [ + "▁dragonfly", + -14.583797454833984 + ], + [ + "▁Entities", + -14.5837984085083 + ], + [ + "▁tempeh", + -14.583800315856934 + ], + [ + "▁chowder", + -14.583805084228516 + ], + [ + "▁floodlight", + -14.58380889892578 + ], + [ + "▁Fong", + -14.583812713623049 + ], + [ + "▁liquorice", + -14.58381462097168 + ], + [ + "▁bombshell", + -14.583822250366213 + ], + [ + "▁Diamondbacks", + -14.583823204040527 + ], + [ + "▁Wrapping", + -14.583833694458008 + ], + [ + "▁Gallipoli", + -14.583844184875488 + ], + [ + "▁craftspeople", + -14.583849906921388 + ], + [ + "▁Bodega", + -14.583868980407717 + ], + [ + "▁Interpreter", + -14.583870887756348 + ], + [ + "▁CCleaner", + -14.583877563476562 + ], + [ + "▁barstools", + -14.583901405334473 + ], + [ + "▁bioactive", + -14.583927154541016 + ], + [ + "glutamine", + -14.58393096923828 + ], + [ + "▁FaceBook", + -14.58394718170166 + ], + [ + "▁tinsel", + -14.583951950073242 + ], + [ + "▁Rockingham", + -14.58398723602295 + ], + [ + "Howdy", + -14.584038734436035 + 
], + [ + "▁sv", + -14.584043502807615 + ], + [ + "Mara", + -14.584071159362791 + ], + [ + "▁glitzy", + -14.584088325500488 + ], + [ + "▁Zoya", + -14.584091186523438 + ], + [ + "ffrey", + -14.584094047546388 + ], + [ + "▁Jetty", + -14.584114074707031 + ], + [ + "yyyy", + -14.584147453308104 + ], + [ + "▁resourcefulness", + -14.584153175354004 + ], + [ + "▁Veranda", + -14.584158897399902 + ], + [ + "▁Caddy", + -14.584174156188965 + ], + [ + "▁$5.5", + -14.584224700927734 + ], + [ + "▁combative", + -14.584237098693848 + ], + [ + "▁preloaded", + -14.584258079528809 + ], + [ + "▁Searches", + -14.584283828735352 + ], + [ + "▁2017/2018", + -14.584315299987791 + ], + [ + "▁Firebase", + -14.58431625366211 + ], + [ + "▁Handbags", + -14.584321022033691 + ], + [ + "▁Displaying", + -14.584393501281738 + ], + [ + "Wiz", + -14.58443832397461 + ], + [ + "▁colloidal", + -14.584470748901367 + ], + [ + "▁Lighter", + -14.584478378295898 + ], + [ + "representation", + -14.58450412750244 + ], + [ + "▁Hanne", + -14.584517478942873 + ], + [ + "▁Pacha", + -14.58457374572754 + ], + [ + "▁Elevated", + -14.584604263305664 + ], + [ + "▁Albertson", + -14.5846586227417 + ], + [ + "▁mowed", + -14.584670066833496 + ], + [ + "▁Dumas", + -14.584693908691406 + ], + [ + "Proficient", + -14.58470058441162 + ], + [ + "▁Laid", + -14.584702491760254 + ], + [ + "▁Watergate", + -14.584712982177734 + ], + [ + "zona", + -14.584716796875 + ], + [ + "Smaller", + -14.584746360778809 + ], + [ + "▁gills", + -14.584787368774414 + ], + [ + "Thur", + -14.584869384765623 + ], + [ + "▁caged", + -14.58492946624756 + ], + [ + "▁creaminess", + -14.584968566894531 + ], + [ + "▁Cardboard", + -14.585077285766602 + ], + [ + "▁Gadgets", + -14.585126876831056 + ], + [ + "▁twi", + -14.585187911987305 + ], + [ + "▁2028", + -14.585222244262695 + ], + [ + "publisher", + -14.585265159606934 + ], + [ + "▁Utd", + -14.585284233093262 + ], + [ + "caused", + -14.585312843322754 + ], + [ + "▁thrifty", + -14.585360527038574 + ], + [ + "▁paging", + -14.585376739501951 + ], + [ + "strophe", + -14.585453987121582 + ], + [ + "▁Shaq", + -14.585478782653809 + ], + [ + "efa", + -14.58550262451172 + ], + [ + "fad", + -14.58554458618164 + ], + [ + "▁Kristine", + -14.585577011108398 + ], + [ + "▁mediating", + -14.585623741149902 + ], + [ + "Curve", + -14.585640907287598 + ], + [ + "▁Schlu", + -14.585670471191406 + ], + [ + "▁liquidator", + -14.58571434020996 + ], + [ + "Dow", + -14.58571720123291 + ], + [ + "▁outweighed", + -14.585721015930176 + ], + [ + "counterintuitive", + -14.585737228393556 + ], + [ + "Dai", + -14.585755348205566 + ], + [ + "▁heaped", + -14.58576488494873 + ], + [ + "▁undercurrent", + -14.585878372192385 + ], + [ + "▁haggle", + -14.585938453674316 + ], + [ + "▁outperforming", + -14.585973739624023 + ], + [ + "synchronous", + -14.586008071899414 + ], + [ + "▁1/5", + -14.586008071899414 + ], + [ + "▁Goh", + -14.586055755615234 + ], + [ + "3/8", + -14.586138725280762 + ], + [ + "▁snowmen", + -14.586198806762695 + ], + [ + "▁DST", + -14.5862398147583 + ], + [ + "▁paneer", + -14.586246490478516 + ], + [ + "▁Hilo", + -14.586257934570312 + ], + [ + "▁landscapers", + -14.586310386657717 + ], + [ + "ULAR", + -14.586368560791016 + ], + [ + "Leste", + -14.586371421813965 + ], + [ + "▁headland", + -14.586397171020508 + ], + [ + "2:06", + -14.586416244506836 + ], + [ + "▁regenerated", + -14.586552619934082 + ], + [ + "EBIT", + -14.586607933044434 + ], + [ + "nef", + -14.586627006530762 + ], + [ + "▁Nadi", + -14.586715698242188 + ], + [ + "▁Punt", + 
-14.586726188659668 + ], + [ + "▁Helmut", + -14.586729049682615 + ], + [ + "Gua", + -14.586737632751465 + ], + [ + "▁Vihar", + -14.586847305297852 + ], + [ + "mede", + -14.586894989013672 + ], + [ + "▁stockists", + -14.586902618408203 + ], + [ + "oia", + -14.587125778198242 + ], + [ + "▁Visio", + -14.587207794189451 + ], + [ + "oscopic", + -14.587241172790527 + ], + [ + "oah", + -14.587440490722656 + ], + [ + "▁Kanna", + -14.587522506713867 + ], + [ + "TRIX", + -14.587611198425291 + ], + [ + "▁legalizing", + -14.587651252746582 + ], + [ + "cud", + -14.58768367767334 + ], + [ + "▁SAF", + -14.587711334228516 + ], + [ + "519", + -14.587735176086426 + ], + [ + "▁silencer", + -14.587841033935549 + ], + [ + "▁speedboat", + -14.587848663330078 + ], + [ + "programme", + -14.588082313537598 + ], + [ + "▁chipper", + -14.58813190460205 + ], + [ + "▁quietness", + -14.588263511657717 + ], + [ + "OVAL", + -14.588315963745115 + ], + [ + "▁manning", + -14.58837890625 + ], + [ + "▁homebuyer", + -14.588424682617188 + ], + [ + "▁Alina", + -14.588454246520996 + ], + [ + "1.11", + -14.58851146697998 + ], + [ + "▁plumes", + -14.588541984558104 + ], + [ + "▁1:5", + -14.588738441467283 + ], + [ + "▁Cowell", + -14.588859558105469 + ], + [ + "wayne", + -14.588860511779783 + ], + [ + "Plane", + -14.588909149169922 + ], + [ + "▁simmered", + -14.588973999023438 + ], + [ + "▁putative", + -14.588987350463867 + ], + [ + "Significant", + -14.588988304138184 + ], + [ + "Myers", + -14.58901309967041 + ], + [ + "programming", + -14.589018821716309 + ], + [ + "Sphere", + -14.589024543762209 + ], + [ + "▁Lorna", + -14.58906078338623 + ], + [ + "▁Welles", + -14.589103698730469 + ], + [ + "Zombie", + -14.589107513427734 + ], + [ + "Conversation", + -14.58911418914795 + ], + [ + "Prompt", + -14.589118003845217 + ], + [ + "propelled", + -14.589118003845217 + ], + [ + "Administrator", + -14.589120864868164 + ], + [ + "Philippine", + -14.58912181854248 + ], + [ + "treasure", + -14.58912181854248 + ], + [ + "Weird", + -14.58913803100586 + ], + [ + "southern", + -14.589157104492188 + ], + [ + "Wallace", + -14.589159965515137 + ], + [ + "Dirty", + -14.589163780212402 + ], + [ + "Cathy", + -14.58918571472168 + ], + [ + "stocking", + -14.589202880859377 + ], + [ + "▁$550", + -14.589226722717283 + ], + [ + "Respond", + -14.589239120483398 + ], + [ + "Vacation", + -14.589271545410156 + ], + [ + "LGBT", + -14.589387893676758 + ], + [ + "Silent", + -14.589405059814451 + ], + [ + "fowl", + -14.589409828186035 + ], + [ + "AFC", + -14.589418411254885 + ], + [ + "▁Sela", + -14.589459419250488 + ], + [ + "commissioned", + -14.589519500732422 + ], + [ + "tongue", + -14.589654922485352 + ], + [ + "▁Safely", + -14.589655876159668 + ], + [ + "▁Bursa", + -14.589666366577148 + ], + [ + "ensure", + -14.589698791503906 + ], + [ + "0/1", + -14.589725494384766 + ], + [ + "lyce", + -14.589747428894045 + ], + [ + "▁bassline", + -14.589767456054688 + ], + [ + "Carlos", + -14.589821815490724 + ], + [ + "▁SAU", + -14.589963912963867 + ], + [ + "▁tibia", + -14.589975357055664 + ], + [ + "▁Wrath", + -14.58997917175293 + ], + [ + "tipped", + -14.589991569519045 + ], + [ + "▁Reviewers", + -14.590084075927734 + ], + [ + "▁Arian", + -14.590087890625 + ], + [ + "behavioral", + -14.590118408203123 + ], + [ + "▁Ernesto", + -14.590126037597656 + ], + [ + "throwing", + -14.590171813964844 + ], + [ + "▁hunker", + -14.590171813964844 + ], + [ + "Importantly", + -14.590209007263184 + ], + [ + "avali", + -14.590221405029297 + ], + [ + "▁hilt", + -14.59026336669922 + ], + [ + 
"▁BMO", + -14.590280532836914 + ], + [ + "▁Deeper", + -14.590291023254396 + ], + [ + "ectin", + -14.590299606323242 + ], + [ + "▁Kola", + -14.590341567993164 + ], + [ + "▁theologians", + -14.59036636352539 + ], + [ + "Maxim", + -14.590435028076172 + ], + [ + "Assisted", + -14.590498924255373 + ], + [ + "▁gew", + -14.590514183044434 + ], + [ + "Advert", + -14.59053897857666 + ], + [ + "▁repute", + -14.59060764312744 + ], + [ + "▁Pressed", + -14.5906982421875 + ], + [ + "▁negates", + -14.59070110321045 + ], + [ + "Folks", + -14.590780258178713 + ], + [ + "▁remade", + -14.590822219848633 + ], + [ + "▁Goldie", + -14.59088897705078 + ], + [ + "Forever", + -14.590923309326172 + ], + [ + "▁pocketbook", + -14.590935707092283 + ], + [ + "coaster", + -14.591024398803713 + ], + [ + "0183;32;", + -14.591151237487791 + ], + [ + "icrodermabrasion", + -14.591151237487791 + ], + [ + "▁Epsilon", + -14.591151237487791 + ], + [ + "▁Fernandes", + -14.591151237487791 + ], + [ + "▁Khartoum", + -14.591151237487791 + ], + [ + "▁curiosities", + -14.591151237487791 + ], + [ + "▁gingivitis", + -14.591151237487791 + ], + [ + "▁mercenaries", + -14.591151237487791 + ], + [ + "▁narcolepsy", + -14.591151237487791 + ], + [ + "▁naysayers", + -14.591151237487791 + ], + [ + "▁provolone", + -14.591151237487791 + ], + [ + "▁swedish", + -14.591151237487791 + ], + [ + "▁thursday", + -14.591151237487791 + ], + [ + "▁Wainwright", + -14.59115219116211 + ], + [ + "▁abomination", + -14.59115219116211 + ], + [ + "▁systolic", + -14.59115219116211 + ], + [ + "GEORGE", + -14.591153144836426 + ], + [ + "▁GNOME", + -14.591153144836426 + ], + [ + "▁syntactic", + -14.591153144836426 + ], + [ + "▁Googling", + -14.591154098510742 + ], + [ + "▁corvette", + -14.591154098510742 + ], + [ + "▁conceivably", + -14.59115505218506 + ], + [ + "▁Biloxi", + -14.591156959533691 + ], + [ + "▁Kigali", + -14.591156959533691 + ], + [ + "▁divination", + -14.591156959533691 + ], + [ + "▁predation", + -14.591156959533691 + ], + [ + "▁olympic", + -14.591161727905272 + ], + [ + "▁Aleksandr", + -14.59116268157959 + ], + [ + "▁homogeniz", + -14.591163635253906 + ], + [ + "▁Roxbury", + -14.591166496276855 + ], + [ + "▁timbre", + -14.591168403625488 + ], + [ + "▁Gratuit", + -14.59117031097412 + ], + [ + "▁queried", + -14.59117031097412 + ], + [ + "▁Wycombe", + -14.591171264648438 + ], + [ + "▁TERMS", + -14.591172218322754 + ], + [ + "▁abject", + -14.591172218322754 + ], + [ + "▁Crabtree", + -14.59117317199707 + ], + [ + "▁Florentine", + -14.591174125671388 + ], + [ + "▁paramilitary", + -14.591174125671388 + ], + [ + "▁Berklee", + -14.591175079345703 + ], + [ + "▁Vivint", + -14.591180801391602 + ], + [ + "plegia", + -14.591181755065918 + ], + [ + "▁Palmyra", + -14.591181755065918 + ], + [ + "▁UMBC", + -14.591181755065918 + ], + [ + "▁Cranston", + -14.5911865234375 + ], + [ + "▁medicated", + -14.591196060180664 + ], + [ + "▁Dvd", + -14.591201782226562 + ], + [ + "▁polymerase", + -14.591215133666992 + ], + [ + "▁MIUI", + -14.59122085571289 + ], + [ + "▁holler", + -14.591222763061523 + ], + [ + "▁Kanban", + -14.591227531433104 + ], + [ + "▁rammed", + -14.591227531433104 + ], + [ + "▁Bosworth", + -14.591240882873535 + ], + [ + "▁mopping", + -14.591257095336914 + ], + [ + "▁scammed", + -14.591259956359863 + ], + [ + "▁Superbowl", + -14.591268539428713 + ], + [ + "▁faking", + -14.591282844543455 + ], + [ + "VX", + -14.591297149658203 + ], + [ + "▁Abercrombie", + -14.591299057006836 + ], + [ + "▁Kyrie", + -14.59132194519043 + ], + [ + "▁Illumination", + -14.591347694396973 + 
], + [ + "▁gander", + -14.591358184814451 + ], + [ + "▁scarier", + -14.591360092163086 + ], + [ + "britain", + -14.5913724899292 + ], + [ + "plasia", + -14.59139347076416 + ], + [ + "▁cnc", + -14.591397285461426 + ], + [ + "▁rainstorm", + -14.591423988342283 + ], + [ + "▁Carrington", + -14.591442108154297 + ], + [ + "0.11", + -14.591485023498535 + ], + [ + "▁Mosley", + -14.59151840209961 + ], + [ + "▁ONCE", + -14.591567993164062 + ], + [ + "▁polymeric", + -14.591604232788086 + ], + [ + "▁Considerations", + -14.59162425994873 + ], + [ + "▁MRSA", + -14.59162425994873 + ], + [ + "▁commendation", + -14.591629981994627 + ], + [ + "▁tiebreaker", + -14.591639518737791 + ], + [ + "hypersensitivity", + -14.591646194458008 + ], + [ + "ENTE", + -14.591691970825195 + ], + [ + "eeb", + -14.591703414916992 + ], + [ + "▁Bahru", + -14.591703414916992 + ], + [ + "▁headwinds", + -14.591703414916992 + ], + [ + "▁Nunes", + -14.59170913696289 + ], + [ + "BJP", + -14.59172534942627 + ], + [ + "▁PARIS", + -14.591726303100586 + ], + [ + "▁choreograph", + -14.59178352355957 + ], + [ + "▁deflated", + -14.591784477233888 + ], + [ + "fertility", + -14.591827392578123 + ], + [ + "▁Harbin", + -14.591830253601074 + ], + [ + "oho", + -14.591837882995604 + ], + [ + "WFP", + -14.591843605041504 + ], + [ + "▁Denison", + -14.59188461303711 + ], + [ + "▁OPPO", + -14.591915130615234 + ], + [ + "▁Throat", + -14.591925621032717 + ], + [ + "seek", + -14.591948509216309 + ], + [ + "▁Montag", + -14.591983795166016 + ], + [ + "▁Ervin", + -14.591998100280762 + ], + [ + "Mir", + -14.592010498046877 + ], + [ + "▁Hassel", + -14.59201431274414 + ], + [ + "xion", + -14.592033386230469 + ], + [ + "053", + -14.592060089111328 + ], + [ + "▁undermount", + -14.59206485748291 + ], + [ + "cile", + -14.592070579528809 + ], + [ + "▁Booklet", + -14.592113494873049 + ], + [ + "▁methodically", + -14.592161178588867 + ], + [ + "TAB", + -14.592188835144045 + ], + [ + "▁Superstore", + -14.592220306396484 + ], + [ + "Medica", + -14.592262268066406 + ], + [ + "Troy", + -14.592324256896973 + ], + [ + "riba", + -14.592382431030272 + ], + [ + "AAAAA", + -14.592394828796388 + ], + [ + "▁biohazard", + -14.59241008758545 + ], + [ + "▁1-2-3", + -14.592412948608398 + ], + [ + "▁newsworthy", + -14.592413902282717 + ], + [ + "039;", + -14.592424392700195 + ], + [ + "CHRIS", + -14.592443466186523 + ], + [ + "▁Proactive", + -14.592516899108888 + ], + [ + "▁Upright", + -14.592547416687012 + ], + [ + "▁JVM", + -14.59255313873291 + ], + [ + "▁KID", + -14.592621803283691 + ], + [ + "▁Solved", + -14.592631340026855 + ], + [ + "▁bodes", + -14.592668533325195 + ], + [ + "1967", + -14.592676162719728 + ], + [ + "ué", + -14.592697143554688 + ], + [ + "chette", + -14.592703819274902 + ], + [ + "pem", + -14.592764854431152 + ], + [ + "▁schoolhouse", + -14.592764854431152 + ], + [ + "▁TXT", + -14.59278392791748 + ], + [ + "▁prioritised", + -14.592820167541504 + ], + [ + "▁Fabri", + -14.592850685119627 + ], + [ + "▁Baseline", + -14.592883110046388 + ], + [ + "▁Repairing", + -14.592958450317385 + ], + [ + "▁McEl", + -14.592968940734863 + ], + [ + "▁radiological", + -14.59300422668457 + ], + [ + "Wade", + -14.593016624450684 + ], + [ + "anity", + -14.59303379058838 + ], + [ + "▁Minerva", + -14.59303855895996 + ], + [ + "▁replenishing", + -14.593064308166504 + ], + [ + "▁Emo", + -14.59306526184082 + ], + [ + "Advertise", + -14.593080520629885 + ], + [ + "▁suspenseful", + -14.59317684173584 + ], + [ + "Mask", + -14.593188285827637 + ], + [ + "▁attentively", + -14.593188285827637 + ], 
+ [ + "flyer", + -14.593189239501951 + ], + [ + "▁purged", + -14.593225479125977 + ], + [ + "panic", + -14.593268394470217 + ], + [ + "▁heartless", + -14.593273162841797 + ], + [ + "zioni", + -14.593318939208984 + ], + [ + "▁talkative", + -14.593351364135742 + ], + [ + "▁Derm", + -14.593364715576172 + ], + [ + "IHS", + -14.593417167663574 + ], + [ + "▁Manuka", + -14.59343433380127 + ], + [ + "Classical", + -14.593453407287598 + ], + [ + "usb", + -14.593481063842772 + ], + [ + "▁TLR", + -14.593504905700684 + ], + [ + "▁27.5", + -14.59351921081543 + ], + [ + "▁Coward", + -14.593528747558594 + ], + [ + "▁preemptive", + -14.593631744384766 + ], + [ + "▁Osteopath", + -14.593653678894045 + ], + [ + "▁11:5", + -14.59365463256836 + ], + [ + "▁Marquee", + -14.593762397766112 + ], + [ + "▁undoing", + -14.593783378601074 + ], + [ + "▁overwritten", + -14.59382152557373 + ], + [ + "44)", + -14.593847274780272 + ], + [ + "▁USAF", + -14.593960762023926 + ], + [ + "TICS", + -14.594000816345217 + ], + [ + "▁Akira", + -14.594054222106934 + ], + [ + "yaz", + -14.594117164611816 + ], + [ + "graphical", + -14.59414291381836 + ], + [ + "▁RTF", + -14.594156265258787 + ], + [ + "1943", + -14.594165802001951 + ], + [ + "ksh", + -14.594185829162598 + ], + [ + "▁Magee", + -14.594278335571287 + ], + [ + "▁moveable", + -14.594311714172363 + ], + [ + "Dealer", + -14.594354629516602 + ], + [ + "▁TAI", + -14.594362258911133 + ], + [ + "BAT", + -14.59438133239746 + ], + [ + "▁$115", + -14.594388961791992 + ], + [ + "▁personified", + -14.594390869140623 + ], + [ + "▁Forming", + -14.59442138671875 + ], + [ + "Juan", + -14.59444808959961 + ], + [ + "iowa", + -14.594473838806152 + ], + [ + "▁Yip", + -14.594481468200684 + ], + [ + "Ingredient", + -14.594491004943848 + ], + [ + "4-19", + -14.594499588012695 + ], + [ + "▁AUM", + -14.594732284545898 + ], + [ + "▁Crain", + -14.594754219055176 + ], + [ + "▁Guang", + -14.594768524169922 + ], + [ + "▁AAU", + -14.594770431518556 + ], + [ + "Levitt", + -14.594792366027832 + ], + [ + "▁Helpers", + -14.594799995422363 + ], + [ + "▁Rupees", + -14.594843864440918 + ], + [ + "▁choker", + -14.59485149383545 + ], + [ + "▁Aung", + -14.594862937927246 + ], + [ + "▁Refreshing", + -14.594886779785156 + ], + [ + "▁roofline", + -14.594910621643066 + ], + [ + "▁waterpark", + -14.594911575317385 + ], + [ + "▁Supp", + -14.594918251037598 + ], + [ + "▁braced", + -14.594929695129396 + ], + [ + "▁Angelic", + -14.594938278198242 + ], + [ + "Mouse", + -14.594953536987305 + ], + [ + "▁08:00", + -14.594995498657228 + ], + [ + "▁Jahr", + -14.595013618469238 + ], + [ + "▁Lice", + -14.595014572143556 + ], + [ + "MRF", + -14.595206260681152 + ], + [ + "INNER", + -14.595240592956545 + ], + [ + "▁Sharpie", + -14.59524154663086 + ], + [ + "▁Provisional", + -14.59524917602539 + ], + [ + "paris", + -14.595259666442873 + ], + [ + "IBS", + -14.595324516296388 + ], + [ + "lja", + -14.595380783081056 + ], + [ + "▁Kirch", + -14.595524787902832 + ], + [ + "▁Gogo", + -14.595528602600098 + ], + [ + "wx", + -14.595702171325684 + ], + [ + "▁Kham", + -14.59571647644043 + ], + [ + "▁Pasha", + -14.595723152160645 + ], + [ + "vata", + -14.59572982788086 + ], + [ + "▁jihadists", + -14.59582805633545 + ], + [ + "idoo", + -14.595909118652344 + ], + [ + "immel", + -14.596067428588867 + ], + [ + "▁Bij", + -14.596146583557127 + ], + [ + "ccompanying", + -14.596226692199709 + ], + [ + "vary", + -14.59624481201172 + ], + [ + "▁sportsbooks", + -14.596271514892578 + ], + [ + "▁UML", + -14.596277236938477 + ], + [ + "▁sleeker", + 
-14.596332550048828 + ], + [ + "oozy", + -14.59640121459961 + ], + [ + "positioned", + -14.59645175933838 + ], + [ + "fron", + -14.59645652770996 + ], + [ + "▁10-14", + -14.59653377532959 + ], + [ + "▁$750,000", + -14.596540451049805 + ], + [ + "▁sociopath", + -14.596623420715332 + ], + [ + "▁FRE", + -14.596705436706545 + ], + [ + "▁Whitehouse", + -14.596725463867188 + ], + [ + "BOARD", + -14.596734046936035 + ], + [ + "▁Sprinter", + -14.596742630004885 + ], + [ + "Creativity", + -14.59675407409668 + ], + [ + "Depth", + -14.596756935119627 + ], + [ + "SAA", + -14.596757888793944 + ], + [ + "Facing", + -14.596760749816896 + ], + [ + "fantasy", + -14.596773147583008 + ], + [ + "chicago", + -14.596796989440918 + ], + [ + "conforming", + -14.596807479858398 + ], + [ + "Dominic", + -14.596823692321776 + ], + [ + "nour", + -14.596881866455078 + ], + [ + "▁Biosciences", + -14.596881866455078 + ], + [ + "hardware", + -14.596935272216797 + ], + [ + "▁Mingle", + -14.5969820022583 + ], + [ + "▁nodules", + -14.596982955932615 + ], + [ + "announced", + -14.59699821472168 + ], + [ + "DOM", + -14.597020149230955 + ], + [ + "Travelling", + -14.597023963928224 + ], + [ + "▁hoarder", + -14.597027778625488 + ], + [ + "3-24", + -14.597036361694336 + ], + [ + "Beginner", + -14.597073554992676 + ], + [ + "Movement", + -14.597074508666992 + ], + [ + "Determin", + -14.597105026245115 + ], + [ + "▁Hain", + -14.597118377685549 + ], + [ + "essler", + -14.597132682800291 + ], + [ + "▁Agio", + -14.59720516204834 + ], + [ + "timber", + -14.59723949432373 + ], + [ + "▁Highlander", + -14.597264289855955 + ], + [ + "Jackie", + -14.597284317016602 + ], + [ + "diverse", + -14.597302436828612 + ], + [ + "▁Xia", + -14.59736156463623 + ], + [ + "whelming", + -14.597379684448242 + ], + [ + "▁slipcover", + -14.597400665283203 + ], + [ + "▁Blob", + -14.597406387329102 + ], + [ + "rland", + -14.597490310668944 + ], + [ + "▁videogames", + -14.597617149353027 + ], + [ + "▁touts", + -14.597620964050291 + ], + [ + "Energi", + -14.597700119018556 + ], + [ + "▁showerhead", + -14.59770393371582 + ], + [ + "▁Sku", + -14.597723960876465 + ], + [ + "esophageal", + -14.597813606262209 + ], + [ + "Supporters", + -14.597814559936523 + ], + [ + "brooke", + -14.597820281982422 + ], + [ + "seminar", + -14.597901344299316 + ], + [ + "ulum", + -14.59790325164795 + ], + [ + "holi", + -14.597978591918944 + ], + [ + "▁PVD", + -14.59801959991455 + ], + [ + "▁faultless", + -14.59808349609375 + ], + [ + "▁ADB", + -14.598151206970217 + ], + [ + "▁$0.5", + -14.59815502166748 + ], + [ + "idin", + -14.598318099975586 + ], + [ + "▁genie", + -14.598359107971191 + ], + [ + "▁petro", + -14.598406791687012 + ], + [ + "▁Tariq", + -14.598430633544922 + ], + [ + "▁Malaya", + -14.598448753356934 + ], + [ + "▁complimenting", + -14.598480224609377 + ], + [ + "Laurie", + -14.59848976135254 + ], + [ + "Coaching", + -14.5985107421875 + ], + [ + "▁biannual", + -14.59851360321045 + ], + [ + "▁octagon", + -14.59852123260498 + ], + [ + "▁Ciudad", + -14.598613739013672 + ], + [ + "▁Cuyahoga", + -14.598613739013672 + ], + [ + "▁Ellsworth", + -14.598613739013672 + ], + [ + "▁Gynecology", + -14.598613739013672 + ], + [ + "▁Tragedy", + -14.598613739013672 + ], + [ + "▁appendices", + -14.598613739013672 + ], + [ + "▁cauldron", + -14.598613739013672 + ], + [ + "▁industrious", + -14.598613739013672 + ], + [ + "▁interoperable", + -14.598613739013672 + ], + [ + "▁perceiving", + -14.598613739013672 + ], + [ + "▁phosphorylation", + -14.598613739013672 + ], + [ + "▁suffocating", + 
-14.598613739013672 + ], + [ + "▁superstitious", + -14.598613739013672 + ], + [ + "▁nashville", + -14.598614692687988 + ], + [ + "congruent", + -14.598615646362305 + ], + [ + "▁Laxmi", + -14.598615646362305 + ], + [ + "▁Smadav", + -14.598615646362305 + ], + [ + "▁delineation", + -14.598615646362305 + ], + [ + "Khan", + -14.59861660003662 + ], + [ + "Glaucoma", + -14.598621368408203 + ], + [ + "▁displacing", + -14.598621368408203 + ], + [ + "▁Beasley", + -14.598625183105469 + ], + [ + "▁disintegration", + -14.598625183105469 + ], + [ + "▁prosecco", + -14.598627090454102 + ], + [ + "▁Pesticide", + -14.598628997802734 + ], + [ + "▁modulus", + -14.598638534545898 + ], + [ + "▁Vallejo", + -14.59865379333496 + ], + [ + "▁doorknob", + -14.598655700683594 + ], + [ + "▁ionization", + -14.598661422729492 + ], + [ + "▁subcategories", + -14.598663330078123 + ], + [ + "▁Burrell", + -14.598670959472656 + ], + [ + "▁unaffordable", + -14.598679542541504 + ], + [ + "▁Nashua", + -14.59868049621582 + ], + [ + "▁Castello", + -14.598681449890137 + ], + [ + "▁FOIA", + -14.5986909866333 + ], + [ + "▁DEATH", + -14.5986967086792 + ], + [ + "▁Fuego", + -14.598709106445312 + ], + [ + "▁autonomic", + -14.598711967468262 + ], + [ + "▁Deakin", + -14.598722457885742 + ], + [ + "▁Marxism", + -14.59874153137207 + ], + [ + "▁Trau", + -14.598758697509766 + ], + [ + "▁disused", + -14.598773956298828 + ], + [ + "▁Messina", + -14.598817825317385 + ], + [ + "▁Flagship", + -14.598840713500977 + ], + [ + "alinga", + -14.598848342895508 + ], + [ + "▁Langdon", + -14.598855018615724 + ], + [ + "▁retroactively", + -14.59885597229004 + ], + [ + "▁Feline", + -14.598859786987305 + ], + [ + "▁Enbridge", + -14.598883628845217 + ], + [ + "▁Chilton", + -14.598891258239746 + ], + [ + "▁Preet", + -14.598892211914062 + ], + [ + "▁chairwoman", + -14.598894119262695 + ], + [ + "▁traffickers", + -14.598905563354492 + ], + [ + "▁Spoken", + -14.59892749786377 + ], + [ + "3.7%", + -14.598933219909668 + ], + [ + "▁Gaiman", + -14.598957061767578 + ], + [ + "▁Octane", + -14.598976135253906 + ], + [ + "umbra", + -14.599081993103027 + ], + [ + "raff", + -14.59908390045166 + ], + [ + "HIT", + -14.599102020263672 + ], + [ + "▁fashionistas", + -14.599140167236328 + ], + [ + "▁GNSS", + -14.59914207458496 + ], + [ + "▁repeatability", + -14.599238395690918 + ], + [ + "Url", + -14.5992431640625 + ], + [ + "▁Combe", + -14.599263191223145 + ], + [ + "▁leptin", + -14.599278450012209 + ], + [ + "singing", + -14.599287033081056 + ], + [ + "▁Elgar", + -14.599308967590332 + ], + [ + "Blazing", + -14.59931182861328 + ], + [ + "▁Endgame", + -14.599319458007812 + ], + [ + "▁Foyer", + -14.59933090209961 + ], + [ + "▁Susanne", + -14.59934425354004 + ], + [ + "▁BlackRock", + -14.599367141723633 + ], + [ + "▁Stationary", + -14.599376678466797 + ], + [ + "▁Caffe", + -14.59938144683838 + ], + [ + "Gillingham", + -14.599421501159668 + ], + [ + "▁Kirin", + -14.599453926086426 + ], + [ + "▁snowshoeing", + -14.599482536315918 + ], + [ + "▁baseboards", + -14.599510192871094 + ], + [ + "▁TCA", + -14.599529266357422 + ], + [ + "▁Teamwork", + -14.59954833984375 + ], + [ + "rational", + -14.599746704101562 + ], + [ + "▁dino", + -14.599774360656738 + ], + [ + "▁NRS", + -14.599780082702637 + ], + [ + "Understandably", + -14.599790573120115 + ], + [ + "▁rehabilitated", + -14.599807739257812 + ], + [ + "araj", + -14.599809646606444 + ], + [ + "▁Powerball", + -14.599851608276367 + ], + [ + "eighth", + -14.599870681762695 + ], + [ + "▁xmas", + -14.59992790222168 + ], + [ + "nosis", + 
-14.599977493286133 + ], + [ + "▁conjured", + -14.59999942779541 + ], + [ + "10:30", + -14.600014686584473 + ], + [ + "▁grafted", + -14.60003662109375 + ], + [ + "▁Raffles", + -14.600043296813965 + ], + [ + "KON", + -14.600089073181152 + ], + [ + "3-17", + -14.600127220153809 + ], + [ + "▁underweight", + -14.600142478942873 + ], + [ + "▁predominate", + -14.6001558303833 + ], + [ + "1962", + -14.60016632080078 + ], + [ + "▁Schei", + -14.600173950195312 + ], + [ + "▁Orbital", + -14.60021686553955 + ], + [ + "▁Silber", + -14.600249290466309 + ], + [ + "OCC", + -14.600278854370115 + ], + [ + "▁Millar", + -14.600282669067385 + ], + [ + "ativity", + -14.600321769714355 + ], + [ + "HELP", + -14.600408554077148 + ], + [ + "▁PLANT", + -14.600408554077148 + ], + [ + "▁Clima", + -14.600411415100098 + ], + [ + "NLP", + -14.60049057006836 + ], + [ + "HCC", + -14.600512504577637 + ], + [ + "▁knitters", + -14.60063934326172 + ], + [ + "▁Freeware", + -14.600754737854004 + ], + [ + "▁upsell", + -14.600811958312988 + ], + [ + "▁downtrend", + -14.600820541381836 + ], + [ + "▁subsea", + -14.600863456726074 + ], + [ + "▁#17", + -14.60086441040039 + ], + [ + "▁Unlocking", + -14.600869178771973 + ], + [ + "▁dispensation", + -14.6008882522583 + ], + [ + "$40", + -14.600942611694336 + ], + [ + "Hate", + -14.600964546203612 + ], + [ + "▁Yellen", + -14.60098934173584 + ], + [ + "Spam", + -14.60102081298828 + ], + [ + "▁Roque", + -14.601059913635254 + ], + [ + "▁anarchy", + -14.601078033447266 + ], + [ + "cracy", + -14.601104736328123 + ], + [ + "▁Braden", + -14.601120948791504 + ], + [ + "tracker", + -14.601134300231934 + ], + [ + "▁Talon", + -14.601158142089844 + ], + [ + "poort", + -14.601168632507324 + ], + [ + "▁pathos", + -14.60119342803955 + ], + [ + "faction", + -14.60122013092041 + ], + [ + "▁$74", + -14.601242065429688 + ], + [ + "holland", + -14.601245880126951 + ], + [ + "kj", + -14.601354598999023 + ], + [ + "=””", + -14.601396560668944 + ], + [ + "▁tableau", + -14.6014986038208 + ], + [ + "▁korean", + -14.601585388183594 + ], + [ + "attu", + -14.6016206741333 + ], + [ + "▁Letterman", + -14.60177516937256 + ], + [ + "▁Expat", + -14.601786613464355 + ], + [ + "▁Groot", + -14.601819038391112 + ], + [ + "▁Gulch", + -14.60188102722168 + ], + [ + "▁Wrench", + -14.601898193359377 + ], + [ + "Este", + -14.601901054382324 + ], + [ + "▁Foothills", + -14.601906776428224 + ], + [ + "▁dramatis", + -14.6019926071167 + ], + [ + "▁litigants", + -14.602004051208496 + ], + [ + "▁UIC", + -14.602031707763672 + ], + [ + "▁enchilada", + -14.60204792022705 + ], + [ + "▁nymph", + -14.602079391479492 + ], + [ + "tome", + -14.60218906402588 + ], + [ + "▁Capa", + -14.602194786071776 + ], + [ + "▁glyco", + -14.602210998535156 + ], + [ + "resourced", + -14.60231876373291 + ], + [ + "▁Tsai", + -14.60242748260498 + ], + [ + "dimension", + -14.602437019348145 + ], + [ + "▁plastering", + -14.602537155151367 + ], + [ + "▁Bombers", + -14.602643966674805 + ], + [ + "▁microsite", + -14.602656364440918 + ], + [ + "▁19:1", + -14.602723121643066 + ], + [ + "▁Tye", + -14.602734565734863 + ], + [ + "landia", + -14.602758407592772 + ], + [ + "▁wisest", + -14.60277271270752 + ], + [ + "MASTER", + -14.602801322937012 + ], + [ + "▁Divider", + -14.602934837341309 + ], + [ + "▁fut", + -14.60293674468994 + ], + [ + "OPT", + -14.602970123291016 + ], + [ + "phar", + -14.603164672851562 + ], + [ + "Esteem", + -14.603196144104004 + ], + [ + "▁Maho", + -14.6032075881958 + ], + [ + "collect", + -14.603238105773926 + ], + [ + "▁aristocrat", + 
-14.603306770324709 + ], + [ + "lectromechanical", + -14.603355407714844 + ], + [ + "▁LEADER", + -14.603437423706056 + ], + [ + "▁crafter", + -14.60348129272461 + ], + [ + "▁foursome", + -14.603521347045898 + ], + [ + "▁Chern", + -14.60352611541748 + ], + [ + "▁Wain", + -14.60352897644043 + ], + [ + "Effortless", + -14.60354995727539 + ], + [ + "▁Plow", + -14.603562355041504 + ], + [ + "▁Mord", + -14.60364818572998 + ], + [ + "Technica", + -14.603771209716797 + ], + [ + "DDR", + -14.603787422180176 + ], + [ + "▁Pesto", + -14.603789329528809 + ], + [ + "▁Ruin", + -14.603822708129885 + ], + [ + "wonder", + -14.603894233703612 + ], + [ + "▁WOD", + -14.603963851928713 + ], + [ + "▁Tubing", + -14.604012489318848 + ], + [ + "ESPN", + -14.604050636291504 + ], + [ + "2200", + -14.604071617126465 + ], + [ + "pulsion", + -14.60414218902588 + ], + [ + "deploy", + -14.604196548461914 + ], + [ + "opera", + -14.604325294494627 + ], + [ + "ewicz", + -14.60433292388916 + ], + [ + "▁$0.4", + -14.604345321655272 + ], + [ + "ometri", + -14.60438632965088 + ], + [ + "1:27", + -14.604410171508787 + ], + [ + "Opinion", + -14.60443115234375 + ], + [ + "Eligibility", + -14.6044340133667 + ], + [ + "Ceramic", + -14.604439735412598 + ], + [ + "Difficult", + -14.604449272155762 + ], + [ + "Verizon", + -14.604458808898926 + ], + [ + "Nicholas", + -14.60446071624756 + ], + [ + "transmission", + -14.604461669921877 + ], + [ + "musik", + -14.60446548461914 + ], + [ + "Harvey", + -14.604480743408203 + ], + [ + "RSVP", + -14.604483604431152 + ], + [ + "▁inexpensively", + -14.604541778564451 + ], + [ + "HCS", + -14.604558944702148 + ], + [ + "▁Omer", + -14.604558944702148 + ], + [ + "Remain", + -14.604631423950195 + ], + [ + "Contrast", + -14.60463809967041 + ], + [ + "laminate", + -14.604681015014648 + ], + [ + "elius", + -14.604701042175291 + ], + [ + "Personalized", + -14.604706764221191 + ], + [ + "Fundamentally", + -14.604707717895508 + ], + [ + "ventura", + -14.604726791381836 + ], + [ + "CHEN", + -14.60474681854248 + ], + [ + "Fiction", + -14.60478401184082 + ], + [ + "Clever", + -14.60483455657959 + ], + [ + "▁defini", + -14.604952812194824 + ], + [ + "Piano", + -14.604966163635254 + ], + [ + "attended", + -14.604970932006836 + ], + [ + "▁tuba", + -14.604970932006836 + ], + [ + "CHER", + -14.60503387451172 + ], + [ + "Nord", + -14.605042457580566 + ], + [ + "giu", + -14.60505199432373 + ], + [ + "Specification", + -14.605059623718262 + ], + [ + "▁chrono", + -14.605108261108398 + ], + [ + "1.14", + -14.60511302947998 + ], + [ + "4.6%", + -14.60512638092041 + ], + [ + "expo", + -14.605165481567385 + ], + [ + "▁$650", + -14.605209350585938 + ], + [ + "starring", + -14.605216979980469 + ], + [ + "outstanding", + -14.60522174835205 + ], + [ + "gazette", + -14.605391502380373 + ], + [ + "eglazing", + -14.605420112609863 + ], + [ + "▁dishonor", + -14.60545253753662 + ], + [ + "▁Erich", + -14.605545997619627 + ], + [ + "Arabic", + -14.605594635009766 + ], + [ + "▁Hora", + -14.605695724487305 + ], + [ + "amour", + -14.605758666992188 + ], + [ + "▁Intercom", + -14.605782508850098 + ], + [ + "▁30-35", + -14.60581398010254 + ], + [ + "▁Trai", + -14.605843544006348 + ], + [ + "Blogger", + -14.60584831237793 + ], + [ + "▁blanch", + -14.605862617492676 + ], + [ + "1,2", + -14.605878829956056 + ], + [ + "HMC", + -14.605917930603027 + ], + [ + "ekar", + -14.605955123901367 + ], + [ + "▁prescient", + -14.606019973754885 + ], + [ + "ielle", + -14.60603141784668 + ], + [ + "▁frontrunner", + -14.606119155883787 + ], + [ + "▁Celesti", + 
-14.606121063232422 + ], + [ + "Musculoskeletal", + -14.60613250732422 + ], + [ + "Testimony", + -14.60613250732422 + ], + [ + "▁Bautista", + -14.60613250732422 + ], + [ + "▁Canaveral", + -14.60613250732422 + ], + [ + "▁Djibouti", + -14.60613250732422 + ], + [ + "▁Eldorado", + -14.60613250732422 + ], + [ + "▁Guadalajara", + -14.60613250732422 + ], + [ + "▁Hermione", + -14.60613250732422 + ], + [ + "▁Lethbridge", + -14.60613250732422 + ], + [ + "▁Mukherjee", + -14.60613250732422 + ], + [ + "▁Svalbard", + -14.60613250732422 + ], + [ + "▁Tabernacle", + -14.60613250732422 + ], + [ + "▁beautification", + -14.60613250732422 + ], + [ + "▁coercion", + -14.60613250732422 + ], + [ + "▁fajita", + -14.60613250732422 + ], + [ + "▁hgtv", + -14.60613250732422 + ], + [ + "▁igniting", + -14.60613250732422 + ], + [ + "▁inconclusive", + -14.60613250732422 + ], + [ + "▁rebuttal", + -14.60613250732422 + ], + [ + "▁sanitizing", + -14.60613250732422 + ], + [ + "▁secretarial", + -14.60613250732422 + ], + [ + "▁tyrosine", + -14.60613250732422 + ], + [ + "▁undesired", + -14.60613250732422 + ], + [ + "▁Frisbee", + -14.606133460998535 + ], + [ + "▁México", + -14.606133460998535 + ], + [ + "▁Othello", + -14.606133460998535 + ], + [ + "▁infallible", + -14.606133460998535 + ], + [ + "▁postponing", + -14.606133460998535 + ], + [ + "▁qualms", + -14.606133460998535 + ], + [ + "▁Simmonds", + -14.606134414672852 + ], + [ + "▁sartorial", + -14.606134414672852 + ], + [ + "▁Allegro", + -14.606135368347168 + ], + [ + "▁Territorial", + -14.606135368347168 + ], + [ + "▁fastidious", + -14.606138229370115 + ], + [ + "▁Beneficial", + -14.606142044067385 + ], + [ + "▁deterrence", + -14.606142044067385 + ], + [ + "▁electrolysis", + -14.606142044067385 + ], + [ + "▁quibble", + -14.606143951416016 + ], + [ + "▁Micheal", + -14.606146812438965 + ], + [ + "▁£200,000", + -14.60614776611328 + ], + [ + "▁JAMES", + -14.606151580810549 + ], + [ + "▁Yolanda", + -14.606152534484863 + ], + [ + "▁aureus", + -14.60615348815918 + ], + [ + "▁stench", + -14.606157302856444 + ], + [ + "▁riparian", + -14.606159210205078 + ], + [ + "▁Dietetic", + -14.606161117553713 + ], + [ + "▁Kershaw", + -14.606162071228027 + ], + [ + "▁Sublime", + -14.60616397857666 + ], + [ + "▁Geophysical", + -14.606165885925291 + ], + [ + "▁brazilian", + -14.606165885925291 + ], + [ + "▁CANADA", + -14.606175422668455 + ], + [ + "▁PRACTI", + -14.60617733001709 + ], + [ + "▁optometry", + -14.60617733001709 + ], + [ + "▁Oliveira", + -14.606195449829102 + ], + [ + "▁12.6", + -14.606196403503418 + ], + [ + "▁PageRank", + -14.606213569641112 + ], + [ + "▁Travers", + -14.606250762939451 + ], + [ + "▁Nimble", + -14.606263160705566 + ], + [ + "▁Stallone", + -14.606266975402832 + ], + [ + "▁Bonsai", + -14.60628604888916 + ], + [ + "▁Liars", + -14.606303215026855 + ], + [ + "▁SNS", + -14.606307983398438 + ], + [ + "▁Milligan", + -14.60630989074707 + ], + [ + "00000000", + -14.60633659362793 + ], + [ + "▁PUMA", + -14.60633945465088 + ], + [ + "▁Reece", + -14.606352806091309 + ], + [ + "▁Sangha", + -14.606362342834473 + ], + [ + "▁Greenwald", + -14.606364250183104 + ], + [ + "▁Dahlia", + -14.606380462646484 + ], + [ + "▁seahorse", + -14.60640811920166 + ], + [ + "▁Napoleonic", + -14.606411933898926 + ], + [ + "▁Carving", + -14.60643482208252 + ], + [ + "▁prebiotic", + -14.606485366821287 + ], + [ + "▁disbursed", + -14.606493949890137 + ], + [ + "▁beagle", + -14.606497764587402 + ], + [ + "▁Nozzle", + -14.606513023376465 + ], + [ + "▁Meli", + -14.60651683807373 + ], + [ + "▁northernmost", + 
-14.606563568115234 + ], + [ + "▁Lomond", + -14.606585502624512 + ], + [ + "▁Dauphin", + -14.606586456298828 + ], + [ + "▁Freiburg", + -14.60660171508789 + ], + [ + "▁19:00", + -14.606605529785156 + ], + [ + "▁TDI", + -14.60662078857422 + ], + [ + "▁precondition", + -14.606629371643066 + ], + [ + "▁Rij", + -14.606630325317385 + ], + [ + "▁Lewisham", + -14.606640815734863 + ], + [ + "▁ridesharing", + -14.606644630432127 + ], + [ + "22-23", + -14.606672286987305 + ], + [ + "▁Stitches", + -14.606685638427734 + ], + [ + "▁Selector", + -14.60670280456543 + ], + [ + "▁Shuffle", + -14.606754302978516 + ], + [ + "CTL", + -14.60676383972168 + ], + [ + "▁muddled", + -14.606821060180664 + ], + [ + "Pun", + -14.606844902038574 + ], + [ + "▁.500", + -14.606870651245115 + ], + [ + "Qaida", + -14.606881141662598 + ], + [ + "ficial", + -14.606928825378418 + ], + [ + "▁13:1", + -14.607110977172852 + ], + [ + "▁VIEWS", + -14.607118606567385 + ], + [ + "▁Vikas", + -14.607126235961914 + ], + [ + "▁Refined", + -14.607179641723633 + ], + [ + "▁Futon", + -14.607226371765137 + ], + [ + "RIDGE", + -14.60726833343506 + ], + [ + "KAN", + -14.607292175292969 + ], + [ + "FEA", + -14.607306480407717 + ], + [ + "▁Eames", + -14.607324600219728 + ], + [ + "▁telepathic", + -14.607332229614258 + ], + [ + "CFS", + -14.607403755187988 + ], + [ + "▁biotin", + -14.607406616210938 + ], + [ + "▁Petrov", + -14.607413291931152 + ], + [ + "Specify", + -14.607423782348633 + ], + [ + "Lightly", + -14.607460021972656 + ], + [ + "TSL", + -14.607505798339844 + ], + [ + "▁$84", + -14.60752010345459 + ], + [ + "▁disclaim", + -14.607523918151855 + ], + [ + "▁willfully", + -14.607526779174805 + ], + [ + "▁docile", + -14.60753345489502 + ], + [ + "▁Scarpe", + -14.60764503479004 + ], + [ + "▁Puente", + -14.60771942138672 + ], + [ + "onte", + -14.607820510864258 + ], + [ + "▁Ponce", + -14.60787582397461 + ], + [ + "punctu", + -14.607876777648926 + ], + [ + "politan", + -14.607894897460938 + ], + [ + "▁kya", + -14.607943534851074 + ], + [ + "6-18", + -14.60796070098877 + ], + [ + "▁Corsica", + -14.607998847961426 + ], + [ + "Vita", + -14.608025550842283 + ], + [ + "▁1060", + -14.6080322265625 + ], + [ + "▁Commissioned", + -14.60805320739746 + ], + [ + "▁Salva", + -14.608132362365724 + ], + [ + "▁Fairway", + -14.608156204223633 + ], + [ + "schein", + -14.608235359191896 + ], + [ + "▁satu", + -14.608243942260742 + ], + [ + "▁Antonin", + -14.608322143554688 + ], + [ + "▁Westgate", + -14.608351707458496 + ], + [ + "arid", + -14.608359336853027 + ], + [ + "▁Engle", + -14.608388900756836 + ], + [ + "▁sunblock", + -14.608427047729492 + ], + [ + "▁(183", + -14.608442306518556 + ], + [ + "Passing", + -14.608470916748049 + ], + [ + "GAT", + -14.608516693115234 + ], + [ + "▁Batu", + -14.608529090881348 + ], + [ + "▁Mahan", + -14.608577728271484 + ], + [ + "▁EDTA", + -14.60862922668457 + ], + [ + "▁fete", + -14.608790397644045 + ], + [ + "carboxyl", + -14.608808517456056 + ], + [ + "▁blinked", + -14.60888671875 + ], + [ + ".5%)", + -14.608887672424316 + ], + [ + "▁Totem", + -14.60904312133789 + ], + [ + "▁Quintet", + -14.609227180480955 + ], + [ + "▁Jokes", + -14.609240531921388 + ], + [ + "elma", + -14.609304428100586 + ], + [ + "▁HRS", + -14.60935878753662 + ], + [ + "▁Kendal", + -14.609374046325684 + ], + [ + "▁darkroom", + -14.609376907348633 + ], + [ + "▁monographs", + -14.609395027160645 + ], + [ + "Medline", + -14.609469413757324 + ], + [ + "elek", + -14.609539031982422 + ], + [ + "▁Nanda", + -14.609541893005373 + ], + [ + "▁eels", + 
-14.609543800354004 + ], + [ + "▁Madera", + -14.60954475402832 + ], + [ + "▁outnumbered", + -14.60956859588623 + ], + [ + "▁haircare", + -14.609599113464355 + ], + [ + "Bern", + -14.609615325927734 + ], + [ + "elegant", + -14.609626770019531 + ], + [ + "yles", + -14.609663963317873 + ], + [ + "▁DPP", + -14.60968017578125 + ], + [ + "▁motorcyclist", + -14.609711647033691 + ], + [ + "▁designee", + -14.609743118286133 + ], + [ + "lassen", + -14.609760284423828 + ], + [ + "▁upswing", + -14.609769821166992 + ], + [ + "▁Firefighter", + -14.609771728515623 + ], + [ + "5.6%", + -14.609803199768066 + ], + [ + "finn", + -14.609832763671877 + ], + [ + "unden", + -14.609864234924316 + ], + [ + "▁Andrey", + -14.6099271774292 + ], + [ + "▁hippies", + -14.609930038452148 + ], + [ + "Invent", + -14.609944343566896 + ], + [ + "▁Polygon", + -14.609956741333008 + ], + [ + "1.5%", + -14.610136032104492 + ], + [ + "▁frit", + -14.610240936279297 + ], + [ + "munity", + -14.6102876663208 + ], + [ + "▁slipcovers", + -14.610289573669434 + ], + [ + "HPC", + -14.610326766967772 + ], + [ + "Marketwire", + -14.61040496826172 + ], + [ + "▁Cyst", + -14.61043930053711 + ], + [ + "▁Petco", + -14.61046314239502 + ], + [ + "chetti", + -14.610536575317385 + ], + [ + "▁Tecno", + -14.610584259033203 + ], + [ + "▁kebabs", + -14.61059284210205 + ], + [ + "6600", + -14.610600471496582 + ], + [ + "ferry", + -14.610669136047363 + ], + [ + "Bless", + -14.61067008972168 + ], + [ + "0.0%", + -14.610689163208008 + ], + [ + "▁NRG", + -14.610751152038574 + ], + [ + "triple", + -14.610828399658203 + ], + [ + "Collective", + -14.610971450805664 + ], + [ + "ORAL", + -14.610980033874512 + ], + [ + "wether", + -14.61112117767334 + ], + [ + "Wat", + -14.611226081848145 + ], + [ + "▁ogre", + -14.611230850219728 + ], + [ + "osti", + -14.611265182495115 + ], + [ + "▁streetscape", + -14.61143398284912 + ], + [ + "▁ACID", + -14.611451148986816 + ], + [ + "Vid", + -14.611520767211914 + ], + [ + "▁collard", + -14.61156940460205 + ], + [ + "▁RCS", + -14.611607551574709 + ], + [ + "▁IOM", + -14.611618995666504 + ], + [ + "ogne", + -14.611679077148438 + ], + [ + "▁xeno", + -14.611699104309082 + ], + [ + "▁Allowed", + -14.611722946166992 + ], + [ + "▁parsnips", + -14.611772537231444 + ], + [ + "HEC", + -14.611773490905762 + ], + [ + "▁gauged", + -14.611886024475098 + ], + [ + "2:55", + -14.611919403076172 + ], + [ + "▁Decline", + -14.611926078796388 + ], + [ + "Passengers", + -14.611952781677246 + ], + [ + "CTU", + -14.611979484558104 + ], + [ + "▁Emir", + -14.611982345581056 + ], + [ + "hig", + -14.612018585205078 + ], + [ + "ymphonic", + -14.612077713012695 + ], + [ + "lege", + -14.612082481384276 + ], + [ + "Sophomore", + -14.61216926574707 + ], + [ + "Arrival", + -14.612178802490234 + ], + [ + "Isaac", + -14.612181663513184 + ], + [ + "ingredient", + -14.612200736999512 + ], + [ + "protocol", + -14.612201690673828 + ], + [ + "Musical", + -14.61220645904541 + ], + [ + "Inventory", + -14.612208366394045 + ], + [ + "arche", + -14.612208366394045 + ], + [ + "nked", + -14.612212181091309 + ], + [ + "Bangladesh", + -14.612213134765623 + ], + [ + "Speech", + -14.612215995788574 + ], + [ + "copyright", + -14.612287521362305 + ], + [ + "▁LLB", + -14.612306594848633 + ], + [ + "boring", + -14.612310409545898 + ], + [ + "▁Warne", + -14.612323760986328 + ], + [ + "Partnership", + -14.612335205078123 + ], + [ + "trud", + -14.612354278564451 + ], + [ + "cicle", + -14.612364768981934 + ], + [ + "Carr", + -14.612383842468262 + ], + [ + "5.2%", + -14.612406730651855 + 
], + [ + "parenting", + -14.61242389678955 + ], + [ + "omia", + -14.612432479858398 + ], + [ + "mediation", + -14.612526893615724 + ], + [ + "myself", + -14.612550735473633 + ], + [ + "▁Brach", + -14.612586975097656 + ], + [ + "DAA", + -14.61260223388672 + ], + [ + "▁Burglar", + -14.612675666809082 + ], + [ + "▁Lani", + -14.61268424987793 + ], + [ + "hugh", + -14.612709999084473 + ], + [ + "compression", + -14.612799644470217 + ], + [ + "Gender", + -14.612870216369627 + ], + [ + "objective", + -14.612988471984863 + ], + [ + "▁mythic", + -14.613014221191406 + ], + [ + "▁intertwine", + -14.61310863494873 + ], + [ + "franc", + -14.613200187683104 + ], + [ + "▁foreshadow", + -14.613205909729004 + ], + [ + "▁distillers", + -14.613239288330078 + ], + [ + "Cabinet", + -14.613323211669922 + ], + [ + "▁generalize", + -14.61339282989502 + ], + [ + "Bernard", + -14.613405227661133 + ], + [ + "▁Schol", + -14.613457679748535 + ], + [ + "▁unequivocal", + -14.613466262817385 + ], + [ + "▁paradoxical", + -14.613492012023926 + ], + [ + "▁soundbar", + -14.613507270812988 + ], + [ + "▁Jailbreak", + -14.61352825164795 + ], + [ + "Sandra", + -14.61355972290039 + ], + [ + "▁Algae", + -14.613607406616213 + ], + [ + "▁iframe", + -14.613637924194336 + ], + [ + "▁BAP", + -14.613676071166992 + ], + [ + "▁Enlighten", + -14.613677024841309 + ], + [ + "▁DAMAGE", + -14.613699913024902 + ], + [ + "Affiliation", + -14.61370849609375 + ], + [ + "Substantial", + -14.61370849609375 + ], + [ + "▁800-356-4092", + -14.61370849609375 + ], + [ + "▁Barracuda", + -14.61370849609375 + ], + [ + "▁Casserole", + -14.61370849609375 + ], + [ + "▁Centuries", + -14.61370849609375 + ], + [ + "▁HAPPEN", + -14.61370849609375 + ], + [ + "▁Reindeer", + -14.61370849609375 + ], + [ + "▁addendum", + -14.61370849609375 + ], + [ + "▁centennial", + -14.61370849609375 + ], + [ + "▁corduroy", + -14.61370849609375 + ], + [ + "▁hegemony", + -14.61370849609375 + ], + [ + "▁parochial", + -14.61370849609375 + ], + [ + "▁pheromone", + -14.61370849609375 + ], + [ + "▁repatriation", + -14.61370849609375 + ], + [ + "▁stupendous", + -14.61370849609375 + ], + [ + "Engraved", + -14.613709449768066 + ], + [ + "▁malignancy", + -14.613709449768066 + ], + [ + "▁nightmarish", + -14.613709449768066 + ], + [ + "▁Ghibli", + -14.6137113571167 + ], + [ + "▁Brandywine", + -14.613712310791016 + ], + [ + "▁Naturopathic", + -14.613712310791016 + ], + [ + "▁iguana", + -14.613712310791016 + ], + [ + "▁Vivaldi", + -14.613714218139648 + ], + [ + "▁Grenache", + -14.613715171813965 + ], + [ + "▁Salerno", + -14.613717079162598 + ], + [ + "▁Venezia", + -14.613717079162598 + ], + [ + "▁Confluence", + -14.61371898651123 + ], + [ + "▁Jyoti", + -14.61371898651123 + ], + [ + "▁certifies", + -14.61372184753418 + ], + [ + "▁Iverson", + -14.613722801208496 + ], + [ + "▁taxonomic", + -14.613722801208496 + ], + [ + "▁Harbaugh", + -14.613725662231444 + ], + [ + "▁SWIFT", + -14.613725662231444 + ], + [ + "▁Arvind", + -14.613726615905762 + ], + [ + "▁Borussia", + -14.613727569580078 + ], + [ + "▁Arafat", + -14.61373805999756 + ], + [ + "▁Wrestle", + -14.61373805999756 + ], + [ + "▁Toxicology", + -14.613739967346191 + ], + [ + "▁feign", + -14.613739967346191 + ], + [ + "▁eggnog", + -14.613741874694824 + ], + [ + "▁guava", + -14.613741874694824 + ], + [ + "▁blaring", + -14.61375331878662 + ], + [ + "▁Tarrant", + -14.613762855529783 + ], + [ + "▁talcum", + -14.613764762878418 + ], + [ + "▁multiplicity", + -14.613770484924316 + ], + [ + "▁unconfirmed", + -14.613777160644531 + ], + [ + "▁Healey", + 
-14.613779067993164 + ], + [ + "▁Kenyatta", + -14.613791465759276 + ], + [ + "▁Workbench", + -14.613795280456545 + ], + [ + "▁decryption", + -14.613800048828123 + ], + [ + "▁Handheld", + -14.61383056640625 + ], + [ + "▁rhizome", + -14.613836288452148 + ], + [ + "▁Romanesque", + -14.613849639892578 + ], + [ + "▁Zoology", + -14.613871574401855 + ], + [ + "▁aimlessly", + -14.613876342773438 + ], + [ + "▁Warbler", + -14.613900184631348 + ], + [ + "▁Alipay", + -14.613901138305664 + ], + [ + "▁Seung", + -14.613916397094728 + ], + [ + "▁petitioned", + -14.613931655883787 + ], + [ + "Proceeds", + -14.613943099975586 + ], + [ + "▁pullout", + -14.61396598815918 + ], + [ + "turk", + -14.61398983001709 + ], + [ + "▁scaly", + -14.613999366760254 + ], + [ + "▁Wildfire", + -14.614022254943848 + ], + [ + "lighter", + -14.61402702331543 + ], + [ + "▁jordans", + -14.61402702331543 + ], + [ + "▁bobby", + -14.614039421081545 + ], + [ + "corrected", + -14.614053726196287 + ], + [ + "Lago", + -14.614054679870604 + ], + [ + "▁trampled", + -14.61406421661377 + ], + [ + "▁Partridge", + -14.614065170288086 + ], + [ + "▁GameStop", + -14.614073753356934 + ], + [ + "▁salesmen", + -14.614089012145996 + ], + [ + "▁garter", + -14.614090919494627 + ], + [ + "▁MILES", + -14.614120483398438 + ], + [ + "▁Fb", + -14.614134788513184 + ], + [ + "▁improvisational", + -14.614144325256348 + ], + [ + "▁sisterhood", + -14.614145278930664 + ], + [ + "▁regretting", + -14.614216804504396 + ], + [ + "▁facie", + -14.614264488220217 + ], + [ + "▁Declan", + -14.614288330078123 + ], + [ + "RUP", + -14.614299774169922 + ], + [ + "13%", + -14.614334106445312 + ], + [ + "▁thrashing", + -14.614344596862791 + ], + [ + "▁Torture", + -14.61435317993164 + ], + [ + "▁VND", + -14.614383697509766 + ], + [ + "avy", + -14.614401817321776 + ], + [ + "▁Marysville", + -14.614447593688965 + ], + [ + "flexible", + -14.614450454711914 + ], + [ + "▁Bader", + -14.614497184753418 + ], + [ + "3.1%", + -14.614502906799316 + ], + [ + "▁Halton", + -14.614509582519531 + ], + [ + "▁Kagan", + -14.614540100097656 + ], + [ + "▁Xenon", + -14.614553451538086 + ], + [ + "▁Descent", + -14.614554405212402 + ], + [ + "▁Riser", + -14.614572525024414 + ], + [ + "▁Troms", + -14.614606857299805 + ], + [ + "eena", + -14.614654541015623 + ], + [ + "▁Winfield", + -14.614673614501951 + ], + [ + "gba", + -14.614675521850586 + ], + [ + "Strip", + -14.614709854125977 + ], + [ + "terror", + -14.614726066589355 + ], + [ + "▁Dever", + -14.614738464355469 + ], + [ + "▁dampened", + -14.614747047424316 + ], + [ + "▁scratchy", + -14.614813804626465 + ], + [ + "Memo", + -14.614832878112791 + ], + [ + "▁tampon", + -14.61484146118164 + ], + [ + "▁shyness", + -14.614867210388184 + ], + [ + "▁subtracted", + -14.614885330200195 + ], + [ + "COIN", + -14.614904403686523 + ], + [ + "▁puja", + -14.614924430847168 + ], + [ + "Peru", + -14.614925384521484 + ], + [ + "▁Merely", + -14.614948272705078 + ], + [ + "▁Brigid", + -14.61496639251709 + ], + [ + "▁personalizing", + -14.615020751953123 + ], + [ + "rending", + -14.615071296691896 + ], + [ + "baz", + -14.615097045898438 + ], + [ + "▁inclusiveness", + -14.615215301513672 + ], + [ + "▁Shimon", + -14.615301132202148 + ], + [ + "buro", + -14.615316390991213 + ], + [ + "▁Kura", + -14.615323066711426 + ], + [ + "▁credentialed", + -14.615349769592283 + ], + [ + "▁elicited", + -14.61539363861084 + ], + [ + "▁Coru", + -14.615396499633787 + ], + [ + "Packing", + -14.615474700927734 + ], + [ + "▁SANTA", + -14.615555763244627 + ], + [ + "▁Masterclass", + 
-14.615588188171388 + ], + [ + "▁rulemaking", + -14.615589141845703 + ], + [ + "-11-0", + -14.615612030029297 + ], + [ + "▁Messe", + -14.61566925048828 + ], + [ + "▁Alva", + -14.615680694580078 + ], + [ + "▁Austro", + -14.615694046020508 + ], + [ + "▁Khad", + -14.615752220153809 + ], + [ + "▁META", + -14.615777969360352 + ], + [ + "▁radiologists", + -14.615805625915527 + ], + [ + "ranch", + -14.615806579589844 + ], + [ + "screening", + -14.615836143493652 + ], + [ + "FAB", + -14.61589813232422 + ], + [ + "▁dram", + -14.6159029006958 + ], + [ + "▁Ulta", + -14.615985870361328 + ], + [ + "▁proudest", + -14.61598777770996 + ], + [ + "▁precepts", + -14.616050720214844 + ], + [ + "▁02:4", + -14.61605453491211 + ], + [ + "▁Bhat", + -14.616064071655272 + ], + [ + "▁futurist", + -14.616080284118652 + ], + [ + "▁Galvan", + -14.616150856018066 + ], + [ + "▁auntie", + -14.616354942321776 + ], + [ + "▁Striker", + -14.616414070129396 + ], + [ + "▁Indio", + -14.616510391235352 + ], + [ + "▁Lawsuit", + -14.616547584533691 + ], + [ + "▁gunk", + -14.616575241088867 + ], + [ + "▁Bagan", + -14.616576194763184 + ], + [ + "▁Relaxing", + -14.616597175598145 + ], + [ + "usu", + -14.616758346557615 + ], + [ + "UCA", + -14.616817474365234 + ], + [ + "▁2560", + -14.61681842803955 + ], + [ + "96)", + -14.616994857788086 + ], + [ + "▁Eller", + -14.617035865783691 + ], + [ + "▁confessing", + -14.617035865783691 + ], + [ + "cora", + -14.617047309875488 + ], + [ + "▁Samir", + -14.617067337036133 + ], + [ + "lugged", + -14.617111206054688 + ], + [ + "▁Creo", + -14.61712646484375 + ], + [ + "ADR", + -14.617132186889648 + ], + [ + "▁Tartan", + -14.61717128753662 + ], + [ + "▁Quadra", + -14.61717700958252 + ], + [ + "RAFT", + -14.61721897125244 + ], + [ + "Fuji", + -14.617222785949709 + ], + [ + "▁pregame", + -14.617222785949709 + ], + [ + "Armor", + -14.61723804473877 + ], + [ + "Conducted", + -14.61728858947754 + ], + [ + "Southwest", + -14.617341995239258 + ], + [ + "▁MBR", + -14.617355346679688 + ], + [ + "▁Beyer", + -14.617386817932127 + ], + [ + "▁cannabinoid", + -14.617388725280762 + ], + [ + "vigo", + -14.617451667785645 + ], + [ + "piratory", + -14.61746311187744 + ], + [ + "clark", + -14.61746883392334 + ], + [ + "▁muss", + -14.617507934570312 + ], + [ + "ALCO", + -14.617511749267578 + ], + [ + "lema", + -14.617615699768066 + ], + [ + "▁Michi", + -14.617621421813965 + ], + [ + "potato", + -14.61762237548828 + ], + [ + "phine", + -14.617630004882812 + ], + [ + "▁Torn", + -14.617720603942873 + ], + [ + "shka", + -14.617880821228027 + ], + [ + "▁hollowed", + -14.617883682250977 + ], + [ + "▁TIF", + -14.61794376373291 + ], + [ + "▁buckled", + -14.618006706237791 + ], + [ + "▁Coen", + -14.61802864074707 + ], + [ + "ORT", + -14.618165969848633 + ], + [ + "jector", + -14.618229866027832 + ], + [ + "▁EPO", + -14.618257522583008 + ], + [ + "▁(1920", + -14.61830711364746 + ], + [ + "▁Occur", + -14.618319511413574 + ], + [ + "▁Sabo", + -14.618325233459473 + ], + [ + "Cork", + -14.618365287780762 + ], + [ + "▁Worthy", + -14.618377685546877 + ], + [ + "▁FUT", + -14.61838436126709 + ], + [ + "▁fascinates", + -14.618389129638672 + ], + [ + "5:20", + -14.618492126464844 + ], + [ + "uzzle", + -14.618504524230955 + ], + [ + "Gard", + -14.618562698364258 + ], + [ + "▁Divya", + -14.618570327758787 + ], + [ + "pressor", + -14.618571281433104 + ], + [ + "▁Franck", + -14.618700981140137 + ], + [ + "▁Gare", + -14.618870735168455 + ], + [ + "Cali", + -14.618934631347656 + ], + [ + "▁Klar", + -14.618986129760742 + ], + [ + "BEGIN", + 
-14.61903953552246 + ], + [ + "▁pocketed", + -14.619049072265623 + ], + [ + "▁playset", + -14.619100570678713 + ], + [ + "schau", + -14.619193077087402 + ], + [ + "Assembly", + -14.619230270385742 + ], + [ + "2-16", + -14.619270324707031 + ], + [ + "▁miR", + -14.619279861450195 + ], + [ + "▁Damaged", + -14.619312286376951 + ], + [ + "▁droplet", + -14.619440078735352 + ], + [ + "▁Idee", + -14.61958122253418 + ], + [ + "▁Naya", + -14.619589805603027 + ], + [ + "scription", + -14.619626998901367 + ], + [ + "bran", + -14.619741439819336 + ], + [ + "▁Funded", + -14.619749069213867 + ], + [ + "dui", + -14.619927406311035 + ], + [ + "▁cyberattacks", + -14.619929313659668 + ], + [ + "Achieving", + -14.619976997375488 + ], + [ + "bacterium", + -14.619980812072754 + ], + [ + "custodial", + -14.619986534118652 + ], + [ + "▁Knew", + -14.619994163513184 + ], + [ + "Appropriate", + -14.619997024536133 + ], + [ + "▁SCT", + -14.62000846862793 + ], + [ + "subscription", + -14.620020866394045 + ], + [ + "Abdul", + -14.620024681091309 + ], + [ + "Wikimedia", + -14.620024681091309 + ], + [ + "Legacy", + -14.620025634765623 + ], + [ + "victim", + -14.620030403137209 + ], + [ + "Jacques", + -14.620035171508787 + ], + [ + "Croatia", + -14.620038032531738 + ], + [ + "Healing", + -14.620038986206056 + ], + [ + "Travis", + -14.620041847229004 + ], + [ + "Bloomberg", + -14.62004566192627 + ], + [ + "Leicester", + -14.620073318481444 + ], + [ + "▁Computation", + -14.620078086853027 + ], + [ + "▁Pata", + -14.620162010192873 + ], + [ + "straw", + -14.62016487121582 + ], + [ + "Reflection", + -14.62027072906494 + ], + [ + "Absolute", + -14.620331764221191 + ], + [ + "constrained", + -14.620347023010254 + ], + [ + "▁Hendrick", + -14.620361328125 + ], + [ + "1:14", + -14.620408058166504 + ], + [ + "Bubble", + -14.620433807373049 + ], + [ + "▁ALJ", + -14.620673179626465 + ], + [ + "considered", + -14.620710372924805 + ], + [ + "humid", + -14.620824813842772 + ], + [ + "PTSD", + -14.62086582183838 + ], + [ + "▁LEG", + -14.620903968811035 + ], + [ + "ebook", + -14.62102222442627 + ], + [ + "Podcast", + -14.62107753753662 + ], + [ + "▁conservator", + -14.621082305908203 + ], + [ + "DID", + -14.621123313903809 + ], + [ + "SPIRE", + -14.62124252319336 + ], + [ + "▁PLAYER", + -14.621271133422852 + ], + [ + "alley", + -14.621292114257812 + ], + [ + "Recorded", + -14.621317863464355 + ], + [ + "entia", + -14.621317863464355 + ], + [ + "Saturated", + -14.621338844299316 + ], + [ + "▁Defiance", + -14.621341705322266 + ], + [ + "▁Europcar", + -14.621341705322266 + ], + [ + "▁Ferreira", + -14.621341705322266 + ], + [ + "▁Ginseng", + -14.621341705322266 + ], + [ + "▁Probability", + -14.621341705322266 + ], + [ + "▁Sanitary", + -14.621341705322266 + ], + [ + "▁desalination", + -14.621341705322266 + ], + [ + "▁inconsequential", + -14.621341705322266 + ], + [ + "▁umpteen", + -14.621341705322266 + ], + [ + "▁unmistakably", + -14.621341705322266 + ], + [ + "▁Bleeding", + -14.621342658996582 + ], + [ + "▁Innocence", + -14.621342658996582 + ], + [ + "▁Neapolitan", + -14.621342658996582 + ], + [ + "▁Radiance", + -14.621342658996582 + ], + [ + "▁YOURSELF", + -14.621342658996582 + ], + [ + "▁altruism", + -14.621342658996582 + ], + [ + "▁arthritic", + -14.621342658996582 + ], + [ + "▁permanence", + -14.621342658996582 + ], + [ + "▁impersonate", + -14.621343612670898 + ], + [ + "dactyl", + -14.621345520019531 + ], + [ + "▁Cerrito", + -14.621346473693848 + ], + [ + "▁Jaitley", + -14.621347427368164 + ], + [ + "▁Innovators", + -14.62134838104248 + 
], + [ + "▁Talmud", + -14.62134838104248 + ], + [ + "Allergies", + -14.621350288391112 + ], + [ + "Supposedly", + -14.621352195739746 + ], + [ + "▁brocade", + -14.621353149414062 + ], + [ + "▁metaphysics", + -14.62135410308838 + ], + [ + "▁Moncton", + -14.621355056762695 + ], + [ + "▁Thayer", + -14.621359825134276 + ], + [ + "▁Monaghan", + -14.621362686157228 + ], + [ + "▁CSIRO", + -14.621368408203123 + ], + [ + "▁frigate", + -14.621373176574709 + ], + [ + "▁Ballarat", + -14.621381759643556 + ], + [ + "▁deafening", + -14.621381759643556 + ], + [ + "▁biomechanical", + -14.621382713317873 + ], + [ + "▁Matisse", + -14.62138557434082 + ], + [ + "▁Lockhart", + -14.62139129638672 + ], + [ + "▁Devonshire", + -14.621393203735352 + ], + [ + "6.3%", + -14.6214017868042 + ], + [ + "Hugh", + -14.62140941619873 + ], + [ + "▁styl", + -14.621421813964844 + ], + [ + "▁Tyneside", + -14.621424674987791 + ], + [ + "▁harmonise", + -14.621426582336426 + ], + [ + "▁Piotr", + -14.621435165405272 + ], + [ + "▁redemptive", + -14.62143611907959 + ], + [ + "▁Gabbana", + -14.621441841125488 + ], + [ + "▁Veeam", + -14.621441841125488 + ], + [ + "▁tactful", + -14.621452331542969 + ], + [ + "▁dermis", + -14.62145709991455 + ], + [ + "▁$88", + -14.62147331237793 + ], + [ + "▁Channing", + -14.62147331237793 + ], + [ + "▁Jewellers", + -14.62147617340088 + ], + [ + "▁yogic", + -14.62148094177246 + ], + [ + "▁strappy", + -14.621517181396484 + ], + [ + "▁Deepika", + -14.621543884277344 + ], + [ + "▁clung", + -14.621551513671877 + ], + [ + "▁‘‘", + -14.62155532836914 + ], + [ + "▁dyna", + -14.621601104736328 + ], + [ + "▁Salaam", + -14.62160301208496 + ], + [ + "TEE", + -14.621630668640137 + ], + [ + "▁oboe", + -14.62170124053955 + ], + [ + "▁Butterfield", + -14.621709823608398 + ], + [ + "riga", + -14.62171745300293 + ], + [ + "▁microfinance", + -14.6217679977417 + ], + [ + "1946", + -14.62177276611328 + ], + [ + "▁WINTER", + -14.621779441833496 + ], + [ + "▁Gea", + -14.621795654296877 + ], + [ + "▁disallowed", + -14.621828079223633 + ], + [ + "▁Hamish", + -14.621849060058594 + ], + [ + "▁Attributes", + -14.62186336517334 + ], + [ + "▁Ayer", + -14.621910095214844 + ], + [ + "▁Foldable", + -14.62191677093506 + ], + [ + "▁endgame", + -14.621925354003906 + ], + [ + "oooooo", + -14.621929168701172 + ], + [ + "▁Blythe", + -14.621932983398438 + ], + [ + "▁blissfully", + -14.622003555297852 + ], + [ + "▁deconstruction", + -14.62201690673828 + ], + [ + "▁Johar", + -14.622017860412598 + ], + [ + "▁xenon", + -14.622050285339355 + ], + [ + "▁Biscuits", + -14.622060775756836 + ], + [ + "▁Overstock", + -14.622066497802734 + ], + [ + "▁Edgewood", + -14.62209415435791 + ], + [ + "▁ratify", + -14.622097969055176 + ], + [ + "▁MoMA", + -14.622116088867188 + ], + [ + "mtr", + -14.622129440307615 + ], + [ + "▁Renner", + -14.62213897705078 + ], + [ + "▁Gameplay", + -14.622145652770996 + ], + [ + "▁subsist", + -14.622170448303224 + ], + [ + "▁Basu", + -14.622182846069336 + ], + [ + "▁australian", + -14.622241973876951 + ], + [ + "DMC", + -14.62228298187256 + ], + [ + "lamo", + -14.622316360473633 + ], + [ + "▁Elected", + -14.622382164001465 + ], + [ + "▁Opa", + -14.622447967529297 + ], + [ + "confirmed", + -14.622448921203612 + ], + [ + "▁Arjuna", + -14.622458457946776 + ], + [ + "▁undertone", + -14.62246799468994 + ], + [ + "Moo", + -14.622475624084473 + ], + [ + "▁coffeemaker", + -14.622515678405762 + ], + [ + "▁rubberized", + -14.622612953186035 + ], + [ + "▁Calf", + -14.622634887695312 + ], + [ + "▁gravestone", + -14.622647285461426 + ], + [ + 
"▁Jenni", + -14.622652053833008 + ], + [ + "▁750,000", + -14.622735023498535 + ], + [ + "lume", + -14.622756958007812 + ], + [ + "▁Kotlin", + -14.622766494750977 + ], + [ + "▁perpetuated", + -14.622840881347656 + ], + [ + "▁rationing", + -14.62285614013672 + ], + [ + "##", + -14.622879981994627 + ], + [ + "–16", + -14.622884750366213 + ], + [ + "▁Urn", + -14.622920989990234 + ], + [ + "▁acoustical", + -14.622928619384766 + ], + [ + "▁cava", + -14.622941970825195 + ], + [ + "LEM", + -14.62295150756836 + ], + [ + "▁posturing", + -14.622962951660156 + ], + [ + "▁conceptualize", + -14.622980117797852 + ], + [ + "judgemental", + -14.622998237609863 + ], + [ + "▁ISIL", + -14.623016357421877 + ], + [ + "▁Salvi", + -14.62303352355957 + ], + [ + "▁triplets", + -14.623059272766112 + ], + [ + "▁CMU", + -14.62311553955078 + ], + [ + "ohl", + -14.62322998046875 + ], + [ + "SHO", + -14.623268127441406 + ], + [ + "ylated", + -14.623308181762695 + ], + [ + "▁alms", + -14.623319625854492 + ], + [ + "▁Laurier", + -14.623408317565918 + ], + [ + "7-13", + -14.623507499694824 + ], + [ + "▁Yasu", + -14.623516082763672 + ], + [ + "beast", + -14.623575210571287 + ], + [ + "loping", + -14.623587608337402 + ], + [ + "VOID", + -14.623613357543944 + ], + [ + "▁Ney", + -14.623666763305664 + ], + [ + "mpeg", + -14.623713493347168 + ], + [ + "▁02:5", + -14.623740196228027 + ], + [ + "dix", + -14.623779296875 + ], + [ + "rosso", + -14.623794555664062 + ], + [ + "▁regex", + -14.62379550933838 + ], + [ + "▁Firebird", + -14.623908996582031 + ], + [ + "Deer", + -14.623920440673828 + ], + [ + "▁Minot", + -14.62393569946289 + ], + [ + "▁piglet", + -14.623971939086914 + ], + [ + "elton", + -14.624005317687988 + ], + [ + "EPR", + -14.624045372009276 + ], + [ + "Theoretically", + -14.624201774597168 + ], + [ + "▁Perrin", + -14.624235153198242 + ], + [ + "calm", + -14.624268531799316 + ], + [ + "▁Annan", + -14.624295234680176 + ], + [ + "▁KAL", + -14.624297142028809 + ], + [ + "▁chalked", + -14.624377250671388 + ], + [ + "CROSS", + -14.624388694763184 + ], + [ + "▁Kj", + -14.62445068359375 + ], + [ + "▁Southgate", + -14.624499320983888 + ], + [ + "emco", + -14.62460231781006 + ], + [ + "matte", + -14.62464427947998 + ], + [ + "Aunt", + -14.624671936035156 + ], + [ + "▁sprigs", + -14.624792098999023 + ], + [ + "▁Sade", + -14.624899864196776 + ], + [ + "▁Vier", + -14.624903678894045 + ], + [ + "▁Shure", + -14.624906539916992 + ], + [ + "marin", + -14.62502098083496 + ], + [ + "DRP", + -14.625125885009766 + ], + [ + "Imprint", + -14.625126838684082 + ], + [ + "tainer", + -14.625150680541992 + ], + [ + "▁Servi", + -14.625150680541992 + ], + [ + "▁sachet", + -14.625158309936523 + ], + [ + "▁pivoting", + -14.625259399414062 + ], + [ + "▁annihilate", + -14.625288963317873 + ], + [ + "▁DMD", + -14.625296592712402 + ], + [ + "TWO", + -14.625371932983398 + ], + [ + "▁Khao", + -14.625380516052246 + ], + [ + "Motorists", + -14.625412940979004 + ], + [ + "morpho", + -14.625466346740724 + ], + [ + "▁Hermit", + -14.625502586364746 + ], + [ + "▁birdhouse", + -14.62553596496582 + ], + [ + "▁Eze", + -14.625545501708984 + ], + [ + "▁pirated", + -14.625557899475098 + ], + [ + "▁grouse", + -14.62562656402588 + ], + [ + "▁Radha", + -14.62564182281494 + ], + [ + "Roller", + -14.625653266906738 + ], + [ + "OUL", + -14.625690460205078 + ], + [ + "techno", + -14.625740051269531 + ], + [ + "▁bui", + -14.625777244567873 + ], + [ + "▁(2008", + -14.625788688659668 + ], + [ + "▁Grate", + -14.625835418701172 + ], + [ + "Prefer", + -14.625897407531738 + ], + [ + 
"lowes", + -14.625903129577637 + ], + [ + "Shak", + -14.625946998596191 + ], + [ + "aneous", + -14.626032829284668 + ], + [ + "▁Seva", + -14.626152992248535 + ], + [ + "adhya", + -14.626228332519531 + ], + [ + "▁basses", + -14.626240730285645 + ], + [ + "▁consort", + -14.62626838684082 + ], + [ + "Footnote", + -14.626291275024414 + ], + [ + "▁PLN", + -14.626466751098633 + ], + [ + "▁refilled", + -14.62647533416748 + ], + [ + "▁Changi", + -14.62648105621338 + ], + [ + "▁1.10", + -14.626673698425291 + ], + [ + "pka", + -14.626678466796877 + ], + [ + "reservation", + -14.6267671585083 + ], + [ + "▁Dozen", + -14.62677764892578 + ], + [ + "▁NEAR", + -14.626842498779297 + ], + [ + "heh", + -14.626911163330078 + ], + [ + "matologist", + -14.626928329467772 + ], + [ + "▁IOL", + -14.62700653076172 + ], + [ + "▁Sandman", + -14.627058029174805 + ], + [ + "cological", + -14.627070426940918 + ], + [ + "▁Okla", + -14.627108573913574 + ], + [ + "▁airlift", + -14.627179145812988 + ], + [ + "Coral", + -14.6272554397583 + ], + [ + "Estimate", + -14.6272554397583 + ], + [ + "▁Lute", + -14.627257347106934 + ], + [ + "▁Murr", + -14.627284049987791 + ], + [ + "gana", + -14.62729549407959 + ], + [ + "CNT", + -14.627426147460938 + ], + [ + "▁discolor", + -14.627426147460938 + ], + [ + "▁Samo", + -14.62742805480957 + ], + [ + "OOOO", + -14.627458572387695 + ], + [ + "Vue", + -14.62754249572754 + ], + [ + "extreme", + -14.627671241760254 + ], + [ + "ASTM", + -14.627687454223633 + ], + [ + "▁Chard", + -14.62777614593506 + ], + [ + "▁Patek", + -14.627822875976562 + ], + [ + "liya", + -14.627846717834473 + ], + [ + "069", + -14.627861976623535 + ], + [ + "Slightly", + -14.627870559692385 + ], + [ + "Alternate", + -14.627872467041016 + ], + [ + "Preliminary", + -14.627873420715332 + ], + [ + "Complimentary", + -14.627888679504396 + ], + [ + "Revelation", + -14.627888679504396 + ], + [ + "candidate", + -14.627890586853027 + ], + [ + "Audience", + -14.62789249420166 + ], + [ + "Diabetes", + -14.627896308898926 + ], + [ + "▁Gant", + -14.62790584564209 + ], + [ + "activating", + -14.627907752990724 + ], + [ + "graduation", + -14.627907752990724 + ], + [ + "Summit", + -14.627910614013672 + ], + [ + "Telephone", + -14.627923011779783 + ], + [ + "Stanley", + -14.627969741821287 + ], + [ + "▁50-50", + -14.627970695495604 + ], + [ + "Roughly", + -14.62804889678955 + ], + [ + "Kindle", + -14.628064155578612 + ], + [ + "appeal", + -14.62807273864746 + ], + [ + "▁byproducts", + -14.628084182739258 + ], + [ + "LSE", + -14.628125190734863 + ], + [ + "Worldwide", + -14.628204345703123 + ], + [ + "Coverage", + -14.62820529937744 + ], + [ + "Dublin", + -14.628228187561035 + ], + [ + "▁brussels", + -14.628228187561035 + ], + [ + "ulata", + -14.628240585327148 + ], + [ + "▁motionless", + -14.628318786621094 + ], + [ + "podcast", + -14.62831974029541 + ], + [ + "mputee", + -14.628331184387209 + ], + [ + "inspiration", + -14.6283540725708 + ], + [ + "rling", + -14.6283597946167 + ], + [ + "glance", + -14.628400802612305 + ], + [ + "▁disengage", + -14.62840461730957 + ], + [ + "CLASS", + -14.628425598144531 + ], + [ + "▁excavate", + -14.628433227539062 + ], + [ + "▁peso", + -14.628549575805664 + ], + [ + "▁overpowered", + -14.628554344177246 + ], + [ + "▁2019-04-1", + -14.628581047058104 + ], + [ + "ctrl", + -14.628647804260254 + ], + [ + "ecology", + -14.628726959228516 + ], + [ + "▁simul", + -14.62875270843506 + ], + [ + "▁Returned", + -14.628775596618652 + ], + [ + "Tales", + -14.62879467010498 + ], + [ + "averse", + -14.628891944885254 
+ ], + [ + "Allison", + -14.62892246246338 + ], + [ + "’", + -14.629034042358398 + ], + [ + "▁Filtration", + -14.629034042358398 + ], + [ + "▁MICHAEL", + -14.629034042358398 + ], + [ + "▁ambivalent", + -14.629034042358398 + ], + [ + "▁Amphitheatre", + -14.629034996032717 + ], + [ + "▁Flamenco", + -14.629034996032717 + ], + [ + "▁McCullough", + -14.629034996032717 + ], + [ + "▁Muskegon", + -14.629034996032717 + ], + [ + "▁POLICE", + -14.629034996032717 + ], + [ + "▁aphasia", + -14.629034996032717 + ], + [ + "▁aristocracy", + -14.629034996032717 + ], + [ + "▁emancipation", + -14.629034996032717 + ], + [ + "▁hypothermia", + -14.629034996032717 + ], + [ + "▁rectifier", + -14.629034996032717 + ], + [ + "▁saviour", + -14.629034996032717 + ], + [ + "▁unmanageable", + -14.629034996032717 + ], + [ + "▁Econometric", + -14.629035949707031 + ], + [ + "▁Faridabad", + -14.629035949707031 + ], + [ + "▁crinkle", + -14.629035949707031 + ], + [ + "▁nonchalant", + -14.629035949707031 + ], + [ + "▁tramadol", + -14.629036903381348 + ], + [ + "▁Merrimack", + -14.629037857055664 + ], + [ + "▁Parachute", + -14.629037857055664 + ], + [ + "▁Weymouth", + -14.629037857055664 + ], + [ + "▁mojito", + -14.62903881072998 + ], + [ + "▁HOWEVER", + -14.62904167175293 + ], + [ + "▁chastise", + -14.629043579101562 + ], + [ + "▁Corinne", + -14.629046440124512 + ], + [ + "▁Aldridge", + -14.62904930114746 + ], + [ + "▁Muncie", + -14.629051208496094 + ], + [ + "▁Transgender", + -14.62905502319336 + ], + [ + "▁inculcate", + -14.62905979156494 + ], + [ + "▁Plywood", + -14.62907886505127 + ], + [ + "▁clamber", + -14.629095077514648 + ], + [ + "▁thinnest", + -14.629107475280762 + ], + [ + "▁Biomass", + -14.62911319732666 + ], + [ + "▁Bachmann", + -14.629117012023926 + ], + [ + "▁armature", + -14.629117012023926 + ], + [ + "▁$39.99", + -14.629131317138672 + ], + [ + "▁northerly", + -14.629137992858888 + ], + [ + "Cruz", + -14.629138946533203 + ], + [ + "▁unaffiliated", + -14.629138946533203 + ], + [ + "▁tugging", + -14.629141807556152 + ], + [ + "LAST", + -14.629144668579102 + ], + [ + "▁sensuality", + -14.62914752960205 + ], + [ + "▁Hellboy", + -14.629154205322266 + ], + [ + "▁MetLife", + -14.629154205322266 + ], + [ + "▁sepia", + -14.629158020019531 + ], + [ + "▁Sumter", + -14.62916088104248 + ], + [ + "▁Deliveries", + -14.629162788391112 + ], + [ + "▁Vertex", + -14.629172325134276 + ], + [ + "▁Tennant", + -14.629180908203123 + ], + [ + "1:29", + -14.629217147827148 + ], + [ + "minister", + -14.62922191619873 + ], + [ + "▁underutilized", + -14.629255294799805 + ], + [ + "▁commonwealth", + -14.629271507263184 + ], + [ + "lingua", + -14.629281044006348 + ], + [ + "▁realignment", + -14.629281044006348 + ], + [ + "▁biodynamic", + -14.629300117492676 + ], + [ + "▁Australasian", + -14.629304885864258 + ], + [ + "▁Berries", + -14.629317283630373 + ], + [ + "Macro", + -14.62932586669922 + ], + [ + "GIL", + -14.629332542419434 + ], + [ + "▁Surety", + -14.62935733795166 + ], + [ + "▁substring", + -14.62935733795166 + ], + [ + "▁racehorse", + -14.629375457763672 + ], + [ + "Uncertainty", + -14.62938117980957 + ], + [ + "Useful", + -14.629406929016112 + ], + [ + "▁truthfulness", + -14.62940788269043 + ], + [ + "▁Autographed", + -14.629462242126465 + ], + [ + "▁Layton", + -14.62948226928711 + ], + [ + "▁Rhett", + -14.62951374053955 + ], + [ + "▁encroaching", + -14.629515647888184 + ], + [ + "▁Rubicon", + -14.629551887512209 + ], + [ + "▁frescoes", + -14.629575729370115 + ], + [ + "Glyph", + -14.629627227783203 + ], + [ + "▁Rockaway", + 
-14.629629135131836 + ], + [ + "▁Barnum", + -14.629653930664062 + ], + [ + "coaching", + -14.62966537475586 + ], + [ + "▁Ageing", + -14.629691123962402 + ], + [ + "▁PII", + -14.629717826843262 + ], + [ + "▁Gunter", + -14.629719734191896 + ], + [ + "▁wailing", + -14.629725456237791 + ], + [ + "Nee", + -14.62973976135254 + ], + [ + "▁telltale", + -14.629756927490234 + ], + [ + "▁corroded", + -14.629766464233398 + ], + [ + "isten", + -14.629801750183104 + ], + [ + "▁Lawrenceville", + -14.629831314086914 + ], + [ + "▁interrogated", + -14.62985134124756 + ], + [ + "▁Kandy", + -14.629864692687988 + ], + [ + "▁cymbals", + -14.629876136779783 + ], + [ + "▁pangs", + -14.629891395568848 + ], + [ + "▁plucking", + -14.630010604858398 + ], + [ + "▁Tether", + -14.630054473876951 + ], + [ + "Collab", + -14.630066871643066 + ], + [ + "finalist", + -14.630075454711914 + ], + [ + "▁summarised", + -14.630130767822266 + ], + [ + "▁screed", + -14.630162239074709 + ], + [ + "▁Manish", + -14.630189895629885 + ], + [ + "▁LPGA", + -14.630208969116213 + ], + [ + "▁Exceptions", + -14.630219459533691 + ], + [ + "▁Fortran", + -14.630241394042969 + ], + [ + "▁basilica", + -14.630253791809082 + ], + [ + "▁Krug", + -14.63026237487793 + ], + [ + "Actively", + -14.630264282226562 + ], + [ + "▁15:5", + -14.630297660827637 + ], + [ + "voo", + -14.6303071975708 + ], + [ + "▁blanche", + -14.630366325378418 + ], + [ + "!!!!!!!!!!!!", + -14.630377769470217 + ], + [ + "▁PANEL", + -14.630434036254885 + ], + [ + "▁Emotion", + -14.630483627319336 + ], + [ + "▁Citysearch", + -14.630510330200195 + ], + [ + "▁Hoffmann", + -14.630555152893066 + ], + [ + "9/16", + -14.63058090209961 + ], + [ + "Bran", + -14.630585670471191 + ], + [ + "iwan", + -14.63077163696289 + ], + [ + "▁Goodell", + -14.630782127380373 + ], + [ + "▁flanking", + -14.63079071044922 + ], + [ + "▁boreal", + -14.630908012390137 + ], + [ + "▁Zest", + -14.63091278076172 + ], + [ + "▁Woking", + -14.630914688110352 + ], + [ + "▁WARS", + -14.630949020385742 + ], + [ + "▁shaming", + -14.630983352661133 + ], + [ + "▁operand", + -14.63099765777588 + ], + [ + "▁Bering", + -14.631006240844728 + ], + [ + "Came", + -14.631017684936523 + ], + [ + "▁Colman", + -14.63106918334961 + ], + [ + "MIM", + -14.631078720092772 + ], + [ + "TOW", + -14.631237983703612 + ], + [ + "merci", + -14.631437301635742 + ], + [ + "▁Mandal", + -14.631475448608398 + ], + [ + "▁Machin", + -14.631514549255373 + ], + [ + "▁Crested", + -14.631528854370115 + ], + [ + "pizza", + -14.631543159484863 + ], + [ + "▁Forecasting", + -14.631545066833496 + ], + [ + "▁AROUND", + -14.63156795501709 + ], + [ + "carbonate", + -14.631656646728516 + ], + [ + "▁dame", + -14.631674766540527 + ], + [ + "▁Koop", + -14.631704330444336 + ], + [ + "Cue", + -14.631787300109863 + ], + [ + "▁$2500", + -14.631814002990724 + ], + [ + "▁Ledge", + -14.631844520568848 + ], + [ + "▁overheated", + -14.631861686706545 + ], + [ + "Soil", + -14.63187026977539 + ], + [ + "Lily", + -14.631884574890137 + ], + [ + "agora", + -14.63194751739502 + ], + [ + "▁adornments", + -14.631986618041992 + ], + [ + "DET", + -14.632004737854004 + ], + [ + "Texture", + -14.63202667236328 + ], + [ + "▁Makh", + -14.632078170776367 + ], + [ + "ignore", + -14.632108688354492 + ], + [ + "▁fabricators", + -14.63213062286377 + ], + [ + "▁Fairly", + -14.632131576538086 + ], + [ + "▁tempering", + -14.632156372070312 + ], + [ + "GFP", + -14.632177352905272 + ], + [ + "earnings", + -14.632211685180664 + ], + [ + "▁Bridg", + -14.632250785827637 + ], + [ + "verte", + 
-14.63236141204834 + ], + [ + "▁WINE", + -14.632403373718262 + ], + [ + "▁XJ", + -14.632431983947754 + ], + [ + "institutional", + -14.632568359375 + ], + [ + "owy", + -14.632593154907228 + ], + [ + "▁darkish", + -14.63266658782959 + ], + [ + "▁Therma", + -14.632670402526855 + ], + [ + "▁Cotswold", + -14.632943153381348 + ], + [ + "▁Rupee", + -14.632956504821776 + ], + [ + "▁Pupil", + -14.632972717285156 + ], + [ + "▁Tanker", + -14.632979393005373 + ], + [ + "▁1796", + -14.633044242858888 + ], + [ + "▁BBM", + -14.633050918579102 + ], + [ + "▁fingertip", + -14.633064270019531 + ], + [ + "▁swiped", + -14.63333797454834 + ], + [ + "MANN", + -14.633340835571287 + ], + [ + "adder", + -14.63336944580078 + ], + [ + "missive", + -14.633455276489258 + ], + [ + "▁Shou", + -14.633460998535156 + ], + [ + "3:21", + -14.63354778289795 + ], + [ + "▁wasabi", + -14.633563995361328 + ], + [ + "boxed", + -14.63360595703125 + ], + [ + "ayed", + -14.633721351623535 + ], + [ + "5.4%", + -14.6337308883667 + ], + [ + "blon", + -14.63377285003662 + ], + [ + "attachment", + -14.633779525756836 + ], + [ + "olfo", + -14.63382053375244 + ], + [ + "ferred", + -14.633837699890137 + ], + [ + "omorph", + -14.633880615234377 + ], + [ + "▁meatball", + -14.6339111328125 + ], + [ + "invisible", + -14.634015083312988 + ], + [ + "▁Civ", + -14.634029388427734 + ], + [ + "▁1814", + -14.634038925170898 + ], + [ + "Compile", + -14.63405990600586 + ], + [ + "Mongo", + -14.634108543395996 + ], + [ + "▁MDT", + -14.63418197631836 + ], + [ + "Wrapper", + -14.63420581817627 + ], + [ + "▁Schmid", + -14.634235382080078 + ], + [ + "▁Mey", + -14.63428783416748 + ], + [ + "MOTION", + -14.634289741516112 + ], + [ + "▁(77", + -14.63441276550293 + ], + [ + "▁Waka", + -14.634428024291992 + ], + [ + "Salad", + -14.634456634521484 + ], + [ + "▁harboring", + -14.634495735168455 + ], + [ + "hugger", + -14.634516716003418 + ], + [ + "Experiment", + -14.634517669677734 + ], + [ + "▁(63", + -14.634538650512695 + ], + [ + "glam", + -14.634560585021973 + ], + [ + "▁Tj", + -14.634581565856934 + ], + [ + "▁waft", + -14.634624481201172 + ], + [ + "Nah", + -14.634657859802246 + ], + [ + "▁Nestor", + -14.63466739654541 + ], + [ + "▁contextualize", + -14.634842872619627 + ], + [ + "▁08:1", + -14.634868621826172 + ], + [ + "Tite", + -14.634904861450195 + ], + [ + "▁WMD", + -14.634925842285156 + ], + [ + "SPF", + -14.634936332702637 + ], + [ + "▁sulk", + -14.63496208190918 + ], + [ + "▁Taran", + -14.63497257232666 + ], + [ + "decorated", + -14.634997367858888 + ], + [ + "hena", + -14.635071754455566 + ], + [ + "▁Lighted", + -14.635157585144045 + ], + [ + "▁JoAnn", + -14.63521671295166 + ], + [ + "separate", + -14.635226249694824 + ], + [ + "▁FUEL", + -14.635234832763672 + ], + [ + "iPod", + -14.63524055480957 + ], + [ + "▁10.9", + -14.635272026062012 + ], + [ + "▁Poz", + -14.635320663452148 + ], + [ + "▁Leela", + -14.635336875915527 + ], + [ + "▁Ayu", + -14.635395050048828 + ], + [ + "▁Cinna", + -14.635451316833496 + ], + [ + "▁Starz", + -14.63559913635254 + ], + [ + "▁Spir", + -14.635703086853027 + ], + [ + ".02.201", + -14.635765075683594 + ], + [ + "Leverage", + -14.635787963867188 + ], + [ + "Approx", + -14.635825157165527 + ], + [ + "Collaboration", + -14.63582706451416 + ], + [ + "Trevor", + -14.635828018188477 + ], + [ + "Mercedes", + -14.635835647583008 + ], + [ + "Samantha", + -14.635836601257324 + ], + [ + "boutique", + -14.635838508605955 + ], + [ + "Czech", + -14.63584041595459 + ], + [ + "automated", + -14.635842323303224 + ], + [ + "faculty", + 
-14.635842323303224 + ], + [ + "receptor", + -14.63585090637207 + ], + [ + "yyy", + -14.63589572906494 + ], + [ + "stationary", + -14.635923385620115 + ], + [ + "▁HRH", + -14.635951042175291 + ], + [ + "preferred", + -14.635972023010254 + ], + [ + "▁Yell", + -14.635981559753418 + ], + [ + "Nearby", + -14.635991096496582 + ], + [ + "▁optimizer", + -14.635998725891112 + ], + [ + "Caption", + -14.636009216308594 + ], + [ + "Glue", + -14.636034965515137 + ], + [ + "atrice", + -14.636035919189451 + ], + [ + "Pharmacy", + -14.63605499267578 + ], + [ + "▁‘90", + -14.636078834533691 + ], + [ + "groove", + -14.636090278625488 + ], + [ + "Headquarter", + -14.636137008666992 + ], + [ + "7/16", + -14.636148452758787 + ], + [ + "▁elongate", + -14.636187553405762 + ], + [ + "Tutor", + -14.63626766204834 + ], + [ + "▁encircle", + -14.63629913330078 + ], + [ + "LEP", + -14.636357307434082 + ], + [ + "LIKE", + -14.636361122131348 + ], + [ + "▁embers", + -14.636380195617676 + ], + [ + "AGA", + -14.636382102966309 + ], + [ + "▁Hetero", + -14.636393547058104 + ], + [ + "nuptial", + -14.63640308380127 + ], + [ + "regarding", + -14.636404991149902 + ], + [ + "ully", + -14.6364107131958 + ], + [ + "GTC", + -14.63641357421875 + ], + [ + "Spoke", + -14.63642120361328 + ], + [ + "controller", + -14.636422157287598 + ], + [ + "6:50", + -14.636430740356444 + ], + [ + "Raja", + -14.636444091796877 + ], + [ + "yssa", + -14.636445999145508 + ], + [ + "▁Beng", + -14.636456489562988 + ], + [ + "MOL", + -14.636467933654783 + ], + [ + "▁Aja", + -14.636487007141112 + ], + [ + "▁Distiller", + -14.63652801513672 + ], + [ + "▁Draco", + -14.636571884155272 + ], + [ + "▁monolith", + -14.636622428894045 + ], + [ + "▁misjudg", + -14.636627197265623 + ], + [ + "▁retroactive", + -14.636630058288574 + ], + [ + "Luis", + -14.636711120605469 + ], + [ + "6.4%", + -14.636716842651367 + ], + [ + "▁Nisha", + -14.636774063110352 + ], + [ + "▁AUSTRALIA", + -14.636786460876465 + ], + [ + "▁Drayton", + -14.636786460876465 + ], + [ + "▁McManus", + -14.636786460876465 + ], + [ + "▁Tupperware", + -14.636786460876465 + ], + [ + "▁Twickenham", + -14.636786460876465 + ], + [ + "▁aggrieved", + -14.636786460876465 + ], + [ + "▁cervix", + -14.636786460876465 + ], + [ + "▁crumpled", + -14.636786460876465 + ], + [ + "▁diocesan", + -14.636786460876465 + ], + [ + "▁manipulator", + -14.636786460876465 + ], + [ + "▁potentiometer", + -14.636786460876465 + ], + [ + "▁rectification", + -14.636786460876465 + ], + [ + "▁talisman", + -14.636786460876465 + ], + [ + "▁teriyaki", + -14.636786460876465 + ], + [ + "▁throbbing", + -14.636786460876465 + ], + [ + "▁wednesday", + -14.636786460876465 + ], + [ + "Jealous", + -14.63678741455078 + ], + [ + "▁Belichick", + -14.63678741455078 + ], + [ + "▁Complementary", + -14.63678741455078 + ], + [ + "▁Limpopo", + -14.63678741455078 + ], + [ + "▁Souvenir", + -14.63678741455078 + ], + [ + "▁beguiling", + -14.63678741455078 + ], + [ + "▁javelin", + -14.63678741455078 + ], + [ + "▁Brigitte", + -14.636788368225098 + ], + [ + "▁evasive", + -14.636788368225098 + ], + [ + "▁homophobic", + -14.636788368225098 + ], + [ + "tracranial", + -14.636789321899414 + ], + [ + "▁Wilkerson", + -14.63679027557373 + ], + [ + "▁handiwork", + -14.63679027557373 + ], + [ + "▁JAPAN", + -14.636791229248049 + ], + [ + "▁nouveau", + -14.636792182922363 + ], + [ + "▁Mulligan", + -14.636794090270996 + ], + [ + "▁Gallatin", + -14.636795997619627 + ], + [ + "▁Shoreditch", + -14.636795997619627 + ], + [ + "▁Cartwright", + -14.636797904968262 + ], + [ + 
"▁Neonatal", + -14.636801719665527 + ], + [ + "▁Hov", + -14.63680648803711 + ], + [ + "▁Dentures", + -14.636808395385742 + ], + [ + "▁dabbling", + -14.636811256408691 + ], + [ + "▁BRICS", + -14.63681411743164 + ], + [ + "▁trekkers", + -14.63681411743164 + ], + [ + "▁magnate", + -14.636817932128906 + ], + [ + "▁Monsoon", + -14.63681983947754 + ], + [ + "▁Geiger", + -14.636822700500488 + ], + [ + "▁Garnier", + -14.636823654174805 + ], + [ + "TfL", + -14.636832237243652 + ], + [ + "▁immaterial", + -14.636833190917969 + ], + [ + "▁Rajput", + -14.636834144592283 + ], + [ + "▁Clarinet", + -14.636852264404297 + ], + [ + "▁housewares", + -14.636855125427246 + ], + [ + "▁OASIS", + -14.636862754821776 + ], + [ + "▁catnip", + -14.636868476867676 + ], + [ + "▁bronchial", + -14.636869430541992 + ], + [ + "▁PARKING", + -14.636874198913574 + ], + [ + "▁Handrail", + -14.636885643005373 + ], + [ + "▁Hazelnut", + -14.636899948120115 + ], + [ + "▁Berkley", + -14.636900901794434 + ], + [ + "▁Linksys", + -14.63691234588623 + ], + [ + "Horrible", + -14.63692569732666 + ], + [ + "▁Longitude", + -14.63693904876709 + ], + [ + "▁Gaal", + -14.63694667816162 + ], + [ + "▁fretting", + -14.636961936950684 + ], + [ + "▁Multipurpose", + -14.636970520019531 + ], + [ + "Screw", + -14.636977195739746 + ], + [ + "▁Jiffy", + -14.636978149414062 + ], + [ + "▁rebalancing", + -14.636981964111328 + ], + [ + "▁sangria", + -14.637001991271973 + ], + [ + "▁Wastewater", + -14.637008666992188 + ], + [ + "▁Patrice", + -14.6370267868042 + ], + [ + "Offici", + -14.637042045593262 + ], + [ + "offering", + -14.63705062866211 + ], + [ + "▁suitors", + -14.637056350708008 + ], + [ + "▁Akh", + -14.637120246887209 + ], + [ + "▁Emilie", + -14.63713836669922 + ], + [ + "▁Impulse", + -14.637149810791016 + ], + [ + "▁Informati", + -14.63715362548828 + ], + [ + "▁Fjord", + -14.63718605041504 + ], + [ + "▁Handsome", + -14.637188911437988 + ], + [ + "▁Delano", + -14.63719081878662 + ], + [ + "▁Choc", + -14.637203216552734 + ], + [ + "▁faceless", + -14.6372652053833 + ], + [ + "▁sua", + -14.637284278869627 + ], + [ + "▁Benito", + -14.637289047241213 + ], + [ + "Syrian", + -14.6373291015625 + ], + [ + "▁rearrangement", + -14.637337684631348 + ], + [ + "▁buoyed", + -14.637338638305664 + ], + [ + "▁Christening", + -14.637341499328612 + ], + [ + "zhen", + -14.637392044067385 + ], + [ + "▁Almanac", + -14.637435913085938 + ], + [ + "▁Cooley", + -14.637445449829102 + ], + [ + "▁MIPS", + -14.637481689453123 + ], + [ + "▁Huffman", + -14.637490272521973 + ], + [ + "▁Krystal", + -14.63753604888916 + ], + [ + "ichen", + -14.637560844421388 + ], + [ + "▁playability", + -14.637614250183104 + ], + [ + "▁Williamsport", + -14.637681007385254 + ], + [ + "▁Intercept", + -14.637683868408203 + ], + [ + "probe", + -14.63771629333496 + ], + [ + "????????????", + -14.637778282165527 + ], + [ + "Aloe", + -14.637825012207031 + ], + [ + "Larger", + -14.637869834899902 + ], + [ + "▁premiership", + -14.637882232666016 + ], + [ + "clothing", + -14.63788604736328 + ], + [ + "▁undersigned", + -14.637887001037598 + ], + [ + "▁Mahon", + -14.637901306152344 + ], + [ + "▁Hitman", + -14.637917518615724 + ], + [ + "▁Permanente", + -14.637946128845217 + ], + [ + "▁liber", + -14.637958526611328 + ], + [ + "▁Homeowner", + -14.637964248657228 + ], + [ + "▁Meaningful", + -14.638001441955566 + ], + [ + "▁Xtra", + -14.638075828552246 + ], + [ + "▁FRAN", + -14.638100624084473 + ], + [ + "▁Playtech", + -14.638134002685549 + ], + [ + "Wir", + -14.63816738128662 + ], + [ + "▁Bayview", + 
-14.638309478759766 + ], + [ + "▁Huey", + -14.638323783874512 + ], + [ + "▁Buda", + -14.638398170471191 + ], + [ + "WEL", + -14.638399124145508 + ], + [ + "▁capitalisation", + -14.638420104980469 + ], + [ + "pigmentation", + -14.638506889343262 + ], + [ + "seasoned", + -14.638513565063477 + ], + [ + "▁Helio", + -14.63852882385254 + ], + [ + "▁Eni", + -14.638571739196776 + ], + [ + "▁RPI", + -14.63858413696289 + ], + [ + "▁lamenting", + -14.638587951660156 + ], + [ + "▁comming", + -14.638591766357422 + ], + [ + "▁scoreline", + -14.638602256774902 + ], + [ + "▁Burri", + -14.638622283935549 + ], + [ + "▁Hilltop", + -14.638631820678713 + ], + [ + "Airline", + -14.638656616210938 + ], + [ + "Highlight", + -14.63870334625244 + ], + [ + "falling", + -14.6387300491333 + ], + [ + "▁Pavers", + -14.638763427734377 + ], + [ + "ayev", + -14.63877010345459 + ], + [ + "▁Saud", + -14.638801574707031 + ], + [ + "▁Wilt", + -14.638839721679688 + ], + [ + "6:40", + -14.6388521194458 + ], + [ + "Avon", + -14.638921737670898 + ], + [ + "▁numerically", + -14.638934135437012 + ], + [ + "▁payee", + -14.638938903808594 + ], + [ + "▁Faroe", + -14.63894271850586 + ], + [ + "▁parsed", + -14.638945579528809 + ], + [ + "▁Designation", + -14.639090538024902 + ], + [ + "▁Reactive", + -14.639148712158203 + ], + [ + "1953", + -14.639150619506836 + ], + [ + "PARENT", + -14.639159202575684 + ], + [ + "▁$225", + -14.639161109924316 + ], + [ + "▁Balo", + -14.639189720153809 + ], + [ + "▁inkling", + -14.6392183303833 + ], + [ + "▁jawline", + -14.639256477355955 + ], + [ + "venir", + -14.639307022094728 + ], + [ + "liability", + -14.639312744140623 + ], + [ + "▁THB", + -14.63933563232422 + ], + [ + "nanometer", + -14.63944149017334 + ], + [ + "Hank", + -14.639472007751465 + ], + [ + "▁300%", + -14.639491081237791 + ], + [ + "SYNC", + -14.63951301574707 + ], + [ + "omme", + -14.639532089233398 + ], + [ + "lijk", + -14.639573097229004 + ], + [ + "▁Waterman", + -14.63959503173828 + ], + [ + "▁gouge", + -14.639599800109863 + ], + [ + "methane", + -14.639612197875977 + ], + [ + "▁Boi", + -14.639618873596191 + ], + [ + "WIRE", + -14.639832496643066 + ], + [ + "havn", + -14.6398344039917 + ], + [ + "▁mountaintop", + -14.6398344039917 + ], + [ + "alsa", + -14.640045166015623 + ], + [ + "feat", + -14.640165328979492 + ], + [ + "huri", + -14.64023780822754 + ], + [ + "▁Roomba", + -14.64028263092041 + ], + [ + "klar", + -14.640288352966309 + ], + [ + "▁retrain", + -14.640419960021973 + ], + [ + "camping", + -14.640457153320312 + ], + [ + "iraj", + -14.640478134155272 + ], + [ + "▁seafarers", + -14.640491485595703 + ], + [ + "▁SFP", + -14.640495300292969 + ], + [ + "PHS", + -14.640602111816406 + ], + [ + "▁viewport", + -14.640924453735352 + ], + [ + "▁Southland", + -14.640961647033691 + ], + [ + "▁24.5", + -14.64113712310791 + ], + [ + "EEN", + -14.64128875732422 + ], + [ + "teil", + -14.641386032104492 + ], + [ + "▁authorise", + -14.641404151916504 + ], + [ + "▁Sumi", + -14.64141082763672 + ], + [ + "▁Zine", + -14.641453742980955 + ], + [ + "▁Weimar", + -14.64162826538086 + ], + [ + "Russ", + -14.64163875579834 + ], + [ + "▁countrymen", + -14.64163875579834 + ], + [ + "context", + -14.641693115234377 + ], + [ + "▁McNeil", + -14.641717910766602 + ], + [ + "▁ABT", + -14.641846656799316 + ], + [ + "beh", + -14.642030715942385 + ], + [ + "▁Kasi", + -14.64214324951172 + ], + [ + "▁Highlanders", + -14.642229080200195 + ], + [ + "PLR", + -14.642264366149902 + ], + [ + "▁Colli", + -14.642309188842772 + ], + [ + "▁scrutinise", + -14.64233684539795 + 
], + [ + "wicking", + -14.642374992370604 + ], + [ + "honda", + -14.642492294311523 + ], + [ + "▁FPL", + -14.642502784729004 + ], + [ + "▁mortise", + -14.64255428314209 + ], + [ + "▁amply", + -14.642601013183594 + ], + [ + "▁Gable", + -14.642617225646973 + ], + [ + "▁McGovern", + -14.642619132995604 + ], + [ + "▁Vip", + -14.642658233642578 + ], + [ + "▁Neuron", + -14.642711639404297 + ], + [ + "▁Hsu", + -14.642719268798828 + ], + [ + "bated", + -14.64272403717041 + ], + [ + "citation", + -14.642732620239258 + ], + [ + "lessons", + -14.642757415771484 + ], + [ + "icula", + -14.64284324645996 + ], + [ + "▁COLD", + -14.64296054840088 + ], + [ + "forever", + -14.643075942993164 + ], + [ + "inari", + -14.643077850341797 + ], + [ + "▁slurp", + -14.643095016479492 + ], + [ + "▁steeple", + -14.643158912658691 + ], + [ + "Continental", + -14.643281936645508 + ], + [ + "orbit", + -14.643299102783203 + ], + [ + "IDO", + -14.643352508544922 + ], + [ + "naro", + -14.64335823059082 + ], + [ + "shawn", + -14.643394470214844 + ], + [ + "▁Swain", + -14.643400192260742 + ], + [ + "nsk", + -14.643426895141602 + ], + [ + "▁Deny", + -14.643458366394045 + ], + [ + "Signing", + -14.643494606018066 + ], + [ + "▁Airbag", + -14.643620491027832 + ], + [ + "referential", + -14.64362335205078 + ], + [ + "▁Fennel", + -14.643630981445312 + ], + [ + "Peri", + -14.643631935119627 + ], + [ + "▁COMPA", + -14.643692016601562 + ], + [ + "Medic", + -14.643757820129396 + ], + [ + "▁Yaw", + -14.643786430358888 + ], + [ + "wielding", + -14.643798828125 + ], + [ + "Subscription", + -14.643810272216797 + ], + [ + "Difference", + -14.643813133239746 + ], + [ + "Lavender", + -14.643823623657228 + ], + [ + "principle", + -14.643824577331545 + ], + [ + "procedure", + -14.643826484680176 + ], + [ + "Conflict", + -14.643827438354492 + ], + [ + "Administration", + -14.643841743469238 + ], + [ + "Galaxy", + -14.643842697143556 + ], + [ + "Sunshine", + -14.643844604492188 + ], + [ + "purchasing", + -14.64384651184082 + ], + [ + "Certification", + -14.64384937286377 + ], + [ + "FIRST", + -14.643853187561035 + ], + [ + "1965", + -14.643875122070312 + ], + [ + "Nepal", + -14.64388370513916 + ], + [ + "Banana", + -14.643926620483398 + ], + [ + "STEIN", + -14.643969535827637 + ], + [ + "▁Fraction", + -14.643986701965332 + ], + [ + "▁annotate", + -14.644004821777344 + ], + [ + "▁flipper", + -14.64400577545166 + ], + [ + "Cohen", + -14.644006729125977 + ], + [ + "zoic", + -14.644027709960938 + ], + [ + "nion", + -14.644036293029783 + ], + [ + "responsibility", + -14.644037246704102 + ], + [ + "5600", + -14.64404010772705 + ], + [ + "▁nub", + -14.64405345916748 + ], + [ + "▁$30.00", + -14.644064903259276 + ], + [ + "▁backstop", + -14.644083976745604 + ], + [ + "parameter", + -14.644219398498535 + ], + [ + "everyday", + -14.644250869750977 + ], + [ + "Cardi", + -14.64428424835205 + ], + [ + "▁Aku", + -14.644314765930176 + ], + [ + "Testimonial", + -14.644317626953123 + ], + [ + "occupier", + -14.644317626953123 + ], + [ + "▁ALP", + -14.644343376159668 + ], + [ + "▁Sapien", + -14.64438819885254 + ], + [ + "▁congesti", + -14.644468307495115 + ], + [ + "nickel", + -14.644489288330078 + ], + [ + "▁bourgeois", + -14.644502639770508 + ], + [ + "Grind", + -14.644505500793455 + ], + [ + "Ghana", + -14.644542694091797 + ], + [ + "INFRINGEMENT", + -14.644598007202148 + ], + [ + "Plagiarism", + -14.644598960876465 + ], + [ + "matopoietic", + -14.644598960876465 + ], + [ + "▁844-222-4922", + -14.644598960876465 + ], + [ + "▁CRISPR", + -14.644598960876465 + ], + 
[ + "▁Insignia", + -14.644598960876465 + ], + [ + "▁Karlsruhe", + -14.644598960876465 + ], + [ + "▁MacMillan", + -14.644598960876465 + ], + [ + "▁Orkney", + -14.644598960876465 + ], + [ + "▁Prodigy", + -14.644598960876465 + ], + [ + "▁Punisher", + -14.644598960876465 + ], + [ + "▁Scituate", + -14.644598960876465 + ], + [ + "▁Sriracha", + -14.644598960876465 + ], + [ + "▁Waukesha", + -14.644598960876465 + ], + [ + "▁estuaries", + -14.644598960876465 + ], + [ + "▁pernicious", + -14.644598960876465 + ], + [ + "▁sirloin", + -14.644598960876465 + ], + [ + "▁unspeakable", + -14.644598960876465 + ], + [ + "▁Arbeit", + -14.64459991455078 + ], + [ + "▁dremel", + -14.64459991455078 + ], + [ + "▁votive", + -14.64459991455078 + ], + [ + "▁Brethren", + -14.644600868225098 + ], + [ + "▁Chamonix", + -14.644600868225098 + ], + [ + "▁Engraving", + -14.644601821899414 + ], + [ + "DOCUMENT", + -14.644603729248049 + ], + [ + "▁ischemia", + -14.644603729248049 + ], + [ + "▁cyborg", + -14.644604682922363 + ], + [ + "▁Escrow", + -14.64460563659668 + ], + [ + "▁Yangtze", + -14.644606590270996 + ], + [ + "▁fairgrounds", + -14.644607543945312 + ], + [ + "▁Neurological", + -14.644618034362791 + ], + [ + "▁PokerStars", + -14.644618034362791 + ], + [ + "▁Clovis", + -14.644619941711426 + ], + [ + "▁DOPING", + -14.64462184906006 + ], + [ + "▁Roald", + -14.644624710083008 + ], + [ + "▁Fowl", + -14.64463710784912 + ], + [ + "▁volition", + -14.644639015197754 + ], + [ + "▁glamping", + -14.644641876220703 + ], + [ + "▁Pringle", + -14.644646644592283 + ], + [ + "▁Histories", + -14.64465045928955 + ], + [ + "▁Secular", + -14.644652366638184 + ], + [ + "▁Huntsman", + -14.644660949707031 + ], + [ + "Rural", + -14.644668579101562 + ], + [ + "▁Starburst", + -14.644670486450195 + ], + [ + "▁waistcoat", + -14.644671440124512 + ], + [ + "▁Pelham", + -14.644675254821776 + ], + [ + "▁Evacu", + -14.644680976867676 + ], + [ + "▁Parlor", + -14.644697189331056 + ], + [ + "▁trudge", + -14.64470386505127 + ], + [ + "▁divestment", + -14.644719123840332 + ], + [ + "CDS", + -14.644721031188965 + ], + [ + "▁Parrish", + -14.644728660583496 + ], + [ + "Finland", + -14.64473819732666 + ], + [ + "▁shockwave", + -14.644746780395508 + ], + [ + "▁concealment", + -14.644761085510254 + ], + [ + "▁Curated", + -14.644767761230469 + ], + [ + "▁Tarpon", + -14.644794464111328 + ], + [ + "▁Gopher", + -14.644827842712402 + ], + [ + "▁Micromax", + -14.64482879638672 + ], + [ + "▁AirAsia", + -14.644834518432615 + ], + [ + "▁unassisted", + -14.64484691619873 + ], + [ + "▁Truckee", + -14.64486312866211 + ], + [ + "▁hubris", + -14.644886016845703 + ], + [ + "▁Nacho", + -14.644916534423828 + ], + [ + "yrene", + -14.644923210144045 + ], + [ + "▁2014/15", + -14.64493465423584 + ], + [ + "Period", + -14.644946098327637 + ], + [ + "▁gunned", + -14.64494800567627 + ], + [ + "▁Macomb", + -14.644957542419434 + ], + [ + "▁sundeck", + -14.644976615905762 + ], + [ + "▁Beaverton", + -14.645000457763672 + ], + [ + "▁Jud", + -14.645014762878418 + ], + [ + "▁unconnected", + -14.64501667022705 + ], + [ + "▁Loftus", + -14.645026206970217 + ], + [ + "▁speedometer", + -14.645073890686035 + ], + [ + "▁Dima", + -14.645087242126465 + ], + [ + "▁Sapa", + -14.645095825195312 + ], + [ + "▁devolved", + -14.645096778869627 + ], + [ + "▁Mullin", + -14.645166397094728 + ], + [ + "▁abolitionist", + -14.645170211791992 + ], + [ + "▁zealand", + -14.645170211791992 + ], + [ + "▁OSCE", + -14.645174980163574 + ], + [ + "▁gully", + -14.645186424255373 + ], + [ + "▁Teaser", + -14.645223617553713 + ], 
+ [ + "implant", + -14.645252227783203 + ], + [ + "▁Trance", + -14.64527702331543 + ], + [ + "Trap", + -14.645278930664062 + ], + [ + "▁spewing", + -14.645302772521973 + ], + [ + "Reclining", + -14.645303726196287 + ], + [ + "vapor", + -14.645306587219238 + ], + [ + "▁extractive", + -14.64531135559082 + ], + [ + "Henri", + -14.64538860321045 + ], + [ + "▁Homelessness", + -14.64540195465088 + ], + [ + "▁Nikolai", + -14.645414352416992 + ], + [ + "▁ISV", + -14.64541721343994 + ], + [ + "▁1821", + -14.64549160003662 + ], + [ + "▁footbridge", + -14.645515441894531 + ], + [ + "▁underinsured", + -14.64552116394043 + ], + [ + "▁stowed", + -14.645527839660645 + ], + [ + "▁Hunan", + -14.645569801330566 + ], + [ + "▁cytotec", + -14.64557933807373 + ], + [ + "▁precipitated", + -14.645648002624512 + ], + [ + "▁BCG", + -14.645682334899902 + ], + [ + "▁flinch", + -14.645685195922852 + ], + [ + "▁Mitsu", + -14.64569091796875 + ], + [ + "▁birthright", + -14.64573097229004 + ], + [ + "▁stifled", + -14.645811080932615 + ], + [ + "▁Washable", + -14.645828247070312 + ], + [ + "▁Neme", + -14.645831108093262 + ], + [ + "pter", + -14.645865440368652 + ], + [ + "▁+91", + -14.645872116088867 + ], + [ + "▁especial", + -14.645947456359863 + ], + [ + "▁maxed", + -14.64596462249756 + ], + [ + "▁succumbing", + -14.645966529846191 + ], + [ + "▁ATLAS", + -14.645983695983888 + ], + [ + "1939", + -14.64601993560791 + ], + [ + "▁Roch", + -14.646036148071287 + ], + [ + "▁Gozo", + -14.646058082580566 + ], + [ + "Lifetime", + -14.64612865447998 + ], + [ + "▁Tapas", + -14.646160125732422 + ], + [ + "▁misread", + -14.646199226379396 + ], + [ + "hahaha", + -14.646252632141112 + ], + [ + "mechani", + -14.64627456665039 + ], + [ + "Briefly", + -14.6463041305542 + ], + [ + "hahahaha", + -14.6463041305542 + ], + [ + "▁Yule", + -14.64636516571045 + ], + [ + "▁dewy", + -14.64640998840332 + ], + [ + "▁acceptability", + -14.64641571044922 + ], + [ + "▁Payless", + -14.64656925201416 + ], + [ + "▁Kiva", + -14.646629333496094 + ], + [ + "▁limping", + -14.646635055541992 + ], + [ + "TDC", + -14.646724700927734 + ], + [ + "DAT", + -14.646899223327637 + ], + [ + "▁Buc", + -14.646934509277344 + ], + [ + "Seated", + -14.646976470947266 + ], + [ + "▁Mecha", + -14.647052764892578 + ], + [ + "ripple", + -14.647083282470703 + ], + [ + "▁NPT", + -14.647181510925291 + ], + [ + "▁bidet", + -14.647207260131836 + ], + [ + "motiv", + -14.647263526916504 + ], + [ + "arion", + -14.647268295288086 + ], + [ + "SEP", + -14.647320747375488 + ], + [ + "pharmaceutical", + -14.647321701049805 + ], + [ + "RECO", + -14.647421836853027 + ], + [ + "▁tamed", + -14.647433280944824 + ], + [ + "cale", + -14.647568702697754 + ], + [ + "counting", + -14.647690773010254 + ], + [ + "Corin", + -14.647696495056152 + ], + [ + "Anytime", + -14.647842407226562 + ], + [ + "rrrr", + -14.64785099029541 + ], + [ + "▁Yap", + -14.647857666015623 + ], + [ + "Considered", + -14.647900581359863 + ], + [ + "▁interviewees", + -14.648014068603516 + ], + [ + "▁2/1", + -14.648038864135742 + ], + [ + "▁Hamel", + -14.648049354553224 + ], + [ + "▁persisting", + -14.648073196411133 + ], + [ + "SHP", + -14.648103713989258 + ], + [ + "textual", + -14.64814567565918 + ], + [ + "▁stipulate", + -14.648149490356444 + ], + [ + "▁Doula", + -14.648241996765137 + ], + [ + "▁foamy", + -14.648310661315918 + ], + [ + "▁Brava", + -14.64831256866455 + ], + [ + "▁moisten", + -14.6483154296875 + ], + [ + "JAX", + -14.648388862609863 + ], + [ + "▁Maximo", + -14.648396492004396 + ], + [ + "2:27", + 
-14.648526191711426 + ], + [ + "massage", + -14.648661613464355 + ], + [ + "▁mobo", + -14.64873218536377 + ], + [ + "▁orthotic", + -14.64880084991455 + ], + [ + "liar", + -14.648865699768066 + ], + [ + "▁MCM", + -14.648886680603027 + ], + [ + "▁Newsom", + -14.648914337158203 + ], + [ + "▁Wesson", + -14.64892578125 + ], + [ + "▁Zeke", + -14.648968696594238 + ], + [ + "▁skated", + -14.6489839553833 + ], + [ + "Grad", + -14.649057388305664 + ], + [ + "▁Flee", + -14.649117469787598 + ], + [ + "▁vesting", + -14.649285316467283 + ], + [ + "3:18", + -14.6492919921875 + ], + [ + "▁Bop", + -14.64930820465088 + ], + [ + "7:50", + -14.649410247802734 + ], + [ + "▁conjunct", + -14.649499893188477 + ], + [ + "▁Acqua", + -14.64950466156006 + ], + [ + "▁Daf", + -14.649530410766602 + ], + [ + "▁Matty", + -14.64956760406494 + ], + [ + "▁reek", + -14.649660110473633 + ], + [ + "▁ACV", + -14.649665832519531 + ], + [ + "Gang", + -14.64974880218506 + ], + [ + "▁Sanger", + -14.649858474731444 + ], + [ + "▁SRM", + -14.649887084960938 + ], + [ + "ugging", + -14.649897575378418 + ], + [ + "_0", + -14.649913787841797 + ], + [ + "Denis", + -14.649986267089844 + ], + [ + "▁foodstuff", + -14.650025367736816 + ], + [ + "▁Judgement", + -14.650144577026367 + ], + [ + "POINT", + -14.650240898132324 + ], + [ + "9:20", + -14.650264739990234 + ], + [ + "Habitat", + -14.650320053100586 + ], + [ + "▁Khat", + -14.650355339050291 + ], + [ + "LLC", + -14.650432586669922 + ], + [ + "0]", + -14.650525093078612 + ], + [ + "▁Komp", + -14.650529861450195 + ], + [ + "ophy", + -14.650609970092772 + ], + [ + "▁12+", + -14.650609970092772 + ], + [ + "▁accidently", + -14.650700569152832 + ], + [ + "trotter", + -14.650917053222656 + ], + [ + "▁guttering", + -14.65098762512207 + ], + [ + "1:1", + -14.651068687438965 + ], + [ + "kele", + -14.651086807250977 + ], + [ + "roast", + -14.651284217834473 + ], + [ + "temper", + -14.651284217834473 + ], + [ + "▁OHS", + -14.651366233825684 + ], + [ + "▁traditionalist", + -14.651371002197266 + ], + [ + "embar", + -14.651410102844238 + ], + [ + "▁eyelet", + -14.65163803100586 + ], + [ + "▁investigational", + -14.65176773071289 + ], + [ + "▁Gna", + -14.651777267456056 + ], + [ + "familiar", + -14.651835441589355 + ], + [ + "initiated", + -14.651835441589355 + ], + [ + "diabetic", + -14.651880264282228 + ], + [ + "council", + -14.65188694000244 + ], + [ + "Substitution", + -14.651893615722656 + ], + [ + "Capacity", + -14.651895523071287 + ], + [ + "Huawei", + -14.651899337768556 + ], + [ + "kindergarten", + -14.651899337768556 + ], + [ + "enforcement", + -14.65190601348877 + ], + [ + "affordable", + -14.651922225952148 + ], + [ + "Goodbye", + -14.651923179626465 + ], + [ + "Dial", + -14.651932716369627 + ], + [ + "Documentation", + -14.651968002319336 + ], + [ + "▁NVR", + -14.652026176452637 + ], + [ + "EVEN", + -14.652034759521484 + ], + [ + "Casual", + -14.652106285095217 + ], + [ + "▁Fifa", + -14.65212059020996 + ], + [ + "Obstetric", + -14.652192115783691 + ], + [ + "▁11/2", + -14.652267456054688 + ], + [ + "▁shit", + -14.652278900146484 + ], + [ + "▁Caval", + -14.652288436889648 + ], + [ + "▁softwood", + -14.652315139770508 + ], + [ + "1:28", + -14.65237045288086 + ], + [ + "wanda", + -14.65237808227539 + ], + [ + "Constructed", + -14.652388572692873 + ], + [ + "CTC", + -14.652393341064451 + ], + [ + "HOT", + -14.652410507202148 + ], + [ + "xxxxxx", + -14.652437210083008 + ], + [ + "degradable", + -14.652440071105955 + ], + [ + "Buffer", + -14.652453422546388 + ], + [ + "Nostalgia", + 
-14.652472496032717 + ], + [ + "Oréal", + -14.652472496032717 + ], + [ + "▁Benitez", + -14.652472496032717 + ], + [ + "▁Definitive", + -14.652472496032717 + ], + [ + "▁Huckabee", + -14.652472496032717 + ], + [ + "▁Jumeirah", + -14.652472496032717 + ], + [ + "▁Londonderry", + -14.652472496032717 + ], + [ + "▁Raritan", + -14.652472496032717 + ], + [ + "▁Transmitter", + -14.652472496032717 + ], + [ + "▁arduino", + -14.652472496032717 + ], + [ + "▁asymptomatic", + -14.652472496032717 + ], + [ + "▁enormity", + -14.652472496032717 + ], + [ + "▁foosball", + -14.652472496032717 + ], + [ + "▁harbinger", + -14.652472496032717 + ], + [ + "▁laundromat", + -14.652472496032717 + ], + [ + "▁plutonium", + -14.652472496032717 + ], + [ + "▁ASSOCIATION", + -14.652473449707031 + ], + [ + "▁Ogilvy", + -14.652473449707031 + ], + [ + "▁Rensselaer", + -14.652473449707031 + ], + [ + "▁capricious", + -14.652473449707031 + ], + [ + "▁jukebox", + -14.652473449707031 + ], + [ + "▁reuniting", + -14.652473449707031 + ], + [ + "▁malnourished", + -14.652474403381348 + ], + [ + "▁mobilisation", + -14.652474403381348 + ], + [ + "▁Endorsement", + -14.652475357055664 + ], + [ + "▁Neoprene", + -14.652475357055664 + ], + [ + "▁Pedersen", + -14.652475357055664 + ], + [ + "▁Slurry", + -14.652475357055664 + ], + [ + "▁Ziegler", + -14.65247631072998 + ], + [ + "▁anaesthesia", + -14.65247631072998 + ], + [ + "scorbic", + -14.652477264404297 + ], + [ + "Processor", + -14.65247917175293 + ], + [ + "▁Spooky", + -14.65247917175293 + ], + [ + "militariz", + -14.652480125427246 + ], + [ + "▁Gaylord", + -14.65248680114746 + ], + [ + "▁Wirral", + -14.652487754821776 + ], + [ + "▁(855)", + -14.65248966217041 + ], + [ + "▁Dhawan", + -14.65248966217041 + ], + [ + "FOLD", + -14.652493476867676 + ], + [ + "▁Obsession", + -14.652493476867676 + ], + [ + "▁Sunnyvale", + -14.652496337890623 + ], + [ + "▁urchin", + -14.65249729156494 + ], + [ + "▁trinity", + -14.65250015258789 + ], + [ + "▁Terrence", + -14.652501106262209 + ], + [ + "▁EcoBoost", + -14.652505874633787 + ], + [ + "▁Kermit", + -14.652506828308104 + ], + [ + "▁snagging", + -14.652509689331056 + ], + [ + "▁measly", + -14.652511596679688 + ], + [ + "▁plexus", + -14.652511596679688 + ], + [ + "▁disinformation", + -14.65251922607422 + ], + [ + "▁(1979)", + -14.652520179748535 + ], + [ + "▁Proclamation", + -14.652520179748535 + ], + [ + "▁bayonet", + -14.652523040771484 + ], + [ + "▁Geoscience", + -14.652535438537598 + ], + [ + "Taco", + -14.65254020690918 + ], + [ + "▁fumbling", + -14.652541160583496 + ], + [ + "▁falsify", + -14.652544975280762 + ], + [ + "Notably", + -14.652548789978027 + ], + [ + "▁chickenpox", + -14.65255641937256 + ], + [ + "▁Matlab", + -14.65256118774414 + ], + [ + "▁VJ", + -14.65256404876709 + ], + [ + "▁Oppenheimer", + -14.65257740020752 + ], + [ + "▁Pavement", + -14.652580261230469 + ], + [ + "manufacturer", + -14.652582168579102 + ], + [ + "▁cricketers", + -14.652585983276367 + ], + [ + "▁koala", + -14.652588844299316 + ], + [ + "▁replenishment", + -14.652606010437012 + ], + [ + "Osmosis", + -14.652615547180176 + ], + [ + "Successfully", + -14.65263843536377 + ], + [ + "▁forgave", + -14.652645111083984 + ], + [ + "▁emigration", + -14.652647972106934 + ], + [ + "▁ranching", + -14.652647972106934 + ], + [ + "▁AGREEMENT", + -14.652654647827148 + ], + [ + "duo", + -14.65266227722168 + ], + [ + "▁Rajya", + -14.652676582336426 + ], + [ + "▁diction", + -14.652684211730955 + ], + [ + "▁DBMS", + -14.652719497680664 + ], + [ + "▁Panzer", + -14.652721405029297 + ], + [ + 
"▁vibrato", + -14.65272331237793 + ], + [ + "▁paradoxically", + -14.652734756469728 + ], + [ + "▁seabirds", + -14.65274429321289 + ], + [ + "▁Ferries", + -14.652780532836914 + ], + [ + "▁outbuildings", + -14.652795791625977 + ], + [ + "carving", + -14.652823448181152 + ], + [ + "▁disenfranchised", + -14.65284252166748 + ], + [ + "▁megaphone", + -14.652850151062012 + ], + [ + "REMEMBER", + -14.652851104736328 + ], + [ + "▁Foxtel", + -14.652907371520996 + ], + [ + "▁snowboarders", + -14.652913093566896 + ], + [ + "▁UMD", + -14.652924537658691 + ], + [ + "▁Witcher", + -14.652925491333008 + ], + [ + "▁espoused", + -14.65292739868164 + ], + [ + "▁Alvaro", + -14.65293312072754 + ], + [ + "Aerobic", + -14.65294361114502 + ], + [ + "▁Stationery", + -14.652961730957031 + ], + [ + "▁Wellcome", + -14.652983665466309 + ], + [ + "Figuring", + -14.653011322021484 + ], + [ + "▁striping", + -14.653087615966797 + ], + [ + "▁Muck", + -14.653122901916504 + ], + [ + "▁habitually", + -14.653157234191896 + ], + [ + "▁sigma", + -14.65317153930664 + ], + [ + "▁staycation", + -14.65317153930664 + ], + [ + "▁Aretha", + -14.653193473815918 + ], + [ + "Mild", + -14.653273582458496 + ], + [ + "▁plummeting", + -14.65329647064209 + ], + [ + "▁Flav", + -14.653386116027832 + ], + [ + "Directory", + -14.653389930725098 + ], + [ + "Comprising", + -14.653401374816896 + ], + [ + "▁Hotpoint", + -14.65340518951416 + ], + [ + "▁fittingly", + -14.653419494628906 + ], + [ + "▁moaning", + -14.653429985046388 + ], + [ + "▁misrepresent", + -14.653459548950195 + ], + [ + "▁noninvasive", + -14.653477668762209 + ], + [ + "▁shivering", + -14.653510093688965 + ], + [ + "▁unfollow", + -14.65351390838623 + ], + [ + "▁Slug", + -14.653541564941406 + ], + [ + "▁WorldCat", + -14.653586387634276 + ], + [ + "▁Boden", + -14.653648376464844 + ], + [ + "▁DIYer", + -14.653731346130373 + ], + [ + "▁Crumb", + -14.653746604919434 + ], + [ + "comedy", + -14.653768539428713 + ], + [ + "▁Esse", + -14.653776168823242 + ], + [ + "Pruning", + -14.653788566589355 + ], + [ + "▁reliving", + -14.653861045837402 + ], + [ + "▁inwardly", + -14.653864860534668 + ], + [ + "▁Sealant", + -14.6538667678833 + ], + [ + "▁dampening", + -14.653874397277832 + ], + [ + "▁Septic", + -14.65388011932373 + ], + [ + "▁vetoed", + -14.653949737548828 + ], + [ + "▁weakly", + -14.653970718383787 + ], + [ + "▁1829", + -14.65400218963623 + ], + [ + "▁exterminate", + -14.654060363769531 + ], + [ + "▁Assigned", + -14.654112815856934 + ], + [ + "Tron", + -14.654168128967283 + ], + [ + "Seth", + -14.654203414916992 + ], + [ + "▁chuckled", + -14.654211044311523 + ], + [ + "▁Caron", + -14.654239654541016 + ], + [ + "▁Incoming", + -14.654289245605469 + ], + [ + "delic", + -14.654314994812012 + ], + [ + "Plast", + -14.654319763183594 + ], + [ + "▁deux", + -14.654370307922363 + ], + [ + "spiration", + -14.654437065124512 + ], + [ + "NGA", + -14.65444564819336 + ], + [ + "crossed", + -14.654464721679688 + ], + [ + "▁infringed", + -14.654587745666504 + ], + [ + "Hali", + -14.654618263244627 + ], + [ + "▁erecting", + -14.654641151428224 + ], + [ + "TEAM", + -14.654647827148438 + ], + [ + "chromatic", + -14.65464973449707 + ], + [ + "Millennials", + -14.654696464538574 + ], + [ + "▁resend", + -14.654742240905762 + ], + [ + "▁Sitter", + -14.654786109924316 + ], + [ + "Earring", + -14.654867172241213 + ], + [ + "▁Lehr", + -14.654967308044434 + ], + [ + "▁Gauri", + -14.655035018920898 + ], + [ + "▁Beal", + -14.655113220214844 + ], + [ + "▁earner", + -14.655232429504396 + ], + [ + "▁Southbank", + 
-14.655253410339355 + ], + [ + "3-16", + -14.655378341674805 + ], + [ + "▁Trucker", + -14.655407905578612 + ], + [ + "7:20", + -14.655627250671388 + ], + [ + "▁dieters", + -14.655682563781738 + ], + [ + "famil", + -14.655804634094238 + ], + [ + "▁Maura", + -14.655839920043944 + ], + [ + "▁Osmo", + -14.655940055847168 + ], + [ + "▁Wert", + -14.655973434448242 + ], + [ + "▁Layered", + -14.655975341796877 + ], + [ + "Highland", + -14.656044006347656 + ], + [ + "Assign", + -14.656061172485352 + ], + [ + "Sai", + -14.65606689453125 + ], + [ + "flour", + -14.656131744384766 + ], + [ + "affect", + -14.656140327453612 + ], + [ + "laria", + -14.656155586242676 + ], + [ + "▁Iridium", + -14.65615940093994 + ], + [ + "Connected", + -14.656167030334473 + ], + [ + "oglio", + -14.656192779541016 + ], + [ + "5.7%", + -14.656198501586914 + ], + [ + "Quarterback", + -14.656274795532228 + ], + [ + "Fern", + -14.656474113464355 + ], + [ + "glich", + -14.656484603881836 + ], + [ + "▁15.0", + -14.656530380249023 + ], + [ + "auke", + -14.656561851501465 + ], + [ + "▁Ryde", + -14.656588554382324 + ], + [ + "▁jeweller", + -14.656639099121094 + ], + [ + "▁Moul", + -14.656648635864258 + ], + [ + "▁Styled", + -14.656658172607422 + ], + [ + "▁Folly", + -14.656681060791016 + ], + [ + "▁Leona", + -14.656694412231444 + ], + [ + "▁soothed", + -14.656695365905762 + ], + [ + "▁idly", + -14.65671157836914 + ], + [ + "▁Duro", + -14.656725883483888 + ], + [ + "▁cataloging", + -14.65674114227295 + ], + [ + "further", + -14.656755447387695 + ], + [ + "▁stipulation", + -14.656757354736328 + ], + [ + "▁ESF", + -14.656822204589844 + ], + [ + "26%", + -14.656834602355955 + ], + [ + "▁$4.2", + -14.656834602355955 + ], + [ + "▁ALM", + -14.656949043273926 + ], + [ + "zuki", + -14.656989097595217 + ], + [ + "kio", + -14.657031059265137 + ], + [ + "▁eyelets", + -14.65703582763672 + ], + [ + "▁Cip", + -14.657095909118652 + ], + [ + "luke", + -14.657144546508787 + ], + [ + "kund", + -14.657339096069336 + ], + [ + "▁Gorda", + -14.657366752624512 + ], + [ + "▁Irishman", + -14.657383918762209 + ], + [ + "▁2-1/2", + -14.657564163208008 + ], + [ + "harris", + -14.657594680786133 + ], + [ + "▁Hooded", + -14.65762710571289 + ], + [ + "▁loner", + -14.657641410827637 + ], + [ + "LUM", + -14.65769863128662 + ], + [ + "14-0", + -14.657703399658203 + ], + [ + "▁Freehold", + -14.657873153686523 + ], + [ + "▁1992)", + -14.657919883728027 + ], + [ + "prayer", + -14.657923698425291 + ], + [ + "▁Sender", + -14.657940864562988 + ], + [ + "▁Universalist", + -14.657968521118164 + ], + [ + "embro", + -14.657988548278809 + ], + [ + "cheer", + -14.65801239013672 + ], + [ + "▁antennae", + -14.658032417297363 + ], + [ + "▁Croton", + -14.658052444458008 + ], + [ + "brad", + -14.65810775756836 + ], + [ + "▁TCR", + -14.6581449508667 + ], + [ + "▁Brio", + -14.658225059509276 + ], + [ + "passes", + -14.658275604248049 + ], + [ + "▁Summertime", + -14.658332824707031 + ], + [ + "loathing", + -14.658344268798828 + ], + [ + "▁Bibi", + -14.658367156982422 + ], + [ + "1935", + -14.658418655395508 + ], + [ + "▁Chha", + -14.658609390258787 + ], + [ + "HRP", + -14.65886116027832 + ], + [ + "▁Tern", + -14.658892631530762 + ], + [ + "▁Wix", + -14.658935546875 + ], + [ + "hika", + -14.6589994430542 + ], + [ + "▁Waa", + -14.65904712677002 + ], + [ + "goni", + -14.659106254577637 + ], + [ + "ucco", + -14.65916347503662 + ], + [ + "▁minut", + -14.659183502197266 + ], + [ + "9-14", + -14.659229278564451 + ], + [ + "circulation", + -14.659269332885742 + ], + [ + "Recognize", + 
-14.659367561340332 + ], + [ + "Conver", + -14.659380912780762 + ], + [ + "SDP", + -14.659438133239746 + ], + [ + "adic", + -14.659443855285645 + ], + [ + "electron", + -14.659473419189451 + ], + [ + "easter", + -14.659497261047363 + ], + [ + "▁Pharmaco", + -14.659512519836426 + ], + [ + "▁Lucca", + -14.659567832946776 + ], + [ + "▁combatants", + -14.659629821777344 + ], + [ + "canned", + -14.65963649749756 + ], + [ + "▁Lalit", + -14.659690856933594 + ], + [ + "▁1802", + -14.65973663330078 + ], + [ + "▁NCI", + -14.659798622131348 + ], + [ + "czynski", + -14.659845352172852 + ], + [ + "▁Swat", + -14.659903526306152 + ], + [ + "▁provisioned", + -14.659907341003418 + ], + [ + "liffe", + -14.659913063049316 + ], + [ + "ravi", + -14.659950256347656 + ], + [ + "Keen", + -14.659956932067873 + ], + [ + "▁wrangle", + -14.660001754760742 + ], + [ + "Daughter", + -14.66000270843506 + ], + [ + "fascist", + -14.660003662109377 + ], + [ + "Pregnancy", + -14.66001319885254 + ], + [ + "Appeal", + -14.660014152526855 + ], + [ + "Happiness", + -14.660018920898438 + ], + [ + "Deborah", + -14.660026550292969 + ], + [ + "Trigger", + -14.660028457641602 + ], + [ + "Cardinal", + -14.660033226013184 + ], + [ + "Pittsburgh", + -14.6600341796875 + ], + [ + "removable", + -14.66003704071045 + ], + [ + "Darren", + -14.660073280334473 + ], + [ + "kernel", + -14.660078048706056 + ], + [ + "ooooooo", + -14.66008758544922 + ], + [ + "passion", + -14.660088539123535 + ], + [ + "Wallpaper", + -14.660100936889648 + ], + [ + "Thumb", + -14.660115242004396 + ], + [ + "irritant", + -14.660127639770508 + ], + [ + "distant", + -14.660149574279783 + ], + [ + "Exhibition", + -14.660165786743164 + ], + [ + "console", + -14.660179138183594 + ], + [ + "▁merciless", + -14.66019344329834 + ], + [ + "▁Sina", + -14.660199165344238 + ], + [ + "Treasurer", + -14.660222053527832 + ], + [ + "Compete", + -14.660231590270996 + ], + [ + "Manufactured", + -14.660243034362791 + ], + [ + "exactly", + -14.660244941711426 + ], + [ + "murder", + -14.66026496887207 + ], + [ + "▁Taught", + -14.660272598266602 + ], + [ + "▁Berea", + -14.66029453277588 + ], + [ + "Potato", + -14.660304069519045 + ], + [ + "latte", + -14.66033172607422 + ], + [ + "▁Pigment", + -14.66036319732666 + ], + [ + "▁colonize", + -14.660369873046877 + ], + [ + "▁BEYOND", + -14.660408973693848 + ], + [ + "▁Calumet", + -14.660408973693848 + ], + [ + "▁Chauffeur", + -14.660408973693848 + ], + [ + "▁Dietitian", + -14.660408973693848 + ], + [ + "▁Horticultural", + -14.660408973693848 + ], + [ + "▁INCIDENTAL", + -14.660408973693848 + ], + [ + "▁McKinnon", + -14.660408973693848 + ], + [ + "▁Snohomish", + -14.660408973693848 + ], + [ + "▁Wollongong", + -14.660408973693848 + ], + [ + "▁depriving", + -14.660408973693848 + ], + [ + "▁dorothy", + -14.660408973693848 + ], + [ + "▁egalitarian", + -14.660408973693848 + ], + [ + "▁elopement", + -14.660408973693848 + ], + [ + "▁kombucha", + -14.660408973693848 + ], + [ + "▁placket", + -14.660408973693848 + ], + [ + "▁voodoo", + -14.660408973693848 + ], + [ + "▁Balochistan", + -14.660409927368164 + ], + [ + "▁Ghetto", + -14.660409927368164 + ], + [ + "▁Metabolism", + -14.660409927368164 + ], + [ + "▁Miraculous", + -14.660409927368164 + ], + [ + "▁Physiotherapist", + -14.660409927368164 + ], + [ + "▁Villiers", + -14.660409927368164 + ], + [ + "▁discursive", + -14.660409927368164 + ], + [ + "▁reshuffle", + -14.660409927368164 + ], + [ + "▁vitreous", + -14.660409927368164 + ], + [ + "amniotic", + -14.66041088104248 + ], + [ + "▁saddest", + 
-14.66041088104248 + ], + [ + "▁Dulwich", + -14.660411834716797 + ], + [ + "▁acquittal", + -14.660412788391112 + ], + [ + "▁hypnotis", + -14.66041374206543 + ], + [ + "▁pantheon", + -14.66041374206543 + ], + [ + "▁Yoruba", + -14.660414695739746 + ], + [ + "▁Koenig", + -14.660415649414062 + ], + [ + "Gizmo", + -14.66041660308838 + ], + [ + "▁APU", + -14.66041660308838 + ], + [ + "stearate", + -14.660417556762695 + ], + [ + "▁Dilemma", + -14.660420417785645 + ], + [ + "▁HONOR", + -14.660420417785645 + ], + [ + "▁purifies", + -14.660420417785645 + ], + [ + "▁HuffPost", + -14.660422325134276 + ], + [ + "▁Keswick", + -14.660422325134276 + ], + [ + "▁malformed", + -14.660422325134276 + ], + [ + "▁Shanahan", + -14.660423278808594 + ], + [ + "▁Electrolux", + -14.66042423248291 + ], + [ + "▁Meehan", + -14.660426139831545 + ], + [ + "▁OCEAN", + -14.660426139831545 + ], + [ + "▁OPTIONS", + -14.660426139831545 + ], + [ + "▁Planetarium", + -14.660430908203123 + ], + [ + "▁sweatpants", + -14.660430908203123 + ], + [ + "▁Bolivar", + -14.660435676574709 + ], + [ + "▁McMurray", + -14.660439491271973 + ], + [ + "▁Dubois", + -14.660445213317873 + ], + [ + "▁fraternal", + -14.660446166992188 + ], + [ + "▁goblet", + -14.660449981689451 + ], + [ + "Stair", + -14.66045379638672 + ], + [ + "▁Hydration", + -14.66045379638672 + ], + [ + "▁bevelled", + -14.660454750061035 + ], + [ + "odia", + -14.660457611083984 + ], + [ + "▁INTERNET", + -14.660457611083984 + ], + [ + "▁Glossary", + -14.6604642868042 + ], + [ + "▁Mathias", + -14.660472869873049 + ], + [ + "▁mystique", + -14.660476684570312 + ], + [ + "▁Citric", + -14.660484313964844 + ], + [ + "Priority", + -14.660490036010742 + ], + [ + "▁Rackspace", + -14.660491943359377 + ], + [ + "▁Basildon", + -14.660510063171388 + ], + [ + "▁digitisation", + -14.660516738891602 + ], + [ + "▁Springbok", + -14.660533905029297 + ], + [ + "▁Datuk", + -14.66054344177246 + ], + [ + "▁romeo", + -14.660550117492676 + ], + [ + "▁parliamentarian", + -14.660560607910156 + ], + [ + "▁vicariously", + -14.660562515258787 + ], + [ + "▁favourably", + -14.660563468933104 + ], + [ + "▁RedHat", + -14.660564422607422 + ], + [ + "▁Bogart", + -14.660573959350586 + ], + [ + "▁Marle", + -14.66058349609375 + ], + [ + "▁Boba", + -14.660587310791016 + ], + [ + "▁Kasper", + -14.66059398651123 + ], + [ + "▁Insane", + -14.660600662231444 + ], + [ + "▁bedbugs", + -14.660600662231444 + ], + [ + "▁SPARK", + -14.660618782043455 + ], + [ + "▁Winkler", + -14.660622596740724 + ], + [ + "▁Bunting", + -14.660626411437988 + ], + [ + "▁LAPD", + -14.660638809204102 + ], + [ + "▁Congresswoman", + -14.66064453125 + ], + [ + "▁ISRO", + -14.660645484924316 + ], + [ + "▁radiometric", + -14.660704612731934 + ], + [ + "▁Konica", + -14.660717010498049 + ], + [ + "▁Runaway", + -14.660720825195312 + ], + [ + "▁Acceptable", + -14.660725593566896 + ], + [ + "▁PPTP", + -14.660762786865234 + ], + [ + "▁ISDN", + -14.66076374053955 + ], + [ + "▁Egress", + -14.660797119140623 + ], + [ + "cleaner", + -14.660811424255373 + ], + [ + "▁Tracing", + -14.660823822021484 + ], + [ + "▁Meister", + -14.660826683044434 + ], + [ + "Freshman", + -14.660837173461914 + ], + [ + "▁magnifier", + -14.660842895507812 + ], + [ + "▁1809", + -14.660859107971191 + ], + [ + "▁Optus", + -14.66086483001709 + ], + [ + "▁Pinewood", + -14.660943984985352 + ], + [ + "▁Aisha", + -14.660947799682615 + ], + [ + "▁revere", + -14.660969734191896 + ], + [ + "▁Tempered", + -14.66098403930664 + ], + [ + "▁Markov", + -14.661026000976562 + ], + [ + "▁Alternately", + 
-14.661043167114258 + ], + [ + "zou", + -14.661100387573242 + ], + [ + "▁costumed", + -14.661104202270508 + ], + [ + "▁Manpower", + -14.661112785339355 + ], + [ + "▁Hiding", + -14.66111660003662 + ], + [ + "Marina", + -14.661138534545898 + ], + [ + "▁doodling", + -14.66114330291748 + ], + [ + "▁stabilised", + -14.66116714477539 + ], + [ + "▁Holcomb", + -14.66117000579834 + ], + [ + "▁Dipper", + -14.661215782165527 + ], + [ + "pek", + -14.661235809326172 + ], + [ + "'0\"", + -14.66125202178955 + ], + [ + "scheid", + -14.661282539367676 + ], + [ + "енно", + -14.66128635406494 + ], + [ + "bashi", + -14.661330223083496 + ], + [ + "Glow", + -14.661337852478027 + ], + [ + "▁dike", + -14.661337852478027 + ], + [ + "▁Mornington", + -14.661391258239746 + ], + [ + "▁Rosalind", + -14.661396980285645 + ], + [ + "▁Bonny", + -14.661412239074709 + ], + [ + "▁Antonia", + -14.661423683166504 + ], + [ + "▁rejoined", + -14.661436080932615 + ], + [ + "▁HCV", + -14.661455154418944 + ], + [ + "▁letterbox", + -14.661459922790527 + ], + [ + "emming", + -14.661460876464844 + ], + [ + "Gum", + -14.66148281097412 + ], + [ + "▁straightaway", + -14.66149616241455 + ], + [ + "▁tremor", + -14.661529541015623 + ], + [ + "Deriv", + -14.661543846130373 + ], + [ + "shaking", + -14.661555290222168 + ], + [ + "iwi", + -14.661687850952148 + ], + [ + "▁redeveloped", + -14.66174030303955 + ], + [ + "▁Amend", + -14.661741256713867 + ], + [ + "▁SPCA", + -14.66184425354004 + ], + [ + "▁houseboat", + -14.661852836608888 + ], + [ + "▁Barca", + -14.66193389892578 + ], + [ + "laced", + -14.662055015563965 + ], + [ + "▁180,000", + -14.662076950073242 + ], + [ + "▁Provident", + -14.66209602355957 + ], + [ + "pov", + -14.662156105041504 + ], + [ + "▁Treehouse", + -14.662196159362791 + ], + [ + "brecht", + -14.66219711303711 + ], + [ + "▁Sardar", + -14.662201881408691 + ], + [ + "▁Outboard", + -14.662249565124512 + ], + [ + "▁EVENTS", + -14.662251472473145 + ], + [ + "calendar", + -14.662307739257812 + ], + [ + "▁churchyard", + -14.662320137023926 + ], + [ + "/10.1", + -14.662355422973633 + ], + [ + "2.10", + -14.662408828735352 + ], + [ + "▁Ceres", + -14.662508964538574 + ], + [ + "▁15-16", + -14.66252899169922 + ], + [ + "▁Abbie", + -14.6625337600708 + ], + [ + "Brave", + -14.662638664245604 + ], + [ + "▁Trisha", + -14.662704467773438 + ], + [ + "suppress", + -14.662708282470703 + ], + [ + "▁Scor", + -14.662758827209473 + ], + [ + "ESSA", + -14.662776947021484 + ], + [ + "vasion", + -14.662894248962402 + ], + [ + "▁thicket", + -14.66289520263672 + ], + [ + "▁JAS", + -14.662896156311035 + ], + [ + "opinion", + -14.66292667388916 + ], + [ + "▁Jovi", + -14.662986755371094 + ], + [ + "▁($15", + -14.663002014160156 + ], + [ + "▁Delia", + -14.6630277633667 + ], + [ + "convenient", + -14.663031578063965 + ], + [ + "▁Bionic", + -14.663078308105469 + ], + [ + "▁14.1", + -14.663091659545898 + ], + [ + "▁Stamped", + -14.663116455078123 + ], + [ + "GOV", + -14.663336753845217 + ], + [ + "hind", + -14.663371086120604 + ], + [ + "dari", + -14.663395881652832 + ], + [ + "qvist", + -14.663395881652832 + ], + [ + "▁Markdown", + -14.66348361968994 + ], + [ + "▁lemony", + -14.663501739501951 + ], + [ + "▁Boxed", + -14.66352653503418 + ], + [ + "▁juga", + -14.663531303405762 + ], + [ + "PATCH", + -14.663548469543455 + ], + [ + "elberg", + -14.663613319396973 + ], + [ + "▁mrp", + -14.663780212402344 + ], + [ + "ukan", + -14.663784980773926 + ], + [ + "Ellis", + -14.663872718811035 + ], + [ + "▁Winch", + -14.663890838623049 + ], + [ + "quez", + 
-14.66393756866455 + ], + [ + "▁Stunt", + -14.664024353027344 + ], + [ + "▁forecasters", + -14.66417121887207 + ], + [ + "valuation", + -14.6641845703125 + ], + [ + "▁bounding", + -14.664207458496094 + ], + [ + "▁ZF", + -14.66425609588623 + ], + [ + "▁sprig", + -14.664258003234863 + ], + [ + "▁1823", + -14.664299964904783 + ], + [ + "▁(21)", + -14.664308547973633 + ], + [ + "▁pureed", + -14.66444492340088 + ], + [ + "counterterrorism", + -14.664511680603027 + ], + [ + "adjustment", + -14.66451358795166 + ], + [ + "Moto", + -14.66451930999756 + ], + [ + "▁Kept", + -14.664527893066406 + ], + [ + "ASM", + -14.66457462310791 + ], + [ + "uzo", + -14.664626121520996 + ], + [ + "▁BOB", + -14.66469669342041 + ], + [ + "▁Vint", + -14.66476345062256 + ], + [ + "OWA", + -14.664783477783203 + ], + [ + "▁meatless", + -14.664807319641112 + ], + [ + "▁crevice", + -14.66482925415039 + ], + [ + "glove", + -14.664841651916504 + ], + [ + "▁Azz", + -14.664851188659668 + ], + [ + "▁Gula", + -14.664875984191896 + ], + [ + "1961", + -14.665009498596191 + ], + [ + "▁earphone", + -14.665104866027832 + ], + [ + "halten", + -14.665217399597168 + ], + [ + "bole", + -14.665278434753418 + ], + [ + "▁overhauling", + -14.66529655456543 + ], + [ + "WHEN", + -14.665328025817873 + ], + [ + "brid", + -14.665337562561035 + ], + [ + "roop", + -14.665366172790527 + ], + [ + "rii", + -14.66537857055664 + ], + [ + "▁conch", + -14.66539192199707 + ], + [ + "Amato", + -14.66547679901123 + ], + [ + "ISTS", + -14.665505409240724 + ], + [ + "lves", + -14.665581703186035 + ], + [ + "▁Mangal", + -14.665793418884276 + ], + [ + "▁Kenton", + -14.66580581665039 + ], + [ + "▁Collet", + -14.665810585021973 + ], + [ + "▁Bho", + -14.665874481201172 + ], + [ + "▁Moby", + -14.665955543518066 + ], + [ + "2:19", + -14.666017532348633 + ], + [ + "▁preempt", + -14.666096687316896 + ], + [ + "▁Lucid", + -14.666114807128906 + ], + [ + "BBQ", + -14.666157722473145 + ], + [ + "donna", + -14.666179656982422 + ], + [ + "057", + -14.666190147399902 + ], + [ + "▁Speck", + -14.666244506835938 + ], + [ + "▁3-8", + -14.666252136230469 + ], + [ + "▁Beren", + -14.666301727294922 + ], + [ + "ostatic", + -14.666322708129885 + ], + [ + "▁wonton", + -14.666341781616213 + ], + [ + "▁leaped", + -14.666403770446776 + ], + [ + "czak", + -14.666460990905762 + ], + [ + "▁whitetail", + -14.666462898254396 + ], + [ + "▁pestle", + -14.666479110717772 + ], + [ + "Sometime", + -14.666550636291504 + ], + [ + "▁Mailbox", + -14.666625022888184 + ], + [ + "▁NYE", + -14.666666984558104 + ], + [ + "▁Kae", + -14.666711807250977 + ], + [ + "▁indiscriminate", + -14.666762351989746 + ], + [ + "nati", + -14.66678237915039 + ], + [ + "▁comprehending", + -14.666919708251951 + ], + [ + "▁Filtering", + -14.666955947875977 + ], + [ + "bura", + -14.666988372802734 + ], + [ + "oddy", + -14.667021751403809 + ], + [ + "▁sensitively", + -14.667078971862791 + ], + [ + "ritu", + -14.667203903198242 + ], + [ + "▁.45", + -14.667256355285645 + ], + [ + "1954", + -14.66726303100586 + ], + [ + "▁phthalates", + -14.667306900024414 + ], + [ + "▁yachting", + -14.667319297790527 + ], + [ + "▁dynamical", + -14.667445182800291 + ], + [ + "▁Abid", + -14.667550086975098 + ], + [ + "▁Neha", + -14.667672157287598 + ], + [ + "▁Metcalf", + -14.667736053466797 + ], + [ + "▁Fiddle", + -14.66778564453125 + ], + [ + "▁Tada", + -14.66782283782959 + ], + [ + "▁EDR", + -14.667824745178224 + ], + [ + "▁888-5", + -14.66783046722412 + ], + [ + "▁MIMO", + -14.667832374572754 + ], + [ + "UAT", + -14.667926788330078 + ], + [ + 
"▁sciatic", + -14.667997360229492 + ], + [ + "▁GLC", + -14.668012619018556 + ], + [ + "▁Appalachia", + -14.668063163757324 + ], + [ + "MART", + -14.66809368133545 + ], + [ + "kaki", + -14.668095588684082 + ], + [ + "closure", + -14.6681489944458 + ], + [ + "divorce", + -14.668212890625 + ], + [ + "spyware", + -14.668213844299316 + ], + [ + "Grease", + -14.668214797973633 + ], + [ + "removal", + -14.668219566345217 + ], + [ + "Decorate", + -14.668220520019531 + ], + [ + "Miracle", + -14.668222427368164 + ], + [ + "Mouth", + -14.668222427368164 + ], + [ + "Milwaukee", + -14.668227195739746 + ], + [ + "trouble", + -14.66822910308838 + ], + [ + "deferred", + -14.668258666992188 + ], + [ + "Entity", + -14.66826057434082 + ], + [ + "jewelry", + -14.668275833129885 + ], + [ + "Bailey", + -14.668291091918944 + ], + [ + "Disable", + -14.668291091918944 + ], + [ + "entz", + -14.668353080749512 + ], + [ + "▁Alcatraz", + -14.66840934753418 + ], + [ + "▁BETWEEN", + -14.66840934753418 + ], + [ + "▁Chipmunk", + -14.66840934753418 + ], + [ + "▁Iyengar", + -14.66840934753418 + ], + [ + "▁Lanzarote", + -14.66840934753418 + ], + [ + "▁aggregating", + -14.66840934753418 + ], + [ + "▁circumstantial", + -14.66840934753418 + ], + [ + "▁confiscation", + -14.66840934753418 + ], + [ + "▁cornucopia", + -14.66840934753418 + ], + [ + "▁cosmetology", + -14.66840934753418 + ], + [ + "▁depleting", + -14.66840934753418 + ], + [ + "▁derailleur", + -14.66840934753418 + ], + [ + "▁emollient", + -14.66840934753418 + ], + [ + "▁february", + -14.66840934753418 + ], + [ + "▁gruelling", + -14.66840934753418 + ], + [ + "▁plasmid", + -14.66840934753418 + ], + [ + "▁reciprocating", + -14.66840934753418 + ], + [ + "▁serendipity", + -14.66840934753418 + ], + [ + "▁tantamount", + -14.66840934753418 + ], + [ + "▁unreachable", + -14.66840934753418 + ], + [ + "▁wheelbarrow", + -14.66840934753418 + ], + [ + "▁wheezing", + -14.66840934753418 + ], + [ + "▁McFadden", + -14.668410301208496 + ], + [ + "▁Phoenicia", + -14.668410301208496 + ], + [ + "▁Catharine", + -14.668412208557127 + ], + [ + "FACTORY", + -14.668413162231444 + ], + [ + "▁Mahjong", + -14.668413162231444 + ], + [ + "▁Presidio", + -14.668413162231444 + ], + [ + "▁handcuffs", + -14.668413162231444 + ], + [ + "▁Eskimo", + -14.668416976928713 + ], + [ + "▁eulogy", + -14.668421745300291 + ], + [ + "▁MAJOR", + -14.66842555999756 + ], + [ + "▁SWEET", + -14.668428421020508 + ], + [ + "▁Maasai", + -14.668429374694824 + ], + [ + "Conveniently", + -14.668431282043455 + ], + [ + "▁INDIRECT", + -14.668434143066406 + ], + [ + "▁contraband", + -14.668435096740724 + ], + [ + "▁Ravelry", + -14.668445587158203 + ], + [ + "▁presumptive", + -14.668445587158203 + ], + [ + "▁Federated", + -14.668447494506836 + ], + [ + "▁censure", + -14.668453216552734 + ], + [ + "▁Kippur", + -14.668456077575684 + ], + [ + "10.5", + -14.66845703125 + ], + [ + "▁IndyCar", + -14.66845703125 + ], + [ + "▁palmetto", + -14.668468475341797 + ], + [ + "▁Estelle", + -14.668472290039062 + ], + [ + "▁hatchery", + -14.668474197387695 + ], + [ + "▁Propulsion", + -14.668475151062012 + ], + [ + "Emphasiz", + -14.668477058410645 + ], + [ + "▁Amrita", + -14.668481826782228 + ], + [ + "▁desecrat", + -14.668484687805176 + ], + [ + "▁psychometric", + -14.668497085571287 + ], + [ + "Globe", + -14.668499946594238 + ], + [ + "Heroes", + -14.668503761291504 + ], + [ + "▁Mancini", + -14.66850471496582 + ], + [ + "▁Admiralty", + -14.66851043701172 + ], + [ + "▁MUGA", + -14.66851043701172 + ], + [ + "▁INCLUDES", + -14.668511390686035 + ], + [ 
+ "▁Penske", + -14.668511390686035 + ], + [ + "Meyer", + -14.66851806640625 + ], + [ + "▁Washburn", + -14.668524742126465 + ], + [ + "▁revolutionaries", + -14.66852855682373 + ], + [ + "▁Billiard", + -14.668533325195312 + ], + [ + "▁dishwashing", + -14.668537139892578 + ], + [ + "▁Billionaire", + -14.66854190826416 + ], + [ + "ADVANCE", + -14.668551445007324 + ], + [ + "▁Intranet", + -14.66855239868164 + ], + [ + "abundance", + -14.668569564819336 + ], + [ + "▁Bahia", + -14.668586730957031 + ], + [ + "▁Twinkle", + -14.668594360351562 + ], + [ + "▁Greenbelt", + -14.668625831604004 + ], + [ + "enco", + -14.668636322021484 + ], + [ + "▁mirage", + -14.668645858764648 + ], + [ + "▁10.10", + -14.66864776611328 + ], + [ + "▁Langford", + -14.668663024902344 + ], + [ + "▁ostrac", + -14.668668746948242 + ], + [ + "Avi", + -14.668684005737305 + ], + [ + "msn", + -14.668686866760254 + ], + [ + "▁Bellamy", + -14.668726921081545 + ], + [ + "▁trachea", + -14.668745994567873 + ], + [ + "▁reenactment", + -14.66877269744873 + ], + [ + "▁UKIP", + -14.66877555847168 + ], + [ + "▁crossbow", + -14.66877555847168 + ], + [ + "▁Opry", + -14.668779373168944 + ], + [ + "▁buckling", + -14.668797492980955 + ], + [ + "▁oddities", + -14.668816566467283 + ], + [ + "▁Burrito", + -14.668817520141602 + ], + [ + "▁zipping", + -14.668818473815918 + ], + [ + "▁ROSE", + -14.66882038116455 + ], + [ + "Estimated", + -14.6688232421875 + ], + [ + "▁uae", + -14.668848037719728 + ], + [ + "▁UTV", + -14.668895721435549 + ], + [ + "▁enumerated", + -14.668911933898926 + ], + [ + "ensky", + -14.66891860961914 + ], + [ + "Helpful", + -14.668928146362305 + ], + [ + "glaze", + -14.668964385986328 + ], + [ + "▁Yeovil", + -14.66896629333496 + ], + [ + "▁Hillsdale", + -14.668994903564451 + ], + [ + "InterContinental", + -14.668997764587402 + ], + [ + "▁Anson", + -14.6690092086792 + ], + [ + "LCA", + -14.669021606445312 + ], + [ + "▁alumna", + -14.669034957885742 + ], + [ + "Widespread", + -14.66904067993164 + ], + [ + "▁antisocial", + -14.669079780578612 + ], + [ + "▁subjectivity", + -14.669089317321776 + ], + [ + "▁governorship", + -14.669096946716309 + ], + [ + "▁yawning", + -14.669127464294434 + ], + [ + "prog", + -14.669134140014648 + ], + [ + "SOP", + -14.66915798187256 + ], + [ + "▁ITEMS", + -14.669177055358888 + ], + [ + "▁wrenches", + -14.66920566558838 + ], + [ + "zur", + -14.669232368469238 + ], + [ + "▁oddball", + -14.669243812561035 + ], + [ + "▁befriended", + -14.669257164001465 + ], + [ + "Dissolve", + -14.669265747070312 + ], + [ + "iyar", + -14.669281959533691 + ], + [ + "itel", + -14.669318199157717 + ], + [ + "throughout", + -14.66932201385498 + ], + [ + "▁demolishing", + -14.669323921203612 + ], + [ + "▁permeated", + -14.669326782226562 + ], + [ + "▁metabolize", + -14.66933536529541 + ], + [ + "lifestyle", + -14.669355392456056 + ], + [ + "▁Brice", + -14.66938018798828 + ], + [ + "▁Slit", + -14.669398307800291 + ], + [ + "▁SAV", + -14.669404029846191 + ], + [ + "▁dismisses", + -14.669479370117188 + ], + [ + "HMRC", + -14.669483184814451 + ], + [ + "Seminar", + -14.669493675231934 + ], + [ + "▁Carport", + -14.669528007507324 + ], + [ + "▁Golem", + -14.669561386108398 + ], + [ + "▁tightest", + -14.669577598571776 + ], + [ + "▁ESET", + -14.669591903686523 + ], + [ + "▁harmonized", + -14.669636726379396 + ], + [ + "▁nitpick", + -14.66967487335205 + ], + [ + "▁(160", + -14.669690132141112 + ], + [ + "nari", + -14.669723510742188 + ], + [ + "haar", + -14.669776916503906 + ], + [ + "▁Rupp", + -14.669777870178224 + ], + [ + 
"▁Millet", + -14.669800758361816 + ], + [ + "▁lengthened", + -14.669801712036133 + ], + [ + "▁Hollande", + -14.669857025146484 + ], + [ + "▁18-24", + -14.6698579788208 + ], + [ + "▁RTX", + -14.66994857788086 + ], + [ + "▁churned", + -14.6699800491333 + ], + [ + "▁#22", + -14.669987678527832 + ], + [ + "▁modus", + -14.670002937316896 + ], + [ + "▁$2.9", + -14.670013427734377 + ], + [ + "▁Theories", + -14.670036315917969 + ], + [ + "patrick", + -14.670050621032717 + ], + [ + "▁Rubik", + -14.670061111450195 + ], + [ + "milie", + -14.67011547088623 + ], + [ + "▁Manna", + -14.67017650604248 + ], + [ + "▁Lapland", + -14.670207023620604 + ], + [ + "7.8%", + -14.670220375061035 + ], + [ + "frank", + -14.67022705078125 + ], + [ + "▁impound", + -14.670269012451172 + ], + [ + "▁contractually", + -14.67029094696045 + ], + [ + "▁frisk", + -14.67030429840088 + ], + [ + "▁emanates", + -14.670312881469728 + ], + [ + "▁suave", + -14.67035675048828 + ], + [ + "RIAN", + -14.670382499694824 + ], + [ + "▁Tamarind", + -14.670434951782228 + ], + [ + "wreck", + -14.67050075531006 + ], + [ + "▁savoring", + -14.670503616333008 + ], + [ + "urgent", + -14.670637130737305 + ], + [ + "Household", + -14.670721054077148 + ], + [ + "▁DJing", + -14.670724868774414 + ], + [ + "Clare", + -14.670825958251951 + ], + [ + "Folder", + -14.670831680297852 + ], + [ + "▁overactive", + -14.67084503173828 + ], + [ + "▁tendering", + -14.67084789276123 + ], + [ + "▁Playbook", + -14.670862197875977 + ], + [ + "▁Melting", + -14.670942306518556 + ], + [ + "▁MLP", + -14.670944213867188 + ], + [ + "▁Whitewater", + -14.670964241027832 + ], + [ + "CPI", + -14.670974731445312 + ], + [ + "▁stewed", + -14.671006202697754 + ], + [ + "▁Crock", + -14.671021461486816 + ], + [ + "▁avg", + -14.671082496643066 + ], + [ + "8.6%", + -14.67119026184082 + ], + [ + "▁Deming", + -14.671194076538086 + ], + [ + "designing", + -14.671202659606934 + ], + [ + "zab", + -14.671281814575195 + ], + [ + "▁unraveling", + -14.67131519317627 + ], + [ + "▁remixed", + -14.671364784240724 + ], + [ + "▁Vald", + -14.671381950378418 + ], + [ + "▁Dhu", + -14.671393394470217 + ], + [ + "▁ASR", + -14.671429634094238 + ], + [ + "▁Nobu", + -14.67147731781006 + ], + [ + "▁Postage", + -14.671550750732422 + ], + [ + "▁PVA", + -14.671567916870115 + ], + [ + "▁pushback", + -14.67160701751709 + ], + [ + "▁perches", + -14.67162036895752 + ], + [ + "Maha", + -14.671648979187012 + ], + [ + "▁hashes", + -14.67165184020996 + ], + [ + "trench", + -14.671659469604492 + ], + [ + "heroes", + -14.671700477600098 + ], + [ + "Timing", + -14.671764373779297 + ], + [ + "▁Brno", + -14.671829223632812 + ], + [ + "uniform", + -14.671874046325684 + ], + [ + "1850", + -14.671947479248049 + ], + [ + "▁pune", + -14.67198371887207 + ], + [ + "entering", + -14.672001838684082 + ], + [ + "▁Retin", + -14.672063827514648 + ], + [ + "oggi", + -14.672138214111328 + ], + [ + "▁ilk", + -14.672245979309082 + ], + [ + "▁Binh", + -14.672333717346191 + ], + [ + "▁brokered", + -14.672371864318848 + ], + [ + "▁GND", + -14.672396659851074 + ], + [ + "▁airplay", + -14.672438621520996 + ], + [ + "▁Nec", + -14.672520637512209 + ], + [ + "▁Daz", + -14.672532081604004 + ], + [ + "▁Kazi", + -14.672537803649902 + ], + [ + "Optionally", + -14.672587394714355 + ], + [ + "▁MSME", + -14.672624588012695 + ], + [ + "▁Zaha", + -14.672627449035645 + ], + [ + "▁SHEL", + -14.67263126373291 + ], + [ + "▁Emmet", + -14.672640800476074 + ], + [ + "Rabbi", + -14.672769546508787 + ], + [ + "▁silic", + -14.672784805297852 + ], + [ + "▁Kolb", + 
-14.67282485961914 + ], + [ + "▁Kingman", + -14.67283535003662 + ], + [ + "▁Bowles", + -14.67297077178955 + ], + [ + "anova", + -14.673049926757812 + ], + [ + "WITH", + -14.673067092895508 + ], + [ + "▁Foothill", + -14.673072814941406 + ], + [ + "▁libro", + -14.673145294189451 + ], + [ + "▁Goodall", + -14.6731538772583 + ], + [ + "7.6%", + -14.673317909240724 + ], + [ + "▁70.3", + -14.673365592956545 + ], + [ + "▁CGL", + -14.673382759094238 + ], + [ + "▁chatroom", + -14.673394203186035 + ], + [ + "WEN", + -14.673477172851562 + ], + [ + "▁idealized", + -14.673523902893066 + ], + [ + "▁lapsed", + -14.673700332641602 + ], + [ + "▁Hairstyle", + -14.673717498779297 + ], + [ + "alaba", + -14.673739433288574 + ], + [ + "▁jut", + -14.673805236816406 + ], + [ + "4-14", + -14.67385196685791 + ], + [ + "▁stonework", + -14.673925399780272 + ], + [ + "▁Seren", + -14.674032211303713 + ], + [ + "gentle", + -14.67404556274414 + ], + [ + "bois", + -14.674155235290527 + ], + [ + "▁clamor", + -14.67430305480957 + ], + [ + "Pty", + -14.674355506896973 + ], + [ + "▁Univer", + -14.674442291259766 + ], + [ + "1.02", + -14.674504280090332 + ], + [ + "▁wii", + -14.674525260925291 + ], + [ + "LNG", + -14.674554824829102 + ], + [ + "loot", + -14.67457103729248 + ], + [ + "Seasonal", + -14.67458724975586 + ], + [ + "Flop", + -14.67459201812744 + ], + [ + "BMD", + -14.67465591430664 + ], + [ + "opod", + -14.674711227416992 + ], + [ + "▁barricades", + -14.674736976623535 + ], + [ + "Donated", + -14.674744606018066 + ], + [ + "▁Takashi", + -14.674762725830078 + ], + [ + "Magnifi", + -14.674821853637695 + ], + [ + "▁tendered", + -14.674871444702148 + ], + [ + "Shall", + -14.674973487854004 + ], + [ + "hazardous", + -14.674975395202637 + ], + [ + "aig", + -14.674985885620115 + ], + [ + "▁Blackwood", + -14.675058364868164 + ], + [ + "wab", + -14.67508602142334 + ], + [ + "oggle", + -14.675240516662598 + ], + [ + "Debug", + -14.675559043884276 + ], + [ + "▁pulverize", + -14.6755952835083 + ], + [ + "▁uninstaller", + -14.675628662109377 + ], + [ + "Pond", + -14.675689697265623 + ], + [ + "1902", + -14.675710678100586 + ], + [ + "▁FLAT", + -14.67581558227539 + ], + [ + "▁plotter", + -14.675950050354004 + ], + [ + "▁categor", + -14.675992012023926 + ], + [ + "▁Keh", + -14.676002502441406 + ], + [ + "▁unsecure", + -14.676128387451172 + ], + [ + "FEST", + -14.676226615905762 + ], + [ + "▁unsurprising", + -14.676231384277344 + ], + [ + "Serious", + -14.676254272460938 + ], + [ + "trodden", + -14.676257133483888 + ], + [ + "showing", + -14.67625904083252 + ], + [ + "▁Marant", + -14.676287651062012 + ], + [ + "ethical", + -14.676311492919922 + ], + [ + "▁Hutchins", + -14.676346778869627 + ], + [ + "▁Prisma", + -14.67635726928711 + ], + [ + "furnished", + -14.676424026489258 + ], + [ + "temporary", + -14.676433563232422 + ], + [ + "Amendment", + -14.676434516906738 + ], + [ + "Champaign", + -14.676437377929688 + ], + [ + "Patriot", + -14.676470756530762 + ], + [ + "Centrifugal", + -14.676473617553713 + ], + [ + "▁Astrologer", + -14.676473617553713 + ], + [ + "▁CERTIFIED", + -14.676473617553713 + ], + [ + "▁Chippewa", + -14.676473617553713 + ], + [ + "▁Congolese", + -14.676473617553713 + ], + [ + "▁Dolomites", + -14.676473617553713 + ], + [ + "▁Hidalgo", + -14.676473617553713 + ], + [ + "▁Ignacio", + -14.676473617553713 + ], + [ + "▁LICENSE", + -14.676473617553713 + ], + [ + "▁Lansdowne", + -14.676473617553713 + ], + [ + "▁Obedience", + -14.676473617553713 + ], + [ + "▁Punishment", + -14.676473617553713 + ], + [ + "▁QWERTY", + 
-14.676473617553713 + ], + [ + "▁Rawalpindi", + -14.676473617553713 + ], + [ + "▁Tamilnadu", + -14.676473617553713 + ], + [ + "▁causative", + -14.676473617553713 + ], + [ + "▁decommissioning", + -14.676473617553713 + ], + [ + "▁dehydrator", + -14.676473617553713 + ], + [ + "▁demonstrable", + -14.676473617553713 + ], + [ + "▁disingenuous", + -14.676473617553713 + ], + [ + "▁fiddly", + -14.676473617553713 + ], + [ + "▁oxycodone", + -14.676473617553713 + ], + [ + "▁prophylaxis", + -14.676473617553713 + ], + [ + "▁scrupulous", + -14.676473617553713 + ], + [ + "▁watercress", + -14.676473617553713 + ], + [ + "▁Wallingford", + -14.676474571228027 + ], + [ + "▁flecks", + -14.676474571228027 + ], + [ + "▁untamed", + -14.676474571228027 + ], + [ + "▁Zwingle", + -14.676475524902344 + ], + [ + "▁Acceleration", + -14.67647647857666 + ], + [ + "▁sucrose", + -14.67647933959961 + ], + [ + "▁Stormtrooper", + -14.676481246948242 + ], + [ + "▁Mailchimp", + -14.676484107971191 + ], + [ + "Malcolm", + -14.676486015319824 + ], + [ + "▁dahlia", + -14.676486015319824 + ], + [ + "▁Cockburn", + -14.676488876342772 + ], + [ + "cysteine", + -14.67648983001709 + ], + [ + "▁mechanistic", + -14.67648983001709 + ], + [ + "Imperial", + -14.676491737365724 + ], + [ + "dependency", + -14.67649269104004 + ], + [ + "▁QUESTIONS", + -14.67649269104004 + ], + [ + "Boeing", + -14.676495552062988 + ], + [ + "Requirements", + -14.676499366760254 + ], + [ + "▁Osbourne", + -14.67650032043457 + ], + [ + "▁adherents", + -14.676502227783203 + ], + [ + "▁Dunlap", + -14.676507949829102 + ], + [ + "suffering", + -14.67651081085205 + ], + [ + "▁Countess", + -14.676521301269531 + ], + [ + "▁Laughlin", + -14.67652416229248 + ], + [ + "▁dislocated", + -14.676526069641112 + ], + [ + "Gravity", + -14.67652988433838 + ], + [ + "▁fibreglass", + -14.676533699035645 + ], + [ + "▁Evoque", + -14.676535606384276 + ], + [ + "▁parched", + -14.676554679870604 + ], + [ + "diameter", + -14.676557540893556 + ], + [ + "▁Jamison", + -14.676558494567873 + ], + [ + "▁savannah", + -14.676559448242188 + ], + [ + "▁KOYO", + -14.676573753356934 + ], + [ + "Ahmad", + -14.676578521728516 + ], + [ + "▁Gaulle", + -14.676581382751465 + ], + [ + "▁guardrail", + -14.67660140991211 + ], + [ + "▁1993)", + -14.676606178283691 + ], + [ + "▁scrubbed", + -14.676607131958008 + ], + [ + "Wildlife", + -14.676612854003906 + ], + [ + "▁Najib", + -14.676612854003906 + ], + [ + "▁Stingray", + -14.676612854003906 + ], + [ + "▁Celts", + -14.676617622375488 + ], + [ + "▁suds", + -14.67661952972412 + ], + [ + "▁Konami", + -14.676623344421388 + ], + [ + "▁Hardwick", + -14.676650047302246 + ], + [ + "▁Warmer", + -14.676663398742676 + ], + [ + "Radar", + -14.676674842834473 + ], + [ + "▁Fluffy", + -14.676681518554688 + ], + [ + "CLOUD", + -14.67668628692627 + ], + [ + "▁Lowcountry", + -14.676697731018066 + ], + [ + "▁bugged", + -14.676698684692385 + ], + [ + "▁hairspray", + -14.676714897155762 + ], + [ + "VIDEO", + -14.676715850830078 + ], + [ + "▁JBoss", + -14.676728248596191 + ], + [ + "▁Lovato", + -14.676734924316406 + ], + [ + "▁Paver", + -14.676738739013672 + ], + [ + "Comedy", + -14.676755905151367 + ], + [ + "absorption", + -14.6767578125 + ], + [ + "▁turnstile", + -14.676765441894531 + ], + [ + "▁BASF", + -14.67676830291748 + ], + [ + "▁Jerky", + -14.676774978637695 + ], + [ + "▁LePage", + -14.676777839660645 + ], + [ + "▁supernova", + -14.676801681518556 + ], + [ + "igar", + -14.676813125610352 + ], + [ + "▁Malden", + -14.67681884765625 + ], + [ + "decoration", + -14.676824569702148 
+ ], + [ + "▁inflationary", + -14.676868438720703 + ], + [ + "Knock", + -14.67686939239502 + ], + [ + "Jac", + -14.676887512207031 + ], + [ + "▁vanishes", + -14.676892280578612 + ], + [ + "Cluster", + -14.676895141601562 + ], + [ + "▁Tulum", + -14.67690372467041 + ], + [ + "▁Williston", + -14.676912307739258 + ], + [ + "▁incubated", + -14.676929473876951 + ], + [ + "▁Shipley", + -14.676939964294434 + ], + [ + "▁Profitable", + -14.67695140838623 + ], + [ + "▁obstetrician", + -14.676960945129396 + ], + [ + "▁Gabriella", + -14.676963806152344 + ], + [ + "▁Centra", + -14.676968574523926 + ], + [ + "▁Kinney", + -14.676993370056152 + ], + [ + "Alter", + -14.677011489868164 + ], + [ + "▁Mudd", + -14.677051544189451 + ], + [ + "resolved", + -14.677074432373049 + ], + [ + "▁sheltering", + -14.677081108093262 + ], + [ + "SHARP", + -14.677131652832031 + ], + [ + "▁Coyle", + -14.677132606506348 + ], + [ + "Crash", + -14.677172660827637 + ], + [ + "AJA", + -14.677217483520508 + ], + [ + "grease", + -14.677241325378418 + ], + [ + "▁RMIT", + -14.67724609375 + ], + [ + "crypto", + -14.67728328704834 + ], + [ + "▁12:01", + -14.6773042678833 + ], + [ + "▁smelting", + -14.677312850952148 + ], + [ + "▁Fiddler", + -14.677350997924805 + ], + [ + "▁Couches", + -14.67735767364502 + ], + [ + "▁Lanier", + -14.67740249633789 + ], + [ + "▁Claimant", + -14.677408218383787 + ], + [ + "▁$1,4", + -14.677413940429688 + ], + [ + "detailed", + -14.67750358581543 + ], + [ + "4.8%", + -14.6775484085083 + ], + [ + "Liam", + -14.67755889892578 + ], + [ + "Nap", + -14.677590370178224 + ], + [ + "▁Calam", + -14.677595138549805 + ], + [ + "▁Rucker", + -14.67759609222412 + ], + [ + "DEO", + -14.677600860595703 + ], + [ + "-0-0", + -14.677611351013184 + ], + [ + "▁liquidated", + -14.67761516571045 + ], + [ + "preservation", + -14.677618026733398 + ], + [ + "▁epidemiological", + -14.67762851715088 + ], + [ + "classroom", + -14.677694320678713 + ], + [ + "▁Zou", + -14.677757263183594 + ], + [ + "▁Gust", + -14.677789688110352 + ], + [ + "Serge", + -14.677806854248049 + ], + [ + "▁DKK", + -14.677830696105955 + ], + [ + "▁unfilled", + -14.677851676940918 + ], + [ + "Dock", + -14.677915573120115 + ], + [ + "▁deviated", + -14.677939414978027 + ], + [ + "▁Hamburger", + -14.67798900604248 + ], + [ + "▁Samar", + -14.678110122680664 + ], + [ + "▁encrypting", + -14.678114891052246 + ], + [ + "▁stinking", + -14.678123474121094 + ], + [ + "▁Mauro", + -14.678176879882812 + ], + [ + "▁Trackback", + -14.678337097167969 + ], + [ + "▁Shanti", + -14.678339004516602 + ], + [ + "desc", + -14.678448677062988 + ], + [ + "llusion", + -14.678470611572266 + ], + [ + "▁penning", + -14.678536415100098 + ], + [ + "Swim", + -14.678545951843262 + ], + [ + "▁Nido", + -14.678589820861816 + ], + [ + "gef", + -14.678661346435549 + ], + [ + "THS", + -14.678668022155762 + ], + [ + "▁scro", + -14.678711891174316 + ], + [ + "URU", + -14.678778648376465 + ], + [ + "▁Sault", + -14.67880153656006 + ], + [ + "▁formulae", + -14.67886447906494 + ], + [ + "▁faze", + -14.67886734008789 + ], + [ + "german", + -14.678889274597168 + ], + [ + "▁Stahl", + -14.678945541381836 + ], + [ + "▁Galan", + -14.678995132446287 + ], + [ + "▁Dhara", + -14.679020881652832 + ], + [ + "▁Vishwa", + -14.679022789001465 + ], + [ + "FES", + -14.679112434387209 + ], + [ + "unlock", + -14.679150581359863 + ], + [ + "Mogul", + -14.679153442382812 + ], + [ + "▁Soni", + -14.67919921875 + ], + [ + "▁darned", + -14.679205894470217 + ], + [ + "▁panelled", + -14.67928981781006 + ], + [ + "▁Nada", + 
-14.679344177246094 + ], + [ + "▁(#4", + -14.67938232421875 + ], + [ + "▁tallying", + -14.679594993591309 + ], + [ + "▁£90", + -14.67959976196289 + ], + [ + "▁romanticism", + -14.67966651916504 + ], + [ + "▁Mitra", + -14.67972469329834 + ], + [ + "viii", + -14.67984676361084 + ], + [ + "adze", + -14.679886817932127 + ], + [ + "SDS", + -14.679889678955078 + ], + [ + "servant", + -14.679905891418455 + ], + [ + "iven", + -14.679931640625 + ], + [ + "psycho", + -14.67996597290039 + ], + [ + "▁Daycare", + -14.68000316619873 + ], + [ + "Lok", + -14.680106163024902 + ], + [ + "▁Nader", + -14.68012809753418 + ], + [ + "▁PBL", + -14.680185317993164 + ], + [ + "anzi", + -14.680205345153809 + ], + [ + "▁stil", + -14.680221557617188 + ], + [ + "▁80/20", + -14.680242538452148 + ], + [ + "oggy", + -14.68038845062256 + ], + [ + "▁Banksy", + -14.680548667907717 + ], + [ + "▁Internally", + -14.680566787719728 + ], + [ + "••", + -14.68069839477539 + ], + [ + "▁Heroic", + -14.680737495422363 + ], + [ + "▁enameled", + -14.680801391601562 + ], + [ + "moji", + -14.680830955505373 + ], + [ + "▁Litho", + -14.68084716796875 + ], + [ + "▁Oncol", + -14.68096160888672 + ], + [ + "Shut", + -14.680967330932615 + ], + [ + "▁jitter", + -14.680999755859377 + ], + [ + "▁candida", + -14.681078910827637 + ], + [ + "pyri", + -14.681133270263672 + ], + [ + "▁triglyceride", + -14.681147575378418 + ], + [ + "▁Umar", + -14.68117332458496 + ], + [ + "▁Gump", + -14.68125057220459 + ], + [ + "▁spon", + -14.681289672851562 + ], + [ + "▁1775", + -14.681324005126951 + ], + [ + "▁JIS", + -14.681371688842772 + ], + [ + "▁$61", + -14.681520462036133 + ], + [ + "assemble", + -14.681550979614258 + ], + [ + "pressing", + -14.681605339050291 + ], + [ + "MEMBERS", + -14.681666374206545 + ], + [ + "▁ARV", + -14.68170928955078 + ], + [ + "▁policymaking", + -14.681748390197754 + ], + [ + "▁BETA", + -14.68187141418457 + ], + [ + "▁topographical", + -14.681918144226074 + ], + [ + "9.9%", + -14.681933403015137 + ], + [ + "Quant", + -14.681992530822754 + ], + [ + "▁gazed", + -14.68202304840088 + ], + [ + "▁AUDI", + -14.682086944580078 + ], + [ + "▁Valent", + -14.68216323852539 + ], + [ + "0.80", + -14.682191848754885 + ], + [ + "▁comme", + -14.682194709777832 + ], + [ + "acted", + -14.682238578796388 + ], + [ + "▁Mastermind", + -14.682271003723145 + ], + [ + "tagged", + -14.682280540466309 + ], + [ + "▁Ural", + -14.68228816986084 + ], + [ + "▁tubers", + -14.682297706604004 + ], + [ + "glie", + -14.68230438232422 + ], + [ + "▁dropbox", + -14.682397842407228 + ], + [ + "GOOD", + -14.682490348815918 + ], + [ + "▁punter", + -14.68253231048584 + ], + [ + "XV", + -14.682573318481444 + ], + [ + "MATE", + -14.68259048461914 + ], + [ + "▁WISH", + -14.682632446289062 + ], + [ + "africa", + -14.68270206451416 + ], + [ + "▁surrealist", + -14.682710647583008 + ], + [ + "▁WPS", + -14.682791709899902 + ], + [ + "▁MOA", + -14.682916641235352 + ], + [ + "▁warship", + -14.683096885681152 + ], + [ + "ntergovernmental", + -14.68313980102539 + ], + [ + "▁socialized", + -14.683274269104004 + ], + [ + "Fay", + -14.683342933654783 + ], + [ + "▁0161", + -14.683389663696287 + ], + [ + "5-17", + -14.683481216430664 + ], + [ + "joe", + -14.683514595031738 + ], + [ + "aqi", + -14.683524131774902 + ], + [ + "▁£80", + -14.683592796325684 + ], + [ + "▁ballooning", + -14.683634757995604 + ], + [ + "locating", + -14.683667182922363 + ], + [ + "hertz", + -14.683669090270996 + ], + [ + "leven", + -14.683783531188965 + ], + [ + "▁Suez", + -14.683813095092772 + ], + [ + "▁COCO", + 
-14.683837890625 + ], + [ + "▁scavenge", + -14.683849334716797 + ], + [ + "▁$499", + -14.683880805969238 + ], + [ + "▁encroach", + -14.683969497680664 + ], + [ + "▁submerge", + -14.684072494506836 + ], + [ + "▁conspire", + -14.68408203125 + ], + [ + "▁formulaic", + -14.68411922454834 + ], + [ + "▁glisten", + -14.684165000915527 + ], + [ + "▁Randi", + -14.68416690826416 + ], + [ + "fisher", + -14.684171676635742 + ], + [ + "▁croquet", + -14.684196472167969 + ], + [ + "▁reprimand", + -14.684205055236816 + ], + [ + "▁queued", + -14.684212684631348 + ], + [ + "Verb", + -14.684310913085938 + ], + [ + "▁respirator", + -14.68434238433838 + ], + [ + "▁coincidental", + -14.684412956237791 + ], + [ + "CORRECT", + -14.684455871582031 + ], + [ + "oxetine", + -14.684544563293455 + ], + [ + "5′", + -14.684600830078123 + ], + [ + "Flaherty", + -14.684603691101074 + ], + [ + "Proprietary", + -14.684603691101074 + ], + [ + "▁Endocrinology", + -14.684603691101074 + ], + [ + "▁Infirmary", + -14.684603691101074 + ], + [ + "▁Poseidon", + -14.684603691101074 + ], + [ + "▁Salamanca", + -14.684603691101074 + ], + [ + "▁Saraswati", + -14.684603691101074 + ], + [ + "▁dormitories", + -14.684603691101074 + ], + [ + "▁larynx", + -14.684603691101074 + ], + [ + "▁monopolies", + -14.684603691101074 + ], + [ + "▁paragliding", + -14.684603691101074 + ], + [ + "▁rehabilitative", + -14.684603691101074 + ], + [ + "▁scepticism", + -14.684603691101074 + ], + [ + "▁tabernacle", + -14.684603691101074 + ], + [ + "▁unaltered", + -14.684603691101074 + ], + [ + "▁welterweight", + -14.684603691101074 + ], + [ + "▁Geyser", + -14.68460464477539 + ], + [ + "▁miniscule", + -14.68460464477539 + ], + [ + "▁orthogonal", + -14.68460464477539 + ], + [ + "▁pedometer", + -14.68460464477539 + ], + [ + "▁phablet", + -14.68460464477539 + ], + [ + "▁BlueStacks", + -14.684605598449709 + ], + [ + "▁minimization", + -14.684605598449709 + ], + [ + "▁Moriarty", + -14.684606552124023 + ], + [ + "▁lorries", + -14.684606552124023 + ], + [ + "▁Leitrim", + -14.684608459472656 + ], + [ + "▁kansas", + -14.684608459472656 + ], + [ + "▁Carpentry", + -14.684609413146973 + ], + [ + "▁Northumbria", + -14.684609413146973 + ], + [ + "▁caldera", + -14.684610366821287 + ], + [ + "▁fortuitous", + -14.684618949890137 + ], + [ + "▁tussle", + -14.684619903564451 + ], + [ + "▁snicker", + -14.684622764587402 + ], + [ + "▁Gilead", + -14.684636116027832 + ], + [ + "▁geotechnical", + -14.684637069702148 + ], + [ + "▁zillion", + -14.684642791748049 + ], + [ + "▁pylon", + -14.68464469909668 + ], + [ + "▁Chainsaw", + -14.684645652770996 + ], + [ + "▁seduction", + -14.684645652770996 + ], + [ + "▁Tortilla", + -14.68465805053711 + ], + [ + "Accurate", + -14.684659004211426 + ], + [ + "▁Enclosed", + -14.684659957885742 + ], + [ + "▁Unauthorized", + -14.684666633605955 + ], + [ + "Tire", + -14.684674263000488 + ], + [ + "▁ExpressVPN", + -14.684677124023438 + ], + [ + "▁buggies", + -14.68467903137207 + ], + [ + "▁Supplementary", + -14.684684753417969 + ], + [ + "▁Primark", + -14.684685707092283 + ], + [ + "▁Zermatt", + -14.684709548950195 + ], + [ + "▁swindle", + -14.68471622467041 + ], + [ + "▁Lipscomb", + -14.684733390808104 + ], + [ + "▁Effie", + -14.684755325317385 + ], + [ + "Ephesians", + -14.68477725982666 + ], + [ + "▁sunbathe", + -14.68478012084961 + ], + [ + "▁Zucker", + -14.684781074523926 + ], + [ + "acrylic", + -14.684783935546877 + ], + [ + "Fifteen", + -14.684789657592772 + ], + [ + "Vegetable", + -14.68479347229004 + ], + [ + "Embrace", + -14.68479824066162 + ], + [ + 
"Advocate", + -14.684799194335938 + ], + [ + "fanatics", + -14.684807777404783 + ], + [ + "▁Hangover", + -14.684825897216797 + ], + [ + "▁Submitting", + -14.684831619262695 + ], + [ + "architect", + -14.684832572937012 + ], + [ + "swimming", + -14.68483829498291 + ], + [ + "sampling", + -14.684839248657228 + ], + [ + "▁zealot", + -14.684840202331545 + ], + [ + "▁Coldplay", + -14.684850692749023 + ], + [ + "Stanford", + -14.684866905212402 + ], + [ + "wholesale", + -14.684866905212402 + ], + [ + "▁Saracen", + -14.684885025024414 + ], + [ + "▁minibuses", + -14.684901237487791 + ], + [ + "8888", + -14.684906005859377 + ], + [ + "Mention", + -14.684921264648438 + ], + [ + "▁Shao", + -14.684931755065918 + ], + [ + "viz", + -14.68494987487793 + ], + [ + "▁9-1-1", + -14.68496036529541 + ], + [ + "Qatar", + -14.684986114501951 + ], + [ + "▁topological", + -14.68498706817627 + ], + [ + "▁Rafting", + -14.685002326965332 + ], + [ + "ина", + -14.685009002685549 + ], + [ + "▁Nek", + -14.685043334960938 + ], + [ + "▁Marsden", + -14.68505573272705 + ], + [ + "▁Hughesnet", + -14.685065269470217 + ], + [ + "▁Arrested", + -14.685093879699709 + ], + [ + "▁Doreen", + -14.685128211975098 + ], + [ + "dimethyl", + -14.68517017364502 + ], + [ + "▁edict", + -14.685200691223145 + ], + [ + "▁Prussian", + -14.685220718383787 + ], + [ + "▁tinder", + -14.685225486755373 + ], + [ + "edema", + -14.68529224395752 + ], + [ + "straat", + -14.68529224395752 + ], + [ + "▁Aguero", + -14.685347557067873 + ], + [ + "Everywhere", + -14.6853609085083 + ], + [ + "▁squatter", + -14.685441970825195 + ], + [ + "▁Pollen", + -14.685443878173828 + ], + [ + "▁Modena", + -14.685466766357422 + ], + [ + "▁Gomes", + -14.685480117797852 + ], + [ + "▁queso", + -14.685503005981444 + ], + [ + "▁Chopper", + -14.685527801513672 + ], + [ + "Franco", + -14.685544967651367 + ], + [ + "crisp", + -14.685544967651367 + ], + [ + "▁sass", + -14.68554973602295 + ], + [ + "Organized", + -14.685575485229492 + ], + [ + "animated", + -14.685576438903809 + ], + [ + "passive", + -14.68558120727539 + ], + [ + "koro", + -14.685603141784668 + ], + [ + "▁frolic", + -14.6856107711792 + ], + [ + "▁forgettable", + -14.685663223266602 + ], + [ + "Incorporate", + -14.685680389404297 + ], + [ + "ZW", + -14.685711860656738 + ], + [ + "GAS", + -14.685724258422852 + ], + [ + "▁CFI", + -14.68573760986328 + ], + [ + "▁130,000", + -14.685746192932127 + ], + [ + "▁Retrofit", + -14.685772895812988 + ], + [ + "Gulf", + -14.685783386230469 + ], + [ + "Enquire", + -14.685798645019531 + ], + [ + "▁Cloak", + -14.685808181762695 + ], + [ + "velocity", + -14.685832023620604 + ], + [ + "Ridge", + -14.685872077941896 + ], + [ + "arum", + -14.685907363891602 + ], + [ + "▁LHC", + -14.685908317565918 + ], + [ + "4.9%", + -14.68592643737793 + ], + [ + "HVAC", + -14.68594741821289 + ], + [ + "homework", + -14.685993194580078 + ], + [ + "1917", + -14.686007499694824 + ], + [ + "Intentional", + -14.68603515625 + ], + [ + "isode", + -14.68604564666748 + ], + [ + "▁Prose", + -14.686068534851074 + ], + [ + "▁reiki", + -14.686073303222656 + ], + [ + "DHC", + -14.68617057800293 + ], + [ + "▁Asahi", + -14.68622875213623 + ], + [ + "UCLA", + -14.68625259399414 + ], + [ + "▁WBA", + -14.68631362915039 + ], + [ + "▁vermin", + -14.68632984161377 + ], + [ + "▁Adrien", + -14.686334609985352 + ], + [ + "▁shamanic", + -14.68643569946289 + ], + [ + "▁Bap", + -14.686469078063965 + ], + [ + "▁summoning", + -14.686545372009276 + ], + [ + "▁locksmithing", + -14.686579704284668 + ], + [ + "▁1805", + 
-14.686683654785156 + ], + [ + "▁Beaux", + -14.686814308166504 + ], + [ + "Playwright", + -14.686872482299805 + ], + [ + "NAV", + -14.686917304992676 + ], + [ + "XGA", + -14.686931610107422 + ], + [ + "▁quilters", + -14.686946868896484 + ], + [ + "▁PMO", + -14.686955451965332 + ], + [ + "upholstered", + -14.686993598937988 + ], + [ + "▁Waite", + -14.687006950378418 + ], + [ + "23%", + -14.687018394470217 + ], + [ + "Highlighting", + -14.68708324432373 + ], + [ + "▁convertor", + -14.687108039855955 + ], + [ + "Hull", + -14.687172889709473 + ], + [ + "Billing", + -14.687204360961914 + ], + [ + "▁internalize", + -14.687233924865724 + ], + [ + "Rosen", + -14.687288284301758 + ], + [ + "Faced", + -14.687442779541016 + ], + [ + "▁Mambo", + -14.68751621246338 + ], + [ + "AMBER", + -14.68752098083496 + ], + [ + "▁2/2", + -14.687541961669922 + ], + [ + "▁FWD", + -14.687588691711426 + ], + [ + "▁Dios", + -14.687686920166016 + ], + [ + "▁abstracted", + -14.687718391418455 + ], + [ + "▁SVR", + -14.687746047973633 + ], + [ + "reveal", + -14.688010215759276 + ], + [ + "▁meow", + -14.688047409057615 + ], + [ + "▁Swine", + -14.688067436218262 + ], + [ + "xxxx", + -14.688105583190918 + ], + [ + "▁Modal", + -14.688108444213867 + ], + [ + "Sgt", + -14.688124656677246 + ], + [ + "Sanders", + -14.68813705444336 + ], + [ + "▁Hades", + -14.688148498535156 + ], + [ + "VON", + -14.688226699829102 + ], + [ + "▁12:5", + -14.688262939453123 + ], + [ + "▁Incorrect", + -14.688395500183104 + ], + [ + "▁Mallet", + -14.688431739807127 + ], + [ + "Roth", + -14.688472747802734 + ], + [ + "Disruption", + -14.688535690307615 + ], + [ + "▁myelo", + -14.688726425170898 + ], + [ + "▁Caine", + -14.688756942749023 + ], + [ + "Yard", + -14.688798904418944 + ], + [ + "Estate", + -14.6888427734375 + ], + [ + "CFP", + -14.688852310180664 + ], + [ + "▁cabal", + -14.688879013061523 + ], + [ + "▁Odi", + -14.688946723937988 + ], + [ + "▁greenest", + -14.688994407653809 + ], + [ + "cruz", + -14.68913459777832 + ], + [ + "arno", + -14.689149856567385 + ], + [ + "▁Noord", + -14.689208030700684 + ], + [ + "▁Nils", + -14.689285278320312 + ], + [ + "▁Kaw", + -14.689287185668944 + ], + [ + "▁Raju", + -14.68931007385254 + ], + [ + "▁wanderer", + -14.689311027526855 + ], + [ + "▁mountaineer", + -14.689343452453612 + ], + [ + "▁laurel", + -14.689411163330078 + ], + [ + "▁Lanza", + -14.689522743225098 + ], + [ + "enton", + -14.68954849243164 + ], + [ + "▁insurgent", + -14.689579010009766 + ], + [ + "uncle", + -14.689729690551758 + ], + [ + "▁954-", + -14.689779281616213 + ], + [ + "▁Mino", + -14.689861297607422 + ], + [ + "▁Technic", + -14.689886093139648 + ], + [ + "▁dotting", + -14.689896583557127 + ], + [ + "INIT", + -14.689911842346191 + ], + [ + "▁bettor", + -14.68991470336914 + ], + [ + "Specialising", + -14.690000534057615 + ], + [ + "▁recheck", + -14.690135955810549 + ], + [ + "0-450", + -14.690217971801758 + ], + [ + "2.45", + -14.690338134765623 + ], + [ + "▁06:3", + -14.690375328063965 + ], + [ + "▁Maki", + -14.69039249420166 + ], + [ + "▁Stoneman", + -14.690394401550291 + ], + [ + "fern", + -14.69040298461914 + ], + [ + "▁Candid", + -14.690536499023438 + ], + [ + "Diamant", + -14.690627098083496 + ], + [ + "CVE", + -14.690654754638672 + ], + [ + "katsu", + -14.690692901611328 + ], + [ + "▁Woll", + -14.69072151184082 + ], + [ + "epic", + -14.690829277038574 + ], + [ + "▁transplanting", + -14.690840721130373 + ], + [ + "▁Wiggle", + -14.690841674804688 + ], + [ + "ANGER", + -14.690972328186035 + ], + [ + "Halo", + -14.690996170043944 + ], + 
[ + "▁Herbie", + -14.690998077392578 + ], + [ + "nama", + -14.691123008728027 + ], + [ + "▁Deeply", + -14.691149711608888 + ], + [ + "▁Cleary", + -14.69118595123291 + ], + [ + "Mhz", + -14.691206932067873 + ], + [ + "▁vertebra", + -14.691213607788086 + ], + [ + "oulou", + -14.691239356994627 + ], + [ + "tonne", + -14.691244125366213 + ], + [ + "bedding", + -14.69151496887207 + ], + [ + "▁shampooing", + -14.691685676574709 + ], + [ + "▁13.1", + -14.691862106323242 + ], + [ + "▁Eko", + -14.691882133483888 + ], + [ + "▁Arpa", + -14.691886901855469 + ], + [ + "▁republish", + -14.691944122314451 + ], + [ + "▁Katsu", + -14.691993713378906 + ], + [ + "▁5/6", + -14.692103385925291 + ], + [ + "OCO", + -14.692198753356934 + ], + [ + "▁rekindle", + -14.692198753356934 + ], + [ + "▁collate", + -14.692234992980955 + ], + [ + "▁eX", + -14.692316055297852 + ], + [ + "Wai", + -14.692337036132812 + ], + [ + "▁THING", + -14.692481994628906 + ], + [ + "▁colloquial", + -14.692556381225586 + ], + [ + "rrrrr", + -14.6925630569458 + ], + [ + "Trouble", + -14.69257926940918 + ], + [ + "5.1%", + -14.692683219909668 + ], + [ + "▁Sura", + -14.692760467529297 + ], + [ + "▁irradiat", + -14.69276237487793 + ], + [ + "Snorkel", + -14.692789077758787 + ], + [ + "Proponents", + -14.692800521850586 + ], + [ + "▁Hastelloy", + -14.692800521850586 + ], + [ + "▁Renovated", + -14.692800521850586 + ], + [ + "▁Stochastic", + -14.692800521850586 + ], + [ + "▁Thessalonians", + -14.692800521850586 + ], + [ + "▁Wikileaks", + -14.692800521850586 + ], + [ + "▁activites", + -14.692800521850586 + ], + [ + "▁biscotti", + -14.692800521850586 + ], + [ + "▁blustery", + -14.692800521850586 + ], + [ + "▁cavitation", + -14.692800521850586 + ], + [ + "▁disparaging", + -14.692800521850586 + ], + [ + "▁extrinsic", + -14.692800521850586 + ], + [ + "▁impotence", + -14.692800521850586 + ], + [ + "▁pessimism", + -14.692800521850586 + ], + [ + "▁scrabble", + -14.692800521850586 + ], + [ + "▁shag", + -14.692800521850586 + ], + [ + "▁ubiquity", + -14.692800521850586 + ], + [ + "▁ultimatum", + -14.692800521850586 + ], + [ + "▁Andromeda", + -14.692801475524902 + ], + [ + "▁circumcision", + -14.692801475524902 + ], + [ + "▁Esperanza", + -14.69280242919922 + ], + [ + "▁Grimsby", + -14.692803382873535 + ], + [ + "▁Manassas", + -14.692803382873535 + ], + [ + "▁discontinuation", + -14.692803382873535 + ], + [ + "▁imprudent", + -14.692804336547852 + ], + [ + "▁reccomend", + -14.692804336547852 + ], + [ + "▁unabated", + -14.692804336547852 + ], + [ + "▁Earnhardt", + -14.692806243896484 + ], + [ + "▁LibreOffice", + -14.692806243896484 + ], + [ + "▁Schmitt", + -14.692806243896484 + ], + [ + "▁bollywood", + -14.692806243896484 + ], + [ + "▁scarecrow", + -14.692806243896484 + ], + [ + "▁RESEARCH", + -14.6928071975708 + ], + [ + "▁IMDb", + -14.692808151245115 + ], + [ + "▁Moravian", + -14.69281005859375 + ], + [ + "▁Herefordshire", + -14.692811965942385 + ], + [ + "▁Polyethylene", + -14.692811965942385 + ], + [ + "▁Sediment", + -14.692811965942385 + ], + [ + "▁SOLUTIONS", + -14.692819595336914 + ], + [ + "▁Critique", + -14.692828178405762 + ], + [ + "▁Kissinger", + -14.692829132080078 + ], + [ + "▁emulating", + -14.69283390045166 + ], + [ + "▁Bellagio", + -14.69283676147461 + ], + [ + "▁Bancroft", + -14.692840576171877 + ], + [ + "▁Reclamation", + -14.692842483520508 + ], + [ + "▁(1970)", + -14.69285774230957 + ], + [ + "▁orifice", + -14.692861557006836 + ], + [ + "▁amiable", + -14.692885398864746 + ], + [ + ",00,000", + -14.692891120910645 + ], + [ + "▁Dooley", + 
-14.69289493560791 + ], + [ + "▁Gannon", + -14.69289493560791 + ], + [ + "▁vermouth", + -14.692898750305176 + ], + [ + "▁Utilization", + -14.692907333374023 + ], + [ + "▁forlorn", + -14.692909240722656 + ], + [ + "▁Bennington", + -14.692925453186035 + ], + [ + "▁octet", + -14.692928314208984 + ], + [ + "Convenient", + -14.69295883178711 + ], + [ + "▁Nonfiction", + -14.69296646118164 + ], + [ + "▁decommissioned", + -14.692967414855955 + ], + [ + "▁$9.95", + -14.692971229553224 + ], + [ + "▁curable", + -14.693001747131348 + ], + [ + "▁landlocked", + -14.69300937652588 + ], + [ + "▁Carrara", + -14.693010330200195 + ], + [ + "▁Widescreen", + -14.693013191223145 + ], + [ + "▁Dodson", + -14.693015098571776 + ], + [ + "hene", + -14.69301700592041 + ], + [ + "▁unwrapped", + -14.693017959594728 + ], + [ + "▁denver", + -14.693020820617676 + ], + [ + "▁Juve", + -14.693035125732422 + ], + [ + "▁FMLA", + -14.69308376312256 + ], + [ + "$12", + -14.693098068237305 + ], + [ + "▁Lumix", + -14.69311809539795 + ], + [ + "▁jag", + -14.693134307861328 + ], + [ + "▁Mashable", + -14.69316291809082 + ], + [ + "kilometre", + -14.693178176879885 + ], + [ + "Tragically", + -14.6931791305542 + ], + [ + "capacitor", + -14.693187713623049 + ], + [ + "▁Bronte", + -14.693196296691896 + ], + [ + "▁Livonia", + -14.693197250366213 + ], + [ + "▁Adware", + -14.69320583343506 + ], + [ + "FRONT", + -14.693207740783691 + ], + [ + "horror", + -14.693218231201172 + ], + [ + "▁windowsill", + -14.69322395324707 + ], + [ + "Frederick", + -14.69322681427002 + ], + [ + "Entrance", + -14.693227767944336 + ], + [ + "identifiable", + -14.693229675292969 + ], + [ + "▁Naveen", + -14.693236351013184 + ], + [ + "Sofia", + -14.6932373046875 + ], + [ + "▁metaphorically", + -14.693269729614258 + ], + [ + "Carolina", + -14.69327449798584 + ], + [ + "▁Ferti", + -14.693282127380373 + ], + [ + "fulfilling", + -14.693286895751951 + ], + [ + "haut", + -14.693293571472168 + ], + [ + "Beacon", + -14.693296432495115 + ], + [ + "▁refillable", + -14.693310737609863 + ], + [ + "sqft", + -14.693316459655762 + ], + [ + "Engagement", + -14.693320274353027 + ], + [ + "▁iMovie", + -14.693334579467772 + ], + [ + "▁Akhtar", + -14.693350791931152 + ], + [ + "▁Hickman", + -14.69335651397705 + ], + [ + "▁macbook", + -14.693404197692873 + ], + [ + "▁Mindset", + -14.693406105041504 + ], + [ + "▁hushed", + -14.6934175491333 + ], + [ + "▁Fain", + -14.693506240844728 + ], + [ + "hund", + -14.693511962890623 + ], + [ + "▁spooked", + -14.693523406982422 + ], + [ + "▁Marais", + -14.693537712097168 + ], + [ + "Turbo", + -14.693538665771484 + ], + [ + "▁Gatos", + -14.69354248046875 + ], + [ + "Alpine", + -14.6935453414917 + ], + [ + "▁Frameless", + -14.693546295166016 + ], + [ + "▁jumbled", + -14.693547248840332 + ], + [ + "▁Superbike", + -14.69355297088623 + ], + [ + "▁congresses", + -14.693557739257812 + ], + [ + "▁Herschel", + -14.693643569946287 + ], + [ + "▁preformed", + -14.693650245666504 + ], + [ + "▁outfitting", + -14.693657875061035 + ], + [ + "Safari", + -14.69367790222168 + ], + [ + "Dive", + -14.69370937347412 + ], + [ + "▁quack", + -14.693754196166992 + ], + [ + "▁NBN", + -14.69376277923584 + ], + [ + "▁Curio", + -14.693781852722168 + ], + [ + "Kerry", + -14.693782806396484 + ], + [ + "▁Pasture", + -14.693806648254396 + ], + [ + "▁confound", + -14.693807601928713 + ], + [ + "clone", + -14.693812370300291 + ], + [ + "WATCH", + -14.693851470947266 + ], + [ + "▁VIX", + -14.693862915039062 + ], + [ + "▁Asha", + -14.693902969360352 + ], + [ + "▁Minne", + 
-14.693910598754885 + ], + [ + "6:18", + -14.69391918182373 + ], + [ + "▁Sprinkler", + -14.693960189819336 + ], + [ + "skating", + -14.693968772888184 + ], + [ + "▁leashes", + -14.69397258758545 + ], + [ + "▁trickling", + -14.69398593902588 + ], + [ + "Cinema", + -14.694047927856444 + ], + [ + "▁Raoul", + -14.694058418273926 + ], + [ + ":10.10", + -14.694059371948242 + ], + [ + "▁Rattan", + -14.694071769714355 + ], + [ + "▁Gilman", + -14.694096565246582 + ], + [ + "▁temperamental", + -14.694189071655272 + ], + [ + "▁plait", + -14.69419002532959 + ], + [ + "▁distorting", + -14.694232940673828 + ], + [ + "Compute", + -14.694262504577637 + ], + [ + "ixon", + -14.694339752197266 + ], + [ + "▁Hyun", + -14.694358825683594 + ], + [ + "▁GAO", + -14.694406509399414 + ], + [ + "Haul", + -14.694462776184082 + ], + [ + "▁signified", + -14.694500923156738 + ], + [ + "▁vowing", + -14.694507598876951 + ], + [ + "▁Shorten", + -14.6945161819458 + ], + [ + "▁erred", + -14.69452667236328 + ], + [ + "▁Hotmail", + -14.694534301757812 + ], + [ + "▁Ashburn", + -14.694546699523926 + ], + [ + "roku", + -14.694555282592772 + ], + [ + "▁handicapper", + -14.694555282592772 + ], + [ + "ndig", + -14.694604873657228 + ], + [ + "▁Bobbie", + -14.694607734680176 + ], + [ + "kev", + -14.69464111328125 + ], + [ + "▁Unblock", + -14.69465446472168 + ], + [ + "INGER", + -14.694674491882324 + ], + [ + "BIM", + -14.69473361968994 + ], + [ + "▁13-15", + -14.694766998291016 + ], + [ + "▁icebreaker", + -14.694774627685549 + ], + [ + "Photographer", + -14.69485855102539 + ], + [ + "▁Chave", + -14.69486141204834 + ], + [ + "▁warmup", + -14.694903373718262 + ], + [ + "▁VEX", + -14.694969177246094 + ], + [ + "200,000", + -14.694975852966309 + ], + [ + "▁futur", + -14.695077896118164 + ], + [ + "▁Scared", + -14.695127487182615 + ], + [ + "▁12.4", + -14.695202827453612 + ], + [ + "▁lunge", + -14.695213317871094 + ], + [ + "msi", + -14.69521427154541 + ], + [ + "▁Crossover", + -14.695219993591309 + ], + [ + "▁brit", + -14.695249557495115 + ], + [ + "▁Foxy", + -14.69532871246338 + ], + [ + "dici", + -14.69537353515625 + ], + [ + "▁Nasir", + -14.69538116455078 + ], + [ + "uler", + -14.695427894592283 + ], + [ + "imu", + -14.695479393005373 + ], + [ + "POD", + -14.69554042816162 + ], + [ + "fever", + -14.695611000061035 + ], + [ + "▁hock", + -14.695733070373535 + ], + [ + "▁ESO", + -14.69577407836914 + ], + [ + "5050", + -14.695783615112305 + ], + [ + "▁Somme", + -14.695798873901367 + ], + [ + "uelo", + -14.695829391479492 + ], + [ + "▁interment", + -14.6958646774292 + ], + [ + "▁moen", + -14.695998191833496 + ], + [ + "▁JOE", + -14.696051597595217 + ], + [ + "▁remaster", + -14.696064949035645 + ], + [ + "▁DMX", + -14.696104049682615 + ], + [ + "1964", + -14.696128845214844 + ], + [ + "▁reconnected", + -14.696245193481444 + ], + [ + "Atlas", + -14.696287155151367 + ], + [ + "▁Zappa", + -14.69629955291748 + ], + [ + "▁BHA", + -14.69639015197754 + ], + [ + "▁pinstripe", + -14.696404457092283 + ], + [ + "lowest", + -14.696405410766602 + ], + [ + "nnington", + -14.696478843688965 + ], + [ + "▁coincid", + -14.69648265838623 + ], + [ + "Timber", + -14.696490287780762 + ], + [ + "Mist", + -14.696698188781738 + ], + [ + "mbps", + -14.696792602539062 + ], + [ + "▁Wij", + -14.696849822998049 + ], + [ + "▁pushchair", + -14.696868896484377 + ], + [ + "▁TNA", + -14.696962356567385 + ], + [ + "/400", + -14.696966171264648 + ], + [ + "▁Caye", + -14.697165489196776 + ], + [ + "▁williams", + -14.69719696044922 + ], + [ + "▁Blanchett", + -14.697282791137695 + 
], + [ + "▁BRING", + -14.697296142578123 + ], + [ + "DAM", + -14.697306632995604 + ], + [ + "Kom", + -14.697355270385742 + ], + [ + "▁Hearn", + -14.69736099243164 + ], + [ + "1:07", + -14.69738483428955 + ], + [ + "▁refereed", + -14.69743537902832 + ], + [ + "▁Masai", + -14.69747829437256 + ], + [ + "▁ibn", + -14.69756031036377 + ], + [ + "packaging", + -14.697562217712402 + ], + [ + "▁breathtakingly", + -14.697583198547363 + ], + [ + "▁providence", + -14.697684288024902 + ], + [ + "▁08:5", + -14.697733879089355 + ], + [ + "▁MEET", + -14.697735786437988 + ], + [ + "Printing", + -14.697793960571287 + ], + [ + "▁$77", + -14.69780445098877 + ], + [ + "▁$3.4", + -14.697815895080566 + ], + [ + "▁Neff", + -14.69782829284668 + ], + [ + "▁Gentile", + -14.697870254516602 + ], + [ + "93)", + -14.697964668273926 + ], + [ + "haj", + -14.698068618774414 + ], + [ + "TDA", + -14.698124885559082 + ], + [ + "MFP", + -14.698183059692385 + ], + [ + "thali", + -14.6982421875 + ], + [ + "▁UPF", + -14.698331832885742 + ], + [ + "▁retry", + -14.698402404785156 + ], + [ + "▁BVI", + -14.698530197143556 + ], + [ + "vaca", + -14.698760986328123 + ], + [ + "▁NEM", + -14.698819160461426 + ], + [ + "artan", + -14.69884204864502 + ], + [ + "CDA", + -14.698917388916016 + ], + [ + "exceptional", + -14.699041366577148 + ], + [ + "▁Tema", + -14.699052810668944 + ], + [ + "▁Daring", + -14.699200630187988 + ], + [ + "▁10-0", + -14.699219703674316 + ], + [ + "▁Ilya", + -14.699220657348633 + ], + [ + "▁choco", + -14.699337005615234 + ], + [ + "EVM", + -14.699363708496094 + ], + [ + "POSE", + -14.699397087097168 + ], + [ + "▁Taub", + -14.69953441619873 + ], + [ + "cution", + -14.699559211730955 + ], + [ + "fay", + -14.6996488571167 + ], + [ + "rural", + -14.69968032836914 + ], + [ + "▁gripper", + -14.699722290039062 + ], + [ + "▁Jamb", + -14.699739456176758 + ], + [ + "ACL", + -14.69986057281494 + ], + [ + "▁SPIN", + -14.699950218200684 + ], + [ + "▁flout", + -14.699950218200684 + ], + [ + "▁Leben", + -14.70001983642578 + ], + [ + "1:25", + -14.700078964233398 + ], + [ + "▁strikeout", + -14.700108528137209 + ], + [ + "▁13:3", + -14.700209617614746 + ], + [ + "▁$170", + -14.700292587280272 + ], + [ + "▁12000", + -14.700324058532717 + ], + [ + "tronix", + -14.700357437133787 + ], + [ + "▁Weill", + -14.700366973876951 + ], + [ + "WHM", + -14.700429916381836 + ], + [ + "depot", + -14.700446128845217 + ], + [ + "HIA", + -14.700494766235352 + ], + [ + "BIKE", + -14.700584411621094 + ], + [ + "▁HSN", + -14.700613975524902 + ], + [ + "▁Crafter", + -14.700618743896484 + ], + [ + "▁CMR", + -14.700698852539062 + ], + [ + "Rai", + -14.700770378112791 + ], + [ + "VZ", + -14.700790405273438 + ], + [ + "▁Katelyn", + -14.700817108154297 + ], + [ + "▁remiss", + -14.700910568237305 + ], + [ + "narayan", + -14.700933456420898 + ], + [ + "duff", + -14.700963973999023 + ], + [ + "Chick", + -14.700974464416504 + ], + [ + "▁£45", + -14.70097827911377 + ], + [ + "▁CONTAIN", + -14.701042175292969 + ], + [ + "▁Corrupt", + -14.701058387756348 + ], + [ + "▁Forza", + -14.701064109802246 + ], + [ + "Liposuction", + -14.701065063476562 + ], + [ + "▁Ambleside", + -14.701065063476562 + ], + [ + "▁Dundalk", + -14.701065063476562 + ], + [ + "▁Escondido", + -14.701065063476562 + ], + [ + "▁McFarland", + -14.701065063476562 + ], + [ + "▁Mindanao", + -14.701065063476562 + ], + [ + "▁Monrovia", + -14.701065063476562 + ], + [ + "▁Serengeti", + -14.701065063476562 + ], + [ + "▁Westmoreland", + -14.701065063476562 + ], + [ + "▁caribou", + -14.701065063476562 + ], + [ + 
"▁climactic", + -14.701065063476562 + ], + [ + "▁crannies", + -14.701065063476562 + ], + [ + "▁ecclesiastical", + -14.701065063476562 + ], + [ + "▁finasteride", + -14.701065063476562 + ], + [ + "▁hematite", + -14.701065063476562 + ], + [ + "▁holocaust", + -14.701065063476562 + ], + [ + "▁inevitability", + -14.701065063476562 + ], + [ + "▁interlocutor", + -14.701065063476562 + ], + [ + "▁nanotubes", + -14.701065063476562 + ], + [ + "▁nolvadex", + -14.701065063476562 + ], + [ + "▁paraphrasing", + -14.701065063476562 + ], + [ + "▁Amphitheater", + -14.70106601715088 + ], + [ + "▁CHURCH", + -14.70106601715088 + ], + [ + "▁Pricewaterhouse", + -14.70106601715088 + ], + [ + "Ecclesiastes", + -14.701066970825195 + ], + [ + "▁Aadhar", + -14.701066970825195 + ], + [ + "▁Mesquite", + -14.701066970825195 + ], + [ + "▁focaccia", + -14.701066970825195 + ], + [ + "▁reticle", + -14.701066970825195 + ], + [ + "▁apprised", + -14.701067924499512 + ], + [ + "▁Krueger", + -14.701069831848145 + ], + [ + "▁albino", + -14.70107078552246 + ], + [ + "▁Vertigo", + -14.701071739196776 + ], + [ + "▁colonoscopy", + -14.701072692871094 + ], + [ + "▁neoclassical", + -14.701072692871094 + ], + [ + "▁nitrite", + -14.701072692871094 + ], + [ + "▁hydrothermal", + -14.701077461242676 + ], + [ + "▁Maurer", + -14.701080322265623 + ], + [ + "▁Propeller", + -14.701080322265623 + ], + [ + "▁Arvada", + -14.701085090637209 + ], + [ + "▁Shrek", + -14.701085090637209 + ], + [ + "▁coziness", + -14.701087951660156 + ], + [ + "▁Minden", + -14.701089859008787 + ], + [ + "▁Cloudflare", + -14.701091766357422 + ], + [ + "▁Muskoka", + -14.701091766357422 + ], + [ + "▁electors", + -14.701095581054688 + ], + [ + "▁veracity", + -14.70109748840332 + ], + [ + "Heavily", + -14.701098442077637 + ], + [ + "▁CISSP", + -14.701099395751951 + ], + [ + "▁Aberdeenshire", + -14.701101303100586 + ], + [ + "▁sinusitis", + -14.701105117797852 + ], + [ + "▁Depository", + -14.7011079788208 + ], + [ + "▁Gilded", + -14.70111083984375 + ], + [ + "▁subroutine", + -14.70111083984375 + ], + [ + "▁Microfiber", + -14.701112747192385 + ], + [ + "▁Leadpages", + -14.70111846923828 + ], + [ + "▁Suitcase", + -14.701120376586914 + ], + [ + "▁Paseo", + -14.701123237609863 + ], + [ + "▁Manitou", + -14.701126098632812 + ], + [ + "▁Multilingual", + -14.701126098632812 + ], + [ + "▁Sophos", + -14.701126098632812 + ], + [ + "▁$1.25", + -14.701149940490724 + ], + [ + "▁Plummer", + -14.70116138458252 + ], + [ + "▁Sweetheart", + -14.701168060302734 + ], + [ + "▁micrograms", + -14.701172828674316 + ], + [ + "wolves", + -14.70118522644043 + ], + [ + "▁fifa", + -14.70118808746338 + ], + [ + "▁roping", + -14.701189994812012 + ], + [ + "▁WORKING", + -14.701190948486328 + ], + [ + "▁ductile", + -14.701194763183594 + ], + [ + "▁Boswell", + -14.70120334625244 + ], + [ + "▁Herndon", + -14.701217651367188 + ], + [ + "▁Intercultural", + -14.701218605041504 + ], + [ + "▁Hofstra", + -14.701239585876465 + ], + [ + "▁Subversion", + -14.701248168945312 + ], + [ + "▁throes", + -14.701251983642578 + ], + [ + "▁Hamill", + -14.701273918151855 + ], + [ + "Grassroots", + -14.70129108428955 + ], + [ + "▁preamble", + -14.7012939453125 + ], + [ + "baja", + -14.701301574707031 + ], + [ + "▁YOUNG", + -14.701305389404297 + ], + [ + "▁laborer", + -14.701349258422852 + ], + [ + "▁wingspan", + -14.701367378234863 + ], + [ + "Tight", + -14.701451301574709 + ], + [ + "▁descendent", + -14.701478004455566 + ], + [ + "absolute", + -14.7014799118042 + ], + [ + "▁Sweetwater", + -14.70150089263916 + ], + [ + "▁dancehall", + 
-14.701502799987791 + ], + [ + "▁obliterated", + -14.701522827148438 + ], + [ + "affirming", + -14.70153522491455 + ], + [ + "▁adsorb", + -14.701581001281738 + ], + [ + "▁mudroom", + -14.701581001281738 + ], + [ + "Sleeve", + -14.701605796813965 + ], + [ + "▁Dumpling", + -14.70161247253418 + ], + [ + "▁Underhill", + -14.701617240905762 + ], + [ + "▁kde", + -14.701623916625977 + ], + [ + "Integrating", + -14.701642990112305 + ], + [ + "hectare", + -14.70164680480957 + ], + [ + "complicated", + -14.701647758483888 + ], + [ + "Tournament", + -14.70165729522705 + ], + [ + "gression", + -14.701668739318848 + ], + [ + "▁supercharger", + -14.701669692993164 + ], + [ + "▁Suma", + -14.70167064666748 + ], + [ + "▁Tummy", + -14.701671600341797 + ], + [ + "iendo", + -14.701672554016112 + ], + [ + "▁Godhead", + -14.701685905456545 + ], + [ + "supplement", + -14.70168685913086 + ], + [ + "Suzanne", + -14.701688766479492 + ], + [ + "gadget", + -14.701693534851074 + ], + [ + "spectral", + -14.701693534851074 + ], + [ + "Approach", + -14.70169734954834 + ], + [ + "Katherine", + -14.70169734954834 + ], + [ + "Opportunity", + -14.701699256896973 + ], + [ + "Distribution", + -14.701700210571287 + ], + [ + "Physics", + -14.701703071594238 + ], + [ + "Norwegian", + -14.701704025268556 + ], + [ + "▁Swab", + -14.701704978942873 + ], + [ + "Mississippi", + -14.701705932617188 + ], + [ + "regulating", + -14.70171070098877 + ], + [ + "Vienna", + -14.70171356201172 + ], + [ + "prompt", + -14.70171356201172 + ], + [ + "▁unleashes", + -14.701720237731934 + ], + [ + "Rhode", + -14.701730728149414 + ], + [ + "▁instructable", + -14.701743125915527 + ], + [ + "configure", + -14.70174503326416 + ], + [ + "▁HEI", + -14.701861381530762 + ], + [ + "▁Photon", + -14.701865196228027 + ], + [ + "Dong", + -14.701876640319824 + ], + [ + "▁Garten", + -14.701878547668455 + ], + [ + "▁Hye", + -14.701916694641112 + ], + [ + "▁Werk", + -14.701919555664062 + ], + [ + "▁goddesses", + -14.701921463012695 + ], + [ + "▁Dern", + -14.701939582824709 + ], + [ + "▁multicolor", + -14.701944351196287 + ], + [ + "ruf", + -14.701946258544922 + ], + [ + "▁Laine", + -14.70195770263672 + ], + [ + "▁amicus", + -14.701959609985352 + ], + [ + "baseball", + -14.701976776123049 + ], + [ + "Vegan", + -14.70200252532959 + ], + [ + "laya", + -14.702016830444336 + ], + [ + "Suppress", + -14.702045440673828 + ], + [ + "▁windproof", + -14.702051162719728 + ], + [ + "▁stingray", + -14.702083587646484 + ], + [ + "authorization", + -14.702086448669434 + ], + [ + "Hindi", + -14.7020902633667 + ], + [ + "Increasingly", + -14.702133178710938 + ], + [ + "▁Pixma", + -14.702138900756836 + ], + [ + "Nurture", + -14.702165603637695 + ], + [ + "▁Standardization", + -14.702237129211426 + ], + [ + "▁Perugia", + -14.70225429534912 + ], + [ + "bellied", + -14.702300071716309 + ], + [ + "Victory", + -14.702311515808104 + ], + [ + "▁MATTER", + -14.702325820922852 + ], + [ + "▁Boyne", + -14.70236110687256 + ], + [ + "▁faltering", + -14.702372550964355 + ], + [ + "▁tibial", + -14.702407836914062 + ], + [ + "▁Roadshow", + -14.702441215515137 + ], + [ + "▁Eleventh", + -14.702469825744627 + ], + [ + "▁Gilda", + -14.702474594116213 + ], + [ + "Planner", + -14.70248031616211 + ], + [ + "ptera", + -14.702564239501951 + ], + [ + "eBay", + -14.702569961547852 + ], + [ + "3.5%", + -14.702603340148926 + ], + [ + "▁Lank", + -14.702607154846191 + ], + [ + "▁starches", + -14.702624320983888 + ], + [ + "▁$3.6", + -14.7027006149292 + ], + [ + "▁Czar", + -14.702717781066896 + ], + [ + 
"▁reformation", + -14.702759742736816 + ], + [ + "=\"_", + -14.702780723571776 + ], + [ + "mune", + -14.702802658081056 + ], + [ + "0-00", + -14.7028226852417 + ], + [ + "2040", + -14.702836036682127 + ], + [ + "spite", + -14.702900886535645 + ], + [ + "▁pushups", + -14.702972412109377 + ], + [ + "expressed", + -14.703054428100586 + ], + [ + "niak", + -14.703164100646973 + ], + [ + "▁Hester", + -14.703166007995604 + ], + [ + "▁Wayland", + -14.703227996826172 + ], + [ + "▁peacefulness", + -14.703329086303713 + ], + [ + "▁mejor", + -14.703436851501465 + ], + [ + "mood", + -14.703438758850098 + ], + [ + "▁Lohan", + -14.703487396240234 + ], + [ + "SIE", + -14.703521728515623 + ], + [ + "▁1760", + -14.703611373901367 + ], + [ + "▁backhoe", + -14.7036714553833 + ], + [ + "▁Tenor", + -14.703701972961426 + ], + [ + "NIK", + -14.703777313232422 + ], + [ + "immigration", + -14.703824043273926 + ], + [ + "▁Saki", + -14.703828811645508 + ], + [ + "▁Blatt", + -14.704066276550291 + ], + [ + "▁Caf", + -14.704115867614746 + ], + [ + "▁Sportsbook", + -14.704160690307615 + ], + [ + "manuel", + -14.704267501831056 + ], + [ + "Weber", + -14.704272270202637 + ], + [ + "Packed", + -14.704456329345703 + ], + [ + "▁Centerville", + -14.7047119140625 + ], + [ + "Protesters", + -14.70480251312256 + ], + [ + "▁citrusy", + -14.704859733581545 + ], + [ + "ploid", + -14.704876899719238 + ], + [ + "▁mister", + -14.705000877380373 + ], + [ + "▁Myspace", + -14.705215454101562 + ], + [ + "▁dormer", + -14.70523166656494 + ], + [ + "Omni", + -14.705281257629396 + ], + [ + "Helper", + -14.705313682556152 + ], + [ + "▁Neue", + -14.705349922180176 + ], + [ + "▁Goggle", + -14.70537567138672 + ], + [ + "▁Pulled", + -14.705377578735352 + ], + [ + "airport", + -14.705438613891602 + ], + [ + "▁ballpoint", + -14.705488204956056 + ], + [ + "▁UAC", + -14.705510139465332 + ], + [ + "▁Margoli", + -14.705526351928713 + ], + [ + "ehring", + -14.705540657043455 + ], + [ + "valuable", + -14.705567359924316 + ], + [ + "maru", + -14.70567226409912 + ], + [ + "rize", + -14.705772399902344 + ], + [ + "▁thug", + -14.7058687210083 + ], + [ + "▁Crawler", + -14.705923080444336 + ], + [ + "UNDER", + -14.705924034118652 + ], + [ + "▁Taf", + -14.705928802490234 + ], + [ + "4-17", + -14.7060546875 + ], + [ + "1:38", + -14.706140518188477 + ], + [ + "appearance", + -14.70614242553711 + ], + [ + "▁purveyor", + -14.70614242553711 + ], + [ + "▁tidbit", + -14.706157684326172 + ], + [ + "▁osteopath", + -14.706158638000488 + ], + [ + "rigid", + -14.706257820129396 + ], + [ + "ное", + -14.706316947937012 + ], + [ + "chalk", + -14.7063627243042 + ], + [ + "leek", + -14.7064208984375 + ], + [ + "1/3", + -14.706461906433104 + ], + [ + "▁Hangout", + -14.706514358520508 + ], + [ + "trimmed", + -14.706549644470217 + ], + [ + "▁Seite", + -14.70660400390625 + ], + [ + "oooh", + -14.70672607421875 + ], + [ + "ugli", + -14.706788063049316 + ], + [ + "▁tosses", + -14.706804275512695 + ], + [ + "▁nestle", + -14.70691967010498 + ], + [ + "worship", + -14.707171440124512 + ], + [ + "ggled", + -14.707182884216309 + ], + [ + "▁$135", + -14.707235336303713 + ], + [ + "lactic", + -14.70727252960205 + ], + [ + "delay", + -14.707283020019531 + ], + [ + "▁bisect", + -14.70729637145996 + ], + [ + "omitting", + -14.70729923248291 + ], + [ + "▁fritters", + -14.707367897033691 + ], + [ + "▁trite", + -14.707569122314451 + ], + [ + "▁transcendental", + -14.70772647857666 + ], + [ + "FIF", + -14.707731246948242 + ], + [ + "▁Heaton", + -14.707762718200684 + ], + [ + "azan", + 
-14.707798957824709 + ], + [ + "ophore", + -14.707807540893556 + ], + [ + "1050", + -14.707842826843262 + ], + [ + "▁Capita", + -14.707853317260742 + ], + [ + "scrip", + -14.708037376403809 + ], + [ + "▁shocker", + -14.708050727844238 + ], + [ + "12%", + -14.708078384399414 + ], + [ + "lissa", + -14.708184242248535 + ], + [ + "▁$0.7", + -14.708203315734863 + ], + [ + "flick", + -14.708218574523926 + ], + [ + "Equal", + -14.708294868469238 + ], + [ + "▁WITHIN", + -14.708318710327148 + ], + [ + "▁ducting", + -14.708344459533691 + ], + [ + "winkle", + -14.708409309387209 + ], + [ + "▁shortcoming", + -14.70847511291504 + ], + [ + "▁ARTIST", + -14.708477973937988 + ], + [ + "Earl", + -14.708528518676758 + ], + [ + "Troubleshoot", + -14.708602905273438 + ], + [ + "▁JSP", + -14.708709716796877 + ], + [ + "ryu", + -14.708782196044922 + ], + [ + "RETE", + -14.708927154541016 + ], + [ + "▁rehydrate", + -14.70900535583496 + ], + [ + "▁slasher", + -14.709013938903809 + ], + [ + "▁rougher", + -14.70901870727539 + ], + [ + "fication", + -14.709049224853516 + ], + [ + "▁IMDB", + -14.709087371826172 + ], + [ + "▁gravely", + -14.709087371826172 + ], + [ + "▁gratin", + -14.709226608276367 + ], + [ + "▁paddled", + -14.709232330322266 + ], + [ + "▁RPS", + -14.709332466125488 + ], + [ + "▁Cagliari", + -14.70939826965332 + ], + [ + "▁Greenbrier", + -14.70939826965332 + ], + [ + "▁Obsidian", + -14.70939826965332 + ], + [ + "▁Rousseau", + -14.70939826965332 + ], + [ + "▁Saatchi", + -14.70939826965332 + ], + [ + "▁Selangor", + -14.70939826965332 + ], + [ + "▁Shibuya", + -14.70939826965332 + ], + [ + "▁Wenatchee", + -14.70939826965332 + ], + [ + "▁adjudicator", + -14.70939826965332 + ], + [ + "▁ciprofloxacin", + -14.70939826965332 + ], + [ + "▁demeanour", + -14.70939826965332 + ], + [ + "▁disheartened", + -14.70939826965332 + ], + [ + "▁ellipse", + -14.70939826965332 + ], + [ + "▁hyacinth", + -14.70939826965332 + ], + [ + "▁indecisive", + -14.70939826965332 + ], + [ + "▁sanctification", + -14.70939826965332 + ], + [ + "▁shrivel", + -14.70939826965332 + ], + [ + "▁snowmobiling", + -14.70939826965332 + ], + [ + "▁unrecognizable", + -14.70939826965332 + ], + [ + "▁GIVEAWAY", + -14.709399223327637 + ], + [ + "▁prioritising", + -14.709399223327637 + ], + [ + "▁vengeful", + -14.709399223327637 + ], + [ + "Nittany", + -14.709400177001951 + ], + [ + "▁Menzies", + -14.709400177001951 + ], + [ + "Immersive", + -14.70940113067627 + ], + [ + "▁Bracknell", + -14.70940113067627 + ], + [ + "▁Lattice", + -14.70940113067627 + ], + [ + "Embellish", + -14.709402084350586 + ], + [ + "▁notarized", + -14.70940399169922 + ], + [ + "▁Diazepam", + -14.709404945373535 + ], + [ + "▁symposia", + -14.709405899047852 + ], + [ + "▁Mangalore", + -14.709406852722168 + ], + [ + "▁nibbling", + -14.709406852722168 + ], + [ + "▁Kalahari", + -14.709407806396484 + ], + [ + "▁Blount", + -14.709409713745115 + ], + [ + "▁MetaTrader", + -14.709409713745115 + ], + [ + "▁quipped", + -14.709410667419434 + ], + [ + "▁Waverley", + -14.709413528442385 + ], + [ + "▁Yandex", + -14.709417343139648 + ], + [ + "▁cetera", + -14.709417343139648 + ], + [ + "▁Barrister", + -14.709421157836914 + ], + [ + "▁wedded", + -14.709421157836914 + ], + [ + "▁Pepperdine", + -14.709423065185549 + ], + [ + "Siddharth", + -14.709424018859863 + ], + [ + "▁antipsychotic", + -14.709428787231444 + ], + [ + "▁Corporal", + -14.709440231323242 + ], + [ + "▁shoo", + -14.709447860717772 + ], + [ + "▁Breyer", + -14.709454536437988 + ], + [ + "▁Altoona", + -14.709457397460938 + ], + [ + 
"▁Sarkar", + -14.709465026855469 + ], + [ + "▁DisplayPort", + -14.709467887878418 + ], + [ + "▁UCSB", + -14.709470748901367 + ], + [ + "▁DoubleTree", + -14.709478378295898 + ], + [ + "▁Reptile", + -14.709485054016112 + ], + [ + "▁Qo", + -14.709489822387695 + ], + [ + "▁Jindal", + -14.709502220153809 + ], + [ + "▁Banbury", + -14.709507942199709 + ], + [ + "▁littering", + -14.709554672241213 + ], + [ + "▁pathologies", + -14.70955753326416 + ], + [ + "▁Bluffton", + -14.70956039428711 + ], + [ + "▁Crossrail", + -14.709564208984377 + ], + [ + "▁ESXi", + -14.709569931030272 + ], + [ + "▁Clegg", + -14.709574699401855 + ], + [ + "▁Consequences", + -14.709574699401855 + ], + [ + "▁Sorrow", + -14.709576606750488 + ], + [ + "ftp", + -14.709588050842283 + ], + [ + "trapped", + -14.709602355957031 + ], + [ + "▁shoplifting", + -14.709604263305664 + ], + [ + "▁Cooney", + -14.709609031677246 + ], + [ + "Recommend", + -14.709612846374512 + ], + [ + "▁devaluation", + -14.709688186645508 + ], + [ + "▁zany", + -14.709697723388672 + ], + [ + "▁Haworth", + -14.709699630737305 + ], + [ + "lse", + -14.709736824035645 + ], + [ + "▁Abandoned", + -14.70974063873291 + ], + [ + "▁ESOP", + -14.70974349975586 + ], + [ + "▁showstopper", + -14.709808349609377 + ], + [ + "▁attenuated", + -14.709875106811523 + ], + [ + "Pitt", + -14.709881782531738 + ], + [ + "▁Rampage", + -14.709881782531738 + ], + [ + "6.2%", + -14.709915161132812 + ], + [ + "▁Glenda", + -14.709933280944824 + ], + [ + "▁armband", + -14.709933280944824 + ], + [ + "▁orthodoxy", + -14.710046768188477 + ], + [ + "BSE", + -14.71009349822998 + ], + [ + "farmer", + -14.710122108459473 + ], + [ + "▁dipole", + -14.710124015808104 + ], + [ + "▁enveloping", + -14.7101469039917 + ], + [ + "omania", + -14.710164070129396 + ], + [ + "▁Wiseman", + -14.710211753845217 + ], + [ + "▁unwrap", + -14.71021842956543 + ], + [ + "Prospective", + -14.710220336914062 + ], + [ + "thriller", + -14.710235595703123 + ], + [ + "▁Adventurer", + -14.710247993469238 + ], + [ + "▁grammatically", + -14.710247993469238 + ], + [ + "caregiver", + -14.710249900817873 + ], + [ + "Immigration", + -14.710253715515137 + ], + [ + "Walmart", + -14.71025848388672 + ], + [ + "▁‘80", + -14.710278511047363 + ], + [ + "Midnight", + -14.710298538208008 + ], + [ + "▁Russellville", + -14.710298538208008 + ], + [ + "freight", + -14.710312843322754 + ], + [ + "Skype", + -14.710319519042969 + ], + [ + "gion", + -14.710325241088867 + ], + [ + "BLACK", + -14.710333824157717 + ], + [ + "engaging", + -14.710334777832031 + ], + [ + "▁CDO", + -14.710350036621094 + ], + [ + "throughput", + -14.71035099029541 + ], + [ + "GGG", + -14.710381507873535 + ], + [ + "▁Sunscreen", + -14.710381507873535 + ], + [ + "▁Emile", + -14.710432052612305 + ], + [ + "independence", + -14.710440635681152 + ], + [ + "Genetic", + -14.710456848144531 + ], + [ + "▁SSID", + -14.710466384887695 + ], + [ + "Lotus", + -14.710476875305176 + ], + [ + "analyse", + -14.710503578186035 + ], + [ + "▁Shaver", + -14.71053409576416 + ], + [ + "Celtic", + -14.710553169250488 + ], + [ + "spatial", + -14.71055507659912 + ], + [ + "▁Pender", + -14.71058464050293 + ], + [ + "▁Landfill", + -14.71059513092041 + ], + [ + "PPA", + -14.710620880126951 + ], + [ + "receive", + -14.71062469482422 + ], + [ + "▁Quilter", + -14.71065902709961 + ], + [ + "Posting", + -14.710673332214355 + ], + [ + "▁retrospectively", + -14.71067714691162 + ], + [ + "▁insular", + -14.710725784301758 + ], + [ + "NEA", + -14.710811614990234 + ], + [ + "▁Salu", + -14.710848808288574 + ], + [ 
+ "Virus", + -14.710875511169434 + ], + [ + "Crushing", + -14.710959434509276 + ], + [ + "▁Grandmaster", + -14.710992813110352 + ], + [ + "velo", + -14.711034774780272 + ], + [ + "▁Renal", + -14.711081504821776 + ], + [ + "▁webserver", + -14.71111297607422 + ], + [ + "▁engraver", + -14.711116790771484 + ], + [ + "Reel", + -14.711127281188965 + ], + [ + "▁fallow", + -14.711130142211914 + ], + [ + "nutri", + -14.711188316345217 + ], + [ + "reflexive", + -14.711198806762695 + ], + [ + "éli", + -14.711315155029297 + ], + [ + "▁Tame", + -14.711316108703612 + ], + [ + "▁Shaving", + -14.711333274841309 + ], + [ + "▁Shauna", + -14.711353302001951 + ], + [ + "▁PFD", + -14.711371421813965 + ], + [ + "▁formwork", + -14.711382865905762 + ], + [ + "erva", + -14.711423873901367 + ], + [ + "hydrated", + -14.711427688598633 + ], + [ + "▁Lucian", + -14.711442947387695 + ], + [ + "▁Esper", + -14.711443901062012 + ], + [ + "GIN", + -14.71152687072754 + ], + [ + "▁Cameo", + -14.711548805236816 + ], + [ + "Meh", + -14.71156120300293 + ], + [ + "▁Misha", + -14.711594581604004 + ], + [ + "▁soaker", + -14.71160125732422 + ], + [ + "▁Innovate", + -14.711634635925291 + ], + [ + "GMP", + -14.711685180664062 + ], + [ + "officially", + -14.711711883544922 + ], + [ + "▁Borges", + -14.711715698242188 + ], + [ + "▁exerting", + -14.711869239807127 + ], + [ + "▁Guilt", + -14.711892127990724 + ], + [ + "▁Claiming", + -14.711958885192873 + ], + [ + "soup", + -14.711977005004885 + ], + [ + "▁DCF", + -14.712023735046388 + ], + [ + "AGING", + -14.712026596069336 + ], + [ + "▁Hesse", + -14.712257385253906 + ], + [ + "▁urbanism", + -14.712309837341309 + ], + [ + "filler", + -14.712372779846191 + ], + [ + "estri", + -14.712396621704102 + ], + [ + "0.35", + -14.712404251098633 + ], + [ + "Garlic", + -14.71242904663086 + ], + [ + "▁McKi", + -14.712501525878906 + ], + [ + "Shore", + -14.712611198425291 + ], + [ + "ignano", + -14.712639808654783 + ], + [ + "emptive", + -14.712658882141112 + ], + [ + "chew", + -14.712661743164062 + ], + [ + "Accounting", + -14.712669372558594 + ], + [ + "LZ", + -14.712676048278809 + ], + [ + "▁chattering", + -14.712779998779297 + ], + [ + "Bearing", + -14.712788581848145 + ], + [ + "partement", + -14.713057518005373 + ], + [ + "RAH", + -14.713083267211914 + ], + [ + "8-18", + -14.713133811950684 + ], + [ + "▁Bina", + -14.713224411010742 + ], + [ + "UNO", + -14.71322536468506 + ], + [ + "Falling", + -14.713242530822754 + ], + [ + "neer", + -14.713262557983398 + ], + [ + "Lyc", + -14.713282585144045 + ], + [ + "grill", + -14.71331024169922 + ], + [ + "▁argon", + -14.713338851928713 + ], + [ + "5:10", + -14.71341037750244 + ], + [ + "mitsu", + -14.713421821594238 + ], + [ + "▁Kine", + -14.713492393493652 + ], + [ + "lingen", + -14.7135009765625 + ], + [ + "2.16", + -14.713537216186523 + ], + [ + "▁retest", + -14.713580131530762 + ], + [ + "gag", + -14.713641166687012 + ], + [ + "▁bookworm", + -14.713672637939451 + ], + [ + "▁Pappa", + -14.71368408203125 + ], + [ + "▁debutant", + -14.713698387145996 + ], + [ + "Citizen", + -14.713702201843262 + ], + [ + "▁Deme", + -14.713702201843262 + ], + [ + "▁Peaches", + -14.71389102935791 + ], + [ + "▁centrality", + -14.713906288146973 + ], + [ + "Volt", + -14.71395492553711 + ], + [ + "▁secrete", + -14.713994026184082 + ], + [ + "▁Shami", + -14.71418285369873 + ], + [ + "▁bibliographical", + -14.714205741882324 + ], + [ + "▁gluco", + -14.714295387268066 + ], + [ + "▁Interpol", + -14.714330673217772 + ], + [ + "▁overwinter", + -14.714346885681152 + ], + [ + 
"▁easygoing", + -14.714411735534668 + ], + [ + "▁CTP", + -14.71446132659912 + ], + [ + "▁CDU", + -14.714576721191406 + ], + [ + "GREAT", + -14.714637756347656 + ], + [ + "galact", + -14.714665412902832 + ], + [ + "▁adornment", + -14.714754104614258 + ], + [ + "Dimension", + -14.714923858642578 + ], + [ + "▁EQU", + -14.714961051940918 + ], + [ + "▁Khur", + -14.715084075927734 + ], + [ + "00\"", + -14.715237617492676 + ], + [ + "adol", + -14.71545696258545 + ], + [ + "▁1807", + -14.715560913085938 + ], + [ + "heen", + -14.715584754943848 + ], + [ + "▁compo", + -14.715587615966797 + ], + [ + "SLAM", + -14.715620040893556 + ], + [ + "connecting", + -14.71566390991211 + ], + [ + "▁honoree", + -14.715664863586426 + ], + [ + "▁13.2", + -14.71572208404541 + ], + [ + "▁conservatively", + -14.715781211853027 + ], + [ + "lami", + -14.715821266174316 + ], + [ + "lado", + -14.715840339660645 + ], + [ + "CYC", + -14.71585750579834 + ], + [ + "AEC", + -14.715934753417969 + ], + [ + "▁Fret", + -14.715965270996094 + ], + [ + "▁Aquaman", + -14.716018676757812 + ], + [ + "modify", + -14.716046333312988 + ], + [ + "▁FDT", + -14.716071128845217 + ], + [ + "▁11.8", + -14.71613311767578 + ], + [ + "▁Eta", + -14.716160774230955 + ], + [ + "▁RAR", + -14.716296195983888 + ], + [ + "STAT", + -14.716363906860352 + ], + [ + "▁Kora", + -14.71637725830078 + ], + [ + "▁interviewee", + -14.71639347076416 + ], + [ + "Republic", + -14.71641731262207 + ], + [ + "▁totalled", + -14.716438293457031 + ], + [ + "▁RAND", + -14.716789245605469 + ], + [ + "9]", + -14.716840744018556 + ], + [ + "▁Smt", + -14.716852188110352 + ], + [ + "▁12-13", + -14.716882705688477 + ], + [ + "▁Motivate", + -14.716915130615234 + ], + [ + "▁12,500", + -14.71693229675293 + ], + [ + "1:21", + -14.717034339904783 + ], + [ + "▁£20,000", + -14.717061042785645 + ], + [ + "1.40", + -14.717062950134276 + ], + [ + "icola", + -14.717124938964844 + ], + [ + "▁halve", + -14.717144966125488 + ], + [ + "▁devastate", + -14.717203140258787 + ], + [ + "▁bisa", + -14.717262268066406 + ], + [ + "▁Gunner", + -14.717278480529783 + ], + [ + "87)", + -14.717315673828123 + ], + [ + "▁Parkside", + -14.717329025268556 + ], + [ + "▁BCM", + -14.71734619140625 + ], + [ + "GPR", + -14.71741008758545 + ], + [ + "2:37", + -14.717561721801758 + ], + [ + "▁Twister", + -14.717584609985352 + ], + [ + "▁Paola", + -14.717658996582031 + ], + [ + "▁hulk", + -14.71775245666504 + ], + [ + "Accomplish", + -14.717793464660645 + ], + [ + "SPONSOR", + -14.717802047729492 + ], + [ + "▁Gymnasium", + -14.717802047729492 + ], + [ + "▁Inaugural", + -14.717802047729492 + ], + [ + "▁Kaleidoscope", + -14.717802047729492 + ], + [ + "▁Kennesaw", + -14.717802047729492 + ], + [ + "▁Machiavelli", + -14.717802047729492 + ], + [ + "▁Manipulation", + -14.717802047729492 + ], + [ + "▁Scissors", + -14.717802047729492 + ], + [ + "▁Sennheiser", + -14.717802047729492 + ], + [ + "▁Stearate", + -14.717802047729492 + ], + [ + "▁Stryker", + -14.717802047729492 + ], + [ + "▁countenance", + -14.717802047729492 + ], + [ + "▁disavow", + -14.717802047729492 + ], + [ + "▁evaporative", + -14.717802047729492 + ], + [ + "▁karnataka", + -14.717802047729492 + ], + [ + "▁mnemonic", + -14.717802047729492 + ], + [ + "▁neutrophil", + -14.717802047729492 + ], + [ + "▁obsolescence", + -14.717802047729492 + ], + [ + "▁ophthalmology", + -14.717802047729492 + ], + [ + "▁serendipitous", + -14.717802047729492 + ], + [ + "▁uncontrollably", + -14.717802047729492 + ], + [ + "arginine", + -14.717803001403809 + ], + [ + "▁Argyll", + 
-14.717803001403809 + ], + [ + "▁Deirdre", + -14.717803001403809 + ], + [ + "▁Locomotive", + -14.717803001403809 + ], + [ + "▁coagulation", + -14.717803001403809 + ], + [ + "▁obtuse", + -14.717803001403809 + ], + [ + "▁resonator", + -14.717803001403809 + ], + [ + "▁Vinyasa", + -14.717803955078123 + ], + [ + "▁cellophane", + -14.717803955078123 + ], + [ + "▁impeding", + -14.717803955078123 + ], + [ + "▁semolina", + -14.717803955078123 + ], + [ + "citabine", + -14.71780490875244 + ], + [ + "▁Rutledge", + -14.71780490875244 + ], + [ + "▁alphanumeric", + -14.71780490875244 + ], + [ + "▁gantry", + -14.717805862426758 + ], + [ + "▁motility", + -14.717805862426758 + ], + [ + "▁Oldsmobile", + -14.717806816101074 + ], + [ + "▁Jigsaw", + -14.71780776977539 + ], + [ + "▁Intimate", + -14.717808723449709 + ], + [ + "▁Clergy", + -14.717809677124023 + ], + [ + "▁Luiz", + -14.717809677124023 + ], + [ + "▁Peptide", + -14.717809677124023 + ], + [ + "▁bentonite", + -14.71781063079834 + ], + [ + "▁UEFI", + -14.717811584472656 + ], + [ + "▁Shazam", + -14.717812538146973 + ], + [ + "▁Roxanne", + -14.717813491821287 + ], + [ + "▁creasing", + -14.717813491821287 + ], + [ + "▁Schilling", + -14.717814445495604 + ], + [ + "▁servlet", + -14.717814445495604 + ], + [ + "▁Shutterstock", + -14.717815399169922 + ], + [ + "▁megabytes", + -14.717816352844238 + ], + [ + "▁oscillate", + -14.717816352844238 + ], + [ + "▁MIGHT", + -14.717818260192873 + ], + [ + "▁Babcock", + -14.717819213867188 + ], + [ + "▁convolution", + -14.717819213867188 + ], + [ + "▁Parvati", + -14.717822074890137 + ], + [ + "▁Spieth", + -14.717822074890137 + ], + [ + "▁Rojas", + -14.71782398223877 + ], + [ + "▁warfarin", + -14.717827796936035 + ], + [ + "▁Collateral", + -14.717828750610352 + ], + [ + "▁Faraday", + -14.717828750610352 + ], + [ + "▁PEACE", + -14.717832565307615 + ], + [ + "▁Balmoral", + -14.71783447265625 + ], + [ + "▁Eskom", + -14.717836380004885 + ], + [ + "▁Steinbeck", + -14.717836380004885 + ], + [ + "▁slosh", + -14.717840194702148 + ], + [ + "▁Inequality", + -14.717841148376465 + ], + [ + "▁Shortcuts", + -14.71784496307373 + ], + [ + "▁Marigold", + -14.717845916748049 + ], + [ + "▁PepsiCo", + -14.717851638793944 + ], + [ + "▁valium", + -14.717851638793944 + ], + [ + "▁cryogenic", + -14.717855453491213 + ], + [ + "▁bassinet", + -14.717859268188477 + ], + [ + "▁HOUR", + -14.717863082885742 + ], + [ + "▁dietician", + -14.717884063720703 + ], + [ + "▁Zoological", + -14.717886924743652 + ], + [ + "▁Haddad", + -14.717887878417969 + ], + [ + "▁Nauru", + -14.717899322509766 + ], + [ + "▁biosphere", + -14.717902183532717 + ], + [ + "▁tradespeople", + -14.717904090881348 + ], + [ + "▁Sprague", + -14.717909812927246 + ], + [ + "▁Quintana", + -14.717920303344728 + ], + [ + "▁antifreeze", + -14.717933654785156 + ], + [ + "▁snarky", + -14.717935562133787 + ], + [ + "▁Mendez", + -14.717936515808104 + ], + [ + "Irregular", + -14.717949867248535 + ], + [ + "uncharacteristic", + -14.71798038482666 + ], + [ + "▁cocky", + -14.717999458312988 + ], + [ + "▁unedited", + -14.718003273010254 + ], + [ + "▁Gurley", + -14.718031883239746 + ], + [ + "▁Quark", + -14.71804141998291 + ], + [ + "▁webcomic", + -14.718047142028809 + ], + [ + "▁Manoj", + -14.71805191040039 + ], + [ + "▁Tiffin", + -14.718066215515137 + ], + [ + "▁Ozzy", + -14.718071937561035 + ], + [ + "▁Rigby", + -14.718083381652832 + ], + [ + "Westphalia", + -14.718104362487791 + ], + [ + "▁Stallion", + -14.718149185180664 + ], + [ + "▁foolishness", + -14.718149185180664 + ], + [ + "▁Duong", + 
-14.718158721923828 + ], + [ + "▁Jukebox", + -14.718174934387209 + ], + [ + "mozilla", + -14.718183517456056 + ], + [ + "▁flailing", + -14.718242645263672 + ], + [ + "▁Marla", + -14.718276977539062 + ], + [ + "▁wring", + -14.71830940246582 + ], + [ + "▁Timberland", + -14.718326568603516 + ], + [ + "▁conclusively", + -14.718340873718262 + ], + [ + "▁Stig", + -14.718347549438477 + ], + [ + "▁Mantle", + -14.718350410461426 + ], + [ + "▁panned", + -14.718358039855955 + ], + [ + "▁Pierson", + -14.718361854553224 + ], + [ + "▁Oregonian", + -14.7184419631958 + ], + [ + "Aquila", + -14.718442916870115 + ], + [ + "▁SeaWorld", + -14.718499183654783 + ], + [ + "▁tartness", + -14.718515396118164 + ], + [ + "ctl", + -14.718565940856934 + ], + [ + "▁reinvested", + -14.718599319458008 + ], + [ + "watches", + -14.718637466430664 + ], + [ + "▁Iggy", + -14.718647003173828 + ], + [ + "▁hissing", + -14.71865177154541 + ], + [ + "▁summing", + -14.718663215637209 + ], + [ + "▁pipework", + -14.718692779541016 + ], + [ + "8-29", + -14.718703269958496 + ], + [ + "▁breakneck", + -14.718705177307127 + ], + [ + "FIB", + -14.71878147125244 + ], + [ + "Ukrainian", + -14.718812942504885 + ], + [ + "sanctioned", + -14.71885871887207 + ], + [ + "Religious", + -14.718863487243652 + ], + [ + "semester", + -14.718863487243652 + ], + [ + "Tropical", + -14.718864440917969 + ], + [ + "Satellite", + -14.718868255615234 + ], + [ + "Spencer", + -14.71886920928955 + ], + [ + "Constitution", + -14.718870162963867 + ], + [ + "Sacred", + -14.718870162963867 + ], + [ + "Venezuela", + -14.718871116638184 + ], + [ + "Eugene", + -14.7188720703125 + ], + [ + "Columbus", + -14.718873023986816 + ], + [ + "Caribbean", + -14.71887493133545 + ], + [ + "Massachusetts", + -14.71887493133545 + ], + [ + "twenty", + -14.718878746032717 + ], + [ + "Orient", + -14.718884468078612 + ], + [ + "horizontal", + -14.718894958496094 + ], + [ + "▁liquidate", + -14.718897819519045 + ], + [ + "Sustainability", + -14.71889877319336 + ], + [ + "▁Progression", + -14.718911170959473 + ], + [ + "Maggie", + -14.718918800354004 + ], + [ + "Phishing", + -14.7189302444458 + ], + [ + "ERROR", + -14.718939781188965 + ], + [ + "▁BRIT", + -14.71894073486328 + ], + [ + "Oakland", + -14.718975067138672 + ], + [ + "Genius", + -14.71901798248291 + ], + [ + "Willow", + -14.719039916992188 + ], + [ + "Preferred", + -14.71906566619873 + ], + [ + "▁woe", + -14.719066619873049 + ], + [ + "▁eminently", + -14.71910572052002 + ], + [ + "Animation", + -14.719130516052246 + ], + [ + "Retailers", + -14.719134330749512 + ], + [ + "guaranteed", + -14.71913719177246 + ], + [ + "idhi", + -14.71914768218994 + ], + [ + "▁Freestanding", + -14.71916389465332 + ], + [ + "▁Anastasi", + -14.7191743850708 + ], + [ + "▁1777", + -14.719204902648926 + ], + [ + "▁04:4", + -14.719234466552734 + ], + [ + "christian", + -14.71926975250244 + ], + [ + "▁Sandal", + -14.71927547454834 + ], + [ + "▁Munn", + -14.719276428222656 + ], + [ + "Arctic", + -14.719279289245604 + ], + [ + "▁Dank", + -14.719290733337402 + ], + [ + "SCRIPT", + -14.719308853149414 + ], + [ + "somewhat", + -14.719327926635742 + ], + [ + "▁Dalit", + -14.71934700012207 + ], + [ + "▁doormat", + -14.719406127929688 + ], + [ + "▁quietest", + -14.719437599182127 + ], + [ + "Palate", + -14.719439506530762 + ], + [ + "▁PFC", + -14.719442367553713 + ], + [ + "nouncing", + -14.71946620941162 + ], + [ + "Speculation", + -14.719475746154783 + ], + [ + "▁2-8", + -14.71949291229248 + ], + [ + "▁PHIL", + -14.719493865966797 + ], + [ + "▁Primate", + 
-14.719532012939451 + ], + [ + "▁KIA", + -14.719615936279297 + ], + [ + "▁WHIP", + -14.71966552734375 + ], + [ + "CFC", + -14.7196683883667 + ], + [ + "▁Mimosa", + -14.719700813293455 + ], + [ + "soap", + -14.71975040435791 + ], + [ + "▁Thad", + -14.719846725463867 + ], + [ + "FIGURE", + -14.719879150390623 + ], + [ + "Salmon", + -14.719958305358888 + ], + [ + "brac", + -14.719979286193848 + ], + [ + "OAC", + -14.719995498657228 + ], + [ + "▁ESB", + -14.720017433166504 + ], + [ + "▁Globalization", + -14.720025062561035 + ], + [ + "archy", + -14.72012996673584 + ], + [ + "▁camouflaged", + -14.720186233520508 + ], + [ + "▁Nikko", + -14.720189094543455 + ], + [ + "▁Discovered", + -14.72024917602539 + ], + [ + "2:07", + -14.720303535461426 + ], + [ + "▁$125,000", + -14.720343589782717 + ], + [ + "ADC", + -14.720352172851562 + ], + [ + "▁Leong", + -14.72040557861328 + ], + [ + "Facil", + -14.720434188842772 + ], + [ + "▁Mancha", + -14.720443725585938 + ], + [ + "Beast", + -14.720453262329102 + ], + [ + "▁Opp", + -14.720484733581545 + ], + [ + "adura", + -14.720487594604492 + ], + [ + "1963", + -14.72053337097168 + ], + [ + "Goalkeeper", + -14.72061538696289 + ], + [ + "▁Jardine", + -14.72061538696289 + ], + [ + "▁UNDERSTAND", + -14.720703125 + ], + [ + "BORN", + -14.720727920532228 + ], + [ + "▁Lugo", + -14.72075653076172 + ], + [ + "▁1817", + -14.72076416015625 + ], + [ + "▁Myra", + -14.720786094665527 + ], + [ + "▁naan", + -14.720829963684082 + ], + [ + "annie", + -14.72092056274414 + ], + [ + "Headed", + -14.72095775604248 + ], + [ + "▁repealing", + -14.720966339111328 + ], + [ + "BUCK", + -14.720976829528809 + ], + [ + "gateway", + -14.720979690551758 + ], + [ + "opathies", + -14.720986366271973 + ], + [ + "basa", + -14.720995903015137 + ], + [ + "▁sacking", + -14.72113037109375 + ], + [ + "physiology", + -14.72115993499756 + ], + [ + "▁abounding", + -14.721220016479492 + ], + [ + "Shout", + -14.721280097961426 + ], + [ + "▁Takeda", + -14.721346855163574 + ], + [ + "▁Rohr", + -14.721368789672852 + ], + [ + "Gosh", + -14.721373558044434 + ], + [ + "▁Payable", + -14.721481323242188 + ], + [ + "Hoping", + -14.721508026123049 + ], + [ + "▁photobook", + -14.72152042388916 + ], + [ + "▁sri", + -14.721529960632324 + ], + [ + "FUND", + -14.72158908843994 + ], + [ + "▁moldy", + -14.721633911132812 + ], + [ + "▁Geese", + -14.721678733825684 + ], + [ + "auction", + -14.72175407409668 + ], + [ + "▁703-", + -14.72182273864746 + ], + [ + "elah", + -14.72189235687256 + ], + [ + "9.7%", + -14.72193717956543 + ], + [ + "▁ABBA", + -14.721985816955566 + ], + [ + "▁businesswoman", + -14.722078323364258 + ], + [ + "Courtney", + -14.722161293029783 + ], + [ + "5.8%", + -14.722168922424316 + ], + [ + "▁Fili", + -14.722196578979492 + ], + [ + "5,600", + -14.722285270690918 + ], + [ + "▁Jok", + -14.722288131713867 + ], + [ + "▁tiene", + -14.722318649291992 + ], + [ + "▁BSI", + -14.722354888916016 + ], + [ + "ativ", + -14.722540855407717 + ], + [ + "▁myopia", + -14.722546577453612 + ], + [ + "▁Pano", + -14.722558975219728 + ], + [ + "▁amped", + -14.72263240814209 + ], + [ + "▁DELL", + -14.72269344329834 + ], + [ + "▁Latif", + -14.72270965576172 + ], + [ + "▁fainting", + -14.722716331481934 + ], + [ + "mondo", + -14.72275161743164 + ], + [ + "assen", + -14.72278118133545 + ], + [ + "▁overpaying", + -14.722833633422852 + ], + [ + "irl", + -14.723011016845703 + ], + [ + "TTE", + -14.72305679321289 + ], + [ + "▁Dieter", + -14.723067283630373 + ], + [ + "NAI", + -14.723094940185549 + ], + [ + "▁oscillation", + 
-14.72316074371338 + ], + [ + "▁pollutant", + -14.723191261291504 + ], + [ + "=“", + -14.723222732543944 + ], + [ + "anthan", + -14.723304748535156 + ], + [ + "▁Rovi", + -14.723329544067385 + ], + [ + "▁dramatiz", + -14.72340488433838 + ], + [ + "Thick", + -14.723503112792969 + ], + [ + "▁Sneaker", + -14.72352695465088 + ], + [ + "DALE", + -14.723586082458496 + ], + [ + "Shaw", + -14.723592758178713 + ], + [ + "▁$275", + -14.723612785339355 + ], + [ + "optical", + -14.723697662353516 + ], + [ + "1:36", + -14.723756790161133 + ], + [ + "CSO", + -14.723774909973145 + ], + [ + "8080", + -14.723878860473633 + ], + [ + "FOOD", + -14.723917961120604 + ], + [ + "▁macho", + -14.7239408493042 + ], + [ + "▁glamor", + -14.723966598510742 + ], + [ + "▁Shocking", + -14.72397232055664 + ], + [ + "▁ASQ", + -14.723976135253906 + ], + [ + "▁Chitra", + -14.723989486694336 + ], + [ + "▁clipper", + -14.72410488128662 + ], + [ + "Maoist", + -14.724143981933594 + ], + [ + "▁Adore", + -14.72418212890625 + ], + [ + "roglyph", + -14.724184036254885 + ], + [ + "9,999", + -14.724198341369627 + ], + [ + "Luna", + -14.724214553833008 + ], + [ + "romance", + -14.724305152893066 + ], + [ + "ULI", + -14.72433376312256 + ], + [ + "▁tae", + -14.724339485168455 + ], + [ + "FHS", + -14.724407196044922 + ], + [ + "tope", + -14.724428176879885 + ], + [ + "▁Swede", + -14.724482536315918 + ], + [ + "▁ARTS", + -14.724507331848145 + ], + [ + "▁vocally", + -14.724602699279783 + ], + [ + "▁1150", + -14.724645614624023 + ], + [ + "Keyword", + -14.724753379821776 + ], + [ + "▁desp", + -14.72478199005127 + ], + [ + "▁Keno", + -14.72478485107422 + ], + [ + "Marcel", + -14.724865913391112 + ], + [ + "▁Shand", + -14.72488498687744 + ], + [ + "▁Levite", + -14.72494411468506 + ], + [ + "▁Onboard", + -14.724945068359377 + ], + [ + "bender", + -14.72498607635498 + ], + [ + "▁Silverton", + -14.725022315979004 + ], + [ + "▁shad", + -14.725079536437988 + ], + [ + "▁Stoney", + -14.725133895874023 + ], + [ + "▁Ovi", + -14.725138664245604 + ], + [ + "Rodent", + -14.72516632080078 + ], + [ + "9–1", + -14.72527313232422 + ], + [ + "baik", + -14.725324630737305 + ], + [ + "Wipe", + -14.725349426269531 + ], + [ + "earning", + -14.725369453430176 + ], + [ + "overall", + -14.725400924682615 + ], + [ + "Quiz", + -14.725404739379885 + ], + [ + "Bah", + -14.725427627563477 + ], + [ + "cropping", + -14.72553253173828 + ], + [ + "▁confide", + -14.725561141967772 + ], + [ + "VIE", + -14.72568130493164 + ], + [ + "▁Amazonian", + -14.72572898864746 + ], + [ + "rgi", + -14.72582721710205 + ], + [ + "Deut", + -14.725890159606934 + ], + [ + "▁manufac", + -14.726238250732422 + ], + [ + "WOT", + -14.726245880126951 + ], + [ + "▁hardscape", + -14.726274490356444 + ], + [ + "Relevance", + -14.726276397705078 + ], + [ + "TUESDAY", + -14.726276397705078 + ], + [ + "▁Aeronautics", + -14.726276397705078 + ], + [ + "▁Akamai", + -14.726276397705078 + ], + [ + "▁Breitling", + -14.726276397705078 + ], + [ + "▁Chomsky", + -14.726276397705078 + ], + [ + "▁Exynos", + -14.726276397705078 + ], + [ + "▁Ingersoll", + -14.726276397705078 + ], + [ + "▁Intrepid", + -14.726276397705078 + ], + [ + "▁Propecia", + -14.726276397705078 + ], + [ + "▁absinthe", + -14.726276397705078 + ], + [ + "▁aforesaid", + -14.726276397705078 + ], + [ + "▁canapés", + -14.726276397705078 + ], + [ + "▁desirability", + -14.726276397705078 + ], + [ + "▁disparage", + -14.726276397705078 + ], + [ + "▁exhilaration", + -14.726276397705078 + ], + [ + "▁handcuffed", + -14.726276397705078 + ], + [ + "▁penicillin", + 
-14.726276397705078 + ], + [ + "▁provost", + -14.726276397705078 + ], + [ + "▁prudence", + -14.726276397705078 + ], + [ + "▁redundancies", + -14.726276397705078 + ], + [ + "▁synchronicity", + -14.726276397705078 + ], + [ + "▁vestibular", + -14.726276397705078 + ], + [ + "▁Barnabas", + -14.726277351379396 + ], + [ + "▁ketamine", + -14.726277351379396 + ], + [ + "▁muesli", + -14.726277351379396 + ], + [ + "▁preconceptions", + -14.726277351379396 + ], + [ + "▁xenophobia", + -14.726277351379396 + ], + [ + "▁Gingrich", + -14.726278305053713 + ], + [ + "▁Kyocera", + -14.726278305053713 + ], + [ + "▁disarmament", + -14.726278305053713 + ], + [ + "Nutmeg", + -14.726279258728027 + ], + [ + "▁draping", + -14.726279258728027 + ], + [ + "▁ugliness", + -14.726279258728027 + ], + [ + "illinois", + -14.726280212402344 + ], + [ + "▁Bariatric", + -14.726280212402344 + ], + [ + "▁Legionnaire", + -14.726280212402344 + ], + [ + "▁Thermador", + -14.726280212402344 + ], + [ + "▁frieze", + -14.726280212402344 + ], + [ + "▁retinopathy", + -14.726280212402344 + ], + [ + "▁Sanofi", + -14.72628116607666 + ], + [ + "▁Inferno", + -14.726282119750977 + ], + [ + "▁HACCP", + -14.72628402709961 + ], + [ + "▁Gradient", + -14.726285934448242 + ], + [ + "▁Bioinformatics", + -14.72628688812256 + ], + [ + "▁Schiller", + -14.726290702819824 + ], + [ + "▁Nicotine", + -14.726293563842772 + ], + [ + "▁Rajendra", + -14.726293563842772 + ], + [ + "▁Fitchburg", + -14.72629451751709 + ], + [ + "▁Praxis", + -14.726296424865724 + ], + [ + "▁peony", + -14.72629737854004 + ], + [ + "▁Roatan", + -14.726299285888672 + ], + [ + "▁Capabilities", + -14.726300239562988 + ], + [ + "▁vellum", + -14.726304054260254 + ], + [ + "▁Cruces", + -14.72630500793457 + ], + [ + "▁calving", + -14.72630500793457 + ], + [ + "▁Dickey", + -14.726306915283203 + ], + [ + "▁unscramble", + -14.726306915283203 + ], + [ + "▁GFCI", + -14.726308822631836 + ], + [ + "▁haptic", + -14.726310729980469 + ], + [ + "▁Calligraphy", + -14.726314544677734 + ], + [ + "2:47", + -14.726330757141112 + ], + [ + "▁wireframes", + -14.726335525512695 + ], + [ + "▁frailty", + -14.726337432861328 + ], + [ + "▁Spock", + -14.726340293884276 + ], + [ + "▁Horsham", + -14.726341247558594 + ], + [ + "Enfant", + -14.72634220123291 + ], + [ + "▁Driftwood", + -14.72635269165039 + ], + [ + "▁taillights", + -14.726358413696287 + ], + [ + "▁interject", + -14.726373672485352 + ], + [ + "▁CDM", + -14.726380348205566 + ], + [ + "▁Danforth", + -14.7263822555542 + ], + [ + "▁hyperactive", + -14.726386070251465 + ], + [ + "▁Dusk", + -14.726414680480955 + ], + [ + "essner", + -14.72641658782959 + ], + [ + "▁BASIS", + -14.726462364196776 + ], + [ + "▁imbibe", + -14.726462364196776 + ], + [ + "▁Behavioural", + -14.726512908935549 + ], + [ + "▁Blurb", + -14.726539611816406 + ], + [ + "▁Quoc", + -14.726564407348633 + ], + [ + "▁devalue", + -14.726588249206545 + ], + [ + "▁bugging", + -14.726593017578123 + ], + [ + "▁Tilda", + -14.72659969329834 + ], + [ + "▁orchestrating", + -14.726619720458984 + ], + [ + "▁Cowley", + -14.72663116455078 + ], + [ + "▁CHINA", + -14.726638793945312 + ], + [ + "LIBER", + -14.726639747619627 + ], + [ + "▁PSR", + -14.726704597473145 + ], + [ + "▁seagrass", + -14.726726531982422 + ], + [ + "▁SHRM", + -14.726733207702637 + ], + [ + "Fostering", + -14.726747512817385 + ], + [ + "▁Caruso", + -14.72675323486328 + ], + [ + "▁rescinded", + -14.72677230834961 + ], + [ + "▁Hazrat", + -14.72677993774414 + ], + [ + "▁Glance", + -14.72679615020752 + ], + [ + "▁TAFE", + -14.726798057556152 + ], + 
[ + "useful", + -14.726812362670898 + ], + [ + "▁Morningside", + -14.726820945739746 + ], + [ + "▁Suzie", + -14.726823806762695 + ], + [ + "viruses", + -14.726831436157228 + ], + [ + "▁godsend", + -14.72683811187744 + ], + [ + "Distraction", + -14.7268648147583 + ], + [ + "▁17.3", + -14.726880073547363 + ], + [ + "▁Yoder", + -14.726888656616213 + ], + [ + "▁Kellie", + -14.726924896240234 + ], + [ + "9.75", + -14.72692584991455 + ], + [ + "▁Bowery", + -14.726927757263184 + ], + [ + "Arrive", + -14.726945877075195 + ], + [ + "anticipate", + -14.72695255279541 + ], + [ + "▁Winkle", + -14.726977348327637 + ], + [ + "▁£600", + -14.726987838745115 + ], + [ + "▁ghostwriter", + -14.727009773254396 + ], + [ + "▁Nourishing", + -14.727015495300291 + ], + [ + "▁Fisch", + -14.72703456878662 + ], + [ + "▁Eurostar", + -14.727100372314451 + ], + [ + "▁Moha", + -14.727107048034668 + ], + [ + "▁PTS", + -14.72711181640625 + ], + [ + "▁discredited", + -14.72711181640625 + ], + [ + "gusta", + -14.727120399475098 + ], + [ + "▁DDT", + -14.727152824401855 + ], + [ + "▁languishing", + -14.727153778076172 + ], + [ + "▁Witches", + -14.727177619934082 + ], + [ + "▁Revue", + -14.72719955444336 + ], + [ + "▁testifies", + -14.727200508117676 + ], + [ + "▁18650", + -14.72720432281494 + ], + [ + "▁signposted", + -14.727208137512209 + ], + [ + "UBI", + -14.727212905883787 + ], + [ + "▁Olli", + -14.727214813232422 + ], + [ + "▁corroborate", + -14.727219581604004 + ], + [ + "Column", + -14.72727394104004 + ], + [ + "▁damsel", + -14.72729206085205 + ], + [ + "▁Sidewalk", + -14.727361679077148 + ], + [ + "hauer", + -14.727362632751465 + ], + [ + "tsuki", + -14.727398872375488 + ], + [ + "imagining", + -14.727410316467283 + ], + [ + "Enlarge", + -14.727444648742676 + ], + [ + "▁screenwriting", + -14.727487564086914 + ], + [ + "Apologies", + -14.727511405944824 + ], + [ + "▁Rhea", + -14.727519989013672 + ], + [ + "wrinkle", + -14.727534294128418 + ], + [ + "Destination", + -14.7275390625 + ], + [ + "kitchenaid", + -14.727540969848633 + ], + [ + "Variety", + -14.727554321289062 + ], + [ + "▁35-40", + -14.727556228637695 + ], + [ + "Acrylic", + -14.727559089660645 + ], + [ + "Spicy", + -14.72756004333496 + ], + [ + "conversation", + -14.72756004333496 + ], + [ + "Panasonic", + -14.727560997009276 + ], + [ + "australia", + -14.72756290435791 + ], + [ + "Timothy", + -14.727563858032228 + ], + [ + "believing", + -14.727567672729492 + ], + [ + "▁undersea", + -14.72757339477539 + ], + [ + "fantastic", + -14.727582931518556 + ], + [ + "▁EXO", + -14.727584838867188 + ], + [ + "therapist", + -14.727585792541504 + ], + [ + "Poetry", + -14.72758674621582 + ], + [ + "▁Launches", + -14.727643013000488 + ], + [ + "▁Baez", + -14.7277193069458 + ], + [ + "occasion", + -14.727775573730469 + ], + [ + "Chrysler", + -14.727794647216797 + ], + [ + "executed", + -14.727874755859377 + ], + [ + "Pier", + -14.727890014648438 + ], + [ + "WAYS", + -14.727910041809082 + ], + [ + "▁showbiz", + -14.72792625427246 + ], + [ + "Carolyn", + -14.727931022644045 + ], + [ + "Cara", + -14.727951049804688 + ], + [ + "suitable", + -14.727983474731444 + ], + [ + "Stepping", + -14.728028297424316 + ], + [ + "nagh", + -14.728094100952148 + ], + [ + "Roasted", + -14.728157043457031 + ], + [ + "▁Esri", + -14.728171348571776 + ], + [ + "Titan", + -14.728182792663574 + ], + [ + "Grain", + -14.728248596191406 + ], + [ + "PLEX", + -14.72827434539795 + ], + [ + "▁Senna", + -14.72830867767334 + ], + [ + "▁AISI", + -14.7283353805542 + ], + [ + "▁Bevan", + -14.72852897644043 + ], + 
[ + "▁unevenly", + -14.728545188903809 + ], + [ + "indeed", + -14.728555679321287 + ], + [ + "▁Shilling", + -14.728593826293944 + ], + [ + "Chile", + -14.728608131408691 + ], + [ + "▁Kuch", + -14.72863483428955 + ], + [ + "▁Maxima", + -14.72869873046875 + ], + [ + "▁transgress", + -14.728713989257812 + ], + [ + "▁Arad", + -14.72873306274414 + ], + [ + "▁Jerk", + -14.728845596313477 + ], + [ + "▁Nui", + -14.72895050048828 + ], + [ + "▁Recep", + -14.729071617126465 + ], + [ + "Seen", + -14.729114532470703 + ], + [ + "passed", + -14.729118347167969 + ], + [ + "▁Zora", + -14.72914218902588 + ], + [ + "▁manta", + -14.729202270507812 + ], + [ + "LAG", + -14.729204177856444 + ], + [ + "revenue", + -14.729211807250977 + ], + [ + "▁09:3", + -14.729253768920898 + ], + [ + "▁Tyrell", + -14.729397773742676 + ], + [ + "▁(17)", + -14.729405403137209 + ], + [ + "BFS", + -14.729412078857422 + ], + [ + "▁quantitatively", + -14.729440689086914 + ], + [ + "▁Pela", + -14.729443550109863 + ], + [ + "EWS", + -14.729445457458496 + ], + [ + "▁handsomely", + -14.729469299316406 + ], + [ + "▁Saraki", + -14.729487419128418 + ], + [ + "▁Meir", + -14.72950839996338 + ], + [ + "▁rouse", + -14.729557037353516 + ], + [ + "Rib", + -14.729700088500977 + ], + [ + "BID", + -14.729819297790527 + ], + [ + "PGA", + -14.729839324951172 + ], + [ + "▁Busan", + -14.729924201965332 + ], + [ + "▁rejoiced", + -14.730002403259276 + ], + [ + "▁Tiller", + -14.730046272277832 + ], + [ + "MIX", + -14.730307579040527 + ], + [ + "▁RCT", + -14.730310440063477 + ], + [ + "sticky", + -14.730334281921388 + ], + [ + "▁Spor", + -14.730337142944336 + ], + [ + "Brass", + -14.730417251586914 + ], + [ + "crawler", + -14.730508804321287 + ], + [ + "▁reaped", + -14.730513572692873 + ], + [ + "▁Moulin", + -14.730515480041504 + ], + [ + "▁Adjuster", + -14.730667114257812 + ], + [ + "exec", + -14.730685234069824 + ], + [ + "Jewel", + -14.730746269226074 + ], + [ + "correction", + -14.730758666992188 + ], + [ + "▁tusk", + -14.730817794799805 + ], + [ + "2400", + -14.730842590332031 + ], + [ + "▁Persi", + -14.730908393859863 + ], + [ + "Crossing", + -14.730920791625977 + ], + [ + "fuelled", + -14.731082916259766 + ], + [ + "ото", + -14.731184005737305 + ], + [ + "jf", + -14.731200218200684 + ], + [ + "▁£2,000", + -14.731202125549316 + ], + [ + "▁misting", + -14.731396675109863 + ], + [ + "▁NIN", + -14.731542587280272 + ], + [ + "11:00", + -14.731563568115234 + ], + [ + "▁Tramp", + -14.731578826904297 + ], + [ + "▁Scandi", + -14.731611251831056 + ], + [ + "▁Naming", + -14.731616973876951 + ], + [ + "▁needlework", + -14.731637001037598 + ], + [ + "▁Drafting", + -14.731738090515137 + ], + [ + "▁micronutrients", + -14.731738090515137 + ], + [ + "▁Kalli", + -14.731748580932615 + ], + [ + "▁hydrangea", + -14.731781005859377 + ], + [ + "▁Beanie", + -14.731794357299805 + ], + [ + "▁markdown", + -14.731810569763184 + ], + [ + "▁Grub", + -14.73194980621338 + ], + [ + "14001", + -14.731968879699709 + ], + [ + "▁Hostess", + -14.731972694396973 + ], + [ + "▁Hush", + -14.732037544250488 + ], + [ + "vou", + -14.732102394104004 + ], + [ + "FACE", + -14.732439041137695 + ], + [ + "▁Fiori", + -14.732467651367188 + ], + [ + "1010", + -14.73257541656494 + ], + [ + "hofer", + -14.732648849487305 + ], + [ + "▁essayist", + -14.732666969299316 + ], + [ + "▁fabricator", + -14.732678413391112 + ], + [ + "footer", + -14.732765197753906 + ], + [ + "▁headquarter", + -14.732831954956056 + ], + [ + "▁interes", + -14.732860565185549 + ], + [ + "Colo", + -14.732976913452148 + ], + [ + 
"PEP", + -14.733019828796388 + ], + [ + "▁Jaco", + -14.733099937438965 + ], + [ + "contain", + -14.733186721801758 + ], + [ + "NTC", + -14.73320484161377 + ], + [ + "WWW", + -14.733243942260742 + ], + [ + "▁Ghani", + -14.733304023742676 + ], + [ + "Allergen", + -14.733352661132812 + ], + [ + "▁Socialism", + -14.733466148376465 + ], + [ + "INFORM", + -14.733528137207031 + ], + [ + "beating", + -14.733591079711914 + ], + [ + "scientist", + -14.733610153198242 + ], + [ + "jai", + -14.733672142028809 + ], + [ + "▁Sahel", + -14.733675956726074 + ], + [ + "Filmed", + -14.733701705932615 + ], + [ + "CPD", + -14.73372745513916 + ], + [ + "Consolidat", + -14.733736991882324 + ], + [ + "атно", + -14.73375415802002 + ], + [ + "▁Kahl", + -14.73392391204834 + ], + [ + "▁Carbonate", + -14.734030723571776 + ], + [ + "▁trunking", + -14.734065055847168 + ], + [ + "▁2027", + -14.734066009521484 + ], + [ + "LEO", + -14.734146118164062 + ], + [ + "▁WX", + -14.734170913696287 + ], + [ + "intern", + -14.734187126159668 + ], + [ + "HSU", + -14.734245300292969 + ], + [ + "▁Tabor", + -14.734261512756348 + ], + [ + "▁Northstar", + -14.73430347442627 + ], + [ + "▁Approve", + -14.734314918518066 + ], + [ + "IMF", + -14.734353065490724 + ], + [ + "▁KOA", + -14.734399795532228 + ], + [ + "▁Ronin", + -14.734411239624023 + ], + [ + "▁Baal", + -14.734463691711426 + ], + [ + "▁Desh", + -14.73458480834961 + ], + [ + "lexic", + -14.734623908996582 + ], + [ + "Whip", + -14.734673500061035 + ], + [ + "▁8-2", + -14.734698295593262 + ], + [ + "▁Fuchs", + -14.734779357910156 + ], + [ + "GIA", + -14.73478889465332 + ], + [ + "▁CRF", + -14.734795570373535 + ], + [ + "variate", + -14.73480224609375 + ], + [ + "ovine", + -14.7348051071167 + ], + [ + "Periodontal", + -14.734823226928713 + ], + [ + "Ridiculous", + -14.734823226928713 + ], + [ + "Solvusoft", + -14.734823226928713 + ], + [ + "Sufficient", + -14.734823226928713 + ], + [ + "kaspersky", + -14.734823226928713 + ], + [ + "▁Bonaparte", + -14.734823226928713 + ], + [ + "▁Cruelty", + -14.734823226928713 + ], + [ + "▁Cthulhu", + -14.734823226928713 + ], + [ + "▁Gauntlet", + -14.734823226928713 + ], + [ + "▁Impreza", + -14.734823226928713 + ], + [ + "▁Monterrey", + -14.734823226928713 + ], + [ + "▁Pkwy", + -14.734823226928713 + ], + [ + "▁Separating", + -14.734823226928713 + ], + [ + "▁cantaloupe", + -14.734823226928713 + ], + [ + "▁entwined", + -14.734823226928713 + ], + [ + "▁gargantuan", + -14.734823226928713 + ], + [ + "▁idolatry", + -14.734823226928713 + ], + [ + "▁metatarsal", + -14.734823226928713 + ], + [ + "▁methotrexate", + -14.734823226928713 + ], + [ + "▁prefrontal", + -14.734823226928713 + ], + [ + "▁prophesy", + -14.734823226928713 + ], + [ + "▁rezeptfrei", + -14.734823226928713 + ], + [ + "▁sandblasting", + -14.734823226928713 + ], + [ + "▁scoliosis", + -14.734823226928713 + ], + [ + "▁vitiligo", + -14.734823226928713 + ], + [ + "▁Oshkosh", + -14.734824180603027 + ], + [ + "▁nginx", + -14.734824180603027 + ], + [ + "▁nirvana", + -14.734824180603027 + ], + [ + "▁rhodium", + -14.734824180603027 + ], + [ + "Invoicing", + -14.734825134277344 + ], + [ + "▁Vedanta", + -14.734825134277344 + ], + [ + "▁deviant", + -14.73482608795166 + ], + [ + "▁typifie", + -14.73482608795166 + ], + [ + "▁iniquity", + -14.734827041625977 + ], + [ + "▁jovial", + -14.734827041625977 + ], + [ + "▁gyroscope", + -14.734827995300291 + ], + [ + "▁Concealed", + -14.734829902648926 + ], + [ + "▁Pharrell", + -14.734829902648926 + ], + [ + "▁typology", + -14.734830856323242 + ], + [ + "▁revocable", + 
-14.73483180999756 + ], + [ + "▁Sudoku", + -14.734832763671877 + ], + [ + "▁Hypertension", + -14.734833717346191 + ], + [ + "▁cloister", + -14.734833717346191 + ], + [ + "▁spiky", + -14.734833717346191 + ], + [ + "▁Turismo", + -14.734834671020508 + ], + [ + "▁Hutchison", + -14.73483657836914 + ], + [ + "▁bonanza", + -14.73483657836914 + ], + [ + "▁mishandled", + -14.73483657836914 + ], + [ + "▁picturing", + -14.734837532043455 + ], + [ + "▁astonishment", + -14.73483943939209 + ], + [ + "▁Skunk", + -14.734840393066406 + ], + [ + "▁borax", + -14.734848022460938 + ], + [ + "SBI", + -14.73484992980957 + ], + [ + "▁omnibus", + -14.73484992980957 + ], + [ + "▁stubble", + -14.734850883483888 + ], + [ + "▁satiety", + -14.73485279083252 + ], + [ + "▁tepid", + -14.734853744506836 + ], + [ + "▁Archival", + -14.734854698181152 + ], + [ + "▁mummies", + -14.734855651855469 + ], + [ + "▁Eyebrow", + -14.73486042022705 + ], + [ + "▁Chac", + -14.734861373901367 + ], + [ + "▁concurrency", + -14.73486328125 + ], + [ + "▁intermodal", + -14.734872817993164 + ], + [ + "▁boozy", + -14.734892845153809 + ], + [ + "▁Vaporizer", + -14.73490047454834 + ], + [ + "▁900,000", + -14.734903335571287 + ], + [ + "▁Skelton", + -14.734915733337402 + ], + [ + "▁kanji", + -14.734922409057615 + ], + [ + "▁Ziggy", + -14.73492431640625 + ], + [ + "▁undying", + -14.73492431640625 + ], + [ + "▁Tayyi", + -14.734925270080566 + ], + [ + "▁Keats", + -14.7349271774292 + ], + [ + "▁18:4", + -14.734938621520996 + ], + [ + "▁unscheduled", + -14.734941482543944 + ], + [ + "▁Anarchy", + -14.734942436218262 + ], + [ + "▁Colonies", + -14.734945297241213 + ], + [ + "▁Ashraf", + -14.734952926635742 + ], + [ + "▁Wolfpack", + -14.734960556030272 + ], + [ + "▁taming", + -14.73496437072754 + ], + [ + "▁babysit", + -14.734968185424805 + ], + [ + "▁larval", + -14.734977722167969 + ], + [ + "▁Uncommon", + -14.734993934631348 + ], + [ + "▁respawn", + -14.73501968383789 + ], + [ + "▁Revlon", + -14.735024452209473 + ], + [ + "▁ICAO", + -14.735028266906738 + ], + [ + "▁Birla", + -14.735060691833496 + ], + [ + "▁Fanning", + -14.735095977783203 + ], + [ + "▁2019-4-2", + -14.73509693145752 + ], + [ + "▁PPV", + -14.735113143920898 + ], + [ + "▁Inquirer", + -14.735164642333984 + ], + [ + "▁Oakdale", + -14.73519515991211 + ], + [ + "▁sewerage", + -14.735200881958008 + ], + [ + "▁secularism", + -14.735215187072754 + ], + [ + "▁Bessie", + -14.735243797302246 + ], + [ + "▁Campfire", + -14.735279083251951 + ], + [ + "1:32", + -14.735280990600586 + ], + [ + "▁endoscope", + -14.73528289794922 + ], + [ + "▁snooker", + -14.735311508178713 + ], + [ + "2700", + -14.735318183898926 + ], + [ + "▁Bluehost", + -14.735331535339355 + ], + [ + "/1.4", + -14.735345840454102 + ], + [ + "▁parapet", + -14.735353469848633 + ], + [ + "gupta", + -14.735374450683594 + ], + [ + "▁hoteliers", + -14.735428810119627 + ], + [ + "(12)", + -14.735461235046388 + ], + [ + "▁2035", + -14.735478401184082 + ], + [ + "▁Caltech", + -14.735479354858398 + ], + [ + "pico", + -14.735532760620115 + ], + [ + "▁FSX", + -14.735548973083496 + ], + [ + "▁Wholesalers", + -14.735552787780762 + ], + [ + "▁litigated", + -14.735576629638672 + ], + [ + "▁whitewashed", + -14.735588073730469 + ], + [ + "▁QPR", + -14.73561191558838 + ], + [ + "▁Canelo", + -14.735652923583984 + ], + [ + "▁deduced", + -14.735669136047363 + ], + [ + "▁GBA", + -14.735718727111816 + ], + [ + "▁CQC", + -14.73574447631836 + ], + [ + "▁borehole", + -14.735747337341309 + ], + [ + "▁Mochi", + -14.735772132873535 + ], + [ + "Reflect", + 
-14.73577880859375 + ], + [ + "plicity", + -14.735788345336914 + ], + [ + "▁Zep", + -14.73584270477295 + ], + [ + "owie", + -14.735895156860352 + ], + [ + "▁HCL", + -14.735910415649414 + ], + [ + "▁relaunched", + -14.735925674438477 + ], + [ + "hehe", + -14.735968589782717 + ], + [ + "▁weirdo", + -14.735968589782717 + ], + [ + "▁legwork", + -14.73598575592041 + ], + [ + "▁$7.99", + -14.736004829406738 + ], + [ + "▁surmount", + -14.73600959777832 + ], + [ + "▁Chula", + -14.736055374145508 + ], + [ + "advance", + -14.73606014251709 + ], + [ + "▁Insul", + -14.73606777191162 + ], + [ + "▁26.5", + -14.736093521118164 + ], + [ + "gesch", + -14.736159324645996 + ], + [ + "▁Mythic", + -14.736200332641602 + ], + [ + "▁fraying", + -14.73624324798584 + ], + [ + "udy", + -14.736244201660156 + ], + [ + "Orthodox", + -14.736294746398926 + ], + [ + "Adorable", + -14.736299514770508 + ], + [ + "▁activewear", + -14.73630428314209 + ], + [ + "Proposal", + -14.736309051513672 + ], + [ + "Dentist", + -14.73631763458252 + ], + [ + "spoiler", + -14.736320495605469 + ], + [ + "injection", + -14.736326217651367 + ], + [ + "Replacing", + -14.736330032348633 + ], + [ + "Nuclear", + -14.736353874206545 + ], + [ + "▁crunches", + -14.736367225646973 + ], + [ + "Nashville", + -14.73638916015625 + ], + [ + "spinning", + -14.73642349243164 + ], + [ + "visited", + -14.736494064331056 + ], + [ + "▁$399", + -14.736495018005373 + ], + [ + "SUPER", + -14.736506462097168 + ], + [ + "▁Xcel", + -14.736555099487305 + ], + [ + "▁Sizing", + -14.736600875854492 + ], + [ + "▁MSW", + -14.736658096313477 + ], + [ + "Confidentiality", + -14.73666763305664 + ], + [ + "▁Shetty", + -14.73671817779541 + ], + [ + "Truthfully", + -14.73672103881836 + ], + [ + "dumb", + -14.73672580718994 + ], + [ + "minimal", + -14.73680019378662 + ], + [ + "Nose", + -14.736845970153809 + ], + [ + "Zinc", + -14.736867904663086 + ], + [ + "▁WTC", + -14.73691177368164 + ], + [ + "climbing", + -14.73695945739746 + ], + [ + "Tamil", + -14.737013816833496 + ], + [ + "Drake", + -14.737022399902344 + ], + [ + "12:00", + -14.737040519714355 + ], + [ + "▁Sump", + -14.737055778503418 + ], + [ + "▁adrenalin", + -14.737065315246582 + ], + [ + "halle", + -14.73707389831543 + ], + [ + "▁Eee", + -14.73707389831543 + ], + [ + "lloway", + -14.737119674682615 + ], + [ + "▁Ambien", + -14.73716163635254 + ], + [ + "▁Elwood", + -14.737244606018066 + ], + [ + "▁microstructure", + -14.737252235412598 + ], + [ + "yielding", + -14.737268447875977 + ], + [ + "veri", + -14.737272262573242 + ], + [ + "2:18", + -14.737284660339355 + ], + [ + "essentially", + -14.7373046875 + ], + [ + "▁Cotter", + -14.737309455871582 + ], + [ + "4:13", + -14.737384796142578 + ], + [ + "▁meteoric", + -14.73745822906494 + ], + [ + "fetch", + -14.737504959106444 + ], + [ + "Attending", + -14.737505912780762 + ], + [ + "ето", + -14.737518310546877 + ], + [ + "▁($12", + -14.737520217895508 + ], + [ + "▁Wold", + -14.737523078918455 + ], + [ + "VEC", + -14.737526893615724 + ], + [ + "▁MCL", + -14.737619400024414 + ], + [ + "jumping", + -14.737693786621094 + ], + [ + "▁DOING", + -14.737789154052734 + ], + [ + "gery", + -14.737842559814451 + ], + [ + "▁bisque", + -14.73788833618164 + ], + [ + "▁110%", + -14.737893104553224 + ], + [ + "zna", + -14.73792839050293 + ], + [ + "▁Rabat", + -14.737981796264648 + ], + [ + "tracing", + -14.738014221191406 + ], + [ + "▁Commun", + -14.738069534301758 + ], + [ + "etsy", + -14.738110542297363 + ], + [ + "ashley", + -14.738150596618652 + ], + [ + "▁Juliana", + 
-14.738167762756348 + ], + [ + "▁combust", + -14.738214492797852 + ], + [ + "szcz", + -14.738236427307127 + ], + [ + "▁matchless", + -14.73825740814209 + ], + [ + "4,800", + -14.738325119018556 + ], + [ + "Noble", + -14.738329887390137 + ], + [ + "▁20.5", + -14.738358497619627 + ], + [ + "▁gallant", + -14.738360404968262 + ], + [ + "▁strayed", + -14.738384246826172 + ], + [ + "▁Bazar", + -14.738431930541992 + ], + [ + "Killer", + -14.73851203918457 + ], + [ + "▁Viacom", + -14.738542556762695 + ], + [ + "AIRE", + -14.738638877868652 + ], + [ + "▁cargoes", + -14.738686561584473 + ], + [ + "▁Podge", + -14.738713264465332 + ], + [ + "5500", + -14.738734245300291 + ], + [ + "Hemp", + -14.738836288452148 + ], + [ + "▁algo", + -14.738862037658691 + ], + [ + "▁tensor", + -14.738885879516602 + ], + [ + "▁Murat", + -14.738936424255373 + ], + [ + "▁SELLER", + -14.738991737365724 + ], + [ + "▁Danilo", + -14.739020347595217 + ], + [ + "statt", + -14.739043235778809 + ], + [ + "▁Verme", + -14.739055633544922 + ], + [ + "▁Maja", + -14.739112854003906 + ], + [ + "UAN", + -14.739113807678224 + ], + [ + "Quit", + -14.739116668701172 + ], + [ + "Wanted", + -14.739126205444336 + ], + [ + "FIS", + -14.7391357421875 + ], + [ + "▁Waterville", + -14.739187240600586 + ], + [ + "synth", + -14.739205360412598 + ], + [ + "067", + -14.73939037322998 + ], + [ + "▁versi", + -14.739523887634276 + ], + [ + "▁Coping", + -14.739623069763184 + ], + [ + "iamo", + -14.739646911621094 + ], + [ + "▁Fawn", + -14.73972988128662 + ], + [ + "▁individualised", + -14.739859580993652 + ], + [ + "▁minimised", + -14.739906311035156 + ], + [ + "figura", + -14.739911079406738 + ], + [ + "▁preform", + -14.739930152893066 + ], + [ + "0002", + -14.739974975585938 + ], + [ + "trait", + -14.740034103393556 + ], + [ + "▁BOOKS", + -14.740225791931152 + ], + [ + "ANSI", + -14.740296363830566 + ], + [ + "▁23.5", + -14.740331649780272 + ], + [ + "Rabbit", + -14.74034023284912 + ], + [ + "▁Esch", + -14.740399360656738 + ], + [ + "▁Vacant", + -14.740541458129885 + ], + [ + "Plum", + -14.740555763244627 + ], + [ + "Busch", + -14.74063205718994 + ], + [ + "▁birders", + -14.740644454956056 + ], + [ + "8-16", + -14.740657806396484 + ], + [ + "▁CUTE", + -14.740673065185549 + ], + [ + "GING", + -14.740696907043455 + ], + [ + "▁LCP", + -14.740700721740724 + ], + [ + "▁Mailer", + -14.74082374572754 + ], + [ + "▁Alco", + -14.740827560424805 + ], + [ + "▁faceplate", + -14.740925788879396 + ], + [ + "▁creat", + -14.740959167480469 + ], + [ + "▁SENT", + -14.74096393585205 + ], + [ + "▁Hawker", + -14.74096965789795 + ], + [ + "▁Equ", + -14.741257667541504 + ], + [ + "mancer", + -14.7412748336792 + ], + [ + "Teenage", + -14.741284370422363 + ], + [ + "▁Fie", + -14.741303443908691 + ], + [ + "▁Exempt", + -14.741394996643066 + ], + [ + "▁Ethyl", + -14.741405487060549 + ], + [ + "▁Zend", + -14.741447448730469 + ], + [ + "▁Dwell", + -14.74147605895996 + ], + [ + "ushka", + -14.741506576538086 + ], + [ + "ABL", + -14.741530418395996 + ], + [ + "nev", + -14.741570472717283 + ], + [ + "energise", + -14.741628646850586 + ], + [ + "1941", + -14.741682052612305 + ], + [ + "▁Dingle", + -14.741683959960938 + ], + [ + "Nina", + -14.741716384887695 + ], + [ + "▁MAF", + -14.742107391357422 + ], + [ + "nuncia", + -14.742327690124512 + ], + [ + "urne", + -14.74233055114746 + ], + [ + "▁2-7", + -14.742416381835938 + ], + [ + "▁Ansel", + -14.742424011230469 + ], + [ + "▁mahi", + -14.74245834350586 + ], + [ + "▁Hula", + -14.74250316619873 + ], + [ + "8.25", + -14.742525100708008 + ], 
+ [ + "▁LPA", + -14.742534637451172 + ], + [ + "▁fanboy", + -14.742566108703612 + ], + [ + "▁emboss", + -14.742568016052246 + ], + [ + "fec", + -14.742627143859863 + ], + [ + "Moderate", + -14.742769241333008 + ], + [ + "▁Fateh", + -14.742786407470703 + ], + [ + "Hedge", + -14.74281406402588 + ], + [ + "▁achievers", + -14.74289608001709 + ], + [ + "TRAN", + -14.74294090270996 + ], + [ + "▁prayerful", + -14.74302864074707 + ], + [ + "hita", + -14.74305534362793 + ], + [ + "▁Ivanov", + -14.743093490600586 + ], + [ + "onix", + -14.743097305297852 + ], + [ + "BOUND", + -14.74317455291748 + ], + [ + "FOA", + -14.74318790435791 + ], + [ + "▁debrief", + -14.74319839477539 + ], + [ + "▁Kym", + -14.743305206298828 + ], + [ + "contractor", + -14.74333381652832 + ], + [ + "ijk", + -14.7433500289917 + ], + [ + "gosa", + -14.743362426757812 + ], + [ + "Cookie", + -14.743369102478027 + ], + [ + "cknowledging", + -14.743406295776367 + ], + [ + "▁Austral", + -14.743425369262695 + ], + [ + "chlorothiazide", + -14.743444442749023 + ], + [ + "▁Attestation", + -14.743444442749023 + ], + [ + "▁Chanukah", + -14.743444442749023 + ], + [ + "▁Coffey", + -14.743444442749023 + ], + [ + "▁Cymru", + -14.743444442749023 + ], + [ + "▁EITHER", + -14.743444442749023 + ], + [ + "▁Expenditure", + -14.743444442749023 + ], + [ + "▁FAVORITE", + -14.743444442749023 + ], + [ + "▁Guadeloupe", + -14.743444442749023 + ], + [ + "▁Heineken", + -14.743444442749023 + ], + [ + "▁IRONMAN", + -14.743444442749023 + ], + [ + "▁Influential", + -14.743444442749023 + ], + [ + "▁Kinesiology", + -14.743444442749023 + ], + [ + "▁Pheasant", + -14.743444442749023 + ], + [ + "▁RESPONSIBLE", + -14.743444442749023 + ], + [ + "▁Rubbish", + -14.743444442749023 + ], + [ + "▁aeronautical", + -14.743444442749023 + ], + [ + "▁allegorical", + -14.743444442749023 + ], + [ + "▁arraignment", + -14.743444442749023 + ], + [ + "▁ayurvedic", + -14.743444442749023 + ], + [ + "▁beleaguered", + -14.743444442749023 + ], + [ + "▁defensible", + -14.743444442749023 + ], + [ + "▁incinerator", + -14.743444442749023 + ], + [ + "▁preposterous", + -14.743444442749023 + ], + [ + "▁resveratrol", + -14.743444442749023 + ], + [ + "▁rhododendron", + -14.743444442749023 + ], + [ + "▁smoldering", + -14.743444442749023 + ], + [ + "▁telecommuting", + -14.743444442749023 + ], + [ + "▁telescoping", + -14.743444442749023 + ], + [ + "▁trapezoid", + -14.743444442749023 + ], + [ + "▁unquestionable", + -14.743444442749023 + ], + [ + "CHAPTER", + -14.74344539642334 + ], + [ + "▁AirPods", + -14.743446350097656 + ], + [ + "▁TensorFlow", + -14.743446350097656 + ], + [ + "▁agrarian", + -14.743446350097656 + ], + [ + "▁TRANSFER", + -14.743447303771973 + ], + [ + "▁pacifier", + -14.743447303771973 + ], + [ + "▁Leamington", + -14.743448257446287 + ], + [ + "▁orienteering", + -14.743448257446287 + ], + [ + "▁Gonzalo", + -14.743449211120604 + ], + [ + "▁Refining", + -14.743450164794922 + ], + [ + "▁martyrdom", + -14.743451118469238 + ], + [ + "▁dispersing", + -14.743454933166504 + ], + [ + "▁Muffler", + -14.74345588684082 + ], + [ + "▁Aachen", + -14.743460655212402 + ], + [ + "▁INVESTMENT", + -14.743460655212402 + ], + [ + "▁vulcan", + -14.743460655212402 + ], + [ + "▁$800,000", + -14.74346160888672 + ], + [ + "▁Chastain", + -14.743465423583984 + ], + [ + "▁UNIQUE", + -14.743470191955566 + ], + [ + "▁imposter", + -14.743471145629885 + ], + [ + "▁Cranbrook", + -14.743473052978516 + ], + [ + "▁unencrypted", + -14.74347686767578 + ], + [ + "▁Figaro", + -14.743481636047363 + ], + [ + "▁Equities", + 
-14.743485450744627 + ], + [ + "▁codified", + -14.743486404418944 + ], + [ + "▁microbiological", + -14.743490219116213 + ], + [ + "▁Gravy", + -14.743491172790527 + ], + [ + "▁Aquaculture", + -14.743492126464844 + ], + [ + "▁Nilsson", + -14.74349308013916 + ], + [ + "▁Mythology", + -14.743494987487791 + ], + [ + "▁COBRA", + -14.743499755859377 + ], + [ + "▁Tablespoon", + -14.743499755859377 + ], + [ + "▁Deferred", + -14.743511199951172 + ], + [ + "▁unyielding", + -14.74351406097412 + ], + [ + "▁transgenic", + -14.74351692199707 + ], + [ + "Fence", + -14.743517875671388 + ], + [ + "▁handicapping", + -14.74351978302002 + ], + [ + "▁ferrule", + -14.743529319763184 + ], + [ + "▁Grubb", + -14.7435302734375 + ], + [ + "▁$14.99", + -14.743553161621094 + ], + [ + "▁RESTful", + -14.74356460571289 + ], + [ + "▁tonsils", + -14.743568420410156 + ], + [ + "WILL", + -14.743579864501951 + ], + [ + "▁defray", + -14.743585586547852 + ], + [ + "Compliment", + -14.743589401245115 + ], + [ + "▁gruff", + -14.743590354919434 + ], + [ + "rowski", + -14.743607521057127 + ], + [ + "afari", + -14.743609428405762 + ], + [ + "▁rotted", + -14.743616104125977 + ], + [ + "▁rubella", + -14.743623733520508 + ], + [ + "▁Padilla", + -14.74362564086914 + ], + [ + "▁Kwik", + -14.743663787841797 + ], + [ + "▁Eyewear", + -14.743664741516112 + ], + [ + "▁Wordsworth", + -14.743667602539062 + ], + [ + "▁Mumford", + -14.743674278259276 + ], + [ + "▁kDa", + -14.743682861328123 + ], + [ + "▁unreported", + -14.743682861328123 + ], + [ + "▁JOSE", + -14.743721961975098 + ], + [ + "▁1826", + -14.743738174438477 + ], + [ + "Marty", + -14.74375343322754 + ], + [ + "▁eventuality", + -14.74378776550293 + ], + [ + "▁Darden", + -14.743830680847168 + ], + [ + "Chang", + -14.7438325881958 + ], + [ + "▁Rousey", + -14.743836402893066 + ], + [ + "▁Gundam", + -14.74384880065918 + ], + [ + "▁judo", + -14.74386978149414 + ], + [ + "▁Dressage", + -14.743886947631836 + ], + [ + "▁ObamaCare", + -14.743891716003418 + ], + [ + "COMMAND", + -14.743947982788086 + ], + [ + "▁DTH", + -14.74394989013672 + ], + [ + "huizen", + -14.743968963623049 + ], + [ + "▁Aegis", + -14.74400520324707 + ], + [ + "▁PHOTOS", + -14.744009971618652 + ], + [ + "intercontinental", + -14.744032859802246 + ], + [ + "orski", + -14.744068145751951 + ], + [ + "$$", + -14.744072914123535 + ], + [ + "▁Stevenage", + -14.744112968444824 + ], + [ + "▁encircled", + -14.744114875793455 + ], + [ + "▁Newswire", + -14.744118690490724 + ], + [ + "▁Graco", + -14.744126319885254 + ], + [ + "▁operandi", + -14.74413776397705 + ], + [ + "▁Snapper", + -14.744146347045898 + ], + [ + "▁Hohen", + -14.744150161743164 + ], + [ + "▁Farrow", + -14.744183540344238 + ], + [ + "▁Karthik", + -14.74418830871582 + ], + [ + "Victorian", + -14.744217872619627 + ], + [ + "▁Alpe", + -14.744341850280762 + ], + [ + "▁deconstructed", + -14.744351387023926 + ], + [ + "▁shoal", + -14.744389533996582 + ], + [ + "unni", + -14.744441032409668 + ], + [ + "▁Southlake", + -14.744492530822754 + ], + [ + "▁puller", + -14.744508743286133 + ], + [ + "▁mastic", + -14.744553565979004 + ], + [ + "▁Chatbot", + -14.74461841583252 + ], + [ + "▁reposted", + -14.744638442993164 + ], + [ + "▁slimline", + -14.74464511871338 + ], + [ + "Phenomenal", + -14.74467945098877 + ], + [ + "▁earpiece", + -14.744708061218262 + ], + [ + "▁hoisted", + -14.744732856750488 + ], + [ + "Farber", + -14.74477195739746 + ], + [ + "teeth", + -14.74483585357666 + ], + [ + "▁Curtin", + -14.744837760925291 + ], + [ + "russian", + -14.744839668273926 + ], + [ + "1:13", 
+ -14.744840621948242 + ], + [ + "feather", + -14.74486255645752 + ], + [ + "▁foolishly", + -14.744866371154783 + ], + [ + "juan", + -14.744873046875 + ], + [ + "▁workaholic", + -14.744924545288086 + ], + [ + "▁Dickie", + -14.744951248168944 + ], + [ + "▁1822", + -14.745004653930664 + ], + [ + "▁Spiced", + -14.74500846862793 + ], + [ + "▁Rayon", + -14.745010375976562 + ], + [ + "▁laidback", + -14.74502658843994 + ], + [ + "circulated", + -14.745061874389648 + ], + [ + "hydraulic", + -14.745121955871582 + ], + [ + "Referral", + -14.745124816894531 + ], + [ + "Qualification", + -14.745162963867188 + ], + [ + "inflicted", + -14.745163917541504 + ], + [ + "Torrent", + -14.745166778564451 + ], + [ + "▁Davi", + -14.745172500610352 + ], + [ + "Celebrity", + -14.745177268981934 + ], + [ + "trivial", + -14.745179176330566 + ], + [ + "vegetable", + -14.745180130004885 + ], + [ + "Religion", + -14.745182991027832 + ], + [ + "promising", + -14.745186805725098 + ], + [ + "Supreme", + -14.745192527770996 + ], + [ + "Daisy", + -14.745195388793944 + ], + [ + "Cheryl", + -14.745199203491213 + ], + [ + "Terminal", + -14.745201110839844 + ], + [ + "Kristen", + -14.74520778656006 + ], + [ + "Medicaid", + -14.745216369628906 + ], + [ + "▁Caliper", + -14.745220184326172 + ], + [ + "▁Stitched", + -14.745227813720703 + ], + [ + "Countries", + -14.74522876739502 + ], + [ + "▁Pavlo", + -14.745237350463867 + ], + [ + "Murder", + -14.745251655578612 + ], + [ + "Cowboy", + -14.74525260925293 + ], + [ + "▁SEED", + -14.745283126831056 + ], + [ + "Moses", + -14.745315551757812 + ], + [ + "Promotional", + -14.74533748626709 + ], + [ + "▁Pasco", + -14.74538230895996 + ], + [ + "icator", + -14.745450973510742 + ], + [ + "▁PRICES", + -14.745481491088867 + ], + [ + "Inclusive", + -14.74549674987793 + ], + [ + "shaping", + -14.745502471923828 + ], + [ + "trooper", + -14.745516777038574 + ], + [ + "Wander", + -14.745532989501951 + ], + [ + "Pala", + -14.745540618896484 + ], + [ + "1908", + -14.74562168121338 + ], + [ + "Spanning", + -14.745665550231934 + ], + [ + "Interface", + -14.745736122131348 + ], + [ + "lulu", + -14.745776176452637 + ], + [ + "inspect", + -14.74579906463623 + ], + [ + "▁harken", + -14.74583339691162 + ], + [ + "▁TRAIN", + -14.745877265930176 + ], + [ + "Severe", + -14.745893478393556 + ], + [ + "▁Witte", + -14.745923042297363 + ], + [ + "Yourself", + -14.74593734741211 + ], + [ + "▁Hyd", + -14.745945930480955 + ], + [ + "'", + -14.745970726013184 + ], + [ + "▁Monti", + -14.746037483215332 + ], + [ + "cambridge", + -14.74604606628418 + ], + [ + "▁Woodley", + -14.74605941772461 + ], + [ + "▁Athol", + -14.746081352233888 + ], + [ + "▁Woodhouse", + -14.746091842651367 + ], + [ + "tissue", + -14.746146202087402 + ], + [ + "Defender", + -14.746192932128906 + ], + [ + "arine", + -14.746301651000977 + ], + [ + "1.80", + -14.746318817138672 + ], + [ + "Fallon", + -14.746333122253418 + ], + [ + "▁waver", + -14.746356010437012 + ], + [ + "▁Lovett", + -14.746380805969238 + ], + [ + "▁Bandra", + -14.74639129638672 + ], + [ + "Elliott", + -14.746475219726562 + ], + [ + "▁combin", + -14.746521949768066 + ], + [ + "+++", + -14.74652862548828 + ], + [ + "▁Mortar", + -14.74655818939209 + ], + [ + "Suit", + -14.746573448181152 + ], + [ + "UFA", + -14.746612548828123 + ], + [ + "▁Fave", + -14.74671745300293 + ], + [ + "▁Faction", + -14.746779441833496 + ], + [ + "▁15-17", + -14.74680233001709 + ], + [ + "Contra", + -14.746891975402832 + ], + [ + "▁trav", + -14.74690055847168 + ], + [ + "▁Cheating", + -14.746957778930664 + ], 
+ [ + "4-18", + -14.7470121383667 + ], + [ + "▁Quali", + -14.747020721435549 + ], + [ + "▁Wolfson", + -14.74703884124756 + ], + [ + "filtered", + -14.74710178375244 + ], + [ + "inti", + -14.747201919555664 + ], + [ + "▁overturning", + -14.747233390808104 + ], + [ + "▁lumped", + -14.747248649597168 + ], + [ + "▁Bolivian", + -14.747265815734863 + ], + [ + "TFC", + -14.747270584106444 + ], + [ + "▁devilish", + -14.747340202331545 + ], + [ + "Viola", + -14.747429847717283 + ], + [ + "Invoice", + -14.747430801391602 + ], + [ + "▁STARS", + -14.747441291809082 + ], + [ + "▁DVB", + -14.747482299804688 + ], + [ + "▁consortia", + -14.747488021850586 + ], + [ + "Eq", + -14.747538566589355 + ], + [ + "supposed", + -14.74756145477295 + ], + [ + "Vale", + -14.74757957458496 + ], + [ + "▁susan", + -14.747902870178224 + ], + [ + "▁rekey", + -14.747907638549805 + ], + [ + "3,900", + -14.747954368591309 + ], + [ + "conti", + -14.74799346923828 + ], + [ + "diz", + -14.748026847839355 + ], + [ + "donia", + -14.748113632202148 + ], + [ + "▁Replaced", + -14.74817943572998 + ], + [ + "▁wedged", + -14.748196601867676 + ], + [ + "▁Schwa", + -14.748246192932127 + ], + [ + "▁Erect", + -14.748400688171388 + ], + [ + "various", + -14.74842643737793 + ], + [ + "xf", + -14.748427391052246 + ], + [ + "conception", + -14.748465538024902 + ], + [ + "Purchased", + -14.748478889465332 + ], + [ + "asih", + -14.748513221740724 + ], + [ + "Xbox", + -14.748520851135254 + ], + [ + "enaar", + -14.748568534851074 + ], + [ + "▁deutsch", + -14.748589515686035 + ], + [ + "▁fiddler", + -14.748597145080566 + ], + [ + "▁16:2", + -14.748628616333008 + ], + [ + "▁modernised", + -14.74871063232422 + ], + [ + "revolution", + -14.74878978729248 + ], + [ + "yaan", + -14.74882984161377 + ], + [ + "▁Chaz", + -14.748892784118652 + ], + [ + "▁fineness", + -14.748894691467283 + ], + [ + "PDU", + -14.749139785766602 + ], + [ + "▁Panna", + -14.749326705932615 + ], + [ + "spina", + -14.749444007873535 + ], + [ + "▁paraben", + -14.749475479125977 + ], + [ + "sensitized", + -14.749500274658203 + ], + [ + "▁80+", + -14.749500274658203 + ], + [ + "▁joist", + -14.74952793121338 + ], + [ + "LLU", + -14.749581336975098 + ], + [ + "▁Overton", + -14.749590873718262 + ], + [ + "stepping", + -14.749613761901855 + ], + [ + "▁dentin", + -14.74962043762207 + ], + [ + "▁perfumed", + -14.749669075012209 + ], + [ + "▁Sulf", + -14.74989891052246 + ], + [ + "▁EDB", + -14.749921798706056 + ], + [ + "/2.0", + -14.749923706054688 + ], + [ + "DURA", + -14.749933242797852 + ], + [ + "larger", + -14.74998950958252 + ], + [ + "▁optically", + -14.750011444091797 + ], + [ + "xanth", + -14.750113487243652 + ], + [ + "▁flexi", + -14.750121116638184 + ], + [ + "▁Zell", + -14.750248908996582 + ], + [ + "pelle", + -14.750276565551758 + ], + [ + "nement", + -14.750303268432615 + ], + [ + "▁squashed", + -14.75033950805664 + ], + [ + "▁Oud", + -14.750371932983398 + ], + [ + "▁Rumi", + -14.750478744506836 + ], + [ + "▁Fag", + -14.750483512878418 + ], + [ + "▁Angi", + -14.750532150268556 + ], + [ + "▁decaf", + -14.750574111938477 + ], + [ + "contra", + -14.750600814819336 + ], + [ + "atif", + -14.750611305236816 + ], + [ + "quish", + -14.750617980957031 + ], + [ + "odized", + -14.750632286071776 + ], + [ + "▁mache", + -14.750752449035645 + ], + [ + "▁Albin", + -14.750905990600586 + ], + [ + "annoy", + -14.750929832458496 + ], + [ + "▁Lamm", + -14.750971794128418 + ], + [ + "2:09", + -14.751049041748049 + ], + [ + "(!)", + -14.75113296508789 + ], + [ + "▁1827", + -14.751139640808104 + ], + 
[ + "SORT", + -14.75119686126709 + ], + [ + "Pep", + -14.751286506652832 + ], + [ + "acker", + -14.751296043395996 + ], + [ + "basin", + -14.75139045715332 + ], + [ + "▁inanimate", + -14.751424789428713 + ], + [ + "7.4%", + -14.751437187194824 + ], + [ + "▁Jib", + -14.751598358154297 + ], + [ + "gynecologist", + -14.751627922058104 + ], + [ + "▁Competi", + -14.751652717590332 + ], + [ + "▁dehydrate", + -14.75174045562744 + ], + [ + "▁Joann", + -14.751808166503906 + ], + [ + "▁Licensee", + -14.75186538696289 + ], + [ + "peach", + -14.7518892288208 + ], + [ + "▁CMB", + -14.751898765563965 + ], + [ + "▁Bongo", + -14.751934051513672 + ], + [ + "▁Scorch", + -14.751978874206545 + ], + [ + "▁diatom", + -14.752057075500488 + ], + [ + "▁forsaken", + -14.752058029174805 + ], + [ + "axillofacial", + -14.752079010009766 + ], + [ + "elliptic", + -14.75212287902832 + ], + [ + "schmidt", + -14.752130508422852 + ], + [ + "▁suffrage", + -14.752134323120115 + ], + [ + "▁brazil", + -14.752135276794434 + ], + [ + "▁Staunton", + -14.7521390914917 + ], + [ + "▁Asturias", + -14.752140045166016 + ], + [ + "▁Beowulf", + -14.752140045166016 + ], + [ + "▁Braindumps", + -14.752140045166016 + ], + [ + "▁Catechism", + -14.752140045166016 + ], + [ + "▁Cozumel", + -14.752140045166016 + ], + [ + "▁Criminology", + -14.752140045166016 + ], + [ + "▁Goulburn", + -14.752140045166016 + ], + [ + "▁Iroquois", + -14.752140045166016 + ], + [ + "▁Kowloon", + -14.752140045166016 + ], + [ + "▁Sunbrella", + -14.752140045166016 + ], + [ + "▁Tertiary", + -14.752140045166016 + ], + [ + "▁adenocarcinoma", + -14.752140045166016 + ], + [ + "▁bucolic", + -14.752140045166016 + ], + [ + "▁carabiner", + -14.752140045166016 + ], + [ + "▁carnivorous", + -14.752140045166016 + ], + [ + "▁decentralised", + -14.752140045166016 + ], + [ + "▁exacerbation", + -14.752140045166016 + ], + [ + "▁ghastly", + -14.752140045166016 + ], + [ + "▁goosebumps", + -14.752140045166016 + ], + [ + "▁quadratic", + -14.752140045166016 + ], + [ + "▁virtuosity", + -14.752140045166016 + ], + [ + "Spectacle", + -14.752140998840332 + ], + [ + "▁AUTOMATIC", + -14.752140998840332 + ], + [ + "▁turkish", + -14.752140998840332 + ], + [ + "decentralization", + -14.752141952514648 + ], + [ + "▁Iglesia", + -14.752141952514648 + ], + [ + "▁adipex", + -14.752141952514648 + ], + [ + "▁INRDeals", + -14.75214385986328 + ], + [ + "▁LEATHER", + -14.752144813537598 + ], + [ + "▁Avignon", + -14.752145767211914 + ], + [ + "▁spigot", + -14.752145767211914 + ], + [ + "▁Crucible", + -14.75214672088623 + ], + [ + "▁WHILE", + -14.75214672088623 + ], + [ + "▁Spandex", + -14.75214958190918 + ], + [ + "▁Chronograph", + -14.752150535583496 + ], + [ + "▁Kubota", + -14.752150535583496 + ], + [ + "▁disorientation", + -14.752150535583496 + ], + [ + "▁Clutter", + -14.752154350280762 + ], + [ + "▁stratified", + -14.752154350280762 + ], + [ + "▁Midwifery", + -14.752159118652344 + ], + [ + "▁Nanotechnology", + -14.752164840698242 + ], + [ + "▁Yonkers", + -14.752169609069824 + ], + [ + "▁EARTH", + -14.752171516418455 + ], + [ + "отор", + -14.752177238464355 + ], + [ + "▁MARKETING", + -14.752189636230469 + ], + [ + "▁miRNA", + -14.752191543579102 + ], + [ + "▁Armagh", + -14.752193450927734 + ], + [ + "▁maxillofacial", + -14.75220012664795 + ], + [ + "▁panacea", + -14.752202987670898 + ], + [ + "▁henceforth", + -14.752206802368164 + ], + [ + "▁Brasserie", + -14.752212524414062 + ], + [ + "▁McCallum", + -14.752214431762695 + ], + [ + "▁Mansour", + -14.752220153808594 + ], + [ + "▁ceasing", + -14.752224922180176 + ], 
+ [ + "▁transposition", + -14.752227783203123 + ], + [ + "▁retrofitted", + -14.752229690551758 + ], + [ + "▁livre", + -14.752236366271973 + ], + [ + "▁antithesis", + -14.752256393432615 + ], + [ + "▁FRS", + -14.752257347106934 + ], + [ + "▁kelly", + -14.752262115478516 + ], + [ + "▁Infinix", + -14.752277374267578 + ], + [ + "▁Coarse", + -14.752286911010742 + ], + [ + "▁Teleport", + -14.752297401428224 + ], + [ + "▁Loomis", + -14.752305030822754 + ], + [ + "▁polyp", + -14.752326011657717 + ], + [ + "▁Crestwood", + -14.752327919006348 + ], + [ + "▁Zionism", + -14.752354621887209 + ], + [ + "▁Dodger", + -14.75238037109375 + ], + [ + "▁contemporaneous", + -14.752382278442385 + ], + [ + "▁Miser", + -14.752387046813965 + ], + [ + "▁emboldened", + -14.752408981323242 + ], + [ + "▁Sadler", + -14.752419471740724 + ], + [ + "▁waders", + -14.752456665039062 + ], + [ + "▁Holley", + -14.752470970153809 + ], + [ + "▁whimper", + -14.752470970153809 + ], + [ + "▁camphor", + -14.752521514892578 + ], + [ + "▁Poodle", + -14.752533912658691 + ], + [ + "▁shamelessly", + -14.752549171447754 + ], + [ + "▁dependably", + -14.75256061553955 + ], + [ + "▁Aspiring", + -14.752562522888184 + ], + [ + "▁discouragement", + -14.75256633758545 + ], + [ + "▁Bellator", + -14.752574920654297 + ], + [ + "▁opined", + -14.752586364746094 + ], + [ + "▁Putney", + -14.752617835998535 + ], + [ + "▁Cantina", + -14.752635955810549 + ], + [ + "▁Finney", + -14.752635955810549 + ], + [ + "6.9%", + -14.752643585205078 + ], + [ + "▁Molo", + -14.75267219543457 + ], + [ + "▁Twig", + -14.75268268585205 + ], + [ + "▁Goji", + -14.75269603729248 + ], + [ + "▁hatchet", + -14.752708435058594 + ], + [ + "▁MEETING", + -14.752737998962402 + ], + [ + "▁Copier", + -14.752769470214844 + ], + [ + "trax", + -14.75278663635254 + ], + [ + "▁thong", + -14.752854347229004 + ], + [ + "▁cosine", + -14.752901077270508 + ], + [ + "▁Richfield", + -14.752910614013672 + ], + [ + "1903", + -14.752943992614746 + ], + [ + "▁Saxony", + -14.752951622009276 + ], + [ + "▁viaduct", + -14.752974510192873 + ], + [ + "▁preaches", + -14.753114700317385 + ], + [ + "▁Infused", + -14.753138542175291 + ], + [ + "▁$240", + -14.753141403198242 + ], + [ + "Tronic", + -14.753162384033203 + ], + [ + "▁Supportive", + -14.753204345703123 + ], + [ + "▁McCra", + -14.753260612487791 + ], + [ + "▁Leaching", + -14.753273010253906 + ], + [ + "▁intersected", + -14.753273010253906 + ], + [ + "riving", + -14.75328254699707 + ], + [ + "▁CPO", + -14.753296852111816 + ], + [ + "▁Bebe", + -14.753313064575195 + ], + [ + "▁UART", + -14.75344944000244 + ], + [ + "rogation", + -14.753509521484377 + ], + [ + "▁JDK", + -14.753525733947754 + ], + [ + "▁snub", + -14.753558158874512 + ], + [ + "▁Westland", + -14.753594398498535 + ], + [ + "▁Funko", + -14.753613471984863 + ], + [ + "▁FOSS", + -14.753619194030762 + ], + [ + "▁Namibian", + -14.75362491607666 + ], + [ + "joo", + -14.75367259979248 + ], + [ + "BBB", + -14.753724098205566 + ], + [ + "▁copay", + -14.7537260055542 + ], + [ + "schoolers", + -14.75374698638916 + ], + [ + "▁#21", + -14.75377082824707 + ], + [ + "▁WSO", + -14.753813743591309 + ], + [ + "broiled", + -14.753934860229492 + ], + [ + "legend", + -14.753944396972656 + ], + [ + "▁Inglewood", + -14.753944396972656 + ], + [ + "▁Finishes", + -14.75395393371582 + ], + [ + "TERN", + -14.753958702087402 + ], + [ + "combination", + -14.753969192504885 + ], + [ + "▁Islay", + -14.754027366638184 + ], + [ + "Dozens", + -14.754079818725586 + ], + [ + "Cosmetic", + -14.754094123840332 + ], + [ + "frills", + 
-14.754096031188965 + ], + [ + "Identification", + -14.754106521606444 + ], + [ + "Tiffany", + -14.754111289978027 + ], + [ + "Diversity", + -14.75411605834961 + ], + [ + "Spiral", + -14.75411605834961 + ], + [ + "Colombia", + -14.754120826721191 + ], + [ + "▁stator", + -14.754124641418455 + ], + [ + "Malware", + -14.754133224487305 + ], + [ + "▁Awake", + -14.75413703918457 + ], + [ + "Nationwide", + -14.754144668579102 + ], + [ + "▁SHEET", + -14.754145622253418 + ], + [ + "▁superstore", + -14.754148483276367 + ], + [ + "Garcia", + -14.754149436950684 + ], + [ + "miracle", + -14.754168510437012 + ], + [ + "enberry", + -14.754169464111328 + ], + [ + "Worship", + -14.754176139831545 + ], + [ + "orientation", + -14.754191398620604 + ], + [ + "modulation", + -14.75419807434082 + ], + [ + "Sixty", + -14.75425624847412 + ], + [ + "▁taint", + -14.754265785217283 + ], + [ + "▁Crag", + -14.754290580749512 + ], + [ + "▁Cid", + -14.75432300567627 + ], + [ + "Periodically", + -14.754328727722168 + ], + [ + "Execute", + -14.754337310791016 + ], + [ + "sculpt", + -14.754352569580078 + ], + [ + "Marble", + -14.754356384277344 + ], + [ + "OFA", + -14.754402160644531 + ], + [ + "Danielle", + -14.754433631896973 + ], + [ + "consultant", + -14.754438400268556 + ], + [ + "▁irregularly", + -14.754448890686035 + ], + [ + "rova", + -14.754471778869627 + ], + [ + "whip", + -14.75453281402588 + ], + [ + "▁MARS", + -14.754545211791992 + ], + [ + "Mona", + -14.754570960998535 + ], + [ + "IIS", + -14.754612922668455 + ], + [ + "Intra", + -14.754631042480469 + ], + [ + "valle", + -14.754655838012695 + ], + [ + "▁tipple", + -14.754800796508787 + ], + [ + "Hiking", + -14.754828453063965 + ], + [ + "▁Trico", + -14.75485610961914 + ], + [ + "laj", + -14.754898071289062 + ], + [ + "▁airsoft", + -14.75516414642334 + ], + [ + "gite", + -14.75529670715332 + ], + [ + "leck", + -14.755308151245115 + ], + [ + "▁spaciousness", + -14.755308151245115 + ], + [ + "khon", + -14.755311965942385 + ], + [ + "HEAT", + -14.755422592163086 + ], + [ + "▁disregarding", + -14.755462646484377 + ], + [ + "▁mezzo", + -14.755526542663574 + ], + [ + "▁Statisti", + -14.755569458007812 + ], + [ + "Sulfur", + -14.755606651306152 + ], + [ + "SNO", + -14.7557373046875 + ], + [ + "hugging", + -14.755759239196776 + ], + [ + "▁GNS", + -14.755767822265623 + ], + [ + "terio", + -14.755817413330078 + ], + [ + "▁SAML", + -14.755850791931152 + ], + [ + "morgan", + -14.755993843078612 + ], + [ + "▁Nationality", + -14.756007194519045 + ], + [ + "sleeping", + -14.756065368652344 + ], + [ + "▁eachother", + -14.756068229675291 + ], + [ + "Hail", + -14.75608253479004 + ], + [ + "vain", + -14.756089210510254 + ], + [ + "▁HSL", + -14.75611686706543 + ], + [ + "▁978-0-", + -14.7561616897583 + ], + [ + "cuba", + -14.756214141845703 + ], + [ + "▁09:5", + -14.756271362304688 + ], + [ + "▁auditioning", + -14.756295204162598 + ], + [ + "xxx", + -14.756333351135254 + ], + [ + ".04.201", + -14.75637149810791 + ], + [ + "partum", + -14.756394386291504 + ], + [ + "texture", + -14.756410598754885 + ], + [ + "▁JUNE", + -14.756454467773438 + ], + [ + "▁£30,000", + -14.756476402282717 + ], + [ + "▁Lode", + -14.756514549255373 + ], + [ + "▁Acai", + -14.756566047668455 + ], + [ + "▁majorities", + -14.756580352783203 + ], + [ + "pinterest", + -14.75660800933838 + ], + [ + "▁MBP", + -14.75662612915039 + ], + [ + "▁Moray", + -14.756685256958008 + ], + [ + "=6", + -14.756731986999512 + ], + [ + "Loading", + -14.756735801696776 + ], + [ + "▁TCG", + -14.756787300109863 + ], + [ + "CCR", + 
-14.756789207458496 + ], + [ + "▁TBC", + -14.756893157958984 + ], + [ + "▁2,100", + -14.756909370422363 + ], + [ + "▁Alves", + -14.756930351257324 + ], + [ + "YEAR", + -14.756961822509766 + ], + [ + "6500", + -14.75697898864746 + ], + [ + "biblio", + -14.757060050964355 + ], + [ + "conditioner", + -14.757081985473633 + ], + [ + "▁33,000", + -14.757144927978516 + ], + [ + "UDA", + -14.757204055786133 + ], + [ + "IQUE", + -14.757245063781738 + ], + [ + "▁tutored", + -14.757444381713867 + ], + [ + "▁SLT", + -14.75744915008545 + ], + [ + "ersten", + -14.757548332214355 + ], + [ + "programmatically", + -14.757646560668944 + ], + [ + "arrington", + -14.757725715637209 + ], + [ + "saint", + -14.757726669311523 + ], + [ + "▁Jacoby", + -14.757735252380373 + ], + [ + "▁Bootle", + -14.757749557495115 + ], + [ + "grim", + -14.757752418518066 + ], + [ + "▁Karol", + -14.757835388183594 + ], + [ + "Lawn", + -14.75783920288086 + ], + [ + ".05%", + -14.75787353515625 + ], + [ + "Seq", + -14.757909774780272 + ], + [ + "▁Thankful", + -14.757965087890623 + ], + [ + "▁Zwi", + -14.758018493652344 + ], + [ + "▁firemen", + -14.75805377960205 + ], + [ + "RATED", + -14.758065223693848 + ], + [ + "▁socialite", + -14.758084297180176 + ], + [ + "▁registrant", + -14.7581205368042 + ], + [ + "▁Reeve", + -14.758153915405272 + ], + [ + "sketch", + -14.758167266845703 + ], + [ + "▁Barge", + -14.758170127868652 + ], + [ + "mystify", + -14.758177757263184 + ], + [ + "▁Pasa", + -14.758177757263184 + ], + [ + "Liv", + -14.758255958557127 + ], + [ + "STING", + -14.758286476135254 + ], + [ + "▁1792", + -14.758299827575684 + ], + [ + "7.25", + -14.75839614868164 + ], + [ + "▁fireside", + -14.758445739746094 + ], + [ + "BSP", + -14.758456230163574 + ], + [ + "gonal", + -14.758458137512209 + ], + [ + "enhall", + -14.75847053527832 + ], + [ + "imic", + -14.758480072021484 + ], + [ + "▁storehouse", + -14.758509635925291 + ], + [ + "robo", + -14.75856113433838 + ], + [ + "Bounded", + -14.758584976196287 + ], + [ + "2600", + -14.758626937866213 + ], + [ + "alem", + -14.758753776550291 + ], + [ + "noch", + -14.75878620147705 + ], + [ + "▁melodrama", + -14.758818626403809 + ], + [ + "▁$7.5", + -14.7588529586792 + ], + [ + "voke", + -14.75887680053711 + ], + [ + "▁armhole", + -14.758877754211426 + ], + [ + "▁fader", + -14.758955001831056 + ], + [ + "▁Eben", + -14.75896167755127 + ], + [ + "▁Perma", + -14.759075164794922 + ], + [ + "-19)", + -14.759106636047363 + ], + [ + "nike", + -14.759138107299805 + ], + [ + "1:09", + -14.759202003479004 + ], + [ + "▁Bernd", + -14.759233474731444 + ], + [ + "▁Layla", + -14.759326934814451 + ], + [ + "WILLIAM", + -14.75933074951172 + ], + [ + "▁Pasc", + -14.75942611694336 + ], + [ + "▁Miu", + -14.759496688842772 + ], + [ + "Pound", + -14.759512901306152 + ], + [ + "▁Buz", + -14.759601593017578 + ], + [ + "9.6%", + -14.759637832641602 + ], + [ + "1919", + -14.760008811950684 + ], + [ + "▁Mita", + -14.760010719299316 + ], + [ + "FAO", + -14.760109901428224 + ], + [ + "Bain", + -14.760196685791016 + ], + [ + "▁immobili", + -14.760313034057615 + ], + [ + "▁Warby", + -14.760330200195312 + ], + [ + "▁Cresc", + -14.76047134399414 + ], + [ + "GEO", + -14.760594367980955 + ], + [ + "issant", + -14.760640144348145 + ], + [ + "▁Wigg", + -14.760648727416992 + ], + [ + "boi", + -14.760734558105469 + ], + [ + "▁Imagin", + -14.760780334472656 + ], + [ + "▁UCAS", + -14.76085090637207 + ], + [ + "chromat", + -14.76085376739502 + ], + [ + "▁Concentrator", + -14.760910987854004 + ], + [ + "▁Figueroa", + 
-14.760910987854004 + ], + [ + "Rejoice", + -14.76091194152832 + ], + [ + "THURSDAY", + -14.76091194152832 + ], + [ + "▁04/26/2019", + -14.76091194152832 + ], + [ + "▁9001:2015", + -14.76091194152832 + ], + [ + "▁AUGUST", + -14.76091194152832 + ], + [ + "▁Aguilera", + -14.76091194152832 + ], + [ + "▁Annotated", + -14.76091194152832 + ], + [ + "▁Biltmore", + -14.76091194152832 + ], + [ + "▁Chittagong", + -14.76091194152832 + ], + [ + "▁Crenshaw", + -14.76091194152832 + ], + [ + "▁FESTIVAL", + -14.76091194152832 + ], + [ + "▁Frenzy", + -14.76091194152832 + ], + [ + "▁Trailblazer", + -14.76091194152832 + ], + [ + "▁dribbling", + -14.76091194152832 + ], + [ + "▁idiopathic", + -14.76091194152832 + ], + [ + "▁incredulous", + -14.76091194152832 + ], + [ + "▁indomitable", + -14.76091194152832 + ], + [ + "▁snatching", + -14.76091194152832 + ], + [ + "▁DELETE", + -14.760912895202637 + ], + [ + "▁trestle", + -14.760912895202637 + ], + [ + "▁Hepsia", + -14.76091480255127 + ], + [ + "▁Astrophysics", + -14.760915756225586 + ], + [ + "▁squabble", + -14.760915756225586 + ], + [ + "Evolving", + -14.76091766357422 + ], + [ + "▁effusion", + -14.76091766357422 + ], + [ + "▁fibroid", + -14.76091766357422 + ], + [ + "▁Sympathy", + -14.760918617248535 + ], + [ + "▁briquette", + -14.760920524597168 + ], + [ + "▁druid", + -14.760921478271484 + ], + [ + "▁stratification", + -14.7609281539917 + ], + [ + "Anticipate", + -14.760929107666016 + ], + [ + "▁Geothermal", + -14.760929107666016 + ], + [ + "▁Hackathon", + -14.760929107666016 + ], + [ + "▁Barbarian", + -14.76093292236328 + ], + [ + "▁Dusseldorf", + -14.76093292236328 + ], + [ + "▁Cyborg", + -14.760934829711914 + ], + [ + "▁shrill", + -14.760940551757812 + ], + [ + "▁Nizam", + -14.760941505432127 + ], + [ + "▁vibratory", + -14.760948181152344 + ], + [ + "▁Sinister", + -14.760951042175291 + ], + [ + "▁Brookhaven", + -14.760952949523926 + ], + [ + "▁LEAVE", + -14.760955810546877 + ], + [ + "▁lycra", + -14.76096248626709 + ], + [ + "▁septum", + -14.76097011566162 + ], + [ + "▁Telltale", + -14.760974884033203 + ], + [ + "▁assembler", + -14.760974884033203 + ], + [ + "▁smidge", + -14.760977745056152 + ], + [ + "▁lugging", + -14.760984420776367 + ], + [ + "▁Mortality", + -14.76098918914795 + ], + [ + "▁overlapped", + -14.760997772216797 + ], + [ + "▁$29.95", + -14.761000633239746 + ], + [ + "▁Gretel", + -14.761000633239746 + ], + [ + "▁UPMC", + -14.761027336120604 + ], + [ + "▁glycolic", + -14.761028289794922 + ], + [ + "▁reclusive", + -14.761030197143556 + ], + [ + "▁Messrs", + -14.761058807373049 + ], + [ + "▁$0.99", + -14.761066436767578 + ], + [ + "▁Banyan", + -14.761073112487791 + ], + [ + "▁propellant", + -14.761080741882324 + ], + [ + "▁trendsetter", + -14.76108741760254 + ], + [ + "ccc", + -14.761088371276855 + ], + [ + "▁Slideshow", + -14.761090278625488 + ], + [ + "▁Vinnie", + -14.76112174987793 + ], + [ + "▁bobbing", + -14.761123657226562 + ], + [ + "▁Caritas", + -14.76112937927246 + ], + [ + "▁PowerEdge", + -14.761151313781738 + ], + [ + "▁Trimming", + -14.761153221130373 + ], + [ + "▁Baroda", + -14.761154174804688 + ], + [ + "24/7", + -14.7611722946167 + ], + [ + "waiving", + -14.761176109313965 + ], + [ + "▁Shaikh", + -14.761178970336914 + ], + [ + "cultura", + -14.761197090148926 + ], + [ + "▁Rushmore", + -14.761200904846191 + ], + [ + "▁collarbone", + -14.76121997833252 + ], + [ + "▁Longboat", + -14.761225700378418 + ], + [ + "▁Antioxidants", + -14.761274337768556 + ], + [ + "▁panache", + -14.761275291442873 + ], + [ + "▁Mariachi", + 
-14.761292457580566 + ], + [ + "▁Barista", + -14.761303901672363 + ], + [ + "▁detonated", + -14.761306762695312 + ], + [ + "▁Sonoran", + -14.761378288269045 + ], + [ + "▁Drucker", + -14.76138401031494 + ], + [ + "TREAT", + -14.761411666870115 + ], + [ + "▁Skillet", + -14.76144790649414 + ], + [ + "ABE", + -14.761466979980469 + ], + [ + "Syria", + -14.76147174835205 + ], + [ + "▁Beattie", + -14.761473655700684 + ], + [ + "▁Rioja", + -14.761476516723633 + ], + [ + "HERE", + -14.761515617370604 + ], + [ + "▁Dreamliner", + -14.761590003967283 + ], + [ + "▁Malabar", + -14.761597633361816 + ], + [ + "▁swaddle", + -14.761639595031738 + ], + [ + "▁debunked", + -14.761677742004396 + ], + [ + "▁Taoist", + -14.761713027954102 + ], + [ + "▁Lyd", + -14.761723518371582 + ], + [ + "ocratic", + -14.761734008789062 + ], + [ + "10-11", + -14.761746406555176 + ], + [ + "▁Tomlin", + -14.76179313659668 + ], + [ + "▁FamilySearch", + -14.761802673339844 + ], + [ + "▁Regeneration", + -14.761844635009766 + ], + [ + "▁dank", + -14.761865615844728 + ], + [ + "▁13:00", + -14.761951446533203 + ], + [ + "2:43", + -14.761982917785645 + ], + [ + "▁invigorated", + -14.761993408203123 + ], + [ + "▁readmission", + -14.762007713317873 + ], + [ + "7500", + -14.762041091918944 + ], + [ + "▁capri", + -14.762066841125488 + ], + [ + "▁Trailhead", + -14.7620849609375 + ], + [ + "remlin", + -14.762123107910156 + ], + [ + "▁Calculated", + -14.762248992919922 + ], + [ + "roofing", + -14.762289047241213 + ], + [ + "fries", + -14.76231288909912 + ], + [ + "▁Lewisville", + -14.76231575012207 + ], + [ + "▁Sudha", + -14.762340545654297 + ], + [ + "REIT", + -14.762364387512209 + ], + [ + "▁Hyman", + -14.762429237365724 + ], + [ + "ithi", + -14.762435913085938 + ], + [ + "▁focusses", + -14.76247787475586 + ], + [ + "▁smacked", + -14.762574195861816 + ], + [ + "▁loomed", + -14.762642860412598 + ], + [ + "candle", + -14.762704849243164 + ], + [ + "éro", + -14.76270866394043 + ], + [ + "BSA", + -14.762731552124023 + ], + [ + "genius", + -14.76274585723877 + ], + [ + "▁Prezi", + -14.762937545776367 + ], + [ + "▁Kapit", + -14.762986183166504 + ], + [ + "▁coarsely", + -14.763059616088867 + ], + [ + "▁Purim", + -14.763071060180664 + ], + [ + "Observation", + -14.76307201385498 + ], + [ + "▁Sile", + -14.763087272644045 + ], + [ + "Merchant", + -14.763101577758787 + ], + [ + "dhering", + -14.763107299804688 + ], + [ + "Opportunities", + -14.763117790222168 + ], + [ + "Emmanuel", + -14.7631196975708 + ], + [ + "Miguel", + -14.7631196975708 + ], + [ + "Suicide", + -14.763121604919434 + ], + [ + "Administrative", + -14.763123512268066 + ], + [ + "Arnold", + -14.763123512268066 + ], + [ + "renewal", + -14.763129234313965 + ], + [ + "completion", + -14.76313018798828 + ], + [ + "Southampton", + -14.763132095336914 + ], + [ + "disaster", + -14.763144493103027 + ], + [ + "Richmond", + -14.763147354125977 + ], + [ + "Durham", + -14.763154983520508 + ], + [ + "smelling", + -14.763155937194824 + ], + [ + "Volvo", + -14.763161659240724 + ], + [ + "▁1550", + -14.76316738128662 + ], + [ + "▁manhood", + -14.763176918029783 + ], + [ + "▁Haunt", + -14.763209342956545 + ], + [ + "▁Galen", + -14.763214111328123 + ], + [ + "discussed", + -14.763370513916016 + ], + [ + "Elaine", + -14.763387680053713 + ], + [ + "ASK", + -14.763407707214355 + ], + [ + "▁buffed", + -14.763456344604492 + ], + [ + "▁Technik", + -14.76346492767334 + ], + [ + "geography", + -14.763472557067873 + ], + [ + "▁thoroughness", + -14.763503074645996 + ], + [ + "Geoff", + -14.763508796691896 + ], + 
[ + "ZN", + -14.76352310180664 + ], + [ + "▁Vickie", + -14.76353931427002 + ], + [ + "canvas", + -14.763574600219728 + ], + [ + "▁Braga", + -14.763582229614258 + ], + [ + "Underlying", + -14.763782501220703 + ], + [ + "glad", + -14.763784408569336 + ], + [ + "WTO", + -14.76379108428955 + ], + [ + "ugly", + -14.763792991638184 + ], + [ + "▁Deus", + -14.763848304748535 + ], + [ + "visiting", + -14.763940811157228 + ], + [ + "diary", + -14.763949394226074 + ], + [ + "▁Sousa", + -14.764153480529783 + ], + [ + "▁Aftermarket", + -14.7643461227417 + ], + [ + "▁APY", + -14.764347076416016 + ], + [ + "▁Ziploc", + -14.76442527770996 + ], + [ + "Sierra", + -14.764447212219238 + ], + [ + "▁psyched", + -14.76449489593506 + ], + [ + "▁octo", + -14.764606475830078 + ], + [ + "THO", + -14.764609336853027 + ], + [ + "▁Saka", + -14.764610290527344 + ], + [ + "▁cui", + -14.764644622802734 + ], + [ + "Wyn", + -14.764657974243164 + ], + [ + "▁repose", + -14.764673233032228 + ], + [ + "SHINE", + -14.764726638793944 + ], + [ + "adjust", + -14.76477336883545 + ], + [ + "▁dented", + -14.764866828918455 + ], + [ + "▁PHD", + -14.765013694763184 + ], + [ + "▁RCW", + -14.765061378479004 + ], + [ + "Vox", + -14.765085220336914 + ], + [ + "▁Ngu", + -14.765118598937988 + ], + [ + "alignment", + -14.765132904052734 + ], + [ + "Bench", + -14.765138626098633 + ], + [ + "▁7-3", + -14.765198707580566 + ], + [ + "Tenant", + -14.765337944030762 + ], + [ + "armor", + -14.765347480773926 + ], + [ + "▁regrow", + -14.76549243927002 + ], + [ + "▁DMR", + -14.76555061340332 + ], + [ + "▁molt", + -14.765559196472168 + ], + [ + "▁CKD", + -14.765584945678713 + ], + [ + "Shabab", + -14.765605926513672 + ], + [ + "6.8%", + -14.765626907348633 + ], + [ + "lantern", + -14.765663146972656 + ], + [ + "ignite", + -14.765706062316896 + ], + [ + "▁MFI", + -14.765732765197754 + ], + [ + "▁Starship", + -14.765764236450195 + ], + [ + "ujan", + -14.765872955322266 + ], + [ + "Gloss", + -14.76624584197998 + ], + [ + "▁accuser", + -14.766302108764648 + ], + [ + "▁WCF", + -14.76634407043457 + ], + [ + "▁keine", + -14.766351699829102 + ], + [ + "BLS", + -14.766358375549316 + ], + [ + "▁Beckman", + -14.766432762145996 + ], + [ + "chimp", + -14.766451835632324 + ], + [ + "▁Schott", + -14.766457557678224 + ], + [ + "▁Rabb", + -14.766534805297852 + ], + [ + "▁Aristo", + -14.766584396362305 + ], + [ + "▁SPG", + -14.766648292541504 + ], + [ + "anesthesiologist", + -14.766772270202637 + ], + [ + "▁comprar", + -14.76682949066162 + ], + [ + "remodel", + -14.76688003540039 + ], + [ + "▁Darin", + -14.766880989074709 + ], + [ + "BUG", + -14.766959190368652 + ], + [ + "diac", + -14.766986846923828 + ], + [ + "▁carnivore", + -14.767030715942385 + ], + [ + "▁tearful", + -14.767054557800291 + ], + [ + "▁Yann", + -14.76716136932373 + ], + [ + "▁appointee", + -14.767176628112791 + ], + [ + "▁Pham", + -14.767194747924805 + ], + [ + "stereo", + -14.767256736755373 + ], + [ + "Brake", + -14.767257690429688 + ], + [ + "racer", + -14.767267227172852 + ], + [ + "▁Tui", + -14.767528533935549 + ], + [ + "▁cannula", + -14.76755428314209 + ], + [ + "smaller", + -14.76758861541748 + ], + [ + "jara", + -14.767606735229492 + ], + [ + "▁metalwork", + -14.767807960510254 + ], + [ + "▁Winding", + -14.767834663391112 + ], + [ + "VOX", + -14.767871856689451 + ], + [ + "eev", + -14.767973899841309 + ], + [ + "▁Arsen", + -14.76800537109375 + ], + [ + "xico", + -14.768047332763672 + ], + [ + "▁Vay", + -14.76806354522705 + ], + [ + "2:35", + -14.768068313598633 + ], + [ + "laka", + 
-14.768077850341797 + ], + [ + "▁Oxi", + -14.768102645874023 + ], + [ + "cinema", + -14.768224716186523 + ], + [ + "▁Kuwaiti", + -14.768356323242188 + ], + [ + "▁concret", + -14.768383979797363 + ], + [ + "Baked", + -14.768412590026855 + ], + [ + "▁portland", + -14.768470764160156 + ], + [ + "Kurt", + -14.768567085266112 + ], + [ + "▁NPL", + -14.768630027770996 + ], + [ + "▁Ogre", + -14.76867961883545 + ], + [ + "lazy", + -14.768699645996094 + ], + [ + "curl", + -14.768739700317385 + ], + [ + "glider", + -14.768744468688965 + ], + [ + "▁Irvin", + -14.768763542175291 + ], + [ + "▁precept", + -14.768765449523926 + ], + [ + "▁17:2", + -14.768842697143556 + ], + [ + "▁Corrie", + -14.768884658813477 + ], + [ + "▁keyring", + -14.768988609313965 + ], + [ + "eeeeeee", + -14.76909637451172 + ], + [ + "hula", + -14.769113540649414 + ], + [ + "recent", + -14.769171714782717 + ], + [ + "▁underwriter", + -14.769174575805664 + ], + [ + "▁Platter", + -14.769219398498535 + ], + [ + "cedent", + -14.76928997039795 + ], + [ + "scence", + -14.76931095123291 + ], + [ + "▁slather", + -14.769404411315918 + ], + [ + "DIG", + -14.76943302154541 + ], + [ + "Veterinarian", + -14.769478797912598 + ], + [ + "gigabyte", + -14.769479751586914 + ], + [ + "technological", + -14.769525527954102 + ], + [ + "▁retinol", + -14.769532203674316 + ], + [ + "sclero", + -14.76963996887207 + ], + [ + "ENDO", + -14.769661903381348 + ], + [ + "1–3", + -14.769695281982422 + ], + [ + "Abdominal", + -14.769761085510254 + ], + [ + "Interoperability", + -14.769761085510254 + ], + [ + "Ordinarily", + -14.769761085510254 + ], + [ + "sabotaging", + -14.769761085510254 + ], + [ + "▁Conveyancing", + -14.769761085510254 + ], + [ + "▁Gippsland", + -14.769761085510254 + ], + [ + "▁HUAWEI", + -14.769761085510254 + ], + [ + "▁Honduran", + -14.769761085510254 + ], + [ + "▁Initiation", + -14.769761085510254 + ], + [ + "▁MacLeod", + -14.769761085510254 + ], + [ + "▁Meghalaya", + -14.769761085510254 + ], + [ + "▁POLICY", + -14.769761085510254 + ], + [ + "▁Rodrigues", + -14.769761085510254 + ], + [ + "▁cacophony", + -14.769761085510254 + ], + [ + "▁commonalities", + -14.769761085510254 + ], + [ + "▁conspiracies", + -14.769761085510254 + ], + [ + "▁excretion", + -14.769761085510254 + ], + [ + "▁gazillion", + -14.769761085510254 + ], + [ + "▁lycopene", + -14.769761085510254 + ], + [ + "▁ombudsman", + -14.769761085510254 + ], + [ + "▁resplendent", + -14.769761085510254 + ], + [ + "▁spiciness", + -14.769761085510254 + ], + [ + "▁stabilisation", + -14.769761085510254 + ], + [ + "▁tourmaline", + -14.769761085510254 + ], + [ + "Condolence", + -14.76976203918457 + ], + [ + "▁elongation", + -14.76976203918457 + ], + [ + "▁untidy", + -14.76976203918457 + ], + [ + "Constraint", + -14.769762992858888 + ], + [ + "▁Marmaris", + -14.769762992858888 + ], + [ + "▁Schaumburg", + -14.769762992858888 + ], + [ + "▁tomatillo", + -14.769762992858888 + ], + [ + "▁vesicle", + -14.769762992858888 + ], + [ + "▁Grenfell", + -14.769763946533203 + ], + [ + "▁Lanarkshire", + -14.76976490020752 + ], + [ + "▁Portobello", + -14.76976490020752 + ], + [ + "▁virulent", + -14.76976490020752 + ], + [ + "▁Alsace", + -14.769765853881836 + ], + [ + "Hiddleston", + -14.769766807556152 + ], + [ + "▁basmati", + -14.769766807556152 + ], + [ + "▁tarragon", + -14.769766807556152 + ], + [ + "▁Abilities", + -14.769768714904783 + ], + [ + "▁Kaunas", + -14.769769668579102 + ], + [ + "▁Trayvon", + -14.769769668579102 + ], + [ + "▁Militia", + -14.769774436950684 + ], + [ + "▁Untold", + -14.769775390625 + ], 
+ [ + "▁Photovoltaic", + -14.76977825164795 + ], + [ + "▁Varnish", + -14.76977825164795 + ], + [ + "▁unplugging", + -14.769781112670898 + ], + [ + "▁Suriname", + -14.769782066345217 + ], + [ + "▁Telekom", + -14.769783973693848 + ], + [ + "▁Horatio", + -14.76978588104248 + ], + [ + "▁Savory", + -14.769787788391112 + ], + [ + "▁Lukaku", + -14.76978874206543 + ], + [ + "▁discolouration", + -14.76978874206543 + ], + [ + "▁hydrology", + -14.76979160308838 + ], + [ + "▁Zappos", + -14.769792556762695 + ], + [ + "▁bonkers", + -14.769793510437012 + ], + [ + "▁Eisner", + -14.769795417785645 + ], + [ + "▁GARAGE", + -14.769797325134276 + ], + [ + "▁Ludlow", + -14.76980209350586 + ], + [ + "▁Comrade", + -14.769804000854492 + ], + [ + "▁naturopathic", + -14.769804000854492 + ], + [ + "▁protrusion", + -14.76980686187744 + ], + [ + "▁soiree", + -14.769811630249023 + ], + [ + "▁Raisin", + -14.769817352294922 + ], + [ + "▁gunpowder", + -14.769818305969238 + ], + [ + "▁Reims", + -14.769824028015137 + ], + [ + "▁Tailgate", + -14.769829750061035 + ], + [ + "▁Barossa", + -14.769837379455566 + ], + [ + "▁NTSC", + -14.769843101501465 + ], + [ + "▁Ragnar", + -14.769843101501465 + ], + [ + "▁Bonaire", + -14.769859313964844 + ], + [ + "▁Cigna", + -14.769877433776855 + ], + [ + "▁Yucca", + -14.769877433776855 + ], + [ + "▁Chariot", + -14.769889831542969 + ], + [ + "▁southerly", + -14.769890785217283 + ], + [ + "▁negation", + -14.769895553588867 + ], + [ + "▁1783", + -14.7698974609375 + ], + [ + "▁spaniel", + -14.769898414611816 + ], + [ + "▁Unfinished", + -14.769905090332031 + ], + [ + "▁enjoin", + -14.769905090332031 + ], + [ + "▁reimagining", + -14.769908905029297 + ], + [ + "▁NLRB", + -14.76992130279541 + ], + [ + "▁Jacinto", + -14.769932746887209 + ], + [ + "▁16:3", + -14.76994514465332 + ], + [ + "▁Celiac", + -14.76995086669922 + ], + [ + "▁townspeople", + -14.7699613571167 + ], + [ + "dashi", + -14.769968032836914 + ], + [ + "Pianist", + -14.769969940185549 + ], + [ + "▁Deduction", + -14.769979476928713 + ], + [ + "▁expendable", + -14.769987106323242 + ], + [ + "rnberg", + -14.770000457763672 + ], + [ + "▁YTD", + -14.770008087158203 + ], + [ + "▁Paintball", + -14.770055770874023 + ], + [ + "▁uninspiring", + -14.770071029663086 + ], + [ + "FFF", + -14.770180702209473 + ], + [ + "▁machinist", + -14.770181655883787 + ], + [ + "▁twang", + -14.770191192626951 + ], + [ + "▁hippy", + -14.770196914672852 + ], + [ + "▁Haddon", + -14.770200729370115 + ], + [ + "▁slaughterhouse", + -14.770256996154783 + ], + [ + "▁predisposed", + -14.770269393920898 + ], + [ + "Cox", + -14.770270347595217 + ], + [ + "▁Caledonian", + -14.77027702331543 + ], + [ + "▁preschooler", + -14.770341873168944 + ], + [ + "charya", + -14.770359992980955 + ], + [ + "▁Cristian", + -14.770362854003906 + ], + [ + "▁Talley", + -14.770395278930664 + ], + [ + "▁slough", + -14.77039909362793 + ], + [ + "▁Bandera", + -14.770415306091309 + ], + [ + "▁Martinsville", + -14.770442962646484 + ], + [ + "▁DHT", + -14.770453453063965 + ], + [ + "▁debian", + -14.770458221435549 + ], + [ + "▁Blown", + -14.770606994628906 + ], + [ + "▁glint", + -14.770614624023438 + ], + [ + "▁Shorty", + -14.770623207092283 + ], + [ + "FSS", + -14.77074909210205 + ], + [ + "▁Batavia", + -14.770753860473633 + ], + [ + "▁blighted", + -14.770794868469238 + ], + [ + "MME", + -14.770899772644045 + ], + [ + "NRC", + -14.771003723144531 + ], + [ + "▁Remodelers", + -14.771029472351074 + ], + [ + "evsky", + -14.771044731140137 + ], + [ + "▁twitching", + -14.771044731140137 + ], + [ + 
"▁Rendering", + -14.771072387695312 + ], + [ + "▁Oda", + -14.77108669281006 + ], + [ + "GOOG", + -14.771112442016602 + ], + [ + "▁quadrupled", + -14.771121978759766 + ], + [ + "▁Gallant", + -14.771197319030762 + ], + [ + "▁NoSQL", + -14.771204948425291 + ], + [ + "DIAN", + -14.771212577819824 + ], + [ + "▁Pleasanton", + -14.771224975585938 + ], + [ + "TILE", + -14.771276473999023 + ], + [ + "▁Didier", + -14.7712984085083 + ], + [ + "SOLD", + -14.771322250366213 + ], + [ + "▁Worthing", + -14.771343231201172 + ], + [ + "pep", + -14.77138900756836 + ], + [ + "▁ushering", + -14.771390914916992 + ], + [ + "▁overtook", + -14.771427154541016 + ], + [ + "▁newsprint", + -14.77150821685791 + ], + [ + "▁Blowing", + -14.771567344665527 + ], + [ + "DSP", + -14.771581649780272 + ], + [ + "▁Cherish", + -14.77171516418457 + ], + [ + "1924", + -14.77178955078125 + ], + [ + "trophy", + -14.771821022033691 + ], + [ + "opted", + -14.771869659423828 + ], + [ + "Nominal", + -14.771896362304688 + ], + [ + "Blanc", + -14.77194881439209 + ], + [ + "ARRY", + -14.771974563598633 + ], + [ + "CHU", + -14.771976470947266 + ], + [ + "▁assuredly", + -14.772011756896973 + ], + [ + "▁convertion", + -14.772012710571287 + ], + [ + "lted", + -14.77202606201172 + ], + [ + "Snake", + -14.77207851409912 + ], + [ + "▁18′′", + -14.77209186553955 + ], + [ + "▁solidifying", + -14.772095680236816 + ], + [ + "occupation", + -14.772101402282717 + ], + [ + "▁Krup", + -14.772130012512209 + ], + [ + "Miscellaneous", + -14.772150039672852 + ], + [ + "▁Cogni", + -14.772161483764648 + ], + [ + "Devote", + -14.772180557250977 + ], + [ + "widget", + -14.772186279296877 + ], + [ + "PWA", + -14.772187232971191 + ], + [ + "Instructor", + -14.772194862365724 + ], + [ + "WORLD", + -14.772198677062988 + ], + [ + "Emerald", + -14.772210121154783 + ], + [ + "infrastructure", + -14.772210121154783 + ], + [ + "Kenneth", + -14.772212028503418 + ], + [ + "attempt", + -14.772212028503418 + ], + [ + "Shirley", + -14.77221393585205 + ], + [ + "christmas", + -14.772216796875 + ], + [ + "courtesy", + -14.77221965789795 + ], + [ + "Florence", + -14.772221565246582 + ], + [ + "Apache", + -14.772223472595217 + ], + [ + "levitra", + -14.772226333618164 + ], + [ + "emerging", + -14.772254943847656 + ], + [ + "▁confit", + -14.772260665893556 + ], + [ + "Shred", + -14.772321701049805 + ], + [ + "Salary", + -14.772333145141602 + ], + [ + "▁Abdo", + -14.772335052490234 + ], + [ + "discretion", + -14.772424697875977 + ], + [ + "username", + -14.772446632385254 + ], + [ + "▁plying", + -14.77247714996338 + ], + [ + "▁Rambo", + -14.772515296936035 + ], + [ + "▁_________", + -14.772521018981934 + ], + [ + "▁benchtop", + -14.772523880004885 + ], + [ + "▁Quik", + -14.772574424743652 + ], + [ + "hyped", + -14.772631645202637 + ], + [ + "disco", + -14.772679328918455 + ], + [ + "▁deservedly", + -14.772743225097656 + ], + [ + "▁wagered", + -14.772747993469238 + ], + [ + "▁leche", + -14.77275562286377 + ], + [ + "▁CEST", + -14.772781372070312 + ], + [ + "▁disgraceful", + -14.772787094116213 + ], + [ + "GEM", + -14.772814750671388 + ], + [ + "▁(182", + -14.772819519042969 + ], + [ + "000/-", + -14.772857666015623 + ], + [ + "GRS", + -14.7728910446167 + ], + [ + "▁Alston", + -14.772944450378418 + ], + [ + "▁Volo", + -14.77308464050293 + ], + [ + "concessional", + -14.773128509521484 + ], + [ + "▁backhaul", + -14.773159980773926 + ], + [ + "▁Chowk", + -14.77316665649414 + ], + [ + "khov", + -14.773205757141112 + ], + [ + "▁Nao", + -14.773207664489746 + ], + [ + "▁Salter", + 
-14.773228645324709 + ], + [ + "Ode", + -14.77326202392578 + ], + [ + "▁schoolgirl", + -14.77328872680664 + ], + [ + "Seize", + -14.773322105407717 + ], + [ + "Wales", + -14.773338317871094 + ], + [ + "adv", + -14.773375511169434 + ], + [ + "ulasi", + -14.773382186889648 + ], + [ + "▁Bestseller", + -14.773551940917969 + ], + [ + "▁Nephi", + -14.77357292175293 + ], + [ + "NDE", + -14.773626327514648 + ], + [ + "agreed", + -14.773651123046877 + ], + [ + "▁Stacked", + -14.773828506469728 + ], + [ + "▁Noelle", + -14.773829460144045 + ], + [ + "▁Venti", + -14.773877143859863 + ], + [ + "▁empath", + -14.773880004882812 + ], + [ + "trium", + -14.773919105529783 + ], + [ + "alapa", + -14.773930549621582 + ], + [ + "uploaded", + -14.774036407470703 + ], + [ + "aswamy", + -14.774048805236816 + ], + [ + "VERT", + -14.77411651611328 + ], + [ + "▁Natura", + -14.77431583404541 + ], + [ + "quid", + -14.774322509765623 + ], + [ + "▁Snowmass", + -14.774421691894531 + ], + [ + "1:22", + -14.774429321289062 + ], + [ + "kuri", + -14.774441719055176 + ], + [ + "▁pj", + -14.77453327178955 + ], + [ + "angular", + -14.774554252624512 + ], + [ + "shish", + -14.774619102478027 + ], + [ + "▁pax", + -14.774720191955566 + ], + [ + "Kong", + -14.774828910827637 + ], + [ + "Marx", + -14.77495574951172 + ], + [ + "▁stylesheet", + -14.77497386932373 + ], + [ + "=10", + -14.77499008178711 + ], + [ + "▁4/4", + -14.775004386901855 + ], + [ + "▁Halen", + -14.775018692016602 + ], + [ + "▁hashing", + -14.77508544921875 + ], + [ + "▁sistem", + -14.775176048278809 + ], + [ + "▁Whig", + -14.775285720825195 + ], + [ + "▁TUI", + -14.775355339050291 + ], + [ + "entered", + -14.775465965270996 + ], + [ + "▁Alana", + -14.775520324707031 + ], + [ + "▁detoxing", + -14.77553653717041 + ], + [ + "1:37", + -14.775605201721191 + ], + [ + "▁Tempur", + -14.775633811950684 + ], + [ + "conformist", + -14.775678634643556 + ], + [ + "▁Jaffa", + -14.775853157043455 + ], + [ + "ucu", + -14.775867462158203 + ], + [ + "▁Noctu", + -14.77601718902588 + ], + [ + "▁corticosteroid", + -14.776037216186523 + ], + [ + "▁contaminant", + -14.776066780090332 + ], + [ + "▁typographic", + -14.776118278503418 + ], + [ + "▁RCB", + -14.776155471801758 + ], + [ + "apillary", + -14.77620792388916 + ], + [ + "#039;", + -14.776220321655272 + ], + [ + "Gur", + -14.776307106018066 + ], + [ + "▁MACD", + -14.776336669921877 + ], + [ + "▁Deserve", + -14.77640151977539 + ], + [ + "▁Bobcat", + -14.776409149169922 + ], + [ + "7400", + -14.77651023864746 + ], + [ + "trance", + -14.77656364440918 + ], + [ + "▁Mohr", + -14.776795387268066 + ], + [ + "$300", + -14.776803016662598 + ], + [ + "▁Donbas", + -14.776874542236328 + ], + [ + "▁Stef", + -14.776927947998049 + ], + [ + "2,900", + -14.776933670043944 + ], + [ + "▁Shon", + -14.776947021484377 + ], + [ + "9.15", + -14.777098655700684 + ], + [ + "▁earmark", + -14.777103424072266 + ], + [ + "▁extoll", + -14.777146339416504 + ], + [ + "SWR", + -14.77717113494873 + ], + [ + "MEM", + -14.777174949645996 + ], + [ + "aqa", + -14.777274131774902 + ], + [ + "zner", + -14.777288436889648 + ], + [ + "Bali", + -14.777515411376951 + ], + [ + "arne", + -14.777649879455566 + ], + [ + "▁Checkpoint", + -14.7777738571167 + ], + [ + "▁08:2", + -14.777788162231444 + ], + [ + "▁Mahama", + -14.777791023254396 + ], + [ + "ZM", + -14.777843475341797 + ], + [ + "▁972-", + -14.777846336364746 + ], + [ + "Tina", + -14.77791976928711 + ], + [ + "▁Nyc", + -14.777920722961426 + ], + [ + "Transcript", + -14.777944564819336 + ], + [ + "▁possi", + 
-14.77803897857666 + ], + [ + "▁clothesline", + -14.77809238433838 + ], + [ + "▁Boho", + -14.778135299682615 + ], + [ + "▁transpose", + -14.778141021728516 + ], + [ + "▁muff", + -14.77816677093506 + ], + [ + "▁Hoist", + -14.778199195861816 + ], + [ + "▁turnip", + -14.77820110321045 + ], + [ + "dude", + -14.77823543548584 + ], + [ + "▁concoct", + -14.778250694274902 + ], + [ + "▁MCG", + -14.778360366821287 + ], + [ + "wellness", + -14.778464317321776 + ], + [ + "linski", + -14.778499603271484 + ], + [ + "DEVELOP", + -14.778672218322754 + ], + [ + "Glutathione", + -14.77868938446045 + ], + [ + "▁800-367-9920", + -14.77868938446045 + ], + [ + "▁Diligence", + -14.77868938446045 + ], + [ + "▁Gleason", + -14.77868938446045 + ], + [ + "▁Kombucha", + -14.77868938446045 + ], + [ + "▁Percival", + -14.77868938446045 + ], + [ + "▁Seagull", + -14.77868938446045 + ], + [ + "▁Tijuana", + -14.77868938446045 + ], + [ + "▁effervescent", + -14.77868938446045 + ], + [ + "▁indignation", + -14.77868938446045 + ], + [ + "▁minutiae", + -14.77868938446045 + ], + [ + "▁oxymoron", + -14.77868938446045 + ], + [ + "▁pancetta", + -14.77868938446045 + ], + [ + "▁precocious", + -14.77868938446045 + ], + [ + "▁ravages", + -14.77868938446045 + ], + [ + "▁sturdier", + -14.77868938446045 + ], + [ + "▁styrofoam", + -14.77868938446045 + ], + [ + "▁uncompressed", + -14.77868938446045 + ], + [ + "▁unflattering", + -14.77868938446045 + ], + [ + "rheumatic", + -14.778690338134766 + ], + [ + "▁Apostille", + -14.778690338134766 + ], + [ + "▁Mauritania", + -14.778690338134766 + ], + [ + "▁Puccini", + -14.778690338134766 + ], + [ + "▁Scooby", + -14.778690338134766 + ], + [ + "▁rheumatism", + -14.778690338134766 + ], + [ + "▁vestibule", + -14.778690338134766 + ], + [ + "Pursuing", + -14.778691291809082 + ], + [ + "▁deranged", + -14.778691291809082 + ], + [ + "▁pharmacokinetic", + -14.778691291809082 + ], + [ + "▁burlesque", + -14.778692245483398 + ], + [ + "▁flaring", + -14.778692245483398 + ], + [ + "▁McNally", + -14.778693199157717 + ], + [ + "▁strumming", + -14.778695106506348 + ], + [ + "Entrants", + -14.778696060180664 + ], + [ + "▁ROBERT", + -14.778696060180664 + ], + [ + "▁animating", + -14.778696060180664 + ], + [ + "▁anthropogenic", + -14.778696060180664 + ], + [ + "▁bdrm", + -14.778696060180664 + ], + [ + "▁bokeh", + -14.778696060180664 + ], + [ + "▁Aamir", + -14.77869701385498 + ], + [ + "▁Tauranga", + -14.77869701385498 + ], + [ + "▁Grasshopper", + -14.778698921203612 + ], + [ + "▁Valverde", + -14.778698921203612 + ], + [ + "▁linseed", + -14.778698921203612 + ], + [ + "▁RSPCA", + -14.778700828552246 + ], + [ + "▁Cafeteria", + -14.778701782226562 + ], + [ + "▁hydrolysis", + -14.778701782226562 + ], + [ + "▁Diversified", + -14.778705596923828 + ], + [ + "▁Ancestor", + -14.778708457946776 + ], + [ + "▁Ranbir", + -14.778709411621094 + ], + [ + "Roche", + -14.77871036529541 + ], + [ + "▁Arusha", + -14.77871036529541 + ], + [ + "▁lychee", + -14.778711318969728 + ], + [ + "▁shrank", + -14.778716087341309 + ], + [ + "▁conjugated", + -14.778717041015623 + ], + [ + "▁LeBlanc", + -14.77871799468994 + ], + [ + "▁patchouli", + -14.77872085571289 + ], + [ + "▁raison", + -14.778731346130373 + ], + [ + "▁parlance", + -14.778732299804688 + ], + [ + "▁Broomfield", + -14.77873992919922 + ], + [ + "▁Raquel", + -14.77873992919922 + ], + [ + "▁PAST", + -14.778745651245115 + ], + [ + "▁Concise", + -14.778746604919434 + ], + [ + "arang", + -14.77874755859375 + ], + [ + "▁mohair", + -14.7787504196167 + ], + [ + "quintessentially", + 
-14.778759956359863 + ], + [ + "▁Hilliard", + -14.77876091003418 + ], + [ + "▁gazelle", + -14.778767585754396 + ], + [ + "▁Spyware", + -14.778775215148926 + ], + [ + "folded", + -14.778776168823242 + ], + [ + "▁Clementine", + -14.778783798217772 + ], + [ + "▁Aldean", + -14.778788566589355 + ], + [ + "▁Isilon", + -14.778800010681152 + ], + [ + "▁Salaries", + -14.778800964355469 + ], + [ + "▁WFA", + -14.778806686401367 + ], + [ + "▁unsealed", + -14.77880859375 + ], + [ + "▁hydrogenated", + -14.778818130493164 + ], + [ + "▁bundt", + -14.77881908416748 + ], + [ + "▁(212)", + -14.778860092163086 + ], + [ + "▁MOTHER", + -14.778863906860352 + ], + [ + "▁Nandi", + -14.778871536254885 + ], + [ + "▁Pasteur", + -14.778876304626465 + ], + [ + "▁radiography", + -14.778876304626465 + ], + [ + "▁DivX", + -14.778888702392578 + ], + [ + "▁tabular", + -14.778895378112791 + ], + [ + "▁Brutus", + -14.778924942016602 + ], + [ + "▁warlord", + -14.77893352508545 + ], + [ + "▁dilation", + -14.778942108154297 + ], + [ + "▁Duomo", + -14.7789888381958 + ], + [ + "▁Noonan", + -14.77900505065918 + ], + [ + "▁Limitations", + -14.779009819030762 + ], + [ + "dronate", + -14.77903652191162 + ], + [ + "lizumab", + -14.779065132141112 + ], + [ + "▁LISTEN", + -14.77907371520996 + ], + [ + "▁$3.99", + -14.779086112976074 + ], + [ + "Narrator", + -14.779102325439451 + ], + [ + "▁RISC", + -14.779114723205566 + ], + [ + "▁scalpel", + -14.779139518737791 + ], + [ + "▁underbelly", + -14.779149055480955 + ], + [ + "▁Classifier", + -14.779197692871094 + ], + [ + "elina", + -14.779204368591309 + ], + [ + "▁Potty", + -14.779215812683104 + ], + [ + "▁stubbornness", + -14.779227256774902 + ], + [ + "▁beckoning", + -14.779248237609863 + ], + [ + "▁Dinesh", + -14.779301643371582 + ], + [ + "▁Holbrook", + -14.779325485229492 + ], + [ + "▁niggle", + -14.779327392578123 + ], + [ + "▁neem", + -14.77933406829834 + ], + [ + "▁Calico", + -14.779352188110352 + ], + [ + "▁Mattie", + -14.779356956481934 + ], + [ + "▁smock", + -14.779356956481934 + ], + [ + "▁interred", + -14.779367446899414 + ], + [ + "▁sulf", + -14.779452323913574 + ], + [ + "▁$3.50", + -14.779460906982422 + ], + [ + "▁Crunchy", + -14.779462814331056 + ], + [ + "▁Bellini", + -14.779468536376951 + ], + [ + "intermediate", + -14.779470443725586 + ], + [ + "▁Mame", + -14.77947998046875 + ], + [ + "▁Puglia", + -14.779584884643556 + ], + [ + "▁lacy", + -14.779586791992188 + ], + [ + "ORN", + -14.779643058776855 + ], + [ + "badge", + -14.779644966125488 + ], + [ + "▁Hinton", + -14.779698371887209 + ], + [ + "▁werewolves", + -14.779742240905762 + ], + [ + "▁Aristocrat", + -14.779803276062012 + ], + [ + "▁PTZ", + -14.77980613708496 + ], + [ + "homme", + -14.77983283996582 + ], + [ + "▁Capone", + -14.77987003326416 + ], + [ + "▁Thong", + -14.779915809631348 + ], + [ + "▁unblocked", + -14.779936790466309 + ], + [ + "IVES", + -14.779946327209473 + ], + [ + "▁Mallard", + -14.779949188232422 + ], + [ + "▁motorised", + -14.77996826171875 + ], + [ + "▁Kain", + -14.779982566833496 + ], + [ + "▁Samoan", + -14.780046463012695 + ], + [ + "▁ideologically", + -14.780092239379885 + ], + [ + "▁Matador", + -14.78022289276123 + ], + [ + "▁Rosemont", + -14.780308723449709 + ], + [ + "▁sifted", + -14.780315399169922 + ], + [ + "1:26", + -14.780363082885742 + ], + [ + "▁Aalto", + -14.78050136566162 + ], + [ + "arratt", + -14.780557632446287 + ], + [ + "IYA", + -14.78060817718506 + ], + [ + "Bead", + -14.780617713928224 + ], + [ + "Arranging", + -14.780627250671388 + ], + [ + "▁screencast", + 
-14.780632019042969 + ], + [ + "▁Infini", + -14.780654907226562 + ], + [ + "▁Millard", + -14.780694961547852 + ], + [ + "▁JLL", + -14.780776977539062 + ], + [ + "▁prospered", + -14.780920028686523 + ], + [ + "leash", + -14.780932426452637 + ], + [ + "ferential", + -14.780964851379396 + ], + [ + "illah", + -14.780965805053713 + ], + [ + "Bha", + -14.781091690063477 + ], + [ + "Immediate", + -14.781118392944336 + ], + [ + "medicate", + -14.781121253967283 + ], + [ + "Treasure", + -14.781145095825195 + ], + [ + "▁faintly", + -14.781184196472168 + ], + [ + "▁imitated", + -14.781203269958496 + ], + [ + "▁Lox", + -14.78127098083496 + ], + [ + "=8", + -14.78127384185791 + ], + [ + "synthesis", + -14.781279563903809 + ], + [ + "Sweep", + -14.781310081481934 + ], + [ + "lje", + -14.78131866455078 + ], + [ + "Contribution", + -14.78132438659668 + ], + [ + "RAG", + -14.781327247619627 + ], + [ + "▁Cashier", + -14.781331062316896 + ], + [ + "▁Relaxed", + -14.781357765197754 + ], + [ + "Twice", + -14.78135871887207 + ], + [ + "Yield", + -14.781375885009766 + ], + [ + "Mindfulness", + -14.781376838684082 + ], + [ + "Upstairs", + -14.781380653381348 + ], + [ + "Boulder", + -14.781387329101562 + ], + [ + "Agricultural", + -14.78138828277588 + ], + [ + "Retired", + -14.781392097473145 + ], + [ + "Surgery", + -14.781392097473145 + ], + [ + "Babies", + -14.78139305114746 + ], + [ + "Parliament", + -14.781394958496094 + ], + [ + "▁Cpl", + -14.781394958496094 + ], + [ + "Fleet", + -14.78139591217041 + ], + [ + "plice", + -14.781396865844728 + ], + [ + "Louisiana", + -14.781405448913574 + ], + [ + "Diesel", + -14.78140926361084 + ], + [ + "improving", + -14.781410217285156 + ], + [ + "▁cogent", + -14.78141975402832 + ], + [ + "impossible", + -14.78142547607422 + ], + [ + "Elementary", + -14.781434059143066 + ], + [ + "exciting", + -14.781434059143066 + ], + [ + "▁DECK", + -14.781523704528809 + ], + [ + "▁thrush", + -14.781524658203123 + ], + [ + "Resort", + -14.781529426574709 + ], + [ + "Shaun", + -14.7815523147583 + ], + [ + "phane", + -14.78161907196045 + ], + [ + "Davies", + -14.781668663024902 + ], + [ + "▁millwork", + -14.781694412231444 + ], + [ + "adaptive", + -14.78173542022705 + ], + [ + "DEP", + -14.781752586364746 + ], + [ + "Bingo", + -14.781782150268556 + ], + [ + "iyi", + -14.781785011291504 + ], + [ + "▁swarming", + -14.781848907470703 + ], + [ + "marble", + -14.781853675842283 + ], + [ + "broke", + -14.781895637512209 + ], + [ + "accepted", + -14.7819242477417 + ], + [ + "Coaches", + -14.78196907043457 + ], + [ + "mhz", + -14.781984329223633 + ], + [ + "▁mln", + -14.782057762145996 + ], + [ + "Mobi", + -14.78211784362793 + ], + [ + "interested", + -14.78211784362793 + ], + [ + "▁£85", + -14.782208442687988 + ], + [ + "▁Kati", + -14.78221035003662 + ], + [ + "parse", + -14.78225040435791 + ], + [ + "####", + -14.782392501831056 + ], + [ + "Visitation", + -14.782402992248535 + ], + [ + "▁Fae", + -14.782414436340332 + ], + [ + "roga", + -14.78244400024414 + ], + [ + "Comb", + -14.78248691558838 + ], + [ + "▁parlay", + -14.782556533813477 + ], + [ + "▁MOON", + -14.78255844116211 + ], + [ + "▁SDSU", + -14.782670021057127 + ], + [ + "vella", + -14.782679557800291 + ], + [ + "NOVO", + -14.78282356262207 + ], + [ + "stepped", + -14.782851219177246 + ], + [ + "▁allspice", + -14.782881736755373 + ], + [ + "Painted", + -14.782903671264648 + ], + [ + "pushing", + -14.782906532287598 + ], + [ + "▁Extender", + -14.782934188842772 + ], + [ + "5300", + -14.782971382141112 + ], + [ + "creditworthiness", + 
-14.782977104187012 + ], + [ + "businesses", + -14.782999038696287 + ], + [ + "Curry", + -14.783007621765137 + ], + [ + "▁Naik", + -14.783035278320312 + ], + [ + "Fanatic", + -14.783073425292969 + ], + [ + "Ongoing", + -14.783129692077637 + ], + [ + "Reaching", + -14.783157348632812 + ], + [ + "Branch", + -14.78317928314209 + ], + [ + "▁TSI", + -14.783296585083008 + ], + [ + "▁$120,000", + -14.78336238861084 + ], + [ + "4:40", + -14.783365249633787 + ], + [ + "▁Kahlo", + -14.783419609069824 + ], + [ + "Bark", + -14.783428192138672 + ], + [ + "0.75", + -14.78346347808838 + ], + [ + "▁Councilmember", + -14.78352165222168 + ], + [ + "deployment", + -14.783538818359377 + ], + [ + "Recruit", + -14.783561706542969 + ], + [ + "4-15", + -14.78361701965332 + ], + [ + "QIP", + -14.78363800048828 + ], + [ + "▁Verna", + -14.783666610717772 + ], + [ + "melting", + -14.783671379089355 + ], + [ + "▁NAA", + -14.783696174621582 + ], + [ + "1.75", + -14.783710479736328 + ], + [ + "▁30-50", + -14.783730506896973 + ], + [ + "Ethan", + -14.783740997314451 + ], + [ + "pale", + -14.783758163452148 + ], + [ + "6.75", + -14.783759117126465 + ], + [ + "▁dise", + -14.783851623535156 + ], + [ + "▁Meow", + -14.783891677856444 + ], + [ + "▁Corian", + -14.783915519714355 + ], + [ + "▁Fryer", + -14.783915519714355 + ], + [ + "NCB", + -14.783997535705566 + ], + [ + "flap", + -14.7841796875 + ], + [ + "▁Jeter", + -14.784224510192873 + ], + [ + "▁Sharm", + -14.78428840637207 + ], + [ + "Limitation", + -14.78437042236328 + ], + [ + "▁Muta", + -14.78437328338623 + ], + [ + "▁shallower", + -14.784395217895508 + ], + [ + "▁necktie", + -14.78442668914795 + ], + [ + "▁blazed", + -14.784432411193848 + ], + [ + "RUB", + -14.784483909606934 + ], + [ + "▁Awaken", + -14.784513473510742 + ], + [ + "Sporting", + -14.784521102905272 + ], + [ + "▁MSL", + -14.78454303741455 + ], + [ + "jig", + -14.784578323364258 + ], + [ + "▁Sidi", + -14.78461742401123 + ], + [ + "▁rivet", + -14.784690856933594 + ], + [ + "▁Canva", + -14.784784317016602 + ], + [ + "1:48", + -14.784845352172852 + ], + [ + "▁0845", + -14.784857749938965 + ], + [ + "▁crappie", + -14.784981727600098 + ], + [ + "▁hardiness", + -14.784995079040527 + ], + [ + "▁sinker", + -14.78504467010498 + ], + [ + "Manuel", + -14.78519344329834 + ], + [ + "▁Shareholder", + -14.78524112701416 + ], + [ + "ATL", + -14.78542709350586 + ], + [ + "▁Oost", + -14.785454750061035 + ], + [ + "pvc", + -14.785489082336426 + ], + [ + "1:02", + -14.785630226135254 + ], + [ + "▁thud", + -14.785685539245604 + ], + [ + "inden", + -14.785736083984377 + ], + [ + "▁Anja", + -14.78578758239746 + ], + [ + "etched", + -14.785807609558104 + ], + [ + "JAN", + -14.78587532043457 + ], + [ + "Madam", + -14.785887718200684 + ], + [ + "▁Stabil", + -14.785958290100098 + ], + [ + "appreciated", + -14.786005973815918 + ], + [ + "pronation", + -14.786026000976562 + ], + [ + "Viva", + -14.786109924316406 + ], + [ + "▁Kamen", + -14.786124229431152 + ], + [ + "▁KRA", + -14.786125183105469 + ], + [ + "▁Talos", + -14.78622817993164 + ], + [ + "Branding", + -14.786266326904297 + ], + [ + "▁correspondingly", + -14.78633975982666 + ], + [ + "▁validator", + -14.786388397216797 + ], + [ + "▁1794", + -14.786503791809082 + ], + [ + "gasse", + -14.786688804626465 + ], + [ + "▁8.5\"", + -14.78672218322754 + ], + [ + "▁stoma", + -14.786723136901855 + ], + [ + "Abdu", + -14.786731719970703 + ], + [ + "▁$45,000", + -14.78674030303955 + ], + [ + "MEDIA", + -14.786822319030762 + ], + [ + "▁Kiara", + -14.786857604980469 + ], + [ + "bitter", + 
-14.7869234085083 + ], + [ + "▁DAX", + -14.787025451660156 + ], + [ + "▁Tark", + -14.787099838256836 + ], + [ + "▁Jeri", + -14.78710651397705 + ], + [ + "▁agitate", + -14.787108421325684 + ], + [ + "▁GSD", + -14.787124633789062 + ], + [ + "▁headmaster", + -14.787275314331056 + ], + [ + "Scalable", + -14.787343978881836 + ], + [ + "▁remand", + -14.787458419799805 + ], + [ + "▁TELL", + -14.787583351135254 + ], + [ + "▁HOMES", + -14.787589073181152 + ], + [ + "fallen", + -14.787593841552734 + ], + [ + "▁savanna", + -14.787609100341797 + ], + [ + "▁HOSPITAL", + -14.787612915039062 + ], + [ + "▁2018-202", + -14.787649154663086 + ], + [ + "▁Kyrgyz", + -14.787694931030272 + ], + [ + "BACKGROUND", + -14.78769874572754 + ], + [ + "COLUMBIA", + -14.78769874572754 + ], + [ + "matrimony", + -14.78769874572754 + ], + [ + "vancouver", + -14.78769874572754 + ], + [ + "▁Absinthe", + -14.78769874572754 + ], + [ + "▁Anesthesiology", + -14.78769874572754 + ], + [ + "▁Ayatollah", + -14.78769874572754 + ], + [ + "▁Erskine", + -14.78769874572754 + ], + [ + "▁QUOTE", + -14.78769874572754 + ], + [ + "▁Villarreal", + -14.78769874572754 + ], + [ + "▁contraindicated", + -14.78769874572754 + ], + [ + "▁conversant", + -14.78769874572754 + ], + [ + "▁dwarves", + -14.78769874572754 + ], + [ + "▁exogenous", + -14.78769874572754 + ], + [ + "▁iSCSI", + -14.78769874572754 + ], + [ + "▁impersonation", + -14.78769874572754 + ], + [ + "▁impurity", + -14.78769874572754 + ], + [ + "▁pragmatism", + -14.78769874572754 + ], + [ + "▁symphonies", + -14.78769874572754 + ], + [ + "▁unprofitable", + -14.78769874572754 + ], + [ + "▁BIRTHDAY", + -14.787699699401855 + ], + [ + "▁Babysitter", + -14.787699699401855 + ], + [ + "▁Squamish", + -14.787699699401855 + ], + [ + "▁Stetson", + -14.787699699401855 + ], + [ + "▁Tomahawk", + -14.787699699401855 + ], + [ + "▁Adderall", + -14.787700653076172 + ], + [ + "▁astrophysics", + -14.787700653076172 + ], + [ + "Specimen", + -14.787701606750488 + ], + [ + "▁MOSFET", + -14.787701606750488 + ], + [ + "▁Permaculture", + -14.787701606750488 + ], + [ + "▁Hashimoto", + -14.787702560424805 + ], + [ + "▁Xilinx", + -14.787702560424805 + ], + [ + "▁Immanuel", + -14.78770351409912 + ], + [ + "▁jetzt", + -14.78770351409912 + ], + [ + "▁Registrant", + -14.787704467773438 + ], + [ + "definable", + -14.787705421447754 + ], + [ + "▁hoarse", + -14.787708282470703 + ], + [ + "▁palazzo", + -14.787708282470703 + ], + [ + "▁Hermosa", + -14.787710189819336 + ], + [ + "▁Buzzfeed", + -14.787714004516602 + ], + [ + "▁stereotyping", + -14.787714004516602 + ], + [ + "▁Calibre", + -14.787714958190918 + ], + [ + "▁spiking", + -14.787714958190918 + ], + [ + "▁VOICE", + -14.787717819213867 + ], + [ + "▁Abarth", + -14.787718772888184 + ], + [ + "ARTICLE", + -14.7877197265625 + ], + [ + "▁Duggan", + -14.78772258758545 + ], + [ + "▁kabob", + -14.787724494934082 + ], + [ + "▁altcoin", + -14.787725448608398 + ], + [ + "▁phosphorous", + -14.787725448608398 + ], + [ + "▁silage", + -14.787726402282717 + ], + [ + "▁OHSAS", + -14.787727355957031 + ], + [ + "▁Alfresco", + -14.78773021697998 + ], + [ + "▁Shaffer", + -14.78773021697998 + ], + [ + "▁Collapse", + -14.787731170654297 + ], + [ + "▁Eunice", + -14.787736892700195 + ], + [ + "▁Docklands", + -14.787748336791992 + ], + [ + "▁HEATED", + -14.787750244140623 + ], + [ + "▁mannerisms", + -14.787750244140623 + ], + [ + "▁Composed", + -14.787753105163574 + ], + [ + "▁Guildhall", + -14.787758827209473 + ], + [ + "Symbol", + -14.787759780883787 + ], + [ + "▁Autotrader", + -14.787768363952637 
+ ], + [ + "STREAM", + -14.787775039672852 + ], + [ + "▁Nashik", + -14.78778076171875 + ], + [ + "▁basting", + -14.787785530090332 + ], + [ + "▁Looney", + -14.787786483764648 + ], + [ + "▁Pretzel", + -14.787787437438965 + ], + [ + "▁Madigan", + -14.787789344787598 + ], + [ + "▁laudable", + -14.787805557250977 + ], + [ + "▁Regenerative", + -14.787822723388672 + ], + [ + "▁Holyoke", + -14.787824630737305 + ], + [ + "▁photocopies", + -14.787824630737305 + ], + [ + "ELLER", + -14.78782558441162 + ], + [ + "▁Shoemaker", + -14.78787612915039 + ], + [ + "▁Simms", + -14.787881851196287 + ], + [ + "▁VAIO", + -14.787894248962402 + ], + [ + "CML", + -14.787904739379885 + ], + [ + "▁WNBA", + -14.787908554077148 + ], + [ + "▁Vaseline", + -14.78791332244873 + ], + [ + "NAB", + -14.787917137145996 + ], + [ + "▁earthen", + -14.787919044494627 + ], + [ + "▁clenched", + -14.787938117980955 + ], + [ + "▁Flask", + -14.787945747375488 + ], + [ + "▁Motherhood", + -14.787972450256348 + ], + [ + "▁Stink", + -14.787983894348145 + ], + [ + "▁hydrodynamic", + -14.788020133972168 + ], + [ + "LIES", + -14.788076400756836 + ], + [ + "▁Hookup", + -14.788077354431152 + ], + [ + "▁masterclasses", + -14.788084030151367 + ], + [ + "acqua", + -14.788141250610352 + ], + [ + "ancia", + -14.78814697265625 + ], + [ + "▁sadistic", + -14.788155555725098 + ], + [ + ".12.201", + -14.788192749023438 + ], + [ + "OOL", + -14.78819751739502 + ], + [ + "▁moistened", + -14.7882080078125 + ], + [ + "▁Realtek", + -14.788223266601562 + ], + [ + "nnigan", + -14.788272857666016 + ], + [ + "▁clamoring", + -14.788272857666016 + ], + [ + "▁spammy", + -14.788307189941406 + ], + [ + "▁beholden", + -14.788405418395996 + ], + [ + "▁dewatering", + -14.788457870483398 + ], + [ + "▁Gana", + -14.788529396057127 + ], + [ + "7,8", + -14.788533210754396 + ], + [ + "▁Benning", + -14.788566589355469 + ], + [ + "8-14", + -14.78858470916748 + ], + [ + "ndorf", + -14.78859043121338 + ], + [ + "▁Ridgewood", + -14.788599014282228 + ], + [ + "▁penance", + -14.7886323928833 + ], + [ + "▁hybridization", + -14.788636207580566 + ], + [ + "▁fluxes", + -14.788640022277832 + ], + [ + "▁gearboxes", + -14.78867244720459 + ], + [ + "▁Catania", + -14.788708686828612 + ], + [ + "▁Salz", + -14.788731575012209 + ], + [ + "clue", + -14.788765907287598 + ], + [ + "▁mastercard", + -14.788799285888672 + ], + [ + "volution", + -14.788817405700684 + ], + [ + "▁ruthlessly", + -14.78884506225586 + ], + [ + "▁indigent", + -14.78893756866455 + ], + [ + "Gina", + -14.789011001586914 + ], + [ + "▁coinage", + -14.789077758789062 + ], + [ + "▁Nieto", + -14.789111137390137 + ], + [ + "▁Offside", + -14.789144515991213 + ], + [ + "conazole", + -14.789155960083008 + ], + [ + "▁Sensational", + -14.789156913757324 + ], + [ + "▁SUITE", + -14.78916358947754 + ], + [ + "pfer", + -14.789189338684082 + ], + [ + "lodi", + -14.78923511505127 + ], + [ + "▁Biotin", + -14.789325714111328 + ], + [ + "▁Gari", + -14.789385795593262 + ], + [ + "▁Hoyt", + -14.78944492340088 + ], + [ + "schle", + -14.789499282836914 + ], + [ + "Investor", + -14.78957748413086 + ], + [ + "▁costlier", + -14.78960418701172 + ], + [ + "▁stile", + -14.78964614868164 + ], + [ + "▁Tarp", + -14.789752960205078 + ], + [ + "▁Refinance", + -14.789827346801758 + ], + [ + "▁Redis", + -14.789836883544922 + ], + [ + "▁Detailing", + -14.78987979888916 + ], + [ + "bacher", + -14.789894104003906 + ], + [ + "▁450,000", + -14.789961814880373 + ], + [ + "▁Langer", + -14.789983749389648 + ], + [ + "▁overeat", + -14.790009498596191 + ], + [ + "▁chalky", 
+ -14.790067672729492 + ], + [ + "▁OBS", + -14.790077209472656 + ], + [ + "lij", + -14.790112495422363 + ], + [ + "amah", + -14.790118217468262 + ], + [ + "Kart", + -14.79016399383545 + ], + [ + "▁Nasty", + -14.790206909179688 + ], + [ + "▁relished", + -14.790206909179688 + ], + [ + "▁reconsidered", + -14.790249824523926 + ], + [ + "hanger", + -14.790356636047363 + ], + [ + "CRP", + -14.790462493896484 + ], + [ + "Witness", + -14.790493965148926 + ], + [ + "▁AMEX", + -14.790521621704102 + ], + [ + "Firefighters", + -14.790610313415527 + ], + [ + "Dispatch", + -14.79063892364502 + ], + [ + "Jupiter", + -14.79064655303955 + ], + [ + "Ugh", + -14.79064655303955 + ], + [ + "Ottawa", + -14.7906494140625 + ], + [ + "Berkeley", + -14.790650367736816 + ], + [ + "Analytics", + -14.79065227508545 + ], + [ + "Transmission", + -14.790655136108398 + ], + [ + "Facilities", + -14.790657043457031 + ], + [ + "recycled", + -14.790660858154297 + ], + [ + "Television", + -14.790664672851562 + ], + [ + "suicide", + -14.79067325592041 + ], + [ + "distressed", + -14.79068374633789 + ], + [ + "▁Veli", + -14.790708541870115 + ], + [ + "▁7-12", + -14.790709495544434 + ], + [ + "Hurry", + -14.790777206420898 + ], + [ + "certificate", + -14.79078483581543 + ], + [ + "quartz", + -14.790791511535645 + ], + [ + "▁Kada", + -14.790807723999023 + ], + [ + "▁1799", + -14.790899276733398 + ], + [ + "▁poi", + -14.790905952453612 + ], + [ + "Monica", + -14.790934562683104 + ], + [ + "▁Ubi", + -14.790955543518066 + ], + [ + "▁Shor", + -14.790960311889648 + ], + [ + "explained", + -14.7910737991333 + ], + [ + "zenith", + -14.791080474853516 + ], + [ + "=20", + -14.791139602661133 + ], + [ + "lucid", + -14.791190147399902 + ], + [ + "▁Langham", + -14.791210174560549 + ], + [ + "textured", + -14.791293144226074 + ], + [ + "▁Mizu", + -14.791299819946287 + ], + [ + "▁crisper", + -14.79139804840088 + ], + [ + "Lowe", + -14.791468620300291 + ], + [ + "Roma", + -14.791525840759276 + ], + [ + "pencil", + -14.79156494140625 + ], + [ + "▁Seme", + -14.791666030883787 + ], + [ + "1.18", + -14.791692733764648 + ], + [ + "Obtaining", + -14.791783332824709 + ], + [ + "KICK", + -14.791804313659668 + ], + [ + "▁Raha", + -14.791829109191896 + ], + [ + "oyer", + -14.791868209838867 + ], + [ + "Broadcast", + -14.79188346862793 + ], + [ + "▁BMG", + -14.79199504852295 + ], + [ + "▁27001", + -14.791998863220217 + ], + [ + "▁PBC", + -14.79201316833496 + ], + [ + "▁1787", + -14.792033195495604 + ], + [ + "Valeria", + -14.792054176330566 + ], + [ + "▁OWNERS", + -14.792078971862791 + ], + [ + "transferase", + -14.792122840881348 + ], + [ + "▁suis", + -14.792183876037598 + ], + [ + "1959", + -14.79223918914795 + ], + [ + "1:46", + -14.792309761047363 + ], + [ + "▁Slime", + -14.792325973510742 + ], + [ + "EIR", + -14.79235553741455 + ], + [ + "felicit", + -14.79237461090088 + ], + [ + "▁GCP", + -14.79244899749756 + ], + [ + "▁Somewhat", + -14.792524337768556 + ], + [ + "ufa", + -14.792580604553224 + ], + [ + "▁Suu", + -14.79260540008545 + ], + [ + "0026", + -14.792685508728027 + ], + [ + "Turk", + -14.79276180267334 + ], + [ + ".06.201", + -14.792767524719238 + ], + [ + "▁Ratan", + -14.792795181274414 + ], + [ + "▁Cram", + -14.792856216430664 + ], + [ + "▁38,000", + -14.792871475219728 + ], + [ + "▁Overcome", + -14.792938232421877 + ], + [ + "▁longhorn", + -14.792961120605469 + ], + [ + "▁gobbled", + -14.792996406555176 + ], + [ + "▁GRC", + -14.793142318725586 + ], + [ + "▁pervert", + -14.793146133422852 + ], + [ + "▁PRIME", + -14.793150901794434 + ], + [ + 
"(11)", + -14.793209075927734 + ], + [ + "▁Guaid", + -14.79322338104248 + ], + [ + "▁McNa", + -14.793274879455566 + ], + [ + "▁Maier", + -14.793282508850098 + ], + [ + "▁Batten", + -14.793292999267578 + ], + [ + "tomato", + -14.793449401855469 + ], + [ + "▁Diplomatic", + -14.793479919433594 + ], + [ + "Roaming", + -14.793567657470703 + ], + [ + "LTP", + -14.793578147888184 + ], + [ + "▁Chuan", + -14.793620109558104 + ], + [ + "migrant", + -14.793753623962402 + ], + [ + "ULIP", + -14.793818473815918 + ], + [ + "ASU", + -14.793821334838867 + ], + [ + "4500", + -14.79389476776123 + ], + [ + "Magick", + -14.79391860961914 + ], + [ + "photograph", + -14.794014930725098 + ], + [ + "relations", + -14.794018745422363 + ], + [ + "▁Agni", + -14.794032096862791 + ], + [ + "▁+10", + -14.794047355651855 + ], + [ + "URT", + -14.794121742248535 + ], + [ + "CJA", + -14.7941312789917 + ], + [ + "▁TKO", + -14.794278144836426 + ], + [ + "spiro", + -14.79432487487793 + ], + [ + "yperpigmentation", + -14.79433536529541 + ], + [ + "▁pollinator", + -14.794341087341309 + ], + [ + "▁Bahr", + -14.794448852539062 + ], + [ + "▁climat", + -14.794498443603516 + ], + [ + "7100", + -14.794554710388184 + ], + [ + "FAN", + -14.79457950592041 + ], + [ + "▁Wetland", + -14.794595718383787 + ], + [ + "Vasil", + -14.794610977172852 + ], + [ + "iyya", + -14.794631958007812 + ], + [ + "cono", + -14.794666290283203 + ], + [ + "▁Atwater", + -14.794672966003418 + ], + [ + "▁Osteo", + -14.794781684875488 + ], + [ + "Romania", + -14.794805526733398 + ], + [ + "▁Heartbreak", + -14.794811248779297 + ], + [ + "▁Peele", + -14.794888496398926 + ], + [ + "slinging", + -14.794916152954102 + ], + [ + "zmi", + -14.794960975646973 + ], + [ + "▁$1,6", + -14.794970512390137 + ], + [ + "Unsure", + -14.795021057128906 + ], + [ + "chler", + -14.795021057128906 + ], + [ + "6-24", + -14.795056343078612 + ], + [ + "malarial", + -14.79509735107422 + ], + [ + "▁SEGA", + -14.795151710510254 + ], + [ + "eswara", + -14.79518985748291 + ], + [ + "▁BCD", + -14.795204162597656 + ], + [ + "6.6%", + -14.795206069946287 + ], + [ + ">]", + -14.795222282409668 + ], + [ + "fung", + -14.795360565185549 + ], + [ + "Thy", + -14.795395851135254 + ], + [ + "Glam", + -14.795437812805176 + ], + [ + "▁whitewash", + -14.795450210571287 + ], + [ + "тн", + -14.795494079589844 + ], + [ + "hockey", + -14.795519828796388 + ], + [ + "▁18:1", + -14.795530319213867 + ], + [ + "▁Pediatr", + -14.795533180236816 + ], + [ + "BIZ", + -14.795592308044434 + ], + [ + "▁Kandi", + -14.795604705810549 + ], + [ + "▁20:0", + -14.79561710357666 + ], + [ + "gonia", + -14.795625686645508 + ], + [ + "▁Bruin", + -14.795693397521973 + ], + [ + "▁glib", + -14.795693397521973 + ], + [ + "medication", + -14.795743942260742 + ], + [ + "▁1798", + -14.795841217041016 + ], + [ + "▁Lasse", + -14.795977592468262 + ], + [ + "icial", + -14.796025276184082 + ], + [ + "Adorn", + -14.796095848083496 + ], + [ + "▁presale", + -14.79611873626709 + ], + [ + "wol", + -14.796207427978516 + ], + [ + "gta", + -14.796234130859377 + ], + [ + "gele", + -14.79632568359375 + ], + [ + "hhhhh", + -14.796499252319336 + ], + [ + "aspi", + -14.796546936035156 + ], + [ + "▁Tutu", + -14.796551704406738 + ], + [ + "urrey", + -14.796576499938965 + ], + [ + "▁COLLECT", + -14.796625137329102 + ], + [ + "▁Hyster", + -14.79670238494873 + ], + [ + "MAIN", + -14.79674243927002 + ], + [ + "▁Bancorp", + -14.79675579071045 + ], + [ + "Deductible", + -14.79679012298584 + ], + [ + "SEPTEMBER", + -14.79679012298584 + ], + [ + "Solskjaer", + 
-14.79679012298584 + ], + [ + "▁Aarhus", + -14.79679012298584 + ], + [ + "▁Compilation", + -14.79679012298584 + ], + [ + "▁Cumberbatch", + -14.79679012298584 + ], + [ + "▁Daffodil", + -14.79679012298584 + ], + [ + "▁Dandenong", + -14.79679012298584 + ], + [ + "▁HOUSTON", + -14.79679012298584 + ], + [ + "▁Incarnation", + -14.79679012298584 + ], + [ + "▁Kilauea", + -14.79679012298584 + ], + [ + "▁Mesmerizing", + -14.79679012298584 + ], + [ + "▁Possibilities", + -14.79679012298584 + ], + [ + "▁Puebla", + -14.79679012298584 + ], + [ + "▁RESERVED", + -14.79679012298584 + ], + [ + "▁Restylane", + -14.79679012298584 + ], + [ + "▁Roscoe", + -14.79679012298584 + ], + [ + "▁Yogyakarta", + -14.79679012298584 + ], + [ + "▁arraigned", + -14.79679012298584 + ], + [ + "▁blasphemy", + -14.79679012298584 + ], + [ + "▁chlorinated", + -14.79679012298584 + ], + [ + "▁ecumenical", + -14.79679012298584 + ], + [ + "▁etcetera", + -14.79679012298584 + ], + [ + "▁exfoliator", + -14.79679012298584 + ], + [ + "▁gravitas", + -14.79679012298584 + ], + [ + "▁intelligible", + -14.79679012298584 + ], + [ + "▁litigating", + -14.79679012298584 + ], + [ + "▁meritorious", + -14.79679012298584 + ], + [ + "▁precipitous", + -14.79679012298584 + ], + [ + "▁Laundering", + -14.796791076660156 + ], + [ + "▁unsubstantiated", + -14.796791076660156 + ], + [ + "Raven", + -14.796792984008787 + ], + [ + "▁etiology", + -14.796792984008787 + ], + [ + "▁Bluestacks", + -14.796793937683104 + ], + [ + "▁arbiter", + -14.796794891357422 + ], + [ + "▁progeny", + -14.796794891357422 + ], + [ + "▁Lacoste", + -14.796798706054688 + ], + [ + "▁SLEEP", + -14.796798706054688 + ], + [ + "▁Ameritrade", + -14.796802520751951 + ], + [ + "▁Islamophobia", + -14.796802520751951 + ], + [ + "▁contravene", + -14.79680347442627 + ], + [ + "▁nanomaterials", + -14.79680347442627 + ], + [ + "▁Acclaim", + -14.796805381774902 + ], + [ + "▁HELOC", + -14.796805381774902 + ], + [ + "▁myrtle", + -14.796805381774902 + ], + [ + "▁festivity", + -14.796807289123535 + ], + [ + "▁girdle", + -14.796813011169434 + ], + [ + "▁Foxconn", + -14.79681396484375 + ], + [ + "▁muffled", + -14.79681396484375 + ], + [ + "▁STUFF", + -14.796814918518066 + ], + [ + "▁Earhart", + -14.7968168258667 + ], + [ + "▁deleterious", + -14.796820640563965 + ], + [ + "▁Sandalwood", + -14.796822547912598 + ], + [ + "▁bidirectional", + -14.796832084655762 + ], + [ + "▁Coolidge", + -14.796833992004396 + ], + [ + "▁Ashcroft", + -14.796839714050291 + ], + [ + "▁Spector", + -14.79684352874756 + ], + [ + "▁kerala", + -14.79684352874756 + ], + [ + "Identified", + -14.796844482421877 + ], + [ + "▁Modernist", + -14.79685115814209 + ], + [ + "▁Revisited", + -14.79685401916504 + ], + [ + "▁Diffuser", + -14.796854972839355 + ], + [ + "▁Cherries", + -14.796862602233888 + ], + [ + "▁Perimeter", + -14.796878814697266 + ], + [ + "▁Hummel", + -14.796882629394531 + ], + [ + "▁Hollister", + -14.79688549041748 + ], + [ + "▁misspelling", + -14.796887397766112 + ], + [ + "▁Elective", + -14.796890258789062 + ], + [ + "▁Puig", + -14.796897888183594 + ], + [ + "lethal", + -14.796904563903809 + ], + [ + "▁mullet", + -14.796913146972656 + ], + [ + "▁Integrator", + -14.796929359436035 + ], + [ + "▁WebRTC", + -14.796939849853516 + ], + [ + "oosh", + -14.796950340270996 + ], + [ + "▁FOMC", + -14.796957969665527 + ], + [ + "▁Lupe", + -14.796961784362791 + ], + [ + "▁PetSmart", + -14.796969413757324 + ], + [ + "knacks", + -14.79697036743164 + ], + [ + "▁OpenSSL", + -14.796990394592283 + ], + [ + "▁decontamination", + -14.796992301940918 + 
], + [ + "▁Jojo", + -14.79701805114746 + ], + [ + "Wesley", + -14.797022819519045 + ], + [ + "Argument", + -14.797026634216309 + ], + [ + "▁retracement", + -14.797026634216309 + ], + [ + "▁NATURE", + -14.797036170959473 + ], + [ + "▁referrer", + -14.797039031982422 + ], + [ + "▁cubby", + -14.7970609664917 + ], + [ + "▁blindfolded", + -14.797063827514648 + ], + [ + "▁Pinball", + -14.79706573486328 + ], + [ + "▁1770", + -14.797067642211914 + ], + [ + "▁solvency", + -14.79708480834961 + ], + [ + "1933", + -14.797115325927734 + ], + [ + "▁nonviolence", + -14.797118186950684 + ], + [ + "▁Dawes", + -14.797126770019531 + ], + [ + "▁untitled", + -14.797151565551758 + ], + [ + "NSS", + -14.797164916992188 + ], + [ + "▁sierra", + -14.797168731689451 + ], + [ + "▁Seamus", + -14.797174453735352 + ], + [ + "▁Rocco", + -14.79718780517578 + ], + [ + "▁Throwback", + -14.797196388244627 + ], + [ + "Furnish", + -14.797199249267578 + ], + [ + "▁apologist", + -14.797231674194336 + ], + [ + "▁Clough", + -14.79723834991455 + ], + [ + "▁regressive", + -14.797250747680664 + ], + [ + "▁Nawa", + -14.79727840423584 + ], + [ + "fade", + -14.797411918640137 + ], + [ + "▁drooping", + -14.797412872314451 + ], + [ + "▁Lifeguard", + -14.797445297241213 + ], + [ + "▁villainous", + -14.797450065612791 + ], + [ + "▁LSAT", + -14.797452926635742 + ], + [ + "▁conspicuously", + -14.797475814819336 + ], + [ + "▁consigned", + -14.797539710998535 + ], + [ + "▁12.5%", + -14.797593116760254 + ], + [ + "▁busily", + -14.79762363433838 + ], + [ + "ROAD", + -14.797685623168944 + ], + [ + "industrie", + -14.797690391540527 + ], + [ + "▁disinfecting", + -14.797696113586426 + ], + [ + "▁colonized", + -14.79773235321045 + ], + [ + "▁hunched", + -14.797762870788574 + ], + [ + "Colon", + -14.79776668548584 + ], + [ + "unapologetically", + -14.797836303710938 + ], + [ + "▁Jive", + -14.7978515625 + ], + [ + "LTC", + -14.797869682312012 + ], + [ + "Crow", + -14.797896385192873 + ], + [ + "trendiest", + -14.797918319702148 + ], + [ + "▁Trapping", + -14.797966957092283 + ], + [ + "▁Tipping", + -14.79805850982666 + ], + [ + "▁swooning", + -14.798075675964355 + ], + [ + "▁OAS", + -14.798076629638672 + ], + [ + "▁hardcopy", + -14.798081398010254 + ], + [ + "▁Newburgh", + -14.798151969909668 + ], + [ + "▁BTT", + -14.798163414001465 + ], + [ + "▁decomposed", + -14.798165321350098 + ], + [ + "goat", + -14.79820156097412 + ], + [ + "▁Translated", + -14.79823112487793 + ], + [ + "▁loci", + -14.798254013061523 + ], + [ + "▁grazed", + -14.798277854919434 + ], + [ + "▁Fatah", + -14.798291206359863 + ], + [ + "▁EMDR", + -14.798293113708496 + ], + [ + "3166", + -14.798306465148926 + ], + [ + "▁Shakespearean", + -14.798325538635254 + ], + [ + "▁Galley", + -14.79834270477295 + ], + [ + "scribbled", + -14.798349380493164 + ], + [ + "fex", + -14.798373222351074 + ], + [ + "▁Dries", + -14.798397064208984 + ], + [ + "Toast", + -14.798501014709473 + ], + [ + "▁06:4", + -14.798527717590332 + ], + [ + "▁proportionally", + -14.798639297485352 + ], + [ + "▁reconstructing", + -14.798653602600098 + ], + [ + "▁tiptoe", + -14.79865837097168 + ], + [ + "▁16:00", + -14.798680305480955 + ], + [ + "ratna", + -14.798715591430664 + ], + [ + "▁Woe", + -14.798782348632812 + ], + [ + "encephalo", + -14.798816680908203 + ], + [ + "–14", + -14.798836708068848 + ], + [ + "▁Oba", + -14.798895835876465 + ], + [ + "pounder", + -14.798901557922363 + ], + [ + "66%", + -14.798961639404297 + ], + [ + "kuma", + -14.79896354675293 + ], + [ + "6300", + -14.798969268798828 + ], + [ + "▁Lawler", + 
-14.798993110656738 + ], + [ + "▁ECN", + -14.799077033996582 + ], + [ + "éné", + -14.7992525100708 + ], + [ + "ltima", + -14.79925537109375 + ], + [ + "BPA", + -14.799260139465332 + ], + [ + "▁gasped", + -14.799275398254396 + ], + [ + "▁harry", + -14.79929542541504 + ], + [ + "▁Starlight", + -14.799342155456545 + ], + [ + "▁inheriting", + -14.799349784851074 + ], + [ + "▁opportunist", + -14.79936408996582 + ], + [ + "Stripe", + -14.79936695098877 + ], + [ + "oula", + -14.799371719360352 + ], + [ + "▁hampering", + -14.79939079284668 + ], + [ + "vola", + -14.79952621459961 + ], + [ + "▁flicked", + -14.79954433441162 + ], + [ + "yoko", + -14.799580574035645 + ], + [ + "SFM", + -14.799599647521973 + ], + [ + "▁8/10", + -14.799633979797363 + ], + [ + "Assemble", + -14.799654960632324 + ], + [ + "▁deducting", + -14.799736976623535 + ], + [ + "immediate", + -14.799757957458496 + ], + [ + "▁Pik", + -14.799851417541504 + ], + [ + "jiu", + -14.799877166748049 + ], + [ + "▁dann", + -14.799894332885742 + ], + [ + "ount", + -14.799901008605955 + ], + [ + "Synopsis", + -14.799939155578612 + ], + [ + "Worst", + -14.79994297027588 + ], + [ + "pharmacology", + -14.79994297027588 + ], + [ + "▁augmenting", + -14.799954414367676 + ], + [ + "PubMed", + -14.799956321716309 + ], + [ + "▁Amma", + -14.799960136413574 + ], + [ + "fundamental", + -14.799971580505373 + ], + [ + "Appendix", + -14.79997444152832 + ], + [ + "PPE", + -14.79999542236328 + ], + [ + "corrosion", + -14.799997329711914 + ], + [ + "Therapy", + -14.800003051757812 + ], + [ + "Argentina", + -14.800004005432127 + ], + [ + "platinum", + -14.800010681152344 + ], + [ + "Venice", + -14.800028800964355 + ], + [ + "Denmark", + -14.800031661987305 + ], + [ + "excited", + -14.800044059753418 + ], + [ + "Awareness", + -14.800082206726074 + ], + [ + "▁Spiro", + -14.800088882446287 + ], + [ + "inspection", + -14.800110816955566 + ], + [ + "▁BUYER", + -14.800127983093262 + ], + [ + "wicket", + -14.800153732299805 + ], + [ + "Upcoming", + -14.800156593322754 + ], + [ + "Agreement", + -14.800182342529297 + ], + [ + "▁**********", + -14.800259590148926 + ], + [ + "▁Rahim", + -14.80030918121338 + ], + [ + "▁Romo", + -14.800376892089844 + ], + [ + "Appointment", + -14.800421714782717 + ], + [ + "▁wrest", + -14.800430297851562 + ], + [ + "▁\"1\"", + -14.800454139709473 + ], + [ + "▁VPC", + -14.800464630126951 + ], + [ + "▁Arai", + -14.800498008728027 + ], + [ + "▁Absolut", + -14.800637245178224 + ], + [ + "breast", + -14.800773620605469 + ], + [ + "TRAC", + -14.800775527954102 + ], + [ + "▁Baht", + -14.800801277160645 + ], + [ + "Shooting", + -14.800899505615234 + ], + [ + "kino", + -14.800946235656738 + ], + [ + "▁athens", + -14.800984382629396 + ], + [ + "noble", + -14.801193237304688 + ], + [ + "Servlet", + -14.801226615905762 + ], + [ + "▁mashing", + -14.80125331878662 + ], + [ + "▁Stott", + -14.801286697387695 + ], + [ + "underappreciated", + -14.801305770874023 + ], + [ + "Expected", + -14.801349639892578 + ], + [ + "▁1811", + -14.801362037658691 + ], + [ + "▁mig", + -14.801441192626951 + ], + [ + "▁Kalan", + -14.801478385925291 + ], + [ + "errett", + -14.801533699035645 + ], + [ + "▁bengal", + -14.80168628692627 + ], + [ + "▁Laver", + -14.801700592041016 + ], + [ + "verk", + -14.80170726776123 + ], + [ + "▁£19", + -14.801761627197266 + ], + [ + "Brig", + -14.80177879333496 + ], + [ + "Passive", + -14.801806449890137 + ], + [ + "▁buttress", + -14.801807403564451 + ], + [ + "Busy", + -14.801824569702148 + ], + [ + "▁Sealer", + -14.80194091796875 + ], + [ + 
"▁FAU", + -14.801998138427734 + ], + [ + "▁CIV", + -14.802119255065918 + ], + [ + "▁GSE", + -14.802123069763184 + ], + [ + "Renting", + -14.802315711975098 + ], + [ + "▁14.4", + -14.802325248718262 + ], + [ + "photographer", + -14.802328109741213 + ], + [ + "checker", + -14.80238437652588 + ], + [ + "7.3%", + -14.80247402191162 + ], + [ + "▁Kabir", + -14.80255889892578 + ], + [ + "▁greenback", + -14.80258846282959 + ], + [ + "▁CAFE", + -14.80263900756836 + ], + [ + "plasmic", + -14.802738189697266 + ], + [ + "MPH", + -14.802762985229492 + ], + [ + "throated", + -14.802785873413086 + ], + [ + "▁muni", + -14.802871704101562 + ], + [ + "▁administrate", + -14.80288314819336 + ], + [ + "▁dla", + -14.803030967712402 + ], + [ + "▁normality", + -14.803046226501465 + ], + [ + "▁songbook", + -14.803081512451172 + ], + [ + "233;", + -14.803324699401855 + ], + [ + "▁DVC", + -14.803339004516602 + ], + [ + "▁bpa", + -14.803390502929688 + ], + [ + "▁forgetful", + -14.80341625213623 + ], + [ + "▁Klee", + -14.803424835205078 + ], + [ + "forecast", + -14.80343246459961 + ], + [ + "8.8%", + -14.803486824035645 + ], + [ + "▁grommet", + -14.803564071655272 + ], + [ + "▁scallion", + -14.803576469421388 + ], + [ + "▁Bilder", + -14.80368423461914 + ], + [ + "BROAD", + -14.803704261779783 + ], + [ + "▁radiologist", + -14.803762435913086 + ], + [ + "▁1819", + -14.803775787353516 + ], + [ + "▁Keil", + -14.803784370422363 + ], + [ + "Remov", + -14.80386734008789 + ], + [ + "Gaz", + -14.803948402404783 + ], + [ + "▁knitter", + -14.803991317749023 + ], + [ + "Chester", + -14.80410099029541 + ], + [ + "▁Psyche", + -14.80438232421875 + ], + [ + "gada", + -14.804545402526855 + ], + [ + "SSB", + -14.804610252380373 + ], + [ + "8.75", + -14.804622650146484 + ], + [ + "insight", + -14.804659843444824 + ], + [ + "Continued", + -14.804682731628418 + ], + [ + "▁stagger", + -14.804744720458984 + ], + [ + "▁Brahman", + -14.804969787597656 + ], + [ + "▁$249", + -14.805234909057615 + ], + [ + "bravo", + -14.805328369140623 + ], + [ + "▁downwind", + -14.805429458618164 + ], + [ + "RAMP", + -14.805448532104492 + ], + [ + "BWA", + -14.805502891540527 + ], + [ + "▁Portia", + -14.805611610412598 + ], + [ + "zey", + -14.805620193481444 + ], + [ + "SBURG", + -14.805630683898926 + ], + [ + "bajo", + -14.805824279785156 + ], + [ + "▁Indirect", + -14.80591869354248 + ], + [ + "▁Rainy", + -14.805927276611328 + ], + [ + "▁cobra", + -14.805937767028809 + ], + [ + "ENVIRONMENT", + -14.805963516235352 + ], + [ + "▁Attenborough", + -14.805963516235352 + ], + [ + "▁Balenciaga", + -14.805963516235352 + ], + [ + "▁Diffusion", + -14.805963516235352 + ], + [ + "▁Trivandrum", + -14.805963516235352 + ], + [ + "▁admonition", + -14.805963516235352 + ], + [ + "▁baptised", + -14.805963516235352 + ], + [ + "▁deplorable", + -14.805963516235352 + ], + [ + "▁propriety", + -14.805963516235352 + ], + [ + "▁toboggan", + -14.805963516235352 + ], + [ + "McPhee", + -14.805964469909668 + ], + [ + "NetworkExposure", + -14.805964469909668 + ], + [ + "Rhubarb", + -14.805964469909668 + ], + [ + "▁Deficit", + -14.805964469909668 + ], + [ + "▁ENGLISH", + -14.805964469909668 + ], + [ + "▁HoloLens", + -14.805964469909668 + ], + [ + "▁Occidental", + -14.805964469909668 + ], + [ + "▁Woodpecker", + -14.805964469909668 + ], + [ + "▁colostrum", + -14.805964469909668 + ], + [ + "▁exoskeleton", + -14.805964469909668 + ], + [ + "▁gabapentin", + -14.805964469909668 + ], + [ + "▁immunisation", + -14.805964469909668 + ], + [ + "▁multiplies", + -14.805964469909668 + ], + [ + "▁nebulizer", 
+ -14.805964469909668 + ], + [ + "▁ungodly", + -14.805964469909668 + ], + [ + "▁Rafflecopter", + -14.805965423583984 + ], + [ + "▁fluorite", + -14.805965423583984 + ], + [ + "▁FACEBOOK", + -14.8059663772583 + ], + [ + "▁numerology", + -14.8059663772583 + ], + [ + "▁Caviar", + -14.805967330932615 + ], + [ + "▁Cheetah", + -14.805967330932615 + ], + [ + "▁Ellicott", + -14.805968284606934 + ], + [ + "▁Barbeque", + -14.80596923828125 + ], + [ + "▁Chantilly", + -14.805970191955566 + ], + [ + "▁Debussy", + -14.805970191955566 + ], + [ + "▁Oshawa", + -14.805970191955566 + ], + [ + "▁anamorphic", + -14.805970191955566 + ], + [ + "▁gallstones", + -14.805970191955566 + ], + [ + "▁terrazzo", + -14.805970191955566 + ], + [ + "▁perinatal", + -14.8059720993042 + ], + [ + "▁Furnished", + -14.805973052978516 + ], + [ + "▁implode", + -14.805973052978516 + ], + [ + "▁kidnappers", + -14.805975914001465 + ], + [ + "▁forethought", + -14.805983543395996 + ], + [ + "▁continuance", + -14.805985450744627 + ], + [ + "▁bosom", + -14.805988311767578 + ], + [ + "▁Teradata", + -14.805989265441896 + ], + [ + "▁Barbuda", + -14.805990219116213 + ], + [ + "▁gawk", + -14.805991172790527 + ], + [ + "▁Convocation", + -14.805992126464844 + ], + [ + "▁torturing", + -14.805992126464844 + ], + [ + "▁TiVo", + -14.80599308013916 + ], + [ + "▁trapeze", + -14.80599308013916 + ], + [ + "▁zijn", + -14.805994033813477 + ], + [ + "▁Gaucho", + -14.805994987487791 + ], + [ + "▁collation", + -14.805994987487791 + ], + [ + "▁convocation", + -14.805994987487791 + ], + [ + "▁jiffy", + -14.805994987487791 + ], + [ + "▁(1978)", + -14.806002616882324 + ], + [ + "▁rabbinic", + -14.80600357055664 + ], + [ + "▁3/4′′", + -14.806004524230955 + ], + [ + "▁MCQ", + -14.80600643157959 + ], + [ + "▁Bedspread", + -14.806012153625488 + ], + [ + "▁Celery", + -14.80601692199707 + ], + [ + "▁Peckham", + -14.806017875671388 + ], + [ + "▁ravenous", + -14.806021690368652 + ], + [ + "▁featherweight", + -14.806022644042969 + ], + [ + "▁Excise", + -14.806023597717283 + ], + [ + "▁Yvette", + -14.806023597717283 + ], + [ + "▁microclimate", + -14.806028366088867 + ], + [ + "▁chapped", + -14.8060302734375 + ], + [ + "▁Multiplication", + -14.806035041809082 + ], + [ + "▁Conviction", + -14.806037902832031 + ], + [ + "▁Bourke", + -14.806042671203612 + ], + [ + "▁placemats", + -14.806048393249512 + ], + [ + "Spongebob", + -14.806049346923828 + ], + [ + "▁cronies", + -14.80605697631836 + ], + [ + "▁modded", + -14.806059837341309 + ], + [ + "▁Dassault", + -14.806066513061523 + ], + [ + "▁firecracker", + -14.80607795715332 + ], + [ + "▁Neeson", + -14.806079864501951 + ], + [ + "▁corbel", + -14.806085586547852 + ], + [ + "▁Snowmobile", + -14.806089401245115 + ], + [ + "▁Brickell", + -14.806092262268066 + ], + [ + "▁Pilgrimage", + -14.806095123291016 + ], + [ + "▁immobile", + -14.806100845336914 + ], + [ + "▁tinned", + -14.806105613708496 + ], + [ + "▁granularity", + -14.806109428405762 + ], + [ + "▁Cosby", + -14.806111335754396 + ], + [ + "▁Hassle", + -14.806111335754396 + ], + [ + "▁06:5", + -14.80611801147461 + ], + [ + "▁Bastian", + -14.806154251098633 + ], + [ + "▁calluses", + -14.806172370910645 + ], + [ + "ecco", + -14.806182861328123 + ], + [ + "▁Cassette", + -14.806185722351074 + ], + [ + "▁slapstick", + -14.806188583374023 + ], + [ + "▁Brecon", + -14.806190490722656 + ], + [ + "▁furlongs", + -14.806206703186035 + ], + [ + "▁Pae", + -14.806211471557615 + ], + [ + "▁Disciples", + -14.806241989135742 + ], + [ + "▁Mangan", + -14.80625057220459 + ], + [ + "▁Longines", + 
-14.80625820159912 + ], + [ + "▁Screenplay", + -14.806259155273438 + ], + [ + "▁batik", + -14.806260108947754 + ], + [ + "chondri", + -14.806273460388184 + ], + [ + "▁Plough", + -14.80627727508545 + ], + [ + "▁McDavid", + -14.80630111694336 + ], + [ + "▁skilfully", + -14.806310653686523 + ], + [ + "▁Ovation", + -14.806325912475586 + ], + [ + "Taxpayers", + -14.806326866149902 + ], + [ + "▁Guilty", + -14.806333541870115 + ], + [ + "▁Dowling", + -14.806339263916016 + ], + [ + "▁Kilda", + -14.806353569030762 + ], + [ + "▁TomTom", + -14.806354522705078 + ], + [ + "▁Mfg", + -14.806356430053713 + ], + [ + "▁concocted", + -14.806373596191406 + ], + [ + "unter", + -14.806378364562988 + ], + [ + "▁Allaah", + -14.806388854980469 + ], + [ + "▁Uranium", + -14.806441307067873 + ], + [ + "▁Burnout", + -14.80644989013672 + ], + [ + "5.9%", + -14.806485176086426 + ], + [ + "▁wettest", + -14.806517601013184 + ], + [ + "▁skiff", + -14.806548118591309 + ], + [ + "▁Atiku", + -14.80661392211914 + ], + [ + "▁Gruber", + -14.80666446685791 + ], + [ + "▁DPD", + -14.806668281555176 + ], + [ + "▁FERC", + -14.806675910949709 + ], + [ + "▁silverado", + -14.806719779968262 + ], + [ + "▁McClain", + -14.806756019592283 + ], + [ + "Butler", + -14.8067626953125 + ], + [ + "▁Rhymes", + -14.806777000427246 + ], + [ + "▁Edwardsville", + -14.80677890777588 + ], + [ + "BREAKING", + -14.80681610107422 + ], + [ + "▁Entryway", + -14.8068208694458 + ], + [ + "courage", + -14.806910514831545 + ], + [ + "▁rangefinder", + -14.806915283203123 + ], + [ + "▁fanned", + -14.806926727294922 + ], + [ + "quita", + -14.806927680969238 + ], + [ + "▁reappeared", + -14.806962013244627 + ], + [ + "▁urbanisation", + -14.806987762451172 + ], + [ + "▁augusta", + -14.80700397491455 + ], + [ + "▁Medway", + -14.80705738067627 + ], + [ + "▁Vinay", + -14.807069778442385 + ], + [ + "▁proviso", + -14.807095527648926 + ], + [ + "▁Morty", + -14.807098388671877 + ], + [ + "imum", + -14.807106971740724 + ], + [ + "▁rideshare", + -14.80712890625 + ], + [ + "▁Installment", + -14.80715274810791 + ], + [ + "▁Weighted", + -14.80719757080078 + ], + [ + "Proceed", + -14.80724811553955 + ], + [ + "▁SNES", + -14.80726146697998 + ], + [ + "▁KKR", + -14.80727481842041 + ], + [ + "▁Nerf", + -14.807308197021484 + ], + [ + "▁contradicting", + -14.807320594787598 + ], + [ + "▁Catfish", + -14.80734634399414 + ], + [ + "Agra", + -14.807428359985352 + ], + [ + "▁Tourette", + -14.807513236999512 + ], + [ + "▁confessional", + -14.807523727416992 + ], + [ + "▁$7,500", + -14.807594299316406 + ], + [ + "0.0.0", + -14.807698249816896 + ], + [ + "lowski", + -14.807751655578612 + ], + [ + "▁Klang", + -14.807757377624512 + ], + [ + "ERK", + -14.807825088500977 + ], + [ + "▁ladybug", + -14.807830810546877 + ], + [ + "▁Fright", + -14.80788230895996 + ], + [ + "▁Guideline", + -14.807943344116213 + ], + [ + "▁melding", + -14.80796718597412 + ], + [ + "▁Ajit", + -14.80804443359375 + ], + [ + "▁factually", + -14.808049201965332 + ], + [ + "ettler", + -14.80806827545166 + ], + [ + "▁Ril", + -14.80811882019043 + ], + [ + "▁Unlocked", + -14.808175086975098 + ], + [ + "chana", + -14.80817699432373 + ], + [ + "▁sportier", + -14.808252334594728 + ], + [ + "2090", + -14.808277130126951 + ], + [ + "▁PDS", + -14.808343887329102 + ], + [ + "▁loudness", + -14.808406829833984 + ], + [ + "▁computationally", + -14.8085355758667 + ], + [ + "▁hydroxyl", + -14.808537483215332 + ], + [ + "Pinterest", + -14.8086519241333 + ], + [ + "▁conjoin", + -14.808727264404297 + ], + [ + "Nickel", + -14.808772087097168 + ], 
+ [ + "▁NFV", + -14.808789253234863 + ], + [ + "▁MEMBER", + -14.808856010437012 + ], + [ + "nelson", + -14.808930397033691 + ], + [ + "▁gainful", + -14.80905055999756 + ], + [ + "ETC", + -14.809106826782228 + ], + [ + "0.03", + -14.809128761291504 + ], + [ + "BUILD", + -14.80937957763672 + ], + [ + "Placing", + -14.809402465820312 + ], + [ + "Investigation", + -14.809409141540527 + ], + [ + "intellectual", + -14.809409141540527 + ], + [ + "soprano", + -14.80941104888916 + ], + [ + "Exciting", + -14.809412956237791 + ], + [ + "Vacuum", + -14.809417724609377 + ], + [ + "administration", + -14.809423446655272 + ], + [ + "Retirement", + -14.809425354003906 + ], + [ + "Hyundai", + -14.809428215026855 + ], + [ + "Chemistry", + -14.809429168701172 + ], + [ + "Platinum", + -14.809429168701172 + ], + [ + "reinforced", + -14.809431076049805 + ], + [ + "PayPal", + -14.809454917907717 + ], + [ + "analyze", + -14.809477806091309 + ], + [ + "Assistance", + -14.809486389160156 + ], + [ + "Almond", + -14.80949878692627 + ], + [ + "folio", + -14.809510231018066 + ], + [ + "Prison", + -14.809518814086914 + ], + [ + "▁Hoc", + -14.80953598022461 + ], + [ + "▁hitched", + -14.809545516967772 + ], + [ + "Montana", + -14.809565544128418 + ], + [ + "▁noob", + -14.809585571289062 + ], + [ + "LOOK", + -14.80960464477539 + ], + [ + "▁***********", + -14.809606552124023 + ], + [ + "▁armless", + -14.809651374816896 + ], + [ + "healthcare", + -14.80966854095459 + ], + [ + "wip", + -14.809691429138184 + ], + [ + "issima", + -14.809769630432127 + ], + [ + "▁Fungi", + -14.809785842895508 + ], + [ + "Percent", + -14.809789657592772 + ], + [ + "pril", + -14.809877395629885 + ], + [ + "▁emulated", + -14.80993366241455 + ], + [ + "▁Thrust", + -14.809940338134766 + ], + [ + "OCR", + -14.810002326965332 + ], + [ + "chou", + -14.810129165649414 + ], + [ + "Wiring", + -14.810137748718262 + ], + [ + "hump", + -14.810142517089844 + ], + [ + "Witch", + -14.810144424438477 + ], + [ + "kaku", + -14.810277938842772 + ], + [ + "▁RNC", + -14.81035614013672 + ], + [ + "Agency", + -14.810443878173828 + ], + [ + "Exclude", + -14.810528755187988 + ], + [ + "SHOOT", + -14.810534477233888 + ], + [ + "▁woodblock", + -14.810566902160645 + ], + [ + "rried", + -14.810580253601074 + ], + [ + "hydrous", + -14.810616493225098 + ], + [ + "▁teary", + -14.81062126159668 + ], + [ + "▁locust", + -14.810623168945312 + ], + [ + "▁mixologist", + -14.810670852661133 + ], + [ + "Linked", + -14.810676574707031 + ], + [ + "Pumpkin", + -14.810725212097168 + ], + [ + "NVIT", + -14.810758590698242 + ], + [ + "▁hawker", + -14.810856819152832 + ], + [ + "▁reeled", + -14.810909271240234 + ], + [ + "Permit", + -14.810941696166992 + ], + [ + "ucky", + -14.811057090759276 + ], + [ + "▁legitimize", + -14.811089515686035 + ], + [ + "Groove", + -14.811105728149414 + ], + [ + "▁fingerprinting", + -14.811139106750488 + ], + [ + "RDO", + -14.811166763305664 + ], + [ + "FDC", + -14.811184883117676 + ], + [ + "accent", + -14.811198234558104 + ], + [ + "▁vw", + -14.811288833618164 + ], + [ + "VINE", + -14.81131076812744 + ], + [ + "Reminder", + -14.811318397521973 + ], + [ + "▁Reiner", + -14.81135368347168 + ], + [ + "▁Venmo", + -14.811375617980955 + ], + [ + "narrow", + -14.811481475830078 + ], + [ + "looked", + -14.81152057647705 + ], + [ + "▁ppg", + -14.811617851257324 + ], + [ + "▁NZD", + -14.811638832092283 + ], + [ + "Azure", + -14.8116455078125 + ], + [ + "Shame", + -14.811677932739258 + ], + [ + "pap", + -14.811741828918455 + ], + [ + "▁Sahi", + -14.811798095703123 + ], + 
[ + "Addo", + -14.811800956726074 + ], + [ + "ragg", + -14.811870574951172 + ], + [ + "▁interning", + -14.81196117401123 + ], + [ + "louis", + -14.811982154846191 + ], + [ + "▁Popper", + -14.812034606933594 + ], + [ + "Habit", + -14.812058448791504 + ], + [ + "▁Nubia", + -14.812073707580566 + ], + [ + "1921", + -14.812195777893066 + ], + [ + "Focused", + -14.81225872039795 + ], + [ + "ouz", + -14.812260627746582 + ], + [ + "▁Taper", + -14.812347412109377 + ], + [ + "Crafted", + -14.812371253967283 + ], + [ + "▁Kimber", + -14.81242561340332 + ], + [ + "▁Leek", + -14.812482833862305 + ], + [ + "Accu", + -14.812535285949709 + ], + [ + "Offset", + -14.812579154968262 + ], + [ + "▁Lenz", + -14.812606811523438 + ], + [ + "ahoo", + -14.812607765197754 + ], + [ + "EZE", + -14.812724113464355 + ], + [ + "conci", + -14.812782287597656 + ], + [ + "europe", + -14.812783241271973 + ], + [ + "raba", + -14.812807083129885 + ], + [ + "▁eReader", + -14.812860488891602 + ], + [ + "▁vulture", + -14.812906265258787 + ], + [ + "▁cultivar", + -14.812920570373535 + ], + [ + "▁Anselm", + -14.812959671020508 + ], + [ + "▁Oden", + -14.81299877166748 + ], + [ + "entral", + -14.813047409057615 + ], + [ + "▁PPR", + -14.8130521774292 + ], + [ + "kona", + -14.813077926635742 + ], + [ + "▁Shrub", + -14.81307888031006 + ], + [ + "stabili", + -14.813097953796388 + ], + [ + "▁Raglan", + -14.81311321258545 + ], + [ + "RAW", + -14.813119888305664 + ], + [ + "halia", + -14.813227653503418 + ], + [ + "TIG", + -14.813251495361328 + ], + [ + "▁Horan", + -14.813430786132812 + ], + [ + "hendi", + -14.813566207885742 + ], + [ + "FBI", + -14.813812255859377 + ], + [ + "▁DDA", + -14.813888549804688 + ], + [ + "▁reprogram", + -14.813937187194824 + ], + [ + "▁unapologetic", + -14.814173698425291 + ], + [ + "TET", + -14.814281463623049 + ], + [ + "PHR", + -14.814348220825195 + ], + [ + "traveling", + -14.8143892288208 + ], + [ + "▁lamina", + -14.814395904541016 + ], + [ + "rval", + -14.814443588256836 + ], + [ + "corporation", + -14.81445026397705 + ], + [ + "CFT", + -14.81448745727539 + ], + [ + "▁Warri", + -14.814491271972656 + ], + [ + "▁Reina", + -14.81452465057373 + ], + [ + "anthi", + -14.814535140991213 + ], + [ + "ocarp", + -14.814539909362791 + ], + [ + "1923", + -14.814580917358398 + ], + [ + "▁14:5", + -14.814593315124512 + ], + [ + "REASON", + -14.814659118652344 + ], + [ + "▁frill", + -14.81476593017578 + ], + [ + "▁Posi", + -14.81484603881836 + ], + [ + "Flame", + -14.814888954162598 + ], + [ + "crine", + -14.814891815185549 + ], + [ + "Disciple", + -14.814940452575684 + ], + [ + "▁Lue", + -14.814955711364746 + ], + [ + "RFC", + -14.81503677368164 + ], + [ + "▁Hibernate", + -14.81522274017334 + ], + [ + "▁dynasties", + -14.81522274017334 + ], + [ + "▁undrafted", + -14.81522274017334 + ], + [ + "Dreyfus", + -14.815223693847656 + ], + [ + "Excitement", + -14.815223693847656 + ], + [ + "FEBRUARY", + -14.815223693847656 + ], + [ + "manhattan", + -14.815223693847656 + ], + [ + "methionine", + -14.815223693847656 + ], + [ + "▁888-353-1299", + -14.815223693847656 + ], + [ + "▁Abundant", + -14.815223693847656 + ], + [ + "▁Aphrodite", + -14.815223693847656 + ], + [ + "▁Avenida", + -14.815223693847656 + ], + [ + "▁Chaudhary", + -14.815223693847656 + ], + [ + "▁Crucifix", + -14.815223693847656 + ], + [ + "▁DeGeneres", + -14.815223693847656 + ], + [ + "▁Limassol", + -14.815223693847656 + ], + [ + "▁OTHERWISE", + -14.815223693847656 + ], + [ + "▁Paducah", + -14.815223693847656 + ], + [ + "▁Riccardo", + -14.815223693847656 + ], + [ + 
"▁Schenectady", + -14.815223693847656 + ], + [ + "▁Sorcerer", + -14.815223693847656 + ], + [ + "▁Valhalla", + -14.815223693847656 + ], + [ + "▁Varieties", + -14.815223693847656 + ], + [ + "▁Watanabe", + -14.815223693847656 + ], + [ + "▁accoutrement", + -14.815223693847656 + ], + [ + "▁clomiphene", + -14.815223693847656 + ], + [ + "▁compositing", + -14.815223693847656 + ], + [ + "▁contagion", + -14.815223693847656 + ], + [ + "▁convulsions", + -14.815223693847656 + ], + [ + "▁delegating", + -14.815223693847656 + ], + [ + "▁fertilizing", + -14.815223693847656 + ], + [ + "▁foreboding", + -14.815223693847656 + ], + [ + "▁gecko", + -14.815223693847656 + ], + [ + "▁gynecology", + -14.815223693847656 + ], + [ + "▁microbiota", + -14.815223693847656 + ], + [ + "▁phytoplankton", + -14.815223693847656 + ], + [ + "▁pranayama", + -14.815223693847656 + ], + [ + "▁tuesday", + -14.815223693847656 + ], + [ + "▁xenophobic", + -14.815223693847656 + ], + [ + "Tremendous", + -14.815224647521973 + ], + [ + "▁Kontakt", + -14.815224647521973 + ], + [ + "▁ameliorate", + -14.815224647521973 + ], + [ + "▁Gronkowski", + -14.815225601196287 + ], + [ + "▁enumeration", + -14.815225601196287 + ], + [ + "Terrestrial", + -14.815227508544922 + ], + [ + "▁Amadeus", + -14.815227508544922 + ], + [ + "▁Ainsworth", + -14.815229415893556 + ], + [ + "▁Harrods", + -14.815229415893556 + ], + [ + "▁Hootsuite", + -14.815231323242188 + ], + [ + "▁Twilio", + -14.815232276916504 + ], + [ + "▁Malbec", + -14.815237045288086 + ], + [ + "▁cranny", + -14.815237045288086 + ], + [ + "▁leapfrog", + -14.815237998962402 + ], + [ + "▁Cladding", + -14.81523895263672 + ], + [ + "▁slugging", + -14.81523895263672 + ], + [ + "▁Antimicrobial", + -14.815242767333984 + ], + [ + "Migraine", + -14.8152437210083 + ], + [ + "▁Quorum", + -14.815245628356934 + ], + [ + "▁Sentosa", + -14.815245628356934 + ], + [ + "▁Gwynn", + -14.815253257751465 + ], + [ + "▁nasties", + -14.815255165100098 + ], + [ + "▁Nicosia", + -14.815258026123049 + ], + [ + "▁Witchcraft", + -14.81527328491211 + ], + [ + "▁mauve", + -14.815274238586426 + ], + [ + "▁Doughty", + -14.815285682678224 + ], + [ + "▁Ararat", + -14.815288543701172 + ], + [ + "▁Montauk", + -14.815296173095703 + ], + [ + "▁Cushman", + -14.815303802490234 + ], + [ + "▁Enugu", + -14.815308570861816 + ], + [ + "TARGET", + -14.815311431884766 + ], + [ + "▁Ambition", + -14.81531810760498 + ], + [ + "exact", + -14.81533432006836 + ], + [ + "▁Synology", + -14.815337181091309 + ], + [ + "▁nudging", + -14.815340995788574 + ], + [ + "▁Kiki", + -14.815342903137209 + ], + [ + "▁unscented", + -14.81534481048584 + ], + [ + "▁Wilkie", + -14.815345764160156 + ], + [ + "▁crawlspace", + -14.815349578857422 + ], + [ + "▁criticising", + -14.815350532531738 + ], + [ + "▁unframed", + -14.81536102294922 + ], + [ + "▁Larsson", + -14.815361976623535 + ], + [ + "▁PhenQ", + -14.815380096435549 + ], + [ + "▁Lionsgate", + -14.81538200378418 + ], + [ + "▁antimony", + -14.815393447875977 + ], + [ + "▁naira", + -14.815399169921877 + ], + [ + "▁EEOC", + -14.815404891967772 + ], + [ + "▁breastfed", + -14.815411567687988 + ], + [ + "▁ventral", + -14.815414428710938 + ], + [ + "▁hemmed", + -14.815415382385254 + ], + [ + "▁Stumble", + -14.815427780151367 + ], + [ + "▁Rafale", + -14.81543254852295 + ], + [ + "▁fedora", + -14.815447807312012 + ], + [ + "▁Bookshelf", + -14.815452575683594 + ], + [ + "▁Ctr", + -14.815457344055176 + ], + [ + "▁Comstock", + -14.815476417541504 + ], + [ + "▁Augsburg", + -14.815485954284668 + ], + [ + "▁Giver", + 
-14.815494537353516 + ], + [ + "▁inferno", + -14.81550407409668 + ], + [ + "▁forehand", + -14.815522193908691 + ], + [ + "▁Begum", + -14.81554126739502 + ], + [ + "▁Amusing", + -14.815542221069336 + ], + [ + "▁Dollhouse", + -14.815542221069336 + ], + [ + "▁Ballistic", + -14.815564155578612 + ], + [ + "▁Malachi", + -14.815564155578612 + ], + [ + "▁Gladwell", + -14.81556797027588 + ], + [ + "▁eggshell", + -14.815587043762209 + ], + [ + "▁Bookkeeping", + -14.815596580505373 + ], + [ + "▁preceptor", + -14.8156099319458 + ], + [ + "▁Elektro", + -14.815634727478027 + ], + [ + "▁batman", + -14.815637588500977 + ], + [ + "▁idli", + -14.81565284729004 + ], + [ + "icion", + -14.81565761566162 + ], + [ + "▁Orson", + -14.815670013427734 + ], + [ + "AKT", + -14.815725326538086 + ], + [ + "▁Panini", + -14.815732955932615 + ], + [ + "▁PUSH", + -14.815751075744627 + ], + [ + "▁Hobie", + -14.81582260131836 + ], + [ + "followed", + -14.815826416015623 + ], + [ + "▁Sitecore", + -14.815877914428713 + ], + [ + "▁arterio", + -14.815895080566406 + ], + [ + "▁hardwired", + -14.815905570983888 + ], + [ + "▁magnetite", + -14.815959930419922 + ], + [ + "globin", + -14.815963745117188 + ], + [ + "▁Sewell", + -14.81600856781006 + ], + [ + "▁Leung", + -14.816021919250488 + ], + [ + "▁SPP", + -14.816028594970703 + ], + [ + "▁rappel", + -14.816058158874512 + ], + [ + "▁Snail", + -14.816089630126951 + ], + [ + "▁Veneto", + -14.81609058380127 + ], + [ + "▁Comple", + -14.816132545471191 + ], + [ + "▁Jenson", + -14.816154479980469 + ], + [ + "▁MONTHS", + -14.816181182861328 + ], + [ + "▁cutbacks", + -14.816195487976074 + ], + [ + "cinnam", + -14.81622314453125 + ], + [ + "▁Eoin", + -14.816241264343262 + ], + [ + "▁implicate", + -14.81628131866455 + ], + [ + "▁removalist", + -14.816343307495115 + ], + [ + "10.2", + -14.816398620605469 + ], + [ + "▁Shearer", + -14.816502571105955 + ], + [ + "▁romantically", + -14.816516876220703 + ], + [ + "▁Harri", + -14.816713333129885 + ], + [ + "▁Kole", + -14.81676959991455 + ], + [ + "▁Sandton", + -14.816773414611816 + ], + [ + "▁puffer", + -14.816789627075195 + ], + [ + "Secur", + -14.816824913024902 + ], + [ + "▁Alban", + -14.8168363571167 + ], + [ + "▁Westeros", + -14.816861152648926 + ], + [ + "▁Avira", + -14.816902160644531 + ], + [ + "▁subtitled", + -14.816916465759276 + ], + [ + "▁dmg", + -14.816959381103516 + ], + [ + "▁slacker", + -14.817025184631348 + ], + [ + "olph", + -14.817092895507812 + ], + [ + "▁homeschooled", + -14.81721305847168 + ], + [ + "ISTIC", + -14.817252159118652 + ], + [ + "▁nominally", + -14.81743335723877 + ], + [ + "ALICE", + -14.817487716674805 + ], + [ + "▁Mute", + -14.817495346069336 + ], + [ + "performed", + -14.817508697509766 + ], + [ + "▁NLR", + -14.817609786987305 + ], + [ + "▁Modernism", + -14.817768096923828 + ], + [ + "▁$93", + -14.817789077758787 + ], + [ + "saved", + -14.81785011291504 + ], + [ + "▁Rondo", + -14.817862510681152 + ], + [ + "▁NEMA", + -14.817947387695312 + ], + [ + "liana", + -14.817973136901855 + ], + [ + "▁peachy", + -14.817981719970703 + ], + [ + "▁garam", + -14.818007469177246 + ], + [ + "▁CISO", + -14.81813144683838 + ], + [ + "▁hark", + -14.818242073059082 + ], + [ + "zé", + -14.818259239196776 + ], + [ + "FUN", + -14.818299293518066 + ], + [ + "beef", + -14.818330764770508 + ], + [ + "CPM", + -14.818341255187988 + ], + [ + "FCU", + -14.818387985229492 + ], + [ + "assy", + -14.818395614624023 + ], + [ + "1912", + -14.818405151367188 + ], + [ + "1220", + -14.818434715270996 + ], + [ + "kaka", + -14.818443298339844 + ], + [ + 
"1:06", + -14.818462371826172 + ], + [ + "▁creamier", + -14.818464279174805 + ], + [ + "▁EAC", + -14.818469047546388 + ], + [ + "▁posse", + -14.81849193572998 + ], + [ + "▁Karting", + -14.81850814819336 + ], + [ + "Enroll", + -14.818517684936523 + ], + [ + "▁relishing", + -14.818519592285156 + ], + [ + "▁RTD", + -14.81852912902832 + ], + [ + "▁Tied", + -14.818532943725586 + ], + [ + "▁boomer", + -14.818533897399902 + ], + [ + "▁instil", + -14.818556785583496 + ], + [ + "▁Roan", + -14.818659782409668 + ], + [ + "Permission", + -14.818660736083984 + ], + [ + "▁Thanh", + -14.818737030029297 + ], + [ + "naked", + -14.818760871887209 + ], + [ + "Linking", + -14.818830490112305 + ], + [ + "reliable", + -14.818906784057615 + ], + [ + "Parallel", + -14.81891918182373 + ], + [ + "Diagnostic", + -14.818922996520996 + ], + [ + "warrior", + -14.81893825531006 + ], + [ + "▁professorship", + -14.818952560424805 + ], + [ + "narrative", + -14.818955421447754 + ], + [ + "absorbed", + -14.81895637512207 + ], + [ + "pregnancy", + -14.818958282470703 + ], + [ + "spectacular", + -14.818960189819336 + ], + [ + "swift", + -14.818961143493652 + ], + [ + "quinone", + -14.818964958190918 + ], + [ + "releasing", + -14.818974494934082 + ], + [ + "confidential", + -14.818978309631348 + ], + [ + "Duncan", + -14.819016456604004 + ], + [ + "▁Toshi", + -14.819052696228027 + ], + [ + "verified", + -14.819056510925291 + ], + [ + "Chapel", + -14.81905746459961 + ], + [ + "Electricity", + -14.819093704223633 + ], + [ + "Quil", + -14.819107055664062 + ], + [ + "▁RIB", + -14.81910800933838 + ], + [ + "▁£500,000", + -14.819138526916504 + ], + [ + "shelter", + -14.8191499710083 + ], + [ + "CSIR", + -14.819151878356934 + ], + [ + "cork", + -14.819175720214844 + ], + [ + "▁Oku", + -14.819184303283691 + ], + [ + "SAD", + -14.81919765472412 + ], + [ + "▁Luzon", + -14.819210052490234 + ], + [ + "Infant", + -14.819231033325195 + ], + [ + "rosh", + -14.819242477416992 + ], + [ + "▁04:2", + -14.819310188293455 + ], + [ + "3.19", + -14.819315910339355 + ], + [ + "▁Gabriela", + -14.819342613220217 + ], + [ + "▁Dense", + -14.819358825683594 + ], + [ + "dyke", + -14.819395065307615 + ], + [ + "▁Pesca", + -14.819425582885742 + ], + [ + "Harold", + -14.81944465637207 + ], + [ + "becoming", + -14.819456100463867 + ], + [ + "иа", + -14.819504737854004 + ], + [ + "▁Carina", + -14.819573402404783 + ], + [ + "lowering", + -14.819586753845217 + ], + [ + "▁carnal", + -14.819697380065918 + ], + [ + "Iceland", + -14.819737434387209 + ], + [ + "▁Robby", + -14.819745063781738 + ], + [ + "▁Incomplete", + -14.81975269317627 + ], + [ + "pler", + -14.819762229919434 + ], + [ + "linx", + -14.81982135772705 + ], + [ + "▁Fishery", + -14.81984043121338 + ], + [ + "▁protester", + -14.819868087768556 + ], + [ + "(13)", + -14.819881439208984 + ], + [ + "aggle", + -14.81990909576416 + ], + [ + "▁BLT", + -14.819952011108398 + ], + [ + "Virtually", + -14.820043563842772 + ], + [ + "aky", + -14.820075035095217 + ], + [ + "▁Jasmin", + -14.82009983062744 + ], + [ + "▁McCla", + -14.820110321044922 + ], + [ + "▁SoHo", + -14.820219993591309 + ], + [ + "8.3%", + -14.820222854614258 + ], + [ + "Barnes", + -14.820222854614258 + ], + [ + "Clone", + -14.820446014404297 + ], + [ + "▁kcal", + -14.820460319519045 + ], + [ + "▁sonata", + -14.8204984664917 + ], + [ + "▁MTR", + -14.820544242858888 + ], + [ + "▁Thankyou", + -14.820574760437012 + ], + [ + "1-10", + -14.820589065551758 + ], + [ + "▁Natl", + -14.820590019226074 + ], + [ + "▁Bernal", + -14.820611953735352 + ], + [ + 
"amanda", + -14.820724487304688 + ], + [ + "mapped", + -14.820831298828123 + ], + [ + "HIM", + -14.820967674255373 + ], + [ + "▁nullify", + -14.82099437713623 + ], + [ + "▁1795", + -14.821133613586426 + ], + [ + "consciously", + -14.821170806884766 + ], + [ + "Buster", + -14.821207046508787 + ], + [ + "FCE", + -14.82131004333496 + ], + [ + "▁Gillis", + -14.821313858032228 + ], + [ + "▁Hibb", + -14.821403503417969 + ], + [ + "▁roto", + -14.821406364440918 + ], + [ + "▁1660", + -14.821415901184082 + ], + [ + "ficient", + -14.82143497467041 + ], + [ + "▁CAKE", + -14.821460723876951 + ], + [ + "▁Chocolat", + -14.821635246276855 + ], + [ + "▁McNab", + -14.821635246276855 + ], + [ + "▁hellish", + -14.821678161621094 + ], + [ + "▁MiG", + -14.821707725524902 + ], + [ + "quilt", + -14.821718215942385 + ], + [ + "5:40", + -14.821727752685549 + ], + [ + "journalist", + -14.821768760681152 + ], + [ + "Weaver", + -14.821770668029783 + ], + [ + "insurer", + -14.821877479553224 + ], + [ + "▁Micha", + -14.821881294250488 + ], + [ + "CDP", + -14.821982383728027 + ], + [ + "▁Fluke", + -14.82204246520996 + ], + [ + "opedia", + -14.82217788696289 + ], + [ + "▁Nez", + -14.822354316711426 + ], + [ + "▁Masha", + -14.822406768798828 + ], + [ + "▁endow", + -14.822436332702637 + ], + [ + "▁Protea", + -14.822574615478516 + ], + [ + "zki", + -14.822585105895996 + ], + [ + "▁Scud", + -14.822759628295898 + ], + [ + "▁carwash", + -14.82284164428711 + ], + [ + "▁anticancer", + -14.82289981842041 + ], + [ + "▁unseat", + -14.822955131530762 + ], + [ + "▁Hangar", + -14.82296085357666 + ], + [ + "▁Ojo", + -14.823058128356934 + ], + [ + "redeem", + -14.823084831237791 + ], + [ + "▁Magneto", + -14.82309341430664 + ], + [ + "▁Writ", + -14.82310962677002 + ], + [ + "▁$3.1", + -14.82314682006836 + ], + [ + "1:19", + -14.823166847229004 + ], + [ + "▁transportable", + -14.823196411132812 + ], + [ + "▁KGB", + -14.823229789733888 + ], + [ + "ETO", + -14.823287010192873 + ], + [ + "▁puke", + -14.82329559326172 + ], + [ + "▁homesick", + -14.82337760925293 + ], + [ + "haft", + -14.823413848876951 + ], + [ + "▁CCH", + -14.823420524597168 + ], + [ + "▁TPE", + -14.82347297668457 + ], + [ + "▁chocolat", + -14.823612213134766 + ], + [ + "Leah", + -14.823657989501951 + ], + [ + "canal", + -14.82371997833252 + ], + [ + "Bump", + -14.823725700378418 + ], + [ + "▁zircon", + -14.823856353759766 + ], + [ + "▁Cutie", + -14.823939323425291 + ], + [ + "▁LIMIT", + -14.823987007141112 + ], + [ + "Bir", + -14.824009895324709 + ], + [ + "Calvin", + -14.824322700500488 + ], + [ + "▁bronchi", + -14.82447338104248 + ], + [ + "▁EFFECT", + -14.824567794799805 + ], + [ + "Pancreatic", + -14.82456874847412 + ], + [ + "▁Bolshevik", + -14.82456874847412 + ], + [ + "▁Cartagena", + -14.82456874847412 + ], + [ + "▁Cellulose", + -14.82456874847412 + ], + [ + "▁Emancipation", + -14.82456874847412 + ], + [ + "▁Enquiry", + -14.82456874847412 + ], + [ + "▁Immunization", + -14.82456874847412 + ], + [ + "▁Metairie", + -14.82456874847412 + ], + [ + "▁Osceola", + -14.82456874847412 + ], + [ + "▁Plexiglas", + -14.82456874847412 + ], + [ + "▁Sodexo", + -14.82456874847412 + ], + [ + "▁anticoagulant", + -14.82456874847412 + ], + [ + "▁denizens", + -14.82456874847412 + ], + [ + "▁jettison", + -14.82456874847412 + ], + [ + "▁persevering", + -14.82456874847412 + ], + [ + "▁propranolol", + -14.82456874847412 + ], + [ + "▁scavenging", + -14.82456874847412 + ], + [ + "▁shiitake", + -14.82456874847412 + ], + [ + "▁telemarketer", + -14.82456874847412 + ], + [ + "▁CONSIDER", + 
-14.824569702148438 + ], + [ + "▁FLOWERS", + -14.824569702148438 + ], + [ + "▁Glencoe", + -14.824569702148438 + ], + [ + "▁Nginx", + -14.824569702148438 + ], + [ + "▁Ramaphosa", + -14.824569702148438 + ], + [ + "▁conjugation", + -14.824569702148438 + ], + [ + "▁immunoglobulin", + -14.824569702148438 + ], + [ + "▁misnomer", + -14.824569702148438 + ], + [ + "▁oklahoma", + -14.824569702148438 + ], + [ + "▁participle", + -14.824569702148438 + ], + [ + "▁peddling", + -14.824569702148438 + ], + [ + "▁unimpressed", + -14.824569702148438 + ], + [ + "▁Bigelow", + -14.824570655822754 + ], + [ + "▁Ingalls", + -14.824570655822754 + ], + [ + "▁Murakami", + -14.824570655822754 + ], + [ + "▁stroking", + -14.824570655822754 + ], + [ + "▁annealing", + -14.82457160949707 + ], + [ + "▁demerit", + -14.82457160949707 + ], + [ + "▁mortuary", + -14.82457160949707 + ], + [ + "▁Colossus", + -14.824572563171388 + ], + [ + "▁repatriate", + -14.824572563171388 + ], + [ + "▁Ghosn", + -14.824573516845703 + ], + [ + "▁Kirkpatrick", + -14.824575424194336 + ], + [ + "▁Divergent", + -14.824576377868652 + ], + [ + "▁tidak", + -14.824577331542969 + ], + [ + "▁Snider", + -14.824578285217283 + ], + [ + "▁Folklore", + -14.82458209991455 + ], + [ + "INTRODUCTION", + -14.824586868286133 + ], + [ + "▁Integer", + -14.824586868286133 + ], + [ + "▁Gunnison", + -14.82458782196045 + ], + [ + "▁Passaic", + -14.824596405029297 + ], + [ + "▁stairlift", + -14.824603080749512 + ], + [ + "▁Hyperloop", + -14.824609756469728 + ], + [ + "▁Flutter", + -14.82461166381836 + ], + [ + "▁Nugent", + -14.82461166381836 + ], + [ + "▁ecotourism", + -14.824613571166992 + ], + [ + "▁(1971)", + -14.824617385864258 + ], + [ + "▁(1974)", + -14.824617385864258 + ], + [ + "▁Compassionate", + -14.824629783630373 + ], + [ + "▁Righteous", + -14.824631690979004 + ], + [ + "▁Maidenhead", + -14.824634552001951 + ], + [ + "▁Remediation", + -14.82463550567627 + ], + [ + "▁placid", + -14.824639320373535 + ], + [ + "▁Tirana", + -14.824642181396484 + ], + [ + "▁Nominated", + -14.824649810791016 + ], + [ + "▁Musings", + -14.824652671813965 + ], + [ + "▁Brendon", + -14.824661254882812 + ], + [ + "▁Swimsuit", + -14.824665069580078 + ], + [ + "▁Biosphere", + -14.82466983795166 + ], + [ + "OBJECTIVE", + -14.824684143066406 + ], + [ + "▁Earnest", + -14.824684143066406 + ], + [ + "▁Rechargeable", + -14.82469081878662 + ], + [ + "▁reintroduction", + -14.824695587158203 + ], + [ + "▁peacekeepers", + -14.824705123901367 + ], + [ + "CHESTER", + -14.824712753295898 + ], + [ + "▁repellant", + -14.824746131896973 + ], + [ + "▁sedimentation", + -14.824746131896973 + ], + [ + "▁Reinvestment", + -14.824748039245604 + ], + [ + "▁operable", + -14.824758529663086 + ], + [ + "ORES", + -14.824774742126465 + ], + [ + "▁millimetres", + -14.824776649475098 + ], + [ + "▁CAUSE", + -14.824817657470703 + ], + [ + "▁Bushwick", + -14.82482624053955 + ], + [ + "▁CCE", + -14.824851036071776 + ], + [ + "▁undercarriage", + -14.824856758117676 + ], + [ + "▁depository", + -14.824880599975586 + ], + [ + "87-9", + -14.824893951416016 + ], + [ + "▁Rainer", + -14.824894905090332 + ], + [ + "▁strangle", + -14.824905395507812 + ], + [ + "▁Euler", + -14.824911117553713 + ], + [ + "▁smartwatches", + -14.824923515319824 + ], + [ + "csi", + -14.824950218200684 + ], + [ + "▁ECHO", + -14.824959754943848 + ], + [ + "▁naman", + -14.824959754943848 + ], + [ + "▁Julianne", + -14.825000762939451 + ], + [ + "▁Candela", + -14.825066566467283 + ], + [ + "▁unpainted", + -14.82509708404541 + ], + [ + "ekka", + -14.825108528137209 
+ ], + [ + "SKF", + -14.825119018554688 + ], + [ + "STANDING", + -14.82515811920166 + ], + [ + "▁hurriedly", + -14.825170516967772 + ], + [ + "▁21:00", + -14.825180053710938 + ], + [ + "▁Malia", + -14.82519817352295 + ], + [ + "▁Albi", + -14.825201988220217 + ], + [ + "PACE", + -14.82523250579834 + ], + [ + "▁Nusa", + -14.825311660766602 + ], + [ + "damp", + -14.82532024383545 + ], + [ + "wijk", + -14.82533836364746 + ], + [ + "▁Incorporation", + -14.825357437133787 + ], + [ + "▁stepmother", + -14.825373649597168 + ], + [ + "▁Usability", + -14.825379371643066 + ], + [ + "▁Galena", + -14.82540798187256 + ], + [ + "▁Nettle", + -14.825410842895508 + ], + [ + "▁Familia", + -14.82542896270752 + ], + [ + "▁Anvil", + -14.825448989868164 + ], + [ + "▁Freezing", + -14.825478553771973 + ], + [ + "palli", + -14.825496673583984 + ], + [ + "▁racecourse", + -14.825511932373049 + ], + [ + "▁Bylaws", + -14.825515747070312 + ], + [ + "reinterpretation", + -14.825523376464844 + ], + [ + "▁TONS", + -14.825551986694336 + ], + [ + "▁Ashram", + -14.825570106506348 + ], + [ + "▁spurious", + -14.825706481933594 + ], + [ + "▁relinquished", + -14.825709342956545 + ], + [ + "▁crossbody", + -14.825742721557615 + ], + [ + "Liber", + -14.82577419281006 + ], + [ + "▁Ambro", + -14.825778007507324 + ], + [ + "▁midcentury", + -14.825819969177246 + ], + [ + "▁elucidat", + -14.82584285736084 + ], + [ + "aap", + -14.825860977172852 + ], + [ + "▁Arro", + -14.825884819030762 + ], + [ + "offline", + -14.825942993164062 + ], + [ + "▁Incense", + -14.825960159301758 + ], + [ + "▁disgraced", + -14.826017379760742 + ], + [ + "▁transcended", + -14.82607078552246 + ], + [ + "CRL", + -14.826098442077637 + ], + [ + "300,000", + -14.826111793518066 + ], + [ + "▁nontraditional", + -14.826167106628418 + ], + [ + "▁hospitalisation", + -14.82620334625244 + ], + [ + "▁Donner", + -14.826231002807615 + ], + [ + "Cursor", + -14.826313018798828 + ], + [ + "▁Visi", + -14.826343536376951 + ], + [ + "▁checkboxes", + -14.826375007629396 + ], + [ + "Marinate", + -14.826430320739746 + ], + [ + "/01/2019", + -14.826459884643556 + ], + [ + "▁Basco", + -14.826478004455566 + ], + [ + "0:40", + -14.826485633850098 + ], + [ + "iero", + -14.826555252075195 + ], + [ + "▁piggyback", + -14.826566696166992 + ], + [ + "▁latched", + -14.826601028442385 + ], + [ + "Kryst", + -14.826604843139648 + ], + [ + "▁Frankel", + -14.826641082763672 + ], + [ + "▁Schau", + -14.826645851135254 + ], + [ + "avies", + -14.826669692993164 + ], + [ + "Clarify", + -14.826784133911133 + ], + [ + "kanda", + -14.82679557800293 + ], + [ + "▁barium", + -14.826831817626951 + ], + [ + "malgamated", + -14.826845169067385 + ], + [ + "▁Dougie", + -14.826916694641112 + ], + [ + "▁APEC", + -14.82702922821045 + ], + [ + "▁Alpen", + -14.827048301696776 + ], + [ + "1:08", + -14.827099800109863 + ], + [ + "▁RCM", + -14.827109336853027 + ], + [ + "▁Steamer", + -14.827146530151367 + ], + [ + "▁brothel", + -14.827187538146973 + ], + [ + "▁Fashionable", + -14.827197074890137 + ], + [ + "▁0.20", + -14.827202796936035 + ], + [ + "LIF", + -14.827299118041992 + ], + [ + "▁punctured", + -14.82732391357422 + ], + [ + "▁Ranga", + -14.82736110687256 + ], + [ + "medica", + -14.827398300170898 + ], + [ + "▁shellac", + -14.827473640441896 + ], + [ + "▁Roster", + -14.82747745513916 + ], + [ + "arrange", + -14.82748031616211 + ], + [ + "0.65", + -14.827510833740234 + ], + [ + "▁cooper", + -14.82753562927246 + ], + [ + "learned", + -14.827553749084473 + ], + [ + "▁Moc", + -14.827631950378418 + ], + [ + "▁vaporize", + 
-14.827653884887695 + ], + [ + "▁redoing", + -14.82769012451172 + ], + [ + "▁Ewan", + -14.82780647277832 + ], + [ + "azioni", + -14.827826499938965 + ], + [ + "▁annul", + -14.82785415649414 + ], + [ + "Outgoing", + -14.827857971191406 + ], + [ + "1937", + -14.827916145324709 + ], + [ + "▁defile", + -14.82792854309082 + ], + [ + "algia", + -14.827970504760742 + ], + [ + "▁Chantal", + -14.828030586242676 + ], + [ + "▁Norah", + -14.82803440093994 + ], + [ + "▁Morin", + -14.828125 + ], + [ + "0,500", + -14.828186988830566 + ], + [ + "▁Rumor", + -14.828310012817385 + ], + [ + "privileged", + -14.82839298248291 + ], + [ + "fugi", + -14.828493118286133 + ], + [ + "▁Linz", + -14.828496932983398 + ], + [ + "Versatile", + -14.828518867492676 + ], + [ + "▁smelter", + -14.828521728515623 + ], + [ + "▁weasel", + -14.82852554321289 + ], + [ + "Voucher", + -14.828535079956056 + ], + [ + "Optimal", + -14.82854175567627 + ], + [ + "Jurassic", + -14.828546524047852 + ], + [ + "Ambassador", + -14.828554153442385 + ], + [ + "Synthetic", + -14.828556060791016 + ], + [ + "Cognitive", + -14.828557014465332 + ], + [ + "inflated", + -14.828561782836914 + ], + [ + "Palestine", + -14.828564643859863 + ], + [ + "▁internalized", + -14.828567504882812 + ], + [ + "Decorating", + -14.828568458557127 + ], + [ + "dessert", + -14.828568458557127 + ], + [ + "Nebraska", + -14.828569412231444 + ], + [ + "grandparents", + -14.828577041625977 + ], + [ + "Starbucks", + -14.828590393066406 + ], + [ + "Minecraft", + -14.828591346740724 + ], + [ + "embedded", + -14.828603744506836 + ], + [ + "▁TNC", + -14.82861328125 + ], + [ + "арт", + -14.828627586364746 + ], + [ + "glucose", + -14.828633308410645 + ], + [ + "Harmony", + -14.828640937805176 + ], + [ + "▁massager", + -14.828643798828123 + ], + [ + "Passport", + -14.828682899475098 + ], + [ + "▁PAINT", + -14.828688621520996 + ], + [ + "▁£70", + -14.828691482543944 + ], + [ + "inflammation", + -14.828693389892578 + ], + [ + "lubber", + -14.828695297241213 + ], + [ + "detection", + -14.828713417053224 + ], + [ + "JOHN", + -14.828750610351562 + ], + [ + "▁Astral", + -14.828782081604004 + ], + [ + "▁Confi", + -14.828798294067385 + ], + [ + "fireplace", + -14.828800201416016 + ], + [ + "introduced", + -14.82884120941162 + ], + [ + "navigate", + -14.828848838806152 + ], + [ + "calibration", + -14.828849792480469 + ], + [ + "▁1366", + -14.828902244567873 + ], + [ + "Wealth", + -14.828941345214844 + ], + [ + "Encrypt", + -14.828972816467283 + ], + [ + "Personality", + -14.828994750976562 + ], + [ + "▁Rooted", + -14.829148292541504 + ], + [ + "tuition", + -14.829153060913086 + ], + [ + "▁PPD", + -14.829301834106444 + ], + [ + "▁baha", + -14.829368591308594 + ], + [ + "▁Corso", + -14.829461097717283 + ], + [ + "▁horned", + -14.829496383666992 + ], + [ + "Blaze", + -14.829509735107422 + ], + [ + "▁Lorenz", + -14.829512596130373 + ], + [ + "Biblio", + -14.829534530639648 + ], + [ + "Carrying", + -14.829545021057127 + ], + [ + "increased", + -14.829562187194824 + ], + [ + "▁waypoints", + -14.829599380493164 + ], + [ + "▁EMG", + -14.829745292663574 + ], + [ + "▁PDR", + -14.829828262329102 + ], + [ + "blended", + -14.829835891723633 + ], + [ + "actress", + -14.829950332641602 + ], + [ + "Freeze", + -14.829963684082031 + ], + [ + "fending", + -14.829967498779297 + ], + [ + "▁Seki", + -14.830144882202148 + ], + [ + "annually", + -14.8302583694458 + ], + [ + "▁Karina", + -14.83028793334961 + ], + [ + "3:23", + -14.830318450927734 + ], + [ + "UPE", + -14.830327987670898 + ], + [ + "▁Selva", + 
-14.830401420593262 + ], + [ + "▁Snag", + -14.830411911010742 + ], + [ + "MMO", + -14.830449104309082 + ], + [ + "ritis", + -14.830453872680664 + ], + [ + "poke", + -14.830462455749512 + ], + [ + "alpine", + -14.830464363098145 + ], + [ + "REY", + -14.83047103881836 + ], + [ + "guine", + -14.830522537231444 + ], + [ + "▁$3.9", + -14.830669403076172 + ], + [ + "Cement", + -14.830716133117676 + ], + [ + "axy", + -14.83074951171875 + ], + [ + "Massive", + -14.830760955810549 + ], + [ + "▁tiff", + -14.830784797668455 + ], + [ + "musician", + -14.830798149108888 + ], + [ + "calculated", + -14.83082389831543 + ], + [ + "MASH", + -14.830891609191896 + ], + [ + "▁perky", + -14.830894470214844 + ], + [ + "priv", + -14.830939292907717 + ], + [ + "▁NOTES", + -14.830985069274902 + ], + [ + "Futur", + -14.831161499023438 + ], + [ + "▁Cask", + -14.831217765808104 + ], + [ + "EOL", + -14.831221580505373 + ], + [ + "grape", + -14.83124542236328 + ], + [ + "▁Setter", + -14.83128261566162 + ], + [ + "▁GLS", + -14.831294059753418 + ], + [ + "▁GSP", + -14.831324577331545 + ], + [ + "▁Zay", + -14.831341743469238 + ], + [ + "GAP", + -14.831354141235352 + ], + [ + "Sweat", + -14.831433296203612 + ], + [ + "▁Trou", + -14.831671714782717 + ], + [ + "▁$220", + -14.831730842590332 + ], + [ + "roofed", + -14.831787109375 + ], + [ + "leben", + -14.831851959228516 + ], + [ + "▁stoplight", + -14.831859588623049 + ], + [ + "1911", + -14.831873893737791 + ], + [ + "▁Tartu", + -14.831915855407717 + ], + [ + "▁Bruck", + -14.832072257995604 + ], + [ + "Lentil", + -14.83208465576172 + ], + [ + "▁Farid", + -14.832155227661133 + ], + [ + "▁Caste", + -14.832235336303713 + ], + [ + "▁Sula", + -14.83226490020752 + ], + [ + "▁Washed", + -14.832275390625 + ], + [ + "conquer", + -14.832286834716797 + ], + [ + "Joey", + -14.83237361907959 + ], + [ + "▁Thelma", + -14.832385063171388 + ], + [ + "▁frightful", + -14.832472801208496 + ], + [ + "hrung", + -14.832526206970217 + ], + [ + "jug", + -14.832635879516602 + ], + [ + "▁bucking", + -14.832647323608398 + ], + [ + "▁Scoot", + -14.832649230957031 + ], + [ + "0.2%", + -14.832719802856444 + ], + [ + "stellung", + -14.832765579223633 + ], + [ + "▁plop", + -14.83281135559082 + ], + [ + "elsen", + -14.832825660705566 + ], + [ + "luka", + -14.8328857421875 + ], + [ + "usted", + -14.832983016967772 + ], + [ + "▁Hoppe", + -14.83298397064209 + ], + [ + "▁stampede", + -14.833016395568848 + ], + [ + "▁Luisa", + -14.833125114440918 + ], + [ + "wede", + -14.833148956298828 + ], + [ + "▁Billet", + -14.83321762084961 + ], + [ + "▁Talc", + -14.83322525024414 + ], + [ + "▁causeway", + -14.83327579498291 + ], + [ + "Loud", + -14.833294868469238 + ], + [ + "▁spore", + -14.833389282226562 + ], + [ + "▁confiscate", + -14.833402633666992 + ], + [ + "maze", + -14.833431243896484 + ], + [ + "HAS", + -14.833470344543455 + ], + [ + "▁Lync", + -14.833483695983888 + ], + [ + "boil", + -14.833550453186035 + ], + [ + "PIP", + -14.833609580993652 + ], + [ + "iviti", + -14.833612442016602 + ], + [ + "ocaine", + -14.833724975585938 + ], + [ + "▁Puna", + -14.833745002746582 + ], + [ + "plasty", + -14.833769798278809 + ], + [ + "phytes", + -14.833780288696287 + ], + [ + "ssance", + -14.833781242370604 + ], + [ + "feeder", + -14.833815574645996 + ], + [ + "1:24", + -14.833818435668944 + ], + [ + "cillo", + -14.833822250366213 + ], + [ + "zinc", + -14.833844184875488 + ], + [ + "▁conven", + -14.8338623046875 + ], + [ + "ISCO", + -14.833929061889648 + ], + [ + ".01.201", + -14.833956718444824 + ], + [ + "▁1788", + 
-14.83396053314209 + ], + [ + "chop", + -14.833969116210938 + ], + [ + "▁1:12", + -14.833980560302734 + ], + [ + "vietnam", + -14.83400058746338 + ], + [ + "Esposito", + -14.834002494812012 + ], + [ + "▁Juarez", + -14.834002494812012 + ], + [ + "▁Lichfield", + -14.834002494812012 + ], + [ + "▁Antalya", + -14.834003448486328 + ], + [ + "▁Chernobyl", + -14.834003448486328 + ], + [ + "▁Corbusier", + -14.834003448486328 + ], + [ + "▁Coutinho", + -14.834003448486328 + ], + [ + "▁Dumbledore", + -14.834003448486328 + ], + [ + "▁HUNDRED", + -14.834003448486328 + ], + [ + "▁Haridwar", + -14.834003448486328 + ], + [ + "▁MINUTES", + -14.834003448486328 + ], + [ + "▁Mogadishu", + -14.834003448486328 + ], + [ + "▁Staphylococcus", + -14.834003448486328 + ], + [ + "▁consternation", + -14.834003448486328 + ], + [ + "▁critiquing", + -14.834003448486328 + ], + [ + "▁disputing", + -14.834003448486328 + ], + [ + "▁doldrums", + -14.834003448486328 + ], + [ + "▁humongous", + -14.834003448486328 + ], + [ + "▁hustling", + -14.834003448486328 + ], + [ + "▁inhospitable", + -14.834003448486328 + ], + [ + "▁jubilant", + -14.834003448486328 + ], + [ + "▁leprosy", + -14.834003448486328 + ], + [ + "▁lullaby", + -14.834003448486328 + ], + [ + "▁phytonutrients", + -14.834003448486328 + ], + [ + "▁protégé", + -14.834003448486328 + ], + [ + "▁rapeseed", + -14.834003448486328 + ], + [ + "▁soufflé", + -14.834003448486328 + ], + [ + "▁stethoscope", + -14.834003448486328 + ], + [ + "▁strategizing", + -14.834003448486328 + ], + [ + "▁travesty", + -14.834003448486328 + ], + [ + "▁treachery", + -14.834003448486328 + ], + [ + "▁unmissable", + -14.834003448486328 + ], + [ + "Parallax", + -14.834004402160645 + ], + [ + "▁chamfer", + -14.834004402160645 + ], + [ + "▁luminance", + -14.834004402160645 + ], + [ + "▁Beaulieu", + -14.83400535583496 + ], + [ + "▁Raytheon", + -14.83400535583496 + ], + [ + "▁sympathise", + -14.834006309509276 + ], + [ + "▁Cascadia", + -14.834007263183594 + ], + [ + "▁pylori", + -14.834007263183594 + ], + [ + "▁antenatal", + -14.834009170532228 + ], + [ + "▁hypotension", + -14.834010124206545 + ], + [ + "▁Panchayat", + -14.834012031555176 + ], + [ + "Unmanned", + -14.834014892578123 + ], + [ + "▁Sprocket", + -14.834014892578123 + ], + [ + "▁macronutrient", + -14.834014892578123 + ], + [ + "▁Alpharetta", + -14.834016799926758 + ], + [ + "▁Nutrisystem", + -14.834016799926758 + ], + [ + "▁Ebates", + -14.834022521972656 + ], + [ + "▁Smurf", + -14.834024429321287 + ], + [ + "▁AGAINST", + -14.834027290344238 + ], + [ + "▁renegade", + -14.834027290344238 + ], + [ + "▁PRIOR", + -14.834029197692873 + ], + [ + "▁Collider", + -14.834031105041504 + ], + [ + "globulin", + -14.83403205871582 + ], + [ + "▁Plural", + -14.83403778076172 + ], + [ + "▁Endowed", + -14.834040641784668 + ], + [ + "▁DVLA", + -14.8340425491333 + ], + [ + "▁tendonitis", + -14.8340425491333 + ], + [ + "▁(1967)", + -14.8340482711792 + ], + [ + "▁(1976)", + -14.834050178527832 + ], + [ + "▁unchallenged", + -14.83405876159668 + ], + [ + "▁hyena", + -14.834061622619627 + ], + [ + "indian", + -14.83407211303711 + ], + [ + "▁Enrico", + -14.83407497406006 + ], + [ + "▁Sebring", + -14.834075927734377 + ], + [ + "▁Munnar", + -14.834091186523438 + ], + [ + "▁Bramble", + -14.834107398986816 + ], + [ + "▁Stadt", + -14.834111213684082 + ], + [ + "▁Balfour", + -14.834113121032717 + ], + [ + "▁Microchip", + -14.834113121032717 + ], + [ + "▁abscond", + -14.834115982055664 + ], + [ + "Oregano", + -14.83411693572998 + ], + [ + "▁Autopilot", + -14.83411693572998 + ], + [ 
+ "▁Folic", + -14.834121704101562 + ], + [ + "▁Modbus", + -14.834121704101562 + ], + [ + "▁$19.95", + -14.83412742614746 + ], + [ + "10.1002/", + -14.834141731262209 + ], + [ + "▁Bonfire", + -14.834152221679688 + ], + [ + "▁Gorham", + -14.834174156188965 + ], + [ + "▁Caribou", + -14.834181785583496 + ], + [ + "▁Matric", + -14.834184646606444 + ], + [ + "urga", + -14.834186553955078 + ], + [ + "▁Oldenburg", + -14.834190368652344 + ], + [ + "▁tyrannical", + -14.83419132232666 + ], + [ + "▁WEIGHT", + -14.83419418334961 + ], + [ + "▁utah", + -14.834200859069824 + ], + [ + "▁overreact", + -14.834202766418455 + ], + [ + "▁assuage", + -14.834203720092772 + ], + [ + "▁(202)", + -14.834216117858888 + ], + [ + "quoted", + -14.834220886230469 + ], + [ + "▁Dijk", + -14.834220886230469 + ], + [ + "▁Hallows", + -14.834224700927734 + ], + [ + "TRESS", + -14.834230422973633 + ], + [ + "▁Sunnyside", + -14.834239959716797 + ], + [ + "▁hoppy", + -14.83424949645996 + ], + [ + "▁CUDA", + -14.83425235748291 + ], + [ + "▁recertification", + -14.834291458129885 + ], + [ + "▁Mobius", + -14.834294319152832 + ], + [ + "▁HomeAway", + -14.834306716918944 + ], + [ + "▁abbot", + -14.834311485290527 + ], + [ + "▁Sumo", + -14.83432388305664 + ], + [ + "▁unorganized", + -14.834324836730955 + ], + [ + "▁Glenview", + -14.834333419799805 + ], + [ + "▁derailment", + -14.834341049194336 + ], + [ + "6.1%", + -14.834342002868652 + ], + [ + "▁patting", + -14.834346771240234 + ], + [ + "Accessed", + -14.83437442779541 + ], + [ + "koop", + -14.83440113067627 + ], + [ + "▁Stinson", + -14.834415435791016 + ], + [ + "▁Whitehorse", + -14.834430694580078 + ], + [ + "FILTER", + -14.834431648254396 + ], + [ + "▁vCenter", + -14.834455490112305 + ], + [ + "▁02:1", + -14.83446979522705 + ], + [ + "▁tenement", + -14.834531784057615 + ], + [ + "▁Informa", + -14.834554672241213 + ], + [ + "▁affront", + -14.834562301635742 + ], + [ + "▁pretreatment", + -14.834595680236816 + ], + [ + "▁22:00", + -14.83459758758545 + ], + [ + "7–1", + -14.83461856842041 + ], + [ + "▁housewarming", + -14.83462619781494 + ], + [ + "▁individualistic", + -14.83464241027832 + ], + [ + "▁Nani", + -14.834659576416016 + ], + [ + "▁Precast", + -14.834660530090332 + ], + [ + "▁BYOB", + -14.834681510925291 + ], + [ + "▁Bastion", + -14.834691047668455 + ], + [ + "▁15-18", + -14.834714889526367 + ], + [ + "Aquamarine", + -14.834729194641112 + ], + [ + "▁Stitcher", + -14.83475399017334 + ], + [ + "pounding", + -14.834771156311035 + ], + [ + "▁Lilies", + -14.834794044494627 + ], + [ + "▁Redux", + -14.83487606048584 + ], + [ + "▁acoustically", + -14.83488655090332 + ], + [ + "▁Bothell", + -14.834941864013672 + ], + [ + "▁mineralized", + -14.83496379852295 + ], + [ + "▁Akai", + -14.83500862121582 + ], + [ + "▁Jalal", + -14.835020065307615 + ], + [ + "▁Liquidator", + -14.835040092468262 + ], + [ + "▁Mundi", + -14.835104942321776 + ], + [ + "fate", + -14.83525848388672 + ], + [ + "▁Misfit", + -14.835286140441896 + ], + [ + "▁kickstarter", + -14.835354804992676 + ], + [ + "▁Parasite", + -14.83547306060791 + ], + [ + "▁fearlessly", + -14.83551025390625 + ], + [ + "▁grandest", + -14.8355131149292 + ], + [ + "Avg", + -14.8355712890625 + ], + [ + "▁absolve", + -14.835580825805664 + ], + [ + "Kol", + -14.835631370544434 + ], + [ + "▁SBT", + -14.835676193237305 + ], + [ + "Skate", + -14.835689544677734 + ], + [ + "▁HSV", + -14.835803985595703 + ], + [ + "▁candidly", + -14.83581256866455 + ], + [ + "allocated", + -14.8358793258667 + ], + [ + "IMATE", + -14.83594799041748 + ], + [ + "temple", 
+ -14.83595371246338 + ], + [ + "▁Rada", + -14.836000442504885 + ], + [ + "▁Sturm", + -14.8360013961792 + ], + [ + "RPM", + -14.83602809906006 + ], + [ + "▁conqueror", + -14.83611297607422 + ], + [ + "cology", + -14.836125373840332 + ], + [ + "▁Kumbh", + -14.836141586303713 + ], + [ + "▁Prawn", + -14.83621311187744 + ], + [ + "▁Ravine", + -14.836262702941896 + ], + [ + "▁chaste", + -14.836358070373535 + ], + [ + "▁Nabi", + -14.8363676071167 + ], + [ + "▁tricho", + -14.836374282836914 + ], + [ + "LSU", + -14.836393356323242 + ], + [ + "Instruction", + -14.836441040039062 + ], + [ + "RCE", + -14.836466789245604 + ], + [ + "▁Converted", + -14.836514472961426 + ], + [ + "▁22-24", + -14.83653736114502 + ], + [ + "▁Utter", + -14.836602210998535 + ], + [ + "▁Rashi", + -14.836605072021484 + ], + [ + "faster", + -14.836678504943848 + ], + [ + "▁Arenal", + -14.83669376373291 + ], + [ + "▁Sacha", + -14.83670711517334 + ], + [ + "▁Rakhi", + -14.83675479888916 + ], + [ + "▁Nagel", + -14.836795806884766 + ], + [ + "zhu", + -14.836797714233398 + ], + [ + "▁Corpse", + -14.83713436126709 + ], + [ + "▁Marni", + -14.837150573730469 + ], + [ + "▁Miso", + -14.837154388427734 + ], + [ + "dzie", + -14.837252616882324 + ], + [ + "▁unitary", + -14.837255477905272 + ], + [ + "▁Bucky", + -14.83729076385498 + ], + [ + "▁grist", + -14.837319374084473 + ], + [ + "▁skydive", + -14.837376594543455 + ], + [ + ".10.201", + -14.837421417236328 + ], + [ + "Strain", + -14.837467193603516 + ], + [ + "▁puffing", + -14.837589263916016 + ], + [ + "▁painterly", + -14.837591171264648 + ], + [ + "7.2%", + -14.837646484375 + ], + [ + "▁Trott", + -14.837654113769531 + ], + [ + "Referenced", + -14.83772087097168 + ], + [ + "oerner", + -14.837818145751951 + ], + [ + "▁Brighten", + -14.837924003601074 + ], + [ + "▁Thak", + -14.837925910949709 + ], + [ + "Fundamental", + -14.83792781829834 + ], + [ + "psu", + -14.837946891784668 + ], + [ + "▁marveled", + -14.837955474853516 + ], + [ + "▁geode", + -14.838010787963867 + ], + [ + "82%", + -14.83802604675293 + ], + [ + "gali", + -14.838095664978027 + ], + [ + "▁Hine", + -14.838114738464355 + ], + [ + "▁COX", + -14.838138580322266 + ], + [ + "fica", + -14.838160514831545 + ], + [ + "Undoubtedly", + -14.838220596313477 + ], + [ + "Notwithstanding", + -14.83823013305664 + ], + [ + "franchise", + -14.838245391845703 + ], + [ + "corrosive", + -14.838253021240234 + ], + [ + "Acupuncture", + -14.838260650634766 + ], + [ + "Penguin", + -14.838260650634766 + ], + [ + "Reliance", + -14.838260650634766 + ], + [ + "Garnish", + -14.838263511657717 + ], + [ + "Decorative", + -14.838269233703612 + ], + [ + "Advent", + -14.838275909423828 + ], + [ + "autonomous", + -14.838275909423828 + ], + [ + "Hockey", + -14.838276863098145 + ], + [ + "Hindu", + -14.838285446166992 + ], + [ + "Agenda", + -14.83829402923584 + ], + [ + "percussion", + -14.838296890258787 + ], + [ + "Concord", + -14.838326454162598 + ], + [ + "Copies", + -14.83839225769043 + ], + [ + "▁seri", + -14.83841323852539 + ], + [ + "▁Lair", + -14.838422775268556 + ], + [ + "Quartz", + -14.8384428024292 + ], + [ + "Rough", + -14.838457107543944 + ], + [ + "Nigel", + -14.83847713470459 + ], + [ + "▁Kaf", + -14.838571548461914 + ], + [ + "Tampa", + -14.838643074035645 + ], + [ + "merce", + -14.838656425476074 + ], + [ + "Narrow", + -14.83869457244873 + ], + [ + "Dialog", + -14.838695526123049 + ], + [ + "Observe", + -14.83877182006836 + ], + [ + "bigger", + -14.838772773742676 + ], + [ + "droid", + -14.838784217834473 + ], + [ + "▁Heap", + 
-14.83881664276123 + ], + [ + "9-19", + -14.838828086853027 + ], + [ + "▁Orland", + -14.838865280151367 + ], + [ + "▁Landis", + -14.838873863220217 + ], + [ + "Scheme", + -14.838895797729492 + ], + [ + "▁Kele", + -14.838907241821287 + ], + [ + "▁???????????", + -14.838913917541504 + ], + [ + "developmental", + -14.838964462280272 + ], + [ + "hewn", + -14.839008331298828 + ], + [ + "notably", + -14.839078903198242 + ], + [ + "7:40", + -14.839168548583984 + ], + [ + "▁UCD", + -14.839177131652832 + ], + [ + "▁pigtail", + -14.83927059173584 + ], + [ + "HIGH", + -14.839284896850586 + ], + [ + "Roc", + -14.83932399749756 + ], + [ + "▁CRAZY", + -14.839363098144531 + ], + [ + "Scarlet", + -14.839388847351074 + ], + [ + "lieb", + -14.839427947998049 + ], + [ + "Expanding", + -14.839463233947754 + ], + [ + "▁Kishore", + -14.83946418762207 + ], + [ + "Payday", + -14.839517593383787 + ], + [ + "Nominate", + -14.83956813812256 + ], + [ + "clearly", + -14.839632034301758 + ], + [ + "ostat", + -14.839695930480955 + ], + [ + "continued", + -14.839803695678713 + ], + [ + "havan", + -14.83985424041748 + ], + [ + "NCF", + -14.83987808227539 + ], + [ + "finishing", + -14.83992862701416 + ], + [ + "Commander", + -14.839998245239258 + ], + [ + "▁mettle", + -14.840030670166016 + ], + [ + "Vince", + -14.840096473693848 + ], + [ + "▁Dien", + -14.840134620666504 + ], + [ + "▁Georgi", + -14.840229988098145 + ], + [ + "cancerous", + -14.840258598327637 + ], + [ + ".3%)", + -14.8405179977417 + ], + [ + "▁tamari", + -14.840532302856444 + ], + [ + "▁ALTER", + -14.840754508972168 + ], + [ + "3,100", + -14.840803146362305 + ], + [ + "networking", + -14.84080696105957 + ], + [ + "▁Caldera", + -14.84085178375244 + ], + [ + "igra", + -14.840859413146973 + ], + [ + "▁Baruch", + -14.84090805053711 + ], + [ + "Teeth", + -14.840975761413574 + ], + [ + "KAY", + -14.841050148010254 + ], + [ + "▁vivi", + -14.841070175170898 + ], + [ + "▁Holdem", + -14.841115951538086 + ], + [ + "▁26.2", + -14.841212272644045 + ], + [ + "▁Viber", + -14.841219902038574 + ], + [ + "▁bargained", + -14.84128761291504 + ], + [ + "▁factsheet", + -14.841341972351074 + ], + [ + "Cuba", + -14.841477394104004 + ], + [ + "▁$82", + -14.8414888381958 + ], + [ + "▁metabolite", + -14.8414888381958 + ], + [ + "SSR", + -14.841514587402344 + ], + [ + "fragment", + -14.841538429260254 + ], + [ + "dramatic", + -14.841550827026367 + ], + [ + "Trusted", + -14.841560363769531 + ], + [ + "▁Glider", + -14.841889381408691 + ], + [ + "NOM", + -14.841962814331056 + ], + [ + "rauch", + -14.842022895812988 + ], + [ + "▁Lieu", + -14.842022895812988 + ], + [ + "▁Cust", + -14.842041969299316 + ], + [ + "VHS", + -14.84206485748291 + ], + [ + "▁Schulman", + -14.84212589263916 + ], + [ + "hedra", + -14.842203140258787 + ], + [ + "potter", + -14.842292785644531 + ], + [ + "HIMA", + -14.842302322387695 + ], + [ + "▁14:3", + -14.842308044433594 + ], + [ + "indiana", + -14.842345237731934 + ], + [ + "Pea", + -14.842450141906738 + ], + [ + "▁Meath", + -14.842494010925291 + ], + [ + "▁LTL", + -14.842514991760254 + ], + [ + "▁13.8", + -14.84251880645752 + ], + [ + "FTC", + -14.842528343200684 + ], + [ + "▁Skan", + -14.842578887939451 + ], + [ + "5,500", + -14.8426513671875 + ], + [ + ".11.201", + -14.842662811279297 + ], + [ + "loqui", + -14.842679023742676 + ], + [ + "▁Yag", + -14.842785835266112 + ], + [ + "▁kindest", + -14.842796325683594 + ], + [ + "▁quadri", + -14.842880249023438 + ], + [ + "Camel", + -14.842888832092283 + ], + [ + "Quinn", + -14.842926025390623 + ], + [ + "Filled", + 
-14.842981338500977 + ], + [ + "▁noose", + -14.84300708770752 + ], + [ + "dear", + -14.843101501464844 + ], + [ + "▁Farsi", + -14.843154907226562 + ], + [ + "▁Consume", + -14.843175888061523 + ], + [ + "▁ganz", + -14.8431978225708 + ], + [ + "hhhhhh", + -14.843233108520508 + ], + [ + "▁Bilbo", + -14.843267440795898 + ], + [ + "▁vacu", + -14.843279838562012 + ], + [ + "▁Obvious", + -14.843294143676758 + ], + [ + "▁CAIR", + -14.84329605102539 + ], + [ + "▁12:45", + -14.843305587768556 + ], + [ + "▁Pittsburg", + -14.84335994720459 + ], + [ + "CAUTION", + -14.84352684020996 + ], + [ + "Disciplinary", + -14.84352684020996 + ], + [ + "Roethlisberger", + -14.84352684020996 + ], + [ + "▁CENTRAL", + -14.84352684020996 + ], + [ + "▁Carvalho", + -14.84352684020996 + ], + [ + "▁Courtenay", + -14.84352684020996 + ], + [ + "▁Endeavor", + -14.84352684020996 + ], + [ + "▁Escherichia", + -14.84352684020996 + ], + [ + "▁GlaxoSmithKline", + -14.84352684020996 + ], + [ + "▁Hartlepool", + -14.84352684020996 + ], + [ + "▁Lubricant", + -14.84352684020996 + ], + [ + "▁McGuinness", + -14.84352684020996 + ], + [ + "▁Medellin", + -14.84352684020996 + ], + [ + "▁Mifflin", + -14.84352684020996 + ], + [ + "▁Nainital", + -14.84352684020996 + ], + [ + "▁Oedipus", + -14.84352684020996 + ], + [ + "▁Patiala", + -14.84352684020996 + ], + [ + "▁Vermilion", + -14.84352684020996 + ], + [ + "▁WYSIWYG", + -14.84352684020996 + ], + [ + "▁anthocyanin", + -14.84352684020996 + ], + [ + "▁chrysler", + -14.84352684020996 + ], + [ + "▁euphemism", + -14.84352684020996 + ], + [ + "▁hestitate", + -14.84352684020996 + ], + [ + "▁hilarity", + -14.84352684020996 + ], + [ + "▁homonymous", + -14.84352684020996 + ], + [ + "▁hysterectomy", + -14.84352684020996 + ], + [ + "▁justifiably", + -14.84352684020996 + ], + [ + "▁nosebleed", + -14.84352684020996 + ], + [ + "▁paracetamol", + -14.84352684020996 + ], + [ + "▁reproducibility", + -14.84352684020996 + ], + [ + "▁unfavourable", + -14.84352684020996 + ], + [ + "▁unhinged", + -14.84352684020996 + ], + [ + "▁venetian", + -14.84352684020996 + ], + [ + "floxacin", + -14.843527793884276 + ], + [ + "▁aplomb", + -14.843527793884276 + ], + [ + "▁unfailing", + -14.843527793884276 + ], + [ + "▁Emergencies", + -14.843528747558594 + ], + [ + "▁Rakuten", + -14.843528747558594 + ], + [ + "▁RePEc", + -14.843528747558594 + ], + [ + "▁SERIOUS", + -14.843528747558594 + ], + [ + "▁evading", + -14.843528747558594 + ], + [ + "▁epidermal", + -14.84352970123291 + ], + [ + "▁plexiglass", + -14.84352970123291 + ], + [ + "▁Bldg", + -14.843530654907228 + ], + [ + "▁impostor", + -14.843530654907228 + ], + [ + "▁Zidane", + -14.843531608581545 + ], + [ + "▁vitriol", + -14.843531608581545 + ], + [ + "▁Combinator", + -14.84353256225586 + ], + [ + "▁MANILA", + -14.843534469604492 + ], + [ + "▁Nakamoto", + -14.843534469604492 + ], + [ + "▁Cymbal", + -14.843536376953123 + ], + [ + "▁babbling", + -14.84353733062744 + ], + [ + "▁gleefully", + -14.843538284301758 + ], + [ + "▁Gdansk", + -14.843539237976074 + ], + [ + "▁boomerang", + -14.843539237976074 + ], + [ + "▁OUTSIDE", + -14.84354019165039 + ], + [ + "▁Navarre", + -14.843544006347656 + ], + [ + "▁macaque", + -14.843544960021973 + ], + [ + "▁Gilliam", + -14.843545913696287 + ], + [ + "hili", + -14.843547821044922 + ], + [ + "▁TOTALLY", + -14.843551635742188 + ], + [ + "rburgring", + -14.843552589416504 + ], + [ + "▁Brantford", + -14.84355354309082 + ], + [ + "▁Dianabol", + -14.843554496765137 + ], + [ + "▁Steffen", + -14.84355640411377 + ], + [ + "▁transcendence", + 
-14.843558311462402 + ], + [ + "▁Pollack", + -14.84356689453125 + ], + [ + "▁microarray", + -14.843570709228516 + ], + [ + "▁FileMaker", + -14.843579292297363 + ], + [ + "▁Olsson", + -14.84358024597168 + ], + [ + "▁dazed", + -14.843583106994627 + ], + [ + "▁raindrops", + -14.843602180480955 + ], + [ + "▁Yammer", + -14.843605995178224 + ], + [ + "▁rescheduling", + -14.84360694885254 + ], + [ + "▁Kasich", + -14.843620300292969 + ], + [ + "▁reenact", + -14.843623161315918 + ], + [ + "▁Hobson", + -14.84362506866455 + ], + [ + "▁Tenancy", + -14.843631744384766 + ], + [ + "▁deterring", + -14.843634605407717 + ], + [ + "▁Fredrick", + -14.843646049499512 + ], + [ + "▁Weakness", + -14.843647956848145 + ], + [ + "▁immortalized", + -14.843647956848145 + ], + [ + "▁wagging", + -14.843667030334473 + ], + [ + "▁Olney", + -14.843676567077637 + ], + [ + "▁(203)", + -14.843683242797852 + ], + [ + "▁Morality", + -14.84369945526123 + ], + [ + "▁Nouvel", + -14.843704223632812 + ], + [ + "▁LEAST", + -14.843706130981444 + ], + [ + "▁Artesia", + -14.843713760375977 + ], + [ + "▁Dispers", + -14.843716621398926 + ], + [ + "▁Ensign", + -14.84372615814209 + ], + [ + "▁Sunbeam", + -14.843729972839355 + ], + [ + "▁Novelty", + -14.84373378753662 + ], + [ + "▁Canonical", + -14.843756675720217 + ], + [ + "▁Susannah", + -14.843757629394531 + ], + [ + "▁Deception", + -14.84376049041748 + ], + [ + "▁Fetal", + -14.84377098083496 + ], + [ + "▁populating", + -14.843789100646973 + ], + [ + "SPEAK", + -14.843799591064451 + ], + [ + "ahab", + -14.843804359436035 + ], + [ + "▁2008-09", + -14.84382438659668 + ], + [ + "▁suppleness", + -14.84385871887207 + ], + [ + "▁Touchscreen", + -14.843873023986816 + ], + [ + "Validate", + -14.843884468078612 + ], + [ + "▁Koto", + -14.843896865844728 + ], + [ + "▁lapped", + -14.843896865844728 + ], + [ + "▁EFS", + -14.843939781188965 + ], + [ + "▁Roundabout", + -14.843954086303713 + ], + [ + "▁annals", + -14.84395694732666 + ], + [ + "▁sidetracked", + -14.843961715698242 + ], + [ + "zimmer", + -14.84399127960205 + ], + [ + "▁reattach", + -14.84402561187744 + ], + [ + "▁portico", + -14.844037055969238 + ], + [ + "▁Maryam", + -14.844042778015137 + ], + [ + "▁ascetic", + -14.844046592712402 + ], + [ + "▁Horned", + -14.844061851501465 + ], + [ + "▁GABA", + -14.84408950805664 + ], + [ + "▁pluralism", + -14.844121932983398 + ], + [ + "▁Robber", + -14.844138145446776 + ], + [ + "▁polemic", + -14.844186782836914 + ], + [ + "▁rustling", + -14.844244003295898 + ], + [ + "cuase", + -14.84429931640625 + ], + [ + "tzky", + -14.844311714172363 + ], + [ + "▁eyeglass", + -14.84434986114502 + ], + [ + "▁officiated", + -14.84434986114502 + ], + [ + "▁Moulding", + -14.844467163085938 + ], + [ + "▁roleplaying", + -14.844504356384276 + ], + [ + "▁Posey", + -14.844517707824709 + ], + [ + "▁Acharya", + -14.844521522521973 + ], + [ + "▁Faulk", + -14.844541549682615 + ], + [ + "▁garnishment", + -14.844542503356934 + ], + [ + "▁Relational", + -14.844547271728516 + ], + [ + "▁iPlayer", + -14.844562530517578 + ], + [ + "Implemented", + -14.844573974609377 + ], + [ + "YSL", + -14.844594955444336 + ], + [ + "▁inversely", + -14.844597816467283 + ], + [ + "▁Klux", + -14.844653129577637 + ], + [ + "Outer", + -14.844686508178713 + ], + [ + "▁PTE", + -14.844751358032228 + ], + [ + "Spook", + -14.844761848449709 + ], + [ + "▁Ligue", + -14.844781875610352 + ], + [ + "eril", + -14.844870567321776 + ], + [ + "▁Gerd", + -14.84489631652832 + ], + [ + "▁Carlow", + -14.844971656799316 + ], + [ + "▁OFFERS", + -14.84498119354248 + ], + [ + 
"▁Airsoft", + -14.844983100891112 + ], + [ + "▁taproom", + -14.845011711120604 + ], + [ + "Reporter", + -14.84504508972168 + ], + [ + "▁Emmaus", + -14.845050811767578 + ], + [ + "Wax", + -14.845060348510742 + ], + [ + "▁Amari", + -14.845080375671388 + ], + [ + "▁simile", + -14.845133781433104 + ], + [ + "▁kitted", + -14.8451509475708 + ], + [ + "WSL", + -14.845172882080078 + ], + [ + "▁FSI", + -14.84521484375 + ], + [ + "▁decanter", + -14.845443725585938 + ], + [ + "▁Hopewell", + -14.845446586608888 + ], + [ + "Dyna", + -14.84546947479248 + ], + [ + "▁Joburg", + -14.84547233581543 + ], + [ + "▁Bilal", + -14.845523834228516 + ], + [ + "Condo", + -14.845738410949709 + ], + [ + "▁Wack", + -14.845808029174805 + ], + [ + "highland", + -14.845867156982422 + ], + [ + "▁successively", + -14.845871925354004 + ], + [ + "▁trippy", + -14.845884323120115 + ], + [ + "fidelity", + -14.845892906188965 + ], + [ + "NIB", + -14.845943450927734 + ], + [ + "ragging", + -14.845946311950684 + ], + [ + "comforter", + -14.845970153808594 + ], + [ + "▁Miri", + -14.846025466918944 + ], + [ + "TINA", + -14.846038818359377 + ], + [ + "Lexi", + -14.846041679382324 + ], + [ + "Invoke", + -14.846144676208496 + ], + [ + "▁110,000", + -14.846221923828123 + ], + [ + "▁CTV", + -14.846258163452148 + ], + [ + "▁nightfall", + -14.846258163452148 + ], + [ + "▁Berth", + -14.846360206604004 + ], + [ + "▁Newbie", + -14.846407890319824 + ], + [ + "desire", + -14.846437454223633 + ], + [ + "Farmer", + -14.846513748168944 + ], + [ + "missed", + -14.846539497375488 + ], + [ + "▁Schnit", + -14.846542358398438 + ], + [ + "▁legalities", + -14.846602439880373 + ], + [ + "exclude", + -14.846617698669434 + ], + [ + "PST", + -14.84666919708252 + ], + [ + "OTHER", + -14.846702575683594 + ], + [ + "TEEN", + -14.846807479858398 + ], + [ + "afia", + -14.84685230255127 + ], + [ + "▁archer", + -14.84686279296875 + ], + [ + "▁Tanzanian", + -14.846945762634276 + ], + [ + "NEF", + -14.846949577331545 + ], + [ + "▁WPP", + -14.84705638885498 + ], + [ + "▁schon", + -14.847100257873535 + ], + [ + "▁Debtor", + -14.84716510772705 + ], + [ + "▁Rancher", + -14.84717082977295 + ], + [ + "1:01", + -14.847177505493164 + ], + [ + "gye", + -14.847295761108398 + ], + [ + "MONT", + -14.84734344482422 + ], + [ + "45,000", + -14.847417831420898 + ], + [ + "▁Constantin", + -14.847477912902832 + ], + [ + "HUD", + -14.847479820251465 + ], + [ + "rlich", + -14.847522735595703 + ], + [ + "▁standardisation", + -14.847554206848145 + ], + [ + "▁Plumb", + -14.84756088256836 + ], + [ + "▁loveliest", + -14.847631454467772 + ], + [ + "PTP", + -14.847752571105955 + ], + [ + "▁FMV", + -14.84779167175293 + ], + [ + "▁LCR", + -14.84782600402832 + ], + [ + "lyceryl", + -14.847867012023926 + ], + [ + "Acoustic", + -14.847918510437012 + ], + [ + "CSF", + -14.847951889038086 + ], + [ + "viva", + -14.847959518432615 + ], + [ + "▁$700,000", + -14.847989082336426 + ], + [ + "sweetened", + -14.848015785217283 + ], + [ + "essie", + -14.8480224609375 + ], + [ + "Completing", + -14.848033905029297 + ], + [ + "Motivation", + -14.848039627075195 + ], + [ + "Approval", + -14.848063468933104 + ], + [ + "Departure", + -14.848063468933104 + ], + [ + "Cynthia", + -14.84807014465332 + ], + [ + "Kindergarten", + -14.848072052001951 + ], + [ + "celebrate", + -14.848073959350586 + ], + [ + "angshan", + -14.848079681396484 + ], + [ + "Volkswagen", + -14.848081588745115 + ], + [ + "volatile", + -14.848081588745115 + ], + [ + "Afghanistan", + -14.848084449768066 + ], + [ + "repped", + -14.84811019897461 + ], 
+ [ + "Insulation", + -14.84812355041504 + ], + [ + "Alicia", + -14.848125457763672 + ], + [ + "▁bleu", + -14.848133087158203 + ], + [ + "abula", + -14.848161697387695 + ], + [ + "Recycling", + -14.848169326782228 + ], + [ + "discussion", + -14.848173141479492 + ], + [ + "unsubscribe", + -14.848191261291504 + ], + [ + "Bucket", + -14.848217010498049 + ], + [ + "▁Numb", + -14.848234176635742 + ], + [ + "Charity", + -14.84828758239746 + ], + [ + "▁11/1", + -14.848288536071776 + ], + [ + "▁disarming", + -14.848407745361328 + ], + [ + "Soldier", + -14.848424911499023 + ], + [ + "Banner", + -14.848444938659668 + ], + [ + "▁computerised", + -14.84847640991211 + ], + [ + "▁08:0", + -14.848583221435549 + ], + [ + "▁Blah", + -14.84861660003662 + ], + [ + "▁Adri", + -14.848627090454102 + ], + [ + "▁Authorize", + -14.848628044128418 + ], + [ + "manchester", + -14.848718643188477 + ], + [ + "▁helplessly", + -14.848767280578612 + ], + [ + "certify", + -14.848848342895508 + ], + [ + "potentially", + -14.84886360168457 + ], + [ + "truly", + -14.84888744354248 + ], + [ + "Introduced", + -14.848910331726074 + ], + [ + "▁04:0", + -14.848956108093262 + ], + [ + "Accepted", + -14.84924030303955 + ], + [ + "▁bluesy", + -14.849262237548828 + ], + [ + "▁Answered", + -14.849272727966309 + ], + [ + "YAN", + -14.849273681640623 + ], + [ + "Jared", + -14.84927749633789 + ], + [ + "▁01:4", + -14.849281311035156 + ], + [ + "Nerd", + -14.849323272705078 + ], + [ + "▁Guitarist", + -14.849384307861328 + ], + [ + "▁14:4", + -14.849457740783691 + ], + [ + "paired", + -14.849576950073242 + ], + [ + "ARPA", + -14.849627494812012 + ], + [ + "cursion", + -14.84964656829834 + ], + [ + "RINA", + -14.84974193572998 + ], + [ + "▁Dismiss", + -14.84984016418457 + ], + [ + "Resolved", + -14.849990844726562 + ], + [ + "Transit", + -14.850083351135254 + ], + [ + "▁Errol", + -14.850176811218262 + ], + [ + "▁SPM", + -14.850191116333008 + ], + [ + "gura", + -14.85019874572754 + ], + [ + "CMG", + -14.850210189819336 + ], + [ + "▁masher", + -14.850220680236816 + ], + [ + "▁COMPL", + -14.850241661071776 + ], + [ + "▁Kipp", + -14.85025405883789 + ], + [ + "▁Thoughtful", + -14.850275993347168 + ], + [ + "▁Usman", + -14.850295066833496 + ], + [ + "asque", + -14.850337028503418 + ], + [ + "▁mantis", + -14.850360870361328 + ], + [ + "aww", + -14.850408554077148 + ], + [ + "▁Egger", + -14.85048007965088 + ], + [ + "▁Bandai", + -14.850626945495604 + ], + [ + "▁Forging", + -14.85065460205078 + ], + [ + "▁Relation", + -14.85071849822998 + ], + [ + "▁cognitively", + -14.850719451904297 + ], + [ + "mimetic", + -14.850722312927246 + ], + [ + "Detach", + -14.850781440734863 + ], + [ + "Latino", + -14.850839614868164 + ], + [ + "▁Nutritionist", + -14.85094165802002 + ], + [ + "▁Rayne", + -14.851000785827637 + ], + [ + ".#200", + -14.851386070251465 + ], + [ + "▁Dho", + -14.851388931274414 + ], + [ + "Jab", + -14.851404190063477 + ], + [ + "Alarm", + -14.851497650146484 + ], + [ + "BOUR", + -14.851656913757324 + ], + [ + "▁PBA", + -14.85167407989502 + ], + [ + "▁Contempor", + -14.851675987243652 + ], + [ + "▁Bowler", + -14.851707458496094 + ], + [ + "▁Choke", + -14.851722717285156 + ], + [ + "▁Aryan", + -14.851730346679688 + ], + [ + "Mob", + -14.8517427444458 + ], + [ + "Gimme", + -14.851744651794434 + ], + [ + "PEM", + -14.851800918579102 + ], + [ + "▁redeploy", + -14.851832389831545 + ], + [ + "▁Nando", + -14.851900100708008 + ], + [ + "7-10", + -14.852005004882812 + ], + [ + "▁niet", + -14.85204029083252 + ], + [ + "▁1806", + -14.85204792022705 + ], + 
[ + "ера", + -14.852078437805176 + ], + [ + "11:30", + -14.852147102355955 + ], + [ + "▁617-", + -14.85232162475586 + ], + [ + "1938", + -14.852334976196287 + ], + [ + "▁11.7", + -14.852352142333984 + ], + [ + "antine", + -14.85238552093506 + ], + [ + "▁Concours", + -14.852432250976562 + ], + [ + "winding", + -14.85246753692627 + ], + [ + "▁Notts", + -14.852476119995115 + ], + [ + "9:50", + -14.85250759124756 + ], + [ + "▁enumerate", + -14.852542877197266 + ], + [ + "-3-1", + -14.852566719055176 + ], + [ + "conte", + -14.852614402770996 + ], + [ + "ltr", + -14.852723121643066 + ], + [ + "▁Ronde", + -14.85283660888672 + ], + [ + "0500", + -14.852842330932615 + ], + [ + "MCO", + -14.852879524230955 + ], + [ + "▁constru", + -14.852912902832031 + ], + [ + "IZATION", + -14.852933883666992 + ], + [ + "cipher", + -14.85293960571289 + ], + [ + "▁breech", + -14.85301685333252 + ], + [ + "DEAR", + -14.85302448272705 + ], + [ + "▁Usual", + -14.853078842163086 + ], + [ + "▁naturopath", + -14.85309600830078 + ], + [ + "▁Seco", + -14.853096961975098 + ], + [ + "Roubaix", + -14.853141784667969 + ], + [ + "masquerading", + -14.853141784667969 + ], + [ + "▁888-666-1846", + -14.853141784667969 + ], + [ + "▁888-739-5110", + -14.853141784667969 + ], + [ + "▁Abundance", + -14.853141784667969 + ], + [ + "▁Cappadocia", + -14.853141784667969 + ], + [ + "▁Fawcett", + -14.853141784667969 + ], + [ + "▁Grenadine", + -14.853141784667969 + ], + [ + "▁Hampden", + -14.853141784667969 + ], + [ + "▁Hoddesdon", + -14.853141784667969 + ], + [ + "▁Humphries", + -14.853141784667969 + ], + [ + "▁Jacquard", + -14.853141784667969 + ], + [ + "▁Merchandising", + -14.853141784667969 + ], + [ + "▁Monsieur", + -14.853141784667969 + ], + [ + "▁Nunavut", + -14.853141784667969 + ], + [ + "▁Pedagogy", + -14.853141784667969 + ], + [ + "▁Pochettino", + -14.853141784667969 + ], + [ + "▁Spokesperson", + -14.853141784667969 + ], + [ + "▁enlarging", + -14.853141784667969 + ], + [ + "▁grooving", + -14.853141784667969 + ], + [ + "▁infatuation", + -14.853141784667969 + ], + [ + "▁pathophysiology", + -14.853141784667969 + ], + [ + "▁polyunsaturated", + -14.853141784667969 + ], + [ + "▁somersault", + -14.853141784667969 + ], + [ + "▁торрент", + -14.853141784667969 + ], + [ + "▁ANNUAL", + -14.853142738342283 + ], + [ + "▁Alcantara", + -14.853142738342283 + ], + [ + "▁Asterisk", + -14.853142738342283 + ], + [ + "▁Gondola", + -14.853142738342283 + ], + [ + "▁Lowndes", + -14.853142738342283 + ], + [ + "▁Mufti", + -14.853142738342283 + ], + [ + "▁Referendum", + -14.853142738342283 + ], + [ + "▁Rumsfeld", + -14.853142738342283 + ], + [ + "▁hypoglycemia", + -14.853142738342283 + ], + [ + "▁melodious", + -14.853142738342283 + ], + [ + "▁purplish", + -14.853142738342283 + ], + [ + "▁repulsive", + -14.853142738342283 + ], + [ + "▁ungrateful", + -14.853142738342283 + ], + [ + "Laszlo", + -14.853143692016602 + ], + [ + "▁Discipleship", + -14.853143692016602 + ], + [ + "▁Ibanez", + -14.853143692016602 + ], + [ + "▁Oliphant", + -14.853143692016602 + ], + [ + "▁adenosine", + -14.853143692016602 + ], + [ + "▁lactate", + -14.853143692016602 + ], + [ + "LENGTH", + -14.853144645690918 + ], + [ + "▁Coltrane", + -14.853145599365234 + ], + [ + "▁Hertford", + -14.853145599365234 + ], + [ + "▁Paribas", + -14.853145599365234 + ], + [ + "▁extort", + -14.85314655303955 + ], + [ + "▁Weebly", + -14.853148460388184 + ], + [ + "▁velour", + -14.853148460388184 + ], + [ + "sixth", + -14.8531494140625 + ], + [ + "▁Bromwich", + -14.8531494140625 + ], + [ + "▁Lonsdale", + 
-14.8531494140625 + ], + [ + "▁Venerable", + -14.853150367736816 + ], + [ + "▁immunological", + -14.853150367736816 + ], + [ + "▁Estrella", + -14.853151321411133 + ], + [ + "▁Telemundo", + -14.853151321411133 + ], + [ + "▁SUBMIT", + -14.85315227508545 + ], + [ + "▁Klamath", + -14.853153228759766 + ], + [ + "▁Cayuga", + -14.853154182434082 + ], + [ + "▁Derived", + -14.853154182434082 + ], + [ + "▁Elantra", + -14.853154182434082 + ], + [ + "▁Wakanda", + -14.853155136108398 + ], + [ + "▁piazza", + -14.853156089782717 + ], + [ + "▁Palatine", + -14.853157043457031 + ], + [ + "▁gullible", + -14.853157997131348 + ], + [ + "▁Autobiography", + -14.853161811828612 + ], + [ + "▁Pulsar", + -14.853163719177246 + ], + [ + "▁Stabilization", + -14.853168487548828 + ], + [ + "▁Interlocking", + -14.853169441223145 + ], + [ + "▁ROYAL", + -14.853172302246094 + ], + [ + "Cryptocurrencies", + -14.853175163269045 + ], + [ + "▁excision", + -14.853179931640623 + ], + [ + "▁Valletta", + -14.853181838989258 + ], + [ + "▁zebrafish", + -14.85318660736084 + ], + [ + "▁titration", + -14.853189468383787 + ], + [ + "▁concurred", + -14.853192329406738 + ], + [ + "▁Ulcer", + -14.853196144104004 + ], + [ + "▁monorail", + -14.85319709777832 + ], + [ + "▁spellbinding", + -14.85319709777832 + ], + [ + "▁Tillerson", + -14.853205680847168 + ], + [ + "▁Evangelism", + -14.8532075881958 + ], + [ + "▁WWDC", + -14.853226661682127 + ], + [ + "▁Nueva", + -14.853241920471191 + ], + [ + "▁Erlang", + -14.853253364562988 + ], + [ + "▁Schreiber", + -14.853263854980469 + ], + [ + "▁Balaji", + -14.853269577026367 + ], + [ + "▁Schofield", + -14.853277206420898 + ], + [ + "hauser", + -14.853283882141112 + ], + [ + "▁Bartender", + -14.85329818725586 + ], + [ + "▁Vesper", + -14.853309631347656 + ], + [ + "▁ashram", + -14.853314399719238 + ], + [ + "▁2011-2012", + -14.853320121765137 + ], + [ + "▁proctor", + -14.853323936462402 + ], + [ + "▁Rhinestone", + -14.853337287902832 + ], + [ + "▁Restless", + -14.85335922241211 + ], + [ + "▁Fairchild", + -14.853364944458008 + ], + [ + "▁rephrase", + -14.85341739654541 + ], + [ + "▁Tulsi", + -14.853423118591309 + ], + [ + "▁Duh", + -14.853428840637209 + ], + [ + "▁Picturesque", + -14.853450775146484 + ], + [ + "▁knotty", + -14.85345458984375 + ], + [ + "▁muttering", + -14.853501319885254 + ], + [ + "▁FACTS", + -14.853538513183594 + ], + [ + "▁trickery", + -14.85353946685791 + ], + [ + "▁piecemeal", + -14.853581428527832 + ], + [ + "▁crystallized", + -14.853620529174805 + ], + [ + "▁Deepwater", + -14.853633880615234 + ], + [ + "ranga", + -14.853635787963867 + ], + [ + "▁WIPO", + -14.853668212890623 + ], + [ + "▁coerced", + -14.85367488861084 + ], + [ + "▁Annika", + -14.85368824005127 + ], + [ + "▁algal", + -14.8537015914917 + ], + [ + "▁NDIS", + -14.853711128234863 + ], + [ + "▁zipcode", + -14.853721618652344 + ], + [ + "▁GIMP", + -14.853723526000977 + ], + [ + "▁Maddy", + -14.853754043579102 + ], + [ + "▁throwaway", + -14.853821754455566 + ], + [ + "▁Xander", + -14.853839874267578 + ], + [ + "▁DRAW", + -14.853894233703612 + ], + [ + "▁NuGet", + -14.853910446166992 + ], + [ + "▁2:2", + -14.85393524169922 + ], + [ + "▁Horus", + -14.853997230529783 + ], + [ + "▁twirling", + -14.854007720947266 + ], + [ + "▁postmarked", + -14.854063987731934 + ], + [ + "▁monotone", + -14.854068756103516 + ], + [ + "▁delineated", + -14.854135513305664 + ], + [ + "▁midsection", + -14.854144096374512 + ], + [ + "▁vaso", + -14.854215621948242 + ], + [ + "▁Rockport", + -14.854236602783203 + ], + [ + "graff", + -14.85435390472412 + ], 
+ [ + "▁Graceland", + -14.85440731048584 + ], + [ + "HARRIS", + -14.854451179504396 + ], + [ + "calf", + -14.854461669921877 + ], + [ + "▁chiseled", + -14.854498863220217 + ], + [ + "▁Legionella", + -14.85453987121582 + ], + [ + "▁BRAKE", + -14.854568481445312 + ], + [ + "▁stillbirth", + -14.854597091674805 + ], + [ + "▁SOLO", + -14.854618072509766 + ], + [ + "▁Lingo", + -14.85463523864746 + ], + [ + "▁mindlessly", + -14.854657173156738 + ], + [ + "atou", + -14.854698181152344 + ], + [ + "▁mourned", + -14.854734420776367 + ], + [ + "▁Dogwood", + -14.854798316955566 + ], + [ + "▁Receptionist", + -14.854832649230955 + ], + [ + "▁limped", + -14.854840278625488 + ], + [ + "▁Armored", + -14.854963302612305 + ], + [ + "2:22", + -14.85505199432373 + ], + [ + "Neat", + -14.855060577392578 + ], + [ + "▁harshness", + -14.855096817016602 + ], + [ + "2–3", + -14.85509967803955 + ], + [ + "Santo", + -14.855215072631836 + ], + [ + "▁Hiram", + -14.855222702026367 + ], + [ + "Burke", + -14.855223655700684 + ], + [ + "▁heiress", + -14.855236053466797 + ], + [ + "▁Bluewater", + -14.85527229309082 + ], + [ + "▁fumbled", + -14.855273246765137 + ], + [ + "▁correlat", + -14.855298042297363 + ], + [ + "▁technicalities", + -14.855299949645996 + ], + [ + "▁Staining", + -14.855412483215332 + ], + [ + "▁pagina", + -14.855459213256836 + ], + [ + "▁MyBB", + -14.855618476867676 + ], + [ + "▁singularly", + -14.85565185546875 + ], + [ + "▁figur", + -14.85568618774414 + ], + [ + "▁Kunda", + -14.85569190979004 + ], + [ + "▁Joon", + -14.855762481689451 + ], + [ + "▁breadboard", + -14.855762481689451 + ], + [ + "▁TCL", + -14.855836868286133 + ], + [ + "Telling", + -14.855899810791016 + ], + [ + "HSE", + -14.855979919433594 + ], + [ + "▁Padua", + -14.856009483337402 + ], + [ + "▁Maude", + -14.856051445007324 + ], + [ + "petition", + -14.85623550415039 + ], + [ + "▁Vara", + -14.856245040893556 + ], + [ + "9.2%", + -14.856271743774414 + ], + [ + "Mick", + -14.856374740600586 + ], + [ + "▁downplayed", + -14.856378555297852 + ], + [ + "▁overfishing", + -14.856428146362305 + ], + [ + "▁thundering", + -14.856536865234377 + ], + [ + "▁Hollie", + -14.856640815734863 + ], + [ + "▁UHS", + -14.856664657592772 + ], + [ + "xiang", + -14.856746673583984 + ], + [ + "Hebrew", + -14.856797218322754 + ], + [ + "▁resistive", + -14.856870651245115 + ], + [ + "▁Haida", + -14.856952667236328 + ], + [ + "NDR", + -14.856993675231934 + ], + [ + "▁Boland", + -14.85707664489746 + ], + [ + "▁Keg", + -14.857083320617676 + ], + [ + "98%", + -14.857091903686523 + ], + [ + "pica", + -14.857247352600098 + ], + [ + "▁underfunded", + -14.857275009155272 + ], + [ + "▁Elmore", + -14.85735321044922 + ], + [ + "aucus", + -14.857383728027344 + ], + [ + "▁Suv", + -14.857447624206545 + ], + [ + "▁Simba", + -14.857451438903809 + ], + [ + "GSM", + -14.857452392578123 + ], + [ + "OCH", + -14.857481956481934 + ], + [ + "ABAD", + -14.857497215270996 + ], + [ + "▁Bred", + -14.857599258422852 + ], + [ + "Thirteen", + -14.857644081115724 + ], + [ + "▁headteacher", + -14.857718467712402 + ], + [ + "stretched", + -14.857749938964844 + ], + [ + "communist", + -14.857832908630373 + ], + [ + "DOWN", + -14.857898712158203 + ], + [ + "▁McCr", + -14.857905387878418 + ], + [ + "Complaint", + -14.857927322387695 + ], + [ + "▁$87", + -14.857939720153809 + ], + [ + "defunct", + -14.857940673828123 + ], + [ + "Responsive", + -14.857954978942873 + ], + [ + "Engaging", + -14.857958793640137 + ], + [ + "Aerial", + -14.857965469360352 + ], + [ + "repellent", + -14.857967376708984 + ], + [ + 
"physician", + -14.857972145080566 + ], + [ + "Currency", + -14.857973098754885 + ], + [ + "Efficiency", + -14.8579740524292 + ], + [ + "porcelain", + -14.857980728149414 + ], + [ + "wrought", + -14.857980728149414 + ], + [ + "Amsterdam", + -14.85798454284668 + ], + [ + "enrollment", + -14.857985496520996 + ], + [ + "recognised", + -14.857986450195312 + ], + [ + "▁touchstone", + -14.857989311218262 + ], + [ + "Idea", + -14.857990264892578 + ], + [ + "ventilated", + -14.85799503326416 + ], + [ + "Griffin", + -14.85800075531006 + ], + [ + "Idaho", + -14.858001708984377 + ], + [ + "implementation", + -14.858004570007324 + ], + [ + "Pepper", + -14.858051300048828 + ], + [ + "▁evangelistic", + -14.858074188232422 + ], + [ + "Forecast", + -14.858125686645508 + ], + [ + "cyclist", + -14.858150482177734 + ], + [ + "▁stimulator", + -14.858203887939451 + ], + [ + "▁Potion", + -14.858227729797363 + ], + [ + "Header", + -14.858316421508787 + ], + [ + "▁overcooked", + -14.858375549316406 + ], + [ + "calibrate", + -14.858379364013672 + ], + [ + "Sift", + -14.858457565307615 + ], + [ + "Synth", + -14.858492851257324 + ], + [ + "▁Erb", + -14.85862922668457 + ], + [ + "CFR", + -14.858660697937012 + ], + [ + "egner", + -14.858720779418944 + ], + [ + "▁Ulla", + -14.858739852905272 + ], + [ + "▁Paloma", + -14.85874366760254 + ], + [ + "BCL", + -14.85875129699707 + ], + [ + "16-0", + -14.8587646484375 + ], + [ + "burton", + -14.858821868896484 + ], + [ + "consulting", + -14.85891819000244 + ], + [ + "Foam", + -14.858924865722656 + ], + [ + "5-28", + -14.858945846557615 + ], + [ + "lvey", + -14.858957290649414 + ], + [ + "aigh", + -14.858959197998049 + ], + [ + "Retrieved", + -14.85898208618164 + ], + [ + "▁PWC", + -14.859047889709473 + ], + [ + "▁Whitefield", + -14.859128952026367 + ], + [ + "▁Hough", + -14.85914707183838 + ], + [ + "Raised", + -14.85926342010498 + ], + [ + "Hanging", + -14.859416007995604 + ], + [ + "▁Yung", + -14.859472274780272 + ], + [ + "▁Nationalist", + -14.859514236450195 + ], + [ + "▁Manju", + -14.85956573486328 + ], + [ + "Relating", + -14.859661102294922 + ], + [ + "chord", + -14.859695434570312 + ], + [ + "▁CHAT", + -14.859850883483888 + ], + [ + "Direction", + -14.859972953796388 + ], + [ + "ARRA", + -14.860030174255373 + ], + [ + "annu", + -14.860074996948242 + ], + [ + "Flush", + -14.860154151916504 + ], + [ + "▁MUR", + -14.860203742980955 + ], + [ + "FAP", + -14.860206604003906 + ], + [ + "1931", + -14.860269546508787 + ], + [ + "▁intercede", + -14.860398292541504 + ], + [ + "▁ACER", + -14.860601425170898 + ], + [ + "Rivi", + -14.860675811767578 + ], + [ + "▁Mose", + -14.860713958740234 + ], + [ + "▁Emme", + -14.860733032226562 + ], + [ + "circa", + -14.86091136932373 + ], + [ + "NCL", + -14.861019134521484 + ], + [ + "▁AERO", + -14.861072540283203 + ], + [ + "▁PRESIDENT", + -14.86108684539795 + ], + [ + "▁Rother", + -14.861140251159668 + ], + [ + "▁WMU", + -14.861186981201172 + ], + [ + "nivers", + -14.861248016357422 + ], + [ + "Loft", + -14.861248970031738 + ], + [ + "▁nosy", + -14.861287117004396 + ], + [ + "1:04", + -14.86148166656494 + ], + [ + "1.99", + -14.861526489257812 + ], + [ + "▁JMS", + -14.86159324645996 + ], + [ + "–30", + -14.861671447753906 + ], + [ + "▁CCL", + -14.861946105957031 + ], + [ + "▁Syriac", + -14.862100601196287 + ], + [ + "FIR", + -14.862143516540527 + ], + [ + "▁Eros", + -14.862295150756836 + ], + [ + "▁assassinate", + -14.862347602844238 + ], + [ + "Eau", + -14.862353324890137 + ], + [ + "ggan", + -14.862361907958984 + ], + [ + "▁flail", + 
-14.862366676330566 + ], + [ + "▁Amina", + -14.86246395111084 + ], + [ + "▁Abandon", + -14.862494468688965 + ], + [ + "beek", + -14.86252498626709 + ], + [ + "CGI", + -14.862608909606934 + ], + [ + "Ranch", + -14.86264991760254 + ], + [ + "ekh", + -14.862753868103027 + ], + [ + "ocr", + -14.862828254699709 + ], + [ + "Scaffold", + -14.862839698791504 + ], + [ + "Fenerbah", + -14.862850189208984 + ], + [ + "Astronomers", + -14.8628511428833 + ], + [ + "Brilliance", + -14.8628511428833 + ], + [ + "deprecating", + -14.8628511428833 + ], + [ + "▁Ashtanga", + -14.8628511428833 + ], + [ + "▁Brattleboro", + -14.8628511428833 + ], + [ + "▁Capistrano", + -14.8628511428833 + ], + [ + "▁Drosophila", + -14.8628511428833 + ], + [ + "▁Garibaldi", + -14.8628511428833 + ], + [ + "▁Guaranty", + -14.8628511428833 + ], + [ + "▁Kerberos", + -14.8628511428833 + ], + [ + "▁Mockingbird", + -14.8628511428833 + ], + [ + "▁OPPORTUNITY", + -14.8628511428833 + ], + [ + "▁Propaganda", + -14.8628511428833 + ], + [ + "▁Receivable", + -14.8628511428833 + ], + [ + "▁Rejuvenation", + -14.8628511428833 + ], + [ + "▁SCHEDULE", + -14.8628511428833 + ], + [ + "▁angiogenesis", + -14.8628511428833 + ], + [ + "▁benevolence", + -14.8628511428833 + ], + [ + "▁mascarpone", + -14.8628511428833 + ], + [ + "▁monetizing", + -14.8628511428833 + ], + [ + "▁narcissism", + -14.8628511428833 + ], + [ + "▁phylogenetic", + -14.8628511428833 + ], + [ + "▁premeditated", + -14.8628511428833 + ], + [ + "▁prophesied", + -14.8628511428833 + ], + [ + "▁rehabilitating", + -14.8628511428833 + ], + [ + "▁salicylic", + -14.8628511428833 + ], + [ + "▁telomere", + -14.8628511428833 + ], + [ + "▁throttling", + -14.8628511428833 + ], + [ + "▁unsalted", + -14.8628511428833 + ], + [ + "▁zeitgeist", + -14.8628511428833 + ], + [ + "Corresponding", + -14.862852096557615 + ], + [ + "▁REPRESENTATIONS", + -14.862852096557615 + ], + [ + "Glucosamine", + -14.862853050231934 + ], + [ + "Negotiate", + -14.862853050231934 + ], + [ + "▁Calendula", + -14.862853050231934 + ], + [ + "▁Pancras", + -14.862853050231934 + ], + [ + "vaunted", + -14.86285400390625 + ], + [ + "▁Messianic", + -14.86285400390625 + ], + [ + "▁Ricciardo", + -14.86285400390625 + ], + [ + "▁ligature", + -14.86285400390625 + ], + [ + "▁Glamorous", + -14.862854957580566 + ], + [ + "▁Krypton", + -14.862854957580566 + ], + [ + "▁Aptitude", + -14.8628568649292 + ], + [ + "▁Jermaine", + -14.8628568649292 + ], + [ + "▁immemorial", + -14.862857818603516 + ], + [ + "▁delirium", + -14.862858772277832 + ], + [ + "▁Tapestry", + -14.862859725952148 + ], + [ + "▁aquamarine", + -14.862862586975098 + ], + [ + "▁Corrigan", + -14.862863540649414 + ], + [ + "▁wispy", + -14.862865447998049 + ], + [ + "▁chromatin", + -14.862866401672363 + ], + [ + "▁AntiVirus", + -14.86286735534668 + ], + [ + "▁Bristow", + -14.862868309020996 + ], + [ + "▁Reportedly", + -14.862870216369627 + ], + [ + "▁holiest", + -14.862872123718262 + ], + [ + "▁buddha", + -14.862873077392578 + ], + [ + "▁imperil", + -14.862874984741213 + ], + [ + "▁hyperbolic", + -14.862876892089844 + ], + [ + "▁Meizu", + -14.862882614135742 + ], + [ + "▁Solitude", + -14.862882614135742 + ], + [ + "▁Elmhurst", + -14.86288356781006 + ], + [ + "▁Meteorology", + -14.862884521484377 + ], + [ + "▁Llewellyn", + -14.862886428833008 + ], + [ + "▁TNPSC", + -14.862886428833008 + ], + [ + "▁Naidu", + -14.862889289855955 + ], + [ + "▁Picasa", + -14.862890243530272 + ], + [ + "▁Paralegal", + -14.862896919250488 + ], + [ + "▁Konrad", + -14.862897872924805 + ], + [ + "hawaii", + 
-14.862900733947754 + ], + [ + "odiatric", + -14.862902641296388 + ], + [ + "▁Educated", + -14.862909317016602 + ], + [ + "▁Medtronic", + -14.862911224365234 + ], + [ + "▁Balmain", + -14.862913131713867 + ], + [ + "▁modulating", + -14.862913131713867 + ], + [ + "▁Fendi", + -14.862918853759766 + ], + [ + "▁divisible", + -14.862924575805664 + ], + [ + "▁Moulton", + -14.86292839050293 + ], + [ + "▁Mousse", + -14.862933158874512 + ], + [ + "▁Purification", + -14.862934112548828 + ], + [ + "▁camellia", + -14.862936973571776 + ], + [ + "yikes", + -14.862940788269045 + ], + [ + "▁Pesach", + -14.862944602966309 + ], + [ + "▁Eggplant", + -14.862971305847168 + ], + [ + "____", + -14.862994194030762 + ], + [ + "▁Lexicon", + -14.863027572631836 + ], + [ + "▁Missionaries", + -14.863030433654783 + ], + [ + "▁TOUCH", + -14.863037109375 + ], + [ + "▁Brentford", + -14.863041877746582 + ], + [ + "▁cartography", + -14.863048553466797 + ], + [ + "▁regalia", + -14.86305809020996 + ], + [ + "▁fretboard", + -14.863059043884276 + ], + [ + "▁Rop", + -14.86310863494873 + ], + [ + "▁muttered", + -14.863134384155272 + ], + [ + "mixing", + -14.863178253173828 + ], + [ + "▁singularity", + -14.8632173538208 + ], + [ + "nault", + -14.863243103027344 + ], + [ + "Cure", + -14.86325740814209 + ], + [ + "▁PRIDE", + -14.86326503753662 + ], + [ + ":10.11", + -14.863393783569336 + ], + [ + "▁cowgirl", + -14.86342430114746 + ], + [ + "▁CSX", + -14.863452911376951 + ], + [ + "▁Pao", + -14.863470077514648 + ], + [ + "flesh", + -14.863485336303713 + ], + [ + "▁Adaptor", + -14.86348819732666 + ], + [ + "▁Drury", + -14.863490104675291 + ], + [ + "▁pretrial", + -14.863516807556152 + ], + [ + "Cosmo", + -14.863519668579102 + ], + [ + "▁attentiveness", + -14.863526344299316 + ], + [ + "Retinol", + -14.863565444946287 + ], + [ + "▁Avanti", + -14.86361312866211 + ], + [ + "▁978-1-", + -14.863625526428224 + ], + [ + "trauma", + -14.863648414611816 + ], + [ + "▁Biafra", + -14.863649368286133 + ], + [ + "▁smallpox", + -14.863655090332031 + ], + [ + "▁slashes", + -14.863662719726562 + ], + [ + "▁Klingon", + -14.863677024841309 + ], + [ + "▁SEVEN", + -14.863699913024902 + ], + [ + "▁poach", + -14.863701820373535 + ], + [ + "▁ripeness", + -14.863743782043455 + ], + [ + "▁Naim", + -14.86378574371338 + ], + [ + "weigh", + -14.863821029663086 + ], + [ + "▁benedict", + -14.863851547241213 + ], + [ + "▁Sassy", + -14.863879203796388 + ], + [ + "▁PVR", + -14.863931655883787 + ], + [ + "Kale", + -14.86404800415039 + ], + [ + "Salon", + -14.86414623260498 + ], + [ + "▁ZFS", + -14.864195823669434 + ], + [ + "▁dispatches", + -14.864251136779783 + ], + [ + "▁Ramada", + -14.864269256591797 + ], + [ + "▁Esports", + -14.864339828491213 + ], + [ + "▁$850", + -14.864381790161133 + ], + [ + "▁Louver", + -14.864388465881348 + ], + [ + "▁Starkey", + -14.864398956298828 + ], + [ + "▁Pennant", + -14.864419937133787 + ], + [ + "▁practicalities", + -14.864435195922852 + ], + [ + "▁yardstick", + -14.864538192749023 + ], + [ + "▁Maxime", + -14.864587783813477 + ], + [ + "▁Caden", + -14.86463451385498 + ], + [ + "enius", + -14.864652633666992 + ], + [ + "▁Issuer", + -14.86472988128662 + ], + [ + "▁400%", + -14.864788055419922 + ], + [ + "Compressing", + -14.864790916442873 + ], + [ + "▁Lucien", + -14.864968299865724 + ], + [ + "▁Kunz", + -14.865004539489746 + ], + [ + "▁Mohd", + -14.865029335021973 + ], + [ + "ена", + -14.865077018737791 + ], + [ + "▁panko", + -14.86509132385254 + ], + [ + "▁painlessly", + -14.865124702453612 + ], + [ + "▁Boyz", + -14.86512851715088 + ], 
+ [ + "ollah", + -14.86513328552246 + ], + [ + "▁fittest", + -14.865175247192385 + ], + [ + "Animator", + -14.865270614624023 + ], + [ + "1958", + -14.86527156829834 + ], + [ + "▁starkly", + -14.865363121032717 + ], + [ + "▁cipro", + -14.865397453308104 + ], + [ + "▁aggressiveness", + -14.865452766418455 + ], + [ + "▁Ihre", + -14.86557674407959 + ], + [ + "▁DBT", + -14.865880012512209 + ], + [ + "▁Hoya", + -14.86599063873291 + ], + [ + "▁$0.50", + -14.866011619567873 + ], + [ + "rken", + -14.86607551574707 + ], + [ + "▁Midas", + -14.86610984802246 + ], + [ + "hrend", + -14.866280555725098 + ], + [ + "▁Crepe", + -14.866339683532717 + ], + [ + "▁Princesses", + -14.86641788482666 + ], + [ + "▁balding", + -14.86656379699707 + ], + [ + "atkins", + -14.86666202545166 + ], + [ + "elmann", + -14.866830825805664 + ], + [ + "▁1730", + -14.8668794631958 + ], + [ + "ALLEN", + -14.866910934448242 + ], + [ + "fruct", + -14.867022514343262 + ], + [ + "DEQ", + -14.867026329040527 + ], + [ + "NPS", + -14.86703395843506 + ], + [ + "▁Raghu", + -14.867091178894045 + ], + [ + "▁aspired", + -14.86711597442627 + ], + [ + "▁pipette", + -14.86713695526123 + ], + [ + "▁Maza", + -14.867164611816406 + ], + [ + "▁Waterhouse", + -14.867237091064451 + ], + [ + "▁Hourly", + -14.867283821105955 + ], + [ + "laughter", + -14.867301940917969 + ], + [ + "browse", + -14.867424011230469 + ], + [ + "▁showrunner", + -14.86749267578125 + ], + [ + "▁Docking", + -14.867520332336426 + ], + [ + "▁Measured", + -14.867531776428224 + ], + [ + "replica", + -14.867563247680664 + ], + [ + "schule", + -14.867753028869627 + ], + [ + "▁elaborately", + -14.86779499053955 + ], + [ + "▁Taser", + -14.86781883239746 + ], + [ + "NST", + -14.867826461791992 + ], + [ + "HAY", + -14.867833137512209 + ], + [ + "▁Haka", + -14.867847442626951 + ], + [ + "Substitute", + -14.867918968200684 + ], + [ + "Assignment", + -14.867934226989746 + ], + [ + "Transparency", + -14.867959022521973 + ], + [ + "molecule", + -14.867959022521973 + ], + [ + "Exodus", + -14.867959976196287 + ], + [ + "Anxiety", + -14.867962837219238 + ], + [ + "Elephant", + -14.867964744567873 + ], + [ + "Jasmine", + -14.867966651916504 + ], + [ + "Punjab", + -14.867977142333984 + ], + [ + "simplified", + -14.867981910705566 + ], + [ + "Sneak", + -14.867990493774414 + ], + [ + "countries", + -14.867995262145996 + ], + [ + "understood", + -14.867996215820312 + ], + [ + "▁mung", + -14.867996215820312 + ], + [ + "exhaust", + -14.867997169494627 + ], + [ + "gritty", + -14.867998123168944 + ], + [ + "sacred", + -14.868019104003906 + ], + [ + "calcium", + -14.868029594421388 + ], + [ + "Vocal", + -14.868030548095703 + ], + [ + "▁$1,7", + -14.868037223815918 + ], + [ + "hobby", + -14.868075370788574 + ], + [ + "deficient", + -14.86808967590332 + ], + [ + "clutch", + -14.868090629577637 + ], + [ + "Excessive", + -14.8681001663208 + ], + [ + "Sheriff", + -14.868112564086914 + ], + [ + "▁Wray", + -14.868120193481444 + ], + [ + "commitment", + -14.868136405944824 + ], + [ + "BIOS", + -14.868168830871582 + ], + [ + "Coastal", + -14.868203163146973 + ], + [ + "Xchange", + -14.868230819702148 + ], + [ + "▁wilful", + -14.868244171142578 + ], + [ + "muscular", + -14.86824893951416 + ], + [ + "▁airship", + -14.868264198303224 + ], + [ + "Independence", + -14.868277549743652 + ], + [ + "▁Chok", + -14.868285179138184 + ], + [ + "▁CLASSIC", + -14.868308067321776 + ], + [ + "Conversion", + -14.868337631225586 + ], + [ + "▁Charged", + -14.86836051940918 + ], + [ + "centro", + -14.86838722229004 + ], + [ + "olani", 
+ -14.868389129638672 + ], + [ + "7-17", + -14.868407249450684 + ], + [ + "CVS", + -14.868447303771973 + ], + [ + "▁0.18", + -14.868447303771973 + ], + [ + "▁bypasses", + -14.86856174468994 + ], + [ + "Bravo", + -14.868653297424316 + ], + [ + "freak", + -14.868700981140137 + ], + [ + "▁Hoog", + -14.86872673034668 + ], + [ + "LSC", + -14.868749618530272 + ], + [ + "separated", + -14.868751525878906 + ], + [ + "organizer", + -14.868797302246094 + ], + [ + "▁Winemaker", + -14.868844032287598 + ], + [ + "glasses", + -14.868865966796877 + ], + [ + "Focuses", + -14.86900806427002 + ], + [ + "3:27", + -14.869009017944336 + ], + [ + "Stereo", + -14.869240760803224 + ], + [ + "technique", + -14.869439125061035 + ], + [ + "ikko", + -14.869441032409668 + ], + [ + "▁$98", + -14.86944580078125 + ], + [ + "▁soooooo", + -14.869545936584473 + ], + [ + "▁loca", + -14.86956024169922 + ], + [ + "▁viper", + -14.869643211364746 + ], + [ + "izio", + -14.869656562805176 + ], + [ + "prospect", + -14.86968231201172 + ], + [ + "Rivet", + -14.869769096374512 + ], + [ + "▁Bamberg", + -14.869906425476074 + ], + [ + "Refine", + -14.869954109191896 + ], + [ + "matik", + -14.87002944946289 + ], + [ + "lobed", + -14.870030403137209 + ], + [ + "AMG", + -14.870035171508787 + ], + [ + "Neh", + -14.870054244995115 + ], + [ + "▁gmc", + -14.8700590133667 + ], + [ + "▁tradi", + -14.870076179504396 + ], + [ + "OOSE", + -14.870077133178713 + ], + [ + "Azhar", + -14.870078086853027 + ], + [ + "artistic", + -14.870080947875977 + ], + [ + "quam", + -14.870144844055176 + ], + [ + "Bail", + -14.870152473449709 + ], + [ + "▁commonest", + -14.870173454284668 + ], + [ + "RHA", + -14.870180130004885 + ], + [ + "contaminated", + -14.870244026184082 + ], + [ + "▁peerless", + -14.87032699584961 + ], + [ + "nitro", + -14.870341300964355 + ], + [ + "queue", + -14.870370864868164 + ], + [ + "▁Wahoo", + -14.870407104492188 + ], + [ + "▁ALLOW", + -14.870502471923828 + ], + [ + "▁Roam", + -14.870522499084473 + ], + [ + "▁Sokol", + -14.870555877685549 + ], + [ + "catalyzed", + -14.870558738708496 + ], + [ + "cuz", + -14.87062168121338 + ], + [ + "WPC", + -14.870750427246094 + ], + [ + "▁fantasize", + -14.87087345123291 + ], + [ + "▁pajama", + -14.870933532714844 + ], + [ + "1:47", + -14.870953559875488 + ], + [ + "▁overcook", + -14.87099266052246 + ], + [ + "▁bathrobe", + -14.871024131774902 + ], + [ + "▁Melee", + -14.871036529541016 + ], + [ + "caption", + -14.871170997619627 + ], + [ + "▁protectionism", + -14.871179580688477 + ], + [ + "▁MDX", + -14.871183395385742 + ], + [ + "▁vitr", + -14.871220588684082 + ], + [ + "▁Gouda", + -14.871323585510254 + ], + [ + "GCS", + -14.871362686157228 + ], + [ + "▁3730", + -14.871376991271973 + ], + [ + "▁madam", + -14.871710777282717 + ], + [ + "4/5", + -14.871803283691406 + ], + [ + "▁02:3", + -14.87184715270996 + ], + [ + "nava", + -14.871867179870604 + ], + [ + "▁04:1", + -14.87189483642578 + ], + [ + "2030", + -14.872014999389648 + ], + [ + "▁humiliate", + -14.872053146362305 + ], + [ + "▁acclimate", + -14.872097969055176 + ], + [ + "▁$71", + -14.872329711914062 + ], + [ + "ubmersible", + -14.872376441955566 + ], + [ + "pital", + -14.872422218322754 + ], + [ + "9.4%", + -14.872430801391602 + ], + [ + "utte", + -14.872446060180664 + ], + [ + "▁Peppa", + -14.872450828552246 + ], + [ + "▁bookseller", + -14.872512817382812 + ], + [ + "83%", + -14.87252426147461 + ], + [ + "▁Parsi", + -14.87259006500244 + ], + [ + "RSR", + -14.872604370117188 + ], + [ + "BEIJING", + -14.87265396118164 + ], + [ + "arguable", + 
-14.872654914855955 + ], + [ + "disambiguation", + -14.872654914855955 + ], + [ + "▁Actuator", + -14.872654914855955 + ], + [ + "▁Camberwell", + -14.872654914855955 + ], + [ + "▁Culpeper", + -14.872654914855955 + ], + [ + "▁Diocesan", + -14.872654914855955 + ], + [ + "▁Enneagram", + -14.872654914855955 + ], + [ + "▁Kottayam", + -14.872654914855955 + ], + [ + "▁Metallurgy", + -14.872654914855955 + ], + [ + "▁Orpington", + -14.872654914855955 + ], + [ + "▁Pocahontas", + -14.872654914855955 + ], + [ + "▁SUPPLY", + -14.872654914855955 + ], + [ + "▁Scarecrow", + -14.872654914855955 + ], + [ + "▁accompanist", + -14.872654914855955 + ], + [ + "▁crayfish", + -14.872654914855955 + ], + [ + "▁elastane", + -14.872654914855955 + ], + [ + "▁myofascial", + -14.872654914855955 + ], + [ + "▁scribbling", + -14.872654914855955 + ], + [ + "▁sturgeon", + -14.872654914855955 + ], + [ + "▁tufting", + -14.872654914855955 + ], + [ + "▁rezoning", + -14.872655868530272 + ], + [ + "▁Bianchi", + -14.872657775878906 + ], + [ + "▁Chiswick", + -14.872657775878906 + ], + [ + "▁Coupling", + -14.872657775878906 + ], + [ + "▁Glamorgan", + -14.872657775878906 + ], + [ + "▁frisbee", + -14.872657775878906 + ], + [ + "▁ratepayers", + -14.872657775878906 + ], + [ + "baar", + -14.872658729553224 + ], + [ + "▁Sommelier", + -14.872658729553224 + ], + [ + "▁Veracruz", + -14.872658729553224 + ], + [ + "▁MASSIVE", + -14.87265968322754 + ], + [ + "▁Maccabi", + -14.87265968322754 + ], + [ + "▁aspirated", + -14.872660636901855 + ], + [ + "▁bramble", + -14.872663497924805 + ], + [ + "Easiest", + -14.872665405273438 + ], + [ + "▁Borealis", + -14.872666358947754 + ], + [ + "▁Rangoon", + -14.872666358947754 + ], + [ + "▁Ralston", + -14.87266731262207 + ], + [ + "▁Apnea", + -14.872674942016602 + ], + [ + "▁Murdock", + -14.872682571411133 + ], + [ + "▁Anambra", + -14.87268352508545 + ], + [ + "▁Eduard", + -14.872688293457031 + ], + [ + "▁Tropicana", + -14.872689247131348 + ], + [ + "▁Delilah", + -14.872693061828612 + ], + [ + "▁Rawlings", + -14.872699737548828 + ], + [ + "▁(1969)", + -14.87270736694336 + ], + [ + "▁Quetta", + -14.87271785736084 + ], + [ + "▁misinformed", + -14.872718811035156 + ], + [ + "▁Citrate", + -14.872723579406738 + ], + [ + "▁Melania", + -14.872729301452637 + ], + [ + "▁Sinhala", + -14.872746467590332 + ], + [ + "▁unsteady", + -14.872747421264648 + ], + [ + "▁Offaly", + -14.872751235961914 + ], + [ + "wazi", + -14.872754096984863 + ], + [ + "▁Quer", + -14.872756004333496 + ], + [ + "▁Pewter", + -14.872760772705078 + ], + [ + "Rash", + -14.872761726379396 + ], + [ + "Nurturing", + -14.872764587402344 + ], + [ + "▁tobago", + -14.872764587402344 + ], + [ + "▁Fredericton", + -14.872773170471191 + ], + [ + "Combat", + -14.872791290283203 + ], + [ + "▁Nurseries", + -14.872810363769531 + ], + [ + "▁Pinned", + -14.872812271118164 + ], + [ + "▁Offense", + -14.87281322479248 + ], + [ + "▁bobble", + -14.87281322479248 + ], + [ + "▁Captive", + -14.872814178466797 + ], + [ + "Specially", + -14.872819900512695 + ], + [ + "▁urology", + -14.87282371520996 + ], + [ + "▁shoestring", + -14.872825622558594 + ], + [ + "▁Tierney", + -14.872828483581545 + ], + [ + "▁expeditiously", + -14.872838020324709 + ], + [ + "CRYPT", + -14.872845649719238 + ], + [ + "▁metrology", + -14.872847557067873 + ], + [ + "▁Emi", + -14.87285614013672 + ], + [ + "▁Bellaire", + -14.872879981994627 + ], + [ + "Ordering", + -14.87289047241211 + ], + [ + "Bonjour", + -14.872901916503906 + ], + [ + "treme", + -14.872940063476562 + ], + [ + "-08-2018", + 
-14.872946739196776 + ], + [ + "▁gunmetal", + -14.87295150756836 + ], + [ + "▁Sarge", + -14.873056411743164 + ], + [ + "▁JIRA", + -14.873064041137695 + ], + [ + "▁chairlift", + -14.873101234436035 + ], + [ + "EACH", + -14.873102188110352 + ], + [ + "▁Telef", + -14.873125076293944 + ], + [ + "▁Kasey", + -14.873170852661133 + ], + [ + "▁Biddle", + -14.873220443725586 + ], + [ + "▁actualize", + -14.873224258422852 + ], + [ + "▁drugged", + -14.873231887817385 + ], + [ + "▁McGar", + -14.87327003479004 + ], + [ + "▁speakeasy", + -14.873291015625 + ], + [ + "▁RHEL", + -14.873335838317873 + ], + [ + "▁Burley", + -14.873343467712402 + ], + [ + "▁statins", + -14.873353004455566 + ], + [ + "▁hedgerow", + -14.873353958129885 + ], + [ + "▁scoffed", + -14.873360633850098 + ], + [ + "▁rekindled", + -14.873376846313477 + ], + [ + "▁Thule", + -14.87341022491455 + ], + [ + "Compiled", + -14.87342643737793 + ], + [ + "▁firepit", + -14.873443603515623 + ], + [ + "▁overthrown", + -14.873486518859863 + ], + [ + "▁Keychain", + -14.873533248901367 + ], + [ + "▁recordkeeping", + -14.873601913452148 + ], + [ + "▁Eisenberg", + -14.873607635498049 + ], + [ + "▁Haney", + -14.873661041259766 + ], + [ + "▁uppermost", + -14.873712539672852 + ], + [ + "▁keno", + -14.873786926269531 + ], + [ + "ECM", + -14.873808860778809 + ], + [ + "▁Goya", + -14.87386703491211 + ], + [ + "COAT", + -14.873891830444336 + ], + [ + "▁overstuffed", + -14.873899459838867 + ], + [ + "RYT", + -14.873919486999512 + ], + [ + "▁TRAILER", + -14.873929977416992 + ], + [ + "▁PALM", + -14.873978614807127 + ], + [ + "▁Spel", + -14.873994827270508 + ], + [ + "legacy", + -14.874104499816896 + ], + [ + "▁sorrowful", + -14.874126434326172 + ], + [ + "pride", + -14.87421989440918 + ], + [ + "▁fangs", + -14.874238967895508 + ], + [ + "▁karting", + -14.87435817718506 + ], + [ + "▁featurette", + -14.874372482299805 + ], + [ + "CRN", + -14.874378204345703 + ], + [ + "▁Bookshop", + -14.874395370483398 + ], + [ + "Bartolo", + -14.874442100524902 + ], + [ + "▁Chipset", + -14.87445068359375 + ], + [ + "2:08", + -14.874455451965332 + ], + [ + "▁Bloor", + -14.874499320983888 + ], + [ + "▁nontoxic", + -14.87452793121338 + ], + [ + "▁2009-10", + -14.874574661254885 + ], + [ + "▁erupting", + -14.8745756149292 + ], + [ + "▁Samu", + -14.874689102172852 + ], + [ + "▁taunting", + -14.874773979187012 + ], + [ + "▁Spies", + -14.874798774719238 + ], + [ + "▁Marius", + -14.874869346618652 + ], + [ + "▁Mockup", + -14.87487506866455 + ], + [ + "odium", + -14.874885559082031 + ], + [ + "mention", + -14.875078201293944 + ], + [ + "▁Arranged", + -14.87517261505127 + ], + [ + "2070", + -14.87525749206543 + ], + [ + "▁Wealthy", + -14.875269889831545 + ], + [ + "pharmacologic", + -14.875306129455566 + ], + [ + "▁lye", + -14.875328063964844 + ], + [ + "▁halwa", + -14.87536907196045 + ], + [ + "▁credentialing", + -14.875375747680664 + ], + [ + "▁Mogh", + -14.875398635864258 + ], + [ + "▁Karna", + -14.875401496887209 + ], + [ + "Nate", + -14.875411033630373 + ], + [ + "1918", + -14.875462532043455 + ], + [ + "▁0.10", + -14.875540733337402 + ], + [ + "▁Nasa", + -14.875572204589844 + ], + [ + "yala", + -14.875612258911133 + ], + [ + "▁0-60", + -14.875691413879396 + ], + [ + "Vee", + -14.875692367553713 + ], + [ + "Kul", + -14.875873565673828 + ], + [ + "CAMP", + -14.875936508178713 + ], + [ + "▁Vitae", + -14.8760347366333 + ], + [ + "Neg", + -14.876097679138184 + ], + [ + "YAY", + -14.87612247467041 + ], + [ + "▁Alexia", + -14.876421928405762 + ], + [ + "Alene", + -14.876423835754396 + ], + 
[ + "▁Olin", + -14.876425743103027 + ], + [ + "nsw", + -14.876495361328123 + ], + [ + "▁underpaid", + -14.876547813415527 + ], + [ + "Representative", + -14.876644134521484 + ], + [ + "stupid", + -14.87673568725586 + ], + [ + "THY", + -14.87695598602295 + ], + [ + "▁£24", + -14.877047538757324 + ], + [ + "1/18", + -14.877052307128906 + ], + [ + "▁Misch", + -14.877059936523438 + ], + [ + "▁BANG", + -14.877063751220703 + ], + [ + "▁LFO", + -14.877081871032717 + ], + [ + "coke", + -14.87714385986328 + ], + [ + "ulen", + -14.877151489257812 + ], + [ + "NEE", + -14.87718677520752 + ], + [ + "▁zep", + -14.877270698547363 + ], + [ + "▁Barring", + -14.877317428588867 + ], + [ + "▁mustered", + -14.877321243286133 + ], + [ + "zap", + -14.877447128295898 + ], + [ + "1.65", + -14.877455711364746 + ], + [ + "▁Slap", + -14.87746524810791 + ], + [ + "▁Kaza", + -14.87749195098877 + ], + [ + "▁Salic", + -14.877640724182127 + ], + [ + "▁sincerest", + -14.877645492553713 + ], + [ + "▁Affiliated", + -14.87769603729248 + ], + [ + "▁generalised", + -14.877766609191896 + ], + [ + "9-22", + -14.877795219421388 + ], + [ + "1926", + -14.877802848815918 + ], + [ + "esner", + -14.877901077270508 + ], + [ + "▁NMC", + -14.877922058105469 + ], + [ + "Desire", + -14.87793254852295 + ], + [ + "Bicycle", + -14.87797737121582 + ], + [ + "▁Patil", + -14.877988815307615 + ], + [ + "Celebration", + -14.878066062927246 + ], + [ + "participant", + -14.878071784973145 + ], + [ + "departure", + -14.87807273864746 + ], + [ + "Collaborative", + -14.878073692321776 + ], + [ + "Splash", + -14.878079414367676 + ], + [ + "Cricket", + -14.878080368041992 + ], + [ + "Philosophy", + -14.878081321716309 + ], + [ + "Minneapolis", + -14.87808322906494 + ], + [ + "diagnostic", + -14.87808322906494 + ], + [ + "Puerto", + -14.878085136413574 + ], + [ + "Netherlands", + -14.878087997436523 + ], + [ + "Edinburgh", + -14.878096580505373 + ], + [ + "Enrollment", + -14.878103256225586 + ], + [ + "▁TMA", + -14.878106117248535 + ], + [ + "suspect", + -14.878108978271484 + ], + [ + "NPR", + -14.878144264221191 + ], + [ + "Removal", + -14.878204345703123 + ], + [ + "Voters", + -14.878230094909668 + ], + [ + "Breed", + -14.87831211090088 + ], + [ + "Riley", + -14.878332138061523 + ], + [ + "▁WCC", + -14.878337860107422 + ], + [ + "Failed", + -14.878377914428713 + ], + [ + "Danish", + -14.878408432006836 + ], + [ + "menstrual", + -14.87842845916748 + ], + [ + "Powell", + -14.878482818603516 + ], + [ + "Becky", + -14.878534317016602 + ], + [ + "▁MTP", + -14.878615379333496 + ], + [ + "▁Deir", + -14.878620147705078 + ], + [ + "▁Pancha", + -14.878687858581545 + ], + [ + "Crop", + -14.878721237182615 + ], + [ + "▁screech", + -14.878735542297363 + ], + [ + "repeated", + -14.878792762756348 + ], + [ + "▁stupidly", + -14.878795623779297 + ], + [ + "Boast", + -14.87887954711914 + ], + [ + "processes", + -14.878914833068848 + ], + [ + "▁Recycler", + -14.878915786743164 + ], + [ + "▁Sentra", + -14.878923416137695 + ], + [ + "▁critiqued", + -14.87895679473877 + ], + [ + "Handler", + -14.879054069519045 + ], + [ + "establishing", + -14.879096031188965 + ], + [ + "▁hv", + -14.879117012023926 + ], + [ + "▁SIB", + -14.879308700561523 + ], + [ + "kerk", + -14.879310607910156 + ], + [ + "Principle", + -14.87939739227295 + ], + [ + "▁Concur", + -14.879406929016112 + ], + [ + "Colonel", + -14.879413604736328 + ], + [ + "▁peeked", + -14.879414558410645 + ], + [ + "▁Cornel", + -14.87944221496582 + ], + [ + "▁boathouse", + -14.879488945007324 + ], + [ + "▁karst", + 
-14.879549026489258 + ], + [ + "Smash", + -14.879591941833496 + ], + [ + "Deutsche", + -14.87971019744873 + ], + [ + "▁Coda", + -14.879731178283691 + ], + [ + "▁BOLD", + -14.879765510559082 + ], + [ + "Respondents", + -14.879780769348145 + ], + [ + "▁Polycom", + -14.879786491394045 + ], + [ + "▁Catal", + -14.879825592041016 + ], + [ + "▁gibb", + -14.879830360412598 + ], + [ + "quiring", + -14.879839897155762 + ], + [ + "▁DSi", + -14.879921913146973 + ], + [ + "Collecting", + -14.879976272583008 + ], + [ + "▁Nasi", + -14.88010597229004 + ], + [ + "▁FEB", + -14.880178451538086 + ], + [ + "▁Lomb", + -14.880200386047363 + ], + [ + "▁Seaton", + -14.88040542602539 + ], + [ + "ethoxy", + -14.88047695159912 + ], + [ + "▁Natale", + -14.880488395690918 + ], + [ + "▁Elko", + -14.880521774291992 + ], + [ + "ningen", + -14.880558013916016 + ], + [ + "▁HUF", + -14.880619049072266 + ], + [ + "carers", + -14.880799293518066 + ], + [ + "▁maximised", + -14.880802154541016 + ], + [ + "uller", + -14.880812644958496 + ], + [ + "▁$5.7", + -14.880828857421877 + ], + [ + "▁EML", + -14.880882263183594 + ], + [ + "0-18", + -14.880897521972656 + ], + [ + "2:04", + -14.880906105041504 + ], + [ + "youtu", + -14.88111400604248 + ], + [ + "▁Lieber", + -14.881114959716797 + ], + [ + "Bite", + -14.881217956542969 + ], + [ + "▁Corgi", + -14.881226539611816 + ], + [ + "cyte", + -14.881230354309082 + ], + [ + "Perfectly", + -14.881266593933104 + ], + [ + "Acetyl", + -14.88129997253418 + ], + [ + "▁Smug", + -14.881387710571287 + ], + [ + "NIST", + -14.881415367126465 + ], + [ + "faux", + -14.881505966186523 + ], + [ + "▁Tense", + -14.881529808044434 + ], + [ + "▁Debi", + -14.881599426269531 + ], + [ + "planting", + -14.881715774536133 + ], + [ + "▁DTR", + -14.88180446624756 + ], + [ + "▁crouch", + -14.881827354431152 + ], + [ + "▁Krieg", + -14.881853103637695 + ], + [ + "▁segregate", + -14.88195514678955 + ], + [ + "▁Urb", + -14.882095336914062 + ], + [ + "amhari", + -14.88210105895996 + ], + [ + "▁Jeanie", + -14.882194519042969 + ], + [ + "▁Paradis", + -14.882213592529297 + ], + [ + "▁devolve", + -14.88230037689209 + ], + [ + "clan", + -14.882349967956545 + ], + [ + "Ortho", + -14.88238525390625 + ], + [ + "24%", + -14.882535934448242 + ], + [ + "▁dogmatic", + -14.882554054260254 + ], + [ + "ithromycin", + -14.88255500793457 + ], + [ + "Occupancy", + -14.882555961608888 + ], + [ + "expletive", + -14.882555961608888 + ], + [ + "▁AirBnB", + -14.882555961608888 + ], + [ + "▁Astounding", + -14.882555961608888 + ], + [ + "▁Eclectic", + -14.882555961608888 + ], + [ + "▁Lactobacillus", + -14.882555961608888 + ], + [ + "▁Midlothian", + -14.882555961608888 + ], + [ + "▁POSSIBLE", + -14.882555961608888 + ], + [ + "▁Peregrine", + -14.882555961608888 + ], + [ + "▁Reconnaissance", + -14.882555961608888 + ], + [ + "▁Rendezvous", + -14.882555961608888 + ], + [ + "▁STUDENTS", + -14.882555961608888 + ], + [ + "▁Shostakovich", + -14.882555961608888 + ], + [ + "▁Steroids", + -14.882555961608888 + ], + [ + "▁Sulawesi", + -14.882555961608888 + ], + [ + "▁THOMAS", + -14.882555961608888 + ], + [ + "▁demarcation", + -14.882555961608888 + ], + [ + "▁embalming", + -14.882555961608888 + ], + [ + "▁falsified", + -14.882555961608888 + ], + [ + "▁harpsichord", + -14.882555961608888 + ], + [ + "▁illusory", + -14.882555961608888 + ], + [ + "▁lazily", + -14.882555961608888 + ], + [ + "▁oxytocin", + -14.882555961608888 + ], + [ + "▁pecuniary", + -14.882555961608888 + ], + [ + "▁rambunctious", + -14.882555961608888 + ], + [ + "▁reiterating", + 
-14.882555961608888 + ], + [ + "▁tachycardia", + -14.882555961608888 + ], + [ + "▁tarpaulin", + -14.882555961608888 + ], + [ + "▁unopposed", + -14.882555961608888 + ], + [ + "▁untouchable", + -14.882555961608888 + ], + [ + "▁Myntra", + -14.882556915283203 + ], + [ + "▁chenille", + -14.882556915283203 + ], + [ + "▁mandrel", + -14.882556915283203 + ], + [ + "▁narrating", + -14.882556915283203 + ], + [ + "Legislators", + -14.88255786895752 + ], + [ + "▁Nesbitt", + -14.88255786895752 + ], + [ + "▁Revolver", + -14.88255786895752 + ], + [ + "▁SQUARE", + -14.88255786895752 + ], + [ + "▁lynching", + -14.88255786895752 + ], + [ + "▁1-855-", + -14.882558822631836 + ], + [ + "▁Bonaventure", + -14.882558822631836 + ], + [ + "▁Intracoastal", + -14.882558822631836 + ], + [ + "▁Brompton", + -14.882559776306152 + ], + [ + "▁chafing", + -14.882560729980469 + ], + [ + "Pasquale", + -14.882561683654783 + ], + [ + "▁DIEGO", + -14.882562637329102 + ], + [ + "▁Derwent", + -14.882563591003418 + ], + [ + "▁houzz", + -14.882564544677734 + ], + [ + "▁sordid", + -14.88256549835205 + ], + [ + "▁heaving", + -14.882568359375 + ], + [ + "▁Neurosurgery", + -14.882570266723633 + ], + [ + "▁Soaring", + -14.882574081420898 + ], + [ + "▁saucy", + -14.882575988769531 + ], + [ + "▁Coppola", + -14.882576942443848 + ], + [ + "▁Keratin", + -14.882577896118164 + ], + [ + "▁Groundhog", + -14.88257884979248 + ], + [ + "▁Wahlberg", + -14.88257884979248 + ], + [ + "▁Imitation", + -14.882579803466797 + ], + [ + "▁RNLI", + -14.882580757141112 + ], + [ + "▁antiretroviral", + -14.882580757141112 + ], + [ + "▁Hummus", + -14.88258171081543 + ], + [ + "▁Voltaire", + -14.882583618164062 + ], + [ + "▁Elkhart", + -14.88258457183838 + ], + [ + "▁Poisson", + -14.882591247558594 + ], + [ + "▁equatorial", + -14.882591247558594 + ], + [ + "▁Rakhine", + -14.882594108581545 + ], + [ + "▁draperies", + -14.882596015930176 + ], + [ + "▁verbose", + -14.88259983062744 + ], + [ + "▁Caliente", + -14.882603645324709 + ], + [ + "▁Marcellus", + -14.882603645324709 + ], + [ + "▁Cajon", + -14.88260555267334 + ], + [ + "▁Vieira", + -14.882607460021973 + ], + [ + "▁affable", + -14.882610321044922 + ], + [ + "▁FOREVER", + -14.882620811462402 + ], + [ + "▁retardation", + -14.882622718811035 + ], + [ + "▁Woodlawn", + -14.882624626159668 + ], + [ + "Locally", + -14.882634162902832 + ], + [ + "▁Harrell", + -14.882650375366213 + ], + [ + "▁Lebron", + -14.882665634155272 + ], + [ + "▁mangled", + -14.88266944885254 + ], + [ + "▁Cassini", + -14.88267421722412 + ], + [ + "▁Hatteras", + -14.882680892944336 + ], + [ + "▁ASCAP", + -14.8826904296875 + ], + [ + "▁Municipalities", + -14.882698059082031 + ], + [ + "5.000", + -14.882702827453612 + ], + [ + "▁DuPage", + -14.88270378112793 + ], + [ + "▁causality", + -14.882708549499512 + ], + [ + "▁recursion", + -14.882710456848145 + ], + [ + "▁Belcher", + -14.88271141052246 + ], + [ + "▁Gladwin", + -14.882713317871094 + ], + [ + "MENTAL", + -14.882723808288574 + ], + [ + "▁Leyton", + -14.882735252380373 + ], + [ + "▁GDDR", + -14.88274383544922 + ], + [ + "▁monica", + -14.882752418518066 + ], + [ + "▁2007-08", + -14.882753372192385 + ], + [ + "▁Carbondale", + -14.882762908935549 + ], + [ + "▁depositors", + -14.882763862609863 + ], + [ + "▁rebelled", + -14.882765769958496 + ], + [ + "▁impartiality", + -14.882768630981444 + ], + [ + "chandran", + -14.882770538330078 + ], + [ + "▁undetermined", + -14.882772445678713 + ], + [ + "▁cheesecloth", + -14.88277816772461 + ], + [ + "▁ureter", + -14.88278865814209 + ], + [ + "▁$49.99", + 
-14.882793426513672 + ], + [ + "▁profusion", + -14.882801055908203 + ], + [ + "moro", + -14.88282585144043 + ], + [ + "BCP", + -14.882837295532228 + ], + [ + "▁Bullard", + -14.882842063903809 + ], + [ + "Affirmation", + -14.882889747619627 + ], + [ + "▁haphazardly", + -14.882904052734377 + ], + [ + "▁Audiology", + -14.882908821105955 + ], + [ + "▁bequeathed", + -14.88291835784912 + ], + [ + "▁Bmw", + -14.882927894592283 + ], + [ + "▁charade", + -14.88293743133545 + ], + [ + "▁switchbacks", + -14.882943153381348 + ], + [ + "▁Centerpieces", + -14.88297176361084 + ], + [ + "▁Hottest", + -14.88298797607422 + ], + [ + "▁17:1", + -14.883012771606444 + ], + [ + "▁Bakewell", + -14.883036613464355 + ], + [ + "▁DIRECTOR", + -14.883066177368164 + ], + [ + "▁Enron", + -14.88307762145996 + ], + [ + "▁Kingsbury", + -14.883087158203123 + ], + [ + "▁retell", + -14.88308811187744 + ], + [ + "▁fervently", + -14.883094787597656 + ], + [ + "▁isoform", + -14.883097648620604 + ], + [ + "▁Vinod", + -14.883113861083984 + ], + [ + "▁Daydream", + -14.883133888244627 + ], + [ + "▁Grouse", + -14.883137702941896 + ], + [ + "▁cynic", + -14.883145332336426 + ], + [ + "▁Ballmer", + -14.883201599121094 + ], + [ + "▁yanked", + -14.883233070373535 + ], + [ + "▁hoover", + -14.883252143859863 + ], + [ + "aesthetic", + -14.88325309753418 + ], + [ + "▁capitul", + -14.883277893066406 + ], + [ + "▁Longmont", + -14.883301734924316 + ], + [ + "ozzo", + -14.883320808410645 + ], + [ + "▁grayscale", + -14.88332748413086 + ], + [ + "▁Chania", + -14.883387565612791 + ], + [ + "▁Ballina", + -14.883389472961426 + ], + [ + "2:41", + -14.883393287658691 + ], + [ + "PNG", + -14.883400917053224 + ], + [ + "▁Mattia", + -14.883405685424805 + ], + [ + "▁Bullion", + -14.883432388305664 + ], + [ + "▁AirPlay", + -14.88349151611328 + ], + [ + "▁flipside", + -14.88349151611328 + ], + [ + "▁Magu", + -14.88351345062256 + ], + [ + "▁submittal", + -14.883517265319824 + ], + [ + "▁Sketches", + -14.883560180664062 + ], + [ + "▁Applebee", + -14.883567810058594 + ], + [ + "▁corporat", + -14.883591651916504 + ], + [ + "▁Olathe", + -14.883609771728516 + ], + [ + "Elle", + -14.883614540100098 + ], + [ + "▁Tustin", + -14.883681297302246 + ], + [ + "conne", + -14.883695602416992 + ], + [ + "▁Chewy", + -14.883696556091309 + ], + [ + "▁$6.5", + -14.883708953857422 + ], + [ + "▁impatiently", + -14.883744239807127 + ], + [ + "▁curveball", + -14.883853912353516 + ], + [ + "▁16:0", + -14.883907318115234 + ], + [ + "▁Salish", + -14.883929252624512 + ], + [ + "▁Vocalist", + -14.883950233459473 + ], + [ + "▁hearsay", + -14.883965492248535 + ], + [ + "▁Pathologist", + -14.883966445922852 + ], + [ + "▁DIT", + -14.883968353271484 + ], + [ + "▁Songbook", + -14.883968353271484 + ], + [ + "Covering", + -14.884007453918455 + ], + [ + "▁15000", + -14.884007453918455 + ], + [ + "2015-0", + -14.88401222229004 + ], + [ + "▁Geller", + -14.884078025817873 + ], + [ + "▁interconnecting", + -14.884081840515137 + ], + [ + "PTO", + -14.884095191955566 + ], + [ + "▁choy", + -14.884185791015623 + ], + [ + "▁Rawls", + -14.884265899658203 + ], + [ + "SKY", + -14.884268760681152 + ], + [ + "▁Calabria", + -14.88427448272705 + ], + [ + "▁Colourful", + -14.88430118560791 + ], + [ + "▁IDEAS", + -14.884316444396973 + ], + [ + "7.9%", + -14.88441562652588 + ], + [ + "▁Symp", + -14.88441562652588 + ], + [ + "▁unofficially", + -14.88442039489746 + ], + [ + "▁Tumbler", + -14.88463306427002 + ], + [ + "▁Borden", + -14.884690284729004 + ], + [ + "AUC", + -14.88477897644043 + ], + [ + "▁COMES", + 
-14.884800910949709 + ], + [ + "▁brunches", + -14.884808540344238 + ], + [ + "▁sledge", + -14.88482666015625 + ], + [ + "ellner", + -14.88489818572998 + ], + [ + "▁Posture", + -14.884913444519045 + ], + [ + "▁140,000", + -14.884952545166016 + ], + [ + "▁garish", + -14.884973526000977 + ], + [ + "joke", + -14.884995460510254 + ], + [ + "▁TRIM", + -14.885064125061035 + ], + [ + "▁dcor", + -14.88508129119873 + ], + [ + "economi", + -14.885124206542969 + ], + [ + "faire", + -14.885124206542969 + ], + [ + "▁Marana", + -14.885150909423828 + ], + [ + "▁steed", + -14.885157585144045 + ], + [ + "▁Vandal", + -14.885175704956056 + ], + [ + "elker", + -14.885210990905762 + ], + [ + "▁fanny", + -14.88523006439209 + ], + [ + "ooze", + -14.885235786437988 + ], + [ + "▁defenseless", + -14.885305404663086 + ], + [ + "ostomy", + -14.8853120803833 + ], + [ + "▁Marry", + -14.885343551635742 + ], + [ + ">>>>>", + -14.88535213470459 + ], + [ + "8.4%", + -14.885358810424805 + ], + [ + "▁trialled", + -14.885436058044434 + ], + [ + "0047", + -14.885586738586426 + ], + [ + "evic", + -14.885638236999512 + ], + [ + "▁lovelies", + -14.885871887207031 + ], + [ + "onomy", + -14.885881423950195 + ], + [ + "▁20-22", + -14.885900497436523 + ], + [ + "2299", + -14.88593292236328 + ], + [ + "Liver", + -14.88607406616211 + ], + [ + "Folk", + -14.886101722717283 + ], + [ + "Archi", + -14.886143684387209 + ], + [ + "▁decreed", + -14.886188507080078 + ], + [ + "ccelerating", + -14.88623046875 + ], + [ + "▁Poop", + -14.886245727539062 + ], + [ + "CHAMP", + -14.886260986328123 + ], + [ + "modeling", + -14.886470794677734 + ], + [ + "1536", + -14.88650894165039 + ], + [ + "UES", + -14.886537551879885 + ], + [ + "2:29", + -14.886666297912598 + ], + [ + "Panelists", + -14.88670825958252 + ], + [ + "MMY", + -14.886767387390137 + ], + [ + "▁Charan", + -14.88685417175293 + ], + [ + "▁Manley", + -14.886863708496094 + ], + [ + "▁kip", + -14.886958122253418 + ], + [ + "▁Abou", + -14.887005805969238 + ], + [ + "▁Voo", + -14.887030601501465 + ], + [ + "▁overblown", + -14.887030601501465 + ], + [ + "SVILLE", + -14.8871431350708 + ], + [ + "vitt", + -14.887171745300291 + ], + [ + "Zoe", + -14.887205123901367 + ], + [ + "▁NAIA", + -14.887227058410645 + ], + [ + "Cody", + -14.887319564819336 + ], + [ + "------------", + -14.887338638305664 + ], + [ + "▁0.07", + -14.88741683959961 + ], + [ + "▁Undo", + -14.887669563293455 + ], + [ + "▁Azam", + -14.887792587280272 + ], + [ + "▁Backer", + -14.887856483459473 + ], + [ + "2019-02-1", + -14.887904167175291 + ], + [ + "Depart", + -14.88811206817627 + ], + [ + "Threat", + -14.888166427612305 + ], + [ + "Jade", + -14.888242721557615 + ], + [ + "Vera", + -14.888249397277832 + ], + [ + "Prerequisite", + -14.888257026672363 + ], + [ + "shattering", + -14.888261795043944 + ], + [ + "Asphalt", + -14.888276100158691 + ], + [ + "Renewable", + -14.888277053833008 + ], + [ + "Detect", + -14.888283729553224 + ], + [ + "samsung", + -14.888286590576172 + ], + [ + "anonymous", + -14.888288497924805 + ], + [ + "Kudos", + -14.88828945159912 + ], + [ + "expense", + -14.888290405273438 + ], + [ + "Plumbing", + -14.888291358947754 + ], + [ + "Laboratory", + -14.888297080993652 + ], + [ + "emergence", + -14.888298034667969 + ], + [ + "Glasgow", + -14.888299942016602 + ], + [ + "Programming", + -14.888320922851562 + ], + [ + "Mickey", + -14.888325691223145 + ], + [ + "▁Haag", + -14.888339042663574 + ], + [ + "Baking", + -14.88840389251709 + ], + [ + "Aircraft", + -14.88843059539795 + ], + [ + "garlic", + -14.888465881347656 
+ ], + [ + "Toxic", + -14.888479232788086 + ], + [ + "▁singletrack", + -14.88858413696289 + ], + [ + "Eden", + -14.888646125793455 + ], + [ + "bleached", + -14.888686180114746 + ], + [ + "prevention", + -14.888710975646973 + ], + [ + "agreement", + -14.888727188110352 + ], + [ + "contribute", + -14.888738632202148 + ], + [ + "Specialized", + -14.88874340057373 + ], + [ + "▁Humanist", + -14.888808250427246 + ], + [ + "Peach", + -14.888835906982422 + ], + [ + "lecture", + -14.888859748840332 + ], + [ + "Highest", + -14.888861656188965 + ], + [ + "▁Malkin", + -14.888920783996582 + ], + [ + "▁Kunal", + -14.8889799118042 + ], + [ + "▁Mourne", + -14.889004707336426 + ], + [ + "▁Telco", + -14.88910961151123 + ], + [ + "biased", + -14.889113426208496 + ], + [ + "▁04:5", + -14.88912296295166 + ], + [ + "Caster", + -14.889189720153809 + ], + [ + "stealing", + -14.889204025268556 + ], + [ + "Actress", + -14.889237403869627 + ], + [ + "▁cyp", + -14.889333724975586 + ], + [ + "Chung", + -14.889360427856444 + ], + [ + "inelli", + -14.88943099975586 + ], + [ + "vPN", + -14.8894624710083 + ], + [ + "FSL", + -14.889467239379885 + ], + [ + "OPEC", + -14.889673233032228 + ], + [ + "▁Croat", + -14.889710426330566 + ], + [ + "CCU", + -14.889711380004885 + ], + [ + "rotation", + -14.88973331451416 + ], + [ + "Regularly", + -14.889827728271484 + ], + [ + "▁Maury", + -14.889838218688965 + ], + [ + "Quo", + -14.889891624450684 + ], + [ + "Doom", + -14.89004135131836 + ], + [ + "▁Yew", + -14.890082359313965 + ], + [ + "JPG", + -14.890209197998049 + ], + [ + "▁Guidebook", + -14.890289306640623 + ], + [ + "Drugstore", + -14.890310287475586 + ], + [ + "8:15", + -14.890313148498535 + ], + [ + "Trained", + -14.890348434448242 + ], + [ + "▁Thien", + -14.890355110168455 + ], + [ + "ilita", + -14.890445709228516 + ], + [ + "Switching", + -14.890531539916992 + ], + [ + "▁Hoshi", + -14.890541076660156 + ], + [ + "▁Homme", + -14.89062213897705 + ], + [ + "ZOO", + -14.890789031982422 + ], + [ + "▁gpm", + -14.890841484069824 + ], + [ + "3333", + -14.890859603881836 + ], + [ + "1555", + -14.890876770019531 + ], + [ + "▁reparation", + -14.890993118286133 + ], + [ + "▁transgression", + -14.891047477722168 + ], + [ + "▁Fuss", + -14.891054153442385 + ], + [ + "Haus", + -14.891077995300291 + ], + [ + "diagram", + -14.891234397888184 + ], + [ + "Tactic", + -14.891451835632324 + ], + [ + "HSC", + -14.891480445861816 + ], + [ + "▁karen", + -14.891523361206056 + ], + [ + "▁COLLA", + -14.891561508178713 + ], + [ + "▁19:2", + -14.891687393188477 + ], + [ + ".0%)", + -14.891851425170898 + ], + [ + "kkan", + -14.891857147216797 + ], + [ + "▁instigate", + -14.891955375671388 + ], + [ + "▁cleat", + -14.8920316696167 + ], + [ + "▁ballgame", + -14.892051696777344 + ], + [ + "▁sputter", + -14.892074584960938 + ], + [ + "▁TSR", + -14.892083168029783 + ], + [ + "▁Yugoslav", + -14.892101287841797 + ], + [ + "▁reassemble", + -14.89212703704834 + ], + [ + "▁imprison", + -14.892147064208984 + ], + [ + "hydrogen", + -14.892230033874512 + ], + [ + "Phillip", + -14.8922700881958 + ], + [ + "▁RECEIVE", + -14.892324447631836 + ], + [ + "▁Guilin", + -14.892328262329102 + ], + [ + "Gator", + -14.892330169677734 + ], + [ + "phobe", + -14.892359733581545 + ], + [ + "▁GBC", + -14.89242458343506 + ], + [ + "▁crucifix", + -14.892537117004396 + ], + [ + "PUBLISH", + -14.892556190490724 + ], + [ + "Psoriasis", + -14.892556190490724 + ], + [ + "▁Allegiance", + -14.892556190490724 + ], + [ + "▁Assortment", + -14.892556190490724 + ], + [ + "▁Baguio", + 
-14.892556190490724 + ], + [ + "▁COMMERCIAL", + -14.892556190490724 + ], + [ + "▁CORPORATION", + -14.892556190490724 + ], + [ + "▁Dachshund", + -14.892556190490724 + ], + [ + "▁Dazzling", + -14.892556190490724 + ], + [ + "▁Etobicoke", + -14.892556190490724 + ], + [ + "▁Gestalt", + -14.892556190490724 + ], + [ + "▁Hialeah", + -14.892556190490724 + ], + [ + "▁Indemnity", + -14.892556190490724 + ], + [ + "▁Inquiries", + -14.892556190490724 + ], + [ + "▁Lecithin", + -14.892556190490724 + ], + [ + "▁Moffitt", + -14.892556190490724 + ], + [ + "▁Musketeer", + -14.892556190490724 + ], + [ + "▁Psychedelic", + -14.892556190490724 + ], + [ + "▁Puyallup", + -14.892556190490724 + ], + [ + "▁Sequential", + -14.892556190490724 + ], + [ + "▁Sibelius", + -14.892556190490724 + ], + [ + "▁Whedon", + -14.892556190490724 + ], + [ + "▁amphitheatre", + -14.892556190490724 + ], + [ + "▁breathalyzer", + -14.892556190490724 + ], + [ + "▁chardonnay", + -14.892556190490724 + ], + [ + "▁crazier", + -14.892556190490724 + ], + [ + "▁exacerbating", + -14.892556190490724 + ], + [ + "▁glioblastoma", + -14.892556190490724 + ], + [ + "▁honeysuckle", + -14.892556190490724 + ], + [ + "▁palpitations", + -14.892556190490724 + ], + [ + "▁phenotypic", + -14.892556190490724 + ], + [ + "▁rickety", + -14.892556190490724 + ], + [ + "▁unrestrained", + -14.892556190490724 + ], + [ + "▁wolverine", + -14.892556190490724 + ], + [ + "▁Haverford", + -14.89255714416504 + ], + [ + "▁dystrophy", + -14.89255714416504 + ], + [ + "▁intensification", + -14.89255714416504 + ], + [ + "▁Alvarado", + -14.892558097839355 + ], + [ + "▁Féin", + -14.892558097839355 + ], + [ + "Descriptive", + -14.892559051513672 + ], + [ + "▁Clarisonic", + -14.892559051513672 + ], + [ + "▁IFTTT", + -14.892560958862305 + ], + [ + "▁Juilliard", + -14.892560958862305 + ], + [ + "▁Equator", + -14.892562866210938 + ], + [ + "▁bursaries", + -14.892562866210938 + ], + [ + "▁dulce", + -14.892562866210938 + ], + [ + "▁homily", + -14.892562866210938 + ], + [ + "▁Immunity", + -14.892563819885254 + ], + [ + "▁criminology", + -14.892563819885254 + ], + [ + "▁Esthetic", + -14.892566680908203 + ], + [ + "▁Muscular", + -14.89256763458252 + ], + [ + "▁Selkirk", + -14.892569541931152 + ], + [ + "▁meagre", + -14.892571449279783 + ], + [ + "▁Autistic", + -14.892572402954102 + ], + [ + "▁Whipple", + -14.892573356628418 + ], + [ + "▁Avril", + -14.89257526397705 + ], + [ + "▁behoove", + -14.89257526397705 + ], + [ + "plegic", + -14.892576217651367 + ], + [ + "▁primacy", + -14.892580032348633 + ], + [ + "▁unjustified", + -14.892582893371582 + ], + [ + "▁duffel", + -14.892585754394531 + ], + [ + "▁Comanche", + -14.89258861541748 + ], + [ + "▁Stabilizer", + -14.89258861541748 + ], + [ + "▁Trousers", + -14.892595291137695 + ], + [ + "▁gummies", + -14.892597198486328 + ], + [ + "▁Butternut", + -14.89259910583496 + ], + [ + "▁(1975)", + -14.892602920532228 + ], + [ + "▁Lenoir", + -14.892602920532228 + ], + [ + "▁unpaved", + -14.892611503601074 + ], + [ + "▁Heisman", + -14.892617225646973 + ], + [ + "▁Unconventional", + -14.892620086669922 + ], + [ + "▁Underpants", + -14.892634391784668 + ], + [ + "▁partisanship", + -14.892638206481934 + ], + [ + "▁Darnell", + -14.8926420211792 + ], + [ + "▁Yeats", + -14.89264965057373 + ], + [ + "▁CCNP", + -14.89265251159668 + ], + [ + "▁Mirren", + -14.892654418945312 + ], + [ + "▁Cadence", + -14.892658233642578 + ], + [ + "▁Stagger", + -14.892658233642578 + ], + [ + "▁Bernice", + -14.892660140991213 + ], + [ + "▁Lynette", + -14.892660140991213 + ], + [ + "obtain", + 
-14.89266586303711 + ], + [ + "plier", + -14.892670631408691 + ], + [ + "▁lonesome", + -14.89267635345459 + ], + [ + "▁Became", + -14.89268398284912 + ], + [ + "▁Joao", + -14.892685890197754 + ], + [ + "▁Girona", + -14.892692565917969 + ], + [ + "▁Gelato", + -14.892701148986816 + ], + [ + "▁Neutrality", + -14.892704963684082 + ], + [ + "▁Nagaland", + -14.892711639404297 + ], + [ + "▁actuation", + -14.892723083496094 + ], + [ + "▁STB", + -14.892739295959473 + ], + [ + "▁Rebirth", + -14.892749786376951 + ], + [ + "▁Quickbooks", + -14.892762184143066 + ], + [ + "▁mutagen", + -14.8927640914917 + ], + [ + "▁reconfiguration", + -14.892779350280762 + ], + [ + "▁Removalists", + -14.892840385437012 + ], + [ + "▁Universitas", + -14.892840385437012 + ], + [ + "▁nanoscale", + -14.892851829528809 + ], + [ + "▁Gusto", + -14.892862319946287 + ], + [ + "▁junkyard", + -14.892914772033691 + ], + [ + "▁TICKETS", + -14.892915725708008 + ], + [ + "NSAID", + -14.892937660217283 + ], + [ + "▁sisal", + -14.892938613891602 + ], + [ + "▁ELSE", + -14.8929443359375 + ], + [ + "▁Nollywood", + -14.892953872680664 + ], + [ + "ophyte", + -14.892958641052246 + ], + [ + "▁Strava", + -14.892959594726562 + ], + [ + "▁XBOX", + -14.892961502075195 + ], + [ + "▁ecard", + -14.892964363098145 + ], + [ + "▁Peerless", + -14.89297580718994 + ], + [ + "▁brooches", + -14.893022537231444 + ], + [ + "▁makerspace", + -14.893028259277344 + ], + [ + "1922", + -14.893033027648926 + ], + [ + "▁Thurman", + -14.893051147460938 + ], + [ + "▁aegis", + -14.89305305480957 + ], + [ + "▁fetuses", + -14.893067359924316 + ], + [ + "▁Jiangxi", + -14.89308261871338 + ], + [ + "▁Succession", + -14.893088340759276 + ], + [ + "▁esthetician", + -14.89309310913086 + ], + [ + "▁acidification", + -14.893118858337402 + ], + [ + "▁graveside", + -14.893121719360352 + ], + [ + "▁Calderon", + -14.893128395080566 + ], + [ + "▁chromo", + -14.893143653869627 + ], + [ + "▁steepest", + -14.893156051635742 + ], + [ + "aixar", + -14.893189430236816 + ], + [ + "▁preppy", + -14.89321231842041 + ], + [ + "▁Principality", + -14.893217086791992 + ], + [ + "▁CCNA", + -14.893243789672852 + ], + [ + "▁laundered", + -14.893254280090332 + ], + [ + "▁airstrip", + -14.893255233764648 + ], + [ + "▁Beltway", + -14.89327907562256 + ], + [ + "▁iDevice", + -14.893280982971191 + ], + [ + "▁Albury", + -14.89329719543457 + ], + [ + "▁reconnection", + -14.893322944641112 + ], + [ + "▁Coors", + -14.89332675933838 + ], + [ + "▁politeness", + -14.89333724975586 + ], + [ + "▁expell", + -14.893340110778809 + ], + [ + "Maid", + -14.893360137939451 + ], + [ + "HONG", + -14.8934907913208 + ], + [ + "▁Nomura", + -14.8934907913208 + ], + [ + "▁Vasco", + -14.893515586853027 + ], + [ + "▁funicular", + -14.893525123596191 + ], + [ + "▁Ladybug", + -14.89352798461914 + ], + [ + "▁tenfold", + -14.893532752990724 + ], + [ + "▁videotaped", + -14.893542289733888 + ], + [ + "▁Farage", + -14.893613815307615 + ], + [ + "▁genial", + -14.893617630004885 + ], + [ + "▁Aggie", + -14.893769264221191 + ], + [ + "▁Blizz", + -14.893793106079102 + ], + [ + "▁Madoff", + -14.89383316040039 + ], + [ + "7/8", + -14.8938627243042 + ], + [ + "▁Lakefront", + -14.893881797790527 + ], + [ + "▁receded", + -14.89390754699707 + ], + [ + "▁PRODUCTION", + -14.893911361694336 + ], + [ + "ELLING", + -14.893929481506348 + ], + [ + "▁60-70", + -14.89403247833252 + ], + [ + "▁slung", + -14.89406967163086 + ], + [ + "▁READING", + -14.89407444000244 + ], + [ + "Linear", + -14.894134521484377 + ], + [ + "▁chauffeured", + -14.894170761108398 + ], 
+ [ + "Northeast", + -14.894192695617676 + ], + [ + "▁Lightfoot", + -14.894272804260254 + ], + [ + "▁eardrum", + -14.894359588623049 + ], + [ + "▁stacker", + -14.89443016052246 + ], + [ + "11-15", + -14.894511222839355 + ], + [ + "▁ABM", + -14.894545555114746 + ], + [ + "▁heathen", + -14.894582748413086 + ], + [ + "Citi", + -14.894679069519045 + ], + [ + "▁Exton", + -14.894760131835938 + ], + [ + "▁24′′", + -14.89480209350586 + ], + [ + "▁Garret", + -14.89487075805664 + ], + [ + "▁Stix", + -14.895050048828123 + ], + [ + "▁decoded", + -14.895106315612791 + ], + [ + "▁Satanic", + -14.89515209197998 + ], + [ + ".09.201", + -14.895169258117676 + ], + [ + "Spell", + -14.895206451416016 + ], + [ + "▁relapsed", + -14.895410537719728 + ], + [ + "▁parentage", + -14.895462036132812 + ], + [ + "▁Messer", + -14.895562171936035 + ], + [ + "Phyll", + -14.8955659866333 + ], + [ + "Desp", + -14.895593643188477 + ], + [ + "ecchi", + -14.895611763000488 + ], + [ + "▁peeing", + -14.895635604858398 + ], + [ + "▁Clog", + -14.895708084106444 + ], + [ + "Formed", + -14.895710945129396 + ], + [ + ".05.201", + -14.89572048187256 + ], + [ + "▁malty", + -14.89574909210205 + ], + [ + "▁Scottie", + -14.89577293395996 + ], + [ + "▁enduro", + -14.895869255065918 + ], + [ + "▁Airstream", + -14.896014213562012 + ], + [ + "▁1000+", + -14.896117210388184 + ], + [ + "▁Fairness", + -14.896125793457031 + ], + [ + "▁Schme", + -14.896467208862305 + ], + [ + "▁37,000", + -14.896482467651367 + ], + [ + "facto", + -14.896501541137695 + ], + [ + "▁Koni", + -14.89653778076172 + ], + [ + "▁Palaeo", + -14.896651268005373 + ], + [ + "▁Pickett", + -14.896761894226074 + ], + [ + "▁maliciously", + -14.896770477294922 + ], + [ + "▁bluebird", + -14.89678192138672 + ], + [ + "▁malted", + -14.89686393737793 + ], + [ + "naise", + -14.896892547607422 + ], + [ + "WALL", + -14.89691925048828 + ], + [ + "▁Samaj", + -14.897015571594238 + ], + [ + "▁Refuse", + -14.897052764892578 + ], + [ + "▁hoopla", + -14.897074699401855 + ], + [ + "1,200", + -14.89719581604004 + ], + [ + "pyrid", + -14.89725399017334 + ], + [ + "▁Fj", + -14.897334098815918 + ], + [ + "▁Campsite", + -14.897409439086914 + ], + [ + "DEW", + -14.897415161132812 + ], + [ + "smoker", + -14.897547721862791 + ], + [ + "▁Peli", + -14.897582054138184 + ], + [ + "eshwara", + -14.89761447906494 + ], + [ + "▁Gamal", + -14.897666931152344 + ], + [ + "▁Haile", + -14.897703170776367 + ], + [ + "▁Suzan", + -14.897725105285645 + ], + [ + "▁3/2", + -14.897802352905272 + ], + [ + "Breathe", + -14.897812843322754 + ], + [ + "▁SOD", + -14.897963523864746 + ], + [ + "▁09:0", + -14.897974014282228 + ], + [ + "65,000", + -14.898037910461426 + ], + [ + "▁shard", + -14.89809513092041 + ], + [ + "▁resubmit", + -14.898122787475586 + ], + [ + "1.60", + -14.89819049835205 + ], + [ + "▁Sansa", + -14.898347854614258 + ], + [ + "obvious", + -14.8983736038208 + ], + [ + "organizing", + -14.8985013961792 + ], + [ + "cchini", + -14.898509979248049 + ], + [ + "▁Marek", + -14.8985595703125 + ], + [ + "Gradually", + -14.898563385009766 + ], + [ + "▁flyover", + -14.898567199707031 + ], + [ + "Procedure", + -14.898569107055664 + ], + [ + "Routine", + -14.898569107055664 + ], + [ + "swept", + -14.898576736450195 + ], + [ + "Athletic", + -14.898582458496094 + ], + [ + "Gluten", + -14.898584365844728 + ], + [ + "Laughter", + -14.898585319519045 + ], + [ + "ACCESS", + -14.898587226867676 + ], + [ + "Avengers", + -14.898592948913574 + ], + [ + "Memories", + -14.89859390258789 + ], + [ + "Pleasant", + -14.89859676361084 + ], + 
[ + "Shakespeare", + -14.89859676361084 + ], + [ + "Brisbane", + -14.898597717285156 + ], + [ + "Mutual", + -14.898597717285156 + ], + [ + "energetic", + -14.898600578308104 + ], + [ + "licensing", + -14.898609161376951 + ], + [ + "Violet", + -14.898614883422852 + ], + [ + "kohler", + -14.898627281188965 + ], + [ + "Madrid", + -14.898632049560549 + ], + [ + "Meghan", + -14.898637771606444 + ], + [ + "Conservation", + -14.89864444732666 + ], + [ + "gassing", + -14.89865493774414 + ], + [ + "therefore", + -14.89865779876709 + ], + [ + "tattoo", + -14.898664474487305 + ], + [ + "▁Leaning", + -14.898664474487305 + ], + [ + "Petersburg", + -14.898666381835938 + ], + [ + "Saturn", + -14.898667335510254 + ], + [ + "oxidation", + -14.89867115020752 + ], + [ + "Freak", + -14.898695945739746 + ], + [ + "Prediction", + -14.898758888244627 + ], + [ + "▁Elie", + -14.898768424987791 + ], + [ + "▁Doran", + -14.898887634277344 + ], + [ + "▁1720", + -14.898895263671877 + ], + [ + "surfer", + -14.89890480041504 + ], + [ + "SPEED", + -14.898905754089355 + ], + [ + "▁GTK", + -14.89899730682373 + ], + [ + "▁flit", + -14.89921760559082 + ], + [ + "ruz", + -14.89922332763672 + ], + [ + "Lifestyle", + -14.899286270141602 + ], + [ + "(15)", + -14.899446487426758 + ], + [ + "▁schw", + -14.899534225463867 + ], + [ + "Marian", + -14.899600982666016 + ], + [ + "anime", + -14.899678230285645 + ], + [ + "uah", + -14.899698257446287 + ], + [ + "nares", + -14.89971923828125 + ], + [ + "FHA", + -14.899775505065918 + ], + [ + "knight", + -14.899788856506348 + ], + [ + "0900", + -14.899821281433104 + ], + [ + "▁FEW", + -14.899887084960938 + ], + [ + "▁Mavic", + -14.899937629699709 + ], + [ + "Bronze", + -14.899991989135742 + ], + [ + "▁WHL", + -14.900164604187012 + ], + [ + "caller", + -14.900196075439451 + ], + [ + "▁Multifunction", + -14.900262832641602 + ], + [ + "▁60/40", + -14.900266647338867 + ], + [ + "eisen", + -14.900423049926758 + ], + [ + "▁Prinz", + -14.900458335876465 + ], + [ + "$35", + -14.900482177734377 + ], + [ + "▁Bunn", + -14.900547981262209 + ], + [ + "konomi", + -14.90061092376709 + ], + [ + "▁accede", + -14.90069580078125 + ], + [ + "67%", + -14.900700569152832 + ], + [ + "teryx", + -14.90074634552002 + ], + [ + "▁Balsam", + -14.900893211364746 + ], + [ + "ERING", + -14.900894165039062 + ], + [ + "graha", + -14.900965690612791 + ], + [ + "Waterproof", + -14.901009559631348 + ], + [ + "Katy", + -14.90107250213623 + ], + [ + "Drama", + -14.901105880737305 + ], + [ + "quilla", + -14.901107788085938 + ], + [ + "▁$105", + -14.90118408203125 + ], + [ + "▁Hamza", + -14.90121841430664 + ], + [ + "▁fingernail", + -14.901273727416992 + ], + [ + "▁Corinthian", + -14.901286125183104 + ], + [ + "Corey", + -14.901308059692385 + ], + [ + "RAID", + -14.901411056518556 + ], + [ + "fuji", + -14.901504516601562 + ], + [ + "Devon", + -14.90152072906494 + ], + [ + "condo", + -14.901528358459473 + ], + [ + "▁Zang", + -14.901591300964355 + ], + [ + "▁Massimo", + -14.901618957519531 + ], + [ + "5,6", + -14.901667594909668 + ], + [ + "referenced", + -14.901688575744627 + ], + [ + "Erica", + -14.901787757873535 + ], + [ + "reply", + -14.901844024658203 + ], + [ + "▁Blyth", + -14.901975631713867 + ], + [ + "▁irrigate", + -14.902054786682127 + ], + [ + "1949", + -14.902070999145508 + ], + [ + "buddy", + -14.902094841003418 + ], + [ + "1907", + -14.90216827392578 + ], + [ + "▁Panta", + -14.90217399597168 + ], + [ + "▁eons", + -14.902212142944336 + ], + [ + "▁Wasser", + -14.902324676513672 + ], + [ + "▁MYR", + -14.902363777160645 + 
], + [ + "arrest", + -14.90239429473877 + ], + [ + "AUTO", + -14.902402877807615 + ], + [ + "▁$73", + -14.902436256408691 + ], + [ + "▁$1,5", + -14.902437210083008 + ], + [ + "▁tensioner", + -14.90257453918457 + ], + [ + "▁Fenn", + -14.902582168579102 + ], + [ + "SSH", + -14.902636528015137 + ], + [ + "Adrenaline", + -14.90265655517578 + ], + [ + "Carnitine", + -14.90265655517578 + ], + [ + "▁ACCURACY", + -14.90265655517578 + ], + [ + "▁Benevolent", + -14.90265655517578 + ], + [ + "▁Cuisinart", + -14.90265655517578 + ], + [ + "▁FODMAP", + -14.90265655517578 + ], + [ + "▁Hildebrand", + -14.90265655517578 + ], + [ + "▁Valladolid", + -14.90265655517578 + ], + [ + "▁alginate", + -14.90265655517578 + ], + [ + "▁coercive", + -14.90265655517578 + ], + [ + "▁decomposing", + -14.90265655517578 + ], + [ + "▁detritus", + -14.90265655517578 + ], + [ + "▁exoplanet", + -14.90265655517578 + ], + [ + "▁hemodialysis", + -14.90265655517578 + ], + [ + "▁implausible", + -14.90265655517578 + ], + [ + "▁merriment", + -14.90265655517578 + ], + [ + "▁pasteurized", + -14.90265655517578 + ], + [ + "▁pharaoh", + -14.90265655517578 + ], + [ + "▁photojournalism", + -14.90265655517578 + ], + [ + "▁sanctified", + -14.90265655517578 + ], + [ + "▁schizophrenic", + -14.90265655517578 + ], + [ + "▁tantalising", + -14.90265655517578 + ], + [ + "▁tryptophan", + -14.90265655517578 + ], + [ + "▁viticulture", + -14.90265655517578 + ], + [ + "▁DeWitt", + -14.902657508850098 + ], + [ + "▁Khyber", + -14.902657508850098 + ], + [ + "▁Parthenon", + -14.902657508850098 + ], + [ + "▁Pawtucket", + -14.902657508850098 + ], + [ + "▁Schindler", + -14.902657508850098 + ], + [ + "▁Trombone", + -14.902657508850098 + ], + [ + "▁blackcurrant", + -14.902657508850098 + ], + [ + "▁militarily", + -14.902657508850098 + ], + [ + "▁obituaries", + -14.902657508850098 + ], + [ + "▁unreserved", + -14.902657508850098 + ], + [ + "▁untenable", + -14.902657508850098 + ], + [ + "Identifies", + -14.902658462524414 + ], + [ + "▁MAXIMU", + -14.90265941619873 + ], + [ + "▁Plafond", + -14.90265941619873 + ], + [ + "▁distancing", + -14.90265941619873 + ], + [ + "▁enmity", + -14.90265941619873 + ], + [ + "▁Blumenthal", + -14.902660369873049 + ], + [ + "▁Mortuary", + -14.902660369873049 + ], + [ + "▁Stavanger", + -14.902660369873049 + ], + [ + "▁homophobia", + -14.902660369873049 + ], + [ + "▁BLOOD", + -14.902661323547363 + ], + [ + "▁TARDIS", + -14.902661323547363 + ], + [ + "▁SUPPLIERS", + -14.90266227722168 + ], + [ + "▁Fukuoka", + -14.902663230895996 + ], + [ + "▁Lavrov", + -14.902664184570312 + ], + [ + "▁Mickelson", + -14.902664184570312 + ], + [ + "▁officiant", + -14.902665138244627 + ], + [ + "▁Fantasia", + -14.902666091918944 + ], + [ + "▁Kluwer", + -14.902666091918944 + ], + [ + "▁shambles", + -14.902666091918944 + ], + [ + "▁Cognos", + -14.902667999267578 + ], + [ + "▁Razorbacks", + -14.902668952941896 + ], + [ + "▁daredevil", + -14.902668952941896 + ], + [ + "+20", + -14.902671813964844 + ], + [ + "▁FIFO", + -14.902677536010742 + ], + [ + "▁DRINK", + -14.90267848968506 + ], + [ + "▁Activator", + -14.902680397033691 + ], + [ + "▁ashtray", + -14.902680397033691 + ], + [ + "▁haggling", + -14.902680397033691 + ], + [ + "▁bioavailability", + -14.902681350708008 + ], + [ + "▁Rasmus", + -14.902682304382324 + ], + [ + "▁Lonnie", + -14.902684211730955 + ], + [ + "▁Magpies", + -14.902685165405272 + ], + [ + "▁grinned", + -14.902695655822754 + ], + [ + "▁(1973)", + -14.902705192565918 + ], + [ + "▁McAllen", + -14.902705192565918 + ], + [ + "Punch", + 
-14.902725219726562 + ], + [ + "▁kickass", + -14.902729988098145 + ], + [ + "▁Brayden", + -14.902737617492676 + ], + [ + "▁Ginsburg", + -14.902743339538574 + ], + [ + "▁titanic", + -14.90274429321289 + ], + [ + "▁Labuan", + -14.902748107910156 + ], + [ + "▁Corrective", + -14.902755737304688 + ], + [ + "▁Coleridge", + -14.902772903442385 + ], + [ + "▁Secretaries", + -14.902779579162598 + ], + [ + "▁Chorley", + -14.902785301208496 + ], + [ + "▁mucosal", + -14.902790069580078 + ], + [ + "▁untangle", + -14.90279483795166 + ], + [ + "▁Illini", + -14.90280818939209 + ], + [ + "▁Darshan", + -14.902819633483888 + ], + [ + "▁chieftain", + -14.902835845947266 + ], + [ + "▁Undead", + -14.902838706970217 + ], + [ + "▁Dravid", + -14.902849197387695 + ], + [ + "▁Kronos", + -14.902849197387695 + ], + [ + "▁cutthroat", + -14.902854919433594 + ], + [ + "▁unwashed", + -14.90286350250244 + ], + [ + "▁captors", + -14.90287971496582 + ], + [ + "▁Finite", + -14.90288257598877 + ], + [ + "▁Yeoman", + -14.902891159057615 + ], + [ + "▁constriction", + -14.902896881103516 + ], + [ + "▁Podium", + -14.902915000915527 + ], + [ + "▁Devlin", + -14.902935981750488 + ], + [ + "▁autocomplete", + -14.902959823608398 + ], + [ + "▁Farrar", + -14.902966499328612 + ], + [ + "ireann", + -14.902968406677246 + ], + [ + "▁permissive", + -14.902981758117676 + ], + [ + "▁chimps", + -14.902984619140623 + ], + [ + "▁backwaters", + -14.902986526489258 + ], + [ + "▁ROTC", + -14.903045654296877 + ], + [ + "0.1%", + -14.903071403503418 + ], + [ + "▁valiantly", + -14.903071403503418 + ], + [ + "▁maligned", + -14.90307903289795 + ], + [ + "▁durian", + -14.90312385559082 + ], + [ + "▁Conqueror", + -14.903149604797363 + ], + [ + "CBL", + -14.903162002563477 + ], + [ + "▁Cuddle", + -14.903162956237791 + ], + [ + "▁distasteful", + -14.903205871582031 + ], + [ + "▁Schie", + -14.90323543548584 + ], + [ + "▁ouster", + -14.903264999389648 + ], + [ + "▁wafting", + -14.90332317352295 + ], + [ + "saying", + -14.903380393981934 + ], + [ + "▁Ricard", + -14.903392791748049 + ], + [ + "▁FOUNDATION", + -14.903399467468262 + ], + [ + "▁machete", + -14.90345287322998 + ], + [ + "SBG", + -14.903453826904297 + ], + [ + "Ender", + -14.903457641601562 + ], + [ + "OYO", + -14.903467178344728 + ], + [ + "▁prorated", + -14.903470039367676 + ], + [ + "▁Broughton", + -14.9035062789917 + ], + [ + "▁Wingate", + -14.903507232666016 + ], + [ + "▁Splitter", + -14.903510093688965 + ], + [ + "▁inertial", + -14.90351390838623 + ], + [ + "▁Whaka", + -14.903520584106444 + ], + [ + "▁cupped", + -14.903534889221191 + ], + [ + "ocin", + -14.903546333312988 + ], + [ + "▁Fonte", + -14.903685569763184 + ], + [ + "▁Fagan", + -14.903746604919434 + ], + [ + "▁briskly", + -14.90379238128662 + ], + [ + "▁contradicted", + -14.903825759887695 + ], + [ + "▁Komm", + -14.903838157653809 + ], + [ + "▁flapper", + -14.903871536254885 + ], + [ + "▁Ethanol", + -14.90392017364502 + ], + [ + "▁Johnstown", + -14.903926849365234 + ], + [ + "▁Charlestown", + -14.903993606567385 + ], + [ + "1:49", + -14.904024124145508 + ], + [ + "▁Oban", + -14.90402889251709 + ], + [ + "▁Freya", + -14.904034614562988 + ], + [ + "▁saltiness", + -14.90407943725586 + ], + [ + "▁Standby", + -14.904173851013184 + ], + [ + "zide", + -14.904285430908203 + ], + [ + "▁Leahy", + -14.904364585876465 + ], + [ + "▁incentivise", + -14.904403686523438 + ], + [ + "▁Belkin", + -14.904437065124512 + ], + [ + "▁VICE", + -14.904438972473145 + ], + [ + "▁Rishi", + -14.904528617858888 + ], + [ + "▁rediscovering", + -14.904542922973633 + ], + 
[ + "▁Carbone", + -14.904619216918944 + ], + [ + "▁Hauser", + -14.904747009277344 + ], + [ + "▁Coucher", + -14.90480136871338 + ], + [ + "▁varnished", + -14.90480899810791 + ], + [ + "counterclockwise", + -14.904820442199709 + ], + [ + "waka", + -14.90499496459961 + ], + [ + "▁clichéd", + -14.905033111572266 + ], + [ + "IDAD", + -14.905157089233398 + ], + [ + "9.3%", + -14.905317306518556 + ], + [ + "▁Daft", + -14.905323028564451 + ], + [ + "homi", + -14.905362129211426 + ], + [ + "▁\"0\"", + -14.9053955078125 + ], + [ + "▁Halter", + -14.905397415161133 + ], + [ + "▁$1200", + -14.905421257019045 + ], + [ + "▁readied", + -14.905449867248535 + ], + [ + "▁WRX", + -14.905632019042969 + ], + [ + "Scrub", + -14.905645370483398 + ], + [ + "▁Frosty", + -14.90568733215332 + ], + [ + "▁qualitatively", + -14.905717849731444 + ], + [ + "▁whereupon", + -14.905790328979492 + ], + [ + "▁fatter", + -14.905848503112791 + ], + [ + "motive", + -14.905957221984863 + ], + [ + "▁Deleted", + -14.905985832214355 + ], + [ + "Crypt", + -14.90598964691162 + ], + [ + "▁governorate", + -14.906065940856934 + ], + [ + "VIM", + -14.906107902526855 + ], + [ + "▁125,000", + -14.906112670898438 + ], + [ + "PSU", + -14.906166076660156 + ], + [ + "▁reloaded", + -14.906168937683104 + ], + [ + "▁baste", + -14.906172752380373 + ], + [ + "▁deliverability", + -14.906173706054688 + ], + [ + "▁strangeness", + -14.906235694885254 + ], + [ + "ILT", + -14.9063081741333 + ], + [ + "▁hydrological", + -14.906349182128906 + ], + [ + "Static", + -14.906354904174805 + ], + [ + "▁Liberian", + -14.90636920928955 + ], + [ + "yue", + -14.906420707702637 + ], + [ + "▁Moshi", + -14.9064302444458 + ], + [ + "7:15", + -14.906497955322266 + ], + [ + "▁£25,000", + -14.906540870666504 + ], + [ + "▁flicking", + -14.906574249267578 + ], + [ + "6800", + -14.906583786010742 + ], + [ + "▁LINKS", + -14.906587600708008 + ], + [ + "TLA", + -14.906659126281738 + ], + [ + "▁06:2", + -14.906659126281738 + ], + [ + "▁presto", + -14.906698226928713 + ], + [ + "▁petri", + -14.906892776489258 + ], + [ + "▁Chica", + -14.907097816467283 + ], + [ + "1956", + -14.907099723815918 + ], + [ + "▁$0.8", + -14.907106399536133 + ], + [ + "addressed", + -14.907110214233398 + ], + [ + "angler", + -14.907268524169922 + ], + [ + "ilated", + -14.907268524169922 + ], + [ + "HUB", + -14.90730094909668 + ], + [ + "▁Limiting", + -14.907304763793944 + ], + [ + "Cliff", + -14.907308578491213 + ], + [ + "aaaaa", + -14.90748119354248 + ], + [ + "▁Cayo", + -14.907758712768556 + ], + [ + "holtz", + -14.907915115356444 + ], + [ + "▁Fav", + -14.908008575439451 + ], + [ + "▁Mote", + -14.9080228805542 + ], + [ + "▁19:5", + -14.908035278320312 + ], + [ + "▁ROH", + -14.90804958343506 + ], + [ + "SEF", + -14.908056259155272 + ], + [ + "davis", + -14.908163070678713 + ], + [ + "▁$350,000", + -14.908254623413086 + ], + [ + "▁Petitioner", + -14.90827178955078 + ], + [ + "placing", + -14.908281326293944 + ], + [ + "Alma", + -14.908291816711426 + ], + [ + "▁Cavalli", + -14.908321380615234 + ], + [ + "/1.8", + -14.908349990844728 + ], + [ + "▁Shiro", + -14.908380508422852 + ], + [ + "▁SIU", + -14.908552169799805 + ], + [ + "▁misfire", + -14.908570289611816 + ], + [ + "▁Frau", + -14.908586502075195 + ], + [ + "▁Explor", + -14.908599853515623 + ], + [ + "▁Jahan", + -14.908636093139648 + ], + [ + "▁FIM", + -14.908642768859863 + ], + [ + "▁divinely", + -14.90867805480957 + ], + [ + "naud", + -14.908886909484863 + ], + [ + "zzz", + -14.908915519714355 + ], + [ + "Pupils", + -14.90896701812744 + ], + [ + 
"Redeem", + -14.90898895263672 + ], + [ + "Porcelain", + -14.908992767333984 + ], + [ + "association", + -14.9089994430542 + ], + [ + "Cannabis", + -14.909003257751465 + ], + [ + "Strawberry", + -14.90900421142578 + ], + [ + "Titanium", + -14.909013748168944 + ], + [ + "Magnet", + -14.909014701843262 + ], + [ + "Mediterranean", + -14.909016609191896 + ], + [ + "Silence", + -14.90902042388916 + ], + [ + "television", + -14.909022331237791 + ], + [ + "Fraud", + -14.90903091430664 + ], + [ + "Appearance", + -14.909038543701172 + ], + [ + "Charming", + -14.909039497375488 + ], + [ + "Psychology", + -14.909065246582031 + ], + [ + "studied", + -14.909067153930664 + ], + [ + "Divine", + -14.909100532531738 + ], + [ + "propane", + -14.909110069274902 + ], + [ + "stripped", + -14.909132957458496 + ], + [ + "Lightning", + -14.90917682647705 + ], + [ + "Yummy", + -14.909194946289062 + ], + [ + "▁habe", + -14.909219741821287 + ], + [ + "Venus", + -14.90923309326172 + ], + [ + "latency", + -14.909244537353516 + ], + [ + "ghz", + -14.90925407409668 + ], + [ + "behaviour", + -14.909259796142578 + ], + [ + "Handling", + -14.909343719482422 + ], + [ + "lichkeit", + -14.9093599319458 + ], + [ + "▁GTE", + -14.909375190734863 + ], + [ + "▁Belton", + -14.909379959106444 + ], + [ + "rimi", + -14.909381866455078 + ], + [ + "TONE", + -14.909418106079102 + ], + [ + "GERS", + -14.909431457519531 + ], + [ + "enhance", + -14.909541130065918 + ], + [ + "Malta", + -14.909554481506348 + ], + [ + "Classroom", + -14.909600257873535 + ], + [ + "fau", + -14.909623146057127 + ], + [ + "▁Flore", + -14.909649848937988 + ], + [ + "Panda", + -14.909707069396973 + ], + [ + "Sheila", + -14.909747123718262 + ], + [ + "Compass", + -14.909757614135742 + ], + [ + "MPEG", + -14.910014152526855 + ], + [ + "KEL", + -14.910116195678713 + ], + [ + "▁Arran", + -14.91018772125244 + ], + [ + "Bacon", + -14.910276412963867 + ], + [ + "protecting", + -14.910440444946287 + ], + [ + "deleted", + -14.910561561584473 + ], + [ + "utsch", + -14.910576820373535 + ], + [ + "3:14", + -14.910590171813965 + ], + [ + "reiner", + -14.910655975341797 + ], + [ + "Ella", + -14.910706520080566 + ], + [ + "▁Naira", + -14.910707473754885 + ], + [ + "psc", + -14.910881996154783 + ], + [ + "Mahar", + -14.910999298095703 + ], + [ + "apua", + -14.91106414794922 + ], + [ + "addition", + -14.91111660003662 + ], + [ + "0100", + -14.911161422729492 + ], + [ + "▁Stork", + -14.9111909866333 + ], + [ + "situation", + -14.91119384765625 + ], + [ + "hulu", + -14.91125202178955 + ], + [ + "MOV", + -14.911301612854004 + ], + [ + "9.90", + -14.911341667175291 + ], + [ + "▁croon", + -14.911347389221191 + ], + [ + "▁Gearbox", + -14.911507606506348 + ], + [ + "▁2019/20", + -14.911541938781738 + ], + [ + "▁Nesting", + -14.911550521850586 + ], + [ + "▁gloat", + -14.911554336547852 + ], + [ + "▁Duster", + -14.91175651550293 + ], + [ + "▁Basra", + -14.911767959594728 + ], + [ + "▁masterwork", + -14.91188621520996 + ], + [ + "Bernie", + -14.911941528320312 + ], + [ + "▁propell", + -14.911968231201172 + ], + [ + "▁unroll", + -14.911992073059082 + ], + [ + "curry", + -14.912022590637209 + ], + [ + "▁droop", + -14.912080764770508 + ], + [ + "bagh", + -14.912115097045898 + ], + [ + "composition", + -14.91212272644043 + ], + [ + "▁kingpin", + -14.912126541137695 + ], + [ + "▁Tricho", + -14.91216278076172 + ], + [ + "Washing", + -14.91222858428955 + ], + [ + "Kane", + -14.91246223449707 + ], + [ + "OCKET", + -14.912467002868652 + ], + [ + "▁panna", + -14.912531852722168 + ], + [ + 
"▁Andalusia", + -14.912622451782228 + ], + [ + "▁[2019-04-1", + -14.91262435913086 + ], + [ + "▁disband", + -14.912714958190918 + ], + [ + "▁crutch", + -14.912739753723145 + ], + [ + "CALIFORNIA", + -14.912860870361328 + ], + [ + "COFFEE", + -14.912860870361328 + ], + [ + "Prevalence", + -14.912860870361328 + ], + [ + "Resignation", + -14.912860870361328 + ], + [ + "infiltrating", + -14.912860870361328 + ], + [ + "▁ABSOLUTELY", + -14.912860870361328 + ], + [ + "▁ALREADY", + -14.912860870361328 + ], + [ + "▁Bikaner", + -14.912860870361328 + ], + [ + "▁Camouflage", + -14.912860870361328 + ], + [ + "▁Flannery", + -14.912860870361328 + ], + [ + "▁Heraklion", + -14.912860870361328 + ], + [ + "▁INDUSTRIAL", + -14.912860870361328 + ], + [ + "▁McRae", + -14.912860870361328 + ], + [ + "▁Mozzarella", + -14.912860870361328 + ], + [ + "▁Multnomah", + -14.912860870361328 + ], + [ + "▁Patanjali", + -14.912860870361328 + ], + [ + "▁Schuylkill", + -14.912860870361328 + ], + [ + "▁Tolstoy", + -14.912860870361328 + ], + [ + "▁Trondheim", + -14.912860870361328 + ], + [ + "▁bilirubin", + -14.912860870361328 + ], + [ + "▁bruschetta", + -14.912860870361328 + ], + [ + "▁bulimia", + -14.912860870361328 + ], + [ + "▁chrysanthemum", + -14.912860870361328 + ], + [ + "▁eloquence", + -14.912860870361328 + ], + [ + "▁euthanized", + -14.912860870361328 + ], + [ + "▁hydrocodone", + -14.912860870361328 + ], + [ + "▁marzipan", + -14.912860870361328 + ], + [ + "▁minneapolis", + -14.912860870361328 + ], + [ + "▁monounsaturated", + -14.912860870361328 + ], + [ + "▁nebulous", + -14.912860870361328 + ], + [ + "▁rippling", + -14.912860870361328 + ], + [ + "▁rollicking", + -14.912860870361328 + ], + [ + "▁trifecta", + -14.912860870361328 + ], + [ + "▁unhindered", + -14.912860870361328 + ], + [ + "▁192.168.1", + -14.912861824035645 + ], + [ + "▁888-548-5870", + -14.912861824035645 + ], + [ + "▁diabolical", + -14.912861824035645 + ], + [ + "▁epistle", + -14.912861824035645 + ], + [ + "▁misgivings", + -14.912861824035645 + ], + [ + "▁saccharin", + -14.912861824035645 + ], + [ + "▁Ranveer", + -14.91286277770996 + ], + [ + "Unsecured", + -14.912863731384276 + ], + [ + "▁Cervantes", + -14.912863731384276 + ], + [ + "▁Sapporo", + -14.912863731384276 + ], + [ + "▁contravention", + -14.912863731384276 + ], + [ + "▁decelerate", + -14.912863731384276 + ], + [ + "REQUIRE", + -14.912864685058594 + ], + [ + "▁Doomsday", + -14.912864685058594 + ], + [ + "▁Extravaganza", + -14.912864685058594 + ], + [ + "▁Figurine", + -14.912864685058594 + ], + [ + "▁Mulholland", + -14.912864685058594 + ], + [ + "▁Atlassian", + -14.91286563873291 + ], + [ + "▁Claudius", + -14.91286563873291 + ], + [ + "▁indeterminate", + -14.91286563873291 + ], + [ + "▁atrocity", + -14.912866592407228 + ], + [ + "▁yamaha", + -14.912866592407228 + ], + [ + "▁Emporia", + -14.912867546081545 + ], + [ + "▁Pranayama", + -14.912867546081545 + ], + [ + "▁Barbican", + -14.91286849975586 + ], + [ + "▁Breen", + -14.912869453430176 + ], + [ + "▁thiamine", + -14.912869453430176 + ], + [ + "▁girder", + -14.912870407104492 + ], + [ + "▁Lacquer", + -14.912871360778809 + ], + [ + "▁PARTNER", + -14.912871360778809 + ], + [ + "▁ineffectual", + -14.912871360778809 + ], + [ + "▁Arnhem", + -14.912872314453123 + ], + [ + "▁Meijer", + -14.912874221801758 + ], + [ + "▁cadaver", + -14.912877082824709 + ], + [ + "▁saunter", + -14.91287899017334 + ], + [ + "▁Engadget", + -14.912881851196287 + ], + [ + "▁dormancy", + -14.912883758544922 + ], + [ + "▁Ramallah", + -14.912888526916504 + ], + [ + "▁Youssef", + 
-14.912888526916504 + ], + [ + "▁CARL", + -14.912896156311035 + ], + [ + "▁expungement", + -14.912896156311035 + ], + [ + "▁scowl", + -14.912897109985352 + ], + [ + "▁Strathmore", + -14.9128999710083 + ], + [ + "▁Stagecoach", + -14.912901878356934 + ], + [ + "▁Lupus", + -14.912903785705566 + ], + [ + "Prolonged", + -14.912907600402832 + ], + [ + "▁flatulence", + -14.912908554077148 + ], + [ + "▁biomaterials", + -14.91291046142578 + ], + [ + "▁cramming", + -14.912911415100098 + ], + [ + "▁RSPB", + -14.912912368774414 + ], + [ + "▁Mercure", + -14.912914276123049 + ], + [ + "▁Mixtape", + -14.912914276123049 + ], + [ + "▁Pittsfield", + -14.912919044494627 + ], + [ + "▁whitish", + -14.912920951843262 + ], + [ + "▁fucked", + -14.912925720214844 + ], + [ + "▁Respite", + -14.91292667388916 + ], + [ + "▁ruse", + -14.912928581237791 + ], + [ + "▁Telenor", + -14.912931442260742 + ], + [ + "▁Searle", + -14.912936210632324 + ], + [ + "▁JDBC", + -14.91293716430664 + ], + [ + "▁polyamide", + -14.91293716430664 + ], + [ + "▁Remanufactured", + -14.91294002532959 + ], + [ + "▁Mersey", + -14.912949562072754 + ], + [ + "▁repelled", + -14.912949562072754 + ], + [ + "▁hydrogel", + -14.912956237792969 + ], + [ + "▁alternation", + -14.9129638671875 + ], + [ + "▁Taskforce", + -14.912967681884766 + ], + [ + "▁purebred", + -14.912968635559082 + ], + [ + "▁Basecamp", + -14.91297435760498 + ], + [ + "▁Carrillo", + -14.91297721862793 + ], + [ + "Induced", + -14.912978172302246 + ], + [ + "▁billowing", + -14.912981986999512 + ], + [ + "▁Winslet", + -14.912982940673828 + ], + [ + "▁Lazada", + -14.91298484802246 + ], + [ + "▁gaggle", + -14.912985801696776 + ], + [ + "▁nondescript", + -14.913009643554688 + ], + [ + "▁junta", + -14.913015365600586 + ], + [ + "▁Thermometer", + -14.913020133972168 + ], + [ + "Explicit", + -14.913023948669434 + ], + [ + "▁minicab", + -14.91302490234375 + ], + [ + "▁Kindred", + -14.913028717041016 + ], + [ + "▁Granola", + -14.91305160522461 + ], + [ + "▁DreamWorks", + -14.913052558898926 + ], + [ + "▁bassoon", + -14.913055419921877 + ], + [ + "▁GPRS", + -14.913061141967772 + ], + [ + "▁Copley", + -14.913073539733888 + ], + [ + "▁grisly", + -14.913076400756836 + ], + [ + "PERFORM", + -14.913082122802734 + ], + [ + "▁Duvall", + -14.913084983825684 + ], + [ + "▁Larissa", + -14.913093566894531 + ], + [ + "▁Heartbeat", + -14.91309928894043 + ], + [ + "▁Covina", + -14.91310214996338 + ], + [ + "▁Badlands", + -14.913107872009276 + ], + [ + "▁Huston", + -14.913129806518556 + ], + [ + "WAKE", + -14.913158416748049 + ], + [ + "▁bereft", + -14.913174629211426 + ], + [ + "▁copolymer", + -14.913203239440918 + ], + [ + "▁phlegm", + -14.913204193115234 + ], + [ + "▁Theorem", + -14.913215637207031 + ], + [ + "▁Arcane", + -14.91323471069336 + ], + [ + "▁Thromb", + -14.913247108459473 + ], + [ + "▁Casella", + -14.91327953338623 + ], + [ + "▁Homeopathy", + -14.913290977478027 + ], + [ + "▁Posada", + -14.913311004638672 + ], + [ + "▁seaplane", + -14.913335800170898 + ], + [ + "▁Harford", + -14.913339614868164 + ], + [ + "▁Alcoholics", + -14.91334342956543 + ], + [ + "▁Greenspan", + -14.91334629058838 + ], + [ + "▁Suction", + -14.913350105285645 + ], + [ + "▁Sandro", + -14.913352012634276 + ], + [ + "▁Hoang", + -14.913381576538086 + ], + [ + "▁EKG", + -14.91338348388672 + ], + [ + "▁ridding", + -14.913395881652832 + ], + [ + "▁overspending", + -14.913410186767578 + ], + [ + "▁18:2", + -14.913432121276855 + ], + [ + "▁potties", + -14.913432121276855 + ], + [ + "▁cohesiveness", + -14.913433074951172 + ], + [ + 
"▁prohibitively", + -14.913467407226562 + ], + [ + "▁Inquisition", + -14.913507461547852 + ], + [ + "▁Testking", + -14.913530349731444 + ], + [ + "▁thrashed", + -14.913534164428713 + ], + [ + "▁luckiest", + -14.913549423217772 + ], + [ + "▁Dhar", + -14.913556098937988 + ], + [ + "collin", + -14.913620948791504 + ], + [ + "▁gusty", + -14.913670539855955 + ], + [ + "▁08:3", + -14.913674354553224 + ], + [ + "▁domiciled", + -14.913718223571776 + ], + [ + "▁Alteration", + -14.913721084594728 + ], + [ + "▁gynecological", + -14.913729667663574 + ], + [ + "▁Prisoners", + -14.91373348236084 + ], + [ + "▁Converge", + -14.91374397277832 + ], + [ + "forbes", + -14.913766860961914 + ], + [ + "pastor", + -14.91378116607666 + ], + [ + "examined", + -14.913787841796877 + ], + [ + "letta", + -14.91380786895752 + ], + [ + "▁1791", + -14.913823127746582 + ], + [ + "▁ambushed", + -14.913840293884276 + ], + [ + "▁chairmanship", + -14.91387939453125 + ], + [ + "▁devalued", + -14.913883209228516 + ], + [ + "risco", + -14.913901329040527 + ], + [ + "▁garnishes", + -14.913944244384766 + ], + [ + "▁Nasal", + -14.91397476196289 + ], + [ + "▁CRB", + -14.914039611816406 + ], + [ + "▁WVU", + -14.914051055908203 + ], + [ + "▁Vishal", + -14.91408634185791 + ], + [ + "▁GONE", + -14.914092063903809 + ], + [ + "1440", + -14.91409683227539 + ], + [ + "7.15", + -14.914159774780272 + ], + [ + "flake", + -14.914325714111328 + ], + [ + "Tested", + -14.914340019226074 + ], + [ + "▁Frisch", + -14.914353370666504 + ], + [ + "▁Douro", + -14.914362907409668 + ], + [ + "▁FARC", + -14.914377212524414 + ], + [ + "▁Leaflet", + -14.914395332336426 + ], + [ + "▁Ingham", + -14.914420127868652 + ], + [ + "▁bumble", + -14.914436340332031 + ], + [ + "▁argumentation", + -14.914490699768066 + ], + [ + "▁wavering", + -14.91450309753418 + ], + [ + "bliss", + -14.914525985717772 + ], + [ + "▁Mantis", + -14.914602279663086 + ], + [ + "▁jurist", + -14.914610862731934 + ], + [ + "▁Modernization", + -14.914674758911133 + ], + [ + "nyk", + -14.914697647094728 + ], + [ + "▁Flashlight", + -14.914751052856444 + ], + [ + "versity", + -14.914819717407228 + ], + [ + "chuan", + -14.91486644744873 + ], + [ + "▁LETTER", + -14.914891242980955 + ], + [ + "▁overspend", + -14.914904594421388 + ], + [ + "▁decorum", + -14.91496753692627 + ], + [ + "▁woodcut", + -14.91502285003662 + ], + [ + "▁hospitalised", + -14.915035247802734 + ], + [ + "▁Dube", + -14.915081024169922 + ], + [ + "adav", + -14.915112495422363 + ], + [ + "▁Urine", + -14.915204048156738 + ], + [ + "▁pubic", + -14.915270805358888 + ], + [ + "3:19", + -14.91527271270752 + ], + [ + "▁floaters", + -14.915289878845217 + ], + [ + "▁hitches", + -14.915301322937012 + ], + [ + "▁Hallowe", + -14.915329933166504 + ], + [ + "▁formalize", + -14.915428161621094 + ], + [ + "▁eHealth", + -14.915512084960938 + ], + [ + "URC", + -14.915538787841797 + ], + [ + "Photoshop", + -14.91555118560791 + ], + [ + "adres", + -14.915620803833008 + ], + [ + "fou", + -14.915694236755373 + ], + [ + "Mania", + -14.91578483581543 + ], + [ + "▁splendidly", + -14.915863037109377 + ], + [ + "▁Dene", + -14.91594696044922 + ], + [ + "Barber", + -14.915968894958496 + ], + [ + "▁NIO", + -14.915969848632812 + ], + [ + "▁reverting", + -14.916044235229492 + ], + [ + "▁Khair", + -14.916053771972656 + ], + [ + "WARN", + -14.91617202758789 + ], + [ + "Leigh", + -14.916204452514648 + ], + [ + "YAL", + -14.916359901428224 + ], + [ + "hashi", + -14.91642951965332 + ], + [ + "▁chroma", + -14.916484832763672 + ], + [ + "Satan", + -14.916485786437988 + ], 
+ [ + "Antonio", + -14.916499137878418 + ], + [ + "▁Saree", + -14.916526794433594 + ], + [ + "RAF", + -14.916560173034668 + ], + [ + "▁Tyga", + -14.916661262512209 + ], + [ + "▁darting", + -14.916748046875 + ], + [ + "JON", + -14.916797637939451 + ], + [ + "▁WannaCry", + -14.917040824890137 + ], + [ + "cien", + -14.917125701904297 + ], + [ + "deutsche", + -14.917170524597168 + ], + [ + "ELEC", + -14.917210578918455 + ], + [ + "▁Laga", + -14.917276382446287 + ], + [ + "▁Linton", + -14.917302131652832 + ], + [ + "▁cede", + -14.91733455657959 + ], + [ + "▁Crum", + -14.917356491088867 + ], + [ + "тра", + -14.917366027832031 + ], + [ + "▁micrometer", + -14.917598724365234 + ], + [ + "▁Moreau", + -14.918015480041504 + ], + [ + "▁Kirkman", + -14.91805362701416 + ], + [ + "Priv", + -14.918081283569336 + ], + [ + "▁nahi", + -14.918132781982422 + ], + [ + "Marion", + -14.918158531188965 + ], + [ + "3:50", + -14.918183326721191 + ], + [ + "Cleaner", + -14.918188095092772 + ], + [ + "luff", + -14.918228149414062 + ], + [ + "▁Krum", + -14.9182767868042 + ], + [ + "▁Lilli", + -14.918295860290527 + ], + [ + "▁watercourse", + -14.918339729309082 + ], + [ + "ebay", + -14.918349266052246 + ], + [ + "▁Airmen", + -14.918395042419434 + ], + [ + "predates", + -14.918397903442385 + ], + [ + "▁Wyman", + -14.91847324371338 + ], + [ + "▁analytically", + -14.91847324371338 + ], + [ + "▁31.5", + -14.918564796447754 + ], + [ + "▁outdoorsman", + -14.918594360351562 + ], + [ + "OKO", + -14.91860294342041 + ], + [ + "▁CIB", + -14.918659210205078 + ], + [ + "ZAN", + -14.918688774108888 + ], + [ + "liang", + -14.918701171875 + ], + [ + "Gala", + -14.91872215270996 + ], + [ + "▁songstress", + -14.91877269744873 + ], + [ + "▁maka", + -14.918807983398438 + ], + [ + "▁NOLA", + -14.91882610321045 + ], + [ + "Lap", + -14.918828010559082 + ], + [ + "▁federalism", + -14.918996810913086 + ], + [ + "kilter", + -14.919224739074709 + ], + [ + "fraction", + -14.919319152832031 + ], + [ + "hassle", + -14.919410705566406 + ], + [ + "underestimating", + -14.919414520263672 + ], + [ + "recommend", + -14.919442176818848 + ], + [ + "verify", + -14.919452667236328 + ], + [ + "nomie", + -14.919468879699709 + ], + [ + "frigidaire", + -14.919475555419922 + ], + [ + "Destroy", + -14.919479370117188 + ], + [ + "Nominations", + -14.919488906860352 + ], + [ + "parametric", + -14.919519424438477 + ], + [ + "interrupt", + -14.91952419281006 + ], + [ + "pioneer", + -14.91952896118164 + ], + [ + "expansion", + -14.91953182220459 + ], + [ + "Adidas", + -14.91953468322754 + ], + [ + "Corporation", + -14.91953468322754 + ], + [ + "Mystery", + -14.91953468322754 + ], + [ + "Sebastian", + -14.919535636901855 + ], + [ + "genuine", + -14.919536590576172 + ], + [ + "Indigenous", + -14.919538497924805 + ], + [ + "Teresa", + -14.919540405273438 + ], + [ + "brilliant", + -14.919543266296388 + ], + [ + "gorgeous", + -14.919544219970703 + ], + [ + "warranty", + -14.919547080993652 + ], + [ + "Jiang", + -14.919548034667969 + ], + [ + "coconut", + -14.919551849365234 + ], + [ + "Yamaha", + -14.919557571411133 + ], + [ + "Accessories", + -14.919559478759766 + ], + [ + "Farrell", + -14.91956901550293 + ], + [ + "allergic", + -14.919574737548828 + ], + [ + "1:03", + -14.919635772705078 + ], + [ + "Mango", + -14.919658660888672 + ], + [ + "▁RTU", + -14.919662475585938 + ], + [ + "▁Securi", + -14.919705390930176 + ], + [ + "Leeds", + -14.919708251953123 + ], + [ + "▁starlet", + -14.919794082641602 + ], + [ + "correlation", + -14.919822692871094 + ], + [ + "Proudly", + 
-14.919838905334473 + ], + [ + "▁Shag", + -14.919849395751951 + ], + [ + "cairn", + -14.91987419128418 + ], + [ + "implemented", + -14.919893264770508 + ], + [ + "Accent", + -14.919907569885254 + ], + [ + "Oddly", + -14.919923782348633 + ], + [ + "enclosed", + -14.919936180114746 + ], + [ + "piracy", + -14.91997241973877 + ], + [ + "bwa", + -14.919991493225098 + ], + [ + "▁Garb", + -14.91999626159668 + ], + [ + "vibe", + -14.920034408569336 + ], + [ + "delayed", + -14.920049667358398 + ], + [ + "Waist", + -14.920083045959473 + ], + [ + "Hah", + -14.920245170593262 + ], + [ + "tumble", + -14.920374870300291 + ], + [ + "encio", + -14.920541763305664 + ], + [ + "▁overseer", + -14.920568466186523 + ], + [ + "Effectively", + -14.920716285705566 + ], + [ + "cdc", + -14.920921325683594 + ], + [ + "Singh", + -14.92099666595459 + ], + [ + "CLEAN", + -14.921009063720703 + ], + [ + "ovitch", + -14.92101001739502 + ], + [ + "stability", + -14.921038627624512 + ], + [ + "▁relaxant", + -14.921064376831056 + ], + [ + "▁BUN", + -14.921137809753418 + ], + [ + "▁Spun", + -14.92115592956543 + ], + [ + "Ether", + -14.92123031616211 + ], + [ + "▁Legit", + -14.921249389648438 + ], + [ + "▁Picket", + -14.921320915222168 + ], + [ + "neither", + -14.921353340148926 + ], + [ + "▁Pique", + -14.92147731781006 + ], + [ + "▁Brest", + -14.921712875366213 + ], + [ + "▁50-100", + -14.921884536743164 + ], + [ + "▁amphibian", + -14.921936988830566 + ], + [ + "Folding", + -14.921944618225098 + ], + [ + "▁filo", + -14.921968460083008 + ], + [ + "▁Pogo", + -14.922006607055664 + ], + [ + "switching", + -14.922103881835938 + ], + [ + "Optical", + -14.922236442565918 + ], + [ + "Acute", + -14.9222993850708 + ], + [ + "aaaaaa", + -14.92234992980957 + ], + [ + "▁CORT", + -14.92237377166748 + ], + [ + "dealing", + -14.922460556030272 + ], + [ + "13-6", + -14.922490119934082 + ], + [ + "ASIC", + -14.922493934631348 + ], + [ + "▁LBS", + -14.922505378723145 + ], + [ + "▁RMD", + -14.922517776489258 + ], + [ + "▁Harvester", + -14.92258358001709 + ], + [ + "▁Primera", + -14.92258644104004 + ], + [ + "▁foreclose", + -14.922605514526367 + ], + [ + "lightweight", + -14.922662734985352 + ], + [ + "▁Mui", + -14.922723770141602 + ], + [ + "▁Moringa", + -14.9227933883667 + ], + [ + "▁whittle", + -14.92286205291748 + ], + [ + "Gwen", + -14.922938346862791 + ], + [ + "20-1", + -14.922975540161133 + ], + [ + "▁philosophi", + -14.923022270202637 + ], + [ + "▁cytotoxic", + -14.923144340515137 + ], + [ + "▁PorousFlow", + -14.92316436767578 + ], + [ + "Experiential", + -14.92317008972168 + ], + [ + "Inflammatory", + -14.92317008972168 + ], + [ + "Osteoporosis", + -14.92317008972168 + ], + [ + "arussalam", + -14.92317008972168 + ], + [ + "▁Almeida", + -14.92317008972168 + ], + [ + "▁Ashkenazi", + -14.92317008972168 + ], + [ + "▁Confucius", + -14.92317008972168 + ], + [ + "▁Dyslexia", + -14.92317008972168 + ], + [ + "▁Economies", + -14.92317008972168 + ], + [ + "▁Eminence", + -14.92317008972168 + ], + [ + "▁FABULOUS", + -14.92317008972168 + ], + [ + "▁Gayatri", + -14.92317008972168 + ], + [ + "▁Illawarra", + -14.92317008972168 + ], + [ + "▁LOCATED", + -14.92317008972168 + ], + [ + "▁Mackinac", + -14.92317008972168 + ], + [ + "▁Pontifical", + -14.92317008972168 + ], + [ + "▁Talladega", + -14.92317008972168 + ], + [ + "▁Toowoomba", + -14.92317008972168 + ], + [ + "▁aerobatic", + -14.92317008972168 + ], + [ + "▁ambivalence", + -14.92317008972168 + ], + [ + "▁clairvoyant", + -14.92317008972168 + ], + [ + "▁exasperated", + -14.92317008972168 + ], + [ + 
"▁ferocity", + -14.92317008972168 + ], + [ + "▁fiduciaries", + -14.92317008972168 + ], + [ + "▁marjoram", + -14.92317008972168 + ], + [ + "▁pluripotent", + -14.92317008972168 + ], + [ + "▁spectroscopic", + -14.92317008972168 + ], + [ + "▁tremolo", + -14.92317008972168 + ], + [ + "▁untrustworthy", + -14.92317008972168 + ], + [ + "▁Absalom", + -14.923171043395996 + ], + [ + "▁COUNCIL", + -14.923171043395996 + ], + [ + "▁Sausalito", + -14.923171043395996 + ], + [ + "▁Valdosta", + -14.923171043395996 + ], + [ + "▁congenial", + -14.923171043395996 + ], + [ + "▁guerilla", + -14.923171043395996 + ], + [ + "▁kickboxing", + -14.923171043395996 + ], + [ + "▁servitude", + -14.923171043395996 + ], + [ + "▁Mankato", + -14.923171997070312 + ], + [ + "▁Millenium", + -14.923172950744627 + ], + [ + "▁igloo", + -14.923172950744627 + ], + [ + "▁Molecule", + -14.923173904418944 + ], + [ + "▁cosmological", + -14.923173904418944 + ], + [ + "▁Biryani", + -14.923175811767578 + ], + [ + "▁reverberation", + -14.923176765441896 + ], + [ + "▁Ossetia", + -14.923178672790527 + ], + [ + "▁Trommel", + -14.923178672790527 + ], + [ + "▁Translucent", + -14.923179626464844 + ], + [ + "▁kratom", + -14.923179626464844 + ], + [ + "▁jostle", + -14.923187255859377 + ], + [ + "▁Petrochemical", + -14.923188209533691 + ], + [ + "vulsion", + -14.923190116882324 + ], + [ + "▁Cusack", + -14.923190116882324 + ], + [ + "▁biosynthesis", + -14.923190116882324 + ], + [ + "▁Palgrave", + -14.923192024230955 + ], + [ + "▁Montoya", + -14.923194885253906 + ], + [ + "▁ulcerative", + -14.92319679260254 + ], + [ + "▁Leandro", + -14.923198699951172 + ], + [ + "▁archangel", + -14.923199653625488 + ], + [ + "▁Storyteller", + -14.923205375671388 + ], + [ + "▁McArthur", + -14.923208236694336 + ], + [ + "▁LiDAR", + -14.923211097717283 + ], + [ + "▁LATEST", + -14.923213005065918 + ], + [ + "▁Ansari", + -14.923215866088867 + ], + [ + "▁libations", + -14.9232177734375 + ], + [ + "▁synthase", + -14.9232177734375 + ], + [ + "▁Poké", + -14.92322063446045 + ], + [ + "▁Shogun", + -14.923235893249512 + ], + [ + "▁Kitsap", + -14.923236846923828 + ], + [ + "▁7:05", + -14.923239707946776 + ], + [ + "▁Walpole", + -14.923239707946776 + ], + [ + "▁Debris", + -14.92325210571289 + ], + [ + "Italia", + -14.923262596130373 + ], + [ + "▁blotting", + -14.92326545715332 + ], + [ + "▁Bracken", + -14.923301696777344 + ], + [ + "▁Sitemap", + -14.923311233520508 + ], + [ + "URAL", + -14.923319816589355 + ], + [ + "▁zwei", + -14.923325538635254 + ], + [ + "▁Fleck", + -14.923333168029783 + ], + [ + "▁adwords", + -14.923344612121582 + ], + [ + "▁congressmen", + -14.92335319519043 + ], + [ + "▁flagrant", + -14.923358917236328 + ], + [ + "▁probationary", + -14.923359870910645 + ], + [ + "▁OpenShift", + -14.923372268676758 + ], + [ + "▁Lackey", + -14.923383712768556 + ], + [ + "▁Merrell", + -14.92339038848877 + ], + [ + "▁reassigned", + -14.92339038848877 + ], + [ + "▁assertiveness", + -14.923394203186035 + ], + [ + "▁lynx", + -14.923406600952148 + ], + [ + "▁unmotivated", + -14.923418998718262 + ], + [ + "▁Buford", + -14.923433303833008 + ], + [ + "▁Judson", + -14.923436164855955 + ], + [ + "▁(03)", + -14.92345142364502 + ], + [ + "▁Interceptor", + -14.923469543457031 + ], + [ + "▁weaned", + -14.923473358154297 + ], + [ + "▁spoilage", + -14.92347812652588 + ], + [ + "▁Essie", + -14.923505783081056 + ], + [ + "▁Competitiveness", + -14.923517227172852 + ], + [ + "▁HomePod", + -14.92353343963623 + ], + [ + "▁Lillard", + -14.923535346984863 + ], + [ + "▁Indication", + -14.92353630065918 + 
], + [ + "▁midsummer", + -14.923542022705078 + ], + [ + "acclimat", + -14.923544883728027 + ], + [ + "▁Blackmagic", + -14.923558235168455 + ], + [ + "▁Groundwater", + -14.923559188842772 + ], + [ + "▁Gurung", + -14.923563957214355 + ], + [ + "▁Melaka", + -14.92357063293457 + ], + [ + "▁Ciaran", + -14.923604011535645 + ], + [ + "▁Barrymore", + -14.923644065856934 + ], + [ + "▁SPARC", + -14.923646926879885 + ], + [ + "▁uncoated", + -14.92365550994873 + ], + [ + "▁ADAM", + -14.923656463623049 + ], + [ + "▁inventiveness", + -14.923670768737791 + ], + [ + "▁moviegoers", + -14.92369556427002 + ], + [ + "ldap", + -14.923757553100586 + ], + [ + "Curiously", + -14.92377471923828 + ], + [ + "▁Nuit", + -14.923834800720217 + ], + [ + "▁Ciao", + -14.923843383789062 + ], + [ + "▁Llama", + -14.92385482788086 + ], + [ + "▁Koji", + -14.92386245727539 + ], + [ + "▁gushed", + -14.923882484436035 + ], + [ + "▁recessive", + -14.92389678955078 + ], + [ + "NRT", + -14.923955917358398 + ], + [ + "▁urdu", + -14.923988342285156 + ], + [ + "sneak", + -14.924030303955078 + ], + [ + "▁Carmine", + -14.924041748046877 + ], + [ + "▁midstream", + -14.924072265625 + ], + [ + "▁punctually", + -14.92408561706543 + ], + [ + "▁Connery", + -14.924112319946287 + ], + [ + "▁magnetically", + -14.92413330078125 + ], + [ + "▁Borderline", + -14.924139976501465 + ], + [ + "▁PAID", + -14.924217224121094 + ], + [ + "SOCIA", + -14.924237251281738 + ], + [ + "▁Kame", + -14.92425537109375 + ], + [ + "▁straightener", + -14.92426586151123 + ], + [ + "▁Rewind", + -14.924297332763672 + ], + [ + "salad", + -14.924298286437988 + ], + [ + "▁Waymo", + -14.92435359954834 + ], + [ + "▁liveliness", + -14.92436408996582 + ], + [ + "▁steadfastly", + -14.924372673034668 + ], + [ + "SLR", + -14.924386978149414 + ], + [ + "8.45", + -14.92441463470459 + ], + [ + "achus", + -14.924437522888184 + ], + [ + "LANG", + -14.924455642700195 + ], + [ + "▁whirling", + -14.924463272094728 + ], + [ + "▁Prospectus", + -14.924479484558104 + ], + [ + "ADP", + -14.924488067626951 + ], + [ + "▁Jalen", + -14.924580574035645 + ], + [ + "▁Brightness", + -14.924582481384276 + ], + [ + "▁Stickman", + -14.924607276916504 + ], + [ + "▁Rogen", + -14.9246187210083 + ], + [ + "▁rustle", + -14.924638748168944 + ], + [ + "▁Crayon", + -14.924659729003906 + ], + [ + "▁Montage", + -14.924667358398438 + ], + [ + "▁gunpoint", + -14.92475414276123 + ], + [ + "▁Lorain", + -14.924782752990724 + ], + [ + "▁Magne", + -14.92486572265625 + ], + [ + "permit", + -14.924901008605955 + ], + [ + "▁Beaded", + -14.924986839294434 + ], + [ + "Familiarity", + -14.925130844116213 + ], + [ + "▁Seaford", + -14.925150871276855 + ], + [ + "ран", + -14.925177574157717 + ], + [ + "▁PAYE", + -14.925209045410156 + ], + [ + "▁Rebuilding", + -14.925275802612305 + ], + [ + "▁Lovin", + -14.92530345916748 + ], + [ + "▁Sancho", + -14.92532444000244 + ], + [ + "Flyer", + -14.925325393676758 + ], + [ + "▁gaffer", + -14.92534065246582 + ], + [ + "qc", + -14.925355911254885 + ], + [ + "shopify", + -14.925400733947754 + ], + [ + "▁AWARE", + -14.925432205200195 + ], + [ + "▁Zoey", + -14.92543601989746 + ], + [ + "▁Shaking", + -14.925469398498535 + ], + [ + "▁Condens", + -14.92548942565918 + ], + [ + "▁absurdly", + -14.925549507141112 + ], + [ + "▁Cocoon", + -14.92570686340332 + ], + [ + "OLLY", + -14.92578411102295 + ], + [ + "▁nudged", + -14.925800323486328 + ], + [ + "7.1%", + -14.9259033203125 + ], + [ + "▁Lillie", + -14.926031112670898 + ], + [ + "EDU", + -14.926209449768066 + ], + [ + "LDS", + -14.926384925842283 + ], + 
[ + "▁flav", + -14.92640495300293 + ], + [ + "▁Riverhead", + -14.92644500732422 + ], + [ + "▁eShop", + -14.926456451416016 + ], + [ + "Maximize", + -14.926557540893556 + ], + [ + "▁LLM", + -14.92661190032959 + ], + [ + "▁Rowhouse", + -14.92662239074707 + ], + [ + "▁$96", + -14.926798820495604 + ], + [ + "unica", + -14.92682933807373 + ], + [ + "▁Tanna", + -14.92686653137207 + ], + [ + "sterreich", + -14.926870346069336 + ], + [ + "UEL", + -14.926883697509766 + ], + [ + "rupa", + -14.926898956298828 + ], + [ + "▁chub", + -14.926942825317385 + ], + [ + "▁Risen", + -14.927024841308594 + ], + [ + "NFC", + -14.927056312561035 + ], + [ + "▁Kiko", + -14.927189826965332 + ], + [ + "UDE", + -14.927396774291992 + ], + [ + "▁$4.8", + -14.927414894104004 + ], + [ + "physiological", + -14.927424430847168 + ], + [ + "5800", + -14.927467346191406 + ], + [ + "▁Inglis", + -14.927486419677734 + ], + [ + "ungan", + -14.92772102355957 + ], + [ + "▁Maryville", + -14.92775821685791 + ], + [ + "▁Rhee", + -14.927855491638184 + ], + [ + "▁Wider", + -14.928014755249023 + ], + [ + "▁Raft", + -14.928122520446776 + ], + [ + "2:03", + -14.928126335144045 + ], + [ + "cooker", + -14.928215980529783 + ], + [ + "gade", + -14.928228378295898 + ], + [ + "ecting", + -14.928274154663086 + ], + [ + "▁wetness", + -14.928308486938477 + ], + [ + "▁TMT", + -14.92832374572754 + ], + [ + "arthur", + -14.928369522094728 + ], + [ + "millionaire", + -14.928422927856444 + ], + [ + "▁14-15", + -14.928556442260742 + ], + [ + "▁Steelhead", + -14.928624153137209 + ], + [ + "▁Loz", + -14.92867946624756 + ], + [ + "▁baptismal", + -14.92879867553711 + ], + [ + "▁Herts", + -14.929329872131348 + ], + [ + "▁Schenk", + -14.92935562133789 + ], + [ + "indicator", + -14.929452896118164 + ], + [ + "▁Ashoka", + -14.929522514343262 + ], + [ + "representative", + -14.929678916931152 + ], + [ + "▁cosmo", + -14.929773330688477 + ], + [ + "HEM", + -14.929984092712402 + ], + [ + "introduction", + -14.930026054382324 + ], + [ + "9-17", + -14.93003749847412 + ], + [ + "supervised", + -14.930106163024902 + ], + [ + "Bullying", + -14.930121421813965 + ], + [ + "nobody", + -14.93012809753418 + ], + [ + "Combination", + -14.930158615112305 + ], + [ + "Melanie", + -14.930168151855469 + ], + [ + "▁RELAT", + -14.930170059204102 + ], + [ + "Brought", + -14.93017292022705 + ], + [ + "Architectural", + -14.930173873901367 + ], + [ + "Bulgaria", + -14.930173873901367 + ], + [ + "Municipal", + -14.930174827575684 + ], + [ + "Briggs", + -14.93017578125 + ], + [ + "sustainability", + -14.930179595947266 + ], + [ + "Modular", + -14.930184364318848 + ], + [ + "eventually", + -14.930188179016112 + ], + [ + "filament", + -14.930188179016112 + ], + [ + "Basket", + -14.93019199371338 + ], + [ + "Detox", + -14.930198669433594 + ], + [ + "Dairy", + -14.930219650268556 + ], + [ + "shrink", + -14.930243492126465 + ], + [ + "Cardiff", + -14.930256843566896 + ], + [ + "Martinez", + -14.930265426635742 + ], + [ + "Joyce", + -14.93026638031006 + ], + [ + "Geneva", + -14.93028163909912 + ], + [ + "▁NPD", + -14.9302978515625 + ], + [ + "▁Thala", + -14.93033218383789 + ], + [ + "textile", + -14.930344581604004 + ], + [ + "experimental", + -14.930412292480469 + ], + [ + "▁Halogen", + -14.93042278289795 + ], + [ + "ESL", + -14.930463790893556 + ], + [ + "Bruno", + -14.9304838180542 + ], + [ + "spinner", + -14.930550575256348 + ], + [ + "introducing", + -14.930567741394045 + ], + [ + "▁Leonid", + -14.930645942687988 + ], + [ + "nasa", + -14.930676460266112 + ], + [ + "SBE", + 
-14.930729866027832 + ], + [ + "▁signifi", + -14.930879592895508 + ], + [ + "▁Aromatic", + -14.93093204498291 + ], + [ + "▁Snook", + -14.93093490600586 + ], + [ + "Printable", + -14.931028366088867 + ], + [ + "unlimited", + -14.931221961975098 + ], + [ + "Brady", + -14.931232452392578 + ], + [ + "Infinity", + -14.93129825592041 + ], + [ + "ONLY", + -14.931306838989258 + ], + [ + "morris", + -14.93132209777832 + ], + [ + "Nag", + -14.931353569030762 + ], + [ + "▁Federalist", + -14.931392669677734 + ], + [ + "ESD", + -14.931466102600098 + ], + [ + "▁polisher", + -14.931599617004396 + ], + [ + "Owning", + -14.931607246398926 + ], + [ + "Debit", + -14.931779861450195 + ], + [ + "1522", + -14.931791305541992 + ], + [ + "▁£1.5", + -14.931838989257812 + ], + [ + "▁15.1", + -14.93202018737793 + ], + [ + "▁Leng", + -14.932031631469728 + ], + [ + "VSC", + -14.932089805603027 + ], + [ + "▁Murad", + -14.932211875915527 + ], + [ + "▁$0.6", + -14.932334899902344 + ], + [ + "ILIA", + -14.932413101196287 + ], + [ + "Shri", + -14.932413101196287 + ], + [ + "▁cymbal", + -14.932415962219238 + ], + [ + "▁ELECT", + -14.932417869567873 + ], + [ + "Uniform", + -14.93251895904541 + ], + [ + "brae", + -14.932637214660645 + ], + [ + "Letting", + -14.932653427124023 + ], + [ + "hoda", + -14.9326753616333 + ], + [ + "REACH", + -14.93270778656006 + ], + [ + "limb", + -14.932796478271484 + ], + [ + "hankar", + -14.93281364440918 + ], + [ + "Luck", + -14.93288230895996 + ], + [ + "▁punctuate", + -14.932989120483398 + ], + [ + "3-13", + -14.933065414428713 + ], + [ + "▁desde", + -14.933189392089844 + ], + [ + "▁Nayak", + -14.93319320678711 + ], + [ + "▁espouse", + -14.933207511901855 + ], + [ + "UCO", + -14.933218002319336 + ], + [ + "▁Ruh", + -14.9332275390625 + ], + [ + "Cave", + -14.933284759521484 + ], + [ + "cussion", + -14.933308601379396 + ], + [ + "▁Rett", + -14.933378219604492 + ], + [ + "uttle", + -14.933425903320312 + ], + [ + "▁gnaw", + -14.93346881866455 + ], + [ + "clamation", + -14.93349266052246 + ], + [ + "BlacKkKlansman", + -14.933587074279783 + ], + [ + "Circumstances", + -14.933587074279783 + ], + [ + "Cultivating", + -14.933587074279783 + ], + [ + "INGREDIENTS", + -14.933587074279783 + ], + [ + "Ignorance", + -14.933587074279783 + ], + [ + "Khawaja", + -14.933587074279783 + ], + [ + "Serendipity", + -14.933587074279783 + ], + [ + "Spontaneous", + -14.933587074279783 + ], + [ + "▁888-405-7720", + -14.933587074279783 + ], + [ + "▁BEAUTY", + -14.933587074279783 + ], + [ + "▁Equilibrium", + -14.933587074279783 + ], + [ + "▁Etruscan", + -14.933587074279783 + ], + [ + "▁FLORIST", + -14.933587074279783 + ], + [ + "▁Issaquah", + -14.933587074279783 + ], + [ + "▁Optometrist", + -14.933587074279783 + ], + [ + "▁Polypropylene", + -14.933587074279783 + ], + [ + "▁Scavenger", + -14.933587074279783 + ], + [ + "▁Sorority", + -14.933587074279783 + ], + [ + "▁TOGETHER", + -14.933587074279783 + ], + [ + "▁Villanueva", + -14.933587074279783 + ], + [ + "▁accruing", + -14.933587074279783 + ], + [ + "▁cesarean", + -14.933587074279783 + ], + [ + "▁desolation", + -14.933587074279783 + ], + [ + "▁endometrial", + -14.933587074279783 + ], + [ + "▁entrapment", + -14.933587074279783 + ], + [ + "▁guaranty", + -14.933587074279783 + ], + [ + "▁imperceptible", + -14.933587074279783 + ], + [ + "▁lisinopril", + -14.933587074279783 + ], + [ + "▁mistletoe", + -14.933587074279783 + ], + [ + "▁officinalis", + -14.933587074279783 + ], + [ + "▁Humidity", + -14.933588027954102 + ], + [ + "▁Lewandowski", + -14.933588027954102 + ], + [ + 
"▁ceviche", + -14.933588027954102 + ], + [ + "▁clatter", + -14.933588027954102 + ], + [ + "▁equating", + -14.933588027954102 + ], + [ + "▁Halliday", + -14.933588981628418 + ], + [ + "▁rarities", + -14.933588981628418 + ], + [ + "ShareAlike", + -14.933589935302734 + ], + [ + "▁rattlesnake", + -14.933589935302734 + ], + [ + "▁Pattinson", + -14.93359088897705 + ], + [ + "▁Siegfried", + -14.93359088897705 + ], + [ + "▁99.99%", + -14.93359375 + ], + [ + "▁Phipps", + -14.933595657348633 + ], + [ + "▁Treadmill", + -14.933597564697266 + ], + [ + "▁quadcopter", + -14.933597564697266 + ], + [ + "▁JONES", + -14.933598518371582 + ], + [ + "▁Phrases", + -14.933599472045898 + ], + [ + "▁Stubbs", + -14.933603286743164 + ], + [ + "▁Duggar", + -14.93360710144043 + ], + [ + "▁Chaucer", + -14.933610916137695 + ], + [ + "Legitimate", + -14.933615684509276 + ], + [ + "▁Clinique", + -14.933618545532228 + ], + [ + "▁Froome", + -14.933618545532228 + ], + [ + "▁Tiguan", + -14.93362045288086 + ], + [ + "▁Wasatch", + -14.933622360229492 + ], + [ + "▁Zendesk", + -14.933622360229492 + ], + [ + "▁ORCID", + -14.93362522125244 + ], + [ + "Keeffe", + -14.933626174926758 + ], + [ + "▁haystack", + -14.933626174926758 + ], + [ + "▁(1972)", + -14.933627128601074 + ], + [ + "▁FREEDOM", + -14.933632850646973 + ], + [ + "▁Informatica", + -14.933632850646973 + ], + [ + "▁Rapture", + -14.933638572692873 + ], + [ + "▁Generac", + -14.933639526367188 + ], + [ + "▁Randwick", + -14.933648109436035 + ], + [ + "▁gratuities", + -14.933650970458984 + ], + [ + "▁absorbency", + -14.9336519241333 + ], + [ + "▁$500.00", + -14.933656692504885 + ], + [ + "▁WebSphere", + -14.93366813659668 + ], + [ + "▁Hilbert", + -14.933669090270996 + ], + [ + "▁FlashScore", + -14.933670997619627 + ], + [ + "▁Squibb", + -14.933671951293944 + ], + [ + "▁Agape", + -14.933675765991213 + ], + [ + "▁[?]", + -14.933680534362791 + ], + [ + "▁Dubstep", + -14.93368911743164 + ], + [ + "▁decoupling", + -14.93369483947754 + ], + [ + "▁interlinked", + -14.93369483947754 + ], + [ + "▁statuette", + -14.933696746826172 + ], + [ + "▁Prozac", + -14.93369960784912 + ], + [ + "▁Aramco", + -14.933707237243652 + ], + [ + "▁Medusa", + -14.933707237243652 + ], + [ + "▁strident", + -14.933707237243652 + ], + [ + "▁Moeller", + -14.93371868133545 + ], + [ + "▁Pandya", + -14.933719635009766 + ], + [ + "▁chocolatier", + -14.933719635009766 + ], + [ + "▁medicare", + -14.933719635009766 + ], + [ + "▁Kaolin", + -14.933720588684082 + ], + [ + "▁HTSUS", + -14.933723449707031 + ], + [ + "zuka", + -14.93372917175293 + ], + [ + "▁Butterworth", + -14.93373966217041 + ], + [ + "▁pandora", + -14.93373966217041 + ], + [ + "▁transitory", + -14.933740615844728 + ], + [ + "▁Suffer", + -14.933746337890623 + ], + [ + "▁Allyson", + -14.933751106262209 + ], + [ + "▁bushfire", + -14.933755874633787 + ], + [ + "Goat", + -14.933762550354004 + ], + [ + "▁MediaTek", + -14.933780670166016 + ], + [ + "▁Xiong", + -14.933780670166016 + ], + [ + "▁Brack", + -14.933788299560549 + ], + [ + "▁Cargill", + -14.933791160583496 + ], + [ + "▁SanDisk", + -14.933819770812988 + ], + [ + "▁Watercolour", + -14.93385410308838 + ], + [ + "▁Dripping", + -14.933857917785645 + ], + [ + "▁fireflies", + -14.933866500854492 + ], + [ + "ABD", + -14.933876037597656 + ], + [ + "2222", + -14.933879852294922 + ], + [ + "▁HWY", + -14.933881759643556 + ], + [ + "▁NSAIDs", + -14.933882713317873 + ], + [ + "▁Superheroes", + -14.933892250061035 + ], + [ + "▁Sallie", + -14.933893203735352 + ], + [ + "▁Tangent", + -14.933900833129885 + ], + [ + 
"▁deposed", + -14.933954238891602 + ], + [ + "▁nuptials", + -14.933966636657717 + ], + [ + "▁Yahya", + -14.93398380279541 + ], + [ + "▁flavoursome", + -14.934009552001951 + ], + [ + "3,4", + -14.93401050567627 + ], + [ + "▁Snowball", + -14.934048652648926 + ], + [ + "▁haulage", + -14.934048652648926 + ], + [ + "wreaking", + -14.934052467346191 + ], + [ + "▁Maplewood", + -14.934085845947266 + ], + [ + "▁Etna", + -14.934087753295898 + ], + [ + "▁Fishermen", + -14.93411636352539 + ], + [ + "▁Racquet", + -14.93411922454834 + ], + [ + "▁$6.00", + -14.93412971496582 + ], + [ + "Moved", + -14.934138298034668 + ], + [ + "▁hulking", + -14.93422031402588 + ], + [ + "▁Rasa", + -14.934221267700195 + ], + [ + "▁geographer", + -14.934244155883787 + ], + [ + "▁Wulf", + -14.93425750732422 + ], + [ + "▁confounded", + -14.934264183044434 + ], + [ + "DRM", + -14.9342679977417 + ], + [ + "▁tbh", + -14.93427276611328 + ], + [ + "▁unlisted", + -14.934283256530762 + ], + [ + "▁warheads", + -14.934285163879396 + ], + [ + "▁Dynamite", + -14.934354782104492 + ], + [ + "▁modularity", + -14.9343900680542 + ], + [ + "ologize", + -14.934391975402832 + ], + [ + "Ounce", + -14.934396743774414 + ], + [ + "▁Tsum", + -14.934409141540527 + ], + [ + "▁Anglian", + -14.934433937072754 + ], + [ + "▁refactor", + -14.934497833251951 + ], + [ + "▁MBOX", + -14.934508323669434 + ], + [ + "▁Reyna", + -14.934510231018066 + ], + [ + "▁goldsmith", + -14.934514999389648 + ], + [ + "▁Ringwood", + -14.934526443481444 + ], + [ + "▁Productive", + -14.934565544128418 + ], + [ + "▁Greystone", + -14.934603691101074 + ], + [ + "▁disintegrated", + -14.934615135192873 + ], + [ + "▁unjustly", + -14.93465518951416 + ], + [ + "▁sprawled", + -14.9346923828125 + ], + [ + "shipment", + -14.934739112854004 + ], + [ + "▁kurta", + -14.934792518615724 + ], + [ + "▁culling", + -14.934799194335938 + ], + [ + "▁Fijian", + -14.934804916381836 + ], + [ + "▁professing", + -14.934818267822266 + ], + [ + "Shaft", + -14.934852600097656 + ], + [ + "▁masque", + -14.934892654418944 + ], + [ + "Squad", + -14.934893608093262 + ], + [ + "NAH", + -14.934904098510742 + ], + [ + "▁HIF", + -14.934934616088867 + ], + [ + "▁groaning", + -14.934950828552246 + ], + [ + "certainly", + -14.934981346130373 + ], + [ + "THERM", + -14.934996604919434 + ], + [ + "▁Pettit", + -14.935007095336914 + ], + [ + "▁pentagon", + -14.935038566589355 + ], + [ + "terconnectedness", + -14.935115814208984 + ], + [ + "poet", + -14.935131072998049 + ], + [ + "▁GOLDEN", + -14.93514347076416 + ], + [ + "JOR", + -14.935173988342283 + ], + [ + "▁11′′", + -14.935227394104004 + ], + [ + "▁transacting", + -14.935240745544434 + ], + [ + "▁Denzel", + -14.935276985168455 + ], + [ + "▁wielded", + -14.935369491577148 + ], + [ + "▁betraying", + -14.935396194458008 + ], + [ + "clusive", + -14.935440063476562 + ], + [ + "▁DUN", + -14.935492515563965 + ], + [ + "shiny", + -14.935503005981444 + ], + [ + "▁sanctioning", + -14.935544967651367 + ], + [ + "▁RAK", + -14.935556411743164 + ], + [ + "▁grimy", + -14.935571670532228 + ], + [ + "SWE", + -14.935579299926758 + ], + [ + "raocular", + -14.935582160949709 + ], + [ + "▁unblocking", + -14.93564224243164 + ], + [ + "culum", + -14.935848236083984 + ], + [ + "1929", + -14.935869216918944 + ], + [ + "1110", + -14.935928344726562 + ], + [ + "evski", + -14.935940742492676 + ], + [ + "▁Napkin", + -14.936070442199709 + ], + [ + "▁melodramatic", + -14.936079978942873 + ], + [ + "▁Assn", + -14.936137199401855 + ], + [ + "PRM", + -14.936230659484863 + ], + [ + "Angie", + 
-14.936250686645508 + ], + [ + "▁WRAP", + -14.936261177062988 + ], + [ + "▁07:0", + -14.936264991760254 + ], + [ + "▁elation", + -14.936305046081545 + ], + [ + "aghi", + -14.936335563659668 + ], + [ + "▁riverbed", + -14.936370849609377 + ], + [ + "▁Corker", + -14.936511039733888 + ], + [ + "▁Scholl", + -14.93654727935791 + ], + [ + "▁Aveda", + -14.936567306518556 + ], + [ + "▁Meiji", + -14.93661403656006 + ], + [ + "Enact", + -14.936718940734863 + ], + [ + "BMS", + -14.936755180358888 + ], + [ + "▁18:5", + -14.936761856079102 + ], + [ + "guero", + -14.936936378479004 + ], + [ + "▁Backward", + -14.936944007873535 + ], + [ + "▁Meena", + -14.9369478225708 + ], + [ + "▁Adjusting", + -14.936949729919434 + ], + [ + "▁Geologist", + -14.937047958374023 + ], + [ + "▁brushless", + -14.93712329864502 + ], + [ + "▁Dimple", + -14.937143325805664 + ], + [ + "▁BCAA", + -14.937173843383787 + ], + [ + "pressive", + -14.937188148498535 + ], + [ + "▁Tawa", + -14.9371976852417 + ], + [ + "▁mario", + -14.937291145324709 + ], + [ + "Faster", + -14.937295913696287 + ], + [ + "▁Kachin", + -14.937301635742188 + ], + [ + "▁Upward", + -14.937349319458008 + ], + [ + "Glaci", + -14.937373161315918 + ], + [ + "▁normalizing", + -14.93738079071045 + ], + [ + "Revolution", + -14.937421798706056 + ], + [ + "▁Parnell", + -14.937422752380373 + ], + [ + "4:18", + -14.937451362609863 + ], + [ + "MCP", + -14.937808990478516 + ], + [ + "▁3:2", + -14.937819480895996 + ], + [ + "illette", + -14.937898635864258 + ], + [ + "HAB", + -14.937933921813965 + ], + [ + "▁Halley", + -14.93795680999756 + ], + [ + "▁Parkview", + -14.937984466552734 + ], + [ + "izzo", + -14.93800163269043 + ], + [ + "▁stinger", + -14.938043594360352 + ], + [ + "YYYY", + -14.938173294067385 + ], + [ + "▁stopwatch", + -14.938192367553713 + ], + [ + "▁Urbanism", + -14.938552856445312 + ], + [ + "▁firebox", + -14.938599586486816 + ], + [ + "▁Arianna", + -14.938618659973145 + ], + [ + "BTS", + -14.938645362854004 + ], + [ + "fingered", + -14.938657760620115 + ], + [ + "▁McNeill", + -14.938668251037598 + ], + [ + "riddle", + -14.938750267028809 + ], + [ + "ulz", + -14.93882942199707 + ], + [ + "proud", + -14.938889503479004 + ], + [ + "▁dispo", + -14.93896484375 + ], + [ + "▁Keck", + -14.939020156860352 + ], + [ + "▁Gaudi", + -14.939059257507324 + ], + [ + "▁42,000", + -14.939099311828612 + ], + [ + "8.9%", + -14.939126014709473 + ], + [ + "▁Starry", + -14.93917751312256 + ], + [ + "Geographic", + -14.939187049865724 + ], + [ + "joule", + -14.939276695251465 + ], + [ + "▁wove", + -14.939376831054688 + ], + [ + "▁VCP", + -14.939423561096191 + ], + [ + "▁Lakeway", + -14.939478874206545 + ], + [ + "collected", + -14.93948745727539 + ], + [ + "Modify", + -14.939556121826172 + ], + [ + "Gurion", + -14.939581871032717 + ], + [ + "▁bacc", + -14.939780235290527 + ], + [ + "▁Veen", + -14.939935684204102 + ], + [ + "▁Sugi", + -14.939937591552734 + ], + [ + "Grasse", + -14.940024375915527 + ], + [ + "lucent", + -14.940191268920898 + ], + [ + "▁Armen", + -14.940369606018066 + ], + [ + "FLEX", + -14.940476417541504 + ], + [ + "▁despot", + -14.9406099319458 + ], + [ + "kpa", + -14.940629005432127 + ], + [ + "Frequent", + -14.94064712524414 + ], + [ + "▁$4.1", + -14.940649032592772 + ], + [ + "▁Ganja", + -14.940713882446287 + ], + [ + "Contractors", + -14.940725326538086 + ], + [ + "▁Cotta", + -14.940755844116213 + ], + [ + "distributed", + -14.94079303741455 + ], + [ + "▁Dict", + -14.940804481506348 + ], + [ + "Assassin", + -14.940817832946776 + ], + [ + "Pediatric", + 
-14.940821647644045 + ], + [ + "Rhythm", + -14.940828323364258 + ], + [ + "Husband", + -14.94085693359375 + ], + [ + "Victim", + -14.940872192382812 + ], + [ + "Plaintiff", + -14.940882682800291 + ], + [ + "Transparent", + -14.940909385681152 + ], + [ + "Resistant", + -14.940911293029783 + ], + [ + "Marijuana", + -14.940912246704102 + ], + [ + "Jeremiah", + -14.94091510772705 + ], + [ + "tufted", + -14.940916061401367 + ], + [ + "Kathryn", + -14.940917015075684 + ], + [ + "Scatter", + -14.94091796875 + ], + [ + "Exposure", + -14.940918922424316 + ], + [ + "Integrity", + -14.940919876098633 + ], + [ + "groomed", + -14.940921783447266 + ], + [ + "Pokémon", + -14.940926551818848 + ], + [ + "Ethiopia", + -14.940927505493164 + ], + [ + "Cincinnati", + -14.94093132019043 + ], + [ + "Generator", + -14.94093418121338 + ], + [ + "Jenkins", + -14.94093418121338 + ], + [ + "Chloe", + -14.940936088562012 + ], + [ + "florida", + -14.940937042236328 + ], + [ + "Seamless", + -14.94093894958496 + ], + [ + "evaluating", + -14.940942764282228 + ], + [ + "▁Grob", + -14.940942764282228 + ], + [ + "revolutionary", + -14.94095230102539 + ], + [ + "Worse", + -14.940958976745604 + ], + [ + "Wage", + -14.94097137451172 + ], + [ + "Guidelines", + -14.940973281860352 + ], + [ + "favourite", + -14.940998077392578 + ], + [ + "▁Starling", + -14.941007614135742 + ], + [ + "LEAN", + -14.941021919250488 + ], + [ + "israel", + -14.941049575805664 + ], + [ + "OCD", + -14.94113063812256 + ], + [ + "partition", + -14.941154479980469 + ], + [ + "▁16.1", + -14.941211700439451 + ], + [ + "Connie", + -14.94126033782959 + ], + [ + "presidential", + -14.94127368927002 + ], + [ + "Christie", + -14.941296577453612 + ], + [ + "CNR", + -14.941299438476562 + ], + [ + "imaging", + -14.941322326660156 + ], + [ + "▁Kori", + -14.941329956054688 + ], + [ + "Fault", + -14.941351890563965 + ], + [ + "Specialty", + -14.94137191772461 + ], + [ + "▁Tuned", + -14.941526412963867 + ], + [ + "▁STEPS", + -14.941532135009766 + ], + [ + "Cheat", + -14.94156265258789 + ], + [ + "Organisers", + -14.941574096679688 + ], + [ + "jumper", + -14.941593170166016 + ], + [ + "Everton", + -14.941594123840332 + ], + [ + "btw", + -14.941611289978027 + ], + [ + "Lounge", + -14.941730499267578 + ], + [ + "LBA", + -14.941766738891602 + ], + [ + "ACION", + -14.941781997680664 + ], + [ + "barred", + -14.941794395446776 + ], + [ + "▁friggin", + -14.94182586669922 + ], + [ + "Lumin", + -14.941865921020508 + ], + [ + "scrolling", + -14.942131996154783 + ], + [ + "blok", + -14.942184448242188 + ], + [ + "▁ultralight", + -14.94218921661377 + ], + [ + "Broadly", + -14.942218780517578 + ], + [ + "chaff", + -14.942253112792969 + ], + [ + ".03.201", + -14.942360877990724 + ], + [ + "seeding", + -14.942442893981934 + ], + [ + "▁Tartar", + -14.942445755004885 + ], + [ + "SSU", + -14.942458152770996 + ], + [ + "▁symp", + -14.942533493041992 + ], + [ + "PLU", + -14.942800521850586 + ], + [ + "Outsource", + -14.942852973937988 + ], + [ + "▁friendlies", + -14.942967414855955 + ], + [ + "▁PLL", + -14.94298267364502 + ], + [ + "▁Apu", + -14.942988395690918 + ], + [ + "surround", + -14.943015098571776 + ], + [ + "forged", + -14.943022727966309 + ], + [ + "▁hcl", + -14.943071365356444 + ], + [ + "Amazingly", + -14.943106651306152 + ], + [ + "Brooke", + -14.943181991577148 + ], + [ + "RIBA", + -14.943238258361816 + ], + [ + "legg", + -14.943243026733398 + ], + [ + "Mainly", + -14.943256378173828 + ], + [ + "NICE", + -14.943310737609863 + ], + [ + "▁autoplay", + -14.943312644958496 + ], + [ 
+ "shaded", + -14.9434232711792 + ], + [ + "▁mussel", + -14.94345760345459 + ], + [ + "▁attenuate", + -14.943511009216309 + ], + [ + "▁switchboard", + -14.943567276000977 + ], + [ + "▁Anker", + -14.943646430969238 + ], + [ + "angka", + -14.943815231323242 + ], + [ + "▁Annabel", + -14.943838119506836 + ], + [ + "equa", + -14.943912506103516 + ], + [ + "▁TRP", + -14.943951606750488 + ], + [ + "RUT", + -14.943964004516602 + ], + [ + "baking", + -14.943989753723145 + ], + [ + "ussel", + -14.944046020507812 + ], + [ + "DESIGN", + -14.94408130645752 + ], + [ + "Obituary", + -14.94411277770996 + ], + [ + "▁Crimestoppers", + -14.94411277770996 + ], + [ + "▁DECEMBER", + -14.94411277770996 + ], + [ + "▁FURTHER", + -14.94411277770996 + ], + [ + "▁Gehrig", + -14.94411277770996 + ], + [ + "▁Kenilworth", + -14.94411277770996 + ], + [ + "▁POSSIBILITY", + -14.94411277770996 + ], + [ + "▁Rehoboth", + -14.94411277770996 + ], + [ + "▁Shopxall", + -14.94411277770996 + ], + [ + "▁encircling", + -14.94411277770996 + ], + [ + "▁hemorrhagic", + -14.94411277770996 + ], + [ + "▁hydrochloric", + -14.94411277770996 + ], + [ + "▁irresistibly", + -14.94411277770996 + ], + [ + "▁lactating", + -14.94411277770996 + ], + [ + "▁leprechaun", + -14.94411277770996 + ], + [ + "▁lozenge", + -14.94411277770996 + ], + [ + "▁olanzapine", + -14.94411277770996 + ], + [ + "▁pressurised", + -14.94411277770996 + ], + [ + "▁procrastinator", + -14.94411277770996 + ], + [ + "▁Aggarwal", + -14.944113731384276 + ], + [ + "▁Corvallis", + -14.944113731384276 + ], + [ + "▁Epidemic", + -14.944113731384276 + ], + [ + "▁Freemasonry", + -14.944113731384276 + ], + [ + "▁Hezekiah", + -14.944113731384276 + ], + [ + "▁Khamenei", + -14.944113731384276 + ], + [ + "▁Mausoleum", + -14.944113731384276 + ], + [ + "▁Midwives", + -14.944113731384276 + ], + [ + "▁Mojito", + -14.944113731384276 + ], + [ + "▁POTUS", + -14.944113731384276 + ], + [ + "▁embolism", + -14.944113731384276 + ], + [ + "▁gibberish", + -14.944113731384276 + ], + [ + "▁obliging", + -14.944113731384276 + ], + [ + "▁ostentatious", + -14.944113731384276 + ], + [ + "▁poblano", + -14.944113731384276 + ], + [ + "▁precipice", + -14.944113731384276 + ], + [ + "▁similiar", + -14.944113731384276 + ], + [ + "▁vaudeville", + -14.944113731384276 + ], + [ + ":10.1016/", + -14.944114685058594 + ], + [ + "ashvili", + -14.944114685058594 + ], + [ + "▁cruciate", + -14.944114685058594 + ], + [ + "▁Accrington", + -14.94411563873291 + ], + [ + "▁Ceredigion", + -14.944116592407228 + ], + [ + "▁Primavera", + -14.944116592407228 + ], + [ + "▁ulterior", + -14.944117546081545 + ], + [ + "▁Spokesman", + -14.944119453430176 + ], + [ + "detroit", + -14.944120407104492 + ], + [ + "▁bumblebee", + -14.944120407104492 + ], + [ + "▁costuming", + -14.944122314453123 + ], + [ + "▁groggy", + -14.944122314453123 + ], + [ + "▁maladies", + -14.944122314453123 + ], + [ + "▁Makkah", + -14.944125175476074 + ], + [ + "▁merrier", + -14.944125175476074 + ], + [ + "▁tirade", + -14.944125175476074 + ], + [ + "▁frolicking", + -14.944127082824709 + ], + [ + "▁hypoxia", + -14.944127082824709 + ], + [ + "▁VMWare", + -14.944131851196287 + ], + [ + "▁Rosenbaum", + -14.944135665893556 + ], + [ + "▁Woolwich", + -14.944138526916504 + ], + [ + "▁Megapixel", + -14.944140434265137 + ], + [ + "▁parabolic", + -14.944144248962402 + ], + [ + "▁Kebab", + -14.94414520263672 + ], + [ + "▁Bridle", + -14.944149017333984 + ], + [ + "▁COMPLETELY", + -14.944149017333984 + ], + [ + "▁enquiring", + -14.94415283203125 + ], + [ + "Legislat", + -14.94416046142578 + 
], + [ + "▁Tanglewood", + -14.94416046142578 + ], + [ + "▁Aliexpress", + -14.944164276123049 + ], + [ + "▁turfgrass", + -14.944165229797363 + ], + [ + "▁Woburn", + -14.944183349609377 + ], + [ + "▁Peltier", + -14.944190979003906 + ], + [ + "▁vandalized", + -14.944191932678224 + ], + [ + "▁wieder", + -14.944193840026855 + ], + [ + "▁Brazos", + -14.944196701049805 + ], + [ + "▁£250,000", + -14.944198608398438 + ], + [ + "▁Hannity", + -14.944201469421388 + ], + [ + "▁Ralf", + -14.944209098815918 + ], + [ + "▁Cormac", + -14.944218635559082 + ], + [ + "▁upsurge", + -14.944220542907717 + ], + [ + "▁Chalkboard", + -14.944221496582031 + ], + [ + "▁Duckworth", + -14.944223403930664 + ], + [ + "▁boilerplate", + -14.94423484802246 + ], + [ + "▁stylistically", + -14.944239616394045 + ], + [ + "▁Meltdown", + -14.944247245788574 + ], + [ + "▁impure", + -14.94424819946289 + ], + [ + "heir", + -14.944254875183104 + ], + [ + "▁MacLean", + -14.944258689880373 + ], + [ + "▁Hizb", + -14.944262504577637 + ], + [ + "sailing", + -14.944308280944824 + ], + [ + "perturb", + -14.944329261779783 + ], + [ + "▁unabashedly", + -14.944334030151367 + ], + [ + "▁reappearance", + -14.944342613220217 + ], + [ + "▁dimer", + -14.944353103637695 + ], + [ + "Encompass", + -14.944361686706545 + ], + [ + "▁Roseanne", + -14.944378852844238 + ], + [ + "▁NECA", + -14.94438648223877 + ], + [ + "▁Smeg", + -14.944409370422363 + ], + [ + "▁Superfund", + -14.944417953491213 + ], + [ + "▁dominoes", + -14.94442367553711 + ], + [ + "zheng", + -14.94445514678955 + ], + [ + "▁forgetfulness", + -14.94445514678955 + ], + [ + "▁Distant", + -14.944464683532717 + ], + [ + "Garrett", + -14.944472312927246 + ], + [ + "▁caramelize", + -14.944500923156738 + ], + [ + "▁leery", + -14.944504737854004 + ], + [ + "▁Boar", + -14.94450855255127 + ], + [ + "▁penalised", + -14.944537162780762 + ], + [ + "Modi", + -14.94458293914795 + ], + [ + "Casting", + -14.944616317749023 + ], + [ + "▁lasix", + -14.944640159606934 + ], + [ + "▁Burnsville", + -14.944659233093262 + ], + [ + "▁Snoopy", + -14.944671630859377 + ], + [ + "▁patriarchy", + -14.94468879699707 + ], + [ + "7:35", + -14.944710731506348 + ], + [ + "▁Xender", + -14.944741249084473 + ], + [ + "continental", + -14.944743156433104 + ], + [ + "▁boyhood", + -14.944746971130373 + ], + [ + "▁Dham", + -14.944748878479004 + ], + [ + "▁Dazzle", + -14.944769859313965 + ], + [ + "▁honorably", + -14.94480037689209 + ], + [ + "▁WHEELS", + -14.944865226745604 + ], + [ + "▁Huntley", + -14.944886207580566 + ], + [ + "▁CONNECTION", + -14.94490909576416 + ], + [ + "▁neutralizing", + -14.944910049438477 + ], + [ + "▁metabolized", + -14.94491195678711 + ], + [ + "▁crass", + -14.94491958618164 + ], + [ + "dispers", + -14.944920539855955 + ], + [ + "▁snowpack", + -14.944944381713867 + ], + [ + "▁OWL", + -14.944960594177246 + ], + [ + "▁epithet", + -14.945001602172852 + ], + [ + "NSE", + -14.945011138916016 + ], + [ + "Challa", + -14.94503402709961 + ], + [ + "▁Elizabethtown", + -14.945039749145508 + ], + [ + "▁puny", + -14.945107460021973 + ], + [ + "▁bruce", + -14.945133209228516 + ], + [ + "▁reassembled", + -14.945134162902832 + ], + [ + "▁SPRINGS", + -14.945143699645996 + ], + [ + "▁Sandeep", + -14.94515609741211 + ], + [ + "▁advisories", + -14.94519329071045 + ], + [ + "▁theorized", + -14.945196151733398 + ], + [ + "$150", + -14.945209503173828 + ], + [ + "▁Injector", + -14.945213317871094 + ], + [ + "▁chewable", + -14.945219993591309 + ], + [ + "▁phenom", + -14.945274353027344 + ], + [ + "▁splen", + -14.945389747619627 + 
], + [ + "▁silkscreen", + -14.945399284362791 + ], + [ + "▁refuted", + -14.945404052734377 + ], + [ + "▁hoodia", + -14.94542121887207 + ], + [ + "▁stickiness", + -14.945430755615234 + ], + [ + "▁selflessness", + -14.945444107055664 + ], + [ + "Payout", + -14.945459365844728 + ], + [ + "▁Mattresses", + -14.94546890258789 + ], + [ + "Parser", + -14.945483207702637 + ], + [ + "Fro", + -14.945484161376951 + ], + [ + "▁Cease", + -14.94552993774414 + ], + [ + "▁veganism", + -14.94553565979004 + ], + [ + "▁Wasserman", + -14.945564270019531 + ], + [ + "qar", + -14.945608139038086 + ], + [ + "duration", + -14.945613861083984 + ], + [ + "▁Felton", + -14.945618629455566 + ], + [ + "▁universality", + -14.945670127868652 + ], + [ + "Dictator", + -14.945672988891602 + ], + [ + "ibilty", + -14.945684432983398 + ], + [ + "Encoding", + -14.945697784423828 + ], + [ + "ankle", + -14.945703506469728 + ], + [ + "▁springform", + -14.945703506469728 + ], + [ + "▁Arby", + -14.94577980041504 + ], + [ + "▁EXCEL", + -14.94577980041504 + ], + [ + "▁FCPA", + -14.94591236114502 + ], + [ + "icule", + -14.94597625732422 + ], + [ + "▁jive", + -14.94603157043457 + ], + [ + "▁Kathi", + -14.946202278137209 + ], + [ + "viable", + -14.946319580078123 + ], + [ + "▁grieved", + -14.946346282958984 + ], + [ + "▁directorate", + -14.946392059326172 + ], + [ + "insider", + -14.946440696716309 + ], + [ + "CAROL", + -14.946513175964355 + ], + [ + "OSU", + -14.946533203125 + ], + [ + "▁scouted", + -14.946569442749023 + ], + [ + "▁coarser", + -14.946584701538086 + ], + [ + "▁daub", + -14.94660758972168 + ], + [ + "Diff", + -14.94663143157959 + ], + [ + "▁deciphering", + -14.946638107299805 + ], + [ + "Scrip", + -14.946757316589355 + ], + [ + "▁$1,8", + -14.94681167602539 + ], + [ + "▁Rajan", + -14.946889877319336 + ], + [ + "pula", + -14.946967124938965 + ], + [ + "▁headspace", + -14.94705867767334 + ], + [ + "▁butchered", + -14.94707202911377 + ], + [ + "▁sacramental", + -14.947123527526855 + ], + [ + "▁adoptable", + -14.94714069366455 + ], + [ + "▁criminality", + -14.94729995727539 + ], + [ + "▁Pixi", + -14.947317123413086 + ], + [ + "▁Laminated", + -14.947349548339844 + ], + [ + "▁needlepoint", + -14.947365760803224 + ], + [ + "▁manageability", + -14.947502136230469 + ], + [ + "▁Srl", + -14.94751262664795 + ], + [ + "▁Kuri", + -14.947549819946287 + ], + [ + "▁Fauna", + -14.947629928588867 + ], + [ + "▁Markey", + -14.947731971740724 + ], + [ + "▁12.8", + -14.947775840759276 + ], + [ + "▁Thro", + -14.94789218902588 + ], + [ + "▁marten", + -14.947917938232422 + ], + [ + "▁lengthier", + -14.947919845581056 + ], + [ + "Premise", + -14.94798469543457 + ], + [ + "6:15", + -14.948131561279297 + ], + [ + "ELLY", + -14.948275566101074 + ], + [ + "▁pinpointed", + -14.948282241821287 + ], + [ + "▁BOAT", + -14.948305130004885 + ], + [ + "inventor", + -14.948404312133787 + ], + [ + "▁Sybil", + -14.948426246643066 + ], + [ + "EEF", + -14.948469161987305 + ], + [ + "▁scuffle", + -14.948500633239746 + ], + [ + "▁JCC", + -14.948551177978516 + ], + [ + "Hyperlink", + -14.948606491088867 + ], + [ + "▁untie", + -14.948698043823242 + ], + [ + "▁Gulin", + -14.948724746704102 + ], + [ + "▁Metab", + -14.948850631713867 + ], + [ + "▁Razak", + -14.948869705200195 + ], + [ + "▁syllabi", + -14.94888687133789 + ], + [ + "▁CAME", + -14.9489164352417 + ], + [ + "Duo", + -14.948969841003418 + ], + [ + "▁Cita", + -14.949030876159668 + ], + [ + "omega", + -14.949057579040527 + ], + [ + "Carlo", + -14.949108123779297 + ], + [ + "oldid", + -14.949125289916992 + ], + [ + 
"▁Indexes", + -14.94927215576172 + ], + [ + "voluntary", + -14.949315071105955 + ], + [ + "▁Rado", + -14.949403762817385 + ], + [ + "▁lolly", + -14.949418067932127 + ], + [ + "Yale", + -14.94961166381836 + ], + [ + "Rhyme", + -14.949726104736328 + ], + [ + "Hydra", + -14.949800491333008 + ], + [ + "mcg", + -14.94990348815918 + ], + [ + "▁Stoic", + -14.94995403289795 + ], + [ + "tantra", + -14.950011253356934 + ], + [ + "Mack", + -14.950244903564451 + ], + [ + "▁$9,000", + -14.95025634765625 + ], + [ + "2.95", + -14.950309753417969 + ], + [ + "Klein", + -14.950310707092283 + ], + [ + "▁itemize", + -14.950335502624512 + ], + [ + "aaah", + -14.950553894042969 + ], + [ + "▁gumbo", + -14.9506254196167 + ], + [ + "▁1.25\"", + -14.950642585754396 + ], + [ + "▁BOE", + -14.950658798217772 + ], + [ + "JET", + -14.950782775878906 + ], + [ + "solved", + -14.950902938842772 + ], + [ + "ILITY", + -14.951005935668944 + ], + [ + ":00:00", + -14.95106315612793 + ], + [ + "fice", + -14.95119857788086 + ], + [ + "Cock", + -14.951252937316896 + ], + [ + "9.1%", + -14.951268196105955 + ], + [ + "Hug", + -14.95127296447754 + ], + [ + "EGAN", + -14.951287269592283 + ], + [ + "▁$4.00", + -14.951326370239258 + ], + [ + "▁imam", + -14.95143222808838 + ], + [ + "enkel", + -14.951544761657717 + ], + [ + "scholar", + -14.951726913452148 + ], + [ + "defeated", + -14.951738357543944 + ], + [ + "Practicing", + -14.951742172241213 + ], + [ + "Ceiling", + -14.951752662658691 + ], + [ + "Spoiler", + -14.951760292053224 + ], + [ + "Indulge", + -14.951769828796388 + ], + [ + "Medication", + -14.951772689819336 + ], + [ + "Invitation", + -14.9517822265625 + ], + [ + "flavoured", + -14.951789855957031 + ], + [ + "Phantom", + -14.951793670654297 + ], + [ + "Ethics", + -14.951796531677246 + ], + [ + "Motorcycle", + -14.951796531677246 + ], + [ + "Rodriguez", + -14.951796531677246 + ], + [ + "Ubuntu", + -14.95179843902588 + ], + [ + "compensated", + -14.95179843902588 + ], + [ + "Chevrolet", + -14.951801300048828 + ], + [ + "cholesterol", + -14.951802253723145 + ], + [ + "Proceedings", + -14.95180320739746 + ], + [ + "superior", + -14.95180606842041 + ], + [ + "Lopez", + -14.951818466186523 + ], + [ + "Crunch", + -14.951825141906738 + ], + [ + "Motorola", + -14.951842308044434 + ], + [ + "Salesforce", + -14.95184326171875 + ], + [ + "importance", + -14.951852798461914 + ], + [ + "ecchia", + -14.951882362365724 + ], + [ + "hatter", + -14.95189380645752 + ], + [ + "WRT", + -14.95192050933838 + ], + [ + "Workplace", + -14.951997756958008 + ], + [ + "Princeton", + -14.952028274536133 + ], + [ + "conceptual", + -14.952109336853027 + ], + [ + "glycemic", + -14.952123641967772 + ], + [ + "Tammy", + -14.952152252197266 + ], + [ + "▁HLS", + -14.95216178894043 + ], + [ + "carried", + -14.952178001403809 + ], + [ + "ABOUT", + -14.952221870422363 + ], + [ + "gluten", + -14.952226638793944 + ], + [ + "Simpson", + -14.952240943908691 + ], + [ + "Impressive", + -14.95224666595459 + ], + [ + "Anime", + -14.952255249023438 + ], + [ + "Railway", + -14.952260971069336 + ], + [ + "Newspaper", + -14.952299118041992 + ], + [ + "Rustic", + -14.952340126037598 + ], + [ + "Anywhere", + -14.952375411987305 + ], + [ + "mainstream", + -14.952488899230955 + ], + [ + "▁Caen", + -14.952543258666992 + ], + [ + "▁Moya", + -14.952545166015623 + ], + [ + "towards", + -14.952696800231934 + ], + [ + "▁JMC", + -14.952753067016602 + ], + [ + "EFCC", + -14.952770233154297 + ], + [ + "▁Benji", + -14.952838897705078 + ], + [ + "▁Jamil", + -14.95284938812256 + ], + [ + 
"Strand", + -14.95292854309082 + ], + [ + "kiri", + -14.95317840576172 + ], + [ + "HRO", + -14.95326042175293 + ], + [ + "licitation", + -14.953262329101562 + ], + [ + "ино", + -14.9533052444458 + ], + [ + "▁Morsi", + -14.953325271606444 + ], + [ + "glare", + -14.953455924987791 + ], + [ + "▁HMA", + -14.95345973968506 + ], + [ + "▁Fuzz", + -14.953535079956056 + ], + [ + "▁Paschal", + -14.9535551071167 + ], + [ + "Finishing", + -14.953758239746094 + ], + [ + "octa", + -14.953778266906738 + ], + [ + "evac", + -14.953842163085938 + ], + [ + "[...]", + -14.95384407043457 + ], + [ + "9.25", + -14.953895568847656 + ], + [ + "EEZ", + -14.953904151916504 + ], + [ + "imov", + -14.953936576843262 + ], + [ + "certifi", + -14.95394515991211 + ], + [ + "Owing", + -14.954031944274902 + ], + [ + "▁SERVE", + -14.954102516174316 + ], + [ + "Huh", + -14.954118728637695 + ], + [ + "▁Pilate", + -14.954134941101074 + ], + [ + "▁15.4", + -14.954136848449709 + ], + [ + "WAC", + -14.954144477844238 + ], + [ + "▁incubate", + -14.954154014587402 + ], + [ + "Assisting", + -14.954264640808104 + ], + [ + "OUCH", + -14.954363822937012 + ], + [ + "3.99", + -14.954397201538086 + ], + [ + "▁rulebook", + -14.954458236694336 + ], + [ + "STYLE", + -14.95451831817627 + ], + [ + "5555", + -14.954546928405762 + ], + [ + "obtrusive", + -14.954682350158691 + ], + [ + "7-16", + -14.954703330993652 + ], + [ + "Cecil", + -14.954712867736816 + ], + [ + "1-17", + -14.954716682434082 + ], + [ + "96%", + -14.954723358154297 + ], + [ + "▁888-738-5526", + -14.954751014709473 + ], + [ + "▁Elastane", + -14.954751014709473 + ], + [ + "▁Fraunhofer", + -14.954751014709473 + ], + [ + "▁Maharishi", + -14.954751014709473 + ], + [ + "▁Paltrow", + -14.954751014709473 + ], + [ + "▁Philistines", + -14.954751014709473 + ], + [ + "▁Proprietor", + -14.954751014709473 + ], + [ + "▁Quixote", + -14.954751014709473 + ], + [ + "▁Trumbull", + -14.954751014709473 + ], + [ + "▁Zithromax", + -14.954751014709473 + ], + [ + "▁autosomal", + -14.954751014709473 + ], + [ + "▁unencumbered", + -14.954751014709473 + ], + [ + "▁unscripted", + -14.954751014709473 + ], + [ + "JANUARY", + -14.954751968383787 + ], + [ + "▁Aeronautical", + -14.954751968383787 + ], + [ + "▁Descendants", + -14.954751968383787 + ], + [ + "▁Gatineau", + -14.954751968383787 + ], + [ + "▁Haverhill", + -14.954751968383787 + ], + [ + "▁Marylebone", + -14.954751968383787 + ], + [ + "▁Podiatry", + -14.954751968383787 + ], + [ + "▁RESPONSIBILITY", + -14.954751968383787 + ], + [ + "▁Recherche", + -14.954751968383787 + ], + [ + "▁Seminyak", + -14.954751968383787 + ], + [ + "▁aquaponic", + -14.954751968383787 + ], + [ + "▁beautician", + -14.954751968383787 + ], + [ + "▁binaural", + -14.954751968383787 + ], + [ + "▁bounties", + -14.954751968383787 + ], + [ + "▁epithelium", + -14.954751968383787 + ], + [ + "▁fenugreek", + -14.954751968383787 + ], + [ + "▁frazzled", + -14.954751968383787 + ], + [ + "▁repertory", + -14.954751968383787 + ], + [ + "▁stockpiling", + -14.954751968383787 + ], + [ + "▁synchronizing", + -14.954751968383787 + ], + [ + "▁tamoxifen", + -14.954751968383787 + ], + [ + "▁underwhelmed", + -14.954751968383787 + ], + [ + "▁uninvited", + -14.954751968383787 + ], + [ + "▁Algiers", + -14.954752922058104 + ], + [ + "▁Barnaby", + -14.954752922058104 + ], + [ + "▁einfach", + -14.954752922058104 + ], + [ + "▁glycerol", + -14.954752922058104 + ], + [ + "▁Guerrilla", + -14.954753875732422 + ], + [ + "▁Keepsake", + -14.954753875732422 + ], + [ + "▁Segundo", + -14.954753875732422 + ], + [ + 
"▁insinuate", + -14.954753875732422 + ], + [ + "▁tamales", + -14.954753875732422 + ], + [ + "▁Culpepper", + -14.954754829406738 + ], + [ + "▁Fibrosis", + -14.954755783081056 + ], + [ + "▁archeologist", + -14.954755783081056 + ], + [ + "▁Yoast", + -14.954756736755373 + ], + [ + "▁Ziggler", + -14.954757690429688 + ], + [ + "▁doomsday", + -14.954757690429688 + ], + [ + "▁patisserie", + -14.954757690429688 + ], + [ + "▁Lagerfeld", + -14.954758644104004 + ], + [ + "▁gaudy", + -14.954758644104004 + ], + [ + "Kenny", + -14.95475959777832 + ], + [ + "▁Antilles", + -14.95475959777832 + ], + [ + "▁plyometric", + -14.95475959777832 + ], + [ + "▁FALSE", + -14.954760551452637 + ], + [ + "▁Adelphi", + -14.954761505126951 + ], + [ + "▁Infusionsoft", + -14.954763412475586 + ], + [ + "▁Kettlebell", + -14.954763412475586 + ], + [ + "▁colchicine", + -14.954763412475586 + ], + [ + "▁Godrej", + -14.95476531982422 + ], + [ + "▁NordVPN", + -14.954767227172852 + ], + [ + "▁REPLACE", + -14.954767227172852 + ], + [ + "▁Rajkot", + -14.954769134521484 + ], + [ + "▁polyvinyl", + -14.954769134521484 + ], + [ + "▁Karolina", + -14.954784393310549 + ], + [ + "▁Giroud", + -14.95478630065918 + ], + [ + "▁PureVPN", + -14.954792022705078 + ], + [ + "▁Jervis", + -14.954795837402344 + ], + [ + "▁shamrock", + -14.954797744750977 + ], + [ + "▁Xiamen", + -14.95480251312256 + ], + [ + "▁Bunnies", + -14.954806327819824 + ], + [ + "▁KitKat", + -14.954809188842772 + ], + [ + "▁annulment", + -14.95481300354004 + ], + [ + "▁perusal", + -14.954818725585938 + ], + [ + "▁moonshine", + -14.954821586608888 + ], + [ + "▁Batista", + -14.954822540283203 + ], + [ + "GivingTuesday", + -14.954827308654783 + ], + [ + "▁Karlsson", + -14.954829216003418 + ], + [ + "▁valence", + -14.954829216003418 + ], + [ + "▁Jarrod", + -14.954833984375 + ], + [ + "▁Transplantation", + -14.954838752746582 + ], + [ + "▁Maisie", + -14.954840660095217 + ], + [ + "▁Bissell", + -14.954848289489746 + ], + [ + "▁resealable", + -14.954848289489746 + ], + [ + "itudinal", + -14.95485019683838 + ], + [ + "▁Roberson", + -14.954852104187012 + ], + [ + "12.5", + -14.954854011535645 + ], + [ + "Continually", + -14.954854011535645 + ], + [ + "▁Palmerston", + -14.954854011535645 + ], + [ + "▁endangerment", + -14.95485496520996 + ], + [ + "▁Nuffield", + -14.954855918884276 + ], + [ + "cargo", + -14.954856872558594 + ], + [ + "▁bemused", + -14.95486068725586 + ], + [ + "▁rezept", + -14.954864501953123 + ], + [ + "▁acheive", + -14.954869270324709 + ], + [ + "Quercus", + -14.954870223999023 + ], + [ + "▁rancid", + -14.954876899719238 + ], + [ + "▁Gruden", + -14.954877853393556 + ], + [ + "▁radiographs", + -14.954889297485352 + ], + [ + "▁Collette", + -14.95490264892578 + ], + [ + "▁Ambani", + -14.954903602600098 + ], + [ + "▁Harrisonburg", + -14.95490550994873 + ], + [ + "▁unmodified", + -14.954916000366213 + ], + [ + "▁prolapse", + -14.954927444458008 + ], + [ + "▁unholy", + -14.954933166503906 + ], + [ + "▁shirtless", + -14.954940795898438 + ], + [ + "▁Chabad", + -14.954944610595703 + ], + [ + "▁misusing", + -14.954948425292969 + ], + [ + "Nowhere", + -14.95495891571045 + ], + [ + "▁Activism", + -14.954989433288574 + ], + [ + "▁Seaweed", + -14.954997062683104 + ], + [ + "▁sulfuric", + -14.955005645751951 + ], + [ + "spekt", + -14.955020904541016 + ], + [ + "▁0-7", + -14.955039024353027 + ], + [ + "hexyl", + -14.95507526397705 + ], + [ + "▁mercilessly", + -14.955078125 + ], + [ + "▁compartmentaliz", + -14.955086708068848 + ], + [ + "▁unworn", + -14.95509433746338 + ], + [ + 
"Loneliness", + -14.955108642578123 + ], + [ + "▁CFTC", + -14.955121994018556 + ], + [ + "1:51", + -14.95512580871582 + ], + [ + "Leap", + -14.95513153076172 + ], + [ + "▁customarily", + -14.955160140991213 + ], + [ + "▁snoop", + -14.95516300201416 + ], + [ + "▁Madsen", + -14.955164909362791 + ], + [ + "▁excreted", + -14.955181121826172 + ], + [ + "vile", + -14.95518398284912 + ], + [ + "2:36", + -14.955187797546388 + ], + [ + "▁rationalization", + -14.955190658569336 + ], + [ + "10-30", + -14.955195426940918 + ], + [ + "▁Grassley", + -14.95520305633545 + ], + [ + "▁mourners", + -14.95520305633545 + ], + [ + "▁Fedex", + -14.955209732055664 + ], + [ + "▁unmanaged", + -14.955209732055664 + ], + [ + "▁Shootout", + -14.955215454101562 + ], + [ + "icci", + -14.955219268798828 + ], + [ + "▁solidifies", + -14.955244064331056 + ], + [ + "ruba", + -14.955249786376951 + ], + [ + "▁Appetite", + -14.955270767211914 + ], + [ + "▁legislated", + -14.955270767211914 + ], + [ + "▁propping", + -14.955288887023926 + ], + [ + "▁Huddle", + -14.955300331115724 + ], + [ + "▁Ozil", + -14.955327987670898 + ], + [ + "uffield", + -14.95533275604248 + ], + [ + "▁BMJ", + -14.955387115478516 + ], + [ + "▁Vide", + -14.955410957336426 + ], + [ + "▁Jahangir", + -14.95542812347412 + ], + [ + "Disruptive", + -14.955437660217283 + ], + [ + "mailbox", + -14.955455780029297 + ], + [ + "▁Phon", + -14.955469131469728 + ], + [ + "▁innards", + -14.955480575561523 + ], + [ + "▁plumbed", + -14.955564498901367 + ], + [ + "▁Salina", + -14.95556926727295 + ], + [ + "▁gunning", + -14.9556245803833 + ], + [ + "▁WORDS", + -14.95562744140625 + ], + [ + "▁Namco", + -14.955633163452148 + ], + [ + "▁prefixes", + -14.955693244934082 + ], + [ + "▁Motorbike", + -14.955698013305664 + ], + [ + "▁Varma", + -14.955703735351562 + ], + [ + "▁Smol", + -14.955705642700195 + ], + [ + "gordon", + -14.9557466506958 + ], + [ + "▁Greensburg", + -14.955842971801758 + ], + [ + "▁unfairness", + -14.95586395263672 + ], + [ + "▁safekeeping", + -14.955918312072754 + ], + [ + "▁superfast", + -14.955921173095703 + ], + [ + "▁topiary", + -14.955970764160156 + ], + [ + "Ordered", + -14.955974578857422 + ], + [ + "meda", + -14.955986976623535 + ], + [ + "enten", + -14.955999374389648 + ], + [ + "▁Pronto", + -14.956015586853027 + ], + [ + "▁Scull", + -14.956043243408203 + ], + [ + "PLACE", + -14.956107139587402 + ], + [ + "▁Dinh", + -14.956162452697754 + ], + [ + "▁Usain", + -14.956177711486816 + ], + [ + "▁jetta", + -14.956192016601562 + ], + [ + "▁Samford", + -14.956221580505373 + ], + [ + "▁Hendon", + -14.956266403198242 + ], + [ + "Requiring", + -14.956270217895508 + ], + [ + "▁Summerlin", + -14.956295013427734 + ], + [ + "▁licked", + -14.956422805786133 + ], + [ + "hypno", + -14.956439018249512 + ], + [ + "▁selflessly", + -14.956440925598145 + ], + [ + "haru", + -14.95645809173584 + ], + [ + "▁Kantar", + -14.956482887268066 + ], + [ + "▁Vizio", + -14.95651912689209 + ], + [ + "▁03:5", + -14.956521034240724 + ], + [ + "▁Antler", + -14.95652961730957 + ], + [ + "▁Ashish", + -14.956552505493164 + ], + [ + "▁Reinhardt", + -14.956557273864746 + ], + [ + "ADAS", + -14.956587791442873 + ], + [ + "pock", + -14.956607818603516 + ], + [ + "▁Alissa", + -14.956615447998049 + ], + [ + "Minn", + -14.956668853759766 + ], + [ + "▁scull", + -14.956744194030762 + ], + [ + "▁Cef", + -14.9568452835083 + ], + [ + "▁alarmingly", + -14.956871032714844 + ], + [ + "▁savored", + -14.95693588256836 + ], + [ + "▁Messier", + -14.956995010375977 + ], + [ + "▁humanely", + -14.957022666931152 + 
], + [ + "▁VGN", + -14.95708465576172 + ], + [ + "condens", + -14.957115173339844 + ], + [ + "Statistically", + -14.957459449768066 + ], + [ + "0:45", + -14.95750904083252 + ], + [ + "▁lasso", + -14.957510948181152 + ], + [ + "▁16:4", + -14.957755088806152 + ], + [ + "▁Professionalism", + -14.95785140991211 + ], + [ + "▁Rockwood", + -14.957880020141602 + ], + [ + "▁Refresher", + -14.957905769348145 + ], + [ + "▁Fluff", + -14.957942962646484 + ], + [ + "doom", + -14.958043098449709 + ], + [ + "▁unix", + -14.958157539367676 + ], + [ + "1:52", + -14.95816135406494 + ], + [ + "Kala", + -14.95823097229004 + ], + [ + "ponder", + -14.958303451538086 + ], + [ + "▁0.09", + -14.95840072631836 + ], + [ + "▁Interconnect", + -14.958541870117188 + ], + [ + "2:24", + -14.958773612976074 + ], + [ + "▁Freund", + -14.958832740783691 + ], + [ + "▁Stade", + -14.958854675292969 + ], + [ + "▁Nieder", + -14.95887565612793 + ], + [ + "▁Lesnar", + -14.95907497406006 + ], + [ + "gatherer", + -14.959260940551758 + ], + [ + "▁Esqui", + -14.959274291992188 + ], + [ + "AVEN", + -14.959476470947266 + ], + [ + "Trailer", + -14.959514617919922 + ], + [ + "sicht", + -14.959824562072754 + ], + [ + "retailer", + -14.959912300109863 + ], + [ + "moda", + -14.95998764038086 + ], + [ + "▁personification", + -14.960006713867188 + ], + [ + "▁$190", + -14.9600191116333 + ], + [ + "▁Lowland", + -14.960232734680176 + ], + [ + "Brock", + -14.960272789001465 + ], + [ + "employ", + -14.96027946472168 + ], + [ + "▁SSG", + -14.960289001464844 + ], + [ + "rotor", + -14.96035385131836 + ], + [ + "▁Cumb", + -14.96038818359375 + ], + [ + "▁Euph", + -14.960514068603516 + ], + [ + "LOU", + -14.960565567016602 + ], + [ + "рн", + -14.96062469482422 + ], + [ + "allowed", + -14.960658073425291 + ], + [ + "8:50", + -14.960704803466797 + ], + [ + "▁OTR", + -14.960777282714844 + ], + [ + "ichiro", + -14.96093463897705 + ], + [ + "▁Vasili", + -14.960967063903809 + ], + [ + "perception", + -14.960991859436035 + ], + [ + "▁Ocho", + -14.961175918579102 + ], + [ + "CFD", + -14.961176872253418 + ], + [ + "▁17:3", + -14.961249351501465 + ], + [ + "Reid", + -14.961268424987791 + ], + [ + "▁counterweight", + -14.961435317993164 + ], + [ + "▁Sensi", + -14.961525917053224 + ], + [ + "marco", + -14.961604118347168 + ], + [ + "2.25", + -14.961636543273926 + ], + [ + "DPR", + -14.961725234985352 + ], + [ + "doro", + -14.96176815032959 + ], + [ + "8:40", + -14.961936950683594 + ], + [ + "▁hindu", + -14.961990356445312 + ], + [ + "PPG", + -14.962016105651855 + ], + [ + "▁dba", + -14.962041854858398 + ], + [ + "▁guestbook", + -14.962197303771973 + ], + [ + "▁viele", + -14.962200164794922 + ], + [ + "▁impregnat", + -14.962343215942385 + ], + [ + "▁paler", + -14.962359428405762 + ], + [ + "FEL", + -14.962407112121582 + ], + [ + "▁SWAN", + -14.96242618560791 + ], + [ + "carriage", + -14.96242904663086 + ], + [ + "Thorn", + -14.962536811828612 + ], + [ + "▁Categor", + -14.96254825592041 + ], + [ + "NMA", + -14.962629318237305 + ], + [ + "deceased", + -14.962686538696287 + ], + [ + "Supplied", + -14.962740898132324 + ], + [ + "Committed", + -14.962764739990234 + ], + [ + "Qube", + -14.962766647338867 + ], + [ + "refrigerator", + -14.96277904510498 + ], + [ + "Wisdom", + -14.962779998779297 + ], + [ + "Tutorial", + -14.962780952453612 + ], + [ + "vacuum", + -14.96278190612793 + ], + [ + "Spectrum", + -14.96278476715088 + ], + [ + "Henderson", + -14.962786674499512 + ], + [ + "Personnel", + -14.962786674499512 + ], + [ + "Infrastructure", + -14.962787628173828 + ], + [ + 
"Gartner", + -14.962788581848145 + ], + [ + "representing", + -14.96279239654541 + ], + [ + "Albany", + -14.96280002593994 + ], + [ + "Basketball", + -14.96280002593994 + ], + [ + "fatigue", + -14.962806701660156 + ], + [ + "simulation", + -14.96281623840332 + ], + [ + "sliding", + -14.962850570678713 + ], + [ + "▁Quadri", + -14.962861061096191 + ], + [ + "Hazel", + -14.96286392211914 + ], + [ + "Comparative", + -14.962892532348633 + ], + [ + "mysql", + -14.962894439697266 + ], + [ + "VCL", + -14.962931632995604 + ], + [ + "Assume", + -14.962934494018556 + ], + [ + "Subsequently", + -14.9629487991333 + ], + [ + "▁15:3", + -14.962960243225098 + ], + [ + "1016", + -14.963008880615234 + ], + [ + "Adoption", + -14.9630708694458 + ], + [ + "intentional", + -14.963077545166016 + ], + [ + "zeug", + -14.96308422088623 + ], + [ + "MAKE", + -14.963122367858888 + ], + [ + "boundary", + -14.963140487670898 + ], + [ + "▁McCrea", + -14.963231086730955 + ], + [ + "▁blushing", + -14.963265419006348 + ], + [ + "Sonoma", + -14.963294982910156 + ], + [ + "▁23:1", + -14.963305473327637 + ], + [ + "▁macaron", + -14.96335506439209 + ], + [ + "10000", + -14.963372230529783 + ], + [ + "Bosch", + -14.963420867919922 + ], + [ + "53%", + -14.963448524475098 + ], + [ + "Heidi", + -14.963533401489258 + ], + [ + "Gavin", + -14.96363353729248 + ], + [ + "▁Grote", + -14.963810920715332 + ], + [ + "literal", + -14.963839530944824 + ], + [ + "literate", + -14.963860511779783 + ], + [ + "cuna", + -14.963930130004885 + ], + [ + "FIFA", + -14.96399974822998 + ], + [ + "PETER", + -14.96417236328125 + ], + [ + "Rebel", + -14.96431827545166 + ], + [ + "uccio", + -14.964441299438477 + ], + [ + "ukkah", + -14.96445083618164 + ], + [ + "baca", + -14.9646635055542 + ], + [ + "Smell", + -14.964666366577148 + ], + [ + "Pha", + -14.964813232421877 + ], + [ + "ltimo", + -14.964829444885254 + ], + [ + "Feeding", + -14.964831352233888 + ], + [ + "TSO", + -14.96488094329834 + ], + [ + "▁Handbag", + -14.964885711669922 + ], + [ + "Foul", + -14.96498680114746 + ], + [ + "1021", + -14.965001106262209 + ], + [ + "▁satiate", + -14.9650297164917 + ], + [ + "zyna", + -14.965091705322266 + ], + [ + "▁Kowal", + -14.965147018432615 + ], + [ + "porn", + -14.965182304382324 + ], + [ + "OBO", + -14.965210914611816 + ], + [ + "▁DCU", + -14.965272903442385 + ], + [ + "▁abdomin", + -14.965370178222656 + ], + [ + "Transformational", + -14.965417861938477 + ], + [ + "ANTI", + -14.965441703796388 + ], + [ + "aquet", + -14.965449333190918 + ], + [ + "5:18", + -14.965473175048828 + ], + [ + "▁Janitorial", + -14.965503692626951 + ], + [ + "WEDNESDAY", + -14.96550464630127 + ], + [ + "acyclovir", + -14.96550464630127 + ], + [ + "lakshmi", + -14.96550464630127 + ], + [ + "tocopherol", + -14.96550464630127 + ], + [ + "▁Andrzej", + -14.96550464630127 + ], + [ + "▁Braunfels", + -14.96550464630127 + ], + [ + "▁Catapult", + -14.96550464630127 + ], + [ + "▁Cinematography", + -14.96550464630127 + ], + [ + "▁Crandall", + -14.96550464630127 + ], + [ + "▁Dimethicone", + -14.96550464630127 + ], + [ + "▁Ecstasy", + -14.96550464630127 + ], + [ + "▁Executor", + -14.96550464630127 + ], + [ + "▁Foosball", + -14.96550464630127 + ], + [ + "▁INDUSTRY", + -14.96550464630127 + ], + [ + "▁Stravinsky", + -14.96550464630127 + ], + [ + "▁Volusia", + -14.96550464630127 + ], + [ + "▁Wroclaw", + -14.96550464630127 + ], + [ + "▁archdiocese", + -14.96550464630127 + ], + [ + "▁chuffed", + -14.96550464630127 + ], + [ + "▁ciabatta", + -14.96550464630127 + ], + [ + "▁declarative", + 
-14.96550464630127 + ], + [ + "▁disloyal", + -14.96550464630127 + ], + [ + "▁divestiture", + -14.96550464630127 + ], + [ + "▁elaborating", + -14.96550464630127 + ], + [ + "▁epitomise", + -14.96550464630127 + ], + [ + "▁exhalation", + -14.96550464630127 + ], + [ + "▁jeopardizing", + -14.96550464630127 + ], + [ + "▁ravishing", + -14.96550464630127 + ], + [ + "▁reconstituted", + -14.96550464630127 + ], + [ + "▁rooibos", + -14.96550464630127 + ], + [ + "▁taxonomies", + -14.96550464630127 + ], + [ + "▁Inhibitor", + -14.965505599975586 + ], + [ + "▁affluence", + -14.965505599975586 + ], + [ + "▁regrettably", + -14.965505599975586 + ], + [ + "▁CRUISE", + -14.965506553649902 + ], + [ + "▁Leggings", + -14.965506553649902 + ], + [ + "▁Meilleur", + -14.965506553649902 + ], + [ + "SPOILER", + -14.96550750732422 + ], + [ + "▁icicles", + -14.965508460998535 + ], + [ + "▁Abidjan", + -14.965509414672852 + ], + [ + "▁charlotte", + -14.965509414672852 + ], + [ + "▁DeSoto", + -14.965510368347168 + ], + [ + "▁Ruislip", + -14.965513229370115 + ], + [ + "▁TREATMENT", + -14.965514183044434 + ], + [ + "undra", + -14.965518951416016 + ], + [ + "▁Bhakti", + -14.965518951416016 + ], + [ + "▁thimble", + -14.965518951416016 + ], + [ + "▁Apartheid", + -14.965519905090332 + ], + [ + "▁Petronas", + -14.96552276611328 + ], + [ + "▁Presario", + -14.96552562713623 + ], + [ + "▁bravado", + -14.96552562713623 + ], + [ + "▁chambre", + -14.96552562713623 + ], + [ + "▁Clomid", + -14.965526580810549 + ], + [ + "▁McNair", + -14.965529441833496 + ], + [ + "▁polypeptide", + -14.965530395507812 + ], + [ + "▁Roscommon", + -14.965534210205078 + ], + [ + "▁clapped", + -14.965535163879396 + ], + [ + "▁disruptors", + -14.965535163879396 + ], + [ + "▁Jonathon", + -14.96553897857666 + ], + [ + "▁Borrowing", + -14.965539932250977 + ], + [ + "▁Bovada", + -14.965542793273926 + ], + [ + "▁MLC", + -14.965551376342772 + ], + [ + "▁Phonics", + -14.96555233001709 + ], + [ + "▁Lycra", + -14.96555519104004 + ], + [ + "▁Rosenfeld", + -14.96555995941162 + ], + [ + "▁peridot", + -14.965560913085938 + ], + [ + "▁Frazer", + -14.965568542480469 + ], + [ + "▁2019/2020", + -14.96557903289795 + ], + [ + "▁thyself", + -14.96557903289795 + ], + [ + "▁calico", + -14.965581893920898 + ], + [ + "▁Netgear", + -14.965583801269531 + ], + [ + "▁unverified", + -14.965583801269531 + ], + [ + "SMTP", + -14.96558666229248 + ], + [ + "888-607-", + -14.965591430664062 + ], + [ + "▁Gunther", + -14.965598106384276 + ], + [ + "▁Applique", + -14.965608596801758 + ], + [ + "▁vivre", + -14.965611457824709 + ], + [ + "▁2008-2009", + -14.965615272521973 + ], + [ + "▁motorcycling", + -14.965620040893556 + ], + [ + "▁Garrick", + -14.965621948242188 + ], + [ + "scottish", + -14.96562671661377 + ], + [ + "▁gaga", + -14.96563720703125 + ], + [ + "▁prenuptial", + -14.965643882751465 + ], + [ + "▁Cheong", + -14.96564483642578 + ], + [ + "▁Blaise", + -14.965646743774414 + ], + [ + "DISNEY", + -14.965653419494627 + ], + [ + "▁Canter", + -14.965670585632324 + ], + [ + "▁unbelief", + -14.965675354003906 + ], + [ + "▁digitised", + -14.96568202972412 + ], + [ + "▁mistaking", + -14.96568202972412 + ], + [ + "▁NEEDED", + -14.965691566467283 + ], + [ + "▁Privat", + -14.965699195861816 + ], + [ + "▁Orbitz", + -14.965703010559082 + ], + [ + "▁sickened", + -14.965704917907717 + ], + [ + "▁SEBI", + -14.96571445465088 + ], + [ + "▁Feeney", + -14.965720176696776 + ], + [ + "▁Coronary", + -14.965728759765623 + ], + [ + "▁enroute", + -14.96573257446289 + ], + [ + "Knopf", + -14.96573543548584 + ], + [ + 
"▁mumps", + -14.965744018554688 + ], + [ + "▁Argus", + -14.965753555297852 + ], + [ + "Settling", + -14.9657564163208 + ], + [ + "▁Sixties", + -14.965764999389648 + ], + [ + "▁Chipping", + -14.96581745147705 + ], + [ + "▁plasterboard", + -14.965845108032228 + ], + [ + "▁clenching", + -14.96587085723877 + ], + [ + "▁Sendai", + -14.96587371826172 + ], + [ + "▁scurrying", + -14.965882301330566 + ], + [ + "86%", + -14.965900421142578 + ], + [ + "▁intercity", + -14.965901374816896 + ], + [ + "▁Minibus", + -14.96591091156006 + ], + [ + "6:23", + -14.965912818908691 + ], + [ + "▁hemlock", + -14.965936660766602 + ], + [ + "▁Atoll", + -14.965937614440918 + ], + [ + "gazer", + -14.965964317321776 + ], + [ + "▁lameness", + -14.96596622467041 + ], + [ + "▁Backcountry", + -14.965983390808104 + ], + [ + "▁Represents", + -14.96599006652832 + ], + [ + "Homework", + -14.966072082519531 + ], + [ + "▁Monza", + -14.966103553771973 + ], + [ + "▁Slender", + -14.966111183166504 + ], + [ + "Recognised", + -14.966114044189451 + ], + [ + "Chance", + -14.966121673583984 + ], + [ + "▁subtext", + -14.9661226272583 + ], + [ + "▁supplanted", + -14.966126441955566 + ], + [ + "▁Argentinean", + -14.96613883972168 + ], + [ + "Alphons", + -14.96616268157959 + ], + [ + "▁Backstage", + -14.966197967529297 + ], + [ + "▁Candies", + -14.966215133666992 + ], + [ + "▁Gazi", + -14.966238975524902 + ], + [ + "▁dyson", + -14.966248512268066 + ], + [ + "bloc", + -14.96629524230957 + ], + [ + "▁Hasina", + -14.966299057006836 + ], + [ + "▁Viruses", + -14.966320037841797 + ], + [ + "▁anatomically", + -14.96634006500244 + ], + [ + "▁Hartmann", + -14.966368675231934 + ], + [ + "▁chook", + -14.966384887695312 + ], + [ + "▁credo", + -14.966405868530272 + ], + [ + "▁1808", + -14.966418266296388 + ], + [ + "7:14", + -14.96648120880127 + ], + [ + "▁Micronesia", + -14.966537475585938 + ], + [ + "compute", + -14.966557502746582 + ], + [ + "0.40", + -14.966730117797852 + ], + [ + "warmer", + -14.966753959655762 + ], + [ + "Tenn", + -14.966757774353027 + ], + [ + "priest", + -14.966806411743164 + ], + [ + "timing", + -14.966845512390137 + ], + [ + "▁conflate", + -14.96684741973877 + ], + [ + "▁Overture", + -14.966849327087402 + ], + [ + "▁Bile", + -14.966882705688477 + ], + [ + "3–1", + -14.96689510345459 + ], + [ + "▁Jez", + -14.96696662902832 + ], + [ + "▁headwind", + -14.967033386230469 + ], + [ + "vanna", + -14.967052459716797 + ], + [ + "▁Jafar", + -14.967122077941896 + ], + [ + "▁Deux", + -14.96713924407959 + ], + [ + "▁DNN", + -14.967150688171388 + ], + [ + "▁negated", + -14.967199325561523 + ], + [ + "▁mintage", + -14.96721076965332 + ], + [ + "phytic", + -14.967215538024902 + ], + [ + "Caleb", + -14.967238426208496 + ], + [ + "Xtreme", + -14.967259407043455 + ], + [ + "▁Eigen", + -14.967273712158203 + ], + [ + "▁Orang", + -14.967293739318848 + ], + [ + "▁precio", + -14.967294692993164 + ], + [ + "Zap", + -14.967309951782228 + ], + [ + "▁gravitated", + -14.967367172241213 + ], + [ + "▁spanner", + -14.967459678649902 + ], + [ + "bruk", + -14.96746063232422 + ], + [ + "▁Nampa", + -14.9674654006958 + ], + [ + "▁giggled", + -14.967529296875 + ], + [ + "▁Altona", + -14.96755313873291 + ], + [ + "Hou", + -14.967602729797363 + ], + [ + "▁FPV", + -14.96762752532959 + ], + [ + "cido", + -14.96763515472412 + ], + [ + "▁Validat", + -14.96772575378418 + ], + [ + "▁Longford", + -14.967753410339355 + ], + [ + "▁crispness", + -14.967976570129396 + ], + [ + "WTH", + -14.96803379058838 + ], + [ + "▁fermenter", + -14.968132972717283 + ], + [ + "▁heeded", + 
-14.96817398071289 + ], + [ + "▁Baraka", + -14.968222618103027 + ], + [ + "destine", + -14.96823787689209 + ], + [ + "▁PBR", + -14.968238830566406 + ], + [ + "1880", + -14.968419075012209 + ], + [ + "▁Whisperer", + -14.968453407287598 + ], + [ + "TAK", + -14.968498229980469 + ], + [ + "Acorn", + -14.968705177307127 + ], + [ + "▁slaughtering", + -14.968769073486328 + ], + [ + "▁Roden", + -14.968917846679688 + ], + [ + "▁cumul", + -14.96896266937256 + ], + [ + "Rumour", + -14.968966484069824 + ], + [ + "▁Baca", + -14.969002723693848 + ], + [ + "▁neutralized", + -14.969124794006348 + ], + [ + "▁Kaylee", + -14.96912670135498 + ], + [ + "▁wildland", + -14.96912670135498 + ], + [ + "▁Birchbox", + -14.969154357910156 + ], + [ + "▁WTI", + -14.969193458557127 + ], + [ + "▁Spieler", + -14.969554901123049 + ], + [ + "tsov", + -14.969605445861816 + ], + [ + "WDM", + -14.969663619995115 + ], + [ + "eeee", + -14.969785690307615 + ], + [ + "▁Shahi", + -14.969820022583008 + ], + [ + "2-18", + -14.969823837280272 + ], + [ + "▁Shawl", + -14.969887733459473 + ], + [ + "▁priestly", + -14.970078468322754 + ], + [ + "▁Dredge", + -14.97035026550293 + ], + [ + "▁Gew", + -14.970414161682127 + ], + [ + "Heck", + -14.970643043518066 + ], + [ + "6-48", + -14.970658302307127 + ], + [ + "ENCY", + -14.970662117004396 + ], + [ + "▁Blooming", + -14.970672607421877 + ], + [ + "▁Sportster", + -14.970672607421877 + ], + [ + "5–1", + -14.970791816711426 + ], + [ + "▁Lapp", + -14.970839500427246 + ], + [ + "▁TPG", + -14.970946311950684 + ], + [ + "▁Zug", + -14.970980644226074 + ], + [ + "OIA", + -14.97103786468506 + ], + [ + "Baha", + -14.971096992492676 + ], + [ + "▁Maku", + -14.97110080718994 + ], + [ + "▁Dyk", + -14.971117973327637 + ], + [ + "itsky", + -14.971243858337402 + ], + [ + "advent", + -14.971251487731934 + ], + [ + "$1,000", + -14.971273422241213 + ], + [ + "oyne", + -14.971293449401855 + ], + [ + "SLV", + -14.971345901489258 + ], + [ + "Kari", + -14.97135066986084 + ], + [ + "dominantly", + -14.971369743347168 + ], + [ + "▁Commandant", + -14.97138214111328 + ], + [ + "QUIT", + -14.97145175933838 + ], + [ + "▁$18,000", + -14.971516609191896 + ], + [ + "8.7%", + -14.971558570861816 + ], + [ + "Luxe", + -14.971723556518556 + ], + [ + "fferent", + -14.971779823303224 + ], + [ + "ynn", + -14.971860885620115 + ], + [ + "▁09:4", + -14.971993446350098 + ], + [ + "▁Identi", + -14.972110748291016 + ], + [ + "▁Edie", + -14.972113609313965 + ], + [ + "toast", + -14.972146034240724 + ], + [ + "▁Netbook", + -14.972230911254885 + ], + [ + "ovian", + -14.972234725952148 + ], + [ + "▁PGP", + -14.972243309020996 + ], + [ + "pecializing", + -14.972379684448242 + ], + [ + "Pole", + -14.972405433654783 + ], + [ + "NSO", + -14.972410202026367 + ], + [ + "▁Abdi", + -14.972420692443848 + ], + [ + "▁BEAR", + -14.972529411315918 + ], + [ + "MOTO", + -14.972612380981444 + ], + [ + "FORMAT", + -14.972745895385742 + ], + [ + "??!!", + -14.973064422607422 + ], + [ + "DISC", + -14.973084449768066 + ], + [ + "EVAN", + -14.973134994506836 + ], + [ + "▁Vise", + -14.97321891784668 + ], + [ + "▁philo", + -14.973280906677246 + ], + [ + "ECU", + -14.973390579223633 + ], + [ + "abou", + -14.97348976135254 + ], + [ + "considering", + -14.973550796508787 + ], + [ + "avatar", + -14.973557472229004 + ], + [ + "▁Lpn", + -14.973600387573242 + ], + [ + "▁laze", + -14.973688125610352 + ], + [ + "▁Neale", + -14.97369384765625 + ], + [ + "▁Pelo", + -14.973730087280272 + ], + [ + "Unsurprisingly", + -14.973835945129396 + ], + [ + "▁SNR", + -14.973855018615724 + 
], + [ + "Spouse", + -14.97385597229004 + ], + [ + "0.3%", + -14.973856925964355 + ], + [ + "Avocado", + -14.973872184753418 + ], + [ + "▁Capra", + -14.973885536193848 + ], + [ + "Judith", + -14.973889350891112 + ], + [ + "Survival", + -14.97389030456543 + ], + [ + "Navigation", + -14.97389316558838 + ], + [ + "Webinar", + -14.97389316558838 + ], + [ + "Chapman", + -14.973895072937012 + ], + [ + "diagnosis", + -14.973896026611328 + ], + [ + "Economy", + -14.973899841308594 + ], + [ + "arguably", + -14.97390079498291 + ], + [ + "algebra", + -14.97390365600586 + ], + [ + "challenging", + -14.973906517028809 + ], + [ + "northern", + -14.973922729492188 + ], + [ + "Squeeze", + -14.973939895629885 + ], + [ + "Montreal", + -14.97395133972168 + ], + [ + "audience", + -14.973953247070312 + ], + [ + "greasy", + -14.973955154418944 + ], + [ + "wizard", + -14.973956108093262 + ], + [ + "bandwidth", + -14.973958969116213 + ], + [ + "splitting", + -14.97396755218506 + ], + [ + "Angry", + -14.973968505859377 + ], + [ + "subtle", + -14.973971366882324 + ], + [ + "Freelance", + -14.973973274230955 + ], + [ + "▁Rebbe", + -14.974004745483398 + ], + [ + "ventricular", + -14.974027633666992 + ], + [ + "manifest", + -14.974047660827637 + ], + [ + "Apollo", + -14.974084854125977 + ], + [ + "10-7", + -14.974087715148926 + ], + [ + "▁sunlit", + -14.974102973937988 + ], + [ + "▁DSG", + -14.974115371704102 + ], + [ + "7.7%", + -14.974164962768556 + ], + [ + "Sweeney", + -14.974251747131348 + ], + [ + "Furness", + -14.97428035736084 + ], + [ + "Fiona", + -14.974349975585938 + ], + [ + "selfie", + -14.974350929260254 + ], + [ + "showroom", + -14.974363327026367 + ], + [ + "2.75", + -14.97438907623291 + ], + [ + "Kitty", + -14.974390029907228 + ], + [ + "Bennett", + -14.974410057067873 + ], + [ + "▁Vina", + -14.974454879760742 + ], + [ + "Grady", + -14.974465370178224 + ], + [ + "bringing", + -14.974599838256836 + ], + [ + "vaulted", + -14.974610328674316 + ], + [ + "▁Lauer", + -14.974611282348633 + ], + [ + "André", + -14.974690437316896 + ], + [ + "outlook", + -14.974691390991213 + ], + [ + "Stru", + -14.974736213684082 + ], + [ + "boni", + -14.974773406982422 + ], + [ + "▁Carpal", + -14.974807739257812 + ], + [ + "bladder", + -14.974843978881836 + ], + [ + "▁CAREER", + -14.974987983703612 + ], + [ + "backyard", + -14.975015640258787 + ], + [ + "▁Fira", + -14.97503662109375 + ], + [ + "▁semicolon", + -14.975168228149414 + ], + [ + "▁Bertie", + -14.975212097167969 + ], + [ + "Confess", + -14.975336074829102 + ], + [ + "Evangeli", + -14.97534465789795 + ], + [ + "▁scorch", + -14.975468635559082 + ], + [ + "Cabin", + -14.975469589233398 + ], + [ + "3-33", + -14.97555923461914 + ], + [ + "3:22", + -14.975564002990724 + ], + [ + "swinging", + -14.975598335266112 + ], + [ + "UBA", + -14.975605010986328 + ], + [ + "hoven", + -14.975618362426758 + ], + [ + "▁emirate", + -14.9757080078125 + ], + [ + "▁tribulation", + -14.975712776184082 + ], + [ + "▁upholster", + -14.975714683532717 + ], + [ + "mbros", + -14.975767135620115 + ], + [ + "▁coerce", + -14.975774765014648 + ], + [ + "▁obliterate", + -14.975775718688965 + ], + [ + "▁Landau", + -14.975802421569824 + ], + [ + "▁portage", + -14.975834846496582 + ], + [ + "▁Neutron", + -14.975835800170898 + ], + [ + "▁neutralise", + -14.97587776184082 + ], + [ + "▁bequeath", + -14.975975036621094 + ], + [ + "glazing", + -14.97598361968994 + ], + [ + "▁Blox", + -14.976059913635254 + ], + [ + "3700", + -14.97607421875 + ], + [ + "Superman", + -14.976115226745604 + ], + [ + "▁Iberia", + 
-14.976120948791504 + ], + [ + "ANDO", + -14.976151466369627 + ], + [ + "▁Pendle", + -14.976202011108398 + ], + [ + "kira", + -14.97621250152588 + ], + [ + "issile", + -14.976226806640623 + ], + [ + "prehensive", + -14.976344108581545 + ], + [ + "DePauw", + -14.976373672485352 + ], + [ + "Efficacy", + -14.976373672485352 + ], + [ + "Schizophrenia", + -14.976373672485352 + ], + [ + "▁Ambedkar", + -14.976373672485352 + ], + [ + "▁Chewbacca", + -14.976373672485352 + ], + [ + "▁Daenerys", + -14.976373672485352 + ], + [ + "▁Grindelwald", + -14.976373672485352 + ], + [ + "▁Hackensack", + -14.976373672485352 + ], + [ + "▁Hydrangea", + -14.976373672485352 + ], + [ + "▁Kolhapur", + -14.976373672485352 + ], + [ + "▁Kurosawa", + -14.976373672485352 + ], + [ + "▁Languedoc", + -14.976373672485352 + ], + [ + "▁Manitowoc", + -14.976373672485352 + ], + [ + "▁Marrakesh", + -14.976373672485352 + ], + [ + "▁Minnetonka", + -14.976373672485352 + ], + [ + "▁Narcissist", + -14.976373672485352 + ], + [ + "▁SAMSUNG", + -14.976373672485352 + ], + [ + "▁Transcendental", + -14.976373672485352 + ], + [ + "▁antagonism", + -14.976373672485352 + ], + [ + "▁belligerent", + -14.976373672485352 + ], + [ + "▁boudoir", + -14.976373672485352 + ], + [ + "▁cardiomyopathy", + -14.976373672485352 + ], + [ + "▁diluting", + -14.976373672485352 + ], + [ + "▁enchancment", + -14.976373672485352 + ], + [ + "▁epistemology", + -14.976373672485352 + ], + [ + "▁glazier", + -14.976373672485352 + ], + [ + "▁homogeneity", + -14.976373672485352 + ], + [ + "▁immeasurably", + -14.976373672485352 + ], + [ + "▁interferon", + -14.976373672485352 + ], + [ + "▁quandary", + -14.976373672485352 + ], + [ + "▁roaches", + -14.976373672485352 + ], + [ + "▁smorgasbord", + -14.976373672485352 + ], + [ + "▁unsavory", + -14.976373672485352 + ], + [ + "▁Invesco", + -14.976374626159668 + ], + [ + "▁dailies", + -14.976374626159668 + ], + [ + "▁jaggery", + -14.976374626159668 + ], + [ + "carnitine", + -14.976375579833984 + ], + [ + "▁Aitken", + -14.976375579833984 + ], + [ + "▁Hmong", + -14.976375579833984 + ], + [ + "▁IMPROVE", + -14.976375579833984 + ], + [ + "▁Esperanto", + -14.9763765335083 + ], + [ + "▁Malwarebytes", + -14.9763765335083 + ], + [ + "▁Narcotics", + -14.9763765335083 + ], + [ + "▁caesarean", + -14.9763765335083 + ], + [ + "▁crucible", + -14.976377487182615 + ], + [ + "▁Chiltern", + -14.976378440856934 + ], + [ + "▁Westinghouse", + -14.97637939453125 + ], + [ + "▁decried", + -14.97637939453125 + ], + [ + "▁venerated", + -14.97637939453125 + ], + [ + "▁Gallardo", + -14.976381301879885 + ], + [ + "▁LOVING", + -14.976381301879885 + ], + [ + "▁Limbaugh", + -14.976384162902832 + ], + [ + "▁Smashwords", + -14.976386070251465 + ], + [ + "▁topamax", + -14.976386070251465 + ], + [ + "▁infertile", + -14.976387977600098 + ], + [ + "▁Estrada", + -14.976388931274414 + ], + [ + "▁Roderick", + -14.976388931274414 + ], + [ + "▁Raheem", + -14.97638988494873 + ], + [ + "Seemingly", + -14.976390838623049 + ], + [ + "▁moorland", + -14.976391792297363 + ], + [ + "▁Yamuna", + -14.97639274597168 + ], + [ + "▁dragonflies", + -14.976393699645996 + ], + [ + "ufu", + -14.976394653320312 + ], + [ + "▁destinies", + -14.976394653320312 + ], + [ + "▁marimba", + -14.976394653320312 + ], + [ + "▁Happier", + -14.976398468017578 + ], + [ + "▁Métis", + -14.976399421691896 + ], + [ + "▁economize", + -14.976400375366213 + ], + [ + "▁tolerating", + -14.976405143737791 + ], + [ + "▁Nimbus", + -14.97640609741211 + ], + [ + "▁Cobham", + -14.976407051086426 + ], + [ + "piston", + 
-14.976410865783691 + ], + [ + "▁McDougall", + -14.976411819458008 + ], + [ + "▁trekked", + -14.976411819458008 + ], + [ + "saponin", + -14.976420402526855 + ], + [ + "▁Moseley", + -14.976421356201172 + ], + [ + "Archaeologists", + -14.97642993927002 + ], + [ + "▁WPMU", + -14.97642993927002 + ], + [ + "▁Pershing", + -14.976432800292969 + ], + [ + "▁BSOD", + -14.976439476013184 + ], + [ + "▁Hideaway", + -14.976439476013184 + ], + [ + "▁spunky", + -14.976439476013184 + ], + [ + "lanceolate", + -14.976442337036133 + ], + [ + "▁Novomatic", + -14.976449012756348 + ], + [ + "▁biweekly", + -14.976449012756348 + ], + [ + "▁godmother", + -14.976449012756348 + ], + [ + "▁stirrup", + -14.976451873779297 + ], + [ + "▁13.4", + -14.976454734802246 + ], + [ + "▁hydronic", + -14.976458549499512 + ], + [ + "▁Shutdown", + -14.976460456848145 + ], + [ + "▁unfurnished", + -14.976466178894045 + ], + [ + "▁Scotiabank", + -14.97646713256836 + ], + [ + "▁FIGHT", + -14.976468086242676 + ], + [ + "▁immorality", + -14.97647190093994 + ], + [ + "▁Wickham", + -14.976480484008787 + ], + [ + "▁(20%)", + -14.976481437683104 + ], + [ + "▁Sunroof", + -14.976485252380373 + ], + [ + "▁RingCentral", + -14.976491928100586 + ], + [ + "▁Castilla", + -14.97649383544922 + ], + [ + "▁murine", + -14.97651195526123 + ], + [ + "▁Marston", + -14.976524353027344 + ], + [ + "▁Parsley", + -14.976540565490724 + ], + [ + "▁Adkins", + -14.976543426513672 + ], + [ + "▁blindsided", + -14.976577758789062 + ], + [ + "nashi", + -14.976588249206545 + ], + [ + "▁Multinational", + -14.976592063903809 + ], + [ + "▁Helsing", + -14.976593017578123 + ], + [ + "▁switchgear", + -14.976598739624023 + ], + [ + "▁reverie", + -14.976629257202148 + ], + [ + "▁subsume", + -14.976637840270996 + ], + [ + "▁herniation", + -14.976638793945312 + ], + [ + "▁Rennes", + -14.976644515991213 + ], + [ + "▁Ballpark", + -14.976646423339844 + ], + [ + "▁Kutcher", + -14.976661682128906 + ], + [ + "▁stymied", + -14.976667404174805 + ], + [ + "▁Esprit", + -14.976701736450195 + ], + [ + "▁$4.4", + -14.976703643798828 + ], + [ + "▁Antibody", + -14.976709365844728 + ], + [ + "▁grippy", + -14.976713180541992 + ], + [ + "▁Parris", + -14.976737022399902 + ], + [ + "▁Supa", + -14.976737022399902 + ], + [ + "▁Goyal", + -14.976770401000977 + ], + [ + "▁caucuses", + -14.976773262023926 + ], + [ + "Pause", + -14.976836204528809 + ], + [ + "▁Qld", + -14.976869583129885 + ], + [ + "▁teetering", + -14.976885795593262 + ], + [ + "▁Dryden", + -14.976888656616213 + ], + [ + "▁Handball", + -14.97690200805664 + ], + [ + "Screening", + -14.976921081542969 + ], + [ + "▁Swartz", + -14.97692584991455 + ], + [ + "▁Lashes", + -14.97696018218994 + ], + [ + "1101", + -14.97699737548828 + ], + [ + "▁polity", + -14.977008819580078 + ], + [ + "▁demoted", + -14.977038383483888 + ], + [ + "CLASSES", + -14.977043151855469 + ], + [ + "▁Tapered", + -14.97704792022705 + ], + [ + "▁PLOS", + -14.977051734924316 + ], + [ + "alanine", + -14.977060317993164 + ], + [ + "RECOMMENDED", + -14.97706127166748 + ], + [ + "▁emote", + -14.977065086364746 + ], + [ + "▁sleeplessness", + -14.977083206176758 + ], + [ + "▁VEHICLES", + -14.977096557617188 + ], + [ + "▁underperformed", + -14.977112770080566 + ], + [ + "▁confided", + -14.977148056030272 + ], + [ + "▁Scrambler", + -14.977161407470703 + ], + [ + "▁alibi", + -14.977170944213867 + ], + [ + "▁duplexes", + -14.977201461791992 + ], + [ + "▁Snapback", + -14.977225303649902 + ], + [ + "▁impressionist", + -14.977252006530762 + ], + [ + "'10\"", + -14.977298736572266 + ], + [ + 
"▁Moyes", + -14.977300643920898 + ], + [ + "LIPS", + -14.977306365966797 + ], + [ + "▁serialization", + -14.97732925415039 + ], + [ + "▁postulate", + -14.977370262145996 + ], + [ + "haem", + -14.97741413116455 + ], + [ + "▁steamboat", + -14.977416038513184 + ], + [ + "▁suspenders", + -14.977422714233398 + ], + [ + "craftsman", + -14.977447509765623 + ], + [ + "▁Outpost", + -14.977471351623535 + ], + [ + "▁Cinta", + -14.977533340454102 + ], + [ + "▁Roshan", + -14.97757339477539 + ], + [ + "▁Kutch", + -14.977577209472656 + ], + [ + "▁reentry", + -14.977690696716309 + ], + [ + "▁QUEEN", + -14.977743148803713 + ], + [ + "▁Stargate", + -14.977859497070312 + ], + [ + "▁unfocused", + -14.977867126464844 + ], + [ + "▁anxiousness", + -14.97788143157959 + ], + [ + "▁sceneries", + -14.977887153625488 + ], + [ + "▁countywide", + -14.977901458740234 + ], + [ + "GBR", + -14.977917671203612 + ], + [ + "▁leatherette", + -14.977952003479004 + ], + [ + "▁parallelism", + -14.978026390075684 + ], + [ + "▁burrowing", + -14.978045463562012 + ], + [ + "▁beeping", + -14.978056907653809 + ], + [ + "ergic", + -14.978066444396973 + ], + [ + "▁Graphical", + -14.978097915649414 + ], + [ + "Xpert", + -14.978110313415527 + ], + [ + "▁Shima", + -14.978142738342283 + ], + [ + "Grandma", + -14.978143692016602 + ], + [ + "▁ODD", + -14.978188514709473 + ], + [ + "ndrogenic", + -14.978263854980469 + ], + [ + "zina", + -14.97828197479248 + ], + [ + "▁overvalued", + -14.978288650512695 + ], + [ + "▁bouldering", + -14.978338241577148 + ], + [ + "performer", + -14.978350639343262 + ], + [ + "▁shied", + -14.978589057922363 + ], + [ + "indiscriminately", + -14.978620529174805 + ], + [ + "▁XSM", + -14.978697776794434 + ], + [ + "▁tallies", + -14.978866577148438 + ], + [ + "Burger", + -14.978951454162598 + ], + [ + "▁foodborne", + -14.97909450531006 + ], + [ + "▁Restored", + -14.97913932800293 + ], + [ + "▁STAFF", + -14.97914218902588 + ], + [ + "interpretation", + -14.97916316986084 + ], + [ + "▁EFTA", + -14.979193687438965 + ], + [ + "▁applauding", + -14.979222297668455 + ], + [ + "▁cruelly", + -14.97923183441162 + ], + [ + "Matti", + -14.97927951812744 + ], + [ + "▁Hakim", + -14.979291915893556 + ], + [ + "▁feathery", + -14.979307174682615 + ], + [ + "1:41", + -14.979403495788574 + ], + [ + "▁Hainan", + -14.979411125183104 + ], + [ + "▁Zuk", + -14.979425430297852 + ], + [ + "▁fairest", + -14.979454040527344 + ], + [ + "▁$155", + -14.979467391967772 + ], + [ + "▁Fredrik", + -14.979476928710938 + ], + [ + "▁Sauk", + -14.979689598083496 + ], + [ + "▁frictionless", + -14.97971534729004 + ], + [ + "▁Trotter", + -14.979723930358888 + ], + [ + "▁offloading", + -14.979732513427734 + ], + [ + "▁bravest", + -14.979785919189451 + ], + [ + "HEP", + -14.979843139648438 + ], + [ + "Oooo", + -14.98006534576416 + ], + [ + "▁MEK", + -14.980074882507324 + ], + [ + "▁Asics", + -14.980111122131348 + ], + [ + "▁Mannheim", + -14.98013687133789 + ], + [ + "▁lacto", + -14.980154037475586 + ], + [ + "▁WET", + -14.980169296264648 + ], + [ + "▁Kyi", + -14.980223655700684 + ], + [ + "JEE", + -14.980303764343262 + ], + [ + "▁GAIN", + -14.980307579040527 + ], + [ + "▁AKG", + -14.980443954467772 + ], + [ + "▁Pyle", + -14.980483055114746 + ], + [ + "▁Assemblyman", + -14.980506896972656 + ], + [ + "▁Northgate", + -14.980511665344238 + ], + [ + "EFER", + -14.980531692504885 + ], + [ + "▁01:0", + -14.980664253234863 + ], + [ + "▁resuscitate", + -14.98068904876709 + ], + [ + "▁handstand", + -14.98078441619873 + ], + [ + "▁punchline", + -14.980846405029297 + ], + [ + 
"▁counterparty", + -14.980947494506836 + ], + [ + "▁10:10", + -14.98109245300293 + ], + [ + "▁CREAM", + -14.981119155883787 + ], + [ + "▁rnd", + -14.98121166229248 + ], + [ + "▁LUC", + -14.981279373168944 + ], + [ + "▁Fitter", + -14.981367111206056 + ], + [ + "▁Kemper", + -14.981371879577637 + ], + [ + "▁Pilar", + -14.9814453125 + ], + [ + "▁Trois", + -14.981647491455078 + ], + [ + "katu", + -14.981684684753418 + ], + [ + "1:42", + -14.98178005218506 + ], + [ + "▁Bartel", + -14.981877326965332 + ], + [ + "▁Mirai", + -14.981884002685549 + ], + [ + "AFE", + -14.981989860534668 + ], + [ + "▁Tiff", + -14.982003211975098 + ], + [ + "NCAP", + -14.982012748718262 + ], + [ + "▁Takeover", + -14.982073783874512 + ], + [ + "19%", + -14.9821138381958 + ], + [ + "hleb", + -14.982148170471191 + ], + [ + "▁PLT", + -14.982175827026367 + ], + [ + "analytical", + -14.982461929321287 + ], + [ + "▁Linder", + -14.982479095458984 + ], + [ + "▁Libri", + -14.982582092285156 + ], + [ + "▁Auch", + -14.982635498046877 + ], + [ + "GSC", + -14.982745170593262 + ], + [ + "▁sprinted", + -14.982763290405272 + ], + [ + "▁541-", + -14.98292636871338 + ], + [ + "▁LBJ", + -14.983134269714355 + ], + [ + "esqui", + -14.983158111572266 + ], + [ + "▁Asiatic", + -14.983181953430176 + ], + [ + "▁ESV", + -14.983259201049805 + ], + [ + "ENNE", + -14.983325004577637 + ], + [ + "▁Allende", + -14.983360290527344 + ], + [ + "▁Studi", + -14.98344898223877 + ], + [ + "▁sumac", + -14.983562469482422 + ], + [ + "proving", + -14.983612060546877 + ], + [ + "protest", + -14.983708381652832 + ], + [ + "▁GAF", + -14.983859062194824 + ], + [ + "▁prepay", + -14.983869552612305 + ], + [ + "▁sharpie", + -14.983928680419922 + ], + [ + "maple", + -14.984190940856934 + ], + [ + "mputation", + -14.98421859741211 + ], + [ + "▁Brough", + -14.98427677154541 + ], + [ + "▁Omani", + -14.984280586242676 + ], + [ + "▁Suda", + -14.984328269958496 + ], + [ + "▁Neverland", + -14.984456062316896 + ], + [ + "▁03:0", + -14.984603881835938 + ], + [ + "destroy", + -14.984606742858888 + ], + [ + "▁Mellow", + -14.98460865020752 + ], + [ + "▁07:1", + -14.984728813171388 + ], + [ + "▁Amaze", + -14.984728813171388 + ], + [ + "Lava", + -14.984822273254396 + ], + [ + "Brenda", + -14.984902381896973 + ], + [ + "▁Welder", + -14.985017776489258 + ], + [ + "▁BTEC", + -14.985036849975586 + ], + [ + "Enabling", + -14.985069274902344 + ], + [ + "Conscious", + -14.98509407043457 + ], + [ + "cursor", + -14.985095977783203 + ], + [ + "Ignore", + -14.985103607177734 + ], + [ + "Shandong", + -14.985106468200684 + ], + [ + "infested", + -14.985108375549316 + ], + [ + "Promise", + -14.985118865966797 + ], + [ + "symptom", + -14.98512077331543 + ], + [ + "Citrus", + -14.985126495361328 + ], + [ + "Kathleen", + -14.985127449035645 + ], + [ + "Supervisor", + -14.985127449035645 + ], + [ + "restoration", + -14.98512840270996 + ], + [ + "Cuisine", + -14.985129356384276 + ], + [ + "Carnival", + -14.985133171081545 + ], + [ + "Warranty", + -14.985133171081545 + ], + [ + "Montgomery", + -14.985135078430176 + ], + [ + "stimulating", + -14.985136032104492 + ], + [ + "Vanessa", + -14.985137939453123 + ], + [ + "Jews", + -14.98517608642578 + ], + [ + "Donovan", + -14.98519229888916 + ], + [ + "▁gaffe", + -14.985209465026855 + ], + [ + "Vermont", + -14.985215187072754 + ], + [ + "Editorial", + -14.985223770141602 + ], + [ + "Nexus", + -14.985258102416992 + ], + [ + "duplication", + -14.985269546508787 + ], + [ + "summit", + -14.985280990600586 + ], + [ + "diploma", + -14.985295295715332 + ], + [ + 
"igraphy", + -14.985302925109863 + ], + [ + "Afternoon", + -14.985311508178713 + ], + [ + "Commit", + -14.985323905944824 + ], + [ + "racism", + -14.985555648803713 + ], + [ + "poison", + -14.985640525817873 + ], + [ + "▁microbe", + -14.985648155212402 + ], + [ + "COLOR", + -14.985713005065918 + ], + [ + "OSTER", + -14.985963821411133 + ], + [ + "6:14", + -14.986123085021973 + ], + [ + "viola", + -14.986124038696287 + ], + [ + "▁Bode", + -14.986130714416504 + ], + [ + "vip", + -14.986146926879885 + ], + [ + "▁AQA", + -14.986167907714844 + ], + [ + "jitsu", + -14.986266136169434 + ], + [ + "▁decant", + -14.986273765563965 + ], + [ + "▁Sonu", + -14.98637580871582 + ], + [ + "streak", + -14.986404418945312 + ], + [ + "▁Birdie", + -14.986432075500488 + ], + [ + "installer", + -14.986486434936523 + ], + [ + "ideally", + -14.986516952514648 + ], + [ + "BTL", + -14.98660373687744 + ], + [ + "cracking", + -14.98668384552002 + ], + [ + "▁immigrate", + -14.986762046813965 + ], + [ + "▁Tasha", + -14.98696517944336 + ], + [ + "▁Organo", + -14.987058639526367 + ], + [ + "stoke", + -14.987064361572266 + ], + [ + "7.30", + -14.987066268920898 + ], + [ + "▁painkiller", + -14.987070083618164 + ], + [ + "warehouse", + -14.987077713012695 + ], + [ + "HDR", + -14.987099647521973 + ], + [ + "DEPOSIT", + -14.987302780151367 + ], + [ + "framework", + -14.987312316894531 + ], + [ + "heumatoid", + -14.987342834472656 + ], + [ + "▁misogynist", + -14.987361907958984 + ], + [ + "Cyclists", + -14.9873628616333 + ], + [ + "Negligence", + -14.9873628616333 + ], + [ + "bulwark", + -14.9873628616333 + ], + [ + "▁Ahmadinejad", + -14.9873628616333 + ], + [ + "▁Alkaline", + -14.9873628616333 + ], + [ + "▁Aotearoa", + -14.9873628616333 + ], + [ + "▁Colloquium", + -14.9873628616333 + ], + [ + "▁Crosstrek", + -14.9873628616333 + ], + [ + "▁Dordogne", + -14.9873628616333 + ], + [ + "▁Embiid", + -14.9873628616333 + ], + [ + "▁LaGuardia", + -14.9873628616333 + ], + [ + "▁Maldonado", + -14.9873628616333 + ], + [ + "▁Marmalade", + -14.9873628616333 + ], + [ + "▁Montpelier", + -14.9873628616333 + ], + [ + "▁Mpumalanga", + -14.9873628616333 + ], + [ + "▁OCTOBER", + -14.9873628616333 + ], + [ + "▁Poroshenko", + -14.9873628616333 + ], + [ + "▁Rheumatology", + -14.9873628616333 + ], + [ + "▁Tahrir", + -14.9873628616333 + ], + [ + "▁Tavares", + -14.9873628616333 + ], + [ + "▁VALLEY", + -14.9873628616333 + ], + [ + "▁Waddesdon", + -14.9873628616333 + ], + [ + "▁dachshund", + -14.9873628616333 + ], + [ + "▁dastardly", + -14.9873628616333 + ], + [ + "▁dendritic", + -14.9873628616333 + ], + [ + "▁desirous", + -14.9873628616333 + ], + [ + "▁exorcism", + -14.9873628616333 + ], + [ + "▁frittata", + -14.9873628616333 + ], + [ + "▁furosemide", + -14.9873628616333 + ], + [ + "▁geocaching", + -14.9873628616333 + ], + [ + "▁indignant", + -14.9873628616333 + ], + [ + "▁intramural", + -14.9873628616333 + ], + [ + "▁leniency", + -14.9873628616333 + ], + [ + "▁microfluidic", + -14.9873628616333 + ], + [ + "▁naloxone", + -14.9873628616333 + ], + [ + "▁obfuscation", + -14.9873628616333 + ], + [ + "▁reforestation", + -14.9873628616333 + ], + [ + "▁silagra", + -14.9873628616333 + ], + [ + "▁uneasiness", + -14.9873628616333 + ], + [ + "▁wrinkling", + -14.9873628616333 + ], + [ + "▁TELEVISION", + -14.987363815307615 + ], + [ + "▁VOLUME", + -14.987363815307615 + ], + [ + "▁innuendo", + -14.987363815307615 + ], + [ + "▁vestiges", + -14.987363815307615 + ], + [ + "Applause", + -14.987364768981934 + ], + [ + "▁Brabant", + -14.987364768981934 + ], + [ + 
"▁Maximilian", + -14.98736572265625 + ], + [ + "▁Schweitzer", + -14.98736572265625 + ], + [ + "▁clinker", + -14.987366676330566 + ], + [ + "▁Elohim", + -14.9873685836792 + ], + [ + "▁Pitchfork", + -14.9873685836792 + ], + [ + "▁Warehousing", + -14.9873685836792 + ], + [ + "▁Chekhov", + -14.987369537353516 + ], + [ + "▁kitesurfing", + -14.987369537353516 + ], + [ + "▁IMMEDIATELY", + -14.987370491027832 + ], + [ + "▁Cockpit", + -14.987371444702148 + ], + [ + "▁Paphos", + -14.98737335205078 + ], + [ + "▁Connaught", + -14.987374305725098 + ], + [ + "▁congratulatory", + -14.98737621307373 + ], + [ + "YEAH", + -14.987378120422363 + ], + [ + "▁LVMH", + -14.987378120422363 + ], + [ + "▁disillusionment", + -14.987378120422363 + ], + [ + "▁Solapur", + -14.98737907409668 + ], + [ + "▁Laramie", + -14.987380981445312 + ], + [ + "Microelectronics", + -14.987381935119627 + ], + [ + "▁sexiest", + -14.987384796142578 + ], + [ + "▁McHugh", + -14.987385749816896 + ], + [ + "▁BECOME", + -14.987386703491213 + ], + [ + "▁Trellis", + -14.987388610839844 + ], + [ + "▁Pitbull", + -14.987394332885742 + ], + [ + "▁peplum", + -14.987394332885742 + ], + [ + "▁EBSCO", + -14.987396240234377 + ], + [ + "▁scapula", + -14.987396240234377 + ], + [ + "▁NJCAA", + -14.987397193908691 + ], + [ + "▁steadiness", + -14.987398147583008 + ], + [ + "▁tuscan", + -14.987399101257324 + ], + [ + "▁Annuity", + -14.98740005493164 + ], + [ + "▁Farnham", + -14.987401008605955 + ], + [ + "▁Manifold", + -14.987403869628906 + ], + [ + "▁grimace", + -14.987417221069336 + ], + [ + "▁brawn", + -14.987421989440918 + ], + [ + "▁masseuse", + -14.987421989440918 + ], + [ + "▁dubstep", + -14.98742389678955 + ], + [ + "▁Jefferies", + -14.987435340881348 + ], + [ + "▁Cretan", + -14.987448692321776 + ], + [ + "▁£150,000", + -14.987459182739258 + ], + [ + "▁jotting", + -14.987462997436523 + ], + [ + "emus", + -14.987476348876951 + ], + [ + "▁Rincon", + -14.987481117248535 + ], + [ + "ococcal", + -14.987503051757812 + ], + [ + "▁Monahan", + -14.987504005432127 + ], + [ + "▁Shipbuilding", + -14.987504959106444 + ], + [ + "▁LINQ", + -14.987506866455078 + ], + [ + "▁Krugman", + -14.987510681152344 + ], + [ + "▁COUNTER", + -14.987513542175291 + ], + [ + "▁UAH", + -14.98751735687256 + ], + [ + "▁reorganizing", + -14.98751735687256 + ], + [ + "▁Shafer", + -14.987521171569824 + ], + [ + "▁DeMarco", + -14.98752498626709 + ], + [ + "▁Whitmore", + -14.987533569335938 + ], + [ + "TEXT", + -14.98753547668457 + ], + [ + "▁recombination", + -14.987541198730469 + ], + [ + "▁Cern", + -14.987550735473633 + ], + [ + "▁counterattack", + -14.987550735473633 + ], + [ + "▁Vieux", + -14.987561225891112 + ], + [ + "▁Seacrest", + -14.98756217956543 + ], + [ + "▁Fungus", + -14.987616539001465 + ], + [ + "▁Wonka", + -14.98761749267578 + ], + [ + "▁Alexei", + -14.987642288208008 + ], + [ + "▁canvassing", + -14.98764419555664 + ], + [ + "▁BRIGHT", + -14.987666130065918 + ], + [ + "▁stockbroker", + -14.98767375946045 + ], + [ + "▁reflectivity", + -14.987676620483398 + ], + [ + "▁disoriented", + -14.98768424987793 + ], + [ + "▁Plen", + -14.98770236968994 + ], + [ + "▁Murph", + -14.987735748291016 + ], + [ + "▁Mittel", + -14.987784385681152 + ], + [ + "▁repackaged", + -14.98779010772705 + ], + [ + "bonding", + -14.987791061401367 + ], + [ + "▁Thales", + -14.987793922424316 + ], + [ + "▁Burnside", + -14.987812042236328 + ], + [ + "WOLF", + -14.987818717956545 + ], + [ + "▁Robben", + -14.987875938415527 + ], + [ + "▁Karak", + -14.987881660461426 + ], + [ + "▁Brier", + -14.987907409667969 + 
], + [ + "▁Gangster", + -14.987916946411133 + ], + [ + "▁Monroeville", + -14.987918853759766 + ], + [ + "Burning", + -14.987927436828612 + ], + [ + "▁Saltwater", + -14.987937927246094 + ], + [ + "▁$5.2", + -14.987957000732422 + ], + [ + "▁understudies", + -14.987959861755373 + ], + [ + "▁Volcanoes", + -14.987966537475586 + ], + [ + "▁chiral", + -14.987967491149902 + ], + [ + "▁raptors", + -14.987998008728027 + ], + [ + "glandular", + -14.988011360168455 + ], + [ + "▁Ansible", + -14.988041877746582 + ], + [ + "▁Lipton", + -14.988082885742188 + ], + [ + "▁Heywood", + -14.988085746765137 + ], + [ + "1(1):", + -14.98808765411377 + ], + [ + "▁Armitage", + -14.988109588623049 + ], + [ + "▁undated", + -14.98813819885254 + ], + [ + "▁FIU", + -14.988204002380373 + ], + [ + "▁Barbra", + -14.9882173538208 + ], + [ + "▁ihre", + -14.988225936889648 + ], + [ + "▁praline", + -14.988225936889648 + ], + [ + "Wie", + -14.98826789855957 + ], + [ + "▁Vermeer", + -14.98829174041748 + ], + [ + "▁reassessment", + -14.988295555114746 + ], + [ + "▁Diversion", + -14.988308906555176 + ], + [ + "▁knockdown", + -14.988317489624023 + ], + [ + "publ", + -14.988322257995604 + ], + [ + "▁Hoard", + -14.988340377807615 + ], + [ + "▁Ringling", + -14.988418579101562 + ], + [ + "▁tramway", + -14.988524436950684 + ], + [ + "▁demeaning", + -14.98853874206543 + ], + [ + "▁dialling", + -14.988577842712402 + ], + [ + "▁Barolo", + -14.988587379455566 + ], + [ + "▁Bakr", + -14.988664627075195 + ], + [ + "Booth", + -14.988691329956056 + ], + [ + "▁mult", + -14.98872184753418 + ], + [ + "▁crosswise", + -14.988734245300291 + ], + [ + "▁Canned", + -14.988743782043455 + ], + [ + "▁Rizal", + -14.988837242126465 + ], + [ + "▁hacer", + -14.988847732543944 + ], + [ + "fist", + -14.98887538909912 + ], + [ + "▁Underworld", + -14.98888874053955 + ], + [ + "▁Screaming", + -14.988926887512209 + ], + [ + "▁Whispering", + -14.988927841186523 + ], + [ + "▁necessitat", + -14.988935470581056 + ], + [ + "▁cutlets", + -14.988944053649902 + ], + [ + "▁Loudon", + -14.9889497756958 + ], + [ + "▁esl", + -14.988961219787598 + ], + [ + "4:19", + -14.98902702331543 + ], + [ + "5:21", + -14.98910427093506 + ], + [ + "▁Dorman", + -14.989139556884766 + ], + [ + "soundcloud", + -14.98914623260498 + ], + [ + "▁riverboat", + -14.989203453063965 + ], + [ + "Malik", + -14.989233016967772 + ], + [ + "loka", + -14.989243507385254 + ], + [ + "▁cloaked", + -14.98940658569336 + ], + [ + "▁hauntingly", + -14.98947525024414 + ], + [ + "gaku", + -14.989519119262695 + ], + [ + "▁postulated", + -14.98952865600586 + ], + [ + "▁UHC", + -14.989568710327148 + ], + [ + "▁LASER", + -14.989606857299805 + ], + [ + "▁Ophthalmol", + -14.989633560180664 + ], + [ + "odin", + -14.989726066589355 + ], + [ + "Naturopath", + -14.989768028259276 + ], + [ + "mundi", + -14.9898042678833 + ], + [ + "▁horsemen", + -14.989886283874512 + ], + [ + "▁MINT", + -14.989933967590332 + ], + [ + "1865", + -14.98995590209961 + ], + [ + "hunk", + -14.99010181427002 + ], + [ + "1-800-", + -14.99025058746338 + ], + [ + "TPP", + -14.99026870727539 + ], + [ + "DIRECT", + -14.99028205871582 + ], + [ + "FOC", + -14.990317344665527 + ], + [ + "begin", + -14.990364074707031 + ], + [ + "Pip", + -14.990375518798828 + ], + [ + "▁jap", + -14.990375518798828 + ], + [ + "▁BLAST", + -14.990455627441406 + ], + [ + "▁veering", + -14.990466117858888 + ], + [ + "Incremental", + -14.99046802520752 + ], + [ + "63%", + -14.990470886230469 + ], + [ + "2:25", + -14.990473747253418 + ], + [ + "▁Negra", + -14.99049186706543 + ], + [ + 
"ACTIVE", + -14.99050521850586 + ], + [ + "▁Benefic", + -14.99053192138672 + ], + [ + "GRP", + -14.99062156677246 + ], + [ + "ceta", + -14.99064826965332 + ], + [ + "urry", + -14.99079704284668 + ], + [ + "▁midlife", + -14.990821838378906 + ], + [ + "ASSC", + -14.991153717041016 + ], + [ + "▁Miki", + -14.991241455078123 + ], + [ + "Whose", + -14.991302490234377 + ], + [ + "▁Bhawan", + -14.991303443908691 + ], + [ + "▁Ruger", + -14.991363525390623 + ], + [ + "wagen", + -14.991415977478027 + ], + [ + "descend", + -14.991425514221191 + ], + [ + "0-002", + -14.991506576538086 + ], + [ + "james", + -14.99161148071289 + ], + [ + "▁Alvi", + -14.991646766662598 + ], + [ + "Careful", + -14.99180793762207 + ], + [ + "1,2,3", + -14.99192237854004 + ], + [ + "furlong", + -14.991943359375 + ], + [ + "6/7", + -14.991963386535645 + ], + [ + "alayan", + -14.992090225219728 + ], + [ + "▁visage", + -14.9921293258667 + ], + [ + "▁Biologist", + -14.992154121398926 + ], + [ + "schlag", + -14.992159843444824 + ], + [ + "▁$86", + -14.992199897766112 + ], + [ + "Squee", + -14.992441177368164 + ], + [ + "▁limber", + -14.992593765258787 + ], + [ + "▁codify", + -14.992698669433594 + ], + [ + "▁KOH", + -14.99272632598877 + ], + [ + "Hydrate", + -14.992730140686035 + ], + [ + "2:17", + -14.992842674255373 + ], + [ + "▁CREA", + -14.992887496948242 + ], + [ + "▁Ferenc", + -14.99292278289795 + ], + [ + "▁Stringer", + -14.993185997009276 + ], + [ + "CSM", + -14.993197441101074 + ], + [ + "▁Surah", + -14.993200302124023 + ], + [ + "▁LBD", + -14.993270874023438 + ], + [ + "Abba", + -14.993332862854004 + ], + [ + "stripping", + -14.993365287780762 + ], + [ + "spiral", + -14.993420600891112 + ], + [ + "▁Sergi", + -14.99354076385498 + ], + [ + "Hog", + -14.993576049804688 + ], + [ + "omorphic", + -14.993674278259276 + ], + [ + "▁781-", + -14.993733406066896 + ], + [ + "▁Warlock", + -14.99378776550293 + ], + [ + "▁1095", + -14.993796348571776 + ], + [ + "URED", + -14.993922233581545 + ], + [ + "isko", + -14.993967056274414 + ], + [ + "▁SBD", + -14.994049072265623 + ], + [ + "OXY", + -14.994165420532228 + ], + [ + "TiO", + -14.99426555633545 + ], + [ + "▁04:3", + -14.994277000427246 + ], + [ + "ISER", + -14.994338989257812 + ], + [ + "2:23", + -14.99440860748291 + ], + [ + "▁macula", + -14.994491577148438 + ], + [ + "▁ALK", + -14.994515419006348 + ], + [ + "▁Montero", + -14.994728088378906 + ], + [ + "▁09:2", + -14.994735717773438 + ], + [ + "▁Salud", + -14.99477481842041 + ], + [ + "▁ITSM", + -14.995014190673828 + ], + [ + "▁BNC", + -14.995160102844238 + ], + [ + "niece", + -14.995162010192873 + ], + [ + "eichen", + -14.995197296142578 + ], + [ + "▁Achiever", + -14.99545955657959 + ], + [ + "LIO", + -14.99548053741455 + ], + [ + "▁parson", + -14.99548625946045 + ], + [ + "5:22", + -14.99551773071289 + ], + [ + "associate", + -14.99588108062744 + ], + [ + "▁Blume", + -14.995884895324709 + ], + [ + "9:40", + -14.995923042297363 + ], + [ + "Subtract", + -14.99594783782959 + ], + [ + "Christoph", + -14.995990753173828 + ], + [ + "▁$325", + -14.996058464050291 + ], + [ + "▁$8.5", + -14.996097564697266 + ], + [ + "▁CRD", + -14.996235847473145 + ], + [ + "▁recalculate", + -14.996308326721191 + ], + [ + "▁0.99", + -14.996315956115724 + ], + [ + "solicitation", + -14.996318817138672 + ], + [ + "Afghan", + -14.99642276763916 + ], + [ + "satisfied", + -14.996424674987791 + ], + [ + "scholastic", + -14.996460914611816 + ], + [ + "Obesity", + -14.996468544006348 + ], + [ + "Vegetarian", + -14.99647617340088 + ], + [ + "membrane", + 
-14.996478080749512 + ], + [ + "Varsity", + -14.99648094177246 + ], + [ + "indulgent", + -14.996488571166992 + ], + [ + "survival", + -14.996496200561523 + ], + [ + "stimulation", + -14.99649715423584 + ], + [ + "Arkansas", + -14.996501922607422 + ], + [ + "vibration", + -14.996501922607422 + ], + [ + "UNESCO", + -14.996505737304688 + ], + [ + "Nevada", + -14.996519088745115 + ], + [ + "theoretical", + -14.996520042419434 + ], + [ + "Philippe", + -14.996521949768066 + ], + [ + "Firefox", + -14.996525764465332 + ], + [ + "▁Repeater", + -14.996541023254396 + ], + [ + "Vanilla", + -14.996542930603027 + ], + [ + "fabrication", + -14.996577262878418 + ], + [ + "Armenia", + -14.996578216552734 + ], + [ + "Uganda", + -14.996591567993164 + ], + [ + "Friendship", + -14.996596336364746 + ], + [ + "encourage", + -14.996601104736328 + ], + [ + "Graduat", + -14.996617317199709 + ], + [ + "Utility", + -14.996624946594238 + ], + [ + "exhibition", + -14.99665355682373 + ], + [ + "chinese", + -14.996657371520996 + ], + [ + "MADE", + -14.99680233001709 + ], + [ + "▁Hoof", + -14.996854782104492 + ], + [ + "Patron", + -14.996867179870604 + ], + [ + "▁beckon", + -14.997023582458496 + ], + [ + "potent", + -14.997039794921877 + ], + [ + "coordinated", + -14.997060775756836 + ], + [ + "LOOM", + -14.99706745147705 + ], + [ + "Ferrari", + -14.997114181518556 + ], + [ + "Invalid", + -14.99714469909668 + ], + [ + "delicious", + -14.997197151184082 + ], + [ + "italian", + -14.997227668762209 + ], + [ + "▁Amphi", + -14.997269630432127 + ], + [ + "typic", + -14.997354507446287 + ], + [ + "Packers", + -14.99746036529541 + ], + [ + "▁inaugurate", + -14.997873306274414 + ], + [ + "▁PREP", + -14.997923851013184 + ], + [ + "mayer", + -14.997940063476562 + ], + [ + "▁Randle", + -14.99832534790039 + ], + [ + "copa", + -14.998435974121094 + ], + [ + "▁WARRANT", + -14.998437881469728 + ], + [ + "▁Reykjav", + -14.99846076965332 + ], + [ + "Curcumin", + -14.99847412109375 + ], + [ + "Perseverance", + -14.99847412109375 + ], + [ + "Residual", + -14.99847412109375 + ], + [ + "phosphatase", + -14.99847412109375 + ], + [ + "wisconsin", + -14.99847412109375 + ], + [ + "▁888-320-4449", + -14.99847412109375 + ], + [ + "▁Bitdefender", + -14.99847412109375 + ], + [ + "▁Camtasia", + -14.99847412109375 + ], + [ + "▁DeSantis", + -14.99847412109375 + ], + [ + "▁ESSENTIAL", + -14.99847412109375 + ], + [ + "▁Enclave", + -14.99847412109375 + ], + [ + "▁Hargreaves", + -14.99847412109375 + ], + [ + "▁Hellenistic", + -14.99847412109375 + ], + [ + "▁LaGrange", + -14.99847412109375 + ], + [ + "▁McMullen", + -14.99847412109375 + ], + [ + "▁Mendelssohn", + -14.99847412109375 + ], + [ + "▁Skechers", + -14.99847412109375 + ], + [ + "▁Tecumseh", + -14.99847412109375 + ], + [ + "▁Uxbridge", + -14.99847412109375 + ], + [ + "▁bridegroom", + -14.99847412109375 + ], + [ + "▁crematorium", + -14.99847412109375 + ], + [ + "▁decrepit", + -14.99847412109375 + ], + [ + "▁dejected", + -14.99847412109375 + ], + [ + "▁demonstrative", + -14.99847412109375 + ], + [ + "▁ensconced", + -14.99847412109375 + ], + [ + "▁epinephrine", + -14.99847412109375 + ], + [ + "▁escorting", + -14.99847412109375 + ], + [ + "▁flustered", + -14.99847412109375 + ], + [ + "▁malignancies", + -14.99847412109375 + ], + [ + "▁maturities", + -14.99847412109375 + ], + [ + "▁observatories", + -14.99847412109375 + ], + [ + "▁odkaz", + -14.99847412109375 + ], + [ + "▁pacifist", + -14.99847412109375 + ], + [ + "▁recuperating", + -14.99847412109375 + ], + [ + "▁stalagmite", + -14.99847412109375 + ], + [ + 
"▁symbiosis", + -14.99847412109375 + ], + [ + "▁tilapia", + -14.99847412109375 + ], + [ + "▁xylitol", + -14.99847412109375 + ], + [ + "▁Incoloy", + -14.998475074768066 + ], + [ + "▁biliary", + -14.998475074768066 + ], + [ + "consensual", + -14.998476028442385 + ], + [ + "▁10.1007/", + -14.998476028442385 + ], + [ + "▁Euphrates", + -14.998476028442385 + ], + [ + "▁[+]", + -14.998476028442385 + ], + [ + "▁merrily", + -14.998476028442385 + ], + [ + "▁Leavitt", + -14.9984769821167 + ], + [ + "▁goalscorer", + -14.998477935791016 + ], + [ + "▁spokespeople", + -14.998478889465332 + ], + [ + "▁Adreno", + -14.998479843139648 + ], + [ + "▁EXTENT", + -14.998479843139648 + ], + [ + "▁Mehndi", + -14.998479843139648 + ], + [ + "▁wobbling", + -14.998479843139648 + ], + [ + "▁Binocular", + -14.99848175048828 + ], + [ + "▁Enquirer", + -14.99848175048828 + ], + [ + "▁hitchhike", + -14.998482704162598 + ], + [ + "▁WGBH", + -14.998483657836914 + ], + [ + "▁dowry", + -14.998483657836914 + ], + [ + "▁adroit", + -14.99848461151123 + ], + [ + "▁imprecise", + -14.99848461151123 + ], + [ + "▁Antiquities", + -14.998485565185549 + ], + [ + "▁impotent", + -14.998485565185549 + ], + [ + "▁Freightliner", + -14.99848747253418 + ], + [ + "▁Gesture", + -14.998488426208496 + ], + [ + "▁Hendrik", + -14.998489379882812 + ], + [ + "▁climes", + -14.998489379882812 + ], + [ + "▁phagocyt", + -14.998490333557127 + ], + [ + "emporomandibu", + -14.998491287231444 + ], + [ + "Albeit", + -14.998495101928713 + ], + [ + "▁Shiatsu", + -14.998495101928713 + ], + [ + "▁Fairytale", + -14.998496055603027 + ], + [ + "▁Pradhan", + -14.998496055603027 + ], + [ + "▁siRNA", + -14.998498916625977 + ], + [ + "▁myeloid", + -14.99850368499756 + ], + [ + "▁Lusaka", + -14.998506546020508 + ], + [ + "▁Wegmans", + -14.998506546020508 + ], + [ + "▁sacral", + -14.998509407043455 + ], + [ + "▁botox", + -14.998513221740724 + ], + [ + "▁Pajama", + -14.998517036437988 + ], + [ + "▁mesquite", + -14.998517036437988 + ], + [ + "▁Lemongrass", + -14.998519897460938 + ], + [ + "▁TransUnion", + -14.998520851135254 + ], + [ + "▁produit", + -14.998525619506836 + ], + [ + "▁Motocross", + -14.998530387878418 + ], + [ + "Albatros", + -14.998533248901367 + ], + [ + "▁Aficio", + -14.998542785644531 + ], + [ + "▁leukocyte", + -14.99854850769043 + ], + [ + "▁brining", + -14.998550415039062 + ], + [ + "▁Raipur", + -14.998565673828123 + ], + [ + "▁AICPA", + -14.998567581176758 + ], + [ + "▁subgenre", + -14.998575210571287 + ], + [ + "▁gambit", + -14.998578071594238 + ], + [ + "▁Knightsbridge", + -14.998581886291504 + ], + [ + "▁Consort", + -14.998600006103516 + ], + [ + "▁Nanak", + -14.998605728149414 + ], + [ + "harvard", + -14.998617172241213 + ], + [ + "▁$6.99", + -14.998627662658691 + ], + [ + "▁MARTIN", + -14.998627662658691 + ], + [ + "▁proteomics", + -14.99863052368164 + ], + [ + "▁Movable", + -14.998631477355955 + ], + [ + "▁Makita", + -14.998634338378906 + ], + [ + "▁Technion", + -14.998638153076172 + ], + [ + "▁deviating", + -14.998644828796388 + ], + [ + "abroad", + -14.998658180236816 + ], + [ + "▁pixelated", + -14.998661994934082 + ], + [ + "▁Visalia", + -14.998669624328612 + ], + [ + "4:50", + -14.998679161071776 + ], + [ + "▁CPG", + -14.998684883117676 + ], + [ + "▁Renoir", + -14.998692512512209 + ], + [ + "▁Agora", + -14.998700141906738 + ], + [ + "▁photobooth", + -14.998720169067385 + ], + [ + "▁Luminous", + -14.99875259399414 + ], + [ + "▁pathfinder", + -14.998767852783203 + ], + [ + "impson", + -14.998802185058594 + ], + [ + "▁Barometer", + 
-14.998811721801758 + ], + [ + "▁Rosales", + -14.99881649017334 + ], + [ + "▁acidosis", + -14.998846054077148 + ], + [ + "▁radiocarbon", + -14.998855590820312 + ], + [ + "▁Krieger", + -14.998881340026855 + ], + [ + "▁zippy", + -14.998909950256348 + ], + [ + "Flora", + -14.998937606811523 + ], + [ + "▁woollen", + -14.9989652633667 + ], + [ + "▁Sequin", + -14.998976707458496 + ], + [ + "▁Elkins", + -14.999005317687988 + ], + [ + "▁depreciated", + -14.999016761779783 + ], + [ + "7010", + -14.999019622802734 + ], + [ + "glomerat", + -14.999021530151367 + ], + [ + "▁Kogan", + -14.999052047729492 + ], + [ + "▁travelogue", + -14.999055862426758 + ], + [ + "▁Styx", + -14.999059677124023 + ], + [ + "Swimmers", + -14.999094009399414 + ], + [ + "▁Bowel", + -14.99911117553711 + ], + [ + "▁whittled", + -14.99912452697754 + ], + [ + "Outline", + -14.999133110046388 + ], + [ + "uyo", + -14.999133110046388 + ], + [ + "▁Lyra", + -14.99913501739502 + ], + [ + "Directly", + -14.999147415161133 + ], + [ + "▁rootstock", + -14.999156951904297 + ], + [ + "▁pistes", + -14.999168395996094 + ], + [ + "▁foreshadowing", + -14.999222755432127 + ], + [ + "zynski", + -14.999266624450684 + ], + [ + "▁ActiveX", + -14.999320030212402 + ], + [ + "▁Ombre", + -14.999321937561035 + ], + [ + "5700", + -14.999347686767578 + ], + [ + "▁lookalike", + -14.999353408813477 + ], + [ + "▁Killian", + -14.99935531616211 + ], + [ + "▁DMZ", + -14.99936294555664 + ], + [ + "▁Hamster", + -14.999396324157717 + ], + [ + "▁unapproved", + -14.999415397644045 + ], + [ + "▁reinvigorated", + -14.999427795410156 + ], + [ + "PSP", + -14.99943733215332 + ], + [ + "▁cocker", + -14.999444961547852 + ], + [ + "▁Lasik", + -14.999464988708496 + ], + [ + "▁unfunded", + -14.999471664428713 + ], + [ + "▁Edging", + -14.999488830566406 + ], + [ + "▁JAVA", + -14.99951171875 + ], + [ + "▁Pittman", + -14.999547004699709 + ], + [ + "▁waterlogged", + -14.999561309814451 + ], + [ + "▁immobilize", + -14.999590873718262 + ], + [ + "▁photocopying", + -14.999593734741213 + ], + [ + "▁dermatological", + -14.999597549438477 + ], + [ + "▁shoebox", + -14.999611854553224 + ], + [ + "▁ADAPT", + -14.999683380126951 + ], + [ + "matism", + -14.999685287475586 + ], + [ + "▁HGV", + -14.999717712402344 + ], + [ + "▁Zimmermann", + -14.999747276306152 + ], + [ + "▁CHANGES", + -14.999799728393556 + ], + [ + "▁Cheeky", + -14.999800682067873 + ], + [ + "lisp", + -14.999895095825195 + ], + [ + "▁Balcon", + -14.999934196472168 + ], + [ + "Woodland", + -14.99993896484375 + ], + [ + "arvin", + -14.999994277954102 + ], + [ + "▁Lanai", + -14.999996185302734 + ], + [ + "▁Ruston", + -15.000028610229492 + ], + [ + "▁Hebei", + -15.000097274780272 + ], + [ + "▁WEEKEND", + -15.000102043151855 + ], + [ + "▁Chapin", + -15.000146865844728 + ], + [ + "GIB", + -15.000168800354004 + ], + [ + "▁reheated", + -15.000168800354004 + ], + [ + "▁Parra", + -15.0001859664917 + ], + [ + "▁Adept", + -15.00019359588623 + ], + [ + "▁Egret", + -15.000222206115724 + ], + [ + "VLP", + -15.000224113464355 + ], + [ + "▁Harald", + -15.00024127960205 + ], + [ + "anyway", + -15.000435829162598 + ], + [ + "▁Diplomate", + -15.000487327575684 + ], + [ + "▁Paulette", + -15.000534057617188 + ], + [ + "▁Giggle", + -15.000558853149414 + ], + [ + "▁Hino", + -15.000572204589844 + ], + [ + "NEED", + -15.000624656677246 + ], + [ + "▁$35.00", + -15.000646591186523 + ], + [ + "▁shoutout", + -15.000652313232422 + ], + [ + "▁VIBE", + -15.000709533691406 + ], + [ + "▁Strick", + -15.000738143920898 + ], + [ + "STATE", + -15.000794410705566 + 
], + [ + "bmw", + -15.00080108642578 + ], + [ + "oub", + -15.000813484191896 + ], + [ + "▁swarmed", + -15.000847816467283 + ], + [ + "▁17-18", + -15.000852584838867 + ], + [ + "▁Glaser", + -15.000866889953612 + ], + [ + "▁Vibram", + -15.000882148742676 + ], + [ + "5:13", + -15.000892639160156 + ], + [ + "▁watchlist", + -15.000956535339355 + ], + [ + "▁Makin", + -15.00102996826172 + ], + [ + "▁smearing", + -15.001049041748049 + ], + [ + "RFP", + -15.001104354858398 + ], + [ + "▁10/1", + -15.001126289367676 + ], + [ + "▁madman", + -15.001126289367676 + ], + [ + "▁channelled", + -15.001260757446287 + ], + [ + "▁booed", + -15.00127410888672 + ], + [ + "▁scold", + -15.00127410888672 + ], + [ + "0:47", + -15.001282691955566 + ], + [ + "▁provisionally", + -15.001288414001465 + ], + [ + "8:35", + -15.001490592956545 + ], + [ + "▁Tatsu", + -15.001567840576172 + ], + [ + "accord", + -15.001611709594728 + ], + [ + "▁Mehra", + -15.001625061035156 + ], + [ + "▁1-1/2\"", + -15.001633644104004 + ], + [ + "▁parkour", + -15.001664161682127 + ], + [ + "▁Asgard", + -15.001667976379396 + ], + [ + "ALB", + -15.001684188842772 + ], + [ + "▁Brind", + -15.001712799072266 + ], + [ + "▁customising", + -15.001721382141112 + ], + [ + "BURY", + -15.001794815063477 + ], + [ + "▁Summerville", + -15.001821517944336 + ], + [ + "▁Valk", + -15.001825332641602 + ], + [ + "Darling", + -15.001874923706056 + ], + [ + "Mock", + -15.002071380615234 + ], + [ + "7:27", + -15.002297401428224 + ], + [ + "ysse", + -15.00235652923584 + ], + [ + "Transitioning", + -15.002659797668455 + ], + [ + "▁0-10", + -15.002723693847656 + ], + [ + "▁denounc", + -15.002744674682615 + ], + [ + "▁Haier", + -15.002766609191896 + ], + [ + "▁Beli", + -15.002925872802734 + ], + [ + "▁lovin", + -15.00292682647705 + ], + [ + "Basil", + -15.002944946289062 + ], + [ + "HAVE", + -15.002969741821287 + ], + [ + "MOU", + -15.003080368041992 + ], + [ + "▁Zaman", + -15.00308322906494 + ], + [ + "▁Briton", + -15.003118515014648 + ], + [ + "▁taw", + -15.003127098083496 + ], + [ + "▁RSL", + -15.00322723388672 + ], + [ + "combine", + -15.003276824951172 + ], + [ + "▁McCull", + -15.003398895263672 + ], + [ + "99.00", + -15.003485679626465 + ], + [ + "▁AVP", + -15.003625869750977 + ], + [ + "Rita", + -15.003686904907228 + ], + [ + "▁OHV", + -15.003692626953123 + ], + [ + "DOF", + -15.003710746765137 + ], + [ + "▁Crowder", + -15.003718376159668 + ], + [ + "ASTA", + -15.003721237182615 + ], + [ + "GMC", + -15.003889083862305 + ], + [ + "▁replayed", + -15.004019737243652 + ], + [ + "Basler", + -15.004021644592283 + ], + [ + "▁Garber", + -15.00418186187744 + ], + [ + "▁winches", + -15.00418472290039 + ], + [ + "shaker", + -15.004290580749512 + ], + [ + "novi", + -15.004388809204102 + ], + [ + "▁Weldon", + -15.00441551208496 + ], + [ + "▁1797", + -15.00454807281494 + ], + [ + "▁Huma", + -15.004621505737305 + ], + [ + "Institut", + -15.00465488433838 + ], + [ + "▁DEALER", + -15.004815101623535 + ], + [ + "▁korea", + -15.005355834960938 + ], + [ + "▁Dingo", + -15.005396842956545 + ], + [ + "UNITY", + -15.005411148071287 + ], + [ + "grande", + -15.005579948425291 + ], + [ + "uranga", + -15.005630493164062 + ], + [ + "dictionary", + -15.005638122558594 + ], + [ + "▁Shish", + -15.005704879760742 + ], + [ + "▁airdrop", + -15.005802154541016 + ], + [ + "ocytosis", + -15.005806922912598 + ], + [ + "ATTLE", + -15.005833625793455 + ], + [ + "▁Davin", + -15.00596809387207 + ], + [ + "▁Quince", + -15.005971908569336 + ], + [ + "▁Kenan", + -15.006243705749512 + ], + [ + "▁trouv", + 
-15.006387710571287 + ], + [ + "zug", + -15.006400108337402 + ], + [ + "0-255", + -15.00644588470459 + ], + [ + "TDS", + -15.006542205810549 + ], + [ + "6:22", + -15.006706237792969 + ], + [ + "▁Mime", + -15.0067138671875 + ], + [ + "pach", + -15.00678253173828 + ], + [ + "Salvo", + -15.007006645202637 + ], + [ + "▁Novella", + -15.007017135620115 + ], + [ + "▁Talia", + -15.007143020629885 + ], + [ + "▁Tamer", + -15.00723361968994 + ], + [ + "▁Vlog", + -15.007431030273438 + ], + [ + "▁perfumer", + -15.00750732421875 + ], + [ + "PLUG", + -15.007652282714844 + ], + [ + "curator", + -15.007736206054688 + ], + [ + "Economist", + -15.00778865814209 + ], + [ + "Encouraging", + -15.00792407989502 + ], + [ + "Fascinating", + -15.00795078277588 + ], + [ + "subsidized", + -15.007972717285156 + ], + [ + "Jaguar", + -15.007977485656738 + ], + [ + "Infinite", + -15.007979393005373 + ], + [ + "Kimberly", + -15.007981300354004 + ], + [ + "Intellectual", + -15.00798225402832 + ], + [ + "Attractive", + -15.00798511505127 + ], + [ + "proliferation", + -15.007987022399902 + ], + [ + "Amenities", + -15.007990837097168 + ], + [ + "Census", + -15.007993698120115 + ], + [ + "Literature", + -15.007993698120115 + ], + [ + "Portuguese", + -15.007994651794434 + ], + [ + "opportunity", + -15.008002281188965 + ], + [ + "Disease", + -15.00800609588623 + ], + [ + "▁Pili", + -15.00800609588623 + ], + [ + "Microwave", + -15.008021354675291 + ], + [ + "WhatsApp", + -15.00802516937256 + ], + [ + "Mobility", + -15.00802993774414 + ], + [ + "Capric", + -15.008041381835938 + ], + [ + "quarry", + -15.008044242858888 + ], + [ + "Err", + -15.008052825927734 + ], + [ + "telephone", + -15.008054733276367 + ], + [ + "Palace", + -15.00806713104248 + ], + [ + "▁05:0", + -15.00810432434082 + ], + [ + "Honour", + -15.008106231689451 + ], + [ + "essence", + -15.00810718536377 + ], + [ + "Mapping", + -15.0081787109375 + ], + [ + "asek", + -15.008204460144045 + ], + [ + "WEEK", + -15.008224487304688 + ], + [ + "Hilton", + -15.008231163024902 + ], + [ + "NIP", + -15.008234024047852 + ], + [ + "SAE", + -15.008277893066406 + ], + [ + "immediately", + -15.008298873901367 + ], + [ + "coverage", + -15.008307456970217 + ], + [ + "collector", + -15.00831699371338 + ], + [ + "▁MCB", + -15.008407592773438 + ], + [ + "Floating", + -15.008464813232422 + ], + [ + "▁Entertain", + -15.008474349975586 + ], + [ + "niche", + -15.008478164672852 + ], + [ + "SYSTEM", + -15.008489608764648 + ], + [ + "▁victoria", + -15.00854206085205 + ], + [ + "Robertson", + -15.00857162475586 + ], + [ + "rogue", + -15.0086030960083 + ], + [ + "MOE", + -15.00860595703125 + ], + [ + "▁Champa", + -15.008634567260742 + ], + [ + "▁nationalistic", + -15.008642196655272 + ], + [ + "umenta", + -15.008666038513184 + ], + [ + "aktiv", + -15.008696556091309 + ], + [ + "composed", + -15.008745193481444 + ], + [ + "Jag", + -15.008753776550291 + ], + [ + "Skilled", + -15.008769035339355 + ], + [ + "CYL", + -15.008773803710938 + ], + [ + "Patio", + -15.008782386779783 + ], + [ + "▁$230", + -15.008814811706545 + ], + [ + "▁registr", + -15.008955955505373 + ], + [ + "▁$185", + -15.009191513061523 + ], + [ + "umph", + -15.00919246673584 + ], + [ + "▁RYA", + -15.009233474731444 + ], + [ + "▁bicyclist", + -15.009434700012209 + ], + [ + "xpedition", + -15.00943660736084 + ], + [ + "▁detainee", + -15.009467124938965 + ], + [ + "CHANGE", + -15.009520530700684 + ], + [ + "▁Solu", + -15.009568214416504 + ], + [ + "▁physiologic", + -15.00959300994873 + ], + [ + "Mood", + -15.00968074798584 + ], + [ + 
"▁apportion", + -15.009685516357422 + ], + [ + "▁STUNNING", + -15.009709358215332 + ], + [ + "▁Tennyson", + -15.009709358215332 + ], + [ + "▁baboon", + -15.009709358215332 + ], + [ + "▁emphysema", + -15.009709358215332 + ], + [ + "Configurable", + -15.009710311889648 + ], + [ + "Cumulative", + -15.009710311889648 + ], + [ + "iruvananthapuram", + -15.009710311889648 + ], + [ + "montgomery", + -15.009710311889648 + ], + [ + "sebaceous", + -15.009710311889648 + ], + [ + "▁ADVISED", + -15.009710311889648 + ], + [ + "▁ALBUM", + -15.009710311889648 + ], + [ + "▁Chillicothe", + -15.009710311889648 + ], + [ + "▁Chippendale", + -15.009710311889648 + ], + [ + "▁Constituency", + -15.009710311889648 + ], + [ + "▁Depakote", + -15.009710311889648 + ], + [ + "▁GALLERY", + -15.009710311889648 + ], + [ + "▁Gaithersburg", + -15.009710311889648 + ], + [ + "▁Gyllenhaal", + -15.009710311889648 + ], + [ + "▁Huckleberry", + -15.009710311889648 + ], + [ + "▁Instructable", + -15.009710311889648 + ], + [ + "▁Liebherr", + -15.009710311889648 + ], + [ + "▁MOUNTAIN", + -15.009710311889648 + ], + [ + "▁Occupied", + -15.009710311889648 + ], + [ + "▁Pompidou", + -15.009710311889648 + ], + [ + "▁Rajapaksa", + -15.009710311889648 + ], + [ + "▁Swinburne", + -15.009710311889648 + ], + [ + "▁Tendulkar", + -15.009710311889648 + ], + [ + "▁WORLDWIDE", + -15.009710311889648 + ], + [ + "▁countermeasures", + -15.009710311889648 + ], + [ + "▁escapism", + -15.009710311889648 + ], + [ + "▁gratified", + -15.009710311889648 + ], + [ + "▁guillotine", + -15.009710311889648 + ], + [ + "▁kaleidoscopic", + -15.009710311889648 + ], + [ + "▁memorably", + -15.009710311889648 + ], + [ + "▁misdeeds", + -15.009710311889648 + ], + [ + "▁nystagmus", + -15.009710311889648 + ], + [ + "▁ordnance", + -15.009710311889648 + ], + [ + "▁porcupine", + -15.009710311889648 + ], + [ + "▁stratospheric", + -15.009710311889648 + ], + [ + "▁trigonometry", + -15.009710311889648 + ], + [ + "▁unbeknownst", + -15.009710311889648 + ], + [ + "▁Dominguez", + -15.009711265563965 + ], + [ + "▁aqueduct", + -15.009711265563965 + ], + [ + "▁mehndi", + -15.009711265563965 + ], + [ + "▁diastolic", + -15.009713172912598 + ], + [ + "▁palatial", + -15.009713172912598 + ], + [ + "▁tufts", + -15.009714126586914 + ], + [ + "▁Cobain", + -15.00971508026123 + ], + [ + "▁Hattiesburg", + -15.00971508026123 + ], + [ + "▁resurgent", + -15.00971508026123 + ], + [ + "verizon", + -15.009716033935549 + ], + [ + "▁Matsumoto", + -15.009716033935549 + ], + [ + "▁Niseko", + -15.009716033935549 + ], + [ + "▁Hebdo", + -15.009716987609863 + ], + [ + "▁francisco", + -15.009716987609863 + ], + [ + "▁Skagit", + -15.00971794128418 + ], + [ + "▁Adonis", + -15.009719848632812 + ], + [ + "▁misdiagnosed", + -15.009719848632812 + ], + [ + "▁ensnare", + -15.009721755981444 + ], + [ + "▁styrene", + -15.009723663330078 + ], + [ + "▁Soothing", + -15.009725570678713 + ], + [ + "▁Werewolf", + -15.009726524353027 + ], + [ + "▁Cheerios", + -15.009730339050291 + ], + [ + "▁subsidence", + -15.00973129272461 + ], + [ + "▁morgue", + -15.009739875793455 + ], + [ + "▁(1957)", + -15.00974178314209 + ], + [ + "▁Filipina", + -15.00974178314209 + ], + [ + "▁Liszt", + -15.00974464416504 + ], + [ + "▁goldmine", + -15.00975227355957 + ], + [ + "▁COMPASS", + -15.009753227233888 + ], + [ + "▁$$$[", + -15.009757041931152 + ], + [ + "▁Mackintosh", + -15.009764671325684 + ], + [ + "▁Macromedia", + -15.00976848602295 + ], + [ + "▁POF", + -15.00976848602295 + ], + [ + "▁sanitise", + -15.009775161743164 + ], + [ + "▁TITAN", + 
-15.009778022766112 + ], + [ + "▁burnished", + -15.009778022766112 + ], + [ + "▁Piedra", + -15.00978183746338 + ], + [ + "▁Teesside", + -15.009784698486328 + ], + [ + "▁blimp", + -15.00978946685791 + ], + [ + "▁tightrope", + -15.009790420532228 + ], + [ + "▁Bodrum", + -15.009794235229492 + ], + [ + "▁jowl", + -15.009798049926758 + ], + [ + "▁Expeditionary", + -15.009805679321287 + ], + [ + "▁Childress", + -15.009806632995604 + ], + [ + "▁Handmaid", + -15.009812355041504 + ], + [ + "cense", + -15.009815216064451 + ], + [ + "▁Settlers", + -15.009815216064451 + ], + [ + "▁Gleeson", + -15.009819984436035 + ], + [ + "▁Shanta", + -15.009825706481934 + ], + [ + "▁Nymph", + -15.00982666015625 + ], + [ + "▁Bakker", + -15.009838104248049 + ], + [ + "▁RAPID", + -15.00986099243164 + ], + [ + "▁underlayment", + -15.009861946105955 + ], + [ + "▁scruffy", + -15.00986671447754 + ], + [ + "▁Fenty", + -15.009881973266602 + ], + [ + "▁telepathy", + -15.009882926940918 + ], + [ + "▁Viceroy", + -15.009891510009766 + ], + [ + "▁Ashanti", + -15.009892463684082 + ], + [ + "▁Laravel", + -15.009893417358398 + ], + [ + "▁Tamiya", + -15.00989818572998 + ], + [ + "▁survivorship", + -15.009899139404297 + ], + [ + "▁epicentre", + -15.009906768798828 + ], + [ + "▁hdmi", + -15.009912490844728 + ], + [ + "▁paragon", + -15.009913444519045 + ], + [ + "▁patella", + -15.009921073913574 + ], + [ + "removed", + -15.009922981262209 + ], + [ + "▁pithy", + -15.009922981262209 + ], + [ + "PRICE", + -15.00993824005127 + ], + [ + "▁prodding", + -15.009939193725586 + ], + [ + "▁Jakub", + -15.009943008422852 + ], + [ + "▁racy", + -15.009952545166016 + ], + [ + "▁0.25%", + -15.00995922088623 + ], + [ + "Anxious", + -15.009967803955078 + ], + [ + "▁cranio", + -15.00997257232666 + ], + [ + "▁Primus", + -15.009976387023926 + ], + [ + "▁ORANGE", + -15.009979248046877 + ], + [ + "▁Enduring", + -15.009984016418455 + ], + [ + "▁Goodluck", + -15.009992599487305 + ], + [ + "▁CHEAP", + -15.009997367858888 + ], + [ + "▁Maslow", + -15.010004043579102 + ], + [ + "▁MicroSD", + -15.010007858276367 + ], + [ + "▁pretension", + -15.010014533996582 + ], + [ + "▁100-105", + -15.010034561157228 + ], + [ + "▁Veterinarians", + -15.010068893432615 + ], + [ + "▁enlistment", + -15.010091781616213 + ], + [ + "▁trickiest", + -15.010096549987791 + ], + [ + "▁Arnaud", + -15.010130882263184 + ], + [ + "gement", + -15.010181427001951 + ], + [ + "▁resolutely", + -15.010193824768066 + ], + [ + "▁Pulley", + -15.0101957321167 + ], + [ + "▁Nortel", + -15.010198593139648 + ], + [ + "▁pinwheel", + -15.010207176208496 + ], + [ + "▁Joyful", + -15.010226249694824 + ], + [ + "▁MSDN", + -15.010238647460938 + ], + [ + "▁Lombardy", + -15.010255813598633 + ], + [ + "▁Siesta", + -15.010260581970217 + ], + [ + "▁reprimanded", + -15.010261535644531 + ], + [ + "▁distillate", + -15.010263442993164 + ], + [ + "geki", + -15.010272026062012 + ], + [ + "▁Appears", + -15.010290145874023 + ], + [ + "▁Homebrew", + -15.010296821594238 + ], + [ + "▁multibillion", + -15.010296821594238 + ], + [ + "▁Lune", + -15.010302543640137 + ], + [ + "▁Katana", + -15.01030445098877 + ], + [ + "jala", + -15.010319709777832 + ], + [ + "▁Brookside", + -15.010323524475098 + ], + [ + "▁unifies", + -15.01033878326416 + ], + [ + "▁Dulce", + -15.010353088378906 + ], + [ + "▁Fenner", + -15.010357856750488 + ], + [ + "▁unloved", + -15.01040267944336 + ], + [ + "▁Councilwoman", + -15.010408401489258 + ], + [ + "▁Charlottetown", + -15.010420799255373 + ], + [ + "▁Kirsch", + -15.010428428649902 + ], + [ + "▁floatation", + 
-15.010431289672852 + ], + [ + "▁Unused", + -15.01044750213623 + ], + [ + "▁Brightening", + -15.01045036315918 + ], + [ + "EDITOR", + -15.010462760925291 + ], + [ + "▁constricted", + -15.010504722595217 + ], + [ + "▁Rutter", + -15.010539054870604 + ], + [ + "MEF", + -15.010540008544922 + ], + [ + "▁Jamieson", + -15.010540962219238 + ], + [ + "▁PLANET", + -15.010541915893556 + ], + [ + "▁Cyndi", + -15.010615348815918 + ], + [ + "▁PSTN", + -15.01061725616455 + ], + [ + "▁honking", + -15.01062297821045 + ], + [ + "▁Specter", + -15.010634422302246 + ], + [ + "▁iShares", + -15.010642051696776 + ], + [ + "▁Haunting", + -15.010663032531738 + ], + [ + "▁cosmetically", + -15.010663986206056 + ], + [ + "▁damnation", + -15.01071071624756 + ], + [ + "▁palaeo", + -15.010747909545898 + ], + [ + "▁squealing", + -15.010750770568848 + ], + [ + "▁hardwearing", + -15.01077938079834 + ], + [ + "▁plunk", + -15.010801315307615 + ], + [ + "▁interceptor", + -15.010870933532717 + ], + [ + "Jaume", + -15.010891914367676 + ], + [ + "▁haywire", + -15.010910987854004 + ], + [ + "▁Alexey", + -15.01091766357422 + ], + [ + "awful", + -15.010920524597168 + ], + [ + "▁Tricky", + -15.011009216308594 + ], + [ + "▁VNC", + -15.011031150817873 + ], + [ + "hdrs", + -15.011066436767578 + ], + [ + "▁UMW", + -15.011070251464844 + ], + [ + "▁Rouen", + -15.011088371276855 + ], + [ + "NEVER", + -15.01113224029541 + ], + [ + "▁Commune", + -15.011144638061523 + ], + [ + "▁wavered", + -15.011194229125977 + ], + [ + "▁Goldfish", + -15.011204719543455 + ], + [ + "▁Mandr", + -15.011208534240724 + ], + [ + "▁cycl", + -15.011235237121582 + ], + [ + "▁17:4", + -15.011322975158691 + ], + [ + "▁LATER", + -15.01133918762207 + ], + [ + "▁Turnover", + -15.011383056640623 + ], + [ + "▁trumped", + -15.011408805847168 + ], + [ + "▁Smoo", + -15.011422157287598 + ], + [ + "▁10.000", + -15.01148796081543 + ], + [ + "▁oddity", + -15.01148796081543 + ], + [ + "▁Carmelo", + -15.011502265930176 + ], + [ + "▁Nitin", + -15.011518478393556 + ], + [ + "▁crooner", + -15.01158046722412 + ], + [ + "blanca", + -15.01163101196289 + ], + [ + "▁remodelers", + -15.011763572692873 + ], + [ + "▁ORDERS", + -15.011804580688477 + ], + [ + "CPP", + -15.011817932128906 + ], + [ + "▁hoisting", + -15.011860847473145 + ], + [ + "▁defrag", + -15.011897087097168 + ], + [ + "nehmen", + -15.011917114257812 + ], + [ + "▁encase", + -15.011942863464355 + ], + [ + "FMG", + -15.011956214904783 + ], + [ + "▁courageously", + -15.01196002960205 + ], + [ + "▁Jayson", + -15.011996269226074 + ], + [ + "JUNK", + -15.012052536010742 + ], + [ + "▁warez", + -15.01208782196045 + ], + [ + "▁Rodan", + -15.012160301208496 + ], + [ + "ostasis", + -15.012256622314451 + ], + [ + "▁hearken", + -15.012272834777832 + ], + [ + "▁manila", + -15.01230525970459 + ], + [ + "Vicki", + -15.012336730957031 + ], + [ + "▁sunning", + -15.012362480163574 + ], + [ + "▁$1,9", + -15.012372970581056 + ], + [ + "▁vce", + -15.01238250732422 + ], + [ + "lomo", + -15.012447357177734 + ], + [ + "UNEP", + -15.012451171875 + ], + [ + "prefix", + -15.012451171875 + ], + [ + "▁SAID", + -15.012484550476074 + ], + [ + "▁Cromer", + -15.012584686279297 + ], + [ + "kenmore", + -15.012617111206056 + ], + [ + "▁availabe", + -15.012646675109863 + ], + [ + "immuno", + -15.012652397155762 + ], + [ + "lantis", + -15.012691497802734 + ], + [ + "YON", + -15.012694358825684 + ], + [ + "▁Shaolin", + -15.012701988220217 + ], + [ + "▁Summerfield", + -15.0128173828125 + ], + [ + "▁Nira", + -15.012866020202637 + ], + [ + "▁Bosh", + -15.012868881225586 
+ ], + [ + "Wick", + -15.012869834899902 + ], + [ + "transcript", + -15.012882232666016 + ], + [ + "▁Archae", + -15.012883186340332 + ], + [ + "▁Paley", + -15.012951850891112 + ], + [ + "MAKER", + -15.012986183166504 + ], + [ + "▁autoclave", + -15.013021469116213 + ], + [ + "ooey", + -15.01304531097412 + ], + [ + "▁motorboat", + -15.013068199157717 + ], + [ + "▁Osun", + -15.013283729553224 + ], + [ + "mittance", + -15.013331413269045 + ], + [ + "▁Nadd", + -15.013387680053713 + ], + [ + "Yong", + -15.013401985168455 + ], + [ + "2:02", + -15.013625144958496 + ], + [ + "Leftover", + -15.013683319091797 + ], + [ + "COVER", + -15.013786315917969 + ], + [ + "▁Ovid", + -15.013813972473145 + ], + [ + "▁Fuqua", + -15.013826370239258 + ], + [ + "▁Dataset", + -15.013846397399902 + ], + [ + "▁Shiba", + -15.014141082763672 + ], + [ + "00:4", + -15.014492988586426 + ], + [ + "idence", + -15.014544486999512 + ], + [ + "▁PKK", + -15.014549255371094 + ], + [ + "▁framerate", + -15.014695167541504 + ], + [ + "adjusting", + -15.014715194702148 + ], + [ + "terrestrial", + -15.014970779418944 + ], + [ + "▁23:2", + -15.015031814575195 + ], + [ + "ocative", + -15.015331268310549 + ], + [ + "voter", + -15.015348434448242 + ], + [ + "france", + -15.015433311462402 + ], + [ + "▁shuck", + -15.01545524597168 + ], + [ + "vlad", + -15.015497207641602 + ], + [ + "CHIP", + -15.015524864196776 + ], + [ + "▁SHAPE", + -15.015573501586914 + ], + [ + "рат", + -15.015578269958496 + ], + [ + "▁Renn", + -15.01558780670166 + ], + [ + "▁SRX", + -15.01583766937256 + ], + [ + "kush", + -15.015945434570312 + ], + [ + "HOOD", + -15.01595973968506 + ], + [ + "▁Arra", + -15.016142845153809 + ], + [ + "▁Majorca", + -15.016160011291504 + ], + [ + "RGC", + -15.016282081604004 + ], + [ + "haji", + -15.016307830810549 + ], + [ + "25.00", + -15.016332626342772 + ], + [ + "▁Valu", + -15.016397476196287 + ], + [ + "▁Kanter", + -15.01650047302246 + ], + [ + "Tub", + -15.016529083251951 + ], + [ + "▁Spei", + -15.01658821105957 + ], + [ + "▁FIDE", + -15.016592979431152 + ], + [ + "0008", + -15.016643524169922 + ], + [ + "1.0%", + -15.016732215881348 + ], + [ + "ILLE", + -15.016740798950195 + ], + [ + "cached", + -15.016776084899902 + ], + [ + "▁snipe", + -15.016858100891112 + ], + [ + "0.19", + -15.017125129699709 + ], + [ + "▁Crumble", + -15.017168045043944 + ], + [ + "▁brownfield", + -15.01724910736084 + ], + [ + "Cheng", + -15.01731014251709 + ], + [ + "Objection", + -15.017341613769531 + ], + [ + "▁Clavi", + -15.017440795898438 + ], + [ + "aunch", + -15.017462730407717 + ], + [ + "bsp", + -15.017485618591309 + ], + [ + "▁Merk", + -15.017534255981444 + ], + [ + "▁Mosco", + -15.017539024353027 + ], + [ + "ridgid", + -15.017569541931152 + ], + [ + "▁20:3", + -15.01760196685791 + ], + [ + "hadoop", + -15.017662048339844 + ], + [ + "laura", + -15.017740249633787 + ], + [ + "▁disempower", + -15.017791748046877 + ], + [ + "▁Argon", + -15.017857551574709 + ], + [ + "shwar", + -15.017892837524414 + ], + [ + "foto", + -15.017977714538574 + ], + [ + "contributory", + -15.018083572387695 + ], + [ + "▁Woodburn", + -15.01810073852539 + ], + [ + "▁1774", + -15.01812744140625 + ], + [ + "HOLD", + -15.018207550048828 + ], + [ + "hori", + -15.018312454223633 + ], + [ + "ANTIC", + -15.018414497375488 + ], + [ + "usser", + -15.018500328063965 + ], + [ + "pierre", + -15.018594741821287 + ], + [ + "inska", + -15.018627166748049 + ], + [ + "▁$450,000", + -15.018635749816896 + ], + [ + "▁Lira", + -15.018671035766602 + ], + [ + "▁REDD", + -15.018733024597168 + ], + [ + 
"▁14.8", + -15.01889991760254 + ], + [ + "0101", + -15.018916130065918 + ], + [ + "gamma", + -15.01904296875 + ], + [ + "▁WSP", + -15.019064903259276 + ], + [ + "▁$1,600", + -15.019094467163086 + ], + [ + "apse", + -15.0191011428833 + ], + [ + "▁973-", + -15.019227027893066 + ], + [ + "Prospect", + -15.0194673538208 + ], + [ + "Meadow", + -15.019546508789062 + ], + [ + "ESCO", + -15.019559860229492 + ], + [ + "Elevate", + -15.019571304321287 + ], + [ + "corrupt", + -15.019572257995604 + ], + [ + "Horizon", + -15.019583702087402 + ], + [ + "Worry", + -15.0195894241333 + ], + [ + "Dolphin", + -15.019590377807615 + ], + [ + "Favourite", + -15.01959228515625 + ], + [ + "Criteria", + -15.019597053527832 + ], + [ + "▁SPH", + -15.01959991455078 + ], + [ + "Welsh", + -15.019604682922363 + ], + [ + "intrusive", + -15.019604682922363 + ], + [ + "collision", + -15.019617080688477 + ], + [ + "Essex", + -15.019619941711426 + ], + [ + "Lindsey", + -15.019619941711426 + ], + [ + "Pilates", + -15.019619941711426 + ], + [ + "registry", + -15.01962184906006 + ], + [ + "squeeze", + -15.019623756408691 + ], + [ + "innovative", + -15.019628524780272 + ], + [ + "Limestone", + -15.019630432128906 + ], + [ + "Stroke", + -15.019630432128906 + ], + [ + "Valerie", + -15.019631385803224 + ], + [ + "defeating", + -15.019636154174805 + ], + [ + "abrasive", + -15.019640922546388 + ], + [ + "Gilbert", + -15.019644737243652 + ], + [ + "Pension", + -15.019646644592283 + ], + [ + "Cannon", + -15.0196533203125 + ], + [ + "harmony", + -15.019654273986816 + ], + [ + "Rochester", + -15.01965618133545 + ], + [ + "quity", + -15.019737243652344 + ], + [ + "grandson", + -15.019750595092772 + ], + [ + "strung", + -15.019801139831545 + ], + [ + "▁05:5", + -15.019816398620604 + ], + [ + "Greet", + -15.019818305969238 + ], + [ + "Steward", + -15.019822120666504 + ], + [ + "Fitted", + -15.019834518432615 + ], + [ + "Passionate", + -15.019951820373535 + ], + [ + "easier", + -15.019973754882812 + ], + [ + "greatest", + -15.019974708557127 + ], + [ + "Blair", + -15.019976615905762 + ], + [ + "▁Kruse", + -15.01999855041504 + ], + [ + "▁13:4", + -15.02013874053955 + ], + [ + "Gutter", + -15.020163536071776 + ], + [ + "membership", + -15.020163536071776 + ], + [ + "controlling", + -15.02019500732422 + ], + [ + "Landmark", + -15.020225524902344 + ], + [ + "▁parasit", + -15.020288467407228 + ], + [ + "Basement", + -15.020301818847656 + ], + [ + "osum", + -15.020319938659668 + ], + [ + "▁Sickle", + -15.020380973815918 + ], + [ + "▁Colonia", + -15.02038288116455 + ], + [ + "▁baptize", + -15.02047348022461 + ], + [ + "▁mutate", + -15.020505905151367 + ], + [ + "▁Qiu", + -15.020516395568848 + ], + [ + "▁Symbolic", + -15.020553588867188 + ], + [ + "Cargo", + -15.02057933807373 + ], + [ + "Mandy", + -15.02061367034912 + ], + [ + "▁Antiqu", + -15.020770072937012 + ], + [ + "Cheek", + -15.02077865600586 + ], + [ + "▁ORGAN", + -15.02089786529541 + ], + [ + "Reluctant", + -15.021073341369627 + ], + [ + "▁Altrincham", + -15.021073341369627 + ], + [ + "▁BOTOX", + -15.021073341369627 + ], + [ + "▁Commemorative", + -15.021073341369627 + ], + [ + "▁DISTRICT", + -15.021073341369627 + ], + [ + "▁DaVinci", + -15.021073341369627 + ], + [ + "▁Ecumenical", + -15.021073341369627 + ], + [ + "▁Gilmour", + -15.021073341369627 + ], + [ + "▁Hounslow", + -15.021073341369627 + ], + [ + "▁INCREDIBLE", + -15.021073341369627 + ], + [ + "▁Invictus", + -15.021073341369627 + ], + [ + "▁Rapporteur", + -15.021073341369627 + ], + [ + "▁Skopje", + -15.021073341369627 + ], + [ + 
"▁Svetlana", + -15.021073341369627 + ], + [ + "▁Symfony", + -15.021073341369627 + ], + [ + "▁Vittorio", + -15.021073341369627 + ], + [ + "▁Vonnegut", + -15.021073341369627 + ], + [ + "▁Wisbech", + -15.021073341369627 + ], + [ + "▁chipmunk", + -15.021073341369627 + ], + [ + "▁constitutive", + -15.021073341369627 + ], + [ + "▁equitably", + -15.021073341369627 + ], + [ + "▁escarpment", + -15.021073341369627 + ], + [ + "▁febrile", + -15.021073341369627 + ], + [ + "▁forewarned", + -15.021073341369627 + ], + [ + "▁harmonization", + -15.021073341369627 + ], + [ + "▁illegible", + -15.021073341369627 + ], + [ + "▁incriminating", + -15.021073341369627 + ], + [ + "▁infomercial", + -15.021073341369627 + ], + [ + "▁intergalactic", + -15.021073341369627 + ], + [ + "▁irrepressible", + -15.021073341369627 + ], + [ + "▁menagerie", + -15.021073341369627 + ], + [ + "▁obscuring", + -15.021073341369627 + ], + [ + "▁peloton", + -15.021073341369627 + ], + [ + "▁pornographic", + -15.021073341369627 + ], + [ + "▁quadriceps", + -15.021073341369627 + ], + [ + "▁rhinoceros", + -15.021073341369627 + ], + [ + "▁stabilising", + -15.021073341369627 + ], + [ + "▁summarily", + -15.021073341369627 + ], + [ + "▁unafraid", + -15.021073341369627 + ], + [ + "rttemberg", + -15.021074295043944 + ], + [ + "▁CheapOair", + -15.021074295043944 + ], + [ + "▁Franchising", + -15.021074295043944 + ], + [ + "▁Hierarchy", + -15.021074295043944 + ], + [ + "▁Johnathan", + -15.021074295043944 + ], + [ + "▁SUBJECT", + -15.021074295043944 + ], + [ + "▁TEXAS", + -15.021074295043944 + ], + [ + "▁elegans", + -15.021074295043944 + ], + [ + "▁hypnotize", + -15.021074295043944 + ], + [ + "▁Juvederm", + -15.021075248718262 + ], + [ + "▁minaret", + -15.021075248718262 + ], + [ + "▁wriggle", + -15.021075248718262 + ], + [ + "▁Competencies", + -15.021076202392578 + ], + [ + "▁Easley", + -15.021076202392578 + ], + [ + "▁Pilsner", + -15.021076202392578 + ], + [ + "▁fattening", + -15.021076202392578 + ], + [ + "▁Beveled", + -15.021077156066896 + ], + [ + "▁Cohort", + -15.021077156066896 + ], + [ + "▁SHOPPING", + -15.021077156066896 + ], + [ + "Feldspar", + -15.021079063415527 + ], + [ + "▁Cristobal", + -15.021079063415527 + ], + [ + "▁nonresident", + -15.021079063415527 + ], + [ + "▁sanctify", + -15.021079063415527 + ], + [ + "▁levity", + -15.021080017089844 + ], + [ + "Physicist", + -15.02108097076416 + ], + [ + "▁Quigley", + -15.02108097076416 + ], + [ + "▁impermeable", + -15.02108097076416 + ], + [ + "▁languid", + -15.02108097076416 + ], + [ + "▁Vazquez", + -15.021082878112791 + ], + [ + "▁anonymized", + -15.02108383178711 + ], + [ + "▁Spartacus", + -15.021084785461426 + ], + [ + "▁condi", + -15.021084785461426 + ], + [ + "▁ziploc", + -15.021084785461426 + ], + [ + "▁RULES", + -15.02108669281006 + ], + [ + "▁Bentleigh", + -15.021087646484377 + ], + [ + "▁Snowdonia", + -15.021088600158691 + ], + [ + "▁Violations", + -15.021089553833008 + ], + [ + "▁Windhoek", + -15.021090507507324 + ], + [ + "▁Maddox", + -15.021093368530272 + ], + [ + "▁ANSYS", + -15.021096229553224 + ], + [ + "▁Beanstalk", + -15.021096229553224 + ], + [ + "▁Karzai", + -15.021096229553224 + ], + [ + "▁NBCUniversal", + -15.021096229553224 + ], + [ + "▁contrarian", + -15.021096229553224 + ], + [ + "▁Wolters", + -15.021100997924805 + ], + [ + "▁Compiler", + -15.02110195159912 + ], + [ + "▁gooseneck", + -15.02110767364502 + ], + [ + "▁Dialysis", + -15.021111488342283 + ], + [ + "▁electrolytic", + -15.021111488342283 + ], + [ + "▁Tunbridge", + -15.021113395690918 + ], + [ + "Eazy", + 
-15.021117210388184 + ], + [ + "▁Segura", + -15.0211181640625 + ], + [ + "▁(1977)", + -15.021124839782717 + ], + [ + "▁bonito", + -15.02113151550293 + ], + [ + "▁geochemical", + -15.021132469177246 + ], + [ + "▁Scrappy", + -15.021133422851562 + ], + [ + "▁SMILE", + -15.02114200592041 + ], + [ + "▁Burleigh", + -15.021143913269045 + ], + [ + "▁Idlib", + -15.021143913269045 + ], + [ + "▁Shaheen", + -15.021150588989258 + ], + [ + "▁accross", + -15.021161079406738 + ], + [ + "▁stratum", + -15.021161079406738 + ], + [ + "▁Ducey", + -15.021169662475586 + ], + [ + "▁autocratic", + -15.021175384521484 + ], + [ + "▁duress", + -15.021175384521484 + ], + [ + "▁Stevia", + -15.02118968963623 + ], + [ + "▁Starfleet", + -15.021211624145508 + ], + [ + "▁Tensor", + -15.02121353149414 + ], + [ + "igkeit", + -15.02121639251709 + ], + [ + "▁Andersson", + -15.021220207214355 + ], + [ + "▁halide", + -15.021222114562988 + ], + [ + "▁Halsey", + -15.021223068237305 + ], + [ + "▁Haymarket", + -15.021225929260254 + ], + [ + "()+", + -15.021228790283203 + ], + [ + "stitching", + -15.021247863769531 + ], + [ + "▁Manicure", + -15.021249771118164 + ], + [ + "▁lyricism", + -15.021252632141112 + ], + [ + "▁laminator", + -15.02125644683838 + ], + [ + "▁duffle", + -15.021257400512695 + ], + [ + "▁Devonport", + -15.021268844604492 + ], + [ + "▁Zarif", + -15.02127170562744 + ], + [ + "Programme", + -15.021280288696287 + ], + [ + "▁Lucite", + -15.021283149719238 + ], + [ + "▁grubby", + -15.021283149719238 + ], + [ + "▁prostrate", + -15.02128791809082 + ], + [ + "▁counterclaim", + -15.02129077911377 + ], + [ + "▁flagpole", + -15.0212984085083 + ], + [ + "▁soundproofing", + -15.021300315856934 + ], + [ + "▁cDNA", + -15.02130126953125 + ], + [ + "▁pronto", + -15.021310806274414 + ], + [ + "Defined", + -15.02131175994873 + ], + [ + "▁Oceanography", + -15.02131175994873 + ], + [ + "Corona", + -15.021334648132324 + ], + [ + "Experimental", + -15.021345138549805 + ], + [ + "▁Buckhead", + -15.021347999572754 + ], + [ + "▁Brooch", + -15.021361351013184 + ], + [ + "Rory", + -15.021382331848145 + ], + [ + "▁prepackaged", + -15.021388053894045 + ], + [ + "▁PostScript", + -15.021390914916992 + ], + [ + "▁quince", + -15.021406173706056 + ], + [ + "missible", + -15.021411895751951 + ], + [ + "▁picnicking", + -15.021414756774902 + ], + [ + "▁Dizzy", + -15.021419525146484 + ], + [ + "▁CIBC", + -15.021458625793455 + ], + [ + "▁Kinne", + -15.021462440490724 + ], + [ + "drupal", + -15.021472930908203 + ], + [ + "▁internment", + -15.02147388458252 + ], + [ + "▁Dailey", + -15.021485328674316 + ], + [ + "▁vlogger", + -15.021503448486328 + ], + [ + "▁Stormwater", + -15.02150535583496 + ], + [ + "▁vapers", + -15.021514892578123 + ], + [ + "▁Bream", + -15.021544456481934 + ], + [ + "▁NHRA", + -15.021560668945312 + ], + [ + "GbE", + -15.021613121032717 + ], + [ + "▁Iceberg", + -15.021613121032717 + ], + [ + "▁wormhole", + -15.02161979675293 + ], + [ + "▁Hemming", + -15.02163791656494 + ], + [ + "galley", + -15.021647453308104 + ], + [ + "bilify", + -15.021649360656738 + ], + [ + "▁$165", + -15.021650314331056 + ], + [ + "▁Broadcasters", + -15.021662712097168 + ], + [ + "▁ENOUGH", + -15.021666526794434 + ], + [ + "▁Bayshore", + -15.021672248840332 + ], + [ + "▁squished", + -15.021738052368164 + ], + [ + "Clarification", + -15.021746635437012 + ], + [ + "▁refiners", + -15.021767616271973 + ], + [ + "▁datum", + -15.02179718017578 + ], + [ + "▁tomboy", + -15.021821975708008 + ], + [ + "ulence", + -15.02182388305664 + ], + [ + "▁Nineteenth", + 
-15.021833419799805 + ], + [ + "smoked", + -15.021860122680664 + ], + [ + "realestate", + -15.021876335144045 + ], + [ + "▁martyred", + -15.021878242492676 + ], + [ + "Fairy", + -15.02188491821289 + ], + [ + "▁ziplock", + -15.021915435791016 + ], + [ + "▁scuffed", + -15.02193546295166 + ], + [ + "▁gimmicky", + -15.02198314666748 + ], + [ + "▁Reflector", + -15.02198600769043 + ], + [ + "Romagna", + -15.02198886871338 + ], + [ + "▁Ewald", + -15.022024154663086 + ], + [ + "▁Cronin", + -15.022031784057615 + ], + [ + "▁Mendi", + -15.0220365524292 + ], + [ + "12-4", + -15.022046089172363 + ], + [ + "▁Vigne", + -15.022100448608398 + ], + [ + "Offered", + -15.022127151489258 + ], + [ + "▁Stopped", + -15.022157669067385 + ], + [ + "▁corroborated", + -15.022159576416016 + ], + [ + "▁Homeschooling", + -15.022187232971191 + ], + [ + "▁relishes", + -15.02220058441162 + ], + [ + "villi", + -15.022252082824709 + ], + [ + "▁Mountaineering", + -15.022292137145996 + ], + [ + "▁Motive", + -15.022368431091309 + ], + [ + "▁kayakers", + -15.022446632385254 + ], + [ + "ektor", + -15.022509574890137 + ], + [ + "Oriental", + -15.0225191116333 + ], + [ + "▁homeschoolers", + -15.02254867553711 + ], + [ + "▁victimization", + -15.022562980651855 + ], + [ + "▁flattery", + -15.022568702697754 + ], + [ + "▁apparatuses", + -15.022624015808104 + ], + [ + "▁HCl", + -15.022629737854004 + ], + [ + "▁Scented", + -15.022649765014648 + ], + [ + "▁$145", + -15.022666931152344 + ], + [ + "▁Leif", + -15.022713661193848 + ], + [ + "1905", + -15.022717475891112 + ], + [ + "▁handymen", + -15.022740364074709 + ], + [ + "ERIAL", + -15.022801399230955 + ], + [ + "▁implantable", + -15.022833824157717 + ], + [ + "▁tux", + -15.022871017456056 + ], + [ + "ARCO", + -15.022921562194824 + ], + [ + "eshwari", + -15.022997856140137 + ], + [ + "SNAP", + -15.023032188415527 + ], + [ + "▁jeopardized", + -15.023043632507324 + ], + [ + "▁TDR", + -15.02306079864502 + ], + [ + "Relocating", + -15.023104667663574 + ], + [ + "▁Fairies", + -15.023163795471191 + ], + [ + "▁manhunt", + -15.02326488494873 + ], + [ + "▁Krause", + -15.023319244384766 + ], + [ + "▁Lorne", + -15.023361206054688 + ], + [ + "▁Miko", + -15.02337646484375 + ], + [ + "▁Rajasthani", + -15.023426055908203 + ], + [ + "▁MCX", + -15.023438453674316 + ], + [ + "▁Scholarly", + -15.023500442504885 + ], + [ + "▁discretely", + -15.023504257202148 + ], + [ + "9:23", + -15.023505210876465 + ], + [ + "1.17", + -15.023536682128906 + ], + [ + "▁SRO", + -15.023555755615234 + ], + [ + "▁16-17", + -15.02358341217041 + ], + [ + "▁Sibi", + -15.02358341217041 + ], + [ + "Europa", + -15.02366065979004 + ], + [ + "▁matron", + -15.023917198181152 + ], + [ + "▁defensemen", + -15.023969650268556 + ], + [ + "Vari", + -15.024051666259766 + ], + [ + "▁latching", + -15.02424144744873 + ], + [ + "ESTER", + -15.024258613586426 + ], + [ + "THAT", + -15.024303436279297 + ], + [ + "▁maar", + -15.024319648742676 + ], + [ + "▁deductive", + -15.02437686920166 + ], + [ + "ttingen", + -15.024510383605955 + ], + [ + "GAD", + -15.024550437927246 + ], + [ + "▁alexa", + -15.024584770202637 + ], + [ + "eventbrite", + -15.024703025817873 + ], + [ + "▁spatter", + -15.024839401245115 + ], + [ + "ANDOM", + -15.024942398071287 + ], + [ + "Carroll", + -15.02517318725586 + ], + [ + "▁tater", + -15.02521514892578 + ], + [ + "plenty", + -15.025301933288574 + ], + [ + "symmetry", + -15.025309562683104 + ], + [ + "hyun", + -15.025313377380373 + ], + [ + "▁Spool", + -15.025348663330078 + ], + [ + "leisch", + -15.025350570678713 + ], + [ + 
"tinyurl", + -15.025383949279783 + ], + [ + "smtp", + -15.025602340698242 + ], + [ + "Endorse", + -15.02570915222168 + ], + [ + "▁Coursera", + -15.02576732635498 + ], + [ + "1892", + -15.02592658996582 + ], + [ + "jaz", + -15.026164054870604 + ], + [ + "▁Glyco", + -15.0263032913208 + ], + [ + "Mitch", + -15.026325225830078 + ], + [ + "guang", + -15.026585578918455 + ], + [ + "Babe", + -15.026992797851562 + ], + [ + "▁Shige", + -15.027064323425291 + ], + [ + "▁Karoo", + -15.027109146118164 + ], + [ + "▁trach", + -15.027125358581545 + ], + [ + "▁(£3", + -15.027406692504885 + ], + [ + "▁Nihon", + -15.027416229248049 + ], + [ + "▁Kaine", + -15.027474403381348 + ], + [ + "▁assistantship", + -15.027507781982422 + ], + [ + "▁Leder", + -15.027676582336426 + ], + [ + "warp", + -15.027750015258787 + ], + [ + "medal", + -15.02784252166748 + ], + [ + ".6%)", + -15.027960777282717 + ], + [ + "▁Mutation", + -15.027971267700195 + ], + [ + "qualifying", + -15.028100967407228 + ], + [ + "▁Climber", + -15.02812385559082 + ], + [ + "▁Salk", + -15.02827262878418 + ], + [ + "8:18", + -15.028340339660645 + ], + [ + "▁tracksuit", + -15.028369903564451 + ], + [ + "TUM", + -15.028386116027832 + ], + [ + "▁Quadro", + -15.028414726257324 + ], + [ + "zhong", + -15.028521537780762 + ], + [ + "Allergic", + -15.028553009033203 + ], + [ + "▁Dysfunction", + -15.028573036193848 + ], + [ + "CAB", + -15.028590202331545 + ], + [ + "beng", + -15.028620719909668 + ], + [ + "1957", + -15.028675079345703 + ], + [ + "merger", + -15.028676986694336 + ], + [ + "▁suburbia", + -15.028786659240724 + ], + [ + "OUGHT", + -15.028852462768556 + ], + [ + "Cyclo", + -15.028868675231934 + ], + [ + "▁$119", + -15.028889656066896 + ], + [ + "Chau", + -15.028895378112791 + ], + [ + "Thon", + -15.029129981994627 + ], + [ + "▁Zola", + -15.029136657714844 + ], + [ + "Hike", + -15.029304504394531 + ], + [ + "▁Cowes", + -15.029314994812012 + ], + [ + "▁asthmatic", + -15.029391288757324 + ], + [ + "debate", + -15.029481887817385 + ], + [ + "▁Leger", + -15.02967929840088 + ], + [ + "▁Valli", + -15.02973747253418 + ], + [ + "▁BSB", + -15.029738426208496 + ], + [ + "STACK", + -15.029805183410645 + ], + [ + "entrusting", + -15.029820442199709 + ], + [ + "▁Chiba", + -15.029901504516602 + ], + [ + "iaceae", + -15.030052185058594 + ], + [ + "dropped", + -15.03007984161377 + ], + [ + "▁Chiara", + -15.03011703491211 + ], + [ + "MGP", + -15.030153274536133 + ], + [ + "NUR", + -15.030213356018066 + ], + [ + "correctly", + -15.030255317687988 + ], + [ + "Robo", + -15.030267715454102 + ], + [ + ".08.201", + -15.030314445495604 + ], + [ + "lieu", + -15.03037452697754 + ], + [ + "ouette", + -15.030472755432127 + ], + [ + "LTS", + -15.030668258666992 + ], + [ + "39%", + -15.030861854553224 + ], + [ + "▁Skylar", + -15.030872344970703 + ], + [ + "MIME", + -15.030882835388184 + ], + [ + "disrupt", + -15.031007766723633 + ], + [ + "▁Speer", + -15.031112670898438 + ], + [ + "utilized", + -15.031267166137695 + ], + [ + "CHICAGO", + -15.031316757202148 + ], + [ + "▁0345", + -15.031343460083008 + ], + [ + "Relevant", + -15.031349182128906 + ], + [ + "Garbage", + -15.031354904174805 + ], + [ + "itou", + -15.031356811523438 + ], + [ + "glacial", + -15.031359672546388 + ], + [ + "▁Jesper", + -15.031359672546388 + ], + [ + "Sexual", + -15.0313720703125 + ], + [ + "Brittany", + -15.03137493133545 + ], + [ + "Scratch", + -15.03137493133545 + ], + [ + "Migration", + -15.031375885009766 + ], + [ + "Schmidt", + -15.031378746032717 + ], + [ + "Bamboo", + -15.031379699707031 + ], + [ + 
"occupancy", + -15.031381607055664 + ], + [ + "Lancaster", + -15.03138828277588 + ], + [ + "governing", + -15.031391143798828 + ], + [ + "mystery", + -15.031391143798828 + ], + [ + "Pokemon", + -15.031401634216309 + ], + [ + "polyester", + -15.031402587890623 + ], + [ + "Kidney", + -15.031413078308104 + ], + [ + "basketball", + -15.031414031982422 + ], + [ + "Naomi", + -15.031414985656738 + ], + [ + "Drizzle", + -15.031428337097168 + ], + [ + "▁Puy", + -15.0314302444458 + ], + [ + "VoIP", + -15.031431198120115 + ], + [ + "Pvt", + -15.03143310546875 + ], + [ + "▁Layne", + -15.031434059143066 + ], + [ + "intense", + -15.031449317932127 + ], + [ + "Harmon", + -15.03146266937256 + ], + [ + "pertaining", + -15.031471252441406 + ], + [ + "spanning", + -15.031476020812988 + ], + [ + "transformation", + -15.031506538391112 + ], + [ + "Ellie", + -15.031527519226074 + ], + [ + "Toddler", + -15.03153133392334 + ], + [ + "Disability", + -15.031577110290527 + ], + [ + "Glide", + -15.031586647033691 + ], + [ + "cheapest", + -15.031594276428224 + ], + [ + "Diego", + -15.031609535217283 + ], + [ + "Applicant", + -15.031623840332031 + ], + [ + "breeze", + -15.03165340423584 + ], + [ + "Sunflower", + -15.031718254089355 + ], + [ + "countertop", + -15.031744003295898 + ], + [ + "Climb", + -15.031807899475098 + ], + [ + "Burton", + -15.031864166259766 + ], + [ + "▁emigrate", + -15.03196620941162 + ], + [ + "▁vaccinate", + -15.031967163085938 + ], + [ + "▁Bulgari", + -15.032014846801758 + ], + [ + "Wellington", + -15.032036781311035 + ], + [ + "CEPT", + -15.032037734985352 + ], + [ + "▁Coyne", + -15.032060623168944 + ], + [ + "0-001", + -15.032160758972168 + ], + [ + "Recognized", + -15.032207489013672 + ], + [ + "▁Daria", + -15.032245635986328 + ], + [ + "▁twinge", + -15.03227710723877 + ], + [ + "▁unwitting", + -15.03232479095459 + ], + [ + "GPC", + -15.032337188720703 + ], + [ + "▁pensive", + -15.032377243041992 + ], + [ + "Taxes", + -15.032456398010254 + ], + [ + "▁Fontan", + -15.03245735168457 + ], + [ + "Distributed", + -15.032465934753418 + ], + [ + "Delivered", + -15.032485008239746 + ], + [ + "Romantic", + -15.032492637634276 + ], + [ + "ceres", + -15.032526016235352 + ], + [ + "▁DTE", + -15.032541275024414 + ], + [ + "Controversy", + -15.032567024230955 + ], + [ + "▁844-329-5869", + -15.032567977905272 + ], + [ + "▁BIGGEST", + -15.032567977905272 + ], + [ + "▁Broyhill", + -15.032567977905272 + ], + [ + "▁CENTURY", + -15.032567977905272 + ], + [ + "▁Carpathian", + -15.032567977905272 + ], + [ + "▁Cuthbert", + -15.032567977905272 + ], + [ + "▁DEPARTMENT", + -15.032567977905272 + ], + [ + "▁DIESEL", + -15.032567977905272 + ], + [ + "▁DuBois", + -15.032567977905272 + ], + [ + "▁Emulsion", + -15.032567977905272 + ], + [ + "▁Excalibur", + -15.032567977905272 + ], + [ + "▁FOOTBALL", + -15.032567977905272 + ], + [ + "▁Hinkley", + -15.032567977905272 + ], + [ + "▁Invincible", + -15.032567977905272 + ], + [ + "▁Ishmael", + -15.032567977905272 + ], + [ + "▁JCPenney", + -15.032567977905272 + ], + [ + "▁Niantic", + -15.032567977905272 + ], + [ + "▁Smokies", + -15.032567977905272 + ], + [ + "▁armadillo", + -15.032567977905272 + ], + [ + "▁disintegrating", + -15.032567977905272 + ], + [ + "▁dyspepsia", + -15.032567977905272 + ], + [ + "▁equanimity", + -15.032567977905272 + ], + [ + "▁giclee", + -15.032567977905272 + ], + [ + "▁hypnotherapist", + -15.032567977905272 + ], + [ + "▁obelisk", + -15.032567977905272 + ], + [ + "▁overzealous", + -15.032567977905272 + ], + [ + "▁periodontitis", + -15.032567977905272 + ], + 
[ + "▁petticoat", + -15.032567977905272 + ], + [ + "▁prophylactic", + -15.032567977905272 + ], + [ + "▁rebuffed", + -15.032567977905272 + ], + [ + "▁recidivism", + -15.032567977905272 + ], + [ + "▁rummaging", + -15.032567977905272 + ], + [ + "▁uninhibited", + -15.032567977905272 + ], + [ + "▁unseasonably", + -15.032567977905272 + ], + [ + "▁14891718502", + -15.03256893157959 + ], + [ + "▁14891718529", + -15.03256893157959 + ], + [ + "▁48315449180", + -15.03256893157959 + ], + [ + "▁Bixby", + -15.03256893157959 + ], + [ + "▁Donahue", + -15.03256893157959 + ], + [ + "▁homocysteine", + -15.032569885253906 + ], + [ + "▁virulence", + -15.032569885253906 + ], + [ + "▁Abdallah", + -15.032570838928224 + ], + [ + "▁Battlegrounds", + -15.032570838928224 + ], + [ + "▁Coffman", + -15.032570838928224 + ], + [ + "▁Offenders", + -15.032570838928224 + ], + [ + "▁Swoosh", + -15.032570838928224 + ], + [ + "▁itinerant", + -15.032570838928224 + ], + [ + "▁Dagenham", + -15.03257179260254 + ], + [ + "▁Schuyler", + -15.03257179260254 + ], + [ + "▁RUNNING", + -15.032572746276855 + ], + [ + "▁abhorrent", + -15.032572746276855 + ], + [ + "▁Uhuru", + -15.032573699951172 + ], + [ + "▁Annoying", + -15.032574653625488 + ], + [ + "▁Rialto", + -15.032575607299805 + ], + [ + "▁electrocution", + -15.032575607299805 + ], + [ + "▁Marauder", + -15.03257656097412 + ], + [ + "▁Propylene", + -15.03257656097412 + ], + [ + "▁Piccolo", + -15.032577514648438 + ], + [ + "▁Bioethics", + -15.032578468322754 + ], + [ + "▁hypertrophy", + -15.03257942199707 + ], + [ + "▁(847)", + -15.03258228302002 + ], + [ + "▁Dartmoor", + -15.03258228302002 + ], + [ + "▁cushy", + -15.03258228302002 + ], + [ + "Southeast", + -15.032583236694336 + ], + [ + "▁Diageo", + -15.032583236694336 + ], + [ + "▁abacus", + -15.032583236694336 + ], + [ + "▁chalice", + -15.032584190368652 + ], + [ + "▁stammer", + -15.032584190368652 + ], + [ + "▁sagebrush", + -15.032591819763184 + ], + [ + "▁Baggage", + -15.032596588134766 + ], + [ + "▁Vitamix", + -15.032600402832031 + ], + [ + "▁bombastic", + -15.032601356506348 + ], + [ + "▁Piaget", + -15.03260326385498 + ], + [ + "▁karmic", + -15.03260326385498 + ], + [ + "▁DUTY", + -15.032604217529297 + ], + [ + "▁Emacs", + -15.03260612487793 + ], + [ + "▁INNOVAT", + -15.032608032226562 + ], + [ + "▁Emirate", + -15.032609939575195 + ], + [ + "▁Bowflex", + -15.03261375427246 + ], + [ + "▁Lanark", + -15.03261375427246 + ], + [ + "▁geisha", + -15.032617568969728 + ], + [ + "▁THOUGHT", + -15.032621383666992 + ], + [ + "▁Matterhorn", + -15.032623291015623 + ], + [ + "▁Phelan", + -15.032625198364258 + ], + [ + "▁meteorology", + -15.032631874084473 + ], + [ + "▁braille", + -15.032633781433104 + ], + [ + "▁Bertram", + -15.032637596130373 + ], + [ + "▁quartile", + -15.032637596130373 + ], + [ + "▁MYOB", + -15.032639503479004 + ], + [ + "▁chugging", + -15.032639503479004 + ], + [ + "▁COBIT", + -15.032645225524902 + ], + [ + "curbless", + -15.032647132873535 + ], + [ + "▁retold", + -15.032648086547852 + ], + [ + "▁bailiff", + -15.032649993896484 + ], + [ + "▁grapeseed", + -15.032649993896484 + ], + [ + "▁misdirected", + -15.032652854919434 + ], + [ + "▁Grisham", + -15.032657623291016 + ], + [ + "▁strove", + -15.032657623291016 + ], + [ + "▁democratization", + -15.032660484313965 + ], + [ + "▁raglan", + -15.032679557800291 + ], + [ + "▁conferring", + -15.032691955566406 + ], + [ + "RUNNER", + -15.032696723937988 + ], + [ + "▁Wanaka", + -15.032703399658203 + ], + [ + "▁crowdsourced", + -15.032723426818848 + ], + [ + "Newport", + 
-15.03272819519043 + ], + [ + "▁brainwashed", + -15.03274917602539 + ], + [ + "Archived", + -15.032750129699709 + ], + [ + "▁Symptom", + -15.032751083374023 + ], + [ + "pnoea", + -15.03275203704834 + ], + [ + "▁shinier", + -15.032763481140137 + ], + [ + "▁breadwinner", + -15.032767295837402 + ], + [ + "1.19", + -15.032773971557615 + ], + [ + "▁radioactivity", + -15.032784461975098 + ], + [ + "▁stupa", + -15.03279972076416 + ], + [ + "*******", + -15.032814025878906 + ], + [ + "▁radiofrequency", + -15.032814025878906 + ], + [ + "▁stirrer", + -15.032814025878906 + ], + [ + "▁lemur", + -15.032830238342283 + ], + [ + "▁Gentoo", + -15.032846450805664 + ], + [ + "▁EGFR", + -15.032848358154297 + ], + [ + "▁homewares", + -15.032856941223145 + ], + [ + "moncler", + -15.032862663269045 + ], + [ + "▁Geisha", + -15.032864570617676 + ], + [ + "arek", + -15.032876014709473 + ], + [ + "▁Premises", + -15.032883644104004 + ], + [ + "Spangled", + -15.03289031982422 + ], + [ + "▁Arapahoe", + -15.032892227172852 + ], + [ + "Callback", + -15.032894134521484 + ], + [ + "▁thailand", + -15.032917976379396 + ], + [ + "▁ralph", + -15.03292751312256 + ], + [ + "▁Chancery", + -15.032941818237305 + ], + [ + "▁jittery", + -15.032941818237305 + ], + [ + "▁Esau", + -15.03300666809082 + ], + [ + "Michele", + -15.033008575439451 + ], + [ + "▁Blowout", + -15.033011436462402 + ], + [ + "▁Powertrain", + -15.033045768737791 + ], + [ + "▁privatized", + -15.033066749572754 + ], + [ + "▁Ochs", + -15.033082962036133 + ], + [ + "▁submitter", + -15.033085823059082 + ], + [ + "▁fossilized", + -15.033099174499512 + ], + [ + "▁Midget", + -15.033123970031738 + ], + [ + "▁alumnae", + -15.033143997192385 + ], + [ + "▁NVMe", + -15.033150672912598 + ], + [ + "▁UCSD", + -15.03318214416504 + ], + [ + "Launching", + -15.033204078674316 + ], + [ + "▁Rainey", + -15.033205032348633 + ], + [ + "▁CONTINUE", + -15.033246994018556 + ], + [ + "▁landmass", + -15.033260345458984 + ], + [ + "▁Supergirl", + -15.033270835876465 + ], + [ + "2019-04-0", + -15.033288955688477 + ], + [ + "▁demonize", + -15.033300399780272 + ], + [ + "▁strangled", + -15.03331470489502 + ], + [ + "▁Chatting", + -15.033329010009766 + ], + [ + "▁Coolest", + -15.033339500427246 + ], + [ + "▁COAST", + -15.03335666656494 + ], + [ + "▁Saipan", + -15.033381462097168 + ], + [ + "▁swordfish", + -15.03339672088623 + ], + [ + "▁Marist", + -15.033404350280762 + ], + [ + "▁unlined", + -15.033413887023926 + ], + [ + "▁boatload", + -15.033417701721191 + ], + [ + "▁Suga", + -15.033418655395508 + ], + [ + "▁DOOM", + -15.033435821533203 + ], + [ + "▁Mittal", + -15.03343677520752 + ], + [ + "▁Pascoe", + -15.033469200134276 + ], + [ + "▁disinfected", + -15.033474922180176 + ], + [ + "▁Intuition", + -15.03360652923584 + ], + [ + "▁McClelland", + -15.033617973327637 + ], + [ + "ROME", + -15.033629417419434 + ], + [ + "▁Margate", + -15.033656120300291 + ], + [ + "▁PNW", + -15.033747673034668 + ], + [ + "▁Nighthawk", + -15.033753395080566 + ], + [ + "▁Snowboarding", + -15.033770561218262 + ], + [ + "▁Portability", + -15.033803939819336 + ], + [ + "▁carpooling", + -15.03384494781494 + ], + [ + "▁midseason", + -15.033950805664062 + ], + [ + "RIK", + -15.033964157104492 + ], + [ + "▁Murcia", + -15.034003257751465 + ], + [ + "▁Patra", + -15.034040451049805 + ], + [ + "michel", + -15.034114837646484 + ], + [ + "▁NAMM", + -15.034128189086914 + ], + [ + "▁Sanrio", + -15.034157752990724 + ], + [ + "▁$90,000", + -15.03421115875244 + ], + [ + "▁Tosca", + -15.034235000610352 + ], + [ + "▁reflectance", + 
-15.034239768981934 + ], + [ + "▁costumers", + -15.034249305725098 + ], + [ + "▁TELE", + -15.034256935119627 + ], + [ + "▁Empowered", + -15.03428554534912 + ], + [ + "▁Bloch", + -15.03439998626709 + ], + [ + "collecting", + -15.034485816955566 + ], + [ + "Kohl", + -15.034575462341309 + ], + [ + "Jacket", + -15.034632682800291 + ], + [ + "▁Amway", + -15.034646034240724 + ], + [ + "▁undercooked", + -15.034676551818848 + ], + [ + "▁tostada", + -15.0347318649292 + ], + [ + "▁Nucl", + -15.03478717803955 + ], + [ + "▁forceps", + -15.034805297851562 + ], + [ + "▁redox", + -15.034809112548828 + ], + [ + "▁Produkt", + -15.034900665283203 + ], + [ + "▁transcending", + -15.034934043884276 + ], + [ + "▁GIANT", + -15.035006523132324 + ], + [ + "Bog", + -15.035032272338867 + ], + [ + "ckmann", + -15.035035133361816 + ], + [ + "▁recuse", + -15.035148620605469 + ], + [ + "▁Italianate", + -15.03515338897705 + ], + [ + "▁06:1", + -15.035274505615234 + ], + [ + "▁Tetsu", + -15.035284996032717 + ], + [ + "▁Geode", + -15.035310745239258 + ], + [ + "3.0%", + -15.035329818725586 + ], + [ + "▁Dumbo", + -15.035402297973633 + ], + [ + "▁cartwheel", + -15.035493850708008 + ], + [ + "▁Pennine", + -15.035524368286133 + ], + [ + "▁SSIS", + -15.035581588745115 + ], + [ + "Bonnie", + -15.035810470581056 + ], + [ + "2–1", + -15.035919189453123 + ], + [ + "petrol", + -15.036035537719728 + ], + [ + "▁Hyland", + -15.036140441894531 + ], + [ + "▁Mulla", + -15.036237716674805 + ], + [ + "perience", + -15.036385536193848 + ], + [ + "Converter", + -15.036389350891112 + ], + [ + "formula", + -15.036413192749023 + ], + [ + "Ohhh", + -15.036493301391602 + ], + [ + "0.90", + -15.036578178405762 + ], + [ + "donate", + -15.03659725189209 + ], + [ + "▁transect", + -15.036629676818848 + ], + [ + "leverage", + -15.036643981933594 + ], + [ + "7:1", + -15.036724090576172 + ], + [ + "▁Shania", + -15.036894798278809 + ], + [ + "blah", + -15.036907196044922 + ], + [ + "sheep", + -15.03701400756836 + ], + [ + "faz", + -15.03703498840332 + ], + [ + "1934", + -15.037036895751951 + ], + [ + "ramming", + -15.037360191345217 + ], + [ + "arctic", + -15.037511825561523 + ], + [ + "mistress", + -15.037548065185549 + ], + [ + "2:42", + -15.037569999694824 + ], + [ + "▁1,250", + -15.037578582763672 + ], + [ + "CHM", + -15.037641525268556 + ], + [ + "▁Shuk", + -15.037739753723145 + ], + [ + "kmph", + -15.037749290466309 + ], + [ + "▁Balk", + -15.037888526916504 + ], + [ + "HSAA", + -15.037919998168944 + ], + [ + "▁Ecco", + -15.037944793701172 + ], + [ + "▁Trab", + -15.037965774536133 + ], + [ + "▁Afrika", + -15.038107872009276 + ], + [ + "Interpersonal", + -15.038166999816896 + ], + [ + "▁13.7", + -15.03859806060791 + ], + [ + "▁democratize", + -15.038810729980469 + ], + [ + "▁Gigg", + -15.038857460021973 + ], + [ + "▁proximate", + -15.038902282714844 + ], + [ + "ALLER", + -15.038915634155272 + ], + [ + "▁Sasa", + -15.039087295532228 + ], + [ + "▁SCCA", + -15.039173126220703 + ], + [ + "▁gris", + -15.039191246032717 + ], + [ + "▁$1,300", + -15.03957748413086 + ], + [ + "0.55", + -15.039580345153809 + ], + [ + "Felt", + -15.039586067199709 + ], + [ + "2:31", + -15.039647102355955 + ], + [ + "▁Safi", + -15.039680480957031 + ], + [ + "▁Rabo", + -15.03969669342041 + ], + [ + "PIO", + -15.039729118347168 + ], + [ + "ardini", + -15.03982925415039 + ], + [ + "▁Logistic", + -15.039887428283691 + ], + [ + "rinse", + -15.04011058807373 + ], + [ + "▁Govinda", + -15.040149688720703 + ], + [ + "▁Housekeeper", + -15.0401611328125 + ], + [ + "▁precipitat", + 
-15.040504455566406 + ], + [ + "crab", + -15.040643692016602 + ], + [ + "Fail", + -15.0406494140625 + ], + [ + "imple", + -15.04065990447998 + ], + [ + "Dunn", + -15.04082489013672 + ], + [ + "▁kenn", + -15.040884971618652 + ], + [ + "SCHE", + -15.04092502593994 + ], + [ + "Alba", + -15.040928840637209 + ], + [ + "APPS", + -15.040931701660156 + ], + [ + "bagger", + -15.041030883789062 + ], + [ + "VLT", + -15.041075706481934 + ], + [ + "▁Suzi", + -15.041160583496094 + ], + [ + "Panic", + -15.041318893432615 + ], + [ + "▁06:0", + -15.041318893432615 + ], + [ + "▁Basie", + -15.041318893432615 + ], + [ + "▁Volga", + -15.041414260864258 + ], + [ + "▁CHART", + -15.0414457321167 + ], + [ + "cadia", + -15.04156494140625 + ], + [ + "▁SAIL", + -15.041693687438965 + ], + [ + "ydd", + -15.041698455810549 + ], + [ + "▁Palomar", + -15.041705131530762 + ], + [ + "organize", + -15.041749000549316 + ], + [ + "ilani", + -15.041804313659668 + ], + [ + "0.98", + -15.04230499267578 + ], + [ + "fren", + -15.042327880859377 + ], + [ + "▁Flicker", + -15.042402267456056 + ], + [ + "avian", + -15.042572021484377 + ], + [ + "▁Magda", + -15.042624473571776 + ], + [ + "Gregory", + -15.042656898498535 + ], + [ + "▁bacteri", + -15.042670249938965 + ], + [ + "Doping", + -15.042776107788086 + ], + [ + "▁Glor", + -15.042797088623049 + ], + [ + "▁Volpe", + -15.04287338256836 + ], + [ + "▁FSF", + -15.04323387145996 + ], + [ + "Convenience", + -15.04326343536377 + ], + [ + "Fragrance", + -15.043264389038086 + ], + [ + "flammable", + -15.043265342712402 + ], + [ + "Cylinder", + -15.04327392578125 + ], + [ + "Reynolds", + -15.04328727722168 + ], + [ + "Algeria", + -15.043288230895996 + ], + [ + "feasible", + -15.043292999267578 + ], + [ + "happily", + -15.043293952941896 + ], + [ + "Automotive", + -15.043295860290527 + ], + [ + "suspended", + -15.043295860290527 + ], + [ + "Persian", + -15.04330062866211 + ], + [ + "Disaster", + -15.043302536010742 + ], + [ + "tutorial", + -15.043309211730955 + ], + [ + "Marathon", + -15.043320655822754 + ], + [ + "Commitment", + -15.04332447052002 + ], + [ + "Organizing", + -15.043325424194336 + ], + [ + "enabling", + -15.043331146240234 + ], + [ + "Kraft", + -15.043347358703612 + ], + [ + "thankfully", + -15.04334831237793 + ], + [ + "▁Semifinal", + -15.043353080749512 + ], + [ + "scholarship", + -15.043364524841309 + ], + [ + "fermented", + -15.043374061584473 + ], + [ + "MBT", + -15.043381690979004 + ], + [ + "slavery", + -15.043391227722168 + ], + [ + "wildlife", + -15.043417930603027 + ], + [ + "▁09:1", + -15.04344654083252 + ], + [ + "literary", + -15.04349422454834 + ], + [ + "Riverside", + -15.043512344360352 + ], + [ + "Cozy", + -15.043537139892578 + ], + [ + "▁ventilate", + -15.043596267700195 + ], + [ + "▁distaste", + -15.043599128723145 + ], + [ + "Zhou", + -15.043603897094728 + ], + [ + "Passenger", + -15.04367446899414 + ], + [ + "bono", + -15.043712615966797 + ], + [ + "▁Haan", + -15.04373550415039 + ], + [ + "Failing", + -15.04378604888916 + ], + [ + "Chalk", + -15.043792724609377 + ], + [ + "destroying", + -15.04383373260498 + ], + [ + "Curator", + -15.043885231018066 + ], + [ + "▁unabashed", + -15.043951988220217 + ], + [ + "EASY", + -15.043984413146973 + ], + [ + "LPT", + -15.043989181518556 + ], + [ + "heritage", + -15.044074058532717 + ], + [ + "instance", + -15.044079780578612 + ], + [ + "SPAM", + -15.044096946716309 + ], + [ + "▁Tranquil", + -15.04417610168457 + ], + [ + "Denise", + -15.044177055358888 + ], + [ + "▁TOYOTA", + -15.044195175170898 + ], + [ + 
"CONGRATULATIONS", + -15.044196128845217 + ], + [ + "▁CloudFlare", + -15.044196128845217 + ], + [ + "▁Farnborough", + -15.044196128845217 + ], + [ + "▁Mackerel", + -15.044196128845217 + ], + [ + "▁Mahathir", + -15.044196128845217 + ], + [ + "▁Oconee", + -15.044196128845217 + ], + [ + "▁Poughkeepsie", + -15.044196128845217 + ], + [ + "▁Schiphol", + -15.044196128845217 + ], + [ + "▁SmackDown", + -15.044196128845217 + ], + [ + "▁Trundle", + -15.044196128845217 + ], + [ + "▁Vajpayee", + -15.044196128845217 + ], + [ + "▁aphrodisiac", + -15.044196128845217 + ], + [ + "▁arthroscopic", + -15.044196128845217 + ], + [ + "▁attenuator", + -15.044196128845217 + ], + [ + "▁benzodiazepines", + -15.044196128845217 + ], + [ + "▁edamame", + -15.044196128845217 + ], + [ + "▁equivalency", + -15.044196128845217 + ], + [ + "▁housewives", + -15.044196128845217 + ], + [ + "▁intricacy", + -15.044196128845217 + ], + [ + "▁knelt", + -15.044196128845217 + ], + [ + "▁lacklustre", + -15.044196128845217 + ], + [ + "▁lectern", + -15.044196128845217 + ], + [ + "▁osprey", + -15.044196128845217 + ], + [ + "▁permafrost", + -15.044196128845217 + ], + [ + "▁perovskite", + -15.044196128845217 + ], + [ + "▁photocopier", + -15.044196128845217 + ], + [ + "▁recluse", + -15.044196128845217 + ], + [ + "▁repossessed", + -15.044196128845217 + ], + [ + "▁DEMAND", + -15.044197082519531 + ], + [ + "▁Naxos", + -15.044197082519531 + ], + [ + "▁Zambezi", + -15.044197082519531 + ], + [ + "▁parenthesis", + -15.044197082519531 + ], + [ + "▁Adoration", + -15.044198036193848 + ], + [ + "▁Atonement", + -15.044198036193848 + ], + [ + "▁Disqus", + -15.044198036193848 + ], + [ + "▁TRANSACTION", + -15.044198036193848 + ], + [ + "▁azithromycin", + -15.044198036193848 + ], + [ + "▁scabbard", + -15.044198036193848 + ], + [ + "Seizure", + -15.044198989868164 + ], + [ + "▁transcribing", + -15.044198989868164 + ], + [ + "▁Shreya", + -15.04419994354248 + ], + [ + "▁Bessemer", + -15.044200897216797 + ], + [ + "▁Sascha", + -15.044200897216797 + ], + [ + "▁TRANSPORT", + -15.044200897216797 + ], + [ + "▁Hernando", + -15.044201850891112 + ], + [ + "▁MQTT", + -15.044201850891112 + ], + [ + "▁Sugarloaf", + -15.044201850891112 + ], + [ + "Genpact", + -15.04420280456543 + ], + [ + "▁Hartsfield", + -15.04420280456543 + ], + [ + "▁durango", + -15.04420280456543 + ], + [ + "▁smudging", + -15.04420280456543 + ], + [ + "▁Anurag", + -15.044203758239746 + ], + [ + "▁injurious", + -15.044203758239746 + ], + [ + "▁locomotion", + -15.044203758239746 + ], + [ + "▁urinating", + -15.044207572937012 + ], + [ + "▁embezzl", + -15.044209480285645 + ], + [ + "▁Custard", + -15.044211387634276 + ], + [ + "▁Mourning", + -15.044212341308594 + ], + [ + "▁Ramayana", + -15.04421329498291 + ], + [ + "▁pretence", + -15.044214248657228 + ], + [ + "▁blanco", + -15.04421615600586 + ], + [ + "▁Inoue", + -15.044219017028809 + ], + [ + "▁Sculptor", + -15.044219017028809 + ], + [ + "▁cherub", + -15.044222831726074 + ], + [ + "▁RETAIL", + -15.044224739074709 + ], + [ + "▁bragged", + -15.04422664642334 + ], + [ + "▁brewpub", + -15.044233322143556 + ], + [ + "▁intermingle", + -15.044235229492188 + ], + [ + "▁Javanese", + -15.044236183166504 + ], + [ + "tisserie", + -15.044238090515137 + ], + [ + "▁Phosphorus", + -15.044243812561035 + ], + [ + "▁FREAK", + -15.044244766235352 + ], + [ + "▁diwali", + -15.0442476272583 + ], + [ + "▁ataxia", + -15.044248580932615 + ], + [ + "replacing", + -15.044249534606934 + ], + [ + "▁abutting", + -15.044251441955566 + ], + [ + "▁Walcott", + -15.044254302978516 + ], + [ + 
"▁brazing", + -15.044255256652832 + ], + [ + "▁itchiness", + -15.044264793395996 + ], + [ + "▁ramparts", + -15.044266700744627 + ], + [ + "▁Contender", + -15.044273376464844 + ], + [ + "▁Ghoul", + -15.044275283813477 + ], + [ + "▁Jeannette", + -15.044282913208008 + ], + [ + "▁Pinehurst", + -15.044289588928224 + ], + [ + "▁Zamora", + -15.04429817199707 + ], + [ + "▁zoology", + -15.04430103302002 + ], + [ + "▁Nuys", + -15.04430866241455 + ], + [ + "▁bumbling", + -15.04430866241455 + ], + [ + "dhgate", + -15.044321060180664 + ], + [ + "otak", + -15.04432201385498 + ], + [ + "▁Helpdesk", + -15.04432201385498 + ], + [ + "▁Severity", + -15.044323921203612 + ], + [ + "▁Fenix", + -15.04432487487793 + ], + [ + "▁Abington", + -15.04433250427246 + ], + [ + "▁Kimono", + -15.044333457946776 + ], + [ + "▁Ojai", + -15.04433536529541 + ], + [ + "▁Titusville", + -15.044341087341309 + ], + [ + "▁powerboat", + -15.044342041015623 + ], + [ + "27-29", + -15.044351577758787 + ], + [ + "▁Deptford", + -15.044365882873535 + ], + [ + "▁bicep", + -15.044368743896484 + ], + [ + "▁2010-2011", + -15.044387817382812 + ], + [ + "▁Sexuality", + -15.044392585754396 + ], + [ + "▁tugged", + -15.044392585754396 + ], + [ + "Volunteering", + -15.044398307800291 + ], + [ + "▁tunable", + -15.044404029846191 + ], + [ + "▁Quirky", + -15.04441261291504 + ], + [ + "▁patronizing", + -15.04441261291504 + ], + [ + "▁Beloit", + -15.044413566589355 + ], + [ + "▁Jogger", + -15.044414520263672 + ], + [ + "▁Galvin", + -15.044425964355469 + ], + [ + "▁Beretta", + -15.04444408416748 + ], + [ + "▁reintegration", + -15.04444408416748 + ], + [ + "▁patted", + -15.04445457458496 + ], + [ + "rnstein", + -15.044459342956545 + ], + [ + "▁groundhog", + -15.04449462890625 + ], + [ + "▁Bottas", + -15.044498443603516 + ], + [ + "▁Unwind", + -15.04450225830078 + ], + [ + "Humility", + -15.044503211975098 + ], + [ + "elaar", + -15.04450798034668 + ], + [ + "▁Inspectorate", + -15.044508934020996 + ], + [ + "▁Swank", + -15.044510841369627 + ], + [ + "▁patrolled", + -15.04452896118164 + ], + [ + "▁audiology", + -15.044550895690918 + ], + [ + "▁Mooresville", + -15.044554710388184 + ], + [ + "projekt", + -15.044561386108398 + ], + [ + "▁ORGANIC", + -15.044561386108398 + ], + [ + "Msg", + -15.044575691223145 + ], + [ + "Partnering", + -15.044575691223145 + ], + [ + "▁Micra", + -15.044575691223145 + ], + [ + "▁rfg", + -15.044598579406738 + ], + [ + "2150-", + -15.04460906982422 + ], + [ + "▁beckoned", + -15.044612884521484 + ], + [ + "▁scolded", + -15.044612884521484 + ], + [ + "▁2013/14", + -15.04461669921875 + ], + [ + "▁LastPass", + -15.044644355773926 + ], + [ + "▁Heartburn", + -15.04464626312256 + ], + [ + "▁Diode", + -15.044669151306152 + ], + [ + "▁Cristal", + -15.044707298278809 + ], + [ + "patterned", + -15.044724464416504 + ], + [ + "▁Hawes", + -15.044724464416504 + ], + [ + "▁Israelite", + -15.044734001159668 + ], + [ + "▁jobseekers", + -15.044825553894045 + ], + [ + "▁Ramones", + -15.044829368591309 + ], + [ + "▁Sativa", + -15.04483413696289 + ], + [ + "▁urologist", + -15.044842720031738 + ], + [ + "Consulting", + -15.04486083984375 + ], + [ + "▁intentionality", + -15.044861793518066 + ], + [ + "▁FIBA", + -15.04488468170166 + ], + [ + "▁spiralling", + -15.044897079467772 + ], + [ + "▁liberalization", + -15.044915199279783 + ], + [ + "orbital", + -15.045076370239258 + ], + [ + "FMCSA", + -15.04511833190918 + ], + [ + "Sears", + -15.045161247253418 + ], + [ + "challenged", + -15.045164108276367 + ], + [ + "▁scoresheet", + -15.045165061950684 + ], + [ + 
"▁retaliated", + -15.045173645019531 + ], + [ + "MEE", + -15.045207977294922 + ], + [ + "ozier", + -15.045210838317873 + ], + [ + "terpene", + -15.045255661010742 + ], + [ + "▁Monumental", + -15.045259475708008 + ], + [ + "passionate", + -15.04530143737793 + ], + [ + "▁reignite", + -15.045318603515623 + ], + [ + "▁£350", + -15.04534149169922 + ], + [ + "Hitch", + -15.045376777648926 + ], + [ + "▁Putty", + -15.045425415039062 + ], + [ + "▁retweeted", + -15.045429229736328 + ], + [ + "▁Keita", + -15.04543399810791 + ], + [ + "▁marshland", + -15.045482635498049 + ], + [ + "pumping", + -15.045496940612791 + ], + [ + "▁Rusk", + -15.045560836791992 + ], + [ + "▁Rebound", + -15.045561790466309 + ], + [ + "▁Battleship", + -15.045653343200684 + ], + [ + "Stimulate", + -15.045654296875 + ], + [ + "▁redneck", + -15.04568099975586 + ], + [ + "▁wholegrain", + -15.0457181930542 + ], + [ + "▁Yona", + -15.045811653137209 + ], + [ + "▁moonstone", + -15.045811653137209 + ], + [ + "▁embellishing", + -15.045822143554688 + ], + [ + "▁pompous", + -15.045822143554688 + ], + [ + "▁Woodard", + -15.045827865600586 + ], + [ + "▁swampy", + -15.0458402633667 + ], + [ + "▁TWIN", + -15.045848846435549 + ], + [ + "▁btu", + -15.04586696624756 + ], + [ + "▁dullness", + -15.045892715454102 + ], + [ + "▁gutsy", + -15.045893669128418 + ], + [ + "▁reactivated", + -15.045920372009276 + ], + [ + "flix", + -15.045942306518556 + ], + [ + "Elvis", + -15.045995712280272 + ], + [ + "▁WEEKS", + -15.046079635620115 + ], + [ + "eanu", + -15.046095848083496 + ], + [ + "▁Westbury", + -15.046159744262695 + ], + [ + "1:54", + -15.046186447143556 + ], + [ + "Herald", + -15.046241760253906 + ], + [ + "▁Valdes", + -15.04624366760254 + ], + [ + "▁Cré", + -15.04625129699707 + ], + [ + "▁Yuba", + -15.04625415802002 + ], + [ + "hooray", + -15.046262741088867 + ], + [ + "▁Bodhi", + -15.046262741088867 + ], + [ + "▁Haku", + -15.046341896057127 + ], + [ + "▁Localization", + -15.046374320983888 + ], + [ + "▁Riverbed", + -15.046422004699709 + ], + [ + "▁mariners", + -15.04642391204834 + ], + [ + "Overweight", + -15.04645824432373 + ], + [ + "▁Chui", + -15.046506881713867 + ], + [ + "25′′", + -15.046554565429688 + ], + [ + "▁Spritz", + -15.046558380126951 + ], + [ + "▁Imagery", + -15.046697616577148 + ], + [ + "Exact", + -15.046751976013184 + ], + [ + "97-1", + -15.046814918518066 + ], + [ + "▁equalization", + -15.046825408935549 + ], + [ + "▁Gerrit", + -15.046870231628418 + ], + [ + "▁Drom", + -15.046899795532228 + ], + [ + "▁tamb", + -15.04699993133545 + ], + [ + "▁prefixed", + -15.047118186950684 + ], + [ + "Tattoo", + -15.047168731689451 + ], + [ + "▁Refus", + -15.04718017578125 + ], + [ + "ENU", + -15.047231674194336 + ], + [ + "arthro", + -15.047263145446776 + ], + [ + "unicode", + -15.04748821258545 + ], + [ + "▁vocab", + -15.047490119934082 + ], + [ + "Ravi", + -15.047532081604004 + ], + [ + "▁OHL", + -15.047697067260742 + ], + [ + "▁Rocher", + -15.047719955444336 + ], + [ + "hehehe", + -15.047735214233398 + ], + [ + "ENV", + -15.04804801940918 + ], + [ + "therington", + -15.048126220703123 + ], + [ + "diag", + -15.048147201538086 + ], + [ + "▁shutoff", + -15.048155784606934 + ], + [ + "▁instinctual", + -15.048165321350098 + ], + [ + "CYCLE", + -15.048245429992676 + ], + [ + "Biological", + -15.048272132873535 + ], + [ + "▁synod", + -15.048330307006836 + ], + [ + "▁Klay", + -15.048377990722656 + ], + [ + "2:48", + -15.048486709594728 + ], + [ + "mule", + -15.048569679260254 + ], + [ + "brite", + -15.048657417297363 + ], + [ + "ccino", + 
-15.04868984222412 + ], + [ + "▁Bijou", + -15.048750877380373 + ], + [ + "STOCK", + -15.048752784729004 + ], + [ + "▁Hund", + -15.048842430114746 + ], + [ + "▁Kalk", + -15.049234390258787 + ], + [ + "2:28", + -15.049243927001951 + ], + [ + "▁SBIR", + -15.049355506896973 + ], + [ + "0-162", + -15.049420356750488 + ], + [ + "▁Arthro", + -15.049684524536133 + ], + [ + "▁teeter", + -15.04993724822998 + ], + [ + "cevic", + -15.049957275390623 + ], + [ + "methi", + -15.050045013427734 + ], + [ + "2(3):", + -15.050048828125 + ], + [ + "Delighted", + -15.050050735473633 + ], + [ + "napper", + -15.05009937286377 + ], + [ + "▁Donato", + -15.05013942718506 + ], + [ + "Musk", + -15.05019474029541 + ], + [ + "UWS", + -15.050413131713867 + ], + [ + "CBO", + -15.050525665283203 + ], + [ + "▁panhandle", + -15.05060577392578 + ], + [ + "WWE", + -15.050647735595703 + ], + [ + "RIL", + -15.05076503753662 + ], + [ + "▁Belk", + -15.05088233947754 + ], + [ + "▁Shader", + -15.050965309143066 + ], + [ + "khet", + -15.05098819732666 + ], + [ + "constructing", + -15.050992965698242 + ], + [ + "6-21", + -15.051106452941896 + ], + [ + "specification", + -15.051167488098145 + ], + [ + "▁Hardee", + -15.051186561584473 + ], + [ + "▁SOAR", + -15.051376342773438 + ], + [ + "▁Kosta", + -15.051466941833496 + ], + [ + "5:28", + -15.05160903930664 + ], + [ + "▁14.6", + -15.051722526550291 + ], + [ + "6-13", + -15.051725387573242 + ], + [ + "▁MACH", + -15.051774978637695 + ], + [ + "CADE", + -15.051787376403809 + ], + [ + "▁barrow", + -15.051883697509766 + ], + [ + "▁Carpe", + -15.05198574066162 + ], + [ + "▁jeu", + -15.052066802978516 + ], + [ + "▁Burgh", + -15.052175521850586 + ], + [ + "richmond", + -15.052289009094238 + ], + [ + "▁sniffer", + -15.052810668945312 + ], + [ + "bunk", + -15.053060531616213 + ], + [ + "HCP", + -15.05307960510254 + ], + [ + "wohl", + -15.053264617919922 + ], + [ + "▁Fahr", + -15.0533447265625 + ], + [ + "dihydro", + -15.053441047668455 + ], + [ + "▁Sandia", + -15.053571701049805 + ], + [ + "1936", + -15.053607940673828 + ], + [ + "axter", + -15.053669929504396 + ], + [ + "ufen", + -15.053874969482422 + ], + [ + "Thanx", + -15.053960800170898 + ], + [ + "▁1781", + -15.054015159606934 + ], + [ + "Champ", + -15.05409049987793 + ], + [ + "ANDRE", + -15.05424690246582 + ], + [ + "frequently", + -15.054253578186035 + ], + [ + "HAU", + -15.054312705993652 + ], + [ + "▁TRIO", + -15.054391860961914 + ], + [ + "Poet", + -15.054400444030762 + ], + [ + "▁£120", + -15.054529190063477 + ], + [ + "▁Potent", + -15.05470371246338 + ], + [ + "NINE", + -15.05473518371582 + ], + [ + "wiec", + -15.054827690124512 + ], + [ + "ridium", + -15.054841995239258 + ], + [ + "▁Prick", + -15.055041313171388 + ], + [ + "Meantime", + -15.05506706237793 + ], + [ + "▁GENER", + -15.055086135864258 + ], + [ + "Flavor", + -15.055106163024902 + ], + [ + "▁HEX", + -15.055120468139648 + ], + [ + "indulgence", + -15.055140495300291 + ], + [ + "Yacht", + -15.055264472961426 + ], + [ + "Incorporating", + -15.055274963378906 + ], + [ + "Intuitive", + -15.05528736114502 + ], + [ + "Producing", + -15.055292129516602 + ], + [ + "Circuit", + -15.05529499053955 + ], + [ + "Mushroom", + -15.055296897888184 + ], + [ + "Therapeutic", + -15.055299758911133 + ], + [ + "shrunk", + -15.055303573608398 + ], + [ + "Solving", + -15.055305480957031 + ], + [ + "HEALTH", + -15.055306434631348 + ], + [ + "Illegal", + -15.055306434631348 + ], + [ + "Frequency", + -15.05531883239746 + ], + [ + "numbing", + -15.055322647094728 + ], + [ + "Buffet", + 
-15.055330276489258 + ], + [ + "Recognition", + -15.055330276489258 + ], + [ + "▁Schro", + -15.055334091186523 + ], + [ + "Swansea", + -15.05533504486084 + ], + [ + "satisfaction", + -15.05533504486084 + ], + [ + "Chiropractic", + -15.055335998535156 + ], + [ + "Chennai", + -15.055339813232422 + ], + [ + "Freight", + -15.055339813232422 + ], + [ + "enriched", + -15.055339813232422 + ], + [ + "Xiaomi", + -15.055340766906738 + ], + [ + "Excellence", + -15.055341720581056 + ], + [ + "Manhattan", + -15.055342674255373 + ], + [ + "Calgary", + -15.055350303649902 + ], + [ + "Reliability", + -15.055350303649902 + ], + [ + "Segment", + -15.055363655090332 + ], + [ + "Queensland", + -15.05536937713623 + ], + [ + "Triangle", + -15.055370330810549 + ], + [ + "Laminate", + -15.05538845062256 + ], + [ + "cancelling", + -15.05538845062256 + ], + [ + "Defence", + -15.055392265319824 + ], + [ + "Catering", + -15.05539894104004 + ], + [ + "Beaver", + -15.055418968200684 + ], + [ + "Sunrise", + -15.055439949035645 + ], + [ + "Joanne", + -15.055484771728516 + ], + [ + "▁03:3", + -15.055500984191896 + ], + [ + "Romance", + -15.055506706237791 + ], + [ + "labelled", + -15.055511474609377 + ], + [ + "Louisville", + -15.055529594421388 + ], + [ + "▁Malek", + -15.055586814880373 + ], + [ + "partnership", + -15.05560302734375 + ], + [ + "Inspector", + -15.055641174316406 + ], + [ + "▁splat", + -15.055648803710938 + ], + [ + "Forbes", + -15.055652618408203 + ], + [ + "Mj", + -15.0556640625 + ], + [ + "Simplify", + -15.055710792541504 + ], + [ + "Hardwood", + -15.05575942993164 + ], + [ + "▁dilate", + -15.055760383605955 + ], + [ + "Whale", + -15.055928230285645 + ], + [ + "▁Buoy", + -15.055950164794922 + ], + [ + "SINGAPORE", + -15.055960655212402 + ], + [ + "VOLUNTEER", + -15.055960655212402 + ], + [ + "▁888-609-", + -15.055960655212402 + ], + [ + "▁BREEAM", + -15.055960655212402 + ], + [ + "▁Bourgeois", + -15.055960655212402 + ], + [ + "▁Camcorder", + -15.055960655212402 + ], + [ + "▁Chechnya", + -15.055960655212402 + ], + [ + "▁Cordillera", + -15.055960655212402 + ], + [ + "▁Donohue", + -15.055960655212402 + ], + [ + "▁Dungannon", + -15.055960655212402 + ], + [ + "▁Embraer", + -15.055960655212402 + ], + [ + "▁Healdsburg", + -15.055960655212402 + ], + [ + "▁Huxley", + -15.055960655212402 + ], + [ + "▁Larnaca", + -15.055960655212402 + ], + [ + "▁Obasanjo", + -15.055960655212402 + ], + [ + "▁STAINLESS", + -15.055960655212402 + ], + [ + "▁Sumitomo", + -15.055960655212402 + ], + [ + "▁Trujillo", + -15.055960655212402 + ], + [ + "▁accretion", + -15.055960655212402 + ], + [ + "▁antisemitism", + -15.055960655212402 + ], + [ + "▁calibrating", + -15.055960655212402 + ], + [ + "▁celsius", + -15.055960655212402 + ], + [ + "▁conciliation", + -15.055960655212402 + ], + [ + "▁flotilla", + -15.055960655212402 + ], + [ + "▁hypothalamus", + -15.055960655212402 + ], + [ + "▁paleontologist", + -15.055960655212402 + ], + [ + "▁parodies", + -15.055960655212402 + ], + [ + "▁safflower", + -15.055960655212402 + ], + [ + "▁schooner", + -15.055960655212402 + ], + [ + "▁tantalum", + -15.055960655212402 + ], + [ + "▁Cormier", + -15.05596160888672 + ], + [ + "▁appartment", + -15.05596160888672 + ], + [ + "▁fission", + -15.05596160888672 + ], + [ + "▁gouache", + -15.05596160888672 + ], + [ + "Volatile", + -15.055962562561035 + ], + [ + "▁Blenheim", + -15.055962562561035 + ], + [ + "▁Lululemon", + -15.055962562561035 + ], + [ + "▁alliteration", + -15.055962562561035 + ], + [ + "▁colonnade", + -15.055962562561035 + ], + [ + "▁derided", + 
-15.055962562561035 + ], + [ + "JCPOA", + -15.055963516235352 + ], + [ + "▁Miserables", + -15.055963516235352 + ], + [ + "▁punjabi", + -15.055963516235352 + ], + [ + "▁Piraeus", + -15.055964469909668 + ], + [ + "▁DOLLAR", + -15.055965423583984 + ], + [ + "▁ethereum", + -15.0559663772583 + ], + [ + "▁whammy", + -15.0559663772583 + ], + [ + "XBRL", + -15.055968284606934 + ], + [ + "▁glabrous", + -15.055968284606934 + ], + [ + "actuated", + -15.05596923828125 + ], + [ + "▁Catheter", + -15.05596923828125 + ], + [ + "▁delirious", + -15.05596923828125 + ], + [ + "▁Altamonte", + -15.055971145629885 + ], + [ + "▁Waddell", + -15.055971145629885 + ], + [ + "▁latrine", + -15.055973052978516 + ], + [ + "▁squelch", + -15.055973052978516 + ], + [ + "▁Greaves", + -15.055974960327148 + ], + [ + "▁Grigio", + -15.055975914001465 + ], + [ + "▁Wagyu", + -15.055977821350098 + ], + [ + "Shelf", + -15.055978775024414 + ], + [ + "▁Rostov", + -15.05597972869873 + ], + [ + "▁tolerability", + -15.05597972869873 + ], + [ + "▁Confetti", + -15.055980682373049 + ], + [ + "▁Talvik", + -15.055980682373049 + ], + [ + "▁shanty", + -15.055981636047363 + ], + [ + "▁quebec", + -15.055983543395996 + ], + [ + "▁stylised", + -15.055983543395996 + ], + [ + "▁Cyclops", + -15.055984497070312 + ], + [ + "▁Buttermilk", + -15.055987358093262 + ], + [ + "▁illiquid", + -15.055987358093262 + ], + [ + "▁categorizing", + -15.055988311767578 + ], + [ + "▁Mukesh", + -15.055989265441896 + ], + [ + "sylvania", + -15.055990219116213 + ], + [ + "▁Tabasco", + -15.055991172790527 + ], + [ + "▁disquiet", + -15.055991172790527 + ], + [ + "▁perioperative", + -15.055991172790527 + ], + [ + "▁Baikal", + -15.05599308013916 + ], + [ + "▁Kailash", + -15.05599594116211 + ], + [ + "▁evidentiary", + -15.05599594116211 + ], + [ + "navy", + -15.056002616882324 + ], + [ + "▁Kratom", + -15.056002616882324 + ], + [ + "▁LAUNCH", + -15.056004524230955 + ], + [ + "▁Napster", + -15.056004524230955 + ], + [ + "▁microfibre", + -15.05600643157959 + ], + [ + "▁Prabang", + -15.056008338928224 + ], + [ + "▁Antiquity", + -15.056011199951172 + ], + [ + "▁Hendry", + -15.056012153625488 + ], + [ + "▁Merkur", + -15.056012153625488 + ], + [ + "▁ALPHA", + -15.056013107299805 + ], + [ + "ICKER", + -15.05601406097412 + ], + [ + "▁Ultron", + -15.05601406097412 + ], + [ + "▁cryotherapy", + -15.056015968322754 + ], + [ + "▁Whitechapel", + -15.05601692199707 + ], + [ + "▁Peralta", + -15.056021690368652 + ], + [ + "▁COMMON", + -15.05602741241455 + ], + [ + "▁CORNER", + -15.056029319763184 + ], + [ + "▁ducklings", + -15.056031227111816 + ], + [ + "▁flopped", + -15.056032180786133 + ], + [ + "▁Autonomy", + -15.056034088134766 + ], + [ + "▁NTSB", + -15.056036949157717 + ], + [ + "▁Glebe", + -15.05604076385498 + ], + [ + "▁obama", + -15.05604076385498 + ], + [ + "▁EURUSD", + -15.056044578552246 + ], + [ + "▁Bhutto", + -15.056050300598145 + ], + [ + "▁Clarins", + -15.056053161621094 + ], + [ + "▁Haircut", + -15.056055068969728 + ], + [ + "▁Contestants", + -15.056069374084473 + ], + [ + "▁razed", + -15.056070327758787 + ], + [ + "▁Sibley", + -15.056086540222168 + ], + [ + "▁Reserva", + -15.0560884475708 + ], + [ + "divided", + -15.056092262268066 + ], + [ + "▁Nidra", + -15.0560941696167 + ], + [ + "▁ghostwriting", + -15.056096076965332 + ], + [ + "walter", + -15.056099891662598 + ], + [ + "▁Addams", + -15.05610466003418 + ], + [ + "▁motley", + -15.056112289428713 + ], + [ + "▁Surplus", + -15.05611515045166 + ], + [ + "▁Milanese", + -15.056122779846191 + ], + [ + "▁Francine", + 
-15.056124687194824 + ], + [ + "▁Teague", + -15.056127548217772 + ], + [ + "▁recalibrate", + -15.056129455566406 + ], + [ + "▁passerby", + -15.056130409240724 + ], + [ + "▁Creditors", + -15.05613899230957 + ], + [ + "▁yoyo", + -15.056143760681152 + ], + [ + "▁mignon", + -15.056145668029783 + ], + [ + "▁Camila", + -15.056159019470217 + ], + [ + "▁Saddleback", + -15.056175231933594 + ], + [ + "padded", + -15.056190490722656 + ], + [ + "▁exhortation", + -15.056192398071287 + ], + [ + "▁Naylor", + -15.05620002746582 + ], + [ + "▁GRACE", + -15.056208610534668 + ], + [ + "▁microgrid", + -15.056219100952148 + ], + [ + "▁Imogen", + -15.056222915649414 + ], + [ + "▁Hazelwood", + -15.05622386932373 + ], + [ + "▁untoward", + -15.056231498718262 + ], + [ + "▁nanoparticle", + -15.056232452392578 + ], + [ + "▁Dumont", + -15.05624294281006 + ], + [ + "▁Juul", + -15.05624771118164 + ], + [ + "▁infuriated", + -15.05627155303955 + ], + [ + "▁Listeria", + -15.05628490447998 + ], + [ + "crowned", + -15.05629539489746 + ], + [ + "▁Shopkins", + -15.056306838989258 + ], + [ + "▁relented", + -15.05633544921875 + ], + [ + "▁frilly", + -15.056344032287598 + ], + [ + "▁Stockbridge", + -15.056344985961914 + ], + [ + "▁Purdy", + -15.056364059448242 + ], + [ + "▁Marcello", + -15.056382179260254 + ], + [ + "▁OCLC", + -15.05639362335205 + ], + [ + "▁lazing", + -15.056405067443848 + ], + [ + "Walsh", + -15.056422233581545 + ], + [ + "▁Cambrian", + -15.056426048278809 + ], + [ + "▁Boaz", + -15.056442260742188 + ], + [ + "▁Ochoa", + -15.056446075439451 + ], + [ + "▁LUCK", + -15.056452751159668 + ], + [ + "▁lithography", + -15.05646514892578 + ], + [ + "▁GRASS", + -15.056533813476562 + ], + [ + "▁Grundy", + -15.056562423706056 + ], + [ + "▁Rossini", + -15.056562423706056 + ], + [ + "▁slouchy", + -15.056572914123535 + ], + [ + "fenced", + -15.056598663330078 + ], + [ + "▁Revolt", + -15.056636810302734 + ], + [ + "▁PAWS", + -15.056649208068848 + ], + [ + "▁XPath", + -15.056703567504885 + ], + [ + "▁ELITE", + -15.056747436523438 + ], + [ + "▁Macaw", + -15.05675983428955 + ], + [ + "▁soapstone", + -15.056785583496094 + ], + [ + "ouni", + -15.056791305541992 + ], + [ + "▁Bynum", + -15.056836128234863 + ], + [ + "▁Groton", + -15.05685806274414 + ], + [ + "▁Neilson", + -15.056859970092772 + ], + [ + "▁detangle", + -15.056888580322266 + ], + [ + "Strom", + -15.056891441345217 + ], + [ + "bmp", + -15.05690860748291 + ], + [ + "▁quips", + -15.05691909790039 + ], + [ + "▁Sharepoint", + -15.056943893432615 + ], + [ + "▁squandering", + -15.05703830718994 + ], + [ + "▁Taney", + -15.057066917419434 + ], + [ + "▁speakerphone", + -15.057126998901367 + ], + [ + "▁Somaliland", + -15.057183265686035 + ], + [ + "MOUTH", + -15.057211875915527 + ], + [ + "▁grayish", + -15.057265281677246 + ], + [ + "utty", + -15.057307243347168 + ], + [ + "UnitedHealthcare", + -15.057332992553713 + ], + [ + "▁Bolden", + -15.057348251342772 + ], + [ + "Inspire", + -15.05740451812744 + ], + [ + "▁whizzing", + -15.057405471801758 + ], + [ + "UMC", + -15.057433128356934 + ], + [ + "▁NUC", + -15.057472229003906 + ], + [ + "▁Pippin", + -15.057482719421388 + ], + [ + "▁backsplashes", + -15.05749797821045 + ], + [ + "banana", + -15.057530403137209 + ], + [ + "▁RENTAL", + -15.05753231048584 + ], + [ + "6,800", + -15.057539939880373 + ], + [ + "▁Constructing", + -15.05756950378418 + ], + [ + "▁opine", + -15.05756950378418 + ], + [ + "▁Colter", + -15.057647705078123 + ], + [ + "Bilateral", + -15.057666778564451 + ], + [ + "▁extinguishing", + -15.057666778564451 + ], + [ + 
"Closer", + -15.057703971862791 + ], + [ + "▁burly", + -15.057859420776367 + ], + [ + "promissory", + -15.057960510253906 + ], + [ + "▁fiercest", + -15.057977676391602 + ], + [ + "implement", + -15.05802059173584 + ], + [ + "Crab", + -15.058035850524902 + ], + [ + "9-16", + -15.058039665222168 + ], + [ + "▁typesetting", + -15.058063507080078 + ], + [ + "▁puppeteer", + -15.058079719543455 + ], + [ + "▁Havelock", + -15.058218002319336 + ], + [ + "▁Hammon", + -15.058218955993652 + ], + [ + "▁keyframe", + -15.058265686035156 + ], + [ + "couldn", + -15.05832862854004 + ], + [ + "▁Seaboard", + -15.058341979980469 + ], + [ + "publik", + -15.058379173278809 + ], + [ + "▁Whitefish", + -15.058428764343262 + ], + [ + "PSYCH", + -15.058444023132324 + ], + [ + "▁OPTIM", + -15.05859661102295 + ], + [ + "▁kry", + -15.05875301361084 + ], + [ + "torch", + -15.058797836303713 + ], + [ + "poem", + -15.05893611907959 + ], + [ + "▁Awful", + -15.058940887451172 + ], + [ + "▁0.17", + -15.059232711791992 + ], + [ + "▁CAPE", + -15.059282302856444 + ], + [ + "▁Liguria", + -15.059293746948242 + ], + [ + "preview", + -15.05930233001709 + ], + [ + "construct", + -15.05937671661377 + ], + [ + "▁Shanna", + -15.059430122375488 + ], + [ + "▁OTG", + -15.059481620788574 + ], + [ + "▁Schoenberg", + -15.059572219848633 + ], + [ + "▁Fib", + -15.059584617614746 + ], + [ + "▁tipper", + -15.05960750579834 + ], + [ + "▁juxtapose", + -15.059609413146973 + ], + [ + "Stuck", + -15.059710502624512 + ], + [ + "zzard", + -15.059718132019045 + ], + [ + "Estonia", + -15.05972385406494 + ], + [ + "ibh", + -15.05984592437744 + ], + [ + "▁dsl", + -15.059900283813477 + ], + [ + "oncology", + -15.059943199157717 + ], + [ + "bail", + -15.059988975524902 + ], + [ + "▁Newsroom", + -15.060028076171877 + ], + [ + "▁Kelso", + -15.060141563415527 + ], + [ + "▁Quang", + -15.06025218963623 + ], + [ + "▁PDM", + -15.06027603149414 + ], + [ + "▁Dinge", + -15.060447692871094 + ], + [ + "utf", + -15.060553550720217 + ], + [ + "▁Seche", + -15.06060791015625 + ], + [ + "▁Pollo", + -15.060905456542969 + ], + [ + "▁lambast", + -15.06093692779541 + ], + [ + "▁Retiree", + -15.06106662750244 + ], + [ + "ynne", + -15.061102867126465 + ], + [ + "▁Jacin", + -15.061144828796388 + ], + [ + "▁convo", + -15.0612211227417 + ], + [ + "tacular", + -15.06126594543457 + ], + [ + "▁lear", + -15.06128215789795 + ], + [ + "▁encontr", + -15.061285972595217 + ], + [ + "▁improviser", + -15.061634063720703 + ], + [ + "1.41", + -15.061798095703123 + ], + [ + "ornament", + -15.061824798583984 + ], + [ + "Remix", + -15.061853408813477 + ], + [ + "preet", + -15.0619535446167 + ], + [ + "▁cartoonish", + -15.062121391296388 + ], + [ + "Knee", + -15.062159538269045 + ], + [ + "idze", + -15.062235832214355 + ], + [ + "▁Chola", + -15.06224536895752 + ], + [ + "1310", + -15.06225872039795 + ], + [ + "▁MetaMod", + -15.062341690063477 + ], + [ + "▁Mitchel", + -15.062403678894045 + ], + [ + "▁Cush", + -15.062467575073242 + ], + [ + "▁Starlet", + -15.0625638961792 + ], + [ + "▁USMC", + -15.062664985656738 + ], + [ + "Router", + -15.062833786010742 + ], + [ + ".08%", + -15.06283473968506 + ], + [ + "▁HALO", + -15.06285285949707 + ], + [ + "▁molto", + -15.062854766845703 + ], + [ + "0-216", + -15.062929153442385 + ], + [ + "1885", + -15.06293487548828 + ], + [ + "▁Flourish", + -15.06297206878662 + ], + [ + "▁CARDS", + -15.06300449371338 + ], + [ + "CBP", + -15.063098907470703 + ], + [ + "Rug", + -15.063104629516602 + ], + [ + "eacock", + -15.063414573669434 + ], + [ + "▁relearn", + 
-15.06358814239502 + ], + [ + "NRF", + -15.063620567321776 + ], + [ + "▁Neva", + -15.063715934753418 + ], + [ + "▁Aliment", + -15.06398868560791 + ], + [ + "▁0844", + -15.06411361694336 + ], + [ + "▁Alcal", + -15.06412124633789 + ], + [ + "▁Kessel", + -15.06417751312256 + ], + [ + "▁14.3", + -15.064210891723633 + ], + [ + "glorifying", + -15.06426239013672 + ], + [ + "▁$4.7", + -15.06450366973877 + ], + [ + "▁Woof", + -15.06451416015625 + ], + [ + "4:14", + -15.06454372406006 + ], + [ + "▁DESIGNER", + -15.064589500427246 + ], + [ + "▁Filippo", + -15.06478786468506 + ], + [ + "▁Spil", + -15.064857482910156 + ], + [ + "1:57", + -15.064871788024902 + ], + [ + "shree", + -15.065025329589844 + ], + [ + "3-14", + -15.0650634765625 + ], + [ + "gruppe", + -15.065065383911133 + ], + [ + "guay", + -15.065160751342772 + ], + [ + "▁Gruen", + -15.065385818481444 + ], + [ + "▁chug", + -15.065435409545898 + ], + [ + "▁Jiao", + -15.065481185913086 + ], + [ + "PEG", + -15.065529823303224 + ], + [ + "striped", + -15.065559387207031 + ], + [ + "▁mongo", + -15.06557273864746 + ], + [ + "LEMENT", + -15.06562614440918 + ], + [ + "%0", + -15.065897941589355 + ], + [ + "▁Petter", + -15.06598663330078 + ], + [ + "niger", + -15.065998077392578 + ], + [ + "OOPS", + -15.066014289855955 + ], + [ + "MLB", + -15.066067695617676 + ], + [ + "2018-06-", + -15.066282272338867 + ], + [ + "▁7700", + -15.06631851196289 + ], + [ + "leap", + -15.066330909729004 + ], + [ + "Institution", + -15.066376686096191 + ], + [ + "actualization", + -15.066376686096191 + ], + [ + "▁Malan", + -15.06637954711914 + ], + [ + "▁EDP", + -15.066442489624023 + ], + [ + "Fitting", + -15.066485404968262 + ], + [ + "wishers", + -15.066624641418455 + ], + [ + "Morph", + -15.066689491271973 + ], + [ + "▁Rhoda", + -15.06678867340088 + ], + [ + "BDA", + -15.06686305999756 + ], + [ + "▁EGR", + -15.066998481750488 + ], + [ + "▁JAW", + -15.067031860351562 + ], + [ + "MSFT", + -15.067069053649902 + ], + [ + "▁misstep", + -15.067145347595217 + ], + [ + "compulsive", + -15.0673246383667 + ], + [ + "anyang", + -15.067338943481444 + ], + [ + "▁bureaucrat", + -15.067353248596191 + ], + [ + "Hazard", + -15.067365646362305 + ], + [ + "Peanut", + -15.067392349243164 + ], + [ + "▁Varna", + -15.06739616394043 + ], + [ + "Surround", + -15.06740951538086 + ], + [ + "Scholar", + -15.067418098449709 + ], + [ + "▁Henna", + -15.067434310913086 + ], + [ + "Neighbour", + -15.06747055053711 + ], + [ + "Collectibles", + -15.067502975463867 + ], + [ + "Precise", + -15.06750774383545 + ], + [ + "fluence", + -15.06750774383545 + ], + [ + "configurable", + -15.067516326904297 + ], + [ + "Natasha", + -15.067519187927246 + ], + [ + "Sixteen", + -15.067520141601562 + ], + [ + "Visible", + -15.067526817321776 + ], + [ + "Quarry", + -15.067527770996094 + ], + [ + "Beverly", + -15.06753158569336 + ], + [ + "Memphis", + -15.067533493041992 + ], + [ + "Jamaica", + -15.067537307739258 + ], + [ + "Weapon", + -15.067538261413574 + ], + [ + "yesterday", + -15.067540168762209 + ], + [ + "wisdom", + -15.06754207611084 + ], + [ + "incredible", + -15.067543983459473 + ], + [ + "stemmed", + -15.067545890808104 + ], + [ + "Robbie", + -15.06755542755127 + ], + [ + "variety", + -15.067561149597168 + ], + [ + "rotating", + -15.067607879638672 + ], + [ + "Olympus", + -15.06763744354248 + ], + [ + "FPO", + -15.067669868469238 + ], + [ + "▁Lyrical", + -15.067681312561035 + ], + [ + "peanut", + -15.067689895629885 + ], + [ + "Reasonable", + -15.067790031433104 + ], + [ + "diagnosed", + -15.067816734313965 + 
], + [ + "Artistic", + -15.067852973937988 + ], + [ + "▁Kalu", + -15.067859649658203 + ], + [ + "▁amputated", + -15.067864418029783 + ], + [ + "Ghirardelli", + -15.067865371704102 + ], + [ + "palooza", + -15.067865371704102 + ], + [ + "▁Abdulaziz", + -15.067865371704102 + ], + [ + "▁Ancelotti", + -15.067865371704102 + ], + [ + "▁Antichrist", + -15.067865371704102 + ], + [ + "▁Ballantyne", + -15.067865371704102 + ], + [ + "▁Bowdoin", + -15.067865371704102 + ], + [ + "▁Comprehension", + -15.067865371704102 + ], + [ + "▁Detergent", + -15.067865371704102 + ], + [ + "▁Eldridge", + -15.067865371704102 + ], + [ + "▁Gorbachev", + -15.067865371704102 + ], + [ + "▁Gwyneth", + -15.067865371704102 + ], + [ + "▁Invigorate", + -15.067865371704102 + ], + [ + "▁JACKSON", + -15.067865371704102 + ], + [ + "▁Jawaharlal", + -15.067865371704102 + ], + [ + "▁Jorgensen", + -15.067865371704102 + ], + [ + "▁Kobayashi", + -15.067865371704102 + ], + [ + "▁Narasimha", + -15.067865371704102 + ], + [ + "▁Pritzker", + -15.067865371704102 + ], + [ + "▁Sheboygan", + -15.067865371704102 + ], + [ + "▁Shinjuku", + -15.067865371704102 + ], + [ + "▁Streisand", + -15.067865371704102 + ], + [ + "▁Thoracic", + -15.067865371704102 + ], + [ + "▁apostrophe", + -15.067865371704102 + ], + [ + "▁dehydrogenase", + -15.067865371704102 + ], + [ + "▁impersonating", + -15.067865371704102 + ], + [ + "▁jubilee", + -15.067865371704102 + ], + [ + "▁mercurial", + -15.067865371704102 + ], + [ + "▁periodontist", + -15.067865371704102 + ], + [ + "▁rhinitis", + -15.067865371704102 + ], + [ + "▁traips", + -15.067865371704102 + ], + [ + "▁transliteration", + -15.067865371704102 + ], + [ + "▁unhurried", + -15.067865371704102 + ], + [ + "▁unleaded", + -15.067865371704102 + ], + [ + "▁Kavanagh", + -15.067866325378418 + ], + [ + "▁Newquay", + -15.067866325378418 + ], + [ + "▁arthropod", + -15.067866325378418 + ], + [ + "▁decoupage", + -15.067866325378418 + ], + [ + "▁vinyasa", + -15.067866325378418 + ], + [ + "▁Banshee", + -15.067867279052734 + ], + [ + "▁Convection", + -15.067867279052734 + ], + [ + "▁Embossing", + -15.067867279052734 + ], + [ + "▁prostheses", + -15.067867279052734 + ], + [ + "▁uncaring", + -15.067867279052734 + ], + [ + "▁Spidey", + -15.06786823272705 + ], + [ + "▁aberrant", + -15.06786823272705 + ], + [ + "▁Schrader", + -15.067869186401367 + ], + [ + "▁foibles", + -15.067869186401367 + ], + [ + "▁Conyers", + -15.067870140075684 + ], + [ + "▁Estancia", + -15.067872047424316 + ], + [ + "▁demented", + -15.067872047424316 + ], + [ + "▁libretto", + -15.067872047424316 + ], + [ + "▁AGENCY", + -15.067873001098633 + ], + [ + "▁Sakthi", + -15.067873001098633 + ], + [ + "▁COLOUR", + -15.06787395477295 + ], + [ + "▁Vendetta", + -15.067874908447266 + ], + [ + "▁VoLTE", + -15.067874908447266 + ], + [ + "▁César", + -15.067875862121582 + ], + [ + "▁Felony", + -15.067875862121582 + ], + [ + "▁Fermentation", + -15.067875862121582 + ], + [ + "▁louvre", + -15.067875862121582 + ], + [ + "▁geophysics", + -15.067878723144531 + ], + [ + "▁Mehmet", + -15.067880630493164 + ], + [ + "▁Ammunition", + -15.06788158416748 + ], + [ + "▁Staats", + -15.06788158416748 + ], + [ + "▁cyclocross", + -15.06788158416748 + ], + [ + "▁Transatlantic", + -15.067882537841797 + ], + [ + "▁Yojana", + -15.067886352539062 + ], + [ + "▁neurosurgery", + -15.067886352539062 + ], + [ + "▁Sperry", + -15.067893028259276 + ], + [ + "▁FAITH", + -15.067895889282228 + ], + [ + "▁Shaggy", + -15.06789779663086 + ], + [ + "▁Malpractice", + -15.067898750305176 + ], + [ + "▁foretold", + 
-15.067904472351074 + ], + [ + "▁Regulated", + -15.067909240722656 + ], + [ + "▁Yisrael", + -15.067912101745604 + ], + [ + "▁Reconstructive", + -15.067914009094238 + ], + [ + "▁Sistine", + -15.067914009094238 + ], + [ + "▁skatepark", + -15.067916870117188 + ], + [ + "▁godfather", + -15.067919731140137 + ], + [ + "▁jutting", + -15.06792163848877 + ], + [ + "▁CENTRE", + -15.067925453186035 + ], + [ + "▁(1968)", + -15.06793212890625 + ], + [ + "▁Alonzo", + -15.067944526672363 + ], + [ + "▁clamour", + -15.067944526672363 + ], + [ + "▁disengagement", + -15.067955017089844 + ], + [ + "▁radii", + -15.067956924438477 + ], + [ + "▁Akram", + -15.067967414855955 + ], + [ + "▁concord", + -15.06796932220459 + ], + [ + "▁Orphanage", + -15.067992210388184 + ], + [ + "▁Cortina", + -15.06799602508545 + ], + [ + "▁undemocratic", + -15.067999839782717 + ], + [ + "▁Bhadrak", + -15.068000793457031 + ], + [ + "infant", + -15.068008422851562 + ], + [ + "▁stratton", + -15.068023681640623 + ], + [ + "▁Xiaolong", + -15.068025588989258 + ], + [ + "▁Guava", + -15.068026542663574 + ], + [ + "▁Shivaji", + -15.068034172058104 + ], + [ + "▁submenu", + -15.068034172058104 + ], + [ + "▁catheterization", + -15.068039894104004 + ], + [ + "▁début", + -15.068045616149902 + ], + [ + "▁Eloise", + -15.068050384521484 + ], + [ + "▁salivating", + -15.06805419921875 + ], + [ + "▁legibility", + -15.06806468963623 + ], + [ + "▁Shuster", + -15.068070411682127 + ], + [ + "Proposed", + -15.068074226379396 + ], + [ + "▁Chicano", + -15.06808853149414 + ], + [ + "imbi", + -15.06809425354004 + ], + [ + "▁Sultanate", + -15.068095207214355 + ], + [ + "▁puppetry", + -15.068097114562988 + ], + [ + "▁reprogramming", + -15.068102836608888 + ], + [ + "▁Inslee", + -15.068119049072266 + ], + [ + "▁Vassar", + -15.068126678466797 + ], + [ + "▁tangling", + -15.068135261535645 + ], + [ + "▁Spitzer", + -15.068143844604492 + ], + [ + "▁Dominant", + -15.06814670562744 + ], + [ + "▁GetResponse", + -15.068169593811035 + ], + [ + "▁evangelize", + -15.06817626953125 + ], + [ + "▁torpedoes", + -15.068180084228516 + ], + [ + "▁Ravenna", + -15.068209648132324 + ], + [ + "smartphone", + -15.06821346282959 + ], + [ + "▁trinket", + -15.068263053894045 + ], + [ + "CONCLUSIONS", + -15.068265914916992 + ], + [ + "▁Provincetown", + -15.068266868591309 + ], + [ + "▁judiciously", + -15.06829071044922 + ], + [ + "▁Ylang", + -15.068319320678713 + ], + [ + "Prioritize", + -15.068363189697266 + ], + [ + "▁Hirst", + -15.068365097045898 + ], + [ + "9:14", + -15.068403244018556 + ], + [ + "Disabled", + -15.068405151367188 + ], + [ + "▁silversmith", + -15.068405151367188 + ], + [ + "▁Guiana", + -15.068419456481934 + ], + [ + "▁RIPE", + -15.068419456481934 + ], + [ + "▁Marymount", + -15.068424224853516 + ], + [ + "IMPLY", + -15.068432807922363 + ], + [ + "▁impounded", + -15.068456649780272 + ], + [ + "MySQL", + -15.068461418151855 + ], + [ + "▁FRANCE", + -15.068463325500488 + ], + [ + "HND", + -15.068469047546388 + ], + [ + "▁percutaneous", + -15.068538665771484 + ], + [ + "▁EMD", + -15.068564414978027 + ], + [ + "▁CNAME", + -15.068589210510254 + ], + [ + "▁brainwave", + -15.068625450134276 + ], + [ + "5:14", + -15.06864070892334 + ], + [ + "triggered", + -15.068642616271973 + ], + [ + "rugby", + -15.068647384643556 + ], + [ + "▁Vasu", + -15.068649291992188 + ], + [ + "▁Kuching", + -15.068717956542969 + ], + [ + "▁varnishes", + -15.068770408630373 + ], + [ + "▁Chesterton", + -15.068793296813965 + ], + [ + "▁KONG", + -15.068838119506836 + ], + [ + "▁Anakin", + -15.068862915039062 + 
], + [ + "▁firestorm", + -15.06886386871338 + ], + [ + "▁pounced", + -15.06889533996582 + ], + [ + "▁faltered", + -15.06890869140625 + ], + [ + "▁Skyler", + -15.068921089172363 + ], + [ + "EFE", + -15.068963050842283 + ], + [ + "▁Poway", + -15.068981170654297 + ], + [ + "▁trawling", + -15.069005012512209 + ], + [ + "▁Renewed", + -15.069031715393066 + ], + [ + "KEEP", + -15.069053649902344 + ], + [ + "▁07:5", + -15.06907844543457 + ], + [ + "▁refried", + -15.069082260131836 + ], + [ + "▁Hoyer", + -15.069092750549316 + ], + [ + "▁Inscription", + -15.069096565246582 + ], + [ + "▁epitomized", + -15.069162368774414 + ], + [ + "▁25-27", + -15.06916332244873 + ], + [ + "▁Sucker", + -15.069178581237791 + ], + [ + "osmos", + -15.069184303283691 + ], + [ + "▁Bargaining", + -15.069242477416992 + ], + [ + "Freebie", + -15.069302558898926 + ], + [ + "▁Leaky", + -15.069327354431152 + ], + [ + "Diseno", + -15.069402694702148 + ], + [ + "▁hoverboard", + -15.069429397583008 + ], + [ + "▁recaptured", + -15.069430351257324 + ], + [ + "Loughlin", + -15.069506645202637 + ], + [ + "▁Cosme", + -15.06953239440918 + ], + [ + "1.0.0", + -15.069554328918455 + ], + [ + "▁Marrow", + -15.069581985473633 + ], + [ + "▁covertly", + -15.069602012634276 + ], + [ + "▁Kyung", + -15.069602966308594 + ], + [ + "negating", + -15.069643020629885 + ], + [ + "Moonlight", + -15.06965446472168 + ], + [ + "Contributed", + -15.069661140441896 + ], + [ + "▁fruitiness", + -15.069690704345703 + ], + [ + "▁Williamstown", + -15.0697660446167 + ], + [ + "▁Solent", + -15.069814682006836 + ], + [ + "anovic", + -15.069995880126951 + ], + [ + "INTEG", + -15.070076942443848 + ], + [ + "▁superficially", + -15.070087432861328 + ], + [ + "▁Payson", + -15.070113182067873 + ], + [ + "▁Eldon", + -15.070161819458008 + ], + [ + "▁Kennett", + -15.07016372680664 + ], + [ + "éré", + -15.070195198059082 + ], + [ + "▁physiologically", + -15.070219039916992 + ], + [ + "▁Doggie", + -15.070300102233888 + ], + [ + "▁Grew", + -15.070466995239258 + ], + [ + "LOM", + -15.070515632629396 + ], + [ + "promo", + -15.070516586303713 + ], + [ + "OLET", + -15.07053565979004 + ], + [ + "▁03:4", + -15.070551872253418 + ], + [ + "haq", + -15.070680618286133 + ], + [ + "▁Ibero", + -15.07069969177246 + ], + [ + "▁priestess", + -15.070708274841309 + ], + [ + "Kernel", + -15.070727348327637 + ], + [ + "contextual", + -15.07077693939209 + ], + [ + "▁Metra", + -15.07082462310791 + ], + [ + "underground", + -15.070923805236816 + ], + [ + "▁Mami", + -15.070945739746094 + ], + [ + "▁Aldrin", + -15.07104778289795 + ], + [ + "▁Deniz", + -15.07107925415039 + ], + [ + "▁DNF", + -15.071080207824709 + ], + [ + "Anita", + -15.071114540100098 + ], + [ + "1122", + -15.071280479431152 + ], + [ + "PMCID", + -15.071463584899902 + ], + [ + "chul", + -15.071475982666016 + ], + [ + "▁Originat", + -15.071535110473633 + ], + [ + "analog", + -15.071568489074709 + ], + [ + "davidson", + -15.071572303771973 + ], + [ + "▁Duda", + -15.071796417236328 + ], + [ + "▁Huay", + -15.07183074951172 + ], + [ + "▁seismo", + -15.0718412399292 + ], + [ + "▁morn", + -15.0719575881958 + ], + [ + "Pharm", + -15.072163581848145 + ], + [ + "0.26", + -15.072183609008787 + ], + [ + "freund", + -15.072274208068848 + ], + [ + "COTS", + -15.07229232788086 + ], + [ + "kriti", + -15.072416305541992 + ], + [ + "epson", + -15.072524070739746 + ], + [ + "▁Culp", + -15.072668075561523 + ], + [ + "1.09", + -15.07273006439209 + ], + [ + "JES", + -15.07282829284668 + ], + [ + "acki", + -15.072864532470703 + ], + [ + "TTT", + 
-15.072908401489258 + ], + [ + "▁Holling", + -15.073006629943848 + ], + [ + "▁$599", + -15.07302188873291 + ], + [ + "rinda", + -15.073230743408203 + ], + [ + "▁EEO", + -15.073254585266112 + ], + [ + "Disconnect", + -15.073393821716309 + ], + [ + "Connex", + -15.073508262634276 + ], + [ + "raving", + -15.073637962341309 + ], + [ + "serif", + -15.073809623718262 + ], + [ + "phagi", + -15.073841094970703 + ], + [ + "▁groundcover", + -15.073848724365234 + ], + [ + "pocalypse", + -15.0740966796875 + ], + [ + "ELP", + -15.074175834655762 + ], + [ + "POC", + -15.07422924041748 + ], + [ + "Dustin", + -15.074262619018556 + ], + [ + "▁Camus", + -15.074397087097168 + ], + [ + "▁Huan", + -15.074419975280762 + ], + [ + "Retain", + -15.074509620666504 + ], + [ + "ати", + -15.07453155517578 + ], + [ + "она", + -15.07468605041504 + ], + [ + "▁Bhav", + -15.07468605041504 + ], + [ + "phonia", + -15.074714660644531 + ], + [ + "urnal", + -15.074736595153809 + ], + [ + "▁Lingua", + -15.0747652053833 + ], + [ + "▁cré", + -15.07486629486084 + ], + [ + "Manu", + -15.074987411499023 + ], + [ + "▁foretell", + -15.075156211853027 + ], + [ + "11-12", + -15.0753755569458 + ], + [ + "▁Prank", + -15.075443267822266 + ], + [ + "▁Trex", + -15.075474739074709 + ], + [ + "▁Treb", + -15.075526237487791 + ], + [ + "▁Gluco", + -15.075532913208008 + ], + [ + "▁Spud", + -15.075584411621094 + ], + [ + "▁SMF", + -15.075756072998049 + ], + [ + "nication", + -15.07579517364502 + ], + [ + "▁Stamper", + -15.07580280303955 + ], + [ + "Env", + -15.075873374938965 + ], + [ + "Reform", + -15.076047897338867 + ], + [ + "▁deselect", + -15.0760498046875 + ], + [ + "universe", + -15.076090812683104 + ], + [ + "1906", + -15.076165199279783 + ], + [ + "▁Smartwatch", + -15.076228141784668 + ], + [ + "▁Juke", + -15.076229095458984 + ], + [ + "AGED", + -15.076461791992188 + ], + [ + "▁Rausch", + -15.076508522033691 + ], + [ + "larg", + -15.076513290405272 + ], + [ + "judgment", + -15.076621055603027 + ], + [ + "▁17.7", + -15.076848983764648 + ], + [ + "ylparaben", + -15.076956748962402 + ], + [ + "▁Efes", + -15.076961517333984 + ], + [ + "▁Communi", + -15.077049255371094 + ], + [ + "groutable", + -15.077113151550291 + ], + [ + "ixx", + -15.077116012573242 + ], + [ + "▁GSX", + -15.077131271362305 + ], + [ + "▁Solver", + -15.077205657958984 + ], + [ + "▁Womb", + -15.077277183532717 + ], + [ + "▁Tyco", + -15.077391624450684 + ], + [ + "estan", + -15.077454566955566 + ], + [ + "▁Tonka", + -15.077472686767578 + ], + [ + "▁Cosy", + -15.077635765075684 + ], + [ + "dijk", + -15.077704429626465 + ], + [ + "gabe", + -15.077800750732422 + ], + [ + "▁Marjor", + -15.077866554260254 + ], + [ + "mistake", + -15.077899932861328 + ], + [ + "▁Gigant", + -15.077947616577148 + ], + [ + "6,400", + -15.078112602233888 + ], + [ + "HALL", + -15.078176498413086 + ], + [ + "élé", + -15.078282356262209 + ], + [ + "▁Shwe", + -15.078402519226074 + ], + [ + "Porn", + -15.078483581542969 + ], + [ + "entangle", + -15.078487396240234 + ], + [ + "▁mutt", + -15.078594207763672 + ], + [ + "Huang", + -15.078619956970217 + ], + [ + "zane", + -15.078621864318848 + ], + [ + "relaying", + -15.078661918640137 + ], + [ + "cubic", + -15.078670501708984 + ], + [ + "Batch", + -15.079134941101074 + ], + [ + "1928", + -15.079201698303224 + ], + [ + "▁Devan", + -15.079285621643066 + ], + [ + "▁Circul", + -15.079422950744627 + ], + [ + "▁Continua", + -15.079480171203612 + ], + [ + "тан", + -15.079548835754396 + ], + [ + "jordan", + -15.07960319519043 + ], + [ + "Duplicate", + -15.07981300354004 
+ ], + [ + "Restricted", + -15.079818725585938 + ], + [ + "IMPACT", + -15.079834938049316 + ], + [ + "INCLUDING", + -15.07985496520996 + ], + [ + "Collision", + -15.07985782623291 + ], + [ + "Inspiring", + -15.079863548278809 + ], + [ + "Wolves", + -15.07986545562744 + ], + [ + "mechanics", + -15.07986545562744 + ], + [ + "defendant", + -15.07986831665039 + ], + [ + "sacrifice", + -15.07986831665039 + ], + [ + "Garcinia", + -15.079870223999023 + ], + [ + "Patterson", + -15.07987117767334 + ], + [ + "Worcester", + -15.079872131347656 + ], + [ + "tournament", + -15.079872131347656 + ], + [ + "Raspberry", + -15.079874992370604 + ], + [ + "Compensation", + -15.079875946044922 + ], + [ + "Champagne", + -15.079876899719238 + ], + [ + "Tanzania", + -15.079876899719238 + ], + [ + "Krishna", + -15.079883575439451 + ], + [ + "governance", + -15.079883575439451 + ], + [ + "Belgian", + -15.079886436462402 + ], + [ + "communities", + -15.07988739013672 + ], + [ + "DEE", + -15.079890251159668 + ], + [ + "Mozilla", + -15.07990264892578 + ], + [ + "canadian", + -15.079906463623049 + ], + [ + "Dehydration", + -15.079913139343262 + ], + [ + "Mitochondria", + -15.079913139343262 + ], + [ + "Rejuvenate", + -15.079913139343262 + ], + [ + "Shabaab", + -15.079913139343262 + ], + [ + "elizabeth", + -15.079913139343262 + ], + [ + "ishvili", + -15.079913139343262 + ], + [ + "priligy", + -15.079913139343262 + ], + [ + "schneider", + -15.079913139343262 + ], + [ + "▁888-282-0476", + -15.079913139343262 + ], + [ + "▁ACTUALLY", + -15.079913139343262 + ], + [ + "▁Alnwick", + -15.079913139343262 + ], + [ + "▁Bloglovin", + -15.079913139343262 + ], + [ + "▁EEPROM", + -15.079913139343262 + ], + [ + "▁Jalapeno", + -15.079913139343262 + ], + [ + "▁Kinshasa", + -15.079913139343262 + ], + [ + "▁MAGAZINE", + -15.079913139343262 + ], + [ + "▁Mycobacterium", + -15.079913139343262 + ], + [ + "▁OpenStreetMap", + -15.079913139343262 + ], + [ + "▁Sedgwick", + -15.079913139343262 + ], + [ + "▁Stanislaus", + -15.079913139343262 + ], + [ + "▁Titicaca", + -15.079913139343262 + ], + [ + "▁Travertine", + -15.079913139343262 + ], + [ + "▁Tuskegee", + -15.079913139343262 + ], + [ + "▁Walthamstow", + -15.079913139343262 + ], + [ + "▁Waukegan", + -15.079913139343262 + ], + [ + "▁Ypsilanti", + -15.079913139343262 + ], + [ + "▁constancy", + -15.079913139343262 + ], + [ + "▁displeased", + -15.079913139343262 + ], + [ + "▁drudgery", + -15.079913139343262 + ], + [ + "▁epidemiologist", + -15.079913139343262 + ], + [ + "▁galvanizing", + -15.079913139343262 + ], + [ + "▁ginormous", + -15.079913139343262 + ], + [ + "▁incineration", + -15.079913139343262 + ], + [ + "▁mesenchymal", + -15.079913139343262 + ], + [ + "▁mollusc", + -15.079913139343262 + ], + [ + "▁paucity", + -15.079913139343262 + ], + [ + "▁subsidizing", + -15.079913139343262 + ], + [ + "▁survivability", + -15.079913139343262 + ], + [ + "▁synchronisation", + -15.079913139343262 + ], + [ + "▁tendinitis", + -15.079913139343262 + ], + [ + "▁tripartite", + -15.079913139343262 + ], + [ + "▁unceasing", + -15.079913139343262 + ], + [ + "▁vagaries", + -15.079913139343262 + ], + [ + "▁Catamaran", + -15.079914093017578 + ], + [ + "▁Wausau", + -15.079914093017578 + ], + [ + "▁archeology", + -15.079914093017578 + ], + [ + "▁deluded", + -15.079914093017578 + ], + [ + "▁Jezebel", + -15.079915046691896 + ], + [ + "▁Vandenberg", + -15.079915046691896 + ], + [ + "▁cabriole", + -15.079915046691896 + ], + [ + "▁emigrants", + -15.079915046691896 + ], + [ + "▁leukaemia", + -15.079915046691896 + ], + [ + 
"▁Andalucia", + -15.079916000366213 + ], + [ + "▁Subramani", + -15.079916000366213 + ], + [ + "▁bracts", + -15.079916000366213 + ], + [ + "▁swoosh", + -15.079916000366213 + ], + [ + "▁Amnesia", + -15.079917907714844 + ], + [ + "▁inoperable", + -15.079917907714844 + ], + [ + "▁straddling", + -15.079917907714844 + ], + [ + "▁Sorento", + -15.07991886138916 + ], + [ + "▁Yeshiva", + -15.079919815063477 + ], + [ + "▁neuroimaging", + -15.079919815063477 + ], + [ + "▁Bottega", + -15.079920768737791 + ], + [ + "▁discontinuing", + -15.079920768737791 + ], + [ + "▁hankering", + -15.079920768737791 + ], + [ + "meditation", + -15.07992172241211 + ], + [ + "▁primrose", + -15.079922676086426 + ], + [ + "jessica", + -15.079923629760742 + ], + [ + "poetry", + -15.079923629760742 + ], + [ + "▁bocce", + -15.079923629760742 + ], + [ + "▁energising", + -15.07992458343506 + ], + [ + "▁plenum", + -15.07992458343506 + ], + [ + "▁Hornsby", + -15.079925537109377 + ], + [ + "▁fluoxetine", + -15.079926490783691 + ], + [ + "▁scrutinizing", + -15.079926490783691 + ], + [ + "▁Scaffolding", + -15.079928398132324 + ], + [ + "▁collating", + -15.079928398132324 + ], + [ + "▁Compostela", + -15.07992935180664 + ], + [ + "▁EXACT", + -15.07992935180664 + ], + [ + "▁Rotunda", + -15.07992935180664 + ], + [ + "▁Annandale", + -15.079930305480955 + ], + [ + "wurst", + -15.07993221282959 + ], + [ + "▁HbA", + -15.07993221282959 + ], + [ + "▁Musgrave", + -15.079934120178224 + ], + [ + "Café", + -15.079936027526855 + ], + [ + "▁Aupair", + -15.079938888549805 + ], + [ + "phpBB", + -15.079941749572754 + ], + [ + "▁toppling", + -15.079941749572754 + ], + [ + "▁ALLOWED", + -15.07994270324707 + ], + [ + "Planned", + -15.07994556427002 + ], + [ + "▁nitride", + -15.07994556427002 + ], + [ + "▁Shockwave", + -15.079950332641602 + ], + [ + "settled", + -15.0799560546875 + ], + [ + "▁foreshore", + -15.0799560546875 + ], + [ + "▁soviet", + -15.0799560546875 + ], + [ + "▁README", + -15.079957008361816 + ], + [ + "▁Rendell", + -15.079957962036133 + ], + [ + "▁hesitating", + -15.07995891571045 + ], + [ + "▁Magyar", + -15.079960823059082 + ], + [ + "▁Duarte", + -15.079963684082031 + ], + [ + "▁ribbing", + -15.07996654510498 + ], + [ + "insulin", + -15.079968452453612 + ], + [ + "▁LEARNING", + -15.07997226715088 + ], + [ + "▁vexing", + -15.079975128173828 + ], + [ + "▁Titanfall", + -15.079976081848145 + ], + [ + "▁Feliz", + -15.07997703552246 + ], + [ + "▁immunoassay", + -15.079983711242676 + ], + [ + "▁bilge", + -15.07999038696289 + ], + [ + "▁Breaux", + -15.079992294311523 + ], + [ + "▁moines", + -15.080002784729004 + ], + [ + "▁Haupt", + -15.080004692077637 + ], + [ + "Evergreen", + -15.080007553100586 + ], + [ + "▁(10%)", + -15.080013275146484 + ], + [ + "▁auburn", + -15.080016136169434 + ], + [ + "martial", + -15.080020904541016 + ], + [ + "LION", + -15.08003044128418 + ], + [ + "▁Nonlinear", + -15.080034255981444 + ], + [ + "▁Antitrust", + -15.080035209655762 + ], + [ + "▁Pippa", + -15.080037117004396 + ], + [ + "▁tuple", + -15.080039024353027 + ], + [ + "Acne", + -15.08004093170166 + ], + [ + "▁Noblesville", + -15.080041885375977 + ], + [ + "▁polluters", + -15.080041885375977 + ], + [ + "▁Harriman", + -15.080056190490724 + ], + [ + "▁resettled", + -15.080056190490724 + ], + [ + "▁Aharon", + -15.080058097839355 + ], + [ + "▁WANTED", + -15.080066680908203 + ], + [ + "▁Candlestick", + -15.080070495605469 + ], + [ + "▁Rightmove", + -15.080071449279783 + ], + [ + "▁Ardmore", + -15.080076217651367 + ], + [ + "▁exploitative", + -15.080080032348633 + ], 
+ [ + "▁lewd", + -15.080089569091797 + ], + [ + "▁HOLY", + -15.08009147644043 + ], + [ + "▁ELEMENT", + -15.08010482788086 + ], + [ + "Egyptian", + -15.080122947692873 + ], + [ + "▁Royston", + -15.08012580871582 + ], + [ + "▁infantile", + -15.080141067504885 + ], + [ + "▁stork", + -15.080142974853516 + ], + [ + "▁datememe", + -15.080150604248049 + ], + [ + "▁Unesco", + -15.080155372619627 + ], + [ + "▁Heyward", + -15.08016300201416 + ], + [ + "lagged", + -15.080164909362791 + ], + [ + "▁Cleland", + -15.08016872406006 + ], + [ + "▁MariaDB", + -15.08017921447754 + ], + [ + "tipping", + -15.080180168151855 + ], + [ + "▁protege", + -15.080193519592283 + ], + [ + "▁ZOOM", + -15.080204010009766 + ], + [ + "cumulative", + -15.080212593078612 + ], + [ + "▁Corby", + -15.080223083496094 + ], + [ + "▁middling", + -15.080239295959473 + ], + [ + "▁buckthorn", + -15.080245018005373 + ], + [ + "▁undeserved", + -15.080252647399902 + ], + [ + "▁DISCLAIMS", + -15.080265998840332 + ], + [ + "▁McComb", + -15.080272674560549 + ], + [ + "▁Leftovers", + -15.080275535583496 + ], + [ + "▁resourcing", + -15.080281257629396 + ], + [ + "Wicked", + -15.080291748046877 + ], + [ + "▁Wilmer", + -15.080310821533203 + ], + [ + "▁Gambit", + -15.080316543579102 + ], + [ + "▁CRV", + -15.080340385437012 + ], + [ + "▁demography", + -15.080366134643556 + ], + [ + "▁Maclean", + -15.080382347106934 + ], + [ + "▁chomping", + -15.080391883850098 + ], + [ + "▁squee", + -15.08039665222168 + ], + [ + "translate", + -15.08041286468506 + ], + [ + "▁unhide", + -15.080438613891602 + ], + [ + "▁breadcrumb", + -15.080467224121094 + ], + [ + "▁Brecht", + -15.080480575561523 + ], + [ + "▁Trough", + -15.080487251281738 + ], + [ + "▁Physiol", + -15.080495834350586 + ], + [ + "rimming", + -15.080503463745115 + ], + [ + "▁Horizonte", + -15.080521583557127 + ], + [ + "▁disinterest", + -15.080527305603027 + ], + [ + "▁incinerate", + -15.08052921295166 + ], + [ + "▁Marconi", + -15.080538749694824 + ], + [ + "NHL", + -15.080540657043455 + ], + [ + "▁Renato", + -15.080552101135254 + ], + [ + "▁Perlman", + -15.080554008483888 + ], + [ + "▁Alcoa", + -15.080559730529783 + ], + [ + "▁Dominik", + -15.080565452575684 + ], + [ + "▁Kenai", + -15.080589294433594 + ], + [ + "▁Sportswear", + -15.080608367919922 + ], + [ + "▁Nautica", + -15.0806303024292 + ], + [ + "▁pickpocket", + -15.080639839172363 + ], + [ + "▁Linwood", + -15.080646514892578 + ], + [ + "▁DKNY", + -15.080649375915527 + ], + [ + "▁legume", + -15.080656051635742 + ], + [ + "▁Yaris", + -15.080680847167969 + ], + [ + "Arrived", + -15.080681800842283 + ], + [ + "leef", + -15.08073902130127 + ], + [ + "▁Lehmann", + -15.080748558044434 + ], + [ + "▁Kaufmann", + -15.080758094787598 + ], + [ + "▁Headband", + -15.080775260925291 + ], + [ + "CHIN", + -15.080791473388672 + ], + [ + "FEMA", + -15.08082389831543 + ], + [ + "receiver", + -15.080832481384276 + ], + [ + "1947", + -15.080841064453123 + ], + [ + "▁Wipro", + -15.080857276916504 + ], + [ + "▁Rhizo", + -15.080877304077148 + ], + [ + "▁bandleader", + -15.080877304077148 + ], + [ + "Collector", + -15.080900192260742 + ], + [ + "▁magpie", + -15.080907821655272 + ], + [ + "WAM", + -15.080921173095703 + ], + [ + "▁Metcalfe", + -15.080938339233398 + ], + [ + "▁Whin", + -15.080968856811523 + ], + [ + "▁physic", + -15.080992698669434 + ], + [ + "aditya", + -15.080998420715332 + ], + [ + "Gathering", + -15.081027030944824 + ], + [ + "▁holdbacks", + -15.081034660339355 + ], + [ + "▁Blackhawk", + -15.08104419708252 + ], + [ + "fencing", + -15.08109188079834 + 
], + [ + "▁Symon", + -15.08110809326172 + ], + [ + "▁childbearing", + -15.081134796142578 + ], + [ + "obium", + -15.081154823303224 + ], + [ + "▁Foles", + -15.081218719482422 + ], + [ + "▁Mixture", + -15.081254959106444 + ], + [ + "morbidities", + -15.081258773803713 + ], + [ + "Framework", + -15.08131504058838 + ], + [ + "▁19-21", + -15.08131980895996 + ], + [ + "varying", + -15.08138370513916 + ], + [ + "Awarded", + -15.081408500671388 + ], + [ + "▁reversion", + -15.081438064575195 + ], + [ + "BLOCK", + -15.08145809173584 + ], + [ + "Wilhelmina", + -15.08148956298828 + ], + [ + "▁bodybuilder", + -15.081541061401367 + ], + [ + "▁OpenID", + -15.081620216369627 + ], + [ + "▁Andrade", + -15.081668853759766 + ], + [ + "pupil", + -15.081719398498535 + ], + [ + "8266", + -15.081789016723633 + ], + [ + "▁HILLS", + -15.081892013549805 + ], + [ + "EYE", + -15.081931114196776 + ], + [ + "▁carbine", + -15.081933975219728 + ], + [ + "$250", + -15.081989288330078 + ], + [ + "▁sniffed", + -15.081997871398926 + ], + [ + "▁tartare", + -15.082023620605469 + ], + [ + "▁Dalian", + -15.082027435302734 + ], + [ + "3:28", + -15.082093238830566 + ], + [ + "▁primo", + -15.082108497619627 + ], + [ + "▁unlearn", + -15.082128524780272 + ], + [ + "Preserve", + -15.08216381072998 + ], + [ + "Assessing", + -15.08220672607422 + ], + [ + "▁Filo", + -15.082257270812988 + ], + [ + "Hurray", + -15.082343101501465 + ], + [ + "▁Sooke", + -15.08243179321289 + ], + [ + "▁squeaking", + -15.082559585571287 + ], + [ + "Allahu", + -15.082606315612791 + ], + [ + "mitar", + -15.08262062072754 + ], + [ + "▁Murali", + -15.082736015319824 + ], + [ + "Carrier", + -15.082836151123049 + ], + [ + "▁Tantra", + -15.082874298095703 + ], + [ + "▁Yehuda", + -15.082921981811523 + ], + [ + "MIE", + -15.08305835723877 + ], + [ + "▁vai", + -15.08310604095459 + ], + [ + "ufo", + -15.083117485046388 + ], + [ + "addish", + -15.083196640014648 + ], + [ + "▁reformist", + -15.083223342895508 + ], + [ + "Billion", + -15.08322811126709 + ], + [ + "MFS", + -15.083271026611328 + ], + [ + "▁($14", + -15.083293914794922 + ], + [ + "exotic", + -15.083531379699709 + ], + [ + "HUNG", + -15.08383846282959 + ], + [ + "7:25", + -15.083842277526855 + ], + [ + "HOM", + -15.08393383026123 + ], + [ + "▁Dulci", + -15.08393669128418 + ], + [ + "▁$65,000", + -15.083985328674316 + ], + [ + "attract", + -15.084035873413086 + ], + [ + "▁MOBI", + -15.084095001220703 + ], + [ + "▁Linkage", + -15.084288597106934 + ], + [ + "ippmann", + -15.084304809570312 + ], + [ + "▁Henrique", + -15.08450412750244 + ], + [ + "▁21:1", + -15.084549903869627 + ], + [ + "LEGO", + -15.084650993347168 + ], + [ + "Compound", + -15.0846529006958 + ], + [ + "EMCO", + -15.08470344543457 + ], + [ + "1105", + -15.084930419921877 + ], + [ + "significantly", + -15.08499813079834 + ], + [ + "еро", + -15.085001945495604 + ], + [ + "Analog", + -15.085159301757812 + ], + [ + "▁Glaz", + -15.085227012634276 + ], + [ + "▁Mahdi", + -15.085467338562012 + ], + [ + "▁radiographic", + -15.085474014282228 + ], + [ + "Regulate", + -15.0855131149292 + ], + [ + "▁Daze", + -15.085554122924805 + ], + [ + "MVP", + -15.085663795471191 + ], + [ + "▁Sangh", + -15.085869789123535 + ], + [ + "fauna", + -15.085997581481934 + ], + [ + "SML", + -15.086189270019531 + ], + [ + "▁Gonzo", + -15.086396217346191 + ], + [ + "lucia", + -15.087027549743652 + ], + [ + "entries", + -15.087275505065918 + ], + [ + "petal", + -15.087413787841797 + ], + [ + "unker", + -15.08743953704834 + ], + [ + "8.30", + -15.087477684020996 + ], + [ + "▁Burnet", 
+ -15.087483406066896 + ], + [ + "▁843-", + -15.087528228759766 + ], + [ + "▁McCourt", + -15.087581634521484 + ], + [ + "derick", + -15.087584495544434 + ], + [ + "Chew", + -15.087675094604492 + ], + [ + "ата", + -15.087721824645996 + ], + [ + "▁MICE", + -15.087943077087402 + ], + [ + "PAF", + -15.08797836303711 + ], + [ + "Override", + -15.088051795959473 + ], + [ + "exception", + -15.088125228881836 + ], + [ + "exhibit", + -15.088172912597656 + ], + [ + "▁Trina", + -15.088340759277344 + ], + [ + "iscus", + -15.08844757080078 + ], + [ + "lasse", + -15.088629722595217 + ], + [ + "BPL", + -15.088756561279297 + ], + [ + "TLC", + -15.088770866394045 + ], + [ + "geographic", + -15.088894844055176 + ], + [ + "sufficiently", + -15.088909149169922 + ], + [ + "▁Gadd", + -15.08898639678955 + ], + [ + "quero", + -15.089041709899902 + ], + [ + "▁Jaca", + -15.08917999267578 + ], + [ + "1542", + -15.08921718597412 + ], + [ + "▁Kahan", + -15.089219093322754 + ], + [ + "14-7", + -15.089289665222168 + ], + [ + "▁Educa", + -15.089329719543455 + ], + [ + "▁Cabal", + -15.089414596557615 + ], + [ + "lijke", + -15.089431762695312 + ], + [ + "▁presuppose", + -15.089473724365234 + ], + [ + "▁masterplan", + -15.089601516723633 + ], + [ + "isano", + -15.08962345123291 + ], + [ + "NISON", + -15.089681625366213 + ], + [ + "skeleton", + -15.089702606201172 + ], + [ + "capitalist", + -15.08971881866455 + ], + [ + "WIRED", + -15.089930534362791 + ], + [ + "EKO", + -15.090121269226074 + ], + [ + "▁musket", + -15.090167045593262 + ], + [ + "▁Norco", + -15.090188026428224 + ], + [ + "3:05", + -15.09021282196045 + ], + [ + "▁Unger", + -15.090312004089355 + ], + [ + "▁Envi", + -15.090463638305664 + ], + [ + "eguro", + -15.090547561645508 + ], + [ + "▁Inspir", + -15.090721130371094 + ], + [ + "behavior", + -15.090742111206056 + ], + [ + "Byron", + -15.090798377990724 + ], + [ + "▁bopp", + -15.090819358825684 + ], + [ + "penta", + -15.090840339660645 + ], + [ + "▁Langston", + -15.090867042541504 + ], + [ + "ARIS", + -15.090975761413574 + ], + [ + "OLDER", + -15.091243743896484 + ], + [ + "▁medicament", + -15.09153938293457 + ], + [ + "Shameless", + -15.091562271118164 + ], + [ + "Kerr", + -15.091614723205566 + ], + [ + "UCH", + -15.091714859008787 + ], + [ + "▁misprint", + -15.091747283935549 + ], + [ + "transitive", + -15.091761589050291 + ], + [ + "Append", + -15.091779708862305 + ], + [ + "00-0", + -15.09178066253662 + ], + [ + "populate", + -15.091815948486328 + ], + [ + "Composi", + -15.091861724853516 + ], + [ + "Crawl", + -15.091899871826172 + ], + [ + "LDPE", + -15.09193515777588 + ], + [ + "FLU", + -15.09199333190918 + ], + [ + "hiking", + -15.092019081115724 + ], + [ + "DEFAULT", + -15.092108726501465 + ], + [ + "▁Aurangabad", + -15.092108726501465 + ], + [ + "▁Basquiat", + -15.092108726501465 + ], + [ + "▁Caballero", + -15.092108726501465 + ], + [ + "▁Dunfermline", + -15.092108726501465 + ], + [ + "▁Espinosa", + -15.092108726501465 + ], + [ + "▁Griezmann", + -15.092108726501465 + ], + [ + "▁LLumar", + -15.092108726501465 + ], + [ + "▁Margherita", + -15.092108726501465 + ], + [ + "▁Pellegrini", + -15.092108726501465 + ], + [ + "▁Recurring", + -15.092108726501465 + ], + [ + "▁Ridgway", + -15.092108726501465 + ], + [ + "▁Sondheim", + -15.092108726501465 + ], + [ + "▁Speyside", + -15.092108726501465 + ], + [ + "▁aflatoxin", + -15.092108726501465 + ], + [ + "▁anaphylaxis", + -15.092108726501465 + ], + [ + "▁bromide", + -15.092108726501465 + ], + [ + "▁candelabra", + -15.092108726501465 + ], + [ + "▁destabilizing", + 
-15.092108726501465 + ], + [ + "▁disfigured", + -15.092108726501465 + ], + [ + "▁eMarketer", + -15.092108726501465 + ], + [ + "▁extenuating", + -15.092108726501465 + ], + [ + "▁harmonizing", + -15.092108726501465 + ], + [ + "▁inadequacies", + -15.092108726501465 + ], + [ + "▁miscalculation", + -15.092108726501465 + ], + [ + "▁misogyny", + -15.092108726501465 + ], + [ + "▁obsidian", + -15.092108726501465 + ], + [ + "▁persimmon", + -15.092108726501465 + ], + [ + "▁pumice", + -15.092108726501465 + ], + [ + "▁regurgitate", + -15.092108726501465 + ], + [ + "▁sphincter", + -15.092108726501465 + ], + [ + "▁stargazing", + -15.092108726501465 + ], + [ + "▁tiramisu", + -15.092108726501465 + ], + [ + "▁unhygienic", + -15.092108726501465 + ], + [ + "▁zovirax", + -15.092108726501465 + ], + [ + "▁Helmholtz", + -15.09210968017578 + ], + [ + "▁Lowestoft", + -15.09210968017578 + ], + [ + "▁conservancy", + -15.09210968017578 + ], + [ + "▁platitudes", + -15.09210968017578 + ], + [ + "▁vegetal", + -15.09210968017578 + ], + [ + "192.168.0.", + -15.092110633850098 + ], + [ + "▁Clymer", + -15.092110633850098 + ], + [ + "▁igneous", + -15.092110633850098 + ], + [ + "▁triptych", + -15.092110633850098 + ], + [ + "▁tulsa", + -15.092111587524414 + ], + [ + "▁Argonaut", + -15.09211254119873 + ], + [ + "▁Shabbos", + -15.09211254119873 + ], + [ + "▁TRANSMISSION", + -15.09211254119873 + ], + [ + "siloxane", + -15.092113494873049 + ], + [ + "▁Commissar", + -15.092113494873049 + ], + [ + "▁PLASTIC", + -15.092113494873049 + ], + [ + "▁panniers", + -15.092113494873049 + ], + [ + "▁Lunenburg", + -15.092114448547363 + ], + [ + "▁PREVENT", + -15.09211540222168 + ], + [ + "▁Bacillus", + -15.092116355895996 + ], + [ + "▁Taranaki", + -15.092116355895996 + ], + [ + "▁Identities", + -15.092117309570312 + ], + [ + "▁Wexler", + -15.092118263244627 + ], + [ + "▁Filament", + -15.092119216918944 + ], + [ + "▁geopolitics", + -15.092120170593262 + ], + [ + "▁misperception", + -15.092120170593262 + ], + [ + "▁Cheesy", + -15.092121124267578 + ], + [ + "▁Choctaw", + -15.092121124267578 + ], + [ + "▁scalding", + -15.092121124267578 + ], + [ + "▁earthenware", + -15.092123985290527 + ], + [ + "▁hawthorn", + -15.09212589263916 + ], + [ + "▁Coppell", + -15.092126846313477 + ], + [ + "▁Miliband", + -15.092126846313477 + ], + [ + "▁Eason", + -15.092127799987791 + ], + [ + "▁Meatballs", + -15.092127799987791 + ], + [ + "▁Vixen", + -15.092132568359377 + ], + [ + "▁Tiwari", + -15.092133522033691 + ], + [ + "▁Icarus", + -15.092135429382324 + ], + [ + "Vibe", + -15.092138290405272 + ], + [ + "▁modifiable", + -15.092138290405272 + ], + [ + "▁Doylestown", + -15.09213924407959 + ], + [ + "▁Rotarian", + -15.09213924407959 + ], + [ + "▁intercooler", + -15.09213924407959 + ], + [ + "▁rotunda", + -15.092140197753906 + ], + [ + "▁Cunard", + -15.09214210510254 + ], + [ + "▁Interracial", + -15.092144012451172 + ], + [ + "▁TWRP", + -15.092144012451172 + ], + [ + "▁Alpina", + -15.092147827148438 + ], + [ + "▁Eyeshadow", + -15.092147827148438 + ], + [ + "▁Irfan", + -15.092147827148438 + ], + [ + "▁quizzed", + -15.092147827148438 + ], + [ + "▁Rubinstein", + -15.09214973449707 + ], + [ + "▁Bearcats", + -15.092150688171388 + ], + [ + "▁Indices", + -15.092154502868652 + ], + [ + "▁jackfruit", + -15.0921630859375 + ], + [ + "▁$5,500", + -15.092166900634766 + ], + [ + "▁Seabourn", + -15.092169761657717 + ], + [ + "▁Prehistoric", + -15.092172622680664 + ], + [ + "▁Columba", + -15.09217643737793 + ], + [ + "▁Mechanicsburg", + -15.092181205749512 + ], + [ + "▁Gazelle", + 
-15.09218406677246 + ], + [ + "▁Sisterhood", + -15.092185020446776 + ], + [ + "▁Wyeth", + -15.092185020446776 + ], + [ + "▁Moonshine", + -15.092190742492676 + ], + [ + "▁Bahasa", + -15.092204093933104 + ], + [ + "▁Watchtower", + -15.092206001281738 + ], + [ + "-1/8\"", + -15.092209815979004 + ], + [ + "▁Berhad", + -15.09221076965332 + ], + [ + "▁Sangria", + -15.092215538024902 + ], + [ + "▁Receptor", + -15.092218399047852 + ], + [ + "▁Cooperstown", + -15.092222213745115 + ], + [ + "▁Perthshire", + -15.09222412109375 + ], + [ + "▁Jeong", + -15.0922269821167 + ], + [ + "▁Rosalie", + -15.092236518859863 + ], + [ + "▁Vichy", + -15.092239379882812 + ], + [ + "▁Chivas", + -15.092247009277344 + ], + [ + "▁Opium", + -15.092257499694824 + ], + [ + "▁litmus", + -15.092259407043455 + ], + [ + "▁MINING", + -15.092267990112305 + ], + [ + "▁pizzazz", + -15.09226894378662 + ], + [ + "▁Anushka", + -15.092281341552734 + ], + [ + "▁frugality", + -15.092283248901367 + ], + [ + "Contributor", + -15.092292785644531 + ], + [ + "▁Unseen", + -15.092303276062012 + ], + [ + "Incredibly", + -15.092314720153809 + ], + [ + "▁Rosberg", + -15.092330932617188 + ], + [ + "▁Sewage", + -15.092331886291504 + ], + [ + "Appreciate", + -15.09233856201172 + ], + [ + "Turmeric", + -15.09234619140625 + ], + [ + "discriminatory", + -15.09235668182373 + ], + [ + "▁wallcovering", + -15.09235668182373 + ], + [ + "Impression", + -15.092361450195312 + ], + [ + "Cinderella", + -15.092366218566896 + ], + [ + "Turtle", + -15.092369079589844 + ], + [ + "administrator", + -15.092369079589844 + ], + [ + "equilibrium", + -15.092369079589844 + ], + [ + "Gonzalez", + -15.09237289428711 + ], + [ + "▁Bertol", + -15.09237289428711 + ], + [ + "Rosemary", + -15.092373847961426 + ], + [ + "Restoration", + -15.092378616333008 + ], + [ + "legitimate", + -15.092379570007324 + ], + [ + "Alzheimer", + -15.092381477355955 + ], + [ + "Polyester", + -15.092381477355955 + ], + [ + "▁Dinnerware", + -15.092381477355955 + ], + [ + "Fourteen", + -15.092382431030272 + ], + [ + "8:16", + -15.09238338470459 + ], + [ + "Guidance", + -15.09238338470459 + ], + [ + "Thames", + -15.09238338470459 + ], + [ + "controversial", + -15.09238338470459 + ], + [ + "Quebec", + -15.092384338378906 + ], + [ + "▁$0.01", + -15.092384338378906 + ], + [ + "▁marinating", + -15.092384338378906 + ], + [ + "Whiplash", + -15.09238624572754 + ], + [ + "Delaware", + -15.09239387512207 + ], + [ + "Accessibility", + -15.092395782470703 + ], + [ + "Gloria", + -15.09239673614502 + ], + [ + "OECD", + -15.092409133911133 + ], + [ + "Jensen", + -15.092424392700195 + ], + [ + "▁Ensuite", + -15.092427253723145 + ], + [ + "▁Accordion", + -15.092429161071776 + ], + [ + "Courage", + -15.092439651489258 + ], + [ + "Rotary", + -15.092440605163574 + ], + [ + "▁Bhandar", + -15.092443466186523 + ], + [ + "hardened", + -15.092446327209473 + ], + [ + "▁Clarkston", + -15.092448234558104 + ], + [ + "▁mimicry", + -15.092448234558104 + ], + [ + "Pundit", + -15.092463493347168 + ], + [ + "▁expunged", + -15.092474937438965 + ], + [ + "blick", + -15.092479705810549 + ], + [ + "Console", + -15.092496871948242 + ], + [ + "▁2012/13", + -15.092501640319824 + ], + [ + "▁exonerated", + -15.092507362365724 + ], + [ + "Seventy", + -15.092510223388672 + ], + [ + "destruction", + -15.092510223388672 + ], + [ + "barrier", + -15.092520713806152 + ], + [ + "▁Unfair", + -15.092531204223633 + ], + [ + "entrance", + -15.092533111572266 + ], + [ + "Joanna", + -15.09254550933838 + ], + [ + "▁Remarketing", + -15.092556953430176 + ], + [ + 
"▁Teamsters", + -15.09256362915039 + ], + [ + "Serena", + -15.092564582824709 + ], + [ + "▁Millbrook", + -15.092575073242188 + ], + [ + "▁irregularity", + -15.092578887939451 + ], + [ + "Giveaway", + -15.092586517333984 + ], + [ + "▁Dhamma", + -15.0925931930542 + ], + [ + "▁mochi", + -15.09260082244873 + ], + [ + "Perez", + -15.092605590820312 + ], + [ + "▁outermost", + -15.092608451843262 + ], + [ + "▁Turnkey", + -15.092611312866213 + ], + [ + "▁recklessness", + -15.092641830444336 + ], + [ + "promotional", + -15.092652320861816 + ], + [ + "▁Radiat", + -15.092659950256348 + ], + [ + "▁Cipro", + -15.092662811279297 + ], + [ + "Petrarch", + -15.09267807006836 + ], + [ + "washington", + -15.09272003173828 + ], + [ + "▁sarah", + -15.092737197875977 + ], + [ + "▁reignited", + -15.09274196624756 + ], + [ + "▁Bansal", + -15.09275722503662 + ], + [ + "▁foment", + -15.092768669128418 + ], + [ + "▁Cameroonian", + -15.09282112121582 + ], + [ + "▁schism", + -15.09282112121582 + ], + [ + "▁Kullu", + -15.092839241027832 + ], + [ + "▁waitresses", + -15.092852592468262 + ], + [ + "▁Nuance", + -15.09286880493164 + ], + [ + "Climbing", + -15.092893600463867 + ], + [ + "Norton", + -15.0928955078125 + ], + [ + "▁Formatting", + -15.09289836883545 + ], + [ + "▁resupply", + -15.09290599822998 + ], + [ + "▁MyChart", + -15.092941284179688 + ], + [ + "Panama", + -15.092979431152344 + ], + [ + "▁reelected", + -15.092979431152344 + ], + [ + "▁tailpipe", + -15.09299087524414 + ], + [ + "▁hookah", + -15.093022346496582 + ], + [ + "Alexis", + -15.093027114868164 + ], + [ + "▁jello", + -15.093084335327148 + ], + [ + "▁freakish", + -15.09308624267578 + ], + [ + "▁birdsong", + -15.093091011047363 + ], + [ + "▁seminaries", + -15.093140602111816 + ], + [ + "▁Happ", + -15.09316062927246 + ], + [ + "electricity", + -15.093161582946776 + ], + [ + "mammary", + -15.093165397644045 + ], + [ + "2:57", + -15.093169212341309 + ], + [ + "▁sedated", + -15.093191146850586 + ], + [ + "adapted", + -15.093241691589355 + ], + [ + "▁Stackable", + -15.093241691589355 + ], + [ + "▁Parole", + -15.093274116516112 + ], + [ + "namespace", + -15.093289375305176 + ], + [ + "▁Flashpoint", + -15.093300819396973 + ], + [ + "▁fatherhood", + -15.09337329864502 + ], + [ + "1030", + -15.093379020690918 + ], + [ + "DML", + -15.093391418457031 + ], + [ + "corpora", + -15.093391418457031 + ], + [ + "▁torrid", + -15.093398094177246 + ], + [ + "Instance", + -15.0934476852417 + ], + [ + "disciplin", + -15.093463897705078 + ], + [ + "OTTO", + -15.093475341796877 + ], + [ + "▁repositioned", + -15.093509674072266 + ], + [ + "Fog", + -15.093521118164062 + ], + [ + "▁Welded", + -15.093530654907228 + ], + [ + "▁Shona", + -15.093534469604492 + ], + [ + "Translating", + -15.093578338623049 + ], + [ + "Dried", + -15.093581199645996 + ], + [ + "▁RDX", + -15.09362506866455 + ], + [ + "▁civilised", + -15.093637466430664 + ], + [ + "▁21-22", + -15.09364128112793 + ], + [ + "sadly", + -15.09365177154541 + ], + [ + "Fewer", + -15.093666076660156 + ], + [ + "▁01:3", + -15.0936861038208 + ], + [ + "Sheep", + -15.093689918518066 + ], + [ + "▁flirted", + -15.093689918518066 + ], + [ + "Movers", + -15.093716621398926 + ], + [ + "▁crocus", + -15.093731880187988 + ], + [ + "▁mobster", + -15.09375 + ], + [ + "▁Haug", + -15.093786239624023 + ], + [ + "▁Redbridge", + -15.0938138961792 + ], + [ + "descript", + -15.09381866455078 + ], + [ + "▁minerality", + -15.093826293945312 + ], + [ + "invested", + -15.09383487701416 + ], + [ + "▁Roundhouse", + -15.093852043151855 + ], + [ + 
"▁Damned", + -15.09389305114746 + ], + [ + "▁Altered", + -15.093899726867676 + ], + [ + "▁PRISM", + -15.09395694732666 + ], + [ + "▁Corden", + -15.093960762023926 + ], + [ + "treaters", + -15.093964576721191 + ], + [ + "▁theatrics", + -15.093964576721191 + ], + [ + "6–1", + -15.093984603881836 + ], + [ + "4:23", + -15.093987464904783 + ], + [ + "▁ESX", + -15.094002723693848 + ], + [ + "▁Squaw", + -15.094038009643556 + ], + [ + "Hypo", + -15.094056129455566 + ], + [ + "▁Hocking", + -15.094073295593262 + ], + [ + "▁Lockout", + -15.094136238098145 + ], + [ + "activist", + -15.09414768218994 + ], + [ + "URBAN", + -15.094196319580078 + ], + [ + "▁marchers", + -15.094212532043455 + ], + [ + "absorbable", + -15.094237327575684 + ], + [ + "mattress", + -15.094260215759276 + ], + [ + "raq", + -15.094350814819336 + ], + [ + "attach", + -15.094478607177734 + ], + [ + "awarded", + -15.094552993774414 + ], + [ + "DFW", + -15.09462833404541 + ], + [ + "▁____________", + -15.09464168548584 + ], + [ + "FAIR", + -15.094703674316406 + ], + [ + "▁Postcode", + -15.094718933105469 + ], + [ + "oltz", + -15.094761848449709 + ], + [ + "▁semifinalist", + -15.094779014587402 + ], + [ + "▁BOND", + -15.094794273376465 + ], + [ + "▁Ruhr", + -15.094823837280272 + ], + [ + "▁banquette", + -15.094977378845217 + ], + [ + "▁rereading", + -15.095001220703123 + ], + [ + "MAY", + -15.095195770263672 + ], + [ + "ilene", + -15.095393180847168 + ], + [ + "agonist", + -15.095413208007812 + ], + [ + "logie", + -15.095660209655762 + ], + [ + "OVEN", + -15.095677375793455 + ], + [ + "▁Futura", + -15.095779418945312 + ], + [ + "▁Fazio", + -15.09581184387207 + ], + [ + "4:05", + -15.096055030822754 + ], + [ + "aliasing", + -15.096104621887209 + ], + [ + "8:28", + -15.096172332763672 + ], + [ + "ENTER", + -15.096383094787598 + ], + [ + "Keyboard", + -15.096384048461914 + ], + [ + "9.98", + -15.096396446228027 + ], + [ + "▁boson", + -15.096436500549316 + ], + [ + "8:13", + -15.096522331237791 + ], + [ + "Sector", + -15.096582412719728 + ], + [ + "▁Seeger", + -15.09681510925293 + ], + [ + "zzini", + -15.096904754638672 + ], + [ + "blica", + -15.09697151184082 + ], + [ + "0.23", + -15.097039222717283 + ], + [ + "1916", + -15.097143173217772 + ], + [ + "▁Volker", + -15.097189903259276 + ], + [ + "▁LIU", + -15.097307205200195 + ], + [ + "▁knockoff", + -15.0973482131958 + ], + [ + "0-96", + -15.097469329833984 + ], + [ + "▁Cramp", + -15.097726821899414 + ], + [ + "Musician", + -15.09792137145996 + ], + [ + "costume", + -15.097979545593262 + ], + [ + "DOOR", + -15.098031997680664 + ], + [ + "wij", + -15.098072052001951 + ], + [ + "▁Obey", + -15.09845733642578 + ], + [ + "2:01", + -15.098672866821287 + ], + [ + "perfection", + -15.098864555358888 + ], + [ + "▁Clif", + -15.098886489868164 + ], + [ + "LEAD", + -15.098969459533691 + ], + [ + "Gamb", + -15.099005699157717 + ], + [ + "▁Drei", + -15.099103927612305 + ], + [ + "▁802.1", + -15.09920597076416 + ], + [ + "ABF", + -15.099214553833008 + ], + [ + "8.1%", + -15.099220275878906 + ], + [ + "▁Goud", + -15.09922981262207 + ], + [ + "Geographically", + -15.09924030303955 + ], + [ + "0/80", + -15.0993013381958 + ], + [ + "COLL", + -15.09933376312256 + ], + [ + "▁Chandan", + -15.099346160888672 + ], + [ + "5:16", + -15.099430084228516 + ], + [ + "▁Hoodia", + -15.09959316253662 + ], + [ + "flute", + -15.099617958068848 + ], + [ + "okka", + -15.099756240844728 + ], + [ + "ARIO", + -15.099803924560549 + ], + [ + "▁annexe", + -15.099884033203123 + ], + [ + "▁Ecclesia", + -15.099894523620604 + ], + [ + 
"▁extern", + -15.100079536437988 + ], + [ + "▁misbehave", + -15.100436210632324 + ], + [ + "▁$8.00", + -15.100576400756836 + ], + [ + "▁$4,500", + -15.100692749023438 + ], + [ + "curved", + -15.100757598876951 + ], + [ + "4/15", + -15.100788116455078 + ], + [ + "▁Furu", + -15.100844383239746 + ], + [ + "physio", + -15.101242065429688 + ], + [ + "Ummm", + -15.10135269165039 + ], + [ + "▁$40.00", + -15.101390838623049 + ], + [ + "hofen", + -15.10153102874756 + ], + [ + "Aspirin", + -15.101675033569336 + ], + [ + "1927", + -15.10196590423584 + ], + [ + "modest", + -15.102100372314451 + ], + [ + "▁561-", + -15.102134704589844 + ], + [ + "quisition", + -15.102144241333008 + ], + [ + "nologie", + -15.102295875549316 + ], + [ + "CONFIG", + -15.102396965026855 + ], + [ + "CFO", + -15.102574348449709 + ], + [ + "▁Chul", + -15.102603912353516 + ], + [ + "▁WARM", + -15.102812767028809 + ], + [ + "▁1784", + -15.102845191955566 + ], + [ + "ickens", + -15.102910041809082 + ], + [ + "prav", + -15.102933883666992 + ], + [ + "dawg", + -15.102961540222168 + ], + [ + "▁Neko", + -15.102980613708496 + ], + [ + "villain", + -15.102987289428713 + ], + [ + "▁underperform", + -15.103031158447266 + ], + [ + "HOOK", + -15.103157043457031 + ], + [ + "SBU", + -15.10316276550293 + ], + [ + "▁Bintan", + -15.103466033935549 + ], + [ + "▁Magdalen", + -15.103473663330078 + ], + [ + "▁Montego", + -15.103473663330078 + ], + [ + "▁RECOMMEND", + -15.10367202758789 + ], + [ + "▁schoolteacher", + -15.103679656982422 + ], + [ + "UNK", + -15.103747367858888 + ], + [ + "▁TSH", + -15.103754997253418 + ], + [ + "▁depreciate", + -15.103851318359377 + ], + [ + "▁legislate", + -15.103851318359377 + ], + [ + "ffff", + -15.103979110717772 + ], + [ + "▁piezo", + -15.104004859924316 + ], + [ + "▁Steyn", + -15.104046821594238 + ], + [ + "definite", + -15.104141235351562 + ], + [ + "1:33", + -15.104180335998535 + ], + [ + "Pear", + -15.10421371459961 + ], + [ + "OKEN", + -15.104262351989746 + ], + [ + "▁cerebro", + -15.104451179504396 + ], + [ + "DIMENSION", + -15.104454040527344 + ], + [ + "cholinergic", + -15.104454040527344 + ], + [ + "▁Aberffrwd", + -15.104454040527344 + ], + [ + "▁Armistice", + -15.104454040527344 + ], + [ + "▁Chiffon", + -15.104454040527344 + ], + [ + "▁Constituent", + -15.104454040527344 + ], + [ + "▁Dunblane", + -15.104454040527344 + ], + [ + "▁ELECTRONIC", + -15.104454040527344 + ], + [ + "▁HEAVY", + -15.104454040527344 + ], + [ + "▁INCREASE", + -15.104454040527344 + ], + [ + "▁Jaisalmer", + -15.104454040527344 + ], + [ + "▁Kincaid", + -15.104454040527344 + ], + [ + "▁Kozhikode", + -15.104454040527344 + ], + [ + "▁Kyushu", + -15.104454040527344 + ], + [ + "▁Murrieta", + -15.104454040527344 + ], + [ + "▁Okeechobee", + -15.104454040527344 + ], + [ + "▁Phenoxyethanol", + -15.104454040527344 + ], + [ + "▁Ruidoso", + -15.104454040527344 + ], + [ + "▁Sciatica", + -15.104454040527344 + ], + [ + "▁Snickers", + -15.104454040527344 + ], + [ + "▁Tiananmen", + -15.104454040527344 + ], + [ + "▁Trillium", + -15.104454040527344 + ], + [ + "▁ULTIMATE", + -15.104454040527344 + ], + [ + "▁Umatilla", + -15.104454040527344 + ], + [ + "▁Uniqlo", + -15.104454040527344 + ], + [ + "▁Valkyrie", + -15.104454040527344 + ], + [ + "▁Wozniak", + -15.104454040527344 + ], + [ + "▁conjuring", + -15.104454040527344 + ], + [ + "▁impersonator", + -15.104454040527344 + ], + [ + "▁moccasin", + -15.104454040527344 + ], + [ + "▁salvador", + -15.104454040527344 + ], + [ + "▁shrapnel", + -15.104454040527344 + ], + [ + "▁sleazy", + -15.104454040527344 + ], + 
[ + "▁suckling", + -15.104454040527344 + ], + [ + "▁syphilis", + -15.104454040527344 + ], + [ + "▁typhoid", + -15.104454040527344 + ], + [ + "▁unblemished", + -15.104454040527344 + ], + [ + "▁unscrambling", + -15.104454040527344 + ], + [ + "▁angiography", + -15.10445499420166 + ], + [ + "▁arrondissement", + -15.10445499420166 + ], + [ + "▁sterilise", + -15.10445499420166 + ], + [ + "▁Stipendi", + -15.104455947875977 + ], + [ + "▁subjugate", + -15.104455947875977 + ], + [ + "▁detonation", + -15.104456901550291 + ], + [ + "▁glomerul", + -15.104456901550291 + ], + [ + "▁pandering", + -15.104456901550291 + ], + [ + "▁Currencies", + -15.104458808898926 + ], + [ + "▁Matriculation", + -15.104458808898926 + ], + [ + "▁Swinton", + -15.104458808898926 + ], + [ + "▁confining", + -15.104458808898926 + ], + [ + "▁Kearns", + -15.104459762573242 + ], + [ + "▁cavalier", + -15.104459762573242 + ], + [ + "▁floodgates", + -15.104459762573242 + ], + [ + "▁Technicolor", + -15.10446071624756 + ], + [ + "Osteoarthritis", + -15.104461669921877 + ], + [ + "▁POWDER", + -15.104461669921877 + ], + [ + "ионн", + -15.104462623596191 + ], + [ + "▁Bannister", + -15.104462623596191 + ], + [ + "▁Hawkesbury", + -15.10446548461914 + ], + [ + "▁Bukhara", + -15.104466438293455 + ], + [ + "▁GRAPHICS", + -15.104466438293455 + ], + [ + "▁oolong", + -15.104466438293455 + ], + [ + "▁hyperplasia", + -15.104469299316406 + ], + [ + "▁synthroid", + -15.104469299316406 + ], + [ + "▁macroscopic", + -15.104470252990724 + ], + [ + "▁Fortinet", + -15.104472160339355 + ], + [ + "▁Lupita", + -15.104472160339355 + ], + [ + "▁agonising", + -15.104472160339355 + ], + [ + "▁Trillion", + -15.104473114013672 + ], + [ + "▁Hemlock", + -15.104474067687988 + ], + [ + "▁Imaginary", + -15.104475021362305 + ], + [ + "▁parathyroid", + -15.10447597503662 + ], + [ + "▁caesar", + -15.104476928710938 + ], + [ + "▁Plenary", + -15.10448169708252 + ], + [ + "▁Interference", + -15.104483604431152 + ], + [ + "▁Koehler", + -15.104485511779783 + ], + [ + "▁Bacardi", + -15.104486465454102 + ], + [ + "▁Maribor", + -15.104490280151367 + ], + [ + "▁bloodthirsty", + -15.104490280151367 + ], + [ + "Intrinsic", + -15.104494094848633 + ], + [ + "▁Bhutanese", + -15.104494094848633 + ], + [ + "-09-2018", + -15.10449504852295 + ], + [ + "▁Longitudinal", + -15.104496002197266 + ], + [ + "▁unsanitary", + -15.104498863220217 + ], + [ + "▁Erosion", + -15.104499816894531 + ], + [ + "▁Newburyport", + -15.104499816894531 + ], + [ + "BATHROOM", + -15.104500770568848 + ], + [ + "▁TANF", + -15.10450267791748 + ], + [ + "▁Wilshire", + -15.10450267791748 + ], + [ + "▁sinuous", + -15.104509353637695 + ], + [ + "DIRECTIONS", + -15.104512214660645 + ], + [ + "▁Duramax", + -15.104512214660645 + ], + [ + "▁Fresco", + -15.104522705078123 + ], + [ + "▁Saal", + -15.104522705078123 + ], + [ + "▁Brunello", + -15.10452365875244 + ], + [ + "▁Encino", + -15.104524612426758 + ], + [ + "▁Hussey", + -15.104528427124023 + ], + [ + "▁TONIGHT", + -15.10452938079834 + ], + [ + "/12/2018", + -15.104530334472656 + ], + [ + "▁macys", + -15.104530334472656 + ], + [ + "▁Barbershop", + -15.104544639587402 + ], + [ + "▁dwelt", + -15.10454559326172 + ], + [ + "▁gesso", + -15.104548454284668 + ], + [ + "▁Insoles", + -15.104552268981934 + ], + [ + "▁Sule", + -15.104561805725098 + ], + [ + "▁Bhatia", + -15.10456371307373 + ], + [ + "▁Sarnia", + -15.10457706451416 + ], + [ + "▁SOHO", + -15.10457992553711 + ], + [ + "▁comparator", + -15.104580879211426 + ], + [ + "▁dabbing", + -15.10458278656006 + ], + [ + "▁Tignes", + 
-15.104588508605955 + ], + [ + "▁Nickerson", + -15.104608535766602 + ], + [ + "▁Headlamp", + -15.10461139678955 + ], + [ + "▁CafePress", + -15.104612350463867 + ], + [ + "▁Bremer", + -15.104620933532717 + ], + [ + "▁Wolfsburg", + -15.104625701904297 + ], + [ + "schrift", + -15.10462760925293 + ], + [ + "▁Rennie", + -15.104628562927246 + ], + [ + "▁Longoria", + -15.104639053344728 + ], + [ + "flavor", + -15.104649543762209 + ], + [ + "▁prze", + -15.104676246643066 + ], + [ + "▁PowerShot", + -15.10468292236328 + ], + [ + "mitchell", + -15.104689598083496 + ], + [ + "▁Dutchess", + -15.104696273803713 + ], + [ + "▁empat", + -15.104731559753418 + ], + [ + "▁Fairmount", + -15.104758262634276 + ], + [ + "▁Lampard", + -15.104763984680176 + ], + [ + "Kiernan", + -15.10478401184082 + ], + [ + "▁Latimer", + -15.104805946350098 + ], + [ + "▁Chisel", + -15.104844093322754 + ], + [ + "▁fondest", + -15.10485553741455 + ], + [ + "Reveal", + -15.104893684387209 + ], + [ + "▁$12.00", + -15.104904174804688 + ], + [ + "ukha", + -15.104907035827637 + ], + [ + "▁wizardry", + -15.104907035827637 + ], + [ + "Cobblestone", + -15.10491943359375 + ], + [ + "▁Bhagat", + -15.104948043823242 + ], + [ + "▁Palmdale", + -15.10495948791504 + ], + [ + "Happily", + -15.104970932006836 + ], + [ + "Neighbor", + -15.104972839355469 + ], + [ + "Efforts", + -15.104979515075684 + ], + [ + "▁repelling", + -15.104981422424316 + ], + [ + "horizon", + -15.10499382019043 + ], + [ + "▁Anhui", + -15.10499382019043 + ], + [ + "Bacteria", + -15.104999542236328 + ], + [ + "praying", + -15.105003356933594 + ], + [ + "fresco", + -15.105010986328123 + ], + [ + "Internship", + -15.105021476745604 + ], + [ + "restriction", + -15.105023384094238 + ], + [ + "▁EBOOK", + -15.105024337768556 + ], + [ + "Siemens", + -15.105027198791504 + ], + [ + "hormone", + -15.105027198791504 + ], + [ + "Puzzle", + -15.10502815246582 + ], + [ + "▁Walther", + -15.10502815246582 + ], + [ + "Sophia", + -15.105029106140137 + ], + [ + "Molecular", + -15.105030059814451 + ], + [ + "Oftentimes", + -15.105031967163086 + ], + [ + "▁papier", + -15.105031967163086 + ], + [ + "Ferguson", + -15.105035781860352 + ], + [ + "acetate", + -15.105035781860352 + ], + [ + "Neighborhood", + -15.10504150390625 + ], + [ + "anniversary", + -15.10504913330078 + ], + [ + "extraordinary", + -15.10504913330078 + ], + [ + "Ernest", + -15.10505199432373 + ], + [ + "iTunes", + -15.105056762695312 + ], + [ + "▁Godaddy", + -15.105064392089844 + ], + [ + "Gardner", + -15.105072975158691 + ], + [ + "Pearson", + -15.105072975158691 + ], + [ + "▁USATF", + -15.105076789855955 + ], + [ + "▁Brockton", + -15.10509967803955 + ], + [ + "henge", + -15.105143547058104 + ], + [ + "▁hysterically", + -15.105164527893066 + ], + [ + "▁GYM", + -15.105189323425291 + ], + [ + "Solomon", + -15.105195999145508 + ], + [ + "Warehouse", + -15.10520839691162 + ], + [ + "paved", + -15.105216026306152 + ], + [ + "uzhou", + -15.105216026306152 + ], + [ + "▁appraising", + -15.105223655700684 + ], + [ + "▁Upstream", + -15.105246543884276 + ], + [ + "Tasty", + -15.105270385742188 + ], + [ + "GLO", + -15.105277061462402 + ], + [ + "kanksha", + -15.105280876159668 + ], + [ + "▁clergyman", + -15.105284690856934 + ], + [ + "LTR", + -15.105295181274414 + ], + [ + "▁Sridhar", + -15.105298042297363 + ], + [ + "puzzle", + -15.105323791503906 + ], + [ + "▁Eritrean", + -15.10533046722412 + ], + [ + "specialized", + -15.105332374572754 + ], + [ + "▁Overdrive", + -15.105342864990234 + ], + [ + "adrenal", + -15.105344772338867 + ], + [ + 
"ancient", + -15.105385780334473 + ], + [ + "▁Jayden", + -15.105403900146484 + ], + [ + "▁Zeitung", + -15.105422019958496 + ], + [ + "BROOK", + -15.105432510375977 + ], + [ + "▁bandsaw", + -15.10544490814209 + ], + [ + "fastest", + -15.105448722839355 + ], + [ + "1840", + -15.10546875 + ], + [ + "invention", + -15.10547924041748 + ], + [ + "yorkshire", + -15.105485916137695 + ], + [ + "▁Perera", + -15.105486869812012 + ], + [ + "▁NCIS", + -15.105487823486328 + ], + [ + "▁Outward", + -15.105501174926758 + ], + [ + "▁QSL", + -15.105531692504885 + ], + [ + "▁Costner", + -15.105539321899414 + ], + [ + "▁Bergeron", + -15.105561256408691 + ], + [ + "misappropriation", + -15.105572700500488 + ], + [ + "▁dislodged", + -15.105606079101562 + ], + [ + "▁automaton", + -15.105619430541992 + ], + [ + "▁floundering", + -15.105624198913574 + ], + [ + "Properly", + -15.105676651000977 + ], + [ + "1888", + -15.105714797973633 + ], + [ + "Inspection", + -15.105719566345217 + ], + [ + "▁Earley", + -15.105727195739746 + ], + [ + "Kristi", + -15.105743408203123 + ], + [ + "architectural", + -15.10575008392334 + ], + [ + "Johann", + -15.105770111083984 + ], + [ + "▁#26", + -15.105786323547363 + ], + [ + "LOFT", + -15.10579776763916 + ], + [ + "limitation", + -15.105828285217283 + ], + [ + "Compassion", + -15.105897903442385 + ], + [ + "WTF", + -15.1058988571167 + ], + [ + "▁16.8", + -15.105916023254396 + ], + [ + "▁Neely", + -15.10601043701172 + ], + [ + "ribbon", + -15.106017112731934 + ], + [ + "▁subbed", + -15.10611343383789 + ], + [ + "Lynch", + -15.106127738952637 + ], + [ + "▁Visor", + -15.106155395507812 + ], + [ + "▁Epping", + -15.10625171661377 + ], + [ + "▁cherishes", + -15.106285095214844 + ], + [ + "▁Lundy", + -15.106343269348145 + ], + [ + "requested", + -15.106348991394045 + ], + [ + "▁$7.50", + -15.106409072875977 + ], + [ + "Officially", + -15.106475830078123 + ], + [ + "?????????", + -15.106515884399414 + ], + [ + "▁deflecting", + -15.106523513793944 + ], + [ + "Franchise", + -15.10653018951416 + ], + [ + "▁sinning", + -15.106643676757812 + ], + [ + "downtown", + -15.10670280456543 + ], + [ + "Immuno", + -15.106707572937012 + ], + [ + "▁FEAR", + -15.106746673583984 + ], + [ + "criminalization", + -15.106781959533691 + ], + [ + "9.8%", + -15.106810569763184 + ], + [ + "▁Plesk", + -15.106879234313965 + ], + [ + "▁Sisk", + -15.106989860534668 + ], + [ + "chandra", + -15.106990814208984 + ], + [ + "▁DEALS", + -15.10707950592041 + ], + [ + "collective", + -15.107141494750977 + ], + [ + "▁Faiz", + -15.107233047485352 + ], + [ + "▁Whittle", + -15.10724925994873 + ], + [ + "▁Cowen", + -15.107352256774902 + ], + [ + "PAK", + -15.1073579788208 + ], + [ + "Tamura", + -15.107501983642578 + ], + [ + "Telescop", + -15.107523918151855 + ], + [ + "Kesler", + -15.10757064819336 + ], + [ + "9:35", + -15.107596397399902 + ], + [ + "prasad", + -15.107858657836914 + ], + [ + "awah", + -15.107894897460938 + ], + [ + "▁SRV", + -15.108036994934082 + ], + [ + "▁Merch", + -15.108229637145996 + ], + [ + "bible", + -15.108296394348145 + ], + [ + "Archer", + -15.10830783843994 + ], + [ + "▁polygraph", + -15.108373641967772 + ], + [ + "2,600", + -15.108516693115234 + ], + [ + "[00:", + -15.10851764678955 + ], + [ + "▁16:5", + -15.108587265014648 + ], + [ + "AXS", + -15.10876178741455 + ], + [ + "ATIONAL", + -15.10887050628662 + ], + [ + "▁Edina", + -15.108879089355469 + ], + [ + "Hippo", + -15.108922958374023 + ], + [ + "▁DAO", + -15.108926773071287 + ], + [ + "▁EQT", + -15.1089448928833 + ], + [ + "▁Norge", + 
-15.109206199645996 + ], + [ + "Silva", + -15.109268188476562 + ], + [ + "BEY", + -15.109296798706056 + ], + [ + "PBX", + -15.109357833862305 + ], + [ + "TRIC", + -15.109447479248049 + ], + [ + "▁VIVA", + -15.109458923339844 + ], + [ + "▁cancelation", + -15.109859466552734 + ], + [ + "▁thrombo", + -15.109968185424805 + ], + [ + "POLL", + -15.11001968383789 + ], + [ + "▁Nyong", + -15.110118865966797 + ], + [ + "Serbia", + -15.110156059265137 + ], + [ + "▁USCG", + -15.110190391540527 + ], + [ + "committal", + -15.110252380371094 + ], + [ + "▁Helga", + -15.110528945922852 + ], + [ + "▁£26", + -15.110532760620115 + ], + [ + "▁Plein", + -15.110647201538086 + ], + [ + "Striving", + -15.110712051391602 + ], + [ + "▁Betis", + -15.110878944396973 + ], + [ + "▁Rudra", + -15.111035346984863 + ], + [ + "fender", + -15.111211776733398 + ], + [ + "▁Macaron", + -15.11127471923828 + ], + [ + "huan", + -15.11141300201416 + ], + [ + "luit", + -15.111443519592283 + ], + [ + "gather", + -15.111488342285156 + ], + [ + "1/2′′", + -15.111861228942873 + ], + [ + "▁28-30", + -15.111976623535156 + ], + [ + "PACT", + -15.11204433441162 + ], + [ + "ASIS", + -15.11216640472412 + ], + [ + "Demetri", + -15.112242698669434 + ], + [ + "▁Unveil", + -15.112457275390623 + ], + [ + "▁Frieden", + -15.112506866455078 + ], + [ + "▁Serenade", + -15.11256217956543 + ], + [ + "GRAPHIC", + -15.11256980895996 + ], + [ + "▁Alisha", + -15.112607955932615 + ], + [ + "nthic", + -15.112977981567385 + ], + [ + "pacific", + -15.112984657287598 + ], + [ + "6:13", + -15.11306381225586 + ], + [ + "▁printmaker", + -15.113065719604492 + ], + [ + "Burst", + -15.113279342651367 + ], + [ + "▁PROMO", + -15.11344051361084 + ], + [ + "▁Etching", + -15.113470077514648 + ], + [ + "2:52", + -15.11378002166748 + ], + [ + "2:33", + -15.113804817199709 + ], + [ + "▁40-45", + -15.11384105682373 + ], + [ + "▁infringer", + -15.11384105682373 + ], + [ + "1860", + -15.11387825012207 + ], + [ + "BANK", + -15.113995552062988 + ], + [ + "▁15:2", + -15.114086151123049 + ], + [ + "rocco", + -15.114105224609377 + ], + [ + "▁OLG", + -15.114237785339355 + ], + [ + "Adapting", + -15.114344596862791 + ], + [ + "scald", + -15.114455223083496 + ], + [ + "01/01/20", + -15.114670753479004 + ], + [ + "2:32", + -15.114739418029783 + ], + [ + "ileka", + -15.114808082580566 + ], + [ + "stealer", + -15.114871978759766 + ], + [ + "4444", + -15.114931106567385 + ], + [ + "▁foci", + -15.115036010742188 + ], + [ + "▁Whisker", + -15.115163803100586 + ], + [ + "LCL", + -15.11526870727539 + ], + [ + "▁ERM", + -15.115578651428224 + ], + [ + "yasa", + -15.115707397460938 + ], + [ + "▁Rizk", + -15.11575412750244 + ], + [ + "HAIR", + -15.11586093902588 + ], + [ + "9-33", + -15.115978240966797 + ], + [ + "HealthDay", + -15.115985870361328 + ], + [ + "AUTH", + -15.115986824035645 + ], + [ + "lvarez", + -15.116047859191896 + ], + [ + "4,100", + -15.116117477416992 + ], + [ + "▁disburse", + -15.116118431091309 + ], + [ + "LDL", + -15.11615753173828 + ], + [ + "▁Neri", + -15.116198539733888 + ], + [ + "THERE", + -15.116267204284668 + ], + [ + "▁energ", + -15.116548538208008 + ], + [ + "BAY", + -15.11656665802002 + ], + [ + "▁BIB", + -15.116753578186035 + ], + [ + "▁1769", + -15.116762161254885 + ], + [ + "lymphocyte", + -15.116811752319336 + ], + [ + "▁12:10", + -15.11685276031494 + ], + [ + "▁immunosuppressi", + -15.116951942443848 + ], + [ + "Compulsory", + -15.11695384979248 + ], + [ + "Enthusiasm", + -15.11695384979248 + ], + [ + "apoptotic", + -15.11695384979248 + ], + [ + "▁800-211-2519", + 
-15.11695384979248 + ], + [ + "▁800-313-5780", + -15.11695384979248 + ], + [ + "▁844-244-6166", + -15.11695384979248 + ], + [ + "▁ConocoPhillips", + -15.11695384979248 + ], + [ + "▁Farnsworth", + -15.11695384979248 + ], + [ + "▁Gehry", + -15.11695384979248 + ], + [ + "▁Hashanah", + -15.11695384979248 + ], + [ + "▁Kwazulu", + -15.11695384979248 + ], + [ + "▁McNulty", + -15.11695384979248 + ], + [ + "▁Menopause", + -15.11695384979248 + ], + [ + "▁Mezzanine", + -15.11695384979248 + ], + [ + "▁Okonkwo", + -15.11695384979248 + ], + [ + "▁REQUIREMENTS", + -15.11695384979248 + ], + [ + "▁Saguaro", + -15.11695384979248 + ], + [ + "▁Sorensen", + -15.11695384979248 + ], + [ + "▁Teixeira", + -15.11695384979248 + ], + [ + "▁Travolta", + -15.11695384979248 + ], + [ + "▁Triglyceride", + -15.11695384979248 + ], + [ + "▁Triomphe", + -15.11695384979248 + ], + [ + "▁WRITING", + -15.11695384979248 + ], + [ + "▁Whitsunday", + -15.11695384979248 + ], + [ + "▁admixture", + -15.11695384979248 + ], + [ + "▁anthracite", + -15.11695384979248 + ], + [ + "▁apotheke", + -15.11695384979248 + ], + [ + "▁arthroscopy", + -15.11695384979248 + ], + [ + "▁coffered", + -15.11695384979248 + ], + [ + "▁endocrinology", + -15.11695384979248 + ], + [ + "▁evaporating", + -15.11695384979248 + ], + [ + "▁favoritism", + -15.11695384979248 + ], + [ + "▁gnarly", + -15.11695384979248 + ], + [ + "▁grungy", + -15.11695384979248 + ], + [ + "▁habanero", + -15.11695384979248 + ], + [ + "▁hyperbaric", + -15.11695384979248 + ], + [ + "▁misshapen", + -15.11695384979248 + ], + [ + "▁muriatic", + -15.11695384979248 + ], + [ + "▁omniscient", + -15.11695384979248 + ], + [ + "▁pertussis", + -15.11695384979248 + ], + [ + "▁rockabilly", + -15.11695384979248 + ], + [ + "▁securitization", + -15.11695384979248 + ], + [ + "▁synthesizing", + -15.11695384979248 + ], + [ + "▁undiluted", + -15.11695384979248 + ], + [ + "HAHAHA", + -15.116954803466797 + ], + [ + "denuclearization", + -15.116954803466797 + ], + [ + "▁Epistle", + -15.116954803466797 + ], + [ + "▁Gadsden", + -15.116954803466797 + ], + [ + "▁Mizzou", + -15.116954803466797 + ], + [ + "▁Sociedad", + -15.116954803466797 + ], + [ + "▁seafaring", + -15.116954803466797 + ], + [ + "▁Aramaic", + -15.116955757141112 + ], + [ + "▁Takahashi", + -15.116955757141112 + ], + [ + "▁berserk", + -15.116955757141112 + ], + [ + "▁sprue", + -15.116955757141112 + ], + [ + "▁walrus", + -15.116955757141112 + ], + [ + "▁Billerica", + -15.11695671081543 + ], + [ + "▁amicably", + -15.11695671081543 + ], + [ + "▁pyrolysis", + -15.11695671081543 + ], + [ + "▁Havasu", + -15.116958618164062 + ], + [ + "▁EVERYWHERE", + -15.11695957183838 + ], + [ + "▁Izmir", + -15.11695957183838 + ], + [ + "▁Mahmood", + -15.11695957183838 + ], + [ + "▁Bahadur", + -15.116960525512695 + ], + [ + "▁Diagonal", + -15.116960525512695 + ], + [ + "▁TRADING", + -15.116960525512695 + ], + [ + "▁gouging", + -15.116960525512695 + ], + [ + "▁relent", + -15.116960525512695 + ], + [ + "▁Sagrada", + -15.116961479187012 + ], + [ + "▁Benelux", + -15.116962432861328 + ], + [ + "▁Expedited", + -15.11696434020996 + ], + [ + "▁Ferrara", + -15.11696434020996 + ], + [ + "▁Klimt", + -15.116965293884276 + ], + [ + "▁Moschino", + -15.116965293884276 + ], + [ + "▁Sigmund", + -15.116965293884276 + ], + [ + "▁disciplining", + -15.116965293884276 + ], + [ + "▁inundation", + -15.116965293884276 + ], + [ + "▁Stanislav", + -15.116966247558594 + ], + [ + "▁mercies", + -15.116966247558594 + ], + [ + "▁Carcass", + -15.11697006225586 + ], + [ + "▁spilt", + -15.116971015930176 + ], + 
[ + "▁Heaney", + -15.116971969604492 + ], + [ + "▁psychoanalysis", + -15.116971969604492 + ], + [ + "▁Sojourn", + -15.116972923278809 + ], + [ + "▁£300,000", + -15.116972923278809 + ], + [ + "▁configurator", + -15.116975784301758 + ], + [ + "brenner", + -15.11697769165039 + ], + [ + "▁Glimmer", + -15.11697769165039 + ], + [ + "▁Futurama", + -15.116986274719238 + ], + [ + "▁19:4", + -15.116988182067873 + ], + [ + "▁lanky", + -15.116992950439451 + ], + [ + "▁Alegre", + -15.116994857788086 + ], + [ + "▁(1961)", + -15.116997718811035 + ], + [ + "▁biofeedback", + -15.116998672485352 + ], + [ + "▁Nadella", + -15.1170015335083 + ], + [ + "▁transfixed", + -15.117010116577148 + ], + [ + "▁Browder", + -15.117011070251465 + ], + [ + "▁Aerosmith", + -15.11701488494873 + ], + [ + "▁(703)", + -15.117016792297363 + ], + [ + "NKJV", + -15.117018699645996 + ], + [ + "▁pronouncing", + -15.117020606994627 + ], + [ + "▁siesta", + -15.117020606994627 + ], + [ + "▁rencontre", + -15.11703109741211 + ], + [ + "▁Swimwear", + -15.117034912109377 + ], + [ + "▁fistula", + -15.117035865783691 + ], + [ + "▁Eyelash", + -15.117039680480955 + ], + [ + "▁nipping", + -15.117039680480955 + ], + [ + "▁Mizuno", + -15.117048263549805 + ], + [ + "▁Fuchsia", + -15.117053031921388 + ], + [ + "▁Woodbine", + -15.117053031921388 + ], + [ + "▁Sidhu", + -15.117055892944336 + ], + [ + "▁demure", + -15.117059707641602 + ], + [ + "▁Musique", + -15.117067337036133 + ], + [ + "▁tenancies", + -15.117067337036133 + ], + [ + "▁Retrograde", + -15.117072105407717 + ], + [ + "▁politicized", + -15.117074012756348 + ], + [ + "▁Gurney", + -15.11707878112793 + ], + [ + "▁Mohali", + -15.117080688476562 + ], + [ + "▁Omron", + -15.117080688476562 + ], + [ + "▁Stalker", + -15.117082595825195 + ], + [ + "▁paddies", + -15.117082595825195 + ], + [ + "▁Rainfall", + -15.117084503173828 + ], + [ + "▁Shriver", + -15.117093086242676 + ], + [ + "▁incitement", + -15.117101669311523 + ], + [ + "▁Proust", + -15.117104530334473 + ], + [ + "▁elderflower", + -15.117106437683104 + ], + [ + "▁dripped", + -15.117107391357422 + ], + [ + "▁engrained", + -15.117108345031738 + ], + [ + "▁Breakaway", + -15.117114067077637 + ], + [ + "▁$14.95", + -15.117122650146484 + ], + [ + "▁Lismore", + -15.117125511169434 + ], + [ + "▁dublin", + -15.117125511169434 + ], + [ + "▁Henkel", + -15.1171293258667 + ], + [ + "▁Plexus", + -15.117138862609863 + ], + [ + "▁Assistive", + -15.117143630981444 + ], + [ + "▁Finchley", + -15.117161750793455 + ], + [ + "▁horsemanship", + -15.117168426513672 + ], + [ + "▁Qasim", + -15.117178916931152 + ], + [ + "▁Juanita", + -15.117179870605469 + ], + [ + "▁Beltran", + -15.117181777954102 + ], + [ + "▁Seventies", + -15.117182731628418 + ], + [ + "▁Domenico", + -15.117185592651367 + ], + [ + "▁bioscience", + -15.117198944091797 + ], + [ + "▁Bodyguard", + -15.117199897766112 + ], + [ + "▁Rosslyn", + -15.117201805114746 + ], + [ + "▁disenchanted", + -15.117206573486328 + ], + [ + "▁marginalization", + -15.117229461669922 + ], + [ + "▁rhythmically", + -15.117241859436035 + ], + [ + "▁JoJo", + -15.117250442504885 + ], + [ + "▁Warlord", + -15.117262840270996 + ], + [ + "▁proportionality", + -15.117271423339844 + ], + [ + "▁BOGO", + -15.11730670928955 + ], + [ + "▁Asimov", + -15.117315292358398 + ], + [ + "NCING", + -15.117327690124512 + ], + [ + "▁GREY", + -15.11733627319336 + ], + [ + "▁Shariah", + -15.117342948913574 + ], + [ + "▁surrealism", + -15.117344856262209 + ], + [ + "illiard", + -15.117351531982422 + ], + [ + "▁ProQuest", + -15.117356300354004 + ], + [ 
+ "▁Seashore", + -15.117362022399902 + ], + [ + "fangled", + -15.117401123046877 + ], + [ + "▁DUAL", + -15.117436408996582 + ], + [ + "▁888-29", + -15.117446899414062 + ], + [ + "1.23", + -15.11746883392334 + ], + [ + "AUGHT", + -15.117487907409668 + ], + [ + "transcontinental", + -15.117487907409668 + ], + [ + "Confession", + -15.117497444152832 + ], + [ + "▁hummer", + -15.11751651763916 + ], + [ + "▁coinsurance", + -15.117560386657717 + ], + [ + "▁Armies", + -15.117561340332031 + ], + [ + "▁gnawing", + -15.117568969726562 + ], + [ + "jord", + -15.117608070373535 + ], + [ + "▁Arakan", + -15.117613792419434 + ], + [ + "Abram", + -15.117653846740724 + ], + [ + "AMZN", + -15.117687225341797 + ], + [ + "Beatriz", + -15.117694854736328 + ], + [ + "▁orginal", + -15.117713928222656 + ], + [ + "▁Canteen", + -15.117720603942873 + ], + [ + "karna", + -15.117738723754885 + ], + [ + "▁Betway", + -15.117740631103516 + ], + [ + "▁staunchly", + -15.117801666259766 + ], + [ + "Caffeine", + -15.117802619934082 + ], + [ + "Dessert", + -15.117806434631348 + ], + [ + "▁Standish", + -15.11780834197998 + ], + [ + "Confidence", + -15.117839813232422 + ], + [ + "Expansion", + -15.117840766906738 + ], + [ + "occurrence", + -15.117846488952637 + ], + [ + "junior", + -15.117852210998535 + ], + [ + "Autism", + -15.117853164672852 + ], + [ + "Lambert", + -15.11785888671875 + ], + [ + "Suzuki", + -15.117860794067385 + ], + [ + "celebrity", + -15.1178617477417 + ], + [ + "paradise", + -15.1178617477417 + ], + [ + "Alexandra", + -15.117865562438965 + ], + [ + "Cellular", + -15.11786651611328 + ], + [ + "Gazette", + -15.11786651611328 + ], + [ + "Legendary", + -15.117875099182127 + ], + [ + "Attendance", + -15.117883682250977 + ], + [ + "▁Sadness", + -15.117883682250977 + ], + [ + "Tunnel", + -15.117884635925291 + ], + [ + "▁plundered", + -15.11788558959961 + ], + [ + "▁Mears", + -15.117895126342772 + ], + [ + "Tension", + -15.117910385131836 + ], + [ + "▁Hoskins", + -15.117911338806152 + ], + [ + "depression", + -15.117968559265137 + ], + [ + "excuse", + -15.117968559265137 + ], + [ + "premier", + -15.11798095703125 + ], + [ + "▁murmured", + -15.117986679077148 + ], + [ + "NOAA", + -15.117996215820312 + ], + [ + "Witnesses", + -15.117999076843262 + ], + [ + "PWR", + -15.118000984191896 + ], + [ + "▁codenamed", + -15.118061065673828 + ], + [ + "▁creaking", + -15.118099212646484 + ], + [ + "▁Sunita", + -15.118108749389648 + ], + [ + "playground", + -15.118122100830078 + ], + [ + "Scanner", + -15.118130683898926 + ], + [ + "▁deadpan", + -15.11815071105957 + ], + [ + "uvet", + -15.118163108825684 + ], + [ + "▁siloed", + -15.11818790435791 + ], + [ + "Modified", + -15.11820125579834 + ], + [ + "▁distiller", + -15.118206977844238 + ], + [ + "▁$99.99", + -15.118234634399414 + ], + [ + "▁21-23", + -15.118239402770996 + ], + [ + "▁Collinsville", + -15.118242263793944 + ], + [ + "▁Diddy", + -15.118298530578612 + ], + [ + "▁144,000", + -15.11832332611084 + ], + [ + "PMT", + -15.11834716796875 + ], + [ + "Hairspray", + -15.11835479736328 + ], + [ + "Starring", + -15.118390083312988 + ], + [ + "karcher", + -15.11839199066162 + ], + [ + "platter", + -15.118419647216797 + ], + [ + "▁Degas", + -15.118452072143556 + ], + [ + "▁SFU", + -15.118489265441896 + ], + [ + "▁Negan", + -15.118505477905272 + ], + [ + "Pixar", + -15.118515968322754 + ], + [ + "▁shrieking", + -15.118521690368652 + ], + [ + "▁Lapid", + -15.11852741241455 + ], + [ + "▁jiggle", + -15.1185302734375 + ], + [ + "▁Winehouse", + -15.118599891662598 + ], + [ + "▁equalised", 
+ -15.11865520477295 + ], + [ + "▁02:2", + -15.118669509887695 + ], + [ + "Composition", + -15.11868953704834 + ], + [ + "Matches", + -15.118744850158691 + ], + [ + "▁Foodbank", + -15.118769645690918 + ], + [ + "notification", + -15.118776321411133 + ], + [ + "Carmen", + -15.118804931640623 + ], + [ + "▁Glazer", + -15.11882781982422 + ], + [ + "▁nestling", + -15.118864059448242 + ], + [ + "▁Kamer", + -15.118878364562988 + ], + [ + "▁teardown", + -15.118988037109377 + ], + [ + "▁STATEMENT", + -15.118996620178224 + ], + [ + "5:32", + -15.119070053100586 + ], + [ + "SCOPE", + -15.119085311889648 + ], + [ + "▁waterslide", + -15.11909294128418 + ], + [ + "▁$195", + -15.119107246398926 + ], + [ + "▁metalworking", + -15.119214057922363 + ], + [ + "Nutritional", + -15.11927890777588 + ], + [ + "▁Salvadoran", + -15.119468688964844 + ], + [ + "PRB", + -15.119513511657717 + ], + [ + "diario", + -15.119587898254396 + ], + [ + "LUMIN", + -15.119619369506836 + ], + [ + "geoscience", + -15.1196870803833 + ], + [ + "▁Mahara", + -15.119722366333008 + ], + [ + "etsky", + -15.119755744934082 + ], + [ + "Pune", + -15.119756698608398 + ], + [ + "▁NVQ", + -15.119768142700195 + ], + [ + "▁Spree", + -15.11981964111328 + ], + [ + "▁musing", + -15.11989402770996 + ], + [ + "HOPE", + -15.119958877563477 + ], + [ + "▁Lawrie", + -15.119977951049805 + ], + [ + "braid", + -15.12001609802246 + ], + [ + "▁Lancia", + -15.120134353637695 + ], + [ + "=0.00", + -15.120266914367676 + ], + [ + "▁Comprise", + -15.120416641235352 + ], + [ + "Soothe", + -15.120498657226562 + ], + [ + "▁Enhancer", + -15.120570182800291 + ], + [ + "mutual", + -15.120771408081056 + ], + [ + "▁Kristoff", + -15.120827674865724 + ], + [ + "RRT", + -15.120879173278809 + ], + [ + "▁PAIR", + -15.120898246765137 + ], + [ + "▁logotype", + -15.12096881866455 + ], + [ + "▁consul", + -15.12114143371582 + ], + [ + "▁Caribe", + -15.121253967285156 + ], + [ + "CHT", + -15.1212739944458 + ], + [ + "▁stabiliser", + -15.12132453918457 + ], + [ + "karma", + -15.121509552001951 + ], + [ + "9:13", + -15.121511459350586 + ], + [ + "priya", + -15.121642112731934 + ], + [ + "▁modder", + -15.121710777282717 + ], + [ + "BAH", + -15.121726989746094 + ], + [ + "warden", + -15.121968269348145 + ], + [ + "2:39", + -15.122017860412598 + ], + [ + "1925", + -15.122124671936035 + ], + [ + "▁Lennar", + -15.122135162353516 + ], + [ + "▁Juju", + -15.122175216674805 + ], + [ + "2:38", + -15.12217903137207 + ], + [ + "cannon", + -15.122373580932615 + ], + [ + "URY", + -15.122476577758787 + ], + [ + "▁Limburg", + -15.122514724731444 + ], + [ + "deaf", + -15.12259292602539 + ], + [ + "▁Juba", + -15.122644424438477 + ], + [ + "22-0", + -15.122653007507324 + ], + [ + "▁Rediscover", + -15.12279987335205 + ], + [ + "verein", + -15.122888565063477 + ], + [ + "▁TSG", + -15.123021125793455 + ], + [ + "REVIEW", + -15.123271942138672 + ], + [ + "curse", + -15.123332023620604 + ], + [ + "21-24", + -15.123418807983398 + ], + [ + "2.05", + -15.12350368499756 + ], + [ + "▁Bucc", + -15.123634338378906 + ], + [ + "▁Alhaj", + -15.123804092407228 + ], + [ + "9.30", + -15.123879432678224 + ], + [ + "0991", + -15.123900413513184 + ], + [ + "afra", + -15.12392807006836 + ], + [ + "▁Klub", + -15.124155044555664 + ], + [ + "▁Keira", + -15.12416934967041 + ], + [ + "▁Kelp", + -15.124177932739258 + ], + [ + "struc", + -15.124343872070312 + ], + [ + "▁SADC", + -15.124369621276855 + ], + [ + "▁PTR", + -15.12438678741455 + ], + [ + "▁Madhav", + -15.124438285827637 + ], + [ + "1040", + -15.12456512451172 + ], + [ + 
"emeber", + -15.124642372131348 + ], + [ + "▁hypertext", + -15.12473487854004 + ], + [ + "afir", + -15.124874114990234 + ], + [ + "00.000", + -15.124879837036133 + ], + [ + "▁Croke", + -15.12492561340332 + ], + [ + "▁Zafar", + -15.12512493133545 + ], + [ + "0:10", + -15.125371932983398 + ], + [ + "▁Fujian", + -15.12539291381836 + ], + [ + "1394", + -15.125596046447754 + ], + [ + "aggregate", + -15.125781059265137 + ], + [ + "▁Rood", + -15.125868797302246 + ], + [ + "Logix", + -15.125900268554688 + ], + [ + "500/-", + -15.125978469848633 + ], + [ + "▁commandeer", + -15.126129150390623 + ], + [ + "Lapse", + -15.126157760620115 + ], + [ + "aaaaaaaa", + -15.12625217437744 + ], + [ + "▁diferent", + -15.126422882080078 + ], + [ + "▁Teil", + -15.126457214355469 + ], + [ + "▁Magni", + -15.126672744750977 + ], + [ + "▁necess", + -15.126715660095217 + ], + [ + "▁Strider", + -15.126940727233888 + ], + [ + "SIGHT", + -15.12695026397705 + ], + [ + ".07.201", + -15.127079963684082 + ], + [ + "Splitting", + -15.127310752868652 + ], + [ + "▁07:00", + -15.127534866333008 + ], + [ + "▁ritualistic", + -15.127598762512209 + ], + [ + "▁MKT", + -15.127650260925291 + ], + [ + "▁Odu", + -15.127740859985352 + ], + [ + "Initialize", + -15.127800941467283 + ], + [ + "Denomina", + -15.127967834472656 + ], + [ + "44-4", + -15.128009796142578 + ], + [ + "▁45-60", + -15.1280517578125 + ], + [ + "MUL", + -15.12835693359375 + ], + [ + "▁Schulte", + -15.128409385681152 + ], + [ + "▁CELEBR", + -15.128538131713867 + ], + [ + "1.68", + -15.128665924072266 + ], + [ + "carlo", + -15.128765106201172 + ], + [ + "▁£800", + -15.128896713256836 + ], + [ + "▁eavesdrop", + -15.128904342651367 + ], + [ + "uritan", + -15.128968238830566 + ], + [ + "ortho", + -15.129055976867676 + ], + [ + "▁Revell", + -15.129264831542969 + ], + [ + "▁pandan", + -15.129281997680664 + ], + [ + "▁vicarious", + -15.12937831878662 + ], + [ + "▁15:4", + -15.12944507598877 + ], + [ + "Yama", + -15.129450798034668 + ], + [ + "1893", + -15.129487991333008 + ], + [ + "▁cephal", + -15.129584312438965 + ], + [ + "ASUS", + -15.12959098815918 + ], + [ + "millimetre", + -15.129599571228027 + ], + [ + "Procedural", + -15.12961196899414 + ], + [ + "▁Birkenstock", + -15.12961196899414 + ], + [ + "▁Bulawayo", + -15.12961196899414 + ], + [ + "▁Cappuccino", + -15.12961196899414 + ], + [ + "▁Dunbartonshire", + -15.12961196899414 + ], + [ + "▁Elektra", + -15.12961196899414 + ], + [ + "▁Esmeralda", + -15.12961196899414 + ], + [ + "▁Fabregas", + -15.12961196899414 + ], + [ + "▁Giordano", + -15.12961196899414 + ], + [ + "▁Klondike", + -15.12961196899414 + ], + [ + "▁Langkawi", + -15.12961196899414 + ], + [ + "▁McFarlane", + -15.12961196899414 + ], + [ + "▁Montepulciano", + -15.12961196899414 + ], + [ + "▁Pimlico", + -15.12961196899414 + ], + [ + "▁Ristorante", + -15.12961196899414 + ], + [ + "▁Suboxone", + -15.12961196899414 + ], + [ + "▁Terracotta", + -15.12961196899414 + ], + [ + "▁Wraith", + -15.12961196899414 + ], + [ + "▁chihuahua", + -15.12961196899414 + ], + [ + "▁chromosomal", + -15.12961196899414 + ], + [ + "▁confederation", + -15.12961196899414 + ], + [ + "▁degreaser", + -15.12961196899414 + ], + [ + "▁disobedient", + -15.12961196899414 + ], + [ + "▁erudite", + -15.12961196899414 + ], + [ + "▁flurries", + -15.12961196899414 + ], + [ + "▁garbanzo", + -15.12961196899414 + ], + [ + "▁gargoyle", + -15.12961196899414 + ], + [ + "▁hydrolyzed", + -15.12961196899414 + ], + [ + "▁hyundai", + -15.12961196899414 + ], + [ + "▁interleukin", + -15.12961196899414 + ], + [ + 
"▁jostling", + -15.12961196899414 + ], + [ + "▁kinesiology", + -15.12961196899414 + ], + [ + "▁lewisham", + -15.12961196899414 + ], + [ + "▁paralysed", + -15.12961196899414 + ], + [ + "▁polythene", + -15.12961196899414 + ], + [ + "▁purgatory", + -15.12961196899414 + ], + [ + "▁unconscionable", + -15.12961196899414 + ], + [ + "▁unimpeded", + -15.12961196899414 + ], + [ + "▁unkempt", + -15.12961196899414 + ], + [ + "▁unquestioned", + -15.12961196899414 + ], + [ + "Annihilation", + -15.129612922668455 + ], + [ + "Intriguing", + -15.129612922668455 + ], + [ + "Necessity", + -15.129612922668455 + ], + [ + "▁Aishwarya", + -15.129612922668455 + ], + [ + "▁Feminism", + -15.129612922668455 + ], + [ + "▁Fonseca", + -15.129612922668455 + ], + [ + "▁HSDPA", + -15.129612922668455 + ], + [ + "▁Interlaken", + -15.129612922668455 + ], + [ + "▁Thrasher", + -15.129612922668455 + ], + [ + "▁Treasuries", + -15.129612922668455 + ], + [ + "▁WEDDING", + -15.129612922668455 + ], + [ + "▁angioplasty", + -15.129612922668455 + ], + [ + "▁endocrinologist", + -15.129612922668455 + ], + [ + "▁gTLD", + -15.129612922668455 + ], + [ + "▁gynecomastia", + -15.129612922668455 + ], + [ + "▁heretofore", + -15.129612922668455 + ], + [ + "▁operetta", + -15.129612922668455 + ], + [ + "▁oxazepam", + -15.129612922668455 + ], + [ + "▁suffocation", + -15.129612922668455 + ], + [ + "Affordability", + -15.129613876342772 + ], + [ + "▁Balearic", + -15.129613876342772 + ], + [ + "▁Crichton", + -15.129613876342772 + ], + [ + "▁Teriyaki", + -15.129613876342772 + ], + [ + "▁raffia", + -15.129613876342772 + ], + [ + "▁reawaken", + -15.129613876342772 + ], + [ + "Reputable", + -15.12961483001709 + ], + [ + "▁knoll", + -15.12961483001709 + ], + [ + "▁Bourdain", + -15.129615783691406 + ], + [ + "▁Stratocaster", + -15.129615783691406 + ], + [ + "▁mutilated", + -15.129615783691406 + ], + [ + "▁24*7", + -15.129616737365724 + ], + [ + "▁Giacomo", + -15.129616737365724 + ], + [ + "▁cordoned", + -15.129616737365724 + ], + [ + "▁OUTDOOR", + -15.12961769104004 + ], + [ + "▁TRICARE", + -15.129618644714355 + ], + [ + "▁Impaired", + -15.129619598388672 + ], + [ + "▁Kravitz", + -15.129619598388672 + ], + [ + "▁ADVANCED", + -15.129620552062988 + ], + [ + "▁Chickpea", + -15.129621505737305 + ], + [ + "▁telugu", + -15.129621505737305 + ], + [ + "▁lignin", + -15.12962245941162 + ], + [ + "▁telephonic", + -15.12962245941162 + ], + [ + "▁incognito", + -15.129624366760254 + ], + [ + "▁parasailing", + -15.129624366760254 + ], + [ + "Gastrointestinal", + -15.12962532043457 + ], + [ + "▁stereoscopic", + -15.12962818145752 + ], + [ + "▁supposition", + -15.12962818145752 + ], + [ + "▁infamy", + -15.129631042480469 + ], + [ + "▁Vicksburg", + -15.129631996154783 + ], + [ + "▁clamshell", + -15.12963581085205 + ], + [ + "▁heparin", + -15.129636764526367 + ], + [ + "▁Dzong", + -15.129637718200684 + ], + [ + "▁Fidget", + -15.129637718200684 + ], + [ + "▁corollary", + -15.129639625549316 + ], + [ + "▁marbling", + -15.129639625549316 + ], + [ + "▁Kinross", + -15.12964153289795 + ], + [ + "▁Carolinian", + -15.129644393920898 + ], + [ + "▁cctv", + -15.12964916229248 + ], + [ + "▁Optimism", + -15.129653930664062 + ], + [ + "▁Peony", + -15.129656791687012 + ], + [ + "▁Potluck", + -15.129656791687012 + ], + [ + "▁bubblegum", + -15.129657745361328 + ], + [ + "▁Spezia", + -15.129667282104492 + ], + [ + "▁WILEY", + -15.129676818847656 + ], + [ + "▁Sartre", + -15.129679679870604 + ], + [ + "▁SDRAM", + -15.129680633544922 + ], + [ + "▁abalone", + -15.129683494567873 + ], + [ + "utha", 
+ -15.129687309265137 + ], + [ + "▁Garvey", + -15.129688262939451 + ], + [ + "▁Lindbergh", + -15.129688262939451 + ], + [ + "▁Tagore", + -15.12968921661377 + ], + [ + "▁EXIF", + -15.129690170288086 + ], + [ + "▁Cipher", + -15.12969207763672 + ], + [ + "▁Lancome", + -15.1296968460083 + ], + [ + "▁Velux", + -15.1296968460083 + ], + [ + "▁chloroplast", + -15.129698753356934 + ], + [ + "▁jerry", + -15.1297025680542 + ], + [ + "▁Rosenstein", + -15.129709243774414 + ], + [ + "▁malady", + -15.129712104797363 + ], + [ + "▁Equalizer", + -15.12971305847168 + ], + [ + "▁Rubbed", + -15.129714012145996 + ], + [ + "▁headwaters", + -15.129714965820312 + ], + [ + "▁Trulia", + -15.129717826843262 + ], + [ + "▁Galore", + -15.12972927093506 + ], + [ + "▁revoking", + -15.129738807678224 + ], + [ + "▁Clipping", + -15.12976360321045 + ], + [ + "▁Nitric", + -15.129770278930664 + ], + [ + "▁VRBO", + -15.129773139953612 + ], + [ + "▁microRNA", + -15.129776000976562 + ], + [ + "▁Grainger", + -15.129779815673828 + ], + [ + "▁Turkmen", + -15.129788398742676 + ], + [ + "scotland", + -15.129803657531738 + ], + [ + "▁praiseworthy", + -15.129803657531738 + ], + [ + "▁Burgos", + -15.129806518554688 + ], + [ + "▁Pilsen", + -15.129806518554688 + ], + [ + "▁microglia", + -15.129806518554688 + ], + [ + "▁Reggio", + -15.12981128692627 + ], + [ + "▁Exmouth", + -15.129815101623535 + ], + [ + "▁$59.99", + -15.1298246383667 + ], + [ + "▁THREAD", + -15.129853248596191 + ], + [ + "▁Ponies", + -15.129865646362305 + ], + [ + "▁ghouls", + -15.129865646362305 + ], + [ + "▁Romanticism", + -15.129899978637695 + ], + [ + "▁Andalusian", + -15.129907608032228 + ], + [ + "▁ylang", + -15.12991428375244 + ], + [ + "▁terpenes", + -15.129919052124023 + ], + [ + "▁ewes", + -15.129927635192873 + ], + [ + "▁eyewitnesses", + -15.129934310913086 + ], + [ + "▁DiSC", + -15.129937171936035 + ], + [ + "▁droopy", + -15.129940032958984 + ], + [ + "▁Crackdown", + -15.129969596862791 + ], + [ + "▁Marple", + -15.129998207092283 + ], + [ + "▁JUMP", + -15.130011558532717 + ], + [ + "▁inferiority", + -15.13001537322998 + ], + [ + "6.35", + -15.130075454711914 + ], + [ + "▁Portico", + -15.130081176757812 + ], + [ + "▁Lurie", + -15.130082130432127 + ], + [ + "▁flirtation", + -15.130109786987305 + ], + [ + "▁brandishing", + -15.130118370056152 + ], + [ + "▁crouched", + -15.130123138427734 + ], + [ + "▁Blondie", + -15.13012409210205 + ], + [ + "▁Staley", + -15.13014030456543 + ], + [ + "▁Hanford", + -15.130181312561035 + ], + [ + "ectomies", + -15.130196571350098 + ], + [ + "▁Dinah", + -15.130242347717283 + ], + [ + "RADA", + -15.130261421203612 + ], + [ + "▁27-28", + -15.130329132080078 + ], + [ + "▁Underwear", + -15.130337715148926 + ], + [ + "▁Kimchi", + -15.130352973937988 + ], + [ + "5:11", + -15.130355834960938 + ], + [ + "▁noncompliance", + -15.130411148071287 + ], + [ + "▁satiated", + -15.13042449951172 + ], + [ + "▁lutein", + -15.130427360534668 + ], + [ + "▁sappy", + -15.130453109741213 + ], + [ + "▁trashcan", + -15.130481719970703 + ], + [ + "▁delimited", + -15.130494117736816 + ], + [ + "▁transposed", + -15.130502700805664 + ], + [ + "▁Mariota", + -15.130545616149902 + ], + [ + "▁Ariane", + -15.13056468963623 + ], + [ + "competent", + -15.130606651306152 + ], + [ + "▁bionic", + -15.13064670562744 + ], + [ + "▁moisturised", + -15.130671501159668 + ], + [ + "▁invertebrate", + -15.130776405334473 + ], + [ + "declared", + -15.13079071044922 + ], + [ + "▁sardine", + -15.13079833984375 + ], + [ + "Primarily", + -15.130800247192385 + ], + [ + "LONDON", + 
-15.130817413330078 + ], + [ + "carbohydrate", + -15.130819320678713 + ], + [ + "Excited", + -15.13082218170166 + ], + [ + "illustrator", + -15.13082504272461 + ], + [ + "▁Starcraft", + -15.130828857421877 + ], + [ + "Avatar", + -15.130830764770508 + ], + [ + "wrapping", + -15.130831718444824 + ], + [ + "Overseas", + -15.13083553314209 + ], + [ + "territorial", + -15.130837440490724 + ], + [ + "Hydraulic", + -15.13083839416504 + ], + [ + "celebration", + -15.13083839416504 + ], + [ + "Strategies", + -15.130841255187988 + ], + [ + "Florist", + -15.130845069885254 + ], + [ + "Lebanon", + -15.13084602355957 + ], + [ + "▁musky", + -15.130847930908203 + ], + [ + "Jerusalem", + -15.13084888458252 + ], + [ + "Sanchez", + -15.130850791931152 + ], + [ + "Toilet", + -15.130852699279783 + ], + [ + "nucleotide", + -15.130857467651367 + ], + [ + "Precious", + -15.130866050720217 + ], + [ + "Gauge", + -15.130868911743164 + ], + [ + "Jelly", + -15.130871772766112 + ], + [ + "▁Libertyville", + -15.130871772766112 + ], + [ + "Wayfair", + -15.130874633789062 + ], + [ + "flexibility", + -15.130877494812012 + ], + [ + "▁squinting", + -15.13088321685791 + ], + [ + "Population", + -15.13088607788086 + ], + [ + "Queue", + -15.130892753601074 + ], + [ + "▁posited", + -15.130898475646973 + ], + [ + "USCIS", + -15.130914688110352 + ], + [ + "▁Verity", + -15.13092803955078 + ], + [ + "▁Deccan", + -15.130945205688477 + ], + [ + "▁brussel", + -15.13095474243164 + ], + [ + "▁spillover", + -15.1309814453125 + ], + [ + "Widget", + -15.130987167358398 + ], + [ + "clearance", + -15.130996704101562 + ], + [ + "▁Cinder", + -15.131021499633787 + ], + [ + "Inspirational", + -15.131023406982422 + ], + [ + "precise", + -15.131024360656738 + ], + [ + "cartoon", + -15.131058692932127 + ], + [ + "BLUE", + -15.131096839904783 + ], + [ + "▁McFa", + -15.131114959716797 + ], + [ + "01:00", + -15.13114070892334 + ], + [ + "Marilyn", + -15.131155014038086 + ], + [ + "▁blushes", + -15.131173133850098 + ], + [ + "6.30", + -15.131187438964844 + ], + [ + "▁revue", + -15.131239891052246 + ], + [ + "publicized", + -15.131277084350586 + ], + [ + "Springfield", + -15.13129997253418 + ], + [ + "Wife", + -15.131315231323242 + ], + [ + "Alpes", + -15.131322860717772 + ], + [ + "Strictly", + -15.131365776062012 + ], + [ + "3900", + -15.13137435913086 + ], + [ + "fingerprint", + -15.131414413452148 + ], + [ + "Pedic", + -15.13143253326416 + ], + [ + "Captcha", + -15.131449699401855 + ], + [ + "grep", + -15.131494522094728 + ], + [ + "MISC", + -15.131500244140623 + ], + [ + "▁inducement", + -15.131516456604004 + ], + [ + "▁Pré", + -15.13152027130127 + ], + [ + "▁Sandhill", + -15.131587982177734 + ], + [ + "▁Flew", + -15.131591796875 + ], + [ + "▁Nutter", + -15.131673812866213 + ], + [ + "7:21", + -15.131780624389648 + ], + [ + "▁Wishlist", + -15.131808280944824 + ], + [ + "▁Corbi", + -15.131905555725098 + ], + [ + "Disseminat", + -15.131975173950195 + ], + [ + "▁gastroenterolog", + -15.131982803344728 + ], + [ + "Composer", + -15.132002830505373 + ], + [ + "Marketers", + -15.13205337524414 + ], + [ + "▁EARN", + -15.132054328918455 + ], + [ + "TNF", + -15.132211685180664 + ], + [ + "▁1779", + -15.13222885131836 + ], + [ + "▁Ninh", + -15.132259368896484 + ], + [ + "▁Technol", + -15.132312774658203 + ], + [ + "LPR", + -15.132315635681152 + ], + [ + "Lawyer", + -15.132375717163086 + ], + [ + "▁Prati", + -15.132424354553224 + ], + [ + "VPP", + -15.132492065429688 + ], + [ + "serialize", + -15.1325044631958 + ], + [ + "▁Backpacker", + -15.132614135742188 + 
], + [ + "Funded", + -15.132625579833984 + ], + [ + "radol", + -15.132682800292969 + ], + [ + "DRIVE", + -15.13271427154541 + ], + [ + "0.37", + -15.1327543258667 + ], + [ + "62%", + -15.132789611816406 + ], + [ + "INCH", + -15.132858276367188 + ], + [ + "▁rgb", + -15.132863998413086 + ], + [ + "Pose", + -15.13300609588623 + ], + [ + "▁Sigur", + -15.133070945739746 + ], + [ + "▁Rafe", + -15.133206367492676 + ], + [ + "brimmed", + -15.133222579956056 + ], + [ + "Stranger", + -15.133379936218262 + ], + [ + "94%", + -15.133408546447754 + ], + [ + "▁Kenworth", + -15.13352394104004 + ], + [ + "JCP", + -15.133600234985352 + ], + [ + "4:24", + -15.133611679077148 + ], + [ + "Halloran", + -15.133678436279297 + ], + [ + "▁cataclysmic", + -15.13375186920166 + ], + [ + "▁mosey", + -15.133916854858398 + ], + [ + "▁Cassell", + -15.133917808532717 + ], + [ + "Brighton", + -15.134018898010254 + ], + [ + "rotate", + -15.134060859680176 + ], + [ + "UPON", + -15.134076118469238 + ], + [ + "mpkins", + -15.134090423583984 + ], + [ + "udha", + -15.134273529052734 + ], + [ + "▁Grier", + -15.134318351745604 + ], + [ + "▁Ketu", + -15.134455680847168 + ], + [ + "JRS", + -15.134614944458008 + ], + [ + "▁Suki", + -15.134660720825195 + ], + [ + "2:58", + -15.134743690490724 + ], + [ + "adilla", + -15.134830474853516 + ], + [ + "▁Poser", + -15.134980201721191 + ], + [ + "▁Lusk", + -15.135091781616213 + ], + [ + "MICH", + -15.135189056396484 + ], + [ + "▁Microbiol", + -15.135213851928713 + ], + [ + "urse", + -15.135290145874023 + ], + [ + "6:19", + -15.13534927368164 + ], + [ + "▁BFG", + -15.135420799255373 + ], + [ + "btc", + -15.13548469543457 + ], + [ + "kauf", + -15.135541915893556 + ], + [ + "1.1.1", + -15.135547637939451 + ], + [ + "▁Holler", + -15.135639190673828 + ], + [ + "ekker", + -15.135966300964355 + ], + [ + "bacon", + -15.136082649230955 + ], + [ + "vap", + -15.13609790802002 + ], + [ + "▁Chapo", + -15.136136054992676 + ], + [ + "KHz", + -15.136468887329102 + ], + [ + "1562", + -15.136579513549805 + ], + [ + "▁Dolph", + -15.136582374572754 + ], + [ + "ARGO", + -15.136920928955078 + ], + [ + "▁Mulan", + -15.137052536010742 + ], + [ + "▁Shiver", + -15.137054443359377 + ], + [ + "ESR", + -15.137104988098145 + ], + [ + "lowrider", + -15.137211799621582 + ], + [ + "▁Differ", + -15.137226104736328 + ], + [ + "▁Haro", + -15.137322425842283 + ], + [ + "▁Troupe", + -15.137386322021484 + ], + [ + "riger", + -15.137487411499023 + ], + [ + "strup", + -15.137520790100098 + ], + [ + "0.38", + -15.137749671936035 + ], + [ + "podge", + -15.13780117034912 + ], + [ + "OCT", + -15.13803768157959 + ], + [ + "▁mamma", + -15.138038635253906 + ], + [ + "▁Pyar", + -15.138063430786133 + ], + [ + "▁Matz", + -15.13814640045166 + ], + [ + "▁tema", + -15.13835620880127 + ], + [ + "▁coving", + -15.138357162475586 + ], + [ + "▁$999", + -15.138402938842772 + ], + [ + "▁Teru", + -15.13856029510498 + ], + [ + "AHB", + -15.138809204101562 + ], + [ + "▁Nasri", + -15.138916015625 + ], + [ + "▁?????????????", + -15.138975143432615 + ], + [ + "▁mournful", + -15.139171600341797 + ], + [ + "▁Hoku", + -15.139233589172363 + ], + [ + "uomo", + -15.13957691192627 + ], + [ + "lewski", + -15.139631271362305 + ], + [ + "▁Greig", + -15.139668464660645 + ], + [ + "▁Enumerat", + -15.139745712280272 + ], + [ + "Franchi", + -15.139761924743652 + ], + [ + "▁Arista", + -15.140074729919434 + ], + [ + "▁Rask", + -15.140288352966309 + ], + [ + "▁Ursa", + -15.14036464691162 + ], + [ + "JOB", + -15.140432357788086 + ], + [ + "(1):1", + -15.1405029296875 + ], + [ 
+ "▁Petrie", + -15.14055633544922 + ], + [ + "▁Naba", + -15.14057159423828 + ], + [ + "▁TYR", + -15.140619277954102 + ], + [ + "▁$16,000", + -15.140861511230469 + ], + [ + "nstig", + -15.14093017578125 + ], + [ + "▁disbelieve", + -15.141090393066406 + ], + [ + "▁Zanu", + -15.14126968383789 + ], + [ + "▁Tunic", + -15.141534805297852 + ], + [ + "Alta", + -15.141609191894531 + ], + [ + "Helm", + -15.141619682312012 + ], + [ + "▁Brion", + -15.141719818115234 + ], + [ + "qw", + -15.141789436340332 + ], + [ + "▁disenfranchise", + -15.14182949066162 + ], + [ + "▁paralyze", + -15.14183235168457 + ], + [ + "▁dystopia", + -15.14187240600586 + ], + [ + "ouge", + -15.141913414001465 + ], + [ + "maran", + -15.141969680786133 + ], + [ + "linden", + -15.142013549804688 + ], + [ + "▁embolden", + -15.142034530639648 + ], + [ + "▁Parson", + -15.142044067382812 + ], + [ + "MIKE", + -15.142057418823242 + ], + [ + "▁chomp", + -15.14208984375 + ], + [ + "▁Croce", + -15.142144203186035 + ], + [ + "IPPA", + -15.142215728759766 + ], + [ + "4/18", + -15.142257690429688 + ], + [ + "▁Disadvantage", + -15.142261505126951 + ], + [ + "mandibula", + -15.142411231994627 + ], + [ + "Arginine", + -15.14243221282959 + ], + [ + "▁Cagayan", + -15.14243221282959 + ], + [ + "▁Otolaryngology", + -15.14243221282959 + ], + [ + "▁Petoskey", + -15.14243221282959 + ], + [ + "▁propriocepti", + -15.14243221282959 + ], + [ + "▁turbidity", + -15.14243221282959 + ], + [ + "PHILADELPHIA", + -15.142433166503906 + ], + [ + "habersham", + -15.142433166503906 + ], + [ + "proliferative", + -15.142433166503906 + ], + [ + "▁Anheuser", + -15.142433166503906 + ], + [ + "▁Aqueduct", + -15.142433166503906 + ], + [ + "▁Bereavement", + -15.142433166503906 + ], + [ + "▁Catawba", + -15.142433166503906 + ], + [ + "▁Conservatoire", + -15.142433166503906 + ], + [ + "▁Copacabana", + -15.142433166503906 + ], + [ + "▁Disappearing", + -15.142433166503906 + ], + [ + "▁Fassbender", + -15.142433166503906 + ], + [ + "▁Hotchkis", + -15.142433166503906 + ], + [ + "▁Humidifier", + -15.142433166503906 + ], + [ + "▁Hydroxide", + -15.142433166503906 + ], + [ + "▁Impairment", + -15.142433166503906 + ], + [ + "▁Internacional", + -15.142433166503906 + ], + [ + "▁Lafarge", + -15.142433166503906 + ], + [ + "▁Licensure", + -15.142433166503906 + ], + [ + "▁Mnangagwa", + -15.142433166503906 + ], + [ + "▁Morecambe", + -15.142433166503906 + ], + [ + "▁Okavango", + -15.142433166503906 + ], + [ + "▁Plovdiv", + -15.142433166503906 + ], + [ + "▁Plunkett", + -15.142433166503906 + ], + [ + "▁Pondicherry", + -15.142433166503906 + ], + [ + "▁Sirisena", + -15.142433166503906 + ], + [ + "▁Specsavers", + -15.142433166503906 + ], + [ + "▁Spectroscopy", + -15.142433166503906 + ], + [ + "▁Spirulina", + -15.142433166503906 + ], + [ + "▁Stimulation", + -15.142433166503906 + ], + [ + "▁Terminology", + -15.142433166503906 + ], + [ + "▁Voorhees", + -15.142433166503906 + ], + [ + "▁adjudged", + -15.142433166503906 + ], + [ + "▁adversities", + -15.142433166503906 + ], + [ + "▁anachronistic", + -15.142433166503906 + ], + [ + "▁anonymised", + -15.142433166503906 + ], + [ + "▁bibliographies", + -15.142433166503906 + ], + [ + "▁blasphemous", + -15.142433166503906 + ], + [ + "▁bludgeon", + -15.142433166503906 + ], + [ + "▁bureaucracies", + -15.142433166503906 + ], + [ + "▁conjunctivitis", + -15.142433166503906 + ], + [ + "▁culpability", + -15.142433166503906 + ], + [ + "▁debenture", + -15.142433166503906 + ], + [ + "▁dissociation", + -15.142433166503906 + ], + [ + "▁encyclopedic", + -15.142433166503906 + ], 
+ [ + "▁eosinophil", + -15.142433166503906 + ], + [ + "▁faeces", + -15.142433166503906 + ], + [ + "▁hysteresis", + -15.142433166503906 + ], + [ + "▁manoeuvring", + -15.142433166503906 + ], + [ + "▁mesmerised", + -15.142433166503906 + ], + [ + "▁militancy", + -15.142433166503906 + ], + [ + "▁monetarily", + -15.142433166503906 + ], + [ + "▁nouvelle", + -15.142433166503906 + ], + [ + "▁predominance", + -15.142433166503906 + ], + [ + "▁recalcitrant", + -15.142433166503906 + ], + [ + "▁scoundrel", + -15.142433166503906 + ], + [ + "▁trombonist", + -15.142433166503906 + ], + [ + "▁vanadium", + -15.142433166503906 + ], + [ + "▁vindicated", + -15.142433166503906 + ], + [ + "THERAPY", + -15.142434120178224 + ], + [ + "▁Allotment", + -15.142434120178224 + ], + [ + "▁Detachment", + -15.142434120178224 + ], + [ + "▁Giancarlo", + -15.142434120178224 + ], + [ + "▁Osiris", + -15.142434120178224 + ], + [ + "▁cosmonaut", + -15.142434120178224 + ], + [ + "▁typist", + -15.142434120178224 + ], + [ + "▁MANAGER", + -15.14243507385254 + ], + [ + "▁delimiter", + -15.14243507385254 + ], + [ + "▁wholesaling", + -15.14243507385254 + ], + [ + "UNRWA", + -15.142436981201172 + ], + [ + "▁tidings", + -15.142437934875488 + ], + [ + "▁3/16\"", + -15.142438888549805 + ], + [ + "▁Leapfrog", + -15.142438888549805 + ], + [ + "▁Accutane", + -15.142440795898438 + ], + [ + "▁AngularJS", + -15.142440795898438 + ], + [ + "▁epilogue", + -15.142440795898438 + ], + [ + "▁gilbert", + -15.142440795898438 + ], + [ + "▁monofilament", + -15.142440795898438 + ], + [ + "▁tidied", + -15.142440795898438 + ], + [ + "▁craggy", + -15.142441749572754 + ], + [ + "▁disassembling", + -15.14244270324707 + ], + [ + "▁hemophilia", + -15.14244270324707 + ], + [ + "▁Gaurav", + -15.142443656921388 + ], + [ + "▁Bhushan", + -15.142446517944336 + ], + [ + "otulinum", + -15.142449378967283 + ], + [ + "▁Poshmark", + -15.142450332641602 + ], + [ + "▁reductive", + -15.142450332641602 + ], + [ + "▁Womack", + -15.142451286315918 + ], + [ + "▁Archivist", + -15.142452239990234 + ], + [ + "▁Bouncy", + -15.142452239990234 + ], + [ + "▁Goofy", + -15.142452239990234 + ], + [ + "▁Ouija", + -15.142452239990234 + ], + [ + "▁cardiopulmonary", + -15.142452239990234 + ], + [ + "▁Pikmin", + -15.14245319366455 + ], + [ + "▁Tripadvisor", + -15.14245319366455 + ], + [ + "▁Viscount", + -15.14245319366455 + ], + [ + "▁awestruck", + -15.14245319366455 + ], + [ + "▁Aileen", + -15.142455101013184 + ], + [ + "▁relativism", + -15.142457008361816 + ], + [ + "▁WOMAN", + -15.142457962036133 + ], + [ + "▁Rakesh", + -15.14245891571045 + ], + [ + "▁mortified", + -15.142459869384766 + ], + [ + "▁Herrmann", + -15.142460823059082 + ], + [ + "▁Zimbra", + -15.142460823059082 + ], + [ + "ffmpeg", + -15.142464637756348 + ], + [ + "▁Senegalese", + -15.142465591430664 + ], + [ + "▁slatted", + -15.14246654510498 + ], + [ + "SHADOW", + -15.142470359802246 + ], + [ + "▁Benfica", + -15.142470359802246 + ], + [ + "▁Pillsbury", + -15.142470359802246 + ], + [ + "▁Ardern", + -15.142473220825195 + ], + [ + "▁(1962)", + -15.142474174499512 + ], + [ + "Automating", + -15.142475128173828 + ], + [ + "▁Restructuring", + -15.142476081848145 + ], + [ + "▁squaring", + -15.142476081848145 + ], + [ + "▁hydrostatic", + -15.14248275756836 + ], + [ + "▁Pompey", + -15.142486572265623 + ], + [ + "voltaic", + -15.142491340637209 + ], + [ + "▁transduction", + -15.142491340637209 + ], + [ + "▁Wattpad", + -15.142496109008787 + ], + [ + "▁Braddock", + -15.142497062683104 + ], + [ + "▁WinZip", + -15.142498970031738 + ], + [ + 
"▁télé", + -15.142498970031738 + ], + [ + "▁Palawan", + -15.142499923706056 + ], + [ + "▁synergistically", + -15.142501831054688 + ], + [ + "▁Schafer", + -15.142502784729004 + ], + [ + "▁Cadiz", + -15.142505645751951 + ], + [ + "▁shimmy", + -15.142513275146484 + ], + [ + "▁Rauner", + -15.14252471923828 + ], + [ + "▁vulgaris", + -15.14253044128418 + ], + [ + "▁gullies", + -15.142533302307127 + ], + [ + "▁unrealized", + -15.142539978027344 + ], + [ + "▁PROTECTION", + -15.142544746398926 + ], + [ + "▁Nunez", + -15.14254665374756 + ], + [ + "▁Munoz", + -15.142552375793455 + ], + [ + "▁2007-2008", + -15.142559051513672 + ], + [ + "▁translocation", + -15.142562866210938 + ], + [ + "▁idiomatic", + -15.142568588256836 + ], + [ + "▁Transnational", + -15.142577171325684 + ], + [ + "▁tannic", + -15.142578125 + ], + [ + "FUSION", + -15.142581939697266 + ], + [ + "▁(0-0)", + -15.142582893371582 + ], + [ + "▁Hynes", + -15.14259147644043 + ], + [ + "▁Dekker", + -15.142592430114746 + ], + [ + "▁Duchy", + -15.142592430114746 + ], + [ + "RUSSIA", + -15.142596244812012 + ], + [ + "▁Camelback", + -15.142601013183594 + ], + [ + "▁Solvent", + -15.14260196685791 + ], + [ + "nastas", + -15.142602920532228 + ], + [ + "▁Crusoe", + -15.142607688903809 + ], + [ + "▁telnet", + -15.142624855041504 + ], + [ + "▁rosehip", + -15.142632484436035 + ], + [ + "▁Lucius", + -15.142643928527832 + ], + [ + "▁IPSec", + -15.142651557922363 + ], + [ + "▁ADDED", + -15.14266872406006 + ], + [ + "▁Beltsville", + -15.142674446105955 + ], + [ + "▁WordCamp", + -15.142683029174805 + ], + [ + "▁Felice", + -15.142708778381348 + ], + [ + "▁Leduc", + -15.142722129821776 + ], + [ + "▁Scenery", + -15.14272403717041 + ], + [ + "▁abridged", + -15.142728805541992 + ], + [ + "▁Tahitian", + -15.142738342285156 + ], + [ + "▁aftershave", + -15.142738342285156 + ], + [ + "▁Annenberg", + -15.142741203308104 + ], + [ + "▁Hatchback", + -15.142769813537598 + ], + [ + "▁01:5", + -15.142772674560549 + ], + [ + "▁Reardon", + -15.142792701721191 + ], + [ + "▁Hightower", + -15.14279842376709 + ], + [ + "▁divot", + -15.142828941345217 + ], + [ + "▁cutesy", + -15.142861366271973 + ], + [ + "▁Seguin", + -15.142885208129885 + ], + [ + "▁braising", + -15.142888069152832 + ], + [ + "▁Oceanfront", + -15.142939567565918 + ], + [ + "▁Gulfport", + -15.142962455749512 + ], + [ + "▁bodega", + -15.142969131469728 + ], + [ + "▁discordant", + -15.142992973327637 + ], + [ + "▁Hippie", + -15.142996788024902 + ], + [ + "▁Weatherford", + -15.143033981323242 + ], + [ + "▁snooping", + -15.143047332763672 + ], + [ + "▁ECTS", + -15.143062591552734 + ], + [ + "▁Hendersonville", + -15.143085479736328 + ], + [ + "▁sputtering", + -15.143088340759276 + ], + [ + "▁crouching", + -15.143095016479492 + ], + [ + "▁downtrodden", + -15.143117904663086 + ], + [ + "▁Tizen", + -15.1431245803833 + ], + [ + "neoplastic", + -15.14312744140625 + ], + [ + "▁immobilized", + -15.14313507080078 + ], + [ + "▁scolding", + -15.143176078796388 + ], + [ + "▁Uruguayan", + -15.143197059631348 + ], + [ + "▁PeopleSoft", + -15.143221855163574 + ], + [ + "▁Iliad", + -15.14322280883789 + ], + [ + "▁misrepresenting", + -15.143256187438965 + ], + [ + "▁YPG", + -15.143258094787598 + ], + [ + "▁Schneiderman", + -15.143271446228027 + ], + [ + "▁testicular", + -15.143272399902344 + ], + [ + "▁curr", + -15.14328670501709 + ], + [ + "▁gutting", + -15.143301963806152 + ], + [ + "▁Undertaker", + -15.143317222595217 + ], + [ + "▁hymnal", + -15.143327713012695 + ], + [ + "Naga", + -15.143338203430176 + ], + [ + "▁Boathouse", + 
-15.14335823059082 + ], + [ + "▁Tandy", + -15.143397331237791 + ], + [ + "▁piezoelectric", + -15.14342975616455 + ], + [ + "▁$270", + -15.14344882965088 + ], + [ + "▁hostesses", + -15.143454551696776 + ], + [ + "▁Marlowe", + -15.14346694946289 + ], + [ + "▁adamantly", + -15.143494606018066 + ], + [ + "▁indented", + -15.143494606018066 + ], + [ + "Esta", + -15.143502235412598 + ], + [ + "Graft", + -15.14356517791748 + ], + [ + "▁$179", + -15.143576622009276 + ], + [ + "▁Blacksmith", + -15.143596649169922 + ], + [ + "Feather", + -15.14362335205078 + ], + [ + "▁Catlin", + -15.14364242553711 + ], + [ + "Insect", + -15.14365577697754 + ], + [ + "▁Yamato", + -15.143675804138184 + ], + [ + "▁plucky", + -15.143685340881348 + ], + [ + "advise", + -15.143689155578612 + ], + [ + "1878", + -15.143704414367676 + ], + [ + "▁clearinghouse", + -15.14371109008789 + ], + [ + "Carrot", + -15.143776893615724 + ], + [ + "3-27", + -15.143784523010254 + ], + [ + "▁boldest", + -15.143796920776367 + ], + [ + "itaire", + -15.143913269042969 + ], + [ + "▁Kantor", + -15.143914222717283 + ], + [ + "credential", + -15.14396858215332 + ], + [ + "HAPPY", + -15.143969535827637 + ], + [ + "Patience", + -15.14397430419922 + ], + [ + "Hungry", + -15.143977165222168 + ], + [ + "nourished", + -15.143978118896484 + ], + [ + "neighbor", + -15.14398193359375 + ], + [ + "Dubbed", + -15.143988609313965 + ], + [ + "Violence", + -15.143996238708496 + ], + [ + "Tribune", + -15.143998146057127 + ], + [ + "Aberdeen", + -15.143999099731444 + ], + [ + "monetary", + -15.144007682800291 + ], + [ + "proprietary", + -15.144007682800291 + ], + [ + "conviction", + -15.14402961730957 + ], + [ + "dividend", + -15.144049644470217 + ], + [ + "Universities", + -15.144057273864746 + ], + [ + "Verified", + -15.144064903259276 + ], + [ + "Undergraduate", + -15.144073486328123 + ], + [ + "Jungle", + -15.144086837768556 + ], + [ + "Addiction", + -15.144125938415527 + ], + [ + "▁MSDS", + -15.144197463989258 + ], + [ + "Darkest", + -15.144214630126951 + ], + [ + "44%", + -15.144227027893066 + ], + [ + "unfinished", + -15.144227027893066 + ], + [ + "Transformation", + -15.14423942565918 + ], + [ + "Reject", + -15.14426612854004 + ], + [ + "WBC", + -15.144269943237305 + ], + [ + "▁Cookery", + -15.144271850585938 + ], + [ + "▁$4.3", + -15.144281387329102 + ], + [ + "Nikki", + -15.14428997039795 + ], + [ + "whenever", + -15.144304275512695 + ], + [ + "continuity", + -15.144338607788086 + ], + [ + "▁reaffirming", + -15.144354820251465 + ], + [ + "Breeze", + -15.144397735595703 + ], + [ + "Singing", + -15.144476890563965 + ], + [ + "Wiley", + -15.14448356628418 + ], + [ + "Richardson", + -15.144490242004396 + ], + [ + "▁serenade", + -15.144502639770508 + ], + [ + "Peterson", + -15.144646644592283 + ], + [ + "1.88", + -15.144658088684082 + ], + [ + "eclipse", + -15.144762992858888 + ], + [ + "jual", + -15.144794464111328 + ], + [ + "BRAND", + -15.144815444946287 + ], + [ + "▁Serif", + -15.1450777053833 + ], + [ + "▁Cueva", + -15.145109176635742 + ], + [ + "▁scree", + -15.145112991333008 + ], + [ + "0003", + -15.145150184631348 + ], + [ + "Fool", + -15.145153045654297 + ], + [ + "hagen", + -15.145184516906738 + ], + [ + "▁kohl", + -15.145187377929688 + ], + [ + "▁Marque", + -15.145459175109863 + ], + [ + "Outlet", + -15.145622253417969 + ], + [ + "▁$375", + -15.145649909973145 + ], + [ + "1:34", + -15.14573860168457 + ], + [ + "steem", + -15.145771026611328 + ], + [ + "FROM", + -15.14580249786377 + ], + [ + "luckily", + -15.14588451385498 + ], + [ + "8-17", + 
-15.14599609375 + ], + [ + "▁oppor", + -15.146016120910645 + ], + [ + "Orbit", + -15.146039962768556 + ], + [ + "▁PKR", + -15.14606761932373 + ], + [ + "zquez", + -15.146100044250488 + ], + [ + "▁Dutta", + -15.146111488342283 + ], + [ + "dienst", + -15.146145820617676 + ], + [ + "villian", + -15.146162033081056 + ], + [ + "Herbal", + -15.146265029907228 + ], + [ + "▁VSP", + -15.146281242370604 + ], + [ + "▁leuco", + -15.146306037902832 + ], + [ + "gnome", + -15.146331787109377 + ], + [ + "Nicknamed", + -15.146406173706056 + ], + [ + "charity", + -15.14640998840332 + ], + [ + "Realization", + -15.14651584625244 + ], + [ + "Dodge", + -15.14657497406006 + ], + [ + "▁LIV", + -15.14678955078125 + ], + [ + "Insightful", + -15.146897315979004 + ], + [ + "Hanlon", + -15.14707088470459 + ], + [ + "SHARE", + -15.147212028503418 + ], + [ + "▁Rehman", + -15.147587776184082 + ], + [ + "▁20:5", + -15.147820472717283 + ], + [ + "Sponsoring", + -15.14787769317627 + ], + [ + "▁Pizz", + -15.14804744720459 + ], + [ + "Battling", + -15.14826488494873 + ], + [ + "▁weitere", + -15.148351669311523 + ], + [ + "▁Brasilia", + -15.14857006072998 + ], + [ + "▁Kanda", + -15.148578643798828 + ], + [ + "whit", + -15.148603439331056 + ], + [ + "Gonz", + -15.14866542816162 + ], + [ + "appen", + -15.148674011230469 + ], + [ + "▁Gustin", + -15.1488037109375 + ], + [ + "▁Pigg", + -15.148880958557127 + ], + [ + "Shipper", + -15.148988723754885 + ], + [ + "LIX", + -15.149063110351562 + ], + [ + "fasten", + -15.149063110351562 + ], + [ + "▁specie", + -15.149145126342772 + ], + [ + "Nested", + -15.149166107177734 + ], + [ + "uvan", + -15.149266242980955 + ], + [ + "icolo", + -15.149335861206056 + ], + [ + "Chess", + -15.14992332458496 + ], + [ + "SDLT", + -15.150307655334473 + ], + [ + "iffle", + -15.15042209625244 + ], + [ + "0.36", + -15.150497436523438 + ], + [ + "0.31", + -15.150565147399902 + ], + [ + "▁Kiyo", + -15.15060806274414 + ], + [ + "EQUI", + -15.150665283203123 + ], + [ + "▁Zakaria", + -15.15078830718994 + ], + [ + "Bengal", + -15.150872230529783 + ], + [ + "PnP", + -15.15087604522705 + ], + [ + "ographically", + -15.151074409484863 + ], + [ + "FDG", + -15.15123176574707 + ], + [ + "▁Polite", + -15.15123462677002 + ], + [ + "▁Qibla", + -15.15123462677002 + ], + [ + "HADM", + -15.151455879211426 + ], + [ + "▁alumina", + -15.151623725891112 + ], + [ + "▁Castellan", + -15.151741981506348 + ], + [ + "▁19:0", + -15.151761054992676 + ], + [ + "Castro", + -15.151925086975098 + ], + [ + "NAK", + -15.15212345123291 + ], + [ + "Float", + -15.152154922485352 + ], + [ + "▁shipowner", + -15.152227401733398 + ], + [ + "uuu", + -15.152332305908203 + ], + [ + "?!?!?!", + -15.152349472045898 + ], + [ + "▁Damar", + -15.152466773986816 + ], + [ + "▁Garba", + -15.152482986450195 + ], + [ + "▁candleli", + -15.152541160583496 + ], + [ + "Piper", + -15.152748107910156 + ], + [ + "▁Feller", + -15.152828216552734 + ], + [ + "▁lept", + -15.152832984924316 + ], + [ + "cardio", + -15.152939796447754 + ], + [ + "▁Sauc", + -15.153013229370115 + ], + [ + "Clement", + -15.153156280517578 + ], + [ + "▁Chandon", + -15.15351104736328 + ], + [ + "Hatch", + -15.15373420715332 + ], + [ + "▁Bluebell", + -15.15377712249756 + ], + [ + "raza", + -15.15379238128662 + ], + [ + "lotti", + -15.153912544250488 + ], + [ + "umbi", + -15.153916358947754 + ], + [ + "iflora", + -15.154000282287598 + ], + [ + "Colorful", + -15.15401840209961 + ], + [ + "▁gynecologic", + -15.15431308746338 + ], + [ + "▁Kurz", + -15.15454387664795 + ], + [ + "terrelationship", + 
-15.154581069946287 + ], + [ + "CONTINU", + -15.154654502868652 + ], + [ + "8:55", + -15.154688835144045 + ], + [ + "infidel", + -15.154739379882812 + ], + [ + "▁7404", + -15.154756546020508 + ], + [ + "▁crystallize", + -15.154881477355955 + ], + [ + "gaya", + -15.154905319213867 + ], + [ + "▁expeditious", + -15.155176162719728 + ], + [ + "▁Biochem", + -15.155177116394045 + ], + [ + "▁05:4", + -15.15524196624756 + ], + [ + "numeric", + -15.155281066894531 + ], + [ + "▁empiric", + -15.155295372009276 + ], + [ + "prev", + -15.155298233032228 + ], + [ + "▁HBM", + -15.155355453491213 + ], + [ + "WOO", + -15.155387878417969 + ], + [ + "Confucian", + -15.155418395996094 + ], + [ + "Distracted", + -15.15541934967041 + ], + [ + "▁Aldeburgh", + -15.15541934967041 + ], + [ + "▁Coelho", + -15.15541934967041 + ], + [ + "▁Fallujah", + -15.15541934967041 + ], + [ + "▁FanDuel", + -15.15541934967041 + ], + [ + "▁Voldemort", + -15.15541934967041 + ], + [ + "▁Yanukovych", + -15.15541934967041 + ], + [ + "▁uncooperative", + -15.15541934967041 + ], + [ + "▁unexplainable", + -15.15541934967041 + ], + [ + "▁APPLIANCE", + -15.155420303344728 + ], + [ + "▁Alessandra", + -15.155420303344728 + ], + [ + "▁Anthropological", + -15.155420303344728 + ], + [ + "▁Assertion", + -15.155420303344728 + ], + [ + "▁Barangay", + -15.155420303344728 + ], + [ + "▁Boniface", + -15.155420303344728 + ], + [ + "▁Contingent", + -15.155420303344728 + ], + [ + "▁Jaffray", + -15.155420303344728 + ], + [ + "▁Kabbalah", + -15.155420303344728 + ], + [ + "▁Lannister", + -15.155420303344728 + ], + [ + "▁Mowbray", + -15.155420303344728 + ], + [ + "▁Okidata", + -15.155420303344728 + ], + [ + "▁Resuscitation", + -15.155420303344728 + ], + [ + "▁Sinfonia", + -15.155420303344728 + ], + [ + "▁Songkran", + -15.155420303344728 + ], + [ + "▁Tocopherol", + -15.155420303344728 + ], + [ + "▁Whidbey", + -15.155420303344728 + ], + [ + "▁abdicate", + -15.155420303344728 + ], + [ + "▁asymptotic", + -15.155420303344728 + ], + [ + "▁calendula", + -15.155420303344728 + ], + [ + "▁concatenate", + -15.155420303344728 + ], + [ + "▁consecration", + -15.155420303344728 + ], + [ + "▁cortisone", + -15.155420303344728 + ], + [ + "▁despondent", + -15.155420303344728 + ], + [ + "▁disfigurement", + -15.155420303344728 + ], + [ + "▁emulsify", + -15.155420303344728 + ], + [ + "▁encapsulating", + -15.155420303344728 + ], + [ + "▁extrapolation", + -15.155420303344728 + ], + [ + "▁hobbled", + -15.155420303344728 + ], + [ + "▁hollandaise", + -15.155420303344728 + ], + [ + "▁incubating", + -15.155420303344728 + ], + [ + "▁insurrection", + -15.155420303344728 + ], + [ + "▁interminable", + -15.155420303344728 + ], + [ + "▁mercantile", + -15.155420303344728 + ], + [ + "▁monopolistic", + -15.155420303344728 + ], + [ + "▁neodymium", + -15.155420303344728 + ], + [ + "▁sprucing", + -15.155420303344728 + ], + [ + "▁sycamore", + -15.155420303344728 + ], + [ + "▁unaccounted", + -15.155420303344728 + ], + [ + "▁Cunha", + -15.155421257019045 + ], + [ + "▁cataloguing", + -15.155421257019045 + ], + [ + "▁Manteca", + -15.15542221069336 + ], + [ + "▁terabytes", + -15.15542221069336 + ], + [ + "▁glycosides", + -15.155423164367676 + ], + [ + "Sahitya", + -15.155424118041992 + ], + [ + "▁Lichtenstein", + -15.155424118041992 + ], + [ + "▁Physiological", + -15.155424118041992 + ], + [ + "▁bartending", + -15.155424118041992 + ], + [ + "▁Hydrochloride", + -15.155425071716309 + ], + [ + "▁Gangtok", + -15.155426025390623 + ], + [ + "▁Travelocity", + -15.155426025390623 + ], + [ + "▁cytometry", + 
-15.155426025390623 + ], + [ + "▁stranglehold", + -15.155426025390623 + ], + [ + "▁Anabaptist", + -15.15542697906494 + ], + [ + "▁Remnant", + -15.15542697906494 + ], + [ + "▁idaho", + -15.15542697906494 + ], + [ + "▁plodding", + -15.155427932739258 + ], + [ + "▁Quizzes", + -15.155428886413574 + ], + [ + "▁Voigt", + -15.155428886413574 + ], + [ + "▁Discontinued", + -15.155431747436523 + ], + [ + "▁Photonics", + -15.155431747436523 + ], + [ + "▁Bouchard", + -15.15543270111084 + ], + [ + "▁CASINO", + -15.15543270111084 + ], + [ + "▁Wofford", + -15.15543270111084 + ], + [ + "▁Arndt", + -15.155433654785156 + ], + [ + "HENRY", + -15.155434608459473 + ], + [ + "▁hydrophilic", + -15.155434608459473 + ], + [ + "▁Celgene", + -15.155435562133787 + ], + [ + "▁WEATHER", + -15.155436515808104 + ], + [ + "▁hematology", + -15.155437469482422 + ], + [ + "▁citalopram", + -15.155438423156738 + ], + [ + "▁lasagne", + -15.155438423156738 + ], + [ + "▁reconvene", + -15.155438423156738 + ], + [ + "▁heartstrings", + -15.155441284179688 + ], + [ + "▁mimicked", + -15.155442237854004 + ], + [ + "▁GUESS", + -15.155444145202637 + ], + [ + "cephaly", + -15.15544605255127 + ], + [ + "▁splotch", + -15.15544891357422 + ], + [ + "/03/2019", + -15.155451774597168 + ], + [ + "▁Frieze", + -15.155451774597168 + ], + [ + "▁Hofmann", + -15.15545654296875 + ], + [ + "▁sullen", + -15.155457496643066 + ], + [ + "▁Evangeline", + -15.155460357666016 + ], + [ + "▁XVIII", + -15.155461311340332 + ], + [ + "▁lignite", + -15.155463218688965 + ], + [ + "▁Colfax", + -15.155468940734863 + ], + [ + "▁Carlsberg", + -15.155472755432127 + ], + [ + "▁Skylanders", + -15.155473709106444 + ], + [ + "▁womenswear", + -15.155473709106444 + ], + [ + "▁windswept", + -15.155479431152344 + ], + [ + "▁catalysis", + -15.155488014221191 + ], + [ + "▁(25%)", + -15.15549373626709 + ], + [ + "▁rabble", + -15.155497550964355 + ], + [ + "▁$49.95", + -15.155506134033203 + ], + [ + "▁Quicksilver", + -15.155511856079102 + ], + [ + "▁RAAF", + -15.15551471710205 + ], + [ + "▁Extinguisher", + -15.155516624450684 + ], + [ + "▁edifying", + -15.155542373657228 + ], + [ + "▁untrusted", + -15.155545234680176 + ], + [ + "▁erratically", + -15.155548095703123 + ], + [ + "▁UNLV", + -15.155550956726074 + ], + [ + "▁Microscope", + -15.155552864074709 + ], + [ + "▁Consular", + -15.155558586120604 + ], + [ + "EATER", + -15.155574798583984 + ], + [ + "▁INDEX", + -15.155580520629885 + ], + [ + "▁Francophone", + -15.155598640441896 + ], + [ + "▁Mantua", + -15.155609130859377 + ], + [ + "UAV", + -15.155621528625488 + ], + [ + "Melanoma", + -15.15562343597412 + ], + [ + "▁Cannibal", + -15.15563678741455 + ], + [ + "▁Sardis", + -15.155670166015623 + ], + [ + "▁Lottie", + -15.15567398071289 + ], + [ + "stellen", + -15.155677795410156 + ], + [ + "▁remortgage", + -15.155677795410156 + ], + [ + "8:36", + -15.155691146850586 + ], + [ + "▁revelry", + -15.155696868896484 + ], + [ + "Aligned", + -15.155704498291016 + ], + [ + "▁skirmishes", + -15.155734062194824 + ], + [ + "▁caramelised", + -15.155747413635254 + ], + [ + "▁Tramway", + -15.155786514282228 + ], + [ + "▁plaintext", + -15.155799865722656 + ], + [ + "▁Aerosol", + -15.155828475952148 + ], + [ + "▁Msgr", + -15.15583610534668 + ], + [ + "▁Reimer", + -15.155850410461426 + ], + [ + "▁synchronised", + -15.155858039855955 + ], + [ + "▁camino", + -15.155861854553224 + ], + [ + "▁Undercover", + -15.155863761901855 + ], + [ + "▁unambiguously", + -15.155928611755373 + ], + [ + "Bitumen", + -15.15593719482422 + ], + [ + "▁Munson", + 
-15.15593719482422 + ], + [ + "▁adsense", + -15.156024932861328 + ], + [ + "▁remitted", + -15.15607738494873 + ], + [ + "▁Furry", + -15.156105995178224 + ], + [ + "▁Cookware", + -15.156147003173828 + ], + [ + "MSH", + -15.156149864196776 + ], + [ + "▁terrorize", + -15.15615177154541 + ], + [ + "▁Keynesian", + -15.156169891357422 + ], + [ + "▁Arles", + -15.156173706054688 + ], + [ + "▁Sinner", + -15.156185150146484 + ], + [ + "▁Latour", + -15.156190872192385 + ], + [ + "▁Glenmore", + -15.156203269958496 + ], + [ + "▁UNSW", + -15.15622329711914 + ], + [ + "▁Casita", + -15.156230926513672 + ], + [ + "▁excruciatingly", + -15.156237602233888 + ], + [ + "▁SunPower", + -15.15627670288086 + ], + [ + "▁+61", + -15.156277656555176 + ], + [ + "▁authenticating", + -15.15628719329834 + ], + [ + "▁Interventional", + -15.156298637390137 + ], + [ + "0-240", + -15.156307220458984 + ], + [ + "▁farmworkers", + -15.156307220458984 + ], + [ + "▁balked", + -15.156309127807615 + ], + [ + "▁FLIP", + -15.15633773803711 + ], + [ + "▁$14,000", + -15.156341552734377 + ], + [ + "▁Amended", + -15.15636920928955 + ], + [ + "▁Cutlery", + -15.156464576721191 + ], + [ + "▁Flannel", + -15.156490325927734 + ], + [ + "▁preteen", + -15.156493186950684 + ], + [ + "▁Langton", + -15.15653133392334 + ], + [ + "oxane", + -15.156546592712402 + ], + [ + "▁Gerardo", + -15.156577110290527 + ], + [ + "MDL", + -15.156579971313477 + ], + [ + "▁replanting", + -15.156608581542969 + ], + [ + "▁Gough", + -15.156620979309082 + ], + [ + "▁BOMB", + -15.156662940979004 + ], + [ + "jacob", + -15.156698226928713 + ], + [ + "▁brasserie", + -15.156731605529783 + ], + [ + "0006", + -15.156818389892578 + ], + [ + "▁$4.6", + -15.156890869140623 + ], + [ + "9-99", + -15.156892776489258 + ], + [ + "▁millisecond", + -15.156954765319824 + ], + [ + "confirm", + -15.156994819641112 + ], + [ + "▁Doggy", + -15.15700912475586 + ], + [ + "▁Sweetene", + -15.157029151916504 + ], + [ + "▁Doerr", + -15.157081604003906 + ], + [ + "decimal", + -15.157201766967772 + ], + [ + "victorian", + -15.157282829284668 + ], + [ + "physicist", + -15.1572904586792 + ], + [ + "fructose", + -15.15731143951416 + ], + [ + "Antoine", + -15.157312393188477 + ], + [ + "navigation", + -15.157312393188477 + ], + [ + "Lonely", + -15.15731716156006 + ], + [ + "Claudia", + -15.157319068908691 + ], + [ + "Eternal", + -15.157319068908691 + ], + [ + "Audible", + -15.15732192993164 + ], + [ + "relieving", + -15.157323837280272 + ], + [ + "Lorenzo", + -15.157325744628906 + ], + [ + "opposite", + -15.157325744628906 + ], + [ + "decrease", + -15.157326698303224 + ], + [ + "facilitate", + -15.157326698303224 + ], + [ + "concentration", + -15.15732765197754 + ], + [ + "maturity", + -15.15732765197754 + ], + [ + "Einstein", + -15.157328605651855 + ], + [ + "-0-1", + -15.157331466674805 + ], + [ + "Horizontal", + -15.157331466674805 + ], + [ + "Myanmar", + -15.15733528137207 + ], + [ + "turquoise", + -15.15733528137207 + ], + [ + "Adelaide", + -15.157336235046388 + ], + [ + "collapse", + -15.157341957092283 + ], + [ + "colleague", + -15.157347679138184 + ], + [ + "glorious", + -15.157352447509766 + ], + [ + "▁DECT", + -15.157352447509766 + ], + [ + "▁Walz", + -15.157353401184082 + ], + [ + "▁adaptogen", + -15.157354354858398 + ], + [ + "Naked", + -15.157358169555664 + ], + [ + "emergent", + -15.157358169555664 + ], + [ + "Brussels", + -15.157360076904297 + ], + [ + "Revised", + -15.157472610473633 + ], + [ + "▁pouf", + -15.157476425170898 + ], + [ + "▁aphid", + -15.157477378845217 + ], + [ + "▁sympat", + 
-15.157501220703123 + ], + [ + "Dumpster", + -15.157511711120604 + ], + [ + "spotted", + -15.157515525817873 + ], + [ + "ecaf", + -15.15756130218506 + ], + [ + "▁Repayment", + -15.15759563446045 + ], + [ + "Lazy", + -15.157617568969728 + ], + [ + "▁Tivo", + -15.157626152038574 + ], + [ + "Kingston", + -15.157636642456056 + ], + [ + "MSRP", + -15.157686233520508 + ], + [ + "Inflation", + -15.157691955566406 + ], + [ + "wetting", + -15.157700538635254 + ], + [ + "▁Caspar", + -15.157729148864746 + ], + [ + "Harley", + -15.157732009887695 + ], + [ + "Trademark", + -15.157798767089844 + ], + [ + "▁scriptwriter", + -15.157814979553224 + ], + [ + "household", + -15.157851219177246 + ], + [ + "blanket", + -15.157861709594728 + ], + [ + "Urge", + -15.157866477966309 + ], + [ + "▁Gerson", + -15.15788459777832 + ], + [ + "bunny", + -15.157912254333496 + ], + [ + "Consultant", + -15.15797996520996 + ], + [ + "Benchmark", + -15.157984733581545 + ], + [ + "Mighty", + -15.158050537109377 + ], + [ + "vario", + -15.158164978027344 + ], + [ + "LAUGH", + -15.158167839050291 + ], + [ + "Robotic", + -15.158190727233888 + ], + [ + "eckel", + -15.15823459625244 + ], + [ + "Smartphone", + -15.158248901367188 + ], + [ + "circum", + -15.158280372619627 + ], + [ + "▁Tammi", + -15.158357620239258 + ], + [ + "rdoba", + -15.158407211303713 + ], + [ + "Aust", + -15.158428192138672 + ], + [ + "spider", + -15.158514976501465 + ], + [ + "helluva", + -15.158523559570312 + ], + [ + "CRASH", + -15.158524513244627 + ], + [ + "▁0.16", + -15.158560752868652 + ], + [ + "Sauteed", + -15.15857219696045 + ], + [ + "helion", + -15.158580780029297 + ], + [ + "Restart", + -15.15861988067627 + ], + [ + "▁Chose", + -15.158758163452148 + ], + [ + "▁Homi", + -15.15876293182373 + ], + [ + "Nilsen", + -15.158798217773438 + ], + [ + "TFP", + -15.158984184265137 + ], + [ + "Pivot", + -15.158991813659668 + ], + [ + "▁01:1", + -15.15909194946289 + ], + [ + "▁Lancelot", + -15.159106254577637 + ], + [ + "lawfoot", + -15.159113883972168 + ], + [ + "▁Blond", + -15.159357070922852 + ], + [ + "luz", + -15.15943431854248 + ], + [ + "Pork", + -15.159460067749023 + ], + [ + "sprout", + -15.159488677978516 + ], + [ + "▁Ripon", + -15.159520149230955 + ], + [ + "▁21:2", + -15.159613609313965 + ], + [ + "▁Crimp", + -15.159823417663574 + ], + [ + "Trash", + -15.159927368164062 + ], + [ + "ENTLY", + -15.159990310668944 + ], + [ + "▁Dyno", + -15.160107612609863 + ], + [ + "talkie", + -15.160244941711426 + ], + [ + "▁plexi", + -15.160258293151855 + ], + [ + "▁Posse", + -15.160375595092772 + ], + [ + "06:20:", + -15.160511016845703 + ], + [ + "Placement", + -15.160619735717772 + ], + [ + "▁NANO", + -15.16069507598877 + ], + [ + "▁Kenji", + -15.16075038909912 + ], + [ + "▁Escalat", + -15.160924911499023 + ], + [ + "FON", + -15.160978317260742 + ], + [ + "▁Wager", + -15.161072731018066 + ], + [ + "▁Superfood", + -15.16114330291748 + ], + [ + "eighton", + -15.161267280578612 + ], + [ + "7:17", + -15.16131591796875 + ], + [ + "NUTS", + -15.161422729492188 + ], + [ + ".01%", + -15.16148853302002 + ], + [ + "▁Limp", + -15.161656379699709 + ], + [ + "▁BELT", + -15.16167449951172 + ], + [ + "▁Mystical", + -15.161888122558594 + ], + [ + "arthi", + -15.161946296691896 + ], + [ + "Trailing", + -15.161979675292969 + ], + [ + "2880", + -15.162264823913574 + ], + [ + "mantle", + -15.162331581115724 + ], + [ + "▁Zohar", + -15.162412643432615 + ], + [ + "9:55", + -15.162503242492676 + ], + [ + "▁teabag", + -15.162555694580078 + ], + [ + "▁Blok", + -15.162623405456545 + ], + [ + 
"8:19", + -15.162670135498049 + ], + [ + "COOL", + -15.162696838378906 + ], + [ + "▁Retire", + -15.162711143493652 + ], + [ + "▁loath", + -15.16285228729248 + ], + [ + "7300", + -15.162861824035645 + ], + [ + "scheme", + -15.163003921508787 + ], + [ + "Accurately", + -15.163068771362305 + ], + [ + "1587", + -15.163073539733888 + ], + [ + "guitarist", + -15.163185119628906 + ], + [ + "Centralized", + -15.16323947906494 + ], + [ + "egler", + -15.163311004638672 + ], + [ + "▁RDM", + -15.163331985473633 + ], + [ + "▁Zapp", + -15.163501739501951 + ], + [ + "▁Skater", + -15.163576126098633 + ], + [ + "▁Kapur", + -15.163647651672363 + ], + [ + "metrix", + -15.163721084594728 + ], + [ + "akul", + -15.163837432861328 + ], + [ + "othermic", + -15.1638822555542 + ], + [ + "backward", + -15.164006233215332 + ], + [ + "nnnn", + -15.16401481628418 + ], + [ + "otonic", + -15.16403579711914 + ], + [ + "▁adoptee", + -15.164109230041504 + ], + [ + "Boneless", + -15.164278030395508 + ], + [ + "ITTING", + -15.164280891418455 + ], + [ + "▁Klass", + -15.16431999206543 + ], + [ + "▁Citra", + -15.164501190185549 + ], + [ + "Lyon", + -15.164596557617188 + ], + [ + "3.45", + -15.16492748260498 + ], + [ + "▁ausge", + -15.164946556091309 + ], + [ + "$400", + -15.16499137878418 + ], + [ + "8,400", + -15.165102005004885 + ], + [ + "0-697", + -15.1651029586792 + ], + [ + "▁sambal", + -15.16515064239502 + ], + [ + "▁Speculat", + -15.165335655212402 + ], + [ + "▁COACH", + -15.165428161621094 + ], + [ + "▁informati", + -15.16562271118164 + ], + [ + "▁Tigris", + -15.165656089782717 + ], + [ + "rythm", + -15.165696144104004 + ], + [ + "▁Entrust", + -15.165715217590332 + ], + [ + "druck", + -15.165974617004396 + ], + [ + "DCL", + -15.166072845458984 + ], + [ + "Mattress", + -15.166166305541992 + ], + [ + "interbank", + -15.166184425354004 + ], + [ + "▁numero", + -15.166403770446776 + ], + [ + "kunst", + -15.16640853881836 + ], + [ + "SPOT", + -15.166410446166992 + ], + [ + "Nev", + -15.166481971740724 + ], + [ + "▁Sacco", + -15.166518211364746 + ], + [ + "Aggregate", + -15.166521072387695 + ], + [ + "▁Finca", + -15.166646003723145 + ], + [ + "1:53", + -15.16688060760498 + ], + [ + "▁Chibi", + -15.167030334472656 + ], + [ + "▁Boda", + -15.167069435119627 + ], + [ + "▁Roti", + -15.167083740234377 + ], + [ + "INGHAM", + -15.16724967956543 + ], + [ + "1772", + -15.167280197143556 + ], + [ + "Homo", + -15.167686462402344 + ], + [ + "▁Katrin", + -15.167798042297363 + ], + [ + "▁persecute", + -15.167977333068848 + ], + [ + "▁entrench", + -15.168252944946287 + ], + [ + "▁josh", + -15.168270111083984 + ], + [ + "▁posthumous", + -15.1683349609375 + ], + [ + "officio", + -15.168391227722168 + ], + [ + "▁cytoplasm", + -15.16853141784668 + ], + [ + "▁francophone", + -15.168577194213867 + ], + [ + "Cannabidiol", + -15.168578147888184 + ], + [ + "Mesothelioma", + -15.168578147888184 + ], + [ + "Propagation", + -15.168578147888184 + ], + [ + "Shaughnessy", + -15.168578147888184 + ], + [ + "iRunFar", + -15.168578147888184 + ], + [ + "ipvanish", + -15.168578147888184 + ], + [ + "▁888-276-5932", + -15.168578147888184 + ], + [ + "▁APPLICABLE", + -15.168578147888184 + ], + [ + "▁Apothecary", + -15.168578147888184 + ], + [ + "▁Distortion", + -15.168578147888184 + ], + [ + "▁Extinction", + -15.168578147888184 + ], + [ + "▁Facilitation", + -15.168578147888184 + ], + [ + "▁Halliburton", + -15.168578147888184 + ], + [ + "▁Hogarth", + -15.168578147888184 + ], + [ + "▁Kickboxing", + -15.168578147888184 + ], + [ + "▁Lajpat", + -15.168578147888184 + ], + 
[ + "▁Lollapalooza", + -15.168578147888184 + ], + [ + "▁Novgorod", + -15.168578147888184 + ], + [ + "▁Orthodontist", + -15.168578147888184 + ], + [ + "▁REGISTERED", + -15.168578147888184 + ], + [ + "▁Regeneron", + -15.168578147888184 + ], + [ + "▁SALVAGEDATA", + -15.168578147888184 + ], + [ + "▁Siddiqui", + -15.168578147888184 + ], + [ + "▁Tupelo", + -15.168578147888184 + ], + [ + "▁Zaragoza", + -15.168578147888184 + ], + [ + "▁brackish", + -15.168578147888184 + ], + [ + "▁cannibalism", + -15.168578147888184 + ], + [ + "▁cerebrovascular", + -15.168578147888184 + ], + [ + "▁desegregation", + -15.168578147888184 + ], + [ + "▁entrancing", + -15.168578147888184 + ], + [ + "▁excrement", + -15.168578147888184 + ], + [ + "▁frankincense", + -15.168578147888184 + ], + [ + "▁gastritis", + -15.168578147888184 + ], + [ + "▁grosgrain", + -15.168578147888184 + ], + [ + "▁hieroglyphic", + -15.168578147888184 + ], + [ + "▁impassable", + -15.168578147888184 + ], + [ + "▁inadmissible", + -15.168578147888184 + ], + [ + "▁indemnification", + -15.168578147888184 + ], + [ + "▁interplanetary", + -15.168578147888184 + ], + [ + "▁majeure", + -15.168578147888184 + ], + [ + "▁penitentiary", + -15.168578147888184 + ], + [ + "▁podiatry", + -15.168578147888184 + ], + [ + "▁psychoanalyst", + -15.168578147888184 + ], + [ + "▁salvaging", + -15.168578147888184 + ], + [ + "▁subservient", + -15.168578147888184 + ], + [ + "▁tummies", + -15.168578147888184 + ], + [ + "▁verbiage", + -15.168578147888184 + ], + [ + "▁vermicelli", + -15.168578147888184 + ], + [ + "▁Chutney", + -15.1685791015625 + ], + [ + "▁Hairdresser", + -15.1685791015625 + ], + [ + "▁Hematology", + -15.1685791015625 + ], + [ + "▁Mujahid", + -15.1685791015625 + ], + [ + "▁oxidizing", + -15.1685791015625 + ], + [ + "▁888-270-6611", + -15.168580055236816 + ], + [ + "▁888-282-0465", + -15.168580055236816 + ], + [ + "▁Ilkley", + -15.168580055236816 + ], + [ + "▁mechatronic", + -15.168580055236816 + ], + [ + "▁Saanich", + -15.168581008911133 + ], + [ + "▁educative", + -15.168581008911133 + ], + [ + "▁quarried", + -15.168581008911133 + ], + [ + "▁REDUCED", + -15.16858196258545 + ], + [ + "▁Synapse", + -15.16858196258545 + ], + [ + "▁CREATIVE", + -15.168583869934082 + ], + [ + "▁Memento", + -15.168583869934082 + ], + [ + "▁Mohamad", + -15.168583869934082 + ], + [ + "▁Moisturis", + -15.168583869934082 + ], + [ + "▁bigoted", + -15.168583869934082 + ], + [ + "▁concurrence", + -15.168583869934082 + ], + [ + "▁duchess", + -15.168583869934082 + ], + [ + "▁geodesic", + -15.168583869934082 + ], + [ + "▁squawk", + -15.168583869934082 + ], + [ + "berkeley", + -15.168584823608398 + ], + [ + "▁AUCTION", + -15.168584823608398 + ], + [ + "FWIW", + -15.168585777282717 + ], + [ + "▁BROTHER", + -15.168585777282717 + ], + [ + "▁Skaggs", + -15.168585777282717 + ], + [ + "▁Wallabies", + -15.168585777282717 + ], + [ + "▁extricate", + -15.168585777282717 + ], + [ + "7-77", + -15.168587684631348 + ], + [ + "▁Husain", + -15.168587684631348 + ], + [ + "▁Toothbrush", + -15.16858959197998 + ], + [ + "Ubiquiti", + -15.168590545654297 + ], + [ + "kloof", + -15.168591499328612 + ], + [ + "▁halving", + -15.168591499328612 + ], + [ + "▁LIFETIME", + -15.16859531402588 + ], + [ + "▁Hazzard", + -15.168596267700195 + ], + [ + "▁Pareto", + -15.168596267700195 + ], + [ + "▁corsage", + -15.168597221374512 + ], + [ + "▁shofar", + -15.168598175048828 + ], + [ + "Quarantine", + -15.168599128723145 + ], + [ + "▁Oberoi", + -15.168599128723145 + ], + [ + "▁outmoded", + -15.16860008239746 + ], + [ + "▁Ratchet", + 
-15.168601036071776 + ], + [ + "▁Argumentative", + -15.16860294342041 + ], + [ + "[*]", + -15.168603897094728 + ], + [ + "Conceal", + -15.16860580444336 + ], + [ + "▁reassert", + -15.16860580444336 + ], + [ + "▁Biographical", + -15.168606758117676 + ], + [ + "▁bribing", + -15.168606758117676 + ], + [ + "▁Cheerleading", + -15.168609619140623 + ], + [ + "▁FLORAL", + -15.168611526489258 + ], + [ + "▁Interpreting", + -15.168611526489258 + ], + [ + "FERENCE", + -15.168612480163574 + ], + [ + "▁Barnstable", + -15.168612480163574 + ], + [ + "▁Strachan", + -15.168612480163574 + ], + [ + "▁candidature", + -15.168614387512209 + ], + [ + "▁pricy", + -15.16861629486084 + ], + [ + "▁Tranquility", + -15.168618202209473 + ], + [ + "▁Augmentation", + -15.168619155883787 + ], + [ + "▁Khalsa", + -15.168622970581056 + ], + [ + "▁Holyrood", + -15.168624877929688 + ], + [ + "▁diffusing", + -15.168624877929688 + ], + [ + "corsair", + -15.168633460998535 + ], + [ + "▁inflator", + -15.1686372756958 + ], + [ + "▁federated", + -15.168644905090332 + ], + [ + "▁calzone", + -15.168651580810549 + ], + [ + "▁£40,000", + -15.168652534484863 + ], + [ + "▁WalMart", + -15.168656349182127 + ], + [ + "▁Bharatpur", + -15.168662071228027 + ], + [ + "▁Dhanush", + -15.168662071228027 + ], + [ + "▁AIIMS", + -15.168668746948242 + ], + [ + "▁Transient", + -15.168673515319824 + ], + [ + "▁dispelled", + -15.168673515319824 + ], + [ + "▁Facade", + -15.168676376342772 + ], + [ + "▁Ahmet", + -15.16868019104004 + ], + [ + "▁Anglesey", + -15.168683052062988 + ], + [ + "▁bawl", + -15.168683052062988 + ], + [ + "▁choreographic", + -15.16868495941162 + ], + [ + "ritter", + -15.168692588806152 + ], + [ + "▁senatorial", + -15.168696403503418 + ], + [ + "▁Scanlon", + -15.168701171875 + ], + [ + "▁lollies", + -15.168701171875 + ], + [ + "▁delicatessen", + -15.16870403289795 + ], + [ + "▁resistivity", + -15.168710708618164 + ], + [ + "▁hawkish", + -15.16871738433838 + ], + [ + "▁(50%)", + -15.168720245361328 + ], + [ + "▁Traits", + -15.16872787475586 + ], + [ + "▁Chabot", + -15.168730735778809 + ], + [ + "▁decongestant", + -15.168730735778809 + ], + [ + "JROTC", + -15.168734550476074 + ], + [ + "▁Profitability", + -15.16873836517334 + ], + [ + "thrombotic", + -15.168739318847656 + ], + [ + "▁sunburst", + -15.168742179870604 + ], + [ + "▁jinx", + -15.168750762939451 + ], + [ + "▁Favour", + -15.168760299682615 + ], + [ + "▁hazing", + -15.168766021728516 + ], + [ + "▁hCG", + -15.16877555847168 + ], + [ + "▁tikka", + -15.168777465820312 + ], + [ + "UMKC", + -15.168778419494627 + ], + [ + "▁Almaty", + -15.168785095214844 + ], + [ + "▁EMPLOYEES", + -15.16879177093506 + ], + [ + "▁tardy", + -15.168797492980955 + ], + [ + "▁Hayek", + -15.168811798095703 + ], + [ + "▁Hattie", + -15.1688232421875 + ], + [ + "kirchen", + -15.168825149536133 + ], + [ + "▁Matlock", + -15.168851852416992 + ], + [ + "▁simulcast", + -15.168851852416992 + ], + [ + "▁Hanshan", + -15.168852806091309 + ], + [ + "▁Snowshoe", + -15.168871879577637 + ], + [ + "ogenetic", + -15.168877601623535 + ], + [ + "▁Minoan", + -15.16889190673828 + ], + [ + "▁evocation", + -15.168907165527344 + ], + [ + "▁Furlong", + -15.168937683105469 + ], + [ + "▁Epoch", + -15.168953895568848 + ], + [ + "▁Lovelace", + -15.168960571289062 + ], + [ + "▁conceptualizing", + -15.168962478637695 + ], + [ + "▁Nagano", + -15.168971061706545 + ], + [ + "▁PROFITS", + -15.16897678375244 + ], + [ + "▁2019-03-", + -15.16899299621582 + ], + [ + "▁Clarets", + -15.16899585723877 + ], + [ + "Synerg", + -15.169000625610352 + ], + 
[ + "▁Robbery", + -15.169014930725098 + ], + [ + "▁Celina", + -15.16901683807373 + ], + [ + "▁Lopes", + -15.16903305053711 + ], + [ + "▁Snooker", + -15.16903305053711 + ], + [ + "▁Splat", + -15.169089317321776 + ], + [ + "▁Quechua", + -15.169102668762209 + ], + [ + "▁Millersville", + -15.16913890838623 + ], + [ + "▁Swirlr", + -15.16915512084961 + ], + [ + "▁slathered", + -15.169172286987305 + ], + [ + "▁dustbin", + -15.16922664642334 + ], + [ + "▁Crofton", + -15.169235229492188 + ], + [ + "SHIRT", + -15.169240951538086 + ], + [ + "▁Superdry", + -15.169246673583984 + ], + [ + "▁Herpes", + -15.169267654418944 + ], + [ + "▁Slippery", + -15.169273376464844 + ], + [ + "▁preinstalled", + -15.16928005218506 + ], + [ + "▁placate", + -15.16929054260254 + ], + [ + "▁tcp", + -15.169293403625488 + ], + [ + "▁proportionately", + -15.169294357299805 + ], + [ + "▁expressiveness", + -15.16932773590088 + ], + [ + "▁kodi", + -15.169346809387209 + ], + [ + "▁Outbound", + -15.169351577758787 + ], + [ + "▁debriefing", + -15.16935920715332 + ], + [ + "▁Bighorn", + -15.169377326965332 + ], + [ + "▁Sakai", + -15.169443130493164 + ], + [ + "ANGLE", + -15.16944694519043 + ], + [ + "▁cantilevered", + -15.169459342956545 + ], + [ + "▁Fleury", + -15.169473648071287 + ], + [ + "Predictably", + -15.16948413848877 + ], + [ + "▁Hillsong", + -15.16951847076416 + ], + [ + "henium", + -15.169623374938965 + ], + [ + "▁ragtime", + -15.169634819030762 + ], + [ + "▁BOWL", + -15.169641494750977 + ], + [ + "▁enlivened", + -15.169654846191406 + ], + [ + "praise", + -15.169659614562988 + ], + [ + "▁Quinton", + -15.169665336608888 + ], + [ + "lagu", + -15.169681549072266 + ], + [ + "▁Carnation", + -15.169684410095217 + ], + [ + "POLITI", + -15.16969871520996 + ], + [ + "▁23:3", + -15.16982364654541 + ], + [ + "Dividing", + -15.169836044311523 + ], + [ + "osperm", + -15.169843673706056 + ], + [ + "▁roughest", + -15.169977188110352 + ], + [ + "Henshaw", + -15.169981956481934 + ], + [ + "oussi", + -15.169987678527832 + ], + [ + "▁splintered", + -15.17005729675293 + ], + [ + "▁BEARING", + -15.170183181762695 + ], + [ + "▁FIPS", + -15.170226097106934 + ], + [ + "16949", + -15.17023468017578 + ], + [ + "▁£700", + -15.170363426208496 + ], + [ + "Geez", + -15.17037582397461 + ], + [ + "kubo", + -15.170552253723145 + ], + [ + "▁SQU", + -15.170594215393066 + ], + [ + "▁Yulia", + -15.170717239379885 + ], + [ + "Advertisement", + -15.17072582244873 + ], + [ + "informatics", + -15.170771598815918 + ], + [ + "Consistency", + -15.170788764953612 + ], + [ + "3:15", + -15.170790672302246 + ], + [ + "Ergonomic", + -15.17081069946289 + ], + [ + "▁Cobbler", + -15.170811653137209 + ], + [ + "Shuttle", + -15.170814514160156 + ], + [ + "Literally", + -15.170822143554688 + ], + [ + "1702", + -15.17082691192627 + ], + [ + "Dangerous", + -15.170828819274902 + ], + [ + "catalyst", + -15.170832633972168 + ], + [ + "Exhaust", + -15.170833587646484 + ], + [ + "▁Beebe", + -15.170833587646484 + ], + [ + "Peggy", + -15.170839309692385 + ], + [ + "Maurice", + -15.170842170715332 + ], + [ + "suspension", + -15.170843124389648 + ], + [ + "McCarthy", + -15.17084503173828 + ], + [ + "Shrimp", + -15.170845985412598 + ], + [ + "headquartered", + -15.17084789276123 + ], + [ + "Mathematics", + -15.170849800109863 + ], + [ + "consecutive", + -15.170853614807127 + ], + [ + "aggression", + -15.170856475830078 + ], + [ + "applicable", + -15.170856475830078 + ], + [ + "Voyage", + -15.170859336853027 + ], + [ + "prohibit", + -15.170863151550291 + ], + [ + "Sesame", + 
-15.170865058898926 + ], + [ + "arrived", + -15.17086696624756 + ], + [ + "administrative", + -15.170868873596191 + ], + [ + "submission", + -15.17087459564209 + ], + [ + "CONNECT", + -15.170881271362305 + ], + [ + "eauteous", + -15.170881271362305 + ], + [ + "Guild", + -15.170883178710938 + ], + [ + "Utilities", + -15.170891761779783 + ], + [ + "Ninety", + -15.170904159545898 + ], + [ + "Recruitment", + -15.170912742614746 + ], + [ + "Pluto", + -15.170928001403809 + ], + [ + "▁Importers", + -15.170976638793944 + ], + [ + "Consent", + -15.170992851257324 + ], + [ + "Magical", + -15.170997619628906 + ], + [ + "Reception", + -15.17100429534912 + ], + [ + "alleged", + -15.171005249023438 + ], + [ + "Shutter", + -15.171008110046388 + ], + [ + "inspirational", + -15.171026229858398 + ], + [ + "Pierce", + -15.171186447143556 + ], + [ + "Coleman", + -15.171238899230955 + ], + [ + "measurement", + -15.171331405639648 + ], + [ + "Rarely", + -15.17140007019043 + ], + [ + "encoded", + -15.1714506149292 + ], + [ + "rican", + -15.17147445678711 + ], + [ + "▁combatant", + -15.171476364135742 + ], + [ + "Roland", + -15.17149829864502 + ], + [ + "▁Headmaster", + -15.171557426452637 + ], + [ + "GaN", + -15.171589851379396 + ], + [ + "Authority", + -15.17166805267334 + ], + [ + "Fond", + -15.171669960021973 + ], + [ + "▁Choker", + -15.171671867370604 + ], + [ + "chrys", + -15.171696662902832 + ], + [ + "▁$55,000", + -15.171711921691896 + ], + [ + "▁Mazur", + -15.171741485595703 + ], + [ + "Barrel", + -15.17180061340332 + ], + [ + "Parties", + -15.171937942504885 + ], + [ + "Chronicle", + -15.172154426574709 + ], + [ + "▁Giang", + -15.172264099121094 + ], + [ + "Institutional", + -15.172389030456545 + ], + [ + "▁CFU", + -15.172661781311035 + ], + [ + "delegate", + -15.172686576843262 + ], + [ + "▁skf", + -15.173027992248535 + ], + [ + "4900", + -15.173128128051758 + ], + [ + "▁Capaciti", + -15.173171043395996 + ], + [ + "▁pvp", + -15.173247337341309 + ], + [ + "2(2):", + -15.173291206359863 + ], + [ + "▁Mangia", + -15.173367500305176 + ], + [ + "▁Govind", + -15.173415184020996 + ], + [ + "▁Pawl", + -15.17343807220459 + ], + [ + "30.00", + -15.17361545562744 + ], + [ + "▁Chinn", + -15.173724174499512 + ], + [ + "▁Crouse", + -15.173791885375977 + ], + [ + "▁Ultrabook", + -15.174190521240234 + ], + [ + "▁Imperi", + -15.174269676208496 + ], + [ + "Fluid", + -15.17432689666748 + ], + [ + "▁Pegg", + -15.174449920654297 + ], + [ + "▁Komar", + -15.174453735351562 + ], + [ + "▁Alterna", + -15.174590110778809 + ], + [ + "PERT", + -15.17480182647705 + ], + [ + "tronomic", + -15.174822807312012 + ], + [ + "▁Kemal", + -15.174955368041992 + ], + [ + "6-14", + -15.174981117248535 + ], + [ + "▁Spitz", + -15.175078392028809 + ], + [ + "Artwork", + -15.17508316040039 + ], + [ + "pivot", + -15.175371170043944 + ], + [ + "wringing", + -15.17552375793457 + ], + [ + "▁misplace", + -15.175593376159668 + ], + [ + "▁stewardess", + -15.175599098205566 + ], + [ + "ightened", + -15.175615310668944 + ], + [ + "▁Lombardo", + -15.175735473632812 + ], + [ + "servator", + -15.175769805908203 + ], + [ + "▁19:3", + -15.17603874206543 + ], + [ + "jeux", + -15.17624855041504 + ], + [ + "▁governess", + -15.176511764526367 + ], + [ + "▁Alpert", + -15.176737785339355 + ], + [ + "fumi", + -15.176758766174316 + ], + [ + "2:56", + -15.176837921142578 + ], + [ + "yoku", + -15.176908493041992 + ], + [ + "▁Plover", + -15.176968574523926 + ], + [ + "74%", + -15.177058219909668 + ], + [ + "aryngeal", + -15.1771240234375 + ], + [ + "9:05", + 
-15.17722988128662 + ], + [ + "▁Feral", + -15.177250862121582 + ], + [ + "123456", + -15.177258491516112 + ], + [ + "▁GENE", + -15.177312850952148 + ], + [ + "osophical", + -15.17733383178711 + ], + [ + "▁Defy", + -15.177434921264648 + ], + [ + "▁repudiate", + -15.177448272705078 + ], + [ + "Describ", + -15.177617073059082 + ], + [ + "Amico", + -15.177714347839355 + ], + [ + "▁Rauch", + -15.177857398986816 + ], + [ + "Gerard", + -15.178159713745115 + ], + [ + "▁EXEC", + -15.178182601928713 + ], + [ + "SiO", + -15.178211212158203 + ], + [ + "▁Breda", + -15.17829132080078 + ], + [ + "FDP", + -15.178306579589844 + ], + [ + "02:00", + -15.178359985351562 + ], + [ + "3:11", + -15.178491592407228 + ], + [ + "mbley", + -15.17849349975586 + ], + [ + "▁Salma", + -15.17931079864502 + ], + [ + "▁dialectic", + -15.17934799194336 + ], + [ + "1:56", + -15.179705619812012 + ], + [ + "▁Brico", + -15.179723739624023 + ], + [ + "▁TYPO", + -15.179771423339844 + ], + [ + "3:02", + -15.179975509643556 + ], + [ + "constructive", + -15.180089950561523 + ], + [ + "12-7", + -15.180303573608398 + ], + [ + "▁23:59", + -15.180359840393066 + ], + [ + "▁Tauri", + -15.18048095703125 + ], + [ + "shedding", + -15.180700302124023 + ], + [ + "zept", + -15.18095588684082 + ], + [ + "▁07:2", + -15.18105125427246 + ], + [ + "▁Tiara", + -15.181195259094238 + ], + [ + "▁Appetit", + -15.181292533874512 + ], + [ + "▁positiv", + -15.181309700012209 + ], + [ + "▁relegate", + -15.181310653686523 + ], + [ + "▁detonate", + -15.18131160736084 + ], + [ + "▁TICKET", + -15.181429862976074 + ], + [ + "▁undoubted", + -15.181669235229492 + ], + [ + "▁OCP", + -15.18179988861084 + ], + [ + "▁acquiesce", + -15.181906700134276 + ], + [ + "Fructose", + -15.181910514831545 + ], + [ + "▁Connoisseur", + -15.181910514831545 + ], + [ + "▁Pyeongchang", + -15.181910514831545 + ], + [ + "Accommodate", + -15.18191146850586 + ], + [ + "CLEVELAND", + -15.18191146850586 + ], + [ + "Defamation", + -15.18191146850586 + ], + [ + "Exemplary", + -15.18191146850586 + ], + [ + "Forstchen", + -15.18191146850586 + ], + [ + "Matthieu", + -15.18191146850586 + ], + [ + "▁Cilantro", + -15.18191146850586 + ], + [ + "▁Cumulus", + -15.18191146850586 + ], + [ + "▁DIFFERENCE", + -15.18191146850586 + ], + [ + "▁Duxbury", + -15.18191146850586 + ], + [ + "▁Fethiye", + -15.18191146850586 + ], + [ + "▁Fujairah", + -15.18191146850586 + ], + [ + "▁Galbraith", + -15.18191146850586 + ], + [ + "▁Grumpy", + -15.18191146850586 + ], + [ + "▁Guterres", + -15.18191146850586 + ], + [ + "▁Gwalior", + -15.18191146850586 + ], + [ + "▁Habanero", + -15.18191146850586 + ], + [ + "▁Huguenot", + -15.18191146850586 + ], + [ + "▁Mariposa", + -15.18191146850586 + ], + [ + "▁Mizoram", + -15.18191146850586 + ], + [ + "▁Murchison", + -15.18191146850586 + ], + [ + "▁ORLEANS", + -15.18191146850586 + ], + [ + "▁PERMITTED", + -15.18191146850586 + ], + [ + "▁Pseudomonas", + -15.18191146850586 + ], + [ + "▁Superannuation", + -15.18191146850586 + ], + [ + "▁TOMORROW", + -15.18191146850586 + ], + [ + "▁TRAFFIC", + -15.18191146850586 + ], + [ + "▁Thibodeau", + -15.18191146850586 + ], + [ + "▁Wilfrid", + -15.18191146850586 + ], + [ + "▁anesthetist", + -15.18191146850586 + ], + [ + "▁astrophysicist", + -15.18191146850586 + ], + [ + "▁clavicle", + -15.18191146850586 + ], + [ + "▁demonstrably", + -15.18191146850586 + ], + [ + "▁drizzling", + -15.18191146850586 + ], + [ + "▁expediency", + -15.18191146850586 + ], + [ + "▁fulcrum", + -15.18191146850586 + ], + [ + "▁gregarious", + -15.18191146850586 + ], + [ + 
"▁perceptible", + -15.18191146850586 + ], + [ + "▁peregrine", + -15.18191146850586 + ], + [ + "▁pygmy", + -15.18191146850586 + ], + [ + "▁recompense", + -15.18191146850586 + ], + [ + "▁sriracha", + -15.18191146850586 + ], + [ + "▁superconducting", + -15.18191146850586 + ], + [ + "▁tarantula", + -15.18191146850586 + ], + [ + "▁torturous", + -15.18191146850586 + ], + [ + "▁unbearably", + -15.18191146850586 + ], + [ + "▁undergarments", + -15.18191146850586 + ], + [ + "▁voluptuous", + -15.18191146850586 + ], + [ + "TRIBUTION", + -15.181912422180176 + ], + [ + "tribune", + -15.181912422180176 + ], + [ + "▁Carrefour", + -15.181912422180176 + ], + [ + "▁demagogue", + -15.181912422180176 + ], + [ + "▁hurrah", + -15.181912422180176 + ], + [ + "▁FRENCH", + -15.181913375854492 + ], + [ + "▁Suleiman", + -15.181913375854492 + ], + [ + "▁Welbeck", + -15.181913375854492 + ], + [ + "▁hepatocellular", + -15.181913375854492 + ], + [ + "▁oxidase", + -15.181913375854492 + ], + [ + "▁ugliest", + -15.181913375854492 + ], + [ + "Executing", + -15.181914329528809 + ], + [ + "1-6556", + -15.181915283203123 + ], + [ + "▁Cullinan", + -15.181915283203123 + ], + [ + "▁Imelda", + -15.181915283203123 + ], + [ + "▁ineptitude", + -15.181915283203123 + ], + [ + "▁Skeeter", + -15.18191623687744 + ], + [ + "▁mystified", + -15.18191623687744 + ], + [ + "▁CONFERENCE", + -15.181917190551758 + ], + [ + "▁labrador", + -15.181917190551758 + ], + [ + "▁Skanska", + -15.18191909790039 + ], + [ + "▁shrubbery", + -15.18191909790039 + ], + [ + "▁Paleolithic", + -15.181920051574709 + ], + [ + "▁thermocouple", + -15.181921005249023 + ], + [ + "▁Vaillant", + -15.18192195892334 + ], + [ + "▁Nemesis", + -15.181924819946287 + ], + [ + "▁Grecian", + -15.181925773620604 + ], + [ + "▁PARTIES", + -15.181925773620604 + ], + [ + "▁eavesdropping", + -15.181927680969238 + ], + [ + "▁steadied", + -15.181927680969238 + ], + [ + "▁Barstow", + -15.181930541992188 + ], + [ + "▁Kiefer", + -15.181930541992188 + ], + [ + "▁Rajkumar", + -15.181931495666504 + ], + [ + "▁fumigation", + -15.18193244934082 + ], + [ + "▁Distributing", + -15.181933403015137 + ], + [ + "▁Lugano", + -15.181939125061035 + ], + [ + "▁revving", + -15.181939125061035 + ], + [ + "▁ANZAC", + -15.181947708129885 + ], + [ + "▁Esteban", + -15.181947708129885 + ], + [ + "▁Kusama", + -15.181947708129885 + ], + [ + "▁interweave", + -15.1819486618042 + ], + [ + "▁protonix", + -15.181949615478516 + ], + [ + "▁Lindberg", + -15.181957244873049 + ], + [ + "▁Tioga", + -15.181957244873049 + ], + [ + "▁Condé", + -15.181958198547363 + ], + [ + "▁Tilburg", + -15.181962013244627 + ], + [ + "▁Zapata", + -15.181962013244627 + ], + [ + "▁Waldron", + -15.181971549987791 + ], + [ + "▁Kirchner", + -15.18197536468506 + ], + [ + "▁yucca", + -15.181977272033691 + ], + [ + "▁Gaynor", + -15.181979179382324 + ], + [ + "▁maximization", + -15.181983947753906 + ], + [ + "▁undeclared", + -15.181983947753906 + ], + [ + "▁2006-2007", + -15.181984901428224 + ], + [ + "▁Ringgit", + -15.181984901428224 + ], + [ + "▁yankee", + -15.181989669799805 + ], + [ + "▁sintering", + -15.18199062347412 + ], + [ + "▁purevpn", + -15.181991577148438 + ], + [ + "▁sexier", + -15.18199348449707 + ], + [ + "▁FORWARD", + -15.18199634552002 + ], + [ + "▁Bentonite", + -15.181999206542969 + ], + [ + "▁suboptimal", + -15.182000160217283 + ], + [ + "▁Terrance", + -15.182002067565918 + ], + [ + "▁Amicus", + -15.182004928588867 + ], + [ + "▁satanic", + -15.182008743286133 + ], + [ + "▁Riemann", + -15.182011604309082 + ], + [ + "▁$24.99", + 
-15.182018280029297 + ], + [ + "▁Norcross", + -15.18202018737793 + ], + [ + "▁AutoNation", + -15.18203067779541 + ], + [ + "▁Gastonia", + -15.182031631469728 + ], + [ + "▁crankcase", + -15.182035446166992 + ], + [ + "▁peice", + -15.182036399841309 + ], + [ + "▁Birthplace", + -15.18203830718994 + ], + [ + "▁Tremont", + -15.18203830718994 + ], + [ + "▁Paraben", + -15.182040214538574 + ], + [ + "▁strutting", + -15.182042121887209 + ], + [ + "▁Zorro", + -15.182050704956056 + ], + [ + "▁Kinsey", + -15.182053565979004 + ], + [ + "capturing", + -15.182056427001951 + ], + [ + "▁Terrapin", + -15.182065963745115 + ], + [ + "▁Kennard", + -15.182069778442385 + ], + [ + "▁LiveCareer", + -15.182072639465332 + ], + [ + "▁Natick", + -15.182072639465332 + ], + [ + "▁Fortitude", + -15.182080268859863 + ], + [ + "▁Statham", + -15.182080268859863 + ], + [ + "▁geocache", + -15.182085990905762 + ], + [ + "▁bastards", + -15.18209457397461 + ], + [ + "▁Dixit", + -15.182098388671877 + ], + [ + "▁Diehl", + -15.182103157043455 + ], + [ + "▁Dermot", + -15.18210506439209 + ], + [ + "▁wtf", + -15.18210506439209 + ], + [ + "▁biosecurity", + -15.182106971740724 + ], + [ + "odendron", + -15.18211841583252 + ], + [ + "▁Khaki", + -15.182132720947266 + ], + [ + "▁sublingual", + -15.182135581970217 + ], + [ + "▁Dubbo", + -15.182144165039062 + ], + [ + "▁interagency", + -15.18214988708496 + ], + [ + "▁staid", + -15.182167053222656 + ], + [ + "▁ontologies", + -15.182175636291504 + ], + [ + "▁RECEIVED", + -15.182220458984377 + ], + [ + "▁Mohit", + -15.182221412658691 + ], + [ + "▁Abaco", + -15.182233810424805 + ], + [ + "8:29", + -15.182236671447754 + ], + [ + "▁Marshfield", + -15.182249069213867 + ], + [ + "▁drunkenness", + -15.182252883911133 + ], + [ + "▁TestKing", + -15.18227767944336 + ], + [ + "▁farmstead", + -15.182279586791992 + ], + [ + "▁Crutch", + -15.182297706604004 + ], + [ + "▁Sandoz", + -15.18230438232422 + ], + [ + "▁barnyard", + -15.182330131530762 + ], + [ + "Consequence", + -15.182333946228027 + ], + [ + "▁shearling", + -15.18233871459961 + ], + [ + "legitimis", + -15.182342529296877 + ], + [ + "▁Jemma", + -15.182380676269531 + ], + [ + "▁Bodybuilding", + -15.182397842407228 + ], + [ + "▁buttercup", + -15.1824369430542 + ], + [ + "▁adjudicated", + -15.18244171142578 + ], + [ + "NonCommercial", + -15.182453155517578 + ], + [ + "▁Arabella", + -15.182453155517578 + ], + [ + "▁Javed", + -15.182476997375488 + ], + [ + "▁Caplan", + -15.182502746582031 + ], + [ + "1:58", + -15.18252658843994 + ], + [ + "keller", + -15.182576179504396 + ], + [ + "▁Kathie", + -15.182607650756836 + ], + [ + "▁Greenleaf", + -15.182650566101074 + ], + [ + "▁Decoder", + -15.18265438079834 + ], + [ + "▁Nominating", + -15.18265438079834 + ], + [ + "▁Webroot", + -15.18270778656006 + ], + [ + "▁Suprema", + -15.182726860046388 + ], + [ + "▁2016/2017", + -15.1827392578125 + ], + [ + "▁paediatrician", + -15.182745933532717 + ], + [ + "▁mocktail", + -15.182780265808104 + ], + [ + "Schaller", + -15.182784080505373 + ], + [ + "TAKING", + -15.182784080505373 + ], + [ + "▁Kurtis", + -15.1827974319458 + ], + [ + "▁Battlestar", + -15.182798385620115 + ], + [ + "▁Marcella", + -15.182822227478027 + ], + [ + "▁Postgres", + -15.182830810546877 + ], + [ + "▁engendered", + -15.182830810546877 + ], + [ + "▁Elasto", + -15.182851791381836 + ], + [ + "▁Eatery", + -15.182880401611328 + ], + [ + "▁Haque", + -15.182903289794922 + ], + [ + "▁Persie", + -15.182923316955566 + ], + [ + "▁Ajmer", + -15.18295669555664 + ], + [ + "0005", + -15.183029174804688 + ], + [ + 
"▁Adeline", + -15.18304443359375 + ], + [ + "PARK", + -15.183050155639648 + ], + [ + "▁Drifter", + -15.183058738708496 + ], + [ + "▁cartographer", + -15.183104515075684 + ], + [ + "▁Camara", + -15.183140754699709 + ], + [ + "▁castaway", + -15.183152198791504 + ], + [ + "Specified", + -15.183199882507324 + ], + [ + "▁oxidise", + -15.183271408081056 + ], + [ + "▁Elbert", + -15.183292388916016 + ], + [ + "▁Bugle", + -15.183341026306152 + ], + [ + "▁Remus", + -15.183392524719238 + ], + [ + "▁Franconia", + -15.183405876159668 + ], + [ + "▁Chipper", + -15.183469772338867 + ], + [ + "Provenance", + -15.183478355407717 + ], + [ + "▁Reiter", + -15.183544158935549 + ], + [ + "ijl", + -15.183587074279783 + ], + [ + "▁Carsten", + -15.18362808227539 + ], + [ + "▁phthalate", + -15.183756828308104 + ], + [ + "▁Fifi", + -15.18377685546875 + ], + [ + "▁newlywed", + -15.183996200561523 + ], + [ + "etna", + -15.184008598327637 + ], + [ + "Repetition", + -15.18402099609375 + ], + [ + "▁Storybook", + -15.184029579162598 + ], + [ + "blame", + -15.184107780456545 + ], + [ + "▁Firearm", + -15.184192657470703 + ], + [ + "Crisp", + -15.18422508239746 + ], + [ + "▁Buffon", + -15.184249877929688 + ], + [ + "genome", + -15.184270858764648 + ], + [ + "▁Malawian", + -15.184351921081545 + ], + [ + "prong", + -15.184412956237791 + ], + [ + "▁NXP", + -15.18442153930664 + ], + [ + "▁pooja", + -15.184468269348145 + ], + [ + "happiness", + -15.184483528137209 + ], + [ + "DESCRIPTION", + -15.184490203857422 + ], + [ + "insurgency", + -15.184494972229004 + ], + [ + "Immune", + -15.184499740600586 + ], + [ + "Discipline", + -15.18452262878418 + ], + [ + "murphy", + -15.18452262878418 + ], + [ + "Asbestos", + -15.184523582458496 + ], + [ + "Charcoal", + -15.184531211853027 + ], + [ + "Shenzhen", + -15.184532165527344 + ], + [ + "Licensing", + -15.184535026550291 + ], + [ + "Meredith", + -15.184535026550291 + ], + [ + "café", + -15.184535026550291 + ], + [ + "Schneider", + -15.184537887573242 + ], + [ + "Clarity", + -15.18453884124756 + ], + [ + "Financing", + -15.184539794921877 + ], + [ + "catalytic", + -15.184539794921877 + ], + [ + "Interaction", + -15.184541702270508 + ], + [ + "preparation", + -15.184542655944824 + ], + [ + "Crisis", + -15.18454360961914 + ], + [ + "Eclipse", + -15.18454360961914 + ], + [ + "Lithuania", + -15.18454360961914 + ], + [ + "Muhammad", + -15.18454647064209 + ], + [ + "Treasury", + -15.184550285339355 + ], + [ + "LOCAL", + -15.184552192687988 + ], + [ + "explicit", + -15.184553146362305 + ], + [ + "Colonial", + -15.18455982208252 + ], + [ + "Dixie", + -15.184564590454102 + ], + [ + "policies", + -15.184564590454102 + ], + [ + "Arlington", + -15.18456745147705 + ], + [ + "Armstrong", + -15.1845703125 + ], + [ + "Supplies", + -15.184572219848633 + ], + [ + "Valencia", + -15.184581756591797 + ], + [ + "▁Seedling", + -15.18459415435791 + ], + [ + "BONUS", + -15.184597969055176 + ], + [ + "spicy", + -15.184597969055176 + ], + [ + "Organizers", + -15.184612274169922 + ], + [ + "Sonny", + -15.184621810913086 + ], + [ + "Elastic", + -15.1846342086792 + ], + [ + "ISING", + -15.18463897705078 + ], + [ + "blurb", + -15.184640884399414 + ], + [ + "Settlement", + -15.18465805053711 + ], + [ + "Camden", + -15.18474292755127 + ], + [ + "Crispy", + -15.18474578857422 + ], + [ + "pecially", + -15.184748649597168 + ], + [ + "fabrik", + -15.184751510620115 + ], + [ + "WoW", + -15.18476390838623 + ], + [ + "Apprentice", + -15.184844970703123 + ], + [ + "Rookie", + -15.184853553771973 + ], + [ + "machinery", + 
-15.184916496276855 + ], + [ + "Realizing", + -15.184918403625488 + ], + [ + "▁juggler", + -15.184946060180664 + ], + [ + "▁Nirmal", + -15.184955596923828 + ], + [ + "Aggregat", + -15.184956550598145 + ], + [ + "wmv", + -15.184965133666992 + ], + [ + "▁Winne", + -15.185009956359863 + ], + [ + "Heavenly", + -15.185011863708496 + ], + [ + "idase", + -15.185023307800291 + ], + [ + "▁Consultati", + -15.185044288635254 + ], + [ + "▁Kriti", + -15.185067176818848 + ], + [ + "Rogue", + -15.185141563415527 + ], + [ + "Practically", + -15.185163497924805 + ], + [ + "Jeep", + -15.185166358947754 + ], + [ + "▁DONATE", + -15.185181617736816 + ], + [ + "7:45", + -15.185280799865724 + ], + [ + "Dixon", + -15.185296058654783 + ], + [ + "divine", + -15.18533420562744 + ], + [ + "▁Rahu", + -15.185453414916992 + ], + [ + "essex", + -15.18547534942627 + ], + [ + "▁Kaff", + -15.185659408569336 + ], + [ + "Nigerian", + -15.185726165771484 + ], + [ + "▁$2,4", + -15.185748100280762 + ], + [ + "Iraqi", + -15.185900688171388 + ], + [ + "Sodium", + -15.185972213745115 + ], + [ + "▁DMK", + -15.18598461151123 + ], + [ + "▁Paramus", + -15.18625831604004 + ], + [ + "▁OMNI", + -15.186342239379885 + ], + [ + "1,600", + -15.186378479003906 + ], + [ + "NWR", + -15.186464309692385 + ], + [ + "▁defection", + -15.186474800109863 + ], + [ + "▁administra", + -15.186552047729492 + ], + [ + "▁clef", + -15.186773300170898 + ], + [ + "▁HHV", + -15.186877250671388 + ], + [ + "Genie", + -15.186906814575195 + ], + [ + "00-000", + -15.186938285827637 + ], + [ + "ERIE", + -15.187049865722656 + ], + [ + "glide", + -15.18714714050293 + ], + [ + "▁Govern", + -15.1873197555542 + ], + [ + "Propose", + -15.187329292297363 + ], + [ + "3:25", + -15.187370300292969 + ], + [ + "▁$130,000", + -15.187532424926758 + ], + [ + "ropod", + -15.187607765197754 + ], + [ + "5:50", + -15.187684059143066 + ], + [ + "JIM", + -15.187685012817385 + ], + [ + "▁Tashi", + -15.18772315979004 + ], + [ + "▁mitten", + -15.187785148620604 + ], + [ + "Francois", + -15.187914848327637 + ], + [ + "▁Respir", + -15.188020706176758 + ], + [ + "10-9", + -15.18844509124756 + ], + [ + "turkey", + -15.188691139221191 + ], + [ + "helpful", + -15.188854217529297 + ], + [ + "PINE", + -15.188947677612305 + ], + [ + "Politi", + -15.18903923034668 + ], + [ + "▁Pampa", + -15.189468383789062 + ], + [ + "91%", + -15.18962860107422 + ], + [ + "▁zyg", + -15.189708709716797 + ], + [ + "▁Llang", + -15.189921379089355 + ], + [ + "▁Imperfect", + -15.189955711364746 + ], + [ + "▁Jahn", + -15.19007396697998 + ], + [ + "▁717-", + -15.19030475616455 + ], + [ + "▁Sarna", + -15.190776824951172 + ], + [ + "▁conserva", + -15.190972328186035 + ], + [ + "▁homie", + -15.19100856781006 + ], + [ + "0-466", + -15.19107151031494 + ], + [ + "surance", + -15.191218376159668 + ], + [ + "▁chafe", + -15.191274642944336 + ], + [ + "▁Pawan", + -15.191455841064451 + ], + [ + "▁Quack", + -15.191590309143066 + ], + [ + "Trax", + -15.191662788391112 + ], + [ + "INDE", + -15.192112922668455 + ], + [ + "▁pessimist", + -15.19212532043457 + ], + [ + "▁gunfight", + -15.192138671875 + ], + [ + "▁hick", + -15.192174911499023 + ], + [ + "MNP", + -15.192220687866213 + ], + [ + "▁Narco", + -15.19224452972412 + ], + [ + "jail", + -15.192523956298828 + ], + [ + "▁Kesar", + -15.19266128540039 + ], + [ + "JIA", + -15.192713737487791 + ], + [ + "▁Mince", + -15.19275951385498 + ], + [ + "tapped", + -15.192826271057127 + ], + [ + "Hollow", + -15.193126678466797 + ], + [ + "dwarfed", + -15.193225860595703 + ], + [ + "▁practic", + 
-15.193495750427246 + ], + [ + "EDEN", + -15.193867683410645 + ], + [ + "4:27", + -15.194014549255373 + ], + [ + "koski", + -15.194072723388672 + ], + [ + "▁INDIAN", + -15.19422721862793 + ], + [ + "▁Farhan", + -15.194258689880373 + ], + [ + "robotic", + -15.194275856018066 + ], + [ + "1603", + -15.194354057312012 + ], + [ + "▁GRIND", + -15.19442653656006 + ], + [ + "▁Universitat", + -15.194438934326172 + ], + [ + "▁Nairn", + -15.194463729858398 + ], + [ + "INARY", + -15.194520950317385 + ], + [ + "EBRA", + -15.194547653198242 + ], + [ + "▁Mazar", + -15.194831848144531 + ], + [ + "kinesis", + -15.19489288330078 + ], + [ + "▁categorise", + -15.194896697998049 + ], + [ + "▁supercharge", + -15.194986343383787 + ], + [ + "▁vanquish", + -15.195060729980469 + ], + [ + "2:54", + -15.195164680480955 + ], + [ + "7:24", + -15.195246696472168 + ], + [ + "2:46", + -15.195362091064451 + ], + [ + "▁Caledon", + -15.195394515991213 + ], + [ + "▁800-338-", + -15.19542407989502 + ], + [ + "▁Cowichan", + -15.19542407989502 + ], + [ + "▁Eugenia", + -15.19542407989502 + ], + [ + "▁Pidcock", + -15.19542407989502 + ], + [ + "▁Pinocchio", + -15.19542407989502 + ], + [ + "▁bermuda", + -15.19542407989502 + ], + [ + "▁chondroitin", + -15.19542407989502 + ], + [ + "▁iodide", + -15.19542407989502 + ], + [ + "Abruzzo", + -15.195425033569336 + ], + [ + "Intrigued", + -15.195425033569336 + ], + [ + "▁ASSOCIATES", + -15.195425033569336 + ], + [ + "▁Ambulatory", + -15.195425033569336 + ], + [ + "▁Arequipa", + -15.195425033569336 + ], + [ + "▁Armadillo", + -15.195425033569336 + ], + [ + "▁Aspergillus", + -15.195425033569336 + ], + [ + "▁Atacama", + -15.195425033569336 + ], + [ + "▁Baguette", + -15.195425033569336 + ], + [ + "▁Desjardins", + -15.195425033569336 + ], + [ + "▁Fandango", + -15.195425033569336 + ], + [ + "▁Gurdwara", + -15.195425033569336 + ], + [ + "▁MacGregor", + -15.195425033569336 + ], + [ + "▁Natchez", + -15.195425033569336 + ], + [ + "▁Nathanael", + -15.195425033569336 + ], + [ + "▁Nutanix", + -15.195425033569336 + ], + [ + "▁Penzance", + -15.195425033569336 + ], + [ + "▁Pergamon", + -15.195425033569336 + ], + [ + "▁Precautions", + -15.195425033569336 + ], + [ + "▁Siobhan", + -15.195425033569336 + ], + [ + "▁Société", + -15.195425033569336 + ], + [ + "▁Tourmaline", + -15.195425033569336 + ], + [ + "▁Tribulation", + -15.195425033569336 + ], + [ + "▁WHMCS", + -15.195425033569336 + ], + [ + "▁Xarelto", + -15.195425033569336 + ], + [ + "▁bifida", + -15.195425033569336 + ], + [ + "▁declination", + -15.195425033569336 + ], + [ + "▁disapproving", + -15.195425033569336 + ], + [ + "▁dysphagia", + -15.195425033569336 + ], + [ + "▁exfoliant", + -15.195425033569336 + ], + [ + "▁hyperthermia", + -15.195425033569336 + ], + [ + "▁inexcusable", + -15.195425033569336 + ], + [ + "▁inexhaustible", + -15.195425033569336 + ], + [ + "▁neuropathic", + -15.195425033569336 + ], + [ + "▁nintendo", + -15.195425033569336 + ], + [ + "▁recumbent", + -15.195425033569336 + ], + [ + "▁sandblasted", + -15.195425033569336 + ], + [ + "▁sauvignon", + -15.195425033569336 + ], + [ + "▁skittish", + -15.195425033569336 + ], + [ + "▁spirulina", + -15.195425033569336 + ], + [ + "▁taxidermy", + -15.195425033569336 + ], + [ + "▁triangulation", + -15.195425033569336 + ], + [ + "▁unconvincing", + -15.195425033569336 + ], + [ + "Accumulation", + -15.195425987243652 + ], + [ + "Agostino", + -15.195425987243652 + ], + [ + "licobacter", + -15.195425987243652 + ], + [ + "▁McCaffrey", + -15.195425987243652 + ], + [ + "▁Wayfarer", + -15.195425987243652 + ], + 
[ + "▁circumferential", + -15.195425987243652 + ], + [ + "▁commerical", + -15.195425987243652 + ], + [ + "▁Tofino", + -15.195426940917969 + ], + [ + "▁CarRentals", + -15.195427894592283 + ], + [ + "▁Ekaterin", + -15.195427894592283 + ], + [ + "▁Lundqvist", + -15.195427894592283 + ], + [ + "▁NETGEAR", + -15.195427894592283 + ], + [ + "▁Sherbrooke", + -15.195427894592283 + ], + [ + "vimeo", + -15.195429801940918 + ], + [ + "▁CHROME", + -15.195430755615234 + ], + [ + "▁Peebles", + -15.19543170928955 + ], + [ + "▁Popsicle", + -15.19543170928955 + ], + [ + "▁anthrax", + -15.19543170928955 + ], + [ + "▁Gottfried", + -15.195433616638184 + ], + [ + "▁GraphQL", + -15.1954345703125 + ], + [ + "▁Soweto", + -15.1954345703125 + ], + [ + "▁Ideology", + -15.195436477661133 + ], + [ + "▁tortuous", + -15.195436477661133 + ], + [ + "▁CONTEST", + -15.195439338684082 + ], + [ + "▁Doolittle", + -15.195439338684082 + ], + [ + "▁taurine", + -15.195440292358398 + ], + [ + "▁Calloway", + -15.195441246032717 + ], + [ + "▁Snooze", + -15.195442199707031 + ], + [ + "matthew", + -15.195443153381348 + ], + [ + "▁Limoges", + -15.195446014404297 + ], + [ + "▁Cyrillic", + -15.195446968078612 + ], + [ + "▁CREEK", + -15.195452690124512 + ], + [ + "▁racquetball", + -15.195453643798828 + ], + [ + "▁paladin", + -15.195454597473145 + ], + [ + "Conceived", + -15.195457458496094 + ], + [ + "-8873", + -15.19546127319336 + ], + [ + "▁Weightlifting", + -15.195462226867676 + ], + [ + "▁cyberbullying", + -15.195462226867676 + ], + [ + "▁Gosselin", + -15.19546604156494 + ], + [ + "▁Khushi", + -15.195467948913574 + ], + [ + "▁Malacca", + -15.195467948913574 + ], + [ + "▁savoir", + -15.195469856262209 + ], + [ + "▁Neurosci", + -15.19547176361084 + ], + [ + "▁bleep", + -15.195473670959473 + ], + [ + "▁Farmingdale", + -15.195476531982422 + ], + [ + "▁dumbfounded", + -15.195478439331056 + ], + [ + "▁Jumia", + -15.195480346679688 + ], + [ + "▁Kunming", + -15.19548225402832 + ], + [ + "Ambrosio", + -15.195488929748535 + ], + [ + "▁Subhash", + -15.195499420166016 + ], + [ + "▁hovercraft", + -15.195504188537598 + ], + [ + "▁biocompatible", + -15.195510864257812 + ], + [ + "▁Pratap", + -15.195513725280762 + ], + [ + "▁BUTTER", + -15.195518493652344 + ], + [ + "▁shirk", + -15.19552230834961 + ], + [ + "▁unibody", + -15.195527076721191 + ], + [ + "▁Stronghold", + -15.19552993774414 + ], + [ + "▁Manulife", + -15.195531845092772 + ], + [ + "amzn", + -15.195537567138672 + ], + [ + "▁Maputo", + -15.195539474487305 + ], + [ + "phosphoryl", + -15.19554042816162 + ], + [ + "▁Ferndale", + -15.195545196533203 + ], + [ + "▁Dartford", + -15.195547103881836 + ], + [ + "▁honorarium", + -15.19555950164795 + ], + [ + "▁Tidewater", + -15.195574760437012 + ], + [ + "▁Kairos", + -15.195576667785645 + ], + [ + "▁Yossi", + -15.195586204528809 + ], + [ + "▁ultrafine", + -15.195594787597656 + ], + [ + "▁eventualities", + -15.195599555969238 + ], + [ + "▁Darrin", + -15.195600509643556 + ], + [ + "▁Paneer", + -15.195600509643556 + ], + [ + "▁Gwendol", + -15.195611000061035 + ], + [ + "▁MBTA", + -15.195632934570312 + ], + [ + "▁Colvin", + -15.195633888244627 + ], + [ + "▁krill", + -15.195637702941896 + ], + [ + "▁Trichy", + -15.195638656616213 + ], + [ + "▁photosynthetic", + -15.19565200805664 + ], + [ + "▁Rayleigh", + -15.195652961730955 + ], + [ + "▁Tiago", + -15.195667266845703 + ], + [ + "▁DARPA", + -15.195673942565918 + ], + [ + "▁IICRC", + -15.195673942565918 + ], + [ + "▁burl", + -15.195737838745115 + ], + [ + "▁spectre", + -15.195743560791016 + ], + [ + 
"▁Pomeranian", + -15.195745468139648 + ], + [ + "Revisit", + -15.195751190185549 + ], + [ + "▁(1-1)", + -15.195759773254396 + ], + [ + "▁Mullan", + -15.195760726928713 + ], + [ + "▁shrunken", + -15.195760726928713 + ], + [ + "▁sentimentality", + -15.195764541625977 + ], + [ + "▁lasik", + -15.195771217346191 + ], + [ + "▁Bayonne", + -15.195795059204102 + ], + [ + "▁70-200", + -15.195817947387695 + ], + [ + "▁Trello", + -15.195817947387695 + ], + [ + "Swelling", + -15.195855140686035 + ], + [ + "▁discoloured", + -15.195855140686035 + ], + [ + "▁monopod", + -15.1958646774292 + ], + [ + "▁majestically", + -15.195867538452148 + ], + [ + "▁defame", + -15.195873260498049 + ], + [ + "▁bulbous", + -15.195894241333008 + ], + [ + "▁TANK", + -15.195900917053224 + ], + [ + "▁Avoidance", + -15.195903778076172 + ], + [ + "▁Strongest", + -15.195930480957031 + ], + [ + "▁Piglet", + -15.195940971374512 + ], + [ + "▁terrorizing", + -15.195950508117676 + ], + [ + "▁$13,000", + -15.195951461791992 + ], + [ + "▁oxen", + -15.195968627929688 + ], + [ + "▁streetlights", + -15.1959810256958 + ], + [ + "▁Individualized", + -15.19599723815918 + ], + [ + "▁dissenters", + -15.196027755737305 + ], + [ + "▁Halford", + -15.196036338806152 + ], + [ + "▁prankster", + -15.196049690246582 + ], + [ + "▁acclimatize", + -15.196053504943848 + ], + [ + "Coriander", + -15.196112632751465 + ], + [ + "▁pediment", + -15.196158409118652 + ], + [ + "▁Gillard", + -15.19616413116455 + ], + [ + "▁Nivea", + -15.196185111999512 + ], + [ + "▁Frontera", + -15.196187019348145 + ], + [ + "▁prayerfully", + -15.196188926696776 + ], + [ + "▁Waterpark", + -15.196244239807127 + ], + [ + "▁reinvestment", + -15.19626235961914 + ], + [ + "▁Blister", + -15.196313858032228 + ], + [ + "▁Straub", + -15.196318626403809 + ], + [ + "▁titer", + -15.196334838867188 + ], + [ + "▁Buckland", + -15.196340560913086 + ], + [ + "▁unmounted", + -15.19634246826172 + ], + [ + "▁seafloor", + -15.196349143981934 + ], + [ + "▁ADMIN", + -15.196358680725098 + ], + [ + "▁Headteacher", + -15.19642448425293 + ], + [ + "▁jewelery", + -15.196436882019045 + ], + [ + "▁Artistry", + -15.19651699066162 + ], + [ + "1:59", + -15.196537017822266 + ], + [ + "▁Coolant", + -15.1965970993042 + ], + [ + "OOM", + -15.196602821350098 + ], + [ + "▁Liao", + -15.196648597717283 + ], + [ + "Nausea", + -15.196695327758787 + ], + [ + "▁Concourse", + -15.196769714355469 + ], + [ + "▁Denham", + -15.196822166442873 + ], + [ + "▁bushland", + -15.1968994140625 + ], + [ + "▁predispose", + -15.196918487548828 + ], + [ + "rhabd", + -15.196969032287598 + ], + [ + "▁$0.25", + -15.19704818725586 + ], + [ + "ETCH", + -15.19710636138916 + ], + [ + "▁+49", + -15.197222709655762 + ], + [ + "▁Felder", + -15.197226524353027 + ], + [ + "▁Cavill", + -15.197233200073242 + ], + [ + "▁czar", + -15.197279930114746 + ], + [ + "dependence", + -15.197555541992188 + ], + [ + "Consolidate", + -15.197620391845703 + ], + [ + "▁Nauti", + -15.197815895080566 + ], + [ + "▁beseech", + -15.198102951049805 + ], + [ + "9:37", + -15.198113441467283 + ], + [ + "▁Fillet", + -15.19814682006836 + ], + [ + "Maximizing", + -15.198203086853027 + ], + [ + "Companion", + -15.198261260986328 + ], + [ + "▁Gumbo", + -15.1983060836792 + ], + [ + "▁mooncake", + -15.19833278656006 + ], + [ + "thrift", + -15.198357582092283 + ], + [ + "culpa", + -15.198381423950195 + ], + [ + "PRODUCT", + -15.198400497436523 + ], + [ + "whiplash", + -15.198402404785156 + ], + [ + "Ordinary", + -15.198416709899902 + ], + [ + "Sliding", + -15.198420524597168 + ], + [ + 
"Cinnamon", + -15.198421478271484 + ], + [ + "Twilight", + -15.19842529296875 + ], + [ + "Mitsubishi", + -15.198426246643066 + ], + [ + "arrelage", + -15.198430061340332 + ], + [ + "Telegram", + -15.19843578338623 + ], + [ + "comparison", + -15.198437690734863 + ], + [ + "Sacramento", + -15.19843864440918 + ], + [ + "fulfillment", + -15.19843864440918 + ], + [ + "Prescription", + -15.198439598083496 + ], + [ + "Sheffield", + -15.198439598083496 + ], + [ + "Porsche", + -15.198440551757812 + ], + [ + "Technologies", + -15.198442459106444 + ], + [ + "Pamela", + -15.198444366455078 + ], + [ + "receiving", + -15.198445320129396 + ], + [ + "Engineered", + -15.198448181152344 + ], + [ + "▁goggle", + -15.198453903198242 + ], + [ + "Whitney", + -15.198469161987305 + ], + [ + "whistle", + -15.198501586914062 + ], + [ + "limestone", + -15.198511123657228 + ], + [ + "Providence", + -15.198514938354492 + ], + [ + "recruit", + -15.198515892028809 + ], + [ + "Bradford", + -15.198529243469238 + ], + [ + "tribal", + -15.198569297790527 + ], + [ + "▁Flix", + -15.198596954345703 + ], + [ + "motorized", + -15.198649406433104 + ], + [ + "Preston", + -15.198694229125977 + ], + [ + "silica", + -15.19870662689209 + ], + [ + "▁Wilmot", + -15.198707580566406 + ], + [ + "Brendan", + -15.198721885681152 + ], + [ + "Napoli", + -15.198766708374023 + ], + [ + "Zulu", + -15.198775291442873 + ], + [ + "Derby", + -15.198823928833008 + ], + [ + "▁Falco", + -15.1989107131958 + ], + [ + "Consultation", + -15.198949813842772 + ], + [ + "▁Visco", + -15.1990327835083 + ], + [ + "▁Chimp", + -15.19908332824707 + ], + [ + "Pesa", + -15.199191093444824 + ], + [ + "wallpaper", + -15.199234008789062 + ], + [ + "RFID", + -15.199292182922363 + ], + [ + "▁Dinar", + -15.199323654174805 + ], + [ + "▁gpu", + -15.19948387145996 + ], + [ + "▁Mitsui", + -15.199512481689451 + ], + [ + "(*)", + -15.199673652648926 + ], + [ + "Futuristic", + -15.19971752166748 + ], + [ + "Spotlight", + -15.199810981750488 + ], + [ + "▁Wohl", + -15.199899673461914 + ], + [ + "▁fasci", + -15.200119018554688 + ], + [ + "Sharpen", + -15.200169563293455 + ], + [ + "haadi", + -15.200201988220217 + ], + [ + "blur", + -15.200235366821287 + ], + [ + "Toggle", + -15.200252532958984 + ], + [ + "opharyngeal", + -15.200270652770996 + ], + [ + "▁cozies", + -15.200443267822266 + ], + [ + "▁Erdo", + -15.200464248657228 + ], + [ + "emps", + -15.200478553771973 + ], + [ + "▁Mahadev", + -15.200495719909668 + ], + [ + "▁Ehrlich", + -15.20072078704834 + ], + [ + "▁Nikol", + -15.200811386108398 + ], + [ + "rainbow", + -15.200898170471191 + ], + [ + "Ameen", + -15.201000213623049 + ], + [ + "munition", + -15.201305389404297 + ], + [ + "JPEG", + -15.201354026794434 + ], + [ + "1011", + -15.20139503479004 + ], + [ + "[11:", + -15.201446533203123 + ], + [ + "6:27", + -15.201592445373535 + ], + [ + "Bullish", + -15.201781272888184 + ], + [ + "YUM", + -15.20191478729248 + ], + [ + "DDM", + -15.202131271362305 + ], + [ + "3:24", + -15.202152252197266 + ], + [ + "8:21", + -15.202350616455078 + ], + [ + "▁Patrik", + -15.202731132507324 + ], + [ + "▁Girardi", + -15.202937126159668 + ], + [ + "gemini", + -15.20303726196289 + ], + [ + "7,600", + -15.203600883483888 + ], + [ + "9:18", + -15.203690528869627 + ], + [ + "handel", + -15.203784942626951 + ], + [ + "alvin", + -15.20383644104004 + ], + [ + "▁ACMP", + -15.203899383544922 + ], + [ + "▁BLIND", + -15.203970909118652 + ], + [ + "affair", + -15.204068183898926 + ], + [ + "1–4", + -15.204333305358888 + ], + [ + "Airfare", + -15.20446491241455 + 
], + [ + "▁$200.00", + -15.204486846923828 + ], + [ + "ISTE", + -15.204607009887695 + ], + [ + "5:15", + -15.204734802246094 + ], + [ + "▁Soloist", + -15.204936027526855 + ], + [ + "▁LOTR", + -15.205048561096191 + ], + [ + "▁Majid", + -15.205072402954102 + ], + [ + "▁Werth", + -15.20523166656494 + ], + [ + "▁Kuhl", + -15.205360412597656 + ], + [ + "overflow", + -15.205451011657717 + ], + [ + "Coverlets", + -15.205452919006348 + ], + [ + "Presidential", + -15.205543518066406 + ], + [ + "Libya", + -15.205755233764648 + ], + [ + "▁radiologic", + -15.20649242401123 + ], + [ + "▁Matara", + -15.206522941589355 + ], + [ + "WAF", + -15.206585884094238 + ], + [ + "2:26", + -15.206660270690918 + ], + [ + "8(1):", + -15.207358360290527 + ], + [ + "▁rollator", + -15.207582473754885 + ], + [ + "▁Ludo", + -15.207598686218262 + ], + [ + "▁Sadiq", + -15.20768928527832 + ], + [ + "▁Musco", + -15.20777416229248 + ], + [ + "▁effectual", + -15.20812702178955 + ], + [ + "odling", + -15.208467483520508 + ], + [ + "▁08:30", + -15.208468437194824 + ], + [ + "▁Kiku", + -15.208529472351074 + ], + [ + "7.75", + -15.20858383178711 + ], + [ + "oracle", + -15.208616256713867 + ], + [ + "abbo", + -15.2091064453125 + ], + [ + "▁Foucault", + -15.20912265777588 + ], + [ + "▁McCauley", + -15.20912265777588 + ], + [ + "▁misbehaving", + -15.20912265777588 + ], + [ + "▁streusel", + -15.20912265777588 + ], + [ + "EXTREMELY", + -15.209123611450195 + ], + [ + "MOSCOW", + -15.209123611450195 + ], + [ + "PukiWiki", + -15.209123611450195 + ], + [ + "▁Albemarle", + -15.209123611450195 + ], + [ + "▁Aurobindo", + -15.209123611450195 + ], + [ + "▁Balanchine", + -15.209123611450195 + ], + [ + "▁Batangas", + -15.209123611450195 + ], + [ + "▁COMMUNICATION", + -15.209123611450195 + ], + [ + "▁Calistoga", + -15.209123611450195 + ], + [ + "▁Charlemagne", + -15.209123611450195 + ], + [ + "▁Compendium", + -15.209123611450195 + ], + [ + "▁Connemara", + -15.209123611450195 + ], + [ + "▁Eggleston", + -15.209123611450195 + ], + [ + "▁Ernakulam", + -15.209123611450195 + ], + [ + "▁Kamehameha", + -15.209123611450195 + ], + [ + "▁Kashyap", + -15.209123611450195 + ], + [ + "▁Kootenay", + -15.209123611450195 + ], + [ + "▁Manfrotto", + -15.209123611450195 + ], + [ + "▁Parmigiano", + -15.209123611450195 + ], + [ + "▁Paysafecard", + -15.209123611450195 + ], + [ + "▁Penobscot", + -15.209123611450195 + ], + [ + "▁Plattsburgh", + -15.209123611450195 + ], + [ + "▁Rottweiler", + -15.209123611450195 + ], + [ + "▁Spaulding", + -15.209123611450195 + ], + [ + "▁Tohoku", + -15.209123611450195 + ], + [ + "▁Umberto", + -15.209123611450195 + ], + [ + "▁Wedgwood", + -15.209123611450195 + ], + [ + "▁embarass", + -15.209123611450195 + ], + [ + "▁estoppel", + -15.209123611450195 + ], + [ + "▁evaluative", + -15.209123611450195 + ], + [ + "▁fibrin", + -15.209123611450195 + ], + [ + "▁keurig", + -15.209123611450195 + ], + [ + "▁lebih", + -15.209123611450195 + ], + [ + "▁monstrosity", + -15.209123611450195 + ], + [ + "▁musculature", + -15.209123611450195 + ], + [ + "▁preponderance", + -15.209123611450195 + ], + [ + "▁ricochet", + -15.209123611450195 + ], + [ + "▁sluice", + -15.209123611450195 + ], + [ + "▁sublimated", + -15.209123611450195 + ], + [ + "▁subreddit", + -15.209123611450195 + ], + [ + "▁taffeta", + -15.209123611450195 + ], + [ + "▁unconcerned", + -15.209123611450195 + ], + [ + "▁uninhabitable", + -15.209123611450195 + ], + [ + "▁vindictive", + -15.209123611450195 + ], + [ + "▁wildebeest", + -15.209123611450195 + ], + [ + "Salutations", + -15.209124565124512 + ], + 
[ + "phenomenological", + -15.209124565124512 + ], + [ + "▁ENGINEERING", + -15.209124565124512 + ], + [ + "▁Likud", + -15.209124565124512 + ], + [ + "▁chamois", + -15.209124565124512 + ], + [ + "▁pectoral", + -15.209124565124512 + ], + [ + "▁vassal", + -15.209125518798828 + ], + [ + "▁Chatterjee", + -15.209126472473145 + ], + [ + "▁Detachable", + -15.209126472473145 + ], + [ + "▁McCracken", + -15.209126472473145 + ], + [ + "▁glycoprotein", + -15.209126472473145 + ], + [ + "▁DISPLAY", + -15.209128379821776 + ], + [ + "▁Gretzky", + -15.209128379821776 + ], + [ + "▁Panamera", + -15.209128379821776 + ], + [ + "▁bouillon", + -15.209128379821776 + ], + [ + "Replenish", + -15.209129333496094 + ], + [ + "▁Catalytic", + -15.209129333496094 + ], + [ + "▁flippant", + -15.209129333496094 + ], + [ + "▁cybernetic", + -15.20913028717041 + ], + [ + "▁Gainsborough", + -15.209131240844728 + ], + [ + "▁Winstrol", + -15.209131240844728 + ], + [ + "▁espanol", + -15.209131240844728 + ], + [ + "▁googly", + -15.209131240844728 + ], + [ + "Funnily", + -15.209132194519045 + ], + [ + "▁Gulliver", + -15.209132194519045 + ], + [ + "▁Ichigo", + -15.20913314819336 + ], + [ + "▁Grinnell", + -15.209134101867676 + ], + [ + "▁symlink", + -15.209135055541992 + ], + [ + "▁Lombok", + -15.209136009216309 + ], + [ + "▁Ribeiro", + -15.209136009216309 + ], + [ + "▁psychotropic", + -15.209136009216309 + ], + [ + "▁linoleic", + -15.209136962890623 + ], + [ + "▁Oxnard", + -15.20913791656494 + ], + [ + "▁russet", + -15.20913791656494 + ], + [ + "▁Opéra", + -15.20914077758789 + ], + [ + "▁cowardice", + -15.20914077758789 + ], + [ + "▁Doritos", + -15.209141731262209 + ], + [ + "▁concordance", + -15.20914363861084 + ], + [ + "▁Rococo", + -15.209144592285156 + ], + [ + "▁Snorkeling", + -15.209144592285156 + ], + [ + "▁pompom", + -15.209150314331056 + ], + [ + "▁Bridgwater", + -15.209151268005373 + ], + [ + "▁diurnal", + -15.209151268005373 + ], + [ + "▁(844)", + -15.20915412902832 + ], + [ + "▁conscription", + -15.209155082702637 + ], + [ + "▁lysate", + -15.209155082702637 + ], + [ + "▁microchipped", + -15.209156036376951 + ], + [ + "▁Sevierville", + -15.209157943725586 + ], + [ + "▁awsome", + -15.209158897399902 + ], + [ + "▁misconfigured", + -15.209158897399902 + ], + [ + "▁Hossein", + -15.20915985107422 + ], + [ + "▁Ballerina", + -15.209161758422852 + ], + [ + "▁Tyrrell", + -15.209162712097168 + ], + [ + "▁mishandling", + -15.2091646194458 + ], + [ + "▁glycine", + -15.209165573120115 + ], + [ + "▁GOVERNMENT", + -15.209166526794434 + ], + [ + "▁Shildon", + -15.209169387817385 + ], + [ + "▁Tailwind", + -15.2091703414917 + ], + [ + "▁Rubenstein", + -15.209172248840332 + ], + [ + "▁(1963)", + -15.209173202514648 + ], + [ + "▁Slingshot", + -15.209176063537598 + ], + [ + "▁Decathlon", + -15.209179878234863 + ], + [ + "▁SINCE", + -15.209179878234863 + ], + [ + "▁Severance", + -15.20918083190918 + ], + [ + "▁Waynesboro", + -15.209182739257812 + ], + [ + "▁Mandolin", + -15.209186553955078 + ], + [ + "▁cGMP", + -15.209186553955078 + ], + [ + "▁Vagrant", + -15.209190368652344 + ], + [ + "▁Olimpi", + -15.20919132232666 + ], + [ + "▁Yerba", + -15.209192276000977 + ], + [ + "TEACHER", + -15.209196090698242 + ], + [ + "▁AECOM", + -15.209198951721191 + ], + [ + "▁Torbay", + -15.209198951721191 + ], + [ + "▁embroideries", + -15.209198951721191 + ], + [ + "SHIRE", + -15.209199905395508 + ], + [ + "▁specular", + -15.209199905395508 + ], + [ + "▁Refinishing", + -15.209200859069824 + ], + [ + "▁Haddock", + -15.20920181274414 + ], + [ + "▁surreptitiously", 
+ -15.209205627441406 + ], + [ + "▁subdirectory", + -15.209206581115724 + ], + [ + "rmitage", + -15.20920753479004 + ], + [ + "▁Essure", + -15.209216117858888 + ], + [ + "▁refuelling", + -15.20921802520752 + ], + [ + "OLOGICAL", + -15.209218978881836 + ], + [ + "▁Negara", + -15.209223747253418 + ], + [ + "▁SIGNED", + -15.209233283996582 + ], + [ + "▁Boateng", + -15.209237098693848 + ], + [ + "▁Taupo", + -15.20924472808838 + ], + [ + "▁Overseer", + -15.209248542785645 + ], + [ + "▁Jerez", + -15.20926570892334 + ], + [ + "▁unpolished", + -15.20926570892334 + ], + [ + "▁Lumira", + -15.209287643432615 + ], + [ + "▁unstuck", + -15.209291458129885 + ], + [ + "▁2009-2010", + -15.209296226501465 + ], + [ + "▁Cavity", + -15.209307670593262 + ], + [ + "▁Prefect", + -15.209318161010742 + ], + [ + "▁utero", + -15.209341049194336 + ], + [ + "Centrify", + -15.209343910217283 + ], + [ + "▁muggy", + -15.209362983703612 + ], + [ + "▁Fairhaven", + -15.20936393737793 + ], + [ + "▁knickers", + -15.20937728881836 + ], + [ + "DEBUG", + -15.20938205718994 + ], + [ + "▁5-10%", + -15.20938205718994 + ], + [ + "▁LOUD", + -15.209431648254396 + ], + [ + "▁Sickness", + -15.209486961364746 + ], + [ + "▁placental", + -15.209494590759276 + ], + [ + "▁Jetpack", + -15.209495544433594 + ], + [ + "▁extradited", + -15.209502220153809 + ], + [ + "▁milkweed", + -15.209508895874023 + ], + [ + "icchio", + -15.209528923034668 + ], + [ + "▁888-247", + -15.209559440612791 + ], + [ + "▁Dreamcast", + -15.209577560424805 + ], + [ + "▁rootkit", + -15.209580421447754 + ], + [ + "▁Hopping", + -15.20958423614502 + ], + [ + "▁pacify", + -15.20958423614502 + ], + [ + "▁Ratner", + -15.20959758758545 + ], + [ + "▁Horgan", + -15.209612846374512 + ], + [ + "▁rewiring", + -15.20961570739746 + ], + [ + "▁ajax", + -15.2096529006958 + ], + [ + "▁Liquidity", + -15.209686279296877 + ], + [ + "▁Tween", + -15.20969009399414 + ], + [ + "▁suffixes", + -15.209712028503418 + ], + [ + "▁greenbelt", + -15.209732055664062 + ], + [ + "▁icecream", + -15.209732055664062 + ], + [ + "▁pooches", + -15.209758758544922 + ], + [ + "▁Timken", + -15.209771156311035 + ], + [ + "▁Morten", + -15.209843635559082 + ], + [ + "▁tomcat", + -15.20985507965088 + ], + [ + "▁Dominick", + -15.209901809692385 + ], + [ + "▁Cephal", + -15.209968566894531 + ], + [ + "▁paradoxes", + -15.209969520568848 + ], + [ + "▁Kylo", + -15.20998477935791 + ], + [ + "izado", + -15.209999084472656 + ], + [ + "▁Vinson", + -15.210023880004885 + ], + [ + "▁Rubric", + -15.210034370422363 + ], + [ + "▁Carrey", + -15.210044860839844 + ], + [ + "▁Meraki", + -15.210044860839844 + ], + [ + "venta", + -15.21004581451416 + ], + [ + "▁Barrick", + -15.210092544555664 + ], + [ + "▁Mantri", + -15.210134506225586 + ], + [ + "▁Guestbook", + -15.210161209106444 + ], + [ + "▁Tippett", + -15.210176467895508 + ], + [ + "VAST", + -15.210200309753418 + ], + [ + "0-90%", + -15.210243225097656 + ], + [ + "▁angola", + -15.210308074951172 + ], + [ + "▁Platz", + -15.210315704345703 + ], + [ + "roque", + -15.210387229919434 + ], + [ + "Singular", + -15.210410118103027 + ], + [ + "▁Civita", + -15.210472106933594 + ], + [ + "▁trespasser", + -15.21048355102539 + ], + [ + "▁SOLAR", + -15.21049976348877 + ], + [ + "▁prototypical", + -15.21058750152588 + ], + [ + "▁Patten", + -15.210614204406738 + ], + [ + "ecchio", + -15.210875511169434 + ], + [ + "Shiller", + -15.211016654968262 + ], + [ + "▁penetr", + -15.211051940917969 + ], + [ + "01753", + -15.211074829101562 + ], + [ + "Brewer", + -15.211153030395508 + ], + [ + "▁skyward", + 
-15.21129035949707 + ], + [ + "Congratulation", + -15.211381912231444 + ], + [ + "▁Jabra", + -15.21144962310791 + ], + [ + "▁camaro", + -15.211519241333008 + ], + [ + "▁Terps", + -15.211533546447754 + ], + [ + "3:29", + -15.211762428283691 + ], + [ + "introduce", + -15.211767196655272 + ], + [ + "▁Stutz", + -15.211791038513184 + ], + [ + "ngga", + -15.211906433105469 + ], + [ + "fahren", + -15.212129592895508 + ], + [ + "▁Metaphor", + -15.212284088134766 + ], + [ + "Defensive", + -15.212347984313965 + ], + [ + "▁Scheu", + -15.212410926818848 + ], + [ + "▁mutter", + -15.212414741516112 + ], + [ + "caffeinated", + -15.21245288848877 + ], + [ + "refined", + -15.212469100952148 + ], + [ + "Intense", + -15.212482452392578 + ], + [ + "Terrorism", + -15.212483406066896 + ], + [ + "Technician", + -15.212501525878906 + ], + [ + "Cottage", + -15.212507247924805 + ], + [ + "NOTICE", + -15.212507247924805 + ], + [ + "confessed", + -15.21250820159912 + ], + [ + "Hernandez", + -15.21251106262207 + ], + [ + "beverage", + -15.21251106262207 + ], + [ + "Dawson", + -15.212514877319336 + ], + [ + "Democracy", + -15.212515830993652 + ], + [ + "supervisor", + -15.212520599365234 + ], + [ + "injected", + -15.21252155303955 + ], + [ + "advocate", + -15.2125244140625 + ], + [ + "Bangkok", + -15.212525367736816 + ], + [ + "Butterfly", + -15.21252727508545 + ], + [ + "Purdue", + -15.21252727508545 + ], + [ + "Broadband", + -15.212528228759766 + ], + [ + "Amateur", + -15.212529182434082 + ], + [ + "Dependent", + -15.21254062652588 + ], + [ + "Mueller", + -15.212542533874512 + ], + [ + "Logging", + -15.212567329406738 + ], + [ + "Indigo", + -15.21257209777832 + ], + [ + "george", + -15.212590217590332 + ], + [ + "transportation", + -15.212631225585938 + ], + [ + "modification", + -15.212653160095217 + ], + [ + "acquisition", + -15.21265697479248 + ], + [ + "Sauce", + -15.212675094604492 + ], + [ + "▁substan", + -15.212686538696287 + ], + [ + "Shelter", + -15.212695121765137 + ], + [ + "whisk", + -15.212712287902832 + ], + [ + "secular", + -15.212725639343262 + ], + [ + "jdk", + -15.212950706481934 + ], + [ + "Kiwi", + -15.213035583496094 + ], + [ + "87%", + -15.213109970092772 + ], + [ + "Indonesian", + -15.213150024414062 + ], + [ + "▁Zeller", + -15.2132568359375 + ], + [ + "leisure", + -15.21327304840088 + ], + [ + "convenience", + -15.213274955749512 + ], + [ + "Bigger", + -15.213393211364746 + ], + [ + "Mamma", + -15.213521003723145 + ], + [ + "▁Durst", + -15.213556289672852 + ], + [ + "▁$280", + -15.213582992553713 + ], + [ + "▁TIGER", + -15.21358871459961 + ], + [ + "foster", + -15.213624954223633 + ], + [ + "TRIBUTE", + -15.213671684265137 + ], + [ + "▁Grae", + -15.213749885559082 + ], + [ + "fisted", + -15.213932037353516 + ], + [ + "▁reneg", + -15.213974952697754 + ], + [ + "passport", + -15.21402359008789 + ], + [ + "▁CRUD", + -15.214024543762209 + ], + [ + "Nikon", + -15.214073181152344 + ], + [ + "couch", + -15.21407413482666 + ], + [ + "auga", + -15.214126586914062 + ], + [ + "▁Shinzo", + -15.214232444763184 + ], + [ + "▁£29", + -15.214261054992676 + ], + [ + "Floral", + -15.214428901672363 + ], + [ + "тно", + -15.214599609375 + ], + [ + "▁Buble", + -15.214599609375 + ], + [ + "turismo", + -15.214731216430664 + ], + [ + "Turf", + -15.215253829956056 + ], + [ + "▁nuov", + -15.21550178527832 + ], + [ + "3:55", + -15.215660095214844 + ], + [ + "hesus", + -15.215993881225586 + ], + [ + "▁$0.10", + -15.216111183166504 + ], + [ + "Docket", + -15.21657371520996 + ], + [ + "▁anthemic", + -15.216773986816406 + ], 
+ [ + "▁Wilber", + -15.216787338256836 + ], + [ + "▁Belgi", + -15.216825485229492 + ], + [ + "7:55", + -15.216948509216309 + ], + [ + "▁Extens", + -15.217000007629396 + ], + [ + "SCAL", + -15.217080116271973 + ], + [ + "1.0.2", + -15.217129707336426 + ], + [ + "enghis", + -15.217135429382324 + ], + [ + "poration", + -15.217598915100098 + ], + [ + "amora", + -15.217887878417969 + ], + [ + "▁trample", + -15.217961311340332 + ], + [ + "ана", + -15.218040466308594 + ], + [ + "Paisa", + -15.2180814743042 + ], + [ + "▁Jeremi", + -15.218145370483398 + ], + [ + "-10-0", + -15.218279838562012 + ], + [ + "Parish", + -15.218329429626465 + ], + [ + "Masa", + -15.218459129333496 + ], + [ + "99.95", + -15.218546867370604 + ], + [ + "Merci", + -15.21888828277588 + ], + [ + "Pedia", + -15.218954086303713 + ], + [ + "▁MICR", + -15.219029426574709 + ], + [ + "▁Philippi", + -15.219173431396484 + ], + [ + "Shattered", + -15.219264030456545 + ], + [ + "4:17", + -15.219504356384276 + ], + [ + "▁Jovan", + -15.219722747802734 + ], + [ + "▁Fascia", + -15.21985912322998 + ], + [ + "▁tutti", + -15.220112800598145 + ], + [ + "gastroenterology", + -15.220417022705078 + ], + [ + "▁CONVE", + -15.220515251159668 + ], + [ + "hdl", + -15.220553398132324 + ], + [ + "2:53", + -15.220633506774902 + ], + [ + "PHOTO", + -15.220710754394531 + ], + [ + "▁vibra", + -15.220935821533203 + ], + [ + "▁Wattle", + -15.221124649047852 + ], + [ + "▁astound", + -15.221482276916504 + ], + [ + "iddhi", + -15.221487998962402 + ], + [ + "Chel", + -15.221644401550291 + ], + [ + "▁Jordy", + -15.221660614013672 + ], + [ + "rquez", + -15.221673011779783 + ], + [ + "5:35", + -15.22189712524414 + ], + [ + "OVO", + -15.222127914428713 + ], + [ + "▁Suva", + -15.222396850585938 + ], + [ + "▁privatize", + -15.22240924835205 + ], + [ + "▁enshrine", + -15.222411155700684 + ], + [ + "[00:0", + -15.22248077392578 + ], + [ + "▁PhotoShop", + -15.222609519958496 + ], + [ + "▁Sindhu", + -15.222685813903809 + ], + [ + "▁dichotom", + -15.222750663757324 + ], + [ + "▁McKen", + -15.222820281982422 + ], + [ + "▁Pharmacol", + -15.222860336303713 + ], + [ + "0–1", + -15.222992897033691 + ], + [ + "Resurrect", + -15.223011016845703 + ], + [ + "Cortisol", + -15.22301197052002 + ], + [ + "Enquiries", + -15.22301197052002 + ], + [ + "PREVIOUS", + -15.22301197052002 + ], + [ + "synuclein", + -15.22301197052002 + ], + [ + "▁800-361-3020", + -15.22301197052002 + ], + [ + "▁800-381-0266", + -15.22301197052002 + ], + [ + "▁888-269-5556", + -15.22301197052002 + ], + [ + "▁888-303-3882", + -15.22301197052002 + ], + [ + "▁Audacity", + -15.22301197052002 + ], + [ + "▁Bensenville", + -15.22301197052002 + ], + [ + "▁Bonhoeffer", + -15.22301197052002 + ], + [ + "▁Bosphorus", + -15.22301197052002 + ], + [ + "▁Cosmetology", + -15.22301197052002 + ], + [ + "▁Demonstrator", + -15.22301197052002 + ], + [ + "▁Deschutes", + -15.22301197052002 + ], + [ + "▁Dvorak", + -15.22301197052002 + ], + [ + "▁ENTERPRISE", + -15.22301197052002 + ], + [ + "▁Edelweiss", + -15.22301197052002 + ], + [ + "▁Fermanagh", + -15.22301197052002 + ], + [ + "▁Fortaleza", + -15.22301197052002 + ], + [ + "▁Genealogist", + -15.22301197052002 + ], + [ + "▁Gilgamesh", + -15.22301197052002 + ], + [ + "▁Habsburg", + -15.22301197052002 + ], + [ + "▁INSTRUCTION", + -15.22301197052002 + ], + [ + "▁Knysna", + -15.22301197052002 + ], + [ + "▁Leprechaun", + -15.22301197052002 + ], + [ + "▁Macadamia", + -15.22301197052002 + ], + [ + "▁Magdeburg", + -15.22301197052002 + ], + [ + "▁Mahabharata", + -15.22301197052002 + ], + [ + 
"▁Maldivian", + -15.22301197052002 + ], + [ + "▁Mandeville", + -15.22301197052002 + ], + [ + "▁Montalcino", + -15.22301197052002 + ], + [ + "▁Padukone", + -15.22301197052002 + ], + [ + "▁Pleistocene", + -15.22301197052002 + ], + [ + "▁Purgatory", + -15.22301197052002 + ], + [ + "▁Qureshi", + -15.22301197052002 + ], + [ + "▁TOSHIBA", + -15.22301197052002 + ], + [ + "▁Tazewell", + -15.22301197052002 + ], + [ + "▁Trapezium", + -15.22301197052002 + ], + [ + "▁Triassic", + -15.22301197052002 + ], + [ + "▁Unplugged", + -15.22301197052002 + ], + [ + "▁VILLAGE", + -15.22301197052002 + ], + [ + "▁abseiling", + -15.22301197052002 + ], + [ + "▁acetylcholine", + -15.22301197052002 + ], + [ + "▁autophagy", + -15.22301197052002 + ], + [ + "▁bonafide", + -15.22301197052002 + ], + [ + "▁caspase", + -15.22301197052002 + ], + [ + "▁combinatorial", + -15.22301197052002 + ], + [ + "▁dysentery", + -15.22301197052002 + ], + [ + "▁epistemological", + -15.22301197052002 + ], + [ + "▁etymology", + -15.22301197052002 + ], + [ + "▁flirtatious", + -15.22301197052002 + ], + [ + "▁gerbera", + -15.22301197052002 + ], + [ + "▁hermeneutic", + -15.22301197052002 + ], + [ + "▁iHeartRadio", + -15.22301197052002 + ], + [ + "▁kibbutz", + -15.22301197052002 + ], + [ + "▁klonopin", + -15.22301197052002 + ], + [ + "▁miscreants", + -15.22301197052002 + ], + [ + "▁panchayat", + -15.22301197052002 + ], + [ + "▁psoriatic", + -15.22301197052002 + ], + [ + "▁sanctum", + -15.22301197052002 + ], + [ + "▁sorcery", + -15.22301197052002 + ], + [ + "▁tamilnadu", + -15.22301197052002 + ], + [ + "▁tourbillon", + -15.22301197052002 + ], + [ + "▁vBulletin", + -15.22301197052002 + ], + [ + "▁800-301-7010", + -15.223012924194336 + ], + [ + "▁800-335-6975", + -15.223012924194336 + ], + [ + "▁Kaitlin", + -15.223012924194336 + ], + [ + "▁Macallan", + -15.223012924194336 + ], + [ + "▁Presbytery", + -15.223012924194336 + ], + [ + "▁circumnavigate", + -15.223012924194336 + ], + [ + "▁coursing", + -15.223012924194336 + ], + [ + "decentralisation", + -15.223013877868652 + ], + [ + "▁Nephrology", + -15.223013877868652 + ], + [ + "▁Udemy", + -15.223013877868652 + ], + [ + "▁Unlawful", + -15.223013877868652 + ], + [ + "▁deejay", + -15.223013877868652 + ], + [ + "▁inoculation", + -15.223013877868652 + ], + [ + "▁netizens", + -15.223013877868652 + ], + [ + "Outrageous", + -15.223014831542969 + ], + [ + "▁32/64", + -15.223014831542969 + ], + [ + "▁Exterminator", + -15.223014831542969 + ], + [ + "▁Hobbes", + -15.223014831542969 + ], + [ + "▁Unternehmen", + -15.223014831542969 + ], + [ + "▁psychedelia", + -15.223014831542969 + ], + [ + "▁Guevara", + -15.223015785217283 + ], + [ + "▁LEGEND", + -15.223015785217283 + ], + [ + "▁REMOTE", + -15.223015785217283 + ], + [ + "▁MORNING", + -15.223016738891602 + ], + [ + "▁cesium", + -15.223016738891602 + ], + [ + "▁Minotaur", + -15.223017692565918 + ], + [ + "▁WORKSHOP", + -15.223018646240234 + ], + [ + "▁EMPIRE", + -15.22301959991455 + ], + [ + "▁Ricketts", + -15.22301959991455 + ], + [ + "▁Brisket", + -15.223020553588867 + ], + [ + "▁supplication", + -15.223020553588867 + ], + [ + "▁Ponderosa", + -15.2230224609375 + ], + [ + "▁Lorelei", + -15.223024368286133 + ], + [ + "▁digicam", + -15.223024368286133 + ], + [ + "▁dispassionate", + -15.223024368286133 + ], + [ + "centrism", + -15.22302532196045 + ], + [ + "▁Eckhart", + -15.22302532196045 + ], + [ + "▁cajole", + -15.22302532196045 + ], + [ + "▁Absorption", + -15.223027229309082 + ], + [ + "BOTTOM", + -15.223029136657717 + ], + [ + "▁Lauryn", + -15.223031044006348 + ], + 
[ + "▁saddens", + -15.223031997680664 + ], + [ + "▁anathema", + -15.22303295135498 + ], + [ + "▁goddamn", + -15.223033905029297 + ], + [ + "▁honcho", + -15.223034858703612 + ], + [ + "▁Flacco", + -15.22303581237793 + ], + [ + "▁Sandpiper", + -15.22303581237793 + ], + [ + "▁forefinger", + -15.22303581237793 + ], + [ + "▁frostbite", + -15.223037719726562 + ], + [ + "▁geomagnetic", + -15.22303867340088 + ], + [ + "▁Ranieri", + -15.223040580749512 + ], + [ + "▁(1958)", + -15.223045349121094 + ], + [ + "▁Giovanna", + -15.223045349121094 + ], + [ + "▁Larimer", + -15.22304630279541 + ], + [ + "▁challah", + -15.22304630279541 + ], + [ + "▁pyrite", + -15.22304630279541 + ], + [ + "▁revelatory", + -15.223047256469728 + ], + [ + "▁Swagger", + -15.223052978515623 + ], + [ + "▁Bartolome", + -15.22305393218994 + ], + [ + "▁ITALY", + -15.223054885864258 + ], + [ + "▁DropCatch", + -15.22305965423584 + ], + [ + "▁Hongkong", + -15.223061561584473 + ], + [ + "▁23:38:3", + -15.223065376281738 + ], + [ + "▁(818)", + -15.223067283630373 + ], + [ + "▁Piercing", + -15.223068237304688 + ], + [ + "▁Nexium", + -15.223076820373535 + ], + [ + "▁nodular", + -15.223076820373535 + ], + [ + "▁CHANCE", + -15.223078727722168 + ], + [ + "gastroesophageal", + -15.2230806350708 + ], + [ + "▁Bethune", + -15.223081588745115 + ], + [ + "▁teepee", + -15.223081588745115 + ], + [ + "▁Tubular", + -15.223082542419434 + ], + [ + "▁fuming", + -15.223082542419434 + ], + [ + "▁TMNT", + -15.223085403442385 + ], + [ + "▁preamplifier", + -15.223085403442385 + ], + [ + "▁coniferous", + -15.223095893859863 + ], + [ + "▁taxicab", + -15.223095893859863 + ], + [ + "▁Durbin", + -15.223102569580078 + ], + [ + "▁Skimmer", + -15.223106384277344 + ], + [ + "▁Bamford", + -15.22310733795166 + ], + [ + "▁Omnibus", + -15.223109245300291 + ], + [ + "▁Montevideo", + -15.223111152648926 + ], + [ + "▁Sunburst", + -15.223111152648926 + ], + [ + "▁FORGET", + -15.223115921020508 + ], + [ + "▁plopped", + -15.223118782043455 + ], + [ + "▁unappreciated", + -15.223119735717772 + ], + [ + "▁towpath", + -15.223124504089355 + ], + [ + "▁5/16\"", + -15.22312831878662 + ], + [ + "▁Fulani", + -15.223133087158203 + ], + [ + "▁Kenobi", + -15.223135948181152 + ], + [ + "▁20-30%", + -15.223139762878418 + ], + [ + "▁celtic", + -15.22314739227295 + ], + [ + "▁Ribbed", + -15.223149299621582 + ], + [ + "▁ASPCA", + -15.223153114318848 + ], + [ + "▁Xanthan", + -15.223154067993164 + ], + [ + "▁laxity", + -15.223154067993164 + ], + [ + "▁Clearinghouse", + -15.22315502166748 + ], + [ + "▁Hydrating", + -15.223170280456545 + ], + [ + "▁Octavo", + -15.223173141479492 + ], + [ + "▁(310)", + -15.223176956176758 + ], + [ + "▁$175,000", + -15.223184585571287 + ], + [ + "▁Suraj", + -15.223186492919922 + ], + [ + "▁chicory", + -15.223189353942873 + ], + [ + "▁fogging", + -15.223200798034668 + ], + [ + "▁polarisation", + -15.223237991333008 + ], + [ + "rboreal", + -15.22323989868164 + ], + [ + "▁Stucco", + -15.22324562072754 + ], + [ + "▁LTTE", + -15.223246574401855 + ], + [ + "▁relativistic", + -15.223248481750488 + ], + [ + "▁Jaclyn", + -15.22325611114502 + ], + [ + "▁Castile", + -15.223268508911133 + ], + [ + "▁subaru", + -15.223270416259766 + ], + [ + "▁FireWire", + -15.223274230957031 + ], + [ + "▁Wavy", + -15.223275184631348 + ], + [ + "▁Lingfield", + -15.223276138305664 + ], + [ + "guila", + -15.223289489746094 + ], + [ + "▁luau", + -15.22329330444336 + ], + [ + "quarium", + -15.223295211791992 + ], + [ + "▁linguistically", + -15.22329807281494 + ], + [ + "▁Babson", + -15.223304748535156 
+ ], + [ + "▁DTLA", + -15.223311424255373 + ], + [ + "▁sunshade", + -15.223322868347168 + ], + [ + "▁Dermatol", + -15.223341941833496 + ], + [ + "▁Zoosk", + -15.223342895507812 + ], + [ + "PREPARE", + -15.223348617553713 + ], + [ + "▁Joystick", + -15.223353385925291 + ], + [ + "▁Encouragement", + -15.22335433959961 + ], + [ + "▁Atticus", + -15.223362922668455 + ], + [ + "▁Toilette", + -15.22337532043457 + ], + [ + "▁eccentricity", + -15.223381996154783 + ], + [ + "▁innkeeper", + -15.223381996154783 + ], + [ + "▁Menace", + -15.223413467407228 + ], + [ + "▁CZK", + -15.223418235778809 + ], + [ + "▁covalent", + -15.22342300415039 + ], + [ + "▁colloquially", + -15.223426818847656 + ], + [ + "▁Buckner", + -15.223433494567873 + ], + [ + "▁deactivation", + -15.223443031311035 + ], + [ + "▁Contador", + -15.223456382751465 + ], + [ + "▁Patong", + -15.223461151123049 + ], + [ + "▁Retinal", + -15.223503112792969 + ], + [ + "▁Eighties", + -15.22351360321045 + ], + [ + "▁Clearview", + -15.223546028137209 + ], + [ + "▁CareCredit", + -15.22354793548584 + ], + [ + "▁Horley", + -15.223577499389648 + ], + [ + "▁salvo", + -15.22361660003662 + ], + [ + "chola", + -15.223637580871582 + ], + [ + "▁Igbo", + -15.223638534545898 + ], + [ + "▁pansies", + -15.223660469055176 + ], + [ + "▁skateboarders", + -15.223660469055176 + ], + [ + "▁Subbed", + -15.22366428375244 + ], + [ + "▁WebKit", + -15.22366714477539 + ], + [ + "▁Migos", + -15.2236967086792 + ], + [ + "▁Ribble", + -15.223713874816896 + ], + [ + "imidazo", + -15.22375774383545 + ], + [ + "▁Renaud", + -15.223803520202637 + ], + [ + "7:18", + -15.223843574523926 + ], + [ + "▁arbitral", + -15.223896980285645 + ], + [ + "vaya", + -15.22390842437744 + ], + [ + "▁Luft", + -15.223955154418944 + ], + [ + "▁Attleboro", + -15.223963737487791 + ], + [ + "▁fearlessness", + -15.223966598510742 + ], + [ + "▁Turku", + -15.223978996276855 + ], + [ + "HRTAC", + -15.224004745483398 + ], + [ + "▁Freebies", + -15.22400951385498 + ], + [ + "▁salic", + -15.224045753479004 + ], + [ + "▁Yantra", + -15.2241792678833 + ], + [ + "▁Jerri", + -15.224294662475586 + ], + [ + "▁Fushi", + -15.224327087402344 + ], + [ + "15-0", + -15.224432945251465 + ], + [ + "▁droit", + -15.224435806274414 + ], + [ + "▁Estado", + -15.22447109222412 + ], + [ + "▁Chaka", + -15.224587440490724 + ], + [ + "MOST", + -15.224634170532228 + ], + [ + "▁blindfold", + -15.224701881408691 + ], + [ + "gesetz", + -15.2247314453125 + ], + [ + "▁NAIL", + -15.224860191345217 + ], + [ + "▁scrollbar", + -15.22492218017578 + ], + [ + "assault", + -15.225221633911133 + ], + [ + "Coulomb", + -15.225482940673828 + ], + [ + "▁Wacker", + -15.225619316101074 + ], + [ + "planner", + -15.225763320922852 + ], + [ + "▁Summa", + -15.226078033447266 + ], + [ + "uuuuu", + -15.226092338562012 + ], + [ + "Boro", + -15.226099014282228 + ], + [ + "▁entreat", + -15.226140975952148 + ], + [ + "▁Leaderboard", + -15.226336479187012 + ], + [ + "retardant", + -15.226388931274414 + ], + [ + "ANDREW", + -15.226469039916992 + ], + [ + "4:29", + -15.226502418518066 + ], + [ + "ennett", + -15.226513862609863 + ], + [ + "Barclay", + -15.226613998413086 + ], + [ + "▁Esso", + -15.22663116455078 + ], + [ + "negotiate", + -15.22667121887207 + ], + [ + "Gadget", + -15.22673511505127 + ], + [ + "Farewell", + -15.22673797607422 + ], + [ + "Elegance", + -15.2267427444458 + ], + [ + "Collagen", + -15.226746559143066 + ], + [ + "Scenario", + -15.226758003234863 + ], + [ + "decided", + -15.226759910583496 + ], + [ + "distinguish", + -15.226765632629396 + ], + [ + 
"Sensitive", + -15.226774215698242 + ], + [ + "▁Brussel", + -15.226778984069824 + ], + [ + "Moisture", + -15.22677993774414 + ], + [ + "Accredited", + -15.22678279876709 + ], + [ + "Http", + -15.22678279876709 + ], + [ + "SERVPRO", + -15.22678279876709 + ], + [ + "DOWNLOAD", + -15.226786613464355 + ], + [ + "Technological", + -15.226788520812988 + ], + [ + "Magnesium", + -15.22679042816162 + ], + [ + "threshold", + -15.226798057556152 + ], + [ + "Schwartz", + -15.226799011230469 + ], + [ + "Kelsey", + -15.226799964904783 + ], + [ + "NORTH", + -15.226799964904783 + ], + [ + "indulge", + -15.226799964904783 + ], + [ + "Thomson", + -15.226800918579102 + ], + [ + "Veronica", + -15.226800918579102 + ], + [ + "communicate", + -15.226800918579102 + ], + [ + "Abbott", + -15.226801872253418 + ], + [ + "Cambodia", + -15.226805686950684 + ], + [ + "Communities", + -15.226805686950684 + ], + [ + "favour", + -15.226808547973633 + ], + [ + "municipal", + -15.22680950164795 + ], + [ + "bamboo", + -15.226811408996582 + ], + [ + "stylish", + -15.22681713104248 + ], + [ + "suggestions", + -15.226831436157228 + ], + [ + "Arjun", + -15.226835250854492 + ], + [ + "Provence", + -15.226840019226074 + ], + [ + "Adaptive", + -15.22684097290039 + ], + [ + "Cocktail", + -15.226842880249023 + ], + [ + "recruitment", + -15.22684383392334 + ], + [ + "LARGE", + -15.226861953735352 + ], + [ + "PSY", + -15.226861953735352 + ], + [ + "Urgent", + -15.226873397827148 + ], + [ + "duplex", + -15.226877212524414 + ], + [ + "documentation", + -15.226881980895996 + ], + [ + "bargain", + -15.226906776428224 + ], + [ + "enemies", + -15.226919174194336 + ], + [ + "midnight", + -15.226953506469728 + ], + [ + "6:11", + -15.226971626281738 + ], + [ + "Otto", + -15.226981163024902 + ], + [ + "banned", + -15.2269926071167 + ], + [ + "Sheikh", + -15.226993560791016 + ], + [ + "Lyme", + -15.227005004882812 + ], + [ + "gambling", + -15.22724437713623 + ], + [ + "▁dink", + -15.227254867553713 + ], + [ + "▁lira", + -15.227265357971191 + ], + [ + "▁Capote", + -15.22727108001709 + ], + [ + "offender", + -15.227300643920898 + ], + [ + "somewhere", + -15.227300643920898 + ], + [ + "Rudy", + -15.227377891540527 + ], + [ + "excessive", + -15.227411270141602 + ], + [ + "▁Delica", + -15.227579116821287 + ], + [ + "comparable", + -15.22783374786377 + ], + [ + "pension", + -15.227890968322754 + ], + [ + "▁23:4", + -15.227897644042969 + ], + [ + "TOY", + -15.227898597717283 + ], + [ + "Ditto", + -15.227993965148926 + ], + [ + "ouffe", + -15.228072166442873 + ], + [ + "Psychological", + -15.228141784667969 + ], + [ + "1870", + -15.228371620178224 + ], + [ + "univers", + -15.228487014770508 + ], + [ + "hilton", + -15.22853660583496 + ], + [ + "▁connote", + -15.228829383850098 + ], + [ + "uuuu", + -15.228856086730955 + ], + [ + "▁comport", + -15.228859901428224 + ], + [ + "6:24", + -15.22889232635498 + ], + [ + "chute", + -15.228927612304688 + ], + [ + "00-260", + -15.228983879089355 + ], + [ + "FDF", + -15.22898769378662 + ], + [ + "▁spatio", + -15.229066848754885 + ], + [ + "olvency", + -15.229268074035645 + ], + [ + "boiler", + -15.229305267333984 + ], + [ + "ovskaya", + -15.229314804077148 + ], + [ + "▁Kauf", + -15.229585647583008 + ], + [ + "1722", + -15.229607582092283 + ], + [ + "FSR", + -15.230280876159668 + ], + [ + "▁braise", + -15.230291366577148 + ], + [ + "▁GIVEN", + -15.230295181274414 + ], + [ + "disorder", + -15.230508804321287 + ], + [ + "▁LABEL", + -15.230751991271973 + ], + [ + "▁Kaul", + -15.230953216552734 + ], + [ + "THON", + 
-15.230987548828123 + ], + [ + "4:55", + -15.231112480163574 + ], + [ + "Metallic", + -15.231184005737305 + ], + [ + "▁occlus", + -15.23121452331543 + ], + [ + "77,000", + -15.231240272521973 + ], + [ + "tutor", + -15.231416702270508 + ], + [ + "ODIUM", + -15.231510162353516 + ], + [ + "guda", + -15.231534004211426 + ], + [ + "▁Paoli", + -15.231609344482422 + ], + [ + "▁riotous", + -15.231664657592772 + ], + [ + "rahim", + -15.231791496276855 + ], + [ + "▁Taupe", + -15.232035636901855 + ], + [ + "Constructive", + -15.23219871520996 + ], + [ + "▁Zayn", + -15.232263565063477 + ], + [ + "▁reexamine", + -15.232295989990234 + ], + [ + "2009-0", + -15.23237419128418 + ], + [ + "ери", + -15.232375144958496 + ], + [ + "NOAH", + -15.232666969299316 + ], + [ + "▁cress", + -15.23275661468506 + ], + [ + "1313", + -15.23276710510254 + ], + [ + "stephen", + -15.232958793640137 + ], + [ + "▁Tibb", + -15.23301124572754 + ], + [ + "rault", + -15.233025550842283 + ], + [ + "▁7.1.1", + -15.233062744140623 + ], + [ + "▁Cornet", + -15.233134269714355 + ], + [ + "ита", + -15.233148574829102 + ], + [ + "Batching", + -15.233165740966797 + ], + [ + "iyeh", + -15.233210563659668 + ], + [ + "▁plod", + -15.233375549316406 + ], + [ + "personnel", + -15.233399391174316 + ], + [ + "Sylva", + -15.233457565307615 + ], + [ + "8:09", + -15.233719825744627 + ], + [ + "AILED", + -15.233734130859377 + ], + [ + "raag", + -15.233805656433104 + ], + [ + "▁Wyld", + -15.233840942382812 + ], + [ + "disappear", + -15.233953475952148 + ], + [ + "7-73", + -15.234211921691896 + ], + [ + "AFFORD", + -15.234393119812012 + ], + [ + "4:02", + -15.23446559906006 + ], + [ + "▁effet", + -15.234532356262209 + ], + [ + "▁Antifa", + -15.234561920166016 + ], + [ + "▁PRIV", + -15.234594345092772 + ], + [ + ",000,000,000", + -15.234692573547363 + ], + [ + "ONLINE", + -15.234955787658691 + ], + [ + "▁spectro", + -15.235224723815918 + ], + [ + "▁Kassi", + -15.235278129577637 + ], + [ + "Midwest", + -15.235319137573242 + ], + [ + "4:04", + -15.23547649383545 + ], + [ + "▁Yemi", + -15.235610008239746 + ], + [ + "oplasm", + -15.236105918884276 + ], + [ + "ARROW", + -15.236285209655762 + ], + [ + "▁anoint", + -15.236308097839355 + ], + [ + "▁07:4", + -15.236309051513672 + ], + [ + "Fame", + -15.23645305633545 + ], + [ + "9780", + -15.23648166656494 + ], + [ + "▁hypothesize", + -15.236495018005373 + ], + [ + "▁truncate", + -15.236495018005373 + ], + [ + "▁obligate", + -15.236496925354004 + ], + [ + "▁enthuse", + -15.23649787902832 + ], + [ + "Regina", + -15.236559867858888 + ], + [ + "▁enslave", + -15.236562728881836 + ], + [ + "3-38", + -15.236592292785645 + ], + [ + "▁Equestria", + -15.236841201782228 + ], + [ + "SIMON", + -15.237058639526367 + ], + [ + "▁Savanna", + -15.237069129943848 + ], + [ + "▁$2,2", + -15.237090110778809 + ], + [ + "▁888-492-0", + -15.23709487915039 + ], + [ + "Contingency", + -15.237095832824709 + ], + [ + "LOPRAM", + -15.237096786499023 + ], + [ + "Licorice", + -15.237096786499023 + ], + [ + "Pietermaritzburg", + -15.237096786499023 + ], + [ + "rouzabad", + -15.237096786499023 + ], + [ + "swarovski", + -15.237096786499023 + ], + [ + "▁Aquifer", + -15.237096786499023 + ], + [ + "▁Arithmetic", + -15.237096786499023 + ], + [ + "▁Bermondsey", + -15.237096786499023 + ], + [ + "▁Cyclades", + -15.237096786499023 + ], + [ + "▁Emmerson", + -15.237096786499023 + ], + [ + "▁GALAXY", + -15.237096786499023 + ], + [ + "▁Geschichte", + -15.237096786499023 + ], + [ + "▁Gynecologist", + -15.237096786499023 + ], + [ + "▁Ishikawa", + 
-15.237096786499023 + ], + [ + "▁Kedarnath", + -15.237096786499023 + ], + [ + "▁Kestrel", + -15.237096786499023 + ], + [ + "▁Luftwaffe", + -15.237096786499023 + ], + [ + "▁Menendez", + -15.237096786499023 + ], + [ + "▁Montmartre", + -15.237096786499023 + ], + [ + "▁NECESSARY", + -15.237096786499023 + ], + [ + "▁Parganas", + -15.237096786499023 + ], + [ + "▁Quotient", + -15.237096786499023 + ], + [ + "▁Sikorsky", + -15.237096786499023 + ], + [ + "▁Sotomayor", + -15.237096786499023 + ], + [ + "▁Tavistock", + -15.237096786499023 + ], + [ + "▁Tempranillo", + -15.237096786499023 + ], + [ + "▁Tsipras", + -15.237096786499023 + ], + [ + "▁Winifred", + -15.237096786499023 + ], + [ + "▁Wohnzimmer", + -15.237096786499023 + ], + [ + "▁bewilderment", + -15.237096786499023 + ], + [ + "▁caliphate", + -15.237096786499023 + ], + [ + "▁congruence", + -15.237096786499023 + ], + [ + "▁disheveled", + -15.237096786499023 + ], + [ + "▁disorienting", + -15.237096786499023 + ], + [ + "▁disponible", + -15.237096786499023 + ], + [ + "▁effigy", + -15.237096786499023 + ], + [ + "▁expropriation", + -15.237096786499023 + ], + [ + "▁heartthrob", + -15.237096786499023 + ], + [ + "▁inexorably", + -15.237096786499023 + ], + [ + "▁nutraceutical", + -15.237096786499023 + ], + [ + "▁paralyzing", + -15.237096786499023 + ], + [ + "▁potpourri", + -15.237096786499023 + ], + [ + "▁presbyopia", + -15.237096786499023 + ], + [ + "▁reticent", + -15.237096786499023 + ], + [ + "▁stalactites", + -15.237096786499023 + ], + [ + "▁strenght", + -15.237096786499023 + ], + [ + "▁subluxation", + -15.237096786499023 + ], + [ + "▁tzatziki", + -15.237096786499023 + ], + [ + "▁unshakable", + -15.237096786499023 + ], + [ + "▁unworkable", + -15.237096786499023 + ], + [ + "▁vociferous", + -15.237096786499023 + ], + [ + "Demonstrating", + -15.23709774017334 + ], + [ + "▁Bechtel", + -15.23709774017334 + ], + [ + "▁Katniss", + -15.23709774017334 + ], + [ + "▁Kaushal", + -15.23709774017334 + ], + [ + "▁Singularity", + -15.23709774017334 + ], + [ + "▁alyssum", + -15.23709774017334 + ], + [ + "▁minnesota", + -15.23709774017334 + ], + [ + "▁monopolize", + -15.23709774017334 + ], + [ + "▁nexium", + -15.23709774017334 + ], + [ + "KANSAS", + -15.237098693847656 + ], + [ + "▁Oviedo", + -15.237098693847656 + ], + [ + "▁asphyxia", + -15.237098693847656 + ], + [ + "▁calcification", + -15.237098693847656 + ], + [ + "Bouncing", + -15.237099647521973 + ], + [ + "▁Cannery", + -15.237099647521973 + ], + [ + "▁Griswold", + -15.237100601196287 + ], + [ + "▁Gryphon", + -15.237100601196287 + ], + [ + "▁McAvoy", + -15.237100601196287 + ], + [ + "▁endovascular", + -15.237100601196287 + ], + [ + "▁AGENTS", + -15.237101554870604 + ], + [ + "▁liminal", + -15.237101554870604 + ], + [ + "▁mandible", + -15.237101554870604 + ], + [ + "▁Broderick", + -15.237102508544922 + ], + [ + "▁FABRIC", + -15.237102508544922 + ], + [ + "▁Myriad", + -15.237102508544922 + ], + [ + "▁10:20", + -15.237103462219238 + ], + [ + "▁Javelin", + -15.237103462219238 + ], + [ + "▁Katharina", + -15.237104415893556 + ], + [ + "▁commissar", + -15.237105369567873 + ], + [ + "Fascist", + -15.237106323242188 + ], + [ + "nywayanyday", + -15.237106323242188 + ], + [ + "▁inasmuch", + -15.237106323242188 + ], + [ + "▁agitating", + -15.23710823059082 + ], + [ + "▁Shutterfly", + -15.237109184265137 + ], + [ + "▁norovirus", + -15.237110137939451 + ], + [ + "▁Akufo", + -15.23711109161377 + ], + [ + "▁bioethics", + -15.23711109161377 + ], + [ + "▁Cartesian", + -15.237112045288086 + ], + [ + "▁Abacus", + -15.237112998962402 
+ ], + [ + "▁Clydesdale", + -15.237112998962402 + ], + [ + "▁prodded", + -15.237112998962402 + ], + [ + "▁Powys", + -15.23711395263672 + ], + [ + "▁fraiche", + -15.23711395263672 + ], + [ + "▁Terrarium", + -15.237114906311035 + ], + [ + "▁Pharmacies", + -15.237115859985352 + ], + [ + "▁Reinhart", + -15.237115859985352 + ], + [ + "puerto", + -15.2371187210083 + ], + [ + "▁Threatened", + -15.2371187210083 + ], + [ + "▁imbedded", + -15.2371187210083 + ], + [ + "▁Concussion", + -15.237119674682615 + ], + [ + "▁ConvertKit", + -15.23712158203125 + ], + [ + "▁helipad", + -15.23712158203125 + ], + [ + "▁Ivanhoe", + -15.237122535705566 + ], + [ + "▁turboprop", + -15.2371244430542 + ], + [ + "▁Brescia", + -15.237125396728516 + ], + [ + "▁congresswoman", + -15.237125396728516 + ], + [ + "▁collocation", + -15.237126350402832 + ], + [ + "▁Birgit", + -15.237128257751465 + ], + [ + "▁Maggiore", + -15.23712921142578 + ], + [ + "▁Keogh", + -15.237133979797363 + ], + [ + "▁Temperance", + -15.237133979797363 + ], + [ + "▁unbounded", + -15.237133979797363 + ], + [ + "▁Hitchens", + -15.23713493347168 + ], + [ + "▁Vanquish", + -15.237135887145996 + ], + [ + "▁Improvisation", + -15.237140655517578 + ], + [ + "▁Biophysics", + -15.237142562866213 + ], + [ + "▁aviary", + -15.237142562866213 + ], + [ + "Dependence", + -15.237152099609377 + ], + [ + "▁Assamese", + -15.237152099609377 + ], + [ + "▁Terminus", + -15.237152099609377 + ], + [ + "▁COBOL", + -15.237154006958008 + ], + [ + "▁Potosi", + -15.237159729003906 + ], + [ + "▁Shukla", + -15.237159729003906 + ], + [ + "▁trotted", + -15.23716163635254 + ], + [ + "▁Adorama", + -15.237164497375488 + ], + [ + "▁Ripken", + -15.237165451049805 + ], + [ + "OTTAWA", + -15.23716640472412 + ], + [ + "▁Alberni", + -15.237170219421388 + ], + [ + "▁Tupac", + -15.237174034118652 + ], + [ + "▁Espanol", + -15.237178802490234 + ], + [ + "MECHANI", + -15.237188339233398 + ], + [ + "▁AliExpress", + -15.23719596862793 + ], + [ + "▁Vegeta", + -15.237196922302246 + ], + [ + "escens", + -15.237199783325195 + ], + [ + "▁Univision", + -15.237199783325195 + ], + [ + "▁Vecchio", + -15.237202644348145 + ], + [ + "▁Elinor", + -15.23720359802246 + ], + [ + "▁Impressionism", + -15.237208366394045 + ], + [ + "▁Bradstreet", + -15.237215042114258 + ], + [ + "▁breastmilk", + -15.23721694946289 + ], + [ + "▁Rudyard", + -15.237218856811523 + ], + [ + "▁ROBOT", + -15.2372407913208 + ], + [ + "▁birdwatching", + -15.237244606018066 + ], + [ + "▁Sinaloa", + -15.237248420715332 + ], + [ + "▁SLU", + -15.23725128173828 + ], + [ + "▁Rockhampton", + -15.237264633178713 + ], + [ + "▁Wasabi", + -15.23726749420166 + ], + [ + "▁photoreceptor", + -15.237277030944824 + ], + [ + "▁chequered", + -15.237278938293455 + ], + [ + "▁AnyFormat", + -15.23728084564209 + ], + [ + "▁Vardy", + -15.23728084564209 + ], + [ + "▁sandcastle", + -15.237281799316406 + ], + [ + "▁Vistaprint", + -15.237296104431152 + ], + [ + "KUALA", + -15.237303733825684 + ], + [ + "▁Dapper", + -15.237303733825684 + ], + [ + "▁Chairwoman", + -15.237316131591797 + ], + [ + "▁£15,000", + -15.237318992614746 + ], + [ + "▁limerick", + -15.237324714660645 + ], + [ + "▁Drowning", + -15.23733139038086 + ], + [ + "▁autocad", + -15.23733139038086 + ], + [ + "▁Rimini", + -15.237333297729492 + ], + [ + "▁GOES", + -15.237337112426758 + ], + [ + "▁BATTLE", + -15.23733901977539 + ], + [ + "LEIGH", + -15.23734188079834 + ], + [ + "▁gerrymandering", + -15.237369537353516 + ], + [ + "▁Paradiso", + -15.237378120422363 + ], + [ + "▁wierd", + -15.237388610839844 + ], + [ + 
"▁landmines", + -15.23738956451416 + ], + [ + "▁unheated", + -15.237390518188477 + ], + [ + "▁voile", + -15.237401008605955 + ], + [ + "▁Coogan", + -15.237415313720703 + ], + [ + "▁Randomized", + -15.237424850463867 + ], + [ + "▁Mangrove", + -15.237442016601562 + ], + [ + "▁unsaid", + -15.23744297027588 + ], + [ + "▁Ornamental", + -15.237465858459473 + ], + [ + "▁cocked", + -15.237469673156738 + ], + [ + "▁Chambersburg", + -15.237470626831056 + ], + [ + "▁passionfruit", + -15.237473487854004 + ], + [ + "▁reactivation", + -15.237506866455078 + ], + [ + "▁enforceability", + -15.237526893615724 + ], + [ + "▁polymerization", + -15.237542152404783 + ], + [ + "▁Himansh", + -15.237547874450684 + ], + [ + "teborg", + -15.237558364868164 + ], + [ + "▁Backstreet", + -15.23755931854248 + ], + [ + "▁shiplap", + -15.237567901611328 + ], + [ + "▁Smokehouse", + -15.237632751464844 + ], + [ + "▁IDBI", + -15.237648963928224 + ], + [ + "▁Foresight", + -15.237671852111816 + ], + [ + "▁SDHC", + -15.237706184387209 + ], + [ + "▁Wheatland", + -15.237727165222168 + ], + [ + "▁deferment", + -15.237738609313965 + ], + [ + "▁Elissa", + -15.237763404846191 + ], + [ + "▁croquette", + -15.237805366516112 + ], + [ + "▁quartzite", + -15.237822532653809 + ], + [ + "▁FEET", + -15.2378568649292 + ], + [ + "Uncover", + -15.23786735534668 + ], + [ + "▁OSCAR", + -15.237907409667969 + ], + [ + "▁Pangea", + -15.237922668457031 + ], + [ + "▁Jetstar", + -15.23792552947998 + ], + [ + "▁Doubtful", + -15.237984657287598 + ], + [ + "Fragrant", + -15.238007545471191 + ], + [ + "▁terrorized", + -15.238507270812988 + ], + [ + "Studied", + -15.238564491271973 + ], + [ + "▁talaga", + -15.238580703735352 + ], + [ + "▁Fractional", + -15.238622665405272 + ], + [ + "1.55", + -15.23867130279541 + ], + [ + "▁rowboat", + -15.23873805999756 + ], + [ + "ouquet", + -15.238821029663086 + ], + [ + "Johan", + -15.238821983337402 + ], + [ + "▁enticement", + -15.238954544067385 + ], + [ + "▁20:4", + -15.2389554977417 + ], + [ + "lign", + -15.239042282104492 + ], + [ + "▁Boykin", + -15.239084243774414 + ], + [ + "▁Mudder", + -15.2391939163208 + ], + [ + "initialize", + -15.239397048950195 + ], + [ + "4:42", + -15.23946475982666 + ], + [ + "missouri", + -15.2395658493042 + ], + [ + "UMN", + -15.239606857299805 + ], + [ + "▁$1,800", + -15.239701271057127 + ], + [ + "▁parishioner", + -15.239741325378418 + ], + [ + "▁cyberattack", + -15.239798545837402 + ], + [ + "became", + -15.23984432220459 + ], + [ + "▁Glau", + -15.239890098571776 + ], + [ + "▁environ", + -15.240079879760742 + ], + [ + "▁Capella", + -15.240219116210938 + ], + [ + "1.49", + -15.240350723266602 + ], + [ + "▁Radish", + -15.240442276000977 + ], + [ + "▁Kanji", + -15.240586280822754 + ], + [ + "1741", + -15.240639686584473 + ], + [ + "Roar", + -15.240734100341797 + ], + [ + "▁Cobble", + -15.240762710571287 + ], + [ + "▁goatee", + -15.24094581604004 + ], + [ + "NNIE", + -15.241023063659668 + ], + [ + "▁Cowles", + -15.241153717041016 + ], + [ + "Manifest", + -15.241229057312012 + ], + [ + "knuckle", + -15.24124813079834 + ], + [ + "8:27", + -15.241254806518556 + ], + [ + "Bumble", + -15.241266250610352 + ], + [ + "Hunger", + -15.241266250610352 + ], + [ + "bazaar", + -15.241271018981934 + ], + [ + "EBITDA", + -15.241273880004885 + ], + [ + "vaccination", + -15.241281509399414 + ], + [ + "Zenith", + -15.241287231445312 + ], + [ + "exploration", + -15.241292953491213 + ], + [ + "Educators", + -15.241293907165527 + ], + [ + "Circular", + -15.241294860839844 + ], + [ + "Regulatory", + 
-15.241294860839844 + ], + [ + "CLEAR", + -15.241297721862791 + ], + [ + "Plymouth", + -15.24129867553711 + ], + [ + "Ninja", + -15.241300582885742 + ], + [ + "irrigation", + -15.241303443908691 + ], + [ + "veteran", + -15.241310119628906 + ], + [ + "thirty", + -15.24131202697754 + ], + [ + "Kayak", + -15.241321563720703 + ], + [ + "convex", + -15.241326332092283 + ], + [ + "Dietary", + -15.241336822509766 + ], + [ + "assistance", + -15.241357803344728 + ], + [ + "▁9689", + -15.2413969039917 + ], + [ + "CAGR", + -15.24140167236328 + ], + [ + "Barack", + -15.241403579711914 + ], + [ + "▁MOMENT", + -15.241408348083496 + ], + [ + "Edison", + -15.241410255432127 + ], + [ + "addiction", + -15.241410255432127 + ], + [ + "7,800", + -15.24142837524414 + ], + [ + "transplant", + -15.24145221710205 + ], + [ + "Sponsorship", + -15.241459846496582 + ], + [ + "automatically", + -15.24147605895996 + ], + [ + "Jessie", + -15.241485595703123 + ], + [ + "▁03:1", + -15.24148654937744 + ], + [ + "radiation", + -15.241501808166504 + ], + [ + "Cupid", + -15.241518020629885 + ], + [ + "afka", + -15.241663932800291 + ], + [ + "Teddy", + -15.241698265075684 + ], + [ + "Moody", + -15.24170970916748 + ], + [ + "WASH", + -15.241717338562012 + ], + [ + "particular", + -15.241729736328123 + ], + [ + "▁CIMB", + -15.241816520690918 + ], + [ + "thymi", + -15.241823196411133 + ], + [ + "growers", + -15.241854667663574 + ], + [ + "rattle", + -15.241864204406738 + ], + [ + "Harness", + -15.241978645324709 + ], + [ + "ninja", + -15.241999626159668 + ], + [ + "olsky", + -15.242076873779297 + ], + [ + "nsulating", + -15.242194175720217 + ], + [ + "▁salivar", + -15.242355346679688 + ], + [ + "ggler", + -15.242403030395508 + ], + [ + "Owl", + -15.242445945739746 + ], + [ + "Gigi", + -15.242499351501465 + ], + [ + "▁Ophi", + -15.242523193359377 + ], + [ + "colli", + -15.24258041381836 + ], + [ + "TRUST", + -15.242600440979004 + ], + [ + "▁Hernia", + -15.242695808410645 + ], + [ + "15-3", + -15.242724418640137 + ], + [ + "▁$1,400", + -15.24275016784668 + ], + [ + "MWh", + -15.242926597595217 + ], + [ + "Alder", + -15.24298858642578 + ], + [ + "▁Khul", + -15.243043899536133 + ], + [ + "ghoul", + -15.24314022064209 + ], + [ + "Democrat", + -15.243528366088867 + ], + [ + "▁Navara", + -15.243973731994627 + ], + [ + "▁SCALE", + -15.24409008026123 + ], + [ + "1481", + -15.244165420532228 + ], + [ + "7–2", + -15.244216918945312 + ], + [ + "fric", + -15.24432373046875 + ], + [ + "▁Pinar", + -15.244414329528809 + ], + [ + "▁Medalist", + -15.244508743286133 + ], + [ + "▁Serafin", + -15.24466323852539 + ], + [ + "kirche", + -15.24481201171875 + ], + [ + "utsu", + -15.244871139526367 + ], + [ + "▁$7.9", + -15.245001792907717 + ], + [ + "5,900", + -15.245060920715332 + ], + [ + "▁Gaol", + -15.24537467956543 + ], + [ + "luorocarbon", + -15.245668411254885 + ], + [ + "cript", + -15.245685577392578 + ], + [ + "▁theorize", + -15.246097564697266 + ], + [ + "▁HCF", + -15.246160507202148 + ], + [ + "▁Kamil", + -15.24628734588623 + ], + [ + "▁hydrologic", + -15.24630355834961 + ], + [ + "▁Tarik", + -15.24655818939209 + ], + [ + "Shiva", + -15.246630668640137 + ], + [ + "==>", + -15.246695518493652 + ], + [ + "▁CONDO", + -15.246870040893556 + ], + [ + "▁glac", + -15.24704360961914 + ], + [ + "▁Symmetri", + -15.24705410003662 + ], + [ + "NPP", + -15.247249603271484 + ], + [ + "???????????????", + -15.247618675231934 + ], + [ + "OLIC", + -15.247681617736816 + ], + [ + "ripped", + -15.248279571533203 + ], + [ + "OZEN", + -15.24857234954834 + ], + [ + 
"▁Silesia", + -15.248666763305664 + ], + [ + "▁Feud", + -15.248926162719728 + ], + [ + "Meara", + -15.24942684173584 + ], + [ + "▁Terrasse", + -15.24946117401123 + ], + [ + "denture", + -15.249567985534668 + ], + [ + "Encode", + -15.249608039855955 + ], + [ + "FLT", + -15.24963092803955 + ], + [ + "6:35", + -15.249679565429688 + ], + [ + "▁Mehl", + -15.250516891479492 + ], + [ + "▁warrantee", + -15.250657081604004 + ], + [ + "referral", + -15.250692367553713 + ], + [ + "CONCLUSION", + -15.250901222229004 + ], + [ + "eductive", + -15.25128936767578 + ], + [ + "▁biochemist", + -15.251350402832031 + ], + [ + "▁FLORIDA", + -15.251362800598145 + ], + [ + "myelitis", + -15.251367568969728 + ], + [ + "adelphia", + -15.251368522644045 + ], + [ + "▁ACADEMY", + -15.251381874084473 + ], + [ + "▁Aguirre", + -15.251381874084473 + ], + [ + "▁Berlioz", + -15.251381874084473 + ], + [ + "▁Canggu", + -15.251381874084473 + ], + [ + "▁Narragansett", + -15.251381874084473 + ], + [ + "▁Podiatrist", + -15.251381874084473 + ], + [ + "▁Srivastava", + -15.251381874084473 + ], + [ + "▁earlobe", + -15.251381874084473 + ], + [ + "▁metronidazole", + -15.251381874084473 + ], + [ + "▁nightgown", + -15.251381874084473 + ], + [ + "▁tumult", + -15.251381874084473 + ], + [ + "Cisneros", + -15.251382827758787 + ], + [ + "Congenital", + -15.251382827758787 + ], + [ + "UNFCCC", + -15.251382827758787 + ], + [ + "propionate", + -15.251382827758787 + ], + [ + "▁ADVENTURE", + -15.251382827758787 + ], + [ + "▁ASSEMBLY", + -15.251382827758787 + ], + [ + "▁Bacchus", + -15.251382827758787 + ], + [ + "▁Bonifacio", + -15.251382827758787 + ], + [ + "▁Carburetor", + -15.251382827758787 + ], + [ + "▁Cormorant", + -15.251382827758787 + ], + [ + "▁DeLorean", + -15.251382827758787 + ], + [ + "▁Deodorant", + -15.251382827758787 + ], + [ + "▁Eglinton", + -15.251382827758787 + ], + [ + "▁Fulcrum", + -15.251382827758787 + ], + [ + "▁Grommet", + -15.251382827758787 + ], + [ + "▁Haliburton", + -15.251382827758787 + ], + [ + "▁LaRue", + -15.251382827758787 + ], + [ + "▁McEwan", + -15.251382827758787 + ], + [ + "▁Orpheus", + -15.251382827758787 + ], + [ + "▁PRODUCE", + -15.251382827758787 + ], + [ + "▁Pendidikan", + -15.251382827758787 + ], + [ + "▁Sephardic", + -15.251382827758787 + ], + [ + "▁Stoughton", + -15.251382827758787 + ], + [ + "▁Tiramisu", + -15.251382827758787 + ], + [ + "▁Vinaigrette", + -15.251382827758787 + ], + [ + "▁anodised", + -15.251382827758787 + ], + [ + "▁beryllium", + -15.251382827758787 + ], + [ + "▁cabernet", + -15.251382827758787 + ], + [ + "▁calligrapher", + -15.251382827758787 + ], + [ + "▁charlatan", + -15.251382827758787 + ], + [ + "▁delineating", + -15.251382827758787 + ], + [ + "▁disincentive", + -15.251382827758787 + ], + [ + "▁electroplating", + -15.251382827758787 + ], + [ + "▁fudgy", + -15.251382827758787 + ], + [ + "▁hodgepodge", + -15.251382827758787 + ], + [ + "▁ideologue", + -15.251382827758787 + ], + [ + "▁marsupial", + -15.251382827758787 + ], + [ + "▁nutritive", + -15.251382827758787 + ], + [ + "▁squeamish", + -15.251382827758787 + ], + [ + "▁unplayable", + -15.251382827758787 + ], + [ + "▁unsatisfying", + -15.251382827758787 + ], + [ + "▁ADVISOR", + -15.251383781433104 + ], + [ + "▁Dystrophy", + -15.251383781433104 + ], + [ + "▁Sevenoaks", + -15.251383781433104 + ], + [ + "▁assiduous", + -15.251383781433104 + ], + [ + "▁bursitis", + -15.251383781433104 + ], + [ + "▁pictogram", + -15.251383781433104 + ], + [ + "Oxidation", + -15.251384735107422 + ], + [ + "▁Hasselblad", + -15.251384735107422 + ], + [ + 
"▁manatees", + -15.251384735107422 + ], + [ + "▁pfizer", + -15.251384735107422 + ], + [ + "▁POPULAR", + -15.251385688781738 + ], + [ + "▁Wanderlust", + -15.251386642456056 + ], + [ + "▁gearshifts", + -15.251386642456056 + ], + [ + "▁Oncologist", + -15.251387596130373 + ], + [ + "▁protozoa", + -15.251387596130373 + ], + [ + "▁yasmin", + -15.251388549804688 + ], + [ + "▁lancaster", + -15.251389503479004 + ], + [ + "▁ZigBee", + -15.25139045715332 + ], + [ + "▁Emporio", + -15.251391410827637 + ], + [ + "▁mariachi", + -15.251391410827637 + ], + [ + "▁GVWR", + -15.251392364501951 + ], + [ + "▁Beauchamp", + -15.25139331817627 + ], + [ + "▁Ashburton", + -15.251394271850586 + ], + [ + "▁Poznan", + -15.251395225524902 + ], + [ + "▁Telephony", + -15.25139617919922 + ], + [ + "▁perturbed", + -15.25139617919922 + ], + [ + "▁McKesson", + -15.251398086547852 + ], + [ + "▁Parekh", + -15.251398086547852 + ], + [ + "▁Sedalia", + -15.251399040222168 + ], + [ + "▁achilles", + -15.251399040222168 + ], + [ + "▁matriculation", + -15.251399040222168 + ], + [ + "▁Concealer", + -15.251401901245115 + ], + [ + "▁neurobiology", + -15.251401901245115 + ], + [ + "OTCQB", + -15.251402854919434 + ], + [ + "▁Clarissa", + -15.25140380859375 + ], + [ + "▁Instacart", + -15.25140380859375 + ], + [ + "inducible", + -15.251404762268066 + ], + [ + "▁neurontin", + -15.251404762268066 + ], + [ + "▁Curzon", + -15.251405715942385 + ], + [ + "▁diffrent", + -15.251408576965332 + ], + [ + "▁Sackett", + -15.25141143798828 + ], + [ + "▁nuevo", + -15.25141143798828 + ], + [ + "▁Kakadu", + -15.251412391662598 + ], + [ + "▁Mcdonald", + -15.251415252685549 + ], + [ + "▁comparability", + -15.251415252685549 + ], + [ + "▁Quench", + -15.251416206359863 + ], + [ + "▁Adagio", + -15.25141716003418 + ], + [ + "▁Cryptography", + -15.25141716003418 + ], + [ + "▁Cypher", + -15.25141716003418 + ], + [ + "▁Langtang", + -15.251426696777344 + ], + [ + "▁pluralistic", + -15.25142765045166 + ], + [ + "▁Fergie", + -15.25143051147461 + ], + [ + "▁Telkom", + -15.25143051147461 + ], + [ + "▁Kampong", + -15.25143337249756 + ], + [ + "▁Microfinance", + -15.251434326171877 + ], + [ + "▁aftershocks", + -15.251437187194824 + ], + [ + "▁transcoding", + -15.251449584960938 + ], + [ + "▁Flyknit", + -15.25145149230957 + ], + [ + "▁clotted", + -15.25145149230957 + ], + [ + "▁mistook", + -15.25145435333252 + ], + [ + "▁Gerais", + -15.251455307006836 + ], + [ + "Pneumonia", + -15.25146484375 + ], + [ + "▁buena", + -15.251465797424316 + ], + [ + "Frustration", + -15.251474380493164 + ], + [ + "▁Midwife", + -15.251482009887695 + ], + [ + "▁Wessex", + -15.251484870910645 + ], + [ + "▁munchies", + -15.251484870910645 + ], + [ + "▁Victorious", + -15.251490592956545 + ], + [ + "▁habeas", + -15.251494407653809 + ], + [ + "▁Redfern", + -15.25149631500244 + ], + [ + "▁Lonesome", + -15.251501083374023 + ], + [ + "▁Sensible", + -15.251505851745604 + ], + [ + "▁subspecialty", + -15.251507759094238 + ], + [ + "▁Sandblast", + -15.251509666442873 + ], + [ + "▁Morrell", + -15.251517295837402 + ], + [ + "▁herdsmen", + -15.251522064208984 + ], + [ + "▁Presque", + -15.2515230178833 + ], + [ + "▁checkerboard", + -15.2515230178833 + ], + [ + "▁Shamanic", + -15.2515287399292 + ], + [ + "▁starburst", + -15.251529693603516 + ], + [ + "▁Coulson", + -15.251535415649414 + ], + [ + "▁unashamedly", + -15.251535415649414 + ], + [ + "▁Keenum", + -15.251543045043944 + ], + [ + "▁memorise", + -15.251553535461426 + ], + [ + "▁Thorium", + -15.251559257507324 + ], + [ + "▁slugger", + -15.251568794250488 + ], 
+ [ + "▁Nicobar", + -15.251572608947754 + ], + [ + "▁Seacoast", + -15.25157356262207 + ], + [ + "▁Replies", + -15.251575469970703 + ], + [ + "▁subatomic", + -15.251579284667969 + ], + [ + "▁Harrah", + -15.251593589782717 + ], + [ + "▁Narita", + -15.251605033874512 + ], + [ + "▁Profiling", + -15.251612663269045 + ], + [ + "▁muito", + -15.251614570617676 + ], + [ + "▁Hilux", + -15.251615524291992 + ], + [ + "▁Levinson", + -15.25161838531494 + ], + [ + "▁croak", + -15.25164794921875 + ], + [ + "▁tailwind", + -15.251653671264648 + ], + [ + "PATIENT", + -15.251666069030762 + ], + [ + "0:35", + -15.251669883728027 + ], + [ + "▁Marsala", + -15.251680374145508 + ], + [ + "▁Wholesome", + -15.251684188842772 + ], + [ + "▁dissapointed", + -15.251689910888672 + ], + [ + "▁Minutemen", + -15.251704216003418 + ], + [ + "▁bedridden", + -15.251714706420898 + ], + [ + "▁Haslam", + -15.25172996520996 + ], + [ + "▁Wittenberg", + -15.25174045562744 + ], + [ + "▁Koala", + -15.251749038696287 + ], + [ + "▁Yoshida", + -15.251755714416504 + ], + [ + "▁abscesses", + -15.251757621765137 + ], + [ + "Rutger", + -15.25176239013672 + ], + [ + "Migrating", + -15.251771926879885 + ], + [ + "arrival", + -15.251789093017578 + ], + [ + "▁vanquished", + -15.251805305480955 + ], + [ + "▁Flemington", + -15.251811981201172 + ], + [ + "▁Outbreak", + -15.251829147338867 + ], + [ + "▁Renata", + -15.2518310546875 + ], + [ + "▁HKEY", + -15.251834869384766 + ], + [ + "Refrain", + -15.25184154510498 + ], + [ + "▁subcategory", + -15.251842498779297 + ], + [ + "▁Tilley", + -15.251863479614258 + ], + [ + "▁Coastguard", + -15.25190544128418 + ], + [ + "▁binging", + -15.251910209655762 + ], + [ + "▁Airbrush", + -15.251922607421877 + ], + [ + "ANCY", + -15.251928329467772 + ], + [ + "chapin", + -15.251937866210938 + ], + [ + "▁TODO", + -15.251937866210938 + ], + [ + "▁ipv", + -15.251943588256836 + ], + [ + "asinghe", + -15.251958847045898 + ], + [ + "▁Cocaine", + -15.25196361541748 + ], + [ + "▁Lanham", + -15.251968383789062 + ], + [ + "▁Sketchbook", + -15.251972198486328 + ], + [ + "▁Cantrell", + -15.251977920532228 + ], + [ + "▁Underwriting", + -15.251986503601074 + ], + [ + "▁Frankston", + -15.252004623413086 + ], + [ + "▁Selina", + -15.252007484436035 + ], + [ + "Acqui", + -15.252036094665527 + ], + [ + "▁SPDR", + -15.25204086303711 + ], + [ + "▁Hafiz", + -15.252068519592283 + ], + [ + "3/8′′", + -15.252102851867676 + ], + [ + "▁Odeon", + -15.252105712890623 + ], + [ + "3–2", + -15.252120971679688 + ], + [ + "▁oleh", + -15.252123832702637 + ], + [ + "▁FiOS", + -15.252130508422852 + ], + [ + "▁Geert", + -15.252385139465332 + ], + [ + "▁Songbird", + -15.252406120300291 + ], + [ + "▁Gorton", + -15.252431869506836 + ], + [ + "▁Surry", + -15.25243854522705 + ], + [ + "holistic", + -15.252461433410645 + ], + [ + "▁Maidan", + -15.252482414245604 + ], + [ + "▁sonically", + -15.252485275268556 + ], + [ + "▁MIRA", + -15.25254249572754 + ], + [ + "▁Dimitrov", + -15.252593040466309 + ], + [ + "▁glamourous", + -15.252692222595217 + ], + [ + "▁favela", + -15.252850532531738 + ], + [ + "4:11", + -15.25303554534912 + ], + [ + "▁EXPERT", + -15.253050804138184 + ], + [ + "▁receivership", + -15.253090858459473 + ], + [ + "1-28", + -15.253241539001465 + ], + [ + "referencing", + -15.253277778625488 + ], + [ + "▁DPW", + -15.25340747833252 + ], + [ + "▁Tirol", + -15.253458976745604 + ], + [ + "Winston", + -15.253483772277832 + ], + [ + "▁$2,0", + -15.25372314453125 + ], + [ + ".02%", + -15.253826141357422 + ], + [ + "▁$5.6", + -15.253830909729004 + ], + [ + 
"Stroll", + -15.25383186340332 + ], + [ + "▁maniacal", + -15.253849983215332 + ], + [ + "/10/2018", + -15.253894805908203 + ], + [ + "▁Zeeland", + -15.253944396972656 + ], + [ + "▁hallucination", + -15.254222869873049 + ], + [ + "veined", + -15.254379272460938 + ], + [ + "▁beadwork", + -15.25440502166748 + ], + [ + "Undertake", + -15.254464149475098 + ], + [ + "izzard", + -15.254472732543944 + ], + [ + "▁WILLIAMS", + -15.254612922668455 + ], + [ + "IZZ", + -15.254645347595217 + ], + [ + "▁negra", + -15.255085945129396 + ], + [ + "▁Netherland", + -15.255146980285645 + ], + [ + "3:56", + -15.255183219909668 + ], + [ + "4:31", + -15.255243301391602 + ], + [ + "72%", + -15.255250930786133 + ], + [ + "calculate", + -15.255345344543455 + ], + [ + "Wagering", + -15.255390167236328 + ], + [ + "embang", + -15.255412101745604 + ], + [ + "TRONIC", + -15.255430221557615 + ], + [ + "Rosie", + -15.255441665649414 + ], + [ + "wrapper", + -15.255441665649414 + ], + [ + "22-8", + -15.255614280700684 + ], + [ + "▁Madelyn", + -15.255712509155272 + ], + [ + "residency", + -15.255752563476562 + ], + [ + "1896", + -15.255845069885254 + ], + [ + "Cartoon", + -15.255864143371582 + ], + [ + "Spartan", + -15.255881309509276 + ], + [ + "▁Mirra", + -15.25588607788086 + ], + [ + "attractive", + -15.25596046447754 + ], + [ + "Thrift", + -15.255962371826172 + ], + [ + "Easier", + -15.255963325500488 + ], + [ + "Yikes", + -15.255967140197754 + ], + [ + "Perspective", + -15.255970001220703 + ], + [ + "Simplicity", + -15.255974769592283 + ], + [ + "Luxurious", + -15.255975723266602 + ], + [ + "Fleur", + -15.255976676940918 + ], + [ + "Peppermint", + -15.255976676940918 + ], + [ + "Injuries", + -15.255977630615234 + ], + [ + "Baptism", + -15.255980491638184 + ], + [ + "conformity", + -15.255983352661133 + ], + [ + "scooter", + -15.255990028381348 + ], + [ + "Poverty", + -15.255990982055664 + ], + [ + "Vodafone", + -15.255993843078612 + ], + [ + "Harriet", + -15.255996704101562 + ], + [ + "demonstrate", + -15.255998611450195 + ], + [ + "Bermuda", + -15.256001472473145 + ], + [ + "Zimbabwe", + -15.256004333496094 + ], + [ + "strewn", + -15.256004333496094 + ], + [ + "Katrina", + -15.25600814819336 + ], + [ + "Norfolk", + -15.256017684936523 + ], + [ + "Viagra", + -15.256026268005373 + ], + [ + "Artisan", + -15.256028175354004 + ], + [ + "Tribal", + -15.2560453414917 + ], + [ + "Authentication", + -15.25605297088623 + ], + [ + "earlier", + -15.256053924560549 + ], + [ + "Updating", + -15.256075859069824 + ], + [ + "Rugby", + -15.25607681274414 + ], + [ + "Behavioral", + -15.256089210510254 + ], + [ + "Glory", + -15.25609302520752 + ], + [ + "Genetically", + -15.25620174407959 + ], + [ + "WHITE", + -15.256216049194336 + ], + [ + "Underground", + -15.256353378295898 + ], + [ + "▁Corsi", + -15.25639820098877 + ], + [ + "grinder", + -15.256463050842283 + ], + [ + "swirl", + -15.256548881530762 + ], + [ + "▁Techni", + -15.256805419921877 + ], + [ + "9:12", + -15.256816864013672 + ], + [ + "ecosystem", + -15.25687313079834 + ], + [ + "▁segui", + -15.256917953491213 + ], + [ + "EADS", + -15.256919860839844 + ], + [ + "1(2):", + -15.257028579711914 + ], + [ + "Bunny", + -15.25715160369873 + ], + [ + "Postcard", + -15.257286071777344 + ], + [ + "▁CEREC", + -15.257401466369627 + ], + [ + "▁Dimon", + -15.257612228393556 + ], + [ + "packet", + -15.257667541503906 + ], + [ + "bsorbance", + -15.257784843444824 + ], + [ + "addled", + -15.258122444152832 + ], + [ + "OGUE", + -15.258132934570312 + ], + [ + "8:45", + -15.258174896240234 + ], + 
[ + "▁Hollins", + -15.258237838745115 + ], + [ + "▁judi", + -15.258255004882812 + ], + [ + "Trish", + -15.25829792022705 + ], + [ + "▁lioness", + -15.258441925048828 + ], + [ + "provincial", + -15.258466720581056 + ], + [ + "divide", + -15.258475303649902 + ], + [ + "fluff", + -15.258673667907717 + ], + [ + "▁Clasp", + -15.258821487426758 + ], + [ + "prene", + -15.258925437927246 + ], + [ + "▁Santosh", + -15.259087562561035 + ], + [ + "▁Hiero", + -15.259236335754396 + ], + [ + "▁$250.00", + -15.259303092956545 + ], + [ + "logix", + -15.25936222076416 + ], + [ + "backpack", + -15.259488105773926 + ], + [ + "▁GRIP", + -15.259550094604492 + ], + [ + "Chapelle", + -15.259620666503906 + ], + [ + "▁$5.9", + -15.259957313537598 + ], + [ + "▁Rapha", + -15.26000690460205 + ], + [ + "▁$2.0", + -15.260120391845703 + ], + [ + "assembling", + -15.260139465332031 + ], + [ + "▁Brugge", + -15.260431289672852 + ], + [ + "▁Rosacea", + -15.26056671142578 + ], + [ + "▁EXTRACT", + -15.260573387145996 + ], + [ + "8:05", + -15.260708808898926 + ], + [ + "600,000", + -15.26086139678955 + ], + [ + "flv", + -15.260920524597168 + ], + [ + "2,400", + -15.260945320129396 + ], + [ + "▁REPL", + -15.260966300964355 + ], + [ + "▁Mondeo", + -15.261033058166504 + ], + [ + "▁Salafi", + -15.261263847351074 + ], + [ + "▁Ziel", + -15.261286735534668 + ], + [ + "Resolving", + -15.261348724365234 + ], + [ + "RACK", + -15.26154899597168 + ], + [ + "chna", + -15.26156234741211 + ], + [ + "1.0.1", + -15.261573791503906 + ], + [ + "Lund", + -15.261613845825195 + ], + [ + "GID", + -15.261908531188965 + ], + [ + "▁seperat", + -15.261919975280762 + ], + [ + "Echoing", + -15.262128829956056 + ], + [ + "▁wattle", + -15.262255668640137 + ], + [ + "▁£3,000", + -15.262632369995115 + ], + [ + "1915", + -15.262670516967772 + ], + [ + "lectrophysiology", + -15.262933731079102 + ], + [ + "Resist", + -15.262943267822266 + ], + [ + "oumarin", + -15.263238906860352 + ], + [ + "OLOGI", + -15.263400077819824 + ], + [ + "0,0", + -15.263787269592283 + ], + [ + "antana", + -15.26381778717041 + ], + [ + "▁$7.4", + -15.26388931274414 + ], + [ + "▁silv", + -15.263891220092772 + ], + [ + "coupling", + -15.26410675048828 + ], + [ + "▁$24,000", + -15.264189720153809 + ], + [ + "phoric", + -15.264256477355955 + ], + [ + "▁Shiel", + -15.264483451843262 + ], + [ + "rachel", + -15.26498317718506 + ], + [ + "▁gyrat", + -15.265175819396973 + ], + [ + "▁Qualify", + -15.265373229980469 + ], + [ + "▁Tahu", + -15.265535354614258 + ], + [ + "▁scurry", + -15.265775680541992 + ], + [ + "Depreciation", + -15.265874862670898 + ], + [ + "MINNEAPOLIS", + -15.265874862670898 + ], + [ + "Occupant", + -15.265874862670898 + ], + [ + "Prestigious", + -15.265874862670898 + ], + [ + "Rhiannon", + -15.265874862670898 + ], + [ + "barcelona", + -15.265874862670898 + ], + [ + "convulsive", + -15.265874862670898 + ], + [ + "▁Appetizers", + -15.265874862670898 + ], + [ + "▁Beaujolais", + -15.265874862670898 + ], + [ + "▁Benidorm", + -15.265874862670898 + ], + [ + "▁Chaffee", + -15.265874862670898 + ], + [ + "▁Chakraborty", + -15.265874862670898 + ], + [ + "▁Cimarron", + -15.265874862670898 + ], + [ + "▁Conservatories", + -15.265874862670898 + ], + [ + "▁Cornhusker", + -15.265874862670898 + ], + [ + "▁Dreadnought", + -15.265874862670898 + ], + [ + "▁Fellaini", + -15.265874862670898 + ], + [ + "▁Ferragamo", + -15.265874862670898 + ], + [ + "▁Ghulam", + -15.265874862670898 + ], + [ + "▁Gustafson", + -15.265874862670898 + ], + [ + "▁Ketchikan", + -15.265874862670898 + ], + [ + 
"▁Metallurgical", + -15.265874862670898 + ], + [ + "▁Mulcahy", + -15.265874862670898 + ], + [ + "▁OBTAINED", + -15.265874862670898 + ], + [ + "▁Persuasion", + -15.265874862670898 + ], + [ + "▁Pilipinas", + -15.265874862670898 + ], + [ + "▁Remedial", + -15.265874862670898 + ], + [ + "▁Rhododendron", + -15.265874862670898 + ], + [ + "▁Tiverton", + -15.265874862670898 + ], + [ + "▁Tremblay", + -15.265874862670898 + ], + [ + "▁Wimberley", + -15.265874862670898 + ], + [ + "▁agronomist", + -15.265874862670898 + ], + [ + "▁alkaloids", + -15.265874862670898 + ], + [ + "▁amblyopia", + -15.265874862670898 + ], + [ + "▁cellulosic", + -15.265874862670898 + ], + [ + "▁diphtheria", + -15.265874862670898 + ], + [ + "▁fraternities", + -15.265874862670898 + ], + [ + "▁infatuated", + -15.265874862670898 + ], + [ + "▁invisibly", + -15.265874862670898 + ], + [ + "▁irrefutable", + -15.265874862670898 + ], + [ + "▁poignancy", + -15.265874862670898 + ], + [ + "▁scrawled", + -15.265874862670898 + ], + [ + "▁ungreased", + -15.265874862670898 + ], + [ + "Controversial", + -15.265875816345217 + ], + [ + "hamilton", + -15.265875816345217 + ], + [ + "▁Disparities", + -15.265875816345217 + ], + [ + "▁Eevee", + -15.265875816345217 + ], + [ + "▁Maktoum", + -15.265875816345217 + ], + [ + "▁Mnuchin", + -15.265875816345217 + ], + [ + "▁Montezuma", + -15.265875816345217 + ], + [ + "▁Pilkington", + -15.265875816345217 + ], + [ + "▁Pratchett", + -15.265875816345217 + ], + [ + "▁boutonniere", + -15.265875816345217 + ], + [ + "▁capsaicin", + -15.265875816345217 + ], + [ + "▁cerebellum", + -15.265875816345217 + ], + [ + "▁depravity", + -15.265875816345217 + ], + [ + "▁oesophagus", + -15.265875816345217 + ], + [ + "▁outgrew", + -15.265875816345217 + ], + [ + "Cloutier", + -15.265876770019531 + ], + [ + "Lakshman", + -15.265876770019531 + ], + [ + "▁Alumnae", + -15.265876770019531 + ], + [ + "▁Isidro", + -15.265877723693848 + ], + [ + "▁waltham", + -15.265877723693848 + ], + [ + "▁Cavanaugh", + -15.265878677368164 + ], + [ + "▁Escalade", + -15.265878677368164 + ], + [ + "▁Hossain", + -15.265878677368164 + ], + [ + "▁Mujeres", + -15.265878677368164 + ], + [ + "▁Revenant", + -15.265878677368164 + ], + [ + "▁electrophoresis", + -15.265878677368164 + ], + [ + "flecked", + -15.26587963104248 + ], + [ + "▁isopropyl", + -15.26587963104248 + ], + [ + "cincinnati", + -15.265880584716797 + ], + [ + "▁Leningrad", + -15.26588249206543 + ], + [ + "▁dispersant", + -15.26588249206543 + ], + [ + "▁lysine", + -15.26588249206543 + ], + [ + "▁CrazyBulk", + -15.265884399414062 + ], + [ + "▁Shipwreck", + -15.265884399414062 + ], + [ + "▁Nakuru", + -15.265886306762695 + ], + [ + "▁MONSTER", + -15.265887260437012 + ], + [ + "▁Hinsdale", + -15.265888214111328 + ], + [ + "▁RZR", + -15.265888214111328 + ], + [ + "▁misbehavior", + -15.265888214111328 + ], + [ + "▁Beehive", + -15.265889167785645 + ], + [ + "▁telehandler", + -15.265889167785645 + ], + [ + "ikulski", + -15.26589012145996 + ], + [ + "▁Cineplex", + -15.26589012145996 + ], + [ + "▁ielts", + -15.26589012145996 + ], + [ + "▁Ajanta", + -15.265891075134276 + ], + [ + "▁EAGLE", + -15.265892028808594 + ], + [ + "▁Shaykh", + -15.265892028808594 + ], + [ + "Discreet", + -15.265893936157228 + ], + [ + "▁sioux", + -15.265894889831545 + ], + [ + "▁Toulon", + -15.265901565551758 + ], + [ + "▁Fluency", + -15.26590347290039 + ], + [ + "▁Rawlins", + -15.26590347290039 + ], + [ + "▁GETTING", + -15.26590633392334 + ], + [ + "▁Leominster", + -15.26590633392334 + ], + [ + "▁molestation", + -15.26590633392334 + ], + 
[ + "▁interlaced", + -15.265907287597656 + ], + [ + "▁Commended", + -15.265909194946287 + ], + [ + "▁lurid", + -15.265909194946287 + ], + [ + "▁Curvy", + -15.265910148620604 + ], + [ + "▁ceaselessly", + -15.265912055969238 + ], + [ + "▁flitting", + -15.265913009643556 + ], + [ + "▁beleive", + -15.265920639038086 + ], + [ + "▁CareerBuilder", + -15.26592254638672 + ], + [ + "▁Casanova", + -15.26592254638672 + ], + [ + "▁Chesney", + -15.265923500061035 + ], + [ + "▁Iguana", + -15.265926361083984 + ], + [ + "▁Scriptural", + -15.265926361083984 + ], + [ + "▁maceration", + -15.265931129455566 + ], + [ + "▁whiny", + -15.2659330368042 + ], + [ + "▁Thornhill", + -15.265934944152832 + ], + [ + "▁Alternator", + -15.265935897827148 + ], + [ + "▁Laverne", + -15.265936851501465 + ], + [ + "▁croup", + -15.26593780517578 + ], + [ + "▁zonal", + -15.265939712524414 + ], + [ + "▁tithing", + -15.26594066619873 + ], + [ + "▁intraoperative", + -15.26594352722168 + ], + [ + "▁mutiny", + -15.265944480895996 + ], + [ + "Ambitious", + -15.265946388244627 + ], + [ + "▁spurned", + -15.265950202941896 + ], + [ + "▁confectionary", + -15.265951156616213 + ], + [ + "▁McCarty", + -15.265953063964844 + ], + [ + "▁Pradeep", + -15.26595687866211 + ], + [ + "▁gentile", + -15.26595687866211 + ], + [ + "▁Biblioth", + -15.265962600708008 + ], + [ + "▁dammit", + -15.265968322753906 + ], + [ + "▁hogging", + -15.265969276428224 + ], + [ + "▁pickleball", + -15.26597785949707 + ], + [ + "▁Killeen", + -15.26598072052002 + ], + [ + "▁unsaved", + -15.265984535217283 + ], + [ + "▁fragmentary", + -15.265995979309082 + ], + [ + "▁Fahey", + -15.265996932983398 + ], + [ + "▁Calicut", + -15.265999794006348 + ], + [ + "▁artifice", + -15.266002655029297 + ], + [ + "▁Hosni", + -15.266009330749512 + ], + [ + "▁Jawbone", + -15.266009330749512 + ], + [ + "▁forbade", + -15.266020774841309 + ], + [ + "▁Sangeet", + -15.266021728515623 + ], + [ + "▁Rogan", + -15.266023635864258 + ], + [ + "▁Ipsos", + -15.266031265258787 + ], + [ + "▁ActionScript", + -15.266033172607422 + ], + [ + "▁Freudian", + -15.266045570373535 + ], + [ + "▁bourgeoisie", + -15.266054153442385 + ], + [ + "▁Fashionista", + -15.266058921813965 + ], + [ + "▁DeMarcus", + -15.266061782836914 + ], + [ + "▁Castaway", + -15.266072273254396 + ], + [ + "▁multiracial", + -15.26607608795166 + ], + [ + "▁Oakleigh", + -15.266077041625977 + ], + [ + "▁Suncoast", + -15.266077041625977 + ], + [ + "▁NDTV", + -15.266083717346191 + ], + [ + "▁vernal", + -15.266098022460938 + ], + [ + "▁Conwy", + -15.26610279083252 + ], + [ + "▁10-15%", + -15.266108512878418 + ], + [ + "▁immunized", + -15.266111373901367 + ], + [ + "Paradoxically", + -15.266119956970217 + ], + [ + "▁Bushnell", + -15.266125679016112 + ], + [ + "▁Pacino", + -15.266153335571287 + ], + [ + "▁photorealistic", + -15.266157150268556 + ], + [ + "▁Grandchildren", + -15.266169548034668 + ], + [ + "SHORE", + -15.2661714553833 + ], + [ + "▁Moraine", + -15.266186714172363 + ], + [ + "▁Jewry", + -15.266190528869627 + ], + [ + "▁Osgood", + -15.266191482543944 + ], + [ + "▁snakeskin", + -15.266197204589844 + ], + [ + "▁Booneville", + -15.26620101928711 + ], + [ + "▁Chitty", + -15.266202926635742 + ], + [ + "▁coronal", + -15.266228675842283 + ], + [ + "▁Ocado", + -15.266237258911133 + ], + [ + "▁Priestley", + -15.26625919342041 + ], + [ + "▁Doubletree", + -15.266260147094728 + ], + [ + "▁Dilip", + -15.26626205444336 + ], + [ + "▁Rumours", + -15.26627254486084 + ], + [ + "▁SunTrust", + -15.266312599182127 + ], + [ + "▁METHOD", + -15.26633071899414 + ], + 
[ + "▁06:00", + -15.266337394714355 + ], + [ + "▁Etude", + -15.266348838806152 + ], + [ + "▁$7.00", + -15.266363143920898 + ], + [ + "▁Waugh", + -15.266403198242188 + ], + [ + "▁Chopped", + -15.266408920288086 + ], + [ + "Flurry", + -15.266425132751465 + ], + [ + "▁$6.50", + -15.266456604003906 + ], + [ + "▁Copco", + -15.266472816467283 + ], + [ + "▁Cerner", + -15.266473770141602 + ], + [ + "▁Jeffries", + -15.2664794921875 + ], + [ + "▁kerato", + -15.26650619506836 + ], + [ + "▁galette", + -15.266541481018066 + ], + [ + "▁Angling", + -15.266626358032228 + ], + [ + "7:23", + -15.266693115234377 + ], + [ + "▁serger", + -15.266709327697754 + ], + [ + "HAWK", + -15.2667236328125 + ], + [ + "▁Deaconess", + -15.266736030578612 + ], + [ + "▁perfumery", + -15.266773223876951 + ], + [ + "▁tarball", + -15.266791343688965 + ], + [ + "RESOURCE", + -15.266897201538086 + ], + [ + "▁Regret", + -15.266912460327148 + ], + [ + "NRP", + -15.266984939575195 + ], + [ + "▁Treble", + -15.26698875427246 + ], + [ + "Chloro", + -15.267128944396973 + ], + [ + "onkers", + -15.2672119140625 + ], + [ + "▁MILLER", + -15.267349243164062 + ], + [ + "NISA", + -15.267635345458984 + ], + [ + "0,000)", + -15.267682075500488 + ], + [ + "institution", + -15.267735481262209 + ], + [ + "▁peatland", + -15.267767906188965 + ], + [ + "18-34", + -15.267800331115724 + ], + [ + "axil", + -15.267805099487305 + ], + [ + "bildung", + -15.267874717712402 + ], + [ + "caccia", + -15.268054962158203 + ], + [ + "Gurugram", + -15.268123626708984 + ], + [ + "arrison", + -15.2681245803833 + ], + [ + "▁reconfirm", + -15.268146514892578 + ], + [ + "Unscrew", + -15.268367767333984 + ], + [ + "▁Quirk", + -15.268391609191896 + ], + [ + "voking", + -15.268534660339355 + ], + [ + "webdesign", + -15.268731117248535 + ], + [ + "▁chopstick", + -15.269041061401367 + ], + [ + "▁pitfall", + -15.269219398498535 + ], + [ + "6:33", + -15.26937484741211 + ], + [ + "oteric", + -15.269415855407717 + ], + [ + "periodic", + -15.269572257995604 + ], + [ + "affar", + -15.269755363464355 + ], + [ + "▁Bulge", + -15.269767761230469 + ], + [ + "▁reenter", + -15.26992130279541 + ], + [ + "▁Tegra", + -15.269939422607422 + ], + [ + "▁bursa", + -15.269972801208496 + ], + [ + "TALK", + -15.269984245300291 + ], + [ + "8820", + -15.27015495300293 + ], + [ + "▁($0.", + -15.27020263671875 + ], + [ + "▁assignee", + -15.270328521728516 + ], + [ + "Guarantee", + -15.270329475402832 + ], + [ + "qualify", + -15.27045440673828 + ], + [ + "7-43", + -15.270610809326172 + ], + [ + "6:17", + -15.270637512207031 + ], + [ + "Shelby", + -15.270869255065918 + ], + [ + "tumblr", + -15.270878791809082 + ], + [ + "desirable", + -15.270880699157717 + ], + [ + "millimeter", + -15.270883560180664 + ], + [ + "Crucial", + -15.270886421203612 + ], + [ + "Analytical", + -15.270901679992676 + ], + [ + "apartheid", + -15.270901679992676 + ], + [ + "CONTACT", + -15.270907402038574 + ], + [ + "Antarctic", + -15.270912170410156 + ], + [ + "Impossible", + -15.270912170410156 + ], + [ + "Tasting", + -15.270915985107422 + ], + [ + "Stacey", + -15.270919799804688 + ], + [ + "investigator", + -15.270919799804688 + ], + [ + "Randall", + -15.270922660827637 + ], + [ + "toddler", + -15.270922660827637 + ], + [ + "Biggest", + -15.270930290222168 + ], + [ + "Logistics", + -15.270930290222168 + ], + [ + "feminine", + -15.270931243896484 + ], + [ + "Leisure", + -15.2709321975708 + ], + [ + "scenario", + -15.2709321975708 + ], + [ + "persistent", + -15.270933151245115 + ], + [ + "NAG", + -15.270936012268066 + ], + [ + 
"illustrated", + -15.270936012268066 + ], + [ + "Resistance", + -15.2709379196167 + ], + [ + "cuisine", + -15.270941734313965 + ], + [ + "saturation", + -15.270947456359863 + ], + [ + "voucher", + -15.270956993103027 + ], + [ + "conductive", + -15.270957946777344 + ], + [ + "Bespoke", + -15.27096176147461 + ], + [ + "refugee", + -15.27096462249756 + ], + [ + "FedEx", + -15.270965576171877 + ], + [ + "Occupational", + -15.27096939086914 + ], + [ + "Munich", + -15.270970344543455 + ], + [ + "chloride", + -15.27097225189209 + ], + [ + "friction", + -15.270976066589355 + ], + [ + "documentary", + -15.270977020263672 + ], + [ + "Spotify", + -15.270989418029783 + ], + [ + "Confirmation", + -15.27099323272705 + ], + [ + "exclusion", + -15.270999908447266 + ], + [ + "goodbye", + -15.271003723144531 + ], + [ + "Cornell", + -15.271039009094238 + ], + [ + "Caravan", + -15.271044731140137 + ], + [ + "beginner", + -15.271059036254885 + ], + [ + "VALUE", + -15.271066665649414 + ], + [ + "Hampton", + -15.271100044250488 + ], + [ + "mommy", + -15.27113151550293 + ], + [ + "Interim", + -15.271148681640623 + ], + [ + "congress", + -15.271154403686523 + ], + [ + "arrangement", + -15.271162986755373 + ], + [ + "Keynote", + -15.271206855773926 + ], + [ + "chunk", + -15.271239280700684 + ], + [ + "capsule", + -15.271258354187012 + ], + [ + "percentage", + -15.271269798278809 + ], + [ + "▁Fibro", + -15.271334648132324 + ], + [ + "gastric", + -15.271337509155272 + ], + [ + "▁MOBO", + -15.27141571044922 + ], + [ + "3030", + -15.27145004272461 + ], + [ + "▁logarithmic", + -15.27153491973877 + ], + [ + "Nordic", + -15.271644592285156 + ], + [ + "WNT", + -15.271702766418455 + ], + [ + "9:16", + -15.271732330322266 + ], + [ + "Irving", + -15.271766662597656 + ], + [ + "frequent", + -15.272364616394045 + ], + [ + "dipped", + -15.272382736206056 + ], + [ + "demographic", + -15.272578239440918 + ], + [ + "Bubb", + -15.272624969482422 + ], + [ + "Textile", + -15.272671699523926 + ], + [ + "▁Macaroni", + -15.27267837524414 + ], + [ + "тран", + -15.272811889648438 + ], + [ + "Badge", + -15.272957801818848 + ], + [ + "7:13", + -15.273056030273438 + ], + [ + "6:21", + -15.273260116577148 + ], + [ + "▁Proportion", + -15.27331256866455 + ], + [ + "swiss", + -15.2734956741333 + ], + [ + "Observer", + -15.273524284362791 + ], + [ + "▁Tulli", + -15.273600578308104 + ], + [ + "dodge", + -15.273783683776855 + ], + [ + "Dipped", + -15.273988723754885 + ], + [ + "legation", + -15.274045944213867 + ], + [ + "▁$0.9", + -15.27409553527832 + ], + [ + "INDU", + -15.274261474609377 + ], + [ + "vitro", + -15.274418830871582 + ], + [ + "nimous", + -15.274742126464844 + ], + [ + "▁Ahern", + -15.27515983581543 + ], + [ + "▁unceremonious", + -15.275696754455566 + ], + [ + "mittel", + -15.27574348449707 + ], + [ + "4:03", + -15.275853157043455 + ], + [ + "CRIME", + -15.275994300842283 + ], + [ + "lainen", + -15.27625560760498 + ], + [ + "attie", + -15.27659797668457 + ], + [ + "nnage", + -15.276787757873535 + ], + [ + "▁0333", + -15.276796340942385 + ], + [ + "▁Dacia", + -15.277231216430664 + ], + [ + "▁$11,000", + -15.277313232421877 + ], + [ + "▁Kalka", + -15.277507781982422 + ], + [ + "▁$4.9", + -15.277517318725586 + ], + [ + "▁07:3", + -15.277739524841309 + ], + [ + "hype", + -15.277891159057615 + ], + [ + "▁jardin", + -15.27803897857666 + ], + [ + "bromo", + -15.278071403503418 + ], + [ + "Jub", + -15.27813720703125 + ], + [ + "Anyhoo", + -15.278237342834473 + ], + [ + "▁bough", + -15.278339385986328 + ], + [ + "akkal", + -15.278480529785156 
+ ], + [ + "8,800", + -15.278705596923828 + ], + [ + "▁Shashi", + -15.278785705566406 + ], + [ + "SHAW", + -15.278848648071287 + ], + [ + "Convention", + -15.27899932861328 + ], + [ + "enhagen", + -15.279194831848145 + ], + [ + "JOIN", + -15.279296875 + ], + [ + "97%", + -15.27931022644043 + ], + [ + "▁Intellect", + -15.279337882995604 + ], + [ + "▁Ligu", + -15.27937126159668 + ], + [ + "▁extrem", + -15.279868125915527 + ], + [ + "▁TRAIL", + -15.279929161071776 + ], + [ + "hahah", + -15.280034065246582 + ], + [ + "▁Marmon", + -15.28006362915039 + ], + [ + "▁Vocation", + -15.28006649017334 + ], + [ + "Buckle", + -15.280312538146973 + ], + [ + "Barrett", + -15.280499458312988 + ], + [ + "▁888-565-", + -15.280579566955566 + ], + [ + "▁844-244-6190", + -15.280580520629885 + ], + [ + "▁844-244-6220", + -15.280580520629885 + ], + [ + "▁888-666-1744", + -15.280580520629885 + ], + [ + "▁Blaisdell", + -15.280580520629885 + ], + [ + "▁Osinbajo", + -15.280580520629885 + ], + [ + "▁Sevastopol", + -15.280580520629885 + ], + [ + "▁Stoddard", + -15.280580520629885 + ], + [ + "▁armistice", + -15.280580520629885 + ], + [ + "▁aspartame", + -15.280580520629885 + ], + [ + "▁beaucoup", + -15.280580520629885 + ], + [ + "▁dulcimer", + -15.280580520629885 + ], + [ + "▁hibernating", + -15.280580520629885 + ], + [ + "▁jennifer", + -15.280580520629885 + ], + [ + "▁laissez", + -15.280580520629885 + ], + [ + "▁manoeuvrability", + -15.280580520629885 + ], + [ + "▁neurotoxin", + -15.280580520629885 + ], + [ + "▁teleprompter", + -15.280580520629885 + ], + [ + "▁unpunished", + -15.280580520629885 + ], + [ + "▁walgreens", + -15.280580520629885 + ], + [ + "Capacitor", + -15.2805814743042 + ], + [ + "NAEYC", + -15.2805814743042 + ], + [ + "Vientiane", + -15.2805814743042 + ], + [ + "Visceral", + -15.2805814743042 + ], + [ + "▁800-273-3217", + -15.2805814743042 + ], + [ + "▁AVENUE", + -15.2805814743042 + ], + [ + "▁Anaconda", + -15.2805814743042 + ], + [ + "▁Athabasca", + -15.2805814743042 + ], + [ + "▁Bicentennial", + -15.2805814743042 + ], + [ + "▁Brouwer", + -15.2805814743042 + ], + [ + "▁Bwindi", + -15.2805814743042 + ], + [ + "▁Conduit", + -15.2805814743042 + ], + [ + "▁Crematorium", + -15.2805814743042 + ], + [ + "▁Custodian", + -15.2805814743042 + ], + [ + "▁Finkelstein", + -15.2805814743042 + ], + [ + "▁Firenze", + -15.2805814743042 + ], + [ + "▁Forsaken", + -15.2805814743042 + ], + [ + "▁Fresnel", + -15.2805814743042 + ], + [ + "▁Futurity", + -15.2805814743042 + ], + [ + "▁Hazlewood", + -15.2805814743042 + ], + [ + "▁Hiawatha", + -15.2805814743042 + ], + [ + "▁Higdon", + -15.2805814743042 + ], + [ + "▁IPVanish", + -15.2805814743042 + ], + [ + "▁Incumbent", + -15.2805814743042 + ], + [ + "▁Kennewick", + -15.2805814743042 + ], + [ + "▁Khwaja", + -15.2805814743042 + ], + [ + "▁Kilgore", + -15.2805814743042 + ], + [ + "▁LUXURY", + -15.2805814743042 + ], + [ + "▁Leibniz", + -15.2805814743042 + ], + [ + "▁Melendez", + -15.2805814743042 + ], + [ + "▁Milpitas", + -15.2805814743042 + ], + [ + "▁Museveni", + -15.2805814743042 + ], + [ + "▁Oscillator", + -15.2805814743042 + ], + [ + "▁Preakness", + -15.2805814743042 + ], + [ + "▁ROBLOX", + -15.2805814743042 + ], + [ + "▁Smackdown", + -15.2805814743042 + ], + [ + "▁Stooges", + -15.2805814743042 + ], + [ + "▁Stourbridge", + -15.2805814743042 + ], + [ + "▁Sumerian", + -15.2805814743042 + ], + [ + "▁Tagalog", + -15.2805814743042 + ], + [ + "▁Tallulah", + -15.2805814743042 + ], + [ + "▁Turnitin", + -15.2805814743042 + ], + [ + "▁apocrypha", + -15.2805814743042 + ], + [ + 
"▁circumcised", + -15.2805814743042 + ], + [ + "▁domineering", + -15.2805814743042 + ], + [ + "▁easiness", + -15.2805814743042 + ], + [ + "▁eukaryotic", + -15.2805814743042 + ], + [ + "▁folkloric", + -15.2805814743042 + ], + [ + "▁metamorphose", + -15.2805814743042 + ], + [ + "▁miniaturized", + -15.2805814743042 + ], + [ + "▁numismatic", + -15.2805814743042 + ], + [ + "▁petroglyphs", + -15.2805814743042 + ], + [ + "▁pfSense", + -15.2805814743042 + ], + [ + "▁phylogeny", + -15.2805814743042 + ], + [ + "▁proliferating", + -15.2805814743042 + ], + [ + "▁swashbuckling", + -15.2805814743042 + ], + [ + "▁tourniquet", + -15.2805814743042 + ], + [ + "▁vocabularies", + -15.2805814743042 + ], + [ + "▁ATTACK", + -15.280582427978516 + ], + [ + "▁insemination", + -15.280582427978516 + ], + [ + "▁snaking", + -15.280583381652832 + ], + [ + "▁sterility", + -15.280583381652832 + ], + [ + "▁245/45", + -15.280584335327148 + ], + [ + "▁Adityanath", + -15.280584335327148 + ], + [ + "▁Beaconsfield", + -15.280585289001465 + ], + [ + "▁Beetlejuice", + -15.280585289001465 + ], + [ + "▁metastasize", + -15.28058624267578 + ], + [ + "▁Carnaval", + -15.280587196350098 + ], + [ + "▁Clacton", + -15.280587196350098 + ], + [ + "▁Babbitt", + -15.280588150024414 + ], + [ + "▁CONCEPT", + -15.28058910369873 + ], + [ + "▁Cosgrove", + -15.28058910369873 + ], + [ + "▁Desirable", + -15.28058910369873 + ], + [ + "azhagan", + -15.280590057373049 + ], + [ + "▁committment", + -15.280591011047363 + ], + [ + "▁stamens", + -15.280591011047363 + ], + [ + "▁testicles", + -15.280591011047363 + ], + [ + "▁tiempo", + -15.280591011047363 + ], + [ + "▁Toothpaste", + -15.28059196472168 + ], + [ + "▁expediting", + -15.28059196472168 + ], + [ + "▁Metaphysics", + -15.280592918395996 + ], + [ + "▁Presiding", + -15.280592918395996 + ], + [ + "▁floodwaters", + -15.280592918395996 + ], + [ + "▁qPCR", + -15.280593872070312 + ], + [ + "Funniest", + -15.280596733093262 + ], + [ + "▁Licensor", + -15.280596733093262 + ], + [ + "▁moraine", + -15.280598640441896 + ], + [ + "▁Kybella", + -15.280599594116213 + ], + [ + "▁Podesta", + -15.280600547790527 + ], + [ + "▁ascendancy", + -15.280600547790527 + ], + [ + "▁Liddell", + -15.280601501464844 + ], + [ + "▁Jethro", + -15.280604362487791 + ], + [ + "▁porcine", + -15.280604362487791 + ], + [ + "▁CARPET", + -15.28060531616211 + ], + [ + "▁Weiwei", + -15.28060531616211 + ], + [ + "▁cicada", + -15.280606269836426 + ], + [ + "▁Multiplier", + -15.280607223510742 + ], + [ + "▁synovia", + -15.280609130859377 + ], + [ + "▁Elasticsearch", + -15.280611991882324 + ], + [ + "▁variably", + -15.280611991882324 + ], + [ + "▁Brocade", + -15.28061294555664 + ], + [ + "▁polyphonic", + -15.280613899230955 + ], + [ + "▁Teardrop", + -15.280616760253906 + ], + [ + "▁communique", + -15.280619621276855 + ], + [ + "▁Kansai", + -15.28062629699707 + ], + [ + "▁(1959)", + -15.280627250671388 + ], + [ + "▁Ekiti", + -15.280627250671388 + ], + [ + "▁foetus", + -15.280630111694336 + ], + [ + "▁DIRECTLY", + -15.280634880065918 + ], + [ + "▁Azusa", + -15.280635833740234 + ], + [ + "▁Menorca", + -15.2806396484375 + ], + [ + "Kadavul", + -15.28064250946045 + ], + [ + "▁Encoder", + -15.28064250946045 + ], + [ + "▁Bremerton", + -15.280644416809082 + ], + [ + "▁MAGNET", + -15.280645370483398 + ], + [ + "▁ganglion", + -15.280651092529297 + ], + [ + "▁ionizing", + -15.280652046203612 + ], + [ + "▁pitchfork", + -15.280654907226562 + ], + [ + "▁Murrell", + -15.280658721923828 + ], + [ + "▁Bunbury", + -15.280659675598145 + ], + [ + "▁godliness", + 
-15.28066062927246 + ], + [ + "▁supervillain", + -15.280665397644045 + ], + [ + "▁Pawnee", + -15.280670166015623 + ], + [ + "▁Hatchery", + -15.280672073364258 + ], + [ + "▁Cluj", + -15.28067684173584 + ], + [ + "▁ALLOY", + -15.280677795410156 + ], + [ + "▁corolla", + -15.280678749084473 + ], + [ + "▁$24.95", + -15.280679702758787 + ], + [ + "▁Abode", + -15.280688285827637 + ], + [ + "▁shekels", + -15.280694007873535 + ], + [ + "▁Denied", + -15.280702590942385 + ], + [ + "PRAYER", + -15.280704498291016 + ], + [ + "▁Swamy", + -15.280705451965332 + ], + [ + "▁objet", + -15.280705451965332 + ], + [ + "▁UNLESS", + -15.280733108520508 + ], + [ + "▁slitting", + -15.280739784240724 + ], + [ + "▁snapchat", + -15.280739784240724 + ], + [ + "▁DeWalt", + -15.280749320983888 + ], + [ + "▁hacksaw", + -15.280754089355469 + ], + [ + "▁Kaizen", + -15.280766487121582 + ], + [ + "▁unaided", + -15.280778884887695 + ], + [ + "▁molly", + -15.28078269958496 + ], + [ + "▁MediaWiki", + -15.280803680419922 + ], + [ + "▁Branford", + -15.280805587768556 + ], + [ + "▁Haskins", + -15.28081512451172 + ], + [ + "▁mohawk", + -15.280838012695312 + ], + [ + "▁sunbeds", + -15.280845642089844 + ], + [ + "▁Tombstone", + -15.280853271484377 + ], + [ + "▁Tycoon", + -15.280853271484377 + ], + [ + "▁Veneta", + -15.280854225158691 + ], + [ + "▁happenstance", + -15.280856132507324 + ], + [ + "RealEstate", + -15.280875205993652 + ], + [ + "▁Polarized", + -15.280877113342283 + ], + [ + "▁Ashlee", + -15.280878067016602 + ], + [ + "▁ATTEND", + -15.280885696411133 + ], + [ + "▁Draghi", + -15.280892372131348 + ], + [ + "▁FOMO", + -15.280908584594728 + ], + [ + "▁Homeopathic", + -15.28091049194336 + ], + [ + "▁Warnock", + -15.280939102172852 + ], + [ + "▁Keanu", + -15.280941009521484 + ], + [ + "▁Glencore", + -15.280943870544434 + ], + [ + "▁Handgun", + -15.280982971191406 + ], + [ + "▁Taichi", + -15.280982971191406 + ], + [ + "▁tRNA", + -15.281025886535645 + ], + [ + "▁Dissent", + -15.281048774719238 + ], + [ + "▁Arielle", + -15.281058311462402 + ], + [ + "▁Factories", + -15.281061172485352 + ], + [ + "▁Okoro", + -15.281061172485352 + ], + [ + "▁sateen", + -15.28106689453125 + ], + [ + "politik", + -15.281071662902832 + ], + [ + "JAZZ", + -15.281085968017578 + ], + [ + "2:51", + -15.281089782714844 + ], + [ + "▁myopic", + -15.281237602233888 + ], + [ + "▁nought", + -15.28127098083496 + ], + [ + "▁Lungu", + -15.28131103515625 + ], + [ + "▁Deviation", + -15.281400680541992 + ], + [ + "rzburg", + -15.28140354156494 + ], + [ + "▁Turntable", + -15.281449317932127 + ], + [ + "bazar", + -15.281457901000977 + ], + [ + "▁Maung", + -15.28149127960205 + ], + [ + "uzzo", + -15.281606674194336 + ], + [ + "plish", + -15.28170108795166 + ], + [ + "▁Principe", + -15.281807899475098 + ], + [ + "▁Journ", + -15.28181266784668 + ], + [ + "▁23:5", + -15.281843185424805 + ], + [ + "1305", + -15.28198528289795 + ], + [ + "▁Jordyn", + -15.282017707824709 + ], + [ + "ifolium", + -15.282038688659668 + ], + [ + "Argent", + -15.28213119506836 + ], + [ + "▁Knitter", + -15.28224277496338 + ], + [ + "▁mediastin", + -15.282255172729492 + ], + [ + "BENZ", + -15.282262802124023 + ], + [ + "▁digitizer", + -15.282303810119627 + ], + [ + "▁protea", + -15.282347679138184 + ], + [ + "▁Rodger", + -15.282426834106444 + ], + [ + "▁Flank", + -15.28243350982666 + ], + [ + "▁Jeevan", + -15.28243350982666 + ], + [ + "▁Vesel", + -15.282472610473633 + ], + [ + "▁TATA", + -15.282493591308594 + ], + [ + "▁Expose", + -15.282546043395996 + ], + [ + "▁drunkard", + -15.282548904418944 + ], + 
[ + "+00:00", + -15.282669067382812 + ], + [ + "Practise", + -15.282719612121582 + ], + [ + "Cleanse", + -15.282866477966309 + ], + [ + "Quoting", + -15.282910346984863 + ], + [ + "omodoro", + -15.283039093017578 + ], + [ + "▁LOWER", + -15.283092498779297 + ], + [ + "▁Staub", + -15.283160209655762 + ], + [ + "▁Motu", + -15.28363037109375 + ], + [ + "▁Giorgi", + -15.28366756439209 + ], + [ + "▁JAMB", + -15.283716201782228 + ], + [ + "▁Shyam", + -15.283793449401855 + ], + [ + "ascii", + -15.283809661865234 + ], + [ + "▁centimetre", + -15.283845901489258 + ], + [ + "▁Frauen", + -15.28386116027832 + ], + [ + "▁nodule", + -15.283884048461914 + ], + [ + "1545", + -15.284005165100098 + ], + [ + "▁EXPLO", + -15.284095764160156 + ], + [ + "3(4):", + -15.284295082092283 + ], + [ + "▁habla", + -15.28442096710205 + ], + [ + "▁spammer", + -15.284510612487791 + ], + [ + "abilty", + -15.28461742401123 + ], + [ + "Discern", + -15.284676551818848 + ], + [ + "▁matric", + -15.284707069396973 + ], + [ + "1073", + -15.28475570678711 + ], + [ + "sapien", + -15.284883499145508 + ], + [ + "▁Kamara", + -15.285061836242676 + ], + [ + "▁sensitize", + -15.285072326660156 + ], + [ + "▁Geeta", + -15.285093307495115 + ], + [ + "Adresse", + -15.285094261169434 + ], + [ + "▁PIPE", + -15.285170555114746 + ], + [ + "aversham", + -15.285270690917969 + ], + [ + "QUIN", + -15.28561019897461 + ], + [ + "discuss", + -15.28566074371338 + ], + [ + "▁mkv", + -15.285754203796388 + ], + [ + "lucas", + -15.285775184631348 + ], + [ + "Approximate", + -15.285781860351562 + ], + [ + "Subsequent", + -15.285799980163574 + ], + [ + "stolen", + -15.285892486572266 + ], + [ + "▁FOREST", + -15.286015510559082 + ], + [ + "Immigrant", + -15.28603172302246 + ], + [ + "simplification", + -15.28603458404541 + ], + [ + "Refrigerate", + -15.286046981811523 + ], + [ + "biblical", + -15.286046981811523 + ], + [ + "Execution", + -15.286050796508787 + ], + [ + "Amnesty", + -15.286057472229004 + ], + [ + "Examination", + -15.286069869995115 + ], + [ + "Embedded", + -15.286073684692385 + ], + [ + "testosterone", + -15.2860746383667 + ], + [ + "Simmons", + -15.286076545715332 + ], + [ + "Gibson", + -15.286081314086914 + ], + [ + "forgiveness", + -15.286084175109863 + ], + [ + "abandoned", + -15.286088943481444 + ], + [ + "opportunities", + -15.286088943481444 + ], + [ + "bathtub", + -15.286089897155762 + ], + [ + "Jerome", + -15.286090850830078 + ], + [ + "separation", + -15.286091804504396 + ], + [ + "Trauma", + -15.286092758178713 + ], + [ + "Cathedral", + -15.286094665527344 + ], + [ + "Hospice", + -15.286094665527344 + ], + [ + "▁Collaborator", + -15.286094665527344 + ], + [ + "Gorilla", + -15.28611660003662 + ], + [ + "Hoop", + -15.28611660003662 + ], + [ + "literacy", + -15.28611660003662 + ], + [ + "Acceptance", + -15.286121368408203 + ], + [ + "Removable", + -15.28616428375244 + ], + [ + "Picnic", + -15.286166191101074 + ], + [ + "wavelength", + -15.28616714477539 + ], + [ + "scanning", + -15.286184310913086 + ], + [ + "Accessible", + -15.286192893981934 + ], + [ + "Francisco", + -15.286282539367676 + ], + [ + "Showcase", + -15.28632926940918 + ], + [ + "Biology", + -15.286334991455078 + ], + [ + "Robust", + -15.286365509033203 + ], + [ + "impressive", + -15.286391258239746 + ], + [ + "Depot", + -15.286399841308594 + ], + [ + "Beloved", + -15.286419868469238 + ], + [ + "▁Joost", + -15.286429405212402 + ], + [ + "CCTV", + -15.286463737487791 + ], + [ + "Flour", + -15.286463737487791 + ], + [ + "▁Nabil", + -15.286516189575195 + ], + [ + "Rhino", + 
-15.28688621520996 + ], + [ + "Variant", + -15.28704833984375 + ], + [ + "flation", + -15.287101745605469 + ], + [ + "▁gymnastic", + -15.287139892578123 + ], + [ + "Handbook", + -15.287696838378906 + ], + [ + "reddy", + -15.287771224975586 + ], + [ + "uoco", + -15.287782669067385 + ], + [ + "▁7714", + -15.288084030151367 + ], + [ + "EBS", + -15.28824234008789 + ], + [ + "loaf", + -15.288267135620115 + ], + [ + "4:22", + -15.288355827331545 + ], + [ + "ovenproof", + -15.288545608520508 + ], + [ + "IVITY", + -15.28871250152588 + ], + [ + "1306", + -15.28874397277832 + ], + [ + "0-063", + -15.288814544677734 + ], + [ + "▁Phono", + -15.288873672485352 + ], + [ + "2033", + -15.28888702392578 + ], + [ + "voyage", + -15.288914680480955 + ], + [ + "0-062", + -15.289066314697266 + ], + [ + "chapel", + -15.289188385009766 + ], + [ + "INSON", + -15.289409637451172 + ], + [ + "UVU", + -15.289507865905762 + ], + [ + "LIZ", + -15.28954792022705 + ], + [ + "0:37", + -15.289620399475098 + ], + [ + "▁logarithm", + -15.289709091186523 + ], + [ + "essaywriter", + -15.289711952209473 + ], + [ + "SORB", + -15.290069580078123 + ], + [ + "Kumar", + -15.290139198303224 + ], + [ + "17-4", + -15.29014778137207 + ], + [ + "attitude", + -15.29033088684082 + ], + [ + "▁Claud", + -15.290376663208008 + ], + [ + "▁Graceful", + -15.290538787841797 + ], + [ + "9-34", + -15.290650367736816 + ], + [ + "▁Lipid", + -15.29071044921875 + ], + [ + "Mahony", + -15.291157722473145 + ], + [ + "▁Schwei", + -15.291394233703612 + ], + [ + "▁CATCH", + -15.291508674621582 + ], + [ + "▁Farris", + -15.29155731201172 + ], + [ + "8:08", + -15.291666030883787 + ], + [ + "cienne", + -15.291777610778809 + ], + [ + "francis", + -15.291805267333984 + ], + [ + "Deletion", + -15.291862487792969 + ], + [ + "▁verbalize", + -15.292051315307615 + ], + [ + "▁spiteful", + -15.292118072509766 + ], + [ + "▁Munk", + -15.292223930358888 + ], + [ + "▁Isobel", + -15.292322158813477 + ], + [ + "▁Riedel", + -15.292677879333496 + ], + [ + "▁Reinhard", + -15.293024063110352 + ], + [ + "ngrej", + -15.29308795928955 + ], + [ + "▁Jonesboro", + -15.293140411376951 + ], + [ + "HEAP", + -15.29315948486328 + ], + [ + "minium", + -15.293578147888184 + ], + [ + "Jude", + -15.293596267700195 + ], + [ + "imide", + -15.2936429977417 + ], + [ + "aggar", + -15.293774604797363 + ], + [ + "▁clench", + -15.294618606567385 + ], + [ + "▁terrify", + -15.294628143310549 + ], + [ + "Admit", + -15.294647216796877 + ], + [ + "ECTOR", + -15.294730186462402 + ], + [ + "0.44", + -15.294760704040527 + ], + [ + "symmetrical", + -15.294825553894045 + ], + [ + "communicati", + -15.294881820678713 + ], + [ + "1–5", + -15.29497528076172 + ], + [ + "▁Mathematica", + -15.294978141784668 + ], + [ + "▁excrete", + -15.294989585876465 + ], + [ + "Messi", + -15.29499626159668 + ], + [ + "▁Illuminati", + -15.295092582702637 + ], + [ + "▁$4000", + -15.295174598693848 + ], + [ + "▁Horri", + -15.295358657836914 + ], + [ + "▁Benign", + -15.295494079589844 + ], + [ + "▁POLISH", + -15.29550552368164 + ], + [ + "Disappointing", + -15.295506477355955 + ], + [ + "Molybdenum", + -15.295506477355955 + ], + [ + "Preservative", + -15.295506477355955 + ], + [ + "TEMPLATE", + -15.295506477355955 + ], + [ + "modafinil", + -15.295506477355955 + ], + [ + "sensitizing", + -15.295506477355955 + ], + [ + "toffehoff", + -15.295506477355955 + ], + [ + "▁Akismet", + -15.295506477355955 + ], + [ + "▁Alleppey", + -15.295506477355955 + ], + [ + "▁CONCACAF", + -15.295506477355955 + ], + [ + "▁Clojure", + -15.295506477355955 + ], + 
[ + "▁Constabulary", + -15.295506477355955 + ], + [ + "▁DELICIOUS", + -15.295506477355955 + ], + [ + "▁Endearing", + -15.295506477355955 + ], + [ + "▁Enormous", + -15.295506477355955 + ], + [ + "▁FEDERAL", + -15.295506477355955 + ], + [ + "▁Gorakhpur", + -15.295506477355955 + ], + [ + "▁Krampus", + -15.295506477355955 + ], + [ + "▁McAuliffe", + -15.295506477355955 + ], + [ + "▁Monteverde", + -15.295506477355955 + ], + [ + "▁Ouachita", + -15.295506477355955 + ], + [ + "▁Plasmodium", + -15.295506477355955 + ], + [ + "▁Portofino", + -15.295506477355955 + ], + [ + "▁Rajinikanth", + -15.295506477355955 + ], + [ + "▁Sequencing", + -15.295506477355955 + ], + [ + "▁Tagaytay", + -15.295506477355955 + ], + [ + "▁Villeneuve", + -15.295506477355955 + ], + [ + "▁WRITTEN", + -15.295506477355955 + ], + [ + "▁arboretum", + -15.295506477355955 + ], + [ + "▁deodorizing", + -15.295506477355955 + ], + [ + "▁depraved", + -15.295506477355955 + ], + [ + "▁diltiazem", + -15.295506477355955 + ], + [ + "▁dissociate", + -15.295506477355955 + ], + [ + "▁encasing", + -15.295506477355955 + ], + [ + "▁epistemic", + -15.295506477355955 + ], + [ + "▁epitaph", + -15.295506477355955 + ], + [ + "▁erythema", + -15.295506477355955 + ], + [ + "▁evacuating", + -15.295506477355955 + ], + [ + "▁excercise", + -15.295506477355955 + ], + [ + "▁falciparum", + -15.295506477355955 + ], + [ + "▁fallacies", + -15.295506477355955 + ], + [ + "▁flabbergasted", + -15.295506477355955 + ], + [ + "▁gesturing", + -15.295506477355955 + ], + [ + "▁indoctrinate", + -15.295506477355955 + ], + [ + "▁inopportune", + -15.295506477355955 + ], + [ + "▁interpolated", + -15.295506477355955 + ], + [ + "▁lullabies", + -15.295506477355955 + ], + [ + "▁nepotism", + -15.295506477355955 + ], + [ + "▁opossum", + -15.295506477355955 + ], + [ + "▁parakeet", + -15.295506477355955 + ], + [ + "▁piranha", + -15.295506477355955 + ], + [ + "▁plaudits", + -15.295506477355955 + ], + [ + "▁predilection", + -15.295506477355955 + ], + [ + "▁regurgitation", + -15.295506477355955 + ], + [ + "▁spectrophotomet", + -15.295506477355955 + ], + [ + "▁summarising", + -15.295506477355955 + ], + [ + "▁uncountable", + -15.295506477355955 + ], + [ + "▁unforeseeable", + -15.295506477355955 + ], + [ + "▁unknowable", + -15.295506477355955 + ], + [ + "▁unrequited", + -15.295506477355955 + ], + [ + "▁vermiculite", + -15.295506477355955 + ], + [ + "Facilitating", + -15.295507431030272 + ], + [ + "Unbeknown", + -15.295507431030272 + ], + [ + "▁Clasico", + -15.295507431030272 + ], + [ + "▁Disguise", + -15.295507431030272 + ], + [ + "▁Remastered", + -15.295507431030272 + ], + [ + "▁Vidyalaya", + -15.295507431030272 + ], + [ + "▁hematoma", + -15.295507431030272 + ], + [ + "127.0.0.1", + -15.29550838470459 + ], + [ + "Prolific", + -15.29550838470459 + ], + [ + "▁Bexar", + -15.29550838470459 + ], + [ + "▁Halibut", + -15.29550838470459 + ], + [ + "▁Hingoli", + -15.29550838470459 + ], + [ + "▁Kiyosaki", + -15.29550838470459 + ], + [ + "▁Shastri", + -15.29550838470459 + ], + [ + "▁pulsar", + -15.29550838470459 + ], + [ + "▁Baumgartner", + -15.295509338378906 + ], + [ + "▁Humayun", + -15.295509338378906 + ], + [ + "▁papyrus", + -15.295509338378906 + ], + [ + "▁EXCHANGE", + -15.29551124572754 + ], + [ + "▁Fawkes", + -15.29551124572754 + ], + [ + "▁MIDDLE", + -15.29551124572754 + ], + [ + "▁Gamecocks", + -15.295512199401855 + ], + [ + "▁Suppression", + -15.295512199401855 + ], + [ + "▁exposé", + -15.295512199401855 + ], + [ + "▁neoplasm", + -15.295512199401855 + ], + [ + "▁antithetical", + 
-15.295514106750488 + ], + [ + "▁Thriving", + -15.295515060424805 + ], + [ + "▁stigmatized", + -15.295515060424805 + ], + [ + "▁Fiennes", + -15.29551601409912 + ], + [ + "▁Resistors", + -15.29551601409912 + ], + [ + "▁abstention", + -15.29551601409912 + ], + [ + "▁espinho", + -15.29551601409912 + ], + [ + "▁veneration", + -15.29551601409912 + ], + [ + "ANDROID", + -15.295516967773438 + ], + [ + "▁servicio", + -15.295517921447754 + ], + [ + "FFICIENT", + -15.29551887512207 + ], + [ + "▁Palomino", + -15.29551887512207 + ], + [ + "▁Velasco", + -15.295524597167969 + ], + [ + "▁Zagato", + -15.295524597167969 + ], + [ + "▁pagination", + -15.295524597167969 + ], + [ + "▁vetiver", + -15.295524597167969 + ], + [ + "▁softgels", + -15.295526504516602 + ], + [ + "▁Kaibab", + -15.295527458190918 + ], + [ + "▁Rookery", + -15.295527458190918 + ], + [ + "▁revved", + -15.295527458190918 + ], + [ + "▁trudging", + -15.295527458190918 + ], + [ + "▁Panamanian", + -15.295528411865234 + ], + [ + "▁Mahmud", + -15.295531272888184 + ], + [ + "▁Uluru", + -15.2955322265625 + ], + [ + "▁GLORY", + -15.295534133911133 + ], + [ + "▁Taxonomy", + -15.295534133911133 + ], + [ + "▁bussiness", + -15.295534133911133 + ], + [ + "▁Cheeseburger", + -15.295536041259766 + ], + [ + "▁Caprese", + -15.295536994934082 + ], + [ + "▁dakota", + -15.295536994934082 + ], + [ + "▁Dunstable", + -15.295537948608398 + ], + [ + "presbyter", + -15.295538902282717 + ], + [ + "▁terrine", + -15.295544624328612 + ], + [ + "▁Matheny", + -15.29554557800293 + ], + [ + "▁apportionment", + -15.29554557800293 + ], + [ + "▁(1956)", + -15.295546531677246 + ], + [ + "▁annulled", + -15.295546531677246 + ], + [ + "▁Ipoh", + -15.29554843902588 + ], + [ + "▁Patnaik", + -15.29554843902588 + ], + [ + "▁diverging", + -15.295550346374512 + ], + [ + "▁Camacho", + -15.295551300048828 + ], + [ + "▁Seifert", + -15.295554161071776 + ], + [ + "▁Owensboro", + -15.295562744140623 + ], + [ + "▁Kokomo", + -15.29556369781494 + ], + [ + "▁BizTalk", + -15.295565605163574 + ], + [ + "▁Kumasi", + -15.295565605163574 + ], + [ + "▁Foreword", + -15.295568466186523 + ], + [ + "▁Bruyne", + -15.29556941986084 + ], + [ + "▁Quantities", + -15.29556941986084 + ], + [ + "▁(1966)", + -15.295573234558104 + ], + [ + "▁Lisburn", + -15.295573234558104 + ], + [ + "▁unimpressive", + -15.295576095581056 + ], + [ + "▁Ozzie", + -15.295577049255373 + ], + [ + "▁exuding", + -15.295578956604004 + ], + [ + "▁Endoscopy", + -15.29557991027832 + ], + [ + "▁trampling", + -15.295586585998535 + ], + [ + "▁BioWare", + -15.295587539672852 + ], + [ + "▁molested", + -15.295594215393066 + ], + [ + "▁Peroxide", + -15.2955961227417 + ], + [ + "▁shorebirds", + -15.295597076416016 + ], + [ + "Eminent", + -15.295602798461914 + ], + [ + "▁externship", + -15.295618057250977 + ], + [ + "▁Rubble", + -15.29562282562256 + ], + [ + "▁GameCube", + -15.295624732971191 + ], + [ + "▁Carabin", + -15.295631408691406 + ], + [ + "▁Funchal", + -15.295631408691406 + ], + [ + "▁Tralee", + -15.295637130737305 + ], + [ + "▁DStv", + -15.29564380645752 + ], + [ + "Contamination", + -15.295646667480469 + ], + [ + "▁Angelique", + -15.295648574829102 + ], + [ + "▁Munchkin", + -15.295671463012695 + ], + [ + "▁Cranford", + -15.29568099975586 + ], + [ + "▁Verdot", + -15.295681953430176 + ], + [ + "▁Fondation", + -15.295686721801758 + ], + [ + "▁Bungee", + -15.295694351196287 + ], + [ + "▁ROUTE", + -15.295702934265137 + ], + [ + "▁coroutine", + -15.295703887939451 + ], + [ + "▁gigging", + -15.29570770263672 + ], + [ + "▁colonisation", + 
-15.295708656311035 + ], + [ + "▁Hulme", + -15.295717239379885 + ], + [ + "▁Robinhood", + -15.29572868347168 + ], + [ + "▁breeches", + -15.29572868347168 + ], + [ + "▁intestate", + -15.295732498168944 + ], + [ + "▁ASRock", + -15.295742988586426 + ], + [ + "▁extremly", + -15.295754432678224 + ], + [ + "▁Brookdale", + -15.295758247375488 + ], + [ + "▁reauthorization", + -15.29576015472412 + ], + [ + "▁Liberator", + -15.295774459838867 + ], + [ + "▁microcomputer", + -15.295777320861816 + ], + [ + ".03.2019", + -15.295780181884766 + ], + [ + "▁KZN", + -15.295787811279297 + ], + [ + "▁Pushkin", + -15.295787811279297 + ], + [ + "▁exult", + -15.295805931091309 + ], + [ + "▁stewart", + -15.295831680297852 + ], + [ + "▁Severus", + -15.295832633972168 + ], + [ + "▁Garvin", + -15.295879364013672 + ], + [ + "▁Craftsmen", + -15.295886993408203 + ], + [ + "▁Supernova", + -15.29588794708252 + ], + [ + "4–1", + -15.295890808105469 + ], + [ + "▁Yamada", + -15.295894622802734 + ], + [ + "▁treetops", + -15.29590892791748 + ], + [ + "▁Kunden", + -15.295942306518556 + ], + [ + "▁fallacious", + -15.295947074890137 + ], + [ + "▁ProRes", + -15.29596710205078 + ], + [ + "▁PAINTING", + -15.295990943908691 + ], + [ + "▁$4.95", + -15.295994758605955 + ], + [ + "▁2018-07-", + -15.296069145202637 + ], + [ + "▁Agustin", + -15.296124458312988 + ], + [ + "▁CRACK", + -15.29619026184082 + ], + [ + "ucchi", + -15.296204566955566 + ], + [ + "Depict", + -15.296284675598145 + ], + [ + "▁slanderous", + -15.296286582946776 + ], + [ + "▁Bartley", + -15.29629135131836 + ], + [ + "▁Haak", + -15.296320915222168 + ], + [ + "▁Chaney", + -15.296392440795898 + ], + [ + "▁Bosque", + -15.296514511108398 + ], + [ + "qvc", + -15.296648025512695 + ], + [ + "locity", + -15.296700477600098 + ], + [ + "▁polaroid", + -15.296765327453612 + ], + [ + "▁Tutsi", + -15.296805381774902 + ], + [ + "▁Faerie", + -15.296830177307127 + ], + [ + "▁Liqui", + -15.296836853027344 + ], + [ + "▁clerkship", + -15.296913146972656 + ], + [ + "uggy", + -15.296944618225098 + ], + [ + "▁impinge", + -15.297045707702637 + ], + [ + "0183", + -15.297123908996582 + ], + [ + "8:07", + -15.297130584716797 + ], + [ + "pagos", + -15.297133445739746 + ], + [ + "6,600", + -15.29725456237793 + ], + [ + "▁Fathom", + -15.29728889465332 + ], + [ + "▁Rocca", + -15.297310829162598 + ], + [ + "0.87", + -15.297341346740724 + ], + [ + "▁Purana", + -15.297409057617188 + ], + [ + "▁Amstel", + -15.297441482543944 + ], + [ + "RUCE", + -15.297478675842283 + ], + [ + "три", + -15.297561645507812 + ], + [ + "▁Communal", + -15.297723770141602 + ], + [ + "5:29", + -15.297856330871582 + ], + [ + "▁$75.00", + -15.29796314239502 + ], + [ + "▁fiendish", + -15.29800510406494 + ], + [ + "▁$5.99", + -15.29816722869873 + ], + [ + "PHASE", + -15.298208236694336 + ], + [ + "Vivi", + -15.298982620239258 + ], + [ + "▁downspout", + -15.29900360107422 + ], + [ + "Digest", + -15.299253463745115 + ], + [ + "▁Bioscience", + -15.299262046813965 + ], + [ + "▁Nordisk", + -15.299334526062012 + ], + [ + "3:54", + -15.299436569213867 + ], + [ + "ngioma", + -15.299518585205078 + ], + [ + "▁EXIST", + -15.299721717834473 + ], + [ + "Suppl", + -15.299725532531738 + ], + [ + "tersectionality", + -15.299728393554688 + ], + [ + "Civic", + -15.29995822906494 + ], + [ + "khtar", + -15.30001163482666 + ], + [ + "0.94", + -15.300071716308594 + ], + [ + "Shortlisted", + -15.300251007080078 + ], + [ + "Tradition", + -15.300670623779297 + ], + [ + "▁Tickle", + -15.300731658935549 + ], + [ + "Vaccine", + -15.301142692565918 + ], + [ + 
"Castell", + -15.30123519897461 + ], + [ + "coordinate", + -15.301312446594238 + ], + [ + "▁disaggregat", + -15.301366806030272 + ], + [ + "decorating", + -15.30138111114502 + ], + [ + "Falcon", + -15.301408767700195 + ], + [ + "Beneath", + -15.301421165466309 + ], + [ + "Diagnosis", + -15.301431655883787 + ], + [ + "Prophet", + -15.301432609558104 + ], + [ + "Withdrawal", + -15.301433563232422 + ], + [ + "Bohemian", + -15.301437377929688 + ], + [ + "Incident", + -15.3014497756958 + ], + [ + "Knife", + -15.30145263671875 + ], + [ + "Presence", + -15.301454544067385 + ], + [ + "Extraordinary", + -15.3014554977417 + ], + [ + "Archbishop", + -15.301456451416016 + ], + [ + "illustration", + -15.301457405090332 + ], + [ + "Airbnb", + -15.30146026611328 + ], + [ + "concentrate", + -15.30146026611328 + ], + [ + "Aloha", + -15.30146312713623 + ], + [ + "Ivory", + -15.30146312713623 + ], + [ + "Wagner", + -15.301464080810549 + ], + [ + "Triumph", + -15.301465034484863 + ], + [ + "Wyoming", + -15.301468849182127 + ], + [ + "examining", + -15.301468849182127 + ], + [ + "repository", + -15.301470756530762 + ], + [ + "ambitious", + -15.301472663879396 + ], + [ + "Laurel", + -15.301473617553713 + ], + [ + "McCain", + -15.301473617553713 + ], + [ + "involving", + -15.301474571228027 + ], + [ + "Snapchat", + -15.301480293273926 + ], + [ + "Bridal", + -15.301483154296877 + ], + [ + "Junction", + -15.301485061645508 + ], + [ + "Simulation", + -15.301488876342772 + ], + [ + "summary", + -15.301488876342772 + ], + [ + "Windsor", + -15.30148983001709 + ], + [ + "Flint", + -15.301490783691406 + ], + [ + "accessories", + -15.301493644714355 + ], + [ + "Ronnie", + -15.301498413085938 + ], + [ + "Universe", + -15.301499366760254 + ], + [ + "enthusiastic", + -15.301504135131836 + ], + [ + "Fraser", + -15.301505088806152 + ], + [ + "surcharge", + -15.301506042480469 + ], + [ + "Amelia", + -15.301508903503418 + ], + [ + "tightening", + -15.301508903503418 + ], + [ + "Ledger", + -15.301509857177734 + ], + [ + "Dishwasher", + -15.301515579223633 + ], + [ + "dialogue", + -15.301517486572266 + ], + [ + "Ethernet", + -15.301522254943848 + ], + [ + "Aviation", + -15.301530838012695 + ], + [ + "Hoffman", + -15.301543235778809 + ], + [ + "everybody", + -15.301562309265137 + ], + [ + "Arcade", + -15.301590919494627 + ], + [ + "Payroll", + -15.301603317260742 + ], + [ + "Kaiser", + -15.301607131958008 + ], + [ + "Oncology", + -15.30161190032959 + ], + [ + "Saute", + -15.301612854003906 + ], + [ + "Trivia", + -15.301629066467283 + ], + [ + "Clayton", + -15.30163860321045 + ], + [ + "Dashboard", + -15.301753997802734 + ], + [ + "thickness", + -15.301761627197266 + ], + [ + "newspaper", + -15.301924705505373 + ], + [ + "torque", + -15.302078247070312 + ], + [ + "Pedro", + -15.302106857299805 + ], + [ + "iiiii", + -15.30211353302002 + ], + [ + "▁MENU", + -15.302115440368652 + ], + [ + "▁Sauber", + -15.302163124084473 + ], + [ + "HORN", + -15.30268669128418 + ], + [ + "5(2):", + -15.302810668945312 + ], + [ + "▁hace", + -15.30294132232666 + ], + [ + "4:35", + -15.302945137023926 + ], + [ + "▁Sniff", + -15.302955627441406 + ], + [ + "▁Sokolov", + -15.302958488464355 + ], + [ + "dustrialisation", + -15.3031587600708 + ], + [ + "Hugo", + -15.303315162658691 + ], + [ + "7,400", + -15.303473472595217 + ], + [ + "Jeune", + -15.30349826812744 + ], + [ + "Dilute", + -15.30357551574707 + ], + [ + "bouche", + -15.303924560546877 + ], + [ + "▁Chako", + -15.303926467895508 + ], + [ + "▁immunohisto", + -15.304156303405762 + ], + [ + "aktu", + 
-15.304158210754396 + ], + [ + "▁bowtie", + -15.304179191589355 + ], + [ + "vocational", + -15.304203033447266 + ], + [ + "▁pathologic", + -15.304304122924805 + ], + [ + "13-7", + -15.304319381713867 + ], + [ + "toledo", + -15.30445957183838 + ], + [ + "scrub", + -15.304553031921388 + ], + [ + "ominated", + -15.304648399353027 + ], + [ + "Pablo", + -15.304752349853516 + ], + [ + "canthus", + -15.304837226867676 + ], + [ + "3:09", + -15.30487823486328 + ], + [ + "▁THOR", + -15.305171012878418 + ], + [ + "▁Riaz", + -15.305989265441896 + ], + [ + "Operational", + -15.306048393249512 + ], + [ + "▁Biju", + -15.30608081817627 + ], + [ + "avasana", + -15.3060884475708 + ], + [ + "▁Kassel", + -15.306442260742188 + ], + [ + "▁HEMI", + -15.306490898132324 + ], + [ + "HINT", + -15.306779861450195 + ], + [ + "▁0208", + -15.30685329437256 + ], + [ + "▁einige", + -15.30696964263916 + ], + [ + "▁RealVect", + -15.307063102722168 + ], + [ + "▁simpli", + -15.307085037231444 + ], + [ + "▁Boerne", + -15.307178497314451 + ], + [ + "lune", + -15.307448387145996 + ], + [ + "▁($18", + -15.307955741882324 + ], + [ + "▁Mossad", + -15.30799388885498 + ], + [ + "Viral", + -15.308045387268066 + ], + [ + "neurysm", + -15.308279037475586 + ], + [ + "▁naphtha", + -15.308313369750977 + ], + [ + "strophysical", + -15.308341979980469 + ], + [ + "▁Abdulla", + -15.308403968811035 + ], + [ + "6.67", + -15.308459281921388 + ], + [ + "Plankton", + -15.308469772338867 + ], + [ + "Betting", + -15.308523178100586 + ], + [ + "▁Chimera", + -15.309059143066406 + ], + [ + "borate", + -15.309212684631348 + ], + [ + "▁shelve", + -15.309330940246582 + ], + [ + "ologue", + -15.30933952331543 + ], + [ + "1590", + -15.309340476989746 + ], + [ + "Impress", + -15.309633255004885 + ], + [ + "▁$349", + -15.309670448303224 + ], + [ + "8:14", + -15.309680938720703 + ], + [ + "▁DISCOVER", + -15.30970859527588 + ], + [ + "Uninstall", + -15.309895515441896 + ], + [ + "ozza", + -15.309911727905272 + ], + [ + "▁Faul", + -15.30996322631836 + ], + [ + "TRADE", + -15.3099946975708 + ], + [ + "▁adjudicate", + -15.310054779052734 + ], + [ + "autres", + -15.310063362121582 + ], + [ + "▁Assyria", + -15.310064315795898 + ], + [ + "▁0.0000", + -15.31025218963623 + ], + [ + "▁Switzer", + -15.310628890991213 + ], + [ + "ceuticals", + -15.310647010803224 + ], + [ + "PITTSBURGH", + -15.310657501220703 + ], + [ + "Remuneration", + -15.310657501220703 + ], + [ + "Shippensburg", + -15.310657501220703 + ], + [ + "Smirnoff", + -15.310657501220703 + ], + [ + "Sébastien", + -15.310657501220703 + ], + [ + "culturist", + -15.310657501220703 + ], + [ + "▁AFFILIATES", + -15.310657501220703 + ], + [ + "▁Aberystwyth", + -15.310657501220703 + ], + [ + "▁Ambivli", + -15.310657501220703 + ], + [ + "▁Apparatus", + -15.310657501220703 + ], + [ + "▁Bewertung", + -15.310657501220703 + ], + [ + "▁Byzantium", + -15.310657501220703 + ], + [ + "▁CABINET", + -15.310657501220703 + ], + [ + "▁Exchequer", + -15.310657501220703 + ], + [ + "▁FAILURE", + -15.310657501220703 + ], + [ + "▁Frankincense", + -15.310657501220703 + ], + [ + "▁Gbagbo", + -15.310657501220703 + ], + [ + "▁Guacamole", + -15.310657501220703 + ], + [ + "▁Hygienist", + -15.310657501220703 + ], + [ + "▁JUNCTION", + -15.310657501220703 + ], + [ + "▁LUMPUR", + -15.310657501220703 + ], + [ + "▁McConaughey", + -15.310657501220703 + ], + [ + "▁Muzaffar", + -15.310657501220703 + ], + [ + "▁Oladipo", + -15.310657501220703 + ], + [ + "▁Pavillion", + -15.310657501220703 + ], + [ + "▁Tennessean", + -15.310657501220703 + ], + [ + 
"▁Transducer", + -15.310657501220703 + ], + [ + "▁Veyron", + -15.310657501220703 + ], + [ + "▁confortable", + -15.310657501220703 + ], + [ + "▁deliberative", + -15.310657501220703 + ], + [ + "▁demonetisation", + -15.310657501220703 + ], + [ + "▁emaciated", + -15.310657501220703 + ], + [ + "▁epitomizing", + -15.310657501220703 + ], + [ + "▁keppra", + -15.310657501220703 + ], + [ + "▁militaries", + -15.310657501220703 + ], + [ + "▁pneumococcal", + -15.310657501220703 + ], + [ + "▁polysaccharides", + -15.310657501220703 + ], + [ + "▁portmanteau", + -15.310657501220703 + ], + [ + "▁prostaglandin", + -15.310657501220703 + ], + [ + "▁stapling", + -15.310657501220703 + ], + [ + "▁telephoning", + -15.310657501220703 + ], + [ + "▁zolpidem", + -15.310657501220703 + ], + [ + "Anecdotal", + -15.31065845489502 + ], + [ + "Coleoptera", + -15.31065845489502 + ], + [ + "Glutamine", + -15.31065845489502 + ], + [ + "JERSEY", + -15.31065845489502 + ], + [ + "Obsolete", + -15.31065845489502 + ], + [ + "▁ANTONIO", + -15.31065845489502 + ], + [ + "▁Airdrie", + -15.31065845489502 + ], + [ + "▁Contreras", + -15.31065845489502 + ], + [ + "▁Guarantor", + -15.31065845489502 + ], + [ + "▁Hibernian", + -15.31065845489502 + ], + [ + "▁INSTALLATION", + -15.31065845489502 + ], + [ + "▁Livelihood", + -15.31065845489502 + ], + [ + "▁Raikkonen", + -15.31065845489502 + ], + [ + "▁Torpedo", + -15.31065845489502 + ], + [ + "▁dhcp", + -15.31065845489502 + ], + [ + "▁luthier", + -15.31065845489502 + ], + [ + "▁pueblo", + -15.31065845489502 + ], + [ + "▁quagmire", + -15.31065845489502 + ], + [ + "▁unconvinced", + -15.31065845489502 + ], + [ + "Disabling", + -15.310659408569336 + ], + [ + "LIBRARY", + -15.310659408569336 + ], + [ + "▁Abreu", + -15.310659408569336 + ], + [ + "▁Bhagavad", + -15.310659408569336 + ], + [ + "▁Compartment", + -15.310659408569336 + ], + [ + "▁Conveyance", + -15.310659408569336 + ], + [ + "▁Hoxton", + -15.310659408569336 + ], + [ + "▁Kuiper", + -15.310659408569336 + ], + [ + "▁Vivekananda", + -15.310659408569336 + ], + [ + "▁nanostructures", + -15.310659408569336 + ], + [ + "▁reviled", + -15.310659408569336 + ], + [ + "▁Padgett", + -15.310660362243652 + ], + [ + "▁farmacia", + -15.310660362243652 + ], + [ + "▁Apopka", + -15.310661315917969 + ], + [ + "▁Coupé", + -15.310661315917969 + ], + [ + "▁animatronic", + -15.310661315917969 + ], + [ + "▁Psychotherapist", + -15.310662269592283 + ], + [ + "▁Yamaguchi", + -15.310662269592283 + ], + [ + "▁CHEESE", + -15.310663223266602 + ], + [ + "▁EFFECTIVE", + -15.310663223266602 + ], + [ + "▁Sturgis", + -15.310663223266602 + ], + [ + "▁alkalinity", + -15.310663223266602 + ], + [ + "▁petiole", + -15.310663223266602 + ], + [ + "Suffrage", + -15.310664176940918 + ], + [ + "▁Cronulla", + -15.310664176940918 + ], + [ + "▁avowed", + -15.310664176940918 + ], + [ + "▁Gretna", + -15.31066608428955 + ], + [ + "▁Giulio", + -15.310667037963867 + ], + [ + "▁pauper", + -15.310667037963867 + ], + [ + "▁Liaoning", + -15.310667991638184 + ], + [ + "▁Ramakrishna", + -15.310667991638184 + ], + [ + "aqbanking", + -15.3106689453125 + ], + [ + "▁Grampian", + -15.310669898986816 + ], + [ + "▁Ravindra", + -15.310669898986816 + ], + [ + "▁interweaving", + -15.310669898986816 + ], + [ + "▁Dependable", + -15.31067180633545 + ], + [ + "▁intruding", + -15.31067180633545 + ], + [ + "▁Neurobiology", + -15.310672760009766 + ], + [ + "esteiron", + -15.310673713684082 + ], + [ + "▁Shroud", + -15.310673713684082 + ], + [ + "▁Cleaver", + -15.310674667358398 + ], + [ + "▁$900,000", + -15.310675621032717 
+ ], + [ + "▁adderall", + -15.310675621032717 + ], + [ + "Midfielder", + -15.310676574707031 + ], + [ + "▁Geophysics", + -15.310680389404297 + ], + [ + "▁Adafruit", + -15.310681343078612 + ], + [ + "ntipasto", + -15.31068229675293 + ], + [ + "▁Silvio", + -15.310683250427246 + ], + [ + "joseph", + -15.31068515777588 + ], + [ + "▁rumination", + -15.31068515777588 + ], + [ + "▁Saxena", + -15.310686111450195 + ], + [ + "▁Gagnon", + -15.310687065124512 + ], + [ + "▁globular", + -15.310688018798828 + ], + [ + "▁Biodegradable", + -15.310688972473145 + ], + [ + "▁Agrawal", + -15.31068992614746 + ], + [ + "▁emacs", + -15.310690879821776 + ], + [ + "▁jugular", + -15.310690879821776 + ], + [ + "▁Wimpy", + -15.310691833496094 + ], + [ + "▁Ayesha", + -15.310693740844728 + ], + [ + "▁Beggar", + -15.310693740844728 + ], + [ + "▁MiFID", + -15.310694694519045 + ], + [ + "▁queasy", + -15.310694694519045 + ], + [ + "▁Bhajan", + -15.31069564819336 + ], + [ + "▁Hydrogenated", + -15.310697555541992 + ], + [ + "▁organelles", + -15.310697555541992 + ], + [ + "▁polytechnic", + -15.310698509216309 + ], + [ + "▁Mercator", + -15.310699462890623 + ], + [ + "▁Forerunner", + -15.31070041656494 + ], + [ + "▁vindication", + -15.31070041656494 + ], + [ + "▁humdrum", + -15.310705184936523 + ], + [ + "▁Ought", + -15.310708999633787 + ], + [ + "/09/2018", + -15.310709953308104 + ], + [ + "▁Gambino", + -15.310709953308104 + ], + [ + "▁slurred", + -15.310709953308104 + ], + [ + "▁cytotoxicity", + -15.310711860656738 + ], + [ + "▁43-101", + -15.310713768005373 + ], + [ + "▁trivet", + -15.310715675354004 + ], + [ + "▁Monoxide", + -15.31071662902832 + ], + [ + "▁(619)", + -15.310721397399902 + ], + [ + "▁immobility", + -15.310723304748535 + ], + [ + "▁nipped", + -15.310725212097168 + ], + [ + "▁Sculpting", + -15.310726165771484 + ], + [ + "▁Ostrich", + -15.310729026794434 + ], + [ + "▁guzzle", + -15.310729026794434 + ], + [ + "▁Montfort", + -15.31072998046875 + ], + [ + "▁jammies", + -15.31072998046875 + ], + [ + "▁(1965)", + -15.310730934143066 + ], + [ + "▁valorem", + -15.310735702514648 + ], + [ + "▁Ukip", + -15.310741424560549 + ], + [ + "▁Dengue", + -15.310750961303713 + ], + [ + "▁moyen", + -15.31075382232666 + ], + [ + "▁Milliken", + -15.310754776000977 + ], + [ + ":8002", + -15.310758590698242 + ], + [ + "▁bioengineering", + -15.310760498046877 + ], + [ + "▁vestments", + -15.310761451721191 + ], + [ + "▁Vajra", + -15.310762405395508 + ], + [ + "▁Pulver", + -15.31076717376709 + ], + [ + "▁Boudreaux", + -15.310769081115724 + ], + [ + "▁SAARC", + -15.310769081115724 + ], + [ + "▁jacksonville", + -15.310770988464355 + ], + [ + "▁Brinkley", + -15.310771942138672 + ], + [ + "▁Marsalis", + -15.310776710510254 + ], + [ + "▁foregone", + -15.310782432556152 + ], + [ + "HAMMER", + -15.310785293579102 + ], + [ + "▁Flatiron", + -15.31078815460205 + ], + [ + "▁Teigen", + -15.310791015625 + ], + [ + "▁Oolong", + -15.310796737670898 + ], + [ + "▁sinensis", + -15.310798645019531 + ], + [ + "▁Lanvin", + -15.31080436706543 + ], + [ + "▁Metroplex", + -15.310806274414062 + ], + [ + "▁Wynwood", + -15.310821533203123 + ], + [ + "▁Headingley", + -15.310845375061035 + ], + [ + "whale", + -15.3108491897583 + ], + [ + "▁Ambler", + -15.3108491897583 + ], + [ + "▁Ahrens", + -15.310850143432615 + ], + [ + "▁psalmist", + -15.310857772827148 + ], + [ + "9-36", + -15.310867309570312 + ], + [ + "▁tabulated", + -15.310868263244627 + ], + [ + "▁replicable", + -15.310871124267578 + ], + [ + "▁Osmond", + -15.310873031616213 + ], + [ + "▁unattached", + 
-15.310873031616213 + ], + [ + "▁Vedra", + -15.31087875366211 + ], + [ + "▁Gilpin", + -15.310894966125488 + ], + [ + "9(1):", + -15.310895919799805 + ], + [ + "▁FATHER", + -15.310905456542969 + ], + [ + "▁2006-07", + -15.310908317565918 + ], + [ + "▁Mobley", + -15.310909271240234 + ], + [ + "▁Chieftain", + -15.310911178588867 + ], + [ + "▁Makerspace", + -15.310914039611816 + ], + [ + "▁Condit", + -15.310918807983398 + ], + [ + "▁interpretative", + -15.310918807983398 + ], + [ + "▁Eltham", + -15.310935020446776 + ], + [ + "▁sternum", + -15.310935020446776 + ], + [ + "▁Romford", + -15.310937881469728 + ], + [ + "▁prejudicial", + -15.310946464538574 + ], + [ + "▁stubby", + -15.31095027923584 + ], + [ + "▁Molten", + -15.310953140258787 + ], + [ + "▁Worley", + -15.310956001281738 + ], + [ + "rotary", + -15.31098461151123 + ], + [ + "▁Jumpsuit", + -15.31100368499756 + ], + [ + "▁HairMax", + -15.31101131439209 + ], + [ + "▁Deuce", + -15.311028480529783 + ], + [ + "▁0:1", + -15.311054229736328 + ], + [ + "▁Niven", + -15.311092376708984 + ], + [ + "▁Redditch", + -15.311100006103516 + ], + [ + "▁Arezzo", + -15.311108589172363 + ], + [ + "jalan", + -15.311116218566896 + ], + [ + "▁Falun", + -15.311137199401855 + ], + [ + "▁recompile", + -15.31115436553955 + ], + [ + "▁conned", + -15.311161994934082 + ], + [ + "▁gegen", + -15.311202049255373 + ], + [ + "▁Roddy", + -15.311345100402832 + ], + [ + "▁Pancho", + -15.311366081237791 + ], + [ + "▁05:2", + -15.311372756958008 + ], + [ + "▁acrid", + -15.311467170715332 + ], + [ + "▁Tawny", + -15.31153964996338 + ], + [ + "▁phosphoric", + -15.311552047729492 + ], + [ + "lauf", + -15.31162452697754 + ], + [ + "▁YANG", + -15.311704635620115 + ], + [ + "▁Batik", + -15.311717987060549 + ], + [ + "▁Coombe", + -15.311768531799316 + ], + [ + "▁Larne", + -15.311768531799316 + ], + [ + "▁Valiba", + -15.311893463134766 + ], + [ + "▁thermoelectric", + -15.311902046203612 + ], + [ + "▁genset", + -15.311985969543455 + ], + [ + "▁Carmelite", + -15.312034606933594 + ], + [ + "Requisition", + -15.312211990356444 + ], + [ + "▁Tarek", + -15.312384605407717 + ], + [ + "▁amaranth", + -15.312416076660156 + ], + [ + "ETOC", + -15.312429428100586 + ], + [ + "ZYME", + -15.31243133544922 + ], + [ + "▁Merkle", + -15.312600135803224 + ], + [ + "EVAL", + -15.312641143798828 + ], + [ + "Liaise", + -15.312686920166016 + ], + [ + "▁05:1", + -15.312858581542969 + ], + [ + "▁TRIAL", + -15.313079833984377 + ], + [ + "▁TRIPLE", + -15.313264846801758 + ], + [ + "METER", + -15.31337070465088 + ], + [ + "pubescent", + -15.31338596343994 + ], + [ + "▁jeopardise", + -15.31341552734375 + ], + [ + "COPPA", + -15.313494682312012 + ], + [ + "1633", + -15.31357765197754 + ], + [ + "1887", + -15.313718795776367 + ], + [ + "radius", + -15.31387996673584 + ], + [ + "Quart", + -15.314087867736816 + ], + [ + "Incentiv", + -15.314233779907228 + ], + [ + "▁harpist", + -15.314239501953123 + ], + [ + "sniff", + -15.314468383789062 + ], + [ + "▁extol", + -15.314478874206545 + ], + [ + "9:46", + -15.314576148986816 + ], + [ + "2:49", + -15.314624786376951 + ], + [ + "Condensed", + -15.31468391418457 + ], + [ + "▁Changsha", + -15.314752578735352 + ], + [ + "▁Cragg", + -15.314865112304688 + ], + [ + "alamansi", + -15.314884185791016 + ], + [ + "▁Nawab", + -15.31495189666748 + ], + [ + "Torch", + -15.315032958984377 + ], + [ + "ilroy", + -15.315134048461914 + ], + [ + "8:37", + -15.315262794494627 + ], + [ + "osoma", + -15.315471649169922 + ], + [ + "stolic", + -15.315550804138184 + ], + [ + "▁fritter", + 
-15.315616607666016 + ], + [ + "nominal", + -15.31565284729004 + ], + [ + "▁Leonhard", + -15.315746307373049 + ], + [ + "kingdom", + -15.316021919250488 + ], + [ + "qiu", + -15.316262245178224 + ], + [ + "5:05", + -15.316265106201172 + ], + [ + "Woah", + -15.316343307495115 + ], + [ + "▁Captivate", + -15.3165283203125 + ], + [ + "Marcia", + -15.3165864944458 + ], + [ + "7(1):", + -15.316876411437988 + ], + [ + "Underneath", + -15.31701946258545 + ], + [ + "Visibility", + -15.31703281402588 + ], + [ + "Watermelon", + -15.317034721374512 + ], + [ + "Accuracy", + -15.31703758239746 + ], + [ + "censor", + -15.317044258117676 + ], + [ + "Aesthetic", + -15.317045211791992 + ], + [ + "Dissertation", + -15.31704807281494 + ], + [ + "Hound", + -15.317061424255373 + ], + [ + "Seventeen", + -15.317062377929688 + ], + [ + "Alphabet", + -15.317065238952637 + ], + [ + "Landlord", + -15.317070960998535 + ], + [ + "survivor", + -15.317073822021484 + ], + [ + "Pinnacle", + -15.317076683044434 + ], + [ + "Tobacco", + -15.31707763671875 + ], + [ + "Advocacy", + -15.317079544067385 + ], + [ + "Larson", + -15.317079544067385 + ], + [ + "Audrey", + -15.3170804977417 + ], + [ + "Responsibility", + -15.317081451416016 + ], + [ + "Tottenham", + -15.317081451416016 + ], + [ + "turbine", + -15.317094802856444 + ], + [ + "automotive", + -15.31710433959961 + ], + [ + "Opposition", + -15.317105293273926 + ], + [ + "Monroe", + -15.31710720062256 + ], + [ + "funnel", + -15.317111015319824 + ], + [ + "Ethical", + -15.31711483001709 + ], + [ + "Freddie", + -15.317118644714355 + ], + [ + "Cypress", + -15.317120552062988 + ], + [ + "fifty", + -15.31713581085205 + ], + [ + "himself", + -15.317166328430176 + ], + [ + "preserving", + -15.31718635559082 + ], + [ + "Obamacare", + -15.317187309265137 + ], + [ + "Interact", + -15.317214012145996 + ], + [ + "systematic", + -15.317245483398438 + ], + [ + "HDMI", + -15.31725788116455 + ], + [ + "environmentalism", + -15.317261695861816 + ], + [ + "Supplemental", + -15.31727123260498 + ], + [ + "4:33", + -15.31727695465088 + ], + [ + "Kerala", + -15.317337989807127 + ], + [ + "Jacksonville", + -15.317340850830078 + ], + [ + "Latvia", + -15.317376136779783 + ], + [ + "peaceful", + -15.317394256591797 + ], + [ + "overdue", + -15.317408561706545 + ], + [ + "Defensively", + -15.317412376403809 + ], + [ + "Unified", + -15.317423820495604 + ], + [ + "tariff", + -15.317493438720703 + ], + [ + "Implant", + -15.31766414642334 + ], + [ + "scary", + -15.317773818969728 + ], + [ + "Lydia", + -15.318007469177246 + ], + [ + "5:45", + -15.318022727966309 + ], + [ + "GROUP", + -15.318059921264648 + ], + [ + "▁$150.00", + -15.318188667297363 + ], + [ + "▁$5.4", + -15.318841934204102 + ], + [ + "1.83", + -15.318888664245604 + ], + [ + "3(2):", + -15.319208145141602 + ], + [ + "harbor", + -15.31942653656006 + ], + [ + "▁Rhineland", + -15.319437980651855 + ], + [ + "alban", + -15.319585800170898 + ], + [ + "▁Eleg", + -15.320119857788086 + ], + [ + "Birch", + -15.320297241210938 + ], + [ + "Consist", + -15.320362091064451 + ], + [ + "Jorge", + -15.320805549621582 + ], + [ + "8:32", + -15.32106590270996 + ], + [ + "▁Daub", + -15.321187019348145 + ], + [ + "sancti", + -15.321677207946776 + ], + [ + "1.96", + -15.321857452392578 + ], + [ + "▁PEEP", + -15.32201862335205 + ], + [ + "terrace", + -15.322216987609863 + ], + [ + "▁chanel", + -15.322771072387695 + ], + [ + "▁Pohl", + -15.32294464111328 + ], + [ + "[2012", + -15.322949409484863 + ], + [ + "13-11", + -15.323108673095703 + ], + [ + "▁Numeric", + 
-15.323156356811523 + ], + [ + "4:21", + -15.323301315307615 + ], + [ + "travers", + -15.32359790802002 + ], + [ + "▁Maniac", + -15.323758125305176 + ], + [ + "7:08", + -15.323882102966309 + ], + [ + "Holistic", + -15.324076652526855 + ], + [ + "JECT", + -15.324615478515623 + ], + [ + "Domenic", + -15.324641227722168 + ], + [ + "Gothic", + -15.324868202209473 + ], + [ + "latino", + -15.324904441833496 + ], + [ + "1877", + -15.324910163879396 + ], + [ + "haku", + -15.325121879577637 + ], + [ + "organise", + -15.325504302978516 + ], + [ + "omprised", + -15.325570106506348 + ], + [ + "TRATE", + -15.32559299468994 + ], + [ + "▁Lubri", + -15.325664520263672 + ], + [ + "▁discolour", + -15.325689315795898 + ], + [ + "caution", + -15.325881958007812 + ], + [ + "▁11:40", + -15.325908660888672 + ], + [ + "0:27", + -15.325926780700684 + ], + [ + "▁Convict", + -15.325931549072266 + ], + [ + "onoclonal", + -15.325982093811035 + ], + [ + "▁Zircon", + -15.326006889343262 + ], + [ + "4:28", + -15.32603359222412 + ], + [ + "baptist", + -15.326034545898438 + ], + [ + "COLUMBUS", + -15.326042175292969 + ], + [ + "Etisalat", + -15.326042175292969 + ], + [ + "Levetiracetam", + -15.326042175292969 + ], + [ + "Participatory", + -15.326042175292969 + ], + [ + "▁Acapulco", + -15.326042175292969 + ], + [ + "▁Arabidopsis", + -15.326042175292969 + ], + [ + "▁CEILING", + -15.326042175292969 + ], + [ + "▁Copernicus", + -15.326042175292969 + ], + [ + "▁Dorfman", + -15.326042175292969 + ], + [ + "▁Dwarka", + -15.326042175292969 + ], + [ + "▁EXECUTIVE", + -15.326042175292969 + ], + [ + "▁Estepona", + -15.326042175292969 + ], + [ + "▁Expiration", + -15.326042175292969 + ], + [ + "▁Feldenkrais", + -15.326042175292969 + ], + [ + "▁Folkestone", + -15.326042175292969 + ], + [ + "▁Jabalpur", + -15.326042175292969 + ], + [ + "▁Kodaikanal", + -15.326042175292969 + ], + [ + "▁Linalool", + -15.326042175292969 + ], + [ + "▁Liqueur", + -15.326042175292969 + ], + [ + "▁Muammar", + -15.326042175292969 + ], + [ + "▁Negotiator", + -15.326042175292969 + ], + [ + "▁Nowitzki", + -15.326042175292969 + ], + [ + "▁Omegle", + -15.326042175292969 + ], + [ + "▁Pagalworld", + -15.326042175292969 + ], + [ + "▁Parlour", + -15.326042175292969 + ], + [ + "▁Peloponnes", + -15.326042175292969 + ], + [ + "▁Prednisone", + -15.326042175292969 + ], + [ + "▁Pujols", + -15.326042175292969 + ], + [ + "▁QUIET", + -15.326042175292969 + ], + [ + "▁Quaternary", + -15.326042175292969 + ], + [ + "▁Sandringham", + -15.326042175292969 + ], + [ + "▁Sangiovese", + -15.326042175292969 + ], + [ + "▁Vagabond", + -15.326042175292969 + ], + [ + "▁Viognier", + -15.326042175292969 + ], + [ + "▁WHATSOEVER", + -15.326042175292969 + ], + [ + "▁acolyte", + -15.326042175292969 + ], + [ + "▁arthroplasty", + -15.326042175292969 + ], + [ + "▁catacombs", + -15.326042175292969 + ], + [ + "▁celeriac", + -15.326042175292969 + ], + [ + "▁deliberating", + -15.326042175292969 + ], + [ + "▁espadrille", + -15.326042175292969 + ], + [ + "▁gestural", + -15.326042175292969 + ], + [ + "▁glucocorticoid", + -15.326042175292969 + ], + [ + "▁gravitating", + -15.326042175292969 + ], + [ + "▁halloumi", + -15.326042175292969 + ], + [ + "▁henchmen", + -15.326042175292969 + ], + [ + "▁laparoscopy", + -15.326042175292969 + ], + [ + "▁malfeasance", + -15.326042175292969 + ], + [ + "▁nuestra", + -15.326042175292969 + ], + [ + "▁polygamy", + -15.326042175292969 + ], + [ + "▁relapsing", + -15.326042175292969 + ], + [ + "▁seltzer", + -15.326042175292969 + ], + [ + "▁transparencies", + -15.326042175292969 + ], + 
[ + "▁tribulus", + -15.326042175292969 + ], + [ + "▁ukraine", + -15.326042175292969 + ], + [ + "▁uncompromised", + -15.326042175292969 + ], + [ + "Commemorat", + -15.326043128967283 + ], + [ + "Escalante", + -15.326043128967283 + ], + [ + "▁Cardamom", + -15.326043128967283 + ], + [ + "▁Cheadle", + -15.326043128967283 + ], + [ + "▁Khomeini", + -15.326043128967283 + ], + [ + "▁Misdemeanor", + -15.326043128967283 + ], + [ + "▁centurion", + -15.326043128967283 + ], + [ + "▁gingival", + -15.326043128967283 + ], + [ + "▁summative", + -15.326043128967283 + ], + [ + "▁Applicator", + -15.326044082641602 + ], + [ + "▁Cinematographer", + -15.326044082641602 + ], + [ + "▁Escorts", + -15.326044082641602 + ], + [ + "▁Kleenex", + -15.326044082641602 + ], + [ + "▁Prieto", + -15.326044082641602 + ], + [ + "▁Tesoro", + -15.326044082641602 + ], + [ + "▁mezcal", + -15.326044082641602 + ], + [ + "UNKNOWN", + -15.326045036315918 + ], + [ + "afavieh", + -15.326045036315918 + ], + [ + "▁BRITISH", + -15.326045036315918 + ], + [ + "▁Hypothesis", + -15.326045989990234 + ], + [ + "▁Sourdough", + -15.326045989990234 + ], + [ + "▁Sydenham", + -15.326045989990234 + ], + [ + "▁annealed", + -15.326045989990234 + ], + [ + "▁livability", + -15.326045989990234 + ], + [ + "▁qartulad", + -15.326045989990234 + ], + [ + "▁snafu", + -15.326045989990234 + ], + [ + "▁Meticulously", + -15.32604694366455 + ], + [ + "▁Stenographer", + -15.32604694366455 + ], + [ + "▁agronomic", + -15.32604694366455 + ], + [ + "▁alibaba", + -15.32604694366455 + ], + [ + "▁ACTIVITY", + -15.326047897338867 + ], + [ + "▁Daewoo", + -15.326047897338867 + ], + [ + "▁strangling", + -15.326047897338867 + ], + [ + "▁Beauregard", + -15.326048851013184 + ], + [ + "▁Invermere", + -15.326048851013184 + ], + [ + "▁fringing", + -15.326048851013184 + ], + [ + "rachnid", + -15.3260498046875 + ], + [ + "▁rascal", + -15.3260498046875 + ], + [ + "▁Guerlain", + -15.326050758361816 + ], + [ + "▁goaltending", + -15.326050758361816 + ], + [ + "▁culpable", + -15.326051712036133 + ], + [ + "▁solvable", + -15.326051712036133 + ], + [ + "▁Rajeev", + -15.32605266571045 + ], + [ + "▁greece", + -15.32605266571045 + ], + [ + "▁Choreography", + -15.326053619384766 + ], + [ + "▁Naukri", + -15.326053619384766 + ], + [ + "▁Visayas", + -15.326053619384766 + ], + [ + "▁burpees", + -15.326053619384766 + ], + [ + "▁thievery", + -15.326053619384766 + ], + [ + "▁writhing", + -15.326053619384766 + ], + [ + "▁Derrida", + -15.326055526733398 + ], + [ + "▁Pfaff", + -15.326055526733398 + ], + [ + "▁uglier", + -15.326055526733398 + ], + [ + "Hematite", + -15.326056480407717 + ], + [ + "▁suturing", + -15.326057434082031 + ], + [ + "▁Sassoon", + -15.326062202453612 + ], + [ + "▁Zoloft", + -15.326062202453612 + ], + [ + "▁Insanity", + -15.32606315612793 + ], + [ + "▁lexus", + -15.32606315612793 + ], + [ + "9:27", + -15.326064109802246 + ], + [ + "▁Gannett", + -15.326064109802246 + ], + [ + "▁Hydrolyz", + -15.326064109802246 + ], + [ + "▁Yorba", + -15.326072692871094 + ], + [ + "▁outrigger", + -15.326072692871094 + ], + [ + "▁percolate", + -15.326072692871094 + ], + [ + "▁Braveheart", + -15.326074600219728 + ], + [ + "▁Brembo", + -15.326074600219728 + ], + [ + "▁Charmaine", + -15.326074600219728 + ], + [ + "hereafter", + -15.326078414916992 + ], + [ + "▁Phenix", + -15.326079368591309 + ], + [ + "▁uTorrent", + -15.326079368591309 + ], + [ + "▁Hyrule", + -15.32608127593994 + ], + [ + "▁Doosan", + -15.32608413696289 + ], + [ + "▁Aldershot", + -15.326086044311523 + ], + [ + "▁Negev", + -15.326086044311523 + 
], + [ + "▁Jayhawks", + -15.326088905334473 + ], + [ + "▁Poncho", + -15.326089859008787 + ], + [ + "▁TurboTax", + -15.326089859008787 + ], + [ + "▁montreal", + -15.326089859008787 + ], + [ + "▁falsetto", + -15.326091766357422 + ], + [ + "▁LabVIEW", + -15.326092720031738 + ], + [ + "▁praxis", + -15.326092720031738 + ], + [ + "▁droning", + -15.326093673706056 + ], + [ + "▁(860)", + -15.326095581054688 + ], + [ + "▁cytoplasmic", + -15.326098442077637 + ], + [ + "/11/2018", + -15.326101303100586 + ], + [ + "▁Toomey", + -15.326101303100586 + ], + [ + "▁FIXED", + -15.326102256774902 + ], + [ + "▁lumberjack", + -15.326106071472168 + ], + [ + "▁Luffy", + -15.326107025146484 + ], + [ + "▁Novotel", + -15.326107025146484 + ], + [ + "▁Antigone", + -15.326111793518066 + ], + [ + "▁Disodium", + -15.326117515563965 + ], + [ + "▁banyak", + -15.326127052307127 + ], + [ + "▁Cloverdale", + -15.326128005981444 + ], + [ + "▁osteopathic", + -15.326128959655762 + ], + [ + "▁Guadal", + -15.326130867004396 + ], + [ + "▁renegotiation", + -15.326133728027344 + ], + [ + "▁Brantley", + -15.326143264770508 + ], + [ + "benzyl", + -15.32614803314209 + ], + [ + "▁Lambeau", + -15.32614803314209 + ], + [ + "▁trident", + -15.326148986816406 + ], + [ + "▁£2.50", + -15.326149940490724 + ], + [ + "▁Craftsmanship", + -15.32615089416504 + ], + [ + "Forthcoming", + -15.326151847839355 + ], + [ + "▁Gowda", + -15.326152801513672 + ], + [ + "▁Dorking", + -15.326159477233888 + ], + [ + "ciliary", + -15.326160430908203 + ], + [ + "▁Exmoor", + -15.32616138458252 + ], + [ + "▁Tarmac", + -15.32616138458252 + ], + [ + "▁Euston", + -15.326162338256836 + ], + [ + "▁subcompact", + -15.326162338256836 + ], + [ + "▁Patriarchate", + -15.326177597045898 + ], + [ + "▁Sackville", + -15.326194763183594 + ], + [ + "3:01", + -15.326200485229492 + ], + [ + "▁shuffleboard", + -15.326200485229492 + ], + [ + "▁mitral", + -15.326205253601074 + ], + [ + "▁MzE", + -15.326208114624023 + ], + [ + "▁proscribed", + -15.326223373413086 + ], + [ + "▁NYFW", + -15.326227188110352 + ], + [ + "xlsx", + -15.326238632202148 + ], + [ + "Palooza", + -15.32624053955078 + ], + [ + "▁Blackbeard", + -15.326241493225098 + ], + [ + "▁naivety", + -15.326249122619627 + ], + [ + "▁passphrase", + -15.326260566711426 + ], + [ + "▁Tinkerbell", + -15.32627010345459 + ], + [ + "▁XAML", + -15.32627010345459 + ], + [ + "▁Comerica", + -15.326275825500488 + ], + [ + "▁2005-06", + -15.326281547546388 + ], + [ + "▁Adelson", + -15.326305389404297 + ], + [ + "Referencing", + -15.326306343078612 + ], + [ + "▁Aesop", + -15.32631492614746 + ], + [ + "▁Turque", + -15.326326370239258 + ], + [ + "▁OpenCart", + -15.326338768005373 + ], + [ + "ASAHI", + -15.326345443725586 + ], + [ + "▁Waterstones", + -15.32636547088623 + ], + [ + "▁shoehorn", + -15.32636833190918 + ], + [ + "▁Nigella", + -15.326386451721191 + ], + [ + "▁jetties", + -15.326411247253418 + ], + [ + "ontinence", + -15.32643222808838 + ], + [ + "▁guile", + -15.326438903808594 + ], + [ + "7:43", + -15.326454162597656 + ], + [ + "▁Provencal", + -15.32653522491455 + ], + [ + "▁ulceration", + -15.32654094696045 + ], + [ + "▁Piaf", + -15.326603889465332 + ], + [ + "▁corgi", + -15.326656341552734 + ], + [ + "▁Eubank", + -15.326677322387695 + ], + [ + "▁Leggett", + -15.32671070098877 + ], + [ + "▁Versi", + -15.326714515686035 + ], + [ + "▁Canaanite", + -15.326805114746094 + ], + [ + "▁almanac", + -15.326866149902344 + ], + [ + "▁Climax", + -15.326876640319824 + ], + [ + "▁Hipster", + -15.326892852783203 + ], + [ + "▁tranquilizer", + 
-15.326964378356934 + ], + [ + "Gamification", + -15.327019691467283 + ], + [ + "▁Schaub", + -15.3273344039917 + ], + [ + "▁Twigg", + -15.327435493469238 + ], + [ + "▁bolus", + -15.327860832214355 + ], + [ + "Bibliographic", + -15.32794189453125 + ], + [ + "▁£2,500", + -15.328081130981444 + ], + [ + "curred", + -15.328086853027344 + ], + [ + "UFO", + -15.328299522399902 + ], + [ + "▁Deviant", + -15.328313827514648 + ], + [ + "9:01", + -15.328479766845703 + ], + [ + "5:55", + -15.328500747680664 + ], + [ + "Condemn", + -15.328568458557127 + ], + [ + "▁Thiamin", + -15.328683853149414 + ], + [ + "▁Kishan", + -15.328690528869627 + ], + [ + "▁Altair", + -15.328699111938477 + ], + [ + "3:37", + -15.328774452209473 + ], + [ + "20-0", + -15.329058647155762 + ], + [ + "7:39", + -15.329115867614746 + ], + [ + "▁Priestess", + -15.329166412353516 + ], + [ + "vascularization", + -15.329245567321776 + ], + [ + "1045", + -15.3297700881958 + ], + [ + "lemma", + -15.32985782623291 + ], + [ + "▁parsnip", + -15.330000877380373 + ], + [ + "▁tentacle", + -15.330087661743164 + ], + [ + "Settle", + -15.330162048339844 + ], + [ + "01131", + -15.330177307128906 + ], + [ + "Diaz", + -15.330284118652344 + ], + [ + "▁Hadron", + -15.330293655395508 + ], + [ + "▁Uncertain", + -15.330451011657717 + ], + [ + "▁Rashtr", + -15.330483436584473 + ], + [ + "▁déj", + -15.330581665039062 + ], + [ + "Harbor", + -15.33074951171875 + ], + [ + "▁TRUMP", + -15.330791473388672 + ], + [ + "▁Bausch", + -15.331049919128418 + ], + [ + "▁schlep", + -15.331088066101074 + ], + [ + "bogie", + -15.331457138061523 + ], + [ + "Dieser", + -15.331698417663574 + ], + [ + "Nevermind", + -15.331870079040527 + ], + [ + "▁$1,100", + -15.33189868927002 + ], + [ + "khali", + -15.332119941711426 + ], + [ + "Sudden", + -15.332669258117676 + ], + [ + "Helmet", + -15.332785606384276 + ], + [ + "Ethnic", + -15.332818031311035 + ], + [ + "Behavior", + -15.33285903930664 + ], + [ + "Abortion", + -15.33290958404541 + ], + [ + "Savvy", + -15.332921028137209 + ], + [ + "Whisper", + -15.33292293548584 + ], + [ + "Fatigue", + -15.332924842834473 + ], + [ + "Gypsy", + -15.332932472229004 + ], + [ + "Balancing", + -15.332935333251951 + ], + [ + "Scholastic", + -15.332935333251951 + ], + [ + "Rufus", + -15.332941055297852 + ], + [ + "CONTROL", + -15.332942008972168 + ], + [ + "Infrared", + -15.332942008972168 + ], + [ + "Edmond", + -15.332950592041016 + ], + [ + "appraisal", + -15.332951545715332 + ], + [ + "Decades", + -15.332955360412598 + ], + [ + "initiative", + -15.33295726776123 + ], + [ + "accommodation", + -15.332958221435549 + ], + [ + "attorney", + -15.332958221435549 + ], + [ + "Horror", + -15.332959175109863 + ], + [ + "Toshiba", + -15.332962036132812 + ], + [ + "elephant", + -15.332965850830078 + ], + [ + "Connecticut", + -15.332969665527344 + ], + [ + "measuring", + -15.332971572875977 + ], + [ + "ravaged", + -15.332971572875977 + ], + [ + "Felix", + -15.332980155944824 + ], + [ + "Bollywood", + -15.332983016967772 + ], + [ + "elimination", + -15.33298397064209 + ], + [ + "Balloon", + -15.332984924316406 + ], + [ + "Prague", + -15.332984924316406 + ], + [ + "impression", + -15.332989692687988 + ], + [ + "inclined", + -15.333023071289062 + ], + [ + "tossed", + -15.333029747009276 + ], + [ + "accidentally", + -15.33304214477539 + ], + [ + "toronto", + -15.333051681518556 + ], + [ + "damaging", + -15.333057403564451 + ], + [ + "Commentary", + -15.3330659866333 + ], + [ + "Priest", + -15.333066940307615 + ], + [ + "tented", + -15.333091735839844 + ], + [ + 
"Xavier", + -15.333093643188477 + ], + [ + "fracture", + -15.333107948303224 + ], + [ + "Classification", + -15.333125114440918 + ], + [ + "Epson", + -15.333131790161133 + ], + [ + "revised", + -15.333136558532717 + ], + [ + "pumpkin", + -15.333179473876951 + ], + [ + "Athens", + -15.333208084106444 + ], + [ + "Bedding", + -15.333256721496582 + ], + [ + "bitcoin", + -15.33330535888672 + ], + [ + "meaningful", + -15.333356857299805 + ], + [ + "5:46", + -15.333415031433104 + ], + [ + "Drunk", + -15.33344841003418 + ], + [ + "Frontier", + -15.333477020263672 + ], + [ + "assume", + -15.333621978759766 + ], + [ + "-2-0", + -15.333674430847168 + ], + [ + "Packet", + -15.333924293518066 + ], + [ + "ayuda", + -15.333979606628418 + ], + [ + "73-7", + -15.333990097045898 + ], + [ + "Edmund", + -15.33409595489502 + ], + [ + "escalate", + -15.33414077758789 + ], + [ + "WOOL", + -15.334280967712402 + ], + [ + "shutter", + -15.334402084350586 + ], + [ + "7:11", + -15.334431648254396 + ], + [ + "phoid", + -15.334554672241213 + ], + [ + "Avalon", + -15.334732055664062 + ], + [ + "▁organiza", + -15.334761619567873 + ], + [ + "▁comorbid", + -15.334823608398438 + ], + [ + "▁tyro", + -15.334906578063965 + ], + [ + "▁Satchel", + -15.334915161132812 + ], + [ + "Representation", + -15.334946632385254 + ], + [ + "6:36", + -15.334948539733888 + ], + [ + "mplied", + -15.334956169128418 + ], + [ + "00-051", + -15.33509635925293 + ], + [ + "petite", + -15.335180282592772 + ], + [ + "Schema", + -15.335224151611328 + ], + [ + "▁Dialect", + -15.335349082946776 + ], + [ + "▁welche", + -15.335371017456056 + ], + [ + "biography", + -15.335606575012209 + ], + [ + "▁Shoebox", + -15.335927963256836 + ], + [ + "TIKA", + -15.336249351501465 + ], + [ + "Defeat", + -15.336301803588867 + ], + [ + "3:26", + -15.3363676071167 + ], + [ + "6.0%", + -15.336389541625977 + ], + [ + "▁accentuat", + -15.336691856384276 + ], + [ + "9:15", + -15.336858749389648 + ], + [ + "Goldwyn", + -15.336889266967772 + ], + [ + "olanda", + -15.337122917175291 + ], + [ + "3:04", + -15.337169647216797 + ], + [ + "7:38", + -15.337309837341309 + ], + [ + "▁Cambi", + -15.337369918823242 + ], + [ + "Defend", + -15.33751106262207 + ], + [ + "ummel", + -15.337746620178224 + ], + [ + "▁garda", + -15.338099479675291 + ], + [ + "3:03", + -15.338104248046877 + ], + [ + "▁Synergi", + -15.338556289672852 + ], + [ + "▁dialectical", + -15.338881492614746 + ], + [ + "decyl", + -15.338944435119627 + ], + [ + "Mechanic", + -15.33922004699707 + ], + [ + "▁Spyro", + -15.339391708374023 + ], + [ + "1874", + -15.339543342590332 + ], + [ + "▁Voca", + -15.339544296264648 + ], + [ + "djoining", + -15.340080261230469 + ], + [ + "4–5", + -15.340143203735352 + ], + [ + "altteri", + -15.34018898010254 + ], + [ + "93%", + -15.340453147888184 + ], + [ + "cension", + -15.340658187866213 + ], + [ + "ACTOR", + -15.34068489074707 + ], + [ + "▁£1,500", + -15.340861320495604 + ], + [ + "▁Malala", + -15.340892791748049 + ], + [ + "▁abbreviate", + -15.341066360473633 + ], + [ + "▁mesmerize", + -15.341066360473633 + ], + [ + "sealable", + -15.34108829498291 + ], + [ + "1-08-2018", + -15.341251373291016 + ], + [ + "▁woeful", + -15.34146213531494 + ], + [ + "eclampsia", + -15.341567039489746 + ], + [ + "▁kappa", + -15.341650009155272 + ], + [ + "AWSFirehoseError", + -15.341667175292969 + ], + [ + "Cascading", + -15.341667175292969 + ], + [ + "Donoghue", + -15.341667175292969 + ], + [ + "Herodotus", + -15.341667175292969 + ], + [ + "Stuyvesant", + -15.341667175292969 + ], + [ + "Zolpidem", + 
-15.341667175292969 + ], + [ + "shalimar", + -15.341667175292969 + ], + [ + "ycorrhizal", + -15.341667175292969 + ], + [ + "▁800-376-4281", + -15.341667175292969 + ], + [ + "▁800-540-4530", + -15.341667175292969 + ], + [ + "▁844-244-6199", + -15.341667175292969 + ], + [ + "▁888-229-8830", + -15.341667175292969 + ], + [ + "▁888-349-8884", + -15.341667175292969 + ], + [ + "▁888-400-5746", + -15.341667175292969 + ], + [ + "▁Armidale", + -15.341667175292969 + ], + [ + "▁Bernabeu", + -15.341667175292969 + ], + [ + "▁Cabriolet", + -15.341667175292969 + ], + [ + "▁Cemeteries", + -15.341667175292969 + ], + [ + "▁Charlevoix", + -15.341667175292969 + ], + [ + "▁Chertsey", + -15.341667175292969 + ], + [ + "▁Commendation", + -15.341667175292969 + ], + [ + "▁Dictionaries", + -15.341667175292969 + ], + [ + "▁Elliptical", + -15.341667175292969 + ], + [ + "▁Ganguly", + -15.341667175292969 + ], + [ + "▁Kamchatka", + -15.341667175292969 + ], + [ + "▁Kennebunk", + -15.341667175292969 + ], + [ + "▁Lyndhurst", + -15.341667175292969 + ], + [ + "▁Montemayor", + -15.341667175292969 + ], + [ + "▁Montreux", + -15.341667175292969 + ], + [ + "▁Obstructive", + -15.341667175292969 + ], + [ + "▁Paignton", + -15.341667175292969 + ], + [ + "▁Panchkula", + -15.341667175292969 + ], + [ + "▁Phineas", + -15.341667175292969 + ], + [ + "▁Resiliency", + -15.341667175292969 + ], + [ + "▁SURVEY", + -15.341667175292969 + ], + [ + "▁Sambalpur", + -15.341667175292969 + ], + [ + "▁Streatham", + -15.341667175292969 + ], + [ + "▁Surabaya", + -15.341667175292969 + ], + [ + "▁Sutcliffe", + -15.341667175292969 + ], + [ + "▁Tadalafil", + -15.341667175292969 + ], + [ + "▁Thaddeus", + -15.341667175292969 + ], + [ + "▁Transfiguration", + -15.341667175292969 + ], + [ + "▁Tullamore", + -15.341667175292969 + ], + [ + "▁barbecuing", + -15.341667175292969 + ], + [ + "▁catharsis", + -15.341667175292969 + ], + [ + "▁cockatoo", + -15.341667175292969 + ], + [ + "▁daunted", + -15.341667175292969 + ], + [ + "▁dextrose", + -15.341667175292969 + ], + [ + "▁echocardiogram", + -15.341667175292969 + ], + [ + "▁fiftieth", + -15.341667175292969 + ], + [ + "▁genocidal", + -15.341667175292969 + ], + [ + "▁hemispheric", + -15.341667175292969 + ], + [ + "▁idiosyncrasies", + -15.341667175292969 + ], + [ + "▁illuminator", + -15.341667175292969 + ], + [ + "▁minstrel", + -15.341667175292969 + ], + [ + "▁obstinate", + -15.341667175292969 + ], + [ + "▁pantyhose", + -15.341667175292969 + ], + [ + "▁peugeot", + -15.341667175292969 + ], + [ + "▁proletariat", + -15.341667175292969 + ], + [ + "▁rutabaga", + -15.341667175292969 + ], + [ + "▁whirring", + -15.341667175292969 + ], + [ + "lectrocardiogram", + -15.341668128967283 + ], + [ + "▁800-244-0167", + -15.341668128967283 + ], + [ + "▁888-389-5731", + -15.341668128967283 + ], + [ + "▁Bjork", + -15.341668128967283 + ], + [ + "▁CRYSTAL", + -15.341668128967283 + ], + [ + "▁FRIENDLY", + -15.341668128967283 + ], + [ + "▁GOOGLE", + -15.341668128967283 + ], + [ + "▁Javits", + -15.341668128967283 + ], + [ + "▁McGinnis", + -15.341668128967283 + ], + [ + "▁OPINION", + -15.341668128967283 + ], + [ + "▁Sofitel", + -15.341668128967283 + ], + [ + "▁Szechuan", + -15.341668128967283 + ], + [ + "▁Velazquez", + -15.341668128967283 + ], + [ + "▁ZDNet", + -15.341668128967283 + ], + [ + "▁extensibility", + -15.341668128967283 + ], + [ + "▁haemoglobin", + -15.341668128967283 + ], + [ + "▁linguine", + -15.341668128967283 + ], + [ + "▁mtDNA", + -15.341668128967283 + ], + [ + "Credibility", + -15.341669082641602 + ], + [ + "▁Camarillo", + 
-15.341669082641602 + ], + [ + "▁SEPARATE", + -15.341669082641602 + ], + [ + "▁STRETCH", + -15.341669082641602 + ], + [ + "▁Schmitz", + -15.341669082641602 + ], + [ + "▁Sebelius", + -15.341669082641602 + ], + [ + "▁alabaster", + -15.341669082641602 + ], + [ + "▁astroturf", + -15.341669082641602 + ], + [ + "▁Watauga", + -15.341670989990234 + ], + [ + "▁agronomy", + -15.341670989990234 + ], + [ + "▁atorvastatin", + -15.341670989990234 + ], + [ + "▁petabyte", + -15.341670989990234 + ], + [ + "▁clunker", + -15.34167194366455 + ], + [ + "anthemum", + -15.341672897338867 + ], + [ + "▁WESTERN", + -15.341672897338867 + ], + [ + "Retiring", + -15.341673851013184 + ], + [ + "▁Brainerd", + -15.341673851013184 + ], + [ + "▁QUART", + -15.341673851013184 + ], + [ + "▁Sentencing", + -15.341673851013184 + ], + [ + "▁tangibly", + -15.3416748046875 + ], + [ + "▁Sociological", + -15.341676712036133 + ], + [ + "▁deplore", + -15.341676712036133 + ], + [ + "▁ghoulish", + -15.341676712036133 + ], + [ + "▁goverment", + -15.34167766571045 + ], + [ + "▁memoranda", + -15.34167766571045 + ], + [ + "▁Marengo", + -15.341678619384766 + ], + [ + "▁Pristine", + -15.341678619384766 + ], + [ + "▁gestalt", + -15.341678619384766 + ], + [ + "▁Swenson", + -15.341680526733398 + ], + [ + "▁cubbies", + -15.341681480407717 + ], + [ + "▁Cardenas", + -15.341682434082031 + ], + [ + "▁Celeron", + -15.341682434082031 + ], + [ + "▁Chiapas", + -15.341683387756348 + ], + [ + "▁Fetish", + -15.341683387756348 + ], + [ + "▁Cherbourg", + -15.341684341430664 + ], + [ + "▁Telescopic", + -15.341684341430664 + ], + [ + "▁Igloo", + -15.34168529510498 + ], + [ + "▁submarkets", + -15.34168529510498 + ], + [ + "▁Quarries", + -15.341687202453612 + ], + [ + "▁porcini", + -15.341687202453612 + ], + [ + "Thunderstorms", + -15.341689109802246 + ], + [ + "▁Cyanide", + -15.34169101715088 + ], + [ + "▁fusible", + -15.34169101715088 + ], + [ + "▁kiteboarding", + -15.34169101715088 + ], + [ + "▁Derecho", + -15.341691970825195 + ], + [ + "▁Hostgator", + -15.341692924499512 + ], + [ + "▁Mahendra", + -15.341692924499512 + ], + [ + "▁isotopic", + -15.341693878173828 + ], + [ + "▁Instapaper", + -15.341694831848145 + ], + [ + "▁nubuck", + -15.341696739196776 + ], + [ + "▁stilted", + -15.341696739196776 + ], + [ + "▁Campervan", + -15.341699600219728 + ], + [ + "▁Monkees", + -15.341700553894045 + ], + [ + "▁scupper", + -15.341702461242676 + ], + [ + "▁brulee", + -15.341704368591309 + ], + [ + "▁Huerta", + -15.341707229614258 + ], + [ + "▁Ainsley", + -15.341710090637209 + ], + [ + "▁cassis", + -15.34171199798584 + ], + [ + "Halcyon", + -15.341715812683104 + ], + [ + "▁ONTAP", + -15.341715812683104 + ], + [ + "▁clonal", + -15.341720581054688 + ], + [ + "▁Tameside", + -15.34172248840332 + ], + [ + "▁Palmitate", + -15.341723442077637 + ], + [ + "▁Centrum", + -15.341724395751951 + ], + [ + "▁laggard", + -15.34172534942627 + ], + [ + "▁gallantry", + -15.341726303100586 + ], + [ + "▁Agribusiness", + -15.341729164123535 + ], + [ + "▁Coincidence", + -15.341736793518066 + ], + [ + "trypsin", + -15.341737747192385 + ], + [ + "▁upskilling", + -15.34174346923828 + ], + [ + "▁prismatic", + -15.34174633026123 + ], + [ + "▁LIDAR", + -15.341750144958496 + ], + [ + "▁Angora", + -15.341751098632812 + ], + [ + "▁Jabong", + -15.341752052307127 + ], + [ + "▁Lasagna", + -15.341755867004396 + ], + [ + "▁jotted", + -15.341755867004396 + ], + [ + "▁Warburton", + -15.341756820678713 + ], + [ + "▁provigil", + -15.341756820678713 + ], + [ + "▁Cassius", + -15.341763496398926 + ], + [ + "▁Softail", 
+ -15.341766357421877 + ], + [ + "▁falconry", + -15.341769218444824 + ], + [ + "▁Garuda", + -15.341772079467772 + ], + [ + "▁Tisdale", + -15.341772079467772 + ], + [ + "▁unaddressed", + -15.341774940490724 + ], + [ + "▁Hubspot", + -15.341776847839355 + ], + [ + "▁Fianna", + -15.341778755187988 + ], + [ + "▁heredity", + -15.34178352355957 + ], + [ + "▁(40%)", + -15.341784477233888 + ], + [ + "▁(3-1)", + -15.341802597045898 + ], + [ + "▁(30%)", + -15.341803550720217 + ], + [ + "▁Containment", + -15.341803550720217 + ], + [ + "▁BERLIN", + -15.341805458068848 + ], + [ + "▁HEALTHY", + -15.34181785583496 + ], + [ + "▁ALONE", + -15.34182071685791 + ], + [ + "▁Stamina", + -15.341825485229492 + ], + [ + "▁neophyte", + -15.341826438903809 + ], + [ + "▁Epiphone", + -15.341839790344238 + ], + [ + "▁Standalone", + -15.341840744018556 + ], + [ + "Artikel", + -15.341841697692873 + ], + [ + "▁ALEXA", + -15.341842651367188 + ], + [ + "▁Altria", + -15.341853141784668 + ], + [ + "▁STARTED", + -15.341858863830566 + ], + [ + "▁(312)", + -15.341863632202148 + ], + [ + "▁gumtree", + -15.341891288757324 + ], + [ + "▁pilaf", + -15.34189224243164 + ], + [ + "▁Catwoman", + -15.34190273284912 + ], + [ + "▁(303)", + -15.341903686523438 + ], + [ + "▁Compens", + -15.341904640197754 + ], + [ + "▁MasterChef", + -15.341904640197754 + ], + [ + "▁Dedham", + -15.34190559387207 + ], + [ + "▁YKK", + -15.341915130615234 + ], + [ + "▁Carnage", + -15.3419189453125 + ], + [ + "▁Laminator", + -15.341924667358398 + ], + [ + "00-800", + -15.341957092285156 + ], + [ + "▁lampoon", + -15.341970443725586 + ], + [ + "▁Ishq", + -15.34197235107422 + ], + [ + "▁faintest", + -15.342019081115724 + ], + [ + "▁Haider", + -15.342023849487305 + ], + [ + "▁Dianna", + -15.342046737670898 + ], + [ + "cereus", + -15.342220306396484 + ], + [ + "▁Kandahar", + -15.342220306396484 + ], + [ + "▁prosaic", + -15.34222412109375 + ], + [ + "▁ascendant", + -15.342290878295898 + ], + [ + "67531", + -15.342305183410645 + ], + [ + "▁LibraryThing", + -15.34235668182373 + ], + [ + "▁Tromp", + -15.34256362915039 + ], + [ + "▁Okada", + -15.342571258544922 + ], + [ + "3050", + -15.342692375183104 + ], + [ + "▁Kiew", + -15.34271240234375 + ], + [ + "▁Gordie", + -15.342728614807127 + ], + [ + "▁Pimple", + -15.3427734375 + ], + [ + "▁Racket", + -15.342780113220217 + ], + [ + "2019-04-16", + -15.342924118041992 + ], + [ + "Fertilize", + -15.342961311340332 + ], + [ + "▁Lorca", + -15.342971801757812 + ], + [ + "▁DIET", + -15.343084335327148 + ], + [ + "phew", + -15.34316062927246 + ], + [ + "Submerge", + -15.343218803405762 + ], + [ + "▁Relish", + -15.343575477600098 + ], + [ + "1466", + -15.344023704528809 + ], + [ + "Loren", + -15.34409999847412 + ], + [ + "UJA", + -15.34429931640625 + ], + [ + "eglia", + -15.344550132751465 + ], + [ + "▁Skylark", + -15.344900131225586 + ], + [ + "mekong", + -15.345189094543455 + ], + [ + "Compost", + -15.345632553100586 + ], + [ + "▁coincident", + -15.34564971923828 + ], + [ + "▁Taleb", + -15.345842361450195 + ], + [ + "▁Grohl", + -15.345962524414062 + ], + [ + "1718", + -15.346233367919922 + ], + [ + "Humid", + -15.346415519714355 + ], + [ + "stituto", + -15.346532821655272 + ], + [ + "▁Belmond", + -15.346623420715332 + ], + [ + "Ooooh", + -15.34667682647705 + ], + [ + "arachne", + -15.34746265411377 + ], + [ + "ALKER", + -15.34763240814209 + ], + [ + "▁crimin", + -15.347899436950684 + ], + [ + "▁Rasta", + -15.348000526428224 + ], + [ + "bongo", + -15.34800910949707 + ], + [ + "▁Gradle", + -15.348608016967772 + ], + [ + "estimation", + 
-15.348891258239746 + ], + [ + "▁SIXT", + -15.34889793395996 + ], + [ + "Cascade", + -15.348934173583984 + ], + [ + "Familiar", + -15.348998069763184 + ], + [ + "Protocol", + -15.349019050598145 + ], + [ + "Politicians", + -15.349021911621094 + ], + [ + "Violent", + -15.34902286529541 + ], + [ + "04/19/2019", + -15.349032402038574 + ], + [ + "Celebrities", + -15.349040985107422 + ], + [ + "Nineteen", + -15.349040985107422 + ], + [ + "Whistle", + -15.349055290222168 + ], + [ + "Acquisition", + -15.3490629196167 + ], + [ + "Recreational", + -15.349063873291016 + ], + [ + "Generating", + -15.349064826965332 + ], + [ + "UNITED", + -15.34906768798828 + ], + [ + "Yvonne", + -15.349068641662598 + ], + [ + "invitation", + -15.349072456359863 + ], + [ + "congrats", + -15.34907341003418 + ], + [ + "Mohammad", + -15.34908390045166 + ], + [ + "Laundry", + -15.349085807800291 + ], + [ + "Lieutenant", + -15.34908676147461 + ], + [ + "continuing", + -15.34908962249756 + ], + [ + "Literary", + -15.349092483520508 + ], + [ + "NVIDIA", + -15.349092483520508 + ], + [ + "abdominal", + -15.349092483520508 + ], + [ + "Renaissance", + -15.349095344543455 + ], + [ + "SERVICE", + -15.349098205566406 + ], + [ + "Norwich", + -15.349102973937988 + ], + [ + "Sandwich", + -15.349103927612305 + ], + [ + "duplicate", + -15.349103927612305 + ], + [ + "BlackBerry", + -15.349119186401367 + ], + [ + "Charging", + -15.34912109375 + ], + [ + "literature", + -15.34912109375 + ], + [ + "accessibility", + -15.349133491516112 + ], + [ + "bureau", + -15.349139213562012 + ], + [ + "Bureau", + -15.349149703979492 + ], + [ + "potency", + -15.349154472351074 + ], + [ + "Expertise", + -15.349157333374023 + ], + [ + "Graduation", + -15.349161148071287 + ], + [ + "Kelley", + -15.349183082580566 + ], + [ + "5:27", + -15.34919548034668 + ], + [ + "balloon", + -15.34926414489746 + ], + [ + "worry", + -15.34927463531494 + ], + [ + "lmaz", + -15.34933853149414 + ], + [ + "Spurs", + -15.349345207214355 + ], + [ + "Newark", + -15.349346160888672 + ], + [ + "47-5", + -15.349420547485352 + ], + [ + "Mediation", + -15.349431037902832 + ], + [ + "cocktail", + -15.34943389892578 + ], + [ + "chasing", + -15.349435806274414 + ], + [ + "▁Sammi", + -15.34952449798584 + ], + [ + "authoritarianism", + -15.34957790374756 + ], + [ + "MARKET", + -15.349620819091797 + ], + [ + "Disinfect", + -15.349631309509276 + ], + [ + "molding", + -15.34963607788086 + ], + [ + "BENEFITS", + -15.349709510803224 + ], + [ + "Trademarkia", + -15.349848747253418 + ], + [ + "▁Ginni", + -15.349915504455566 + ], + [ + "Microbe", + -15.350008010864258 + ], + [ + "greave", + -15.350227355957031 + ], + [ + "▁$8.9", + -15.350384712219238 + ], + [ + "excess", + -15.35052490234375 + ], + [ + "rambler", + -15.350605010986328 + ], + [ + "7:31", + -15.350671768188477 + ], + [ + "▁Florent", + -15.35097599029541 + ], + [ + "▁$300.00", + -15.351052284240724 + ], + [ + "alumni", + -15.35126781463623 + ], + [ + "▁Ullman", + -15.351515769958496 + ], + [ + "▁Idiom", + -15.351672172546388 + ], + [ + "▁Nagle", + -15.352378845214844 + ], + [ + "UTO", + -15.352551460266112 + ], + [ + "▁Essa", + -15.352559089660645 + ], + [ + "Sexy", + -15.352590560913086 + ], + [ + "▁Waitress", + -15.352622032165527 + ], + [ + "▁quai", + -15.352745056152344 + ], + [ + "▁Tragic", + -15.352747917175291 + ], + [ + "Occurr", + -15.35290813446045 + ], + [ + "▁Kerch", + -15.353365898132324 + ], + [ + "▁Szym", + -15.353940963745115 + ], + [ + "▁Becket", + -15.354106903076172 + ], + [ + "$10,000", + -15.354374885559082 + ], 
+ [ + "9:17", + -15.355157852172852 + ], + [ + "▁malign", + -15.355303764343262 + ], + [ + "Eradicat", + -15.355386734008787 + ], + [ + "▁urethra", + -15.355433464050291 + ], + [ + "6:31", + -15.355488777160645 + ], + [ + "19.00", + -15.355533599853516 + ], + [ + "▁Tassel", + -15.356368064880373 + ], + [ + "deepwater", + -15.356420516967772 + ], + [ + "▁Illuminate", + -15.356945991516112 + ], + [ + "▁Molok", + -15.357088088989258 + ], + [ + "▁PROFIT", + -15.357107162475586 + ], + [ + "ujitsu", + -15.35714626312256 + ], + [ + "Semite", + -15.357314109802246 + ], + [ + "▁12.04", + -15.357461929321287 + ], + [ + "FFFFFF", + -15.357536315917969 + ], + [ + "Assembling", + -15.357540130615234 + ], + [ + "adrenergic", + -15.357540130615234 + ], + [ + "ahmedabad", + -15.357540130615234 + ], + [ + "▁800-466-6240", + -15.357540130615234 + ], + [ + "▁844-244-3120", + -15.357540130615234 + ], + [ + "▁Amplified", + -15.357540130615234 + ], + [ + "▁Ancillary", + -15.357540130615234 + ], + [ + "▁Bechdel", + -15.357540130615234 + ], + [ + "▁Benzoate", + -15.357540130615234 + ], + [ + "▁Beresford", + -15.357540130615234 + ], + [ + "▁Bhattacharya", + -15.357540130615234 + ], + [ + "▁Calabasas", + -15.357540130615234 + ], + [ + "▁Catriona", + -15.357540130615234 + ], + [ + "▁Claptrap", + -15.357540130615234 + ], + [ + "▁Clematis", + -15.357540130615234 + ], + [ + "▁Consumable", + -15.357540130615234 + ], + [ + "▁DECISION", + -15.357540130615234 + ], + [ + "▁Hurghada", + -15.357540130615234 + ], + [ + "▁ISLAMABAD", + -15.357540130615234 + ], + [ + "▁Inmarsat", + -15.357540130615234 + ], + [ + "▁Lafferty", + -15.357540130615234 + ], + [ + "▁McHale", + -15.357540130615234 + ], + [ + "▁Muhammed", + -15.357540130615234 + ], + [ + "▁NAVIGATION", + -15.357540130615234 + ], + [ + "▁Ngorongoro", + -15.357540130615234 + ], + [ + "▁OWASP", + -15.357540130615234 + ], + [ + "▁Onondaga", + -15.357540130615234 + ], + [ + "▁POSITIVE", + -15.357540130615234 + ], + [ + "▁Peculiar", + -15.357540130615234 + ], + [ + "▁Penticton", + -15.357540130615234 + ], + [ + "▁Rodriquez", + -15.357540130615234 + ], + [ + "▁STEERING", + -15.357540130615234 + ], + [ + "▁Sarbanes", + -15.357540130615234 + ], + [ + "▁Satisfactory", + -15.357540130615234 + ], + [ + "▁Tenderloin", + -15.357540130615234 + ], + [ + "▁Virtuoso", + -15.357540130615234 + ], + [ + "▁Wellbutrin", + -15.357540130615234 + ], + [ + "▁Wittgenstein", + -15.357540130615234 + ], + [ + "▁abominable", + -15.357540130615234 + ], + [ + "▁antipasti", + -15.357540130615234 + ], + [ + "▁apostasy", + -15.357540130615234 + ], + [ + "▁chambray", + -15.357540130615234 + ], + [ + "▁chlamydia", + -15.357540130615234 + ], + [ + "▁crostini", + -15.357540130615234 + ], + [ + "▁hemorrhaging", + -15.357540130615234 + ], + [ + "▁literacies", + -15.357540130615234 + ], + [ + "▁liturgies", + -15.357540130615234 + ], + [ + "▁nVidia", + -15.357540130615234 + ], + [ + "▁pulverizing", + -15.357540130615234 + ], + [ + "▁sebagai", + -15.357540130615234 + ], + [ + "▁también", + -15.357540130615234 + ], + [ + "▁troponin", + -15.357540130615234 + ], + [ + "▁virtuosic", + -15.357540130615234 + ], + [ + "Versatility", + -15.35754108428955 + ], + [ + "Zoroastrian", + -15.35754108428955 + ], + [ + "▁Appomattox", + -15.35754108428955 + ], + [ + "▁Duolingo", + -15.35754108428955 + ], + [ + "▁Ingraham", + -15.35754108428955 + ], + [ + "▁Kelleher", + -15.35754108428955 + ], + [ + "▁Kilmarnock", + -15.35754108428955 + ], + [ + "▁Kuznetsov", + -15.35754108428955 + ], + [ + "▁Phenomenon", + -15.35754108428955 + ], + 
[ + "▁Tambayan", + -15.35754108428955 + ], + [ + "▁confederate", + -15.35754108428955 + ], + [ + "▁debauchery", + -15.35754108428955 + ], + [ + "▁indoctrination", + -15.35754108428955 + ], + [ + "▁pejorative", + -15.35754108428955 + ], + [ + "▁profesional", + -15.35754108428955 + ], + [ + "▁reservist", + -15.35754108428955 + ], + [ + "▁unadorned", + -15.35754108428955 + ], + [ + "MINIMUM", + -15.357542037963867 + ], + [ + "Pendulum", + -15.357542037963867 + ], + [ + "▁Molasses", + -15.357542037963867 + ], + [ + "▁Placerville", + -15.357542037963867 + ], + [ + "▁Yountville", + -15.357542037963867 + ], + [ + "▁Zeitgeist", + -15.357542037963867 + ], + [ + "▁cradling", + -15.357542037963867 + ], + [ + "▁omnipotent", + -15.357542037963867 + ], + [ + "▁prodigal", + -15.357542037963867 + ], + [ + "amsterdam", + -15.357542991638184 + ], + [ + "▁GRANITE", + -15.357542991638184 + ], + [ + "▁KINGDOM", + -15.357542991638184 + ], + [ + "▁sledgehammer", + -15.357542991638184 + ], + [ + "Lokomotiv", + -15.3575439453125 + ], + [ + "▁Napolitano", + -15.3575439453125 + ], + [ + "▁transversal", + -15.3575439453125 + ], + [ + "▁traversal", + -15.3575439453125 + ], + [ + "▁FUJI", + -15.357545852661133 + ], + [ + "▁Poirier", + -15.35754680633545 + ], + [ + "▁attunement", + -15.35754680633545 + ], + [ + "▁excitable", + -15.35754680633545 + ], + [ + "▁Gompa", + -15.357547760009766 + ], + [ + "▁diatribe", + -15.357547760009766 + ], + [ + "▁InBev", + -15.357548713684082 + ], + [ + "▁Kabuki", + -15.357548713684082 + ], + [ + "▁Occult", + -15.357548713684082 + ], + [ + "▁Taobao", + -15.357548713684082 + ], + [ + "▁athleisure", + -15.357548713684082 + ], + [ + "▁denigrate", + -15.357548713684082 + ], + [ + "▁humerus", + -15.357548713684082 + ], + [ + "▁Summaries", + -15.357551574707031 + ], + [ + "▁5/8′′", + -15.357553482055664 + ], + [ + "▁Birkbeck", + -15.357553482055664 + ], + [ + "▁McKeown", + -15.357553482055664 + ], + [ + "▁Sylvie", + -15.357553482055664 + ], + [ + "▁Aisling", + -15.35755443572998 + ], + [ + "▁altimeter", + -15.357555389404297 + ], + [ + "▁sabbath", + -15.357555389404297 + ], + [ + "▁meritocracy", + -15.357556343078612 + ], + [ + "▁smooch", + -15.357556343078612 + ], + [ + "▁Almeria", + -15.357559204101562 + ], + [ + "▁Hayneedle", + -15.357559204101562 + ], + [ + "▁Nephew", + -15.35756015777588 + ], + [ + "▁Ehlers", + -15.357561111450195 + ], + [ + "▁vagrant", + -15.357561111450195 + ], + [ + "▁SCUBA", + -15.357562065124512 + ], + [ + "▁Casumo", + -15.357563018798828 + ], + [ + "▁discontinuous", + -15.357563018798828 + ], + [ + "▁McClen", + -15.357563972473145 + ], + [ + "▁Sturridge", + -15.357563972473145 + ], + [ + "▁Illuminating", + -15.35756492614746 + ], + [ + "▁Dhanbad", + -15.357565879821776 + ], + [ + "▁COUPON", + -15.35756778717041 + ], + [ + "▁Debugging", + -15.35756778717041 + ], + [ + "▁Riyal", + -15.357568740844728 + ], + [ + "▁Tuvalu", + -15.357568740844728 + ], + [ + "▁IRCTC", + -15.357571601867676 + ], + [ + "▁Methodologies", + -15.357571601867676 + ], + [ + "▁bourse", + -15.357571601867676 + ], + [ + "▁Carneros", + -15.357573509216309 + ], + [ + "▁disaffected", + -15.357574462890623 + ], + [ + "▁enslavement", + -15.357574462890623 + ], + [ + "▁Sheepskin", + -15.357576370239258 + ], + [ + "▁Poehler", + -15.357577323913574 + ], + [ + "▁derision", + -15.35758113861084 + ], + [ + "▁Braunschweig", + -15.357583999633787 + ], + [ + "▁Fluorite", + -15.357586860656738 + ], + [ + "▁Lindstrom", + -15.357586860656738 + ], + [ + "▁Sukkot", + -15.357588768005373 + ], + [ + "▁maimed", + 
-15.357589721679688 + ], + [ + "▁kingfisher", + -15.35759162902832 + ], + [ + "▁SCORM", + -15.357593536376951 + ], + [ + "▁Energizer", + -15.357596397399902 + ], + [ + "▁Ecran", + -15.357598304748535 + ], + [ + "▁Zentangle", + -15.357600212097168 + ], + [ + "▁sintered", + -15.357603073120115 + ], + [ + "amylase", + -15.357605934143066 + ], + [ + "▁Libertad", + -15.357605934143066 + ], + [ + "▁(1964)", + -15.3576078414917 + ], + [ + "▁Scarves", + -15.35761260986328 + ], + [ + "▁Screensaver", + -15.35761547088623 + ], + [ + "▁Bulletproof", + -15.357616424560549 + ], + [ + "▁Carrasco", + -15.357622146606444 + ], + [ + "▁sociocultural", + -15.357622146606444 + ], + [ + "▁talons", + -15.357623100280762 + ], + [ + "▁Analogue", + -15.357624053955078 + ], + [ + "▁Unwanted", + -15.357625007629396 + ], + [ + "▁Vicarage", + -15.357625007629396 + ], + [ + "▁Reasonably", + -15.357625961303713 + ], + [ + "▁PROUD", + -15.357632637023926 + ], + [ + "▁Employability", + -15.357635498046877 + ], + [ + "▁Sevier", + -15.357636451721191 + ], + [ + "agogic", + -15.357637405395508 + ], + [ + "▁Whitworth", + -15.357644081115724 + ], + [ + "▁Humberside", + -15.35764503479004 + ], + [ + "▁Benioff", + -15.357645988464355 + ], + [ + "▁Chauhan", + -15.357645988464355 + ], + [ + "pyridine", + -15.357646942138672 + ], + [ + "▁Osseo", + -15.35764980316162 + ], + [ + "▁SHUT", + -15.35764980316162 + ], + [ + "▁brogue", + -15.357656478881836 + ], + [ + "▁townsfolk", + -15.357659339904783 + ], + [ + "▁Foshan", + -15.357664108276367 + ], + [ + "▁sarcastically", + -15.357665061950684 + ], + [ + "▁Segmentation", + -15.357667922973633 + ], + [ + "▁Incidence", + -15.357670783996582 + ], + [ + "▁McWilliams", + -15.357675552368164 + ], + [ + "▁titling", + -15.357675552368164 + ], + [ + "alexander", + -15.357677459716797 + ], + [ + "▁FXCM", + -15.357677459716797 + ], + [ + "▁Venturi", + -15.357684135437012 + ], + [ + "4:32", + -15.357686042785645 + ], + [ + "▁Longfellow", + -15.357687950134276 + ], + [ + "▁produk", + -15.357690811157228 + ], + [ + "▁Scholz", + -15.357695579528809 + ], + [ + "▁Holborn", + -15.35770034790039 + ], + [ + "▁Bhaskar", + -15.357702255249023 + ], + [ + "▁Modbury", + -15.357708930969238 + ], + [ + "▁macaw", + -15.357709884643556 + ], + [ + "▁TERRI", + -15.357712745666504 + ], + [ + "▁Folau", + -15.357717514038086 + ], + [ + "▁Hannaford", + -15.357721328735352 + ], + [ + "▁bobblehead", + -15.357722282409668 + ], + [ + "▁Minimalism", + -15.357723236083984 + ], + [ + "▁(360)", + -15.3577299118042 + ], + [ + "▁Tencel", + -15.357738494873049 + ], + [ + "▁Hubei", + -15.357748985290527 + ], + [ + "▁Ambrosia", + -15.357763290405272 + ], + [ + "▁Perseus", + -15.357789039611816 + ], + [ + "▁Ventana", + -15.357789993286133 + ], + [ + "▁NationMaster", + -15.357793807983398 + ], + [ + "▁Eriksen", + -15.357810020446776 + ], + [ + "7:54", + -15.357816696166992 + ], + [ + "▁Fiery", + -15.357817649841309 + ], + [ + "▁Mangum", + -15.35787868499756 + ], + [ + "▁Manifestation", + -15.35788345336914 + ], + [ + "▁Shinji", + -15.357888221740724 + ], + [ + "▁£5.00", + -15.357903480529783 + ], + [ + "▁juego", + -15.357934951782228 + ], + [ + "▁Lekki", + -15.35794162750244 + ], + [ + "▁distribu", + -15.357954978942873 + ], + [ + "▁CHRISTIAN", + -15.357987403869627 + ], + [ + "▁Neeraj", + -15.357989311218262 + ], + [ + "▁Authenticator", + -15.358030319213867 + ], + [ + "▁Motif", + -15.358091354370115 + ], + [ + "▁Caprice", + -15.35813808441162 + ], + [ + "▁Trudy", + -15.358149528503418 + ], + [ + "Sacral", + -15.358269691467283 + ], + 
[ + "7900", + -15.35832691192627 + ], + [ + "Claudine", + -15.35832691192627 + ], + [ + "HSBC", + -15.358373641967772 + ], + [ + "eitner", + -15.358376502990724 + ], + [ + "aheed", + -15.35842514038086 + ], + [ + "▁Paltz", + -15.358549118041992 + ], + [ + "▁Flintstone", + -15.35861110687256 + ], + [ + "▁Schiavo", + -15.358624458312988 + ], + [ + "bbett", + -15.358697891235352 + ], + [ + "Compelling", + -15.358728408813477 + ], + [ + "▁wherewithal", + -15.358732223510742 + ], + [ + "▁Gansu", + -15.358869552612305 + ], + [ + "germain", + -15.35890769958496 + ], + [ + "▁Padova", + -15.358925819396973 + ], + [ + "▁ganglia", + -15.358957290649414 + ], + [ + "▁Subcontractor", + -15.359014511108398 + ], + [ + "QUIZ", + -15.359095573425291 + ], + [ + "STEVE", + -15.359100341796877 + ], + [ + "8:58", + -15.359106063842772 + ], + [ + "▁Benzyl", + -15.359111785888672 + ], + [ + "▁faerie", + -15.35927391052246 + ], + [ + "cluding", + -15.359342575073242 + ], + [ + "Snoring", + -15.359472274780272 + ], + [ + "▁Slush", + -15.359553337097168 + ], + [ + "Wombat", + -15.3600435256958 + ], + [ + "▁Farooq", + -15.360121726989746 + ], + [ + "▁Gregorio", + -15.360152244567873 + ], + [ + "▁Arnett", + -15.36026668548584 + ], + [ + "EXUS", + -15.360549926757812 + ], + [ + "sourceforge", + -15.360590934753418 + ], + [ + "murti", + -15.360633850097656 + ], + [ + "2(1):", + -15.360666275024414 + ], + [ + "adoption", + -15.361075401306152 + ], + [ + "84%", + -15.361257553100586 + ], + [ + "3.56", + -15.361329078674316 + ], + [ + "3:58", + -15.36137580871582 + ], + [ + "▁Multan", + -15.36168384552002 + ], + [ + "valdi", + -15.361905097961426 + ], + [ + "tradition", + -15.362020492553713 + ], + [ + "▁congratulation", + -15.362037658691406 + ], + [ + "▁jogger", + -15.362115859985352 + ], + [ + "▁Lavish", + -15.362211227416992 + ], + [ + "2,3,4", + -15.362261772155762 + ], + [ + "▁Pagli", + -15.362441062927246 + ], + [ + "▁AMAZON", + -15.36264705657959 + ], + [ + "dipine", + -15.362960815429688 + ], + [ + "▁kamu", + -15.363046646118164 + ], + [ + "Obj", + -15.363299369812012 + ], + [ + "Arsenic", + -15.363303184509276 + ], + [ + "ASSET", + -15.363476753234863 + ], + [ + "RATOR", + -15.363524436950684 + ], + [ + "▁arbitrate", + -15.363551139831545 + ], + [ + "syndrom", + -15.363625526428224 + ], + [ + "▁Gewinn", + -15.363764762878418 + ], + [ + "▁Lemur", + -15.363791465759276 + ], + [ + "Sterile", + -15.36404514312744 + ], + [ + "Extractions", + -15.364161491394045 + ], + [ + "crem", + -15.36430549621582 + ], + [ + "▁informaci", + -15.364639282226562 + ], + [ + "4:56", + -15.364700317382812 + ], + [ + "locution", + -15.364806175231934 + ], + [ + "25-54", + -15.36506462097168 + ], + [ + "Intervention", + -15.365331649780272 + ], + [ + "structuring", + -15.365365028381348 + ], + [ + "surfacing", + -15.365371704101562 + ], + [ + "Cousin", + -15.3654203414917 + ], + [ + "Presumably", + -15.3654203414917 + ], + [ + "Sourcing", + -15.365433692932127 + ], + [ + "Hypnosis", + -15.365436553955078 + ], + [ + "WELCOME", + -15.365443229675291 + ], + [ + "Exquisite", + -15.36544418334961 + ], + [ + "Embed", + -15.365448951721191 + ], + [ + "sulfide", + -15.365450859069824 + ], + [ + "Upgrading", + -15.365452766418455 + ], + [ + "millennial", + -15.365453720092772 + ], + [ + "Scheduling", + -15.365456581115724 + ], + [ + "Miriam", + -15.365468978881836 + ], + [ + "Grief", + -15.365469932556152 + ], + [ + "politician", + -15.365469932556152 + ], + [ + "Intensive", + -15.365471839904783 + ], + [ + "Vampire", + -15.36547565460205 + ], + [ 
+ "Netanyahu", + -15.365476608276367 + ], + [ + "foreclosure", + -15.365476608276367 + ], + [ + "zombie", + -15.365476608276367 + ], + [ + "Announced", + -15.365477561950684 + ], + [ + "catheter", + -15.365477561950684 + ], + [ + "defensive", + -15.36548137664795 + ], + [ + "Coyote", + -15.365483283996582 + ], + [ + "deliberate", + -15.365486145019531 + ], + [ + "Cyprus", + -15.365488052368164 + ], + [ + "Boutique", + -15.36548900604248 + ], + [ + "Trojan", + -15.36548900604248 + ], + [ + "magnitude", + -15.36548900604248 + ], + [ + "agriculture", + -15.365492820739746 + ], + [ + "collaborative", + -15.365492820739746 + ], + [ + "Madonna", + -15.36549472808838 + ], + [ + "terrible", + -15.36549472808838 + ], + [ + "Rahman", + -15.365498542785645 + ], + [ + "Rahul", + -15.365501403808594 + ], + [ + "Kylie", + -15.365504264831545 + ], + [ + "welfare", + -15.365504264831545 + ], + [ + "Intended", + -15.36550521850586 + ], + [ + "Vietnamese", + -15.365508079528809 + ], + [ + "SECTION", + -15.36550998687744 + ], + [ + "equitable", + -15.365514755249023 + ], + [ + "criteria", + -15.36551570892334 + ], + [ + "Julius", + -15.365518569946287 + ], + [ + "Determination", + -15.365522384643556 + ], + [ + "Holmes", + -15.365530967712402 + ], + [ + "Northwestern", + -15.365535736083984 + ], + [ + "TODAY", + -15.36553955078125 + ], + [ + "Geography", + -15.3655424118042 + ], + [ + "Nottingham", + -15.365556716918944 + ], + [ + "Documentary", + -15.365561485290527 + ], + [ + "Pedal", + -15.365572929382324 + ], + [ + "Walnut", + -15.365591049194336 + ], + [ + "psychotic", + -15.365602493286133 + ], + [ + "Kendall", + -15.365615844726562 + ], + [ + "Defendant", + -15.36566925048828 + ], + [ + "hostile", + -15.36569118499756 + ], + [ + "Curtain", + -15.365694046020508 + ], + [ + "nesthesiologists", + -15.365777969360352 + ], + [ + "acquainted", + -15.365808486938477 + ], + [ + "infinite", + -15.36583137512207 + ], + [ + "assistant", + -15.365837097167969 + ], + [ + "Morrow", + -15.36585521697998 + ], + [ + "clause", + -15.365978240966797 + ], + [ + "scanner", + -15.366033554077148 + ], + [ + "onnier", + -15.366117477416992 + ], + [ + "houston", + -15.366132736206056 + ], + [ + "9:45", + -15.366181373596191 + ], + [ + "▁05:3", + -15.366389274597168 + ], + [ + "3:31", + -15.366586685180664 + ], + [ + "▁Nesb", + -15.367030143737791 + ], + [ + "Albania", + -15.36769962310791 + ], + [ + "7:37", + -15.367833137512209 + ], + [ + "0.89", + -15.367941856384276 + ], + [ + "perkins", + -15.368087768554688 + ], + [ + "1165", + -15.368363380432127 + ], + [ + "▁unripe", + -15.368492126464844 + ], + [ + "▁Sabe", + -15.368596076965332 + ], + [ + "▁$699", + -15.368597984313965 + ], + [ + "▁Ioan", + -15.36879062652588 + ], + [ + "▁televis", + -15.369022369384766 + ], + [ + "2018-12-2", + -15.369205474853516 + ], + [ + "▁TASTE", + -15.36932373046875 + ], + [ + "▁secede", + -15.369677543640137 + ], + [ + "voort", + -15.369762420654297 + ], + [ + "histochemistry", + -15.370433807373049 + ], + [ + "▁electromagnet", + -15.370676040649414 + ], + [ + "viene", + -15.371183395385742 + ], + [ + "topathological", + -15.371910095214844 + ], + [ + "▁Elyse", + -15.37212371826172 + ], + [ + "1609", + -15.37229061126709 + ], + [ + "▁Filipp", + -15.37229347229004 + ], + [ + "тат", + -15.372779846191406 + ], + [ + "▁snook", + -15.372797012329102 + ], + [ + "▁Shiga", + -15.372885704040527 + ], + [ + "▁galvanize", + -15.373069763183594 + ], + [ + "▁refurb", + -15.373458862304688 + ], + [ + ")(2)(", + -15.37347412109375 + ], + [ + "======", + 
-15.373600006103516 + ], + [ + "▁IMPORT", + -15.37362575531006 + ], + [ + "▁SUPPLIE", + -15.373663902282717 + ], + [ + "▁[2019-03-2", + -15.37366771697998 + ], + [ + "Anticipating", + -15.373669624328612 + ], + [ + "Hierarchical", + -15.373669624328612 + ], + [ + "TEHRAN", + -15.373669624328612 + ], + [ + "Turbulence", + -15.373669624328612 + ], + [ + "Turbulent", + -15.373669624328612 + ], + [ + "centrifugation", + -15.373669624328612 + ], + [ + "echocardiography", + -15.373669624328612 + ], + [ + "▁AbbVie", + -15.373669624328612 + ], + [ + "▁América", + -15.373669624328612 + ], + [ + "▁Anniversaries", + -15.373669624328612 + ], + [ + "▁Beulah", + -15.373669624328612 + ], + [ + "▁Cairngorms", + -15.373669624328612 + ], + [ + "▁Caruana", + -15.373669624328612 + ], + [ + "▁Chenoweth", + -15.373669624328612 + ], + [ + "▁Cousteau", + -15.373669624328612 + ], + [ + "▁DeRozan", + -15.373669624328612 + ], + [ + "▁Edouard", + -15.373669624328612 + ], + [ + "▁Espinoza", + -15.373669624328612 + ], + [ + "▁Eugenio", + -15.373669624328612 + ], + [ + "▁Exorcist", + -15.373669624328612 + ], + [ + "▁FORTUNE", + -15.373669624328612 + ], + [ + "▁Fiduciary", + -15.373669624328612 + ], + [ + "▁Fuerteventura", + -15.373669624328612 + ], + [ + "▁Gormley", + -15.373669624328612 + ], + [ + "▁Higuain", + -15.373669624328612 + ], + [ + "▁Khaimah", + -15.373669624328612 + ], + [ + "▁Khajuraho", + -15.373669624328612 + ], + [ + "▁Lufkin", + -15.373669624328612 + ], + [ + "▁McDonagh", + -15.373669624328612 + ], + [ + "▁Naughton", + -15.373669624328612 + ], + [ + "▁OPERATOR", + -15.373669624328612 + ], + [ + "▁Pleiades", + -15.373669624328612 + ], + [ + "▁PyeongChang", + -15.373669624328612 + ], + [ + "▁Quinnipiac", + -15.373669624328612 + ], + [ + "▁Shaftesbury", + -15.373669624328612 + ], + [ + "▁Trifecta", + -15.373669624328612 + ], + [ + "▁Twitpic", + -15.373669624328612 + ], + [ + "▁Urquhart", + -15.373669624328612 + ], + [ + "▁Wissenschaft", + -15.373669624328612 + ], + [ + "▁affinities", + -15.373669624328612 + ], + [ + "▁ambiguities", + -15.373669624328612 + ], + [ + "▁amitriptyline", + -15.373669624328612 + ], + [ + "▁cuisinart", + -15.373669624328612 + ], + [ + "▁diametrically", + -15.373669624328612 + ], + [ + "▁difficile", + -15.373669624328612 + ], + [ + "▁eclipsing", + -15.373669624328612 + ], + [ + "▁encyclical", + -15.373669624328612 + ], + [ + "▁flamethrower", + -15.373669624328612 + ], + [ + "▁foolhardy", + -15.373669624328612 + ], + [ + "▁glorification", + -15.373669624328612 + ], + [ + "▁intertwining", + -15.373669624328612 + ], + [ + "▁moissanite", + -15.373669624328612 + ], + [ + "▁norepinephrine", + -15.373669624328612 + ], + [ + "▁penniless", + -15.373669624328612 + ], + [ + "▁plusieurs", + -15.373669624328612 + ], + [ + "▁presumptuous", + -15.373669624328612 + ], + [ + "▁ransacked", + -15.373669624328612 + ], + [ + "▁teflon", + -15.373669624328612 + ], + [ + "▁threshing", + -15.373669624328612 + ], + [ + "▁unreliability", + -15.373669624328612 + ], + [ + "▁untethered", + -15.373669624328612 + ], + [ + "▁vaccinating", + -15.373669624328612 + ], + [ + "▁zambia", + -15.373669624328612 + ], + [ + "reperfusion", + -15.37367057800293 + ], + [ + "▁Ankeny", + -15.37367057800293 + ], + [ + "▁Briscoe", + -15.37367057800293 + ], + [ + "▁Creuset", + -15.37367057800293 + ], + [ + "▁Rudraksha", + -15.37367057800293 + ], + [ + "▁Telefonica", + -15.37367057800293 + ], + [ + "▁Viscosity", + -15.37367057800293 + ], + [ + "▁hedonistic", + -15.37367057800293 + ], + [ + "▁miffed", + -15.37367057800293 + ], + [ + 
"▁militarization", + -15.37367057800293 + ], + [ + "▁Demetrius", + -15.373671531677246 + ], + [ + "▁Hochschule", + -15.373671531677246 + ], + [ + "▁Venkatesh", + -15.373671531677246 + ], + [ + "▁killexams", + -15.373671531677246 + ], + [ + "▁ravaging", + -15.373671531677246 + ], + [ + "▁vmware", + -15.373671531677246 + ], + [ + "Annuities", + -15.373672485351562 + ], + [ + "▁BIRTH", + -15.373672485351562 + ], + [ + "▁Nucleus", + -15.373672485351562 + ], + [ + "▁Whistleblower", + -15.37367343902588 + ], + [ + "▁aliexpress", + -15.37367343902588 + ], + [ + "▁meerkat", + -15.37367343902588 + ], + [ + "▁adorably", + -15.373674392700195 + ], + [ + "▁distributive", + -15.373674392700195 + ], + [ + "▁epitope", + -15.373675346374512 + ], + [ + "Addendum", + -15.373676300048828 + ], + [ + "▁Tyrolean", + -15.373676300048828 + ], + [ + "▁shuttling", + -15.373676300048828 + ], + [ + "▁cytology", + -15.37367820739746 + ], + [ + "▁senescence", + -15.373679161071776 + ], + [ + "▁Thyssen", + -15.373682022094728 + ], + [ + "▁Nikhil", + -15.373682975769045 + ], + [ + "▁disentangle", + -15.373682975769045 + ], + [ + "▁ALMOST", + -15.37368392944336 + ], + [ + "▁Qualities", + -15.373684883117676 + ], + [ + "▁immaturity", + -15.373684883117676 + ], + [ + "▁misdiagnosis", + -15.373684883117676 + ], + [ + "▁Accomplishment", + -15.373685836791992 + ], + [ + "▁Harlingen", + -15.373685836791992 + ], + [ + "▁LIBOR", + -15.373686790466309 + ], + [ + "▁Malkovich", + -15.373686790466309 + ], + [ + "▁Mizrahi", + -15.373687744140623 + ], + [ + "▁comparably", + -15.373687744140623 + ], + [ + "▁favicon", + -15.373687744140623 + ], + [ + "▁Texaco", + -15.37368869781494 + ], + [ + "▁paratrooper", + -15.37368869781494 + ], + [ + "▁Schaffer", + -15.373690605163574 + ], + [ + "▁unidirectional", + -15.373690605163574 + ], + [ + "Elaborate", + -15.37369155883789 + ], + [ + "▁PROMOTION", + -15.37369155883789 + ], + [ + "▁Lindgren", + -15.373692512512209 + ], + [ + "▁Lingerie", + -15.37369441986084 + ], + [ + "▁Saheb", + -15.37369441986084 + ], + [ + "▁Stadler", + -15.373696327209473 + ], + [ + "▁Trestle", + -15.373696327209473 + ], + [ + "▁tahoe", + -15.373696327209473 + ], + [ + "▁STRIKE", + -15.373698234558104 + ], + [ + "▁jogged", + -15.373698234558104 + ], + [ + "▁maxillary", + -15.373699188232422 + ], + [ + "▁mythologies", + -15.373699188232422 + ], + [ + "▁Ballantine", + -15.373700141906738 + ], + [ + "▁Viscose", + -15.373700141906738 + ], + [ + "▁Displacement", + -15.373701095581056 + ], + [ + "▁Telecaster", + -15.373701095581056 + ], + [ + "▁papacy", + -15.373702049255373 + ], + [ + "▁Mahinda", + -15.373703002929688 + ], + [ + "▁Ketogenic", + -15.373703956604004 + ], + [ + "▁crummy", + -15.373703956604004 + ], + [ + "▁Parsonage", + -15.37370491027832 + ], + [ + "▁Hammett", + -15.373705863952637 + ], + [ + "▁liveaboard", + -15.373705863952637 + ], + [ + "▁CLARK", + -15.373706817626951 + ], + [ + "▁marathi", + -15.373706817626951 + ], + [ + "▁cornflour", + -15.37370777130127 + ], + [ + "▁metamorphic", + -15.37370777130127 + ], + [ + "▁Komatsu", + -15.373711585998535 + ], + [ + "▁Shakira", + -15.373713493347168 + ], + [ + "▁Vergara", + -15.373714447021484 + ], + [ + "▁Thornbury", + -15.3737154006958 + ], + [ + "▁Brainiac", + -15.373716354370115 + ], + [ + "▁Pontoon", + -15.37371826171875 + ], + [ + "▁Giotto", + -15.373723030090332 + ], + [ + "▁Latency", + -15.373724937438965 + ], + [ + "▁hailstorm", + -15.373724937438965 + ], + [ + "▁Bloodhound", + -15.373727798461914 + ], + [ + "Auberge", + -15.373730659484863 + ], + [ + 
"▁PLDT", + -15.373730659484863 + ], + [ + "compatibilities", + -15.37373161315918 + ], + [ + "▁ponderosa", + -15.373736381530762 + ], + [ + "▁Venturing", + -15.373739242553713 + ], + [ + "▁pommel", + -15.373740196228027 + ], + [ + "▁VIVO", + -15.373753547668455 + ], + [ + "▁Mahomes", + -15.373756408691406 + ], + [ + "▁Kayleigh", + -15.37375831604004 + ], + [ + "▁WCAG", + -15.37375831604004 + ], + [ + "▁Crysis", + -15.37376308441162 + ], + [ + "▁groping", + -15.373764038085938 + ], + [ + "▁dictatorial", + -15.37376880645752 + ], + [ + "▁Screwdriver", + -15.373772621154783 + ], + [ + "▁Affective", + -15.373773574829102 + ], + [ + "▁preeclampsia", + -15.373773574829102 + ], + [ + "▁Sybase", + -15.373779296875 + ], + [ + "▁Kampot", + -15.373784065246582 + ], + [ + "▁Yeung", + -15.37380599975586 + ], + [ + "▁nanowire", + -15.373817443847656 + ], + [ + "▁PATIO", + -15.373820304870604 + ], + [ + "▁Kwame", + -15.37382698059082 + ], + [ + "▁(512)", + -15.373830795288086 + ], + [ + "▁biplane", + -15.373835563659668 + ], + [ + "▁recapping", + -15.373836517333984 + ], + [ + "▁Prezzi", + -15.3738374710083 + ], + [ + "▁Hazara", + -15.373838424682615 + ], + [ + "▁(510)", + -15.373845100402832 + ], + [ + "▁Trevino", + -15.373846054077148 + ], + [ + "▁fanaticism", + -15.37388038635254 + ], + [ + "▁Nonsense", + -15.37388515472412 + ], + [ + "▁motorcade", + -15.373889923095703 + ], + [ + "▁Zadar", + -15.373894691467283 + ], + [ + "▁Backhoe", + -15.373908042907717 + ], + [ + "▁Durkin", + -15.373910903930664 + ], + [ + "▁Opaque", + -15.37391471862793 + ], + [ + "Disadvantages", + -15.373915672302246 + ], + [ + "▁mommies", + -15.373919486999512 + ], + [ + "▁Agilent", + -15.373920440673828 + ], + [ + "▁Kawhi", + -15.373934745788574 + ], + [ + "▁Kilmer", + -15.373961448669434 + ], + [ + "zeichn", + -15.373992919921877 + ], + [ + "▁Macedon", + -15.373997688293455 + ], + [ + "▁canapes", + -15.374008178710938 + ], + [ + "▁Bentham", + -15.374009132385254 + ], + [ + "▁homologous", + -15.374018669128418 + ], + [ + "▁FOAM", + -15.37403964996338 + ], + [ + "▁Farrah", + -15.374051094055176 + ], + [ + "69-2", + -15.374073028564451 + ], + [ + "▁Januar", + -15.374112129211426 + ], + [ + "Oblique", + -15.37411403656006 + ], + [ + "Corkscrew", + -15.374135971069336 + ], + [ + "kommt", + -15.374135971069336 + ], + [ + "300-135", + -15.374197006225586 + ], + [ + "TITUDE", + -15.374237060546877 + ], + [ + "9,600", + -15.37424087524414 + ], + [ + "Freckle", + -15.374436378479004 + ], + [ + "▁Torsion", + -15.374449729919434 + ], + [ + "8:26", + -15.374520301818848 + ], + [ + "5,00", + -15.374971389770508 + ], + [ + "▁Saini", + -15.37509059906006 + ], + [ + ".09%", + -15.375178337097168 + ], + [ + "Meditate", + -15.375286102294922 + ], + [ + "▁sublicense", + -15.375417709350586 + ], + [ + "Quiring", + -15.375432014465332 + ], + [ + "▁destin", + -15.375574111938477 + ], + [ + "stabilized", + -15.376097679138184 + ], + [ + "0-071", + -15.37615966796875 + ], + [ + "Refracti", + -15.376287460327148 + ], + [ + "peptic", + -15.376383781433104 + ], + [ + "microenvironment", + -15.376440048217772 + ], + [ + "▁Distri", + -15.376457214355469 + ], + [ + "▁Turret", + -15.376495361328123 + ], + [ + "7777", + -15.376940727233888 + ], + [ + "Redefining", + -15.3770751953125 + ], + [ + "0:09", + -15.377082824707031 + ], + [ + "▁Pietra", + -15.37740421295166 + ], + [ + "webhost", + -15.377455711364746 + ], + [ + "8:48", + -15.377476692199709 + ], + [ + "▁psychoanalytic", + -15.37752914428711 + ], + [ + "1335", + -15.377619743347168 + ], + [ + 
"▁Happies", + -15.377798080444336 + ], + [ + "▁Tamale", + -15.37809944152832 + ], + [ + "▁Seidel", + -15.378358840942385 + ], + [ + "▁Muppet", + -15.37840175628662 + ], + [ + "▁Quota", + -15.378403663635254 + ], + [ + "▁polyphenol", + -15.378445625305176 + ], + [ + "utyrate", + -15.378512382507324 + ], + [ + "saari", + -15.37852668762207 + ], + [ + "▁WASTE", + -15.379033088684082 + ], + [ + "▁$6,500", + -15.379261016845703 + ], + [ + ".06%", + -15.379310607910156 + ], + [ + "constitutional", + -15.379323959350586 + ], + [ + "▁$6.4", + -15.37989902496338 + ], + [ + "ospermum", + -15.380112648010254 + ] + ], + "byte_fallback": false + } +} \ No newline at end of file diff --git a/tokenizer_config.json b/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..43b22263235c39b835df95d771567d2b8e8311f4 --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,967 @@ +{ + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "3": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "4": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "5": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "6": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "7": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "8": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "9": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "10": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "11": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "12": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "13": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "14": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "15": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "16": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "17": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "18": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "19": { + "content": "", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "20": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "21": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "22": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "23": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "24": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "25": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "26": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "27": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "28": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "29": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "30": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "31": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "33": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "34": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "35": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "36": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "37": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "38": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "39": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "40": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "41": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "42": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "43": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "44": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "45": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": 
false, + "special": true + }, + "46": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "47": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "48": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "49": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "50": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "51": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "52": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "53": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "54": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "55": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "56": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "57": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "58": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "59": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "60": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "61": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "62": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "63": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "64": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "65": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "66": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "67": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "68": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "69": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "70": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "71": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "72": { + 
"content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "73": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "74": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "75": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "76": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "77": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "78": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "79": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "80": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "81": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "82": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "83": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "84": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "85": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "86": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "87": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "88": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "89": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "90": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "91": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "92": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "93": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "94": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "95": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "96": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "97": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "98": { + "content": "", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "99": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "100": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "101": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "102": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "103": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "104": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "105": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": true, + "eos_token": "", + "full_tokenizer_file": null, + "mask_token": "", + "mask_token_sent": "", + "model_max_length": 4000, + "offset": 103, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "PegasusTokenizer", + "unk_token": "" +} diff --git a/training_args.bin b/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..414eb5b4ae768f65d552836e385fb98e691e2ae3 --- /dev/null +++ b/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:23e12409b9c9cfd7d7b52182fc564798e45e5487778afffb135affefc52abd19 +size 5176